diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 865f5df601bc8..b701315ae968b 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.52.0 +current_version = 0.57.3 commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(\-[a-z]+)? diff --git a/.editorconfig b/.editorconfig index ca4c327686160..9b977f3960d54 100644 --- a/.editorconfig +++ b/.editorconfig @@ -23,6 +23,93 @@ ij_json_spaces_within_braces = false ij_json_spaces_within_brackets = false ij_json_wrap_long_lines = false +[{*.kt,*.kts}] +indent_style = space +insert_final_newline = true +max_line_length = 100 +indent_size = 4 +ij_continuation_indent_size = 4 +ij_java_names_count_to_use_import_on_demand = 9999 +ij_kotlin_align_in_columns_case_branch = false +ij_kotlin_align_multiline_binary_operation = false +ij_kotlin_align_multiline_extends_list = false +ij_kotlin_align_multiline_method_parentheses = false +ij_kotlin_align_multiline_parameters = true +ij_kotlin_align_multiline_parameters_in_calls = false +ij_kotlin_allow_trailing_comma = true +ij_kotlin_allow_trailing_comma_on_call_site = true +ij_kotlin_assignment_wrap = normal +ij_kotlin_blank_lines_after_class_header = 0 +ij_kotlin_blank_lines_around_block_when_branches = 0 +ij_kotlin_blank_lines_before_declaration_with_comment_or_annotation_on_separate_line = 1 +ij_kotlin_block_comment_at_first_column = true +ij_kotlin_call_parameters_new_line_after_left_paren = true +ij_kotlin_call_parameters_right_paren_on_new_line = false +ij_kotlin_call_parameters_wrap = on_every_item +ij_kotlin_catch_on_new_line = false +ij_kotlin_class_annotation_wrap = split_into_lines +ij_kotlin_code_style_defaults = KOTLIN_OFFICIAL +ij_kotlin_continuation_indent_for_chained_calls = true +ij_kotlin_continuation_indent_for_expression_bodies = true +ij_kotlin_continuation_indent_in_argument_lists = true +ij_kotlin_continuation_indent_in_elvis = false +ij_kotlin_continuation_indent_in_if_conditions = false +ij_kotlin_continuation_indent_in_parameter_lists = false +ij_kotlin_continuation_indent_in_supertype_lists = false +ij_kotlin_else_on_new_line = false +ij_kotlin_enum_constants_wrap = off +ij_kotlin_extends_list_wrap = normal +ij_kotlin_field_annotation_wrap = off +ij_kotlin_finally_on_new_line = false +ij_kotlin_if_rparen_on_new_line = false +ij_kotlin_import_nested_classes = false +ij_kotlin_imports_layout = * +ij_kotlin_insert_whitespaces_in_simple_one_line_method = true +ij_kotlin_keep_blank_lines_before_right_brace = 2 +ij_kotlin_keep_blank_lines_in_code = 2 +ij_kotlin_keep_blank_lines_in_declarations = 2 +ij_kotlin_keep_first_column_comment = true +ij_kotlin_keep_indents_on_empty_lines = false +ij_kotlin_keep_line_breaks = true +ij_kotlin_lbrace_on_next_line = false +ij_kotlin_line_comment_add_space = false +ij_kotlin_line_comment_at_first_column = true +ij_kotlin_method_annotation_wrap = split_into_lines +ij_kotlin_method_call_chain_wrap = normal +ij_kotlin_method_parameters_new_line_after_left_paren = true +ij_kotlin_method_parameters_right_paren_on_new_line = true +ij_kotlin_method_parameters_wrap = on_every_item +ij_kotlin_name_count_to_use_star_import = 9999 +ij_kotlin_name_count_to_use_star_import_for_members = 9999 +ij_kotlin_parameter_annotation_wrap = off +ij_kotlin_space_after_comma = true +ij_kotlin_space_after_extend_colon = true +ij_kotlin_space_after_type_colon = true +ij_kotlin_space_before_catch_parentheses = true +ij_kotlin_space_before_comma = false +ij_kotlin_space_before_extend_colon = true +ij_kotlin_space_before_for_parentheses = true +ij_kotlin_space_before_if_parentheses = true +ij_kotlin_space_before_lambda_arrow = true +ij_kotlin_space_before_type_colon = false +ij_kotlin_space_before_when_parentheses = true +ij_kotlin_space_before_while_parentheses = true +ij_kotlin_spaces_around_additive_operators = true +ij_kotlin_spaces_around_assignment_operators = true +ij_kotlin_spaces_around_equality_operators = true +ij_kotlin_spaces_around_function_type_arrow = true +ij_kotlin_spaces_around_logical_operators = true +ij_kotlin_spaces_around_multiplicative_operators = true +ij_kotlin_spaces_around_range = false +ij_kotlin_spaces_around_relational_operators = true +ij_kotlin_spaces_around_unary_operator = false +ij_kotlin_spaces_around_when_arrow = true +ij_kotlin_variable_annotation_wrap = off +ij_kotlin_while_on_new_line = false +ij_kotlin_wrap_elvis_expressions = 1 +ij_kotlin_wrap_expression_body_functions = 1 +ij_kotlin_wrap_first_method_in_call_chain = false + [{*.markdown,*.md}] ij_markdown_force_one_space_after_blockquote_symbol = true ij_markdown_force_one_space_after_header_symbol = true diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index b3b9368f31d47..7367643197a9a 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -6,63 +6,48 @@ /airbyte-integrations/connectors/destination-chroma @airbytehq/ai-language-models /airbyte-cdk/python/airbyte_cdk/destinations/vector_db_based @airbytehq/ai-language-models -# CDK and Connector Acceptance Tests +# CI/CD +/.github/ @airbytehq/connector-extensibility +/airbyte-ci/ @airbytehq/connector-extensibility + +# Python CDK and Connector Acceptance Tests /airbyte-cdk/python @airbytehq/connector-extensibility /airbyte-integrations/connector-templates/ @airbytehq/connector-extensibility /airbyte-integrations/bases/connector-acceptance-test/ @airbytehq/connector-extensibility @lazebnyi @oustynova +# Build customization file change +/airbyte-integrations/connectors/**/build_customization.py @airbytehq/connector-extensibility + # Protocol related items /docs/understanding-airbyte/airbyte-protocol.md @airbytehq/protocol-reviewers -# Normalization -/airbyte-integrations/bases/base-normalization/ @airbytehq/destinations - -# Java-based connectors -/airbyte-integrations/bases/base-java/ @airbytehq/jdbc-connectors - -# Java-based source connectors -/airbyte-integrations/bases/debezium-v1-4-2/ @airbytehq/dbsources -/airbyte-integrations/bases/debezium-v1-9-6/ @airbytehq/dbsources -/airbyte-integrations/connectors/source-jdbc/ @airbytehq/dbsources -/airbyte-integrations/connectors/source-alloydb/ @airbytehq/dbsources -/airbyte-integrations/connectors/source-bigquery/ @airbytehq/dbsources -/airbyte-integrations/connectors/source-clickhouse/ @airbytehq/dbsources -/airbyte-integrations/connectors/source-cockroachdb/ @airbytehq/dbsources -/airbyte-integrations/connectors/source-db2/ @airbytehq/dbsources +# Java CDK +/airbyte-cdk/java/airbyte-cdk @airbytehq/dbsources @airbytehq/destinations +/airbyte-cdk/java/airbyte-cdk/*-sources/ @airbytehq/dbsources +/airbyte-cdk/java/airbyte-cdk/*-destinations/ @airbytehq/destinations +/airbyte-cdk/java/airbyte-cdk/typing-deduping/ @airbytehq/destinations + +# Java connectors catch-all +/buildSrc/ @airbytehq/dbsources @airbytehq/destinations +/airbyte-integrations/connectors/source-*/**/*.java @airbytehq/dbsources +/airbyte-integrations/connectors/source-*/**/*.kt @airbytehq/dbsources +/airbyte-integrations/connectors/source-*/**/*.gradle @airbytehq/dbsources +/airbyte-integrations/connectors-performance/source-harness/ @airbytehq/dbsources +/airbyte-integrations/connectors/destination-*/**/*.java @airbytehq/destinations +/airbyte-integrations/connectors/destination-*/**/*.kt @airbytehq/destinations +/airbyte-integrations/connectors/destination-*/**/*.gradle @airbytehq/destinations +/airbyte-integrations/connectors-performance/destination-harness/ @airbytehq/dbsources + +# Java-based certified or incubating source connectors +/airbyte-integrations/connectors/source-mongodb-v2/ @airbytehq/dbsources /airbyte-integrations/connectors/source-mssql/ @airbytehq/dbsources /airbyte-integrations/connectors/source-mysql/ @airbytehq/dbsources -/airbyte-integrations/connectors/source-oracle/ @airbytehq/dbsources /airbyte-integrations/connectors/source-postgres/ @airbytehq/dbsources -/airbyte-integrations/connectors/source-redshift/ @airbytehq/dbsources -/airbyte-integrations/connectors/source-snowflake/ @airbytehq/dbsources -/airbyte-integrations/connectors/source-tidb/ @airbytehq/dbsources -# Java-based destination connectors -airbyte-cdk/java/airbyte-cdk/db-destinations/ @airbytehq/destinations -airbyte-cdk/java/airbyte-cdk/s3-destinations/ @airbytehq/destinations -airbyte-cdk/java/airbyte-cdk/typing-deduping/ @airbytehq/destinations -/airbyte-integrations/bases/standard-destination-test/ @airbytehq/destinations -/airbyte-integrations/bases/base-java-s3/ @airbytehq/destinations -/airbyte-integrations/bases/bases-destination-jdbc/ @airbytehq/destinations +# Java-based certified or incubating destination connectors /airbyte-integrations/connectors/destination-bigquery/ @airbytehq/destinations -/airbyte-integrations/connectors/destination-bigquery-denormalized/ @airbytehq/destinations -/airbyte-integrations/connectors/destination-azure-blob-storage/ @airbytehq/destinations -/airbyte-integrations/connectors/destination-clickhouse/ @airbytehq/destinations -/airbyte-integrations/connectors/destination-databricks/ @airbytehq/destinations -/airbyte-integrations/connectors/destination-gcs/ @airbytehq/destinations -/airbyte-integrations/connectors/destination-mariadb-columnstore/ @airbytehq/destinations -/airbyte-integrations/connectors/destination-mysql/ @airbytehq/destinations -/airbyte-integrations/connectors/destination-mssql/ @airbytehq/destinations -/airbyte-integrations/connectors/destination-oracle/ @airbytehq/destinations /airbyte-integrations/connectors/destination-postgres/ @airbytehq/destinations -/airbyte-integrations/connectors/destination-redshift/ @airbytehq/destinations -/airbyte-integrations/connectors/destination-rockset/ @airbytehq/destinations +/airbyte-integrations/connectors/destination-postgres-strict-encrypt/ @airbytehq/destinations /airbyte-integrations/connectors/destination-s3/ @airbytehq/destinations /airbyte-integrations/connectors/destination-snowflake/ @airbytehq/destinations -/airbyte-integrations/connectors/destination-tidb/ @airbytehq/destinations - -# Build customization file change -/airbyte-integrations/connectors/**/build_customization.py @airbytehq/connector-extensibility - -# airbyte-ci -/airbyte-ci @airbytehq/connector-extensibility +/airbyte-integrations/connectors/destination-redshift/ @airbytehq/destinations diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index 12a69ca2c40ea..7b358a9906fff 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -1,6 +1,12 @@ --- blank_issues_enabled: false contact_links: - - name: Ask a question, get community support or request new features/connectors - url: https://github.com/airbytehq/airbyte/discussions/ - about: Use Github Discussion to request features/connectors or discuss ideas or issues. + - name: Ask a question or get help troubleshooting issues + url: https://github.com/airbytehq/airbyte/discussions/new?category=questions + about: Use GitHub Discussions to engage with fellow users. Share insights on best practices, address issues, and explore potential workarounds collaboratively. + - name: Request a New Connector + url: https://github.com/airbytehq/airbyte/discussions/new?category=new-connector-request + about: Request a New Source or a New Destination Connector. + - name: Request a New Feature + url: https://github.com/airbytehq/airbyte/discussions/new?category=ideas-and-features + about: A new idea or want to discuss a new feature, here is the place. diff --git a/.github/ISSUE_TEMPLATE/issue-cli.yaml b/.github/ISSUE_TEMPLATE/issue-cli.yaml new file mode 100644 index 0000000000000..aad00d2519d9f --- /dev/null +++ b/.github/ISSUE_TEMPLATE/issue-cli.yaml @@ -0,0 +1,26 @@ +name: CLI Issue +about: Use this when you're using the Octavia CLI. +title: "Octavia CLI Issue: " +labels: [type/bug, area/octavia-cli, needs-triage] +body: + - type: input + id: cli-version + attributes: + label: Octavia CLI Version + description: Give the Octavia CLI version you're using. + validations: + required: true + - type: textarea + id: description + attributes: + label: Revelant information + description: Please give any aditional information you have your steps to reproduce the problem. + - type: textarea + id: logs + attributes: + label: Relevant log output + description: | + Please copy and paste any relevant log output. + This will be automatically formatted into code, so no need for backticks. + We strongly recommend to upload the log file to further debugging. + render: shell diff --git a/.github/ISSUE_TEMPLATE/issue-connector.yaml b/.github/ISSUE_TEMPLATE/issue-connector.yaml new file mode 100644 index 0000000000000..96712c59d26ce --- /dev/null +++ b/.github/ISSUE_TEMPLATE/issue-connector.yaml @@ -0,0 +1,51 @@ +name: Connector Issue +about: Use this when you're facing issue with connector +title: "Connector Issue: " +labels: [type/bug, area/connectors, needs-triage] +body: + - type: input + id: connector-name + attributes: + label: Connector Name + description: Give the connector name in form of airbyte/source-pokeapi, airbyte/destination-snowflake + validations: + required: true + - type: input + id: connector-version + attributes: + label: Connector Version + description: Give the connector version you're using. + validations: + required: true + - type: dropdown + id: step + attributes: + label: What step the error happened? + multiple: false + options: + - Configuring a new connector + - During the sync + - Updating the connector + - Other + - type: textarea + id: description + attributes: + label: Revelant information + description: Please give any aditional information you have your steps to reproduce the problem. + - type: textarea + id: logs + attributes: + label: Relevant log output + description: | + Please copy and paste any relevant log output. + This will be automatically formatted into code, so no need for backticks. + We strongly recommend to upload the log file to further debugging. + render: shell + - type: checkboxes + id: submit-pr + attributes: + label: Contribute + description: Are you willing to submit the fix? + options: + - label: Yes + required: true diff --git a/.github/ISSUE_TEMPLATE/issue-platform.yaml b/.github/ISSUE_TEMPLATE/issue-platform.yaml new file mode 100644 index 0000000000000..d42bb161a8d1c --- /dev/null +++ b/.github/ISSUE_TEMPLATE/issue-platform.yaml @@ -0,0 +1,36 @@ +name: Platform Issue +about: Use this when you're facing a platform, deploy or infrastructure issue. +title: "Platform Issue: " +labels: [type/bug, area/platform, needs-triage] +body: + - type: input + id: platform-version + attributes: + label: Platform Version + description: Give the Airbyte Platform version you're using. + validations: + required: true + - type: dropdown + id: step + attributes: + label: What step the error happened? + multiple: false + options: + - On deploy + - During the Sync + - Upgrading the Platform + - Other + - type: textarea + id: description + attributes: + label: Revelant information + description: Please give any aditional information you have your steps to reproduce the problem. + - type: textarea + id: logs + attributes: + label: Relevant log output + description: | + Please copy and paste any relevant log output. + This will be automatically formatted into code, so no need for backticks. + We strongly recommend to upload the log file to further debugging. + render: shell diff --git a/.github/ISSUE_TEMPLATE/new-integration-request.yaml b/.github/ISSUE_TEMPLATE/new-integration-request.yaml new file mode 100644 index 0000000000000..56bccc13636b9 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/new-integration-request.yaml @@ -0,0 +1,37 @@ +name: New Connector Request +about: Use this to request a new connector +title: "" +labels: [area/connectors, new-connector] +body: + - type: input + id: connector-name + attributes: + label: Connector Name + description: What is the service or database you want to integrate + validations: + required: true + - type: dropdown + id: type + attributes: + label: What type of integration + multiple: false + options: + - Source + - Destination + - type: textarea + id: description + attributes: + label: Revelant Information + description: >- + Why do you need this integration? How does your team intend to use the data? This helps us understand the use case. + How often do you want to run syncs? + If this is an API source connector, which entities/endpoints do you need supported? + If the connector is for a paid service, can we name you as a mutual user when we subscribe for an account? Which company should we name? + - type: checkboxes + id: submit-pr + attributes: + label: Contribute + description: Are you willing to submit the fix? + options: + - label: Yes + required: true diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 85f751847748d..14bbbdf7b43dc 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -1,109 +1,28 @@ +## What -## What -*Describe what the change is solving* -*It helps to add screenshots if it affects the frontend.* - ## How -*Describe the solution* - -## Recommended reading order -1. `x.java` -2. `y.python` - -## 🚨 User Impact 🚨 -*Are there any breaking changes? What is the end result perceived by the user?* - -*For connector PRs, use this section to explain which type of semantic versioning bump occurs as a result of the changes. Refer to our [Semantic Versioning for Connectors](https://docs.airbyte.com/contributing-to-airbyte/#semantic-versioning-for-connectors) guidelines for more information. **Breaking changes to connectors must be documented by an Airbyte engineer (PR author, or reviewer for community PRs) by using the [Breaking Change Release Playbook](https://docs.google.com/document/d/1VYQggHbL_PN0dDDu7rCyzBLGRtX-R3cpwXaY8QxEgzw/edit).*** - -*If there are breaking changes, please merge this PR with the 🚨🚨 emoji so changelog authors can further highlight this if needed.* - - -## Pre-merge Actions -*Expand the relevant checklist and delete the others.* - -
New Connector - -### Community member or Airbyter - -- **Community member?** Grant edit access to maintainers ([instructions](https://docs.github.com/en/github/collaborating-with-pull-requests/working-with-forks/allowing-changes-to-a-pull-request-branch-created-from-a-fork#enabling-repository-maintainer-permissions-on-existing-pull-requests)) -- Unit & integration tests added and passing. Community members, please provide proof of success locally e.g: screenshot or copy-paste unit, integration, and acceptance test output. To run acceptance tests for a Python connector, follow instructions in the README. For java connectors run `./gradlew :airbyte-integrations:connectors::integrationTest`. -- Connector version is set to `0.0.1` - - `Dockerfile` has version `0.0.1` -- Documentation updated - - Connector's `README.md` - - Connector's `bootstrap.md`. See [description and examples](https://docs.google.com/document/d/1ypdgmwmEHWv-TrO4_YOQ7pAJGVrMp5BOkEVh831N260/edit?usp=sharing) - - `docs/integrations//.md` including changelog with an entry for the initial version. See changelog [example](https://docs.airbyte.io/integrations/sources/stripe#changelog) - - `docs/integrations/README.md` - -### Airbyter - -If this is a community PR, the Airbyte engineer reviewing this PR is responsible for the below items. - -- Create a non-forked branch based on this PR and test the below items on it -- Build is successful -- If new credentials are required for use in CI, add them to GSM. [Instructions](https://docs.airbyte.io/connector-development#using-credentials-in-ci). - -
- -
Updating a connector - -### Community member or Airbyter - -- Grant edit access to maintainers ([instructions](https://docs.github.com/en/github/collaborating-with-pull-requests/working-with-forks/allowing-changes-to-a-pull-request-branch-created-from-a-fork#enabling-repository-maintainer-permissions-on-existing-pull-requests)) -- Unit & integration tests added - - -### Airbyter - -If this is a community PR, the Airbyte engineer reviewing this PR is responsible for the below items. - -- Create a non-forked branch based on this PR and test the below items on it -- Build is successful -- If new credentials are required for use in CI, add them to GSM. [Instructions](https://docs.airbyte.io/connector-development#using-credentials-in-ci). - -
- -
Connector Generator - -- Issue acceptance criteria met -- PR name follows [PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook) -- If adding a new generator, add it to the [list of scaffold modules being tested](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connector-templates/generator/build.gradle#L41) -- The generator test modules (all connectors with `-scaffold` in their name) have been updated with the latest scaffold by running `./gradlew :airbyte-integrations:connector-templates:generator:generateScaffolds` then checking in your changes -- Documentation which references the generator is updated as needed - -
- -
Updating the Python CDK + -### Airbyter +## Review guide + -Before merging: -- Pull Request description explains what problem it is solving -- Code change is unit tested -- Build and my-py check pass -- Smoke test the change on at least one affected connector - - On Github: Run [this workflow](https://github.com/airbytehq/airbyte/actions/workflows/connectors_tests.yml), passing `--use-local-cdk --name=source-` as options - - Locally: `airbyte-ci connectors --use-local-cdk --name=source- test` -- PR is reviewed and approved - -After merging: -- [Publish the CDK](https://github.com/airbytehq/airbyte/actions/workflows/publish-cdk-command-manually.yml) - - The CDK does not follow proper semantic versioning. Choose minor if this the change has significant user impact or is a breaking change. Choose patch otherwise. - - Write a thoughtful changelog message so we know what was updated. -- Merge the platform PR that was auto-created for updating the Connector Builder's CDK version - - This step is optional if the change does not affect the connector builder or declarative connectors. +## User Impact + -
+## Can this PR be safely reverted and rolled back? + +- [ ] YES 💚 +- [ ] NO ❌ diff --git a/.github/secret_scanning.yml b/.github/secret_scanning.yml new file mode 100644 index 0000000000000..09662852598c9 --- /dev/null +++ b/.github/secret_scanning.yml @@ -0,0 +1,4 @@ +# GitHub Secret Scanning config +paths-ignore: + # Ignore sample configurations in airbyte-integrations. + - "airbyte-integrations/connectors/**" diff --git a/.github/workflows/airbyte-ci-tests.yml b/.github/workflows/airbyte-ci-tests.yml index a3b2461b07fe4..d1a72c6fe44a2 100644 --- a/.github/workflows/airbyte-ci-tests.yml +++ b/.github/workflows/airbyte-ci-tests.yml @@ -30,15 +30,14 @@ jobs: with: # Note: expressions within a filter are OR'ed filters: | + # This list is duplicated in `pipelines/airbyte_ci/test/__init__.py` internal_poetry_packages: - - airbyte-lib/** - airbyte-ci/connectors/pipelines/** - airbyte-ci/connectors/base_images/** - airbyte-ci/connectors/common_utils/** - airbyte-ci/connectors/connector_ops/** - airbyte-ci/connectors/connectors_qa/** - airbyte-ci/connectors/ci_credentials/** - - airbyte-ci/connectors/live-tests/** - airbyte-ci/connectors/metadata_service/lib/** - airbyte-ci/connectors/metadata_service/orchestrator/** - airbyte-integrations/bases/connector-acceptance-test/** diff --git a/.github/workflows/connector_code_freeze.yml b/.github/workflows/connector_code_freeze.yml index a5ac4c8a36616..03d3bce37a8f5 100644 --- a/.github/workflows/connector_code_freeze.yml +++ b/.github/workflows/connector_code_freeze.yml @@ -46,7 +46,7 @@ jobs: - name: Get changed files if: steps.check-code-freeze-in-effect.outputs.is_in_code_freeze == 'true' id: changed-files - uses: tj-actions/changed-files@v40 + uses: tj-actions/changed-files@v44 with: files_yaml: | connectors: diff --git a/.github/workflows/connectors_tests.yml b/.github/workflows/connectors_tests.yml index 5c10ebdbef8da..80335348e6bc2 100644 --- a/.github/workflows/connectors_tests.yml +++ b/.github/workflows/connectors_tests.yml @@ -41,7 +41,7 @@ jobs: - '*' - 'airbyte-ci/**/*' - 'airbyte-integrations/connectors/**/*' - - 'airbyte-cdk/**/*' + - 'airbyte-cdk/java/**/*' - 'buildSrc/**/*' # The Connector CI Tests is a status check emitted by airbyte-ci # We make it pass once we have determined that there are no changes to the connectors diff --git a/.github/workflows/contractors_review_requirements.yml b/.github/workflows/contractors_review_requirements.yml new file mode 100644 index 0000000000000..b90fe2ec61eb5 --- /dev/null +++ b/.github/workflows/contractors_review_requirements.yml @@ -0,0 +1,36 @@ +name: Connector Ops CI - Check contractors review requirements + +on: + pull_request: + types: + - opened + - pull_request_review + - ready_for_review + - reopened + - synchronize +jobs: + check-review-requirements: + name: "Check if a review is required from Connector teams" + runs-on: ubuntu-latest + + if: ${{ github.repository == 'airbytehq/airbyte' }} + steps: + - name: Check contributor team membership + uses: tspascoal/get-user-teams-membership@v3 + id: actorTeams + with: + username: ${{ github.actor }} + GITHUB_TOKEN: ${{ secrets.OCTAVIA_4_ROOT_ACCESS }} + - if: ${{ (contains(steps.actorTeams.outputs.teams, 'community-contractor')) }} + name: Check if the review requirements are met + uses: Automattic/action-required-review@v3 + with: + status: Required review + token: ${{ secrets.OCTAVIA_4_ROOT_ACCESS }} + request-reviews: true + fail: true + requirements: | + - paths: unmatched + teams: + - connector-extensibility + - gl-python diff --git a/.github/workflows/gradle.yml b/.github/workflows/gradle.yml index 45cfc134b28a2..dd2c7cf52d6f9 100644 --- a/.github/workflows/gradle.yml +++ b/.github/workflows/gradle.yml @@ -37,17 +37,18 @@ jobs: java: - '**/*.java' - '**/*.gradle' + - '**/*.kt' - 'airbyte-cdk/java/**/*' run-check: needs: - changes if: needs.changes.outputs.java == 'true' - # The gradle check task which we will run is embarrassingly parallelizable. - # We therefore run this on a machine with a maximum number of cores. - # We pay per time and per core, so there should be little difference in total cost. - # The latency overhead of setting up gradle prior to running the actual task adds up to about a minute. - runs-on: connector-test-xxlarge + # As of now, 16 cores seems to be enough. + # Any revision upwards should be based on a performance analysis of gradle scans. + # See https://github.com/airbytehq/airbyte/pull/36055 for an example of this, + # which explains why which we went down from 64 cores to 16. + runs-on: connector-test-large name: Gradle Check timeout-minutes: 30 steps: @@ -57,10 +58,6 @@ jobs: with: distribution: "zulu" java-version: "21" - - name: Install Pip - run: curl -fsSL https://bootstrap.pypa.io/get-pip.py | python3 - - name: Install Pyenv - run: python3 -m pip install virtualenv --user - name: Docker login # Some tests use testcontainers which pull images from DockerHub. uses: docker/login-action@v1 diff --git a/.github/workflows/legacy-publish-command.yml b/.github/workflows/legacy-publish-command.yml deleted file mode 100644 index e20bd25fd77f3..0000000000000 --- a/.github/workflows/legacy-publish-command.yml +++ /dev/null @@ -1,448 +0,0 @@ -name: "[Legacy] Publish Connector Image" -on: - workflow_dispatch: - inputs: - repo: - description: "Repo to check out code from. Defaults to the main airbyte repo. Set this when building connectors from forked repos." - required: false - default: "airbytehq/airbyte" - gitref: - description: "The git ref to check out from the specified repository." - required: false - default: master - connector: - description: "Airbyte Connector" - required: true - comment-id: - description: "The comment-id of the slash command. Used to update the comment with the status." - required: false - parallel: - description: "Switching this to true will spin up 5 build agents instead of 1 and allow multi connector publishes to run in parallel" - required: true - default: "false" - run-tests: - description: "Should run tests when publishing" - required: true - default: "true" - pre-release: - description: "Should publish a pre-release version" - required: true - default: "false" - -jobs: - ## Gradle Build - # In case of self-hosted EC2 errors, remove this block. - start-publish-image-runner-0: - name: Start Build EC2 Runner 0 - runs-on: ubuntu-latest - outputs: - label: ${{ steps.start-ec2-runner.outputs.label }} - ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id }} - steps: - - name: Checkout Airbyte - uses: actions/checkout@v3 - with: - repository: ${{ github.event.inputs.repo }} - ref: ${{ github.event.inputs.gitref }} - - name: Check PAT rate limits - run: | - ./tools/bin/find_non_rate_limited_PAT \ - ${{ secrets.GH_PAT_BUILD_RUNNER_OSS }} \ - ${{ secrets.GH_PAT_BUILD_RUNNER_BACKUP }} - - name: Start AWS Runner - id: start-ec2-runner - uses: ./.github/actions/start-aws-runner - with: - aws-access-key-id: ${{ secrets.SELF_RUNNER_AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }} - github-token: ${{ env.PAT }} - label: ${{ github.run_id }}-publisher - start-publish-image-runner-1: - if: github.event.inputs.parallel == 'true' && success() - name: Start Build EC2 Runner 1 - runs-on: ubuntu-latest - outputs: - label: ${{ steps.start-ec2-runner.outputs.label }} - ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id }} - steps: - - name: Checkout Airbyte - uses: actions/checkout@v3 - with: - repository: ${{ github.event.inputs.repo }} - ref: ${{ github.event.inputs.gitref }} - - name: Check PAT rate limits - run: | - ./tools/bin/find_non_rate_limited_PAT \ - ${{ secrets.GH_PAT_BUILD_RUNNER_OSS }} \ - ${{ secrets.GH_PAT_BUILD_RUNNER_BACKUP }} - - name: Start AWS Runner - id: start-ec2-runner - uses: ./.github/actions/start-aws-runner - with: - aws-access-key-id: ${{ secrets.SELF_RUNNER_AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }} - github-token: ${{ env.PAT }} - label: ${{ github.run_id }}-publisher - start-publish-image-runner-2: - if: github.event.inputs.parallel == 'true' && success() - name: Start Build EC2 Runner 2 - runs-on: ubuntu-latest - outputs: - label: ${{ steps.start-ec2-runner.outputs.label }} - ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id }} - steps: - - name: Checkout Airbyte - uses: actions/checkout@v3 - with: - repository: ${{ github.event.inputs.repo }} - ref: ${{ github.event.inputs.gitref }} - - name: Check PAT rate limits - run: | - ./tools/bin/find_non_rate_limited_PAT \ - ${{ secrets.GH_PAT_BUILD_RUNNER_OSS }} \ - ${{ secrets.GH_PAT_BUILD_RUNNER_BACKUP }} - - name: Start AWS Runner - id: start-ec2-runner - uses: ./.github/actions/start-aws-runner - with: - aws-access-key-id: ${{ secrets.SELF_RUNNER_AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }} - github-token: ${{ env.PAT }} - label: ${{ github.run_id }}-publisher - start-publish-image-runner-3: - if: github.event.inputs.parallel == 'true' && success() - name: Start Build EC2 Runner 3 - runs-on: ubuntu-latest - outputs: - label: ${{ steps.start-ec2-runner.outputs.label }} - ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id }} - steps: - - name: Checkout Airbyte - uses: actions/checkout@v3 - with: - repository: ${{ github.event.inputs.repo }} - ref: ${{ github.event.inputs.gitref }} - - name: Check PAT rate limits - run: | - ./tools/bin/find_non_rate_limited_PAT \ - ${{ secrets.GH_PAT_BUILD_RUNNER_OSS }} \ - ${{ secrets.GH_PAT_BUILD_RUNNER_BACKUP }} - - name: Start AWS Runner - id: start-ec2-runner - uses: ./.github/actions/start-aws-runner - with: - aws-access-key-id: ${{ secrets.SELF_RUNNER_AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }} - github-token: ${{ env.PAT }} - label: ${{ github.run_id }}-publisher - start-publish-image-runner-4: - if: github.event.inputs.parallel == 'true' && success() - name: Start Build EC2 Runner 4 - runs-on: ubuntu-latest - outputs: - label: ${{ steps.start-ec2-runner.outputs.label }} - ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id }} - steps: - - name: Checkout Airbyte - uses: actions/checkout@v3 - with: - repository: ${{ github.event.inputs.repo }} - ref: ${{ github.event.inputs.gitref }} - - name: Check PAT rate limits - run: | - ./tools/bin/find_non_rate_limited_PAT \ - ${{ secrets.GH_PAT_BUILD_RUNNER_OSS }} \ - ${{ secrets.GH_PAT_BUILD_RUNNER_BACKUP }} - - name: Start AWS Runner - id: start-ec2-runner - uses: ./.github/actions/start-aws-runner - with: - aws-access-key-id: ${{ secrets.SELF_RUNNER_AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }} - github-token: ${{ env.PAT }} - label: ${{ github.run_id }}-publisher - preprocess-matrix: - needs: start-publish-image-runner-0 - runs-on: ${{ needs.start-publish-image-runner-0.outputs.label }} - outputs: - connectorjson: ${{ steps.preprocess.outputs.connectorjson }} - steps: - # given a string input of a single connector or comma separated list of connectors e.g. connector1, connector2 - # this step builds an array, by removing whitespace, add in quotation marks around connectors and braces [ ] at the start and end - # finally, it sets it as output from this job so we can use this array of connectors as our matrix strategy for publishing - - id: preprocess - run: | - start="[\"" - replace="\",\"" - end="\"]" - stripped_connector="$(echo "${{ github.event.inputs.connector }}" | tr -d ' ')" - middle=${stripped_connector//,/$replace} - full="$start$middle$end" - echo "connectorjson=$full" >> $GITHUB_OUTPUT - write-initial-output-to-comment: - name: Set up git comment - if: github.event.inputs.comment-id - needs: start-publish-image-runner-0 - runs-on: ${{ needs.start-publish-image-runner-0.outputs.label }} - steps: - - name: Print start message - if: github.event.inputs.comment-id && success() - uses: peter-evans/create-or-update-comment@v1 - with: - comment-id: ${{ github.event.inputs.comment-id }} - body: | - > :clock2: Publishing the following connectors:
${{ github.event.inputs.connector }}
https://github.com/${{github.repository}}/actions/runs/${{github.run_id}} - - name: Create table header - uses: peter-evans/create-or-update-comment@v1 - with: - comment-id: ${{ github.event.inputs.comment-id }} - body: | -
- - | Connector | Version | Did it publish? | - - name: Create table separator - uses: peter-evans/create-or-update-comment@v1 - with: - comment-id: ${{ github.event.inputs.comment-id }} - body: | - | --- | --- | --- | - publish-image: - timeout-minutes: 240 - needs: - - start-publish-image-runner-0 - - preprocess-matrix - - write-initial-output-to-comment - strategy: - max-parallel: 5 - fail-fast: false - matrix: - connector: ${{ fromJSON(needs.preprocess-matrix.outputs.connectorjson) }} - runs-on: runner-pool-${{ github.run_id }} - steps: - - name: Set up Cloud SDK - uses: google-github-actions/setup-gcloud@v0 - with: - service_account_key: ${{ secrets.SPEC_CACHE_SERVICE_ACCOUNT_KEY }} - export_default_credentials: true - - name: Search for valid connector name format - id: regex - uses: AsasInnab/regex-action@v1 - with: - regex_pattern: "^(connectors|bases)/[a-zA-Z0-9-_]+$" - regex_flags: "i" # required to be set for this plugin - search_string: ${{ matrix.connector }} - - name: Validate input workflow format - if: steps.regex.outputs.first_match != matrix.connector - run: echo "The connector provided has an invalid format!" && exit 1 - - name: Checkout Airbyte - uses: actions/checkout@v3 - with: - repository: ${{ github.event.inputs.repo }} - ref: ${{ github.event.inputs.gitref }} - token: ${{ secrets.GH_PAT_MAINTENANCE_OSS }} - - name: Install Java - uses: actions/setup-java@v3 - with: - distribution: "zulu" - java-version: "21" - - name: Install Python - uses: actions/setup-python@v4 - with: - python-version: "3.10" - - name: Install Pyenv - run: | - python3 -m pip install --quiet virtualenv --user - rm -r venv || echo "no pre-existing venv" - python3 -m virtualenv venv - source venv/bin/activate - - name: Install CI scripts - run: | - source venv/bin/activate - pip install --quiet -e ./airbyte-ci/connectors/ci_credentials - pip install --quiet -e ./airbyte-ci/connectors/connector_ops - - name: Write Integration Test Credentials for ${{ matrix.connector }} - run: | - source venv/bin/activate - ci_credentials ${{ matrix.connector }} write-to-storage - # normalization also runs destination-specific tests, so fetch their creds also - if [ 'bases/base-normalization' = "${{ matrix.connector }}" ] || [ 'base-normalization' = "${{ matrix.connector }}" ]; then - ci_credentials destination-bigquery write-to-storage - ci_credentials destination-postgres write-to-storage - ci_credentials destination-snowflake write-to-storage - fi - env: - GCP_GSM_CREDENTIALS: ${{ secrets.GCP_GSM_CREDENTIALS }} - - name: Set Name and Version Environment Vars - if: startsWith(matrix.connector, 'connectors') - run: | - source tools/lib/lib.sh - DOCKERFILE=airbyte-integrations/${{ matrix.connector }}/Dockerfile - echo "IMAGE_NAME=$(echo ${{ matrix.connector }} | cut -d"/" -f2)" >> $GITHUB_ENV - echo "IMAGE_VERSION=$(_get_docker_image_version ${DOCKERFILE} ${{ github.event.inputs.pre-release }})" >> $GITHUB_ENV - - name: Prepare Sentry - if: startsWith(matrix.connector, 'connectors') - uses: Wandalen/wretry.action@v1.0.42 - with: - attempt_limit: 3 - attempt_delay: 5000 # in ms - command: curl -sL https://sentry.io/get-cli/ | bash || echo "sentry cli already installed" - - name: Run QA checks for ${{ matrix.connector }} - id: qa_checks - if: always() - run: | - source venv/bin/activate - run-qa-checks ${{ matrix.connector }} - - name: Publish ${{ matrix.connector }} - id: publish - env: - DOCKER_HUB_USERNAME: ${{ secrets.DOCKER_HUB_USERNAME }} - DOCKER_HUB_PASSWORD: ${{ secrets.DOCKER_HUB_PASSWORD }} - # Oracle expects this variable to be set. Although usually present, this is not set by default on Github virtual runners. - TZ: UTC - uses: Wandalen/wretry.action@v1.0.42 - with: - command: | - echo "$SPEC_CACHE_SERVICE_ACCOUNT_KEY" > spec_cache_key_file.json && docker login -u ${DOCKER_HUB_USERNAME} -p ${DOCKER_HUB_PASSWORD} - ./tools/integrations/manage.sh publish airbyte-integrations/${{ matrix.connector }} ${{ github.event.inputs.run-tests }} --publish_spec_to_cache ${{ github.event.inputs.pre-release == 'true' && '--pre_release' || '' }} - attempt_limit: 3 - attempt_delay: 5000 in # ms - - name: Update Integration Test Credentials after test run for ${{ github.event.inputs.connector }} - if: always() - run: | - source venv/bin/activate - ci_credentials ${{ matrix.connector }} update-secrets - # normalization also runs destination-specific tests, so fetch their creds also - if [ 'bases/base-normalization' = "${{ matrix.connector }}" ] || [ 'base-normalization' = "${{ matrix.connector }}" ]; then - ci_credentials destination-bigquery update-secrets - ci_credentials destination-postgres update-secrets - ci_credentials destination-snowflake update-secrets - fi - env: - GCP_GSM_CREDENTIALS: ${{ secrets.GCP_GSM_CREDENTIALS }} - - name: Create Sentry Release - if: startsWith(matrix.connector, 'connectors') && success() - run: | - SENTRY_RELEASE_NAME="airbyte-${{ env.IMAGE_NAME }}@${{ env.IMAGE_VERSION }}" - sentry-cli releases set-commits "$SENTRY_RELEASE_NAME" --auto --ignore-missing && - sentry-cli releases finalize "$SENTRY_RELEASE_NAME" - env: - SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_CONNECTOR_RELEASE_AUTH_TOKEN }} - SENTRY_ORG: airbytehq - SENTRY_PROJECT: connector-incident-management - - name: Process outcomes into emojis - if: ${{ always() && github.event.inputs.comment-id }} - run: | - if [[ ${{ steps.publish.outcome }} = "success" ]]; then - echo "PUBLISH_OUTCOME=:white_check_mark:" >> $GITHUB_ENV - else - echo "PUBLISH_OUTCOME=:x:" >> $GITHUB_ENV - fi - - name: Add connector outcome line to table - if: ${{ always() && github.event.inputs.comment-id }} - uses: peter-evans/create-or-update-comment@v1 - with: - comment-id: ${{ github.event.inputs.comment-id }} - body: | - | ${{ matrix.connector }} | ${{ env.IMAGE_VERSION }} | ${{ env.PUBLISH_OUTCOME }} | - add-helpful-info-to-git-comment: - if: ${{ always() && github.event.inputs.comment-id }} - name: Add extra info to git comment - needs: - - start-publish-image-runner-0 # required to get output from the start-runner job - - publish-image # required to wait when the main job is done - runs-on: ubuntu-latest - steps: - - name: Add hint for manual seed definition update - uses: peter-evans/create-or-update-comment@v1 - with: - comment-id: ${{ github.event.inputs.comment-id }} - body: | -
- - if you have connectors that successfully published but failed definition generation, follow [step 4 here ▶️](https://docs.airbyte.com/connector-development/#publishing-a-connector) - # In case of self-hosted EC2 errors, remove this block. - stop-publish-image-runner-0: - if: ${{ always() }} # required to stop the runner even if the error happened in the previous jobs - name: Stop Build EC2 Runner - needs: - - start-publish-image-runner-0 # required to get output from the start-runner job - - preprocess-matrix - - publish-image # required to wait when the main job is done - - add-helpful-info-to-git-comment - runs-on: ubuntu-latest - steps: - - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v1 - with: - aws-access-key-id: ${{ secrets.SELF_RUNNER_AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }} - aws-region: us-east-2 - - name: Checkout Airbyte - uses: actions/checkout@v3 - - name: Check PAT rate limits - run: | - ./tools/bin/find_non_rate_limited_PAT \ - ${{ secrets.GH_PAT_BUILD_RUNNER_OSS }} \ - ${{ secrets.GH_PAT_BUILD_RUNNER_BACKUP }} - - name: Stop EC2 runner - uses: airbytehq/ec2-github-runner@base64v1.1.0 - with: - mode: stop - github-token: ${{ env.PAT }} - label: ${{ needs.start-publish-image-runner-0.outputs.label }} - ec2-instance-id: ${{ needs.start-publish-image-runner-0.outputs.ec2-instance-id }} - stop-publish-image-runner-multi: - if: ${{ always() && github.event.inputs.parallel == 'true' }} - name: Stop Build EC2 Runner - needs: - - start-publish-image-runner-0 - - start-publish-image-runner-1 - - start-publish-image-runner-2 - - start-publish-image-runner-3 - - start-publish-image-runner-4 - - preprocess-matrix - - publish-image # required to wait when the main job is done - strategy: - fail-fast: false - matrix: - ec2-instance: - [ - { - "label": "${{ needs.start-publish-image-runner-1.outputs.label }}", - "id": "${{ needs.start-publish-image-runner-1.outputs.ec2-instance-id }}", - }, - { - "label": "${{ needs.start-publish-image-runner-2.outputs.label }}", - "id": "${{ needs.start-publish-image-runner-2.outputs.ec2-instance-id }}", - }, - { - "label": "${{ needs.start-publish-image-runner-3.outputs.label }}", - "id": "${{ needs.start-publish-image-runner-3.outputs.ec2-instance-id }}", - }, - { - "label": "${{ needs.start-publish-image-runner-4.outputs.label }}", - "id": "${{ needs.start-publish-image-runner-4.outputs.ec2-instance-id }}", - }, - ] - runs-on: ubuntu-latest - steps: - - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v1 - with: - aws-access-key-id: ${{ secrets.SELF_RUNNER_AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }} - aws-region: us-east-2 - - name: Checkout Airbyte - uses: actions/checkout@v3 - - name: Check PAT rate limits - run: | - ./tools/bin/find_non_rate_limited_PAT \ - ${{ secrets.GH_PAT_BUILD_RUNNER_OSS }} \ - ${{ secrets.GH_PAT_BUILD_RUNNER_BACKUP }} - - name: Stop EC2 runner - uses: airbytehq/ec2-github-runner@base64v1.1.0 - with: - mode: stop - github-token: ${{ env.PAT }} - label: ${{ matrix.ec2-instance.label }} - ec2-instance-id: ${{ matrix.ec2-instance.id }} diff --git a/.github/workflows/legacy-test-command.yml b/.github/workflows/legacy-test-command.yml deleted file mode 100644 index 3d530b81e1434..0000000000000 --- a/.github/workflows/legacy-test-command.yml +++ /dev/null @@ -1,223 +0,0 @@ -name: Run Integration Test -on: - workflow_dispatch: - inputs: - connector: - description: "Airbyte Connector" - required: true - repo: - description: "Repo to check out code from. Defaults to the main airbyte repo. Set this when building connectors from forked repos." - required: false - default: "airbytehq/airbyte" - gitref: - description: "The git ref to check out from the specified repository." - required: false - default: master - comment-id: - description: "The comment-id of the slash command. Used to update the comment with the status." - required: false - uuid: - description: "Custom UUID of workflow run. Used because GitHub dispatches endpoint does not return workflow run id." - required: false - connector-acceptance-test-version: - description: "Set a specific connector acceptance test version to use. Enter 'dev' to test, build and use a local version of Connector Acceptance Test." - required: false - default: "latest" -jobs: - uuid: - name: "Custom UUID of workflow run" - timeout-minutes: 10 - runs-on: ubuntu-latest - steps: - - name: UUID ${{ github.event.inputs.uuid }} - run: true - start-test-runner: - name: Start Build EC2 Runner - needs: uuid - timeout-minutes: 10 - runs-on: ubuntu-latest - outputs: - label: ${{ steps.start-ec2-runner.outputs.label }} - ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id }} - pipeline-start-timestamp: ${{ steps.get-start-timestamp.outputs.start-timestamp }} - steps: - - name: Get start timestamp - id: get-start-timestamp - run: echo "::set-output name=start-timestamp::$(date +%s)" - - name: Checkout Airbyte - uses: actions/checkout@v3 - with: - repository: ${{ github.event.inputs.repo }} - ref: ${{ github.event.inputs.gitref }} - - name: Check PAT rate limits - run: | - ./tools/bin/find_non_rate_limited_PAT \ - ${{ secrets.GH_PAT_BUILD_RUNNER_OSS }} \ - ${{ secrets.GH_PAT_BUILD_RUNNER_BACKUP }} - - name: Start AWS Runner - id: start-ec2-runner - uses: ./.github/actions/start-aws-runner - with: - aws-access-key-id: ${{ secrets.SELF_RUNNER_AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }} - github-token: ${{ env.PAT }} - integration-test: - timeout-minutes: 240 - needs: start-test-runner - runs-on: ${{ needs.start-test-runner.outputs.label }} - steps: - - name: Link comment to workflow run - if: github.event.inputs.comment-id - uses: peter-evans/create-or-update-comment@v1 - with: - comment-id: ${{ github.event.inputs.comment-id }} - body: | - > :clock2: ${{github.event.inputs.connector}} https://github.com/${{github.repository}}/actions/runs/${{github.run_id}} - - name: Search for valid connector name format - id: regex - uses: AsasInnab/regex-action@v1 - with: - regex_pattern: "^((connectors|bases)/)?[a-zA-Z0-9-_]+$" - regex_flags: "i" # required to be set for this plugin - search_string: ${{ github.event.inputs.connector }} - - name: Validate input workflow format - if: steps.regex.outputs.first_match != github.event.inputs.connector - run: echo "The connector provided has an invalid format!" && exit 1 - - name: Checkout Airbyte - uses: actions/checkout@v3 - with: - repository: ${{ github.event.inputs.repo }} - ref: ${{ github.event.inputs.gitref }} - - name: Install Java - uses: actions/setup-java@v3 - with: - distribution: "zulu" - java-version: "21" - - name: Install Python - uses: actions/setup-python@v4 - with: - python-version: "3.10" - - name: Install CI scripts - # all CI python packages have the prefix "ci_" - run: | - pip install --quiet -e ./airbyte-ci/connectors/ci_credentials - pip install --quiet -e ./airbyte-ci/connectors/connector_ops - - name: Write Integration Test Credentials for ${{ github.event.inputs.connector }} - run: | - ci_credentials ${{ github.event.inputs.connector }} write-to-storage - # normalization also runs destination-specific tests, so fetch their creds also - if [ 'bases/base-normalization' = "${{ github.event.inputs.connector }}" ] || [ 'base-normalization' = "${{ github.event.inputs.connector }}" ]; then - ci_credentials destination-bigquery write-to-storage - ci_credentials destination-postgres write-to-storage - ci_credentials destination-snowflake write-to-storage - fi - env: - GCP_GSM_CREDENTIALS: ${{ secrets.GCP_GSM_CREDENTIALS }} - - name: Test ${{ github.event.inputs.connector }} - id: test - env: - ACTION_RUN_ID: ${{github.run_id}} - # Oracle expects this variable to be set. Although usually present, this is not set by default on Github virtual runners. - TZ: UTC - ORG_GRADLE_PROJECT_connectorAcceptanceTestVersion: ${{github.event.inputs.connector-acceptance-test-version}} - S3_BUILD_CACHE_ACCESS_KEY_ID: ${{ secrets.SELF_RUNNER_AWS_ACCESS_KEY_ID }} - S3_BUILD_CACHE_SECRET_KEY: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }} - uses: Wandalen/wretry.action@v1.0.42 - with: - command: ./tools/bin/ci_integration_test.sh ${{ github.event.inputs.connector }} - attempt_limit: 3 - attempt_delay: 10000 # in ms - - name: Update Integration Test Credentials after test run for ${{ github.event.inputs.connector }} - if: always() - run: | - ci_credentials ${{ github.event.inputs.connector }} update-secrets - # normalization also runs destination-specific tests, so fetch their creds also - if [ 'bases/base-normalization' = "${{ github.event.inputs.connector }}" ] || [ 'base-normalization' = "${{ github.event.inputs.connector }}" ]; then - ci_credentials destination-bigquery update-secrets - ci_credentials destination-postgres update-secrets - ci_credentials destination-snowflake update-secrets - fi - env: - GCP_GSM_CREDENTIALS: ${{ secrets.GCP_GSM_CREDENTIALS }} - - name: Archive test reports artifacts - if: github.event.inputs.comment-id && failure() - uses: actions/upload-artifact@v3 - with: - name: test-reports - path: | - **/${{ github.event.inputs.connector }}/build/reports/tests/**/** - **/${{ github.event.inputs.connector }}/acceptance_tests_logs/** - **/normalization_test_output/**/dbt_output.log - **/normalization_test_output/**/destination_output.log - **/normalization_test_output/**/build/compiled/airbyte_utils/** - **/normalization_test_output/**/build/run/airbyte_utils/** - **/normalization_test_output/**/models/generated/** - - name: Test coverage reports artifacts - if: github.event.inputs.comment-id && success() - uses: actions/upload-artifact@v3 - with: - name: test-reports - path: | - **/${{ github.event.inputs.connector }}/htmlcov/** - retention-days: 3 - - name: Run QA checks for ${{ github.event.inputs.connector }} - id: qa_checks - if: always() - run: | - run-qa-checks ${{ github.event.inputs.connector }} - - name: Report Observability - if: always() - run: ./tools/status/report_observability.sh ${{ github.event.inputs.connector }} ${{github.run_id}} ${{ needs.start-test-runner.outputs.pipeline-start-timestamp }} ${{ github.event.inputs.gitref }} ${{ github.sha }} ${{steps.test.outcome}} ${{steps.qa_checks.outcome}} - env: - AWS_ACCESS_KEY_ID: ${{ secrets.STATUS_API_AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.STATUS_API_AWS_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: "us-east-2" - - name: Add Success Comment - if: github.event.inputs.comment-id && success() - uses: peter-evans/create-or-update-comment@v1 - with: - comment-id: ${{ github.event.inputs.comment-id }} - body: | - > :white_check_mark: ${{github.event.inputs.connector}} https://github.com/${{github.repository}}/actions/runs/${{github.run_id}} - ${{env.PYTHON_UNITTEST_COVERAGE_REPORT}} - > ${{env.TEST_SUMMARY_INFO}} - - name: Add Failure Comment - if: github.event.inputs.comment-id && failure() - uses: peter-evans/create-or-update-comment@v1 - with: - comment-id: ${{ github.event.inputs.comment-id }} - body: | - > :x: ${{github.event.inputs.connector}} https://github.com/${{github.repository}}/actions/runs/${{github.run_id}} - > :bug: ${{env.GRADLE_SCAN_LINK}} - > ${{env.TEST_SUMMARY_INFO}} - # In case of self-hosted EC2 errors, remove this block. - stop-test-runner: - name: Stop Build EC2 Runner - timeout-minutes: 10 - needs: - - start-test-runner # required to get output from the start-runner job - - integration-test # required to wait when the main job is done - - uuid - runs-on: ubuntu-latest - if: ${{ always() }} # required to stop the runner even if the error happened in the previous jobs - steps: - - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v1 - with: - aws-access-key-id: ${{ secrets.SELF_RUNNER_AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }} - aws-region: us-east-2 - - name: Checkout Airbyte - uses: actions/checkout@v3 - - name: Check PAT rate limits - run: | - ./tools/bin/find_non_rate_limited_PAT \ - ${{ secrets.GH_PAT_BUILD_RUNNER_OSS }} \ - ${{ secrets.GH_PAT_BUILD_RUNNER_BACKUP }} - - name: Stop EC2 runner - uses: supertopher/ec2-github-runner@base64v1.0.10 - with: - mode: stop - github-token: ${{ env.PAT }} - label: ${{ needs.start-test-runner.outputs.label }} - ec2-instance-id: ${{ needs.start-test-runner.outputs.ec2-instance-id }} diff --git a/.github/workflows/publish-airbyte-lib-command-manually.yml b/.github/workflows/publish-airbyte-lib-command-manually.yml deleted file mode 100644 index e596444414d31..0000000000000 --- a/.github/workflows/publish-airbyte-lib-command-manually.yml +++ /dev/null @@ -1,57 +0,0 @@ -name: Publish AirbyteLib Manually -on: workflow_dispatch - -concurrency: - group: publish-airbyte-lib - cancel-in-progress: false - -jobs: - get_ci_runner: - runs-on: ubuntu-latest - name: Get CI runner - steps: - - name: Checkout Airbyte - uses: actions/checkout@v3 - with: - ref: ${{ github.head_ref }} - token: ${{ secrets.GH_PAT_APPROVINGTON_OCTAVIA }} - fetch-depth: 1 - - name: Get CI runner - id: get_ci_runner - uses: ./.github/actions/airbyte-ci-requirements - with: - runner_type: "publish" - runner_size: "large" - # Getting ci requirements for connectors publish command as there is no special one for poetry publish - airbyte_ci_command: "connectors publish" - github_token: ${{ secrets.GH_PAT_APPROVINGTON_OCTAVIA }} - sentry_dsn: ${{ secrets.SENTRY_AIRBYTE_CI_DSN }} - outputs: - runner_name: ${{ steps.get_ci_runner.outputs.runner_name }} - publish_connectors: - name: Publish airbyte-lib - needs: get_ci_runner - runs-on: ${{ needs.get_ci_runner.outputs.runner_name }} - steps: - - name: Checkout Airbyte - uses: actions/checkout@v3 - - name: Publish - id: publish-airbyte-lib - uses: ./.github/actions/run-airbyte-ci - with: - context: "manual" - dagger_cloud_token: ${{ secrets.DAGGER_CLOUD_TOKEN }} - docker_hub_password: ${{ secrets.DOCKER_HUB_PASSWORD }} - docker_hub_username: ${{ secrets.DOCKER_HUB_USERNAME }} - gcp_gsm_credentials: ${{ secrets.GCP_GSM_CREDENTIALS }} - gcs_credentials: ${{ secrets.METADATA_SERVICE_PROD_GCS_CREDENTIALS }} - github_token: ${{ secrets.GITHUB_TOKEN }} - metadata_service_gcs_credentials: ${{ secrets.METADATA_SERVICE_PROD_GCS_CREDENTIALS }} - sentry_dsn: ${{ secrets.SENTRY_AIRBYTE_CI_DSN }} - slack_webhook_url: ${{ secrets.PUBLISH_ON_MERGE_SLACK_WEBHOOK }} - spec_cache_gcs_credentials: ${{ secrets.SPEC_CACHE_SERVICE_ACCOUNT_KEY_PUBLISH }} - s3_build_cache_access_key_id: ${{ secrets.SELF_RUNNER_AWS_ACCESS_KEY_ID }} - s3_build_cache_secret_key: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }} - tailscale_auth_key: ${{ secrets.TAILSCALE_AUTH_KEY }} - subcommand: "poetry --package-path=airbyte-lib publish" - python_registry_token: ${{ secrets.PYPI_TOKEN }} diff --git a/.github/workflows/publish-cdk-command-manually.yml b/.github/workflows/publish-cdk-command-manually.yml index a6b9ae7897729..4c8525e3e73b7 100644 --- a/.github/workflows/publish-cdk-command-manually.yml +++ b/.github/workflows/publish-cdk-command-manually.yml @@ -28,52 +28,28 @@ concurrency: cancel-in-progress: false jobs: - # We are using these runners because they are the same as the one for `publish-command.yml` - # One problem we had using `ubuntu-latest` for example is that the user is not root and some commands would fail in - # `manage.sh` (specifically `apt-get`) - start-publish-docker-image-runner-0: - name: Start Build EC2 Runner 0 - runs-on: ubuntu-latest - outputs: - label: ${{ steps.start-ec2-runner.outputs.label }} - ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id }} - steps: - - name: Checkout Airbyte - uses: actions/checkout@v3 - with: - repository: airbytehq/airbyte - ref: master - - name: Check PAT rate limits - run: | - ./tools/bin/find_non_rate_limited_PAT \ - ${{ secrets.GH_PAT_BUILD_RUNNER_OSS }} \ - ${{ secrets.GH_PAT_BUILD_RUNNER_BACKUP }} - - name: Start AWS Runner - id: start-ec2-runner - uses: ./.github/actions/start-aws-runner - with: - aws-access-key-id: ${{ secrets.SELF_RUNNER_AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }} - github-token: ${{ env.PAT }} - label: ${{ github.run_id }}-publisher - build-cdk: runs-on: ubuntu-latest steps: - - uses: actions/setup-python@v4 + - name: Install Python + uses: actions/setup-python@v4 with: python-version: "3.10" - - uses: actions/setup-java@v3 - with: - distribution: "zulu" - java-version: "21" + - name: Install Poetry + id: install_poetry + uses: snok/install-poetry@v1 - name: Checkout Airbyte uses: actions/checkout@v3 with: repository: ${{ github.event.inputs.repo }} ref: ${{ github.event.inputs.gitref }} + - name: Install Dependencies + id: install_dependencies + working-directory: airbyte-cdk/python + run: poetry install - name: Build CDK Package - run: (cd airbyte-cdk/python; ./gradlew --no-daemon --no-build-cache :build) + working-directory: airbyte-cdk/python + run: poetry run poe build - name: Post failure to Slack channel dev-connectors-extensibility if: ${{ failure() }} uses: slackapi/slack-github-action@v1.23.0 @@ -113,26 +89,29 @@ jobs: - uses: actions/setup-python@v4 with: python-version: "3.10" + - name: Install Poetry + id: install_poetry + uses: snok/install-poetry@v1 - name: Checkout Airbyte uses: actions/checkout@v3 with: repository: ${{ github.event.inputs.repo }} ref: ${{ github.event.inputs.gitref }} - token: ${{ secrets.GH_PAT_MAINTENANCE_OSS }} - - name: "Publish Python CDK: bump version" + token: ${{ secrets.GH_PAT_MAINTENANCE_OSS }} # This token is what allows us to commit directly to master + - name: "Publish Python CDK: bump Poetry package version" id: bumpversion run: | - pip install bumpversion cd airbyte-cdk/python - bumpversion ${{ github.event.inputs.release-type }} - new_version="$(grep -i 'current_version = ' .bumpversion.cfg | sed -e 's/.* = //')" + # Bump package version + poetry version ${{ github.event.inputs.release-type }} + new_version="$(poetry version -s)" awk -v NEW_VERSION="$new_version" -v CHANGELOG_MESSAGE="${{ github.event.inputs.changelog-message }}" 'NR==3{print "## " NEW_VERSION "\n" CHANGELOG_MESSAGE "\n"}1' CHANGELOG.md > tmp && mv tmp CHANGELOG.md echo NEW_VERSION=$new_version >> $GITHUB_OUTPUT - name: Commit and Push Changes uses: stefanzweifel/git-auto-commit-action@v4 with: - file_pattern: airbyte-cdk/python/setup.py airbyte-cdk/python/.bumpversion.cfg airbyte-cdk/python/CHANGELOG.md airbyte-cdk/python/Dockerfile - commit_message: 🤖 Bump ${{ github.event.inputs.release-type }} version of Python CDK + file_pattern: airbyte-cdk/python/pyproject.toml airbyte-cdk/python/CHANGELOG.md + commit_message: 🤖 ${{ github.event.inputs.release-type }} bump Python CDK to version ${{ steps.bumpversion.outputs.NEW_VERSION }} commit_user_name: Octavia Squidington III commit_user_email: octavia-squidington-iii@users.noreply.github.com - name: Post failure to Slack channel dev-connectors-extensibility @@ -165,6 +144,7 @@ jobs: SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN_AIRBYTE_TEAM }} publish-cdk: + name: Publish Python CDK to PyPi needs: bump-version runs-on: ubuntu-latest steps: @@ -173,16 +153,12 @@ jobs: with: repository: ${{ github.event.inputs.repo }} ref: ${{ github.event.inputs.gitref }} - - name: Publish Python Package - uses: mariamrf/py-package-publish-action@v1.1.0 + - name: Build and publish to pypi + uses: JRubics/poetry-publish@v2.0 with: - # specify the same version as in ~/.python-version + pypi_token: ${{ secrets.PYPI_TOKEN }} python_version: "3.10" - pip_version: "23.2" - subdir: "airbyte-cdk/python/" - env: - TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }} - TWINE_USERNAME: ${{ secrets.TWINE_USERNAME }} + package_directory: "airbyte-cdk/python" - name: Post failure to Slack channel dev-connectors-extensibility if: ${{ failure() }} uses: slackapi/slack-github-action@v1.23.0 @@ -212,39 +188,64 @@ jobs: env: SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN_AIRBYTE_TEAM }} - publish-docker-image: - timeout-minutes: 240 + bump-manifest-source: + name: Bump CDK dependency of source-declarative-manifest needs: - - start-publish-docker-image-runner-0 + - bump-version - publish-cdk - runs-on: runner-pool-${{ github.run_id }} + runs-on: ubuntu-latest steps: + - uses: actions/setup-python@v4 + with: + python-version: "3.10" + - name: Install Poetry + id: install_poetry + uses: snok/install-poetry@v1 - name: Checkout Airbyte uses: actions/checkout@v3 with: - repository: airbytehq/airbyte + repository: ${{ github.event.inputs.repo }} ref: ${{ github.event.inputs.gitref }} - - name: Install Java - uses: actions/setup-java@v3 - with: - distribution: "zulu" - java-version: "21" - - name: Install Python - uses: actions/setup-python@v4 + token: ${{ secrets.GH_PAT_MAINTENANCE_OSS }} # This token is what allows us to commit directly to master + - name: Bump CDK dependency of source-declarative-manifest + timeout-minutes: 10 + run: | + cd airbyte-integrations/connectors/source-declarative-manifest + echo "Attempting to pull the newly published version of airbyte-cdk." + while true; do + # --no-cache to force looking for the new version + poetry add airbyte-cdk==${{needs.bump-version.outputs.new_cdk_version}} --no-cache && break + # Loop to wait for the new version to be available to poetry + echo "Couldn't add new dependency. This is normal if the dependency could not (yet) be found. Retrying in 10 seconds..." + sleep 10 + done + echo "Successfully updated the CDK dependency of source-declarative-manifest to ${{needs.bump-version.outputs.new_cdk_version}}" + - name: Bump version of source-declarative-manifest + uses: ./.github/actions/run-airbyte-ci with: - python-version: "3.10" - - name: Publish source-declarative-manifest - id: publish - env: - DOCKER_HUB_USERNAME: ${{ secrets.DOCKER_HUB_USERNAME }} - DOCKER_HUB_PASSWORD: ${{ secrets.DOCKER_HUB_PASSWORD }} - # Oracle expects this variable to be set. Although usually present, this is not set by default on Github virtual runners. - TZ: UTC - uses: Wandalen/wretry.action@v1.0.42 + context: "master" # TODO: figure out why changing this yells with `The ci_gcs_credentials was not set on this PipelineContext.` + dagger_cloud_token: ${{ secrets.DAGGER_CLOUD_TOKEN_2 }} + docker_hub_password: ${{ secrets.DOCKER_HUB_PASSWORD }} + docker_hub_username: ${{ secrets.DOCKER_HUB_USERNAME }} + gcp_gsm_credentials: ${{ secrets.GCP_GSM_CREDENTIALS }} + gcs_credentials: ${{ secrets.METADATA_SERVICE_PROD_GCS_CREDENTIALS }} + github_token: ${{ secrets.GITHUB_TOKEN }} + metadata_service_gcs_credentials: ${{ secrets.METADATA_SERVICE_PROD_GCS_CREDENTIALS }} + sentry_dsn: ${{ secrets.SENTRY_AIRBYTE_CI_DSN }} + slack_webhook_url: ${{ secrets.PUBLISH_ON_MERGE_SLACK_WEBHOOK }} + spec_cache_gcs_credentials: ${{ secrets.SPEC_CACHE_SERVICE_ACCOUNT_KEY_PUBLISH }} + s3_build_cache_access_key_id: ${{ secrets.SELF_RUNNER_AWS_ACCESS_KEY_ID }} + s3_build_cache_secret_key: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }} + # There is no pull request number as we do this manually, so will just reference when we started doing it manually for now + subcommand: "connectors --concurrency=1 --execute-timeout=3600 --name=source-declarative-manifest bump_version ${{ github.event.inputs.release-type }} '36501' 'Bump CDK version to ${{needs.bump-version.outputs.new_cdk_version}}'" + python_registry_token: ${{ secrets.PYPI_TOKEN }} + - name: Commit and Push Changes + uses: stefanzweifel/git-auto-commit-action@v4 with: - command: | - docker login -u ${DOCKER_HUB_USERNAME} -p ${DOCKER_HUB_PASSWORD} - ./tools/integrations/manage.sh publish airbyte-cdk/python false + file_pattern: docs/integrations/sources/low-code.md airbyte-integrations/connectors/source-declarative-manifest/* + commit_message: 🤖 Cut version ${{needs.bump-version.outputs.new_cdk_version}} of source-declarative-manifest + commit_user_name: Octavia Squidington III + commit_user_email: octavia-squidington-iii@users.noreply.github.com - name: Post failure to Slack channel dev-connectors-extensibility if: ${{ failure() }} uses: slackapi/slack-github-action@v1.23.0 @@ -253,13 +254,20 @@ jobs: channel-id: C04J1M66D8B payload: | { - "text": "Error during `publish-docker-image` while publishing Python CDK!", + "text": ":warning: A new version of Python CDK has been released but `source-declarative-manifest` and Connector Builder haven't been automatically updated", "blocks": [ { "type": "section", "text": { "type": "mrkdwn", - "text": "Error while publishing Docker image following Python CDK release!" + "text": "A new version of Python CDK has been released with : ${{ github.event.inputs.changelog-message }}\n\n" + } + }, + { + "type": "section", + "text": { + "type": "mrkdwn", + "text": ":warning: Could not bump version of `source-declarative-manifest`.>\n" } }, { @@ -277,7 +285,7 @@ jobs: update-connector-builder: needs: - bump-version - - publish-docker-image + - bump-manifest-source runs-on: ubuntu-latest steps: - uses: actions/setup-python@v4 @@ -292,7 +300,7 @@ jobs: run: | PREVIOUS_VERSION=$(cat oss/airbyte-connector-builder-resources/CDK_VERSION) sed -i "s/${PREVIOUS_VERSION}/${{needs.bump-version.outputs.new_cdk_version}}/g" oss/airbyte-connector-builder-server/Dockerfile - sed -i "s/${PREVIOUS_VERSION}/${{needs.bump-version.outputs.new_cdk_version}}/g" airbyte-connector-builder-server-wrapped/Dockerfile + sed -i "s/${PREVIOUS_VERSION}/${{needs.bump-version.outputs.new_cdk_version}}/g" cloud/airbyte-connector-builder-server-wrapped/Dockerfile sed -i "s/airbyte-cdk==${PREVIOUS_VERSION}/airbyte-cdk==${{needs.bump-version.outputs.new_cdk_version}}/g" oss/airbyte-connector-builder-server/requirements.in echo ${{needs.bump-version.outputs.new_cdk_version}} > oss/airbyte-connector-builder-resources/CDK_VERSION cd oss/airbyte-connector-builder-server @@ -378,33 +386,3 @@ jobs: } env: SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN_AIRBYTE_TEAM }} - - # In case of self-hosted EC2 errors, remove this block. - stop-publish-docker-image-runner-0: - if: ${{ always() }} # required to stop the runner even if the error happened in the previous jobs - name: Stop Build EC2 Runner - needs: - - start-publish-docker-image-runner-0 # required to get output from the start-runner job - - publish-docker-image # required to wait when the main job is done - runs-on: ubuntu-latest - steps: - - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v1 - with: - aws-access-key-id: ${{ secrets.SELF_RUNNER_AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }} - aws-region: us-east-2 - - name: Checkout Airbyte - uses: actions/checkout@v3 - - name: Check PAT rate limits - run: | - ./tools/bin/find_non_rate_limited_PAT \ - ${{ secrets.GH_PAT_BUILD_RUNNER_OSS }} \ - ${{ secrets.GH_PAT_BUILD_RUNNER_BACKUP }} - - name: Stop EC2 runner - uses: airbytehq/ec2-github-runner@base64v1.1.0 - with: - mode: stop - github-token: ${{ env.PAT }} - label: ${{ needs.start-publish-docker-image-runner-0.outputs.label }} - ec2-instance-id: ${{ needs.start-publish-docker-image-runner-0.outputs.ec2-instance-id }} diff --git a/.github/workflows/publish-java-cdk-command.yml b/.github/workflows/publish-java-cdk-command.yml index b43a7224aa7cd..fbb4287e4fb7f 100644 --- a/.github/workflows/publish-java-cdk-command.yml +++ b/.github/workflows/publish-java-cdk-command.yml @@ -1,3 +1,5 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + # Usage: This workflow can be invoked manually or by a slash command. # # To invoke via GitHub UI, go to Actions tab, select the workflow, and click "Run workflow". @@ -8,6 +10,11 @@ # /publish-java-cdk force=true # Force-publish if needing to replace an already published version name: Publish Java CDK on: + push: + branches: + - master + paths: + - "airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties" workflow_dispatch: inputs: repo: @@ -41,8 +48,8 @@ concurrency: env: # Use the provided GITREF or default to the branch triggering the workflow. GITREF: ${{ github.event.inputs.gitref || github.ref }} - FORCE: "${{ github.event.inputs.force == null && 'false' || github.event.inputs.force }}" - DRY_RUN: "${{ github.event.inputs.dry-run == null && 'true' || github.event.inputs.dry-run }}" + FORCE: "${{ github.event_name == 'push' || github.event.inputs.force == null && 'false' || github.event.inputs.force }}" + DRY_RUN: "${{ github.event_name == 'push' && 'false' || github.event.inputs.dry-run == null && 'true' || github.event.inputs.dry-run }}" CDK_VERSION_FILE_PATH: "./airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties" S3_BUILD_CACHE_ACCESS_KEY_ID: ${{ secrets.SELF_RUNNER_AWS_ACCESS_KEY_ID }} S3_BUILD_CACHE_SECRET_KEY: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }} @@ -70,6 +77,9 @@ jobs: id: read-target-java-cdk-version run: | cdk_version=$(cat $CDK_VERSION_FILE_PATH | tr -d '\n') + echo "CDK_VERSION=$CDK_VERSION" + echo "FORCE=$FORCE" + echo "DRY_RUN=$DRY_RUN" if [[ -z "$cdk_version" ]]; then echo "Failed to retrieve CDK version from $CDK_VERSION_FILE_PATH" exit 1 @@ -101,7 +111,8 @@ jobs: arguments: --scan :airbyte-cdk:java:airbyte-cdk:cdkBuild - name: Check for Existing Version - if: ${{ !(env.FORCE == 'true') }} + # we only check existing version if it's a manual trigger and FORCE is set to false + if: ${{ (env.FORCE != 'true') }} uses: burrunan/gradle-cache-action@v1 env: CI: true diff --git a/.github/workflows/python_cdk_tests.yml b/.github/workflows/python_cdk_tests.yml index 0fc8a9b5bacbb..e19d75a6f922b 100644 --- a/.github/workflows/python_cdk_tests.yml +++ b/.github/workflows/python_cdk_tests.yml @@ -37,22 +37,24 @@ jobs: needs: - changes if: needs.changes.outputs.python_cdk == 'true' - runs-on: connector-test-large + runs-on: ubuntu-latest name: Python CDK Tests timeout-minutes: 30 steps: - name: Checkout Airbyte uses: actions/checkout@v3 - - uses: actions/setup-java@v3 - with: - distribution: "zulu" - java-version: "21" - - name: Install Pip - run: curl -fsSL https://bootstrap.pypa.io/get-pip.py | python3 - - name: Install Pyenv - run: python3 -m pip install virtualenv --user - - name: Run Gradle Check on Python CDK - run: (cd airbyte-cdk/python && CI=true ./gradlew --no-daemon --no-build-cache --scan check) + - name: Install Poetry + uses: snok/install-poetry@v1 + - name: Install Dependencies + id: install_dependencies + working-directory: airbyte-cdk/python + run: poetry install --all-extras + - name: Build CDK Package + working-directory: airbyte-cdk/python + run: poetry run poe build + - name: Check Python CDK + working-directory: airbyte-cdk/python + run: poetry run poe check-ci set-instatus-incident-on-failure: name: Create Instatus Incident on Failure diff --git a/.github/workflows/run-mypy-on-modified-cdk-files.yml b/.github/workflows/run-mypy-on-modified-cdk-files.yml index 2c3d965095210..0f67b540b9af7 100644 --- a/.github/workflows/run-mypy-on-modified-cdk-files.yml +++ b/.github/workflows/run-mypy-on-modified-cdk-files.yml @@ -20,7 +20,7 @@ jobs: - run: pip install mypy==1.6.0 - name: Get Python changed files id: changed-py-files - uses: tj-actions/changed-files@v23 + uses: tj-actions/changed-files@v43 with: files: "airbyte-cdk/python/airbyte_cdk/**/*.py" - name: Run if any of the listed files above is changed diff --git a/.github/workflows/run-qa-engine.yml b/.github/workflows/run-qa-engine.yml deleted file mode 100644 index 6b7ae8b3ca37d..0000000000000 --- a/.github/workflows/run-qa-engine.yml +++ /dev/null @@ -1,29 +0,0 @@ -name: Connector Ops CI - QA Engine - -on: - workflow_dispatch: - schedule: - # same time as Generate Build Report - # 6AM UTC is 8AM EET, 7AM CET, 11PM PST. - - cron: "0 6 * * *" -jobs: - run-qa-engine: - name: "Run QA Engine" - runs-on: ubuntu-latest - steps: - - name: Checkout Airbyte - uses: actions/checkout@v3 - - name: Setup Cloud SDK - uses: google-github-actions/setup-gcloud@v0 - with: - service_account_key: ${{ secrets.QA_ENGINE_AIRBYTE_DATA_PROD_SA }} - export_default_credentials: true - - name: Preparing Runner to the Build process - uses: ./.github/actions/runner-prepare-for-build - - name: Install ci-connector-ops package - run: pip install airbyte-ci/connectors/qa-engine - - name: Run QA Engine - env: - LOGLEVEL: INFO - GITHUB_API_TOKEN: ${{ secrets.GH_PAT_MAINTENANCE_OCTAVIA }} - run: run-qa-engine --create-prs diff --git a/.github/workflows/slash-commands.yml b/.github/workflows/slash-commands.yml index 17e1ea6830602..f205da2d76c9b 100644 --- a/.github/workflows/slash-commands.yml +++ b/.github/workflows/slash-commands.yml @@ -23,11 +23,8 @@ jobs: permission: write commands: | test - legacy-test test-performance - publish publish-java-cdk - legacy-publish connector-performance static-args: | repo=${{ steps.getref.outputs.repo }} diff --git a/.github/workflows/stale-community-issues.yaml b/.github/workflows/stale-community-issues.yaml new file mode 100644 index 0000000000000..8e2a2005a83a1 --- /dev/null +++ b/.github/workflows/stale-community-issues.yaml @@ -0,0 +1,27 @@ +name: Stale Issue action +on: + schedule: + - cron: "0 9 * * *" + +jobs: + close-issues: + runs-on: ubuntu-latest + permissions: + issues: write + steps: + - uses: actions/stale@v5 + with: + any-of-labels: "community" + exempt-issue-labels: "frozen" + days-before-issue-stale: 180 + days-before-issue-close: 20 + stale-issue-label: "stale" + operations-per-run: 100 + ascending: true + stale-issue-message: > + At Airbyte, we seek to be clear about the project priorities and roadmap. + This issue has not had any activity for 180 days, suggesting that it's not as critical as others. + It's possible it has already been fixed. It is being marked as stale and will be closed in 20 days if there is no activity. + To keep it open, please comment to let us know why it is important to you and if it is still reproducible on recent versions of Airbyte. + close-issue-message: "This issue was closed because it has been inactive for 20 days since being marked as stale." + repo-token: ${{ secrets.GH_PAT_MAINTENANCE_OCTAVIA }} diff --git a/.github/workflows/stale-routed-issues.yaml b/.github/workflows/stale-routed-issues.yaml new file mode 100644 index 0000000000000..ac829669b8d2b --- /dev/null +++ b/.github/workflows/stale-routed-issues.yaml @@ -0,0 +1,25 @@ +name: Stale issue action +on: + schedule: + - cron: "* 8 * * *" + +jobs: + close-issues: + runs-on: ubuntu-latest + permissions: + issues: write + pull-requests: write + steps: + - uses: actions/stale@v5 + with: + any-of-labels: "frozen" + days-before-issue-stale: 365 + days-before-issue-close: 20 + stale-issue-label: "stale" + stale-issue-message: > + At Airbyte, we seek to be clear about the project priorities and roadmap. + This issue has not had any activity for 365 days, suggesting that it's not as critical as others. + It's possible it has already been fixed. It is being marked as stale and will be closed in 20 days if there is no activity. + To keep it open, please comment to let us know why it is important to you and if it is still reproducible on recent versions of Airbyte. + close-issue-message: "This issue was closed because it has been inactive for 20 days since being marked as stale." + repo-token: ${{ secrets.GH_PAT_MAINTENANCE_OCTAVIA }} diff --git a/.github/workflows/upload-metadata-files.yml b/.github/workflows/upload-metadata-files.yml index a3da502477480..7028c461ec8a5 100644 --- a/.github/workflows/upload-metadata-files.yml +++ b/.github/workflows/upload-metadata-files.yml @@ -12,7 +12,7 @@ jobs: uses: actions/checkout@v2 - name: Get changed files id: changed-files - uses: tj-actions/changed-files@v35 + uses: tj-actions/changed-files@v44 with: files: "airbyte-integrations/connectors/**/metadata.yaml" - name: Setup Python 3.10 diff --git a/.gitignore b/.gitignore index 1c085c1c381df..827ebe77a7969 100644 --- a/.gitignore +++ b/.gitignore @@ -102,3 +102,6 @@ scan-journal.log # connectors' cache *.sqlite + +# mvn +target/ diff --git a/airbyte-cdk/java/airbyte-cdk/README.md b/airbyte-cdk/java/airbyte-cdk/README.md index e158953abc5bf..5e5ac038047a4 100644 --- a/airbyte-cdk/java/airbyte-cdk/README.md +++ b/airbyte-cdk/java/airbyte-cdk/README.md @@ -16,7 +16,6 @@ This page will walk through the process of developing with the Java CDK. * [Publishing the CDK and switching to a pinned CDK reference](#publishing-the-cdk-and-switching-to-a-pinned-cdk-reference) * [Troubleshooting CDK Dependency Caches](#troubleshooting-cdk-dependency-caches) * [Developing a connector against a pinned CDK version](#developing-a-connector-against-a-pinned-cdk-version) - * [Common Debugging Tips](#common-debugging-tips) * [Changelog](#changelog) * [Java CDK](#java-cdk) @@ -24,9 +23,9 @@ This page will walk through the process of developing with the Java CDK. ### What is included in the Java CDK? -The java CDK is comprised of separate modules: +The java CDK is comprised of separate modules, among which: -- `core` - Shared classes for building connectors of all types. +- `dependencies` and `core` - Shared classes for building connectors of all types. - `db-sources` - Shared classes for building DB sources. - `db-destinations` - Shared classes for building DB destinations. @@ -34,7 +33,6 @@ Each CDK submodule may contain these elements: - `src/main` - (Required.) The classes that will ship with the connector, providing capabilities to the connectors. - `src/test` - (Required.) These are unit tests that run as part of every build of the CDK. They help ensure that CDK `main` code is in a healthy state. -- `src/test-integration` - (Optional.) Integration tests which provide a more extensive test of the code in `src/main`. These are not by the `build` command but are executed as part of the `integrationTest` or `integrationTestJava` Gradle tasks. - `src/testFixtures` - (Optional.) These shared classes are exported for connectors for use in the connectors' own test implementations. Connectors will have access to these classes within their unit and integration tests, but the classes will not be shipped with connectors when they are published. ### How is the CDK published? @@ -83,8 +81,8 @@ Note: - By running the publish with `dry-run=true`, you can confirm the process is working as expected, without actually publishing the changes. - In dry-run mode, you can also view and download the jars that are generated. To do so, navigate to the job status in GitHub Actions and navigate to the 'artifacts' section. - You can also invoke manually in the GitHub Web UI. To do so: go to `Actions` tab, select the `Publish Java CDK` workflow, and click `Run workflow`. -- You can view and administer published CDK versions here: https://admin.cloudrepo.io/repository/airbyte-public-jars/io/airbyte/airbyte-cdk -- The public endpoint for published CDK versions is here: https://airbyte.mycloudrepo.io/public/repositories/airbyte-public-jars/io/airbyte/airbyte-cdk/ +- You can view and administer published CDK versions here: https://admin.cloudrepo.io/repository/airbyte-public-jars/io/airbyte/cdk +- The public endpoint for published CDK versions is here: https://airbyte.mycloudrepo.io/public/repositories/airbyte-public-jars/io/airbyte/cdk/ ## Developing Connectors with the Java CDK @@ -94,7 +92,6 @@ You can reference the CDK in your connector's `build.gradle` file: ```groovy plugins { - id 'application' id 'airbyte-java-connector' } @@ -105,7 +102,6 @@ airbyteJavaConnector { // local cdk project. } -airbyteJavaConnector.addCdkDependencies() ``` Replace `0.1.0` with the CDK version you are working with. If you're actively developing the CDK and want to use the latest version locally, use the `useLocalCdk` flag to use the live CDK code during builds and tests. @@ -128,10 +124,7 @@ After the above, you can build and test your connector as usual. Gradle will aut Once you are done developing and testing your CDK changes: 1. Publish the CDK using the instructions here in this readme. -2. After publishing the CDK, update the `useLocalCdk` setting by running `./gradlew :airbyte-integrations:connectors::disableLocalCdkRefs`. to automatically revert `useLocalCdk` to `false`. -3. You can optionally run `./gradlew :airbyte-integrations:connectors::assertNotUsingLocalCdk` to ensure that the project is not using a local CDK reference. - -_Note: You can also use `./gradlew assertNotUsingLocalCdk` or `./gradlew disableLocalCdkRefs` to run these tasks on **all** connectors simultaneously._ +2. After publishing the CDK, update the `useLocalCdk` setting to `false`. ### Troubleshooting CDK Dependency Caches @@ -145,27 +138,85 @@ You can always pin your connector to a prior stable version of the CDK, which ma Maven and Gradle will automatically reference the correct (pinned) version of the CDK for your connector, and you can use your local IDE to browse the prior version of the codebase that corresponds to that version. -## Common Debugging Tips - -MavenLocal debugging steps: - -1. Confirm local publish status by running: - `ls -la ~/.m2/repository/io/airbyte/airbyte-cdk/*` -2. Confirm jar contents by running: - `jar tf ~/.m2/repository/io/airbyte/airbyte-cdk/0.0.2-SNAPSHOT/airbyte-cdk-0.0.2-SNAPSHOT.jar` -3. Remove CDK artifacts from MavenLocal by running: - `rm -rf ~/.m2/repository/io/airbyte/airbyte-cdk/*` -4. Rebuid CDK artifacts by running: - `./gradlew :airbyte-cdk:java:airbyte-cdk:build` - or - `./gradlew :airbyte-cdk:java:airbyte-cdk:publishToMavenLocal` - ## Changelog ### Java CDK | Version | Date | Pull Request | Subject | |:--------|:-----------|:-----------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 0.30.3 | 2024-04-12 | [\#37106](https://github.com/airbytehq/airbyte/pull/37106) | Destinations: Simplify constructors in `AsyncStreamConsumer` | +| 0.30.2 | 2024-04-12 | [\#36926](https://github.com/airbytehq/airbyte/pull/36926) | Destinations: Remove `JdbcSqlOperations#formatData`; misc changes for java interop | +| 0.30.1 | 2024-04-11 | [\#36919](https://github.com/airbytehq/airbyte/pull/36919) | Fix regression in sources conversion of null values | +| 0.30.0 | 2024-04-11 | [\#36974](https://github.com/airbytehq/airbyte/pull/36974) | Destinations: Pass config to jdbc sqlgenerator; allow cascade drop | +| 0.29.13 | 2024-04-10 | [\#36981](https://github.com/airbytehq/airbyte/pull/36981) | DB sources : Emit analytics for data type serialization errors. | +| 0.29.12 | 2024-04-10 | [\#36973](https://github.com/airbytehq/airbyte/pull/36973) | Destinations: Make flush batch size configurable for JdbcInsertFlush | +| 0.29.11 | 2024-04-10 | [\#36865](https://github.com/airbytehq/airbyte/pull/36865) | Sources : Remove noisy log line. | +| 0.29.10 | 2024-04-10 | [\#36805](https://github.com/airbytehq/airbyte/pull/36805) | Destinations: Enhance CatalogParser name collision handling; add DV2 tests for long identifiers | +| 0.29.9 | 2024-04-09 | [\#36047](https://github.com/airbytehq/airbyte/pull/36047) | Destinations: CDK updates for raw-only destinations | +| 0.29.8 | 2024-04-08 | [\#36868](https://github.com/airbytehq/airbyte/pull/36868) | Destinations: s3-destinations Compilation fixes for connector | +| 0.29.7 | 2024-04-08 | [\#36768](https://github.com/airbytehq/airbyte/pull/36768) | Destinations: Make destination state fetch/commit logic more resilient to errors | +| 0.29.6 | 2024-04-05 | [\#36577](https://github.com/airbytehq/airbyte/pull/36577) | Do not send system_error trace message for config exceptions. | +| 0.29.5 | 2024-04-05 | [\#36620](https://github.com/airbytehq/airbyte/pull/36620) | Missed changes - open for extension for destination-postgres | +| 0.29.3 | 2024-04-04 | [\#36759](https://github.com/airbytehq/airbyte/pull/36759) | Minor fixes. | +| 0.29.3 | 2024-04-04 | [\#36706](https://github.com/airbytehq/airbyte/pull/36706) | Enabling spotbugs for s3-destination. | +| 0.29.3 | 2024-04-03 | [\#36705](https://github.com/airbytehq/airbyte/pull/36705) | Enabling spotbugs for db-sources. | +| 0.29.3 | 2024-04-03 | [\#36704](https://github.com/airbytehq/airbyte/pull/36704) | Enabling spotbugs for datastore-postgres. | +| 0.29.3 | 2024-04-03 | [\#36703](https://github.com/airbytehq/airbyte/pull/36703) | Enabling spotbugs for gcs-destination. | +| 0.29.3 | 2024-04-03 | [\#36702](https://github.com/airbytehq/airbyte/pull/36702) | Enabling spotbugs for db-destinations. | +| 0.29.3 | 2024-04-03 | [\#36701](https://github.com/airbytehq/airbyte/pull/36701) | Enabling spotbugs for typing_and_deduping. | +| 0.29.3 | 2024-04-03 | [\#36612](https://github.com/airbytehq/airbyte/pull/36612) | Enabling spotbugs for dependencies. | +| 0.29.5 | 2024-04-05 | [\#36577](https://github.com/airbytehq/airbyte/pull/36577) | Do not send system_error trace message for config exceptions. | +| 0.29.3 | 2024-04-04 | [\#36759](https://github.com/airbytehq/airbyte/pull/36759) | Minor fixes. | +| 0.29.3 | 2024-04-04 | [\#36706](https://github.com/airbytehq/airbyte/pull/36706) | Enabling spotbugs for s3-destination. | +| 0.29.3 | 2024-04-03 | [\#36705](https://github.com/airbytehq/airbyte/pull/36705) | Enabling spotbugs for db-sources. | +| 0.29.3 | 2024-04-03 | [\#36704](https://github.com/airbytehq/airbyte/pull/36704) | Enabling spotbugs for datastore-postgres. | +| 0.29.3 | 2024-04-03 | [\#36703](https://github.com/airbytehq/airbyte/pull/36703) | Enabling spotbugs for gcs-destination. | +| 0.29.3 | 2024-04-03 | [\#36702](https://github.com/airbytehq/airbyte/pull/36702) | Enabling spotbugs for db-destinations. | +| 0.29.3 | 2024-04-03 | [\#36701](https://github.com/airbytehq/airbyte/pull/36701) | Enabling spotbugs for typing_and_deduping. | +| 0.29.3 | 2024-04-03 | [\#36612](https://github.com/airbytehq/airbyte/pull/36612) | Enabling spotbugs for dependencies. | +| 0.29.2 | 2024-04-04 | [\#36845](https://github.com/airbytehq/airbyte/pull/36772) | Changes to make source-mongo compileable | +| 0.29.1 | 2024-04-03 | [\#36772](https://github.com/airbytehq/airbyte/pull/36772) | Changes to make source-mssql compileable | +| 0.29.0 | 2024-04-02 | [\#36759](https://github.com/airbytehq/airbyte/pull/36759) | Build artifact publication changes and fixes. | +| 0.28.21 | 2024-04-02 | [\#36673](https://github.com/airbytehq/airbyte/pull/36673) | Change the destination message parsing to use standard java/kotlin classes. Adds logging to catch empty lines. | +| 0.28.20 | 2024-04-01 | [\#36584](https://github.com/airbytehq/airbyte/pull/36584) | Changes to make source-postgres compileable | +| 0.28.19 | 2024-03-29 | [\#36619](https://github.com/airbytehq/airbyte/pull/36619) | Changes to make destination-postgres compileable | +| 0.28.19 | 2024-03-29 | [\#36588](https://github.com/airbytehq/airbyte/pull/36588) | Changes to make destination-redshift compileable | +| 0.28.19 | 2024-03-29 | [\#36610](https://github.com/airbytehq/airbyte/pull/36610) | remove airbyte-api generation, pull depdendency jars instead | +| 0.28.19 | 2024-03-29 | [\#36611](https://github.com/airbytehq/airbyte/pull/36611) | disable spotbugs for CDK tes and testFixtures tasks | +| 0.28.18 | 2024-03-28 | [\#36606](https://github.com/airbytehq/airbyte/pull/36574) | disable spotbugs for CDK tes and testFixtures tasks | +| 0.28.18 | 2024-03-28 | [\#36574](https://github.com/airbytehq/airbyte/pull/36574) | Fix ContainerFactory | +| 0.28.18 | 2024-03-27 | [\#36570](https://github.com/airbytehq/airbyte/pull/36570) | Convert missing s3-destinations tests to Kotlin | +| 0.28.18 | 2024-03-27 | [\#36446](https://github.com/airbytehq/airbyte/pull/36446) | Convert dependencies submodule to Kotlin | +| 0.28.18 | 2024-03-27 | [\#36445](https://github.com/airbytehq/airbyte/pull/36445) | Convert functional out Checked interfaces to kotlin | +| 0.28.18 | 2024-03-27 | [\#36444](https://github.com/airbytehq/airbyte/pull/36444) | Use apache-commons classes in our Checked functional interfaces | +| 0.28.18 | 2024-03-27 | [\#36467](https://github.com/airbytehq/airbyte/pull/36467) | Convert #36465 to Kotlin | +| 0.28.18 | 2024-03-27 | [\#36473](https://github.com/airbytehq/airbyte/pull/36473) | Convert convert #36396 to Kotlin | +| 0.28.18 | 2024-03-27 | [\#36439](https://github.com/airbytehq/airbyte/pull/36439) | Convert db-destinations submodule to Kotlin | +| 0.28.18 | 2024-03-27 | [\#36438](https://github.com/airbytehq/airbyte/pull/36438) | Convert db-sources submodule to Kotlin | +| 0.28.18 | 2024-03-26 | [\#36437](https://github.com/airbytehq/airbyte/pull/36437) | Convert gsc submodule to Kotlin | +| 0.28.18 | 2024-03-26 | [\#36421](https://github.com/airbytehq/airbyte/pull/36421) | Convert typing-deduping submodule to Kotlin | +| 0.28.18 | 2024-03-26 | [\#36420](https://github.com/airbytehq/airbyte/pull/36420) | Convert s3-destinations submodule to Kotlin | +| 0.28.18 | 2024-03-26 | [\#36419](https://github.com/airbytehq/airbyte/pull/36419) | Convert azure submodule to Kotlin | +| 0.28.18 | 2024-03-26 | [\#36413](https://github.com/airbytehq/airbyte/pull/36413) | Convert postgres submodule to Kotlin | +| 0.28.18 | 2024-03-26 | [\#36412](https://github.com/airbytehq/airbyte/pull/36412) | Convert mongodb submodule to Kotlin | +| 0.28.18 | 2024-03-26 | [\#36411](https://github.com/airbytehq/airbyte/pull/36411) | Convert datastore-bigquery submodule to Kotlin | +| 0.28.18 | 2024-03-26 | [\#36205](https://github.com/airbytehq/airbyte/pull/36205) | Convert core/main to Kotlin | +| 0.28.18 | 2024-03-26 | [\#36204](https://github.com/airbytehq/airbyte/pull/36204) | Convert core/test to Kotlin | +| 0.28.18 | 2024-03-26 | [\#36190](https://github.com/airbytehq/airbyte/pull/36190) | Convert core/testFixtures to Kotlin | +| 0.28.0 | 2024-03-26 | [\#36514](https://github.com/airbytehq/airbyte/pull/36514) | Bump CDK version to 0.28.0 | +| 0.27.7 | 2024-03-26 | [\#36466](https://github.com/airbytehq/airbyte/pull/36466) | Destinations: fix support for case-sensitive fields in destination state. | +| 0.27.6 | 2024-03-26 | [\#36432](https://github.com/airbytehq/airbyte/pull/36432) | Sources support for AirbyteRecordMessageMeta during reading source data types. | +| 0.27.5 | 2024-03-25 | [\#36461](https://github.com/airbytehq/airbyte/pull/36461) | Destinations: Handle case-sensitive columns in destination state handling. | +| 0.27.4 | 2024-03-25 | [\#36333](https://github.com/airbytehq/airbyte/pull/36333) | Sunset DebeziumSourceDecoratingIterator. | +| 0.27.1 | 2024-03-22 | [\#36296](https://github.com/airbytehq/airbyte/pull/36296) | Destinations: (async framework) Do not log invalid message data. | +| 0.27.0 | 2024-03-21 | [\#36364](https://github.com/airbytehq/airbyte/pull/36364) | Sources: Increase debezium initial record wait time to 40 minute. | +| 0.26.1 | 2024-03-19 | [\#35599](https://github.com/airbytehq/airbyte/pull/35599) | Sunset SourceDecoratingIterator. | +| 0.26.0 | 2024-03-19 | [\#36263](https://github.com/airbytehq/airbyte/pull/36263) | Improve conversion of debezium Date type for some edge case in mssql. | +| 0.25.0 | 2024-03-18 | [\#36203](https://github.com/airbytehq/airbyte/pull/36203) | Wiring of Transformer to StagingConsumerFactory and JdbcBufferedConsumerFactory; import changes for Kotlin conversion; State message logs to debug | +| 0.24.1 | 2024-03-13 | [\#36022](https://github.com/airbytehq/airbyte/pull/36022) | Move log4j2-test.xml to test fixtures, away from runtime classpath. | +| 0.24.0 | 2024-03-13 | [\#35944](https://github.com/airbytehq/airbyte/pull/35944) | Add `_airbyte_meta` in raw table and test fixture updates | +| 0.23.20 | 2024-03-12 | [\#36011](https://github.com/airbytehq/airbyte/pull/36011) | Debezium configuration for conversion of null value on a column with default value. | +| 0.23.19 | 2024-03-11 | [\#35904](https://github.com/airbytehq/airbyte/pull/35904) | Add retries to the debezium engine. | | 0.23.18 | 2024-03-07 | [\#35899](https://github.com/airbytehq/airbyte/pull/35899) | Null check when retrieving destination state | | 0.23.16 | 2024-03-06 | [\#35842](https://github.com/airbytehq/airbyte/pull/35842) | Improve logging in debezium processing. | | 0.23.15 | 2024-03-05 | [\#35827](https://github.com/airbytehq/airbyte/pull/35827) | improving the Junit interceptor. | diff --git a/airbyte-cdk/java/airbyte-cdk/azure-destinations/build.gradle b/airbyte-cdk/java/airbyte-cdk/azure-destinations/build.gradle index 9aa48c2623eae..00798c33d3880 100644 --- a/airbyte-cdk/java/airbyte-cdk/azure-destinations/build.gradle +++ b/airbyte-cdk/java/airbyte-cdk/azure-destinations/build.gradle @@ -1,6 +1,14 @@ +compileKotlin { + compilerOptions { + allWarningsAsErrors = false + } +} + dependencies { - implementation project(':airbyte-cdk:java:airbyte-cdk:dependencies') - implementation project(':airbyte-cdk:java:airbyte-cdk:core') + implementation project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-dependencies') + implementation project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-core') implementation 'com.azure:azure-storage-blob:12.12.0' + + testFixturesApi testFixtures(project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-core')) } diff --git a/airbyte-cdk/java/airbyte-cdk/azure-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/copy/azure/AzureBlobStorageConfig.java b/airbyte-cdk/java/airbyte-cdk/azure-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/copy/azure/AzureBlobStorageConfig.java deleted file mode 100644 index db5a806ea2a6a..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/azure-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/copy/azure/AzureBlobStorageConfig.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.jdbc.copy.azure; - -import com.fasterxml.jackson.databind.JsonNode; -import java.util.Locale; - -public class AzureBlobStorageConfig { - - private static final String DEFAULT_STORAGE_ENDPOINT_DOMAIN_NAME = "blob.core.windows.net"; - - private final String endpointDomainName; - private final String accountName; - private final String containerName; - private final String sasToken; - - public AzureBlobStorageConfig( - String endpointDomainName, - String accountName, - String containerName, - String sasToken) { - this.endpointDomainName = endpointDomainName; - this.accountName = accountName; - this.containerName = containerName; - this.sasToken = sasToken; - } - - public String getEndpointDomainName() { - return endpointDomainName == null ? DEFAULT_STORAGE_ENDPOINT_DOMAIN_NAME : endpointDomainName; - } - - public String getAccountName() { - return accountName; - } - - public String getContainerName() { - return containerName; - } - - public String getSasToken() { - return sasToken; - } - - public String getEndpointUrl() { - return String.format(Locale.ROOT, "https://%s.%s", getAccountName(), getEndpointDomainName()); - } - - public static AzureBlobStorageConfig getAzureBlobConfig(JsonNode config) { - - return new AzureBlobStorageConfig( - config.get("azure_blob_storage_endpoint_domain_name") == null ? DEFAULT_STORAGE_ENDPOINT_DOMAIN_NAME - : config.get("azure_blob_storage_endpoint_domain_name").asText(), - config.get("azure_blob_storage_account_name").asText(), - config.get("azure_blob_storage_container_name").asText(), - config.get("azure_blob_storage_sas_token").asText()); - - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/azure-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/copy/azure/AzureBlobStorageStreamCopier.java b/airbyte-cdk/java/airbyte-cdk/azure-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/copy/azure/AzureBlobStorageStreamCopier.java deleted file mode 100644 index a4fb22120a218..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/azure-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/copy/azure/AzureBlobStorageStreamCopier.java +++ /dev/null @@ -1,276 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.jdbc.copy.azure; - -import com.azure.storage.blob.BlobContainerClient; -import com.azure.storage.blob.specialized.AppendBlobClient; -import com.azure.storage.blob.specialized.SpecializedBlobClientBuilder; -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.cdk.db.jdbc.JdbcDatabase; -import io.airbyte.cdk.integrations.destination.StandardNameTransformer; -import io.airbyte.cdk.integrations.destination.jdbc.SqlOperations; -import io.airbyte.cdk.integrations.destination.jdbc.StagingFilenameGenerator; -import io.airbyte.cdk.integrations.destination.jdbc.constants.GlobalDataSizeConstants; -import io.airbyte.cdk.integrations.destination.jdbc.copy.StreamCopier; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.DestinationSyncMode; -import java.io.BufferedOutputStream; -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.PrintWriter; -import java.nio.charset.StandardCharsets; -import java.sql.SQLException; -import java.sql.Timestamp; -import java.time.Instant; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Set; -import java.util.UUID; -import org.apache.commons.csv.CSVFormat; -import org.apache.commons.csv.CSVPrinter; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public abstract class AzureBlobStorageStreamCopier implements StreamCopier { - - private static final Logger LOGGER = LoggerFactory.getLogger(AzureBlobStorageStreamCopier.class); - protected StagingFilenameGenerator filenameGenerator; - protected final String stagingFolder; - protected final Set azureStagingFiles = new HashSet<>(); - protected final AzureBlobStorageConfig azureBlobConfig; - protected final String tmpTableName; - protected final String schemaName; - protected final String streamName; - protected final JdbcDatabase db; - protected final Set activeStagingWriterFileNames = new HashSet<>(); - private final StandardNameTransformer nameTransformer; - private final SqlOperations sqlOperations; - private final DestinationSyncMode destSyncMode; - private final SpecializedBlobClientBuilder specializedBlobClientBuilder; - private final HashMap csvPrinters = new HashMap<>(); - private final HashMap blobClients = new HashMap<>(); - private String currentFile; - - public AzureBlobStorageStreamCopier(final String stagingFolder, - final DestinationSyncMode destSyncMode, - final String schema, - final String streamName, - final SpecializedBlobClientBuilder specializedBlobClientBuilder, - final JdbcDatabase db, - final AzureBlobStorageConfig azureBlobConfig, - final StandardNameTransformer nameTransformer, - final SqlOperations sqlOperations) { - this.stagingFolder = stagingFolder; - this.destSyncMode = destSyncMode; - this.schemaName = schema; - this.streamName = streamName; - this.db = db; - this.nameTransformer = nameTransformer; - this.sqlOperations = sqlOperations; - this.tmpTableName = nameTransformer.getTmpTableName(streamName); - this.specializedBlobClientBuilder = specializedBlobClientBuilder; - this.azureBlobConfig = azureBlobConfig; - this.filenameGenerator = new StagingFilenameGenerator(streamName, GlobalDataSizeConstants.DEFAULT_MAX_BATCH_SIZE_BYTES); - } - - public static void attemptAzureBlobWriteAndDelete(final AzureBlobStorageConfig config) { - AppendBlobClient appendBlobClient = null; - try { - appendBlobClient = new SpecializedBlobClientBuilder() - .endpoint(config.getEndpointUrl()) - .sasToken(config.getSasToken()) - .containerName(config.getContainerName()) - .blobName("testAzureBlob" + UUID.randomUUID()) - .buildAppendBlobClient(); - - final BlobContainerClient containerClient = getBlobContainerClient(appendBlobClient); - writeTestDataIntoBlob(appendBlobClient); - listCreatedBlob(containerClient); - } finally { - if (appendBlobClient != null && appendBlobClient.exists()) { - LOGGER.info("Deleting blob: " + appendBlobClient.getBlobName()); - appendBlobClient.delete(); - } - } - - } - - private static void listCreatedBlob(final BlobContainerClient containerClient) { - containerClient.listBlobs().forEach(blobItem -> LOGGER.info("Blob name: " + blobItem.getName() + "Snapshot: " + blobItem.getSnapshot())); - } - - private static void writeTestDataIntoBlob(final AppendBlobClient appendBlobClient) { - final String test = "test_data"; - LOGGER.info("Writing test data to Azure Blob storage: " + test); - final InputStream dataStream = new ByteArrayInputStream(test.getBytes(StandardCharsets.UTF_8)); - - final Integer blobCommittedBlockCount = appendBlobClient.appendBlock(dataStream, test.length()) - .getBlobCommittedBlockCount(); - - LOGGER.info("blobCommittedBlockCount: " + blobCommittedBlockCount); - } - - private static BlobContainerClient getBlobContainerClient(final AppendBlobClient appendBlobClient) { - final BlobContainerClient containerClient = appendBlobClient.getContainerClient(); - if (!containerClient.exists()) { - containerClient.create(); - } - - if (!appendBlobClient.exists()) { - appendBlobClient.create(); - LOGGER.info("blobContainerClient created"); - } else { - LOGGER.info("blobContainerClient already exists"); - } - return containerClient; - } - - public Set getAzureStagingFiles() { - return azureStagingFiles; - } - - @Override - public void write(final UUID id, final AirbyteRecordMessage recordMessage, final String azureFileName) throws Exception { - if (csvPrinters.containsKey(azureFileName)) { - csvPrinters.get(azureFileName).printRecord(id, - Jsons.serialize(recordMessage.getData()), - Timestamp.from(Instant.ofEpochMilli(recordMessage.getEmittedAt()))); - } - } - - @Override - public String prepareStagingFile() { - currentFile = prepareAzureStagingFile(); - if (!azureStagingFiles.contains(currentFile)) { - - azureStagingFiles.add(currentFile); - activeStagingWriterFileNames.add(currentFile); - - final AppendBlobClient appendBlobClient = specializedBlobClientBuilder - .blobName(currentFile) - .buildAppendBlobClient(); - blobClients.put(currentFile, appendBlobClient); - appendBlobClient.create(true); - - final BufferedOutputStream bufferedOutputStream = - new BufferedOutputStream(appendBlobClient.getBlobOutputStream(), Math.toIntExact(GlobalDataSizeConstants.MAX_FILE_SIZE)); - final var writer = new PrintWriter(bufferedOutputStream, true, StandardCharsets.UTF_8); - try { - csvPrinters.put(currentFile, new CSVPrinter(writer, CSVFormat.DEFAULT)); - } catch (final IOException e) { - throw new RuntimeException(e); - } - } - return currentFile; - } - - private String prepareAzureStagingFile() { - return String.join("/", stagingFolder, schemaName, filenameGenerator.getStagingFilename()); - } - - @Override - public void closeStagingUploader(final boolean hasFailed) throws Exception { - LOGGER.info("Uploading remaining data for {} stream.", streamName); - for (final var csvPrinter : csvPrinters.values()) { - csvPrinter.close(); - } - LOGGER.info("All data for {} stream uploaded.", streamName); - } - - @Override - public void createDestinationSchema() throws Exception { - LOGGER.info("Creating schema in destination if it doesn't exist: {}", schemaName); - sqlOperations.createSchemaIfNotExists(db, schemaName); - } - - @Override - public void createTemporaryTable() throws Exception { - LOGGER.info("Preparing tmp table in destination for stream: {}, schema: {}, tmp table name: {}.", streamName, schemaName, tmpTableName); - sqlOperations.createTableIfNotExists(db, schemaName, tmpTableName); - } - - @Override - public void copyStagingFileToTemporaryTable() throws Exception { - LOGGER.info("Starting copy to tmp table: {} in destination for stream: {}, schema: {}.", tmpTableName, streamName, schemaName); - for (final var azureStagingFile : azureStagingFiles) { - copyAzureBlobCsvFileIntoTable(db, getFullAzurePath(azureStagingFile), schemaName, tmpTableName, azureBlobConfig); - } - LOGGER.info("Copy to tmp table {} in destination for stream {} complete.", tmpTableName, streamName); - } - - private String getFullAzurePath(final String azureStagingFile) { - return "azure://" + azureBlobConfig.getAccountName() + "." + azureBlobConfig.getEndpointDomainName() - + "/" + azureBlobConfig.getContainerName() + "/" + azureStagingFile; - } - - @Override - public String createDestinationTable() throws Exception { - final var destTableName = nameTransformer.getRawTableName(streamName); - LOGGER.info("Preparing table {} in destination.", destTableName); - sqlOperations.createTableIfNotExists(db, schemaName, destTableName); - LOGGER.info("Table {} in destination prepared.", tmpTableName); - - return destTableName; - } - - @Override - public String generateMergeStatement(final String destTableName) throws Exception { - LOGGER.info("Preparing to merge tmp table {} to dest table: {}, schema: {}, in destination.", tmpTableName, destTableName, schemaName); - final var queries = new StringBuilder(); - if (destSyncMode.equals(DestinationSyncMode.OVERWRITE)) { - queries.append(sqlOperations.truncateTableQuery(db, schemaName, destTableName)); - LOGGER.info("Destination OVERWRITE mode detected. Dest table: {}, schema: {}, truncated.", destTableName, schemaName); - } - queries.append(sqlOperations.insertTableQuery(db, schemaName, tmpTableName, destTableName)); - return queries.toString(); - } - - @Override - public void removeFileAndDropTmpTable() throws Exception { - LOGGER.info("Begin cleaning azure blob staging files."); - for (final AppendBlobClient appendBlobClient : blobClients.values()) { - appendBlobClient.delete(); - } - LOGGER.info("Azure Blob staging files cleaned."); - - LOGGER.info("Begin cleaning {} tmp table in destination.", tmpTableName); - sqlOperations.dropTableIfExists(db, schemaName, tmpTableName); - LOGGER.info("{} tmp table in destination cleaned.", tmpTableName); - } - - @Override - public void closeNonCurrentStagingFileWriters() throws Exception { - LOGGER.info("Begin closing non current file writers"); - final Set removedKeys = new HashSet<>(); - for (final String key : activeStagingWriterFileNames) { - if (!key.equals(currentFile)) { - csvPrinters.get(key).close(); - csvPrinters.remove(key); - removedKeys.add(key); - } - } - activeStagingWriterFileNames.removeAll(removedKeys); - } - - @Override - public String getCurrentFile() { - return currentFile; - } - - @VisibleForTesting - public String getTmpTableName() { - return tmpTableName; - } - - public abstract void copyAzureBlobCsvFileIntoTable(JdbcDatabase database, - String snowflakeAzureExternalStageName, - String schema, - String tableName, - AzureBlobStorageConfig config) - throws SQLException; - -} diff --git a/airbyte-cdk/java/airbyte-cdk/azure-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/copy/azure/AzureBlobStorageStreamCopierFactory.java b/airbyte-cdk/java/airbyte-cdk/azure-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/copy/azure/AzureBlobStorageStreamCopierFactory.java deleted file mode 100644 index 043b2435d695f..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/azure-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/copy/azure/AzureBlobStorageStreamCopierFactory.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.jdbc.copy.azure; - -import com.azure.storage.blob.specialized.SpecializedBlobClientBuilder; -import io.airbyte.cdk.db.jdbc.JdbcDatabase; -import io.airbyte.cdk.integrations.destination.StandardNameTransformer; -import io.airbyte.cdk.integrations.destination.jdbc.SqlOperations; -import io.airbyte.cdk.integrations.destination.jdbc.copy.StreamCopier; -import io.airbyte.cdk.integrations.destination.jdbc.copy.StreamCopierFactory; -import io.airbyte.protocol.models.v0.AirbyteStream; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.v0.DestinationSyncMode; - -public abstract class AzureBlobStorageStreamCopierFactory implements StreamCopierFactory { - - @Override - public StreamCopier create(String configuredSchema, - AzureBlobStorageConfig azureBlobConfig, - String stagingFolder, - ConfiguredAirbyteStream configuredStream, - StandardNameTransformer nameTransformer, - JdbcDatabase db, - SqlOperations sqlOperations) { - try { - AirbyteStream stream = configuredStream.getStream(); - DestinationSyncMode syncMode = configuredStream.getDestinationSyncMode(); - String schema = StreamCopierFactory.getSchema(stream.getNamespace(), configuredSchema, nameTransformer); - String streamName = stream.getName(); - - final SpecializedBlobClientBuilder specializedBlobClientBuilder = new SpecializedBlobClientBuilder() - .endpoint(azureBlobConfig.getEndpointUrl()) - .sasToken(azureBlobConfig.getSasToken()) - .containerName(azureBlobConfig.getContainerName()); - - return create(stagingFolder, syncMode, schema, streamName, specializedBlobClientBuilder, db, azureBlobConfig, nameTransformer, sqlOperations); - } catch (Exception e) { - throw new RuntimeException(e); - } - } - - public abstract StreamCopier create(String stagingFolder, - DestinationSyncMode syncMode, - String schema, - String streamName, - SpecializedBlobClientBuilder specializedBlobClientBuilder, - JdbcDatabase db, - AzureBlobStorageConfig azureBlobConfig, - StandardNameTransformer nameTransformer, - SqlOperations sqlOperations) - throws Exception; - -} diff --git a/airbyte-cdk/java/airbyte-cdk/azure-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/jdbc/copy/azure/AzureBlobStorageConfig.kt b/airbyte-cdk/java/airbyte-cdk/azure-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/jdbc/copy/azure/AzureBlobStorageConfig.kt new file mode 100644 index 0000000000000..748621ab79301 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/azure-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/jdbc/copy/azure/AzureBlobStorageConfig.kt @@ -0,0 +1,33 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.jdbc.copy.azure + +import com.fasterxml.jackson.databind.JsonNode +import java.util.* + +class AzureBlobStorageConfig( + val endpointDomainName: String = DEFAULT_STORAGE_ENDPOINT_DOMAIN_NAME, + val accountName: String, + val containerName: String, + val sasToken: String +) { + + val endpointUrl: String + get() = String.format(Locale.ROOT, "https://%s.%s", accountName, endpointDomainName) + + companion object { + private const val DEFAULT_STORAGE_ENDPOINT_DOMAIN_NAME = "blob.core.windows.net" + + fun getAzureBlobConfig(config: JsonNode): AzureBlobStorageConfig { + return AzureBlobStorageConfig( + if (config["azure_blob_storage_endpoint_domain_name"] == null) + DEFAULT_STORAGE_ENDPOINT_DOMAIN_NAME + else config["azure_blob_storage_endpoint_domain_name"].asText(), + config["azure_blob_storage_account_name"].asText(), + config["azure_blob_storage_container_name"].asText(), + config["azure_blob_storage_sas_token"].asText() + ) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/azure-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/jdbc/copy/azure/AzureBlobStorageStreamCopier.kt b/airbyte-cdk/java/airbyte-cdk/azure-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/jdbc/copy/azure/AzureBlobStorageStreamCopier.kt new file mode 100644 index 0000000000000..41b9621eec19b --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/azure-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/jdbc/copy/azure/AzureBlobStorageStreamCopier.kt @@ -0,0 +1,299 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.jdbc.copy.azure + +import com.azure.storage.blob.BlobContainerClient +import com.azure.storage.blob.models.BlobItem +import com.azure.storage.blob.specialized.AppendBlobClient +import com.azure.storage.blob.specialized.SpecializedBlobClientBuilder +import com.google.common.annotations.VisibleForTesting +import io.airbyte.cdk.db.jdbc.JdbcDatabase +import io.airbyte.cdk.integrations.destination.StandardNameTransformer +import io.airbyte.cdk.integrations.destination.jdbc.SqlOperations +import io.airbyte.cdk.integrations.destination.jdbc.StagingFilenameGenerator +import io.airbyte.cdk.integrations.destination.jdbc.constants.GlobalDataSizeConstants +import io.airbyte.cdk.integrations.destination.jdbc.copy.StreamCopier +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.v0.AirbyteRecordMessage +import io.airbyte.protocol.models.v0.DestinationSyncMode +import java.io.* +import java.nio.charset.StandardCharsets +import java.sql.SQLException +import java.sql.Timestamp +import java.time.Instant +import java.util.* +import java.util.function.Consumer +import org.apache.commons.csv.CSVFormat +import org.apache.commons.csv.CSVPrinter +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +abstract class AzureBlobStorageStreamCopier( + protected val stagingFolder: String, + private val destSyncMode: DestinationSyncMode, + protected val schemaName: String, + protected val streamName: String, + private val specializedBlobClientBuilder: SpecializedBlobClientBuilder, + protected val db: JdbcDatabase, + protected val azureBlobConfig: AzureBlobStorageConfig, + private val nameTransformer: StandardNameTransformer, + private val sqlOperations: SqlOperations +) : StreamCopier { + protected var filenameGenerator: StagingFilenameGenerator = + StagingFilenameGenerator( + streamName, + GlobalDataSizeConstants.DEFAULT_MAX_BATCH_SIZE_BYTES.toLong() + ) + protected val azureStagingFiles: MutableSet = HashSet() + + @get:VisibleForTesting val tmpTableName: String = nameTransformer.getTmpTableName(streamName) + protected val activeStagingWriterFileNames: MutableSet = HashSet() + private val csvPrinters = HashMap() + private val blobClients = HashMap() + override var currentFile: String? = null + + @Throws(Exception::class) + override fun write(id: UUID?, recordMessage: AirbyteRecordMessage?, azureFileName: String?) { + if (csvPrinters.containsKey(azureFileName)) { + csvPrinters[azureFileName]!!.printRecord( + id, + Jsons.serialize(recordMessage!!.data), + Timestamp.from(Instant.ofEpochMilli(recordMessage.emittedAt)) + ) + } + } + + override fun prepareStagingFile(): String? { + currentFile = prepareAzureStagingFile() + val currentFile = this.currentFile!! + if (!azureStagingFiles.contains(currentFile)) { + azureStagingFiles.add(currentFile) + activeStagingWriterFileNames.add(currentFile) + + val appendBlobClient = + specializedBlobClientBuilder.blobName(currentFile).buildAppendBlobClient() + blobClients[currentFile] = appendBlobClient + appendBlobClient.create(true) + + val bufferedOutputStream = + BufferedOutputStream( + appendBlobClient.blobOutputStream, + Math.toIntExact(GlobalDataSizeConstants.MAX_FILE_SIZE) + ) + val writer = PrintWriter(bufferedOutputStream, true, StandardCharsets.UTF_8) + try { + csvPrinters[currentFile] = CSVPrinter(writer, CSVFormat.DEFAULT) + } catch (e: IOException) { + throw RuntimeException(e) + } + } + return currentFile + } + + private fun prepareAzureStagingFile(): String { + return java.lang.String.join( + "/", + stagingFolder, + schemaName, + filenameGenerator.stagingFilename + ) + } + + @Throws(Exception::class) + override fun closeStagingUploader(hasFailed: Boolean) { + LOGGER.info("Uploading remaining data for {} stream.", streamName) + for (csvPrinter in csvPrinters.values) { + csvPrinter.close() + } + LOGGER.info("All data for {} stream uploaded.", streamName) + } + + @Throws(Exception::class) + override fun createDestinationSchema() { + LOGGER.info("Creating schema in destination if it doesn't exist: {}", schemaName) + sqlOperations.createSchemaIfNotExists(db, schemaName) + } + + @Throws(Exception::class) + override fun createTemporaryTable() { + LOGGER.info( + "Preparing tmp table in destination for stream: {}, schema: {}, tmp table name: {}.", + streamName, + schemaName, + tmpTableName + ) + sqlOperations.createTableIfNotExists(db, schemaName, tmpTableName) + } + + @Throws(Exception::class) + override fun copyStagingFileToTemporaryTable() { + LOGGER.info( + "Starting copy to tmp table: {} in destination for stream: {}, schema: {}.", + tmpTableName, + streamName, + schemaName + ) + for (azureStagingFile in azureStagingFiles) { + copyAzureBlobCsvFileIntoTable( + db, + getFullAzurePath(azureStagingFile), + schemaName, + tmpTableName, + azureBlobConfig + ) + } + LOGGER.info( + "Copy to tmp table {} in destination for stream {} complete.", + tmpTableName, + streamName + ) + } + + private fun getFullAzurePath(azureStagingFile: String?): String { + return ("azure://" + + azureBlobConfig.accountName + + "." + + azureBlobConfig.endpointDomainName + + "/" + + azureBlobConfig.containerName + + "/" + + azureStagingFile) + } + + @Throws(Exception::class) + override fun createDestinationTable(): String? { + val destTableName = nameTransformer.getRawTableName(streamName) + LOGGER.info("Preparing table {} in destination.", destTableName) + sqlOperations.createTableIfNotExists(db, schemaName, destTableName) + LOGGER.info("Table {} in destination prepared.", tmpTableName) + + return destTableName + } + + @Throws(Exception::class) + override fun generateMergeStatement(destTableName: String?): String { + LOGGER.info( + "Preparing to merge tmp table {} to dest table: {}, schema: {}, in destination.", + tmpTableName, + destTableName, + schemaName + ) + val queries = StringBuilder() + if (destSyncMode == DestinationSyncMode.OVERWRITE) { + queries.append(sqlOperations.truncateTableQuery(db, schemaName, destTableName)) + LOGGER.info( + "Destination OVERWRITE mode detected. Dest table: {}, schema: {}, truncated.", + destTableName, + schemaName + ) + } + queries.append(sqlOperations.insertTableQuery(db, schemaName, tmpTableName, destTableName)) + return queries.toString() + } + + @Throws(Exception::class) + override fun removeFileAndDropTmpTable() { + LOGGER.info("Begin cleaning azure blob staging files.") + for (appendBlobClient in blobClients.values) { + appendBlobClient.delete() + } + LOGGER.info("Azure Blob staging files cleaned.") + + LOGGER.info("Begin cleaning {} tmp table in destination.", tmpTableName) + sqlOperations.dropTableIfExists(db, schemaName, tmpTableName) + LOGGER.info("{} tmp table in destination cleaned.", tmpTableName) + } + + @Throws(Exception::class) + override fun closeNonCurrentStagingFileWriters() { + LOGGER.info("Begin closing non current file writers") + val removedKeys: MutableSet = HashSet() + for (key in activeStagingWriterFileNames) { + if (key != currentFile) { + csvPrinters[key]!!.close() + csvPrinters.remove(key) + removedKeys.add(key) + } + } + activeStagingWriterFileNames.removeAll(removedKeys) + } + + @Throws(SQLException::class) + abstract fun copyAzureBlobCsvFileIntoTable( + database: JdbcDatabase?, + snowflakeAzureExternalStageName: String?, + schema: String?, + tableName: String?, + config: AzureBlobStorageConfig? + ) + + companion object { + private val LOGGER: Logger = + LoggerFactory.getLogger(AzureBlobStorageStreamCopier::class.java) + fun attemptAzureBlobWriteAndDelete(config: AzureBlobStorageConfig) { + var appendBlobClient: AppendBlobClient? = null + try { + appendBlobClient = + SpecializedBlobClientBuilder() + .endpoint(config.endpointUrl) + .sasToken(config.sasToken) + .containerName(config.containerName) + .blobName("testAzureBlob" + UUID.randomUUID()) + .buildAppendBlobClient() + + val containerClient = getBlobContainerClient(appendBlobClient) + writeTestDataIntoBlob(appendBlobClient) + listCreatedBlob(containerClient) + } finally { + if (appendBlobClient != null && appendBlobClient.exists()) { + LOGGER.info("Deleting blob: " + appendBlobClient.blobName) + appendBlobClient.delete() + } + } + } + + private fun listCreatedBlob(containerClient: BlobContainerClient) { + containerClient + .listBlobs() + .forEach( + Consumer { blobItem: BlobItem -> + LOGGER.info( + "Blob name: " + blobItem.name + "Snapshot: " + blobItem.snapshot + ) + } + ) + } + + private fun writeTestDataIntoBlob(appendBlobClient: AppendBlobClient?) { + val test = "test_data" + LOGGER.info("Writing test data to Azure Blob storage: $test") + val dataStream: InputStream = + ByteArrayInputStream(test.toByteArray(StandardCharsets.UTF_8)) + + val blobCommittedBlockCount = + appendBlobClient!! + .appendBlock(dataStream, test.length.toLong()) + .blobCommittedBlockCount + + LOGGER.info("blobCommittedBlockCount: $blobCommittedBlockCount") + } + + private fun getBlobContainerClient( + appendBlobClient: AppendBlobClient? + ): BlobContainerClient { + val containerClient = appendBlobClient!!.containerClient + if (!containerClient.exists()) { + containerClient.create() + } + + if (!appendBlobClient.exists()) { + appendBlobClient.create() + LOGGER.info("blobContainerClient created") + } else { + LOGGER.info("blobContainerClient already exists") + } + return containerClient + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/azure-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/jdbc/copy/azure/AzureBlobStorageStreamCopierFactory.kt b/airbyte-cdk/java/airbyte-cdk/azure-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/jdbc/copy/azure/AzureBlobStorageStreamCopierFactory.kt new file mode 100644 index 0000000000000..e28e88fd6b19d --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/azure-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/jdbc/copy/azure/AzureBlobStorageStreamCopierFactory.kt @@ -0,0 +1,66 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.jdbc.copy.azure + +import com.azure.storage.blob.specialized.SpecializedBlobClientBuilder +import io.airbyte.cdk.db.jdbc.JdbcDatabase +import io.airbyte.cdk.integrations.destination.StandardNameTransformer +import io.airbyte.cdk.integrations.destination.jdbc.SqlOperations +import io.airbyte.cdk.integrations.destination.jdbc.copy.StreamCopier +import io.airbyte.cdk.integrations.destination.jdbc.copy.StreamCopierFactory +import io.airbyte.cdk.integrations.destination.jdbc.copy.StreamCopierFactory.Companion.getSchema +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import io.airbyte.protocol.models.v0.DestinationSyncMode + +abstract class AzureBlobStorageStreamCopierFactory : StreamCopierFactory { + override fun create( + configuredSchema: String?, + azureBlobConfig: AzureBlobStorageConfig, + stagingFolder: String?, + configuredStream: ConfiguredAirbyteStream?, + nameTransformer: StandardNameTransformer?, + db: JdbcDatabase?, + sqlOperations: SqlOperations? + ): StreamCopier? { + try { + val stream = configuredStream!!.stream + val syncMode = configuredStream.destinationSyncMode + val schema = getSchema(stream.namespace, configuredSchema!!, nameTransformer!!) + val streamName = stream.name + + val specializedBlobClientBuilder = + SpecializedBlobClientBuilder() + .endpoint(azureBlobConfig.endpointUrl) + .sasToken(azureBlobConfig.sasToken) + .containerName(azureBlobConfig.containerName) + + return create( + stagingFolder, + syncMode, + schema, + streamName, + specializedBlobClientBuilder, + db, + azureBlobConfig, + nameTransformer, + sqlOperations + ) + } catch (e: Exception) { + throw RuntimeException(e) + } + } + + @Throws(Exception::class) + abstract fun create( + stagingFolder: String?, + syncMode: DestinationSyncMode?, + schema: String?, + streamName: String?, + specializedBlobClientBuilder: SpecializedBlobClientBuilder?, + db: JdbcDatabase?, + azureBlobConfig: AzureBlobStorageConfig?, + nameTransformer: StandardNameTransformer?, + sqlOperations: SqlOperations? + ): StreamCopier? +} diff --git a/airbyte-cdk/java/airbyte-cdk/build.gradle b/airbyte-cdk/java/airbyte-cdk/build.gradle index 089b72dbc5cde..ffa096f594886 100644 --- a/airbyte-cdk/java/airbyte-cdk/build.gradle +++ b/airbyte-cdk/java/airbyte-cdk/build.gradle @@ -1,44 +1,22 @@ -import org.jetbrains.kotlin.gradle.dsl.JvmTarget -import org.jetbrains.kotlin.gradle.dsl.KotlinVersion -plugins { - id 'org.jetbrains.kotlin.jvm' version '1.9.22' -} - final var cdkVersion = { var props = new Properties() file("core/src/main/resources/version.properties").withInputStream(props::load) return props.getProperty('version', 'undefined') }() - - allprojects { apply plugin: 'java-library' apply plugin: 'maven-publish' - apply plugin: 'java-test-fixtures' - apply plugin: 'org.jetbrains.kotlin.jvm' group 'io.airbyte.cdk' - def artifactBaseName = 'airbyte-cdk-' + project.name - // E.g. airbyte-cdk-core, airbyte-cdk-db-sources, airbyte-cdk-db-destinations, etc. - project.version = cdkVersion publishing { publications { - main(MavenPublication) { - groupId = 'io.airbyte.cdk' - artifactId = artifactBaseName - version = project.version + cdk(MavenPublication) { from components.java } - testFixtures(MavenPublication) { - groupId = 'io.airbyte.cdk' - artifactId = artifactBaseName + '-test-fixtures' - version = project.version - artifact project.tasks.testFixturesJar - } } // This repository is only defined and used in the context of an artifact publishing // It's different from the 'airbyte-public-jars' defined in settings.graddle only in its omission @@ -53,26 +31,10 @@ allprojects { } } } - - compileKotlin { - compilerOptions { - jvmTarget = JvmTarget.JVM_21 - languageVersion = KotlinVersion.KOTLIN_1_9 - } - } - compileTestKotlin { - compilerOptions { - jvmTarget = JvmTarget.JVM_21 - languageVersion = KotlinVersion.KOTLIN_1_9 - } - } } - project.configurations { - testImplementation.extendsFrom implementation - testFixturesImplementation.extendsFrom implementation - testFixturesRuntimeOnly.extendsFrom runtimeOnly - } + // This is necessary because the mockit.kotlin any() generates a bunch of bad casts + spotbugsTest.omitVisitors = ['FindBadCast2'] } description = "Airbyte Connector Development Kit (CDK) for Java." diff --git a/airbyte-cdk/java/airbyte-cdk/core/build.gradle b/airbyte-cdk/java/airbyte-cdk/core/build.gradle index 50764c9c9fac9..d9b3bdd260564 100644 --- a/airbyte-cdk/java/airbyte-cdk/core/build.gradle +++ b/airbyte-cdk/java/airbyte-cdk/core/build.gradle @@ -1,3 +1,6 @@ +import org.jetbrains.kotlin.gradle.dsl.JvmTarget +import org.jetbrains.kotlin.gradle.dsl.KotlinVersion + java { // TODO: rewrite code to avoid javac wornings in the first place compileJava { @@ -11,6 +14,43 @@ java { } } +compileTestFixturesKotlin { + compilerOptions { + jvmTarget = JvmTarget.JVM_21 + languageVersion = KotlinVersion.KOTLIN_1_9 + allWarningsAsErrors = false + freeCompilerArgs = ["-Xjvm-default=all"] + } + dependsOn { + tasks.matching { it.name == 'generate' } + } +} + +compileTestKotlin { + compilerOptions { + jvmTarget = JvmTarget.JVM_21 + languageVersion = KotlinVersion.KOTLIN_1_9 + allWarningsAsErrors = false + freeCompilerArgs = ["-Xjvm-default=all"] + } + dependsOn { + tasks.matching { it.name == 'generate' } + } +} + +compileKotlin { + compilerOptions { + jvmTarget = JvmTarget.JVM_21 + languageVersion = KotlinVersion.KOTLIN_1_9 + allWarningsAsErrors = false + freeCompilerArgs = ["-Xjvm-default=all"] + } + dependsOn { + tasks.matching { it.name == 'generate' } + } +} + + dependencies { api 'com.datadoghq:dd-trace-api:1.28.0' @@ -19,7 +59,7 @@ dependencies { api 'org.jooq:jooq:3.16.23' api 'org.apache.commons:commons-csv:1.10.0' - implementation project(':airbyte-cdk:java:airbyte-cdk:dependencies') + implementation project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-dependencies') implementation 'commons-cli:commons-cli:1.4' implementation 'io.aesy:datasize:1.0.0' @@ -36,13 +76,14 @@ dependencies { testFixturesApi 'org.testcontainers:testcontainers:1.19.0' testFixturesApi 'org.testcontainers:jdbc:1.19.0' - testImplementation project(':airbyte-cdk:java:airbyte-cdk:dependencies') - testImplementation testFixtures(project(':airbyte-cdk:java:airbyte-cdk:db-sources')) - testImplementation testFixtures(project(':airbyte-cdk:java:airbyte-cdk:datastore-postgres')) + testImplementation project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-dependencies') + testImplementation testFixtures(project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-db-sources')) + testImplementation testFixtures(project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-datastore-postgres')) testImplementation 'mysql:mysql-connector-java:8.0.33' testImplementation 'org.postgresql:postgresql:42.6.0' testImplementation 'org.testcontainers:mysql:1.19.0' testImplementation 'org.testcontainers:postgresql:1.19.0' testImplementation 'org.xbib.elasticsearch:joptsimple:6.3.2.1' + testImplementation 'org.mockito.kotlin:mockito-kotlin:5.2.1' } diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/CDKConstants.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/CDKConstants.java deleted file mode 100644 index a7e73115d9fed..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/CDKConstants.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk; - -import java.io.IOException; -import java.io.InputStream; -import java.util.Properties; - -public final class CDKConstants { - - private CDKConstants() { - // restrict instantiation - } - - public static final String VERSION = getVersion(); - - private static String getVersion() { - Properties prop = new Properties(); - - try (InputStream inputStream = CDKConstants.class.getClassLoader().getResourceAsStream("version.properties")) { - prop.load(inputStream); - return prop.getProperty("version"); - } catch (IOException e) { - throw new RuntimeException("Could not read version properties file", e); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/AbstractDatabase.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/AbstractDatabase.java deleted file mode 100644 index 94c994163c420..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/AbstractDatabase.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db; - -import com.fasterxml.jackson.databind.JsonNode; - -/** - * A wrapper around the instantiated {@link javax.sql.DataSource}. - * - * Note that this class does not implement {@link AutoCloseable}/{@link java.io.Closeable}, as it is - * not the responsibility of this class to close the provided {@link javax.sql.DataSource}. This is - * to avoid accidentally closing a shared resource. - */ -public class AbstractDatabase { - - private JsonNode sourceConfig; - private JsonNode databaseConfig; - - public JsonNode getSourceConfig() { - return sourceConfig; - } - - public void setSourceConfig(final JsonNode sourceConfig) { - this.sourceConfig = sourceConfig; - } - - public JsonNode getDatabaseConfig() { - return databaseConfig; - } - - public void setDatabaseConfig(final JsonNode databaseConfig) { - this.databaseConfig = databaseConfig; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/ContextQueryFunction.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/ContextQueryFunction.java deleted file mode 100644 index c568227960e8e..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/ContextQueryFunction.java +++ /dev/null @@ -1,15 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db; - -import java.sql.SQLException; -import org.jooq.DSLContext; - -@FunctionalInterface -public interface ContextQueryFunction { - - T query(DSLContext context) throws SQLException; - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/DataTypeSupplier.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/DataTypeSupplier.java deleted file mode 100644 index 56f6adc64e63b..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/DataTypeSupplier.java +++ /dev/null @@ -1,14 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db; - -import java.sql.SQLException; - -@FunctionalInterface -public interface DataTypeSupplier { - - DataType apply() throws SQLException; - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/DataTypeUtils.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/DataTypeUtils.java deleted file mode 100644 index cf44f8d9a8a45..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/DataTypeUtils.java +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db; - -import java.sql.Date; -import java.sql.SQLException; -import java.text.DateFormat; -import java.text.SimpleDateFormat; -import java.time.Duration; -import java.time.Instant; -import java.time.LocalDate; -import java.time.LocalDateTime; -import java.time.OffsetDateTime; -import java.time.format.DateTimeFormatter; -import java.util.function.Function; - -/** - * TODO : Replace all the DateTime related logic of this class with - * {@link io.airbyte.cdk.db.jdbc.DateTimeConverter} - */ -public class DataTypeUtils { - - public static final String DATE_FORMAT_PATTERN = "yyyy-MM-dd'T'HH:mm:ss'Z'"; - - public static final String DATE_FORMAT_WITH_MILLISECONDS_PATTERN = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"; - - public static final DateTimeFormatter TIME_FORMATTER = DateTimeFormatter.ofPattern("HH:mm:ss.SSSSSS"); - public static final DateTimeFormatter TIMESTAMP_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSSSS"); - public static final DateTimeFormatter TIMETZ_FORMATTER = DateTimeFormatter.ofPattern("HH:mm:ss.SSSSSSXXX"); - public static final DateTimeFormatter TIMESTAMPTZ_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSSSSXXX"); - public static final DateTimeFormatter OFFSETDATETIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSSSSSS XXX"); - public static final DateTimeFormatter DATE_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd"); - - // wrap SimpleDateFormat in a function because SimpleDateFormat is not threadsafe as a static final. - public static DateFormat getDateFormat() { - return new SimpleDateFormat(DATE_FORMAT_PATTERN); // Quoted "Z" to indicate UTC, no timezone offset; - } - - // wrap SimpleDateFormat in a function because SimpleDateFormat is not threadsafe as a static final. - public static DateFormat getDateFormatMillisPattern() { - return new SimpleDateFormat(DATE_FORMAT_WITH_MILLISECONDS_PATTERN); - } - - public static T returnNullIfInvalid(final DataTypeSupplier valueProducer) { - return returnNullIfInvalid(valueProducer, ignored -> true); - } - - public static T returnNullIfInvalid(final DataTypeSupplier valueProducer, final Function isValidFn) { - // Some edge case values (e.g: Infinity, NaN) have no java or JSON equivalent, and will throw an - // exception when parsed. We want to parse those - // values as null. - // This method reduces error handling boilerplate. - try { - final T value = valueProducer.apply(); - return isValidFn.apply(value) ? value : null; - } catch (final SQLException e) { - return null; - } - } - - public static String toISO8601StringWithMicroseconds(final Instant instant) { - - final String dateWithMilliseconds = getDateFormatMillisPattern().format(Date.from(instant)); - return dateWithMilliseconds.substring(0, 23) + calculateMicrosecondsString(instant.getNano()) + dateWithMilliseconds.substring(23); - } - - @SuppressWarnings("PMD.AvoidLiteralsInIfCondition") - private static String calculateMicrosecondsString(final int nano) { - final var microSeconds = (nano / 1000) % 1000; - final String result; - if (microSeconds < 10) { - result = "00" + microSeconds; - } else if (microSeconds < 100) { - result = "0" + microSeconds; - } else { - result = "" + microSeconds; - } - return result; - } - - public static String toISO8601StringWithMilliseconds(final long epochMillis) { - return getDateFormatMillisPattern().format(Date.from(Instant.ofEpochMilli(epochMillis))); - } - - public static String toISO8601String(final long epochMillis) { - return getDateFormat().format(Date.from(Instant.ofEpochMilli(epochMillis))); - } - - public static String toISO8601String(final java.util.Date date) { - return getDateFormat().format(date); - } - - public static String toISOTimeString(final LocalDateTime dateTime) { - return DateTimeFormatter.ISO_TIME.format(dateTime.toLocalTime()); - } - - public static String toISO8601String(final LocalDate date) { - return toISO8601String(date.atStartOfDay()); - } - - public static String toISO8601String(final LocalDateTime date) { - return date.format(DateTimeFormatter.ofPattern(DATE_FORMAT_PATTERN)); - } - - public static String toISO8601String(final OffsetDateTime date) { - return date.format(OFFSETDATETIME_FORMATTER); - } - - public static String toISO8601String(final Duration duration) { - return getDateFormat().format(Date.from(Instant.ofEpochSecond(Math.abs(duration.getSeconds()), Math.abs(duration.getNano())))); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/Database.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/Database.java deleted file mode 100644 index ba817100fd7ab..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/Database.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db; - -import java.sql.SQLException; -import org.jooq.DSLContext; -import org.jooq.impl.DSL; - -/** - * Database object for interacting with a Jooq connection. - */ -public class Database { - - private final DSLContext dslContext; - - public Database(final DSLContext dslContext) { - this.dslContext = dslContext; - } - - public T query(final ContextQueryFunction transform) throws SQLException { - return transform.query(dslContext); - } - - public T transaction(final ContextQueryFunction transform) throws SQLException { - return dslContext.transactionResult(configuration -> transform.query(DSL.using(configuration))); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/DbAnalyticsUtils.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/DbAnalyticsUtils.java deleted file mode 100644 index c10cffa61fe2f..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/DbAnalyticsUtils.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db; - -import io.airbyte.protocol.models.v0.AirbyteAnalyticsTraceMessage; - -/** - * Utility class to define constants associated with database source connector analytics events. - * Make sure to add the analytics event to - * https://www.notion.so/Connector-Analytics-Events-892a79a49852465f8d59a18bd84c36de - */ -public class DbAnalyticsUtils { - - public static final String CDC_CURSOR_INVALID_KEY = "db-sources-cdc-cursor-invalid"; - - public static AirbyteAnalyticsTraceMessage cdcCursorInvalidMessage() { - return new AirbyteAnalyticsTraceMessage().withType(CDC_CURSOR_INVALID_KEY).withValue("1"); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/ExceptionWrappingDatabase.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/ExceptionWrappingDatabase.java deleted file mode 100644 index b200cfe9e69a6..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/ExceptionWrappingDatabase.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db; - -import java.io.IOException; -import java.sql.SQLException; - -/** - * Wraps a {@link Database} object and throwing IOExceptions instead of SQLExceptions. - */ -public class ExceptionWrappingDatabase { - - private final Database database; - - public ExceptionWrappingDatabase(final Database database) { - this.database = database; - } - - public T query(final ContextQueryFunction transform) throws IOException { - try { - return database.query(transform); - } catch (final SQLException e) { - throw new IOException(e); - } - } - - public T transaction(final ContextQueryFunction transform) throws IOException { - try { - return database.transaction(transform); - } catch (final SQLException e) { - throw new IOException(e); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/IncrementalUtils.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/IncrementalUtils.java deleted file mode 100644 index 7436963249c03..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/IncrementalUtils.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db; - -import io.airbyte.protocol.models.JsonSchemaPrimitiveUtil; -import io.airbyte.protocol.models.JsonSchemaPrimitiveUtil.JsonSchemaPrimitive; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import java.util.Optional; - -public class IncrementalUtils { - - private static final String PROPERTIES = "properties"; - - @SuppressWarnings("PMD.AvoidLiteralsInIfCondition") - public static String getCursorField(final ConfiguredAirbyteStream stream) { - if (stream.getCursorField().size() == 0) { - throw new IllegalStateException("No cursor field specified for stream attempting to do incremental."); - } else if (stream.getCursorField().size() > 1) { - throw new IllegalStateException("Source does not support nested cursor fields."); - } else { - return stream.getCursorField().get(0); - } - } - - public static Optional getCursorFieldOptional(final ConfiguredAirbyteStream stream) { - try { - return Optional.ofNullable(getCursorField(stream)); - } catch (final IllegalStateException e) { - return Optional.empty(); - } - } - - public static JsonSchemaPrimitive getCursorType(final ConfiguredAirbyteStream stream, final String cursorField) { - if (stream.getStream().getJsonSchema().get(PROPERTIES) == null) { - throw new IllegalStateException(String.format("No properties found in stream: %s.", stream.getStream().getName())); - } - - if (stream.getStream().getJsonSchema().get(PROPERTIES).get(cursorField) == null) { - throw new IllegalStateException( - String.format("Could not find cursor field: %s in schema for stream: %s.", cursorField, stream.getStream().getName())); - } - - if (stream.getStream().getJsonSchema().get(PROPERTIES).get(cursorField).get("type") == null && - stream.getStream().getJsonSchema().get(PROPERTIES).get(cursorField).get("$ref") == null) { - throw new IllegalStateException( - String.format("Could not find cursor type for field: %s in schema for stream: %s.", cursorField, stream.getStream().getName())); - } - - if (stream.getStream().getJsonSchema().get(PROPERTIES).get(cursorField).get("type") == null) { - return JsonSchemaPrimitiveUtil.PRIMITIVE_TO_REFERENCE_BIMAP.inverse() - .get(stream.getStream().getJsonSchema().get(PROPERTIES).get(cursorField).get("$ref").asText()); - } else { - return JsonSchemaPrimitive.valueOf(stream.getStream().getJsonSchema().get(PROPERTIES).get(cursorField).get("type").asText().toUpperCase()); - } - } - - /** - * Comparator where if original is less than candidate then value less than 0, if greater than - * candidate then value greater than 0, else 0 - * - * @param original the first value to compare - * @param candidate the second value to compare - * @param type primitive type used to determine comparison - * @return - */ - public static int compareCursors(final String original, final String candidate, final JsonSchemaPrimitive type) { - if (original == null && candidate == null) { - return 0; - } - - if (candidate == null) { - return 1; - } - - if (original == null) { - return -1; - } - - switch (type) { - case STRING, STRING_V1, DATE_V1, TIME_WITH_TIMEZONE_V1, TIME_WITHOUT_TIMEZONE_V1, TIMESTAMP_WITH_TIMEZONE_V1, TIMESTAMP_WITHOUT_TIMEZONE_V1 -> { - return original.compareTo(candidate); - } - case NUMBER, NUMBER_V1, INTEGER_V1 -> { - // todo (cgardens) - handle big decimal. this is currently an overflow risk. - return Double.compare(Double.parseDouble(original), Double.parseDouble(candidate)); - } - case BOOLEAN, BOOLEAN_V1 -> { - return Boolean.compare(Boolean.parseBoolean(original), Boolean.parseBoolean(candidate)); - } - // includes OBJECT, ARRAY, NULL - default -> throw new IllegalStateException(String.format("Cannot use field of type %s as a comparable", type)); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/JdbcCompatibleSourceOperations.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/JdbcCompatibleSourceOperations.java deleted file mode 100644 index 0bdcc8a9ed144..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/JdbcCompatibleSourceOperations.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.SQLException; - -public interface JdbcCompatibleSourceOperations extends SourceOperations { - - /** - * Read from a result set, and copy the value of the column at colIndex to the Json object. - *

- * - * @param colIndex 1-based column index. - */ - void copyToJsonField(final ResultSet resultSet, final int colIndex, final ObjectNode json) throws SQLException; - - /** - * Set the cursor field in incremental table query. - */ - void setCursorField(final PreparedStatement preparedStatement, - final int parameterIndex, - final SourceType cursorFieldType, - final String value) - throws SQLException; - - /** - * Determine the database specific type of the input field based on its column metadata. - */ - SourceType getDatabaseFieldType(final JsonNode field); - - /** - * This method will verify that filed could be used as cursor for incremental sync - * - * @param type - table field type that should be checked - * @return true is field type can be used as cursor field for incremental sync - */ - boolean isCursorType(final SourceType type); - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/SourceOperations.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/SourceOperations.java deleted file mode 100644 index de69ce653a757..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/SourceOperations.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.protocol.models.JsonSchemaType; -import java.sql.SQLException; - -public interface SourceOperations { - - /** - * Converts a database row into it's JSON representation. - * - * @throws SQLException - */ - JsonNode rowToJson(QueryResult queryResult) throws SQLException; - - /** - * Converts a database source type into an Airbyte type, which is currently represented by a - * {@link JsonSchemaType} - */ - JsonSchemaType getAirbyteType(SourceType sourceType); - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/SqlDatabase.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/SqlDatabase.java deleted file mode 100644 index 6cb912d325a3d..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/SqlDatabase.java +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db; - -import com.fasterxml.jackson.databind.JsonNode; -import java.util.stream.Stream; - -public abstract class SqlDatabase extends AbstractDatabase { - - public abstract void execute(String sql) throws Exception; - - public abstract Stream unsafeQuery(String sql, String... params) throws Exception; - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/factory/ConnectionFactory.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/factory/ConnectionFactory.java deleted file mode 100644 index ab2eb4d212b05..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/factory/ConnectionFactory.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db.factory; - -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; -import java.util.Map; -import java.util.Properties; - -/** - * This class as been added in order to be able to save the connection in a test. It was found that - * the {@link javax.sql.DataSource} close method wasn't propagating the connection properly. It - * shouldn't be needed in our application code. - */ -public class ConnectionFactory { - - /** - * Construct a new {@link Connection} instance using the provided configuration. - * - * @param username The username of the database user. - * @param password The password of the database user. - * @param connectionProperties The extra properties to add to the connection. - * @param jdbcConnectionString The JDBC connection string. - * @return The configured {@link Connection} - */ - public static Connection create(final String username, - final String password, - final Map connectionProperties, - final String jdbcConnectionString) { - try { - Properties properties = new Properties(); - properties.put("user", username); - properties.put("password", password); - connectionProperties.forEach((k, v) -> properties.put(k, v)); - - return DriverManager.getConnection(jdbcConnectionString, - properties); - } catch (final SQLException e) { - throw new RuntimeException(e); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/factory/DSLContextFactory.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/factory/DSLContextFactory.java deleted file mode 100644 index b70888255e1cb..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/factory/DSLContextFactory.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db.factory; - -import java.time.Duration; -import java.util.Map; -import javax.sql.DataSource; -import org.jooq.DSLContext; -import org.jooq.SQLDialect; -import org.jooq.impl.DSL; - -/** - * Temporary factory class that provides convenience methods for creating a {@link DSLContext} - * instances. This class will be removed once the project has been converted to leverage an - * application framework to manage the creation and injection of {@link DSLContext} objects. - */ -public class DSLContextFactory { - - /** - * Constructs a configured {@link DSLContext} instance using the provided configuration. - * - * @param dataSource The {@link DataSource} used to connect to the database. - * @param dialect The SQL dialect to use with objects created from this context. - * @return The configured {@link DSLContext}. - */ - public static DSLContext create(final DataSource dataSource, final SQLDialect dialect) { - return DSL.using(dataSource, dialect); - } - - /** - * Constructs a configured {@link DSLContext} instance using the provided configuration. - * - * @param username The username of the database user. - * @param password The password of the database user. - * @param driverClassName The fully qualified name of the JDBC driver class. - * @param jdbcConnectionString The JDBC connection string. - * @param dialect The SQL dialect to use with objects created from this context. - * @return The configured {@link DSLContext}. - */ - public static DSLContext create(final String username, - final String password, - final String driverClassName, - final String jdbcConnectionString, - final SQLDialect dialect) { - return DSL.using(DataSourceFactory.create(username, password, driverClassName, jdbcConnectionString), dialect); - } - - /** - * Constructs a configured {@link DSLContext} instance using the provided configuration. - * - * @param username The username of the database user. - * @param password The password of the database user. - * @param driverClassName The fully qualified name of the JDBC driver class. - * @param jdbcConnectionString The JDBC connection string. - * @param dialect The SQL dialect to use with objects created from this context. - * @param connectionProperties Additional configuration properties for the underlying driver. - * @return The configured {@link DSLContext}. - */ - public static DSLContext create(final String username, - final String password, - final String driverClassName, - final String jdbcConnectionString, - final SQLDialect dialect, - final Map connectionProperties, - final Duration connectionTimeout) { - return DSL.using(DataSourceFactory.create(username, password, driverClassName, jdbcConnectionString, connectionProperties, - connectionTimeout), dialect); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/factory/DataSourceFactory.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/factory/DataSourceFactory.java deleted file mode 100644 index a4324a30ebf71..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/factory/DataSourceFactory.java +++ /dev/null @@ -1,290 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db.factory; - -import com.google.common.base.Preconditions; -import com.zaxxer.hikari.HikariConfig; -import com.zaxxer.hikari.HikariDataSource; -import java.io.Closeable; -import java.time.Duration; -import java.util.Map; -import javax.sql.DataSource; - -/** - * Temporary factory class that provides convenience methods for creating a {@link DataSource} - * instance. This class will be removed once the project has been converted to leverage an - * application framework to manage the creation and injection of {@link DataSource} objects. - */ -public class DataSourceFactory { - - /** - * Constructs a new {@link DataSource} using the provided configuration. - * - * @param username The username of the database user. - * @param password The password of the database user. - * @param driverClassName The fully qualified name of the JDBC driver class. - * @param jdbcConnectionString The JDBC connection string. - * @return The configured {@link DataSource}. - */ - public static DataSource create(final String username, - final String password, - final String driverClassName, - final String jdbcConnectionString) { - return new DataSourceBuilder(username, password, driverClassName, jdbcConnectionString) - .build(); - } - - /** - * Constructs a new {@link DataSource} using the provided configuration. - * - * @param username The username of the database user. - * @param password The password of the database user. - * @param driverClassName The fully qualified name of the JDBC driver class. - * @param jdbcConnectionString The JDBC connection string. - * @param connectionProperties Additional configuration properties for the underlying driver. - * @return The configured {@link DataSource}. - */ - public static DataSource create(final String username, - final String password, - final String driverClassName, - final String jdbcConnectionString, - final Map connectionProperties, - final Duration connectionTimeout) { - return new DataSourceBuilder(username, password, driverClassName, jdbcConnectionString) - .withConnectionProperties(connectionProperties) - .withConnectionTimeout(connectionTimeout) - .build(); - } - - /** - * Constructs a new {@link DataSource} using the provided configuration. - * - * @param username The username of the database user. - * @param password The password of the database user. - * @param host The host address of the database. - * @param port The port of the database. - * @param database The name of the database. - * @param driverClassName The fully qualified name of the JDBC driver class. - * @return The configured {@link DataSource}. - */ - public static DataSource create(final String username, - final String password, - final String host, - final int port, - final String database, - final String driverClassName) { - return new DataSourceBuilder(username, password, driverClassName, host, port, database) - .build(); - } - - /** - * Constructs a new {@link DataSource} using the provided configuration. - * - * @param username The username of the database user. - * @param password The password of the database user. - * @param host The host address of the database. - * @param port The port of the database. - * @param database The name of the database. - * @param driverClassName The fully qualified name of the JDBC driver class. - * @param connectionProperties Additional configuration properties for the underlying driver. - * @return The configured {@link DataSource}. - */ - public static DataSource create(final String username, - final String password, - final String host, - final int port, - final String database, - final String driverClassName, - final Map connectionProperties) { - return new DataSourceBuilder(username, password, driverClassName, host, port, database) - .withConnectionProperties(connectionProperties) - .build(); - } - - /** - * Convenience method that constructs a new {@link DataSource} for a PostgreSQL database using the - * provided configuration. - * - * @param username The username of the database user. - * @param password The password of the database user. - * @param host The host address of the database. - * @param port The port of the database. - * @param database The name of the database. - * @return The configured {@link DataSource}. - */ - public static DataSource createPostgres(final String username, - final String password, - final String host, - final int port, - final String database) { - return new DataSourceBuilder(username, password, "org.postgresql.Driver", host, port, database) - .build(); - } - - /** - * Utility method that attempts to close the provided {@link DataSource} if it implements - * {@link Closeable}. - * - * @param dataSource The {@link DataSource} to close. - * @throws Exception if unable to close the data source. - */ - public static void close(final DataSource dataSource) throws Exception { - if (dataSource != null) { - if (dataSource instanceof final AutoCloseable closeable) { - closeable.close(); - } - } - } - - /** - * Builder class used to configure and construct {@link DataSource} instances. - */ - public static class DataSourceBuilder { - - private Map connectionProperties = Map.of(); - private String database; - private String driverClassName; - private String host; - private String jdbcUrl; - private int maximumPoolSize = 10; - private int minimumPoolSize = 0; - private Duration connectionTimeout = Duration.ZERO; - private String password; - private int port = 5432; - private String username; - private String connectionInitSql; - - private DataSourceBuilder(final String username, - final String password, - final String driverClassName) { - this.username = username; - this.password = password; - this.driverClassName = driverClassName; - } - - public DataSourceBuilder(final String username, - final String password, - final String driverClassName, - final String jdbcUrl) { - this(username, password, driverClassName); - this.jdbcUrl = jdbcUrl; - } - - public DataSourceBuilder(final String username, - final String password, - final String driverClassName, - final String host, - final int port, - final String database) { - this(username, password, driverClassName); - this.host = host; - this.port = port; - this.database = database; - } - - public DataSourceBuilder withConnectionProperties(final Map connectionProperties) { - if (connectionProperties != null) { - this.connectionProperties = connectionProperties; - } - return this; - } - - public DataSourceBuilder withDatabase(final String database) { - this.database = database; - return this; - } - - public DataSourceBuilder withDriverClassName(final String driverClassName) { - this.driverClassName = driverClassName; - return this; - } - - public DataSourceBuilder withHost(final String host) { - this.host = host; - return this; - } - - public DataSourceBuilder withJdbcUrl(final String jdbcUrl) { - this.jdbcUrl = jdbcUrl; - return this; - } - - public DataSourceBuilder withMaximumPoolSize(final Integer maximumPoolSize) { - if (maximumPoolSize != null) { - this.maximumPoolSize = maximumPoolSize; - } - return this; - } - - public DataSourceBuilder withMinimumPoolSize(final Integer minimumPoolSize) { - if (minimumPoolSize != null) { - this.minimumPoolSize = minimumPoolSize; - } - return this; - } - - public DataSourceBuilder withConnectionTimeout(final Duration connectionTimeout) { - if (connectionTimeout != null) { - this.connectionTimeout = connectionTimeout; - } - return this; - } - - public DataSourceBuilder withPassword(final String password) { - this.password = password; - return this; - } - - public DataSourceBuilder withPort(final Integer port) { - if (port != null) { - this.port = port; - } - return this; - } - - public DataSourceBuilder withUsername(final String username) { - this.username = username; - return this; - } - - public DataSourceBuilder withConnectionInitSql(final String sql) { - this.connectionInitSql = sql; - return this; - } - - public DataSource build() { - final DatabaseDriver databaseDriver = DatabaseDriver.findByDriverClassName(driverClassName); - - Preconditions.checkNotNull(databaseDriver, "Unknown or blank driver class name: '" + driverClassName + "'."); - - final HikariConfig config = new HikariConfig(); - - config.setDriverClassName(databaseDriver.getDriverClassName()); - config.setJdbcUrl(jdbcUrl != null ? jdbcUrl : String.format(databaseDriver.getUrlFormatString(), host, port, database)); - config.setMaximumPoolSize(maximumPoolSize); - config.setMinimumIdle(minimumPoolSize); - // HikariCP uses milliseconds for all time values: - // https://github.com/brettwooldridge/HikariCP#gear-configuration-knobs-baby - config.setConnectionTimeout(connectionTimeout.toMillis()); - config.setPassword(password); - config.setUsername(username); - - /* - * Disable to prevent failing on startup. Applications may start prior to the database container - * being available. To avoid failing to create the connection pool, disable the fail check. This - * will preserve existing behavior that tests for the connection on first use, not on creation. - */ - config.setInitializationFailTimeout(Integer.MIN_VALUE); - - config.setConnectionInitSql(connectionInitSql); - - connectionProperties.forEach(config::addDataSourceProperty); - - return new HikariDataSource(config); - } - - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/factory/DatabaseDriver.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/factory/DatabaseDriver.java deleted file mode 100644 index 39bdfdfc0aa72..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/factory/DatabaseDriver.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db.factory; - -/** - * Collection of JDBC driver class names and the associated JDBC URL format string. - */ -public enum DatabaseDriver { - - CLICKHOUSE("com.clickhouse.jdbc.ClickHouseDriver", "jdbc:clickhouse:%s://%s:%d/%s"), - DATABRICKS("com.databricks.client.jdbc.Driver", "jdbc:databricks://%s:%s;HttpPath=%s;SSL=1;UserAgentEntry=Airbyte"), - DB2("com.ibm.db2.jcc.DB2Driver", "jdbc:db2://%s:%d/%s"), - STARBURST("io.trino.jdbc.TrinoDriver", "jdbc:trino://%s:%s/%s?SSL=true&source=airbyte"), - MARIADB("org.mariadb.jdbc.Driver", "jdbc:mariadb://%s:%d/%s"), - MSSQLSERVER("com.microsoft.sqlserver.jdbc.SQLServerDriver", "jdbc:sqlserver://%s:%d;databaseName=%s"), - MYSQL("com.mysql.cj.jdbc.Driver", "jdbc:mysql://%s:%d/%s"), - ORACLE("oracle.jdbc.OracleDriver", "jdbc:oracle:thin:@%s:%d/%s"), - VERTICA("com.vertica.jdbc.Driver", "jdbc:vertica://%s:%d/%s"), - POSTGRESQL("org.postgresql.Driver", "jdbc:postgresql://%s:%d/%s"), - REDSHIFT("com.amazon.redshift.jdbc.Driver", "jdbc:redshift://%s:%d/%s"), - SNOWFLAKE("net.snowflake.client.jdbc.SnowflakeDriver", "jdbc:snowflake://%s/"), - YUGABYTEDB("com.yugabyte.Driver", "jdbc:yugabytedb://%s:%d/%s"), - EXASOL("com.exasol.jdbc.EXADriver", "jdbc:exa:%s:%d"), - TERADATA("com.teradata.jdbc.TeraDriver", "jdbc:teradata://%s/"); - - private final String driverClassName; - private final String urlFormatString; - - DatabaseDriver(final String driverClassName, final String urlFormatString) { - this.driverClassName = driverClassName; - this.urlFormatString = urlFormatString; - } - - public String getDriverClassName() { - return driverClassName; - } - - public String getUrlFormatString() { - return urlFormatString; - } - - /** - * Finds the {@link DatabaseDriver} enumerated value that matches the provided driver class name. - * - * @param driverClassName The driver class name. - * @return The matching {@link DatabaseDriver} enumerated value or {@code null} if no match is - * found. - */ - public static DatabaseDriver findByDriverClassName(final String driverClassName) { - DatabaseDriver selected = null; - - for (final DatabaseDriver candidate : values()) { - if (candidate.getDriverClassName().equalsIgnoreCase(driverClassName)) { - selected = candidate; - break; - } - } - - return selected; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/AbstractJdbcCompatibleSourceOperations.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/AbstractJdbcCompatibleSourceOperations.java deleted file mode 100644 index c87c8047501ca..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/AbstractJdbcCompatibleSourceOperations.java +++ /dev/null @@ -1,293 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db.jdbc; - -import static io.airbyte.cdk.db.DataTypeUtils.TIMESTAMPTZ_FORMATTER; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.ArrayNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import io.airbyte.cdk.db.DataTypeUtils; -import io.airbyte.cdk.db.JdbcCompatibleSourceOperations; -import io.airbyte.commons.json.Jsons; -import java.math.BigDecimal; -import java.sql.Date; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Timestamp; -import java.text.ParseException; -import java.time.Instant; -import java.time.LocalDate; -import java.time.LocalDateTime; -import java.time.LocalTime; -import java.time.OffsetDateTime; -import java.time.OffsetTime; -import java.time.chrono.IsoEra; -import java.time.format.DateTimeParseException; -import java.util.Base64; -import java.util.Collections; - -/** - * Source operation skeleton for JDBC compatible databases. - */ -public abstract class AbstractJdbcCompatibleSourceOperations implements JdbcCompatibleSourceOperations { - - /** - * A Date representing the earliest date in CE. Any date before this is in BCE. - */ - private static final Date ONE_CE = Date.valueOf("0001-01-01"); - - @Override - public JsonNode rowToJson(final ResultSet queryContext) throws SQLException { - // the first call communicates with the database. after that the result is cached. - final int columnCount = queryContext.getMetaData().getColumnCount(); - final ObjectNode jsonNode = (ObjectNode) Jsons.jsonNode(Collections.emptyMap()); - - for (int i = 1; i <= columnCount; i++) { - // attempt to access the column. this allows us to know if it is null before we do type-specific - // parsing. if it is null, we can move on. while awkward, this seems to be the agreed upon way of - // checking for null values with jdbc. - queryContext.getObject(i); - if (queryContext.wasNull()) { - continue; - } - - // convert to java types that will convert into reasonable json. - copyToJsonField(queryContext, i, jsonNode); - } - - return jsonNode; - } - - protected void putArray(final ObjectNode node, final String columnName, final ResultSet resultSet, final int index) throws SQLException { - final ArrayNode arrayNode = new ObjectMapper().createArrayNode(); - final ResultSet arrayResultSet = resultSet.getArray(index).getResultSet(); - while (arrayResultSet.next()) { - arrayNode.add(arrayResultSet.getString(2)); - } - node.set(columnName, arrayNode); - } - - protected void putBoolean(final ObjectNode node, final String columnName, final ResultSet resultSet, final int index) throws SQLException { - node.put(columnName, resultSet.getBoolean(index)); - } - - /** - * In some sources Short might have value larger than {@link Short#MAX_VALUE}. E.q. MySQL has - * unsigned smallint type, which can contain value 65535. If we fail to cast Short value, we will - * try to cast Integer. - */ - protected void putShortInt(final ObjectNode node, final String columnName, final ResultSet resultSet, final int index) throws SQLException { - try { - node.put(columnName, resultSet.getShort(index)); - } catch (final SQLException e) { - node.put(columnName, DataTypeUtils.returnNullIfInvalid(() -> resultSet.getInt(index))); - } - } - - /** - * In some sources Integer might have value larger than {@link Integer#MAX_VALUE}. E.q. MySQL has - * unsigned Integer type, which can contain value 3428724653. If we fail to cast Integer value, we - * will try to cast Long. - */ - protected void putInteger(final ObjectNode node, final String columnName, final ResultSet resultSet, final int index) throws SQLException { - try { - node.put(columnName, resultSet.getInt(index)); - } catch (final SQLException e) { - node.put(columnName, DataTypeUtils.returnNullIfInvalid(() -> resultSet.getLong(index))); - } - } - - protected void putBigInt(final ObjectNode node, final String columnName, final ResultSet resultSet, final int index) throws SQLException { - node.put(columnName, DataTypeUtils.returnNullIfInvalid(() -> resultSet.getLong(index))); - } - - protected void putDouble(final ObjectNode node, final String columnName, final ResultSet resultSet, final int index) throws SQLException { - node.put(columnName, DataTypeUtils.returnNullIfInvalid(() -> resultSet.getDouble(index), Double::isFinite)); - } - - protected void putFloat(final ObjectNode node, final String columnName, final ResultSet resultSet, final int index) throws SQLException { - node.put(columnName, DataTypeUtils.returnNullIfInvalid(() -> resultSet.getFloat(index), Float::isFinite)); - } - - protected void putBigDecimal(final ObjectNode node, final String columnName, final ResultSet resultSet, final int index) throws SQLException { - node.put(columnName, DataTypeUtils.returnNullIfInvalid(() -> resultSet.getBigDecimal(index))); - } - - protected void putString(final ObjectNode node, final String columnName, final ResultSet resultSet, final int index) throws SQLException { - node.put(columnName, resultSet.getString(index)); - } - - protected void putDate(final ObjectNode node, final String columnName, final ResultSet resultSet, final int index) throws SQLException { - node.put(columnName, resultSet.getString(index)); - } - - protected void putTime(final ObjectNode node, final String columnName, final ResultSet resultSet, final int index) throws SQLException { - node.put(columnName, DateTimeConverter.convertToTime(getObject(resultSet, index, LocalTime.class))); - } - - protected void putTimestamp(final ObjectNode node, final String columnName, final ResultSet resultSet, final int index) throws SQLException { - try { - node.put(columnName, DateTimeConverter.convertToTimestamp(getObject(resultSet, index, LocalDateTime.class))); - } catch (Exception e) { - // for backward compatibility - final Instant instant = resultSet.getTimestamp(index).toInstant(); - node.put(columnName, DataTypeUtils.toISO8601StringWithMicroseconds(instant)); - } - } - - protected void putBinary(final ObjectNode node, final String columnName, final ResultSet resultSet, final int index) throws SQLException { - node.put(columnName, resultSet.getBytes(index)); - } - - protected void putDefault(final ObjectNode node, final String columnName, final ResultSet resultSet, final int index) throws SQLException { - node.put(columnName, resultSet.getString(index)); - } - - protected void setTime(final PreparedStatement preparedStatement, final int parameterIndex, final String value) throws SQLException { - try { - preparedStatement.setObject(parameterIndex, LocalTime.parse(value)); - } catch (final DateTimeParseException e) { - setTimestamp(preparedStatement, parameterIndex, value); - } - } - - protected void setTimestamp(final PreparedStatement preparedStatement, final int parameterIndex, final String value) throws SQLException { - try { - preparedStatement.setObject(parameterIndex, LocalDateTime.parse(value)); - } catch (final DateTimeParseException e) { - preparedStatement.setObject(parameterIndex, OffsetDateTime.parse(value)); - } - } - - protected void setDate(final PreparedStatement preparedStatement, final int parameterIndex, final String value) throws SQLException { - try { - preparedStatement.setObject(parameterIndex, LocalDate.parse(value)); - } catch (final DateTimeParseException e) { - setDateAsTimestamp(preparedStatement, parameterIndex, value); - } - } - - private void setDateAsTimestamp(final PreparedStatement preparedStatement, final int parameterIndex, final String value) throws SQLException { - try { - final Timestamp from = Timestamp.from(DataTypeUtils.getDateFormat().parse(value).toInstant()); - preparedStatement.setDate(parameterIndex, new Date(from.getTime())); - } catch (final ParseException e) { - throw new RuntimeException(e); - } - } - - protected void setBit(final PreparedStatement preparedStatement, final int parameterIndex, final String value) throws SQLException { - // todo (cgardens) - currently we do not support bit because it requires special handling in the - // prepared statement. - // see - // https://www.postgresql-archive.org/Problems-with-BIT-datatype-and-preparedStatment-td5733533.html. - throw new RuntimeException("BIT value is not supported as incremental parameter!"); - } - - protected void setBoolean(final PreparedStatement preparedStatement, final int parameterIndex, final String value) throws SQLException { - preparedStatement.setBoolean(parameterIndex, Boolean.parseBoolean(value)); - } - - protected void setShortInt(final PreparedStatement preparedStatement, final int parameterIndex, final String value) throws SQLException { - preparedStatement.setShort(parameterIndex, Short.parseShort(value)); - } - - protected void setInteger(final PreparedStatement preparedStatement, final int parameterIndex, final String value) throws SQLException { - preparedStatement.setInt(parameterIndex, Integer.parseInt(value)); - } - - protected void setBigInteger(final PreparedStatement preparedStatement, final int parameterIndex, final String value) throws SQLException { - preparedStatement.setLong(parameterIndex, new BigDecimal(value).toBigInteger().longValue()); - } - - protected void setDouble(final PreparedStatement preparedStatement, final int parameterIndex, final String value) throws SQLException { - preparedStatement.setDouble(parameterIndex, Double.parseDouble(value)); - } - - protected void setReal(final PreparedStatement preparedStatement, final int parameterIndex, final String value) throws SQLException { - preparedStatement.setFloat(parameterIndex, Float.parseFloat(value)); - } - - protected void setDecimal(final PreparedStatement preparedStatement, final int parameterIndex, final String value) throws SQLException { - preparedStatement.setBigDecimal(parameterIndex, new BigDecimal(value)); - } - - protected void setString(final PreparedStatement preparedStatement, final int parameterIndex, final String value) throws SQLException { - preparedStatement.setString(parameterIndex, value); - } - - protected void setBinary(final PreparedStatement preparedStatement, final int parameterIndex, final String value) throws SQLException { - preparedStatement.setBytes(parameterIndex, Base64.getDecoder().decode(value)); - } - - protected ObjectType getObject(final ResultSet resultSet, final int index, final Class clazz) throws SQLException { - return resultSet.getObject(index, clazz); - } - - protected void putTimeWithTimezone(final ObjectNode node, final String columnName, final ResultSet resultSet, final int index) throws SQLException { - final OffsetTime timetz = getObject(resultSet, index, OffsetTime.class); - node.put(columnName, DateTimeConverter.convertToTimeWithTimezone(timetz)); - } - - protected void putTimestampWithTimezone(final ObjectNode node, final String columnName, final ResultSet resultSet, final int index) - throws SQLException { - final OffsetDateTime timestamptz = getObject(resultSet, index, OffsetDateTime.class); - final LocalDate localDate = timestamptz.toLocalDate(); - node.put(columnName, resolveEra(localDate, timestamptz.format(TIMESTAMPTZ_FORMATTER))); - } - - /** - * Modifies a string representation of a date/timestamp and normalizes its era indicator. - * Specifically, if this is a BCE value: - *

    - *
  • The leading negative sign will be removed if present
  • - *
  • The "BC" suffix will be appended, if not already present
  • - *
- * - * You most likely would prefer to call one of the overloaded methods, which accept temporal types. - */ - public static String resolveEra(final boolean isBce, final String value) { - String mangledValue = value; - if (isBce) { - if (mangledValue.startsWith("-")) { - mangledValue = mangledValue.substring(1); - } - if (!mangledValue.endsWith(" BC")) { - mangledValue += " BC"; - } - } - return mangledValue; - } - - public static boolean isBce(final LocalDate date) { - return date.getEra().equals(IsoEra.BCE); - } - - public static String resolveEra(final LocalDate date, final String value) { - return resolveEra(isBce(date), value); - } - - /** - * java.sql.Date objects don't properly represent their era (for example, using toLocalDate() always - * returns an object in CE). So to determine the era, we just check whether the date is before 1 AD. - * - * This is technically kind of sketchy due to ancient timestamps being weird (leap years, etc.), but - * my understanding is that {@link #ONE_CE} has the same weirdness, so it cancels out. - */ - public static String resolveEra(final Date date, final String value) { - return resolveEra(date.before(ONE_CE), value); - } - - /** - * See {@link #resolveEra(Date, String)} for explanation. - */ - public static String resolveEra(final Timestamp timestamp, final String value) { - return resolveEra(timestamp.before(ONE_CE), value); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/DateTimeConverter.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/DateTimeConverter.java deleted file mode 100644 index 215b679fe697d..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/DateTimeConverter.java +++ /dev/null @@ -1,193 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db.jdbc; - -import static io.airbyte.cdk.db.DataTypeUtils.DATE_FORMATTER; -import static io.airbyte.cdk.db.DataTypeUtils.TIMESTAMPTZ_FORMATTER; -import static io.airbyte.cdk.db.DataTypeUtils.TIMESTAMP_FORMATTER; -import static io.airbyte.cdk.db.DataTypeUtils.TIMETZ_FORMATTER; -import static io.airbyte.cdk.db.DataTypeUtils.TIME_FORMATTER; -import static io.airbyte.cdk.db.jdbc.AbstractJdbcCompatibleSourceOperations.resolveEra; -import static java.time.ZoneOffset.UTC; - -import com.fasterxml.jackson.databind.node.ObjectNode; -import java.sql.Date; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Time; -import java.sql.Timestamp; -import java.time.Duration; -import java.time.Instant; -import java.time.LocalDate; -import java.time.LocalDateTime; -import java.time.LocalTime; -import java.time.OffsetDateTime; -import java.time.OffsetTime; -import java.time.ZonedDateTime; -import java.time.format.DateTimeFormatter; -import java.util.concurrent.TimeUnit; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class DateTimeConverter { - - private static final Logger LOGGER = LoggerFactory.getLogger(DateTimeConverter.class); - public static final DateTimeFormatter TIME_WITH_TIMEZONE_FORMATTER = DateTimeFormatter.ofPattern( - "HH:mm:ss[.][SSSSSSSSS][SSSSSSS][SSSSSS][SSSSS][SSSS][SSS][SS][S][''][XXX][XX][X]"); - private static boolean loggedUnknownTimeWithTimeZoneClass = false; - private static boolean loggedUnknownTimeClass = false; - private static boolean loggedUnknownTimestampWithTimeZoneClass = false; - private static boolean loggedUnknownTimestampClass = false; - private static boolean loggedUnknownDateClass = false; - - public static String convertToTimeWithTimezone(final Object time) { - if (time instanceof final java.time.OffsetTime timetz) { - return hasZeroSecondsAndNanos(timetz.toLocalTime()) ? timetz.format(TIMETZ_FORMATTER) : timetz.toString(); - } else { - if (!loggedUnknownTimeWithTimeZoneClass) { - LOGGER.info("Unknown class for Time with timezone data type" + time.getClass()); - loggedUnknownTimeWithTimeZoneClass = true; - } - final OffsetTime timetz = OffsetTime.parse(time.toString(), TIME_WITH_TIMEZONE_FORMATTER); - return hasZeroSecondsAndNanos(timetz.toLocalTime()) ? timetz.format(TIMETZ_FORMATTER) : timetz.toString(); - } - } - - public static String convertToTimestampWithTimezone(final Object timestamp) { - if (timestamp instanceof final Timestamp t) { - // In snapshot mode, debezium produces a java.sql.Timestamp object for the TIMESTAMPTZ type. - // Conceptually, a timestamp with timezone is an Instant. But t.toInstant() actually mangles the - // value for ancient dates, because leap years weren't applied consistently in ye olden days. - // Additionally, toInstant() (and toLocalDateTime()) actually lose the era indicator, so we can't - // rely on their getEra() methods. - // So we have special handling for this case, which sidesteps the toInstant conversion. - final ZonedDateTime timestamptz = t.toLocalDateTime().atZone(UTC); - final String value = timestamptz.format(TIMESTAMPTZ_FORMATTER); - return resolveEra(t, value); - } else if (timestamp instanceof final OffsetDateTime t) { - return resolveEra(t.toLocalDate(), t.format(TIMESTAMPTZ_FORMATTER)); - } else if (timestamp instanceof final ZonedDateTime timestamptz) { - return resolveEra(timestamptz.toLocalDate(), timestamptz.format(TIMESTAMPTZ_FORMATTER)); - } else if (timestamp instanceof final Instant instant) { - final OffsetDateTime offsetDateTime = OffsetDateTime.ofInstant(instant, UTC); - final ZonedDateTime timestamptz = ZonedDateTime.from(offsetDateTime); - final LocalDate localDate = timestamptz.toLocalDate(); - final String value = timestamptz.format(TIMESTAMPTZ_FORMATTER); - return resolveEra(localDate, value); - } else { - if (!loggedUnknownTimestampWithTimeZoneClass) { - LOGGER.info("Unknown class for Timestamp with time zone data type" + timestamp.getClass()); - loggedUnknownTimestampWithTimeZoneClass = true; - } - final Instant instant = Instant.parse(timestamp.toString()); - final OffsetDateTime offsetDateTime = OffsetDateTime.ofInstant(instant, UTC); - final ZonedDateTime timestamptz = ZonedDateTime.from(offsetDateTime); - final LocalDate localDate = timestamptz.toLocalDate(); - final String value = timestamptz.format(TIMESTAMPTZ_FORMATTER); - return resolveEra(localDate, value); - } - } - - /** - * See {@link #convertToTimestampWithTimezone(Object)} for explanation of the weird things happening - * here. - */ - public static String convertToTimestamp(final Object timestamp) { - if (timestamp instanceof final Timestamp t) { - // Snapshot mode - final LocalDateTime localDateTime = t.toLocalDateTime(); - return resolveEra(t, - hasZeroSecondsAndNanos(localDateTime.toLocalTime()) ? localDateTime.format(TIMESTAMP_FORMATTER) : localDateTime.toString()); - } else if (timestamp instanceof final Instant i) { - // Incremental mode - return resolveEra(i.atZone(UTC).toLocalDate(), i.atOffset(UTC).toLocalDateTime().format(TIMESTAMP_FORMATTER)); - } else if (timestamp instanceof final LocalDateTime localDateTime) { - final LocalDate date = localDateTime.toLocalDate(); - return resolveEra(date, - hasZeroSecondsAndNanos(localDateTime.toLocalTime()) ? localDateTime.format(TIMESTAMP_FORMATTER) : localDateTime.toString()); - } else { - if (!loggedUnknownTimestampClass) { - LOGGER.info("Unknown class for Timestamp data type" + timestamp.getClass()); - loggedUnknownTimestampClass = true; - } - final LocalDateTime localDateTime = LocalDateTime.parse(timestamp.toString()); - final LocalDate date = localDateTime.toLocalDate(); - return resolveEra(date, - hasZeroSecondsAndNanos(localDateTime.toLocalTime()) ? localDateTime.format(TIMESTAMP_FORMATTER) : localDateTime.toString()); - } - } - - /** - * See {@link #convertToTimestampWithTimezone(Object)} for explanation of the weird things happening - * here. - */ - public static String convertToDate(final Object date) { - if (date instanceof final Date d) { - // Snapshot mode - final LocalDate localDate = ((Date) date).toLocalDate(); - return resolveEra(d, localDate.format(DATE_FORMATTER)); - } else if (date instanceof LocalDate d) { - // Incremental mode - return resolveEra(d, d.format(DATE_FORMATTER)); - } else { - if (!loggedUnknownDateClass) { - LOGGER.info("Unknown class for Date data type" + date.getClass()); - loggedUnknownDateClass = true; - } - final LocalDate localDate = LocalDate.parse(date.toString()); - return resolveEra(localDate, localDate.format(DATE_FORMATTER)); - } - } - - public static String convertToTime(final Object time) { - if (time instanceof final Time sqlTime) { - return formatTime(sqlTime.toLocalTime()); - } else if (time instanceof final LocalTime localTime) { - return formatTime(localTime); - } else if (time instanceof java.time.Duration) { - long value = ((Duration) time).toNanos(); - if (value >= 0 && value < TimeUnit.DAYS.toNanos(1)) { - return formatTime(LocalTime.ofNanoOfDay(value)); - } else { - final long updatedValue = Math.min(Math.abs(value), LocalTime.MAX.toNanoOfDay()); - LOGGER.debug("Time values must use number of nanoseconds greater than 0 and less than 86400000000000 but its {}, converting to {} ", value, - updatedValue); - return formatTime(LocalTime.ofNanoOfDay(updatedValue)); - } - } else { - if (!loggedUnknownTimeClass) { - LOGGER.info("Unknown class for Time data type" + time.getClass()); - loggedUnknownTimeClass = true; - } - - final String valueAsString = time.toString(); - if (valueAsString.startsWith("24")) { - LOGGER.debug("Time value {} is above range, converting to 23:59:59", valueAsString); - return LocalTime.MAX.toString(); - } - return formatTime(LocalTime.parse(valueAsString)); - } - } - - private static String formatTime(LocalTime localTime) { - return hasZeroSecondsAndNanos(localTime) ? localTime.format(TIME_FORMATTER) : localTime.toString(); - } - - public static boolean hasZeroSecondsAndNanos(LocalTime localTime) { - return (localTime.getSecond() == 0 && localTime.getNano() == 0); - } - - public static void putJavaSQLDate(ObjectNode node, String columnName, ResultSet resultSet, int index) throws SQLException { - final Date date = resultSet.getDate(index); - node.put(columnName, convertToDate(date)); - } - - public static void putJavaSQLTime(ObjectNode node, String columnName, ResultSet resultSet, int index) throws SQLException { - // resultSet.getTime() will lose nanoseconds precision - final LocalTime localTime = resultSet.getTimestamp(index).toLocalDateTime().toLocalTime(); - node.put(columnName, convertToTime(localTime)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/DefaultJdbcDatabase.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/DefaultJdbcDatabase.java deleted file mode 100644 index 183073715bc7a..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/DefaultJdbcDatabase.java +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db.jdbc; - -import com.google.errorprone.annotations.MustBeClosed; -import io.airbyte.cdk.db.JdbcCompatibleSourceOperations; -import io.airbyte.commons.exceptions.ConnectionErrorException; -import io.airbyte.commons.functional.CheckedConsumer; -import io.airbyte.commons.functional.CheckedFunction; -import java.sql.Connection; -import java.sql.DatabaseMetaData; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.util.List; -import java.util.Objects; -import java.util.function.Function; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import javax.sql.DataSource; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Database object for interacting with a JDBC connection. Can be used for any JDBC compliant db. - */ -public class DefaultJdbcDatabase extends JdbcDatabase { - - private static final Logger LOGGER = LoggerFactory.getLogger(DefaultJdbcDatabase.class); - - protected final DataSource dataSource; - - public DefaultJdbcDatabase(final DataSource dataSource) { - this(dataSource, JdbcUtils.getDefaultSourceOperations()); - } - - public DefaultJdbcDatabase(final DataSource dataSource, final JdbcCompatibleSourceOperations sourceOperations) { - super(sourceOperations); - this.dataSource = dataSource; - } - - @Override - public void execute(final CheckedConsumer query) throws SQLException { - try (final Connection connection = dataSource.getConnection()) { - query.accept(connection); - } - } - - @Override - public List bufferedResultSetQuery(final CheckedFunction query, - final CheckedFunction recordTransform) - throws SQLException { - try (final Connection connection = dataSource.getConnection(); - final Stream results = toUnsafeStream(query.apply(connection), recordTransform)) { - return results.collect(Collectors.toList()); - } - } - - @Override - @MustBeClosed - public Stream unsafeResultSetQuery(final CheckedFunction query, - final CheckedFunction recordTransform) - throws SQLException { - final Connection connection = dataSource.getConnection(); - return toUnsafeStream(query.apply(connection), recordTransform) - .onClose(() -> { - try { - connection.close(); - } catch (final SQLException e) { - throw new RuntimeException(e); - } - }); - } - - @Override - public DatabaseMetaData getMetaData() throws SQLException { - try (final Connection connection = dataSource.getConnection()) { - final DatabaseMetaData metaData = connection.getMetaData(); - return metaData; - } catch (final SQLException e) { - // Some databases like Redshift will have null cause - if (Objects.isNull(e.getCause()) || !(e.getCause() instanceof SQLException)) { - throw new ConnectionErrorException(e.getSQLState(), e.getErrorCode(), e.getMessage(), e); - } else { - final SQLException cause = (SQLException) e.getCause(); - throw new ConnectionErrorException(e.getSQLState(), cause.getErrorCode(), cause.getMessage(), e); - } - } - } - - @Override - public T executeMetadataQuery(Function query) { - try (final Connection connection = dataSource.getConnection()) { - final DatabaseMetaData metaData = connection.getMetaData(); - return query.apply(metaData); - } catch (final SQLException e) { - // Some databases like Redshift will have null cause - if (Objects.isNull(e.getCause()) || !(e.getCause() instanceof SQLException)) { - throw new ConnectionErrorException(e.getSQLState(), e.getErrorCode(), e.getMessage(), e); - } else { - final SQLException cause = (SQLException) e.getCause(); - throw new ConnectionErrorException(e.getSQLState(), cause.getErrorCode(), cause.getMessage(), e); - } - } - } - - /** - * You CANNOT assume that data will be returned from this method before the entire {@link ResultSet} - * is buffered in memory. Review the implementation of the database's JDBC driver or use the - * StreamingJdbcDriver if you need this guarantee. The caller should close the returned stream to - * release the database connection. - * - * @param statementCreator create a {@link PreparedStatement} from a {@link Connection}. - * @param recordTransform transform each record of that result set into the desired type. do NOT - * just pass the {@link ResultSet} through. it is a stateful object will not be accessible if - * returned from recordTransform. - * @param type that each record will be mapped to. - * @return Result of the query mapped to a stream. - * @throws SQLException SQL related exceptions. - */ - @Override - @MustBeClosed - public Stream unsafeQuery(final CheckedFunction statementCreator, - final CheckedFunction recordTransform) - throws SQLException { - final Connection connection = dataSource.getConnection(); - return toUnsafeStream(statementCreator.apply(connection).executeQuery(), recordTransform) - .onClose(() -> { - try { - LOGGER.info("closing connection"); - connection.close(); - } catch (final SQLException e) { - throw new RuntimeException(e); - } - }); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/JdbcConstants.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/JdbcConstants.java deleted file mode 100644 index 21777a0e3a70d..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/JdbcConstants.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db.jdbc; - -public final class JdbcConstants { - - // constants defined in the DatabaseMetaData#getColumns method - // reference: https://docs.oracle.com/javase/8/docs/api/java/sql/DatabaseMetaData.html - public static final String JDBC_COLUMN_DATABASE_NAME = "TABLE_CAT"; - public static final String JDBC_COLUMN_SCHEMA_NAME = "TABLE_SCHEM"; - public static final String JDBC_COLUMN_TABLE_NAME = "TABLE_NAME"; - public static final String JDBC_COLUMN_COLUMN_NAME = "COLUMN_NAME"; - public static final String JDBC_COLUMN_DATA_TYPE = "DATA_TYPE"; - public static final String JDBC_COLUMN_TYPE = "TYPE"; - - public static final String JDBC_COLUMN_TYPE_NAME = "TYPE_NAME"; - public static final String JDBC_COLUMN_SIZE = "COLUMN_SIZE"; - public static final String JDBC_INDEX_NAME = "INDEX_NAME"; - public static final String JDBC_IS_NULLABLE = "IS_NULLABLE"; - public static final String JDBC_DECIMAL_DIGITS = "DECIMAL_DIGITS"; - public static final String JDBC_INDEX_NON_UNIQUE = "NON_UNIQUE"; - public static final String INTERNAL_SCHEMA_NAME = "schemaName"; - public static final String INTERNAL_TABLE_NAME = "tableName"; - public static final String INTERNAL_COLUMN_NAME = "columnName"; - public static final String INTERNAL_COLUMN_TYPE = "columnType"; - public static final String INTERNAL_COLUMN_TYPE_NAME = "columnTypeName"; - public static final String INTERNAL_COLUMN_SIZE = "columnSize"; - public static final String INTERNAL_IS_NULLABLE = "isNullable"; - public static final String INTERNAL_DECIMAL_DIGITS = "decimalDigits"; - public static final String KEY_SEQ = "KEY_SEQ"; - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/JdbcDatabase.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/JdbcDatabase.java deleted file mode 100644 index 4fb40b74e96f9..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/JdbcDatabase.java +++ /dev/null @@ -1,256 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db.jdbc; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.errorprone.annotations.MustBeClosed; -import io.airbyte.cdk.db.JdbcCompatibleSourceOperations; -import io.airbyte.cdk.db.SqlDatabase; -import io.airbyte.commons.functional.CheckedConsumer; -import io.airbyte.commons.functional.CheckedFunction; -import java.sql.Connection; -import java.sql.DatabaseMetaData; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.ResultSetMetaData; -import java.sql.SQLException; -import java.util.List; -import java.util.Spliterator; -import java.util.Spliterators; -import java.util.function.Consumer; -import java.util.function.Function; -import java.util.stream.Stream; -import java.util.stream.StreamSupport; - -/** - * Database object for interacting with a JDBC connection. - */ -public abstract class JdbcDatabase extends SqlDatabase { - - protected final JdbcCompatibleSourceOperations sourceOperations; - protected Exception streamException; - protected boolean isStreamFailed; - - public JdbcDatabase(final JdbcCompatibleSourceOperations sourceOperations) { - this.sourceOperations = sourceOperations; - } - - /** - * Execute a database query. - * - * @param query the query to execute against the database. - * @throws SQLException SQL related exceptions. - */ - public abstract void execute(CheckedConsumer query) throws SQLException; - - @Override - public void execute(final String sql) throws SQLException { - execute(connection -> connection.createStatement().execute(sql)); - } - - public void executeWithinTransaction(final List queries) throws SQLException { - execute(connection -> { - connection.setAutoCommit(false); - for (final String s : queries) { - connection.createStatement().execute(s); - } - connection.commit(); - connection.setAutoCommit(true); - }); - } - - /** - * Map records returned in a result set. It is an "unsafe" stream because the stream must be - * manually closed. Otherwise, there will be a database connection leak. - * - * @param resultSet the result set - * @param mapper function to make each record of the result set - * @param type that each record will be mapped to - * @return stream of records that the result set is mapped to. - */ - @MustBeClosed - protected static Stream toUnsafeStream(final ResultSet resultSet, final CheckedFunction mapper) { - return StreamSupport.stream(new Spliterators.AbstractSpliterator<>(Long.MAX_VALUE, Spliterator.ORDERED) { - - @Override - public boolean tryAdvance(final Consumer action) { - try { - if (!resultSet.next()) { - resultSet.close(); - return false; - } - action.accept(mapper.apply(resultSet)); - return true; - } catch (final SQLException e) { - throw new RuntimeException(e); - } - } - - }, false); - } - - /** - * Use a connection to create a {@link ResultSet} and map it into a list. The entire - * {@link ResultSet} will be buffered in memory before the list is returned. The caller does not - * need to worry about closing any database resources. - * - * @param query execute a query using a {@link Connection} to get a {@link ResultSet}. - * @param recordTransform transform each record of that result set into the desired type. do NOT - * just pass the {@link ResultSet} through. it is a stateful object will not be accessible if - * returned from recordTransform. - * @param type that each record will be mapped to. - * @return Result of the query mapped to a list. - * @throws SQLException SQL related exceptions. - */ - public abstract List bufferedResultSetQuery(CheckedFunction query, - CheckedFunction recordTransform) - throws SQLException; - - /** - * Use a connection to create a {@link ResultSet} and map it into a stream. You CANNOT assume that - * data will be returned from this method before the entire {@link ResultSet} is buffered in memory. - * Review the implementation of the database's JDBC driver or use the StreamingJdbcDriver if you - * need this guarantee. It is "unsafe" because the caller should close the returned stream to - * release the database connection. Otherwise, there will be a connection leak. - * - * @param query execute a query using a {@link Connection} to get a {@link ResultSet}. - * @param recordTransform transform each record of that result set into the desired type. do NOT - * just pass the {@link ResultSet} through. it is a stateful object will not be accessible if - * returned from recordTransform. - * @param type that each record will be mapped to. - * @return Result of the query mapped to a stream. - * @throws SQLException SQL related exceptions. - */ - @MustBeClosed - public abstract Stream unsafeResultSetQuery(CheckedFunction query, - CheckedFunction recordTransform) - throws SQLException; - - /** - * String query is a common use case for {@link JdbcDatabase#unsafeResultSetQuery}. So this method - * is created as syntactic sugar. - */ - public List queryStrings(final CheckedFunction query, - final CheckedFunction recordTransform) - throws SQLException { - try (final Stream stream = unsafeResultSetQuery(query, recordTransform)) { - return stream.toList(); - } - } - - /** - * Use a connection to create a {@link PreparedStatement} and map it into a stream. You CANNOT - * assume that data will be returned from this method before the entire {@link ResultSet} is - * buffered in memory. Review the implementation of the database's JDBC driver or use the - * StreamingJdbcDriver if you need this guarantee. It is "unsafe" because the caller should close - * the returned stream to release the database connection. Otherwise, there will be a connection - * leak. - * - * @param statementCreator create a {@link PreparedStatement} from a {@link Connection}. - * @param recordTransform transform each record of that result set into the desired type. do NOT - * just pass the {@link ResultSet} through. it is a stateful object will not be accessible if - * returned from recordTransform. - * @param type that each record will be mapped to. - * @return Result of the query mapped to a stream.void execute(String sql) - * @throws SQLException SQL related exceptions. - */ - @MustBeClosed - public abstract Stream unsafeQuery(CheckedFunction statementCreator, - CheckedFunction recordTransform) - throws SQLException; - - /** - * Json query is a common use case for - * {@link JdbcDatabase#unsafeQuery(CheckedFunction, CheckedFunction)}. So this method is created as - * syntactic sugar. - */ - public List queryJsons(final CheckedFunction statementCreator, - final CheckedFunction recordTransform) - throws SQLException { - try (final Stream stream = unsafeQuery(statementCreator, recordTransform)) { - return stream.toList(); - } - } - - public int queryInt(final String sql, final String... params) throws SQLException { - try (final Stream stream = unsafeQuery( - c -> getPreparedStatement(sql, params, c), - rs -> rs.getInt(1))) { - return stream.findFirst().get(); - } - } - - public boolean queryBoolean(final String sql, final String... params) throws SQLException { - try (final Stream stream = unsafeQuery( - c -> getPreparedStatement(sql, params, c), - rs -> rs.getBoolean(1))) { - return stream.findFirst().get(); - } - } - - /** - * It is "unsafe" because the caller must manually close the returned stream. Otherwise, there will - * be a database connection leak. - */ - @MustBeClosed - @Override - public Stream unsafeQuery(final String sql, final String... params) throws SQLException { - return unsafeQuery(connection -> { - final PreparedStatement statement = connection.prepareStatement(sql); - int i = 1; - for (final String param : params) { - statement.setString(i, param); - ++i; - } - return statement; - }, sourceOperations::rowToJson); - } - - /** - * Json query is a common use case for {@link JdbcDatabase#unsafeQuery(String, String...)}. So this - * method is created as syntactic sugar. - */ - public List queryJsons(final String sql, final String... params) throws SQLException { - try (final Stream stream = unsafeQuery(sql, params)) { - return stream.toList(); - } - } - - public ResultSetMetaData queryMetadata(final String sql, final String... params) throws SQLException { - try (final Stream q = unsafeQuery( - c -> getPreparedStatement(sql, params, c), - ResultSet::getMetaData)) { - return q.findFirst().orElse(null); - } - } - - /** - * Implementations of DatabaseMetadata hold a reference of the Connection object. It is safe to use - * this to retrieve static information like getIndentifierQuoteString() etc but calling methods - * which return a ResultSet needs the connection to be still open. This may or may not work - * depending on how the underlying Connection object is handled eg. Hikari's ProxyConnection is not - * actually closed, rather recycled into Pool. See {@link #executeMetadataQuery(Function)} which - * gives the caller a safe alternative to access ResultSet methods of DatabaseMetadata in the - * consumer before closing connection. - * - * @return - * @throws SQLException - */ - - public abstract DatabaseMetaData getMetaData() throws SQLException; - - public abstract T executeMetadataQuery(Function query) throws SQLException; - - private static PreparedStatement getPreparedStatement(String sql, String[] params, Connection c) throws SQLException { - PreparedStatement statement = c.prepareStatement(sql); - int i = 1; - for (String param : params) { - statement.setString(i, param); - i++; - } - return statement; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/JdbcSourceOperations.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/JdbcSourceOperations.java deleted file mode 100644 index eb7dbafdcab33..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/JdbcSourceOperations.java +++ /dev/null @@ -1,155 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db.jdbc; - -import static io.airbyte.cdk.db.jdbc.JdbcConstants.INTERNAL_COLUMN_NAME; -import static io.airbyte.cdk.db.jdbc.JdbcConstants.INTERNAL_COLUMN_TYPE; -import static io.airbyte.cdk.db.jdbc.JdbcConstants.INTERNAL_SCHEMA_NAME; -import static io.airbyte.cdk.db.jdbc.JdbcConstants.INTERNAL_TABLE_NAME; -import static io.airbyte.cdk.db.jdbc.JdbcUtils.ALLOWED_CURSOR_TYPES; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import io.airbyte.cdk.db.SourceOperations; -import io.airbyte.protocol.models.JsonSchemaType; -import java.sql.JDBCType; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.time.OffsetDateTime; -import java.time.OffsetTime; -import java.time.format.DateTimeParseException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Implementation of source operations with standard JDBC types. - */ -public class JdbcSourceOperations extends AbstractJdbcCompatibleSourceOperations implements SourceOperations { - - private static final Logger LOGGER = LoggerFactory.getLogger(JdbcSourceOperations.class); - - protected JDBCType safeGetJdbcType(final int columnTypeInt) { - try { - return JDBCType.valueOf(columnTypeInt); - } catch (final Exception e) { - return JDBCType.VARCHAR; - } - } - - @Override - public void copyToJsonField(final ResultSet resultSet, final int colIndex, final ObjectNode json) throws SQLException { - final int columnTypeInt = resultSet.getMetaData().getColumnType(colIndex); - final String columnName = resultSet.getMetaData().getColumnName(colIndex); - final JDBCType columnType = safeGetJdbcType(columnTypeInt); - - // https://www.cis.upenn.edu/~bcpierce/courses/629/jdkdocs/guide/jdbc/getstart/mapping.doc.html - switch (columnType) { - case BIT, BOOLEAN -> putBoolean(json, columnName, resultSet, colIndex); - case TINYINT, SMALLINT -> putShortInt(json, columnName, resultSet, colIndex); - case INTEGER -> putInteger(json, columnName, resultSet, colIndex); - case BIGINT -> putBigInt(json, columnName, resultSet, colIndex); - case FLOAT, DOUBLE -> putDouble(json, columnName, resultSet, colIndex); - case REAL -> putFloat(json, columnName, resultSet, colIndex); - case NUMERIC, DECIMAL -> putBigDecimal(json, columnName, resultSet, colIndex); - case CHAR, VARCHAR, LONGVARCHAR -> putString(json, columnName, resultSet, colIndex); - case DATE -> putDate(json, columnName, resultSet, colIndex); - case TIME -> putTime(json, columnName, resultSet, colIndex); - case TIMESTAMP -> putTimestamp(json, columnName, resultSet, colIndex); - case TIMESTAMP_WITH_TIMEZONE -> putTimestampWithTimezone(json, columnName, resultSet, colIndex); - case BLOB, BINARY, VARBINARY, LONGVARBINARY -> putBinary(json, columnName, resultSet, colIndex); - case ARRAY -> putArray(json, columnName, resultSet, colIndex); - default -> putDefault(json, columnName, resultSet, colIndex); - } - } - - @Override - public void setCursorField(final PreparedStatement preparedStatement, - final int parameterIndex, - final JDBCType cursorFieldType, - final String value) - throws SQLException { - switch (cursorFieldType) { - - case TIMESTAMP -> setTimestamp(preparedStatement, parameterIndex, value); - case TIMESTAMP_WITH_TIMEZONE -> setTimestampWithTimezone(preparedStatement, parameterIndex, value); - case TIME -> setTime(preparedStatement, parameterIndex, value); - case TIME_WITH_TIMEZONE -> setTimeWithTimezone(preparedStatement, parameterIndex, value); - case DATE -> setDate(preparedStatement, parameterIndex, value); - case BIT -> setBit(preparedStatement, parameterIndex, value); - case BOOLEAN -> setBoolean(preparedStatement, parameterIndex, value); - case TINYINT, SMALLINT -> setShortInt(preparedStatement, parameterIndex, value); - case INTEGER -> setInteger(preparedStatement, parameterIndex, value); - case BIGINT -> setBigInteger(preparedStatement, parameterIndex, value); - case FLOAT, DOUBLE -> setDouble(preparedStatement, parameterIndex, value); - case REAL -> setReal(preparedStatement, parameterIndex, value); - case NUMERIC, DECIMAL -> setDecimal(preparedStatement, parameterIndex, value); - case CHAR, NCHAR, NVARCHAR, VARCHAR, LONGVARCHAR -> setString(preparedStatement, parameterIndex, value); - case BINARY, BLOB -> setBinary(preparedStatement, parameterIndex, value); - // since cursor are expected to be comparable, handle cursor typing strictly and error on - // unrecognized types - default -> throw new IllegalArgumentException(String.format("%s cannot be used as a cursor.", cursorFieldType)); - } - } - - protected void setTimestampWithTimezone(final PreparedStatement preparedStatement, final int parameterIndex, final String value) - throws SQLException { - try { - preparedStatement.setObject(parameterIndex, OffsetDateTime.parse(value)); - } catch (final DateTimeParseException e) { - throw new RuntimeException(e); - } - } - - protected void setTimeWithTimezone(final PreparedStatement preparedStatement, final int parameterIndex, final String value) throws SQLException { - try { - preparedStatement.setObject(parameterIndex, OffsetTime.parse(value)); - } catch (final DateTimeParseException e) { - throw new RuntimeException(e); - } - } - - @Override - public JDBCType getDatabaseFieldType(final JsonNode field) { - try { - return JDBCType.valueOf(field.get(INTERNAL_COLUMN_TYPE).asInt()); - } catch (final IllegalArgumentException ex) { - LOGGER.warn(String.format("Could not convert column: %s from table: %s.%s with type: %s. Casting to VARCHAR.", - field.get(INTERNAL_COLUMN_NAME), - field.get(INTERNAL_SCHEMA_NAME), - field.get(INTERNAL_TABLE_NAME), - field.get(INTERNAL_COLUMN_TYPE))); - return JDBCType.VARCHAR; - } - } - - @Override - public boolean isCursorType(final JDBCType type) { - return ALLOWED_CURSOR_TYPES.contains(type); - } - - @Override - public JsonSchemaType getAirbyteType(final JDBCType jdbcType) { - return switch (jdbcType) { - case BIT, BOOLEAN -> JsonSchemaType.BOOLEAN; - case TINYINT, SMALLINT -> JsonSchemaType.INTEGER; - case INTEGER -> JsonSchemaType.INTEGER; - case BIGINT -> JsonSchemaType.INTEGER; - case FLOAT, DOUBLE -> JsonSchemaType.NUMBER; - case REAL -> JsonSchemaType.NUMBER; - case NUMERIC, DECIMAL -> JsonSchemaType.NUMBER; - case CHAR, NCHAR, NVARCHAR, VARCHAR, LONGVARCHAR -> JsonSchemaType.STRING; - case DATE -> JsonSchemaType.STRING; - case TIME -> JsonSchemaType.STRING; - case TIMESTAMP -> JsonSchemaType.STRING; - case BLOB, BINARY, VARBINARY, LONGVARBINARY -> JsonSchemaType.STRING_BASE_64; - case ARRAY -> JsonSchemaType.ARRAY; - // since column types aren't necessarily meaningful to Airbyte, liberally convert all unrecgonised - // types to String - default -> JsonSchemaType.STRING; - }; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/JdbcUtils.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/JdbcUtils.java deleted file mode 100644 index a247359a70925..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/JdbcUtils.java +++ /dev/null @@ -1,139 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db.jdbc; - -import static java.sql.JDBCType.BIGINT; -import static java.sql.JDBCType.DATE; -import static java.sql.JDBCType.DECIMAL; -import static java.sql.JDBCType.DOUBLE; -import static java.sql.JDBCType.FLOAT; -import static java.sql.JDBCType.INTEGER; -import static java.sql.JDBCType.LONGVARCHAR; -import static java.sql.JDBCType.NUMERIC; -import static java.sql.JDBCType.NVARCHAR; -import static java.sql.JDBCType.REAL; -import static java.sql.JDBCType.SMALLINT; -import static java.sql.JDBCType.TIME; -import static java.sql.JDBCType.TIMESTAMP; -import static java.sql.JDBCType.TIMESTAMP_WITH_TIMEZONE; -import static java.sql.JDBCType.TIME_WITH_TIMEZONE; -import static java.sql.JDBCType.TINYINT; -import static java.sql.JDBCType.VARCHAR; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.Maps; -import io.airbyte.commons.exceptions.ConfigErrorException; -import java.sql.JDBCType; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import org.jooq.JSONFormat; - -public class JdbcUtils { - - // config parameters in alphabetical order - public static final String CONNECTION_PROPERTIES_KEY = "connection_properties"; - public static final String DATABASE_KEY = "database"; - public static final String ENCRYPTION_KEY = "encryption"; - public static final String HOST_KEY = "host"; - public static final List HOST_LIST_KEY = List.of("host"); - public static final String JDBC_URL_KEY = "jdbc_url"; - public static final String JDBC_URL_PARAMS_KEY = "jdbc_url_params"; - public static final String PASSWORD_KEY = "password"; - public static final String PORT_KEY = "port"; - - public static final List PORT_LIST_KEY = List.of("port"); - public static final String SCHEMA_KEY = "schema"; - // NOTE: this is the plural version of SCHEMA_KEY - public static final String SCHEMAS_KEY = "schemas"; - public static final String SSL_KEY = "ssl"; - public static final List SSL_MODE_DISABLE = List.of("disable", "disabled"); - public static final String SSL_MODE_KEY = "ssl_mode"; - public static final String TLS_KEY = "tls"; - public static final String USERNAME_KEY = "username"; - public static final String MODE_KEY = "mode"; - public static final String AMPERSAND = "&"; - public static final String EQUALS = "="; - - // An estimate for how much additional data in sent over the wire due to conversion of source data - // into {@link AirbyteMessage}. This is due to - // the fact that records are in JSON format and all database fields are converted to Strings. - // Currently, this is used in the logic for emitting - // estimate trace messages. - public static final int PLATFORM_DATA_INCREASE_FACTOR = 2; - public static final Set ALLOWED_CURSOR_TYPES = - Set.of(TIMESTAMP_WITH_TIMEZONE, TIMESTAMP, TIME_WITH_TIMEZONE, TIME, DATE, TINYINT, SMALLINT, INTEGER, - BIGINT, FLOAT, DOUBLE, REAL, NUMERIC, DECIMAL, NVARCHAR, VARCHAR, LONGVARCHAR); - private static final JdbcSourceOperations defaultSourceOperations = new JdbcSourceOperations(); - - private static final JSONFormat defaultJSONFormat = new JSONFormat().recordFormat(JSONFormat.RecordFormat.OBJECT); - - public static JdbcSourceOperations getDefaultSourceOperations() { - return defaultSourceOperations; - } - - public static JSONFormat getDefaultJSONFormat() { - return defaultJSONFormat; - } - - public static String getFullyQualifiedTableName(final String schemaName, final String tableName) { - return schemaName != null ? schemaName + "." + tableName : tableName; - } - - public static Map parseJdbcParameters(final JsonNode config, final String jdbcUrlParamsKey) { - return parseJdbcParameters(config, jdbcUrlParamsKey, "&"); - } - - public static Map parseJdbcParameters(final JsonNode config, final String jdbcUrlParamsKey, final String delimiter) { - if (config.has(jdbcUrlParamsKey)) { - return parseJdbcParameters(config.get(jdbcUrlParamsKey).asText(), delimiter); - } else { - return Maps.newHashMap(); - } - } - - public static Map parseJdbcParameters(final String jdbcPropertiesString) { - return parseJdbcParameters(jdbcPropertiesString, "&"); - } - - @SuppressWarnings("PMD.AvoidLiteralsInIfCondition") - public static Map parseJdbcParameters(final String jdbcPropertiesString, final String delimiter) { - final Map parameters = new HashMap<>(); - if (!jdbcPropertiesString.isBlank()) { - final String[] keyValuePairs = jdbcPropertiesString.split(delimiter); - for (final String kv : keyValuePairs) { - final String[] split = kv.split("="); - if (split.length == 2) { - parameters.put(split[0], split[1]); - } else { - throw new ConfigErrorException( - "jdbc_url_params must be formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3). Got " - + jdbcPropertiesString); - } - } - } - return parameters; - } - - /** - * Checks that SSL_KEY has not been set or that an SSL_KEY is set and value can be mapped to true - * (e.g. non-zero integers, string true, etc) - * - * @param config A configuration used to check Jdbc connection - * @return true: if ssl has not been set and ssl mode not equals disabled or it has been set with - * true, false: in all other cases - */ - public static boolean useSsl(final JsonNode config) { - if (!config.has(SSL_KEY)) { - if (config.has(SSL_MODE_KEY) && config.get(SSL_MODE_KEY).has(MODE_KEY)) { - return !SSL_MODE_DISABLE.contains(config.get(SSL_MODE_KEY).get(MODE_KEY).asText()); - } else - return true; - } else - return config.get(SSL_KEY).asBoolean(); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/StreamingJdbcDatabase.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/StreamingJdbcDatabase.java deleted file mode 100644 index 50b5a36d03cbc..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/StreamingJdbcDatabase.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db.jdbc; - -import com.google.errorprone.annotations.MustBeClosed; -import io.airbyte.cdk.db.JdbcCompatibleSourceOperations; -import io.airbyte.cdk.db.jdbc.streaming.JdbcStreamingQueryConfig; -import io.airbyte.commons.functional.CheckedFunction; -import java.sql.Connection; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.util.Spliterator; -import java.util.Spliterators; -import java.util.function.Consumer; -import java.util.function.Supplier; -import java.util.stream.Stream; -import java.util.stream.StreamSupport; -import javax.sql.DataSource; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * This database allows a developer to specify a {@link JdbcStreamingQueryConfig}. This allows the - * developer to specify the correct configuration in order for a {@link PreparedStatement} to - * execute as in a streaming / chunked manner. - */ -public class StreamingJdbcDatabase extends DefaultJdbcDatabase { - - private static final Logger LOGGER = LoggerFactory.getLogger(StreamingJdbcDatabase.class); - - private final Supplier streamingQueryConfigProvider; - - public StreamingJdbcDatabase(final DataSource dataSource, - final JdbcCompatibleSourceOperations sourceOperations, - final Supplier streamingQueryConfigProvider) { - super(dataSource, sourceOperations); - this.streamingQueryConfigProvider = streamingQueryConfigProvider; - } - - /** - * Assuming that the {@link JdbcStreamingQueryConfig} is configured correctly for the JDBC driver - * being used, this method will return data in streaming / chunked fashion. Review the provided - * {@link JdbcStreamingQueryConfig} to understand the size of these chunks. If the entire stream is - * consumed the database connection will be closed automatically and the caller need not call close - * on the returned stream. This query (and the first chunk) are fetched immediately. Subsequent - * chunks will not be pulled until the first chunk is consumed. - * - * @param statementCreator create a {@link PreparedStatement} from a {@link Connection}. - * @param recordTransform transform each record of that result set into the desired type. do NOT - * just pass the {@link ResultSet} through. it is a stateful object will not be accessible if - * returned from recordTransform. - * @param type that each record will be mapped to. - * @return Result of the query mapped to a stream. This stream must be closed! - * @throws SQLException SQL related exceptions. - */ - @Override - @MustBeClosed - public Stream unsafeQuery(final CheckedFunction statementCreator, - final CheckedFunction recordTransform) - throws SQLException { - try { - final Connection connection = dataSource.getConnection(); - final PreparedStatement statement = statementCreator.apply(connection); - final JdbcStreamingQueryConfig streamingConfig = streamingQueryConfigProvider.get(); - streamingConfig.initialize(connection, statement); - return toUnsafeStream(statement.executeQuery(), recordTransform, streamingConfig) - .onClose(() -> { - try { - if (!connection.getAutoCommit()) { - connection.setAutoCommit(true); - } - connection.close(); - if (isStreamFailed) { - throw new RuntimeException(streamException); - } - } catch (final SQLException e) { - throw new RuntimeException(e); - } - }); - } catch (final SQLException e) { - throw new RuntimeException(e); - } - } - - /** - * This method differs from {@link DefaultJdbcDatabase#toUnsafeStream} in that it takes a streaming - * config that adjusts the fetch size dynamically according to sampled row size. - */ - protected Stream toUnsafeStream(final ResultSet resultSet, - final CheckedFunction mapper, - final JdbcStreamingQueryConfig streamingConfig) { - return StreamSupport.stream(new Spliterators.AbstractSpliterator<>(Long.MAX_VALUE, Spliterator.ORDERED) { - - @Override - public boolean tryAdvance(final Consumer action) { - try { - if (!resultSet.next()) { - resultSet.close(); - return false; - } - final T dataRow = mapper.apply(resultSet); - streamingConfig.accept(resultSet, dataRow); - action.accept(dataRow); - return true; - } catch (final SQLException e) { - LOGGER.error("SQLState: {}, Message: {}", e.getSQLState(), e.getMessage()); - streamException = e; - isStreamFailed = true; - throw new RuntimeException(e); - } - } - - }, false); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/streaming/AdaptiveStreamingQueryConfig.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/streaming/AdaptiveStreamingQueryConfig.java deleted file mode 100644 index f7c933ac3184c..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/streaming/AdaptiveStreamingQueryConfig.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db.jdbc.streaming; - -import java.sql.Connection; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Statement; -import java.util.Optional; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class AdaptiveStreamingQueryConfig implements JdbcStreamingQueryConfig { - - private static final Logger LOGGER = LoggerFactory.getLogger(AdaptiveStreamingQueryConfig.class); - private final FetchSizeEstimator fetchSizeEstimator; - private int currentFetchSize; - - public AdaptiveStreamingQueryConfig() { - this.fetchSizeEstimator = TwoStageSizeEstimator.getInstance(); - this.currentFetchSize = FetchSizeConstants.INITIAL_SAMPLE_SIZE; - } - - @Override - public void initialize(final Connection connection, final Statement preparedStatement) throws SQLException { - connection.setAutoCommit(false); - preparedStatement.setFetchSize(FetchSizeConstants.INITIAL_SAMPLE_SIZE); - currentFetchSize = FetchSizeConstants.INITIAL_SAMPLE_SIZE; - LOGGER.info("Set initial fetch size: {} rows", preparedStatement.getFetchSize()); - } - - @Override - public void accept(final ResultSet resultSet, final Object rowData) throws SQLException { - fetchSizeEstimator.accept(rowData); - final Optional newFetchSize = fetchSizeEstimator.getFetchSize(); - - if (newFetchSize.isPresent() && currentFetchSize != newFetchSize.get()) { - LOGGER.info("Set new fetch size: {} rows", newFetchSize.get()); - resultSet.setFetchSize(newFetchSize.get()); - currentFetchSize = newFetchSize.get(); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/streaming/BaseSizeEstimator.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/streaming/BaseSizeEstimator.java deleted file mode 100644 index 0582f25fc00ef..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/streaming/BaseSizeEstimator.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db.jdbc.streaming; - -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.commons.json.Jsons; - -/** - * Fetch size (number of rows) = target buffer byte size / max row byte size - */ -public abstract class BaseSizeEstimator implements FetchSizeEstimator { - - // desired buffer size in memory - private final long targetBufferByteSize; - private final int minFetchSize; - private final int defaultFetchSize; - private final int maxFetchSize; - - protected double maxRowByteSize = 0.0; - - protected BaseSizeEstimator(final long targetBufferByteSize, - final int minFetchSize, - final int defaultFetchSize, - final int maxFetchSize) { - this.targetBufferByteSize = targetBufferByteSize; - this.minFetchSize = minFetchSize; - this.defaultFetchSize = defaultFetchSize; - this.maxFetchSize = maxFetchSize; - } - - /** - * What we really want is to know how much memory each {@code rowData} takes. However, there is no - * easy way to measure that. So we use the byte size of the serialized row to approximate that. - */ - @VisibleForTesting - public static long getEstimatedByteSize(final Object rowData) { - if (rowData == null) { - return 0L; - } - // The string length is multiplied by 4 assuming each character is a - // full UTF-8 character. In reality, a UTF-8 character is encoded as - // 1 to 4 bytes. So this is an overestimation. This is alright, because - // the whole method only provides an estimation. Please never convert - // the string to byte[] to get the exact length. That conversion is known - // to introduce a lot of memory overhead. - // - // We are using 3L as the median byte-size of a serialized char here assuming that most chars fit - // into the ASCII space (fewer bytes) - - return Jsons.serialize(rowData).length() * 3L; - } - - /** - * This method ensures that the fetch size is between {@code minFetchSize} and {@code maxFetchSize}, - * inclusively. - */ - @SuppressWarnings("PMD.AvoidLiteralsInIfCondition") - protected int getBoundedFetchSize() { - if (maxRowByteSize <= 0.0) { - return defaultFetchSize; - } - final long rawFetchSize = Math.round(targetBufferByteSize / maxRowByteSize); - if (rawFetchSize > Integer.MAX_VALUE) { - return maxFetchSize; - } - return Math.max(minFetchSize, Math.min(maxFetchSize, (int) rawFetchSize)); - } - - double getMaxRowByteSize() { - return maxRowByteSize; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/streaming/FetchSizeConstants.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/streaming/FetchSizeConstants.java deleted file mode 100644 index 26e07e8c016d2..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/streaming/FetchSizeConstants.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db.jdbc.streaming; - -public final class FetchSizeConstants { - - // The desired buffer size in memory to store the fetched rows. - // This size is not enforced. It is only used to calculate a proper - // fetch size. The max row size the connector can handle is actually - // limited by the heap size. - public static final double TARGET_BUFFER_SIZE_RATIO = 0.6; - public static final long MIN_BUFFER_BYTE_SIZE = 250L * 1024L * 1024L; // 250 MB - // sample size for making the first estimation of the row size - public static final int INITIAL_SAMPLE_SIZE = 10; - // sample every N rows during the post-initial stage - public static final int SAMPLE_FREQUENCY = 100; - - public static final int MIN_FETCH_SIZE = 1; - public static final int DEFAULT_FETCH_SIZE = 1000; - public static final int MAX_FETCH_SIZE = 1_000_000_000; - - private FetchSizeConstants() {} - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/streaming/FetchSizeEstimator.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/streaming/FetchSizeEstimator.java deleted file mode 100644 index acbd491c1dbfa..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/streaming/FetchSizeEstimator.java +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db.jdbc.streaming; - -import java.util.Optional; -import java.util.function.Consumer; - -public interface FetchSizeEstimator extends Consumer { - - /** - * @return the estimated fetch size when the estimation is ready - */ - Optional getFetchSize(); - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/streaming/InitialSizeEstimator.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/streaming/InitialSizeEstimator.java deleted file mode 100644 index 1972f14c9b397..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/streaming/InitialSizeEstimator.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db.jdbc.streaming; - -import java.util.Optional; - -/** - * This class estimates the max row byte size by measuring the first consecutive - * {@code initialSampleSize} rows. - */ -public class InitialSizeEstimator extends BaseSizeEstimator implements FetchSizeEstimator { - - private final int sampleSize; - private int counter = 0; - - public InitialSizeEstimator(final long bufferByteSize, - final int initialSampleSize, - final int minFetchSize, - final int defaultFetchSize, - final int maxFetchSize) { - super(bufferByteSize, minFetchSize, defaultFetchSize, maxFetchSize); - this.sampleSize = initialSampleSize; - } - - @Override - public void accept(final Object row) { - final long byteSize = getEstimatedByteSize(row); - if (maxRowByteSize < byteSize) { - maxRowByteSize = byteSize; - } - counter++; - } - - @Override - public Optional getFetchSize() { - if (counter < sampleSize) { - return Optional.empty(); - } - return Optional.of(getBoundedFetchSize()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/streaming/JdbcStreamingQueryConfig.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/streaming/JdbcStreamingQueryConfig.java deleted file mode 100644 index b79b40b64ca9b..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/streaming/JdbcStreamingQueryConfig.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db.jdbc.streaming; - -import io.airbyte.commons.functional.CheckedBiConsumer; -import java.sql.Connection; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Statement; - -/* - * Interface that defines how to stream results from a Jdbc database. This involves determining - * updating what the fetch size should be based on the size of the existing rows. 1. The config - * initializes the fetch size and sets up the estimator. 2. The config then accepts each row and - * feeds it to the estimator. If the estimator has a new estimate, it updates the fetch size. - */ - -public interface JdbcStreamingQueryConfig extends CheckedBiConsumer { - - void initialize(final Connection connection, final Statement statement) throws SQLException; - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/streaming/NoOpStreamingQueryConfig.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/streaming/NoOpStreamingQueryConfig.java deleted file mode 100644 index 253e92e46b16f..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/streaming/NoOpStreamingQueryConfig.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db.jdbc.streaming; - -import java.sql.Connection; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Statement; - -public class NoOpStreamingQueryConfig implements JdbcStreamingQueryConfig { - - @Override - public void initialize(final Connection connection, final Statement preparedStatement) throws SQLException {} - - @Override - public void accept(final ResultSet resultSet, final Object o) throws SQLException {} - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/streaming/SamplingSizeEstimator.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/streaming/SamplingSizeEstimator.java deleted file mode 100644 index e075c40e50108..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/streaming/SamplingSizeEstimator.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db.jdbc.streaming; - -import java.util.Optional; - -/** - * This class adjusts the max row byte size by measuring one row out of every - * {@code sampleFrequency} rows. - */ -public class SamplingSizeEstimator extends BaseSizeEstimator implements FetchSizeEstimator { - - private final int sampleFrequency; - - private int counter = 0; - private boolean hasNewEstimation = false; - - public SamplingSizeEstimator(final long bufferByteSize, - final int sampleFrequency, - final double initialRowByteSize, - final int minFetchSize, - final int defaultFetchSize, - final int maxFetchSize) { - super(bufferByteSize, minFetchSize, defaultFetchSize, maxFetchSize); - this.sampleFrequency = sampleFrequency; - this.maxRowByteSize = initialRowByteSize; - } - - @Override - public void accept(final Object row) { - counter++; - if (counter < sampleFrequency) { - return; - } - - counter = 0; - final long rowByteSize = getEstimatedByteSize(row); - if (this.maxRowByteSize < rowByteSize) { - this.maxRowByteSize = rowByteSize; - hasNewEstimation = true; - } - } - - @Override - public Optional getFetchSize() { - if (!hasNewEstimation) { - return Optional.empty(); - } - - hasNewEstimation = false; - return Optional.of(getBoundedFetchSize()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/streaming/TwoStageSizeEstimator.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/streaming/TwoStageSizeEstimator.java deleted file mode 100644 index aceba25813b10..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/jdbc/streaming/TwoStageSizeEstimator.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db.jdbc.streaming; - -import com.google.common.annotations.VisibleForTesting; -import java.util.Optional; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * This estimator first uses the {@link InitialSizeEstimator} to calculate an initial fetch size by - * sampling the first N rows consecutively, and then switches to {@link SamplingSizeEstimator} to - * periodically adjust the fetch size by sampling every M rows. - */ -public class TwoStageSizeEstimator implements FetchSizeEstimator { - - private static final Logger LOGGER = LoggerFactory.getLogger(TwoStageSizeEstimator.class); - - private final int initialSampleSize; - private BaseSizeEstimator delegate; - private int counter = 0; - - public static TwoStageSizeEstimator getInstance() { - return new TwoStageSizeEstimator(); - } - - private TwoStageSizeEstimator() { - this.initialSampleSize = FetchSizeConstants.INITIAL_SAMPLE_SIZE; - this.delegate = new InitialSizeEstimator( - FetchSizeConstants.MIN_BUFFER_BYTE_SIZE, - initialSampleSize, - FetchSizeConstants.MIN_FETCH_SIZE, - FetchSizeConstants.DEFAULT_FETCH_SIZE, - FetchSizeConstants.MAX_FETCH_SIZE); - } - - @Override - public Optional getFetchSize() { - return delegate.getFetchSize(); - } - - @Override - public void accept(final Object rowData) { - if (counter <= initialSampleSize + 1) { - counter++; - // switch to SamplingSizeEstimator after the initial N rows - if (delegate instanceof InitialSizeEstimator && counter > initialSampleSize) { - delegate = new SamplingSizeEstimator( - getTargetBufferByteSize(Runtime.getRuntime().maxMemory()), - FetchSizeConstants.SAMPLE_FREQUENCY, - delegate.getMaxRowByteSize(), - FetchSizeConstants.MIN_FETCH_SIZE, - FetchSizeConstants.DEFAULT_FETCH_SIZE, - FetchSizeConstants.MAX_FETCH_SIZE); - } - } - - delegate.accept(rowData); - } - - @VisibleForTesting - static long getTargetBufferByteSize(final Long maxMemory) { - if (maxMemory == null || maxMemory == Long.MAX_VALUE) { - LOGGER.info("No max memory limit found, use min JDBC buffer size: {}", FetchSizeConstants.MIN_BUFFER_BYTE_SIZE); - return FetchSizeConstants.MIN_BUFFER_BYTE_SIZE; - } - final long targetBufferByteSize = Math.round(maxMemory * FetchSizeConstants.TARGET_BUFFER_SIZE_RATIO); - final long finalBufferByteSize = Math.max(FetchSizeConstants.MIN_BUFFER_BYTE_SIZE, targetBufferByteSize); - LOGGER.info("Max memory limit: {}, JDBC buffer size: {}", maxMemory, finalBufferByteSize); - return finalBufferByteSize; - } - - @VisibleForTesting - BaseSizeEstimator getDelegate() { - return delegate; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/util/JsonUtil.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/util/JsonUtil.java deleted file mode 100644 index 0be5e4aa6ad55..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/util/JsonUtil.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db.util; - -import com.fasterxml.jackson.databind.node.ArrayNode; -import com.fasterxml.jackson.databind.node.ContainerNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import java.math.BigDecimal; - -public class JsonUtil { - - private static final String ERROR_MESSAGE = "Can't populate the node type : "; - - public static void putBooleanValueIntoJson(final ContainerNode node, final boolean value, final String fieldName) { - if (node instanceof ArrayNode) { - ((ArrayNode) node).add(value); - } else if (node instanceof ObjectNode) { - ((ObjectNode) node).put(fieldName, value); - } else { - throw new RuntimeException(ERROR_MESSAGE + node.getClass().getName()); - } - } - - public static void putLongValueIntoJson(final ContainerNode node, final long value, final String fieldName) { - if (node instanceof ArrayNode) { - ((ArrayNode) node).add(value); - } else if (node instanceof ObjectNode) { - ((ObjectNode) node).put(fieldName, value); - } else { - throw new RuntimeException(ERROR_MESSAGE + node.getClass().getName()); - } - } - - public static void putDoubleValueIntoJson(final ContainerNode node, final double value, final String fieldName) { - if (node instanceof ArrayNode) { - ((ArrayNode) node).add(value); - } else if (node instanceof ObjectNode) { - ((ObjectNode) node).put(fieldName, value); - } else { - throw new RuntimeException(ERROR_MESSAGE + node.getClass().getName()); - } - } - - public static void putBigDecimalValueIntoJson(final ContainerNode node, final BigDecimal value, final String fieldName) { - if (node instanceof ArrayNode) { - ((ArrayNode) node).add(value); - } else if (node instanceof ObjectNode) { - ((ObjectNode) node).put(fieldName, value); - } else { - throw new RuntimeException(ERROR_MESSAGE + node.getClass().getName()); - } - } - - public static void putStringValueIntoJson(final ContainerNode node, final String value, final String fieldName) { - if (node instanceof ArrayNode) { - ((ArrayNode) node).add(value); - } else if (node instanceof ObjectNode) { - ((ObjectNode) node).put(fieldName, value); - } else { - throw new RuntimeException(ERROR_MESSAGE + node.getClass().getName()); - } - } - - public static void putBytesValueIntoJson(final ContainerNode node, final byte[] value, final String fieldName) { - if (node instanceof ArrayNode) { - ((ArrayNode) node).add(value); - } else if (node instanceof ObjectNode) { - ((ObjectNode) node).put(fieldName, value); - } else { - throw new RuntimeException(ERROR_MESSAGE + node.getClass().getName()); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/util/SSLCertificateUtils.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/util/SSLCertificateUtils.java deleted file mode 100644 index 7a502d18bfc6a..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/util/SSLCertificateUtils.java +++ /dev/null @@ -1,188 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db.util; - -import java.io.BufferedInputStream; -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.OutputStream; -import java.net.URI; -import java.nio.charset.StandardCharsets; -import java.nio.file.FileSystem; -import java.nio.file.FileSystems; -import java.nio.file.Files; -import java.nio.file.Path; -import java.security.KeyFactory; -import java.security.KeyStore; -import java.security.KeyStoreException; -import java.security.NoSuchAlgorithmException; -import java.security.PrivateKey; -import java.security.SecureRandom; -import java.security.cert.Certificate; -import java.security.cert.CertificateException; -import java.security.cert.CertificateFactory; -import java.security.spec.InvalidKeySpecException; -import java.security.spec.PKCS8EncodedKeySpec; -import java.util.Objects; -import java.util.Random; -import java.util.concurrent.TimeUnit; -import javax.net.ssl.SSLContext; -import org.apache.http.ssl.SSLContextBuilder; -import org.apache.http.ssl.SSLContexts; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * General SSL utilities used for certificate and keystore operations related to secured db - * connections. - */ -public class SSLCertificateUtils { - - private static final Logger LOGGER = LoggerFactory.getLogger(SSLCertificateUtils.class); - private static final String PKCS_12 = "PKCS12"; - private static final String X509 = "X.509"; - private static final Random RANDOM = new SecureRandom(); - // #17000: postgres driver is hardcoded to only load an entry alias "user" - public static final String KEYSTORE_ENTRY_PREFIX = "user"; - public static final String KEYSTORE_FILE_NAME = KEYSTORE_ENTRY_PREFIX + "keystore_"; - public static final String KEYSTORE_FILE_TYPE = ".p12"; - - private static URI saveKeyStoreToFile(final KeyStore keyStore, final String keyStorePassword, final FileSystem filesystem, final String directory) - throws IOException, CertificateException, KeyStoreException, NoSuchAlgorithmException { - final FileSystem fs = Objects.requireNonNullElse(filesystem, FileSystems.getDefault()); - final Path pathToStore = fs.getPath(Objects.toString(directory, "")); - final Path pathToFile = pathToStore.resolve(KEYSTORE_FILE_NAME + RANDOM.nextInt() + KEYSTORE_FILE_TYPE); - final OutputStream os = Files.newOutputStream(pathToFile); - keyStore.store(os, keyStorePassword.toCharArray()); - assert (Files.exists(pathToFile) == true); - - return pathToFile.toUri(); - } - - private static void runProcess(final String cmd, final Runtime run) throws IOException, InterruptedException { - LOGGER.debug("running [{}]", cmd); - final Process p = run.exec(cmd); - if (!p.waitFor(30, TimeUnit.SECONDS)) { - p.destroy(); - throw new RuntimeException("Timeout while executing: " + cmd); - } - } - - private static Certificate fromPEMString(final String certString) throws CertificateException { - final CertificateFactory cf = CertificateFactory.getInstance(X509); - final ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(certString.getBytes(StandardCharsets.UTF_8)); - final BufferedInputStream bufferedInputStream = new BufferedInputStream(byteArrayInputStream); - return cf.generateCertificate(bufferedInputStream); - } - - public static URI keyStoreFromCertificate(final Certificate cert, - final String keyStorePassword, - final FileSystem filesystem, - final String directory) - throws KeyStoreException, CertificateException, IOException, NoSuchAlgorithmException { - final KeyStore keyStore = KeyStore.getInstance(PKCS_12); - keyStore.load(null); - keyStore.setCertificateEntry(KEYSTORE_ENTRY_PREFIX + "1", cert); - return saveKeyStoreToFile(keyStore, keyStorePassword, filesystem, directory); - } - - public static URI keyStoreFromCertificate(final String certString, - final String keyStorePassword, - final FileSystem filesystem, - final String directory) - throws CertificateException, IOException, KeyStoreException, NoSuchAlgorithmException { - return keyStoreFromCertificate(fromPEMString(certString), keyStorePassword, filesystem, directory); - } - - public static URI keyStoreFromCertificate(final String certString, - final String keyStorePassword) - throws CertificateException, IOException, KeyStoreException, NoSuchAlgorithmException { - return keyStoreFromCertificate(fromPEMString(certString), keyStorePassword, null, null); - } - - public static URI keyStoreFromCertificate(final String certString, final String keyStorePassword, final String directory) - throws CertificateException, IOException, KeyStoreException, NoSuchAlgorithmException { - return keyStoreFromCertificate(certString, keyStorePassword, FileSystems.getDefault(), directory); - } - - public static URI keyStoreFromClientCertificate( - final Certificate cert, - final PrivateKey key, - final String keyStorePassword, - final FileSystem filesystem, - final String directory) - throws KeyStoreException, CertificateException, IOException, NoSuchAlgorithmException { - final KeyStore keyStore = KeyStore.getInstance(PKCS_12); - keyStore.load(null); - keyStore.setKeyEntry(KEYSTORE_ENTRY_PREFIX, key, keyStorePassword.toCharArray(), new Certificate[] {cert}); - return saveKeyStoreToFile(keyStore, keyStorePassword, filesystem, directory); - } - - public static URI keyStoreFromClientCertificate( - final String certString, - final String keyString, - final String keyStorePassword, - final FileSystem filesystem, - final String directory) - throws IOException, InterruptedException, NoSuchAlgorithmException, InvalidKeySpecException, CertificateException, KeyStoreException { - - // Convert RSA key (PKCS#1) to PKCS#8 key - // Note: java.security doesn't have a built-in support of PKCS#1 format. A conversion using openssl - // is necessary. - // Since this is a single operation it's better than adding an external lib (e.g BouncyCastle) - final Path tmpDir = Files.createTempDirectory(null); - final Path pkcs1Key = Files.createTempFile(tmpDir, null, null); - final Path pkcs8Key = Files.createTempFile(tmpDir, null, null); - pkcs1Key.toFile().deleteOnExit(); - pkcs8Key.toFile().deleteOnExit(); - - Files.write(pkcs1Key, keyString.getBytes(StandardCharsets.UTF_8)); - runProcess( - "openssl pkcs8 -topk8 -inform PEM -outform DER -in " + pkcs1Key.toAbsolutePath() + " -out " + pkcs8Key.toAbsolutePath() - + " -nocrypt -passout pass:" + keyStorePassword, - Runtime.getRuntime()); - - final PKCS8EncodedKeySpec spec = new PKCS8EncodedKeySpec(Files.readAllBytes(pkcs8Key)); - PrivateKey privateKey; - try { - privateKey = KeyFactory.getInstance("RSA").generatePrivate(spec); - } catch (final InvalidKeySpecException ex1) { - try { - privateKey = KeyFactory.getInstance("DSA").generatePrivate(spec); - } catch (final InvalidKeySpecException ex2) { - privateKey = KeyFactory.getInstance("EC").generatePrivate(spec); - } - } - - return keyStoreFromClientCertificate(fromPEMString(certString), privateKey, keyStorePassword, filesystem, directory); - - } - - public static URI keyStoreFromClientCertificate( - final String certString, - final String keyString, - final String keyStorePassword, - final String directory) - throws CertificateException, IOException, NoSuchAlgorithmException, InvalidKeySpecException, KeyStoreException, InterruptedException { - return keyStoreFromClientCertificate(certString, keyString, keyStorePassword, FileSystems.getDefault(), directory); - } - - public static SSLContext createContextFromCaCert(String caCertificate) { - try { - CertificateFactory factory = CertificateFactory.getInstance(X509); - Certificate trustedCa = factory.generateCertificate( - new ByteArrayInputStream(caCertificate.getBytes(StandardCharsets.UTF_8))); - KeyStore trustStore = KeyStore.getInstance(PKCS_12); - trustStore.load(null, null); - trustStore.setCertificateEntry("ca", trustedCa); - SSLContextBuilder sslContextBuilder = - SSLContexts.custom().loadTrustMaterial(trustStore, null); - return sslContextBuilder.build(); - } catch (Exception e) { - throw new RuntimeException(e); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/BaseConnector.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/BaseConnector.java deleted file mode 100644 index 29a54a5f68221..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/BaseConnector.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations; - -import io.airbyte.cdk.integrations.base.Integration; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.protocol.models.v0.ConnectorSpecification; - -public abstract class BaseConnector implements Integration { - - /** - * By convention the spec is stored as a resource for java connectors. That resource is called - * spec.json. - * - * @return specification. - * @throws Exception - any exception. - */ - @Override - public ConnectorSpecification spec() throws Exception { - // return a JsonSchema representation of the spec for the integration. - final String resourceString = MoreResources.readResource("spec.json"); - return Jsons.deserialize(resourceString, ConnectorSpecification.class); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/JdbcConnector.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/JdbcConnector.java deleted file mode 100644 index 44f749e1694ab..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/JdbcConnector.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations; - -import io.airbyte.cdk.db.factory.DatabaseDriver; -import java.time.Duration; -import java.time.temporal.ChronoUnit; -import java.time.temporal.TemporalUnit; -import java.util.Map; -import java.util.Optional; - -public abstract class JdbcConnector extends BaseConnector { - - public static final String POSTGRES_CONNECT_TIMEOUT_KEY = "connectTimeout"; - public static final Duration POSTGRES_CONNECT_TIMEOUT_DEFAULT_DURATION = Duration.ofSeconds(10); - - public static final String CONNECT_TIMEOUT_KEY = "connectTimeout"; - public static final Duration CONNECT_TIMEOUT_DEFAULT = Duration.ofSeconds(60); - - protected final String driverClassName; - - protected JdbcConnector(String driverClassName) { - this.driverClassName = driverClassName; - } - - protected Duration getConnectionTimeout(final Map connectionProperties) { - return getConnectionTimeout(connectionProperties, driverClassName); - } - - /** - * Retrieves connectionTimeout value from connection properties in millis, default minimum timeout - * is 60 seconds since Hikari default of 30 seconds is not enough for acceptance tests. In the case - * the value is 0, pass the value along as Hikari and Postgres use default max value for 0 timeout - * value. - * - * NOTE: Postgres timeout is measured in seconds: - * https://jdbc.postgresql.org/documentation/head/connect.html - * - * @param connectionProperties custom jdbc_url_parameters containing information on connection - * properties - * @param driverClassName name of the JDBC driver - * @return DataSourceBuilder class used to create dynamic fields for DataSource - */ - public static Duration getConnectionTimeout(final Map connectionProperties, String driverClassName) { - final Optional parsedConnectionTimeout = switch (DatabaseDriver.findByDriverClassName(driverClassName)) { - case POSTGRESQL -> maybeParseDuration(connectionProperties.get(POSTGRES_CONNECT_TIMEOUT_KEY), ChronoUnit.SECONDS) - .or(() -> Optional.of(POSTGRES_CONNECT_TIMEOUT_DEFAULT_DURATION)); - case MYSQL -> maybeParseDuration(connectionProperties.get("connectTimeout"), ChronoUnit.MILLIS); - case MSSQLSERVER -> maybeParseDuration(connectionProperties.get("loginTimeout"), ChronoUnit.SECONDS); - default -> maybeParseDuration(connectionProperties.get(CONNECT_TIMEOUT_KEY), ChronoUnit.SECONDS) - // Enforce minimum timeout duration for unspecified data sources. - .filter(d -> d.compareTo(CONNECT_TIMEOUT_DEFAULT) >= 0); - }; - return parsedConnectionTimeout.orElse(CONNECT_TIMEOUT_DEFAULT); - } - - private static Optional maybeParseDuration(final String stringValue, TemporalUnit unit) { - if (stringValue == null) { - return Optional.empty(); - } - final long number; - try { - number = Long.parseLong(stringValue); - } catch (NumberFormatException __) { - return Optional.empty(); - } - if (number < 0) { - return Optional.empty(); - } - return Optional.of(Duration.of(number, unit)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/AirbyteExceptionHandler.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/AirbyteExceptionHandler.java deleted file mode 100644 index 94c75b6f01180..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/AirbyteExceptionHandler.java +++ /dev/null @@ -1,162 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.base; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import java.util.Comparator; -import java.util.HashSet; -import java.util.Optional; -import java.util.Set; -import java.util.regex.Matcher; -import java.util.regex.Pattern; -import javax.validation.constraints.NotNull; -import org.apache.commons.lang3.exception.ExceptionUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class AirbyteExceptionHandler implements Thread.UncaughtExceptionHandler { - - private static final Logger LOGGER = LoggerFactory.getLogger(AirbyteExceptionHandler.class); - public static final String logMessage = "Something went wrong in the connector. See the logs for more details."; - - // Basic deinterpolation helpers to avoid doing _really_ dumb deinterpolation. - // E.g. if "id" is in the list of strings to remove, we don't want to modify the message "Invalid - // identifier". - private static final String REGEX_PREFIX = "(^|[^A-Za-z0-9])"; - private static final String REGEX_SUFFIX = "($|[^A-Za-z0-9])"; - - /** - * If this list is populated, then the exception handler will attempt to deinterpolate the error - * message before emitting a trace message. This is useful for connectors which (a) emit a single - * exception class, and (b) rely on that exception's message to distinguish between error types. - *

- * If this is active, then the trace message will: - *

    - *
  1. Not contain the stacktrace at all. This causes Sentry to use its fallback grouping (using - * exception class and message)
  2. - *
  3. Contain the original exception message as the external message, and a mangled message as the - * internal message.
  4. - *
- */ - @VisibleForTesting - static final Set STRINGS_TO_DEINTERPOLATE = new HashSet<>(); - static { - addCommonStringsToDeinterpolate(); - } - - @VisibleForTesting - static final Set> THROWABLES_TO_DEINTERPOLATE = new HashSet<>(); - - @Override - public void uncaughtException(final Thread thread, final Throwable throwable) { - // This is a naive AirbyteTraceMessage emission in order to emit one when any error occurs in a - // connector. - // If a connector implements AirbyteTraceMessage emission itself, this code will result in an - // additional one being emitted. - // this is fine tho because: - // "The earliest AirbyteTraceMessage where type=error will be used to populate the FailureReason for - // the sync." - // from the spec: - // https://docs.google.com/document/d/1ctrj3Yh_GjtQ93aND-WH3ocqGxsmxyC3jfiarrF6NY0/edit# - LOGGER.error(logMessage, throwable); - - // Attempt to deinterpolate the error message before emitting a trace message - final String mangledMessage; - // If any exception in the chain is of a deinterpolatable type, find it and deinterpolate its - // message. - // This assumes that any wrapping exceptions are just noise (e.g. runtime exception). - final Optional deinterpolatableException = ExceptionUtils.getThrowableList(throwable).stream() - .filter(t -> THROWABLES_TO_DEINTERPOLATE.stream().anyMatch(deinterpolatableClass -> deinterpolatableClass.isAssignableFrom(t.getClass()))) - .findFirst(); - final boolean messageWasMangled; - if (deinterpolatableException.isPresent()) { - final String originalMessage = deinterpolatableException.get().getMessage(); - mangledMessage = STRINGS_TO_DEINTERPOLATE.stream() - // Sort the strings longest to shortest, in case any target string is a substring of another - // e.g. "airbyte_internal" should be swapped out before "airbyte" - .sorted(Comparator.comparing(String::length).reversed()) - .reduce(originalMessage, AirbyteExceptionHandler::deinterpolate); - messageWasMangled = !mangledMessage.equals(originalMessage); - } else { - mangledMessage = throwable.getMessage(); - messageWasMangled = false; - } - - if (!messageWasMangled) { - // If we did not modify the message (either not a deinterpolatable class, or we tried to - // deinterpolate but made no changes) then emit our default trace message - AirbyteTraceMessageUtility.emitSystemErrorTrace(throwable, logMessage); - } else { - // If we did modify the message, then emit a custom trace message - AirbyteTraceMessageUtility.emitCustomErrorTrace(throwable.getMessage(), mangledMessage); - } - - terminate(); - } - - @NotNull - private static String deinterpolate(final String message, final String targetString) { - // (?i) makes the pattern case-insensitive - final String quotedTarget = '(' + "(?i)" + Pattern.quote(targetString) + ')'; - final String targetRegex = REGEX_PREFIX + quotedTarget + REGEX_SUFFIX; - final Pattern pattern = Pattern.compile(targetRegex); - final Matcher matcher = pattern.matcher(message); - - // The pattern has three capturing groups: - // 1. The character before the target string (or an empty string, if it matched start-of-string) - // 2. The target string - // 3. The character after the target string (or empty string for end-of-string) - // We want to preserve the characters before and after the target string, so we use $1 and $3 to - // reinsert them - // but the target string is replaced with just '?' - return matcher.replaceAll("$1?$3"); - } - - public static void addThrowableForDeinterpolation(final Class klass) { - THROWABLES_TO_DEINTERPOLATE.add(klass); - } - - public static void addStringForDeinterpolation(final String string) { - if (string != null) { - STRINGS_TO_DEINTERPOLATE.add(string.toLowerCase()); - } - } - - public static void addAllStringsInConfigForDeinterpolation(final JsonNode node) { - if (node.isTextual()) { - addStringForDeinterpolation(node.asText()); - } else if (node.isContainerNode()) { - for (final JsonNode subNode : node) { - addAllStringsInConfigForDeinterpolation(subNode); - } - } - } - - // by doing this in a separate method we can mock it to avoid closing the jvm and therefore test - // properly - protected void terminate() { - System.exit(1); - } - - @VisibleForTesting - static void addCommonStringsToDeinterpolate() { - // Add some common strings to deinterpolate, regardless of what the connector is doing - addStringForDeinterpolation("airbyte"); - addStringForDeinterpolation("config"); - addStringForDeinterpolation("configuration"); - addStringForDeinterpolation("description"); - addStringForDeinterpolation("email"); - addStringForDeinterpolation("id"); - addStringForDeinterpolation("location"); - addStringForDeinterpolation("message"); - addStringForDeinterpolation("name"); - addStringForDeinterpolation("state"); - addStringForDeinterpolation("status"); - addStringForDeinterpolation("type"); - addStringForDeinterpolation("userEmail"); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/AirbyteMessageConsumer.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/AirbyteMessageConsumer.java deleted file mode 100644 index 85f045a0c2156..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/AirbyteMessageConsumer.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.base; - -import io.airbyte.commons.concurrency.VoidCallable; -import io.airbyte.commons.functional.CheckedConsumer; -import io.airbyte.protocol.models.v0.AirbyteMessage; - -/** - * Interface for the destination's consumption of incoming records wrapped in an - * {@link io.airbyte.protocol.models.v0.AirbyteMessage}. - * - * This is via the accept method, which commonly handles parsing, validation, batching and writing - * of the transformed data to the final destination i.e. the technical system data is being written - * to. - * - * Lifecycle: - *
    - *
  • 1. Instantiate consumer.
  • - *
  • 2. start() to initialize any resources that need to be created BEFORE the consumer consumes - * any messages.
  • - *
  • 3. Consumes ALL records via {@link AirbyteMessageConsumer#accept(AirbyteMessage)}
  • - *
  • 4. Always (on success or failure) finalize by calling - * {@link AirbyteMessageConsumer#close()}
  • - *
- * We encourage implementing this interface using the {@link FailureTrackingAirbyteMessageConsumer} - * class. - */ -public interface AirbyteMessageConsumer extends CheckedConsumer, AutoCloseable { - - void start() throws Exception; - - /** - * Consumes all {@link AirbyteMessage}s - * - * @param message {@link AirbyteMessage} to be processed - * @throws Exception - */ - @Override - void accept(AirbyteMessage message) throws Exception; - - /** - * Executes at the end of consumption of all incoming streamed data regardless of success or failure - * - * @throws Exception - */ - @Override - void close() throws Exception; - - /** - * Append a function to be called on {@link AirbyteMessageConsumer#close}. - */ - static AirbyteMessageConsumer appendOnClose(final AirbyteMessageConsumer consumer, final VoidCallable voidCallable) { - return new AirbyteMessageConsumer() { - - @Override - public void start() throws Exception { - consumer.start(); - } - - @Override - public void accept(final AirbyteMessage message) throws Exception { - consumer.accept(message); - } - - @Override - public void close() throws Exception { - consumer.close(); - voidCallable.call(); - } - - }; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/AirbyteTraceMessageUtility.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/AirbyteTraceMessageUtility.java deleted file mode 100644 index d3513c336681f..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/AirbyteTraceMessageUtility.java +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.base; - -import io.airbyte.commons.stream.AirbyteStreamStatusHolder; -import io.airbyte.protocol.models.v0.AirbyteAnalyticsTraceMessage; -import io.airbyte.protocol.models.v0.AirbyteErrorTraceMessage; -import io.airbyte.protocol.models.v0.AirbyteErrorTraceMessage.FailureType; -import io.airbyte.protocol.models.v0.AirbyteEstimateTraceMessage; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteMessage.Type; -import io.airbyte.protocol.models.v0.AirbyteTraceMessage; -import java.time.Instant; -import java.util.function.Consumer; -import org.apache.commons.lang3.exception.ExceptionUtils; - -public final class AirbyteTraceMessageUtility { - - private AirbyteTraceMessageUtility() {} - - public static void emitSystemErrorTrace(final Throwable e, final String displayMessage) { - emitErrorTrace(e, displayMessage, FailureType.SYSTEM_ERROR); - } - - public static void emitConfigErrorTrace(final Throwable e, final String displayMessage) { - emitErrorTrace(e, displayMessage, FailureType.CONFIG_ERROR); - } - - public static void emitCustomErrorTrace(final String displayMessage, final String internalMessage) { - emitMessage(makeAirbyteMessageFromTraceMessage( - makeAirbyteTraceMessage(AirbyteTraceMessage.Type.ERROR) - .withError(new AirbyteErrorTraceMessage() - .withFailureType(FailureType.SYSTEM_ERROR) - .withMessage(displayMessage) - .withInternalMessage(internalMessage)))); - } - - public static void emitEstimateTrace(final long byteEstimate, - final AirbyteEstimateTraceMessage.Type type, - final long rowEstimate, - final String streamName, - final String streamNamespace) { - emitMessage(makeAirbyteMessageFromTraceMessage( - makeAirbyteTraceMessage(AirbyteTraceMessage.Type.ESTIMATE) - .withEstimate(new AirbyteEstimateTraceMessage() - .withByteEstimate(byteEstimate) - .withType(type) - .withRowEstimate(rowEstimate) - .withName(streamName) - .withNamespace(streamNamespace)))); - } - - public static void emitAnalyticsTrace(final AirbyteAnalyticsTraceMessage airbyteAnalyticsTraceMessage) { - emitMessage(makeAnalyticsTraceAirbyteMessage(airbyteAnalyticsTraceMessage)); - } - - public static void emitErrorTrace(final Throwable e, final String displayMessage, final FailureType failureType) { - emitMessage(makeErrorTraceAirbyteMessage(e, displayMessage, failureType)); - } - - public static void emitStreamStatusTrace(final AirbyteStreamStatusHolder airbyteStreamStatusHolder) { - emitMessage(makeStreamStatusTraceAirbyteMessage(airbyteStreamStatusHolder)); - } - - // todo: handle the other types of trace message we'll expect in the future, see - // io.airbyte.protocol.models.v0.AirbyteTraceMessage - // & the tech spec: - // https://docs.google.com/document/d/1ctrj3Yh_GjtQ93aND-WH3ocqGxsmxyC3jfiarrF6NY0/edit# - // public void emitNotificationTrace() {} - // public void emitMetricTrace() {} - - private static void emitMessage(final AirbyteMessage message) { - // Not sure why defaultOutputRecordCollector is under Destination specifically, - // but this matches usage elsewhere in base-java - final Consumer outputRecordCollector = Destination::defaultOutputRecordCollector; - outputRecordCollector.accept(message); - } - - private static AirbyteMessage makeErrorTraceAirbyteMessage( - final Throwable e, - final String displayMessage, - final FailureType failureType) { - - return makeAirbyteMessageFromTraceMessage( - makeAirbyteTraceMessage(AirbyteTraceMessage.Type.ERROR) - .withError(new AirbyteErrorTraceMessage() - .withFailureType(failureType) - .withMessage(displayMessage) - .withInternalMessage(e.toString()) - .withStackTrace(ExceptionUtils.getStackTrace(e)))); - } - - private static AirbyteMessage makeAnalyticsTraceAirbyteMessage(final AirbyteAnalyticsTraceMessage airbyteAnalyticsTraceMessage) { - return new AirbyteMessage().withType(Type.TRACE) - .withTrace(new AirbyteTraceMessage() - .withAnalytics(airbyteAnalyticsTraceMessage) - .withType(AirbyteTraceMessage.Type.ANALYTICS) - .withEmittedAt((double) Instant.now().toEpochMilli())); - } - - private static AirbyteMessage makeStreamStatusTraceAirbyteMessage(final AirbyteStreamStatusHolder airbyteStreamStatusHolder) { - return makeAirbyteMessageFromTraceMessage(airbyteStreamStatusHolder.toTraceMessage()); - } - - private static AirbyteMessage makeAirbyteMessageFromTraceMessage(final AirbyteTraceMessage airbyteTraceMessage) { - return new AirbyteMessage().withType(Type.TRACE).withTrace(airbyteTraceMessage); - } - - private static AirbyteTraceMessage makeAirbyteTraceMessage(final AirbyteTraceMessage.Type traceMessageType) { - return new AirbyteTraceMessage().withType(traceMessageType).withEmittedAt((double) System.currentTimeMillis()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/Command.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/Command.java deleted file mode 100644 index 6e5897db49cca..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/Command.java +++ /dev/null @@ -1,13 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.base; - -public enum Command { - SPEC, - CHECK, - DISCOVER, - READ, - WRITE -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/CommitOnStateAirbyteMessageConsumer.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/CommitOnStateAirbyteMessageConsumer.java deleted file mode 100644 index 47a0c0b048138..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/CommitOnStateAirbyteMessageConsumer.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.base; - -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteMessage.Type; -import java.util.function.Consumer; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Minimal abstract class intended to handle the case where the destination can commit records every - * time a state message appears. This class does that commit and then immediately emits the state - * message. This should only be used in cases when the commit is relatively cheap. immediately. - */ -public abstract class CommitOnStateAirbyteMessageConsumer extends FailureTrackingAirbyteMessageConsumer implements AirbyteMessageConsumer { - - private static final Logger LOGGER = LoggerFactory.getLogger(CommitOnStateAirbyteMessageConsumer.class); - - private final Consumer outputRecordCollector; - - public CommitOnStateAirbyteMessageConsumer(final Consumer outputRecordCollector) { - this.outputRecordCollector = outputRecordCollector; - } - - @Override - public void accept(final AirbyteMessage message) throws Exception { - if (message.getType() == Type.STATE) { - commit(); - outputRecordCollector.accept(message); - } - super.accept(message); - } - - public abstract void commit() throws Exception; - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/Destination.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/Destination.java deleted file mode 100644 index acd958a0323bb..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/Destination.java +++ /dev/null @@ -1,172 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.base; - -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonPropertyDescription; -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteMessage.Type; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.util.Optional; -import java.util.function.Consumer; -import lombok.extern.slf4j.Slf4j; - -public interface Destination extends Integration { - - /** - * Return a consumer that writes messages to the destination. - * - * @param config - integration-specific configuration object as json. e.g. { "username": "airbyte", - * "password": "super secure" } - * @param catalog - schema of the incoming messages. - * @return Consumer that accepts message. The {@link AirbyteMessageConsumer#accept(AirbyteMessage)} - * will be called n times where n is the number of messages. - * {@link AirbyteMessageConsumer#close()} will always be called once regardless of success - * or failure. - * @throws Exception - any exception. - */ - AirbyteMessageConsumer getConsumer(JsonNode config, - ConfiguredAirbyteCatalog catalog, - Consumer outputRecordCollector) - throws Exception; - - /** - * Default implementation allows us to not have to touch existing destinations while avoiding a lot - * of conditional statements in {@link IntegrationRunner}. This is preferred over #getConsumer and - * is the default Async Framework method. - * - * @param config config - * @param catalog catalog - * @param outputRecordCollector outputRecordCollector - * @return AirbyteMessageConsumer wrapped in SerializedAirbyteMessageConsumer to maintain legacy - * behavior. - * @throws Exception exception - */ - default SerializedAirbyteMessageConsumer getSerializedMessageConsumer(final JsonNode config, - final ConfiguredAirbyteCatalog catalog, - final Consumer outputRecordCollector) - throws Exception { - return new ShimToSerializedAirbyteMessageConsumer(getConsumer(config, catalog, outputRecordCollector)); - } - - static void defaultOutputRecordCollector(final AirbyteMessage message) { - System.out.println(Jsons.serialize(message)); - } - - /** - * Backwards-compatibility wrapper for an AirbyteMessageConsumer. Strips the sizeInBytes argument - * away from the .accept call. - */ - @Slf4j - class ShimToSerializedAirbyteMessageConsumer implements SerializedAirbyteMessageConsumer { - - private final AirbyteMessageConsumer consumer; - - public ShimToSerializedAirbyteMessageConsumer(final AirbyteMessageConsumer consumer) { - this.consumer = consumer; - } - - @Override - public void start() throws Exception { - consumer.start(); - } - - /** - * Consumes an {@link AirbyteMessage} for processing. - *

- * If the provided JSON string is invalid AND represents a {@link AirbyteMessage.Type#STATE} - * message, processing is halted. Otherwise, the invalid message is logged and execution continues. - * - * @param inputString JSON representation of an {@link AirbyteMessage}. - * @throws Exception if an invalid state message is provided or the consumer is unable to accept the - * provided message. - */ - @Override - public void accept(final String inputString, final Integer sizeInBytes) throws Exception { - consumeMessage(consumer, inputString); - } - - @Override - public void close() throws Exception { - consumer.close(); - } - - /** - * Consumes an {@link AirbyteMessage} for processing. - *

- * If the provided JSON string is invalid AND represents a {@link AirbyteMessage.Type#STATE} - * message, processing is halted. Otherwise, the invalid message is logged and execution continues. - * - * @param consumer An {@link AirbyteMessageConsumer} that can handle the provided message. - * @param inputString JSON representation of an {@link AirbyteMessage}. - * @throws Exception if an invalid state message is provided or the consumer is unable to accept the - * provided message. - */ - @VisibleForTesting - static void consumeMessage(final AirbyteMessageConsumer consumer, final String inputString) throws Exception { - - final Optional messageOptional = Jsons.tryDeserialize(inputString, AirbyteMessage.class); - if (messageOptional.isPresent()) { - consumer.accept(messageOptional.get()); - } else { - if (isStateMessage(inputString)) { - throw new IllegalStateException("Invalid state message: " + inputString); - } else { - log.error("Received invalid message: " + inputString); - } - } - } - - /** - * Tests whether the provided JSON string represents a state message. - * - * @param input a JSON string that represents an {@link AirbyteMessage}. - * @return {@code true} if the message is a state message, {@code false} otherwise. - */ - @SuppressWarnings("OptionalIsPresent") - private static boolean isStateMessage(final String input) { - final Optional deserialized = Jsons.tryDeserialize(input, AirbyteTypeMessage.class); - if (deserialized.isPresent()) { - return deserialized.get().getType() == Type.STATE; - } else { - return false; - } - } - - /** - * Custom class for parsing a JSON message to determine the type of the represented - * {@link AirbyteMessage}. Do the bare minimum deserialisation by reading only the type field. - */ - private static class AirbyteTypeMessage { - - @JsonProperty("type") - @JsonPropertyDescription("Message type") - private AirbyteMessage.Type type; - - @JsonProperty("type") - public AirbyteMessage.Type getType() { - return type; - } - - @JsonProperty("type") - public void setType(final AirbyteMessage.Type type) { - this.type = type; - } - - } - - } - - /** - * Denotes if the destination fully supports Destinations V2. - */ - default boolean isV2Destination() { - return false; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/DestinationConfig.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/DestinationConfig.java deleted file mode 100644 index b2f9221253351..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/DestinationConfig.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.base; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Singleton of destination config for easy lookup of values. - */ -public class DestinationConfig { - - private static final Logger LOGGER = LoggerFactory.getLogger(DestinationConfig.class); - - private static DestinationConfig config; - - // whether the destination fully supports Destinations V2 - private boolean isV2Destination; - - @VisibleForTesting - protected JsonNode root; - - private DestinationConfig() {} - - @VisibleForTesting - public static void initialize(final JsonNode root) { - initialize(root, false); - } - - public static void initialize(final JsonNode root, final boolean isV2Destination) { - if (config == null) { - if (root == null) { - throw new IllegalArgumentException("Cannot create DestinationConfig from null."); - } - config = new DestinationConfig(); - config.root = root; - config.isV2Destination = isV2Destination; - } else { - LOGGER.warn("Singleton was already initialized."); - } - } - - public static DestinationConfig getInstance() { - if (config == null) { - throw new IllegalStateException("Singleton not initialized."); - } - return config; - } - - @VisibleForTesting - public static void clearInstance() { - config = null; - } - - public JsonNode getNodeValue(final String key) { - final JsonNode node = config.root.get(key); - if (node == null) { - LOGGER.debug("Cannot find node with key {} ", key); - } - return node; - } - - // string value, otherwise empty string - public String getTextValue(final String key) { - final JsonNode node = getNodeValue(key); - if (node == null || !node.isTextual()) { - LOGGER.debug("Cannot retrieve text value for node with key {}", key); - return ""; - } - return node.asText(); - } - - // boolean value, otherwise false - public boolean getBooleanValue(final String key) { - final JsonNode node = getNodeValue(key); - if (node == null || !node.isBoolean()) { - LOGGER.debug("Cannot retrieve boolean value for node with key {}", key); - return false; - } - return node.asBoolean(); - } - - public boolean getIsV2Destination() { - return isV2Destination; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/FailureTrackingAirbyteMessageConsumer.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/FailureTrackingAirbyteMessageConsumer.java deleted file mode 100644 index 39b1d87e66891..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/FailureTrackingAirbyteMessageConsumer.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.base; - -import io.airbyte.protocol.models.v0.AirbyteMessage; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Minimal abstract class intended to provide a consistent structure to classes seeking to implement - * the {@link AirbyteMessageConsumer} interface. The original interface methods are wrapped in - * generic exception handlers - any exception is caught and logged. - * - * Two methods are intended for extension: - *

    - *
  • startTracked: Wraps set up of necessary infrastructure/configuration before message - * consumption.
  • - *
  • acceptTracked: Wraps actual processing of each - * {@link io.airbyte.protocol.models.v0.AirbyteMessage}.
  • - *
- * - * Though not necessary, we highly encourage using this class when implementing destinations. See - * child classes for examples. - */ -public abstract class FailureTrackingAirbyteMessageConsumer implements AirbyteMessageConsumer { - - private static final Logger LOGGER = LoggerFactory.getLogger(FailureTrackingAirbyteMessageConsumer.class); - - private boolean hasFailed = false; - - /** - * Wraps setup of necessary infrastructure/configuration before message consumption - * - * @throws Exception - */ - protected abstract void startTracked() throws Exception; - - @Override - public void start() throws Exception { - try { - startTracked(); - } catch (final Exception e) { - LOGGER.error("Exception while starting consumer", e); - hasFailed = true; - throw e; - } - } - - /** - * Processing of AirbyteMessages with general functionality of storing STATE messages, serializing - * RECORD messages and storage within a buffer - * - * NOTE: Not all the functionality mentioned above is always true but generally applies - * - * @param msg {@link AirbyteMessage} to be processed - * @throws Exception - */ - protected abstract void acceptTracked(AirbyteMessage msg) throws Exception; - - @Override - public void accept(final AirbyteMessage msg) throws Exception { - try { - acceptTracked(msg); - } catch (final Exception e) { - LOGGER.error("Exception while accepting message", e); - hasFailed = true; - throw e; - } - } - - protected abstract void close(boolean hasFailed) throws Exception; - - @Override - public void close() throws Exception { - if (hasFailed) { - LOGGER.warn("Airbyte message consumer: failed."); - } else { - LOGGER.info("Airbyte message consumer: succeeded."); - } - close(hasFailed); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/Integration.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/Integration.java deleted file mode 100644 index ae3b15eb8acfd..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/Integration.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.base; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; -import io.airbyte.protocol.models.v0.ConnectorSpecification; - -public interface Integration { - - /** - * Fetch the specification for the integration. - * - * @return specification. - * @throws Exception - any exception. - */ - ConnectorSpecification spec() throws Exception; - - /** - * Check whether, given the current configuration, the integration can connect to the integration. - * - * @param config - integration-specific configuration object as json. e.g. { "username": "airbyte", - * "password": "super secure" } - * @return Whether or not the connection was successful. Optional message if it was not. - * @throws Exception - any exception. - */ - AirbyteConnectionStatus check(JsonNode config) throws Exception; - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/IntegrationCliParser.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/IntegrationCliParser.java deleted file mode 100644 index 12bfd9009c47e..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/IntegrationCliParser.java +++ /dev/null @@ -1,153 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.base; - -import com.google.common.base.Preconditions; -import io.airbyte.commons.cli.Clis; -import java.nio.file.Path; -import java.util.HashMap; -import java.util.Map; -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.Option; -import org.apache.commons.cli.OptionGroup; -import org.apache.commons.cli.Options; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -// todo (cgardens) - use argparse4j.github.io instead of org.apache.commons.cli to leverage better -// sub-parser support. - -/** - * Parses command line args to a type safe config object for each command type. - */ -public class IntegrationCliParser { - - private static final Logger LOGGER = LoggerFactory.getLogger(IntegrationCliParser.class); - - private static final OptionGroup COMMAND_GROUP; - - static { - final var optionGroup = new OptionGroup(); - optionGroup.setRequired(true); - - optionGroup.addOption(Option.builder() - .longOpt(Command.SPEC.toString().toLowerCase()) - .desc("outputs the json configuration specification") - .build()); - optionGroup.addOption(Option.builder() - .longOpt(Command.CHECK.toString().toLowerCase()) - .desc("checks the config can be used to connect") - .build()); - optionGroup.addOption(Option.builder() - .longOpt(Command.DISCOVER.toString().toLowerCase()) - .desc("outputs a catalog describing the source's catalog") - .build()); - optionGroup.addOption(Option.builder() - .longOpt(Command.READ.toString().toLowerCase()) - .desc("reads the source and outputs messages to STDOUT") - .build()); - optionGroup.addOption(Option.builder() - .longOpt(Command.WRITE.toString().toLowerCase()) - .desc("writes messages from STDIN to the integration") - .build()); - - COMMAND_GROUP = optionGroup; - } - - public IntegrationConfig parse(final String[] args) { - final Command command = parseCommand(args); - return parseOptions(args, command); - } - - private static Command parseCommand(final String[] args) { - final Options options = new Options(); - options.addOptionGroup(COMMAND_GROUP); - - final CommandLine parsed = Clis.parse(args, options, Clis.getRelaxedParser()); - return Command.valueOf(parsed.getOptions()[0].getLongOpt().toUpperCase()); - } - - private static IntegrationConfig parseOptions(final String[] args, final Command command) { - - final Options options = new Options(); - options.addOptionGroup(COMMAND_GROUP); // so that the parser does not throw an exception when encounter command args. - - switch (command) { - case SPEC -> { - // no args. - } - case CHECK, DISCOVER -> options.addOption(Option.builder() - .longOpt(JavaBaseConstants.ARGS_CONFIG_KEY) - .desc(JavaBaseConstants.ARGS_CONFIG_DESC) - .hasArg(true) - .required(true) - .build()); - case READ -> { - options.addOption(Option.builder() - .longOpt(JavaBaseConstants.ARGS_CONFIG_KEY) - .desc(JavaBaseConstants.ARGS_CONFIG_DESC) - .hasArg(true) - .required(true) - .build()); - options.addOption(Option.builder() - .longOpt(JavaBaseConstants.ARGS_CATALOG_KEY) - .desc(JavaBaseConstants.ARGS_CATALOG_DESC) - .hasArg(true) - .build()); - options.addOption(Option.builder() - .longOpt(JavaBaseConstants.ARGS_STATE_KEY) - .desc(JavaBaseConstants.ARGS_PATH_DESC) - .hasArg(true) - .build()); - } - case WRITE -> { - options.addOption(Option.builder() - .longOpt(JavaBaseConstants.ARGS_CONFIG_KEY) - .desc(JavaBaseConstants.ARGS_CONFIG_DESC) - .hasArg(true) - .required(true).build()); - options.addOption(Option.builder() - .longOpt(JavaBaseConstants.ARGS_CATALOG_KEY) - .desc(JavaBaseConstants.ARGS_CATALOG_DESC) - .hasArg(true) - .build()); - } - default -> throw new IllegalStateException("Unexpected value: " + command); - } - - final CommandLine parsed = Clis.parse(args, options, command.toString().toLowerCase()); - Preconditions.checkNotNull(parsed); - final Map argsMap = new HashMap<>(); - for (final Option option : parsed.getOptions()) { - argsMap.put(option.getLongOpt(), option.getValue()); - } - LOGGER.info("integration args: {}", argsMap); - - switch (command) { - case SPEC -> { - return IntegrationConfig.spec(); - } - case CHECK -> { - return IntegrationConfig.check(Path.of(argsMap.get(JavaBaseConstants.ARGS_CONFIG_KEY))); - } - case DISCOVER -> { - return IntegrationConfig.discover(Path.of(argsMap.get(JavaBaseConstants.ARGS_CONFIG_KEY))); - } - case READ -> { - return IntegrationConfig.read( - Path.of(argsMap.get(JavaBaseConstants.ARGS_CONFIG_KEY)), - Path.of(argsMap.get(JavaBaseConstants.ARGS_CATALOG_KEY)), - argsMap.containsKey(JavaBaseConstants.ARGS_STATE_KEY) ? Path.of(argsMap.get(JavaBaseConstants.ARGS_STATE_KEY)) : null); - } - case WRITE -> { - return IntegrationConfig.write( - Path.of(argsMap.get(JavaBaseConstants.ARGS_CONFIG_KEY)), - Path.of(argsMap.get(JavaBaseConstants.ARGS_CATALOG_KEY))); - } - default -> throw new IllegalStateException("Unexpected value: " + command); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/IntegrationConfig.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/IntegrationConfig.java deleted file mode 100644 index bbce8342419f7..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/IntegrationConfig.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.base; - -import com.google.common.base.Preconditions; -import java.nio.file.Path; -import java.util.Objects; -import java.util.Optional; - -public class IntegrationConfig { - - private final Command command; - private final Path configPath; - private final Path catalogPath; - private final Path statePath; - - private IntegrationConfig(final Command command, final Path configPath, final Path catalogPath, final Path statePath) { - this.command = command; - this.configPath = configPath; - this.catalogPath = catalogPath; - this.statePath = statePath; - } - - public static IntegrationConfig spec() { - return new IntegrationConfig(Command.SPEC, null, null, null); - } - - public static IntegrationConfig check(final Path config) { - Preconditions.checkNotNull(config); - return new IntegrationConfig(Command.CHECK, config, null, null); - } - - public static IntegrationConfig discover(final Path config) { - Preconditions.checkNotNull(config); - return new IntegrationConfig(Command.DISCOVER, config, null, null); - } - - public static IntegrationConfig read(final Path configPath, final Path catalogPath, final Path statePath) { - Preconditions.checkNotNull(configPath); - Preconditions.checkNotNull(catalogPath); - return new IntegrationConfig(Command.READ, configPath, catalogPath, statePath); - } - - public static IntegrationConfig write(final Path configPath, final Path catalogPath) { - Preconditions.checkNotNull(configPath); - Preconditions.checkNotNull(catalogPath); - return new IntegrationConfig(Command.WRITE, configPath, catalogPath, null); - } - - public Command getCommand() { - return command; - } - - public Path getConfigPath() { - Preconditions.checkState(command != Command.SPEC); - return configPath; - } - - public Path getCatalogPath() { - Preconditions.checkState(command == Command.READ || command == Command.WRITE); - return catalogPath; - } - - public Optional getStatePath() { - Preconditions.checkState(command == Command.READ); - return Optional.ofNullable(statePath); - } - - @Override - public String toString() { - return "IntegrationConfig{" + - "command=" + command + - ", configPath='" + configPath + '\'' + - ", catalogPath='" + catalogPath + '\'' + - ", statePath='" + statePath + '\'' + - '}'; - } - - @Override - public boolean equals(final Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - final IntegrationConfig that = (IntegrationConfig) o; - return command == that.command && - Objects.equals(configPath, that.configPath) && - Objects.equals(catalogPath, that.catalogPath) && - Objects.equals(statePath, that.statePath); - } - - @Override - public int hashCode() { - return Objects.hash(command, configPath, catalogPath, statePath); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/IntegrationRunner.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/IntegrationRunner.java deleted file mode 100644 index 8fd71f3875b27..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/IntegrationRunner.java +++ /dev/null @@ -1,435 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.base; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; -import com.google.common.collect.Lists; -import datadog.trace.api.Trace; -import io.airbyte.cdk.integrations.util.ApmTraceUtils; -import io.airbyte.cdk.integrations.util.ConnectorExceptionUtil; -import io.airbyte.cdk.integrations.util.concurrent.ConcurrentStreamConsumer; -import io.airbyte.commons.features.EnvVariableFeatureFlags; -import io.airbyte.commons.features.FeatureFlags; -import io.airbyte.commons.io.IOs; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.stream.StreamStatusUtils; -import io.airbyte.commons.string.Strings; -import io.airbyte.commons.util.AutoCloseableIterator; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteMessage.Type; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.validation.json.JsonSchemaValidator; -import java.io.BufferedInputStream; -import java.io.ByteArrayOutputStream; -import java.nio.charset.StandardCharsets; -import java.nio.file.Path; -import java.util.Collection; -import java.util.List; -import java.util.Optional; -import java.util.Set; -import java.util.concurrent.Executors; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; -import java.util.function.Consumer; -import java.util.function.Predicate; -import java.util.stream.Collectors; -import org.apache.commons.lang3.ThreadUtils; -import org.apache.commons.lang3.concurrent.BasicThreadFactory; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Accepts EITHER a destination or a source. Routes commands from the commandline to the appropriate - * methods on the integration. Keeps itself DRY for methods that are common between source and - * destination. - */ -public class IntegrationRunner { - - private static final Logger LOGGER = LoggerFactory.getLogger(IntegrationRunner.class); - - public static final String TYPE_AND_DEDUPE_THREAD_NAME = "type-and-dedupe"; - - /** - * Filters threads that should not be considered when looking for orphaned threads at shutdown of - * the integration runner. - *

- *

- * N.B. Daemon threads don't block the JVM if the main `currentThread` exits, so they are not - * problematic. Additionally, ignore database connection pool threads, which stay active so long as - * the database connection pool is open. - */ - @VisibleForTesting - static final Predicate ORPHANED_THREAD_FILTER = runningThread -> !runningThread.getName().equals(Thread.currentThread().getName()) - && !runningThread.isDaemon() && !TYPE_AND_DEDUPE_THREAD_NAME.equals(runningThread.getName()); - - public static final int INTERRUPT_THREAD_DELAY_MINUTES = 1; - public static final int EXIT_THREAD_DELAY_MINUTES = 2; - - public static final int FORCED_EXIT_CODE = 2; - - private static final Runnable EXIT_HOOK = () -> System.exit(FORCED_EXIT_CODE); - - private final IntegrationCliParser cliParser; - private final Consumer outputRecordCollector; - private final Integration integration; - private final Destination destination; - private final Source source; - private final FeatureFlags featureFlags; - private static JsonSchemaValidator validator; - - public IntegrationRunner(final Destination destination) { - this(new IntegrationCliParser(), Destination::defaultOutputRecordCollector, destination, null); - } - - public IntegrationRunner(final Source source) { - this(new IntegrationCliParser(), Destination::defaultOutputRecordCollector, null, source); - } - - @VisibleForTesting - IntegrationRunner(final IntegrationCliParser cliParser, - final Consumer outputRecordCollector, - final Destination destination, - final Source source) { - Preconditions.checkState(destination != null ^ source != null, "can only pass in a destination or a source"); - this.cliParser = cliParser; - this.outputRecordCollector = outputRecordCollector; - // integration iface covers the commands that are the same for both source and destination. - integration = source != null ? source : destination; - this.source = source; - this.destination = destination; - this.featureFlags = new EnvVariableFeatureFlags(); - validator = new JsonSchemaValidator(); - - Thread.setDefaultUncaughtExceptionHandler(new AirbyteExceptionHandler()); - } - - @VisibleForTesting - IntegrationRunner(final IntegrationCliParser cliParser, - final Consumer outputRecordCollector, - final Destination destination, - final Source source, - final JsonSchemaValidator jsonSchemaValidator) { - this(cliParser, outputRecordCollector, destination, source); - validator = jsonSchemaValidator; - } - - @Trace(operationName = "RUN_OPERATION") - public void run(final String[] args) throws Exception { - final IntegrationConfig parsed = cliParser.parse(args); - try { - runInternal(parsed); - } catch (final Exception e) { - throw e; - } - } - - private void runInternal(final IntegrationConfig parsed) throws Exception { - LOGGER.info("Running integration: {}", integration.getClass().getName()); - LOGGER.info("Command: {}", parsed.getCommand()); - LOGGER.info("Integration config: {}", parsed); - - try { - switch (parsed.getCommand()) { - // common - case SPEC -> outputRecordCollector.accept(new AirbyteMessage().withType(Type.SPEC).withSpec(integration.spec())); - case CHECK -> { - final JsonNode config = parseConfig(parsed.getConfigPath()); - if (integration instanceof Destination) { - DestinationConfig.initialize(config, ((Destination) integration).isV2Destination()); - } - try { - validateConfig(integration.spec().getConnectionSpecification(), config, "CHECK"); - } catch (final Exception e) { - // if validation fails don't throw an exception, return a failed connection check message - outputRecordCollector.accept(new AirbyteMessage().withType(Type.CONNECTION_STATUS).withConnectionStatus( - new AirbyteConnectionStatus().withStatus(AirbyteConnectionStatus.Status.FAILED).withMessage(e.getMessage()))); - } - - outputRecordCollector.accept(new AirbyteMessage().withType(Type.CONNECTION_STATUS).withConnectionStatus(integration.check(config))); - } - // source only - case DISCOVER -> { - final JsonNode config = parseConfig(parsed.getConfigPath()); - validateConfig(integration.spec().getConnectionSpecification(), config, "DISCOVER"); - outputRecordCollector.accept(new AirbyteMessage().withType(Type.CATALOG).withCatalog(source.discover(config))); - } - // todo (cgardens) - it is incongruous that that read and write return airbyte message (the - // envelope) while the other commands return what goes inside it. - case READ -> { - final JsonNode config = parseConfig(parsed.getConfigPath()); - validateConfig(integration.spec().getConnectionSpecification(), config, "READ"); - final ConfiguredAirbyteCatalog catalog = parseConfig(parsed.getCatalogPath(), ConfiguredAirbyteCatalog.class); - final Optional stateOptional = parsed.getStatePath().map(IntegrationRunner::parseConfig); - try { - if (featureFlags.concurrentSourceStreamRead()) { - LOGGER.info("Concurrent source stream read enabled."); - readConcurrent(config, catalog, stateOptional); - } else { - readSerial(config, catalog, stateOptional); - } - } finally { - if (source instanceof AutoCloseable) { - ((AutoCloseable) source).close(); - } - } - } - // destination only - case WRITE -> { - final JsonNode config = parseConfig(parsed.getConfigPath()); - validateConfig(integration.spec().getConnectionSpecification(), config, "WRITE"); - // save config to singleton - DestinationConfig.initialize(config, ((Destination) integration).isV2Destination()); - final ConfiguredAirbyteCatalog catalog = parseConfig(parsed.getCatalogPath(), ConfiguredAirbyteCatalog.class); - - try (final SerializedAirbyteMessageConsumer consumer = destination.getSerializedMessageConsumer(config, catalog, outputRecordCollector)) { - consumeWriteStream(consumer); - } finally { - stopOrphanedThreads(); - } - } - default -> throw new IllegalStateException("Unexpected value: " + parsed.getCommand()); - } - } catch (final Exception e) { - // Many of the exceptions thrown are nested inside layers of RuntimeExceptions. An attempt is made - // to - // find the root exception that corresponds to a configuration error. If that does not exist, we - // just return the original exception. - ApmTraceUtils.addExceptionToTrace(e); - final Throwable rootThrowable = ConnectorExceptionUtil.getRootConfigError(e); - final String displayMessage = ConnectorExceptionUtil.getDisplayMessage(rootThrowable); - // If the source connector throws a config error, a trace message with the relevant message should - // be surfaced. - if (ConnectorExceptionUtil.isConfigError(rootThrowable)) { - AirbyteTraceMessageUtility.emitConfigErrorTrace(e, displayMessage); - } - if (parsed.getCommand().equals(Command.CHECK)) { - // Currently, special handling is required for the CHECK case since the user display information in - // the trace message is - // not properly surfaced to the FE. In the future, we can remove this and just throw an exception. - outputRecordCollector - .accept( - new AirbyteMessage() - .withType(Type.CONNECTION_STATUS) - .withConnectionStatus( - new AirbyteConnectionStatus() - .withStatus(AirbyteConnectionStatus.Status.FAILED) - .withMessage(displayMessage))); - return; - } - throw e; - } - - LOGGER.info("Completed integration: {}", integration.getClass().getName()); - } - - private void produceMessages(final AutoCloseableIterator messageIterator, final Consumer recordCollector) { - messageIterator.getAirbyteStream().ifPresent(s -> LOGGER.debug("Producing messages for stream {}...", s)); - messageIterator.forEachRemaining(recordCollector); - messageIterator.getAirbyteStream().ifPresent(s -> LOGGER.debug("Finished producing messages for stream {}...")); - } - - private void readConcurrent(final JsonNode config, final ConfiguredAirbyteCatalog catalog, final Optional stateOptional) - throws Exception { - final Collection> streams = source.readStreams(config, catalog, stateOptional.orElse(null)); - - try (final ConcurrentStreamConsumer streamConsumer = new ConcurrentStreamConsumer(this::consumeFromStream, streams.size())) { - /* - * Break the streams into partitions equal to the number of concurrent streams supported by the - * stream consumer. - */ - final Integer partitionSize = streamConsumer.getParallelism(); - final List>> partitions = Lists.partition(streams.stream().toList(), - partitionSize); - - // Submit each stream partition for concurrent execution - partitions.forEach(partition -> { - streamConsumer.accept(partition); - }); - - // Check for any exceptions that were raised during the concurrent execution - if (streamConsumer.getException().isPresent()) { - throw streamConsumer.getException().get(); - } - } catch (final Exception e) { - LOGGER.error("Unable to perform concurrent read.", e); - throw e; - } finally { - stopOrphanedThreads(); - } - } - - private void readSerial(final JsonNode config, final ConfiguredAirbyteCatalog catalog, final Optional stateOptional) throws Exception { - try (final AutoCloseableIterator messageIterator = source.read(config, catalog, stateOptional.orElse(null))) { - produceMessages(messageIterator, outputRecordCollector); - } finally { - stopOrphanedThreads(); - } - } - - private void consumeFromStream(final AutoCloseableIterator stream) { - try { - final Consumer streamStatusTrackingRecordConsumer = StreamStatusUtils.statusTrackingRecordCollector(stream, - outputRecordCollector, Optional.of(AirbyteTraceMessageUtility::emitStreamStatusTrace)); - produceMessages(stream, streamStatusTrackingRecordConsumer); - } catch (final Exception e) { - stream.getAirbyteStream().ifPresent(s -> LOGGER.error("Failed to consume from stream {}.", s, e)); - throw new RuntimeException(e); - } - } - - @VisibleForTesting - static void consumeWriteStream(final SerializedAirbyteMessageConsumer consumer) throws Exception { - try (final BufferedInputStream bis = new BufferedInputStream(System.in); - final ByteArrayOutputStream baos = new ByteArrayOutputStream()) { - consumeWriteStream(consumer, bis, baos); - } - } - - @VisibleForTesting - static void consumeWriteStream(final SerializedAirbyteMessageConsumer consumer, - final BufferedInputStream bis, - final ByteArrayOutputStream baos) - throws Exception { - consumer.start(); - - final byte[] buffer = new byte[8192]; // 8K buffer - int bytesRead; - boolean lastWasNewLine = false; - - while ((bytesRead = bis.read(buffer)) != -1) { - for (int i = 0; i < bytesRead; i++) { - final byte b = buffer[i]; - if (b == '\n' || b == '\r') { - if (!lastWasNewLine && baos.size() > 0) { - consumer.accept(baos.toString(StandardCharsets.UTF_8), baos.size()); - baos.reset(); - } - lastWasNewLine = true; - } else { - baos.write(b); - lastWasNewLine = false; - } - } - } - - // Handle last line if there's one - if (baos.size() > 0) { - consumer.accept(baos.toString(StandardCharsets.UTF_8), baos.size()); - } - } - - /** - * Stops any non-daemon threads that could block the JVM from exiting when the main thread is done. - * - * If any active non-daemon threads would be left as orphans, this method will schedule some - * interrupt/exit hooks after giving it some time delay to close up properly. It is generally - * preferred to have a proper closing sequence from children threads instead of interrupting or - * force exiting the process, so this mechanism serve as a fallback while surfacing warnings in logs - * for maintainers to fix the code behavior instead. - */ - static void stopOrphanedThreads() { - stopOrphanedThreads(EXIT_HOOK, - INTERRUPT_THREAD_DELAY_MINUTES, - TimeUnit.MINUTES, - EXIT_THREAD_DELAY_MINUTES, - TimeUnit.MINUTES); - } - - /** - * Stops any non-daemon threads that could block the JVM from exiting when the main thread is done. - *

- * If any active non-daemon threads would be left as orphans, this method will schedule some - * interrupt/exit hooks after giving it some time delay to close up properly. It is generally - * preferred to have a proper closing sequence from children threads instead of interrupting or - * force exiting the process, so this mechanism serve as a fallback while surfacing warnings in logs - * for maintainers to fix the code behavior instead. - *

- * - * @param exitHook The {@link Runnable} exit hook to execute for any orphaned threads. - * @param interruptTimeDelay The time to delay execution of the orphaned thread interrupt attempt. - * @param interruptTimeUnit The time unit of the interrupt delay. - * @param exitTimeDelay The time to delay execution of the orphaned thread exit hook. - * @param exitTimeUnit The time unit of the exit delay. - */ - @VisibleForTesting - static void stopOrphanedThreads(final Runnable exitHook, - final int interruptTimeDelay, - final TimeUnit interruptTimeUnit, - final int exitTimeDelay, - final TimeUnit exitTimeUnit) { - final Thread currentThread = Thread.currentThread(); - - final List runningThreads = ThreadUtils.getAllThreads() - .stream() - .filter(ORPHANED_THREAD_FILTER) - .collect(Collectors.toList()); - if (!runningThreads.isEmpty()) { - LOGGER.warn(""" - The main thread is exiting while children non-daemon threads from a connector are still active. - Ideally, this situation should not happen... - Please check with maintainers if the connector or library code should safely clean up its threads before quitting instead. - The main thread is: {}""", dumpThread(currentThread)); - final ScheduledExecutorService scheduledExecutorService = Executors - .newSingleThreadScheduledExecutor(new BasicThreadFactory.Builder() - // this thread executor will create daemon threads, so it does not block exiting if all other active - // threads are already stopped. - .daemon(true).build()); - for (final Thread runningThread : runningThreads) { - final String str = "Active non-daemon thread: " + dumpThread(runningThread); - LOGGER.warn(str); - // even though the main thread is already shutting down, we still leave some chances to the children - // threads to close properly on their own. - // So, we schedule an interrupt hook after a fixed time delay instead... - scheduledExecutorService.schedule(runningThread::interrupt, interruptTimeDelay, interruptTimeUnit); - } - scheduledExecutorService.schedule(() -> { - if (ThreadUtils.getAllThreads().stream() - .anyMatch(runningThread -> !runningThread.isDaemon() && !runningThread.getName().equals(currentThread.getName()))) { - LOGGER.error("Failed to interrupt children non-daemon threads, forcefully exiting NOW...\n"); - exitHook.run(); - } - }, exitTimeDelay, exitTimeUnit); - } - } - - private static String dumpThread(final Thread thread) { - return String.format("%s (%s)\n Thread stacktrace: %s", thread.getName(), thread.getState(), - Strings.join(List.of(thread.getStackTrace()), "\n at ")); - } - - private static void validateConfig(final JsonNode schemaJson, final JsonNode objectJson, final String operationType) throws Exception { - final Set validationResult = validator.validate(schemaJson, objectJson); - if (!validationResult.isEmpty()) { - throw new Exception(String.format("Verification error(s) occurred for %s. Errors: %s ", - operationType, validationResult)); - } - } - - public static JsonNode parseConfig(final Path path) { - return Jsons.deserialize(IOs.readFile(path)); - } - - private static T parseConfig(final Path path, final Class klass) { - final JsonNode jsonNode = parseConfig(path); - return Jsons.object(jsonNode, klass); - } - - /** - * @param connectorImage Expected format: [organization/]image[:version] - */ - @VisibleForTesting - static String parseConnectorVersion(final String connectorImage) { - if (connectorImage == null || connectorImage.equals("")) { - return "unknown"; - } - - final String[] tokens = connectorImage.split(":"); - return tokens[tokens.length - 1]; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/JavaBaseConstants.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/JavaBaseConstants.java deleted file mode 100644 index 5001d6119e7aa..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/JavaBaseConstants.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.base; - -import java.util.List; - -public final class JavaBaseConstants { - - private JavaBaseConstants() {} - - public static final String ARGS_CONFIG_KEY = "config"; - public static final String ARGS_CATALOG_KEY = "catalog"; - public static final String ARGS_STATE_KEY = "state"; - - public static final String ARGS_CONFIG_DESC = "path to the json configuration file"; - public static final String ARGS_CATALOG_DESC = "input path for the catalog"; - public static final String ARGS_PATH_DESC = "path to the json-encoded state file"; - - public static final String COLUMN_NAME_AB_ID = "_airbyte_ab_id"; - public static final String COLUMN_NAME_EMITTED_AT = "_airbyte_emitted_at"; - public static final String COLUMN_NAME_DATA = "_airbyte_data"; - public static final List LEGACY_RAW_TABLE_COLUMNS = List.of( - COLUMN_NAME_AB_ID, - COLUMN_NAME_DATA, - COLUMN_NAME_EMITTED_AT); - - // destination v2 - public static final String COLUMN_NAME_AB_RAW_ID = "_airbyte_raw_id"; - public static final String COLUMN_NAME_AB_LOADED_AT = "_airbyte_loaded_at"; - public static final String COLUMN_NAME_AB_EXTRACTED_AT = "_airbyte_extracted_at"; - public static final String COLUMN_NAME_AB_META = "_airbyte_meta"; - public static final List V2_RAW_TABLE_COLUMN_NAMES = List.of( - COLUMN_NAME_AB_RAW_ID, - COLUMN_NAME_AB_EXTRACTED_AT, - COLUMN_NAME_AB_LOADED_AT, - COLUMN_NAME_DATA); - public static final List V2_FINAL_TABLE_METADATA_COLUMNS = List.of( - COLUMN_NAME_AB_RAW_ID, - COLUMN_NAME_AB_EXTRACTED_AT, - COLUMN_NAME_AB_META); - - public static final String DEFAULT_AIRBYTE_INTERNAL_NAMESPACE = "airbyte_internal"; - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/SerializedAirbyteMessageConsumer.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/SerializedAirbyteMessageConsumer.java deleted file mode 100644 index 60eccd4e449e6..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/SerializedAirbyteMessageConsumer.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.base; - -import io.airbyte.commons.concurrency.VoidCallable; -import io.airbyte.commons.functional.CheckedBiConsumer; -import io.airbyte.protocol.models.v0.AirbyteMessage; - -/** - * Interface for the destination's consumption of incoming messages as strings. This interface is - * backwards compatible with {@link AirbyteMessageConsumer}. - *

- * This is via the accept method, which commonly handles parsing, validation, batching and writing - * of the transformed data to the final destination i.e. the technical system data is being written - * to. - *

- * Lifecycle: - *

    - *
  • 1. Instantiate consumer.
  • - *
  • 2. start() to initialize any resources that need to be created BEFORE the consumer consumes - * any messages.
  • - *
  • 3. Consumes ALL records via - * {@link SerializedAirbyteMessageConsumer#accept(String, Integer)}
  • - *
  • 4. Always (on success or failure) finalize by calling - * {@link SerializedAirbyteMessageConsumer#close()}
  • - *
- */ -public interface SerializedAirbyteMessageConsumer extends CheckedBiConsumer, AutoCloseable { - - /** - * Initialize anything needed for the consumer. Must be called before accept. - * - * @throws Exception exception - */ - void start() throws Exception; - - /** - * Consumes all {@link AirbyteMessage}s - * - * @param message {@link AirbyteMessage} as a string - * @param sizeInBytes size of that string in bytes - * @throws Exception exception - */ - @Override - void accept(String message, Integer sizeInBytes) throws Exception; - - /** - * Executes at the end of consumption of all incoming streamed data regardless of success or failure - * - * @throws Exception exception - */ - @Override - void close() throws Exception; - - /** - * Append a function to be called on {@link SerializedAirbyteMessageConsumer#close}. - */ - static SerializedAirbyteMessageConsumer appendOnClose(final SerializedAirbyteMessageConsumer consumer, final VoidCallable voidCallable) { - return new SerializedAirbyteMessageConsumer() { - - @Override - public void start() throws Exception { - consumer.start(); - } - - @Override - public void accept(final String message, final Integer sizeInBytes) throws Exception { - consumer.accept(message, sizeInBytes); - } - - @Override - public void close() throws Exception { - consumer.close(); - voidCallable.call(); - } - - }; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/Source.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/Source.java deleted file mode 100644 index 1f092a3b16dd8..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/Source.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.base; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.util.AutoCloseableIterator; -import io.airbyte.protocol.models.v0.AirbyteCatalog; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.util.Collection; -import java.util.List; - -public interface Source extends Integration { - - /** - * Discover the current schema in the source. - * - * @param config - integration-specific configuration object as json. e.g. { "username": "airbyte", - * "password": "super secure" } - * @return Description of the schema. - * @throws Exception - any exception. - */ - AirbyteCatalog discover(JsonNode config) throws Exception; - - /** - * Return a iterator of messages pulled from the source. - * - * @param config - integration-specific configuration object as json. e.g. { "username": "airbyte", - * "password": "super secure" } - * @param catalog - schema of the incoming messages. - * @param state - state of the incoming messages. - * @return {@link AutoCloseableIterator} that produces message. The iterator will be consumed until - * no records remain or until an exception is thrown. {@link AutoCloseableIterator#close()} - * will always be called once regardless of success or failure. - * @throws Exception - any exception. - */ - AutoCloseableIterator read(JsonNode config, ConfiguredAirbyteCatalog catalog, JsonNode state) throws Exception; - - /** - * Returns a collection of iterators of messages pulled from the source, each representing a - * "stream". - * - * @param config - integration-specific configuration object as json. e.g. { "username": "airbyte", - * "password": "super secure" } - * @param catalog - schema of the incoming messages. - * @param state - state of the incoming messages. - * @return The collection of {@link AutoCloseableIterator} instances that produce messages for each - * configured "stream" - * @throws Exception - any exception - */ - default Collection> readStreams(JsonNode config, ConfiguredAirbyteCatalog catalog, JsonNode state) - throws Exception { - return List.of(read(config, catalog, state)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/TypingAndDedupingFlag.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/TypingAndDedupingFlag.java deleted file mode 100644 index 8820b1d7017f9..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/TypingAndDedupingFlag.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.base; - -import java.util.Optional; - -public class TypingAndDedupingFlag { - - public static boolean isDestinationV2() { - return DestinationConfig.getInstance().getIsV2Destination() - || DestinationConfig.getInstance().getBooleanValue("use_1s1t_format"); - } - - public static Optional getRawNamespaceOverride(final String option) { - final String rawOverride = DestinationConfig.getInstance().getTextValue(option); - if (rawOverride == null || rawOverride.isEmpty()) { - return Optional.empty(); - } else { - return Optional.of(rawOverride); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/adaptive/AdaptiveDestinationRunner.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/adaptive/AdaptiveDestinationRunner.java deleted file mode 100644 index 81d508b0dd2c7..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/adaptive/AdaptiveDestinationRunner.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.base.adaptive; - -import io.airbyte.cdk.integrations.base.Destination; -import io.airbyte.cdk.integrations.base.IntegrationRunner; -import io.airbyte.commons.features.EnvVariableFeatureFlags; -import java.util.function.Supplier; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * This class launches different variants of a destination connector based on where Airbyte is - * deployed. - */ -public class AdaptiveDestinationRunner { - - private static final Logger LOGGER = LoggerFactory.getLogger(AdaptiveDestinationRunner.class); - - private static final String DEPLOYMENT_MODE_KEY = EnvVariableFeatureFlags.DEPLOYMENT_MODE; - private static final String CLOUD_MODE = "CLOUD"; - - public static OssDestinationBuilder baseOnEnv() { - final String mode = System.getenv(DEPLOYMENT_MODE_KEY); - return new OssDestinationBuilder(mode); - } - - public static final class OssDestinationBuilder { - - private final String deploymentMode; - - private OssDestinationBuilder(final String deploymentMode) { - this.deploymentMode = deploymentMode; - } - - public CloudDestinationBuilder withOssDestination(final Supplier ossDestinationSupplier) { - return new CloudDestinationBuilder<>(deploymentMode, ossDestinationSupplier); - } - - } - - public static final class CloudDestinationBuilder { - - private final String deploymentMode; - private final Supplier ossDestinationSupplier; - - public CloudDestinationBuilder(final String deploymentMode, final Supplier ossDestinationSupplier) { - this.deploymentMode = deploymentMode; - this.ossDestinationSupplier = ossDestinationSupplier; - } - - public Runner withCloudDestination(final Supplier cloudDestinationSupplier) { - return new Runner<>(deploymentMode, ossDestinationSupplier, cloudDestinationSupplier); - } - - } - - public static final class Runner { - - private final String deploymentMode; - private final Supplier ossDestinationSupplier; - private final Supplier cloudDestinationSupplier; - - public Runner(final String deploymentMode, - final Supplier ossDestinationSupplier, - final Supplier cloudDestinationSupplier) { - this.deploymentMode = deploymentMode; - this.ossDestinationSupplier = ossDestinationSupplier; - this.cloudDestinationSupplier = cloudDestinationSupplier; - } - - private Destination getDestination() { - LOGGER.info("Running destination under deployment mode: {}", deploymentMode); - if (deploymentMode != null && deploymentMode.equals(CLOUD_MODE)) { - return cloudDestinationSupplier.get(); - } - if (deploymentMode == null) { - LOGGER.warn("Deployment mode is null, default to OSS mode"); - } - return ossDestinationSupplier.get(); - } - - public void run(final String[] args) throws Exception { - final Destination destination = getDestination(); - LOGGER.info("Starting destination: {}", destination.getClass().getName()); - new IntegrationRunner(destination).run(args); - LOGGER.info("Completed destination: {}", destination.getClass().getName()); - } - - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/adaptive/AdaptiveSourceRunner.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/adaptive/AdaptiveSourceRunner.java deleted file mode 100644 index 4bb7f021db50f..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/adaptive/AdaptiveSourceRunner.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.base.adaptive; - -import io.airbyte.cdk.integrations.base.IntegrationRunner; -import io.airbyte.cdk.integrations.base.Source; -import io.airbyte.commons.features.EnvVariableFeatureFlags; -import java.util.function.Supplier; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * This class launches different variants of a source connector based on where Airbyte is deployed. - */ -public class AdaptiveSourceRunner { - - private static final Logger LOGGER = LoggerFactory.getLogger(AdaptiveSourceRunner.class); - - public static final String DEPLOYMENT_MODE_KEY = EnvVariableFeatureFlags.DEPLOYMENT_MODE; - public static final String CLOUD_MODE = "CLOUD"; - - public static OssSourceBuilder baseOnEnv() { - final String mode = System.getenv(DEPLOYMENT_MODE_KEY); - return new OssSourceBuilder(mode); - } - - public static final class OssSourceBuilder { - - private final String deploymentMode; - - private OssSourceBuilder(final String deploymentMode) { - this.deploymentMode = deploymentMode; - } - - public CloudSourceBuilder withOssSource(final Supplier ossSourceSupplier) { - return new CloudSourceBuilder<>(deploymentMode, ossSourceSupplier); - } - - } - - public static final class CloudSourceBuilder { - - private final String deploymentMode; - private final Supplier ossSourceSupplier; - - public CloudSourceBuilder(final String deploymentMode, final Supplier ossSourceSupplier) { - this.deploymentMode = deploymentMode; - this.ossSourceSupplier = ossSourceSupplier; - } - - public Runner withCloudSource(final Supplier cloudSourceSupplier) { - return new Runner<>(deploymentMode, ossSourceSupplier, cloudSourceSupplier); - } - - } - - public static final class Runner { - - private final String deploymentMode; - private final Supplier ossSourceSupplier; - private final Supplier cloudSourceSupplier; - - public Runner(final String deploymentMode, - final Supplier ossSourceSupplier, - final Supplier cloudSourceSupplier) { - this.deploymentMode = deploymentMode; - this.ossSourceSupplier = ossSourceSupplier; - this.cloudSourceSupplier = cloudSourceSupplier; - } - - private Source getSource() { - LOGGER.info("Running source under deployment mode: {}", deploymentMode); - if (deploymentMode != null && deploymentMode.equals(CLOUD_MODE)) { - return cloudSourceSupplier.get(); - } - if (deploymentMode == null) { - LOGGER.warn("Deployment mode is null, default to OSS mode"); - } - return ossSourceSupplier.get(); - } - - public void run(final String[] args) throws Exception { - final Source source = getSource(); - LOGGER.info("Starting source: {}", source.getClass().getName()); - new IntegrationRunner(source).run(args); - LOGGER.info("Completed source: {}", source.getClass().getName()); - } - - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/errors/messages/ErrorMessage.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/errors/messages/ErrorMessage.java deleted file mode 100644 index 43c19ad6281e2..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/errors/messages/ErrorMessage.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.base.errors.messages; - -import java.util.Objects; - -public class ErrorMessage { - - // TODO: this could be built using a Builder design pattern instead of passing in 0 to indicate no - // errorCode exists - public static String getErrorMessage(final String stateCode, final int errorCode, final String message, final Exception exception) { - if (Objects.isNull(message)) { - return configMessage(stateCode, 0, exception.getMessage()); - } else { - return configMessage(stateCode, errorCode, message); - } - } - - private static String configMessage(final String stateCode, final int errorCode, final String message) { - final String stateCodePart = Objects.isNull(stateCode) ? "" : String.format("State code: %s; ", stateCode); - final String errorCodePart = errorCode == 0 ? "" : String.format("Error code: %s; ", errorCode); - return String.format("%s%sMessage: %s", stateCodePart, errorCodePart, message); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/spec_modification/SpecModifyingDestination.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/spec_modification/SpecModifyingDestination.java deleted file mode 100644 index 209c98fdf0ae9..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/spec_modification/SpecModifyingDestination.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.base.spec_modification; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.base.AirbyteMessageConsumer; -import io.airbyte.cdk.integrations.base.Destination; -import io.airbyte.cdk.integrations.base.SerializedAirbyteMessageConsumer; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConnectorSpecification; -import java.util.function.Consumer; - -public abstract class SpecModifyingDestination implements Destination { - - private final Destination destination; - - public SpecModifyingDestination(final Destination destination) { - this.destination = destination; - } - - public abstract ConnectorSpecification modifySpec(ConnectorSpecification originalSpec) throws Exception; - - @Override - public ConnectorSpecification spec() throws Exception { - return modifySpec(destination.spec()); - } - - @Override - public AirbyteConnectionStatus check(final JsonNode config) throws Exception { - return destination.check(config); - } - - @Override - public AirbyteMessageConsumer getConsumer(final JsonNode config, - final ConfiguredAirbyteCatalog catalog, - final Consumer outputRecordCollector) - throws Exception { - return destination.getConsumer(config, catalog, outputRecordCollector); - } - - @Override - public SerializedAirbyteMessageConsumer getSerializedMessageConsumer(final JsonNode config, - final ConfiguredAirbyteCatalog catalog, - final Consumer outputRecordCollector) - throws Exception { - return destination.getSerializedMessageConsumer(config, catalog, outputRecordCollector); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/spec_modification/SpecModifyingSource.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/spec_modification/SpecModifyingSource.java deleted file mode 100644 index aa7f1b2b2a4ec..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/spec_modification/SpecModifyingSource.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.base.spec_modification; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.base.Source; -import io.airbyte.commons.util.AutoCloseableIterator; -import io.airbyte.protocol.models.v0.AirbyteCatalog; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConnectorSpecification; -import java.util.Collection; - -/** - * In some cases we want to prune or mutate the spec for an existing source. The common case is that - * we want to remove features that are not appropriate for some reason. e.g. In cloud, we do not - * want to allow users to send data unencrypted. - */ -public abstract class SpecModifyingSource implements Source { - - private final Source source; - - public SpecModifyingSource(final Source source) { - this.source = source; - } - - public abstract ConnectorSpecification modifySpec(ConnectorSpecification originalSpec) throws Exception; - - @Override - public ConnectorSpecification spec() throws Exception { - return modifySpec(source.spec()); - } - - @Override - public AirbyteConnectionStatus check(final JsonNode config) throws Exception { - return source.check(config); - } - - @Override - public AirbyteCatalog discover(final JsonNode config) throws Exception { - return source.discover(config); - } - - @Override - public AutoCloseableIterator read(final JsonNode config, final ConfiguredAirbyteCatalog catalog, final JsonNode state) - throws Exception { - return source.read(config, catalog, state); - } - - @Override - public Collection> readStreams(JsonNode config, ConfiguredAirbyteCatalog catalog, JsonNode state) - throws Exception { - return source.readStreams(config, catalog, state); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/ssh/SshHelpers.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/ssh/SshHelpers.java deleted file mode 100644 index ab577063c0e34..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/ssh/SshHelpers.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.base.ssh; - -import com.fasterxml.jackson.databind.node.ObjectNode; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.protocol.models.v0.ConnectorSpecification; -import java.io.IOException; -import java.util.Optional; - -public class SshHelpers { - - public static ConnectorSpecification getSpecAndInjectSsh() throws IOException { - return getSpecAndInjectSsh(Optional.empty()); - } - - public static ConnectorSpecification getSpecAndInjectSsh(final Optional group) throws IOException { - final ConnectorSpecification originalSpec = Jsons.deserialize(MoreResources.readResource("spec.json"), ConnectorSpecification.class); - return injectSshIntoSpec(originalSpec, group); - } - - public static ConnectorSpecification injectSshIntoSpec(final ConnectorSpecification connectorSpecification) throws IOException { - return injectSshIntoSpec(connectorSpecification, Optional.empty()); - } - - public static ConnectorSpecification injectSshIntoSpec(final ConnectorSpecification connectorSpecification, final Optional group) - throws IOException { - final ConnectorSpecification originalSpec = Jsons.clone(connectorSpecification); - final ObjectNode propNode = (ObjectNode) originalSpec.getConnectionSpecification().get("properties"); - final ObjectNode tunnelMethod = (ObjectNode) Jsons.deserialize(MoreResources.readResource("ssh-tunnel-spec.json")); - if (group.isPresent()) { - tunnelMethod.put("group", group.get()); - } - propNode.set("tunnel_method", tunnelMethod); - return originalSpec; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/ssh/SshTunnel.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/ssh/SshTunnel.java deleted file mode 100644 index 649f0576a589d..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/ssh/SshTunnel.java +++ /dev/null @@ -1,485 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.base.ssh; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.base.Preconditions; -import io.airbyte.cdk.integrations.base.AirbyteTraceMessageUtility; -import io.airbyte.commons.exceptions.ConfigErrorException; -import io.airbyte.commons.functional.CheckedConsumer; -import io.airbyte.commons.functional.CheckedFunction; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.string.Strings; -import java.io.IOException; -import java.io.StringReader; -import java.net.InetSocketAddress; -import java.net.MalformedURLException; -import java.net.URL; -import java.security.GeneralSecurityException; -import java.security.KeyPair; -import java.time.Duration; -import java.util.Arrays; -import java.util.List; -import java.util.Locale; -import java.util.Optional; -import javax.validation.constraints.NotNull; -import org.apache.sshd.client.SshClient; -import org.apache.sshd.client.keyverifier.AcceptAllServerKeyVerifier; -import org.apache.sshd.client.session.ClientSession; -import org.apache.sshd.common.SshException; -import org.apache.sshd.common.session.SessionHeartbeatController; -import org.apache.sshd.common.util.net.SshdSocketAddress; -import org.apache.sshd.common.util.security.SecurityUtils; -import org.apache.sshd.core.CoreModuleProperties; -import org.apache.sshd.server.forward.AcceptAllForwardingFilter; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -// todo (cgardens) - this needs unit tests. it is currently tested transitively via source postgres -// integration tests. -/** - * Encapsulates the connection configuration for an ssh tunnel port forward through a proxy/bastion - * host plus the remote host and remote port to forward to a specified local port. - */ -public class SshTunnel implements AutoCloseable { - - private static final Logger LOGGER = LoggerFactory.getLogger(SshTunnel.class); - public static final String SSH_TIMEOUT_DISPLAY_MESSAGE = - "Timed out while opening a SSH Tunnel. Please double check the given SSH configurations and try again."; - - public static final String CONNECTION_OPTIONS_KEY = "ssh_connection_options"; - public static final String SESSION_HEARTBEAT_INTERVAL_KEY = "session_heartbeat_interval"; - public static final long SESSION_HEARTBEAT_INTERVAL_DEFAULT_IN_MILLIS = 1000; - public static final String GLOBAL_HEARTBEAT_INTERVAL_KEY = "global_heartbeat_interval"; - public static final long GLOBAL_HEARTBEAT_INTERVAL_DEFAULT_IN_MILLIS = 2000; - public static final String IDLE_TIMEOUT_KEY = "idle_timeout"; - public static final long IDLE_TIMEOUT_DEFAULT_INFINITE = 0; - - public enum TunnelMethod { - NO_TUNNEL, - SSH_PASSWORD_AUTH, - SSH_KEY_AUTH - } - - public record SshConnectionOptions(Duration sessionHeartbeatInterval, - Duration globalHeartbeatInterval, - Duration idleTimeout) {} - - public static final int TIMEOUT_MILLIS = 15000; // 15 seconds - - private final JsonNode config; - private final List hostKey; - private final List portKey; - - private final TunnelMethod tunnelMethod; - private final String tunnelHost; - private final int tunnelPort; - private final String tunnelUser; - private final String sshKey; - private final String endPointKey; - private final String remoteServiceProtocol; - private final String remoteServicePath; - private final String tunnelUserPassword; - private final String remoteServiceHost; - private final int remoteServicePort; - protected int tunnelLocalPort; - - private SshClient sshclient; - private ClientSession tunnelSession; - - /** - * - * @param config - the full config that was passed to the source. - * @param hostKey - a list of keys that point to the database host name. should be pointing to where - * in the config remoteDatabaseHost is found. - * @param portKey - a list of keys that point to the database port. should be pointing to where in - * the config remoteDatabasePort is found. - * @param endPointKey - key that points to the endpoint URL (this is commonly used for REST-based - * services such as Elastic and MongoDB) - * @param remoteServiceUrl - URL of the remote endpoint (this is commonly used for REST-based * - * services such as Elastic and MongoDB) - * @param tunnelMethod - the type of ssh method that should be used (includes not using SSH at all). - * @param tunnelHost - host name of the machine to which we will establish an ssh connection (e.g. - * hostname of the bastion). - * @param tunnelPort - port of the machine to which we will establish an ssh connection. (e.g. port - * of the bastion). - * @param tunnelUser - user that is allowed to access the tunnelHost. - * @param sshKey - the ssh key that will be used to make the ssh connection. can be null if we are - * using tunnelUserPassword instead. - * @param tunnelUserPassword - the password for the tunnelUser. can be null if we are using sshKey - * instead. - * @param remoteServiceHost - the actual host name of the remote service (as it is known to the - * tunnel host). - * @param remoteServicePort - the actual port of the remote service (as it is known to the tunnel - * host). - * @param connectionOptions - optional connection options for ssh client. - */ - public SshTunnel(final JsonNode config, - final List hostKey, - final List portKey, - final String endPointKey, - final String remoteServiceUrl, - final TunnelMethod tunnelMethod, - final String tunnelHost, - final int tunnelPort, - final String tunnelUser, - final String sshKey, - final String tunnelUserPassword, - final String remoteServiceHost, - final int remoteServicePort, - final Optional connectionOptions) { - this.config = config; - this.hostKey = hostKey; - this.portKey = portKey; - this.endPointKey = endPointKey; - Preconditions.checkNotNull(tunnelMethod); - this.tunnelMethod = tunnelMethod; - - if (tunnelMethod.equals(TunnelMethod.NO_TUNNEL)) { - this.tunnelHost = null; - this.tunnelPort = 0; - this.tunnelUser = null; - this.sshKey = null; - this.tunnelUserPassword = null; - this.remoteServiceHost = null; - this.remoteServicePort = 0; - this.remoteServiceProtocol = null; - this.remoteServicePath = null; - } else { - Preconditions.checkNotNull(tunnelHost); - Preconditions.checkArgument(tunnelPort > 0); - Preconditions.checkNotNull(tunnelUser); - if (tunnelMethod.equals(TunnelMethod.SSH_KEY_AUTH)) { - Preconditions.checkNotNull(sshKey); - } - if (tunnelMethod.equals(TunnelMethod.SSH_PASSWORD_AUTH)) { - Preconditions.checkNotNull(tunnelUserPassword); - } - // must provide either host/port or endpoint - Preconditions.checkArgument((hostKey != null && portKey != null) || endPointKey != null); - Preconditions.checkArgument((remoteServiceHost != null && remoteServicePort > 0) || remoteServiceUrl != null); - if (remoteServiceUrl != null) { - URL urlObject = null; - try { - urlObject = new URL(remoteServiceUrl); - } catch (final MalformedURLException e) { - AirbyteTraceMessageUtility.emitConfigErrorTrace(e, - String.format("Provided value for remote service URL is not valid: %s", remoteServiceUrl)); - } - Preconditions.checkNotNull(urlObject, "Failed to parse URL of remote service"); - this.remoteServiceHost = urlObject.getHost(); - this.remoteServicePort = urlObject.getPort(); - this.remoteServiceProtocol = urlObject.getProtocol(); - this.remoteServicePath = urlObject.getPath(); - } else { - this.remoteServiceProtocol = null; - this.remoteServicePath = null; - this.remoteServiceHost = remoteServiceHost; - this.remoteServicePort = remoteServicePort; - } - - this.tunnelHost = tunnelHost; - this.tunnelPort = tunnelPort; - this.tunnelUser = tunnelUser; - this.sshKey = sshKey; - this.tunnelUserPassword = tunnelUserPassword; - this.sshclient = connectionOptions.map(sshConnectionOptions -> createClient(sshConnectionOptions.sessionHeartbeatInterval(), - sshConnectionOptions.globalHeartbeatInterval(), - sshConnectionOptions.idleTimeout())).orElseGet(this::createClient); - this.tunnelSession = openTunnel(sshclient); - } - } - - public SshTunnel(final JsonNode config, - final List hostKey, - final List portKey, - final String endPointKey, - final String remoteServiceUrl, - final TunnelMethod tunnelMethod, - final String tunnelHost, - final int tunnelPort, - final String tunnelUser, - final String sshKey, - final String tunnelUserPassword, - final String remoteServiceHost, - final int remoteServicePort) { - this(config, - hostKey, - portKey, - endPointKey, - remoteServiceUrl, - tunnelMethod, - tunnelHost, - tunnelPort, - tunnelUser, - sshKey, - tunnelUserPassword, - remoteServiceHost, - remoteServicePort, - Optional.empty()); - } - - public JsonNode getOriginalConfig() { - return config; - } - - public JsonNode getConfigInTunnel() throws Exception { - if (tunnelMethod.equals(TunnelMethod.NO_TUNNEL)) { - return getOriginalConfig(); - } else { - final JsonNode clone = Jsons.clone(config); - if (hostKey != null) { - Jsons.replaceNestedString(clone, hostKey, SshdSocketAddress.LOCALHOST_ADDRESS.getHostName()); - } - if (portKey != null) { - Jsons.replaceNestedInt(clone, portKey, tunnelLocalPort); - } - if (endPointKey != null) { - final URL tunnelEndPointURL = - new URL(remoteServiceProtocol, SshdSocketAddress.LOCALHOST_ADDRESS.getHostName(), tunnelLocalPort, remoteServicePath); - Jsons.replaceNestedString(clone, Arrays.asList(endPointKey), tunnelEndPointURL.toString()); - } - return clone; - } - } - - public static SshTunnel getInstance(final JsonNode config, final List hostKey, final List portKey) { - final TunnelMethod tunnelMethod = Jsons.getOptional(config, "tunnel_method", "tunnel_method") - .map(method -> TunnelMethod.valueOf(method.asText().trim())) - .orElse(TunnelMethod.NO_TUNNEL); - LOGGER.info("Starting connection with method: {}", tunnelMethod); - - return new SshTunnel( - config, - hostKey, - portKey, - null, - null, - tunnelMethod, - Strings.safeTrim(Jsons.getStringOrNull(config, "tunnel_method", "tunnel_host")), - Jsons.getIntOrZero(config, "tunnel_method", "tunnel_port"), - Strings.safeTrim(Jsons.getStringOrNull(config, "tunnel_method", "tunnel_user")), - Strings.safeTrim(Jsons.getStringOrNull(config, "tunnel_method", "ssh_key")), - Strings.safeTrim(Jsons.getStringOrNull(config, "tunnel_method", "tunnel_user_password")), - Strings.safeTrim(Jsons.getStringOrNull(config, hostKey)), - Jsons.getIntOrZero(config, portKey), - getSshConnectionOptions(config)); - } - - @NotNull - private static Optional getSshConnectionOptions(JsonNode config) { - // piggybacking on JsonNode config to make it configurable at connector level. - Optional connectionOptionConfig = Jsons.getOptional(config, CONNECTION_OPTIONS_KEY); - final Optional connectionOptions; - if (connectionOptionConfig.isPresent()) { - JsonNode connectionOptionsNode = connectionOptionConfig.get(); - Duration sessionHeartbeatInterval = Jsons.getOptional(connectionOptionsNode, SESSION_HEARTBEAT_INTERVAL_KEY) - .map(interval -> Duration.ofMillis(interval.asLong())) - .orElse(Duration.ofSeconds(1)); - Duration globalHeartbeatInterval = Jsons.getOptional(connectionOptionsNode, GLOBAL_HEARTBEAT_INTERVAL_KEY) - .map(interval -> Duration.ofMillis(interval.asLong())) - .orElse(Duration.ofSeconds(2)); - Duration idleTimeout = Jsons.getOptional(connectionOptionsNode, IDLE_TIMEOUT_KEY) - .map(interval -> Duration.ofMillis(interval.asLong())) - .orElse(Duration.ZERO); - connectionOptions = Optional.of( - new SshConnectionOptions(sessionHeartbeatInterval, globalHeartbeatInterval, idleTimeout)); - } else { - connectionOptions = Optional.empty(); - } - return connectionOptions; - } - - public static SshTunnel getInstance(final JsonNode config, final String endPointKey) throws Exception { - final TunnelMethod tunnelMethod = Jsons.getOptional(config, "tunnel_method", "tunnel_method") - .map(method -> TunnelMethod.valueOf(method.asText().trim())) - .orElse(TunnelMethod.NO_TUNNEL); - LOGGER.info("Starting connection with method: {}", tunnelMethod); - - return new SshTunnel( - config, - null, - null, - endPointKey, - Jsons.getStringOrNull(config, endPointKey), - tunnelMethod, - Strings.safeTrim(Jsons.getStringOrNull(config, "tunnel_method", "tunnel_host")), - Jsons.getIntOrZero(config, "tunnel_method", "tunnel_port"), - Strings.safeTrim(Jsons.getStringOrNull(config, "tunnel_method", "tunnel_user")), - Strings.safeTrim(Jsons.getStringOrNull(config, "tunnel_method", "ssh_key")), - Strings.safeTrim(Jsons.getStringOrNull(config, "tunnel_method", "tunnel_user_password")), - null, 0, - getSshConnectionOptions(config)); - } - - public static void sshWrap(final JsonNode config, - final List hostKey, - final List portKey, - final CheckedConsumer wrapped) - throws Exception { - sshWrap(config, hostKey, portKey, (configInTunnel) -> { - wrapped.accept(configInTunnel); - return null; - }); - } - - public static void sshWrap(final JsonNode config, - final String endPointKey, - final CheckedConsumer wrapped) - throws Exception { - sshWrap(config, endPointKey, (configInTunnel) -> { - wrapped.accept(configInTunnel); - return null; - }); - } - - public static T sshWrap(final JsonNode config, - final List hostKey, - final List portKey, - final CheckedFunction wrapped) - throws Exception { - try (final SshTunnel sshTunnel = SshTunnel.getInstance(config, hostKey, portKey)) { - return wrapped.apply(sshTunnel.getConfigInTunnel()); - } - } - - public static T sshWrap(final JsonNode config, - final String endPointKey, - final CheckedFunction wrapped) - throws Exception { - try (final SshTunnel sshTunnel = SshTunnel.getInstance(config, endPointKey)) { - return wrapped.apply(sshTunnel.getConfigInTunnel()); - } - } - - /** - * Closes a tunnel if one was open, and otherwise doesn't do anything (safe to run). - */ - @Override - public void close() { - try { - if (tunnelSession != null) { - tunnelSession.close(); - tunnelSession = null; - } - if (sshclient != null) { - sshclient.stop(); - sshclient = null; - } - } catch (final Throwable t) { - throw new RuntimeException(t); - } - } - - /** - * From the OPENSSH private key string, use mina-sshd to deserialize the key pair, reconstruct the - * keys from the key info, and return the key pair for use in authentication. - * - * @return The {@link KeyPair} to add - may not be {@code null} - * @see loadKeyPairs() - */ - KeyPair getPrivateKeyPair() throws IOException, GeneralSecurityException { - final String validatedKey = validateKey(); - final var keyPairs = SecurityUtils - .getKeyPairResourceParser() - .loadKeyPairs(null, null, null, new StringReader(validatedKey)); - - if (keyPairs != null && keyPairs.iterator().hasNext()) { - return keyPairs.iterator().next(); - } - throw new ConfigErrorException("Unable to load private key pairs, verify key pairs are properly inputted"); - } - - private String validateKey() { - return sshKey.replace("\\n", "\n"); - } - - /** - * Generates a new ssh client and returns it, with forwarding set to accept all types; use this - * before opening a tunnel. - */ - private SshClient createClient() { - java.security.Security.addProvider(new org.bouncycastle.jce.provider.BouncyCastleProvider()); - final SshClient client = SshClient.setUpDefaultClient(); - client.setForwardingFilter(AcceptAllForwardingFilter.INSTANCE); - client.setServerKeyVerifier(AcceptAllServerKeyVerifier.INSTANCE); - return client; - } - - private SshClient createClient(Duration sessionHeartbeatInterval, Duration globalHeartbeatInterval, Duration idleTimeout) { - LOGGER.info("Creating SSH client with Heartbeat and Keepalive enabled"); - final SshClient client = createClient(); - // Session level heartbeat using SSH_MSG_IGNORE every second. - client.setSessionHeartbeat(SessionHeartbeatController.HeartbeatType.IGNORE, sessionHeartbeatInterval); - // idle-timeout zero indicates NoTimeout. - CoreModuleProperties.IDLE_TIMEOUT.set(client, idleTimeout); - // Use tcp keep-alive mechanism. - CoreModuleProperties.SOCKET_KEEPALIVE.set(client, true); - // Additional delay used for ChannelOutputStream to wait for space in the remote socket send buffer. - CoreModuleProperties.WAIT_FOR_SPACE_TIMEOUT.set(client, Duration.ofMinutes(2)); - // Global keepalive message sent every 2 seconds. This precedes the session level heartbeat. - CoreModuleProperties.HEARTBEAT_INTERVAL.set(client, globalHeartbeatInterval); - return client; - } - - /** - * Starts an ssh session; wrap this in a try-finally and use closeTunnel() to close it. - */ - ClientSession openTunnel(final SshClient client) { - try { - client.start(); - final ClientSession session = client.connect( - tunnelUser.trim(), - tunnelHost.trim(), - tunnelPort) - .verify(TIMEOUT_MILLIS) - .getSession(); - if (tunnelMethod.equals(TunnelMethod.SSH_KEY_AUTH)) { - session.addPublicKeyIdentity(getPrivateKeyPair()); - } - if (tunnelMethod.equals(TunnelMethod.SSH_PASSWORD_AUTH)) { - session.addPasswordIdentity(tunnelUserPassword); - } - - session.auth().verify(TIMEOUT_MILLIS); - final SshdSocketAddress address = session.startLocalPortForwarding( - // entering 0 lets the OS pick a free port for us. - new SshdSocketAddress(InetSocketAddress.createUnresolved(SshdSocketAddress.LOCALHOST_ADDRESS.getHostName(), 0)), - new SshdSocketAddress(remoteServiceHost, remoteServicePort)); - - // discover the port that the OS picked and remember it so that we can use it when we try to connect - tunnelLocalPort = address.getPort(); - - LOGGER.info(String.format("Established tunneling session to %s:%d. Port forwarding started on %s ", - remoteServiceHost, remoteServicePort, address.toInetSocketAddress())); - return session; - } catch (final IOException | GeneralSecurityException e) { - if (e instanceof SshException && e.getMessage() - .toLowerCase(Locale.ROOT) - .contains("failed to get operation result within specified timeout")) { - throw new ConfigErrorException(SSH_TIMEOUT_DISPLAY_MESSAGE, e); - } else { - throw new RuntimeException(e); - } - } - } - - @Override - public String toString() { - return "SshTunnel{" + - "hostKey=" + hostKey + - ", portKey=" + portKey + - ", tunnelMethod=" + tunnelMethod + - ", tunnelHost='" + tunnelHost + '\'' + - ", tunnelPort=" + tunnelPort + - ", tunnelUser='" + tunnelUser + '\'' + - ", remoteServiceHost='" + remoteServiceHost + '\'' + - ", remoteServicePort=" + remoteServicePort + - ", tunnelLocalPort=" + tunnelLocalPort + - '}'; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/ssh/SshWrappedDestination.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/ssh/SshWrappedDestination.java deleted file mode 100644 index f0873bb05edb9..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/ssh/SshWrappedDestination.java +++ /dev/null @@ -1,143 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.base.ssh; - -import static io.airbyte.cdk.integrations.base.ssh.SshTunnel.CONNECTION_OPTIONS_KEY; -import static io.airbyte.cdk.integrations.base.ssh.SshTunnel.GLOBAL_HEARTBEAT_INTERVAL_DEFAULT_IN_MILLIS; -import static io.airbyte.cdk.integrations.base.ssh.SshTunnel.GLOBAL_HEARTBEAT_INTERVAL_KEY; -import static io.airbyte.cdk.integrations.base.ssh.SshTunnel.SESSION_HEARTBEAT_INTERVAL_DEFAULT_IN_MILLIS; -import static io.airbyte.cdk.integrations.base.ssh.SshTunnel.SESSION_HEARTBEAT_INTERVAL_KEY; -import static io.airbyte.cdk.integrations.base.ssh.SshTunnel.getInstance; -import static io.airbyte.cdk.integrations.base.ssh.SshTunnel.sshWrap; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import io.airbyte.cdk.integrations.base.AirbyteMessageConsumer; -import io.airbyte.cdk.integrations.base.AirbyteTraceMessageUtility; -import io.airbyte.cdk.integrations.base.Destination; -import io.airbyte.cdk.integrations.base.SerializedAirbyteMessageConsumer; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus.Status; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConnectorSpecification; -import java.util.List; -import java.util.Optional; -import java.util.function.Consumer; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Decorates a Destination with an SSH Tunnel using the standard configuration that Airbyte uses for - * configuring SSH. - */ -public class SshWrappedDestination implements Destination { - - private static final Logger LOGGER = LoggerFactory.getLogger(SshWrappedDestination.class); - - private final Destination delegate; - private final List hostKey; - private final List portKey; - private final String endPointKey; - - public SshWrappedDestination(final Destination delegate, - final List hostKey, - final List portKey) { - this.delegate = delegate; - this.hostKey = hostKey; - this.portKey = portKey; - this.endPointKey = null; - } - - public SshWrappedDestination(final Destination delegate, - final String endPointKey) { - this.delegate = delegate; - this.endPointKey = endPointKey; - this.portKey = null; - this.hostKey = null; - } - - @Override - public ConnectorSpecification spec() throws Exception { - // inject the standard ssh configuration into the spec. - final ConnectorSpecification originalSpec = delegate.spec(); - final ObjectNode propNode = (ObjectNode) originalSpec.getConnectionSpecification().get("properties"); - propNode.set("tunnel_method", Jsons.deserialize(MoreResources.readResource("ssh-tunnel-spec.json"))); - return originalSpec; - } - - @Override - public AirbyteConnectionStatus check(final JsonNode config) throws Exception { - try { - return (endPointKey != null) ? sshWrap(config, endPointKey, delegate::check) - : sshWrap(config, hostKey, portKey, delegate::check); - } catch (final RuntimeException e) { - final String sshErrorMessage = "Could not connect with provided SSH configuration. Error: " + e.getMessage(); - AirbyteTraceMessageUtility.emitConfigErrorTrace(e, sshErrorMessage); - return new AirbyteConnectionStatus() - .withStatus(Status.FAILED) - .withMessage(sshErrorMessage); - } - } - - @Override - public AirbyteMessageConsumer getConsumer(final JsonNode config, - final ConfiguredAirbyteCatalog catalog, - final Consumer outputRecordCollector) - throws Exception { - final SshTunnel tunnel = getTunnelInstance(config); - - final AirbyteMessageConsumer delegateConsumer; - try { - delegateConsumer = delegate.getConsumer(tunnel.getConfigInTunnel(), catalog, outputRecordCollector); - } catch (final Exception e) { - LOGGER.error("Exception occurred while getting the delegate consumer, closing SSH tunnel", e); - tunnel.close(); - throw e; - } - return AirbyteMessageConsumer.appendOnClose(delegateConsumer, tunnel::close); - } - - @Override - public SerializedAirbyteMessageConsumer getSerializedMessageConsumer(final JsonNode config, - final ConfiguredAirbyteCatalog catalog, - final Consumer outputRecordCollector) - throws Exception { - final JsonNode clone = Jsons.clone(config); - Optional connectionOptionsConfig = Jsons.getOptional(clone, CONNECTION_OPTIONS_KEY); - if (connectionOptionsConfig.isEmpty()) { - LOGGER.info("No SSH connection options found, using defaults"); - if (clone instanceof ObjectNode) { // Defensive check, it will always be object node - ObjectNode connectionOptions = ((ObjectNode) clone).putObject(CONNECTION_OPTIONS_KEY); - connectionOptions.put(SESSION_HEARTBEAT_INTERVAL_KEY, SESSION_HEARTBEAT_INTERVAL_DEFAULT_IN_MILLIS); - connectionOptions.put(GLOBAL_HEARTBEAT_INTERVAL_KEY, GLOBAL_HEARTBEAT_INTERVAL_DEFAULT_IN_MILLIS); - } - } - final SshTunnel tunnel = getTunnelInstance(clone); - final SerializedAirbyteMessageConsumer delegateConsumer; - try { - delegateConsumer = delegate.getSerializedMessageConsumer(tunnel.getConfigInTunnel(), catalog, outputRecordCollector); - } catch (final Exception e) { - LOGGER.error("Exception occurred while getting the delegate consumer, closing SSH tunnel", e); - tunnel.close(); - throw e; - } - return SerializedAirbyteMessageConsumer.appendOnClose(delegateConsumer, tunnel::close); - } - - protected SshTunnel getTunnelInstance(final JsonNode config) throws Exception { - return (endPointKey != null) - ? getInstance(config, endPointKey) - : getInstance(config, hostKey, portKey); - } - - @Override - public boolean isV2Destination() { - return delegate.isV2Destination(); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/ssh/SshWrappedSource.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/ssh/SshWrappedSource.java deleted file mode 100644 index 7abc65d277f74..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/ssh/SshWrappedSource.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.base.ssh; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.base.AirbyteTraceMessageUtility; -import io.airbyte.cdk.integrations.base.Source; -import io.airbyte.commons.util.AutoCloseableIterator; -import io.airbyte.commons.util.AutoCloseableIterators; -import io.airbyte.protocol.models.v0.AirbyteCatalog; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus.Status; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConnectorSpecification; -import java.util.Collection; -import java.util.List; -import java.util.Optional; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class SshWrappedSource implements Source { - - private static final Logger LOGGER = LoggerFactory.getLogger(SshWrappedSource.class); - private final Source delegate; - private final List hostKey; - private final List portKey; - private final Optional sshGroup; - - public SshWrappedSource(final Source delegate, final List hostKey, final List portKey) { - this.delegate = delegate; - this.hostKey = hostKey; - this.portKey = portKey; - this.sshGroup = Optional.empty(); - } - - public SshWrappedSource(final Source delegate, final List hostKey, final List portKey, final String sshGroup) { - this.delegate = delegate; - this.hostKey = hostKey; - this.portKey = portKey; - this.sshGroup = Optional.of(sshGroup); - } - - @Override - public ConnectorSpecification spec() throws Exception { - return SshHelpers.injectSshIntoSpec(delegate.spec(), sshGroup); - } - - @Override - public AirbyteConnectionStatus check(final JsonNode config) throws Exception { - try { - return SshTunnel.sshWrap(config, hostKey, portKey, delegate::check); - } catch (final RuntimeException e) { - final String sshErrorMessage = "Could not connect with provided SSH configuration. Error: " + e.getMessage(); - AirbyteTraceMessageUtility.emitConfigErrorTrace(e, sshErrorMessage); - return new AirbyteConnectionStatus() - .withStatus(Status.FAILED) - .withMessage(sshErrorMessage); - } - } - - @Override - public AirbyteCatalog discover(final JsonNode config) throws Exception { - return SshTunnel.sshWrap(config, hostKey, portKey, delegate::discover); - } - - @Override - public AutoCloseableIterator read(final JsonNode config, final ConfiguredAirbyteCatalog catalog, final JsonNode state) - throws Exception { - final SshTunnel tunnel = SshTunnel.getInstance(config, hostKey, portKey); - final AutoCloseableIterator delegateRead; - try { - delegateRead = delegate.read(tunnel.getConfigInTunnel(), catalog, state); - } catch (final Exception e) { - LOGGER.error("Exception occurred while getting the delegate read iterator, closing SSH tunnel", e); - tunnel.close(); - throw e; - } - return AutoCloseableIterators.appendOnClose(delegateRead, tunnel::close); - } - - @Override - public Collection> readStreams(JsonNode config, ConfiguredAirbyteCatalog catalog, JsonNode state) - throws Exception { - final SshTunnel tunnel = SshTunnel.getInstance(config, hostKey, portKey); - try { - return delegate.readStreams(tunnel.getConfigInTunnel(), catalog, state); - } catch (final Exception e) { - LOGGER.error("Exception occurred while getting the delegate read stream iterators, closing SSH tunnel", e); - tunnel.close(); - throw e; - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/ssh/readme.md b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/ssh/readme.md deleted file mode 100644 index 749b2df491a08..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/ssh/readme.md +++ /dev/null @@ -1,63 +0,0 @@ -# Developing an SSH Connector - -## Goal -Easy development of any connector that needs the ability to connect to a resource via SSH Tunnel. - -## Overview -Our SSH connector support is designed to be easy to plug into any existing connector. There are a few major pieces to consider: -1. Add SSH Configuration to the Spec - for SSH, we need to take in additional configuration, so we need to inject extra fields into the connector configuration. -2. Add SSH Logic to the Connector - before the connector code begins to execute we need to start an SSH tunnel. This library provides logic to create that tunnel (and clean it up). -3. Acceptance Testing - it is a good practice to include acceptance testing for the SSH version of a connector for at least one of the SSH types (password or ssh key). While unit testing for the SSH functionality exists in this package (coming soon), high-level acceptance testing to make sure this feature works with the individual connector belongs in the connector. -4. Normalization Support for Destinations - if the connector is a destination and supports normalization, there's a small change required in the normalization code to update the config so that dbt uses the right credentials for the SSH tunnel. - -## How To - -### Add SSH Configuration to the Spec -1. The `SshHelpers` class provides 2 helper functions that injects the SSH configuration objects into a spec JsonSchema for an existing connector. Usually the `spec()` method for a connector looks like `Jsons.deserialize(MoreResources.readResource("spec.json"), ConnectorSpecification.class);`. These helpers are just injecting the ssh spec (`ssh-tunnel-spec.json`) into that spec. -2. You may need to update tests to reflect that new fields have been added to the spec. Usually updating the tests just requires using these helpers in the tests. - -### Add SSH Logic to the Connector -1. This package provides a Source decorated class to make it easy to add SSH logic to an existing source. Simply pass the source you want to wrap into the constructor of the `SshWrappedSource`. That class also requires two other fields: `hostKey` and `portKey`. Both of these fields are pointers to fields in the connector specification. The `hostKey` is a pointer to the field that hold the host of the resource you want to connect and `portKey` is the port. In a simple case, where the host name for a connector is just defined in the top-level `host` field, then `hostKey` would simply be: `["host"]`. If that field is nested, however, then it might be: `["database", "configuration", "host"]`. - -### Acceptance Testing -1. The only difference between existing acceptance testing and acceptance testing with SSH is that the configuration that is used for testing needs to contain additional fields. You can see the `Postgres Source ssh key creds` in lastpass to see an example of what that might look like. Those credentials leverage an existing bastion host in our test infrastructure. (As future work, we want to get rid of the need to use a static bastion server and instead do it in docker so we can run it all locally.) - -### Normalization Support for Destinations -1. The core functionality for ssh tunnelling with normalization is already in place but you'll need to add a small tweak to `transform_config/transform.py` in the normalization module. Find the function `transform_{connector}()` and add at the start: - ``` - if TransformConfig.is_ssh_tunnelling(config): - config = TransformConfig.get_ssh_altered_config(config, port_key="port", host_key="host") - ``` - Replace port_key and host_key as necessary. Look at `transform_postgres()` to see an example. -2. To make sure your changes are present in Normalization when running tests on the connector locally, you'll need to change [this version tag](https://github.com/airbytehq/airbyte/blob/6d9ba022646441c7f298ca4dcaa3df59b9a19fbb/airbyte-workers/src/main/java/io/airbyte/workers/normalization/DefaultNormalizationRunner.java#L50) to `dev` so that the new locally built docker image for Normalization is used. Don't push this change with the PR though. -3. If your `host_key="host"` and `port_key="port"` then this step is not necessary. However if the key names differ for your connector, you will also need to add some logic into `sshtunneling.sh` (within airbyte-workers) to handle this, as currently it assumes that the keys are exactly `host` and `port`. -4. When making your PR, make sure that you've version bumped Normalization (in `airbyte-workers/src/main/java/io/airbyte/workers/normalization/DefaultNormalizationRunner.java` and `airbyte-integrations/bases/base-normalization/Dockerfile`). You'll need to /legacy-test & /legacy-publish Normalization _first_ so that when you the connector is tested, it can use the new version. - -## Misc - -### How to wrap the protocol in an SSH Tunnel -For `spec()`, `check()`, and `discover()` wrapping the connector in an SSH tunnel is easier to think about because when they return all work is done and the tunnel can be closed. Thus, each of these methods can simply be wrapped in a try-with-resource of the SSH Tunnel. - -For `read()` and `write()` they return an iterator and consumer respectively that perform work that must happen within the SSH Tunnel after the method has returned. Therefore, the `close` function on the iterator and consumer have to handle closing the SSH tunnel; the methods themselves cannot just be wrapped in a try-with-resource. This is handled for you by the `SshWrappedSource`, but if you need to implement any of this manually you must take it into account. - -### Name Mangling -One of the least intuitive pieces of the SSH setup to follow is the replacement of host names and ports. The reason `SshWrappedSource` needs to know how to get the hostname and port of the database you are trying to connect to is that when it builds the SSH tunnel that forwards to the database, it needs to know the hostname and port so that the tunnel forwards requests to the right place. After the SSH tunnel is established and forwarding to the database, the connector code itself runs. - -There's a trick here though! The connector should NOT try to connect to the hostname and port of the database. Instead, it should be trying to connect to `localhost` and whatever port we are forwarding to the database. The `SshTunnel#sshWrap` removes the original host and port from the configuration for the connector and replaces it with `localhost` and the correct port. So from the connector code's point of view it is just operating on localhost. - -There is a tradeoff here. -* (Good) The way we have structured this allows users to configure a connector in the UI in a way that it is intuitive to user. They put in the host and port they think about referring to the database as (they don't need to worry about any of the localhost version). -* (Good) The connector code does not need to know anything about SSH, it can just operate on the host and port it gets (and we let SSH Tunnel handle swapping the names for us) which makes writing a connector easier. -* (Bad) The downside is that the `SshTunnel` logic is more complicated because it is absorbing all of this name swapping so that neither user nor connector developer need to worry about it. In our estimation, the good outweighs the extra complexity incurred here. - - -### Acceptance Testing via ssh tunnel using SshBastion and JdbcDatabaseContainer in Docker -1. The `SshBastion` class provides 3 helper functions: - `initAndStartBastion()`to initialize and start SSH Bastion server in Docker test container and creates new `Network` for bastion and tested jdbc container - `getTunnelConfig()`which return JsoneNode with all necessary configuration to establish ssh tunnel. Connection configuration for integration tests is now taken directly from container settings and does not require a real database connection - `stopAndCloseContainers` to stop and close SshBastion and JdbcDatabaseContainer at the end of the test - -## Future Work -* Add unit / integration testing for `ssh` package. -* Restructure spec so that instead of having `SSH Key Authentication` or `Password Authentication` options for `tunnel_method`, just have an `SSH` option and then within that `SSH` option have a `oneOf` for password or key. This is blocked because we cannot use `oneOf`s nested in `oneOf`s. -* Improve the process of acceptance testing by allowing doing acceptance testing using a bastion running in a docker container instead of having to use dedicated infrastructure and a static database. diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/NamingConventionTransformer.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/NamingConventionTransformer.java deleted file mode 100644 index 7fadf5c3c8be6..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/NamingConventionTransformer.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination; - -/** - * Destination have their own Naming conventions (which characters are valid or rejected in - * identifiers names) This class transform a random string used to a valid identifier names for each - * specific destination. - */ -public interface NamingConventionTransformer { - - /** - * Handle Naming Conversions of an input name to output a valid identifier name for the desired - * destination. - * - * @param name of the identifier to check proper naming conventions - * @return modified name with invalid characters replaced by '_' and adapted for the chosen - * destination. - */ - String getIdentifier(String name); - - /** - * Handle naming conversions of an input name to output a valid namespace for the desired - * destination. - */ - String getNamespace(String namespace); - - /** - * Same as getIdentifier but returns also the name of the table for storing raw data - * - * @param name of the identifier to check proper naming conventions - * @return modified name with invalid characters replaced by '_' and adapted for the chosen - * destination. - * - * @deprecated as this is very SQL specific, prefer using getIdentifier instead - */ - @Deprecated - String getRawTableName(String name); - - /** - * Same as getIdentifier but returns also the name of the table for storing tmp data - * - * @param name of the identifier to check proper naming conventions - * @return modified name with invalid characters replaced by '_' and adapted for the chosen - * destination. - * - * @deprecated as this is very SQL specific, prefer using getIdentifier instead - */ - @Deprecated - String getTmpTableName(String name); - - default String getTmpTableName(final String streamName, final String randomSuffix) { - return getTmpTableName(streamName); - } - - String convertStreamName(final String input); - - String applyDefaultCase(final String input); - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/StandardNameTransformer.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/StandardNameTransformer.java deleted file mode 100644 index cc9c2dc4cd15b..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/StandardNameTransformer.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.string.Strings; -import io.airbyte.commons.text.Names; -import io.airbyte.commons.util.MoreIterators; -import java.util.HashMap; -import java.util.Map; -import java.util.stream.Collectors; - -public class StandardNameTransformer implements NamingConventionTransformer { - - private static final String NON_JSON_PATH_CHARACTERS_PATTERN = "['\"`]"; - - @Override - public String getIdentifier(final String name) { - return convertStreamName(name); - } - - /** - * Most destinations have the same naming requirement for namespace and stream names. - */ - @Override - public String getNamespace(final String namespace) { - return convertStreamName(namespace); - } - - @Override - // @Deprecated see https://github.com/airbytehq/airbyte/issues/35333 - public String getRawTableName(final String streamName) { - return convertStreamName("_airbyte_raw_" + streamName); - } - - @Override - public String getTmpTableName(final String streamName) { - return convertStreamName(Strings.addRandomSuffix("_airbyte_tmp", "_", 3) + "_" + streamName); - } - - @Override - public String getTmpTableName(final String streamName, final String randomSuffix) { - return convertStreamName("_airbyte_tmp" + "_" + randomSuffix + "_" + streamName); - } - - @Override - public String convertStreamName(final String input) { - return Names.toAlphanumericAndUnderscore(input); - } - - @Override - public String applyDefaultCase(final String input) { - return input; - } - - /** - * Rebuild a JsonNode adding sanitized property names (a subset of special characters replaced by - * underscores) while keeping original property names too. This is needed by some destinations as - * their json extract functions have limitations on how such special characters are parsed. These - * naming rules may be different to schema/table/column naming conventions. - */ - public static JsonNode formatJsonPath(final JsonNode root) { - if (root.isObject()) { - final Map properties = new HashMap<>(); - final var keys = Jsons.keys(root); - for (final var key : keys) { - final JsonNode property = root.get(key); - // keep original key - properties.put(key, formatJsonPath(property)); - } - for (final var key : keys) { - final JsonNode property = root.get(key); - final String formattedKey = key.replaceAll(NON_JSON_PATH_CHARACTERS_PATTERN, "_"); - if (!properties.containsKey(formattedKey)) { - // duplicate property in a formatted key to be extracted in normalization - properties.put(formattedKey, formatJsonPath(property)); - } - } - return Jsons.jsonNode(properties); - } else if (root.isArray()) { - return Jsons.jsonNode(MoreIterators.toList(root.elements()).stream() - .map(StandardNameTransformer::formatJsonPath) - .collect(Collectors.toList())); - } else { - return root; - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/StreamSyncSummary.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/StreamSyncSummary.java deleted file mode 100644 index d4a76c862ac76..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/StreamSyncSummary.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination; - -import java.util.Optional; - -/** - * @param recordsWritten The number of records written to the stream, or empty if the caller does - * not track this information. (this is primarily for backwards-compatibility with the legacy - * destinations framework; new implementations should always provide this information). If - * this value is empty, consumers should assume that the sync wrote nonzero records for this - * stream. - */ -public record StreamSyncSummary(Optional recordsWritten) { - - public static final StreamSyncSummary DEFAULT = new StreamSyncSummary(Optional.empty()); - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/BufferedStreamConsumer.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/BufferedStreamConsumer.java deleted file mode 100644 index b4cdd9bd73ee3..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/BufferedStreamConsumer.java +++ /dev/null @@ -1,332 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.buffered_stream_consumer; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; -import com.google.common.base.Strings; -import io.airbyte.cdk.integrations.base.AirbyteMessageConsumer; -import io.airbyte.cdk.integrations.base.FailureTrackingAirbyteMessageConsumer; -import io.airbyte.cdk.integrations.destination.dest_state_lifecycle_manager.DefaultDestStateLifecycleManager; -import io.airbyte.cdk.integrations.destination.dest_state_lifecycle_manager.DestStateLifecycleManager; -import io.airbyte.cdk.integrations.destination.record_buffer.BufferFlushType; -import io.airbyte.cdk.integrations.destination.record_buffer.BufferingStrategy; -import io.airbyte.commons.functional.CheckedFunction; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteMessage.Type; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.time.Duration; -import java.time.Instant; -import java.util.HashMap; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.function.Consumer; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * This class consumes AirbyteMessages from the worker. - * - *

- * Record Messages: It adds record messages to a buffer. Under 2 conditions, it will flush the - * records in the buffer to a temporary table in the destination. Condition 1: The buffer fills up - * (the buffer is designed to be small enough as not to exceed the memory of the container). - * Condition 2: On close. - *

- * - *

- * State Messages: This consumer tracks the last state message it has accepted. It also tracks the - * last state message that was committed to the temporary table. For now, we only emit a message if - * everything is successful. Once checkpointing is turned on, we will emit the state message as long - * as the onClose successfully commits any messages to the raw table. - *

- * - *

- * All other message types are ignored. - *

- * - *

- * Throughout the lifecycle of the consumer, messages get promoted from buffered to flushed to - * committed. A record message when it is received is immediately buffered. When the buffer fills - * up, all buffered records are flushed out of memory using the user-provided recordBuffer. When - * this flush happens, a state message is moved from pending to flushed. On close, if the - * user-provided onClose function is successful, then the flushed state record is considered - * committed and is then emitted. We expect this class to only ever emit either 1 state message (in - * the case of a full or partial success) or 0 state messages (in the case where the onClose step - * was never reached or did not complete without exception). - *

- * - *

- * When a record is "flushed" it is moved from the docker container to the destination. By - * convention, it is usually placed in some sort of temporary storage on the destination (e.g. a - * temporary database or file store). The logic in close handles committing the temporary - * representation data to the final store (e.g. final table). In the case of staging destinations - * they often have additional temporary stores. The common pattern for staging destination is that - * flush pushes the data into a staging area in cloud storage and then close copies from staging to - * a temporary table AND then copies from the temporary table into the final table. This abstraction - * is blind to the detail of how staging destinations implement their close. - *

- */ -public class BufferedStreamConsumer extends FailureTrackingAirbyteMessageConsumer implements AirbyteMessageConsumer { - - private static final Logger LOGGER = LoggerFactory.getLogger(BufferedStreamConsumer.class); - - private final OnStartFunction onStart; - private final OnCloseFunction onClose; - private final Set streamNames; - private final ConfiguredAirbyteCatalog catalog; - private final CheckedFunction isValidRecord; - private final Map streamToIgnoredRecordCount; - private final Consumer outputRecordCollector; - private final BufferingStrategy bufferingStrategy; - private final DestStateLifecycleManager stateManager; - - private boolean hasStarted; - private boolean hasClosed; - - private Instant nextFlushDeadline; - private final Duration bufferFlushFrequency; - private final String defaultNamespace; - - /** - * Feel free to continue using this in non-1s1t destinations - it may be easier to use. However, - * 1s1t destinations should prefer the version which accepts a {@code defaultNamespace}. - */ - @Deprecated - public BufferedStreamConsumer(final Consumer outputRecordCollector, - final OnStartFunction onStart, - final BufferingStrategy bufferingStrategy, - final OnCloseFunction onClose, - final ConfiguredAirbyteCatalog catalog, - final CheckedFunction isValidRecord) { - this(outputRecordCollector, - onStart, - bufferingStrategy, - onClose, - catalog, - isValidRecord, - Duration.ofMinutes(15), - // This is purely for backwards compatibility. Many older destinations handle this internally. - // Starting with Destinations V2, we recommend passing in an explicit namespace. - null); - } - - public BufferedStreamConsumer(final Consumer outputRecordCollector, - final OnStartFunction onStart, - final BufferingStrategy bufferingStrategy, - final OnCloseFunction onClose, - final ConfiguredAirbyteCatalog catalog, - final CheckedFunction isValidRecord, - final String defaultNamespace) { - this(outputRecordCollector, - onStart, - bufferingStrategy, - onClose, - catalog, - isValidRecord, - Duration.ofMinutes(15), - defaultNamespace); - } - - /* - * NOTE: this is only used for testing purposes, future work would be re-visit if #acceptTracked - * should take in an Instant parameter which would require refactoring all MessageConsumers - */ - @VisibleForTesting - BufferedStreamConsumer(final Consumer outputRecordCollector, - final OnStartFunction onStart, - final BufferingStrategy bufferingStrategy, - final OnCloseFunction onClose, - final ConfiguredAirbyteCatalog catalog, - final CheckedFunction isValidRecord, - final Duration flushFrequency, - final String defaultNamespace) { - this.outputRecordCollector = outputRecordCollector; - this.hasStarted = false; - this.hasClosed = false; - this.onStart = onStart; - this.onClose = onClose; - this.catalog = catalog; - this.streamNames = AirbyteStreamNameNamespacePair.fromConfiguredCatalog(catalog); - this.isValidRecord = isValidRecord; - this.streamToIgnoredRecordCount = new HashMap<>(); - this.bufferingStrategy = bufferingStrategy; - this.stateManager = new DefaultDestStateLifecycleManager(defaultNamespace); - this.bufferFlushFrequency = flushFrequency; - this.defaultNamespace = defaultNamespace; - } - - @Override - protected void startTracked() throws Exception { - // todo (cgardens) - if we reuse this pattern, consider moving it into FailureTrackingConsumer. - Preconditions.checkState(!hasStarted, "Consumer has already been started."); - hasStarted = true; - nextFlushDeadline = Instant.now().plus(bufferFlushFrequency); - streamToIgnoredRecordCount.clear(); - LOGGER.info("{} started.", BufferedStreamConsumer.class); - onStart.call(); - } - - /** - * AcceptTracked will still process AirbyteMessages as usual with the addition of periodically - * flushing buffer and writing data to destination storage - * - * @param message {@link AirbyteMessage} to be processed - * @throws Exception - */ - @Override - protected void acceptTracked(final AirbyteMessage message) throws Exception { - Preconditions.checkState(hasStarted, "Cannot accept records until consumer has started"); - if (message.getType() == Type.RECORD) { - final AirbyteRecordMessage record = message.getRecord(); - if (Strings.isNullOrEmpty(record.getNamespace())) { - record.setNamespace(defaultNamespace); - } - final AirbyteStreamNameNamespacePair stream; - stream = AirbyteStreamNameNamespacePair.fromRecordMessage(record); - - // if stream is not part of list of streams to sync to then throw invalid stream exception - if (!streamNames.contains(stream)) { - throwUnrecognizedStream(catalog, message); - } - - if (!isValidRecord.apply(record.getData())) { - streamToIgnoredRecordCount.put(stream, streamToIgnoredRecordCount.getOrDefault(stream, 0L) + 1L); - return; - } - - final Optional flushType = bufferingStrategy.addRecord(stream, message); - // if present means that a flush occurred - if (flushType.isPresent()) { - if (BufferFlushType.FLUSH_ALL.equals(flushType.get())) { - markStatesAsFlushedToDestination(); - } else if (BufferFlushType.FLUSH_SINGLE_STREAM.equals(flushType.get())) { - if (stateManager.supportsPerStreamFlush()) { - // per-stream instance can handle flush of just a single stream - markStatesAsFlushedToDestination(stream); - } - /* - * We don't mark {@link AirbyteStateMessage} as committed in the case with GLOBAL/LEGACY because - * within a single stream being flushed it is not deterministic that all the AirbyteRecordMessages - * have been committed - */ - } - } - } else if (message.getType() == Type.STATE) { - stateManager.addState(message); - } else { - LOGGER.warn("Unexpected message: " + message.getType()); - } - periodicBufferFlush(); - } - - /** - * After marking states as committed, return the state message to platform then clear state messages - * to avoid resending the same state message to the platform. Also updates the next time a buffer - * flush should occur since it is deterministic that when this method is called all data has been - * successfully committed to destination - */ - private void markStatesAsFlushedToDestination() { - stateManager.markPendingAsCommitted(); - stateManager.listCommitted().forEach(outputRecordCollector); - stateManager.clearCommitted(); - nextFlushDeadline = Instant.now().plus(bufferFlushFrequency); - } - - private void markStatesAsFlushedToDestination(final AirbyteStreamNameNamespacePair stream) { - stateManager.markPendingAsCommitted(stream); - stateManager.listCommitted().forEach(outputRecordCollector); - stateManager.clearCommitted(); - nextFlushDeadline = Instant.now().plus(bufferFlushFrequency); - } - - /** - * Periodically flushes buffered data to destination storage when exceeding flush deadline. Also - * resets the last time a flush occurred - */ - private void periodicBufferFlush() throws Exception { - // When the last time the buffered has been flushed exceed the frequency, flush the current - // buffer before receiving incoming AirbyteMessage - if (Instant.now().isAfter(nextFlushDeadline)) { - LOGGER.info("Periodic buffer flush started"); - try { - bufferingStrategy.flushAllBuffers(); - markStatesAsFlushedToDestination(); - } catch (final Exception e) { - LOGGER.error("Periodic buffer flush failed", e); - throw e; - } - } - } - - private static void throwUnrecognizedStream(final ConfiguredAirbyteCatalog catalog, final AirbyteMessage message) { - throw new IllegalArgumentException( - String.format("Message contained record from a stream that was not in the catalog. \ncatalog: %s , \nmessage: %s", - Jsons.serialize(catalog), Jsons.serialize(message))); - } - - /** - * Cleans up buffer based on whether the sync was successful or some exception occurred. In the case - * where a failure occurred we do a simple clean up any lingering data. Otherwise, flush any - * remaining data that has been stored. This is fine even if the state has not been received since - * this Airbyte promises at least once delivery - * - * @param hasFailed true if the stream replication failed partway through, false otherwise - * @throws Exception - */ - @Override - protected void close(final boolean hasFailed) throws Exception { - Preconditions.checkState(hasStarted, "Cannot close; has not started."); - Preconditions.checkState(!hasClosed, "Has already closed."); - hasClosed = true; - - streamToIgnoredRecordCount - .forEach((pair, count) -> LOGGER.warn("A total of {} record(s) of data from stream {} were invalid and were ignored.", count, pair)); - if (hasFailed) { - LOGGER.error("executing on failed close procedure."); - } else { - LOGGER.info("executing on success close procedure."); - // When flushing the buffer, this will call the respective #flushBufferFunction which bundles - // the flush and commit operation, so if successful then mark state as committed - bufferingStrategy.flushAllBuffers(); - markStatesAsFlushedToDestination(); - } - bufferingStrategy.close(); - - try { - /* - * TODO: (ryankfu) Remove usage of hasFailed with onClose after all destination connectors have been - * updated to support checkpointing - * - * flushed is empty in 2 cases: 1. either it is full refresh (no state is emitted necessarily) 2. it - * is stream but no states were flushed in both of these cases, if there was a failure, we should - * not bother committing. otherwise attempt to commit - */ - if (stateManager.listFlushed().isEmpty()) { - // Not updating this class to track record count, because we want to kill it in favor of the - // AsyncStreamConsumer - onClose.accept(hasFailed, new HashMap<>()); - } else { - /* - * if any state message was flushed that means we should try to commit what we have. if - * hasFailed=false, then it could be full success. if hasFailed=true, then going for partial - * success. - */ - onClose.accept(false, null); - } - - stateManager.listCommitted().forEach(outputRecordCollector); - } catch (final Exception e) { - LOGGER.error("Close failed.", e); - throw e; - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/CheckAndRemoveRecordWriter.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/CheckAndRemoveRecordWriter.java deleted file mode 100644 index 4a48ef00a1e62..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/CheckAndRemoveRecordWriter.java +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.buffered_stream_consumer; - -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; - -@FunctionalInterface -public interface CheckAndRemoveRecordWriter { - - /** - * Compares the name of the current staging file with the method argument. If the names are - * different, then the staging writer corresponding to `stagingFileName` is closed and the name of - * the new file where the record will be sent will be returned. - */ - String apply(AirbyteStreamNameNamespacePair stream, String stagingFileName) throws Exception; - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/OnCloseFunction.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/OnCloseFunction.java deleted file mode 100644 index 39c4da662a885..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/OnCloseFunction.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.buffered_stream_consumer; - -import io.airbyte.cdk.integrations.destination.StreamSyncSummary; -import io.airbyte.commons.functional.CheckedBiConsumer; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.util.Map; - -/** - * Interface allowing destination to specify clean up logic that must be executed after all - * record-related logic has finished. - *

- * The map of StreamSyncSummaries MUST be non-null, but MAY be empty. Streams not present in the map - * will be treated as equivalent to {@link StreamSyncSummary#DEFAULT}. - */ -public interface OnCloseFunction extends CheckedBiConsumer, Exception> { - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/OnStartFunction.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/OnStartFunction.java deleted file mode 100644 index e13b95dcda687..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/OnStartFunction.java +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.buffered_stream_consumer; - -import io.airbyte.commons.concurrency.VoidCallable; - -public interface OnStartFunction extends VoidCallable { - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/RecordSizeEstimator.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/RecordSizeEstimator.java deleted file mode 100644 index 5c7e244238764..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/RecordSizeEstimator.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.buffered_stream_consumer; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import java.util.HashMap; -import java.util.Map; - -/** - * This class estimate the byte size of the record message. To reduce memory footprint, 1) it - * assumes that a character is always four bytes, and 2) it only performs a sampling every N - * records. The size of the samples are averaged together to protect the estimation against - * outliers. - */ -public class RecordSizeEstimator { - - // by default, perform one estimation for every 20 records - private static final int DEFAULT_SAMPLE_BATCH_SIZE = 20; - - // latest estimated record message size for each stream - private final Map streamRecordSizeEstimation; - // number of record messages until next real sampling for each stream - private final Map streamSampleCountdown; - // number of record messages - private final int sampleBatchSize; - - /** - * The estimator will perform a real calculation once per sample batch. The size of the batch is - * determined by {@code sampleBatchSize}. - */ - public RecordSizeEstimator(final int sampleBatchSize) { - streamRecordSizeEstimation = new HashMap<>(); - streamSampleCountdown = new HashMap<>(); - this.sampleBatchSize = sampleBatchSize; - } - - public RecordSizeEstimator() { - this(DEFAULT_SAMPLE_BATCH_SIZE); - } - - public long getEstimatedByteSize(final AirbyteRecordMessage record) { - final String stream = record.getStream(); - final Integer countdown = streamSampleCountdown.get(stream); - - // this is a new stream; initialize its estimation - if (countdown == null) { - final long byteSize = getStringByteSize(record.getData()); - streamRecordSizeEstimation.put(stream, byteSize); - streamSampleCountdown.put(stream, sampleBatchSize - 1); - return byteSize; - } - - // this stream needs update; compute a new estimation - if (countdown <= 0) { - final long prevMeanByteSize = streamRecordSizeEstimation.get(stream); - final long currentByteSize = getStringByteSize(record.getData()); - final long newMeanByteSize = prevMeanByteSize / 2 + currentByteSize / 2; - streamRecordSizeEstimation.put(stream, newMeanByteSize); - streamSampleCountdown.put(stream, sampleBatchSize - 1); - return newMeanByteSize; - } - - // this stream does not need update; return current estimation - streamSampleCountdown.put(stream, countdown - 1); - return streamRecordSizeEstimation.get(stream); - } - - @VisibleForTesting - public static long getStringByteSize(final JsonNode data) { - // assume UTF-8 encoding, and each char is 4 bytes long - return Jsons.serialize(data).length() * 4L; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/RecordWriter.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/RecordWriter.java deleted file mode 100644 index e9ed108718229..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/RecordWriter.java +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.buffered_stream_consumer; - -import io.airbyte.commons.functional.CheckedBiConsumer; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import java.util.List; - -public interface RecordWriter extends CheckedBiConsumer, Exception> { - - @Override - void accept(AirbyteStreamNameNamespacePair stream, List records) throws Exception; - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/StreamDateFormatter.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/StreamDateFormatter.java deleted file mode 100644 index ad486abb991bb..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/StreamDateFormatter.java +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.buffered_stream_consumer; - -import io.airbyte.protocol.models.v0.AirbyteMessage; - -/** - * Allows specifying transformation logic from Airbyte Json to String. - */ -public interface StreamDateFormatter { - - String getFormattedDate(AirbyteMessage message); - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DefaultDestStateLifecycleManager.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DefaultDestStateLifecycleManager.java deleted file mode 100644 index 23b2ac33495b1..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DefaultDestStateLifecycleManager.java +++ /dev/null @@ -1,139 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.dest_state_lifecycle_manager; - -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteMessage.Type; -import io.airbyte.protocol.models.v0.AirbyteStateMessage.AirbyteStateType; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import java.util.Queue; -import java.util.function.Supplier; - -/** - * Detects the type of the state being received by anchoring on the first state type it sees. Fail - * if receives states of multiple types--each instance of this class can only support state messages - * of one type. The protocol specifies that a source should emit state messages of a single type - * during a sync, so a single instance of this manager is sufficient for a destination to track - * state during a sync. - * - *

- * Strategy: Delegates state messages of each type to a StateManager that is appropriate to that - * state type. - *

- * - *

- * Per the protocol, if state type is not set, assumes the LEGACY state type. - *

- */ -public class DefaultDestStateLifecycleManager implements DestStateLifecycleManager { - - private AirbyteStateType stateType; - private final Supplier internalStateManagerSupplier; - - public DefaultDestStateLifecycleManager(final String defaultNamespace) { - this(new DestSingleStateLifecycleManager(), new DestStreamStateLifecycleManager(defaultNamespace)); - } - - @VisibleForTesting - DefaultDestStateLifecycleManager(final DestStateLifecycleManager singleStateManager, final DestStateLifecycleManager streamStateManager) { - stateType = null; - // allows us to delegate calls to the appropriate underlying state manager. - internalStateManagerSupplier = () -> { - if (stateType == AirbyteStateType.GLOBAL || stateType == AirbyteStateType.LEGACY || stateType == null) { - return singleStateManager; - } else if (stateType == AirbyteStateType.STREAM) { - return streamStateManager; - } else { - throw new IllegalArgumentException("unrecognized state type"); - } - }; - } - - @Override - public void addState(final AirbyteMessage message) { - Preconditions.checkArgument(message.getType() == Type.STATE, "Messages passed to State Manager must be of type STATE."); - Preconditions.checkArgument(isStateTypeCompatible(stateType, message.getState().getType())); - - setManagerStateTypeIfNotSet(message); - - internalStateManagerSupplier.get().addState(message); - } - - /** - * Given the type of previously recorded state by the state manager, determines if a newly added - * state message's type is compatible. Based on the previously set state type, determines if a new - * one is compatible. If the previous state is null, any new state is compatible. If new state type - * is null, it should be treated as LEGACY. Thus, previousStateType == LEGACY and newStateType == - * null IS compatible. All other state types are compatible based on equality. - * - * @param previousStateType - state type previously recorded by the state manager - * @param newStateType - state message of a newly added message - * @return true if compatible, otherwise false - */ - private static boolean isStateTypeCompatible(final AirbyteStateType previousStateType, final AirbyteStateType newStateType) { - return previousStateType == null || previousStateType == AirbyteStateType.LEGACY && newStateType == null || previousStateType == newStateType; - } - - /** - * If the state type for the manager is not set, sets it using the state type from the message. If - * the type on the message is null, we assume it is LEGACY. After the first, state message is added - * to the manager, the state type is set and is immutable. - * - * @param message - state message whose state will be used if internal state type is not set - */ - private void setManagerStateTypeIfNotSet(final AirbyteMessage message) { - // detect and set state type. - if (stateType == null) { - if (message.getState().getType() == null) { - stateType = AirbyteStateType.LEGACY; - } else { - stateType = message.getState().getType(); - } - } - } - - @Override - public void markPendingAsFlushed() { - internalStateManagerSupplier.get().markPendingAsFlushed(); - } - - @Override - public Queue listFlushed() { - return internalStateManagerSupplier.get().listFlushed(); - } - - @Override - public void markFlushedAsCommitted() { - internalStateManagerSupplier.get().markFlushedAsCommitted(); - } - - @Override - public void markPendingAsCommitted() { - internalStateManagerSupplier.get().markPendingAsCommitted(); - } - - @Override - public void markPendingAsCommitted(final AirbyteStreamNameNamespacePair stream) { - internalStateManagerSupplier.get().markPendingAsCommitted(stream); - } - - @Override - public void clearCommitted() { - internalStateManagerSupplier.get().clearCommitted(); - } - - @Override - public Queue listCommitted() { - return internalStateManagerSupplier.get().listCommitted(); - } - - @Override - public boolean supportsPerStreamFlush() { - return internalStateManagerSupplier.get().supportsPerStreamFlush(); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DestSingleStateLifecycleManager.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DestSingleStateLifecycleManager.java deleted file mode 100644 index 39158a3d31447..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DestSingleStateLifecycleManager.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.dest_state_lifecycle_manager; - -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import java.util.Collections; -import java.util.LinkedList; -import java.util.List; -import java.util.Queue; - -/** - * This {@link DestStateLifecycleManager} handles any state where there is a guarantee that any - * single state message represents the state for the ENTIRE connection. At the time of writing, - * GLOBAL and LEGACY state types are the state type that match this pattern. - * - *

- * Does NOT store duplicates. Because each state message represents the entire state for the - * connection, it only stores (and emits) the LAST state it received at each phase. - *

- */ -public class DestSingleStateLifecycleManager implements DestStateLifecycleManager { - - private AirbyteMessage lastPendingState; - private AirbyteMessage lastFlushedState; - private AirbyteMessage lastCommittedState; - - @Override - public void addState(final AirbyteMessage message) { - lastPendingState = message; - } - - @VisibleForTesting - Queue listPending() { - return stateMessageToQueue(lastPendingState); - } - - @Override - public void markPendingAsFlushed() { - if (lastPendingState != null) { - lastFlushedState = lastPendingState; - lastPendingState = null; - } - } - - @Override - public Queue listFlushed() { - return stateMessageToQueue(lastFlushedState); - } - - @Override - public void markFlushedAsCommitted() { - if (lastFlushedState != null) { - lastCommittedState = lastFlushedState; - lastFlushedState = null; - } - } - - @Override - public void clearCommitted() { - lastCommittedState = null; - } - - @Override - public void markPendingAsCommitted() { - if (lastPendingState != null) { - lastCommittedState = lastPendingState; - lastPendingState = null; - } - } - - @Override - public void markPendingAsCommitted(final AirbyteStreamNameNamespacePair stream) { - // We declare supportsPerStreamFlush as false, so this method should never be called. - throw new IllegalStateException("Committing a single stream state is not supported for this state type."); - } - - @Override - public Queue listCommitted() { - return stateMessageToQueue(lastCommittedState); - } - - private static Queue stateMessageToQueue(final AirbyteMessage stateMessage) { - return new LinkedList<>(stateMessage == null ? Collections.emptyList() : List.of(stateMessage)); - } - - @Override - public boolean supportsPerStreamFlush() { - return false; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DestStateLifecycleManager.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DestStateLifecycleManager.java deleted file mode 100644 index 183b84a1ccf06..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DestStateLifecycleManager.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.dest_state_lifecycle_manager; - -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import java.util.Queue; - -/** - * This class manages the lifecycle of state message. It tracks state messages that are in 3 states: - *
    - *
  1. pending - associated records have been accepted by the connector but has NOT been pushed to - * the destination
  2. - *
  3. flushed - associated records have been flushed to tmp storage in the destination but have NOT - * been committed
  4. - *
  5. committed - associated records have been committed
  6. - *
- * - */ -public interface DestStateLifecycleManager { - - /** - * Accepts a state into the manager. The state starts in a pending state. - * - * @param message - airbyte message of type state - */ - void addState(AirbyteMessage message); - - /** - * Moves any tracked state messages that are currently pending to flushed. - * - * @Deprecated since destination checkpointing will be bundling flush & commit into the same - * operation - */ - void markPendingAsFlushed(); - - /** - * List all tracked state messages that are flushed. - * - * @return list of state messages - */ - Queue listFlushed(); - - /** - * Moves any tracked state messages that are currently flushed to committed. - * - * @Deprecated since destination checkpointing will be bundling flush and commit into the same - * operation - */ - void markFlushedAsCommitted(); - - /** - * Clears any committed state messages, this is called after returning the state message to the - * platform. The rationale behind this logic is to avoid returning duplicated state messages that - * would otherwise be held in the `committed` state - */ - void clearCommitted(); - - /** - * Moves any tracked state messages that are currently pending to committed. - * - * Note: that this is skipping "flushed" state since flushed meant that this was using a staging - * area to hold onto files, for the changes with checkpointing this step is skipped. It follows - * under the guiding principle that destination needs to commit - * {@link io.airbyte.protocol.models.AirbyteRecordMessage} more frequently to checkpoint. The new - * transaction logic will be: - * - * Buffer -(flush)-> Staging (Blob Storage) -(commit to airbyte_raw)-> Destination table - */ - void markPendingAsCommitted(); - - /** - * Mark all pending states for the given stream as committed. - */ - void markPendingAsCommitted(AirbyteStreamNameNamespacePair stream); - - /** - * List all tracked state messages that are committed. - * - * @return list of state messages - */ - Queue listCommitted(); - - boolean supportsPerStreamFlush(); - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DestStreamStateLifecycleManager.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DestStreamStateLifecycleManager.java deleted file mode 100644 index 68f93d5f44f82..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DestStreamStateLifecycleManager.java +++ /dev/null @@ -1,170 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.dest_state_lifecycle_manager; - -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteStateMessage.AirbyteStateType; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.util.Comparator; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Queue; -import java.util.stream.Collectors; - -/** - * This {@link DestStateLifecycleManager} handles any state where the state messages are scoped by - * stream. In these cases, at each state of the process, it tracks the LAST state message for EACH - * stream (no duplicates!). - * - *

- * Guaranteed to output state messages in order relative to other messages of the SAME state. Does - * NOT guarantee that state messages of different streams will be output in the order in which they - * were received. State messages across streams will be emitted in alphabetical order (primary sort - * on namespace, secondary on name). - *

- */ -public class DestStreamStateLifecycleManager implements DestStateLifecycleManager { - - private final Map streamToLastPendingState; - private final Map streamToLastFlushedState; - private final Map streamToLastCommittedState; - private final String defaultNamespace; - - public DestStreamStateLifecycleManager(final String defaultNamespace) { - this.defaultNamespace = defaultNamespace; - streamToLastPendingState = new HashMap<>(); - streamToLastFlushedState = new HashMap<>(); - streamToLastCommittedState = new HashMap<>(); - } - - @Override - public void addState(final AirbyteMessage message) { - Preconditions.checkArgument(message.getState().getType() == AirbyteStateType.STREAM); - final StreamDescriptor originalStreamId = message.getState().getStream().getStreamDescriptor(); - final StreamDescriptor actualStreamId; - final String namespace = originalStreamId.getNamespace(); - if (namespace == null || namespace.isEmpty()) { - // If the state's namespace is null/empty, we need to be able to find it using the default namespace - // (because many destinations actually set records' namespace to the default namespace before - // they make it into this class). - // Clone the streamdescriptor so that we don't modify the original state message. - actualStreamId = new StreamDescriptor() - .withName(originalStreamId.getName()) - .withNamespace(defaultNamespace); - } else { - actualStreamId = originalStreamId; - } - streamToLastPendingState.put(actualStreamId, message); - } - - @VisibleForTesting - Queue listPending() { - return listStatesInOrder(streamToLastPendingState); - } - - /* - * Similar to #markFlushedAsCommmitted, this method should no longer be used to align with the - * changes to destination checkpointing where flush/commit operations will be bundled - */ - @Deprecated - @Override - public void markPendingAsFlushed() { - moveToNextPhase(streamToLastPendingState, streamToLastFlushedState); - } - - @Override - public Queue listFlushed() { - return listStatesInOrder(streamToLastFlushedState); - } - - /* - * During the process of migration to destination checkpointing, this method should no longer be in - * use in favor of #markPendingAsCommitted where states will be flushed/committed as a singular - * transaction - */ - @Deprecated - @Override - public void markFlushedAsCommitted() { - moveToNextPhase(streamToLastFlushedState, streamToLastCommittedState); - } - - @Override - public void clearCommitted() { - streamToLastCommittedState.clear(); - } - - @Override - public void markPendingAsCommitted() { - moveToNextPhase(streamToLastPendingState, streamToLastCommittedState); - } - - @Override - public void markPendingAsCommitted(final AirbyteStreamNameNamespacePair stream) { - // streamToLastCommittedState is keyed using defaultNamespace instead of namespace=null. (see - // #addState) - // Many destinations actually modify the records' namespace immediately after reading them from - // stdin, - // but we should have a null-check here just in case. - final String actualNamespace = stream.getNamespace() == null ? defaultNamespace : stream.getNamespace(); - final StreamDescriptor sd = new StreamDescriptor().withName(stream.getName()).withNamespace(actualNamespace); - final AirbyteMessage lastPendingState = streamToLastPendingState.remove(sd); - if (lastPendingState != null) { - streamToLastCommittedState.put(sd, lastPendingState); - } - } - - @Override - public Queue listCommitted() { - return listStatesInOrder(streamToLastCommittedState); - } - - @Override - public boolean supportsPerStreamFlush() { - return true; - } - - /** - * Lists out the states in the stream to state maps. Guarantees a deterministic sort order, which is - * handy because we are going from a map (unsorted) to a queue. The sort order primary sort on - * namespace (with null at the top) followed by secondary sort on name. This maps onto the pretty - * common order that we list streams elsewhere. - * - * @param streamToState - map of stream descriptor to its last state - * @return queue with the states ordered per the sort mentioned above - */ - private static Queue listStatesInOrder(final Map streamToState) { - return streamToState - .entrySet() - .stream() - // typically, we support by namespace and then stream name, so we retain that pattern here. - .sorted(Comparator - ., String>comparing( - entry -> entry.getKey().getNamespace(), - Comparator.nullsFirst(Comparator.naturalOrder())) // namespace is allowed to be null - .thenComparing(entry -> entry.getKey().getName())) - .map(Entry::getValue) - .collect(Collectors.toCollection(LinkedList::new)); - } - - /** - * Moves all state messages from previous phase into next phase. - * - * @param prevPhase - map of stream to state messages for previous phase that will be moved to next - * phase. when this method returns this map will be empty. - * @param nextPhase - map into which state messages from prevPhase will be added. - */ - private static void moveToNextPhase(final Map prevPhase, final Map nextPhase) { - if (!prevPhase.isEmpty()) { - nextPhase.putAll(prevPhase); - prevPhase.clear(); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/SqlOperations.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/SqlOperations.java deleted file mode 100644 index 14d17485588a9..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/SqlOperations.java +++ /dev/null @@ -1,137 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.jdbc; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.db.jdbc.JdbcDatabase; -import io.airbyte.cdk.integrations.destination_async.partial_messages.PartialAirbyteMessage; -import java.util.List; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * SQL queries required for successfully syncing to a destination connector. These operations - * include the ability to: - *
    - *
  • Write - insert records from source connector
  • - *
  • Create - overloaded function but primarily to create tables if they don't exist (e.g. tmp - * tables to "stage" records before finalizing to final table
  • - *
  • Drop - removes a table from the schema
  • - *
  • Insert - move data from one table to another table - usually used for inserting data from tmp - * to final table (aka airbyte_raw)
  • - *
- */ -public interface SqlOperations { - - Logger LOGGER = LoggerFactory.getLogger(SqlOperations.class); - - /** - * Create a schema with provided name if it does not already exist. - * - * @param database Database that the connector is syncing - * @param schemaName Name of schema. - * @throws Exception exception - */ - void createSchemaIfNotExists(JdbcDatabase database, String schemaName) throws Exception; - - /** - * Denotes whether the schema exists in destination database - * - * @param database Database that the connector is syncing - * @param schemaName Name of schema. - * @return true if the schema exists in destination database, false if it doesn't - */ - default boolean isSchemaExists(final JdbcDatabase database, final String schemaName) throws Exception { - return false; - } - - /** - * Create a table with provided name in provided schema if it does not already exist. - * - * @param database Database that the connector is syncing - * @param schemaName Name of schema - * @param tableName Name of table - * @throws Exception exception - */ - void createTableIfNotExists(JdbcDatabase database, String schemaName, String tableName) throws Exception; - - /** - * Query to create a table with provided name in provided schema if it does not already exist. - * - * @param database Database that the connector is syncing - * @param schemaName Name of schema - * @param tableName Name of table - * @return query - */ - String createTableQuery(JdbcDatabase database, String schemaName, String tableName); - - /** - * Drop the table if it exists. - * - * @param schemaName Name of schema - * @param tableName Name of table - * @throws Exception exception - */ - void dropTableIfExists(JdbcDatabase database, String schemaName, String tableName) throws Exception; - - /** - * Query to remove all records from a table. Assumes the table exists. - * - * @param database Database that the connector is syncing - * @param schemaName Name of schema - * @param tableName Name of table - * @return Query - */ - String truncateTableQuery(JdbcDatabase database, String schemaName, String tableName); - - /** - * Insert records into table. Assumes the table exists. - * - * @param database Database that the connector is syncing - * @param records Records to insert. - * @param schemaName Name of schema - * @param tableName Name of table - * @throws Exception exception - */ - void insertRecords(JdbcDatabase database, List records, String schemaName, String tableName) throws Exception; - - /** - * Query to insert all records from source table to destination table. Both tables must be in the - * specified schema. Assumes both table exist. - * - *

- * NOTE: this is an append-only operation meaning that data can be duplicated - *

- * - * @param database Database that the connector is syncing - * @param schemaName Name of schema - * @param sourceTableName Name of source table - * @param destinationTableName Name of destination table - * @return SQL Query string - */ - String insertTableQuery(JdbcDatabase database, String schemaName, String sourceTableName, String destinationTableName); - - /** - * Given an arbitrary number of queries, execute a transaction. - * - * @param database Database that the connector is syncing - * @param queries Queries to execute - * @throws Exception exception - */ - void executeTransaction(JdbcDatabase database, List queries) throws Exception; - - /** - * Check if the data record is valid and ok to be written to destination - */ - boolean isValidData(final JsonNode data); - - /** - * Denotes whether the destination has the concept of schema or not - * - * @return true if the destination supports schema (ex: Postgres), false if it doesn't(MySQL) - */ - boolean isSchemaRequired(); - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/StagingFilenameGenerator.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/StagingFilenameGenerator.java deleted file mode 100644 index be12c50249b88..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/StagingFilenameGenerator.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.jdbc; - -import static io.airbyte.cdk.integrations.destination.jdbc.constants.GlobalDataSizeConstants.MAX_FILE_SIZE; - -/** - * The staging file is uploaded to cloud storage in multiple parts. This class keeps track of the - * filename, and returns a new one when the old file has had enough parts. - */ -public class StagingFilenameGenerator { - - private final String streamName; - - // the file suffix will change after the max number of file - // parts have been generated for the current suffix; - // its value starts from 0. - private int currentFileSuffix = 0; - // the number of parts that have been generated for the current - // file suffix; its value range will be [1, maxPartsPerFile] - private int currentFileSuffixPartCount = 0; - - // This variable is responsible to set the size of chunks size (In MB). After chunks created in - // S3 or GCS they will be uploaded to Snowflake or Redshift. These service have some limitations for - // the uploading file. - // So we make the calculation to determine how many parts we can put to the single chunk file. - private final long iterations; - - /** - * @param streamName - the name of table will be processed - * @param chunkSize - the number of optimal chunk size for the service. - */ - public StagingFilenameGenerator(final String streamName, final long chunkSize) { - this.streamName = streamName; - this.iterations = MAX_FILE_SIZE / chunkSize; - } - - /** - * This method is assumed to be called whenever one part of a file is going to be created. The - * currentFileSuffix increments from 0. The currentFileSuffixPartCount cycles from 1 to - * maxPartsPerFile. - */ - public String getStagingFilename() { - if (currentFileSuffixPartCount < iterations) { - // when the number of parts for the file has not reached the max, - // keep using the same file (i.e. keep the suffix) - currentFileSuffixPartCount += 1; - } else { - // otherwise, reset the part counter, and use a different file - // (i.e. update the suffix) - currentFileSuffix += 1; - currentFileSuffixPartCount = 1; - } - return String.format("%s_%05d", streamName, currentFileSuffix); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/constants/GlobalDataSizeConstants.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/constants/GlobalDataSizeConstants.java deleted file mode 100644 index 16f83a9a0955c..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/constants/GlobalDataSizeConstants.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.jdbc.constants; - -import io.aesy.datasize.ByteUnit.IEC; -import io.aesy.datasize.DataSize; - -public interface GlobalDataSizeConstants { - - /** 25 MB to BYTES as comparison will be done in BYTES */ - int DEFAULT_MAX_BATCH_SIZE_BYTES = DataSize.of(25L, IEC.MEBIBYTE).toUnit(IEC.BYTE).getValue().intValue(); - /** - * This constant determines the max possible size of file(e.g. 100 MB / 25 megabytes ≈ 4 chunks of - * file) see StagingFilenameGenerator.java:28 - */ - long MAX_FILE_SIZE = DataSize.of(100L, IEC.MEBIBYTE).toUnit(IEC.BYTE).getValue().longValue(); - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/copy/StreamCopier.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/copy/StreamCopier.java deleted file mode 100644 index a00446457a04e..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/copy/StreamCopier.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.jdbc.copy; - -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import java.util.UUID; - -/** - * StreamCopier is responsible for writing to a staging persistence and providing methods to remove - * the staged data. - */ -public interface StreamCopier { - - /** - * Writes a value to a staging file for the stream. - */ - void write(UUID id, AirbyteRecordMessage recordMessage, String fileName) throws Exception; - - /** - * Closes the writer for the stream to the current staging file. The staging file must be of a - * certain size specified in GlobalDataSizeConstants + one more buffer. The writer for the stream - * will close with a note that no errors were found. - */ - void closeNonCurrentStagingFileWriters() throws Exception; - - /** - * Closes the writer for the stream to the staging persistence. This method should block until all - * buffered data has been written to the persistence. - */ - void closeStagingUploader(boolean hasFailed) throws Exception; - - /** - * Creates a temporary table in the target database. - */ - void createTemporaryTable() throws Exception; - - /** - * Copies the staging file to the temporary table. This method should block until the copy/upload - * has completed. - */ - void copyStagingFileToTemporaryTable() throws Exception; - - /** - * Creates the destination schema if it does not already exist. - */ - void createDestinationSchema() throws Exception; - - /** - * Creates the destination table if it does not already exist. - * - * @return the name of the destination table - */ - String createDestinationTable() throws Exception; - - /** - * Generates a merge SQL statement from the temporary table to the final table. - */ - String generateMergeStatement(String destTableName) throws Exception; - - /** - * Cleans up the copier by removing the staging file and dropping the temporary table after - * completion or failure. - */ - void removeFileAndDropTmpTable() throws Exception; - - /** - * Creates the staging file and all the necessary items to write data to this file. - * - * @return A string that unqiuely identifies the file. E.g. the filename, or a unique suffix that is - * appended to a shared filename prefix - */ - String prepareStagingFile(); - - /** - * @return current staging file name - */ - String getCurrentFile(); - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/copy/StreamCopierFactory.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/copy/StreamCopierFactory.java deleted file mode 100644 index adb4ff3c9b60e..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/copy/StreamCopierFactory.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.jdbc.copy; - -import io.airbyte.cdk.db.jdbc.JdbcDatabase; -import io.airbyte.cdk.integrations.destination.StandardNameTransformer; -import io.airbyte.cdk.integrations.destination.jdbc.SqlOperations; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; - -public interface StreamCopierFactory { - - StreamCopier create(String configuredSchema, - T config, - String stagingFolder, - ConfiguredAirbyteStream configuredStream, - StandardNameTransformer nameTransformer, - JdbcDatabase db, - SqlOperations sqlOperations); - - static String getSchema(final String namespace, final String configuredSchema, final StandardNameTransformer nameTransformer) { - if (namespace != null) { - return nameTransformer.convertStreamName(namespace); - } else { - return nameTransformer.convertStreamName(configuredSchema); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/normalization/NormalizationLogParser.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/normalization/NormalizationLogParser.java deleted file mode 100644 index 80940bcf612bf..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/normalization/NormalizationLogParser.java +++ /dev/null @@ -1,153 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.normalization; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.cdk.integrations.destination.normalization.SentryExceptionHelper.ErrorMapKeys; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.AirbyteErrorTraceMessage; -import io.airbyte.protocol.models.AirbyteErrorTraceMessage.FailureType; -import io.airbyte.protocol.models.AirbyteLogMessage; -import io.airbyte.protocol.models.AirbyteLogMessage.Level; -import io.airbyte.protocol.models.AirbyteMessage; -import io.airbyte.protocol.models.AirbyteMessage.Type; -import io.airbyte.protocol.models.AirbyteTraceMessage; -import java.io.BufferedReader; -import java.io.InputStreamReader; -import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.stream.Stream; -import org.apache.logging.log4j.util.Strings; - -/** - * A simple wrapper for base-normalization logs. Reads messages off of stdin and sticks them into - * appropriate AirbyteMessages (log or trace), then dumps those messages to stdout - *

- * does mostly the same thing as - * {@link io.airbyte.workers.normalization.NormalizationAirbyteStreamFactory}. That class is not - * actively developed, and will be deleted after all destinations run normalization in-connector. - *

- * Aggregates all error logs and emits them as a single trace message at the end. If the underlying - * process emits any trace messages, they are passed through immediately. - */ -public class NormalizationLogParser { - - private final List dbtErrors = new ArrayList<>(); - - public Stream create(final BufferedReader bufferedReader) { - return bufferedReader.lines().flatMap(this::toMessages); - } - - public List getDbtErrors() { - return dbtErrors; - } - - @VisibleForTesting - Stream toMessages(final String line) { - if (Strings.isEmpty(line)) { - return Stream.of(logMessage(Level.INFO, "")); - } - final Optional json = Jsons.tryDeserializeWithoutWarn(line); - if (json.isPresent()) { - return jsonToMessage(json.get()); - } else { - return nonJsonLineToMessage(line); - } - } - - /** - * Wrap the line in an AirbyteLogMessage, and do very naive dbt error log detection. - *

- * This is needed for dbt < 1.0.0, which don't support json-format logs. - */ - private Stream nonJsonLineToMessage(final String line) { - // Super hacky thing to try and detect error lines - if (line.contains("[error]")) { - dbtErrors.add(line); - } - return Stream.of(logMessage(Level.INFO, line)); - } - - /** - * There are two cases here: Either the json is already an AirbyteMessage (and we should just emit - * it without change), or it's dbt json log, and we need to do some extra work to convert it to a - * log message + aggregate error logs. - */ - private Stream jsonToMessage(final JsonNode jsonLine) { - final Optional message = Jsons.tryObject(jsonLine, AirbyteMessage.class); - if (message.isPresent()) { - // This line is already an AirbyteMessage; we can just return it directly - // (these messages come from the transform_config / transform_catalog scripts) - return message.stream(); - } else { - /* - * This line is a JSON-format dbt log. We need to extract the message and wrap it in a logmessage - * And if it's an error, we also need to collect it into dbtErrors. Example log message, formatted - * for readability: { "code": "A001", "data": { "v": "=1.0.9" }, "invocation_id": - * "3f9a0b9f-9623-4c25-8708-1f6ae851e738", "level": "info", "log_version": 1, "msg": - * "Running with dbt=1.0.9", "node_info": {}, "pid": 65, "thread_name": "MainThread", "ts": - * "2023-04-12T21:03:23.079315Z", "type": "log_line" } - */ - final String logLevel = (jsonLine.hasNonNull("level")) ? jsonLine.get("level").asText() : ""; - String logMsg = jsonLine.hasNonNull("msg") ? jsonLine.get("msg").asText() : ""; - final Level level; - switch (logLevel) { - case "debug" -> level = Level.DEBUG; - case "info" -> level = Level.INFO; - case "warn" -> level = Level.WARN; - case "error" -> { - // This is also not _amazing_, but we make the assumption that all error logs should be emitted in - // the trace message - // In practice, this seems to be a valid assumption. - level = Level.ERROR; - dbtErrors.add(logMsg); - } - default -> { - level = Level.INFO; - logMsg = jsonLine.toPrettyString(); - } - } - return Stream.of(logMessage(level, logMsg)); - } - } - - private static AirbyteMessage logMessage(final Level level, final String message) { - return new AirbyteMessage() - .withType(Type.LOG) - .withLog(new AirbyteLogMessage() - .withLevel(level) - .withMessage(message)); - } - - public static void main(final String[] args) { - final NormalizationLogParser normalizationLogParser = new NormalizationLogParser(); - final Stream airbyteMessageStream = - normalizationLogParser.create(new BufferedReader(new InputStreamReader(System.in, StandardCharsets.UTF_8))); - airbyteMessageStream.forEachOrdered(message -> System.out.println(Jsons.serialize(message))); - - final List errors = normalizationLogParser.getDbtErrors(); - final String dbtErrorStack = String.join("\n", errors); - if (!"".equals(dbtErrorStack)) { - final Map errorMap = SentryExceptionHelper.getUsefulErrorMessageAndTypeFromDbtError(dbtErrorStack); - final String internalMessage = errorMap.get(ErrorMapKeys.ERROR_MAP_MESSAGE_KEY); - final AirbyteMessage traceMessage = new AirbyteMessage() - .withType(Type.TRACE) - .withTrace(new AirbyteTraceMessage() - .withType(AirbyteTraceMessage.Type.ERROR) - .withEmittedAt((double) System.currentTimeMillis()) - .withError(new AirbyteErrorTraceMessage() - .withFailureType(FailureType.SYSTEM_ERROR) - .withMessage("Normalization failed during the dbt run. This may indicate a problem with the data itself.") - .withStackTrace("AirbyteDbtError: \n" + dbtErrorStack) - .withInternalMessage(internalMessage))); - System.out.println(Jsons.serialize(traceMessage)); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/normalization/SentryExceptionHelper.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/normalization/SentryExceptionHelper.java deleted file mode 100644 index 9c875bf011c53..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/normalization/SentryExceptionHelper.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.normalization; - -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * This is copied out of platform - * (https://github.com/airbytehq/airbyte-platform/blob/main/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/errorreporter/SentryExceptionHelper.java#L257) - */ -public class SentryExceptionHelper { - - private static final Logger LOGGER = LoggerFactory.getLogger(SentryExceptionHelper.class); - - /** - * Keys to known error types. - */ - public enum ErrorMapKeys { - ERROR_MAP_MESSAGE_KEY, - ERROR_MAP_TYPE_KEY - } - - public static Map getUsefulErrorMessageAndTypeFromDbtError(final String stacktrace) { - // the dbt 'stacktrace' is really just all the log messages at 'error' level, stuck together. - // therefore there is not a totally consistent structure to these, - // see the docs: https://docs.getdbt.com/guides/legacy/debugging-errors - // the logic below is built based on the ~450 unique dbt errors we encountered before this PR - // and is a best effort to isolate the useful part of the error logs for debugging and grouping - // and bring some semblance of exception 'types' to differentiate between errors. - final Map errorMessageAndType = new HashMap<>(); - final String[] stacktraceLines = stacktrace.split("\n"); - - boolean defaultNextLine = false; - // TODO: this whole code block is quite ugh, commented to try and make each part clear but could be - // much more readable. - mainLoop: for (int i = 0; i < stacktraceLines.length; i++) { - // This order is important due to how these errors can co-occur. - // This order attempts to keep error definitions consistent based on our observations of possible - // dbt error structures. - try { - // Database Errors - if (stacktraceLines[i].contains("Database Error in model")) { - // Database Error : SQL compilation error - if (stacktraceLines[i + 1].contains("SQL compilation error")) { - errorMessageAndType.put(ErrorMapKeys.ERROR_MAP_MESSAGE_KEY, - String.format("%s %s", stacktraceLines[i + 1].trim(), stacktraceLines[i + 2].trim())); - errorMessageAndType.put(ErrorMapKeys.ERROR_MAP_TYPE_KEY, "DbtDatabaseSQLCompilationError"); - break; - // Database Error: Invalid input - } else if (stacktraceLines[i + 1].contains("Invalid input")) { - for (final String followingLine : Arrays.copyOfRange(stacktraceLines, i + 1, stacktraceLines.length)) { - if (followingLine.trim().startsWith("context:")) { - errorMessageAndType.put(ErrorMapKeys.ERROR_MAP_MESSAGE_KEY, - String.format("%s\n%s", stacktraceLines[i + 1].trim(), followingLine.trim())); - errorMessageAndType.put(ErrorMapKeys.ERROR_MAP_TYPE_KEY, "DbtDatabaseInvalidInputError"); - break mainLoop; - } - } - // Database Error: Syntax error - } else if (stacktraceLines[i + 1].contains("syntax error at or near \"")) { - errorMessageAndType.put(ErrorMapKeys.ERROR_MAP_MESSAGE_KEY, - String.format("%s\n%s", stacktraceLines[i + 1].trim(), stacktraceLines[i + 2].trim())); - errorMessageAndType.put(ErrorMapKeys.ERROR_MAP_TYPE_KEY, "DbtDatabaseSyntaxError"); - break; - // Database Error: default - } else { - errorMessageAndType.put(ErrorMapKeys.ERROR_MAP_TYPE_KEY, "DbtDatabaseError"); - defaultNextLine = true; - } - // Unhandled Error - } else if (stacktraceLines[i].contains("Unhandled error while executing model")) { - errorMessageAndType.put(ErrorMapKeys.ERROR_MAP_TYPE_KEY, "DbtUnhandledError"); - defaultNextLine = true; - // Compilation Errors - } else if (stacktraceLines[i].contains("Compilation Error")) { - // Compilation Error: Ambiguous Relation - if (stacktraceLines[i + 1].contains("When searching for a relation, dbt found an approximate match.")) { - errorMessageAndType.put(ErrorMapKeys.ERROR_MAP_MESSAGE_KEY, - String.format("%s %s", stacktraceLines[i + 1].trim(), stacktraceLines[i + 2].trim())); - errorMessageAndType.put(ErrorMapKeys.ERROR_MAP_TYPE_KEY, "DbtCompilationAmbiguousRelationError"); - break; - // Compilation Error: default - } else { - errorMessageAndType.put(ErrorMapKeys.ERROR_MAP_TYPE_KEY, "DbtCompilationError"); - defaultNextLine = true; - } - // Runtime Errors - } else if (stacktraceLines[i].contains("Runtime Error")) { - // Runtime Error: Database error - for (final String followingLine : Arrays.copyOfRange(stacktraceLines, i + 1, stacktraceLines.length)) { - if ("Database Error".equals(followingLine.trim())) { - errorMessageAndType.put(ErrorMapKeys.ERROR_MAP_MESSAGE_KEY, - String.format("%s", stacktraceLines[Arrays.stream(stacktraceLines).toList().indexOf(followingLine) + 1].trim())); - errorMessageAndType.put(ErrorMapKeys.ERROR_MAP_TYPE_KEY, "DbtRuntimeDatabaseError"); - break mainLoop; - } - } - // Runtime Error: default - errorMessageAndType.put(ErrorMapKeys.ERROR_MAP_TYPE_KEY, "DbtRuntimeError"); - defaultNextLine = true; - // Database Error: formatted differently, catch last to avoid counting other types of errors as - // Database Error - } else if ("Database Error".equals(stacktraceLines[i].trim())) { - errorMessageAndType.put(ErrorMapKeys.ERROR_MAP_TYPE_KEY, "DbtDatabaseError"); - defaultNextLine = true; - } - // handle the default case without repeating code - if (defaultNextLine) { - errorMessageAndType.put(ErrorMapKeys.ERROR_MAP_MESSAGE_KEY, stacktraceLines[i + 1].trim()); - break; - } - } catch (final ArrayIndexOutOfBoundsException e) { - // this means our logic is slightly off, our assumption of where error lines are is incorrect - LOGGER.warn("Failed trying to parse useful error message out of dbt error, defaulting to full stacktrace"); - } - } - if (errorMessageAndType.isEmpty()) { - // For anything we haven't caught, just return full stacktrace - errorMessageAndType.put(ErrorMapKeys.ERROR_MAP_MESSAGE_KEY, stacktrace); - errorMessageAndType.put(ErrorMapKeys.ERROR_MAP_TYPE_KEY, "AirbyteDbtError"); - } - return errorMessageAndType; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/record_buffer/BaseSerializedBuffer.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/record_buffer/BaseSerializedBuffer.java deleted file mode 100644 index 38f9510309510..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/record_buffer/BaseSerializedBuffer.java +++ /dev/null @@ -1,209 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.record_buffer; - -import com.google.common.io.CountingOutputStream; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import java.io.File; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream; -import org.apache.commons.io.FileUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Base implementation of a {@link SerializableBuffer}. It is composed of a {@link BufferStorage} - * where the actual data is being stored in a serialized format. - *

- * Such data format is defined by concrete implementation inheriting from this base abstract class. - * To do so, necessary methods on handling "writer" methods should be defined. This writer would - * take care of converting {@link AirbyteRecordMessage} into the serialized form of the data such as - * it can be stored in the outputStream of the {@link BufferStorage}. - */ -public abstract class BaseSerializedBuffer implements SerializableBuffer { - - private static final Logger LOGGER = LoggerFactory.getLogger(BaseSerializedBuffer.class); - private static final String GZ_SUFFIX = ".gz"; - - private final BufferStorage bufferStorage; - private final CountingOutputStream byteCounter; - - private boolean useCompression; - private GzipCompressorOutputStream compressedBuffer; - private InputStream inputStream; - private boolean isStarted; - private boolean isClosed; - - protected BaseSerializedBuffer(final BufferStorage bufferStorage) throws Exception { - this.bufferStorage = bufferStorage; - byteCounter = new CountingOutputStream(bufferStorage.getOutputStream()); - useCompression = true; - compressedBuffer = null; - inputStream = null; - isStarted = false; - isClosed = false; - } - - /** - * Initializes the writer objects such that it can now write to the downstream @param outputStream - */ - protected abstract void initWriter(OutputStream outputStream) throws Exception; - - /** - * Transform the @param record into a serialized form of the data and writes it to the registered - * OutputStream provided when {@link BaseSerializedBuffer#initWriter} was called. - */ - @Deprecated - protected abstract void writeRecord(AirbyteRecordMessage record) throws IOException; - - /** - * TODO: (ryankfu) move destination to use serialized record string instead of passing entire - * AirbyteRecord - * - * @param recordString serialized record - * @param emittedAt timestamp of the record in milliseconds - * @throws IOException - */ - protected void writeRecord(final String recordString, final long emittedAt) throws IOException { - writeRecord(Jsons.deserialize(recordString, AirbyteRecordMessage.class).withEmittedAt(emittedAt)); - } - - /** - * Stops the writer from receiving new data and prepares it for being finalized and converted into - * an InputStream to read from instead. This is used when flushing the buffer into some other - * destination. - */ - protected abstract void flushWriter() throws IOException; - - protected abstract void closeWriter() throws IOException; - - public SerializableBuffer withCompression(final boolean useCompression) { - if (!isStarted) { - this.useCompression = useCompression; - return this; - } - throw new RuntimeException("Options should be configured before starting to write"); - } - - @Override - public long accept(final AirbyteRecordMessage record) throws Exception { - if (!isStarted) { - if (useCompression) { - compressedBuffer = new GzipCompressorOutputStream(byteCounter); - initWriter(compressedBuffer); - } else { - initWriter(byteCounter); - } - isStarted = true; - } - if (inputStream == null && !isClosed) { - final long startCount = byteCounter.getCount(); - writeRecord(record); - return byteCounter.getCount() - startCount; - } else { - throw new IllegalCallerException("Buffer is already closed, it cannot accept more messages"); - } - } - - @Override - public long accept(final String recordString, final long emittedAt) throws Exception { - if (!isStarted) { - if (useCompression) { - compressedBuffer = new GzipCompressorOutputStream(byteCounter); - initWriter(compressedBuffer); - } else { - initWriter(byteCounter); - } - isStarted = true; - } - if (inputStream == null && !isClosed) { - final long startCount = byteCounter.getCount(); - writeRecord(recordString, emittedAt); - return byteCounter.getCount() - startCount; - } else { - throw new IllegalCallerException("Buffer is already closed, it cannot accept more messages"); - } - } - - @Override - public String getFilename() throws IOException { - if (useCompression && !bufferStorage.getFilename().endsWith(GZ_SUFFIX)) { - return bufferStorage.getFilename() + GZ_SUFFIX; - } - return bufferStorage.getFilename(); - } - - @Override - public File getFile() throws IOException { - if (useCompression && !bufferStorage.getFilename().endsWith(GZ_SUFFIX)) { - if (bufferStorage.getFile().renameTo(new File(bufferStorage.getFilename() + GZ_SUFFIX))) { - LOGGER.info("Renaming compressed file to include .gz file extension"); - } - } - return bufferStorage.getFile(); - } - - protected InputStream convertToInputStream() throws IOException { - return bufferStorage.convertToInputStream(); - } - - @Override - public InputStream getInputStream() { - return inputStream; - } - - @Override - public void flush() throws IOException { - if (inputStream == null && !isClosed) { - flushWriter(); - if (compressedBuffer != null) { - LOGGER.debug("Wrapping up compression and write GZIP trailer data."); - compressedBuffer.flush(); - compressedBuffer.close(); - } - closeWriter(); - bufferStorage.close(); - inputStream = convertToInputStream(); - LOGGER.info("Finished writing data to {} ({})", getFilename(), FileUtils.byteCountToDisplaySize(byteCounter.getCount())); - } - } - - @Override - public long getByteCount() { - return byteCounter.getCount(); - } - - @Override - public void close() throws Exception { - if (!isClosed) { - // inputStream can be null if the accept method encounters - // an error before inputStream is initialized - if (inputStream != null) { - inputStream.close(); - } - bufferStorage.deleteFile(); - isClosed = true; - } - } - - @Override - public long getMaxTotalBufferSizeInBytes() { - return bufferStorage.getMaxTotalBufferSizeInBytes(); - } - - @Override - public long getMaxPerStreamBufferSizeInBytes() { - return bufferStorage.getMaxPerStreamBufferSizeInBytes(); - } - - @Override - public int getMaxConcurrentStreamsInBuffer() { - return bufferStorage.getMaxConcurrentStreamsInBuffer(); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/record_buffer/BufferCreateFunction.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/record_buffer/BufferCreateFunction.java deleted file mode 100644 index 27dd99307e864..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/record_buffer/BufferCreateFunction.java +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.record_buffer; - -import io.airbyte.commons.functional.CheckedBiFunction; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; - -public interface BufferCreateFunction extends - CheckedBiFunction { - - @Override - SerializableBuffer apply(AirbyteStreamNameNamespacePair stream, ConfiguredAirbyteCatalog configuredCatalog) - throws Exception; - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/record_buffer/BufferFlushType.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/record_buffer/BufferFlushType.java deleted file mode 100644 index 05fcd08f1a957..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/record_buffer/BufferFlushType.java +++ /dev/null @@ -1,10 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.record_buffer; - -public enum BufferFlushType { - FLUSH_ALL, - FLUSH_SINGLE_STREAM -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/record_buffer/BufferStorage.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/record_buffer/BufferStorage.java deleted file mode 100644 index 4deab7d9c3642..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/record_buffer/BufferStorage.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.record_buffer; - -import java.io.File; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; - -/** - * This interface abstract the actual object that is used to store incoming data being buffered. It - * could be a file, in-memory or some other objects. - * - * However, in order to be used as part of the {@link SerializableBuffer}, this - * {@link BufferStorage} should implement some methods used to determine how to write into and read - * from the storage once we are done buffering - * - * Some easy methods for manipulating the storage viewed as a file or InputStream are therefore - * required. - * - * Depending on the implementation of the storage medium, it would also determine what storage - * limits are possible. - */ -public interface BufferStorage { - - /** - * Builds a new outputStream on which to write the data for storage. - */ - OutputStream getOutputStream() throws IOException; - - String getFilename() throws IOException; - - File getFile() throws IOException; - - /** - * Once buffering has reached some limits, the storage stream should be turned into an InputStream. - * This method should assume we are not going to write to buffer anymore, and it is safe to convert - * to some other format to be read from now. - */ - InputStream convertToInputStream() throws IOException; - - void close() throws IOException; - - /** - * Cleans-up any file that was produced in the process of buffering (if any were produced) - */ - void deleteFile() throws IOException; - - /* - * Depending on the implementation of the storage, methods below defined reasonable thresholds - * associated with using this kind of buffer storage. - * - * These could also be dynamically configured/tuned at runtime if needed (from user input for - * example?) - */ - - /** - * @return How much storage should be used overall by all buffers - */ - long getMaxTotalBufferSizeInBytes(); - - /** - * @return How much storage should be used for a particular stream at a time before flushing it - */ - long getMaxPerStreamBufferSizeInBytes(); - - /** - * @return How many concurrent buffers can be handled at once in parallel - */ - int getMaxConcurrentStreamsInBuffer(); - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/record_buffer/BufferingStrategy.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/record_buffer/BufferingStrategy.java deleted file mode 100644 index 1f640d2f8ceb6..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/record_buffer/BufferingStrategy.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.record_buffer; - -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import java.util.Optional; - -/** - * High-level interface used by - * {@link io.airbyte.cdk.integrations.destination.buffered_stream_consumer.BufferedStreamConsumer} - * - * A Record buffering strategy relies on the capacity available of underlying - * {@link SerializableBuffer} to determine what to do when consuming a new {@link AirbyteMessage} - * into the buffer. It also defines when to flush such buffers and how to empty them once they fill - * up. - * - */ -public interface BufferingStrategy extends AutoCloseable { - - /** - * Add a new message to the buffer while consuming streams, also handles when a buffer flush when - * buffer has been filled - * - * @param stream stream associated with record - * @param message {@link AirbyteMessage} to be added to the buffer - * @return an optional value if a flushed occur with the respective flush type, otherwise an empty - * value means only a record was added - * @throws Exception throw on failure - */ - Optional addRecord(AirbyteStreamNameNamespacePair stream, AirbyteMessage message) throws Exception; - - /** - * Flush buffered messages in a buffer from a particular stream - */ - void flushSingleBuffer(AirbyteStreamNameNamespacePair stream, SerializableBuffer buffer) throws Exception; - - /** - * Flush all buffers that were buffering message data so far. - */ - void flushAllBuffers() throws Exception; - - /** - * Removes all stream buffers. - */ - void clear() throws Exception; - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/record_buffer/FileBuffer.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/record_buffer/FileBuffer.java deleted file mode 100644 index d26b5bb098141..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/record_buffer/FileBuffer.java +++ /dev/null @@ -1,128 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.record_buffer; - -import java.io.BufferedOutputStream; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.nio.file.Files; -import java.util.UUID; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class FileBuffer implements BufferStorage { - - private static final Logger LOGGER = LoggerFactory.getLogger(FileBuffer.class); - - // The per stream size limit is following recommendations from: - // https://docs.snowflake.com/en/user-guide/data-load-considerations-prepare.html#general-file-sizing-recommendations - // "To optimize the number of parallel operations for a load, - // we recommend aiming to produce data files roughly 100-250 MB (or larger) in size compressed." - public static final long MAX_PER_STREAM_BUFFER_SIZE_BYTES = 200 * 1024 * 1024; // 200 MB - /* - * Other than the per-file size limit, we also limit the total size (which would limit how many - * concurrent streams we can buffer simultaneously too) Since this class is storing data on disk, - * the buffer size limits below are tied to the necessary disk storage space. - */ - public static final long MAX_TOTAL_BUFFER_SIZE_BYTES = 1024 * 1024 * 1024; // 1 GB - /* - * We limit number of stream being buffered simultaneously anyway (limit how many files are - * stored/open for writing) - * - * Note: This value can be tuned to increase performance with the tradeoff of increased memory usage - * (~31 MB per buffer). See {@link StreamTransferManager} - * - * For connections with interleaved data (e.g. Change Data Capture), having less buffers than the - * number of streams being synced will cause buffer thrashing where buffers will need to be flushed - * before another stream's buffer can be created. Increasing the default max will reduce likelihood - * of thrashing but not entirely eliminate unless number of buffers equals streams to be synced - */ - public static final int DEFAULT_MAX_CONCURRENT_STREAM_IN_BUFFER = 10; - public static final String FILE_BUFFER_COUNT_KEY = "file_buffer_count"; - // This max is subject to change as no proper load testing has been done to verify the side effects - public static final int MAX_CONCURRENT_STREAM_IN_BUFFER = 50; - /* - * Use this soft cap as a guidance for customers to not exceed the recommended number of buffers - * which is 1 GB (total buffer size) / 31 MB (rough size of each buffer) ~= 32 buffers - */ - public static final int SOFT_CAP_CONCURRENT_STREAM_IN_BUFFER = 20; - - private final String fileExtension; - private File tempFile; - private OutputStream outputStream; - private final int maxConcurrentStreams; - - public FileBuffer(final String fileExtension) { - this.fileExtension = fileExtension; - this.maxConcurrentStreams = DEFAULT_MAX_CONCURRENT_STREAM_IN_BUFFER; - tempFile = null; - outputStream = null; - } - - public FileBuffer(final String fileExtension, final int maxConcurrentStreams) { - this.fileExtension = fileExtension; - this.maxConcurrentStreams = maxConcurrentStreams; - tempFile = null; - outputStream = null; - } - - @Override - public OutputStream getOutputStream() throws IOException { - if (outputStream == null || tempFile == null) { - tempFile = Files.createTempFile(UUID.randomUUID().toString(), fileExtension).toFile(); - outputStream = new BufferedOutputStream(new FileOutputStream(tempFile)); - } - return outputStream; - } - - @Override - public String getFilename() throws IOException { - return getFile().getName(); - } - - @Override - public File getFile() throws IOException { - if (tempFile == null) { - getOutputStream(); - } - return tempFile; - } - - @Override - public InputStream convertToInputStream() throws IOException { - return new FileInputStream(getFile()); - } - - @Override - public void close() throws IOException { - outputStream.close(); - } - - @Override - public void deleteFile() throws IOException { - LOGGER.info("Deleting tempFile data {}", getFilename()); - Files.deleteIfExists(getFile().toPath()); - } - - @Override - public long getMaxTotalBufferSizeInBytes() { - return MAX_TOTAL_BUFFER_SIZE_BYTES; - } - - @Override - public long getMaxPerStreamBufferSizeInBytes() { - return MAX_PER_STREAM_BUFFER_SIZE_BYTES; - } - - @Override - public int getMaxConcurrentStreamsInBuffer() { - return maxConcurrentStreams; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/record_buffer/FlushBufferFunction.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/record_buffer/FlushBufferFunction.java deleted file mode 100644 index 8d4022754d9d8..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/record_buffer/FlushBufferFunction.java +++ /dev/null @@ -1,15 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.record_buffer; - -import io.airbyte.commons.functional.CheckedBiConsumer; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; - -public interface FlushBufferFunction extends CheckedBiConsumer { - - @Override - void accept(AirbyteStreamNameNamespacePair stream, SerializableBuffer buffer) throws Exception; - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/record_buffer/InMemoryBuffer.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/record_buffer/InMemoryBuffer.java deleted file mode 100644 index 7f178d32a79b4..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/record_buffer/InMemoryBuffer.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.record_buffer; - -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.File; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.nio.file.Files; -import java.util.UUID; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Instead of storing buffered data on disk like the {@link FileBuffer}, this {@link BufferStorage} - * accumulates message data in-memory instead. Thus, a bigger heap size would be required. - */ -public class InMemoryBuffer implements BufferStorage { - - private static final Logger LOGGER = LoggerFactory.getLogger(InMemoryBuffer.class); - - // The per stream size limit is following recommendations from: - // https://docs.snowflake.com/en/user-guide/data-load-considerations-prepare.html#general-file-sizing-recommendations - // "To optimize the number of parallel operations for a load, - // we recommend aiming to produce data files roughly 100-250 MB (or larger) in size compressed." - public static final long MAX_PER_STREAM_BUFFER_SIZE_BYTES = 200 * 1024 * 1024; // 200 MB - // Other than the per-file size limit, we also limit the total size (which would limit how many - // concurrent streams we can buffer simultaneously too) - // Since this class is storing data in memory, the buffer size limits below are tied to the - // necessary RAM space. - public static final long MAX_TOTAL_BUFFER_SIZE_BYTES = 1024 * 1024 * 1024; // 1 GB - // we limit number of stream being buffered simultaneously anyway - public static final int MAX_CONCURRENT_STREAM_IN_BUFFER = 100; - - private final String fileExtension; - private final ByteArrayOutputStream byteBuffer = new ByteArrayOutputStream(); - private File tempFile; - private String filename; - - public InMemoryBuffer(final String fileExtension) { - this.fileExtension = fileExtension; - tempFile = null; - filename = null; - } - - @Override - public OutputStream getOutputStream() { - return byteBuffer; - } - - @Override - public String getFilename() { - if (filename == null) { - filename = UUID.randomUUID().toString(); - } - return filename; - } - - @Override - public File getFile() throws IOException { - if (tempFile == null) { - tempFile = Files.createTempFile(getFilename(), fileExtension).toFile(); - } - return tempFile; - } - - @Override - public InputStream convertToInputStream() { - return new ByteArrayInputStream(byteBuffer.toByteArray()); - } - - @Override - public void close() throws IOException { - byteBuffer.close(); - } - - @Override - public void deleteFile() throws IOException { - if (tempFile != null) { - LOGGER.info("Deleting tempFile data {}", getFilename()); - Files.deleteIfExists(tempFile.toPath()); - } - } - - @Override - public long getMaxTotalBufferSizeInBytes() { - return MAX_TOTAL_BUFFER_SIZE_BYTES; - } - - @Override - public long getMaxPerStreamBufferSizeInBytes() { - return MAX_PER_STREAM_BUFFER_SIZE_BYTES; - } - - @Override - public int getMaxConcurrentStreamsInBuffer() { - return MAX_CONCURRENT_STREAM_IN_BUFFER; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/record_buffer/InMemoryRecordBufferingStrategy.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/record_buffer/InMemoryRecordBufferingStrategy.java deleted file mode 100644 index 932ac18f1ced5..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/record_buffer/InMemoryRecordBufferingStrategy.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.record_buffer; - -import io.airbyte.cdk.integrations.destination.buffered_stream_consumer.CheckAndRemoveRecordWriter; -import io.airbyte.cdk.integrations.destination.buffered_stream_consumer.RecordSizeEstimator; -import io.airbyte.cdk.integrations.destination.buffered_stream_consumer.RecordWriter; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import org.apache.commons.io.FileUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * This is the default implementation of a {@link BufferStorage} to be backward compatible. Data is - * being buffered in a {@link List} as they are being consumed. - * - * This should be deprecated as we slowly move towards using {@link SerializedBufferingStrategy} - * instead. - */ -public class InMemoryRecordBufferingStrategy implements BufferingStrategy { - - private static final Logger LOGGER = LoggerFactory.getLogger(InMemoryRecordBufferingStrategy.class); - - private Map> streamBuffer = new HashMap<>(); - private final RecordWriter recordWriter; - private final CheckAndRemoveRecordWriter checkAndRemoveRecordWriter; - private String fileName; - - private final RecordSizeEstimator recordSizeEstimator; - private final long maxQueueSizeInBytes; - private long bufferSizeInBytes; - - public InMemoryRecordBufferingStrategy(final RecordWriter recordWriter, - final long maxQueueSizeInBytes) { - this(recordWriter, null, maxQueueSizeInBytes); - } - - public InMemoryRecordBufferingStrategy(final RecordWriter recordWriter, - final CheckAndRemoveRecordWriter checkAndRemoveRecordWriter, - final long maxQueueSizeInBytes) { - this.recordWriter = recordWriter; - this.checkAndRemoveRecordWriter = checkAndRemoveRecordWriter; - - this.maxQueueSizeInBytes = maxQueueSizeInBytes; - this.bufferSizeInBytes = 0; - this.recordSizeEstimator = new RecordSizeEstimator(); - } - - @Override - public Optional addRecord(final AirbyteStreamNameNamespacePair stream, final AirbyteMessage message) throws Exception { - Optional flushed = Optional.empty(); - - final long messageSizeInBytes = recordSizeEstimator.getEstimatedByteSize(message.getRecord()); - if (bufferSizeInBytes + messageSizeInBytes > maxQueueSizeInBytes) { - flushAllBuffers(); - flushed = Optional.of(BufferFlushType.FLUSH_ALL); - } - - final List bufferedRecords = streamBuffer.computeIfAbsent(stream, k -> new ArrayList<>()); - bufferedRecords.add(message.getRecord()); - bufferSizeInBytes += messageSizeInBytes; - - return flushed; - } - - @Override - public void flushSingleBuffer(final AirbyteStreamNameNamespacePair stream, final SerializableBuffer buffer) throws Exception { - LOGGER.info("Flushing single stream {}: {} records", stream.getName(), streamBuffer.get(stream).size()); - recordWriter.accept(stream, streamBuffer.get(stream)); - LOGGER.info("Flushing completed for {}", stream.getName()); - } - - @Override - public void flushAllBuffers() throws Exception { - for (final Map.Entry> entry : streamBuffer.entrySet()) { - LOGGER.info("Flushing {}: {} records ({})", entry.getKey().getName(), entry.getValue().size(), - FileUtils.byteCountToDisplaySize(bufferSizeInBytes)); - recordWriter.accept(entry.getKey(), entry.getValue()); - if (checkAndRemoveRecordWriter != null) { - fileName = checkAndRemoveRecordWriter.apply(entry.getKey(), fileName); - } - LOGGER.info("Flushing completed for {}", entry.getKey().getName()); - } - close(); - clear(); - bufferSizeInBytes = 0; - } - - @Override - public void clear() { - streamBuffer = new HashMap<>(); - } - - @Override - public void close() throws Exception {} - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/record_buffer/SerializableBuffer.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/record_buffer/SerializableBuffer.java deleted file mode 100644 index 79477ab5cc5b5..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/record_buffer/SerializableBuffer.java +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.record_buffer; - -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import java.io.File; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.io.InputStream; - -/** - * A {@link SerializableBuffer} is designed to be used as part of a - * {@link SerializedBufferingStrategy}. - * - *

- * It encapsulates the actual implementation of a buffer: both the medium storage (usually defined - * as part of {@link BufferStorage}. and the format of the serialized data when it is written to the - * buffer. - *

- * - *

- * A {@link BaseSerializedBuffer} class is provided, and should be the expected class to derive from - * when implementing a new format of buffer. The storage aspects are normally provided through - * composition of {@link BufferStorage}. - *

- * - */ -public interface SerializableBuffer extends AutoCloseable { - - /** - * Adds a {@link AirbyteRecordMessage} to the buffer and returns the size of the message in bytes - * - * @param record {@link AirbyteRecordMessage} to be added to buffer - * @return number of bytes written to the buffer - */ - @Deprecated - long accept(AirbyteRecordMessage record) throws Exception; - - /** - * TODO: (ryankfu) Move all destination connectors to pass the serialized record string instead of - * the entire AirbyteRecordMessage - * - * @param recordString serialized record - * @param emittedAt timestamp of the record in milliseconds - * @return number of bytes written to the buffer - * @throws Exception - */ - long accept(String recordString, long emittedAt) throws Exception; - - /** - * Flush a buffer implementation. - */ - void flush() throws Exception; - - /** - * The buffer implementation should be keeping track of how many bytes it accumulated so far. If any - * flush events were triggered, the amount of bytes accumulated would also have been decreased - * accordingly. This method @return such statistics. - */ - long getByteCount(); - - /** - * @return the filename representation of this buffer. - */ - String getFilename() throws IOException; - - /** - * @return a temporary representation as a file of this buffer. - */ - File getFile() throws IOException; - - /** - * @return the InputStream to read data back from this buffer once it is done adding messages to it. - */ - InputStream getInputStream() throws FileNotFoundException; - - /* - * Depending on the implementation of the storage, methods below defined reasonable thresholds - * associated with using this kind of buffer implementation. - * - * These could also be dynamically configured/tuned at runtime if needed (from user input for - * example?) - */ - - /** - * @return How much storage should be used overall by all buffers - */ - long getMaxTotalBufferSizeInBytes(); - - /** - * @return How much storage should be used for a particular stream at a time before flushing it - */ - long getMaxPerStreamBufferSizeInBytes(); - - /** - * @return How many concurrent buffers can be handled at once in parallel - */ - int getMaxConcurrentStreamsInBuffer(); - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/record_buffer/SerializedBufferingStrategy.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/record_buffer/SerializedBufferingStrategy.java deleted file mode 100644 index 39ed0fc142352..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/record_buffer/SerializedBufferingStrategy.java +++ /dev/null @@ -1,169 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.record_buffer; - -import io.airbyte.cdk.integrations.util.ConnectorExceptionUtil; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Optional; -import org.apache.commons.io.FileUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Buffering Strategy used to convert {@link io.airbyte.protocol.models.AirbyteRecordMessage} into a - * stream of bytes to more readily save and transmit information - * - *

- * This class is meant to be used in conjunction with {@link SerializableBuffer} - *

- */ -public class SerializedBufferingStrategy implements BufferingStrategy { - - private static final Logger LOGGER = LoggerFactory.getLogger(SerializedBufferingStrategy.class); - - private final BufferCreateFunction onCreateBuffer; - private final FlushBufferFunction onStreamFlush; - - private Map allBuffers = new HashMap<>(); - private long totalBufferSizeInBytes; - private final ConfiguredAirbyteCatalog catalog; - - /** - * Creates instance of Serialized Buffering Strategy used to handle the logic of flushing buffer - * with an associated buffer type - * - * @param onCreateBuffer type of buffer used upon creation - * @param catalog collection of {@link io.airbyte.protocol.models.ConfiguredAirbyteStream} - * @param onStreamFlush buffer flush logic used throughout the streaming of messages - */ - public SerializedBufferingStrategy(final BufferCreateFunction onCreateBuffer, - final ConfiguredAirbyteCatalog catalog, - final FlushBufferFunction onStreamFlush) { - this.onCreateBuffer = onCreateBuffer; - this.catalog = catalog; - this.onStreamFlush = onStreamFlush; - this.totalBufferSizeInBytes = 0; - } - - /** - * Handles both adding records and when buffer is full to also flush - * - * @param stream stream associated with record - * @param message {@link AirbyteMessage} to buffer - * @return Optional which contains a {@link BufferFlushType} if a flush occurred, otherwise empty) - */ - @Override - public Optional addRecord(final AirbyteStreamNameNamespacePair stream, final AirbyteMessage message) throws Exception { - Optional flushed = Optional.empty(); - - final SerializableBuffer buffer = getOrCreateBuffer(stream); - if (buffer == null) { - throw new RuntimeException(String.format("Failed to create/get buffer for stream %s.%s", stream.getNamespace(), stream.getName())); - } - - final long actualMessageSizeInBytes = buffer.accept(message.getRecord()); - totalBufferSizeInBytes += actualMessageSizeInBytes; - // Flushes buffer when either the buffer was completely filled or only a single stream was filled - if (totalBufferSizeInBytes >= buffer.getMaxTotalBufferSizeInBytes() - || allBuffers.size() >= buffer.getMaxConcurrentStreamsInBuffer()) { - flushAllBuffers(); - flushed = Optional.of(BufferFlushType.FLUSH_ALL); - } else if (buffer.getByteCount() >= buffer.getMaxPerStreamBufferSizeInBytes()) { - flushSingleBuffer(stream, buffer); - /* - * Note: This branch is needed to indicate to the {@link DefaultDestStateLifeCycleManager} that an - * individual stream was flushed, there is no guarantee that it will flush records in the same order - * that state messages were received. The outcome here is that records get flushed but our updating - * of which state messages have been flushed falls behind. - * - * This is not ideal from a checkpoint point of view, because it means in the case where there is a - * failure, we will not be able to report that those records that were flushed and committed were - * committed because there corresponding state messages weren't marked as flushed. Thus, it weakens - * checkpointing, but it does not cause a correctness issue. - * - * In non-failure cases, using this conditional branch relies on the state messages getting flushed - * by some other means. That can be caused by the previous branch in this conditional. It is - * guaranteed by the fact that we always flush all state messages at the end of a sync. - */ - flushed = Optional.of(BufferFlushType.FLUSH_SINGLE_STREAM); - } - return flushed; - } - - /** - * Creates a new buffer for each stream if buffers do not already exist, else return already - * computed buffer - */ - private SerializableBuffer getOrCreateBuffer(final AirbyteStreamNameNamespacePair stream) { - return allBuffers.computeIfAbsent(stream, k -> { - LOGGER.info("Starting a new buffer for stream {} (current state: {} in {} buffers)", - stream.getName(), - FileUtils.byteCountToDisplaySize(totalBufferSizeInBytes), - allBuffers.size()); - try { - return onCreateBuffer.apply(stream, catalog); - } catch (final Exception e) { - LOGGER.error("Failed to create a new buffer for stream {}", stream.getName(), e); - throw new RuntimeException(e); - } - }); - } - - @Override - public void flushSingleBuffer(final AirbyteStreamNameNamespacePair stream, final SerializableBuffer buffer) throws Exception { - LOGGER.info("Flushing buffer of stream {} ({})", stream.getName(), FileUtils.byteCountToDisplaySize(buffer.getByteCount())); - onStreamFlush.accept(stream, buffer); - totalBufferSizeInBytes -= buffer.getByteCount(); - allBuffers.remove(stream); - LOGGER.info("Flushing completed for {}", stream.getName()); - } - - @Override - public void flushAllBuffers() throws Exception { - LOGGER.info("Flushing all {} current buffers ({} in total)", allBuffers.size(), FileUtils.byteCountToDisplaySize(totalBufferSizeInBytes)); - for (final Entry entry : allBuffers.entrySet()) { - final AirbyteStreamNameNamespacePair stream = entry.getKey(); - final SerializableBuffer buffer = entry.getValue(); - LOGGER.info("Flushing buffer of stream {} ({})", stream.getName(), FileUtils.byteCountToDisplaySize(buffer.getByteCount())); - onStreamFlush.accept(stream, buffer); - LOGGER.info("Flushing completed for {}", stream.getName()); - } - close(); - clear(); - totalBufferSizeInBytes = 0; - } - - @Override - public void clear() throws Exception { - LOGGER.debug("Reset all buffers"); - allBuffers = new HashMap<>(); - } - - @Override - public void close() throws Exception { - final List exceptionsThrown = new ArrayList<>(); - for (final Entry entry : allBuffers.entrySet()) { - try { - final AirbyteStreamNameNamespacePair stream = entry.getKey(); - LOGGER.info("Closing buffer for stream {}", stream.getName()); - final SerializableBuffer buffer = entry.getValue(); - buffer.close(); - } catch (final Exception e) { - exceptionsThrown.add(e); - LOGGER.error("Exception while closing stream buffer", e); - } - } - - ConnectorExceptionUtil.logAllAndThrowFirst("Exceptions thrown while closing buffers: ", exceptionsThrown); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/staging/StagingOperations.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/staging/StagingOperations.java deleted file mode 100644 index f91fada8b4653..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/staging/StagingOperations.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.staging; - -import io.airbyte.cdk.db.jdbc.JdbcDatabase; -import io.airbyte.cdk.integrations.destination.jdbc.SqlOperations; -import io.airbyte.cdk.integrations.destination.record_buffer.SerializableBuffer; -import java.time.Instant; -import java.util.List; -import java.util.UUID; - -/** - * Staging operations focuses on the SQL queries that are needed to success move data into a staging - * environment like GCS or S3. In general, the reference of staging is the usage of an object - * storage for the purposes of efficiently uploading bulk data to destinations - * - * TODO: This interface is shared between Snowflake and Redshift connectors where the staging - * mechanism is different wire protocol. Make the interface more Generic and have sub interfaces to - * support BlobStorageOperations or Jdbc based staging operations. - */ -public interface StagingOperations extends SqlOperations { - - /** - * @param outputTableName The name of the table this staging file will be loaded into (typically a - * raw table). Not all destinations use the table name in the staging path (e.g. Snowflake - * simply uses a timestamp + UUID), but e.g. Redshift does rely on this to ensure uniqueness. - */ - String getStagingPath(UUID connectionId, String namespace, String streamName, String outputTableName, Instant writeDatetime); - - /** - * Returns the staging environment's name - * - * @param namespace Name of schema - * @param streamName Name of the stream - * @return Fully qualified name of the staging environment - */ - String getStageName(String namespace, String streamName); - - /** - * Create a staging folder where to upload temporary files before loading into the final destination - */ - void createStageIfNotExists(JdbcDatabase database, String stageName) throws Exception; - - /** - * Upload the data file into the stage area. - * - * @param database database used for syncing - * @param recordsData records stored in in-memory buffer - * @param schemaName name of schema - * @param stagingPath path of staging folder to data files - * @return the name of the file that was uploaded. - */ - String uploadRecordsToStage(JdbcDatabase database, SerializableBuffer recordsData, String schemaName, String stageName, String stagingPath) - throws Exception; - - /** - * Load the data stored in the stage area into a temporary table in the destination - * - * @param database database interface - * @param stagingPath path to staging files - * @param stagedFiles collection of staged files - * @param tableName name of table to write staging files to - * @param schemaName name of schema - */ - void copyIntoTableFromStage(JdbcDatabase database, - String stageName, - String stagingPath, - List stagedFiles, - String tableName, - String schemaName) - throws Exception; - - /** - * Delete the stage area and all staged files that was in it - * - * @param database database used for syncing - * @param stageName Name of the staging area used to store files - */ - void dropStageIfExists(JdbcDatabase database, String stageName, String stagingPath) throws Exception; - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/AirbyteFileUtils.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/AirbyteFileUtils.java deleted file mode 100644 index c5d79569a2c35..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/AirbyteFileUtils.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination_async; - -import java.text.DecimalFormat; - -/** - * Replicate the behavior of {@link org.apache.commons.io.FileUtils} to match the proclivities of - * Davin and Charles. Courteously written by ChatGPT. - */ -public class AirbyteFileUtils { - - private static final double ONE_KB = 1024; - private static final double ONE_MB = ONE_KB * 1024; - private static final double ONE_GB = ONE_MB * 1024; - private static final double ONE_TB = ONE_GB * 1024; - private static final DecimalFormat df = new DecimalFormat("#.##"); - - /** - * Replicate the behavior of {@link org.apache.commons.io.FileUtils} but instead of rounding down to - * the nearest whole number, it rounds to two decimal places. - * - * @param sizeInBytes size in bytes - * @return human-readable size - */ - public static String byteCountToDisplaySize(final long sizeInBytes) { - - if (sizeInBytes < ONE_KB) { - return df.format(sizeInBytes) + " bytes"; - } else if (sizeInBytes < ONE_MB) { - return df.format((double) sizeInBytes / ONE_KB) + " KB"; - } else if (sizeInBytes < ONE_GB) { - return df.format((double) sizeInBytes / ONE_MB) + " MB"; - } else if (sizeInBytes < ONE_TB) { - return df.format((double) sizeInBytes / ONE_GB) + " GB"; - } else { - return df.format((double) sizeInBytes / ONE_TB) + " TB"; - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/AsyncStreamConsumer.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/AsyncStreamConsumer.java deleted file mode 100644 index 711326fd919b0..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/AsyncStreamConsumer.java +++ /dev/null @@ -1,252 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination_async; - -import static java.util.stream.Collectors.toMap; - -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; -import com.google.common.base.Strings; -import io.airbyte.cdk.integrations.base.SerializedAirbyteMessageConsumer; -import io.airbyte.cdk.integrations.destination.StreamSyncSummary; -import io.airbyte.cdk.integrations.destination.buffered_stream_consumer.OnStartFunction; -import io.airbyte.cdk.integrations.destination_async.buffers.BufferEnqueue; -import io.airbyte.cdk.integrations.destination_async.buffers.BufferManager; -import io.airbyte.cdk.integrations.destination_async.partial_messages.PartialAirbyteMessage; -import io.airbyte.cdk.integrations.destination_async.state.FlushFailure; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteMessage.Type; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.atomic.AtomicLong; -import java.util.function.Consumer; -import lombok.extern.slf4j.Slf4j; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Async version of the - * {@link io.airbyte.cdk.integrations.destination.buffered_stream_consumer.BufferedStreamConsumer}. - *

- * With this consumer, a destination is able to continue reading records until hitting the maximum - * memory limit governed by {@link GlobalMemoryManager}. Record writing is decoupled via - * {@link FlushWorkers}. See the other linked class for more detail. - */ -@Slf4j -public class AsyncStreamConsumer implements SerializedAirbyteMessageConsumer { - - private static final Logger LOGGER = LoggerFactory.getLogger(AsyncStreamConsumer.class); - - private final OnStartFunction onStart; - private final OnCloseFunction onClose; - private final ConfiguredAirbyteCatalog catalog; - private final BufferManager bufferManager; - private final BufferEnqueue bufferEnqueue; - private final FlushWorkers flushWorkers; - private final Set streamNames; - private final FlushFailure flushFailure; - private final String defaultNamespace; - // Note that this map will only be populated for streams with nonzero records. - private final ConcurrentMap recordCounts; - - private boolean hasStarted; - private boolean hasClosed; - private boolean hasFailed = false; - // This is to account for the references when deserialization to a PartialAirbyteMessage. The - // calculation is as follows: - // PartialAirbyteMessage (4) + Max( PartialRecordMessage(4), PartialStateMessage(6)) with - // PartialStateMessage being larger with more nested objects within it. Using 8 bytes as we assumed - // a 64 bit JVM. - final int PARTIAL_DESERIALIZE_REF_BYTES = 10 * 8; - - public AsyncStreamConsumer(final Consumer outputRecordCollector, - final OnStartFunction onStart, - final OnCloseFunction onClose, - final DestinationFlushFunction flusher, - final ConfiguredAirbyteCatalog catalog, - final BufferManager bufferManager, - final String defaultNamespace) { - this(outputRecordCollector, onStart, onClose, flusher, catalog, bufferManager, new FlushFailure(), defaultNamespace); - } - - public AsyncStreamConsumer(final Consumer outputRecordCollector, - final OnStartFunction onStart, - final OnCloseFunction onClose, - final DestinationFlushFunction flusher, - final ConfiguredAirbyteCatalog catalog, - final BufferManager bufferManager, - final String defaultNamespace, - final ExecutorService workerPool) { - this(outputRecordCollector, onStart, onClose, flusher, catalog, bufferManager, new FlushFailure(), defaultNamespace, workerPool); - } - - @VisibleForTesting - public AsyncStreamConsumer(final Consumer outputRecordCollector, - final OnStartFunction onStart, - final OnCloseFunction onClose, - final DestinationFlushFunction flusher, - final ConfiguredAirbyteCatalog catalog, - final BufferManager bufferManager, - final FlushFailure flushFailure, - final String defaultNamespace, - final ExecutorService workerPool) { - this.defaultNamespace = defaultNamespace; - hasStarted = false; - hasClosed = false; - - this.onStart = onStart; - this.onClose = onClose; - this.catalog = catalog; - this.bufferManager = bufferManager; - bufferEnqueue = bufferManager.getBufferEnqueue(); - this.flushFailure = flushFailure; - flushWorkers = - new FlushWorkers(bufferManager.getBufferDequeue(), flusher, outputRecordCollector, flushFailure, bufferManager.getStateManager(), workerPool); - streamNames = StreamDescriptorUtils.fromConfiguredCatalog(catalog); - this.recordCounts = new ConcurrentHashMap<>(); - } - - @VisibleForTesting - public AsyncStreamConsumer(final Consumer outputRecordCollector, - final OnStartFunction onStart, - final OnCloseFunction onClose, - final DestinationFlushFunction flusher, - final ConfiguredAirbyteCatalog catalog, - final BufferManager bufferManager, - final FlushFailure flushFailure, - final String defaultNamespace) { - this(outputRecordCollector, onStart, onClose, flusher, catalog, bufferManager, flushFailure, defaultNamespace, Executors.newFixedThreadPool(5)); - } - - @Override - public void start() throws Exception { - Preconditions.checkState(!hasStarted, "Consumer has already been started."); - hasStarted = true; - - flushWorkers.start(); - - LOGGER.info("{} started.", AsyncStreamConsumer.class); - onStart.call(); - } - - @Override - public void accept(final String messageString, final Integer sizeInBytes) throws Exception { - Preconditions.checkState(hasStarted, "Cannot accept records until consumer has started"); - propagateFlushWorkerExceptionIfPresent(); - /* - * intentionally putting extractStream outside the buffer manager so that if in the future we want - * to try to use a thread pool to partially deserialize to get record type and stream name, we can - * do it without touching buffer manager. - */ - final var message = deserializeAirbyteMessage(messageString); - if (Type.RECORD.equals(message.getType())) { - if (Strings.isNullOrEmpty(message.getRecord().getNamespace())) { - message.getRecord().setNamespace(defaultNamespace); - } - validateRecord(message); - - getRecordCounter(message.getRecord().getStreamDescriptor()).incrementAndGet(); - } - bufferEnqueue.addRecord(message, sizeInBytes + PARTIAL_DESERIALIZE_REF_BYTES, defaultNamespace); - } - - /** - * Deserializes to a {@link PartialAirbyteMessage} which can represent both a Record or a State - * Message - * - * PartialAirbyteMessage holds either: - *

  • entire serialized message string when message is a valid State Message - *
  • serialized AirbyteRecordMessage when message is a valid Record Message
  • - * - * @param messageString the string to deserialize - * @return PartialAirbyteMessage if the message is valid, empty otherwise - */ - @VisibleForTesting - public static PartialAirbyteMessage deserializeAirbyteMessage(final String messageString) { - // TODO: (ryankfu) plumb in the serialized AirbyteStateMessage to match AirbyteRecordMessage code - // parity. https://github.com/airbytehq/airbyte/issues/27530 for additional context - final var partial = Jsons.tryDeserializeExact(messageString, PartialAirbyteMessage.class) - .orElseThrow(() -> new RuntimeException("Unable to deserialize PartialAirbyteMessage.")); - - final var msgType = partial.getType(); - if (Type.RECORD.equals(msgType) && partial.getRecord().getData() != null) { - // store serialized json - partial.withSerialized(partial.getRecord().getData().toString()); - // The connector doesn't need to be able to access to the record value. We can serialize it here and - // drop the json - // object. Having this data stored as a string is slightly more optimal for the memory usage. - partial.getRecord().setData(null); - } else if (Type.STATE.equals(msgType)) { - partial.withSerialized(messageString); - } else { - throw new RuntimeException(String.format("Unsupported message type: %s", msgType)); - } - - return partial; - } - - @Override - public void close() throws Exception { - Preconditions.checkState(hasStarted, "Cannot close; has not started."); - Preconditions.checkState(!hasClosed, "Has already closed."); - hasClosed = true; - - // assume closing upload workers will flush all accepted records. - // we need to close the workers before closing the bufferManagers (and underlying buffers) - // or we risk in-memory data. - flushWorkers.close(); - - bufferManager.close(); - - final Map streamSyncSummaries = streamNames.stream().collect(toMap( - streamDescriptor -> streamDescriptor, - streamDescriptor -> new StreamSyncSummary( - Optional.of(getRecordCounter(streamDescriptor).get())))); - onClose.accept(hasFailed, streamSyncSummaries); - - // as this throws an exception, we need to be after all other close functions. - propagateFlushWorkerExceptionIfPresent(); - LOGGER.info("{} closed", AsyncStreamConsumer.class); - } - - private AtomicLong getRecordCounter(final StreamDescriptor streamDescriptor) { - return recordCounts.computeIfAbsent(streamDescriptor, sd -> new AtomicLong()); - } - - private void propagateFlushWorkerExceptionIfPresent() throws Exception { - if (flushFailure.isFailed()) { - hasFailed = true; - if (flushFailure.getException() == null) { - throw new RuntimeException("The Destination failed with a missing exception. This should not happen. Please check logs."); - } - throw flushFailure.getException(); - } - } - - private void validateRecord(final PartialAirbyteMessage message) { - final StreamDescriptor streamDescriptor = new StreamDescriptor() - .withNamespace(message.getRecord().getNamespace()) - .withName(message.getRecord().getStream()); - // if stream is not part of list of streams to sync to then throw invalid stream exception - if (!streamNames.contains(streamDescriptor)) { - throwUnrecognizedStream(catalog, message); - } - } - - private static void throwUnrecognizedStream(final ConfiguredAirbyteCatalog catalog, final PartialAirbyteMessage message) { - throw new IllegalArgumentException( - String.format("Message contained record from a stream that was not in the catalog. \ncatalog: %s , \nmessage: %s", - Jsons.serialize(catalog), Jsons.serialize(message))); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/DestinationFlushFunction.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/DestinationFlushFunction.java deleted file mode 100644 index 22878f7780ba4..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/DestinationFlushFunction.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination_async; - -import io.airbyte.cdk.integrations.destination_async.partial_messages.PartialAirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.util.stream.Stream; - -/** - * An interface meant to be used with {@link FlushWorkers}. - *

    - * A destination instructs workers how to write data by specifying - * {@link #flush(StreamDescriptor, Stream)}. This keeps the worker abstraction generic and reusable. - *

    - * e.g. A database destination's flush function likely involves parsing the stream into SQL - * statements. - *

    - * There are 2 different destination types as of this writing: - *

      - *
    • 1. Destinations that upload files. This includes warehouses and databases.
    • - *
    • 2. Destinations that upload data streams. This mostly includes various Cloud storages. This - * will include reverse-ETL in the future
    • - *
    - * In both cases, the simplest way to model the incoming data is as a stream. - */ -public interface DestinationFlushFunction { - - /** - * Flush a batch of data to the destination. - * - * @param decs the Airbyte stream the data stream belongs to - * @param stream a bounded {@link AirbyteMessage} stream ideally of - * {@link #getOptimalBatchSizeBytes()} size - * @throws Exception - */ - void flush(StreamDescriptor decs, Stream stream) throws Exception; - - /** - * When invoking {@link #flush(StreamDescriptor, Stream)}, best effort attempt to invoke flush with - * a batch of this size. Useful for Destinations that have optimal flush batch sizes. - *

    - * If you increase this, make sure that {@link #getQueueFlushThresholdBytes()} is larger than this - * value. Otherwise we may trigger flushes before reaching the optimal batch size. - * - * @return the optimal batch size in bytes - */ - long getOptimalBatchSizeBytes(); - - /** - * This value should be at least as high as {@link #getOptimalBatchSizeBytes()}. It's used by - * {@link DetectStreamToFlush} as part of deciding when a stream needs to be flushed. I'm being - * vague because I don't understand the specifics. - */ - default long getQueueFlushThresholdBytes() { - return Math.max(10 * 1024 * 1024, getOptimalBatchSizeBytes()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/DetectStreamToFlush.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/DetectStreamToFlush.java deleted file mode 100644 index ccd24736ce21d..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/DetectStreamToFlush.java +++ /dev/null @@ -1,242 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination_async; - -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.cdk.integrations.destination_async.buffers.BufferDequeue; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.time.Clock; -import java.time.Instant; -import java.util.Comparator; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.stream.Collectors; -import lombok.extern.slf4j.Slf4j; -import org.apache.commons.lang3.tuple.ImmutablePair; - -/** - * This class finds the best, next stream to flush. - */ -@Slf4j -public class DetectStreamToFlush { - - private static final double EAGER_FLUSH_THRESHOLD = 0.90; - private static final long MAX_TIME_BETWEEN_FLUSH_MS = 5 * 60 * 1000; - private final BufferDequeue bufferDequeue; - private final RunningFlushWorkers runningFlushWorkers; - private final AtomicBoolean isClosing; - private final DestinationFlushFunction flusher; - private final Clock nowProvider; - private final ConcurrentMap latestFlushTimeMsPerStream = new ConcurrentHashMap<>(); - - public DetectStreamToFlush(final BufferDequeue bufferDequeue, - final RunningFlushWorkers runningFlushWorkers, - final AtomicBoolean isClosing, - final DestinationFlushFunction flusher) { - this(bufferDequeue, runningFlushWorkers, isClosing, flusher, Clock.systemUTC()); - } - - @VisibleForTesting - DetectStreamToFlush(final BufferDequeue bufferDequeue, - final RunningFlushWorkers runningFlushWorkers, - final AtomicBoolean isClosing, - final DestinationFlushFunction flusher, - final Clock nowProvider) { - this.bufferDequeue = bufferDequeue; - this.runningFlushWorkers = runningFlushWorkers; - this.isClosing = isClosing; - this.flusher = flusher; - this.nowProvider = nowProvider; - } - - /** - * Get the best, next stream that is ready to be flushed. - * - * @return best, next stream to flush. If no stream is ready to be flushed, return empty. - */ - public Optional getNextStreamToFlush() { - return getNextStreamToFlush(computeQueueThreshold()); - } - - /** - * We have a minimum threshold for the size of a queue before we will flush it. The threshold helps - * us avoid uploading small amounts of data at a time, which is really resource inefficient. - * Depending on certain conditions, we dynamically adjust this threshold. - *

    - * Rules: - *

  • default - By default the, the threshold is a set at a constant: - * QUEUE_FLUSH_THRESHOLD_BYTES.
  • - *
  • memory pressure - If we are getting close to maxing out available memory, we reduce it to - * zero. This helps in the case where there are a lot of streams, so total memory usage is high, but - * each individual queue isn't that large.
  • - *
  • closing - If the Flush Worker is closing, we reduce it to zero. We close when all records - * have been added to the queue, at which point, our goal is to flush out any non-empty queues.
  • - * - * @return based on the conditions, the threshold in bytes. - */ - @VisibleForTesting - long computeQueueThreshold() { - final boolean isBuffer90Full = - EAGER_FLUSH_THRESHOLD <= (double) bufferDequeue.getTotalGlobalQueueSizeBytes() / bufferDequeue.getMaxQueueSizeBytes(); - // when we are closing or queues are very full, flush regardless of how few items are in the queue. - return isClosing.get() || isBuffer90Full ? 0 : flusher.getQueueFlushThresholdBytes(); - } - - // todo (cgardens) - improve prioritization by getting a better estimate of how much data running - // workers will process. we have access to their batch sizes after all! - /** - * Iterates over streams until it finds one that is ready to flush. Streams are ordered by priority. - * Return an empty optional if no streams are ready. - *

    - * A stream is ready to flush if it either meets a size threshold or a time threshold. See - * {@link #isSizeTriggered(StreamDescriptor, long)} and {@link #isTimeTriggered(long)} for details - * on these triggers. - * - * @param queueSizeThresholdBytes - the size threshold to use for determining if a stream is ready - * to flush. - * @return the next stream to flush. if no stream is ready to flush, empty. - */ - @VisibleForTesting - Optional getNextStreamToFlush(final long queueSizeThresholdBytes) { - for (final StreamDescriptor stream : orderStreamsByPriority(bufferDequeue.getBufferedStreams())) { - final long latestFlushTimeMs = latestFlushTimeMsPerStream.computeIfAbsent(stream, _k -> nowProvider.millis()); - final ImmutablePair isTimeTriggeredResult = isTimeTriggered(latestFlushTimeMs); - final ImmutablePair isSizeTriggeredResult = isSizeTriggered(stream, queueSizeThresholdBytes); - - final String debugString = String.format( - "trigger info: %s - %s, %s , %s", - stream.getNamespace(), - stream.getName(), - isTimeTriggeredResult.getRight(), - isSizeTriggeredResult.getRight()); - log.debug("computed: {}", debugString); - - if (isSizeTriggeredResult.getLeft() || isTimeTriggeredResult.getLeft()) { - log.info("flushing: {}", debugString); - latestFlushTimeMsPerStream.put(stream, nowProvider.millis()); - return Optional.of(stream); - } - } - return Optional.empty(); - } - - /** - * The time trigger is based on the last time a record was added to the queue. We don't want records - * to sit forever, even if the queue is not that full (bad for time to value for users). Also, the - * more time passes since a record was added, the less likely another record is coming (caveat is - * CDC where it's random). - *

    - * This method also returns debug string with info that about the computation. We do it this way, so - * that the debug info that is printed is exactly what is used in the computation. - * - * @param latestFlushTimeMs latestFlushTimeMs - * @return is time triggered and a debug string - */ - @VisibleForTesting - ImmutablePair isTimeTriggered(final long latestFlushTimeMs) { - final long timeSinceLastFlushMs = nowProvider.millis() - latestFlushTimeMs; - final Boolean isTimeTriggered = timeSinceLastFlushMs >= MAX_TIME_BETWEEN_FLUSH_MS; - final String debugString = String.format("time trigger: %s", isTimeTriggered); - - return ImmutablePair.of(isTimeTriggered, debugString); - } - - /** - * For the size threshold, the size of the data in the queue is compared to the threshold that is - * passed into this method. - *

    - * One caveat, is that if that stream already has a worker running, we "penalize" its size. We do - * this by computing what the size of the queue would be after the running workers for that queue - * complete. This is based on a dumb estimate of how much data a worker can process. There is an - * opportunity for optimization here, by being smarter about predicting how much data a running - * worker is likely to process. - *

    - * This method also returns debug string with info that about the computation. We do it this way, so - * that the debug info that is printed is exactly what is used in the computation. - * - * @param stream stream - * @param queueSizeThresholdBytes min size threshold to determine if a queue is ready to flush - * @return is size triggered and a debug string - */ - @VisibleForTesting - ImmutablePair isSizeTriggered(final StreamDescriptor stream, final long queueSizeThresholdBytes) { - final long currentQueueSize = bufferDequeue.getQueueSizeBytes(stream).orElseThrow(); - final long sizeOfRunningWorkersEstimate = estimateSizeOfRunningWorkers(stream, currentQueueSize); - final long queueSizeAfterRunningWorkers = currentQueueSize - sizeOfRunningWorkersEstimate; - final boolean isSizeTriggered = queueSizeAfterRunningWorkers > queueSizeThresholdBytes; - - final String debugString = String.format( - "size trigger: %s current threshold b: %s, queue size b: %s, penalty b: %s, after penalty b: %s", - isSizeTriggered, - AirbyteFileUtils.byteCountToDisplaySize(queueSizeThresholdBytes), - AirbyteFileUtils.byteCountToDisplaySize(currentQueueSize), - AirbyteFileUtils.byteCountToDisplaySize(sizeOfRunningWorkersEstimate), - AirbyteFileUtils.byteCountToDisplaySize(queueSizeAfterRunningWorkers)); - - return ImmutablePair.of(isSizeTriggered, debugString); - } - - /** - * For a stream, determines how many bytes will be processed by CURRENTLY running workers. For the - * purpose of this calculation, workers can be in one of two states. First, they can have a batch, - * in which case, we can read the size in bytes from the batch to know how many records that batch - * will pull of the queue. Second, it might not have a batch yet, in which case, we assume the min - * of bytes in the queue or the optimal flush size. - *

    - * - * @param stream stream - * @return estimate of records remaining to be process - */ - @VisibleForTesting - long estimateSizeOfRunningWorkers(final StreamDescriptor stream, final long currentQueueSize) { - final List> runningWorkerBatchesSizes = runningFlushWorkers.getSizesOfRunningWorkerBatches(stream); - final long workersWithBatchesSize = runningWorkerBatchesSizes.stream().filter(Optional::isPresent).mapToLong(Optional::get).sum(); - final long workersWithoutBatchesCount = runningWorkerBatchesSizes.stream().filter(Optional::isEmpty).count(); - final long workersWithoutBatchesSizeEstimate = Math.min(flusher.getOptimalBatchSizeBytes(), currentQueueSize) * workersWithoutBatchesCount; - return (workersWithBatchesSize + workersWithoutBatchesSizeEstimate); - } - - // todo (cgardens) - perf test whether it would make sense to flip 1 & 2. - /** - * Sort stream descriptors in order of priority with which we would want to flush them. - *

    - * Priority is in the following order: - *

  • 1. size in queue (descending)
  • - *
  • 2. time since last record (ascending)
  • - *
  • 3. alphabetical by namespace + stream name.
  • - *

    - * In other words, move the biggest queues first, because they are most likely to use available - * resources optimally. Then get rid of old stuff (time to value for the user and, generally, as the - * age of the last record grows, the likelihood of getting any more records from that stream - * decreases, so by flushing them, we can totally complete that stream). Finally, tertiary sort by - * name so the order is deterministic. - * - * @param streams streams to sort. - * @return streams sorted by priority. - */ - @VisibleForTesting - List orderStreamsByPriority(final Set streams) { - // eagerly pull attributes so that values are consistent throughout comparison - final Map> sdToQueueSize = streams.stream() - .collect(Collectors.toMap(s -> s, bufferDequeue::getQueueSizeBytes)); - - final Map> sdToTimeOfLastRecord = streams.stream() - .collect(Collectors.toMap(s -> s, bufferDequeue::getTimeOfLastRecord)); - - return streams.stream() - .sorted(Comparator.comparing((StreamDescriptor s) -> sdToQueueSize.get(s).orElseThrow(), Comparator.reverseOrder()) - // if no time is present, it suggests the queue has no records. set MAX time as a sentinel value to - // represent no records. - .thenComparing(s -> sdToTimeOfLastRecord.get(s).orElse(Instant.MAX)) - .thenComparing(s -> s.getNamespace() + s.getName())) - .collect(Collectors.toList()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/FlushWorkers.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/FlushWorkers.java deleted file mode 100644 index 32b01c5702917..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/FlushWorkers.java +++ /dev/null @@ -1,241 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination_async; - -import io.airbyte.cdk.integrations.destination_async.buffers.BufferDequeue; -import io.airbyte.cdk.integrations.destination_async.buffers.StreamAwareQueue.MessageWithMeta; -import io.airbyte.cdk.integrations.destination_async.state.FlushFailure; -import io.airbyte.cdk.integrations.destination_async.state.GlobalAsyncStateManager; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.util.Map; -import java.util.Optional; -import java.util.UUID; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.ThreadPoolExecutor; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.function.Consumer; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import lombok.extern.slf4j.Slf4j; - -/** - * Parallel flushing of Destination data. - *

    - * In combination with a {@link DestinationFlushFunction} and the {@link #workerPool}, this class - * allows for parallel data flushing. - *

    - * Parallelising is important as it 1) minimises Destination backpressure 2) minimises the effect of - * IO pauses on Destination performance. The second point is particularly important since a majority - * of Destination work is IO bound. - *

    - * The {@link #supervisorThread} assigns work to worker threads by looping over - * {@link #bufferDequeue} - a dequeue interface over in-memory queues of {@link AirbyteMessage}. See - * {@link #retrieveWork()} for assignment logic. - *

    - * Within a worker thread, a worker best-effort reads a - * {@link DestinationFlushFunction#getOptimalBatchSizeBytes()} batch from the in-memory stream and - * calls {@link DestinationFlushFunction#flush(StreamDescriptor, Stream)} on the returned data. - */ -@Slf4j -public class FlushWorkers implements AutoCloseable { - - private static final long SUPERVISOR_INITIAL_DELAY_SECS = 0L; - private static final long SUPERVISOR_PERIOD_SECS = 1L; - private static final long DEBUG_INITIAL_DELAY_SECS = 0L; - private static final long DEBUG_PERIOD_SECS = 60L; - - private final ScheduledExecutorService supervisorThread; - private final ExecutorService workerPool; - private final BufferDequeue bufferDequeue; - private final DestinationFlushFunction flusher; - private final Consumer outputRecordCollector; - private final ScheduledExecutorService debugLoop; - private final RunningFlushWorkers runningFlushWorkers; - private final DetectStreamToFlush detectStreamToFlush; - - private final FlushFailure flushFailure; - - private final AtomicBoolean isClosing; - private final GlobalAsyncStateManager stateManager; - - public FlushWorkers(final BufferDequeue bufferDequeue, - final DestinationFlushFunction flushFunction, - final Consumer outputRecordCollector, - final FlushFailure flushFailure, - final GlobalAsyncStateManager stateManager) { - this(bufferDequeue, flushFunction, outputRecordCollector, flushFailure, stateManager, Executors.newFixedThreadPool(5)); - } - - public FlushWorkers(final BufferDequeue bufferDequeue, - final DestinationFlushFunction flushFunction, - final Consumer outputRecordCollector, - final FlushFailure flushFailure, - final GlobalAsyncStateManager stateManager, - final ExecutorService workerPool) { - this.bufferDequeue = bufferDequeue; - this.outputRecordCollector = outputRecordCollector; - this.flushFailure = flushFailure; - this.stateManager = stateManager; - this.workerPool = workerPool; - flusher = flushFunction; - debugLoop = Executors.newSingleThreadScheduledExecutor(); - supervisorThread = Executors.newScheduledThreadPool(1); - isClosing = new AtomicBoolean(false); - runningFlushWorkers = new RunningFlushWorkers(); - detectStreamToFlush = new DetectStreamToFlush(bufferDequeue, runningFlushWorkers, isClosing, flusher); - } - - public void start() { - log.info("Start async buffer supervisor"); - supervisorThread.scheduleAtFixedRate(this::retrieveWork, - SUPERVISOR_INITIAL_DELAY_SECS, - SUPERVISOR_PERIOD_SECS, - TimeUnit.SECONDS); - debugLoop.scheduleAtFixedRate(this::printWorkerInfo, - DEBUG_INITIAL_DELAY_SECS, - DEBUG_PERIOD_SECS, - TimeUnit.SECONDS); - } - - private void retrieveWork() { - try { - // This will put a new log line every second which is too much, sampling it doesn't bring much value - // so it is set to debug - log.debug("Retrieve Work -- Finding queues to flush"); - final ThreadPoolExecutor threadPoolExecutor = (ThreadPoolExecutor) workerPool; - int allocatableThreads = threadPoolExecutor.getMaximumPoolSize() - threadPoolExecutor.getActiveCount(); - - while (allocatableThreads > 0) { - final Optional next = detectStreamToFlush.getNextStreamToFlush(); - - if (next.isPresent()) { - final StreamDescriptor desc = next.get(); - final UUID flushWorkerId = UUID.randomUUID(); - runningFlushWorkers.trackFlushWorker(desc, flushWorkerId); - allocatableThreads--; - flush(desc, flushWorkerId); - } else { - break; - } - } - } catch (final Exception e) { - log.error("Flush worker error: ", e); - flushFailure.propagateException(e); - throw new RuntimeException(e); - } - } - - private void printWorkerInfo() { - final var workerInfo = new StringBuilder().append("[ASYNC WORKER INFO] "); - - final ThreadPoolExecutor threadPoolExecutor = (ThreadPoolExecutor) workerPool; - - final int queueSize = threadPoolExecutor.getQueue().size(); - final int activeCount = threadPoolExecutor.getActiveCount(); - - workerInfo.append(String.format("Pool queue size: %d, Active threads: %d", queueSize, activeCount)); - log.info(workerInfo.toString()); - - } - - private void flush(final StreamDescriptor desc, final UUID flushWorkerId) { - workerPool.submit(() -> { - log.info("Flush Worker ({}) -- Worker picked up work.", humanReadableFlushWorkerId(flushWorkerId)); - try { - log.info("Flush Worker ({}) -- Attempting to read from queue namespace: {}, stream: {}.", - humanReadableFlushWorkerId(flushWorkerId), - desc.getNamespace(), - desc.getName()); - - try (final var batch = bufferDequeue.take(desc, flusher.getOptimalBatchSizeBytes())) { - runningFlushWorkers.registerBatchSize(desc, flushWorkerId, batch.getSizeInBytes()); - final Map stateIdToCount = batch.getData() - .stream() - .map(MessageWithMeta::stateId) - .collect(Collectors.groupingBy( - stateId -> stateId, - Collectors.counting())); - log.info("Flush Worker ({}) -- Batch contains: {} records, {} bytes.", - humanReadableFlushWorkerId(flushWorkerId), - batch.getData().size(), - AirbyteFileUtils.byteCountToDisplaySize(batch.getSizeInBytes())); - - flusher.flush(desc, batch.getData().stream().map(MessageWithMeta::message)); - batch.flushStates(stateIdToCount, outputRecordCollector); - } - - log.info("Flush Worker ({}) -- Worker finished flushing. Current queue size: {}", - humanReadableFlushWorkerId(flushWorkerId), - bufferDequeue.getQueueSizeInRecords(desc).orElseThrow()); - } catch (final Exception e) { - log.error(String.format("Flush Worker (%s) -- flush worker error: ", humanReadableFlushWorkerId(flushWorkerId)), e); - flushFailure.propagateException(e); - throw new RuntimeException(e); - } finally { - runningFlushWorkers.completeFlushWorker(desc, flushWorkerId); - } - }); - } - - @Override - public void close() throws Exception { - log.info("Closing flush workers -- waiting for all buffers to flush"); - isClosing.set(true); - // wait for all buffers to be flushed. - while (true) { - final Map streamDescriptorToRemainingRecords = bufferDequeue.getBufferedStreams() - .stream() - .collect(Collectors.toMap(desc -> desc, desc -> bufferDequeue.getQueueSizeInRecords(desc).orElseThrow())); - - final boolean anyRecordsLeft = streamDescriptorToRemainingRecords - .values() - .stream() - .anyMatch(size -> size > 0); - - if (!anyRecordsLeft) { - break; - } - - final var workerInfo = new StringBuilder().append("REMAINING_BUFFERS_INFO").append(System.lineSeparator()); - streamDescriptorToRemainingRecords.entrySet() - .stream() - .filter(entry -> entry.getValue() > 0) - .forEach(entry -> workerInfo.append(String.format(" Namespace: %s Stream: %s -- remaining records: %d", - entry.getKey().getNamespace(), - entry.getKey().getName(), - entry.getValue()))); - log.info(workerInfo.toString()); - log.info("Waiting for all streams to flush."); - Thread.sleep(1000); - } - log.info("Closing flush workers -- all buffers flushed"); - - // before shutting down the supervisor, flush all state. - stateManager.flushStates(outputRecordCollector); - supervisorThread.shutdown(); - while (!supervisorThread.awaitTermination(5L, TimeUnit.MINUTES)) { - log.info("Waiting for flush worker supervisor to shut down"); - } - log.info("Closing flush workers -- supervisor shut down"); - - log.info("Closing flush workers -- Starting worker pool shutdown.."); - workerPool.shutdown(); - while (!workerPool.awaitTermination(5L, TimeUnit.MINUTES)) { - log.info("Waiting for flush workers to shut down"); - } - log.info("Closing flush workers -- workers shut down"); - - debugLoop.shutdownNow(); - } - - private static String humanReadableFlushWorkerId(final UUID flushWorkerId) { - return flushWorkerId.toString().substring(0, 5); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/GlobalMemoryManager.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/GlobalMemoryManager.java deleted file mode 100644 index ca8aea8fdbcbc..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/GlobalMemoryManager.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination_async; - -import java.util.concurrent.atomic.AtomicLong; -import lombok.extern.slf4j.Slf4j; -import org.apache.commons.io.FileUtils; - -/** - * Responsible for managing buffer memory across multiple queues in a thread-safe way. This does not - * allocate or free memory in the traditional sense, but rather manages based off memory estimates - * provided by the callers. - *

    - * The goal is to enable maximum allowed memory bounds for each queue to be dynamically adjusted - * according to the overall available memory. Memory blocks are managed in chunks of - * {@link #BLOCK_SIZE_BYTES}, and the total amount of memory managed is configured at creation time. - *

    - * As a destination has no information about incoming per-stream records, having static queue sizes - * can cause unnecessary backpressure on a per-stream basis. By providing a dynamic, global view of - * buffer memory management, this class allows each queue to release and request memory dynamically, - * enabling effective sharing of global memory resources across all the queues, and avoiding - * accidental stream backpressure. - *

    - * This becomes particularly useful in the following scenarios: - *

      - *
    • 1. When the incoming records belong to a single stream. Dynamic allocation ensures this one - * stream can utilise all memory.
    • - *
    • 2. When the incoming records are from multiple streams, such as with Change Data Capture - * (CDC). Here, dynamic allocation let us create as many queues as possible, allowing all streams to - * be processed in parallel without accidental backpressure from unnecessary eager flushing.
    • - *
    - */ -@Slf4j -public class GlobalMemoryManager { - - // In cases where a queue is rapidly expanding, a larger block size allows less allocation calls. On - // the flip size, a smaller block size allows more granular memory management. Since this overhead - // is minimal for now, err on a smaller block sizes. - public static final long BLOCK_SIZE_BYTES = 10 * 1024 * 1024; // 10MB - private final long maxMemoryBytes; - - private final AtomicLong currentMemoryBytes = new AtomicLong(0); - - public GlobalMemoryManager(final long maxMemoryBytes) { - this.maxMemoryBytes = maxMemoryBytes; - } - - public long getMaxMemoryBytes() { - return maxMemoryBytes; - } - - public long getCurrentMemoryBytes() { - return currentMemoryBytes.get(); - } - - /** - * Requests a block of memory of {@link #BLOCK_SIZE_BYTES}. Return 0 if memory cannot be freed. - * - * @return the size of the allocated block, in bytes - */ - public synchronized long requestMemory() { - // todo(davin): what happens if the incoming record is larger than 30MB? - if (currentMemoryBytes.get() >= maxMemoryBytes) { - return 0L; - } - - final var freeMem = maxMemoryBytes - currentMemoryBytes.get(); - // Never allocate more than free memory size. - final var toAllocateBytes = Math.min(freeMem, BLOCK_SIZE_BYTES); - currentMemoryBytes.addAndGet(toAllocateBytes); - - log.debug("Memory Requested: max: {}, allocated: {}, allocated in this request: {}", - FileUtils.byteCountToDisplaySize(maxMemoryBytes), - FileUtils.byteCountToDisplaySize(currentMemoryBytes.get()), - FileUtils.byteCountToDisplaySize(toAllocateBytes)); - return toAllocateBytes; - } - - /** - * Releases a block of memory of the given size. If the amount of memory released exceeds the - * current memory allocation, a warning will be logged. - * - * @param bytes the size of the block to free, in bytes - */ - public void free(final long bytes) { - log.info("Freeing {} bytes..", bytes); - currentMemoryBytes.addAndGet(-bytes); - - final long currentMemory = currentMemoryBytes.get(); - if (currentMemory < 0) { - log.info("Freed more memory than allocated ({} of {})", bytes, currentMemory + bytes); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/OnCloseFunction.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/OnCloseFunction.java deleted file mode 100644 index c1bd6f097d8f1..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/OnCloseFunction.java +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination_async; - -import io.airbyte.cdk.integrations.destination.StreamSyncSummary; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.util.Map; -import java.util.function.BiConsumer; - -/** - * Async version of - * {@link io.airbyte.cdk.integrations.destination.buffered_stream_consumer.OnCloseFunction}. - * Separately out for easier versioning. - */ -public interface OnCloseFunction extends BiConsumer> { - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/RunningFlushWorkers.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/RunningFlushWorkers.java deleted file mode 100644 index dd0d2ed08e0b9..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/RunningFlushWorkers.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination_async; - -import com.google.common.base.Preconditions; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.util.ArrayList; -import java.util.List; -import java.util.Optional; -import java.util.UUID; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; - -/** - * Track the number of flush workers (and their size) that are currently running for a given stream. - */ -public class RunningFlushWorkers { - - private final ConcurrentMap>> streamToFlushWorkerToBatchSize; - - public RunningFlushWorkers() { - streamToFlushWorkerToBatchSize = new ConcurrentHashMap<>(); - } - - /** - * Call this when a worker starts flushing a stream. - * - * @param stream the stream that is being flushed - * @param flushWorkerId flush worker id - */ - public void trackFlushWorker(final StreamDescriptor stream, final UUID flushWorkerId) { - streamToFlushWorkerToBatchSize.computeIfAbsent( - stream, - ignored -> new ConcurrentHashMap<>()).computeIfAbsent(flushWorkerId, ignored -> Optional.empty()); - } - - /** - * Call this when a worker completes flushing a stream. - * - * @param stream the stream that was flushed - * @param flushWorkerId flush worker id - */ - public void completeFlushWorker(final StreamDescriptor stream, final UUID flushWorkerId) { - Preconditions.checkState(streamToFlushWorkerToBatchSize.containsKey(stream) - && streamToFlushWorkerToBatchSize.get(stream).containsKey(flushWorkerId), - "Cannot complete flush worker for stream that has not started."); - streamToFlushWorkerToBatchSize.get(stream).remove(flushWorkerId); - if (streamToFlushWorkerToBatchSize.get(stream).isEmpty()) { - streamToFlushWorkerToBatchSize.remove(stream); - } - } - - /** - * When a worker gets a batch of records, register its size so that it can be referenced for - * estimating how many records will be left in the queue after the batch is done. - * - * @param stream stream - * @param batchSize batch size - */ - public void registerBatchSize(final StreamDescriptor stream, final UUID flushWorkerId, final long batchSize) { - Preconditions.checkState(streamToFlushWorkerToBatchSize.containsKey(stream) - && streamToFlushWorkerToBatchSize.get(stream).containsKey(flushWorkerId), - "Cannot register a batch size for a flush worker that has not been initialized"); - streamToFlushWorkerToBatchSize.get(stream).put(flushWorkerId, Optional.of(batchSize)); - } - - /** - * For a stream get how many bytes are in each running worker. If the worker doesn't have a batch - * yet, return empty optional. - * - * @param stream stream - * @return bytes in batches currently being processed - */ - public List> getSizesOfRunningWorkerBatches(final StreamDescriptor stream) { - return new ArrayList<>(streamToFlushWorkerToBatchSize.getOrDefault(stream, new ConcurrentHashMap<>()).values()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/StreamDescriptorUtils.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/StreamDescriptorUtils.java deleted file mode 100644 index bd93f55ebd228..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/StreamDescriptorUtils.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination_async; - -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.AirbyteStream; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.util.HashSet; -import java.util.Set; - -/** - * Helper functions to extract {@link StreamDescriptor} from other POJOs. - */ -public class StreamDescriptorUtils { - - public static StreamDescriptor fromRecordMessage(final AirbyteRecordMessage msg) { - return new StreamDescriptor().withName(msg.getStream()).withNamespace(msg.getNamespace()); - } - - public static StreamDescriptor fromAirbyteStream(final AirbyteStream stream) { - return new StreamDescriptor().withName(stream.getName()).withNamespace(stream.getNamespace()); - } - - public static StreamDescriptor fromConfiguredAirbyteSteam(final ConfiguredAirbyteStream stream) { - return fromAirbyteStream(stream.getStream()); - } - - public static Set fromConfiguredCatalog(final ConfiguredAirbyteCatalog catalog) { - final var pairs = new HashSet(); - - for (final ConfiguredAirbyteStream stream : catalog.getStreams()) { - final var pair = fromAirbyteStream(stream.getStream()); - pairs.add(pair); - } - - return pairs; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/buffers/BufferDequeue.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/buffers/BufferDequeue.java deleted file mode 100644 index 3650733cfbcf9..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/buffers/BufferDequeue.java +++ /dev/null @@ -1,137 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination_async.buffers; - -import io.airbyte.cdk.integrations.destination_async.GlobalMemoryManager; -import io.airbyte.cdk.integrations.destination_async.buffers.MemoryBoundedLinkedBlockingQueue.MemoryItem; -import io.airbyte.cdk.integrations.destination_async.buffers.StreamAwareQueue.MessageWithMeta; -import io.airbyte.cdk.integrations.destination_async.state.GlobalAsyncStateManager; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.time.Instant; -import java.util.HashSet; -import java.util.LinkedList; -import java.util.List; -import java.util.Optional; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.atomic.AtomicLong; -import java.util.concurrent.locks.ReentrantLock; - -/** - * Represents the minimal interface over the underlying buffer queues required for dequeue - * operations with the aim of minimizing lower-level queue access. - *

    - * Aside from {@link #take(StreamDescriptor, long)}, all public methods in this class represents - * queue metadata required to determine buffer flushing. - */ -// todo (cgardens) - make all the metadata methods more efficient. -public class BufferDequeue { - - private final GlobalMemoryManager memoryManager; - private final ConcurrentMap buffers; - private final GlobalAsyncStateManager stateManager; - private final ConcurrentMap bufferLocks; - - public BufferDequeue(final GlobalMemoryManager memoryManager, - final ConcurrentMap buffers, - final GlobalAsyncStateManager stateManager) { - this.memoryManager = memoryManager; - this.buffers = buffers; - this.stateManager = stateManager; - bufferLocks = new ConcurrentHashMap<>(); - } - - /** - * Primary dequeue method. Reads from queue up to optimalBytesToRead OR until the queue is empty. - * - * @param streamDescriptor specific buffer to take from - * @param optimalBytesToRead bytes to read, if possible - * @return autocloseable batch object, that frees memory. - */ - public MemoryAwareMessageBatch take(final StreamDescriptor streamDescriptor, final long optimalBytesToRead) { - final var lock = bufferLocks.computeIfAbsent(streamDescriptor, _k -> new ReentrantLock()); - lock.lock(); - - final var queue = buffers.get(streamDescriptor); - - try { - final AtomicLong bytesRead = new AtomicLong(); - - final List output = new LinkedList<>(); - while (queue.size() > 0) { - final MemoryItem memoryItem = queue.peek().orElseThrow(); - - // otherwise pull records until we hit the memory limit. - final long newSize = memoryItem.size() + bytesRead.get(); - if (newSize <= optimalBytesToRead) { - bytesRead.addAndGet(memoryItem.size()); - output.add(queue.poll().item()); - } else { - break; - } - } - - if (queue.isEmpty()) { - final var batchSizeBytes = bytesRead.get(); - final var allocatedBytes = queue.getMaxMemoryUsage(); - - // Free unused allocation for the queue. - // When the batch flushes it will flush its allocation. - memoryManager.free(allocatedBytes - batchSizeBytes); - - // Shrink queue to 0 — any new messages will reallocate. - queue.addMaxMemory(-allocatedBytes); - } else { - queue.addMaxMemory(-bytesRead.get()); - } - - return new MemoryAwareMessageBatch( - output, - bytesRead.get(), - memoryManager, - stateManager); - } finally { - lock.unlock(); - } - } - - /** - * The following methods are provide metadata for buffer flushing calculations. Consumers are - * expected to call it to retrieve the currently buffered streams as a handle to the remaining - * methods. - */ - public Set getBufferedStreams() { - return new HashSet<>(buffers.keySet()); - } - - public long getMaxQueueSizeBytes() { - return memoryManager.getMaxMemoryBytes(); - } - - public long getTotalGlobalQueueSizeBytes() { - return buffers.values().stream().map(StreamAwareQueue::getCurrentMemoryUsage).mapToLong(Long::longValue).sum(); - } - - public Optional getQueueSizeInRecords(final StreamDescriptor streamDescriptor) { - return getBuffer(streamDescriptor).map(buf -> Long.valueOf(buf.size())); - } - - public Optional getQueueSizeBytes(final StreamDescriptor streamDescriptor) { - return getBuffer(streamDescriptor).map(StreamAwareQueue::getCurrentMemoryUsage); - } - - public Optional getTimeOfLastRecord(final StreamDescriptor streamDescriptor) { - return getBuffer(streamDescriptor).flatMap(StreamAwareQueue::getTimeOfLastMessage); - } - - private Optional getBuffer(final StreamDescriptor streamDescriptor) { - if (buffers.containsKey(streamDescriptor)) { - return Optional.of(buffers.get(streamDescriptor)); - } - return Optional.empty(); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/buffers/BufferEnqueue.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/buffers/BufferEnqueue.java deleted file mode 100644 index 09f67f62c786c..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/buffers/BufferEnqueue.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination_async.buffers; - -import static java.lang.Thread.sleep; - -import io.airbyte.cdk.integrations.destination_async.GlobalMemoryManager; -import io.airbyte.cdk.integrations.destination_async.partial_messages.PartialAirbyteMessage; -import io.airbyte.cdk.integrations.destination_async.state.GlobalAsyncStateManager; -import io.airbyte.protocol.models.v0.AirbyteMessage.Type; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.util.concurrent.ConcurrentMap; - -/** - * Represents the minimal interface over the underlying buffer queues required for enqueue - * operations with the aim of minimizing lower-level queue access. - */ -public class BufferEnqueue { - - private final GlobalMemoryManager memoryManager; - private final ConcurrentMap buffers; - private final GlobalAsyncStateManager stateManager; - - public BufferEnqueue(final GlobalMemoryManager memoryManager, - final ConcurrentMap buffers, - final GlobalAsyncStateManager stateManager) { - this.memoryManager = memoryManager; - this.buffers = buffers; - this.stateManager = stateManager; - } - - /** - * Buffer a record. Contains memory management logic to dynamically adjust queue size based via - * {@link GlobalMemoryManager} accounting for incoming records. - * - * @param message to buffer - * @param sizeInBytes - */ - public void addRecord(final PartialAirbyteMessage message, final Integer sizeInBytes, final String defaultNamespace) { - if (message.getType() == Type.RECORD) { - handleRecord(message, sizeInBytes); - } else if (message.getType() == Type.STATE) { - stateManager.trackState(message, sizeInBytes, defaultNamespace); - } - } - - private void handleRecord(final PartialAirbyteMessage message, final Integer sizeInBytes) { - final StreamDescriptor streamDescriptor = extractStateFromRecord(message); - final var queue = buffers.computeIfAbsent(streamDescriptor, _k -> new StreamAwareQueue(memoryManager.requestMemory())); - final long stateId = stateManager.getStateIdAndIncrementCounter(streamDescriptor); - - var addedToQueue = queue.offer(message, sizeInBytes, stateId); - - int i = 0; - while (!addedToQueue) { - final var newlyAllocatedMemory = memoryManager.requestMemory(); - if (newlyAllocatedMemory > 0) { - queue.addMaxMemory(newlyAllocatedMemory); - } - addedToQueue = queue.offer(message, sizeInBytes, stateId); - i++; - if (i > 5) { - try { - sleep(500); - } catch (final InterruptedException e) { - throw new RuntimeException(e); - } - } - } - } - - private static StreamDescriptor extractStateFromRecord(final PartialAirbyteMessage message) { - return new StreamDescriptor() - .withNamespace(message.getRecord().getNamespace()) - .withName(message.getRecord().getStream()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/buffers/BufferManager.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/buffers/BufferManager.java deleted file mode 100644 index 1d824a2b14c04..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/buffers/BufferManager.java +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination_async.buffers; - -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.cdk.integrations.destination_async.AirbyteFileUtils; -import io.airbyte.cdk.integrations.destination_async.FlushWorkers; -import io.airbyte.cdk.integrations.destination_async.GlobalMemoryManager; -import io.airbyte.cdk.integrations.destination_async.state.GlobalAsyncStateManager; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.util.ArrayList; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.Executors; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; -import lombok.extern.slf4j.Slf4j; -import org.apache.commons.io.FileUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@Slf4j -public class BufferManager { - - private static final Logger LOGGER = LoggerFactory.getLogger(BufferManager.class); - - public final long maxMemory; - private final ConcurrentMap buffers; - private final BufferEnqueue bufferEnqueue; - private final BufferDequeue bufferDequeue; - private final GlobalMemoryManager memoryManager; - - private final GlobalAsyncStateManager stateManager; - private final ScheduledExecutorService debugLoop; - private static final long DEBUG_PERIOD_SECS = 60L; - - public static final double MEMORY_LIMIT_RATIO = 0.7; - - public BufferManager() { - this((long) (Runtime.getRuntime().maxMemory() * MEMORY_LIMIT_RATIO)); - } - - /** - * @param memoryLimit the amount of estimated memory we allow for all buffers. The - * GlobalMemoryManager will apply back pressure once this quota is filled. "Memory" can be - * released back once flushing finishes. This number should be large enough we don't block - * reading unnecessarily, but small enough we apply back pressure before OOMing. - */ - public BufferManager(final long memoryLimit) { - maxMemory = memoryLimit; - LOGGER.info("Max 'memory' available for buffer allocation {}", FileUtils.byteCountToDisplaySize(maxMemory)); - memoryManager = new GlobalMemoryManager(maxMemory); - this.stateManager = new GlobalAsyncStateManager(memoryManager); - buffers = new ConcurrentHashMap<>(); - bufferEnqueue = new BufferEnqueue(memoryManager, buffers, stateManager); - bufferDequeue = new BufferDequeue(memoryManager, buffers, stateManager); - debugLoop = Executors.newSingleThreadScheduledExecutor(); - debugLoop.scheduleAtFixedRate(this::printQueueInfo, 0, DEBUG_PERIOD_SECS, TimeUnit.SECONDS); - } - - public GlobalAsyncStateManager getStateManager() { - return stateManager; - } - - @VisibleForTesting - protected GlobalMemoryManager getMemoryManager() { - return memoryManager; - } - - @VisibleForTesting - protected ConcurrentMap getBuffers() { - return buffers; - } - - public BufferEnqueue getBufferEnqueue() { - return bufferEnqueue; - } - - public BufferDequeue getBufferDequeue() { - return bufferDequeue; - } - - /** - * Closing a queue will flush all items from it. For this reason, this method needs to be called - * after {@link FlushWorkers#close()}. This allows the upload workers to make sure all items in the - * queue has been flushed. - */ - public void close() throws Exception { - debugLoop.shutdownNow(); - log.info("Buffers cleared.."); - } - - private void printQueueInfo() { - final var queueInfo = new StringBuilder().append("[ASYNC QUEUE INFO] "); - final ArrayList messages = new ArrayList<>(); - - messages - .add(String.format("Global: max: %s, allocated: %s (%s MB), %% used: %s", - AirbyteFileUtils.byteCountToDisplaySize(memoryManager.getMaxMemoryBytes()), - AirbyteFileUtils.byteCountToDisplaySize(memoryManager.getCurrentMemoryBytes()), - (double) memoryManager.getCurrentMemoryBytes() / 1024 / 1024, - (double) memoryManager.getCurrentMemoryBytes() / memoryManager.getMaxMemoryBytes())); - - for (final var entry : buffers.entrySet()) { - final var queue = entry.getValue(); - messages.add( - String.format("Queue `%s`, num records: %d, num bytes: %s, allocated bytes: %s", - entry.getKey().getName(), queue.size(), AirbyteFileUtils.byteCountToDisplaySize(queue.getCurrentMemoryUsage()), - AirbyteFileUtils.byteCountToDisplaySize(queue.getMaxMemoryUsage()))); - } - - messages.add(stateManager.getMemoryUsageMessage()); - - queueInfo.append(String.join(" | ", messages)); - - log.info(queueInfo.toString()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/buffers/MemoryAwareMessageBatch.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/buffers/MemoryAwareMessageBatch.java deleted file mode 100644 index 213f30e7768e7..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/buffers/MemoryAwareMessageBatch.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination_async.buffers; - -import io.airbyte.cdk.integrations.destination_async.GlobalMemoryManager; -import io.airbyte.cdk.integrations.destination_async.buffers.StreamAwareQueue.MessageWithMeta; -import io.airbyte.cdk.integrations.destination_async.state.GlobalAsyncStateManager; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import java.util.List; -import java.util.Map; -import java.util.function.Consumer; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * POJO abstraction representing one discrete buffer read. This allows ergonomics dequeues by - * {@link io.airbyte.cdk.integrations.destination_async.FlushWorkers}. - *

    - * The contained stream **IS EXPECTED to be a BOUNDED** stream. Returning a boundless stream has - * undefined behaviour. - *

    - * Once done, consumers **MUST** invoke {@link #close()}. As the {@link #batch} has already been - * retrieved from in-memory buffers, we need to update {@link GlobalMemoryManager} to reflect the - * freed up memory and avoid memory leaks. - */ -public class MemoryAwareMessageBatch implements AutoCloseable { - - private static final Logger LOGGER = LoggerFactory.getLogger(MemoryAwareMessageBatch.class); - private final List batch; - - private final long sizeInBytes; - private final GlobalMemoryManager memoryManager; - private final GlobalAsyncStateManager stateManager; - - public MemoryAwareMessageBatch(final List batch, - final long sizeInBytes, - final GlobalMemoryManager memoryManager, - final GlobalAsyncStateManager stateManager) { - this.batch = batch; - this.sizeInBytes = sizeInBytes; - this.memoryManager = memoryManager; - this.stateManager = stateManager; - } - - public long getSizeInBytes() { - return sizeInBytes; - } - - public List getData() { - return batch; - } - - @Override - public void close() throws Exception { - memoryManager.free(sizeInBytes); - } - - /** - * For the batch, marks all the states that have now been flushed. Also writes the states that can - * be flushed back to platform via stateManager. - *

    - */ - public void flushStates(final Map stateIdToCount, final Consumer outputRecordCollector) { - stateIdToCount.forEach(stateManager::decrement); - stateManager.flushStates(outputRecordCollector); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/buffers/MemoryBoundedLinkedBlockingQueue.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/buffers/MemoryBoundedLinkedBlockingQueue.java deleted file mode 100644 index 3478a8258a2f5..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/buffers/MemoryBoundedLinkedBlockingQueue.java +++ /dev/null @@ -1,141 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination_async.buffers; - -import java.util.concurrent.LinkedBlockingQueue; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicLong; -import javax.annotation.Nonnull; -import lombok.extern.slf4j.Slf4j; - -/** - * This class is meant to emulate the behavior of a LinkedBlockingQueue, but instead of being - * bounded on number of items in the queue, it is bounded by the memory it is allowed to use. The - * amount of memory it is allowed to use can be resized after it is instantiated. - *

    - * This class intentionally hides the underlying queue inside of it. For this class to work, it has - * to override each method on a queue that adds or removes records from the queue. The Queue - * interface has a lot of methods to override, and we don't want to spend the time overriding a lot - * of methods that won't be used. By hiding the queue, we avoid someone accidentally using a queue - * method that has not been modified. If you need access to another of the queue methods, pattern - * match adding the memory tracking as seen in {@link HiddenQueue}, and then delegate to that method - * from this top-level class. - * - * @param type in the queue - */ -@Slf4j -class MemoryBoundedLinkedBlockingQueue { - - private final HiddenQueue hiddenQueue; - - public MemoryBoundedLinkedBlockingQueue(final long maxMemoryUsage) { - hiddenQueue = new HiddenQueue<>(maxMemoryUsage); - } - - public long getCurrentMemoryUsage() { - return hiddenQueue.currentMemoryUsage.get(); - } - - public void addMaxMemory(final long maxMemoryUsage) { - hiddenQueue.maxMemoryUsage.addAndGet(maxMemoryUsage); - } - - public int size() { - return hiddenQueue.size(); - } - - public boolean offer(final E e, final long itemSizeInBytes) { - return hiddenQueue.offer(e, itemSizeInBytes); - } - - public MemoryBoundedLinkedBlockingQueue.MemoryItem peek() { - return hiddenQueue.peek(); - } - - public MemoryBoundedLinkedBlockingQueue.MemoryItem take() throws InterruptedException { - return hiddenQueue.take(); - } - - public MemoryBoundedLinkedBlockingQueue.MemoryItem poll() { - return hiddenQueue.poll(); - } - - public MemoryBoundedLinkedBlockingQueue.MemoryItem poll(final long timeout, final TimeUnit unit) throws InterruptedException { - return hiddenQueue.poll(timeout, unit); - } - - public long getMaxMemoryUsage() { - return hiddenQueue.getMaxMemoryUsage(); - } - - /** - * Extends LinkedBlockingQueue so that we can get a LinkedBlockingQueue bounded by memory. Hidden as - * an inner class, so it doesn't get misused, see top-level javadoc comment. - * - * @param - */ - private static class HiddenQueue extends LinkedBlockingQueue> { - - private final AtomicLong currentMemoryUsage; - private final AtomicLong maxMemoryUsage; - - public HiddenQueue(final long maxMemoryUsage) { - currentMemoryUsage = new AtomicLong(0); - this.maxMemoryUsage = new AtomicLong(maxMemoryUsage); - } - - public long getMaxMemoryUsage() { - return maxMemoryUsage.get(); - } - - public boolean offer(final E e, final long itemSizeInBytes) { - final long newMemoryUsage = currentMemoryUsage.addAndGet(itemSizeInBytes); - if (newMemoryUsage <= maxMemoryUsage.get()) { - final boolean success = super.offer(new MemoryItem<>(e, itemSizeInBytes)); - if (!success) { - currentMemoryUsage.addAndGet(-itemSizeInBytes); - } - log.debug("offer status: {}", success); - return success; - } else { - currentMemoryUsage.addAndGet(-itemSizeInBytes); - log.debug("offer failed"); - return false; - } - } - - @Nonnull - @Override - public MemoryBoundedLinkedBlockingQueue.MemoryItem take() throws InterruptedException { - final MemoryItem memoryItem = super.take(); - currentMemoryUsage.addAndGet(-memoryItem.size()); - return memoryItem; - } - - @Override - public MemoryBoundedLinkedBlockingQueue.MemoryItem poll() { - final MemoryItem memoryItem = super.poll(); - if (memoryItem != null) { - currentMemoryUsage.addAndGet(-memoryItem.size()); - return memoryItem; - } - return null; - } - - @Override - public MemoryBoundedLinkedBlockingQueue.MemoryItem poll(final long timeout, final TimeUnit unit) throws InterruptedException { - final MemoryItem memoryItem = super.poll(timeout, unit); - if (memoryItem != null) { - currentMemoryUsage.addAndGet(-memoryItem.size()); - return memoryItem; - } - return null; - } - - } - - public record MemoryItem (E item, long size) {} - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/buffers/StreamAwareQueue.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/buffers/StreamAwareQueue.java deleted file mode 100644 index bf75b4415b49c..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/buffers/StreamAwareQueue.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination_async.buffers; - -import io.airbyte.cdk.integrations.destination_async.partial_messages.PartialAirbyteMessage; -import java.time.Instant; -import java.util.Optional; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicReference; -import lombok.extern.slf4j.Slf4j; - -@Slf4j -public class StreamAwareQueue { - - private final AtomicReference timeOfLastMessage; - - private final MemoryBoundedLinkedBlockingQueue memoryAwareQueue; - - public StreamAwareQueue(final long maxMemoryUsage) { - memoryAwareQueue = new MemoryBoundedLinkedBlockingQueue<>(maxMemoryUsage); - timeOfLastMessage = new AtomicReference<>(); - } - - public long getCurrentMemoryUsage() { - return memoryAwareQueue.getCurrentMemoryUsage(); - } - - public long getMaxMemoryUsage() { - return memoryAwareQueue.getMaxMemoryUsage(); - } - - public void addMaxMemory(final long maxMemoryUsage) { - memoryAwareQueue.addMaxMemory(maxMemoryUsage); - } - - public boolean isEmpty() { - return memoryAwareQueue.size() == 0; - } - - public Optional getTimeOfLastMessage() { - // if the queue is empty, the time of last message is irrelevant - if (size() == 0) { - return Optional.empty(); - } - return Optional.ofNullable(timeOfLastMessage.get()); - } - - public Optional> peek() { - return Optional.ofNullable(memoryAwareQueue.peek()); - } - - public int size() { - return memoryAwareQueue.size(); - } - - public boolean offer(final PartialAirbyteMessage message, final long messageSizeInBytes, final long stateId) { - if (memoryAwareQueue.offer(new MessageWithMeta(message, stateId), messageSizeInBytes)) { - timeOfLastMessage.set(Instant.now()); - return true; - } else { - return false; - } - } - - public MemoryBoundedLinkedBlockingQueue.MemoryItem take() throws InterruptedException { - return memoryAwareQueue.take(); - } - - public MemoryBoundedLinkedBlockingQueue.MemoryItem poll() { - return memoryAwareQueue.poll(); - } - - public MemoryBoundedLinkedBlockingQueue.MemoryItem poll(final long timeout, final TimeUnit unit) throws InterruptedException { - return memoryAwareQueue.poll(timeout, unit); - } - - public record MessageWithMeta(PartialAirbyteMessage message, long stateId) {} - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/partial_messages/PartialAirbyteMessage.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/partial_messages/PartialAirbyteMessage.java deleted file mode 100644 index c0d3739b32850..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/partial_messages/PartialAirbyteMessage.java +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination_async.partial_messages; - -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonPropertyDescription; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import java.util.Objects; - -public class PartialAirbyteMessage { - - @JsonProperty("type") - @JsonPropertyDescription("Message type") - private AirbyteMessage.Type type; - - @JsonProperty("record") - private PartialAirbyteRecordMessage record; - - @JsonProperty("state") - private PartialAirbyteStateMessage state; - - @JsonProperty("serialized") - private String serialized; - - public PartialAirbyteMessage() {} - - @JsonProperty("type") - public AirbyteMessage.Type getType() { - return type; - } - - @JsonProperty("type") - public void setType(final AirbyteMessage.Type type) { - this.type = type; - } - - public PartialAirbyteMessage withType(final AirbyteMessage.Type type) { - this.type = type; - return this; - } - - @JsonProperty("record") - public PartialAirbyteRecordMessage getRecord() { - return record; - } - - @JsonProperty("record") - public void setRecord(final PartialAirbyteRecordMessage record) { - this.record = record; - } - - public PartialAirbyteMessage withRecord(final PartialAirbyteRecordMessage record) { - this.record = record; - return this; - } - - @JsonProperty("state") - public PartialAirbyteStateMessage getState() { - return state; - } - - @JsonProperty("state") - public void setState(final PartialAirbyteStateMessage state) { - this.state = state; - } - - public PartialAirbyteMessage withState(final PartialAirbyteStateMessage state) { - this.state = state; - return this; - } - - /** - * For record messages, this stores the serialized data blob (i.e. - * {@code Jsons.serialize(message.getRecord().getData())}). For state messages, this stores the - * _entire_ message (i.e. {@code Jsons.serialize(message)}). - *

    - * See - * {@link io.airbyte.cdk.integrations.destination_async.AsyncStreamConsumer#deserializeAirbyteMessage(String)} - * for the exact logic of how this field is populated. - */ - @JsonProperty("serialized") - public String getSerialized() { - return serialized; - } - - @JsonProperty("serialized") - public void setSerialized(final String serialized) { - this.serialized = serialized; - } - - public PartialAirbyteMessage withSerialized(final String serialized) { - this.serialized = serialized; - return this; - } - - @Override - public boolean equals(final Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - final PartialAirbyteMessage that = (PartialAirbyteMessage) o; - return type == that.type && Objects.equals(record, that.record) && Objects.equals(state, that.state) - && Objects.equals(serialized, that.serialized); - } - - @Override - public int hashCode() { - return Objects.hash(type, record, state, serialized); - } - - @Override - public String toString() { - return "PartialAirbyteMessage{" + - "type=" + type + - ", record=" + record + - ", state=" + state + - ", serialized='" + serialized + '\'' + - '}'; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/partial_messages/PartialAirbyteRecordMessage.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/partial_messages/PartialAirbyteRecordMessage.java deleted file mode 100644 index ebd903fcfc871..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/partial_messages/PartialAirbyteRecordMessage.java +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination_async.partial_messages; - -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonPropertyDescription; -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.util.Objects; - -// TODO: (ryankfu) remove this and test with low memory resources to ensure OOM is still not a -// factor, shouldn't be -// but weird things have happened -public class PartialAirbyteRecordMessage { - - @JsonProperty("namespace") - private String namespace; - @JsonProperty("stream") - private String stream; - - @JsonProperty("data") - private JsonNode data; - - @JsonProperty("emitted_at") - @JsonPropertyDescription("when the data was emitted from the source. epoch in millisecond.") - private long emittedAt; - - public PartialAirbyteRecordMessage() {} - - @JsonProperty("namespace") - public String getNamespace() { - return namespace; - } - - @JsonProperty("namespace") - public void setNamespace(final String namespace) { - this.namespace = namespace; - } - - public PartialAirbyteRecordMessage withNamespace(final String namespace) { - this.namespace = namespace; - return this; - } - - @JsonProperty("stream") - public String getStream() { - return stream; - } - - @JsonProperty("stream") - public void setStream(final String stream) { - this.stream = stream; - } - - public PartialAirbyteRecordMessage withStream(final String stream) { - this.stream = stream; - return this; - } - - @JsonProperty("data") - public JsonNode getData() { - return data; - } - - @JsonProperty("data") - public void setData(final JsonNode data) { - this.data = data; - } - - public PartialAirbyteRecordMessage withData(final JsonNode data) { - this.data = data; - return this; - } - - @JsonProperty("emitted_at") - public Long getEmittedAt() { - return this.emittedAt; - } - - @JsonProperty("emitted_at") - public void setEmittedAt(final long emittedAt) { - this.emittedAt = emittedAt; - } - - public PartialAirbyteRecordMessage withEmittedAt(final Long emittedAt) { - this.emittedAt = emittedAt; - return this; - } - - @Override - public boolean equals(final Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - final PartialAirbyteRecordMessage that = (PartialAirbyteRecordMessage) o; - return Objects.equals(namespace, that.namespace) - && Objects.equals(stream, that.stream) - && Objects.equals(emittedAt, that.emittedAt); - } - - @Override - public int hashCode() { - return Objects.hash(namespace, stream, emittedAt); - } - - @Override - public String toString() { - return "PartialAirbyteRecordMessage{" + - "namespace='" + namespace + '\'' + - ", stream='" + stream + '\'' + - ", emittedAt='" + emittedAt + '\'' + - '}'; - } - - public StreamDescriptor getStreamDescriptor() { - return new StreamDescriptor().withName(stream).withNamespace(namespace); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/partial_messages/PartialAirbyteStateMessage.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/partial_messages/PartialAirbyteStateMessage.java deleted file mode 100644 index d91a4a13c403b..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/partial_messages/PartialAirbyteStateMessage.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination_async.partial_messages; - -import com.fasterxml.jackson.annotation.JsonProperty; -import io.airbyte.protocol.models.v0.AirbyteStateMessage.AirbyteStateType; -import java.util.Objects; - -public class PartialAirbyteStateMessage { - - @JsonProperty("type") - private AirbyteStateType type; - - @JsonProperty("stream") - private PartialAirbyteStreamState stream; - - public PartialAirbyteStateMessage() {} - - @JsonProperty("type") - public AirbyteStateType getType() { - return type; - } - - @JsonProperty("type") - public void setType(final AirbyteStateType type) { - this.type = type; - } - - public PartialAirbyteStateMessage withType(final AirbyteStateType type) { - this.type = type; - return this; - } - - @JsonProperty("stream") - public PartialAirbyteStreamState getStream() { - return stream; - } - - @JsonProperty("stream") - public void setStream(final PartialAirbyteStreamState stream) { - this.stream = stream; - } - - public PartialAirbyteStateMessage withStream(final PartialAirbyteStreamState stream) { - this.stream = stream; - return this; - } - - @Override - public boolean equals(final Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - final PartialAirbyteStateMessage that = (PartialAirbyteStateMessage) o; - return type == that.type && Objects.equals(stream, that.stream); - } - - @Override - public int hashCode() { - return Objects.hash(type, stream); - } - - @Override - public String toString() { - return "PartialAirbyteStateMessage{" + - "type=" + type + - ", stream=" + stream + - '}'; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/partial_messages/PartialAirbyteStreamState.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/partial_messages/PartialAirbyteStreamState.java deleted file mode 100644 index 7076c5fcbc714..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/partial_messages/PartialAirbyteStreamState.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination_async.partial_messages; - -import com.fasterxml.jackson.annotation.JsonProperty; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.util.Objects; - -public class PartialAirbyteStreamState { - - @JsonProperty("stream_descriptor") - private StreamDescriptor streamDescriptor; - - public PartialAirbyteStreamState() { - streamDescriptor = streamDescriptor; - } - - @JsonProperty("stream_descriptor") - public StreamDescriptor getStreamDescriptor() { - return streamDescriptor; - } - - @JsonProperty("stream_descriptor") - public void setStreamDescriptor(final StreamDescriptor streamDescriptor) { - this.streamDescriptor = streamDescriptor; - } - - public PartialAirbyteStreamState withStreamDescriptor(final StreamDescriptor streamDescriptor) { - this.streamDescriptor = streamDescriptor; - return this; - } - - @Override - public boolean equals(final Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - final PartialAirbyteStreamState that = (PartialAirbyteStreamState) o; - return Objects.equals(streamDescriptor, that.streamDescriptor); - } - - @Override - public int hashCode() { - return Objects.hash(streamDescriptor); - } - - @Override - public String toString() { - return "PartialAirbyteStreamState{" + - "streamDescriptor=" + streamDescriptor + - '}'; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/state/FlushFailure.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/state/FlushFailure.java deleted file mode 100644 index 2188c6c66bc95..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/state/FlushFailure.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination_async.state; - -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicReference; - -public class FlushFailure { - - private final AtomicBoolean isFailed = new AtomicBoolean(false); - - private final AtomicReference exceptionAtomicReference = new AtomicReference<>(); - - public void propagateException(Exception e) { - this.isFailed.set(true); - this.exceptionAtomicReference.set(e); - } - - public boolean isFailed() { - return isFailed.get(); - } - - public Exception getException() { - return exceptionAtomicReference.get(); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/state/GlobalAsyncStateManager.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/state/GlobalAsyncStateManager.java deleted file mode 100644 index 845dfdd629ea0..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/state/GlobalAsyncStateManager.java +++ /dev/null @@ -1,422 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination_async.state; - -import static java.lang.Thread.sleep; - -import com.google.common.base.Preconditions; -import com.google.common.base.Strings; -import io.airbyte.cdk.integrations.destination_async.GlobalMemoryManager; -import io.airbyte.cdk.integrations.destination_async.partial_messages.PartialAirbyteMessage; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import io.airbyte.protocol.models.v0.AirbyteStateStats; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.time.Instant; -import java.util.Collection; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.UUID; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.LinkedBlockingDeque; -import java.util.concurrent.atomic.AtomicLong; -import java.util.function.Consumer; -import lombok.extern.slf4j.Slf4j; -import org.apache.commons.io.FileUtils; -import org.apache.commons.lang3.tuple.ImmutablePair; -import org.apache.mina.util.ConcurrentHashSet; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Responsible for managing state within the Destination. The general approach is a ref counter - * approach - each state message is associated with a record count. This count represents the number - * of preceding records. For a state to be emitted, all preceding records have to be written to the - * destination i.e. the counter is 0. - *

    - * A per-stream state queue is maintained internally, with each state within the queue having a - * counter. This means we *ALLOW* records succeeding an unemitted state to be written. This - * decouples record writing from state management at the cost of potentially repeating work if an - * upstream state is never written. - *

    - * One important detail here is the difference between how PER-STREAM & NON-PER-STREAM is handled. - * The PER-STREAM case is simple, and is as described above. The NON-PER-STREAM case is slightly - * tricky. Because we don't know the stream type to begin with, we always assume PER_STREAM until - * the first state message arrives. If this state message is a GLOBAL state, we alias all existing - * state ids to a single global state id via a set of alias ids. From then onwards, we use one id - - * {@link #SENTINEL_GLOBAL_DESC} regardless of stream. Read - * {@link #convertToGlobalIfNeeded(AirbyteMessage)} for more detail. - */ -@Slf4j -public class GlobalAsyncStateManager { - - private static final Logger LOGGER = LoggerFactory.getLogger(GlobalAsyncStateManager.class); - - private static final StreamDescriptor SENTINEL_GLOBAL_DESC = new StreamDescriptor().withName(UUID.randomUUID().toString()); - private final GlobalMemoryManager memoryManager; - - /** - * Memory that the manager has allocated to it to use. It can ask for more memory as needed. - */ - private final AtomicLong memoryAllocated; - /** - * Memory that the manager is currently using. - */ - private final AtomicLong memoryUsed; - - private boolean preState = true; - private final ConcurrentMap> descToStateIdQ = new ConcurrentHashMap<>(); - /** - * Both {@link stateIdToCounter} and {@link stateIdToCounterForPopulatingDestinationStats} are used - * to maintain a counter for the number of records associated with a give state i.e. before a state - * was received, how many records were seen until that point. As records are received the value for - * both are incremented. The difference is the purpose of the two attributes. - * {@link stateIdToCounter} is used to determine whether a state is safe to emit or not. This is - * done by decrementing the value as records are committed to the destination. If the value hits 0, - * it means all the records associated with a given state have been committed to the destination, it - * is safe to emit the state back to platform. But because of this we can't use it to determine the - * actual number of records that are associated with a state to update the value of - * {@link AirbyteStateMessage#destinationStats} at the time of emitting the state message. That's - * where we need {@link stateIdToCounterForPopulatingDestinationStats}, which is only reset when a - * state message has been emitted. - */ - private final ConcurrentMap stateIdToCounter = new ConcurrentHashMap<>(); - private final ConcurrentMap stateIdToCounterForPopulatingDestinationStats = new ConcurrentHashMap<>(); - private final ConcurrentMap> stateIdToState = new ConcurrentHashMap<>(); - - // Alias-ing only exists in the non-STREAM case where we have to convert existing state ids to one - // single global id. - // This only happens once. - private final Set aliasIds = new ConcurrentHashSet<>(); - private long retroactiveGlobalStateId = 0; - // All access to this field MUST be guarded by a synchronized(lock) block - private long arrivalNumber = 0; - - private final Object LOCK = new Object(); - - public GlobalAsyncStateManager(final GlobalMemoryManager memoryManager) { - this.memoryManager = memoryManager; - this.memoryAllocated = new AtomicLong(memoryManager.requestMemory()); - this.memoryUsed = new AtomicLong(); - } - - // Always assume STREAM to begin, and convert only if needed. Most state is per stream anyway. - private AirbyteStateMessage.AirbyteStateType stateType = AirbyteStateMessage.AirbyteStateType.STREAM; - - /** - * Main method to process state messages. - *

    - * The first incoming state message tells us the type of state we are dealing with. We then convert - * internal data structures if needed. - *

    - * Because state messages are a watermark, all preceding records need to be flushed before the state - * message can be processed. - */ - public void trackState(final PartialAirbyteMessage message, final long sizeInBytes, final String defaultNamespace) { - if (preState) { - convertToGlobalIfNeeded(message); - preState = false; - } - // stateType should not change after a conversion. - Preconditions.checkArgument(stateType == extractStateType(message)); - - closeState(message, sizeInBytes, defaultNamespace); - } - - /** - * Identical to {@link #getStateId(StreamDescriptor)} except this increments the associated counter - * by 1. Intended to be called whenever a record is ingested. - * - * @param streamDescriptor - stream to get stateId for. - * @return state id - */ - public long getStateIdAndIncrementCounter(final StreamDescriptor streamDescriptor) { - return getStateIdAndIncrement(streamDescriptor, 1); - } - - /** - * Each decrement represent one written record for a state. A zero counter means there are no more - * inflight records associated with a state and the state can be flushed. - * - * @param stateId reference to a state. - * @param count to decrement. - */ - public void decrement(final long stateId, final long count) { - synchronized (LOCK) { - log.trace("decrementing state id: {}, count: {}", stateId, count); - stateIdToCounter.get(getStateAfterAlias(stateId)).addAndGet(-count); - } - } - - /** - * Flushes state messages with no more inflight records i.e. counter = 0 across all streams. - * Intended to be called by {@link io.airbyte.cdk.integrations.destination_async.FlushWorkers} after - * a worker has finished flushing its record batch. - *

    - */ - public void flushStates(final Consumer outputRecordCollector) { - Long bytesFlushed = 0L; - synchronized (LOCK) { - for (final Map.Entry> entry : descToStateIdQ.entrySet()) { - // Remove all states with 0 counters. - // Per-stream synchronized is required to make sure the state (at the head of the queue) - // logic is applied to is the state actually removed. - - final LinkedBlockingDeque stateIdQueue = entry.getValue(); - while (true) { - final Long oldestStateId = stateIdQueue.peek(); - // no state to flush for this stream - if (oldestStateId == null) { - break; - } - - // technically possible this map hasn't been updated yet. - // This can be if you call the flush method if there are 0 records/states - final var oldestStateCounter = stateIdToCounter.get(oldestStateId); - if (oldestStateCounter == null) { - break; - } - - final var oldestState = stateIdToState.get(oldestStateId); - // no state to flush for this stream - if (oldestState == null) { - break; - } - - final var allRecordsCommitted = oldestStateCounter.get() == 0; - if (allRecordsCommitted) { - final StateMessageWithArrivalNumber stateMessage = oldestState.getLeft(); - final double flushedRecordsAssociatedWithState = stateIdToCounterForPopulatingDestinationStats.get(oldestStateId).doubleValue(); - - log.info("State with arrival number {} emitted from thread {} at {}", stateMessage.arrivalNumber(), Thread.currentThread().getName(), - Instant.now().toString()); - final AirbyteMessage message = Jsons.deserialize(stateMessage.partialAirbyteStateMessage.getSerialized(), AirbyteMessage.class); - message.getState().setDestinationStats(new AirbyteStateStats().withRecordCount(flushedRecordsAssociatedWithState)); - outputRecordCollector.accept(message); - - bytesFlushed += oldestState.getRight(); - - // cleanup - entry.getValue().poll(); - stateIdToState.remove(oldestStateId); - stateIdToCounter.remove(oldestStateId); - stateIdToCounterForPopulatingDestinationStats.remove(oldestStateId); - } else { - break; - } - } - } - } - - freeBytes(bytesFlushed); - } - - private Long getStateIdAndIncrement(final StreamDescriptor streamDescriptor, final long increment) { - final StreamDescriptor resolvedDescriptor = stateType == AirbyteStateMessage.AirbyteStateType.STREAM ? streamDescriptor : SENTINEL_GLOBAL_DESC; - // As concurrent collections do not guarantee data consistency when iterating, use `get` instead of - // `containsKey`. - if (descToStateIdQ.get(resolvedDescriptor) == null) { - registerNewStreamDescriptor(resolvedDescriptor); - } - synchronized (LOCK) { - final Long stateId = descToStateIdQ.get(resolvedDescriptor).peekLast(); - final var update = stateIdToCounter.get(stateId).addAndGet(increment); - if (increment >= 0) { - stateIdToCounterForPopulatingDestinationStats.get(stateId).addAndGet(increment); - } - log.trace("State id: {}, count: {}", stateId, update); - return stateId; - } - } - - /** - * Return the internal id of a state message. This is the id that should be used to reference a - * state when interacting with all methods in this class. - * - * @param streamDescriptor - stream to get stateId for. - * @return state id - */ - private long getStateId(final StreamDescriptor streamDescriptor) { - return getStateIdAndIncrement(streamDescriptor, 0); - } - - /** - * Pass this the number of bytes that were flushed. It will track those internally and if the - * memoryUsed gets signficantly lower than what is allocated, then it will return it to the memory - * manager. We don't always return to the memory manager to avoid needlessly allocating / - * de-allocating memory rapidly over a few bytes. - * - * @param bytesFlushed bytes that were flushed (and should be removed from memory used). - */ - private void freeBytes(final long bytesFlushed) { - LOGGER.debug("Bytes flushed memory to store state message. Allocated: {}, Used: {}, Flushed: {}, % Used: {}", - FileUtils.byteCountToDisplaySize(memoryAllocated.get()), - FileUtils.byteCountToDisplaySize(memoryUsed.get()), - FileUtils.byteCountToDisplaySize(bytesFlushed), - (double) memoryUsed.get() / memoryAllocated.get()); - - memoryManager.free(bytesFlushed); - memoryAllocated.addAndGet(-bytesFlushed); - memoryUsed.addAndGet(-bytesFlushed); - LOGGER.debug("Returned {} of memory back to the memory manager.", FileUtils.byteCountToDisplaySize(bytesFlushed)); - } - - private void convertToGlobalIfNeeded(final PartialAirbyteMessage message) { - // instead of checking for global or legacy, check for the inverse of stream. - stateType = extractStateType(message); - if (stateType != AirbyteStateMessage.AirbyteStateType.STREAM) {// alias old stream-level state ids to single global state id - // upon conversion, all previous tracking data structures need to be cleared as we move - // into the non-STREAM world for correctness. - synchronized (LOCK) { - aliasIds.addAll(descToStateIdQ.values().stream().flatMap(Collection::stream).toList()); - descToStateIdQ.clear(); - retroactiveGlobalStateId = StateIdProvider.getNextId(); - - descToStateIdQ.put(SENTINEL_GLOBAL_DESC, new LinkedBlockingDeque<>()); - descToStateIdQ.get(SENTINEL_GLOBAL_DESC).add(retroactiveGlobalStateId); - - final long combinedCounter = stateIdToCounter.values() - .stream() - .mapToLong(AtomicLong::get) - .sum(); - stateIdToCounter.clear(); - stateIdToCounter.put(retroactiveGlobalStateId, new AtomicLong(combinedCounter)); - - final long statsCounter = stateIdToCounterForPopulatingDestinationStats.values() - .stream() - .mapToLong(AtomicLong::get) - .sum(); - stateIdToCounterForPopulatingDestinationStats.clear(); - stateIdToCounterForPopulatingDestinationStats.put(retroactiveGlobalStateId, new AtomicLong(statsCounter)); - } - } - } - - private AirbyteStateMessage.AirbyteStateType extractStateType(final PartialAirbyteMessage message) { - if (message.getState().getType() == null) { - // Treated the same as GLOBAL. - return AirbyteStateMessage.AirbyteStateType.LEGACY; - } else { - return message.getState().getType(); - } - } - - /** - * When a state message is received, 'close' the previous state to associate the existing state id - * to the newly arrived state message. We also increment the state id in preparation for the next - * state message. - */ - private void closeState(final PartialAirbyteMessage message, final long sizeInBytes, final String defaultNamespace) { - final StreamDescriptor resolvedDescriptor = extractStream(message, defaultNamespace).orElse(SENTINEL_GLOBAL_DESC); - synchronized (LOCK) { - log.info("State with arrival number {} received", arrivalNumber); - stateIdToState.put(getStateId(resolvedDescriptor), ImmutablePair.of(new StateMessageWithArrivalNumber(message, arrivalNumber), sizeInBytes)); - arrivalNumber++; - } - registerNewStateId(resolvedDescriptor); - allocateMemoryToState(sizeInBytes); - } - - /** - * Given the size of a state message, tracks how much memory the manager is using and requests - * additional memory from the memory manager if needed. - * - * @param sizeInBytes size of the state message - */ - @SuppressWarnings("BusyWait") - private void allocateMemoryToState(final long sizeInBytes) { - if (memoryAllocated.get() < memoryUsed.get() + sizeInBytes) { - while (memoryAllocated.get() < memoryUsed.get() + sizeInBytes) { - memoryAllocated.addAndGet(memoryManager.requestMemory()); - try { - LOGGER.debug("Insufficient memory to store state message. Allocated: {}, Used: {}, Size of State Msg: {}, Needed: {}", - FileUtils.byteCountToDisplaySize(memoryAllocated.get()), - FileUtils.byteCountToDisplaySize(memoryUsed.get()), - FileUtils.byteCountToDisplaySize(sizeInBytes), - FileUtils.byteCountToDisplaySize(sizeInBytes - (memoryAllocated.get() - memoryUsed.get()))); - sleep(1000); - } catch (final InterruptedException e) { - throw new RuntimeException(e); - } - } - LOGGER.debug(getMemoryUsageMessage()); - } - } - - public String getMemoryUsageMessage() { - return String.format("State Manager memory usage: Allocated: %s, Used: %s, percentage Used %f", - FileUtils.byteCountToDisplaySize(memoryAllocated.get()), - FileUtils.byteCountToDisplaySize(memoryUsed.get()), - (double) memoryUsed.get() / memoryAllocated.get()); - } - - /** - * If the user has selected the Destination Namespace as the Destination default while setting up - * the connector, the platform sets the namespace as null in the StreamDescriptor in the - * AirbyteMessages (both record and state messages). The destination checks that if the namespace is - * empty or null, if yes then re-populates it with the defaultNamespace. See - * {@link io.airbyte.cdk.integrations.destination_async.AsyncStreamConsumer#accept(String,Integer)} - * But destination only does this for the record messages. So when state messages arrive without a - * namespace and since the destination doesn't repopulate it with the default namespace, there is a - * mismatch between the StreamDescriptor from record messages and state messages. That breaks the - * logic of the state management class as {@link descToStateIdQ} needs to have consistent - * StreamDescriptor. This is why while trying to extract the StreamDescriptor from state messages, - * we check if the namespace is null, if yes then replace it with defaultNamespace to keep it - * consistent with the record messages. - */ - private static Optional extractStream(final PartialAirbyteMessage message, final String defaultNamespace) { - if (message.getState().getType() != null && message.getState().getType() == AirbyteStateMessage.AirbyteStateType.STREAM) { - final StreamDescriptor streamDescriptor = message.getState().getStream().getStreamDescriptor(); - if (Strings.isNullOrEmpty(streamDescriptor.getNamespace())) { - return Optional.of(new StreamDescriptor().withName(streamDescriptor.getName()).withNamespace(defaultNamespace)); - } - return Optional.of(streamDescriptor); - } - return Optional.empty(); - } - - private long getStateAfterAlias(final long stateId) { - if (aliasIds.contains(stateId)) { - return retroactiveGlobalStateId; - } else { - return stateId; - } - } - - private void registerNewStreamDescriptor(final StreamDescriptor resolvedDescriptor) { - synchronized (LOCK) { - descToStateIdQ.put(resolvedDescriptor, new LinkedBlockingDeque<>()); - } - registerNewStateId(resolvedDescriptor); - } - - private void registerNewStateId(final StreamDescriptor resolvedDescriptor) { - final long stateId = StateIdProvider.getNextId(); - synchronized (LOCK) { - stateIdToCounter.put(stateId, new AtomicLong(0)); - stateIdToCounterForPopulatingDestinationStats.put(stateId, new AtomicLong(0)); - descToStateIdQ.get(resolvedDescriptor).add(stateId); - } - } - - /** - * Simplify internal tracking by providing a global always increasing counter for state ids. - */ - private static class StateIdProvider { - - private static final AtomicLong pk = new AtomicLong(0); - - public static long getNextId() { - return pk.incrementAndGet(); - } - - } - - private record StateMessageWithArrivalNumber(PartialAirbyteMessage partialAirbyteStateMessage, long arrivalNumber) {} - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/util/ApmTraceUtils.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/util/ApmTraceUtils.java deleted file mode 100644 index cc1ff5935b382..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/util/ApmTraceUtils.java +++ /dev/null @@ -1,150 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.util; - -import datadog.trace.api.DDTags; -import datadog.trace.api.interceptor.MutableSpan; -import io.opentracing.Span; -import io.opentracing.log.Fields; -import io.opentracing.tag.Tags; -import io.opentracing.util.GlobalTracer; -import java.io.PrintWriter; -import java.io.StringWriter; -import java.util.Map; - -/** - * Collection of utility methods to help with performance tracing. - */ -public class ApmTraceUtils { - - /** - * String format for the name of tags added to spans. - */ - public static final String TAG_FORMAT = "airbyte.%s.%s"; - - /** - * Standard prefix for tags added to spans. - */ - public static final String TAG_PREFIX = "metadata"; - - /** - * Adds all the provided tags to the currently active span, if one exists.
    - * All tags added via this method will use the default {@link #TAG_PREFIX} namespace. - * - * @param tags A map of tags to be added to the currently active span. - */ - public static void addTagsToTrace(final Map tags) { - addTagsToTrace(tags, TAG_PREFIX); - } - - /** - * Adds all provided tags to the currently active span, if one exists, under the provided tag name - * namespace. - * - * @param tags A map of tags to be added to the currently active span. - * @param tagPrefix The prefix to be added to each custom tag name. - */ - public static void addTagsToTrace(final Map tags, final String tagPrefix) { - addTagsToTrace(GlobalTracer.get().activeSpan(), tags, tagPrefix); - } - - /** - * Adds all the provided tags to the provided span, if one exists. - * - * @param span The {@link Span} that will be associated with the tags. - * @param tags A map of tags to be added to the currently active span. - * @param tagPrefix The prefix to be added to each custom tag name. - */ - public static void addTagsToTrace(final Span span, final Map tags, final String tagPrefix) { - if (span != null) { - tags.entrySet().forEach(entry -> { - span.setTag(formatTag(entry.getKey(), tagPrefix), entry.getValue().toString()); - }); - } - } - - /** - * Adds an exception to the currently active span, if one exists. - * - * @param t The {@link Throwable} to be added to the currently active span. - */ - public static void addExceptionToTrace(final Throwable t) { - addExceptionToTrace(GlobalTracer.get().activeSpan(), t); - } - - /** - * Adds an exception to the provided span, if one exists. - * - * @param span The {@link Span} that will be associated with the exception. - * @param t The {@link Throwable} to be added to the provided span. - */ - public static void addExceptionToTrace(final Span span, final Throwable t) { - if (span != null) { - span.setTag(Tags.ERROR, true); - span.log(Map.of(Fields.ERROR_OBJECT, t)); - } - } - - /** - * Adds all the provided tags to the root span. - * - * @param tags A map of tags to be added to the root span. - */ - public static void addTagsToRootSpan(final Map tags) { - final Span activeSpan = GlobalTracer.get().activeSpan(); - if (activeSpan instanceof MutableSpan) { - final MutableSpan localRootSpan = ((MutableSpan) activeSpan).getLocalRootSpan(); - tags.entrySet().forEach(entry -> { - localRootSpan.setTag(formatTag(entry.getKey(), TAG_PREFIX), entry.getValue().toString()); - }); - } - } - - /** - * Adds an exception to the root span, if an active one exists. - * - * @param t The {@link Throwable} to be added to the provided span. - */ - public static void recordErrorOnRootSpan(final Throwable t) { - final Span activeSpan = GlobalTracer.get().activeSpan(); - if (activeSpan != null) { - activeSpan.setTag(Tags.ERROR, true); - activeSpan.log(Map.of(Fields.ERROR_OBJECT, t)); - } - if (activeSpan instanceof MutableSpan) { - final MutableSpan localRootSpan = ((MutableSpan) activeSpan).getLocalRootSpan(); - localRootSpan.setError(true); - localRootSpan.setTag(DDTags.ERROR_MSG, t.getMessage()); - localRootSpan.setTag(DDTags.ERROR_TYPE, t.getClass().getName()); - final StringWriter errorString = new StringWriter(); - t.printStackTrace(new PrintWriter(errorString)); - localRootSpan.setTag(DDTags.ERROR_STACK, errorString.toString()); - } - } - - /** - * Formats the tag key using {@link #TAG_FORMAT} provided by this utility, using the default tag - * prefix {@link #TAG_PREFIX}. - * - * @param tagKey The tag key to format. - * @return The formatted tag key. - */ - public static String formatTag(final String tagKey) { - return formatTag(tagKey, TAG_PREFIX); - } - - /** - * Formats the tag key using {@link #TAG_FORMAT} provided by this utility with the provided tag - * prefix. - * - * @param tagKey The tag key to format. - * @param tagPrefix The prefix to be added to each custom tag name. - * @return The formatted tag key. - */ - public static String formatTag(final String tagKey, final String tagPrefix) { - return String.format(TAG_FORMAT, tagPrefix, tagKey); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/util/ConnectorExceptionUtil.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/util/ConnectorExceptionUtil.java deleted file mode 100644 index dc49697d3e99a..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/util/ConnectorExceptionUtil.java +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.util; - -import static java.util.stream.Collectors.joining; - -import com.google.common.collect.ImmutableList; -import io.airbyte.cdk.integrations.base.errors.messages.ErrorMessage; -import io.airbyte.commons.exceptions.ConfigErrorException; -import io.airbyte.commons.exceptions.ConnectionErrorException; -import io.airbyte.commons.functional.Either; -import java.sql.SQLException; -import java.sql.SQLSyntaxErrorException; -import java.util.Collection; -import java.util.List; -import java.util.Locale; -import org.apache.commons.lang3.exception.ExceptionUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Utility class defining methods for handling configuration exceptions in connectors. - */ -public class ConnectorExceptionUtil { - - private static final Logger LOGGER = LoggerFactory.getLogger(ConnectorExceptionUtil.class); - - public static final String COMMON_EXCEPTION_MESSAGE_TEMPLATE = "Could not connect with provided configuration. Error: %s"; - static final String RECOVERY_CONNECTION_ERROR_MESSAGE = - "We're having issues syncing from a Postgres replica that is configured as a hot standby server. " + - "Please see https://go.airbyte.com/pg-hot-standby-error-message for options and workarounds"; - - public static final List HTTP_AUTHENTICATION_ERROR_CODES = ImmutableList.of(401, 403); - - public static boolean isConfigError(final Throwable e) { - return isConfigErrorException(e) || isConnectionError(e) || - isRecoveryConnectionException(e) || isUnknownColumnInFieldListException(e); - } - - public static String getDisplayMessage(final Throwable e) { - if (e instanceof ConfigErrorException) { - return ((ConfigErrorException) e).getDisplayMessage(); - } else if (e instanceof final ConnectionErrorException connEx) { - return ErrorMessage.getErrorMessage(connEx.getStateCode(), connEx.getErrorCode(), connEx.getExceptionMessage(), connEx); - } else if (isRecoveryConnectionException(e)) { - return RECOVERY_CONNECTION_ERROR_MESSAGE; - } else if (isUnknownColumnInFieldListException(e)) { - return e.getMessage(); - } else { - return String.format(COMMON_EXCEPTION_MESSAGE_TEMPLATE, e.getMessage() != null ? e.getMessage() : ""); - } - } - - /** - * Returns the first instance of an exception associated with a configuration error (if it exists). - * Otherwise, the original exception is returned. - */ - public static Throwable getRootConfigError(final Exception e) { - Throwable current = e; - while (current != null) { - if (ConnectorExceptionUtil.isConfigError(current)) { - return current; - } else { - current = current.getCause(); - } - } - return e; - } - - /** - * Log all the exceptions, and rethrow the first. This is useful for e.g. running multiple futures - * and waiting for them to complete/fail. Rather than combining them into a single mega-exception - * (which works poorly in the UI), we just log all of them, and throw the first exception. - *

    - * In most cases, all the exceptions will look very similar, so the user only needs to see the first - * exception anyway. This mimics e.g. a for-loop over multiple tasks, where the loop would break on - * the first exception. - */ - public static void logAllAndThrowFirst(final String initialMessage, final Collection throwables) throws T { - if (!throwables.isEmpty()) { - final String stacktraces = throwables.stream().map(ExceptionUtils::getStackTrace).collect(joining("\n")); - LOGGER.error(initialMessage + stacktraces + "\nRethrowing first exception."); - throw throwables.iterator().next(); - } - } - - public static List getResultsOrLogAndThrowFirst(final String initialMessage, - final List> eithers) - throws T { - List throwables = eithers.stream().filter(Either::isLeft).map(Either::getLeft).toList(); - if (!throwables.isEmpty()) { - logAllAndThrowFirst(initialMessage, throwables); - } - // No need to filter on isRight since isLeft will throw before reaching this line. - return eithers.stream().map(Either::getRight).toList(); - } - - private static boolean isConfigErrorException(Throwable e) { - return e instanceof ConfigErrorException; - } - - private static boolean isConnectionError(Throwable e) { - return e instanceof ConnectionErrorException; - } - - private static boolean isRecoveryConnectionException(Throwable e) { - return e instanceof SQLException && e.getMessage() - .toLowerCase(Locale.ROOT) - .contains("due to conflict with recovery"); - } - - private static boolean isUnknownColumnInFieldListException(Throwable e) { - return e instanceof SQLSyntaxErrorException - && e.getMessage() - .toLowerCase(Locale.ROOT) - .contains("unknown column") - && e.getMessage() - .toLowerCase(Locale.ROOT) - .contains("in 'field list'"); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/util/concurrent/ConcurrentStreamConsumer.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/util/concurrent/ConcurrentStreamConsumer.java deleted file mode 100644 index 3830ebf42eac0..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/util/concurrent/ConcurrentStreamConsumer.java +++ /dev/null @@ -1,237 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.util.concurrent; - -import io.airbyte.cdk.integrations.base.AirbyteTraceMessageUtility; -import io.airbyte.commons.stream.AirbyteStreamStatusHolder; -import io.airbyte.commons.stream.StreamStatusUtils; -import io.airbyte.commons.util.AutoCloseableIterator; -import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.Optional; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.LinkedBlockingQueue; -import java.util.concurrent.ThreadFactory; -import java.util.concurrent.ThreadPoolExecutor; -import java.util.concurrent.ThreadPoolExecutor.AbortPolicy; -import java.util.concurrent.TimeUnit; -import java.util.function.Consumer; -import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * {@link Consumer} implementation that consumes {@link AirbyteMessage} records from each provided - * stream concurrently. - *

    - *

    - * The consumer calculates the parallelism based on the provided requested parallelism. If the - * requested parallelism is greater than zero, the minimum value between the requested parallelism - * and the maximum number of allowed threads is chosen as the parallelism value. Otherwise, the - * minimum parallelism value is selected. This is to avoid issues with attempting to execute with a - * parallelism value of zero, which is not allowed by the underlying {@link ExecutorService}. - *

    - *

    - * This consumer will capture any raised exceptions during execution of each stream. Anu exceptions - * are stored and made available by calling the {@link #getException()} method. - */ -public class ConcurrentStreamConsumer implements Consumer>>, AutoCloseable { - - private static final Logger LOGGER = LoggerFactory.getLogger(ConcurrentStreamConsumer.class); - - /** - * Name of threads spawned by the {@link ConcurrentStreamConsumer}. - */ - public static final String CONCURRENT_STREAM_THREAD_NAME = "concurrent-stream-thread"; - - private final ExecutorService executorService; - private final List exceptions; - private final Integer parallelism; - private final Consumer> streamConsumer; - private final Optional> streamStatusEmitter = - Optional.of(AirbyteTraceMessageUtility::emitStreamStatusTrace); - - /** - * Constructs a new {@link ConcurrentStreamConsumer} that will use the provided stream consumer to - * execute each stream submitted to the {@link #accept(Collection)} method of - * this consumer. Streams submitted to the {@link #accept(Collection)} method - * will be converted to a {@link Runnable} and executed on an {@link ExecutorService} configured by - * this consumer to ensure concurrent execution of each stream. - * - * @param streamConsumer The {@link Consumer} that accepts streams as an - * {@link AutoCloseableIterator}. - * @param requestedParallelism The requested amount of parallelism that will be used as a hint to - * determine the appropriate number of threads to execute concurrently. - */ - public ConcurrentStreamConsumer(final Consumer> streamConsumer, final Integer requestedParallelism) { - this.parallelism = computeParallelism(requestedParallelism); - this.executorService = createExecutorService(parallelism); - this.exceptions = new ArrayList<>(); - this.streamConsumer = streamConsumer; - } - - @Override - public void accept(final Collection> streams) { - /* - * Submit the provided streams to the underlying executor service for concurrent execution. This - * thread will track the status of each stream as well as consuming all messages produced from each - * stream, passing them to the provided message consumer for further processing. Any exceptions - * raised within the thread will be captured and exposed to the caller. - */ - final Collection> futures = streams.stream() - .map(stream -> new ConcurrentStreamRunnable(stream, this)) - .map(runnable -> CompletableFuture.runAsync(runnable, executorService)) - .collect(Collectors.toList()); - - /* - * Wait for the submitted streams to complete before returning. This uses the join() method to allow - * all streams to complete even if one or more encounters an exception. - */ - LOGGER.debug("Waiting for all streams to complete...."); - CompletableFuture.allOf(futures.toArray(new CompletableFuture[futures.size()])).join(); - LOGGER.debug("Completed consuming from all streams."); - } - - /** - * Returns the first captured {@link Exception}. - * - * @return The first captured {@link Exception} or an empty {@link Optional} if no exceptions were - * captured during execution. - */ - public Optional getException() { - if (!exceptions.isEmpty()) { - return Optional.of(exceptions.get(0)); - } else { - return Optional.empty(); - } - } - - /** - * Returns the list of exceptions captured during execution of the streams, if any. - * - * @return The collection of captured exceptions or an empty list. - */ - public List getExceptions() { - return Collections.unmodifiableList(exceptions); - } - - /** - * Returns the parallelism value that will be used by this consumer to execute the consumption of - * data from the provided streams in parallel. - * - * @return The parallelism value of this consumer. - */ - public Integer getParallelism() { - return computeParallelism(parallelism); - } - - /** - * Calculates the parallelism based on the requested parallelism. If the requested parallelism is - * greater than zero, the minimum value between the parallelism and the maximum parallelism is - * chosen as the parallelism count. Otherwise, the minimum parallelism is selected. This is to avoid - * issues with attempting to create an executor service with a thread pool size of 0, which is not - * allowed. - * - * @param requestedParallelism The requested parallelism. - * @return The selected parallelism based on the factors outlined above. - */ - private Integer computeParallelism(final Integer requestedParallelism) { - /* - * Selects the default thread pool size based on the provided value via an environment variable or - * the number of available processors if the environment variable is not set/present. This is to - * ensure that we do not over-parallelize unless requested explicitly. - */ - final Integer defaultPoolSize = Optional.ofNullable(System.getenv("DEFAULT_CONCURRENT_STREAM_CONSUMER_THREADS")) - .map(Integer::parseInt) - .orElseGet(() -> Runtime.getRuntime().availableProcessors()); - LOGGER.debug("Default parallelism: {}, Requested parallelism: {}", defaultPoolSize, requestedParallelism); - final Integer parallelism = Math.min(defaultPoolSize, requestedParallelism > 0 ? requestedParallelism : 1); - LOGGER.debug("Computed concurrent stream consumer parallelism: {}", parallelism); - return parallelism; - } - - /** - * Creates the {@link ExecutorService} that will be used by the consumer to consume from the - * provided streams in parallel. - * - * @param nThreads The number of threads to execute concurrently. - * @return The configured {@link ExecutorService}. - */ - private ExecutorService createExecutorService(final Integer nThreads) { - return new ThreadPoolExecutor(nThreads, nThreads, 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<>(), - new ConcurrentStreamThreadFactory(), new AbortPolicy()); - } - - /** - * Executes the stream by providing it to the configured {@link #streamConsumer}. - * - * @param stream The stream to be executed. - */ - private void executeStream(final AutoCloseableIterator stream) { - try (stream) { - stream.getAirbyteStream().ifPresent(s -> LOGGER.debug("Consuming from stream {}...", s)); - StreamStatusUtils.emitStartStreamStatus(stream, streamStatusEmitter); - streamConsumer.accept(stream); - StreamStatusUtils.emitCompleteStreamStatus(stream, streamStatusEmitter); - stream.getAirbyteStream().ifPresent(s -> LOGGER.debug("Consumption from stream {} complete.", s)); - } catch (final Exception e) { - stream.getAirbyteStream().ifPresent(s -> LOGGER.error("Unable to consume from stream {}.", s, e)); - StreamStatusUtils.emitIncompleteStreamStatus(stream, streamStatusEmitter); - exceptions.add(e); - } - } - - @Override - public void close() throws Exception { - // Block waiting for the executor service to close - executorService.shutdownNow(); - executorService.awaitTermination(30, TimeUnit.SECONDS); - } - - /** - * Custom {@link ThreadFactory} that names the threads used to concurrently execute streams. - */ - private static class ConcurrentStreamThreadFactory implements ThreadFactory { - - @Override - public Thread newThread(final Runnable r) { - final Thread thread = new Thread(r); - if (r instanceof ConcurrentStreamRunnable) { - final AutoCloseableIterator stream = ((ConcurrentStreamRunnable) r).stream(); - if (stream.getAirbyteStream().isPresent()) { - final AirbyteStreamNameNamespacePair airbyteStream = stream.getAirbyteStream().get(); - thread.setName(String.format("%s-%s-%s", CONCURRENT_STREAM_THREAD_NAME, airbyteStream.getNamespace(), airbyteStream.getName())); - } else { - thread.setName(CONCURRENT_STREAM_THREAD_NAME); - } - } else { - thread.setName(CONCURRENT_STREAM_THREAD_NAME); - } - return thread; - } - - } - - /** - * Custom {@link Runnable} that exposes the stream for thread naming purposes. - * - * @param stream The stream that is part of the {@link Runnable} execution. - * @param consumer The {@link ConcurrentStreamConsumer} that will execute the stream. - */ - private record ConcurrentStreamRunnable(AutoCloseableIterator stream, ConcurrentStreamConsumer consumer) implements Runnable { - - @Override - public void run() { - consumer.executeStream(stream); - } - - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/CDKConstants.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/CDKConstants.kt new file mode 100644 index 0000000000000..f78ea1c103d3e --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/CDKConstants.kt @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk + +import java.io.IOException +import java.util.* + +object CDKConstants { + val VERSION: String = version + + private val version: String + get() { + val prop = Properties() + + try { + CDKConstants::class + .java + .classLoader + .getResourceAsStream("version.properties") + .use { inputStream -> + prop.load(inputStream) + return prop.getProperty("version") + } + } catch (e: IOException) { + throw RuntimeException("Could not read version properties file", e) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/AbstractDatabase.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/AbstractDatabase.kt new file mode 100644 index 0000000000000..091b4b0be57d6 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/AbstractDatabase.kt @@ -0,0 +1,18 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db + +import com.fasterxml.jackson.databind.JsonNode + +/** + * A wrapper around the instantiated [javax.sql.DataSource]. + * + * Note that this class does not implement [AutoCloseable]/[java.io.Closeable], as it is not the + * responsibility of this class to close the provided [javax.sql.DataSource]. This is to avoid + * accidentally closing a shared resource. + */ +open class AbstractDatabase { + var sourceConfig: JsonNode? = null + var databaseConfig: JsonNode? = null +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/ContextQueryFunction.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/ContextQueryFunction.kt new file mode 100644 index 0000000000000..847a1032cbeb0 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/ContextQueryFunction.kt @@ -0,0 +1,11 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db + +import java.sql.SQLException +import org.jooq.DSLContext + +fun interface ContextQueryFunction { + @Throws(SQLException::class) fun query(context: DSLContext?): T +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/DataTypeSupplier.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/DataTypeSupplier.kt new file mode 100644 index 0000000000000..755d1b65a9ae5 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/DataTypeSupplier.kt @@ -0,0 +1,10 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db + +import java.sql.SQLException + +fun interface DataTypeSupplier { + @Throws(SQLException::class) fun apply(): DataType +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/DataTypeUtils.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/DataTypeUtils.kt new file mode 100644 index 0000000000000..e9eb616a7ec79 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/DataTypeUtils.kt @@ -0,0 +1,140 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db + +import java.sql.Date +import java.sql.SQLException +import java.text.DateFormat +import java.text.SimpleDateFormat +import java.time.* +import java.time.format.DateTimeFormatter +import java.util.function.Function +import kotlin.math.abs + +/** + * TODO : Replace all the DateTime related logic of this class with + * [io.airbyte.cdk.db.jdbc.DateTimeConverter] + */ +object DataTypeUtils { + const val DATE_FORMAT_PATTERN: String = "yyyy-MM-dd'T'HH:mm:ss'Z'" + + const val DATE_FORMAT_WITH_MILLISECONDS_PATTERN: String = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'" + + @JvmField val TIME_FORMATTER: DateTimeFormatter = DateTimeFormatter.ofPattern("HH:mm:ss.SSSSSS") + @JvmField + val TIMESTAMP_FORMATTER: DateTimeFormatter = + DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSSSS") + @JvmField + val TIMETZ_FORMATTER: DateTimeFormatter = DateTimeFormatter.ofPattern("HH:mm:ss.SSSSSSXXX") + @JvmField + val TIMESTAMPTZ_FORMATTER: DateTimeFormatter = + DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSSSSXXX") + @JvmField + val OFFSETDATETIME_FORMATTER: DateTimeFormatter = + DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSSSSSS XXX") + @JvmField val DATE_FORMATTER: DateTimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd") + + @JvmStatic + val dateFormat: DateFormat + // wrap SimpleDateFormat in a function because SimpleDateFormat is not threadsafe as a + // static final. + get() = + SimpleDateFormat(DATE_FORMAT_PATTERN) // Quoted "Z" to indicate UTC, no timezone offset; + + val dateFormatMillisPattern: DateFormat + // wrap SimpleDateFormat in a function because SimpleDateFormat is not threadsafe as a + // static final. + get() = SimpleDateFormat(DATE_FORMAT_WITH_MILLISECONDS_PATTERN) + + @JvmStatic + fun returnNullIfInvalid(valueProducer: DataTypeSupplier): T? { + return returnNullIfInvalid(valueProducer, Function { _: T? -> true }) + } + + @JvmStatic + fun returnNullIfInvalid( + valueProducer: DataTypeSupplier, + isValidFn: Function + ): T? { + // Some edge case values (e.g: Infinity, NaN) have no java or JSON equivalent, and will + // throw an + // exception when parsed. We want to parse those + // values as null. + // This method reduces error handling boilerplate. + try { + val value = valueProducer.apply() + return if (isValidFn.apply(value)) value else null + } catch (e: SQLException) { + return null + } + } + + @JvmStatic + fun toISO8601StringWithMicroseconds(instant: Instant): String { + val dateWithMilliseconds = dateFormatMillisPattern.format(Date.from(instant)) + return dateWithMilliseconds.substring(0, 23) + + calculateMicrosecondsString(instant.nano) + + dateWithMilliseconds.substring(23) + } + + private fun calculateMicrosecondsString(nano: Int): String { + val microSeconds = (nano / 1000) % 1000 + val result = + if (microSeconds < 10) { + "00$microSeconds" + } else if (microSeconds < 100) { + "0$microSeconds" + } else { + "" + microSeconds + } + return result + } + + @JvmStatic + fun toISO8601StringWithMilliseconds(epochMillis: Long): String { + return dateFormatMillisPattern.format(Date.from(Instant.ofEpochMilli(epochMillis))) + } + + @JvmStatic + fun toISO8601String(epochMillis: Long): String { + return dateFormat.format(Date.from(Instant.ofEpochMilli(epochMillis))) + } + + @JvmStatic + fun toISO8601String(date: java.util.Date?): String { + return dateFormat.format(date) + } + + @JvmStatic + fun toISOTimeString(dateTime: LocalDateTime): String { + return DateTimeFormatter.ISO_TIME.format(dateTime.toLocalTime()) + } + + @JvmStatic + fun toISO8601String(date: LocalDate): String { + return toISO8601String(date.atStartOfDay()) + } + + @JvmStatic + fun toISO8601String(date: LocalDateTime): String { + return date.format(DateTimeFormatter.ofPattern(DATE_FORMAT_PATTERN)) + } + + @JvmStatic + fun toISO8601String(date: OffsetDateTime): String { + return date.format(OFFSETDATETIME_FORMATTER) + } + + @JvmStatic + fun toISO8601String(duration: Duration): String { + return dateFormat.format( + Date.from( + Instant.ofEpochSecond( + abs(duration.seconds.toDouble()).toLong(), + abs(duration.nano.toDouble()).toLong() + ) + ) + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/Database.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/Database.kt new file mode 100644 index 0000000000000..1cc63dfb35bc2 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/Database.kt @@ -0,0 +1,24 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db + +import java.sql.SQLException +import org.jooq.Configuration +import org.jooq.DSLContext +import org.jooq.impl.DSL + +/** Database object for interacting with a Jooq connection. */ +open class Database(private val dslContext: DSLContext?) { + @Throws(SQLException::class) + open fun query(transform: ContextQueryFunction): T? { + return transform.query(dslContext) + } + + @Throws(SQLException::class) + open fun transaction(transform: ContextQueryFunction): T? { + return dslContext!!.transactionResult { configuration: Configuration? -> + transform.query(DSL.using(configuration)) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/DbAnalyticsUtils.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/DbAnalyticsUtils.kt new file mode 100644 index 0000000000000..ffe7897b79e2f --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/DbAnalyticsUtils.kt @@ -0,0 +1,28 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db + +import io.airbyte.protocol.models.v0.AirbyteAnalyticsTraceMessage + +/** + * Utility class to define constants associated with database source connector analytics events. + * Make sure to add the analytics event to + * https://www.notion.so/Connector-Analytics-Events-892a79a49852465f8d59a18bd84c36de + */ +object DbAnalyticsUtils { + const val CDC_CURSOR_INVALID_KEY: String = "db-sources-cdc-cursor-invalid" + const val DATA_TYPES_SERIALIZATION_ERROR_KEY = "db-sources-data-serialization-error" + + @JvmStatic + fun cdcCursorInvalidMessage(): AirbyteAnalyticsTraceMessage { + return AirbyteAnalyticsTraceMessage().withType(CDC_CURSOR_INVALID_KEY).withValue("1") + } + + @JvmStatic + fun dataTypesSerializationErrorMessage(): AirbyteAnalyticsTraceMessage { + return AirbyteAnalyticsTraceMessage() + .withType(DATA_TYPES_SERIALIZATION_ERROR_KEY) + .withValue("1") + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/ExceptionWrappingDatabase.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/ExceptionWrappingDatabase.kt new file mode 100644 index 0000000000000..a3d67c6b10b02 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/ExceptionWrappingDatabase.kt @@ -0,0 +1,28 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db + +import java.io.* +import java.sql.SQLException + +/** Wraps a [Database] object and throwing IOExceptions instead of SQLExceptions. */ +class ExceptionWrappingDatabase(private val database: Database) { + @Throws(IOException::class) + fun query(transform: ContextQueryFunction): T? { + try { + return database.query(transform) + } catch (e: SQLException) { + throw IOException(e) + } + } + + @Throws(IOException::class) + fun transaction(transform: ContextQueryFunction): T? { + try { + return database.transaction(transform) + } catch (e: SQLException) { + throw IOException(e) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/IncrementalUtils.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/IncrementalUtils.kt new file mode 100644 index 0000000000000..11a3aee480c0c --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/IncrementalUtils.kt @@ -0,0 +1,130 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db + +import io.airbyte.protocol.models.JsonSchemaPrimitiveUtil +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import java.lang.Boolean +import java.util.* +import kotlin.IllegalStateException +import kotlin.Int +import kotlin.String +import kotlin.check +import kotlin.checkNotNull + +object IncrementalUtils { + private const val PROPERTIES = "properties" + + @JvmStatic + fun getCursorField(stream: ConfiguredAirbyteStream): String { + check(stream.cursorField.size != 0) { + "No cursor field specified for stream attempting to do incremental." + } + check(stream.cursorField.size <= 1) { "Source does not support nested cursor fields." } + return stream.cursorField[0] + } + + @JvmStatic + fun getCursorFieldOptional(stream: ConfiguredAirbyteStream): Optional { + return try { + Optional.ofNullable(getCursorField(stream)) + } catch (e: IllegalStateException) { + Optional.empty() + } + } + + @JvmStatic + fun getCursorType( + stream: ConfiguredAirbyteStream, + cursorField: String? + ): JsonSchemaPrimitiveUtil.JsonSchemaPrimitive? { + checkNotNull(stream.stream.jsonSchema[PROPERTIES]) { + String.format("No properties found in stream: %s.", stream.stream.name) + } + + checkNotNull(stream.stream.jsonSchema[PROPERTIES][cursorField]) { + String.format( + "Could not find cursor field: %s in schema for stream: %s.", + cursorField, + stream.stream.name + ) + } + + check( + !(stream.stream.jsonSchema[PROPERTIES][cursorField]["type"] == null && + stream.stream.jsonSchema[PROPERTIES][cursorField]["\$ref"] == null) + ) { + String.format( + "Could not find cursor type for field: %s in schema for stream: %s.", + cursorField, + stream.stream.name + ) + } + + return if (stream.stream.jsonSchema[PROPERTIES][cursorField]["type"] == null) { + JsonSchemaPrimitiveUtil.PRIMITIVE_TO_REFERENCE_BIMAP.inverse()[ + stream.stream.jsonSchema[PROPERTIES][cursorField]["\$ref"].asText()] + } else { + JsonSchemaPrimitiveUtil.JsonSchemaPrimitive.valueOf( + stream.stream.jsonSchema[PROPERTIES][cursorField]["type"] + .asText() + .uppercase(Locale.getDefault()) + ) + } + } + + /** + * Comparator where if original is less than candidate then value less than 0, if greater than + * candidate then value greater than 0, else 0 + * + * @param original the first value to compare + * @param candidate the second value to compare + * @param type primitive type used to determine comparison + * @return + */ + @JvmStatic + fun compareCursors( + original: String?, + candidate: String?, + type: JsonSchemaPrimitiveUtil.JsonSchemaPrimitive? + ): Int { + if (original == null && candidate == null) { + return 0 + } + + if (candidate == null) { + return 1 + } + + if (original == null) { + return -1 + } + + return when (type) { + JsonSchemaPrimitiveUtil.JsonSchemaPrimitive.STRING, + JsonSchemaPrimitiveUtil.JsonSchemaPrimitive.STRING_V1, + JsonSchemaPrimitiveUtil.JsonSchemaPrimitive.DATE_V1, + JsonSchemaPrimitiveUtil.JsonSchemaPrimitive.TIME_WITH_TIMEZONE_V1, + JsonSchemaPrimitiveUtil.JsonSchemaPrimitive.TIME_WITHOUT_TIMEZONE_V1, + JsonSchemaPrimitiveUtil.JsonSchemaPrimitive.TIMESTAMP_WITH_TIMEZONE_V1, + JsonSchemaPrimitiveUtil.JsonSchemaPrimitive.TIMESTAMP_WITHOUT_TIMEZONE_V1 -> { + original.compareTo(candidate) + } + JsonSchemaPrimitiveUtil.JsonSchemaPrimitive.NUMBER, + JsonSchemaPrimitiveUtil.JsonSchemaPrimitive.NUMBER_V1, + JsonSchemaPrimitiveUtil.JsonSchemaPrimitive.INTEGER_V1 -> { + // todo (cgardens) - handle big decimal. this is currently an overflow risk. + java.lang.Double.compare(original.toDouble(), candidate.toDouble()) + } + JsonSchemaPrimitiveUtil.JsonSchemaPrimitive.BOOLEAN, + JsonSchemaPrimitiveUtil.JsonSchemaPrimitive.BOOLEAN_V1 -> { + Boolean.compare(original.toBoolean(), candidate.toBoolean()) + } + else -> + throw IllegalStateException( + String.format("Cannot use field of type %s as a comparable", type) + ) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/JdbcCompatibleSourceOperations.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/JdbcCompatibleSourceOperations.kt new file mode 100644 index 0000000000000..210fd088aae5e --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/JdbcCompatibleSourceOperations.kt @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db + +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.node.ObjectNode +import io.airbyte.cdk.db.jdbc.AirbyteRecordData +import java.sql.* + +interface JdbcCompatibleSourceOperations : SourceOperations { + /** + * Read from a result set, and copy the value of the column at colIndex to the Json object. + * + * @param colIndex 1-based column index. + */ + @Throws(SQLException::class) + fun copyToJsonField(resultSet: ResultSet, colIndex: Int, json: ObjectNode) + + /** Set the cursor field in incremental table query. */ + @Throws(SQLException::class) + fun setCursorField( + preparedStatement: PreparedStatement, + parameterIndex: Int, + cursorFieldType: SourceType?, + value: String? + ) + + /** Determine the database specific type of the input field based on its column metadata. */ + fun getDatabaseFieldType(field: JsonNode): SourceType + + /** + * This method will verify that filed could be used as cursor for incremental sync + * + * @param type + * - table field type that should be checked + * @return true is field type can be used as cursor field for incremental sync + */ + fun isCursorType(type: SourceType?): Boolean + + @Throws(SQLException::class) + abstract fun convertDatabaseRowToAirbyteRecordData(queryContext: ResultSet): AirbyteRecordData +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/SourceOperations.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/SourceOperations.kt new file mode 100644 index 0000000000000..f0c1efad00d1f --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/SourceOperations.kt @@ -0,0 +1,23 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.protocol.models.JsonSchemaType +import java.sql.SQLException + +interface SourceOperations { + /** + * Converts a database row into it's JSON representation. + * + * @throws SQLException + */ + @Throws(SQLException::class) fun rowToJson(queryResult: QueryResult): JsonNode + + /** + * Converts a database source type into an Airbyte type, which is currently represented by a + * [JsonSchemaType] + */ + fun getAirbyteType(sourceType: SourceType): JsonSchemaType +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/SqlDatabase.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/SqlDatabase.kt new file mode 100644 index 0000000000000..66aa08ad47892 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/SqlDatabase.kt @@ -0,0 +1,14 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db + +import com.fasterxml.jackson.databind.JsonNode +import java.util.stream.Stream + +abstract class SqlDatabase : AbstractDatabase() { + @Throws(Exception::class) abstract fun execute(sql: String?) + + @Throws(Exception::class) + abstract fun unsafeQuery(sql: String?, vararg params: String?): Stream +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/factory/ConnectionFactory.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/factory/ConnectionFactory.kt new file mode 100644 index 0000000000000..81ce6a01144b5 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/factory/ConnectionFactory.kt @@ -0,0 +1,44 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db.factory + +import java.sql.Connection +import java.sql.DriverManager +import java.sql.SQLException +import java.util.* + +/** + * This class as been added in order to be able to save the connection in a test. It was found that + * the [javax.sql.DataSource] close method wasn't propagating the connection properly. It shouldn't + * be needed in our application code. + */ +object ConnectionFactory { + /** + * Construct a new [Connection] instance using the provided configuration. + * + * @param username The username of the database user. + * @param password The password of the database user. + * @param connectionProperties The extra properties to add to the connection. + * @param jdbcConnectionString The JDBC connection string. + * @return The configured [Connection] + */ + @JvmStatic + fun create( + username: String?, + password: String?, + connectionProperties: Map, + jdbcConnectionString: String? + ): Connection { + try { + val properties = Properties() + properties["user"] = username + properties["password"] = password + connectionProperties.forEach { (k: String?, v: String?) -> properties[k] = v } + + return DriverManager.getConnection(jdbcConnectionString, properties) + } catch (e: SQLException) { + throw RuntimeException(e) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/factory/DSLContextFactory.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/factory/DSLContextFactory.kt new file mode 100644 index 0000000000000..2ecf74e0fd94c --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/factory/DSLContextFactory.kt @@ -0,0 +1,85 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db.factory + +import java.time.Duration +import javax.sql.DataSource +import org.jooq.DSLContext +import org.jooq.SQLDialect +import org.jooq.impl.DSL + +/** + * Temporary factory class that provides convenience methods for creating a [DSLContext] instances. + * This class will be removed once the project has been converted to leverage an application + * framework to manage the creation and injection of [DSLContext] objects. + */ +object DSLContextFactory { + /** + * Constructs a configured [DSLContext] instance using the provided configuration. + * + * @param dataSource The [DataSource] used to connect to the database. + * @param dialect The SQL dialect to use with objects created from this context. + * @return The configured [DSLContext]. + */ + fun create(dataSource: DataSource?, dialect: SQLDialect?): DSLContext { + return DSL.using(dataSource, dialect) + } + + /** + * Constructs a configured [DSLContext] instance using the provided configuration. + * + * @param username The username of the database user. + * @param password The password of the database user. + * @param driverClassName The fully qualified name of the JDBC driver class. + * @param jdbcConnectionString The JDBC connection string. + * @param dialect The SQL dialect to use with objects created from this context. + * @return The configured [DSLContext]. + */ + @JvmStatic + fun create( + username: String?, + password: String?, + driverClassName: String, + jdbcConnectionString: String?, + dialect: SQLDialect? + ): DSLContext { + return DSL.using( + DataSourceFactory.create(username, password, driverClassName, jdbcConnectionString), + dialect + ) + } + + /** + * Constructs a configured [DSLContext] instance using the provided configuration. + * + * @param username The username of the database user. + * @param password The password of the database user. + * @param driverClassName The fully qualified name of the JDBC driver class. + * @param jdbcConnectionString The JDBC connection string. + * @param dialect The SQL dialect to use with objects created from this context. + * @param connectionProperties Additional configuration properties for the underlying driver. + * @return The configured [DSLContext]. + */ + fun create( + username: String?, + password: String?, + driverClassName: String, + jdbcConnectionString: String?, + dialect: SQLDialect?, + connectionProperties: Map?, + connectionTimeout: Duration? + ): DSLContext { + return DSL.using( + DataSourceFactory.create( + username, + password, + driverClassName, + jdbcConnectionString, + connectionProperties, + connectionTimeout + ), + dialect + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/factory/DataSourceFactory.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/factory/DataSourceFactory.kt new file mode 100644 index 0000000000000..0b3625d18dd29 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/factory/DataSourceFactory.kt @@ -0,0 +1,298 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db.factory + +import com.google.common.base.Preconditions +import com.zaxxer.hikari.HikariConfig +import com.zaxxer.hikari.HikariDataSource +import java.time.Duration +import javax.sql.DataSource + +/** + * Temporary factory class that provides convenience methods for creating a [DataSource] instance. + * This class will be removed once the project has been converted to leverage an application + * framework to manage the creation and injection of [DataSource] objects. + */ +object DataSourceFactory { + /** + * Constructs a new [DataSource] using the provided configuration. + * + * @param username The username of the database user. + * @param password The password of the database user. + * @param driverClassName The fully qualified name of the JDBC driver class. + * @param jdbcConnectionString The JDBC connection string. + * @return The configured [DataSource]. + */ + @JvmStatic + fun create( + username: String?, + password: String?, + driverClassName: String, + jdbcConnectionString: String? + ): DataSource { + return DataSourceBuilder(username, password, driverClassName, jdbcConnectionString).build() + } + + /** + * Constructs a new [DataSource] using the provided configuration. + * + * @param username The username of the database user. + * @param password The password of the database user. + * @param driverClassName The fully qualified name of the JDBC driver class. + * @param jdbcConnectionString The JDBC connection string. + * @param connectionProperties Additional configuration properties for the underlying driver. + * @return The configured [DataSource]. + */ + @JvmStatic + fun create( + username: String?, + password: String?, + driverClassName: String, + jdbcConnectionString: String?, + connectionProperties: Map?, + connectionTimeout: Duration? + ): DataSource { + return DataSourceBuilder(username, password, driverClassName, jdbcConnectionString) + .withConnectionProperties(connectionProperties) + .withConnectionTimeout(connectionTimeout) + .build() + } + + /** + * Constructs a new [DataSource] using the provided configuration. + * + * @param username The username of the database user. + * @param password The password of the database user. + * @param host The host address of the database. + * @param port The port of the database. + * @param database The name of the database. + * @param driverClassName The fully qualified name of the JDBC driver class. + * @return The configured [DataSource]. + */ + fun create( + username: String?, + password: String?, + host: String?, + port: Int, + database: String?, + driverClassName: String + ): DataSource { + return DataSourceBuilder(username, password, driverClassName, host, port, database).build() + } + + /** + * Constructs a new [DataSource] using the provided configuration. + * + * @param username The username of the database user. + * @param password The password of the database user. + * @param host The host address of the database. + * @param port The port of the database. + * @param database The name of the database. + * @param driverClassName The fully qualified name of the JDBC driver class. + * @param connectionProperties Additional configuration properties for the underlying driver. + * @return The configured [DataSource]. + */ + fun create( + username: String?, + password: String?, + host: String?, + port: Int, + database: String?, + driverClassName: String, + connectionProperties: Map? + ): DataSource { + return DataSourceBuilder(username, password, driverClassName, host, port, database) + .withConnectionProperties(connectionProperties) + .build() + } + + /** + * Convenience method that constructs a new [DataSource] for a PostgreSQL database using the + * provided configuration. + * + * @param username The username of the database user. + * @param password The password of the database user. + * @param host The host address of the database. + * @param port The port of the database. + * @param database The name of the database. + * @return The configured [DataSource]. + */ + fun createPostgres( + username: String?, + password: String?, + host: String?, + port: Int, + database: String? + ): DataSource { + return DataSourceBuilder(username, password, "org.postgresql.Driver", host, port, database) + .build() + } + + /** + * Utility method that attempts to close the provided [DataSource] if it implements [Closeable]. + * + * @param dataSource The [DataSource] to close. + * @throws Exception if unable to close the data source. + */ + @JvmStatic + @Throws(Exception::class) + fun close(dataSource: DataSource?) { + if (dataSource != null) { + if (dataSource is AutoCloseable) { + dataSource.close() + } + } + } + + /** Builder class used to configure and construct [DataSource] instances. */ + class DataSourceBuilder + private constructor( + private var username: String?, + private var password: String?, + private var driverClassName: String + ) { + private var connectionProperties: Map = java.util.Map.of() + private var database: String? = null + private var host: String? = null + private var jdbcUrl: String? = null + private var maximumPoolSize = 10 + private var minimumPoolSize = 0 + private var connectionTimeout: Duration = Duration.ZERO + private var port = 5432 + private var connectionInitSql: String? = null + + constructor( + username: String?, + password: String?, + driverClassName: String, + jdbcUrl: String? + ) : this(username, password, driverClassName) { + this.jdbcUrl = jdbcUrl + } + + constructor( + username: String?, + password: String?, + driverClassName: String, + host: String?, + port: Int, + database: String? + ) : this(username, password, driverClassName) { + this.host = host + this.port = port + this.database = database + } + + fun withConnectionProperties( + connectionProperties: Map? + ): DataSourceBuilder { + if (connectionProperties != null) { + this.connectionProperties = connectionProperties + } + return this + } + + fun withDatabase(database: String?): DataSourceBuilder { + this.database = database + return this + } + + fun withDriverClassName(driverClassName: String): DataSourceBuilder { + this.driverClassName = driverClassName + return this + } + + fun withHost(host: String?): DataSourceBuilder { + this.host = host + return this + } + + fun withJdbcUrl(jdbcUrl: String?): DataSourceBuilder { + this.jdbcUrl = jdbcUrl + return this + } + + fun withMaximumPoolSize(maximumPoolSize: Int): DataSourceBuilder { + if (maximumPoolSize != null) { + this.maximumPoolSize = maximumPoolSize + } + return this + } + + fun withMinimumPoolSize(minimumPoolSize: Int): DataSourceBuilder { + if (minimumPoolSize != null) { + this.minimumPoolSize = minimumPoolSize + } + return this + } + + fun withConnectionTimeout(connectionTimeout: Duration?): DataSourceBuilder { + if (connectionTimeout != null) { + this.connectionTimeout = connectionTimeout + } + return this + } + + fun withPassword(password: String?): DataSourceBuilder { + this.password = password + return this + } + + fun withPort(port: Int): DataSourceBuilder { + if (port != null) { + this.port = port + } + return this + } + + fun withUsername(username: String?): DataSourceBuilder { + this.username = username + return this + } + + fun withConnectionInitSql(sql: String?): DataSourceBuilder { + this.connectionInitSql = sql + return this + } + + fun build(): DataSource { + val databaseDriver: DatabaseDriver = + DatabaseDriver.Companion.findByDriverClassName(driverClassName) + + Preconditions.checkNotNull( + databaseDriver, + "Unknown or blank driver class name: '$driverClassName'." + ) + + val config = HikariConfig() + + config.driverClassName = databaseDriver.driverClassName + config.jdbcUrl = + if (jdbcUrl != null) jdbcUrl + else String.format(databaseDriver.urlFormatString, host, port, database) + config.maximumPoolSize = maximumPoolSize + config.minimumIdle = minimumPoolSize + // HikariCP uses milliseconds for all time values: + // https://github.com/brettwooldridge/HikariCP#gear-configuration-knobs-baby + config.connectionTimeout = connectionTimeout.toMillis() + config.password = password + config.username = username + + /* + * Disable to prevent failing on startup. Applications may start prior to the database container + * being available. To avoid failing to create the connection pool, disable the fail check. This + * will preserve existing behavior that tests for the connection on first use, not on creation. + */ + config.initializationFailTimeout = Int.MIN_VALUE.toLong() + + config.connectionInitSql = connectionInitSql + + connectionProperties.forEach { (propertyName: String?, value: String?) -> + config.addDataSourceProperty(propertyName, value) + } + + return HikariDataSource(config) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/factory/DatabaseDriver.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/factory/DatabaseDriver.kt new file mode 100644 index 0000000000000..8f20d997b2c8b --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/factory/DatabaseDriver.kt @@ -0,0 +1,51 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db.factory + +/** Collection of JDBC driver class names and the associated JDBC URL format string. */ +enum class DatabaseDriver(val driverClassName: String, val urlFormatString: String) { + CLICKHOUSE("com.clickhouse.jdbc.ClickHouseDriver", "jdbc:clickhouse:%s://%s:%d/%s"), + DATABRICKS( + "com.databricks.client.jdbc.Driver", + "jdbc:databricks://%s:%s;HttpPath=%s;SSL=1;UserAgentEntry=Airbyte" + ), + DB2("com.ibm.db2.jcc.DB2Driver", "jdbc:db2://%s:%d/%s"), + STARBURST("io.trino.jdbc.TrinoDriver", "jdbc:trino://%s:%s/%s?SSL=true&source=airbyte"), + MARIADB("org.mariadb.jdbc.Driver", "jdbc:mariadb://%s:%d/%s"), + MSSQLSERVER( + "com.microsoft.sqlserver.jdbc.SQLServerDriver", + "jdbc:sqlserver://%s:%d;databaseName=%s" + ), + MYSQL("com.mysql.cj.jdbc.Driver", "jdbc:mysql://%s:%d/%s"), + ORACLE("oracle.jdbc.OracleDriver", "jdbc:oracle:thin:@%s:%d/%s"), + VERTICA("com.vertica.jdbc.Driver", "jdbc:vertica://%s:%d/%s"), + POSTGRESQL("org.postgresql.Driver", "jdbc:postgresql://%s:%d/%s"), + REDSHIFT("com.amazon.redshift.jdbc.Driver", "jdbc:redshift://%s:%d/%s"), + SNOWFLAKE("net.snowflake.client.jdbc.SnowflakeDriver", "jdbc:snowflake://%s/"), + YUGABYTEDB("com.yugabyte.Driver", "jdbc:yugabytedb://%s:%d/%s"), + EXASOL("com.exasol.jdbc.EXADriver", "jdbc:exa:%s:%d"), + TERADATA("com.teradata.jdbc.TeraDriver", "jdbc:teradata://%s/"); + + companion object { + /** + * Finds the [DatabaseDriver] enumerated value that matches the provided driver class name. + * + * @param driverClassName The driver class name. + * @return The matching [DatabaseDriver] enumerated value or `null` if no match is found. + */ + @JvmStatic + fun findByDriverClassName(driverClassName: String?): DatabaseDriver { + lateinit var selected: DatabaseDriver + + for (candidate in entries) { + if (candidate.driverClassName.equals(driverClassName, ignoreCase = true)) { + selected = candidate + break + } + } + + return selected + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/AbstractJdbcCompatibleSourceOperations.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/AbstractJdbcCompatibleSourceOperations.kt new file mode 100644 index 0000000000000..db78b4ab3b8b2 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/AbstractJdbcCompatibleSourceOperations.kt @@ -0,0 +1,520 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db.jdbc + +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.ObjectMapper +import com.fasterxml.jackson.databind.node.ObjectNode +import io.airbyte.cdk.db.DataTypeUtils +import io.airbyte.cdk.db.DbAnalyticsUtils.dataTypesSerializationErrorMessage +import io.airbyte.cdk.db.JdbcCompatibleSourceOperations +import io.airbyte.cdk.integrations.base.AirbyteTraceMessageUtility +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.v0.AirbyteRecordMessageMeta +import io.airbyte.protocol.models.v0.AirbyteRecordMessageMetaChange +import java.math.BigDecimal +import java.sql.* +import java.sql.Date +import java.text.ParseException +import java.time.* +import java.time.chrono.IsoEra +import java.time.format.DateTimeParseException +import java.util.* +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** Source operation skeleton for JDBC compatible databases. */ +abstract class AbstractJdbcCompatibleSourceOperations : + JdbcCompatibleSourceOperations { + + private val LOGGER: Logger = + LoggerFactory.getLogger(AbstractJdbcCompatibleSourceOperations::class.java) + + @Throws(SQLException::class) + override fun convertDatabaseRowToAirbyteRecordData(queryContext: ResultSet): AirbyteRecordData { + // the first call communicates with the database. after that the result is cached. + val columnCount = queryContext.metaData.columnCount + val jsonNode = Jsons.jsonNode(emptyMap()) as ObjectNode + val metaChanges: MutableList = + ArrayList() + + for (i in 1..columnCount) { + val columnName = queryContext.metaData.getColumnName(i) + val columnTypeName = queryContext.metaData.getColumnTypeName(i) + try { + // convert to java types that will convert into reasonable json. + copyToJsonField(queryContext, i, jsonNode) + } catch (e: java.lang.Exception) { + LOGGER.info( + "Failed to serialize column: {}, of type {}, with error {}", + columnName, + columnTypeName, + e.message + ) + AirbyteTraceMessageUtility.emitAnalyticsTrace(dataTypesSerializationErrorMessage()) + metaChanges.add( + AirbyteRecordMessageMetaChange() + .withField(columnName) + .withChange(AirbyteRecordMessageMetaChange.Change.NULLED) + .withReason( + AirbyteRecordMessageMetaChange.Reason.SOURCE_SERIALIZATION_ERROR, + ), + ) + } + } + + return AirbyteRecordData(jsonNode, AirbyteRecordMessageMeta().withChanges(metaChanges)) + } + @Throws(SQLException::class) + override fun rowToJson(queryContext: ResultSet): JsonNode { + // the first call communicates with the database. after that the result is cached. + val columnCount = queryContext.metaData.columnCount + val jsonNode = Jsons.jsonNode(emptyMap()) as ObjectNode + + for (i in 1..columnCount) { + // attempt to access the column. this allows us to know if it is null before we do + // type-specific + // parsing. if it is null, we can move on. while awkward, this seems to be the agreed + // upon way of + // checking for null values with jdbc. + queryContext.getObject(i) + if (queryContext.wasNull()) { + continue + } + + // convert to java types that will convert into reasonable json. + copyToJsonField(queryContext, i, jsonNode) + } + + return jsonNode + } + + @Throws(SQLException::class) + protected fun putArray( + node: ObjectNode, + columnName: String?, + resultSet: ResultSet, + index: Int + ) { + val arrayNode = ObjectMapper().createArrayNode() + val arrayResultSet = resultSet.getArray(index).resultSet + while (arrayResultSet.next()) { + arrayNode.add(arrayResultSet.getString(2)) + } + node.set(columnName, arrayNode) + } + + @Throws(SQLException::class) + protected open fun putBoolean( + node: ObjectNode, + columnName: String?, + resultSet: ResultSet, + index: Int + ) { + node.put(columnName, resultSet.getBoolean(index)) + } + + /** + * In some sources Short might have value larger than [Short.MAX_VALUE]. E.q. MySQL has unsigned + * smallint type, which can contain value 65535. If we fail to cast Short value, we will try to + * cast Integer. + */ + @Throws(SQLException::class) + protected fun putShortInt( + node: ObjectNode, + columnName: String?, + resultSet: ResultSet, + index: Int + ) { + try { + node.put(columnName, resultSet.getShort(index)) + } catch (e: SQLException) { + node.put(columnName, DataTypeUtils.returnNullIfInvalid { resultSet.getInt(index) }) + } + } + + /** + * In some sources Integer might have value larger than [Integer.MAX_VALUE]. E.q. MySQL has + * unsigned Integer type, which can contain value 3428724653. If we fail to cast Integer value, + * we will try to cast Long. + */ + @Throws(SQLException::class) + protected fun putInteger( + node: ObjectNode, + columnName: String?, + resultSet: ResultSet, + index: Int + ) { + try { + node.put(columnName, resultSet.getInt(index)) + } catch (e: SQLException) { + node.put(columnName, DataTypeUtils.returnNullIfInvalid { resultSet.getLong(index) }) + } + } + + @Throws(SQLException::class) + protected fun putBigInt( + node: ObjectNode, + columnName: String?, + resultSet: ResultSet, + index: Int + ) { + node.put(columnName, DataTypeUtils.returnNullIfInvalid { resultSet.getLong(index) }) + } + + @Throws(SQLException::class) + protected open fun putDouble( + node: ObjectNode, + columnName: String?, + resultSet: ResultSet, + index: Int + ) { + node.put( + columnName, + DataTypeUtils.returnNullIfInvalid( + { resultSet.getDouble(index) }, + { d: Double? -> java.lang.Double.isFinite(d!!) }, + ), + ) + } + + @Throws(SQLException::class) + protected fun putFloat( + node: ObjectNode, + columnName: String?, + resultSet: ResultSet, + index: Int + ) { + node.put( + columnName, + DataTypeUtils.returnNullIfInvalid( + { resultSet.getFloat(index) }, + { f: Float? -> java.lang.Float.isFinite(f!!) }, + ), + ) + } + + @Throws(SQLException::class) + protected open fun putBigDecimal( + node: ObjectNode, + columnName: String?, + resultSet: ResultSet, + index: Int + ) { + node.put(columnName, DataTypeUtils.returnNullIfInvalid { resultSet.getBigDecimal(index) }) + } + + @Throws(SQLException::class) + protected fun putString( + node: ObjectNode, + columnName: String?, + resultSet: ResultSet, + index: Int + ) { + node.put(columnName, resultSet.getString(index)) + } + + @Throws(SQLException::class) + protected open fun putDate( + node: ObjectNode, + columnName: String?, + resultSet: ResultSet, + index: Int + ) { + node.put(columnName, resultSet.getString(index)) + } + + @Throws(SQLException::class) + protected open fun putTime( + node: ObjectNode, + columnName: String?, + resultSet: ResultSet, + index: Int + ) { + node.put( + columnName, + DateTimeConverter.convertToTime(getObject(resultSet, index, LocalTime::class.java)), + ) + } + + @Throws(SQLException::class) + protected open fun putTimestamp( + node: ObjectNode, + columnName: String?, + resultSet: ResultSet, + index: Int + ) { + try { + node.put( + columnName, + DateTimeConverter.convertToTimestamp( + getObject(resultSet, index, LocalDateTime::class.java), + ), + ) + } catch (e: Exception) { + // for backward compatibility + val instant = resultSet.getTimestamp(index).toInstant() + node.put(columnName, DataTypeUtils.toISO8601StringWithMicroseconds(instant)) + } + } + + @Throws(SQLException::class) + protected open fun putBinary( + node: ObjectNode, + columnName: String?, + resultSet: ResultSet, + index: Int + ) { + node.put(columnName, resultSet.getBytes(index)) + } + + @Throws(SQLException::class) + protected fun putDefault( + node: ObjectNode, + columnName: String?, + resultSet: ResultSet, + index: Int + ) { + node.put(columnName, resultSet.getString(index)) + } + + @Throws(SQLException::class) + protected fun setTime( + preparedStatement: PreparedStatement, + parameterIndex: Int, + value: String? + ) { + try { + preparedStatement.setObject(parameterIndex, LocalTime.parse(value)) + } catch (e: DateTimeParseException) { + setTimestamp(preparedStatement, parameterIndex, value) + } + } + + @Throws(SQLException::class) + protected open fun setTimestamp( + preparedStatement: PreparedStatement, + parameterIndex: Int, + value: String? + ) { + try { + preparedStatement.setObject(parameterIndex, LocalDateTime.parse(value)) + } catch (e: DateTimeParseException) { + preparedStatement.setObject(parameterIndex, OffsetDateTime.parse(value)) + } + } + + @Throws(SQLException::class) + protected open fun setDate( + preparedStatement: PreparedStatement, + parameterIndex: Int, + value: String + ) { + try { + preparedStatement.setObject(parameterIndex, LocalDate.parse(value)) + } catch (e: DateTimeParseException) { + setDateAsTimestamp(preparedStatement, parameterIndex, value) + } + } + + @Throws(SQLException::class) + private fun setDateAsTimestamp( + preparedStatement: PreparedStatement, + parameterIndex: Int, + value: String + ) { + try { + val from = Timestamp.from(DataTypeUtils.dateFormat.parse(value).toInstant()) + preparedStatement.setDate(parameterIndex, Date(from.time)) + } catch (e: ParseException) { + throw RuntimeException(e) + } + } + + @Throws(SQLException::class) + protected fun setBit( + preparedStatement: PreparedStatement?, + parameterIndex: Int, + value: String? + ) { + // todo (cgardens) - currently we do not support bit because it requires special handling in + // the + // prepared statement. + // see + // https://www.postgresql-archive.org/Problems-with-BIT-datatype-and-preparedStatment-td5733533.html. + throw RuntimeException("BIT value is not supported as incremental parameter!") + } + + @Throws(SQLException::class) + protected fun setBoolean( + preparedStatement: PreparedStatement, + parameterIndex: Int, + value: String + ) { + preparedStatement.setBoolean(parameterIndex, value.toBoolean()) + } + + @Throws(SQLException::class) + protected fun setShortInt( + preparedStatement: PreparedStatement, + parameterIndex: Int, + value: String + ) { + preparedStatement.setShort(parameterIndex, value.toShort()) + } + + @Throws(SQLException::class) + protected fun setInteger( + preparedStatement: PreparedStatement, + parameterIndex: Int, + value: String + ) { + preparedStatement.setInt(parameterIndex, value.toInt()) + } + + @Throws(SQLException::class) + protected fun setBigInteger( + preparedStatement: PreparedStatement, + parameterIndex: Int, + value: String + ) { + preparedStatement.setLong(parameterIndex, BigDecimal(value).toBigInteger().toLong()) + } + + @Throws(SQLException::class) + protected fun setDouble( + preparedStatement: PreparedStatement, + parameterIndex: Int, + value: String + ) { + preparedStatement.setDouble(parameterIndex, value.toDouble()) + } + + @Throws(SQLException::class) + protected fun setReal( + preparedStatement: PreparedStatement, + parameterIndex: Int, + value: String + ) { + preparedStatement.setFloat(parameterIndex, value.toFloat()) + } + + @Throws(SQLException::class) + protected fun setDecimal( + preparedStatement: PreparedStatement, + parameterIndex: Int, + value: String + ) { + preparedStatement.setBigDecimal(parameterIndex, BigDecimal(value)) + } + + @Throws(SQLException::class) + protected fun setString( + preparedStatement: PreparedStatement, + parameterIndex: Int, + value: String? + ) { + preparedStatement.setString(parameterIndex, value) + } + + @Throws(SQLException::class) + protected fun setBinary( + preparedStatement: PreparedStatement, + parameterIndex: Int, + value: String? + ) { + preparedStatement.setBytes(parameterIndex, Base64.getDecoder().decode(value)) + } + + @Throws(SQLException::class) + protected fun getObject( + resultSet: ResultSet, + index: Int, + clazz: Class? + ): ObjectType { + return resultSet.getObject(index, clazz) + } + + @Throws(SQLException::class) + protected open fun putTimeWithTimezone( + node: ObjectNode, + columnName: String?, + resultSet: ResultSet, + index: Int + ) { + val timetz = getObject(resultSet, index, OffsetTime::class.java) + node.put(columnName, DateTimeConverter.convertToTimeWithTimezone(timetz)) + } + + @Throws(SQLException::class) + protected open fun putTimestampWithTimezone( + node: ObjectNode, + columnName: String?, + resultSet: ResultSet, + index: Int + ) { + val timestamptz = getObject(resultSet, index, OffsetDateTime::class.java) + val localDate = timestamptz.toLocalDate() + node.put( + columnName, + resolveEra(localDate, timestamptz.format(DataTypeUtils.TIMESTAMPTZ_FORMATTER)), + ) + } + + companion object { + /** A Date representing the earliest date in CE. Any date before this is in BCE. */ + private val ONE_CE: Date = Date.valueOf("0001-01-01") + + /** + * Modifies a string representation of a date/timestamp and normalizes its era indicator. + * Specifically, if this is a BCE value: + * + * * The leading negative sign will be removed if present + * * The "BC" suffix will be appended, if not already present + * + * You most likely would prefer to call one of the overloaded methods, which accept temporal + * types. + */ + fun resolveEra(isBce: Boolean, value: String): String { + var mangledValue = value + if (isBce) { + if (mangledValue.startsWith("-")) { + mangledValue = mangledValue.substring(1) + } + if (!mangledValue.endsWith(" BC")) { + mangledValue += " BC" + } + } + return mangledValue + } + + fun isBce(date: LocalDate): Boolean { + return date.era == IsoEra.BCE + } + + @JvmStatic + fun resolveEra(date: LocalDate, value: String): String { + return resolveEra(isBce(date), value) + } + + /** + * java.sql.Date objects don't properly represent their era (for example, using + * toLocalDate() always returns an object in CE). So to determine the era, we just check + * whether the date is before 1 AD. + * + * This is technically kind of sketchy due to ancient timestamps being weird (leap years, + * etc.), but my understanding is that [.ONE_CE] has the same weirdness, so it cancels out. + */ + @JvmStatic + fun resolveEra(date: Date, value: String): String { + return resolveEra(date.before(ONE_CE), value) + } + + /** See [.resolveEra] for explanation. */ + @JvmStatic + fun resolveEra(timestamp: Timestamp, value: String): String { + return resolveEra(timestamp.before(ONE_CE), value) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/AirbyteRecordData.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/AirbyteRecordData.kt new file mode 100644 index 0000000000000..2b4043fc72434 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/AirbyteRecordData.kt @@ -0,0 +1,10 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db.jdbc + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.protocol.models.v0.AirbyteRecordMessageMeta + +@JvmRecord +data class AirbyteRecordData(val rawRowData: JsonNode, val meta: AirbyteRecordMessageMeta) diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/DateTimeConverter.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/DateTimeConverter.kt new file mode 100644 index 0000000000000..30f0c2e11e70a --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/DateTimeConverter.kt @@ -0,0 +1,232 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db.jdbc + +import com.fasterxml.jackson.databind.node.ObjectNode +import io.airbyte.cdk.db.DataTypeUtils +import java.sql.* +import java.time.* +import java.time.format.DateTimeFormatter +import java.util.concurrent.* +import kotlin.math.abs +import kotlin.math.min +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +object DateTimeConverter { + private val LOGGER: Logger = LoggerFactory.getLogger(DateTimeConverter::class.java) + val TIME_WITH_TIMEZONE_FORMATTER: DateTimeFormatter = + DateTimeFormatter.ofPattern( + "HH:mm:ss[.][SSSSSSSSS][SSSSSSS][SSSSSS][SSSSS][SSSS][SSS][SS][S][''][XXX][XX][X]" + ) + private var loggedUnknownTimeWithTimeZoneClass = false + private var loggedUnknownTimeClass = false + private var loggedUnknownTimestampWithTimeZoneClass = false + private var loggedUnknownTimestampClass = false + private var loggedUnknownDateClass = false + + @JvmStatic + fun convertToTimeWithTimezone(time: Any): String { + if (time is OffsetTime) { + return if (hasZeroSecondsAndNanos(time.toLocalTime())) + time.format(DataTypeUtils.TIMETZ_FORMATTER) + else time.toString() + } else { + if (!loggedUnknownTimeWithTimeZoneClass) { + LOGGER.info("Unknown class for Time with timezone data type" + time.javaClass) + loggedUnknownTimeWithTimeZoneClass = true + } + val timetz = OffsetTime.parse(time.toString(), TIME_WITH_TIMEZONE_FORMATTER) + return if (hasZeroSecondsAndNanos(timetz.toLocalTime())) + timetz.format(DataTypeUtils.TIMETZ_FORMATTER) + else timetz.toString() + } + } + + @JvmStatic + fun convertToTimestampWithTimezone(timestamp: Any): String { + if (timestamp is Timestamp) { + // In snapshot mode, debezium produces a java.sql.Timestamp object for the TIMESTAMPTZ + // type. + // Conceptually, a timestamp with timezone is an Instant. But t.toInstant() actually + // mangles the + // value for ancient dates, because leap years weren't applied consistently in ye olden + // days. + // Additionally, toInstant() (and toLocalDateTime()) actually lose the era indicator, so + // we can't + // rely on their getEra() methods. + // So we have special handling for this case, which sidesteps the toInstant conversion. + val timestamptz: ZonedDateTime = timestamp.toLocalDateTime().atZone(ZoneOffset.UTC) + val value = timestamptz.format(DataTypeUtils.TIMESTAMPTZ_FORMATTER) + return AbstractJdbcCompatibleSourceOperations.Companion.resolveEra(timestamp, value) + } else if (timestamp is OffsetDateTime) { + return AbstractJdbcCompatibleSourceOperations.Companion.resolveEra( + timestamp.toLocalDate(), + timestamp.format(DataTypeUtils.TIMESTAMPTZ_FORMATTER) + ) + } else if (timestamp is ZonedDateTime) { + return AbstractJdbcCompatibleSourceOperations.Companion.resolveEra( + timestamp.toLocalDate(), + timestamp.format(DataTypeUtils.TIMESTAMPTZ_FORMATTER) + ) + } else if (timestamp is Instant) { + val offsetDateTime = OffsetDateTime.ofInstant(timestamp, ZoneOffset.UTC) + val timestamptz = ZonedDateTime.from(offsetDateTime) + val localDate = timestamptz.toLocalDate() + val value = timestamptz.format(DataTypeUtils.TIMESTAMPTZ_FORMATTER) + return AbstractJdbcCompatibleSourceOperations.Companion.resolveEra(localDate, value) + } else { + if (!loggedUnknownTimestampWithTimeZoneClass) { + LOGGER.info( + "Unknown class for Timestamp with time zone data type" + timestamp.javaClass + ) + loggedUnknownTimestampWithTimeZoneClass = true + } + val instant = Instant.parse(timestamp.toString()) + val offsetDateTime = OffsetDateTime.ofInstant(instant, ZoneOffset.UTC) + val timestamptz = ZonedDateTime.from(offsetDateTime) + val localDate = timestamptz.toLocalDate() + val value = timestamptz.format(DataTypeUtils.TIMESTAMPTZ_FORMATTER) + return AbstractJdbcCompatibleSourceOperations.Companion.resolveEra(localDate, value) + } + } + + /** See [.convertToTimestampWithTimezone] for explanation of the weird things happening here. */ + @JvmStatic + fun convertToTimestamp(timestamp: Any): String { + if (timestamp is Timestamp) { + // Snapshot mode + val localDateTime: LocalDateTime = timestamp.toLocalDateTime() + return AbstractJdbcCompatibleSourceOperations.Companion.resolveEra( + timestamp, + if (hasZeroSecondsAndNanos(localDateTime.toLocalTime())) + localDateTime.format(DataTypeUtils.TIMESTAMP_FORMATTER) + else localDateTime.toString() + ) + } else if (timestamp is Instant) { + // Incremental mode + return AbstractJdbcCompatibleSourceOperations.Companion.resolveEra( + timestamp.atZone(ZoneOffset.UTC).toLocalDate(), + timestamp + .atOffset(ZoneOffset.UTC) + .toLocalDateTime() + .format(DataTypeUtils.TIMESTAMP_FORMATTER) + ) + } else if (timestamp is LocalDateTime) { + val date: LocalDate = timestamp.toLocalDate() + return AbstractJdbcCompatibleSourceOperations.Companion.resolveEra( + date, + if (hasZeroSecondsAndNanos(timestamp.toLocalTime())) + timestamp.format(DataTypeUtils.TIMESTAMP_FORMATTER) + else timestamp.toString() + ) + } else { + if (!loggedUnknownTimestampClass) { + LOGGER.info("Unknown class for Timestamp data type" + timestamp.javaClass) + loggedUnknownTimestampClass = true + } + val localDateTime = LocalDateTime.parse(timestamp.toString()) + val date = localDateTime.toLocalDate() + return AbstractJdbcCompatibleSourceOperations.Companion.resolveEra( + date, + if (hasZeroSecondsAndNanos(localDateTime.toLocalTime())) + localDateTime.format(DataTypeUtils.TIMESTAMP_FORMATTER) + else localDateTime.toString() + ) + } + } + + /** See [.convertToTimestampWithTimezone] for explanation of the weird things happening here. */ + @JvmStatic + fun convertToDate(date: Any): String { + if (date is Date) { + // Snapshot mode + val localDate = date.toLocalDate() + return AbstractJdbcCompatibleSourceOperations.Companion.resolveEra( + date, + localDate.format(DataTypeUtils.DATE_FORMATTER) + ) + } else if (date is LocalDate) { + // Incremental mode + return AbstractJdbcCompatibleSourceOperations.Companion.resolveEra( + date, + date.format(DataTypeUtils.DATE_FORMATTER) + ) + } else if (date is Integer) { + // Incremental mode + return LocalDate.ofEpochDay(date.toLong()).format(DataTypeUtils.DATE_FORMATTER) + } else { + if (!loggedUnknownDateClass) { + LOGGER.info("Unknown class for Date data type" + date.javaClass) + loggedUnknownDateClass = true + } + val localDate = LocalDate.parse(date.toString()) + return AbstractJdbcCompatibleSourceOperations.Companion.resolveEra( + localDate, + localDate.format(DataTypeUtils.DATE_FORMATTER) + ) + } + } + + @JvmStatic + fun convertToTime(time: Any): String { + if (time is Time) { + return formatTime(time.toLocalTime()) + } else if (time is LocalTime) { + return formatTime(time) + } else if (time is Duration) { + val value = time.toNanos() + if (value >= 0 && value < TimeUnit.DAYS.toNanos(1)) { + return formatTime(LocalTime.ofNanoOfDay(value)) + } else { + val updatedValue = + min(abs(value.toDouble()), LocalTime.MAX.toNanoOfDay().toDouble()).toLong() + LOGGER.debug( + "Time values must use number of nanoseconds greater than 0 and less than 86400000000000 but its {}, converting to {} ", + value, + updatedValue + ) + return formatTime(LocalTime.ofNanoOfDay(updatedValue)) + } + } else { + if (!loggedUnknownTimeClass) { + LOGGER.info("Unknown class for Time data type" + time.javaClass) + loggedUnknownTimeClass = true + } + + val valueAsString = time.toString() + if (valueAsString.startsWith("24")) { + LOGGER.debug("Time value {} is above range, converting to 23:59:59", valueAsString) + return LocalTime.MAX.toString() + } + return formatTime(LocalTime.parse(valueAsString)) + } + } + + @JvmStatic + private fun formatTime(localTime: LocalTime): String { + return if (hasZeroSecondsAndNanos(localTime)) localTime.format(DataTypeUtils.TIME_FORMATTER) + else localTime.toString() + } + + @JvmStatic + fun hasZeroSecondsAndNanos(localTime: LocalTime): Boolean { + return (localTime.second == 0 && localTime.nano == 0) + } + + @Throws(SQLException::class) + @JvmStatic + fun putJavaSQLDate(node: ObjectNode, columnName: String?, resultSet: ResultSet, index: Int) { + val date = resultSet.getDate(index) + node.put(columnName, convertToDate(date)) + } + + @Throws(SQLException::class) + @JvmStatic + fun putJavaSQLTime(node: ObjectNode, columnName: String?, resultSet: ResultSet, index: Int) { + // resultSet.getTime() will lose nanoseconds precision + val localTime = resultSet.getTimestamp(index).toLocalDateTime().toLocalTime() + node.put(columnName, convertToTime(localTime)) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/DefaultJdbcDatabase.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/DefaultJdbcDatabase.kt new file mode 100644 index 0000000000000..f3fbe2ee9cc35 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/DefaultJdbcDatabase.kt @@ -0,0 +1,140 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db.jdbc + +import com.google.errorprone.annotations.MustBeClosed +import io.airbyte.cdk.db.JdbcCompatibleSourceOperations +import io.airbyte.commons.exceptions.ConnectionErrorException +import io.airbyte.commons.functional.CheckedConsumer +import io.airbyte.commons.functional.CheckedFunction +import java.sql.* +import java.util.* +import java.util.function.Function +import java.util.stream.Stream +import javax.sql.DataSource +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * Database object for interacting with a JDBC connection. Can be used for any JDBC compliant db. + */ +open class DefaultJdbcDatabase +@JvmOverloads +constructor( + protected val dataSource: DataSource, + sourceOperations: JdbcCompatibleSourceOperations<*>? = JdbcUtils.defaultSourceOperations +) : JdbcDatabase(sourceOperations) { + @Throws(SQLException::class) + override fun execute(query: CheckedConsumer) { + dataSource.connection.use { connection -> query.accept(connection) } + } + + @Throws(SQLException::class) + override fun bufferedResultSetQuery( + query: CheckedFunction, + recordTransform: CheckedFunction + ): List { + dataSource.connection.use { connection -> + toUnsafeStream(query.apply(connection), recordTransform).use { results -> + return results.toList() + } + } + } + + @MustBeClosed + @Throws(SQLException::class) + override fun unsafeResultSetQuery( + query: CheckedFunction, + recordTransform: CheckedFunction + ): Stream { + val connection = dataSource.connection + return JdbcDatabase.Companion.toUnsafeStream(query.apply(connection), recordTransform) + .onClose( + Runnable { + try { + connection.close() + } catch (e: SQLException) { + throw RuntimeException(e) + } + } + ) + } + + @get:Throws(SQLException::class) + override val metaData: DatabaseMetaData + get() { + try { + dataSource.connection.use { connection -> + val metaData = connection.metaData + return metaData + } + } catch (e: SQLException) { + // Some databases like Redshift will have null cause + if (Objects.isNull(e.cause) || e.cause !is SQLException) { + throw ConnectionErrorException(e.sqlState, e.errorCode, e.message, e) + } else { + val cause = e.cause as SQLException? + throw ConnectionErrorException(e.sqlState, cause!!.errorCode, cause.message, e) + } + } + } + + override fun executeMetadataQuery(query: Function): T { + try { + dataSource.connection.use { connection -> + val metaData = connection.metaData + return query.apply(metaData) + } + } catch (e: SQLException) { + // Some databases like Redshift will have null cause + if (Objects.isNull(e.cause) || e.cause !is SQLException) { + throw ConnectionErrorException(e.sqlState, e.errorCode, e.message, e) + } else { + val cause = e.cause as SQLException? + throw ConnectionErrorException(e.sqlState, cause!!.errorCode, cause.message, e) + } + } + } + + /** + * You CANNOT assume that data will be returned from this method before the entire [ResultSet] + * is buffered in memory. Review the implementation of the database's JDBC driver or use the + * StreamingJdbcDriver if you need this guarantee. The caller should close the returned stream + * to release the database connection. + * + * @param statementCreator create a [PreparedStatement] from a [Connection]. + * @param recordTransform transform each record of that result set into the desired type. do NOT + * just pass the [ResultSet] through. it is a stateful object will not be accessible if returned + * from recordTransform. + * @param type that each record will be mapped to. + * @return Result of the query mapped to a stream. + * @throws SQLException SQL related exceptions. + */ + @MustBeClosed + @Throws(SQLException::class) + override fun unsafeQuery( + statementCreator: CheckedFunction, + recordTransform: CheckedFunction + ): Stream { + val connection = dataSource.connection + return JdbcDatabase.Companion.toUnsafeStream( + statementCreator.apply(connection).executeQuery(), + recordTransform + ) + .onClose( + Runnable { + try { + LOGGER.info("closing connection") + connection.close() + } catch (e: SQLException) { + throw RuntimeException(e) + } + } + ) + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(DefaultJdbcDatabase::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/JdbcConstants.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/JdbcConstants.kt new file mode 100644 index 0000000000000..8c4d6bb7da093 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/JdbcConstants.kt @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db.jdbc + +object JdbcConstants { + // constants defined in the DatabaseMetaData#getColumns method + // reference: https://docs.oracle.com/javase/8/docs/api/java/sql/DatabaseMetaData.html + const val JDBC_COLUMN_DATABASE_NAME: String = "TABLE_CAT" + const val JDBC_COLUMN_SCHEMA_NAME: String = "TABLE_SCHEM" + const val JDBC_COLUMN_TABLE_NAME: String = "TABLE_NAME" + const val JDBC_COLUMN_COLUMN_NAME: String = "COLUMN_NAME" + const val JDBC_COLUMN_DATA_TYPE: String = "DATA_TYPE" + const val JDBC_COLUMN_TYPE: String = "TYPE" + + const val JDBC_COLUMN_TYPE_NAME: String = "TYPE_NAME" + const val JDBC_COLUMN_SIZE: String = "COLUMN_SIZE" + const val JDBC_INDEX_NAME: String = "INDEX_NAME" + const val JDBC_IS_NULLABLE: String = "IS_NULLABLE" + const val JDBC_DECIMAL_DIGITS: String = "DECIMAL_DIGITS" + const val JDBC_INDEX_NON_UNIQUE: String = "NON_UNIQUE" + const val INTERNAL_SCHEMA_NAME: String = "schemaName" + const val INTERNAL_TABLE_NAME: String = "tableName" + const val INTERNAL_COLUMN_NAME: String = "columnName" + const val INTERNAL_COLUMN_TYPE: String = "columnType" + const val INTERNAL_COLUMN_TYPE_NAME: String = "columnTypeName" + const val INTERNAL_COLUMN_SIZE: String = "columnSize" + const val INTERNAL_IS_NULLABLE: String = "isNullable" + const val INTERNAL_DECIMAL_DIGITS: String = "decimalDigits" + const val KEY_SEQ: String = "KEY_SEQ" +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/JdbcDatabase.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/JdbcDatabase.kt new file mode 100644 index 0000000000000..9f4ca36ca76d6 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/JdbcDatabase.kt @@ -0,0 +1,263 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db.jdbc + +import com.fasterxml.jackson.databind.JsonNode +import com.google.errorprone.annotations.MustBeClosed +import io.airbyte.cdk.db.JdbcCompatibleSourceOperations +import io.airbyte.cdk.db.SqlDatabase +import io.airbyte.commons.functional.CheckedConsumer +import io.airbyte.commons.functional.CheckedFunction +import java.sql.* +import java.util.Spliterators.AbstractSpliterator +import java.util.function.Consumer +import java.util.function.Function +import java.util.stream.Stream +import java.util.stream.StreamSupport + +/** Database object for interacting with a JDBC connection. */ +abstract class JdbcDatabase(protected val sourceOperations: JdbcCompatibleSourceOperations<*>?) : + SqlDatabase() { + protected var streamException: Exception? = null + protected var isStreamFailed: Boolean = false + + /** + * Execute a database query. + * + * @param query the query to execute against the database. + * @throws SQLException SQL related exceptions. + */ + @Throws(SQLException::class) + abstract fun execute(query: CheckedConsumer) + + @Throws(SQLException::class) + override fun execute(sql: String?) { + execute { connection: Connection -> connection.createStatement().execute(sql) } + } + + @Throws(SQLException::class) + fun executeWithinTransaction(queries: List) { + execute { connection: Connection -> + connection.autoCommit = false + for (s in queries) { + connection.createStatement().execute(s) + } + connection.commit() + connection.autoCommit = true + } + } + + /** + * Use a connection to create a [ResultSet] and map it into a list. The entire [ResultSet] will + * be buffered in memory before the list is returned. The caller does not need to worry about + * closing any database resources. + * + * @param query execute a query using a [Connection] to get a [ResultSet]. + * @param recordTransform transform each record of that result set into the desired type. do NOT + * just pass the [ResultSet] through. it is a stateful object will not be accessible if returned + * from recordTransform. + * @param type that each record will be mapped to. + * @return Result of the query mapped to a list. + * @throws SQLException SQL related exceptions. + */ + @Throws(SQLException::class) + abstract fun bufferedResultSetQuery( + query: CheckedFunction, + recordTransform: CheckedFunction + ): List + + /** + * Use a connection to create a [ResultSet] and map it into a stream. You CANNOT assume that + * data will be returned from this method before the entire [ResultSet] is buffered in memory. + * Review the implementation of the database's JDBC driver or use the StreamingJdbcDriver if you + * need this guarantee. It is "unsafe" because the caller should close the returned stream to + * release the database connection. Otherwise, there will be a connection leak. + * + * @param query execute a query using a [Connection] to get a [ResultSet]. + * @param recordTransform transform each record of that result set into the desired type. do NOT + * just pass the [ResultSet] through. it is a stateful object will not be accessible if returned + * from recordTransform. + * @param type that each record will be mapped to. + * @return Result of the query mapped to a stream. + * @throws SQLException SQL related exceptions. + */ + @MustBeClosed + @Throws(SQLException::class) + abstract fun unsafeResultSetQuery( + query: CheckedFunction, + recordTransform: CheckedFunction + ): Stream + + /** + * String query is a common use case for [JdbcDatabase.unsafeResultSetQuery]. So this method is + * created as syntactic sugar. + */ + @Throws(SQLException::class) + fun queryStrings( + query: CheckedFunction, + recordTransform: CheckedFunction + ): List { + unsafeResultSetQuery(query, recordTransform).use { stream -> + return stream.toList() + } + } + + /** + * Use a connection to create a [PreparedStatement] and map it into a stream. You CANNOT assume + * that data will be returned from this method before the entire [ResultSet] is buffered in + * memory. Review the implementation of the database's JDBC driver or use the + * StreamingJdbcDriver if you need this guarantee. It is "unsafe" because the caller should + * close the returned stream to release the database connection. Otherwise, there will be a + * connection leak. + * + * @paramstatementCreator create a [PreparedStatement] from a [Connection]. + * @param recordTransform transform each record of that result set into the desired type. do NOT + * just pass the [ResultSet] through. it is a stateful object will not be accessible if returned + * from recordTransform. + * @param type that each record will be mapped to. + * @return Result of the query mapped to a stream.void execute(String sql) + * @throws SQLException SQL related exceptions. + */ + @MustBeClosed + @Throws(SQLException::class) + abstract fun unsafeQuery( + statementCreator: CheckedFunction, + recordTransform: CheckedFunction + ): Stream + + /** + * Json query is a common use case for [JdbcDatabase.unsafeQuery]. So this method is created as + * syntactic sugar. + */ + @Throws(SQLException::class) + fun queryJsons( + statementCreator: CheckedFunction, + recordTransform: CheckedFunction + ): List { + unsafeQuery(statementCreator, recordTransform).use { stream -> + return stream.toList() + } + } + + @Throws(SQLException::class) + fun queryInt(sql: String, vararg params: String): Int { + unsafeQuery( + { c: Connection -> getPreparedStatement(sql, params, c) }, + { rs: ResultSet -> rs.getInt(1) } + ) + .use { stream -> + return stream.findFirst().get() + } + } + + @Throws(SQLException::class) + fun queryBoolean(sql: String, vararg params: String): Boolean { + unsafeQuery( + { c: Connection -> getPreparedStatement(sql, params, c) }, + { rs: ResultSet -> rs.getBoolean(1) } + ) + .use { stream -> + return stream.findFirst().get() + } + } + + /** + * It is "unsafe" because the caller must manually close the returned stream. Otherwise, there + * will be a database connection leak. + */ + @MustBeClosed + @Throws(SQLException::class) + override fun unsafeQuery(sql: String?, vararg params: String?): Stream { + return unsafeQuery( + { connection: Connection -> + val statement = connection.prepareStatement(sql) + var i = 1 + for (param in params) { + statement.setString(i, param) + ++i + } + statement + }, + { queryResult: ResultSet -> sourceOperations!!.rowToJson(queryResult) } + ) + } + + /** + * Json query is a common use case for [JdbcDatabase.unsafeQuery]. So this method is created as + * syntactic sugar. + */ + @Throws(SQLException::class) + fun queryJsons(sql: String?, vararg params: String?): List { + unsafeQuery(sql, *params).use { stream -> + return stream.toList() + } + } + + @Throws(SQLException::class) + fun queryMetadata(sql: String, vararg params: String): ResultSetMetaData? { + unsafeQuery( + { c: Connection -> getPreparedStatement(sql, params, c) }, + { obj: ResultSet -> obj.metaData }, + ) + .use { q -> + return q.findFirst().orElse(null) + } + } + + @get:Throws(SQLException::class) abstract val metaData: DatabaseMetaData + + @Throws(SQLException::class) + abstract fun executeMetadataQuery(query: Function): T + + companion object { + /** + * Map records returned in a result set. It is an "unsafe" stream because the stream must be + * manually closed. Otherwise, there will be a database connection leak. + * + * @param resultSet the result set + * @param mapper function to make each record of the result set + * @param type that each record will be mapped to + * @return stream of records that the result set is mapped to. + */ + @JvmStatic + @MustBeClosed + fun toUnsafeStream( + resultSet: ResultSet, + mapper: CheckedFunction + ): Stream { + return StreamSupport.stream( + object : AbstractSpliterator(Long.MAX_VALUE, ORDERED) { + override fun tryAdvance(action: Consumer): Boolean { + try { + if (!resultSet.next()) { + resultSet.close() + return false + } + action.accept(mapper.apply(resultSet)) + return true + } catch (e: SQLException) { + throw RuntimeException(e) + } + } + }, + false + ) + } + + @Throws(SQLException::class) + private fun getPreparedStatement( + sql: String, + params: Array, + c: Connection + ): PreparedStatement { + val statement = c.prepareStatement(sql) + var i = 1 + for (param in params) { + statement.setString(i, param) + i++ + } + return statement + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/JdbcSourceOperations.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/JdbcSourceOperations.kt new file mode 100644 index 0000000000000..d2717ca0dc3eb --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/JdbcSourceOperations.kt @@ -0,0 +1,183 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db.jdbc + +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.node.ObjectNode +import io.airbyte.cdk.db.SourceOperations +import io.airbyte.protocol.models.JsonSchemaType +import java.sql.* +import java.time.OffsetDateTime +import java.time.OffsetTime +import java.time.format.DateTimeParseException +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** Implementation of source operations with standard JDBC types. */ +open class JdbcSourceOperations : + AbstractJdbcCompatibleSourceOperations(), SourceOperations { + protected fun safeGetJdbcType(columnTypeInt: Int): JDBCType { + return try { + JDBCType.valueOf(columnTypeInt) + } catch (e: Exception) { + JDBCType.VARCHAR + } + } + + @Throws(SQLException::class) + override fun copyToJsonField(resultSet: ResultSet, colIndex: Int, json: ObjectNode) { + val columnTypeInt = resultSet.metaData.getColumnType(colIndex) + val columnName = resultSet.metaData.getColumnName(colIndex) + val columnType = safeGetJdbcType(columnTypeInt) + + when (columnType) { + JDBCType.BIT, + JDBCType.BOOLEAN -> putBoolean(json!!, columnName, resultSet, colIndex) + JDBCType.TINYINT, + JDBCType.SMALLINT -> putShortInt(json!!, columnName, resultSet, colIndex) + JDBCType.INTEGER -> putInteger(json!!, columnName, resultSet, colIndex) + JDBCType.BIGINT -> putBigInt(json!!, columnName, resultSet, colIndex) + JDBCType.FLOAT, + JDBCType.DOUBLE -> putDouble(json!!, columnName, resultSet, colIndex) + JDBCType.REAL -> putFloat(json!!, columnName, resultSet, colIndex) + JDBCType.NUMERIC, + JDBCType.DECIMAL -> putBigDecimal(json!!, columnName, resultSet, colIndex) + JDBCType.CHAR, + JDBCType.VARCHAR, + JDBCType.LONGVARCHAR -> putString(json!!, columnName, resultSet, colIndex) + JDBCType.DATE -> putDate(json!!, columnName, resultSet, colIndex) + JDBCType.TIME -> putTime(json!!, columnName, resultSet, colIndex) + JDBCType.TIMESTAMP -> putTimestamp(json!!, columnName, resultSet, colIndex) + JDBCType.TIMESTAMP_WITH_TIMEZONE -> + putTimestampWithTimezone(json!!, columnName, resultSet, colIndex) + JDBCType.BLOB, + JDBCType.BINARY, + JDBCType.VARBINARY, + JDBCType.LONGVARBINARY -> putBinary(json!!, columnName, resultSet, colIndex) + JDBCType.ARRAY -> putArray(json!!, columnName, resultSet, colIndex) + else -> putDefault(json!!, columnName, resultSet, colIndex) + } + } + + @Throws(SQLException::class) + override fun setCursorField( + preparedStatement: PreparedStatement, + parameterIndex: Int, + cursorFieldType: JDBCType?, + value: String? + ) { + when (cursorFieldType) { + JDBCType.TIMESTAMP -> setTimestamp(preparedStatement, parameterIndex, value) + JDBCType.TIMESTAMP_WITH_TIMEZONE -> + setTimestampWithTimezone(preparedStatement, parameterIndex, value) + JDBCType.TIME -> setTime(preparedStatement, parameterIndex, value) + JDBCType.TIME_WITH_TIMEZONE -> + setTimeWithTimezone(preparedStatement, parameterIndex, value) + JDBCType.DATE -> setDate(preparedStatement, parameterIndex, value!!) + JDBCType.BIT -> setBit(preparedStatement, parameterIndex, value) + JDBCType.BOOLEAN -> setBoolean(preparedStatement, parameterIndex, value!!) + JDBCType.TINYINT, + JDBCType.SMALLINT -> setShortInt(preparedStatement, parameterIndex, value!!) + JDBCType.INTEGER -> setInteger(preparedStatement, parameterIndex, value!!) + JDBCType.BIGINT -> setBigInteger(preparedStatement, parameterIndex, value!!) + JDBCType.FLOAT, + JDBCType.DOUBLE -> setDouble(preparedStatement, parameterIndex, value!!) + JDBCType.REAL -> setReal(preparedStatement, parameterIndex, value!!) + JDBCType.NUMERIC, + JDBCType.DECIMAL -> setDecimal(preparedStatement, parameterIndex, value!!) + JDBCType.CHAR, + JDBCType.NCHAR, + JDBCType.NVARCHAR, + JDBCType.VARCHAR, + JDBCType.LONGVARCHAR -> setString(preparedStatement, parameterIndex, value) + JDBCType.BINARY, + JDBCType.BLOB -> setBinary(preparedStatement, parameterIndex, value) + else -> + throw IllegalArgumentException( + String.format("%s cannot be used as a cursor.", cursorFieldType) + ) + } + } + + @Throws(SQLException::class) + protected open fun setTimestampWithTimezone( + preparedStatement: PreparedStatement, + parameterIndex: Int, + value: String? + ) { + try { + preparedStatement.setObject(parameterIndex, OffsetDateTime.parse(value)) + } catch (e: DateTimeParseException) { + throw RuntimeException(e) + } + } + + @Throws(SQLException::class) + protected fun setTimeWithTimezone( + preparedStatement: PreparedStatement, + parameterIndex: Int, + value: String? + ) { + try { + preparedStatement.setObject(parameterIndex, OffsetTime.parse(value)) + } catch (e: DateTimeParseException) { + throw RuntimeException(e) + } + } + + override fun getDatabaseFieldType(field: JsonNode): JDBCType { + try { + return JDBCType.valueOf(field[JdbcConstants.INTERNAL_COLUMN_TYPE].asInt()) + } catch (ex: IllegalArgumentException) { + LOGGER.warn( + String.format( + "Could not convert column: %s from table: %s.%s with type: %s. Casting to VARCHAR.", + field[JdbcConstants.INTERNAL_COLUMN_NAME], + field[JdbcConstants.INTERNAL_SCHEMA_NAME], + field[JdbcConstants.INTERNAL_TABLE_NAME], + field[JdbcConstants.INTERNAL_COLUMN_TYPE] + ) + ) + return JDBCType.VARCHAR + } + } + + override fun isCursorType(type: JDBCType?): Boolean { + return JdbcUtils.ALLOWED_CURSOR_TYPES.contains(type) + } + + override fun getAirbyteType(jdbcType: JDBCType): JsonSchemaType { + return when (jdbcType) { + JDBCType.BIT, + JDBCType.BOOLEAN -> JsonSchemaType.BOOLEAN + JDBCType.TINYINT, + JDBCType.SMALLINT -> JsonSchemaType.INTEGER + JDBCType.INTEGER -> JsonSchemaType.INTEGER + JDBCType.BIGINT -> JsonSchemaType.INTEGER + JDBCType.FLOAT, + JDBCType.DOUBLE -> JsonSchemaType.NUMBER + JDBCType.REAL -> JsonSchemaType.NUMBER + JDBCType.NUMERIC, + JDBCType.DECIMAL -> JsonSchemaType.NUMBER + JDBCType.CHAR, + JDBCType.NCHAR, + JDBCType.NVARCHAR, + JDBCType.VARCHAR, + JDBCType.LONGVARCHAR -> JsonSchemaType.STRING + JDBCType.DATE -> JsonSchemaType.STRING + JDBCType.TIME -> JsonSchemaType.STRING + JDBCType.TIMESTAMP -> JsonSchemaType.STRING + JDBCType.BLOB, + JDBCType.BINARY, + JDBCType.VARBINARY, + JDBCType.LONGVARBINARY -> JsonSchemaType.STRING_BASE_64 + JDBCType.ARRAY -> JsonSchemaType.ARRAY + else -> JsonSchemaType.STRING + } + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(JdbcSourceOperations::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/JdbcUtils.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/JdbcUtils.kt new file mode 100644 index 0000000000000..ee829ba6d9ec4 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/JdbcUtils.kt @@ -0,0 +1,133 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db.jdbc + +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.collect.Maps +import io.airbyte.commons.exceptions.ConfigErrorException +import java.sql.JDBCType +import org.jooq.JSONFormat + +object JdbcUtils { + // config parameters in alphabetical order + const val CONNECTION_PROPERTIES_KEY: String = "connection_properties" + const val DATABASE_KEY: String = "database" + const val ENCRYPTION_KEY: String = "encryption" + const val HOST_KEY: String = "host" + @JvmField val HOST_LIST_KEY: List = listOf("host") + const val JDBC_URL_KEY: String = "jdbc_url" + const val JDBC_URL_PARAMS_KEY: String = "jdbc_url_params" + const val PASSWORD_KEY: String = "password" + const val PORT_KEY: String = "port" + + @JvmField val PORT_LIST_KEY: List = listOf("port") + const val SCHEMA_KEY: String = "schema" + + // NOTE: this is the plural version of SCHEMA_KEY + const val SCHEMAS_KEY: String = "schemas" + const val SSL_KEY: String = "ssl" + val SSL_MODE_DISABLE: List = listOf("disable", "disabled") + const val SSL_MODE_KEY: String = "ssl_mode" + const val TLS_KEY: String = "tls" + const val USERNAME_KEY: String = "username" + const val MODE_KEY: String = "mode" + const val AMPERSAND: String = "&" + const val EQUALS: String = "=" + + // An estimate for how much additional data in sent over the wire due to conversion of source + // data + // into {@link AirbyteMessage}. This is due to + // the fact that records are in JSON format and all database fields are converted to Strings. + // Currently, this is used in the logic for emitting + // estimate trace messages. + const val PLATFORM_DATA_INCREASE_FACTOR: Int = 2 + val ALLOWED_CURSOR_TYPES: Set = + java.util.Set.of( + JDBCType.TIMESTAMP_WITH_TIMEZONE, + JDBCType.TIMESTAMP, + JDBCType.TIME_WITH_TIMEZONE, + JDBCType.TIME, + JDBCType.DATE, + JDBCType.TINYINT, + JDBCType.SMALLINT, + JDBCType.INTEGER, + JDBCType.BIGINT, + JDBCType.FLOAT, + JDBCType.DOUBLE, + JDBCType.REAL, + JDBCType.NUMERIC, + JDBCType.DECIMAL, + JDBCType.NVARCHAR, + JDBCType.VARCHAR, + JDBCType.LONGVARCHAR + ) + @JvmStatic val defaultSourceOperations: JdbcSourceOperations = JdbcSourceOperations() + + @JvmStatic + val defaultJSONFormat: JSONFormat = JSONFormat().recordFormat(JSONFormat.RecordFormat.OBJECT) + + @JvmStatic + fun getFullyQualifiedTableName(schemaName: String?, tableName: String): String { + return if (schemaName != null) "$schemaName.$tableName" else tableName + } + + @JvmStatic + @JvmOverloads + fun parseJdbcParameters( + config: JsonNode, + jdbcUrlParamsKey: String?, + delimiter: String = "&" + ): Map { + return if (config.has(jdbcUrlParamsKey)) { + parseJdbcParameters(config[jdbcUrlParamsKey].asText(), delimiter) + } else { + Maps.newHashMap() + } + } + + @JvmStatic + @JvmOverloads + fun parseJdbcParameters( + jdbcPropertiesString: String, + delimiter: String = "&" + ): Map { + val parameters: MutableMap = HashMap() + if (!jdbcPropertiesString.isBlank()) { + val keyValuePairs = + jdbcPropertiesString + .split(delimiter.toRegex()) + .dropLastWhile { it.isEmpty() } + .toTypedArray() + for (kv in keyValuePairs) { + val split = kv.split("=".toRegex()).dropLastWhile { it.isEmpty() }.toTypedArray() + if (split.size == 2) { + parameters[split[0]] = split[1] + } else { + throw ConfigErrorException( + "jdbc_url_params must be formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3). Got " + + jdbcPropertiesString + ) + } + } + } + return parameters + } + + /** + * Checks that SSL_KEY has not been set or that an SSL_KEY is set and value can be mapped to + * true (e.g. non-zero integers, string true, etc) + * + * @param config A configuration used to check Jdbc connection + * @return true: if ssl has not been set and ssl mode not equals disabled or it has been set + * with true, false: in all other cases + */ + @JvmStatic + fun useSsl(config: JsonNode): Boolean { + return if (!config.has(SSL_KEY)) { + if (config.has(SSL_MODE_KEY) && config[SSL_MODE_KEY].has(MODE_KEY)) { + !SSL_MODE_DISABLE.contains(config[SSL_MODE_KEY][MODE_KEY].asText()) + } else true + } else config[SSL_KEY].asBoolean() + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/StreamingJdbcDatabase.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/StreamingJdbcDatabase.kt new file mode 100644 index 0000000000000..a95b534355a8b --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/StreamingJdbcDatabase.kt @@ -0,0 +1,112 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db.jdbc + +import com.google.errorprone.annotations.MustBeClosed +import io.airbyte.cdk.db.JdbcCompatibleSourceOperations +import io.airbyte.cdk.db.jdbc.streaming.JdbcStreamingQueryConfig +import io.airbyte.commons.functional.CheckedFunction +import java.sql.* +import java.util.Spliterators.AbstractSpliterator +import java.util.function.Consumer +import java.util.function.Supplier +import java.util.stream.Stream +import java.util.stream.StreamSupport +import javax.sql.DataSource +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * This database allows a developer to specify a [JdbcStreamingQueryConfig]. This allows the + * developer to specify the correct configuration in order for a [PreparedStatement] to execute as + * in a streaming / chunked manner. + */ +class StreamingJdbcDatabase( + dataSource: DataSource, + sourceOperations: JdbcCompatibleSourceOperations<*>?, + private val streamingQueryConfigProvider: Supplier +) : DefaultJdbcDatabase(dataSource, sourceOperations) { + /** + * Assuming that the [JdbcStreamingQueryConfig] is configured correctly for the JDBC driver + * being used, this method will return data in streaming / chunked fashion. Review the provided + * [JdbcStreamingQueryConfig] to understand the size of these chunks. If the entire stream is + * consumed the database connection will be closed automatically and the caller need not call + * close on the returned stream. This query (and the first chunk) are fetched immediately. + * Subsequent chunks will not be pulled until the first chunk is consumed. + * + * @param statementCreator create a [PreparedStatement] from a [Connection]. + * @param recordTransform transform each record of that result set into the desired type. do NOT + * just pass the [ResultSet] through. it is a stateful object will not be accessible if returned + * from recordTransform. + * @param type that each record will be mapped to. + * @return Result of the query mapped to a stream. This stream must be closed! + * @throws SQLException SQL related exceptions. + */ + @MustBeClosed + @Throws(SQLException::class) + override fun unsafeQuery( + statementCreator: CheckedFunction, + recordTransform: CheckedFunction + ): Stream { + try { + val connection = dataSource.connection + val statement = statementCreator.apply(connection) + val streamingConfig = streamingQueryConfigProvider.get() + streamingConfig.initialize(connection, statement) + return toUnsafeStream(statement.executeQuery(), recordTransform, streamingConfig) + .onClose { + try { + if (!connection.autoCommit) { + connection.autoCommit = true + } + connection.close() + if (isStreamFailed) { + throw RuntimeException(streamException) + } + } catch (e: SQLException) { + throw RuntimeException(e) + } + } + } catch (e: SQLException) { + throw RuntimeException(e) + } + } + + /** + * This method differs from [DefaultJdbcDatabase.toUnsafeStream] in that it takes a streaming + * config that adjusts the fetch size dynamically according to sampled row size. + */ + protected fun toUnsafeStream( + resultSet: ResultSet, + mapper: CheckedFunction, + streamingConfig: JdbcStreamingQueryConfig + ): Stream { + return StreamSupport.stream( + object : AbstractSpliterator(Long.MAX_VALUE, ORDERED) { + override fun tryAdvance(action: Consumer): Boolean { + try { + if (!resultSet.next()) { + resultSet.close() + return false + } + val dataRow = mapper.apply(resultSet) + streamingConfig.accept(resultSet, dataRow) + action.accept(dataRow) + return true + } catch (e: SQLException) { + LOGGER.error("SQLState: {}, Message: {}", e.sqlState, e.message) + streamException = e + isStreamFailed = true + throw RuntimeException(e) + } + } + }, + false + ) + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(StreamingJdbcDatabase::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/streaming/AdaptiveStreamingQueryConfig.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/streaming/AdaptiveStreamingQueryConfig.kt new file mode 100644 index 0000000000000..a879285215b1f --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/streaming/AdaptiveStreamingQueryConfig.kt @@ -0,0 +1,42 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db.jdbc.streaming + +import java.sql.* +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +open class AdaptiveStreamingQueryConfig : JdbcStreamingQueryConfig { + private val fetchSizeEstimator: FetchSizeEstimator = TwoStageSizeEstimator.Companion.instance + private var currentFetchSize: Int + + init { + this.currentFetchSize = FetchSizeConstants.INITIAL_SAMPLE_SIZE + } + + @Throws(SQLException::class) + override fun initialize(connection: Connection, preparedStatement: Statement) { + connection.autoCommit = false + preparedStatement.fetchSize = FetchSizeConstants.INITIAL_SAMPLE_SIZE + currentFetchSize = FetchSizeConstants.INITIAL_SAMPLE_SIZE + LOGGER.info("Set initial fetch size: {} rows", preparedStatement.fetchSize) + } + + @Throws(SQLException::class) + override fun accept(resultSet: ResultSet, rowData: Any) { + fetchSizeEstimator.accept(rowData) + val newFetchSize = fetchSizeEstimator.fetchSize + + if (newFetchSize!!.isPresent && currentFetchSize != newFetchSize.get()) { + LOGGER.info("Set new fetch size: {} rows", newFetchSize.get()) + resultSet.fetchSize = newFetchSize.get() + currentFetchSize = newFetchSize.get() + } + } + + companion object { + private val LOGGER: Logger = + LoggerFactory.getLogger(AdaptiveStreamingQueryConfig::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/streaming/BaseSizeEstimator.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/streaming/BaseSizeEstimator.kt new file mode 100644 index 0000000000000..0689603b6106c --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/streaming/BaseSizeEstimator.kt @@ -0,0 +1,67 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db.jdbc.streaming + +import com.google.common.annotations.VisibleForTesting +import io.airbyte.commons.json.Jsons +import kotlin.math.max +import kotlin.math.min + +/** Fetch size (number of rows) = target buffer byte size / max row byte size */ +abstract class BaseSizeEstimator +protected constructor( // desired buffer size in memory + private val targetBufferByteSize: Long, + private val minFetchSize: Int, + private val defaultFetchSize: Int, + private val maxFetchSize: Int +) : FetchSizeEstimator { + var maxRowByteSize: Double = 0.0 + protected set + + val boundedFetchSize: Int + /** + * This method ensures that the fetch size is between `minFetchSize` and `maxFetchSize`, + * inclusively. + */ + get() { + if (maxRowByteSize <= 0.0) { + return defaultFetchSize + } + val rawFetchSize = Math.round(targetBufferByteSize / maxRowByteSize) + if (rawFetchSize > Int.MAX_VALUE) { + return maxFetchSize + } + return max( + minFetchSize.toDouble(), + min(maxFetchSize.toDouble(), rawFetchSize.toInt().toDouble()) + ) + .toInt() + } + + companion object { + /** + * What we really want is to know how much memory each `rowData` takes. However, there is no + * easy way to measure that. So we use the byte size of the serialized row to approximate + * that. + */ + @VisibleForTesting + fun getEstimatedByteSize(rowData: Any?): Long { + if (rowData == null) { + return 0L + } + + // The string length is multiplied by 4 assuming each character is a + // full UTF-8 character. In reality, a UTF-8 character is encoded as + // 1 to 4 bytes. So this is an overestimation. This is alright, because + // the whole method only provides an estimation. Please never convert + // the string to byte[] to get the exact length. That conversion is known + // to introduce a lot of memory overhead. + // + // We are using 3L as the median byte-size of a serialized char here assuming that most + // chars fit + // into the ASCII space (fewer bytes) + return Jsons.serialize(rowData).length * 3L + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/streaming/FetchSizeConstants.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/streaming/FetchSizeConstants.kt new file mode 100644 index 0000000000000..a53de5b6e4c9f --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/streaming/FetchSizeConstants.kt @@ -0,0 +1,23 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db.jdbc.streaming + +object FetchSizeConstants { + // The desired buffer size in memory to store the fetched rows. + // This size is not enforced. It is only used to calculate a proper + // fetch size. The max row size the connector can handle is actually + // limited by the heap size. + const val TARGET_BUFFER_SIZE_RATIO: Double = 0.6 + const val MIN_BUFFER_BYTE_SIZE: Long = 250L * 1024L * 1024L // 250 MB + + // sample size for making the first estimation of the row size + const val INITIAL_SAMPLE_SIZE: Int = 10 + + // sample every N rows during the post-initial stage + const val SAMPLE_FREQUENCY: Int = 100 + + const val MIN_FETCH_SIZE: Int = 1 + const val DEFAULT_FETCH_SIZE: Int = 1000 + const val MAX_FETCH_SIZE: Int = 1000000000 +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/streaming/FetchSizeEstimator.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/streaming/FetchSizeEstimator.kt new file mode 100644 index 0000000000000..704f6c6307214 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/streaming/FetchSizeEstimator.kt @@ -0,0 +1,12 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db.jdbc.streaming + +import java.util.* +import java.util.function.Consumer + +interface FetchSizeEstimator : Consumer { + /** @return the estimated fetch size when the estimation is ready */ + val fetchSize: Optional +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/streaming/InitialSizeEstimator.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/streaming/InitialSizeEstimator.kt new file mode 100644 index 0000000000000..0fea0bd3052c7 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/streaming/InitialSizeEstimator.kt @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db.jdbc.streaming + +import java.util.* + +/** + * This class estimates the max row byte size by measuring the first consecutive `initialSampleSize` + * rows. + */ +class InitialSizeEstimator( + bufferByteSize: Long, + private val sampleSize: Int, + minFetchSize: Int, + defaultFetchSize: Int, + maxFetchSize: Int +) : + BaseSizeEstimator(bufferByteSize, minFetchSize, defaultFetchSize, maxFetchSize), + FetchSizeEstimator { + private var counter = 0 + + override fun accept(row: Any) { + val byteSize: Long = BaseSizeEstimator.Companion.getEstimatedByteSize(row) + if (maxRowByteSize < byteSize) { + maxRowByteSize = byteSize.toDouble() + } + counter++ + } + + override val fetchSize: Optional + get() { + if (counter < sampleSize) { + return Optional.empty() + } + return Optional.of(boundedFetchSize) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/streaming/JdbcStreamingQueryConfig.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/streaming/JdbcStreamingQueryConfig.kt new file mode 100644 index 0000000000000..5dda441b10c76 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/streaming/JdbcStreamingQueryConfig.kt @@ -0,0 +1,17 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db.jdbc.streaming + +import io.airbyte.commons.functional.CheckedBiConsumer +import java.sql.* + +/* + * Interface that defines how to stream results from a Jdbc database. This involves determining + * updating what the fetch size should be based on the size of the existing rows. 1. The config + * initializes the fetch size and sets up the estimator. 2. The config then accepts each row and + * feeds it to the estimator. If the estimator has a new estimate, it updates the fetch size. + */ +interface JdbcStreamingQueryConfig : CheckedBiConsumer { + @Throws(SQLException::class) fun initialize(connection: Connection, statement: Statement) +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/streaming/NoOpStreamingQueryConfig.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/streaming/NoOpStreamingQueryConfig.kt new file mode 100644 index 0000000000000..6c31589e80ac4 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/streaming/NoOpStreamingQueryConfig.kt @@ -0,0 +1,13 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db.jdbc.streaming + +import java.sql.* + +class NoOpStreamingQueryConfig : JdbcStreamingQueryConfig { + @Throws(SQLException::class) + override fun initialize(connection: Connection, preparedStatement: Statement) {} + + @Throws(SQLException::class) override fun accept(resultSet: ResultSet, o: Any) {} +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/streaming/SamplingSizeEstimator.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/streaming/SamplingSizeEstimator.kt new file mode 100644 index 0000000000000..da5b232abdb59 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/streaming/SamplingSizeEstimator.kt @@ -0,0 +1,52 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db.jdbc.streaming + +import java.util.* + +/** + * This class adjusts the max row byte size by measuring one row out of every `sampleFrequency` + * rows. + */ +class SamplingSizeEstimator( + bufferByteSize: Long, + private val sampleFrequency: Int, + initialRowByteSize: Double, + minFetchSize: Int, + defaultFetchSize: Int, + maxFetchSize: Int +) : + BaseSizeEstimator(bufferByteSize, minFetchSize, defaultFetchSize, maxFetchSize), + FetchSizeEstimator { + private var counter = 0 + private var hasNewEstimation = false + + init { + this.maxRowByteSize = initialRowByteSize + } + + override fun accept(row: Any) { + counter++ + if (counter < sampleFrequency) { + return + } + + counter = 0 + val rowByteSize: Long = BaseSizeEstimator.Companion.getEstimatedByteSize(row) + if (this.maxRowByteSize < rowByteSize) { + this.maxRowByteSize = rowByteSize.toDouble() + hasNewEstimation = true + } + } + + override val fetchSize: Optional + get() { + if (!hasNewEstimation) { + return Optional.empty() + } + + hasNewEstimation = false + return Optional.of(boundedFetchSize) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/streaming/TwoStageSizeEstimator.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/streaming/TwoStageSizeEstimator.kt new file mode 100644 index 0000000000000..16d485861809d --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/streaming/TwoStageSizeEstimator.kt @@ -0,0 +1,90 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db.jdbc.streaming + +import com.google.common.annotations.VisibleForTesting +import java.util.* +import kotlin.math.max +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * This estimator first uses the [InitialSizeEstimator] to calculate an initial fetch size by + * sampling the first N rows consecutively, and then switches to [SamplingSizeEstimator] to + * periodically adjust the fetch size by sampling every M rows. + */ +class TwoStageSizeEstimator private constructor() : FetchSizeEstimator { + private val initialSampleSize = FetchSizeConstants.INITIAL_SAMPLE_SIZE + + @get:VisibleForTesting + var delegate: BaseSizeEstimator + private set + private var counter = 0 + + init { + this.delegate = + InitialSizeEstimator( + FetchSizeConstants.MIN_BUFFER_BYTE_SIZE, + initialSampleSize, + FetchSizeConstants.MIN_FETCH_SIZE, + FetchSizeConstants.DEFAULT_FETCH_SIZE, + FetchSizeConstants.MAX_FETCH_SIZE + ) + } + + override val fetchSize: Optional + get() = delegate.fetchSize + + override fun accept(rowData: Any) { + if (counter <= initialSampleSize + 1) { + counter++ + // switch to SamplingSizeEstimator after the initial N rows + if (delegate is InitialSizeEstimator && counter > initialSampleSize) { + delegate = + SamplingSizeEstimator( + getTargetBufferByteSize(Runtime.getRuntime().maxMemory()), + FetchSizeConstants.SAMPLE_FREQUENCY, + delegate.maxRowByteSize, + FetchSizeConstants.MIN_FETCH_SIZE, + FetchSizeConstants.DEFAULT_FETCH_SIZE, + FetchSizeConstants.MAX_FETCH_SIZE + ) + } + } + + delegate.accept(rowData) + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(TwoStageSizeEstimator::class.java) + + val instance: TwoStageSizeEstimator + get() = TwoStageSizeEstimator() + + @VisibleForTesting + fun getTargetBufferByteSize(maxMemory: Long?): Long { + if (maxMemory == null || maxMemory == Long.MAX_VALUE) { + LOGGER.info( + "No max memory limit found, use min JDBC buffer size: {}", + FetchSizeConstants.MIN_BUFFER_BYTE_SIZE + ) + return FetchSizeConstants.MIN_BUFFER_BYTE_SIZE + } + val targetBufferByteSize = + Math.round(maxMemory * FetchSizeConstants.TARGET_BUFFER_SIZE_RATIO) + val finalBufferByteSize = + max( + FetchSizeConstants.MIN_BUFFER_BYTE_SIZE.toDouble(), + targetBufferByteSize.toDouble() + ) + .toLong() + LOGGER.info( + "Max memory limit: {}, JDBC buffer size: {}", + maxMemory, + finalBufferByteSize + ) + return finalBufferByteSize + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/util/JsonUtil.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/util/JsonUtil.kt new file mode 100644 index 0000000000000..52f8c9761965b --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/util/JsonUtil.kt @@ -0,0 +1,79 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db.util + +import com.fasterxml.jackson.databind.node.ArrayNode +import com.fasterxml.jackson.databind.node.ContainerNode +import com.fasterxml.jackson.databind.node.ObjectNode +import java.math.BigDecimal + +object JsonUtil { + private const val ERROR_MESSAGE = "Can't populate the node type : " + + @JvmStatic + fun putBooleanValueIntoJson(node: ContainerNode<*>, value: Boolean, fieldName: String?) { + if (node is ArrayNode) { + node.add(value) + } else if (node is ObjectNode) { + node.put(fieldName, value) + } else { + throw RuntimeException(ERROR_MESSAGE + node.javaClass.name) + } + } + + @JvmStatic + fun putLongValueIntoJson(node: ContainerNode<*>, value: Long, fieldName: String?) { + if (node is ArrayNode) { + node.add(value) + } else if (node is ObjectNode) { + node.put(fieldName, value) + } else { + throw RuntimeException(ERROR_MESSAGE + node.javaClass.name) + } + } + + @JvmStatic + fun putDoubleValueIntoJson(node: ContainerNode<*>, value: Double, fieldName: String?) { + if (node is ArrayNode) { + node.add(value) + } else if (node is ObjectNode) { + node.put(fieldName, value) + } else { + throw RuntimeException(ERROR_MESSAGE + node.javaClass.name) + } + } + + @JvmStatic + fun putBigDecimalValueIntoJson(node: ContainerNode<*>, value: BigDecimal?, fieldName: String?) { + if (node is ArrayNode) { + node.add(value) + } else if (node is ObjectNode) { + node.put(fieldName, value) + } else { + throw RuntimeException(ERROR_MESSAGE + node.javaClass.name) + } + } + + @JvmStatic + fun putStringValueIntoJson(node: ContainerNode<*>, value: String?, fieldName: String?) { + if (node is ArrayNode) { + node.add(value) + } else if (node is ObjectNode) { + node.put(fieldName, value) + } else { + throw RuntimeException(ERROR_MESSAGE + node.javaClass.name) + } + } + + @JvmStatic + fun putBytesValueIntoJson(node: ContainerNode<*>, value: ByteArray?, fieldName: String?) { + if (node is ArrayNode) { + node.add(value) + } else if (node is ObjectNode) { + node.put(fieldName, value) + } else { + throw RuntimeException(ERROR_MESSAGE + node.javaClass.name) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/util/SSLCertificateUtils.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/util/SSLCertificateUtils.kt new file mode 100644 index 0000000000000..e7a2dfd546ccf --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/util/SSLCertificateUtils.kt @@ -0,0 +1,273 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db.util + +import java.io.* +import java.net.URI +import java.nio.charset.StandardCharsets +import java.nio.file.FileSystem +import java.nio.file.FileSystems +import java.nio.file.Files +import java.security.* +import java.security.cert.Certificate +import java.security.cert.CertificateException +import java.security.cert.CertificateFactory +import java.security.spec.InvalidKeySpecException +import java.security.spec.PKCS8EncodedKeySpec +import java.util.* +import java.util.concurrent.* +import javax.net.ssl.SSLContext +import org.apache.http.ssl.SSLContexts +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * General SSL utilities used for certificate and keystore operations related to secured db + * connections. + */ +object SSLCertificateUtils { + private val LOGGER: Logger = LoggerFactory.getLogger(SSLCertificateUtils::class.java) + private const val PKCS_12 = "PKCS12" + private const val X509 = "X.509" + private val RANDOM: Random = SecureRandom() + + // #17000: postgres driver is hardcoded to only load an entry alias "user" + const val KEYSTORE_ENTRY_PREFIX: String = "user" + const val KEYSTORE_FILE_NAME: String = KEYSTORE_ENTRY_PREFIX + "keystore_" + const val KEYSTORE_FILE_TYPE: String = ".p12" + + @Throws( + IOException::class, + CertificateException::class, + KeyStoreException::class, + NoSuchAlgorithmException::class + ) + private fun saveKeyStoreToFile( + keyStore: KeyStore, + keyStorePassword: String, + filesystem: FileSystem?, + directory: String? + ): URI { + val fs = Objects.requireNonNullElse(filesystem, FileSystems.getDefault()) + val pathToStore = fs!!.getPath(Objects.toString(directory, "")) + val pathToFile = + pathToStore.resolve(KEYSTORE_FILE_NAME + RANDOM.nextInt() + KEYSTORE_FILE_TYPE) + val os = Files.newOutputStream(pathToFile) + keyStore.store(os, keyStorePassword.toCharArray()) + assert(Files.exists(pathToFile) == true) + return pathToFile.toUri() + } + + @Throws(IOException::class, InterruptedException::class) + private fun runProcess(cmd: String, run: Runtime) { + LOGGER.debug("running [{}]", cmd) + val p = run.exec(cmd) + if (!p.waitFor(30, TimeUnit.SECONDS)) { + p.destroy() + throw RuntimeException("Timeout while executing: $cmd") + } + } + + @Throws(CertificateException::class) + private fun fromPEMString(certString: String): Certificate { + val cf = CertificateFactory.getInstance(X509) + val byteArrayInputStream = + ByteArrayInputStream(certString.toByteArray(StandardCharsets.UTF_8)) + val bufferedInputStream = BufferedInputStream(byteArrayInputStream) + return cf.generateCertificate(bufferedInputStream) + } + + @Throws( + KeyStoreException::class, + CertificateException::class, + IOException::class, + NoSuchAlgorithmException::class + ) + fun keyStoreFromCertificate( + cert: Certificate?, + keyStorePassword: String, + filesystem: FileSystem?, + directory: String? + ): URI { + val keyStore = KeyStore.getInstance(PKCS_12) + keyStore.load(null) + keyStore.setCertificateEntry(KEYSTORE_ENTRY_PREFIX + "1", cert) + return saveKeyStoreToFile(keyStore, keyStorePassword, filesystem, directory) + } + + @JvmStatic + @Throws( + CertificateException::class, + IOException::class, + KeyStoreException::class, + NoSuchAlgorithmException::class + ) + fun keyStoreFromCertificate( + certString: String, + keyStorePassword: String, + filesystem: FileSystem?, + directory: String? + ): URI { + return keyStoreFromCertificate( + fromPEMString(certString), + keyStorePassword, + filesystem, + directory + ) + } + + @Throws( + CertificateException::class, + IOException::class, + KeyStoreException::class, + NoSuchAlgorithmException::class + ) + fun keyStoreFromCertificate(certString: String, keyStorePassword: String): URI { + return keyStoreFromCertificate(fromPEMString(certString), keyStorePassword, null, null) + } + + @Throws( + CertificateException::class, + IOException::class, + KeyStoreException::class, + NoSuchAlgorithmException::class + ) + fun keyStoreFromCertificate( + certString: String, + keyStorePassword: String, + directory: String? + ): URI { + return keyStoreFromCertificate( + certString, + keyStorePassword, + FileSystems.getDefault(), + directory + ) + } + + @Throws( + KeyStoreException::class, + CertificateException::class, + IOException::class, + NoSuchAlgorithmException::class + ) + fun keyStoreFromClientCertificate( + cert: Certificate, + key: PrivateKey?, + keyStorePassword: String, + filesystem: FileSystem?, + directory: String? + ): URI { + val keyStore = KeyStore.getInstance(PKCS_12) + keyStore.load(null) + keyStore.setKeyEntry( + KEYSTORE_ENTRY_PREFIX, + key, + keyStorePassword.toCharArray(), + arrayOf(cert) + ) + return saveKeyStoreToFile(keyStore, keyStorePassword, filesystem, directory) + } + + @Throws( + IOException::class, + InterruptedException::class, + NoSuchAlgorithmException::class, + InvalidKeySpecException::class, + CertificateException::class, + KeyStoreException::class + ) + fun keyStoreFromClientCertificate( + certString: String, + keyString: String, + keyStorePassword: String, + filesystem: FileSystem?, + directory: String? + ): URI { + // Convert RSA key (PKCS#1) to PKCS#8 key + // Note: java.security doesn't have a built-in support of PKCS#1 format. A conversion using + // openssl + // is necessary. + // Since this is a single operation it's better than adding an external lib (e.g + // BouncyCastle) + + val tmpDir = Files.createTempDirectory(null) + val pkcs1Key = Files.createTempFile(tmpDir, null, null) + val pkcs8Key = Files.createTempFile(tmpDir, null, null) + pkcs1Key.toFile().deleteOnExit() + pkcs8Key.toFile().deleteOnExit() + + Files.write(pkcs1Key, keyString.toByteArray(StandardCharsets.UTF_8)) + runProcess( + "openssl pkcs8 -topk8 -inform PEM -outform DER -in " + + pkcs1Key.toAbsolutePath() + + " -out " + + pkcs8Key.toAbsolutePath() + + " -nocrypt -passout pass:" + + keyStorePassword, + Runtime.getRuntime() + ) + + val spec = PKCS8EncodedKeySpec(Files.readAllBytes(pkcs8Key)) + var privateKey = + try { + KeyFactory.getInstance("RSA").generatePrivate(spec) + } catch (ex1: InvalidKeySpecException) { + try { + KeyFactory.getInstance("DSA").generatePrivate(spec) + } catch (ex2: InvalidKeySpecException) { + KeyFactory.getInstance("EC").generatePrivate(spec) + } + } + + return keyStoreFromClientCertificate( + fromPEMString(certString), + privateKey, + keyStorePassword, + filesystem, + directory + ) + } + + @JvmStatic + @Throws( + CertificateException::class, + IOException::class, + NoSuchAlgorithmException::class, + InvalidKeySpecException::class, + KeyStoreException::class, + InterruptedException::class + ) + fun keyStoreFromClientCertificate( + certString: String, + keyString: String, + keyStorePassword: String, + directory: String? + ): URI { + return keyStoreFromClientCertificate( + certString, + keyString, + keyStorePassword, + FileSystems.getDefault(), + directory + ) + } + + fun createContextFromCaCert(caCertificate: String): SSLContext { + try { + val factory = CertificateFactory.getInstance(X509) + val trustedCa = + factory.generateCertificate( + ByteArrayInputStream(caCertificate.toByteArray(StandardCharsets.UTF_8)) + ) + val trustStore = KeyStore.getInstance(PKCS_12) + trustStore.load(null, null) + trustStore.setCertificateEntry("ca", trustedCa) + val sslContextBuilder = SSLContexts.custom().loadTrustMaterial(trustStore, null) + return sslContextBuilder.build() + } catch (e: Exception) { + throw RuntimeException(e) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/BaseConnector.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/BaseConnector.kt new file mode 100644 index 0000000000000..b3b6587d0d649 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/BaseConnector.kt @@ -0,0 +1,26 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations + +import io.airbyte.cdk.integrations.base.Integration +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.resources.MoreResources +import io.airbyte.protocol.models.v0.ConnectorSpecification + +abstract class BaseConnector : Integration { + /** + * By convention the spec is stored as a resource for java connectors. That resource is called + * spec.json. + * + * @return specification. + * @throws Exception + * - any exception. + */ + @Throws(Exception::class) + override fun spec(): ConnectorSpecification { + // return a JsonSchema representation of the spec for the integration. + val resourceString = MoreResources.readResource("spec.json") + return Jsons.deserialize(resourceString, ConnectorSpecification::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/JdbcConnector.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/JdbcConnector.kt new file mode 100644 index 0000000000000..5190a53314bf4 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/JdbcConnector.kt @@ -0,0 +1,88 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations + +import io.airbyte.cdk.db.factory.DatabaseDriver +import java.time.Duration +import java.time.temporal.ChronoUnit +import java.time.temporal.TemporalUnit +import java.util.* + +abstract class JdbcConnector +protected constructor(@JvmField protected val driverClassName: String) : BaseConnector() { + protected fun getConnectionTimeout(connectionProperties: Map): Duration { + return getConnectionTimeout(connectionProperties, driverClassName) + } + + companion object { + const val POSTGRES_CONNECT_TIMEOUT_KEY: String = "connectTimeout" + val POSTGRES_CONNECT_TIMEOUT_DEFAULT_DURATION: Duration = Duration.ofSeconds(10) + + const val CONNECT_TIMEOUT_KEY: String = "connectTimeout" + @JvmField val CONNECT_TIMEOUT_DEFAULT: Duration = Duration.ofSeconds(60) + + /** + * Retrieves connectionTimeout value from connection properties in millis, default minimum + * timeout is 60 seconds since Hikari default of 30 seconds is not enough for acceptance + * tests. In the case the value is 0, pass the value along as Hikari and Postgres use + * default max value for 0 timeout value. + * + * NOTE: Postgres timeout is measured in seconds: + * https://jdbc.postgresql.org/documentation/head/connect.html + * + * @param connectionProperties custom jdbc_url_parameters containing information on + * connection properties + * @param driverClassName name of the JDBC driver + * @return DataSourceBuilder class used to create dynamic fields for DataSource + */ + @JvmStatic + fun getConnectionTimeout( + connectionProperties: Map, + driverClassName: String? + ): Duration { + val parsedConnectionTimeout = + when (DatabaseDriver.Companion.findByDriverClassName(driverClassName)) { + DatabaseDriver.POSTGRESQL -> + maybeParseDuration( + connectionProperties[POSTGRES_CONNECT_TIMEOUT_KEY], + ChronoUnit.SECONDS + ) + .or { Optional.of(POSTGRES_CONNECT_TIMEOUT_DEFAULT_DURATION) } + DatabaseDriver.MYSQL -> + maybeParseDuration( + connectionProperties["connectTimeout"], + ChronoUnit.MILLIS + ) + DatabaseDriver.MSSQLSERVER -> + maybeParseDuration(connectionProperties["loginTimeout"], ChronoUnit.SECONDS) + else -> + maybeParseDuration( + connectionProperties[CONNECT_TIMEOUT_KEY], + ChronoUnit.SECONDS + ) // Enforce minimum timeout duration for unspecified data sources. + .filter { d: Duration -> d.compareTo(CONNECT_TIMEOUT_DEFAULT) >= 0 } + } + return parsedConnectionTimeout.orElse(CONNECT_TIMEOUT_DEFAULT) + } + + private fun maybeParseDuration( + stringValue: String?, + unit: TemporalUnit + ): Optional { + if (stringValue == null) { + return Optional.empty() + } + val number: Long + try { + number = stringValue.toLong() + } catch (`__`: NumberFormatException) { + return Optional.empty() + } + if (number < 0) { + return Optional.empty() + } + return Optional.of(Duration.of(number, unit)) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/AirbyteExceptionHandler.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/AirbyteExceptionHandler.kt new file mode 100644 index 0000000000000..de346a65b6ac2 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/AirbyteExceptionHandler.kt @@ -0,0 +1,182 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.base + +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.annotations.VisibleForTesting +import io.airbyte.cdk.integrations.util.ConnectorExceptionUtil +import java.util.* +import java.util.regex.Pattern +import javax.validation.constraints.NotNull +import org.apache.commons.lang3.exception.ExceptionUtils +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +class AirbyteExceptionHandler : Thread.UncaughtExceptionHandler { + override fun uncaughtException(thread: Thread, throwable: Throwable) { + // This is a naive AirbyteTraceMessage emission in order to emit one when any error occurs + // in a + // connector. + // If a connector implements AirbyteTraceMessage emission itself, this code will result in + // an + // additional one being emitted. + // this is fine tho because: + // "The earliest AirbyteTraceMessage where type=error will be used to populate the + // FailureReason for + // the sync." + // from the spec: + // https://docs.google.com/document/d/1ctrj3Yh_GjtQ93aND-WH3ocqGxsmxyC3jfiarrF6NY0/edit# + LOGGER.error(logMessage, throwable) + + val rootThrowable = ConnectorExceptionUtil.getRootConfigError(Exception(throwable)) + + if (ConnectorExceptionUtil.isConfigError(rootThrowable)) { + terminate() + } + + // Attempt to deinterpolate the error message before emitting a trace message + val mangledMessage: String? + // If any exception in the chain is of a deinterpolatable type, find it and deinterpolate + // its + // message. + // This assumes that any wrapping exceptions are just noise (e.g. runtime exception). + val deinterpolatableException = + ExceptionUtils.getThrowableList(throwable) + .stream() + .filter { t: Throwable -> + THROWABLES_TO_DEINTERPOLATE.stream().anyMatch { + deinterpolatableClass: Class -> + deinterpolatableClass.isAssignableFrom(t.javaClass) + } + } + .findFirst() + val messageWasMangled: Boolean + if (deinterpolatableException.isPresent) { + val originalMessage = deinterpolatableException.get().message + mangledMessage = + STRINGS_TO_DEINTERPOLATE + .stream() // Sort the strings longest to shortest, in case any target string is + // a substring of another + // e.g. "airbyte_internal" should be swapped out before "airbyte" + .sorted(Comparator.comparing { obj: String -> obj.length }.reversed()) + .reduce(originalMessage) { message: String?, targetString: String? -> + deinterpolate(message, targetString) + } + messageWasMangled = mangledMessage != originalMessage + } else { + mangledMessage = throwable.message + messageWasMangled = false + } + + if (!messageWasMangled) { + // If we did not modify the message (either not a deinterpolatable class, or we tried to + // deinterpolate but made no changes) then emit our default trace message + AirbyteTraceMessageUtility.emitSystemErrorTrace(throwable, logMessage) + } else { + // If we did modify the message, then emit a custom trace message + AirbyteTraceMessageUtility.emitCustomErrorTrace(throwable.message, mangledMessage) + } + + terminate() + } + + // by doing this in a separate method we can mock it to avoid closing the jvm and therefore test + // properly + fun terminate() { + System.exit(1) + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(AirbyteExceptionHandler::class.java) + const val logMessage: String = + "Something went wrong in the connector. See the logs for more details." + + // Basic deinterpolation helpers to avoid doing _really_ dumb deinterpolation. + // E.g. if "id" is in the list of strings to remove, we don't want to modify the message + // "Invalid + // identifier". + private const val REGEX_PREFIX = "(^|[^A-Za-z0-9])" + private const val REGEX_SUFFIX = "($|[^A-Za-z0-9])" + + /** + * If this list is populated, then the exception handler will attempt to deinterpolate the + * error message before emitting a trace message. This is useful for connectors which (a) + * emit a single exception class, and (b) rely on that exception's message to distinguish + * between error types. + * + * If this is active, then the trace message will: + * + * 1. Not contain the stacktrace at all. This causes Sentry to use its fallback grouping + * (using exception class and message) + * 1. Contain the original exception message as the external message, and a mangled message + * as the internal message. + */ + @VisibleForTesting val STRINGS_TO_DEINTERPOLATE: MutableSet = HashSet() + + init { + addCommonStringsToDeinterpolate() + } + + @VisibleForTesting + val THROWABLES_TO_DEINTERPOLATE: MutableSet> = HashSet() + + private fun deinterpolate(message: String?, targetString: String?): @NotNull String? { + // (?i) makes the pattern case-insensitive + val quotedTarget = '('.toString() + "(?i)" + Pattern.quote(targetString) + ')' + val targetRegex = REGEX_PREFIX + quotedTarget + REGEX_SUFFIX + val pattern = Pattern.compile(targetRegex) + val matcher = pattern.matcher(message) + + // The pattern has three capturing groups: + // 1. The character before the target string (or an empty string, if it matched + // start-of-string) + // 2. The target string + // 3. The character after the target string (or empty string for end-of-string) + // We want to preserve the characters before and after the target string, so we use $1 + // and $3 to + // reinsert them + // but the target string is replaced with just '?' + return matcher.replaceAll("$1?$3") + } + + @JvmStatic + fun addThrowableForDeinterpolation(klass: Class) { + THROWABLES_TO_DEINTERPOLATE.add(klass) + } + + @JvmStatic + fun addStringForDeinterpolation(string: String?) { + if (string != null) { + STRINGS_TO_DEINTERPOLATE.add(string.lowercase(Locale.getDefault())) + } + } + + fun addAllStringsInConfigForDeinterpolation(node: JsonNode) { + if (node.isTextual) { + addStringForDeinterpolation(node.asText()) + } else if (node.isContainerNode) { + for (subNode in node) { + addAllStringsInConfigForDeinterpolation(subNode) + } + } + } + + internal fun addCommonStringsToDeinterpolate() { + // Add some common strings to deinterpolate, regardless of what the connector is doing + addStringForDeinterpolation("airbyte") + addStringForDeinterpolation("config") + addStringForDeinterpolation("configuration") + addStringForDeinterpolation("description") + addStringForDeinterpolation("email") + addStringForDeinterpolation("id") + addStringForDeinterpolation("location") + addStringForDeinterpolation("message") + addStringForDeinterpolation("name") + addStringForDeinterpolation("state") + addStringForDeinterpolation("status") + addStringForDeinterpolation("type") + addStringForDeinterpolation("userEmail") + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/AirbyteMessageConsumer.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/AirbyteMessageConsumer.kt new file mode 100644 index 0000000000000..640f49b55211b --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/AirbyteMessageConsumer.kt @@ -0,0 +1,72 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.base + +import io.airbyte.commons.concurrency.VoidCallable +import io.airbyte.commons.functional.CheckedConsumer +import io.airbyte.protocol.models.v0.AirbyteMessage + +/** + * Interface for the destination's consumption of incoming records wrapped in an + * [io.airbyte.protocol.models.v0.AirbyteMessage]. + * + * This is via the accept method, which commonly handles parsing, validation, batching and writing + * of the transformed data to the final destination i.e. the technical system data is being written + * to. + * + * Lifecycle: + * + * * 1. Instantiate consumer. + * * 2. start() to initialize any resources that need to be created BEFORE the consumer consumes any + * messages. + * * 3. Consumes ALL records via [AirbyteMessageConsumer.accept] + * * 4. Always (on success or failure) finalize by calling [AirbyteMessageConsumer.close] + * + * We encourage implementing this interface using the [FailureTrackingAirbyteMessageConsumer] class. + */ +interface AirbyteMessageConsumer : CheckedConsumer, AutoCloseable { + @Throws(Exception::class) fun start() + + /** + * Consumes all [AirbyteMessage]s + * + * @param message [AirbyteMessage] to be processed + * @throws Exception + */ + @Throws(Exception::class) override fun accept(message: AirbyteMessage) + + /** + * Executes at the end of consumption of all incoming streamed data regardless of success or + * failure + * + * @throws Exception + */ + @Throws(Exception::class) override fun close() + + companion object { + /** Append a function to be called on [AirbyteMessageConsumer.close]. */ + fun appendOnClose( + consumer: AirbyteMessageConsumer?, + voidCallable: VoidCallable + ): AirbyteMessageConsumer? { + return object : AirbyteMessageConsumer { + @Throws(Exception::class) + override fun start() { + consumer!!.start() + } + + @Throws(Exception::class) + override fun accept(message: AirbyteMessage) { + consumer!!.accept(message) + } + + @Throws(Exception::class) + override fun close() { + consumer!!.close() + voidCallable.call() + } + } + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/AirbyteTraceMessageUtility.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/AirbyteTraceMessageUtility.kt new file mode 100644 index 0000000000000..e6aed62660241 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/AirbyteTraceMessageUtility.kt @@ -0,0 +1,143 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.base + +import io.airbyte.commons.stream.AirbyteStreamStatusHolder +import io.airbyte.protocol.models.v0.* +import java.time.Instant +import java.util.function.Consumer +import org.apache.commons.lang3.exception.ExceptionUtils + +object AirbyteTraceMessageUtility { + fun emitSystemErrorTrace(e: Throwable, displayMessage: String?) { + emitErrorTrace(e, displayMessage, AirbyteErrorTraceMessage.FailureType.SYSTEM_ERROR) + } + + @JvmStatic + fun emitConfigErrorTrace(e: Throwable, displayMessage: String?) { + emitErrorTrace(e, displayMessage, AirbyteErrorTraceMessage.FailureType.CONFIG_ERROR) + } + + fun emitCustomErrorTrace(displayMessage: String?, internalMessage: String?) { + emitMessage( + makeAirbyteMessageFromTraceMessage( + makeAirbyteTraceMessage(AirbyteTraceMessage.Type.ERROR) + .withError( + AirbyteErrorTraceMessage() + .withFailureType(AirbyteErrorTraceMessage.FailureType.SYSTEM_ERROR) + .withMessage(displayMessage) + .withInternalMessage(internalMessage) + ) + ) + ) + } + + @JvmStatic + fun emitEstimateTrace( + byteEstimate: Long, + type: AirbyteEstimateTraceMessage.Type?, + rowEstimate: Long, + streamName: String?, + streamNamespace: String? + ) { + emitMessage( + makeAirbyteMessageFromTraceMessage( + makeAirbyteTraceMessage(AirbyteTraceMessage.Type.ESTIMATE) + .withEstimate( + AirbyteEstimateTraceMessage() + .withByteEstimate(byteEstimate) + .withType(type) + .withRowEstimate(rowEstimate) + .withName(streamName) + .withNamespace(streamNamespace) + ) + ) + ) + } + + @JvmStatic + fun emitAnalyticsTrace(airbyteAnalyticsTraceMessage: AirbyteAnalyticsTraceMessage) { + emitMessage(makeAnalyticsTraceAirbyteMessage(airbyteAnalyticsTraceMessage)) + } + + @JvmStatic + fun emitErrorTrace( + e: Throwable, + displayMessage: String?, + failureType: AirbyteErrorTraceMessage.FailureType + ) { + emitMessage(makeErrorTraceAirbyteMessage(e, displayMessage, failureType)) + } + + @JvmStatic + fun emitStreamStatusTrace(airbyteStreamStatusHolder: AirbyteStreamStatusHolder) { + emitMessage(makeStreamStatusTraceAirbyteMessage(airbyteStreamStatusHolder)) + } + + // todo: handle the other types of trace message we'll expect in the future, see + // io.airbyte.protocol.models.v0.AirbyteTraceMessage + // & the tech spec: + // https://docs.google.com/document/d/1ctrj3Yh_GjtQ93aND-WH3ocqGxsmxyC3jfiarrF6NY0/edit# + // public void emitNotificationTrace() {} + // public void emitMetricTrace() {} + private fun emitMessage(message: AirbyteMessage) { + // Not sure why defaultOutputRecordCollector is under Destination specifically, + // but this matches usage elsewhere in base-java + val outputRecordCollector = + Consumer { message: AirbyteMessage? -> + Destination.Companion.defaultOutputRecordCollector(message) + } + outputRecordCollector.accept(message) + } + + private fun makeErrorTraceAirbyteMessage( + e: Throwable, + displayMessage: String?, + failureType: AirbyteErrorTraceMessage.FailureType + ): AirbyteMessage { + return makeAirbyteMessageFromTraceMessage( + makeAirbyteTraceMessage(AirbyteTraceMessage.Type.ERROR) + .withError( + AirbyteErrorTraceMessage() + .withFailureType(failureType) + .withMessage(displayMessage) + .withInternalMessage(e.toString()) + .withStackTrace(ExceptionUtils.getStackTrace(e)) + ) + ) + } + + private fun makeAnalyticsTraceAirbyteMessage( + airbyteAnalyticsTraceMessage: AirbyteAnalyticsTraceMessage + ): AirbyteMessage { + return AirbyteMessage() + .withType(AirbyteMessage.Type.TRACE) + .withTrace( + AirbyteTraceMessage() + .withAnalytics(airbyteAnalyticsTraceMessage) + .withType(AirbyteTraceMessage.Type.ANALYTICS) + .withEmittedAt(Instant.now().toEpochMilli().toDouble()) + ) + } + + private fun makeStreamStatusTraceAirbyteMessage( + airbyteStreamStatusHolder: AirbyteStreamStatusHolder + ): AirbyteMessage { + return makeAirbyteMessageFromTraceMessage(airbyteStreamStatusHolder.toTraceMessage()) + } + + private fun makeAirbyteMessageFromTraceMessage( + airbyteTraceMessage: AirbyteTraceMessage + ): AirbyteMessage { + return AirbyteMessage().withType(AirbyteMessage.Type.TRACE).withTrace(airbyteTraceMessage) + } + + private fun makeAirbyteTraceMessage( + traceMessageType: AirbyteTraceMessage.Type + ): AirbyteTraceMessage { + return AirbyteTraceMessage() + .withType(traceMessageType) + .withEmittedAt(System.currentTimeMillis().toDouble()) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/Command.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/Command.kt new file mode 100644 index 0000000000000..c12c022eb9281 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/Command.kt @@ -0,0 +1,12 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.base + +enum class Command { + SPEC, + CHECK, + DISCOVER, + READ, + WRITE +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/CommitOnStateAirbyteMessageConsumer.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/CommitOnStateAirbyteMessageConsumer.kt new file mode 100644 index 0000000000000..bd789e689f78a --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/CommitOnStateAirbyteMessageConsumer.kt @@ -0,0 +1,34 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.base + +import io.airbyte.protocol.models.v0.AirbyteMessage +import java.util.function.Consumer +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * Minimal abstract class intended to handle the case where the destination can commit records every + * time a state message appears. This class does that commit and then immediately emits the state + * message. This should only be used in cases when the commit is relatively cheap. immediately. + */ +abstract class CommitOnStateAirbyteMessageConsumer( + private val outputRecordCollector: Consumer +) : FailureTrackingAirbyteMessageConsumer(), AirbyteMessageConsumer { + @Throws(Exception::class) + override fun accept(message: AirbyteMessage) { + if (message.type == AirbyteMessage.Type.STATE) { + commit() + outputRecordCollector.accept(message) + } + super.accept(message) + } + + @Throws(Exception::class) abstract fun commit() + + companion object { + private val LOGGER: Logger = + LoggerFactory.getLogger(CommitOnStateAirbyteMessageConsumer::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/Destination.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/Destination.kt new file mode 100644 index 0000000000000..0094e8c169e1b --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/Destination.kt @@ -0,0 +1,162 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.base + +import com.fasterxml.jackson.annotation.JsonProperty +import com.fasterxml.jackson.annotation.JsonPropertyDescription +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.annotations.VisibleForTesting +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import java.util.function.Consumer +import lombok.extern.slf4j.Slf4j +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +interface Destination : Integration { + /** + * Return a consumer that writes messages to the destination. + * + * @param config + * - integration-specific configuration object as json. e.g. { "username": "airbyte", + * "password": "super secure" } + * @param catalog + * - schema of the incoming messages. + * @return Consumer that accepts message. The [AirbyteMessageConsumer.accept] will be called n + * times where n is the number of messages. [AirbyteMessageConsumer.close] will always be called + * once regardless of success or failure. + * @throws Exception + * - any exception. + */ + @Throws(Exception::class) + fun getConsumer( + config: JsonNode, + catalog: ConfiguredAirbyteCatalog, + outputRecordCollector: Consumer + ): AirbyteMessageConsumer? + + /** + * Default implementation allows us to not have to touch existing destinations while avoiding a + * lot of conditional statements in [IntegrationRunner]. This is preferred over #getConsumer and + * is the default Async Framework method. + * + * @param config config + * @param catalog catalog + * @param outputRecordCollector outputRecordCollector + * @return AirbyteMessageConsumer wrapped in SerializedAirbyteMessageConsumer to maintain legacy + * behavior. + * @throws Exception exception + */ + @Throws(Exception::class) + fun getSerializedMessageConsumer( + config: JsonNode, + catalog: ConfiguredAirbyteCatalog, + outputRecordCollector: Consumer + ): SerializedAirbyteMessageConsumer? { + return ShimToSerializedAirbyteMessageConsumer( + getConsumer(config, catalog, outputRecordCollector) + ) + } + + /** + * Backwards-compatibility wrapper for an AirbyteMessageConsumer. Strips the sizeInBytes + * argument away from the .accept call. + */ + @Slf4j + class ShimToSerializedAirbyteMessageConsumer(private val consumer: AirbyteMessageConsumer?) : + SerializedAirbyteMessageConsumer { + @Throws(Exception::class) + override fun start() { + consumer!!.start() + } + + /** + * Consumes an [AirbyteMessage] for processing. + * + * If the provided JSON string is invalid AND represents a [AirbyteMessage.Type.STATE] + * message, processing is halted. Otherwise, the invalid message is logged and execution + * continues. + * + * @param inputString JSON representation of an [AirbyteMessage]. + * @throws Exception if an invalid state message is provided or the consumer is unable to + * accept the provided message. + */ + @Throws(Exception::class) + override fun accept(inputString: String, sizeInBytes: Int) { + consumeMessage(consumer, inputString) + } + + @Throws(Exception::class) + override fun close() { + consumer!!.close() + } + + /** + * Custom class for parsing a JSON message to determine the type of the represented + * [AirbyteMessage]. Do the bare minimum deserialisation by reading only the type field. + */ + private class AirbyteTypeMessage { + @get:JsonProperty("type") + @set:JsonProperty("type") + @JsonProperty("type") + @JsonPropertyDescription("Message type") + var type: AirbyteMessage.Type? = null + } + + companion object { + private val LOGGER: Logger = + LoggerFactory.getLogger(ShimToSerializedAirbyteMessageConsumer::class.java) + /** + * Consumes an [AirbyteMessage] for processing. + * + * If the provided JSON string is invalid AND represents a [AirbyteMessage.Type.STATE] + * message, processing is halted. Otherwise, the invalid message is logged and execution + * continues. + * + * @param consumer An [AirbyteMessageConsumer] that can handle the provided message. + * @param inputString JSON representation of an [AirbyteMessage]. + * @throws Exception if an invalid state message is provided or the consumer is unable + * to accept the provided message. + */ + @VisibleForTesting + @Throws(Exception::class) + fun consumeMessage(consumer: AirbyteMessageConsumer?, inputString: String) { + val messageOptional = Jsons.tryDeserialize(inputString, AirbyteMessage::class.java) + if (messageOptional.isPresent) { + consumer!!.accept(messageOptional.get()) + } else { + check(!isStateMessage(inputString)) { "Invalid state message: $inputString" } + LOGGER.error("Received invalid message: $inputString") + } + } + + /** + * Tests whether the provided JSON string represents a state message. + * + * @param input a JSON string that represents an [AirbyteMessage]. + * @return `true` if the message is a state message, `false` otherwise. + */ + private fun isStateMessage(input: String): Boolean { + val deserialized = Jsons.tryDeserialize(input, AirbyteTypeMessage::class.java) + return if (deserialized.isPresent) { + deserialized.get().type == AirbyteMessage.Type.STATE + } else { + false + } + } + } + } + + val isV2Destination: Boolean + /** Denotes if the destination fully supports Destinations V2. */ + get() = false + + companion object { + @JvmStatic + fun defaultOutputRecordCollector(message: AirbyteMessage?) { + println(Jsons.serialize(message)) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/DestinationConfig.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/DestinationConfig.kt new file mode 100644 index 0000000000000..cdfbfc4f0373e --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/DestinationConfig.kt @@ -0,0 +1,82 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.base + +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.annotations.VisibleForTesting +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** Singleton of destination config for easy lookup of values. */ +class DestinationConfig private constructor() { + // whether the destination fully supports Destinations V2 + var isV2Destination: Boolean = false + private set + + @VisibleForTesting var root: JsonNode? = null + + fun getNodeValue(key: String?): JsonNode? { + val node = config!!.root!![key] + if (node == null) { + LOGGER.debug("Cannot find node with key {} ", key) + } + return node + } + + // string value, otherwise empty string + fun getTextValue(key: String?): String { + val node = getNodeValue(key) + if (node == null || !node.isTextual) { + LOGGER.debug("Cannot retrieve text value for node with key {}", key) + return "" + } + return node.asText() + } + + // boolean value, otherwise false + fun getBooleanValue(key: String?): Boolean { + val node = getNodeValue(key) + if (node == null || !node.isBoolean) { + LOGGER.debug("Cannot retrieve boolean value for node with key {}", key) + return false + } + return node.asBoolean() + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(DestinationConfig::class.java) + + private var config: DestinationConfig? = null + + @JvmStatic + @VisibleForTesting + fun initialize(root: JsonNode?) { + initialize(root, false) + } + + fun initialize(root: JsonNode?, isV2Destination: Boolean) { + if (config == null) { + requireNotNull(root) { "Cannot create DestinationConfig from null." } + config = DestinationConfig() + config!!.root = root + config!!.isV2Destination = isV2Destination + } else { + LOGGER.warn("Singleton was already initialized.") + } + } + + @JvmStatic + val instance: DestinationConfig? + get() { + checkNotNull(config) { "Singleton not initialized." } + return config + } + + @JvmStatic + @VisibleForTesting + fun clearInstance() { + config = null + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/FailureTrackingAirbyteMessageConsumer.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/FailureTrackingAirbyteMessageConsumer.kt new file mode 100644 index 0000000000000..fb7bd59672313 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/FailureTrackingAirbyteMessageConsumer.kt @@ -0,0 +1,83 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.base + +import io.airbyte.protocol.models.v0.AirbyteMessage +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * Minimal abstract class intended to provide a consistent structure to classes seeking to implement + * the [AirbyteMessageConsumer] interface. The original interface methods are wrapped in generic + * exception handlers - any exception is caught and logged. + * + * Two methods are intended for extension: + * + * * startTracked: Wraps set up of necessary infrastructure/configuration before message + * consumption. + * * acceptTracked: Wraps actual processing of each [io.airbyte.protocol.models.v0.AirbyteMessage]. + * + * Though not necessary, we highly encourage using this class when implementing destinations. See + * child classes for examples. + */ +abstract class FailureTrackingAirbyteMessageConsumer : AirbyteMessageConsumer { + private var hasFailed = false + + /** + * Wraps setup of necessary infrastructure/configuration before message consumption + * + * @throws Exception + */ + @Throws(Exception::class) protected abstract fun startTracked() + + @Throws(Exception::class) + override fun start() { + try { + startTracked() + } catch (e: Exception) { + LOGGER.error("Exception while starting consumer", e) + hasFailed = true + throw e + } + } + + /** + * Processing of AirbyteMessages with general functionality of storing STATE messages, + * serializing RECORD messages and storage within a buffer + * + * NOTE: Not all the functionality mentioned above is always true but generally applies + * + * @param msg [AirbyteMessage] to be processed + * @throws Exception + */ + @Throws(Exception::class) protected abstract fun acceptTracked(msg: AirbyteMessage) + + @Throws(Exception::class) + override fun accept(msg: AirbyteMessage) { + try { + acceptTracked(msg) + } catch (e: Exception) { + LOGGER.error("Exception while accepting message", e) + hasFailed = true + throw e + } + } + + @Throws(Exception::class) protected abstract fun close(hasFailed: Boolean) + + @Throws(Exception::class) + override fun close() { + if (hasFailed) { + LOGGER.warn("Airbyte message consumer: failed.") + } else { + LOGGER.info("Airbyte message consumer: succeeded.") + } + close(hasFailed) + } + + companion object { + private val LOGGER: Logger = + LoggerFactory.getLogger(FailureTrackingAirbyteMessageConsumer::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/Integration.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/Integration.kt new file mode 100644 index 0000000000000..0ed88f7207aab --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/Integration.kt @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.base + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.protocol.models.v0.AirbyteConnectionStatus +import io.airbyte.protocol.models.v0.ConnectorSpecification + +interface Integration { + /** + * Fetch the specification for the integration. + * + * @return specification. + * @throws Exception + * - any exception. + */ + @Throws(Exception::class) fun spec(): ConnectorSpecification + + /** + * Check whether, given the current configuration, the integration can connect to the + * integration. + * + * @param config + * - integration-specific configuration object as json. e.g. { "username": "airbyte", + * "password": "super secure" } + * @return Whether or not the connection was successful. Optional message if it was not. + * @throws Exception + * - any exception. + */ + @Throws(Exception::class) fun check(config: JsonNode): AirbyteConnectionStatus? +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/IntegrationCliParser.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/IntegrationCliParser.kt new file mode 100644 index 0000000000000..3af592161f398 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/IntegrationCliParser.kt @@ -0,0 +1,181 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.base + +import com.google.common.base.Preconditions +import io.airbyte.commons.cli.Clis +import java.nio.file.Path +import java.util.* +import org.apache.commons.cli.Option +import org.apache.commons.cli.OptionGroup +import org.apache.commons.cli.Options +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +// todo (cgardens) - use argparse4j.github.io instead of org.apache.commons.cli to leverage better +// sub-parser support. +/** Parses command line args to a type safe config object for each command type. */ +class IntegrationCliParser { + fun parse(args: Array): IntegrationConfig { + val command = parseCommand(args) + return parseOptions(args, command) + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(IntegrationCliParser::class.java) + + private val COMMAND_GROUP: OptionGroup + + init { + val optionGroup = OptionGroup() + optionGroup.isRequired = true + + optionGroup.addOption( + Option.builder() + .longOpt(Command.SPEC.toString().lowercase(Locale.getDefault())) + .desc("outputs the json configuration specification") + .build() + ) + optionGroup.addOption( + Option.builder() + .longOpt(Command.CHECK.toString().lowercase(Locale.getDefault())) + .desc("checks the config can be used to connect") + .build() + ) + optionGroup.addOption( + Option.builder() + .longOpt(Command.DISCOVER.toString().lowercase(Locale.getDefault())) + .desc("outputs a catalog describing the source's catalog") + .build() + ) + optionGroup.addOption( + Option.builder() + .longOpt(Command.READ.toString().lowercase(Locale.getDefault())) + .desc("reads the source and outputs messages to STDOUT") + .build() + ) + optionGroup.addOption( + Option.builder() + .longOpt(Command.WRITE.toString().lowercase(Locale.getDefault())) + .desc("writes messages from STDIN to the integration") + .build() + ) + + COMMAND_GROUP = optionGroup + } + + private fun parseCommand(args: Array): Command { + val options = Options() + options.addOptionGroup(COMMAND_GROUP) + + val parsed = Clis.parse(args, options, Clis.getRelaxedParser()) + return Command.valueOf(parsed.options[0].longOpt.uppercase(Locale.getDefault())) + } + + private fun parseOptions(args: Array, command: Command): IntegrationConfig { + val options = Options() + options.addOptionGroup( + COMMAND_GROUP + ) // so that the parser does not throw an exception when encounter command args. + + when (command) { + Command.SPEC -> { + // no args. + } + Command.CHECK, + Command.DISCOVER -> + options.addOption( + Option.builder() + .longOpt(JavaBaseConstants.ARGS_CONFIG_KEY) + .desc(JavaBaseConstants.ARGS_CONFIG_DESC) + .hasArg(true) + .required(true) + .build() + ) + Command.READ -> { + options.addOption( + Option.builder() + .longOpt(JavaBaseConstants.ARGS_CONFIG_KEY) + .desc(JavaBaseConstants.ARGS_CONFIG_DESC) + .hasArg(true) + .required(true) + .build() + ) + options.addOption( + Option.builder() + .longOpt(JavaBaseConstants.ARGS_CATALOG_KEY) + .desc(JavaBaseConstants.ARGS_CATALOG_DESC) + .hasArg(true) + .build() + ) + options.addOption( + Option.builder() + .longOpt(JavaBaseConstants.ARGS_STATE_KEY) + .desc(JavaBaseConstants.ARGS_PATH_DESC) + .hasArg(true) + .build() + ) + } + Command.WRITE -> { + options.addOption( + Option.builder() + .longOpt(JavaBaseConstants.ARGS_CONFIG_KEY) + .desc(JavaBaseConstants.ARGS_CONFIG_DESC) + .hasArg(true) + .required(true) + .build() + ) + options.addOption( + Option.builder() + .longOpt(JavaBaseConstants.ARGS_CATALOG_KEY) + .desc(JavaBaseConstants.ARGS_CATALOG_DESC) + .hasArg(true) + .build() + ) + } + else -> throw IllegalStateException("Unexpected value: $command") + } + val parsed = + Clis.parse(args, options, command.toString().lowercase(Locale.getDefault())) + Preconditions.checkNotNull(parsed) + val argsMap: MutableMap = HashMap() + for (option in parsed.options) { + argsMap[option.longOpt] = option.value + } + LOGGER.info("integration args: {}", argsMap) + + return when (command) { + Command.SPEC -> { + IntegrationConfig.Companion.spec() + } + Command.CHECK -> { + IntegrationConfig.Companion.check( + Path.of(argsMap[JavaBaseConstants.ARGS_CONFIG_KEY]) + ) + } + Command.DISCOVER -> { + IntegrationConfig.Companion.discover( + Path.of(argsMap[JavaBaseConstants.ARGS_CONFIG_KEY]) + ) + } + Command.READ -> { + IntegrationConfig.Companion.read( + Path.of(argsMap[JavaBaseConstants.ARGS_CONFIG_KEY]), + Path.of(argsMap[JavaBaseConstants.ARGS_CATALOG_KEY]), + if (argsMap.containsKey(JavaBaseConstants.ARGS_STATE_KEY)) + Path.of(argsMap[JavaBaseConstants.ARGS_STATE_KEY]) + else null + ) + } + Command.WRITE -> { + IntegrationConfig.Companion.write( + Path.of(argsMap[JavaBaseConstants.ARGS_CONFIG_KEY]), + Path.of(argsMap[JavaBaseConstants.ARGS_CATALOG_KEY]) + ) + } + else -> throw IllegalStateException("Unexpected value: $command") + } + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/IntegrationConfig.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/IntegrationConfig.kt new file mode 100644 index 0000000000000..7ddc115fac981 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/IntegrationConfig.kt @@ -0,0 +1,93 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.base + +import com.google.common.base.Preconditions +import java.nio.file.Path +import java.util.* + +class IntegrationConfig +private constructor( + val command: Command, + private val configPath: Path?, + private val catalogPath: Path?, + private val statePath: Path? +) { + fun getConfigPath(): Path? { + Preconditions.checkState(command != Command.SPEC) + return configPath + } + + fun getCatalogPath(): Path? { + Preconditions.checkState(command == Command.READ || command == Command.WRITE) + return catalogPath + } + + fun getStatePath(): Optional { + Preconditions.checkState(command == Command.READ) + return Optional.ofNullable(statePath) + } + + override fun toString(): String { + return "IntegrationConfig{" + + "command=" + + command + + ", configPath='" + + configPath + + '\'' + + ", catalogPath='" + + catalogPath + + '\'' + + ", statePath='" + + statePath + + '\'' + + '}' + } + + override fun equals(o: Any?): Boolean { + if (this === o) { + return true + } + if (o == null || javaClass != o.javaClass) { + return false + } + val that = o as IntegrationConfig + return command == that.command && + configPath == that.configPath && + catalogPath == that.catalogPath && + statePath == that.statePath + } + + override fun hashCode(): Int { + return Objects.hash(command, configPath, catalogPath, statePath) + } + + companion object { + fun spec(): IntegrationConfig { + return IntegrationConfig(Command.SPEC, null, null, null) + } + + fun check(config: Path): IntegrationConfig { + Preconditions.checkNotNull(config) + return IntegrationConfig(Command.CHECK, config, null, null) + } + + fun discover(config: Path): IntegrationConfig { + Preconditions.checkNotNull(config) + return IntegrationConfig(Command.DISCOVER, config, null, null) + } + + fun read(configPath: Path, catalogPath: Path, statePath: Path?): IntegrationConfig { + Preconditions.checkNotNull(configPath) + Preconditions.checkNotNull(catalogPath) + return IntegrationConfig(Command.READ, configPath, catalogPath, statePath) + } + + fun write(configPath: Path, catalogPath: Path): IntegrationConfig { + Preconditions.checkNotNull(configPath) + Preconditions.checkNotNull(catalogPath) + return IntegrationConfig(Command.WRITE, configPath, catalogPath, null) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/IntegrationRunner.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/IntegrationRunner.kt new file mode 100644 index 0000000000000..a6f05790bc44c --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/IntegrationRunner.kt @@ -0,0 +1,537 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.base + +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.annotations.VisibleForTesting +import com.google.common.base.Preconditions +import com.google.common.collect.Lists +import datadog.trace.api.Trace +import io.airbyte.cdk.integrations.util.ApmTraceUtils +import io.airbyte.cdk.integrations.util.ConnectorExceptionUtil +import io.airbyte.cdk.integrations.util.concurrent.ConcurrentStreamConsumer +import io.airbyte.commons.features.EnvVariableFeatureFlags +import io.airbyte.commons.features.FeatureFlags +import io.airbyte.commons.io.IOs +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.stream.AirbyteStreamStatusHolder +import io.airbyte.commons.stream.StreamStatusUtils +import io.airbyte.commons.string.Strings +import io.airbyte.commons.util.AutoCloseableIterator +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair +import io.airbyte.protocol.models.v0.AirbyteConnectionStatus +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import io.airbyte.validation.json.JsonSchemaValidator +import java.io.* +import java.nio.charset.StandardCharsets +import java.nio.file.Path +import java.util.* +import java.util.concurrent.* +import java.util.function.Consumer +import java.util.function.Predicate +import java.util.stream.Collectors +import org.apache.commons.lang3.ThreadUtils +import org.apache.commons.lang3.concurrent.BasicThreadFactory +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * Accepts EITHER a destination or a source. Routes commands from the commandline to the appropriate + * methods on the integration. Keeps itself DRY for methods that are common between source and + * destination. + */ +class IntegrationRunner +@VisibleForTesting +internal constructor( + cliParser: IntegrationCliParser, + outputRecordCollector: Consumer, + destination: Destination?, + source: Source? +) { + private val cliParser: IntegrationCliParser + private val outputRecordCollector: Consumer + private val integration: Integration + private val destination: Destination? + private val source: Source? + private val featureFlags: FeatureFlags + + constructor( + destination: Destination? + ) : this( + IntegrationCliParser(), + Consumer { message: AirbyteMessage -> + Destination.Companion.defaultOutputRecordCollector(message) + }, + destination, + null + ) + + constructor( + source: Source? + ) : this( + IntegrationCliParser(), + Consumer { message: AirbyteMessage -> + Destination.Companion.defaultOutputRecordCollector(message) + }, + null, + source + ) + + init { + Preconditions.checkState( + (destination != null) xor (source != null), + "can only pass in a destination or a source" + ) + this.cliParser = cliParser + this.outputRecordCollector = outputRecordCollector + // integration iface covers the commands that are the same for both source and destination. + integration = source ?: destination!! + this.source = source + this.destination = destination + this.featureFlags = EnvVariableFeatureFlags() + validator = JsonSchemaValidator() + + Thread.setDefaultUncaughtExceptionHandler(AirbyteExceptionHandler()) + } + + @VisibleForTesting + internal constructor( + cliParser: IntegrationCliParser, + outputRecordCollector: Consumer, + destination: Destination?, + source: Source?, + jsonSchemaValidator: JsonSchemaValidator + ) : this(cliParser, outputRecordCollector, destination, source) { + validator = jsonSchemaValidator + } + + @Trace(operationName = "RUN_OPERATION") + @Throws(Exception::class) + fun run(args: Array) { + val parsed = cliParser.parse(args) + try { + runInternal(parsed) + } catch (e: Exception) { + throw e + } + } + + @Throws(Exception::class) + private fun runInternal(parsed: IntegrationConfig?) { + LOGGER.info("Running integration: {}", integration.javaClass.name) + LOGGER.info("Command: {}", parsed!!.command) + LOGGER.info("Integration config: {}", parsed) + + try { + when (parsed!!.command) { + Command.SPEC -> + outputRecordCollector.accept( + AirbyteMessage() + .withType(AirbyteMessage.Type.SPEC) + .withSpec(integration.spec()) + ) + Command.CHECK -> { + val config = parseConfig(parsed!!.getConfigPath()) + if (integration is Destination) { + DestinationConfig.Companion.initialize(config, integration.isV2Destination) + } + try { + validateConfig( + integration.spec()!!.connectionSpecification, + config, + "CHECK" + ) + } catch (e: Exception) { + // if validation fails don't throw an exception, return a failed connection + // check message + outputRecordCollector.accept( + AirbyteMessage() + .withType(AirbyteMessage.Type.CONNECTION_STATUS) + .withConnectionStatus( + AirbyteConnectionStatus() + .withStatus(AirbyteConnectionStatus.Status.FAILED) + .withMessage(e.message) + ) + ) + } + + outputRecordCollector.accept( + AirbyteMessage() + .withType(AirbyteMessage.Type.CONNECTION_STATUS) + .withConnectionStatus(integration.check(config)) + ) + } + Command.DISCOVER -> { + val config = parseConfig(parsed!!.getConfigPath()) + validateConfig(integration.spec()!!.connectionSpecification, config, "DISCOVER") + outputRecordCollector.accept( + AirbyteMessage() + .withType(AirbyteMessage.Type.CATALOG) + .withCatalog(source!!.discover(config)) + ) + } + Command.READ -> { + val config = parseConfig(parsed!!.getConfigPath()) + validateConfig(integration.spec()!!.connectionSpecification, config, "READ") + val catalog = + parseConfig(parsed.getCatalogPath(), ConfiguredAirbyteCatalog::class.java) + val stateOptional = + parsed.getStatePath().map { path: Path? -> parseConfig(path) } + try { + if (featureFlags.concurrentSourceStreamRead()) { + LOGGER.info("Concurrent source stream read enabled.") + readConcurrent(config, catalog, stateOptional) + } else { + readSerial(config, catalog, stateOptional) + } + } finally { + if (source is AutoCloseable) { + (source as AutoCloseable).close() + } + } + } + Command.WRITE -> { + val config = parseConfig(parsed!!.getConfigPath()) + validateConfig(integration.spec()!!.connectionSpecification, config, "WRITE") + // save config to singleton + DestinationConfig.Companion.initialize( + config, + (integration as Destination).isV2Destination + ) + val catalog = + parseConfig(parsed.getCatalogPath(), ConfiguredAirbyteCatalog::class.java) + + try { + destination!! + .getSerializedMessageConsumer(config, catalog, outputRecordCollector) + .use { consumer -> consumeWriteStream(consumer!!) } + } finally { + stopOrphanedThreads() + } + } + else -> throw IllegalStateException("Unexpected value: " + parsed!!.command) + } + } catch (e: Exception) { + // Many of the exceptions thrown are nested inside layers of RuntimeExceptions. An + // attempt is made + // to + // find the root exception that corresponds to a configuration error. If that does not + // exist, we + // just return the original exception. + ApmTraceUtils.addExceptionToTrace(e) + val rootThrowable = ConnectorExceptionUtil.getRootConfigError(e) + val displayMessage = ConnectorExceptionUtil.getDisplayMessage(rootThrowable) + // If the source connector throws a config error, a trace message with the relevant + // message should + // be surfaced. + if (ConnectorExceptionUtil.isConfigError(rootThrowable)) { + AirbyteTraceMessageUtility.emitConfigErrorTrace(e, displayMessage) + } + if (parsed!!.command == Command.CHECK) { + // Currently, special handling is required for the CHECK case since the user display + // information in + // the trace message is + // not properly surfaced to the FE. In the future, we can remove this and just throw + // an exception. + outputRecordCollector.accept( + AirbyteMessage() + .withType(AirbyteMessage.Type.CONNECTION_STATUS) + .withConnectionStatus( + AirbyteConnectionStatus() + .withStatus(AirbyteConnectionStatus.Status.FAILED) + .withMessage(displayMessage) + ) + ) + return + } + throw e + } + + LOGGER.info("Completed integration: {}", integration.javaClass.name) + } + + private fun produceMessages( + messageIterator: AutoCloseableIterator, + recordCollector: Consumer + ) { + messageIterator!!.airbyteStream.ifPresent { s: AirbyteStreamNameNamespacePair? -> + LOGGER.debug("Producing messages for stream {}...", s) + } + messageIterator.forEachRemaining(recordCollector) + messageIterator.airbyteStream.ifPresent { s: AirbyteStreamNameNamespacePair? -> + LOGGER.debug("Finished producing messages for stream {}...", s) + } + } + + @Throws(Exception::class) + private fun readConcurrent( + config: JsonNode, + catalog: ConfiguredAirbyteCatalog, + stateOptional: Optional + ) { + val streams = source!!.readStreams(config, catalog, stateOptional.orElse(null)) + + try { + ConcurrentStreamConsumer( + { stream: AutoCloseableIterator -> + this.consumeFromStream(stream) + }, + streams!!.size + ) + .use { streamConsumer -> + /* + * Break the streams into partitions equal to the number of concurrent streams supported by the + * stream consumer. + */ + val partitionSize = streamConsumer.parallelism + val partitions = Lists.partition(streams.stream().toList(), partitionSize!!) + + // Submit each stream partition for concurrent execution + partitions.forEach( + Consumer { partition: List> -> + streamConsumer.accept(partition) + } + ) + + // Check for any exceptions that were raised during the concurrent execution + if (streamConsumer.exception.isPresent) { + throw streamConsumer.exception.get() + } + } + } catch (e: Exception) { + LOGGER.error("Unable to perform concurrent read.", e) + throw e + } finally { + stopOrphanedThreads() + } + } + + @Throws(Exception::class) + private fun readSerial( + config: JsonNode, + catalog: ConfiguredAirbyteCatalog, + stateOptional: Optional + ) { + try { + source!!.read(config, catalog, stateOptional.orElse(null)).use { messageIterator -> + produceMessages(messageIterator, outputRecordCollector) + } + } finally { + stopOrphanedThreads() + } + } + + private fun consumeFromStream(stream: AutoCloseableIterator) { + try { + val streamStatusTrackingRecordConsumer = + StreamStatusUtils.statusTrackingRecordCollector( + stream, + outputRecordCollector, + Optional.of( + Consumer { obj: AirbyteStreamStatusHolder -> + AirbyteTraceMessageUtility.emitStreamStatusTrace(obj) + } + ) + ) + produceMessages(stream, streamStatusTrackingRecordConsumer) + } catch (e: Exception) { + stream!!.airbyteStream.ifPresent { s: AirbyteStreamNameNamespacePair? -> + LOGGER.error("Failed to consume from stream {}.", s, e) + } + throw RuntimeException(e) + } + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(IntegrationRunner::class.java) + + const val TYPE_AND_DEDUPE_THREAD_NAME: String = "type-and-dedupe" + + /** + * Filters threads that should not be considered when looking for orphaned threads at + * shutdown of the integration runner. + * + * **N.B.** Daemon threads don't block the JVM if the main `currentThread` exits, so they + * are not problematic. Additionally, ignore database connection pool threads, which stay + * active so long as the database connection pool is open. + */ + @VisibleForTesting + val ORPHANED_THREAD_FILTER: Predicate = Predicate { runningThread: Thread -> + (runningThread.name != Thread.currentThread().name && + !runningThread.isDaemon && + TYPE_AND_DEDUPE_THREAD_NAME != runningThread.name) + } + + const val INTERRUPT_THREAD_DELAY_MINUTES: Int = 1 + const val EXIT_THREAD_DELAY_MINUTES: Int = 2 + + const val FORCED_EXIT_CODE: Int = 2 + + private val EXIT_HOOK = Runnable { System.exit(FORCED_EXIT_CODE) } + + private lateinit var validator: JsonSchemaValidator + + @Throws(Exception::class) + internal fun consumeWriteStream( + consumer: SerializedAirbyteMessageConsumer, + inputStream: InputStream = System.`in` + ) { + LOGGER.info("Starting buffered read of input stream") + consumer.start() + inputStream.bufferedReader(StandardCharsets.UTF_8).use { + var emptyLines = 0 + it.lines().forEach { line: String -> + if (line.isNotEmpty()) { + consumer.accept(line, line.toByteArray(StandardCharsets.UTF_8).size) + } else { + emptyLines++ + // We've occasionally seen this loop not exit + // maybe it's because we keep getting streams of empty lines? + // TODO: Monitor the logs for occurrences of this log line and if this isn't + // an issue, remove it. + if (emptyLines % 1_000 == 0 && emptyLines < 10_000) { + LOGGER.warn("Encountered $emptyLines empty lines during execution") + } + } + } + if (emptyLines > 0) { + LOGGER.warn("Encountered $emptyLines empty lines in the input stream.") + } + } + LOGGER.info("Finished buffered read of input stream") + } + + /** + * Stops any non-daemon threads that could block the JVM from exiting when the main thread + * is done. + * + * If any active non-daemon threads would be left as orphans, this method will schedule some + * interrupt/exit hooks after giving it some time delay to close up properly. It is + * generally preferred to have a proper closing sequence from children threads instead of + * interrupting or force exiting the process, so this mechanism serve as a fallback while + * surfacing warnings in logs for maintainers to fix the code behavior instead. + * + * @param exitHook The [Runnable] exit hook to execute for any orphaned threads. + * @param interruptTimeDelay The time to delay execution of the orphaned thread interrupt + * attempt. + * @param interruptTimeUnit The time unit of the interrupt delay. + * @param exitTimeDelay The time to delay execution of the orphaned thread exit hook. + * @param exitTimeUnit The time unit of the exit delay. + */ + @VisibleForTesting + fun stopOrphanedThreads( + exitHook: Runnable = EXIT_HOOK, + interruptTimeDelay: Int = INTERRUPT_THREAD_DELAY_MINUTES, + interruptTimeUnit: TimeUnit = TimeUnit.MINUTES, + exitTimeDelay: Int = EXIT_THREAD_DELAY_MINUTES, + exitTimeUnit: TimeUnit = TimeUnit.MINUTES + ) { + val currentThread = Thread.currentThread() + + val runningThreads = + ThreadUtils.getAllThreads() + .stream() + .filter(ORPHANED_THREAD_FILTER) + .collect(Collectors.toList()) + if (runningThreads.isNotEmpty()) { + LOGGER.warn( + """ + The main thread is exiting while children non-daemon threads from a connector are still active. + Ideally, this situation should not happen... + Please check with maintainers if the connector or library code should safely clean up its threads before quitting instead. + The main thread is: {} + """.trimIndent(), + dumpThread(currentThread) + ) + val scheduledExecutorService = + Executors.newSingleThreadScheduledExecutor( + BasicThreadFactory + .Builder() // this thread executor will create daemon threads, so it + // does not block exiting if all other active + // threads are already stopped. + .daemon(true) + .build() + ) + for (runningThread in runningThreads) { + val str = "Active non-daemon thread: " + dumpThread(runningThread) + LOGGER.warn(str) + // even though the main thread is already shutting down, we still leave some + // chances to the children + // threads to close properly on their own. + // So, we schedule an interrupt hook after a fixed time delay instead... + scheduledExecutorService.schedule( + { runningThread.interrupt() }, + interruptTimeDelay.toLong(), + interruptTimeUnit + ) + } + scheduledExecutorService.schedule( + { + if ( + ThreadUtils.getAllThreads().stream().anyMatch { runningThread: Thread -> + !runningThread.isDaemon && runningThread.name != currentThread.name + } + ) { + LOGGER.error( + "Failed to interrupt children non-daemon threads, forcefully exiting NOW...\n" + ) + exitHook.run() + } + }, + exitTimeDelay.toLong(), + exitTimeUnit + ) + } + } + + private fun dumpThread(thread: Thread): String { + return String.format( + "%s (%s)\n Thread stacktrace: %s", + thread.name, + thread.state, + Strings.join(java.util.List.of(*thread.stackTrace), "\n at ") + ) + } + + @Throws(Exception::class) + private fun validateConfig( + schemaJson: JsonNode, + objectJson: JsonNode, + operationType: String + ) { + val validationResult = validator.validate(schemaJson, objectJson) + if (validationResult.isNotEmpty()) { + throw Exception( + String.format( + "Verification error(s) occurred for %s. Errors: %s ", + operationType, + validationResult + ) + ) + } + } + + fun parseConfig(path: Path?): JsonNode { + return Jsons.deserialize(IOs.readFile(path)) + } + + private fun parseConfig(path: Path?, klass: Class): T { + val jsonNode = parseConfig(path) + return Jsons.`object`(jsonNode, klass) + } + + /** @param connectorImage Expected format: [organization/]image[:version] */ + @VisibleForTesting + fun parseConnectorVersion(connectorImage: String?): String { + if (connectorImage == null || connectorImage == "") { + return "unknown" + } + + val tokens = + connectorImage.split(":".toRegex()).dropLastWhile { it.isEmpty() }.toTypedArray() + return tokens[tokens.size - 1] + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/JavaBaseConstants.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/JavaBaseConstants.kt new file mode 100644 index 0000000000000..3c888d64142fa --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/JavaBaseConstants.kt @@ -0,0 +1,59 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.base + +import java.util.* +import org.apache.commons.lang3.StringUtils + +fun upperQuoted(column: String): String { + return StringUtils.wrap(column.uppercase(Locale.getDefault()), "\"") +} + +object JavaBaseConstants { + const val ARGS_CONFIG_KEY: String = "config" + const val ARGS_CATALOG_KEY: String = "catalog" + const val ARGS_STATE_KEY: String = "state" + + const val ARGS_CONFIG_DESC: String = "path to the json configuration file" + const val ARGS_CATALOG_DESC: String = "input path for the catalog" + const val ARGS_PATH_DESC: String = "path to the json-encoded state file" + + const val COLUMN_NAME_AB_ID: String = "_airbyte_ab_id" + const val COLUMN_NAME_EMITTED_AT: String = "_airbyte_emitted_at" + const val COLUMN_NAME_DATA: String = "_airbyte_data" + @JvmField + val LEGACY_RAW_TABLE_COLUMNS: List = + java.util.List.of(COLUMN_NAME_AB_ID, COLUMN_NAME_DATA, COLUMN_NAME_EMITTED_AT) + + // destination v2 + const val COLUMN_NAME_AB_RAW_ID: String = "_airbyte_raw_id" + const val COLUMN_NAME_AB_LOADED_AT: String = "_airbyte_loaded_at" + const val COLUMN_NAME_AB_EXTRACTED_AT: String = "_airbyte_extracted_at" + const val COLUMN_NAME_AB_META: String = "_airbyte_meta" + + // Meta was introduced later, so to avoid triggering raw table soft-reset in v1->v2 + // use this column list. + @JvmField + val V2_RAW_TABLE_COLUMN_NAMES_WITHOUT_META: Set = + java.util.Set.of( + COLUMN_NAME_AB_RAW_ID, + COLUMN_NAME_AB_EXTRACTED_AT, + COLUMN_NAME_AB_LOADED_AT, + COLUMN_NAME_DATA, + ) + @JvmField + val V2_RAW_TABLE_COLUMN_NAMES: List = + java.util.List.of( + COLUMN_NAME_AB_RAW_ID, + COLUMN_NAME_AB_EXTRACTED_AT, + COLUMN_NAME_AB_LOADED_AT, + COLUMN_NAME_DATA, + COLUMN_NAME_AB_META, + ) + @JvmField + val V2_FINAL_TABLE_METADATA_COLUMNS: List = + java.util.List.of(COLUMN_NAME_AB_RAW_ID, COLUMN_NAME_AB_EXTRACTED_AT, COLUMN_NAME_AB_META) + + const val DEFAULT_AIRBYTE_INTERNAL_NAMESPACE: String = "airbyte_internal" +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/SerializedAirbyteMessageConsumer.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/SerializedAirbyteMessageConsumer.kt new file mode 100644 index 0000000000000..437459dea970d --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/SerializedAirbyteMessageConsumer.kt @@ -0,0 +1,76 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.base + +import io.airbyte.commons.concurrency.VoidCallable +import io.airbyte.commons.functional.CheckedBiConsumer + +/** + * Interface for the destination's consumption of incoming messages as strings. This interface is + * backwards compatible with [AirbyteMessageConsumer]. + * + * This is via the accept method, which commonly handles parsing, validation, batching and writing + * of the transformed data to the final destination i.e. the technical system data is being written + * to. + * + * Lifecycle: + * + * * 1. Instantiate consumer. + * * 2. start() to initialize any resources that need to be created BEFORE the consumer consumes any + * messages. + * * 3. Consumes ALL records via [SerializedAirbyteMessageConsumer.accept] + * * 4. Always (on success or failure) finalize by calling [SerializedAirbyteMessageConsumer.close] + */ +interface SerializedAirbyteMessageConsumer : + CheckedBiConsumer, AutoCloseable { + /** + * Initialize anything needed for the consumer. Must be called before accept. + * + * @throws Exception exception + */ + @Throws(Exception::class) fun start() + + /** + * Consumes all [AirbyteMessage]s + * + * @param message [AirbyteMessage] as a string + * @param sizeInBytes size of that string in bytes + * @throws Exception exception + */ + @Throws(Exception::class) override fun accept(message: String, sizeInBytes: Int) + + /** + * Executes at the end of consumption of all incoming streamed data regardless of success or + * failure + * + * @throws Exception exception + */ + @Throws(Exception::class) override fun close() + + companion object { + /** Append a function to be called on [SerializedAirbyteMessageConsumer.close]. */ + fun appendOnClose( + consumer: SerializedAirbyteMessageConsumer?, + voidCallable: VoidCallable + ): SerializedAirbyteMessageConsumer? { + return object : SerializedAirbyteMessageConsumer { + @Throws(Exception::class) + override fun start() { + consumer!!.start() + } + + @Throws(Exception::class) + override fun accept(message: String, sizeInBytes: Int) { + consumer!!.accept(message, sizeInBytes) + } + + @Throws(Exception::class) + override fun close() { + consumer!!.close() + voidCallable.call() + } + } + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/Source.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/Source.kt new file mode 100644 index 0000000000000..5bbaa0033d500 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/Source.kt @@ -0,0 +1,73 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.base + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.commons.util.AutoCloseableIterator +import io.airbyte.protocol.models.v0.AirbyteCatalog +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import java.util.List + +interface Source : Integration { + /** + * Discover the current schema in the source. + * + * @param config + * - integration-specific configuration object as json. e.g. { "username": "airbyte", + * "password": "super secure" } + * @return Description of the schema. + * @throws Exception + * - any exception. + */ + @Throws(Exception::class) fun discover(config: JsonNode): AirbyteCatalog + + /** + * Return a iterator of messages pulled from the source. + * + * @param config + * - integration-specific configuration object as json. e.g. { "username": "airbyte", + * "password": "super secure" } + * @param catalog + * - schema of the incoming messages. + * @param state + * - state of the incoming messages. + * @return [AutoCloseableIterator] that produces message. The iterator will be consumed until no + * records remain or until an exception is thrown. [AutoCloseableIterator.close] will always be + * called once regardless of success or failure. + * @throws Exception + * - any exception. + */ + @Throws(Exception::class) + fun read( + config: JsonNode, + catalog: ConfiguredAirbyteCatalog, + state: JsonNode? + ): AutoCloseableIterator + + /** + * Returns a collection of iterators of messages pulled from the source, each representing a + * "stream". + * + * @param config + * - integration-specific configuration object as json. e.g. { "username": "airbyte", + * "password": "super secure" } + * @param catalog + * - schema of the incoming messages. + * @param state + * - state of the incoming messages. + * @return The collection of [AutoCloseableIterator] instances that produce messages for each + * configured "stream" + * @throws Exception + * - any exception + */ + @Throws(Exception::class) + fun readStreams( + config: JsonNode, + catalog: ConfiguredAirbyteCatalog, + state: JsonNode? + ): Collection>? { + return List.of(read(config, catalog, state)) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/TypingAndDedupingFlag.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/TypingAndDedupingFlag.kt new file mode 100644 index 0000000000000..c047db85b3161 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/TypingAndDedupingFlag.kt @@ -0,0 +1,24 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.base + +import java.util.* + +object TypingAndDedupingFlag { + @JvmStatic + val isDestinationV2: Boolean + get() = + (DestinationConfig.Companion.instance!!.isV2Destination || + DestinationConfig.Companion.instance!!.getBooleanValue("use_1s1t_format")) + + @JvmStatic + fun getRawNamespaceOverride(option: String?): Optional { + val rawOverride: String = DestinationConfig.Companion.instance!!.getTextValue(option) + return if (rawOverride == null || rawOverride.isEmpty()) { + Optional.empty() + } else { + Optional.of(rawOverride) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/adaptive/AdaptiveDestinationRunner.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/adaptive/AdaptiveDestinationRunner.kt new file mode 100644 index 0000000000000..40c9797bf2afc --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/adaptive/AdaptiveDestinationRunner.kt @@ -0,0 +1,73 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.base.adaptive + +import io.airbyte.cdk.integrations.base.Destination +import io.airbyte.cdk.integrations.base.IntegrationRunner +import io.airbyte.commons.features.EnvVariableFeatureFlags +import java.util.function.Supplier +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * This class launches different variants of a destination connector based on where Airbyte is + * deployed. + */ +object AdaptiveDestinationRunner { + private val LOGGER: Logger = LoggerFactory.getLogger(AdaptiveDestinationRunner::class.java) + + private const val DEPLOYMENT_MODE_KEY = EnvVariableFeatureFlags.DEPLOYMENT_MODE + private const val CLOUD_MODE = "CLOUD" + + @JvmStatic + fun baseOnEnv(): OssDestinationBuilder { + val mode = System.getenv(DEPLOYMENT_MODE_KEY) + return OssDestinationBuilder(mode) + } + + class OssDestinationBuilder(private val deploymentMode: String?) { + fun withOssDestination( + ossDestinationSupplier: Supplier + ): CloudDestinationBuilder { + return CloudDestinationBuilder(deploymentMode, ossDestinationSupplier) + } + } + + class CloudDestinationBuilder( + private val deploymentMode: String?, + private val ossDestinationSupplier: Supplier + ) { + fun withCloudDestination( + cloudDestinationSupplier: Supplier + ): Runner { + return Runner(deploymentMode, ossDestinationSupplier, cloudDestinationSupplier) + } + } + + class Runner( + private val deploymentMode: String?, + private val ossDestinationSupplier: Supplier, + private val cloudDestinationSupplier: Supplier + ) { + private val destination: Destination + get() { + LOGGER.info("Running destination under deployment mode: {}", deploymentMode) + if (deploymentMode != null && deploymentMode == CLOUD_MODE) { + return cloudDestinationSupplier.get() + } + if (deploymentMode == null) { + LOGGER.warn("Deployment mode is null, default to OSS mode") + } + return ossDestinationSupplier.get() + } + + @Throws(Exception::class) + fun run(args: Array) { + val destination = destination + LOGGER.info("Starting destination: {}", destination.javaClass.name) + IntegrationRunner(destination).run(args) + LOGGER.info("Completed destination: {}", destination.javaClass.name) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/adaptive/AdaptiveSourceRunner.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/adaptive/AdaptiveSourceRunner.kt new file mode 100644 index 0000000000000..73514b4a0a08f --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/adaptive/AdaptiveSourceRunner.kt @@ -0,0 +1,67 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.base.adaptive + +import io.airbyte.cdk.integrations.base.IntegrationRunner +import io.airbyte.cdk.integrations.base.Source +import io.airbyte.commons.features.EnvVariableFeatureFlags +import java.util.function.Supplier +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * This class launches different variants of a source connector based on where Airbyte is deployed. + */ +object AdaptiveSourceRunner { + private val LOGGER: Logger = LoggerFactory.getLogger(AdaptiveSourceRunner::class.java) + + const val DEPLOYMENT_MODE_KEY: String = EnvVariableFeatureFlags.DEPLOYMENT_MODE + const val CLOUD_MODE: String = "CLOUD" + + fun baseOnEnv(): OssSourceBuilder { + val mode = System.getenv(DEPLOYMENT_MODE_KEY) + return OssSourceBuilder(mode) + } + + class OssSourceBuilder(private val deploymentMode: String) { + fun withOssSource(ossSourceSupplier: Supplier): CloudSourceBuilder { + return CloudSourceBuilder(deploymentMode, ossSourceSupplier) + } + } + + class CloudSourceBuilder( + private val deploymentMode: String, + private val ossSourceSupplier: Supplier + ) { + fun withCloudSource(cloudSourceSupplier: Supplier): Runner { + return Runner(deploymentMode, ossSourceSupplier, cloudSourceSupplier) + } + } + + class Runner( + private val deploymentMode: String?, + private val ossSourceSupplier: Supplier, + private val cloudSourceSupplier: Supplier + ) { + private val source: Source + get() { + LOGGER.info("Running source under deployment mode: {}", deploymentMode) + if (deploymentMode != null && deploymentMode == CLOUD_MODE) { + return cloudSourceSupplier.get() + } + if (deploymentMode == null) { + LOGGER.warn("Deployment mode is null, default to OSS mode") + } + return ossSourceSupplier.get() + } + + @Throws(Exception::class) + fun run(args: Array) { + val source = source + LOGGER.info("Starting source: {}", source.javaClass.name) + IntegrationRunner(source).run(args) + LOGGER.info("Completed source: {}", source.javaClass.name) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/errors/messages/ErrorMessage.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/errors/messages/ErrorMessage.kt new file mode 100644 index 0000000000000..b842dec462390 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/errors/messages/ErrorMessage.kt @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.base.errors.messages + +import java.util.* + +object ErrorMessage { + // TODO: this could be built using a Builder design pattern instead of passing in 0 to indicate + // no + // errorCode exists + @JvmStatic + fun getErrorMessage( + stateCode: String?, + errorCode: Int, + message: String?, + exception: Exception + ): String { + return if (Objects.isNull(message)) { + configMessage(stateCode, 0, exception.message) + } else { + configMessage(stateCode, errorCode, message) + } + } + + private fun configMessage(stateCode: String?, errorCode: Int, message: String?): String { + val stateCodePart = + if (Objects.isNull(stateCode)) "" else String.format("State code: %s; ", stateCode) + val errorCodePart = if (errorCode == 0) "" else String.format("Error code: %s; ", errorCode) + return String.format("%s%sMessage: %s", stateCodePart, errorCodePart, message) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/spec_modification/SpecModifyingDestination.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/spec_modification/SpecModifyingDestination.kt new file mode 100644 index 0000000000000..45a93269b6efc --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/spec_modification/SpecModifyingDestination.kt @@ -0,0 +1,47 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.base.spec_modification + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.base.AirbyteMessageConsumer +import io.airbyte.cdk.integrations.base.Destination +import io.airbyte.cdk.integrations.base.SerializedAirbyteMessageConsumer +import io.airbyte.protocol.models.v0.AirbyteConnectionStatus +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import io.airbyte.protocol.models.v0.ConnectorSpecification +import java.util.function.Consumer + +abstract class SpecModifyingDestination(private val destination: Destination) : Destination { + @Throws(Exception::class) + abstract fun modifySpec(originalSpec: ConnectorSpecification): ConnectorSpecification + + @Throws(Exception::class) + override fun spec(): ConnectorSpecification { + return modifySpec(destination.spec()) + } + + @Throws(Exception::class) + override fun check(config: JsonNode): AirbyteConnectionStatus? { + return destination.check(config) + } + + @Throws(Exception::class) + override fun getConsumer( + config: JsonNode, + catalog: ConfiguredAirbyteCatalog, + outputRecordCollector: Consumer + ): AirbyteMessageConsumer? { + return destination.getConsumer(config, catalog, outputRecordCollector) + } + + @Throws(Exception::class) + override fun getSerializedMessageConsumer( + config: JsonNode, + catalog: ConfiguredAirbyteCatalog, + outputRecordCollector: Consumer + ): SerializedAirbyteMessageConsumer? { + return destination.getSerializedMessageConsumer(config, catalog, outputRecordCollector) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/spec_modification/SpecModifyingSource.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/spec_modification/SpecModifyingSource.kt new file mode 100644 index 0000000000000..f9c81b8c21a1b --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/spec_modification/SpecModifyingSource.kt @@ -0,0 +1,52 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.base.spec_modification + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.base.Source +import io.airbyte.commons.util.AutoCloseableIterator +import io.airbyte.protocol.models.v0.* + +/** + * In some cases we want to prune or mutate the spec for an existing source. The common case is that + * we want to remove features that are not appropriate for some reason. e.g. In cloud, we do not + * want to allow users to send data unencrypted. + */ +abstract class SpecModifyingSource(private val source: Source) : Source { + @Throws(Exception::class) + abstract fun modifySpec(originalSpec: ConnectorSpecification): ConnectorSpecification + + @Throws(Exception::class) + override fun spec(): ConnectorSpecification { + return modifySpec(source.spec()) + } + + @Throws(Exception::class) + override fun check(config: JsonNode): AirbyteConnectionStatus? { + return source.check(config) + } + + @Throws(Exception::class) + override fun discover(config: JsonNode): AirbyteCatalog { + return source.discover(config) + } + + @Throws(Exception::class) + override fun read( + config: JsonNode, + catalog: ConfiguredAirbyteCatalog, + state: JsonNode? + ): AutoCloseableIterator { + return source.read(config, catalog, state) + } + + @Throws(Exception::class) + override fun readStreams( + config: JsonNode, + catalog: ConfiguredAirbyteCatalog, + state: JsonNode? + ): Collection>? { + return source.readStreams(config, catalog, state) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/ssh/SshHelpers.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/ssh/SshHelpers.kt new file mode 100644 index 0000000000000..3c8d9b3214afc --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/ssh/SshHelpers.kt @@ -0,0 +1,48 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.base.ssh + +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.node.ObjectNode +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.resources.MoreResources +import io.airbyte.protocol.models.v0.ConnectorSpecification +import java.io.* +import java.util.* + +object SshHelpers { + @get:Throws(IOException::class) + @JvmStatic + val specAndInjectSsh: ConnectorSpecification? + get() = getSpecAndInjectSsh(Optional.empty()) + + @Throws(IOException::class) + @JvmStatic + fun getSpecAndInjectSsh(group: Optional): ConnectorSpecification? { + val originalSpec = + Jsons.deserialize( + MoreResources.readResource("spec.json"), + ConnectorSpecification::class.java + ) + return injectSshIntoSpec(originalSpec, group) + } + + @JvmOverloads + @Throws(IOException::class) + @JvmStatic + fun injectSshIntoSpec( + connectorSpecification: ConnectorSpecification, + group: Optional = Optional.empty() + ): ConnectorSpecification { + val originalSpec = Jsons.clone(connectorSpecification) + val propNode = originalSpec!!.connectionSpecification["properties"] as ObjectNode + val tunnelMethod = + Jsons.deserialize(MoreResources.readResource("ssh-tunnel-spec.json")) as ObjectNode + if (group.isPresent) { + tunnelMethod.put("group", group.get()) + } + propNode.set("tunnel_method", tunnelMethod) + return originalSpec + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/ssh/SshTunnel.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/ssh/SshTunnel.kt new file mode 100644 index 0000000000000..e254d1ad52c73 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/ssh/SshTunnel.kt @@ -0,0 +1,571 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.base.ssh + +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.base.Preconditions +import io.airbyte.cdk.integrations.base.AirbyteTraceMessageUtility +import io.airbyte.commons.exceptions.ConfigErrorException +import io.airbyte.commons.functional.CheckedConsumer +import io.airbyte.commons.functional.CheckedFunction +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.string.Strings +import java.io.* +import java.net.InetSocketAddress +import java.net.MalformedURLException +import java.net.URL +import java.security.* +import java.time.Duration +import java.util.* +import javax.validation.constraints.NotNull +import org.apache.sshd.client.SshClient +import org.apache.sshd.client.keyverifier.AcceptAllServerKeyVerifier +import org.apache.sshd.client.session.ClientSession +import org.apache.sshd.common.SshException +import org.apache.sshd.common.session.SessionHeartbeatController +import org.apache.sshd.common.util.net.SshdSocketAddress +import org.apache.sshd.common.util.security.SecurityUtils +import org.apache.sshd.core.CoreModuleProperties +import org.apache.sshd.server.forward.AcceptAllForwardingFilter +import org.bouncycastle.jce.provider.BouncyCastleProvider +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +// todo (cgardens) - this needs unit tests. it is currently tested transitively via source postgres +// integration tests. +/** + * Encapsulates the connection configuration for an ssh tunnel port forward through a proxy/bastion + * host plus the remote host and remote port to forward to a specified local port. + */ +open class SshTunnel +@JvmOverloads +constructor( + val originalConfig: JsonNode, + private val hostKey: List?, + private val portKey: List?, + private val endPointKey: String?, + remoteServiceUrl: String?, + tunnelMethod: TunnelMethod, + tunnelHost: String?, + tunnelPort: Int, + tunnelUser: String?, + sshKey: String?, + tunnelUserPassword: String?, + remoteServiceHost: String?, + remoteServicePort: Int, + connectionOptions: Optional? = Optional.empty() +) : AutoCloseable { + enum class TunnelMethod { + NO_TUNNEL, + SSH_PASSWORD_AUTH, + SSH_KEY_AUTH + } + + @JvmRecord + data class SshConnectionOptions( + val sessionHeartbeatInterval: Duration, + val globalHeartbeatInterval: Duration, + val idleTimeout: Duration + ) + + private val tunnelMethod: TunnelMethod + private var tunnelHost: String? = null + private var tunnelPort = 0 + private var tunnelUser: String? = null + private var sshKey: String? = null + private var remoteServiceProtocol: String? = null + private var remoteServicePath: String? = null + private var tunnelUserPassword: String? = null + private var remoteServiceHost: String? = null + private var remoteServicePort = 0 + protected var tunnelLocalPort: Int = 0 + + private var sshclient: SshClient? = null + private var tunnelSession: ClientSession? = null + + /** + * + * @param config + * - the full config that was passed to the source. + * @param hostKey + * - a list of keys that point to the database host name. should be pointing to where in the + * config remoteDatabaseHost is found. + * @param portKey + * - a list of keys that point to the database port. should be pointing to where in the config + * remoteDatabasePort is found. + * @param endPointKey + * - key that points to the endpoint URL (this is commonly used for REST-based services such as + * Elastic and MongoDB) + * @param remoteServiceUrl + * - URL of the remote endpoint (this is commonly used for REST-based * services such as Elastic + * and MongoDB) + * @param tunnelMethod + * - the type of ssh method that should be used (includes not using SSH at all). + * @param tunnelHost + * - host name of the machine to which we will establish an ssh connection (e.g. hostname of the + * bastion). + * @param tunnelPort + * - port of the machine to which we will establish an ssh connection. (e.g. port of the + * bastion). + * @param tunnelUser + * - user that is allowed to access the tunnelHost. + * @param sshKey + * - the ssh key that will be used to make the ssh connection. can be null if we are using + * tunnelUserPassword instead. + * @param tunnelUserPassword + * - the password for the tunnelUser. can be null if we are using sshKey instead. + * @param remoteServiceHost + * - the actual host name of the remote service (as it is known to the tunnel host). + * @param remoteServicePort + * - the actual port of the remote service (as it is known to the tunnel host). + * @param connectionOptions + * - optional connection options for ssh client. + */ + init { + Preconditions.checkNotNull(tunnelMethod) + this.tunnelMethod = tunnelMethod + + if (tunnelMethod == TunnelMethod.NO_TUNNEL) { + this.tunnelHost = null + this.tunnelPort = 0 + this.tunnelUser = null + this.sshKey = null + this.tunnelUserPassword = null + this.remoteServiceHost = null + this.remoteServicePort = 0 + this.remoteServiceProtocol = null + this.remoteServicePath = null + } else { + Preconditions.checkNotNull(tunnelHost) + Preconditions.checkArgument(tunnelPort > 0) + Preconditions.checkNotNull(tunnelUser) + if (tunnelMethod == TunnelMethod.SSH_KEY_AUTH) { + Preconditions.checkNotNull(sshKey) + } + if (tunnelMethod == TunnelMethod.SSH_PASSWORD_AUTH) { + Preconditions.checkNotNull(tunnelUserPassword) + } + // must provide either host/port or endpoint + Preconditions.checkArgument((hostKey != null && portKey != null) || endPointKey != null) + Preconditions.checkArgument( + (remoteServiceHost != null && remoteServicePort > 0) || remoteServiceUrl != null + ) + if (remoteServiceUrl != null) { + var urlObject: URL? = null + try { + urlObject = URL(remoteServiceUrl) + } catch (e: MalformedURLException) { + AirbyteTraceMessageUtility.emitConfigErrorTrace( + e, + String.format( + "Provided value for remote service URL is not valid: %s", + remoteServiceUrl + ) + ) + } + Preconditions.checkNotNull(urlObject, "Failed to parse URL of remote service") + this.remoteServiceHost = urlObject!!.host + this.remoteServicePort = urlObject.port + this.remoteServiceProtocol = urlObject.protocol + this.remoteServicePath = urlObject.path + } else { + this.remoteServiceProtocol = null + this.remoteServicePath = null + this.remoteServiceHost = remoteServiceHost + this.remoteServicePort = remoteServicePort + } + + this.tunnelHost = tunnelHost + this.tunnelPort = tunnelPort + this.tunnelUser = tunnelUser + this.sshKey = sshKey + this.tunnelUserPassword = tunnelUserPassword + this.sshclient = + connectionOptions!! + .map { sshConnectionOptions: SshConnectionOptions -> + createClient( + sshConnectionOptions.sessionHeartbeatInterval, + sshConnectionOptions.globalHeartbeatInterval, + sshConnectionOptions.idleTimeout + ) + } + .orElseGet { this.createClient() } + this.tunnelSession = openTunnel(sshclient!!) + } + } + + @get:Throws(Exception::class) + val configInTunnel: JsonNode + get() { + if (tunnelMethod == TunnelMethod.NO_TUNNEL) { + return originalConfig + } else { + val clone = Jsons.clone(originalConfig) + if (hostKey != null) { + Jsons.replaceNestedString( + clone, + hostKey, + SshdSocketAddress.LOCALHOST_ADDRESS.hostName + ) + } + if (portKey != null) { + Jsons.replaceNestedInt(clone, portKey, tunnelLocalPort) + } + if (endPointKey != null) { + val tunnelEndPointURL = + URL( + remoteServiceProtocol!!, + SshdSocketAddress.LOCALHOST_ADDRESS.hostName, + tunnelLocalPort, + remoteServicePath!! + ) + Jsons.replaceNestedString( + clone, + Arrays.asList(endPointKey), + tunnelEndPointURL.toString() + ) + } + return clone + } + } + + /** Closes a tunnel if one was open, and otherwise doesn't do anything (safe to run). */ + override fun close() { + try { + if (tunnelSession != null) { + tunnelSession!!.close() + tunnelSession = null + } + if (sshclient != null) { + sshclient!!.stop() + sshclient = null + } + } catch (t: Throwable) { + throw RuntimeException(t) + } + } + + @get:Throws(IOException::class, GeneralSecurityException::class) + val privateKeyPair: KeyPair + /** + * From the OPENSSH private key string, use mina-sshd to deserialize the key pair, + * reconstruct the keys from the key info, and return the key pair for use in + * authentication. + * + * @return The [KeyPair] to add - may not be `null` + * @see [loadKeyPairs + * ](https://javadoc.io/static/org.apache.sshd/sshd-common/2.8.0/org/apache/sshd/common/config/keys/loader/KeyPairResourceLoader.html.loadKeyPairs-org.apache.sshd.common.session.SessionContext-org.apache.sshd.common.util.io.resource.IoResource-org.apache.sshd.common.config.keys.FilePasswordProvider-) + */ + get() { + val validatedKey = validateKey() + val keyPairs = + SecurityUtils.getKeyPairResourceParser() + .loadKeyPairs(null, null, null, StringReader(validatedKey)) + + if (keyPairs != null && keyPairs.iterator().hasNext()) { + return keyPairs.iterator().next() + } + throw ConfigErrorException( + "Unable to load private key pairs, verify key pairs are properly inputted" + ) + } + + private fun validateKey(): String { + return sshKey!!.replace("\\n", "\n") + } + + /** + * Generates a new ssh client and returns it, with forwarding set to accept all types; use this + * before opening a tunnel. + */ + private fun createClient(): SshClient { + Security.addProvider(BouncyCastleProvider()) + val client = SshClient.setUpDefaultClient() + client.forwardingFilter = AcceptAllForwardingFilter.INSTANCE + client.serverKeyVerifier = AcceptAllServerKeyVerifier.INSTANCE + return client + } + + private fun createClient( + sessionHeartbeatInterval: Duration, + globalHeartbeatInterval: Duration, + idleTimeout: Duration + ): SshClient { + LOGGER.info("Creating SSH client with Heartbeat and Keepalive enabled") + val client = createClient() + // Session level heartbeat using SSH_MSG_IGNORE every second. + client.setSessionHeartbeat( + SessionHeartbeatController.HeartbeatType.IGNORE, + sessionHeartbeatInterval + ) + // idle-timeout zero indicates NoTimeout. + CoreModuleProperties.IDLE_TIMEOUT[client] = idleTimeout + // Use tcp keep-alive mechanism. + CoreModuleProperties.SOCKET_KEEPALIVE[client] = true + // Additional delay used for ChannelOutputStream to wait for space in the remote socket send + // buffer. + CoreModuleProperties.WAIT_FOR_SPACE_TIMEOUT[client] = Duration.ofMinutes(2) + // Global keepalive message sent every 2 seconds. This precedes the session level heartbeat. + CoreModuleProperties.HEARTBEAT_INTERVAL[client] = globalHeartbeatInterval + return client + } + + /** Starts an ssh session; wrap this in a try-finally and use closeTunnel() to close it. */ + open fun openTunnel(client: SshClient): ClientSession? { + try { + client!!.start() + val session = + client + .connect( + tunnelUser!!.trim { it <= ' ' }, + tunnelHost!!.trim { it <= ' ' }, + tunnelPort + ) + .verify(TIMEOUT_MILLIS.toLong()) + .session + if (tunnelMethod == TunnelMethod.SSH_KEY_AUTH) { + session.addPublicKeyIdentity(privateKeyPair) + } + if (tunnelMethod == TunnelMethod.SSH_PASSWORD_AUTH) { + session.addPasswordIdentity(tunnelUserPassword) + } + + session.auth().verify(TIMEOUT_MILLIS.toLong()) + val address = + session.startLocalPortForwarding( // entering 0 lets the OS pick a free port for us. + SshdSocketAddress( + InetSocketAddress.createUnresolved( + SshdSocketAddress.LOCALHOST_ADDRESS.hostName, + 0 + ) + ), + SshdSocketAddress(remoteServiceHost, remoteServicePort) + ) + + // discover the port that the OS picked and remember it so that we can use it when we + // try to connect + tunnelLocalPort = address.port + + LOGGER.info( + String.format( + "Established tunneling session to %s:%d. Port forwarding started on %s ", + remoteServiceHost, + remoteServicePort, + address.toInetSocketAddress() + ) + ) + return session + } catch (e: IOException) { + if ( + e is SshException && + e.message!! + .lowercase() + .contains("failed to get operation result within specified timeout") + ) { + throw ConfigErrorException(SSH_TIMEOUT_DISPLAY_MESSAGE, e) + } else { + throw RuntimeException(e) + } + } catch (e: GeneralSecurityException) { + if ( + e is SshException && + e.message!! + .lowercase() + .contains("failed to get operation result within specified timeout") + ) { + throw ConfigErrorException(SSH_TIMEOUT_DISPLAY_MESSAGE, e) + } else { + throw RuntimeException(e) + } + } + } + + override fun toString(): String { + return "SshTunnel{" + + "hostKey=" + + hostKey + + ", portKey=" + + portKey + + ", tunnelMethod=" + + tunnelMethod + + ", tunnelHost='" + + tunnelHost + + '\'' + + ", tunnelPort=" + + tunnelPort + + ", tunnelUser='" + + tunnelUser + + '\'' + + ", remoteServiceHost='" + + remoteServiceHost + + '\'' + + ", remoteServicePort=" + + remoteServicePort + + ", tunnelLocalPort=" + + tunnelLocalPort + + '}' + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(SshTunnel::class.java) + const val SSH_TIMEOUT_DISPLAY_MESSAGE: String = + "Timed out while opening a SSH Tunnel. Please double check the given SSH configurations and try again." + + const val CONNECTION_OPTIONS_KEY: String = "ssh_connection_options" + const val SESSION_HEARTBEAT_INTERVAL_KEY: String = "session_heartbeat_interval" + const val SESSION_HEARTBEAT_INTERVAL_DEFAULT_IN_MILLIS: Long = 1000 + const val GLOBAL_HEARTBEAT_INTERVAL_KEY: String = "global_heartbeat_interval" + const val GLOBAL_HEARTBEAT_INTERVAL_DEFAULT_IN_MILLIS: Long = 2000 + const val IDLE_TIMEOUT_KEY: String = "idle_timeout" + const val IDLE_TIMEOUT_DEFAULT_INFINITE: Long = 0 + + const val TIMEOUT_MILLIS: Int = 15000 // 15 seconds + + @JvmStatic + fun getInstance(config: JsonNode, hostKey: List, portKey: List): SshTunnel { + val tunnelMethod = + Jsons.getOptional(config, "tunnel_method", "tunnel_method") + .map { method: JsonNode -> + TunnelMethod.valueOf(method.asText().trim { it <= ' ' }) + } + .orElse(TunnelMethod.NO_TUNNEL) + LOGGER.info("Starting connection with method: {}", tunnelMethod) + + return SshTunnel( + config, + hostKey, + portKey, + null, + null, + tunnelMethod, + Strings.safeTrim(Jsons.getStringOrNull(config, "tunnel_method", "tunnel_host")), + Jsons.getIntOrZero(config, "tunnel_method", "tunnel_port"), + Strings.safeTrim(Jsons.getStringOrNull(config, "tunnel_method", "tunnel_user")), + Strings.safeTrim(Jsons.getStringOrNull(config, "tunnel_method", "ssh_key")), + Strings.safeTrim( + Jsons.getStringOrNull(config, "tunnel_method", "tunnel_user_password") + ), + Strings.safeTrim(Jsons.getStringOrNull(config, hostKey)), + Jsons.getIntOrZero(config, portKey), + getSshConnectionOptions(config) + ) + } + + private fun getSshConnectionOptions( + config: JsonNode? + ): @NotNull Optional? { + // piggybacking on JsonNode config to make it configurable at connector level. + val connectionOptionConfig = Jsons.getOptional(config, CONNECTION_OPTIONS_KEY) + val connectionOptions: Optional + if (connectionOptionConfig.isPresent) { + val connectionOptionsNode = connectionOptionConfig.get() + val sessionHeartbeatInterval = + Jsons.getOptional(connectionOptionsNode, SESSION_HEARTBEAT_INTERVAL_KEY) + .map { interval: JsonNode -> Duration.ofMillis(interval.asLong()) } + .orElse(Duration.ofSeconds(1)) + val globalHeartbeatInterval = + Jsons.getOptional(connectionOptionsNode, GLOBAL_HEARTBEAT_INTERVAL_KEY) + .map { interval: JsonNode -> Duration.ofMillis(interval.asLong()) } + .orElse(Duration.ofSeconds(2)) + val idleTimeout = + Jsons.getOptional(connectionOptionsNode, IDLE_TIMEOUT_KEY) + .map { interval: JsonNode -> Duration.ofMillis(interval.asLong()) } + .orElse(Duration.ZERO) + connectionOptions = + Optional.of( + SshConnectionOptions( + sessionHeartbeatInterval, + globalHeartbeatInterval, + idleTimeout + ) + ) + } else { + connectionOptions = Optional.empty() + } + return connectionOptions + } + + @Throws(Exception::class) + fun getInstance(config: JsonNode, endPointKey: String): SshTunnel { + val tunnelMethod = + Jsons.getOptional(config, "tunnel_method", "tunnel_method") + .map { method: JsonNode -> + TunnelMethod.valueOf(method.asText().trim { it <= ' ' }) + } + .orElse(TunnelMethod.NO_TUNNEL) + LOGGER.info("Starting connection with method: {}", tunnelMethod) + + return SshTunnel( + config, + null, + null, + endPointKey, + Jsons.getStringOrNull(config, endPointKey), + tunnelMethod, + Strings.safeTrim(Jsons.getStringOrNull(config, "tunnel_method", "tunnel_host")), + Jsons.getIntOrZero(config, "tunnel_method", "tunnel_port"), + Strings.safeTrim(Jsons.getStringOrNull(config, "tunnel_method", "tunnel_user")), + Strings.safeTrim(Jsons.getStringOrNull(config, "tunnel_method", "ssh_key")), + Strings.safeTrim( + Jsons.getStringOrNull(config, "tunnel_method", "tunnel_user_password") + ), + null, + 0, + getSshConnectionOptions(config) + ) + } + + @JvmStatic + @Throws(Exception::class) + fun sshWrap( + config: JsonNode, + hostKey: List, + portKey: List, + wrapped: CheckedConsumer + ) { + sshWrap(config, hostKey, portKey) { configInTunnel: JsonNode? -> + wrapped.accept(configInTunnel) + null + } + } + + @JvmStatic + @Throws(Exception::class) + fun sshWrap( + config: JsonNode, + endPointKey: String, + wrapped: CheckedConsumer + ) { + sshWrap(config, endPointKey) { configInTunnel: JsonNode? -> + wrapped.accept(configInTunnel) + null + } + } + + @JvmStatic + @Throws(Exception::class) + fun sshWrap( + config: JsonNode, + hostKey: List, + portKey: List, + wrapped: CheckedFunction + ): T { + getInstance(config, hostKey, portKey).use { sshTunnel -> + return wrapped.apply(sshTunnel.configInTunnel) + } + } + + @JvmStatic + @Throws(Exception::class) + fun sshWrap( + config: JsonNode, + endPointKey: String, + wrapped: CheckedFunction + ): T { + getInstance(config, endPointKey).use { sshTunnel -> + return wrapped.apply(sshTunnel.configInTunnel) + } + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/ssh/SshWrappedDestination.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/ssh/SshWrappedDestination.kt new file mode 100644 index 0000000000000..0a1ea1b26512d --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/ssh/SshWrappedDestination.kt @@ -0,0 +1,177 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.base.ssh + +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.node.ObjectNode +import io.airbyte.cdk.integrations.base.AirbyteMessageConsumer +import io.airbyte.cdk.integrations.base.AirbyteTraceMessageUtility +import io.airbyte.cdk.integrations.base.Destination +import io.airbyte.cdk.integrations.base.SerializedAirbyteMessageConsumer +import io.airbyte.commons.concurrency.VoidCallable +import io.airbyte.commons.functional.CheckedFunction +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.resources.MoreResources +import io.airbyte.protocol.models.v0.AirbyteConnectionStatus +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import io.airbyte.protocol.models.v0.ConnectorSpecification +import java.util.* +import java.util.function.Consumer +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * Decorates a Destination with an SSH Tunnel using the standard configuration that Airbyte uses for + * configuring SSH. + */ +class SshWrappedDestination : Destination { + private val delegate: Destination + private val hostKey: List? + private val portKey: List? + private val endPointKey: String? + + constructor(delegate: Destination, hostKey: List, portKey: List) { + this.delegate = delegate + this.hostKey = hostKey + this.portKey = portKey + this.endPointKey = null + } + + constructor(delegate: Destination, endPointKey: String) { + this.delegate = delegate + this.endPointKey = endPointKey + this.portKey = null + this.hostKey = null + } + + @Throws(Exception::class) + override fun spec(): ConnectorSpecification { + // inject the standard ssh configuration into the spec. + val originalSpec = delegate.spec() + val propNode = originalSpec!!.connectionSpecification["properties"] as ObjectNode + propNode.set( + "tunnel_method", + Jsons.deserialize(MoreResources.readResource("ssh-tunnel-spec.json")) + ) + return originalSpec + } + + @Throws(Exception::class) + override fun check(config: JsonNode): AirbyteConnectionStatus? { + try { + return if ((endPointKey != null)) + SshTunnel.Companion.sshWrap( + config, + endPointKey, + CheckedFunction { + config: JsonNode -> + delegate.check(config) + } + ) + else + SshTunnel.Companion.sshWrap( + config, + hostKey!!, + portKey!!, + CheckedFunction { + config: JsonNode -> + delegate.check(config) + } + ) + } catch (e: RuntimeException) { + val sshErrorMessage = + "Could not connect with provided SSH configuration. Error: " + e.message + AirbyteTraceMessageUtility.emitConfigErrorTrace(e, sshErrorMessage) + return AirbyteConnectionStatus() + .withStatus(AirbyteConnectionStatus.Status.FAILED) + .withMessage(sshErrorMessage) + } + } + + @Throws(Exception::class) + override fun getConsumer( + config: JsonNode, + catalog: ConfiguredAirbyteCatalog, + outputRecordCollector: Consumer + ): AirbyteMessageConsumer? { + val tunnel = getTunnelInstance(config) + + val delegateConsumer: AirbyteMessageConsumer? + try { + delegateConsumer = + delegate.getConsumer(tunnel.configInTunnel, catalog, outputRecordCollector) + } catch (e: Exception) { + LOGGER.error( + "Exception occurred while getting the delegate consumer, closing SSH tunnel", + e + ) + tunnel.close() + throw e + } + return AirbyteMessageConsumer.Companion.appendOnClose( + delegateConsumer, + VoidCallable { tunnel.close() } + ) + } + + @Throws(Exception::class) + override fun getSerializedMessageConsumer( + config: JsonNode, + catalog: ConfiguredAirbyteCatalog, + outputRecordCollector: Consumer + ): SerializedAirbyteMessageConsumer? { + val clone = Jsons.clone(config) + val connectionOptionsConfig: Optional = + Jsons.getOptional(clone, SshTunnel.Companion.CONNECTION_OPTIONS_KEY) + if (connectionOptionsConfig.isEmpty) { + LOGGER.info("No SSH connection options found, using defaults") + if (clone is ObjectNode) { // Defensive check, it will always be object node + val connectionOptions = clone.putObject(SshTunnel.Companion.CONNECTION_OPTIONS_KEY) + connectionOptions.put( + SshTunnel.Companion.SESSION_HEARTBEAT_INTERVAL_KEY, + SshTunnel.Companion.SESSION_HEARTBEAT_INTERVAL_DEFAULT_IN_MILLIS + ) + connectionOptions.put( + SshTunnel.Companion.GLOBAL_HEARTBEAT_INTERVAL_KEY, + SshTunnel.Companion.GLOBAL_HEARTBEAT_INTERVAL_DEFAULT_IN_MILLIS + ) + } + } + val tunnel = getTunnelInstance(clone) + val delegateConsumer: SerializedAirbyteMessageConsumer? + try { + delegateConsumer = + delegate.getSerializedMessageConsumer( + tunnel.configInTunnel, + catalog, + outputRecordCollector + ) + } catch (e: Exception) { + LOGGER.error( + "Exception occurred while getting the delegate consumer, closing SSH tunnel", + e + ) + tunnel.close() + throw e + } + return SerializedAirbyteMessageConsumer.Companion.appendOnClose( + delegateConsumer, + VoidCallable { tunnel.close() } + ) + } + + @Throws(Exception::class) + protected fun getTunnelInstance(config: JsonNode): SshTunnel { + return if ((endPointKey != null)) SshTunnel.Companion.getInstance(config, endPointKey) + else SshTunnel.Companion.getInstance(config, hostKey!!, portKey!!) + } + + override val isV2Destination: Boolean + get() = delegate.isV2Destination + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(SshWrappedDestination::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/ssh/SshWrappedSource.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/ssh/SshWrappedSource.kt new file mode 100644 index 0000000000000..0fc93b80a225c --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/ssh/SshWrappedSource.kt @@ -0,0 +1,119 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.base.ssh + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.base.AirbyteTraceMessageUtility +import io.airbyte.cdk.integrations.base.Source +import io.airbyte.commons.functional.CheckedFunction +import io.airbyte.commons.util.AutoCloseableIterator +import io.airbyte.commons.util.AutoCloseableIterators +import io.airbyte.protocol.models.v0.* +import java.util.* +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +class SshWrappedSource : Source { + private val delegate: Source + private val hostKey: List + private val portKey: List + private val sshGroup: Optional + + constructor(delegate: Source, hostKey: List, portKey: List) { + this.delegate = delegate + this.hostKey = hostKey + this.portKey = portKey + this.sshGroup = Optional.empty() + } + + constructor(delegate: Source, hostKey: List, portKey: List, sshGroup: String) { + this.delegate = delegate + this.hostKey = hostKey + this.portKey = portKey + this.sshGroup = Optional.of(sshGroup) + } + + @Throws(Exception::class) + override fun spec(): ConnectorSpecification { + return SshHelpers.injectSshIntoSpec(delegate.spec(), sshGroup) + } + + @Throws(Exception::class) + override fun check(config: JsonNode): AirbyteConnectionStatus? { + try { + return SshTunnel.Companion.sshWrap( + config, + hostKey, + portKey, + CheckedFunction { config: JsonNode + -> + delegate.check(config) + } + ) + } catch (e: RuntimeException) { + val sshErrorMessage = + "Could not connect with provided SSH configuration. Error: " + e.message + AirbyteTraceMessageUtility.emitConfigErrorTrace(e, sshErrorMessage) + return AirbyteConnectionStatus() + .withStatus(AirbyteConnectionStatus.Status.FAILED) + .withMessage(sshErrorMessage) + } + } + + @Throws(Exception::class) + override fun discover(config: JsonNode): AirbyteCatalog { + return SshTunnel.Companion.sshWrap( + config, + hostKey, + portKey, + CheckedFunction { config: JsonNode -> + delegate.discover(config) + } + ) + } + + @Throws(Exception::class) + override fun read( + config: JsonNode, + catalog: ConfiguredAirbyteCatalog, + state: JsonNode? + ): AutoCloseableIterator { + val tunnel: SshTunnel = SshTunnel.Companion.getInstance(config, hostKey, portKey) + val delegateRead: AutoCloseableIterator + try { + delegateRead = delegate.read(tunnel.configInTunnel, catalog, state) + } catch (e: Exception) { + LOGGER.error( + "Exception occurred while getting the delegate read iterator, closing SSH tunnel", + e + ) + tunnel.close() + throw e + } + return AutoCloseableIterators.appendOnClose(delegateRead) { tunnel.close() } + } + + @Throws(Exception::class) + override fun readStreams( + config: JsonNode, + catalog: ConfiguredAirbyteCatalog, + state: JsonNode? + ): Collection>? { + val tunnel: SshTunnel = SshTunnel.Companion.getInstance(config, hostKey, portKey) + try { + return delegate.readStreams(tunnel.configInTunnel, catalog, state) + } catch (e: Exception) { + LOGGER.error( + "Exception occurred while getting the delegate read stream iterators, closing SSH tunnel", + e + ) + tunnel.close() + throw e + } + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(SshWrappedSource::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/ssh/readme.md b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/ssh/readme.md new file mode 100644 index 0000000000000..f72da8f4384a1 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/base/ssh/readme.md @@ -0,0 +1,51 @@ +# Developing an SSH Connector + +## Goal +Easy development of any connector that needs the ability to connect to a resource via SSH Tunnel. + +## Overview +Our SSH connector support is designed to be easy to plug into any existing connector. There are a few major pieces to consider: +1. Add SSH Configuration to the Spec - for SSH, we need to take in additional configuration, so we need to inject extra fields into the connector configuration. +2. Add SSH Logic to the Connector - before the connector code begins to execute we need to start an SSH tunnel. This library provides logic to create that tunnel (and clean it up). +3. Acceptance Testing - it is a good practice to include acceptance testing for the SSH version of a connector for at least one of the SSH types (password or ssh key). While unit testing for the SSH functionality exists in this package (coming soon), high-level acceptance testing to make sure this feature works with the individual connector belongs in the connector. + +## How To + +### Add SSH Configuration to the Spec +1. The `SshHelpers` class provides 2 helper functions that injects the SSH configuration objects into a spec JsonSchema for an existing connector. Usually the `spec()` method for a connector looks like `Jsons.deserialize(MoreResources.readResource("spec.json"), ConnectorSpecification.class);`. These helpers are just injecting the ssh spec (`ssh-tunnel-spec.json`) into that spec. +2. You may need to update tests to reflect that new fields have been added to the spec. Usually updating the tests just requires using these helpers in the tests. + +### Add SSH Logic to the Connector +1. This package provides a Source decorated class to make it easy to add SSH logic to an existing source. Simply pass the source you want to wrap into the constructor of the `SshWrappedSource`. That class also requires two other fields: `hostKey` and `portKey`. Both of these fields are pointers to fields in the connector specification. The `hostKey` is a pointer to the field that hold the host of the resource you want to connect and `portKey` is the port. In a simple case, where the host name for a connector is just defined in the top-level `host` field, then `hostKey` would simply be: `["host"]`. If that field is nested, however, then it might be: `["database", "configuration", "host"]`. + +### Acceptance Testing +1. The only difference between existing acceptance testing and acceptance testing with SSH is that the configuration that is used for testing needs to contain additional fields. You can see the `Postgres Source ssh key creds` in lastpass to see an example of what that might look like. Those credentials leverage an existing bastion host in our test infrastructure. (As future work, we want to get rid of the need to use a static bastion server and instead do it in docker so we can run it all locally.) + +## Misc + +### How to wrap the protocol in an SSH Tunnel +For `spec()`, `check()`, and `discover()` wrapping the connector in an SSH tunnel is easier to think about because when they return all work is done and the tunnel can be closed. Thus, each of these methods can simply be wrapped in a try-with-resource of the SSH Tunnel. + +For `read()` and `write()` they return an iterator and consumer respectively that perform work that must happen within the SSH Tunnel after the method has returned. Therefore, the `close` function on the iterator and consumer have to handle closing the SSH tunnel; the methods themselves cannot just be wrapped in a try-with-resource. This is handled for you by the `SshWrappedSource`, but if you need to implement any of this manually you must take it into account. + +### Name Mangling +One of the least intuitive pieces of the SSH setup to follow is the replacement of host names and ports. The reason `SshWrappedSource` needs to know how to get the hostname and port of the database you are trying to connect to is that when it builds the SSH tunnel that forwards to the database, it needs to know the hostname and port so that the tunnel forwards requests to the right place. After the SSH tunnel is established and forwarding to the database, the connector code itself runs. + +There's a trick here though! The connector should NOT try to connect to the hostname and port of the database. Instead, it should be trying to connect to `localhost` and whatever port we are forwarding to the database. The `SshTunnel#sshWrap` removes the original host and port from the configuration for the connector and replaces it with `localhost` and the correct port. So from the connector code's point of view it is just operating on localhost. + +There is a tradeoff here. +* (Good) The way we have structured this allows users to configure a connector in the UI in a way that it is intuitive to user. They put in the host and port they think about referring to the database as (they don't need to worry about any of the localhost version). +* (Good) The connector code does not need to know anything about SSH, it can just operate on the host and port it gets (and we let SSH Tunnel handle swapping the names for us) which makes writing a connector easier. +* (Bad) The downside is that the `SshTunnel` logic is more complicated because it is absorbing all of this name swapping so that neither user nor connector developer need to worry about it. In our estimation, the good outweighs the extra complexity incurred here. + + +### Acceptance Testing via ssh tunnel using SshBastion and JdbcDatabaseContainer in Docker +1. The `SshBastion` class provides 3 helper functions: + `initAndStartBastion()`to initialize and start SSH Bastion server in Docker test container and creates new `Network` for bastion and tested jdbc container + `getTunnelConfig()`which return JsoneNode with all necessary configuration to establish ssh tunnel. Connection configuration for integration tests is now taken directly from container settings and does not require a real database connection + `stopAndCloseContainers` to stop and close SshBastion and JdbcDatabaseContainer at the end of the test + +## Future Work +* Add unit / integration testing for `ssh` package. +* Restructure spec so that instead of having `SSH Key Authentication` or `Password Authentication` options for `tunnel_method`, just have an `SSH` option and then within that `SSH` option have a `oneOf` for password or key. This is blocked because we cannot use `oneOf`s nested in `oneOf`s. +* Improve the process of acceptance testing by allowing doing acceptance testing using a bastion running in a docker container instead of having to use dedicated infrastructure and a static database. diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/NamingConventionTransformer.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/NamingConventionTransformer.kt new file mode 100644 index 0000000000000..b500c5025998d --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/NamingConventionTransformer.kt @@ -0,0 +1,55 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination + +/** + * Destination have their own Naming conventions (which characters are valid or rejected in + * identifiers names) This class transform a random string used to a valid identifier names for each + * specific destination. + */ +interface NamingConventionTransformer { + /** + * Handle Naming Conversions of an input name to output a valid identifier name for the desired + * destination. + * + * @param name of the identifier to check proper naming conventions + * @return modified name with invalid characters replaced by '_' and adapted for the chosen + * destination. + */ + fun getIdentifier(name: String): String + + /** + * Handle naming conversions of an input name to output a valid namespace for the desired + * destination. + */ + fun getNamespace(namespace: String): String + + /** + * Same as getIdentifier but returns also the name of the table for storing raw data + * + * @param name of the identifier to check proper naming conventions + * @return modified name with invalid characters replaced by '_' and adapted for the chosen + * destination. + */ + @Deprecated("as this is very SQL specific, prefer using getIdentifier instead") + fun getRawTableName(name: String): String + + /** + * Same as getIdentifier but returns also the name of the table for storing tmp data + * + * @param name of the identifier to check proper naming conventions + * @return modified name with invalid characters replaced by '_' and adapted for the chosen + * destination. + */ + @Deprecated("as this is very SQL specific, prefer using getIdentifier instead") + fun getTmpTableName(name: String): String + + fun getTmpTableName(streamName: String, randomSuffix: String): String { + return getTmpTableName(streamName) + } + + fun convertStreamName(input: String): String + + fun applyDefaultCase(input: String): String +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/PropertyNameSimplifyingDataTransformer.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/PropertyNameSimplifyingDataTransformer.kt new file mode 100644 index 0000000000000..86265630a0e9e --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/PropertyNameSimplifyingDataTransformer.kt @@ -0,0 +1,24 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.destination.async.deser.StreamAwareDataTransformer +import io.airbyte.protocol.models.v0.AirbyteRecordMessageMeta +import io.airbyte.protocol.models.v0.StreamDescriptor + +/** @see StandardNameTransformer.formatJsonPath for details on what this class does. */ +class PropertyNameSimplifyingDataTransformer : StreamAwareDataTransformer { + override fun transform( + streamDescriptor: StreamDescriptor?, + data: JsonNode?, + meta: AirbyteRecordMessageMeta?, + ): Pair { + if (data == null) { + return Pair(null, meta) + } + return Pair(StandardNameTransformer.formatJsonPath(data), meta) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/StandardNameTransformer.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/StandardNameTransformer.kt new file mode 100644 index 0000000000000..e6b05d4d8f3f8 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/StandardNameTransformer.kt @@ -0,0 +1,85 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.string.Strings +import io.airbyte.commons.text.Names +import io.airbyte.commons.util.MoreIterators +import java.util.stream.Collectors + +open class StandardNameTransformer : NamingConventionTransformer { + override fun getIdentifier(name: String): String { + return convertStreamName(name) + } + + /** Most destinations have the same naming requirement for namespace and stream names. */ + override fun getNamespace(namespace: String): String { + return convertStreamName(namespace) + } + + // @Deprecated see https://github.com/airbytehq/airbyte/issues/35333 + override fun getRawTableName(streamName: String): String { + return convertStreamName("_airbyte_raw_$streamName") + } + + override fun getTmpTableName(streamName: String): String { + return convertStreamName(Strings.addRandomSuffix("_airbyte_tmp", "_", 3) + "_" + streamName) + } + + override fun getTmpTableName(streamName: String, randomSuffix: String): String { + return convertStreamName("_airbyte_tmp" + "_" + randomSuffix + "_" + streamName) + } + + override fun convertStreamName(input: String): String { + return Names.toAlphanumericAndUnderscore(input) + } + + override fun applyDefaultCase(input: String): String { + return input + } + + companion object { + private const val NON_JSON_PATH_CHARACTERS_PATTERN = "['\"`]" + + /** + * Rebuild a JsonNode adding sanitized property names (a subset of special characters + * replaced by underscores) while keeping original property names too. This is needed by + * some destinations as their json extract functions have limitations on how such special + * characters are parsed. These naming rules may be different to schema/table/column naming + * conventions. + */ + @JvmStatic + fun formatJsonPath(root: JsonNode): JsonNode { + if (root.isObject) { + val properties: MutableMap = HashMap() + val keys = Jsons.keys(root) + for (key in keys) { + val property = root[key] + // keep original key + properties[key] = formatJsonPath(property) + } + for (key in keys) { + val property = root[key] + val formattedKey = key.replace(NON_JSON_PATH_CHARACTERS_PATTERN.toRegex(), "_") + if (!properties.containsKey(formattedKey)) { + // duplicate property in a formatted key to be extracted in normalization + properties[formattedKey] = formatJsonPath(property) + } + } + return Jsons.jsonNode>(properties) + } else if (root.isArray) { + return Jsons.jsonNode( + MoreIterators.toList(root.elements()) + .stream() + .map { root: JsonNode -> formatJsonPath(root) } + .collect(Collectors.toList()) + ) + } else { + return root + } + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/StreamSyncSummary.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/StreamSyncSummary.kt new file mode 100644 index 0000000000000..ba9688dcb80c5 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/StreamSyncSummary.kt @@ -0,0 +1,19 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination + +import java.util.* + +/** + * @param recordsWritten The number of records written to the stream, or empty if the caller does + * not track this information. (this is primarily for backwards-compatibility with the legacy + * destinations framework; new implementations should always provide this information). If this + * value is empty, consumers should assume that the sync wrote nonzero records for this stream. + */ +class StreamSyncSummary(val recordsWritten: Optional) { + + companion object { + @JvmField val DEFAULT: StreamSyncSummary = StreamSyncSummary(Optional.empty()) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/AirbyteFileUtils.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/AirbyteFileUtils.kt new file mode 100644 index 0000000000000..86ddf3be8a784 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/AirbyteFileUtils.kt @@ -0,0 +1,40 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.async + +import java.text.DecimalFormat + +/** + * Replicate the behavior of [org.apache.commons.io.FileUtils] to match the proclivities of Davin + * and Charles. Courteously written by ChatGPT. + */ +object AirbyteFileUtils { + private const val ONE_KB = 1024.0 + private const val ONE_MB = ONE_KB * 1024 + private const val ONE_GB = ONE_MB * 1024 + private const val ONE_TB = ONE_GB * 1024 + private val df = DecimalFormat("#.##") + + /** + * Replicate the behavior of [org.apache.commons.io.FileUtils] but instead of rounding down to + * the nearest whole number, it rounds to two decimal places. + * + * @param sizeInBytes size in bytes + * @return human-readable size + */ + fun byteCountToDisplaySize(sizeInBytes: Long): String { + return if (sizeInBytes < ONE_KB) { + df.format(sizeInBytes) + " bytes" + } else if (sizeInBytes < ONE_MB) { + df.format(sizeInBytes.toDouble() / ONE_KB) + " KB" + } else if (sizeInBytes < ONE_GB) { + df.format(sizeInBytes.toDouble() / ONE_MB) + " MB" + } else if (sizeInBytes < ONE_TB) { + df.format(sizeInBytes.toDouble() / ONE_GB) + " GB" + } else { + df.format(sizeInBytes.toDouble() / ONE_TB) + " TB" + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/AsyncStreamConsumer.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/AsyncStreamConsumer.kt new file mode 100644 index 0000000000000..291a4606ebda3 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/AsyncStreamConsumer.kt @@ -0,0 +1,222 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.async + +import com.google.common.base.Preconditions +import com.google.common.base.Strings +import io.airbyte.cdk.integrations.base.SerializedAirbyteMessageConsumer +import io.airbyte.cdk.integrations.destination.StreamSyncSummary +import io.airbyte.cdk.integrations.destination.async.buffers.BufferEnqueue +import io.airbyte.cdk.integrations.destination.async.buffers.BufferManager +import io.airbyte.cdk.integrations.destination.async.deser.AirbyteMessageDeserializer +import io.airbyte.cdk.integrations.destination.async.function.DestinationFlushFunction +import io.airbyte.cdk.integrations.destination.async.model.PartialAirbyteMessage +import io.airbyte.cdk.integrations.destination.async.state.FlushFailure +import io.airbyte.cdk.integrations.destination.buffered_stream_consumer.OnCloseFunction +import io.airbyte.cdk.integrations.destination.buffered_stream_consumer.OnStartFunction +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import io.airbyte.protocol.models.v0.StreamDescriptor +import io.github.oshai.kotlinlogging.KotlinLogging +import java.util.Optional +import java.util.concurrent.ConcurrentHashMap +import java.util.concurrent.ConcurrentMap +import java.util.concurrent.ExecutorService +import java.util.concurrent.Executors +import java.util.concurrent.atomic.AtomicLong +import java.util.function.Consumer +import java.util.stream.Collectors +import kotlin.jvm.optionals.getOrNull + +private val logger = KotlinLogging.logger {} + +/** + * Async version of the + * [io.airbyte.cdk.integrations.destination.buffered_stream_consumer.BufferedStreamConsumer]. + * + * With this consumer, a destination is able to continue reading records until hitting the maximum + * memory limit governed by [GlobalMemoryManager]. Record writing is decoupled via [FlushWorkers]. + * See the other linked class for more detail. + */ +class AsyncStreamConsumer( + outputRecordCollector: Consumer, + private val onStart: OnStartFunction, + private val onClose: OnCloseFunction, + onFlush: DestinationFlushFunction, + private val catalog: ConfiguredAirbyteCatalog, + private val bufferManager: BufferManager, + private val defaultNamespace: Optional, + private val flushFailure: FlushFailure = FlushFailure(), + workerPool: ExecutorService = Executors.newFixedThreadPool(5), + private val airbyteMessageDeserializer: AirbyteMessageDeserializer, +) : SerializedAirbyteMessageConsumer { + private val bufferEnqueue: BufferEnqueue = bufferManager.bufferEnqueue + private val flushWorkers: FlushWorkers = + FlushWorkers( + bufferManager.bufferDequeue, + onFlush, + outputRecordCollector, + flushFailure, + bufferManager.stateManager, + workerPool, + ) + private val streamNames: Set = + StreamDescriptorUtils.fromConfiguredCatalog( + catalog, + ) + + // Note that this map will only be populated for streams with nonzero records. + private val recordCounts: ConcurrentMap = ConcurrentHashMap() + + private var hasStarted = false + private var hasClosed = false + private var hasFailed = false + + internal constructor( + outputRecordCollector: Consumer, + onStart: OnStartFunction, + onClose: OnCloseFunction, + flusher: DestinationFlushFunction, + catalog: ConfiguredAirbyteCatalog, + bufferManager: BufferManager, + flushFailure: FlushFailure, + defaultNamespace: Optional, + ) : this( + outputRecordCollector, + onStart, + onClose, + flusher, + catalog, + bufferManager, + defaultNamespace, + flushFailure, + Executors.newFixedThreadPool(5), + AirbyteMessageDeserializer(), + ) + + @Throws(Exception::class) + override fun start() { + Preconditions.checkState(!hasStarted, "Consumer has already been started.") + hasStarted = true + + flushWorkers.start() + + logger.info { "${AsyncStreamConsumer::class.java} started." } + onStart.call() + } + + @Throws(Exception::class) + override fun accept( + message: String, + sizeInBytes: Int, + ) { + Preconditions.checkState(hasStarted, "Cannot accept records until consumer has started") + propagateFlushWorkerExceptionIfPresent() + /* + * intentionally putting extractStream outside the buffer manager so that if in the future we want + * to try to use a thread pool to partially deserialize to get record type and stream name, we can + * do it without touching buffer manager. + */ + val partialAirbyteMessage = + airbyteMessageDeserializer.deserializeAirbyteMessage( + message, + ) + if (AirbyteMessage.Type.RECORD == partialAirbyteMessage.type) { + if (Strings.isNullOrEmpty(partialAirbyteMessage.record?.namespace)) { + partialAirbyteMessage.record?.namespace = defaultNamespace.getOrNull() + } + validateRecord(partialAirbyteMessage) + + partialAirbyteMessage.record?.streamDescriptor?.let { + getRecordCounter(it).incrementAndGet() + } + } + bufferEnqueue.addRecord( + partialAirbyteMessage, + sizeInBytes + PARTIAL_DESERIALIZE_REF_BYTES, + defaultNamespace, + ) + } + + @Throws(Exception::class) + override fun close() { + Preconditions.checkState(hasStarted, "Cannot close; has not started.") + Preconditions.checkState(!hasClosed, "Has already closed.") + hasClosed = true + + // assume closing upload workers will flush all accepted records. + // we need to close the workers before closing the bufferManagers (and underlying buffers) + // or we risk in-memory data. + flushWorkers.close() + + bufferManager.close() + + val streamSyncSummaries = + streamNames + .stream() + .collect( + Collectors.toMap( + { streamDescriptor: StreamDescriptor -> streamDescriptor }, + { streamDescriptor: StreamDescriptor -> + StreamSyncSummary( + Optional.of(getRecordCounter(streamDescriptor).get()), + ) + }, + ), + ) + onClose.accept(hasFailed, streamSyncSummaries) + + // as this throws an exception, we need to be after all other close functions. + propagateFlushWorkerExceptionIfPresent() + logger.info { "${AsyncStreamConsumer::class.java} closed" } + } + + private fun getRecordCounter(streamDescriptor: StreamDescriptor): AtomicLong { + return recordCounts.computeIfAbsent( + streamDescriptor, + ) { + AtomicLong() + } + } + + @Throws(Exception::class) + private fun propagateFlushWorkerExceptionIfPresent() { + if (flushFailure.isFailed()) { + hasFailed = true + throw flushFailure.exception + } + } + + private fun validateRecord(message: PartialAirbyteMessage) { + val streamDescriptor = + StreamDescriptor() + .withNamespace(message.record?.namespace) + .withName(message.record?.stream) + // if stream is not part of list of streams to sync to then throw invalid stream exception + if (!streamNames.contains(streamDescriptor)) { + throwUnrecognizedStream(catalog, message) + } + } + + companion object { + // This is to account for the references when deserialization to a PartialAirbyteMessage. + // The calculation is as follows: PartialAirbyteMessage (4) + Max( PartialRecordMessage(4), + // PartialStateMessage(6)) with PartialStateMessage being larger with more nested objects + // within it. Using 8 bytes as we assumed a 64 bit JVM. + private const val PARTIAL_DESERIALIZE_REF_BYTES: Int = 10 * 8 + + private fun throwUnrecognizedStream( + catalog: ConfiguredAirbyteCatalog?, + message: PartialAirbyteMessage, + ) { + throw IllegalArgumentException( + "Message contained record from a stream that was not in the catalog. " + + "\ncatalog: ${Jsons.serialize(catalog)}, " + + "\nmessage: ${Jsons.serialize(message)}", + ) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/DetectStreamToFlush.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/DetectStreamToFlush.kt new file mode 100644 index 0000000000000..4e65a6d9826db --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/DetectStreamToFlush.kt @@ -0,0 +1,283 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.async + +import com.google.common.annotations.VisibleForTesting +import io.airbyte.cdk.integrations.destination.async.buffers.BufferDequeue +import io.airbyte.cdk.integrations.destination.async.function.DestinationFlushFunction +import io.airbyte.protocol.models.v0.StreamDescriptor +import io.github.oshai.kotlinlogging.KotlinLogging +import java.time.Clock +import java.time.Instant +import java.util.Optional +import java.util.concurrent.ConcurrentHashMap +import java.util.concurrent.ConcurrentMap +import java.util.concurrent.atomic.AtomicBoolean +import java.util.stream.Collectors +import kotlin.math.min + +private val logger = KotlinLogging.logger {} + +/** This class finds the best, next stream to flush. */ +class DetectStreamToFlush +@VisibleForTesting +internal constructor( + private val bufferDequeue: BufferDequeue, + private val runningFlushWorkers: RunningFlushWorkers, + private val isClosing: AtomicBoolean, + private val flusher: DestinationFlushFunction, + private val nowProvider: Clock, +) { + private val latestFlushTimeMsPerStream: ConcurrentMap = + ConcurrentHashMap() + + constructor( + bufferDequeue: BufferDequeue, + runningFlushWorkers: RunningFlushWorkers, + isClosing: AtomicBoolean, + flusher: DestinationFlushFunction, + ) : this(bufferDequeue, runningFlushWorkers, isClosing, flusher, Clock.systemUTC()) + + val nextStreamToFlush: Optional + /** + * Get the best, next stream that is ready to be flushed. + * + * @return best, next stream to flush. If no stream is ready to be flushed, return empty. + */ + get() = getNextStreamToFlush(computeQueueThreshold()) + + /** + * We have a minimum threshold for the size of a queue before we will flush it. The threshold + * helps us avoid uploading small amounts of data at a time, which is really resource + * inefficient. Depending on certain conditions, we dynamically adjust this threshold. + * + * Rules: + * * default - By default the, the threshold is a set at a constant: + * QUEUE_FLUSH_THRESHOLD_BYTES. + * * memory pressure - If we are getting close to maxing out available memory, we reduce it to + * zero. This helps in the case where there are a lot of streams, so total memory usage is high, + * but each individual queue isn't that large. + * * closing - If the Flush Worker is closing, we reduce it to zero. We close when all records + * have been added to the queue, at which point, our goal is to flush out any non-empty queues. + * + * @return based on the conditions, the threshold in bytes. + */ + @VisibleForTesting + fun computeQueueThreshold(): Long { + val isBuffer90Full = + EAGER_FLUSH_THRESHOLD <= + bufferDequeue.totalGlobalQueueSizeBytes.toDouble() / bufferDequeue.maxQueueSizeBytes + // when we are closing or queues are very full, flush regardless of how few items are in the + // queue. + return if (isClosing.get() || isBuffer90Full) 0 else flusher.queueFlushThresholdBytes + } + + // todo (cgardens) - improve prioritization by getting a better estimate of how much data + // running + // workers will process. we have access to their batch sizes after all! + + /** + * Iterates over streams until it finds one that is ready to flush. Streams are ordered by + * priority. Return an empty optional if no streams are ready. + * + * A stream is ready to flush if it either meets a size threshold or a time threshold. See + * [.isSizeTriggered] and [.isTimeTriggered] for details on these triggers. + * + * @param queueSizeThresholdBytes + * - the size threshold to use for determining if a stream is ready to flush. + * @return the next stream to flush. if no stream is ready to flush, empty. + */ + @VisibleForTesting + fun getNextStreamToFlush(queueSizeThresholdBytes: Long): Optional { + for (stream in orderStreamsByPriority(bufferDequeue.bufferedStreams)) { + val latestFlushTimeMs = + latestFlushTimeMsPerStream.computeIfAbsent( + stream, + ) { _: StreamDescriptor -> + nowProvider.millis() + } + val isTimeTriggeredResult = isTimeTriggered(latestFlushTimeMs) + val isSizeTriggeredResult = isSizeTriggered(stream, queueSizeThresholdBytes) + + val debugString = + "trigger info: ${stream.namespace} - ${stream.name}, " + + "${isTimeTriggeredResult.second} , ${isSizeTriggeredResult.second}" + logger.debug { "computed: $debugString" } + + if (isSizeTriggeredResult.first || isTimeTriggeredResult.first) { + logger.info { "flushing: $debugString" } + latestFlushTimeMsPerStream[stream] = nowProvider.millis() + return Optional.of(stream) + } + } + return Optional.empty() + } + + /** + * The time trigger is based on the last time a record was added to the queue. We don't want + * records to sit forever, even if the queue is not that full (bad for time to value for users). + * Also, the more time passes since a record was added, the less likely another record is coming + * (caveat is CDC where it's random). + * + * This method also returns debug string with info that about the computation. We do it this + * way, so that the debug info that is printed is exactly what is used in the computation. + * + * @param latestFlushTimeMs latestFlushTimeMs + * @return is time triggered and a debug string + */ + @VisibleForTesting + fun isTimeTriggered(latestFlushTimeMs: Long): Pair { + val timeSinceLastFlushMs = nowProvider.millis() - latestFlushTimeMs + val isTimeTriggered = timeSinceLastFlushMs >= MAX_TIME_BETWEEN_FLUSH_MS + val debugString = "time trigger: $isTimeTriggered" + + return Pair(isTimeTriggered, debugString) + } + + /** + * For the size threshold, the size of the data in the queue is compared to the threshold that + * is passed into this method. + * + * One caveat, is that if that stream already has a worker running, we "penalize" its size. We + * do this by computing what the size of the queue would be after the running workers for that + * queue complete. This is based on a dumb estimate of how much data a worker can process. There + * is an opportunity for optimization here, by being smarter about predicting how much data a + * running worker is likely to process. + * + * This method also returns debug string with info that about the computation. We do it this + * way, so that the debug info that is printed is exactly what is used in the computation. + * + * @param stream stream + * @param queueSizeThresholdBytes min size threshold to determine if a queue is ready to flush + * @return is size triggered and a debug string + */ + @VisibleForTesting + fun isSizeTriggered( + stream: StreamDescriptor, + queueSizeThresholdBytes: Long, + ): Pair { + val currentQueueSize = bufferDequeue.getQueueSizeBytes(stream).orElseThrow() + val sizeOfRunningWorkersEstimate = estimateSizeOfRunningWorkers(stream, currentQueueSize) + val queueSizeAfterRunningWorkers = currentQueueSize - sizeOfRunningWorkersEstimate + val isSizeTriggered = queueSizeAfterRunningWorkers > queueSizeThresholdBytes + + val debugString = + "size trigger: $isSizeTriggered " + + "current threshold b: ${AirbyteFileUtils.byteCountToDisplaySize(queueSizeThresholdBytes)}, " + + "queue size b: ${AirbyteFileUtils.byteCountToDisplaySize(currentQueueSize)}, " + + "penalty b: ${AirbyteFileUtils.byteCountToDisplaySize(sizeOfRunningWorkersEstimate)}, " + + "after penalty b: ${AirbyteFileUtils.byteCountToDisplaySize(queueSizeAfterRunningWorkers)}" + + return Pair(isSizeTriggered, debugString) + } + + /** + * For a stream, determines how many bytes will be processed by CURRENTLY running workers. For + * the purpose of this calculation, workers can be in one of two states. First, they can have a + * batch, in which case, we can read the size in bytes from the batch to know how many records + * that batch will pull of the queue. Second, it might not have a batch yet, in which case, we + * assume the min of bytes in the queue or the optimal flush size. + * + * @param stream stream + * @return estimate of records remaining to be process + */ + @VisibleForTesting + fun estimateSizeOfRunningWorkers( + stream: StreamDescriptor, + currentQueueSize: Long, + ): Long { + val runningWorkerBatchesSizes = + runningFlushWorkers.getSizesOfRunningWorkerBatches( + stream, + ) + val workersWithBatchesSize = + runningWorkerBatchesSizes + .stream() + .filter { obj: Optional -> obj.isPresent } + .mapToLong { obj: Optional -> obj.get() } + .sum() + val workersWithoutBatchesCount = + runningWorkerBatchesSizes.stream().filter { obj: Optional -> obj.isEmpty }.count() + val workersWithoutBatchesSizeEstimate = + (min( + flusher.optimalBatchSizeBytes.toDouble(), + currentQueueSize.toDouble(), + ) * workersWithoutBatchesCount) + .toLong() + return (workersWithBatchesSize + workersWithoutBatchesSizeEstimate) + } + + // todo (cgardens) - perf test whether it would make sense to flip 1 & 2. + + /** + * Sort stream descriptors in order of priority with which we would want to flush them. + * + * Priority is in the following order: + * * 1. size in queue (descending) + * * 2. time since last record (ascending) + * * 3. alphabetical by namespace + stream name. + * + * In other words, move the biggest queues first, because they are most likely to use available + * resources optimally. Then get rid of old stuff (time to value for the user and, generally, as + * the age of the last record grows, the likelihood of getting any more records from that stream + * decreases, so by flushing them, we can totally complete that stream). Finally, tertiary sort + * by name so the order is deterministic. + * + * @param streams streams to sort. + * @return streams sorted by priority. + */ + @VisibleForTesting + fun orderStreamsByPriority(streams: Set): List { + // eagerly pull attributes so that values are consistent throughout comparison + val sdToQueueSize = + streams + .stream() + .collect( + Collectors.toMap( + { s: StreamDescriptor -> s }, + { streamDescriptor: StreamDescriptor -> + bufferDequeue.getQueueSizeBytes( + streamDescriptor, + ) + }, + ), + ) + + val sdToTimeOfLastRecord = + streams + .stream() + .collect( + Collectors.toMap( + { s: StreamDescriptor -> s }, + { streamDescriptor: StreamDescriptor -> + bufferDequeue.getTimeOfLastRecord( + streamDescriptor, + ) + }, + ), + ) + + return streams + .stream() + .sorted( + Comparator.comparing( + { s: StreamDescriptor -> sdToQueueSize[s]!!.orElseThrow() }, + Comparator.reverseOrder(), + ) // if no time is present, it suggests the queue has no records. set MAX time + // as a sentinel value to + // represent no records. + .thenComparing { s: StreamDescriptor -> + sdToTimeOfLastRecord[s]!!.orElse(Instant.MAX) + } + .thenComparing { s: StreamDescriptor -> s.namespace + s.name }, + ) + .collect(Collectors.toList()) + } + + companion object { + private const val EAGER_FLUSH_THRESHOLD = 0.90 + private const val MAX_TIME_BETWEEN_FLUSH_MS = (5 * 60 * 1000).toLong() + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/FlushWorkers.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/FlushWorkers.kt new file mode 100644 index 0000000000000..aa4d0baae0604 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/FlushWorkers.kt @@ -0,0 +1,283 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.async + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings +import io.airbyte.cdk.integrations.destination.async.buffers.StreamAwareQueue +import io.airbyte.cdk.integrations.destination.async.function.DestinationFlushFunction +import io.airbyte.cdk.integrations.destination.async.state.FlushFailure +import io.airbyte.cdk.integrations.destination.async.state.GlobalAsyncStateManager +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.StreamDescriptor +import io.github.oshai.kotlinlogging.KotlinLogging +import java.util.UUID +import java.util.concurrent.ExecutorService +import java.util.concurrent.Executors +import java.util.concurrent.ScheduledExecutorService +import java.util.concurrent.ThreadPoolExecutor +import java.util.concurrent.TimeUnit +import java.util.concurrent.atomic.AtomicBoolean +import java.util.function.Consumer +import java.util.stream.Collectors + +private val logger = KotlinLogging.logger {} + +/** + * Parallel flushing of Destination data. + * + * In combination with a [DestinationFlushFunction] and the [.workerPool], this class allows for + * parallel data flushing. + * + * Parallelising is important as it 1) minimises Destination backpressure 2) minimises the effect of + * IO pauses on Destination performance. The second point is particularly important since a majority + * of Destination work is IO bound. + * + * The [.supervisorThread] assigns work to worker threads by looping over [.bufferDequeue] - a + * dequeue interface over in-memory queues of [AirbyteMessage]. See [.retrieveWork] for assignment + * logic. + * + * Within a worker thread, a worker best-effort reads a + * [DestinationFlushFunction.optimalBatchSizeBytes] batch from the in-memory stream and calls + * [DestinationFlushFunction.flush] on the returned data. + */ +@SuppressFBWarnings(value = ["NP_PARAMETER_MUST_BE_NONNULL_BUT_MARKED_AS_NULLABLE"]) +class FlushWorkers +@JvmOverloads +constructor( + private val bufferDequeue: io.airbyte.cdk.integrations.destination.async.buffers.BufferDequeue, + private val flusher: DestinationFlushFunction, + private val outputRecordCollector: Consumer, + private val flushFailure: FlushFailure, + private val stateManager: GlobalAsyncStateManager, + private val workerPool: ExecutorService = Executors.newFixedThreadPool(5), +) : AutoCloseable { + private val supervisorThread: ScheduledExecutorService = Executors.newScheduledThreadPool(1) + private val debugLoop: ScheduledExecutorService = Executors.newSingleThreadScheduledExecutor() + private val runningFlushWorkers = RunningFlushWorkers() + private val detectStreamToFlush: DetectStreamToFlush + + private val isClosing = AtomicBoolean(false) + + init { + detectStreamToFlush = + DetectStreamToFlush( + bufferDequeue, + runningFlushWorkers, + isClosing, + flusher, + ) + } + + fun start() { + logger.info { "Start async buffer supervisor" } + supervisorThread.scheduleAtFixedRate( + { this.retrieveWork() }, + SUPERVISOR_INITIAL_DELAY_SECS, + SUPERVISOR_PERIOD_SECS, + TimeUnit.SECONDS, + ) + debugLoop.scheduleAtFixedRate( + { this.printWorkerInfo() }, + DEBUG_INITIAL_DELAY_SECS, + DEBUG_PERIOD_SECS, + TimeUnit.SECONDS, + ) + } + + private fun retrieveWork() { + try { + // This will put a new log line every second which is too much, sampling it doesn't + // bring much value + // so it is set to debug + logger.debug { "Retrieve Work -- Finding queues to flush" } + val threadPoolExecutor = workerPool as ThreadPoolExecutor + var allocatableThreads = + threadPoolExecutor.maximumPoolSize - threadPoolExecutor.activeCount + + while (allocatableThreads > 0) { + val next = detectStreamToFlush.nextStreamToFlush + + if (next.isPresent) { + val desc = next.get() + val flushWorkerId = UUID.randomUUID() + runningFlushWorkers.trackFlushWorker(desc, flushWorkerId) + allocatableThreads-- + flush(desc, flushWorkerId) + } else { + break + } + } + } catch (e: Exception) { + logger.error(e) { "Flush worker error: " } + flushFailure.propagateException(e) + throw RuntimeException(e) + } + } + + private fun printWorkerInfo() { + val workerInfo = StringBuilder().append("[ASYNC WORKER INFO] ") + + val threadPoolExecutor = workerPool as ThreadPoolExecutor + + val queueSize = threadPoolExecutor.queue.size + val activeCount = threadPoolExecutor.activeCount + + workerInfo.append("Pool queue size: $queueSize, Active threads: $activeCount") + logger.info { workerInfo.toString() } + } + + private fun flush( + desc: StreamDescriptor, + flushWorkerId: UUID, + ) { + workerPool.submit { + logger.info { + "Flush Worker (${humanReadableFlushWorkerId( + flushWorkerId, + )}) -- Worker picked up work." + } + try { + logger.info { + "Flush Worker (${humanReadableFlushWorkerId( + flushWorkerId, + )}) -- Attempting to read from queue namespace: ${desc.namespace}, stream: ${desc.name}." + } + + bufferDequeue.take(desc, flusher.optimalBatchSizeBytes).use { batch -> + runningFlushWorkers.registerBatchSize( + desc, + flushWorkerId, + batch.sizeInBytes, + ) + val stateIdToCount = + batch.data + .stream() + .map(StreamAwareQueue.MessageWithMeta::stateId) + .collect( + Collectors.groupingBy( + { stateId: Long -> stateId }, + Collectors.counting(), + ), + ) + logger.info { + "Flush Worker (${humanReadableFlushWorkerId( + flushWorkerId, + )}) -- Batch contains: ${batch.data.size} records, ${AirbyteFileUtils.byteCountToDisplaySize( + batch.sizeInBytes, + )} bytes." + } + + flusher.flush( + desc, + batch.data + .stream() + .map( + StreamAwareQueue.MessageWithMeta::message, + ), + ) + batch.flushStates(stateIdToCount, outputRecordCollector) + } + logger.info { + "Flush Worker (${humanReadableFlushWorkerId( + flushWorkerId, + )}) -- Worker finished flushing. Current queue size: ${bufferDequeue.getQueueSizeInRecords( + desc, + ).orElseThrow()}" + } + } catch (e: Exception) { + logger.error(e) { + "Flush Worker (${humanReadableFlushWorkerId( + flushWorkerId, + )}) -- flush worker error: " + } + flushFailure.propagateException(e) + throw RuntimeException(e) + } finally { + runningFlushWorkers.completeFlushWorker(desc, flushWorkerId) + } + } + } + + @Throws(Exception::class) + override fun close() { + logger.info { "Closing flush workers -- waiting for all buffers to flush" } + isClosing.set(true) + // wait for all buffers to be flushed. + while (true) { + val streamDescriptorToRemainingRecords = + bufferDequeue.bufferedStreams + .stream() + .collect( + Collectors.toMap( + { desc: StreamDescriptor -> desc }, + { desc: StreamDescriptor -> + bufferDequeue.getQueueSizeInRecords(desc).orElseThrow() + }, + ), + ) + + val anyRecordsLeft = + streamDescriptorToRemainingRecords.values.stream().anyMatch { size: Long -> + size > 0 + } + + if (!anyRecordsLeft) { + break + } + + val workerInfo = + StringBuilder() + .append( + "REMAINING_BUFFERS_INFO", + ) + .append(System.lineSeparator()) + streamDescriptorToRemainingRecords.entries + .stream() + .filter { entry: Map.Entry -> entry.value > 0 } + .forEach { entry: Map.Entry -> + workerInfo.append( + String.format( + " Namespace: %s Stream: %s -- remaining records: %d", + entry.key.namespace, + entry.key.name, + entry.value, + ), + ) + } + logger.info { workerInfo.toString() } + logger.info { "Waiting for all streams to flush." } + Thread.sleep(1000) + } + logger.info { "Closing flush workers -- all buffers flushed" } + + // before shutting down the supervisor, flush all state. + stateManager.flushStates(outputRecordCollector) + supervisorThread.shutdown() + while (!supervisorThread.awaitTermination(5L, TimeUnit.MINUTES)) { + logger.info { "Waiting for flush worker supervisor to shut down" } + } + logger.info { "Closing flush workers -- supervisor shut down" } + + logger.info { "Closing flush workers -- Starting worker pool shutdown.." } + workerPool.shutdown() + while (!workerPool.awaitTermination(5L, TimeUnit.MINUTES)) { + logger.info { "Waiting for flush workers to shut down" } + } + logger.info { "Closing flush workers -- workers shut down" } + + debugLoop.shutdownNow() + } + + companion object { + private const val SUPERVISOR_INITIAL_DELAY_SECS = 0L + private const val SUPERVISOR_PERIOD_SECS = 1L + private const val DEBUG_INITIAL_DELAY_SECS = 0L + private const val DEBUG_PERIOD_SECS = 60L + + private fun humanReadableFlushWorkerId(flushWorkerId: UUID): String { + return flushWorkerId.toString().substring(0, 5) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/GlobalMemoryManager.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/GlobalMemoryManager.kt new file mode 100644 index 0000000000000..b63aac047a71f --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/GlobalMemoryManager.kt @@ -0,0 +1,98 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.async + +import io.github.oshai.kotlinlogging.KotlinLogging +import java.util.concurrent.atomic.AtomicLong +import kotlin.math.min +import org.apache.commons.io.FileUtils + +private val logger = KotlinLogging.logger {} + +/** + * Responsible for managing buffer memory across multiple queues in a thread-safe way. This does not + * allocate or free memory in the traditional sense, but rather manages based off memory estimates + * provided by the callers. + * + * The goal is to enable maximum allowed memory bounds for each queue to be dynamically adjusted + * according to the overall available memory. Memory blocks are managed in chunks of + * [.BLOCK_SIZE_BYTES], and the total amount of memory managed is configured at creation time. + * + * As a destination has no information about incoming per-stream records, having static queue sizes + * can cause unnecessary backpressure on a per-stream basis. By providing a dynamic, global view of + * buffer memory management, this class allows each queue to release and request memory dynamically, + * enabling effective sharing of global memory resources across all the queues, and avoiding + * accidental stream backpressure. + * + * This becomes particularly useful in the following scenarios: + * + * * 1. When the incoming records belong to a single stream. Dynamic allocation ensures this one + * stream can utilise all memory. + * * 2. When the incoming records are from multiple streams, such as with Change Data Capture (CDC). + * Here, dynamic allocation let us create as many queues as possible, allowing all streams to be + * processed in parallel without accidental backpressure from unnecessary eager flushing. + */ +class GlobalMemoryManager(val maxMemoryBytes: Long) { + val currentMemoryBytes = AtomicLong(0) + + fun getCurrentMemoryBytes(): Long { + return currentMemoryBytes.get() + } + + /** + * Requests a block of memory of [.BLOCK_SIZE_BYTES]. Return 0 if memory cannot be freed. + * + * @return the size of the allocated block, in bytes + */ + @Synchronized + fun requestMemory(): Long { + // todo(davin): what happens if the incoming record is larger than 30MB? + if (currentMemoryBytes.get() >= maxMemoryBytes) { + return 0L + } + + val freeMem = maxMemoryBytes - currentMemoryBytes.get() + // Never allocate more than free memory size. + val toAllocateBytes = min(freeMem.toDouble(), BLOCK_SIZE_BYTES.toDouble()).toLong() + currentMemoryBytes.addAndGet(toAllocateBytes) + + logger.debug { + "Memory Requested: max: ${FileUtils.byteCountToDisplaySize( + maxMemoryBytes, + )}, allocated: ${FileUtils.byteCountToDisplaySize( + currentMemoryBytes.get(), + )}, allocated in this request: ${FileUtils.byteCountToDisplaySize(toAllocateBytes)}" + } + return toAllocateBytes + } + + /** + * Releases a block of memory of the given size. If the amount of memory released exceeds the + * current memory allocation, a warning will be logged. + * + * @param bytes the size of the block to free, in bytes + */ + fun free(bytes: Long) { + logger.info { "Freeing $bytes bytes.." } + currentMemoryBytes.addAndGet(-bytes) + + val currentMemory = currentMemoryBytes.get() + if (currentMemory < 0) { + logger.info { "Freed more memory than allocated ($bytes of ${currentMemory + bytes })" } + } + } + + companion object { + // In cases where a queue is rapidly expanding, a larger block size allows less allocation + // calls. On + // the flip size, a smaller block size allows more granular memory management. Since this + // overhead + // is minimal for now, err on a smaller block sizes. + const val BLOCK_SIZE_BYTES: Long = + (10 * 1024 * 1024 // 10MB + ) + .toLong() + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/RunningFlushWorkers.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/RunningFlushWorkers.kt new file mode 100644 index 0000000000000..f6205fd6401d9 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/RunningFlushWorkers.kt @@ -0,0 +1,98 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.async + +import com.google.common.base.Preconditions +import io.airbyte.protocol.models.v0.StreamDescriptor +import java.util.Optional +import java.util.UUID +import java.util.concurrent.ConcurrentHashMap +import java.util.concurrent.ConcurrentMap + +/** + * Track the number of flush workers (and their size) that are currently running for a given stream. + */ +class RunningFlushWorkers { + private val streamToFlushWorkerToBatchSize: + ConcurrentMap>> = + ConcurrentHashMap() + + /** + * Call this when a worker starts flushing a stream. + * + * @param stream the stream that is being flushed + * @param flushWorkerId flush worker id + */ + fun trackFlushWorker( + stream: StreamDescriptor, + flushWorkerId: UUID, + ) { + streamToFlushWorkerToBatchSize + .computeIfAbsent( + stream, + ) { + ConcurrentHashMap() + } + .computeIfAbsent( + flushWorkerId, + ) { + Optional.empty() + } + } + + /** + * Call this when a worker completes flushing a stream. + * + * @param stream the stream that was flushed + * @param flushWorkerId flush worker id + */ + fun completeFlushWorker( + stream: StreamDescriptor, + flushWorkerId: UUID, + ) { + Preconditions.checkState( + streamToFlushWorkerToBatchSize.containsKey(stream) && + streamToFlushWorkerToBatchSize[stream]!!.containsKey(flushWorkerId), + "Cannot complete flush worker for stream that has not started.", + ) + streamToFlushWorkerToBatchSize[stream]!!.remove(flushWorkerId) + if (streamToFlushWorkerToBatchSize[stream]!!.isEmpty()) { + streamToFlushWorkerToBatchSize.remove(stream) + } + } + + /** + * When a worker gets a batch of records, register its size so that it can be referenced for + * estimating how many records will be left in the queue after the batch is done. + * + * @param stream stream + * @param batchSize batch size + */ + fun registerBatchSize( + stream: StreamDescriptor, + flushWorkerId: UUID, + batchSize: Long, + ) { + Preconditions.checkState( + (streamToFlushWorkerToBatchSize.containsKey(stream) && + streamToFlushWorkerToBatchSize[stream]!!.containsKey(flushWorkerId)), + "Cannot register a batch size for a flush worker that has not been initialized", + ) + streamToFlushWorkerToBatchSize[stream]!![flushWorkerId] = Optional.of(batchSize) + } + + /** + * For a stream get how many bytes are in each running worker. If the worker doesn't have a + * batch yet, return empty optional. + * + * @param stream stream + * @return bytes in batches currently being processed + */ + fun getSizesOfRunningWorkerBatches(stream: StreamDescriptor): List> { + return ArrayList( + streamToFlushWorkerToBatchSize.getOrDefault(stream, ConcurrentHashMap()).values, + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/StreamDescriptorUtils.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/StreamDescriptorUtils.kt new file mode 100644 index 0000000000000..c1861b5bc21ed --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/StreamDescriptorUtils.kt @@ -0,0 +1,37 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.async + +import io.airbyte.protocol.models.v0.AirbyteRecordMessage +import io.airbyte.protocol.models.v0.AirbyteStream +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import io.airbyte.protocol.models.v0.StreamDescriptor + +/** Helper functions to extract [StreamDescriptor] from other POJOs. */ +object StreamDescriptorUtils { + fun fromRecordMessage(msg: AirbyteRecordMessage): StreamDescriptor { + return StreamDescriptor().withName(msg.stream).withNamespace(msg.namespace) + } + + fun fromAirbyteStream(stream: AirbyteStream): StreamDescriptor { + return StreamDescriptor().withName(stream.name).withNamespace(stream.namespace) + } + + fun fromConfiguredAirbyteSteam(stream: ConfiguredAirbyteStream): StreamDescriptor { + return fromAirbyteStream(stream.stream) + } + + fun fromConfiguredCatalog(catalog: ConfiguredAirbyteCatalog): Set { + val pairs = HashSet() + + for (stream in catalog.streams) { + val pair = fromAirbyteStream(stream.stream) + pairs.add(pair) + } + + return pairs + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/buffers/BufferDequeue.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/buffers/BufferDequeue.kt new file mode 100644 index 0000000000000..012e22abe37fc --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/buffers/BufferDequeue.kt @@ -0,0 +1,141 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.async.buffers + +import io.airbyte.cdk.integrations.destination.async.GlobalMemoryManager +import io.airbyte.cdk.integrations.destination.async.state.GlobalAsyncStateManager +import io.airbyte.protocol.models.v0.StreamDescriptor +import java.time.Instant +import java.util.LinkedList +import java.util.Optional +import java.util.concurrent.ConcurrentHashMap +import java.util.concurrent.ConcurrentMap +import java.util.concurrent.atomic.AtomicLong +import java.util.concurrent.locks.ReentrantLock + +/** + * Represents the minimal interface over the underlying buffer queues required for dequeue + * operations with the aim of minimizing lower-level queue access. + * + * Aside from [.take], all public methods in this class represents queue metadata required to + * determine buffer flushing. + */ +// todo (cgardens) - make all the metadata methods more efficient. +class BufferDequeue( + private val memoryManager: GlobalMemoryManager, + private val buffers: ConcurrentMap, + private val stateManager: GlobalAsyncStateManager, +) { + private val bufferLocks: ConcurrentMap = ConcurrentHashMap() + + /** + * Primary dequeue method. Reads from queue up to optimalBytesToRead OR until the queue is + * empty. + * + * @param streamDescriptor specific buffer to take from + * @param optimalBytesToRead bytes to read, if possible + * @return autocloseable batch object, that frees memory. + */ + fun take( + streamDescriptor: StreamDescriptor, + optimalBytesToRead: Long, + ): MemoryAwareMessageBatch { + val lock: ReentrantLock = + bufferLocks.computeIfAbsent( + streamDescriptor, + ) { + ReentrantLock() + } + lock.lock() + + val queue: StreamAwareQueue? = buffers[streamDescriptor] + + try { + val bytesRead = AtomicLong() + + val output: MutableList = LinkedList() + while (queue!!.size() > 0) { + val memoryItem: + MemoryBoundedLinkedBlockingQueue.MemoryItem< + StreamAwareQueue.MessageWithMeta?>? = + queue.peek().orElseThrow() + + // otherwise pull records until we hit the memory limit. + val newSize: Long = (memoryItem?.size ?: 0) + bytesRead.get() + if (newSize <= optimalBytesToRead) { + memoryItem?.size?.let { bytesRead.addAndGet(it) } + queue.poll()?.item?.let { output.add(it) } + } else { + break + } + } + + if (queue.isEmpty) { + val batchSizeBytes: Long = bytesRead.get() + val allocatedBytes: Long = queue.maxMemoryUsage + + // Free unused allocation for the queue. + // When the batch flushes it will flush its allocation. + memoryManager.free(allocatedBytes - batchSizeBytes) + + // Shrink queue to 0 — any new messages will reallocate. + queue.addMaxMemory(-allocatedBytes) + } else { + queue.addMaxMemory(-bytesRead.get()) + } + + return MemoryAwareMessageBatch( + output, + bytesRead.get(), + memoryManager, + stateManager, + ) + } finally { + lock.unlock() + } + } + + val bufferedStreams: Set + /** + * The following methods are provide metadata for buffer flushing calculations. Consumers + * are expected to call it to retrieve the currently buffered streams as a handle to the + * remaining methods. + */ + get() = HashSet(buffers.keys) + + val maxQueueSizeBytes: Long + get() = memoryManager.maxMemoryBytes + + val totalGlobalQueueSizeBytes: Long + get() = + buffers.values + .stream() + .map { obj: StreamAwareQueue -> obj.currentMemoryUsage } + .mapToLong { obj: Long -> obj } + .sum() + + fun getQueueSizeInRecords(streamDescriptor: StreamDescriptor): Optional { + return getBuffer(streamDescriptor).map { buf: StreamAwareQueue -> buf.size().toLong() } + } + + fun getQueueSizeBytes(streamDescriptor: StreamDescriptor): Optional { + return getBuffer(streamDescriptor).map { obj: StreamAwareQueue -> obj.currentMemoryUsage } + } + + fun getTimeOfLastRecord(streamDescriptor: StreamDescriptor): Optional { + return getBuffer(streamDescriptor).flatMap { obj: StreamAwareQueue -> + obj.getTimeOfLastMessage() + } + } + + private fun getBuffer(streamDescriptor: StreamDescriptor): Optional { + if (buffers.containsKey(streamDescriptor)) { + return Optional.of( + (buffers[streamDescriptor])!!, + ) + } + return Optional.empty() + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/buffers/BufferEnqueue.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/buffers/BufferEnqueue.kt new file mode 100644 index 0000000000000..79b70e9da3fe4 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/buffers/BufferEnqueue.kt @@ -0,0 +1,83 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.async.buffers + +import io.airbyte.cdk.integrations.destination.async.GlobalMemoryManager +import io.airbyte.cdk.integrations.destination.async.model.PartialAirbyteMessage +import io.airbyte.cdk.integrations.destination.async.state.GlobalAsyncStateManager +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.StreamDescriptor +import java.util.Optional +import java.util.concurrent.ConcurrentMap + +/** + * Represents the minimal interface over the underlying buffer queues required for enqueue + * operations with the aim of minimizing lower-level queue access. + */ +class BufferEnqueue( + private val memoryManager: GlobalMemoryManager, + private val buffers: ConcurrentMap, + private val stateManager: GlobalAsyncStateManager, +) { + /** + * Buffer a record. Contains memory management logic to dynamically adjust queue size based via + * [GlobalMemoryManager] accounting for incoming records. + * + * @param message to buffer + * @param sizeInBytes + */ + fun addRecord( + message: PartialAirbyteMessage, + sizeInBytes: Int, + defaultNamespace: Optional, + ) { + if (message.type == AirbyteMessage.Type.RECORD) { + handleRecord(message, sizeInBytes) + } else if (message.type == AirbyteMessage.Type.STATE) { + stateManager.trackState(message, sizeInBytes.toLong(), defaultNamespace.orElse("")) + } + } + + private fun handleRecord( + message: PartialAirbyteMessage, + sizeInBytes: Int, + ) { + val streamDescriptor = extractStateFromRecord(message) + val queue = + buffers.computeIfAbsent( + streamDescriptor, + ) { + StreamAwareQueue(memoryManager.requestMemory()) + } + val stateId = stateManager.getStateIdAndIncrementCounter(streamDescriptor) + + var addedToQueue = queue.offer(message, sizeInBytes.toLong(), stateId) + + var i = 0 + while (!addedToQueue) { + val newlyAllocatedMemory = memoryManager.requestMemory() + if (newlyAllocatedMemory > 0) { + queue.addMaxMemory(newlyAllocatedMemory) + } + addedToQueue = queue.offer(message, sizeInBytes.toLong(), stateId) + i++ + if (i > 5) { + try { + Thread.sleep(500) + } catch (e: InterruptedException) { + throw RuntimeException(e) + } + } + } + } + + companion object { + private fun extractStateFromRecord(message: PartialAirbyteMessage): StreamDescriptor { + return StreamDescriptor() + .withNamespace(message.record?.namespace) + .withName(message.record?.stream) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/buffers/BufferManager.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/buffers/BufferManager.kt new file mode 100644 index 0000000000000..f762d90b4a2a8 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/buffers/BufferManager.kt @@ -0,0 +1,97 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.async.buffers + +import com.google.common.annotations.VisibleForTesting +import io.airbyte.cdk.integrations.destination.async.AirbyteFileUtils +import io.airbyte.cdk.integrations.destination.async.GlobalMemoryManager +import io.airbyte.cdk.integrations.destination.async.state.GlobalAsyncStateManager +import io.airbyte.protocol.models.v0.StreamDescriptor +import io.github.oshai.kotlinlogging.KotlinLogging +import java.util.concurrent.ConcurrentHashMap +import java.util.concurrent.ConcurrentMap +import java.util.concurrent.Executors +import java.util.concurrent.ScheduledExecutorService +import java.util.concurrent.TimeUnit +import org.apache.commons.io.FileUtils + +private val logger = KotlinLogging.logger {} + +class BufferManager +@JvmOverloads +constructor( + maxMemory: Long = (Runtime.getRuntime().maxMemory() * MEMORY_LIMIT_RATIO).toLong(), +) { + @get:VisibleForTesting val buffers: ConcurrentMap + val bufferEnqueue: BufferEnqueue + val bufferDequeue: BufferDequeue + + @get:VisibleForTesting val memoryManager: GlobalMemoryManager + + val stateManager: GlobalAsyncStateManager + private val debugLoop: ScheduledExecutorService + + /** + * @param maxMemory the amount of estimated memory we allow for all buffers. The + * GlobalMemoryManager will apply back pressure once this quota is filled. "Memory" can be + * released back once flushing finishes. This number should be large enough we don't block + * reading unnecessarily, but small enough we apply back pressure before OOMing. + */ + init { + logger.info { + "Max 'memory' available for buffer allocation ${FileUtils.byteCountToDisplaySize(maxMemory)}" + } + memoryManager = GlobalMemoryManager(maxMemory) + this.stateManager = GlobalAsyncStateManager(memoryManager) + buffers = ConcurrentHashMap() + bufferEnqueue = BufferEnqueue(memoryManager, buffers, stateManager) + bufferDequeue = BufferDequeue(memoryManager, buffers, stateManager) + debugLoop = Executors.newSingleThreadScheduledExecutor() + debugLoop.scheduleAtFixedRate( + { this.printQueueInfo() }, + 0, + DEBUG_PERIOD_SECS, + TimeUnit.SECONDS, + ) + } + + /** + * Closing a queue will flush all items from it. For this reason, this method needs to be called + * after [io.airbyte.cdk.integrations.destination.async.FlushWorkers.close]. This allows the + * upload workers to make sure all items in the queue has been flushed. + */ + @Throws(Exception::class) + fun close() { + debugLoop.shutdownNow() + logger.info { "Buffers cleared.." } + } + + private fun printQueueInfo() { + val queueInfo = StringBuilder().append("[ASYNC QUEUE INFO] ") + val messages = mutableListOf() + + messages.add( + "Global: max: ${ AirbyteFileUtils.byteCountToDisplaySize(memoryManager.maxMemoryBytes)}, allocated: ${AirbyteFileUtils.byteCountToDisplaySize(memoryManager.currentMemoryBytes.get())} (${memoryManager.currentMemoryBytes.toDouble() / 1024 / 1024} MB), %% used: ${memoryManager.currentMemoryBytes.toDouble() / memoryManager.maxMemoryBytes}", + ) + + for ((key, queue) in buffers) { + messages.add( + "Queue `${key.name}`, num records: ${queue.size()}, num bytes: ${AirbyteFileUtils.byteCountToDisplaySize(queue.currentMemoryUsage)}, allocated bytes: ${AirbyteFileUtils.byteCountToDisplaySize(queue.maxMemoryUsage)}" + ) + } + + messages.add(stateManager.memoryUsageMessage) + + queueInfo.append(messages.joinToString(separator = " | ")) + + logger.info { queueInfo.toString() } + } + + companion object { + private const val DEBUG_PERIOD_SECS = 60L + + const val MEMORY_LIMIT_RATIO: Double = 0.7 + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/buffers/MemoryAwareMessageBatch.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/buffers/MemoryAwareMessageBatch.kt new file mode 100644 index 0000000000000..15338f73201ad --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/buffers/MemoryAwareMessageBatch.kt @@ -0,0 +1,50 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.async.buffers + +import io.airbyte.cdk.integrations.destination.async.GlobalMemoryManager +import io.airbyte.cdk.integrations.destination.async.state.GlobalAsyncStateManager +import io.airbyte.protocol.models.v0.AirbyteMessage +import java.util.function.Consumer + +/** + * POJO abstraction representing one discrete buffer read. This allows ergonomics dequeues by + * [io.airbyte.cdk.integrations.destination.async.FlushWorkers]. + * + * The contained stream **IS EXPECTED to be a BOUNDED** stream. Returning a boundless stream has + * undefined behaviour. + * + * Once done, consumers **MUST** invoke [.close]. As the [.batch] has already been retrieved from + * in-memory buffers, we need to update [GlobalMemoryManager] to reflect the freed up memory and + * avoid memory leaks. + */ +class MemoryAwareMessageBatch( + val data: List, + val sizeInBytes: Long, + private val memoryManager: GlobalMemoryManager, + private val stateManager: GlobalAsyncStateManager, +) : AutoCloseable { + @Throws(Exception::class) + override fun close() { + memoryManager.free(sizeInBytes) + } + + /** + * For the batch, marks all the states that have now been flushed. Also writes the states that + * can be flushed back to platform via stateManager. + */ + fun flushStates( + stateIdToCount: Map, + outputRecordCollector: Consumer, + ) { + stateIdToCount.forEach { (stateId: Long?, count: Long?) -> + stateManager.decrement( + stateId!!, + count!!, + ) + } + stateManager.flushStates(outputRecordCollector) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/buffers/MemoryBoundedLinkedBlockingQueue.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/buffers/MemoryBoundedLinkedBlockingQueue.kt new file mode 100644 index 0000000000000..a81fcf1db50e1 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/buffers/MemoryBoundedLinkedBlockingQueue.kt @@ -0,0 +1,140 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.async.buffers + +import io.github.oshai.kotlinlogging.KotlinLogging +import java.util.concurrent.LinkedBlockingQueue +import java.util.concurrent.TimeUnit +import java.util.concurrent.atomic.AtomicLong +import javax.annotation.Nonnull + +private val logger = KotlinLogging.logger {} + +/** + * This class is meant to emulate the behavior of a LinkedBlockingQueue, but instead of being + * bounded on number of items in the queue, it is bounded by the memory it is allowed to use. The + * amount of memory it is allowed to use can be resized after it is instantiated. + * + * This class intentionally hides the underlying queue inside of it. For this class to work, it has + * to override each method on a queue that adds or removes records from the queue. The Queue + * interface has a lot of methods to override, and we don't want to spend the time overriding a lot + * of methods that won't be used. By hiding the queue, we avoid someone accidentally using a queue + * method that has not been modified. If you need access to another of the queue methods, pattern + * match adding the memory tracking as seen in [HiddenQueue], and then delegate to that method from + * this top-level class. + * + * @param type in the queue + */ +class MemoryBoundedLinkedBlockingQueue(maxMemoryUsage: Long) { + private val hiddenQueue = HiddenQueue(maxMemoryUsage) + + val currentMemoryUsage: Long + get() = hiddenQueue.currentMemoryUsage.get() + + fun addMaxMemory(maxMemoryUsage: Long) { + hiddenQueue.maxMemoryUsage.addAndGet(maxMemoryUsage) + } + + fun size(): Int { + return hiddenQueue.size + } + + fun offer( + e: E, + itemSizeInBytes: Long, + ): Boolean { + return hiddenQueue.offer(e, itemSizeInBytes) + } + + fun peek(): MemoryItem? { + return hiddenQueue.peek() + } + + @Throws(InterruptedException::class) + fun take(): MemoryItem { + return hiddenQueue.take() + } + + fun poll(): MemoryItem? { + return hiddenQueue.poll() + } + + @Throws(InterruptedException::class) + fun poll( + timeout: Long, + unit: TimeUnit, + ): MemoryItem? { + return hiddenQueue.poll(timeout, unit) + } + + val maxMemoryUsage: Long + get() = hiddenQueue.getMaxMemoryUsage() + + /** + * Extends LinkedBlockingQueue so that we can get a LinkedBlockingQueue bounded by memory. + * Hidden as an inner class, so it doesn't get misused, see top-level javadoc comment. + * + * @param + */ + private class HiddenQueue(maxMemoryUsage: Long) : LinkedBlockingQueue?>() { + val currentMemoryUsage: AtomicLong = AtomicLong(0) + val maxMemoryUsage: AtomicLong = AtomicLong(maxMemoryUsage) + + fun getMaxMemoryUsage(): Long { + return maxMemoryUsage.get() + } + + fun offer( + e: E, + itemSizeInBytes: Long, + ): Boolean { + val newMemoryUsage = currentMemoryUsage.addAndGet(itemSizeInBytes) + if (newMemoryUsage <= maxMemoryUsage.get()) { + val success = super.offer(MemoryItem(e, itemSizeInBytes)) + if (!success) { + currentMemoryUsage.addAndGet(-itemSizeInBytes) + } + logger.debug { "offer status: $success" } + return success + } else { + currentMemoryUsage.addAndGet(-itemSizeInBytes) + logger.debug { "offer failed" } + return false + } + } + + @Nonnull + @Throws(InterruptedException::class) + override fun take(): MemoryItem { + val memoryItem = super.take()!! + currentMemoryUsage.addAndGet(-memoryItem.size) + return memoryItem + } + + override fun poll(): MemoryItem? { + val memoryItem = super.poll() + if (memoryItem != null) { + currentMemoryUsage.addAndGet(-memoryItem.size) + return memoryItem + } + return null + } + + @Throws(InterruptedException::class) + override fun poll( + timeout: Long, + unit: TimeUnit, + ): MemoryItem? { + val memoryItem = super.poll(timeout, unit) + if (memoryItem != null) { + currentMemoryUsage.addAndGet(-memoryItem.size) + return memoryItem + } + return null + } + } + + @JvmRecord data class MemoryItem(val item: E, val size: Long) +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/buffers/StreamAwareQueue.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/buffers/StreamAwareQueue.kt new file mode 100644 index 0000000000000..50b6e6ef963e6 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/buffers/StreamAwareQueue.kt @@ -0,0 +1,79 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.async.buffers + +import io.airbyte.cdk.integrations.destination.async.model.PartialAirbyteMessage +import java.time.Instant +import java.util.Optional +import java.util.concurrent.TimeUnit +import java.util.concurrent.atomic.AtomicReference + +class StreamAwareQueue(maxMemoryUsage: Long) { + private val timeOfLastMessage: AtomicReference = AtomicReference() + + private val memoryAwareQueue: MemoryBoundedLinkedBlockingQueue = + MemoryBoundedLinkedBlockingQueue(maxMemoryUsage) + + val currentMemoryUsage: Long + get() = memoryAwareQueue.currentMemoryUsage + + val maxMemoryUsage: Long + get() = memoryAwareQueue.maxMemoryUsage + + fun addMaxMemory(maxMemoryUsage: Long) { + memoryAwareQueue.addMaxMemory(maxMemoryUsage) + } + + val isEmpty: Boolean + get() = memoryAwareQueue.size() == 0 + + fun getTimeOfLastMessage(): Optional { + // if the queue is empty, the time of last message is irrelevant + if (size() == 0) { + return Optional.empty() + } + return Optional.ofNullable(timeOfLastMessage.get()) + } + + fun peek(): Optional> { + return Optional.ofNullable(memoryAwareQueue.peek()) + } + + fun size(): Int { + return memoryAwareQueue.size() + } + + fun offer( + message: PartialAirbyteMessage, + messageSizeInBytes: Long, + stateId: Long, + ): Boolean { + if (memoryAwareQueue.offer(MessageWithMeta(message, stateId), messageSizeInBytes)) { + timeOfLastMessage.set(Instant.now()) + return true + } else { + return false + } + } + + @Throws(InterruptedException::class) + fun take(): MemoryBoundedLinkedBlockingQueue.MemoryItem { + return memoryAwareQueue.take() + } + + fun poll(): MemoryBoundedLinkedBlockingQueue.MemoryItem? { + return memoryAwareQueue.poll() + } + + @Throws(InterruptedException::class) + fun poll( + timeout: Long, + unit: TimeUnit, + ): MemoryBoundedLinkedBlockingQueue.MemoryItem? { + return memoryAwareQueue.poll(timeout, unit) + } + + @JvmRecord data class MessageWithMeta(val message: PartialAirbyteMessage, val stateId: Long) +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/deser/AirbyteMessageDeserializer.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/deser/AirbyteMessageDeserializer.kt new file mode 100644 index 0000000000000..ffca265970cc4 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/deser/AirbyteMessageDeserializer.kt @@ -0,0 +1,62 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.async.deser + +import io.airbyte.cdk.integrations.destination.async.model.PartialAirbyteMessage +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.v0.AirbyteMessage + +class AirbyteMessageDeserializer( + private val dataTransformer: StreamAwareDataTransformer = IdentityDataTransformer(), +) { + /** + * Deserializes to a [PartialAirbyteMessage] which can represent both a Record or a State + * Message + * + * PartialAirbyteMessage holds either: + * * entire serialized message string when message is a valid State Message + * * serialized AirbyteRecordMessage when message is a valid Record Message + * + * @param message the string to deserialize + * @return PartialAirbyteMessage if the message is valid, empty otherwise + */ + fun deserializeAirbyteMessage( + message: String?, + ): PartialAirbyteMessage { + // TODO: This is doing some sketchy assumptions by deserializing either the whole or the + // partial based on type. + // Use JsonSubTypes and extend StdDeserializer to properly handle this. + // Make immutability a first class citizen in the PartialAirbyteMessage class. + val partial = + Jsons.tryDeserializeExact(message, PartialAirbyteMessage::class.java).orElseThrow { + RuntimeException("Unable to deserialize PartialAirbyteMessage.") + } + + val msgType = partial.type + if (AirbyteMessage.Type.RECORD == msgType && partial.record?.data != null) { + // Transform data provided by destination. + val transformedData = + dataTransformer.transform( + partial.record?.streamDescriptor, + partial.record?.data, + partial.record?.meta, + ) + // store serialized json & meta + partial.withSerialized(Jsons.serialize(transformedData.first)) + partial.record?.meta = transformedData.second + // The connector doesn't need to be able to access to the record value. We can serialize + // it here and + // drop the json + // object. Having this data stored as a string is slightly more optimal for the memory + // usage. + partial.record?.data = null + } else if (AirbyteMessage.Type.STATE == msgType) { + partial.withSerialized(message) + } else { + throw RuntimeException(String.format("Unsupported message type: %s", msgType)) + } + + return partial + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/deser/IdentityDataTransformer.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/deser/IdentityDataTransformer.kt new file mode 100644 index 0000000000000..73abff4f22083 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/deser/IdentityDataTransformer.kt @@ -0,0 +1,20 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.async.deser + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.protocol.models.v0.AirbyteRecordMessageMeta +import io.airbyte.protocol.models.v0.StreamDescriptor + +/** Identity transformer which echoes back the original data and meta. */ +class IdentityDataTransformer : StreamAwareDataTransformer { + override fun transform( + streamDescriptor: StreamDescriptor?, + data: JsonNode?, + meta: AirbyteRecordMessageMeta?, + ): Pair { + return Pair(data, meta) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/deser/StreamAwareDataTransformer.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/deser/StreamAwareDataTransformer.kt new file mode 100644 index 0000000000000..77bc97dceda99 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/deser/StreamAwareDataTransformer.kt @@ -0,0 +1,27 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.async.deser + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.protocol.models.v0.AirbyteRecordMessageMeta +import io.airbyte.protocol.models.v0.StreamDescriptor + +interface StreamAwareDataTransformer { + /** + * Transforms the input data by applying destination limitations and populating + * [AirbyteRecordMessageMeta]. The returned pair contains the transformed data and the merged + * meta information from upstream. + * + * @param streamDescriptor + * @param data + * @param meta + * @return + */ + fun transform( + streamDescriptor: StreamDescriptor?, + data: JsonNode?, + meta: AirbyteRecordMessageMeta?, + ): Pair +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/function/DestinationFlushFunction.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/function/DestinationFlushFunction.kt new file mode 100644 index 0000000000000..0b9d2096ae665 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/function/DestinationFlushFunction.kt @@ -0,0 +1,62 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.async.function + +import io.airbyte.cdk.integrations.destination.async.model.PartialAirbyteMessage +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.StreamDescriptor +import java.util.stream.Stream +import kotlin.math.max + +/** + * An interface meant to be used with [FlushWorkers]. + * + * A destination instructs workers how to write data by specifying [.flush]. This keeps the worker + * abstraction generic and reusable. + * + * e.g. A database destination's flush function likely involves parsing the stream into SQL + * statements. + * + * There are 2 different destination types as of this writing: + * + * * 1. Destinations that upload files. This includes warehouses and databases. + * * 2. Destinations that upload data streams. This mostly includes various Cloud storages. This + * will include reverse-ETL in the future + * + * In both cases, the simplest way to model the incoming data is as a stream. + */ +interface DestinationFlushFunction { + /** + * Flush a batch of data to the destination. + * + * @param decs the Airbyte stream the data stream belongs to + * @param stream a bounded [AirbyteMessage] stream ideally of [.getOptimalBatchSizeBytes] size + * @throws Exception + */ + @Throws(Exception::class) + fun flush( + decs: StreamDescriptor, + stream: Stream, + ) + + /** + * When invoking [.flush], best effort attempt to invoke flush with a batch of this size. Useful + * for Destinations that have optimal flush batch sizes. + * + * If you increase this, make sure that [.getQueueFlushThresholdBytes] is larger than this + * value. Otherwise we may trigger flushes before reaching the optimal batch size. + * + * @return the optimal batch size in bytes + */ + val optimalBatchSizeBytes: Long + + val queueFlushThresholdBytes: Long + /** + * This value should be at least as high as [.getOptimalBatchSizeBytes]. It's used by + * [DetectStreamToFlush] as part of deciding when a stream needs to be flushed. I'm being + * vague because I don't understand the specifics. + */ + get() = max((10 * 1024 * 1024).toDouble(), optimalBatchSizeBytes.toDouble()).toLong() +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/model/PartialAirbyteMessage.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/model/PartialAirbyteMessage.kt new file mode 100644 index 0000000000000..8ae021c6ca8cc --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/model/PartialAirbyteMessage.kt @@ -0,0 +1,94 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.async.model + +import com.fasterxml.jackson.annotation.JsonProperty +import com.fasterxml.jackson.annotation.JsonPropertyDescription +import io.airbyte.protocol.models.v0.AirbyteMessage +import java.util.Objects + +class PartialAirbyteMessage { + @get:JsonProperty("type") + @set:JsonProperty("type") + @JsonProperty("type") + @JsonPropertyDescription("Message type") + var type: AirbyteMessage.Type? = null + + @get:JsonProperty("record") + @set:JsonProperty("record") + @JsonProperty("record") + var record: PartialAirbyteRecordMessage? = null + + @get:JsonProperty("state") + @set:JsonProperty("state") + @JsonProperty("state") + var state: PartialAirbyteStateMessage? = null + + /** + * For record messages, this stores the serialized data blob (i.e. + * `Jsons.serialize(message.getRecord().getData())`). For state messages, this stores the + * _entire_ message (i.e. `Jsons.serialize(message)`). + * + * See + * [io.airbyte.cdk.integrations.destination.async.AsyncStreamConsumer.deserializeAirbyteMessage] + * for the exact logic of how this field is populated. + */ + @get:JsonProperty("serialized") + @set:JsonProperty("serialized") + @JsonProperty("serialized") + var serialized: String? = null + + fun withType(type: AirbyteMessage.Type?): PartialAirbyteMessage { + this.type = type + return this + } + + fun withRecord(record: PartialAirbyteRecordMessage?): PartialAirbyteMessage { + this.record = record + return this + } + + fun withState(state: PartialAirbyteStateMessage?): PartialAirbyteMessage { + this.state = state + return this + } + + fun withSerialized(serialized: String?): PartialAirbyteMessage { + this.serialized = serialized + return this + } + + override fun equals(other: Any?): Boolean { + if (this === other) { + return true + } + if (other == null || javaClass != other.javaClass) { + return false + } + val that = other as PartialAirbyteMessage + return type == that.type && + record == that.record && + state == that.state && + serialized == that.serialized + } + + override fun hashCode(): Int { + return Objects.hash(type, record, state, serialized) + } + + override fun toString(): String { + return "PartialAirbyteMessage{" + + "type=" + + type + + ", record=" + + record + + ", state=" + + state + + ", serialized='" + + serialized + + '\'' + + '}' + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/model/PartialAirbyteRecordMessage.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/model/PartialAirbyteRecordMessage.kt new file mode 100644 index 0000000000000..fd26f6ad5747c --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/model/PartialAirbyteRecordMessage.kt @@ -0,0 +1,106 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.async.model + +import com.fasterxml.jackson.annotation.JsonProperty +import com.fasterxml.jackson.annotation.JsonPropertyDescription +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.protocol.models.v0.AirbyteRecordMessageMeta +import io.airbyte.protocol.models.v0.StreamDescriptor +import java.util.Objects + +// TODO: (ryankfu) remove this and test with low memory resources to ensure OOM is still not a +// factor, shouldn't be +// but weird things have happened +class PartialAirbyteRecordMessage { + @get:JsonProperty("namespace") + @set:JsonProperty("namespace") + @JsonProperty("namespace") + var namespace: String? = null + + @get:JsonProperty("stream") + @set:JsonProperty("stream") + @JsonProperty("stream") + var stream: String? = null + + @get:JsonProperty("data") + @set:JsonProperty("data") + @JsonProperty("data") + var data: JsonNode? = null + + @get:JsonProperty("emitted_at") + @set:JsonProperty("emitted_at") + @JsonProperty("emitted_at") + @JsonPropertyDescription("when the data was emitted from the source. epoch in millisecond.") + var emittedAt: Long = 0 + + @get:JsonProperty("meta") + @set:JsonProperty("meta") + @JsonProperty("meta") + var meta: AirbyteRecordMessageMeta? = null + + fun withNamespace(namespace: String?): PartialAirbyteRecordMessage { + this.namespace = namespace + return this + } + + fun withStream(stream: String?): PartialAirbyteRecordMessage { + this.stream = stream + return this + } + + fun withData(data: JsonNode?): PartialAirbyteRecordMessage { + this.data = data + return this + } + + fun withEmittedAt(emittedAt: Long): PartialAirbyteRecordMessage { + this.emittedAt = emittedAt + return this + } + + fun withMeta(meta: AirbyteRecordMessageMeta?): PartialAirbyteRecordMessage { + this.meta = meta + return this + } + + override fun equals(other: Any?): Boolean { + if (this === other) { + return true + } + if (other == null || javaClass != other.javaClass) { + return false + } + val that = other as PartialAirbyteRecordMessage + return namespace == that.namespace && + stream == that.stream && + emittedAt == that.emittedAt && + meta == that.meta + } + + override fun hashCode(): Int { + return Objects.hash(namespace, stream, emittedAt, meta) + } + + override fun toString(): String { + return "PartialAirbyteRecordMessage{" + + "namespace='" + + namespace + + '\'' + + ", stream='" + + stream + + '\'' + + ", emittedAt='" + + emittedAt + + '\'' + + ", meta='" + + meta + + '\'' + + '}' + } + + val streamDescriptor: StreamDescriptor + get() = StreamDescriptor().withName(stream).withNamespace(namespace) +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/model/PartialAirbyteStateMessage.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/model/PartialAirbyteStateMessage.kt new file mode 100644 index 0000000000000..cde5edce55c58 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/model/PartialAirbyteStateMessage.kt @@ -0,0 +1,50 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.async.model + +import com.fasterxml.jackson.annotation.JsonProperty +import io.airbyte.protocol.models.v0.AirbyteStateMessage +import java.util.Objects + +class PartialAirbyteStateMessage { + @get:JsonProperty("type") + @set:JsonProperty("type") + @JsonProperty("type") + var type: AirbyteStateMessage.AirbyteStateType? = null + + @get:JsonProperty("stream") + @set:JsonProperty("stream") + @JsonProperty("stream") + var stream: PartialAirbyteStreamState? = null + + fun withType(type: AirbyteStateMessage.AirbyteStateType?): PartialAirbyteStateMessage { + this.type = type + return this + } + + fun withStream(stream: PartialAirbyteStreamState?): PartialAirbyteStateMessage { + this.stream = stream + return this + } + + override fun equals(other: Any?): Boolean { + if (this === other) { + return true + } + if (other == null || javaClass != other.javaClass) { + return false + } + val that = other as PartialAirbyteStateMessage + return type == that.type && stream == that.stream + } + + override fun hashCode(): Int { + return Objects.hash(type, stream) + } + + override fun toString(): String { + return "PartialAirbyteStateMessage{" + "type=" + type + ", stream=" + stream + '}' + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/model/PartialAirbyteStreamState.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/model/PartialAirbyteStreamState.kt new file mode 100644 index 0000000000000..80874ea9fadcb --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/model/PartialAirbyteStreamState.kt @@ -0,0 +1,40 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.async.model + +import com.fasterxml.jackson.annotation.JsonProperty +import io.airbyte.protocol.models.v0.StreamDescriptor +import java.util.Objects + +class PartialAirbyteStreamState { + @get:JsonProperty("stream_descriptor") + @set:JsonProperty("stream_descriptor") + @JsonProperty("stream_descriptor") + var streamDescriptor: StreamDescriptor? = null + + fun withStreamDescriptor(streamDescriptor: StreamDescriptor): PartialAirbyteStreamState { + this.streamDescriptor = streamDescriptor + return this + } + + override fun equals(other: Any?): Boolean { + if (this === other) { + return true + } + if (other == null || javaClass != other.javaClass) { + return false + } + val that = other as PartialAirbyteStreamState + return streamDescriptor == that.streamDescriptor + } + + override fun hashCode(): Int { + return Objects.hash(streamDescriptor) + } + + override fun toString(): String { + return "PartialAirbyteStreamState{" + "streamDescriptor=" + streamDescriptor + '}' + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/state/FlushFailure.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/state/FlushFailure.kt new file mode 100644 index 0000000000000..380d6eb1ae179 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/state/FlushFailure.kt @@ -0,0 +1,26 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.async.state + +import java.util.concurrent.atomic.AtomicBoolean +import java.util.concurrent.atomic.AtomicReference + +class FlushFailure { + private val isFailed = AtomicBoolean(false) + + private val exceptionAtomicReference = AtomicReference() + + fun propagateException(e: Exception) { + isFailed.set(true) + exceptionAtomicReference.set(e) + } + + fun isFailed(): Boolean { + return isFailed.get() + } + + val exception: Exception + get() = exceptionAtomicReference.get() +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/state/GlobalAsyncStateManager.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/state/GlobalAsyncStateManager.kt new file mode 100644 index 0000000000000..e494e7ac9a0f4 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/async/state/GlobalAsyncStateManager.kt @@ -0,0 +1,474 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.async.state + +import com.google.common.base.Preconditions +import com.google.common.base.Strings +import io.airbyte.cdk.integrations.destination.async.GlobalMemoryManager +import io.airbyte.cdk.integrations.destination.async.model.PartialAirbyteMessage +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.AirbyteStateMessage +import io.airbyte.protocol.models.v0.AirbyteStateStats +import io.airbyte.protocol.models.v0.StreamDescriptor +import io.github.oshai.kotlinlogging.KotlinLogging +import java.time.Instant +import java.util.Optional +import java.util.UUID +import java.util.concurrent.ConcurrentHashMap +import java.util.concurrent.ConcurrentMap +import java.util.concurrent.LinkedBlockingDeque +import java.util.concurrent.atomic.AtomicLong +import java.util.function.Consumer +import org.apache.commons.io.FileUtils +import org.apache.mina.util.ConcurrentHashSet + +private val logger = KotlinLogging.logger {} + +/** + * Responsible for managing state within the Destination. The general approach is a ref counter + * approach - each state message is associated with a record count. This count represents the number + * of preceding records. For a state to be emitted, all preceding records have to be written to the + * destination i.e. the counter is 0. + * + * A per-stream state queue is maintained internally, with each state within the queue having a + * counter. This means we *ALLOW* records succeeding an unemitted state to be written. This + * decouples record writing from state management at the cost of potentially repeating work if an + * upstream state is never written. + * + * One important detail here is the difference between how PER-STREAM & NON-PER-STREAM is handled. + * The PER-STREAM case is simple, and is as described above. The NON-PER-STREAM case is slightly + * tricky. Because we don't know the stream type to begin with, we always assume PER_STREAM until + * the first state message arrives. If this state message is a GLOBAL state, we alias all existing + * state ids to a single global state id via a set of alias ids. From then onwards, we use one id - + * [.SENTINEL_GLOBAL_DESC] regardless of stream. Read [.convertToGlobalIfNeeded] for more detail. + */ +class GlobalAsyncStateManager(private val memoryManager: GlobalMemoryManager) { + /** Memory that the manager has allocated to it to use. It can ask for more memory as needed. */ + private val memoryAllocated: AtomicLong = AtomicLong(memoryManager.requestMemory()) + + /** Memory that the manager is currently using. */ + private val memoryUsed: AtomicLong = AtomicLong() + + private var preState: Boolean = true + private val descToStateIdQ: ConcurrentMap> = + ConcurrentHashMap() + + /** + * Both [stateIdToCounter] and [stateIdToCounterForPopulatingDestinationStats] are used to + * maintain a counter for the number of records associated with a give state i.e. before a state + * was received, how many records were seen until that point. As records are received the value + * for both are incremented. The difference is the purpose of the two attributes. + * [stateIdToCounter] is used to determine whether a state is safe to emit or not. This is done + * by decrementing the value as records are committed to the destination. If the value hits 0, + * it means all the records associated with a given state have been committed to the + * destination, it is safe to emit the state back to platform. But because of this we can't use + * it to determine the actual number of records that are associated with a state to update the + * value of [AirbyteStateMessage.destinationStats] at the time of emitting the state message. + * That's where we need [stateIdToCounterForPopulatingDestinationStats], which is only reset + * when a state message has been emitted. + */ + private val stateIdToCounter: ConcurrentMap = ConcurrentHashMap() + private val stateIdToCounterForPopulatingDestinationStats: ConcurrentMap = + ConcurrentHashMap() + private val stateIdToState: ConcurrentMap> = + ConcurrentHashMap() + + // Alias-ing only exists in the non-STREAM case where we have to convert existing state ids to + // one + // single global id. + // This only happens once. + private val aliasIds: MutableSet = ConcurrentHashSet() + private var retroactiveGlobalStateId: Long = 0 + + // All access to this field MUST be guarded by a synchronized(lock) block + private var arrivalNumber: Long = 0 + + private val lock: Any = Any() + + // Always assume STREAM to begin, and convert only if needed. Most state is per stream anyway. + private var stateType: AirbyteStateMessage.AirbyteStateType = + AirbyteStateMessage.AirbyteStateType.STREAM + + /** + * Main method to process state messages. + * + * The first incoming state message tells us the type of state we are dealing with. We then + * convert internal data structures if needed. + * + * Because state messages are a watermark, all preceding records need to be flushed before the + * state message can be processed. + */ + fun trackState( + message: PartialAirbyteMessage, + sizeInBytes: Long, + defaultNamespace: String, + ) { + if (preState) { + convertToGlobalIfNeeded(message) + preState = false + } + // stateType should not change after a conversion. + Preconditions.checkArgument(stateType == extractStateType(message)) + + closeState(message, sizeInBytes, defaultNamespace) + } + + /** + * Identical to [.getStateId] except this increments the associated counter by 1. Intended to be + * called whenever a record is ingested. + * + * @param streamDescriptor + * - stream to get stateId for. + * @return state id + */ + fun getStateIdAndIncrementCounter(streamDescriptor: StreamDescriptor): Long { + return getStateIdAndIncrement(streamDescriptor, 1) + } + + /** + * Each decrement represent one written record for a state. A zero counter means there are no + * more inflight records associated with a state and the state can be flushed. + * + * @param stateId reference to a state. + * @param count to decrement. + */ + fun decrement( + stateId: Long, + count: Long, + ) { + synchronized(lock) { + logger.trace { "decrementing state id: $stateId, count: $count" } + stateIdToCounter[getStateAfterAlias(stateId)]!!.addAndGet(-count) + } + } + + /** + * Flushes state messages with no more inflight records i.e. counter = 0 across all streams. + * Intended to be called by [io.airbyte.cdk.integrations.destination.async.FlushWorkers] after a + * worker has finished flushing its record batch. + */ + fun flushStates(outputRecordCollector: Consumer) { + var bytesFlushed: Long = 0L + logger.info { "Flushing states" } + synchronized(lock) { + for (entry: Map.Entry?> in + descToStateIdQ.entries) { + // Remove all states with 0 counters. + // Per-stream synchronized is required to make sure the state (at the head of the + // queue) + // logic is applied to is the state actually removed. + + val stateIdQueue: LinkedBlockingDeque? = entry.value + while (true) { + val oldestStateId: Long = stateIdQueue!!.peek() ?: break + // no state to flush for this stream + + // technically possible this map hasn't been updated yet. + // This can be if you call the flush method if there are 0 records/states + val oldestStateCounter: AtomicLong = stateIdToCounter[oldestStateId] ?: break + + val oldestState: Pair = + stateIdToState[oldestStateId] ?: break + // no state to flush for this stream + + val allRecordsCommitted: Boolean = oldestStateCounter.get() == 0L + if (allRecordsCommitted) { + val stateMessage: StateMessageWithArrivalNumber = oldestState.first + val flushedRecordsAssociatedWithState: Double = + stateIdToCounterForPopulatingDestinationStats[oldestStateId]!! + .toDouble() + + logger.debug { + "State with arrival number ${stateMessage.arrivalNumber} emitted from thread ${Thread.currentThread().name} at ${Instant.now()}" + } + val message: AirbyteMessage = + Jsons.deserialize( + stateMessage.partialAirbyteStateMessage.serialized, + AirbyteMessage::class.java, + ) + message.state.destinationStats = + AirbyteStateStats().withRecordCount(flushedRecordsAssociatedWithState) + outputRecordCollector.accept(message) + + bytesFlushed += oldestState.second + + // cleanup + entry.value!!.poll() + stateIdToState.remove(oldestStateId) + stateIdToCounter.remove(oldestStateId) + stateIdToCounterForPopulatingDestinationStats.remove(oldestStateId) + } else { + break + } + } + } + } + logger.info { "Flushing states complete" } + freeBytes(bytesFlushed) + } + + private fun getStateIdAndIncrement( + streamDescriptor: StreamDescriptor, + increment: Long, + ): Long { + val resolvedDescriptor: StreamDescriptor = + if (stateType == AirbyteStateMessage.AirbyteStateType.STREAM) streamDescriptor + else SENTINEL_GLOBAL_DESC + // As concurrent collections do not guarantee data consistency when iterating, use `get` + // instead of + // `containsKey`. + if (descToStateIdQ[resolvedDescriptor] == null) { + registerNewStreamDescriptor(resolvedDescriptor) + } + synchronized(lock) { + val stateId: Long = descToStateIdQ[resolvedDescriptor]!!.peekLast() + val update: Long = stateIdToCounter[stateId]!!.addAndGet(increment) + if (increment >= 0) { + stateIdToCounterForPopulatingDestinationStats[stateId]!!.addAndGet(increment) + } + logger.trace { "State id: $stateId, count: $update" } + return stateId + } + } + + /** + * Return the internal id of a state message. This is the id that should be used to reference a + * state when interacting with all methods in this class. + * + * @param streamDescriptor + * - stream to get stateId for. + * @return state id + */ + private fun getStateId(streamDescriptor: StreamDescriptor): Long { + return getStateIdAndIncrement(streamDescriptor, 0) + } + + /** + * Pass this the number of bytes that were flushed. It will track those internally and if the + * memoryUsed gets signficantly lower than what is allocated, then it will return it to the + * memory manager. We don't always return to the memory manager to avoid needlessly allocating / + * de-allocating memory rapidly over a few bytes. + * + * @param bytesFlushed bytes that were flushed (and should be removed from memory used). + */ + private fun freeBytes(bytesFlushed: Long) { + logger.debug { + "Bytes flushed memory to store state message. Allocated: " + + "${FileUtils.byteCountToDisplaySize(memoryAllocated.get())}, " + + "Used: ${FileUtils.byteCountToDisplaySize(memoryUsed.get())}, " + + "Flushed: ${FileUtils.byteCountToDisplaySize(bytesFlushed)}, " + + "% Used: ${memoryUsed.get().toDouble() / memoryAllocated.get()}" + } + + memoryManager.free(bytesFlushed) + memoryAllocated.addAndGet(-bytesFlushed) + memoryUsed.addAndGet(-bytesFlushed) + logger.debug { + "Returned ${FileUtils.byteCountToDisplaySize(bytesFlushed)} of memory back to the memory manager." + } + } + + private fun convertToGlobalIfNeeded(message: PartialAirbyteMessage) { + // instead of checking for global or legacy, check for the inverse of stream. + stateType = extractStateType(message) + if ( + stateType != AirbyteStateMessage.AirbyteStateType.STREAM + ) { // alias old stream-level state ids to single global state id + // upon conversion, all previous tracking data structures need to be cleared as we move + // into the non-STREAM world for correctness. + synchronized(lock) { + aliasIds.addAll( + descToStateIdQ.values + .stream() + .flatMap { obj: LinkedBlockingDeque -> obj.stream() } + .toList(), + ) + descToStateIdQ.clear() + retroactiveGlobalStateId = StateIdProvider.nextId + + descToStateIdQ[SENTINEL_GLOBAL_DESC] = LinkedBlockingDeque() + descToStateIdQ[SENTINEL_GLOBAL_DESC]!!.add(retroactiveGlobalStateId) + + val combinedCounter: Long = + stateIdToCounter.values + .stream() + .mapToLong { obj: AtomicLong -> obj.get() } + .sum() + stateIdToCounter.clear() + stateIdToCounter[retroactiveGlobalStateId] = AtomicLong(combinedCounter) + + val statsCounter: Long = + stateIdToCounterForPopulatingDestinationStats.values + .stream() + .mapToLong { obj: AtomicLong -> obj.get() } + .sum() + stateIdToCounterForPopulatingDestinationStats.clear() + stateIdToCounterForPopulatingDestinationStats.put( + retroactiveGlobalStateId, + AtomicLong(statsCounter), + ) + } + } + } + + private fun extractStateType( + message: PartialAirbyteMessage, + ): AirbyteStateMessage.AirbyteStateType { + return if (message.state?.type == null) { + // Treated the same as GLOBAL. + AirbyteStateMessage.AirbyteStateType.LEGACY + } else { + message.state?.type!! + } + } + + /** + * When a state message is received, 'close' the previous state to associate the existing state + * id to the newly arrived state message. We also increment the state id in preparation for the + * next state message. + */ + private fun closeState( + message: PartialAirbyteMessage, + sizeInBytes: Long, + defaultNamespace: String, + ) { + val resolvedDescriptor: StreamDescriptor = + extractStream(message, defaultNamespace) + .orElse( + SENTINEL_GLOBAL_DESC, + ) + synchronized(lock) { + logger.debug { "State with arrival number $arrivalNumber received" } + stateIdToState[getStateId(resolvedDescriptor)] = + Pair( + StateMessageWithArrivalNumber( + message, + arrivalNumber, + ), + sizeInBytes, + ) + arrivalNumber++ + } + registerNewStateId(resolvedDescriptor) + allocateMemoryToState(sizeInBytes) + } + + /** + * Given the size of a state message, tracks how much memory the manager is using and requests + * additional memory from the memory manager if needed. + * + * @param sizeInBytes size of the state message + */ + private fun allocateMemoryToState(sizeInBytes: Long) { + if (memoryAllocated.get() < memoryUsed.get() + sizeInBytes) { + while (memoryAllocated.get() < memoryUsed.get() + sizeInBytes) { + memoryAllocated.addAndGet(memoryManager.requestMemory()) + try { + logger.debug { + "Insufficient memory to store state message. " + + "Allocated: ${FileUtils.byteCountToDisplaySize(memoryAllocated.get())}, " + + "Used: ${FileUtils.byteCountToDisplaySize(memoryUsed.get())}, " + + "Size of State Msg: ${FileUtils.byteCountToDisplaySize(sizeInBytes)}, " + + "Needed: ${FileUtils.byteCountToDisplaySize( + sizeInBytes - (memoryAllocated.get() - memoryUsed.get()), + )}" + } + Thread.sleep(1000) + } catch (e: InterruptedException) { + throw RuntimeException(e) + } + } + logger.debug { memoryUsageMessage } + } + } + + val memoryUsageMessage: String + get() = + "State Manager memory usage: Allocated: ${FileUtils.byteCountToDisplaySize(memoryAllocated.get())}, Used: ${FileUtils.byteCountToDisplaySize(memoryUsed.get())}, percentage Used ${memoryUsed.get().toDouble() / memoryAllocated.get()}" + + private fun getStateAfterAlias(stateId: Long): Long { + return if (aliasIds.contains(stateId)) { + retroactiveGlobalStateId + } else { + stateId + } + } + + private fun registerNewStreamDescriptor(resolvedDescriptor: StreamDescriptor) { + synchronized(lock) { descToStateIdQ.put(resolvedDescriptor, LinkedBlockingDeque()) } + registerNewStateId(resolvedDescriptor) + } + + private fun registerNewStateId(resolvedDescriptor: StreamDescriptor) { + val stateId: Long = StateIdProvider.nextId + synchronized(lock) { + stateIdToCounter[stateId] = AtomicLong(0) + stateIdToCounterForPopulatingDestinationStats[stateId] = AtomicLong(0) + descToStateIdQ[resolvedDescriptor]!!.add(stateId) + } + } + + /** Simplify internal tracking by providing a global always increasing counter for state ids. */ + private object StateIdProvider { + private val pk: AtomicLong = AtomicLong(0) + + val nextId: Long + get() { + return pk.incrementAndGet() + } + } + + @JvmRecord + private data class StateMessageWithArrivalNumber( + val partialAirbyteStateMessage: PartialAirbyteMessage, + val arrivalNumber: Long, + ) + + companion object { + private val SENTINEL_GLOBAL_DESC: StreamDescriptor = + StreamDescriptor() + .withName( + UUID.randomUUID().toString(), + ) + + /** + * If the user has selected the Destination Namespace as the Destination default while + * setting up the connector, the platform sets the namespace as null in the StreamDescriptor + * in the AirbyteMessages (both record and state messages). The destination checks that if + * the namespace is empty or null, if yes then re-populates it with the defaultNamespace. + * See [io.airbyte.cdk.integrations.destination.async.AsyncStreamConsumer.accept] But + * destination only does this for the record messages. So when state messages arrive without + * a namespace and since the destination doesn't repopulate it with the default namespace, + * there is a mismatch between the StreamDescriptor from record messages and state messages. + * That breaks the logic of the state management class as [descToStateIdQ] needs to have + * consistent StreamDescriptor. This is why while trying to extract the StreamDescriptor + * from state messages, we check if the namespace is null, if yes then replace it with + * defaultNamespace to keep it consistent with the record messages. + */ + private fun extractStream( + message: PartialAirbyteMessage, + defaultNamespace: String, + ): Optional { + if ( + message.state?.type != null && + message.state?.type == AirbyteStateMessage.AirbyteStateType.STREAM + ) { + val streamDescriptor: StreamDescriptor? = message.state?.stream?.streamDescriptor + if (Strings.isNullOrEmpty(streamDescriptor?.namespace)) { + return Optional.of( + StreamDescriptor() + .withName( + streamDescriptor?.name, + ) + .withNamespace(defaultNamespace), + ) + } + return streamDescriptor?.let { Optional.of(it) } ?: Optional.empty() + } + return Optional.empty() + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/BufferedStreamConsumer.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/BufferedStreamConsumer.kt new file mode 100644 index 0000000000000..579eb05062420 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/BufferedStreamConsumer.kt @@ -0,0 +1,317 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.buffered_stream_consumer + +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.annotations.VisibleForTesting +import com.google.common.base.Preconditions +import com.google.common.base.Strings +import io.airbyte.cdk.integrations.base.AirbyteMessageConsumer +import io.airbyte.cdk.integrations.base.FailureTrackingAirbyteMessageConsumer +import io.airbyte.cdk.integrations.destination.dest_state_lifecycle_manager.DefaultDestStateLifecycleManager +import io.airbyte.cdk.integrations.destination.dest_state_lifecycle_manager.DestStateLifecycleManager +import io.airbyte.cdk.integrations.destination.record_buffer.BufferFlushType +import io.airbyte.cdk.integrations.destination.record_buffer.BufferingStrategy +import io.airbyte.commons.functional.CheckedFunction +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import java.time.Duration +import java.time.Instant +import java.util.function.Consumer +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * This class consumes AirbyteMessages from the worker. + * + * Record Messages: It adds record messages to a buffer. Under 2 conditions, it will flush the + * records in the buffer to a temporary table in the destination. Condition 1: The buffer fills up + * (the buffer is designed to be small enough as not to exceed the memory of the container). + * Condition 2: On close. + * + * State Messages: This consumer tracks the last state message it has accepted. It also tracks the + * last state message that was committed to the temporary table. For now, we only emit a message if + * everything is successful. Once checkpointing is turned on, we will emit the state message as long + * as the onClose successfully commits any messages to the raw table. + * + * All other message types are ignored. + * + * Throughout the lifecycle of the consumer, messages get promoted from buffered to flushed to + * committed. A record message when it is received is immediately buffered. When the buffer fills + * up, all buffered records are flushed out of memory using the user-provided recordBuffer. When + * this flush happens, a state message is moved from pending to flushed. On close, if the + * user-provided onClose function is successful, then the flushed state record is considered + * committed and is then emitted. We expect this class to only ever emit either 1 state message (in + * the case of a full or partial success) or 0 state messages (in the case where the onClose step + * was never reached or did not complete without exception). + * + * When a record is "flushed" it is moved from the docker container to the destination. By + * convention, it is usually placed in some sort of temporary storage on the destination (e.g. a + * temporary database or file store). The logic in close handles committing the temporary + * representation data to the final store (e.g. final table). In the case of staging destinations + * they often have additional temporary stores. The common pattern for staging destination is that + * flush pushes the data into a staging area in cloud storage and then close copies from staging to + * a temporary table AND then copies from the temporary table into the final table. This abstraction + * is blind to the detail of how staging destinations implement their close. + */ +class BufferedStreamConsumer +@VisibleForTesting +internal constructor( + private val outputRecordCollector: Consumer, + private val onStart: OnStartFunction, + private val bufferingStrategy: BufferingStrategy, + private val onClose: OnCloseFunction, + private val catalog: ConfiguredAirbyteCatalog?, + private val isValidRecord: CheckedFunction, + private val bufferFlushFrequency: Duration, + private val defaultNamespace: String? +) : FailureTrackingAirbyteMessageConsumer(), AirbyteMessageConsumer { + private val streamNames: Set = + AirbyteStreamNameNamespacePair.fromConfiguredCatalog(catalog) + private val streamToIgnoredRecordCount: MutableMap = + HashMap() + private val stateManager: DestStateLifecycleManager = + DefaultDestStateLifecycleManager(defaultNamespace) + + private var hasStarted = false + private var hasClosed = false + + private var nextFlushDeadline: Instant? = null + + /** + * Feel free to continue using this in non-1s1t destinations - it may be easier to use. However, + * 1s1t destinations should prefer the version which accepts a `defaultNamespace`. + */ + @Deprecated("") + constructor( + outputRecordCollector: Consumer, + onStart: OnStartFunction, + bufferingStrategy: BufferingStrategy, + onClose: OnCloseFunction, + catalog: ConfiguredAirbyteCatalog?, + isValidRecord: CheckedFunction + ) : this( + outputRecordCollector, + onStart, + bufferingStrategy, + onClose, + catalog, + isValidRecord, + Duration.ofMinutes( + 15 + ), // This is purely for backwards compatibility. Many older destinations handle this + // internally. + // Starting with Destinations V2, we recommend passing in an explicit namespace. + null + ) + + constructor( + outputRecordCollector: Consumer, + onStart: OnStartFunction, + bufferingStrategy: BufferingStrategy, + onClose: OnCloseFunction, + catalog: ConfiguredAirbyteCatalog?, + isValidRecord: CheckedFunction, + defaultNamespace: String? + ) : this( + outputRecordCollector, + onStart, + bufferingStrategy, + onClose, + catalog, + isValidRecord, + Duration.ofMinutes(15), + defaultNamespace + ) + + @Throws(Exception::class) + override fun startTracked() { + // todo (cgardens) - if we reuse this pattern, consider moving it into + // FailureTrackingConsumer. + Preconditions.checkState(!hasStarted, "Consumer has already been started.") + hasStarted = true + nextFlushDeadline = Instant.now().plus(bufferFlushFrequency) + streamToIgnoredRecordCount.clear() + LOGGER.info("{} started.", BufferedStreamConsumer::class.java) + onStart.call() + } + + /** + * AcceptTracked will still process AirbyteMessages as usual with the addition of periodically + * flushing buffer and writing data to destination storage + * + * @param message [AirbyteMessage] to be processed + * @throws Exception + */ + @Throws(Exception::class) + override fun acceptTracked(message: AirbyteMessage) { + Preconditions.checkState(hasStarted, "Cannot accept records until consumer has started") + if (message.type == AirbyteMessage.Type.RECORD) { + val record = message.record + if (Strings.isNullOrEmpty(record.namespace)) { + record.namespace = defaultNamespace + } + val stream = AirbyteStreamNameNamespacePair.fromRecordMessage(record) + + // if stream is not part of list of streams to sync to then throw invalid stream + // exception + if (!streamNames.contains(stream)) { + throwUnrecognizedStream(catalog, message) + } + + if (!isValidRecord.apply(record.data)!!) { + streamToIgnoredRecordCount[stream] = + streamToIgnoredRecordCount.getOrDefault(stream, 0L) + 1L + return + } + + val flushType = bufferingStrategy.addRecord(stream, message) + // if present means that a flush occurred + if (flushType!!.isPresent) { + if (BufferFlushType.FLUSH_ALL == flushType.get()) { + markStatesAsFlushedToDestination() + } else if (BufferFlushType.FLUSH_SINGLE_STREAM == flushType.get()) { + if (stateManager.supportsPerStreamFlush()) { + // per-stream instance can handle flush of just a single stream + markStatesAsFlushedToDestination(stream) + } + /* + * We don't mark {@link AirbyteStateMessage} as committed in the case with GLOBAL/LEGACY because + * within a single stream being flushed it is not deterministic that all the AirbyteRecordMessages + * have been committed + */ + } + } + } else if (message.type == AirbyteMessage.Type.STATE) { + stateManager.addState(message) + } else { + LOGGER.warn("Unexpected message: " + message.type) + } + periodicBufferFlush() + } + + /** + * After marking states as committed, return the state message to platform then clear state + * messages to avoid resending the same state message to the platform. Also updates the next + * time a buffer flush should occur since it is deterministic that when this method is called + * all data has been successfully committed to destination + */ + private fun markStatesAsFlushedToDestination() { + stateManager.markPendingAsCommitted() + stateManager.listCommitted()!!.forEach(outputRecordCollector) + stateManager.clearCommitted() + nextFlushDeadline = Instant.now().plus(bufferFlushFrequency) + } + + private fun markStatesAsFlushedToDestination(stream: AirbyteStreamNameNamespacePair) { + stateManager.markPendingAsCommitted(stream) + stateManager.listCommitted()!!.forEach(outputRecordCollector) + stateManager.clearCommitted() + nextFlushDeadline = Instant.now().plus(bufferFlushFrequency) + } + + /** + * Periodically flushes buffered data to destination storage when exceeding flush deadline. Also + * resets the last time a flush occurred + */ + @Throws(Exception::class) + private fun periodicBufferFlush() { + // When the last time the buffered has been flushed exceed the frequency, flush the current + // buffer before receiving incoming AirbyteMessage + if (Instant.now().isAfter(nextFlushDeadline)) { + LOGGER.info("Periodic buffer flush started") + try { + bufferingStrategy.flushAllBuffers() + markStatesAsFlushedToDestination() + } catch (e: Exception) { + LOGGER.error("Periodic buffer flush failed", e) + throw e + } + } + } + + /** + * Cleans up buffer based on whether the sync was successful or some exception occurred. In the + * case where a failure occurred we do a simple clean up any lingering data. Otherwise, flush + * any remaining data that has been stored. This is fine even if the state has not been received + * since this Airbyte promises at least once delivery + * + * @param hasFailed true if the stream replication failed partway through, false otherwise + * @throws Exception + */ + @Throws(Exception::class) + override fun close(hasFailed: Boolean) { + Preconditions.checkState(hasStarted, "Cannot close; has not started.") + Preconditions.checkState(!hasClosed, "Has already closed.") + hasClosed = true + + streamToIgnoredRecordCount.forEach { (pair: AirbyteStreamNameNamespacePair?, count: Long?) + -> + LOGGER.warn( + "A total of {} record(s) of data from stream {} were invalid and were ignored.", + count, + pair + ) + } + if (hasFailed) { + LOGGER.error("executing on failed close procedure.") + } else { + LOGGER.info("executing on success close procedure.") + // When flushing the buffer, this will call the respective #flushBufferFunction which + // bundles + // the flush and commit operation, so if successful then mark state as committed + bufferingStrategy.flushAllBuffers() + markStatesAsFlushedToDestination() + } + bufferingStrategy.close() + + try { + /* + * TODO: (ryankfu) Remove usage of hasFailed with onClose after all destination connectors have been + * updated to support checkpointing + * + * flushed is empty in 2 cases: 1. either it is full refresh (no state is emitted necessarily) 2. it + * is stream but no states were flushed in both of these cases, if there was a failure, we should + * not bother committing. otherwise attempt to commit + */ + if (stateManager.listFlushed()!!.isEmpty()) { + // Not updating this class to track record count, because we want to kill it in + // favor of the + // AsyncStreamConsumer + onClose.accept(hasFailed, HashMap()) + } else { + /* + * if any state message was flushed that means we should try to commit what we have. if + * hasFailed=false, then it could be full success. if hasFailed=true, then going for partial + * success. + */ + onClose.accept(false, null) + } + + stateManager.listCommitted()!!.forEach(outputRecordCollector) + } catch (e: Exception) { + LOGGER.error("Close failed.", e) + throw e + } + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(BufferedStreamConsumer::class.java) + + private fun throwUnrecognizedStream( + catalog: ConfiguredAirbyteCatalog?, + message: AirbyteMessage + ) { + throw IllegalArgumentException( + String.format( + "Message contained record from a stream that was not in the catalog. \ncatalog: %s , \nmessage: %s", + Jsons.serialize(catalog), + Jsons.serialize(message) + ) + ) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/CheckAndRemoveRecordWriter.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/CheckAndRemoveRecordWriter.kt new file mode 100644 index 0000000000000..3934b9659c4e1 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/CheckAndRemoveRecordWriter.kt @@ -0,0 +1,16 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.buffered_stream_consumer + +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair + +fun interface CheckAndRemoveRecordWriter { + /** + * Compares the name of the current staging file with the method argument. If the names are + * different, then the staging writer corresponding to `stagingFileName` is closed and the name + * of the new file where the record will be sent will be returned. + */ + @Throws(Exception::class) + fun apply(stream: AirbyteStreamNameNamespacePair?, stagingFileName: String?): String? +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/OnCloseFunction.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/OnCloseFunction.kt new file mode 100644 index 0000000000000..eda44c7d15534 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/OnCloseFunction.kt @@ -0,0 +1,19 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.buffered_stream_consumer + +import io.airbyte.cdk.integrations.destination.StreamSyncSummary +import io.airbyte.commons.functional.CheckedBiConsumer +import io.airbyte.protocol.models.v0.StreamDescriptor + +/** + * Interface allowing destination to specify clean up logic that must be executed after all + * record-related logic has finished. + * + * The map of StreamSyncSummaries MUST be non-null, but MAY be empty. Streams not present in the map + * will be treated as equivalent to [StreamSyncSummary.DEFAULT]. + */ +fun interface OnCloseFunction : + CheckedBiConsumer, Exception> diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/OnStartFunction.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/OnStartFunction.kt new file mode 100644 index 0000000000000..d4c0be975553a --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/OnStartFunction.kt @@ -0,0 +1,9 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.buffered_stream_consumer + +import io.airbyte.commons.concurrency.VoidCallable + +fun interface OnStartFunction : VoidCallable diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/RecordSizeEstimator.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/RecordSizeEstimator.kt new file mode 100644 index 0000000000000..22f24f3ee1e68 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/RecordSizeEstimator.kt @@ -0,0 +1,64 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.buffered_stream_consumer + +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.annotations.VisibleForTesting +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.v0.AirbyteRecordMessage + +/** + * This class estimate the byte size of the record message. To reduce memory footprint, 1) it + * assumes that a character is always four bytes, and 2) it only performs a sampling every N + * records. The size of the samples are averaged together to protect the estimation against + * outliers. + */ +class RecordSizeEstimator +@JvmOverloads +constructor( // number of record messages +private val sampleBatchSize: Int = DEFAULT_SAMPLE_BATCH_SIZE) { + // latest estimated record message size for each stream + private val streamRecordSizeEstimation: MutableMap = HashMap() + + // number of record messages until next real sampling for each stream + private val streamSampleCountdown: MutableMap = HashMap() + + fun getEstimatedByteSize(record: AirbyteRecordMessage): Long { + val stream = record.stream + val countdown = streamSampleCountdown[stream] + + // this is a new stream; initialize its estimation + if (countdown == null) { + val byteSize = getStringByteSize(record.data) + streamRecordSizeEstimation[stream] = byteSize + streamSampleCountdown[stream] = sampleBatchSize - 1 + return byteSize + } + + // this stream needs update; compute a new estimation + if (countdown <= 0) { + val prevMeanByteSize = streamRecordSizeEstimation[stream]!! + val currentByteSize = getStringByteSize(record.data) + val newMeanByteSize = prevMeanByteSize / 2 + currentByteSize / 2 + streamRecordSizeEstimation[stream] = newMeanByteSize + streamSampleCountdown[stream] = sampleBatchSize - 1 + return newMeanByteSize + } + + // this stream does not need update; return current estimation + streamSampleCountdown[stream] = countdown - 1 + return streamRecordSizeEstimation[stream]!! + } + + companion object { + // by default, perform one estimation for every 20 records + private const val DEFAULT_SAMPLE_BATCH_SIZE = 20 + + @VisibleForTesting + fun getStringByteSize(data: JsonNode): Long { + // assume UTF-8 encoding, and each char is 4 bytes long + return Jsons.serialize(data).length * 4L + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/RecordWriter.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/RecordWriter.kt new file mode 100644 index 0000000000000..b0bf8a5700826 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/RecordWriter.kt @@ -0,0 +1,13 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.buffered_stream_consumer + +import io.airbyte.commons.functional.CheckedBiConsumer +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair + +fun interface RecordWriter : + CheckedBiConsumer, Exception> { + @Throws(Exception::class) + override fun accept(stream: AirbyteStreamNameNamespacePair, records: List) +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/StreamDateFormatter.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/StreamDateFormatter.kt new file mode 100644 index 0000000000000..5f8f40fd3811e --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/StreamDateFormatter.kt @@ -0,0 +1,11 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.buffered_stream_consumer + +import io.airbyte.protocol.models.v0.AirbyteMessage + +/** Allows specifying transformation logic from Airbyte Json to String. */ +interface StreamDateFormatter { + fun getFormattedDate(message: AirbyteMessage?): String? +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DefaultDestStateLifecycleManager.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DefaultDestStateLifecycleManager.kt new file mode 100644 index 0000000000000..738fb29b51afd --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DefaultDestStateLifecycleManager.kt @@ -0,0 +1,142 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.dest_state_lifecycle_manager + +import com.google.common.annotations.VisibleForTesting +import com.google.common.base.Preconditions +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.AirbyteStateMessage +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair +import java.util.* +import java.util.function.Supplier + +/** + * Detects the type of the state being received by anchoring on the first state type it sees. Fail + * if receives states of multiple types--each instance of this class can only support state messages + * of one type. The protocol specifies that a source should emit state messages of a single type + * during a sync, so a single instance of this manager is sufficient for a destination to track + * state during a sync. + * + * Strategy: Delegates state messages of each type to a StateManager that is appropriate to that + * state type. + * + * Per the protocol, if state type is not set, assumes the LEGACY state type. + */ +class DefaultDestStateLifecycleManager +@VisibleForTesting +internal constructor( + singleStateManager: DestStateLifecycleManager, + streamStateManager: DestStateLifecycleManager +) : DestStateLifecycleManager { + private var stateType: AirbyteStateMessage.AirbyteStateType? = null + + // allows us to delegate calls to the appropriate underlying state manager. + private val internalStateManagerSupplier = Supplier { + if ( + stateType == AirbyteStateMessage.AirbyteStateType.GLOBAL || + stateType == AirbyteStateMessage.AirbyteStateType.LEGACY || + stateType == null + ) { + return@Supplier singleStateManager + } else if (stateType == AirbyteStateMessage.AirbyteStateType.STREAM) { + return@Supplier streamStateManager + } else { + throw IllegalArgumentException("unrecognized state type") + } + } + + constructor( + defaultNamespace: String? + ) : this(DestSingleStateLifecycleManager(), DestStreamStateLifecycleManager(defaultNamespace)) + + override fun addState(message: AirbyteMessage) { + Preconditions.checkArgument( + message.type == AirbyteMessage.Type.STATE, + "Messages passed to State Manager must be of type STATE." + ) + Preconditions.checkArgument(isStateTypeCompatible(stateType, message.state.type)) + + setManagerStateTypeIfNotSet(message) + + internalStateManagerSupplier.get().addState(message) + } + + /** + * If the state type for the manager is not set, sets it using the state type from the message. + * If the type on the message is null, we assume it is LEGACY. After the first, state message is + * added to the manager, the state type is set and is immutable. + * + * @param message + * - state message whose state will be used if internal state type is not set + */ + private fun setManagerStateTypeIfNotSet(message: AirbyteMessage) { + // detect and set state type. + if (stateType == null) { + stateType = + if (message.state.type == null) { + AirbyteStateMessage.AirbyteStateType.LEGACY + } else { + message.state.type + } + } + } + + override fun markPendingAsFlushed() { + internalStateManagerSupplier.get().markPendingAsFlushed() + } + + override fun listFlushed(): Queue { + return internalStateManagerSupplier.get().listFlushed() + } + + override fun markFlushedAsCommitted() { + internalStateManagerSupplier.get().markFlushedAsCommitted() + } + + override fun markPendingAsCommitted() { + internalStateManagerSupplier.get().markPendingAsCommitted() + } + + override fun markPendingAsCommitted(stream: AirbyteStreamNameNamespacePair) { + internalStateManagerSupplier.get().markPendingAsCommitted(stream) + } + + override fun clearCommitted() { + internalStateManagerSupplier.get().clearCommitted() + } + + override fun listCommitted(): Queue? { + return internalStateManagerSupplier.get().listCommitted() + } + + override fun supportsPerStreamFlush(): Boolean { + return internalStateManagerSupplier.get().supportsPerStreamFlush() + } + + companion object { + /** + * Given the type of previously recorded state by the state manager, determines if a newly + * added state message's type is compatible. Based on the previously set state type, + * determines if a new one is compatible. If the previous state is null, any new state is + * compatible. If new state type is null, it should be treated as LEGACY. Thus, + * previousStateType == LEGACY and newStateType == null IS compatible. All other state types + * are compatible based on equality. + * + * @param previousStateType + * - state type previously recorded by the state manager + * @param newStateType + * - state message of a newly added message + * @return true if compatible, otherwise false + */ + private fun isStateTypeCompatible( + previousStateType: AirbyteStateMessage.AirbyteStateType?, + newStateType: AirbyteStateMessage.AirbyteStateType? + ): Boolean { + return previousStateType == null || + previousStateType == AirbyteStateMessage.AirbyteStateType.LEGACY && + newStateType == null || + previousStateType == newStateType + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DestSingleStateLifecycleManager.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DestSingleStateLifecycleManager.kt new file mode 100644 index 0000000000000..1e654c3f39c77 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DestSingleStateLifecycleManager.kt @@ -0,0 +1,85 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.dest_state_lifecycle_manager + +import com.google.common.annotations.VisibleForTesting +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair +import java.util.* +import java.util.List + +/** + * This [DestStateLifecycleManager] handles any state where there is a guarantee that any single + * state message represents the state for the ENTIRE connection. At the time of writing, GLOBAL and + * LEGACY state types are the state type that match this pattern. + * + * Does NOT store duplicates. Because each state message represents the entire state for the + * connection, it only stores (and emits) the LAST state it received at each phase. + */ +class DestSingleStateLifecycleManager : DestStateLifecycleManager { + private var lastPendingState: AirbyteMessage? = null + private var lastFlushedState: AirbyteMessage? = null + private var lastCommittedState: AirbyteMessage? = null + + override fun addState(message: AirbyteMessage) { + lastPendingState = message + } + + @VisibleForTesting + fun listPending(): Queue { + return stateMessageToQueue(lastPendingState) + } + + override fun markPendingAsFlushed() { + if (lastPendingState != null) { + lastFlushedState = lastPendingState + lastPendingState = null + } + } + + override fun listFlushed(): Queue { + return stateMessageToQueue(lastFlushedState) + } + + override fun markFlushedAsCommitted() { + if (lastFlushedState != null) { + lastCommittedState = lastFlushedState + lastFlushedState = null + } + } + + override fun clearCommitted() { + lastCommittedState = null + } + + override fun markPendingAsCommitted() { + if (lastPendingState != null) { + lastCommittedState = lastPendingState + lastPendingState = null + } + } + + override fun markPendingAsCommitted(stream: AirbyteStreamNameNamespacePair) { + // We declare supportsPerStreamFlush as false, so this method should never be called. + throw IllegalStateException( + "Committing a single stream state is not supported for this state type." + ) + } + + override fun listCommitted(): Queue? { + return stateMessageToQueue(lastCommittedState) + } + + override fun supportsPerStreamFlush(): Boolean { + return false + } + + companion object { + private fun stateMessageToQueue(stateMessage: AirbyteMessage?): Queue { + return LinkedList( + if (stateMessage == null) emptyList() else List.of(stateMessage) + ) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DestStateLifecycleManager.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DestStateLifecycleManager.kt new file mode 100644 index 0000000000000..d0f829916474c --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DestStateLifecycleManager.kt @@ -0,0 +1,82 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.dest_state_lifecycle_manager + +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair +import java.util.* + +/** + * This class manages the lifecycle of state message. It tracks state messages that are in 3 states: + * + * 1. pending - associated records have been accepted by the connector but has NOT been pushed to + * the destination + * 1. flushed - associated records have been flushed to tmp storage in the destination but have NOT + * been committed + * 1. committed - associated records have been committed + */ +interface DestStateLifecycleManager { + /** + * Accepts a state into the manager. The state starts in a pending state. + * + * @param message + * - airbyte message of type state + */ + fun addState(message: AirbyteMessage) + + /** + * Moves any tracked state messages that are currently pending to flushed. + * + * @Deprecated since destination checkpointing will be bundling flush & commit into the same + * operation + */ + fun markPendingAsFlushed() + + /** + * List all tracked state messages that are flushed. + * + * @return list of state messages + */ + fun listFlushed(): Queue + + /** + * Moves any tracked state messages that are currently flushed to committed. + * + * @Deprecated since destination checkpointing will be bundling flush and commit into the same + * operation + */ + fun markFlushedAsCommitted() + + /** + * Clears any committed state messages, this is called after returning the state message to the + * platform. The rationale behind this logic is to avoid returning duplicated state messages + * that would otherwise be held in the `committed` state + */ + fun clearCommitted() + + /** + * Moves any tracked state messages that are currently pending to committed. + * + * Note: that this is skipping "flushed" state since flushed meant that this was using a staging + * area to hold onto files, for the changes with checkpointing this step is skipped. It follows + * under the guiding principle that destination needs to commit + * [io.airbyte.protocol.models.AirbyteRecordMessage] more frequently to checkpoint. The new + * transaction logic will be: + * + * Buffer -(flush)-> Staging (Blob Storage) -(commit to airbyte_raw)-> Destination table + */ + fun markPendingAsCommitted() + + /** Mark all pending states for the given stream as committed. */ + fun markPendingAsCommitted(stream: AirbyteStreamNameNamespacePair) + + /** + * List all tracked state messages that are committed. + * + * @return list of state messages + */ + fun listCommitted(): Queue? + + fun supportsPerStreamFlush(): Boolean +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DestStreamStateLifecycleManager.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DestStreamStateLifecycleManager.kt new file mode 100644 index 0000000000000..278107786ef08 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DestStreamStateLifecycleManager.kt @@ -0,0 +1,172 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.dest_state_lifecycle_manager + +import com.google.common.annotations.VisibleForTesting +import com.google.common.base.Preconditions +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.AirbyteStateMessage +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair +import io.airbyte.protocol.models.v0.StreamDescriptor +import java.util.* +import java.util.function.Supplier +import java.util.stream.Collectors + +/** + * This [DestStateLifecycleManager] handles any state where the state messages are scoped by stream. + * In these cases, at each state of the process, it tracks the LAST state message for EACH stream + * (no duplicates!). + * + * Guaranteed to output state messages in order relative to other messages of the SAME state. Does + * NOT guarantee that state messages of different streams will be output in the order in which they + * were received. State messages across streams will be emitted in alphabetical order (primary sort + * on namespace, secondary on name). + */ +class DestStreamStateLifecycleManager(private val defaultNamespace: String?) : + DestStateLifecycleManager { + private val streamToLastPendingState: MutableMap = HashMap() + private val streamToLastFlushedState: MutableMap = HashMap() + private val streamToLastCommittedState: MutableMap = HashMap() + + override fun addState(message: AirbyteMessage) { + Preconditions.checkArgument( + message.state.type == AirbyteStateMessage.AirbyteStateType.STREAM + ) + val originalStreamId = message.state.stream.streamDescriptor + val actualStreamId: StreamDescriptor + val namespace = originalStreamId.namespace + actualStreamId = + if (namespace == null || namespace.isEmpty()) { + // If the state's namespace is null/empty, we need to be able to find it using the + // default namespace + // (because many destinations actually set records' namespace to the default + // namespace before + // they make it into this class). + // Clone the streamdescriptor so that we don't modify the original state message. + StreamDescriptor().withName(originalStreamId.name).withNamespace(defaultNamespace) + } else { + originalStreamId + } + streamToLastPendingState[actualStreamId] = message + } + + @VisibleForTesting + fun listPending(): Queue { + return listStatesInOrder(streamToLastPendingState) + } + + /* + * Similar to #markFlushedAsCommmitted, this method should no longer be used to align with the + * changes to destination checkpointing where flush/commit operations will be bundled + */ + @Deprecated("") + override fun markPendingAsFlushed() { + moveToNextPhase(streamToLastPendingState, streamToLastFlushedState) + } + + override fun listFlushed(): Queue { + return listStatesInOrder(streamToLastFlushedState) + } + + /* + * During the process of migration to destination checkpointing, this method should no longer be in + * use in favor of #markPendingAsCommitted where states will be flushed/committed as a singular + * transaction + */ + @Deprecated("") + override fun markFlushedAsCommitted() { + moveToNextPhase(streamToLastFlushedState, streamToLastCommittedState) + } + + override fun clearCommitted() { + streamToLastCommittedState.clear() + } + + override fun markPendingAsCommitted() { + moveToNextPhase(streamToLastPendingState, streamToLastCommittedState) + } + + override fun markPendingAsCommitted(stream: AirbyteStreamNameNamespacePair) { + // streamToLastCommittedState is keyed using defaultNamespace instead of namespace=null. + // (see + // #addState) + // Many destinations actually modify the records' namespace immediately after reading them + // from + // stdin, + // but we should have a null-check here just in case. + val actualNamespace = if (stream.namespace == null) defaultNamespace else stream.namespace + val sd = StreamDescriptor().withName(stream.name).withNamespace(actualNamespace) + val lastPendingState = streamToLastPendingState.remove(sd) + if (lastPendingState != null) { + streamToLastCommittedState[sd] = lastPendingState + } + } + + override fun listCommitted(): Queue { + return listStatesInOrder(streamToLastCommittedState) + } + + override fun supportsPerStreamFlush(): Boolean { + return true + } + + companion object { + /** + * Lists out the states in the stream to state maps. Guarantees a deterministic sort order, + * which is handy because we are going from a map (unsorted) to a queue. The sort order + * primary sort on namespace (with null at the top) followed by secondary sort on name. This + * maps onto the pretty common order that we list streams elsewhere. + * + * @param streamToState + * - map of stream descriptor to its last state + * @return queue with the states ordered per the sort mentioned above + */ + private fun listStatesInOrder( + streamToState: Map + ): Queue { + return streamToState.entries + .stream() // typically, we support by namespace and then stream name, so we retain + // that pattern here. + .sorted( + Comparator.comparing, String>( + { entry: Map.Entry -> + entry.key.namespace + }, + Comparator.nullsFirst(Comparator.naturalOrder()) + ) // namespace is allowed to be null + .thenComparing { entry: Map.Entry + -> + entry.key.name + } + ) + .map { obj: Map.Entry -> + obj.value + } + .collect( + Collectors.toCollection>( + Supplier> { LinkedList() } + ) + ) + } + + /** + * Moves all state messages from previous phase into next phase. + * + * @param prevPhase + * - map of stream to state messages for previous phase that will be moved to next phase. + * when this method returns this map will be empty. + * @param nextPhase + * - map into which state messages from prevPhase will be added. + */ + private fun moveToNextPhase( + prevPhase: MutableMap, + nextPhase: MutableMap + ) { + if (!prevPhase.isEmpty()) { + nextPhase.putAll(prevPhase) + prevPhase.clear() + } + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/jdbc/SqlOperations.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/jdbc/SqlOperations.kt new file mode 100644 index 0000000000000..a11bcfb447431 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/jdbc/SqlOperations.kt @@ -0,0 +1,145 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.jdbc + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.db.jdbc.JdbcDatabase +import io.airbyte.cdk.integrations.destination.async.model.PartialAirbyteMessage +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * SQL queries required for successfully syncing to a destination connector. These operations + * include the ability to: + * + * * Write - insert records from source connector + * * Create - overloaded function but primarily to create tables if they don't exist (e.g. tmp + * tables to "stage" records before finalizing to final table + * * Drop - removes a table from the schema + * * Insert - move data from one table to another table - usually used for inserting data from tmp + * to final table (aka airbyte_raw) + */ +interface SqlOperations { + /** + * Create a schema with provided name if it does not already exist. + * + * @param database Database that the connector is syncing + * @param schemaName Name of schema. + * @throws Exception exception + */ + @Throws(Exception::class) + fun createSchemaIfNotExists(database: JdbcDatabase?, schemaName: String?) + + /** + * Denotes whether the schema exists in destination database + * + * @param database Database that the connector is syncing + * @param schemaName Name of schema. + * @return true if the schema exists in destination database, false if it doesn't + */ + @Throws(Exception::class) + fun isSchemaExists(database: JdbcDatabase?, schemaName: String?): Boolean { + return false + } + + /** + * Create a table with provided name in provided schema if it does not already exist. + * + * @param database Database that the connector is syncing + * @param schemaName Name of schema + * @param tableName Name of table + * @throws Exception exception + */ + @Throws(Exception::class) + fun createTableIfNotExists(database: JdbcDatabase, schemaName: String?, tableName: String?) + + /** + * Query to create a table with provided name in provided schema if it does not already exist. + * + * @param database Database that the connector is syncing + * @param schemaName Name of schema + * @param tableName Name of table + * @return query + */ + fun createTableQuery(database: JdbcDatabase?, schemaName: String?, tableName: String?): String? + + /** + * Drop the table if it exists. + * + * @param schemaName Name of schema + * @param tableName Name of table + * @throws Exception exception + */ + @Throws(Exception::class) + fun dropTableIfExists(database: JdbcDatabase, schemaName: String?, tableName: String?) + + /** + * Query to remove all records from a table. Assumes the table exists. + * + * @param database Database that the connector is syncing + * @param schemaName Name of schema + * @param tableName Name of table + * @return Query + */ + fun truncateTableQuery(database: JdbcDatabase?, schemaName: String?, tableName: String?): String + + /** + * Insert records into table. Assumes the table exists. + * + * @param database Database that the connector is syncing + * @param records Records to insert. + * @param schemaName Name of schema + * @param tableName Name of table + * @throws Exception exception + */ + @Throws(Exception::class) + fun insertRecords( + database: JdbcDatabase, + records: List, + schemaName: String?, + tableName: String? + ) + + /** + * Query to insert all records from source table to destination table. Both tables must be in + * the specified schema. Assumes both table exist. + * + * NOTE: this is an append-only operation meaning that data can be duplicated + * + * @param database Database that the connector is syncing + * @param schemaName Name of schema + * @param sourceTableName Name of source table + * @param destinationTableName Name of destination table + * @return SQL Query string + */ + fun insertTableQuery( + database: JdbcDatabase?, + schemaName: String?, + sourceTableName: String?, + destinationTableName: String? + ): String? + + /** + * Given an arbitrary number of queries, execute a transaction. + * + * @param database Database that the connector is syncing + * @param queries Queries to execute + * @throws Exception exception + */ + @Throws(Exception::class) fun executeTransaction(database: JdbcDatabase, queries: List) + + /** Check if the data record is valid and ok to be written to destination */ + fun isValidData(data: JsonNode?): Boolean + + /** + * Denotes whether the destination has the concept of schema or not + * + * @return true if the destination supports schema (ex: Postgres), false if it doesn't(MySQL) + */ + val isSchemaRequired: Boolean + + companion object { + val LOGGER: Logger = LoggerFactory.getLogger(SqlOperations::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/jdbc/StagingFilenameGenerator.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/jdbc/StagingFilenameGenerator.kt new file mode 100644 index 0000000000000..6ba2794115c6a --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/jdbc/StagingFilenameGenerator.kt @@ -0,0 +1,48 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.jdbc + +import io.airbyte.cdk.integrations.destination.jdbc.constants.GlobalDataSizeConstants + +/** + * The staging file is uploaded to cloud storage in multiple parts. This class keeps track of the + * filename, and returns a new one when the old file has had enough parts. + */ +class StagingFilenameGenerator(private val streamName: String, chunkSize: Long) { + // the file suffix will change after the max number of file + // parts have been generated for the current suffix; + // its value starts from 0. + private var currentFileSuffix = 0 + + // the number of parts that have been generated for the current + // file suffix; its value range will be [1, maxPartsPerFile] + private var currentFileSuffixPartCount = 0 + + // This variable is responsible to set the size of chunks size (In MB). After chunks created in + // S3 or GCS they will be uploaded to Snowflake or Redshift. These service have some limitations + // for + // the uploading file. + // So we make the calculation to determine how many parts we can put to the single chunk file. + private val iterations: Long = GlobalDataSizeConstants.Companion.MAX_FILE_SIZE / chunkSize + + val stagingFilename: String + /** + * This method is assumed to be called whenever one part of a file is going to be created. + * The currentFileSuffix increments from 0. The currentFileSuffixPartCount cycles from 1 to + * maxPartsPerFile. + */ + get() { + if (currentFileSuffixPartCount < iterations) { + // when the number of parts for the file has not reached the max, + // keep using the same file (i.e. keep the suffix) + currentFileSuffixPartCount += 1 + } else { + // otherwise, reset the part counter, and use a different file + // (i.e. update the suffix) + currentFileSuffix += 1 + currentFileSuffixPartCount = 1 + } + return String.format("%s_%05d", streamName, currentFileSuffix) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/jdbc/constants/GlobalDataSizeConstants.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/jdbc/constants/GlobalDataSizeConstants.kt new file mode 100644 index 0000000000000..b9c7675d8de06 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/jdbc/constants/GlobalDataSizeConstants.kt @@ -0,0 +1,24 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.jdbc.constants + +import io.aesy.datasize.ByteUnit +import io.aesy.datasize.DataSize + +interface GlobalDataSizeConstants { + companion object { + /** 25 MB to BYTES as comparison will be done in BYTES */ + @JvmField + val DEFAULT_MAX_BATCH_SIZE_BYTES: Int = + DataSize.of(25L, ByteUnit.IEC.MEBIBYTE).toUnit(ByteUnit.IEC.BYTE).value.toInt() + + /** + * This constant determines the max possible size of file(e.g. 100 MB / 25 megabytes ≈ 4 + * chunks of file) see StagingFilenameGenerator.java:28 + */ + @JvmField + val MAX_FILE_SIZE: Long = + DataSize.of(100L, ByteUnit.IEC.MEBIBYTE).toUnit(ByteUnit.IEC.BYTE).value.toLong() + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/jdbc/copy/StreamCopier.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/jdbc/copy/StreamCopier.kt new file mode 100644 index 0000000000000..bb8530eb46b7f --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/jdbc/copy/StreamCopier.kt @@ -0,0 +1,69 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.jdbc.copy + +import io.airbyte.protocol.models.v0.AirbyteRecordMessage +import java.util.* + +/** + * StreamCopier is responsible for writing to a staging persistence and providing methods to remove + * the staged data. + */ +interface StreamCopier { + /** Writes a value to a staging file for the stream. */ + @Throws(Exception::class) + fun write(id: UUID?, recordMessage: AirbyteRecordMessage?, fileName: String?) + + /** + * Closes the writer for the stream to the current staging file. The staging file must be of a + * certain size specified in GlobalDataSizeConstants + one more buffer. The writer for the + * stream will close with a note that no errors were found. + */ + @Throws(Exception::class) fun closeNonCurrentStagingFileWriters() + + /** + * Closes the writer for the stream to the staging persistence. This method should block until + * all buffered data has been written to the persistence. + */ + @Throws(Exception::class) fun closeStagingUploader(hasFailed: Boolean) + + /** Creates a temporary table in the target database. */ + @Throws(Exception::class) fun createTemporaryTable() + + /** + * Copies the staging file to the temporary table. This method should block until the + * copy/upload has completed. + */ + @Throws(Exception::class) fun copyStagingFileToTemporaryTable() + + /** Creates the destination schema if it does not already exist. */ + @Throws(Exception::class) fun createDestinationSchema() + + /** + * Creates the destination table if it does not already exist. + * + * @return the name of the destination table + */ + @Throws(Exception::class) fun createDestinationTable(): String? + + /** Generates a merge SQL statement from the temporary table to the final table. */ + @Throws(Exception::class) fun generateMergeStatement(destTableName: String?): String + + /** + * Cleans up the copier by removing the staging file and dropping the temporary table after + * completion or failure. + */ + @Throws(Exception::class) fun removeFileAndDropTmpTable() + + /** + * Creates the staging file and all the necessary items to write data to this file. + * + * @return A string that unqiuely identifies the file. E.g. the filename, or a unique suffix + * that is appended to a shared filename prefix + */ + fun prepareStagingFile(): String? + + /** @return current staging file name */ + val currentFile: String? +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/jdbc/copy/StreamCopierFactory.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/jdbc/copy/StreamCopierFactory.kt new file mode 100644 index 0000000000000..6d0fba6ea55e1 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/jdbc/copy/StreamCopierFactory.kt @@ -0,0 +1,36 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.jdbc.copy + +import io.airbyte.cdk.db.jdbc.JdbcDatabase +import io.airbyte.cdk.integrations.destination.StandardNameTransformer +import io.airbyte.cdk.integrations.destination.jdbc.SqlOperations +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream + +interface StreamCopierFactory { + fun create( + configuredSchema: String?, + config: T, + stagingFolder: String?, + configuredStream: ConfiguredAirbyteStream?, + nameTransformer: StandardNameTransformer?, + db: JdbcDatabase?, + sqlOperations: SqlOperations? + ): StreamCopier? + + companion object { + @JvmStatic + fun getSchema( + namespace: String?, + configuredSchema: String, + nameTransformer: StandardNameTransformer + ): String? { + return if (namespace != null) { + nameTransformer.convertStreamName(namespace) + } else { + nameTransformer.convertStreamName(configuredSchema) + } + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/normalization/NormalizationLogParser.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/normalization/NormalizationLogParser.kt new file mode 100644 index 0000000000000..1993b7e536f46 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/normalization/NormalizationLogParser.kt @@ -0,0 +1,154 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.normalization + +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.annotations.VisibleForTesting +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.AirbyteErrorTraceMessage +import io.airbyte.protocol.models.AirbyteLogMessage +import io.airbyte.protocol.models.AirbyteMessage +import io.airbyte.protocol.models.AirbyteTraceMessage +import java.io.* +import java.nio.charset.StandardCharsets +import java.util.stream.Stream +import org.apache.logging.log4j.util.Strings + +/** + * A simple wrapper for base-normalization logs. Reads messages off of stdin and sticks them into + * appropriate AirbyteMessages (log or trace), then dumps those messages to stdout + * + * does mostly the same thing as + * [io.airbyte.workers.normalization.NormalizationAirbyteStreamFactory]. That class is not actively + * developed, and will be deleted after all destinations run normalization in-connector. + * + * Aggregates all error logs and emits them as a single trace message at the end. If the underlying + * process emits any trace messages, they are passed through immediately. + */ +class NormalizationLogParser { + val dbtErrors: MutableList = ArrayList() + + fun create(bufferedReader: BufferedReader): Stream { + return bufferedReader.lines().flatMap { line: String -> this.toMessages(line) } + } + + @VisibleForTesting + fun toMessages(line: String): Stream { + if (Strings.isEmpty(line)) { + return Stream.of(logMessage(AirbyteLogMessage.Level.INFO, "")) + } + val json = Jsons.tryDeserializeWithoutWarn(line) + return if (json.isPresent) { + jsonToMessage(json.get()) + } else { + nonJsonLineToMessage(line) + } + } + + /** + * Wrap the line in an AirbyteLogMessage, and do very naive dbt error log detection. + * + * This is needed for dbt < 1.0.0, which don't support json-format logs. + */ + private fun nonJsonLineToMessage(line: String): Stream { + // Super hacky thing to try and detect error lines + if (line.contains("[error]")) { + dbtErrors.add(line) + } + return Stream.of(logMessage(AirbyteLogMessage.Level.INFO, line)) + } + + /** + * There are two cases here: Either the json is already an AirbyteMessage (and we should just + * emit it without change), or it's dbt json log, and we need to do some extra work to convert + * it to a log message + aggregate error logs. + */ + private fun jsonToMessage(jsonLine: JsonNode): Stream { + val message = Jsons.tryObject(jsonLine, AirbyteMessage::class.java) + if (message.isPresent) { + // This line is already an AirbyteMessage; we can just return it directly + // (these messages come from the transform_config / transform_catalog scripts) + return message.stream() + } else { + /* + * This line is a JSON-format dbt log. We need to extract the message and wrap it in a logmessage + * And if it's an error, we also need to collect it into dbtErrors. Example log message, formatted + * for readability: { "code": "A001", "data": { "v": "=1.0.9" }, "invocation_id": + * "3f9a0b9f-9623-4c25-8708-1f6ae851e738", "level": "info", "log_version": 1, "msg": + * "Running with dbt=1.0.9", "node_info": {}, "pid": 65, "thread_name": "MainThread", "ts": + * "2023-04-12T21:03:23.079315Z", "type": "log_line" } + */ + val logLevel = if ((jsonLine.hasNonNull("level"))) jsonLine["level"].asText() else "" + var logMsg = if (jsonLine.hasNonNull("msg")) jsonLine["msg"].asText() else "" + val level: AirbyteLogMessage.Level + when (logLevel) { + "debug" -> level = AirbyteLogMessage.Level.DEBUG + "info" -> level = AirbyteLogMessage.Level.INFO + "warn" -> level = AirbyteLogMessage.Level.WARN + "error" -> { + // This is also not _amazing_, but we make the assumption that all error logs + // should be emitted in + // the trace message + // In practice, this seems to be a valid assumption. + level = AirbyteLogMessage.Level.ERROR + dbtErrors.add(logMsg) + } + else -> { + level = AirbyteLogMessage.Level.INFO + logMsg = jsonLine.toPrettyString() + } + } + return Stream.of(logMessage(level, logMsg)) + } + } + + companion object { + private fun logMessage(level: AirbyteLogMessage.Level, message: String): AirbyteMessage { + return AirbyteMessage() + .withType(AirbyteMessage.Type.LOG) + .withLog(AirbyteLogMessage().withLevel(level).withMessage(message)) + } + + @JvmStatic + fun main(args: Array) { + val normalizationLogParser = NormalizationLogParser() + val airbyteMessageStream = + normalizationLogParser.create( + BufferedReader(InputStreamReader(System.`in`, StandardCharsets.UTF_8)) + ) + airbyteMessageStream.forEachOrdered { message: AirbyteMessage? -> + println(Jsons.serialize(message)) + } + + val errors = normalizationLogParser.dbtErrors + val dbtErrorStack = java.lang.String.join("\n", errors) + if ("" != dbtErrorStack) { + val errorMap = + SentryExceptionHelper.getUsefulErrorMessageAndTypeFromDbtError(dbtErrorStack) + val internalMessage = + errorMap!![SentryExceptionHelper.ErrorMapKeys.ERROR_MAP_MESSAGE_KEY] + val traceMessage = + AirbyteMessage() + .withType(AirbyteMessage.Type.TRACE) + .withTrace( + AirbyteTraceMessage() + .withType(AirbyteTraceMessage.Type.ERROR) + .withEmittedAt(System.currentTimeMillis().toDouble()) + .withError( + AirbyteErrorTraceMessage() + .withFailureType( + AirbyteErrorTraceMessage.FailureType.SYSTEM_ERROR + ) + .withMessage( + "Normalization failed during the dbt run. This may indicate a problem with the data itself." + ) + .withStackTrace("AirbyteDbtError: \n$dbtErrorStack") + .withInternalMessage(internalMessage) + ) + ) + println(Jsons.serialize(traceMessage)) + } + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/normalization/SentryExceptionHelper.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/normalization/SentryExceptionHelper.kt new file mode 100644 index 0000000000000..74548816238b3 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/normalization/SentryExceptionHelper.kt @@ -0,0 +1,174 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.normalization + +import java.util.* +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * This is copied out of platform + * (https://github.com/airbytehq/airbyte-platform/blob/main/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/errorreporter/SentryExceptionHelper.java#L257) + */ +object SentryExceptionHelper { + private val LOGGER: Logger = LoggerFactory.getLogger(SentryExceptionHelper::class.java) + + fun getUsefulErrorMessageAndTypeFromDbtError(stacktrace: String): Map { + // the dbt 'stacktrace' is really just all the log messages at 'error' level, stuck + // together. + // therefore there is not a totally consistent structure to these, + // see the docs: https://docs.getdbt.com/guides/legacy/debugging-errors + // the logic below is built based on the ~450 unique dbt errors we encountered before this + // PR + // and is a best effort to isolate the useful part of the error logs for debugging and + // grouping + // and bring some semblance of exception 'types' to differentiate between errors. + val errorMessageAndType: MutableMap = HashMap() + val stacktraceLines = + stacktrace.split("\n".toRegex()).dropLastWhile { it.isEmpty() }.toTypedArray() + + var defaultNextLine = false + // TODO: this whole code block is quite ugh, commented to try and make each part clear but + // could be + // much more readable. + mainLoop@ for (i in stacktraceLines.indices) { + // This order is important due to how these errors can co-occur. + // This order attempts to keep error definitions consistent based on our observations of + // possible + // dbt error structures. + try { + // Database Errors + if (stacktraceLines[i].contains("Database Error in model")) { + // Database Error : SQL compilation error + if (stacktraceLines[i + 1].contains("SQL compilation error")) { + errorMessageAndType[ErrorMapKeys.ERROR_MAP_MESSAGE_KEY] = + String.format( + "%s %s", + stacktraceLines[i + 1].trim { it <= ' ' }, + stacktraceLines[i + 2].trim { it <= ' ' } + ) + errorMessageAndType[ErrorMapKeys.ERROR_MAP_TYPE_KEY] = + "DbtDatabaseSQLCompilationError" + break + // Database Error: Invalid input + } else if (stacktraceLines[i + 1].contains("Invalid input")) { + for (followingLine in + Arrays.copyOfRange( + stacktraceLines, + i + 1, + stacktraceLines.size + )) { + if (followingLine.trim { it <= ' ' }.startsWith("context:")) { + errorMessageAndType[ErrorMapKeys.ERROR_MAP_MESSAGE_KEY] = + String.format( + "%s\n%s", + stacktraceLines[i + 1].trim { it <= ' ' }, + followingLine.trim { it <= ' ' } + ) + errorMessageAndType[ErrorMapKeys.ERROR_MAP_TYPE_KEY] = + "DbtDatabaseInvalidInputError" + break@mainLoop + } + } + // Database Error: Syntax error + } else if (stacktraceLines[i + 1].contains("syntax error at or near \"")) { + errorMessageAndType[ErrorMapKeys.ERROR_MAP_MESSAGE_KEY] = + String.format( + "%s\n%s", + stacktraceLines[i + 1].trim { it <= ' ' }, + stacktraceLines[i + 2].trim { it <= ' ' } + ) + errorMessageAndType[ErrorMapKeys.ERROR_MAP_TYPE_KEY] = + "DbtDatabaseSyntaxError" + break + // Database Error: default + } else { + errorMessageAndType[ErrorMapKeys.ERROR_MAP_TYPE_KEY] = "DbtDatabaseError" + defaultNextLine = true + } + // Unhandled Error + } else if (stacktraceLines[i].contains("Unhandled error while executing model")) { + errorMessageAndType[ErrorMapKeys.ERROR_MAP_TYPE_KEY] = "DbtUnhandledError" + defaultNextLine = true + // Compilation Errors + } else if (stacktraceLines[i].contains("Compilation Error")) { + // Compilation Error: Ambiguous Relation + if ( + stacktraceLines[i + 1].contains( + "When searching for a relation, dbt found an approximate match." + ) + ) { + errorMessageAndType[ErrorMapKeys.ERROR_MAP_MESSAGE_KEY] = + String.format( + "%s %s", + stacktraceLines[i + 1].trim { it <= ' ' }, + stacktraceLines[i + 2].trim { it <= ' ' } + ) + errorMessageAndType[ErrorMapKeys.ERROR_MAP_TYPE_KEY] = + "DbtCompilationAmbiguousRelationError" + break + // Compilation Error: default + } else { + errorMessageAndType[ErrorMapKeys.ERROR_MAP_TYPE_KEY] = "DbtCompilationError" + defaultNextLine = true + } + // Runtime Errors + } else if (stacktraceLines[i].contains("Runtime Error")) { + // Runtime Error: Database error + for (followingLine in + Arrays.copyOfRange(stacktraceLines, i + 1, stacktraceLines.size)) { + if ("Database Error" == followingLine.trim { it <= ' ' }) { + errorMessageAndType[ErrorMapKeys.ERROR_MAP_MESSAGE_KEY] = + String.format( + "%s", + stacktraceLines[ + Arrays.stream(stacktraceLines) + .toList() + .indexOf(followingLine) + 1 + ] + .trim { it <= ' ' } + ) + errorMessageAndType[ErrorMapKeys.ERROR_MAP_TYPE_KEY] = + "DbtRuntimeDatabaseError" + break@mainLoop + } + } + // Runtime Error: default + errorMessageAndType[ErrorMapKeys.ERROR_MAP_TYPE_KEY] = "DbtRuntimeError" + defaultNextLine = true + // Database Error: formatted differently, catch last to avoid counting other + // types of errors as + // Database Error + } else if ("Database Error" == stacktraceLines[i].trim { it <= ' ' }) { + errorMessageAndType[ErrorMapKeys.ERROR_MAP_TYPE_KEY] = "DbtDatabaseError" + defaultNextLine = true + } + // handle the default case without repeating code + if (defaultNextLine) { + errorMessageAndType[ErrorMapKeys.ERROR_MAP_MESSAGE_KEY] = + stacktraceLines[i + 1].trim { it <= ' ' } + break + } + } catch (e: ArrayIndexOutOfBoundsException) { + // this means our logic is slightly off, our assumption of where error lines are is + // incorrect + LOGGER.warn( + "Failed trying to parse useful error message out of dbt error, defaulting to full stacktrace" + ) + } + } + if (errorMessageAndType.isEmpty()) { + // For anything we haven't caught, just return full stacktrace + errorMessageAndType[ErrorMapKeys.ERROR_MAP_MESSAGE_KEY] = stacktrace + errorMessageAndType[ErrorMapKeys.ERROR_MAP_TYPE_KEY] = "AirbyteDbtError" + } + return errorMessageAndType + } + + /** Keys to known error types. */ + enum class ErrorMapKeys { + ERROR_MAP_MESSAGE_KEY, + ERROR_MAP_TYPE_KEY + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/BaseSerializedBuffer.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/BaseSerializedBuffer.kt new file mode 100644 index 0000000000000..f76f26d2c13e8 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/BaseSerializedBuffer.kt @@ -0,0 +1,187 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.record_buffer + +import com.google.common.io.CountingOutputStream +import io.airbyte.protocol.models.v0.AirbyteRecordMessage +import java.io.* +import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream +import org.apache.commons.io.FileUtils +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * Base implementation of a [SerializableBuffer]. It is composed of a [BufferStorage] where the + * actual data is being stored in a serialized format. + * + * Such data format is defined by concrete implementation inheriting from this base abstract class. + * To do so, necessary methods on handling "writer" methods should be defined. This writer would + * take care of converting [AirbyteRecordMessage] into the serialized form of the data such as it + * can be stored in the outputStream of the [BufferStorage]. + */ +abstract class BaseSerializedBuffer +protected constructor(private val bufferStorage: BufferStorage) : SerializableBuffer { + private val byteCounter = CountingOutputStream(bufferStorage.getOutputStream()) + + private var useCompression = true + private var compressedBuffer: GzipCompressorOutputStream? = null + override var inputStream: InputStream? = null + private var isStarted = false + private var isClosed = false + + /** + * Initializes the writer objects such that it can now write to the downstream @param + * outputStream + */ + @Throws(Exception::class) protected abstract fun initWriter(outputStream: OutputStream) + + /** + * Transform the @param record into a serialized form of the data and writes it to the + * registered OutputStream provided when [BaseSerializedBuffer.initWriter] was called. + */ + @Deprecated("") + @Throws(IOException::class) + protected abstract fun writeRecord(record: AirbyteRecordMessage) + + /** + * TODO: (ryankfu) move destination to use serialized record string instead of passing entire + * AirbyteRecord + * + * @param recordString serialized record + * @param airbyteMetaString + * @param emittedAt timestamp of the record in milliseconds + * @throws IOException + */ + @Throws(IOException::class) + protected abstract fun writeRecord( + recordString: String, + airbyteMetaString: String, + emittedAt: Long + ) + + /** + * Stops the writer from receiving new data and prepares it for being finalized and converted + * into an InputStream to read from instead. This is used when flushing the buffer into some + * other destination. + */ + @Throws(IOException::class) protected abstract fun flushWriter() + + @Throws(IOException::class) protected abstract fun closeWriter() + + fun withCompression(useCompression: Boolean): SerializableBuffer { + if (!isStarted) { + this.useCompression = useCompression + return this + } + throw RuntimeException("Options should be configured before starting to write") + } + + @Throws(Exception::class) + override fun accept(record: AirbyteRecordMessage): Long { + if (!isStarted) { + if (useCompression) { + compressedBuffer = GzipCompressorOutputStream(byteCounter) + initWriter(compressedBuffer!!) + } else { + initWriter(byteCounter) + } + isStarted = true + } + if (inputStream == null && !isClosed) { + val startCount = byteCounter.count + writeRecord(record) + return byteCounter.count - startCount + } else { + throw IllegalCallerException("Buffer is already closed, it cannot accept more messages") + } + } + + @Throws(Exception::class) + override fun accept(recordString: String, airbyteMetaString: String, emittedAt: Long): Long { + if (!isStarted) { + if (useCompression) { + compressedBuffer = GzipCompressorOutputStream(byteCounter) + initWriter(compressedBuffer!!) + } else { + initWriter(byteCounter) + } + isStarted = true + } + if (inputStream == null && !isClosed) { + val startCount = byteCounter.count + writeRecord(recordString, airbyteMetaString, emittedAt) + return byteCounter.count - startCount + } else { + throw IllegalCallerException("Buffer is already closed, it cannot accept more messages") + } + } + + override val filename: String + @Throws(IOException::class) + get() { + if (useCompression && !bufferStorage.filename.endsWith(GZ_SUFFIX)) { + return bufferStorage.filename + GZ_SUFFIX + } + return bufferStorage.filename + } + + override val file: File? + @Throws(IOException::class) + get() { + if (useCompression && !bufferStorage.filename!!.endsWith(GZ_SUFFIX)) { + if (bufferStorage.file!!.renameTo(File(bufferStorage.filename + GZ_SUFFIX))) { + LOGGER.info("Renaming compressed file to include .gz file extension") + } + } + return bufferStorage.file + } + + @Throws(IOException::class) + protected fun convertToInputStream(): InputStream { + return bufferStorage.convertToInputStream() + } + + @Throws(IOException::class) + override fun flush() { + if (inputStream == null && !isClosed) { + flushWriter() + LOGGER.debug("Wrapping up compression and write GZIP trailer data.") + compressedBuffer?.flush() + compressedBuffer?.close() + closeWriter() + bufferStorage.close() + inputStream = convertToInputStream() + LOGGER.info( + "Finished writing data to {} ({})", + filename, + FileUtils.byteCountToDisplaySize(byteCounter.count) + ) + } + } + + override val byteCount: Long + get() = byteCounter.count + + @Throws(Exception::class) + override fun close() { + if (!isClosed) { + // inputStream can be null if the accept method encounters + // an error before inputStream is initialized + inputStream?.close() + bufferStorage.deleteFile() + isClosed = true + } + } + + override val maxTotalBufferSizeInBytes: Long = bufferStorage.maxTotalBufferSizeInBytes + + override val maxPerStreamBufferSizeInBytes: Long = bufferStorage.maxPerStreamBufferSizeInBytes + + override val maxConcurrentStreamsInBuffer: Int = bufferStorage.maxConcurrentStreamsInBuffer + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(BaseSerializedBuffer::class.java) + private const val GZ_SUFFIX = ".gz" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/BufferCreateFunction.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/BufferCreateFunction.kt new file mode 100644 index 0000000000000..d8b476b7ed717 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/BufferCreateFunction.kt @@ -0,0 +1,18 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.record_buffer + +import io.airbyte.commons.functional.CheckedBiFunction +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog + +fun interface BufferCreateFunction : + CheckedBiFunction< + AirbyteStreamNameNamespacePair, ConfiguredAirbyteCatalog, SerializableBuffer, Exception> { + @Throws(Exception::class) + override fun apply( + stream: AirbyteStreamNameNamespacePair, + configuredCatalog: ConfiguredAirbyteCatalog + ): SerializableBuffer? +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/BufferFlushType.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/BufferFlushType.kt new file mode 100644 index 0000000000000..1e5ecbb7e7d3f --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/BufferFlushType.kt @@ -0,0 +1,9 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.record_buffer + +enum class BufferFlushType { + FLUSH_ALL, + FLUSH_SINGLE_STREAM +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/BufferStorage.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/BufferStorage.kt new file mode 100644 index 0000000000000..2856fd7dc4e4e --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/BufferStorage.kt @@ -0,0 +1,58 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.record_buffer + +import java.io.* + +/** + * This interface abstract the actual object that is used to store incoming data being buffered. It + * could be a file, in-memory or some other objects. + * + * However, in order to be used as part of the [SerializableBuffer], this [BufferStorage] should + * implement some methods used to determine how to write into and read from the storage once we are + * done buffering + * + * Some easy methods for manipulating the storage viewed as a file or InputStream are therefore + * required. + * + * Depending on the implementation of the storage medium, it would also determine what storage + * limits are possible. + */ +interface BufferStorage { + @get:Throws(IOException::class) val filename: String + + @get:Throws(IOException::class) val file: File + + /** + * Once buffering has reached some limits, the storage stream should be turned into an + * InputStream. This method should assume we are not going to write to buffer anymore, and it is + * safe to convert to some other format to be read from now. + */ + @Throws(IOException::class) fun convertToInputStream(): InputStream + + @Throws(IOException::class) fun close() + + /** Cleans-up any file that was produced in the process of buffering (if any were produced) */ + @Throws(IOException::class) fun deleteFile() + + @Throws(IOException::class) fun getOutputStream(): OutputStream + + /* + * Depending on the implementation of the storage, methods below defined reasonable thresholds + * associated with using this kind of buffer storage. + * + * These could also be dynamically configured/tuned at runtime if needed (from user input for + * example?) + */ + /** @return How much storage should be used overall by all buffers */ + val maxTotalBufferSizeInBytes: Long + + /** + * @return How much storage should be used for a particular stream at a time before flushing it + */ + val maxPerStreamBufferSizeInBytes: Long + + /** @return How many concurrent buffers can be handled at once in parallel */ + val maxConcurrentStreamsInBuffer: Int +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/BufferingStrategy.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/BufferingStrategy.kt new file mode 100644 index 0000000000000..2ad4a3d652854 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/BufferingStrategy.kt @@ -0,0 +1,44 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.record_buffer + +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair +import java.util.* + +/** + * High-level interface used by + * [io.airbyte.cdk.integrations.destination.buffered_stream_consumer.BufferedStreamConsumer] + * + * A Record buffering strategy relies on the capacity available of underlying [SerializableBuffer] + * to determine what to do when consuming a new [AirbyteMessage] into the buffer. It also defines + * when to flush such buffers and how to empty them once they fill up. + */ +interface BufferingStrategy : AutoCloseable { + /** + * Add a new message to the buffer while consuming streams, also handles when a buffer flush + * when buffer has been filled + * + * @param stream stream associated with record + * @param message [AirbyteMessage] to be added to the buffer + * @return an optional value if a flushed occur with the respective flush type, otherwise an + * empty value means only a record was added + * @throws Exception throw on failure + */ + @Throws(Exception::class) + fun addRecord( + stream: AirbyteStreamNameNamespacePair, + message: AirbyteMessage + ): Optional + + /** Flush buffered messages in a buffer from a particular stream */ + @Throws(Exception::class) + fun flushSingleBuffer(stream: AirbyteStreamNameNamespacePair, buffer: SerializableBuffer) + + /** Flush all buffers that were buffering message data so far. */ + @Throws(Exception::class) fun flushAllBuffers() + + /** Removes all stream buffers. */ + @Throws(Exception::class) fun clear() +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/FileBuffer.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/FileBuffer.kt new file mode 100644 index 0000000000000..6b3d21fc0b723 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/FileBuffer.kt @@ -0,0 +1,127 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.record_buffer + +import java.io.* +import java.nio.file.Files +import java.util.* +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +class FileBuffer : BufferStorage { + private val fileExtension: String + private lateinit var tempFile: File + private var outputStream: OutputStream? = null + override val maxConcurrentStreamsInBuffer: Int + + // The per stream size limit is following recommendations from: + // https://docs.snowflake.com/en/user-guide/data-load-considerations-prepare.html#general-file-sizing-recommendations + // "To optimize the number of parallel operations for a load, + // we recommend aiming to produce data files roughly 100-250 MB (or larger) in size compressed." + override val maxPerStreamBufferSizeInBytes: Long = MAX_PER_STREAM_BUFFER_SIZE_BYTES + /* + * Other than the per-file size limit, we also limit the total size (which would limit how many + * concurrent streams we can buffer simultaneously too) Since this class is storing data on disk, + * the buffer size limits below are tied to the necessary disk storage space. + */ + override val maxTotalBufferSizeInBytes: Long = MAX_TOTAL_BUFFER_SIZE_BYTES + + constructor(fileExtension: String) { + this.fileExtension = fileExtension + this.maxConcurrentStreamsInBuffer = DEFAULT_MAX_CONCURRENT_STREAM_IN_BUFFER + } + + constructor(fileExtension: String, maxConcurrentStreams: Int) { + this.fileExtension = fileExtension + this.maxConcurrentStreamsInBuffer = maxConcurrentStreams + } + + @Throws(IOException::class) + override fun getOutputStream(): OutputStream { + if (outputStream == null || !::tempFile.isInitialized) { + tempFile = Files.createTempFile(UUID.randomUUID().toString(), fileExtension).toFile() + outputStream = BufferedOutputStream(FileOutputStream(tempFile)) + } + return outputStream!! + } + + @get:Throws(IOException::class) + override val filename: String + get() = file.name + + @get:Throws(IOException::class) + override val file: File + get() { + if (!::tempFile.isInitialized) { + getOutputStream() + } + return tempFile + } + + @Throws(IOException::class) + override fun convertToInputStream(): InputStream { + return FileInputStream(file) + } + + @Throws(IOException::class) + override fun close() { + outputStream?.close() + } + + @Throws(IOException::class) + override fun deleteFile() { + LOGGER.info("Deleting tempFile data {}", filename) + Files.deleteIfExists(file.toPath()) + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(FileBuffer::class.java) + + /* + * We limit number of stream being buffered simultaneously anyway (limit how many files are + * stored/open for writing) + * + * Note: This value can be tuned to increase performance with the tradeoff of increased memory usage + * (~31 MB per buffer). See {@link StreamTransferManager} + * + * For connections with interleaved data (e.g. Change Data Capture), having less buffers than the + * number of streams being synced will cause buffer thrashing where buffers will need to be flushed + * before another stream's buffer can be created. Increasing the default max will reduce likelihood + * of thrashing but not entirely eliminate unless number of buffers equals streams to be synced + */ + const val DEFAULT_MAX_CONCURRENT_STREAM_IN_BUFFER: Int = 10 + const val FILE_BUFFER_COUNT_KEY: String = "file_buffer_count" + + // This max is subject to change as no proper load testing has been done to verify the side + // effects + const val MAX_CONCURRENT_STREAM_IN_BUFFER: Int = 50 + + /* + * Use this soft cap as a guidance for customers to not exceed the recommended number of buffers + * which is 1 GB (total buffer size) / 31 MB (rough size of each buffer) ~= 32 buffers + */ + const val SOFT_CAP_CONCURRENT_STREAM_IN_BUFFER: Int = 20 + + // The per stream size limit is following recommendations from: + // https://docs.snowflake.com/en/user-guide/data-load-considerations-prepare.html#general-file-sizing-recommendations + // "To optimize the number of parallel operations for a load, + // we recommend aiming to produce data files roughly 100-250 MB (or larger) in size + // compressed." + @JvmStatic + val MAX_PER_STREAM_BUFFER_SIZE_BYTES: Long = + (200 * 1024 * 1024 // 200 MB + ) + .toLong() + /* + * Other than the per-file size limit, we also limit the total size (which would limit how many + * concurrent streams we can buffer simultaneously too) Since this class is storing data on disk, + * the buffer size limits below are tied to the necessary disk storage space. + */ + @JvmStatic + val MAX_TOTAL_BUFFER_SIZE_BYTES: Long = + (1024 * 1024 * 1024 // 1 GB + ) + .toLong() + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/FlushBufferFunction.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/FlushBufferFunction.kt new file mode 100644 index 0000000000000..7748ce1c7c435 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/FlushBufferFunction.kt @@ -0,0 +1,13 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.record_buffer + +import io.airbyte.commons.functional.CheckedBiConsumer +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair + +fun interface FlushBufferFunction : + CheckedBiConsumer { + @Throws(Exception::class) + override fun accept(stream: AirbyteStreamNameNamespacePair, buffer: SerializableBuffer) +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/InMemoryBuffer.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/InMemoryBuffer.kt new file mode 100644 index 0000000000000..05bf5c932ccaf --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/InMemoryBuffer.kt @@ -0,0 +1,70 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.record_buffer + +import io.airbyte.cdk.integrations.destination.jdbc.SqlOperations.Companion.LOGGER +import java.io.* +import java.nio.file.Files +import java.util.* + +/** + * Instead of storing buffered data on disk like the [FileBuffer], this [BufferStorage] accumulates + * message data in-memory instead. Thus, a bigger heap size would be required. + */ +class InMemoryBuffer(private val fileExtension: String) : BufferStorage { + private val byteBuffer = ByteArrayOutputStream() + private var tempFile: File? = null + override var filename: String = UUID.randomUUID().toString() + + // The per stream size limit is following recommendations from: + // https://docs.snowflake.com/en/user-guide/data-load-considerations-prepare.html#general-file-sizing-recommendations + // "To optimize the number of parallel operations for a load, + // we recommend aiming to produce data files roughly 100-250 MB (or larger) in size compressed." + override val maxPerStreamBufferSizeInBytes: Long = + (200 * 1024 * 1024 // 200 MB + ) + .toLong() + + // Other than the per-file size limit, we also limit the total size (which would limit how many + // concurrent streams we can buffer simultaneously too) + // Since this class is storing data in memory, the buffer size limits below are tied to the + // necessary RAM space. + override val maxTotalBufferSizeInBytes: Long = + (1024 * 1024 * 1024 // 1 GB + ) + .toLong() + + // we limit number of stream being buffered simultaneously anyway + override val maxConcurrentStreamsInBuffer: Int = 100 + + override fun getOutputStream(): OutputStream { + return byteBuffer + } + + @get:Throws(IOException::class) + override val file: File + get() { + val tempFile = this.tempFile ?: Files.createTempFile(filename, fileExtension).toFile() + this.tempFile = tempFile + return tempFile + } + + override fun convertToInputStream(): InputStream { + return ByteArrayInputStream(byteBuffer.toByteArray()) + } + + @Throws(IOException::class) + override fun close() { + byteBuffer.close() + } + + @Throws(IOException::class) + override fun deleteFile() { + var pathToDelete = tempFile?.toPath() + if (pathToDelete != null) { + LOGGER.info("Deleting tempFile data {}", filename) + Files.deleteIfExists(pathToDelete) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/InMemoryRecordBufferingStrategy.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/InMemoryRecordBufferingStrategy.kt new file mode 100644 index 0000000000000..635187a732b63 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/InMemoryRecordBufferingStrategy.kt @@ -0,0 +1,108 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.record_buffer + +import io.airbyte.cdk.integrations.destination.buffered_stream_consumer.CheckAndRemoveRecordWriter +import io.airbyte.cdk.integrations.destination.buffered_stream_consumer.RecordSizeEstimator +import io.airbyte.cdk.integrations.destination.buffered_stream_consumer.RecordWriter +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.AirbyteRecordMessage +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair +import java.util.* +import org.apache.commons.io.FileUtils +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * This is the default implementation of a [BufferStorage] to be backward compatible. Data is being + * buffered in a [<] as they are being consumed. + * + * This should be deprecated as we slowly move towards using [SerializedBufferingStrategy] instead. + */ +class InMemoryRecordBufferingStrategy( + private val recordWriter: RecordWriter, + private val checkAndRemoveRecordWriter: CheckAndRemoveRecordWriter?, + private val maxQueueSizeInBytes: Long +) : BufferingStrategy { + private var streamBuffer: + MutableMap> = + HashMap() + private var fileName: String? = null + + private val recordSizeEstimator = RecordSizeEstimator() + private var bufferSizeInBytes: Long = 0 + + constructor( + recordWriter: RecordWriter, + maxQueueSizeInBytes: Long + ) : this(recordWriter, null, maxQueueSizeInBytes) + + @Throws(Exception::class) + override fun addRecord( + stream: AirbyteStreamNameNamespacePair, + message: AirbyteMessage + ): Optional { + var flushed: Optional = Optional.empty() + + val messageSizeInBytes = recordSizeEstimator.getEstimatedByteSize(message.record) + if (bufferSizeInBytes + messageSizeInBytes > maxQueueSizeInBytes) { + flushAllBuffers() + flushed = Optional.of(BufferFlushType.FLUSH_ALL) + } + + val bufferedRecords = + streamBuffer.computeIfAbsent(stream) { k: AirbyteStreamNameNamespacePair? -> + ArrayList() + } + bufferedRecords.add(message.record) + bufferSizeInBytes += messageSizeInBytes + + return flushed + } + + @Throws(Exception::class) + override fun flushSingleBuffer( + stream: AirbyteStreamNameNamespacePair, + buffer: SerializableBuffer + ) { + LOGGER.info( + "Flushing single stream {}: {} records", + stream.name, + streamBuffer[stream]!!.size + ) + recordWriter.accept(stream, streamBuffer[stream]!!.toList()) + LOGGER.info("Flushing completed for {}", stream.name) + } + + @Throws(Exception::class) + override fun flushAllBuffers() { + for ((key, value) in streamBuffer) { + LOGGER.info( + "Flushing {}: {} records ({})", + key.name, + value.size, + FileUtils.byteCountToDisplaySize(bufferSizeInBytes) + ) + recordWriter.accept(key, value) + if (checkAndRemoveRecordWriter != null) { + fileName = checkAndRemoveRecordWriter.apply(key, fileName) + } + LOGGER.info("Flushing completed for {}", key.name) + } + close() + clear() + bufferSizeInBytes = 0 + } + + override fun clear() { + streamBuffer = HashMap() + } + + @Throws(Exception::class) override fun close() {} + + companion object { + private val LOGGER: Logger = + LoggerFactory.getLogger(InMemoryRecordBufferingStrategy::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/SerializableBuffer.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/SerializableBuffer.kt new file mode 100644 index 0000000000000..0469d30f1f5fa --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/SerializableBuffer.kt @@ -0,0 +1,75 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.record_buffer + +import io.airbyte.protocol.models.v0.AirbyteRecordMessage +import java.io.* + +/** + * A [SerializableBuffer] is designed to be used as part of a [SerializedBufferingStrategy]. + * + * It encapsulates the actual implementation of a buffer: both the medium storage (usually defined + * as part of [BufferStorage]. and the format of the serialized data when it is written to the + * buffer. + * + * A [BaseSerializedBuffer] class is provided, and should be the expected class to derive from when + * implementing a new format of buffer. The storage aspects are normally provided through + * composition of [BufferStorage]. + */ +interface SerializableBuffer : AutoCloseable { + /** + * Adds a [AirbyteRecordMessage] to the buffer and returns the size of the message in bytes + * + * @param record [AirbyteRecordMessage] to be added to buffer + * @return number of bytes written to the buffer + */ + @Deprecated("") @Throws(Exception::class) fun accept(record: AirbyteRecordMessage): Long + + /** + * TODO: (ryankfu) Move all destination connectors to pass the serialized record string instead + * of the entire AirbyteRecordMessage + * + * @param recordString serialized record + * @param airbyteMetaString The serialized airbyte_meta entry + * @param emittedAt timestamp of the record in milliseconds + * @return number of bytes written to the buffer + * @throws Exception + */ + @Throws(Exception::class) + fun accept(recordString: String, airbyteMetaString: String, emittedAt: Long): Long + + /** Flush a buffer implementation. */ + @Throws(Exception::class) fun flush() + + /** + * The buffer implementation should be keeping track of how many bytes it accumulated so far. If + * any flush events were triggered, the amount of bytes accumulated would also have been + * decreased accordingly. This method @return such statistics. + */ + val byteCount: Long + + @get:Throws(IOException::class) val filename: String + + @get:Throws(IOException::class) val file: File? + + @get:Throws(FileNotFoundException::class) val inputStream: InputStream? + + /* + * Depending on the implementation of the storage, methods below defined reasonable thresholds + * associated with using this kind of buffer implementation. + * + * These could also be dynamically configured/tuned at runtime if needed (from user input for + * example?) + */ + /** @return How much storage should be used overall by all buffers */ + val maxTotalBufferSizeInBytes: Long + + /** + * @return How much storage should be used for a particular stream at a time before flushing it + */ + val maxPerStreamBufferSizeInBytes: Long + + /** @return How many concurrent buffers can be handled at once in parallel */ + val maxConcurrentStreamsInBuffer: Int +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/SerializedBufferingStrategy.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/SerializedBufferingStrategy.kt new file mode 100644 index 0000000000000..bdb1112d81077 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/SerializedBufferingStrategy.kt @@ -0,0 +1,182 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.record_buffer + +import io.airbyte.cdk.integrations.util.ConnectorExceptionUtil +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import java.util.* +import org.apache.commons.io.FileUtils +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * Buffering Strategy used to convert [io.airbyte.protocol.models.AirbyteRecordMessage] into a + * stream of bytes to more readily save and transmit information + * + * This class is meant to be used in conjunction with [SerializableBuffer] + */ +class SerializedBufferingStrategy +/** + * Creates instance of Serialized Buffering Strategy used to handle the logic of flushing buffer + * with an associated buffer type + * + * @param onCreateBuffer type of buffer used upon creation + * @param catalog collection of [io.airbyte.protocol.models.ConfiguredAirbyteStream] + * @param onStreamFlush buffer flush logic used throughout the streaming of messages + */ +( + private val onCreateBuffer: BufferCreateFunction, + private val catalog: ConfiguredAirbyteCatalog, + private val onStreamFlush: FlushBufferFunction +) : BufferingStrategy { + private var allBuffers: MutableMap = + HashMap() + private var totalBufferSizeInBytes: Long = 0 + + /** + * Handles both adding records and when buffer is full to also flush + * + * @param stream stream associated with record + * @param message [AirbyteMessage] to buffer + * @return Optional which contains a [BufferFlushType] if a flush occurred, otherwise empty) + */ + @Throws(Exception::class) + override fun addRecord( + stream: AirbyteStreamNameNamespacePair, + message: AirbyteMessage + ): Optional { + var flushed: Optional = Optional.empty() + + val buffer = + getOrCreateBuffer(stream) + ?: throw RuntimeException( + String.format( + "Failed to create/get buffer for stream %s.%s", + stream.namespace, + stream.name + ) + ) + + val actualMessageSizeInBytes = buffer.accept(message.record) + totalBufferSizeInBytes += actualMessageSizeInBytes + // Flushes buffer when either the buffer was completely filled or only a single stream was + // filled + if ( + totalBufferSizeInBytes >= buffer.maxTotalBufferSizeInBytes || + allBuffers.size >= buffer.maxConcurrentStreamsInBuffer + ) { + flushAllBuffers() + flushed = Optional.of(BufferFlushType.FLUSH_ALL) + } else if (buffer.byteCount >= buffer.maxPerStreamBufferSizeInBytes) { + flushSingleBuffer(stream, buffer) + /* + * Note: This branch is needed to indicate to the {@link DefaultDestStateLifeCycleManager} that an + * individual stream was flushed, there is no guarantee that it will flush records in the same order + * that state messages were received. The outcome here is that records get flushed but our updating + * of which state messages have been flushed falls behind. + * + * This is not ideal from a checkpoint point of view, because it means in the case where there is a + * failure, we will not be able to report that those records that were flushed and committed were + * committed because there corresponding state messages weren't marked as flushed. Thus, it weakens + * checkpointing, but it does not cause a correctness issue. + * + * In non-failure cases, using this conditional branch relies on the state messages getting flushed + * by some other means. That can be caused by the previous branch in this conditional. It is + * guaranteed by the fact that we always flush all state messages at the end of a sync. + */ + flushed = Optional.of(BufferFlushType.FLUSH_SINGLE_STREAM) + } + return flushed + } + + /** + * Creates a new buffer for each stream if buffers do not already exist, else return already + * computed buffer + */ + private fun getOrCreateBuffer(stream: AirbyteStreamNameNamespacePair): SerializableBuffer { + return allBuffers.computeIfAbsent(stream) { k: AirbyteStreamNameNamespacePair? -> + LOGGER.info( + "Starting a new buffer for stream {} (current state: {} in {} buffers)", + stream.name, + FileUtils.byteCountToDisplaySize(totalBufferSizeInBytes), + allBuffers.size + ) + try { + return@computeIfAbsent onCreateBuffer.apply(stream, catalog)!! + } catch (e: Exception) { + LOGGER.error("Failed to create a new buffer for stream {}", stream.name, e) + throw RuntimeException(e) + } + } + } + + @Throws(Exception::class) + override fun flushSingleBuffer( + stream: AirbyteStreamNameNamespacePair, + buffer: SerializableBuffer + ) { + LOGGER.info( + "Flushing buffer of stream {} ({})", + stream.name, + FileUtils.byteCountToDisplaySize(buffer.byteCount) + ) + onStreamFlush.accept(stream, buffer) + totalBufferSizeInBytes -= buffer.byteCount + allBuffers.remove(stream) + LOGGER.info("Flushing completed for {}", stream.name) + } + + @Throws(Exception::class) + override fun flushAllBuffers() { + LOGGER.info( + "Flushing all {} current buffers ({} in total)", + allBuffers.size, + FileUtils.byteCountToDisplaySize(totalBufferSizeInBytes) + ) + for ((stream, buffer) in allBuffers) { + LOGGER.info( + "Flushing buffer of stream {} ({})", + stream.name, + FileUtils.byteCountToDisplaySize(buffer.byteCount) + ) + onStreamFlush.accept(stream, buffer) + LOGGER.info("Flushing completed for {}", stream.name) + } + close() + clear() + totalBufferSizeInBytes = 0 + } + + @Throws(Exception::class) + override fun clear() { + LOGGER.debug("Reset all buffers") + allBuffers = HashMap() + } + + @Throws(Exception::class) + override fun close() { + val exceptionsThrown: MutableList = ArrayList() + for ((stream, buffer) in allBuffers) { + try { + LOGGER.info("Closing buffer for stream {}", stream.name) + buffer.close() + } catch (e: Exception) { + exceptionsThrown.add(e) + LOGGER.error("Exception while closing stream buffer", e) + } + } + + ConnectorExceptionUtil.logAllAndThrowFirst( + "Exceptions thrown while closing buffers: ", + exceptionsThrown + ) + } + + companion object { + private val LOGGER: Logger = + LoggerFactory.getLogger(SerializedBufferingStrategy::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/staging/StagingOperations.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/staging/StagingOperations.kt new file mode 100644 index 0000000000000..ee84df4cf6320 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/destination/staging/StagingOperations.kt @@ -0,0 +1,96 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.staging + +import io.airbyte.cdk.db.jdbc.JdbcDatabase +import io.airbyte.cdk.integrations.destination.jdbc.SqlOperations +import io.airbyte.cdk.integrations.destination.record_buffer.SerializableBuffer +import java.time.Instant +import java.util.* + +/** + * Staging operations focuses on the SQL queries that are needed to success move data into a staging + * environment like GCS or S3. In general, the reference of staging is the usage of an object + * storage for the purposes of efficiently uploading bulk data to destinations + * + * TODO: This interface is shared between Snowflake and Redshift connectors where the staging + * mechanism is different wire protocol. Make the interface more Generic and have sub interfaces to + * support BlobStorageOperations or Jdbc based staging operations. + */ +interface StagingOperations : SqlOperations { + /** + * @param outputTableName The name of the table this staging file will be loaded into (typically + * a raw table). Not all destinations use the table name in the staging path (e.g. Snowflake + * simply uses a timestamp + UUID), but e.g. Redshift does rely on this to ensure uniqueness. + */ + fun getStagingPath( + connectionId: UUID?, + namespace: String?, + streamName: String?, + outputTableName: String?, + writeDatetime: Instant? + ): String? + + /** + * Returns the staging environment's name + * + * @param namespace Name of schema + * @param streamName Name of the stream + * @return Fully qualified name of the staging environment + */ + fun getStageName(namespace: String?, streamName: String?): String? + + /** + * Create a staging folder where to upload temporary files before loading into the final + * destination + */ + @Throws(Exception::class) + fun createStageIfNotExists(database: JdbcDatabase?, stageName: String?) + + /** + * Upload the data file into the stage area. + * + * @param database database used for syncing + * @param recordsData records stored in in-memory buffer + * @param schemaName name of schema + * @param stagingPath path of staging folder to data files + * @return the name of the file that was uploaded. + */ + @Throws(Exception::class) + fun uploadRecordsToStage( + database: JdbcDatabase?, + recordsData: SerializableBuffer?, + schemaName: String?, + stageName: String?, + stagingPath: String? + ): String? + + /** + * Load the data stored in the stage area into a temporary table in the destination + * + * @param database database interface + * @param stagingPath path to staging files + * @param stagedFiles collection of staged files + * @param tableName name of table to write staging files to + * @param schemaName name of schema + */ + @Throws(Exception::class) + fun copyIntoTableFromStage( + database: JdbcDatabase?, + stageName: String?, + stagingPath: String?, + stagedFiles: List?, + tableName: String?, + schemaName: String? + ) + + /** + * Delete the stage area and all staged files that was in it + * + * @param database database used for syncing + * @param stageName Name of the staging area used to store files + */ + @Throws(Exception::class) + fun dropStageIfExists(database: JdbcDatabase?, stageName: String?, stagingPath: String?) +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/util/ApmTraceUtils.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/util/ApmTraceUtils.kt new file mode 100644 index 0000000000000..ee4fd35e5d09e --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/util/ApmTraceUtils.kt @@ -0,0 +1,139 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.util + +import datadog.trace.api.DDTags +import datadog.trace.api.interceptor.MutableSpan +import io.opentracing.Span +import io.opentracing.log.Fields +import io.opentracing.tag.Tags +import io.opentracing.util.GlobalTracer +import java.io.* +import java.util.function.Consumer + +/** Collection of utility methods to help with performance tracing. */ +object ApmTraceUtils { + /** String format for the name of tags added to spans. */ + const val TAG_FORMAT: String = "airbyte.%s.%s" + + /** Standard prefix for tags added to spans. */ + const val TAG_PREFIX: String = "metadata" + + /** + * Adds all provided tags to the currently active span, if one exists, under the provided tag + * name namespace. + * + * @param tags A map of tags to be added to the currently active span. + * @param tagPrefix The prefix to be added to each custom tag name. + */ + /** + * Adds all the provided tags to the currently active span, if one exists.

    All tags + * added via this method will use the default [.TAG_PREFIX] namespace. + * + * @param tags A map of tags to be added to the currently active span. + */ + @JvmOverloads + fun addTagsToTrace(tags: Map, tagPrefix: String? = TAG_PREFIX) { + addTagsToTrace(GlobalTracer.get().activeSpan(), tags, tagPrefix) + } + + /** + * Adds all the provided tags to the provided span, if one exists. + * + * @param span The [Span] that will be associated with the tags. + * @param tags A map of tags to be added to the currently active span. + * @param tagPrefix The prefix to be added to each custom tag name. + */ + fun addTagsToTrace(span: Span?, tags: Map, tagPrefix: String?) { + if (span != null) { + tags.entries.forEach( + Consumer { entry: Map.Entry -> + span.setTag(formatTag(entry.key, tagPrefix), entry.value.toString()) + } + ) + } + } + + /** + * Adds an exception to the currently active span, if one exists. + * + * @param t The [Throwable] to be added to the currently active span. + */ + @JvmStatic + fun addExceptionToTrace(t: Throwable?) { + addExceptionToTrace(GlobalTracer.get().activeSpan(), t) + } + + /** + * Adds an exception to the provided span, if one exists. + * + * @param span The [Span] that will be associated with the exception. + * @param t The [Throwable] to be added to the provided span. + */ + fun addExceptionToTrace(span: Span?, t: Throwable?) { + if (span != null) { + span.setTag(Tags.ERROR, true) + span.log(java.util.Map.of(Fields.ERROR_OBJECT, t)) + } + } + + /** + * Adds all the provided tags to the root span. + * + * @param tags A map of tags to be added to the root span. + */ + fun addTagsToRootSpan(tags: Map) { + val activeSpan = GlobalTracer.get().activeSpan() + if (activeSpan is MutableSpan) { + val localRootSpan = (activeSpan as MutableSpan).localRootSpan + tags.entries.forEach( + Consumer { entry: Map.Entry -> + localRootSpan.setTag(formatTag(entry.key, TAG_PREFIX), entry.value.toString()) + } + ) + } + } + + /** + * Adds an exception to the root span, if an active one exists. + * + * @param t The [Throwable] to be added to the provided span. + */ + fun recordErrorOnRootSpan(t: Throwable) { + val activeSpan = GlobalTracer.get().activeSpan() + if (activeSpan != null) { + activeSpan.setTag(Tags.ERROR, true) + activeSpan.log(java.util.Map.of(Fields.ERROR_OBJECT, t)) + } + if (activeSpan is MutableSpan) { + val localRootSpan = (activeSpan as MutableSpan).localRootSpan + localRootSpan.setError(true) + localRootSpan.setTag(DDTags.ERROR_MSG, t.message) + localRootSpan.setTag(DDTags.ERROR_TYPE, t.javaClass.name) + val errorString = StringWriter() + t.printStackTrace(PrintWriter(errorString)) + localRootSpan.setTag(DDTags.ERROR_STACK, errorString.toString()) + } + } + + /** + * Formats the tag key using [.TAG_FORMAT] provided by this utility with the provided tag + * prefix. + * + * @param tagKey The tag key to format. + * @param tagPrefix The prefix to be added to each custom tag name. + * @return The formatted tag key. + */ + /** + * Formats the tag key using [.TAG_FORMAT] provided by this utility, using the default tag + * prefix [.TAG_PREFIX]. + * + * @param tagKey The tag key to format. + * @return The formatted tag key. + */ + @JvmOverloads + fun formatTag(tagKey: String?, tagPrefix: String? = TAG_PREFIX): String { + return String.format(TAG_FORMAT, tagPrefix, tagKey) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/util/ConfiguredCatalogUtil.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/util/ConfiguredCatalogUtil.kt similarity index 100% rename from airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/util/ConfiguredCatalogUtil.kt rename to airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/util/ConfiguredCatalogUtil.kt diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/util/ConnectorExceptionUtil.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/util/ConnectorExceptionUtil.kt new file mode 100644 index 0000000000000..c38c74a79d662 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/util/ConnectorExceptionUtil.kt @@ -0,0 +1,124 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.util + +import com.google.common.collect.ImmutableList +import io.airbyte.cdk.integrations.base.errors.messages.ErrorMessage +import io.airbyte.commons.exceptions.ConfigErrorException +import io.airbyte.commons.exceptions.ConnectionErrorException +import io.airbyte.commons.functional.Either +import java.sql.SQLException +import java.sql.SQLSyntaxErrorException +import java.util.stream.Collectors +import org.apache.commons.lang3.exception.ExceptionUtils +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** Utility class defining methods for handling configuration exceptions in connectors. */ +object ConnectorExceptionUtil { + private val LOGGER: Logger = LoggerFactory.getLogger(ConnectorExceptionUtil::class.java) + + const val COMMON_EXCEPTION_MESSAGE_TEMPLATE: String = + "Could not connect with provided configuration. Error: %s" + const val RECOVERY_CONNECTION_ERROR_MESSAGE: String = + "We're having issues syncing from a Postgres replica that is configured as a hot standby server. " + + "Please see https://go.airbyte.com/pg-hot-standby-error-message for options and workarounds" + + @JvmField val HTTP_AUTHENTICATION_ERROR_CODES: List = ImmutableList.of(401, 403) + + fun isConfigError(e: Throwable?): Boolean { + return isConfigErrorException(e) || + isConnectionError(e) || + isRecoveryConnectionException(e) || + isUnknownColumnInFieldListException(e) + } + + fun getDisplayMessage(e: Throwable?): String? { + return if (e is ConfigErrorException) { + e.displayMessage + } else if (e is ConnectionErrorException) { + ErrorMessage.getErrorMessage(e.stateCode, e.errorCode, e.exceptionMessage, e) + } else if (isRecoveryConnectionException(e)) { + RECOVERY_CONNECTION_ERROR_MESSAGE + } else if (isUnknownColumnInFieldListException(e)) { + e!!.message + } else { + String.format( + COMMON_EXCEPTION_MESSAGE_TEMPLATE, + if (e!!.message != null) e.message else "" + ) + } + } + + /** + * Returns the first instance of an exception associated with a configuration error (if it + * exists). Otherwise, the original exception is returned. + */ + fun getRootConfigError(e: Exception?): Throwable? { + var current: Throwable? = e + while (current != null) { + if (isConfigError(current)) { + return current + } else { + current = current.cause + } + } + return e + } + + /** + * Log all the exceptions, and rethrow the first. This is useful for e.g. running multiple + * futures and waiting for them to complete/fail. Rather than combining them into a single + * mega-exception (which works poorly in the UI), we just log all of them, and throw the first + * exception. + * + * In most cases, all the exceptions will look very similar, so the user only needs to see the + * first exception anyway. This mimics e.g. a for-loop over multiple tasks, where the loop would + * break on the first exception. + */ + @JvmStatic + fun logAllAndThrowFirst(initialMessage: String, throwables: Collection) { + if (!throwables.isEmpty()) { + val stacktraces = + throwables + .stream() + .map { throwable: Throwable? -> ExceptionUtils.getStackTrace(throwable) } + .collect(Collectors.joining("\n")) + LOGGER.error("$initialMessage$stacktraces\nRethrowing first exception.") + throw throwables.iterator().next() + } + } + + @JvmStatic + fun getResultsOrLogAndThrowFirst( + initialMessage: String, + eithers: List> + ): List { + val throwables: List = eithers.filter { it.isLeft() }.map { it.left!! }.toList() + if (throwables.isNotEmpty()) { + logAllAndThrowFirst(initialMessage, throwables) + } + // No need to filter on isRight since isLeft will throw before reaching this line. + return eithers.stream().map { obj: Either -> obj.right!! }.toList() + } + + private fun isConfigErrorException(e: Throwable?): Boolean { + return e is ConfigErrorException + } + + private fun isConnectionError(e: Throwable?): Boolean { + return e is ConnectionErrorException + } + + private fun isRecoveryConnectionException(e: Throwable?): Boolean { + return e is SQLException && + e.message!!.lowercase().contains("due to conflict with recovery") + } + + private fun isUnknownColumnInFieldListException(e: Throwable?): Boolean { + return (e is SQLSyntaxErrorException && + e.message!!.lowercase().contains("unknown column") && + e.message!!.lowercase().contains("in 'field list'")) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/util/concurrent/ConcurrentStreamConsumer.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/util/concurrent/ConcurrentStreamConsumer.kt new file mode 100644 index 0000000000000..a864ecfa2a3bb --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/integrations/util/concurrent/ConcurrentStreamConsumer.kt @@ -0,0 +1,256 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.util.concurrent + +import io.airbyte.cdk.integrations.base.AirbyteTraceMessageUtility +import io.airbyte.commons.stream.AirbyteStreamStatusHolder +import io.airbyte.commons.stream.StreamStatusUtils +import io.airbyte.commons.util.AutoCloseableIterator +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair +import io.airbyte.protocol.models.v0.AirbyteMessage +import java.util.* +import java.util.concurrent.* +import java.util.concurrent.ThreadPoolExecutor.AbortPolicy +import java.util.function.Consumer +import java.util.stream.Collectors +import kotlin.math.min +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * [Consumer] implementation that consumes [AirbyteMessage] records from each provided stream + * concurrently. + * + * The consumer calculates the parallelism based on the provided requested parallelism. If the + * requested parallelism is greater than zero, the minimum value between the requested parallelism + * and the maximum number of allowed threads is chosen as the parallelism value. Otherwise, the + * minimum parallelism value is selected. This is to avoid issues with attempting to execute with a + * parallelism value of zero, which is not allowed by the underlying [ExecutorService]. + * + * This consumer will capture any raised exceptions during execution of each stream. Anu exceptions + * are stored and made available by calling the [.getException] method. + */ +class ConcurrentStreamConsumer( + streamConsumer: Consumer>, + requestedParallelism: Int +) : Consumer>>, AutoCloseable { + private val executorService: ExecutorService + private val exceptions: MutableList + /** + * the parallelism value that will be used by this consumer to execute the consumption of data + * from the provided streams in parallel. + * + * @return The parallelism value of this consumer. + */ + val parallelism: Int + private val streamConsumer: Consumer> + private val streamStatusEmitter = + Optional.of( + Consumer { obj: AirbyteStreamStatusHolder -> + AirbyteTraceMessageUtility.emitStreamStatusTrace(obj) + } + ) + + /** + * Constructs a new [ConcurrentStreamConsumer] that will use the provided stream consumer to + * execute each stream submitted to the [<][.accept] method of this consumer. Streams + * submitted to the [<][.accept] method will be converted to a [Runnable] and executed on an + * [ExecutorService] configured by this consumer to ensure concurrent execution of each stream. + * + * @param streamConsumer The [Consumer] that accepts streams as an [AutoCloseableIterator]. + * @param requestedParallelism The requested amount of parallelism that will be used as a hint + * to determine the appropriate number of threads to execute concurrently. + */ + init { + this.parallelism = computeParallelism(requestedParallelism) + this.executorService = createExecutorService(parallelism) + this.exceptions = ArrayList() + this.streamConsumer = streamConsumer + } + + override fun accept(streams: Collection>) { + /* + * Submit the provided streams to the underlying executor service for concurrent execution. This + * thread will track the status of each stream as well as consuming all messages produced from each + * stream, passing them to the provided message consumer for further processing. Any exceptions + * raised within the thread will be captured and exposed to the caller. + */ + val futures: Collection> = + streams + .stream() + .map { stream: AutoCloseableIterator -> + ConcurrentStreamRunnable(stream, this) + } + .map { runnable: ConcurrentStreamRunnable -> + CompletableFuture.runAsync(runnable, executorService) + } + .collect(Collectors.toList()) + + /* + * Wait for the submitted streams to complete before returning. This uses the join() method to allow + * all streams to complete even if one or more encounters an exception. + */ + LOGGER.debug("Waiting for all streams to complete....") + CompletableFuture.allOf(*futures.toTypedArray>()).join() + LOGGER.debug("Completed consuming from all streams.") + } + + val exception: Optional + /** + * Returns the first captured [Exception]. + * + * @return The first captured [Exception] or an empty [Optional] if no exceptions were + * captured during execution. + */ + get() = + if (!exceptions.isEmpty()) { + Optional.of(exceptions[0]) + } else { + Optional.empty() + } + + /** + * Returns the list of exceptions captured during execution of the streams, if any. + * + * @return The collection of captured exceptions or an empty list. + */ + fun getExceptions(): List { + return Collections.unmodifiableList(exceptions) + } + + /** + * Calculates the parallelism based on the requested parallelism. If the requested parallelism + * is greater than zero, the minimum value between the parallelism and the maximum parallelism + * is chosen as the parallelism count. Otherwise, the minimum parallelism is selected. This is + * to avoid issues with attempting to create an executor service with a thread pool size of 0, + * which is not allowed. + * + * @param requestedParallelism The requested parallelism. + * @return The selected parallelism based on the factors outlined above. + */ + private fun computeParallelism(requestedParallelism: Int): Int { + /* + * Selects the default thread pool size based on the provided value via an environment variable or + * the number of available processors if the environment variable is not set/present. This is to + * ensure that we do not over-parallelize unless requested explicitly. + */ + val defaultPoolSize = + Optional.ofNullable(System.getenv("DEFAULT_CONCURRENT_STREAM_CONSUMER_THREADS")) + .map { s: String -> s.toInt() } + .orElseGet { Runtime.getRuntime().availableProcessors() } + LOGGER.debug( + "Default parallelism: {}, Requested parallelism: {}", + defaultPoolSize, + requestedParallelism + ) + val parallelism = + min( + defaultPoolSize.toDouble(), + (if (requestedParallelism > 0) requestedParallelism else 1).toDouble() + ) + .toInt() + LOGGER.debug("Computed concurrent stream consumer parallelism: {}", parallelism) + return parallelism + } + + /** + * Creates the [ExecutorService] that will be used by the consumer to consume from the provided + * streams in parallel. + * + * @param nThreads The number of threads to execute concurrently. + * @return The configured [ExecutorService]. + */ + private fun createExecutorService(nThreads: Int): ExecutorService { + return ThreadPoolExecutor( + nThreads, + nThreads, + 0L, + TimeUnit.MILLISECONDS, + LinkedBlockingQueue(), + ConcurrentStreamThreadFactory(), + AbortPolicy() + ) + } + + /** + * Executes the stream by providing it to the configured [.streamConsumer]. + * + * @param stream The stream to be executed. + */ + private fun executeStream(stream: AutoCloseableIterator) { + try { + stream.use { + stream!!.airbyteStream.ifPresent { s: AirbyteStreamNameNamespacePair? -> + LOGGER.debug("Consuming from stream {}...", s) + } + StreamStatusUtils.emitStartStreamStatus(stream, streamStatusEmitter) + streamConsumer.accept(stream) + StreamStatusUtils.emitCompleteStreamStatus(stream, streamStatusEmitter) + stream.airbyteStream.ifPresent { s: AirbyteStreamNameNamespacePair? -> + LOGGER.debug("Consumption from stream {} complete.", s) + } + } + } catch (e: Exception) { + stream!!.airbyteStream.ifPresent { s: AirbyteStreamNameNamespacePair? -> + LOGGER.error("Unable to consume from stream {}.", s, e) + } + StreamStatusUtils.emitIncompleteStreamStatus(stream, streamStatusEmitter) + exceptions.add(e) + } + } + + @Throws(Exception::class) + override fun close() { + // Block waiting for the executor service to close + executorService.shutdownNow() + executorService.awaitTermination(30, TimeUnit.SECONDS) + } + + /** Custom [ThreadFactory] that names the threads used to concurrently execute streams. */ + private class ConcurrentStreamThreadFactory : ThreadFactory { + override fun newThread(r: Runnable): Thread { + val thread = Thread(r) + if (r is ConcurrentStreamRunnable) { + val stream = r.stream + if (stream!!.airbyteStream.isPresent) { + val airbyteStream = stream.airbyteStream.get() + thread.name = + String.format( + "%s-%s-%s", + CONCURRENT_STREAM_THREAD_NAME, + airbyteStream.namespace, + airbyteStream.name + ) + } else { + thread.name = CONCURRENT_STREAM_THREAD_NAME + } + } else { + thread.name = CONCURRENT_STREAM_THREAD_NAME + } + return thread + } + } + + /** + * Custom [Runnable] that exposes the stream for thread naming purposes. + * + * @param stream The stream that is part of the [Runnable] execution. + * @param consumer The [ConcurrentStreamConsumer] that will execute the stream. + */ + private class ConcurrentStreamRunnable( + val stream: AutoCloseableIterator, + val consumer: ConcurrentStreamConsumer + ) : Runnable { + override fun run() { + consumer.executeStream(stream) + } + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(ConcurrentStreamConsumer::class.java) + + /** Name of threads spawned by the [ConcurrentStreamConsumer]. */ + const val CONCURRENT_STREAM_THREAD_NAME: String = "concurrent-stream-thread" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties b/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties index c6b68bc8f36fd..3df70a91eac55 100644 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties @@ -1 +1 @@ -version=0.23.18 +version=0.30.5 diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/IncrementalUtilsTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/IncrementalUtilsTest.java deleted file mode 100644 index 830fa37435390..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/IncrementalUtilsTest.java +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db; - -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.google.common.collect.Lists; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.Field; -import io.airbyte.protocol.models.JsonSchemaPrimitiveUtil.JsonSchemaPrimitive; -import io.airbyte.protocol.models.JsonSchemaType; -import io.airbyte.protocol.models.v0.CatalogHelpers; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import java.util.Collections; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -class IncrementalUtilsTest { - - private static final String STREAM_NAME = "shoes"; - private static final String UUID_FIELD_NAME = "ascending_inventory_uuid"; - private static final ConfiguredAirbyteStream STREAM = CatalogHelpers.createConfiguredAirbyteStream( - STREAM_NAME, - null, - Field.of("ascending_inventory_uuid", JsonSchemaType.STRING)); - - private static final ConfiguredAirbyteStream STREAM_V1 = CatalogHelpers.createConfiguredAirbyteStream( - STREAM_NAME, - null, - Field.of("ascending_inventory_uuid", JsonSchemaType.STRING_V1)); - private static final String ABC = "abc"; - - @Test - void testGetCursorField() { - final ConfiguredAirbyteStream stream = Jsons.clone(STREAM); - stream.setCursorField(Lists.newArrayList(UUID_FIELD_NAME)); - Assertions.assertEquals(UUID_FIELD_NAME, IncrementalUtils.getCursorField(stream)); - } - - @Test - void testGetCursorFieldNoCursorFieldSet() { - assertThrows(IllegalStateException.class, () -> Assertions - .assertEquals(UUID_FIELD_NAME, IncrementalUtils.getCursorField(STREAM))); - } - - @Test - void testGetCursorFieldCompositCursor() { - final ConfiguredAirbyteStream stream = Jsons.clone(STREAM); - stream.setCursorField(Lists.newArrayList(UUID_FIELD_NAME, "something_else")); - assertThrows(IllegalStateException.class, () -> IncrementalUtils.getCursorField(stream)); - } - - @Test - void testGetCursorType() { - Assertions.assertEquals(JsonSchemaPrimitive.STRING, IncrementalUtils.getCursorType(STREAM, UUID_FIELD_NAME)); - } - - @Test - void testGetCursorType_V1() { - Assertions.assertEquals(JsonSchemaPrimitive.STRING_V1, IncrementalUtils.getCursorType(STREAM_V1, UUID_FIELD_NAME)); - } - - @Test - void testGetCursorTypeNoProperties() { - final ConfiguredAirbyteStream stream = Jsons.clone(STREAM); - stream.getStream().setJsonSchema(Jsons.jsonNode(Collections.emptyMap())); - assertThrows(IllegalStateException.class, () -> IncrementalUtils.getCursorType(stream, UUID_FIELD_NAME)); - } - - @Test - void testGetCursorTypeNoCursor() { - assertThrows(IllegalStateException.class, () -> IncrementalUtils.getCursorType(STREAM, "does not exist")); - } - - @Test - void testGetCursorTypeCursorHasNoType() { - final ConfiguredAirbyteStream stream = Jsons.clone(STREAM); - ((ObjectNode) stream.getStream().getJsonSchema().get("properties").get(UUID_FIELD_NAME)).remove("type"); - assertThrows(IllegalStateException.class, () -> IncrementalUtils.getCursorType(stream, UUID_FIELD_NAME)); - } - - @Test - void testCompareCursors() { - assertTrue(IncrementalUtils.compareCursors(ABC, "def", JsonSchemaPrimitive.STRING) < 0); - assertTrue(IncrementalUtils.compareCursors(ABC, "def", JsonSchemaPrimitive.STRING_V1) < 0); - Assertions.assertEquals(0, IncrementalUtils.compareCursors(ABC, ABC, JsonSchemaPrimitive.STRING)); - assertTrue(IncrementalUtils.compareCursors("1", "2", JsonSchemaPrimitive.NUMBER) < 0); - assertTrue(IncrementalUtils.compareCursors("1", "2", JsonSchemaPrimitive.INTEGER_V1) < 0); - assertTrue(IncrementalUtils.compareCursors("5000000000", "5000000001", JsonSchemaPrimitive.NUMBER) < 0); - assertTrue(IncrementalUtils.compareCursors("false", "true", JsonSchemaPrimitive.BOOLEAN) < 0); - assertTrue(IncrementalUtils.compareCursors(null, "def", JsonSchemaPrimitive.STRING) < 1); - assertTrue(IncrementalUtils.compareCursors(ABC, null, JsonSchemaPrimitive.STRING) > 0); - Assertions.assertEquals(0, IncrementalUtils.compareCursors(null, null, JsonSchemaPrimitive.STRING)); - assertThrows(IllegalStateException.class, () -> IncrementalUtils.compareCursors("a", "a", JsonSchemaPrimitive.ARRAY)); - assertThrows(IllegalStateException.class, () -> IncrementalUtils.compareCursors("a", "a", JsonSchemaPrimitive.OBJECT)); - assertThrows(IllegalStateException.class, () -> IncrementalUtils.compareCursors("a", "a", JsonSchemaPrimitive.NULL)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/check/impl/CommonDatabaseCheckTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/check/impl/CommonDatabaseCheckTest.java deleted file mode 100644 index 4b560ae4b8762..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/check/impl/CommonDatabaseCheckTest.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db.check.impl; - -import io.airbyte.cdk.db.factory.DSLContextFactory; -import io.airbyte.cdk.db.factory.DataSourceFactory; -import javax.sql.DataSource; -import org.jooq.DSLContext; -import org.jooq.SQLDialect; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.testcontainers.containers.PostgreSQLContainer; - -/** - * Common test setup for database availability check tests. - */ -class CommonDatabaseCheckTest { - - protected static final long TIMEOUT_MS = 500L; - - protected PostgreSQLContainer container; - - protected DataSource dataSource; - - protected DSLContext dslContext; - - @BeforeEach - void setup() { - container = new PostgreSQLContainer<>("postgres:13-alpine"); - container.start(); - - dataSource = DataSourceFactory.create(container.getUsername(), container.getPassword(), container.getDriverClassName(), container.getJdbcUrl()); - dslContext = DSLContextFactory.create(dataSource, SQLDialect.POSTGRES); - } - - @SuppressWarnings("PMD.SignatureDeclareThrowsException") - @AfterEach - void cleanup() throws Exception { - DataSourceFactory.close(dataSource); - container.stop(); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/factory/CommonFactoryTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/factory/CommonFactoryTest.java deleted file mode 100644 index 04dae353989df..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/factory/CommonFactoryTest.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db.factory; - -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; -import org.testcontainers.containers.PostgreSQLContainer; - -/** - * Common test suite for the classes found in the {@code io.airbyte.cdk.db.factory} package. - */ -class CommonFactoryTest { - - private static final String DATABASE_NAME = "airbyte_test_database"; - - protected static PostgreSQLContainer container; - - @BeforeAll - public static void dbSetup() { - container = new PostgreSQLContainer<>("postgres:13-alpine") - .withDatabaseName(DATABASE_NAME) - .withUsername("docker") - .withPassword("docker"); - container.start(); - } - - @AfterAll - public static void dbDown() { - container.close(); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/factory/DSLContextFactoryTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/factory/DSLContextFactoryTest.java deleted file mode 100644 index d673b71cfa566..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/factory/DSLContextFactoryTest.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db.factory; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; - -import io.airbyte.cdk.integrations.JdbcConnector; -import java.util.Map; -import javax.sql.DataSource; -import org.jooq.DSLContext; -import org.jooq.SQLDialect; -import org.junit.jupiter.api.Test; - -/** - * Test suite for the {@link DSLContextFactory} class. - */ -class DSLContextFactoryTest extends CommonFactoryTest { - - @Test - void testCreatingADslContext() { - final DataSource dataSource = - DataSourceFactory.create(container.getUsername(), container.getPassword(), container.getDriverClassName(), container.getJdbcUrl()); - final SQLDialect dialect = SQLDialect.POSTGRES; - final DSLContext dslContext = DSLContextFactory.create(dataSource, dialect); - assertNotNull(dslContext); - assertEquals(dialect, dslContext.configuration().dialect()); - } - - @Test - void testCreatingADslContextWithIndividualConfiguration() { - final SQLDialect dialect = SQLDialect.POSTGRES; - final DSLContext dslContext = DSLContextFactory.create( - container.getUsername(), - container.getPassword(), - container.getDriverClassName(), - container.getJdbcUrl(), - dialect); - assertNotNull(dslContext); - assertEquals(dialect, dslContext.configuration().dialect()); - } - - @Test - void testCreatingADslContextWithIndividualConfigurationAndConnectionProperties() { - final Map connectionProperties = Map.of("foo", "bar"); - final SQLDialect dialect = SQLDialect.POSTGRES; - final DSLContext dslContext = DSLContextFactory.create( - container.getUsername(), - container.getPassword(), - container.getDriverClassName(), - container.getJdbcUrl(), - dialect, - connectionProperties, - JdbcConnector.CONNECT_TIMEOUT_DEFAULT); - assertNotNull(dslContext); - assertEquals(dialect, dslContext.configuration().dialect()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/factory/DataSourceFactoryTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/factory/DataSourceFactoryTest.java deleted file mode 100644 index db8850af63a4c..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/factory/DataSourceFactoryTest.java +++ /dev/null @@ -1,221 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db.factory; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; - -import com.zaxxer.hikari.HikariDataSource; -import io.airbyte.cdk.integrations.JdbcConnector; -import java.util.Map; -import javax.sql.DataSource; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; -import org.testcontainers.containers.MySQLContainer; - -/** - * Test suite for the {@link DataSourceFactory} class. - */ -class DataSourceFactoryTest extends CommonFactoryTest { - - private static final String CONNECT_TIMEOUT = "connectTimeout"; - - static String database; - static String driverClassName; - static String host; - static String jdbcUrl; - static String password; - static Integer port; - static String username; - - @BeforeAll - static void setup() { - host = container.getHost(); - port = container.getFirstMappedPort(); - database = container.getDatabaseName(); - username = container.getUsername(); - password = container.getPassword(); - driverClassName = container.getDriverClassName(); - jdbcUrl = container.getJdbcUrl(); - } - - @Test - void testCreatingDataSourceWithConnectionTimeoutSetAboveDefault() { - final Map connectionProperties = Map.of( - CONNECT_TIMEOUT, "61"); - final DataSource dataSource = DataSourceFactory.create( - username, - password, - driverClassName, - jdbcUrl, - connectionProperties, - JdbcConnector.getConnectionTimeout(connectionProperties, driverClassName)); - assertNotNull(dataSource); - assertEquals(HikariDataSource.class, dataSource.getClass()); - assertEquals(61000, ((HikariDataSource) dataSource).getHikariConfigMXBean().getConnectionTimeout()); - } - - @Test - void testCreatingPostgresDataSourceWithConnectionTimeoutSetBelowDefault() { - final Map connectionProperties = Map.of( - CONNECT_TIMEOUT, "30"); - final DataSource dataSource = DataSourceFactory.create( - username, - password, - driverClassName, - jdbcUrl, - connectionProperties, - JdbcConnector.getConnectionTimeout(connectionProperties, driverClassName)); - assertNotNull(dataSource); - assertEquals(HikariDataSource.class, dataSource.getClass()); - assertEquals(30000, ((HikariDataSource) dataSource).getHikariConfigMXBean().getConnectionTimeout()); - } - - @Test - void testCreatingMySQLDataSourceWithConnectionTimeoutSetBelowDefault() { - try (MySQLContainer mySQLContainer = new MySQLContainer<>("mysql:8.0")) { - mySQLContainer.start(); - final Map connectionProperties = Map.of( - CONNECT_TIMEOUT, "5000"); - final DataSource dataSource = DataSourceFactory.create( - mySQLContainer.getUsername(), - mySQLContainer.getPassword(), - mySQLContainer.getDriverClassName(), - mySQLContainer.getJdbcUrl(), - connectionProperties, - JdbcConnector.getConnectionTimeout(connectionProperties, mySQLContainer.getDriverClassName())); - assertNotNull(dataSource); - assertEquals(HikariDataSource.class, dataSource.getClass()); - assertEquals(5000, ((HikariDataSource) dataSource).getHikariConfigMXBean().getConnectionTimeout()); - } - } - - @Test - void testCreatingDataSourceWithConnectionTimeoutSetWithZero() { - final Map connectionProperties = Map.of( - CONNECT_TIMEOUT, "0"); - final DataSource dataSource = DataSourceFactory.create( - username, - password, - driverClassName, - jdbcUrl, - connectionProperties, - JdbcConnector.getConnectionTimeout(connectionProperties, driverClassName)); - assertNotNull(dataSource); - assertEquals(HikariDataSource.class, dataSource.getClass()); - assertEquals(Integer.MAX_VALUE, ((HikariDataSource) dataSource).getHikariConfigMXBean().getConnectionTimeout()); - } - - @Test - void testCreatingPostgresDataSourceWithConnectionTimeoutNotSet() { - final Map connectionProperties = Map.of(); - final DataSource dataSource = DataSourceFactory.create( - username, - password, - driverClassName, - jdbcUrl, - connectionProperties, - JdbcConnector.getConnectionTimeout(connectionProperties, driverClassName)); - assertNotNull(dataSource); - assertEquals(HikariDataSource.class, dataSource.getClass()); - assertEquals(10000, ((HikariDataSource) dataSource).getHikariConfigMXBean().getConnectionTimeout()); - } - - @Test - void testCreatingMySQLDataSourceWithConnectionTimeoutNotSet() { - try (MySQLContainer mySQLContainer = new MySQLContainer<>("mysql:8.0")) { - mySQLContainer.start(); - final Map connectionProperties = Map.of(); - final DataSource dataSource = DataSourceFactory.create( - mySQLContainer.getUsername(), - mySQLContainer.getPassword(), - mySQLContainer.getDriverClassName(), - mySQLContainer.getJdbcUrl(), - connectionProperties, - JdbcConnector.getConnectionTimeout(connectionProperties, mySQLContainer.getDriverClassName())); - assertNotNull(dataSource); - assertEquals(HikariDataSource.class, dataSource.getClass()); - assertEquals(60000, ((HikariDataSource) dataSource).getHikariConfigMXBean().getConnectionTimeout()); - } - - } - - @Test - void testCreatingADataSourceWithJdbcUrl() { - final DataSource dataSource = DataSourceFactory.create(username, password, driverClassName, jdbcUrl); - assertNotNull(dataSource); - assertEquals(HikariDataSource.class, dataSource.getClass()); - assertEquals(10, ((HikariDataSource) dataSource).getHikariConfigMXBean().getMaximumPoolSize()); - } - - @Test - void testCreatingADataSourceWithJdbcUrlAndConnectionProperties() { - final Map connectionProperties = Map.of("foo", "bar"); - - final DataSource dataSource = DataSourceFactory.create( - username, - password, - driverClassName, - jdbcUrl, - connectionProperties, - JdbcConnector.getConnectionTimeout(connectionProperties, driverClassName)); - assertNotNull(dataSource); - assertEquals(HikariDataSource.class, dataSource.getClass()); - assertEquals(10, ((HikariDataSource) dataSource).getHikariConfigMXBean().getMaximumPoolSize()); - } - - @Test - void testCreatingADataSourceWithHostAndPort() { - final DataSource dataSource = DataSourceFactory.create(username, password, host, port, database, driverClassName); - assertNotNull(dataSource); - assertEquals(HikariDataSource.class, dataSource.getClass()); - assertEquals(10, ((HikariDataSource) dataSource).getHikariConfigMXBean().getMaximumPoolSize()); - } - - @Test - void testCreatingADataSourceWithHostPortAndConnectionProperties() { - final Map connectionProperties = Map.of("foo", "bar"); - - final DataSource dataSource = DataSourceFactory.create(username, password, host, port, database, driverClassName, connectionProperties); - assertNotNull(dataSource); - assertEquals(HikariDataSource.class, dataSource.getClass()); - assertEquals(10, ((HikariDataSource) dataSource).getHikariConfigMXBean().getMaximumPoolSize()); - } - - @Test - void testCreatingAnInvalidDataSourceWithHostAndPort() { - final String driverClassName = "Unknown"; - - assertThrows(RuntimeException.class, () -> { - DataSourceFactory.create(username, password, host, port, database, driverClassName); - }); - } - - @Test - void testCreatingAPostgresqlDataSource() { - final DataSource dataSource = DataSourceFactory.createPostgres(username, password, host, port, database); - assertNotNull(dataSource); - assertEquals(HikariDataSource.class, dataSource.getClass()); - assertEquals(10, ((HikariDataSource) dataSource).getHikariConfigMXBean().getMaximumPoolSize()); - } - - @Test - void testClosingADataSource() { - final HikariDataSource dataSource1 = mock(HikariDataSource.class); - Assertions.assertDoesNotThrow(() -> DataSourceFactory.close(dataSource1)); - verify(dataSource1, times(1)).close(); - - final DataSource dataSource2 = mock(DataSource.class); - Assertions.assertDoesNotThrow(() -> DataSourceFactory.close(dataSource2)); - - Assertions.assertDoesNotThrow(() -> DataSourceFactory.close(null)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/jdbc/TestDefaultJdbcDatabase.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/jdbc/TestDefaultJdbcDatabase.java deleted file mode 100644 index 8d4c2b9a07286..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/jdbc/TestDefaultJdbcDatabase.java +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db.jdbc; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import io.airbyte.cdk.db.factory.DataSourceFactory; -import io.airbyte.cdk.db.factory.DatabaseDriver; -import io.airbyte.cdk.testutils.PostgreSQLContainerHelper; -import io.airbyte.commons.io.IOs; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.string.Strings; -import java.sql.SQLException; -import java.util.List; -import java.util.stream.Stream; -import javax.sql.DataSource; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.testcontainers.containers.PostgreSQLContainer; -import org.testcontainers.utility.MountableFile; - -class TestDefaultJdbcDatabase { - - private static final List RECORDS_AS_JSON = Lists.newArrayList( - Jsons.jsonNode(ImmutableMap.of("id", 1, "name", "picard")), - Jsons.jsonNode(ImmutableMap.of("id", 2, "name", "crusher")), - Jsons.jsonNode(ImmutableMap.of("id", 3, "name", "vash"))); - - private static PostgreSQLContainer PSQL_DB; - private final JdbcSourceOperations sourceOperations = JdbcUtils.getDefaultSourceOperations(); - private DataSource dataSource; - private JdbcDatabase database; - - @BeforeAll - static void init() { - PSQL_DB = new PostgreSQLContainer<>("postgres:13-alpine"); - PSQL_DB.start(); - } - - @AfterAll - static void cleanUp() { - PSQL_DB.close(); - } - - @BeforeEach - void setup() throws Exception { - final String dbName = Strings.addRandomSuffix("db", "_", 10); - - final JsonNode config = getConfig(PSQL_DB, dbName); - final String initScriptName = "init_" + dbName.concat(".sql"); - final String tmpFilePath = IOs.writeFileToRandomTmpDir(initScriptName, "CREATE DATABASE " + dbName + ";"); - PostgreSQLContainerHelper.runSqlScript(MountableFile.forHostPath(tmpFilePath), PSQL_DB); - - dataSource = getDataSourceFromConfig(config); - database = new DefaultJdbcDatabase(dataSource); - database.execute(connection -> { - connection.createStatement().execute("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); - connection.createStatement().execute("INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); - }); - } - - @AfterEach - void close() throws Exception { - DataSourceFactory.close(dataSource); - } - - @Test - void testBufferedResultQuery() throws SQLException { - final List actual = database.bufferedResultSetQuery( - connection -> connection.createStatement().executeQuery("SELECT * FROM id_and_name;"), - sourceOperations::rowToJson); - - assertEquals(RECORDS_AS_JSON, actual); - } - - @Test - void testResultSetQuery() throws SQLException { - try (final Stream actual = database.unsafeResultSetQuery( - connection -> connection.createStatement().executeQuery("SELECT * FROM id_and_name;"), - sourceOperations::rowToJson)) { - assertEquals(RECORDS_AS_JSON, actual.toList()); - } - } - - @Test - void testQuery() throws SQLException { - final List actual = database.queryJsons( - connection -> connection.prepareStatement("SELECT * FROM id_and_name;"), - sourceOperations::rowToJson); - assertEquals(RECORDS_AS_JSON, actual); - } - - private DataSource getDataSourceFromConfig(final JsonNode config) { - return DataSourceFactory.create( - config.get(JdbcUtils.USERNAME_KEY).asText(), - config.get(JdbcUtils.PASSWORD_KEY).asText(), - DatabaseDriver.POSTGRESQL.getDriverClassName(), - String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), - config.get(JdbcUtils.HOST_KEY).asText(), - config.get(JdbcUtils.PORT_KEY).asInt(), - config.get(JdbcUtils.DATABASE_KEY).asText())); - } - - private JsonNode getConfig(final PostgreSQLContainer psqlDb, final String dbName) { - return Jsons.jsonNode(ImmutableMap.builder() - .put(JdbcUtils.HOST_KEY, psqlDb.getHost()) - .put(JdbcUtils.PORT_KEY, psqlDb.getFirstMappedPort()) - .put(JdbcUtils.DATABASE_KEY, dbName) - .put(JdbcUtils.USERNAME_KEY, psqlDb.getUsername()) - .put(JdbcUtils.PASSWORD_KEY, psqlDb.getPassword()) - .build()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/jdbc/TestJdbcUtils.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/jdbc/TestJdbcUtils.java deleted file mode 100644 index 317566be30ec7..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/jdbc/TestJdbcUtils.java +++ /dev/null @@ -1,430 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db.jdbc; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.ArrayNode; -import com.fasterxml.jackson.databind.node.BinaryNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.google.common.base.Charsets; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import io.airbyte.cdk.db.factory.DataSourceFactory; -import io.airbyte.cdk.db.factory.DatabaseDriver; -import io.airbyte.cdk.testutils.PostgreSQLContainerHelper; -import io.airbyte.commons.io.IOs; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.stream.MoreStreams; -import io.airbyte.commons.string.Strings; -import io.airbyte.protocol.models.JsonSchemaType; -import java.math.BigDecimal; -import java.sql.Connection; -import java.sql.JDBCType; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; -import javax.sql.DataSource; -import org.bouncycastle.util.encoders.Base64; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.CsvSource; -import org.testcontainers.containers.PostgreSQLContainer; -import org.testcontainers.utility.MountableFile; - -@SuppressWarnings("PMD.CheckResultSet") -class TestJdbcUtils { - - private String dbName; - private static final String ONE_POINT_0 = "1.0,"; - - private static final List RECORDS_AS_JSON = Lists.newArrayList( - Jsons.jsonNode(ImmutableMap.of("id", 1, "name", "picard")), - Jsons.jsonNode(ImmutableMap.of("id", 2, "name", "crusher")), - Jsons.jsonNode(ImmutableMap.of("id", 3, "name", "vash"))); - - private static PostgreSQLContainer PSQL_DB; - - private DataSource dataSource; - private static final JdbcSourceOperations sourceOperations = JdbcUtils.getDefaultSourceOperations(); - - @BeforeAll - static void init() { - PSQL_DB = new PostgreSQLContainer<>("postgres:13-alpine"); - PSQL_DB.start(); - - } - - @BeforeEach - void setup() throws Exception { - dbName = Strings.addRandomSuffix("db", "_", 10); - - final JsonNode config = getConfig(PSQL_DB, dbName); - - final String initScriptName = "init_" + dbName.concat(".sql"); - final String tmpFilePath = IOs.writeFileToRandomTmpDir(initScriptName, "CREATE DATABASE " + dbName + ";"); - PostgreSQLContainerHelper.runSqlScript(MountableFile.forHostPath(tmpFilePath), PSQL_DB); - - dataSource = DataSourceFactory.create( - config.get(JdbcUtils.USERNAME_KEY).asText(), - config.get(JdbcUtils.PASSWORD_KEY).asText(), - DatabaseDriver.POSTGRESQL.getDriverClassName(), - String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), - config.get(JdbcUtils.HOST_KEY).asText(), - config.get(JdbcUtils.PORT_KEY).asInt(), - config.get(JdbcUtils.DATABASE_KEY).asText())); - - final JdbcDatabase defaultJdbcDatabase = new DefaultJdbcDatabase(dataSource); - - defaultJdbcDatabase.execute(connection -> { - connection.createStatement().execute("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); - connection.createStatement().execute("INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); - }); - } - - private JsonNode getConfig(final PostgreSQLContainer psqlDb, final String dbName) { - return Jsons.jsonNode(ImmutableMap.builder() - .put(JdbcUtils.HOST_KEY, psqlDb.getHost()) - .put(JdbcUtils.PORT_KEY, psqlDb.getFirstMappedPort()) - .put(JdbcUtils.DATABASE_KEY, dbName) - .put(JdbcUtils.USERNAME_KEY, psqlDb.getUsername()) - .put(JdbcUtils.PASSWORD_KEY, psqlDb.getPassword()) - .build()); - } - - // Takes in a generic sslValue because useSsl maps sslValue to a boolean - private JsonNode getConfigWithSsl(final PostgreSQLContainer psqlDb, final String dbName, final T sslValue) { - return Jsons.jsonNode(ImmutableMap.builder() - .put("host", psqlDb.getHost()) - .put("port", psqlDb.getFirstMappedPort()) - .put("database", dbName) - .put("username", psqlDb.getUsername()) - .put("password", psqlDb.getPassword()) - .put("ssl", sslValue) - .build()); - } - - @Test - void testRowToJson() throws SQLException { - try (final Connection connection = dataSource.getConnection()) { - final ResultSet rs = connection.createStatement().executeQuery("SELECT * FROM id_and_name;"); - rs.next(); - assertEquals(RECORDS_AS_JSON.get(0), sourceOperations.rowToJson(rs)); - } - } - - @Test - void testToStream() throws SQLException { - try (final Connection connection = dataSource.getConnection()) { - final ResultSet rs = connection.createStatement().executeQuery("SELECT * FROM id_and_name;"); - final List actual = JdbcDatabase.toUnsafeStream(rs, sourceOperations::rowToJson).collect(Collectors.toList()); - assertEquals(RECORDS_AS_JSON, actual); - } - } - - // test conversion of every JDBCType that we support to Json. - @Test - void testSetJsonField() throws SQLException { - try (final Connection connection = dataSource.getConnection()) { - createTableWithAllTypes(connection); - insertRecordOfEachType(connection); - assertExpectedOutputValues(connection, jsonFieldExpectedValues()); - assertExpectedOutputTypes(connection); - } - } - - // test setting on a PreparedStatement every JDBCType that we support. - @Test - void testSetStatementField() throws SQLException { - try (final Connection connection = dataSource.getConnection()) { - createTableWithAllTypes(connection); - - final PreparedStatement ps = connection.prepareStatement("INSERT INTO data VALUES(?::bit,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);"); - - // insert the bit here to stay consistent even though setStatementField does not support it yet. - ps.setString(1, "1"); - sourceOperations.setCursorField(ps, 2, JDBCType.BOOLEAN, "true"); - sourceOperations.setCursorField(ps, 3, JDBCType.SMALLINT, "1"); - sourceOperations.setCursorField(ps, 4, JDBCType.INTEGER, "1"); - sourceOperations.setCursorField(ps, 5, JDBCType.BIGINT, "1"); - sourceOperations.setCursorField(ps, 6, JDBCType.FLOAT, "1.0"); - sourceOperations.setCursorField(ps, 7, JDBCType.DOUBLE, "1.0"); - sourceOperations.setCursorField(ps, 8, JDBCType.REAL, "1.0"); - sourceOperations.setCursorField(ps, 9, JDBCType.NUMERIC, "1"); - sourceOperations.setCursorField(ps, 10, JDBCType.DECIMAL, "1"); - sourceOperations.setCursorField(ps, 11, JDBCType.CHAR, "a"); - sourceOperations.setCursorField(ps, 12, JDBCType.VARCHAR, "a"); - sourceOperations.setCursorField(ps, 13, JDBCType.DATE, "2020-11-01"); - sourceOperations.setCursorField(ps, 14, JDBCType.TIME, "05:00:00.000"); - sourceOperations.setCursorField(ps, 15, JDBCType.TIMESTAMP, "2001-09-29T03:00:00.000"); - sourceOperations.setCursorField(ps, 16, JDBCType.BINARY, "61616161"); - - ps.execute(); - - assertExpectedOutputValues(connection, expectedValues()); - assertExpectedOutputTypes(connection); - } - } - - @Test - void testUseSslWithSslNotSet() { - final JsonNode config = getConfig(PSQL_DB, dbName); - final boolean sslSet = JdbcUtils.useSsl(config); - assertTrue(sslSet); - } - - @Test - void testUseSslWithSslSetAndValueStringFalse() { - final JsonNode config = getConfigWithSsl(PSQL_DB, dbName, "false"); - final boolean sslSet = JdbcUtils.useSsl(config); - assertFalse(sslSet); - } - - @Test - void testUseSslWithSslSetAndValueIntegerFalse() { - final JsonNode config = getConfigWithSsl(PSQL_DB, dbName, 0); - final boolean sslSet = JdbcUtils.useSsl(config); - assertFalse(sslSet); - } - - @Test - void testUseSslWithSslSetAndValueStringTrue() { - final JsonNode config = getConfigWithSsl(PSQL_DB, dbName, "true"); - final boolean sslSet = JdbcUtils.useSsl(config); - assertTrue(sslSet); - } - - @Test - void testUssSslWithSslSetAndValueIntegerTrue() { - final JsonNode config = getConfigWithSsl(PSQL_DB, dbName, 3); - final boolean sslSet = JdbcUtils.useSsl(config); - assertTrue(sslSet); - } - - @Test - void testUseSslWithEmptySslKeyAndSslModeVerifyFull() { - final JsonNode config = Jsons.jsonNode(ImmutableMap.builder() - .put("host", PSQL_DB.getHost()) - .put("port", PSQL_DB.getFirstMappedPort()) - .put("database", dbName) - .put("username", PSQL_DB.getUsername()) - .put("password", PSQL_DB.getPassword()) - .put("ssl_mode", ImmutableMap.builder() - .put("mode", "verify-full") - .put("ca_certificate", "test_ca_cert") - .put("client_certificate", "test_client_cert") - .put("client_key", "test_client_key") - .put("client_key_password", "test_pass") - .build()) - .build()); - final boolean sslSet = JdbcUtils.useSsl(config); - assertTrue(sslSet); - } - - @Test - void testUseSslWithEmptySslKeyAndSslModeDisable() { - final JsonNode config = Jsons.jsonNode(ImmutableMap.builder() - .put("host", PSQL_DB.getHost()) - .put("port", PSQL_DB.getFirstMappedPort()) - .put("database", dbName) - .put("username", PSQL_DB.getUsername()) - .put("password", PSQL_DB.getPassword()) - .put("ssl_mode", ImmutableMap.builder() - .put("mode", "disable") - .build()) - .build()); - final boolean sslSet = JdbcUtils.useSsl(config); - assertFalse(sslSet); - } - - private static void createTableWithAllTypes(final Connection connection) throws SQLException { - // jdbctype not included because they are not directly supported in postgres: TINYINT, LONGVARCHAR, - // VARBINAR, LONGVARBINARY - connection.createStatement().execute("CREATE TABLE data(" - + "bit BIT, " - + "boolean BOOLEAN, " - + "smallint SMALLINT," - + "int INTEGER," - + "bigint BIGINT," - + "float FLOAT," - + "double DOUBLE PRECISION," - + "real REAL," - + "numeric NUMERIC," - + "decimal DECIMAL," - + "char CHAR," - + "varchar VARCHAR," - + "date DATE," - + "time TIME," - + "timestamp TIMESTAMP," - + "binary1 bytea," - + "text_array _text," - + "int_array int[]" - + ");"); - - } - - private static void insertRecordOfEachType(final Connection connection) throws SQLException { - connection.createStatement().execute("INSERT INTO data(" - + "bit," - + "boolean," - + "smallint," - + "int," - + "bigint," - + "float," - + "double," - + "real," - + "numeric," - + "decimal," - + "char," - + "varchar," - + "date," - + "time," - + "timestamp," - + "binary1," - + "text_array," - + "int_array" - + ") VALUES(" - + "1::bit(1)," - + "true," - + "1," - + "1," - + "1," - + ONE_POINT_0 - + ONE_POINT_0 - + ONE_POINT_0 - + "1," - + ONE_POINT_0 - + "'a'," - + "'a'," - + "'2020-11-01'," - + "'05:00'," - + "'2001-09-29 03:00'," - + "decode('61616161', 'hex')," - + "'{one,two,three}'," - + "'{1,2,3}'" - + ");"); - } - - private static void assertExpectedOutputValues(final Connection connection, final ObjectNode expected) throws SQLException { - final ResultSet resultSet = connection.createStatement().executeQuery("SELECT * FROM data;"); - - resultSet.next(); - final JsonNode actual = sourceOperations.rowToJson(resultSet); - - // field-wise comparison to make debugging easier. - MoreStreams.toStream(expected.fields()).forEach(e -> assertEquals(e.getValue(), actual.get(e.getKey()), "key: " + e.getKey())); - assertEquals(expected, actual); - } - - private static void assertExpectedOutputTypes(final Connection connection) throws SQLException { - final ResultSet resultSet = connection.createStatement().executeQuery("SELECT * FROM data;"); - - resultSet.next(); - final int columnCount = resultSet.getMetaData().getColumnCount(); - final Map actual = new HashMap<>(columnCount); - for (int i = 1; i <= columnCount; i++) { - actual.put(resultSet.getMetaData().getColumnName(i), - sourceOperations.getAirbyteType(JDBCType.valueOf(resultSet.getMetaData().getColumnType(i)))); - } - - final Map expected = ImmutableMap.builder() - .put("bit", JsonSchemaType.BOOLEAN) - .put("boolean", JsonSchemaType.BOOLEAN) - .put("smallint", JsonSchemaType.INTEGER) - .put("int", JsonSchemaType.INTEGER) - .put("bigint", JsonSchemaType.INTEGER) - .put("float", JsonSchemaType.NUMBER) - .put("double", JsonSchemaType.NUMBER) - .put("real", JsonSchemaType.NUMBER) - .put("numeric", JsonSchemaType.NUMBER) - .put("decimal", JsonSchemaType.NUMBER) - .put("char", JsonSchemaType.STRING) - .put("varchar", JsonSchemaType.STRING) - .put("date", JsonSchemaType.STRING) - .put("time", JsonSchemaType.STRING) - .put("timestamp", JsonSchemaType.STRING) - .put("binary1", JsonSchemaType.STRING_BASE_64) - .put("text_array", JsonSchemaType.ARRAY) - .put("int_array", JsonSchemaType.ARRAY) - .build(); - - assertEquals(actual, expected); - } - - private ObjectNode jsonFieldExpectedValues() { - final ObjectNode expected = expectedValues(); - final ArrayNode arrayNode = new ObjectMapper().createArrayNode(); - arrayNode.add("one"); - arrayNode.add("two"); - arrayNode.add("three"); - expected.set("text_array", arrayNode); - - final ArrayNode arrayNode2 = new ObjectMapper().createArrayNode(); - arrayNode2.add("1"); - arrayNode2.add("2"); - arrayNode2.add("3"); - expected.set("int_array", arrayNode2); - - expected.set("binary1", new BinaryNode("aaaa".getBytes(Charsets.UTF_8))); - - return expected; - } - - private ObjectNode expectedValues() { - final ObjectNode expected = (ObjectNode) Jsons.jsonNode(Collections.emptyMap()); - expected.put("bit", true); - expected.put("boolean", true); - expected.put("smallint", (short) 1); - expected.put("int", 1); - expected.put("bigint", (long) 1); - expected.put("float", (double) 1.0); - expected.put("double", (double) 1.0); - expected.put("real", (float) 1.0); - expected.put("numeric", new BigDecimal(1)); - expected.put("decimal", new BigDecimal(1)); - expected.put("char", "a"); - expected.put("varchar", "a"); - expected.put("date", "2020-11-01"); - expected.put("time", "05:00:00.000000"); - expected.put("timestamp", "2001-09-29T03:00:00.000000"); - expected.put("binary1", Base64.decode("61616161".getBytes(Charsets.UTF_8))); - return expected; - } - - @ParameterizedTest - @CsvSource({"'3E+1', 30", - "'30', 30", - "'999000000000', 999000000000", - "'999E+9', 999000000000", - "'1.79E+3', 1790"}) - void testSetStatementSpecialValues(final String colValue, final long value) throws SQLException { - try (final Connection connection = dataSource.getConnection()) { - createTableWithAllTypes(connection); - - final PreparedStatement ps = connection.prepareStatement("INSERT INTO data(bigint) VALUES(?);"); - - // insert the bit here to stay consistent even though setStatementField does not support it yet. - sourceOperations.setCursorField(ps, 1, JDBCType.BIGINT, colValue); - ps.execute(); - - assertExpectedOutputValues(connection, - ((ObjectNode) Jsons.jsonNode(Collections.emptyMap())) - .put("bigint", (long) value)); - assertExpectedOutputTypes(connection); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/jdbc/TestStreamingJdbcDatabase.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/jdbc/TestStreamingJdbcDatabase.java deleted file mode 100644 index c33db8f0d5221..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/jdbc/TestStreamingJdbcDatabase.java +++ /dev/null @@ -1,165 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db.jdbc; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.spy; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import io.airbyte.cdk.db.factory.DataSourceFactory; -import io.airbyte.cdk.db.factory.DatabaseDriver; -import io.airbyte.cdk.db.jdbc.streaming.AdaptiveStreamingQueryConfig; -import io.airbyte.cdk.db.jdbc.streaming.FetchSizeConstants; -import io.airbyte.cdk.testutils.PostgreSQLContainerHelper; -import io.airbyte.commons.io.IOs; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.string.Strings; -import java.sql.Connection; -import java.sql.PreparedStatement; -import java.sql.SQLException; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.atomic.AtomicReference; -import javax.sql.DataSource; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.MethodOrderer; -import org.junit.jupiter.api.Order; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.TestMethodOrder; -import org.testcontainers.containers.PostgreSQLContainer; -import org.testcontainers.utility.MountableFile; - -@TestMethodOrder(MethodOrderer.OrderAnnotation.class) -class TestStreamingJdbcDatabase { - - private static PostgreSQLContainer PSQL_DB; - private final JdbcSourceOperations sourceOperations = JdbcUtils.getDefaultSourceOperations(); - private JdbcDatabase defaultJdbcDatabase; - private JdbcDatabase streamingJdbcDatabase; - - @BeforeAll - static void init() { - PSQL_DB = new PostgreSQLContainer<>("postgres:13-alpine"); - PSQL_DB.start(); - } - - @AfterAll - static void cleanUp() { - PSQL_DB.close(); - } - - @BeforeEach - void setup() { - final String dbName = Strings.addRandomSuffix("db", "_", 10); - - final JsonNode config = getConfig(PSQL_DB, dbName); - - final String initScriptName = "init_" + dbName.concat(".sql"); - final String tmpFilePath = IOs.writeFileToRandomTmpDir(initScriptName, "CREATE DATABASE " + dbName + ";"); - PostgreSQLContainerHelper.runSqlScript(MountableFile.forHostPath(tmpFilePath), PSQL_DB); - - final DataSource connectionPool = DataSourceFactory.create( - config.get(JdbcUtils.USERNAME_KEY).asText(), - config.get(JdbcUtils.PASSWORD_KEY).asText(), - DatabaseDriver.POSTGRESQL.getDriverClassName(), - String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), - config.get(JdbcUtils.HOST_KEY).asText(), - config.get(JdbcUtils.PORT_KEY).asInt(), - config.get(JdbcUtils.DATABASE_KEY).asText())); - - defaultJdbcDatabase = spy(new DefaultJdbcDatabase(connectionPool)); - streamingJdbcDatabase = new StreamingJdbcDatabase(connectionPool, JdbcUtils.getDefaultSourceOperations(), AdaptiveStreamingQueryConfig::new); - } - - @Test - @Order(1) - void testQuery() throws SQLException { - defaultJdbcDatabase.execute(connection -> { - connection.createStatement().execute( - """ - DROP TABLE IF EXISTS id_and_name; - CREATE TABLE id_and_name (id INTEGER, name VARCHAR(200)); - INSERT INTO id_and_name (id, name) VALUES (1, 'picard'), (2, 'crusher'), (3, 'vash'); - """); - }); - - // grab references to connection and prepared statement, so we can verify the streaming config is - // invoked. - final AtomicReference connection1 = new AtomicReference<>(); - final AtomicReference ps1 = new AtomicReference<>(); - final List actual = streamingJdbcDatabase.queryJsons(connection -> { - connection1.set(connection); - final PreparedStatement ps = connection.prepareStatement("SELECT * FROM id_and_name;"); - ps1.set(ps); - return ps; - }, sourceOperations::rowToJson); - final List expectedRecords = Lists.newArrayList( - Jsons.jsonNode(Map.of("id", 1, "name", "picard")), - Jsons.jsonNode(Map.of("id", 2, "name", "crusher")), - Jsons.jsonNode(Map.of("id", 3, "name", "vash"))); - assertEquals(expectedRecords, actual); - } - - /** - * Test stream querying a table with 20 rows. Each row is 10 MB large. The table in this test must - * contain more than {@code - * FetchSizeConstants.INITIAL_SAMPLE_SIZE} rows. Otherwise, all rows will be fetched in the first - * fetch, the fetch size won't be adjusted, and the test will fail. - */ - @Order(2) - @Test - void testLargeRow() throws SQLException { - defaultJdbcDatabase.execute(connection -> connection.createStatement() - .execute( - """ - DROP TABLE IF EXISTS id_and_name; - CREATE TABLE id_and_name (id INTEGER, name TEXT); - INSERT INTO id_and_name SELECT id, repeat('a', 10485760) as name from generate_series(1, 20) as id; - """)); - - final AtomicReference connection1 = new AtomicReference<>(); - final AtomicReference ps1 = new AtomicReference<>(); - final Set fetchSizes = new HashSet<>(); - final List actual = streamingJdbcDatabase.queryJsons( - connection -> { - connection1.set(connection); - final PreparedStatement ps = connection.prepareStatement("SELECT * FROM id_and_name;"); - ps1.set(ps); - return ps; - }, - resultSet -> { - fetchSizes.add(resultSet.getFetchSize()); - return sourceOperations.rowToJson(resultSet); - }); - assertEquals(20, actual.size()); - - // Two fetch sizes should be set on the result set, one is the initial sample size, - // and the other is smaller than the initial value because of the large row. - // This check assumes that FetchSizeConstants.TARGET_BUFFER_BYTE_SIZE = 200 MB. - // Update this check if the buffer size constant is changed. - assertEquals(2, fetchSizes.size()); - final List sortedSizes = fetchSizes.stream().sorted().toList(); - assertTrue(sortedSizes.get(0) < FetchSizeConstants.INITIAL_SAMPLE_SIZE); - assertEquals(FetchSizeConstants.INITIAL_SAMPLE_SIZE, sortedSizes.get(1)); - } - - private JsonNode getConfig(final PostgreSQLContainer psqlDb, final String dbName) { - return Jsons.jsonNode(ImmutableMap.builder() - .put(JdbcUtils.HOST_KEY, psqlDb.getHost()) - .put(JdbcUtils.PORT_KEY, psqlDb.getFirstMappedPort()) - .put(JdbcUtils.DATABASE_KEY, dbName) - .put(JdbcUtils.USERNAME_KEY, psqlDb.getUsername()) - .put(JdbcUtils.PASSWORD_KEY, psqlDb.getPassword()) - .build()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/jdbc/streaming/AdaptiveStreamingQueryConfigTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/jdbc/streaming/AdaptiveStreamingQueryConfigTest.java deleted file mode 100644 index 2123206c87638..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/jdbc/streaming/AdaptiveStreamingQueryConfigTest.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db.jdbc.streaming; - -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; - -import java.sql.ResultSet; -import java.sql.SQLException; -import joptsimple.internal.Strings; -import org.junit.jupiter.api.Test; - -class AdaptiveStreamingQueryConfigTest { - - @Test - void testFetchSizeUpdate() throws SQLException { - final AdaptiveStreamingQueryConfig queryConfig = new AdaptiveStreamingQueryConfig(); - final ResultSet resultSet = mock(ResultSet.class); - for (int i = 0; i < FetchSizeConstants.INITIAL_SAMPLE_SIZE - 1; ++i) { - queryConfig.accept(resultSet, Strings.repeat(Character.forDigit(i, 10), i + 1)); - verify(resultSet, never()).setFetchSize(anyInt()); - } - queryConfig.accept(resultSet, "final sampling in the initial stage"); - verify(resultSet, times(1)).setFetchSize(anyInt()); - queryConfig.accept(resultSet, "abcd"); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/jdbc/streaming/BaseSizeEstimatorTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/jdbc/streaming/BaseSizeEstimatorTest.java deleted file mode 100644 index a2a89f960269b..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/jdbc/streaming/BaseSizeEstimatorTest.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db.jdbc.streaming; - -import static org.junit.jupiter.api.Assertions.*; - -import io.airbyte.commons.json.Jsons; -import java.util.Map; -import java.util.Optional; -import org.junit.jupiter.api.Test; - -class BaseSizeEstimatorTest { - - @Test - void testGetEstimatedByteSize() { - assertEquals(0L, BaseSizeEstimator.getEstimatedByteSize(null)); - assertEquals(21L, BaseSizeEstimator.getEstimatedByteSize("12345")); - assertEquals(45L, BaseSizeEstimator.getEstimatedByteSize(Jsons.jsonNode(Map.of("key", "value")))); - } - - public static class TestSizeEstimator extends BaseSizeEstimator { - - protected TestSizeEstimator(final long bufferByteSize, final int minFetchSize, final int defaultFetchSize, final int maxFetchSize) { - super(bufferByteSize, minFetchSize, defaultFetchSize, maxFetchSize); - } - - @Override - public Optional getFetchSize() { - return Optional.empty(); - } - - @Override - public void accept(final Object o) {} - - public void setMeanByteSize(final double meanByteSize) { - this.maxRowByteSize = meanByteSize; - } - - } - - @Test - void testGetBoundedFetchSize() { - final long bufferByteSize = 120; - final int minFetchSize = 10; - final int defaultFetchSize = 20; - final int maxFetchSize = 40; - final TestSizeEstimator sizeEstimator = new TestSizeEstimator(bufferByteSize, minFetchSize, defaultFetchSize, maxFetchSize); - - sizeEstimator.setMeanByteSize(-1.0); - assertEquals(defaultFetchSize, sizeEstimator.getBoundedFetchSize()); - - sizeEstimator.setMeanByteSize(0.0); - assertEquals(defaultFetchSize, sizeEstimator.getBoundedFetchSize()); - - // fetch size = 5 < min fetch size - sizeEstimator.setMeanByteSize(bufferByteSize / 5.0); - assertEquals(minFetchSize, sizeEstimator.getBoundedFetchSize()); - - // fetch size = 10 within [min fetch size, max fetch size] - sizeEstimator.setMeanByteSize(bufferByteSize / 10.0); - assertEquals(10, sizeEstimator.getBoundedFetchSize()); - - // fetch size = 30 within [min fetch size, max fetch size] - sizeEstimator.setMeanByteSize(bufferByteSize / 30.0); - assertEquals(30, sizeEstimator.getBoundedFetchSize()); - - // fetch size = 40 within [min fetch size, max fetch size] - sizeEstimator.setMeanByteSize(bufferByteSize / 40.0); - assertEquals(40, sizeEstimator.getBoundedFetchSize()); - - // fetch size = 60 > max fetch size - sizeEstimator.setMeanByteSize(bufferByteSize / 60.0); - assertEquals(maxFetchSize, sizeEstimator.getBoundedFetchSize()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/jdbc/streaming/InitialSizeEstimatorTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/jdbc/streaming/InitialSizeEstimatorTest.java deleted file mode 100644 index f4031f085fc39..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/jdbc/streaming/InitialSizeEstimatorTest.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db.jdbc.streaming; - -import static org.junit.jupiter.api.Assertions.*; - -import java.util.Optional; -import org.junit.jupiter.api.Test; - -class InitialSizeEstimatorTest { - - @Test - void testIt() { - final long bufferByteSize = 120; - final int initialSampleSize = 5; - final int minFetchSize = 1; - final int defaultFetchSize = 20; - final int maxFetchSize = 120; - final InitialSizeEstimator sizeEstimator = new InitialSizeEstimator( - bufferByteSize, - initialSampleSize, - minFetchSize, - defaultFetchSize, - maxFetchSize); - - // size: 3 * 4 = 12 - sizeEstimator.accept("1"); - assertFalse(sizeEstimator.getFetchSize().isPresent()); - // size: 4 * 4 = 16 - sizeEstimator.accept("11"); - assertFalse(sizeEstimator.getFetchSize().isPresent()); - // size: 5 * 4 = 20 - sizeEstimator.accept("111"); - assertFalse(sizeEstimator.getFetchSize().isPresent()); - // size: 6 * 4 = 24 - sizeEstimator.accept("1111"); - assertFalse(sizeEstimator.getFetchSize().isPresent()); - // size: 7 * 4 = 28, fetch size is available - sizeEstimator.accept("11111"); - final Optional fetchSize = sizeEstimator.getFetchSize(); - assertTrue(fetchSize.isPresent()); - final long expectedMaxByteSize = 21L; - assertEquals(expectedMaxByteSize, Math.round(sizeEstimator.getMaxRowByteSize())); - assertEquals((bufferByteSize / expectedMaxByteSize) + 1, fetchSize.get().longValue()); // + 1 needed for int remainder rounding - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/jdbc/streaming/SamplingSizeEstimatorTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/jdbc/streaming/SamplingSizeEstimatorTest.java deleted file mode 100644 index 75ba6c872318b..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/jdbc/streaming/SamplingSizeEstimatorTest.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db.jdbc.streaming; - -import static org.junit.jupiter.api.Assertions.*; - -import java.util.Optional; -import org.junit.jupiter.api.Test; - -class SamplingSizeEstimatorTest { - - @Test - void testIt() { - final long bufferByteSize = 120; - final int sampleFrequency = 3; - final long initialByteSize = 10; - final int minFetchSize = 1; - final int defaultFetchSize = 20; - final int maxFetchSize = 120; - final SamplingSizeEstimator sizeEstimator = new SamplingSizeEstimator( - bufferByteSize, - sampleFrequency, - initialByteSize, - minFetchSize, - defaultFetchSize, - maxFetchSize); - - double maxByteSize = initialByteSize; - - // size: 3 * 3 = 12, not sampled - sizeEstimator.accept("1"); - assertFalse(sizeEstimator.getFetchSize().isPresent()); - assertEquals(maxByteSize, sizeEstimator.getMaxRowByteSize()); - - // size: 4 * 3 = 16, not sampled - sizeEstimator.accept("11"); - assertFalse(sizeEstimator.getFetchSize().isPresent()); - assertEquals(maxByteSize, sizeEstimator.getMaxRowByteSize()); - - // size: 5 * 3 = 15, sampled, fetch size is ready - sizeEstimator.accept("111"); - final Optional fetchSize1 = sizeEstimator.getFetchSize(); - maxByteSize = 15; - assertDoubleEquals(15, sizeEstimator.getMaxRowByteSize()); - assertDoubleEquals(bufferByteSize / maxByteSize, fetchSize1.get().doubleValue()); - - // size: 6 * 3 = 24, not sampled - sizeEstimator.accept("1111"); - assertFalse(sizeEstimator.getFetchSize().isPresent()); - assertDoubleEquals(maxByteSize, sizeEstimator.getMaxRowByteSize()); - - // size: 7 * 3 = 28, not sampled - sizeEstimator.accept("11111"); - assertFalse(sizeEstimator.getFetchSize().isPresent()); - assertDoubleEquals(maxByteSize, sizeEstimator.getMaxRowByteSize()); - - // size: 8 * 3 = 24, sampled, fetch size is ready - sizeEstimator.accept("111111"); - final Optional fetchSize2 = sizeEstimator.getFetchSize(); - assertTrue(fetchSize2.isPresent()); - maxByteSize = 24; - assertDoubleEquals(maxByteSize, sizeEstimator.getMaxRowByteSize()); - assertDoubleEquals(bufferByteSize / maxByteSize, fetchSize2.get().doubleValue()); - } - - private static void assertDoubleEquals(final double expected, final double actual) { - assertEquals(Math.round(expected), Math.round(actual)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/jdbc/streaming/TwoStageSizeEstimatorTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/jdbc/streaming/TwoStageSizeEstimatorTest.java deleted file mode 100644 index a3314817a3104..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/jdbc/streaming/TwoStageSizeEstimatorTest.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db.jdbc.streaming; - -import static org.junit.jupiter.api.Assertions.*; - -import org.junit.jupiter.api.Test; - -class TwoStageSizeEstimatorTest { - - @Test - void testDelegationSwitch() { - final TwoStageSizeEstimator sizeEstimator = TwoStageSizeEstimator.getInstance(); - for (int i = 0; i < FetchSizeConstants.INITIAL_SAMPLE_SIZE; ++i) { - sizeEstimator.accept("1"); - assertTrue(sizeEstimator.getDelegate() instanceof InitialSizeEstimator); - } - // delegation is changed after initial sampling - for (int i = 0; i < 3; ++i) { - sizeEstimator.accept("1"); - assertTrue(sizeEstimator.getDelegate() instanceof SamplingSizeEstimator); - } - } - - @Test - void testGetTargetBufferByteSize() { - assertEquals(FetchSizeConstants.MIN_BUFFER_BYTE_SIZE, - TwoStageSizeEstimator.getTargetBufferByteSize(null)); - assertEquals(FetchSizeConstants.MIN_BUFFER_BYTE_SIZE, - TwoStageSizeEstimator.getTargetBufferByteSize(Long.MAX_VALUE)); - assertEquals(FetchSizeConstants.MIN_BUFFER_BYTE_SIZE, - TwoStageSizeEstimator.getTargetBufferByteSize(FetchSizeConstants.MIN_BUFFER_BYTE_SIZE - 10L)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/util/SSLCertificateUtilsTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/util/SSLCertificateUtilsTest.java deleted file mode 100644 index b0387077245e2..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/db/util/SSLCertificateUtilsTest.java +++ /dev/null @@ -1,213 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.db.util; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; -import java.io.IOException; -import java.io.InputStream; -import java.net.URI; -import java.nio.file.FileSystem; -import java.nio.file.Files; -import java.nio.file.Path; -import java.security.KeyStore; -import java.security.KeyStoreException; -import java.security.NoSuchAlgorithmException; -import java.security.cert.CertificateException; -import java.security.spec.InvalidKeySpecException; -import org.junit.jupiter.api.Test; - -class SSLCertificateUtilsTest { - - private static final String SLASH_TMP = "/tmp"; - private static final String KEY_STORE_PASSWORD = "123456"; - private static final String KEY_STORE_PASSWORD2 = "78910"; - static final String caPem = "-----BEGIN CERTIFICATE-----\n" - + "MIIDAzCCAeugAwIBAgIBATANBgkqhkiG9w0BAQsFADA8MTowOAYDVQQDDDFNeVNR\n" - + "TF9TZXJ2ZXJfOC4wLjMwX0F1dG9fR2VuZXJhdGVkX0NBX0NlcnRpZmljYXRlMB4X\n" - + "DTIyMDgwODA1NDMwOFoXDTMyMDgwNTA1NDMwOFowPDE6MDgGA1UEAwwxTXlTUUxf\n" - + "U2VydmVyXzguMC4zMF9BdXRvX0dlbmVyYXRlZF9DQV9DZXJ0aWZpY2F0ZTCCASIw\n" - + "DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKb2tDaE4TO/4xKRZ0QqpB4ho3cy\n" - + "daw85Sn8VNLa42EJgZVpSr0WCFl11Go7r0O2TMvceaWsnJU7FLhYHSR+Dlm62yVO\n" - + "0DsnMOC0kUoDnjSE/PmponWnoC79fgXV7AwKxSW4LLxYlPHQb4Kb7rv+UJ3KbxZz\n" - + "zB7JEm9WQCJ/byn1/jxQtoPGvWL2csX3RFr9QNh8UgpOBQsbebeLWNgxdYda2sz3\n" - + "kJcwk754Vj1mx6iszjLP0oHZu+RuoM+xIrpDmpPNMW/0rQl6q+vCymNxaxX8+MuW\n" - + "czRJ1hjh4cVjArp8YhJCEMVaLajVkhbzYaPRsdW1NGjh+C3eZnOm5fRi35kCAwEA\n" - + "AaMQMA4wDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAWKlbtUosXVy7\n" - + "LbFEuL3c2Igs023v0mQNvtZVBl5Qpsxpc3+ybmQfksEQoPxPKmWpsnWv5Bsvt335\n" - + "/NHv1wSajHEpoyDBtF1QT2rR/kjezFpiH9AY3xwtBdZhTDlc5UBrpyv+Issn1CZF\n" - + "edcIk54Gzxifn+Et5WP8b6HV/ehdE0qQPtHDmendEaIHXg12/NE+hj3DocSVm8w/\n" - + "LUNeYd9wXefwMrEWwDn0DZSsShZmgJoppA15qOnq+FVW/bhZwRv5L4l3AJv0SGoA\n" - + "o7DXxD0VGHDA6aC4tJssZbrnoDCBPzYmt9s9GwVupuEroJHZ0Wks4pt4Wx50DUgA\n" - + "KC3v0Mo/gg==\n" - + "-----END CERTIFICATE-----\n"; - - static final String caPem_Bad = "-----BEGIN CERTIFICATE-----\n" - + "MIIDAzCCAeugAwIBAgIBATANBgkqhkiG9w0BAQsFADA8MTowOAYDVQQDDDFNeVNR\n" - + "TF9TZXJ2ZXJfOC4wLjMwX0F1dG9fR2VuZXJhdGVkX0NBX0NlcnRpZmljYXRlMB4X\n" - + "DTIyMDgwODA1NDMwOFoXDTMyMDgwNTA1NDMwOFowPDE6MDgGA1UEAwwxTXlTUUxf\n" - + "U2VydmVyXzguMC4zMF9BdXRvX0dlbmVyYXRlZF9DQV9DZXJ0aWZpY2F0ZTCCASIw\n" - + "DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKb2tDaE4TO/4xKRZ0QqpB4ho3cy\n" - + "daw85Sn8VNLa42EJgZVpSr0WCFl11Go7r0O2TMvceaWsnJU7FLhYHSR+Dlm62yVO\n" - + "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX\n" - + "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX\n" - + "/NHv1wSajHEpoyDBtF1QT2rR/kjezFpiH9AY3xwtBdZhTDlc5UBrpyv+Issn1CZF\n" - + "edcIk54Gzxifn+Et5WP8b6HV/ehdE0qQPtHDmendEaIHXg12/NE+hj3DocSVm8w/\n" - + "LUNeYd9wXefwMrEWwDn0DZSsShZmgJoppA15qOnq+FVW/bhZwRv5L4l3AJv0SGoA\n" - + "o7DXxD0VGHDA6aC4tJssZbrnoDCBPzYmt9s9GwVupuEroJHZ0Wks4pt4Wx50DUgA\n" - + "KC3v0Mo/gg==\n" - + "-----END CERTIFICATE-----\n"; - - public static final String clientPem = "-----BEGIN CERTIFICATE-----\n" - + "MIIDBDCCAeygAwIBAgIBAzANBgkqhkiG9w0BAQsFADA8MTowOAYDVQQDDDFNeVNR\n" - + "TF9TZXJ2ZXJfOC4wLjMwX0F1dG9fR2VuZXJhdGVkX0NBX0NlcnRpZmljYXRlMB4X\n" - + "DTIyMDgwODA1NDMwOFoXDTMyMDgwNTA1NDMwOFowQDE+MDwGA1UEAww1TXlTUUxf\n" - + "U2VydmVyXzguMC4zMF9BdXRvX0dlbmVyYXRlZF9DbGllbnRfQ2VydGlmaWNhdGUw\n" - + "ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCV/eRPDZmrPP8d2pKsFizU\n" - + "JQkGOYDKXOilLibR1TQwN/8MToop8+mvtMi7zr/cWBDR0qTObbduWFQdK82vGppS\n" - + "ZgrRG3QWVpe8NNI9AhriVZiOmcEQqgAhbgos57Tkjy3qghNbUN1KGb3I0DnNOtvF\n" - + "RIdATbE+LxOTgCzz/Cw6DVReunQvVo9T4EC4PBBUelMWlAJLo61AQVLM3ufx4ug2\n" - + "1wbV6D/aSRooNhkwWcwk+2vabxKnOzFAQzNU7dIZlBpo6coHFwZDUxtdM2DtuLHn\n" - + "/r9CsMw8p4wtdIRXrTDmiF/xTXKnABGM8kEqPovZ6eh7He1jrzLTVANUfNQc5b8F\n" - + "AgMBAAGjDTALMAkGA1UdEwQCMAAwDQYJKoZIhvcNAQELBQADggEBAGDJ6XgLBzat\n" - + "rpLDfGHR/tZ4eFzt1Nhjzl4CyFjpUcr2e2K5XmuveJAecaQSHff2zXwfGpg/BIen\n" - + "WcPm2daIzcfN/wWg8ENMB/JE3dMq44pOmWs2g4FPDQuaH81IV0hGX4klk2XZpskJ\n" - + "moWXyGY43Xr3bbNBjyOxgBsQc4kD96ODMUKfzNMH4p9hXKAMrF9DqHwQUho5uM6M\n" - + "RnU7Uzr745xw7LKJglCgO20t4302wzsUAEPuCTcB9wJy1/cRbMmoLAdUdn6XhFb4\n" - + "pR3UDNJvXGc8by6VWrXOeB0BFeB3beMxezlTHDOWoWeJwvEfAAD/dpwHXwp5dm9L\n" - + "VjtlERcTfH8=\n" - + "-----END CERTIFICATE-----\n"; - - public static final String clientKey = "-----BEGIN RSA PRIVATE KEY-----\n" - + "MIIEowIBAAKCAQEAlf3kTw2Zqzz/HdqSrBYs1CUJBjmAylzopS4m0dU0MDf/DE6K\n" - + "KfPpr7TIu86/3FgQ0dKkzm23blhUHSvNrxqaUmYK0Rt0FlaXvDTSPQIa4lWYjpnB\n" - + "EKoAIW4KLOe05I8t6oITW1DdShm9yNA5zTrbxUSHQE2xPi8Tk4As8/wsOg1UXrp0\n" - + "L1aPU+BAuDwQVHpTFpQCS6OtQEFSzN7n8eLoNtcG1eg/2kkaKDYZMFnMJPtr2m8S\n" - + "pzsxQEMzVO3SGZQaaOnKBxcGQ1MbXTNg7bix5/6/QrDMPKeMLXSEV60w5ohf8U1y\n" - + "pwARjPJBKj6L2enoex3tY68y01QDVHzUHOW/BQIDAQABAoIBAHk/CHyC4PKUVyHZ\n" - + "2vCy6EABRB89AogSvJkyCn1anFpSGaDoKDWrjv7S4+U1RtCme8oxPboE5N+VFUGT\n" - + "dCwVFCSBikLor1mTXAruo/hfKD5HtQ+o6HFBCuP7IMyV7RtJRnOn/F+3qXpJ/qlC\n" - + "8UaeSqNXNwHbC+jZgzibxzrfYRz3BqnBYZsSP7/piN+rk6vAGs7WeawO1adqsLS6\n" - + "Hr9GilEe+bW/CtXsah3AYVwxDnwo/c03JYBdzYkRRqLgJ9dDG/5o/88FeeKbVb+U\n" - + "ZrGV9adwa+KGwsuMTYi7pkXUosm+43hLkmYUykxFv0vfkGz8EnDh4MBtY66QMkUJ\n" - + "cQgWl6ECgYEAxVJNsxpJjEa+d737iK7ylza0GhcTI3+uNPN92u0oucictMzLIm7N\n" - + "HAUhrHoO71NDYQYJlox7BG8mjze7l6fkwGfpg2u/KsN0vIqc+F+gIQeC7kmpRxQk\n" - + "l96pxMW25VhibZJFBaDx9UeBkR9RBnI1AF3jD3+wOdua+C9CMahdTDkCgYEAwph4\n" - + "FY2gcOSpA0Xz1cOFPNuwQhy9Lh3MJsb1kt20hlTcmpp3GpBrzyggiyIlpBtBHDrP\n" - + "6FcjZtV58bv8ckKB8jklvooJkyjmowBx+L7mHZ6/7QFPDQkp/dY9dQPtWjgrPyo+\n" - + "rLIN+SoVmyKdyXXaauyjyEPAexsuxzUKq0MMIS0CgYEAirvJQYnT+DqtJAeBWKKY\n" - + "kdS2YDmlDSpyU2x3KnvgTG9OLphmojkBIRhCir/uzDngf9D84Mq4m2+CzuNCk+hJ\n" - + "nzXwKqSQ7gIqi31xy/d/4Hklh2BnEkCJUfYNqvnQFARGf/99Y+268Ndrs5svHrch\n" - + "qLZaNMV0I9nRZXnksoFLx5ECgYBJ8LFAT041V005Jy1dfit0Um2I0W64xS27VkId\n" - + "igx8NmaUgDjdaR7t2etzsofm8UwuM9KoD+QtwNPTHIDx0X+a0EgdPEojFpl8OkEU\n" - + "KUU64AVBQwwMgfzorK0xd0qKy2jzWVPzPry8flczWVXnJNbXZg9dmxDaNhvyKZ9i\n" - + "L9m+CQKBgG3kkQTtsT7k1kQt/6cqjAaBq9Koi0gbS8hWjTioqPKHVQAAEjqVkmqa\n" - + "uuD/3Knh1gCgxW4jAUokRwfM7IgVA/plQQDQaKBzcFUl94Hl+t6VuvdvtA02MboE\n" - + "7TicEc38QKFoLN2hti0Bmm1eJCionsSPiuyDYH5XnhSz7TDjV9sM\n" - + "-----END RSA PRIVATE KEY-----\n"; - - public static final String clientKey_wrong_format = "MIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQDBmUvDIVGZ5HsRgnXKns2fTf26pfKND45xu" - + "NOEWVetpvo3lGc28vVMvtPiNH/kuELxo5NesC89iotxfbOTl4I9BbjFVg3eO1nNhwmToU2f1kJJ5QFRjFw+xacIMsfBT5xy/v9U7ohZXdEk6txYkOpvhfja" - + "JcLDutT+NtzRdBsttgyItp5fODnk02G4bLsJ68jVH1/CXkDRvxktLR0/NctbtPVuACwA1QG9MsVbH3cE7SymIrzgI8JHwud63dQUb5iQWZ0iIDBqmF95wvg" - + "ox9O4QjnZCkHxo3kuYxBPaxAuMMVTohLBH/oAvo0FJt+0XF453sLPO8x3zOUnJJLhn4VHAgMBAAECggEBALQ4UB7F1YC9ARO7rouAaUnzAE/QS4qlAKU8uS" - + "prQQOWfTdgHvU4FsHqorPgy23PWgI3k+iBenh/kG+F5LVwRP0pZmfNQ/uspFx/aJrVfb1dZzgCxsdzMiv9MxCetPVvduRWHLqjkqoee6MyPwzzWkmXHaF1p" - + "WkvczdzOvyAaQyS3UPsnQzS0kt4mELGZs/E24K9vD9KfSrdRXxkk3fsLFbLrrau/mEhQ/CKX7Xl4MBchiH+lF8kHvpAc27fevrnDPToZp2cbfSc1oeeKjIM" - + "VmYFKytTCi5IXCNG6S0H31rNpX+5VbdZc1iJLPH7Ch6J+dRzX36R+5zSmp7OIl5gAoECgYEA5f1p/umqMW91HQ+amZoIg6gldFfGglFM5IVCrn0RRB/BrgH" - + "Rnpo0jo3JaOUyQMfyDz69lkpKEgejYTPGDkz3kJmpA54rBwmFitB13ZaqhzM63VzYE3hPdCqpy1VTLxW2+T5nEbLuiR4rC2Y7z+CRBmYdQUNxSq90rCpveg" - + "XIq4sCgYEA135M0fmeBAjTiz3f2pRt7ne64WzY4jJ0SRe6BrVA6PnI9V5+wwtRzyhee9A0awzal6t0OvAdxmnAZg3PsP1fbOPeVwXbvBKtZ4rM7adv6UdYy" - + "6oxjd9eULK92YnVOcZPf595WmoK28L37EHlxjP8p6lnMBk/BF9Y3N3rz2xyNLUCgYAZ8qdczTwYa7zI1JPatJg1Umk3YRfSaB3GwootaYrjJroRSb8+p6M6" - + "WiDZJtKuoGBc+/Uj2anVsurp8o9r2Z8sv0lkURoFpztb1/0UTQVcT5lalDkEqVQ9hPq3KB9Edqy4HiQ+yPNEoRS2KoihAXMbR7YRQOytQnJlYjxFhhWH1QK" - + "BgQCNFv97FyETaSgAacGQHlCfqrqr75VM/FXQqX09+RyHrUubA4ShdV7Z8Id0L0yyrlbMqRBPqnkEOKck6nQKYMpCxCsF9Sr6R4xLV8B29YK7TOBhcIxDZH" - + "UfBvhwXuNBkYrpd2OABCAZ5NxoTnj/vXf12l9aSZ1N4pOPAKntRAa+ZQKBgQDCPgJQfZePJGOvSIkW/TkXcHpGsexb5p900Si23BLjnMtCNMSkHuIWb60xq" - + "I3vLFKhrLiYzYVQ5n3C6PYLcdfiDYwruYU3zmtr/gpg/QzcsvTe5CW/hxTAkzsZsFBOquJyuyCRBGN59tH6N6ietu8zzvCc8EeJJX7N7AX0ezF7lQ=="; - - void testkeyStoreFromCertificateInternal(final String certString, final String pwd, final FileSystem fs, final String directory) - throws CertificateException, IOException, KeyStoreException, NoSuchAlgorithmException { - final URI ksUri = SSLCertificateUtils.keyStoreFromCertificate(certString, pwd, fs, directory); - - final KeyStore ks = KeyStore.getInstance("PKCS12"); - final InputStream inputStream = Files.newInputStream(Path.of(ksUri)); - ks.load(inputStream, pwd.toCharArray()); - assertEquals(1, ks.size()); - Files.delete(Path.of(ksUri)); - } - - @Test - void testkeyStoreFromCertificate() throws CertificateException, IOException, KeyStoreException, NoSuchAlgorithmException { - testkeyStoreFromCertificateInternal(caPem, KEY_STORE_PASSWORD, null, SLASH_TMP); - - final Exception exception = assertThrows(CertificateException.class, () -> { - testkeyStoreFromCertificateInternal(caPem_Bad, KEY_STORE_PASSWORD, null, SLASH_TMP); - }); - assertNotNull(exception); - } - - @Test - void testkeyStoreFromCertificateInMemory() throws CertificateException, IOException, KeyStoreException, NoSuchAlgorithmException { - testkeyStoreFromCertificateInternal(caPem, KEY_STORE_PASSWORD2, null, null); - - final Exception exception = assertThrows(CertificateException.class, () -> { - testkeyStoreFromCertificateInternal(caPem_Bad, KEY_STORE_PASSWORD, null, null); - }); - assertNotNull(exception); - } - - @SuppressFBWarnings("HARD_CODE_PASSWORD") - void testKeyStoreFromClientCertificateInternal( - final String certString, - final String keyString, - final String keyStorePassword, - final FileSystem filesystem, - final String directory) - throws KeyStoreException, IOException, CertificateException, NoSuchAlgorithmException, InvalidKeySpecException, InterruptedException { - final URI ksUri = SSLCertificateUtils.keyStoreFromClientCertificate(certString, keyString, keyStorePassword, filesystem, directory); - final KeyStore ks = KeyStore.getInstance("PKCS12"); - final InputStream inputStream = Files.newInputStream(Path.of(ksUri)); - ks.load(inputStream, KEY_STORE_PASSWORD.toCharArray()); - assertTrue(ks.isKeyEntry(SSLCertificateUtils.KEYSTORE_ENTRY_PREFIX)); - assertFalse(ks.isKeyEntry("cd_")); - assertEquals(1, ks.size()); - Files.delete(Path.of(ksUri)); - } - - @Test - void testKeyStoreFromClientCertificate() - throws CertificateException, IOException, NoSuchAlgorithmException, InvalidKeySpecException, KeyStoreException, InterruptedException { - testKeyStoreFromClientCertificateInternal(clientPem, clientKey, KEY_STORE_PASSWORD, null, SLASH_TMP); - - final Exception exceptionKey = assertThrows(InvalidKeySpecException.class, () -> { - testKeyStoreFromClientCertificateInternal(clientPem, clientKey_wrong_format, KEY_STORE_PASSWORD, null, SLASH_TMP); - }); - assertNotNull(exceptionKey); - - final Exception exceptionCert = assertThrows(CertificateException.class, () -> { - testKeyStoreFromClientCertificateInternal(caPem_Bad, clientKey, KEY_STORE_PASSWORD, null, SLASH_TMP); - }); - assertNotNull(exceptionCert); - } - - @Test - void testKeyStoreFromClientCertificateInMemory() - throws CertificateException, IOException, NoSuchAlgorithmException, InvalidKeySpecException, KeyStoreException, InterruptedException { - testKeyStoreFromClientCertificateInternal(clientPem, clientKey, KEY_STORE_PASSWORD, null, null); - - final Exception exceptionKey = assertThrows(InvalidKeySpecException.class, () -> { - testKeyStoreFromClientCertificateInternal(clientPem, clientKey_wrong_format, KEY_STORE_PASSWORD, null, null); - }); - assertNotNull(exceptionKey); - - final Exception exceptionCert = assertThrows(CertificateException.class, () -> { - testKeyStoreFromClientCertificateInternal(caPem_Bad, clientKey, KEY_STORE_PASSWORD, null, null); - }); - assertNotNull(exceptionCert); - - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/AirbyteExceptionHandlerTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/AirbyteExceptionHandlerTest.java deleted file mode 100644 index 23f871f0bacde..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/AirbyteExceptionHandlerTest.java +++ /dev/null @@ -1,200 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.base; - -import static org.junit.jupiter.api.Assertions.assertAll; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.doNothing; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.spy; - -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.AirbyteErrorTraceMessage; -import io.airbyte.protocol.models.AirbyteMessage; -import io.airbyte.protocol.models.AirbyteTraceMessage; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.PrintStream; -import java.nio.charset.StandardCharsets; -import java.util.Arrays; -import java.util.Optional; -import lombok.SneakyThrows; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; - -public class AirbyteExceptionHandlerTest { - - PrintStream originalOut = System.out; - private final ByteArrayOutputStream outContent = new ByteArrayOutputStream(); - private AirbyteExceptionHandler airbyteExceptionHandler; - - @BeforeEach - public void setup() { - System.setOut(new PrintStream(outContent, true, StandardCharsets.UTF_8)); - - // mocking terminate() method in AirbyteExceptionHandler, so we don't kill the JVM - airbyteExceptionHandler = spy(new AirbyteExceptionHandler()); - doNothing().when(airbyteExceptionHandler).terminate(); - - AirbyteExceptionHandler.addThrowableForDeinterpolation(RuntimeException.class); - } - - @Test - void testTraceMessageEmission() throws Exception { - runTestWithMessage("error"); - - final AirbyteMessage traceMessage = findFirstTraceMessage(); - assertAll( - () -> assertEquals(AirbyteTraceMessage.Type.ERROR, traceMessage.getTrace().getType()), - () -> assertEquals(AirbyteExceptionHandler.logMessage, traceMessage.getTrace().getError().getMessage()), - () -> assertEquals(AirbyteErrorTraceMessage.FailureType.SYSTEM_ERROR, traceMessage.getTrace().getError().getFailureType())); - } - - @Test - void testMessageDeinterpolation() throws Exception { - AirbyteExceptionHandler.addStringForDeinterpolation("foo"); - AirbyteExceptionHandler.addStringForDeinterpolation("bar"); - - // foo and bar are added to the list explicitly - // name and description are added implicitly by the exception handler. - // all of them should be replaced by '?' - // (including FOO, which should be detected case-insensitively) - runTestWithMessage("Error happened in arst_FOO_bar_zxcv (name: description)"); - - final AirbyteMessage traceMessage = findFirstTraceMessage(); - assertAll( - () -> assertEquals(AirbyteTraceMessage.Type.ERROR, traceMessage.getTrace().getType()), - () -> assertEquals("Error happened in arst_FOO_bar_zxcv (name: description)", traceMessage.getTrace().getError().getMessage()), - () -> assertEquals("Error happened in arst_?_?_zxcv (?: ?)", traceMessage.getTrace().getError().getInternalMessage()), - () -> assertEquals(AirbyteErrorTraceMessage.FailureType.SYSTEM_ERROR, traceMessage.getTrace().getError().getFailureType()), - () -> Assertions.assertNull(traceMessage.getTrace().getError().getStackTrace(), - "Stacktrace should be null if deinterpolating the error message")); - } - - /** - * We should only deinterpolate whole words, i.e. if the target string is not adjacent to an - * alphanumeric character. - */ - @Test - void testMessageSmartDeinterpolation() throws Exception { - AirbyteExceptionHandler.addStringForDeinterpolation("foo"); - AirbyteExceptionHandler.addStringForDeinterpolation("bar"); - - runTestWithMessage("Error happened in foobar"); - - final AirbyteMessage traceMessage = findFirstTraceMessage(); - // We shouldn't deinterpolate at all in this case, so we will get the default trace message - // behavior. - assertAll( - () -> assertEquals(AirbyteExceptionHandler.logMessage, traceMessage.getTrace().getError().getMessage()), - () -> assertEquals( - "java.lang.RuntimeException: Error happened in foobar", - traceMessage.getTrace().getError().getInternalMessage())); - } - - /** - * When one of the target strings is a substring of another, we should not deinterpolate the - * substring. - */ - @Test - void testMessageSubstringDeinterpolation() throws Exception { - AirbyteExceptionHandler.addStringForDeinterpolation("airbyte"); - AirbyteExceptionHandler.addStringForDeinterpolation("airbyte_internal"); - - runTestWithMessage("Error happened in airbyte_internal.foo"); - - final AirbyteMessage traceMessage = findFirstTraceMessage(); - assertEquals("Error happened in ?.foo", traceMessage.getTrace().getError().getInternalMessage()); - } - - /** - * We should only deinterpolate specific exception classes. - */ - @Test - void testClassDeinterpolation() throws Exception { - AirbyteExceptionHandler.addStringForDeinterpolation("foo"); - - runTestWithMessage(new IOException("Error happened in foo")); - - final AirbyteMessage traceMessage = findFirstTraceMessage(); - // We shouldn't deinterpolate at all in this case, so we will get the default trace message - // behavior. - assertAll( - () -> assertEquals(AirbyteExceptionHandler.logMessage, traceMessage.getTrace().getError().getMessage()), - () -> assertEquals( - "java.io.IOException: Error happened in foo", - traceMessage.getTrace().getError().getInternalMessage())); - } - - /** - * We should check the classes of the entire exception chain, not just the root exception. - */ - @Test - void testNestedThrowableClassDeinterpolation() throws Exception { - AirbyteExceptionHandler.addStringForDeinterpolation("foo"); - - runTestWithMessage(new Exception(new RuntimeException("Error happened in foo"))); - - final AirbyteMessage traceMessage = findFirstTraceMessage(); - // We shouldn't deinterpolate at all in this case, so we will get the default trace message - // behavior. - assertEquals("Error happened in ?", traceMessage.getTrace().getError().getInternalMessage()); - } - - private void runTestWithMessage(final String message) throws InterruptedException { - runTestWithMessage(new RuntimeException(message)); - } - - private void runTestWithMessage(final Throwable throwable) throws InterruptedException { - // have to spawn a new thread to test the uncaught exception handling, - // because junit catches any exceptions in main thread, i.e. they're not 'uncaught' - final Thread thread = new Thread() { - - @SneakyThrows - public void run() { - final IntegrationRunner runner = Mockito.mock(IntegrationRunner.class); - doThrow(throwable).when(runner).run(new String[] {"write"}); - runner.run(new String[] {"write"}); - } - - }; - thread.setUncaughtExceptionHandler(airbyteExceptionHandler); - thread.start(); - thread.join(); - System.out.flush(); - } - - @AfterEach - public void teardown() { - System.setOut(originalOut); - - AirbyteExceptionHandler.STRINGS_TO_DEINTERPOLATE.clear(); - AirbyteExceptionHandler.addCommonStringsToDeinterpolate(); - - AirbyteExceptionHandler.THROWABLES_TO_DEINTERPOLATE.clear(); - } - - private AirbyteMessage findFirstTraceMessage() { - final Optional maybeTraceMessage = Arrays.stream(outContent.toString(StandardCharsets.UTF_8).split("\n")) - .map(line -> { - // these tests sometimes emit non-json stdout (e.g. log4j warnings) - // so we try-catch to handle those malformed lines. - try { - return Jsons.deserialize(line, AirbyteMessage.class); - } catch (final Exception e) { - return null; - } - }) - .filter(message -> message != null && message.getType() == AirbyteMessage.Type.TRACE) - .findFirst(); - assertTrue(maybeTraceMessage.isPresent(), "Expected to find a trace message in stdout"); - return maybeTraceMessage.get(); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/AirbyteLogMessageTemplateTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/AirbyteLogMessageTemplateTest.java deleted file mode 100644 index 50307ebd1890c..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/AirbyteLogMessageTemplateTest.java +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.base; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.AirbyteLogMessage; -import io.airbyte.protocol.models.AirbyteMessage; -import io.airbyte.protocol.models.AirbyteMessage.Type; -import java.io.ByteArrayOutputStream; -import java.nio.charset.StandardCharsets; -import java.util.Optional; -import java.util.regex.Matcher; -import java.util.regex.Pattern; -import org.apache.logging.log4j.Level; -import org.apache.logging.log4j.core.LoggerContext; -import org.apache.logging.log4j.core.appender.OutputStreamAppender; -import org.apache.logging.log4j.core.config.Configuration; -import org.apache.logging.log4j.core.config.Configurator; -import org.apache.logging.log4j.core.config.LoggerConfig; -import org.apache.logging.log4j.spi.ExtendedLogger; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.ValueSource; -import org.junit.platform.commons.util.StringUtils; - -public class AirbyteLogMessageTemplateTest { - - public static final String OUTPUT_STREAM_APPENDER = "OutputStreamAppender"; - public static final String CONSOLE_JSON_APPENDER = "ConsoleJSONAppender"; - private LoggerContext loggerContext; - private LoggerConfig rootLoggerConfig; - private ExtendedLogger logger; - private OutputStreamAppender outputStreamAppender; - private ByteArrayOutputStream outputContent; - - void getLogger() { - // We are creating a log appender with the same output pattern - // as the console json appender defined in this project's log4j2.xml file. - // We then attach this log appender with the LOGGER instance so that we can validate the logs - // produced by code and assert that it matches the expected format. - loggerContext = Configurator.initialize(null, "log4j2.xml"); - - final Configuration configuration = loggerContext.getConfiguration(); - rootLoggerConfig = configuration.getLoggerConfig(""); - - outputContent = new ByteArrayOutputStream(); - outputStreamAppender = OutputStreamAppender.createAppender( - rootLoggerConfig.getAppenders().get(CONSOLE_JSON_APPENDER).getLayout(), - null, outputContent, OUTPUT_STREAM_APPENDER, false, true); - outputStreamAppender.start(); - - rootLoggerConfig.addAppender(outputStreamAppender, Level.ALL, null); - logger = loggerContext.getLogger(AirbyteLogMessageTemplateTest.class); - } - - @AfterEach - void closeLogger() { - outputStreamAppender.stop(); - rootLoggerConfig.removeAppender(OUTPUT_STREAM_APPENDER); - loggerContext.close(); - } - - @Test - public void testAirbyteLogMessageFormat() throws java.io.IOException { - getLogger(); - logger.info("hello"); - - outputContent.flush(); - final String logMessage = outputContent.toString(StandardCharsets.UTF_8); - final AirbyteMessage airbyteMessage = validateLogIsAirbyteMessage(logMessage); - final AirbyteLogMessage airbyteLogMessage = validateAirbyteMessageIsLog(airbyteMessage); - - final String connectorLogMessage = airbyteLogMessage.getMessage(); - // validate that the message inside AirbyteLogMessage matches the pattern. - // pattern to check for is: LOG_LEVEL className(methodName):LineNumber logMessage - final String connectorLogMessageRegex = - String.format("^INFO %s [\\w+.]*.AirbyteLogMessageTemplateTest\\(testAirbyteLogMessageFormat\\):\\d+ hello$", - Pattern.compile(Thread.currentThread().getName())); - final Pattern pattern = Pattern.compile(connectorLogMessageRegex); - - final Matcher matcher = pattern.matcher(connectorLogMessage); - assertTrue(matcher.matches(), connectorLogMessage); - } - - private AirbyteMessage validateLogIsAirbyteMessage(final String logMessage) { - final Optional jsonLine = Jsons.tryDeserialize(logMessage); - assertFalse(jsonLine.isEmpty()); - - final Optional m = Jsons.tryObject(jsonLine.get(), AirbyteMessage.class); - assertFalse(m.isEmpty()); - return m.get(); - } - - private AirbyteLogMessage validateAirbyteMessageIsLog(final AirbyteMessage airbyteMessage) { - assertEquals(Type.LOG, airbyteMessage.getType()); - assertNotNull(airbyteMessage.getLog()); - assertFalse(StringUtils.isBlank(airbyteMessage.getLog().getMessage())); - return airbyteMessage.getLog(); - } - - @ParameterizedTest - @ValueSource(ints = {2, 100, 9000}) - public void testAirbyteLogMessageLength(int stringRepetitions) throws java.io.IOException { - getLogger(); - final StringBuilder sb = new StringBuilder(); - for (int i = 0; i < stringRepetitions; i++) { - sb.append("abcd"); - } - logger.info(sb.toString(), new RuntimeException("aaaaa bbbbbb ccccccc dddddd")); - outputContent.flush(); - final String logMessage = outputContent.toString(StandardCharsets.UTF_8); - - final AirbyteMessage airbyteMessage = validateLogIsAirbyteMessage(logMessage); - final AirbyteLogMessage airbyteLogMessage = validateAirbyteMessageIsLog(airbyteMessage); - final String connectorLogMessage = airbyteLogMessage.getMessage(); - - // #30781 - message length is capped at 16,000 charcters. - int j = connectorLogMessage.length(); - assertFalse(connectorLogMessage.length() > 16_001); - assertTrue(logMessage.length() < 32768); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/AirbyteTraceMessageUtilityTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/AirbyteTraceMessageUtilityTest.java deleted file mode 100644 index 6ea6492d29603..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/AirbyteTraceMessageUtilityTest.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.base; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteErrorTraceMessage.FailureType; -import java.io.ByteArrayOutputStream; -import java.io.PrintStream; -import java.nio.charset.StandardCharsets; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; - -public class AirbyteTraceMessageUtilityTest { - - PrintStream originalOut = System.out; - private final ByteArrayOutputStream outContent = new ByteArrayOutputStream(); - - @BeforeEach - public void setUpOut() { - System.setOut(new PrintStream(outContent, true, StandardCharsets.UTF_8)); - } - - private void assertJsonNodeIsTraceMessage(final JsonNode jsonNode) { - // todo: this check could be better by actually trying to convert the JsonNode to an - // AirbyteTraceMessage instance - Assertions.assertEquals("TRACE", jsonNode.get("type").asText()); - Assertions.assertNotNull(jsonNode.get("trace")); - } - - @Test - void testEmitSystemErrorTrace() { - AirbyteTraceMessageUtility.emitSystemErrorTrace(Mockito.mock(RuntimeException.class), "this is a system error"); - final JsonNode outJson = Jsons.deserialize(outContent.toString(StandardCharsets.UTF_8)); - assertJsonNodeIsTraceMessage(outJson); - Assertions.assertEquals("system_error", outJson.get("trace").get("error").get("failure_type").asText()); - } - - @Test - void testEmitConfigErrorTrace() { - AirbyteTraceMessageUtility.emitConfigErrorTrace(Mockito.mock(RuntimeException.class), "this is a config error"); - final JsonNode outJson = Jsons.deserialize(outContent.toString(StandardCharsets.UTF_8)); - assertJsonNodeIsTraceMessage(outJson); - Assertions.assertEquals("config_error", outJson.get("trace").get("error").get("failure_type").asText()); - } - - @Test - void testEmitErrorTrace() { - AirbyteTraceMessageUtility.emitErrorTrace(Mockito.mock(RuntimeException.class), "this is an error", FailureType.SYSTEM_ERROR); - assertJsonNodeIsTraceMessage(Jsons.deserialize(outContent.toString(StandardCharsets.UTF_8))); - } - - @Test - void testCorrectStacktraceFormat() { - try { - final int x = 1 / 0; - } catch (final Exception e) { - AirbyteTraceMessageUtility.emitSystemErrorTrace(e, "you exploded the universe"); - } - final JsonNode outJson = Jsons.deserialize(outContent.toString(StandardCharsets.UTF_8)); - Assertions.assertTrue(outJson.get("trace").get("error").get("stack_trace").asText().contains("\n\tat")); - } - - @AfterEach - public void revertOut() { - System.setOut(originalOut); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/DestinationConfigTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/DestinationConfigTest.java deleted file mode 100644 index 68044162bb724..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/DestinationConfigTest.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.base; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.json.Jsons; -import org.junit.jupiter.api.Test; - -public class DestinationConfigTest { - - private static final String JSON = """ - { - "foo": "bar", - "baz": true - } - """; - - private static final JsonNode NODE = Jsons.deserialize(JSON); - - @Test - public void testInitialization() { - // bad initialization - assertThrows(IllegalArgumentException.class, () -> DestinationConfig.initialize(null)); - assertThrows(IllegalStateException.class, DestinationConfig::getInstance); - - // good initialization - DestinationConfig.initialize(NODE, true); - assertNotNull(DestinationConfig.getInstance()); - assertEquals(NODE, DestinationConfig.getInstance().root); - assertEquals(true, DestinationConfig.getInstance().getIsV2Destination()); - - // initializing again doesn't change the config - final JsonNode nodeUnused = Jsons.deserialize("{}"); - DestinationConfig.initialize(nodeUnused, false); - assertEquals(NODE, DestinationConfig.getInstance().root); - assertEquals(true, DestinationConfig.getInstance().getIsV2Destination()); - } - - @Test - public void testValues() { - DestinationConfig.clearInstance(); - DestinationConfig.initialize(NODE); - - assertEquals("bar", DestinationConfig.getInstance().getTextValue("foo")); - assertEquals("", DestinationConfig.getInstance().getTextValue("baz")); - - assertFalse(DestinationConfig.getInstance().getBooleanValue("foo")); - assertTrue(DestinationConfig.getInstance().getBooleanValue("baz")); - - // non-existent key - assertEquals("", DestinationConfig.getInstance().getTextValue("blah")); - assertFalse(DestinationConfig.getInstance().getBooleanValue("blah")); - - assertEquals(Jsons.deserialize("\"bar\""), DestinationConfig.getInstance().getNodeValue("foo")); - assertEquals(Jsons.deserialize("true"), DestinationConfig.getInstance().getNodeValue("baz")); - assertNull(DestinationConfig.getInstance().getNodeValue("blah")); - - assertEquals(false, DestinationConfig.getInstance().getIsV2Destination()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/FailureTrackingAirbyteMessageConsumerTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/FailureTrackingAirbyteMessageConsumerTest.java deleted file mode 100644 index ad3abb1703386..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/FailureTrackingAirbyteMessageConsumerTest.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.base; - -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteMessage.Type; -import org.junit.jupiter.api.Test; - -class FailureTrackingAirbyteMessageConsumerTest { - - @Test - void testStartNoFailure() throws Exception { - final TestConsumer consumer = spy(new TestConsumer()); - consumer.start(); - consumer.close(); - - verify(consumer).close(false); - } - - @Test - void testStartWithFailure() throws Exception { - final TestConsumer consumer = spy(new TestConsumer()); - doThrow(new RuntimeException()).when(consumer).startTracked(); - - // verify the exception still gets thrown. - assertThrows(RuntimeException.class, consumer::start); - consumer.close(); - - verify(consumer).close(true); - } - - @Test - void testAcceptNoFailure() throws Exception { - final TestConsumer consumer = spy(new TestConsumer()); - - final AirbyteMessage msg = mock(AirbyteMessage.class); - consumer.accept(msg); - consumer.close(); - - verify(consumer).close(false); - } - - @Test - void testAcceptWithFailure() throws Exception { - final TestConsumer consumer = spy(new TestConsumer()); - final AirbyteMessage msg = mock(AirbyteMessage.class); - when(msg.getType()).thenReturn(Type.RECORD); - doThrow(new RuntimeException()).when(consumer).acceptTracked(any()); - - // verify the exception still gets thrown. - assertThrows(RuntimeException.class, () -> consumer.accept(msg)); - consumer.close(); - - verify(consumer).close(true); - } - - static class TestConsumer extends FailureTrackingAirbyteMessageConsumer { - - @Override - protected void startTracked() { - - } - - @Override - protected void acceptTracked(final AirbyteMessage s) { - - } - - @Override - protected void close(final boolean hasFailed) { - - } - - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/IntegrationCliParserTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/IntegrationCliParserTest.java deleted file mode 100644 index 2eac0ed6e6316..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/IntegrationCliParserTest.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.base; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; - -import java.nio.file.Path; -import org.junit.jupiter.api.Test; - -class IntegrationCliParserTest { - - private static final String CONFIG_FILENAME = "config.json"; - private static final String CATALOG_FILENAME = "catalog.json"; - private static final String STATE_FILENAME = "state.json"; - - @Test - void testSpec() { - final String[] args = new String[] {"--spec"}; - final IntegrationConfig actual = new IntegrationCliParser().parse(args); - assertEquals(IntegrationConfig.spec(), actual); - } - - @Test - void testCheck() { - final String[] args = new String[] {"--check", "--config", CONFIG_FILENAME}; - final IntegrationConfig actual = new IntegrationCliParser().parse(args); - assertEquals(IntegrationConfig.check(Path.of(CONFIG_FILENAME)), actual); - } - - @Test - void testDiscover() { - final String[] args = new String[] {"--discover", "--config", CONFIG_FILENAME}; - final IntegrationConfig actual = new IntegrationCliParser().parse(args); - assertEquals(IntegrationConfig.discover(Path.of(CONFIG_FILENAME)), actual); - } - - @Test - void testWrite() { - final String[] args = new String[] {"--write", "--config", CONFIG_FILENAME, "--catalog", CATALOG_FILENAME}; - final IntegrationConfig actual = new IntegrationCliParser().parse(args); - assertEquals(IntegrationConfig.write(Path.of(CONFIG_FILENAME), Path.of(CATALOG_FILENAME)), actual); - } - - @Test - void testReadWithoutState() { - final String[] args = new String[] {"--read", "--config", CONFIG_FILENAME, "--catalog", CATALOG_FILENAME}; - final IntegrationConfig actual = new IntegrationCliParser().parse(args); - assertEquals(IntegrationConfig.read(Path.of(CONFIG_FILENAME), Path.of(CATALOG_FILENAME), null), actual); - } - - @Test - void testReadWithState() { - final String[] args = new String[] {"--read", "--config", CONFIG_FILENAME, "--catalog", CATALOG_FILENAME, "--state", STATE_FILENAME}; - final IntegrationConfig actual = new IntegrationCliParser().parse(args); - assertEquals(IntegrationConfig.read(Path.of(CONFIG_FILENAME), Path.of(CATALOG_FILENAME), Path.of(STATE_FILENAME)), actual); - } - - @Test - void testFailsOnUnknownArg() { - final String[] args = new String[] {"--check", "--config", CONFIG_FILENAME, "--random", "garbage"}; - assertThrows(IllegalArgumentException.class, () -> new IntegrationCliParser().parse(args)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/IntegrationConfigTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/IntegrationConfigTest.java deleted file mode 100644 index ae76a8b27afb8..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/IntegrationConfigTest.java +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.base; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; - -import java.nio.file.Path; -import java.util.Optional; -import org.junit.jupiter.api.Test; - -class IntegrationConfigTest { - - private static final Path CONFIG_PATH = Path.of("config.json"); - private static final Path CATALOG_PATH = Path.of("catalog.json"); - private static final Path STATE_PATH = Path.of("state.json"); - - @Test - void testSpec() { - final IntegrationConfig config = IntegrationConfig.spec(); - assertEquals(Command.SPEC, config.getCommand()); - assertThrows(IllegalStateException.class, config::getConfigPath); - assertThrows(IllegalStateException.class, config::getCatalogPath); - assertThrows(IllegalStateException.class, config::getStatePath); - } - - @Test - void testCheck() { - assertThrows(NullPointerException.class, () -> IntegrationConfig.check(null)); - - final IntegrationConfig config = IntegrationConfig.check(CONFIG_PATH); - assertEquals(Command.CHECK, config.getCommand()); - assertEquals(CONFIG_PATH, config.getConfigPath()); - assertThrows(IllegalStateException.class, config::getCatalogPath); - assertThrows(IllegalStateException.class, config::getStatePath); - } - - @Test - void testDiscover() { - assertThrows(NullPointerException.class, () -> IntegrationConfig.discover(null)); - - final IntegrationConfig config = IntegrationConfig.discover(CONFIG_PATH); - assertEquals(Command.DISCOVER, config.getCommand()); - assertEquals(CONFIG_PATH, config.getConfigPath()); - assertThrows(IllegalStateException.class, config::getCatalogPath); - assertThrows(IllegalStateException.class, config::getStatePath); - } - - @Test - void testWrite() { - assertThrows(NullPointerException.class, () -> IntegrationConfig.write(null, CATALOG_PATH)); - assertThrows(NullPointerException.class, () -> IntegrationConfig.write(CONFIG_PATH, null)); - - final IntegrationConfig config = IntegrationConfig.write(CONFIG_PATH, CATALOG_PATH); - assertEquals(Command.WRITE, config.getCommand()); - assertEquals(CONFIG_PATH, config.getConfigPath()); - assertEquals(CATALOG_PATH, config.getCatalogPath()); - assertThrows(IllegalStateException.class, config::getStatePath); - } - - @Test - void testReadWithState() { - assertThrows(NullPointerException.class, () -> IntegrationConfig.read(null, CATALOG_PATH, STATE_PATH)); - assertThrows(NullPointerException.class, () -> IntegrationConfig.read(CONFIG_PATH, null, STATE_PATH)); - - final IntegrationConfig config = IntegrationConfig.read(CONFIG_PATH, CATALOG_PATH, STATE_PATH); - assertEquals(Command.READ, config.getCommand()); - assertEquals(CONFIG_PATH, config.getConfigPath()); - assertEquals(CATALOG_PATH, config.getCatalogPath()); - assertEquals(Optional.of(STATE_PATH), config.getStatePath()); - } - - @Test - void testReadWithoutState() { - assertThrows(NullPointerException.class, () -> IntegrationConfig.read(null, CATALOG_PATH, null)); - assertThrows(NullPointerException.class, () -> IntegrationConfig.read(CONFIG_PATH, null, null)); - - final IntegrationConfig config = IntegrationConfig.read(CONFIG_PATH, CATALOG_PATH, null); - assertEquals(Command.READ, config.getCommand()); - assertEquals(CONFIG_PATH, config.getConfigPath()); - assertEquals(CATALOG_PATH, config.getCatalogPath()); - assertEquals(Optional.empty(), config.getStatePath()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/IntegrationRunnerBackwardsCompatbilityTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/IntegrationRunnerBackwardsCompatbilityTest.java deleted file mode 100644 index e0fef28b07921..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/IntegrationRunnerBackwardsCompatbilityTest.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.base; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import java.io.BufferedInputStream; -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.InputStream; -import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.List; -import java.util.Scanner; -import org.junit.jupiter.api.Test; - -public class IntegrationRunnerBackwardsCompatbilityTest { - - @Test - void testByteArrayInputStreamVersusScanner() throws Exception { - final String[] testInputs = new String[] { - "This is line 1\nThis is line 2\nThis is line 3", - "This is line 1\n\nThis is line 2\n\n\nThis is line 3", - "This is line 1\rThis is line 2\nThis is line 3\r\nThis is line 4", - "This is line 1 with emoji 😊\nThis is line 2 with Greek characters: Α, Β, Χ\nThis is line 3 with Cyrillic characters: Д, Ж, З", - "This is a very long line that contains a lot of characters...", - "This is line 1 with an escaped newline \\n character\nThis is line 2 with another escaped newline \\n character", - "This is line 1\n\n", - "\nThis is line 2", - "\n" - }; - - for (final String testInput : testInputs) { - // get new output - final InputStream stream1 = new ByteArrayInputStream(testInput.getBytes(StandardCharsets.UTF_8)); - final MockConsumer consumer2 = new MockConsumer(); - try (final BufferedInputStream bis = new BufferedInputStream(stream1); - final ByteArrayOutputStream baos = new ByteArrayOutputStream()) { - IntegrationRunner.consumeWriteStream(consumer2, bis, baos); - } - final List newOutput = consumer2.getOutput(); - - // get old output - final List oldOutput = new ArrayList<>(); - final InputStream stream2 = new ByteArrayInputStream(testInput.getBytes(StandardCharsets.UTF_8)); - final Scanner scanner = new Scanner(stream2, StandardCharsets.UTF_8).useDelimiter("[\r\n]+"); - while (scanner.hasNext()) { - oldOutput.add(scanner.next()); - } - - assertEquals(oldOutput, newOutput); - } - } - - private static class MockConsumer implements SerializedAirbyteMessageConsumer { - - private final List output = new ArrayList<>(); - - @Override - public void start() { - - } - - @Override - public void accept(final String message, final Integer sizeInBytes) { - output.add(message); - } - - @Override - public void close() { - - } - - public List getOutput() { - return new ArrayList<>(output); - } - - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/IntegrationRunnerTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/IntegrationRunnerTest.java deleted file mode 100644 index 742c01e32463a..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/IntegrationRunnerTest.java +++ /dev/null @@ -1,497 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.base; - -import static io.airbyte.cdk.integrations.base.IntegrationRunner.ORPHANED_THREAD_FILTER; -import static io.airbyte.cdk.integrations.util.ConnectorExceptionUtil.COMMON_EXCEPTION_MESSAGE_TEMPLATE; -import static org.assertj.core.api.AssertionsForClassTypes.assertThat; -import static org.assertj.core.api.AssertionsForClassTypes.catchThrowable; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.inOrder; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import io.airbyte.cdk.integrations.base.Destination.ShimToSerializedAirbyteMessageConsumer; -import io.airbyte.commons.exceptions.ConfigErrorException; -import io.airbyte.commons.io.IOs; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.util.AutoCloseableIterators; -import io.airbyte.commons.util.MoreIterators; -import io.airbyte.protocol.models.v0.AirbyteCatalog; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus.Status; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteMessage.Type; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import io.airbyte.protocol.models.v0.AirbyteStream; -import io.airbyte.protocol.models.v0.CatalogHelpers; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConnectorSpecification; -import io.airbyte.validation.json.JsonSchemaValidator; -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.net.URI; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.time.Instant; -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.function.Consumer; -import java.util.stream.Collectors; -import org.apache.commons.lang3.ThreadUtils; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.mockito.InOrder; -import org.mockito.Mockito; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -class IntegrationRunnerTest { - - private static final Logger LOGGER = LoggerFactory.getLogger(IntegrationRunnerTest.class); - - private static final String CONFIG_FILE_NAME = "config.json"; - private static final String CONFIGURED_CATALOG_FILE_NAME = "configured_catalog.json"; - private static final String STATE_FILE_NAME = "state.json"; - - private static final String[] ARGS = new String[] {"args"}; - - private static final String CONFIG_STRING = "{ \"username\": \"airbyte\" }"; - private static final JsonNode CONFIG = Jsons.deserialize(CONFIG_STRING); - private static final String STREAM_NAME = "users"; - private static final Long EMITTED_AT = Instant.now().toEpochMilli(); - private static final Path TEST_ROOT = Path.of("/tmp/airbyte_tests"); - - private static final AirbyteCatalog CATALOG = new AirbyteCatalog().withStreams(Lists.newArrayList(new AirbyteStream().withName(STREAM_NAME))); - private static final ConfiguredAirbyteCatalog CONFIGURED_CATALOG = CatalogHelpers.toDefaultConfiguredCatalog(CATALOG); - private static final JsonNode STATE = Jsons.jsonNode(ImmutableMap.of("checkpoint", "05/08/1945")); - - private IntegrationCliParser cliParser; - private Consumer stdoutConsumer; - private Destination destination; - private Source source; - private Path configPath; - private Path configuredCatalogPath; - private Path statePath; - private Path configDir; - - @SuppressWarnings("unchecked") - @BeforeEach - void setup() throws IOException { - cliParser = mock(IntegrationCliParser.class); - stdoutConsumer = Mockito.mock(Consumer.class); - destination = mock(Destination.class); - source = mock(Source.class); - configDir = Files.createTempDirectory(Files.createDirectories(TEST_ROOT), "test"); - - configPath = IOs.writeFile(configDir, CONFIG_FILE_NAME, CONFIG_STRING); - configuredCatalogPath = IOs.writeFile(configDir, CONFIGURED_CATALOG_FILE_NAME, Jsons.serialize(CONFIGURED_CATALOG)); - statePath = IOs.writeFile(configDir, STATE_FILE_NAME, Jsons.serialize(STATE)); - - final String testName = Thread.currentThread().getName(); - ThreadUtils.getAllThreads() - .stream() - .filter(runningThread -> !runningThread.isDaemon()) - .forEach(runningThread -> runningThread.setName(testName)); - } - - @Test - void testSpecSource() throws Exception { - final IntegrationConfig intConfig = IntegrationConfig.spec(); - final ConnectorSpecification output = new ConnectorSpecification().withDocumentationUrl(new URI("https://docs.airbyte.io/")); - - when(cliParser.parse(ARGS)).thenReturn(intConfig); - when(source.spec()).thenReturn(output); - - new IntegrationRunner(cliParser, stdoutConsumer, null, source).run(ARGS); - - verify(source).spec(); - verify(stdoutConsumer).accept(new AirbyteMessage().withType(Type.SPEC).withSpec(output)); - } - - @Test - void testSpecDestination() throws Exception { - final IntegrationConfig intConfig = IntegrationConfig.spec(); - final ConnectorSpecification output = new ConnectorSpecification().withDocumentationUrl(new URI("https://docs.airbyte.io/")); - - when(cliParser.parse(ARGS)).thenReturn(intConfig); - when(destination.spec()).thenReturn(output); - - new IntegrationRunner(cliParser, stdoutConsumer, destination, null).run(ARGS); - - verify(destination).spec(); - verify(stdoutConsumer).accept(new AirbyteMessage().withType(Type.SPEC).withSpec(output)); - } - - @Test - void testCheckSource() throws Exception { - final IntegrationConfig intConfig = IntegrationConfig.check(configPath); - final AirbyteConnectionStatus output = new AirbyteConnectionStatus().withStatus(Status.FAILED).withMessage("it failed"); - - when(cliParser.parse(ARGS)).thenReturn(intConfig); - when(source.check(CONFIG)).thenReturn(output); - - final ConnectorSpecification expectedConnSpec = mock(ConnectorSpecification.class); - when(source.spec()).thenReturn(expectedConnSpec); - when(expectedConnSpec.getConnectionSpecification()).thenReturn(CONFIG); - final JsonSchemaValidator jsonSchemaValidator = mock(JsonSchemaValidator.class); - new IntegrationRunner(cliParser, stdoutConsumer, null, source, jsonSchemaValidator).run(ARGS); - - verify(source).check(CONFIG); - verify(stdoutConsumer).accept(new AirbyteMessage().withType(Type.CONNECTION_STATUS).withConnectionStatus(output)); - verify(jsonSchemaValidator).validate(any(), any()); - } - - @Test - void testCheckDestination() throws Exception { - final IntegrationConfig intConfig = IntegrationConfig.check(configPath); - final AirbyteConnectionStatus output = new AirbyteConnectionStatus().withStatus(Status.FAILED).withMessage("it failed"); - - when(cliParser.parse(ARGS)).thenReturn(intConfig); - when(destination.check(CONFIG)).thenReturn(output); - - final ConnectorSpecification expectedConnSpec = mock(ConnectorSpecification.class); - when(destination.spec()).thenReturn(expectedConnSpec); - when(expectedConnSpec.getConnectionSpecification()).thenReturn(CONFIG); - - final JsonSchemaValidator jsonSchemaValidator = mock(JsonSchemaValidator.class); - - new IntegrationRunner(cliParser, stdoutConsumer, destination, null, jsonSchemaValidator).run(ARGS); - - verify(destination).check(CONFIG); - verify(stdoutConsumer).accept(new AirbyteMessage().withType(Type.CONNECTION_STATUS).withConnectionStatus(output)); - verify(jsonSchemaValidator).validate(any(), any()); - } - - @Test - void testDiscover() throws Exception { - final IntegrationConfig intConfig = IntegrationConfig.discover(configPath); - final AirbyteCatalog output = new AirbyteCatalog() - .withStreams(Lists.newArrayList(new AirbyteStream().withName("oceans"))); - - when(cliParser.parse(ARGS)).thenReturn(intConfig); - when(source.discover(CONFIG)).thenReturn(output); - - final ConnectorSpecification expectedConnSpec = mock(ConnectorSpecification.class); - when(source.spec()).thenReturn(expectedConnSpec); - when(expectedConnSpec.getConnectionSpecification()).thenReturn(CONFIG); - - final JsonSchemaValidator jsonSchemaValidator = mock(JsonSchemaValidator.class); - new IntegrationRunner(cliParser, stdoutConsumer, null, source, jsonSchemaValidator).run(ARGS); - - verify(source).discover(CONFIG); - verify(stdoutConsumer).accept(new AirbyteMessage().withType(Type.CATALOG).withCatalog(output)); - verify(jsonSchemaValidator).validate(any(), any()); - } - - @Test - void testRead() throws Exception { - final IntegrationConfig intConfig = IntegrationConfig.read(configPath, configuredCatalogPath, - statePath); - final AirbyteMessage message1 = new AirbyteMessage().withType(Type.RECORD) - .withRecord(new AirbyteRecordMessage().withData(Jsons.jsonNode(ImmutableMap.of("names", "byron")))); - final AirbyteMessage message2 = new AirbyteMessage().withType(Type.RECORD).withRecord(new AirbyteRecordMessage() - .withData(Jsons.jsonNode(ImmutableMap.of("names", "reginald")))); - - when(cliParser.parse(ARGS)).thenReturn(intConfig); - when(source.read(CONFIG, CONFIGURED_CATALOG, STATE)) - .thenReturn(AutoCloseableIterators.fromIterator(MoreIterators.of(message1, message2))); - - final ConnectorSpecification expectedConnSpec = mock(ConnectorSpecification.class); - when(source.spec()).thenReturn(expectedConnSpec); - when(expectedConnSpec.getConnectionSpecification()).thenReturn(CONFIG); - - final JsonSchemaValidator jsonSchemaValidator = mock(JsonSchemaValidator.class); - new IntegrationRunner(cliParser, stdoutConsumer, null, source, jsonSchemaValidator).run(ARGS); - - // noinspection resource - verify(source).read(CONFIG, CONFIGURED_CATALOG, STATE); - verify(stdoutConsumer).accept(message1); - verify(stdoutConsumer).accept(message2); - verify(jsonSchemaValidator).validate(any(), any()); - } - - @Test - void testReadException() throws Exception { - final IntegrationConfig intConfig = IntegrationConfig.read(configPath, configuredCatalogPath, - statePath); - final ConfigErrorException configErrorException = new ConfigErrorException("Invalid configuration"); - - when(cliParser.parse(ARGS)).thenReturn(intConfig); - when(source.read(CONFIG, CONFIGURED_CATALOG, STATE)).thenThrow(configErrorException); - - final ConnectorSpecification expectedConnSpec = mock(ConnectorSpecification.class); - when(source.spec()).thenReturn(expectedConnSpec); - when(expectedConnSpec.getConnectionSpecification()).thenReturn(CONFIG); - - final JsonSchemaValidator jsonSchemaValidator = mock(JsonSchemaValidator.class); - final Throwable throwable = catchThrowable(() -> new IntegrationRunner(cliParser, stdoutConsumer, null, source, jsonSchemaValidator).run(ARGS)); - - assertThat(throwable).isInstanceOf(ConfigErrorException.class); - // noinspection resource - verify(source).read(CONFIG, CONFIGURED_CATALOG, STATE); - } - - @Test - void testCheckNestedException() throws Exception { - final IntegrationConfig intConfig = IntegrationConfig.check(configPath); - final AirbyteConnectionStatus output = new AirbyteConnectionStatus().withStatus(Status.FAILED).withMessage("Invalid configuration"); - final ConfigErrorException configErrorException = new ConfigErrorException("Invalid configuration"); - final RuntimeException runtimeException = new RuntimeException(new RuntimeException(configErrorException)); - - when(cliParser.parse(ARGS)).thenReturn(intConfig); - when(source.check(CONFIG)).thenThrow(runtimeException); - - final ConnectorSpecification expectedConnSpec = mock(ConnectorSpecification.class); - when(source.spec()).thenReturn(expectedConnSpec); - when(expectedConnSpec.getConnectionSpecification()).thenReturn(CONFIG); - final JsonSchemaValidator jsonSchemaValidator = mock(JsonSchemaValidator.class); - new IntegrationRunner(cliParser, stdoutConsumer, null, source, jsonSchemaValidator).run(ARGS); - - verify(source).check(CONFIG); - verify(stdoutConsumer).accept(new AirbyteMessage().withType(Type.CONNECTION_STATUS).withConnectionStatus(output)); - verify(jsonSchemaValidator).validate(any(), any()); - } - - @Test - void testCheckRuntimeException() throws Exception { - final IntegrationConfig intConfig = IntegrationConfig.check(configPath); - final AirbyteConnectionStatus output = - new AirbyteConnectionStatus().withStatus(Status.FAILED).withMessage(String.format(COMMON_EXCEPTION_MESSAGE_TEMPLATE, "Runtime Error")); - final RuntimeException runtimeException = new RuntimeException("Runtime Error"); - - when(cliParser.parse(ARGS)).thenReturn(intConfig); - when(source.check(CONFIG)).thenThrow(runtimeException); - - final ConnectorSpecification expectedConnSpec = mock(ConnectorSpecification.class); - when(source.spec()).thenReturn(expectedConnSpec); - when(expectedConnSpec.getConnectionSpecification()).thenReturn(CONFIG); - final JsonSchemaValidator jsonSchemaValidator = mock(JsonSchemaValidator.class); - new IntegrationRunner(cliParser, stdoutConsumer, null, source, jsonSchemaValidator).run(ARGS); - - verify(source).check(CONFIG); - verify(stdoutConsumer).accept(new AirbyteMessage().withType(Type.CONNECTION_STATUS).withConnectionStatus(output)); - verify(jsonSchemaValidator).validate(any(), any()); - } - - @Test - void testWrite() throws Exception { - final IntegrationConfig intConfig = IntegrationConfig.write(configPath, configuredCatalogPath); - final SerializedAirbyteMessageConsumer consumerMock = mock(SerializedAirbyteMessageConsumer.class); - when(cliParser.parse(ARGS)).thenReturn(intConfig); - when(destination.getSerializedMessageConsumer(CONFIG, CONFIGURED_CATALOG, stdoutConsumer)).thenReturn(consumerMock); - - final ConnectorSpecification expectedConnSpec = mock(ConnectorSpecification.class); - when(destination.spec()).thenReturn(expectedConnSpec); - when(expectedConnSpec.getConnectionSpecification()).thenReturn(CONFIG); - - final JsonSchemaValidator jsonSchemaValidator = mock(JsonSchemaValidator.class); - - final IntegrationRunner runner = spy(new IntegrationRunner(cliParser, stdoutConsumer, destination, null, jsonSchemaValidator)); - runner.run(ARGS); - - verify(destination).getSerializedMessageConsumer(CONFIG, CONFIGURED_CATALOG, stdoutConsumer); - verify(jsonSchemaValidator).validate(any(), any()); - } - - @Test - void testDestinationConsumerLifecycleSuccess() throws Exception { - final AirbyteMessage message1 = new AirbyteMessage() - .withType(AirbyteMessage.Type.RECORD) - .withRecord(new AirbyteRecordMessage() - .withData(Jsons.deserialize("{ \"color\": \"blue\" }")) - .withStream(STREAM_NAME) - .withEmittedAt(EMITTED_AT)); - final AirbyteMessage message2 = new AirbyteMessage() - .withType(AirbyteMessage.Type.RECORD) - .withRecord(new AirbyteRecordMessage() - .withData(Jsons.deserialize("{ \"color\": \"yellow\" }")) - .withStream(STREAM_NAME) - .withEmittedAt(EMITTED_AT)); - final AirbyteMessage stateMessage = new AirbyteMessage() - .withType(Type.STATE) - .withState(new AirbyteStateMessage() - .withData(Jsons.deserialize("{ \"checkpoint\": \"1\" }"))); - System.setIn(new ByteArrayInputStream((Jsons.serialize(message1) + "\n" - + Jsons.serialize(message2) + "\n" - + Jsons.serialize(stateMessage)).getBytes(StandardCharsets.UTF_8))); - - try (final SerializedAirbyteMessageConsumer airbyteMessageConsumerMock = mock(SerializedAirbyteMessageConsumer.class)) { - IntegrationRunner.consumeWriteStream(airbyteMessageConsumerMock); - final InOrder inOrder = inOrder(airbyteMessageConsumerMock); - inOrder.verify(airbyteMessageConsumerMock).accept(Jsons.serialize(message1), Jsons.serialize(message1).getBytes(StandardCharsets.UTF_8).length); - inOrder.verify(airbyteMessageConsumerMock).accept(Jsons.serialize(message2), Jsons.serialize(message2).getBytes(StandardCharsets.UTF_8).length); - inOrder.verify(airbyteMessageConsumerMock).accept(Jsons.serialize(stateMessage), - Jsons.serialize(stateMessage).getBytes(StandardCharsets.UTF_8).length); - } - } - - @Test - void testDestinationConsumerLifecycleFailure() throws Exception { - final AirbyteMessage message1 = new AirbyteMessage() - .withType(AirbyteMessage.Type.RECORD) - .withRecord(new AirbyteRecordMessage() - .withData(Jsons.deserialize("{ \"color\": \"blue\" }")) - .withStream(STREAM_NAME) - .withEmittedAt(EMITTED_AT)); - final AirbyteMessage message2 = new AirbyteMessage() - .withType(AirbyteMessage.Type.RECORD) - .withRecord(new AirbyteRecordMessage() - .withData(Jsons.deserialize("{ \"color\": \"yellow\" }")) - .withStream(STREAM_NAME) - .withEmittedAt(EMITTED_AT)); - System.setIn(new ByteArrayInputStream((Jsons.serialize(message1) + "\n" + Jsons.serialize(message2)).getBytes(StandardCharsets.UTF_8))); - - try (final SerializedAirbyteMessageConsumer airbyteMessageConsumerMock = mock(SerializedAirbyteMessageConsumer.class)) { - doThrow(new IOException("error")).when(airbyteMessageConsumerMock).accept(Jsons.serialize(message1), - Jsons.serialize(message1).getBytes(StandardCharsets.UTF_8).length); - assertThrows(IOException.class, () -> IntegrationRunner.consumeWriteStream(airbyteMessageConsumerMock)); - final InOrder inOrder = inOrder(airbyteMessageConsumerMock); - inOrder.verify(airbyteMessageConsumerMock).accept(Jsons.serialize(message1), Jsons.serialize(message1).getBytes(StandardCharsets.UTF_8).length); - inOrder.verifyNoMoreInteractions(); - } - } - - @Test - void testInterruptOrphanThread() { - final List caughtExceptions = new ArrayList<>(); - startSleepingThread(caughtExceptions, false); - IntegrationRunner.stopOrphanedThreads( - Assertions::fail, - 3, TimeUnit.SECONDS, - 10, TimeUnit.SECONDS); - try { - TimeUnit.SECONDS.sleep(15); - } catch (final Exception e) { - throw new RuntimeException(e); - } - final List runningThreads = ThreadUtils.getAllThreads().stream() - .filter(ORPHANED_THREAD_FILTER) - .collect(Collectors.toList()); - // all threads should be interrupted - assertEquals(List.of(), runningThreads); - assertEquals(1, caughtExceptions.size()); - } - - @Test - void testNoInterruptOrphanThread() { - final List caughtExceptions = new ArrayList<>(); - final AtomicBoolean exitCalled = new AtomicBoolean(false); - startSleepingThread(caughtExceptions, true); - IntegrationRunner.stopOrphanedThreads( - () -> exitCalled.set(true), - 3, TimeUnit.SECONDS, - 10, TimeUnit.SECONDS); - try { - TimeUnit.SECONDS.sleep(15); - } catch (final Exception e) { - throw new RuntimeException(e); - } - - final List runningThreads = ThreadUtils.getAllThreads().stream() - .filter(ORPHANED_THREAD_FILTER) - .collect(Collectors.toList()); - // a thread that refuses to be interrupted should remain - assertEquals(1, runningThreads.size()); - assertEquals(1, caughtExceptions.size()); - assertTrue(exitCalled.get()); - } - - private void startSleepingThread(final List caughtExceptions, final boolean ignoreInterrupt) { - final ExecutorService executorService = Executors.newFixedThreadPool(1, r -> { - // Create a thread that should be identified as orphaned if still running during shutdown - final Thread thread = new Thread(r); - thread.setName("sleeping-thread"); - thread.setDaemon(false); - return thread; - }); - executorService.submit(() -> { - for (int tries = 0; tries < 3; tries++) { - try { - TimeUnit.MINUTES.sleep(5); - } catch (final Exception e) { - LOGGER.info("Caught Exception", e); - caughtExceptions.add(e); - if (!ignoreInterrupt) { - executorService.shutdownNow(); - break; - } - } - } - }); - } - - @Test - void testParseConnectorImage() { - assertEquals("unknown", IntegrationRunner.parseConnectorVersion(null)); - assertEquals("unknown", IntegrationRunner.parseConnectorVersion("")); - assertEquals("1.0.1-alpha", IntegrationRunner.parseConnectorVersion("airbyte/destination-test:1.0.1-alpha")); - assertEquals("dev", IntegrationRunner.parseConnectorVersion("airbyte/destination-test:dev")); - assertEquals("1.0.1-alpha", IntegrationRunner.parseConnectorVersion("destination-test:1.0.1-alpha")); - assertEquals("1.0.1-alpha", IntegrationRunner.parseConnectorVersion(":1.0.1-alpha")); - } - - @Test - void testConsumptionOfInvalidStateMessage() { - final String invalidStateMessage = """ - { - "type" : "STATE", - "state" : { - "type": "NOT_RECOGNIZED", - "global": { - "streamStates": { - "foo" : "bar" - } - } - } - } - """; - - Assertions.assertThrows(IllegalStateException.class, () -> { - try (final AirbyteMessageConsumer consumer = mock(AirbyteMessageConsumer.class)) { - ShimToSerializedAirbyteMessageConsumer.consumeMessage(consumer, invalidStateMessage); - } - }); - } - - @Test - void testConsumptionOfInvalidNonStateMessage() { - final String invalidNonStateMessage = """ - { - "type" : "NOT_RECOGNIZED", - "record" : { - "namespace": "namespace", - "stream": "stream", - "emittedAt": 123456789 - } - } - """; - - Assertions.assertDoesNotThrow(() -> { - try (final AirbyteMessageConsumer consumer = mock(AirbyteMessageConsumer.class)) { - ShimToSerializedAirbyteMessageConsumer.consumeMessage(consumer, invalidNonStateMessage); - verify(consumer, times(0)).accept(any(AirbyteMessage.class)); - } - }); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/NameTransformerTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/NameTransformerTest.java deleted file mode 100644 index 72ee1cfca98a1..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/NameTransformerTest.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.base; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import io.airbyte.cdk.integrations.destination.NamingConventionTransformer; -import io.airbyte.cdk.integrations.destination.StandardNameTransformer; -import org.junit.jupiter.api.Test; - -class NameTransformerTest { - - @Test - void testStandardSQLNaming() { - final NamingConventionTransformer namingResolver = new StandardNameTransformer(); - assertEquals("identifier_name", namingResolver.getIdentifier("identifier_name")); - assertEquals("iDenTiFieR_name", namingResolver.getIdentifier("iDenTiFieR_name")); - assertEquals("__identifier_name", namingResolver.getIdentifier("__identifier_name")); - assertEquals("IDENTIFIER_NAME", namingResolver.getIdentifier("IDENTIFIER_NAME")); - assertEquals("123identifier_name", namingResolver.getIdentifier("123identifier_name")); - assertEquals("i0d0e0n0t0i0f0i0e0r0n0a0m0e", namingResolver.getIdentifier("i0d0e0n0t0i0f0i0e0r0n0a0m0e")); - assertEquals("_identifier_name", namingResolver.getIdentifier(",identifier+name")); - assertEquals("identifier_name", namingResolver.getIdentifier("identifiêr name")); - assertEquals("a_unicode_name__", namingResolver.getIdentifier("a_unicode_name_文")); - assertEquals("identifier__name__", namingResolver.getIdentifier("identifier__name__")); - assertEquals("identifier_name_weee", namingResolver.getIdentifier("identifier-name.weee")); - assertEquals("_identifier_name_", namingResolver.getIdentifier("\"identifier name\"")); - assertEquals("identifier_name", namingResolver.getIdentifier("identifier name")); - assertEquals("identifier_", namingResolver.getIdentifier("identifier%")); - assertEquals("_identifier_", namingResolver.getIdentifier("`identifier`")); - - assertEquals("_airbyte_raw_identifier_name", namingResolver.getRawTableName("identifier_name")); - } - - // Temporarily disabling the behavior of the StandardNameTransformer, see (issue #1785) - // @Test - void testExtendedSQLNaming() { - final NamingConventionTransformer namingResolver = new StandardNameTransformer(); - assertEquals("identifier_name", namingResolver.getIdentifier("identifier_name")); - assertEquals("iDenTiFieR_name", namingResolver.getIdentifier("iDenTiFieR_name")); - assertEquals("__identifier_name", namingResolver.getIdentifier("__identifier_name")); - assertEquals("IDENTIFIER_NAME", namingResolver.getIdentifier("IDENTIFIER_NAME")); - assertEquals("\"123identifier_name\"", namingResolver.getIdentifier("123identifier_name")); - assertEquals("i0d0e0n0t0i0f0i0e0r0n0a0m0e", namingResolver.getIdentifier("i0d0e0n0t0i0f0i0e0r0n0a0m0e")); - assertEquals("\",identifier+name\"", namingResolver.getIdentifier(",identifier+name")); - assertEquals("\"identifiêr name\"", namingResolver.getIdentifier("identifiêr name")); - assertEquals("\"a_unicode_name_文\"", namingResolver.getIdentifier("a_unicode_name_文")); - assertEquals("identifier__name__", namingResolver.getIdentifier("identifier__name__")); - assertEquals("\"identifier-name.weee\"", namingResolver.getIdentifier("identifier-name.weee")); - assertEquals("\"\"identifier name\"\"", namingResolver.getIdentifier("\"identifier name\"")); - assertEquals("\"identifier name\"", namingResolver.getIdentifier("identifier name")); - assertEquals("\"identifier%\"", namingResolver.getIdentifier("identifier%")); - assertEquals("\"`identifier`\"", namingResolver.getIdentifier("`identifier`")); - - assertEquals("_airbyte_raw_identifier_name", namingResolver.getRawTableName("identifier_name")); - assertEquals("\"_airbyte_raw_identifiêr name\"", namingResolver.getRawTableName("identifiêr name")); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/normalization/NormalizationLogParserTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/normalization/NormalizationLogParserTest.java deleted file mode 100644 index 7f8a01af1293d..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/normalization/NormalizationLogParserTest.java +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.base.normalization; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import io.airbyte.cdk.integrations.destination.normalization.NormalizationLogParser; -import io.airbyte.protocol.models.AirbyteErrorTraceMessage; -import io.airbyte.protocol.models.AirbyteErrorTraceMessage.FailureType; -import io.airbyte.protocol.models.AirbyteLogMessage; -import io.airbyte.protocol.models.AirbyteLogMessage.Level; -import io.airbyte.protocol.models.AirbyteMessage; -import io.airbyte.protocol.models.AirbyteMessage.Type; -import io.airbyte.protocol.models.AirbyteTraceMessage; -import java.io.BufferedReader; -import java.io.ByteArrayInputStream; -import java.io.InputStreamReader; -import java.nio.charset.StandardCharsets; -import java.util.List; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class NormalizationLogParserTest { - - private NormalizationLogParser parser; - - @BeforeEach - void setup() { - parser = new NormalizationLogParser(); - } - - @Test - void testWrapNonJsonLogs() { - runTest( - """ - foo - bar - [error] oh no - asdf - [error] qwer - """, - List.of( - logMessage(Level.INFO, "foo"), - logMessage(Level.INFO, "bar"), - logMessage(Level.INFO, "[error] oh no"), - logMessage(Level.INFO, "asdf"), - logMessage(Level.INFO, "[error] qwer")), - List.of( - "[error] oh no", - "[error] qwer")); - } - - @Test - void testWrapJsonLogs() { - runTest( - """ - {"code": "A001", "data": {"v": "=1.0.9"}, "invocation_id": "ed2017da-965d-406b-8fa1-07fb7c19fd14", "level": "info", "log_version": 1, "msg": "Running with dbt=1.0.9", "node_info": {}, "pid": 55, "thread_name": "MainThread", "ts": "2023-04-11T16:08:54.781886Z", "type": "log_line"} - {"code": "A001", "data": {"v": "=1.0.9"}, "invocation_id": "ed2017da-965d-406b-8fa1-07fb7c19fd14", "level": "error", "log_version": 1, "msg": "oh no", "node_info": {}, "pid": 55, "thread_name": "MainThread", "ts": "2023-04-11T16:08:54.781886Z", "type": "log_line"} - {"type": "TRACE", "trace": {"type": "ERROR", "emitted_at": 1.681766805198E12, "error": {"failure_type": "system_error", "message": "uh oh", "stack_trace": "normalization blew up", "internal_message": "normalization blew up with more detail"}}} - """, - List.of( - logMessage(Level.INFO, "Running with dbt=1.0.9"), - logMessage(Level.ERROR, "oh no"), - new AirbyteMessage() - .withType(Type.TRACE) - .withTrace(new AirbyteTraceMessage() - .withType(AirbyteTraceMessage.Type.ERROR) - .withEmittedAt(1.681766805198E12) - .withError(new AirbyteErrorTraceMessage() - .withFailureType(FailureType.SYSTEM_ERROR) - .withMessage("uh oh") - .withStackTrace("normalization blew up") - .withInternalMessage("normalization blew up with more detail")))), - List.of( - "oh no")); - } - - @Test - void testWeirdLogs() { - runTest( - """ - null - "null" - {"msg": "message with no level", "type": "log_line"} - {"level": "info", "type": "log_line"} - {"level": "error", "type": "log_line"} - """, - List.of( - logMessage(Level.INFO, "null"), - logMessage(Level.INFO, "\"null\""), - logMessage(Level.INFO, "{\n \"msg\" : \"message with no level\",\n \"type\" : \"log_line\"\n}"), - logMessage(Level.INFO, ""), - logMessage(Level.ERROR, "")), - List.of( - "")); - } - - private void runTest(String rawLogs, List expectedMessages, List expectedDbtErrors) { - final List messages = parser.create(new BufferedReader( - new InputStreamReader( - new ByteArrayInputStream( - rawLogs.getBytes(StandardCharsets.UTF_8)), - StandardCharsets.UTF_8))) - .toList(); - - assertEquals( - expectedMessages, - messages); - assertEquals(expectedDbtErrors, parser.getDbtErrors()); - } - - private AirbyteMessage logMessage(Level level, String message) { - return new AirbyteMessage() - .withType(Type.LOG) - .withLog(new AirbyteLogMessage() - .withLevel(level) - .withMessage(message)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/ssh/SshTunnelTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/ssh/SshTunnelTest.java deleted file mode 100644 index 06c68a50bdfea..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/ssh/SshTunnelTest.java +++ /dev/null @@ -1,216 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.base.ssh; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import io.airbyte.cdk.integrations.base.ssh.SshTunnel.TunnelMethod; -import io.airbyte.commons.json.Jsons; -import java.nio.charset.StandardCharsets; -import java.security.KeyPair; -import java.security.PrivateKey; -import java.security.PublicKey; -import java.util.Arrays; -import org.apache.sshd.client.SshClient; -import org.apache.sshd.client.session.ClientSession; -import org.apache.sshd.common.util.security.SecurityUtils; -import org.apache.sshd.common.util.security.eddsa.EdDSASecurityProviderRegistrar; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.ValueSource; - -class SshTunnelTest { - - private static final String SSH_ED25519_PRIVATE_KEY = "-----BEGIN OPENSSH PRIVATE KEY-----\\n" - + "b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAAAMwAAAAtzc2gtZW\\n" - + "QyNTUxOQAAACDbBP+5jmEtjh1JvhzVQsvvTC2IQrX6P68XzrV7ZbnGsQAAAKBgtw9/YLcP\\n" - + "fwAAAAtzc2gtZWQyNTUxOQAAACDbBP+5jmEtjh1JvhzVQsvvTC2IQrX6P68XzrV7ZbnGsQ\\n" - + "AAAEAaKYn22N1O78HfdG22C7hcG2HiezKMzlq4JTdgYG1DstsE/7mOYS2OHUm+HNVCy+9M\\n" - + "LYhCtfo/rxfOtXtlucaxAAAAHHRmbG9yZXNfZHQwMUB0ZmxvcmVzX2R0MDEtUEMB\\n" - + "-----END OPENSSH PRIVATE KEY-----"; - private static final String SSH_RSA_PRIVATE_KEY = "-----BEGIN OPENSSH PRIVATE KEY-----\\n" - + "b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAABlwAAAAdzc2gtcn\\n" - + "NhAAAAAwEAAQAAAYEAuFjfTMS6BrgoxaQe9i83y6CdGH3xJIwc1Wy+11ibWAFcQ6khX/x0\\n" - + "M+JnJaSCs/hxiDE4afHscP3HzVQC699IgKwyAPaG0ZG+bLhxWAm4E79P7Yssj7imhTqr0A\\n" - + "DZDO23CCOagHvfdg1svnBhk1ih14GMGKRFCS27CLgholIOeogOyH7b3Jaqy9LtICiE054e\\n" - + "jwdaZdwWU08kxMO4ItdxNasCPC5uQiaXIzWFysG0mLk7WWc8WyuQHneQFl3Qu6p/rWJz4i\\n" - + "seea5CBL5s1DIyCyo/jgN5/oOWOciPUl49mDLleCzYTDnWqX43NK9A87unNeuA95Fk9akH\\n" - + "8QH4hKBCzpHhsh4U3Ys/l9Q5NmnyBrtFWBY2n13ZftNA/Ms+Hsh6V3eyJW0rIFY2/UM4XA\\n" - + "YyD6MEOlvFAQjxC6EbqfkrC6FQgH3I2wAtIDqEk2j79vfIIDdzp8otWjIQsApX55j+kKio\\n" - + "sY8YTXb9sLWuEdpSd/AN3iQ8HwIceyTulaKn7rTBAAAFkMwDTyPMA08jAAAAB3NzaC1yc2\\n" - + "EAAAGBALhY30zEuga4KMWkHvYvN8ugnRh98SSMHNVsvtdYm1gBXEOpIV/8dDPiZyWkgrP4\\n" - + "cYgxOGnx7HD9x81UAuvfSICsMgD2htGRvmy4cVgJuBO/T+2LLI+4poU6q9AA2Qzttwgjmo\\n" - + "B733YNbL5wYZNYodeBjBikRQktuwi4IaJSDnqIDsh+29yWqsvS7SAohNOeHo8HWmXcFlNP\\n" - + "JMTDuCLXcTWrAjwubkImlyM1hcrBtJi5O1lnPFsrkB53kBZd0Luqf61ic+IrHnmuQgS+bN\\n" - + "QyMgsqP44Def6DljnIj1JePZgy5Xgs2Ew51ql+NzSvQPO7pzXrgPeRZPWpB/EB+ISgQs6R\\n" - + "4bIeFN2LP5fUOTZp8ga7RVgWNp9d2X7TQPzLPh7Ield3siVtKyBWNv1DOFwGMg+jBDpbxQ\\n" - + "EI8QuhG6n5KwuhUIB9yNsALSA6hJNo+/b3yCA3c6fKLVoyELAKV+eY/pCoqLGPGE12/bC1\\n" - + "rhHaUnfwDd4kPB8CHHsk7pWip+60wQAAAAMBAAEAAAGAXw+dHpY3o21lwP0v5h1VNVD+kX\\n" - + "moVwNVfw0ToDKV8JzK+i0GA9xIA9VVAUlDCREtYmCXSbKyDVYgqRYQZ5d9aLTjGDIINZtl\\n" - + "SeUWtaJVZQF7cvAYq4g5fmxR2vIE+zC9+Jl7e5PlGJg1okKLXpMO6fVoy/AxlVkaoJVq6q\\n" - + "xLwQ3WKbeZIrgjHPYIx1N9oy5fbbwJ9oq2jIE8YabXlkfonhcwEN6UhtIlj8dy1apruXGT\\n" - + "VDfzHMRrDfrzt0TrdUqmqgo/istP89sggtkJ8uuPtkBFHTjao8MiBsshy1iDVbIno9gDbJ\\n" - + "JgYyunmSgEjEZpp09+mkgwfZO3/RDLRPF1SRAGBNy27CH8/bh9gAVRhAPi0GLclNi292Ya\\n" - + "NrGvjMcRlYAsWL3mZ9aTbv0j7Qi8qdWth+rZ+tBmNToUVVl5iLxifgo0kjiXAehZB1LaQV\\n" - + "yuMXlXOGmt9V2/DPACA9getQJQONxrLAcgHdjMiuwD8r7d+m/kE4+cOTakOlzrfrwBAAAA\\n" - + "wQCVTQTvuyBW3JemMPtRLifQqdwMGRPokm5nTn+JSJQvg+dNpL7hC0k3IesKs63gxuuHoq\\n" - + "4q1xkMmCMvihT8oVlxrezEjsO/QMCxe6Sr9eMfHAjrdPeHsPaf9oOgG9vEEH9dEilHpnlb\\n" - + "97Vyl9EHm1iahONM1gWdXkPjIfnQzYPvSLZPtBBSI0XBjCTifMnCRgd3s2bdm7kh+7XA+C\\n" - + "rX62WfPIJKL+OhMIf+ED4HBJTd/vU34Vk73yvqHzqel0ZQnRoAAADBAOGSm6TNBptV7S5P\\n" - + "wT3BhGYSm35/7nCFTilyfy5/8EUmifUFittRIvgDoXBWeZEwvqIiQ55iX9mYmNmb0KbPCw\\n" - + "cqN/BtXWItAvyTDZ6PeI2m2aUj+rW2R3ZXEsBjgaNRtbPyMKQ69xtKRvHtNZNfgjpRQ4is\\n" - + "lbufhAK1YbUxrlfKaBGOcGyR7DNmUUUN6nptQbpOr1HQc5DOH17HIDnRPs44HIws3/apww\\n" - + "RBIjjy6GQNfJ/Ge8N4pxGoLl1qKO8xoQAAAMEA0Tat/E5mSsgjCgmFja/jOZJcrzZHwrPT\\n" - + "3NEbuAMQ/L3atKEINypmpJfjIAvNljKJwSUDMEWvs8qj8cSGCrtkcAv1YSm697TL2oC9HU\\n" - + "CFoOJAkH1X2CGTgHlR9it3j4aRJ3dXdL2k7aeoGXObfRWqBNPj0LOOZs64RA6scGAzo6MR\\n" - + "5WlcOxfV1wZuaM0fOd+PBmIlFEE7Uf6AY/UahBAxaFV2+twgK9GCDcu1t4Ye9wZ9kZ4Nal\\n" - + "0fkKD4uN4DRO8hAAAAFm10dWhhaUBrYnAxLWxocC1hMTQ1MzMBAgME\\n" - + "-----END OPENSSH PRIVATE KEY-----"; - private static final String HOST_PORT_CONFIG = - "{\"ssl\":true,\"host\":\"fakehost.com\",\"port\":5432,\"schema\":\"public\",\"database\":\"postgres\",\"password\":\"\",\"username\":\"postgres\",\"tunnel_method\":{\"ssh_key\":\"" - + "%s" - + "\",\"tunnel_host\":\"faketunnel.com\",\"tunnel_port\":22,\"tunnel_user\":\"ec2-user\",\"tunnel_method\":\"SSH_KEY_AUTH\"}}"; - - private static final String URL_CONFIG_WITH_PORT = - "{\"ssl\":true,\"endpoint\":\"http://fakehost.com:9090/service\",\"password\":\"\",\"username\":\"restuser\",\"tunnel_method\":{\"ssh_key\":\"" - + "%s" - + "\",\"tunnel_host\":\"faketunnel.com\",\"tunnel_port\":22,\"tunnel_user\":\"ec2-user\",\"tunnel_method\":\"SSH_KEY_AUTH\"}}"; - - private static final String URL_CONFIG_NO_PORT = - "{\"ssl\":true,\"endpoint\":\"http://fakehost.com/service\",\"password\":\"\",\"username\":\"restuser\",\"tunnel_method\":{\"ssh_key\":\"" - + "%s" - + "\",\"tunnel_host\":\"faketunnel.com\",\"tunnel_port\":22,\"tunnel_user\":\"ec2-user\",\"tunnel_method\":\"SSH_KEY_AUTH\"}}"; - - /** - * This test verifies that OpenSsh correctly replaces values in connector configuration in a spec - * with host/port config and in a spec with endpoint URL config - * - * @param configString - * @throws Exception - */ - @ParameterizedTest - @ValueSource(strings = {HOST_PORT_CONFIG, URL_CONFIG_WITH_PORT, URL_CONFIG_NO_PORT}) - public void testConfigInTunnel(final String configString) throws Exception { - final JsonNode config = (new ObjectMapper()).readTree(String.format(configString, SSH_RSA_PRIVATE_KEY)); - String endPointURL = Jsons.getStringOrNull(config, "endpoint"); - final SshTunnel sshTunnel = new SshTunnel( - config, - endPointURL == null ? Arrays.asList(new String[] {"host"}) : null, - endPointURL == null ? Arrays.asList(new String[] {"port"}) : null, - endPointURL == null ? null : "endpoint", - endPointURL, - TunnelMethod.SSH_KEY_AUTH, - "faketunnel.com", - 22, - "tunnelUser", - SSH_RSA_PRIVATE_KEY, - "tunnelUserPassword", - endPointURL == null ? "fakeHost.com" : null, - endPointURL == null ? 5432 : 0) { - - @Override - ClientSession openTunnel(final SshClient client) { - tunnelLocalPort = 8080; - return null; // Prevent tunnel from attempting to connect - } - - }; - - final JsonNode configInTunnel = sshTunnel.getConfigInTunnel(); - if (endPointURL == null) { - assertTrue(configInTunnel.has("port")); - assertTrue(configInTunnel.has("host")); - assertFalse(configInTunnel.has("endpoint")); - assertEquals(8080, configInTunnel.get("port").asInt()); - assertEquals("127.0.0.1", configInTunnel.get("host").asText()); - } else { - assertFalse(configInTunnel.has("port")); - assertFalse(configInTunnel.has("host")); - assertTrue(configInTunnel.has("endpoint")); - assertEquals("http://127.0.0.1:8080/service", configInTunnel.get("endpoint").asText()); - } - } - - /** - * This test verifies that SshTunnel correctly extracts private key pairs from keys formatted as - * EdDSA and OpenSSH - * - * @param privateKey - * @throws Exception - */ - @ParameterizedTest - @ValueSource(strings = {SSH_ED25519_PRIVATE_KEY, SSH_RSA_PRIVATE_KEY}) - public void getKeyPair(final String privateKey) throws Exception { - final JsonNode config = (new ObjectMapper()).readTree(String.format(HOST_PORT_CONFIG, privateKey)); - final SshTunnel sshTunnel = new SshTunnel( - config, - Arrays.asList(new String[] {"host"}), - Arrays.asList(new String[] {"port"}), - null, - null, - TunnelMethod.SSH_KEY_AUTH, - "faketunnel.com", - 22, - "tunnelUser", - privateKey, - "tunnelUserPassword", - "fakeHost.com", - 5432) { - - @Override - ClientSession openTunnel(final SshClient client) { - return null; // Prevent tunnel from attempting to connect - } - - }; - - final KeyPair authKeyPair = sshTunnel.getPrivateKeyPair(); - assertNotNull(authKeyPair);// actually, all is good if there is no exception on previous line - } - - /** - * This test verifies that 'net.i2p.crypto:eddsa' is present and EdDSA is supported. If - * net.i2p.crypto:eddsa will be removed from project, then will be thrown: generator not correctly - * initialized - * - * @throws Exception - */ - @Test - public void edDsaIsSupported() throws Exception { - final var keygen = SecurityUtils.getKeyPairGenerator("EdDSA"); - final String message = "hello world"; - final KeyPair keyPair = keygen.generateKeyPair(); - - final byte[] signedMessage = sign(keyPair.getPrivate(), message); - - assertTrue(new EdDSASecurityProviderRegistrar().isSupported()); - assertTrue(verify(keyPair.getPublic(), signedMessage, message)); - } - - private byte[] sign(final PrivateKey privateKey, final String message) throws Exception { - final var signature = SecurityUtils.getSignature("NONEwithEdDSA"); - signature.initSign(privateKey); - - signature.update(message.getBytes(StandardCharsets.UTF_8)); - - return signature.sign(); - } - - private boolean verify(final PublicKey publicKey, final byte[] signed, final String message) - throws Exception { - final var signature = SecurityUtils.getSignature("NONEwithEdDSA"); - signature.initVerify(publicKey); - - signature.update(message.getBytes(StandardCharsets.UTF_8)); - - return signature.verify(signed); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/BufferedStreamConsumerTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/BufferedStreamConsumerTest.java deleted file mode 100644 index cb5498b70487c..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/BufferedStreamConsumerTest.java +++ /dev/null @@ -1,561 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.buffered_stream_consumer; - -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoInteractions; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import io.airbyte.cdk.integrations.destination.record_buffer.BufferFlushType; -import io.airbyte.cdk.integrations.destination.record_buffer.BufferingStrategy; -import io.airbyte.cdk.integrations.destination.record_buffer.InMemoryRecordBufferingStrategy; -import io.airbyte.commons.functional.CheckedFunction; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.Field; -import io.airbyte.protocol.models.JsonSchemaType; -import io.airbyte.protocol.models.v0.AirbyteGlobalState; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteMessage.Type; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.AirbyteStreamState; -import io.airbyte.protocol.models.v0.CatalogHelpers; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.time.Duration; -import java.time.Instant; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Optional; -import java.util.concurrent.TimeUnit; -import java.util.function.Consumer; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import org.apache.commons.lang3.RandomStringUtils; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -public class BufferedStreamConsumerTest { - - private static final String SCHEMA_NAME = "public"; - private static final String STREAM_NAME = "id_and_name"; - private static final String STREAM_NAME2 = STREAM_NAME + 2; - private static final int PERIODIC_BUFFER_FREQUENCY = 5; - private static final ConfiguredAirbyteCatalog CATALOG = new ConfiguredAirbyteCatalog().withStreams(List.of( - CatalogHelpers.createConfiguredAirbyteStream( - STREAM_NAME, - SCHEMA_NAME, - Field.of("id", JsonSchemaType.NUMBER), - Field.of("name", JsonSchemaType.STRING)), - CatalogHelpers.createConfiguredAirbyteStream( - STREAM_NAME2, - SCHEMA_NAME, - Field.of("id", JsonSchemaType.NUMBER), - Field.of("name", JsonSchemaType.STRING)))); - - private static final AirbyteMessage STATE_MESSAGE1 = new AirbyteMessage() - .withType(Type.STATE) - .withState(new AirbyteStateMessage().withData(Jsons.jsonNode(ImmutableMap.of("state_message_id", 1)))); - private static final AirbyteMessage STATE_MESSAGE2 = new AirbyteMessage() - .withType(Type.STATE) - .withState(new AirbyteStateMessage().withData(Jsons.jsonNode(ImmutableMap.of("state_message_id", 2)))); - - private BufferedStreamConsumer consumer; - private OnStartFunction onStart; - private RecordWriter recordWriter; - private OnCloseFunction onClose; - private CheckedFunction isValidRecord; - private Consumer outputRecordCollector; - - @SuppressWarnings("unchecked") - @BeforeEach - void setup() throws Exception { - onStart = mock(OnStartFunction.class); - recordWriter = mock(RecordWriter.class); - onClose = mock(OnCloseFunction.class); - isValidRecord = mock(CheckedFunction.class); - outputRecordCollector = mock(Consumer.class); - consumer = new BufferedStreamConsumer( - outputRecordCollector, - onStart, - new InMemoryRecordBufferingStrategy(recordWriter, 1_000), - onClose, - CATALOG, - isValidRecord); - - when(isValidRecord.apply(any())).thenReturn(true); - } - - @Test - void test1StreamWith1State() throws Exception { - final List expectedRecords = generateRecords(1_000); - - consumer.start(); - consumeRecords(consumer, expectedRecords); - consumer.accept(STATE_MESSAGE1); - consumer.close(); - - verifyStartAndClose(); - - verifyRecords(STREAM_NAME, SCHEMA_NAME, expectedRecords); - - verify(outputRecordCollector).accept(STATE_MESSAGE1); - } - - @Test - void test1StreamWith2State() throws Exception { - final List expectedRecords = generateRecords(1_000); - - consumer.start(); - consumeRecords(consumer, expectedRecords); - consumer.accept(STATE_MESSAGE1); - consumer.accept(STATE_MESSAGE2); - consumer.close(); - - verifyStartAndClose(); - - verifyRecords(STREAM_NAME, SCHEMA_NAME, expectedRecords); - - verify(outputRecordCollector, times(1)).accept(STATE_MESSAGE2); - } - - @Test - void test1StreamWith0State() throws Exception { - final List expectedRecords = generateRecords(1_000); - - consumer.start(); - consumeRecords(consumer, expectedRecords); - consumer.close(); - - verifyStartAndClose(); - - verifyRecords(STREAM_NAME, SCHEMA_NAME, expectedRecords); - } - - @Test - void test1StreamWithStateAndThenMoreRecordsBiggerThanBuffer() throws Exception { - final List expectedRecordsBatch1 = generateRecords(1_000); - final List expectedRecordsBatch2 = generateRecords(1_000); - - consumer.start(); - consumeRecords(consumer, expectedRecordsBatch1); - consumer.accept(STATE_MESSAGE1); - consumeRecords(consumer, expectedRecordsBatch2); - consumer.close(); - - verifyStartAndClose(); - - verifyRecords(STREAM_NAME, SCHEMA_NAME, expectedRecordsBatch1); - verifyRecords(STREAM_NAME, SCHEMA_NAME, expectedRecordsBatch2); - - verify(outputRecordCollector).accept(STATE_MESSAGE1); - } - - @Test - void test1StreamWithStateAndThenMoreRecordsSmallerThanBuffer() throws Exception { - final List expectedRecordsBatch1 = generateRecords(1_000); - final List expectedRecordsBatch2 = generateRecords(1_000); - - // consumer with big enough buffered that we see both batches are flushed in one go. - final BufferedStreamConsumer consumer = new BufferedStreamConsumer( - outputRecordCollector, - onStart, - new InMemoryRecordBufferingStrategy(recordWriter, 10_000), - onClose, - CATALOG, - isValidRecord); - - consumer.start(); - consumeRecords(consumer, expectedRecordsBatch1); - consumer.accept(STATE_MESSAGE1); - consumeRecords(consumer, expectedRecordsBatch2); - consumer.close(); - - verifyStartAndClose(); - - final List expectedRecords = Lists.newArrayList(expectedRecordsBatch1, expectedRecordsBatch2) - .stream() - .flatMap(Collection::stream) - .collect(Collectors.toList()); - verifyRecords(STREAM_NAME, SCHEMA_NAME, expectedRecords); - - verify(outputRecordCollector).accept(STATE_MESSAGE1); - } - - @Test - void testExceptionAfterOneStateMessage() throws Exception { - final List expectedRecordsBatch1 = generateRecords(1_000); - final List expectedRecordsBatch2 = generateRecords(1_000); - final List expectedRecordsBatch3 = generateRecords(1_000); - - consumer.start(); - consumeRecords(consumer, expectedRecordsBatch1); - consumer.accept(STATE_MESSAGE1); - consumeRecords(consumer, expectedRecordsBatch2); - when(isValidRecord.apply(any())).thenThrow(new IllegalStateException("induced exception")); - assertThrows(IllegalStateException.class, () -> consumer.accept(expectedRecordsBatch3.get(0))); - consumer.close(); - - verifyStartAndCloseFailure(); - - verifyRecords(STREAM_NAME, SCHEMA_NAME, expectedRecordsBatch1); - - verify(outputRecordCollector).accept(STATE_MESSAGE1); - } - - @Test - void testExceptionAfterNoStateMessages() throws Exception { - final List expectedRecordsBatch1 = generateRecords(1_000); - final List expectedRecordsBatch2 = generateRecords(1_000); - final List expectedRecordsBatch3 = generateRecords(1_000); - - consumer.start(); - consumeRecords(consumer, expectedRecordsBatch1); - consumeRecords(consumer, expectedRecordsBatch2); - when(isValidRecord.apply(any())).thenThrow(new IllegalStateException("induced exception")); - assertThrows(IllegalStateException.class, () -> consumer.accept(expectedRecordsBatch3.get(0))); - consumer.close(); - - verifyStartAndCloseFailure(); - - verifyRecords(STREAM_NAME, SCHEMA_NAME, expectedRecordsBatch1); - - verifyNoInteractions(outputRecordCollector); - } - - @Test - void testExceptionDuringOnClose() throws Exception { - doThrow(new IllegalStateException("induced exception")).when(onClose).accept(false, new HashMap<>()); - - final List expectedRecordsBatch1 = generateRecords(1_000); - final List expectedRecordsBatch2 = generateRecords(1_000); - - consumer.start(); - consumeRecords(consumer, expectedRecordsBatch1); - consumer.accept(STATE_MESSAGE1); - consumeRecords(consumer, expectedRecordsBatch2); - assertThrows(IllegalStateException.class, () -> consumer.close(), "Expected an error to be thrown on close"); - - verifyStartAndClose(); - - verifyRecords(STREAM_NAME, SCHEMA_NAME, expectedRecordsBatch1); - - verify(outputRecordCollector).accept(STATE_MESSAGE1); - } - - @Test - void test2StreamWith1State() throws Exception { - final List expectedRecordsStream1 = generateRecords(1_000); - final List expectedRecordsStream2 = expectedRecordsStream1 - .stream() - .map(Jsons::clone) - .peek(m -> m.getRecord().withStream(STREAM_NAME2)) - .collect(Collectors.toList()); - - consumer.start(); - consumeRecords(consumer, expectedRecordsStream1); - consumer.accept(STATE_MESSAGE1); - consumeRecords(consumer, expectedRecordsStream2); - consumer.close(); - - verifyStartAndClose(); - - verifyRecords(STREAM_NAME, SCHEMA_NAME, expectedRecordsStream1); - verifyRecords(STREAM_NAME2, SCHEMA_NAME, expectedRecordsStream2); - - verify(outputRecordCollector).accept(STATE_MESSAGE1); - } - - @Test - void test2StreamWith2State() throws Exception { - final List expectedRecordsStream1 = generateRecords(1_000); - final List expectedRecordsStream2 = expectedRecordsStream1 - .stream() - .map(Jsons::clone) - .peek(m -> m.getRecord().withStream(STREAM_NAME2)) - .collect(Collectors.toList()); - - consumer.start(); - consumeRecords(consumer, expectedRecordsStream1); - consumer.accept(STATE_MESSAGE1); - consumeRecords(consumer, expectedRecordsStream2); - consumer.accept(STATE_MESSAGE2); - consumer.close(); - - verifyStartAndClose(); - - verifyRecords(STREAM_NAME, SCHEMA_NAME, expectedRecordsStream1); - verifyRecords(STREAM_NAME2, SCHEMA_NAME, expectedRecordsStream2); - - verify(outputRecordCollector, times(1)).accept(STATE_MESSAGE2); - } - - // Periodic Buffer Flush Tests - @Test - void testSlowStreamReturnsState() throws Exception { - // generate records less than the default maxQueueSizeInBytes to confirm periodic flushing occurs - final List expectedRecordsStream1 = generateRecords(500L); - final List expectedRecordsStream1Batch2 = generateRecords(200L); - - // Overrides flush frequency for testing purposes to 5 seconds - final BufferedStreamConsumer flushConsumer = getConsumerWithFlushFrequency(); - flushConsumer.start(); - consumeRecords(flushConsumer, expectedRecordsStream1); - flushConsumer.accept(STATE_MESSAGE1); - // NOTE: Sleeps process for 5 seconds, if tests are slow this can be updated to reduce slowdowns - TimeUnit.SECONDS.sleep(PERIODIC_BUFFER_FREQUENCY); - consumeRecords(flushConsumer, expectedRecordsStream1Batch2); - flushConsumer.close(); - - verifyStartAndClose(); - // expects the records to be grouped because periodicBufferFlush occurs at the end of acceptTracked - verifyRecords(STREAM_NAME, SCHEMA_NAME, - Stream.concat(expectedRecordsStream1.stream(), expectedRecordsStream1Batch2.stream()).collect(Collectors.toList())); - verify(outputRecordCollector).accept(STATE_MESSAGE1); - } - - @Test - void testSlowStreamReturnsMultipleStates() throws Exception { - // generate records less than the default maxQueueSizeInBytes to confirm periodic flushing occurs - final List expectedRecordsStream1 = generateRecords(500L); - final List expectedRecordsStream1Batch2 = generateRecords(200L); - // creates records equal to size that triggers buffer flush - final List expectedRecordsStream1Batch3 = generateRecords(1_000L); - - // Overrides flush frequency for testing purposes to 5 seconds - final BufferedStreamConsumer flushConsumer = getConsumerWithFlushFrequency(); - flushConsumer.start(); - consumeRecords(flushConsumer, expectedRecordsStream1); - flushConsumer.accept(STATE_MESSAGE1); - // NOTE: Sleeps process for 5 seconds, if tests are slow this can be updated to reduce slowdowns - TimeUnit.SECONDS.sleep(PERIODIC_BUFFER_FREQUENCY); - consumeRecords(flushConsumer, expectedRecordsStream1Batch2); - consumeRecords(flushConsumer, expectedRecordsStream1Batch3); - flushConsumer.accept(STATE_MESSAGE2); - flushConsumer.close(); - - verifyStartAndClose(); - // expects the records to be grouped because periodicBufferFlush occurs at the end of acceptTracked - verifyRecords(STREAM_NAME, SCHEMA_NAME, - Stream.concat(expectedRecordsStream1.stream(), expectedRecordsStream1Batch2.stream()).collect(Collectors.toList())); - verifyRecords(STREAM_NAME, SCHEMA_NAME, expectedRecordsStream1Batch3); - // expects two STATE messages returned since one will be flushed after periodic flushing occurs - // and the other after buffer has been filled - verify(outputRecordCollector).accept(STATE_MESSAGE1); - verify(outputRecordCollector).accept(STATE_MESSAGE2); - } - - /** - * Verify that if we ack a state message for stream2 while stream1 has unflushed records+state, that - * we do _not_ ack stream1's state message. - */ - @Test - void testStreamTail() throws Exception { - // InMemoryRecordBufferingStrategy always returns FLUSH_ALL, so just mock a new strategy here - final BufferingStrategy strategy = mock(BufferingStrategy.class); - // The first two records that we push will not trigger any flushes, but the third record _will_ - // trigger a flush - when(strategy.addRecord(any(), any())).thenReturn( - Optional.empty(), - Optional.empty(), - Optional.of(BufferFlushType.FLUSH_SINGLE_STREAM)); - consumer = new BufferedStreamConsumer( - outputRecordCollector, - onStart, - strategy, - onClose, - CATALOG, - isValidRecord, - // Never periodic flush - Duration.ofHours(24), - null); - final List expectedRecordsStream1 = List.of(new AirbyteMessage() - .withType(Type.RECORD) - .withRecord(new AirbyteRecordMessage() - .withStream(STREAM_NAME) - .withNamespace(SCHEMA_NAME))); - final List expectedRecordsStream2 = List.of(new AirbyteMessage() - .withType(Type.RECORD) - .withRecord(new AirbyteRecordMessage() - .withStream(STREAM_NAME2) - .withNamespace(SCHEMA_NAME))); - - final AirbyteMessage state1 = new AirbyteMessage() - .withType(Type.STATE) - .withState(new AirbyteStateMessage() - .withType(AirbyteStateMessage.AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName(STREAM_NAME).withNamespace(SCHEMA_NAME)) - .withStreamState(Jsons.jsonNode(ImmutableMap.of("state_message_id", 1))))); - final AirbyteMessage state2 = new AirbyteMessage() - .withType(Type.STATE) - .withState(new AirbyteStateMessage() - .withType(AirbyteStateMessage.AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName(STREAM_NAME2).withNamespace(SCHEMA_NAME)) - .withStreamState(Jsons.jsonNode(ImmutableMap.of("state_message_id", 2))))); - - consumer.start(); - consumeRecords(consumer, expectedRecordsStream1); - consumer.accept(state1); - // At this point, we have not yet flushed anything - consumeRecords(consumer, expectedRecordsStream2); - consumer.accept(state2); - consumeRecords(consumer, expectedRecordsStream2); - // Now we have flushed stream 2, but not stream 1 - // Verify that we have only acked stream 2's state. - verify(outputRecordCollector).accept(state2); - verify(outputRecordCollector, never()).accept(state1); - - consumer.close(); - // Now we've closed the consumer, which flushes everything. - // Verify that we ack stream 1's pending state. - verify(outputRecordCollector).accept(state1); - } - - /** - * Same idea as {@link #testStreamTail()} but with global state. We shouldn't emit any state - * messages until we close the consumer. - */ - @Test - void testStreamTailGlobalState() throws Exception { - // InMemoryRecordBufferingStrategy always returns FLUSH_ALL, so just mock a new strategy here - final BufferingStrategy strategy = mock(BufferingStrategy.class); - // The first two records that we push will not trigger any flushes, but the third record _will_ - // trigger a flush - when(strategy.addRecord(any(), any())).thenReturn( - Optional.empty(), - Optional.empty(), - Optional.of(BufferFlushType.FLUSH_SINGLE_STREAM)); - consumer = new BufferedStreamConsumer( - outputRecordCollector, - onStart, - strategy, - onClose, - CATALOG, - isValidRecord, - // Never periodic flush - Duration.ofHours(24), - null); - final List expectedRecordsStream1 = List.of(new AirbyteMessage() - .withType(Type.RECORD) - .withRecord(new AirbyteRecordMessage() - .withStream(STREAM_NAME) - .withNamespace(SCHEMA_NAME))); - final List expectedRecordsStream2 = List.of(new AirbyteMessage() - .withType(Type.RECORD) - .withRecord(new AirbyteRecordMessage() - .withStream(STREAM_NAME2) - .withNamespace(SCHEMA_NAME))); - - final AirbyteMessage state1 = new AirbyteMessage() - .withType(Type.STATE) - .withState(new AirbyteStateMessage() - .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) - .withGlobal(new AirbyteGlobalState() - .withSharedState(Jsons.jsonNode(ImmutableMap.of("state_message_id", 1))))); - final AirbyteMessage state2 = new AirbyteMessage() - .withType(Type.STATE) - .withState(new AirbyteStateMessage() - .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) - .withGlobal(new AirbyteGlobalState() - .withSharedState(Jsons.jsonNode(ImmutableMap.of("state_message_id", 2))))); - - consumer.start(); - consumeRecords(consumer, expectedRecordsStream1); - consumer.accept(state1); - // At this point, we have not yet flushed anything - consumeRecords(consumer, expectedRecordsStream2); - consumer.accept(state2); - consumeRecords(consumer, expectedRecordsStream2); - // Now we have flushed stream 2, but not stream 1 - // We should not have acked any state yet, because we haven't written stream1's records yet. - verify(outputRecordCollector, never()).accept(any()); - - consumer.close(); - // Now we've closed the consumer, which flushes everything. - // Verify that we ack the final state. - // Note that we discard state1 entirely - this is OK. As long as we ack the last state message, - // the source can correctly resume from that point. - verify(outputRecordCollector).accept(state2); - } - - private BufferedStreamConsumer getConsumerWithFlushFrequency() { - final BufferedStreamConsumer flushFrequencyConsumer = new BufferedStreamConsumer( - outputRecordCollector, - onStart, - new InMemoryRecordBufferingStrategy(recordWriter, 10_000), - onClose, - CATALOG, - isValidRecord, - Duration.ofSeconds(PERIODIC_BUFFER_FREQUENCY), - null); - return flushFrequencyConsumer; - } - - private void verifyStartAndClose() throws Exception { - verify(onStart).call(); - verify(onClose).accept(false, new HashMap<>()); - } - - /** Indicates that a failure occurred while consuming AirbyteMessages */ - private void verifyStartAndCloseFailure() throws Exception { - verify(onStart).call(); - verify(onClose).accept(true, new HashMap<>()); - } - - private static void consumeRecords(final BufferedStreamConsumer consumer, final Collection records) { - records.forEach(m -> { - try { - consumer.accept(m); - } catch (final Exception e) { - throw new RuntimeException(e); - } - }); - } - - // NOTE: Generates records at chunks of 160 bytes - private static List generateRecords(final long targetSizeInBytes) { - final List output = Lists.newArrayList(); - long bytesCounter = 0; - for (int i = 0;; i++) { - final JsonNode payload = - Jsons.jsonNode(ImmutableMap.of("id", RandomStringUtils.randomAlphabetic(7), "name", "human " + String.format("%8d", i))); - final long sizeInBytes = RecordSizeEstimator.getStringByteSize(payload); - bytesCounter += sizeInBytes; - final AirbyteMessage airbyteMessage = new AirbyteMessage() - .withType(Type.RECORD) - .withRecord(new AirbyteRecordMessage() - .withStream(STREAM_NAME) - .withNamespace(SCHEMA_NAME) - .withEmittedAt(Instant.now().toEpochMilli()) - .withData(payload)); - if (bytesCounter > targetSizeInBytes) { - break; - } else { - output.add(airbyteMessage); - } - } - return output; - } - - private void verifyRecords(final String streamName, final String namespace, final Collection expectedRecords) throws Exception { - verify(recordWriter).accept( - new AirbyteStreamNameNamespacePair(streamName, namespace), - expectedRecords.stream().map(AirbyteMessage::getRecord).collect(Collectors.toList())); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/RecordSizeEstimatorTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/RecordSizeEstimatorTest.java deleted file mode 100644 index 5a9d2d4bba137..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/RecordSizeEstimatorTest.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.buffered_stream_consumer; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import org.junit.jupiter.api.Test; - -class RecordSizeEstimatorTest { - - private static final JsonNode DATA_0 = Jsons.deserialize("{}"); - private static final JsonNode DATA_1 = Jsons.deserialize("{ \"field1\": true }"); - private static final JsonNode DATA_2 = Jsons.deserialize("{ \"field1\": 10000 }"); - private static final long DATA_0_SIZE = RecordSizeEstimator.getStringByteSize(DATA_0); - private static final long DATA_1_SIZE = RecordSizeEstimator.getStringByteSize(DATA_1); - private static final long DATA_2_SIZE = RecordSizeEstimator.getStringByteSize(DATA_2); - - @Test - public void testPeriodicSampling() { - // the estimate performs a size sampling every 3 records - final RecordSizeEstimator sizeEstimator = new RecordSizeEstimator(3); - final String stream = "stream"; - final AirbyteRecordMessage record0 = new AirbyteRecordMessage().withStream(stream).withData(DATA_0); - final AirbyteRecordMessage record1 = new AirbyteRecordMessage().withStream(stream).withData(DATA_1); - final AirbyteRecordMessage record2 = new AirbyteRecordMessage().withStream(stream).withData(DATA_2); - - // sample record message 1 - final long firstEstimation = DATA_1_SIZE; - assertEquals(firstEstimation, sizeEstimator.getEstimatedByteSize(record1)); - // next two calls return the first sampling result - assertEquals(firstEstimation, sizeEstimator.getEstimatedByteSize(record0)); - assertEquals(firstEstimation, sizeEstimator.getEstimatedByteSize(record0)); - - // sample record message 2 - final long secondEstimation = firstEstimation / 2 + DATA_2_SIZE / 2; - assertEquals(secondEstimation, sizeEstimator.getEstimatedByteSize(record2)); - // next two calls return the second sampling result - assertEquals(secondEstimation, sizeEstimator.getEstimatedByteSize(record0)); - assertEquals(secondEstimation, sizeEstimator.getEstimatedByteSize(record0)); - - // sample record message 1 - final long thirdEstimation = secondEstimation / 2 + DATA_1_SIZE / 2; - assertEquals(thirdEstimation, sizeEstimator.getEstimatedByteSize(record1)); - // next two calls return the first sampling result - assertEquals(thirdEstimation, sizeEstimator.getEstimatedByteSize(record0)); - assertEquals(thirdEstimation, sizeEstimator.getEstimatedByteSize(record0)); - } - - @Test - public void testDifferentEstimationPerStream() { - final RecordSizeEstimator sizeEstimator = new RecordSizeEstimator(); - final AirbyteRecordMessage record0 = new AirbyteRecordMessage().withStream("stream1").withData(DATA_0); - final AirbyteRecordMessage record1 = new AirbyteRecordMessage().withStream("stream2").withData(DATA_1); - final AirbyteRecordMessage record2 = new AirbyteRecordMessage().withStream("stream3").withData(DATA_2); - assertEquals(DATA_0_SIZE, sizeEstimator.getEstimatedByteSize(record0)); - assertEquals(DATA_1_SIZE, sizeEstimator.getEstimatedByteSize(record1)); - assertEquals(DATA_2_SIZE, sizeEstimator.getEstimatedByteSize(record2)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DefaultDestStateLifecycleManagerTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DefaultDestStateLifecycleManagerTest.java deleted file mode 100644 index 66e5c226bb7bc..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DefaultDestStateLifecycleManagerTest.java +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.dest_state_lifecycle_manager; - -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; - -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteMessage.Type; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import io.airbyte.protocol.models.v0.AirbyteStateMessage.AirbyteStateType; -import io.airbyte.protocol.models.v0.AirbyteStreamState; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class DefaultDestStateLifecycleManagerTest { - - private static final AirbyteMessage UNSET_TYPE_MESSAGE = new AirbyteMessage() - .withType(Type.STATE) - .withState(new AirbyteStateMessage()); - private static final AirbyteMessage LEGACY_MESSAGE = new AirbyteMessage() - .withType(Type.STATE) - .withState(new AirbyteStateMessage().withType(AirbyteStateType.LEGACY)); - private static final AirbyteMessage GLOBAL_MESSAGE = new AirbyteMessage() - .withType(Type.STATE) - .withState(new AirbyteStateMessage().withType(AirbyteStateType.GLOBAL)); - private static final AirbyteMessage STREAM_MESSAGE = new AirbyteMessage() - .withType(Type.STATE) - .withState(new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName("users")))); - - private DestStateLifecycleManager mgr1; - private DestStateLifecycleManager singleStateMgr; - private DestStateLifecycleManager streamMgr; - - @BeforeEach - void setup() { - singleStateMgr = mock(DestStateLifecycleManager.class); - streamMgr = mock(DestStateLifecycleManager.class); - mgr1 = new DefaultDestStateLifecycleManager(singleStateMgr, streamMgr); - } - - @Test - void testFailsOnIncompatibleStates() { - final DefaultDestStateLifecycleManager manager1 = new DefaultDestStateLifecycleManager(singleStateMgr, streamMgr); - manager1.addState(UNSET_TYPE_MESSAGE); - manager1.addState(UNSET_TYPE_MESSAGE); - manager1.addState(LEGACY_MESSAGE); - assertThrows(IllegalArgumentException.class, () -> manager1.addState(GLOBAL_MESSAGE)); - assertThrows(IllegalArgumentException.class, () -> manager1.addState(STREAM_MESSAGE)); - - final DefaultDestStateLifecycleManager manager2 = new DefaultDestStateLifecycleManager(singleStateMgr, streamMgr); - manager2.addState(LEGACY_MESSAGE); - manager2.addState(LEGACY_MESSAGE); - manager2.addState(UNSET_TYPE_MESSAGE); - assertThrows(IllegalArgumentException.class, () -> manager2.addState(GLOBAL_MESSAGE)); - assertThrows(IllegalArgumentException.class, () -> manager2.addState(STREAM_MESSAGE)); - - final DefaultDestStateLifecycleManager manager3 = new DefaultDestStateLifecycleManager(singleStateMgr, streamMgr); - manager3.addState(GLOBAL_MESSAGE); - manager3.addState(GLOBAL_MESSAGE); - assertThrows(IllegalArgumentException.class, () -> manager3.addState(UNSET_TYPE_MESSAGE)); - assertThrows(IllegalArgumentException.class, () -> manager3.addState(LEGACY_MESSAGE)); - assertThrows(IllegalArgumentException.class, () -> manager3.addState(STREAM_MESSAGE)); - - final DefaultDestStateLifecycleManager manager4 = new DefaultDestStateLifecycleManager(singleStateMgr, streamMgr); - manager4.addState(STREAM_MESSAGE); - manager4.addState(STREAM_MESSAGE); - assertThrows(IllegalArgumentException.class, () -> manager4.addState(UNSET_TYPE_MESSAGE)); - assertThrows(IllegalArgumentException.class, () -> manager4.addState(LEGACY_MESSAGE)); - assertThrows(IllegalArgumentException.class, () -> manager4.addState(GLOBAL_MESSAGE)); - } - - @Test - void testDelegatesLegacyMessages() { - mgr1.addState(UNSET_TYPE_MESSAGE); - mgr1.addState(LEGACY_MESSAGE); - mgr1.markPendingAsFlushed(); - mgr1.markFlushedAsCommitted(); - mgr1.listFlushed(); - mgr1.listCommitted(); - verify(singleStateMgr).addState(UNSET_TYPE_MESSAGE); - verify(singleStateMgr).addState(LEGACY_MESSAGE); - verify(singleStateMgr).markPendingAsFlushed(); - verify(singleStateMgr).markFlushedAsCommitted(); - verify(singleStateMgr).listFlushed(); - verify(singleStateMgr).listCommitted(); - } - - @Test - void testDelegatesGlobalMessages() { - mgr1.addState(GLOBAL_MESSAGE); - mgr1.markPendingAsFlushed(); - mgr1.markFlushedAsCommitted(); - mgr1.listFlushed(); - mgr1.listCommitted(); - verify(singleStateMgr).addState(GLOBAL_MESSAGE); - verify(singleStateMgr).markPendingAsFlushed(); - verify(singleStateMgr).markFlushedAsCommitted(); - verify(singleStateMgr).listFlushed(); - verify(singleStateMgr).listCommitted(); - } - - @Test - void testDelegatesStreamMessages() { - mgr1.addState(STREAM_MESSAGE); - mgr1.markPendingAsFlushed(); - mgr1.markFlushedAsCommitted(); - mgr1.listFlushed(); - mgr1.listCommitted(); - - verify(streamMgr).addState(STREAM_MESSAGE); - verify(streamMgr).markPendingAsFlushed(); - verify(streamMgr).markFlushedAsCommitted(); - verify(streamMgr).listFlushed(); - verify(streamMgr).listCommitted(); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DestSingleStateLifecycleManagerTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DestSingleStateLifecycleManagerTest.java deleted file mode 100644 index c0ed7621c05fe..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DestSingleStateLifecycleManagerTest.java +++ /dev/null @@ -1,141 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.dest_state_lifecycle_manager; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteMessage.Type; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import io.airbyte.protocol.models.v0.AirbyteStateMessage.AirbyteStateType; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class DestSingleStateLifecycleManagerTest { - - private static final AirbyteMessage MESSAGE1 = new AirbyteMessage() - .withType(Type.STATE) - .withState(new AirbyteStateMessage().withType(AirbyteStateType.GLOBAL).withData(Jsons.jsonNode("a"))); - private static final AirbyteMessage MESSAGE2 = new AirbyteMessage() - .withType(Type.STATE) - .withState(new AirbyteStateMessage().withType(AirbyteStateType.GLOBAL).withData(Jsons.jsonNode("b"))); - - private DestSingleStateLifecycleManager mgr; - - @BeforeEach - void setup() { - mgr = new DestSingleStateLifecycleManager(); - } - - /** - * Demonstrates expected lifecycle of a state object for documentation purposes. Subsequent test get - * into the details. - */ - @Test - void testBasicLifeCycle() { - // starts with no state. - assertTrue(mgr.listPending().isEmpty()); - assertTrue(mgr.listFlushed().isEmpty()); - assertTrue(mgr.listCommitted().isEmpty()); - - mgr.addState(MESSAGE1); - // new state supersedes previous ones. we should only see MESSAGE2 from here on out. - mgr.addState(MESSAGE2); - - // after adding a state, it is in pending only. - assertEquals(MESSAGE2, mgr.listPending().poll()); - assertTrue(mgr.listFlushed().isEmpty()); - assertTrue(mgr.listCommitted().isEmpty()); - - mgr.markPendingAsFlushed(); - - // after flushing the state it is in flushed only. - assertTrue(mgr.listPending().isEmpty()); - assertEquals(MESSAGE2, mgr.listFlushed().poll()); - assertTrue(mgr.listCommitted().isEmpty()); - - // after committing the state it is in committed only. - mgr.markFlushedAsCommitted(); - - assertTrue(mgr.listPending().isEmpty()); - assertTrue(mgr.listFlushed().isEmpty()); - assertEquals(MESSAGE2, mgr.listCommitted().poll()); - } - - @Test - void testPending() { - mgr.addState(MESSAGE1); - mgr.addState(MESSAGE2); - - // verify the LAST message is returned. - assertEquals(MESSAGE2, mgr.listPending().poll()); - assertTrue(mgr.listFlushed().isEmpty()); - assertTrue(mgr.listCommitted().isEmpty()); - } - - @Test - void testFlushed() { - mgr.addState(MESSAGE1); - mgr.addState(MESSAGE2); - mgr.markPendingAsFlushed(); - - assertTrue(mgr.listPending().isEmpty()); - assertEquals(MESSAGE2, mgr.listFlushed().poll()); - assertTrue(mgr.listCommitted().isEmpty()); - - // verify that multiple calls to markPendingAsFlushed overwrite old states - mgr.addState(MESSAGE1); - mgr.markPendingAsFlushed(); - mgr.markPendingAsFlushed(); - - assertTrue(mgr.listPending().isEmpty()); - assertEquals(MESSAGE1, mgr.listFlushed().poll()); - assertTrue(mgr.listCommitted().isEmpty()); - } - - @Test - void testCommitted() { - mgr.addState(MESSAGE1); - mgr.addState(MESSAGE2); - mgr.markPendingAsFlushed(); - mgr.markFlushedAsCommitted(); - - assertTrue(mgr.listPending().isEmpty()); - assertTrue(mgr.listFlushed().isEmpty()); - assertEquals(MESSAGE2, mgr.listCommitted().poll()); - - // verify that multiple calls to markFlushedAsCommitted overwrite old states - mgr.addState(MESSAGE1); - mgr.markPendingAsFlushed(); - mgr.markFlushedAsCommitted(); - mgr.markFlushedAsCommitted(); - - assertTrue(mgr.listPending().isEmpty()); - assertTrue(mgr.listFlushed().isEmpty()); - assertEquals(MESSAGE1, mgr.listCommitted().poll()); - } - - /* - * This change follows the same changes in DestStreamStateLifecycleManager where the goal is to - * confirm that `markPendingAsCommitted` combines what was previous `markPendingAsFlushed` and - * `markFlushedAsCommitted` - * - * The reason for this method is due to destination checkpointing will no longer hold into a state - * as "Flushed" but immediately commit records to the destination's final table - */ - @Test - void testMarkPendingAsCommitted() { - mgr.addState(MESSAGE1); - mgr.addState(MESSAGE2); - mgr.markPendingAsCommitted(); - - assertTrue(mgr.listPending().isEmpty()); - assertTrue(mgr.listFlushed().isEmpty()); - assertEquals(MESSAGE2, mgr.listCommitted().poll()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DestStreamStateLifecycleManagerTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DestStreamStateLifecycleManagerTest.java deleted file mode 100644 index b36add37561bf..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DestStreamStateLifecycleManagerTest.java +++ /dev/null @@ -1,162 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.dest_state_lifecycle_manager; - -import static org.junit.jupiter.api.Assertions.*; - -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteMessage.Type; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import io.airbyte.protocol.models.v0.AirbyteStateMessage.AirbyteStateType; -import io.airbyte.protocol.models.v0.AirbyteStreamState; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.util.LinkedList; -import java.util.List; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class DestStreamStateLifecycleManagerTest { - - private static final AirbyteMessage STREAM1_MESSAGE1 = new AirbyteMessage() - .withType(Type.STATE) - .withState(new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName("apples")).withStreamState(Jsons.jsonNode("a")))); - private static final AirbyteMessage STREAM1_MESSAGE2 = new AirbyteMessage() - .withType(Type.STATE) - .withState(new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName("apples")).withStreamState(Jsons.jsonNode("b")))); - private static final AirbyteMessage STREAM2_MESSAGE1 = new AirbyteMessage() - .withType(Type.STATE) - .withState(new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream( - new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName("bananas")).withStreamState(Jsons.jsonNode("10")))); - - private DestStreamStateLifecycleManager mgr; - - @BeforeEach - void setup() { - mgr = new DestStreamStateLifecycleManager("default_namespace"); - } - - /** - * Demonstrates expected lifecycle of a state object for documentation purposes. Subsequent test get - * into the details. - */ - @Test - void testBasicLifeCycle() { - // starts with no state. - assertTrue(mgr.listPending().isEmpty()); - assertTrue(mgr.listFlushed().isEmpty()); - assertTrue(mgr.listCommitted().isEmpty()); - - mgr.addState(STREAM1_MESSAGE1); - // new state supersedes previous ones. we should only see MESSAGE2 for STREAM1 from here on out. - mgr.addState(STREAM1_MESSAGE2); - // different stream, thus does not interact with messages from STREAM1. - mgr.addState(STREAM2_MESSAGE1); - - // after adding a state, it is in pending only. - assertEquals(new LinkedList<>(List.of(STREAM1_MESSAGE2, STREAM2_MESSAGE1)), mgr.listPending()); - assertTrue(mgr.listFlushed().isEmpty()); - assertTrue(mgr.listCommitted().isEmpty()); - - mgr.markPendingAsFlushed(); - - // after flushing the state it is in flushed only. - assertTrue(mgr.listPending().isEmpty()); - assertEquals(new LinkedList<>(List.of(STREAM1_MESSAGE2, STREAM2_MESSAGE1)), mgr.listFlushed()); - assertTrue(mgr.listCommitted().isEmpty()); - - // after committing the state it is in committed only. - mgr.markFlushedAsCommitted(); - - assertTrue(mgr.listPending().isEmpty()); - assertTrue(mgr.listFlushed().isEmpty()); - assertEquals(new LinkedList<>(List.of(STREAM1_MESSAGE2, STREAM2_MESSAGE1)), mgr.listCommitted()); - } - - @Test - void testPending() { - mgr.addState(STREAM1_MESSAGE1); - mgr.addState(STREAM1_MESSAGE2); - mgr.addState(STREAM2_MESSAGE1); - - // verify the LAST message is returned. - assertEquals(new LinkedList<>(List.of(STREAM1_MESSAGE2, STREAM2_MESSAGE1)), mgr.listPending()); - assertTrue(mgr.listFlushed().isEmpty()); - assertTrue(mgr.listCommitted().isEmpty()); - } - - /* - * TODO: remove this test after all destination connectors have updated to reflect destination - * checkpointing changes where flush/commit will be bundled into the same operation - */ - @Deprecated - @Test - void testFlushed() { - mgr.addState(STREAM1_MESSAGE1); - mgr.addState(STREAM1_MESSAGE2); - mgr.addState(STREAM2_MESSAGE1); - mgr.markPendingAsFlushed(); - - assertTrue(mgr.listPending().isEmpty()); - assertEquals(new LinkedList<>(List.of(STREAM1_MESSAGE2, STREAM2_MESSAGE1)), mgr.listFlushed()); - assertTrue(mgr.listCommitted().isEmpty()); - - // verify that multiple calls to markPendingAsFlushed overwrite old states - mgr.addState(STREAM1_MESSAGE1); - mgr.markPendingAsFlushed(); - mgr.markPendingAsFlushed(); - - assertTrue(mgr.listPending().isEmpty()); - assertEquals(new LinkedList<>(List.of(STREAM1_MESSAGE1, STREAM2_MESSAGE1)), mgr.listFlushed()); - assertTrue(mgr.listCommitted().isEmpty()); - } - - @Test - void testCommitted() { - mgr.addState(STREAM1_MESSAGE1); - mgr.addState(STREAM1_MESSAGE2); - mgr.addState(STREAM2_MESSAGE1); - mgr.markPendingAsFlushed(); - mgr.markFlushedAsCommitted(); - - assertTrue(mgr.listPending().isEmpty()); - assertTrue(mgr.listFlushed().isEmpty()); - assertEquals(new LinkedList<>(List.of(STREAM1_MESSAGE2, STREAM2_MESSAGE1)), mgr.listCommitted()); - - // verify that multiple calls to markFlushedAsCommitted overwrite old states - mgr.addState(STREAM1_MESSAGE1); - mgr.markPendingAsFlushed(); - mgr.markFlushedAsCommitted(); - mgr.markFlushedAsCommitted(); - - assertTrue(mgr.listPending().isEmpty()); - assertTrue(mgr.listFlushed().isEmpty()); - assertEquals(new LinkedList<>(List.of(STREAM1_MESSAGE1, STREAM2_MESSAGE1)), mgr.listCommitted()); - } - - /* - * This section is to test for logic that is isolated to changes with respect to destination - * checkpointing where it captures flush and commit are bundled into a transaction so - * - * buffer -(flush buffer)-> staging area -(copy into {staging_file})-> destination raw table - */ - @Test - void testPendingAsCommitted() { - mgr.addState(STREAM1_MESSAGE1); - mgr.markPendingAsCommitted(); - - // verifies that we've skipped "Flushed" without needing to call `markPendingAsFlushed()` and - // `markFlushedAsCommitted` - assertTrue(mgr.listPending().isEmpty()); - assertEquals(new LinkedList<>(List.of(STREAM1_MESSAGE1)), mgr.listCommitted()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination/record_buffer/InMemoryRecordBufferingStrategyTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination/record_buffer/InMemoryRecordBufferingStrategyTest.java deleted file mode 100644 index d76943f523223..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination/record_buffer/InMemoryRecordBufferingStrategyTest.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.record_buffer; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.destination.buffered_stream_consumer.RecordWriter; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import java.util.List; -import java.util.Optional; -import org.junit.jupiter.api.Test; - -public class InMemoryRecordBufferingStrategyTest { - - private static final JsonNode MESSAGE_DATA = Jsons.deserialize("{ \"field1\": 10000 }"); - // MESSAGE_DATA should be 64 bytes long, size the buffer such as it can contain at least 2 message - // instances - private static final int MAX_QUEUE_SIZE_IN_BYTES = 130; - - @SuppressWarnings("unchecked") - private final RecordWriter recordWriter = mock(RecordWriter.class); - - @Test - public void testBuffering() throws Exception { - final InMemoryRecordBufferingStrategy buffering = new InMemoryRecordBufferingStrategy(recordWriter, MAX_QUEUE_SIZE_IN_BYTES); - final AirbyteStreamNameNamespacePair stream1 = new AirbyteStreamNameNamespacePair("stream1", "namespace"); - final AirbyteStreamNameNamespacePair stream2 = new AirbyteStreamNameNamespacePair("stream2", null); - final AirbyteMessage message1 = generateMessage(stream1); - final AirbyteMessage message2 = generateMessage(stream2); - final AirbyteMessage message3 = generateMessage(stream2); - final AirbyteMessage message4 = generateMessage(stream2); - - assertFalse(buffering.addRecord(stream1, message1).isPresent()); - assertFalse(buffering.addRecord(stream2, message2).isPresent()); - // Buffer still has room - final Optional flushType = buffering.addRecord(stream2, message3); - // Keeps track of this #addRecord since we're expecting a buffer flush & that the flushType - // value will indicate that all buffers were flushed - assertTrue(flushType.isPresent()); - assertEquals(flushType.get(), BufferFlushType.FLUSH_ALL); - // Buffer limit reach, flushing all messages so far before adding the new incoming one - verify(recordWriter, times(1)).accept(stream1, List.of(message1.getRecord())); - verify(recordWriter, times(1)).accept(stream2, List.of(message2.getRecord())); - - buffering.addRecord(stream2, message4); - - // force flush to terminate test - buffering.flushAllBuffers(); - verify(recordWriter, times(1)).accept(stream2, List.of(message3.getRecord(), message4.getRecord())); - } - - private static AirbyteMessage generateMessage(final AirbyteStreamNameNamespacePair stream) { - return new AirbyteMessage().withRecord(new AirbyteRecordMessage() - .withStream(stream.getName()) - .withNamespace(stream.getNamespace()) - .withData(MESSAGE_DATA)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination/record_buffer/SerializedBufferingStrategyTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination/record_buffer/SerializedBufferingStrategyTest.java deleted file mode 100644 index f94f032a46f07..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination/record_buffer/SerializedBufferingStrategyTest.java +++ /dev/null @@ -1,220 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.record_buffer; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.util.Optional; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -public class SerializedBufferingStrategyTest { - - private static final JsonNode MESSAGE_DATA = Jsons.deserialize("{ \"field1\": 10000 }"); - private static final String STREAM_1 = "stream1"; - private static final String STREAM_2 = "stream2"; - private static final String STREAM_3 = "stream3"; - private static final String STREAM_4 = "stream4"; - - // we set the limit to hold at most 4 messages of 10b total - private static final long MAX_TOTAL_BUFFER_SIZE_BYTES = 42L; - // we set the limit to hold at most 2 messages of 10b per stream - private static final long MAX_PER_STREAM_BUFFER_SIZE_BYTES = 21L; - - private final ConfiguredAirbyteCatalog catalog = mock(ConfiguredAirbyteCatalog.class); - @SuppressWarnings("unchecked") - private final FlushBufferFunction perStreamFlushHook = mock(FlushBufferFunction.class); - - private final SerializableBuffer recordWriter1 = mock(SerializableBuffer.class); - private final SerializableBuffer recordWriter2 = mock(SerializableBuffer.class); - private final SerializableBuffer recordWriter3 = mock(SerializableBuffer.class); - private final SerializableBuffer recordWriter4 = mock(SerializableBuffer.class); - - @BeforeEach - public void setup() throws Exception { - setupMock(recordWriter1); - setupMock(recordWriter2); - setupMock(recordWriter3); - setupMock(recordWriter4); - } - - private void setupMock(final SerializableBuffer mockObject) throws Exception { - when(mockObject.accept(any())).thenReturn(10L); - when(mockObject.getByteCount()).thenReturn(10L); - when(mockObject.getMaxTotalBufferSizeInBytes()).thenReturn(MAX_TOTAL_BUFFER_SIZE_BYTES); - when(mockObject.getMaxPerStreamBufferSizeInBytes()).thenReturn(MAX_PER_STREAM_BUFFER_SIZE_BYTES); - when(mockObject.getMaxConcurrentStreamsInBuffer()).thenReturn(4); - } - - @Test - public void testPerStreamThresholdFlush() throws Exception { - final SerializedBufferingStrategy buffering = new SerializedBufferingStrategy(onCreateBufferFunction(), catalog, perStreamFlushHook); - final AirbyteStreamNameNamespacePair stream1 = new AirbyteStreamNameNamespacePair(STREAM_1, "namespace"); - final AirbyteStreamNameNamespacePair stream2 = new AirbyteStreamNameNamespacePair(STREAM_2, null); - // To test per stream threshold, we are sending multiple test messages on a single stream - final AirbyteMessage message1 = generateMessage(stream1); - final AirbyteMessage message2 = generateMessage(stream2); - final AirbyteMessage message3 = generateMessage(stream2); - final AirbyteMessage message4 = generateMessage(stream2); - final AirbyteMessage message5 = generateMessage(stream2); - - when(recordWriter1.getByteCount()).thenReturn(10L); // one record in recordWriter1 - assertFalse(buffering.addRecord(stream1, message1).isPresent()); - when(recordWriter2.getByteCount()).thenReturn(10L); // one record in recordWriter2 - assertFalse(buffering.addRecord(stream2, message2).isPresent()); - - // Total and per stream Buffers still have room - verify(perStreamFlushHook, times(0)).accept(stream1, recordWriter1); - verify(perStreamFlushHook, times(0)).accept(stream2, recordWriter2); - - when(recordWriter2.getByteCount()).thenReturn(20L); // second record in recordWriter2 - assertFalse(buffering.addRecord(stream2, message3).isPresent()); - when(recordWriter2.getByteCount()).thenReturn(30L); // third record in recordWriter2 - - // Buffer reaches limit so a buffer flush occurs returning a buffer flush type of single stream - final Optional flushType = buffering.addRecord(stream2, message4); - assertTrue(flushType.isPresent()); - assertEquals(flushType.get(), BufferFlushType.FLUSH_SINGLE_STREAM); - - // The buffer limit is now reached for stream2, flushing that single stream only - verify(perStreamFlushHook, times(0)).accept(stream1, recordWriter1); - verify(perStreamFlushHook, times(1)).accept(stream2, recordWriter2); - - when(recordWriter2.getByteCount()).thenReturn(10L); // back to one record in recordWriter2 - assertFalse(buffering.addRecord(stream2, message5).isPresent()); - - // force flush to terminate test - buffering.flushAllBuffers(); - verify(perStreamFlushHook, times(1)).accept(stream1, recordWriter1); - verify(perStreamFlushHook, times(2)).accept(stream2, recordWriter2); - } - - @Test - public void testTotalStreamThresholdFlush() throws Exception { - final SerializedBufferingStrategy buffering = new SerializedBufferingStrategy(onCreateBufferFunction(), catalog, perStreamFlushHook); - final AirbyteStreamNameNamespacePair stream1 = new AirbyteStreamNameNamespacePair(STREAM_1, "namespace"); - final AirbyteStreamNameNamespacePair stream2 = new AirbyteStreamNameNamespacePair(STREAM_2, "namespace"); - final AirbyteStreamNameNamespacePair stream3 = new AirbyteStreamNameNamespacePair(STREAM_3, "namespace"); - // To test total stream threshold, we are sending test messages to multiple streams without reaching - // per stream limits - final AirbyteMessage message1 = generateMessage(stream1); - final AirbyteMessage message2 = generateMessage(stream2); - final AirbyteMessage message3 = generateMessage(stream3); - final AirbyteMessage message4 = generateMessage(stream1); - final AirbyteMessage message5 = generateMessage(stream2); - final AirbyteMessage message6 = generateMessage(stream3); - - assertFalse(buffering.addRecord(stream1, message1).isPresent()); - assertFalse(buffering.addRecord(stream2, message2).isPresent()); - // Total and per stream Buffers still have room - verify(perStreamFlushHook, times(0)).accept(stream1, recordWriter1); - verify(perStreamFlushHook, times(0)).accept(stream2, recordWriter2); - verify(perStreamFlushHook, times(0)).accept(stream3, recordWriter3); - - assertFalse(buffering.addRecord(stream3, message3).isPresent()); - when(recordWriter1.getByteCount()).thenReturn(20L); // second record in recordWriter1 - assertFalse(buffering.addRecord(stream1, message4).isPresent()); - when(recordWriter2.getByteCount()).thenReturn(20L); // second record in recordWriter2 - - // In response to checkpointing, will need to know what type of buffer flush occurred to mark - // AirbyteStateMessage as committed depending on DestDefaultStateLifecycleManager - final Optional flushType = buffering.addRecord(stream2, message5); - assertTrue(flushType.isPresent()); - assertEquals(flushType.get(), BufferFlushType.FLUSH_ALL); - - // Buffer limit reached for total streams, flushing all streams - verify(perStreamFlushHook, times(1)).accept(stream1, recordWriter1); - verify(perStreamFlushHook, times(1)).accept(stream2, recordWriter2); - verify(perStreamFlushHook, times(1)).accept(stream3, recordWriter3); - - assertFalse(buffering.addRecord(stream3, message6).isPresent()); - // force flush to terminate test - buffering.flushAllBuffers(); - verify(perStreamFlushHook, times(1)).accept(stream1, recordWriter1); - verify(perStreamFlushHook, times(1)).accept(stream2, recordWriter2); - verify(perStreamFlushHook, times(2)).accept(stream3, recordWriter3); - } - - @Test - public void testConcurrentStreamThresholdFlush() throws Exception { - final SerializedBufferingStrategy buffering = new SerializedBufferingStrategy(onCreateBufferFunction(), catalog, perStreamFlushHook); - final AirbyteStreamNameNamespacePair stream1 = new AirbyteStreamNameNamespacePair(STREAM_1, "namespace1"); - final AirbyteStreamNameNamespacePair stream2 = new AirbyteStreamNameNamespacePair(STREAM_2, "namespace2"); - final AirbyteStreamNameNamespacePair stream3 = new AirbyteStreamNameNamespacePair(STREAM_3, null); - final AirbyteStreamNameNamespacePair stream4 = new AirbyteStreamNameNamespacePair(STREAM_4, null); - // To test concurrent stream threshold, we are sending test messages to multiple streams - final AirbyteMessage message1 = generateMessage(stream1); - final AirbyteMessage message2 = generateMessage(stream2); - final AirbyteMessage message3 = generateMessage(stream3); - final AirbyteMessage message4 = generateMessage(stream4); - final AirbyteMessage message5 = generateMessage(stream1); - - assertFalse(buffering.addRecord(stream1, message1).isPresent()); - assertFalse(buffering.addRecord(stream2, message2).isPresent()); - assertFalse(buffering.addRecord(stream3, message3).isPresent()); - // Total and per stream Buffers still have room - verify(perStreamFlushHook, times(0)).accept(stream1, recordWriter1); - verify(perStreamFlushHook, times(0)).accept(stream2, recordWriter2); - verify(perStreamFlushHook, times(0)).accept(stream3, recordWriter3); - - // Since the concurrent stream threshold has been exceeded, all buffer streams are flush - final Optional flushType = buffering.addRecord(stream4, message4); - assertTrue(flushType.isPresent()); - assertEquals(flushType.get(), BufferFlushType.FLUSH_ALL); - - // Buffer limit reached for concurrent streams, flushing all streams - verify(perStreamFlushHook, times(1)).accept(stream1, recordWriter1); - verify(perStreamFlushHook, times(1)).accept(stream2, recordWriter2); - verify(perStreamFlushHook, times(1)).accept(stream3, recordWriter3); - verify(perStreamFlushHook, times(1)).accept(stream4, recordWriter4); - - assertFalse(buffering.addRecord(stream1, message5).isPresent()); - // force flush to terminate test - buffering.flushAllBuffers(); - verify(perStreamFlushHook, times(2)).accept(stream1, recordWriter1); - verify(perStreamFlushHook, times(1)).accept(stream2, recordWriter2); - verify(perStreamFlushHook, times(1)).accept(stream3, recordWriter3); - verify(perStreamFlushHook, times(1)).accept(stream4, recordWriter4); - } - - @Test - public void testCreateBufferFailure() { - final SerializedBufferingStrategy buffering = new SerializedBufferingStrategy(onCreateBufferFunction(), catalog, perStreamFlushHook); - final AirbyteStreamNameNamespacePair stream = new AirbyteStreamNameNamespacePair("unknown_stream", "namespace1"); - assertThrows(RuntimeException.class, () -> buffering.addRecord(stream, generateMessage(stream))); - } - - private static AirbyteMessage generateMessage(final AirbyteStreamNameNamespacePair stream) { - return new AirbyteMessage().withRecord(new AirbyteRecordMessage() - .withStream(stream.getName()) - .withNamespace(stream.getNamespace()) - .withData(MESSAGE_DATA)); - } - - private BufferCreateFunction onCreateBufferFunction() { - return (stream, catalog) -> switch (stream.getName()) { - case STREAM_1 -> recordWriter1; - case STREAM_2 -> recordWriter2; - case STREAM_3 -> recordWriter3; - case STREAM_4 -> recordWriter4; - default -> null; - }; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/AirbyteFileUtilsTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/AirbyteFileUtilsTest.java deleted file mode 100644 index be46f68ee61bb..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/AirbyteFileUtilsTest.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination_async; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import org.junit.jupiter.api.Test; - -public class AirbyteFileUtilsTest { - - @Test - void testByteCountToDisplaySize() { - - assertEquals("500 bytes", AirbyteFileUtils.byteCountToDisplaySize(500L)); - assertEquals("1.95 KB", AirbyteFileUtils.byteCountToDisplaySize(2000L)); - assertEquals("2.93 MB", AirbyteFileUtils.byteCountToDisplaySize(3072000L)); - assertEquals("2.67 GB", AirbyteFileUtils.byteCountToDisplaySize(2872000000L)); - assertEquals("1.82 TB", AirbyteFileUtils.byteCountToDisplaySize(2000000000000L)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/AsyncStreamConsumerTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/AsyncStreamConsumerTest.java deleted file mode 100644 index 67bc7c7dc427c..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/AsyncStreamConsumerTest.java +++ /dev/null @@ -1,412 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination_async; - -import static org.junit.jupiter.api.Assertions.*; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.atLeast; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import io.airbyte.cdk.integrations.destination.buffered_stream_consumer.OnStartFunction; -import io.airbyte.cdk.integrations.destination.buffered_stream_consumer.RecordSizeEstimator; -import io.airbyte.cdk.integrations.destination_async.buffers.BufferManager; -import io.airbyte.cdk.integrations.destination_async.partial_messages.PartialAirbyteMessage; -import io.airbyte.cdk.integrations.destination_async.partial_messages.PartialAirbyteRecordMessage; -import io.airbyte.cdk.integrations.destination_async.state.FlushFailure; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.Field; -import io.airbyte.protocol.models.JsonSchemaType; -import io.airbyte.protocol.models.v0.AirbyteLogMessage; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteMessage.Type; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import io.airbyte.protocol.models.v0.AirbyteStateMessage.AirbyteStateType; -import io.airbyte.protocol.models.v0.AirbyteStateStats; -import io.airbyte.protocol.models.v0.AirbyteStreamState; -import io.airbyte.protocol.models.v0.CatalogHelpers; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.io.IOException; -import java.math.BigDecimal; -import java.time.Instant; -import java.util.Collection; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.Future; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; -import java.util.concurrent.atomic.AtomicLong; -import java.util.function.Consumer; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import org.apache.commons.lang3.RandomStringUtils; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Nested; -import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; - -class AsyncStreamConsumerTest { - - private static final int RECORD_SIZE_20_BYTES = 20; - - private static final String SCHEMA_NAME = "public"; - private static final String STREAM_NAME = "id_and_name"; - private static final String STREAM_NAME2 = STREAM_NAME + 2; - private static final StreamDescriptor STREAM1_DESC = new StreamDescriptor() - .withNamespace(SCHEMA_NAME) - .withName(STREAM_NAME); - - private static final ConfiguredAirbyteCatalog CATALOG = new ConfiguredAirbyteCatalog().withStreams(List.of( - CatalogHelpers.createConfiguredAirbyteStream( - STREAM_NAME, - SCHEMA_NAME, - Field.of("id", JsonSchemaType.NUMBER), - Field.of("name", JsonSchemaType.STRING)), - CatalogHelpers.createConfiguredAirbyteStream( - STREAM_NAME2, - SCHEMA_NAME, - Field.of("id", JsonSchemaType.NUMBER), - Field.of("name", JsonSchemaType.STRING)))); - - private static final JsonNode PAYLOAD = Jsons.jsonNode(Map.of( - "created_at", "2022-02-01T17:02:19+00:00", - "id", 1, - "make", "Mazda", - "nested_column", Map.of("array_column", List.of(1, 2, 3)))); - - private static final AirbyteMessage STATE_MESSAGE1 = new AirbyteMessage() - .withType(Type.STATE) - .withState(new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState().withStreamDescriptor(STREAM1_DESC).withStreamState(Jsons.jsonNode(1)))); - private static final AirbyteMessage STATE_MESSAGE2 = new AirbyteMessage() - .withType(Type.STATE) - .withState(new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState().withStreamDescriptor(STREAM1_DESC).withStreamState(Jsons.jsonNode(2)))); - - private AsyncStreamConsumer consumer; - private OnStartFunction onStart; - private DestinationFlushFunction flushFunction; - private OnCloseFunction onClose; - private Consumer outputRecordCollector; - private FlushFailure flushFailure; - - @SuppressWarnings("unchecked") - @BeforeEach - void setup() { - onStart = mock(OnStartFunction.class); - onClose = mock(OnCloseFunction.class); - flushFunction = mock(DestinationFlushFunction.class); - outputRecordCollector = mock(Consumer.class); - flushFailure = mock(FlushFailure.class); - consumer = new AsyncStreamConsumer( - outputRecordCollector, - onStart, - onClose, - flushFunction, - CATALOG, - new BufferManager(), - flushFailure, - "default_ns"); - - when(flushFunction.getOptimalBatchSizeBytes()).thenReturn(10_000L); - } - - @Test - void test1StreamWith1State() throws Exception { - final List expectedRecords = generateRecords(1_000); - - consumer.start(); - consumeRecords(consumer, expectedRecords); - consumer.accept(Jsons.serialize(STATE_MESSAGE1), RECORD_SIZE_20_BYTES); - consumer.close(); - - verifyStartAndClose(); - - verifyRecords(STREAM_NAME, SCHEMA_NAME, expectedRecords); - - final AirbyteMessage stateMessageWithDestinationStatsUpdated = new AirbyteMessage() - .withType(Type.STATE) - .withState(new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState().withStreamDescriptor(STREAM1_DESC).withStreamState(Jsons.jsonNode(1))) - .withDestinationStats(new AirbyteStateStats().withRecordCount((double) expectedRecords.size()))); - - verify(outputRecordCollector).accept(stateMessageWithDestinationStatsUpdated); - } - - @Test - void test1StreamWith2State() throws Exception { - final List expectedRecords = generateRecords(1_000); - - consumer.start(); - consumeRecords(consumer, expectedRecords); - consumer.accept(Jsons.serialize(STATE_MESSAGE1), RECORD_SIZE_20_BYTES); - consumer.accept(Jsons.serialize(STATE_MESSAGE2), RECORD_SIZE_20_BYTES); - consumer.close(); - - verifyStartAndClose(); - - verifyRecords(STREAM_NAME, SCHEMA_NAME, expectedRecords); - - final AirbyteMessage stateMessageWithDestinationStatsUpdated = new AirbyteMessage() - .withType(Type.STATE) - .withState(new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState().withStreamDescriptor(STREAM1_DESC).withStreamState(Jsons.jsonNode(2))) - .withDestinationStats(new AirbyteStateStats().withRecordCount(0.0))); - - verify(outputRecordCollector, times(1)).accept(stateMessageWithDestinationStatsUpdated); - } - - @Test - void test1StreamWith0State() throws Exception { - final List allRecords = generateRecords(1_000); - - consumer.start(); - consumeRecords(consumer, allRecords); - consumer.close(); - - verifyStartAndClose(); - - verifyRecords(STREAM_NAME, SCHEMA_NAME, allRecords); - } - - @Test - void testShouldBlockWhenQueuesAreFull() throws Exception { - consumer.start(); - } - - /* - * Tests that the consumer will block when the buffer is full. Achieves this by setting optimal - * batch size to 0, so the flush worker never actually pulls anything from the queue. - */ - @Test - void testBackPressure() throws Exception { - flushFunction = mock(DestinationFlushFunction.class); - flushFailure = mock(FlushFailure.class); - consumer = new AsyncStreamConsumer( - m -> {}, - () -> {}, - (hasFailed, recordCounts) -> {}, - flushFunction, - CATALOG, - new BufferManager(1024 * 10), - flushFailure, - "default_ns"); - when(flushFunction.getOptimalBatchSizeBytes()).thenReturn(0L); - - final AtomicLong recordCount = new AtomicLong(); - - consumer.start(); - - final ExecutorService executor = Executors.newSingleThreadExecutor(); - while (true) { - final Future future = executor.submit(() -> { - try { - consumer.accept(Jsons.serialize(new AirbyteMessage() - .withType(Type.RECORD) - .withRecord(new AirbyteRecordMessage() - .withStream(STREAM_NAME) - .withNamespace(SCHEMA_NAME) - .withEmittedAt(Instant.now().toEpochMilli()) - .withData(Jsons.jsonNode(recordCount.getAndIncrement())))), - RECORD_SIZE_20_BYTES); - } catch (final Exception e) { - throw new RuntimeException(e); - } - }); - - try { - future.get(1, TimeUnit.SECONDS); - } catch (final TimeoutException e) { - future.cancel(true); // Stop the operation running in thread - break; - } - } - executor.shutdownNow(); - - assertTrue(recordCount.get() < 1000, String.format("Record count was %s", recordCount.get())); - } - - @Test - void deserializeAirbyteMessageWithAirbyteRecord() { - final AirbyteMessage airbyteMessage = new AirbyteMessage() - .withType(Type.RECORD) - .withRecord(new AirbyteRecordMessage() - .withStream(STREAM_NAME) - .withNamespace(SCHEMA_NAME) - .withData(PAYLOAD)); - final String serializedAirbyteMessage = Jsons.serialize(airbyteMessage); - final String airbyteRecordString = Jsons.serialize(PAYLOAD); - final PartialAirbyteMessage partial = AsyncStreamConsumer.deserializeAirbyteMessage(serializedAirbyteMessage); - assertEquals(airbyteRecordString, partial.getSerialized()); - } - - @Test - void deserializeAirbyteMessageWithBigDecimalAirbyteRecord() { - final JsonNode payload = Jsons.jsonNode(Map.of( - "foo", new BigDecimal("1234567890.1234567890"))); - final AirbyteMessage airbyteMessage = new AirbyteMessage() - .withType(Type.RECORD) - .withRecord(new AirbyteRecordMessage() - .withStream(STREAM_NAME) - .withNamespace(SCHEMA_NAME) - .withData(payload)); - final String serializedAirbyteMessage = Jsons.serialize(airbyteMessage); - final String airbyteRecordString = Jsons.serialize(payload); - final PartialAirbyteMessage partial = AsyncStreamConsumer.deserializeAirbyteMessage(serializedAirbyteMessage); - assertEquals(airbyteRecordString, partial.getSerialized()); - } - - @Test - void deserializeAirbyteMessageWithEmptyAirbyteRecord() { - final Map emptyMap = Map.of(); - final AirbyteMessage airbyteMessage = new AirbyteMessage() - .withType(Type.RECORD) - .withRecord(new AirbyteRecordMessage() - .withStream(STREAM_NAME) - .withNamespace(SCHEMA_NAME) - .withData(Jsons.jsonNode(emptyMap))); - final String serializedAirbyteMessage = Jsons.serialize(airbyteMessage); - final PartialAirbyteMessage partial = AsyncStreamConsumer.deserializeAirbyteMessage(serializedAirbyteMessage); - assertEquals(emptyMap.toString(), partial.getSerialized()); - } - - @Test - void deserializeAirbyteMessageWithNoStateOrRecord() { - final AirbyteMessage airbyteMessage = new AirbyteMessage() - .withType(Type.LOG) - .withLog(new AirbyteLogMessage()); - final String serializedAirbyteMessage = Jsons.serialize(airbyteMessage); - assertThrows(RuntimeException.class, () -> AsyncStreamConsumer.deserializeAirbyteMessage(serializedAirbyteMessage)); - } - - @Test - void deserializeAirbyteMessageWithAirbyteState() { - final String serializedAirbyteMessage = Jsons.serialize(STATE_MESSAGE1); - final PartialAirbyteMessage partial = AsyncStreamConsumer.deserializeAirbyteMessage(serializedAirbyteMessage); - assertEquals(serializedAirbyteMessage, partial.getSerialized()); - } - - @Test - void deserializeAirbyteMessageWithBadAirbyteState() { - final AirbyteMessage badState = new AirbyteMessage() - .withState(new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState().withStreamDescriptor(STREAM1_DESC).withStreamState(Jsons.jsonNode(1)))); - final String serializedAirbyteMessage = Jsons.serialize(badState); - assertThrows(RuntimeException.class, () -> AsyncStreamConsumer.deserializeAirbyteMessage(serializedAirbyteMessage)); - } - - @Nested - class ErrorHandling { - - @Test - void testErrorOnAccept() throws Exception { - when(flushFailure.isFailed()).thenReturn(false).thenReturn(true); - when(flushFailure.getException()).thenReturn(new IOException("test exception")); - - final var m = new AirbyteMessage() - .withType(Type.RECORD) - .withRecord(new AirbyteRecordMessage() - .withStream(STREAM_NAME) - .withNamespace(SCHEMA_NAME) - .withEmittedAt(Instant.now().toEpochMilli()) - .withData(Jsons.deserialize(""))); - consumer.start(); - consumer.accept(Jsons.serialize(m), RECORD_SIZE_20_BYTES); - assertThrows(IOException.class, () -> consumer.accept(Jsons.serialize(m), RECORD_SIZE_20_BYTES)); - } - - @Test - void testErrorOnClose() throws Exception { - when(flushFailure.isFailed()).thenReturn(true); - when(flushFailure.getException()).thenReturn(new IOException("test exception")); - - consumer.start(); - assertThrows(IOException.class, () -> consumer.close()); - } - - } - - private static void consumeRecords(final AsyncStreamConsumer consumer, final Collection records) { - records.forEach(m -> { - try { - consumer.accept(Jsons.serialize(m), RECORD_SIZE_20_BYTES); - } catch (final Exception e) { - throw new RuntimeException(e); - } - }); - } - - // NOTE: Generates records at chunks of 160 bytes - @SuppressWarnings("SameParameterValue") - private static List generateRecords(final long targetSizeInBytes) { - final List output = Lists.newArrayList(); - long bytesCounter = 0; - for (int i = 0;; i++) { - final JsonNode payload = - Jsons.jsonNode(ImmutableMap.of("id", RandomStringUtils.randomAlphabetic(7), "name", "human " + String.format("%8d", i))); - final long sizeInBytes = RecordSizeEstimator.getStringByteSize(payload); - bytesCounter += sizeInBytes; - final AirbyteMessage airbyteMessage = new AirbyteMessage() - .withType(Type.RECORD) - .withRecord(new AirbyteRecordMessage() - .withStream(STREAM_NAME) - .withNamespace(SCHEMA_NAME) - .withData(payload)); - if (bytesCounter > targetSizeInBytes) { - break; - } else { - output.add(airbyteMessage); - } - } - return output; - } - - private void verifyStartAndClose() throws Exception { - verify(onStart).call(); - verify(onClose).accept(any(), any()); - } - - @SuppressWarnings({"unchecked", "SameParameterValue"}) - private void verifyRecords(final String streamName, final String namespace, final List allRecords) - throws Exception { - final ArgumentCaptor> argumentCaptor = ArgumentCaptor.forClass(Stream.class); - verify(flushFunction, atLeast(1)).flush( - eq(new StreamDescriptor().withNamespace(namespace).withName(streamName)), - argumentCaptor.capture()); - - // captures the output of all the workers, since our records could come out in any of them. - final List actualRecords = argumentCaptor - .getAllValues() - .stream() - // flatten those results into a single list for the simplicity of comparison - .flatMap(s -> s) - .toList(); - - final var expRecords = allRecords.stream().map(m -> new PartialAirbyteMessage() - .withType(Type.RECORD) - .withRecord(new PartialAirbyteRecordMessage() - .withStream(m.getRecord().getStream()) - .withNamespace(m.getRecord().getNamespace()) - .withData(m.getRecord().getData())) - .withSerialized(Jsons.serialize(m.getRecord().getData()))).collect(Collectors.toList()); - assertEquals(expRecords, actualRecords); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/DetectStreamToFlushTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/DetectStreamToFlushTest.java deleted file mode 100644 index 02bcf78f7c762..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/DetectStreamToFlushTest.java +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination_async; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import io.airbyte.cdk.integrations.destination_async.buffers.BufferDequeue; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.time.Clock; -import java.time.Duration; -import java.time.Instant; -import java.util.List; -import java.util.Optional; -import java.util.Set; -import java.util.concurrent.atomic.AtomicBoolean; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class DetectStreamToFlushTest { - - public static final Instant NOW = Instant.now(); - public static final Duration FIVE_MIN = Duration.ofMinutes(5); - private static final long SIZE_10MB = 10 * 1024 * 1024; - private static final long SIZE_200MB = 200 * 1024 * 1024; - - private static final StreamDescriptor DESC1 = new StreamDescriptor().withName("test1"); - - private static DestinationFlushFunction flusher; - - @BeforeEach - void setup() { - flusher = mock(DestinationFlushFunction.class); - when(flusher.getOptimalBatchSizeBytes()).thenReturn(SIZE_200MB); - } - - @Test - void testGetNextSkipsEmptyStreams() { - final BufferDequeue bufferDequeue = mock(BufferDequeue.class); - when(bufferDequeue.getBufferedStreams()).thenReturn(Set.of(DESC1)); - when(bufferDequeue.getQueueSizeBytes(DESC1)).thenReturn(Optional.of(0L)); - final RunningFlushWorkers runningFlushWorkers = mock(RunningFlushWorkers.class); - - final DetectStreamToFlush detect = - new DetectStreamToFlush(bufferDequeue, runningFlushWorkers, new AtomicBoolean(false), flusher); - assertEquals(Optional.empty(), detect.getNextStreamToFlush(0)); - } - - @Test - void testGetNextPicksUpOnSizeTrigger() { - final BufferDequeue bufferDequeue = mock(BufferDequeue.class); - when(bufferDequeue.getBufferedStreams()).thenReturn(Set.of(DESC1)); - when(bufferDequeue.getQueueSizeBytes(DESC1)).thenReturn(Optional.of(1L)); - final RunningFlushWorkers runningFlushWorkers = mock(RunningFlushWorkers.class); - final DetectStreamToFlush detect = - new DetectStreamToFlush(bufferDequeue, runningFlushWorkers, new AtomicBoolean(false), flusher); - // if above threshold, triggers - assertEquals(Optional.of(DESC1), detect.getNextStreamToFlush(0)); - // if below threshold, no trigger - assertEquals(Optional.empty(), detect.getNextStreamToFlush(1)); - } - - @Test - void testGetNextAccountsForAlreadyRunningWorkers() { - final BufferDequeue bufferDequeue = mock(BufferDequeue.class); - when(bufferDequeue.getBufferedStreams()).thenReturn(Set.of(DESC1)); - when(bufferDequeue.getQueueSizeBytes(DESC1)).thenReturn(Optional.of(1L)); - final RunningFlushWorkers runningFlushWorkers = mock(RunningFlushWorkers.class); - when(runningFlushWorkers.getSizesOfRunningWorkerBatches(any())).thenReturn(List.of(Optional.of(SIZE_10MB))); - final DetectStreamToFlush detect = - new DetectStreamToFlush(bufferDequeue, runningFlushWorkers, new AtomicBoolean(false), flusher); - assertEquals(Optional.empty(), detect.getNextStreamToFlush(0)); - } - - @Test - void testGetNextPicksUpOnTimeTrigger() { - final BufferDequeue bufferDequeue = mock(BufferDequeue.class); - when(bufferDequeue.getBufferedStreams()).thenReturn(Set.of(DESC1)); - when(bufferDequeue.getQueueSizeBytes(DESC1)).thenReturn(Optional.of(1L)); - final Clock mockedNowProvider = mock(Clock.class); - - final RunningFlushWorkers runningFlushWorkers = mock(RunningFlushWorkers.class); - when(runningFlushWorkers.getSizesOfRunningWorkerBatches(any())).thenReturn(List.of(Optional.of(SIZE_10MB))); - final DetectStreamToFlush detect = - new DetectStreamToFlush(bufferDequeue, runningFlushWorkers, new AtomicBoolean(false), flusher, - mockedNowProvider); - - // initialize flush time - when(mockedNowProvider.millis()) - .thenReturn(NOW.toEpochMilli()); - - assertEquals(Optional.empty(), detect.getNextStreamToFlush(0)); - - // check 5 minutes later - when(mockedNowProvider.millis()) - .thenReturn(NOW.plus(FIVE_MIN).toEpochMilli()); - - assertEquals(Optional.of(DESC1), detect.getNextStreamToFlush(0)); - - // just flush once - assertEquals(Optional.empty(), detect.getNextStreamToFlush(0)); - - // check another 5 minutes later - when(mockedNowProvider.millis()) - .thenReturn(NOW.plus(FIVE_MIN).plus(FIVE_MIN).toEpochMilli()); - assertEquals(Optional.of(DESC1), detect.getNextStreamToFlush(0)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/FlushThresholdTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/FlushThresholdTest.java deleted file mode 100644 index e8f56d90c7778..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/FlushThresholdTest.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination_async; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import io.airbyte.cdk.integrations.destination_async.buffers.BufferDequeue; -import java.util.concurrent.atomic.AtomicBoolean; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -public class FlushThresholdTest { - - private static final long SIZE_10MB = 10 * 1024 * 1024; - - private DestinationFlushFunction flusher; - - @BeforeEach - void setup() { - flusher = mock(DestinationFlushFunction.class); - when(flusher.getQueueFlushThresholdBytes()).thenReturn(SIZE_10MB); - } - - @Test - void testBaseThreshold() { - final AtomicBoolean isClosing = new AtomicBoolean(false); - final BufferDequeue bufferDequeue = mock(BufferDequeue.class); - final DetectStreamToFlush detect = new DetectStreamToFlush(bufferDequeue, null, isClosing, flusher); - assertEquals(SIZE_10MB, detect.computeQueueThreshold()); - } - - @Test - void testClosingThreshold() { - final AtomicBoolean isClosing = new AtomicBoolean(true); - final BufferDequeue bufferDequeue = mock(BufferDequeue.class); - final DetectStreamToFlush detect = new DetectStreamToFlush(bufferDequeue, null, isClosing, flusher); - assertEquals(0, detect.computeQueueThreshold()); - } - - @Test - void testEagerFlushThresholdBelowThreshold() { - final AtomicBoolean isClosing = new AtomicBoolean(false); - final BufferDequeue bufferDequeue = mock(BufferDequeue.class); - when(bufferDequeue.getTotalGlobalQueueSizeBytes()).thenReturn(8L); - when(bufferDequeue.getMaxQueueSizeBytes()).thenReturn(10L); - final DetectStreamToFlush detect = new DetectStreamToFlush(bufferDequeue, null, isClosing, flusher); - assertEquals(SIZE_10MB, detect.computeQueueThreshold()); - } - - @Test - void testEagerFlushThresholdAboveThreshold() { - final AtomicBoolean isClosing = new AtomicBoolean(false); - final BufferDequeue bufferDequeue = mock(BufferDequeue.class); - when(bufferDequeue.getTotalGlobalQueueSizeBytes()).thenReturn(9L); - when(bufferDequeue.getMaxQueueSizeBytes()).thenReturn(10L); - final DetectStreamToFlush detect = new DetectStreamToFlush(bufferDequeue, null, isClosing, flusher); - assertEquals(0, detect.computeQueueThreshold()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/FlushWorkersTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/FlushWorkersTest.java deleted file mode 100644 index ff45391c97736..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/FlushWorkersTest.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination_async; - -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import io.airbyte.cdk.integrations.destination_async.buffers.BufferDequeue; -import io.airbyte.cdk.integrations.destination_async.buffers.MemoryAwareMessageBatch; -import io.airbyte.cdk.integrations.destination_async.partial_messages.PartialAirbyteMessage; -import io.airbyte.cdk.integrations.destination_async.state.FlushFailure; -import io.airbyte.cdk.integrations.destination_async.state.GlobalAsyncStateManager; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.io.IOException; -import java.util.List; -import java.util.Optional; -import java.util.Set; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.stream.Stream; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -public class FlushWorkersTest { - - @Test - void testErrorHandling() throws Exception { - final AtomicBoolean hasThrownError = new AtomicBoolean(false); - final var desc = new StreamDescriptor().withName("test"); - final var dequeue = mock(BufferDequeue.class); - when(dequeue.getBufferedStreams()).thenReturn(Set.of(desc)); - when(dequeue.take(desc, 1000)).thenReturn(new MemoryAwareMessageBatch(List.of(), 10, null, null)); - when(dequeue.getQueueSizeBytes(desc)).thenReturn(Optional.of(10L)); - when(dequeue.getQueueSizeInRecords(desc)).thenAnswer(ignored -> { - if (hasThrownError.get()) { - return Optional.of(0L); - } else { - return Optional.of(1L); - } - }); - - final var flushFailure = new FlushFailure(); - final var workers = new FlushWorkers(dequeue, new ErrorOnFlush(hasThrownError), m -> {}, flushFailure, mock(GlobalAsyncStateManager.class)); - workers.start(); - workers.close(); - - Assertions.assertTrue(flushFailure.isFailed()); - Assertions.assertEquals(IOException.class, flushFailure.getException().getClass()); - } - - private static class ErrorOnFlush implements DestinationFlushFunction { - - private final AtomicBoolean hasThrownError; - - public ErrorOnFlush(final AtomicBoolean hasThrownError) { - this.hasThrownError = hasThrownError; - } - - @Override - public void flush(final StreamDescriptor desc, final Stream stream) throws Exception { - hasThrownError.set(true); - throw new IOException("Error on flush"); - } - - @Override - public long getOptimalBatchSizeBytes() { - return 1000; - } - - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/GlobalMemoryManagerTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/GlobalMemoryManagerTest.java deleted file mode 100644 index 9f79ed0554f69..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/GlobalMemoryManagerTest.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination_async; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import org.junit.jupiter.api.Test; - -public class GlobalMemoryManagerTest { - - private static final long BYTES_MB = 1024 * 1024; - - @Test - void test() { - final GlobalMemoryManager mgr = new GlobalMemoryManager(15 * BYTES_MB); - - assertEquals(10 * BYTES_MB, mgr.requestMemory()); - assertEquals(5 * BYTES_MB, mgr.requestMemory()); - assertEquals(0, mgr.requestMemory()); - - mgr.free(10 * BYTES_MB); - assertEquals(10 * BYTES_MB, mgr.requestMemory()); - mgr.free(16 * BYTES_MB); - assertEquals(10 * BYTES_MB, mgr.requestMemory()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/PartialAirbyteMessageTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/PartialAirbyteMessageTest.java deleted file mode 100644 index 2f65a926f5449..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/PartialAirbyteMessageTest.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination_async; - -import io.airbyte.cdk.integrations.destination_async.partial_messages.PartialAirbyteMessage; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.AirbyteStateMessage; -import io.airbyte.protocol.models.AirbyteStreamState; -import io.airbyte.protocol.models.StreamDescriptor; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import java.time.Instant; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -public class PartialAirbyteMessageTest { - - @Test - void testDeserializeRecord() { - final long emittedAt = Instant.now().toEpochMilli(); - final var serializedRec = Jsons.serialize(new AirbyteMessage() - .withType(AirbyteMessage.Type.RECORD) - .withRecord(new AirbyteRecordMessage() - .withStream("users") - .withNamespace("public") - .withEmittedAt(emittedAt) - .withData(Jsons.jsonNode("data")))); - - final var rec = Jsons.tryDeserialize(serializedRec, PartialAirbyteMessage.class).get(); - Assertions.assertEquals(AirbyteMessage.Type.RECORD, rec.getType()); - Assertions.assertEquals("users", rec.getRecord().getStream()); - Assertions.assertEquals("public", rec.getRecord().getNamespace()); - Assertions.assertEquals("\"data\"", rec.getRecord().getData().toString()); - Assertions.assertEquals(emittedAt, rec.getRecord().getEmittedAt()); - } - - @Test - void testDeserializeState() { - final var serializedState = Jsons.serialize(new io.airbyte.protocol.models.AirbyteMessage() - .withType(io.airbyte.protocol.models.AirbyteMessage.Type.STATE) - .withState(new AirbyteStateMessage().withStream( - new AirbyteStreamState().withStreamDescriptor( - new StreamDescriptor().withName("user").withNamespace("public")) - .withStreamState(Jsons.jsonNode("data"))) - .withType(AirbyteStateMessage.AirbyteStateType.STREAM))); - - final var rec = Jsons.tryDeserialize(serializedState, PartialAirbyteMessage.class).get(); - Assertions.assertEquals(AirbyteMessage.Type.STATE, rec.getType()); - - final var streamDesc = rec.getState().getStream().getStreamDescriptor(); - Assertions.assertEquals("user", streamDesc.getName()); - Assertions.assertEquals("public", streamDesc.getNamespace()); - Assertions.assertEquals(io.airbyte.protocol.models.v0.AirbyteStateMessage.AirbyteStateType.STREAM, rec.getState().getType()); - } - - @Test - void testGarbage() { - final var badSerialization = "messed up data"; - - final var rec = Jsons.tryDeserialize(badSerialization, PartialAirbyteMessage.class); - Assertions.assertTrue(rec.isEmpty()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/RunningFlushWorkersTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/RunningFlushWorkersTest.java deleted file mode 100644 index 8f2f28d3dbc30..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/RunningFlushWorkersTest.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination_async; - -import static org.assertj.core.api.Assertions.assertThat; -import static org.assertj.core.api.Assertions.assertThatThrownBy; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; - -import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.util.List; -import java.util.Optional; -import java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -public class RunningFlushWorkersTest { - - private static final long SIZE_10MB = 10 * 1024 * 1024; - - private static final UUID FLUSH_WORKER_ID1 = UUID.randomUUID(); - private static final UUID FLUSH_WORKER_ID2 = UUID.randomUUID(); - private static final StreamDescriptor STREAM1 = new StreamDescriptor().withNamespace("namespace1").withName("stream1"); - private static final StreamDescriptor STREAM2 = new StreamDescriptor().withNamespace("namespace2").withName("stream2"); - - private RunningFlushWorkers runningFlushWorkers; - - @BeforeEach - void setup() { - runningFlushWorkers = new RunningFlushWorkers(); - } - - @Test - void testTrackFlushWorker() { - assertThat(runningFlushWorkers.getSizesOfRunningWorkerBatches(STREAM1).size()).isEqualTo(0); - runningFlushWorkers.trackFlushWorker(STREAM1, FLUSH_WORKER_ID1); - assertThat(runningFlushWorkers.getSizesOfRunningWorkerBatches(STREAM1).size()).isEqualTo(1); - runningFlushWorkers.trackFlushWorker(STREAM1, FLUSH_WORKER_ID2); - runningFlushWorkers.trackFlushWorker(STREAM2, FLUSH_WORKER_ID1); - assertThat(runningFlushWorkers.getSizesOfRunningWorkerBatches(STREAM1).size()).isEqualTo(2); - } - - @Test - void testCompleteFlushWorker() { - runningFlushWorkers.trackFlushWorker(STREAM1, FLUSH_WORKER_ID1); - runningFlushWorkers.trackFlushWorker(STREAM1, FLUSH_WORKER_ID2); - runningFlushWorkers.completeFlushWorker(STREAM1, FLUSH_WORKER_ID1); - assertThat(runningFlushWorkers.getSizesOfRunningWorkerBatches(STREAM1).size()).isEqualTo(1); - runningFlushWorkers.completeFlushWorker(STREAM1, FLUSH_WORKER_ID2); - assertThat(runningFlushWorkers.getSizesOfRunningWorkerBatches(STREAM1).size()).isEqualTo(0); - } - - @Test - void testCompleteFlushWorkerWithoutTrackThrowsException() { - assertThatThrownBy(() -> runningFlushWorkers.completeFlushWorker(STREAM1, FLUSH_WORKER_ID1)) - .isInstanceOf(IllegalStateException.class) - .hasMessageContaining("Cannot complete flush worker for stream that has not started."); - } - - @Test - void testMultipleStreams() { - runningFlushWorkers.trackFlushWorker(STREAM1, FLUSH_WORKER_ID1); - runningFlushWorkers.trackFlushWorker(STREAM2, FLUSH_WORKER_ID1); - assertThat(runningFlushWorkers.getSizesOfRunningWorkerBatches(STREAM1).size()).isEqualTo(1); - assertThat(runningFlushWorkers.getSizesOfRunningWorkerBatches(STREAM2).size()).isEqualTo(1); - } - - @Test - void testGetSizesOfRunningWorkerBatches() { - runningFlushWorkers.trackFlushWorker(STREAM1, FLUSH_WORKER_ID1); - runningFlushWorkers.trackFlushWorker(STREAM1, FLUSH_WORKER_ID2); - runningFlushWorkers.trackFlushWorker(STREAM2, FLUSH_WORKER_ID1); - assertEquals(List.of(Optional.empty(), Optional.empty()), - runningFlushWorkers.getSizesOfRunningWorkerBatches(STREAM1)); - assertEquals(List.of(Optional.empty()), runningFlushWorkers.getSizesOfRunningWorkerBatches(STREAM2)); - assertThrows(IllegalStateException.class, () -> runningFlushWorkers.registerBatchSize(STREAM2, FLUSH_WORKER_ID2, SIZE_10MB)); - runningFlushWorkers.registerBatchSize(STREAM1, FLUSH_WORKER_ID1, SIZE_10MB); - runningFlushWorkers.registerBatchSize(STREAM1, FLUSH_WORKER_ID2, SIZE_10MB); - runningFlushWorkers.registerBatchSize(STREAM2, FLUSH_WORKER_ID1, SIZE_10MB); - assertEquals(List.of(Optional.of(SIZE_10MB), Optional.of(SIZE_10MB)), runningFlushWorkers.getSizesOfRunningWorkerBatches(STREAM1)); - assertEquals(List.of(Optional.of(SIZE_10MB)), runningFlushWorkers.getSizesOfRunningWorkerBatches(STREAM2)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/RunningSizeEstimateTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/RunningSizeEstimateTest.java deleted file mode 100644 index 57b360068a7a0..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/RunningSizeEstimateTest.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination_async; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.util.Collections; -import java.util.List; -import java.util.Optional; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -public class RunningSizeEstimateTest { - - private static final long SIZE_10MB = 10 * 1024 * 1024; - private static final long SIZE_20MB = 20 * 1024 * 1024; - private static final long SIZE_200MB = 200 * 1024 * 1024; - private static final StreamDescriptor DESC1 = new StreamDescriptor().withName("test1"); - - private static DestinationFlushFunction flusher; - - @BeforeEach - void setup() { - flusher = mock(DestinationFlushFunction.class); - when(flusher.getOptimalBatchSizeBytes()).thenReturn(SIZE_200MB); - } - - @Test - void testEstimateZeroWorkers() { - final RunningFlushWorkers runningFlushWorkers = mock(RunningFlushWorkers.class); - when(runningFlushWorkers.getSizesOfRunningWorkerBatches(any())).thenReturn(Collections.emptyList()); - final DetectStreamToFlush detect = new DetectStreamToFlush(null, runningFlushWorkers, null, flusher); - assertEquals(0, detect.estimateSizeOfRunningWorkers(DESC1, SIZE_10MB)); - } - - @Test - void testEstimateWorkerWithBatch() { - final RunningFlushWorkers runningFlushWorkers = mock(RunningFlushWorkers.class); - when(runningFlushWorkers.getSizesOfRunningWorkerBatches(any())).thenReturn(List.of(Optional.of(SIZE_20MB))); - final DetectStreamToFlush detect = new DetectStreamToFlush(null, runningFlushWorkers, null, flusher); - assertEquals(SIZE_20MB, detect.estimateSizeOfRunningWorkers(DESC1, SIZE_10MB)); - } - - @Test - void testEstimateWorkerWithoutBatchAndQueueLessThanOptimalSize() { - final RunningFlushWorkers runningFlushWorkers = mock(RunningFlushWorkers.class); - when(runningFlushWorkers.getSizesOfRunningWorkerBatches(any())).thenReturn(List.of(Optional.empty())); - final DetectStreamToFlush detect = new DetectStreamToFlush(null, runningFlushWorkers, null, flusher); - assertEquals(SIZE_10MB, detect.estimateSizeOfRunningWorkers(DESC1, SIZE_10MB)); - } - - @Test - void testEstimateWorkerWithoutBatchAndQueueGreaterThanOptimalSize() { - final RunningFlushWorkers runningFlushWorkers = mock(RunningFlushWorkers.class); - when(runningFlushWorkers.getSizesOfRunningWorkerBatches(any())).thenReturn(List.of(Optional.empty())); - final DetectStreamToFlush detect = new DetectStreamToFlush(null, runningFlushWorkers, null, flusher); - assertEquals(SIZE_200MB, detect.estimateSizeOfRunningWorkers(DESC1, SIZE_200MB + 1)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/SizeTriggerTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/SizeTriggerTest.java deleted file mode 100644 index 7f9a9b2a33970..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/SizeTriggerTest.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination_async; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import io.airbyte.cdk.integrations.destination_async.buffers.BufferDequeue; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.time.Instant; -import java.util.Collections; -import java.util.List; -import java.util.Optional; -import java.util.Set; -import java.util.concurrent.atomic.AtomicBoolean; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -public class SizeTriggerTest { - - public static final Instant NOW = Instant.now(); - public static final Instant FIVE_MIN_AGO = NOW.minusSeconds(60 * 5); - private static final long SIZE_10MB = 10 * 1024 * 1024; - private static final long SIZE_200MB = 200 * 1024 * 1024; - - private static final StreamDescriptor DESC1 = new StreamDescriptor().withName("test1"); - - private static DestinationFlushFunction flusher; - - @BeforeEach - void setup() { - flusher = mock(DestinationFlushFunction.class); - when(flusher.getOptimalBatchSizeBytes()).thenReturn(SIZE_200MB); - } - - @Test - void testSizeTriggerOnEmptyQueue() { - final BufferDequeue bufferDequeue = mock(BufferDequeue.class); - when(bufferDequeue.getBufferedStreams()).thenReturn(Set.of(DESC1)); - when(bufferDequeue.getQueueSizeBytes(DESC1)).thenReturn(Optional.of(0L)); - final RunningFlushWorkers runningFlushWorkers = mock(RunningFlushWorkers.class); - final DetectStreamToFlush detect = - new DetectStreamToFlush(bufferDequeue, runningFlushWorkers, new AtomicBoolean(false), flusher); - assertEquals(false, detect.isSizeTriggered(DESC1, SIZE_10MB).getLeft()); - } - - @Test - void testSizeTriggerRespectsThreshold() { - final BufferDequeue bufferDequeue = mock(BufferDequeue.class); - when(bufferDequeue.getBufferedStreams()).thenReturn(Set.of(DESC1)); - when(bufferDequeue.getQueueSizeBytes(DESC1)).thenReturn(Optional.of(1L)); - final RunningFlushWorkers runningFlushWorkers = mock(RunningFlushWorkers.class); - final DetectStreamToFlush detect = - new DetectStreamToFlush(bufferDequeue, runningFlushWorkers, new AtomicBoolean(false), flusher); - // if above threshold, triggers - assertEquals(true, detect.isSizeTriggered(DESC1, 0).getLeft()); - // if below threshold, no trigger - assertEquals(false, detect.isSizeTriggered(DESC1, SIZE_10MB).getLeft()); - } - - @Test - void testSizeTriggerRespectsRunningWorkersEstimate() { - final BufferDequeue bufferDequeue = mock(BufferDequeue.class); - when(bufferDequeue.getBufferedStreams()).thenReturn(Set.of(DESC1)); - when(bufferDequeue.getQueueSizeBytes(DESC1)).thenReturn(Optional.of(1L)); - final RunningFlushWorkers runningFlushWorkers = mock(RunningFlushWorkers.class); - when(runningFlushWorkers.getSizesOfRunningWorkerBatches(any())) - .thenReturn(Collections.emptyList()) - .thenReturn(List.of(Optional.of(SIZE_10MB))); - final DetectStreamToFlush detect = - new DetectStreamToFlush(bufferDequeue, runningFlushWorkers, new AtomicBoolean(false), flusher); - assertEquals(true, detect.isSizeTriggered(DESC1, 0).getLeft()); - assertEquals(false, detect.isSizeTriggered(DESC1, 0).getLeft()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/StreamPriorityTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/StreamPriorityTest.java deleted file mode 100644 index add92636f0472..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/StreamPriorityTest.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination_async; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import io.airbyte.cdk.integrations.destination_async.buffers.BufferDequeue; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.time.Instant; -import java.util.HashSet; -import java.util.List; -import java.util.Optional; -import java.util.Set; -import java.util.concurrent.atomic.AtomicBoolean; -import org.junit.jupiter.api.Test; - -public class StreamPriorityTest { - - public static final Instant NOW = Instant.now(); - public static final Instant FIVE_MIN_AGO = NOW.minusSeconds(60 * 5); - private static final StreamDescriptor DESC1 = new StreamDescriptor().withName("test1"); - private static final StreamDescriptor DESC2 = new StreamDescriptor().withName("test2"); - private static final Set DESCS = Set.of(DESC1, DESC2); - - @Test - void testOrderByPrioritySize() { - final BufferDequeue bufferDequeue = mock(BufferDequeue.class); - when(bufferDequeue.getQueueSizeBytes(DESC1)).thenReturn(Optional.of(1L)).thenReturn(Optional.of(0L)); - when(bufferDequeue.getQueueSizeBytes(DESC2)).thenReturn(Optional.of(0L)).thenReturn(Optional.of(1L)); - final DetectStreamToFlush detect = new DetectStreamToFlush(bufferDequeue, null, new AtomicBoolean(false), null); - - assertEquals(List.of(DESC1, DESC2), detect.orderStreamsByPriority(DESCS)); - assertEquals(List.of(DESC2, DESC1), detect.orderStreamsByPriority(DESCS)); - } - - @Test - void testOrderByPrioritySecondarySortByTime() { - final BufferDequeue bufferDequeue = mock(BufferDequeue.class); - when(bufferDequeue.getQueueSizeBytes(any())).thenReturn(Optional.of(0L)); - when(bufferDequeue.getTimeOfLastRecord(DESC1)).thenReturn(Optional.of(FIVE_MIN_AGO)).thenReturn(Optional.of(NOW)); - when(bufferDequeue.getTimeOfLastRecord(DESC2)).thenReturn(Optional.of(NOW)).thenReturn(Optional.of(FIVE_MIN_AGO)); - final DetectStreamToFlush detect = new DetectStreamToFlush(bufferDequeue, null, new AtomicBoolean(false), null); - assertEquals(List.of(DESC1, DESC2), detect.orderStreamsByPriority(DESCS)); - assertEquals(List.of(DESC2, DESC1), detect.orderStreamsByPriority(DESCS)); - } - - @Test - void testOrderByPriorityTertiarySortByName() { - final BufferDequeue bufferDequeue = mock(BufferDequeue.class); - when(bufferDequeue.getQueueSizeBytes(any())).thenReturn(Optional.of(0L)); - when(bufferDequeue.getTimeOfLastRecord(any())).thenReturn(Optional.of(NOW)); - final DetectStreamToFlush detect = new DetectStreamToFlush(bufferDequeue, null, new AtomicBoolean(false), null); - final List descs = List.of(Jsons.clone(DESC1), Jsons.clone(DESC2)); - assertEquals(List.of(descs.get(0), descs.get(1)), detect.orderStreamsByPriority(new HashSet<>(descs))); - descs.get(0).setName("test3"); - assertEquals(List.of(descs.get(1), descs.get(0)), detect.orderStreamsByPriority(new HashSet<>(descs))); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/TimeTriggerTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/TimeTriggerTest.java deleted file mode 100644 index 95545118e713b..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/TimeTriggerTest.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination_async; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import io.airbyte.cdk.integrations.destination_async.buffers.BufferDequeue; -import java.time.Clock; -import org.junit.jupiter.api.Test; - -public class TimeTriggerTest { - - private static final long NOW_MS = System.currentTimeMillis(); - private static final long ONE_SEC = 1000L; - private static final long FIVE_MIN = 5 * 60 * 1000; - - @Test - void testTimeTrigger() { - final BufferDequeue bufferDequeue = mock(BufferDequeue.class); - - final Clock mockedNowProvider = mock(Clock.class); - when(mockedNowProvider.millis()) - .thenReturn(NOW_MS); - - final DetectStreamToFlush detect = new DetectStreamToFlush(bufferDequeue, null, null, null, mockedNowProvider); - assertEquals(false, detect.isTimeTriggered(NOW_MS).getLeft()); - assertEquals(false, detect.isTimeTriggered(NOW_MS - ONE_SEC).getLeft()); - assertEquals(true, detect.isTimeTriggered(NOW_MS - FIVE_MIN).getLeft()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/buffers/BufferDequeueTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/buffers/BufferDequeueTest.java deleted file mode 100644 index 669579c7af968..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/buffers/BufferDequeueTest.java +++ /dev/null @@ -1,170 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination_async.buffers; - -import static io.airbyte.cdk.integrations.destination_async.GlobalMemoryManager.BLOCK_SIZE_BYTES; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import io.airbyte.cdk.integrations.destination_async.partial_messages.PartialAirbyteMessage; -import io.airbyte.cdk.integrations.destination_async.partial_messages.PartialAirbyteRecordMessage; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteMessage.Type; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.time.Instant; -import java.time.temporal.ChronoUnit; -import org.junit.jupiter.api.Nested; -import org.junit.jupiter.api.Test; - -public class BufferDequeueTest { - - private static final int RECORD_SIZE_20_BYTES = 20; - private static final String DEFAULT_NAMESPACE = "foo_namespace"; - private static final String STREAM_NAME = "stream1"; - private static final StreamDescriptor STREAM_DESC = new StreamDescriptor().withName(STREAM_NAME); - private static final PartialAirbyteMessage RECORD_MSG_20_BYTES = new PartialAirbyteMessage() - .withType(Type.RECORD) - .withRecord(new PartialAirbyteRecordMessage() - .withStream(STREAM_NAME)); - - @Nested - class Take { - - @Test - void testTakeShouldBestEffortRead() { - final BufferManager bufferManager = new BufferManager(); - final BufferEnqueue enqueue = bufferManager.getBufferEnqueue(); - final BufferDequeue dequeue = bufferManager.getBufferDequeue(); - - enqueue.addRecord(RECORD_MSG_20_BYTES, RECORD_SIZE_20_BYTES, DEFAULT_NAMESPACE); - enqueue.addRecord(RECORD_MSG_20_BYTES, RECORD_SIZE_20_BYTES, DEFAULT_NAMESPACE); - enqueue.addRecord(RECORD_MSG_20_BYTES, RECORD_SIZE_20_BYTES, DEFAULT_NAMESPACE); - enqueue.addRecord(RECORD_MSG_20_BYTES, RECORD_SIZE_20_BYTES, DEFAULT_NAMESPACE); - - // total size of records is 80, so we expect 50 to get us 2 records (prefer to under-pull records - // than over-pull). - try (final MemoryAwareMessageBatch take = dequeue.take(STREAM_DESC, 50)) { - assertEquals(2, take.getData().size()); - // verify it only took the records from the queue that it actually returned. - assertEquals(2, dequeue.getQueueSizeInRecords(STREAM_DESC).orElseThrow()); - } catch (final Exception e) { - throw new RuntimeException(e); - } - } - - @Test - void testTakeShouldReturnAllIfPossible() { - final BufferManager bufferManager = new BufferManager(); - final BufferEnqueue enqueue = bufferManager.getBufferEnqueue(); - final BufferDequeue dequeue = bufferManager.getBufferDequeue(); - - enqueue.addRecord(RECORD_MSG_20_BYTES, RECORD_SIZE_20_BYTES, DEFAULT_NAMESPACE); - enqueue.addRecord(RECORD_MSG_20_BYTES, RECORD_SIZE_20_BYTES, DEFAULT_NAMESPACE); - enqueue.addRecord(RECORD_MSG_20_BYTES, RECORD_SIZE_20_BYTES, DEFAULT_NAMESPACE); - - try (final MemoryAwareMessageBatch take = dequeue.take(STREAM_DESC, 60)) { - assertEquals(3, take.getData().size()); - } catch (final Exception e) { - throw new RuntimeException(e); - } - } - - @Test - void testTakeFewerRecordsThanSizeLimitShouldNotError() { - final BufferManager bufferManager = new BufferManager(); - final BufferEnqueue enqueue = bufferManager.getBufferEnqueue(); - final BufferDequeue dequeue = bufferManager.getBufferDequeue(); - - enqueue.addRecord(RECORD_MSG_20_BYTES, RECORD_SIZE_20_BYTES, DEFAULT_NAMESPACE); - enqueue.addRecord(RECORD_MSG_20_BYTES, RECORD_SIZE_20_BYTES, DEFAULT_NAMESPACE); - - try (final MemoryAwareMessageBatch take = dequeue.take(STREAM_DESC, Long.MAX_VALUE)) { - assertEquals(2, take.getData().size()); - } catch (final Exception e) { - throw new RuntimeException(e); - } - } - - } - - @Test - void testMetadataOperationsCorrect() { - final BufferManager bufferManager = new BufferManager(); - final BufferEnqueue enqueue = bufferManager.getBufferEnqueue(); - final BufferDequeue dequeue = bufferManager.getBufferDequeue(); - - enqueue.addRecord(RECORD_MSG_20_BYTES, RECORD_SIZE_20_BYTES, DEFAULT_NAMESPACE); - enqueue.addRecord(RECORD_MSG_20_BYTES, RECORD_SIZE_20_BYTES, DEFAULT_NAMESPACE); - - final var secondStream = new StreamDescriptor().withName("stream_2"); - final PartialAirbyteMessage recordFromSecondStream = Jsons.clone(RECORD_MSG_20_BYTES); - recordFromSecondStream.getRecord().withStream(secondStream.getName()); - enqueue.addRecord(recordFromSecondStream, RECORD_SIZE_20_BYTES, DEFAULT_NAMESPACE); - - assertEquals(60, dequeue.getTotalGlobalQueueSizeBytes()); - - assertEquals(2, dequeue.getQueueSizeInRecords(STREAM_DESC).get()); - assertEquals(1, dequeue.getQueueSizeInRecords(secondStream).get()); - - assertEquals(40, dequeue.getQueueSizeBytes(STREAM_DESC).get()); - assertEquals(20, dequeue.getQueueSizeBytes(secondStream).get()); - - // Buffer of 3 sec to deal with test execution variance. - final var lastThreeSec = Instant.now().minus(3, ChronoUnit.SECONDS); - assertTrue(lastThreeSec.isBefore(dequeue.getTimeOfLastRecord(STREAM_DESC).get())); - assertTrue(lastThreeSec.isBefore(dequeue.getTimeOfLastRecord(secondStream).get())); - } - - @Test - void testMetadataOperationsError() { - final BufferManager bufferManager = new BufferManager(); - final BufferDequeue dequeue = bufferManager.getBufferDequeue(); - - final var ghostStream = new StreamDescriptor().withName("ghost stream"); - - assertEquals(0, dequeue.getTotalGlobalQueueSizeBytes()); - - assertTrue(dequeue.getQueueSizeInRecords(ghostStream).isEmpty()); - - assertTrue(dequeue.getQueueSizeBytes(ghostStream).isEmpty()); - - assertTrue(dequeue.getTimeOfLastRecord(ghostStream).isEmpty()); - } - - @Test - void cleansUpMemoryForEmptyQueues() throws Exception { - final var bufferManager = new BufferManager(); - final var enqueue = bufferManager.getBufferEnqueue(); - final var dequeue = bufferManager.getBufferDequeue(); - final var memoryManager = bufferManager.getMemoryManager(); - - // we initialize with a block for state - assertEquals(BLOCK_SIZE_BYTES, memoryManager.getCurrentMemoryBytes()); - - // allocate a block for new stream - enqueue.addRecord(RECORD_MSG_20_BYTES, RECORD_SIZE_20_BYTES, DEFAULT_NAMESPACE); - assertEquals(2 * BLOCK_SIZE_BYTES, memoryManager.getCurrentMemoryBytes()); - - enqueue.addRecord(RECORD_MSG_20_BYTES, RECORD_SIZE_20_BYTES, DEFAULT_NAMESPACE); - enqueue.addRecord(RECORD_MSG_20_BYTES, RECORD_SIZE_20_BYTES, DEFAULT_NAMESPACE); - enqueue.addRecord(RECORD_MSG_20_BYTES, RECORD_SIZE_20_BYTES, DEFAULT_NAMESPACE); - - // no re-allocates as we haven't breached block size - assertEquals(2 * BLOCK_SIZE_BYTES, memoryManager.getCurrentMemoryBytes()); - - final var totalBatchSize = RECORD_SIZE_20_BYTES * 4; - - // read the whole queue - try (final var batch = dequeue.take(STREAM_DESC, totalBatchSize)) { - // slop allocation gets cleaned up - assertEquals(BLOCK_SIZE_BYTES + totalBatchSize, memoryManager.getCurrentMemoryBytes()); - batch.close(); - // back to initial state after flush clears the batch - assertEquals(BLOCK_SIZE_BYTES, memoryManager.getCurrentMemoryBytes()); - assertEquals(0, bufferManager.getBuffers().get(STREAM_DESC).getMaxMemoryUsage()); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/buffers/BufferEnqueueTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/buffers/BufferEnqueueTest.java deleted file mode 100644 index a555c403e5c07..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/buffers/BufferEnqueueTest.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination_async.buffers; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.Mockito.mock; - -import io.airbyte.cdk.integrations.destination_async.GlobalMemoryManager; -import io.airbyte.cdk.integrations.destination_async.partial_messages.PartialAirbyteMessage; -import io.airbyte.cdk.integrations.destination_async.partial_messages.PartialAirbyteRecordMessage; -import io.airbyte.cdk.integrations.destination_async.state.GlobalAsyncStateManager; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.util.concurrent.ConcurrentHashMap; -import org.junit.jupiter.api.Test; - -public class BufferEnqueueTest { - - private static final int RECORD_SIZE_20_BYTES = 20; - private static final String DEFAULT_NAMESPACE = "foo_namespace"; - - @Test - void testAddRecordShouldAdd() { - final var twoMB = 2 * 1024 * 1024; - final var streamToBuffer = new ConcurrentHashMap(); - final var enqueue = new BufferEnqueue(new GlobalMemoryManager(twoMB), streamToBuffer, mock(GlobalAsyncStateManager.class)); - - final var streamName = "stream"; - final var stream = new StreamDescriptor().withName(streamName); - final var record = new PartialAirbyteMessage() - .withType(AirbyteMessage.Type.RECORD) - .withRecord(new PartialAirbyteRecordMessage() - .withStream(streamName)); - - enqueue.addRecord(record, RECORD_SIZE_20_BYTES, DEFAULT_NAMESPACE); - assertEquals(1, streamToBuffer.get(stream).size()); - assertEquals(20L, streamToBuffer.get(stream).getCurrentMemoryUsage()); - - } - - @Test - public void testAddRecordShouldExpand() { - final var oneKb = 1024; - final var streamToBuffer = new ConcurrentHashMap(); - final var enqueue = - new BufferEnqueue(new GlobalMemoryManager(oneKb), streamToBuffer, mock(GlobalAsyncStateManager.class)); - - final var streamName = "stream"; - final var stream = new StreamDescriptor().withName(streamName); - final var record = new PartialAirbyteMessage() - .withType(AirbyteMessage.Type.RECORD) - .withRecord(new PartialAirbyteRecordMessage() - .withStream(streamName)); - - enqueue.addRecord(record, RECORD_SIZE_20_BYTES, DEFAULT_NAMESPACE); - enqueue.addRecord(record, RECORD_SIZE_20_BYTES, DEFAULT_NAMESPACE); - assertEquals(2, streamToBuffer.get(stream).size()); - assertEquals(40, streamToBuffer.get(stream).getCurrentMemoryUsage()); - - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/buffers/MemoryBoundedLinkedBlockingQueueTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/buffers/MemoryBoundedLinkedBlockingQueueTest.java deleted file mode 100644 index bcd57d56e4219..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/buffers/MemoryBoundedLinkedBlockingQueueTest.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination_async.buffers; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import java.util.concurrent.TimeUnit; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.ValueSource; - -public class MemoryBoundedLinkedBlockingQueueTest { - - @Test - void offerAndTakeShouldReturn() throws InterruptedException { - final MemoryBoundedLinkedBlockingQueue queue = new MemoryBoundedLinkedBlockingQueue<>(1024); - - queue.offer("abc", 6); - - final var item = queue.take(); - - assertEquals("abc", item.item()); - } - - @Test - void testBlocksOnFullMemory() throws InterruptedException { - final MemoryBoundedLinkedBlockingQueue queue = new MemoryBoundedLinkedBlockingQueue<>(10); - assertTrue(queue.offer("abc", 6)); - assertFalse(queue.offer("abc", 6)); - - assertNotNull(queue.poll(1, TimeUnit.NANOSECONDS)); - assertNull(queue.poll(1, TimeUnit.NANOSECONDS)); - } - - @ParameterizedTest - @ValueSource(longs = {1024, 100000, 600}) - void getMaxMemoryUsage(final long size) { - final MemoryBoundedLinkedBlockingQueue queue = new MemoryBoundedLinkedBlockingQueue<>(size); - - assertEquals(0, queue.getCurrentMemoryUsage()); - assertEquals(size, queue.getMaxMemoryUsage()); - - queue.addMaxMemory(-100); - - assertEquals(size - 100, queue.getMaxMemoryUsage()); - - queue.addMaxMemory(123); - - assertEquals(size - 100 + 123, queue.getMaxMemoryUsage()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/buffers/StreamAwareQueueTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/buffers/StreamAwareQueueTest.java deleted file mode 100644 index 39a32d0b5de97..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/buffers/StreamAwareQueueTest.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination_async.buffers; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import io.airbyte.cdk.integrations.destination_async.partial_messages.PartialAirbyteMessage; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.ValueSource; - -public class StreamAwareQueueTest { - - @Test - void test() throws InterruptedException { - final StreamAwareQueue queue = new StreamAwareQueue(1024); - - assertEquals(0, queue.getCurrentMemoryUsage()); - assertNull(queue.getTimeOfLastMessage().orElse(null)); - - queue.offer(new PartialAirbyteMessage(), 6, 1); - queue.offer(new PartialAirbyteMessage(), 6, 2); - queue.offer(new PartialAirbyteMessage(), 6, 3); - - assertEquals(18, queue.getCurrentMemoryUsage()); - assertNotNull(queue.getTimeOfLastMessage().orElse(null)); - - queue.take(); - queue.take(); - queue.take(); - - assertEquals(0, queue.getCurrentMemoryUsage()); - // This should be null because the queue is empty - assertTrue(queue.getTimeOfLastMessage().isEmpty(), "Expected empty optional; got " + queue.getTimeOfLastMessage()); - } - - @ParameterizedTest - @ValueSource(longs = {1024, 100000, 600}) - void getMaxMemoryUsage(final long size) { - final StreamAwareQueue queue = new StreamAwareQueue(size); - - assertEquals(0, queue.getCurrentMemoryUsage()); - assertEquals(size, queue.getMaxMemoryUsage()); - - queue.addMaxMemory(-100); - - assertEquals(size - 100, queue.getMaxMemoryUsage()); - - queue.addMaxMemory(123); - - assertEquals(size - 100 + 123, queue.getMaxMemoryUsage()); - } - - @Test - void isEmpty() { - final StreamAwareQueue queue = new StreamAwareQueue(1024); - - assertTrue(queue.isEmpty()); - - queue.offer(new PartialAirbyteMessage(), 10, 1); - - assertFalse(queue.isEmpty()); - - queue.offer(new PartialAirbyteMessage(), 10, 1); - queue.offer(new PartialAirbyteMessage(), 10, 1); - queue.offer(new PartialAirbyteMessage(), 10, 1); - - assertFalse(queue.isEmpty()); - - queue.poll(); - queue.poll(); - queue.poll(); - queue.poll(); - - assertTrue(queue.isEmpty()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/state/GlobalAsyncStateManagerTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/state/GlobalAsyncStateManagerTest.java deleted file mode 100644 index b77c4419cd1cb..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/destination_async/state/GlobalAsyncStateManagerTest.java +++ /dev/null @@ -1,508 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination_async.state; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.cdk.integrations.destination_async.GlobalMemoryManager; -import io.airbyte.cdk.integrations.destination_async.partial_messages.PartialAirbyteMessage; -import io.airbyte.cdk.integrations.destination_async.partial_messages.PartialAirbyteStateMessage; -import io.airbyte.cdk.integrations.destination_async.partial_messages.PartialAirbyteStreamState; -import io.airbyte.protocol.models.Jsons; -import io.airbyte.protocol.models.v0.*; -import io.airbyte.protocol.models.v0.AirbyteMessage.Type; -import io.airbyte.protocol.models.v0.AirbyteStateMessage.AirbyteStateType; -import java.util.*; -import java.util.stream.Collectors; -import org.junit.jupiter.api.Nested; -import org.junit.jupiter.api.Test; - -class GlobalAsyncStateManagerTest { - - private static final long TOTAL_QUEUES_MAX_SIZE_LIMIT_BYTES = 100 * 1024 * 1024; // 10MB - private static final String DEFAULT_NAMESPACE = "foo_namespace"; - private static final long STATE_MSG_SIZE = 1000; - - private static final String NAMESPACE = "namespace"; - private static final String STREAM_NAME = "id_and_name"; - private static final String STREAM_NAME2 = STREAM_NAME + 2; - private static final String STREAM_NAME3 = STREAM_NAME + 3; - private static final StreamDescriptor STREAM1_DESC = new StreamDescriptor() - .withName(STREAM_NAME).withNamespace(NAMESPACE); - private static final StreamDescriptor STREAM2_DESC = new StreamDescriptor() - .withName(STREAM_NAME2).withNamespace(NAMESPACE); - private static final StreamDescriptor STREAM3_DESC = new StreamDescriptor() - .withName(STREAM_NAME3).withNamespace(NAMESPACE); - - private static final PartialAirbyteMessage GLOBAL_STATE_MESSAGE1 = new PartialAirbyteMessage() - .withType(Type.STATE) - .withState(new PartialAirbyteStateMessage() - .withType(AirbyteStateType.GLOBAL)) - .withSerialized(serializedState(STREAM1_DESC, AirbyteStateType.GLOBAL, Jsons.jsonNode(ImmutableMap.of("cursor", 1)))); - private static final PartialAirbyteMessage GLOBAL_STATE_MESSAGE2 = new PartialAirbyteMessage() - .withType(Type.STATE) - .withState(new PartialAirbyteStateMessage() - .withType(AirbyteStateType.GLOBAL)) - .withSerialized(serializedState(STREAM2_DESC, AirbyteStateType.GLOBAL, Jsons.jsonNode(ImmutableMap.of("cursor", 2)))); - - private static final PartialAirbyteMessage GLOBAL_STATE_MESSAGE3 = new PartialAirbyteMessage() - .withType(Type.STATE) - .withState(new PartialAirbyteStateMessage() - .withType(AirbyteStateType.GLOBAL)) - .withSerialized(serializedState(STREAM3_DESC, AirbyteStateType.GLOBAL, Jsons.jsonNode(ImmutableMap.of("cursor", 2)))); - private static final PartialAirbyteMessage STREAM1_STATE_MESSAGE1 = new PartialAirbyteMessage() - .withType(Type.STATE) - .withState(new PartialAirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream(new PartialAirbyteStreamState().withStreamDescriptor(STREAM1_DESC))) - .withSerialized(serializedState(STREAM1_DESC, AirbyteStateType.STREAM, Jsons.jsonNode(ImmutableMap.of("cursor", 1)))); - private static final PartialAirbyteMessage STREAM1_STATE_MESSAGE2 = new PartialAirbyteMessage() - .withType(Type.STATE) - .withState(new PartialAirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream(new PartialAirbyteStreamState().withStreamDescriptor(STREAM1_DESC))) - .withSerialized(serializedState(STREAM1_DESC, AirbyteStateType.STREAM, Jsons.jsonNode(ImmutableMap.of("cursor", 2)))); - - private static final PartialAirbyteMessage STREAM1_STATE_MESSAGE3 = new PartialAirbyteMessage() - .withType(Type.STATE) - .withState(new PartialAirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream(new PartialAirbyteStreamState().withStreamDescriptor(STREAM1_DESC))) - .withSerialized(serializedState(STREAM1_DESC, AirbyteStateType.STREAM, Jsons.jsonNode(ImmutableMap.of("cursor", 3)))); - private static final PartialAirbyteMessage STREAM2_STATE_MESSAGE = new PartialAirbyteMessage() - .withType(Type.STATE) - .withState(new PartialAirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream(new PartialAirbyteStreamState().withStreamDescriptor(STREAM2_DESC))) - .withSerialized(serializedState(STREAM2_DESC, AirbyteStateType.STREAM, Jsons.jsonNode(ImmutableMap.of("cursor", 4)))); - - public static String serializedState(final StreamDescriptor streamDescriptor, final AirbyteStateType type, final JsonNode state) { - switch (type) { - case GLOBAL -> { - return Jsons.serialize(new AirbyteMessage().withType(Type.STATE).withState( - new AirbyteStateMessage() - .withType(AirbyteStateType.GLOBAL) - .withGlobal(new AirbyteGlobalState() - .withSharedState(state) - .withStreamStates(Collections.singletonList(new AirbyteStreamState() - .withStreamState(Jsons.emptyObject()) - .withStreamDescriptor(streamDescriptor)))))); - - } - case STREAM -> { - return Jsons.serialize(new AirbyteMessage().withType(Type.STATE).withState( - new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState() - .withStreamState(state) - .withStreamDescriptor(streamDescriptor)))); - } - default -> throw new RuntimeException("LEGACY STATE NOT SUPPORTED"); - } - } - - @Test - void testBasic() { - final List emittedStatesFromDestination = new ArrayList<>(); - final GlobalAsyncStateManager stateManager = - new GlobalAsyncStateManager(new GlobalMemoryManager(TOTAL_QUEUES_MAX_SIZE_LIMIT_BYTES)); - - final var firstStateId = stateManager.getStateIdAndIncrementCounter(STREAM1_DESC); - final var secondStateId = stateManager.getStateIdAndIncrementCounter(STREAM1_DESC); - assertEquals(firstStateId, secondStateId); - - stateManager.decrement(firstStateId, 2); - stateManager.flushStates(emittedStatesFromDestination::add); - // because no state message has been tracked, there is nothing to flush yet. - final Map stateWithStats = - emittedStatesFromDestination.stream() - .collect(Collectors.toMap(c -> c, c -> c.getState().getDestinationStats())); - assertEquals(0, stateWithStats.size()); - - stateManager.trackState(STREAM1_STATE_MESSAGE1, STATE_MSG_SIZE, DEFAULT_NAMESPACE); - stateManager.flushStates(emittedStatesFromDestination::add); - - final AirbyteStateStats expectedDestinationStats = new AirbyteStateStats().withRecordCount(2.0); - final Map stateWithStats2 = - emittedStatesFromDestination.stream() - .collect(Collectors.toMap(c -> c, c -> c.getState().getDestinationStats())); - assertEquals( - List.of( - attachDestinationStateStats(Jsons.deserialize(STREAM1_STATE_MESSAGE1.getSerialized(), AirbyteMessage.class), expectedDestinationStats)), - stateWithStats2.keySet().stream().toList()); - assertEquals(List.of(expectedDestinationStats), stateWithStats2.values().stream().toList()); - } - - public AirbyteMessage attachDestinationStateStats(final AirbyteMessage stateMessage, final AirbyteStateStats airbyteStateStats) { - stateMessage.getState().withDestinationStats(airbyteStateStats); - return stateMessage; - } - - @Nested - class GlobalState { - - @Test - void testEmptyQueuesGlobalState() { - final List emittedStatesFromDestination = new ArrayList<>(); - final GlobalAsyncStateManager stateManager = - new GlobalAsyncStateManager(new GlobalMemoryManager(TOTAL_QUEUES_MAX_SIZE_LIMIT_BYTES)); - - // GLOBAL - stateManager.trackState(GLOBAL_STATE_MESSAGE1, STATE_MSG_SIZE, DEFAULT_NAMESPACE); - stateManager.flushStates(emittedStatesFromDestination::add); - final AirbyteStateStats expectedDestinationStats = new AirbyteStateStats().withRecordCount(0.0); - final Map stateWithStats = - emittedStatesFromDestination.stream() - .collect(Collectors.toMap(c -> c, c -> c.getState().getDestinationStats())); - // - assertEquals( - List.of( - attachDestinationStateStats(Jsons.deserialize(GLOBAL_STATE_MESSAGE1.getSerialized(), AirbyteMessage.class), expectedDestinationStats)), - stateWithStats.keySet().stream().toList()); - assertEquals(List.of(expectedDestinationStats), stateWithStats.values().stream().toList()); - - assertThrows(IllegalArgumentException.class, () -> stateManager.trackState(STREAM1_STATE_MESSAGE1, STATE_MSG_SIZE, DEFAULT_NAMESPACE)); - } - - @Test - void testConversion() { - final List emittedStatesFromDestination = new ArrayList<>(); - final GlobalAsyncStateManager stateManager = - new GlobalAsyncStateManager(new GlobalMemoryManager(TOTAL_QUEUES_MAX_SIZE_LIMIT_BYTES)); - - final var preConvertId0 = simulateIncomingRecords(STREAM1_DESC, 10, stateManager); - final var preConvertId1 = simulateIncomingRecords(STREAM2_DESC, 10, stateManager); - final var preConvertId2 = simulateIncomingRecords(STREAM3_DESC, 10, stateManager); - assertEquals(3, Set.of(preConvertId0, preConvertId1, preConvertId2).size()); - - stateManager.trackState(GLOBAL_STATE_MESSAGE1, STATE_MSG_SIZE, DEFAULT_NAMESPACE); - - // Since this is actually a global state, we can only flush after all streams are done. - stateManager.decrement(preConvertId0, 10); - stateManager.flushStates(emittedStatesFromDestination::add); - assertEquals(0, emittedStatesFromDestination.size()); - stateManager.decrement(preConvertId1, 10); - stateManager.flushStates(emittedStatesFromDestination::add); - assertEquals(0, emittedStatesFromDestination.size()); - stateManager.decrement(preConvertId2, 10); - stateManager.flushStates(emittedStatesFromDestination::add); - final AirbyteStateStats expectedDestinationStats = new AirbyteStateStats().withRecordCount(30.0); - final Map stateWithStats = - emittedStatesFromDestination.stream() - .collect(Collectors.toMap(c -> c, c -> c.getState().getDestinationStats())); - assertEquals( - List.of( - attachDestinationStateStats(Jsons.deserialize(GLOBAL_STATE_MESSAGE1.getSerialized(), AirbyteMessage.class), expectedDestinationStats)), - stateWithStats.keySet().stream().toList()); - assertEquals(List.of(expectedDestinationStats), stateWithStats.values().stream().toList()); - - } - - @Test - void testCorrectFlushingOneStream() { - final List emittedStatesFromDestination = new ArrayList<>(); - final GlobalAsyncStateManager stateManager = - new GlobalAsyncStateManager(new GlobalMemoryManager(TOTAL_QUEUES_MAX_SIZE_LIMIT_BYTES)); - - final var preConvertId0 = simulateIncomingRecords(STREAM1_DESC, 10, stateManager); - stateManager.trackState(GLOBAL_STATE_MESSAGE1, STATE_MSG_SIZE, DEFAULT_NAMESPACE); - stateManager.decrement(preConvertId0, 10); - stateManager.flushStates(emittedStatesFromDestination::add); - final AirbyteStateStats expectedDestinationStats = new AirbyteStateStats().withRecordCount(10.0); - final Map stateWithStats = - emittedStatesFromDestination.stream() - .collect(Collectors.toMap(c -> c, c -> c.getState().getDestinationStats())); - assertEquals( - List.of( - attachDestinationStateStats(Jsons.deserialize(GLOBAL_STATE_MESSAGE1.getSerialized(), AirbyteMessage.class), expectedDestinationStats)), - stateWithStats.keySet().stream().toList()); - assertEquals(List.of(expectedDestinationStats), stateWithStats.values().stream().toList()); - - emittedStatesFromDestination.clear(); - - final var afterConvertId1 = simulateIncomingRecords(STREAM1_DESC, 10, stateManager); - stateManager.trackState(GLOBAL_STATE_MESSAGE2, STATE_MSG_SIZE, DEFAULT_NAMESPACE); - stateManager.decrement(afterConvertId1, 10); - stateManager.flushStates(emittedStatesFromDestination::add); - final Map stateWithStats2 = - emittedStatesFromDestination.stream() - .collect(Collectors.toMap(c -> c, c -> c.getState().getDestinationStats())); - assertEquals( - List.of( - attachDestinationStateStats(Jsons.deserialize(GLOBAL_STATE_MESSAGE2.getSerialized(), AirbyteMessage.class), expectedDestinationStats)), - stateWithStats2.keySet().stream().toList()); - assertEquals(List.of(expectedDestinationStats), stateWithStats2.values().stream().toList()); - } - - @Test - void testZeroRecordFlushing() { - final List emittedStatesFromDestination = new ArrayList<>(); - final GlobalAsyncStateManager stateManager = - new GlobalAsyncStateManager(new GlobalMemoryManager(TOTAL_QUEUES_MAX_SIZE_LIMIT_BYTES)); - - final var preConvertId0 = simulateIncomingRecords(STREAM1_DESC, 10, stateManager); - stateManager.trackState(GLOBAL_STATE_MESSAGE1, STATE_MSG_SIZE, DEFAULT_NAMESPACE); - stateManager.decrement(preConvertId0, 10); - stateManager.flushStates(emittedStatesFromDestination::add); - final AirbyteStateStats expectedDestinationStats = new AirbyteStateStats().withRecordCount(10.0); - final Map stateWithStats = - emittedStatesFromDestination.stream() - .collect(Collectors.toMap(c -> c, c -> c.getState().getDestinationStats())); - assertEquals( - List.of( - attachDestinationStateStats(Jsons.deserialize(GLOBAL_STATE_MESSAGE1.getSerialized(), AirbyteMessage.class), expectedDestinationStats)), - stateWithStats.keySet().stream().toList()); - assertEquals(List.of(expectedDestinationStats), stateWithStats.values().stream().toList()); - emittedStatesFromDestination.clear(); - - stateManager.trackState(GLOBAL_STATE_MESSAGE2, STATE_MSG_SIZE, DEFAULT_NAMESPACE); - stateManager.flushStates(emittedStatesFromDestination::add); - final AirbyteStateStats expectedDestinationStats2 = new AirbyteStateStats().withRecordCount(0.0); - final Map stateWithStats2 = - emittedStatesFromDestination.stream() - .collect(Collectors.toMap(c -> c, c -> c.getState().getDestinationStats())); - assertEquals( - List.of( - attachDestinationStateStats(Jsons.deserialize(GLOBAL_STATE_MESSAGE2.getSerialized(), AirbyteMessage.class), expectedDestinationStats2)), - stateWithStats2.keySet().stream().toList()); - assertEquals(List.of(expectedDestinationStats2), stateWithStats2.values().stream().toList()); - emittedStatesFromDestination.clear(); - - final var afterConvertId2 = simulateIncomingRecords(STREAM1_DESC, 10, stateManager); - stateManager.trackState(GLOBAL_STATE_MESSAGE3, STATE_MSG_SIZE, DEFAULT_NAMESPACE); - stateManager.decrement(afterConvertId2, 10); - stateManager.flushStates(emittedStatesFromDestination::add); - final Map stateWithStats3 = - emittedStatesFromDestination.stream() - .collect(Collectors.toMap(c -> c, c -> c.getState().getDestinationStats())); - assertEquals( - List.of( - attachDestinationStateStats(Jsons.deserialize(GLOBAL_STATE_MESSAGE3.getSerialized(), AirbyteMessage.class), expectedDestinationStats)), - stateWithStats3.keySet().stream().toList()); - assertEquals(List.of(expectedDestinationStats), stateWithStats3.values().stream().toList()); - } - - @Test - void testCorrectFlushingManyStreams() { - final List emittedStatesFromDestination = new ArrayList<>(); - final GlobalAsyncStateManager stateManager = - new GlobalAsyncStateManager(new GlobalMemoryManager(TOTAL_QUEUES_MAX_SIZE_LIMIT_BYTES)); - - final var preConvertId0 = simulateIncomingRecords(STREAM1_DESC, 10, stateManager); - final var preConvertId1 = simulateIncomingRecords(STREAM2_DESC, 10, stateManager); - assertNotEquals(preConvertId0, preConvertId1); - stateManager.trackState(GLOBAL_STATE_MESSAGE1, STATE_MSG_SIZE, DEFAULT_NAMESPACE); - stateManager.decrement(preConvertId0, 10); - stateManager.decrement(preConvertId1, 10); - stateManager.flushStates(emittedStatesFromDestination::add); - final AirbyteStateStats expectedDestinationStats = new AirbyteStateStats().withRecordCount(20.0); - final Map stateWithStats = - emittedStatesFromDestination.stream() - .collect(Collectors.toMap(c -> c, c -> c.getState().getDestinationStats())); - assertEquals( - List.of( - attachDestinationStateStats(Jsons.deserialize(GLOBAL_STATE_MESSAGE1.getSerialized(), AirbyteMessage.class), expectedDestinationStats)), - stateWithStats.keySet().stream().toList()); - assertEquals(List.of(expectedDestinationStats), stateWithStats.values().stream().toList()); - emittedStatesFromDestination.clear(); - - final var afterConvertId0 = simulateIncomingRecords(STREAM1_DESC, 10, stateManager); - final var afterConvertId1 = simulateIncomingRecords(STREAM2_DESC, 10, stateManager); - assertEquals(afterConvertId0, afterConvertId1); - stateManager.trackState(GLOBAL_STATE_MESSAGE2, STATE_MSG_SIZE, DEFAULT_NAMESPACE); - stateManager.decrement(afterConvertId0, 20); - stateManager.flushStates(emittedStatesFromDestination::add); - final Map stateWithStats2 = - emittedStatesFromDestination.stream() - .collect(Collectors.toMap(c -> c, c -> c.getState().getDestinationStats())); - assertEquals( - List.of( - attachDestinationStateStats(Jsons.deserialize(GLOBAL_STATE_MESSAGE2.getSerialized(), AirbyteMessage.class), expectedDestinationStats)), - stateWithStats2.keySet().stream().toList()); - assertEquals(List.of(expectedDestinationStats), stateWithStats2.values().stream().toList()); - } - - } - - @Nested - class PerStreamState { - - @Test - void testEmptyQueues() { - final List emittedStatesFromDestination = new ArrayList<>(); - final GlobalAsyncStateManager stateManager = - new GlobalAsyncStateManager(new GlobalMemoryManager(TOTAL_QUEUES_MAX_SIZE_LIMIT_BYTES)); - - // GLOBAL - stateManager.trackState(STREAM1_STATE_MESSAGE1, STATE_MSG_SIZE, DEFAULT_NAMESPACE); - stateManager.flushStates(emittedStatesFromDestination::add); - final AirbyteStateStats expectedDestinationStats = new AirbyteStateStats().withRecordCount(0.0); - final Map stateWithStats = - emittedStatesFromDestination.stream() - .collect(Collectors.toMap(c -> c, c -> c.getState().getDestinationStats())); - assertEquals( - List.of( - attachDestinationStateStats(Jsons.deserialize(STREAM1_STATE_MESSAGE1.getSerialized(), AirbyteMessage.class), expectedDestinationStats)), - stateWithStats.keySet().stream().toList()); - assertEquals(List.of(expectedDestinationStats), stateWithStats.values().stream().toList()); - - assertThrows(IllegalArgumentException.class, () -> stateManager.trackState(GLOBAL_STATE_MESSAGE1, STATE_MSG_SIZE, DEFAULT_NAMESPACE)); - } - - @Test - void testCorrectFlushingOneStream() { - final List emittedStatesFromDestination = new ArrayList<>(); - final GlobalAsyncStateManager stateManager = - new GlobalAsyncStateManager(new GlobalMemoryManager(TOTAL_QUEUES_MAX_SIZE_LIMIT_BYTES)); - - var stateId = simulateIncomingRecords(STREAM1_DESC, 3, stateManager); - stateManager.trackState(STREAM1_STATE_MESSAGE1, STATE_MSG_SIZE, DEFAULT_NAMESPACE); - stateManager.decrement(stateId, 3); - stateManager.flushStates(emittedStatesFromDestination::add); - final AirbyteStateStats expectedDestinationStats = new AirbyteStateStats().withRecordCount(3.0); - final Map stateWithStats = - emittedStatesFromDestination.stream() - .collect(Collectors.toMap(c -> c, c -> c.getState().getDestinationStats())); - assertEquals( - List.of( - attachDestinationStateStats(Jsons.deserialize(STREAM1_STATE_MESSAGE1.getSerialized(), AirbyteMessage.class), expectedDestinationStats)), - stateWithStats.keySet().stream().toList()); - assertEquals(List.of(expectedDestinationStats), stateWithStats.values().stream().toList()); - - emittedStatesFromDestination.clear(); - - stateId = simulateIncomingRecords(STREAM1_DESC, 10, stateManager); - stateManager.trackState(STREAM1_STATE_MESSAGE2, STATE_MSG_SIZE, DEFAULT_NAMESPACE); - stateManager.decrement(stateId, 10); - stateManager.flushStates(emittedStatesFromDestination::add); - final AirbyteStateStats expectedDestinationStats2 = new AirbyteStateStats().withRecordCount(10.0); - final Map stateWithStats2 = - emittedStatesFromDestination.stream() - .collect(Collectors.toMap(c -> c, c -> c.getState().getDestinationStats())); - assertEquals(List.of( - attachDestinationStateStats(Jsons.deserialize(STREAM1_STATE_MESSAGE2.getSerialized(), AirbyteMessage.class), expectedDestinationStats2)), - stateWithStats2.keySet().stream().toList()); - assertEquals(List.of(expectedDestinationStats2), stateWithStats2.values().stream().toList()); - } - - @Test - void testZeroRecordFlushing() { - final List emittedStatesFromDestination = new ArrayList<>(); - final GlobalAsyncStateManager stateManager = - new GlobalAsyncStateManager(new GlobalMemoryManager(TOTAL_QUEUES_MAX_SIZE_LIMIT_BYTES)); - - var stateId = simulateIncomingRecords(STREAM1_DESC, 3, stateManager); - stateManager.trackState(STREAM1_STATE_MESSAGE1, STATE_MSG_SIZE, DEFAULT_NAMESPACE); - stateManager.decrement(stateId, 3); - stateManager.flushStates(emittedStatesFromDestination::add); - final AirbyteStateStats expectedDestinationStats = new AirbyteStateStats().withRecordCount(3.0); - final Map stateWithStats = - emittedStatesFromDestination.stream() - .collect(Collectors.toMap(c -> c, c -> c.getState().getDestinationStats())); - assertEquals( - List.of( - attachDestinationStateStats(Jsons.deserialize(STREAM1_STATE_MESSAGE1.getSerialized(), AirbyteMessage.class), expectedDestinationStats)), - stateWithStats.keySet().stream().toList()); - assertEquals(List.of(expectedDestinationStats), stateWithStats.values().stream().toList()); - emittedStatesFromDestination.clear(); - - stateManager.trackState(STREAM1_STATE_MESSAGE2, STATE_MSG_SIZE, DEFAULT_NAMESPACE); - stateManager.flushStates(emittedStatesFromDestination::add); - final Map stateWithStats2 = - emittedStatesFromDestination.stream() - .collect(Collectors.toMap(c -> c, c -> c.getState().getDestinationStats())); - final AirbyteStateStats expectedDestinationStats2 = new AirbyteStateStats().withRecordCount(0.0); - assertEquals(List.of( - attachDestinationStateStats(Jsons.deserialize(STREAM1_STATE_MESSAGE2.getSerialized(), AirbyteMessage.class), expectedDestinationStats2)), - stateWithStats2.keySet().stream().toList()); - assertEquals(List.of(expectedDestinationStats2), stateWithStats2.values().stream().toList()); - emittedStatesFromDestination.clear(); - - stateId = simulateIncomingRecords(STREAM1_DESC, 10, stateManager); - stateManager.trackState(STREAM1_STATE_MESSAGE3, STATE_MSG_SIZE, DEFAULT_NAMESPACE); - stateManager.decrement(stateId, 10); - stateManager.flushStates(emittedStatesFromDestination::add); - final Map stateWithStats3 = - emittedStatesFromDestination.stream() - .collect(Collectors.toMap(c -> c, c -> c.getState().getDestinationStats())); - final AirbyteStateStats expectedDestinationStats3 = new AirbyteStateStats().withRecordCount(10.0); - assertEquals(List.of( - attachDestinationStateStats(Jsons.deserialize(STREAM1_STATE_MESSAGE3.getSerialized(), AirbyteMessage.class), expectedDestinationStats3)), - stateWithStats3.keySet().stream().toList()); - assertEquals(List.of(expectedDestinationStats3), stateWithStats3.values().stream().toList()); - } - - @Test - void testCorrectFlushingManyStream() { - final List emittedStatesFromDestination = new ArrayList<>(); - final GlobalAsyncStateManager stateManager = - new GlobalAsyncStateManager(new GlobalMemoryManager(TOTAL_QUEUES_MAX_SIZE_LIMIT_BYTES)); - - final var stream1StateId = simulateIncomingRecords(STREAM1_DESC, 3, stateManager); - final var stream2StateId = simulateIncomingRecords(STREAM2_DESC, 7, stateManager); - - stateManager.trackState(STREAM1_STATE_MESSAGE1, STATE_MSG_SIZE, DEFAULT_NAMESPACE); - stateManager.decrement(stream1StateId, 3); - stateManager.flushStates(emittedStatesFromDestination::add); - final AirbyteStateStats expectedDestinationStats = new AirbyteStateStats().withRecordCount(3.0); - final Map stateWithStats = - emittedStatesFromDestination.stream() - .collect(Collectors.toMap(c -> c, c -> c.getState().getDestinationStats())); - assertEquals( - List.of( - attachDestinationStateStats(Jsons.deserialize(STREAM1_STATE_MESSAGE1.getSerialized(), AirbyteMessage.class), expectedDestinationStats)), - stateWithStats.keySet().stream().toList()); - assertEquals(List.of(expectedDestinationStats), stateWithStats.values().stream().toList()); - emittedStatesFromDestination.clear(); - - stateManager.decrement(stream2StateId, 4); - stateManager.flushStates(emittedStatesFromDestination::add); - assertEquals(List.of(), emittedStatesFromDestination); - stateManager.trackState(STREAM2_STATE_MESSAGE, STATE_MSG_SIZE, DEFAULT_NAMESPACE); - stateManager.decrement(stream2StateId, 3); - // only flush state if counter is 0. - stateManager.flushStates(emittedStatesFromDestination::add); - final AirbyteStateStats expectedDestinationStats2 = new AirbyteStateStats().withRecordCount(7.0); - final Map stateWithStats2 = - emittedStatesFromDestination.stream() - .collect(Collectors.toMap(c -> c, c -> c.getState().getDestinationStats())); - assertEquals( - List.of( - attachDestinationStateStats(Jsons.deserialize(STREAM2_STATE_MESSAGE.getSerialized(), AirbyteMessage.class), expectedDestinationStats2)), - stateWithStats2.keySet().stream().toList()); - assertEquals(List.of(expectedDestinationStats2), stateWithStats2.values().stream().toList()); - } - - } - - private static long simulateIncomingRecords(final StreamDescriptor desc, final long count, final GlobalAsyncStateManager manager) { - var stateId = 0L; - for (int i = 0; i < count; i++) { - stateId = manager.getStateIdAndIncrementCounter(desc); - } - return stateId; - } - - @Test - void flushingRecordsShouldNotReduceStatsCounterForGlobalState() { - final List emittedStatesFromDestination = new ArrayList<>(); - final GlobalAsyncStateManager stateManager = - new GlobalAsyncStateManager(new GlobalMemoryManager(TOTAL_QUEUES_MAX_SIZE_LIMIT_BYTES)); - final long stateId = simulateIncomingRecords(STREAM1_DESC, 6, stateManager); - stateManager.decrement(stateId, 4); - stateManager.trackState(GLOBAL_STATE_MESSAGE1, 1, STREAM1_DESC.getNamespace()); - stateManager.flushStates(emittedStatesFromDestination::add); - assertEquals(0, emittedStatesFromDestination.size()); - stateManager.decrement(stateId, 2); - stateManager.flushStates(emittedStatesFromDestination::add); - assertEquals(1, emittedStatesFromDestination.size()); - assertEquals(6.0, emittedStatesFromDestination.getFirst().getState().getDestinationStats().getRecordCount()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/util/ConnectorExceptionUtilTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/util/ConnectorExceptionUtilTest.java deleted file mode 100644 index c36461af01508..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/util/ConnectorExceptionUtilTest.java +++ /dev/null @@ -1,136 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.util; - -import static io.airbyte.cdk.integrations.util.ConnectorExceptionUtil.COMMON_EXCEPTION_MESSAGE_TEMPLATE; -import static io.airbyte.cdk.integrations.util.ConnectorExceptionUtil.RECOVERY_CONNECTION_ERROR_MESSAGE; -import static org.junit.jupiter.api.Assertions.*; - -import io.airbyte.commons.exceptions.ConfigErrorException; -import io.airbyte.commons.exceptions.ConnectionErrorException; -import java.sql.SQLException; -import java.sql.SQLSyntaxErrorException; -import org.junit.jupiter.api.Test; - -class ConnectorExceptionUtilTest { - - public static final String CONFIG_EXCEPTION_MESSAGE = "test message"; - public static final String RECOVERY_EXCEPTION_MESSAGE = "FATAL: terminating connection due to conflict with recovery"; - public static final String COMMON_EXCEPTION_MESSAGE = "something happens with connection"; - public static final String CONNECTION_ERROR_MESSAGE_TEMPLATE = "State code: %s; Error code: %s; Message: %s"; - public static final String UNKNOWN_COLUMN_SQL_EXCEPTION_MESSAGE = "Unknown column 'table.column' in 'field list'"; - - @Test() - void isConfigErrorForConfigException() { - ConfigErrorException configErrorException = new ConfigErrorException(CONFIG_EXCEPTION_MESSAGE); - assertTrue(ConnectorExceptionUtil.isConfigError(configErrorException)); - - } - - @Test - void isConfigErrorForConnectionException() { - ConnectionErrorException connectionErrorException = new ConnectionErrorException(CONFIG_EXCEPTION_MESSAGE); - assertTrue(ConnectorExceptionUtil.isConfigError(connectionErrorException)); - } - - @Test - void isConfigErrorForRecoveryPSQLException() { - SQLException recoveryPSQLException = new SQLException(RECOVERY_EXCEPTION_MESSAGE); - assertTrue(ConnectorExceptionUtil.isConfigError(recoveryPSQLException)); - } - - @Test - void isConfigErrorForUnknownColumnSQLSyntaxErrorException() { - SQLSyntaxErrorException unknownColumnSQLSyntaxErrorException = new SQLSyntaxErrorException(UNKNOWN_COLUMN_SQL_EXCEPTION_MESSAGE); - assertTrue(ConnectorExceptionUtil.isConfigError(unknownColumnSQLSyntaxErrorException)); - } - - @Test - void isConfigErrorForCommonSQLException() { - SQLException recoveryPSQLException = new SQLException(COMMON_EXCEPTION_MESSAGE); - assertFalse(ConnectorExceptionUtil.isConfigError(recoveryPSQLException)); - } - - @Test - void isConfigErrorForCommonException() { - assertFalse(ConnectorExceptionUtil.isConfigError(new Exception())); - } - - @Test - void getDisplayMessageForConfigException() { - ConfigErrorException configErrorException = new ConfigErrorException(CONFIG_EXCEPTION_MESSAGE); - String actualDisplayMessage = ConnectorExceptionUtil.getDisplayMessage(configErrorException); - assertEquals(CONFIG_EXCEPTION_MESSAGE, actualDisplayMessage); - } - - @Test - void getDisplayMessageForConnectionError() { - String testCode = "test code"; - int errorCode = -1; - ConnectionErrorException connectionErrorException = new ConnectionErrorException(testCode, errorCode, CONFIG_EXCEPTION_MESSAGE, new Exception()); - String actualDisplayMessage = ConnectorExceptionUtil.getDisplayMessage(connectionErrorException); - assertEquals(String.format(CONNECTION_ERROR_MESSAGE_TEMPLATE, testCode, errorCode, CONFIG_EXCEPTION_MESSAGE), actualDisplayMessage); - } - - @Test - void getDisplayMessageForRecoveryException() { - SQLException recoveryException = new SQLException(RECOVERY_EXCEPTION_MESSAGE); - String actualDisplayMessage = ConnectorExceptionUtil.getDisplayMessage(recoveryException); - assertEquals(RECOVERY_CONNECTION_ERROR_MESSAGE, actualDisplayMessage); - } - - @Test - void getDisplayMessageForUnknownSQLErrorException() { - SQLSyntaxErrorException unknownColumnSQLSyntaxErrorException = new SQLSyntaxErrorException(UNKNOWN_COLUMN_SQL_EXCEPTION_MESSAGE); - String actualDisplayMessage = ConnectorExceptionUtil.getDisplayMessage(unknownColumnSQLSyntaxErrorException); - assertEquals(UNKNOWN_COLUMN_SQL_EXCEPTION_MESSAGE, actualDisplayMessage); - } - - @Test - void getDisplayMessageForCommonException() { - Exception exception = new SQLException(COMMON_EXCEPTION_MESSAGE); - String actualDisplayMessage = ConnectorExceptionUtil.getDisplayMessage(exception); - assertEquals(String.format(COMMON_EXCEPTION_MESSAGE_TEMPLATE, COMMON_EXCEPTION_MESSAGE), actualDisplayMessage); - } - - @Test - void getRootConfigErrorFromConfigException() { - ConfigErrorException configErrorException = new ConfigErrorException(CONFIG_EXCEPTION_MESSAGE); - Exception exception = new Exception(COMMON_EXCEPTION_MESSAGE, configErrorException); - - Throwable actualRootConfigError = ConnectorExceptionUtil.getRootConfigError(exception); - assertEquals(configErrorException, actualRootConfigError); - } - - @Test - void getRootConfigErrorFromRecoverySQLException() { - SQLException recoveryException = new SQLException(RECOVERY_EXCEPTION_MESSAGE); - RuntimeException runtimeException = new RuntimeException(COMMON_EXCEPTION_MESSAGE, recoveryException); - Exception exception = new Exception(runtimeException); - - Throwable actualRootConfigError = ConnectorExceptionUtil.getRootConfigError(exception); - assertEquals(recoveryException, actualRootConfigError); - } - - @Test - void getRootConfigErrorFromUnknownSQLErrorException() { - SQLException unknownSQLErrorException = new SQLSyntaxErrorException(UNKNOWN_COLUMN_SQL_EXCEPTION_MESSAGE); - RuntimeException runtimeException = new RuntimeException(COMMON_EXCEPTION_MESSAGE, unknownSQLErrorException); - Exception exception = new Exception(runtimeException); - - Throwable actualRootConfigError = ConnectorExceptionUtil.getRootConfigError(exception); - assertEquals(unknownSQLErrorException, actualRootConfigError); - } - - @Test - void getRootConfigErrorFromNonConfigException() { - SQLException configErrorException = new SQLException(CONFIG_EXCEPTION_MESSAGE); - Exception exception = new Exception(COMMON_EXCEPTION_MESSAGE, configErrorException); - - Throwable actualRootConfigError = ConnectorExceptionUtil.getRootConfigError(exception); - assertEquals(exception, actualRootConfigError); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/util/concurrent/ConcurrentStreamConsumerTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/util/concurrent/ConcurrentStreamConsumerTest.java deleted file mode 100644 index 085833dbf3392..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/util/concurrent/ConcurrentStreamConsumerTest.java +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.util.concurrent; - -import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.node.IntNode; -import com.google.common.collect.Lists; -import io.airbyte.commons.util.AutoCloseableIterator; -import io.airbyte.commons.util.AutoCloseableIterators; -import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import java.util.ArrayList; -import java.util.List; -import java.util.function.Consumer; -import org.junit.jupiter.api.Test; - -/** - * Test suite for the {@link ConcurrentStreamConsumer} class. - */ -class ConcurrentStreamConsumerTest { - - private static final String NAME = "name"; - private static final String NAMESPACE = "namespace"; - - @Test - void testAcceptMessage() { - final AutoCloseableIterator stream = mock(AutoCloseableIterator.class); - final Consumer> streamConsumer = mock(Consumer.class); - - final ConcurrentStreamConsumer concurrentStreamConsumer = new ConcurrentStreamConsumer(streamConsumer, 1); - - assertDoesNotThrow(() -> concurrentStreamConsumer.accept(List.of(stream))); - - verify(streamConsumer, times(1)).accept(stream); - } - - @Test - void testAcceptMessageWithException() { - final AutoCloseableIterator stream = mock(AutoCloseableIterator.class); - final Consumer> streamConsumer = mock(Consumer.class); - final Exception e = new NullPointerException("test"); - - doThrow(e).when(streamConsumer).accept(any()); - - final ConcurrentStreamConsumer concurrentStreamConsumer = new ConcurrentStreamConsumer(streamConsumer, 1); - - assertDoesNotThrow(() -> concurrentStreamConsumer.accept(List.of(stream))); - - verify(streamConsumer, times(1)).accept(stream); - assertTrue(concurrentStreamConsumer.getException().isPresent()); - assertEquals(e, concurrentStreamConsumer.getException().get()); - assertEquals(1, concurrentStreamConsumer.getExceptions().size()); - assertTrue(concurrentStreamConsumer.getExceptions().contains(e)); - } - - @Test - void testAcceptMessageWithMultipleExceptions() { - final AutoCloseableIterator stream1 = mock(AutoCloseableIterator.class); - final AutoCloseableIterator stream2 = mock(AutoCloseableIterator.class); - final AutoCloseableIterator stream3 = mock(AutoCloseableIterator.class); - final Consumer> streamConsumer = mock(Consumer.class); - final Exception e1 = new NullPointerException("test1"); - final Exception e2 = new NullPointerException("test2"); - final Exception e3 = new NullPointerException("test3"); - - doThrow(e1).when(streamConsumer).accept(stream1); - doThrow(e2).when(streamConsumer).accept(stream2); - doThrow(e3).when(streamConsumer).accept(stream3); - - final ConcurrentStreamConsumer concurrentStreamConsumer = new ConcurrentStreamConsumer(streamConsumer, 1); - - assertDoesNotThrow(() -> concurrentStreamConsumer.accept(List.of(stream1, stream2, stream3))); - - verify(streamConsumer, times(3)).accept(any(AutoCloseableIterator.class)); - assertTrue(concurrentStreamConsumer.getException().isPresent()); - assertEquals(e1, concurrentStreamConsumer.getException().get()); - assertEquals(3, concurrentStreamConsumer.getExceptions().size()); - assertTrue(concurrentStreamConsumer.getExceptions().contains(e1)); - assertTrue(concurrentStreamConsumer.getExceptions().contains(e2)); - assertTrue(concurrentStreamConsumer.getExceptions().contains(e3)); - } - - @Test - void testMoreStreamsThanAvailableThreads() { - final List baseData = List.of(2, 4, 6, 8, 10, 12, 14, 16, 18, 20); - final List> streams = new ArrayList<>(); - for (int i = 0; i < 20; i++) { - final AirbyteStreamNameNamespacePair airbyteStreamNameNamespacePair = - new AirbyteStreamNameNamespacePair(String.format("%s_%d", NAME, i), NAMESPACE); - final List messages = new ArrayList<>(); - for (int d : baseData) { - final AirbyteMessage airbyteMessage = mock(AirbyteMessage.class); - final AirbyteRecordMessage recordMessage = mock(AirbyteRecordMessage.class); - when(recordMessage.getData()).thenReturn(new IntNode(d * i)); - when(airbyteMessage.getRecord()).thenReturn(recordMessage); - messages.add(airbyteMessage); - } - streams.add(AutoCloseableIterators.fromIterator(messages.iterator(), airbyteStreamNameNamespacePair)); - } - final Consumer> streamConsumer = mock(Consumer.class); - - final ConcurrentStreamConsumer concurrentStreamConsumer = new ConcurrentStreamConsumer(streamConsumer, streams.size()); - final Integer partitionSize = concurrentStreamConsumer.getParallelism(); - final List>> partitions = Lists.partition(streams.stream().toList(), - partitionSize); - - for (final List> partition : partitions) { - assertDoesNotThrow(() -> concurrentStreamConsumer.accept(partition)); - } - - verify(streamConsumer, times(streams.size())).accept(any(AutoCloseableIterator.class)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/IncrementalUtilsTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/IncrementalUtilsTest.kt new file mode 100644 index 0000000000000..338208227688a --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/IncrementalUtilsTest.kt @@ -0,0 +1,197 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db + +import com.fasterxml.jackson.databind.node.ObjectNode +import com.google.common.collect.Lists +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.Field +import io.airbyte.protocol.models.JsonSchemaPrimitiveUtil +import io.airbyte.protocol.models.JsonSchemaType +import io.airbyte.protocol.models.v0.CatalogHelpers +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class IncrementalUtilsTest { + @Test + fun testGetCursorField() { + val stream = Jsons.clone(STREAM) + stream.cursorField = Lists.newArrayList(UUID_FIELD_NAME) + Assertions.assertEquals(UUID_FIELD_NAME, IncrementalUtils.getCursorField(stream)) + } + + @Test + fun testGetCursorFieldNoCursorFieldSet() { + Assertions.assertThrows(IllegalStateException::class.java) { + Assertions.assertEquals(UUID_FIELD_NAME, IncrementalUtils.getCursorField(STREAM)) + } + } + + @Test + fun testGetCursorFieldCompositCursor() { + val stream = Jsons.clone(STREAM) + stream.cursorField = Lists.newArrayList(UUID_FIELD_NAME, "something_else") + Assertions.assertThrows(IllegalStateException::class.java) { + IncrementalUtils.getCursorField(stream) + } + } + + @Test + fun testGetCursorType() { + Assertions.assertEquals( + JsonSchemaPrimitiveUtil.JsonSchemaPrimitive.STRING, + IncrementalUtils.getCursorType(STREAM, UUID_FIELD_NAME) + ) + } + + @Test + fun testGetCursorType_V1() { + Assertions.assertEquals( + JsonSchemaPrimitiveUtil.JsonSchemaPrimitive.STRING_V1, + IncrementalUtils.getCursorType(STREAM_V1, UUID_FIELD_NAME) + ) + } + + @Test + fun testGetCursorTypeNoProperties() { + val stream = Jsons.clone(STREAM) + stream.stream.jsonSchema = Jsons.jsonNode(emptyMap()) + Assertions.assertThrows(IllegalStateException::class.java) { + IncrementalUtils.getCursorType(stream, UUID_FIELD_NAME) + } + } + + @Test + fun testGetCursorTypeNoCursor() { + Assertions.assertThrows(IllegalStateException::class.java) { + IncrementalUtils.getCursorType(STREAM, "does not exist") + } + } + + @Test + fun testGetCursorTypeCursorHasNoType() { + val stream = Jsons.clone(STREAM) + (stream.stream.jsonSchema["properties"][UUID_FIELD_NAME] as ObjectNode).remove("type") + Assertions.assertThrows(IllegalStateException::class.java) { + IncrementalUtils.getCursorType(stream, UUID_FIELD_NAME) + } + } + + @Test + fun testCompareCursors() { + Assertions.assertTrue( + IncrementalUtils.compareCursors( + ABC, + "def", + JsonSchemaPrimitiveUtil.JsonSchemaPrimitive.STRING + ) < 0 + ) + Assertions.assertTrue( + IncrementalUtils.compareCursors( + ABC, + "def", + JsonSchemaPrimitiveUtil.JsonSchemaPrimitive.STRING_V1 + ) < 0 + ) + Assertions.assertEquals( + 0, + IncrementalUtils.compareCursors( + ABC, + ABC, + JsonSchemaPrimitiveUtil.JsonSchemaPrimitive.STRING + ) + ) + Assertions.assertTrue( + IncrementalUtils.compareCursors( + "1", + "2", + JsonSchemaPrimitiveUtil.JsonSchemaPrimitive.NUMBER + ) < 0 + ) + Assertions.assertTrue( + IncrementalUtils.compareCursors( + "1", + "2", + JsonSchemaPrimitiveUtil.JsonSchemaPrimitive.INTEGER_V1 + ) < 0 + ) + Assertions.assertTrue( + IncrementalUtils.compareCursors( + "5000000000", + "5000000001", + JsonSchemaPrimitiveUtil.JsonSchemaPrimitive.NUMBER + ) < 0 + ) + Assertions.assertTrue( + IncrementalUtils.compareCursors( + "false", + "true", + JsonSchemaPrimitiveUtil.JsonSchemaPrimitive.BOOLEAN + ) < 0 + ) + Assertions.assertTrue( + IncrementalUtils.compareCursors( + null, + "def", + JsonSchemaPrimitiveUtil.JsonSchemaPrimitive.STRING + ) < 1 + ) + Assertions.assertTrue( + IncrementalUtils.compareCursors( + ABC, + null, + JsonSchemaPrimitiveUtil.JsonSchemaPrimitive.STRING + ) > 0 + ) + Assertions.assertEquals( + 0, + IncrementalUtils.compareCursors( + null, + null, + JsonSchemaPrimitiveUtil.JsonSchemaPrimitive.STRING + ) + ) + Assertions.assertThrows(IllegalStateException::class.java) { + IncrementalUtils.compareCursors( + "a", + "a", + JsonSchemaPrimitiveUtil.JsonSchemaPrimitive.ARRAY + ) + } + Assertions.assertThrows(IllegalStateException::class.java) { + IncrementalUtils.compareCursors( + "a", + "a", + JsonSchemaPrimitiveUtil.JsonSchemaPrimitive.OBJECT + ) + } + Assertions.assertThrows(IllegalStateException::class.java) { + IncrementalUtils.compareCursors( + "a", + "a", + JsonSchemaPrimitiveUtil.JsonSchemaPrimitive.NULL + ) + } + } + + companion object { + private const val STREAM_NAME = "shoes" + private const val UUID_FIELD_NAME = "ascending_inventory_uuid" + private val STREAM: ConfiguredAirbyteStream = + CatalogHelpers.createConfiguredAirbyteStream( + STREAM_NAME, + null, + Field.of("ascending_inventory_uuid", JsonSchemaType.STRING) + ) + + private val STREAM_V1: ConfiguredAirbyteStream = + CatalogHelpers.createConfiguredAirbyteStream( + STREAM_NAME, + null, + Field.of("ascending_inventory_uuid", JsonSchemaType.STRING_V1) + ) + private const val ABC = "abc" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/check/impl/CommonDatabaseCheckTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/check/impl/CommonDatabaseCheckTest.kt new file mode 100644 index 0000000000000..277ea848a6f46 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/check/impl/CommonDatabaseCheckTest.kt @@ -0,0 +1,48 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db.check.impl + +import io.airbyte.cdk.db.factory.DSLContextFactory +import io.airbyte.cdk.db.factory.DataSourceFactory +import javax.sql.DataSource +import org.jooq.DSLContext +import org.jooq.SQLDialect +import org.junit.jupiter.api.AfterEach +import org.junit.jupiter.api.BeforeEach +import org.testcontainers.containers.PostgreSQLContainer + +/** Common test setup for database availability check tests. */ +internal class CommonDatabaseCheckTest { + protected var container: PostgreSQLContainer<*>? = null + + protected var dataSource: DataSource? = null + + protected var dslContext: DSLContext? = null + + @BeforeEach + fun setup() { + container = PostgreSQLContainer("postgres:13-alpine") + container!!.start() + + dataSource = + DataSourceFactory.create( + container!!.username, + container!!.password, + container!!.driverClassName, + container!!.jdbcUrl + ) + dslContext = DSLContextFactory.create(dataSource, SQLDialect.POSTGRES) + } + + @AfterEach + @Throws(Exception::class) + fun cleanup() { + DataSourceFactory.close(dataSource) + container!!.stop() + } + + companion object { + protected const val TIMEOUT_MS: Long = 500L + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/factory/CommonFactoryTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/factory/CommonFactoryTest.kt new file mode 100644 index 0000000000000..de8866e8ff998 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/factory/CommonFactoryTest.kt @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db.factory + +import org.junit.jupiter.api.AfterAll +import org.junit.jupiter.api.BeforeAll +import org.testcontainers.containers.PostgreSQLContainer + +/** Common test suite for the classes found in the `io.airbyte.cdk.db.factory` package. */ +internal open class CommonFactoryTest { + companion object { + private const val DATABASE_NAME = "airbyte_test_database" + + @JvmStatic + protected var container: PostgreSQLContainer<*> = + PostgreSQLContainer("postgres:13-alpine") + + @JvmStatic + @BeforeAll + fun dbSetup(): Unit { + container.withDatabaseName(DATABASE_NAME).withUsername("docker").withPassword("docker") + container!!.start() + } + + @JvmStatic + @AfterAll + fun dbDown(): Unit { + container!!.close() + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/factory/DSLContextFactoryTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/factory/DSLContextFactoryTest.kt new file mode 100644 index 0000000000000..0eb418038851c --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/factory/DSLContextFactoryTest.kt @@ -0,0 +1,61 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db.factory + +import io.airbyte.cdk.integrations.JdbcConnector +import java.util.Map +import org.jooq.SQLDialect +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +/** Test suite for the [DSLContextFactory] class. */ +internal class DSLContextFactoryTest : CommonFactoryTest() { + @Test + fun testCreatingADslContext() { + val dataSource = + DataSourceFactory.create( + CommonFactoryTest.Companion.container!!.getUsername(), + CommonFactoryTest.Companion.container!!.getPassword(), + CommonFactoryTest.Companion.container!!.getDriverClassName(), + CommonFactoryTest.Companion.container!!.getJdbcUrl() + ) + val dialect = SQLDialect.POSTGRES + val dslContext = DSLContextFactory.create(dataSource, dialect) + Assertions.assertNotNull(dslContext) + Assertions.assertEquals(dialect, dslContext.configuration().dialect()) + } + + @Test + fun testCreatingADslContextWithIndividualConfiguration() { + val dialect = SQLDialect.POSTGRES + val dslContext = + DSLContextFactory.create( + CommonFactoryTest.Companion.container!!.getUsername(), + CommonFactoryTest.Companion.container!!.getPassword(), + CommonFactoryTest.Companion.container!!.getDriverClassName(), + CommonFactoryTest.Companion.container!!.getJdbcUrl(), + dialect + ) + Assertions.assertNotNull(dslContext) + Assertions.assertEquals(dialect, dslContext.configuration().dialect()) + } + + @Test + fun testCreatingADslContextWithIndividualConfigurationAndConnectionProperties() { + val connectionProperties = Map.of("foo", "bar") + val dialect = SQLDialect.POSTGRES + val dslContext = + DSLContextFactory.create( + CommonFactoryTest.Companion.container!!.getUsername(), + CommonFactoryTest.Companion.container!!.getPassword(), + CommonFactoryTest.Companion.container!!.getDriverClassName(), + CommonFactoryTest.Companion.container!!.getJdbcUrl(), + dialect, + connectionProperties, + JdbcConnector.CONNECT_TIMEOUT_DEFAULT + ) + Assertions.assertNotNull(dslContext) + Assertions.assertEquals(dialect, dslContext.configuration().dialect()) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/factory/DataSourceFactoryTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/factory/DataSourceFactoryTest.kt new file mode 100644 index 0000000000000..2839249c5cbf7 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/factory/DataSourceFactoryTest.kt @@ -0,0 +1,258 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db.factory + +import com.zaxxer.hikari.HikariDataSource +import io.airbyte.cdk.integrations.JdbcConnector +import java.util.Map +import javax.sql.DataSource +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.mockito.Mockito +import org.testcontainers.containers.MySQLContainer + +/** Test suite for the [DataSourceFactory] class. */ +internal class DataSourceFactoryTest : CommonFactoryTest() { + @Test + fun testCreatingDataSourceWithConnectionTimeoutSetAboveDefault() { + val connectionProperties = Map.of(CONNECT_TIMEOUT, "61") + val dataSource = + DataSourceFactory.create( + username, + password, + driverClassName, + jdbcUrl, + connectionProperties, + JdbcConnector.getConnectionTimeout(connectionProperties, driverClassName) + ) + Assertions.assertNotNull(dataSource) + Assertions.assertEquals(HikariDataSource::class.java, dataSource.javaClass) + Assertions.assertEquals( + 61000, + (dataSource as HikariDataSource).hikariConfigMXBean.connectionTimeout + ) + } + + @Test + fun testCreatingPostgresDataSourceWithConnectionTimeoutSetBelowDefault() { + val connectionProperties = Map.of(CONNECT_TIMEOUT, "30") + val dataSource = + DataSourceFactory.create( + username, + password, + driverClassName, + jdbcUrl, + connectionProperties, + JdbcConnector.getConnectionTimeout(connectionProperties, driverClassName) + ) + Assertions.assertNotNull(dataSource) + Assertions.assertEquals(HikariDataSource::class.java, dataSource.javaClass) + Assertions.assertEquals( + 30000, + (dataSource as HikariDataSource).hikariConfigMXBean.connectionTimeout + ) + } + + @Test + fun testCreatingMySQLDataSourceWithConnectionTimeoutSetBelowDefault() { + MySQLContainer("mysql:8.0").use { mySQLContainer -> + mySQLContainer.start() + val connectionProperties = Map.of(CONNECT_TIMEOUT, "5000") + val dataSource = + DataSourceFactory.create( + mySQLContainer.getUsername(), + mySQLContainer.getPassword(), + mySQLContainer.getDriverClassName(), + mySQLContainer.getJdbcUrl(), + connectionProperties, + JdbcConnector.getConnectionTimeout( + connectionProperties, + mySQLContainer.getDriverClassName() + ) + ) + Assertions.assertNotNull(dataSource) + Assertions.assertEquals(HikariDataSource::class.java, dataSource.javaClass) + Assertions.assertEquals( + 5000, + (dataSource as HikariDataSource).hikariConfigMXBean.connectionTimeout + ) + } + } + + @Test + fun testCreatingDataSourceWithConnectionTimeoutSetWithZero() { + val connectionProperties = Map.of(CONNECT_TIMEOUT, "0") + val dataSource = + DataSourceFactory.create( + username, + password, + driverClassName, + jdbcUrl, + connectionProperties, + JdbcConnector.getConnectionTimeout(connectionProperties, driverClassName) + ) + Assertions.assertNotNull(dataSource) + Assertions.assertEquals(HikariDataSource::class.java, dataSource.javaClass) + Assertions.assertEquals( + Int.MAX_VALUE.toLong(), + (dataSource as HikariDataSource).hikariConfigMXBean.connectionTimeout + ) + } + + @Test + fun testCreatingPostgresDataSourceWithConnectionTimeoutNotSet() { + val connectionProperties = Map.of() + val dataSource = + DataSourceFactory.create( + username, + password, + driverClassName, + jdbcUrl, + connectionProperties, + JdbcConnector.getConnectionTimeout(connectionProperties, driverClassName) + ) + Assertions.assertNotNull(dataSource) + Assertions.assertEquals(HikariDataSource::class.java, dataSource.javaClass) + Assertions.assertEquals( + 10000, + (dataSource as HikariDataSource).hikariConfigMXBean.connectionTimeout + ) + } + + @Test + fun testCreatingMySQLDataSourceWithConnectionTimeoutNotSet() { + MySQLContainer("mysql:8.0").use { mySQLContainer -> + mySQLContainer.start() + val connectionProperties = Map.of() + val dataSource = + DataSourceFactory.create( + mySQLContainer.getUsername(), + mySQLContainer.getPassword(), + mySQLContainer.getDriverClassName(), + mySQLContainer.getJdbcUrl(), + connectionProperties, + JdbcConnector.getConnectionTimeout( + connectionProperties, + mySQLContainer.getDriverClassName() + ) + ) + Assertions.assertNotNull(dataSource) + Assertions.assertEquals(HikariDataSource::class.java, dataSource.javaClass) + Assertions.assertEquals( + 60000, + (dataSource as HikariDataSource).hikariConfigMXBean.connectionTimeout + ) + } + } + + @Test + fun testCreatingADataSourceWithJdbcUrl() { + val dataSource = DataSourceFactory.create(username, password, driverClassName, jdbcUrl) + Assertions.assertNotNull(dataSource) + Assertions.assertEquals(HikariDataSource::class.java, dataSource.javaClass) + Assertions.assertEquals( + 10, + (dataSource as HikariDataSource).hikariConfigMXBean.maximumPoolSize + ) + } + + @Test + fun testCreatingADataSourceWithJdbcUrlAndConnectionProperties() { + val connectionProperties = Map.of("foo", "bar") + + val dataSource = + DataSourceFactory.create( + username, + password, + driverClassName, + jdbcUrl, + connectionProperties, + JdbcConnector.getConnectionTimeout(connectionProperties, driverClassName) + ) + Assertions.assertNotNull(dataSource) + Assertions.assertEquals(HikariDataSource::class.java, dataSource.javaClass) + Assertions.assertEquals( + 10, + (dataSource as HikariDataSource).hikariConfigMXBean.maximumPoolSize + ) + } + + @Test + fun testCreatingADataSourceWithHostAndPort() { + val dataSource = + DataSourceFactory.create(username, password, host, port, database, driverClassName) + Assertions.assertNotNull(dataSource) + Assertions.assertEquals(HikariDataSource::class.java, dataSource.javaClass) + Assertions.assertEquals( + 10, + (dataSource as HikariDataSource).hikariConfigMXBean.maximumPoolSize + ) + } + + @Test + fun testCreatingADataSourceWithHostPortAndConnectionProperties() { + val connectionProperties = Map.of("foo", "bar") + + val dataSource = + DataSourceFactory.create( + username, + password, + host, + port, + database, + driverClassName, + connectionProperties + ) + Assertions.assertNotNull(dataSource) + Assertions.assertEquals(HikariDataSource::class.java, dataSource.javaClass) + Assertions.assertEquals( + 10, + (dataSource as HikariDataSource).hikariConfigMXBean.maximumPoolSize + ) + } + + @Test + fun testCreatingAnInvalidDataSourceWithHostAndPort() { + val driverClassName = "Unknown" + + Assertions.assertThrows(RuntimeException::class.java) { + DataSourceFactory.create(username, password, host, port, database, driverClassName) + } + } + + @Test + fun testCreatingAPostgresqlDataSource() { + val dataSource = DataSourceFactory.createPostgres(username, password, host, port, database) + Assertions.assertNotNull(dataSource) + Assertions.assertEquals(HikariDataSource::class.java, dataSource.javaClass) + Assertions.assertEquals( + 10, + (dataSource as HikariDataSource).hikariConfigMXBean.maximumPoolSize + ) + } + + @Test + fun testClosingADataSource() { + val dataSource1 = Mockito.mock(HikariDataSource::class.java) + Assertions.assertDoesNotThrow { DataSourceFactory.close(dataSource1) } + Mockito.verify(dataSource1, Mockito.times(1)).close() + + val dataSource2 = Mockito.mock(DataSource::class.java) + Assertions.assertDoesNotThrow { DataSourceFactory.close(dataSource2) } + + Assertions.assertDoesNotThrow { DataSourceFactory.close(null) } + } + + companion object { + private const val CONNECT_TIMEOUT = "connectTimeout" + + var database: String = container.databaseName + var driverClassName: String = container.driverClassName + var host: String = container.host + var jdbcUrl: String = container.getJdbcUrl() + var password: String = container.password + var port: Int = container.firstMappedPort + var username: String = container.username + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/jdbc/TestDefaultJdbcDatabase.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/jdbc/TestDefaultJdbcDatabase.kt new file mode 100644 index 0000000000000..30c8f1c9ad431 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/jdbc/TestDefaultJdbcDatabase.kt @@ -0,0 +1,150 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db.jdbc + +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.collect.ImmutableMap +import com.google.common.collect.Lists +import io.airbyte.cdk.db.factory.DataSourceFactory +import io.airbyte.cdk.db.factory.DatabaseDriver +import io.airbyte.cdk.testutils.PostgreSQLContainerHelper +import io.airbyte.commons.functional.CheckedConsumer +import io.airbyte.commons.io.IOs +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.string.Strings +import java.sql.Connection +import java.sql.ResultSet +import java.sql.SQLException +import javax.sql.DataSource +import org.junit.jupiter.api.* +import org.testcontainers.containers.PostgreSQLContainer +import org.testcontainers.utility.MountableFile + +internal class TestDefaultJdbcDatabase { + private val sourceOperations: JdbcSourceOperations = JdbcUtils.defaultSourceOperations + private lateinit var dataSource: DataSource + private lateinit var database: JdbcDatabase + + @BeforeEach + @Throws(Exception::class) + fun setup() { + val dbName = Strings.addRandomSuffix("db", "_", 10) + + val config = getConfig(PSQL_DB, dbName) + val initScriptName = "init_$dbName.sql" + val tmpFilePath = IOs.writeFileToRandomTmpDir(initScriptName, "CREATE DATABASE $dbName;") + PostgreSQLContainerHelper.runSqlScript(MountableFile.forHostPath(tmpFilePath), PSQL_DB) + + dataSource = getDataSourceFromConfig(config) + database = DefaultJdbcDatabase(dataSource) + database.execute( + CheckedConsumer { connection: Connection -> + connection + .createStatement() + .execute("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));") + connection + .createStatement() + .execute( + "INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');" + ) + } + ) + } + + @AfterEach + @Throws(Exception::class) + fun close() { + DataSourceFactory.close(dataSource) + } + + @Test + @Throws(SQLException::class) + fun testBufferedResultQuery() { + val actual = + database!!.bufferedResultSetQuery( + { connection: Connection -> + connection.createStatement().executeQuery("SELECT * FROM id_and_name;") + }, + { queryContext: ResultSet -> sourceOperations.rowToJson(queryContext) } + ) + + Assertions.assertEquals(RECORDS_AS_JSON, actual) + } + + @Test + @Throws(SQLException::class) + fun testResultSetQuery() { + database!! + .unsafeResultSetQuery( + { connection: Connection -> + connection.createStatement().executeQuery("SELECT * FROM id_and_name;") + }, + { queryContext: ResultSet -> sourceOperations.rowToJson(queryContext) } + ) + .use { actual -> Assertions.assertEquals(RECORDS_AS_JSON, actual.toList()) } + } + + @Test + @Throws(SQLException::class) + fun testQuery() { + val actual = + database!!.queryJsons( + { connection: Connection -> + connection.prepareStatement("SELECT * FROM id_and_name;") + }, + { queryContext: ResultSet -> sourceOperations.rowToJson(queryContext) } + ) + Assertions.assertEquals(RECORDS_AS_JSON, actual) + } + + private fun getDataSourceFromConfig(config: JsonNode): DataSource { + return DataSourceFactory.create( + config[JdbcUtils.USERNAME_KEY].asText(), + config[JdbcUtils.PASSWORD_KEY].asText(), + DatabaseDriver.POSTGRESQL.driverClassName, + String.format( + DatabaseDriver.POSTGRESQL.urlFormatString, + config[JdbcUtils.HOST_KEY].asText(), + config[JdbcUtils.PORT_KEY].asInt(), + config[JdbcUtils.DATABASE_KEY].asText() + ) + ) + } + + private fun getConfig(psqlDb: PostgreSQLContainer<*>?, dbName: String): JsonNode { + return Jsons.jsonNode( + ImmutableMap.builder() + .put(JdbcUtils.HOST_KEY, psqlDb!!.host) + .put(JdbcUtils.PORT_KEY, psqlDb.firstMappedPort) + .put(JdbcUtils.DATABASE_KEY, dbName) + .put(JdbcUtils.USERNAME_KEY, psqlDb.username) + .put(JdbcUtils.PASSWORD_KEY, psqlDb.password) + .build() + ) + } + + companion object { + private val RECORDS_AS_JSON: List = + Lists.newArrayList( + Jsons.jsonNode(ImmutableMap.of("id", 1, "name", "picard")), + Jsons.jsonNode(ImmutableMap.of("id", 2, "name", "crusher")), + Jsons.jsonNode(ImmutableMap.of("id", 3, "name", "vash")) + ) + + private lateinit var PSQL_DB: PostgreSQLContainer + + @JvmStatic + @BeforeAll + fun init(): Unit { + PSQL_DB = PostgreSQLContainer("postgres:13-alpine") + PSQL_DB.start() + } + + @JvmStatic + @AfterAll + fun cleanUp(): Unit { + PSQL_DB.close() + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/jdbc/TestJdbcUtils.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/jdbc/TestJdbcUtils.kt new file mode 100644 index 0000000000000..931b9edaac35b --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/jdbc/TestJdbcUtils.kt @@ -0,0 +1,479 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db.jdbc + +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.ObjectMapper +import com.fasterxml.jackson.databind.node.BinaryNode +import com.fasterxml.jackson.databind.node.ObjectNode +import com.google.common.base.Charsets +import com.google.common.collect.ImmutableMap +import com.google.common.collect.Lists +import io.airbyte.cdk.db.factory.DataSourceFactory +import io.airbyte.cdk.db.factory.DatabaseDriver +import io.airbyte.cdk.testutils.PostgreSQLContainerHelper +import io.airbyte.commons.io.IOs +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.stream.MoreStreams +import io.airbyte.commons.string.Strings +import io.airbyte.protocol.models.JsonSchemaType +import java.math.BigDecimal +import java.sql.* +import java.util.stream.Collectors +import javax.sql.DataSource +import org.bouncycastle.util.encoders.Base64 +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.BeforeAll +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.junit.jupiter.params.ParameterizedTest +import org.junit.jupiter.params.provider.CsvSource +import org.testcontainers.containers.PostgreSQLContainer +import org.testcontainers.utility.MountableFile + +internal class TestJdbcUtils { + private var dbName: String = "dummy" + private lateinit var dataSource: DataSource + @BeforeEach + @Throws(Exception::class) + fun setup() { + dbName = Strings.addRandomSuffix("db", "_", 10) + + val config = getConfig(PSQL_DB, dbName) + + val initScriptName = "init_$dbName.sql" + val tmpFilePath = IOs.writeFileToRandomTmpDir(initScriptName, "CREATE DATABASE $dbName;") + PostgreSQLContainerHelper.runSqlScript(MountableFile.forHostPath(tmpFilePath), PSQL_DB) + + dataSource = + DataSourceFactory.create( + config[JdbcUtils.USERNAME_KEY].asText(), + config[JdbcUtils.PASSWORD_KEY].asText(), + DatabaseDriver.POSTGRESQL.driverClassName, + String.format( + DatabaseDriver.POSTGRESQL.urlFormatString, + config[JdbcUtils.HOST_KEY].asText(), + config[JdbcUtils.PORT_KEY].asInt(), + config[JdbcUtils.DATABASE_KEY].asText() + ) + ) + + val defaultJdbcDatabase: JdbcDatabase = DefaultJdbcDatabase(dataSource) + + defaultJdbcDatabase.execute { connection: Connection -> + connection + .createStatement() + .execute("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));") + connection + .createStatement() + .execute( + "INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');" + ) + } + } + + private fun getConfig(psqlDb: PostgreSQLContainer<*>?, dbName: String?): JsonNode { + return Jsons.jsonNode( + ImmutableMap.builder() + .put(JdbcUtils.HOST_KEY, psqlDb!!.host) + .put(JdbcUtils.PORT_KEY, psqlDb.firstMappedPort) + .put(JdbcUtils.DATABASE_KEY, dbName) + .put(JdbcUtils.USERNAME_KEY, psqlDb.username) + .put(JdbcUtils.PASSWORD_KEY, psqlDb.password) + .build() + ) + } + + // Takes in a generic sslValue because useSsl maps sslValue to a boolean + private fun getConfigWithSsl( + psqlDb: PostgreSQLContainer<*>?, + dbName: String?, + sslValue: T + ): JsonNode { + return Jsons.jsonNode( + ImmutableMap.builder() + .put("host", psqlDb!!.host) + .put("port", psqlDb.firstMappedPort) + .put("database", dbName) + .put("username", psqlDb.username) + .put("password", psqlDb.password) + .put("ssl", sslValue) + .build() + ) + } + + @Test + @Throws(SQLException::class) + fun testRowToJson() { + dataSource!!.connection.use { connection -> + val rs = connection.createStatement().executeQuery("SELECT * FROM id_and_name;") + rs.next() + Assertions.assertEquals(RECORDS_AS_JSON[0], sourceOperations.rowToJson(rs)) + } + } + + @Test + @Throws(SQLException::class) + fun testToStream() { + dataSource!!.connection.use { connection -> + val rs = connection.createStatement().executeQuery("SELECT * FROM id_and_name;") + val actual = + JdbcDatabase.toUnsafeStream(rs) { queryContext: ResultSet -> + sourceOperations.rowToJson(queryContext) + } + .collect(Collectors.toList()) + Assertions.assertEquals(RECORDS_AS_JSON, actual) + } + } + + // test conversion of every JDBCType that we support to Json. + @Test + @Throws(SQLException::class) + fun testSetJsonField() { + dataSource!!.connection.use { connection -> + createTableWithAllTypes(connection) + insertRecordOfEachType(connection) + assertExpectedOutputValues(connection, jsonFieldExpectedValues()) + assertExpectedOutputTypes(connection) + } + } + + // test setting on a PreparedStatement every JDBCType that we support. + @Test + @Throws(SQLException::class) + fun testSetStatementField() { + dataSource!!.connection.use { connection -> + createTableWithAllTypes(connection) + val ps = + connection.prepareStatement( + "INSERT INTO data VALUES(?::bit,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);" + ) + + // insert the bit here to stay consistent even though setStatementField does not support + // it yet. + ps.setString(1, "1") + sourceOperations.setCursorField(ps, 2, JDBCType.BOOLEAN, "true") + sourceOperations.setCursorField(ps, 3, JDBCType.SMALLINT, "1") + sourceOperations.setCursorField(ps, 4, JDBCType.INTEGER, "1") + sourceOperations.setCursorField(ps, 5, JDBCType.BIGINT, "1") + sourceOperations.setCursorField(ps, 6, JDBCType.FLOAT, "1.0") + sourceOperations.setCursorField(ps, 7, JDBCType.DOUBLE, "1.0") + sourceOperations.setCursorField(ps, 8, JDBCType.REAL, "1.0") + sourceOperations.setCursorField(ps, 9, JDBCType.NUMERIC, "1") + sourceOperations.setCursorField(ps, 10, JDBCType.DECIMAL, "1") + sourceOperations.setCursorField(ps, 11, JDBCType.CHAR, "a") + sourceOperations.setCursorField(ps, 12, JDBCType.VARCHAR, "a") + sourceOperations.setCursorField(ps, 13, JDBCType.DATE, "2020-11-01") + sourceOperations.setCursorField(ps, 14, JDBCType.TIME, "05:00:00.000") + sourceOperations.setCursorField(ps, 15, JDBCType.TIMESTAMP, "2001-09-29T03:00:00.000") + sourceOperations.setCursorField(ps, 16, JDBCType.BINARY, "61616161") + + ps.execute() + + assertExpectedOutputValues(connection, expectedValues()) + assertExpectedOutputTypes(connection) + } + } + + @Test + fun testUseSslWithSslNotSet() { + val config = getConfig(PSQL_DB, dbName) + val sslSet = JdbcUtils.useSsl(config) + Assertions.assertTrue(sslSet) + } + + @Test + fun testUseSslWithSslSetAndValueStringFalse() { + val config = getConfigWithSsl(PSQL_DB, dbName, "false") + val sslSet = JdbcUtils.useSsl(config) + Assertions.assertFalse(sslSet) + } + + @Test + fun testUseSslWithSslSetAndValueIntegerFalse() { + val config = getConfigWithSsl(PSQL_DB, dbName, 0) + val sslSet = JdbcUtils.useSsl(config) + Assertions.assertFalse(sslSet) + } + + @Test + fun testUseSslWithSslSetAndValueStringTrue() { + val config = getConfigWithSsl(PSQL_DB, dbName, "true") + val sslSet = JdbcUtils.useSsl(config) + Assertions.assertTrue(sslSet) + } + + @Test + fun testUssSslWithSslSetAndValueIntegerTrue() { + val config = getConfigWithSsl(PSQL_DB, dbName, 3) + val sslSet = JdbcUtils.useSsl(config) + Assertions.assertTrue(sslSet) + } + + @Test + fun testUseSslWithEmptySslKeyAndSslModeVerifyFull() { + val config = + Jsons.jsonNode( + ImmutableMap.builder() + .put("host", PSQL_DB.host) + .put("port", PSQL_DB.firstMappedPort) + .put("database", dbName) + .put("username", PSQL_DB.username) + .put("password", PSQL_DB.password) + .put( + "ssl_mode", + ImmutableMap.builder() + .put("mode", "verify-full") + .put("ca_certificate", "test_ca_cert") + .put("client_certificate", "test_client_cert") + .put("client_key", "test_client_key") + .put("client_key_password", "test_pass") + .build() + ) + .build() + ) + val sslSet = JdbcUtils.useSsl(config) + Assertions.assertTrue(sslSet) + } + + @Test + fun testUseSslWithEmptySslKeyAndSslModeDisable() { + val config = + Jsons.jsonNode( + ImmutableMap.builder() + .put("host", PSQL_DB.host) + .put("port", PSQL_DB.firstMappedPort) + .put("database", dbName) + .put("username", PSQL_DB.username) + .put("password", PSQL_DB.password) + .put( + "ssl_mode", + ImmutableMap.builder().put("mode", "disable").build() + ) + .build() + ) + val sslSet = JdbcUtils.useSsl(config) + Assertions.assertFalse(sslSet) + } + + private fun jsonFieldExpectedValues(): ObjectNode { + val expected = expectedValues() + val arrayNode = ObjectMapper().createArrayNode() + arrayNode.add("one") + arrayNode.add("two") + arrayNode.add("three") + expected.set("text_array", arrayNode) + + val arrayNode2 = ObjectMapper().createArrayNode() + arrayNode2.add("1") + arrayNode2.add("2") + arrayNode2.add("3") + expected.set("int_array", arrayNode2) + + expected.set("binary1", BinaryNode("aaaa".toByteArray(Charsets.UTF_8))) + + return expected + } + + private fun expectedValues(): ObjectNode { + val expected = Jsons.jsonNode(emptyMap()) as ObjectNode + expected.put("bit", true) + expected.put("boolean", true) + expected.put("smallint", 1.toShort()) + expected.put("int", 1) + expected.put("bigint", 1L) + expected.put("float", 1.0) + expected.put("double", 1.0) + expected.put("real", 1.0.toFloat()) + expected.put("numeric", BigDecimal(1)) + expected.put("decimal", BigDecimal(1)) + expected.put("char", "a") + expected.put("varchar", "a") + expected.put("date", "2020-11-01") + expected.put("time", "05:00:00.000000") + expected.put("timestamp", "2001-09-29T03:00:00.000000") + expected.put("binary1", Base64.decode("61616161".toByteArray(Charsets.UTF_8))) + return expected + } + + @ParameterizedTest + @CsvSource( + "'3E+1', 30", + "'30', 30", + "'999000000000', 999000000000", + "'999E+9', 999000000000", + "'1.79E+3', 1790" + ) + @Throws(SQLException::class) + fun testSetStatementSpecialValues(colValue: String, value: Long) { + dataSource!!.connection.use { connection -> + createTableWithAllTypes(connection) + val ps = connection.prepareStatement("INSERT INTO data(bigint) VALUES(?);") + + // insert the bit here to stay consistent even though setStatementField does not support + // it yet. + sourceOperations.setCursorField(ps, 1, JDBCType.BIGINT, colValue) + ps.execute() + + assertExpectedOutputValues( + connection, + (Jsons.jsonNode(emptyMap()) as ObjectNode).put("bigint", value) + ) + assertExpectedOutputTypes(connection) + } + } + + companion object { + private const val ONE_POINT_0 = "1.0," + + private val RECORDS_AS_JSON: List = + Lists.newArrayList( + Jsons.jsonNode(ImmutableMap.of("id", 1, "name", "picard")), + Jsons.jsonNode(ImmutableMap.of("id", 2, "name", "crusher")), + Jsons.jsonNode(ImmutableMap.of("id", 3, "name", "vash")) + ) + + private lateinit var PSQL_DB: PostgreSQLContainer + + private val sourceOperations: JdbcSourceOperations = JdbcUtils.defaultSourceOperations + + @JvmStatic + @BeforeAll + fun init(): Unit { + PSQL_DB = PostgreSQLContainer("postgres:13-alpine") + PSQL_DB.start() + } + + @Throws(SQLException::class) + private fun createTableWithAllTypes(connection: Connection) { + // jdbctype not included because they are not directly supported in postgres: TINYINT, + // LONGVARCHAR, + // VARBINAR, LONGVARBINARY + connection + .createStatement() + .execute( + "CREATE TABLE data(" + + "bit BIT, " + + "boolean BOOLEAN, " + + "smallint SMALLINT," + + "int INTEGER," + + "bigint BIGINT," + + "float FLOAT," + + "double DOUBLE PRECISION," + + "real REAL," + + "numeric NUMERIC," + + "decimal DECIMAL," + + "char CHAR," + + "varchar VARCHAR," + + "date DATE," + + "time TIME," + + "timestamp TIMESTAMP," + + "binary1 bytea," + + "text_array _text," + + "int_array int[]" + + ");" + ) + } + + @Throws(SQLException::class) + private fun insertRecordOfEachType(connection: Connection) { + connection + .createStatement() + .execute( + "INSERT INTO data(" + + "bit," + + "boolean," + + "smallint," + + "int," + + "bigint," + + "float," + + "double," + + "real," + + "numeric," + + "decimal," + + "char," + + "varchar," + + "date," + + "time," + + "timestamp," + + "binary1," + + "text_array," + + "int_array" + + ") VALUES(" + + "1::bit(1)," + + "true," + + "1," + + "1," + + "1," + + ONE_POINT_0 + + ONE_POINT_0 + + ONE_POINT_0 + + "1," + + ONE_POINT_0 + + "'a'," + + "'a'," + + "'2020-11-01'," + + "'05:00'," + + "'2001-09-29 03:00'," + + "decode('61616161', 'hex')," + + "'{one,two,three}'," + + "'{1,2,3}'" + + ");" + ) + } + + @Throws(SQLException::class) + private fun assertExpectedOutputValues(connection: Connection, expected: ObjectNode) { + val resultSet = connection.createStatement().executeQuery("SELECT * FROM data;") + + resultSet.next() + val actual = sourceOperations.rowToJson(resultSet) + + // field-wise comparison to make debugging easier. + MoreStreams.toStream(expected.fields()).forEach { e: Map.Entry -> + Assertions.assertEquals(e.value, actual[e.key], "key: " + e.key) + } + Assertions.assertEquals(expected, actual) + } + + @Throws(SQLException::class) + private fun assertExpectedOutputTypes(connection: Connection) { + val resultSet = connection.createStatement().executeQuery("SELECT * FROM data;") + + resultSet.next() + val columnCount = resultSet.metaData.columnCount + val actual: MutableMap = HashMap(columnCount) + for (i in 1..columnCount) { + actual[resultSet.metaData.getColumnName(i)] = + sourceOperations.getAirbyteType( + JDBCType.valueOf(resultSet.metaData.getColumnType(i)) + ) + } + + val expected: Map = + ImmutableMap.builder() + .put("bit", JsonSchemaType.BOOLEAN) + .put("boolean", JsonSchemaType.BOOLEAN) + .put("smallint", JsonSchemaType.INTEGER) + .put("int", JsonSchemaType.INTEGER) + .put("bigint", JsonSchemaType.INTEGER) + .put("float", JsonSchemaType.NUMBER) + .put("double", JsonSchemaType.NUMBER) + .put("real", JsonSchemaType.NUMBER) + .put("numeric", JsonSchemaType.NUMBER) + .put("decimal", JsonSchemaType.NUMBER) + .put("char", JsonSchemaType.STRING) + .put("varchar", JsonSchemaType.STRING) + .put("date", JsonSchemaType.STRING) + .put("time", JsonSchemaType.STRING) + .put("timestamp", JsonSchemaType.STRING) + .put("binary1", JsonSchemaType.STRING_BASE_64) + .put("text_array", JsonSchemaType.ARRAY) + .put("int_array", JsonSchemaType.ARRAY) + .build() + + Assertions.assertEquals(actual, expected) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/jdbc/TestStreamingJdbcDatabase.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/jdbc/TestStreamingJdbcDatabase.kt new file mode 100644 index 0000000000000..d389d9a07615b --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/jdbc/TestStreamingJdbcDatabase.kt @@ -0,0 +1,180 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db.jdbc + +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.collect.ImmutableMap +import com.google.common.collect.Lists +import io.airbyte.cdk.db.factory.DataSourceFactory +import io.airbyte.cdk.db.factory.DatabaseDriver +import io.airbyte.cdk.db.jdbc.streaming.AdaptiveStreamingQueryConfig +import io.airbyte.cdk.db.jdbc.streaming.FetchSizeConstants +import io.airbyte.cdk.testutils.PostgreSQLContainerHelper +import io.airbyte.commons.io.IOs +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.string.Strings +import java.sql.* +import java.util.Map +import java.util.concurrent.atomic.AtomicReference +import org.junit.jupiter.api.* +import org.mockito.Mockito +import org.testcontainers.containers.PostgreSQLContainer +import org.testcontainers.utility.MountableFile + +@TestMethodOrder(MethodOrderer.OrderAnnotation::class) +internal class TestStreamingJdbcDatabase { + private val sourceOperations: JdbcSourceOperations = JdbcUtils.defaultSourceOperations + private var defaultJdbcDatabase: JdbcDatabase? = null + private var streamingJdbcDatabase: JdbcDatabase? = null + + @BeforeEach + fun setup() { + val dbName = Strings.addRandomSuffix("db", "_", 10) + + val config = getConfig(PSQL_DB, dbName) + + val initScriptName = "init_$dbName.sql" + val tmpFilePath = IOs.writeFileToRandomTmpDir(initScriptName, "CREATE DATABASE $dbName;") + PostgreSQLContainerHelper.runSqlScript(MountableFile.forHostPath(tmpFilePath), PSQL_DB) + + val connectionPool = + DataSourceFactory.create( + config[JdbcUtils.USERNAME_KEY].asText(), + config[JdbcUtils.PASSWORD_KEY].asText(), + DatabaseDriver.POSTGRESQL.driverClassName, + String.format( + DatabaseDriver.POSTGRESQL.urlFormatString, + config[JdbcUtils.HOST_KEY].asText(), + config[JdbcUtils.PORT_KEY].asInt(), + config[JdbcUtils.DATABASE_KEY].asText() + ) + ) + + defaultJdbcDatabase = Mockito.spy(DefaultJdbcDatabase(connectionPool)) + streamingJdbcDatabase = + StreamingJdbcDatabase(connectionPool, JdbcUtils.defaultSourceOperations) { -> + AdaptiveStreamingQueryConfig() + } + } + + @Test + @Order(1) + @Throws(SQLException::class) + fun testQuery() { + defaultJdbcDatabase!!.execute { connection: Connection -> + connection + .createStatement() + .execute( + """ + DROP TABLE IF EXISTS id_and_name; + CREATE TABLE id_and_name (id INTEGER, name VARCHAR(200)); + INSERT INTO id_and_name (id, name) VALUES (1, 'picard'), (2, 'crusher'), (3, 'vash'); + + """.trimIndent() + ) + } + + // grab references to connection and prepared statement, so we can verify the streaming + // config is + // invoked. + val connection1 = AtomicReference() + val ps1 = AtomicReference() + val actual = + streamingJdbcDatabase!!.queryJsons( + { connection: Connection -> + connection1.set(connection) + val ps = connection.prepareStatement("SELECT * FROM id_and_name;") + ps1.set(ps) + ps + }, + { queryContext: ResultSet -> sourceOperations.rowToJson(queryContext) } + ) + val expectedRecords: List = + Lists.newArrayList( + Jsons.jsonNode(Map.of("id", 1, "name", "picard")), + Jsons.jsonNode(Map.of("id", 2, "name", "crusher")), + Jsons.jsonNode(Map.of("id", 3, "name", "vash")) + ) + Assertions.assertEquals(expectedRecords, actual) + } + + /** + * Test stream querying a table with 20 rows. Each row is 10 MB large. The table in this test + * must contain more than `FetchSizeConstants.INITIAL_SAMPLE_SIZE` rows. Otherwise, all rows + * will be fetched in the first fetch, the fetch size won't be adjusted, and the test will fail. + */ + @Order(2) + @Test + @Throws(SQLException::class) + fun testLargeRow() { + defaultJdbcDatabase!!.execute { connection: Connection -> + connection + .createStatement() + .execute( + """ + DROP TABLE IF EXISTS id_and_name; + CREATE TABLE id_and_name (id INTEGER, name TEXT); + INSERT INTO id_and_name SELECT id, repeat('a', 10485760) as name from generate_series(1, 20) as id; + + """.trimIndent() + ) + } + + val connection1 = AtomicReference() + val ps1 = AtomicReference() + val fetchSizes: MutableSet = HashSet() + val actual = + streamingJdbcDatabase!!.queryJsons( + { connection: Connection -> + connection1.set(connection) + val ps = connection.prepareStatement("SELECT * FROM id_and_name;") + ps1.set(ps) + ps + }, + { resultSet: ResultSet -> + fetchSizes.add(resultSet.fetchSize) + sourceOperations.rowToJson(resultSet) + } + ) + Assertions.assertEquals(20, actual.size) + + // Two fetch sizes should be set on the result set, one is the initial sample size, + // and the other is smaller than the initial value because of the large row. + // This check assumes that FetchSizeConstants.TARGET_BUFFER_BYTE_SIZE = 200 MB. + // Update this check if the buffer size constant is changed. + Assertions.assertEquals(2, fetchSizes.size) + val sortedSizes = fetchSizes.stream().sorted().toList() + Assertions.assertTrue(sortedSizes[0] < FetchSizeConstants.INITIAL_SAMPLE_SIZE) + Assertions.assertEquals(FetchSizeConstants.INITIAL_SAMPLE_SIZE, sortedSizes[1]) + } + + private fun getConfig(psqlDb: PostgreSQLContainer<*>?, dbName: String): JsonNode { + return Jsons.jsonNode( + ImmutableMap.builder() + .put(JdbcUtils.HOST_KEY, psqlDb!!.host) + .put(JdbcUtils.PORT_KEY, psqlDb.firstMappedPort) + .put(JdbcUtils.DATABASE_KEY, dbName) + .put(JdbcUtils.USERNAME_KEY, psqlDb.username) + .put(JdbcUtils.PASSWORD_KEY, psqlDb.password) + .build() + ) + } + + companion object { + private lateinit var PSQL_DB: PostgreSQLContainer + + @JvmStatic + @BeforeAll + fun init(): Unit { + PSQL_DB = PostgreSQLContainer("postgres:13-alpine") + PSQL_DB.start() + } + + @JvmStatic + @AfterAll + fun cleanUp(): Unit { + PSQL_DB.close() + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/jdbc/streaming/AdaptiveStreamingQueryConfigTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/jdbc/streaming/AdaptiveStreamingQueryConfigTest.kt new file mode 100644 index 0000000000000..cc7ae97430936 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/jdbc/streaming/AdaptiveStreamingQueryConfigTest.kt @@ -0,0 +1,27 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db.jdbc.streaming + +import java.sql.ResultSet +import java.sql.SQLException +import joptsimple.internal.Strings +import org.junit.jupiter.api.Test +import org.mockito.ArgumentMatchers +import org.mockito.Mockito + +internal class AdaptiveStreamingQueryConfigTest { + @Test + @Throws(SQLException::class) + fun testFetchSizeUpdate() { + val queryConfig = AdaptiveStreamingQueryConfig() + val resultSet = Mockito.mock(ResultSet::class.java) + for (i in 0 until FetchSizeConstants.INITIAL_SAMPLE_SIZE - 1) { + queryConfig.accept(resultSet, Strings.repeat(Character.forDigit(i, 10), i + 1)) + Mockito.verify(resultSet, Mockito.never()).fetchSize = ArgumentMatchers.anyInt() + } + queryConfig.accept(resultSet, "final sampling in the initial stage") + Mockito.verify(resultSet, Mockito.times(1)).fetchSize = ArgumentMatchers.anyInt() + queryConfig.accept(resultSet, "abcd") + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/jdbc/streaming/BaseSizeEstimatorTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/jdbc/streaming/BaseSizeEstimatorTest.kt new file mode 100644 index 0000000000000..f826a94451773 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/jdbc/streaming/BaseSizeEstimatorTest.kt @@ -0,0 +1,73 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db.jdbc.streaming + +import io.airbyte.commons.json.Jsons +import java.util.* +import java.util.Map +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class BaseSizeEstimatorTest { + @Test + fun testGetEstimatedByteSize() { + Assertions.assertEquals(0L, BaseSizeEstimator.getEstimatedByteSize(null)) + Assertions.assertEquals(21L, BaseSizeEstimator.getEstimatedByteSize("12345")) + Assertions.assertEquals( + 45L, + BaseSizeEstimator.getEstimatedByteSize(Jsons.jsonNode(Map.of("key", "value"))) + ) + } + + class TestSizeEstimator( + bufferByteSize: Long, + minFetchSize: Int, + defaultFetchSize: Int, + maxFetchSize: Int + ) : BaseSizeEstimator(bufferByteSize, minFetchSize, defaultFetchSize, maxFetchSize) { + override val fetchSize: Optional = Optional.empty() + + override fun accept(o: Any) {} + + fun setMeanByteSize(meanByteSize: Double) { + this.maxRowByteSize = meanByteSize + } + } + + @Test + fun testGetBoundedFetchSize() { + val bufferByteSize: Long = 120 + val minFetchSize = 10 + val defaultFetchSize = 20 + val maxFetchSize = 40 + val sizeEstimator = + TestSizeEstimator(bufferByteSize, minFetchSize, defaultFetchSize, maxFetchSize) + + sizeEstimator.setMeanByteSize(-1.0) + Assertions.assertEquals(defaultFetchSize, sizeEstimator.boundedFetchSize) + + sizeEstimator.setMeanByteSize(0.0) + Assertions.assertEquals(defaultFetchSize, sizeEstimator.boundedFetchSize) + + // fetch size = 5 < min fetch size + sizeEstimator.setMeanByteSize(bufferByteSize / 5.0) + Assertions.assertEquals(minFetchSize, sizeEstimator.boundedFetchSize) + + // fetch size = 10 within [min fetch size, max fetch size] + sizeEstimator.setMeanByteSize(bufferByteSize / 10.0) + Assertions.assertEquals(10, sizeEstimator.boundedFetchSize) + + // fetch size = 30 within [min fetch size, max fetch size] + sizeEstimator.setMeanByteSize(bufferByteSize / 30.0) + Assertions.assertEquals(30, sizeEstimator.boundedFetchSize) + + // fetch size = 40 within [min fetch size, max fetch size] + sizeEstimator.setMeanByteSize(bufferByteSize / 40.0) + Assertions.assertEquals(40, sizeEstimator.boundedFetchSize) + + // fetch size = 60 > max fetch size + sizeEstimator.setMeanByteSize(bufferByteSize / 60.0) + Assertions.assertEquals(maxFetchSize, sizeEstimator.boundedFetchSize) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/jdbc/streaming/InitialSizeEstimatorTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/jdbc/streaming/InitialSizeEstimatorTest.kt new file mode 100644 index 0000000000000..fe258dd3a6161 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/jdbc/streaming/InitialSizeEstimatorTest.kt @@ -0,0 +1,49 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db.jdbc.streaming + +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class InitialSizeEstimatorTest { + @Test + fun testIt() { + val bufferByteSize: Long = 120 + val initialSampleSize = 5 + val minFetchSize = 1 + val defaultFetchSize = 20 + val maxFetchSize = 120 + val sizeEstimator = + InitialSizeEstimator( + bufferByteSize, + initialSampleSize, + minFetchSize, + defaultFetchSize, + maxFetchSize + ) + + // size: 3 * 4 = 12 + sizeEstimator.accept("1") + Assertions.assertFalse(sizeEstimator.fetchSize.isPresent) + // size: 4 * 4 = 16 + sizeEstimator.accept("11") + Assertions.assertFalse(sizeEstimator.fetchSize.isPresent) + // size: 5 * 4 = 20 + sizeEstimator.accept("111") + Assertions.assertFalse(sizeEstimator.fetchSize.isPresent) + // size: 6 * 4 = 24 + sizeEstimator.accept("1111") + Assertions.assertFalse(sizeEstimator.fetchSize.isPresent) + // size: 7 * 4 = 28, fetch size is available + sizeEstimator.accept("11111") + val fetchSize = sizeEstimator.fetchSize + Assertions.assertTrue(fetchSize.isPresent) + val expectedMaxByteSize = 21L + Assertions.assertEquals(expectedMaxByteSize, Math.round(sizeEstimator.maxRowByteSize)) + Assertions.assertEquals( + (bufferByteSize / expectedMaxByteSize) + 1, + fetchSize.get().toLong() + ) // + 1 needed for int remainder rounding + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/jdbc/streaming/SamplingSizeEstimatorTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/jdbc/streaming/SamplingSizeEstimatorTest.kt new file mode 100644 index 0000000000000..30fc3343c125e --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/jdbc/streaming/SamplingSizeEstimatorTest.kt @@ -0,0 +1,71 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db.jdbc.streaming + +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class SamplingSizeEstimatorTest { + @Test + fun testIt() { + val bufferByteSize: Long = 120 + val sampleFrequency = 3 + val initialByteSize: Long = 10 + val minFetchSize = 1 + val defaultFetchSize = 20 + val maxFetchSize = 120 + val sizeEstimator = + SamplingSizeEstimator( + bufferByteSize, + sampleFrequency, + initialByteSize.toDouble(), + minFetchSize, + defaultFetchSize, + maxFetchSize + ) + + var maxByteSize = initialByteSize.toDouble() + + // size: 3 * 3 = 12, not sampled + sizeEstimator.accept("1") + Assertions.assertFalse(sizeEstimator.fetchSize.isPresent) + Assertions.assertEquals(maxByteSize, sizeEstimator.maxRowByteSize) + + // size: 4 * 3 = 16, not sampled + sizeEstimator.accept("11") + Assertions.assertFalse(sizeEstimator.fetchSize.isPresent) + Assertions.assertEquals(maxByteSize, sizeEstimator.maxRowByteSize) + + // size: 5 * 3 = 15, sampled, fetch size is ready + sizeEstimator.accept("111") + val fetchSize1 = sizeEstimator.fetchSize + maxByteSize = 15.0 + assertDoubleEquals(15.0, sizeEstimator.maxRowByteSize) + assertDoubleEquals(bufferByteSize / maxByteSize, fetchSize1.get().toDouble()) + + // size: 6 * 3 = 24, not sampled + sizeEstimator.accept("1111") + Assertions.assertFalse(sizeEstimator.fetchSize.isPresent) + assertDoubleEquals(maxByteSize, sizeEstimator.maxRowByteSize) + + // size: 7 * 3 = 28, not sampled + sizeEstimator.accept("11111") + Assertions.assertFalse(sizeEstimator.fetchSize.isPresent) + assertDoubleEquals(maxByteSize, sizeEstimator.maxRowByteSize) + + // size: 8 * 3 = 24, sampled, fetch size is ready + sizeEstimator.accept("111111") + val fetchSize2 = sizeEstimator.fetchSize + Assertions.assertTrue(fetchSize2.isPresent) + maxByteSize = 24.0 + assertDoubleEquals(maxByteSize, sizeEstimator.maxRowByteSize) + assertDoubleEquals(bufferByteSize / maxByteSize, fetchSize2.get().toDouble()) + } + + companion object { + private fun assertDoubleEquals(expected: Double, actual: Double) { + Assertions.assertEquals(Math.round(expected), Math.round(actual)) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/jdbc/streaming/TwoStageSizeEstimatorTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/jdbc/streaming/TwoStageSizeEstimatorTest.kt new file mode 100644 index 0000000000000..3a4c120f74536 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/jdbc/streaming/TwoStageSizeEstimatorTest.kt @@ -0,0 +1,41 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db.jdbc.streaming + +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class TwoStageSizeEstimatorTest { + @Test + fun testDelegationSwitch() { + val sizeEstimator = TwoStageSizeEstimator.instance + for (i in 0 until FetchSizeConstants.INITIAL_SAMPLE_SIZE) { + sizeEstimator.accept("1") + Assertions.assertTrue(sizeEstimator.delegate is InitialSizeEstimator) + } + // delegation is changed after initial sampling + for (i in 0..2) { + sizeEstimator.accept("1") + Assertions.assertTrue(sizeEstimator.delegate is SamplingSizeEstimator) + } + } + + @Test + fun testGetTargetBufferByteSize() { + Assertions.assertEquals( + FetchSizeConstants.MIN_BUFFER_BYTE_SIZE, + TwoStageSizeEstimator.getTargetBufferByteSize(null) + ) + Assertions.assertEquals( + FetchSizeConstants.MIN_BUFFER_BYTE_SIZE, + TwoStageSizeEstimator.getTargetBufferByteSize(Long.MAX_VALUE) + ) + Assertions.assertEquals( + FetchSizeConstants.MIN_BUFFER_BYTE_SIZE, + TwoStageSizeEstimator.getTargetBufferByteSize( + FetchSizeConstants.MIN_BUFFER_BYTE_SIZE - 10L + ) + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/util/SSLCertificateUtilsTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/util/SSLCertificateUtilsTest.kt new file mode 100644 index 0000000000000..93f65ff4d2c33 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/db/util/SSLCertificateUtilsTest.kt @@ -0,0 +1,302 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.db.util + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings +import java.io.IOException +import java.nio.file.FileSystem +import java.nio.file.Files +import java.nio.file.Path +import java.security.KeyStore +import java.security.KeyStoreException +import java.security.NoSuchAlgorithmException +import java.security.cert.CertificateException +import java.security.spec.InvalidKeySpecException +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class SSLCertificateUtilsTest { + @Throws( + CertificateException::class, + IOException::class, + KeyStoreException::class, + NoSuchAlgorithmException::class + ) + fun testkeyStoreFromCertificateInternal( + certString: String, + pwd: String, + fs: FileSystem?, + directory: String? + ) { + val ksUri = SSLCertificateUtils.keyStoreFromCertificate(certString, pwd, fs, directory) + + val ks = KeyStore.getInstance("PKCS12") + val inputStream = Files.newInputStream(Path.of(ksUri)) + ks.load(inputStream, pwd.toCharArray()) + Assertions.assertEquals(1, ks.size()) + Files.delete(Path.of(ksUri)) + } + + @Test + @Throws( + CertificateException::class, + IOException::class, + KeyStoreException::class, + NoSuchAlgorithmException::class + ) + fun testkeyStoreFromCertificate() { + testkeyStoreFromCertificateInternal(caPem, KEY_STORE_PASSWORD, null, SLASH_TMP) + + val exception: Exception = + Assertions.assertThrows(CertificateException::class.java) { + testkeyStoreFromCertificateInternal(caPem_Bad, KEY_STORE_PASSWORD, null, SLASH_TMP) + } + Assertions.assertNotNull(exception) + } + + @Test + @Throws( + CertificateException::class, + IOException::class, + KeyStoreException::class, + NoSuchAlgorithmException::class + ) + fun testkeyStoreFromCertificateInMemory() { + testkeyStoreFromCertificateInternal(caPem, KEY_STORE_PASSWORD2, null, null) + + val exception: Exception = + Assertions.assertThrows(CertificateException::class.java) { + testkeyStoreFromCertificateInternal(caPem_Bad, KEY_STORE_PASSWORD, null, null) + } + Assertions.assertNotNull(exception) + } + + @SuppressFBWarnings("HARD_CODE_PASSWORD") + @Throws( + KeyStoreException::class, + IOException::class, + CertificateException::class, + NoSuchAlgorithmException::class, + InvalidKeySpecException::class, + InterruptedException::class + ) + fun testKeyStoreFromClientCertificateInternal( + certString: String, + keyString: String, + keyStorePassword: String, + filesystem: FileSystem?, + directory: String? + ) { + val ksUri = + SSLCertificateUtils.keyStoreFromClientCertificate( + certString, + keyString, + keyStorePassword, + filesystem, + directory + ) + val ks = KeyStore.getInstance("PKCS12") + val inputStream = Files.newInputStream(Path.of(ksUri)) + ks.load(inputStream, KEY_STORE_PASSWORD.toCharArray()) + Assertions.assertTrue(ks.isKeyEntry(SSLCertificateUtils.KEYSTORE_ENTRY_PREFIX)) + Assertions.assertFalse(ks.isKeyEntry("cd_")) + Assertions.assertEquals(1, ks.size()) + Files.delete(Path.of(ksUri)) + } + + @Test + @Throws( + CertificateException::class, + IOException::class, + NoSuchAlgorithmException::class, + InvalidKeySpecException::class, + KeyStoreException::class, + InterruptedException::class + ) + fun testKeyStoreFromClientCertificate() { + testKeyStoreFromClientCertificateInternal( + clientPem, + clientKey, + KEY_STORE_PASSWORD, + null, + SLASH_TMP + ) + + val exceptionKey: Exception = + Assertions.assertThrows(InvalidKeySpecException::class.java) { + testKeyStoreFromClientCertificateInternal( + clientPem, + clientKey_wrong_format, + KEY_STORE_PASSWORD, + null, + SLASH_TMP + ) + } + Assertions.assertNotNull(exceptionKey) + + val exceptionCert: Exception = + Assertions.assertThrows(CertificateException::class.java) { + testKeyStoreFromClientCertificateInternal( + caPem_Bad, + clientKey, + KEY_STORE_PASSWORD, + null, + SLASH_TMP + ) + } + Assertions.assertNotNull(exceptionCert) + } + + @Test + @Throws( + CertificateException::class, + IOException::class, + NoSuchAlgorithmException::class, + InvalidKeySpecException::class, + KeyStoreException::class, + InterruptedException::class + ) + fun testKeyStoreFromClientCertificateInMemory() { + testKeyStoreFromClientCertificateInternal( + clientPem, + clientKey, + KEY_STORE_PASSWORD, + null, + null + ) + + val exceptionKey: Exception = + Assertions.assertThrows(InvalidKeySpecException::class.java) { + testKeyStoreFromClientCertificateInternal( + clientPem, + clientKey_wrong_format, + KEY_STORE_PASSWORD, + null, + null + ) + } + Assertions.assertNotNull(exceptionKey) + + val exceptionCert: Exception = + Assertions.assertThrows(CertificateException::class.java) { + testKeyStoreFromClientCertificateInternal( + caPem_Bad, + clientKey, + KEY_STORE_PASSWORD, + null, + null + ) + } + Assertions.assertNotNull(exceptionCert) + } + + companion object { + private const val SLASH_TMP = "/tmp" + private const val KEY_STORE_PASSWORD = "123456" + private const val KEY_STORE_PASSWORD2 = "78910" + const val caPem: String = + ("-----BEGIN CERTIFICATE-----\n" + + "MIIDAzCCAeugAwIBAgIBATANBgkqhkiG9w0BAQsFADA8MTowOAYDVQQDDDFNeVNR\n" + + "TF9TZXJ2ZXJfOC4wLjMwX0F1dG9fR2VuZXJhdGVkX0NBX0NlcnRpZmljYXRlMB4X\n" + + "DTIyMDgwODA1NDMwOFoXDTMyMDgwNTA1NDMwOFowPDE6MDgGA1UEAwwxTXlTUUxf\n" + + "U2VydmVyXzguMC4zMF9BdXRvX0dlbmVyYXRlZF9DQV9DZXJ0aWZpY2F0ZTCCASIw\n" + + "DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKb2tDaE4TO/4xKRZ0QqpB4ho3cy\n" + + "daw85Sn8VNLa42EJgZVpSr0WCFl11Go7r0O2TMvceaWsnJU7FLhYHSR+Dlm62yVO\n" + + "0DsnMOC0kUoDnjSE/PmponWnoC79fgXV7AwKxSW4LLxYlPHQb4Kb7rv+UJ3KbxZz\n" + + "zB7JEm9WQCJ/byn1/jxQtoPGvWL2csX3RFr9QNh8UgpOBQsbebeLWNgxdYda2sz3\n" + + "kJcwk754Vj1mx6iszjLP0oHZu+RuoM+xIrpDmpPNMW/0rQl6q+vCymNxaxX8+MuW\n" + + "czRJ1hjh4cVjArp8YhJCEMVaLajVkhbzYaPRsdW1NGjh+C3eZnOm5fRi35kCAwEA\n" + + "AaMQMA4wDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAWKlbtUosXVy7\n" + + "LbFEuL3c2Igs023v0mQNvtZVBl5Qpsxpc3+ybmQfksEQoPxPKmWpsnWv5Bsvt335\n" + + "/NHv1wSajHEpoyDBtF1QT2rR/kjezFpiH9AY3xwtBdZhTDlc5UBrpyv+Issn1CZF\n" + + "edcIk54Gzxifn+Et5WP8b6HV/ehdE0qQPtHDmendEaIHXg12/NE+hj3DocSVm8w/\n" + + "LUNeYd9wXefwMrEWwDn0DZSsShZmgJoppA15qOnq+FVW/bhZwRv5L4l3AJv0SGoA\n" + + "o7DXxD0VGHDA6aC4tJssZbrnoDCBPzYmt9s9GwVupuEroJHZ0Wks4pt4Wx50DUgA\n" + + "KC3v0Mo/gg==\n" + + "-----END CERTIFICATE-----\n") + + const val caPem_Bad: String = + ("-----BEGIN CERTIFICATE-----\n" + + "MIIDAzCCAeugAwIBAgIBATANBgkqhkiG9w0BAQsFADA8MTowOAYDVQQDDDFNeVNR\n" + + "TF9TZXJ2ZXJfOC4wLjMwX0F1dG9fR2VuZXJhdGVkX0NBX0NlcnRpZmljYXRlMB4X\n" + + "DTIyMDgwODA1NDMwOFoXDTMyMDgwNTA1NDMwOFowPDE6MDgGA1UEAwwxTXlTUUxf\n" + + "U2VydmVyXzguMC4zMF9BdXRvX0dlbmVyYXRlZF9DQV9DZXJ0aWZpY2F0ZTCCASIw\n" + + "DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKb2tDaE4TO/4xKRZ0QqpB4ho3cy\n" + + "daw85Sn8VNLa42EJgZVpSr0WCFl11Go7r0O2TMvceaWsnJU7FLhYHSR+Dlm62yVO\n" + + "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX\n" + + "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX\n" + + "/NHv1wSajHEpoyDBtF1QT2rR/kjezFpiH9AY3xwtBdZhTDlc5UBrpyv+Issn1CZF\n" + + "edcIk54Gzxifn+Et5WP8b6HV/ehdE0qQPtHDmendEaIHXg12/NE+hj3DocSVm8w/\n" + + "LUNeYd9wXefwMrEWwDn0DZSsShZmgJoppA15qOnq+FVW/bhZwRv5L4l3AJv0SGoA\n" + + "o7DXxD0VGHDA6aC4tJssZbrnoDCBPzYmt9s9GwVupuEroJHZ0Wks4pt4Wx50DUgA\n" + + "KC3v0Mo/gg==\n" + + "-----END CERTIFICATE-----\n") + + const val clientPem: String = + ("-----BEGIN CERTIFICATE-----\n" + + "MIIDBDCCAeygAwIBAgIBAzANBgkqhkiG9w0BAQsFADA8MTowOAYDVQQDDDFNeVNR\n" + + "TF9TZXJ2ZXJfOC4wLjMwX0F1dG9fR2VuZXJhdGVkX0NBX0NlcnRpZmljYXRlMB4X\n" + + "DTIyMDgwODA1NDMwOFoXDTMyMDgwNTA1NDMwOFowQDE+MDwGA1UEAww1TXlTUUxf\n" + + "U2VydmVyXzguMC4zMF9BdXRvX0dlbmVyYXRlZF9DbGllbnRfQ2VydGlmaWNhdGUw\n" + + "ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCV/eRPDZmrPP8d2pKsFizU\n" + + "JQkGOYDKXOilLibR1TQwN/8MToop8+mvtMi7zr/cWBDR0qTObbduWFQdK82vGppS\n" + + "ZgrRG3QWVpe8NNI9AhriVZiOmcEQqgAhbgos57Tkjy3qghNbUN1KGb3I0DnNOtvF\n" + + "RIdATbE+LxOTgCzz/Cw6DVReunQvVo9T4EC4PBBUelMWlAJLo61AQVLM3ufx4ug2\n" + + "1wbV6D/aSRooNhkwWcwk+2vabxKnOzFAQzNU7dIZlBpo6coHFwZDUxtdM2DtuLHn\n" + + "/r9CsMw8p4wtdIRXrTDmiF/xTXKnABGM8kEqPovZ6eh7He1jrzLTVANUfNQc5b8F\n" + + "AgMBAAGjDTALMAkGA1UdEwQCMAAwDQYJKoZIhvcNAQELBQADggEBAGDJ6XgLBzat\n" + + "rpLDfGHR/tZ4eFzt1Nhjzl4CyFjpUcr2e2K5XmuveJAecaQSHff2zXwfGpg/BIen\n" + + "WcPm2daIzcfN/wWg8ENMB/JE3dMq44pOmWs2g4FPDQuaH81IV0hGX4klk2XZpskJ\n" + + "moWXyGY43Xr3bbNBjyOxgBsQc4kD96ODMUKfzNMH4p9hXKAMrF9DqHwQUho5uM6M\n" + + "RnU7Uzr745xw7LKJglCgO20t4302wzsUAEPuCTcB9wJy1/cRbMmoLAdUdn6XhFb4\n" + + "pR3UDNJvXGc8by6VWrXOeB0BFeB3beMxezlTHDOWoWeJwvEfAAD/dpwHXwp5dm9L\n" + + "VjtlERcTfH8=\n" + + "-----END CERTIFICATE-----\n") + + const val clientKey: String = + ("-----BEGIN RSA PRIVATE KEY-----\n" + + "MIIEowIBAAKCAQEAlf3kTw2Zqzz/HdqSrBYs1CUJBjmAylzopS4m0dU0MDf/DE6K\n" + + "KfPpr7TIu86/3FgQ0dKkzm23blhUHSvNrxqaUmYK0Rt0FlaXvDTSPQIa4lWYjpnB\n" + + "EKoAIW4KLOe05I8t6oITW1DdShm9yNA5zTrbxUSHQE2xPi8Tk4As8/wsOg1UXrp0\n" + + "L1aPU+BAuDwQVHpTFpQCS6OtQEFSzN7n8eLoNtcG1eg/2kkaKDYZMFnMJPtr2m8S\n" + + "pzsxQEMzVO3SGZQaaOnKBxcGQ1MbXTNg7bix5/6/QrDMPKeMLXSEV60w5ohf8U1y\n" + + "pwARjPJBKj6L2enoex3tY68y01QDVHzUHOW/BQIDAQABAoIBAHk/CHyC4PKUVyHZ\n" + + "2vCy6EABRB89AogSvJkyCn1anFpSGaDoKDWrjv7S4+U1RtCme8oxPboE5N+VFUGT\n" + + "dCwVFCSBikLor1mTXAruo/hfKD5HtQ+o6HFBCuP7IMyV7RtJRnOn/F+3qXpJ/qlC\n" + + "8UaeSqNXNwHbC+jZgzibxzrfYRz3BqnBYZsSP7/piN+rk6vAGs7WeawO1adqsLS6\n" + + "Hr9GilEe+bW/CtXsah3AYVwxDnwo/c03JYBdzYkRRqLgJ9dDG/5o/88FeeKbVb+U\n" + + "ZrGV9adwa+KGwsuMTYi7pkXUosm+43hLkmYUykxFv0vfkGz8EnDh4MBtY66QMkUJ\n" + + "cQgWl6ECgYEAxVJNsxpJjEa+d737iK7ylza0GhcTI3+uNPN92u0oucictMzLIm7N\n" + + "HAUhrHoO71NDYQYJlox7BG8mjze7l6fkwGfpg2u/KsN0vIqc+F+gIQeC7kmpRxQk\n" + + "l96pxMW25VhibZJFBaDx9UeBkR9RBnI1AF3jD3+wOdua+C9CMahdTDkCgYEAwph4\n" + + "FY2gcOSpA0Xz1cOFPNuwQhy9Lh3MJsb1kt20hlTcmpp3GpBrzyggiyIlpBtBHDrP\n" + + "6FcjZtV58bv8ckKB8jklvooJkyjmowBx+L7mHZ6/7QFPDQkp/dY9dQPtWjgrPyo+\n" + + "rLIN+SoVmyKdyXXaauyjyEPAexsuxzUKq0MMIS0CgYEAirvJQYnT+DqtJAeBWKKY\n" + + "kdS2YDmlDSpyU2x3KnvgTG9OLphmojkBIRhCir/uzDngf9D84Mq4m2+CzuNCk+hJ\n" + + "nzXwKqSQ7gIqi31xy/d/4Hklh2BnEkCJUfYNqvnQFARGf/99Y+268Ndrs5svHrch\n" + + "qLZaNMV0I9nRZXnksoFLx5ECgYBJ8LFAT041V005Jy1dfit0Um2I0W64xS27VkId\n" + + "igx8NmaUgDjdaR7t2etzsofm8UwuM9KoD+QtwNPTHIDx0X+a0EgdPEojFpl8OkEU\n" + + "KUU64AVBQwwMgfzorK0xd0qKy2jzWVPzPry8flczWVXnJNbXZg9dmxDaNhvyKZ9i\n" + + "L9m+CQKBgG3kkQTtsT7k1kQt/6cqjAaBq9Koi0gbS8hWjTioqPKHVQAAEjqVkmqa\n" + + "uuD/3Knh1gCgxW4jAUokRwfM7IgVA/plQQDQaKBzcFUl94Hl+t6VuvdvtA02MboE\n" + + "7TicEc38QKFoLN2hti0Bmm1eJCionsSPiuyDYH5XnhSz7TDjV9sM\n" + + "-----END RSA PRIVATE KEY-----\n") + + const val clientKey_wrong_format: String = + ("MIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQDBmUvDIVGZ5HsRgnXKns2fTf26pfKND45xu" + + "NOEWVetpvo3lGc28vVMvtPiNH/kuELxo5NesC89iotxfbOTl4I9BbjFVg3eO1nNhwmToU2f1kJJ5QFRjFw+xacIMsfBT5xy/v9U7ohZXdEk6txYkOpvhfja" + + "JcLDutT+NtzRdBsttgyItp5fODnk02G4bLsJ68jVH1/CXkDRvxktLR0/NctbtPVuACwA1QG9MsVbH3cE7SymIrzgI8JHwud63dQUb5iQWZ0iIDBqmF95wvg" + + "ox9O4QjnZCkHxo3kuYxBPaxAuMMVTohLBH/oAvo0FJt+0XF453sLPO8x3zOUnJJLhn4VHAgMBAAECggEBALQ4UB7F1YC9ARO7rouAaUnzAE/QS4qlAKU8uS" + + "prQQOWfTdgHvU4FsHqorPgy23PWgI3k+iBenh/kG+F5LVwRP0pZmfNQ/uspFx/aJrVfb1dZzgCxsdzMiv9MxCetPVvduRWHLqjkqoee6MyPwzzWkmXHaF1p" + + "WkvczdzOvyAaQyS3UPsnQzS0kt4mELGZs/E24K9vD9KfSrdRXxkk3fsLFbLrrau/mEhQ/CKX7Xl4MBchiH+lF8kHvpAc27fevrnDPToZp2cbfSc1oeeKjIM" + + "VmYFKytTCi5IXCNG6S0H31rNpX+5VbdZc1iJLPH7Ch6J+dRzX36R+5zSmp7OIl5gAoECgYEA5f1p/umqMW91HQ+amZoIg6gldFfGglFM5IVCrn0RRB/BrgH" + + "Rnpo0jo3JaOUyQMfyDz69lkpKEgejYTPGDkz3kJmpA54rBwmFitB13ZaqhzM63VzYE3hPdCqpy1VTLxW2+T5nEbLuiR4rC2Y7z+CRBmYdQUNxSq90rCpveg" + + "XIq4sCgYEA135M0fmeBAjTiz3f2pRt7ne64WzY4jJ0SRe6BrVA6PnI9V5+wwtRzyhee9A0awzal6t0OvAdxmnAZg3PsP1fbOPeVwXbvBKtZ4rM7adv6UdYy" + + "6oxjd9eULK92YnVOcZPf595WmoK28L37EHlxjP8p6lnMBk/BF9Y3N3rz2xyNLUCgYAZ8qdczTwYa7zI1JPatJg1Umk3YRfSaB3GwootaYrjJroRSb8+p6M6" + + "WiDZJtKuoGBc+/Uj2anVsurp8o9r2Z8sv0lkURoFpztb1/0UTQVcT5lalDkEqVQ9hPq3KB9Edqy4HiQ+yPNEoRS2KoihAXMbR7YRQOytQnJlYjxFhhWH1QK" + + "BgQCNFv97FyETaSgAacGQHlCfqrqr75VM/FXQqX09+RyHrUubA4ShdV7Z8Id0L0yyrlbMqRBPqnkEOKck6nQKYMpCxCsF9Sr6R4xLV8B29YK7TOBhcIxDZH" + + "UfBvhwXuNBkYrpd2OABCAZ5NxoTnj/vXf12l9aSZ1N4pOPAKntRAa+ZQKBgQDCPgJQfZePJGOvSIkW/TkXcHpGsexb5p900Si23BLjnMtCNMSkHuIWb60xq" + + "I3vLFKhrLiYzYVQ5n3C6PYLcdfiDYwruYU3zmtr/gpg/QzcsvTe5CW/hxTAkzsZsFBOquJyuyCRBGN59tH6N6ietu8zzvCc8EeJJX7N7AX0ezF7lQ==") + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/base/AirbyteExceptionHandlerTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/base/AirbyteExceptionHandlerTest.kt new file mode 100644 index 0000000000000..2c05cb12d4727 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/base/AirbyteExceptionHandlerTest.kt @@ -0,0 +1,259 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.base + +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.AirbyteErrorTraceMessage +import io.airbyte.protocol.models.AirbyteMessage +import io.airbyte.protocol.models.AirbyteTraceMessage +import java.io.* +import java.nio.charset.StandardCharsets +import java.util.* +import lombok.SneakyThrows +import org.junit.jupiter.api.AfterEach +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.function.Executable +import org.mockito.Mockito + +class AirbyteExceptionHandlerTest { + var originalOut: PrintStream = System.out + private val outContent = ByteArrayOutputStream() + private lateinit var airbyteExceptionHandler: AirbyteExceptionHandler + + @BeforeEach + fun setup() { + System.setOut(PrintStream(outContent, true, StandardCharsets.UTF_8)) + + // mocking terminate() method in AirbyteExceptionHandler, so we don't kill the JVM + airbyteExceptionHandler = Mockito.spy(AirbyteExceptionHandler()) + Mockito.doNothing().`when`(airbyteExceptionHandler).terminate() + + AirbyteExceptionHandler.addThrowableForDeinterpolation(RuntimeException::class.java) + } + + @Test + @Throws(Exception::class) + fun testTraceMessageEmission() { + runTestWithMessage("error") + + val traceMessage = findFirstTraceMessage() + Assertions.assertAll( + Executable { + Assertions.assertEquals(AirbyteTraceMessage.Type.ERROR, traceMessage.trace.type) + }, + Executable { + Assertions.assertEquals( + AirbyteExceptionHandler.logMessage, + traceMessage.trace.error.message + ) + }, + Executable { + Assertions.assertEquals( + AirbyteErrorTraceMessage.FailureType.SYSTEM_ERROR, + traceMessage.trace.error.failureType + ) + } + ) + } + + @Test + @Throws(Exception::class) + fun testMessageDeinterpolation() { + AirbyteExceptionHandler.addStringForDeinterpolation("foo") + AirbyteExceptionHandler.addStringForDeinterpolation("bar") + + // foo and bar are added to the list explicitly + // name and description are added implicitly by the exception handler. + // all of them should be replaced by '?' + // (including FOO, which should be detected case-insensitively) + runTestWithMessage("Error happened in arst_FOO_bar_zxcv (name: description)") + + val traceMessage = findFirstTraceMessage() + Assertions.assertAll( + Executable { + Assertions.assertEquals(AirbyteTraceMessage.Type.ERROR, traceMessage.trace.type) + }, + Executable { + Assertions.assertEquals( + "Error happened in arst_FOO_bar_zxcv (name: description)", + traceMessage.trace.error.message + ) + }, + Executable { + Assertions.assertEquals( + "Error happened in arst_?_?_zxcv (?: ?)", + traceMessage.trace.error.internalMessage + ) + }, + Executable { + Assertions.assertEquals( + AirbyteErrorTraceMessage.FailureType.SYSTEM_ERROR, + traceMessage.trace.error.failureType + ) + }, + Executable { + Assertions.assertNull( + traceMessage.trace.error.stackTrace, + "Stacktrace should be null if deinterpolating the error message" + ) + } + ) + } + + /** + * We should only deinterpolate whole words, i.e. if the target string is not adjacent to an + * alphanumeric character. + */ + @Test + @Throws(Exception::class) + fun testMessageSmartDeinterpolation() { + AirbyteExceptionHandler.addStringForDeinterpolation("foo") + AirbyteExceptionHandler.addStringForDeinterpolation("bar") + + runTestWithMessage("Error happened in foobar") + + val traceMessage = findFirstTraceMessage() + // We shouldn't deinterpolate at all in this case, so we will get the default trace message + // behavior. + Assertions.assertAll( + Executable { + Assertions.assertEquals( + AirbyteExceptionHandler.logMessage, + traceMessage.trace.error.message + ) + }, + Executable { + Assertions.assertEquals( + "java.lang.RuntimeException: Error happened in foobar", + traceMessage.trace.error.internalMessage + ) + } + ) + } + + /** + * When one of the target strings is a substring of another, we should not deinterpolate the + * substring. + */ + @Test + @Throws(Exception::class) + fun testMessageSubstringDeinterpolation() { + AirbyteExceptionHandler.addStringForDeinterpolation("airbyte") + AirbyteExceptionHandler.addStringForDeinterpolation("airbyte_internal") + + runTestWithMessage("Error happened in airbyte_internal.foo") + + val traceMessage = findFirstTraceMessage() + Assertions.assertEquals("Error happened in ?.foo", traceMessage.trace.error.internalMessage) + } + + /** We should only deinterpolate specific exception classes. */ + @Test + @Throws(Exception::class) + fun testClassDeinterpolation() { + AirbyteExceptionHandler.addStringForDeinterpolation("foo") + + runTestWithMessage(IOException("Error happened in foo")) + + val traceMessage = findFirstTraceMessage() + // We shouldn't deinterpolate at all in this case, so we will get the default trace message + // behavior. + Assertions.assertAll( + Executable { + Assertions.assertEquals( + AirbyteExceptionHandler.logMessage, + traceMessage.trace.error.message + ) + }, + Executable { + Assertions.assertEquals( + "java.io.IOException: Error happened in foo", + traceMessage.trace.error.internalMessage + ) + } + ) + } + + /** We should check the classes of the entire exception chain, not just the root exception. */ + @Test + @Throws(Exception::class) + fun testNestedThrowableClassDeinterpolation() { + AirbyteExceptionHandler.addStringForDeinterpolation("foo") + + runTestWithMessage(Exception(RuntimeException("Error happened in foo"))) + + val traceMessage = findFirstTraceMessage() + // We shouldn't deinterpolate at all in this case, so we will get the default trace message + // behavior. + Assertions.assertEquals("Error happened in ?", traceMessage.trace.error.internalMessage) + } + + @Throws(InterruptedException::class) + private fun runTestWithMessage(message: String) { + runTestWithMessage(RuntimeException(message)) + } + + @Throws(InterruptedException::class) + private fun runTestWithMessage(throwable: Throwable) { + // have to spawn a new thread to test the uncaught exception handling, + // because junit catches any exceptions in main thread, i.e. they're not 'uncaught' + val thread: Thread = + object : Thread() { + @SneakyThrows + override fun run() { + val runner = Mockito.mock(IntegrationRunner::class.java) + Mockito.doThrow(throwable).`when`(runner).run(arrayOf("write")) + runner.run(arrayOf("write")) + } + } + thread.uncaughtExceptionHandler = airbyteExceptionHandler + thread.start() + thread.join() + System.out.flush() + } + + @AfterEach + fun teardown() { + System.setOut(originalOut) + + AirbyteExceptionHandler.STRINGS_TO_DEINTERPOLATE.clear() + AirbyteExceptionHandler.addCommonStringsToDeinterpolate() + + AirbyteExceptionHandler.THROWABLES_TO_DEINTERPOLATE.clear() + } + + private fun findFirstTraceMessage(): AirbyteMessage { + val maybeTraceMessage = + Arrays.stream( + outContent + .toString(StandardCharsets.UTF_8) + .split("\n".toRegex()) + .dropLastWhile { it.isEmpty() } + .toTypedArray() + ) + .map { line: String? -> + // these tests sometimes emit non-json stdout (e.g. log4j warnings) + // so we try-catch to handle those malformed lines. + try { + return@map Jsons.deserialize( + line, + AirbyteMessage::class.java + ) + } catch (e: Exception) { + return@map null + } + } + .filter { message: AirbyteMessage? -> + message != null && message.type == AirbyteMessage.Type.TRACE + } + .findFirst() + Assertions.assertTrue( + maybeTraceMessage.isPresent, + "Expected to find a trace message in stdout" + ) + return maybeTraceMessage.get() + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/base/AirbyteLogMessageTemplateTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/base/AirbyteLogMessageTemplateTest.kt new file mode 100644 index 0000000000000..c8585437ac488 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/base/AirbyteLogMessageTemplateTest.kt @@ -0,0 +1,134 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.base + +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.AirbyteLogMessage +import io.airbyte.protocol.models.AirbyteMessage +import java.io.* +import java.nio.charset.StandardCharsets +import java.util.regex.Pattern +import org.apache.logging.log4j.Level +import org.apache.logging.log4j.core.LoggerContext +import org.apache.logging.log4j.core.appender.OutputStreamAppender +import org.apache.logging.log4j.core.config.Configurator +import org.apache.logging.log4j.core.config.LoggerConfig +import org.apache.logging.log4j.spi.ExtendedLogger +import org.junit.jupiter.api.AfterEach +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.junit.jupiter.params.ParameterizedTest +import org.junit.jupiter.params.provider.ValueSource +import org.junit.platform.commons.util.StringUtils + +class AirbyteLogMessageTemplateTest { + private lateinit var loggerContext: LoggerContext + private lateinit var rootLoggerConfig: LoggerConfig + private lateinit var logger: ExtendedLogger + private lateinit var outputStreamAppender: OutputStreamAppender + private lateinit var outputContent: ByteArrayOutputStream + + fun getLogger() { + // We are creating a log appender with the same output pattern + // as the console json appender defined in this project's log4j2.xml file. + // We then attach this log appender with the LOGGER instance so that we can validate the + // logs + // produced by code and assert that it matches the expected format. + loggerContext = Configurator.initialize(null, "log4j2.xml") + + val configuration = loggerContext.getConfiguration() + rootLoggerConfig = configuration.getLoggerConfig("") + + outputContent = ByteArrayOutputStream() + outputStreamAppender = + OutputStreamAppender.createAppender( + rootLoggerConfig.getAppenders()[CONSOLE_JSON_APPENDER]!!.layout, + null, + outputContent, + OUTPUT_STREAM_APPENDER, + false, + true + ) + outputStreamAppender.start() + + rootLoggerConfig.addAppender(outputStreamAppender, Level.ALL, null) + logger = loggerContext.getLogger(AirbyteLogMessageTemplateTest::class.java) + } + + @AfterEach + fun closeLogger() { + outputStreamAppender!!.stop() + rootLoggerConfig!!.removeAppender(OUTPUT_STREAM_APPENDER) + loggerContext!!.close() + } + + @Test + @Throws(IOException::class) + fun testAirbyteLogMessageFormat() { + getLogger() + logger!!.info("hello") + + outputContent!!.flush() + val logMessage = outputContent!!.toString(StandardCharsets.UTF_8) + val airbyteMessage = validateLogIsAirbyteMessage(logMessage) + val airbyteLogMessage = validateAirbyteMessageIsLog(airbyteMessage) + + val connectorLogMessage = airbyteLogMessage.message + // validate that the message inside AirbyteLogMessage matches the pattern. + // pattern to check for is: LOG_LEVEL className(methodName):LineNumber logMessage + val connectorLogMessageRegex = + String.format( + "^INFO %s [\\w+.]*.AirbyteLogMessageTemplateTest\\(testAirbyteLogMessageFormat\\):\\d+ hello$", + Pattern.compile(Thread.currentThread().name) + ) + val pattern = Pattern.compile(connectorLogMessageRegex) + + val matcher = pattern.matcher(connectorLogMessage) + Assertions.assertTrue(matcher.matches(), connectorLogMessage) + } + + private fun validateLogIsAirbyteMessage(logMessage: String): AirbyteMessage { + val jsonLine = Jsons.tryDeserialize(logMessage) + Assertions.assertFalse(jsonLine.isEmpty) + + val m = Jsons.tryObject(jsonLine.get(), AirbyteMessage::class.java) + Assertions.assertFalse(m.isEmpty) + return m.get() + } + + private fun validateAirbyteMessageIsLog(airbyteMessage: AirbyteMessage): AirbyteLogMessage { + Assertions.assertEquals(AirbyteMessage.Type.LOG, airbyteMessage.type) + Assertions.assertNotNull(airbyteMessage.log) + Assertions.assertFalse(StringUtils.isBlank(airbyteMessage.log.message)) + return airbyteMessage.log + } + + @ParameterizedTest + @ValueSource(ints = [2, 100, 9000]) + @Throws(IOException::class) + fun testAirbyteLogMessageLength(stringRepetitions: Int) { + getLogger() + val sb = StringBuilder() + for (i in 0 until stringRepetitions) { + sb.append("abcd") + } + logger!!.info(sb.toString(), RuntimeException("aaaaa bbbbbb ccccccc dddddd")) + outputContent!!.flush() + val logMessage = outputContent!!.toString(StandardCharsets.UTF_8) + + val airbyteMessage = validateLogIsAirbyteMessage(logMessage) + val airbyteLogMessage = validateAirbyteMessageIsLog(airbyteMessage) + val connectorLogMessage = airbyteLogMessage.message + + // #30781 - message length is capped at 16,000 charcters. + val j = connectorLogMessage.length + Assertions.assertFalse(connectorLogMessage.length > 16001) + Assertions.assertTrue(logMessage.length < 32768) + } + + companion object { + const val OUTPUT_STREAM_APPENDER: String = "OutputStreamAppender" + const val CONSOLE_JSON_APPENDER: String = "ConsoleJSONAppender" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/base/AirbyteTraceMessageUtilityTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/base/AirbyteTraceMessageUtilityTest.kt new file mode 100644 index 0000000000000..3fd9480cffe0b --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/base/AirbyteTraceMessageUtilityTest.kt @@ -0,0 +1,81 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.base + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.v0.AirbyteErrorTraceMessage +import java.io.ByteArrayOutputStream +import java.io.PrintStream +import java.nio.charset.StandardCharsets +import org.junit.jupiter.api.AfterEach +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.mockito.Mockito + +class AirbyteTraceMessageUtilityTest { + var originalOut: PrintStream = System.out + private val outContent = ByteArrayOutputStream() + + @BeforeEach + fun setUpOut() { + System.setOut(PrintStream(outContent, true, StandardCharsets.UTF_8)) + } + + private fun assertJsonNodeIsTraceMessage(jsonNode: JsonNode) { + // todo: this check could be better by actually trying to convert the JsonNode to an + // AirbyteTraceMessage instance + Assertions.assertEquals("TRACE", jsonNode["type"].asText()) + Assertions.assertNotNull(jsonNode["trace"]) + } + + @Test + fun testEmitSystemErrorTrace() { + AirbyteTraceMessageUtility.emitSystemErrorTrace( + Mockito.mock(RuntimeException::class.java), + "this is a system error" + ) + val outJson = Jsons.deserialize(outContent.toString(StandardCharsets.UTF_8)) + assertJsonNodeIsTraceMessage(outJson) + Assertions.assertEquals("system_error", outJson["trace"]["error"]["failure_type"].asText()) + } + + @Test + fun testEmitConfigErrorTrace() { + AirbyteTraceMessageUtility.emitConfigErrorTrace( + Mockito.mock(RuntimeException::class.java), + "this is a config error" + ) + val outJson = Jsons.deserialize(outContent.toString(StandardCharsets.UTF_8)) + assertJsonNodeIsTraceMessage(outJson) + Assertions.assertEquals("config_error", outJson["trace"]["error"]["failure_type"].asText()) + } + + @Test + fun testEmitErrorTrace() { + AirbyteTraceMessageUtility.emitErrorTrace( + Mockito.mock(RuntimeException::class.java), + "this is an error", + AirbyteErrorTraceMessage.FailureType.SYSTEM_ERROR + ) + assertJsonNodeIsTraceMessage(Jsons.deserialize(outContent.toString(StandardCharsets.UTF_8))) + } + + @Test + fun testCorrectStacktraceFormat() { + try { + val x = 1 / 0 + } catch (e: Exception) { + AirbyteTraceMessageUtility.emitSystemErrorTrace(e, "you exploded the universe") + } + val outJson = Jsons.deserialize(outContent.toString(StandardCharsets.UTF_8)) + Assertions.assertTrue(outJson["trace"]["error"]["stack_trace"].asText().contains("\n\tat")) + } + + @AfterEach + fun revertOut() { + System.setOut(originalOut) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/base/DestinationConfigTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/base/DestinationConfigTest.kt new file mode 100644 index 0000000000000..1b9dd2db4e441 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/base/DestinationConfigTest.kt @@ -0,0 +1,73 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.base + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.commons.json.Jsons +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +class DestinationConfigTest { + @Test + fun testInitialization() { + // bad initialization + Assertions.assertThrows(IllegalArgumentException::class.java) { + DestinationConfig.initialize(null) + } + Assertions.assertThrows(IllegalStateException::class.java) { DestinationConfig.instance } + + // good initialization + DestinationConfig.initialize(NODE, true) + Assertions.assertNotNull(DestinationConfig.instance) + Assertions.assertEquals(NODE, DestinationConfig.instance!!.root) + Assertions.assertEquals(true, DestinationConfig.instance!!.isV2Destination) + + // initializing again doesn't change the config + val nodeUnused = Jsons.deserialize("{}") + DestinationConfig.initialize(nodeUnused, false) + Assertions.assertEquals(NODE, DestinationConfig.instance!!.root) + Assertions.assertEquals(true, DestinationConfig.instance!!.isV2Destination) + } + + @Test + fun testValues() { + DestinationConfig.clearInstance() + DestinationConfig.initialize(NODE) + + Assertions.assertEquals("bar", DestinationConfig.instance!!.getTextValue("foo")) + Assertions.assertEquals("", DestinationConfig.instance!!.getTextValue("baz")) + + Assertions.assertFalse(DestinationConfig.instance!!.getBooleanValue("foo")) + Assertions.assertTrue(DestinationConfig.instance!!.getBooleanValue("baz")) + + // non-existent key + Assertions.assertEquals("", DestinationConfig.instance!!.getTextValue("blah")) + Assertions.assertFalse(DestinationConfig.instance!!.getBooleanValue("blah")) + + Assertions.assertEquals( + Jsons.deserialize("\"bar\""), + DestinationConfig.instance!!.getNodeValue("foo") + ) + Assertions.assertEquals( + Jsons.deserialize("true"), + DestinationConfig.instance!!.getNodeValue("baz") + ) + Assertions.assertNull(DestinationConfig.instance!!.getNodeValue("blah")) + + Assertions.assertEquals(false, DestinationConfig.instance!!.isV2Destination) + } + + companion object { + private val JSON = + """ + { + "foo": "bar", + "baz": true + } + + """.trimIndent() + + private val NODE: JsonNode = Jsons.deserialize(JSON) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/base/FailureTrackingAirbyteMessageConsumerTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/base/FailureTrackingAirbyteMessageConsumerTest.kt new file mode 100644 index 0000000000000..48eb508ec709a --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/base/FailureTrackingAirbyteMessageConsumerTest.kt @@ -0,0 +1,71 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.base + +import io.airbyte.protocol.models.v0.AirbyteMessage +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.mockito.Mockito +import org.mockito.kotlin.any +import org.mockito.kotlin.mock + +internal class FailureTrackingAirbyteMessageConsumerTest { + @Test + @Throws(Exception::class) + fun testStartNoFailure() { + val consumer = Mockito.spy(TestConsumer()) + consumer.start() + consumer.close() + + Mockito.verify(consumer).close(false) + } + + @Test + @Throws(Exception::class) + fun testStartWithFailure() { + val consumer = Mockito.spy(TestConsumer()) + Mockito.doThrow(RuntimeException()).`when`(consumer).startTracked() + + // verify the exception still gets thrown. + Assertions.assertThrows(RuntimeException::class.java) { consumer.start() } + consumer.close() + + Mockito.verify(consumer).close(true) + } + + @Test + @Throws(Exception::class) + fun testAcceptNoFailure() { + val consumer = Mockito.spy(TestConsumer()) + + val msg = Mockito.mock(AirbyteMessage::class.java) + consumer.accept(msg) + consumer.close() + + Mockito.verify(consumer).close(false) + } + + @Test + @Throws(Exception::class) + fun testAcceptWithFailure() { + val consumer = Mockito.spy(TestConsumer()) + val msg: AirbyteMessage = mock() + Mockito.`when`(msg.type).thenReturn(AirbyteMessage.Type.RECORD) + Mockito.doThrow(RuntimeException()).`when`(consumer).acceptTracked(any()) + + // verify the exception still gets thrown. + Assertions.assertThrows(RuntimeException::class.java) { consumer.accept(msg) } + consumer.close() + + Mockito.verify(consumer).close(true) + } + + internal class TestConsumer : FailureTrackingAirbyteMessageConsumer() { + public override fun startTracked() {} + + public override fun acceptTracked(s: AirbyteMessage) {} + + public override fun close(hasFailed: Boolean) {} + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/base/IntegrationCliParserTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/base/IntegrationCliParserTest.kt new file mode 100644 index 0000000000000..8dcdf870d471e --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/base/IntegrationCliParserTest.kt @@ -0,0 +1,88 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.base + +import java.nio.file.Path +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class IntegrationCliParserTest { + @Test + fun testSpec() { + val args = arrayOf("--spec") + val actual = IntegrationCliParser().parse(args) + Assertions.assertEquals(IntegrationConfig.spec(), actual) + } + + @Test + fun testCheck() { + val args = arrayOf("--check", "--config", CONFIG_FILENAME) + val actual = IntegrationCliParser().parse(args) + Assertions.assertEquals(IntegrationConfig.check(Path.of(CONFIG_FILENAME)), actual) + } + + @Test + fun testDiscover() { + val args = arrayOf("--discover", "--config", CONFIG_FILENAME) + val actual = IntegrationCliParser().parse(args) + Assertions.assertEquals(IntegrationConfig.discover(Path.of(CONFIG_FILENAME)), actual) + } + + @Test + fun testWrite() { + val args = arrayOf("--write", "--config", CONFIG_FILENAME, "--catalog", CATALOG_FILENAME) + val actual = IntegrationCliParser().parse(args) + Assertions.assertEquals( + IntegrationConfig.write(Path.of(CONFIG_FILENAME), Path.of(CATALOG_FILENAME)), + actual + ) + } + + @Test + fun testReadWithoutState() { + val args = arrayOf("--read", "--config", CONFIG_FILENAME, "--catalog", CATALOG_FILENAME) + val actual = IntegrationCliParser().parse(args) + Assertions.assertEquals( + IntegrationConfig.read(Path.of(CONFIG_FILENAME), Path.of(CATALOG_FILENAME), null), + actual + ) + } + + @Test + fun testReadWithState() { + val args = + arrayOf( + "--read", + "--config", + CONFIG_FILENAME, + "--catalog", + CATALOG_FILENAME, + "--state", + STATE_FILENAME + ) + val actual = IntegrationCliParser().parse(args) + Assertions.assertEquals( + IntegrationConfig.read( + Path.of(CONFIG_FILENAME), + Path.of(CATALOG_FILENAME), + Path.of(STATE_FILENAME) + ), + actual + ) + } + + @Test + fun testFailsOnUnknownArg() { + val args = arrayOf("--check", "--config", CONFIG_FILENAME, "--random", "garbage") + Assertions.assertThrows(IllegalArgumentException::class.java) { + IntegrationCliParser().parse(args) + } + } + + companion object { + private const val CONFIG_FILENAME = "config.json" + private const val CATALOG_FILENAME = "catalog.json" + private const val STATE_FILENAME = "state.json" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/base/IntegrationConfigTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/base/IntegrationConfigTest.kt new file mode 100644 index 0000000000000..2dee19596dc6c --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/base/IntegrationConfigTest.kt @@ -0,0 +1,71 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.base + +import java.nio.file.Path +import java.util.* +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class IntegrationConfigTest { + @Test + fun testSpec() { + val config = IntegrationConfig.spec() + Assertions.assertEquals(Command.SPEC, config.command) + Assertions.assertThrows(IllegalStateException::class.java) { config.getConfigPath() } + Assertions.assertThrows(IllegalStateException::class.java) { config.getCatalogPath() } + Assertions.assertThrows(IllegalStateException::class.java) { config.getStatePath() } + } + + @Test + fun testCheck() { + val config = IntegrationConfig.check(CONFIG_PATH) + Assertions.assertEquals(Command.CHECK, config.command) + Assertions.assertEquals(CONFIG_PATH, config.getConfigPath()) + Assertions.assertThrows(IllegalStateException::class.java) { config.getCatalogPath() } + Assertions.assertThrows(IllegalStateException::class.java) { config.getStatePath() } + } + + @Test + fun testDiscover() { + val config = IntegrationConfig.discover(CONFIG_PATH) + Assertions.assertEquals(Command.DISCOVER, config.command) + Assertions.assertEquals(CONFIG_PATH, config.getConfigPath()) + Assertions.assertThrows(IllegalStateException::class.java) { config.getCatalogPath() } + Assertions.assertThrows(IllegalStateException::class.java) { config.getStatePath() } + } + + @Test + fun testWrite() { + val config = IntegrationConfig.write(CONFIG_PATH, CATALOG_PATH) + Assertions.assertEquals(Command.WRITE, config.command) + Assertions.assertEquals(CONFIG_PATH, config.getConfigPath()) + Assertions.assertEquals(CATALOG_PATH, config.getCatalogPath()) + Assertions.assertThrows(IllegalStateException::class.java) { config.getStatePath() } + } + + @Test + fun testReadWithState() { + val config = IntegrationConfig.read(CONFIG_PATH, CATALOG_PATH, STATE_PATH) + Assertions.assertEquals(Command.READ, config.command) + Assertions.assertEquals(CONFIG_PATH, config.getConfigPath()) + Assertions.assertEquals(CATALOG_PATH, config.getCatalogPath()) + Assertions.assertEquals(Optional.of(STATE_PATH), config.getStatePath()) + } + + @Test + fun testReadWithoutState() { + val config = IntegrationConfig.read(CONFIG_PATH, CATALOG_PATH, null) + Assertions.assertEquals(Command.READ, config.command) + Assertions.assertEquals(CONFIG_PATH, config.getConfigPath()) + Assertions.assertEquals(CATALOG_PATH, config.getCatalogPath()) + Assertions.assertEquals(Optional.empty(), config.getStatePath()) + } + + companion object { + private val CONFIG_PATH: Path = Path.of("config.json") + private val CATALOG_PATH: Path = Path.of("catalog.json") + private val STATE_PATH: Path = Path.of("state.json") + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/base/IntegrationRunnerBackwardsCompatabilityTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/base/IntegrationRunnerBackwardsCompatabilityTest.kt new file mode 100644 index 0000000000000..1958b6e96bbbf --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/base/IntegrationRunnerBackwardsCompatabilityTest.kt @@ -0,0 +1,65 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.base + +import java.io.* +import java.nio.charset.StandardCharsets +import java.util.* +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +class IntegrationRunnerBackwardsCompatabilityTest { + @Test + @Throws(Exception::class) + fun testByteArrayInputStreamVersusScanner() { + val testInputs = + arrayOf( + "This is line 1\nThis is line 2\nThis is line 3", + "This is line 1\n\nThis is line 2\n\n\nThis is line 3", + "This is line 1\rThis is line 2\nThis is line 3\r\nThis is line 4", + "This is line 1 with emoji 😊\nThis is line 2 with Greek characters: Α, Β, Χ\nThis is line 3 with Cyrillic characters: Д, Ж, З", + "This is a very long line that contains a lot of characters...", + "This is line 1 with an escaped newline \\n character\nThis is line 2 with another escaped newline \\n character", + "This is line 1\n\n", + "\nThis is line 2", + "\n" + ) + + for (testInput in testInputs) { + // get new output + val stream1: InputStream = + ByteArrayInputStream(testInput.toByteArray(StandardCharsets.UTF_8)) + val consumer2 = MockConsumer() + IntegrationRunner.consumeWriteStream(consumer2, stream1) + val newOutput = consumer2.getOutput() + + // get old output + val oldOutput: MutableList = ArrayList() + val stream2: InputStream = + ByteArrayInputStream(testInput.toByteArray(StandardCharsets.UTF_8)) + val scanner = Scanner(stream2, StandardCharsets.UTF_8).useDelimiter("[\r\n]+") + while (scanner.hasNext()) { + oldOutput.add(scanner.next()) + } + + Assertions.assertEquals(oldOutput, newOutput) + } + } + + private class MockConsumer : SerializedAirbyteMessageConsumer { + private val output: MutableList = ArrayList() + + override fun start() {} + + override fun accept(message: String, sizeInBytes: Int) { + output.add(message) + } + + override fun close() {} + + fun getOutput(): List { + return ArrayList(output) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/base/IntegrationRunnerTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/base/IntegrationRunnerTest.kt new file mode 100644 index 0000000000000..59d06c53b7445 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/base/IntegrationRunnerTest.kt @@ -0,0 +1,643 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.base + +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.collect.ImmutableMap +import com.google.common.collect.Lists +import io.airbyte.cdk.integrations.util.ConnectorExceptionUtil +import io.airbyte.commons.exceptions.ConfigErrorException +import io.airbyte.commons.io.IOs +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.util.AutoCloseableIterators +import io.airbyte.commons.util.MoreIterators +import io.airbyte.protocol.models.v0.* +import io.airbyte.validation.json.JsonSchemaValidator +import java.io.* +import java.net.URI +import java.nio.charset.StandardCharsets +import java.nio.file.Files +import java.nio.file.Path +import java.time.Instant +import java.util.concurrent.Executors +import java.util.concurrent.TimeUnit +import java.util.concurrent.atomic.AtomicBoolean +import java.util.function.Consumer +import java.util.stream.Collectors +import org.apache.commons.lang3.ThreadUtils +import org.assertj.core.api.AssertionsForClassTypes +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.mockito.Mockito +import org.mockito.kotlin.any +import org.mockito.kotlin.mock +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +internal class IntegrationRunnerTest { + private lateinit var cliParser: IntegrationCliParser + private lateinit var stdoutConsumer: Consumer + private lateinit var destination: Destination + private lateinit var source: Source + private lateinit var configPath: Path + private lateinit var configuredCatalogPath: Path + private lateinit var statePath: Path + private lateinit var configDir: Path + + @BeforeEach + @Throws(IOException::class) + fun setup() { + cliParser = mock() + stdoutConsumer = mock() + destination = mock() + source = mock() + configDir = Files.createTempDirectory(Files.createDirectories(TEST_ROOT), "test") + + configPath = IOs.writeFile(configDir, CONFIG_FILE_NAME, CONFIG_STRING) + configuredCatalogPath = + IOs.writeFile( + configDir, + CONFIGURED_CATALOG_FILE_NAME, + Jsons.serialize(CONFIGURED_CATALOG) + ) + statePath = IOs.writeFile(configDir, STATE_FILE_NAME, Jsons.serialize(STATE)) + + val testName = Thread.currentThread().name + ThreadUtils.getAllThreads() + .stream() + .filter { runningThread: Thread -> !runningThread.isDaemon } + .forEach { runningThread: Thread -> runningThread.name = testName } + } + + @Test + @Throws(Exception::class) + fun testSpecSource() { + val intConfig = IntegrationConfig.spec() + val output = ConnectorSpecification().withDocumentationUrl(URI("https://docs.airbyte.io/")) + + Mockito.`when`(cliParser!!.parse(ARGS)).thenReturn(intConfig) + Mockito.`when`(source!!.spec()).thenReturn(output) + + IntegrationRunner(cliParser, stdoutConsumer, null, source).run(ARGS) + + Mockito.verify(source).spec() + Mockito.verify(stdoutConsumer) + .accept(AirbyteMessage().withType(AirbyteMessage.Type.SPEC).withSpec(output)) + } + + @Test + @Throws(Exception::class) + fun testSpecDestination() { + val intConfig = IntegrationConfig.spec() + val output = ConnectorSpecification().withDocumentationUrl(URI("https://docs.airbyte.io/")) + + Mockito.`when`(cliParser!!.parse(ARGS)).thenReturn(intConfig) + Mockito.`when`(destination!!.spec()).thenReturn(output) + + IntegrationRunner(cliParser, stdoutConsumer, destination, null).run(ARGS) + + Mockito.verify(destination).spec() + Mockito.verify(stdoutConsumer) + .accept(AirbyteMessage().withType(AirbyteMessage.Type.SPEC).withSpec(output)) + } + + @Test + @Throws(Exception::class) + fun testCheckSource() { + val intConfig = IntegrationConfig.check(configPath) + val output = + AirbyteConnectionStatus() + .withStatus(AirbyteConnectionStatus.Status.FAILED) + .withMessage("it failed") + + Mockito.`when`(cliParser!!.parse(ARGS)).thenReturn(intConfig) + Mockito.`when`(source!!.check(CONFIG)).thenReturn(output) + + val expectedConnSpec = Mockito.mock(ConnectorSpecification::class.java) + Mockito.`when`(source!!.spec()).thenReturn(expectedConnSpec) + Mockito.`when`(expectedConnSpec.connectionSpecification).thenReturn(CONFIG) + val jsonSchemaValidator = Mockito.mock(JsonSchemaValidator::class.java) + IntegrationRunner(cliParser, stdoutConsumer, null, source, jsonSchemaValidator).run(ARGS) + + Mockito.verify(source).check(CONFIG) + Mockito.verify(stdoutConsumer) + .accept( + AirbyteMessage() + .withType(AirbyteMessage.Type.CONNECTION_STATUS) + .withConnectionStatus(output) + ) + Mockito.verify(jsonSchemaValidator).validate(any(), any()) + } + + @Test + @Throws(Exception::class) + fun testCheckDestination() { + val intConfig = IntegrationConfig.check(configPath) + val output = + AirbyteConnectionStatus() + .withStatus(AirbyteConnectionStatus.Status.FAILED) + .withMessage("it failed") + + Mockito.`when`(cliParser!!.parse(ARGS)).thenReturn(intConfig) + Mockito.`when`(destination!!.check(CONFIG)).thenReturn(output) + + val expectedConnSpec = Mockito.mock(ConnectorSpecification::class.java) + Mockito.`when`(destination!!.spec()).thenReturn(expectedConnSpec) + Mockito.`when`(expectedConnSpec.connectionSpecification).thenReturn(CONFIG) + + val jsonSchemaValidator = Mockito.mock(JsonSchemaValidator::class.java) + + IntegrationRunner(cliParser, stdoutConsumer, destination, null, jsonSchemaValidator) + .run(ARGS) + + Mockito.verify(destination).check(CONFIG) + Mockito.verify(stdoutConsumer) + .accept( + AirbyteMessage() + .withType(AirbyteMessage.Type.CONNECTION_STATUS) + .withConnectionStatus(output) + ) + Mockito.verify(jsonSchemaValidator).validate(any(), any()) + } + + @Test + @Throws(Exception::class) + fun testDiscover() { + val intConfig = IntegrationConfig.discover(configPath) + val output = + AirbyteCatalog().withStreams(Lists.newArrayList(AirbyteStream().withName("oceans"))) + + Mockito.`when`(cliParser!!.parse(ARGS)).thenReturn(intConfig) + Mockito.`when`(source!!.discover(CONFIG)).thenReturn(output) + + val expectedConnSpec = Mockito.mock(ConnectorSpecification::class.java) + Mockito.`when`(source!!.spec()).thenReturn(expectedConnSpec) + Mockito.`when`(expectedConnSpec.connectionSpecification).thenReturn(CONFIG) + + val jsonSchemaValidator = Mockito.mock(JsonSchemaValidator::class.java) + IntegrationRunner(cliParser, stdoutConsumer, null, source, jsonSchemaValidator).run(ARGS) + + Mockito.verify(source).discover(CONFIG) + Mockito.verify(stdoutConsumer) + .accept(AirbyteMessage().withType(AirbyteMessage.Type.CATALOG).withCatalog(output)) + Mockito.verify(jsonSchemaValidator).validate(any(), any()) + } + + @Test + @Throws(Exception::class) + fun testRead() { + val intConfig = IntegrationConfig.read(configPath, configuredCatalogPath, statePath) + val message1 = + AirbyteMessage() + .withType(AirbyteMessage.Type.RECORD) + .withRecord( + AirbyteRecordMessage() + .withData(Jsons.jsonNode(ImmutableMap.of("names", "byron"))) + ) + val message2 = + AirbyteMessage() + .withType(AirbyteMessage.Type.RECORD) + .withRecord( + AirbyteRecordMessage() + .withData(Jsons.jsonNode(ImmutableMap.of("names", "reginald"))) + ) + + Mockito.`when`(cliParser!!.parse(ARGS)).thenReturn(intConfig) + Mockito.`when`(source!!.read(CONFIG, CONFIGURED_CATALOG, STATE)) + .thenReturn(AutoCloseableIterators.fromIterator(MoreIterators.of(message1, message2))) + + val expectedConnSpec = Mockito.mock(ConnectorSpecification::class.java) + Mockito.`when`(source!!.spec()).thenReturn(expectedConnSpec) + Mockito.`when`(expectedConnSpec.connectionSpecification).thenReturn(CONFIG) + + val jsonSchemaValidator = Mockito.mock(JsonSchemaValidator::class.java) + IntegrationRunner(cliParser, stdoutConsumer, null, source, jsonSchemaValidator).run(ARGS) + + // noinspection resource + Mockito.verify(source).read(CONFIG, CONFIGURED_CATALOG, STATE) + Mockito.verify(stdoutConsumer).accept(message1) + Mockito.verify(stdoutConsumer).accept(message2) + Mockito.verify(jsonSchemaValidator).validate(any(), any()) + } + + @Test + @Throws(Exception::class) + fun testReadException() { + val intConfig = IntegrationConfig.read(configPath, configuredCatalogPath, statePath) + val configErrorException = ConfigErrorException("Invalid configuration") + + Mockito.`when`(cliParser!!.parse(ARGS)).thenReturn(intConfig) + Mockito.`when`(source!!.read(CONFIG, CONFIGURED_CATALOG, STATE)) + .thenThrow(configErrorException) + + val expectedConnSpec = Mockito.mock(ConnectorSpecification::class.java) + Mockito.`when`(source!!.spec()).thenReturn(expectedConnSpec) + Mockito.`when`(expectedConnSpec.connectionSpecification).thenReturn(CONFIG) + + val jsonSchemaValidator = Mockito.mock(JsonSchemaValidator::class.java) + val throwable = + AssertionsForClassTypes.catchThrowable { + IntegrationRunner(cliParser, stdoutConsumer, null, source, jsonSchemaValidator) + .run(ARGS) + } + + AssertionsForClassTypes.assertThat(throwable).isInstanceOf(ConfigErrorException::class.java) + // noinspection resource + Mockito.verify(source).read(CONFIG, CONFIGURED_CATALOG, STATE) + } + + @Test + @Throws(Exception::class) + fun testCheckNestedException() { + val intConfig = IntegrationConfig.check(configPath) + val output = + AirbyteConnectionStatus() + .withStatus(AirbyteConnectionStatus.Status.FAILED) + .withMessage("Invalid configuration") + val configErrorException = ConfigErrorException("Invalid configuration") + val runtimeException = RuntimeException(RuntimeException(configErrorException)) + + Mockito.`when`(cliParser!!.parse(ARGS)).thenReturn(intConfig) + Mockito.`when`(source!!.check(CONFIG)).thenThrow(runtimeException) + + val expectedConnSpec = Mockito.mock(ConnectorSpecification::class.java) + Mockito.`when`(source!!.spec()).thenReturn(expectedConnSpec) + Mockito.`when`(expectedConnSpec.connectionSpecification).thenReturn(CONFIG) + val jsonSchemaValidator = Mockito.mock(JsonSchemaValidator::class.java) + IntegrationRunner(cliParser, stdoutConsumer, null, source, jsonSchemaValidator).run(ARGS) + + Mockito.verify(source).check(CONFIG) + Mockito.verify(stdoutConsumer) + .accept( + AirbyteMessage() + .withType(AirbyteMessage.Type.CONNECTION_STATUS) + .withConnectionStatus(output) + ) + Mockito.verify(jsonSchemaValidator).validate(any(), any()) + } + + @Test + @Throws(Exception::class) + fun testCheckRuntimeException() { + val intConfig = IntegrationConfig.check(configPath) + val output = + AirbyteConnectionStatus() + .withStatus(AirbyteConnectionStatus.Status.FAILED) + .withMessage( + String.format( + ConnectorExceptionUtil.COMMON_EXCEPTION_MESSAGE_TEMPLATE, + "Runtime Error" + ) + ) + val runtimeException = RuntimeException("Runtime Error") + + Mockito.`when`(cliParser!!.parse(ARGS)).thenReturn(intConfig) + Mockito.`when`(source!!.check(CONFIG)).thenThrow(runtimeException) + + val expectedConnSpec = Mockito.mock(ConnectorSpecification::class.java) + Mockito.`when`(source!!.spec()).thenReturn(expectedConnSpec) + Mockito.`when`(expectedConnSpec.connectionSpecification).thenReturn(CONFIG) + val jsonSchemaValidator = Mockito.mock(JsonSchemaValidator::class.java) + IntegrationRunner(cliParser, stdoutConsumer, null, source, jsonSchemaValidator).run(ARGS) + + Mockito.verify(source).check(CONFIG) + Mockito.verify(stdoutConsumer) + .accept( + AirbyteMessage() + .withType(AirbyteMessage.Type.CONNECTION_STATUS) + .withConnectionStatus(output) + ) + Mockito.verify(jsonSchemaValidator).validate(any(), any()) + } + + @Test + @Throws(Exception::class) + fun testWrite() { + val intConfig = IntegrationConfig.write(configPath, configuredCatalogPath) + val consumerMock = Mockito.mock(SerializedAirbyteMessageConsumer::class.java) + Mockito.`when`(cliParser!!.parse(ARGS)).thenReturn(intConfig) + Mockito.`when`( + destination!!.getSerializedMessageConsumer( + CONFIG, + CONFIGURED_CATALOG, + stdoutConsumer + ) + ) + .thenReturn(consumerMock) + + val expectedConnSpec = Mockito.mock(ConnectorSpecification::class.java) + Mockito.`when`(destination!!.spec()).thenReturn(expectedConnSpec) + Mockito.`when`(expectedConnSpec.connectionSpecification).thenReturn(CONFIG) + + val jsonSchemaValidator = Mockito.mock(JsonSchemaValidator::class.java) + + val runner = + Mockito.spy( + IntegrationRunner(cliParser, stdoutConsumer, destination, null, jsonSchemaValidator) + ) + runner.run(ARGS) + + Mockito.verify(destination) + .getSerializedMessageConsumer(CONFIG, CONFIGURED_CATALOG, stdoutConsumer) + Mockito.verify(jsonSchemaValidator).validate(any(), any()) + } + + @Test + @Throws(Exception::class) + fun testDestinationConsumerLifecycleSuccess() { + val message1 = + AirbyteMessage() + .withType(AirbyteMessage.Type.RECORD) + .withRecord( + AirbyteRecordMessage() + .withData(Jsons.deserialize("{ \"color\": \"blue\" }")) + .withStream(STREAM_NAME) + .withEmittedAt(EMITTED_AT) + ) + val message2 = + AirbyteMessage() + .withType(AirbyteMessage.Type.RECORD) + .withRecord( + AirbyteRecordMessage() + .withData(Jsons.deserialize("{ \"color\": \"yellow\" }")) + .withStream(STREAM_NAME) + .withEmittedAt(EMITTED_AT) + ) + val stateMessage = + AirbyteMessage() + .withType(AirbyteMessage.Type.STATE) + .withState( + AirbyteStateMessage().withData(Jsons.deserialize("{ \"checkpoint\": \"1\" }")) + ) + System.setIn( + ByteArrayInputStream( + """${Jsons.serialize(message1)} +${Jsons.serialize(message2)} +${Jsons.serialize(stateMessage)}""".toByteArray( + StandardCharsets.UTF_8 + ) + ) + ) + + Mockito.mock(SerializedAirbyteMessageConsumer::class.java) + .use { airbyteMessageConsumerMock -> + IntegrationRunner.consumeWriteStream(airbyteMessageConsumerMock) + val inOrder = Mockito.inOrder(airbyteMessageConsumerMock) + inOrder + .verify(airbyteMessageConsumerMock) + .accept( + Jsons.serialize(message1), + Jsons.serialize(message1).toByteArray(StandardCharsets.UTF_8).size + ) + inOrder + .verify(airbyteMessageConsumerMock) + .accept( + Jsons.serialize(message2), + Jsons.serialize(message2).toByteArray(StandardCharsets.UTF_8).size + ) + inOrder + .verify(airbyteMessageConsumerMock) + .accept( + Jsons.serialize(stateMessage), + Jsons.serialize(stateMessage).toByteArray(StandardCharsets.UTF_8).size + ) + } + } + + @Test + @Throws(Exception::class) + fun testDestinationConsumerLifecycleFailure() { + val message1 = + AirbyteMessage() + .withType(AirbyteMessage.Type.RECORD) + .withRecord( + AirbyteRecordMessage() + .withData(Jsons.deserialize("{ \"color\": \"blue\" }")) + .withStream(STREAM_NAME) + .withEmittedAt(EMITTED_AT) + ) + val message2 = + AirbyteMessage() + .withType(AirbyteMessage.Type.RECORD) + .withRecord( + AirbyteRecordMessage() + .withData(Jsons.deserialize("{ \"color\": \"yellow\" }")) + .withStream(STREAM_NAME) + .withEmittedAt(EMITTED_AT) + ) + System.setIn( + ByteArrayInputStream( + """${Jsons.serialize(message1)} +${Jsons.serialize(message2)}""".toByteArray( + StandardCharsets.UTF_8 + ) + ) + ) + + Mockito.mock(SerializedAirbyteMessageConsumer::class.java) + .use { airbyteMessageConsumerMock -> + Mockito.doThrow(IOException("error")) + .`when`(airbyteMessageConsumerMock) + .accept( + Jsons.serialize(message1), + Jsons.serialize(message1).toByteArray(StandardCharsets.UTF_8).size + ) + Assertions.assertThrows(IOException::class.java) { + IntegrationRunner.consumeWriteStream(airbyteMessageConsumerMock) + } + val inOrder = Mockito.inOrder(airbyteMessageConsumerMock) + inOrder + .verify(airbyteMessageConsumerMock) + .accept( + Jsons.serialize(message1), + Jsons.serialize(message1).toByteArray(StandardCharsets.UTF_8).size + ) + inOrder.verifyNoMoreInteractions() + } + } + + @Test + fun testInterruptOrphanThread() { + val caughtExceptions: MutableList = ArrayList() + startSleepingThread(caughtExceptions, false) + IntegrationRunner.stopOrphanedThreads( + { Assertions.fail() }, + 3, + TimeUnit.SECONDS, + 10, + TimeUnit.SECONDS + ) + try { + TimeUnit.SECONDS.sleep(15) + } catch (e: Exception) { + throw RuntimeException(e) + } + val runningThreads = + ThreadUtils.getAllThreads() + .stream() + .filter(IntegrationRunner.ORPHANED_THREAD_FILTER) + .collect(Collectors.toList()) + // all threads should be interrupted + Assertions.assertEquals(listOf(), runningThreads) + Assertions.assertEquals(1, caughtExceptions.size) + } + + @Test + fun testNoInterruptOrphanThread() { + val caughtExceptions: MutableList = ArrayList() + val exitCalled = AtomicBoolean(false) + startSleepingThread(caughtExceptions, true) + IntegrationRunner.stopOrphanedThreads( + { exitCalled.set(true) }, + 3, + TimeUnit.SECONDS, + 10, + TimeUnit.SECONDS + ) + try { + TimeUnit.SECONDS.sleep(15) + } catch (e: Exception) { + throw RuntimeException(e) + } + + val runningThreads = + ThreadUtils.getAllThreads() + .stream() + .filter(IntegrationRunner.ORPHANED_THREAD_FILTER) + .collect(Collectors.toList()) + // a thread that refuses to be interrupted should remain + Assertions.assertEquals(1, runningThreads.size) + Assertions.assertEquals(1, caughtExceptions.size) + Assertions.assertTrue(exitCalled.get()) + } + + private fun startSleepingThread( + caughtExceptions: MutableList, + ignoreInterrupt: Boolean + ) { + val executorService = + Executors.newFixedThreadPool(1) { r: Runnable? -> + // Create a thread that should be identified as orphaned if still running during + // shutdown + val thread = Thread(r) + thread.name = "sleeping-thread" + thread.isDaemon = false + thread + } + executorService.submit { + for (tries in 0..2) { + try { + TimeUnit.MINUTES.sleep(5) + } catch (e: Exception) { + LOGGER.info("Caught Exception", e) + caughtExceptions.add(e) + if (!ignoreInterrupt) { + executorService.shutdownNow() + break + } + } + } + } + } + + @Test + fun testParseConnectorImage() { + Assertions.assertEquals("unknown", IntegrationRunner.parseConnectorVersion(null)) + Assertions.assertEquals("unknown", IntegrationRunner.parseConnectorVersion("")) + Assertions.assertEquals( + "1.0.1-alpha", + IntegrationRunner.parseConnectorVersion("airbyte/destination-test:1.0.1-alpha") + ) + Assertions.assertEquals( + "dev", + IntegrationRunner.parseConnectorVersion("airbyte/destination-test:dev") + ) + Assertions.assertEquals( + "1.0.1-alpha", + IntegrationRunner.parseConnectorVersion("destination-test:1.0.1-alpha") + ) + Assertions.assertEquals( + "1.0.1-alpha", + IntegrationRunner.parseConnectorVersion(":1.0.1-alpha") + ) + } + + @Test + fun testConsumptionOfInvalidStateMessage() { + val invalidStateMessage = + """ + { + "type" : "STATE", + "state" : { + "type": "NOT_RECOGNIZED", + "global": { + "streamStates": { + "foo" : "bar" + } + } + } + } + + """.trimIndent() + + Assertions.assertThrows(IllegalStateException::class.java) { + Mockito.mock(AirbyteMessageConsumer::class.java).use { consumer -> + Destination.ShimToSerializedAirbyteMessageConsumer.consumeMessage( + consumer, + invalidStateMessage + ) + } + } + } + + @Test + fun testConsumptionOfInvalidNonStateMessage() { + val invalidNonStateMessage = + """ + { + "type" : "NOT_RECOGNIZED", + "record" : { + "namespace": "namespace", + "stream": "stream", + "emittedAt": 123456789 + } + } + + """.trimIndent() + + Assertions.assertDoesNotThrow { + Mockito.mock(AirbyteMessageConsumer::class.java).use { consumer + -> + Destination.ShimToSerializedAirbyteMessageConsumer.consumeMessage( + consumer, + invalidNonStateMessage + ) + Mockito.verify(consumer, Mockito.times(0)).accept(any()) + } + } + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(IntegrationRunnerTest::class.java) + + private const val CONFIG_FILE_NAME = "config.json" + private const val CONFIGURED_CATALOG_FILE_NAME = "configured_catalog.json" + private const val STATE_FILE_NAME = "state.json" + + private val ARGS = arrayOf("args") + + private const val CONFIG_STRING = "{ \"username\": \"airbyte\" }" + private val CONFIG: JsonNode = Jsons.deserialize(CONFIG_STRING) + private const val STREAM_NAME = "users" + private val EMITTED_AT = Instant.now().toEpochMilli() + private val TEST_ROOT: Path = Path.of("/tmp/airbyte_tests") + + private val CATALOG: AirbyteCatalog = + AirbyteCatalog().withStreams(Lists.newArrayList(AirbyteStream().withName(STREAM_NAME))) + private val CONFIGURED_CATALOG: ConfiguredAirbyteCatalog = + CatalogHelpers.toDefaultConfiguredCatalog(CATALOG) + private val STATE: JsonNode = Jsons.jsonNode(ImmutableMap.of("checkpoint", "05/08/1945")) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/base/NameTransformerTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/base/NameTransformerTest.kt new file mode 100644 index 0000000000000..781d53fb052d0 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/base/NameTransformerTest.kt @@ -0,0 +1,120 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.base + +import io.airbyte.cdk.integrations.destination.NamingConventionTransformer +import io.airbyte.cdk.integrations.destination.StandardNameTransformer +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class NameTransformerTest { + @Test + fun testStandardSQLNaming() { + val namingResolver: NamingConventionTransformer = StandardNameTransformer() + Assertions.assertEquals("identifier_name", namingResolver.getIdentifier("identifier_name")) + Assertions.assertEquals("iDenTiFieR_name", namingResolver.getIdentifier("iDenTiFieR_name")) + Assertions.assertEquals( + "__identifier_name", + namingResolver.getIdentifier("__identifier_name") + ) + Assertions.assertEquals("IDENTIFIER_NAME", namingResolver.getIdentifier("IDENTIFIER_NAME")) + Assertions.assertEquals( + "123identifier_name", + namingResolver.getIdentifier("123identifier_name") + ) + Assertions.assertEquals( + "i0d0e0n0t0i0f0i0e0r0n0a0m0e", + namingResolver.getIdentifier("i0d0e0n0t0i0f0i0e0r0n0a0m0e") + ) + Assertions.assertEquals( + "_identifier_name", + namingResolver.getIdentifier(",identifier+name") + ) + Assertions.assertEquals("identifier_name", namingResolver.getIdentifier("identifiêr name")) + Assertions.assertEquals( + "a_unicode_name__", + namingResolver.getIdentifier("a_unicode_name_文") + ) + Assertions.assertEquals( + "identifier__name__", + namingResolver.getIdentifier("identifier__name__") + ) + Assertions.assertEquals( + "identifier_name_weee", + namingResolver.getIdentifier("identifier-name.weee") + ) + Assertions.assertEquals( + "_identifier_name_", + namingResolver.getIdentifier("\"identifier name\"") + ) + Assertions.assertEquals("identifier_name", namingResolver.getIdentifier("identifier name")) + Assertions.assertEquals("identifier_", namingResolver.getIdentifier("identifier%")) + Assertions.assertEquals("_identifier_", namingResolver.getIdentifier("`identifier`")) + + Assertions.assertEquals( + "_airbyte_raw_identifier_name", + namingResolver.getRawTableName("identifier_name") + ) + } + + // Temporarily disabling the behavior of the StandardNameTransformer, see (issue #1785) + // @Test + fun testExtendedSQLNaming() { + val namingResolver: NamingConventionTransformer = StandardNameTransformer() + Assertions.assertEquals("identifier_name", namingResolver.getIdentifier("identifier_name")) + Assertions.assertEquals("iDenTiFieR_name", namingResolver.getIdentifier("iDenTiFieR_name")) + Assertions.assertEquals( + "__identifier_name", + namingResolver.getIdentifier("__identifier_name") + ) + Assertions.assertEquals("IDENTIFIER_NAME", namingResolver.getIdentifier("IDENTIFIER_NAME")) + Assertions.assertEquals( + "\"123identifier_name\"", + namingResolver.getIdentifier("123identifier_name") + ) + Assertions.assertEquals( + "i0d0e0n0t0i0f0i0e0r0n0a0m0e", + namingResolver.getIdentifier("i0d0e0n0t0i0f0i0e0r0n0a0m0e") + ) + Assertions.assertEquals( + "\",identifier+name\"", + namingResolver.getIdentifier(",identifier+name") + ) + Assertions.assertEquals( + "\"identifiêr name\"", + namingResolver.getIdentifier("identifiêr name") + ) + Assertions.assertEquals( + "\"a_unicode_name_文\"", + namingResolver.getIdentifier("a_unicode_name_文") + ) + Assertions.assertEquals( + "identifier__name__", + namingResolver.getIdentifier("identifier__name__") + ) + Assertions.assertEquals( + "\"identifier-name.weee\"", + namingResolver.getIdentifier("identifier-name.weee") + ) + Assertions.assertEquals( + "\"\"identifier name\"\"", + namingResolver.getIdentifier("\"identifier name\"") + ) + Assertions.assertEquals( + "\"identifier name\"", + namingResolver.getIdentifier("identifier name") + ) + Assertions.assertEquals("\"identifier%\"", namingResolver.getIdentifier("identifier%")) + Assertions.assertEquals("\"`identifier`\"", namingResolver.getIdentifier("`identifier`")) + + Assertions.assertEquals( + "_airbyte_raw_identifier_name", + namingResolver.getRawTableName("identifier_name") + ) + Assertions.assertEquals( + "\"_airbyte_raw_identifiêr name\"", + namingResolver.getRawTableName("identifiêr name") + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/base/normalization/NormalizationLogParserTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/base/normalization/NormalizationLogParserTest.kt new file mode 100644 index 0000000000000..59df191d10f39 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/base/normalization/NormalizationLogParserTest.kt @@ -0,0 +1,133 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.base.normalization + +import io.airbyte.cdk.integrations.destination.normalization.NormalizationLogParser +import io.airbyte.protocol.models.AirbyteErrorTraceMessage +import io.airbyte.protocol.models.AirbyteLogMessage +import io.airbyte.protocol.models.AirbyteMessage +import io.airbyte.protocol.models.AirbyteTraceMessage +import java.io.BufferedReader +import java.io.ByteArrayInputStream +import java.io.InputStreamReader +import java.nio.charset.StandardCharsets +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test + +internal class NormalizationLogParserTest { + private var parser: NormalizationLogParser? = null + + @BeforeEach + fun setup() { + parser = NormalizationLogParser() + } + + @Test + fun testWrapNonJsonLogs() { + runTest( + """ + foo + bar + [error] oh no + asdf + [error] qwer + + """.trimIndent(), + java.util.List.of( + logMessage(AirbyteLogMessage.Level.INFO, "foo"), + logMessage(AirbyteLogMessage.Level.INFO, "bar"), + logMessage(AirbyteLogMessage.Level.INFO, "[error] oh no"), + logMessage(AirbyteLogMessage.Level.INFO, "asdf"), + logMessage(AirbyteLogMessage.Level.INFO, "[error] qwer") + ), + listOf("[error] oh no", "[error] qwer") + ) + } + + @Test + fun testWrapJsonLogs() { + runTest( + """ + {"code": "A001", "data": {"v": "=1.0.9"}, "invocation_id": "ed2017da-965d-406b-8fa1-07fb7c19fd14", "level": "info", "log_version": 1, "msg": "Running with dbt=1.0.9", "node_info": {}, "pid": 55, "thread_name": "MainThread", "ts": "2023-04-11T16:08:54.781886Z", "type": "log_line"} + {"code": "A001", "data": {"v": "=1.0.9"}, "invocation_id": "ed2017da-965d-406b-8fa1-07fb7c19fd14", "level": "error", "log_version": 1, "msg": "oh no", "node_info": {}, "pid": 55, "thread_name": "MainThread", "ts": "2023-04-11T16:08:54.781886Z", "type": "log_line"} + {"type": "TRACE", "trace": {"type": "ERROR", "emitted_at": 1.681766805198E12, "error": {"failure_type": "system_error", "message": "uh oh", "stack_trace": "normalization blew up", "internal_message": "normalization blew up with more detail"}}} + + """.trimIndent(), + java.util.List.of( + logMessage(AirbyteLogMessage.Level.INFO, "Running with dbt=1.0.9"), + logMessage(AirbyteLogMessage.Level.ERROR, "oh no"), + AirbyteMessage() + .withType(AirbyteMessage.Type.TRACE) + .withTrace( + AirbyteTraceMessage() + .withType(AirbyteTraceMessage.Type.ERROR) + .withEmittedAt(1.681766805198E12) + .withError( + AirbyteErrorTraceMessage() + .withFailureType( + AirbyteErrorTraceMessage.FailureType.SYSTEM_ERROR + ) + .withMessage("uh oh") + .withStackTrace("normalization blew up") + .withInternalMessage("normalization blew up with more detail") + ) + ) + ), + listOf("oh no") + ) + } + + @Test + fun testWeirdLogs() { + runTest( + """ + null + "null" + {"msg": "message with no level", "type": "log_line"} + {"level": "info", "type": "log_line"} + {"level": "error", "type": "log_line"} + + """.trimIndent(), + java.util.List.of( + logMessage(AirbyteLogMessage.Level.INFO, "null"), + logMessage(AirbyteLogMessage.Level.INFO, "\"null\""), + logMessage( + AirbyteLogMessage.Level.INFO, + "{\n \"msg\" : \"message with no level\",\n \"type\" : \"log_line\"\n}" + ), + logMessage(AirbyteLogMessage.Level.INFO, ""), + logMessage(AirbyteLogMessage.Level.ERROR, "") + ), + listOf("") + ) + } + + private fun runTest( + rawLogs: String, + expectedMessages: List, + expectedDbtErrors: List + ) { + val messages = + parser!! + .create( + BufferedReader( + InputStreamReader( + ByteArrayInputStream(rawLogs.toByteArray(StandardCharsets.UTF_8)), + StandardCharsets.UTF_8 + ) + ) + ) + .toList() + + Assertions.assertEquals(expectedMessages, messages) + Assertions.assertEquals(expectedDbtErrors, parser!!.dbtErrors) + } + + private fun logMessage(level: AirbyteLogMessage.Level, message: String): AirbyteMessage { + return AirbyteMessage() + .withType(AirbyteMessage.Type.LOG) + .withLog(AirbyteLogMessage().withLevel(level).withMessage(message)) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/base/ssh/SshTunnelTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/base/ssh/SshTunnelTest.kt new file mode 100644 index 0000000000000..4def0951bbff9 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/base/ssh/SshTunnelTest.kt @@ -0,0 +1,218 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.base.ssh + +import com.fasterxml.jackson.databind.ObjectMapper +import io.airbyte.commons.json.Jsons +import java.nio.charset.StandardCharsets +import java.security.* +import java.util.* +import org.apache.sshd.client.SshClient +import org.apache.sshd.client.session.ClientSession +import org.apache.sshd.common.util.security.SecurityUtils +import org.apache.sshd.common.util.security.eddsa.EdDSASecurityProviderRegistrar +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.junit.jupiter.params.ParameterizedTest +import org.junit.jupiter.params.provider.ValueSource + +internal class SshTunnelTest { + /** + * This test verifies that OpenSsh correctly replaces values in connector configuration in a + * spec with host/port config and in a spec with endpoint URL config + * + * @param configString + * @throws Exception + */ + @ParameterizedTest + @ValueSource(strings = [HOST_PORT_CONFIG, URL_CONFIG_WITH_PORT, URL_CONFIG_NO_PORT]) + @Throws(Exception::class) + fun testConfigInTunnel(configString: String) { + val config = ObjectMapper().readTree(String.format(configString, SSH_RSA_PRIVATE_KEY)) + val endPointURL = Jsons.getStringOrNull(config, "endpoint") + val sshTunnel: SshTunnel = + object : + SshTunnel( + config, + if (endPointURL == null) Arrays.asList(*arrayOf("host")) else null, + if (endPointURL == null) Arrays.asList(*arrayOf("port")) else null, + if (endPointURL == null) null else "endpoint", + endPointURL, + TunnelMethod.SSH_KEY_AUTH, + "faketunnel.com", + 22, + "tunnelUser", + SSH_RSA_PRIVATE_KEY, + "tunnelUserPassword", + if (endPointURL == null) "fakeHost.com" else null, + if (endPointURL == null) 5432 else 0 + ) { + public override fun openTunnel(client: SshClient): ClientSession? { + tunnelLocalPort = 8080 + return null // Prevent tunnel from attempting to connect + } + } + + val configInTunnel = sshTunnel.configInTunnel + if (endPointURL == null) { + Assertions.assertTrue(configInTunnel!!.has("port")) + Assertions.assertTrue(configInTunnel.has("host")) + Assertions.assertFalse(configInTunnel.has("endpoint")) + Assertions.assertEquals(8080, configInTunnel!!["port"].asInt()) + Assertions.assertEquals("127.0.0.1", configInTunnel["host"].asText()) + } else { + Assertions.assertFalse(configInTunnel!!.has("port")) + Assertions.assertFalse(configInTunnel.has("host")) + Assertions.assertTrue(configInTunnel.has("endpoint")) + Assertions.assertEquals( + "http://127.0.0.1:8080/service", + configInTunnel["endpoint"].asText() + ) + } + } + + /** + * This test verifies that SshTunnel correctly extracts private key pairs from keys formatted as + * EdDSA and OpenSSH + * + * @param privateKey + * @throws Exception + */ + @ParameterizedTest + @ValueSource(strings = [SSH_ED25519_PRIVATE_KEY, SSH_RSA_PRIVATE_KEY]) + @Throws(Exception::class) + fun getKeyPair(privateKey: String?) { + val config = ObjectMapper().readTree(String.format(HOST_PORT_CONFIG, privateKey)) + val sshTunnel: SshTunnel = + object : + SshTunnel( + config, + Arrays.asList(*arrayOf("host")), + Arrays.asList(*arrayOf("port")), + null, + null, + TunnelMethod.SSH_KEY_AUTH, + "faketunnel.com", + 22, + "tunnelUser", + privateKey, + "tunnelUserPassword", + "fakeHost.com", + 5432 + ) { + public override fun openTunnel(client: SshClient): ClientSession? { + return null // Prevent tunnel from attempting to connect + } + } + + val authKeyPair = sshTunnel.privateKeyPair + Assertions.assertNotNull( + authKeyPair + ) // actually, all is good if there is no exception on previous line + } + + /** + * This test verifies that 'net.i2p.crypto:eddsa' is present and EdDSA is supported. If + * net.i2p.crypto:eddsa will be removed from project, then will be thrown: generator not + * correctly initialized + * + * @throws Exception + */ + @Test + @Throws(Exception::class) + fun edDsaIsSupported() { + val keygen = SecurityUtils.getKeyPairGenerator("EdDSA") + val message = "hello world" + val keyPair = keygen.generateKeyPair() + + val signedMessage = sign(keyPair.private, message) + + Assertions.assertTrue(EdDSASecurityProviderRegistrar().isSupported) + Assertions.assertTrue(verify(keyPair.public, signedMessage, message)) + } + + @Throws(Exception::class) + private fun sign(privateKey: PrivateKey, message: String): ByteArray { + val signature = SecurityUtils.getSignature("NONEwithEdDSA") + signature.initSign(privateKey) + + signature.update(message.toByteArray(StandardCharsets.UTF_8)) + + return signature.sign() + } + + @Throws(Exception::class) + private fun verify(publicKey: PublicKey, signed: ByteArray, message: String): Boolean { + val signature = SecurityUtils.getSignature("NONEwithEdDSA") + signature.initVerify(publicKey) + + signature.update(message.toByteArray(StandardCharsets.UTF_8)) + + return signature.verify(signed) + } + + companion object { + private const val SSH_ED25519_PRIVATE_KEY = + ("-----BEGIN OPENSSH PRIVATE KEY-----\\n" + + "b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAAAMwAAAAtzc2gtZW\\n" + + "QyNTUxOQAAACDbBP+5jmEtjh1JvhzVQsvvTC2IQrX6P68XzrV7ZbnGsQAAAKBgtw9/YLcP\\n" + + "fwAAAAtzc2gtZWQyNTUxOQAAACDbBP+5jmEtjh1JvhzVQsvvTC2IQrX6P68XzrV7ZbnGsQ\\n" + + "AAAEAaKYn22N1O78HfdG22C7hcG2HiezKMzlq4JTdgYG1DstsE/7mOYS2OHUm+HNVCy+9M\\n" + + "LYhCtfo/rxfOtXtlucaxAAAAHHRmbG9yZXNfZHQwMUB0ZmxvcmVzX2R0MDEtUEMB\\n" + + "-----END OPENSSH PRIVATE KEY-----") + private const val SSH_RSA_PRIVATE_KEY = + ("-----BEGIN OPENSSH PRIVATE KEY-----\\n" + + "b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAABlwAAAAdzc2gtcn\\n" + + "NhAAAAAwEAAQAAAYEAuFjfTMS6BrgoxaQe9i83y6CdGH3xJIwc1Wy+11ibWAFcQ6khX/x0\\n" + + "M+JnJaSCs/hxiDE4afHscP3HzVQC699IgKwyAPaG0ZG+bLhxWAm4E79P7Yssj7imhTqr0A\\n" + + "DZDO23CCOagHvfdg1svnBhk1ih14GMGKRFCS27CLgholIOeogOyH7b3Jaqy9LtICiE054e\\n" + + "jwdaZdwWU08kxMO4ItdxNasCPC5uQiaXIzWFysG0mLk7WWc8WyuQHneQFl3Qu6p/rWJz4i\\n" + + "seea5CBL5s1DIyCyo/jgN5/oOWOciPUl49mDLleCzYTDnWqX43NK9A87unNeuA95Fk9akH\\n" + + "8QH4hKBCzpHhsh4U3Ys/l9Q5NmnyBrtFWBY2n13ZftNA/Ms+Hsh6V3eyJW0rIFY2/UM4XA\\n" + + "YyD6MEOlvFAQjxC6EbqfkrC6FQgH3I2wAtIDqEk2j79vfIIDdzp8otWjIQsApX55j+kKio\\n" + + "sY8YTXb9sLWuEdpSd/AN3iQ8HwIceyTulaKn7rTBAAAFkMwDTyPMA08jAAAAB3NzaC1yc2\\n" + + "EAAAGBALhY30zEuga4KMWkHvYvN8ugnRh98SSMHNVsvtdYm1gBXEOpIV/8dDPiZyWkgrP4\\n" + + "cYgxOGnx7HD9x81UAuvfSICsMgD2htGRvmy4cVgJuBO/T+2LLI+4poU6q9AA2Qzttwgjmo\\n" + + "B733YNbL5wYZNYodeBjBikRQktuwi4IaJSDnqIDsh+29yWqsvS7SAohNOeHo8HWmXcFlNP\\n" + + "JMTDuCLXcTWrAjwubkImlyM1hcrBtJi5O1lnPFsrkB53kBZd0Luqf61ic+IrHnmuQgS+bN\\n" + + "QyMgsqP44Def6DljnIj1JePZgy5Xgs2Ew51ql+NzSvQPO7pzXrgPeRZPWpB/EB+ISgQs6R\\n" + + "4bIeFN2LP5fUOTZp8ga7RVgWNp9d2X7TQPzLPh7Ield3siVtKyBWNv1DOFwGMg+jBDpbxQ\\n" + + "EI8QuhG6n5KwuhUIB9yNsALSA6hJNo+/b3yCA3c6fKLVoyELAKV+eY/pCoqLGPGE12/bC1\\n" + + "rhHaUnfwDd4kPB8CHHsk7pWip+60wQAAAAMBAAEAAAGAXw+dHpY3o21lwP0v5h1VNVD+kX\\n" + + "moVwNVfw0ToDKV8JzK+i0GA9xIA9VVAUlDCREtYmCXSbKyDVYgqRYQZ5d9aLTjGDIINZtl\\n" + + "SeUWtaJVZQF7cvAYq4g5fmxR2vIE+zC9+Jl7e5PlGJg1okKLXpMO6fVoy/AxlVkaoJVq6q\\n" + + "xLwQ3WKbeZIrgjHPYIx1N9oy5fbbwJ9oq2jIE8YabXlkfonhcwEN6UhtIlj8dy1apruXGT\\n" + + "VDfzHMRrDfrzt0TrdUqmqgo/istP89sggtkJ8uuPtkBFHTjao8MiBsshy1iDVbIno9gDbJ\\n" + + "JgYyunmSgEjEZpp09+mkgwfZO3/RDLRPF1SRAGBNy27CH8/bh9gAVRhAPi0GLclNi292Ya\\n" + + "NrGvjMcRlYAsWL3mZ9aTbv0j7Qi8qdWth+rZ+tBmNToUVVl5iLxifgo0kjiXAehZB1LaQV\\n" + + "yuMXlXOGmt9V2/DPACA9getQJQONxrLAcgHdjMiuwD8r7d+m/kE4+cOTakOlzrfrwBAAAA\\n" + + "wQCVTQTvuyBW3JemMPtRLifQqdwMGRPokm5nTn+JSJQvg+dNpL7hC0k3IesKs63gxuuHoq\\n" + + "4q1xkMmCMvihT8oVlxrezEjsO/QMCxe6Sr9eMfHAjrdPeHsPaf9oOgG9vEEH9dEilHpnlb\\n" + + "97Vyl9EHm1iahONM1gWdXkPjIfnQzYPvSLZPtBBSI0XBjCTifMnCRgd3s2bdm7kh+7XA+C\\n" + + "rX62WfPIJKL+OhMIf+ED4HBJTd/vU34Vk73yvqHzqel0ZQnRoAAADBAOGSm6TNBptV7S5P\\n" + + "wT3BhGYSm35/7nCFTilyfy5/8EUmifUFittRIvgDoXBWeZEwvqIiQ55iX9mYmNmb0KbPCw\\n" + + "cqN/BtXWItAvyTDZ6PeI2m2aUj+rW2R3ZXEsBjgaNRtbPyMKQ69xtKRvHtNZNfgjpRQ4is\\n" + + "lbufhAK1YbUxrlfKaBGOcGyR7DNmUUUN6nptQbpOr1HQc5DOH17HIDnRPs44HIws3/apww\\n" + + "RBIjjy6GQNfJ/Ge8N4pxGoLl1qKO8xoQAAAMEA0Tat/E5mSsgjCgmFja/jOZJcrzZHwrPT\\n" + + "3NEbuAMQ/L3atKEINypmpJfjIAvNljKJwSUDMEWvs8qj8cSGCrtkcAv1YSm697TL2oC9HU\\n" + + "CFoOJAkH1X2CGTgHlR9it3j4aRJ3dXdL2k7aeoGXObfRWqBNPj0LOOZs64RA6scGAzo6MR\\n" + + "5WlcOxfV1wZuaM0fOd+PBmIlFEE7Uf6AY/UahBAxaFV2+twgK9GCDcu1t4Ye9wZ9kZ4Nal\\n" + + "0fkKD4uN4DRO8hAAAAFm10dWhhaUBrYnAxLWxocC1hMTQ1MzMBAgME\\n" + + "-----END OPENSSH PRIVATE KEY-----") + private const val HOST_PORT_CONFIG = + ("{\"ssl\":true,\"host\":\"fakehost.com\",\"port\":5432,\"schema\":\"public\",\"database\":\"postgres\",\"password\":\"\",\"username\":\"postgres\",\"tunnel_method\":{\"ssh_key\":\"" + + "%s" + + "\",\"tunnel_host\":\"faketunnel.com\",\"tunnel_port\":22,\"tunnel_user\":\"ec2-user\",\"tunnel_method\":\"SSH_KEY_AUTH\"}}") + + private const val URL_CONFIG_WITH_PORT = + ("{\"ssl\":true,\"endpoint\":\"http://fakehost.com:9090/service\",\"password\":\"\",\"username\":\"restuser\",\"tunnel_method\":{\"ssh_key\":\"" + + "%s" + + "\",\"tunnel_host\":\"faketunnel.com\",\"tunnel_port\":22,\"tunnel_user\":\"ec2-user\",\"tunnel_method\":\"SSH_KEY_AUTH\"}}") + + private const val URL_CONFIG_NO_PORT = + ("{\"ssl\":true,\"endpoint\":\"http://fakehost.com/service\",\"password\":\"\",\"username\":\"restuser\",\"tunnel_method\":{\"ssh_key\":\"" + + "%s" + + "\",\"tunnel_host\":\"faketunnel.com\",\"tunnel_port\":22,\"tunnel_user\":\"ec2-user\",\"tunnel_method\":\"SSH_KEY_AUTH\"}}") + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/AirbyteFileUtilsTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/AirbyteFileUtilsTest.kt new file mode 100644 index 0000000000000..c1ef1017f1fb5 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/AirbyteFileUtilsTest.kt @@ -0,0 +1,19 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.async + +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +class AirbyteFileUtilsTest { + @Test + internal fun testByteCountToDisplaySize() { + Assertions.assertEquals("500 bytes", AirbyteFileUtils.byteCountToDisplaySize(500L)) + Assertions.assertEquals("1.95 KB", AirbyteFileUtils.byteCountToDisplaySize(2000L)) + Assertions.assertEquals("2.93 MB", AirbyteFileUtils.byteCountToDisplaySize(3072000L)) + Assertions.assertEquals("2.67 GB", AirbyteFileUtils.byteCountToDisplaySize(2872000000L)) + Assertions.assertEquals("1.82 TB", AirbyteFileUtils.byteCountToDisplaySize(2000000000000L)) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/AsyncStreamConsumerTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/AsyncStreamConsumerTest.kt new file mode 100644 index 0000000000000..a2128488bd897 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/AsyncStreamConsumerTest.kt @@ -0,0 +1,580 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.async + +import com.fasterxml.jackson.databind.JsonNode +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings +import io.airbyte.cdk.integrations.destination.async.buffers.BufferManager +import io.airbyte.cdk.integrations.destination.async.deser.AirbyteMessageDeserializer +import io.airbyte.cdk.integrations.destination.async.deser.StreamAwareDataTransformer +import io.airbyte.cdk.integrations.destination.async.function.DestinationFlushFunction +import io.airbyte.cdk.integrations.destination.async.model.PartialAirbyteMessage +import io.airbyte.cdk.integrations.destination.async.model.PartialAirbyteRecordMessage +import io.airbyte.cdk.integrations.destination.async.state.FlushFailure +import io.airbyte.cdk.integrations.destination.buffered_stream_consumer.OnCloseFunction +import io.airbyte.cdk.integrations.destination.buffered_stream_consumer.OnStartFunction +import io.airbyte.cdk.integrations.destination.buffered_stream_consumer.RecordSizeEstimator +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.Field +import io.airbyte.protocol.models.JsonSchemaType +import io.airbyte.protocol.models.v0.AirbyteLogMessage +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.AirbyteRecordMessage +import io.airbyte.protocol.models.v0.AirbyteStateMessage +import io.airbyte.protocol.models.v0.AirbyteStateStats +import io.airbyte.protocol.models.v0.AirbyteStreamState +import io.airbyte.protocol.models.v0.CatalogHelpers +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import io.airbyte.protocol.models.v0.StreamDescriptor +import java.io.IOException +import java.math.BigDecimal +import java.time.Instant +import java.util.Optional +import java.util.concurrent.Executors +import java.util.concurrent.TimeUnit +import java.util.concurrent.TimeoutException +import java.util.concurrent.atomic.AtomicLong +import java.util.function.Consumer +import java.util.stream.Collectors +import java.util.stream.Stream +import org.apache.commons.lang3.RandomStringUtils +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertThrows +import org.junit.jupiter.api.Assertions.assertTrue +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Nested +import org.junit.jupiter.api.Test +import org.mockito.ArgumentMatchers +import org.mockito.Mockito + +class AsyncStreamConsumerTest { + companion object { + private const val RECORD_SIZE_20_BYTES = 20 + private const val SCHEMA_NAME = "public" + private const val STREAM_NAME = "id_and_name" + private const val STREAM_NAME2 = STREAM_NAME + 2 + private val STREAM1_DESC: StreamDescriptor = + StreamDescriptor().withNamespace(SCHEMA_NAME).withName(STREAM_NAME) + + private val CATALOG: ConfiguredAirbyteCatalog = + ConfiguredAirbyteCatalog() + .withStreams( + java.util.List.of( + CatalogHelpers.createConfiguredAirbyteStream( + STREAM_NAME, + SCHEMA_NAME, + Field.of("id", JsonSchemaType.NUMBER), + Field.of("name", JsonSchemaType.STRING), + ), + CatalogHelpers.createConfiguredAirbyteStream( + STREAM_NAME2, + SCHEMA_NAME, + Field.of("id", JsonSchemaType.NUMBER), + Field.of("name", JsonSchemaType.STRING), + ), + ), + ) + + private val PAYLOAD: JsonNode = + Jsons.jsonNode( + mapOf( + "created_at" to "2022-02-01T17:02:19+00:00", + "id" to 1, + "make" to "Mazda", + "nested_column" to mapOf("array_column" to listOf(1, 2, 3)), + ), + ) + + private val STATE_MESSAGE1: AirbyteMessage = + AirbyteMessage() + .withType(AirbyteMessage.Type.STATE) + .withState( + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream( + AirbyteStreamState() + .withStreamDescriptor( + STREAM1_DESC, + ) + .withStreamState(Jsons.jsonNode(1)), + ), + ) + private val STATE_MESSAGE2: AirbyteMessage = + AirbyteMessage() + .withType(AirbyteMessage.Type.STATE) + .withState( + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream( + AirbyteStreamState() + .withStreamDescriptor( + STREAM1_DESC, + ) + .withStreamState(Jsons.jsonNode(2)), + ), + ) + } + + private lateinit var consumer: AsyncStreamConsumer + private lateinit var onStart: OnStartFunction + private lateinit var flushFunction: DestinationFlushFunction + private lateinit var onClose: OnCloseFunction + private lateinit var outputRecordCollector: Consumer + private lateinit var flushFailure: FlushFailure + private lateinit var streamAwareDataTransformer: StreamAwareDataTransformer + private lateinit var airbyteMessageDeserializer: AirbyteMessageDeserializer + + @BeforeEach + @Suppress("UNCHECKED_CAST") + @SuppressFBWarnings(value = ["RCN_REDUNDANT_NULLCHECK_WOULD_HAVE_BEEN_A_NPE"]) + internal fun setup() { + onStart = + Mockito.mock( + OnStartFunction::class.java, + ) + onClose = Mockito.mock(OnCloseFunction::class.java) + flushFunction = Mockito.mock(DestinationFlushFunction::class.java) + outputRecordCollector = Mockito.mock(Consumer::class.java) as Consumer + flushFailure = Mockito.mock(FlushFailure::class.java) + airbyteMessageDeserializer = AirbyteMessageDeserializer() + consumer = + AsyncStreamConsumer( + outputRecordCollector = outputRecordCollector, + onStart = onStart, + onClose = onClose, + onFlush = flushFunction, + catalog = CATALOG, + bufferManager = BufferManager(), + flushFailure = flushFailure, + defaultNamespace = Optional.of("default_ns"), + airbyteMessageDeserializer = airbyteMessageDeserializer, + workerPool = Executors.newFixedThreadPool(5), + ) + + Mockito.`when`(flushFunction.optimalBatchSizeBytes).thenReturn(10000L) + } + + @Test + @Throws(Exception::class) + internal fun test1StreamWith1State() { + val expectedRecords = generateRecords(1000) + + consumer.start() + consumeRecords(consumer, expectedRecords) + consumer.accept(Jsons.serialize(STATE_MESSAGE1), RECORD_SIZE_20_BYTES) + consumer.close() + + verifyStartAndClose() + + verifyRecords(STREAM_NAME, SCHEMA_NAME, expectedRecords) + + val stateMessageWithDestinationStatsUpdated = + AirbyteMessage() + .withType(AirbyteMessage.Type.STATE) + .withState( + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream( + AirbyteStreamState() + .withStreamDescriptor( + STREAM1_DESC, + ) + .withStreamState(Jsons.jsonNode(1)), + ) + .withDestinationStats( + AirbyteStateStats().withRecordCount(expectedRecords.size.toDouble()), + ), + ) + + Mockito.verify(outputRecordCollector).accept(stateMessageWithDestinationStatsUpdated) + } + + @Test + @Throws(Exception::class) + internal fun test1StreamWith2State() { + val expectedRecords = generateRecords(1000) + + consumer.start() + consumeRecords(consumer, expectedRecords) + consumer.accept(Jsons.serialize(STATE_MESSAGE1), RECORD_SIZE_20_BYTES) + consumer.accept(Jsons.serialize(STATE_MESSAGE2), RECORD_SIZE_20_BYTES) + consumer.close() + + verifyStartAndClose() + + verifyRecords(STREAM_NAME, SCHEMA_NAME, expectedRecords) + + val stateMessageWithDestinationStatsUpdated = + AirbyteMessage() + .withType(AirbyteMessage.Type.STATE) + .withState( + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream( + AirbyteStreamState() + .withStreamDescriptor( + STREAM1_DESC, + ) + .withStreamState(Jsons.jsonNode(2)), + ) + .withDestinationStats(AirbyteStateStats().withRecordCount(0.0)), + ) + + Mockito.verify( + outputRecordCollector, + Mockito.times(1), + ) + .accept(stateMessageWithDestinationStatsUpdated) + } + + @Test + @Throws(Exception::class) + internal fun test1StreamWith0State() { + val allRecords = generateRecords(1000) + + consumer.start() + consumeRecords(consumer, allRecords) + consumer.close() + + verifyStartAndClose() + + verifyRecords(STREAM_NAME, SCHEMA_NAME, allRecords) + } + + @Test + @Throws(Exception::class) + internal fun testShouldBlockWhenQueuesAreFull() { + consumer.start() + } + + /* + * Tests that the consumer will block when the buffer is full. Achieves this by setting optimal + * batch size to 0, so the flush worker never actually pulls anything from the queue. + */ + @Test + @Throws(Exception::class) + internal fun testBackPressure() { + flushFunction = Mockito.mock(DestinationFlushFunction::class.java) + flushFailure = Mockito.mock(FlushFailure::class.java) + consumer = + AsyncStreamConsumer( + {}, + Mockito.mock(OnStartFunction::class.java), + Mockito.mock(OnCloseFunction::class.java), + flushFunction, + CATALOG, + BufferManager((1024 * 10).toLong()), + flushFailure, + Optional.of("default_ns"), + ) + Mockito.`when`(flushFunction.optimalBatchSizeBytes).thenReturn(0L) + + val recordCount = AtomicLong() + + consumer.start() + + val executor = Executors.newSingleThreadExecutor() + while (true) { + val future = + executor.submit { + try { + consumer.accept( + Jsons.serialize( + AirbyteMessage() + .withType(AirbyteMessage.Type.RECORD) + .withRecord( + AirbyteRecordMessage() + .withStream(STREAM_NAME) + .withNamespace(SCHEMA_NAME) + .withEmittedAt(Instant.now().toEpochMilli()) + .withData( + Jsons.jsonNode(recordCount.getAndIncrement()), + ), + ), + ), + RECORD_SIZE_20_BYTES, + ) + } catch (e: Exception) { + throw RuntimeException(e) + } + } + + try { + future[1, TimeUnit.SECONDS] + } catch (e: TimeoutException) { + future.cancel(true) // Stop the operation running in thread + break + } + } + executor.shutdownNow() + + assertTrue(recordCount.get() < 1000, "Record count was ${recordCount.get()}") + } + + @Test + internal fun deserializeAirbyteMessageWithAirbyteRecord() { + val airbyteMessage = + AirbyteMessage() + .withType(AirbyteMessage.Type.RECORD) + .withRecord( + AirbyteRecordMessage() + .withStream(STREAM_NAME) + .withNamespace(SCHEMA_NAME) + .withData(PAYLOAD), + ) + val serializedAirbyteMessage = Jsons.serialize(airbyteMessage) + val airbyteRecordString = Jsons.serialize(PAYLOAD) + val partial = + airbyteMessageDeserializer.deserializeAirbyteMessage( + serializedAirbyteMessage, + ) + assertEquals(airbyteRecordString, partial.serialized) + } + + @Test + internal fun deserializeAirbyteMessageWithBigDecimalAirbyteRecord() { + val payload = + Jsons.jsonNode( + mapOf( + "foo" to BigDecimal("1234567890.1234567890"), + ), + ) + val airbyteMessage = + AirbyteMessage() + .withType(AirbyteMessage.Type.RECORD) + .withRecord( + AirbyteRecordMessage() + .withStream(STREAM_NAME) + .withNamespace(SCHEMA_NAME) + .withData(payload), + ) + val serializedAirbyteMessage = Jsons.serialize(airbyteMessage) + val airbyteRecordString = Jsons.serialize(payload) + val partial = + airbyteMessageDeserializer.deserializeAirbyteMessage( + serializedAirbyteMessage, + ) + assertEquals(airbyteRecordString, partial.serialized) + } + + @Test + internal fun deserializeAirbyteMessageWithEmptyAirbyteRecord() { + val emptyMap: Map<*, *> = java.util.Map.of() + val airbyteMessage = + AirbyteMessage() + .withType(AirbyteMessage.Type.RECORD) + .withRecord( + AirbyteRecordMessage() + .withStream(STREAM_NAME) + .withNamespace(SCHEMA_NAME) + .withData(Jsons.jsonNode(emptyMap)), + ) + val serializedAirbyteMessage = Jsons.serialize(airbyteMessage) + val partial = + airbyteMessageDeserializer.deserializeAirbyteMessage( + serializedAirbyteMessage, + ) + assertEquals(emptyMap.toString(), partial.serialized) + } + + @Test + internal fun deserializeAirbyteMessageWithNoStateOrRecord() { + val airbyteMessage = + AirbyteMessage().withType(AirbyteMessage.Type.LOG).withLog(AirbyteLogMessage()) + val serializedAirbyteMessage = Jsons.serialize(airbyteMessage) + assertThrows( + RuntimeException::class.java, + ) { + airbyteMessageDeserializer.deserializeAirbyteMessage( + serializedAirbyteMessage, + ) + } + } + + @Test + internal fun deserializeAirbyteMessageWithAirbyteState() { + val serializedAirbyteMessage = Jsons.serialize(STATE_MESSAGE1) + val partial = + airbyteMessageDeserializer.deserializeAirbyteMessage( + serializedAirbyteMessage, + ) + assertEquals(serializedAirbyteMessage, partial.serialized) + } + + @Test + internal fun deserializeAirbyteMessageWithBadAirbyteState() { + val badState = + AirbyteMessage() + .withState( + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream( + AirbyteStreamState() + .withStreamDescriptor( + STREAM1_DESC, + ) + .withStreamState(Jsons.jsonNode(1)), + ), + ) + val serializedAirbyteMessage = Jsons.serialize(badState) + assertThrows( + RuntimeException::class.java, + ) { + airbyteMessageDeserializer.deserializeAirbyteMessage( + serializedAirbyteMessage, + ) + } + } + + @Nested + internal inner class ErrorHandling { + @Test + @Throws(Exception::class) + internal fun testErrorOnAccept() { + Mockito.`when`(flushFailure.isFailed()).thenReturn(false).thenReturn(true) + Mockito.`when`(flushFailure.exception).thenReturn(IOException("test exception")) + + val m = + AirbyteMessage() + .withType(AirbyteMessage.Type.RECORD) + .withRecord( + AirbyteRecordMessage() + .withStream(STREAM_NAME) + .withNamespace(SCHEMA_NAME) + .withEmittedAt(Instant.now().toEpochMilli()) + .withData(Jsons.deserialize("")), + ) + consumer.start() + consumer.accept(Jsons.serialize(m), RECORD_SIZE_20_BYTES) + assertThrows( + IOException::class.java, + ) { + consumer.accept( + Jsons.serialize( + m, + ), + RECORD_SIZE_20_BYTES, + ) + } + } + + @Test + @Throws(Exception::class) + internal fun testErrorOnClose() { + Mockito.`when`(flushFailure.isFailed()).thenReturn(true) + Mockito.`when`(flushFailure.exception).thenReturn(IOException("test exception")) + + consumer.start() + assertThrows( + IOException::class.java, + ) { + consumer.close() + } + } + } + + private fun consumeRecords( + consumer: AsyncStreamConsumer?, + records: Collection, + ) { + records.forEach( + Consumer { m: AirbyteMessage -> + try { + consumer!!.accept( + Jsons.serialize(m), + RECORD_SIZE_20_BYTES, + ) + } catch (e: Exception) { + throw RuntimeException(e) + } + }, + ) + } + + // NOTE: Generates records at chunks of 160 bytes + private fun generateRecords(targetSizeInBytes: Long): List { + val output: MutableList = arrayListOf() + var bytesCounter: Long = 0 + var i = 0 + while (true) { + val payload = + Jsons.jsonNode( + mapOf( + "id" to RandomStringUtils.randomAlphabetic(7), + "name" to "human " + String.format("%8d", i), + ), + ) + val sizeInBytes = RecordSizeEstimator.getStringByteSize(payload) + bytesCounter += sizeInBytes + val airbyteMessage = + AirbyteMessage() + .withType(AirbyteMessage.Type.RECORD) + .withRecord( + AirbyteRecordMessage() + .withStream(STREAM_NAME) + .withNamespace(SCHEMA_NAME) + .withData(payload), + ) + if (bytesCounter > targetSizeInBytes) { + break + } else { + output.add(airbyteMessage) + } + i++ + } + return output + } + + @Throws(Exception::class) + private fun verifyStartAndClose() { + Mockito.verify(onStart).call() + Mockito.verify(onClose).accept(ArgumentMatchers.any(), ArgumentMatchers.any()) + } + + @Throws(Exception::class) + private fun verifyRecords( + streamName: String, + namespace: String, + allRecords: List, + ) { + val argumentCaptor = org.mockito.kotlin.argumentCaptor>() + Mockito.verify(flushFunction, Mockito.atLeast(1)) + .flush( + org.mockito.kotlin.eq( + StreamDescriptor().withNamespace(namespace).withName(streamName) + ), + argumentCaptor.capture(), + ) + + // captures the output of all the workers, since our records could come out in any of them. + val actualRecords = + argumentCaptor.allValues + .stream() // flatten those results into a single list for the simplicity of + // comparison + .flatMap { s: Stream<*>? -> s } + .toList() + + val expRecords = + allRecords + .stream() + .map { m: AirbyteMessage -> + PartialAirbyteMessage() + .withType(AirbyteMessage.Type.RECORD) + .withRecord( + PartialAirbyteRecordMessage() + .withStream(m.record.stream) + .withNamespace(m.record.namespace) + .withData(m.record.data), + ) + .withSerialized( + Jsons.serialize( + m.record.data, + ), + ) + } + .collect(Collectors.toList()) + assertEquals(expRecords, actualRecords) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/DetectStreamToFlushTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/DetectStreamToFlushTest.kt new file mode 100644 index 0000000000000..190a62db5babd --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/DetectStreamToFlushTest.kt @@ -0,0 +1,150 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.async + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings +import io.airbyte.cdk.integrations.destination.async.buffers.BufferDequeue +import io.airbyte.cdk.integrations.destination.async.function.DestinationFlushFunction +import io.airbyte.protocol.models.v0.StreamDescriptor +import java.time.Clock +import java.time.Duration +import java.time.Instant +import java.util.Optional +import java.util.concurrent.atomic.AtomicBoolean +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.mockito.Mockito + +@SuppressFBWarnings(value = ["BC_IMPOSSIBLE_CAST"]) +class DetectStreamToFlushTest { + val NOW: Instant = Instant.now() + val FIVE_MIN: Duration = Duration.ofMinutes(5) + private val SIZE_10MB = (10 * 1024 * 1024).toLong() + private val SIZE_200MB = (200 * 1024 * 1024).toLong() + + private val DESC1: StreamDescriptor = StreamDescriptor().withName("test1") + + private lateinit var flusher: DestinationFlushFunction + + @BeforeEach + internal fun setup() { + flusher = Mockito.mock(DestinationFlushFunction::class.java) + Mockito.`when`(flusher.optimalBatchSizeBytes).thenReturn(SIZE_200MB) + } + + @Test + internal fun testGetNextSkipsEmptyStreams() { + val bufferDequeue = + Mockito.mock( + BufferDequeue::class.java, + ) + Mockito.`when`(bufferDequeue.bufferedStreams).thenReturn(setOf(DESC1)) + Mockito.`when`(bufferDequeue.getQueueSizeBytes(DESC1)).thenReturn(Optional.of(0L)) + val runningFlushWorkers = + Mockito.mock( + RunningFlushWorkers::class.java, + ) + + val detect = + DetectStreamToFlush(bufferDequeue, runningFlushWorkers, AtomicBoolean(false), flusher) + Assertions.assertEquals(Optional.empty(), detect.getNextStreamToFlush(0)) + } + + @Test + internal fun testGetNextPicksUpOnSizeTrigger() { + val bufferDequeue = + Mockito.mock( + BufferDequeue::class.java, + ) + Mockito.`when`(bufferDequeue.bufferedStreams).thenReturn(setOf(DESC1)) + Mockito.`when`(bufferDequeue.getQueueSizeBytes(DESC1)).thenReturn(Optional.of(1L)) + val runningFlushWorkers = + Mockito.mock( + RunningFlushWorkers::class.java, + ) + val detect = + DetectStreamToFlush(bufferDequeue, runningFlushWorkers, AtomicBoolean(false), flusher) + // if above threshold, triggers + Assertions.assertEquals(Optional.of(DESC1), detect.getNextStreamToFlush(0)) + // if below threshold, no trigger + Assertions.assertEquals(Optional.empty(), detect.getNextStreamToFlush(1)) + } + + @Test + internal fun testGetNextAccountsForAlreadyRunningWorkers() { + val bufferDequeue = + Mockito.mock( + BufferDequeue::class.java, + ) + Mockito.`when`(bufferDequeue.bufferedStreams).thenReturn(setOf(DESC1)) + Mockito.`when`(bufferDequeue.getQueueSizeBytes(DESC1)).thenReturn(Optional.of(1L)) + val runningFlushWorkers = + Mockito.mock( + RunningFlushWorkers::class.java, + ) + Mockito.`when`( + runningFlushWorkers.getSizesOfRunningWorkerBatches(org.mockito.kotlin.any()), + ) + .thenReturn( + listOf( + Optional.of(SIZE_10MB), + ), + ) + val detect = + DetectStreamToFlush(bufferDequeue, runningFlushWorkers, AtomicBoolean(false), flusher) + Assertions.assertEquals(Optional.empty(), detect.getNextStreamToFlush(0)) + } + + @Test + internal fun testGetNextPicksUpOnTimeTrigger() { + val bufferDequeue = + Mockito.mock( + BufferDequeue::class.java, + ) + Mockito.`when`(bufferDequeue.bufferedStreams).thenReturn(setOf(DESC1)) + Mockito.`when`(bufferDequeue.getQueueSizeBytes(DESC1)).thenReturn(Optional.of(1L)) + val mockedNowProvider = Mockito.mock(Clock::class.java) + + val runningFlushWorkers = + Mockito.mock( + RunningFlushWorkers::class.java, + ) + Mockito.`when`( + runningFlushWorkers.getSizesOfRunningWorkerBatches(org.mockito.kotlin.any()), + ) + .thenReturn( + listOf( + Optional.of(SIZE_10MB), + ), + ) + val detect = + DetectStreamToFlush( + bufferDequeue, + runningFlushWorkers, + AtomicBoolean(false), + flusher, + mockedNowProvider, + ) + + // initialize flush time + Mockito.`when`(mockedNowProvider.millis()).thenReturn(NOW.toEpochMilli()) + + Assertions.assertEquals(Optional.empty(), detect.getNextStreamToFlush(0)) + + // check 5 minutes later + Mockito.`when`(mockedNowProvider.millis()).thenReturn(NOW.plus(FIVE_MIN).toEpochMilli()) + + Assertions.assertEquals(Optional.of(DESC1), detect.getNextStreamToFlush(0)) + + // just flush once + Assertions.assertEquals(Optional.empty(), detect.getNextStreamToFlush(0)) + + // check another 5 minutes later + Mockito.`when`(mockedNowProvider.millis()) + .thenReturn(NOW.plus(FIVE_MIN).plus(FIVE_MIN).toEpochMilli()) + Assertions.assertEquals(Optional.of(DESC1), detect.getNextStreamToFlush(0)) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/FlushThresholdTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/FlushThresholdTest.kt new file mode 100644 index 0000000000000..06778a0f6f9eb --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/FlushThresholdTest.kt @@ -0,0 +1,97 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.async + +import io.airbyte.cdk.integrations.destination.async.buffers.BufferDequeue +import io.airbyte.cdk.integrations.destination.async.function.DestinationFlushFunction +import java.util.concurrent.atomic.AtomicBoolean +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.mockito.Mockito + +class FlushThresholdTest { + private val SIZE_10MB = (10 * 1024 * 1024).toLong() + + private lateinit var flusher: DestinationFlushFunction + + @BeforeEach + internal fun setup() { + flusher = Mockito.mock(DestinationFlushFunction::class.java) + Mockito.`when`(flusher.queueFlushThresholdBytes).thenReturn(SIZE_10MB) + } + + @Test + internal fun testBaseThreshold() { + val isClosing = AtomicBoolean(false) + val bufferDequeue = + Mockito.mock( + BufferDequeue::class.java, + ) + val detect = + DetectStreamToFlush( + bufferDequeue, + Mockito.mock(RunningFlushWorkers::class.java), + isClosing, + flusher, + ) + Assertions.assertEquals(SIZE_10MB, detect.computeQueueThreshold()) + } + + @Test + internal fun testClosingThreshold() { + val isClosing = AtomicBoolean(true) + val bufferDequeue = + Mockito.mock( + BufferDequeue::class.java, + ) + val detect = + DetectStreamToFlush( + bufferDequeue, + Mockito.mock(RunningFlushWorkers::class.java), + isClosing, + flusher, + ) + Assertions.assertEquals(0, detect.computeQueueThreshold()) + } + + @Test + internal fun testEagerFlushThresholdBelowThreshold() { + val isClosing = AtomicBoolean(false) + val bufferDequeue = + Mockito.mock( + BufferDequeue::class.java, + ) + Mockito.`when`(bufferDequeue.totalGlobalQueueSizeBytes).thenReturn(8L) + Mockito.`when`(bufferDequeue.maxQueueSizeBytes).thenReturn(10L) + val detect = + DetectStreamToFlush( + bufferDequeue, + Mockito.mock(RunningFlushWorkers::class.java), + isClosing, + flusher, + ) + Assertions.assertEquals(SIZE_10MB, detect.computeQueueThreshold()) + } + + @Test + internal fun testEagerFlushThresholdAboveThreshold() { + val isClosing = AtomicBoolean(false) + val bufferDequeue = + Mockito.mock( + BufferDequeue::class.java, + ) + Mockito.`when`(bufferDequeue.totalGlobalQueueSizeBytes).thenReturn(9L) + Mockito.`when`(bufferDequeue.maxQueueSizeBytes).thenReturn(10L) + val detect = + DetectStreamToFlush( + bufferDequeue, + Mockito.mock(RunningFlushWorkers::class.java), + isClosing, + flusher, + ) + Assertions.assertEquals(0, detect.computeQueueThreshold()) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/GlobalMemoryManagerTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/GlobalMemoryManagerTest.kt new file mode 100644 index 0000000000000..599b76bb289c3 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/GlobalMemoryManagerTest.kt @@ -0,0 +1,26 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.async + +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +class GlobalMemoryManagerTest { + private val BYTES_MB = (1024 * 1024).toLong() + + @Test + internal fun test() { + val mgr = GlobalMemoryManager(15 * BYTES_MB) + + Assertions.assertEquals(10 * BYTES_MB, mgr.requestMemory()) + Assertions.assertEquals(5 * BYTES_MB, mgr.requestMemory()) + Assertions.assertEquals(0, mgr.requestMemory()) + + mgr.free(10 * BYTES_MB) + Assertions.assertEquals(10 * BYTES_MB, mgr.requestMemory()) + mgr.free(16 * BYTES_MB) + Assertions.assertEquals(10 * BYTES_MB, mgr.requestMemory()) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/RunningFlushWorkersTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/RunningFlushWorkersTest.kt new file mode 100644 index 0000000000000..21d9b8fe29656 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/RunningFlushWorkersTest.kt @@ -0,0 +1,136 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.async + +import io.airbyte.protocol.models.v0.StreamDescriptor +import java.util.Optional +import java.util.UUID +import org.assertj.core.api.Assertions +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertThrows +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test + +class RunningFlushWorkersTest { + private val SIZE_10MB = (10 * 1024 * 1024).toLong() + + private val FLUSH_WORKER_ID1: UUID = UUID.randomUUID() + private val FLUSH_WORKER_ID2: UUID = UUID.randomUUID() + private val STREAM1: StreamDescriptor = + StreamDescriptor() + .withNamespace( + "namespace1", + ) + .withName("stream1") + private val STREAM2: StreamDescriptor = + StreamDescriptor() + .withNamespace( + "namespace2", + ) + .withName("stream2") + + private lateinit var runningFlushWorkers: RunningFlushWorkers + + @BeforeEach + internal fun setup() { + runningFlushWorkers = RunningFlushWorkers() + } + + @Test + internal fun testTrackFlushWorker() { + Assertions.assertThat( + runningFlushWorkers.getSizesOfRunningWorkerBatches(STREAM1).size, + ) + .isEqualTo(0) + runningFlushWorkers.trackFlushWorker(STREAM1, FLUSH_WORKER_ID1) + Assertions.assertThat( + runningFlushWorkers.getSizesOfRunningWorkerBatches(STREAM1).size, + ) + .isEqualTo(1) + runningFlushWorkers.trackFlushWorker(STREAM1, FLUSH_WORKER_ID2) + runningFlushWorkers.trackFlushWorker(STREAM2, FLUSH_WORKER_ID1) + Assertions.assertThat( + runningFlushWorkers.getSizesOfRunningWorkerBatches(STREAM1).size, + ) + .isEqualTo(2) + } + + @Test + internal fun testCompleteFlushWorker() { + runningFlushWorkers.trackFlushWorker(STREAM1, FLUSH_WORKER_ID1) + runningFlushWorkers.trackFlushWorker(STREAM1, FLUSH_WORKER_ID2) + runningFlushWorkers.completeFlushWorker(STREAM1, FLUSH_WORKER_ID1) + Assertions.assertThat( + runningFlushWorkers.getSizesOfRunningWorkerBatches(STREAM1).size, + ) + .isEqualTo(1) + runningFlushWorkers.completeFlushWorker(STREAM1, FLUSH_WORKER_ID2) + Assertions.assertThat( + runningFlushWorkers.getSizesOfRunningWorkerBatches(STREAM1).size, + ) + .isEqualTo(0) + } + + @Test + internal fun testCompleteFlushWorkerWithoutTrackThrowsException() { + Assertions.assertThatThrownBy { + runningFlushWorkers.completeFlushWorker( + STREAM1, + FLUSH_WORKER_ID1, + ) + } + .isInstanceOf(IllegalStateException::class.java) + .hasMessageContaining("Cannot complete flush worker for stream that has not started.") + } + + @Test + internal fun testMultipleStreams() { + runningFlushWorkers.trackFlushWorker(STREAM1, FLUSH_WORKER_ID1) + runningFlushWorkers.trackFlushWorker(STREAM2, FLUSH_WORKER_ID1) + Assertions.assertThat( + runningFlushWorkers.getSizesOfRunningWorkerBatches(STREAM1).size, + ) + .isEqualTo(1) + Assertions.assertThat( + runningFlushWorkers.getSizesOfRunningWorkerBatches(STREAM2).size, + ) + .isEqualTo(1) + } + + @Test + internal fun testGetSizesOfRunningWorkerBatches() { + runningFlushWorkers.trackFlushWorker(STREAM1, FLUSH_WORKER_ID1) + runningFlushWorkers.trackFlushWorker(STREAM1, FLUSH_WORKER_ID2) + runningFlushWorkers.trackFlushWorker(STREAM2, FLUSH_WORKER_ID1) + assertEquals( + listOf(Optional.empty(), Optional.empty()), + runningFlushWorkers.getSizesOfRunningWorkerBatches(STREAM1), + ) + assertEquals( + listOf(Optional.empty()), + runningFlushWorkers.getSizesOfRunningWorkerBatches(STREAM2), + ) + assertThrows( + IllegalStateException::class.java, + ) { + runningFlushWorkers.registerBatchSize( + STREAM2, + FLUSH_WORKER_ID2, + SIZE_10MB, + ) + } + runningFlushWorkers.registerBatchSize(STREAM1, FLUSH_WORKER_ID1, SIZE_10MB) + runningFlushWorkers.registerBatchSize(STREAM1, FLUSH_WORKER_ID2, SIZE_10MB) + runningFlushWorkers.registerBatchSize(STREAM2, FLUSH_WORKER_ID1, SIZE_10MB) + assertEquals( + listOf(Optional.of(SIZE_10MB), Optional.of(SIZE_10MB)), + runningFlushWorkers.getSizesOfRunningWorkerBatches(STREAM1), + ) + assertEquals( + listOf(Optional.of(SIZE_10MB)), + runningFlushWorkers.getSizesOfRunningWorkerBatches(STREAM2), + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/RunningSizeEstimateTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/RunningSizeEstimateTest.kt new file mode 100644 index 0000000000000..2e5dcb6dd3a92 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/RunningSizeEstimateTest.kt @@ -0,0 +1,113 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.async + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings +import io.airbyte.cdk.integrations.destination.async.buffers.BufferDequeue +import io.airbyte.cdk.integrations.destination.async.function.DestinationFlushFunction +import io.airbyte.protocol.models.v0.StreamDescriptor +import java.util.Optional +import java.util.concurrent.atomic.AtomicBoolean +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.mockito.Mockito + +@SuppressFBWarnings(value = ["BC_IMPOSSIBLE_CAST"]) +class RunningSizeEstimateTest { + private val SIZE_10MB = (10 * 1024 * 1024).toLong() + private val SIZE_20MB = (20 * 1024 * 1024).toLong() + private val SIZE_200MB = (200 * 1024 * 1024).toLong() + private val DESC1: StreamDescriptor = StreamDescriptor().withName("test1") + + private lateinit var flusher: DestinationFlushFunction + + @BeforeEach + internal fun setup() { + flusher = Mockito.mock(DestinationFlushFunction::class.java) + Mockito.`when`(flusher.optimalBatchSizeBytes).thenReturn(SIZE_200MB) + } + + @Test + internal fun testEstimateZeroWorkers() { + val bufferDequeue = Mockito.mock(BufferDequeue::class.java) + val runningFlushWorkers = + Mockito.mock( + RunningFlushWorkers::class.java, + ) + Mockito.`when`( + runningFlushWorkers.getSizesOfRunningWorkerBatches(org.mockito.kotlin.any()), + ) + .thenReturn( + emptyList(), + ) + val detect = + DetectStreamToFlush(bufferDequeue, runningFlushWorkers, AtomicBoolean(false), flusher) + Assertions.assertEquals(0, detect.estimateSizeOfRunningWorkers(DESC1, SIZE_10MB)) + } + + @Test + internal fun testEstimateWorkerWithBatch() { + val bufferDequeue = Mockito.mock(BufferDequeue::class.java) + val runningFlushWorkers = + Mockito.mock( + RunningFlushWorkers::class.java, + ) + Mockito.`when`( + runningFlushWorkers.getSizesOfRunningWorkerBatches(org.mockito.kotlin.any()), + ) + .thenReturn( + listOf( + Optional.of(SIZE_20MB), + ), + ) + val detect = + DetectStreamToFlush(bufferDequeue, runningFlushWorkers, AtomicBoolean(false), flusher) + Assertions.assertEquals(SIZE_20MB, detect.estimateSizeOfRunningWorkers(DESC1, SIZE_10MB)) + } + + @Test + internal fun testEstimateWorkerWithoutBatchAndQueueLessThanOptimalSize() { + val bufferDequeue = Mockito.mock(BufferDequeue::class.java) + val runningFlushWorkers = + Mockito.mock( + RunningFlushWorkers::class.java, + ) + Mockito.`when`( + runningFlushWorkers.getSizesOfRunningWorkerBatches(org.mockito.kotlin.any()), + ) + .thenReturn( + listOf( + Optional.empty(), + ), + ) + val detect = + DetectStreamToFlush(bufferDequeue, runningFlushWorkers, AtomicBoolean(false), flusher) + Assertions.assertEquals(SIZE_10MB, detect.estimateSizeOfRunningWorkers(DESC1, SIZE_10MB)) + } + + @Test + internal fun testEstimateWorkerWithoutBatchAndQueueGreaterThanOptimalSize() { + val bufferDequeue = Mockito.mock(BufferDequeue::class.java) + val runningFlushWorkers = + Mockito.mock( + RunningFlushWorkers::class.java, + ) + Mockito.`when`( + runningFlushWorkers.getSizesOfRunningWorkerBatches(org.mockito.kotlin.any()), + ) + .thenReturn( + listOf( + Optional.empty(), + ), + ) + val detect = + DetectStreamToFlush(bufferDequeue, runningFlushWorkers, AtomicBoolean(false), flusher) + Assertions.assertEquals( + SIZE_200MB, + detect.estimateSizeOfRunningWorkers(DESC1, SIZE_200MB + 1), + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/SizeTriggerTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/SizeTriggerTest.kt new file mode 100644 index 0000000000000..2a285e7279d01 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/SizeTriggerTest.kt @@ -0,0 +1,91 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.async + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings +import io.airbyte.cdk.integrations.destination.async.buffers.BufferDequeue +import io.airbyte.cdk.integrations.destination.async.function.DestinationFlushFunction +import io.airbyte.protocol.models.v0.StreamDescriptor +import java.util.Optional +import java.util.concurrent.atomic.AtomicBoolean +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.mockito.Mockito + +@SuppressFBWarnings(value = ["BC_IMPOSSIBLE_CAST"]) +class SizeTriggerTest { + companion object { + private const val SIZE_10MB = (10 * 1024 * 1024).toLong() + private const val SIZE_200MB = (200 * 1024 * 1024).toLong() + private val DESC1: StreamDescriptor = StreamDescriptor().withName("test1") + } + + private lateinit var flusher: DestinationFlushFunction + + @BeforeEach + internal fun setup() { + flusher = Mockito.mock(DestinationFlushFunction::class.java) + Mockito.`when`(flusher.optimalBatchSizeBytes).thenReturn(SIZE_200MB) + } + + @Test + internal fun testSizeTriggerOnEmptyQueue() { + val bufferDequeue = + Mockito.mock( + BufferDequeue::class.java, + ) + val runningFlushWorkers = + Mockito.mock( + RunningFlushWorkers::class.java, + ) + Mockito.`when`(bufferDequeue.bufferedStreams).thenReturn(setOf(DESC1)) + Mockito.`when`(bufferDequeue.getQueueSizeBytes(DESC1)).thenReturn(Optional.of(0L)) + val detect = + DetectStreamToFlush(bufferDequeue, runningFlushWorkers, AtomicBoolean(false), flusher) + assertEquals(false, detect.isSizeTriggered(DESC1, SIZE_10MB).first) + } + + @Test + internal fun testSizeTriggerRespectsThreshold() { + val bufferDequeue = + Mockito.mock( + BufferDequeue::class.java, + ) + val runningFlushWorkers = + Mockito.mock( + RunningFlushWorkers::class.java, + ) + Mockito.`when`(bufferDequeue.bufferedStreams).thenReturn(setOf(DESC1)) + Mockito.`when`(bufferDequeue.getQueueSizeBytes(DESC1)).thenReturn(Optional.of(1L)) + val detect = + DetectStreamToFlush(bufferDequeue, runningFlushWorkers, AtomicBoolean(false), flusher) + // if above threshold, triggers + assertEquals(true, detect.isSizeTriggered(DESC1, 0).first) + // if below threshold, no trigger + assertEquals(false, detect.isSizeTriggered(DESC1, SIZE_10MB).first) + } + + @Test + internal fun testSizeTriggerRespectsRunningWorkersEstimate() { + val bufferDequeue = + Mockito.mock( + BufferDequeue::class.java, + ) + val runningFlushWorkers = + Mockito.mock( + RunningFlushWorkers::class.java, + ) + Mockito.`when`(bufferDequeue.bufferedStreams).thenReturn(setOf(DESC1)) + Mockito.`when`(bufferDequeue.getQueueSizeBytes(DESC1)).thenReturn(Optional.of(1L)) + Mockito.`when`(runningFlushWorkers.getSizesOfRunningWorkerBatches(org.mockito.kotlin.any())) + .thenReturn(emptyList()) + .thenReturn(listOf(Optional.of(SIZE_10MB))) + val detect = + DetectStreamToFlush(bufferDequeue, runningFlushWorkers, AtomicBoolean(false), flusher) + assertEquals(true, detect.isSizeTriggered(DESC1, 0).first) + assertEquals(false, detect.isSizeTriggered(DESC1, 0).first) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/StreamPriorityTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/StreamPriorityTest.kt new file mode 100644 index 0000000000000..de8b61b76d82d --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/StreamPriorityTest.kt @@ -0,0 +1,113 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.async + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings +import io.airbyte.cdk.integrations.destination.async.buffers.BufferDequeue +import io.airbyte.cdk.integrations.destination.async.function.DestinationFlushFunction +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.v0.StreamDescriptor +import java.time.Instant +import java.util.Optional +import java.util.concurrent.atomic.AtomicBoolean +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.mockito.Mockito + +@SuppressFBWarnings(value = ["BC_IMPOSSIBLE_CAST"]) +class StreamPriorityTest { + val NOW: Instant = Instant.now() + val FIVE_MIN_AGO: Instant = NOW.minusSeconds((60 * 5).toLong()) + private val DESC1: StreamDescriptor = StreamDescriptor().withName("test1") + private val DESC2: StreamDescriptor = StreamDescriptor().withName("test2") + private val DESCS: Set = java.util.Set.of(DESC1, DESC2) + + @Test + internal fun testOrderByPrioritySize() { + val bufferDequeue = + Mockito.mock( + BufferDequeue::class.java, + ) + val flusher = Mockito.mock(DestinationFlushFunction::class.java) + val runningFlushWorkers = Mockito.mock(RunningFlushWorkers::class.java) + Mockito.`when`( + bufferDequeue.getQueueSizeBytes(DESC1), + ) + .thenReturn(Optional.of(1L)) + .thenReturn(Optional.of(0L)) + Mockito.`when`( + bufferDequeue.getQueueSizeBytes(DESC2), + ) + .thenReturn(Optional.of(0L)) + .thenReturn(Optional.of(1L)) + val detect = + DetectStreamToFlush(bufferDequeue, runningFlushWorkers, AtomicBoolean(false), flusher) + + Assertions.assertEquals(listOf(DESC1, DESC2), detect.orderStreamsByPriority(DESCS)) + Assertions.assertEquals(listOf(DESC2, DESC1), detect.orderStreamsByPriority(DESCS)) + } + + @Test + internal fun testOrderByPrioritySecondarySortByTime() { + val bufferDequeue = + Mockito.mock( + BufferDequeue::class.java, + ) + val flusher = Mockito.mock(DestinationFlushFunction::class.java) + val runningFlushWorkers = Mockito.mock(RunningFlushWorkers::class.java) + Mockito.`when`( + bufferDequeue.getQueueSizeBytes(org.mockito.kotlin.any()), + ) + .thenReturn(Optional.of(0L)) + Mockito.`when`( + bufferDequeue.getTimeOfLastRecord(DESC1), + ) + .thenReturn(Optional.of(FIVE_MIN_AGO)) + .thenReturn(Optional.of(NOW)) + Mockito.`when`(bufferDequeue.getTimeOfLastRecord(DESC2)) + .thenReturn(Optional.of(NOW)) + .thenReturn(Optional.of(FIVE_MIN_AGO)) + val detect = + DetectStreamToFlush(bufferDequeue, runningFlushWorkers, AtomicBoolean(false), flusher) + Assertions.assertEquals(listOf(DESC1, DESC2), detect.orderStreamsByPriority(DESCS)) + Assertions.assertEquals(listOf(DESC2, DESC1), detect.orderStreamsByPriority(DESCS)) + } + + @Test + internal fun testOrderByPriorityTertiarySortByName() { + val bufferDequeue = + Mockito.mock( + BufferDequeue::class.java, + ) + val flusher = Mockito.mock(DestinationFlushFunction::class.java) + val runningFlushWorkers = Mockito.mock(RunningFlushWorkers::class.java) + Mockito.`when`( + bufferDequeue.getQueueSizeBytes(org.mockito.kotlin.any()), + ) + .thenReturn(Optional.of(0L)) + Mockito.`when`( + bufferDequeue.getTimeOfLastRecord(org.mockito.kotlin.any()), + ) + .thenReturn(Optional.of(NOW)) + val detect = + DetectStreamToFlush(bufferDequeue, runningFlushWorkers, AtomicBoolean(false), flusher) + val descs = listOf(Jsons.clone(DESC1), Jsons.clone(DESC2)) + Assertions.assertEquals( + listOf( + descs[0], + descs[1], + ), + detect.orderStreamsByPriority(HashSet(descs)), + ) + descs[0].name = "test3" + Assertions.assertEquals( + listOf( + descs[1], + descs[0], + ), + detect.orderStreamsByPriority(HashSet(descs)), + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/TimeTriggerTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/TimeTriggerTest.kt new file mode 100644 index 0000000000000..3eb3c9b235412 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/TimeTriggerTest.kt @@ -0,0 +1,49 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.async + +import io.airbyte.cdk.integrations.destination.async.buffers.BufferDequeue +import io.airbyte.cdk.integrations.destination.async.function.DestinationFlushFunction +import java.time.Clock +import java.util.concurrent.atomic.AtomicBoolean +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Test +import org.mockito.Mockito + +class TimeTriggerTest { + companion object { + private val NOW_MS = System.currentTimeMillis() + private const val ONE_SEC = 1000L + private const val FIVE_MIN = (5 * 60 * 1000).toLong() + } + + @Test + internal fun testTimeTrigger() { + val bufferDequeue = + Mockito.mock( + BufferDequeue::class.java, + ) + val flusher = Mockito.mock(DestinationFlushFunction::class.java) + val runningFlushWorkers = + Mockito.mock( + RunningFlushWorkers::class.java, + ) + + val mockedNowProvider = Mockito.mock(Clock::class.java) + Mockito.`when`(mockedNowProvider.millis()).thenReturn(NOW_MS) + + val detect = + DetectStreamToFlush( + bufferDequeue, + runningFlushWorkers, + AtomicBoolean(false), + flusher, + mockedNowProvider, + ) + assertEquals(false, detect.isTimeTriggered(NOW_MS).first) + assertEquals(false, detect.isTimeTriggered(NOW_MS - ONE_SEC).first) + assertEquals(true, detect.isTimeTriggered(NOW_MS - FIVE_MIN).first) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/buffers/BufferDequeueTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/buffers/BufferDequeueTest.kt new file mode 100644 index 0000000000000..209676f5c102b --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/buffers/BufferDequeueTest.kt @@ -0,0 +1,237 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.async.buffers + +import io.airbyte.cdk.integrations.destination.async.GlobalMemoryManager +import io.airbyte.cdk.integrations.destination.async.model.PartialAirbyteMessage +import io.airbyte.cdk.integrations.destination.async.model.PartialAirbyteRecordMessage +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.StreamDescriptor +import java.time.Instant +import java.time.temporal.ChronoUnit +import java.util.Optional +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Nested +import org.junit.jupiter.api.Test + +class BufferDequeueTest { + private val RECORD_SIZE_20_BYTES = 20 + private val DEFAULT_NAMESPACE = "foo_namespace" + private val STREAM_NAME = "stream1" + private val STREAM_DESC: StreamDescriptor = StreamDescriptor().withName(STREAM_NAME) + private val RECORD_MSG_20_BYTES: PartialAirbyteMessage = + PartialAirbyteMessage() + .withType(AirbyteMessage.Type.RECORD) + .withRecord( + PartialAirbyteRecordMessage().withStream(STREAM_NAME), + ) + + @Nested + internal inner class Take { + @Test + internal fun testTakeShouldBestEffortRead() { + val bufferManager = BufferManager() + val enqueue = bufferManager.bufferEnqueue + val dequeue = bufferManager.bufferDequeue + + enqueue.addRecord( + RECORD_MSG_20_BYTES, + RECORD_SIZE_20_BYTES, + Optional.of(DEFAULT_NAMESPACE) + ) + enqueue.addRecord( + RECORD_MSG_20_BYTES, + RECORD_SIZE_20_BYTES, + Optional.of(DEFAULT_NAMESPACE) + ) + enqueue.addRecord( + RECORD_MSG_20_BYTES, + RECORD_SIZE_20_BYTES, + Optional.of(DEFAULT_NAMESPACE) + ) + enqueue.addRecord( + RECORD_MSG_20_BYTES, + RECORD_SIZE_20_BYTES, + Optional.of(DEFAULT_NAMESPACE) + ) + + // total size of records is 80, so we expect 50 to get us 2 records (prefer to + // under-pull records + // than over-pull). + try { + dequeue.take(STREAM_DESC, 50).use { take -> + Assertions.assertEquals(2, take.data.size) + // verify it only took the records from the queue that it actually returned. + Assertions.assertEquals( + 2, + dequeue.getQueueSizeInRecords(STREAM_DESC).orElseThrow(), + ) + } + } catch (e: Exception) { + throw RuntimeException(e) + } + } + + @Test + internal fun testTakeShouldReturnAllIfPossible() { + val bufferManager = BufferManager() + val enqueue = bufferManager.bufferEnqueue + val dequeue = bufferManager.bufferDequeue + + enqueue.addRecord( + RECORD_MSG_20_BYTES, + RECORD_SIZE_20_BYTES, + Optional.of(DEFAULT_NAMESPACE) + ) + enqueue.addRecord( + RECORD_MSG_20_BYTES, + RECORD_SIZE_20_BYTES, + Optional.of(DEFAULT_NAMESPACE) + ) + enqueue.addRecord( + RECORD_MSG_20_BYTES, + RECORD_SIZE_20_BYTES, + Optional.of(DEFAULT_NAMESPACE) + ) + + try { + dequeue.take(STREAM_DESC, 60).use { take -> + Assertions.assertEquals(3, take.data.size) + } + } catch (e: Exception) { + throw RuntimeException(e) + } + } + + @Test + internal fun testTakeFewerRecordsThanSizeLimitShouldNotError() { + val bufferManager = BufferManager() + val enqueue = bufferManager.bufferEnqueue + val dequeue = bufferManager.bufferDequeue + + enqueue.addRecord( + RECORD_MSG_20_BYTES, + RECORD_SIZE_20_BYTES, + Optional.of(DEFAULT_NAMESPACE) + ) + enqueue.addRecord( + RECORD_MSG_20_BYTES, + RECORD_SIZE_20_BYTES, + Optional.of(DEFAULT_NAMESPACE) + ) + + try { + dequeue.take(STREAM_DESC, Long.MAX_VALUE).use { take -> + Assertions.assertEquals(2, take.data.size) + } + } catch (e: Exception) { + throw RuntimeException(e) + } + } + } + + @Test + internal fun testMetadataOperationsCorrect() { + val bufferManager = BufferManager() + val enqueue = bufferManager.bufferEnqueue + val dequeue = bufferManager.bufferDequeue + + enqueue.addRecord(RECORD_MSG_20_BYTES, RECORD_SIZE_20_BYTES, Optional.of(DEFAULT_NAMESPACE)) + enqueue.addRecord(RECORD_MSG_20_BYTES, RECORD_SIZE_20_BYTES, Optional.of(DEFAULT_NAMESPACE)) + + val secondStream = StreamDescriptor().withName("stream_2") + val recordFromSecondStream = Jsons.clone(RECORD_MSG_20_BYTES) + recordFromSecondStream.record?.withStream(secondStream.name) + enqueue.addRecord( + recordFromSecondStream, + RECORD_SIZE_20_BYTES, + Optional.of(DEFAULT_NAMESPACE) + ) + + Assertions.assertEquals(60, dequeue.totalGlobalQueueSizeBytes) + + Assertions.assertEquals(2, dequeue.getQueueSizeInRecords(STREAM_DESC).get()) + Assertions.assertEquals(1, dequeue.getQueueSizeInRecords(secondStream).get()) + + Assertions.assertEquals(40, dequeue.getQueueSizeBytes(STREAM_DESC).get()) + Assertions.assertEquals(20, dequeue.getQueueSizeBytes(secondStream).get()) + + // Buffer of 3 sec to deal with test execution variance. + val lastThreeSec = Instant.now().minus(3, ChronoUnit.SECONDS) + Assertions.assertTrue(lastThreeSec.isBefore(dequeue.getTimeOfLastRecord(STREAM_DESC).get())) + Assertions.assertTrue( + lastThreeSec.isBefore(dequeue.getTimeOfLastRecord(secondStream).get()), + ) + } + + @Test + internal fun testMetadataOperationsError() { + val bufferManager = BufferManager() + val dequeue = bufferManager.bufferDequeue + + val ghostStream = StreamDescriptor().withName("ghost stream") + + Assertions.assertEquals(0, dequeue.totalGlobalQueueSizeBytes) + + Assertions.assertTrue(dequeue.getQueueSizeInRecords(ghostStream).isEmpty) + + Assertions.assertTrue(dequeue.getQueueSizeBytes(ghostStream).isEmpty) + + Assertions.assertTrue(dequeue.getTimeOfLastRecord(ghostStream).isEmpty) + } + + @Test + @Throws(Exception::class) + internal fun cleansUpMemoryForEmptyQueues() { + val bufferManager = BufferManager() + val enqueue = bufferManager.bufferEnqueue + val dequeue = bufferManager.bufferDequeue + val memoryManager = bufferManager.memoryManager + + // we initialize with a block for state + Assertions.assertEquals( + GlobalMemoryManager.BLOCK_SIZE_BYTES, + memoryManager.getCurrentMemoryBytes(), + ) + + // allocate a block for new stream + enqueue.addRecord(RECORD_MSG_20_BYTES, RECORD_SIZE_20_BYTES, Optional.of(DEFAULT_NAMESPACE)) + Assertions.assertEquals( + 2 * GlobalMemoryManager.BLOCK_SIZE_BYTES, + memoryManager.getCurrentMemoryBytes(), + ) + + enqueue.addRecord(RECORD_MSG_20_BYTES, RECORD_SIZE_20_BYTES, Optional.of(DEFAULT_NAMESPACE)) + enqueue.addRecord(RECORD_MSG_20_BYTES, RECORD_SIZE_20_BYTES, Optional.of(DEFAULT_NAMESPACE)) + enqueue.addRecord(RECORD_MSG_20_BYTES, RECORD_SIZE_20_BYTES, Optional.of(DEFAULT_NAMESPACE)) + + // no re-allocates as we haven't breached block size + Assertions.assertEquals( + 2 * GlobalMemoryManager.BLOCK_SIZE_BYTES, + memoryManager.getCurrentMemoryBytes(), + ) + + val totalBatchSize = RECORD_SIZE_20_BYTES * 4 + + dequeue.take(STREAM_DESC, totalBatchSize.toLong()).use { batch -> + // slop allocation gets cleaned up + Assertions.assertEquals( + GlobalMemoryManager.BLOCK_SIZE_BYTES + totalBatchSize, + memoryManager.getCurrentMemoryBytes(), + ) + batch.close() + // back to initial state after flush clears the batch + Assertions.assertEquals( + GlobalMemoryManager.BLOCK_SIZE_BYTES, + memoryManager.getCurrentMemoryBytes(), + ) + Assertions.assertEquals( + 0, + bufferManager.buffers[STREAM_DESC]!!.maxMemoryUsage, + ) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/buffers/BufferEnqueueTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/buffers/BufferEnqueueTest.kt new file mode 100644 index 0000000000000..01c859d31b3f9 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/buffers/BufferEnqueueTest.kt @@ -0,0 +1,77 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.async.buffers + +import io.airbyte.cdk.integrations.destination.async.GlobalMemoryManager +import io.airbyte.cdk.integrations.destination.async.model.PartialAirbyteMessage +import io.airbyte.cdk.integrations.destination.async.model.PartialAirbyteRecordMessage +import io.airbyte.cdk.integrations.destination.async.state.GlobalAsyncStateManager +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.StreamDescriptor +import java.util.Optional +import java.util.concurrent.ConcurrentHashMap +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.mockito.Mockito + +class BufferEnqueueTest { + private val RECORD_SIZE_20_BYTES = 20 + private val DEFAULT_NAMESPACE = "foo_namespace" + + @Test + internal fun testAddRecordShouldAdd() { + val twoMB = 2 * 1024 * 1024 + val streamToBuffer = ConcurrentHashMap() + val enqueue = + BufferEnqueue( + GlobalMemoryManager(twoMB.toLong()), + streamToBuffer, + Mockito.mock( + GlobalAsyncStateManager::class.java, + ), + ) + + val streamName = "stream" + val stream = StreamDescriptor().withName(streamName) + val record = + PartialAirbyteMessage() + .withType(AirbyteMessage.Type.RECORD) + .withRecord( + PartialAirbyteRecordMessage().withStream(streamName), + ) + + enqueue.addRecord(record, RECORD_SIZE_20_BYTES, Optional.of(DEFAULT_NAMESPACE)) + Assertions.assertEquals(1, streamToBuffer[stream]!!.size()) + Assertions.assertEquals(20L, streamToBuffer[stream]!!.currentMemoryUsage) + } + + @Test + internal fun testAddRecordShouldExpand() { + val oneKb = 1024 + val streamToBuffer = ConcurrentHashMap() + val enqueue = + BufferEnqueue( + GlobalMemoryManager(oneKb.toLong()), + streamToBuffer, + Mockito.mock( + GlobalAsyncStateManager::class.java, + ), + ) + + val streamName = "stream" + val stream = StreamDescriptor().withName(streamName) + val record = + PartialAirbyteMessage() + .withType(AirbyteMessage.Type.RECORD) + .withRecord( + PartialAirbyteRecordMessage().withStream(streamName), + ) + + enqueue.addRecord(record, RECORD_SIZE_20_BYTES, Optional.of(DEFAULT_NAMESPACE)) + enqueue.addRecord(record, RECORD_SIZE_20_BYTES, Optional.of(DEFAULT_NAMESPACE)) + Assertions.assertEquals(2, streamToBuffer[stream]!!.size()) + Assertions.assertEquals(40, streamToBuffer[stream]!!.currentMemoryUsage) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/buffers/MemoryBoundedLinkedBlockingQueueTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/buffers/MemoryBoundedLinkedBlockingQueueTest.kt new file mode 100644 index 0000000000000..b42342d41ba4d --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/buffers/MemoryBoundedLinkedBlockingQueueTest.kt @@ -0,0 +1,53 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.async.buffers + +import java.util.concurrent.TimeUnit +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.junit.jupiter.params.ParameterizedTest +import org.junit.jupiter.params.provider.ValueSource + +class MemoryBoundedLinkedBlockingQueueTest { + @Test + @Throws(InterruptedException::class) + internal fun offerAndTakeShouldReturn() { + val queue = MemoryBoundedLinkedBlockingQueue(1024) + + queue.offer("abc", 6) + + val item = queue.take() + + Assertions.assertEquals("abc", item.item) + } + + @Test + @Throws(InterruptedException::class) + internal fun testBlocksOnFullMemory() { + val queue = MemoryBoundedLinkedBlockingQueue(10) + Assertions.assertTrue(queue.offer("abc", 6)) + Assertions.assertFalse(queue.offer("abc", 6)) + + Assertions.assertNotNull(queue.poll(1, TimeUnit.NANOSECONDS)) + Assertions.assertNull(queue.poll(1, TimeUnit.NANOSECONDS)) + } + + @ParameterizedTest + @ValueSource(longs = [1024, 100000, 600]) + internal fun getMaxMemoryUsage(size: Long) { + val queue = MemoryBoundedLinkedBlockingQueue(size) + + Assertions.assertEquals(0, queue.currentMemoryUsage) + Assertions.assertEquals(size, queue.maxMemoryUsage) + + queue.addMaxMemory(-100) + + Assertions.assertEquals(size - 100, queue.maxMemoryUsage) + + queue.addMaxMemory(123) + + Assertions.assertEquals(size - 100 + 123, queue.maxMemoryUsage) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/buffers/StreamAwareQueueTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/buffers/StreamAwareQueueTest.kt new file mode 100644 index 0000000000000..13866f0c8a9e3 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/buffers/StreamAwareQueueTest.kt @@ -0,0 +1,81 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.async.buffers + +import io.airbyte.cdk.integrations.destination.async.model.PartialAirbyteMessage +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.junit.jupiter.params.ParameterizedTest +import org.junit.jupiter.params.provider.ValueSource + +class StreamAwareQueueTest { + @Test + @Throws(InterruptedException::class) + internal fun test() { + val queue = StreamAwareQueue(1024) + + Assertions.assertEquals(0, queue.currentMemoryUsage) + Assertions.assertNull(queue.getTimeOfLastMessage().orElse(null)) + + queue.offer(PartialAirbyteMessage(), 6, 1) + queue.offer(PartialAirbyteMessage(), 6, 2) + queue.offer(PartialAirbyteMessage(), 6, 3) + + Assertions.assertEquals(18, queue.currentMemoryUsage) + Assertions.assertNotNull(queue.getTimeOfLastMessage().orElse(null)) + + queue.take() + queue.take() + queue.take() + + Assertions.assertEquals(0, queue.currentMemoryUsage) + // This should be null because the queue is empty + Assertions.assertTrue( + queue.getTimeOfLastMessage().isEmpty, + "Expected empty optional; got " + queue.getTimeOfLastMessage(), + ) + } + + @ParameterizedTest + @ValueSource(longs = [1024, 100000, 600]) + internal fun getMaxMemoryUsage(size: Long) { + val queue = StreamAwareQueue(size) + + Assertions.assertEquals(0, queue.currentMemoryUsage) + Assertions.assertEquals(size, queue.maxMemoryUsage) + + queue.addMaxMemory(-100) + + Assertions.assertEquals(size - 100, queue.maxMemoryUsage) + + queue.addMaxMemory(123) + + Assertions.assertEquals(size - 100 + 123, queue.maxMemoryUsage) + } + + @Test + internal fun isEmpty() { + val queue = StreamAwareQueue(1024) + + Assertions.assertTrue(queue.isEmpty) + + queue.offer(PartialAirbyteMessage(), 10, 1) + + Assertions.assertFalse(queue.isEmpty) + + queue.offer(PartialAirbyteMessage(), 10, 1) + queue.offer(PartialAirbyteMessage(), 10, 1) + queue.offer(PartialAirbyteMessage(), 10, 1) + + Assertions.assertFalse(queue.isEmpty) + + queue.poll() + queue.poll() + queue.poll() + queue.poll() + + Assertions.assertTrue(queue.isEmpty) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/model/PartialAirbyteMessageTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/model/PartialAirbyteMessageTest.kt new file mode 100644 index 0000000000000..a001555ba614c --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/model/PartialAirbyteMessageTest.kt @@ -0,0 +1,95 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.async.model + +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.AirbyteStateMessage +import io.airbyte.protocol.models.AirbyteStreamState +import io.airbyte.protocol.models.StreamDescriptor +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.AirbyteRecordMessage +import java.time.Instant +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +class PartialAirbyteMessageTest { + @Test + internal fun testDeserializeRecord() { + val emittedAt = Instant.now().toEpochMilli() + val serializedRec = + Jsons.serialize( + AirbyteMessage() + .withType(AirbyteMessage.Type.RECORD) + .withRecord( + AirbyteRecordMessage() + .withStream("users") + .withNamespace("public") + .withEmittedAt(emittedAt) + .withData(Jsons.jsonNode("data")), + ), + ) + + val rec = + Jsons.tryDeserialize( + serializedRec, + PartialAirbyteMessage::class.java, + ) + .get() + Assertions.assertEquals(AirbyteMessage.Type.RECORD, rec.type) + Assertions.assertEquals("users", rec.record?.stream) + Assertions.assertEquals("public", rec.record?.namespace) + Assertions.assertEquals("\"data\"", rec.record?.data.toString()) + Assertions.assertEquals(emittedAt, rec.record?.emittedAt) + } + + @Test + internal fun testDeserializeState() { + val serializedState = + Jsons.serialize( + io.airbyte.protocol.models + .AirbyteMessage() + .withType(io.airbyte.protocol.models.AirbyteMessage.Type.STATE) + .withState( + AirbyteStateMessage() + .withStream( + AirbyteStreamState() + .withStreamDescriptor( + StreamDescriptor().withName("user").withNamespace("public"), + ) + .withStreamState(Jsons.jsonNode("data")), + ) + .withType(AirbyteStateMessage.AirbyteStateType.STREAM), + ), + ) + + val rec = + Jsons.tryDeserialize( + serializedState, + PartialAirbyteMessage::class.java, + ) + .get() + Assertions.assertEquals(AirbyteMessage.Type.STATE, rec.type) + + val streamDesc = rec.state?.stream?.streamDescriptor + Assertions.assertEquals("user", streamDesc?.name) + Assertions.assertEquals("public", streamDesc?.namespace) + Assertions.assertEquals( + io.airbyte.protocol.models.v0.AirbyteStateMessage.AirbyteStateType.STREAM, + rec.state?.type, + ) + } + + @Test + internal fun testGarbage() { + val badSerialization = "messed up data" + + val rec = + Jsons.tryDeserialize( + badSerialization, + PartialAirbyteMessage::class.java, + ) + Assertions.assertTrue(rec.isEmpty) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/state/GlobalAsyncStateManagerTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/state/GlobalAsyncStateManagerTest.kt new file mode 100644 index 0000000000000..a46535bf82ea0 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/async/state/GlobalAsyncStateManagerTest.kt @@ -0,0 +1,987 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.async.state + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.destination.async.GlobalMemoryManager +import io.airbyte.cdk.integrations.destination.async.model.PartialAirbyteMessage +import io.airbyte.cdk.integrations.destination.async.model.PartialAirbyteStateMessage +import io.airbyte.cdk.integrations.destination.async.model.PartialAirbyteStreamState +import io.airbyte.protocol.models.Jsons +import io.airbyte.protocol.models.v0.AirbyteGlobalState +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.AirbyteStateMessage +import io.airbyte.protocol.models.v0.AirbyteStateStats +import io.airbyte.protocol.models.v0.AirbyteStreamState +import io.airbyte.protocol.models.v0.StreamDescriptor +import java.util.stream.Collectors +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertNotEquals +import org.junit.jupiter.api.Assertions.assertThrows +import org.junit.jupiter.api.Nested +import org.junit.jupiter.api.Test + +class GlobalAsyncStateManagerTest { + companion object { + private const val TOTAL_QUEUES_MAX_SIZE_LIMIT_BYTES = + (100 * 1024 * 1024 // 10MB + ) + .toLong() + private const val DEFAULT_NAMESPACE = "foo_namespace" + private const val STATE_MSG_SIZE: Long = 1000 + private const val NAMESPACE = "namespace" + private const val STREAM_NAME = "id_and_name" + private const val STREAM_NAME2 = STREAM_NAME + 2 + private const val STREAM_NAME3 = STREAM_NAME + 3 + private val STREAM1_DESC: StreamDescriptor = + StreamDescriptor().withName(STREAM_NAME).withNamespace(NAMESPACE) + private val STREAM2_DESC: StreamDescriptor = + StreamDescriptor().withName(STREAM_NAME2).withNamespace(NAMESPACE) + private val STREAM3_DESC: StreamDescriptor = + StreamDescriptor().withName(STREAM_NAME3).withNamespace(NAMESPACE) + + private val GLOBAL_STATE_MESSAGE1: PartialAirbyteMessage = + PartialAirbyteMessage() + .withType(AirbyteMessage.Type.STATE) + .withState( + PartialAirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL), + ) + .withSerialized( + serializedState( + STREAM1_DESC, + AirbyteStateMessage.AirbyteStateType.GLOBAL, + Jsons.jsonNode(mapOf("cursor" to 1)), + ), + ) + private val GLOBAL_STATE_MESSAGE2: PartialAirbyteMessage = + PartialAirbyteMessage() + .withType(AirbyteMessage.Type.STATE) + .withState( + PartialAirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL), + ) + .withSerialized( + serializedState( + STREAM2_DESC, + AirbyteStateMessage.AirbyteStateType.GLOBAL, + Jsons.jsonNode(mapOf("cursor" to 2)), + ), + ) + + private val GLOBAL_STATE_MESSAGE3: PartialAirbyteMessage = + PartialAirbyteMessage() + .withType(AirbyteMessage.Type.STATE) + .withState( + PartialAirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL), + ) + .withSerialized( + serializedState( + STREAM3_DESC, + AirbyteStateMessage.AirbyteStateType.GLOBAL, + Jsons.jsonNode(mapOf("cursor" to 2)), + ), + ) + private val STREAM1_STATE_MESSAGE1: PartialAirbyteMessage = + PartialAirbyteMessage() + .withType(AirbyteMessage.Type.STATE) + .withState( + PartialAirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream(PartialAirbyteStreamState().withStreamDescriptor(STREAM1_DESC)), + ) + .withSerialized( + serializedState( + STREAM1_DESC, + AirbyteStateMessage.AirbyteStateType.STREAM, + Jsons.jsonNode(mapOf("cursor" to 1)), + ), + ) + private val STREAM1_STATE_MESSAGE2: PartialAirbyteMessage = + PartialAirbyteMessage() + .withType(AirbyteMessage.Type.STATE) + .withState( + PartialAirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream(PartialAirbyteStreamState().withStreamDescriptor(STREAM1_DESC)), + ) + .withSerialized( + serializedState( + STREAM1_DESC, + AirbyteStateMessage.AirbyteStateType.STREAM, + Jsons.jsonNode(mapOf("cursor" to 2)), + ), + ) + + private val STREAM1_STATE_MESSAGE3: PartialAirbyteMessage = + PartialAirbyteMessage() + .withType(AirbyteMessage.Type.STATE) + .withState( + PartialAirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream(PartialAirbyteStreamState().withStreamDescriptor(STREAM1_DESC)), + ) + .withSerialized( + serializedState( + STREAM1_DESC, + AirbyteStateMessage.AirbyteStateType.STREAM, + Jsons.jsonNode(mapOf("cursor" to 3)), + ), + ) + private val STREAM2_STATE_MESSAGE: PartialAirbyteMessage = + PartialAirbyteMessage() + .withType(AirbyteMessage.Type.STATE) + .withState( + PartialAirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream(PartialAirbyteStreamState().withStreamDescriptor(STREAM2_DESC)), + ) + .withSerialized( + serializedState( + STREAM2_DESC, + AirbyteStateMessage.AirbyteStateType.STREAM, + Jsons.jsonNode(mapOf("cursor" to 4)), + ), + ) + + private fun serializedState( + streamDescriptor: StreamDescriptor?, + type: AirbyteStateMessage.AirbyteStateType?, + state: JsonNode?, + ): String { + return when (type) { + AirbyteStateMessage.AirbyteStateType.GLOBAL -> { + Jsons.serialize( + AirbyteMessage() + .withType(AirbyteMessage.Type.STATE) + .withState( + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + .withGlobal( + AirbyteGlobalState() + .withSharedState(state) + .withStreamStates( + listOf( + AirbyteStreamState() + .withStreamState(Jsons.emptyObject()) + .withStreamDescriptor(streamDescriptor), + ), + ), + ), + ), + ) + } + AirbyteStateMessage.AirbyteStateType.STREAM -> { + Jsons.serialize( + AirbyteMessage() + .withType(AirbyteMessage.Type.STATE) + .withState( + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream( + AirbyteStreamState() + .withStreamState(state) + .withStreamDescriptor(streamDescriptor), + ), + ), + ) + } + else -> throw RuntimeException("LEGACY STATE NOT SUPPORTED") + } + } + } + + @Test + internal fun testBasic() { + val emittedStatesFromDestination: MutableList = mutableListOf() + val stateManager = + GlobalAsyncStateManager(GlobalMemoryManager(TOTAL_QUEUES_MAX_SIZE_LIMIT_BYTES)) + + val firstStateId = stateManager.getStateIdAndIncrementCounter(STREAM1_DESC) + val secondStateId = stateManager.getStateIdAndIncrementCounter(STREAM1_DESC) + assertEquals(firstStateId, secondStateId) + + stateManager.decrement(firstStateId, 2) + stateManager.flushStates { e: AirbyteMessage? -> + emittedStatesFromDestination.add( + e, + ) + } + // because no state message has been tracked, there is nothing to flush yet. + val stateWithStats = + emittedStatesFromDestination + .stream() + .collect( + Collectors.toMap( + { c: AirbyteMessage? -> c }, + { c: AirbyteMessage? -> c?.state?.destinationStats }, + ), + ) + assertEquals(0, stateWithStats.size) + + stateManager.trackState(STREAM1_STATE_MESSAGE1, STATE_MSG_SIZE, DEFAULT_NAMESPACE) + stateManager.flushStates { e: AirbyteMessage? -> + emittedStatesFromDestination.add( + e, + ) + } + + val expectedDestinationStats = AirbyteStateStats().withRecordCount(2.0) + val stateWithStats2 = + emittedStatesFromDestination + .stream() + .collect( + Collectors.toMap( + { c: AirbyteMessage? -> c }, + { c: AirbyteMessage? -> c?.state?.destinationStats }, + ), + ) + assertEquals( + listOf( + attachDestinationStateStats( + Jsons.deserialize( + STREAM1_STATE_MESSAGE1.serialized, + AirbyteMessage::class.java, + ), + expectedDestinationStats, + ), + ), + stateWithStats2.keys.stream().toList(), + ) + assertEquals(listOf(expectedDestinationStats), stateWithStats2.values.stream().toList()) + } + + private fun attachDestinationStateStats( + stateMessage: AirbyteMessage, + airbyteStateStats: AirbyteStateStats?, + ): AirbyteMessage { + stateMessage.state.withDestinationStats(airbyteStateStats) + return stateMessage + } + + @Nested + internal inner class GlobalState { + @Test + fun testEmptyQueuesGlobalState() { + val emittedStatesFromDestination: MutableList = mutableListOf() + val stateManager = + GlobalAsyncStateManager(GlobalMemoryManager(TOTAL_QUEUES_MAX_SIZE_LIMIT_BYTES)) + + // GLOBAL + stateManager.trackState(GLOBAL_STATE_MESSAGE1, STATE_MSG_SIZE, DEFAULT_NAMESPACE) + stateManager.flushStates { e: AirbyteMessage? -> + emittedStatesFromDestination.add( + e, + ) + } + val expectedDestinationStats = AirbyteStateStats().withRecordCount(0.0) + val stateWithStats = + emittedStatesFromDestination + .stream() + .collect( + Collectors.toMap( + { c: AirbyteMessage? -> c }, + { c: AirbyteMessage? -> c?.state?.destinationStats }, + ), + ) + // + assertEquals( + listOf( + attachDestinationStateStats( + Jsons.deserialize( + GLOBAL_STATE_MESSAGE1.serialized, + AirbyteMessage::class.java, + ), + expectedDestinationStats, + ), + ), + stateWithStats.keys.stream().toList(), + ) + assertEquals(listOf(expectedDestinationStats), stateWithStats.values.stream().toList()) + + assertThrows( + IllegalArgumentException::class.java, + ) { + stateManager.trackState( + STREAM1_STATE_MESSAGE1, + STATE_MSG_SIZE, + DEFAULT_NAMESPACE, + ) + } + } + + @Test + internal fun testConversion() { + val emittedStatesFromDestination: MutableList = mutableListOf() + val stateManager = + GlobalAsyncStateManager(GlobalMemoryManager(TOTAL_QUEUES_MAX_SIZE_LIMIT_BYTES)) + + val preConvertId0: Long = simulateIncomingRecords(STREAM1_DESC, 10, stateManager) + val preConvertId1: Long = simulateIncomingRecords(STREAM2_DESC, 10, stateManager) + val preConvertId2: Long = simulateIncomingRecords(STREAM3_DESC, 10, stateManager) + assertEquals(3, setOf(preConvertId0, preConvertId1, preConvertId2).size) + + stateManager.trackState(GLOBAL_STATE_MESSAGE1, STATE_MSG_SIZE, DEFAULT_NAMESPACE) + + // Since this is actually a global state, we can only flush after all streams are done. + stateManager.decrement(preConvertId0, 10) + stateManager.flushStates { e: AirbyteMessage? -> + emittedStatesFromDestination.add( + e, + ) + } + assertEquals(0, emittedStatesFromDestination.size) + stateManager.decrement(preConvertId1, 10) + stateManager.flushStates { e: AirbyteMessage? -> + emittedStatesFromDestination.add( + e, + ) + } + assertEquals(0, emittedStatesFromDestination.size) + stateManager.decrement(preConvertId2, 10) + stateManager.flushStates { e: AirbyteMessage? -> + emittedStatesFromDestination.add( + e, + ) + } + val expectedDestinationStats = AirbyteStateStats().withRecordCount(30.0) + val stateWithStats = + emittedStatesFromDestination + .stream() + .collect( + Collectors.toMap( + { c: AirbyteMessage? -> c }, + { c: AirbyteMessage? -> c?.state?.destinationStats }, + ), + ) + assertEquals( + listOf( + attachDestinationStateStats( + Jsons.deserialize( + GLOBAL_STATE_MESSAGE1.serialized, + AirbyteMessage::class.java, + ), + expectedDestinationStats, + ), + ), + stateWithStats.keys.stream().toList(), + ) + assertEquals(listOf(expectedDestinationStats), stateWithStats.values.stream().toList()) + } + + @Test + internal fun testCorrectFlushingOneStream() { + val emittedStatesFromDestination: MutableList = mutableListOf() + val stateManager = + GlobalAsyncStateManager(GlobalMemoryManager(TOTAL_QUEUES_MAX_SIZE_LIMIT_BYTES)) + + val preConvertId0: Long = simulateIncomingRecords(STREAM1_DESC, 10, stateManager) + stateManager.trackState(GLOBAL_STATE_MESSAGE1, STATE_MSG_SIZE, DEFAULT_NAMESPACE) + stateManager.decrement(preConvertId0, 10) + stateManager.flushStates { e: AirbyteMessage? -> + emittedStatesFromDestination.add( + e, + ) + } + val expectedDestinationStats = AirbyteStateStats().withRecordCount(10.0) + val stateWithStats = + emittedStatesFromDestination + .stream() + .collect( + Collectors.toMap( + { c: AirbyteMessage? -> c }, + { c: AirbyteMessage? -> c?.state?.destinationStats }, + ), + ) + assertEquals( + listOf( + attachDestinationStateStats( + Jsons.deserialize( + GLOBAL_STATE_MESSAGE1.serialized, + AirbyteMessage::class.java, + ), + expectedDestinationStats, + ), + ), + stateWithStats.keys.stream().toList(), + ) + assertEquals(listOf(expectedDestinationStats), stateWithStats.values.stream().toList()) + + emittedStatesFromDestination.clear() + + val afterConvertId1: Long = simulateIncomingRecords(STREAM1_DESC, 10, stateManager) + stateManager.trackState(GLOBAL_STATE_MESSAGE2, STATE_MSG_SIZE, DEFAULT_NAMESPACE) + stateManager.decrement(afterConvertId1, 10) + stateManager.flushStates { e: AirbyteMessage? -> + emittedStatesFromDestination.add( + e, + ) + } + val stateWithStats2 = + emittedStatesFromDestination + .stream() + .collect( + Collectors.toMap( + { c: AirbyteMessage? -> c }, + { c: AirbyteMessage? -> c?.state?.destinationStats }, + ), + ) + assertEquals( + listOf( + attachDestinationStateStats( + Jsons.deserialize( + GLOBAL_STATE_MESSAGE2.serialized, + AirbyteMessage::class.java, + ), + expectedDestinationStats, + ), + ), + stateWithStats2.keys.stream().toList(), + ) + assertEquals(listOf(expectedDestinationStats), stateWithStats2.values.stream().toList()) + } + + @Test + internal fun testZeroRecordFlushing() { + val emittedStatesFromDestination: MutableList = mutableListOf() + val stateManager = + GlobalAsyncStateManager(GlobalMemoryManager(TOTAL_QUEUES_MAX_SIZE_LIMIT_BYTES)) + + val preConvertId0: Long = simulateIncomingRecords(STREAM1_DESC, 10, stateManager) + stateManager.trackState(GLOBAL_STATE_MESSAGE1, STATE_MSG_SIZE, DEFAULT_NAMESPACE) + stateManager.decrement(preConvertId0, 10) + stateManager.flushStates { e: AirbyteMessage? -> + emittedStatesFromDestination.add( + e, + ) + } + val expectedDestinationStats = AirbyteStateStats().withRecordCount(10.0) + val stateWithStats = + emittedStatesFromDestination + .stream() + .collect( + Collectors.toMap( + { c: AirbyteMessage? -> c }, + { c: AirbyteMessage? -> c?.state?.destinationStats }, + ), + ) + assertEquals( + listOf( + attachDestinationStateStats( + Jsons.deserialize( + GLOBAL_STATE_MESSAGE1.serialized, + AirbyteMessage::class.java, + ), + expectedDestinationStats, + ), + ), + stateWithStats.keys.stream().toList(), + ) + assertEquals(listOf(expectedDestinationStats), stateWithStats.values.stream().toList()) + emittedStatesFromDestination.clear() + + stateManager.trackState(GLOBAL_STATE_MESSAGE2, STATE_MSG_SIZE, DEFAULT_NAMESPACE) + stateManager.flushStates { e: AirbyteMessage? -> + emittedStatesFromDestination.add( + e, + ) + } + val expectedDestinationStats2 = AirbyteStateStats().withRecordCount(0.0) + val stateWithStats2 = + emittedStatesFromDestination + .stream() + .collect( + Collectors.toMap( + { c: AirbyteMessage? -> c }, + { c: AirbyteMessage? -> c?.state?.destinationStats }, + ), + ) + assertEquals( + listOf( + attachDestinationStateStats( + Jsons.deserialize( + GLOBAL_STATE_MESSAGE2.serialized, + AirbyteMessage::class.java, + ), + expectedDestinationStats2, + ), + ), + stateWithStats2.keys.stream().toList(), + ) + assertEquals( + listOf(expectedDestinationStats2), + stateWithStats2.values.stream().toList(), + ) + emittedStatesFromDestination.clear() + + val afterConvertId2: Long = simulateIncomingRecords(STREAM1_DESC, 10, stateManager) + stateManager.trackState(GLOBAL_STATE_MESSAGE3, STATE_MSG_SIZE, DEFAULT_NAMESPACE) + stateManager.decrement(afterConvertId2, 10) + stateManager.flushStates { e: AirbyteMessage? -> + emittedStatesFromDestination.add( + e, + ) + } + val stateWithStats3 = + emittedStatesFromDestination + .stream() + .collect( + Collectors.toMap( + { c: AirbyteMessage? -> c }, + { c: AirbyteMessage? -> c?.state?.destinationStats }, + ), + ) + assertEquals( + listOf( + attachDestinationStateStats( + Jsons.deserialize( + GLOBAL_STATE_MESSAGE3.serialized, + AirbyteMessage::class.java, + ), + expectedDestinationStats, + ), + ), + stateWithStats3.keys.stream().toList(), + ) + assertEquals(listOf(expectedDestinationStats), stateWithStats3.values.stream().toList()) + } + + @Test + internal fun testCorrectFlushingManyStreams() { + val emittedStatesFromDestination: MutableList = mutableListOf() + val stateManager = + GlobalAsyncStateManager(GlobalMemoryManager(TOTAL_QUEUES_MAX_SIZE_LIMIT_BYTES)) + + val preConvertId0: Long = simulateIncomingRecords(STREAM1_DESC, 10, stateManager) + val preConvertId1: Long = simulateIncomingRecords(STREAM2_DESC, 10, stateManager) + assertNotEquals(preConvertId0, preConvertId1) + stateManager.trackState(GLOBAL_STATE_MESSAGE1, STATE_MSG_SIZE, DEFAULT_NAMESPACE) + stateManager.decrement(preConvertId0, 10) + stateManager.decrement(preConvertId1, 10) + stateManager.flushStates { e: AirbyteMessage? -> + emittedStatesFromDestination.add( + e, + ) + } + val expectedDestinationStats = AirbyteStateStats().withRecordCount(20.0) + val stateWithStats = + emittedStatesFromDestination + .stream() + .collect( + Collectors.toMap( + { c: AirbyteMessage? -> c }, + { c: AirbyteMessage? -> c?.state?.destinationStats }, + ), + ) + assertEquals( + listOf( + attachDestinationStateStats( + Jsons.deserialize( + GLOBAL_STATE_MESSAGE1.serialized, + AirbyteMessage::class.java, + ), + expectedDestinationStats, + ), + ), + stateWithStats.keys.stream().toList(), + ) + assertEquals(listOf(expectedDestinationStats), stateWithStats.values.stream().toList()) + emittedStatesFromDestination.clear() + + val afterConvertId0: Long = simulateIncomingRecords(STREAM1_DESC, 10, stateManager) + val afterConvertId1: Long = simulateIncomingRecords(STREAM2_DESC, 10, stateManager) + assertEquals(afterConvertId0, afterConvertId1) + stateManager.trackState(GLOBAL_STATE_MESSAGE2, STATE_MSG_SIZE, DEFAULT_NAMESPACE) + stateManager.decrement(afterConvertId0, 20) + stateManager.flushStates { e: AirbyteMessage? -> + emittedStatesFromDestination.add( + e, + ) + } + val stateWithStats2 = + emittedStatesFromDestination + .stream() + .collect( + Collectors.toMap( + { c: AirbyteMessage? -> c }, + { c: AirbyteMessage? -> c?.state?.destinationStats }, + ), + ) + assertEquals( + listOf( + attachDestinationStateStats( + Jsons.deserialize( + GLOBAL_STATE_MESSAGE2.serialized, + AirbyteMessage::class.java, + ), + expectedDestinationStats, + ), + ), + stateWithStats2.keys.stream().toList(), + ) + assertEquals(listOf(expectedDestinationStats), stateWithStats2.values.stream().toList()) + } + } + + @Nested + internal inner class PerStreamState { + @Test + internal fun testEmptyQueues() { + val emittedStatesFromDestination: MutableList = mutableListOf() + val stateManager = + GlobalAsyncStateManager(GlobalMemoryManager(TOTAL_QUEUES_MAX_SIZE_LIMIT_BYTES)) + + // GLOBAL + stateManager.trackState(STREAM1_STATE_MESSAGE1, STATE_MSG_SIZE, DEFAULT_NAMESPACE) + stateManager.flushStates { e: AirbyteMessage? -> + emittedStatesFromDestination.add( + e, + ) + } + val expectedDestinationStats = AirbyteStateStats().withRecordCount(0.0) + val stateWithStats = + emittedStatesFromDestination + .stream() + .collect( + Collectors.toMap( + { c: AirbyteMessage? -> c }, + { c: AirbyteMessage? -> c?.state?.destinationStats }, + ), + ) + assertEquals( + listOf( + attachDestinationStateStats( + Jsons.deserialize( + STREAM1_STATE_MESSAGE1.serialized, + AirbyteMessage::class.java, + ), + expectedDestinationStats, + ), + ), + stateWithStats.keys.stream().toList(), + ) + assertEquals(listOf(expectedDestinationStats), stateWithStats.values.stream().toList()) + + assertThrows( + IllegalArgumentException::class.java, + ) { + stateManager.trackState( + GLOBAL_STATE_MESSAGE1, + STATE_MSG_SIZE, + DEFAULT_NAMESPACE, + ) + } + } + + @Test + internal fun testCorrectFlushingOneStream() { + val emittedStatesFromDestination: MutableList = mutableListOf() + val stateManager = + GlobalAsyncStateManager(GlobalMemoryManager(TOTAL_QUEUES_MAX_SIZE_LIMIT_BYTES)) + + var stateId: Long = simulateIncomingRecords(STREAM1_DESC, 3, stateManager) + stateManager.trackState(STREAM1_STATE_MESSAGE1, STATE_MSG_SIZE, DEFAULT_NAMESPACE) + stateManager.decrement(stateId, 3) + stateManager.flushStates { e: AirbyteMessage? -> + emittedStatesFromDestination.add( + e, + ) + } + val expectedDestinationStats = AirbyteStateStats().withRecordCount(3.0) + val stateWithStats = + emittedStatesFromDestination + .stream() + .collect( + Collectors.toMap( + { c: AirbyteMessage? -> c }, + { c: AirbyteMessage? -> c?.state?.destinationStats }, + ), + ) + assertEquals( + listOf( + attachDestinationStateStats( + Jsons.deserialize( + STREAM1_STATE_MESSAGE1.serialized, + AirbyteMessage::class.java, + ), + expectedDestinationStats, + ), + ), + stateWithStats.keys.stream().toList(), + ) + assertEquals(listOf(expectedDestinationStats), stateWithStats.values.stream().toList()) + + emittedStatesFromDestination.clear() + + stateId = simulateIncomingRecords(STREAM1_DESC, 10, stateManager) + stateManager.trackState(STREAM1_STATE_MESSAGE2, STATE_MSG_SIZE, DEFAULT_NAMESPACE) + stateManager.decrement(stateId, 10) + stateManager.flushStates { e: AirbyteMessage? -> + emittedStatesFromDestination.add( + e, + ) + } + val expectedDestinationStats2 = AirbyteStateStats().withRecordCount(10.0) + val stateWithStats2 = + emittedStatesFromDestination + .stream() + .collect( + Collectors.toMap( + { c: AirbyteMessage? -> c }, + { c: AirbyteMessage? -> c?.state?.destinationStats }, + ), + ) + assertEquals( + listOf( + attachDestinationStateStats( + Jsons.deserialize( + STREAM1_STATE_MESSAGE2.serialized, + AirbyteMessage::class.java, + ), + expectedDestinationStats2, + ), + ), + stateWithStats2.keys.stream().toList(), + ) + assertEquals( + listOf(expectedDestinationStats2), + stateWithStats2.values.stream().toList(), + ) + } + + @Test + internal fun testZeroRecordFlushing() { + val emittedStatesFromDestination: MutableList = mutableListOf() + val stateManager = + GlobalAsyncStateManager(GlobalMemoryManager(TOTAL_QUEUES_MAX_SIZE_LIMIT_BYTES)) + + var stateId: Long = simulateIncomingRecords(STREAM1_DESC, 3, stateManager) + stateManager.trackState(STREAM1_STATE_MESSAGE1, STATE_MSG_SIZE, DEFAULT_NAMESPACE) + stateManager.decrement(stateId, 3) + stateManager.flushStates { e: AirbyteMessage? -> + emittedStatesFromDestination.add( + e, + ) + } + val expectedDestinationStats = AirbyteStateStats().withRecordCount(3.0) + val stateWithStats = + emittedStatesFromDestination + .stream() + .collect( + Collectors.toMap( + { c: AirbyteMessage? -> c }, + { c: AirbyteMessage? -> c?.state?.destinationStats }, + ), + ) + assertEquals( + listOf( + attachDestinationStateStats( + Jsons.deserialize( + STREAM1_STATE_MESSAGE1.serialized, + AirbyteMessage::class.java, + ), + expectedDestinationStats, + ), + ), + stateWithStats.keys.stream().toList(), + ) + assertEquals(listOf(expectedDestinationStats), stateWithStats.values.stream().toList()) + emittedStatesFromDestination.clear() + + stateManager.trackState(STREAM1_STATE_MESSAGE2, STATE_MSG_SIZE, DEFAULT_NAMESPACE) + stateManager.flushStates { e: AirbyteMessage? -> + emittedStatesFromDestination.add( + e, + ) + } + val stateWithStats2 = + emittedStatesFromDestination + .stream() + .collect( + Collectors.toMap( + { c: AirbyteMessage? -> c }, + { c: AirbyteMessage? -> c?.state?.destinationStats }, + ), + ) + val expectedDestinationStats2 = AirbyteStateStats().withRecordCount(0.0) + assertEquals( + listOf( + attachDestinationStateStats( + Jsons.deserialize( + STREAM1_STATE_MESSAGE2.serialized, + AirbyteMessage::class.java, + ), + expectedDestinationStats2, + ), + ), + stateWithStats2.keys.stream().toList(), + ) + assertEquals( + listOf(expectedDestinationStats2), + stateWithStats2.values.stream().toList(), + ) + emittedStatesFromDestination.clear() + + stateId = simulateIncomingRecords(STREAM1_DESC, 10, stateManager) + stateManager.trackState(STREAM1_STATE_MESSAGE3, STATE_MSG_SIZE, DEFAULT_NAMESPACE) + stateManager.decrement(stateId, 10) + stateManager.flushStates { e: AirbyteMessage? -> + emittedStatesFromDestination.add( + e, + ) + } + val stateWithStats3 = + emittedStatesFromDestination + .stream() + .collect( + Collectors.toMap( + { c: AirbyteMessage? -> c }, + { c: AirbyteMessage? -> c?.state?.destinationStats }, + ), + ) + val expectedDestinationStats3 = AirbyteStateStats().withRecordCount(10.0) + assertEquals( + listOf( + attachDestinationStateStats( + Jsons.deserialize( + STREAM1_STATE_MESSAGE3.serialized, + AirbyteMessage::class.java, + ), + expectedDestinationStats3, + ), + ), + stateWithStats3.keys.stream().toList(), + ) + assertEquals( + listOf(expectedDestinationStats3), + stateWithStats3.values.stream().toList(), + ) + } + + @Test + internal fun testCorrectFlushingManyStream() { + val emittedStatesFromDestination: MutableList = mutableListOf() + val stateManager = + GlobalAsyncStateManager(GlobalMemoryManager(TOTAL_QUEUES_MAX_SIZE_LIMIT_BYTES)) + + val stream1StateId: Long = simulateIncomingRecords(STREAM1_DESC, 3, stateManager) + val stream2StateId: Long = simulateIncomingRecords(STREAM2_DESC, 7, stateManager) + + stateManager.trackState(STREAM1_STATE_MESSAGE1, STATE_MSG_SIZE, DEFAULT_NAMESPACE) + stateManager.decrement(stream1StateId, 3) + stateManager.flushStates { e: AirbyteMessage? -> + emittedStatesFromDestination.add( + e, + ) + } + val expectedDestinationStats = AirbyteStateStats().withRecordCount(3.0) + val stateWithStats = + emittedStatesFromDestination + .stream() + .collect( + Collectors.toMap( + { c: AirbyteMessage? -> c }, + { c: AirbyteMessage? -> c?.state?.destinationStats }, + ), + ) + assertEquals( + listOf( + attachDestinationStateStats( + Jsons.deserialize( + STREAM1_STATE_MESSAGE1.serialized, + AirbyteMessage::class.java, + ), + expectedDestinationStats, + ), + ), + stateWithStats.keys.stream().toList(), + ) + assertEquals(listOf(expectedDestinationStats), stateWithStats.values.stream().toList()) + emittedStatesFromDestination.clear() + + stateManager.decrement(stream2StateId, 4) + stateManager.flushStates { e: AirbyteMessage? -> + emittedStatesFromDestination.add( + e, + ) + } + assertEquals(listOf(), emittedStatesFromDestination) + stateManager.trackState(STREAM2_STATE_MESSAGE, STATE_MSG_SIZE, DEFAULT_NAMESPACE) + stateManager.decrement(stream2StateId, 3) + // only flush state if counter is 0. + stateManager.flushStates { e: AirbyteMessage? -> + emittedStatesFromDestination.add( + e, + ) + } + val expectedDestinationStats2 = AirbyteStateStats().withRecordCount(7.0) + val stateWithStats2 = + emittedStatesFromDestination + .stream() + .collect( + Collectors.toMap( + { c: AirbyteMessage? -> c }, + { c: AirbyteMessage? -> c?.state?.destinationStats }, + ), + ) + assertEquals( + listOf( + attachDestinationStateStats( + Jsons.deserialize( + STREAM2_STATE_MESSAGE.serialized, + AirbyteMessage::class.java, + ), + expectedDestinationStats2, + ), + ), + stateWithStats2.keys.stream().toList(), + ) + assertEquals( + listOf(expectedDestinationStats2), + stateWithStats2.values.stream().toList(), + ) + } + } + + private fun simulateIncomingRecords( + desc: StreamDescriptor, + count: Long, + manager: GlobalAsyncStateManager, + ): Long { + var stateId = 0L + for (i in 0 until count) { + stateId = manager.getStateIdAndIncrementCounter(desc) + } + return stateId + } + + @Test + internal fun flushingRecordsShouldNotReduceStatsCounterForGlobalState() { + val emittedStatesFromDestination: MutableList = mutableListOf() + val stateManager = + GlobalAsyncStateManager(GlobalMemoryManager(TOTAL_QUEUES_MAX_SIZE_LIMIT_BYTES)) + val stateId = simulateIncomingRecords(STREAM1_DESC, 6, stateManager) + stateManager.decrement(stateId, 4) + stateManager.trackState(GLOBAL_STATE_MESSAGE1, 1, STREAM1_DESC.namespace) + stateManager.flushStates { e: AirbyteMessage? -> + emittedStatesFromDestination.add( + e, + ) + } + assertEquals(0, emittedStatesFromDestination.size) + stateManager.decrement(stateId, 2) + stateManager.flushStates { e: AirbyteMessage? -> + emittedStatesFromDestination.add( + e, + ) + } + assertEquals(1, emittedStatesFromDestination.size) + assertEquals( + 6.0, + emittedStatesFromDestination.first()?.state?.destinationStats?.recordCount, + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/BufferedStreamConsumerTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/BufferedStreamConsumerTest.kt new file mode 100644 index 0000000000000..87f86ce109b28 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/BufferedStreamConsumerTest.kt @@ -0,0 +1,690 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.buffered_stream_consumer + +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.collect.ImmutableMap +import com.google.common.collect.Lists +import io.airbyte.cdk.integrations.destination.record_buffer.* +import io.airbyte.commons.functional.CheckedFunction +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.Field +import io.airbyte.protocol.models.JsonSchemaType +import io.airbyte.protocol.models.v0.* +import java.time.Duration +import java.time.Instant +import java.util.* +import java.util.concurrent.TimeUnit +import java.util.function.Consumer +import java.util.stream.Collectors +import java.util.stream.Stream +import org.apache.commons.lang3.RandomStringUtils +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.mockito.ArgumentMatchers +import org.mockito.Mockito +import org.mockito.kotlin.any +import org.mockito.kotlin.mock + +class BufferedStreamConsumerTest { + private var consumer: BufferedStreamConsumer = mock() + private var onStart: OnStartFunction = mock() + private var recordWriter: RecordWriter = mock() + private var onClose: OnCloseFunction = mock() + private var isValidRecord: CheckedFunction = mock() + private var outputRecordCollector: Consumer = mock() + + @BeforeEach + @Throws(Exception::class) + fun setup() { + consumer = + BufferedStreamConsumer( + outputRecordCollector, + onStart, + InMemoryRecordBufferingStrategy(recordWriter, 1000), + onClose, + CATALOG, + isValidRecord + ) + + Mockito.`when`(isValidRecord.apply(ArgumentMatchers.any())).thenReturn(true) + } + + @Test + @Throws(Exception::class) + fun test1StreamWith1State() { + val expectedRecords = generateRecords(1000) + + consumer!!.start() + consumeRecords(consumer, expectedRecords) + consumer!!.accept(STATE_MESSAGE1) + consumer!!.close() + + verifyStartAndClose() + + verifyRecords(STREAM_NAME, SCHEMA_NAME, expectedRecords) + + Mockito.verify(outputRecordCollector).accept(STATE_MESSAGE1) + } + + @Test + @Throws(Exception::class) + fun test1StreamWith2State() { + val expectedRecords = generateRecords(1000) + + consumer!!.start() + consumeRecords(consumer, expectedRecords) + consumer!!.accept(STATE_MESSAGE1) + consumer!!.accept(STATE_MESSAGE2) + consumer!!.close() + + verifyStartAndClose() + + verifyRecords(STREAM_NAME, SCHEMA_NAME, expectedRecords) + + Mockito.verify(outputRecordCollector, Mockito.times(1)).accept(STATE_MESSAGE2) + } + + @Test + @Throws(Exception::class) + fun test1StreamWith0State() { + val expectedRecords = generateRecords(1000) + + consumer!!.start() + consumeRecords(consumer, expectedRecords) + consumer!!.close() + + verifyStartAndClose() + + verifyRecords(STREAM_NAME, SCHEMA_NAME, expectedRecords) + } + + @Test + @Throws(Exception::class) + fun test1StreamWithStateAndThenMoreRecordsBiggerThanBuffer() { + val expectedRecordsBatch1 = generateRecords(1000) + val expectedRecordsBatch2 = generateRecords(1000) + + consumer!!.start() + consumeRecords(consumer, expectedRecordsBatch1) + consumer!!.accept(STATE_MESSAGE1) + consumeRecords(consumer, expectedRecordsBatch2) + consumer!!.close() + + verifyStartAndClose() + + verifyRecords(STREAM_NAME, SCHEMA_NAME, expectedRecordsBatch1) + verifyRecords(STREAM_NAME, SCHEMA_NAME, expectedRecordsBatch2) + + Mockito.verify(outputRecordCollector).accept(STATE_MESSAGE1) + } + + @Test + @Throws(Exception::class) + fun test1StreamWithStateAndThenMoreRecordsSmallerThanBuffer() { + val expectedRecordsBatch1 = generateRecords(1000) + val expectedRecordsBatch2 = generateRecords(1000) + + // consumer with big enough buffered that we see both batches are flushed in one go. + val consumer = + BufferedStreamConsumer( + outputRecordCollector, + onStart, + InMemoryRecordBufferingStrategy(recordWriter, 10000), + onClose, + CATALOG, + isValidRecord + ) + + consumer.start() + consumeRecords(consumer, expectedRecordsBatch1) + consumer.accept(STATE_MESSAGE1) + consumeRecords(consumer, expectedRecordsBatch2) + consumer.close() + + verifyStartAndClose() + + val expectedRecords = + Lists.newArrayList(expectedRecordsBatch1, expectedRecordsBatch2) + .stream() + .flatMap { obj: List -> obj.stream() } + .collect(Collectors.toList()) + verifyRecords(STREAM_NAME, SCHEMA_NAME, expectedRecords) + + Mockito.verify(outputRecordCollector).accept(STATE_MESSAGE1) + } + + @Test + @Throws(Exception::class) + fun testExceptionAfterOneStateMessage() { + val expectedRecordsBatch1 = generateRecords(1000) + val expectedRecordsBatch2 = generateRecords(1000) + val expectedRecordsBatch3 = generateRecords(1000) + + consumer!!.start() + consumeRecords(consumer, expectedRecordsBatch1) + consumer!!.accept(STATE_MESSAGE1) + consumeRecords(consumer, expectedRecordsBatch2) + Mockito.`when`(isValidRecord!!.apply(ArgumentMatchers.any())) + .thenThrow(IllegalStateException("induced exception")) + Assertions.assertThrows(IllegalStateException::class.java) { + consumer!!.accept(expectedRecordsBatch3[0]) + } + consumer!!.close() + + verifyStartAndCloseFailure() + + verifyRecords(STREAM_NAME, SCHEMA_NAME, expectedRecordsBatch1) + + Mockito.verify(outputRecordCollector).accept(STATE_MESSAGE1) + } + + @Test + @Throws(Exception::class) + fun testExceptionAfterNoStateMessages() { + val expectedRecordsBatch1 = generateRecords(1000) + val expectedRecordsBatch2 = generateRecords(1000) + val expectedRecordsBatch3 = generateRecords(1000) + + consumer!!.start() + consumeRecords(consumer, expectedRecordsBatch1) + consumeRecords(consumer, expectedRecordsBatch2) + Mockito.`when`(isValidRecord!!.apply(ArgumentMatchers.any())) + .thenThrow(IllegalStateException("induced exception")) + Assertions.assertThrows(IllegalStateException::class.java) { + consumer!!.accept(expectedRecordsBatch3[0]) + } + consumer!!.close() + + verifyStartAndCloseFailure() + + verifyRecords(STREAM_NAME, SCHEMA_NAME, expectedRecordsBatch1) + + Mockito.verifyNoInteractions(outputRecordCollector) + } + + @Test + @Throws(Exception::class) + fun testExceptionDuringOnClose() { + Mockito.doThrow(IllegalStateException("induced exception")) + .`when`(onClose) + .accept(false, HashMap()) + + val expectedRecordsBatch1 = generateRecords(1000) + val expectedRecordsBatch2 = generateRecords(1000) + + consumer!!.start() + consumeRecords(consumer, expectedRecordsBatch1) + consumer!!.accept(STATE_MESSAGE1) + consumeRecords(consumer, expectedRecordsBatch2) + Assertions.assertThrows( + IllegalStateException::class.java, + { consumer!!.close() }, + "Expected an error to be thrown on close" + ) + + verifyStartAndClose() + + verifyRecords(STREAM_NAME, SCHEMA_NAME, expectedRecordsBatch1) + + Mockito.verify(outputRecordCollector).accept(STATE_MESSAGE1) + } + + @Test + @Throws(Exception::class) + fun test2StreamWith1State() { + val expectedRecordsStream1 = generateRecords(1000) + val expectedRecordsStream2 = + expectedRecordsStream1 + .stream() + .map { `object`: AirbyteMessage -> Jsons.clone(`object`) } + .peek { m: AirbyteMessage -> m.record.withStream(STREAM_NAME2) } + .collect(Collectors.toList()) + + consumer!!.start() + consumeRecords(consumer, expectedRecordsStream1) + consumer!!.accept(STATE_MESSAGE1) + consumeRecords(consumer, expectedRecordsStream2) + consumer!!.close() + + verifyStartAndClose() + + verifyRecords(STREAM_NAME, SCHEMA_NAME, expectedRecordsStream1) + verifyRecords(STREAM_NAME2, SCHEMA_NAME, expectedRecordsStream2) + + Mockito.verify(outputRecordCollector).accept(STATE_MESSAGE1) + } + + @Test + @Throws(Exception::class) + fun test2StreamWith2State() { + val expectedRecordsStream1 = generateRecords(1000) + val expectedRecordsStream2 = + expectedRecordsStream1 + .stream() + .map { `object`: AirbyteMessage -> Jsons.clone(`object`) } + .peek { m: AirbyteMessage -> m.record.withStream(STREAM_NAME2) } + .collect(Collectors.toList()) + + consumer!!.start() + consumeRecords(consumer, expectedRecordsStream1) + consumer!!.accept(STATE_MESSAGE1) + consumeRecords(consumer, expectedRecordsStream2) + consumer!!.accept(STATE_MESSAGE2) + consumer!!.close() + + verifyStartAndClose() + + verifyRecords(STREAM_NAME, SCHEMA_NAME, expectedRecordsStream1) + verifyRecords(STREAM_NAME2, SCHEMA_NAME, expectedRecordsStream2) + + Mockito.verify(outputRecordCollector, Mockito.times(1)).accept(STATE_MESSAGE2) + } + + // Periodic Buffer Flush Tests + @Test + @Throws(Exception::class) + fun testSlowStreamReturnsState() { + // generate records less than the default maxQueueSizeInBytes to confirm periodic flushing + // occurs + val expectedRecordsStream1 = generateRecords(500L) + val expectedRecordsStream1Batch2 = generateRecords(200L) + + // Overrides flush frequency for testing purposes to 5 seconds + val flushConsumer = consumerWithFlushFrequency + flushConsumer.start() + consumeRecords(flushConsumer, expectedRecordsStream1) + flushConsumer.accept(STATE_MESSAGE1) + // NOTE: Sleeps process for 5 seconds, if tests are slow this can be updated to reduce + // slowdowns + TimeUnit.SECONDS.sleep(PERIODIC_BUFFER_FREQUENCY.toLong()) + consumeRecords(flushConsumer, expectedRecordsStream1Batch2) + flushConsumer.close() + + verifyStartAndClose() + // expects the records to be grouped because periodicBufferFlush occurs at the end of + // acceptTracked + verifyRecords( + STREAM_NAME, + SCHEMA_NAME, + Stream.concat(expectedRecordsStream1.stream(), expectedRecordsStream1Batch2.stream()) + .collect(Collectors.toList()) + ) + Mockito.verify(outputRecordCollector).accept(STATE_MESSAGE1) + } + + @Test + @Throws(Exception::class) + fun testSlowStreamReturnsMultipleStates() { + // generate records less than the default maxQueueSizeInBytes to confirm periodic flushing + // occurs + val expectedRecordsStream1 = generateRecords(500L) + val expectedRecordsStream1Batch2 = generateRecords(200L) + // creates records equal to size that triggers buffer flush + val expectedRecordsStream1Batch3 = generateRecords(1000L) + + // Overrides flush frequency for testing purposes to 5 seconds + val flushConsumer = consumerWithFlushFrequency + flushConsumer.start() + consumeRecords(flushConsumer, expectedRecordsStream1) + flushConsumer.accept(STATE_MESSAGE1) + // NOTE: Sleeps process for 5 seconds, if tests are slow this can be updated to reduce + // slowdowns + TimeUnit.SECONDS.sleep(PERIODIC_BUFFER_FREQUENCY.toLong()) + consumeRecords(flushConsumer, expectedRecordsStream1Batch2) + consumeRecords(flushConsumer, expectedRecordsStream1Batch3) + flushConsumer.accept(STATE_MESSAGE2) + flushConsumer.close() + + verifyStartAndClose() + // expects the records to be grouped because periodicBufferFlush occurs at the end of + // acceptTracked + verifyRecords( + STREAM_NAME, + SCHEMA_NAME, + Stream.concat(expectedRecordsStream1.stream(), expectedRecordsStream1Batch2.stream()) + .collect(Collectors.toList()) + ) + verifyRecords(STREAM_NAME, SCHEMA_NAME, expectedRecordsStream1Batch3) + // expects two STATE messages returned since one will be flushed after periodic flushing + // occurs + // and the other after buffer has been filled + Mockito.verify(outputRecordCollector).accept(STATE_MESSAGE1) + Mockito.verify(outputRecordCollector).accept(STATE_MESSAGE2) + } + + /** + * Verify that if we ack a state message for stream2 while stream1 has unflushed records+state, + * that we do _not_ ack stream1's state message. + */ + @Test + @Throws(Exception::class) + fun testStreamTail() { + // InMemoryRecordBufferingStrategy always returns FLUSH_ALL, so just mock a new strategy + // here + val strategy = Mockito.mock(BufferingStrategy::class.java) + // The first two records that we push will not trigger any flushes, but the third record + // _will_ + // trigger a flush + Mockito.`when`(strategy.addRecord(any(), any())) + .thenReturn( + Optional.empty(), + Optional.empty(), + Optional.of(BufferFlushType.FLUSH_SINGLE_STREAM) + ) + consumer = + BufferedStreamConsumer( + outputRecordCollector, + onStart, + strategy, + onClose, + CATALOG, + isValidRecord, // Never periodic flush + Duration.ofHours(24), + null + ) + val expectedRecordsStream1 = + java.util.List.of( + AirbyteMessage() + .withType(AirbyteMessage.Type.RECORD) + .withRecord( + AirbyteRecordMessage().withStream(STREAM_NAME).withNamespace(SCHEMA_NAME) + ) + ) + val expectedRecordsStream2 = + java.util.List.of( + AirbyteMessage() + .withType(AirbyteMessage.Type.RECORD) + .withRecord( + AirbyteRecordMessage().withStream(STREAM_NAME2).withNamespace(SCHEMA_NAME) + ) + ) + + val state1 = + AirbyteMessage() + .withType(AirbyteMessage.Type.STATE) + .withState( + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream( + AirbyteStreamState() + .withStreamDescriptor( + StreamDescriptor() + .withName(STREAM_NAME) + .withNamespace(SCHEMA_NAME) + ) + .withStreamState( + Jsons.jsonNode(ImmutableMap.of("state_message_id", 1)) + ) + ) + ) + val state2 = + AirbyteMessage() + .withType(AirbyteMessage.Type.STATE) + .withState( + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream( + AirbyteStreamState() + .withStreamDescriptor( + StreamDescriptor() + .withName(STREAM_NAME2) + .withNamespace(SCHEMA_NAME) + ) + .withStreamState( + Jsons.jsonNode(ImmutableMap.of("state_message_id", 2)) + ) + ) + ) + + consumer!!.start() + consumeRecords(consumer, expectedRecordsStream1) + consumer!!.accept(state1) + // At this point, we have not yet flushed anything + consumeRecords(consumer, expectedRecordsStream2) + consumer!!.accept(state2) + consumeRecords(consumer, expectedRecordsStream2) + // Now we have flushed stream 2, but not stream 1 + // Verify that we have only acked stream 2's state. + Mockito.verify(outputRecordCollector).accept(state2) + Mockito.verify(outputRecordCollector, Mockito.never()).accept(state1) + + consumer!!.close() + // Now we've closed the consumer, which flushes everything. + // Verify that we ack stream 1's pending state. + Mockito.verify(outputRecordCollector).accept(state1) + } + + /** + * Same idea as [.testStreamTail] but with global state. We shouldn't emit any state messages + * until we close the consumer. + */ + @Test + @Throws(Exception::class) + fun testStreamTailGlobalState() { + // InMemoryRecordBufferingStrategy always returns FLUSH_ALL, so just mock a new strategy + // here + val strategy = Mockito.mock(BufferingStrategy::class.java) + // The first two records that we push will not trigger any flushes, but the third record + // _will_ + // trigger a flush + Mockito.`when`(strategy.addRecord(any(), any())) + .thenReturn( + Optional.empty(), + Optional.empty(), + Optional.of(BufferFlushType.FLUSH_SINGLE_STREAM) + ) + consumer = + BufferedStreamConsumer( + outputRecordCollector, + onStart, + strategy, + onClose, + CATALOG, + isValidRecord, // Never periodic flush + Duration.ofHours(24), + null + ) + val expectedRecordsStream1 = + java.util.List.of( + AirbyteMessage() + .withType(AirbyteMessage.Type.RECORD) + .withRecord( + AirbyteRecordMessage().withStream(STREAM_NAME).withNamespace(SCHEMA_NAME) + ) + ) + val expectedRecordsStream2 = + java.util.List.of( + AirbyteMessage() + .withType(AirbyteMessage.Type.RECORD) + .withRecord( + AirbyteRecordMessage().withStream(STREAM_NAME2).withNamespace(SCHEMA_NAME) + ) + ) + + val state1 = + AirbyteMessage() + .withType(AirbyteMessage.Type.STATE) + .withState( + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + .withGlobal( + AirbyteGlobalState() + .withSharedState( + Jsons.jsonNode(ImmutableMap.of("state_message_id", 1)) + ) + ) + ) + val state2 = + AirbyteMessage() + .withType(AirbyteMessage.Type.STATE) + .withState( + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + .withGlobal( + AirbyteGlobalState() + .withSharedState( + Jsons.jsonNode(ImmutableMap.of("state_message_id", 2)) + ) + ) + ) + + consumer!!.start() + consumeRecords(consumer, expectedRecordsStream1) + consumer!!.accept(state1) + // At this point, we have not yet flushed anything + consumeRecords(consumer, expectedRecordsStream2) + consumer!!.accept(state2) + consumeRecords(consumer, expectedRecordsStream2) + // Now we have flushed stream 2, but not stream 1 + // We should not have acked any state yet, because we haven't written stream1's records yet. + Mockito.verify(outputRecordCollector, Mockito.never()).accept(ArgumentMatchers.any()) + + consumer!!.close() + // Now we've closed the consumer, which flushes everything. + // Verify that we ack the final state. + // Note that we discard state1 entirely - this is OK. As long as we ack the last state + // message, + // the source can correctly resume from that point. + Mockito.verify(outputRecordCollector).accept(state2) + } + + private val consumerWithFlushFrequency: BufferedStreamConsumer + get() { + val flushFrequencyConsumer = + BufferedStreamConsumer( + outputRecordCollector, + onStart, + InMemoryRecordBufferingStrategy(recordWriter, 10000), + onClose, + CATALOG, + isValidRecord, + Duration.ofSeconds(PERIODIC_BUFFER_FREQUENCY.toLong()), + null + ) + return flushFrequencyConsumer + } + + @Throws(Exception::class) + private fun verifyStartAndClose() { + Mockito.verify(onStart).call() + Mockito.verify(onClose).accept(false, HashMap()) + } + + /** Indicates that a failure occurred while consuming AirbyteMessages */ + @Throws(Exception::class) + private fun verifyStartAndCloseFailure() { + Mockito.verify(onStart).call() + Mockito.verify(onClose).accept(true, HashMap()) + } + + @Throws(Exception::class) + private fun verifyRecords( + streamName: String, + namespace: String, + expectedRecords: Collection + ) { + Mockito.verify(recordWriter) + .accept( + AirbyteStreamNameNamespacePair(streamName, namespace), + expectedRecords + .stream() + .map { obj: AirbyteMessage -> obj.record } + .collect(Collectors.toList()) + ) + } + + companion object { + private const val SCHEMA_NAME = "public" + private const val STREAM_NAME = "id_and_name" + private const val STREAM_NAME2 = STREAM_NAME + 2 + private const val PERIODIC_BUFFER_FREQUENCY = 5 + private val CATALOG: ConfiguredAirbyteCatalog = + ConfiguredAirbyteCatalog() + .withStreams( + java.util.List.of( + CatalogHelpers.createConfiguredAirbyteStream( + STREAM_NAME, + SCHEMA_NAME, + Field.of("id", JsonSchemaType.NUMBER), + Field.of("name", JsonSchemaType.STRING) + ), + CatalogHelpers.createConfiguredAirbyteStream( + STREAM_NAME2, + SCHEMA_NAME, + Field.of("id", JsonSchemaType.NUMBER), + Field.of("name", JsonSchemaType.STRING) + ) + ) + ) + + private val STATE_MESSAGE1: AirbyteMessage = + AirbyteMessage() + .withType(AirbyteMessage.Type.STATE) + .withState( + AirbyteStateMessage() + .withData(Jsons.jsonNode(ImmutableMap.of("state_message_id", 1))) + ) + private val STATE_MESSAGE2: AirbyteMessage = + AirbyteMessage() + .withType(AirbyteMessage.Type.STATE) + .withState( + AirbyteStateMessage() + .withData(Jsons.jsonNode(ImmutableMap.of("state_message_id", 2))) + ) + + private fun consumeRecords( + consumer: BufferedStreamConsumer?, + records: Collection + ) { + records.forEach( + Consumer { m: AirbyteMessage -> + try { + consumer!!.accept(m) + } catch (e: Exception) { + throw RuntimeException(e) + } + } + ) + } + + // NOTE: Generates records at chunks of 160 bytes + private fun generateRecords(targetSizeInBytes: Long): List { + val output: MutableList = Lists.newArrayList() + var bytesCounter: Long = 0 + var i = 0 + while (true) { + val payload = + Jsons.jsonNode( + ImmutableMap.of( + "id", + RandomStringUtils.randomAlphabetic(7), + "name", + "human " + String.format("%8d", i) + ) + ) + val sizeInBytes = RecordSizeEstimator.getStringByteSize(payload) + bytesCounter += sizeInBytes + val airbyteMessage = + AirbyteMessage() + .withType(AirbyteMessage.Type.RECORD) + .withRecord( + AirbyteRecordMessage() + .withStream(STREAM_NAME) + .withNamespace(SCHEMA_NAME) + .withEmittedAt(Instant.now().toEpochMilli()) + .withData(payload) + ) + if (bytesCounter > targetSizeInBytes) { + break + } else { + output.add(airbyteMessage) + } + i++ + } + return output + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/RecordSizeEstimatorTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/RecordSizeEstimatorTest.kt new file mode 100644 index 0000000000000..aec47539aaadf --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/buffered_stream_consumer/RecordSizeEstimatorTest.kt @@ -0,0 +1,63 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.buffered_stream_consumer + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.v0.AirbyteRecordMessage +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class RecordSizeEstimatorTest { + @Test + fun testPeriodicSampling() { + // the estimate performs a size sampling every 3 records + val sizeEstimator = RecordSizeEstimator(3) + val stream = "stream" + val record0 = AirbyteRecordMessage().withStream(stream).withData(DATA_0) + val record1 = AirbyteRecordMessage().withStream(stream).withData(DATA_1) + val record2 = AirbyteRecordMessage().withStream(stream).withData(DATA_2) + + // sample record message 1 + val firstEstimation = DATA_1_SIZE + Assertions.assertEquals(firstEstimation, sizeEstimator.getEstimatedByteSize(record1)) + // next two calls return the first sampling result + Assertions.assertEquals(firstEstimation, sizeEstimator.getEstimatedByteSize(record0)) + Assertions.assertEquals(firstEstimation, sizeEstimator.getEstimatedByteSize(record0)) + + // sample record message 2 + val secondEstimation = firstEstimation / 2 + DATA_2_SIZE / 2 + Assertions.assertEquals(secondEstimation, sizeEstimator.getEstimatedByteSize(record2)) + // next two calls return the second sampling result + Assertions.assertEquals(secondEstimation, sizeEstimator.getEstimatedByteSize(record0)) + Assertions.assertEquals(secondEstimation, sizeEstimator.getEstimatedByteSize(record0)) + + // sample record message 1 + val thirdEstimation = secondEstimation / 2 + DATA_1_SIZE / 2 + Assertions.assertEquals(thirdEstimation, sizeEstimator.getEstimatedByteSize(record1)) + // next two calls return the first sampling result + Assertions.assertEquals(thirdEstimation, sizeEstimator.getEstimatedByteSize(record0)) + Assertions.assertEquals(thirdEstimation, sizeEstimator.getEstimatedByteSize(record0)) + } + + @Test + fun testDifferentEstimationPerStream() { + val sizeEstimator = RecordSizeEstimator() + val record0 = AirbyteRecordMessage().withStream("stream1").withData(DATA_0) + val record1 = AirbyteRecordMessage().withStream("stream2").withData(DATA_1) + val record2 = AirbyteRecordMessage().withStream("stream3").withData(DATA_2) + Assertions.assertEquals(DATA_0_SIZE, sizeEstimator.getEstimatedByteSize(record0)) + Assertions.assertEquals(DATA_1_SIZE, sizeEstimator.getEstimatedByteSize(record1)) + Assertions.assertEquals(DATA_2_SIZE, sizeEstimator.getEstimatedByteSize(record2)) + } + + companion object { + private val DATA_0: JsonNode = Jsons.deserialize("{}") + private val DATA_1: JsonNode = Jsons.deserialize("{ \"field1\": true }") + private val DATA_2: JsonNode = Jsons.deserialize("{ \"field1\": 10000 }") + private val DATA_0_SIZE = RecordSizeEstimator.getStringByteSize(DATA_0) + private val DATA_1_SIZE = RecordSizeEstimator.getStringByteSize(DATA_1) + private val DATA_2_SIZE = RecordSizeEstimator.getStringByteSize(DATA_2) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DefaultDestStateLifecycleManagerTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DefaultDestStateLifecycleManagerTest.kt new file mode 100644 index 0000000000000..38c17de0e07f4 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DefaultDestStateLifecycleManagerTest.kt @@ -0,0 +1,150 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.dest_state_lifecycle_manager + +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.AirbyteStateMessage +import io.airbyte.protocol.models.v0.AirbyteStreamState +import io.airbyte.protocol.models.v0.StreamDescriptor +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.mockito.Mockito + +internal class DefaultDestStateLifecycleManagerTest { + private lateinit var mgr1: DestStateLifecycleManager + private lateinit var singleStateMgr: DestStateLifecycleManager + private lateinit var streamMgr: DestStateLifecycleManager + + @BeforeEach + fun setup() { + singleStateMgr = Mockito.mock(DestStateLifecycleManager::class.java) + streamMgr = Mockito.mock(DestStateLifecycleManager::class.java) + mgr1 = DefaultDestStateLifecycleManager(singleStateMgr, streamMgr) + } + + @Test + fun testFailsOnIncompatibleStates() { + val manager1 = DefaultDestStateLifecycleManager(singleStateMgr, streamMgr) + manager1.addState(UNSET_TYPE_MESSAGE) + manager1.addState(UNSET_TYPE_MESSAGE) + manager1.addState(LEGACY_MESSAGE) + Assertions.assertThrows(IllegalArgumentException::class.java) { + manager1.addState(GLOBAL_MESSAGE) + } + Assertions.assertThrows(IllegalArgumentException::class.java) { + manager1.addState(STREAM_MESSAGE) + } + + val manager2 = DefaultDestStateLifecycleManager(singleStateMgr, streamMgr) + manager2.addState(LEGACY_MESSAGE) + manager2.addState(LEGACY_MESSAGE) + manager2.addState(UNSET_TYPE_MESSAGE) + Assertions.assertThrows(IllegalArgumentException::class.java) { + manager2.addState(GLOBAL_MESSAGE) + } + Assertions.assertThrows(IllegalArgumentException::class.java) { + manager2.addState(STREAM_MESSAGE) + } + + val manager3 = DefaultDestStateLifecycleManager(singleStateMgr, streamMgr) + manager3.addState(GLOBAL_MESSAGE) + manager3.addState(GLOBAL_MESSAGE) + Assertions.assertThrows(IllegalArgumentException::class.java) { + manager3.addState(UNSET_TYPE_MESSAGE) + } + Assertions.assertThrows(IllegalArgumentException::class.java) { + manager3.addState(LEGACY_MESSAGE) + } + Assertions.assertThrows(IllegalArgumentException::class.java) { + manager3.addState(STREAM_MESSAGE) + } + + val manager4 = DefaultDestStateLifecycleManager(singleStateMgr, streamMgr) + manager4.addState(STREAM_MESSAGE) + manager4.addState(STREAM_MESSAGE) + Assertions.assertThrows(IllegalArgumentException::class.java) { + manager4.addState(UNSET_TYPE_MESSAGE) + } + Assertions.assertThrows(IllegalArgumentException::class.java) { + manager4.addState(LEGACY_MESSAGE) + } + Assertions.assertThrows(IllegalArgumentException::class.java) { + manager4.addState(GLOBAL_MESSAGE) + } + } + + @Test + fun testDelegatesLegacyMessages() { + mgr1!!.addState(UNSET_TYPE_MESSAGE) + mgr1!!.addState(LEGACY_MESSAGE) + mgr1!!.markPendingAsFlushed() + mgr1!!.markFlushedAsCommitted() + mgr1!!.listFlushed() + mgr1!!.listCommitted() + Mockito.verify(singleStateMgr).addState(UNSET_TYPE_MESSAGE) + Mockito.verify(singleStateMgr).addState(LEGACY_MESSAGE) + Mockito.verify(singleStateMgr).markPendingAsFlushed() + Mockito.verify(singleStateMgr).markFlushedAsCommitted() + Mockito.verify(singleStateMgr).listFlushed() + Mockito.verify(singleStateMgr).listCommitted() + } + + @Test + fun testDelegatesGlobalMessages() { + mgr1!!.addState(GLOBAL_MESSAGE) + mgr1!!.markPendingAsFlushed() + mgr1!!.markFlushedAsCommitted() + mgr1!!.listFlushed() + mgr1!!.listCommitted() + Mockito.verify(singleStateMgr).addState(GLOBAL_MESSAGE) + Mockito.verify(singleStateMgr).markPendingAsFlushed() + Mockito.verify(singleStateMgr).markFlushedAsCommitted() + Mockito.verify(singleStateMgr).listFlushed() + Mockito.verify(singleStateMgr).listCommitted() + } + + @Test + fun testDelegatesStreamMessages() { + mgr1!!.addState(STREAM_MESSAGE) + mgr1!!.markPendingAsFlushed() + mgr1!!.markFlushedAsCommitted() + mgr1!!.listFlushed() + mgr1!!.listCommitted() + + Mockito.verify(streamMgr).addState(STREAM_MESSAGE) + Mockito.verify(streamMgr).markPendingAsFlushed() + Mockito.verify(streamMgr).markFlushedAsCommitted() + Mockito.verify(streamMgr).listFlushed() + Mockito.verify(streamMgr).listCommitted() + } + + companion object { + private val UNSET_TYPE_MESSAGE: AirbyteMessage = + AirbyteMessage().withType(AirbyteMessage.Type.STATE).withState(AirbyteStateMessage()) + private val LEGACY_MESSAGE: AirbyteMessage = + AirbyteMessage() + .withType(AirbyteMessage.Type.STATE) + .withState( + AirbyteStateMessage().withType(AirbyteStateMessage.AirbyteStateType.LEGACY) + ) + private val GLOBAL_MESSAGE: AirbyteMessage = + AirbyteMessage() + .withType(AirbyteMessage.Type.STATE) + .withState( + AirbyteStateMessage().withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + ) + private val STREAM_MESSAGE: AirbyteMessage = + AirbyteMessage() + .withType(AirbyteMessage.Type.STATE) + .withState( + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream( + AirbyteStreamState() + .withStreamDescriptor(StreamDescriptor().withName("users")) + ) + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DestSingleStateLifecycleManagerTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DestSingleStateLifecycleManagerTest.kt new file mode 100644 index 0000000000000..b2a46fc5bc98d --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DestSingleStateLifecycleManagerTest.kt @@ -0,0 +1,146 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.dest_state_lifecycle_manager + +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.AirbyteStateMessage +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test + +internal class DestSingleStateLifecycleManagerTest { + private var mgr: DestSingleStateLifecycleManager? = null + + @BeforeEach + fun setup() { + mgr = DestSingleStateLifecycleManager() + } + + /** + * Demonstrates expected lifecycle of a state object for documentation purposes. Subsequent test + * get into the details. + */ + @Test + fun testBasicLifeCycle() { + // starts with no state. + Assertions.assertTrue(mgr!!.listPending().isEmpty()) + Assertions.assertTrue(mgr!!.listFlushed()!!.isEmpty()) + Assertions.assertTrue(mgr!!.listCommitted()!!.isEmpty()) + + mgr!!.addState(MESSAGE1) + // new state supersedes previous ones. we should only see MESSAGE2 from here on out. + mgr!!.addState(MESSAGE2) + + // after adding a state, it is in pending only. + Assertions.assertEquals(MESSAGE2, mgr!!.listPending().poll()) + Assertions.assertTrue(mgr!!.listFlushed()!!.isEmpty()) + Assertions.assertTrue(mgr!!.listCommitted()!!.isEmpty()) + + mgr!!.markPendingAsFlushed() + + // after flushing the state it is in flushed only. + Assertions.assertTrue(mgr!!.listPending().isEmpty()) + Assertions.assertEquals(MESSAGE2, mgr!!.listFlushed()!!.poll()) + Assertions.assertTrue(mgr!!.listCommitted()!!.isEmpty()) + + // after committing the state it is in committed only. + mgr!!.markFlushedAsCommitted() + + Assertions.assertTrue(mgr!!.listPending().isEmpty()) + Assertions.assertTrue(mgr!!.listFlushed()!!.isEmpty()) + Assertions.assertEquals(MESSAGE2, mgr!!.listCommitted()!!.poll()) + } + + @Test + fun testPending() { + mgr!!.addState(MESSAGE1) + mgr!!.addState(MESSAGE2) + + // verify the LAST message is returned. + Assertions.assertEquals(MESSAGE2, mgr!!.listPending().poll()) + Assertions.assertTrue(mgr!!.listFlushed()!!.isEmpty()) + Assertions.assertTrue(mgr!!.listCommitted()!!.isEmpty()) + } + + @Test + fun testFlushed() { + mgr!!.addState(MESSAGE1) + mgr!!.addState(MESSAGE2) + mgr!!.markPendingAsFlushed() + + Assertions.assertTrue(mgr!!.listPending().isEmpty()) + Assertions.assertEquals(MESSAGE2, mgr!!.listFlushed()!!.poll()) + Assertions.assertTrue(mgr!!.listCommitted()!!.isEmpty()) + + // verify that multiple calls to markPendingAsFlushed overwrite old states + mgr!!.addState(MESSAGE1) + mgr!!.markPendingAsFlushed() + mgr!!.markPendingAsFlushed() + + Assertions.assertTrue(mgr!!.listPending().isEmpty()) + Assertions.assertEquals(MESSAGE1, mgr!!.listFlushed()!!.poll()) + Assertions.assertTrue(mgr!!.listCommitted()!!.isEmpty()) + } + + @Test + fun testCommitted() { + mgr!!.addState(MESSAGE1) + mgr!!.addState(MESSAGE2) + mgr!!.markPendingAsFlushed() + mgr!!.markFlushedAsCommitted() + + Assertions.assertTrue(mgr!!.listPending().isEmpty()) + Assertions.assertTrue(mgr!!.listFlushed()!!.isEmpty()) + Assertions.assertEquals(MESSAGE2, mgr!!.listCommitted()!!.poll()) + + // verify that multiple calls to markFlushedAsCommitted overwrite old states + mgr!!.addState(MESSAGE1) + mgr!!.markPendingAsFlushed() + mgr!!.markFlushedAsCommitted() + mgr!!.markFlushedAsCommitted() + + Assertions.assertTrue(mgr!!.listPending().isEmpty()) + Assertions.assertTrue(mgr!!.listFlushed()!!.isEmpty()) + Assertions.assertEquals(MESSAGE1, mgr!!.listCommitted()!!.poll()) + } + + /* + * This change follows the same changes in DestStreamStateLifecycleManager where the goal is to + * confirm that `markPendingAsCommitted` combines what was previous `markPendingAsFlushed` and + * `markFlushedAsCommitted` + * + * The reason for this method is due to destination checkpointing will no longer hold into a state + * as "Flushed" but immediately commit records to the destination's final table + */ + @Test + fun testMarkPendingAsCommitted() { + mgr!!.addState(MESSAGE1) + mgr!!.addState(MESSAGE2) + mgr!!.markPendingAsCommitted() + + Assertions.assertTrue(mgr!!.listPending().isEmpty()) + Assertions.assertTrue(mgr!!.listFlushed()!!.isEmpty()) + Assertions.assertEquals(MESSAGE2, mgr!!.listCommitted()!!.poll()) + } + + companion object { + private val MESSAGE1: AirbyteMessage = + AirbyteMessage() + .withType(AirbyteMessage.Type.STATE) + .withState( + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + .withData(Jsons.jsonNode("a")) + ) + private val MESSAGE2: AirbyteMessage = + AirbyteMessage() + .withType(AirbyteMessage.Type.STATE) + .withState( + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + .withData(Jsons.jsonNode("b")) + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DestStreamStateLifecycleManagerTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DestStreamStateLifecycleManagerTest.kt new file mode 100644 index 0000000000000..53e68cd61e6f0 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/dest_state_lifecycle_manager/DestStreamStateLifecycleManagerTest.kt @@ -0,0 +1,204 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.dest_state_lifecycle_manager + +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.AirbyteStateMessage +import io.airbyte.protocol.models.v0.AirbyteStreamState +import io.airbyte.protocol.models.v0.StreamDescriptor +import java.util.* +import java.util.List +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test + +internal class DestStreamStateLifecycleManagerTest { + private var mgr: DestStreamStateLifecycleManager? = null + + @BeforeEach + fun setup() { + mgr = DestStreamStateLifecycleManager("default_namespace") + } + + /** + * Demonstrates expected lifecycle of a state object for documentation purposes. Subsequent test + * get into the details. + */ + @Test + fun testBasicLifeCycle() { + // starts with no state. + Assertions.assertTrue(mgr!!.listPending().isEmpty()) + Assertions.assertTrue(mgr!!.listFlushed()!!.isEmpty()) + Assertions.assertTrue(mgr!!.listCommitted()!!.isEmpty()) + + mgr!!.addState(STREAM1_MESSAGE1) + // new state supersedes previous ones. we should only see MESSAGE2 for STREAM1 from here on + // out. + mgr!!.addState(STREAM1_MESSAGE2) + // different stream, thus does not interact with messages from STREAM1. + mgr!!.addState(STREAM2_MESSAGE1) + + // after adding a state, it is in pending only. + Assertions.assertEquals( + LinkedList(List.of(STREAM1_MESSAGE2, STREAM2_MESSAGE1)), + mgr!!.listPending() + ) + Assertions.assertTrue(mgr!!.listFlushed().isEmpty()) + Assertions.assertTrue(mgr!!.listCommitted().isEmpty()) + + mgr!!.markPendingAsFlushed() + + // after flushing the state it is in flushed only. + Assertions.assertTrue(mgr!!.listPending().isEmpty()) + Assertions.assertEquals( + LinkedList(List.of(STREAM1_MESSAGE2, STREAM2_MESSAGE1)), + mgr!!.listFlushed() + ) + Assertions.assertTrue(mgr!!.listCommitted().isEmpty()) + + // after committing the state it is in committed only. + mgr!!.markFlushedAsCommitted() + + Assertions.assertTrue(mgr!!.listPending().isEmpty()) + Assertions.assertTrue(mgr!!.listFlushed().isEmpty()) + Assertions.assertEquals( + LinkedList(List.of(STREAM1_MESSAGE2, STREAM2_MESSAGE1)), + mgr!!.listCommitted() + ) + } + + @Test + fun testPending() { + mgr!!.addState(STREAM1_MESSAGE1) + mgr!!.addState(STREAM1_MESSAGE2) + mgr!!.addState(STREAM2_MESSAGE1) + + // verify the LAST message is returned. + Assertions.assertEquals( + LinkedList(List.of(STREAM1_MESSAGE2, STREAM2_MESSAGE1)), + mgr!!.listPending() + ) + Assertions.assertTrue(mgr!!.listFlushed().isEmpty()) + Assertions.assertTrue(mgr!!.listCommitted().isEmpty()) + } + + /* + * TODO: remove this test after all destination connectors have updated to reflect destination + * checkpointing changes where flush/commit will be bundled into the same operation + */ + @Deprecated("") + @Test + fun testFlushed() { + mgr!!.addState(STREAM1_MESSAGE1) + mgr!!.addState(STREAM1_MESSAGE2) + mgr!!.addState(STREAM2_MESSAGE1) + mgr!!.markPendingAsFlushed() + + Assertions.assertTrue(mgr!!.listPending().isEmpty()) + Assertions.assertEquals( + LinkedList(List.of(STREAM1_MESSAGE2, STREAM2_MESSAGE1)), + mgr!!.listFlushed() + ) + Assertions.assertTrue(mgr!!.listCommitted().isEmpty()) + + // verify that multiple calls to markPendingAsFlushed overwrite old states + mgr!!.addState(STREAM1_MESSAGE1) + mgr!!.markPendingAsFlushed() + mgr!!.markPendingAsFlushed() + + Assertions.assertTrue(mgr!!.listPending().isEmpty()) + Assertions.assertEquals( + LinkedList(List.of(STREAM1_MESSAGE1, STREAM2_MESSAGE1)), + mgr!!.listFlushed() + ) + Assertions.assertTrue(mgr!!.listCommitted().isEmpty()) + } + + @Test + fun testCommitted() { + mgr!!.addState(STREAM1_MESSAGE1) + mgr!!.addState(STREAM1_MESSAGE2) + mgr!!.addState(STREAM2_MESSAGE1) + mgr!!.markPendingAsFlushed() + mgr!!.markFlushedAsCommitted() + + Assertions.assertTrue(mgr!!.listPending().isEmpty()) + Assertions.assertTrue(mgr!!.listFlushed().isEmpty()) + Assertions.assertEquals( + LinkedList(List.of(STREAM1_MESSAGE2, STREAM2_MESSAGE1)), + mgr!!.listCommitted() + ) + + // verify that multiple calls to markFlushedAsCommitted overwrite old states + mgr!!.addState(STREAM1_MESSAGE1) + mgr!!.markPendingAsFlushed() + mgr!!.markFlushedAsCommitted() + mgr!!.markFlushedAsCommitted() + + Assertions.assertTrue(mgr!!.listPending().isEmpty()) + Assertions.assertTrue(mgr!!.listFlushed().isEmpty()) + Assertions.assertEquals( + LinkedList(List.of(STREAM1_MESSAGE1, STREAM2_MESSAGE1)), + mgr!!.listCommitted() + ) + } + + /* + * This section is to test for logic that is isolated to changes with respect to destination + * checkpointing where it captures flush and commit are bundled into a transaction so + * + * buffer -(flush buffer)-> staging area -(copy into {staging_file})-> destination raw table + */ + @Test + fun testPendingAsCommitted() { + mgr!!.addState(STREAM1_MESSAGE1) + mgr!!.markPendingAsCommitted() + + // verifies that we've skipped "Flushed" without needing to call `markPendingAsFlushed()` + // and + // `markFlushedAsCommitted` + Assertions.assertTrue(mgr!!.listPending().isEmpty()) + Assertions.assertEquals(LinkedList(List.of(STREAM1_MESSAGE1)), mgr!!.listCommitted()) + } + + companion object { + private val STREAM1_MESSAGE1: AirbyteMessage = + AirbyteMessage() + .withType(AirbyteMessage.Type.STATE) + .withState( + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream( + AirbyteStreamState() + .withStreamDescriptor(StreamDescriptor().withName("apples")) + .withStreamState(Jsons.jsonNode("a")) + ) + ) + private val STREAM1_MESSAGE2: AirbyteMessage = + AirbyteMessage() + .withType(AirbyteMessage.Type.STATE) + .withState( + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream( + AirbyteStreamState() + .withStreamDescriptor(StreamDescriptor().withName("apples")) + .withStreamState(Jsons.jsonNode("b")) + ) + ) + private val STREAM2_MESSAGE1: AirbyteMessage = + AirbyteMessage() + .withType(AirbyteMessage.Type.STATE) + .withState( + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream( + AirbyteStreamState() + .withStreamDescriptor(StreamDescriptor().withName("bananas")) + .withStreamState(Jsons.jsonNode("10")) + ) + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/InMemoryRecordBufferingStrategyTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/InMemoryRecordBufferingStrategyTest.kt new file mode 100644 index 0000000000000..90584e05b1299 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/InMemoryRecordBufferingStrategyTest.kt @@ -0,0 +1,71 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.record_buffer + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.destination.buffered_stream_consumer.RecordWriter +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.AirbyteRecordMessage +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair +import java.util.List +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.mockito.Mockito +import org.mockito.kotlin.mock + +class InMemoryRecordBufferingStrategyTest { + private val recordWriter: RecordWriter = mock() + + @Test + @Throws(Exception::class) + fun testBuffering() { + val buffering = + InMemoryRecordBufferingStrategy(recordWriter, MAX_QUEUE_SIZE_IN_BYTES.toLong()) + val stream1 = AirbyteStreamNameNamespacePair("stream1", "namespace") + val stream2 = AirbyteStreamNameNamespacePair("stream2", null) + val message1 = generateMessage(stream1) + val message2 = generateMessage(stream2) + val message3 = generateMessage(stream2) + val message4 = generateMessage(stream2) + + Assertions.assertFalse(buffering.addRecord(stream1, message1).isPresent) + Assertions.assertFalse(buffering.addRecord(stream2, message2).isPresent) + // Buffer still has room + val flushType = buffering.addRecord(stream2, message3) + // Keeps track of this #addRecord since we're expecting a buffer flush & that the flushType + // value will indicate that all buffers were flushed + Assertions.assertTrue(flushType.isPresent) + Assertions.assertEquals(flushType.get(), BufferFlushType.FLUSH_ALL) + // Buffer limit reach, flushing all messages so far before adding the new incoming one + Mockito.verify(recordWriter, Mockito.times(1)).accept(stream1, List.of(message1.record)) + Mockito.verify(recordWriter, Mockito.times(1)).accept(stream2, List.of(message2.record)) + + buffering.addRecord(stream2, message4) + + // force flush to terminate test + buffering.flushAllBuffers() + Mockito.verify(recordWriter, Mockito.times(1)) + .accept(stream2, List.of(message3.record, message4.record)) + } + + companion object { + private val MESSAGE_DATA: JsonNode = Jsons.deserialize("{ \"field1\": 10000 }") + + // MESSAGE_DATA should be 64 bytes long, size the buffer such as it can contain at least 2 + // message + // instances + private const val MAX_QUEUE_SIZE_IN_BYTES = 130 + + private fun generateMessage(stream: AirbyteStreamNameNamespacePair): AirbyteMessage { + return AirbyteMessage() + .withRecord( + AirbyteRecordMessage() + .withStream(stream.name) + .withNamespace(stream.namespace) + .withData(MESSAGE_DATA) + ) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/SerializedBufferingStrategyTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/SerializedBufferingStrategyTest.kt new file mode 100644 index 0000000000000..d63dce5844805 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/destination/record_buffer/SerializedBufferingStrategyTest.kt @@ -0,0 +1,236 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.record_buffer + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.AirbyteRecordMessage +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.mockito.Mockito +import org.mockito.kotlin.any + +class SerializedBufferingStrategyTest { + private val catalog: ConfiguredAirbyteCatalog = + Mockito.mock(ConfiguredAirbyteCatalog::class.java) + private val perStreamFlushHook: FlushBufferFunction = + Mockito.mock(FlushBufferFunction::class.java) + + private val recordWriter1: SerializableBuffer = Mockito.mock(SerializableBuffer::class.java) + private val recordWriter2: SerializableBuffer = Mockito.mock(SerializableBuffer::class.java) + private val recordWriter3: SerializableBuffer = Mockito.mock(SerializableBuffer::class.java) + private val recordWriter4: SerializableBuffer = Mockito.mock(SerializableBuffer::class.java) + + @BeforeEach + @Throws(Exception::class) + fun setup() { + setupMock(recordWriter1) + setupMock(recordWriter2) + setupMock(recordWriter3) + setupMock(recordWriter4) + } + + @Throws(Exception::class) + private fun setupMock(mockObject: SerializableBuffer) { + Mockito.`when`(mockObject.accept(any())).thenReturn(10L) + Mockito.`when`(mockObject.byteCount).thenReturn(10L) + Mockito.`when`(mockObject.maxTotalBufferSizeInBytes).thenReturn(MAX_TOTAL_BUFFER_SIZE_BYTES) + Mockito.`when`(mockObject.maxPerStreamBufferSizeInBytes) + .thenReturn(MAX_PER_STREAM_BUFFER_SIZE_BYTES) + Mockito.`when`(mockObject.maxConcurrentStreamsInBuffer).thenReturn(4) + } + + @Test + @Throws(Exception::class) + fun testPerStreamThresholdFlush() { + val buffering = + SerializedBufferingStrategy(onCreateBufferFunction(), catalog, perStreamFlushHook) + val stream1 = AirbyteStreamNameNamespacePair(STREAM_1, "namespace") + val stream2 = AirbyteStreamNameNamespacePair(STREAM_2, null) + // To test per stream threshold, we are sending multiple test messages on a single stream + val message1 = generateMessage(stream1) + val message2 = generateMessage(stream2) + val message3 = generateMessage(stream2) + val message4 = generateMessage(stream2) + val message5 = generateMessage(stream2) + + Mockito.`when`(recordWriter1.byteCount).thenReturn(10L) // one record in recordWriter1 + Assertions.assertFalse(buffering.addRecord(stream1, message1).isPresent) + Mockito.`when`(recordWriter2.byteCount).thenReturn(10L) // one record in recordWriter2 + Assertions.assertFalse(buffering.addRecord(stream2, message2).isPresent) + + // Total and per stream Buffers still have room + Mockito.verify(perStreamFlushHook, Mockito.times(0)).accept(stream1, recordWriter1) + Mockito.verify(perStreamFlushHook, Mockito.times(0)).accept(stream2, recordWriter2) + + Mockito.`when`(recordWriter2.byteCount).thenReturn(20L) // second record in recordWriter2 + Assertions.assertFalse(buffering.addRecord(stream2, message3).isPresent) + Mockito.`when`(recordWriter2.byteCount).thenReturn(30L) // third record in recordWriter2 + + // Buffer reaches limit so a buffer flush occurs returning a buffer flush type of single + // stream + val flushType = buffering.addRecord(stream2, message4) + Assertions.assertTrue(flushType.isPresent) + Assertions.assertEquals(flushType.get(), BufferFlushType.FLUSH_SINGLE_STREAM) + + // The buffer limit is now reached for stream2, flushing that single stream only + Mockito.verify(perStreamFlushHook, Mockito.times(0)).accept(stream1, recordWriter1) + Mockito.verify(perStreamFlushHook, Mockito.times(1)).accept(stream2, recordWriter2) + + Mockito.`when`(recordWriter2.byteCount) + .thenReturn(10L) // back to one record in recordWriter2 + Assertions.assertFalse(buffering.addRecord(stream2, message5).isPresent) + + // force flush to terminate test + buffering.flushAllBuffers() + Mockito.verify(perStreamFlushHook, Mockito.times(1)).accept(stream1, recordWriter1) + Mockito.verify(perStreamFlushHook, Mockito.times(2)).accept(stream2, recordWriter2) + } + + @Test + @Throws(Exception::class) + fun testTotalStreamThresholdFlush() { + val buffering = + SerializedBufferingStrategy(onCreateBufferFunction(), catalog, perStreamFlushHook) + val stream1 = AirbyteStreamNameNamespacePair(STREAM_1, "namespace") + val stream2 = AirbyteStreamNameNamespacePair(STREAM_2, "namespace") + val stream3 = AirbyteStreamNameNamespacePair(STREAM_3, "namespace") + // To test total stream threshold, we are sending test messages to multiple streams without + // reaching + // per stream limits + val message1 = generateMessage(stream1) + val message2 = generateMessage(stream2) + val message3 = generateMessage(stream3) + val message4 = generateMessage(stream1) + val message5 = generateMessage(stream2) + val message6 = generateMessage(stream3) + + Assertions.assertFalse(buffering.addRecord(stream1, message1).isPresent) + Assertions.assertFalse(buffering.addRecord(stream2, message2).isPresent) + // Total and per stream Buffers still have room + Mockito.verify(perStreamFlushHook, Mockito.times(0)).accept(stream1, recordWriter1) + Mockito.verify(perStreamFlushHook, Mockito.times(0)).accept(stream2, recordWriter2) + Mockito.verify(perStreamFlushHook, Mockito.times(0)).accept(stream3, recordWriter3) + + Assertions.assertFalse(buffering.addRecord(stream3, message3).isPresent) + Mockito.`when`(recordWriter1.byteCount).thenReturn(20L) // second record in recordWriter1 + Assertions.assertFalse(buffering.addRecord(stream1, message4).isPresent) + Mockito.`when`(recordWriter2.byteCount).thenReturn(20L) // second record in recordWriter2 + + // In response to checkpointing, will need to know what type of buffer flush occurred to + // mark + // AirbyteStateMessage as committed depending on DestDefaultStateLifecycleManager + val flushType = buffering.addRecord(stream2, message5) + Assertions.assertTrue(flushType.isPresent) + Assertions.assertEquals(flushType.get(), BufferFlushType.FLUSH_ALL) + + // Buffer limit reached for total streams, flushing all streams + Mockito.verify(perStreamFlushHook, Mockito.times(1)).accept(stream1, recordWriter1) + Mockito.verify(perStreamFlushHook, Mockito.times(1)).accept(stream2, recordWriter2) + Mockito.verify(perStreamFlushHook, Mockito.times(1)).accept(stream3, recordWriter3) + + Assertions.assertFalse(buffering.addRecord(stream3, message6).isPresent) + // force flush to terminate test + buffering.flushAllBuffers() + Mockito.verify(perStreamFlushHook, Mockito.times(1)).accept(stream1, recordWriter1) + Mockito.verify(perStreamFlushHook, Mockito.times(1)).accept(stream2, recordWriter2) + Mockito.verify(perStreamFlushHook, Mockito.times(2)).accept(stream3, recordWriter3) + } + + @Test + @Throws(Exception::class) + fun testConcurrentStreamThresholdFlush() { + val buffering = + SerializedBufferingStrategy(onCreateBufferFunction(), catalog, perStreamFlushHook) + val stream1 = AirbyteStreamNameNamespacePair(STREAM_1, "namespace1") + val stream2 = AirbyteStreamNameNamespacePair(STREAM_2, "namespace2") + val stream3 = AirbyteStreamNameNamespacePair(STREAM_3, null) + val stream4 = AirbyteStreamNameNamespacePair(STREAM_4, null) + // To test concurrent stream threshold, we are sending test messages to multiple streams + val message1 = generateMessage(stream1) + val message2 = generateMessage(stream2) + val message3 = generateMessage(stream3) + val message4 = generateMessage(stream4) + val message5 = generateMessage(stream1) + + Assertions.assertFalse(buffering.addRecord(stream1, message1).isPresent) + Assertions.assertFalse(buffering.addRecord(stream2, message2).isPresent) + Assertions.assertFalse(buffering.addRecord(stream3, message3).isPresent) + // Total and per stream Buffers still have room + Mockito.verify(perStreamFlushHook, Mockito.times(0)).accept(stream1, recordWriter1) + Mockito.verify(perStreamFlushHook, Mockito.times(0)).accept(stream2, recordWriter2) + Mockito.verify(perStreamFlushHook, Mockito.times(0)).accept(stream3, recordWriter3) + + // Since the concurrent stream threshold has been exceeded, all buffer streams are flush + val flushType = buffering.addRecord(stream4, message4) + Assertions.assertTrue(flushType.isPresent) + Assertions.assertEquals(flushType.get(), BufferFlushType.FLUSH_ALL) + + // Buffer limit reached for concurrent streams, flushing all streams + Mockito.verify(perStreamFlushHook, Mockito.times(1)).accept(stream1, recordWriter1) + Mockito.verify(perStreamFlushHook, Mockito.times(1)).accept(stream2, recordWriter2) + Mockito.verify(perStreamFlushHook, Mockito.times(1)).accept(stream3, recordWriter3) + Mockito.verify(perStreamFlushHook, Mockito.times(1)).accept(stream4, recordWriter4) + + Assertions.assertFalse(buffering.addRecord(stream1, message5).isPresent) + // force flush to terminate test + buffering.flushAllBuffers() + Mockito.verify(perStreamFlushHook, Mockito.times(2)).accept(stream1, recordWriter1) + Mockito.verify(perStreamFlushHook, Mockito.times(1)).accept(stream2, recordWriter2) + Mockito.verify(perStreamFlushHook, Mockito.times(1)).accept(stream3, recordWriter3) + Mockito.verify(perStreamFlushHook, Mockito.times(1)).accept(stream4, recordWriter4) + } + + @Test + fun testCreateBufferFailure() { + val buffering = + SerializedBufferingStrategy(onCreateBufferFunction(), catalog, perStreamFlushHook) + val stream = AirbyteStreamNameNamespacePair("unknown_stream", "namespace1") + Assertions.assertThrows(RuntimeException::class.java) { + buffering.addRecord(stream, generateMessage(stream)) + } + } + + private fun onCreateBufferFunction(): BufferCreateFunction { + return BufferCreateFunction { + stream: AirbyteStreamNameNamespacePair, + catalog: ConfiguredAirbyteCatalog? -> + when (stream.name) { + STREAM_1 -> recordWriter1 + STREAM_2 -> recordWriter2 + STREAM_3 -> recordWriter3 + STREAM_4 -> recordWriter4 + else -> null + } + } + } + + companion object { + private val MESSAGE_DATA: JsonNode = Jsons.deserialize("{ \"field1\": 10000 }") + private const val STREAM_1 = "stream1" + private const val STREAM_2 = "stream2" + private const val STREAM_3 = "stream3" + private const val STREAM_4 = "stream4" + + // we set the limit to hold at most 4 messages of 10b total + private const val MAX_TOTAL_BUFFER_SIZE_BYTES = 42L + + // we set the limit to hold at most 2 messages of 10b per stream + private const val MAX_PER_STREAM_BUFFER_SIZE_BYTES = 21L + + private fun generateMessage(stream: AirbyteStreamNameNamespacePair): AirbyteMessage { + return AirbyteMessage() + .withRecord( + AirbyteRecordMessage() + .withStream(stream.name) + .withNamespace(stream.namespace) + .withData(MESSAGE_DATA) + ) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/util/ConnectorExceptionUtilTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/util/ConnectorExceptionUtilTest.kt new file mode 100644 index 0000000000000..4d2671c3acf02 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/util/ConnectorExceptionUtilTest.kt @@ -0,0 +1,176 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.util + +import io.airbyte.commons.exceptions.ConfigErrorException +import io.airbyte.commons.exceptions.ConnectionErrorException +import java.sql.SQLException +import java.sql.SQLSyntaxErrorException +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class ConnectorExceptionUtilTest { + @get:Test + val isConfigErrorForConfigException: Unit + get() { + val configErrorException = ConfigErrorException(CONFIG_EXCEPTION_MESSAGE) + Assertions.assertTrue(ConnectorExceptionUtil.isConfigError(configErrorException)) + } + + @get:Test + val isConfigErrorForConnectionException: Unit + get() { + val connectionErrorException = ConnectionErrorException(CONFIG_EXCEPTION_MESSAGE) + Assertions.assertTrue(ConnectorExceptionUtil.isConfigError(connectionErrorException)) + } + + @get:Test + val isConfigErrorForRecoveryPSQLException: Unit + get() { + val recoveryPSQLException = SQLException(RECOVERY_EXCEPTION_MESSAGE) + Assertions.assertTrue(ConnectorExceptionUtil.isConfigError(recoveryPSQLException)) + } + + @get:Test + val isConfigErrorForUnknownColumnSQLSyntaxErrorException: Unit + get() { + val unknownColumnSQLSyntaxErrorException = + SQLSyntaxErrorException(UNKNOWN_COLUMN_SQL_EXCEPTION_MESSAGE) + Assertions.assertTrue( + ConnectorExceptionUtil.isConfigError(unknownColumnSQLSyntaxErrorException) + ) + } + + @get:Test + val isConfigErrorForCommonSQLException: Unit + get() { + val recoveryPSQLException = SQLException(COMMON_EXCEPTION_MESSAGE) + Assertions.assertFalse(ConnectorExceptionUtil.isConfigError(recoveryPSQLException)) + } + + @get:Test + val isConfigErrorForCommonException: Unit + get() { + Assertions.assertFalse(ConnectorExceptionUtil.isConfigError(Exception())) + } + + @get:Test + val displayMessageForConfigException: Unit + get() { + val configErrorException = ConfigErrorException(CONFIG_EXCEPTION_MESSAGE) + val actualDisplayMessage = + ConnectorExceptionUtil.getDisplayMessage(configErrorException) + Assertions.assertEquals(CONFIG_EXCEPTION_MESSAGE, actualDisplayMessage) + } + + @get:Test + val displayMessageForConnectionError: Unit + get() { + val testCode = "test code" + val errorCode = -1 + val connectionErrorException = + ConnectionErrorException(testCode, errorCode, CONFIG_EXCEPTION_MESSAGE, Exception()) + val actualDisplayMessage = + ConnectorExceptionUtil.getDisplayMessage(connectionErrorException) + Assertions.assertEquals( + String.format( + CONNECTION_ERROR_MESSAGE_TEMPLATE, + testCode, + errorCode, + CONFIG_EXCEPTION_MESSAGE + ), + actualDisplayMessage + ) + } + + @get:Test + val displayMessageForRecoveryException: Unit + get() { + val recoveryException = SQLException(RECOVERY_EXCEPTION_MESSAGE) + val actualDisplayMessage = ConnectorExceptionUtil.getDisplayMessage(recoveryException) + Assertions.assertEquals( + ConnectorExceptionUtil.RECOVERY_CONNECTION_ERROR_MESSAGE, + actualDisplayMessage + ) + } + + @get:Test + val displayMessageForUnknownSQLErrorException: Unit + get() { + val unknownColumnSQLSyntaxErrorException = + SQLSyntaxErrorException(UNKNOWN_COLUMN_SQL_EXCEPTION_MESSAGE) + val actualDisplayMessage = + ConnectorExceptionUtil.getDisplayMessage(unknownColumnSQLSyntaxErrorException) + Assertions.assertEquals(UNKNOWN_COLUMN_SQL_EXCEPTION_MESSAGE, actualDisplayMessage) + } + + @get:Test + val displayMessageForCommonException: Unit + get() { + val exception: Exception = SQLException(COMMON_EXCEPTION_MESSAGE) + val actualDisplayMessage = ConnectorExceptionUtil.getDisplayMessage(exception) + Assertions.assertEquals( + String.format( + ConnectorExceptionUtil.COMMON_EXCEPTION_MESSAGE_TEMPLATE, + COMMON_EXCEPTION_MESSAGE + ), + actualDisplayMessage + ) + } + + @get:Test + val rootConfigErrorFromConfigException: Unit + get() { + val configErrorException = ConfigErrorException(CONFIG_EXCEPTION_MESSAGE) + val exception = Exception(COMMON_EXCEPTION_MESSAGE, configErrorException) + + val actualRootConfigError = ConnectorExceptionUtil.getRootConfigError(exception) + Assertions.assertEquals(configErrorException, actualRootConfigError) + } + + @get:Test + val rootConfigErrorFromRecoverySQLException: Unit + get() { + val recoveryException = SQLException(RECOVERY_EXCEPTION_MESSAGE) + val runtimeException = RuntimeException(COMMON_EXCEPTION_MESSAGE, recoveryException) + val exception = Exception(runtimeException) + + val actualRootConfigError = ConnectorExceptionUtil.getRootConfigError(exception) + Assertions.assertEquals(recoveryException, actualRootConfigError) + } + + @get:Test + val rootConfigErrorFromUnknownSQLErrorException: Unit + get() { + val unknownSQLErrorException: SQLException = + SQLSyntaxErrorException(UNKNOWN_COLUMN_SQL_EXCEPTION_MESSAGE) + val runtimeException = + RuntimeException(COMMON_EXCEPTION_MESSAGE, unknownSQLErrorException) + val exception = Exception(runtimeException) + + val actualRootConfigError = ConnectorExceptionUtil.getRootConfigError(exception) + Assertions.assertEquals(unknownSQLErrorException, actualRootConfigError) + } + + @get:Test + val rootConfigErrorFromNonConfigException: Unit + get() { + val configErrorException = SQLException(CONFIG_EXCEPTION_MESSAGE) + val exception = Exception(COMMON_EXCEPTION_MESSAGE, configErrorException) + + val actualRootConfigError = ConnectorExceptionUtil.getRootConfigError(exception) + Assertions.assertEquals(exception, actualRootConfigError) + } + + companion object { + const val CONFIG_EXCEPTION_MESSAGE: String = "test message" + const val RECOVERY_EXCEPTION_MESSAGE: String = + "FATAL: terminating connection due to conflict with recovery" + const val COMMON_EXCEPTION_MESSAGE: String = "something happens with connection" + const val CONNECTION_ERROR_MESSAGE_TEMPLATE: String = + "State code: %s; Error code: %s; Message: %s" + const val UNKNOWN_COLUMN_SQL_EXCEPTION_MESSAGE: String = + "Unknown column 'table.column' in 'field list'" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/util/concurrent/ConcurrentStreamConsumerTest.kt b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/util/concurrent/ConcurrentStreamConsumerTest.kt new file mode 100644 index 0000000000000..98f8676ce555c --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/kotlin/io/airbyte/cdk/integrations/util/concurrent/ConcurrentStreamConsumerTest.kt @@ -0,0 +1,123 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.util.concurrent + +import com.fasterxml.jackson.databind.node.IntNode +import com.google.common.collect.Lists +import io.airbyte.commons.util.AutoCloseableIterator +import io.airbyte.commons.util.AutoCloseableIterators +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.AirbyteRecordMessage +import java.util.List +import java.util.function.Consumer +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.mockito.ArgumentMatchers +import org.mockito.Mockito +import org.mockito.kotlin.any +import org.mockito.kotlin.mock + +/** Test suite for the [ConcurrentStreamConsumer] class. */ +internal class ConcurrentStreamConsumerTest { + @Test + fun testAcceptMessage() { + val stream: AutoCloseableIterator = mock() + val streamConsumer: Consumer> = mock() + + val concurrentStreamConsumer = ConcurrentStreamConsumer(streamConsumer, 1) + + Assertions.assertDoesNotThrow { concurrentStreamConsumer.accept(List.of(stream)) } + + Mockito.verify(streamConsumer, Mockito.times(1)).accept(stream) + } + + @Test + fun testAcceptMessageWithException() { + val stream: AutoCloseableIterator = mock() + val streamConsumer: Consumer> = mock() + val e: Exception = NullPointerException("test") + + Mockito.doThrow(e).`when`(streamConsumer).accept(ArgumentMatchers.any()) + + val concurrentStreamConsumer = ConcurrentStreamConsumer(streamConsumer, 1) + + Assertions.assertDoesNotThrow { concurrentStreamConsumer.accept(List.of(stream)) } + + Mockito.verify(streamConsumer, Mockito.times(1)).accept(stream) + Assertions.assertTrue(concurrentStreamConsumer.exception.isPresent) + Assertions.assertEquals(e, concurrentStreamConsumer.exception.get()) + Assertions.assertEquals(1, concurrentStreamConsumer.getExceptions().size) + Assertions.assertTrue(concurrentStreamConsumer.getExceptions().contains(e)) + } + + @Test + fun testAcceptMessageWithMultipleExceptions() { + val stream1: AutoCloseableIterator = mock() + val stream2: AutoCloseableIterator = mock() + val stream3: AutoCloseableIterator = mock() + val streamConsumer: Consumer> = mock() + val e1: Exception = NullPointerException("test1") + val e2: Exception = NullPointerException("test2") + val e3: Exception = NullPointerException("test3") + + Mockito.doThrow(e1).`when`(streamConsumer).accept(stream1) + Mockito.doThrow(e2).`when`(streamConsumer).accept(stream2) + Mockito.doThrow(e3).`when`(streamConsumer).accept(stream3) + + val concurrentStreamConsumer = ConcurrentStreamConsumer(streamConsumer, 1) + + Assertions.assertDoesNotThrow { + concurrentStreamConsumer.accept(List.of(stream1, stream2, stream3)) + } + + Mockito.verify(streamConsumer, Mockito.times(3)).accept(any()) + Assertions.assertTrue(concurrentStreamConsumer.exception.isPresent) + Assertions.assertEquals(e1, concurrentStreamConsumer.exception.get()) + Assertions.assertEquals(3, concurrentStreamConsumer.getExceptions().size) + Assertions.assertTrue(concurrentStreamConsumer.getExceptions().contains(e1)) + Assertions.assertTrue(concurrentStreamConsumer.getExceptions().contains(e2)) + Assertions.assertTrue(concurrentStreamConsumer.getExceptions().contains(e3)) + } + + @Test + fun testMoreStreamsThanAvailableThreads() { + val baseData = listOf(2, 4, 6, 8, 10, 12, 14, 16, 18, 20) + val streams: MutableList> = ArrayList() + for (i in 0..19) { + val airbyteStreamNameNamespacePair = + AirbyteStreamNameNamespacePair(String.format("%s_%d", NAME, i), NAMESPACE) + val messages: MutableList = ArrayList() + for (d in baseData) { + val airbyteMessage = Mockito.mock(AirbyteMessage::class.java) + val recordMessage = Mockito.mock(AirbyteRecordMessage::class.java) + Mockito.`when`(recordMessage.data).thenReturn(IntNode(d * i)) + Mockito.`when`(airbyteMessage.record).thenReturn(recordMessage) + messages.add(airbyteMessage) + } + streams.add( + AutoCloseableIterators.fromIterator( + messages.iterator(), + airbyteStreamNameNamespacePair + ) + ) + } + val streamConsumer: Consumer> = mock() + + val concurrentStreamConsumer = ConcurrentStreamConsumer(streamConsumer, streams.size) + val partitionSize = concurrentStreamConsumer.parallelism + val partitions = Lists.partition(streams.stream().toList(), partitionSize) + + for (partition in partitions) { + Assertions.assertDoesNotThrow { concurrentStreamConsumer.accept(partition) } + } + + Mockito.verify(streamConsumer, Mockito.times(streams.size)).accept(any()) + } + + companion object { + private const val NAME = "name" + private const val NAMESPACE = "namespace" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/extensions/LoggingInvocationInterceptor.java b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/extensions/LoggingInvocationInterceptor.java deleted file mode 100644 index a62788dc1ad44..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/extensions/LoggingInvocationInterceptor.java +++ /dev/null @@ -1,291 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.extensions; - -import static java.util.concurrent.TimeUnit.DAYS; -import static java.util.concurrent.TimeUnit.HOURS; -import static java.util.concurrent.TimeUnit.MICROSECONDS; -import static java.util.concurrent.TimeUnit.MILLISECONDS; -import static java.util.concurrent.TimeUnit.MINUTES; -import static java.util.concurrent.TimeUnit.NANOSECONDS; -import static java.util.concurrent.TimeUnit.SECONDS; -import static java.util.regex.Pattern.CASE_INSENSITIVE; -import static java.util.regex.Pattern.UNICODE_CASE; - -import java.lang.reflect.Constructor; -import java.lang.reflect.InvocationHandler; -import java.lang.reflect.Method; -import java.lang.reflect.Modifier; -import java.lang.reflect.Proxy; -import java.time.Duration; -import java.time.Instant; -import java.time.format.DateTimeParseException; -import java.util.Collections; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.Timer; -import java.util.TimerTask; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; -import java.util.regex.Matcher; -import java.util.regex.Pattern; -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.lang3.exception.ExceptionUtils; -import org.apache.commons.lang3.time.DurationFormatUtils; -import org.junit.jupiter.api.Timeout; -import org.junit.jupiter.api.extension.DynamicTestInvocationContext; -import org.junit.jupiter.api.extension.ExtensionContext; -import org.junit.jupiter.api.extension.InvocationInterceptor; -import org.junit.jupiter.api.extension.ReflectiveInvocationContext; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * By default, junit only output logs to the console, and nothing makes it into log4j logs. This - * class fixes that by using the interceptor facility to print progress and timing information. This - * allows us to have junit loglines in our test logs. This is instanciated via Java's - * ServiceLoader The declaration can be found in - * resources/META-INF/services/org.junit.jupiter.api.extension.Extension - */ -public class LoggingInvocationInterceptor implements InvocationInterceptor { - - private static final Logger LOGGER = LoggerFactory.getLogger(LoggingInvocationInterceptor.class); - private static final String JUNIT_METHOD_EXECUTION_TIMEOUT_PROPERTY_NAME = "JunitMethodExecutionTimeout"; - - private static final class LoggingInvocationInterceptorHandler implements InvocationHandler { - - private static final Pattern methodPattern = Pattern.compile("intercept(.*)Method"); - - @Override - @SuppressWarnings("unchecked") - public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { - if (LoggingInvocationInterceptor.class.getDeclaredMethod(method.getName(), Invocation.class, ReflectiveInvocationContext.class, - ExtensionContext.class) == null) { - LOGGER.error("Junit LoggingInvocationInterceptor executing unknown interception point {}", method.getName()); - return method.invoke(proxy, args); - } - var invocation = (Invocation) args[0]; - var invocationContext = (ReflectiveInvocationContext) args[1]; - var extensionContext = (ExtensionContext) args[2]; - String methodName = method.getName(); - String logLineSuffix; - Matcher methodMatcher = methodPattern.matcher(methodName); - if (methodName.equals("interceptDynamicTest")) { - logLineSuffix = "execution of DynamicTest %s".formatted(extensionContext.getDisplayName()); - } else if (methodName.equals("interceptTestClassConstructor")) { - logLineSuffix = "instance creation for %s".formatted(invocationContext.getTargetClass()); - } else if (methodMatcher.matches()) { - String interceptedEvent = methodMatcher.group(1); - logLineSuffix = "execution of @%s method %s.%s".formatted(interceptedEvent, - invocationContext.getExecutable().getDeclaringClass().getSimpleName(), - invocationContext.getExecutable().getName()); - } else { - logLineSuffix = "execution of unknown intercepted call %s".formatted(methodName); - } - Thread currentThread = Thread.currentThread(); - TimeoutInteruptor timeoutTask = new TimeoutInteruptor(currentThread); - Instant start = Instant.now(); - try { - final Object retVal; - Duration timeout = getTimeout(invocationContext); - if (timeout != null) { - LOGGER.info("Junit starting {} with timeout of {}", logLineSuffix, DurationFormatUtils.formatDurationWords(timeout.toMillis(), true, true)); - new Timer("TimeoutTimer-" + currentThread.getName(), true).schedule(timeoutTask, timeout.toMillis()); - } else { - LOGGER.warn("Junit starting {} with no timeout", logLineSuffix); - } - retVal = invocation.proceed(); - long elapsedMs = Duration.between(start, Instant.now()).toMillis(); - LOGGER.info("Junit completed {} in {}", logLineSuffix, DurationFormatUtils.formatDurationWords(elapsedMs, true, true)); - return retVal; - } catch (Throwable t) { - timeoutTask.cancel(); - long elapsedMs = Duration.between(start, Instant.now()).toMillis(); - if (timeoutTask.wasTriggered) { - Throwable t1 = t; - t = new TimeoutException( - "Execution was cancelled after %s. If you think your test should be given more time to complete, you can use the @Timeout annotation. If all the test of a connector are slow, " - + " you can override the property 'JunitMethodExecutionTimeout' in your gradle.properties." - .formatted(DurationFormatUtils.formatDurationWords(elapsedMs, true, true))); - t.initCause(t1); - } - boolean belowCurrentCall = false; - List stackToDisplay = new LinkedList<>(); - for (String stackString : ExceptionUtils.getStackFrames(t)) { - if (stackString.startsWith("\tat ")) { - if (!belowCurrentCall && stackString.contains(LoggingInvocationInterceptor.class.getCanonicalName())) { - belowCurrentCall = true; - } - } else { - belowCurrentCall = false; - } - if (!belowCurrentCall) { - stackToDisplay.add(stackString); - } - } - String stackTrace = StringUtils.join(stackToDisplay, "\n "); - LOGGER.error("Junit exception throw during {} after {}:\n{}", logLineSuffix, DurationFormatUtils.formatDurationWords(elapsedMs, true, true), - stackTrace); - throw t; - } finally { - timeoutTask.cancel(); - } - } - - private static class TimeoutInteruptor extends TimerTask { - - private final Thread parentThread; - volatile boolean wasTriggered = false; - - TimeoutInteruptor(Thread parentThread) { - this.parentThread = parentThread; - } - - @Override - public void run() { - wasTriggered = true; - parentThread.interrupt(); - } - - public boolean cancel() { - return super.cancel(); - } - - } - - private static final Pattern PATTERN = Pattern.compile("([1-9]\\d*) *((?:[nμm]?s)|m|h|d)?", - CASE_INSENSITIVE | UNICODE_CASE); - private static final Map UNITS_BY_ABBREVIATION; - - static { - Map unitsByAbbreviation = new HashMap<>(); - unitsByAbbreviation.put("ns", NANOSECONDS); - unitsByAbbreviation.put("μs", MICROSECONDS); - unitsByAbbreviation.put("ms", MILLISECONDS); - unitsByAbbreviation.put("s", SECONDS); - unitsByAbbreviation.put("m", MINUTES); - unitsByAbbreviation.put("h", HOURS); - unitsByAbbreviation.put("d", DAYS); - UNITS_BY_ABBREVIATION = Collections.unmodifiableMap(unitsByAbbreviation); - } - - static Duration parseDuration(String text) throws DateTimeParseException { - Matcher matcher = PATTERN.matcher(text.trim()); - if (matcher.matches()) { - long value = Long.parseLong(matcher.group(1)); - String unitAbbreviation = matcher.group(2); - TimeUnit unit = unitAbbreviation == null ? SECONDS - : UNITS_BY_ABBREVIATION.get(unitAbbreviation.toLowerCase(Locale.ENGLISH)); - return Duration.ofSeconds(unit.toSeconds(value)); - } - throw new DateTimeParseException("Timeout duration is not in the expected format ( [ns|μs|ms|s|m|h|d])", - text, 0); - } - - private static Duration getTimeout(ReflectiveInvocationContext invocationContext) { - Duration timeout = null; - if (invocationContext.getExecutable()instanceof Method m) { - Timeout timeoutAnnotation = m.getAnnotation(Timeout.class); - if (timeoutAnnotation == null) { - timeoutAnnotation = invocationContext.getTargetClass().getAnnotation(Timeout.class); - } - if (timeoutAnnotation != null) { - timeout = Duration.ofMillis(timeoutAnnotation.unit().toMillis(timeoutAnnotation.value())); - } - } - if (timeout == null) { - timeout = parseDuration(System.getProperty(JUNIT_METHOD_EXECUTION_TIMEOUT_PROPERTY_NAME)); - } - return timeout; - } - - } - - private final InvocationInterceptor proxy = (InvocationInterceptor) Proxy.newProxyInstance( - getClass().getClassLoader(), - new Class[] {InvocationInterceptor.class}, - new LoggingInvocationInterceptorHandler()); - - @Override - public void interceptAfterAllMethod(Invocation invocation, - ReflectiveInvocationContext invocationContext, - ExtensionContext extensionContext) - throws Throwable { - proxy.interceptAfterAllMethod(invocation, invocationContext, extensionContext); - } - - @Override - public void interceptAfterEachMethod(Invocation invocation, - ReflectiveInvocationContext invocationContext, - ExtensionContext extensionContext) - throws Throwable { - proxy.interceptAfterEachMethod(invocation, invocationContext, extensionContext); - } - - @Override - public void interceptBeforeAllMethod(Invocation invocation, - ReflectiveInvocationContext invocationContext, - ExtensionContext extensionContext) - throws Throwable { - proxy.interceptBeforeAllMethod(invocation, invocationContext, extensionContext); - } - - @Override - public void interceptBeforeEachMethod(Invocation invocation, - ReflectiveInvocationContext invocationContext, - ExtensionContext extensionContext) - throws Throwable { - proxy.interceptBeforeEachMethod(invocation, invocationContext, extensionContext); - } - - @Override - public void interceptDynamicTest(Invocation invocation, - DynamicTestInvocationContext invocationContext, - ExtensionContext extensionContext) - throws Throwable { - proxy.interceptDynamicTest(invocation, invocationContext, extensionContext); - } - - @Override - public void interceptTestMethod(Invocation invocation, - ReflectiveInvocationContext invocationContext, - ExtensionContext extensionContext) - throws Throwable { - if (!Modifier.isPublic(invocationContext.getExecutable().getModifiers())) { - LOGGER.warn("Junit method {}.{} is not declared as public", invocationContext.getExecutable().getDeclaringClass().getCanonicalName(), - invocationContext.getExecutable().getName()); - } - proxy.interceptTestMethod(invocation, invocationContext, extensionContext); - } - - @Override - public void interceptTestTemplateMethod(Invocation invocation, - ReflectiveInvocationContext invocationContext, - ExtensionContext extensionContext) - throws Throwable { - proxy.interceptTestTemplateMethod(invocation, invocationContext, extensionContext); - } - - @Override - public T interceptTestFactoryMethod(Invocation invocation, - ReflectiveInvocationContext invocationContext, - ExtensionContext extensionContext) - throws Throwable { - return proxy.interceptTestFactoryMethod(invocation, invocationContext, extensionContext); - } - - @Override - public T interceptTestClassConstructor(Invocation invocation, - ReflectiveInvocationContext> invocationContext, - ExtensionContext extensionContext) - throws Throwable { - return proxy.interceptTestClassConstructor(invocation, invocationContext, extensionContext); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/integrations/base/ssh/SshBastionContainer.java b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/integrations/base/ssh/SshBastionContainer.java deleted file mode 100644 index 1770dca4905e8..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/integrations/base/ssh/SshBastionContainer.java +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.base.ssh; - -import static io.airbyte.cdk.integrations.base.ssh.SshTunnel.TunnelMethod.SSH_KEY_AUTH; -import static io.airbyte.cdk.integrations.base.ssh.SshTunnel.TunnelMethod.SSH_PASSWORD_AUTH; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.cdk.integrations.util.HostPortResolver; -import io.airbyte.cdk.testutils.ContainerFactory; -import io.airbyte.commons.json.Jsons; -import java.io.IOException; -import java.util.List; -import java.util.Objects; -import java.util.function.Consumer; -import org.apache.commons.lang3.tuple.ImmutablePair; -import org.testcontainers.containers.Container; -import org.testcontainers.containers.GenericContainer; -import org.testcontainers.containers.JdbcDatabaseContainer; -import org.testcontainers.containers.Network; -import org.testcontainers.images.builder.ImageFromDockerfile; -import org.testcontainers.utility.DockerImageName; - -public class SshBastionContainer implements AutoCloseable { - - public static class SshBastionContainerFactory extends ContainerFactory> { - - @Override - protected GenericContainer createNewContainer(DockerImageName imageName) { - var container = new GenericContainer(new ImageFromDockerfile("bastion-test") - .withFileFromClasspath("Dockerfile", "bastion/Dockerfile")) - .withExposedPorts(22); - return container; - } - - public GenericContainer exclusive(final Network network) { - Consumer> imageModifier = c -> { - c.withNetwork(network); - }; - var container = super.exclusive("bastion-test", new NamedContainerModifierImpl<>("withNetwork", imageModifier)); - return container; - } - - } - - private static final SshBastionContainerFactory factory = new SshBastionContainerFactory(); - - private static final String SSH_USER = "sshuser"; - private static final String SSH_PASSWORD = "secret"; - private GenericContainer bastion; - - public void initAndStartBastion(final Network network) { - bastion = factory.exclusive(network); - bastion.start(); - } - - public JsonNode getTunnelMethod(final SshTunnel.TunnelMethod tunnelMethod, - final boolean innerAddress) - throws IOException, InterruptedException { - final var containerAddress = innerAddress ? getInnerContainerAddress(bastion) : getOuterContainerAddress(bastion); - return Jsons.jsonNode(ImmutableMap.builder() - .put("tunnel_host", - Objects.requireNonNull(containerAddress.left)) - .put("tunnel_method", tunnelMethod) - .put("tunnel_port", containerAddress.right) - .put("tunnel_user", SSH_USER) - .put("tunnel_user_password", tunnelMethod.equals(SSH_PASSWORD_AUTH) ? SSH_PASSWORD : "") - .put("ssh_key", tunnelMethod.equals(SSH_KEY_AUTH) ? bastion.execInContainer("cat", "var/bastion/id_rsa").getStdout() : "") - .build()); - } - - public JsonNode getTunnelConfig(final SshTunnel.TunnelMethod tunnelMethod, - final ImmutableMap.Builder builderWithSchema, - final boolean innerAddress) - throws IOException, InterruptedException { - return Jsons.jsonNode(builderWithSchema - .put("tunnel_method", getTunnelMethod(tunnelMethod, innerAddress)) - .build()); - } - - public ImmutableMap.Builder getBasicDbConfigBuider(final JdbcDatabaseContainer db) { - return getBasicDbConfigBuider(db, db.getDatabaseName()); - } - - public ImmutableMap.Builder getBasicDbConfigBuider(final JdbcDatabaseContainer db, final List schemas) { - return getBasicDbConfigBuider(db, db.getDatabaseName()).put("schemas", schemas); - } - - public ImmutableMap.Builder getBasicDbConfigBuider(final JdbcDatabaseContainer db, final String schemaName) { - return ImmutableMap.builder() - .put("host", Objects.requireNonNull(HostPortResolver.resolveHost(db))) - .put("username", db.getUsername()) - .put("password", db.getPassword()) - .put("port", HostPortResolver.resolvePort(db)) - .put("database", schemaName) - .put("ssl", false); - } - - public void stopAndCloseContainers(final JdbcDatabaseContainer db) { - bastion.stop(); - bastion.close(); - db.stop(); - db.close(); - } - - public void stopAndClose() { - bastion.close(); - } - - @Override - public void close() { - stopAndClose(); - } - - public GenericContainer getContainer() { - return bastion; - } - - /** - * Returns the inner docker network ip address and port of a container. This can be used to reach a - * container from another container running on the same network - * - * @param container container - * @return a pair of host and port - */ - public static ImmutablePair getInnerContainerAddress(final Container container) { - return ImmutablePair.of( - container.getContainerInfo().getNetworkSettings().getNetworks().entrySet().stream().findFirst().get().getValue().getIpAddress(), - (Integer) container.getExposedPorts().stream().findFirst().get()); - } - - /** - * Returns the outer docker network ip address and port of a container. This can be used to reach a - * container from the host machine - * - * @param container container - * @return a pair of host and port - */ - public static ImmutablePair getOuterContainerAddress(final Container container) { - return ImmutablePair.of( - container.getHost(), - container.getFirstMappedPort()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/integrations/util/HostPortResolver.java b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/integrations/util/HostPortResolver.java deleted file mode 100644 index 4d29d36c98484..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/integrations/util/HostPortResolver.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.util; - -import java.util.Objects; -import org.testcontainers.containers.GenericContainer; - -public class HostPortResolver { - - public static String resolveHost(GenericContainer container) { - return getIpAddress(container); - } - - public static int resolvePort(GenericContainer container) { - return (Integer) container.getExposedPorts().stream().findFirst().get(); - } - - public static String resolveIpAddress(GenericContainer container) { - return getIpAddress(container); - } - - private static String getIpAddress(GenericContainer container) { - return Objects.requireNonNull(container.getContainerInfo() - .getNetworkSettings() - .getNetworks() - .entrySet().stream() - .findFirst() - .get().getValue().getIpAddress()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/testutils/ContainerFactory.java b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/testutils/ContainerFactory.java deleted file mode 100644 index 0cc6977641061..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/testutils/ContainerFactory.java +++ /dev/null @@ -1,202 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.testutils; - -import com.google.common.collect.Lists; -import io.airbyte.commons.logging.LoggingHelper; -import io.airbyte.commons.logging.MdcScope; -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.function.Consumer; -import java.util.function.Supplier; -import java.util.stream.Stream; -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.testcontainers.containers.GenericContainer; -import org.testcontainers.containers.output.OutputFrame; -import org.testcontainers.containers.output.Slf4jLogConsumer; -import org.testcontainers.utility.DockerImageName; - -/** - * ContainerFactory is the companion to {@link TestDatabase} and provides it with suitable - * testcontainer instances. - */ -public abstract class ContainerFactory> { - - static private final Logger LOGGER = LoggerFactory.getLogger(ContainerFactory.class); - - private record ContainerKey> (Class clazz, - DockerImageName imageName, - List> methods) {} - - ; - - private static class ContainerOrException { - - private final Supplier> containerSupplier; - private volatile RuntimeException _exception = null; - private volatile GenericContainer _container = null; - - ContainerOrException(Supplier> containerSupplier) { - this.containerSupplier = containerSupplier; - } - - GenericContainer container() { - if (_exception == null && _container == null) { - synchronized (this) { - if (_container == null && _exception == null) { - try { - _container = containerSupplier.get(); - if (_container == null) { - throw new IllegalStateException("testcontainer instance was not constructed"); - } - } catch (RuntimeException e) { - _exception = e; - } - } - } - } - if (_exception != null) { - throw _exception; - } - return _container; - } - - } - - private static final ConcurrentMap, ContainerOrException> SHARED_CONTAINERS = new ConcurrentHashMap<>(); - private static final AtomicInteger containerId = new AtomicInteger(0); - - private final MdcScope.Builder getTestContainerLogMdcBuilder(DockerImageName imageName, - List> containerModifiers) { - return new MdcScope.Builder() - .setLogPrefix("testcontainer %s (%s[%s]):".formatted(containerId.incrementAndGet(), imageName, StringUtils.join(containerModifiers, ","))) - .setPrefixColor(LoggingHelper.Color.RED_BACKGROUND); - } - - /** - * Creates a new, unshared testcontainer instance. This usually wraps the default constructor for - * the testcontainer type. - */ - protected abstract C createNewContainer(DockerImageName imageName); - - /** - * Returns a shared instance of the testcontainer. - * - * @Deprecated use shared(String, NamedContainerModifier) instead - */ - @Deprecated - public final C shared(String imageName, String... methods) { - return shared(imageName, - Stream.of(methods).map(n -> new NamedContainerModifierImpl(n, resolveModifierByName(n))).toList()); - } - - public final C shared(String imageName, NamedContainerModifier... namedContainerModifiers) { - return shared(imageName, List.of(namedContainerModifiers)); - } - - public final C shared(String imageName) { - return shared(imageName, new ArrayList<>()); - } - - public final C shared(String imageName, List> namedContainerModifiers) { - final ContainerKey containerKey = new ContainerKey<>(getClass(), DockerImageName.parse(imageName), namedContainerModifiers); - // We deliberately avoid creating the container itself eagerly during the evaluation of the map - // value. - // Container creation can be exceedingly slow. - // Furthermore, we need to handle exceptions raised during container creation. - ContainerOrException containerOrError = SHARED_CONTAINERS.computeIfAbsent(containerKey, - key -> new ContainerOrException(() -> createAndStartContainer(key.imageName(), ((ContainerKey) key).methods()))); - // Instead, the container creation (if applicable) is deferred to here. - return (C) containerOrError.container(); - } - - /** - * Returns an exclusive instance of the testcontainer. - * - * @Deprecated use exclusive(String, NamedContainerModifier) instead - */ - @SuppressWarnings("unchecked") - @Deprecated - public final C exclusive(String imageName, String... methods) { - return exclusive(imageName, Stream.of(methods).map(n -> new NamedContainerModifierImpl(n, resolveModifierByName(n))).toList()); - } - - public final C exclusive(String imageName) { - return exclusive(imageName, new ArrayList<>()); - } - - public final C exclusive(String imageName, NamedContainerModifier... namedContainerModifiers) { - return exclusive(imageName, List.of(namedContainerModifiers)); - } - - public final C exclusive(String imageName, List> namedContainerModifiers) { - return (C) createAndStartContainer(DockerImageName.parse(imageName), namedContainerModifiers); - } - - public interface NamedContainerModifier> { - - String name(); - - Consumer modifier(); - - } - - public record NamedContainerModifierImpl> (String name, Consumer method) implements NamedContainerModifier { - - public String name() { - return name; - } - - public Consumer modifier() { - return method; - } - - } - - private Consumer resolveModifierByName(String methodName) { - final ContainerFactory self = this; - Consumer resolvedMethod = c -> { - try { - Class containerClass = c.getClass(); - Method method = self.getClass().getMethod(methodName, containerClass); - method.invoke(self, c); - } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) { - throw new RuntimeException(e); - } - }; - return resolvedMethod; - } - - private C createAndStartContainer(DockerImageName imageName, List> namedContainerModifiers) { - LOGGER.info("Creating new container based on {} with {}.", imageName, Lists.transform(namedContainerModifiers, c -> c.name())); - C container = createNewContainer(imageName); - final var logConsumer = new Slf4jLogConsumer(LOGGER) { - - public void accept(OutputFrame frame) { - if (frame.getUtf8StringWithoutLineEnding().trim().length() > 0) { - super.accept(frame); - } - } - - }; - getTestContainerLogMdcBuilder(imageName, namedContainerModifiers).produceMappings(logConsumer::withMdc); - container.withLogConsumer(logConsumer); - for (NamedContainerModifier resolvedNamedContainerModifier : namedContainerModifiers) { - LOGGER.info("Calling {} in {} on new container based on {}.", - resolvedNamedContainerModifier.name(), getClass().getName(), imageName); - resolvedNamedContainerModifier.modifier().accept(container); - } - container.start(); - return container; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/testutils/DatabaseConnectionHelper.java b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/testutils/DatabaseConnectionHelper.java deleted file mode 100644 index da503eb21dfbb..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/testutils/DatabaseConnectionHelper.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.testutils; - -import io.airbyte.cdk.db.factory.DSLContextFactory; -import io.airbyte.cdk.db.factory.DataSourceFactory; -import javax.sql.DataSource; -import org.jooq.DSLContext; -import org.jooq.SQLDialect; -import org.testcontainers.containers.JdbcDatabaseContainer; - -/** - * Helper class that facilitates the creation of database connection objects for testing purposes. - */ -public class DatabaseConnectionHelper { - - /** - * Constructs a new {@link DataSource} using the provided configuration. - * - * @param container A JDBC Test Container instance. - * @return The configured {@link DataSource}. - */ - public static DataSource createDataSource(final JdbcDatabaseContainer container) { - return DataSourceFactory.create(container.getUsername(), - container.getPassword(), - container.getDriverClassName(), - container.getJdbcUrl()); - } - - /** - * Constructs a configured {@link DSLContext} instance using the provided configuration. - * - * @param container A JDBC Test Container instance. - * @param dialect The SQL dialect to use with objects created from this context. - * @return The configured {@link DSLContext}. - */ - public static DSLContext createDslContext(final JdbcDatabaseContainer container, final SQLDialect dialect) { - return DSLContextFactory.create( - container.getUsername(), - container.getPassword(), - container.getDriverClassName(), - container.getJdbcUrl(), - dialect); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/testutils/NonContainer.java b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/testutils/NonContainer.java deleted file mode 100644 index badf004d4f990..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/testutils/NonContainer.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.testutils; - -import org.testcontainers.containers.JdbcDatabaseContainer; - -/** - * This is used when a source (such as Snowflake) relies on an always-on resource and therefore - * doesn't need an actual container. compatible - */ -public class NonContainer extends JdbcDatabaseContainer { - - private final String username; - private final String password; - private final String jdbcUrl; - - private final String driverClassName; - - public NonContainer(final String userName, - final String password, - final String jdbcUrl, - final String driverClassName, - final String dockerImageName) { - super(dockerImageName); - this.username = userName; - this.password = password; - this.jdbcUrl = jdbcUrl; - this.driverClassName = driverClassName; - } - - @Override - public String getDriverClassName() { - return driverClassName; - } - - @Override - public String getJdbcUrl() { - return jdbcUrl; - } - - @Override - public String getUsername() { - return username; - } - - @Override - public String getPassword() { - return password; - } - - @Override - protected String getTestQueryString() { - return "SELECT 1"; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/testutils/TestDatabase.java b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/testutils/TestDatabase.java deleted file mode 100644 index 3ee1d0e9b0d1c..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/testutils/TestDatabase.java +++ /dev/null @@ -1,321 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.testutils; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.cdk.db.ContextQueryFunction; -import io.airbyte.cdk.db.Database; -import io.airbyte.cdk.db.factory.DSLContextFactory; -import io.airbyte.cdk.db.factory.DataSourceFactory; -import io.airbyte.cdk.db.factory.DatabaseDriver; -import io.airbyte.cdk.db.jdbc.JdbcUtils; -import io.airbyte.cdk.integrations.JdbcConnector; -import io.airbyte.cdk.integrations.util.HostPortResolver; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.string.Strings; -import java.io.IOException; -import java.io.UncheckedIOException; -import java.sql.SQLException; -import java.text.DateFormat; -import java.text.SimpleDateFormat; -import java.time.Duration; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.stream.Stream; -import javax.sql.DataSource; -import org.jooq.DSLContext; -import org.jooq.SQLDialect; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.testcontainers.containers.JdbcDatabaseContainer; - -/** - * TestDatabase provides a convenient pattern for interacting with databases when testing SQL - * database sources. The basic idea is to share the same database testcontainer instance for all - * tests and to use SQL constructs such as DATABASE and USER to isolate each test case's state. - * - * @param the type of the backing testcontainer. - * @param itself - * @param the type of the object returned by {@link #configBuilder()} - */ -abstract public class TestDatabase, T extends TestDatabase, B extends TestDatabase.ConfigBuilder> - implements AutoCloseable { - - static private final Logger LOGGER = LoggerFactory.getLogger(TestDatabase.class); - - final private C container; - final private String suffix; - final private ArrayList cleanupSQL = new ArrayList<>(); - final private Map connectionProperties = new HashMap<>(); - - private volatile DataSource dataSource; - private volatile DSLContext dslContext; - - protected final int databaseId; - private static final AtomicInteger nextDatabaseId = new AtomicInteger(0); - - protected final int containerId; - private static final AtomicInteger nextContainerId = new AtomicInteger(0); - private static final Map containerUidToId = new ConcurrentHashMap<>(); - - @SuppressWarnings("this-escape") - protected TestDatabase(C container) { - this.container = container; - this.suffix = Strings.addRandomSuffix("", "_", 10); - this.databaseId = nextDatabaseId.getAndIncrement(); - this.containerId = containerUidToId.computeIfAbsent(container.getContainerId(), k -> nextContainerId.getAndIncrement()); - LOGGER.info(formatLogLine("creating database " + getDatabaseName())); - } - - private final DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS"); - - protected String formatLogLine(String logLine) { - String retVal = "TestDatabase databaseId=" + databaseId + ", containerId=" + containerId + " - " + logLine; - return retVal; - } - - @SuppressWarnings("unchecked") - protected T self() { - return (T) this; - } - - /** - * Adds a key-value pair to the JDBC URL's query parameters. - */ - public T withConnectionProperty(String key, String value) { - if (isInitialized()) { - throw new RuntimeException("TestDatabase instance is already initialized"); - } - connectionProperties.put(key, value); - return self(); - } - - /** - * Enqueues a SQL statement to be executed when this object is closed. - */ - public T onClose(String fmtSql, Object... fmtArgs) { - cleanupSQL.add(String.format(fmtSql, fmtArgs)); - return self(); - } - - /** - * Executes a SQL statement after calling String.format on the arguments. - */ - public T with(String fmtSql, Object... fmtArgs) { - execSQL(Stream.of(String.format(fmtSql, fmtArgs))); - return self(); - } - - /** - * Executes SQL statements as root to provide the necessary isolation for the lifetime of this - * object. This typically entails at least a CREATE DATABASE and a CREATE USER. Also Initializes the - * {@link DataSource} and {@link DSLContext} owned by this object. - */ - public T initialized() { - inContainerBootstrapCmd().forEach(this::execInContainer); - this.dataSource = DataSourceFactory.create( - getUserName(), - getPassword(), - getDatabaseDriver().getDriverClassName(), - getJdbcUrl(), - connectionProperties, - JdbcConnector.getConnectionTimeout(connectionProperties, getDatabaseDriver().getDriverClassName())); - this.dslContext = DSLContextFactory.create(dataSource, getSqlDialect()); - return self(); - } - - final public boolean isInitialized() { - return dslContext != null; - } - - abstract protected Stream> inContainerBootstrapCmd(); - - abstract protected Stream inContainerUndoBootstrapCmd(); - - abstract public DatabaseDriver getDatabaseDriver(); - - abstract public SQLDialect getSqlDialect(); - - final public C getContainer() { - return container; - } - - public String withNamespace(String name) { - return name + suffix; - } - - public String getDatabaseName() { - return withNamespace("db"); - } - - public String getUserName() { - return withNamespace("user"); - } - - public String getPassword() { - return "password"; - } - - public DataSource getDataSource() { - if (!isInitialized()) { - throw new RuntimeException("TestDatabase instance is not yet initialized"); - } - return dataSource; - } - - final public DSLContext getDslContext() { - if (!isInitialized()) { - throw new RuntimeException("TestDatabase instance is not yet initialized"); - } - return dslContext; - } - - public String getJdbcUrl() { - return String.format( - getDatabaseDriver().getUrlFormatString(), - getContainer().getHost(), - getContainer().getFirstMappedPort(), - getDatabaseName()); - } - - public Database getDatabase() { - return new Database(getDslContext()); - } - - protected void execSQL(final Stream sql) { - try { - getDatabase().query(ctx -> { - sql.forEach(statement -> { - LOGGER.info("executing SQL statement {}", statement); - ctx.execute(statement); - }); - return null; - }); - } catch (SQLException e) { - throw new RuntimeException(e); - } - } - - protected void execInContainer(Stream cmds) { - final List cmd = cmds.toList(); - if (cmd.isEmpty()) { - return; - } - try { - LOGGER.info(formatLogLine(String.format("executing command %s", Strings.join(cmd, " ")))); - final var exec = getContainer().execInContainer(cmd.toArray(new String[0])); - if (exec.getExitCode() == 0) { - LOGGER.info(formatLogLine(String.format("execution success\nstdout:\n%s\nstderr:\n%s", exec.getStdout(), exec.getStderr()))); - } else { - LOGGER.error(formatLogLine( - String.format("execution failure, code %s\nstdout:\n%s\nstderr:\n%s", exec.getExitCode(), exec.getStdout(), exec.getStderr()))); - } - } catch (IOException e) { - throw new UncheckedIOException(e); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - } - - public X query(final ContextQueryFunction transform) throws SQLException { - return getDatabase().query(transform); - } - - public X transaction(final ContextQueryFunction transform) throws SQLException { - return getDatabase().transaction(transform); - } - - /** - * Returns a builder for the connector config object. - */ - public B configBuilder() { - return new ConfigBuilder(self()).self(); - } - - public B testConfigBuilder() { - return configBuilder() - .withHostAndPort() - .withCredentials() - .withDatabase(); - } - - public B integrationTestConfigBuilder() { - return configBuilder() - .withResolvedHostAndPort() - .withCredentials() - .withDatabase(); - } - - @Override - public void close() { - execSQL(this.cleanupSQL.stream()); - execInContainer(inContainerUndoBootstrapCmd()); - LOGGER.info("closing database databaseId=" + databaseId); - } - - static public class ConfigBuilder, B extends ConfigBuilder> { - - static public final Duration DEFAULT_CDC_REPLICATION_INITIAL_WAIT = Duration.ofSeconds(5); - - protected final ImmutableMap.Builder builder = ImmutableMap.builder(); - protected final T testDatabase; - - protected ConfigBuilder(T testDatabase) { - this.testDatabase = testDatabase; - } - - public JsonNode build() { - return Jsons.jsonNode(builder.build()); - } - - @SuppressWarnings("unchecked") - final protected B self() { - return (B) this; - } - - public B with(Object key, Object value) { - builder.put(key, value); - return self(); - } - - public B withDatabase() { - return this - .with(JdbcUtils.DATABASE_KEY, testDatabase.getDatabaseName()); - } - - public B withCredentials() { - return this - .with(JdbcUtils.USERNAME_KEY, testDatabase.getUserName()) - .with(JdbcUtils.PASSWORD_KEY, testDatabase.getPassword()); - } - - public B withResolvedHostAndPort() { - return this - .with(JdbcUtils.HOST_KEY, HostPortResolver.resolveHost(testDatabase.getContainer())) - .with(JdbcUtils.PORT_KEY, HostPortResolver.resolvePort(testDatabase.getContainer())); - } - - public B withHostAndPort() { - return this - .with(JdbcUtils.HOST_KEY, testDatabase.getContainer().getHost()) - .with(JdbcUtils.PORT_KEY, testDatabase.getContainer().getFirstMappedPort()); - } - - public B withoutSsl() { - return with(JdbcUtils.SSL_KEY, false); - } - - public B withSsl(Map sslMode) { - return with(JdbcUtils.SSL_KEY, true).with(JdbcUtils.SSL_MODE_KEY, sslMode); - } - - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/kotlin/io/airbyte/cdk/extensions/LoggingInvocationInterceptor.kt b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/kotlin/io/airbyte/cdk/extensions/LoggingInvocationInterceptor.kt new file mode 100644 index 0000000000000..0c393ad9d013c --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/kotlin/io/airbyte/cdk/extensions/LoggingInvocationInterceptor.kt @@ -0,0 +1,336 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.extensions + +import java.lang.reflect.* +import java.time.Duration +import java.time.Instant +import java.time.format.DateTimeParseException +import java.util.* +import java.util.concurrent.TimeUnit +import java.util.concurrent.TimeoutException +import java.util.regex.Pattern +import kotlin.concurrent.Volatile +import org.apache.commons.lang3.StringUtils +import org.apache.commons.lang3.exception.ExceptionUtils +import org.apache.commons.lang3.time.DurationFormatUtils +import org.junit.jupiter.api.Timeout +import org.junit.jupiter.api.extension.DynamicTestInvocationContext +import org.junit.jupiter.api.extension.ExtensionContext +import org.junit.jupiter.api.extension.InvocationInterceptor +import org.junit.jupiter.api.extension.ReflectiveInvocationContext +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * By default, junit only output logs to the console, and nothing makes it into log4j logs. This + * class fixes that by using the interceptor facility to print progress and timing information. This + * allows us to have junit loglines in our test logs. This is instanciated via + * [Java's + * ServiceLoader](https://docs.oracle.com/javase%2F9%2Fdocs%2Fapi%2F%2F/java/util/ServiceLoader.html) + * The declaration can be found in + * resources/META-INF/services/org.junit.jupiter.api.extension.Extension + */ +class LoggingInvocationInterceptor : InvocationInterceptor { + private class LoggingInvocationInterceptorHandler : InvocationHandler { + @Throws(Throwable::class) + override fun invoke(proxy: Any, method: Method, args: Array): Any? { + if ( + LoggingInvocationInterceptor::class + .java + .getDeclaredMethod( + method.name, + InvocationInterceptor.Invocation::class.java, + ReflectiveInvocationContext::class.java, + ExtensionContext::class.java + ) == null + ) { + LOGGER!!.error( + "Junit LoggingInvocationInterceptor executing unknown interception point {}", + method.name + ) + return method.invoke(proxy, *(args!!)) + } + val invocation = args!![0] as InvocationInterceptor.Invocation<*>? + val invocationContext = args[1] as ReflectiveInvocationContext<*>? + val extensionContext = args[2] as ExtensionContext? + val methodName = method.name + val logLineSuffix: String? + val methodMatcher = methodPattern!!.matcher(methodName) + if (methodName == "interceptDynamicTest") { + logLineSuffix = + "execution of DynamicTest %s".formatted(extensionContext!!.displayName) + } else if (methodName == "interceptTestClassConstructor") { + logLineSuffix = + "instance creation for %s".formatted(invocationContext!!.targetClass) + } else if (methodMatcher.matches()) { + val interceptedEvent = methodMatcher.group(1) + logLineSuffix = + "execution of @%s method %s.%s".formatted( + interceptedEvent, + invocationContext!!.executable!!.declaringClass.simpleName, + invocationContext.executable!!.name + ) + } else { + logLineSuffix = "execution of unknown intercepted call %s".formatted(methodName) + } + val currentThread = Thread.currentThread() + val timeoutTask = TimeoutInteruptor(currentThread) + val start = Instant.now() + try { + val timeout = getTimeout(invocationContext) + if (timeout != null) { + LOGGER!!.info( + "Junit starting {} with timeout of {}", + logLineSuffix, + DurationFormatUtils.formatDurationWords(timeout.toMillis(), true, true) + ) + Timer("TimeoutTimer-" + currentThread.name, true) + .schedule(timeoutTask, timeout.toMillis()) + } else { + LOGGER!!.warn("Junit starting {} with no timeout", logLineSuffix) + } + val retVal = invocation!!.proceed() + val elapsedMs = Duration.between(start, Instant.now()).toMillis() + LOGGER.info( + "Junit completed {} in {}", + logLineSuffix, + DurationFormatUtils.formatDurationWords(elapsedMs, true, true) + ) + return retVal + } catch (throwable: Throwable) { + timeoutTask.cancel() + val elapsedMs = Duration.between(start, Instant.now()).toMillis() + var t1: Throwable + if (timeoutTask.wasTriggered) { + t1 = + TimeoutException( + ("Execution was cancelled after %s. If you think your test should be given more time to complete, you can use the @Timeout annotation. If all the test of a connector are slow, " + + " you can override the property 'JunitMethodExecutionTimeout' in your gradle.properties.") + .formatted( + DurationFormatUtils.formatDurationWords(elapsedMs, true, true) + ) + ) + t1.initCause(throwable) + } else { + t1 = throwable + } + var belowCurrentCall = false + val stackToDisplay: MutableList = LinkedList() + for (stackString in ExceptionUtils.getStackFrames(throwable)) { + if (stackString!!.startsWith("\tat ")) { + if ( + !belowCurrentCall && + stackString.contains( + LoggingInvocationInterceptor::class.java.canonicalName + ) + ) { + belowCurrentCall = true + } + } else { + belowCurrentCall = false + } + if (!belowCurrentCall) { + stackToDisplay.add(stackString) + } + } + val stackTrace = StringUtils.join(stackToDisplay, "\n ") + LOGGER!!.error( + "Junit exception throw during {} after {}:\n{}", + logLineSuffix, + DurationFormatUtils.formatDurationWords(elapsedMs, true, true), + stackTrace + ) + throw t1 + } finally { + timeoutTask.cancel() + } + } + + private class TimeoutInteruptor(private val parentThread: Thread?) : TimerTask() { + @Volatile var wasTriggered: Boolean = false + + override fun run() { + wasTriggered = true + parentThread!!.interrupt() + } + + override fun cancel(): Boolean { + return super.cancel() + } + } + + companion object { + private val methodPattern: Pattern? = Pattern.compile("intercept(.*)Method") + + private val PATTERN: Pattern = + Pattern.compile( + "([1-9]\\d*) *((?:[nμm]?s)|m|h|d)?", + Pattern.CASE_INSENSITIVE or Pattern.UNICODE_CASE + ) + private val UNITS_BY_ABBREVIATION: MutableMap + + init { + val unitsByAbbreviation: MutableMap = HashMap() + unitsByAbbreviation["ns"] = TimeUnit.NANOSECONDS + unitsByAbbreviation["μs"] = TimeUnit.MICROSECONDS + unitsByAbbreviation["ms"] = TimeUnit.MILLISECONDS + unitsByAbbreviation["s"] = TimeUnit.SECONDS + unitsByAbbreviation["m"] = TimeUnit.MINUTES + unitsByAbbreviation["h"] = TimeUnit.HOURS + unitsByAbbreviation["d"] = TimeUnit.DAYS + UNITS_BY_ABBREVIATION = Collections.unmodifiableMap(unitsByAbbreviation) + } + + @Throws(DateTimeParseException::class) + fun parseDuration(text: String): Duration { + val matcher = PATTERN.matcher(text.trim { it <= ' ' }) + if (matcher.matches()) { + val value = matcher.group(1).toLong() + val unitAbbreviation = matcher.group(2) + val unit = + if (unitAbbreviation == null) TimeUnit.SECONDS + else UNITS_BY_ABBREVIATION.getValue(unitAbbreviation.lowercase()) + return Duration.ofSeconds(unit.toSeconds(value)) + } + throw DateTimeParseException( + "Timeout duration is not in the expected format ( [ns|μs|ms|s|m|h|d])", + text, + 0 + ) + } + + private fun getTimeout(invocationContext: ReflectiveInvocationContext<*>?): Duration? { + var timeout: Duration? = null + var m = invocationContext!!.executable + if (m is Method) { + var timeoutAnnotation: Timeout? = m.getAnnotation(Timeout::class.java) + if (timeoutAnnotation == null) { + timeoutAnnotation = + invocationContext.targetClass.getAnnotation(Timeout::class.java) + } + if (timeoutAnnotation != null) { + timeout = + Duration.ofMillis( + timeoutAnnotation.unit.toMillis(timeoutAnnotation.value) + ) + } + } + if (timeout == null) { + timeout = + parseDuration( + System.getProperty(JUNIT_METHOD_EXECUTION_TIMEOUT_PROPERTY_NAME) + ) + } + return timeout + } + } + } + + private val proxy: InvocationInterceptor? = + Proxy.newProxyInstance( + javaClass.classLoader, + arrayOf?>(InvocationInterceptor::class.java), + LoggingInvocationInterceptorHandler() + ) as InvocationInterceptor + + @Throws(Throwable::class) + override fun interceptAfterAllMethod( + invocation: InvocationInterceptor.Invocation?, + invocationContext: ReflectiveInvocationContext?, + extensionContext: ExtensionContext? + ) { + proxy!!.interceptAfterAllMethod(invocation, invocationContext, extensionContext) + } + + @Throws(Throwable::class) + override fun interceptAfterEachMethod( + invocation: InvocationInterceptor.Invocation?, + invocationContext: ReflectiveInvocationContext?, + extensionContext: ExtensionContext? + ) { + proxy!!.interceptAfterEachMethod(invocation, invocationContext, extensionContext) + } + + @Throws(Throwable::class) + override fun interceptBeforeAllMethod( + invocation: InvocationInterceptor.Invocation?, + invocationContext: ReflectiveInvocationContext?, + extensionContext: ExtensionContext? + ) { + proxy!!.interceptBeforeAllMethod(invocation, invocationContext, extensionContext) + } + + @Throws(Throwable::class) + override fun interceptBeforeEachMethod( + invocation: InvocationInterceptor.Invocation?, + invocationContext: ReflectiveInvocationContext?, + extensionContext: ExtensionContext? + ) { + proxy!!.interceptBeforeEachMethod(invocation, invocationContext, extensionContext) + } + + @Throws(Throwable::class) + override fun interceptDynamicTest( + invocation: InvocationInterceptor.Invocation?, + invocationContext: DynamicTestInvocationContext?, + extensionContext: ExtensionContext? + ) { + proxy!!.interceptDynamicTest(invocation, invocationContext, extensionContext) + } + + @Throws(Throwable::class) + override fun interceptTestMethod( + invocation: InvocationInterceptor.Invocation, + invocationContext: ReflectiveInvocationContext, + extensionContext: ExtensionContext + ) { + if (!Modifier.isPublic(invocationContext.executable!!.modifiers)) { + LOGGER!!.warn( + "Junit method {}.{} is not declared as public", + invocationContext.executable!!.declaringClass.canonicalName, + invocationContext.executable!!.name + ) + } + proxy!!.interceptTestMethod(invocation, invocationContext, extensionContext) + } + + @Throws(Throwable::class) + override fun interceptTestTemplateMethod( + invocation: InvocationInterceptor.Invocation?, + invocationContext: ReflectiveInvocationContext?, + extensionContext: ExtensionContext? + ) { + proxy!!.interceptTestTemplateMethod(invocation, invocationContext, extensionContext) + } + + @Throws(Throwable::class) + override fun interceptTestFactoryMethod( + invocation: InvocationInterceptor.Invocation?, + invocationContext: ReflectiveInvocationContext?, + extensionContext: ExtensionContext? + ): T? { + return proxy!!.interceptTestFactoryMethod(invocation, invocationContext, extensionContext) + } + + @Throws(Throwable::class) + override fun interceptTestClassConstructor( + invocation: InvocationInterceptor.Invocation?, + invocationContext: ReflectiveInvocationContext?>?, + extensionContext: ExtensionContext? + ): T? { + return proxy!!.interceptTestClassConstructor( + invocation, + invocationContext, + extensionContext + ) + } + + companion object { + private val LOGGER: Logger? = + LoggerFactory.getLogger(LoggingInvocationInterceptor::class.java) + private val JUNIT_METHOD_EXECUTION_TIMEOUT_PROPERTY_NAME: String? = + "JunitMethodExecutionTimeout" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/kotlin/io/airbyte/cdk/integrations/base/ssh/SshBastionContainer.kt b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/kotlin/io/airbyte/cdk/integrations/base/ssh/SshBastionContainer.kt new file mode 100644 index 0000000000000..09f2a9f235f9f --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/kotlin/io/airbyte/cdk/integrations/base/ssh/SshBastionContainer.kt @@ -0,0 +1,169 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.base.ssh + +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.collect.ImmutableMap +import io.airbyte.cdk.integrations.util.HostPortResolver +import io.airbyte.cdk.testutils.ContainerFactory +import io.airbyte.commons.json.Jsons +import java.io.IOException +import java.util.* +import java.util.function.Consumer +import org.apache.commons.lang3.tuple.ImmutablePair +import org.testcontainers.containers.Container +import org.testcontainers.containers.GenericContainer +import org.testcontainers.containers.JdbcDatabaseContainer +import org.testcontainers.containers.Network +import org.testcontainers.images.builder.ImageFromDockerfile +import org.testcontainers.utility.DockerImageName + +class SshBastionContainer : AutoCloseable { + class SshBastionContainerFactory : ContainerFactory>() { + override fun createNewContainer(imageName: DockerImageName?): GenericContainer<*>? { + val container: GenericContainer<*> = + GenericContainer( + ImageFromDockerfile("bastion-test") + .withFileFromClasspath("Dockerfile", "bastion/Dockerfile") + ) + .withExposedPorts(22) + return container + } + + fun exclusive(network: Network): GenericContainer<*>? { + val imageModifier = Consumer { c: GenericContainer<*> -> c!!.withNetwork(network) } + val container = + super.exclusive( + "bastion-test", + NamedContainerModifierImpl("withNetwork", imageModifier) + ) + return container + } + } + + var container: GenericContainer<*>? = null + private set + + fun initAndStartBastion(network: Network) { + container = factory!!.exclusive(network) + container!!.start() + } + + @Throws(IOException::class, InterruptedException::class) + fun getTunnelMethod(tunnelMethod: SshTunnel.TunnelMethod, innerAddress: Boolean): JsonNode? { + val containerAddress = + if (innerAddress) getInnerContainerAddress(container!!) + else getOuterContainerAddress(container!!) + return Jsons.jsonNode( + ImmutableMap.builder() + .put("tunnel_host", Objects.requireNonNull(containerAddress!!.left)) + .put("tunnel_method", tunnelMethod) + .put("tunnel_port", containerAddress.right) + .put("tunnel_user", SSH_USER) + .put( + "tunnel_user_password", + if (tunnelMethod == SshTunnel.TunnelMethod.SSH_PASSWORD_AUTH) SSH_PASSWORD + else "" + ) + .put( + "ssh_key", + if (tunnelMethod == SshTunnel.TunnelMethod.SSH_KEY_AUTH) + container!!.execInContainer("cat", "var/bastion/id_rsa").stdout + else "" + ) + .build() + ) + } + + @Throws(IOException::class, InterruptedException::class) + fun getTunnelConfig( + tunnelMethod: SshTunnel.TunnelMethod, + builderWithSchema: ImmutableMap.Builder, + innerAddress: Boolean + ): JsonNode? { + return Jsons.jsonNode( + builderWithSchema + .put("tunnel_method", getTunnelMethod(tunnelMethod, innerAddress)) + .build() + ) + } + + fun getBasicDbConfigBuider(db: JdbcDatabaseContainer<*>): ImmutableMap.Builder { + return getBasicDbConfigBuider(db, db.databaseName) + } + + fun getBasicDbConfigBuider( + db: JdbcDatabaseContainer<*>, + schemas: MutableList + ): ImmutableMap.Builder { + return getBasicDbConfigBuider(db, db.databaseName).put("schemas", schemas) + } + + fun getBasicDbConfigBuider( + db: JdbcDatabaseContainer<*>, + schemaName: String + ): ImmutableMap.Builder { + return ImmutableMap.builder() + .put("host", Objects.requireNonNull(HostPortResolver.resolveHost(db))) + .put("username", db.username) + .put("password", db.password) + .put("port", HostPortResolver.resolvePort(db)) + .put("database", schemaName) + .put("ssl", false) + } + + fun stopAndCloseContainers(db: JdbcDatabaseContainer<*>) { + container!!.stop() + container!!.close() + db.stop() + db.close() + } + + fun stopAndClose() { + container!!.close() + } + + override fun close() { + stopAndClose() + } + + companion object { + private val factory: SshBastionContainerFactory? = SshBastionContainerFactory() + + private val SSH_USER: String = "sshuser" + private val SSH_PASSWORD: String = "secret" + + @JvmStatic + /** + * Returns the inner docker network ip address and port of a container. This can be used to + * reach a container from another container running on the same network + * + * @param container container + * @return a pair of host and port + */ + fun getInnerContainerAddress(container: Container<*>): ImmutablePair { + return ImmutablePair.of( + container.containerInfo.networkSettings.networks.entries + .stream() + .findFirst() + .get() + .value + .ipAddress, + container.exposedPorts.stream().findFirst().get() + ) + } + + @JvmStatic + /** + * Returns the outer docker network ip address and port of a container. This can be used to + * reach a container from the host machine + * + * @param container container + * @return a pair of host and port + */ + fun getOuterContainerAddress(container: Container<*>): ImmutablePair { + return ImmutablePair.of(container.host, container.firstMappedPort) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/kotlin/io/airbyte/cdk/integrations/util/HostPortResolver.kt b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/kotlin/io/airbyte/cdk/integrations/util/HostPortResolver.kt new file mode 100644 index 0000000000000..b0ceaa9101ba5 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/kotlin/io/airbyte/cdk/integrations/util/HostPortResolver.kt @@ -0,0 +1,35 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.util + +import java.util.* +import org.testcontainers.containers.GenericContainer + +object HostPortResolver { + @JvmStatic + fun resolveHost(container: GenericContainer<*>): String { + return getIpAddress(container) + } + + @JvmStatic + fun resolvePort(container: GenericContainer<*>): Int { + return container.exposedPorts.stream().findFirst().get() + } + + fun resolveIpAddress(container: GenericContainer<*>): String { + return getIpAddress(container) + } + + private fun getIpAddress(container: GenericContainer<*>): String { + // Weird double bang here. If I remove the Object.requireNotNull, there's a type error... + return Objects.requireNonNull( + container.containerInfo.networkSettings.networks.entries + .stream() + .findFirst() + .get() + .value + .ipAddress + )!! + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/kotlin/io/airbyte/cdk/testutils/ContainerFactory.kt b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/kotlin/io/airbyte/cdk/testutils/ContainerFactory.kt new file mode 100644 index 0000000000000..78a7e97a1c9f4 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/kotlin/io/airbyte/cdk/testutils/ContainerFactory.kt @@ -0,0 +1,245 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.testutils + +import com.google.common.collect.Lists +import io.airbyte.commons.logging.LoggingHelper +import io.airbyte.commons.logging.MdcScope +import java.lang.reflect.InvocationTargetException +import java.util.List +import java.util.concurrent.ConcurrentHashMap +import java.util.concurrent.ConcurrentMap +import java.util.concurrent.atomic.AtomicInteger +import java.util.function.Consumer +import java.util.function.Supplier +import java.util.stream.Stream +import kotlin.concurrent.Volatile +import org.apache.commons.lang3.StringUtils +import org.slf4j.Logger +import org.slf4j.LoggerFactory +import org.testcontainers.containers.GenericContainer +import org.testcontainers.containers.output.OutputFrame +import org.testcontainers.containers.output.Slf4jLogConsumer +import org.testcontainers.utility.DockerImageName + +/** + * ContainerFactory is the companion to [TestDatabase] and provides it with suitable testcontainer + * instances. + */ +abstract class ContainerFactory> { + @JvmRecord + private data class ContainerKey>( + val clazz: Class>, + val imageName: DockerImageName, + val methods: kotlin.collections.List + ) + + private class ContainerOrException( + private val containerSupplier: Supplier> + ) { + @Volatile private lateinit var _exception: RuntimeException + + @Volatile private lateinit var _container: GenericContainer<*> + + fun container(): GenericContainer<*> { + if (!::_exception.isInitialized && !::_container.isInitialized) { + synchronized(this) { + if (!::_exception.isInitialized && !::_container.isInitialized) { + try { + _container = containerSupplier!!.get() + checkNotNull(_container) { + "testcontainer instance was not constructed" + } + } catch (e: RuntimeException) { + _exception = e + } + } + } + } + if (::_exception.isInitialized) { + throw _exception + } + return _container + } + } + + private fun getTestContainerLogMdcBuilder( + imageName: DockerImageName?, + containerModifiers: MutableList> + ): MdcScope.Builder { + return MdcScope.Builder() + .setLogPrefix( + "testcontainer %s (%s[%s]):".formatted( + containerId!!.incrementAndGet(), + imageName, + StringUtils.join(containerModifiers, ",") + ) + ) + .setPrefixColor(LoggingHelper.Color.RED_BACKGROUND) + } + + /** + * Creates a new, unshared testcontainer instance. This usually wraps the default constructor + * for the testcontainer type. + */ + protected abstract fun createNewContainer(imageName: DockerImageName?): C? + + /** + * Returns a shared instance of the testcontainer. + * + * @Deprecated use shared(String, NamedContainerModifier) instead + */ + fun shared(imageName: String, vararg methods: String): C { + return shared( + imageName, + Stream.of(*methods) + .map { n: String -> NamedContainerModifierImpl(n, resolveModifierByName(n)) } + .toList() + ) + } + + fun shared(imageName: String, vararg namedContainerModifiers: NamedContainerModifier): C { + return shared(imageName, List.of(*namedContainerModifiers)) + } + + @JvmOverloads + fun shared( + imageName: String, + namedContainerModifiers: MutableList> = ArrayList() + ): C { + val containerKey = + ContainerKey( + javaClass, + DockerImageName.parse(imageName), + namedContainerModifiers.map { it.name() }.toList() + ) + // We deliberately avoid creating the container itself eagerly during the evaluation of the + // map + // value. + // Container creation can be exceedingly slow. + // Furthermore, we need to handle exceptions raised during container creation. + val containerOrError = + SHARED_CONTAINERS!!.computeIfAbsent(containerKey) { key: ContainerKey<*>? -> + ContainerOrException { + createAndStartContainer(key!!.imageName, namedContainerModifiers) + } + } + // Instead, the container creation (if applicable) is deferred to here. + return containerOrError!!.container() as C + } + + /** + * Returns an exclusive instance of the testcontainer. + * + * @Deprecated use exclusive(String, NamedContainerModifier) instead + */ + fun exclusive(imageName: String, vararg methods: String): C { + return exclusive( + imageName, + Stream.of(*methods) + .map { n: String -> NamedContainerModifierImpl(n, resolveModifierByName(n)) } + .toList() + ) + } + + fun exclusive(imageName: String, vararg namedContainerModifiers: NamedContainerModifier): C { + return exclusive(imageName, List.of(*namedContainerModifiers)) + } + + @JvmOverloads + fun exclusive( + imageName: String, + namedContainerModifiers: MutableList> = ArrayList() + ): C { + return createAndStartContainer(DockerImageName.parse(imageName), namedContainerModifiers) + } + + interface NamedContainerModifier> { + fun name(): String + + fun modifier(): Consumer + } + + class NamedContainerModifierImpl>(name: String, method: Consumer) : + NamedContainerModifier { + override fun name(): String { + return name + } + + override fun modifier(): Consumer { + return method + } + + val name: String + val method: Consumer + + init { + this.name = name + this.method = method + } + } + + private fun resolveModifierByName(methodName: String?): Consumer { + val self: ContainerFactory = this + val resolvedMethod = Consumer { c: C -> + try { + val containerClass: Class> = c.javaClass + val method = self.javaClass.getMethod(methodName, containerClass) + method.invoke(self, c) + } catch (e: NoSuchMethodException) { + throw RuntimeException(e) + } catch (e: IllegalAccessException) { + throw RuntimeException(e) + } catch (e: InvocationTargetException) { + throw RuntimeException(e) + } + } + return resolvedMethod + } + + private fun createAndStartContainer( + imageName: DockerImageName?, + namedContainerModifiers: MutableList> + ): C { + LOGGER!!.info( + "Creating new container based on {} with {}.", + imageName, + Lists.transform(namedContainerModifiers) { c: NamedContainerModifier -> c!!.name() } + ) + val container = createNewContainer(imageName) + val logConsumer: Slf4jLogConsumer = + object : Slf4jLogConsumer(LOGGER) { + override fun accept(frame: OutputFrame) { + if (frame.utf8StringWithoutLineEnding.trim { it <= ' ' }.length > 0) { + super.accept(frame) + } + } + } + getTestContainerLogMdcBuilder(imageName, namedContainerModifiers)!!.produceMappings { + key: String?, + value: String? -> + logConsumer.withMdc(key, value) + } + container!!.withLogConsumer(logConsumer) + for (resolvedNamedContainerModifier in namedContainerModifiers!!) { + LOGGER.info( + "Calling {} in {} on new container based on {}.", + resolvedNamedContainerModifier!!.name(), + javaClass.name, + imageName + ) + resolvedNamedContainerModifier.modifier()!!.accept(container) + } + container.start() + return container + } + + companion object { + private val LOGGER: Logger? = LoggerFactory.getLogger(ContainerFactory::class.java) + + private val SHARED_CONTAINERS: ConcurrentMap?, ContainerOrException?>? = + ConcurrentHashMap() + private val containerId: AtomicInteger? = AtomicInteger(0) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/kotlin/io/airbyte/cdk/testutils/DatabaseConnectionHelper.kt b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/kotlin/io/airbyte/cdk/testutils/DatabaseConnectionHelper.kt new file mode 100644 index 0000000000000..681c9d43d3e0e --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/kotlin/io/airbyte/cdk/testutils/DatabaseConnectionHelper.kt @@ -0,0 +1,50 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.testutils + +import io.airbyte.cdk.db.factory.DSLContextFactory +import io.airbyte.cdk.db.factory.DataSourceFactory +import javax.sql.DataSource +import org.jooq.DSLContext +import org.jooq.SQLDialect +import org.testcontainers.containers.JdbcDatabaseContainer + +/** + * Helper class that facilitates the creation of database connection objects for testing purposes. + */ +object DatabaseConnectionHelper { + /** + * Constructs a new [DataSource] using the provided configuration. + * + * @param container A JDBC Test Container instance. + * @return The configured [DataSource]. + */ + @JvmStatic + fun createDataSource(container: JdbcDatabaseContainer<*>): DataSource { + return DataSourceFactory.create( + container.username, + container.password, + container.driverClassName, + container.jdbcUrl + ) + } + + /** + * Constructs a configured [DSLContext] instance using the provided configuration. + * + * @param container A JDBC Test Container instance. + * @param dialect The SQL dialect to use with objects created from this context. + * @return The configured [DSLContext]. + */ + @JvmStatic + fun createDslContext(container: JdbcDatabaseContainer<*>, dialect: SQLDialect?): DSLContext? { + return DSLContextFactory.create( + container.username, + container.password, + container.driverClassName, + container.jdbcUrl, + dialect + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/kotlin/io/airbyte/cdk/testutils/NonContainer.kt b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/kotlin/io/airbyte/cdk/testutils/NonContainer.kt new file mode 100644 index 0000000000000..17a8c46415b8b --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/kotlin/io/airbyte/cdk/testutils/NonContainer.kt @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.testutils + +import org.testcontainers.containers.JdbcDatabaseContainer + +/** + * This is used when a source (such as Snowflake) relies on an always-on resource and therefore + * doesn't need an actual container. compatible + */ +class NonContainer( + private val username: String?, + private val password: String?, + private val jdbcUrl: String?, + private val driverClassName: String?, + dockerImageName: String +) : JdbcDatabaseContainer(dockerImageName) { + override fun getDriverClassName(): String? { + return driverClassName + } + + override fun getJdbcUrl(): String? { + return jdbcUrl + } + + override fun getUsername(): String? { + return username + } + + override fun getPassword(): String? { + return password + } + + override fun getTestQueryString(): String? { + return "SELECT 1" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/kotlin/io/airbyte/cdk/testutils/TestDatabase.kt b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/kotlin/io/airbyte/cdk/testutils/TestDatabase.kt new file mode 100644 index 0000000000000..bffd92f31cf96 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/kotlin/io/airbyte/cdk/testutils/TestDatabase.kt @@ -0,0 +1,320 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.testutils + +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.collect.ImmutableMap +import io.airbyte.cdk.db.ContextQueryFunction +import io.airbyte.cdk.db.Database +import io.airbyte.cdk.db.factory.DSLContextFactory +import io.airbyte.cdk.db.factory.DataSourceFactory +import io.airbyte.cdk.db.factory.DatabaseDriver +import io.airbyte.cdk.db.jdbc.JdbcUtils +import io.airbyte.cdk.integrations.JdbcConnector +import io.airbyte.cdk.integrations.util.HostPortResolver +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.string.Strings +import java.io.IOException +import java.io.UncheckedIOException +import java.sql.SQLException +import java.text.DateFormat +import java.text.SimpleDateFormat +import java.time.Duration +import java.util.concurrent.ConcurrentHashMap +import java.util.concurrent.atomic.AtomicInteger +import java.util.stream.Stream +import javax.sql.DataSource +import kotlin.concurrent.Volatile +import org.jooq.DSLContext +import org.jooq.SQLDialect +import org.slf4j.Logger +import org.slf4j.LoggerFactory +import org.testcontainers.containers.JdbcDatabaseContainer + +/** + * TestDatabase provides a convenient pattern for interacting with databases when testing SQL + * database sources. The basic idea is to share the same database testcontainer instance for all + * tests and to use SQL constructs such as DATABASE and USER to isolate each test case's state. + * + * @param the type of the backing testcontainer. + * @param itself + * @param the type of the object returned by [.configBuilder] + */ +abstract class TestDatabase< + C : JdbcDatabaseContainer<*>, T : TestDatabase, B : TestDatabase.ConfigBuilder> +protected constructor(val container: C) : AutoCloseable { + private val suffix: String = Strings.addRandomSuffix("", "_", 10) + private val cleanupSQL: ArrayList = ArrayList() + private val connectionProperties: MutableMap = HashMap() + + @Volatile private var dataSource: DataSource? = null + + @Volatile private lateinit var dslContext: DSLContext + + @JvmField protected val databaseId: Int = nextDatabaseId.getAndIncrement() + @JvmField + protected val containerId: Int = + containerUidToId!!.computeIfAbsent(container.containerId) { _: String? -> + nextContainerId!!.getAndIncrement() + }!! + private val dateFormat: DateFormat = SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS") + + init { + LOGGER!!.info(formatLogLine("creating database " + databaseName)) + } + + protected fun formatLogLine(logLine: String?): String? { + val retVal = "TestDatabase databaseId=$databaseId, containerId=$containerId - $logLine" + return retVal + } + + @Suppress("UNCHECKED_CAST") + protected fun self(): T { + return this as T + } + + /** Adds a key-value pair to the JDBC URL's query parameters. */ + fun withConnectionProperty(key: String, value: String): T { + if (this.isInitialized) { + throw RuntimeException("TestDatabase instance is already initialized") + } + connectionProperties[key] = value + return self() + } + + /** Enqueues a SQL statement to be executed when this object is closed. */ + fun onClose(fmtSql: String, vararg fmtArgs: Any?): T { + cleanupSQL.add(String.format(fmtSql, *fmtArgs)) + return self() + } + + /** Executes a SQL statement after calling String.format on the arguments. */ + fun with(fmtSql: String, vararg fmtArgs: Any?): T { + execSQL(Stream.of(String.format(fmtSql, *fmtArgs))) + return self() + } + + /** + * Executes SQL statements as root to provide the necessary isolation for the lifetime of this + * object. This typically entails at least a CREATE DATABASE and a CREATE USER. Also Initializes + * the [DataSource] and [DSLContext] owned by this object. + */ + open fun initialized(): T? { + inContainerBootstrapCmd().forEach { cmds: Stream -> this.execInContainer(cmds) } + this.dataSource = + DataSourceFactory.create( + userName, + password, + databaseDriver!!.driverClassName, + jdbcUrl, + connectionProperties.toMap(), + JdbcConnector.getConnectionTimeout( + connectionProperties.toMap(), + databaseDriver!!.driverClassName + ) + ) + this.dslContext = DSLContextFactory.create(dataSource, sqlDialect) + return self() + } + + val isInitialized: Boolean + get() = ::dslContext.isInitialized + + protected abstract fun inContainerBootstrapCmd(): Stream> + + protected abstract fun inContainerUndoBootstrapCmd(): Stream + + abstract val databaseDriver: DatabaseDriver? + + abstract val sqlDialect: SQLDialect? + + fun withNamespace(name: String?): String { + return name + suffix + } + + open val databaseName: String + get() = withNamespace("db") + + val userName: String + get() = withNamespace("user") + + open val password: String? + get() = "password" + + fun getDataSource(): DataSource? { + if (!this.isInitialized) { + throw RuntimeException("TestDatabase instance is not yet initialized") + } + return dataSource + } + + fun getDslContext(): DSLContext { + if (!this.isInitialized) { + throw RuntimeException("TestDatabase instance is not yet initialized") + } + return dslContext + } + + open val jdbcUrl: String? + get() = + String.format( + databaseDriver!!.urlFormatString, + container.host, + container.firstMappedPort, + databaseName + ) + + val database: Database? + get() = Database(getDslContext()) + + protected fun execSQL(sql: Stream) { + try { + database!!.query { ctx: DSLContext? -> + sql.forEach { statement: String? -> + LOGGER!!.info("executing SQL statement {}", statement) + ctx!!.execute(statement) + } + null + } + } catch (e: SQLException) { + throw RuntimeException(e) + } + } + + protected fun execInContainer(cmds: Stream) { + val cmd = cmds.toList() + if (cmd!!.isEmpty()) { + return + } + try { + LOGGER!!.info( + formatLogLine( + String.format( + "executing command %s", + Strings.join(cmd.toList().asIterable(), " ") + ) + ) + ) + val exec = container.execInContainer(*cmd.toTypedArray()) + if (exec!!.exitCode == 0) { + LOGGER.info( + formatLogLine( + String.format( + "execution success\nstdout:\n%s\nstderr:\n%s", + exec.stdout, + exec.stderr + ) + ) + ) + } else { + LOGGER.error( + formatLogLine( + String.format( + "execution failure, code %s\nstdout:\n%s\nstderr:\n%s", + exec.exitCode, + exec.stdout, + exec.stderr + ) + ) + ) + } + } catch (e: IOException) { + throw UncheckedIOException(e) + } catch (e: InterruptedException) { + throw RuntimeException(e) + } + } + + @Throws(SQLException::class) + fun query(transform: ContextQueryFunction): X? { + return database!!.query(transform) + } + + @Throws(SQLException::class) + fun transaction(transform: ContextQueryFunction): X? { + return database!!.transaction(transform) + } + + /** Returns a builder for the connector config object. */ + open fun configBuilder(): B { + return ConfigBuilder(self()).self() + } + + fun testConfigBuilder(): B { + return configBuilder().withHostAndPort().withCredentials().withDatabase() + } + + fun integrationTestConfigBuilder(): B? { + return configBuilder().withResolvedHostAndPort().withCredentials().withDatabase() + } + + override fun close() { + execSQL(cleanupSQL.stream()) + execInContainer(inContainerUndoBootstrapCmd()) + LOGGER!!.info("closing database databaseId=$databaseId") + } + + open class ConfigBuilder, B : ConfigBuilder>( + protected val testDatabase: T + ) { + protected val builder: ImmutableMap.Builder = ImmutableMap.builder() + + fun build(): JsonNode { + return Jsons.jsonNode(builder.build()) + } + + @Suppress("UNCHECKED_CAST") + fun self(): B { + return this as B + } + + fun with(key: Any, value: Any): B { + builder.put(key, value) + return self() + } + + fun withDatabase(): B { + return this.with(JdbcUtils.DATABASE_KEY, testDatabase.databaseName) + } + + fun withCredentials(): B { + return this.with(JdbcUtils.USERNAME_KEY, testDatabase.userName) + .with(JdbcUtils.PASSWORD_KEY, testDatabase.password!!) + } + + fun withResolvedHostAndPort(): B { + return this.with( + JdbcUtils.HOST_KEY, + HostPortResolver.resolveHost(testDatabase.container) + ) + .with(JdbcUtils.PORT_KEY, HostPortResolver.resolvePort(testDatabase.container)) + } + + fun withHostAndPort(): B { + return this.with(JdbcUtils.HOST_KEY, testDatabase.container.host) + .with(JdbcUtils.PORT_KEY, testDatabase.container.firstMappedPort) + } + + open fun withoutSsl(): B { + return with(JdbcUtils.SSL_KEY, false) + } + + open fun withSsl(sslMode: MutableMap): B { + return with(JdbcUtils.SSL_KEY, true).with(JdbcUtils.SSL_MODE_KEY, sslMode) + } + + companion object { + @JvmField val DEFAULT_CDC_REPLICATION_INITIAL_WAIT: Duration = Duration.ofSeconds(5) + } + } + + companion object { + private val LOGGER: Logger? = LoggerFactory.getLogger(TestDatabase::class.java) + + private val nextDatabaseId: AtomicInteger = AtomicInteger(0) + + private val nextContainerId: AtomicInteger? = AtomicInteger(0) + private val containerUidToId: MutableMap? = ConcurrentHashMap() + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/resources/log4j2-test.xml b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/resources/log4j2-test.xml similarity index 98% rename from airbyte-cdk/java/airbyte-cdk/dependencies/src/main/resources/log4j2-test.xml rename to airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/resources/log4j2-test.xml index 9c9e72c2c33fb..63eac418a4ee6 100644 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/resources/log4j2-test.xml +++ b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/resources/log4j2-test.xml @@ -67,7 +67,7 @@ - + info "schema1.table1, schema2.table2" - - return catalog.getStreams().stream() - .filter(s -> s.getSyncMode() == SyncMode.INCREMENTAL) - .map(ConfiguredAirbyteStream::getStream) - .map(stream -> stream.getNamespace() + "." + stream.getName()) - // debezium needs commas escaped to split properly - .map(x -> StringUtils.escape(Pattern.quote(x), ",".toCharArray(), "\\,")) - .collect(Collectors.joining(",")); - } - - public static String getColumnIncludeList(final ConfiguredAirbyteCatalog catalog) { - // Turn "stream": { - // "namespace": "schema1" - // "name": "table1" - // "jsonSchema": { - // "properties": { - // "column1": { - // }, - // "column2": { - // } - // } - // } - // } -------> info "schema1.table1.(column1 | column2)" - - return catalog.getStreams().stream() - .filter(s -> s.getSyncMode() == SyncMode.INCREMENTAL) - .map(ConfiguredAirbyteStream::getStream) - .map(s -> { - final String fields = parseFields(s.getJsonSchema().get("properties").fieldNames()); - // schema.table.(col1|col2) - return Pattern.quote(s.getNamespace() + "." + s.getName()) + (StringUtils.isNotBlank(fields) ? "\\." + fields : ""); - }) - .map(x -> StringUtils.escape(x, ",".toCharArray(), "\\,")) - .collect(Collectors.joining(",")); - } - - private static String parseFields(final Iterator fieldNames) { - if (fieldNames == null || !fieldNames.hasNext()) { - return ""; - } - final Iterable iter = () -> fieldNames; - return StreamSupport.stream(iter.spliterator(), false) - .map(f -> Pattern.quote(f)) - .collect(Collectors.joining("|", "(", ")")); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/debezium/internals/SnapshotMetadata.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/debezium/internals/SnapshotMetadata.java deleted file mode 100644 index 995d9eac6a194..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/debezium/internals/SnapshotMetadata.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.debezium.internals; - -import com.google.common.collect.ImmutableSet; -import java.util.HashMap; -import java.util.Map; -import java.util.Set; - -public enum SnapshotMetadata { - - FIRST, - FIRST_IN_DATA_COLLECTION, - LAST_IN_DATA_COLLECTION, - TRUE, - LAST, - FALSE; - - private static final Set ENTRIES_OF_SNAPSHOT_EVENTS = - ImmutableSet.of(TRUE, FIRST, FIRST_IN_DATA_COLLECTION, LAST_IN_DATA_COLLECTION); - private static final Map STRING_TO_ENUM; - static { - STRING_TO_ENUM = new HashMap<>(12); - STRING_TO_ENUM.put("true", TRUE); - STRING_TO_ENUM.put("TRUE", TRUE); - STRING_TO_ENUM.put("false", FALSE); - STRING_TO_ENUM.put("FALSE", FALSE); - STRING_TO_ENUM.put("last", LAST); - STRING_TO_ENUM.put("LAST", LAST); - STRING_TO_ENUM.put("first", FIRST); - STRING_TO_ENUM.put("FIRST", FIRST); - STRING_TO_ENUM.put("last_in_data_collection", LAST_IN_DATA_COLLECTION); - STRING_TO_ENUM.put("LAST_IN_DATA_COLLECTION", LAST_IN_DATA_COLLECTION); - STRING_TO_ENUM.put("first_in_data_collection", FIRST_IN_DATA_COLLECTION); - STRING_TO_ENUM.put("FIRST_IN_DATA_COLLECTION", FIRST_IN_DATA_COLLECTION); - } - - public static SnapshotMetadata fromString(final String value) { - if (STRING_TO_ENUM.containsKey(value)) { - return STRING_TO_ENUM.get(value); - } - throw new RuntimeException("ENUM value not found for " + value); - } - - public static boolean isSnapshotEventMetadata(final SnapshotMetadata snapshotMetadata) { - return ENTRIES_OF_SNAPSHOT_EVENTS.contains(snapshotMetadata); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/jdbc/AbstractJdbcSource.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/jdbc/AbstractJdbcSource.java deleted file mode 100644 index 42d4800072850..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/jdbc/AbstractJdbcSource.java +++ /dev/null @@ -1,502 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.jdbc; - -import static io.airbyte.cdk.db.jdbc.JdbcConstants.INTERNAL_COLUMN_NAME; -import static io.airbyte.cdk.db.jdbc.JdbcConstants.INTERNAL_COLUMN_SIZE; -import static io.airbyte.cdk.db.jdbc.JdbcConstants.INTERNAL_COLUMN_TYPE; -import static io.airbyte.cdk.db.jdbc.JdbcConstants.INTERNAL_COLUMN_TYPE_NAME; -import static io.airbyte.cdk.db.jdbc.JdbcConstants.INTERNAL_DECIMAL_DIGITS; -import static io.airbyte.cdk.db.jdbc.JdbcConstants.INTERNAL_IS_NULLABLE; -import static io.airbyte.cdk.db.jdbc.JdbcConstants.INTERNAL_SCHEMA_NAME; -import static io.airbyte.cdk.db.jdbc.JdbcConstants.INTERNAL_TABLE_NAME; -import static io.airbyte.cdk.db.jdbc.JdbcConstants.JDBC_COLUMN_COLUMN_NAME; -import static io.airbyte.cdk.db.jdbc.JdbcConstants.JDBC_COLUMN_DATABASE_NAME; -import static io.airbyte.cdk.db.jdbc.JdbcConstants.JDBC_COLUMN_DATA_TYPE; -import static io.airbyte.cdk.db.jdbc.JdbcConstants.JDBC_COLUMN_SCHEMA_NAME; -import static io.airbyte.cdk.db.jdbc.JdbcConstants.JDBC_COLUMN_SIZE; -import static io.airbyte.cdk.db.jdbc.JdbcConstants.JDBC_COLUMN_TABLE_NAME; -import static io.airbyte.cdk.db.jdbc.JdbcConstants.JDBC_COLUMN_TYPE_NAME; -import static io.airbyte.cdk.db.jdbc.JdbcConstants.JDBC_DECIMAL_DIGITS; -import static io.airbyte.cdk.db.jdbc.JdbcConstants.JDBC_IS_NULLABLE; -import static io.airbyte.cdk.db.jdbc.JdbcConstants.KEY_SEQ; -import static io.airbyte.cdk.integrations.source.relationaldb.RelationalDbQueryUtils.enquoteIdentifier; -import static io.airbyte.cdk.integrations.source.relationaldb.RelationalDbQueryUtils.enquoteIdentifierList; -import static io.airbyte.cdk.integrations.source.relationaldb.RelationalDbQueryUtils.getFullyQualifiedTableNameWithQuoting; -import static io.airbyte.cdk.integrations.source.relationaldb.RelationalDbQueryUtils.queryTable; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Sets; -import datadog.trace.api.Trace; -import io.airbyte.cdk.db.JdbcCompatibleSourceOperations; -import io.airbyte.cdk.db.SqlDatabase; -import io.airbyte.cdk.db.factory.DataSourceFactory; -import io.airbyte.cdk.db.jdbc.JdbcDatabase; -import io.airbyte.cdk.db.jdbc.JdbcUtils; -import io.airbyte.cdk.db.jdbc.StreamingJdbcDatabase; -import io.airbyte.cdk.db.jdbc.streaming.JdbcStreamingQueryConfig; -import io.airbyte.cdk.integrations.base.Source; -import io.airbyte.cdk.integrations.source.jdbc.dto.JdbcPrivilegeDto; -import io.airbyte.cdk.integrations.source.relationaldb.AbstractDbSource; -import io.airbyte.cdk.integrations.source.relationaldb.CursorInfo; -import io.airbyte.cdk.integrations.source.relationaldb.TableInfo; -import io.airbyte.cdk.integrations.source.relationaldb.state.StateManager; -import io.airbyte.commons.functional.CheckedConsumer; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.stream.AirbyteStreamUtils; -import io.airbyte.commons.util.AutoCloseableIterator; -import io.airbyte.commons.util.AutoCloseableIterators; -import io.airbyte.protocol.models.CommonField; -import io.airbyte.protocol.models.JsonSchemaType; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.v0.SyncMode; -import java.sql.Connection; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.function.Predicate; -import java.util.function.Supplier; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import javax.sql.DataSource; -import org.apache.commons.lang3.tuple.ImmutablePair; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * This class contains helper functions and boilerplate for implementing a source connector for a - * relational DB source which can be accessed via JDBC driver. If you are implementing a connector - * for a relational DB which has a JDBC driver, make an effort to use this class. - */ -public abstract class AbstractJdbcSource extends AbstractDbSource implements Source { - - private static final Logger LOGGER = LoggerFactory.getLogger(AbstractJdbcSource.class); - - protected final Supplier streamingQueryConfigProvider; - protected final JdbcCompatibleSourceOperations sourceOperations; - - protected String quoteString; - protected Collection dataSources = new ArrayList<>(); - - public AbstractJdbcSource(final String driverClass, - final Supplier streamingQueryConfigProvider, - final JdbcCompatibleSourceOperations sourceOperations) { - super(driverClass); - this.streamingQueryConfigProvider = streamingQueryConfigProvider; - this.sourceOperations = sourceOperations; - } - - @Override - protected AutoCloseableIterator queryTableFullRefresh(final JdbcDatabase database, - final List columnNames, - final String schemaName, - final String tableName, - final SyncMode syncMode, - final Optional cursorField) { - LOGGER.info("Queueing query for table: {}", tableName); - // This corresponds to the initial sync for in INCREMENTAL_MODE, where the ordering of the records - // matters - // as intermediate state messages are emitted (if the connector emits intermediate state). - if (syncMode.equals(SyncMode.INCREMENTAL) && getStateEmissionFrequency() > 0) { - final String quotedCursorField = enquoteIdentifier(cursorField.get(), getQuoteString()); - return queryTable(database, String.format("SELECT %s FROM %s ORDER BY %s ASC", - enquoteIdentifierList(columnNames, getQuoteString()), - getFullyQualifiedTableNameWithQuoting(schemaName, tableName, getQuoteString()), quotedCursorField), - tableName, schemaName); - } else { - // If we are in FULL_REFRESH mode, state messages are never emitted, so we don't care about ordering - // of the records. - return queryTable(database, String.format("SELECT %s FROM %s", - enquoteIdentifierList(columnNames, getQuoteString()), - getFullyQualifiedTableNameWithQuoting(schemaName, tableName, getQuoteString())), tableName, schemaName); - } - } - - /** - * Configures a list of operations that can be used to check the connection to the source. - * - * @return list of consumers that run queries for the check command. - */ - @Trace(operationName = CHECK_TRACE_OPERATION_NAME) - protected List> getCheckOperations(final JsonNode config) throws Exception { - return ImmutableList.of(database -> { - LOGGER.info("Attempting to get metadata from the database to see if we can connect."); - database.bufferedResultSetQuery(connection -> connection.getMetaData().getCatalogs(), sourceOperations::rowToJson); - }); - } - - /** - * Aggregate list of @param entries of StreamName and PrimaryKey and - * - * @return a map by StreamName to associated list of primary keys - */ - @VisibleForTesting - public static Map> aggregatePrimateKeys(final List entries) { - final Map> result = new HashMap<>(); - entries.stream().sorted(Comparator.comparingInt(PrimaryKeyAttributesFromDb::keySequence)).forEach(entry -> { - if (!result.containsKey(entry.streamName())) { - result.put(entry.streamName(), new ArrayList<>()); - } - result.get(entry.streamName()).add(entry.primaryKey()); - }); - return result; - } - - private String getCatalog(final SqlDatabase database) { - return (database.getSourceConfig().has(JdbcUtils.DATABASE_KEY) ? database.getSourceConfig().get(JdbcUtils.DATABASE_KEY).asText() : null); - } - - @Override - protected List>> discoverInternal(final JdbcDatabase database, final String schema) throws Exception { - final Set internalSchemas = new HashSet<>(getExcludedInternalNameSpaces()); - LOGGER.info("Internal schemas to exclude: {}", internalSchemas); - final Set tablesWithSelectGrantPrivilege = getPrivilegesTableForCurrentUser(database, schema); - return database.bufferedResultSetQuery( - // retrieve column metadata from the database - connection -> connection.getMetaData().getColumns(getCatalog(database), schema, null, null), - // store essential column metadata to a Json object from the result set about each column - this::getColumnMetadata) - .stream() - .filter(excludeNotAccessibleTables(internalSchemas, tablesWithSelectGrantPrivilege)) - // group by schema and table name to handle the case where a table with the same name exists in - // multiple schemas. - .collect(Collectors.groupingBy(t -> ImmutablePair.of(t.get(INTERNAL_SCHEMA_NAME).asText(), t.get(INTERNAL_TABLE_NAME).asText()))) - .values() - .stream() - .map(fields -> TableInfo.>builder() - .nameSpace(fields.get(0).get(INTERNAL_SCHEMA_NAME).asText()) - .name(fields.get(0).get(INTERNAL_TABLE_NAME).asText()) - .fields(fields.stream() - // read the column metadata Json object, and determine its type - .map(f -> { - final Datatype datatype = sourceOperations.getDatabaseFieldType(f); - final JsonSchemaType jsonType = getAirbyteType(datatype); - LOGGER.debug("Table {} column {} (type {}[{}], nullable {}) -> {}", - fields.get(0).get(INTERNAL_TABLE_NAME).asText(), - f.get(INTERNAL_COLUMN_NAME).asText(), - f.get(INTERNAL_COLUMN_TYPE_NAME).asText(), - f.get(INTERNAL_COLUMN_SIZE).asInt(), - f.get(INTERNAL_IS_NULLABLE).asBoolean(), - jsonType); - return new CommonField(f.get(INTERNAL_COLUMN_NAME).asText(), datatype) {}; - }) - .collect(Collectors.toList())) - .cursorFields(extractCursorFields(fields)) - .build()) - .collect(Collectors.toList()); - } - - private List extractCursorFields(final List fields) { - return fields.stream() - .filter(field -> isCursorType(sourceOperations.getDatabaseFieldType(field))) - .map(field -> field.get(INTERNAL_COLUMN_NAME).asText()) - .collect(Collectors.toList()); - } - - protected Predicate excludeNotAccessibleTables(final Set internalSchemas, - final Set tablesWithSelectGrantPrivilege) { - return jsonNode -> { - if (tablesWithSelectGrantPrivilege.isEmpty()) { - return isNotInternalSchema(jsonNode, internalSchemas); - } - return tablesWithSelectGrantPrivilege.stream() - .anyMatch(e -> e.getSchemaName().equals(jsonNode.get(INTERNAL_SCHEMA_NAME).asText())) - && tablesWithSelectGrantPrivilege.stream() - .anyMatch(e -> e.getTableName().equals(jsonNode.get(INTERNAL_TABLE_NAME).asText())) - && !internalSchemas.contains(jsonNode.get(INTERNAL_SCHEMA_NAME).asText()); - }; - } - - // needs to override isNotInternalSchema for connectors that override - // getPrivilegesTableForCurrentUser() - protected boolean isNotInternalSchema(final JsonNode jsonNode, final Set internalSchemas) { - return !internalSchemas.contains(jsonNode.get(INTERNAL_SCHEMA_NAME).asText()); - } - - /** - * @param resultSet Description of a column available in the table catalog. - * @return Essential information about a column to determine which table it belongs to and its type. - */ - private JsonNode getColumnMetadata(final ResultSet resultSet) throws SQLException { - final var fieldMap = ImmutableMap.builder() - // we always want a namespace, if we cannot get a schema, use db name. - .put(INTERNAL_SCHEMA_NAME, - resultSet.getObject(JDBC_COLUMN_SCHEMA_NAME) != null ? resultSet.getString(JDBC_COLUMN_SCHEMA_NAME) - : resultSet.getObject(JDBC_COLUMN_DATABASE_NAME)) - .put(INTERNAL_TABLE_NAME, resultSet.getString(JDBC_COLUMN_TABLE_NAME)) - .put(INTERNAL_COLUMN_NAME, resultSet.getString(JDBC_COLUMN_COLUMN_NAME)) - .put(INTERNAL_COLUMN_TYPE, resultSet.getString(JDBC_COLUMN_DATA_TYPE)) - .put(INTERNAL_COLUMN_TYPE_NAME, resultSet.getString(JDBC_COLUMN_TYPE_NAME)) - .put(INTERNAL_COLUMN_SIZE, resultSet.getInt(JDBC_COLUMN_SIZE)) - .put(INTERNAL_IS_NULLABLE, resultSet.getString(JDBC_IS_NULLABLE)); - if (resultSet.getString(JDBC_DECIMAL_DIGITS) != null) { - fieldMap.put(INTERNAL_DECIMAL_DIGITS, resultSet.getString(JDBC_DECIMAL_DIGITS)); - } - return Jsons.jsonNode(fieldMap.build()); - } - - @Override - public List>> discoverInternal(final JdbcDatabase database) - throws Exception { - return discoverInternal(database, null); - } - - @Override - public JsonSchemaType getAirbyteType(final Datatype columnType) { - return sourceOperations.getAirbyteType(columnType); - } - - @VisibleForTesting - public record PrimaryKeyAttributesFromDb(String streamName, - String primaryKey, - int keySequence) { - - } - - @Override - protected Map> discoverPrimaryKeys(final JdbcDatabase database, - final List>> tableInfos) { - LOGGER.info("Discover primary keys for tables: " + tableInfos.stream().map(TableInfo::getName).collect( - Collectors.toSet())); - try { - // Get all primary keys without specifying a table name - final Map> tablePrimaryKeys = aggregatePrimateKeys(database.bufferedResultSetQuery( - connection -> connection.getMetaData().getPrimaryKeys(getCatalog(database), null, null), - r -> { - final String schemaName = - r.getObject(JDBC_COLUMN_SCHEMA_NAME) != null ? r.getString(JDBC_COLUMN_SCHEMA_NAME) : r.getString(JDBC_COLUMN_DATABASE_NAME); - final String streamName = JdbcUtils.getFullyQualifiedTableName(schemaName, r.getString(JDBC_COLUMN_TABLE_NAME)); - final String primaryKey = r.getString(JDBC_COLUMN_COLUMN_NAME); - final int keySeq = r.getInt(KEY_SEQ); - return new PrimaryKeyAttributesFromDb(streamName, primaryKey, keySeq); - })); - if (!tablePrimaryKeys.isEmpty()) { - return tablePrimaryKeys; - } - } catch (final SQLException e) { - LOGGER.debug(String.format("Could not retrieve primary keys without a table name (%s), retrying", e)); - } - // Get primary keys one table at a time - return tableInfos.stream() - .collect(Collectors.toMap( - tableInfo -> JdbcUtils.getFullyQualifiedTableName(tableInfo.getNameSpace(), tableInfo.getName()), - tableInfo -> { - final String streamName = JdbcUtils.getFullyQualifiedTableName(tableInfo.getNameSpace(), tableInfo.getName()); - try { - final Map> primaryKeys = aggregatePrimateKeys(database.bufferedResultSetQuery( - connection -> connection.getMetaData().getPrimaryKeys(getCatalog(database), tableInfo.getNameSpace(), tableInfo.getName()), - r -> new PrimaryKeyAttributesFromDb(streamName, r.getString(JDBC_COLUMN_COLUMN_NAME), r.getInt(KEY_SEQ)))); - return primaryKeys.getOrDefault(streamName, Collections.emptyList()); - } catch (final SQLException e) { - LOGGER.error(String.format("Could not retrieve primary keys for %s: %s", streamName, e)); - return Collections.emptyList(); - } - })); - } - - @Override - protected String getQuoteString() { - return quoteString; - } - - @Override - public boolean isCursorType(final Datatype type) { - return sourceOperations.isCursorType(type); - } - - @Override - public AutoCloseableIterator queryTableIncremental(final JdbcDatabase database, - final List columnNames, - final String schemaName, - final String tableName, - final CursorInfo cursorInfo, - final Datatype cursorFieldType) { - LOGGER.info("Queueing query for table: {}", tableName); - final io.airbyte.protocol.models.AirbyteStreamNameNamespacePair airbyteStream = - AirbyteStreamUtils.convertFromNameAndNamespace(tableName, schemaName); - return AutoCloseableIterators.lazyIterator(() -> { - try { - final Stream stream = database.unsafeQuery( - connection -> { - LOGGER.info("Preparing query for table: {}", tableName); - final String fullTableName = getFullyQualifiedTableNameWithQuoting(schemaName, tableName, getQuoteString()); - final String quotedCursorField = enquoteIdentifier(cursorInfo.getCursorField(), getQuoteString()); - - final String operator; - if (cursorInfo.getCursorRecordCount() <= 0L) { - operator = ">"; - } else { - final long actualRecordCount = getActualCursorRecordCount( - connection, fullTableName, quotedCursorField, cursorFieldType, cursorInfo.getCursor()); - LOGGER.info("Table {} cursor count: expected {}, actual {}", tableName, cursorInfo.getCursorRecordCount(), actualRecordCount); - if (actualRecordCount == cursorInfo.getCursorRecordCount()) { - operator = ">"; - } else { - operator = ">="; - } - } - - final String wrappedColumnNames = getWrappedColumnNames(database, connection, columnNames, schemaName, tableName); - final StringBuilder sql = new StringBuilder(String.format("SELECT %s FROM %s WHERE %s %s ?", - wrappedColumnNames, - fullTableName, - quotedCursorField, - operator)); - // if the connector emits intermediate states, the incremental query must be sorted by the cursor - // field - if (getStateEmissionFrequency() > 0) { - sql.append(String.format(" ORDER BY %s ASC", quotedCursorField)); - } - - final PreparedStatement preparedStatement = connection.prepareStatement(sql.toString()); - LOGGER.info("Executing query for table {}: {}", tableName, preparedStatement); - sourceOperations.setCursorField(preparedStatement, 1, cursorFieldType, cursorInfo.getCursor()); - return preparedStatement; - }, - sourceOperations::rowToJson); - return AutoCloseableIterators.fromStream(stream, airbyteStream); - } catch (final SQLException e) { - throw new RuntimeException(e); - } - }, airbyteStream); - } - - /** - * Some databases need special column names in the query. - */ - protected String getWrappedColumnNames(final JdbcDatabase database, - final Connection connection, - final List columnNames, - final String schemaName, - final String tableName) - throws SQLException { - return enquoteIdentifierList(columnNames, getQuoteString()); - } - - protected String getCountColumnName() { - return "record_count"; - } - - protected long getActualCursorRecordCount(final Connection connection, - final String fullTableName, - final String quotedCursorField, - final Datatype cursorFieldType, - final String cursor) - throws SQLException { - final String columnName = getCountColumnName(); - final PreparedStatement cursorRecordStatement; - if (cursor == null) { - final String cursorRecordQuery = String.format("SELECT COUNT(*) AS %s FROM %s WHERE %s IS NULL", - columnName, - fullTableName, - quotedCursorField); - cursorRecordStatement = connection.prepareStatement(cursorRecordQuery); - } else { - final String cursorRecordQuery = String.format("SELECT COUNT(*) AS %s FROM %s WHERE %s = ?", - columnName, - fullTableName, - quotedCursorField); - cursorRecordStatement = connection.prepareStatement(cursorRecordQuery);; - sourceOperations.setCursorField(cursorRecordStatement, 1, cursorFieldType, cursor); - } - final ResultSet resultSet = cursorRecordStatement.executeQuery(); - if (resultSet.next()) { - return resultSet.getLong(columnName); - } else { - return 0L; - } - } - - @Override - public JdbcDatabase createDatabase(final JsonNode sourceConfig) throws SQLException { - return createDatabase(sourceConfig, JdbcDataSourceUtils.DEFAULT_JDBC_PARAMETERS_DELIMITER); - } - - public JdbcDatabase createDatabase(final JsonNode sourceConfig, String delimiter) throws SQLException { - final JsonNode jdbcConfig = toDatabaseConfig(sourceConfig); - Map connectionProperties = JdbcDataSourceUtils.getConnectionProperties(sourceConfig, delimiter); - // Create the data source - final DataSource dataSource = DataSourceFactory.create( - jdbcConfig.has(JdbcUtils.USERNAME_KEY) ? jdbcConfig.get(JdbcUtils.USERNAME_KEY).asText() : null, - jdbcConfig.has(JdbcUtils.PASSWORD_KEY) ? jdbcConfig.get(JdbcUtils.PASSWORD_KEY).asText() : null, - driverClassName, - jdbcConfig.get(JdbcUtils.JDBC_URL_KEY).asText(), - connectionProperties, - getConnectionTimeout(connectionProperties)); - // Record the data source so that it can be closed. - dataSources.add(dataSource); - - final JdbcDatabase database = new StreamingJdbcDatabase( - dataSource, - sourceOperations, - streamingQueryConfigProvider); - - quoteString = (quoteString == null ? database.getMetaData().getIdentifierQuoteString() : quoteString); - database.setSourceConfig(sourceConfig); - database.setDatabaseConfig(jdbcConfig); - return database; - } - - /** - * {@inheritDoc} - * - * @param database database instance - * @param catalog schema of the incoming messages. - * @throws SQLException - */ - @Override - protected void logPreSyncDebugData(final JdbcDatabase database, final ConfiguredAirbyteCatalog catalog) - throws SQLException { - LOGGER.info("Data source product recognized as {}:{}", - database.getMetaData().getDatabaseProductName(), - database.getMetaData().getDatabaseProductVersion()); - } - - @Override - public void close() { - dataSources.forEach(d -> { - try { - DataSourceFactory.close(d); - } catch (final Exception e) { - LOGGER.warn("Unable to close data source.", e); - } - }); - dataSources.clear(); - } - - protected List identifyStreamsToSnapshot(final ConfiguredAirbyteCatalog catalog, final StateManager stateManager) { - final Set alreadySyncedStreams = stateManager.getCdcStateManager().getInitialStreamsSynced(); - if (alreadySyncedStreams.isEmpty() && (stateManager.getCdcStateManager().getCdcState() == null - || stateManager.getCdcStateManager().getCdcState().getState() == null)) { - return Collections.emptyList(); - } - - final Set allStreams = AirbyteStreamNameNamespacePair.fromConfiguredCatalog(catalog); - - final Set newlyAddedStreams = new HashSet<>(Sets.difference(allStreams, alreadySyncedStreams)); - - return catalog.getStreams().stream() - .filter(c -> c.getSyncMode() == SyncMode.INCREMENTAL) - .filter(stream -> newlyAddedStreams.contains(AirbyteStreamNameNamespacePair.fromAirbyteStream(stream.getStream()))) - .map(Jsons::clone) - .collect(Collectors.toList()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/jdbc/JdbcDataSourceUtils.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/jdbc/JdbcDataSourceUtils.java deleted file mode 100644 index f11193178ec46..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/jdbc/JdbcDataSourceUtils.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.jdbc; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.db.jdbc.JdbcUtils; -import io.airbyte.commons.map.MoreMaps; -import java.util.Map; -import java.util.Objects; - -public class JdbcDataSourceUtils { - - public static final String DEFAULT_JDBC_PARAMETERS_DELIMITER = "&"; - - /** - * Validates for duplication parameters - * - * @param customParameters custom connection properties map as specified by each Jdbc source - * @param defaultParameters connection properties map as specified by each Jdbc source - * @throws IllegalArgumentException - */ - public static void assertCustomParametersDontOverwriteDefaultParameters(final Map customParameters, - final Map defaultParameters) { - for (final String key : defaultParameters.keySet()) { - if (customParameters.containsKey(key) && !Objects.equals(customParameters.get(key), defaultParameters.get(key))) { - throw new IllegalArgumentException("Cannot overwrite default JDBC parameter " + key); - } - } - } - - /** - * Retrieves connection_properties from config and also validates if custom jdbc_url parameters - * overlap with the default properties - * - * @param config A configuration used to check Jdbc connection - * @return A mapping of connection properties - */ - public static Map getConnectionProperties(final JsonNode config) { - return getConnectionProperties(config, DEFAULT_JDBC_PARAMETERS_DELIMITER); - } - - public static Map getConnectionProperties(final JsonNode config, String parameterDelimiter) { - final Map customProperties = JdbcUtils.parseJdbcParameters(config, JdbcUtils.JDBC_URL_PARAMS_KEY, parameterDelimiter); - final Map defaultProperties = JdbcDataSourceUtils.getDefaultConnectionProperties(config); - assertCustomParametersDontOverwriteDefaultParameters(customProperties, defaultProperties); - return MoreMaps.merge(customProperties, defaultProperties); - } - - /** - * Retrieves default connection_properties from config - * - * TODO: make this method abstract and add parity features to destination connectors - * - * @param config A configuration used to check Jdbc connection - * @return A mapping of the default connection properties - */ - public static Map getDefaultConnectionProperties(final JsonNode config) { - // NOTE that Postgres returns an empty map for some reason? - return JdbcUtils.parseJdbcParameters(config, "connection_properties", DEFAULT_JDBC_PARAMETERS_DELIMITER); - }; - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/jdbc/JdbcSSLConnectionUtils.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/jdbc/JdbcSSLConnectionUtils.java deleted file mode 100644 index 83106c17d6ce4..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/jdbc/JdbcSSLConnectionUtils.java +++ /dev/null @@ -1,206 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.jdbc; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.db.jdbc.JdbcUtils; -import io.airbyte.cdk.db.util.SSLCertificateUtils; -import java.io.IOException; -import java.net.MalformedURLException; -import java.net.URI; -import java.nio.file.Files; -import java.nio.file.Path; -import java.security.KeyStoreException; -import java.security.NoSuchAlgorithmException; -import java.security.cert.CertificateException; -import java.security.spec.InvalidKeySpecException; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Optional; -import org.apache.commons.lang3.RandomStringUtils; -import org.apache.commons.lang3.tuple.ImmutablePair; -import org.apache.commons.lang3.tuple.Pair; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class JdbcSSLConnectionUtils { - - public static final String SSL_MODE = "sslMode"; - - public static final String TRUST_KEY_STORE_URL = "trustCertificateKeyStoreUrl"; - public static final String TRUST_KEY_STORE_PASS = "trustCertificateKeyStorePassword"; - public static final String CLIENT_KEY_STORE_URL = "clientCertificateKeyStoreUrl"; - public static final String CLIENT_KEY_STORE_PASS = "clientCertificateKeyStorePassword"; - public static final String CLIENT_KEY_STORE_TYPE = "clientCertificateKeyStoreType"; - public static final String TRUST_KEY_STORE_TYPE = "trustCertificateKeyStoreType"; - public static final String KEY_STORE_TYPE_PKCS12 = "PKCS12"; - public static final String PARAM_MODE = "mode"; - Pair caCertKeyStorePair; - Pair clientCertKeyStorePair; - - public enum SslMode { - - DISABLED("disable"), - ALLOWED("allow"), - PREFERRED("preferred", "prefer"), - REQUIRED("required", "require"), - VERIFY_CA("verify_ca", "verify-ca"), - VERIFY_IDENTITY("verify_identity", "verify-full"); - - public final List spec; - - SslMode(final String... spec) { - this.spec = Arrays.asList(spec); - } - - public static Optional bySpec(final String spec) { - return Arrays.stream(SslMode.values()) - .filter(sslMode -> sslMode.spec.contains(spec)) - .findFirst(); - } - - } - - private static final Logger LOGGER = LoggerFactory.getLogger(JdbcSSLConnectionUtils.class.getClass()); - public static final String PARAM_CA_CERTIFICATE = "ca_certificate"; - public static final String PARAM_CLIENT_CERTIFICATE = "client_certificate"; - public static final String PARAM_CLIENT_KEY = "client_key"; - public static final String PARAM_CLIENT_KEY_PASSWORD = "client_key_password"; - - /** - * Parses SSL related configuration and generates keystores to be used by connector - * - * @param config configuration - * @return map containing relevant parsed values including location of keystore or an empty map - */ - public static Map parseSSLConfig(final JsonNode config) { - LOGGER.debug("source config: {}", config); - - Pair caCertKeyStorePair = null; - Pair clientCertKeyStorePair = null; - final Map additionalParameters = new HashMap<>(); - // assume ssl if not explicitly mentioned. - if (!config.has(JdbcUtils.SSL_KEY) || config.get(JdbcUtils.SSL_KEY).asBoolean()) { - if (config.has(JdbcUtils.SSL_MODE_KEY)) { - final String specMode = config.get(JdbcUtils.SSL_MODE_KEY).get(PARAM_MODE).asText(); - additionalParameters.put(SSL_MODE, - SslMode.bySpec(specMode).orElseThrow(() -> new IllegalArgumentException("unexpected ssl mode")).name()); - if (Objects.isNull(caCertKeyStorePair)) { - caCertKeyStorePair = JdbcSSLConnectionUtils.prepareCACertificateKeyStore(config); - } - - if (Objects.nonNull(caCertKeyStorePair)) { - LOGGER.debug("uri for ca cert keystore: {}", caCertKeyStorePair.getLeft().toString()); - try { - additionalParameters.putAll(Map.of( - TRUST_KEY_STORE_URL, caCertKeyStorePair.getLeft().toURL().toString(), - TRUST_KEY_STORE_PASS, caCertKeyStorePair.getRight(), - TRUST_KEY_STORE_TYPE, KEY_STORE_TYPE_PKCS12)); - } catch (final MalformedURLException e) { - throw new RuntimeException("Unable to get a URL for trust key store"); - } - - } - - if (Objects.isNull(clientCertKeyStorePair)) { - clientCertKeyStorePair = JdbcSSLConnectionUtils.prepareClientCertificateKeyStore(config); - } - - if (Objects.nonNull(clientCertKeyStorePair)) { - LOGGER.debug("uri for client cert keystore: {} / {}", clientCertKeyStorePair.getLeft().toString(), clientCertKeyStorePair.getRight()); - try { - additionalParameters.putAll(Map.of( - CLIENT_KEY_STORE_URL, clientCertKeyStorePair.getLeft().toURL().toString(), - CLIENT_KEY_STORE_PASS, clientCertKeyStorePair.getRight(), - CLIENT_KEY_STORE_TYPE, KEY_STORE_TYPE_PKCS12)); - } catch (final MalformedURLException e) { - throw new RuntimeException("Unable to get a URL for client key store"); - } - } - } else { - additionalParameters.put(SSL_MODE, SslMode.DISABLED.name()); - } - } - LOGGER.debug("additional params: {}", additionalParameters); - return additionalParameters; - } - - public static Pair prepareCACertificateKeyStore(final JsonNode config) { - // if config available - // if has CA cert - make keystore - // if has client cert - // if has client password - make keystore using password - // if no client password - make keystore using random password - Pair caCertKeyStorePair = null; - if (Objects.nonNull(config)) { - if (!config.has(JdbcUtils.SSL_KEY) || config.get(JdbcUtils.SSL_KEY).asBoolean()) { - final var encryption = config.get(JdbcUtils.SSL_MODE_KEY); - if (encryption.has(PARAM_CA_CERTIFICATE) && !encryption.get(PARAM_CA_CERTIFICATE).asText().isEmpty()) { - final String clientKeyPassword = getOrGeneratePassword(encryption); - try { - final URI caCertKeyStoreUri = SSLCertificateUtils.keyStoreFromCertificate( - encryption.get(PARAM_CA_CERTIFICATE).asText(), - clientKeyPassword, - null, - null); - caCertKeyStorePair = new ImmutablePair<>(caCertKeyStoreUri, clientKeyPassword); - } catch (final CertificateException | IOException | KeyStoreException | NoSuchAlgorithmException e) { - throw new RuntimeException("Failed to create keystore for CA certificate", e); - } - } - } - } - return caCertKeyStorePair; - } - - private static String getOrGeneratePassword(final JsonNode sslModeConfig) { - final String clientKeyPassword; - if (sslModeConfig.has(PARAM_CLIENT_KEY_PASSWORD) && !sslModeConfig.get(PARAM_CLIENT_KEY_PASSWORD).asText().isEmpty()) { - clientKeyPassword = sslModeConfig.get(PARAM_CLIENT_KEY_PASSWORD).asText(); - } else { - clientKeyPassword = RandomStringUtils.randomAlphanumeric(10); - } - return clientKeyPassword; - } - - public static Pair prepareClientCertificateKeyStore(final JsonNode config) { - Pair clientCertKeyStorePair = null; - if (Objects.nonNull(config)) { - if (!config.has(JdbcUtils.SSL_KEY) || config.get(JdbcUtils.SSL_KEY).asBoolean()) { - final var encryption = config.get(JdbcUtils.SSL_MODE_KEY); - if (encryption.has(PARAM_CLIENT_CERTIFICATE) && !encryption.get(PARAM_CLIENT_CERTIFICATE).asText().isEmpty() - && encryption.has(PARAM_CLIENT_KEY) && !encryption.get(PARAM_CLIENT_KEY).asText().isEmpty()) { - final String clientKeyPassword = getOrGeneratePassword(encryption); - try { - final URI clientCertKeyStoreUri = SSLCertificateUtils.keyStoreFromClientCertificate(encryption.get(PARAM_CLIENT_CERTIFICATE).asText(), - encryption.get(PARAM_CLIENT_KEY).asText(), - clientKeyPassword, null); - clientCertKeyStorePair = new ImmutablePair<>(clientCertKeyStoreUri, clientKeyPassword); - } catch (final CertificateException | IOException - | KeyStoreException | NoSuchAlgorithmException - | InvalidKeySpecException | InterruptedException e) { - throw new RuntimeException("Failed to create keystore for Client certificate", e); - } - } - } - } - return clientCertKeyStorePair; - } - - public static Path fileFromCertPem(final String certPem) { - try { - final Path path = Files.createTempFile(null, ".crt"); - Files.writeString(path, certPem); - path.toFile().deleteOnExit(); - return path; - } catch (final IOException e) { - throw new RuntimeException("Cannot save root certificate to file", e); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/jdbc/JdbcSource.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/jdbc/JdbcSource.java deleted file mode 100644 index b50fdd36a9315..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/jdbc/JdbcSource.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.jdbc; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.db.factory.DatabaseDriver; -import io.airbyte.cdk.db.jdbc.JdbcUtils; -import io.airbyte.cdk.db.jdbc.streaming.AdaptiveStreamingQueryConfig; -import io.airbyte.cdk.integrations.base.IntegrationRunner; -import io.airbyte.cdk.integrations.base.Source; -import java.sql.JDBCType; -import java.util.Set; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class JdbcSource extends AbstractJdbcSource implements Source { - - private static final Logger LOGGER = LoggerFactory.getLogger(JdbcSource.class); - - public JdbcSource() { - super(DatabaseDriver.POSTGRESQL.getDriverClassName(), AdaptiveStreamingQueryConfig::new, JdbcUtils.getDefaultSourceOperations()); - } - - // no-op for JdbcSource since the config it receives is designed to be use for JDBC. - @Override - public JsonNode toDatabaseConfig(final JsonNode config) { - return config; - } - - @Override - public Set getExcludedInternalNameSpaces() { - return Set.of("information_schema", "pg_catalog", "pg_internal", "catalog_history"); - } - - public static void main(final String[] args) throws Exception { - final Source source = new JdbcSource(); - LOGGER.info("starting source: {}", JdbcSource.class); - new IntegrationRunner(source).run(args); - LOGGER.info("completed source: {}", JdbcSource.class); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/jdbc/dto/JdbcPrivilegeDto.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/jdbc/dto/JdbcPrivilegeDto.java deleted file mode 100644 index b598f041dde4a..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/jdbc/dto/JdbcPrivilegeDto.java +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.jdbc.dto; - -import com.google.common.base.Objects; - -/** - * The class to store values from privileges table - */ -public class JdbcPrivilegeDto { - - private String grantee; - private String tableName; - private String schemaName; - private String privilege; - - public JdbcPrivilegeDto(String grantee, String tableName, String schemaName, String privilege) { - this.grantee = grantee; - this.tableName = tableName; - this.schemaName = schemaName; - this.privilege = privilege; - } - - public String getGrantee() { - return grantee; - } - - public String getTableName() { - return tableName; - } - - public String getSchemaName() { - return schemaName; - } - - public String getPrivilege() { - return privilege; - } - - public static JdbcPrivilegeDtoBuilder builder() { - return new JdbcPrivilegeDtoBuilder(); - } - - public static class JdbcPrivilegeDtoBuilder { - - private String grantee; - private String tableName; - private String schemaName; - private String privilege; - - public JdbcPrivilegeDtoBuilder grantee(String grantee) { - this.grantee = grantee; - return this; - } - - public JdbcPrivilegeDtoBuilder tableName(String tableName) { - this.tableName = tableName; - return this; - } - - public JdbcPrivilegeDtoBuilder schemaName(String schemaName) { - this.schemaName = schemaName; - return this; - } - - public JdbcPrivilegeDtoBuilder privilege(String privilege) { - this.privilege = privilege; - return this; - } - - public JdbcPrivilegeDto build() { - return new JdbcPrivilegeDto(grantee, tableName, schemaName, privilege); - } - - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - JdbcPrivilegeDto that = (JdbcPrivilegeDto) o; - return Objects.equal(grantee, that.grantee) && Objects.equal(tableName, that.tableName) - && Objects.equal(schemaName, that.schemaName) && Objects.equal(privilege, that.privilege); - } - - @Override - public int hashCode() { - return Objects.hashCode(grantee, tableName, schemaName, privilege); - } - - @Override - public String toString() { - return "JdbcPrivilegeDto{" + - "grantee='" + grantee + '\'' + - ", columnName='" + tableName + '\'' + - ", schemaName='" + schemaName + '\'' + - ", privilege='" + privilege + '\'' + - '}'; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/AbstractDbSource.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/AbstractDbSource.java deleted file mode 100644 index 26d04bed4b6bf..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/AbstractDbSource.java +++ /dev/null @@ -1,695 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.relationaldb; - -import static io.airbyte.cdk.integrations.base.errors.messages.ErrorMessage.getErrorMessage; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; -import datadog.trace.api.Trace; -import io.airbyte.cdk.db.AbstractDatabase; -import io.airbyte.cdk.db.IncrementalUtils; -import io.airbyte.cdk.db.jdbc.JdbcDatabase; -import io.airbyte.cdk.integrations.JdbcConnector; -import io.airbyte.cdk.integrations.base.AirbyteTraceMessageUtility; -import io.airbyte.cdk.integrations.base.Source; -import io.airbyte.cdk.integrations.source.relationaldb.InvalidCursorInfoUtil.InvalidCursorInfo; -import io.airbyte.cdk.integrations.source.relationaldb.state.StateGeneratorUtils; -import io.airbyte.cdk.integrations.source.relationaldb.state.StateManager; -import io.airbyte.cdk.integrations.source.relationaldb.state.StateManagerFactory; -import io.airbyte.cdk.integrations.util.ApmTraceUtils; -import io.airbyte.cdk.integrations.util.ConnectorExceptionUtil; -import io.airbyte.commons.exceptions.ConfigErrorException; -import io.airbyte.commons.exceptions.ConnectionErrorException; -import io.airbyte.commons.features.EnvVariableFeatureFlags; -import io.airbyte.commons.features.FeatureFlags; -import io.airbyte.commons.functional.CheckedConsumer; -import io.airbyte.commons.lang.Exceptions; -import io.airbyte.commons.stream.AirbyteStreamUtils; -import io.airbyte.commons.util.AutoCloseableIterator; -import io.airbyte.commons.util.AutoCloseableIterators; -import io.airbyte.protocol.models.CommonField; -import io.airbyte.protocol.models.JsonSchemaPrimitiveUtil.JsonSchemaPrimitive; -import io.airbyte.protocol.models.JsonSchemaType; -import io.airbyte.protocol.models.v0.AirbyteCatalog; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus.Status; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteMessage.Type; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.AirbyteStateMessage.AirbyteStateType; -import io.airbyte.protocol.models.v0.AirbyteStream; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.CatalogHelpers; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.v0.SyncMode; -import java.sql.SQLException; -import java.time.Instant; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Optional; -import java.util.Set; -import java.util.concurrent.atomic.AtomicLong; -import java.util.function.Function; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * This class contains helper functions and boilerplate for implementing a source connector for a DB - * source of both non-relational and relational type - */ -public abstract class AbstractDbSource extends - JdbcConnector implements Source, AutoCloseable { - - public static final String CHECK_TRACE_OPERATION_NAME = "check-operation"; - public static final String DISCOVER_TRACE_OPERATION_NAME = "discover-operation"; - public static final String READ_TRACE_OPERATION_NAME = "read-operation"; - - private static final Logger LOGGER = LoggerFactory.getLogger(AbstractDbSource.class); - - // TODO: Remove when the flag is not use anymore - protected FeatureFlags featureFlags = new EnvVariableFeatureFlags(); - - protected AbstractDbSource(String driverClassName) { - super(driverClassName); - } - - @VisibleForTesting - public void setFeatureFlags(FeatureFlags featureFlags) { - this.featureFlags = featureFlags; - } - - @Override - @Trace(operationName = CHECK_TRACE_OPERATION_NAME) - public AirbyteConnectionStatus check(final JsonNode config) throws Exception { - try { - final Database database = createDatabase(config); - for (final CheckedConsumer checkOperation : getCheckOperations(config)) { - checkOperation.accept(database); - } - - return new AirbyteConnectionStatus().withStatus(Status.SUCCEEDED); - } catch (final ConnectionErrorException ex) { - ApmTraceUtils.addExceptionToTrace(ex); - final String message = getErrorMessage(ex.getStateCode(), ex.getErrorCode(), - ex.getExceptionMessage(), ex); - AirbyteTraceMessageUtility.emitConfigErrorTrace(ex, message); - return new AirbyteConnectionStatus() - .withStatus(Status.FAILED) - .withMessage(message); - } catch (final Exception e) { - ApmTraceUtils.addExceptionToTrace(e); - LOGGER.info("Exception while checking connection: ", e); - return new AirbyteConnectionStatus() - .withStatus(Status.FAILED) - .withMessage(String.format(ConnectorExceptionUtil.COMMON_EXCEPTION_MESSAGE_TEMPLATE, e.getMessage())); - } finally { - close(); - } - } - - @Override - @Trace(operationName = DISCOVER_TRACE_OPERATION_NAME) - public AirbyteCatalog discover(final JsonNode config) throws Exception { - try { - final Database database = createDatabase(config); - final List>> tableInfos = discoverWithoutSystemTables(database); - final Map> fullyQualifiedTableNameToPrimaryKeys = discoverPrimaryKeys( - database, tableInfos); - return DbSourceDiscoverUtil.convertTableInfosToAirbyteCatalog(tableInfos, fullyQualifiedTableNameToPrimaryKeys, this::getAirbyteType); - } finally { - close(); - } - } - - /** - * Creates a list of AirbyteMessageIterators with all the streams selected in a configured catalog - * - * @param config - integration-specific configuration object as json. e.g. { "username": "airbyte", - * "password": "super secure" } - * @param catalog - schema of the incoming messages. - * @param state - state of the incoming messages. - * @return AirbyteMessageIterator with all the streams that are to be synced - * @throws Exception - */ - @Override - public AutoCloseableIterator read(final JsonNode config, - final ConfiguredAirbyteCatalog catalog, - final JsonNode state) - throws Exception { - final AirbyteStateType supportedStateType = getSupportedStateType(config); - final StateManager stateManager = - StateManagerFactory.createStateManager(supportedStateType, - StateGeneratorUtils.deserializeInitialState(state, supportedStateType), catalog); - final Instant emittedAt = Instant.now(); - - final Database database = createDatabase(config); - - logPreSyncDebugData(database, catalog); - - final Map>> fullyQualifiedTableNameToInfo = - discoverWithoutSystemTables(database) - .stream() - .collect(Collectors.toMap(t -> String.format("%s.%s", t.getNameSpace(), t.getName()), - Function - .identity())); - - validateCursorFieldForIncrementalTables(fullyQualifiedTableNameToInfo, catalog, database); - - DbSourceDiscoverUtil.logSourceSchemaChange(fullyQualifiedTableNameToInfo, catalog, this::getAirbyteType); - - final List> incrementalIterators = - getIncrementalIterators(database, catalog, fullyQualifiedTableNameToInfo, stateManager, - emittedAt); - final List> fullRefreshIterators = - getFullRefreshIterators(database, catalog, fullyQualifiedTableNameToInfo, stateManager, - emittedAt); - final List> iteratorList = Stream - .of(incrementalIterators, fullRefreshIterators) - .flatMap(Collection::stream) - .collect(Collectors.toList()); - - return AutoCloseableIterators - .appendOnClose(AutoCloseableIterators.concatWithEagerClose(iteratorList, AirbyteTraceMessageUtility::emitStreamStatusTrace), () -> { - LOGGER.info("Closing database connection pool."); - Exceptions.toRuntime(this::close); - LOGGER.info("Closed database connection pool."); - }); - } - - protected void validateCursorFieldForIncrementalTables( - final Map>> tableNameToTable, - final ConfiguredAirbyteCatalog catalog, - final Database database) - throws SQLException { - final List tablesWithInvalidCursor = new ArrayList<>(); - for (final ConfiguredAirbyteStream airbyteStream : catalog.getStreams()) { - final AirbyteStream stream = airbyteStream.getStream(); - final String fullyQualifiedTableName = DbSourceDiscoverUtil.getFullyQualifiedTableName(stream.getNamespace(), - stream.getName()); - final boolean hasSourceDefinedCursor = - !Objects.isNull(airbyteStream.getStream().getSourceDefinedCursor()) - && airbyteStream.getStream().getSourceDefinedCursor(); - if (!tableNameToTable.containsKey(fullyQualifiedTableName) - || airbyteStream.getSyncMode() != SyncMode.INCREMENTAL || hasSourceDefinedCursor) { - continue; - } - - final TableInfo> table = tableNameToTable - .get(fullyQualifiedTableName); - final Optional cursorField = IncrementalUtils.getCursorFieldOptional(airbyteStream); - if (cursorField.isEmpty()) { - continue; - } - final DataType cursorType = table.getFields().stream() - .filter(info -> info.getName().equals(cursorField.get())) - .map(CommonField::getType) - .findFirst() - .orElseThrow(); - - if (!isCursorType(cursorType)) { - tablesWithInvalidCursor.add( - new InvalidCursorInfo(fullyQualifiedTableName, cursorField.get(), - cursorType.toString(), "Unsupported cursor type")); - continue; - } - - if (!verifyCursorColumnValues(database, stream.getNamespace(), stream.getName(), cursorField.get())) { - tablesWithInvalidCursor.add( - new InvalidCursorInfo(fullyQualifiedTableName, cursorField.get(), - cursorType.toString(), "Cursor column contains NULL value")); - } - } - - if (!tablesWithInvalidCursor.isEmpty()) { - throw new ConfigErrorException( - InvalidCursorInfoUtil.getInvalidCursorConfigMessage(tablesWithInvalidCursor)); - } - } - - /** - * Verify that cursor column allows syncing to go through. - * - * @param database database - * @return true if syncing can go through. false otherwise - * @throws SQLException exception - */ - protected boolean verifyCursorColumnValues(final Database database, final String schema, final String tableName, final String columnName) - throws SQLException { - /* no-op */ - return true; - } - - /** - * Estimates the total volume (rows and bytes) to sync and emits a - * {@link AirbyteEstimateTraceMessage} associated with the full refresh stream. - * - * @param database database - */ - protected void estimateFullRefreshSyncSize(final Database database, - final ConfiguredAirbyteStream configuredAirbyteStream) { - /* no-op */ - } - - protected List>> discoverWithoutSystemTables(final Database database) - throws Exception { - final Set systemNameSpaces = getExcludedInternalNameSpaces(); - final Set systemViews = getExcludedViews(); - final List>> discoveredTables = discoverInternal(database); - return (systemNameSpaces == null || systemNameSpaces.isEmpty() ? discoveredTables - : discoveredTables.stream() - .filter(table -> !systemNameSpaces.contains(table.getNameSpace()) && !systemViews.contains(table.getName())).collect( - Collectors.toList())); - } - - protected List> getFullRefreshIterators( - final Database database, - final ConfiguredAirbyteCatalog catalog, - final Map>> tableNameToTable, - final StateManager stateManager, - final Instant emittedAt) { - return getSelectedIterators( - database, - catalog, - tableNameToTable, - stateManager, - emittedAt, - SyncMode.FULL_REFRESH); - } - - protected List> getIncrementalIterators( - final Database database, - final ConfiguredAirbyteCatalog catalog, - final Map>> tableNameToTable, - final StateManager stateManager, - final Instant emittedAt) { - return getSelectedIterators( - database, - catalog, - tableNameToTable, - stateManager, - emittedAt, - SyncMode.INCREMENTAL); - } - - /** - * Creates a list of read iterators for each stream within an ConfiguredAirbyteCatalog - * - * @param database Source Database - * @param catalog List of streams (e.g. database tables or API endpoints) with settings on sync mode - * @param tableNameToTable Mapping of table name to table - * @param stateManager Manager used to track the state of data synced by the connector - * @param emittedAt Time when data was emitted from the Source database - * @param syncMode the sync mode for which we want to grab the required iterators - * @return List of AirbyteMessageIterators containing all iterators for a catalog - */ - private List> getSelectedIterators( - final Database database, - final ConfiguredAirbyteCatalog catalog, - final Map>> tableNameToTable, - final StateManager stateManager, - final Instant emittedAt, - final SyncMode syncMode) { - final List> iteratorList = new ArrayList<>(); - for (final ConfiguredAirbyteStream airbyteStream : catalog.getStreams()) { - if (airbyteStream.getSyncMode().equals(syncMode)) { - final AirbyteStream stream = airbyteStream.getStream(); - final String fullyQualifiedTableName = DbSourceDiscoverUtil.getFullyQualifiedTableName(stream.getNamespace(), - stream.getName()); - if (!tableNameToTable.containsKey(fullyQualifiedTableName)) { - LOGGER - .info("Skipping stream {} because it is not in the source", fullyQualifiedTableName); - continue; - } - - final TableInfo> table = tableNameToTable - .get(fullyQualifiedTableName); - final AutoCloseableIterator tableReadIterator = createReadIterator( - database, - airbyteStream, - table, - stateManager, - emittedAt); - iteratorList.add(tableReadIterator); - } - } - - return iteratorList; - } - - /** - * ReadIterator is used to retrieve records from a source connector - * - * @param database Source Database - * @param airbyteStream represents an ingestion source (e.g. API endpoint or database table) - * @param table information in tabular format - * @param stateManager Manager used to track the state of data synced by the connector - * @param emittedAt Time when data was emitted from the Source database - * @return - */ - private AutoCloseableIterator createReadIterator(final Database database, - final ConfiguredAirbyteStream airbyteStream, - final TableInfo> table, - final StateManager stateManager, - final Instant emittedAt) { - final String streamName = airbyteStream.getStream().getName(); - final String namespace = airbyteStream.getStream().getNamespace(); - final AirbyteStreamNameNamespacePair pair = new AirbyteStreamNameNamespacePair(streamName, - namespace); - final Set selectedFieldsInCatalog = CatalogHelpers.getTopLevelFieldNames(airbyteStream); - final List selectedDatabaseFields = table.getFields() - .stream() - .map(CommonField::getName) - .filter(selectedFieldsInCatalog::contains) - .collect(Collectors.toList()); - - final AutoCloseableIterator iterator; - // checks for which sync mode we're using based on the configured airbytestream - // this is where the bifurcation between full refresh and incremental - if (airbyteStream.getSyncMode() == SyncMode.INCREMENTAL) { - final String cursorField = IncrementalUtils.getCursorField(airbyteStream); - final Optional cursorInfo = stateManager.getCursorInfo(pair); - - final AutoCloseableIterator airbyteMessageIterator; - if (cursorInfo.map(CursorInfo::getCursor).isPresent()) { - airbyteMessageIterator = getIncrementalStream( - database, - airbyteStream, - selectedDatabaseFields, - table, - cursorInfo.get(), - emittedAt); - } else { - // if no cursor is present then this is the first read for is the same as doing a full refresh read. - estimateFullRefreshSyncSize(database, airbyteStream); - airbyteMessageIterator = getFullRefreshStream(database, streamName, namespace, - selectedDatabaseFields, table, emittedAt, SyncMode.INCREMENTAL, Optional.of(cursorField)); - } - - final JsonSchemaPrimitive cursorType = IncrementalUtils.getCursorType(airbyteStream, - cursorField); - - iterator = AutoCloseableIterators.transform( - autoCloseableIterator -> new StateDecoratingIterator( - autoCloseableIterator, - stateManager, - pair, - cursorField, - cursorInfo.map(CursorInfo::getCursor).orElse(null), - cursorType, - getStateEmissionFrequency()), - airbyteMessageIterator, - AirbyteStreamUtils.convertFromNameAndNamespace(pair.getName(), pair.getNamespace())); - } else if (airbyteStream.getSyncMode() == SyncMode.FULL_REFRESH) { - estimateFullRefreshSyncSize(database, airbyteStream); - iterator = getFullRefreshStream(database, streamName, namespace, selectedDatabaseFields, - table, emittedAt, SyncMode.FULL_REFRESH, Optional.empty()); - } else if (airbyteStream.getSyncMode() == null) { - throw new IllegalArgumentException( - String.format("%s requires a source sync mode", this.getClass())); - } else { - throw new IllegalArgumentException( - String.format("%s does not support sync mode: %s.", this.getClass(), - airbyteStream.getSyncMode())); - } - - final AtomicLong recordCount = new AtomicLong(); - return AutoCloseableIterators.transform(iterator, - AirbyteStreamUtils.convertFromNameAndNamespace(pair.getName(), pair.getNamespace()), - r -> { - final long count = recordCount.incrementAndGet(); - if (count % 10000 == 0) { - LOGGER.info("Reading stream {}. Records read: {}", streamName, count); - } - return r; - }); - } - - /** - * @param database Source Database - * @param airbyteStream represents an ingestion source (e.g. API endpoint or database table) - * @param selectedDatabaseFields subset of database fields selected for replication - * @param table information in tabular format - * @param cursorInfo state of where to start the sync from - * @param emittedAt Time when data was emitted from the Source database - * @return AirbyteMessage Iterator that - */ - private AutoCloseableIterator getIncrementalStream(final Database database, - final ConfiguredAirbyteStream airbyteStream, - final List selectedDatabaseFields, - final TableInfo> table, - final CursorInfo cursorInfo, - final Instant emittedAt) { - final String streamName = airbyteStream.getStream().getName(); - final String namespace = airbyteStream.getStream().getNamespace(); - final String cursorField = IncrementalUtils.getCursorField(airbyteStream); - final DataType cursorType = table.getFields().stream() - .filter(info -> info.getName().equals(cursorField)) - .map(CommonField::getType) - .findFirst() - .orElseThrow(); - - Preconditions.checkState( - table.getFields().stream().anyMatch(f -> f.getName().equals(cursorField)), - String.format("Could not find cursor field %s in table %s", cursorField, table.getName())); - - final AutoCloseableIterator queryIterator = queryTableIncremental( - database, - selectedDatabaseFields, - table.getNameSpace(), - table.getName(), - cursorInfo, - cursorType); - - return getMessageIterator(queryIterator, streamName, namespace, emittedAt.toEpochMilli()); - } - - /** - * Creates a AirbyteMessageIterator that contains all records for a database source connection - * - * @param database Source Database - * @param streamName name of an individual stream in which a stream represents a source (e.g. API - * endpoint or database table) - * @param namespace Namespace of the database (e.g. public) - * @param selectedDatabaseFields List of all interested database column names - * @param table information in tabular format - * @param emittedAt Time when data was emitted from the Source database - * @param syncMode The sync mode that this full refresh stream should be associated with. - * @return AirbyteMessageIterator with all records for a database source - */ - private AutoCloseableIterator getFullRefreshStream(final Database database, - final String streamName, - final String namespace, - final List selectedDatabaseFields, - final TableInfo> table, - final Instant emittedAt, - final SyncMode syncMode, - final Optional cursorField) { - final AutoCloseableIterator queryStream = - queryTableFullRefresh(database, selectedDatabaseFields, table.getNameSpace(), - table.getName(), syncMode, cursorField); - return getMessageIterator(queryStream, streamName, namespace, emittedAt.toEpochMilli()); - } - - private static AutoCloseableIterator getMessageIterator( - final AutoCloseableIterator recordIterator, - final String streamName, - final String namespace, - final long emittedAt) { - return AutoCloseableIterators.transform(recordIterator, - new io.airbyte.protocol.models.AirbyteStreamNameNamespacePair(streamName, namespace), - r -> new AirbyteMessage() - .withType(Type.RECORD) - .withRecord(new AirbyteRecordMessage() - .withStream(streamName) - .withNamespace(namespace) - .withEmittedAt(emittedAt) - .withData(r))); - } - - /** - * @param database - The database where from privileges for tables will be consumed - * @param schema - The schema where from privileges for tables will be consumed - * @return Set with privileges for tables for current DB-session user The method is responsible for - * SELECT-ing the table with privileges. In some cases such SELECT doesn't require (e.g. in - * Oracle DB - the schema is the user, you cannot REVOKE a privilege on a table from its - * owner). - */ - protected Set getPrivilegesTableForCurrentUser(final JdbcDatabase database, - final String schema) - throws SQLException { - return Collections.emptySet(); - } - - /** - * Map a database implementation-specific configuration to json object that adheres to the database - * config spec. See resources/spec.json. - * - * @param config database implementation-specific configuration. - * @return database spec config - */ - @Trace(operationName = DISCOVER_TRACE_OPERATION_NAME) - public abstract JsonNode toDatabaseConfig(JsonNode config); - - /** - * Creates a database instance using the database spec config. - * - * @param config database spec config - * @return database instance - * @throws Exception might throw an error during connection to database - */ - @Trace(operationName = DISCOVER_TRACE_OPERATION_NAME) - protected abstract Database createDatabase(JsonNode config) throws Exception; - - /** - * Gets and logs relevant and useful database metadata such as DB product/version, index names and - * definition. Called before syncing data. Any logged information should be scoped to the configured - * catalog and database. - * - * @param database given database instance. - * @param catalog configured catalog. - */ - protected void logPreSyncDebugData(final Database database, final ConfiguredAirbyteCatalog catalog) throws Exception {} - - /** - * Configures a list of operations that can be used to check the connection to the source. - * - * @return list of consumers that run queries for the check command. - */ - protected abstract List> getCheckOperations(JsonNode config) - throws Exception; - - /** - * Map source types to Airbyte types - * - * @param columnType source data type - * @return airbyte data type - */ - protected abstract JsonSchemaType getAirbyteType(DataType columnType); - - /** - * Get list of system namespaces(schemas) in order to exclude them from the `discover` result list. - * - * @return set of system namespaces(schemas) to be excluded - */ - protected abstract Set getExcludedInternalNameSpaces(); - - /** - * Get list of system views in order to exclude them from the `discover` result list. - * - * @return set of views to be excluded - */ - protected Set getExcludedViews() { - return Collections.emptySet(); - }; - - /** - * Discover all available tables in the source database. - * - * @param database source database - * @return list of the source tables - * @throws Exception access to the database might lead to an exceptions. - */ - @Trace(operationName = DISCOVER_TRACE_OPERATION_NAME) - protected abstract List>> discoverInternal( - final Database database) - throws Exception; - - /** - * Discovers all available tables within a schema in the source database. - * - * @param database - source database - * @param schema - source schema - * @return list of source tables - * @throws Exception - access to the database might lead to exceptions. - */ - protected abstract List>> discoverInternal( - final Database database, - String schema) - throws Exception; - - /** - * Discover Primary keys for each table and @return a map of namespace.table name to their - * associated list of primary key fields. - * - * @param database source database - * @param tableInfos list of tables - * @return map of namespace.table and primary key fields. - */ - protected abstract Map> discoverPrimaryKeys(Database database, - List>> tableInfos); - - /** - * Returns quote symbol of the database - * - * @return quote symbol - */ - protected abstract String getQuoteString(); - - /** - * Read all data from a table. - * - * @param database source database - * @param columnNames interested column names - * @param schemaName table namespace - * @param tableName target table - * @param syncMode The sync mode that this full refresh stream should be associated with. - * @return iterator with read data - */ - protected abstract AutoCloseableIterator queryTableFullRefresh(final Database database, - final List columnNames, - final String schemaName, - final String tableName, - final SyncMode syncMode, - final Optional cursorField); - - /** - * Read incremental data from a table. Incremental read should return only records where cursor - * column value is bigger than cursor. Note that if the connector needs to emit intermediate state - * (i.e. {@link AbstractDbSource#getStateEmissionFrequency} > 0), the incremental query must be - * sorted by the cursor field. - * - * @return iterator with read data - */ - protected abstract AutoCloseableIterator queryTableIncremental(Database database, - List columnNames, - String schemaName, - String tableName, - CursorInfo cursorInfo, - DataType cursorFieldType); - - /** - * When larger than 0, the incremental iterator will emit intermediate state for every N records. - * Please note that if intermediate state emission is enabled, the incremental query must be ordered - * by the cursor field. - */ - protected int getStateEmissionFrequency() { - return 0; - } - - /** - * @return list of fields that could be used as cursors - */ - protected abstract boolean isCursorType(DataType type); - - /** - * Returns the {@link AirbyteStateType} supported by this connector. - * - * @param config The connector configuration. - * @return A {@link AirbyteStateType} representing the state supported by this connector. - */ - protected AirbyteStateType getSupportedStateType(final JsonNode config) { - return AirbyteStateType.STREAM; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/CdcStateManager.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/CdcStateManager.java deleted file mode 100644 index c4532dcd02708..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/CdcStateManager.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.relationaldb; - -import io.airbyte.cdk.integrations.source.relationaldb.models.CdcState; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import java.util.Collections; -import java.util.Set; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class CdcStateManager { - - private static final Logger LOGGER = LoggerFactory.getLogger(CdcStateManager.class); - - private final CdcState initialState; - private final Set initialStreamsSynced; - private final AirbyteStateMessage rawStateMessage; - private CdcState currentState; - - public CdcStateManager(final CdcState serialized, - final Set initialStreamsSynced, - final AirbyteStateMessage stateMessage) { - this.initialState = serialized; - this.currentState = serialized; - this.initialStreamsSynced = initialStreamsSynced; - - this.rawStateMessage = stateMessage; - LOGGER.info("Initialized CDC state"); - } - - public void setCdcState(final CdcState state) { - this.currentState = state; - } - - public CdcState getCdcState() { - return currentState != null ? Jsons.clone(currentState) : null; - } - - public AirbyteStateMessage getRawStateMessage() { - return rawStateMessage; - } - - public Set getInitialStreamsSynced() { - return initialStreamsSynced != null ? Collections.unmodifiableSet(initialStreamsSynced) : null; - } - - @Override - public String toString() { - return "CdcStateManager{" + - "initialState=" + initialState + - ", currentState=" + currentState + - '}'; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/CursorInfo.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/CursorInfo.java deleted file mode 100644 index cf92ed8668d43..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/CursorInfo.java +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.relationaldb; - -import java.util.Objects; - -public class CursorInfo { - - private final String originalCursorField; - private final String originalCursor; - private final long originalCursorRecordCount; - - private final String cursorField; - private String cursor; - private long cursorRecordCount; - - public CursorInfo(final String originalCursorField, - final String originalCursor, - final String cursorField, - final String cursor) { - this(originalCursorField, originalCursor, 0L, cursorField, cursor, 0L); - } - - public CursorInfo(final String originalCursorField, - final String originalCursor, - final long originalCursorRecordCount, - final String cursorField, - final String cursor, - final long cursorRecordCount) { - this.originalCursorField = originalCursorField; - this.originalCursor = originalCursor; - this.originalCursorRecordCount = originalCursorRecordCount; - this.cursorField = cursorField; - this.cursor = cursor; - this.cursorRecordCount = cursorRecordCount; - } - - public String getOriginalCursorField() { - return originalCursorField; - } - - public String getOriginalCursor() { - return originalCursor; - } - - public long getOriginalCursorRecordCount() { - return originalCursorRecordCount; - } - - public String getCursorField() { - return cursorField; - } - - public String getCursor() { - return cursor; - } - - public long getCursorRecordCount() { - return cursorRecordCount; - } - - @SuppressWarnings("UnusedReturnValue") - public CursorInfo setCursor(final String cursor) { - this.cursor = cursor; - return this; - } - - public CursorInfo setCursorRecordCount(final long cursorRecordCount) { - this.cursorRecordCount = cursorRecordCount; - return this; - } - - @Override - public boolean equals(final Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - final CursorInfo that = (CursorInfo) o; - return Objects.equals(originalCursorField, that.originalCursorField) - && Objects.equals(originalCursor, that.originalCursor) - && Objects.equals(originalCursorRecordCount, that.originalCursorRecordCount) - && Objects.equals(cursorField, that.cursorField) - && Objects.equals(cursor, that.cursor) - && Objects.equals(cursorRecordCount, that.cursorRecordCount); - } - - @Override - public int hashCode() { - return Objects.hash(originalCursorField, originalCursor, originalCursorRecordCount, cursorField, cursor, cursorRecordCount); - } - - @Override - public String toString() { - return "CursorInfo{" + - "originalCursorField='" + originalCursorField + '\'' + - ", originalCursor='" + originalCursor + '\'' + - ", originalCursorRecordCount='" + originalCursorRecordCount + '\'' + - ", cursorField='" + cursorField + '\'' + - ", cursor='" + cursor + '\'' + - ", cursorRecordCount='" + cursorRecordCount + '\'' + - '}'; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/DbSourceDiscoverUtil.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/DbSourceDiscoverUtil.java deleted file mode 100644 index 9377190b75957..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/DbSourceDiscoverUtil.java +++ /dev/null @@ -1,171 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.relationaldb; - -import static io.airbyte.protocol.models.v0.CatalogHelpers.fieldsToJsonSchema; -import static java.util.stream.Collectors.toList; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.Lists; -import io.airbyte.protocol.models.CommonField; -import io.airbyte.protocol.models.Field; -import io.airbyte.protocol.models.JsonSchemaType; -import io.airbyte.protocol.models.v0.AirbyteCatalog; -import io.airbyte.protocol.models.v0.AirbyteStream; -import io.airbyte.protocol.models.v0.CatalogHelpers; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.v0.SyncMode; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.function.Function; -import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Contains utilities and helper classes for discovering schemas in database sources. - */ -public class DbSourceDiscoverUtil { - - private static final Logger LOGGER = LoggerFactory.getLogger(DbSourceDiscoverUtil.class); - private static final List AIRBYTE_METADATA = Arrays.asList("_ab_cdc_lsn", - "_ab_cdc_updated_at", - "_ab_cdc_deleted_at"); - - /* - * This method logs schema drift between source table and the catalog. This can happen if (i) - * underlying table schema changed between syncs (ii) The source connector's mapping of datatypes to - * Airbyte types changed between runs - */ - public static void logSourceSchemaChange(final Map>> fullyQualifiedTableNameToInfo, - final ConfiguredAirbyteCatalog catalog, - final Function airbyteTypeConverter) { - for (final ConfiguredAirbyteStream airbyteStream : catalog.getStreams()) { - final AirbyteStream stream = airbyteStream.getStream(); - final String fullyQualifiedTableName = DbSourceDiscoverUtil.getFullyQualifiedTableName(stream.getNamespace(), - stream.getName()); - if (!fullyQualifiedTableNameToInfo.containsKey(fullyQualifiedTableName)) { - continue; - } - final TableInfo> table = fullyQualifiedTableNameToInfo.get(fullyQualifiedTableName); - final List fields = table.getFields() - .stream() - .map(commonField -> toField(commonField, airbyteTypeConverter)) - .distinct() - .collect(toList()); - final JsonNode currentJsonSchema = fieldsToJsonSchema(fields); - final JsonNode catalogSchema = stream.getJsonSchema(); - final JsonNode currentSchemaProperties = currentJsonSchema.get("properties"); - final JsonNode catalogProperties = catalogSchema.get("properties"); - final List mismatchedFields = new ArrayList<>(); - catalogProperties.fieldNames().forEachRemaining(fieldName -> { - // Ignoring metadata fields since those are automatically added onto the catalog schema by Airbyte - // and don't exist in the source schema. They should not be considered a change - if (AIRBYTE_METADATA.contains(fieldName)) { - return; - } - - if (!currentSchemaProperties.has(fieldName) || - !currentSchemaProperties.get(fieldName).equals(catalogProperties.get(fieldName))) { - mismatchedFields.add(fieldName); - } - }); - - if (!mismatchedFields.isEmpty()) { - LOGGER.warn( - "Source schema changed for table {}! Potential mismatches: {}. Actual schema: {}. Catalog schema: {}", - fullyQualifiedTableName, - String.join(", ", mismatchedFields.toString()), - currentJsonSchema, - catalogSchema); - } - } - } - - public static AirbyteCatalog convertTableInfosToAirbyteCatalog(final List>> tableInfos, - final Map> fullyQualifiedTableNameToPrimaryKeys, - final Function airbyteTypeConverter) { - final List> tableInfoFieldList = tableInfos.stream() - .map(t -> { - // some databases return multiple copies of the same record for a column (e.g. redshift) because - // they have at least once delivery guarantees. we want to dedupe these, but first we check that the - // records are actually the same and provide a good error message if they are not. - assertColumnsWithSameNameAreSame(t.getNameSpace(), t.getName(), t.getFields()); - final List fields = t.getFields() - .stream() - .map(commonField -> toField(commonField, airbyteTypeConverter)) - .distinct() - .collect(toList()); - final String fullyQualifiedTableName = getFullyQualifiedTableName(t.getNameSpace(), - t.getName()); - final List primaryKeys = fullyQualifiedTableNameToPrimaryKeys.getOrDefault( - fullyQualifiedTableName, Collections - .emptyList()); - return TableInfo.builder().nameSpace(t.getNameSpace()).name(t.getName()) - .fields(fields).primaryKeys(primaryKeys) - .cursorFields(t.getCursorFields()) - .build(); - }) - .collect(toList()); - - final List streams = tableInfoFieldList.stream() - .map(tableInfo -> { - final var primaryKeys = tableInfo.getPrimaryKeys().stream() - .filter(Objects::nonNull) - .map(Collections::singletonList) - .collect(toList()); - - return CatalogHelpers - .createAirbyteStream(tableInfo.getName(), tableInfo.getNameSpace(), - tableInfo.getFields()) - .withSupportedSyncModes( - tableInfo.getCursorFields() != null && tableInfo.getCursorFields().isEmpty() - ? Lists.newArrayList(SyncMode.FULL_REFRESH) - : Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) - .withSourceDefinedPrimaryKey(primaryKeys); - }) - .collect(toList()); - return new AirbyteCatalog().withStreams(streams); - } - - public static String getFullyQualifiedTableName(final String nameSpace, final String tableName) { - return nameSpace != null ? nameSpace + "." + tableName : tableName; - } - - private static Field toField(final CommonField commonField, final Function airbyteTypeConverter) { - if (airbyteTypeConverter.apply(commonField.getType()) == JsonSchemaType.OBJECT && commonField.getProperties() != null - && !commonField.getProperties().isEmpty()) { - final var properties = commonField.getProperties().stream().map(commField -> toField(commField, airbyteTypeConverter)).toList(); - return Field.of(commonField.getName(), airbyteTypeConverter.apply(commonField.getType()), properties); - } else { - return Field.of(commonField.getName(), airbyteTypeConverter.apply(commonField.getType())); - } - } - - private static void assertColumnsWithSameNameAreSame(final String nameSpace, - final String tableName, - final List> columns) { - columns.stream() - .collect(Collectors.groupingBy(CommonField::getName)) - .values() - .forEach(columnsWithSameName -> { - final CommonField comparisonColumn = columnsWithSameName.get(0); - columnsWithSameName.forEach(column -> { - if (!column.equals(comparisonColumn)) { - throw new RuntimeException( - String.format( - "Found multiple columns with same name: %s in table: %s.%s but the columns are not the same. columns: %s", - comparisonColumn.getName(), nameSpace, tableName, columns)); - } - }); - }); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/InvalidCursorInfoUtil.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/InvalidCursorInfoUtil.java deleted file mode 100644 index 650b2a60a0acb..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/InvalidCursorInfoUtil.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.relationaldb; - -import java.util.List; -import java.util.stream.Collectors; - -public class InvalidCursorInfoUtil { - - public static String getInvalidCursorConfigMessage(final List tablesWithInvalidCursor) { - return "The following tables have invalid columns selected as cursor, please select a column with a well-defined ordering with no null values as a cursor. " - + tablesWithInvalidCursor.stream().map(InvalidCursorInfo::toString) - .collect(Collectors.joining(",")); - } - - public record InvalidCursorInfo(String tableName, String cursorColumnName, String cursorSqlType, String cause) { - - @Override - public String toString() { - return "{" + - "tableName='" + tableName + '\'' + - ", cursorColumnName='" + cursorColumnName + '\'' + - ", cursorSqlType=" + cursorSqlType + - ", cause=" + cause + - '}'; - } - - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/RelationalDbQueryUtils.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/RelationalDbQueryUtils.java deleted file mode 100644 index fd66d1a43b35e..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/RelationalDbQueryUtils.java +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.relationaldb; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.db.SqlDatabase; -import io.airbyte.commons.stream.AirbyteStreamUtils; -import io.airbyte.commons.util.AutoCloseableIterator; -import io.airbyte.commons.util.AutoCloseableIterators; -import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import java.util.List; -import java.util.StringJoiner; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Utility class for methods to query a relational db. - */ -public class RelationalDbQueryUtils { - - private static final Logger LOGGER = LoggerFactory.getLogger(RelationalDbQueryUtils.class); - - public record TableSizeInfo(Long tableSize, Long avgRowLength) {} - - public static String getIdentifierWithQuoting(final String identifier, final String quoteString) { - // double-quoted values within a database name or column name should be wrapped with extra - // quoteString - if (identifier.startsWith(quoteString) && identifier.endsWith(quoteString)) { - return quoteString + quoteString + identifier + quoteString + quoteString; - } else { - return quoteString + identifier + quoteString; - } - } - - public static String enquoteIdentifierList(final List identifiers, final String quoteString) { - final StringJoiner joiner = new StringJoiner(","); - for (final String identifier : identifiers) { - joiner.add(getIdentifierWithQuoting(identifier, quoteString)); - } - return joiner.toString(); - } - - /** - * @return fully qualified table name with the schema (if a schema exists) in quotes. - */ - public static String getFullyQualifiedTableNameWithQuoting(final String nameSpace, final String tableName, final String quoteString) { - return (nameSpace == null || nameSpace.isEmpty() ? getIdentifierWithQuoting(tableName, quoteString) - : getIdentifierWithQuoting(nameSpace, quoteString) + "." + getIdentifierWithQuoting(tableName, quoteString)); - } - - /** - * @return fully qualified table name with the schema (if a schema exists) without quotes. - */ - public static String getFullyQualifiedTableName(final String schemaName, final String tableName) { - return schemaName != null ? schemaName + "." + tableName : tableName; - } - - /** - * @return the input identifier with quotes. - */ - public static String enquoteIdentifier(final String identifier, final String quoteString) { - return quoteString + identifier + quoteString; - } - - public static AutoCloseableIterator queryTable(final Database database, - final String sqlQuery, - final String tableName, - final String schemaName) { - final AirbyteStreamNameNamespacePair airbyteStreamNameNamespacePair = AirbyteStreamUtils.convertFromNameAndNamespace(tableName, schemaName); - return AutoCloseableIterators.lazyIterator(() -> { - try { - LOGGER.info("Queueing query: {}", sqlQuery); - final Stream stream = database.unsafeQuery(sqlQuery); - return AutoCloseableIterators.fromStream(stream, airbyteStreamNameNamespacePair); - } catch (final Exception e) { - throw new RuntimeException(e); - } - }, airbyteStreamNameNamespacePair); - } - - public static void logStreamSyncStatus(final List streams, final String syncType) { - if (streams.isEmpty()) { - LOGGER.info("No Streams will be synced via {}.", syncType); - } else { - LOGGER.info("Streams to be synced via {} : {}", syncType, streams.size()); - LOGGER.info("Streams: {}", prettyPrintConfiguredAirbyteStreamList(streams)); - } - } - - public static String prettyPrintConfiguredAirbyteStreamList(final List streamList) { - return streamList.stream().map(s -> "%s.%s".formatted(s.getStream().getNamespace(), s.getStream().getName())).collect(Collectors.joining(", ")); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/RelationalDbReadUtil.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/RelationalDbReadUtil.java deleted file mode 100644 index 9e1b8464e06ab..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/RelationalDbReadUtil.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.relationaldb; - -import com.google.common.collect.Sets; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.v0.SyncMode; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import java.util.stream.Collectors; - -public class RelationalDbReadUtil { - - public static List identifyStreamsToSnapshot(final ConfiguredAirbyteCatalog catalog, - final Set alreadySyncedStreams) { - final Set allStreams = AirbyteStreamNameNamespacePair.fromConfiguredCatalog(catalog); - final Set newlyAddedStreams = new HashSet<>(Sets.difference(allStreams, alreadySyncedStreams)); - return catalog.getStreams().stream() - .filter(c -> c.getSyncMode() == SyncMode.INCREMENTAL) - .filter(stream -> newlyAddedStreams.contains(AirbyteStreamNameNamespacePair.fromAirbyteStream(stream.getStream()))) - .map(Jsons::clone) - .collect(Collectors.toList()); - } - - public static List identifyStreamsForCursorBased(final ConfiguredAirbyteCatalog catalog, - final List streamsForInitialLoad) { - - final Set initialLoadStreamsNamespacePairs = - streamsForInitialLoad.stream().map(stream -> AirbyteStreamNameNamespacePair.fromAirbyteStream(stream.getStream())) - .collect( - Collectors.toSet()); - return catalog.getStreams().stream() - .filter(c -> c.getSyncMode() == SyncMode.INCREMENTAL) - .filter(stream -> !initialLoadStreamsNamespacePairs.contains(AirbyteStreamNameNamespacePair.fromAirbyteStream(stream.getStream()))) - .map(Jsons::clone) - .collect(Collectors.toList()); - } - - public static AirbyteStreamNameNamespacePair convertNameNamespacePairFromV0(final io.airbyte.protocol.models.AirbyteStreamNameNamespacePair v1NameNamespacePair) { - return new AirbyteStreamNameNamespacePair(v1NameNamespacePair.getName(), v1NameNamespacePair.getNamespace()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/StateDecoratingIterator.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/StateDecoratingIterator.java deleted file mode 100644 index 919d38b3bb507..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/StateDecoratingIterator.java +++ /dev/null @@ -1,240 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.relationaldb; - -import com.google.common.collect.AbstractIterator; -import io.airbyte.cdk.db.IncrementalUtils; -import io.airbyte.cdk.integrations.source.relationaldb.state.StateManager; -import io.airbyte.protocol.models.JsonSchemaPrimitiveUtil.JsonSchemaPrimitive; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteMessage.Type; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import io.airbyte.protocol.models.v0.AirbyteStateStats; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import java.util.Iterator; -import java.util.Objects; -import java.util.Optional; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class StateDecoratingIterator extends AbstractIterator implements Iterator { - - private static final Logger LOGGER = LoggerFactory.getLogger(StateDecoratingIterator.class); - - private final Iterator messageIterator; - private final StateManager stateManager; - private final AirbyteStreamNameNamespacePair pair; - private final String cursorField; - private final JsonSchemaPrimitive cursorType; - - private final String initialCursor; - private String currentMaxCursor; - private long currentMaxCursorRecordCount = 0L; - private boolean hasEmittedFinalState; - - /** - * These parameters are for intermediate state message emission. We can emit an intermediate state - * when the following two conditions are met. - *

    - * 1. The records are sorted by the cursor field. This is true when {@code stateEmissionFrequency} > - * 0. This logic is guaranteed in {@code AbstractJdbcSource#queryTableIncremental}, in which an - * "ORDER BY" clause is appended to the SQL query if {@code stateEmissionFrequency} > 0. - *

    - * 2. There is a cursor value that is ready for emission. A cursor value is "ready" if there is no - * more record with the same value. We cannot emit a cursor at will, because there may be multiple - * records with the same cursor value. If we emit a cursor ignoring this condition, should the sync - * fail right after the emission, the next sync may skip some records with the same cursor value due - * to "WHERE cursor_field > cursor" in {@code AbstractJdbcSource#queryTableIncremental}. - *

    - * The {@code intermediateStateMessage} is set to the latest state message that is ready for - * emission. For every {@code stateEmissionFrequency} messages, {@code emitIntermediateState} is set - * to true and the latest "ready" state will be emitted in the next {@code computeNext} call. - */ - private final int stateEmissionFrequency; - private int totalRecordCount = 0; - // In between each state message, recordCountInStateMessage will be reset to 0. - private int recordCountInStateMessage = 0; - private boolean emitIntermediateState = false; - private AirbyteMessage intermediateStateMessage = null; - private boolean hasCaughtException = false; - - /** - * @param stateManager Manager that maintains connector state - * @param pair Stream Name and Namespace (e.g. public.users) - * @param cursorField Path to the comparator field used to track the records read so far - * @param initialCursor name of the initial cursor column - * @param cursorType ENUM type of primitive values that can be used as a cursor for checkpointing - * @param stateEmissionFrequency If larger than 0, the records are sorted by the cursor field, and - * intermediate states will be emitted for every {@code stateEmissionFrequency} records. The - * order of the records is guaranteed in {@code AbstractJdbcSource#queryTableIncremental}, in - * which an "ORDER BY" clause is appended to the SQL query if {@code stateEmissionFrequency} - * > 0. - */ - public StateDecoratingIterator(final Iterator messageIterator, - final StateManager stateManager, - final AirbyteStreamNameNamespacePair pair, - final String cursorField, - final String initialCursor, - final JsonSchemaPrimitive cursorType, - final int stateEmissionFrequency) { - this.messageIterator = messageIterator; - this.stateManager = stateManager; - this.pair = pair; - this.cursorField = cursorField; - this.cursorType = cursorType; - this.initialCursor = initialCursor; - this.currentMaxCursor = initialCursor; - this.stateEmissionFrequency = stateEmissionFrequency; - } - - private String getCursorCandidate(final AirbyteMessage message) { - final String cursorCandidate = message.getRecord().getData().get(cursorField).asText(); - return (cursorCandidate != null ? replaceNull(cursorCandidate) : null); - } - - private String replaceNull(final String cursorCandidate) { - if (cursorCandidate.contains("\u0000")) { - return cursorCandidate.replaceAll("\u0000", ""); - } - return cursorCandidate; - } - - /** - * Computes the next record retrieved from Source stream. Emits StateMessage containing data of the - * record that has been read so far - * - *

    - * If this method throws an exception, it will propagate outward to the {@code hasNext} or - * {@code next} invocation that invoked this method. Any further attempts to use the iterator will - * result in an {@link IllegalStateException}. - *

    - * - * @return {@link AirbyteStateMessage} containing information of the records read so far - */ - @Override - protected AirbyteMessage computeNext() { - if (hasCaughtException) { - // Mark iterator as done since the next call to messageIterator will result in an - // IllegalArgumentException and resets exception caught state. - // This occurs when the previous iteration emitted state so this iteration cycle will indicate - // iteration is complete - hasCaughtException = false; - return endOfData(); - } - - if (messageIterator.hasNext()) { - Optional optionalIntermediateMessage = getIntermediateMessage(); - if (optionalIntermediateMessage.isPresent()) { - return optionalIntermediateMessage.get(); - } - - totalRecordCount++; - recordCountInStateMessage++; - // Use try-catch to catch Exception that could occur when connection to the database fails - try { - final AirbyteMessage message = messageIterator.next(); - if (message.getRecord().getData().hasNonNull(cursorField)) { - final String cursorCandidate = getCursorCandidate(message); - final int cursorComparison = IncrementalUtils.compareCursors(currentMaxCursor, cursorCandidate, cursorType); - if (cursorComparison < 0) { - // Update the current max cursor only when current max cursor < cursor candidate from the message - if (stateEmissionFrequency > 0 && !Objects.equals(currentMaxCursor, initialCursor) && messageIterator.hasNext()) { - // Only create an intermediate state when it is not the first or last record message. - // The last state message will be processed seperately. - intermediateStateMessage = createStateMessage(false, recordCountInStateMessage); - } - currentMaxCursor = cursorCandidate; - currentMaxCursorRecordCount = 1L; - } else if (cursorComparison == 0) { - currentMaxCursorRecordCount++; - } else if (cursorComparison > 0 && stateEmissionFrequency > 0) { - LOGGER.warn("Intermediate state emission feature requires records to be processed in order according to the cursor value. Otherwise, " - + "data loss can occur."); - } - } - - if (stateEmissionFrequency > 0 && totalRecordCount % stateEmissionFrequency == 0) { - emitIntermediateState = true; - } - - return message; - } catch (final Exception e) { - emitIntermediateState = true; - hasCaughtException = true; - LOGGER.error("Message iterator failed to read next record.", e); - optionalIntermediateMessage = getIntermediateMessage(); - return optionalIntermediateMessage.orElse(endOfData()); - } - } else if (!hasEmittedFinalState) { - return createStateMessage(true, recordCountInStateMessage); - } else { - return endOfData(); - } - } - - /** - * Returns AirbyteStateMessage when in a ready state, a ready state means that it has satifies the - * conditions of: - *

    - * cursorField has changed (e.g. 08-22-2022 -> 08-23-2022) and there have been at least - * stateEmissionFrequency number of records since the last emission - *

    - * - * @return AirbyteStateMessage if one exists, otherwise Optional indicating state was not ready to - * be emitted - */ - protected final Optional getIntermediateMessage() { - if (emitIntermediateState && intermediateStateMessage != null) { - final AirbyteMessage message = intermediateStateMessage; - if (message.getState() != null) { - message.getState().setSourceStats(new AirbyteStateStats().withRecordCount((double) recordCountInStateMessage)); - } - - intermediateStateMessage = null; - recordCountInStateMessage = 0; - emitIntermediateState = false; - return Optional.of(message); - } - return Optional.empty(); - } - - /** - * Creates AirbyteStateMessage while updating the cursor used to checkpoint the state of records - * read up so far - * - * @param isFinalState marker for if the final state of the iterator has been reached - * @param recordCount count of read messages - * @return AirbyteMessage which includes information on state of records read so far - */ - public AirbyteMessage createStateMessage(final boolean isFinalState, final int recordCount) { - final AirbyteStateMessage stateMessage = stateManager.updateAndEmit(pair, currentMaxCursor, currentMaxCursorRecordCount); - final Optional cursorInfo = stateManager.getCursorInfo(pair); - - // logging once every 100 messages to reduce log verbosity - if (recordCount % 100 == 0) { - LOGGER.info("State report for stream {} - original: {} = {} (count {}) -> latest: {} = {} (count {})", - pair, - cursorInfo.map(CursorInfo::getOriginalCursorField).orElse(null), - cursorInfo.map(CursorInfo::getOriginalCursor).orElse(null), - cursorInfo.map(CursorInfo::getOriginalCursorRecordCount).orElse(null), - cursorInfo.map(CursorInfo::getCursorField).orElse(null), - cursorInfo.map(CursorInfo::getCursor).orElse(null), - cursorInfo.map(CursorInfo::getCursorRecordCount).orElse(null)); - } - - if (stateMessage != null) { - stateMessage.withSourceStats(new AirbyteStateStats().withRecordCount((double) recordCount)); - } - if (isFinalState) { - hasEmittedFinalState = true; - if (stateManager.getCursor(pair).isEmpty()) { - LOGGER.warn("Cursor for stream {} was null. This stream will replicate all records on the next run", pair); - } - } - - return new AirbyteMessage().withType(Type.STATE).withState(stateMessage); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/TableInfo.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/TableInfo.java deleted file mode 100644 index 1d990bdfd46b2..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/TableInfo.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.relationaldb; - -import java.util.List; -import lombok.Builder; -import lombok.Getter; - -/** - * This class encapsulates all externally relevant Table information. - */ -@Getter -@Builder -public class TableInfo { - - private final String nameSpace; - private final String name; - private final List fields; - private final List primaryKeys; - private final List cursorFields; - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/AbstractStateManager.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/AbstractStateManager.java deleted file mode 100644 index ea4214cf30b15..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/AbstractStateManager.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.relationaldb.state; - -import io.airbyte.cdk.integrations.source.relationaldb.CursorInfo; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.util.Collection; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.function.Function; -import java.util.function.Supplier; - -/** - * Abstract implementation of the {@link StateManager} interface that provides common functionality - * for state manager implementations. - * - * @param The type associated with the state object managed by this manager. - * @param The type associated with the state object stored in the state managed by this manager. - */ -public abstract class AbstractStateManager implements StateManager { - - /** - * The {@link CursorManager} responsible for keeping track of the current cursor value for each - * stream managed by this state manager. - */ - private final CursorManager cursorManager; - - /** - * Constructs a new state manager for the given configured connector. - * - * @param catalog The connector's configured catalog. - * @param streamSupplier A {@link Supplier} that provides the cursor manager with the collection of - * streams tracked by the connector's state. - * @param cursorFunction A {@link Function} that extracts the current cursor from a stream stored in - * the connector's state. - * @param cursorFieldFunction A {@link Function} that extracts the cursor field name from a stream - * stored in the connector's state. - * @param cursorRecordCountFunction A {@link Function} that extracts the cursor record count for a - * stream stored in the connector's state. - * @param namespacePairFunction A {@link Function} that generates a - * {@link AirbyteStreamNameNamespacePair} that identifies each stream in the connector's - * state. - */ - public AbstractStateManager(final ConfiguredAirbyteCatalog catalog, - final Supplier> streamSupplier, - final Function cursorFunction, - final Function> cursorFieldFunction, - final Function cursorRecordCountFunction, - final Function namespacePairFunction) { - this(catalog, streamSupplier, cursorFunction, cursorFieldFunction, cursorRecordCountFunction, namespacePairFunction, false); - } - - public AbstractStateManager(final ConfiguredAirbyteCatalog catalog, - final Supplier> streamSupplier, - final Function cursorFunction, - final Function> cursorFieldFunction, - final Function cursorRecordCountFunction, - final Function namespacePairFunction, - final boolean onlyIncludeIncrementalStreams) { - cursorManager = new CursorManager(catalog, streamSupplier, cursorFunction, cursorFieldFunction, cursorRecordCountFunction, namespacePairFunction, - onlyIncludeIncrementalStreams); - } - - @Override - public Map getPairToCursorInfoMap() { - return cursorManager.getPairToCursorInfo(); - } - - @Override - public abstract AirbyteStateMessage toState(final Optional pair); - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/CursorManager.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/CursorManager.java deleted file mode 100644 index 2449c7666d555..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/CursorManager.java +++ /dev/null @@ -1,255 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.relationaldb.state; - -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.cdk.integrations.source.relationaldb.CursorInfo; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.v0.SyncMode; -import java.util.Collection; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Optional; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; -import java.util.function.Function; -import java.util.function.Supplier; -import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Manages the map of streams to current cursor values for state management. - * - * @param The type that represents the stream object which holds the current cursor information - * in the state. - */ -public class CursorManager { - - private static final Logger LOGGER = LoggerFactory.getLogger(CursorManager.class); - - /** - * Map of streams (name/namespace tuple) to the current cursor information stored in the state. - */ - private final Map pairToCursorInfo; - - /** - * Constructs a new {@link CursorManager} based on the configured connector and current state - * information. - * - * @param catalog The connector's configured catalog. - * @param streamSupplier A {@link Supplier} that provides the cursor manager with the collection of - * streams tracked by the connector's state. - * @param cursorFunction A {@link Function} that extracts the current cursor from a stream stored in - * the connector's state. - * @param cursorFieldFunction A {@link Function} that extracts the cursor field name from a stream - * stored in the connector's state. - * @param cursorRecordCountFunction A {@link Function} that extracts the cursor record count for a - * stream stored in the connector's state. - * @param namespacePairFunction A {@link Function} that generates a - * {@link AirbyteStreamNameNamespacePair} that identifies each stream in the connector's - * state. - */ - public CursorManager(final ConfiguredAirbyteCatalog catalog, - final Supplier> streamSupplier, - final Function cursorFunction, - final Function> cursorFieldFunction, - final Function cursorRecordCountFunction, - final Function namespacePairFunction, - final boolean onlyIncludeIncrementalStreams) { - pairToCursorInfo = createCursorInfoMap( - catalog, streamSupplier, cursorFunction, cursorFieldFunction, cursorRecordCountFunction, namespacePairFunction, - onlyIncludeIncrementalStreams); - } - - /** - * Creates the cursor information map that associates stream name/namespace tuples with the current - * cursor information for that stream as stored in the connector's state. - * - * @param catalog The connector's configured catalog. - * @param streamSupplier A {@link Supplier} that provides the cursor manager with the collection of - * streams tracked by the connector's state. - * @param cursorFunction A {@link Function} that extracts the current cursor from a stream stored in - * the connector's state. - * @param cursorFieldFunction A {@link Function} that extracts the cursor field name from a stream - * stored in the connector's state. - * @param cursorRecordCountFunction A {@link Function} that extracts the cursor record count for a - * stream stored in the connector's state. - * @param namespacePairFunction A {@link Function} that generates a - * {@link AirbyteStreamNameNamespacePair} that identifies each stream in the connector's - * state. - * @return A map of streams to current cursor information for the stream. - */ - @VisibleForTesting - protected Map createCursorInfoMap( - final ConfiguredAirbyteCatalog catalog, - final Supplier> streamSupplier, - final Function cursorFunction, - final Function> cursorFieldFunction, - final Function cursorRecordCountFunction, - final Function namespacePairFunction, - final boolean onlyIncludeIncrementalStreams) { - final Set allStreamNames = catalog.getStreams() - .stream() - .filter(c -> { - if (onlyIncludeIncrementalStreams) { - return c.getSyncMode() == SyncMode.INCREMENTAL; - } - return true; - }) - .map(ConfiguredAirbyteStream::getStream) - .map(AirbyteStreamNameNamespacePair::fromAirbyteStream) - .collect(Collectors.toSet()); - allStreamNames.addAll(streamSupplier.get().stream().map(namespacePairFunction).filter(Objects::nonNull).collect(Collectors.toSet())); - - final Map localMap = new ConcurrentHashMap<>(); - final Map pairToState = streamSupplier.get() - .stream() - .collect(Collectors.toMap(namespacePairFunction, Function.identity())); - final Map pairToConfiguredAirbyteStream = catalog.getStreams().stream() - .collect(Collectors.toMap(AirbyteStreamNameNamespacePair::fromConfiguredAirbyteSteam, Function.identity())); - - for (final AirbyteStreamNameNamespacePair pair : allStreamNames) { - final Optional stateOptional = Optional.ofNullable(pairToState.get(pair)); - final Optional streamOptional = Optional.ofNullable(pairToConfiguredAirbyteStream.get(pair)); - localMap.put(pair, - createCursorInfoForStream(pair, stateOptional, streamOptional, cursorFunction, cursorFieldFunction, cursorRecordCountFunction)); - } - - return localMap; - } - - /** - * Generates a {@link CursorInfo} object based on the data currently stored in the connector's state - * for the given stream. - * - * @param pair A {@link AirbyteStreamNameNamespacePair} that identifies a specific stream managed by - * the connector. - * @param stateOptional {@link Optional} containing the current state associated with the stream. - * @param streamOptional {@link Optional} containing the {@link ConfiguredAirbyteStream} associated - * with the stream. - * @param cursorFunction A {@link Function} that provides the current cursor from the state - * associated with the stream. - * @param cursorFieldFunction A {@link Function} that provides the cursor field name for the cursor - * stored in the state associated with the stream. - * @param cursorRecordCountFunction A {@link Function} that extracts the cursor record count for a - * stream stored in the connector's state. - * @return A {@link CursorInfo} object based on the data currently stored in the connector's state - * for the given stream. - */ - @SuppressWarnings("OptionalUsedAsFieldOrParameterType") - @VisibleForTesting - protected CursorInfo createCursorInfoForStream(final AirbyteStreamNameNamespacePair pair, - final Optional stateOptional, - final Optional streamOptional, - final Function cursorFunction, - final Function> cursorFieldFunction, - final Function cursorRecordCountFunction) { - final String originalCursorField = stateOptional - .map(cursorFieldFunction) - .flatMap(f -> f.size() > 0 ? Optional.of(f.get(0)) : Optional.empty()) - .orElse(null); - final String originalCursor = stateOptional.map(cursorFunction).orElse(null); - final long originalCursorRecordCount = stateOptional.map(cursorRecordCountFunction).orElse(0L); - - final String cursor; - final String cursorField; - final long cursorRecordCount; - - // if cursor field is set in catalog. - if (streamOptional.map(ConfiguredAirbyteStream::getCursorField).isPresent()) { - cursorField = streamOptional - .map(ConfiguredAirbyteStream::getCursorField) - .flatMap(f -> f.size() > 0 ? Optional.of(f.get(0)) : Optional.empty()) - .orElse(null); - // if cursor field is set in state. - if (stateOptional.map(cursorFieldFunction).isPresent()) { - // if cursor field in catalog and state are the same. - if (stateOptional.map(cursorFieldFunction).equals(streamOptional.map(ConfiguredAirbyteStream::getCursorField))) { - cursor = stateOptional.map(cursorFunction).orElse(null); - cursorRecordCount = stateOptional.map(cursorRecordCountFunction).orElse(0L); - // If a matching cursor is found in the state, and it's value is null - this indicates a CDC stream - // and we shouldn't log anything. - if (cursor != null) { - LOGGER.info("Found matching cursor in state. Stream: {}. Cursor Field: {} Value: {} Count: {}", - pair, cursorField, cursor, cursorRecordCount); - } - // if cursor field in catalog and state are different. - } else { - cursor = null; - cursorRecordCount = 0L; - LOGGER.info( - "Found cursor field. Does not match previous cursor field. Stream: {}. Original Cursor Field: {} (count {}). New Cursor Field: {}. Resetting cursor value.", - pair, originalCursorField, originalCursorRecordCount, cursorField); - } - // if cursor field is not set in state but is set in catalog. - } else { - LOGGER.info("No cursor field set in catalog but not present in state. Stream: {}, New Cursor Field: {}. Resetting cursor value", pair, - cursorField); - cursor = null; - cursorRecordCount = 0L; - } - // if cursor field is not set in catalog. - } else { - LOGGER.info( - "Cursor field set in state but not present in catalog. Stream: {}. Original Cursor Field: {}. Original value: {}. Resetting cursor.", - pair, originalCursorField, originalCursor); - cursorField = null; - cursor = null; - cursorRecordCount = 0L; - } - - return new CursorInfo(originalCursorField, originalCursor, originalCursorRecordCount, cursorField, cursor, cursorRecordCount); - } - - /** - * Retrieves a copy of the stream name/namespace tuple to current cursor information map. - * - * @return A copy of the stream name/namespace tuple to current cursor information map. - */ - public Map getPairToCursorInfo() { - return Map.copyOf(pairToCursorInfo); - } - - /** - * Retrieves an {@link Optional} possibly containing the current {@link CursorInfo} associated with - * the provided stream name/namespace tuple. - * - * @param pair The {@link AirbyteStreamNameNamespacePair} which identifies a stream. - * @return An {@link Optional} possibly containing the current {@link CursorInfo} associated with - * the provided stream name/namespace tuple. - */ - public Optional getCursorInfo(final AirbyteStreamNameNamespacePair pair) { - return Optional.ofNullable(pairToCursorInfo.get(pair)); - } - - /** - * Retrieves an {@link Optional} possibly containing the cursor field name associated with the - * cursor tracked in the state associated with the provided stream name/namespace tuple. - * - * @param pair The {@link AirbyteStreamNameNamespacePair} which identifies a stream. - * @return An {@link Optional} possibly containing the cursor field name associated with the cursor - * tracked in the state associated with the provided stream name/namespace tuple. - */ - public Optional getCursorField(final AirbyteStreamNameNamespacePair pair) { - return getCursorInfo(pair).map(CursorInfo::getCursorField); - } - - /** - * Retrieves an {@link Optional} possibly containing the cursor value tracked in the state - * associated with the provided stream name/namespace tuple. - * - * @param pair The {@link AirbyteStreamNameNamespacePair} which identifies a stream. - * @return An {@link Optional} possibly containing the cursor value tracked in the state associated - * with the provided stream name/namespace tuple. - */ - public Optional getCursor(final AirbyteStreamNameNamespacePair pair) { - return getCursorInfo(pair).map(CursorInfo::getCursor); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/GlobalStateManager.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/GlobalStateManager.java deleted file mode 100644 index 384bd4d0cb8ea..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/GlobalStateManager.java +++ /dev/null @@ -1,162 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.relationaldb.state; - -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateGeneratorUtils.CURSOR_FIELD_FUNCTION; -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateGeneratorUtils.CURSOR_FUNCTION; -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateGeneratorUtils.CURSOR_RECORD_COUNT_FUNCTION; -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateGeneratorUtils.NAME_NAMESPACE_PAIR_FUNCTION; - -import io.airbyte.cdk.integrations.source.relationaldb.CdcStateManager; -import io.airbyte.cdk.integrations.source.relationaldb.models.CdcState; -import io.airbyte.cdk.integrations.source.relationaldb.models.DbState; -import io.airbyte.cdk.integrations.source.relationaldb.models.DbStreamState; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteGlobalState; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import io.airbyte.protocol.models.v0.AirbyteStateMessage.AirbyteStateType; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.AirbyteStreamState; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.Optional; -import java.util.Set; -import java.util.function.Supplier; -import java.util.stream.Collectors; - -/** - * Global implementation of the {@link StateManager} interface. - * - * This implementation generates a single, global state object for the state tracked by this - * manager. - */ -public class GlobalStateManager extends AbstractStateManager { - - /** - * Legacy {@link CdcStateManager} used to manage state for connectors that support Change Data - * Capture (CDC). - */ - private final CdcStateManager cdcStateManager; - - /** - * Constructs a new {@link GlobalStateManager} that is seeded with the provided - * {@link AirbyteStateMessage}. - * - * @param airbyteStateMessage The initial state represented as an {@link AirbyteStateMessage}. - * @param catalog The {@link ConfiguredAirbyteCatalog} for the connector associated with this state - * manager. - */ - public GlobalStateManager(final AirbyteStateMessage airbyteStateMessage, final ConfiguredAirbyteCatalog catalog) { - super(catalog, - getStreamsSupplier(airbyteStateMessage), - CURSOR_FUNCTION, - CURSOR_FIELD_FUNCTION, - CURSOR_RECORD_COUNT_FUNCTION, - NAME_NAMESPACE_PAIR_FUNCTION, - true); - - this.cdcStateManager = new CdcStateManager(extractCdcState(airbyteStateMessage), extractStreams(airbyteStateMessage), airbyteStateMessage); - } - - @Override - public CdcStateManager getCdcStateManager() { - return cdcStateManager; - } - - @Override - public List getRawStateMessages() { - throw new UnsupportedOperationException("Raw state retrieval not supported by global state manager."); - } - - @Override - public AirbyteStateMessage toState(final Optional pair) { - // Populate global state - final AirbyteGlobalState globalState = new AirbyteGlobalState(); - globalState.setSharedState(Jsons.jsonNode(getCdcStateManager().getCdcState())); - globalState.setStreamStates(StateGeneratorUtils.generateStreamStateList(getPairToCursorInfoMap())); - - // Generate the legacy state for backwards compatibility - final DbState dbState = StateGeneratorUtils.generateDbState(getPairToCursorInfoMap()) - .withCdc(true) - .withCdcState(getCdcStateManager().getCdcState()); - - return new AirbyteStateMessage() - .withType(AirbyteStateType.GLOBAL) - // Temporarily include legacy state for backwards compatibility with the platform - .withData(Jsons.jsonNode(dbState)) - .withGlobal(globalState); - } - - /** - * Extracts the Change Data Capture (CDC) state stored in the initial state provided to this state - * manager. - * - * @param airbyteStateMessage The {@link AirbyteStateMessage} that contains the initial state - * provided to the state manager. - * @return The {@link CdcState} stored in the state, if any. Note that this will not be {@code null} - * but may be empty. - */ - private CdcState extractCdcState(final AirbyteStateMessage airbyteStateMessage) { - if (airbyteStateMessage.getType() == AirbyteStateType.GLOBAL) { - return Jsons.object(airbyteStateMessage.getGlobal().getSharedState(), CdcState.class); - } else { - final DbState legacyState = Jsons.object(airbyteStateMessage.getData(), DbState.class); - return legacyState != null ? legacyState.getCdcState() : null; - } - } - - private Set extractStreams(final AirbyteStateMessage airbyteStateMessage) { - if (airbyteStateMessage.getType() == AirbyteStateType.GLOBAL) { - return airbyteStateMessage.getGlobal().getStreamStates().stream() - .map(streamState -> { - final AirbyteStreamState cloned = Jsons.clone(streamState); - return new AirbyteStreamNameNamespacePair(cloned.getStreamDescriptor().getName(), cloned.getStreamDescriptor().getNamespace()); - }).collect(Collectors.toSet()); - } else { - final DbState legacyState = Jsons.object(airbyteStateMessage.getData(), DbState.class); - return legacyState != null ? extractNamespacePairsFromDbStreamState(legacyState.getStreams()) : Collections.emptySet(); - } - } - - private Set extractNamespacePairsFromDbStreamState(final List streams) { - return streams.stream().map(stream -> { - final DbStreamState cloned = Jsons.clone(stream); - return new AirbyteStreamNameNamespacePair(cloned.getStreamName(), cloned.getStreamNamespace()); - }).collect(Collectors.toSet()); - } - - /** - * Generates the {@link Supplier} that will be used to extract the streams from the incoming - * {@link AirbyteStateMessage}. - * - * @param airbyteStateMessage The {@link AirbyteStateMessage} supplied to this state manager with - * the initial state. - * @return A {@link Supplier} that will be used to fetch the streams present in the initial state. - */ - private static Supplier> getStreamsSupplier(final AirbyteStateMessage airbyteStateMessage) { - /* - * If the incoming message has the state type set to GLOBAL, it is using the new format. Therefore, - * we can look for streams in the "global" field of the message. Otherwise, the message is still - * storing state in the legacy "data" field. - */ - return () -> { - if (airbyteStateMessage.getType() == AirbyteStateType.GLOBAL) { - return airbyteStateMessage.getGlobal().getStreamStates(); - } else if (airbyteStateMessage.getData() != null) { - return Jsons.object(airbyteStateMessage.getData(), DbState.class).getStreams().stream() - .map(s -> new AirbyteStreamState().withStreamState(Jsons.jsonNode(s)) - .withStreamDescriptor(new StreamDescriptor().withNamespace(s.getStreamNamespace()).withName(s.getStreamName()))) - .collect( - Collectors.toList()); - } else { - return List.of(); - } - }; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/LegacyStateManager.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/LegacyStateManager.java deleted file mode 100644 index c12137e607a73..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/LegacyStateManager.java +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.relationaldb.state; - -import io.airbyte.cdk.integrations.source.relationaldb.CdcStateManager; -import io.airbyte.cdk.integrations.source.relationaldb.models.DbState; -import io.airbyte.cdk.integrations.source.relationaldb.models.DbStreamState; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import io.airbyte.protocol.models.v0.AirbyteStateMessage.AirbyteStateType; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.util.List; -import java.util.Objects; -import java.util.Optional; -import java.util.function.Function; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Legacy implementation (pre-per-stream state support) of the {@link StateManager} interface. - * - * This implementation assumes that the state matches the {@link DbState} object and effectively - * tracks state as global across the streams managed by a connector. - * - * @deprecated This manager may be removed in the future if/once all connectors support per-stream - * state management. - */ -@Deprecated(forRemoval = true) -public class LegacyStateManager extends AbstractStateManager { - - private static final Logger LOGGER = LoggerFactory.getLogger(LegacyStateManager.class); - - /** - * {@link Function} that extracts the cursor from the stream state. - */ - private static final Function CURSOR_FUNCTION = DbStreamState::getCursor; - - /** - * {@link Function} that extracts the cursor field(s) from the stream state. - */ - private static final Function> CURSOR_FIELD_FUNCTION = DbStreamState::getCursorField; - - private static final Function CURSOR_RECORD_COUNT_FUNCTION = - stream -> Objects.requireNonNullElse(stream.getCursorRecordCount(), 0L); - - /** - * {@link Function} that creates an {@link AirbyteStreamNameNamespacePair} from the stream state. - */ - private static final Function NAME_NAMESPACE_PAIR_FUNCTION = - s -> new AirbyteStreamNameNamespacePair(s.getStreamName(), s.getStreamNamespace()); - - /** - * Tracks whether the connector associated with this state manager supports CDC. - */ - private Boolean isCdc; - - /** - * {@link CdcStateManager} used to manage state for connectors that support CDC. - */ - private final CdcStateManager cdcStateManager; - - /** - * Constructs a new {@link LegacyStateManager} that is seeded with the provided {@link DbState} - * instance. - * - * @param dbState The initial state represented as an {@link DbState} instance. - * @param catalog The {@link ConfiguredAirbyteCatalog} for the connector associated with this state - * manager. - */ - public LegacyStateManager(final DbState dbState, final ConfiguredAirbyteCatalog catalog) { - super(catalog, - dbState::getStreams, - CURSOR_FUNCTION, - CURSOR_FIELD_FUNCTION, - CURSOR_RECORD_COUNT_FUNCTION, - NAME_NAMESPACE_PAIR_FUNCTION); - - this.cdcStateManager = new CdcStateManager(dbState.getCdcState(), AirbyteStreamNameNamespacePair.fromConfiguredCatalog(catalog), null); - this.isCdc = dbState.getCdc(); - if (dbState.getCdc() == null) { - this.isCdc = false; - } - } - - @Override - public CdcStateManager getCdcStateManager() { - return cdcStateManager; - } - - @Override - public List getRawStateMessages() { - throw new UnsupportedOperationException("Raw state retrieval not supported by global state manager."); - } - - @Override - public AirbyteStateMessage toState(final Optional pair) { - final DbState dbState = StateGeneratorUtils.generateDbState(getPairToCursorInfoMap()) - .withCdc(isCdc) - .withCdcState(getCdcStateManager().getCdcState()); - - LOGGER.debug("Generated legacy state for {} streams", dbState.getStreams().size()); - return new AirbyteStateMessage().withType(AirbyteStateType.LEGACY).withData(Jsons.jsonNode(dbState)); - } - - @Override - public AirbyteStateMessage updateAndEmit(final AirbyteStreamNameNamespacePair pair, final String cursor) { - return updateAndEmit(pair, cursor, 0L); - } - - @Override - public AirbyteStateMessage updateAndEmit(final AirbyteStreamNameNamespacePair pair, final String cursor, final long cursorRecordCount) { - // cdc file gets updated by debezium so the "update" part is a no op. - if (!isCdc) { - return super.updateAndEmit(pair, cursor, cursorRecordCount); - } - - return toState(Optional.ofNullable(pair)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/SourceStateIterator.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/SourceStateIterator.java deleted file mode 100644 index c2d5ff8a7ba53..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/SourceStateIterator.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.relationaldb.state; - -import com.google.common.collect.AbstractIterator; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteMessage.Type; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import io.airbyte.protocol.models.v0.AirbyteStateStats; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import java.time.Duration; -import java.time.Instant; -import java.time.OffsetDateTime; -import java.util.Iterator; -import javax.annotation.CheckForNull; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class SourceStateIterator extends AbstractIterator implements Iterator { - - private static final Logger LOGGER = LoggerFactory.getLogger(SourceStateIterator.class); - private final Iterator messageIterator; - private final ConfiguredAirbyteStream stream; - private final StateEmitFrequency stateEmitFrequency; - private boolean hasEmittedFinalState = false; - private long recordCount = 0L; - private Instant lastCheckpoint = Instant.now(); - - private final SourceStateMessageProducer sourceStateMessageProducer; - - public SourceStateIterator(final Iterator messageIterator, - final ConfiguredAirbyteStream stream, - final SourceStateMessageProducer sourceStateMessageProducer, - final StateEmitFrequency stateEmitFrequency) { - this.messageIterator = messageIterator; - this.stream = stream; - this.sourceStateMessageProducer = sourceStateMessageProducer; - this.stateEmitFrequency = stateEmitFrequency; - } - - @CheckForNull - @Override - protected AirbyteMessage computeNext() { - - boolean iteratorHasNextValue = false; - try { - iteratorHasNextValue = messageIterator.hasNext(); - } catch (final Exception ex) { - // If the initial snapshot is incomplete for this stream, throw an exception failing the sync. This - // will ensure the platform retry logic - // kicks in and keeps retrying the sync until the initial snapshot is complete. - throw new RuntimeException(ex); - } - if (iteratorHasNextValue) { - if (shouldEmitStateMessage() && sourceStateMessageProducer.shouldEmitStateMessage(stream)) { - final AirbyteStateMessage stateMessage = sourceStateMessageProducer.generateStateMessageAtCheckpoint(stream); - stateMessage.withSourceStats(new AirbyteStateStats().withRecordCount((double) recordCount)); - - recordCount = 0L; - lastCheckpoint = Instant.now(); - return new AirbyteMessage() - .withType(Type.STATE) - .withState(stateMessage); - } - // Use try-catch to catch Exception that could occur when connection to the database fails - try { - final T message = messageIterator.next(); - final AirbyteMessage processedMessage = sourceStateMessageProducer.processRecordMessage(stream, message); - recordCount++; - return processedMessage; - } catch (final Exception e) { - throw new RuntimeException(e); - } - } else if (!hasEmittedFinalState) { - hasEmittedFinalState = true; - final AirbyteStateMessage finalStateMessageForStream = sourceStateMessageProducer.createFinalStateMessage(stream); - finalStateMessageForStream.withSourceStats(new AirbyteStateStats().withRecordCount((double) recordCount)); - recordCount = 0L; - return new AirbyteMessage() - .withType(Type.STATE) - .withState(finalStateMessageForStream); - } else { - return endOfData(); - } - } - - private boolean shouldEmitStateMessage() { - return (recordCount >= stateEmitFrequency.syncCheckpointRecords() - || Duration.between(lastCheckpoint, OffsetDateTime.now()).compareTo(stateEmitFrequency.syncCheckpointDuration()) > 0); - - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/SourceStateMessageProducer.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/SourceStateMessageProducer.java deleted file mode 100644 index c4d95b2b1fbb3..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/SourceStateMessageProducer.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.relationaldb.state; - -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; - -/** - * To be used with SourceStateIterator. SourceStateIterator will iterate over the records and - * generate state messages when needed. This interface defines how would those state messages be - * generated, and how the incoming record messages will be processed. - * - * @param - */ -public interface SourceStateMessageProducer { - - /** - * Returns a state message that should be emitted at checkpoint. - */ - AirbyteStateMessage generateStateMessageAtCheckpoint(final ConfiguredAirbyteStream stream); - - /** - * For the incoming record message, this method defines how the connector will consume it. - */ - AirbyteMessage processRecordMessage(final ConfiguredAirbyteStream stream, final T message); - - /** - * At the end of the iteration, this method will be called and it will generate the final state - * message. - * - * @return - */ - AirbyteStateMessage createFinalStateMessage(final ConfiguredAirbyteStream stream); - - /** - * Determines if the iterator has reached checkpoint or not per connector's definition. By default - * iterator will check if the number of records processed is greater than the checkpoint interval or - * last state message has already passed syncCheckpointDuration. - */ - boolean shouldEmitStateMessage(final ConfiguredAirbyteStream stream); - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/StateEmitFrequency.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/StateEmitFrequency.java deleted file mode 100644 index ee1eef34c421e..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/StateEmitFrequency.java +++ /dev/null @@ -1,9 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.relationaldb.state; - -import java.time.Duration; - -public record StateEmitFrequency(long syncCheckpointRecords, Duration syncCheckpointDuration) {} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/StateGeneratorUtils.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/StateGeneratorUtils.java deleted file mode 100644 index 4c272190946b2..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/StateGeneratorUtils.java +++ /dev/null @@ -1,266 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.relationaldb.state; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.Lists; -import io.airbyte.cdk.integrations.source.relationaldb.CursorInfo; -import io.airbyte.cdk.integrations.source.relationaldb.models.CdcState; -import io.airbyte.cdk.integrations.source.relationaldb.models.DbState; -import io.airbyte.cdk.integrations.source.relationaldb.models.DbStreamState; -import io.airbyte.commons.json.Jsons; -import io.airbyte.configoss.StateWrapper; -import io.airbyte.configoss.helpers.StateMessageHelper; -import io.airbyte.protocol.models.v0.AirbyteGlobalState; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import io.airbyte.protocol.models.v0.AirbyteStateMessage.AirbyteStateType; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.AirbyteStreamState; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Optional; -import java.util.function.Function; -import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Collection of utilities that facilitate the generation of state objects. - */ -public class StateGeneratorUtils { - - private static final Logger LOGGER = LoggerFactory.getLogger(StateGeneratorUtils.class); - - /** - * {@link Function} that extracts the cursor from the stream state. - */ - public static final Function CURSOR_FUNCTION = stream -> { - final Optional dbStreamState = StateGeneratorUtils.extractState(stream); - return dbStreamState.map(DbStreamState::getCursor).orElse(null); - }; - - /** - * {@link Function} that extracts the cursor field(s) from the stream state. - */ - public static final Function> CURSOR_FIELD_FUNCTION = stream -> { - final Optional dbStreamState = StateGeneratorUtils.extractState(stream); - if (dbStreamState.isPresent()) { - return dbStreamState.get().getCursorField(); - } else { - return List.of(); - } - }; - - public static final Function CURSOR_RECORD_COUNT_FUNCTION = stream -> { - final Optional dbStreamState = StateGeneratorUtils.extractState(stream); - return dbStreamState.map(DbStreamState::getCursorRecordCount).orElse(0L); - }; - - /** - * {@link Function} that creates an {@link AirbyteStreamNameNamespacePair} from the stream state. - */ - public static final Function NAME_NAMESPACE_PAIR_FUNCTION = - s -> isValidStreamDescriptor(s.getStreamDescriptor()) - ? new AirbyteStreamNameNamespacePair(s.getStreamDescriptor().getName(), s.getStreamDescriptor().getNamespace()) - : null; - - private StateGeneratorUtils() {} - - /** - * Generates the stream state for the given stream and cursor information. - * - * @param airbyteStreamNameNamespacePair The stream. - * @param cursorInfo The current cursor. - * @return The {@link AirbyteStreamState} representing the current state of the stream. - */ - public static AirbyteStreamState generateStreamState(final AirbyteStreamNameNamespacePair airbyteStreamNameNamespacePair, - final CursorInfo cursorInfo) { - return new AirbyteStreamState() - .withStreamDescriptor( - new StreamDescriptor().withName(airbyteStreamNameNamespacePair.getName()).withNamespace(airbyteStreamNameNamespacePair.getNamespace())) - .withStreamState(Jsons.jsonNode(generateDbStreamState(airbyteStreamNameNamespacePair, cursorInfo))); - } - - /** - * Generates a list of valid stream states from the provided stream and cursor information. A stream - * state is considered to be valid if the stream has a valid descriptor (see - * {@link #isValidStreamDescriptor(StreamDescriptor)} for more details). - * - * @param pairToCursorInfoMap The map of stream name/namespace tuple to the current cursor - * information for that stream - * @return The list of stream states derived from the state information extracted from the provided - * map. - */ - public static List generateStreamStateList(final Map pairToCursorInfoMap) { - return pairToCursorInfoMap.entrySet().stream() - .sorted(Entry.comparingByKey()) - .map(e -> generateStreamState(e.getKey(), e.getValue())) - .filter(s -> isValidStreamDescriptor(s.getStreamDescriptor())) - .collect(Collectors.toList()); - } - - /** - * Generates the legacy global state for backwards compatibility. - * - * @param pairToCursorInfoMap The map of stream name/namespace tuple to the current cursor - * information for that stream - * @return The legacy {@link DbState}. - */ - public static DbState generateDbState(final Map pairToCursorInfoMap) { - return new DbState() - .withCdc(false) - .withStreams(pairToCursorInfoMap.entrySet().stream() - .sorted(Entry.comparingByKey()) // sort by stream name then namespace for sanity. - .map(e -> generateDbStreamState(e.getKey(), e.getValue())) - .collect(Collectors.toList())); - } - - /** - * Generates the {@link DbStreamState} for the given stream and cursor. - * - * @param airbyteStreamNameNamespacePair The stream. - * @param cursorInfo The current cursor. - * @return The {@link DbStreamState}. - */ - public static DbStreamState generateDbStreamState(final AirbyteStreamNameNamespacePair airbyteStreamNameNamespacePair, - final CursorInfo cursorInfo) { - final DbStreamState state = new DbStreamState() - .withStreamName(airbyteStreamNameNamespacePair.getName()) - .withStreamNamespace(airbyteStreamNameNamespacePair.getNamespace()) - .withCursorField(cursorInfo.getCursorField() == null ? Collections.emptyList() : Lists.newArrayList(cursorInfo.getCursorField())) - .withCursor(cursorInfo.getCursor()); - if (cursorInfo.getCursorRecordCount() > 0L) { - state.setCursorRecordCount(cursorInfo.getCursorRecordCount()); - } - return state; - } - - /** - * Extracts the actual state from the {@link AirbyteStreamState} object. - * - * @param state The {@link AirbyteStreamState} that contains the actual stream state as JSON. - * @return An {@link Optional} possibly containing the deserialized representation of the stream - * state or an empty {@link Optional} if the state is not present or could not be - * deserialized. - */ - public static Optional extractState(final AirbyteStreamState state) { - try { - return Optional.ofNullable(Jsons.object(state.getStreamState(), DbStreamState.class)); - } catch (final IllegalArgumentException e) { - LOGGER.error("Unable to extract state.", e); - return Optional.empty(); - } - } - - /** - * Tests whether the provided {@link StreamDescriptor} is valid. A valid descriptor is defined as - * one that has a non-{@code null} name. - * - * See - * https://github.com/airbytehq/airbyte/blob/e63458fabb067978beb5eaa74d2bc130919b419f/docs/understanding-airbyte/airbyte-protocol.md - * for more details - * - * @param streamDescriptor A {@link StreamDescriptor} to be validated. - * @return {@code true} if the provided {@link StreamDescriptor} is valid or {@code false} if it is - * invalid. - */ - public static boolean isValidStreamDescriptor(final StreamDescriptor streamDescriptor) { - if (streamDescriptor != null) { - return streamDescriptor.getName() != null; - } else { - return false; - } - } - - /** - * Converts a {@link AirbyteStateType#LEGACY} state message into a {@link AirbyteStateType#GLOBAL} - * message. - * - * @param airbyteStateMessage A {@link AirbyteStateType#LEGACY} state message. - * @return A {@link AirbyteStateType#GLOBAL} state message. - */ - public static AirbyteStateMessage convertLegacyStateToGlobalState(final AirbyteStateMessage airbyteStateMessage) { - final DbState dbState = Jsons.object(airbyteStateMessage.getData(), DbState.class); - final AirbyteGlobalState globalState = new AirbyteGlobalState() - .withSharedState(Jsons.jsonNode(dbState.getCdcState())) - .withStreamStates(dbState.getStreams().stream() - .map(s -> new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName(s.getStreamName()).withNamespace(s.getStreamNamespace())) - .withStreamState(Jsons.jsonNode(s))) - .collect( - Collectors.toList())); - return new AirbyteStateMessage().withType(AirbyteStateType.GLOBAL).withGlobal(globalState); - } - - /** - * Converts a {@link AirbyteStateType#LEGACY} state message into a list of - * {@link AirbyteStateType#STREAM} messages. - * - * @param airbyteStateMessage A {@link AirbyteStateType#LEGACY} state message. - * @return A list {@link AirbyteStateType#STREAM} state messages. - */ - public static List convertLegacyStateToStreamState(final AirbyteStateMessage airbyteStateMessage) { - return Jsons.object(airbyteStateMessage.getData(), DbState.class).getStreams().stream() - .map(s -> new AirbyteStateMessage().withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withNamespace(s.getStreamNamespace()).withName(s.getStreamName())) - .withStreamState(Jsons.jsonNode(s)))) - .collect(Collectors.toList()); - } - - public static AirbyteStateMessage convertStateMessage(final io.airbyte.protocol.models.AirbyteStateMessage state) { - return Jsons.object(Jsons.jsonNode(state), AirbyteStateMessage.class); - } - - /** - * Deserializes the state represented as JSON into an object representation. - * - * @param initialStateJson The state as JSON. - * @Param supportedStateType the {@link AirbyteStateType} supported by this connector. - * @return The deserialized object representation of the state. - */ - public static List deserializeInitialState(final JsonNode initialStateJson, - final AirbyteStateType supportedStateType) { - final Optional typedState = StateMessageHelper.getTypedState(initialStateJson); - return typedState - .map(state -> switch (state.getStateType()) { - case GLOBAL -> List.of(StateGeneratorUtils.convertStateMessage(state.getGlobal())); - case STREAM -> state.getStateMessages() - .stream() - .map(StateGeneratorUtils::convertStateMessage).toList(); - default -> List.of(new AirbyteStateMessage().withType(AirbyteStateType.LEGACY) - .withData(state.getLegacyState())); - }) - .orElse(generateEmptyInitialState(supportedStateType)); - } - - /** - * Generates an empty, initial state for use by the connector. - * - * @Param supportedStateType the {@link AirbyteStateType} supported by this connector. - * @return The empty, initial state. - */ - private static List generateEmptyInitialState(final AirbyteStateType supportedStateType) { - // For backwards compatibility with existing connectors - if (supportedStateType == AirbyteStateType.LEGACY) { - return List.of(new AirbyteStateMessage() - .withType(AirbyteStateType.LEGACY) - .withData(Jsons.jsonNode(new DbState()))); - } else if (supportedStateType == AirbyteStateType.GLOBAL) { - final AirbyteGlobalState globalState = new AirbyteGlobalState() - .withSharedState(Jsons.jsonNode(new CdcState())) - .withStreamStates(List.of()); - return List.of(new AirbyteStateMessage().withType(AirbyteStateType.GLOBAL).withGlobal(globalState)); - } else { - return List.of(new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState())); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/StateManager.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/StateManager.java deleted file mode 100644 index 3bfb211ea2aae..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/StateManager.java +++ /dev/null @@ -1,167 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.relationaldb.state; - -import com.google.common.base.Preconditions; -import io.airbyte.cdk.integrations.source.relationaldb.CdcStateManager; -import io.airbyte.cdk.integrations.source.relationaldb.CursorInfo; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Defines a manager that manages connector state. Connector state is used to keep track of the data - * synced by the connector. - * - * @param The type of the state maintained by the manager. - * @param The type of the stream(s) stored within the state maintained by the manager. - */ -public interface StateManager { - - Logger LOGGER = LoggerFactory.getLogger(StateManager.class); - - /** - * Retrieves the {@link CdcStateManager} associated with the state manager. - * - * @return The {@link CdcStateManager} - * @throws UnsupportedOperationException if the state manager does not support tracking change data - * capture (CDC) state. - */ - CdcStateManager getCdcStateManager(); - - /** - * Retries the raw state messages associated with the state manager. This is required for - * database-specific sync modes (e.g. Xmin) that would want to handle and parse their own state - * - * @return the list of airbyte state messages - * @throws UnsupportedOperationException if the state manager does not support retrieving raw state. - */ - List getRawStateMessages(); - - /** - * Retrieves the map of stream name/namespace tuple to the current cursor information for that - * stream. - * - * @return The map of stream name/namespace tuple to the current cursor information for that stream - * as maintained by this state manager. - */ - Map getPairToCursorInfoMap(); - - /** - * Generates an {@link AirbyteStateMessage} that represents the current state contained in the state - * manager. - * - * @param pair The {@link AirbyteStreamNameNamespacePair} that represents a stream managed by the - * state manager. - * @return The {@link AirbyteStateMessage} that represents the current state contained in the state - * manager. - */ - AirbyteStateMessage toState(final Optional pair); - - /** - * Retrieves an {@link Optional} possibly containing the cursor value tracked in the state - * associated with the provided stream name/namespace tuple. - * - * @param pair The {@link AirbyteStreamNameNamespacePair} which identifies a stream. - * @return An {@link Optional} possibly containing the cursor value tracked in the state associated - * with the provided stream name/namespace tuple. - */ - default Optional getCursor(final AirbyteStreamNameNamespacePair pair) { - return getCursorInfo(pair).map(CursorInfo::getCursor); - } - - /** - * Retrieves an {@link Optional} possibly containing the cursor field name associated with the - * cursor tracked in the state associated with the provided stream name/namespace tuple. - * - * @param pair The {@link AirbyteStreamNameNamespacePair} which identifies a stream. - * @return An {@link Optional} possibly containing the cursor field name associated with the cursor - * tracked in the state associated with the provided stream name/namespace tuple. - */ - default Optional getCursorField(final AirbyteStreamNameNamespacePair pair) { - return getCursorInfo(pair).map(CursorInfo::getCursorField); - } - - /** - * Retrieves an {@link Optional} possibly containing the original cursor value tracked in the state - * associated with the provided stream name/namespace tuple. - * - * @param pair The {@link AirbyteStreamNameNamespacePair} which identifies a stream. - * @return An {@link Optional} possibly containing the original cursor value tracked in the state - * associated with the provided stream name/namespace tuple. - */ - default Optional getOriginalCursor(final AirbyteStreamNameNamespacePair pair) { - return getCursorInfo(pair).map(CursorInfo::getOriginalCursor); - } - - /** - * Retrieves an {@link Optional} possibly containing the original cursor field name associated with - * the cursor tracked in the state associated with the provided stream name/namespace tuple. - * - * @param pair The {@link AirbyteStreamNameNamespacePair} which identifies a stream. - * @return An {@link Optional} possibly containing the original cursor field name associated with - * the cursor tracked in the state associated with the provided stream name/namespace tuple. - */ - default Optional getOriginalCursorField(final AirbyteStreamNameNamespacePair pair) { - return getCursorInfo(pair).map(CursorInfo::getOriginalCursorField); - } - - /** - * Retrieves the current cursor information stored in the state manager for the steam name/namespace - * tuple. - * - * @param pair The {@link AirbyteStreamNameNamespacePair} that represents a stream managed by the - * state manager. - * @return {@link Optional} that potentially contains the current cursor information for the given - * stream name/namespace tuple. - */ - default Optional getCursorInfo(final AirbyteStreamNameNamespacePair pair) { - return Optional.ofNullable(getPairToCursorInfoMap().get(pair)); - } - - /** - * Emits the current state maintained by the manager as an {@link AirbyteStateMessage}. - * - * @param pair The {@link AirbyteStreamNameNamespacePair} that represents a stream managed by the - * state manager. - * @return An {@link AirbyteStateMessage} that represents the current state maintained by the state - * manager. - */ - default AirbyteStateMessage emit(final Optional pair) { - return toState(pair); - } - - /** - * Updates the cursor associated with the provided stream name/namespace pair and emits the current - * state maintained by the state manager. - * - * @param pair The {@link AirbyteStreamNameNamespacePair} that represents a stream managed by the - * state manager. - * @param cursor The new value for the cursor associated with the - * {@link AirbyteStreamNameNamespacePair} that represents a stream managed by the state - * manager. - * @return An {@link AirbyteStateMessage} that represents the current state maintained by the state - * manager. - */ - default AirbyteStateMessage updateAndEmit(final AirbyteStreamNameNamespacePair pair, final String cursor) { - return updateAndEmit(pair, cursor, 0L); - } - - default AirbyteStateMessage updateAndEmit(final AirbyteStreamNameNamespacePair pair, final String cursor, final long cursorRecordCount) { - final Optional cursorInfo = getCursorInfo(pair); - Preconditions.checkState(cursorInfo.isPresent(), "Could not find cursor information for stream: " + pair); - cursorInfo.get().setCursor(cursor); - if (cursorRecordCount > 0L) { - cursorInfo.get().setCursorRecordCount(cursorRecordCount); - } - LOGGER.debug("Updating cursor value for {} to {} (count {})...", pair, cursor, cursorRecordCount); - return emit(Optional.ofNullable(pair)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/StateManagerFactory.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/StateManagerFactory.java deleted file mode 100644 index 6c6d8b166443c..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/StateManagerFactory.java +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.relationaldb.state; - -import io.airbyte.cdk.integrations.source.relationaldb.models.DbState; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import io.airbyte.protocol.models.v0.AirbyteStateMessage.AirbyteStateType; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.util.ArrayList; -import java.util.List; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Factory class that creates {@link StateManager} instances based on the provided state. - */ -public class StateManagerFactory { - - private static final Logger LOGGER = LoggerFactory.getLogger(StateManagerFactory.class); - - /** - * Private constructor to prevent direct instantiation. - */ - private StateManagerFactory() {} - - /** - * Creates a {@link StateManager} based on the provided state object and catalog. This method will - * handle the conversion of the provided state to match the requested state manager based on the - * provided {@link AirbyteStateType}. - * - * @param supportedStateType The type of state supported by the connector. - * @param initialState The deserialized initial state that will be provided to the selected - * {@link StateManager}. - * @param catalog The {@link ConfiguredAirbyteCatalog} for the connector that will utilize the state - * manager. - * @return A newly created {@link StateManager} implementation based on the provided state. - */ - public static StateManager createStateManager(final AirbyteStateType supportedStateType, - final List initialState, - final ConfiguredAirbyteCatalog catalog) { - if (initialState != null && !initialState.isEmpty()) { - final AirbyteStateMessage airbyteStateMessage = initialState.get(0); - switch (supportedStateType) { - case LEGACY: - LOGGER.info("Legacy state manager selected to manage state object with type {}.", airbyteStateMessage.getType()); - @SuppressWarnings("deprecation") - StateManager retVal = new LegacyStateManager(Jsons.object(airbyteStateMessage.getData(), DbState.class), catalog); - return retVal; - case GLOBAL: - LOGGER.info("Global state manager selected to manage state object with type {}.", airbyteStateMessage.getType()); - return new GlobalStateManager(generateGlobalState(airbyteStateMessage), catalog); - case STREAM: - default: - LOGGER.info("Stream state manager selected to manage state object with type {}.", airbyteStateMessage.getType()); - return new StreamStateManager(generateStreamState(initialState), catalog); - } - } else { - throw new IllegalArgumentException("Failed to create state manager due to empty state list."); - } - } - - /** - * Handles the conversion between a different state type and the global state. This method handles - * the following transitions: - *
      - *
    • Stream -> Global (not supported, results in {@link IllegalArgumentException}
    • - *
    • Legacy -> Global (supported)
    • - *
    • Global -> Global (supported/no conversion required)
    • - *
    - * - * @param airbyteStateMessage The current state that is to be converted to global state. - * @return The converted state message. - * @throws IllegalArgumentException if unable to convert between the given state type and global. - */ - private static AirbyteStateMessage generateGlobalState(final AirbyteStateMessage airbyteStateMessage) { - AirbyteStateMessage globalStateMessage = airbyteStateMessage; - - switch (airbyteStateMessage.getType()) { - case STREAM: - throw new IllegalArgumentException("Unable to convert connector state from stream to global. Please reset the connection to continue."); - case LEGACY: - globalStateMessage = StateGeneratorUtils.convertLegacyStateToGlobalState(airbyteStateMessage); - LOGGER.info("Legacy state converted to global state.", airbyteStateMessage.getType()); - break; - case GLOBAL: - default: - break; - } - - return globalStateMessage; - } - - /** - * Handles the conversion between a different state type and the stream state. This method handles - * the following transitions: - *
      - *
    • Global -> Stream (not supported, results in {@link IllegalArgumentException}
    • - *
    • Legacy -> Stream (supported)
    • - *
    • Stream -> Stream (supported/no conversion required)
    • - *
    - * - * @param states The list of current states. - * @return The converted state messages. - * @throws IllegalArgumentException if unable to convert between the given state type and stream. - */ - private static List generateStreamState(final List states) { - final AirbyteStateMessage airbyteStateMessage = states.get(0); - final List streamStates = new ArrayList<>(); - switch (airbyteStateMessage.getType()) { - case GLOBAL: - throw new IllegalArgumentException("Unable to convert connector state from global to stream. Please reset the connection to continue."); - case LEGACY: - streamStates.addAll(StateGeneratorUtils.convertLegacyStateToStreamState(airbyteStateMessage)); - break; - case STREAM: - default: - streamStates.addAll(states); - break; - } - - return streamStates; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/StreamStateManager.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/StreamStateManager.java deleted file mode 100644 index efb874b8b0343..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/StreamStateManager.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.relationaldb.state; - -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateGeneratorUtils.CURSOR_FIELD_FUNCTION; -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateGeneratorUtils.CURSOR_FUNCTION; -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateGeneratorUtils.CURSOR_RECORD_COUNT_FUNCTION; -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateGeneratorUtils.NAME_NAMESPACE_PAIR_FUNCTION; - -import io.airbyte.cdk.integrations.source.relationaldb.CdcStateManager; -import io.airbyte.cdk.integrations.source.relationaldb.CursorInfo; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import io.airbyte.protocol.models.v0.AirbyteStateMessage.AirbyteStateType; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.AirbyteStreamState; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Per-stream implementation of the {@link StateManager} interface. - *

    - * This implementation generates a state object for each stream detected in catalog/map of known - * streams to cursor information stored in this manager. - */ -public class StreamStateManager extends AbstractStateManager { - - private static final Logger LOGGER = LoggerFactory.getLogger(StreamStateManager.class); - private final List rawAirbyteStateMessages; - - /** - * Constructs a new {@link StreamStateManager} that is seeded with the provided - * {@link AirbyteStateMessage}. - * - * @param airbyteStateMessages The initial state represented as a list of - * {@link AirbyteStateMessage}s. - * @param catalog The {@link ConfiguredAirbyteCatalog} for the connector associated with this state - * manager. - */ - public StreamStateManager(final List airbyteStateMessages, final ConfiguredAirbyteCatalog catalog) { - super(catalog, - () -> airbyteStateMessages.stream().map(AirbyteStateMessage::getStream).collect(Collectors.toList()), - CURSOR_FUNCTION, - CURSOR_FIELD_FUNCTION, - CURSOR_RECORD_COUNT_FUNCTION, - NAME_NAMESPACE_PAIR_FUNCTION); - this.rawAirbyteStateMessages = airbyteStateMessages; - } - - @Override - public CdcStateManager getCdcStateManager() { - throw new UnsupportedOperationException("CDC state management not supported by stream state manager."); - } - - @Override - public List getRawStateMessages() { - return rawAirbyteStateMessages; - } - - @Override - public AirbyteStateMessage toState(final Optional pair) { - if (pair.isPresent()) { - final Map pairToCursorInfoMap = getPairToCursorInfoMap(); - final Optional cursorInfo = Optional.ofNullable(pairToCursorInfoMap.get(pair.get())); - - if (cursorInfo.isPresent()) { - LOGGER.debug("Generating state message for {}...", pair); - return new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - // Temporarily include legacy state for backwards compatibility with the platform - .withData(Jsons.jsonNode(StateGeneratorUtils.generateDbState(pairToCursorInfoMap))) - .withStream(StateGeneratorUtils.generateStreamState(pair.get(), cursorInfo.get())); - } else { - LOGGER.warn("Cursor information could not be located in state for stream {}. Returning a new, empty state message...", pair); - return new AirbyteStateMessage().withType(AirbyteStateType.STREAM).withStream(new AirbyteStreamState()); - } - } else { - LOGGER.warn("Stream not provided. Returning a new, empty state message..."); - return new AirbyteStateMessage().withType(AirbyteStateType.STREAM).withStream(new AirbyteStreamState()); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/AirbyteDebeziumHandler.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/AirbyteDebeziumHandler.kt new file mode 100644 index 0000000000000..96f4c815a9df4 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/AirbyteDebeziumHandler.kt @@ -0,0 +1,165 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.debezium + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.db.jdbc.JdbcUtils +import io.airbyte.cdk.integrations.debezium.internals.* +import io.airbyte.cdk.integrations.source.relationaldb.state.SourceStateIterator +import io.airbyte.cdk.integrations.source.relationaldb.state.StateEmitFrequency +import io.airbyte.commons.util.AutoCloseableIterator +import io.airbyte.commons.util.AutoCloseableIterators +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import io.airbyte.protocol.models.v0.SyncMode +import io.debezium.engine.ChangeEvent +import io.debezium.engine.DebeziumEngine +import java.time.Duration +import java.time.Instant +import java.time.temporal.ChronoUnit +import java.util.* +import java.util.concurrent.LinkedBlockingQueue +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * This class acts as the bridge between Airbyte DB connectors and debezium. If a DB connector wants + * to use debezium for CDC, it should use this class + */ +class AirbyteDebeziumHandler( + private val config: JsonNode, + private val targetPosition: CdcTargetPosition, + private val trackSchemaHistory: Boolean, + private val firstRecordWaitTime: Duration, + private val subsequentRecordWaitTime: Duration, + private val queueSize: Int, + private val addDbNameToOffsetState: Boolean +) { + internal inner class CapacityReportingBlockingQueue(capacity: Int) : + LinkedBlockingQueue(capacity) { + private var lastReport: Instant? = null + + private fun reportQueueUtilization() { + if ( + lastReport == null || + Duration.between(lastReport, Instant.now()) + .compareTo(Companion.REPORT_DURATION) > 0 + ) { + LOGGER.info( + "CDC events queue size: {}. remaining {}", + this.size, + this.remainingCapacity() + ) + synchronized(this) { lastReport = Instant.now() } + } + } + + @Throws(InterruptedException::class) + override fun put(e: E) { + reportQueueUtilization() + super.put(e) + } + + override fun poll(): E { + reportQueueUtilization() + return super.poll() + } + } + + fun getIncrementalIterators( + debeziumPropertiesManager: DebeziumPropertiesManager, + eventConverter: DebeziumEventConverter, + cdcSavedInfoFetcher: CdcSavedInfoFetcher, + cdcStateHandler: CdcStateHandler + ): AutoCloseableIterator { + LOGGER.info("Using CDC: {}", true) + LOGGER.info( + "Using DBZ version: {}", + DebeziumEngine::class.java.getPackage().implementationVersion + ) + val offsetManager: AirbyteFileOffsetBackingStore = + AirbyteFileOffsetBackingStore.Companion.initializeState( + cdcSavedInfoFetcher.savedOffset, + if (addDbNameToOffsetState) + Optional.ofNullable(config[JdbcUtils.DATABASE_KEY].asText()) + else Optional.empty() + ) + val schemaHistoryManager: Optional = + if (trackSchemaHistory) + Optional.of( + AirbyteSchemaHistoryStorage.Companion.initializeDBHistory( + cdcSavedInfoFetcher.savedSchemaHistory, + cdcStateHandler.compressSchemaHistoryForState() + ) + ) + else Optional.empty() + val publisher = DebeziumRecordPublisher(debeziumPropertiesManager) + val queue: CapacityReportingBlockingQueue> = + CapacityReportingBlockingQueue(queueSize) + publisher.start(queue, offsetManager, schemaHistoryManager) + // handle state machine around pub/sub logic. + val eventIterator: AutoCloseableIterator = + DebeziumRecordIterator( + queue, + targetPosition, + { publisher.hasClosed() }, + DebeziumShutdownProcedure(queue, { publisher.close() }, { publisher.hasClosed() }), + firstRecordWaitTime, + subsequentRecordWaitTime + ) + + val syncCheckpointDuration = + if (config.has(DebeziumIteratorConstants.SYNC_CHECKPOINT_DURATION_PROPERTY)) + Duration.ofSeconds( + config[DebeziumIteratorConstants.SYNC_CHECKPOINT_DURATION_PROPERTY].asLong() + ) + else DebeziumIteratorConstants.SYNC_CHECKPOINT_DURATION + val syncCheckpointRecords = + if (config.has(DebeziumIteratorConstants.SYNC_CHECKPOINT_RECORDS_PROPERTY)) + config[DebeziumIteratorConstants.SYNC_CHECKPOINT_RECORDS_PROPERTY].asLong() + else DebeziumIteratorConstants.SYNC_CHECKPOINT_RECORDS.toLong() + + val messageProducer: DebeziumMessageProducer = + DebeziumMessageProducer( + cdcStateHandler, + targetPosition, + eventConverter, + offsetManager, + schemaHistoryManager + ) + + // Usually sourceStateIterator requires airbyteStream as input. For DBZ iterator, stream is + // not used + // at all thus we will pass in null. + val iterator: SourceStateIterator = + SourceStateIterator( + eventIterator, + null, + messageProducer!!, + StateEmitFrequency(syncCheckpointRecords, syncCheckpointDuration) + ) + return AutoCloseableIterators.fromIterator(iterator) + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(AirbyteDebeziumHandler::class.java) + private val REPORT_DURATION: Duration = Duration.of(10, ChronoUnit.SECONDS) + + /** + * We use 10000 as capacity cause the default queue size and batch size of debezium is : + * [io.debezium.config.CommonConnectorConfig.DEFAULT_MAX_BATCH_SIZE]is 2048 + * [io.debezium.config.CommonConnectorConfig.DEFAULT_MAX_QUEUE_SIZE] is 8192 + */ + const val QUEUE_CAPACITY: Int = 10000 + + @JvmStatic + fun isAnyStreamIncrementalSyncMode(catalog: ConfiguredAirbyteCatalog): Boolean { + return catalog.streams + .stream() + .map { obj: ConfiguredAirbyteStream -> obj.syncMode } + .anyMatch { syncMode: SyncMode -> syncMode == SyncMode.INCREMENTAL } + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/CdcMetadataInjector.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/CdcMetadataInjector.kt new file mode 100644 index 0000000000000..7054290eb0df0 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/CdcMetadataInjector.kt @@ -0,0 +1,56 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.debezium + +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.node.ObjectNode + +/** + * This interface is used to add metadata to the records fetched from the database. For instance, in + * Postgres we add the lsn to the records. In MySql we add the file name and position to the + * records. + */ +interface CdcMetadataInjector { + /** + * A debezium record contains multiple pieces. Ref : + * https://debezium.io/documentation/reference/1.9/connectors/mysql.html#mysql-create-events + * + * @param event is the actual record which contains data and would be written to the destination + * @param source contains the metadata about the record and we need to extract that metadata and + * add it to the event before writing it to destination + */ + fun addMetaData(event: ObjectNode?, source: JsonNode?) + + fun addMetaDataToRowsFetchedOutsideDebezium( + record: ObjectNode?, + transactionTimestamp: String?, + metadataToAdd: T + ) { + throw RuntimeException("Not Supported") + } + + fun addMetaDataToRowsFetchedOutsideDebezium(record: ObjectNode?) { + throw java.lang.RuntimeException("Not Supported") + } + + /** + * As part of Airbyte record we need to add the namespace (schema name) + * + * @param source part of debezium record and contains the metadata about the record. We need to + * extract namespace out of this metadata and return Ref : + * https://debezium.io/documentation/reference/1.9/connectors/mysql.html#mysql-create-events + * @return the stream namespace extracted from the change event source. + */ + fun namespace(source: JsonNode?): String? + + /** + * As part of Airbyte record we need to add the name (e.g. table name) + * + * @param source part of debezium record and contains the metadata about the record. We need to + * extract namespace out of this metadata and return Ref : + * https://debezium.io/documentation/reference/1.9/connectors/mysql.html#mysql-create-events + * @return The stream name extracted from the change event source. + */ + fun name(source: JsonNode?): String? +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/CdcSavedInfoFetcher.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/CdcSavedInfoFetcher.kt new file mode 100644 index 0000000000000..abcc9e5915394 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/CdcSavedInfoFetcher.kt @@ -0,0 +1,18 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.debezium + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.debezium.internals.AirbyteSchemaHistoryStorage +import java.util.* + +/** + * This interface is used to fetch the saved info required for debezium to run incrementally. Each + * connector saves offset and schema history in different manner + */ +interface CdcSavedInfoFetcher { + val savedOffset: JsonNode? + + val savedSchemaHistory: AirbyteSchemaHistoryStorage.SchemaHistory>? +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/CdcStateHandler.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/CdcStateHandler.kt new file mode 100644 index 0000000000000..976c97952a1da --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/CdcStateHandler.kt @@ -0,0 +1,34 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.debezium + +import io.airbyte.cdk.integrations.debezium.internals.AirbyteSchemaHistoryStorage +import io.airbyte.protocol.models.v0.AirbyteMessage + +/** + * This interface is used to allow connectors to save the offset and schema history in the manner + * which suits them. Also, it adds some utils to verify CDC event status. + */ +interface CdcStateHandler { + fun saveState( + offset: Map, + dbHistory: AirbyteSchemaHistoryStorage.SchemaHistory? + ): AirbyteMessage? + + fun saveStateAfterCompletionOfSnapshotOfNewStreams(): AirbyteMessage? + + fun compressSchemaHistoryForState(): Boolean { + return false + } + + val isCdcCheckpointEnabled: Boolean + /** + * This function is used as feature flag for sending state messages as checkpoints in CDC + * syncs. + * + * @return Returns `true` if checkpoint state messages are enabled for CDC syncs. Otherwise, + * it returns `false` + */ + get() = false +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/CdcTargetPosition.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/CdcTargetPosition.kt new file mode 100644 index 0000000000000..46005ae1194c2 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/CdcTargetPosition.kt @@ -0,0 +1,76 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.debezium + +import io.airbyte.cdk.integrations.debezium.internals.ChangeEventWithMetadata + +/** + * This interface is used to define the target position at the beginning of the sync so that once we + * reach the desired target, we can shutdown the sync. This is needed because it might happen that + * while we are syncing the data, new changes are being made in the source database and as a result + * we might end up syncing forever. In order to tackle that, we need to define a point to end at the + * beginning of the sync + */ +interface CdcTargetPosition { + /** + * Reads a position value (ex: LSN) from a change event and compares it to target position + * + * @param changeEventWithMetadata change event from Debezium with extra calculated metadata + * @return true if event position is equal or greater than target position, or if last snapshot + * event + */ + fun reachedTargetPosition(changeEventWithMetadata: ChangeEventWithMetadata?): Boolean + + /** + * Reads a position value (lsn) from a change event and compares it to target lsn + * + * @param positionFromHeartbeat is the position extracted out of a heartbeat event (if the + * connector supports heartbeat) + * @return true if heartbeat position is equal or greater than target position + */ + fun reachedTargetPosition(positionFromHeartbeat: T): Boolean { + throw UnsupportedOperationException() + } + + val isHeartbeatSupported: Boolean + /** + * Indicates whether the implementation supports heartbeat position. + * + * @return true if heartbeats are supported + */ + get() = false + + /** + * Returns a position value from a heartbeat event offset. + * + * @param sourceOffset source offset params from heartbeat change event + * @return the heartbeat position in a heartbeat change event or null + */ + fun extractPositionFromHeartbeatOffset(sourceOffset: Map): T + + /** + * This function checks if the event we are processing in the loop is already behind the offset + * so the process can safety save the state. + * + * @param offset DB CDC offset + * @param event Event from the CDC load + * @return Returns `true` when the event is ahead of the offset. Otherwise, it returns `false` + */ + fun isEventAheadOffset(offset: Map?, event: ChangeEventWithMetadata?): Boolean { + return false + } + + /** + * This function compares two offsets to make sure both are not pointing to the same position. + * The main purpose is to avoid sending same offset multiple times. + * + * @param offsetA Offset to compare + * @param offsetB Offset to compare + * @return Returns `true` if both offsets are at the same position. Otherwise, it returns + * `false` + */ + fun isSameOffset(offsetA: Map, offsetB: Map): Boolean { + return false + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/DebeziumIteratorConstants.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/DebeziumIteratorConstants.kt new file mode 100644 index 0000000000000..95108db4aec55 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/DebeziumIteratorConstants.kt @@ -0,0 +1,17 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.debezium + +import java.time.Duration + +object DebeziumIteratorConstants { + const val SYNC_CHECKPOINT_DURATION_PROPERTY: String = "sync_checkpoint_seconds" + const val SYNC_CHECKPOINT_RECORDS_PROPERTY: String = "sync_checkpoint_records" + + // TODO: Move these variables to a separate class IteratorConstants, as they will be used in + // state + // iterators for non debezium cases too. + @JvmField val SYNC_CHECKPOINT_DURATION: Duration = Duration.ofMinutes(15) + const val SYNC_CHECKPOINT_RECORDS: Int = 10000 +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/AirbyteFileOffsetBackingStore.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/AirbyteFileOffsetBackingStore.kt new file mode 100644 index 0000000000000..bb6b9958ff1ec --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/AirbyteFileOffsetBackingStore.kt @@ -0,0 +1,227 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.debezium.internals + +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.base.Preconditions +import io.airbyte.commons.json.Jsons +import java.io.EOFException +import java.io.IOException +import java.io.ObjectOutputStream +import java.nio.ByteBuffer +import java.nio.charset.StandardCharsets +import java.nio.file.Files +import java.nio.file.NoSuchFileException +import java.nio.file.Path +import java.util.* +import java.util.function.BiFunction +import java.util.function.Function +import java.util.stream.Collectors +import org.apache.commons.io.FileUtils +import org.apache.kafka.connect.errors.ConnectException +import org.apache.kafka.connect.util.SafeObjectInputStream +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * This class handles reading and writing a debezium offset file. In many cases it is duplicating + * logic in debezium because that logic is not exposed in the public API. We mostly treat the + * contents of this state file like a black box. We know it is a Map<ByteBuffer, Bytebuffer>. + * We deserialize it to a Map<String, String> so that the state file can be human readable. If + * we ever discover that any of the contents of these offset files is not string serializable we + * will likely have to drop the human readability support and just base64 encode it. + */ +class AirbyteFileOffsetBackingStore( + private val offsetFilePath: Path, + private val dbName: Optional +) { + fun read(): Map { + val raw = load() + + return raw.entries + .stream() + .collect( + Collectors.toMap( + Function { e: Map.Entry -> + byteBufferToString(e.key) + }, + Function { e: Map.Entry -> + byteBufferToString(e.value) + } + ) + ) + } + + fun persist(cdcState: JsonNode?) { + val mapAsString: Map = + if (cdcState != null) + Jsons.`object`(cdcState, MutableMap::class.java) as Map + else emptyMap() + + val updatedMap = updateStateForDebezium2_1(mapAsString) + + val mappedAsStrings = + updatedMap.entries + .stream() + .collect( + Collectors.toMap( + Function { e: Map.Entry -> stringToByteBuffer(e.key) }, + Function { e: Map.Entry -> stringToByteBuffer(e.value) } + ) + ) + + FileUtils.deleteQuietly(offsetFilePath.toFile()) + save(mappedAsStrings) + } + + private fun updateStateForDebezium2_1(mapAsString: Map): Map { + val updatedMap: MutableMap = LinkedHashMap() + if (mapAsString.size > 0) { + val key = mapAsString.keys.stream().toList()[0] + val i = key.indexOf('[') + val i1 = key.lastIndexOf(']') + + if (i == 0 && i1 == key.length - 1) { + // The state is Debezium 2.1 compatible. No need to change anything. + return mapAsString + } + + LOGGER.info("Mutating sate to make it Debezium 2.1 compatible") + val newKey = + if (dbName.isPresent) + SQL_SERVER_STATE_MUTATION.apply(key.substring(i, i1 + 1), dbName.get()) + else key.substring(i, i1 + 1) + val value = mapAsString[key] + updatedMap[newKey] = value + } + return updatedMap + } + + /** + * See FileOffsetBackingStore#load - logic is mostly borrowed from here. duplicated because this + * method is not public. Reduced the try catch block to only the read operation from original + * code to reduce errors when reading the file. + */ + private fun load(): Map { + var obj: Any + try { + SafeObjectInputStream(Files.newInputStream(offsetFilePath)).use { `is` -> + // todo (cgardens) - we currently suppress a security warning for this line. use of + // readObject from + // untrusted sources is considered unsafe. Since the source is controlled by us in + // this case it + // should be safe. That said, changing this implementation to not use readObject + // would remove some + // headache. + obj = `is`.readObject() + } + } catch (e: NoSuchFileException) { + // NoSuchFileException: Ignore, may be new. + // EOFException: Ignore, this means the file was missing or corrupt + return emptyMap() + } catch (e: EOFException) { + return emptyMap() + } catch (e: IOException) { + throw ConnectException(e) + } catch (e: ClassNotFoundException) { + throw ConnectException(e) + } + + if (obj !is HashMap<*, *>) + throw ConnectException("Expected HashMap but found " + obj.javaClass) + val raw = obj as Map + val data: MutableMap = HashMap() + for ((key1, value1) in raw) { + val key = if ((key1 != null)) ByteBuffer.wrap(key1) else null + val value = if ((value1 != null)) ByteBuffer.wrap(value1) else null + data[key] = value + } + + return data + } + + /** + * See FileOffsetBackingStore#save - logic is mostly borrowed from here. duplicated because this + * method is not public. + */ + private fun save(data: Map) { + try { + ObjectOutputStream(Files.newOutputStream(offsetFilePath)).use { os -> + val raw: MutableMap = HashMap() + for ((key1, value1) in data) { + val key = if ((key1 != null)) key1.array() else null + val value = if ((value1 != null)) value1.array() else null + raw[key] = value + } + os.writeObject(raw) + } + } catch (e: IOException) { + throw ConnectException(e) + } + } + + fun setDebeziumProperties(props: Properties) { + // debezium engine configuration + // https://debezium.io/documentation/reference/2.2/development/engine.html#engine-properties + props.setProperty( + "offset.storage", + "org.apache.kafka.connect.storage.FileOffsetBackingStore" + ) + props.setProperty("offset.storage.file.filename", offsetFilePath.toString()) + props.setProperty("offset.flush.interval.ms", "1000") // todo: make this longer + } + + companion object { + private val LOGGER: Logger = + LoggerFactory.getLogger(AirbyteFileOffsetBackingStore::class.java) + private val SQL_SERVER_STATE_MUTATION = BiFunction { key: String, databaseName: String -> + (key.substring(0, key.length - 2) + + ",\"database\":\"" + + databaseName + + "\"" + + key.substring(key.length - 2)) + } + + private fun byteBufferToString(byteBuffer: ByteBuffer?): String { + Preconditions.checkNotNull(byteBuffer) + return String(byteBuffer!!.array(), StandardCharsets.UTF_8) + } + + private fun stringToByteBuffer(s: String?): ByteBuffer { + Preconditions.checkNotNull(s) + return ByteBuffer.wrap(s!!.toByteArray(StandardCharsets.UTF_8)) + } + + @JvmStatic + fun initializeState( + cdcState: JsonNode?, + dbName: Optional + ): AirbyteFileOffsetBackingStore { + val cdcWorkingDir: Path + try { + cdcWorkingDir = Files.createTempDirectory(Path.of("/tmp"), "cdc-state-offset") + } catch (e: IOException) { + throw RuntimeException(e) + } + val cdcOffsetFilePath = cdcWorkingDir.resolve("offset.dat") + + val offsetManager = AirbyteFileOffsetBackingStore(cdcOffsetFilePath, dbName) + offsetManager.persist(cdcState) + return offsetManager + } + + @JvmStatic + fun initializeDummyStateForSnapshotPurpose(): AirbyteFileOffsetBackingStore { + val cdcWorkingDir: Path + try { + cdcWorkingDir = Files.createTempDirectory(Path.of("/tmp"), "cdc-dummy-state-offset") + } catch (e: IOException) { + throw RuntimeException(e) + } + val cdcOffsetFilePath = cdcWorkingDir.resolve("offset.dat") + + return AirbyteFileOffsetBackingStore(cdcOffsetFilePath, Optional.empty()) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/AirbyteSchemaHistoryStorage.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/AirbyteSchemaHistoryStorage.kt new file mode 100644 index 0000000000000..417aaddcd86c2 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/AirbyteSchemaHistoryStorage.kt @@ -0,0 +1,254 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.debezium.internals + +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.annotations.VisibleForTesting +import io.airbyte.commons.json.Jsons +import io.debezium.document.DocumentReader +import io.debezium.document.DocumentWriter +import java.io.* +import java.nio.charset.Charset +import java.nio.charset.StandardCharsets +import java.nio.file.FileAlreadyExistsException +import java.nio.file.Files +import java.nio.file.Path +import java.nio.file.StandardOpenOption +import java.util.* +import java.util.zip.GZIPInputStream +import java.util.zip.GZIPOutputStream +import org.apache.commons.io.FileUtils +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * The purpose of this class is : to , 1. Read the contents of the file [.path] which contains the + * schema history at the end of the sync so that it can be saved in state for future syncs. Check + * [.read] 2. Write the saved content back to the file [.path] at the beginning of the sync so that + * debezium can function smoothly. Check persist(Optional<JsonNode>). + */ +class AirbyteSchemaHistoryStorage( + private val path: Path, + private val compressSchemaHistoryForState: Boolean +) { + private val reader: DocumentReader = DocumentReader.defaultReader() + private val writer: DocumentWriter = DocumentWriter.defaultWriter() + + data class SchemaHistory(val schema: T, val isCompressed: Boolean) + + fun read(): SchemaHistory { + val fileSizeMB = path.toFile().length().toDouble() / (ONE_MB) + if ((fileSizeMB > SIZE_LIMIT_TO_COMPRESS_MB) && compressSchemaHistoryForState) { + LOGGER.info( + "File Size {} MB is greater than the size limit of {} MB, compressing the content of the file.", + fileSizeMB, + SIZE_LIMIT_TO_COMPRESS_MB + ) + val schemaHistory = readCompressed() + val compressedSizeMB = calculateSizeOfStringInMB(schemaHistory) + if (fileSizeMB > compressedSizeMB) { + LOGGER.info("Content Size post compression is {} MB ", compressedSizeMB) + } else { + throw RuntimeException( + "Compressing increased the size of the content. Size before compression " + + fileSizeMB + + ", after compression " + + compressedSizeMB + ) + } + return SchemaHistory(schemaHistory, true) + } + if (compressSchemaHistoryForState) { + LOGGER.info( + "File Size {} MB is less than the size limit of {} MB, reading the content of the file without compression.", + fileSizeMB, + SIZE_LIMIT_TO_COMPRESS_MB + ) + } else { + LOGGER.info("File Size {} MB.", fileSizeMB) + } + val schemaHistory = readUncompressed() + return SchemaHistory(schemaHistory, false) + } + + @VisibleForTesting + fun readUncompressed(): String { + val fileAsString = StringBuilder() + try { + for (line in Files.readAllLines(path, UTF8)) { + if (line != null && !line.isEmpty()) { + val record = reader.read(line) + val recordAsString = writer.write(record) + fileAsString.append(recordAsString) + fileAsString.append(System.lineSeparator()) + } + } + return fileAsString.toString() + } catch (e: IOException) { + throw RuntimeException(e) + } + } + + private fun readCompressed(): String { + val lineSeparator = System.lineSeparator() + val compressedStream = ByteArrayOutputStream() + try { + GZIPOutputStream(compressedStream).use { gzipOutputStream -> + Files.newBufferedReader(path, UTF8).use { bufferedReader -> + while (true) { + val line = bufferedReader.readLine() ?: break + + if (!line.isEmpty()) { + val record = reader.read(line) + val recordAsString = writer.write(record) + gzipOutputStream.write( + recordAsString.toByteArray(StandardCharsets.UTF_8) + ) + gzipOutputStream.write( + lineSeparator.toByteArray(StandardCharsets.UTF_8) + ) + } + } + } + } + } catch (e: IOException) { + throw RuntimeException(e) + } + return Jsons.serialize(compressedStream.toByteArray()) + } + + private fun makeSureFileExists() { + try { + // Make sure the file exists ... + if (!Files.exists(path)) { + // Create parent directories if we have them ... + if (path.parent != null) { + Files.createDirectories(path.parent) + } + try { + Files.createFile(path) + } catch (e: FileAlreadyExistsException) { + // do nothing + } + } + } catch (e: IOException) { + throw IllegalStateException( + "Unable to check or create history file at " + path + ": " + e.message, + e + ) + } + } + + private fun persist(schemaHistory: SchemaHistory>?) { + if (schemaHistory!!.schema!!.isEmpty) { + return + } + val fileAsString = Jsons.`object`(schemaHistory.schema!!.get(), String::class.java) + + if (fileAsString == null || fileAsString.isEmpty()) { + return + } + + FileUtils.deleteQuietly(path.toFile()) + makeSureFileExists() + if (schemaHistory.isCompressed) { + writeCompressedStringToFile(fileAsString) + } else { + writeToFile(fileAsString) + } + } + + /** + * @param fileAsString Represents the contents of the file saved in state from previous syncs + */ + private fun writeToFile(fileAsString: String) { + try { + val split = + fileAsString + .split(System.lineSeparator().toRegex()) + .dropLastWhile { it.isEmpty() } + .toTypedArray() + for (element in split) { + val read = reader.read(element) + val line = writer.write(read) + + Files.newBufferedWriter(path, StandardOpenOption.APPEND).use { historyWriter -> + try { + historyWriter.append(line) + historyWriter.newLine() + } catch (e: IOException) { + throw RuntimeException(e) + } + } + } + } catch (e: IOException) { + throw RuntimeException(e) + } + } + + private fun writeCompressedStringToFile(compressedString: String) { + try { + ByteArrayInputStream(Jsons.deserialize(compressedString, ByteArray::class.java)).use { + inputStream -> + GZIPInputStream(inputStream).use { gzipInputStream -> + FileOutputStream(path.toFile()).use { fileOutputStream -> + val buffer = ByteArray(1024) + var bytesRead: Int + while ((gzipInputStream.read(buffer).also { bytesRead = it }) != -1) { + fileOutputStream.write(buffer, 0, bytesRead) + } + } + } + } + } catch (e: IOException) { + throw RuntimeException(e) + } + } + + fun setDebeziumProperties(props: Properties) { + // https://debezium.io/documentation/reference/2.2/operations/debezium-server.html#debezium-source-database-history-class + // https://debezium.io/documentation/reference/development/engine.html#_in_the_code + // As mentioned in the documents above, debezium connector for MySQL needs to track the + // schema + // changes. If we don't do this, we can't fetch records for the table. + props.setProperty( + "schema.history.internal", + "io.debezium.storage.file.history.FileSchemaHistory" + ) + props.setProperty("schema.history.internal.file.filename", path.toString()) + props.setProperty("schema.history.internal.store.only.captured.databases.ddl", "true") + } + + companion object { + private val LOGGER: Logger = + LoggerFactory.getLogger(AirbyteSchemaHistoryStorage::class.java) + private const val SIZE_LIMIT_TO_COMPRESS_MB: Long = 1 + const val ONE_MB: Int = 1024 * 1024 + private val UTF8: Charset = StandardCharsets.UTF_8 + + @VisibleForTesting + fun calculateSizeOfStringInMB(string: String): Double { + return string.toByteArray(StandardCharsets.UTF_8).size.toDouble() / (ONE_MB) + } + + @JvmStatic + fun initializeDBHistory( + schemaHistory: SchemaHistory>?, + compressSchemaHistoryForState: Boolean + ): AirbyteSchemaHistoryStorage { + val dbHistoryWorkingDir: Path + try { + dbHistoryWorkingDir = Files.createTempDirectory(Path.of("/tmp"), "cdc-db-history") + } catch (e: IOException) { + throw RuntimeException(e) + } + val dbHistoryFilePath = dbHistoryWorkingDir.resolve("dbhistory.dat") + + val schemaHistoryManager = + AirbyteSchemaHistoryStorage(dbHistoryFilePath, compressSchemaHistoryForState) + schemaHistoryManager.persist(schemaHistory) + return schemaHistoryManager + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/ChangeEventWithMetadata.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/ChangeEventWithMetadata.kt new file mode 100644 index 0000000000000..8e0a8985e2ffb --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/ChangeEventWithMetadata.kt @@ -0,0 +1,34 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.debezium.internals + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.commons.json.Jsons +import io.debezium.engine.ChangeEvent + +class ChangeEventWithMetadata(private val event: ChangeEvent) { + private val eventKeyAsJson: JsonNode = Jsons.deserialize(event.key()) + private val eventValueAsJson: JsonNode = Jsons.deserialize(event.value()) + private val snapshotMetadata: SnapshotMetadata? = + SnapshotMetadata.Companion.fromString(eventValueAsJson["source"]["snapshot"].asText()) + + fun event(): ChangeEvent { + return event + } + + fun eventKeyAsJson(): JsonNode { + return eventKeyAsJson + } + + fun eventValueAsJson(): JsonNode { + return eventValueAsJson + } + + val isSnapshotEvent: Boolean + get() = SnapshotMetadata.Companion.isSnapshotEventMetadata(snapshotMetadata) + + fun snapshotMetadata(): SnapshotMetadata? { + return snapshotMetadata + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/DebeziumConverterUtils.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/DebeziumConverterUtils.kt new file mode 100644 index 0000000000000..a2044acb064f2 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/DebeziumConverterUtils.kt @@ -0,0 +1,75 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.debezium.internals + +import io.airbyte.cdk.db.DataTypeUtils.toISO8601String +import io.airbyte.cdk.db.DataTypeUtils.toISO8601StringWithMicroseconds +import io.debezium.spi.converter.RelationalColumn +import java.sql.Date +import java.sql.Timestamp +import java.time.Duration +import java.time.LocalDate +import java.time.LocalDateTime +import java.time.format.DateTimeParseException +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +class DebeziumConverterUtils private constructor() { + init { + throw UnsupportedOperationException() + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(DebeziumConverterUtils::class.java) + + /** TODO : Replace usage of this method with [io.airbyte.cdk.db.jdbc.DateTimeConverter] */ + @JvmStatic + fun convertDate(input: Any): String { + /** + * While building this custom converter we were not sure what type debezium could return + * cause there is no mention of it in the documentation. Secondly if you take a look at + * [io.debezium.connector.mysql.converters.TinyIntOneToBooleanConverter.converterFor] + * method, even it is handling multiple data types but its not clear under what + * circumstances which data type would be returned. I just went ahead and handled the + * data types that made sense. Secondly, we use LocalDateTime to handle this cause it + * represents DATETIME datatype in JAVA + */ + if (input is LocalDateTime) { + return toISO8601String(input) + } else if (input is LocalDate) { + return toISO8601String(input) + } else if (input is Duration) { + return toISO8601String(input) + } else if (input is Timestamp) { + return toISO8601StringWithMicroseconds((input.toInstant())) + } else if (input is Number) { + return toISO8601String(Timestamp(input.toLong()).toLocalDateTime()) + } else if (input is Date) { + return toISO8601String(input) + } else if (input is String) { + try { + return LocalDateTime.parse(input).toString() + } catch (e: DateTimeParseException) { + LOGGER.warn("Cannot convert value '{}' to LocalDateTime type", input) + return input.toString() + } + } + LOGGER.warn( + "Uncovered date class type '{}'. Use default converter", + input.javaClass.name + ) + return input.toString() + } + + @JvmStatic + fun convertDefaultValue(field: RelationalColumn): Any? { + if (field.isOptional) { + return null + } else if (field.hasDefaultValue()) { + return field.defaultValue() + } + return null + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/DebeziumEventConverter.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/DebeziumEventConverter.kt new file mode 100644 index 0000000000000..dc149a08c258d --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/DebeziumEventConverter.kt @@ -0,0 +1,69 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.debezium.internals + +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.node.ObjectNode +import io.airbyte.cdk.integrations.debezium.CdcMetadataInjector +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.AirbyteRecordMessage +import java.time.Instant + +interface DebeziumEventConverter { + fun toAirbyteMessage(event: ChangeEventWithMetadata): AirbyteMessage + + companion object { + @JvmStatic + fun buildAirbyteMessage( + source: JsonNode?, + cdcMetadataInjector: CdcMetadataInjector<*>, + emittedAt: Instant, + data: JsonNode? + ): AirbyteMessage { + val streamNamespace = cdcMetadataInjector.namespace(source) + val streamName = cdcMetadataInjector.name(source) + + val airbyteRecordMessage = + AirbyteRecordMessage() + .withStream(streamName) + .withNamespace(streamNamespace) + .withEmittedAt(emittedAt.toEpochMilli()) + .withData(data) + + return AirbyteMessage() + .withType(AirbyteMessage.Type.RECORD) + .withRecord(airbyteRecordMessage) + } + + @JvmStatic + fun addCdcMetadata( + baseNode: ObjectNode, + source: JsonNode, + cdcMetadataInjector: CdcMetadataInjector<*>, + isDelete: Boolean + ): JsonNode { + val transactionMillis = source["ts_ms"].asLong() + val transactionTimestamp = Instant.ofEpochMilli(transactionMillis).toString() + + baseNode.put(CDC_UPDATED_AT, transactionTimestamp) + cdcMetadataInjector.addMetaData(baseNode, source) + + if (isDelete) { + baseNode.put(CDC_DELETED_AT, transactionTimestamp) + } else { + baseNode.put(CDC_DELETED_AT, null as String?) + } + + return baseNode + } + + const val CDC_LSN: String = "_ab_cdc_lsn" + const val CDC_UPDATED_AT: String = "_ab_cdc_updated_at" + const val CDC_DELETED_AT: String = "_ab_cdc_deleted_at" + const val AFTER_EVENT: String = "after" + const val BEFORE_EVENT: String = "before" + const val OPERATION_FIELD: String = "op" + const val SOURCE_EVENT: String = "source" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/DebeziumMessageProducer.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/DebeziumMessageProducer.kt new file mode 100644 index 0000000000000..7a4c1a8464941 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/DebeziumMessageProducer.kt @@ -0,0 +1,146 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.debezium.internals + +import io.airbyte.cdk.integrations.debezium.CdcStateHandler +import io.airbyte.cdk.integrations.debezium.CdcTargetPosition +import io.airbyte.cdk.integrations.source.relationaldb.state.SourceStateMessageProducer +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.AirbyteStateMessage +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import java.util.* +import org.apache.kafka.connect.errors.ConnectException +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +class DebeziumMessageProducer( + private val cdcStateHandler: CdcStateHandler, + targetPosition: CdcTargetPosition, + eventConverter: DebeziumEventConverter, + offsetManager: AirbyteFileOffsetBackingStore?, + schemaHistoryManager: Optional +) : SourceStateMessageProducer { + /** + * `checkpointOffsetToSend` is used as temporal storage for the offset that we want to send as + * message. As Debezium is reading records faster that we process them, if we try to send + * `offsetManger.read()` offset, it is possible that the state is behind the record we are + * currently propagating. To avoid that, we store the offset as soon as we reach the checkpoint + * threshold (time or records) and we wait to send it until we are sure that the record we are + * processing is behind the offset to be sent. + */ + private val checkpointOffsetToSend = HashMap() + + /** + * `previousCheckpointOffset` is used to make sure we don't send duplicated states with the same + * offset. Is it possible that the offset Debezium report doesn't move for a period of time, and + * if we just rely on the `offsetManger.read()`, there is a chance to sent duplicate states, + * generating an unneeded usage of networking and processing. + */ + private val initialOffset: HashMap + private val previousCheckpointOffset: HashMap + private val offsetManager: AirbyteFileOffsetBackingStore? + private val targetPosition: CdcTargetPosition + private val schemaHistoryManager: Optional + + private var shouldEmitStateMessage = false + + private val eventConverter: DebeziumEventConverter + + init { + this.targetPosition = targetPosition + this.eventConverter = eventConverter + this.offsetManager = offsetManager + if (offsetManager == null) { + throw RuntimeException("Offset manager cannot be null") + } + this.schemaHistoryManager = schemaHistoryManager + this.previousCheckpointOffset = offsetManager.read() as HashMap + this.initialOffset = HashMap(this.previousCheckpointOffset) + } + + override fun generateStateMessageAtCheckpoint( + stream: ConfiguredAirbyteStream? + ): AirbyteStateMessage { + LOGGER.info("Sending CDC checkpoint state message.") + val stateMessage = createStateMessage(checkpointOffsetToSend) + previousCheckpointOffset.clear() + previousCheckpointOffset.putAll(checkpointOffsetToSend) + checkpointOffsetToSend.clear() + shouldEmitStateMessage = false + return stateMessage + } + + /** + * @param stream + * @param message + * @return + */ + override fun processRecordMessage( + stream: ConfiguredAirbyteStream?, + message: ChangeEventWithMetadata + ): AirbyteMessage { + if (checkpointOffsetToSend.isEmpty()) { + try { + val temporalOffset = offsetManager!!.read() + if (!targetPosition.isSameOffset(previousCheckpointOffset, temporalOffset)) { + checkpointOffsetToSend.putAll(temporalOffset) + } + } catch (e: ConnectException) { + LOGGER.warn( + "Offset file is being written by Debezium. Skipping CDC checkpoint in this loop." + ) + } + } + + if (checkpointOffsetToSend.size == 1 && !message!!.isSnapshotEvent) { + if (targetPosition.isEventAheadOffset(checkpointOffsetToSend, message)) { + shouldEmitStateMessage = true + } + } + + return eventConverter.toAirbyteMessage(message!!) + } + + override fun createFinalStateMessage(stream: ConfiguredAirbyteStream?): AirbyteStateMessage { + val syncFinishedOffset = offsetManager!!.read() + if (targetPosition.isSameOffset(initialOffset, syncFinishedOffset)) { + // Edge case where no progress has been made: wrap up the + // sync by returning the initial offset instead of the + // current offset. We do this because we found that + // for some databases, heartbeats will cause Debezium to + // overwrite the offset file with a state which doesn't + // include all necessary data such as snapshot completion. + // This is the case for MS SQL Server, at least. + return createStateMessage(initialOffset) + } + return createStateMessage(syncFinishedOffset) + } + + override fun shouldEmitStateMessage(stream: ConfiguredAirbyteStream?): Boolean { + return shouldEmitStateMessage + } + + /** + * Creates [AirbyteStateMessage] while updating CDC data, used to checkpoint the state of the + * process. + * + * @return [AirbyteStateMessage] which includes offset and schema history if used. + */ + private fun createStateMessage(offset: Map): AirbyteStateMessage { + val message = + cdcStateHandler + .saveState( + offset, + schemaHistoryManager + .map { obj: AirbyteSchemaHistoryStorage -> obj.read() } + .orElse(null) + )!! + .state + return message + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(DebeziumMessageProducer::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/DebeziumPropertiesManager.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/DebeziumPropertiesManager.kt new file mode 100644 index 0000000000000..70846e9fc517b --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/DebeziumPropertiesManager.kt @@ -0,0 +1,131 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.debezium.internals + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import io.debezium.spi.common.ReplacementFunction +import java.util.* + +abstract class DebeziumPropertiesManager( + private val properties: Properties, + private val config: JsonNode, + private val catalog: ConfiguredAirbyteCatalog +) { + fun getDebeziumProperties(offsetManager: AirbyteFileOffsetBackingStore): Properties { + return getDebeziumProperties(offsetManager, Optional.empty()) + } + + fun getDebeziumProperties( + offsetManager: AirbyteFileOffsetBackingStore, + schemaHistoryManager: Optional + ): Properties { + val props = Properties() + props.putAll(properties) + + // debezium engine configuration + offsetManager.setDebeziumProperties(props) + // default values from debezium CommonConnectorConfig + props.setProperty("max.batch.size", "2048") + props.setProperty("max.queue.size", "8192") + + props.setProperty("errors.max.retries", "5") + // This property must be strictly less than errors.retry.delay.max.ms + // (https://github.com/debezium/debezium/blob/bcc7d49519a4f07d123c616cfa45cd6268def0b9/debezium-core/src/main/java/io/debezium/util/DelayStrategy.java#L135) + props.setProperty("errors.retry.delay.initial.ms", "299") + props.setProperty("errors.retry.delay.max.ms", "300") + + schemaHistoryManager.ifPresent { m: AirbyteSchemaHistoryStorage -> + m.setDebeziumProperties(props) + } + + // https://debezium.io/documentation/reference/2.2/configuration/avro.html + props.setProperty("key.converter.schemas.enable", "false") + props.setProperty("value.converter.schemas.enable", "false") + + // debezium names + props.setProperty(NAME_KEY, getName(config)) + + // connection configuration + props.putAll(getConnectionConfiguration(config)) + + // By default "decimal.handing.mode=precise" which's caused returning this value as a + // binary. + // The "double" type may cause a loss of precision, so set Debezium's config to store it as + // a String + // explicitly in its Kafka messages for more details see: + // https://debezium.io/documentation/reference/2.2/connectors/postgresql.html#postgresql-decimal-types + // https://debezium.io/documentation/faq/#how_to_retrieve_decimal_field_from_binary_representation + props.setProperty("decimal.handling.mode", "string") + + // https://debezium.io/documentation/reference/2.2/connectors/postgresql.html#postgresql-property-max-queue-size-in-bytes + props.setProperty("max.queue.size.in.bytes", BYTE_VALUE_256_MB) + + // WARNING : Never change the value of this otherwise all the connectors would start syncing + // from + // scratch. + props.setProperty(TOPIC_PREFIX_KEY, sanitizeTopicPrefix(getName(config))) + // https://issues.redhat.com/browse/DBZ-7635 + // https://cwiki.apache.org/confluence/display/KAFKA/KIP-581%3A+Value+of+optional+null+field+which+has+default+value + // A null value in a column with default value won't be generated correctly in CDC unless we + // set the + // following + props.setProperty("value.converter.replace.null.with.default", "false") + // includes + props.putAll(getIncludeConfiguration(catalog, config)) + + return props + } + + protected abstract fun getConnectionConfiguration(config: JsonNode): Properties + + protected abstract fun getName(config: JsonNode): String + + protected abstract fun getIncludeConfiguration( + catalog: ConfiguredAirbyteCatalog, + config: JsonNode? + ): Properties + + companion object { + private const val BYTE_VALUE_256_MB = (256 * 1024 * 1024).toString() + + const val NAME_KEY: String = "name" + const val TOPIC_PREFIX_KEY: String = "topic.prefix" + + @JvmStatic + fun sanitizeTopicPrefix(topicName: String): String { + val sanitizedNameBuilder = StringBuilder(topicName.length) + var changed = false + + for (i in 0 until topicName.length) { + val c = topicName[i] + if (isValidCharacter(c)) { + sanitizedNameBuilder.append(c) + } else { + sanitizedNameBuilder.append( + ReplacementFunction.UNDERSCORE_REPLACEMENT.replace(c) + ) + changed = true + } + } + + return if (changed) { + sanitizedNameBuilder.toString() + } else { + topicName + } + } + + // We need to keep the validation rule the same as debezium engine, which is defined here: + // https://github.com/debezium/debezium/blob/c51ef3099a688efb41204702d3aa6d4722bb4825/debezium-core/src/main/java/io/debezium/schema/AbstractTopicNamingStrategy.java#L178 + private fun isValidCharacter(c: Char): Boolean { + return c == '.' || + c == '_' || + c == '-' || + c >= 'A' && c <= 'Z' || + c >= 'a' && c <= 'z' || + c >= '0' && c <= '9' + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/DebeziumRecordIterator.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/DebeziumRecordIterator.kt new file mode 100644 index 0000000000000..85f2e7bc7fa68 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/DebeziumRecordIterator.kt @@ -0,0 +1,255 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.debezium.internals + +import com.google.common.annotations.VisibleForTesting +import com.google.common.collect.AbstractIterator +import io.airbyte.cdk.integrations.debezium.CdcTargetPosition +import io.airbyte.commons.lang.MoreBooleans +import io.airbyte.commons.util.AutoCloseableIterator +import io.debezium.engine.ChangeEvent +import java.lang.reflect.Field +import java.time.Duration +import java.time.LocalDateTime +import java.util.* +import java.util.concurrent.* +import java.util.function.Supplier +import org.apache.kafka.connect.source.SourceRecord +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * The record iterator is the consumer (in the producer / consumer relationship with debezium) + * responsible for 1. making sure every record produced by the record publisher is processed 2. + * signalling to the record publisher when it is time for it to stop producing records. It emits + * this signal either when the publisher had not produced a new record for a long time or when it + * has processed at least all of the records that were present in the database when the source was + * started. Because the publisher might publish more records between the consumer sending this + * signal and the publisher actually shutting down, the consumer must stay alive as long as the + * publisher is not closed. Even after the publisher is closed, the consumer will finish processing + * any produced records before closing. + */ +class DebeziumRecordIterator( + private val queue: LinkedBlockingQueue>, + private val targetPosition: CdcTargetPosition, + private val publisherStatusSupplier: Supplier, + private val debeziumShutdownProcedure: DebeziumShutdownProcedure>, + private val firstRecordWaitTime: Duration, + subsequentRecordWaitTime: Duration? +) : AbstractIterator(), AutoCloseableIterator { + private val heartbeatEventSourceField: MutableMap?>, Field?> = + HashMap(1) + private val subsequentRecordWaitTime: Duration = firstRecordWaitTime.dividedBy(2) + + private var receivedFirstRecord = false + private var hasSnapshotFinished = true + private var tsLastHeartbeat: LocalDateTime? = null + private var lastHeartbeatPosition: T? = null + private var maxInstanceOfNoRecordsFound = 0 + private var signalledDebeziumEngineShutdown = false + + // The following logic incorporates heartbeat (CDC postgres only for now): + // 1. Wait on queue either the configured time first or 1 min after a record received + // 2. If nothing came out of queue finish sync + // 3. If received heartbeat: check if hearbeat_lsn reached target or hasn't changed in a while + // finish sync + // 4. If change event lsn reached target finish sync + // 5. Otherwise check message queuen again + override fun computeNext(): ChangeEventWithMetadata? { + // keep trying until the publisher is closed or until the queue is empty. the latter case is + // possible when the publisher has shutdown but the consumer has not yet processed all + // messages it + // emitted. + while (!MoreBooleans.isTruthy(publisherStatusSupplier.get()) || !queue.isEmpty()) { + val next: ChangeEvent? + + val waitTime = + if (receivedFirstRecord) this.subsequentRecordWaitTime else this.firstRecordWaitTime + try { + next = queue.poll(waitTime.seconds, TimeUnit.SECONDS) + } catch (e: InterruptedException) { + throw RuntimeException(e) + } + + // if within the timeout, the consumer could not get a record, it is time to tell the + // producer to + // shutdown. + if (next == null) { + if ( + !receivedFirstRecord || hasSnapshotFinished || maxInstanceOfNoRecordsFound >= 10 + ) { + requestClose( + String.format( + "No records were returned by Debezium in the timeout seconds %s, closing the engine and iterator", + waitTime.seconds + ) + ) + } + LOGGER.info("no record found. polling again.") + maxInstanceOfNoRecordsFound++ + continue + } + + if (isHeartbeatEvent(next)) { + if (!hasSnapshotFinished) { + continue + } + + val heartbeatPos = getHeartbeatPosition(next) + // wrap up sync if heartbeat position crossed the target OR heartbeat position + // hasn't changed for + // too long + if (targetPosition.reachedTargetPosition(heartbeatPos)) { + requestClose( + "Closing: Heartbeat indicates sync is done by reaching the target position" + ) + } else if ( + heartbeatPos == this.lastHeartbeatPosition && heartbeatPosNotChanging() + ) { + requestClose("Closing: Heartbeat indicates sync is not progressing") + } + + if (heartbeatPos != lastHeartbeatPosition) { + this.tsLastHeartbeat = LocalDateTime.now() + this.lastHeartbeatPosition = heartbeatPos + } + continue + } + + val changeEventWithMetadata = ChangeEventWithMetadata(next) + hasSnapshotFinished = !changeEventWithMetadata.isSnapshotEvent + + // if the last record matches the target file position, it is time to tell the producer + // to shutdown. + if (targetPosition.reachedTargetPosition(changeEventWithMetadata)) { + requestClose("Closing: Change event reached target position") + } + this.tsLastHeartbeat = null + this.receivedFirstRecord = true + this.maxInstanceOfNoRecordsFound = 0 + return changeEventWithMetadata + } + + if (!signalledDebeziumEngineShutdown) { + LOGGER.warn("Debezium engine has not been signalled to shutdown, this is unexpected") + } + + // Read the records that Debezium might have fetched right at the time we called shutdown + while (!debeziumShutdownProcedure.recordsRemainingAfterShutdown.isEmpty()) { + val event: ChangeEvent? + try { + event = + debeziumShutdownProcedure.recordsRemainingAfterShutdown.poll( + 100, + TimeUnit.MILLISECONDS + ) + } catch (e: InterruptedException) { + throw RuntimeException(e) + } + if (event == null || isHeartbeatEvent(event)) { + continue + } + val changeEventWithMetadata = ChangeEventWithMetadata(event) + hasSnapshotFinished = !changeEventWithMetadata.isSnapshotEvent + return changeEventWithMetadata + } + throwExceptionIfSnapshotNotFinished() + return endOfData() + } + + /** + * Debezium was built as an ever running process which keeps on listening for new changes on DB + * and immediately processing them. Airbyte needs debezium to work as a start stop mechanism. In + * order to determine when to stop debezium engine we rely on few factors 1. TargetPosition + * logic. At the beginning of the sync we define a target position in the logs of the DB. This + * can be an LSN or anything specific to the DB which can help us identify that we have reached + * a specific position in the log based replication When we start processing records from + * debezium, we extract the the log position from the metadata of the record and compare it with + * our target that we defined at the beginning of the sync. If we have reached the target + * position, we shutdown the debezium engine 2. The TargetPosition logic might not always work + * and in order to tackle that we have another logic where if we do not receive records from + * debezium for a given duration, we ask debezium engine to shutdown 3. We also take the + * Snapshot into consideration, when a connector is running for the first time, we let it + * complete the snapshot and only after the completion of snapshot we should shutdown the + * engine. If we are closing the engine before completion of snapshot, we throw an exception + */ + @Throws(Exception::class) + override fun close() { + requestClose("Closing: Iterator closing") + } + + private fun isHeartbeatEvent(event: ChangeEvent): Boolean { + return targetPosition.isHeartbeatSupported && + Objects.nonNull(event) && + !event.value()!!.contains("source") + } + + private fun heartbeatPosNotChanging(): Boolean { + if (this.tsLastHeartbeat == null) { + return false + } + val timeElapsedSinceLastHeartbeatTs = + Duration.between(this.tsLastHeartbeat, LocalDateTime.now()) + LOGGER.info( + "Time since last hb_pos change {}s", + timeElapsedSinceLastHeartbeatTs.toSeconds() + ) + // wait time for no change in heartbeat position is half of initial waitTime + return timeElapsedSinceLastHeartbeatTs.compareTo(firstRecordWaitTime.dividedBy(2)) > 0 + } + + private fun requestClose(closeLogMessage: String) { + if (signalledDebeziumEngineShutdown) { + return + } + LOGGER.info(closeLogMessage) + debeziumShutdownProcedure.initiateShutdownProcedure() + signalledDebeziumEngineShutdown = true + } + + private fun throwExceptionIfSnapshotNotFinished() { + if (!hasSnapshotFinished) { + throw RuntimeException("Closing down debezium engine but snapshot has not finished") + } + } + + /** + * [DebeziumRecordIterator.heartbeatEventSourceField] acts as a cache so that we avoid using + * reflection to setAccessible for each event + */ + @VisibleForTesting + internal fun getHeartbeatPosition(heartbeatEvent: ChangeEvent): T { + try { + val eventClass: Class?> = heartbeatEvent.javaClass + val f: Field? + if (heartbeatEventSourceField.containsKey(eventClass)) { + f = heartbeatEventSourceField[eventClass] + } else { + f = eventClass.getDeclaredField("sourceRecord") + f.isAccessible = true + heartbeatEventSourceField[eventClass] = f + + if (heartbeatEventSourceField.size > 1) { + LOGGER.warn( + "Field Cache size growing beyond expected size of 1, size is " + + heartbeatEventSourceField.size + ) + } + } + + val sr = f!![heartbeatEvent] as SourceRecord + return targetPosition.extractPositionFromHeartbeatOffset(sr.sourceOffset()) + } catch (e: NoSuchFieldException) { + LOGGER.info("failed to get heartbeat source offset") + throw RuntimeException(e) + } catch (e: IllegalAccessException) { + LOGGER.info("failed to get heartbeat source offset") + throw RuntimeException(e) + } + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(DebeziumRecordIterator::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/DebeziumRecordPublisher.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/DebeziumRecordPublisher.kt new file mode 100644 index 0000000000000..4e0bfc1e14e8f --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/DebeziumRecordPublisher.kt @@ -0,0 +1,114 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.debezium.internals + +import io.debezium.engine.ChangeEvent +import io.debezium.engine.DebeziumEngine +import io.debezium.engine.format.Json +import io.debezium.engine.spi.OffsetCommitPolicy +import java.util.* +import java.util.concurrent.* +import java.util.concurrent.atomic.AtomicBoolean +import java.util.concurrent.atomic.AtomicReference +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * The purpose of this class is to initialize and spawn the debezium engine with the right + * properties to fetch records + */ +class DebeziumRecordPublisher(private val debeziumPropertiesManager: DebeziumPropertiesManager) : + AutoCloseable { + private val executor: ExecutorService = Executors.newSingleThreadExecutor() + private var engine: DebeziumEngine>? = null + private val hasClosed = AtomicBoolean(false) + private val isClosing = AtomicBoolean(false) + private val thrownError = AtomicReference() + private val engineLatch = CountDownLatch(1) + + fun start( + queue: BlockingQueue>, + offsetManager: AirbyteFileOffsetBackingStore, + schemaHistoryManager: Optional + ) { + engine = + DebeziumEngine.create(Json::class.java) + .using( + debeziumPropertiesManager.getDebeziumProperties( + offsetManager, + schemaHistoryManager + ) + ) + .using(OffsetCommitPolicy.AlwaysCommitOffsetPolicy()) + .notifying { e: ChangeEvent -> + // debezium outputs a tombstone event that has a value of null. this is an + // artifact of how it + // interacts with kafka. we want to ignore it. + // more on the tombstone: + // https://debezium.io/documentation/reference/2.2/transformations/event-flattening.html + if (e.value() != null) { + try { + queue.put(e) + } catch (ex: InterruptedException) { + Thread.currentThread().interrupt() + throw RuntimeException(ex) + } + } + } + .using { success: Boolean, message: String?, error: Throwable? -> + LOGGER.info( + "Debezium engine shutdown. Engine terminated successfully : {}", + success + ) + LOGGER.info(message) + if (!success) { + if (error != null) { + thrownError.set(error) + } else { + // There are cases where Debezium doesn't succeed but only fills the + // message field. + // In that case, we still want to fail loud and clear + thrownError.set(RuntimeException(message)) + } + } + engineLatch.countDown() + } + .build() + + // Run the engine asynchronously ... + executor.execute(engine) + } + + fun hasClosed(): Boolean { + return hasClosed.get() + } + + @Throws(Exception::class) + override fun close() { + if (isClosing.compareAndSet(false, true)) { + // consumers should assume records can be produced until engine has closed. + if (engine != null) { + engine!!.close() + } + + // wait for closure before shutting down executor service + engineLatch.await(5, TimeUnit.MINUTES) + + // shut down and await for thread to actually go down + executor.shutdown() + executor.awaitTermination(5, TimeUnit.MINUTES) + + // after the engine is completely off, we can mark this as closed + hasClosed.set(true) + + if (thrownError.get() != null) { + throw RuntimeException(thrownError.get()) + } + } + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(DebeziumRecordPublisher::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/DebeziumShutdownProcedure.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/DebeziumShutdownProcedure.kt new file mode 100644 index 0000000000000..939303c1cc738 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/DebeziumShutdownProcedure.kt @@ -0,0 +1,125 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.debezium.internals + +import io.airbyte.commons.concurrency.VoidCallable +import io.airbyte.commons.lang.MoreBooleans +import java.util.concurrent.* +import java.util.function.Supplier +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * This class has the logic for shutting down Debezium Engine in graceful manner. We made it Generic + * to allow us to write tests easily. + */ +class DebeziumShutdownProcedure( + private val sourceQueue: LinkedBlockingQueue, + private val debeziumThreadRequestClose: VoidCallable, + private val publisherStatusSupplier: Supplier +) { + private val targetQueue = LinkedBlockingQueue() + private val executorService: ExecutorService + private var exception: Throwable? = null + private var hasTransferThreadShutdown: Boolean + + init { + this.hasTransferThreadShutdown = false + this.executorService = + Executors.newSingleThreadExecutor { r: Runnable? -> + val thread = Thread(r, "queue-data-transfer-thread") + thread.uncaughtExceptionHandler = + Thread.UncaughtExceptionHandler { t: Thread?, e: Throwable? -> exception = e } + thread + } + } + + private fun transfer(): Runnable { + return Runnable { + while (!sourceQueue.isEmpty() || !hasEngineShutDown()) { + try { + val event = sourceQueue.poll(100, TimeUnit.MILLISECONDS) + if (event != null) { + targetQueue.put(event) + } + } catch (e: InterruptedException) { + Thread.currentThread().interrupt() + throw RuntimeException(e) + } + } + } + } + + private fun hasEngineShutDown(): Boolean { + return MoreBooleans.isTruthy(publisherStatusSupplier.get()) + } + + private fun initiateTransfer() { + executorService.execute(transfer()) + } + + val recordsRemainingAfterShutdown: LinkedBlockingQueue + get() { + if (!hasTransferThreadShutdown) { + LOGGER.warn( + "Queue transfer thread has not shut down, some records might be missing." + ) + } + return targetQueue + } + + /** + * This method triggers the shutdown of Debezium Engine. When we trigger Debezium shutdown, the + * main thread pauses, as a result we stop reading data from the [sourceQueue] and since the + * queue is of fixed size, if it's already at capacity, Debezium won't be able to put remaining + * records in the queue. So before we trigger Debezium shutdown, we initiate a transfer of the + * records from the [sourceQueue] to a new queue i.e. [targetQueue]. This allows Debezium to + * continue to put records in the [sourceQueue] and once done, gracefully shutdown. After the + * shutdown is complete we just have to read the remaining records from the [targetQueue] + */ + fun initiateShutdownProcedure() { + if (hasEngineShutDown()) { + LOGGER.info("Debezium Engine has already shut down.") + return + } + var exceptionDuringEngineClose: Exception? = null + try { + initiateTransfer() + debeziumThreadRequestClose.call() + } catch (e: Exception) { + exceptionDuringEngineClose = e + throw RuntimeException(e) + } finally { + try { + shutdownTransferThread() + } catch (e: Exception) { + if (exceptionDuringEngineClose != null) { + e.addSuppressed(exceptionDuringEngineClose) + throw e + } + } + } + } + + private fun shutdownTransferThread() { + executorService.shutdown() + var terminated = false + while (!terminated) { + try { + terminated = executorService.awaitTermination(5, TimeUnit.MINUTES) + } catch (e: InterruptedException) { + Thread.currentThread().interrupt() + throw RuntimeException(e) + } + } + hasTransferThreadShutdown = true + if (exception != null) { + throw RuntimeException(exception) + } + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(DebeziumShutdownProcedure::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/DebeziumStateUtil.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/DebeziumStateUtil.kt new file mode 100644 index 0000000000000..fbc6534eb0915 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/DebeziumStateUtil.kt @@ -0,0 +1,96 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.debezium.internals + +import io.debezium.config.Configuration +import io.debezium.embedded.KafkaConnectUtil +import java.lang.Boolean +import java.util.* +import kotlin.String +import org.apache.kafka.connect.json.JsonConverter +import org.apache.kafka.connect.json.JsonConverterConfig +import org.apache.kafka.connect.runtime.WorkerConfig +import org.apache.kafka.connect.runtime.standalone.StandaloneConfig +import org.apache.kafka.connect.storage.FileOffsetBackingStore +import org.apache.kafka.connect.storage.OffsetStorageReaderImpl + +/** Represents a utility class that assists with the parsing of Debezium offset state. */ +interface DebeziumStateUtil { + /** + * Creates and starts a [FileOffsetBackingStore] that is used to store the tracked Debezium + * offset state. + * + * @param properties The Debezium configuration properties for the selected Debezium connector. + * @return A configured and started [FileOffsetBackingStore] instance. + */ + fun getFileOffsetBackingStore(properties: Properties?): FileOffsetBackingStore? { + val fileOffsetBackingStore = KafkaConnectUtil.fileOffsetBackingStore() + val propertiesMap = Configuration.from(properties).asMap() + propertiesMap[WorkerConfig.KEY_CONVERTER_CLASS_CONFIG] = JsonConverter::class.java.name + propertiesMap[WorkerConfig.VALUE_CONVERTER_CLASS_CONFIG] = JsonConverter::class.java.name + fileOffsetBackingStore.configure(StandaloneConfig(propertiesMap)) + fileOffsetBackingStore.start() + return fileOffsetBackingStore + } + + val keyConverter: JsonConverter? + /** + * Creates and returns a [JsonConverter] that can be used to parse keys in the Debezium + * offset state storage. + * + * @return A [JsonConverter] for key conversion. + */ + get() { + val keyConverter = JsonConverter() + keyConverter.configure(INTERNAL_CONVERTER_CONFIG, true) + return keyConverter + } + + /** + * Creates and returns an [OffsetStorageReaderImpl] instance that can be used to load offset + * state from the offset file storage. + * + * @param fileOffsetBackingStore The [FileOffsetBackingStore] that contains the offset state + * saved to disk. + * @param properties The Debezium configuration properties for the selected Debezium connector. + * @return An [OffsetStorageReaderImpl] instance that can be used to load the offset state from + * the offset file storage. + */ + fun getOffsetStorageReader( + fileOffsetBackingStore: FileOffsetBackingStore?, + properties: Properties + ): OffsetStorageReaderImpl? { + return OffsetStorageReaderImpl( + fileOffsetBackingStore, + properties.getProperty(CONNECTOR_NAME_PROPERTY), + keyConverter, + valueConverter + ) + } + + val valueConverter: JsonConverter? + /** + * Creates and returns a [JsonConverter] that can be used to parse values in the Debezium + * offset state storage. + * + * @return A [JsonConverter] for value conversion. + */ + get() { + val valueConverter = JsonConverter() + valueConverter.configure(INTERNAL_CONVERTER_CONFIG, false) + return valueConverter + } + + companion object { + /** + * The name of the Debezium property that contains the unique name for the Debezium + * connector. + */ + const val CONNECTOR_NAME_PROPERTY: String = "name" + + /** Configuration for offset state key/value converters. */ + val INTERNAL_CONVERTER_CONFIG: Map = + java.util.Map.of(JsonConverterConfig.SCHEMAS_ENABLE_CONFIG, Boolean.FALSE.toString()) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/RecordWaitTimeUtil.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/RecordWaitTimeUtil.kt new file mode 100644 index 0000000000000..00c1fa1acfaec --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/RecordWaitTimeUtil.kt @@ -0,0 +1,93 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.debezium.internals + +import com.fasterxml.jackson.databind.JsonNode +import java.time.Duration +import java.util.* +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +object RecordWaitTimeUtil { + private val LOGGER: Logger = LoggerFactory.getLogger(RecordWaitTimeUtil::class.java) + + val MIN_FIRST_RECORD_WAIT_TIME: Duration = Duration.ofMinutes(2) + val MAX_FIRST_RECORD_WAIT_TIME: Duration = Duration.ofMinutes(60) + val DEFAULT_FIRST_RECORD_WAIT_TIME: Duration = Duration.ofMinutes(5) + val DEFAULT_SUBSEQUENT_RECORD_WAIT_TIME: Duration = Duration.ofMinutes(1) + + @JvmStatic + fun checkFirstRecordWaitTime(config: JsonNode) { + // we need to skip the check because in tests, we set initial_waiting_seconds + // to 5 seconds for performance reasons, which is shorter than the minimum + // value allowed in production + if (config.has("is_test") && config["is_test"].asBoolean()) { + return + } + + val firstRecordWaitSeconds = getFirstRecordWaitSeconds(config) + if (firstRecordWaitSeconds.isPresent) { + val seconds = firstRecordWaitSeconds.get() + require( + !(seconds < MIN_FIRST_RECORD_WAIT_TIME.seconds || + seconds > MAX_FIRST_RECORD_WAIT_TIME.seconds) + ) { + String.format( + "initial_waiting_seconds must be between %d and %d seconds", + MIN_FIRST_RECORD_WAIT_TIME.seconds, + MAX_FIRST_RECORD_WAIT_TIME.seconds + ) + } + } + } + + @JvmStatic + fun getFirstRecordWaitTime(config: JsonNode): Duration { + val isTest = config.has("is_test") && config["is_test"].asBoolean() + var firstRecordWaitTime = DEFAULT_FIRST_RECORD_WAIT_TIME + + val firstRecordWaitSeconds = getFirstRecordWaitSeconds(config) + if (firstRecordWaitSeconds.isPresent) { + firstRecordWaitTime = Duration.ofSeconds(firstRecordWaitSeconds.get().toLong()) + if (!isTest && firstRecordWaitTime.compareTo(MIN_FIRST_RECORD_WAIT_TIME) < 0) { + LOGGER.warn( + "First record waiting time is overridden to {} minutes, which is the min time allowed for safety.", + MIN_FIRST_RECORD_WAIT_TIME.toMinutes() + ) + firstRecordWaitTime = MIN_FIRST_RECORD_WAIT_TIME + } else if (!isTest && firstRecordWaitTime.compareTo(MAX_FIRST_RECORD_WAIT_TIME) > 0) { + LOGGER.warn( + "First record waiting time is overridden to {} minutes, which is the max time allowed for safety.", + MAX_FIRST_RECORD_WAIT_TIME.toMinutes() + ) + firstRecordWaitTime = MAX_FIRST_RECORD_WAIT_TIME + } + } + + LOGGER.info("First record waiting time: {} seconds", firstRecordWaitTime.seconds) + return firstRecordWaitTime + } + + @JvmStatic + fun getSubsequentRecordWaitTime(config: JsonNode): Duration { + var subsequentRecordWaitTime = DEFAULT_SUBSEQUENT_RECORD_WAIT_TIME + val isTest = config.has("is_test") && config["is_test"].asBoolean() + val firstRecordWaitSeconds = getFirstRecordWaitSeconds(config) + if (isTest && firstRecordWaitSeconds.isPresent) { + // In tests, reuse the initial_waiting_seconds property to speed things up. + subsequentRecordWaitTime = Duration.ofSeconds(firstRecordWaitSeconds.get().toLong()) + } + LOGGER.info("Subsequent record waiting time: {} seconds", subsequentRecordWaitTime.seconds) + return subsequentRecordWaitTime + } + + fun getFirstRecordWaitSeconds(config: JsonNode): Optional { + val replicationMethod = config["replication_method"] + if (replicationMethod != null && replicationMethod.has("initial_waiting_seconds")) { + val seconds = config["replication_method"]["initial_waiting_seconds"].asInt() + return Optional.of(seconds) + } + return Optional.empty() + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/RelationalDbDebeziumEventConverter.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/RelationalDbDebeziumEventConverter.kt new file mode 100644 index 0000000000000..b7e09e7c9b9ed --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/RelationalDbDebeziumEventConverter.kt @@ -0,0 +1,37 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.debezium.internals + +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.node.ObjectNode +import io.airbyte.cdk.integrations.debezium.CdcMetadataInjector +import io.airbyte.protocol.models.v0.AirbyteMessage +import java.time.Instant + +class RelationalDbDebeziumEventConverter( + private val cdcMetadataInjector: CdcMetadataInjector<*>, + private val emittedAt: Instant +) : DebeziumEventConverter { + override fun toAirbyteMessage(event: ChangeEventWithMetadata): AirbyteMessage { + val debeziumEvent = event.eventValueAsJson() + val before: JsonNode = debeziumEvent!!.get(DebeziumEventConverter.Companion.BEFORE_EVENT) + val after: JsonNode = debeziumEvent.get(DebeziumEventConverter.Companion.AFTER_EVENT) + val source: JsonNode = debeziumEvent.get(DebeziumEventConverter.Companion.SOURCE_EVENT) + + val baseNode = (if (after.isNull) before else after) as ObjectNode + val data: JsonNode = + DebeziumEventConverter.Companion.addCdcMetadata( + baseNode, + source, + cdcMetadataInjector, + after.isNull + ) + return DebeziumEventConverter.Companion.buildAirbyteMessage( + source, + cdcMetadataInjector, + emittedAt, + data + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/RelationalDbDebeziumPropertiesManager.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/RelationalDbDebeziumPropertiesManager.kt new file mode 100644 index 0000000000000..c78ead79f77d1 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/RelationalDbDebeziumPropertiesManager.kt @@ -0,0 +1,116 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.debezium.internals + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.db.jdbc.JdbcUtils +import io.airbyte.protocol.models.v0.AirbyteStream +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import io.airbyte.protocol.models.v0.SyncMode +import java.util.* +import java.util.regex.Pattern +import java.util.stream.Collectors +import java.util.stream.StreamSupport +import org.codehaus.plexus.util.StringUtils + +class RelationalDbDebeziumPropertiesManager( + properties: Properties, + config: JsonNode, + catalog: ConfiguredAirbyteCatalog +) : DebeziumPropertiesManager(properties, config, catalog) { + override fun getConnectionConfiguration(config: JsonNode): Properties { + val properties = Properties() + + // db connection configuration + properties.setProperty("database.hostname", config[JdbcUtils.HOST_KEY].asText()) + properties.setProperty("database.port", config[JdbcUtils.PORT_KEY].asText()) + properties.setProperty("database.user", config[JdbcUtils.USERNAME_KEY].asText()) + properties.setProperty("database.dbname", config[JdbcUtils.DATABASE_KEY].asText()) + + if (config.has(JdbcUtils.PASSWORD_KEY)) { + properties.setProperty("database.password", config[JdbcUtils.PASSWORD_KEY].asText()) + } + + return properties + } + + override fun getName(config: JsonNode): String { + return config[JdbcUtils.DATABASE_KEY].asText() + } + + override fun getIncludeConfiguration( + catalog: ConfiguredAirbyteCatalog, + config: JsonNode? + ): Properties { + val properties = Properties() + + // table selection + properties.setProperty("table.include.list", getTableIncludelist(catalog)) + // column selection + properties.setProperty("column.include.list", getColumnIncludeList(catalog)) + + return properties + } + + companion object { + fun getTableIncludelist(catalog: ConfiguredAirbyteCatalog): String { + // Turn "stream": { + // "namespace": "schema1" + // "name": "table1 + // }, + // "stream": { + // "namespace": "schema2" + // "name": "table2 + // } -------> info "schema1.table1, schema2.table2" + + return catalog.streams + .stream() + .filter { s: ConfiguredAirbyteStream -> s.syncMode == SyncMode.INCREMENTAL } + .map { obj: ConfiguredAirbyteStream -> obj.stream } + .map { stream: AirbyteStream -> + stream.namespace + "." + stream.name + } // debezium needs commas escaped to split properly + .map { x: String -> StringUtils.escape(Pattern.quote(x), ",".toCharArray(), "\\,") } + .collect(Collectors.joining(",")) + } + + fun getColumnIncludeList(catalog: ConfiguredAirbyteCatalog): String { + // Turn "stream": { + // "namespace": "schema1" + // "name": "table1" + // "jsonSchema": { + // "properties": { + // "column1": { + // }, + // "column2": { + // } + // } + // } + // } -------> info "schema1.table1.(column1 | column2)" + + return catalog.streams + .stream() + .filter { s: ConfiguredAirbyteStream -> s.syncMode == SyncMode.INCREMENTAL } + .map { obj: ConfiguredAirbyteStream -> obj.stream } + .map { s: AirbyteStream -> + val fields = parseFields(s.jsonSchema["properties"].fieldNames()) + Pattern.quote(s.namespace + "." + s.name) + + (if (StringUtils.isNotBlank(fields)) "\\.$fields" else "") + } + .map { x: String? -> StringUtils.escape(x, ",".toCharArray(), "\\,") } + .collect(Collectors.joining(",")) + } + + private fun parseFields(fieldNames: Iterator?): String { + if (fieldNames == null || !fieldNames.hasNext()) { + return "" + } + val iter = Iterable { fieldNames } + return StreamSupport.stream(iter.spliterator(), false) + .map { f: String -> Pattern.quote(f) } + .collect(Collectors.joining("|", "(", ")")) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/SnapshotMetadata.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/SnapshotMetadata.kt new file mode 100644 index 0000000000000..f34141431ca17 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/debezium/internals/SnapshotMetadata.kt @@ -0,0 +1,50 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.debezium.internals + +import com.google.common.collect.ImmutableSet + +enum class SnapshotMetadata { + FIRST, + FIRST_IN_DATA_COLLECTION, + LAST_IN_DATA_COLLECTION, + TRUE, + LAST, + FALSE, + NULL; + + companion object { + private val ENTRIES_OF_SNAPSHOT_EVENTS: Set = + ImmutableSet.of(TRUE, FIRST, FIRST_IN_DATA_COLLECTION, LAST_IN_DATA_COLLECTION) + private val STRING_TO_ENUM: MutableMap = HashMap(12) + + init { + STRING_TO_ENUM["true"] = TRUE + STRING_TO_ENUM["TRUE"] = TRUE + STRING_TO_ENUM["false"] = FALSE + STRING_TO_ENUM["FALSE"] = FALSE + STRING_TO_ENUM["last"] = LAST + STRING_TO_ENUM["LAST"] = LAST + STRING_TO_ENUM["first"] = FIRST + STRING_TO_ENUM["FIRST"] = FIRST + STRING_TO_ENUM["last_in_data_collection"] = LAST_IN_DATA_COLLECTION + STRING_TO_ENUM["LAST_IN_DATA_COLLECTION"] = LAST_IN_DATA_COLLECTION + STRING_TO_ENUM["first_in_data_collection"] = FIRST_IN_DATA_COLLECTION + STRING_TO_ENUM["FIRST_IN_DATA_COLLECTION"] = FIRST_IN_DATA_COLLECTION + STRING_TO_ENUM["NULL"] = NULL + STRING_TO_ENUM["null"] = NULL + } + + fun fromString(value: String): SnapshotMetadata? { + if (STRING_TO_ENUM.containsKey(value)) { + return STRING_TO_ENUM[value] + } + throw RuntimeException("ENUM value not found for $value") + } + + fun isSnapshotEventMetadata(snapshotMetadata: SnapshotMetadata?): Boolean { + return ENTRIES_OF_SNAPSHOT_EVENTS.contains(snapshotMetadata) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/jdbc/AbstractJdbcSource.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/jdbc/AbstractJdbcSource.kt new file mode 100644 index 0000000000000..91348ed795f81 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/jdbc/AbstractJdbcSource.kt @@ -0,0 +1,745 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.jdbc + +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.annotations.VisibleForTesting +import com.google.common.collect.ImmutableList +import com.google.common.collect.ImmutableMap +import com.google.common.collect.Sets +import datadog.trace.api.Trace +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings +import io.airbyte.cdk.db.JdbcCompatibleSourceOperations +import io.airbyte.cdk.db.SqlDatabase +import io.airbyte.cdk.db.factory.DataSourceFactory.close +import io.airbyte.cdk.db.factory.DataSourceFactory.create +import io.airbyte.cdk.db.jdbc.AirbyteRecordData +import io.airbyte.cdk.db.jdbc.JdbcConstants.INTERNAL_COLUMN_NAME +import io.airbyte.cdk.db.jdbc.JdbcConstants.INTERNAL_COLUMN_SIZE +import io.airbyte.cdk.db.jdbc.JdbcConstants.INTERNAL_COLUMN_TYPE +import io.airbyte.cdk.db.jdbc.JdbcConstants.INTERNAL_COLUMN_TYPE_NAME +import io.airbyte.cdk.db.jdbc.JdbcConstants.INTERNAL_DECIMAL_DIGITS +import io.airbyte.cdk.db.jdbc.JdbcConstants.INTERNAL_IS_NULLABLE +import io.airbyte.cdk.db.jdbc.JdbcConstants.INTERNAL_SCHEMA_NAME +import io.airbyte.cdk.db.jdbc.JdbcConstants.INTERNAL_TABLE_NAME +import io.airbyte.cdk.db.jdbc.JdbcConstants.JDBC_COLUMN_COLUMN_NAME +import io.airbyte.cdk.db.jdbc.JdbcConstants.JDBC_COLUMN_DATABASE_NAME +import io.airbyte.cdk.db.jdbc.JdbcConstants.JDBC_COLUMN_DATA_TYPE +import io.airbyte.cdk.db.jdbc.JdbcConstants.JDBC_COLUMN_SCHEMA_NAME +import io.airbyte.cdk.db.jdbc.JdbcConstants.JDBC_COLUMN_SIZE +import io.airbyte.cdk.db.jdbc.JdbcConstants.JDBC_COLUMN_TABLE_NAME +import io.airbyte.cdk.db.jdbc.JdbcConstants.JDBC_COLUMN_TYPE_NAME +import io.airbyte.cdk.db.jdbc.JdbcConstants.JDBC_DECIMAL_DIGITS +import io.airbyte.cdk.db.jdbc.JdbcConstants.JDBC_IS_NULLABLE +import io.airbyte.cdk.db.jdbc.JdbcConstants.KEY_SEQ +import io.airbyte.cdk.db.jdbc.JdbcDatabase +import io.airbyte.cdk.db.jdbc.JdbcUtils +import io.airbyte.cdk.db.jdbc.JdbcUtils.getFullyQualifiedTableName +import io.airbyte.cdk.db.jdbc.StreamingJdbcDatabase +import io.airbyte.cdk.db.jdbc.streaming.JdbcStreamingQueryConfig +import io.airbyte.cdk.integrations.base.Source +import io.airbyte.cdk.integrations.source.jdbc.dto.JdbcPrivilegeDto +import io.airbyte.cdk.integrations.source.relationaldb.AbstractDbSource +import io.airbyte.cdk.integrations.source.relationaldb.CursorInfo +import io.airbyte.cdk.integrations.source.relationaldb.RelationalDbQueryUtils +import io.airbyte.cdk.integrations.source.relationaldb.RelationalDbQueryUtils.enquoteIdentifier +import io.airbyte.cdk.integrations.source.relationaldb.TableInfo +import io.airbyte.cdk.integrations.source.relationaldb.state.StateManager +import io.airbyte.commons.functional.CheckedConsumer +import io.airbyte.commons.functional.CheckedFunction +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.stream.AirbyteStreamUtils +import io.airbyte.commons.util.AutoCloseableIterator +import io.airbyte.commons.util.AutoCloseableIterators +import io.airbyte.protocol.models.CommonField +import io.airbyte.protocol.models.JsonSchemaType +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import io.airbyte.protocol.models.v0.SyncMode +import java.sql.Connection +import java.sql.PreparedStatement +import java.sql.ResultSet +import java.sql.SQLException +import java.util.* +import java.util.function.Consumer +import java.util.function.Function +import java.util.function.Predicate +import java.util.function.Supplier +import java.util.stream.Collectors +import javax.sql.DataSource +import org.apache.commons.lang3.tuple.ImmutablePair +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * This class contains helper functions and boilerplate for implementing a source connector for a + * relational DB source which can be accessed via JDBC driver. If you are implementing a connector + * for a relational DB which has a JDBC driver, make an effort to use this class. + */ +// This is onoly here because spotbugs complains about aggregatePrimateKeys and I wasn't able to +// figure out what it's complaining about +@SuppressFBWarnings("NP_PARAMETER_MUST_BE_NONNULL_BUT_MARKED_AS_NULLABLE") +abstract class AbstractJdbcSource( + driverClass: String, + @JvmField val streamingQueryConfigProvider: Supplier, + sourceOperations: JdbcCompatibleSourceOperations +) : AbstractDbSource(driverClass), Source { + @JvmField val sourceOperations: JdbcCompatibleSourceOperations + + override var quoteString: String? = null + @JvmField val dataSources: MutableCollection = ArrayList() + + init { + this.sourceOperations = sourceOperations + } + + override fun queryTableFullRefresh( + database: JdbcDatabase, + columnNames: List, + schemaName: String?, + tableName: String, + syncMode: SyncMode, + cursorField: Optional + ): AutoCloseableIterator { + AbstractDbSource.LOGGER.info("Queueing query for table: {}", tableName) + val airbyteStream = AirbyteStreamUtils.convertFromNameAndNamespace(tableName, schemaName) + return AutoCloseableIterators.lazyIterator( + Supplier> { + try { + val stream = + database.unsafeQuery( + { connection: Connection -> + AbstractDbSource.LOGGER.info( + "Preparing query for table: {}", + tableName + ) + val fullTableName: String = + RelationalDbQueryUtils.getFullyQualifiedTableNameWithQuoting( + schemaName, + tableName, + quoteString!! + ) + + val wrappedColumnNames = + getWrappedColumnNames( + database, + connection, + columnNames, + schemaName, + tableName + ) + val sql = + java.lang.StringBuilder( + String.format( + "SELECT %s FROM %s", + wrappedColumnNames, + fullTableName + ) + ) + // if the connector emits intermediate states, the incremental query + // must be sorted by the cursor + // field + if ( + syncMode == SyncMode.INCREMENTAL && stateEmissionFrequency > 0 + ) { + val quotedCursorField: String = + enquoteIdentifier(cursorField.get(), quoteString) + sql.append(String.format(" ORDER BY %s ASC", quotedCursorField)) + } + + val preparedStatement = connection.prepareStatement(sql.toString()) + AbstractDbSource.LOGGER.info( + "Executing query for table {}: {}", + tableName, + preparedStatement + ) + preparedStatement + }, + sourceOperations::convertDatabaseRowToAirbyteRecordData + ) + return@Supplier AutoCloseableIterators.fromStream( + stream, + airbyteStream + ) + } catch (e: SQLException) { + throw java.lang.RuntimeException(e) + } + }, + airbyteStream + ) + } + + /** + * Configures a list of operations that can be used to check the connection to the source. + * + * @return list of consumers that run queries for the check command. + */ + @Trace(operationName = AbstractDbSource.Companion.CHECK_TRACE_OPERATION_NAME) + @Throws(Exception::class) + override fun getCheckOperations( + config: JsonNode? + ): List> { + return ImmutableList.of( + CheckedConsumer { database: JdbcDatabase -> + LOGGER.info( + "Attempting to get metadata from the database to see if we can connect." + ) + database.bufferedResultSetQuery( + CheckedFunction { connection: Connection -> connection.metaData.catalogs }, + CheckedFunction { queryResult: ResultSet? -> + sourceOperations.rowToJson(queryResult!!) + } + ) + } + ) + } + + private fun getCatalog(database: SqlDatabase): String? { + return (if (database.sourceConfig!!.has(JdbcUtils.DATABASE_KEY)) + database.sourceConfig!![JdbcUtils.DATABASE_KEY].asText() + else null) + } + + @Throws(Exception::class) + override fun discoverInternal( + database: JdbcDatabase, + schema: String? + ): List>> { + val internalSchemas: Set = HashSet(excludedInternalNameSpaces) + LOGGER.info("Internal schemas to exclude: {}", internalSchemas) + val tablesWithSelectGrantPrivilege = + getPrivilegesTableForCurrentUser(database, schema) + return database + .bufferedResultSetQuery( // retrieve column metadata from the database + { connection: Connection -> + connection.metaData.getColumns(getCatalog(database), schema, null, null) + }, // store essential column metadata to a Json object from the result set about + // each column + { resultSet: ResultSet -> this.getColumnMetadata(resultSet) } + ) + .stream() + .filter( + excludeNotAccessibleTables(internalSchemas, tablesWithSelectGrantPrivilege) + ) // group by schema and table name to handle the case where a table with the same name + // exists in + // multiple schemas. + .collect( + Collectors.groupingBy>( + Function> { t: JsonNode -> + ImmutablePair.of( + t.get(INTERNAL_SCHEMA_NAME).asText(), + t.get(INTERNAL_TABLE_NAME).asText() + ) + } + ) + ) + .values + .stream() + .map>> { fields: List -> + TableInfo>( + nameSpace = fields[0].get(INTERNAL_SCHEMA_NAME).asText(), + name = fields[0].get(INTERNAL_TABLE_NAME).asText(), + fields = + fields + .stream() // read the column metadata Json object, and determine its + // type + .map { f: JsonNode -> + val datatype = sourceOperations.getDatabaseFieldType(f) + val jsonType = getAirbyteType(datatype) + LOGGER.debug( + "Table {} column {} (type {}[{}], nullable {}) -> {}", + fields[0].get(INTERNAL_TABLE_NAME).asText(), + f.get(INTERNAL_COLUMN_NAME).asText(), + f.get(INTERNAL_COLUMN_TYPE_NAME).asText(), + f.get(INTERNAL_COLUMN_SIZE).asInt(), + f.get(INTERNAL_IS_NULLABLE).asBoolean(), + jsonType + ) + object : + CommonField( + f.get(INTERNAL_COLUMN_NAME).asText(), + datatype + ) {} + } + .collect(Collectors.toList>()), + cursorFields = extractCursorFields(fields) + ) + } + .collect(Collectors.toList>>()) + } + + private fun extractCursorFields(fields: List): List { + return fields + .stream() + .filter { field: JsonNode -> + isCursorType(sourceOperations.getDatabaseFieldType(field)) + } + .map( + Function { field: JsonNode -> + field.get(INTERNAL_COLUMN_NAME).asText() + } + ) + .collect(Collectors.toList()) + } + + protected fun excludeNotAccessibleTables( + internalSchemas: Set, + tablesWithSelectGrantPrivilege: Set? + ): Predicate { + return Predicate { jsonNode: JsonNode -> + if (tablesWithSelectGrantPrivilege!!.isEmpty()) { + return@Predicate isNotInternalSchema(jsonNode, internalSchemas) + } + (tablesWithSelectGrantPrivilege.stream().anyMatch { e: JdbcPrivilegeDto -> + e.schemaName == jsonNode.get(INTERNAL_SCHEMA_NAME).asText() + } && + tablesWithSelectGrantPrivilege.stream().anyMatch { e: JdbcPrivilegeDto -> + e.tableName == jsonNode.get(INTERNAL_TABLE_NAME).asText() + } && + !internalSchemas.contains(jsonNode.get(INTERNAL_SCHEMA_NAME).asText())) + } + } + + // needs to override isNotInternalSchema for connectors that override + // getPrivilegesTableForCurrentUser() + protected open fun isNotInternalSchema( + jsonNode: JsonNode, + internalSchemas: Set + ): Boolean { + return !internalSchemas.contains(jsonNode.get(INTERNAL_SCHEMA_NAME).asText()) + } + + /** + * @param resultSet Description of a column available in the table catalog. + * @return Essential information about a column to determine which table it belongs to and its + * type. + */ + @Throws(SQLException::class) + private fun getColumnMetadata(resultSet: ResultSet): JsonNode { + val fieldMap = + ImmutableMap.builder< + String, Any + >() // we always want a namespace, if we cannot get a schema, use db name. + .put( + INTERNAL_SCHEMA_NAME, + if (resultSet.getObject(JDBC_COLUMN_SCHEMA_NAME) != null) + resultSet.getString(JDBC_COLUMN_SCHEMA_NAME) + else resultSet.getObject(JDBC_COLUMN_DATABASE_NAME) + ) + .put(INTERNAL_TABLE_NAME, resultSet.getString(JDBC_COLUMN_TABLE_NAME)) + .put(INTERNAL_COLUMN_NAME, resultSet.getString(JDBC_COLUMN_COLUMN_NAME)) + .put(INTERNAL_COLUMN_TYPE, resultSet.getString(JDBC_COLUMN_DATA_TYPE)) + .put(INTERNAL_COLUMN_TYPE_NAME, resultSet.getString(JDBC_COLUMN_TYPE_NAME)) + .put(INTERNAL_COLUMN_SIZE, resultSet.getInt(JDBC_COLUMN_SIZE)) + .put(INTERNAL_IS_NULLABLE, resultSet.getString(JDBC_IS_NULLABLE)) + if (resultSet.getString(JDBC_DECIMAL_DIGITS) != null) { + fieldMap.put(INTERNAL_DECIMAL_DIGITS, resultSet.getString(JDBC_DECIMAL_DIGITS)) + } + return Jsons.jsonNode(fieldMap.build()) + } + + @Throws(Exception::class) + public override fun discoverInternal( + database: JdbcDatabase + ): List>> { + return discoverInternal(database, null) + } + + public override fun getAirbyteType(columnType: Datatype): JsonSchemaType { + return sourceOperations.getAirbyteType(columnType) + } + + @VisibleForTesting + @JvmRecord + data class PrimaryKeyAttributesFromDb( + val streamName: String, + val primaryKey: String, + val keySequence: Int + ) + + override fun discoverPrimaryKeys( + database: JdbcDatabase, + tableInfos: List>> + ): Map> { + LOGGER.info( + "Discover primary keys for tables: " + + tableInfos + .stream() + .map { obj: TableInfo> -> obj.name } + .collect(Collectors.toSet()) + ) + try { + // Get all primary keys without specifying a table name + val tablePrimaryKeys = + aggregatePrimateKeys( + database.bufferedResultSetQuery( + { connection: Connection -> + connection.metaData.getPrimaryKeys(getCatalog(database), null, null) + }, + { r: ResultSet -> + val schemaName: String = + if (r.getObject(JDBC_COLUMN_SCHEMA_NAME) != null) + r.getString(JDBC_COLUMN_SCHEMA_NAME) + else r.getString(JDBC_COLUMN_DATABASE_NAME) + val streamName = + getFullyQualifiedTableName( + schemaName, + r.getString(JDBC_COLUMN_TABLE_NAME) + ) + val primaryKey: String = r.getString(JDBC_COLUMN_COLUMN_NAME) + val keySeq: Int = r.getInt(KEY_SEQ) + PrimaryKeyAttributesFromDb(streamName, primaryKey, keySeq) + } + ) + ) + if (!tablePrimaryKeys.isEmpty()) { + return tablePrimaryKeys + } + } catch (e: SQLException) { + LOGGER.debug( + String.format( + "Could not retrieve primary keys without a table name (%s), retrying", + e + ) + ) + } + // Get primary keys one table at a time + return tableInfos + .stream() + .collect( + Collectors.toMap>, String, MutableList>( + Function>, String> { + tableInfo: TableInfo> -> + getFullyQualifiedTableName(tableInfo.nameSpace, tableInfo.name) + }, + Function>, MutableList> toMap@{ + tableInfo: TableInfo> -> + val streamName = + getFullyQualifiedTableName(tableInfo.nameSpace, tableInfo.name) + try { + val primaryKeys = + aggregatePrimateKeys( + database.bufferedResultSetQuery( + { connection: Connection -> + connection.metaData.getPrimaryKeys( + getCatalog(database), + tableInfo.nameSpace, + tableInfo.name + ) + }, + { r: ResultSet -> + PrimaryKeyAttributesFromDb( + streamName, + r.getString(JDBC_COLUMN_COLUMN_NAME), + r.getInt(KEY_SEQ) + ) + } + ) + ) + return@toMap primaryKeys.getOrDefault( + streamName, + mutableListOf() + ) + } catch (e: SQLException) { + LOGGER.error( + String.format( + "Could not retrieve primary keys for %s: %s", + streamName, + e + ) + ) + return@toMap mutableListOf() + } + } + ) + ) + } + + public override fun isCursorType(type: Datatype): Boolean { + return sourceOperations.isCursorType(type) + } + + override fun queryTableIncremental( + database: JdbcDatabase, + columnNames: List, + schemaName: String?, + tableName: String, + cursorInfo: CursorInfo, + cursorFieldType: Datatype + ): AutoCloseableIterator { + AbstractDbSource.LOGGER.info("Queueing query for table: {}", tableName) + val airbyteStream = AirbyteStreamUtils.convertFromNameAndNamespace(tableName, schemaName) + return AutoCloseableIterators.lazyIterator( + { + try { + val stream = + database.unsafeQuery( + { connection: Connection -> + AbstractDbSource.LOGGER.info( + "Preparing query for table: {}", + tableName + ) + val fullTableName: String = + RelationalDbQueryUtils.getFullyQualifiedTableNameWithQuoting( + schemaName, + tableName, + quoteString!! + ) + val quotedCursorField: String = + enquoteIdentifier(cursorInfo.cursorField, quoteString) + val operator: String + if (cursorInfo.cursorRecordCount <= 0L) { + operator = ">" + } else { + val actualRecordCount = + getActualCursorRecordCount( + connection, + fullTableName, + quotedCursorField, + cursorFieldType, + cursorInfo.cursor + ) + AbstractDbSource.LOGGER.info( + "Table {} cursor count: expected {}, actual {}", + tableName, + cursorInfo.cursorRecordCount, + actualRecordCount + ) + operator = + if (actualRecordCount == cursorInfo.cursorRecordCount) { + ">" + } else { + ">=" + } + } + val wrappedColumnNames = + getWrappedColumnNames( + database, + connection, + columnNames, + schemaName, + tableName + ) + val sql = + StringBuilder( + String.format( + "SELECT %s FROM %s WHERE %s %s ?", + wrappedColumnNames, + fullTableName, + quotedCursorField, + operator + ) + ) + // if the connector emits intermediate states, the incremental query + // must be sorted by the cursor + // field + if (stateEmissionFrequency > 0) { + sql.append(String.format(" ORDER BY %s ASC", quotedCursorField)) + } + val preparedStatement = connection.prepareStatement(sql.toString()) + AbstractDbSource.LOGGER.info( + "Executing query for table {}: {}", + tableName, + preparedStatement + ) + sourceOperations.setCursorField( + preparedStatement, + 1, + cursorFieldType, + cursorInfo.cursor!! + ) + preparedStatement + }, + sourceOperations::convertDatabaseRowToAirbyteRecordData + ) + return@lazyIterator AutoCloseableIterators.fromStream( + stream, + airbyteStream + ) + } catch (e: SQLException) { + throw RuntimeException(e) + } + }, + airbyteStream + ) + } + + protected fun getCountColumnName(): String { + return "record_count" + } + + /** Some databases need special column names in the query. */ + @Throws(SQLException::class) + protected open fun getWrappedColumnNames( + database: JdbcDatabase?, + connection: Connection?, + columnNames: List, + schemaName: String?, + tableName: String? + ): String? { + return RelationalDbQueryUtils.enquoteIdentifierList(columnNames, quoteString!!) + } + + @Throws(SQLException::class) + protected fun getActualCursorRecordCount( + connection: Connection, + fullTableName: String?, + quotedCursorField: String?, + cursorFieldType: Datatype, + cursor: String? + ): Long { + val columnName = getCountColumnName() + val cursorRecordStatement: PreparedStatement + if (cursor == null) { + val cursorRecordQuery = + String.format( + "SELECT COUNT(*) AS %s FROM %s WHERE %s IS NULL", + columnName, + fullTableName, + quotedCursorField + ) + cursorRecordStatement = connection.prepareStatement(cursorRecordQuery) + } else { + val cursorRecordQuery = + String.format( + "SELECT COUNT(*) AS %s FROM %s WHERE %s = ?", + columnName, + fullTableName, + quotedCursorField + ) + cursorRecordStatement = connection.prepareStatement(cursorRecordQuery) + + sourceOperations.setCursorField(cursorRecordStatement, 1, cursorFieldType, cursor) + } + val resultSet = cursorRecordStatement.executeQuery() + return if (resultSet.next()) { + resultSet.getLong(columnName) + } else { + 0L + } + } + + @Throws(SQLException::class) + public override fun createDatabase(sourceConfig: JsonNode): JdbcDatabase { + return createDatabase(sourceConfig, JdbcDataSourceUtils.DEFAULT_JDBC_PARAMETERS_DELIMITER) + } + + @Throws(SQLException::class) + fun createDatabase(sourceConfig: JsonNode, delimiter: String): JdbcDatabase { + val jdbcConfig = toDatabaseConfig(sourceConfig) + val connectionProperties = + JdbcDataSourceUtils.getConnectionProperties(sourceConfig, delimiter) + // Create the data source + val dataSource = + create( + if (jdbcConfig!!.has(JdbcUtils.USERNAME_KEY)) + jdbcConfig[JdbcUtils.USERNAME_KEY].asText() + else null, + if (jdbcConfig.has(JdbcUtils.PASSWORD_KEY)) + jdbcConfig[JdbcUtils.PASSWORD_KEY].asText() + else null, + driverClassName, + jdbcConfig[JdbcUtils.JDBC_URL_KEY].asText(), + connectionProperties, + getConnectionTimeout(connectionProperties!!) + ) + // Record the data source so that it can be closed. + dataSources.add(dataSource) + + val database: JdbcDatabase = + StreamingJdbcDatabase(dataSource, sourceOperations, streamingQueryConfigProvider) + + quoteString = + (if (quoteString == null) database.metaData.identifierQuoteString else quoteString) + database.sourceConfig = sourceConfig + database.databaseConfig = jdbcConfig + return database + } + + /** + * {@inheritDoc} + * + * @param database database instance + * @param catalog schema of the incoming messages. + * @throws SQLException + */ + @Throws(SQLException::class) + override fun logPreSyncDebugData(database: JdbcDatabase, catalog: ConfiguredAirbyteCatalog?) { + LOGGER.info( + "Data source product recognized as {}:{}", + database.metaData.databaseProductName, + database.metaData.databaseProductVersion + ) + } + + override fun close() { + dataSources.forEach( + Consumer { d: DataSource? -> + try { + close(d) + } catch (e: Exception) { + LOGGER.warn("Unable to close data source.", e) + } + } + ) + dataSources.clear() + } + + protected fun identifyStreamsToSnapshot( + catalog: ConfiguredAirbyteCatalog, + stateManager: StateManager + ): List { + val alreadySyncedStreams = stateManager.cdcStateManager.initialStreamsSynced + if ( + alreadySyncedStreams!!.isEmpty() && + (stateManager.cdcStateManager.cdcState?.state == null) + ) { + return emptyList() + } + + val allStreams = AirbyteStreamNameNamespacePair.fromConfiguredCatalog(catalog) + + val newlyAddedStreams: Set = + HashSet(Sets.difference(allStreams, alreadySyncedStreams)) + + return catalog.streams + .stream() + .filter { c: ConfiguredAirbyteStream -> c.syncMode == SyncMode.INCREMENTAL } + .filter { stream: ConfiguredAirbyteStream -> + newlyAddedStreams.contains( + AirbyteStreamNameNamespacePair.fromAirbyteStream(stream.stream) + ) + } + .map { `object`: ConfiguredAirbyteStream -> Jsons.clone(`object`) } + .collect(Collectors.toList()) + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(AbstractJdbcSource::class.java) + + /** + * Aggregate list of @param entries of StreamName and PrimaryKey and + * + * @return a map by StreamName to associated list of primary keys + */ + @VisibleForTesting + @JvmStatic + fun aggregatePrimateKeys( + entries: List + ): Map> { + val result: MutableMap> = HashMap() + entries + .stream() + .sorted(Comparator.comparingInt(PrimaryKeyAttributesFromDb::keySequence)) + .forEach { entry: PrimaryKeyAttributesFromDb -> + if (!result.containsKey(entry.streamName)) { + result[entry.streamName] = ArrayList() + } + result[entry.streamName]!!.add(entry.primaryKey) + } + return result + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/jdbc/JdbcDataSourceUtils.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/jdbc/JdbcDataSourceUtils.kt new file mode 100644 index 0000000000000..a5e06aefbb489 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/jdbc/JdbcDataSourceUtils.kt @@ -0,0 +1,70 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.jdbc + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.db.jdbc.JdbcUtils +import io.airbyte.cdk.db.jdbc.JdbcUtils.parseJdbcParameters +import io.airbyte.commons.map.MoreMaps + +object JdbcDataSourceUtils { + const val DEFAULT_JDBC_PARAMETERS_DELIMITER: String = "&" + + /** + * Validates for duplication parameters + * + * @param customParameters custom connection properties map as specified by each Jdbc source + * @param defaultParameters connection properties map as specified by each Jdbc source + * @throws IllegalArgumentException + */ + @JvmStatic + fun assertCustomParametersDontOverwriteDefaultParameters( + customParameters: Map, + defaultParameters: Map + ) { + for (key in defaultParameters.keys) { + require( + !(customParameters.containsKey(key) && + customParameters[key] != defaultParameters[key]) + ) { "Cannot overwrite default JDBC parameter $key" } + } + } + + /** + * Retrieves connection_properties from config and also validates if custom jdbc_url parameters + * overlap with the default properties + * + * @param config A configuration used to check Jdbc connection + * @return A mapping of connection properties + */ + fun getConnectionProperties(config: JsonNode): Map { + return getConnectionProperties(config, DEFAULT_JDBC_PARAMETERS_DELIMITER) + } + + fun getConnectionProperties(config: JsonNode, parameterDelimiter: String): Map { + val customProperties = + parseJdbcParameters(config, JdbcUtils.JDBC_URL_PARAMS_KEY, parameterDelimiter) + val defaultProperties = getDefaultConnectionProperties(config) + assertCustomParametersDontOverwriteDefaultParameters(customProperties, defaultProperties) + return MoreMaps.merge(customProperties, defaultProperties) + } + + /** + * Retrieves default connection_properties from config + * + * TODO: make this method abstract and add parity features to destination connectors + * + * @param config A configuration used to check Jdbc connection + * @return A mapping of the default connection properties + */ + @JvmStatic + fun getDefaultConnectionProperties(config: JsonNode): Map { + // NOTE that Postgres returns an empty map for some reason? + return parseJdbcParameters( + config, + "connection_properties", + DEFAULT_JDBC_PARAMETERS_DELIMITER + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/jdbc/JdbcSSLConnectionUtils.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/jdbc/JdbcSSLConnectionUtils.kt new file mode 100644 index 0000000000000..8578ecd42f1a3 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/jdbc/JdbcSSLConnectionUtils.kt @@ -0,0 +1,283 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.jdbc + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.db.jdbc.JdbcUtils +import io.airbyte.cdk.db.util.SSLCertificateUtils.keyStoreFromCertificate +import io.airbyte.cdk.db.util.SSLCertificateUtils.keyStoreFromClientCertificate +import java.io.IOException +import java.net.MalformedURLException +import java.net.URI +import java.nio.file.Files +import java.nio.file.Path +import java.security.KeyStoreException +import java.security.NoSuchAlgorithmException +import java.security.cert.CertificateException +import java.security.spec.InvalidKeySpecException +import java.util.* +import org.apache.commons.lang3.RandomStringUtils +import org.apache.commons.lang3.tuple.ImmutablePair +import org.apache.commons.lang3.tuple.Pair +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +class JdbcSSLConnectionUtils { + var caCertKeyStorePair: Pair? = null + var clientCertKeyStorePair: Pair? = null + + enum class SslMode(vararg spec: String) { + DISABLED("disable"), + ALLOWED("allow"), + PREFERRED("preferred", "prefer"), + REQUIRED("required", "require"), + VERIFY_CA("verify_ca", "verify-ca"), + VERIFY_IDENTITY("verify_identity", "verify-full"); + + val spec: List = Arrays.asList(*spec) + + companion object { + fun bySpec(spec: String): Optional { + return Arrays.stream(entries.toTypedArray()) + .filter { sslMode: SslMode -> sslMode.spec.contains(spec) } + .findFirst() + } + } + } + + companion object { + const val SSL_MODE: String = "sslMode" + + const val TRUST_KEY_STORE_URL: String = "trustCertificateKeyStoreUrl" + const val TRUST_KEY_STORE_PASS: String = "trustCertificateKeyStorePassword" + const val CLIENT_KEY_STORE_URL: String = "clientCertificateKeyStoreUrl" + const val CLIENT_KEY_STORE_PASS: String = "clientCertificateKeyStorePassword" + const val CLIENT_KEY_STORE_TYPE: String = "clientCertificateKeyStoreType" + const val TRUST_KEY_STORE_TYPE: String = "trustCertificateKeyStoreType" + const val KEY_STORE_TYPE_PKCS12: String = "PKCS12" + const val PARAM_MODE: String = "mode" + private val LOGGER: Logger = + LoggerFactory.getLogger(JdbcSSLConnectionUtils::class.java.javaClass) + const val PARAM_CA_CERTIFICATE: String = "ca_certificate" + const val PARAM_CLIENT_CERTIFICATE: String = "client_certificate" + const val PARAM_CLIENT_KEY: String = "client_key" + const val PARAM_CLIENT_KEY_PASSWORD: String = "client_key_password" + + /** + * Parses SSL related configuration and generates keystores to be used by connector + * + * @param config configuration + * @return map containing relevant parsed values including location of keystore or an empty + * map + */ + @JvmStatic + fun parseSSLConfig(config: JsonNode): Map { + LOGGER.debug("source config: {}", config) + + var caCertKeyStorePair: Pair? = null + var clientCertKeyStorePair: Pair? = null + val additionalParameters: MutableMap = HashMap() + // assume ssl if not explicitly mentioned. + if (!config.has(JdbcUtils.SSL_KEY) || config[JdbcUtils.SSL_KEY].asBoolean()) { + if (config.has(JdbcUtils.SSL_MODE_KEY)) { + val specMode = config[JdbcUtils.SSL_MODE_KEY][PARAM_MODE].asText() + additionalParameters[SSL_MODE] = + SslMode.bySpec(specMode) + .orElseThrow { IllegalArgumentException("unexpected ssl mode") } + .name + if (Objects.isNull(caCertKeyStorePair)) { + caCertKeyStorePair = prepareCACertificateKeyStore(config) + } + + if (Objects.nonNull(caCertKeyStorePair)) { + LOGGER.debug( + "uri for ca cert keystore: {}", + caCertKeyStorePair!!.left.toString() + ) + try { + additionalParameters.putAll( + java.util.Map.of( + TRUST_KEY_STORE_URL, + caCertKeyStorePair.left.toURL().toString(), + TRUST_KEY_STORE_PASS, + caCertKeyStorePair.right, + TRUST_KEY_STORE_TYPE, + KEY_STORE_TYPE_PKCS12 + ) + ) + } catch (e: MalformedURLException) { + throw RuntimeException("Unable to get a URL for trust key store") + } + } + + if (Objects.isNull(clientCertKeyStorePair)) { + clientCertKeyStorePair = prepareClientCertificateKeyStore(config) + } + + if (Objects.nonNull(clientCertKeyStorePair)) { + LOGGER.debug( + "uri for client cert keystore: {} / {}", + clientCertKeyStorePair!!.left.toString(), + clientCertKeyStorePair.right + ) + try { + additionalParameters.putAll( + java.util.Map.of( + CLIENT_KEY_STORE_URL, + clientCertKeyStorePair.left.toURL().toString(), + CLIENT_KEY_STORE_PASS, + clientCertKeyStorePair.right, + CLIENT_KEY_STORE_TYPE, + KEY_STORE_TYPE_PKCS12 + ) + ) + } catch (e: MalformedURLException) { + throw RuntimeException("Unable to get a URL for client key store") + } + } + } else { + additionalParameters[SSL_MODE] = SslMode.DISABLED.name + } + } + LOGGER.debug("additional params: {}", additionalParameters) + return additionalParameters + } + + @JvmStatic + fun prepareCACertificateKeyStore(config: JsonNode): Pair? { + // if config available + // if has CA cert - make keystore + // if has client cert + // if has client password - make keystore using password + // if no client password - make keystore using random password + var caCertKeyStorePair: Pair? = null + if (Objects.nonNull(config)) { + if (!config.has(JdbcUtils.SSL_KEY) || config[JdbcUtils.SSL_KEY].asBoolean()) { + val encryption = config[JdbcUtils.SSL_MODE_KEY] + if ( + encryption.has(PARAM_CA_CERTIFICATE) && + !encryption[PARAM_CA_CERTIFICATE].asText().isEmpty() + ) { + val clientKeyPassword = getOrGeneratePassword(encryption) + try { + val caCertKeyStoreUri = + keyStoreFromCertificate( + encryption[PARAM_CA_CERTIFICATE].asText(), + clientKeyPassword, + null, + null + ) + caCertKeyStorePair = ImmutablePair(caCertKeyStoreUri, clientKeyPassword) + } catch (e: CertificateException) { + throw RuntimeException( + "Failed to create keystore for CA certificate", + e + ) + } catch (e: IOException) { + throw RuntimeException( + "Failed to create keystore for CA certificate", + e + ) + } catch (e: KeyStoreException) { + throw RuntimeException( + "Failed to create keystore for CA certificate", + e + ) + } catch (e: NoSuchAlgorithmException) { + throw RuntimeException( + "Failed to create keystore for CA certificate", + e + ) + } + } + } + } + return caCertKeyStorePair + } + + private fun getOrGeneratePassword(sslModeConfig: JsonNode): String { + val clientKeyPassword = + if ( + sslModeConfig.has(PARAM_CLIENT_KEY_PASSWORD) && + !sslModeConfig[PARAM_CLIENT_KEY_PASSWORD].asText().isEmpty() + ) { + sslModeConfig[PARAM_CLIENT_KEY_PASSWORD].asText() + } else { + RandomStringUtils.randomAlphanumeric(10) + } + return clientKeyPassword + } + + fun prepareClientCertificateKeyStore(config: JsonNode): Pair? { + var clientCertKeyStorePair: Pair? = null + if (Objects.nonNull(config)) { + if (!config.has(JdbcUtils.SSL_KEY) || config[JdbcUtils.SSL_KEY].asBoolean()) { + val encryption = config[JdbcUtils.SSL_MODE_KEY] + if ( + encryption.has(PARAM_CLIENT_CERTIFICATE) && + !encryption[PARAM_CLIENT_CERTIFICATE].asText().isEmpty() && + encryption.has(PARAM_CLIENT_KEY) && + !encryption[PARAM_CLIENT_KEY].asText().isEmpty() + ) { + val clientKeyPassword = getOrGeneratePassword(encryption) + try { + val clientCertKeyStoreUri = + keyStoreFromClientCertificate( + encryption[PARAM_CLIENT_CERTIFICATE].asText(), + encryption[PARAM_CLIENT_KEY].asText(), + clientKeyPassword, + null + ) + clientCertKeyStorePair = + ImmutablePair(clientCertKeyStoreUri, clientKeyPassword) + } catch (e: CertificateException) { + throw RuntimeException( + "Failed to create keystore for Client certificate", + e + ) + } catch (e: IOException) { + throw RuntimeException( + "Failed to create keystore for Client certificate", + e + ) + } catch (e: KeyStoreException) { + throw RuntimeException( + "Failed to create keystore for Client certificate", + e + ) + } catch (e: NoSuchAlgorithmException) { + throw RuntimeException( + "Failed to create keystore for Client certificate", + e + ) + } catch (e: InvalidKeySpecException) { + throw RuntimeException( + "Failed to create keystore for Client certificate", + e + ) + } catch (e: InterruptedException) { + throw RuntimeException( + "Failed to create keystore for Client certificate", + e + ) + } + } + } + } + return clientCertKeyStorePair + } + + @JvmStatic + fun fileFromCertPem(certPem: String?): Path { + try { + val path = Files.createTempFile(null, ".crt") + Files.writeString(path, certPem) + path.toFile().deleteOnExit() + return path + } catch (e: IOException) { + throw RuntimeException("Cannot save root certificate to file", e) + } + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/jdbc/JdbcSource.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/jdbc/JdbcSource.kt new file mode 100644 index 0000000000000..7e1f9b3125344 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/jdbc/JdbcSource.kt @@ -0,0 +1,44 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.jdbc + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.db.factory.DatabaseDriver +import io.airbyte.cdk.db.jdbc.JdbcUtils +import io.airbyte.cdk.db.jdbc.streaming.AdaptiveStreamingQueryConfig +import io.airbyte.cdk.integrations.base.IntegrationRunner +import io.airbyte.cdk.integrations.base.Source +import java.sql.JDBCType +import java.util.function.Supplier +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +class JdbcSource : + AbstractJdbcSource( + DatabaseDriver.POSTGRESQL.driverClassName, + Supplier { AdaptiveStreamingQueryConfig() }, + JdbcUtils.defaultSourceOperations + ), + Source { + // no-op for JdbcSource since the config it receives is designed to be use for JDBC. + override fun toDatabaseConfig(config: JsonNode): JsonNode { + return config + } + + override val excludedInternalNameSpaces: Set + get() = setOf("information_schema", "pg_catalog", "pg_internal", "catalog_history") + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(JdbcSource::class.java) + + @Throws(Exception::class) + @JvmStatic + fun main(args: Array) { + val source: Source = JdbcSource() + LOGGER.info("starting source: {}", JdbcSource::class.java) + IntegrationRunner(source).run(args) + LOGGER.info("completed source: {}", JdbcSource::class.java) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/jdbc/dto/JdbcPrivilegeDto.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/jdbc/dto/JdbcPrivilegeDto.kt new file mode 100644 index 0000000000000..2f9f9be1f4065 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/jdbc/dto/JdbcPrivilegeDto.kt @@ -0,0 +1,87 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.jdbc.dto + +import com.google.common.base.Objects + +/** The class to store values from privileges table */ +class JdbcPrivilegeDto( + val grantee: String?, + val tableName: String?, + val schemaName: String?, + val privilege: String? +) { + class JdbcPrivilegeDtoBuilder { + private var grantee: String? = null + private var tableName: String? = null + private var schemaName: String? = null + private var privilege: String? = null + + fun grantee(grantee: String?): JdbcPrivilegeDtoBuilder { + this.grantee = grantee + return this + } + + fun tableName(tableName: String?): JdbcPrivilegeDtoBuilder { + this.tableName = tableName + return this + } + + fun schemaName(schemaName: String?): JdbcPrivilegeDtoBuilder { + this.schemaName = schemaName + return this + } + + fun privilege(privilege: String?): JdbcPrivilegeDtoBuilder { + this.privilege = privilege + return this + } + + fun build(): JdbcPrivilegeDto { + return JdbcPrivilegeDto(grantee, tableName, schemaName, privilege) + } + } + + override fun equals(o: Any?): Boolean { + if (this === o) { + return true + } + if (o == null || javaClass != o.javaClass) { + return false + } + val that = o as JdbcPrivilegeDto + return (Objects.equal(grantee, that.grantee) && + Objects.equal(tableName, that.tableName) && + Objects.equal(schemaName, that.schemaName) && + Objects.equal(privilege, that.privilege)) + } + + override fun hashCode(): Int { + return Objects.hashCode(grantee, tableName, schemaName, privilege) + } + + override fun toString(): String { + return "JdbcPrivilegeDto{" + + "grantee='" + + grantee + + '\'' + + ", columnName='" + + tableName + + '\'' + + ", schemaName='" + + schemaName + + '\'' + + ", privilege='" + + privilege + + '\'' + + '}' + } + + companion object { + @JvmStatic + fun builder(): JdbcPrivilegeDtoBuilder { + return JdbcPrivilegeDtoBuilder() + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/AbstractDbSource.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/AbstractDbSource.kt new file mode 100644 index 0000000000000..201a845d76350 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/AbstractDbSource.kt @@ -0,0 +1,831 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.relationaldb + +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.base.Preconditions +import datadog.trace.api.Trace +import io.airbyte.cdk.db.AbstractDatabase +import io.airbyte.cdk.db.IncrementalUtils.getCursorField +import io.airbyte.cdk.db.IncrementalUtils.getCursorFieldOptional +import io.airbyte.cdk.db.IncrementalUtils.getCursorType +import io.airbyte.cdk.db.jdbc.AirbyteRecordData +import io.airbyte.cdk.db.jdbc.JdbcDatabase +import io.airbyte.cdk.integrations.JdbcConnector +import io.airbyte.cdk.integrations.base.AirbyteTraceMessageUtility +import io.airbyte.cdk.integrations.base.AirbyteTraceMessageUtility.emitConfigErrorTrace +import io.airbyte.cdk.integrations.base.Source +import io.airbyte.cdk.integrations.base.errors.messages.ErrorMessage.getErrorMessage +import io.airbyte.cdk.integrations.source.relationaldb.state.* +import io.airbyte.cdk.integrations.util.ApmTraceUtils.addExceptionToTrace +import io.airbyte.cdk.integrations.util.ConnectorExceptionUtil +import io.airbyte.commons.exceptions.ConfigErrorException +import io.airbyte.commons.exceptions.ConnectionErrorException +import io.airbyte.commons.features.EnvVariableFeatureFlags +import io.airbyte.commons.features.FeatureFlags +import io.airbyte.commons.functional.CheckedConsumer +import io.airbyte.commons.lang.Exceptions +import io.airbyte.commons.stream.AirbyteStreamUtils +import io.airbyte.commons.util.AutoCloseableIterator +import io.airbyte.commons.util.AutoCloseableIterators +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair +import io.airbyte.protocol.models.CommonField +import io.airbyte.protocol.models.JsonSchemaType +import io.airbyte.protocol.models.v0.* +import java.sql.SQLException +import java.time.Duration +import java.time.Instant +import java.util.* +import java.util.concurrent.atomic.AtomicLong +import java.util.function.Function +import java.util.stream.Collectors +import java.util.stream.Stream +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * This class contains helper functions and boilerplate for implementing a source connector for a DB + * source of both non-relational and relational type + */ +abstract class AbstractDbSource +protected constructor(driverClassName: String) : + JdbcConnector(driverClassName), Source, AutoCloseable { + // TODO: Remove when the flag is not use anymore + var featureFlags: FeatureFlags = EnvVariableFeatureFlags() + + @Trace(operationName = CHECK_TRACE_OPERATION_NAME) + @Throws(Exception::class) + override fun check(config: JsonNode): AirbyteConnectionStatus? { + try { + val database = createDatabase(config) + for (checkOperation in getCheckOperations(config)) { + checkOperation.accept(database) + } + + return AirbyteConnectionStatus().withStatus(AirbyteConnectionStatus.Status.SUCCEEDED) + } catch (ex: ConnectionErrorException) { + addExceptionToTrace(ex) + val message = getErrorMessage(ex.stateCode, ex.errorCode, ex.exceptionMessage, ex) + emitConfigErrorTrace(ex, message) + return AirbyteConnectionStatus() + .withStatus(AirbyteConnectionStatus.Status.FAILED) + .withMessage(message) + } catch (e: Exception) { + addExceptionToTrace(e) + LOGGER.info("Exception while checking connection: ", e) + return AirbyteConnectionStatus() + .withStatus(AirbyteConnectionStatus.Status.FAILED) + .withMessage( + String.format( + ConnectorExceptionUtil.COMMON_EXCEPTION_MESSAGE_TEMPLATE, + e.message + ) + ) + } finally { + close() + } + } + + @Trace(operationName = DISCOVER_TRACE_OPERATION_NAME) + @Throws(Exception::class) + override fun discover(config: JsonNode): AirbyteCatalog { + try { + val database = createDatabase(config) + val tableInfos = discoverWithoutSystemTables(database) + val fullyQualifiedTableNameToPrimaryKeys = discoverPrimaryKeys(database, tableInfos) + return DbSourceDiscoverUtil.convertTableInfosToAirbyteCatalog( + tableInfos, + fullyQualifiedTableNameToPrimaryKeys + ) { columnType: DataType -> this.getAirbyteType(columnType) } + } finally { + close() + } + } + + /** + * Creates a list of AirbyteMessageIterators with all the streams selected in a configured + * catalog + * + * @param config + * - integration-specific configuration object as json. e.g. { "username": "airbyte", + * "password": "super secure" } + * @param catalog + * - schema of the incoming messages. + * @param state + * - state of the incoming messages. + * @return AirbyteMessageIterator with all the streams that are to be synced + * @throws Exception + */ + @Throws(Exception::class) + override fun read( + config: JsonNode, + catalog: ConfiguredAirbyteCatalog, + state: JsonNode? + ): AutoCloseableIterator { + val supportedStateType = getSupportedStateType(config) + val stateManager = + StateManagerFactory.createStateManager( + supportedStateType, + StateGeneratorUtils.deserializeInitialState(state, supportedStateType), + catalog + ) + val emittedAt = Instant.now() + + val database = createDatabase(config) + + logPreSyncDebugData(database, catalog) + + val fullyQualifiedTableNameToInfo = + discoverWithoutSystemTables(database) + .stream() + .collect( + Collectors.toMap( + Function { t: TableInfo> -> + String.format("%s.%s", t.nameSpace, t.name) + }, + Function.identity() + ) + ) + + validateCursorFieldForIncrementalTables(fullyQualifiedTableNameToInfo, catalog, database) + + DbSourceDiscoverUtil.logSourceSchemaChange(fullyQualifiedTableNameToInfo, catalog) { + columnType: DataType -> + this.getAirbyteType(columnType) + } + + val incrementalIterators = + getIncrementalIterators( + database, + catalog, + fullyQualifiedTableNameToInfo, + stateManager, + emittedAt + ) + val fullRefreshIterators = + getFullRefreshIterators( + database, + catalog, + fullyQualifiedTableNameToInfo, + stateManager, + emittedAt + ) + val iteratorList = + Stream.of(incrementalIterators, fullRefreshIterators) + .flatMap(Collection>::stream) + .collect(Collectors.toList()) + + return AutoCloseableIterators.appendOnClose( + AutoCloseableIterators.concatWithEagerClose( + iteratorList, + AirbyteTraceMessageUtility::emitStreamStatusTrace + ) + ) { + LOGGER.info("Closing database connection pool.") + Exceptions.toRuntime { this.close() } + LOGGER.info("Closed database connection pool.") + } + } + + @Throws(SQLException::class) + protected fun validateCursorFieldForIncrementalTables( + tableNameToTable: Map>>, + catalog: ConfiguredAirbyteCatalog, + database: Database + ) { + val tablesWithInvalidCursor: MutableList = + ArrayList() + for (airbyteStream in catalog.streams) { + val stream = airbyteStream.stream + val fullyQualifiedTableName = + DbSourceDiscoverUtil.getFullyQualifiedTableName(stream.namespace, stream.name) + val hasSourceDefinedCursor = + (!Objects.isNull(airbyteStream.stream.sourceDefinedCursor) && + airbyteStream.stream.sourceDefinedCursor) + if ( + !tableNameToTable.containsKey(fullyQualifiedTableName) || + airbyteStream.syncMode != SyncMode.INCREMENTAL || + hasSourceDefinedCursor + ) { + continue + } + + val table = tableNameToTable[fullyQualifiedTableName]!! + val cursorField = getCursorFieldOptional(airbyteStream) + if (cursorField.isEmpty) { + continue + } + val cursorType = + table.fields!! + .stream() + .filter { info: CommonField -> info.name == cursorField.get() } + .map { obj: CommonField -> obj.type } + .findFirst() + .orElseThrow() + + if (!isCursorType(cursorType)) { + tablesWithInvalidCursor.add( + InvalidCursorInfoUtil.InvalidCursorInfo( + fullyQualifiedTableName, + cursorField.get(), + cursorType.toString(), + "Unsupported cursor type" + ) + ) + continue + } + + if ( + !verifyCursorColumnValues( + database, + stream.namespace, + stream.name, + cursorField.get() + ) + ) { + tablesWithInvalidCursor.add( + InvalidCursorInfoUtil.InvalidCursorInfo( + fullyQualifiedTableName, + cursorField.get(), + cursorType.toString(), + "Cursor column contains NULL value" + ) + ) + } + } + + if (!tablesWithInvalidCursor.isEmpty()) { + throw ConfigErrorException( + InvalidCursorInfoUtil.getInvalidCursorConfigMessage(tablesWithInvalidCursor) + ) + } + } + + /** + * Verify that cursor column allows syncing to go through. + * + * @param database database + * @return true if syncing can go through. false otherwise + * @throws SQLException exception + */ + @Throws(SQLException::class) + protected open fun verifyCursorColumnValues( + database: Database, + schema: String?, + tableName: String?, + columnName: String? + ): Boolean { + /* no-op */ + return true + } + + /** + * Estimates the total volume (rows and bytes) to sync and emits a [AirbyteEstimateTraceMessage] + * associated with the full refresh stream. + * + * @param database database + */ + protected open fun estimateFullRefreshSyncSize( + database: Database, + configuredAirbyteStream: ConfiguredAirbyteStream? + ) { + /* no-op */ + } + + @Throws(Exception::class) + protected fun discoverWithoutSystemTables( + database: Database + ): List>> { + val systemNameSpaces = excludedInternalNameSpaces + val systemViews = excludedViews + val discoveredTables = discoverInternal(database) + return (if (systemNameSpaces == null || systemNameSpaces.isEmpty()) discoveredTables + else + discoveredTables + .stream() + .filter { table: TableInfo> -> + !systemNameSpaces.contains(table.nameSpace) && !systemViews.contains(table.name) + } + .collect(Collectors.toList())) + } + + protected fun getFullRefreshIterators( + database: Database, + catalog: ConfiguredAirbyteCatalog, + tableNameToTable: Map>>, + stateManager: StateManager?, + emittedAt: Instant + ): List> { + return getSelectedIterators( + database, + catalog, + tableNameToTable, + stateManager, + emittedAt, + SyncMode.FULL_REFRESH + ) + } + + protected open fun getIncrementalIterators( + database: Database, + catalog: ConfiguredAirbyteCatalog, + tableNameToTable: Map>>, + stateManager: StateManager?, + emittedAt: Instant + ): List> { + return getSelectedIterators( + database, + catalog, + tableNameToTable, + stateManager, + emittedAt, + SyncMode.INCREMENTAL + ) + } + + /** + * Creates a list of read iterators for each stream within an ConfiguredAirbyteCatalog + * + * @param database Source Database + * @param catalog List of streams (e.g. database tables or API endpoints) with settings on sync + * mode + * @param tableNameToTable Mapping of table name to table + * @param stateManager Manager used to track the state of data synced by the connector + * @param emittedAt Time when data was emitted from the Source database + * @param syncMode the sync mode for which we want to grab the required iterators + * @return List of AirbyteMessageIterators containing all iterators for a catalog + */ + private fun getSelectedIterators( + database: Database, + catalog: ConfiguredAirbyteCatalog?, + tableNameToTable: Map>>, + stateManager: StateManager?, + emittedAt: Instant, + syncMode: SyncMode + ): List> { + val iteratorList: MutableList> = ArrayList() + for (airbyteStream in catalog!!.streams) { + if (airbyteStream.syncMode == syncMode) { + val stream = airbyteStream.stream + val fullyQualifiedTableName = + DbSourceDiscoverUtil.getFullyQualifiedTableName(stream.namespace, stream.name) + if (!tableNameToTable.containsKey(fullyQualifiedTableName)) { + LOGGER.info( + "Skipping stream {} because it is not in the source", + fullyQualifiedTableName + ) + continue + } + + val table = tableNameToTable[fullyQualifiedTableName]!! + val tableReadIterator = + createReadIterator(database, airbyteStream, table, stateManager, emittedAt) + iteratorList.add(tableReadIterator) + } + } + + return iteratorList + } + + /** + * ReadIterator is used to retrieve records from a source connector + * + * @param database Source Database + * @param airbyteStream represents an ingestion source (e.g. API endpoint or database table) + * @param table information in tabular format + * @param stateManager Manager used to track the state of data synced by the connector + * @param emittedAt Time when data was emitted from the Source database + * @return + */ + private fun createReadIterator( + database: Database, + airbyteStream: ConfiguredAirbyteStream, + table: TableInfo>, + stateManager: StateManager?, + emittedAt: Instant + ): AutoCloseableIterator { + val streamName = airbyteStream.stream.name + val namespace = airbyteStream.stream.namespace + val pair = + io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair(streamName, namespace) + val selectedFieldsInCatalog = CatalogHelpers.getTopLevelFieldNames(airbyteStream) + val selectedDatabaseFields = + table.fields + .stream() + .map { obj: CommonField -> obj.name } + .filter { o: String -> selectedFieldsInCatalog.contains(o) } + .collect(Collectors.toList()) + + val iterator: AutoCloseableIterator + // checks for which sync mode we're using based on the configured airbytestream + // this is where the bifurcation between full refresh and incremental + if (airbyteStream.syncMode == SyncMode.INCREMENTAL) { + val cursorField = getCursorField(airbyteStream) + val cursorInfo = stateManager!!.getCursorInfo(pair) + + val airbyteMessageIterator: AutoCloseableIterator + if (cursorInfo!!.map { it.cursor }.isPresent) { + airbyteMessageIterator = + getIncrementalStream( + database, + airbyteStream, + selectedDatabaseFields, + table, + cursorInfo.get(), + emittedAt + ) + } else { + // if no cursor is present then this is the first read for is the same as doing a + // full refresh read. + estimateFullRefreshSyncSize(database, airbyteStream) + airbyteMessageIterator = + getFullRefreshStream( + database, + streamName, + namespace, + selectedDatabaseFields, + table, + emittedAt, + SyncMode.INCREMENTAL, + Optional.of(cursorField) + ) + } + + val cursorType = getCursorType(airbyteStream, cursorField) + + val messageProducer = + CursorStateMessageProducer(stateManager, cursorInfo.map { it.cursor }) + + iterator = + AutoCloseableIterators.transform( + { autoCloseableIterator: AutoCloseableIterator -> + SourceStateIterator( + autoCloseableIterator, + airbyteStream, + messageProducer, + StateEmitFrequency(stateEmissionFrequency.toLong(), Duration.ZERO) + ) + }, + airbyteMessageIterator, + AirbyteStreamUtils.convertFromNameAndNamespace(pair.name, pair.namespace) + ) + } else if (airbyteStream.syncMode == SyncMode.FULL_REFRESH) { + estimateFullRefreshSyncSize(database, airbyteStream) + iterator = + getFullRefreshStream( + database, + streamName, + namespace, + selectedDatabaseFields, + table, + emittedAt, + SyncMode.FULL_REFRESH, + Optional.empty() + ) + } else if (airbyteStream.syncMode == null) { + throw IllegalArgumentException( + String.format("%s requires a source sync mode", this.javaClass) + ) + } else { + throw IllegalArgumentException( + String.format( + "%s does not support sync mode: %s.", + this.javaClass, + airbyteStream.syncMode + ) + ) + } + + val recordCount = AtomicLong() + return AutoCloseableIterators.transform( + iterator, + AirbyteStreamUtils.convertFromNameAndNamespace(pair.name, pair.namespace) + ) { r: AirbyteMessage -> + val count = recordCount.incrementAndGet() + if (count % 10000 == 0L) { + LOGGER.info("Reading stream {}. Records read: {}", streamName, count) + } + r + } + } + + /** + * @param database Source Database + * @param airbyteStream represents an ingestion source (e.g. API endpoint or database table) + * @param selectedDatabaseFields subset of database fields selected for replication + * @param table information in tabular format + * @param cursorInfo state of where to start the sync from + * @param emittedAt Time when data was emitted from the Source database + * @return AirbyteMessage Iterator that + */ + private fun getIncrementalStream( + database: Database, + airbyteStream: ConfiguredAirbyteStream, + selectedDatabaseFields: List, + table: TableInfo>, + cursorInfo: CursorInfo, + emittedAt: Instant + ): AutoCloseableIterator { + val streamName = airbyteStream.stream.name + val namespace = airbyteStream.stream.namespace + val cursorField = getCursorField(airbyteStream) + val cursorType = + table.fields + .stream() + .filter { info: CommonField -> info.name == cursorField } + .map { obj: CommonField -> obj.type } + .findFirst() + .orElseThrow() + + Preconditions.checkState( + table.fields.stream().anyMatch { f: CommonField -> f.name == cursorField }, + String.format("Could not find cursor field %s in table %s", cursorField, table.name) + ) + + val queryIterator = + queryTableIncremental( + database, + selectedDatabaseFields, + table.nameSpace, + table.name, + cursorInfo, + cursorType + ) + + return getMessageIterator(queryIterator, streamName, namespace, emittedAt.toEpochMilli()) + } + + /** + * Creates a AirbyteMessageIterator that contains all records for a database source connection + * + * @param database Source Database + * @param streamName name of an individual stream in which a stream represents a source (e.g. + * API endpoint or database table) + * @param namespace Namespace of the database (e.g. public) + * @param selectedDatabaseFields List of all interested database column names + * @param table information in tabular format + * @param emittedAt Time when data was emitted from the Source database + * @param syncMode The sync mode that this full refresh stream should be associated with. + * @return AirbyteMessageIterator with all records for a database source + */ + private fun getFullRefreshStream( + database: Database, + streamName: String, + namespace: String, + selectedDatabaseFields: List, + table: TableInfo>, + emittedAt: Instant, + syncMode: SyncMode, + cursorField: Optional + ): AutoCloseableIterator { + val queryStream = + queryTableFullRefresh( + database, + selectedDatabaseFields, + table.nameSpace, + table.name, + syncMode, + cursorField + ) + return getMessageIterator(queryStream, streamName, namespace, emittedAt.toEpochMilli()) + } + + /** + * @param database + * - The database where from privileges for tables will be consumed + * @param schema + * - The schema where from privileges for tables will be consumed + * @return Set with privileges for tables for current DB-session user The method is responsible + * for SELECT-ing the table with privileges. In some cases such SELECT doesn't require (e.g. in + * Oracle DB - the schema is the user, you cannot REVOKE a privilege on a table from its owner). + */ + @Throws(SQLException::class) + protected open fun getPrivilegesTableForCurrentUser( + database: JdbcDatabase?, + schema: String? + ): Set { + return emptySet() + } + + /** + * Map a database implementation-specific configuration to json object that adheres to the + * database config spec. See resources/spec.json. + * + * @param config database implementation-specific configuration. + * @return database spec config + */ + @Trace(operationName = DISCOVER_TRACE_OPERATION_NAME) + abstract fun toDatabaseConfig(config: JsonNode): JsonNode + + /** + * Creates a database instance using the database spec config. + * + * @param config database spec config + * @return database instance + * @throws Exception might throw an error during connection to database + */ + @Trace(operationName = DISCOVER_TRACE_OPERATION_NAME) + @Throws(Exception::class) + protected abstract fun createDatabase(config: JsonNode): Database + + /** + * Gets and logs relevant and useful database metadata such as DB product/version, index names + * and definition. Called before syncing data. Any logged information should be scoped to the + * configured catalog and database. + * + * @param database given database instance. + * @param catalog configured catalog. + */ + @Throws(Exception::class) + protected open fun logPreSyncDebugData( + database: Database, + catalog: ConfiguredAirbyteCatalog? + ) {} + + /** + * Configures a list of operations that can be used to check the connection to the source. + * + * @return list of consumers that run queries for the check command. + */ + @Throws(Exception::class) + protected abstract fun getCheckOperations( + config: JsonNode? + ): List> + + /** + * Map source types to Airbyte types + * + * @param columnType source data type + * @return airbyte data type + */ + protected abstract fun getAirbyteType(columnType: DataType): JsonSchemaType + + protected abstract val excludedInternalNameSpaces: Set + /** + * Get list of system namespaces(schemas) in order to exclude them from the `discover` + * result list. + * + * @return set of system namespaces(schemas) to be excluded + */ + get + + protected open val excludedViews: Set + /** + * Get list of system views in order to exclude them from the `discover` result list. + * + * @return set of views to be excluded + */ + get() = emptySet() + + /** + * Discover all available tables in the source database. + * + * @param database source database + * @return list of the source tables + * @throws Exception access to the database might lead to an exceptions. + */ + @Trace(operationName = DISCOVER_TRACE_OPERATION_NAME) + @Throws(Exception::class) + protected abstract fun discoverInternal( + database: Database + ): List>> + + /** + * Discovers all available tables within a schema in the source database. + * + * @param database + * - source database + * @param schema + * - source schema + * @return list of source tables + * @throws Exception + * - access to the database might lead to exceptions. + */ + @Throws(Exception::class) + protected abstract fun discoverInternal( + database: Database, + schema: String? + ): List>> + + /** + * Discover Primary keys for each table and @return a map of namespace.table name to their + * associated list of primary key fields. + * + * @param database source database + * @param tableInfos list of tables + * @return map of namespace.table and primary key fields. + */ + protected abstract fun discoverPrimaryKeys( + database: Database, + tableInfos: List>> + ): Map> + + protected abstract val quoteString: String? + /** + * Returns quote symbol of the database + * + * @return quote symbol + */ + get + + /** + * Read all data from a table. + * + * @param database source database + * @param columnNames interested column names + * @param schemaName table namespace + * @param tableName target table + * @param syncMode The sync mode that this full refresh stream should be associated with. + * @return iterator with read data + */ + protected abstract fun queryTableFullRefresh( + database: Database, + columnNames: List, + schemaName: String?, + tableName: String, + syncMode: SyncMode, + cursorField: Optional + ): AutoCloseableIterator + + /** + * Read incremental data from a table. Incremental read should return only records where cursor + * column value is bigger than cursor. Note that if the connector needs to emit intermediate + * state (i.e. [AbstractDbSource.getStateEmissionFrequency] > 0), the incremental query must be + * sorted by the cursor field. + * + * @return iterator with read data + */ + protected abstract fun queryTableIncremental( + database: Database, + columnNames: List, + schemaName: String?, + tableName: String, + cursorInfo: CursorInfo, + cursorFieldType: DataType + ): AutoCloseableIterator + + protected open val stateEmissionFrequency: Int + /** + * When larger than 0, the incremental iterator will emit intermediate state for every N + * records. Please note that if intermediate state emission is enabled, the incremental + * query must be ordered by the cursor field. + * + * TODO: Return an optional value instead of 0 to make it easier to understand. + */ + get() = 0 + + /** @return list of fields that could be used as cursors */ + protected abstract fun isCursorType(type: DataType): Boolean + + /** + * Returns the [AirbyteStateType] supported by this connector. + * + * @param config The connector configuration. + * @return A [AirbyteStateType] representing the state supported by this connector. + */ + protected open fun getSupportedStateType( + config: JsonNode? + ): AirbyteStateMessage.AirbyteStateType { + return AirbyteStateMessage.AirbyteStateType.STREAM + } + + companion object { + const val CHECK_TRACE_OPERATION_NAME: String = "check-operation" + const val DISCOVER_TRACE_OPERATION_NAME: String = "discover-operation" + const val READ_TRACE_OPERATION_NAME: String = "read-operation" + + @JvmStatic + protected val LOGGER: Logger = LoggerFactory.getLogger(AbstractDbSource::class.java) + + private fun getMessageIterator( + recordIterator: AutoCloseableIterator, + streamName: String, + namespace: String, + emittedAt: Long + ): AutoCloseableIterator { + return AutoCloseableIterators.transform( + recordIterator, + AirbyteStreamNameNamespacePair(streamName, namespace) + ) { airbyteRecordData -> + AirbyteMessage() + .withType(AirbyteMessage.Type.RECORD) + .withRecord( + AirbyteRecordMessage() + .withStream(streamName) + .withNamespace(namespace) + .withEmittedAt(emittedAt) + .withData(airbyteRecordData.rawRowData) + .withMeta( + if (isMetaChangesEmptyOrNull(airbyteRecordData.meta)) null + else airbyteRecordData.meta + ) + ) + } + } + + private fun isMetaChangesEmptyOrNull(meta: AirbyteRecordMessageMeta?): Boolean { + return meta == null || meta.changes == null || meta.changes.isEmpty() + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/CdcStateManager.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/CdcStateManager.kt new file mode 100644 index 0000000000000..8a628ae732300 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/CdcStateManager.kt @@ -0,0 +1,50 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.relationaldb + +import io.airbyte.cdk.integrations.source.relationaldb.models.CdcState +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.v0.AirbyteStateMessage +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair +import java.util.* +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +class CdcStateManager( + private val initialState: CdcState?, + initialStreamsSynced: Set?, + stateMessage: AirbyteStateMessage? +) { + val initialStreamsSynced: Set? + val rawStateMessage: AirbyteStateMessage? + private var currentState: CdcState? + + init { + this.currentState = initialState + this.initialStreamsSynced = + if (initialStreamsSynced != null) Collections.unmodifiableSet(initialStreamsSynced) + else null + this.rawStateMessage = stateMessage + LOGGER.info("Initialized CDC state") + } + + var cdcState: CdcState? + get() = if (currentState != null) Jsons.clone(currentState!!) else null + set(state) { + this.currentState = state + } + + override fun toString(): String { + return "CdcStateManager{" + + "initialState=" + + initialState + + ", currentState=" + + currentState + + '}' + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(CdcStateManager::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/CursorInfo.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/CursorInfo.kt new file mode 100644 index 0000000000000..b4e4721d1bb18 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/CursorInfo.kt @@ -0,0 +1,82 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.relationaldb + +import java.util.* + +class CursorInfo( + val originalCursorField: String?, + val originalCursor: String?, + val originalCursorRecordCount: Long, + val cursorField: String?, + var cursor: String?, + var cursorRecordCount: Long +) { + constructor( + originalCursorField: String?, + originalCursor: String?, + cursorField: String?, + cursor: String? + ) : this(originalCursorField, originalCursor, 0L, cursorField, cursor, 0L) + + fun setCursor(cursor: String?): CursorInfo { + this.cursor = cursor + return this + } + + fun setCursorRecordCount(cursorRecordCount: Long): CursorInfo { + this.cursorRecordCount = cursorRecordCount + return this + } + + override fun equals(o: Any?): Boolean { + if (this === o) { + return true + } + if (o == null || javaClass != o.javaClass) { + return false + } + val that = o as CursorInfo + return originalCursorField == that.originalCursorField && + originalCursor == that.originalCursor && + originalCursorRecordCount == that.originalCursorRecordCount && + cursorField == that.cursorField && + cursor == that.cursor && + cursorRecordCount == that.cursorRecordCount + } + + override fun hashCode(): Int { + return Objects.hash( + originalCursorField, + originalCursor, + originalCursorRecordCount, + cursorField, + cursor, + cursorRecordCount + ) + } + + override fun toString(): String { + return "CursorInfo{" + + "originalCursorField='" + + originalCursorField + + '\'' + + ", originalCursor='" + + originalCursor + + '\'' + + ", originalCursorRecordCount='" + + originalCursorRecordCount + + '\'' + + ", cursorField='" + + cursorField + + '\'' + + ", cursor='" + + cursor + + '\'' + + ", cursorRecordCount='" + + cursorRecordCount + + '\'' + + '}' + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/DbSourceDiscoverUtil.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/DbSourceDiscoverUtil.kt new file mode 100644 index 0000000000000..65e283132d9c7 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/DbSourceDiscoverUtil.kt @@ -0,0 +1,212 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.relationaldb + +import com.google.common.collect.Lists +import io.airbyte.protocol.models.CommonField +import io.airbyte.protocol.models.Field +import io.airbyte.protocol.models.JsonSchemaType +import io.airbyte.protocol.models.v0.AirbyteCatalog +import io.airbyte.protocol.models.v0.CatalogHelpers +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import io.airbyte.protocol.models.v0.SyncMode +import java.util.* +import java.util.function.Consumer +import java.util.function.Function +import java.util.stream.Collectors +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** Contains utilities and helper classes for discovering schemas in database sources. */ +object DbSourceDiscoverUtil { + private val LOGGER: Logger = LoggerFactory.getLogger(DbSourceDiscoverUtil::class.java) + private val AIRBYTE_METADATA: List = + mutableListOf("_ab_cdc_lsn", "_ab_cdc_updated_at", "_ab_cdc_deleted_at") + + /* + * This method logs schema drift between source table and the catalog. This can happen if (i) + * underlying table schema changed between syncs (ii) The source connector's mapping of datatypes to + * Airbyte types changed between runs + */ + @JvmStatic + fun logSourceSchemaChange( + fullyQualifiedTableNameToInfo: Map>>, + catalog: ConfiguredAirbyteCatalog, + airbyteTypeConverter: Function + ) { + for (airbyteStream in catalog.streams) { + val stream = airbyteStream.stream + val fullyQualifiedTableName = getFullyQualifiedTableName(stream.namespace, stream.name) + if (!fullyQualifiedTableNameToInfo.containsKey(fullyQualifiedTableName)) { + continue + } + val table = fullyQualifiedTableNameToInfo[fullyQualifiedTableName]!! + val fields = + table.fields + .stream() + .map { commonField: CommonField -> + toField(commonField, airbyteTypeConverter) + } + .distinct() + .collect(Collectors.toList()) + val currentJsonSchema = CatalogHelpers.fieldsToJsonSchema(fields) + val catalogSchema = stream.jsonSchema + val currentSchemaProperties = currentJsonSchema["properties"] + val catalogProperties = catalogSchema["properties"] + val mismatchedFields: MutableList = ArrayList() + catalogProperties.fieldNames().forEachRemaining { fieldName: String -> + // Ignoring metadata fields since those are automatically added onto the catalog + // schema by Airbyte + // and don't exist in the source schema. They should not be considered a change + if (AIRBYTE_METADATA.contains(fieldName)) { + return@forEachRemaining + } + if ( + !currentSchemaProperties.has(fieldName) || + currentSchemaProperties[fieldName] != catalogProperties[fieldName] + ) { + mismatchedFields.add(fieldName) + } + } + + if (!mismatchedFields.isEmpty()) { + LOGGER.warn( + "Source schema changed for table {}! Potential mismatches: {}. Actual schema: {}. Catalog schema: {}", + fullyQualifiedTableName, + java.lang.String.join(", ", mismatchedFields.toString()), + currentJsonSchema, + catalogSchema + ) + } + } + } + + fun convertTableInfosToAirbyteCatalog( + tableInfos: List>>, + fullyQualifiedTableNameToPrimaryKeys: Map>, + airbyteTypeConverter: Function + ): AirbyteCatalog { + val tableInfoFieldList = + tableInfos + .stream() + .map { t: TableInfo> -> + // some databases return multiple copies of the same record for a column (e.g. + // redshift) because + // they have at least once delivery guarantees. we want to dedupe these, but + // first we check that the + // records are actually the same and provide a good error message if they are + // not. + assertColumnsWithSameNameAreSame(t.nameSpace, t.name, t.fields) + val fields = + t.fields + .stream() + .map { commonField: CommonField -> + toField(commonField, airbyteTypeConverter) + } + .distinct() + .collect(Collectors.toList()) + val fullyQualifiedTableName = getFullyQualifiedTableName(t.nameSpace, t.name) + val primaryKeys = + fullyQualifiedTableNameToPrimaryKeys.getOrDefault( + fullyQualifiedTableName, + emptyList() + ) + TableInfo( + nameSpace = t.nameSpace, + name = t.name, + fields = fields, + primaryKeys = primaryKeys, + cursorFields = t.cursorFields + ) + } + .collect(Collectors.toList()) + + val streams = + tableInfoFieldList + .stream() + .map { tableInfo: TableInfo -> + val primaryKeys = + tableInfo.primaryKeys + .stream() + .filter { obj: String? -> Objects.nonNull(obj) } + .map { listOf(it) } + .toList() + CatalogHelpers.createAirbyteStream( + tableInfo.name, + tableInfo.nameSpace, + tableInfo.fields + ) + .withSupportedSyncModes( + if (tableInfo.cursorFields != null && tableInfo.cursorFields.isEmpty()) + Lists.newArrayList(SyncMode.FULL_REFRESH) + else Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL) + ) + .withSourceDefinedPrimaryKey(primaryKeys) + } + .collect(Collectors.toList()) + return AirbyteCatalog().withStreams(streams) + } + + @JvmStatic + fun getFullyQualifiedTableName(nameSpace: String?, tableName: String): String { + return if (nameSpace != null) "$nameSpace.$tableName" else tableName + } + + private fun toField( + commonField: CommonField, + airbyteTypeConverter: Function + ): Field { + if ( + airbyteTypeConverter.apply(commonField.type) === JsonSchemaType.OBJECT && + commonField.properties != null && + !commonField.properties.isEmpty() + ) { + val properties = + commonField.properties + .stream() + .map { commField: CommonField -> + toField(commField, airbyteTypeConverter) + } + .toList() + return Field.of( + commonField.name, + airbyteTypeConverter.apply(commonField.type), + properties + ) + } else { + return Field.of(commonField.name, airbyteTypeConverter.apply(commonField.type)) + } + } + + private fun assertColumnsWithSameNameAreSame( + nameSpace: String, + tableName: String, + columns: List> + ) { + columns + .stream() + .collect(Collectors.groupingBy(Function { obj: CommonField -> obj.name })) + .values + .forEach( + Consumer { columnsWithSameName: List> -> + val comparisonColumn = columnsWithSameName[0] + columnsWithSameName.forEach( + Consumer { column: CommonField -> + if (column != comparisonColumn) { + throw RuntimeException( + String.format( + "Found multiple columns with same name: %s in table: %s.%s but the columns are not the same. columns: %s", + comparisonColumn.name, + nameSpace, + tableName, + columns + ) + ) + } + } + ) + } + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/InvalidCursorInfoUtil.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/InvalidCursorInfoUtil.kt new file mode 100644 index 0000000000000..d2c8e2b5ee016 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/InvalidCursorInfoUtil.kt @@ -0,0 +1,50 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.relationaldb + +import java.util.stream.Collectors + +object InvalidCursorInfoUtil { + fun getInvalidCursorConfigMessage(tablesWithInvalidCursor: List): String { + return ("The following tables have invalid columns selected as cursor, please select a column with a well-defined ordering with no null values as a cursor. " + + tablesWithInvalidCursor + .stream() + .map { obj: InvalidCursorInfo -> obj.toString() } + .collect(Collectors.joining(","))) + } + + class InvalidCursorInfo( + tableName: String?, + cursorColumnName: String, + cursorSqlType: String, + cause: String + ) { + override fun toString(): String { + return "{" + + "tableName='" + + tableName + + '\'' + + ", cursorColumnName='" + + cursorColumnName + + '\'' + + ", cursorSqlType=" + + cursorSqlType + + ", cause=" + + cause + + '}' + } + + val tableName: String? + val cursorColumnName: String + val cursorSqlType: String + val cause: String + + init { + this.tableName = tableName + this.cursorColumnName = cursorColumnName + this.cursorSqlType = cursorSqlType + this.cause = cause + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/RelationalDbQueryUtils.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/RelationalDbQueryUtils.kt new file mode 100644 index 0000000000000..5d331c2d56982 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/RelationalDbQueryUtils.kt @@ -0,0 +1,122 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.relationaldb + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.db.SqlDatabase +import io.airbyte.commons.stream.AirbyteStreamUtils +import io.airbyte.commons.util.AutoCloseableIterator +import io.airbyte.commons.util.AutoCloseableIterators +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import java.util.* +import java.util.stream.Collectors +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** Utility class for methods to query a relational db. */ +object RelationalDbQueryUtils { + private val LOGGER: Logger = LoggerFactory.getLogger(RelationalDbQueryUtils::class.java) + + @JvmStatic + fun getIdentifierWithQuoting(identifier: String, quoteString: String): String { + // double-quoted values within a database name or column name should be wrapped with extra + // quoteString + return if (identifier.startsWith(quoteString) && identifier.endsWith(quoteString)) { + quoteString + quoteString + identifier + quoteString + quoteString + } else { + quoteString + identifier + quoteString + } + } + + @JvmStatic + fun enquoteIdentifierList(identifiers: List, quoteString: String): String { + val joiner = StringJoiner(",") + for (identifier in identifiers) { + joiner.add(getIdentifierWithQuoting(identifier, quoteString)) + } + return joiner.toString() + } + + /** @return fully qualified table name with the schema (if a schema exists) in quotes. */ + @JvmStatic + fun getFullyQualifiedTableNameWithQuoting( + nameSpace: String?, + tableName: String, + quoteString: String + ): String { + return (if (nameSpace == null || nameSpace.isEmpty()) + getIdentifierWithQuoting(tableName, quoteString) + else + getIdentifierWithQuoting(nameSpace, quoteString) + + "." + + getIdentifierWithQuoting(tableName, quoteString)) + } + + /** @return fully qualified table name with the schema (if a schema exists) without quotes. */ + @JvmStatic + fun getFullyQualifiedTableName(schemaName: String?, tableName: String): String { + return if (schemaName != null) "$schemaName.$tableName" else tableName + } + + /** @return the input identifier with quotes. */ + @JvmStatic + fun enquoteIdentifier(identifier: String?, quoteString: String?): String { + return quoteString + identifier + quoteString + } + + @JvmStatic + fun queryTable( + database: Database, + sqlQuery: String?, + tableName: String?, + schemaName: String? + ): AutoCloseableIterator { + val airbyteStreamNameNamespacePair = + AirbyteStreamUtils.convertFromNameAndNamespace(tableName, schemaName) + return AutoCloseableIterators.lazyIterator( + { + try { + LOGGER.info("Queueing query: {}", sqlQuery) + val stream = database!!.unsafeQuery(sqlQuery) + return@lazyIterator AutoCloseableIterators.fromStream( + stream, + airbyteStreamNameNamespacePair + ) + } catch (e: Exception) { + throw RuntimeException(e) + } + }, + airbyteStreamNameNamespacePair + ) + } + + @JvmStatic + fun logStreamSyncStatus(streams: List, syncType: String?) { + if (streams.isEmpty()) { + LOGGER.info("No Streams will be synced via {}.", syncType) + } else { + LOGGER.info("Streams to be synced via {} : {}", syncType, streams.size) + LOGGER.info("Streams: {}", prettyPrintConfiguredAirbyteStreamList(streams)) + } + } + + fun prettyPrintConfiguredAirbyteStreamList(streamList: List): String { + return streamList + .stream() + .map { s: ConfiguredAirbyteStream -> + "%s.%s".formatted(s.stream.namespace, s.stream.name) + } + .collect(Collectors.joining(", ")) + } + + class TableSizeInfo(tableSize: Long, avgRowLength: Long) { + val tableSize: Long + val avgRowLength: Long + + init { + this.tableSize = tableSize + this.avgRowLength = avgRowLength + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/RelationalDbReadUtil.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/RelationalDbReadUtil.kt new file mode 100644 index 0000000000000..fd8c5d79a565e --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/RelationalDbReadUtil.kt @@ -0,0 +1,67 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.relationaldb + +import com.google.common.collect.Sets +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import io.airbyte.protocol.models.v0.SyncMode +import java.util.stream.Collectors + +object RelationalDbReadUtil { + fun identifyStreamsToSnapshot( + catalog: ConfiguredAirbyteCatalog, + alreadySyncedStreams: Set + ): List { + val allStreams = AirbyteStreamNameNamespacePair.fromConfiguredCatalog(catalog) + val newlyAddedStreams: Set = + HashSet(Sets.difference(allStreams, alreadySyncedStreams)) + return catalog.streams + .stream() + .filter { c: ConfiguredAirbyteStream -> c.syncMode == SyncMode.INCREMENTAL } + .filter { stream: ConfiguredAirbyteStream -> + newlyAddedStreams.contains( + AirbyteStreamNameNamespacePair.fromAirbyteStream(stream.stream) + ) + } + .map { `object`: ConfiguredAirbyteStream -> Jsons.clone(`object`) } + .collect(Collectors.toList()) + } + + @JvmStatic + fun identifyStreamsForCursorBased( + catalog: ConfiguredAirbyteCatalog, + streamsForInitialLoad: List + ): List { + val initialLoadStreamsNamespacePairs = + streamsForInitialLoad + .stream() + .map { stream: ConfiguredAirbyteStream -> + AirbyteStreamNameNamespacePair.fromAirbyteStream(stream.stream) + } + .collect(Collectors.toSet()) + return catalog.streams + .stream() + .filter { c: ConfiguredAirbyteStream -> c.syncMode == SyncMode.INCREMENTAL } + .filter { stream: ConfiguredAirbyteStream -> + !initialLoadStreamsNamespacePairs.contains( + AirbyteStreamNameNamespacePair.fromAirbyteStream(stream.stream) + ) + } + .map { `object`: ConfiguredAirbyteStream -> Jsons.clone(`object`) } + .collect(Collectors.toList()) + } + + @JvmStatic + fun convertNameNamespacePairFromV0( + v1NameNamespacePair: io.airbyte.protocol.models.AirbyteStreamNameNamespacePair + ): AirbyteStreamNameNamespacePair { + return AirbyteStreamNameNamespacePair( + v1NameNamespacePair.name, + v1NameNamespacePair.namespace + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/StateDecoratingIterator.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/StateDecoratingIterator.kt new file mode 100644 index 0000000000000..7d7bc4498cded --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/StateDecoratingIterator.kt @@ -0,0 +1,241 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.relationaldb + +import com.google.common.collect.AbstractIterator +import io.airbyte.cdk.db.IncrementalUtils.compareCursors +import io.airbyte.cdk.integrations.source.relationaldb.state.StateManager +import io.airbyte.protocol.models.JsonSchemaPrimitiveUtil +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.AirbyteStateStats +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair +import java.util.* +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +@Deprecated("") +class StateDecoratingIterator( + private val messageIterator: Iterator, + private val stateManager: StateManager, + private val pair: AirbyteStreamNameNamespacePair, + private val cursorField: String, + private val initialCursor: String, + private val cursorType: JsonSchemaPrimitiveUtil.JsonSchemaPrimitive, + stateEmissionFrequency: Int +) : AbstractIterator(), MutableIterator { + private var currentMaxCursor: String? + private var currentMaxCursorRecordCount = 0L + private var hasEmittedFinalState = false + + /** + * These parameters are for intermediate state message emission. We can emit an intermediate + * state when the following two conditions are met. + * + * 1. The records are sorted by the cursor field. This is true when `stateEmissionFrequency` > + * 0. This logic is guaranteed in `AbstractJdbcSource#queryTableIncremental`, in which an "ORDER + * BY" clause is appended to the SQL query if `stateEmissionFrequency` > 0. + * + * 2. There is a cursor value that is ready for emission. A cursor value is "ready" if there is + * no more record with the same value. We cannot emit a cursor at will, because there may be + * multiple records with the same cursor value. If we emit a cursor ignoring this condition, + * should the sync fail right after the emission, the next sync may skip some records with the + * same cursor value due to "WHERE cursor_field > cursor" in + * `AbstractJdbcSource#queryTableIncremental`. + * + * The `intermediateStateMessage` is set to the latest state message that is ready for emission. + * For every `stateEmissionFrequency` messages, `emitIntermediateState` is set to true and the + * latest "ready" state will be emitted in the next `computeNext` call. + */ + private val stateEmissionFrequency: Int + private var totalRecordCount = 0 + + // In between each state message, recordCountInStateMessage will be reset to 0. + private var recordCountInStateMessage = 0 + private var emitIntermediateState = false + private var intermediateStateMessage: AirbyteMessage? = null + private var hasCaughtException = false + + /** + * @param stateManager Manager that maintains connector state + * @param pair Stream Name and Namespace (e.g. public.users) + * @param cursorField Path to the comparator field used to track the records read so far + * @param initialCursor name of the initial cursor column + * @param cursorType ENUM type of primitive values that can be used as a cursor for + * checkpointing + * @param stateEmissionFrequency If larger than 0, the records are sorted by the cursor field, + * and intermediate states will be emitted for every `stateEmissionFrequency` records. The order + * of the records is guaranteed in `AbstractJdbcSource#queryTableIncremental`, in which an + * "ORDER BY" clause is appended to the SQL query if `stateEmissionFrequency` > 0. + */ + init { + this.currentMaxCursor = initialCursor + this.stateEmissionFrequency = stateEmissionFrequency + } + + private fun getCursorCandidate(message: AirbyteMessage): String? { + val cursorCandidate = message.record.data[cursorField].asText() + return (if (cursorCandidate != null) replaceNull(cursorCandidate) else null) + } + + private fun replaceNull(cursorCandidate: String): String { + if (cursorCandidate.contains("\u0000")) { + return cursorCandidate.replace("\u0000".toRegex(), "") + } + return cursorCandidate + } + + /** + * Computes the next record retrieved from Source stream. Emits StateMessage containing data of + * the record that has been read so far + * + * If this method throws an exception, it will propagate outward to the `hasNext` or `next` + * invocation that invoked this method. Any further attempts to use the iterator will result in + * an [IllegalStateException]. + * + * @return [AirbyteStateMessage] containing information of the records read so far + */ + override fun computeNext(): AirbyteMessage? { + if (hasCaughtException) { + // Mark iterator as done since the next call to messageIterator will result in an + // IllegalArgumentException and resets exception caught state. + // This occurs when the previous iteration emitted state so this iteration cycle will + // indicate + // iteration is complete + hasCaughtException = false + return endOfData() + } + + if (messageIterator.hasNext()) { + var optionalIntermediateMessage = intermediateMessage + if (optionalIntermediateMessage.isPresent) { + return optionalIntermediateMessage.get() + } + + totalRecordCount++ + recordCountInStateMessage++ + // Use try-catch to catch Exception that could occur when connection to the database + // fails + try { + val message = messageIterator.next() + if (message.record.data.hasNonNull(cursorField)) { + val cursorCandidate = getCursorCandidate(message) + val cursorComparison = + compareCursors(currentMaxCursor, cursorCandidate, cursorType) + if (cursorComparison < 0) { + // Update the current max cursor only when current max cursor < cursor + // candidate from the message + if ( + stateEmissionFrequency > 0 && + currentMaxCursor != initialCursor && + messageIterator.hasNext() + ) { + // Only create an intermediate state when it is not the first or last + // record message. + // The last state message will be processed seperately. + intermediateStateMessage = + createStateMessage(false, recordCountInStateMessage) + } + currentMaxCursor = cursorCandidate + currentMaxCursorRecordCount = 1L + } else if (cursorComparison == 0) { + currentMaxCursorRecordCount++ + } else if (cursorComparison > 0 && stateEmissionFrequency > 0) { + LOGGER.warn( + "Intermediate state emission feature requires records to be processed in order according to the cursor value. Otherwise, " + + "data loss can occur." + ) + } + } + + if (stateEmissionFrequency > 0 && totalRecordCount % stateEmissionFrequency == 0) { + emitIntermediateState = true + } + + return message + } catch (e: Exception) { + emitIntermediateState = true + hasCaughtException = true + LOGGER.error("Message iterator failed to read next record.", e) + optionalIntermediateMessage = intermediateMessage + return optionalIntermediateMessage.orElse(endOfData()) + } + } else if (!hasEmittedFinalState) { + return createStateMessage(true, recordCountInStateMessage) + } else { + return endOfData() + } + } + + protected val intermediateMessage: Optional + /** + * Returns AirbyteStateMessage when in a ready state, a ready state means that it has + * satifies the conditions of: + * + * cursorField has changed (e.g. 08-22-2022 -> 08-23-2022) and there have been at least + * stateEmissionFrequency number of records since the last emission + * + * @return AirbyteStateMessage if one exists, otherwise Optional indicating state was not + * ready to be emitted + */ + get() { + val message: AirbyteMessage? = intermediateStateMessage + if (emitIntermediateState && message != null) { + if (message.state != null) { + message.state.sourceStats = + AirbyteStateStats().withRecordCount(recordCountInStateMessage.toDouble()) + } + + intermediateStateMessage = null + recordCountInStateMessage = 0 + emitIntermediateState = false + return Optional.of(message) + } + return Optional.empty() + } + + /** + * Creates AirbyteStateMessage while updating the cursor used to checkpoint the state of records + * read up so far + * + * @param isFinalState marker for if the final state of the iterator has been reached + * @param recordCount count of read messages + * @return AirbyteMessage which includes information on state of records read so far + */ + fun createStateMessage(isFinalState: Boolean, recordCount: Int): AirbyteMessage { + val stateMessage = + stateManager.updateAndEmit(pair, currentMaxCursor, currentMaxCursorRecordCount) + val cursorInfo = stateManager.getCursorInfo(pair) + + // logging once every 100 messages to reduce log verbosity + if (recordCount % 100 == 0) { + LOGGER.info( + "State report for stream {} - original: {} = {} (count {}) -> latest: {} = {} (count {})", + pair, + cursorInfo.map { obj: CursorInfo -> obj.originalCursorField }.orElse(null), + cursorInfo.map { obj: CursorInfo -> obj.originalCursor }.orElse(null), + cursorInfo.map { obj: CursorInfo -> obj.originalCursorRecordCount }.orElse(null), + cursorInfo.map { obj: CursorInfo -> obj.cursorField }.orElse(null), + cursorInfo.map { obj: CursorInfo -> obj.cursor }.orElse(null), + cursorInfo.map { obj: CursorInfo -> obj.cursorRecordCount }.orElse(null) + ) + } + + stateMessage?.withSourceStats(AirbyteStateStats().withRecordCount(recordCount.toDouble())) + if (isFinalState) { + hasEmittedFinalState = true + if (stateManager.getCursor(pair).isEmpty) { + LOGGER.warn( + "Cursor for stream {} was null. This stream will replicate all records on the next run", + pair + ) + } + } + + return AirbyteMessage().withType(AirbyteMessage.Type.STATE).withState(stateMessage) + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(StateDecoratingIterator::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/TableInfo.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/TableInfo.kt new file mode 100644 index 0000000000000..46ebe3bd96d86 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/TableInfo.kt @@ -0,0 +1,13 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.relationaldb + +/** This class encapsulates all externally relevant Table information. */ +data class TableInfo( + val nameSpace: String, + val name: String, + val fields: List, + val primaryKeys: List = emptyList(), + val cursorFields: List +) diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/AbstractStateManager.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/AbstractStateManager.kt new file mode 100644 index 0000000000000..935f8c6d008de --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/AbstractStateManager.kt @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.relationaldb.state + +import io.airbyte.cdk.integrations.source.relationaldb.CursorInfo +import io.airbyte.protocol.models.v0.AirbyteStateMessage +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import java.util.* +import java.util.function.Function +import java.util.function.Supplier + +/** + * Abstract implementation of the [StateManager] interface that provides common functionality for + * state manager implementations. + * + * @param The type associated with the state object managed by this manager. + * @param The type associated with the state object stored in the state managed by this manager. + * + */ +abstract class AbstractStateManager +@JvmOverloads +constructor( + catalog: ConfiguredAirbyteCatalog, + streamSupplier: Supplier>, + cursorFunction: Function?, + cursorFieldFunction: Function>?, + cursorRecordCountFunction: Function?, + namespacePairFunction: Function?, + onlyIncludeIncrementalStreams: Boolean = false +) : StateManager { + /** + * The [CursorManager] responsible for keeping track of the current cursor value for each stream + * managed by this state manager. + */ + private val cursorManager: CursorManager<*> = + CursorManager( + catalog, + streamSupplier, + cursorFunction, + cursorFieldFunction, + cursorRecordCountFunction, + namespacePairFunction, + onlyIncludeIncrementalStreams + ) + + override val pairToCursorInfoMap: Map + get() = cursorManager.pairToCursorInfo + + abstract override fun toState( + pair: Optional + ): AirbyteStateMessage +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/CursorManager.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/CursorManager.kt new file mode 100644 index 0000000000000..657e9437c603d --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/CursorManager.kt @@ -0,0 +1,311 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.relationaldb.state + +import com.google.common.annotations.VisibleForTesting +import io.airbyte.cdk.integrations.source.relationaldb.CursorInfo +import io.airbyte.protocol.models.v0.* +import java.util.* +import java.util.concurrent.* +import java.util.function.Function +import java.util.function.Supplier +import java.util.stream.Collectors +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * Manages the map of streams to current cursor values for state management. + * + * @param The type that represents the stream object which holds the current cursor information + * in the state. + */ +class CursorManager( + catalog: ConfiguredAirbyteCatalog, + streamSupplier: Supplier>, + cursorFunction: Function?, + cursorFieldFunction: Function>?, + cursorRecordCountFunction: Function?, + namespacePairFunction: Function?, + onlyIncludeIncrementalStreams: Boolean +) { + /** + * Map of streams (name/namespace tuple) to the current cursor information stored in the state. + */ + val pairToCursorInfo: Map + + /** + * Constructs a new [CursorManager] based on the configured connector and current state + * information. + * + * @param catalog The connector's configured catalog. + * @param streamSupplier A [Supplier] that provides the cursor manager with the collection of + * streams tracked by the connector's state. + * @param cursorFunction A [Function] that extracts the current cursor from a stream stored in + * the connector's state. + * @param cursorFieldFunction A [Function] that extracts the cursor field name from a stream + * stored in the connector's state. + * @param cursorRecordCountFunction A [Function] that extracts the cursor record count for a + * stream stored in the connector's state. + * @param namespacePairFunction A [Function] that generates a [AirbyteStreamNameNamespacePair] + * that identifies each stream in the connector's state. + */ + init { + pairToCursorInfo = + createCursorInfoMap( + catalog, + streamSupplier, + cursorFunction, + cursorFieldFunction, + cursorRecordCountFunction, + namespacePairFunction, + onlyIncludeIncrementalStreams + ) + } + + /** + * Creates the cursor information map that associates stream name/namespace tuples with the + * current cursor information for that stream as stored in the connector's state. + * + * @param catalog The connector's configured catalog. + * @param streamSupplier A [Supplier] that provides the cursor manager with the collection of + * streams tracked by the connector's state. + * @param cursorFunction A [Function] that extracts the current cursor from a stream stored in + * the connector's state. + * @param cursorFieldFunction A [Function] that extracts the cursor field name from a stream + * stored in the connector's state. + * @param cursorRecordCountFunction A [Function] that extracts the cursor record count for a + * stream stored in the connector's state. + * @param namespacePairFunction A [Function] that generates a [AirbyteStreamNameNamespacePair] + * that identifies each stream in the connector's state. + * @return A map of streams to current cursor information for the stream. + */ + @VisibleForTesting + protected fun createCursorInfoMap( + catalog: ConfiguredAirbyteCatalog, + streamSupplier: Supplier>, + cursorFunction: Function?, + cursorFieldFunction: Function>?, + cursorRecordCountFunction: Function?, + namespacePairFunction: Function?, + onlyIncludeIncrementalStreams: Boolean + ): Map { + val allStreamNames = + catalog.streams + .stream() + .filter { c: ConfiguredAirbyteStream -> + if (onlyIncludeIncrementalStreams) { + return@filter c.syncMode == SyncMode.INCREMENTAL + } + true + } + .map { obj: ConfiguredAirbyteStream -> obj.stream } + .map { stream: AirbyteStream? -> + AirbyteStreamNameNamespacePair.fromAirbyteStream(stream) + } + .collect(Collectors.toSet()) + allStreamNames.addAll( + streamSupplier + .get() + .stream() + .map(namespacePairFunction) + .filter { obj: AirbyteStreamNameNamespacePair? -> Objects.nonNull(obj) } + .collect(Collectors.toSet()) + ) + + val localMap: MutableMap = ConcurrentHashMap() + val pairToState = + streamSupplier + .get() + .stream() + .collect(Collectors.toMap(namespacePairFunction, Function.identity())) + val pairToConfiguredAirbyteStream = + catalog.streams + .stream() + .collect( + Collectors.toMap( + Function { stream: ConfiguredAirbyteStream? -> + AirbyteStreamNameNamespacePair.fromConfiguredAirbyteSteam(stream) + }, + Function.identity() + ) + ) + + for (pair in allStreamNames) { + val stateOptional: Optional = Optional.ofNullable(pairToState[pair]) + val streamOptional = Optional.ofNullable(pairToConfiguredAirbyteStream[pair]) + localMap[pair] = + createCursorInfoForStream( + pair, + stateOptional, + streamOptional, + cursorFunction, + cursorFieldFunction, + cursorRecordCountFunction + ) + } + + return localMap.toMap() + } + + /** + * Generates a [CursorInfo] object based on the data currently stored in the connector's state + * for the given stream. + * + * @param pair A [AirbyteStreamNameNamespacePair] that identifies a specific stream managed by + * the connector. + * @param stateOptional [Optional] containing the current state associated with the stream. + * @param streamOptional [Optional] containing the [ConfiguredAirbyteStream] associated with the + * stream. + * @param cursorFunction A [Function] that provides the current cursor from the state associated + * with the stream. + * @param cursorFieldFunction A [Function] that provides the cursor field name for the cursor + * stored in the state associated with the stream. + * @param cursorRecordCountFunction A [Function] that extracts the cursor record count for a + * stream stored in the connector's state. + * @return A [CursorInfo] object based on the data currently stored in the connector's state for + * the given stream. + */ + internal fun createCursorInfoForStream( + pair: AirbyteStreamNameNamespacePair?, + stateOptional: Optional, + streamOptional: Optional, + cursorFunction: Function?, + cursorFieldFunction: Function>?, + cursorRecordCountFunction: Function? + ): CursorInfo { + val originalCursorField = + stateOptional + .map(cursorFieldFunction) + .flatMap { f: List -> + if (f.isNotEmpty()) Optional.of(f[0]) else Optional.empty() + } + .orElse(null) + val originalCursor = stateOptional.map(cursorFunction).orElse(null) + val originalCursorRecordCount = stateOptional.map(cursorRecordCountFunction).orElse(0L) + + val cursor: String? + val cursorField: String? + val cursorRecordCount: Long + + // if cursor field is set in catalog. + if ( + streamOptional + .map> { obj: ConfiguredAirbyteStream -> obj.cursorField } + .isPresent + ) { + cursorField = + streamOptional + .map { obj: ConfiguredAirbyteStream -> obj.cursorField } + .flatMap { f: List -> + if (f.size > 0) Optional.of(f[0]) else Optional.empty() + } + .orElse(null) + // if cursor field is set in state. + if (stateOptional.map?>(cursorFieldFunction).isPresent) { + // if cursor field in catalog and state are the same. + if ( + stateOptional.map?>(cursorFieldFunction) == + streamOptional.map> { obj: ConfiguredAirbyteStream -> + obj.cursorField + } + ) { + cursor = stateOptional.map(cursorFunction).orElse(null) + cursorRecordCount = stateOptional.map(cursorRecordCountFunction).orElse(0L) + // If a matching cursor is found in the state, and it's value is null - this + // indicates a CDC stream + // and we shouldn't log anything. + if (cursor != null) { + LOGGER.info( + "Found matching cursor in state. Stream: {}. Cursor Field: {} Value: {} Count: {}", + pair, + cursorField, + cursor, + cursorRecordCount + ) + } + // if cursor field in catalog and state are different. + } else { + cursor = null + cursorRecordCount = 0L + LOGGER.info( + "Found cursor field. Does not match previous cursor field. Stream: {}. Original Cursor Field: {} (count {}). New Cursor Field: {}. Resetting cursor value.", + pair, + originalCursorField, + originalCursorRecordCount, + cursorField + ) + } + // if cursor field is not set in state but is set in catalog. + } else { + LOGGER.info( + "No cursor field set in catalog but not present in state. Stream: {}, New Cursor Field: {}. Resetting cursor value", + pair, + cursorField + ) + cursor = null + cursorRecordCount = 0L + } + // if cursor field is not set in catalog. + } else { + LOGGER.info( + "Cursor field set in state but not present in catalog. Stream: {}. Original Cursor Field: {}. Original value: {}. Resetting cursor.", + pair, + originalCursorField, + originalCursor + ) + cursorField = null + cursor = null + cursorRecordCount = 0L + } + + return CursorInfo( + originalCursorField, + originalCursor, + originalCursorRecordCount, + cursorField, + cursor, + cursorRecordCount + ) + } + + /** + * Retrieves an [Optional] possibly containing the current [CursorInfo] associated with the + * provided stream name/namespace tuple. + * + * @param pair The [AirbyteStreamNameNamespacePair] which identifies a stream. + * @return An [Optional] possibly containing the current [CursorInfo] associated with the + * provided stream name/namespace tuple. + */ + fun getCursorInfo(pair: AirbyteStreamNameNamespacePair?): Optional { + return Optional.ofNullable(pairToCursorInfo[pair]) + } + + /** + * Retrieves an [Optional] possibly containing the cursor field name associated with the cursor + * tracked in the state associated with the provided stream name/namespace tuple. + * + * @param pair The [AirbyteStreamNameNamespacePair] which identifies a stream. + * @return An [Optional] possibly containing the cursor field name associated with the cursor + * tracked in the state associated with the provided stream name/namespace tuple. + */ + fun getCursorField(pair: AirbyteStreamNameNamespacePair?): Optional { + return getCursorInfo(pair).map { obj: CursorInfo -> obj.cursorField } + } + + /** + * Retrieves an [Optional] possibly containing the cursor value tracked in the state associated + * with the provided stream name/namespace tuple. + * + * @param pair The [AirbyteStreamNameNamespacePair] which identifies a stream. + * @return An [Optional] possibly containing the cursor value tracked in the state associated + * with the provided stream name/namespace tuple. + */ + fun getCursor(pair: AirbyteStreamNameNamespacePair?): Optional { + return getCursorInfo(pair).map { obj: CursorInfo -> obj.cursor } + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(CursorManager::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/CursorStateMessageProducer.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/CursorStateMessageProducer.kt new file mode 100644 index 0000000000000..9f006f8f053d9 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/CursorStateMessageProducer.kt @@ -0,0 +1,146 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.relationaldb.state + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings +import io.airbyte.cdk.db.IncrementalUtils.compareCursors +import io.airbyte.cdk.db.IncrementalUtils.getCursorField +import io.airbyte.cdk.db.IncrementalUtils.getCursorType +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.AirbyteStateMessage +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import java.util.* +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +class CursorStateMessageProducer( + private val stateManager: StateManager?, + private val initialCursor: Optional +) : SourceStateMessageProducer { + private var currentMaxCursor: Optional + + // We keep this field to mark `cursor_record_count` and also to control logging frequency. + private var currentCursorRecordCount = 0 + private var intermediateStateMessage: AirbyteStateMessage? = null + + private var cursorOutOfOrderDetected = false + + init { + this.currentMaxCursor = initialCursor + } + + override fun generateStateMessageAtCheckpoint( + stream: ConfiguredAirbyteStream? + ): AirbyteStateMessage? { + // At this stage intermediate state message should never be null; otherwise it would have + // been + // blocked by shouldEmitStateMessage check. + val message = intermediateStateMessage + intermediateStateMessage = null + if (cursorOutOfOrderDetected) { + LOGGER.warn( + "Intermediate state emission feature requires records to be processed in order according to the cursor value. Otherwise, " + + "data loss can occur." + ) + } + return message + } + + /** + * Note: We do not try to catch exception here. If error/exception happens, we should fail the + * sync, and since we have saved state message before, we should be able to resume it in next + * sync if we have fixed the underlying issue, of if the issue is transient. + */ + @SuppressFBWarnings("NP_PARAMETER_MUST_BE_NONNULL_BUT_MARKED_AS_NULLABLE") + override fun processRecordMessage( + stream: ConfiguredAirbyteStream?, + message: AirbyteMessage + ): AirbyteMessage { + val cursorField = getCursorField(stream!!) + if (message.record.data.hasNonNull(cursorField)) { + val cursorCandidate = getCursorCandidate(cursorField, message) + val cursorType = getCursorType(stream, cursorField) + val cursorComparison = + compareCursors(currentMaxCursor.orElse(null), cursorCandidate, cursorType) + if (cursorComparison < 0) { + // Reset cursor but include current record message. This value will be used to + // create state message. + // Update the current max cursor only when current max cursor < cursor candidate + // from the message + if (currentMaxCursor != initialCursor) { + // Only create an intermediate state when it is not the first record. + intermediateStateMessage = createStateMessage(stream) + } + currentMaxCursor = Optional.of(cursorCandidate!!) + currentCursorRecordCount = 1 + } else if (cursorComparison > 0) { + cursorOutOfOrderDetected = true + } else { + currentCursorRecordCount++ + } + } + println("processed a record message. count: $currentCursorRecordCount") + return message + } + + @SuppressFBWarnings("NP_PARAMETER_MUST_BE_NONNULL_BUT_MARKED_AS_NULLABLE") + override fun createFinalStateMessage(stream: ConfiguredAirbyteStream?): AirbyteStateMessage? { + return createStateMessage(stream!!) + } + + /** Only sends out state message when there is a state message to be sent out. */ + override fun shouldEmitStateMessage(stream: ConfiguredAirbyteStream?): Boolean { + return intermediateStateMessage != null + } + + /** + * Creates AirbyteStateMessage while updating the cursor used to checkpoint the state of records + * read up so far + * + * @return AirbyteMessage which includes information on state of records read so far + */ + private fun createStateMessage(stream: ConfiguredAirbyteStream): AirbyteStateMessage? { + val pair = AirbyteStreamNameNamespacePair(stream.stream.name, stream.stream.namespace) + println( + "state message creation: " + + pair + + " " + + currentMaxCursor.orElse(null) + + " " + + currentCursorRecordCount + ) + val stateMessage = + stateManager!!.updateAndEmit( + pair, + currentMaxCursor.orElse(null), + currentCursorRecordCount.toLong() + ) + val cursorInfo = stateManager.getCursorInfo(pair) + + // logging once every 100 messages to reduce log verbosity + if (currentCursorRecordCount % LOG_FREQUENCY == 0) { + LOGGER.info("State report for stream {}: {}", pair, cursorInfo) + } + + return stateMessage + } + + private fun getCursorCandidate(cursorField: String, message: AirbyteMessage): String? { + val cursorCandidate = message.record.data[cursorField].asText() + return (if (cursorCandidate != null) replaceNull(cursorCandidate) else null) + } + + private fun replaceNull(cursorCandidate: String): String { + if (cursorCandidate.contains("\u0000")) { + return cursorCandidate.replace("\u0000".toRegex(), "") + } + return cursorCandidate + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(CursorStateMessageProducer::class.java) + private const val LOG_FREQUENCY = 100 + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/FailedRecordIteratorException.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/FailedRecordIteratorException.kt new file mode 100644 index 0000000000000..33434b23d9b74 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/FailedRecordIteratorException.kt @@ -0,0 +1,6 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.relationaldb.state + +class FailedRecordIteratorException(cause: Throwable?) : RuntimeException(cause) diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/GlobalStateManager.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/GlobalStateManager.kt new file mode 100644 index 0000000000000..9329d6d665540 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/GlobalStateManager.kt @@ -0,0 +1,179 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.relationaldb.state + +import io.airbyte.cdk.integrations.source.relationaldb.CdcStateManager +import io.airbyte.cdk.integrations.source.relationaldb.models.CdcState +import io.airbyte.cdk.integrations.source.relationaldb.models.DbState +import io.airbyte.cdk.integrations.source.relationaldb.models.DbStreamState +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.v0.* +import java.util.* +import java.util.function.Supplier +import java.util.stream.Collectors + +/** + * Global implementation of the [StateManager] interface. + * + * This implementation generates a single, global state object for the state tracked by this + * manager. + */ +class GlobalStateManager( + airbyteStateMessage: AirbyteStateMessage, + catalog: ConfiguredAirbyteCatalog +) : + AbstractStateManager( + catalog, + getStreamsSupplier(airbyteStateMessage), + StateGeneratorUtils.CURSOR_FUNCTION, + StateGeneratorUtils.CURSOR_FIELD_FUNCTION, + StateGeneratorUtils.CURSOR_RECORD_COUNT_FUNCTION, + StateGeneratorUtils.NAME_NAMESPACE_PAIR_FUNCTION, + true + ) { + /** + * Legacy [CdcStateManager] used to manage state for connectors that support Change Data Capture + * (CDC). + */ + override val cdcStateManager: CdcStateManager + + /** + * Constructs a new [GlobalStateManager] that is seeded with the provided [AirbyteStateMessage]. + * + * @param airbyteStateMessage The initial state represented as an [AirbyteStateMessage]. + * @param catalog The [ConfiguredAirbyteCatalog] for the connector associated with this state + * manager. + */ + init { + this.cdcStateManager = + CdcStateManager( + extractCdcState(airbyteStateMessage), + extractStreams(airbyteStateMessage), + airbyteStateMessage + ) + } + + override val rawStateMessages: List? + get() { + throw UnsupportedOperationException( + "Raw state retrieval not supported by global state manager." + ) + } + + override fun toState(pair: Optional): AirbyteStateMessage { + // Populate global state + val globalState = AirbyteGlobalState() + globalState.sharedState = Jsons.jsonNode(cdcStateManager.cdcState) + globalState.streamStates = StateGeneratorUtils.generateStreamStateList(pairToCursorInfoMap) + + // Generate the legacy state for backwards compatibility + val dbState = + StateGeneratorUtils.generateDbState(pairToCursorInfoMap) + .withCdc(true) + .withCdcState(cdcStateManager.cdcState) + + return AirbyteStateMessage() + .withType( + AirbyteStateMessage.AirbyteStateType.GLOBAL + ) // Temporarily include legacy state for backwards compatibility with the platform + .withData(Jsons.jsonNode(dbState)) + .withGlobal(globalState) + } + + /** + * Extracts the Change Data Capture (CDC) state stored in the initial state provided to this + * state manager. + * + * @param airbyteStateMessage The [AirbyteStateMessage] that contains the initial state provided + * to the state manager. + * @return The [CdcState] stored in the state, if any. Note that this will not be `null` but may + * be empty. + */ + private fun extractCdcState(airbyteStateMessage: AirbyteStateMessage?): CdcState? { + if (airbyteStateMessage!!.type == AirbyteStateMessage.AirbyteStateType.GLOBAL) { + return Jsons.`object`(airbyteStateMessage.global.sharedState, CdcState::class.java) + } else { + val legacyState = Jsons.`object`(airbyteStateMessage.data, DbState::class.java) + return legacyState?.cdcState + } + } + + private fun extractStreams( + airbyteStateMessage: AirbyteStateMessage? + ): Set { + if (airbyteStateMessage!!.type == AirbyteStateMessage.AirbyteStateType.GLOBAL) { + return airbyteStateMessage.global.streamStates + .stream() + .map { streamState: AirbyteStreamState -> + val cloned = Jsons.clone(streamState) + AirbyteStreamNameNamespacePair( + cloned.streamDescriptor.name, + cloned.streamDescriptor.namespace + ) + } + .collect(Collectors.toSet()) + } else { + val legacyState = Jsons.`object`(airbyteStateMessage.data, DbState::class.java) + return if (legacyState != null) + extractNamespacePairsFromDbStreamState(legacyState.streams) + else emptySet() + } + } + + private fun extractNamespacePairsFromDbStreamState( + streams: List + ): Set { + return streams + .stream() + .map { stream: DbStreamState -> + val cloned = Jsons.clone(stream) + AirbyteStreamNameNamespacePair(cloned.streamName, cloned.streamNamespace) + } + .collect(Collectors.toSet()) + } + + companion object { + /** + * Generates the [Supplier] that will be used to extract the streams from the incoming + * [AirbyteStateMessage]. + * + * @param airbyteStateMessage The [AirbyteStateMessage] supplied to this state manager with + * the initial state. + * @return A [Supplier] that will be used to fetch the streams present in the initial state. + */ + private fun getStreamsSupplier( + airbyteStateMessage: AirbyteStateMessage? + ): Supplier> { + /* + * If the incoming message has the state type set to GLOBAL, it is using the new format. Therefore, + * we can look for streams in the "global" field of the message. Otherwise, the message is still + * storing state in the legacy "data" field. + */ + return Supplier { + if (airbyteStateMessage!!.type == AirbyteStateMessage.AirbyteStateType.GLOBAL) { + return@Supplier airbyteStateMessage.global.streamStates + } else if (airbyteStateMessage.data != null) { + return@Supplier Jsons.`object`( + airbyteStateMessage.data, + DbState::class.java + ) + .streams + .stream() + .map { s: DbStreamState -> + AirbyteStreamState() + .withStreamState(Jsons.jsonNode(s)) + .withStreamDescriptor( + StreamDescriptor() + .withNamespace(s.streamNamespace) + .withName(s.streamName) + ) + } + .collect(Collectors.toList()) + } else { + return@Supplier listOf() + } + } + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/LegacyStateManager.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/LegacyStateManager.kt new file mode 100644 index 0000000000000..c379f25a9d1e2 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/LegacyStateManager.kt @@ -0,0 +1,118 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.relationaldb.state + +import io.airbyte.cdk.integrations.source.relationaldb.CdcStateManager +import io.airbyte.cdk.integrations.source.relationaldb.models.DbState +import io.airbyte.cdk.integrations.source.relationaldb.models.DbStreamState +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.v0.AirbyteStateMessage +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import java.util.* +import java.util.function.Function +import java.util.function.Supplier +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * Legacy implementation (pre-per-stream state support) of the [StateManager] interface. + * + * This implementation assumes that the state matches the [DbState] object and effectively tracks + * state as global across the streams managed by a connector. + */ +@Deprecated( + """This manager may be removed in the future if/once all connectors support per-stream + state management.""" +) +class LegacyStateManager(dbState: DbState, catalog: ConfiguredAirbyteCatalog) : + AbstractStateManager( + catalog, + Supplier { dbState.streams }, + CURSOR_FUNCTION, + CURSOR_FIELD_FUNCTION, + CURSOR_RECORD_COUNT_FUNCTION, + NAME_NAMESPACE_PAIR_FUNCTION + ) { + /** Tracks whether the connector associated with this state manager supports CDC. */ + private var isCdc: Boolean + + /** [CdcStateManager] used to manage state for connectors that support CDC. */ + override val cdcStateManager: CdcStateManager = + CdcStateManager( + dbState.cdcState, + AirbyteStreamNameNamespacePair.fromConfiguredCatalog(catalog), + null + ) + + /** + * Constructs a new [LegacyStateManager] that is seeded with the provided [DbState] instance. + * + * @param dbState The initial state represented as an [DbState] instance. + * @param catalog The [ConfiguredAirbyteCatalog] for the connector associated with this state + * manager. + */ + init { + this.isCdc = dbState.cdc ?: false + } + + override val rawStateMessages: List? + get() { + throw UnsupportedOperationException( + "Raw state retrieval not supported by global state manager." + ) + } + + override fun toState(pair: Optional): AirbyteStateMessage { + val dbState = + StateGeneratorUtils.generateDbState(pairToCursorInfoMap) + .withCdc(isCdc) + .withCdcState(cdcStateManager.cdcState) + + LOGGER.debug("Generated legacy state for {} streams", dbState.streams.size) + return AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.LEGACY) + .withData(Jsons.jsonNode(dbState)) + } + + override fun updateAndEmit( + pair: AirbyteStreamNameNamespacePair, + cursor: String? + ): AirbyteStateMessage? { + return updateAndEmit(pair, cursor, 0L) + } + + override fun updateAndEmit( + pair: AirbyteStreamNameNamespacePair, + cursor: String?, + cursorRecordCount: Long + ): AirbyteStateMessage? { + // cdc file gets updated by debezium so the "update" part is a no op. + if (!isCdc) { + return super.updateAndEmit(pair, cursor, cursorRecordCount) + } + + return toState(Optional.ofNullable(pair)) + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(LegacyStateManager::class.java) + + /** [Function] that extracts the cursor from the stream state. */ + private val CURSOR_FUNCTION = DbStreamState::getCursor + + /** [Function] that extracts the cursor field(s) from the stream state. */ + private val CURSOR_FIELD_FUNCTION = DbStreamState::getCursorField + + private val CURSOR_RECORD_COUNT_FUNCTION = Function { stream: DbStreamState -> + Objects.requireNonNullElse(stream.cursorRecordCount, 0L) + } + + /** [Function] that creates an [AirbyteStreamNameNamespacePair] from the stream state. */ + private val NAME_NAMESPACE_PAIR_FUNCTION = + Function { s: DbStreamState -> + AirbyteStreamNameNamespacePair(s!!.streamName, s.streamNamespace) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/SourceStateIterator.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/SourceStateIterator.kt new file mode 100644 index 0000000000000..be452fd41360b --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/SourceStateIterator.kt @@ -0,0 +1,100 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.relationaldb.state + +import com.google.common.collect.AbstractIterator +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.AirbyteStateStats +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import java.time.Duration +import java.time.Instant +import java.time.OffsetDateTime +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +open class SourceStateIterator( + private val messageIterator: Iterator, + private val stream: ConfiguredAirbyteStream?, + private val sourceStateMessageProducer: SourceStateMessageProducer, + private val stateEmitFrequency: StateEmitFrequency +) : AbstractIterator(), MutableIterator { + private var hasEmittedFinalState = false + private var recordCount = 0L + private var lastCheckpoint: Instant = Instant.now() + + override fun computeNext(): AirbyteMessage? { + var iteratorHasNextValue = false + try { + iteratorHasNextValue = messageIterator.hasNext() + } catch (ex: Exception) { + // If the underlying iterator throws an exception, we want to fail the sync, expecting + // sync/attempt + // will be restarted and + // sync will resume from the last state message. + throw FailedRecordIteratorException(ex) + } + if (iteratorHasNextValue) { + if ( + shouldEmitStateMessage() && + sourceStateMessageProducer.shouldEmitStateMessage(stream) + ) { + val stateMessage = + sourceStateMessageProducer.generateStateMessageAtCheckpoint(stream) + stateMessage!!.withSourceStats( + AirbyteStateStats().withRecordCount(recordCount.toDouble()) + ) + + recordCount = 0L + lastCheckpoint = Instant.now() + return AirbyteMessage().withType(AirbyteMessage.Type.STATE).withState(stateMessage) + } + // Use try-catch to catch Exception that could occur when connection to the database + // fails + try { + val message = messageIterator.next() + val processedMessage = + sourceStateMessageProducer.processRecordMessage(stream, message) + recordCount++ + return processedMessage + } catch (e: Exception) { + throw FailedRecordIteratorException(e) + } + } else if (!hasEmittedFinalState) { + hasEmittedFinalState = true + val finalStateMessageForStream = + sourceStateMessageProducer.createFinalStateMessage(stream) + finalStateMessageForStream!!.withSourceStats( + AirbyteStateStats().withRecordCount(recordCount.toDouble()) + ) + recordCount = 0L + return AirbyteMessage() + .withType(AirbyteMessage.Type.STATE) + .withState(finalStateMessageForStream) + } else { + return endOfData() + } + } + + // This method is used to check if we should emit a state message. If the record count is set to + // 0, + // we should not emit a state message. + // If the frequency is set to be zero, we should not use it. + private fun shouldEmitStateMessage(): Boolean { + if (stateEmitFrequency.syncCheckpointRecords == 0L) { + return false + } + if (recordCount >= stateEmitFrequency.syncCheckpointRecords) { + return true + } + if (!stateEmitFrequency.syncCheckpointDuration.isZero) { + return Duration.between(lastCheckpoint, OffsetDateTime.now()) + .compareTo(stateEmitFrequency.syncCheckpointDuration) > 0 + } + return false + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(SourceStateIterator::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/SourceStateMessageProducer.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/SourceStateMessageProducer.kt new file mode 100644 index 0000000000000..7c2fd5bc7c44b --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/SourceStateMessageProducer.kt @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.relationaldb.state + +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.AirbyteStateMessage +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream + +/** + * To be used with SourceStateIterator. SourceStateIterator will iterate over the records and + * generate state messages when needed. This interface defines how would those state messages be + * generated, and how the incoming record messages will be processed. + * + * @param + */ +interface SourceStateMessageProducer { + /** Returns a state message that should be emitted at checkpoint. */ + fun generateStateMessageAtCheckpoint(stream: ConfiguredAirbyteStream?): AirbyteStateMessage? + + /** For the incoming record message, this method defines how the connector will consume it. */ + fun processRecordMessage(stream: ConfiguredAirbyteStream?, message: T): AirbyteMessage + + /** + * At the end of the iteration, this method will be called and it will generate the final state + * message. + * + * @return + */ + fun createFinalStateMessage(stream: ConfiguredAirbyteStream?): AirbyteStateMessage? + + /** + * Determines if the iterator has reached checkpoint or not per connector's definition. By + * default iterator will check if the number of records processed is greater than the checkpoint + * interval or last state message has already passed syncCheckpointDuration. + */ + fun shouldEmitStateMessage(stream: ConfiguredAirbyteStream?): Boolean +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/StateEmitFrequency.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/StateEmitFrequency.kt new file mode 100644 index 0000000000000..6c2d0120cc6f1 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/StateEmitFrequency.kt @@ -0,0 +1,16 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.relationaldb.state + +import java.time.Duration + +class StateEmitFrequency(syncCheckpointRecords: Long, syncCheckpointDuration: Duration) { + val syncCheckpointRecords: Long + val syncCheckpointDuration: Duration + + init { + this.syncCheckpointRecords = syncCheckpointRecords + this.syncCheckpointDuration = syncCheckpointDuration + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/StateGeneratorUtils.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/StateGeneratorUtils.kt new file mode 100644 index 0000000000000..15369a1886e69 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/StateGeneratorUtils.kt @@ -0,0 +1,325 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.relationaldb.state + +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.collect.Lists +import io.airbyte.cdk.integrations.source.relationaldb.CursorInfo +import io.airbyte.cdk.integrations.source.relationaldb.models.CdcState +import io.airbyte.cdk.integrations.source.relationaldb.models.DbState +import io.airbyte.cdk.integrations.source.relationaldb.models.DbStreamState +import io.airbyte.commons.json.Jsons +import io.airbyte.configoss.StateType +import io.airbyte.configoss.StateWrapper +import io.airbyte.configoss.helpers.StateMessageHelper +import io.airbyte.protocol.models.v0.* +import java.util.* +import java.util.function.Function +import java.util.stream.Collectors +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** Collection of utilities that facilitate the generation of state objects. */ +object StateGeneratorUtils { + private val LOGGER: Logger = LoggerFactory.getLogger(StateGeneratorUtils::class.java) + + /** [Function] that extracts the cursor from the stream state. */ + val CURSOR_FUNCTION: Function = + Function { stream: AirbyteStreamState -> + val dbStreamState = extractState(stream) + dbStreamState.map { obj: DbStreamState -> obj.cursor }.orElse(null) + } + + /** [Function] that extracts the cursor field(s) from the stream state. */ + val CURSOR_FIELD_FUNCTION: Function> = + Function { stream: AirbyteStreamState -> + val dbStreamState = extractState(stream) + if (dbStreamState.isPresent) { + return@Function dbStreamState.get().cursorField + } else { + return@Function listOf() + } + } + + val CURSOR_RECORD_COUNT_FUNCTION: Function = + Function { stream: AirbyteStreamState -> + val dbStreamState = extractState(stream) + dbStreamState.map { obj: DbStreamState -> obj.cursorRecordCount }.orElse(0L) + } + + /** [Function] that creates an [AirbyteStreamNameNamespacePair] from the stream state. */ + val NAME_NAMESPACE_PAIR_FUNCTION: + Function = + Function { s: AirbyteStreamState -> + if (isValidStreamDescriptor(s.streamDescriptor)) + AirbyteStreamNameNamespacePair( + s.streamDescriptor.name, + s.streamDescriptor.namespace + ) + else null + } + + /** + * Generates the stream state for the given stream and cursor information. + * + * @param airbyteStreamNameNamespacePair The stream. + * @param cursorInfo The current cursor. + * @return The [AirbyteStreamState] representing the current state of the stream. + */ + fun generateStreamState( + airbyteStreamNameNamespacePair: AirbyteStreamNameNamespacePair, + cursorInfo: CursorInfo + ): AirbyteStreamState { + return AirbyteStreamState() + .withStreamDescriptor( + StreamDescriptor() + .withName(airbyteStreamNameNamespacePair.name) + .withNamespace(airbyteStreamNameNamespacePair.namespace) + ) + .withStreamState( + Jsons.jsonNode(generateDbStreamState(airbyteStreamNameNamespacePair, cursorInfo)) + ) + } + + /** + * Generates a list of valid stream states from the provided stream and cursor information. A + * stream state is considered to be valid if the stream has a valid descriptor (see + * [.isValidStreamDescriptor] for more details). + * + * @param pairToCursorInfoMap The map of stream name/namespace tuple to the current cursor + * information for that stream + * @return The list of stream states derived from the state information extracted from the + * provided map. + */ + fun generateStreamStateList( + pairToCursorInfoMap: Map + ): List { + return pairToCursorInfoMap.entries + .stream() + .sorted(java.util.Map.Entry.comparingByKey()) + .map { e: Map.Entry -> + generateStreamState(e.key, e.value) + } + .filter { s: AirbyteStreamState -> isValidStreamDescriptor(s.streamDescriptor) } + .collect(Collectors.toList()) + } + + /** + * Generates the legacy global state for backwards compatibility. + * + * @param pairToCursorInfoMap The map of stream name/namespace tuple to the current cursor + * information for that stream + * @return The legacy [DbState]. + */ + fun generateDbState( + pairToCursorInfoMap: Map + ): DbState { + return DbState() + .withCdc(false) + .withStreams( + pairToCursorInfoMap.entries + .stream() + .sorted( + java.util.Map.Entry.comparingByKey() + ) // sort by stream name then namespace for sanity. + .map { e: Map.Entry -> + generateDbStreamState(e.key, e.value) + } + .collect(Collectors.toList()) + ) + } + + /** + * Generates the [DbStreamState] for the given stream and cursor. + * + * @param airbyteStreamNameNamespacePair The stream. + * @param cursorInfo The current cursor. + * @return The [DbStreamState]. + */ + fun generateDbStreamState( + airbyteStreamNameNamespacePair: AirbyteStreamNameNamespacePair, + cursorInfo: CursorInfo + ): DbStreamState { + val state = + DbStreamState() + .withStreamName(airbyteStreamNameNamespacePair.name) + .withStreamNamespace(airbyteStreamNameNamespacePair.namespace) + .withCursorField( + if (cursorInfo.cursorField == null) emptyList() + else Lists.newArrayList(cursorInfo.cursorField) + ) + .withCursor(cursorInfo.cursor) + if (cursorInfo.cursorRecordCount > 0L) { + state.cursorRecordCount = cursorInfo.cursorRecordCount + } + return state + } + + /** + * Extracts the actual state from the [AirbyteStreamState] object. + * + * @param state The [AirbyteStreamState] that contains the actual stream state as JSON. + * @return An [Optional] possibly containing the deserialized representation of the stream state + * or an empty [Optional] if the state is not present or could not be deserialized. + */ + fun extractState(state: AirbyteStreamState): Optional { + try { + return Optional.ofNullable(Jsons.`object`(state.streamState, DbStreamState::class.java)) + } catch (e: IllegalArgumentException) { + LOGGER.error("Unable to extract state.", e) + return Optional.empty() + } + } + + /** + * Tests whether the provided [StreamDescriptor] is valid. A valid descriptor is defined as one + * that has a non-`null` name. + * + * See + * https://github.com/airbytehq/airbyte/blob/e63458fabb067978beb5eaa74d2bc130919b419f/docs/understanding-airbyte/airbyte-protocol.md + * for more details + * + * @param streamDescriptor A [StreamDescriptor] to be validated. + * @return `true` if the provided [StreamDescriptor] is valid or `false` if it is invalid. + */ + fun isValidStreamDescriptor(streamDescriptor: StreamDescriptor?): Boolean { + return if (streamDescriptor != null) { + streamDescriptor.name != null + } else { + false + } + } + + /** + * Converts a [AirbyteStateType.LEGACY] state message into a [AirbyteStateType.GLOBAL] message. + * + * @param airbyteStateMessage A [AirbyteStateType.LEGACY] state message. + * @return A [AirbyteStateType.GLOBAL] state message. + */ + @JvmStatic + fun convertLegacyStateToGlobalState( + airbyteStateMessage: AirbyteStateMessage + ): AirbyteStateMessage { + val dbState = Jsons.`object`(airbyteStateMessage.data, DbState::class.java) + val globalState = + AirbyteGlobalState() + .withSharedState(Jsons.jsonNode(dbState.cdcState)) + .withStreamStates( + dbState.streams + .stream() + .map { s: DbStreamState -> + AirbyteStreamState() + .withStreamDescriptor( + StreamDescriptor() + .withName(s.streamName) + .withNamespace(s.streamNamespace) + ) + .withStreamState(Jsons.jsonNode(s)) + } + .collect(Collectors.toList()) + ) + return AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + .withGlobal(globalState) + } + + /** + * Converts a [AirbyteStateType.LEGACY] state message into a list of [AirbyteStateType.STREAM] + * messages. + * + * @param airbyteStateMessage A [AirbyteStateType.LEGACY] state message. + * @return A list [AirbyteStateType.STREAM] state messages. + */ + fun convertLegacyStateToStreamState( + airbyteStateMessage: AirbyteStateMessage + ): List { + return Jsons.`object`(airbyteStateMessage.data, DbState::class.java) + .streams + .stream() + .map { s: DbStreamState -> + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream( + AirbyteStreamState() + .withStreamDescriptor( + StreamDescriptor() + .withNamespace(s.streamNamespace) + .withName(s.streamName) + ) + .withStreamState(Jsons.jsonNode(s)) + ) + } + .collect(Collectors.toList()) + } + + fun convertStateMessage( + state: io.airbyte.protocol.models.AirbyteStateMessage + ): AirbyteStateMessage { + return Jsons.`object`(Jsons.jsonNode(state), AirbyteStateMessage::class.java) + } + + /** + * Deserializes the state represented as JSON into an object representation. + * + * @param initialStateJson The state as JSON. + * @Param supportedStateType the [AirbyteStateType] supported by this connector. + * @return The deserialized object representation of the state. + */ + @JvmStatic + fun deserializeInitialState( + initialStateJson: JsonNode?, + supportedStateType: AirbyteStateMessage.AirbyteStateType + ): List { + val typedState = StateMessageHelper.getTypedState(initialStateJson) + return typedState + .map { state: StateWrapper -> + when (state.stateType) { + StateType.GLOBAL -> java.util.List.of(convertStateMessage(state.global)) + StateType.STREAM -> state.stateMessages.map { convertStateMessage(it) } + else -> + java.util.List.of( + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.LEGACY) + .withData(state.legacyState) + ) + } + } + .orElse(generateEmptyInitialState(supportedStateType)) + } + + /** + * Generates an empty, initial state for use by the connector. + * + * @Param supportedStateType the [AirbyteStateType] supported by this connector. + * @return The empty, initial state. + */ + private fun generateEmptyInitialState( + supportedStateType: AirbyteStateMessage.AirbyteStateType + ): List { + // For backwards compatibility with existing connectors + if (supportedStateType == AirbyteStateMessage.AirbyteStateType.LEGACY) { + return java.util.List.of( + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.LEGACY) + .withData(Jsons.jsonNode(DbState())) + ) + } else if (supportedStateType == AirbyteStateMessage.AirbyteStateType.GLOBAL) { + val globalState = + AirbyteGlobalState() + .withSharedState(Jsons.jsonNode(CdcState())) + .withStreamStates(listOf()) + return java.util.List.of( + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + .withGlobal(globalState) + ) + } else { + return java.util.List.of( + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream(AirbyteStreamState()) + ) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/StateManager.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/StateManager.kt new file mode 100644 index 0000000000000..9588478c6ac51 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/StateManager.kt @@ -0,0 +1,176 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.relationaldb.state + +import com.google.common.base.Preconditions +import io.airbyte.cdk.integrations.source.relationaldb.CdcStateManager +import io.airbyte.cdk.integrations.source.relationaldb.CursorInfo +import io.airbyte.protocol.models.v0.AirbyteStateMessage +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair +import java.util.* +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * Defines a manager that manages connector state. Connector state is used to keep track of the data + * synced by the connector. + * + * @param The type of the state maintained by the manager. + * @param The type of the stream(s) stored within the state maintained by the manager. + */ +interface StateManager { + /** + * Retrieves the [CdcStateManager] associated with the state manager. + * + * @return The [CdcStateManager] + * @throws UnsupportedOperationException if the state manager does not support tracking change + * data capture (CDC) state. + */ + val cdcStateManager: CdcStateManager + + /** + * Retries the raw state messages associated with the state manager. This is required for + * database-specific sync modes (e.g. Xmin) that would want to handle and parse their own state + * + * @return the list of airbyte state messages + * @throws UnsupportedOperationException if the state manager does not support retrieving raw + * state. + */ + val rawStateMessages: List? + + /** + * Retrieves the map of stream name/namespace tuple to the current cursor information for that + * stream. + * + * @return The map of stream name/namespace tuple to the current cursor information for that + * stream as maintained by this state manager. + */ + val pairToCursorInfoMap: Map + + /** + * Generates an [AirbyteStateMessage] that represents the current state contained in the state + * manager. + * + * @param pair The [AirbyteStreamNameNamespacePair] that represents a stream managed by the + * state manager. + * @return The [AirbyteStateMessage] that represents the current state contained in the state + * manager. + */ + fun toState(pair: Optional): AirbyteStateMessage + + /** + * Retrieves an [Optional] possibly containing the cursor value tracked in the state associated + * with the provided stream name/namespace tuple. + * + * @param pair The [AirbyteStreamNameNamespacePair] which identifies a stream. + * @return An [Optional] possibly containing the cursor value tracked in the state associated + * with the provided stream name/namespace tuple. + */ + fun getCursor(pair: AirbyteStreamNameNamespacePair?): Optional { + return getCursorInfo(pair).map { obj: CursorInfo -> obj.cursor } + } + + /** + * Retrieves an [Optional] possibly containing the cursor field name associated with the cursor + * tracked in the state associated with the provided stream name/namespace tuple. + * + * @param pair The [AirbyteStreamNameNamespacePair] which identifies a stream. + * @return An [Optional] possibly containing the cursor field name associated with the cursor + * tracked in the state associated with the provided stream name/namespace tuple. + */ + fun getCursorField(pair: AirbyteStreamNameNamespacePair?): Optional? { + return getCursorInfo(pair).map { obj: CursorInfo -> obj.cursorField } + } + + /** + * Retrieves an [Optional] possibly containing the original cursor value tracked in the state + * associated with the provided stream name/namespace tuple. + * + * @param pair The [AirbyteStreamNameNamespacePair] which identifies a stream. + * @return An [Optional] possibly containing the original cursor value tracked in the state + * associated with the provided stream name/namespace tuple. + */ + fun getOriginalCursor(pair: AirbyteStreamNameNamespacePair?): Optional? { + return getCursorInfo(pair).map { obj: CursorInfo -> obj.originalCursor } + } + + /** + * Retrieves an [Optional] possibly containing the original cursor field name associated with + * the cursor tracked in the state associated with the provided stream name/namespace tuple. + * + * @param pair The [AirbyteStreamNameNamespacePair] which identifies a stream. + * @return An [Optional] possibly containing the original cursor field name associated with the + * cursor tracked in the state associated with the provided stream name/namespace tuple. + */ + fun getOriginalCursorField(pair: AirbyteStreamNameNamespacePair?): Optional? { + return getCursorInfo(pair).map { obj: CursorInfo -> obj.originalCursorField } + } + + /** + * Retrieves the current cursor information stored in the state manager for the steam + * name/namespace tuple. + * + * @param pair The [AirbyteStreamNameNamespacePair] that represents a stream managed by the + * state manager. + * @return [Optional] that potentially contains the current cursor information for the given + * stream name/namespace tuple. + */ + fun getCursorInfo(pair: AirbyteStreamNameNamespacePair?): Optional { + return Optional.ofNullable(pairToCursorInfoMap!![pair]) + } + + /** + * Emits the current state maintained by the manager as an [AirbyteStateMessage]. + * + * @param pair The [AirbyteStreamNameNamespacePair] that represents a stream managed by the + * state manager. + * @return An [AirbyteStateMessage] that represents the current state maintained by the state + * manager. + */ + fun emit(pair: Optional): AirbyteStateMessage? { + return toState(pair) + } + + /** + * Updates the cursor associated with the provided stream name/namespace pair and emits the + * current state maintained by the state manager. + * + * @param pair The [AirbyteStreamNameNamespacePair] that represents a stream managed by the + * state manager. + * @param cursor The new value for the cursor associated with the + * [AirbyteStreamNameNamespacePair] that represents a stream managed by the state manager. + * @return An [AirbyteStateMessage] that represents the current state maintained by the state + * manager. + */ + fun updateAndEmit(pair: AirbyteStreamNameNamespacePair, cursor: String?): AirbyteStateMessage? { + return updateAndEmit(pair, cursor, 0L) + } + + fun updateAndEmit( + pair: AirbyteStreamNameNamespacePair, + cursor: String?, + cursorRecordCount: Long + ): AirbyteStateMessage? { + val cursorInfo = getCursorInfo(pair) + Preconditions.checkState( + cursorInfo.isPresent, + "Could not find cursor information for stream: $pair" + ) + cursorInfo.get().setCursor(cursor) + if (cursorRecordCount > 0L) { + cursorInfo.get().setCursorRecordCount(cursorRecordCount) + } + LOGGER.debug( + "Updating cursor value for {} to {} (count {})...", + pair, + cursor, + cursorRecordCount + ) + return emit(Optional.ofNullable(pair)) + } + + companion object { + val LOGGER: Logger = LoggerFactory.getLogger(StateManager::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/StateManagerFactory.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/StateManagerFactory.kt new file mode 100644 index 0000000000000..30816a665a94e --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/StateManagerFactory.kt @@ -0,0 +1,142 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.relationaldb.state + +import io.airbyte.cdk.integrations.source.relationaldb.models.DbState +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.v0.AirbyteStateMessage +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** Factory class that creates [StateManager] instances based on the provided state. */ +object StateManagerFactory { + private val LOGGER: Logger = LoggerFactory.getLogger(StateManagerFactory::class.java) + + /** + * Creates a [StateManager] based on the provided state object and catalog. This method will + * handle the conversion of the provided state to match the requested state manager based on the + * provided [AirbyteStateType]. + * + * @param supportedStateType The type of state supported by the connector. + * @param initialState The deserialized initial state that will be provided to the selected + * [StateManager]. + * @param catalog The [ConfiguredAirbyteCatalog] for the connector that will utilize the state + * manager. + * @return A newly created [StateManager] implementation based on the provided state. + */ + @JvmStatic + fun createStateManager( + supportedStateType: AirbyteStateMessage.AirbyteStateType?, + initialState: List?, + catalog: ConfiguredAirbyteCatalog + ): StateManager { + if (initialState != null && !initialState.isEmpty()) { + val airbyteStateMessage = initialState[0] + when (supportedStateType) { + AirbyteStateMessage.AirbyteStateType.LEGACY -> { + LOGGER.info( + "Legacy state manager selected to manage state object with type {}.", + airbyteStateMessage!!.type + ) + @Suppress("deprecation") + val retVal: StateManager = + LegacyStateManager( + Jsons.`object`(airbyteStateMessage.data, DbState::class.java), + catalog + ) + return retVal + } + AirbyteStateMessage.AirbyteStateType.GLOBAL -> { + LOGGER.info( + "Global state manager selected to manage state object with type {}.", + airbyteStateMessage!!.type + ) + return GlobalStateManager(generateGlobalState(airbyteStateMessage), catalog) + } + AirbyteStateMessage.AirbyteStateType.STREAM -> { + LOGGER.info( + "Stream state manager selected to manage state object with type {}.", + airbyteStateMessage!!.type + ) + return StreamStateManager(generateStreamState(initialState), catalog) + } + else -> { + LOGGER.info( + "Stream state manager selected to manage state object with type {}.", + airbyteStateMessage!!.type + ) + return StreamStateManager(generateStreamState(initialState), catalog) + } + } + } else { + throw IllegalArgumentException( + "Failed to create state manager due to empty state list." + ) + } + } + + /** + * Handles the conversion between a different state type and the global state. This method + * handles the following transitions: + * + * * Stream -> Global (not supported, results in [IllegalArgumentException] + * * Legacy -> Global (supported) + * * Global -> Global (supported/no conversion required) + * + * @param airbyteStateMessage The current state that is to be converted to global state. + * @return The converted state message. + * @throws IllegalArgumentException if unable to convert between the given state type and + * global. + */ + private fun generateGlobalState(airbyteStateMessage: AirbyteStateMessage): AirbyteStateMessage { + var globalStateMessage = airbyteStateMessage + + when (airbyteStateMessage!!.type) { + AirbyteStateMessage.AirbyteStateType.STREAM -> + throw IllegalArgumentException( + "Unable to convert connector state from stream to global. Please reset the connection to continue." + ) + AirbyteStateMessage.AirbyteStateType.LEGACY -> { + globalStateMessage = + StateGeneratorUtils.convertLegacyStateToGlobalState(airbyteStateMessage) + LOGGER.info("Legacy state converted to global state.", airbyteStateMessage.type) + } + AirbyteStateMessage.AirbyteStateType.GLOBAL -> {} + else -> {} + } + return globalStateMessage + } + + /** + * Handles the conversion between a different state type and the stream state. This method + * handles the following transitions: + * + * * Global -> Stream (not supported, results in [IllegalArgumentException] + * * Legacy -> Stream (supported) + * * Stream -> Stream (supported/no conversion required) + * + * @param states The list of current states. + * @return The converted state messages. + * @throws IllegalArgumentException if unable to convert between the given state type and + * stream. + */ + private fun generateStreamState(states: List): List { + val airbyteStateMessage = states[0] + val streamStates: MutableList = ArrayList() + when (airbyteStateMessage!!.type) { + AirbyteStateMessage.AirbyteStateType.GLOBAL -> + throw IllegalArgumentException( + "Unable to convert connector state from global to stream. Please reset the connection to continue." + ) + AirbyteStateMessage.AirbyteStateType.LEGACY -> + streamStates.addAll( + StateGeneratorUtils.convertLegacyStateToStreamState(airbyteStateMessage) + ) + AirbyteStateMessage.AirbyteStateType.STREAM -> streamStates.addAll(states) + else -> streamStates.addAll(states) + } + return streamStates + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/StreamStateManager.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/StreamStateManager.kt new file mode 100644 index 0000000000000..d2d7997747612 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/StreamStateManager.kt @@ -0,0 +1,94 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.relationaldb.state + +import io.airbyte.cdk.integrations.source.relationaldb.CdcStateManager +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.v0.AirbyteStateMessage +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair +import io.airbyte.protocol.models.v0.AirbyteStreamState +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import java.util.* +import java.util.function.Supplier +import java.util.stream.Collectors +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * Per-stream implementation of the [StateManager] interface. + * + * This implementation generates a state object for each stream detected in catalog/map of known + * streams to cursor information stored in this manager. + */ +open class StreamStateManager +/** + * Constructs a new [StreamStateManager] that is seeded with the provided [AirbyteStateMessage]. + * + * @param airbyteStateMessages The initial state represented as a list of [AirbyteStateMessage]s. + * @param catalog The [ConfiguredAirbyteCatalog] for the connector associated with this state + * manager. + */ +( + private val rawAirbyteStateMessages: List, + catalog: ConfiguredAirbyteCatalog +) : + AbstractStateManager( + catalog, + Supplier { + rawAirbyteStateMessages.stream().map { it.stream }.collect(Collectors.toList()) + }, + StateGeneratorUtils.CURSOR_FUNCTION, + StateGeneratorUtils.CURSOR_FIELD_FUNCTION, + StateGeneratorUtils.CURSOR_RECORD_COUNT_FUNCTION, + StateGeneratorUtils.NAME_NAMESPACE_PAIR_FUNCTION + ) { + override val cdcStateManager: CdcStateManager + get() { + throw UnsupportedOperationException( + "CDC state management not supported by stream state manager." + ) + } + + override val rawStateMessages: List? + get() = rawAirbyteStateMessages + + override fun toState(pair: Optional): AirbyteStateMessage { + if (pair.isPresent) { + val pairToCursorInfoMap = pairToCursorInfoMap + val cursorInfo = Optional.ofNullable(pairToCursorInfoMap!![pair.get()]) + + if (cursorInfo.isPresent) { + LOGGER.debug("Generating state message for {}...", pair) + return AirbyteStateMessage() + .withType( + AirbyteStateMessage.AirbyteStateType.STREAM + ) // Temporarily include legacy state for backwards compatibility with the + // platform + .withData( + Jsons.jsonNode(StateGeneratorUtils.generateDbState(pairToCursorInfoMap)) + ) + .withStream( + StateGeneratorUtils.generateStreamState(pair.get(), cursorInfo.get()) + ) + } else { + LOGGER.warn( + "Cursor information could not be located in state for stream {}. Returning a new, empty state message...", + pair + ) + return AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream(AirbyteStreamState()) + } + } else { + LOGGER.warn("Stream not provided. Returning a new, empty state message...") + return AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream(AirbyteStreamState()) + } + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(StreamStateManager::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/debezium/AirbyteDebeziumHandlerTest.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/debezium/AirbyteDebeziumHandlerTest.java deleted file mode 100644 index 95b8e5e26d965..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/debezium/AirbyteDebeziumHandlerTest.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.debezium; - -import com.google.common.collect.Lists; -import io.airbyte.protocol.models.Field; -import io.airbyte.protocol.models.JsonSchemaType; -import io.airbyte.protocol.models.v0.AirbyteCatalog; -import io.airbyte.protocol.models.v0.CatalogHelpers; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.SyncMode; -import java.util.List; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -public class AirbyteDebeziumHandlerTest { - - @Test - public void shouldUseCdcTestShouldReturnTrue() { - final AirbyteCatalog catalog = new AirbyteCatalog().withStreams(List.of( - CatalogHelpers.createAirbyteStream( - "MODELS_STREAM_NAME", - "MODELS_SCHEMA", - Field.of("COL_ID", JsonSchemaType.NUMBER), - Field.of("COL_MAKE_ID", JsonSchemaType.NUMBER), - Field.of("COL_MODEL", JsonSchemaType.STRING)) - .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) - .withSourceDefinedPrimaryKey(List.of(List.of("COL_ID"))))); - final ConfiguredAirbyteCatalog configuredCatalog = CatalogHelpers - .toDefaultConfiguredCatalog(catalog); - // set all streams to incremental. - configuredCatalog.getStreams().forEach(s -> s.setSyncMode(SyncMode.INCREMENTAL)); - - Assertions.assertTrue(AirbyteDebeziumHandler.isAnyStreamIncrementalSyncMode(configuredCatalog)); - } - - @Test - public void shouldUseCdcTestShouldReturnFalse() { - final AirbyteCatalog catalog = new AirbyteCatalog().withStreams(List.of( - CatalogHelpers.createAirbyteStream( - "MODELS_STREAM_NAME", - "MODELS_SCHEMA", - Field.of("COL_ID", JsonSchemaType.NUMBER), - Field.of("COL_MAKE_ID", JsonSchemaType.NUMBER), - Field.of("COL_MODEL", JsonSchemaType.STRING)) - .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) - .withSourceDefinedPrimaryKey(List.of(List.of("COL_ID"))))); - final ConfiguredAirbyteCatalog configuredCatalog = CatalogHelpers - .toDefaultConfiguredCatalog(catalog); - - Assertions.assertFalse(AirbyteDebeziumHandler.isAnyStreamIncrementalSyncMode(configuredCatalog)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/debezium/AirbyteFileOffsetBackingStoreTest.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/debezium/AirbyteFileOffsetBackingStoreTest.java deleted file mode 100644 index 70fdefe0dd9ee..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/debezium/AirbyteFileOffsetBackingStoreTest.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.debezium; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import io.airbyte.cdk.integrations.debezium.internals.AirbyteFileOffsetBackingStore; -import io.airbyte.commons.io.IOs; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.resources.MoreResources; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.Map; -import java.util.Optional; -import org.junit.jupiter.api.Test; - -class AirbyteFileOffsetBackingStoreTest { - - @SuppressWarnings("UnstableApiUsage") - @Test - void test() throws IOException { - final Path testRoot = Files.createTempDirectory(Path.of("/tmp"), "offset-store-test"); - - final byte[] bytes = MoreResources.readBytes("test_debezium_offset.dat"); - final Path templateFilePath = testRoot.resolve("template_offset.dat"); - IOs.writeFile(templateFilePath, bytes); - - final Path writeFilePath = testRoot.resolve("offset.dat"); - final Path secondWriteFilePath = testRoot.resolve("offset_2.dat"); - - final AirbyteFileOffsetBackingStore offsetStore = new AirbyteFileOffsetBackingStore(templateFilePath, Optional.empty()); - final Map offset = offsetStore.read(); - - final AirbyteFileOffsetBackingStore offsetStore2 = new AirbyteFileOffsetBackingStore(writeFilePath, Optional.empty()); - offsetStore2.persist(Jsons.jsonNode(offset)); - final Map stateFromOffsetStore2 = offsetStore2.read(); - - final AirbyteFileOffsetBackingStore offsetStore3 = new AirbyteFileOffsetBackingStore(secondWriteFilePath, Optional.empty()); - offsetStore3.persist(Jsons.jsonNode(stateFromOffsetStore2)); - final Map stateFromOffsetStore3 = offsetStore3.read(); - - // verify that, after a round trip through the offset store, we get back the same data. - assertEquals(stateFromOffsetStore2, stateFromOffsetStore3); - // verify that the file written by the offset store is identical to the template file. - assertTrue(com.google.common.io.Files.equal(secondWriteFilePath.toFile(), writeFilePath.toFile())); - } - - @Test - void test2() throws IOException { - final Path testRoot = Files.createTempDirectory(Path.of("/tmp"), "offset-store-test"); - - final byte[] bytes = MoreResources.readBytes("test_debezium_offset.dat"); - final Path templateFilePath = testRoot.resolve("template_offset.dat"); - IOs.writeFile(templateFilePath, bytes); - - final Path writeFilePath = testRoot.resolve("offset.dat"); - final Path secondWriteFilePath = testRoot.resolve("offset_2.dat"); - - final AirbyteFileOffsetBackingStore offsetStore = new AirbyteFileOffsetBackingStore(templateFilePath, Optional.of("orders")); - final Map offset = offsetStore.read(); - - final AirbyteFileOffsetBackingStore offsetStore2 = new AirbyteFileOffsetBackingStore(writeFilePath, Optional.of("orders")); - offsetStore2.persist(Jsons.jsonNode(offset)); - final Map stateFromOffsetStore2 = offsetStore2.read(); - - final AirbyteFileOffsetBackingStore offsetStore3 = new AirbyteFileOffsetBackingStore(secondWriteFilePath, Optional.of("orders")); - offsetStore3.persist(Jsons.jsonNode(stateFromOffsetStore2)); - final Map stateFromOffsetStore3 = offsetStore3.read(); - - // verify that, after a round trip through the offset store, we get back the same data. - assertEquals(stateFromOffsetStore2, stateFromOffsetStore3); - // verify that the file written by the offset store is identical to the template file. - assertTrue(com.google.common.io.Files.equal(secondWriteFilePath.toFile(), writeFilePath.toFile())); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/debezium/DebeziumRecordPublisherTest.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/debezium/DebeziumRecordPublisherTest.java deleted file mode 100644 index be906557f431e..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/debezium/DebeziumRecordPublisherTest.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.debezium; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import com.google.common.collect.ImmutableList; -import io.airbyte.cdk.integrations.debezium.internals.RelationalDbDebeziumPropertiesManager; -import io.airbyte.protocol.models.Field; -import io.airbyte.protocol.models.JsonSchemaType; -import io.airbyte.protocol.models.v0.CatalogHelpers; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.SyncMode; -import java.util.regex.Pattern; -import org.junit.jupiter.api.Test; - -class DebeziumRecordPublisherTest { - - @Test - public void testTableIncludelistCreation() { - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withStreams(ImmutableList.of( - CatalogHelpers.createConfiguredAirbyteStream("id_and_name", "public").withSyncMode(SyncMode.INCREMENTAL), - CatalogHelpers.createConfiguredAirbyteStream("id_,something", "public").withSyncMode(SyncMode.INCREMENTAL), - CatalogHelpers.createConfiguredAirbyteStream("n\"aMéS", "public").withSyncMode(SyncMode.INCREMENTAL))); - - final String expectedWhitelist = "\\Qpublic.id_and_name\\E,\\Qpublic.id_\\,something\\E,\\Qpublic.n\"aMéS\\E"; - final String actualWhitelist = RelationalDbDebeziumPropertiesManager.getTableIncludelist(catalog); - - assertEquals(expectedWhitelist, actualWhitelist); - } - - @Test - public void testTableIncludelistFiltersFullRefresh() { - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withStreams(ImmutableList.of( - CatalogHelpers.createConfiguredAirbyteStream("id_and_name", "public").withSyncMode(SyncMode.INCREMENTAL), - CatalogHelpers.createConfiguredAirbyteStream("id_and_name2", "public").withSyncMode(SyncMode.FULL_REFRESH))); - - final String expectedWhitelist = "\\Qpublic.id_and_name\\E"; - final String actualWhitelist = RelationalDbDebeziumPropertiesManager.getTableIncludelist(catalog); - - assertEquals(expectedWhitelist, actualWhitelist); - } - - @Test - public void testColumnIncludelistFiltersFullRefresh() { - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withStreams(ImmutableList.of( - CatalogHelpers.createConfiguredAirbyteStream( - "id_and_name", - "public", - Field.of("fld1", JsonSchemaType.NUMBER), Field.of("fld2", JsonSchemaType.STRING)).withSyncMode(SyncMode.INCREMENTAL), - CatalogHelpers.createConfiguredAirbyteStream("id_,something", "public").withSyncMode(SyncMode.INCREMENTAL), - CatalogHelpers.createConfiguredAirbyteStream("id_and_name2", "public").withSyncMode(SyncMode.FULL_REFRESH), - CatalogHelpers.createConfiguredAirbyteStream("n\"aMéS", "public").withSyncMode(SyncMode.INCREMENTAL))); - - final String expectedWhitelist = "\\Qpublic.id_and_name\\E\\.(\\Qfld2\\E|\\Qfld1\\E),\\Qpublic.id_\\,something\\E,\\Qpublic.n\"aMéS\\E"; - final String actualWhitelist = RelationalDbDebeziumPropertiesManager.getColumnIncludeList(catalog); - - assertEquals(expectedWhitelist, actualWhitelist); - } - - @Test - public void testColumnIncludeListEscaping() { - // final String a = "public\\.products\\*\\^\\$\\+-\\\\"; - // final String b = "public.products*^$+-\\"; - // final Pattern p = Pattern.compile(a, Pattern.UNIX_LINES); - // assertTrue(p.matcher(b).find()); - // assertTrue(Pattern.compile(Pattern.quote(b)).matcher(b).find()); - - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withStreams(ImmutableList.of( - CatalogHelpers.createConfiguredAirbyteStream( - "id_and_name", - "public", - Field.of("fld1", JsonSchemaType.NUMBER), Field.of("fld2", JsonSchemaType.STRING)).withSyncMode(SyncMode.INCREMENTAL))); - - final String anchored = "^" + RelationalDbDebeziumPropertiesManager.getColumnIncludeList(catalog) + "$"; - final Pattern pattern = Pattern.compile(anchored); - - assertTrue(pattern.matcher("public.id_and_name.fld1").find()); - assertTrue(pattern.matcher("public.id_and_name.fld2").find()); - assertFalse(pattern.matcher("ic.id_and_name.fl").find()); - assertFalse(pattern.matcher("ppppublic.id_and_name.fld2333").find()); - assertFalse(pattern.matcher("public.id_and_name.fld_wrong_wrong").find()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/debezium/internals/AirbyteSchemaHistoryStorageTest.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/debezium/internals/AirbyteSchemaHistoryStorageTest.java deleted file mode 100644 index 482936bd54aa0..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/debezium/internals/AirbyteSchemaHistoryStorageTest.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.debezium.internals; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import io.airbyte.cdk.integrations.debezium.internals.AirbyteSchemaHistoryStorage.SchemaHistory; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.resources.MoreResources; -import java.io.IOException; -import java.util.Optional; -import org.junit.jupiter.api.Test; - -public class AirbyteSchemaHistoryStorageTest { - - @Test - public void testForContentBiggerThan1MBLimit() throws IOException { - final String contentReadDirectlyFromFile = MoreResources.readResource("dbhistory_greater_than_1_mb.dat"); - - final AirbyteSchemaHistoryStorage schemaHistoryStorageFromUncompressedContent = AirbyteSchemaHistoryStorage.initializeDBHistory( - new SchemaHistory<>(Optional.of(Jsons.jsonNode(contentReadDirectlyFromFile)), - false), - true); - final SchemaHistory schemaHistoryFromUncompressedContent = schemaHistoryStorageFromUncompressedContent.read(); - - assertTrue(schemaHistoryFromUncompressedContent.isCompressed()); - assertNotNull(schemaHistoryFromUncompressedContent.schema()); - assertEquals(contentReadDirectlyFromFile, schemaHistoryStorageFromUncompressedContent.readUncompressed()); - - final AirbyteSchemaHistoryStorage schemaHistoryStorageFromCompressedContent = AirbyteSchemaHistoryStorage.initializeDBHistory( - new SchemaHistory<>(Optional.of(Jsons.jsonNode(schemaHistoryFromUncompressedContent.schema())), - true), - true); - final SchemaHistory schemaHistoryFromCompressedContent = schemaHistoryStorageFromCompressedContent.read(); - - assertTrue(schemaHistoryFromCompressedContent.isCompressed()); - assertNotNull(schemaHistoryFromCompressedContent.schema()); - assertEquals(schemaHistoryFromUncompressedContent.schema(), schemaHistoryFromCompressedContent.schema()); - } - - @Test - public void sizeTest() throws IOException { - assertEquals(5.881045341491699, - AirbyteSchemaHistoryStorage.calculateSizeOfStringInMB(MoreResources.readResource("dbhistory_greater_than_1_mb.dat"))); - assertEquals(0.0038671493530273438, - AirbyteSchemaHistoryStorage.calculateSizeOfStringInMB(MoreResources.readResource("dbhistory_less_than_1_mb.dat"))); - } - - @Test - public void testForContentLessThan1MBLimit() throws IOException { - final String contentReadDirectlyFromFile = MoreResources.readResource("dbhistory_less_than_1_mb.dat"); - - final AirbyteSchemaHistoryStorage schemaHistoryStorageFromUncompressedContent = AirbyteSchemaHistoryStorage.initializeDBHistory( - new SchemaHistory<>(Optional.of(Jsons.jsonNode(contentReadDirectlyFromFile)), - false), - true); - final SchemaHistory schemaHistoryFromUncompressedContent = schemaHistoryStorageFromUncompressedContent.read(); - - assertFalse(schemaHistoryFromUncompressedContent.isCompressed()); - assertNotNull(schemaHistoryFromUncompressedContent.schema()); - assertEquals(contentReadDirectlyFromFile, schemaHistoryFromUncompressedContent.schema()); - - final AirbyteSchemaHistoryStorage schemaHistoryStorageFromCompressedContent = AirbyteSchemaHistoryStorage.initializeDBHistory( - new SchemaHistory<>(Optional.of(Jsons.jsonNode(schemaHistoryFromUncompressedContent.schema())), - false), - true); - final SchemaHistory schemaHistoryFromCompressedContent = schemaHistoryStorageFromCompressedContent.read(); - - assertFalse(schemaHistoryFromCompressedContent.isCompressed()); - assertNotNull(schemaHistoryFromCompressedContent.schema()); - assertEquals(schemaHistoryFromUncompressedContent.schema(), schemaHistoryFromCompressedContent.schema()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/debezium/internals/DebeziumConverterUtilsTest.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/debezium/internals/DebeziumConverterUtilsTest.java deleted file mode 100644 index 59312c8887038..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/debezium/internals/DebeziumConverterUtilsTest.java +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.debezium.internals; - -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import io.debezium.spi.converter.RelationalColumn; -import java.sql.Timestamp; -import java.time.Duration; -import java.time.LocalDate; -import java.time.LocalDateTime; -import java.time.LocalTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Disabled; -import org.junit.jupiter.api.Test; - -class DebeziumConverterUtilsTest { - - @Test - public void convertDefaultValueTest() { - - final RelationalColumn relationalColumn = mock(RelationalColumn.class); - - when(relationalColumn.isOptional()).thenReturn(true); - Object actualColumnDefaultValue = DebeziumConverterUtils.convertDefaultValue(relationalColumn); - Assertions.assertNull(actualColumnDefaultValue, "Default value for optional relational column should be null"); - - when(relationalColumn.isOptional()).thenReturn(false); - when(relationalColumn.hasDefaultValue()).thenReturn(false); - actualColumnDefaultValue = DebeziumConverterUtils.convertDefaultValue(relationalColumn); - Assertions.assertNull(actualColumnDefaultValue); - - when(relationalColumn.isOptional()).thenReturn(false); - when(relationalColumn.hasDefaultValue()).thenReturn(true); - final String expectedColumnDefaultValue = "default value"; - when(relationalColumn.defaultValue()).thenReturn(expectedColumnDefaultValue); - actualColumnDefaultValue = DebeziumConverterUtils.convertDefaultValue(relationalColumn); - Assertions.assertEquals(actualColumnDefaultValue, expectedColumnDefaultValue); - } - - @Test - public void convertLocalDate() { - final LocalDate localDate = LocalDate.of(2021, 1, 1); - - final String actual = DebeziumConverterUtils.convertDate(localDate); - Assertions.assertEquals("2021-01-01T00:00:00Z", actual); - } - - @Test - public void convertTLocalTime() { - final LocalTime localTime = LocalTime.of(8, 1, 1); - final String actual = DebeziumConverterUtils.convertDate(localTime); - Assertions.assertEquals("08:01:01", actual); - } - - @Test - public void convertLocalDateTime() { - final LocalDateTime localDateTime = LocalDateTime.of(2021, 1, 1, 8, 1, 1); - - final String actual = DebeziumConverterUtils.convertDate(localDateTime); - Assertions.assertEquals("2021-01-01T08:01:01Z", actual); - } - - @Test - @Disabled - public void convertDuration() { - final Duration duration = Duration.ofHours(100_000); - - final String actual = DebeziumConverterUtils.convertDate(duration); - Assertions.assertEquals("1981-05-29T20:00:00Z", actual); - } - - @Test - public void convertTimestamp() { - final LocalDateTime localDateTime = LocalDateTime.of(2021, 1, 1, 8, 1, 1); - final Timestamp timestamp = Timestamp.valueOf(localDateTime); - - final String actual = DebeziumConverterUtils.convertDate(timestamp); - Assertions.assertEquals("2021-01-01T08:01:01.000000Z", actual); - } - - @Test - @Disabled - public void convertNumber() { - final Number number = 100_000; - - final String actual = DebeziumConverterUtils.convertDate(number); - Assertions.assertEquals("1970-01-01T03:01:40Z", actual); - } - - @Test - public void convertStringDateFormat() { - final String stringValue = "2021-01-01T00:00:00Z"; - - final String actual = DebeziumConverterUtils.convertDate(stringValue); - Assertions.assertEquals("2021-01-01T00:00:00Z", actual); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/debezium/internals/DebeziumRecordIteratorTest.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/debezium/internals/DebeziumRecordIteratorTest.java deleted file mode 100644 index e386b100c647e..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/debezium/internals/DebeziumRecordIteratorTest.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.debezium.internals; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.Mockito.mock; - -import io.airbyte.cdk.integrations.debezium.CdcTargetPosition; -import io.debezium.engine.ChangeEvent; -import java.time.Duration; -import java.util.Collections; -import java.util.Map; -import java.util.concurrent.LinkedBlockingQueue; -import org.apache.kafka.connect.source.SourceRecord; -import org.junit.jupiter.api.Test; - -public class DebeziumRecordIteratorTest { - - @Test - public void getHeartbeatPositionTest() { - final DebeziumRecordIterator debeziumRecordIterator = new DebeziumRecordIterator<>(mock(LinkedBlockingQueue.class), - new CdcTargetPosition<>() { - - @Override - public boolean reachedTargetPosition(final ChangeEventWithMetadata changeEventWithMetadata) { - return false; - } - - @Override - public Long extractPositionFromHeartbeatOffset(final Map sourceOffset) { - return (long) sourceOffset.get("lsn"); - } - - }, - () -> false, - mock(DebeziumShutdownProcedure.class), - Duration.ZERO, - Duration.ZERO); - final Long lsn = debeziumRecordIterator.getHeartbeatPosition(new ChangeEvent() { - - private final SourceRecord sourceRecord = new SourceRecord(null, Collections.singletonMap("lsn", 358824993496L), null, null, null); - - @Override - public String key() { - return null; - } - - @Override - public String value() { - return "{\"ts_ms\":1667616934701}"; - } - - @Override - public String destination() { - return null; - } - - public SourceRecord sourceRecord() { - return sourceRecord; - } - - }); - - assertEquals(lsn, 358824993496L); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/debezium/internals/DebeziumShutdownProcedureTest.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/debezium/internals/DebeziumShutdownProcedureTest.java deleted file mode 100644 index 335d157ed2712..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/debezium/internals/DebeziumShutdownProcedureTest.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.debezium.internals; - -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.LinkedBlockingQueue; -import java.util.concurrent.atomic.AtomicInteger; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -public class DebeziumShutdownProcedureTest { - - @Test - public void test() throws InterruptedException { - final LinkedBlockingQueue sourceQueue = new LinkedBlockingQueue<>(10); - final AtomicInteger recordsInserted = new AtomicInteger(); - final ExecutorService executorService = Executors.newSingleThreadExecutor(); - final DebeziumShutdownProcedure debeziumShutdownProcedure = new DebeziumShutdownProcedure<>(sourceQueue, - executorService::shutdown, () -> recordsInserted.get() >= 99); - executorService.execute(() -> { - for (int i = 0; i < 100; i++) { - try { - sourceQueue.put(i); - recordsInserted.set(i); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - } - }); - - Thread.sleep(1000); - debeziumShutdownProcedure.initiateShutdownProcedure(); - - Assertions.assertEquals(99, recordsInserted.get()); - Assertions.assertEquals(0, sourceQueue.size()); - Assertions.assertEquals(100, debeziumShutdownProcedure.getRecordsRemainingAfterShutdown().size()); - - for (int i = 0; i < 100; i++) { - Assertions.assertEquals(i, debeziumShutdownProcedure.getRecordsRemainingAfterShutdown().poll()); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/debezium/internals/RecordWaitTimeUtilTest.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/debezium/internals/RecordWaitTimeUtilTest.java deleted file mode 100644 index 64701dd406689..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/debezium/internals/RecordWaitTimeUtilTest.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.debezium.internals; - -import static io.airbyte.cdk.integrations.debezium.internals.RecordWaitTimeUtil.MAX_FIRST_RECORD_WAIT_TIME; -import static io.airbyte.cdk.integrations.debezium.internals.RecordWaitTimeUtil.MIN_FIRST_RECORD_WAIT_TIME; -import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.json.Jsons; -import java.time.Duration; -import java.util.Collections; -import java.util.Map; -import java.util.Optional; -import org.junit.jupiter.api.Test; - -public class RecordWaitTimeUtilTest { - - @Test - void testGetFirstRecordWaitTime() { - final JsonNode emptyConfig = Jsons.jsonNode(Collections.emptyMap()); - assertDoesNotThrow(() -> RecordWaitTimeUtil.checkFirstRecordWaitTime(emptyConfig)); - assertEquals(Optional.empty(), RecordWaitTimeUtil.getFirstRecordWaitSeconds(emptyConfig)); - assertEquals(RecordWaitTimeUtil.DEFAULT_FIRST_RECORD_WAIT_TIME, RecordWaitTimeUtil.getFirstRecordWaitTime(emptyConfig)); - - final JsonNode normalConfig = Jsons.jsonNode(Map.of("replication_method", - Map.of("method", "CDC", "initial_waiting_seconds", 500))); - assertDoesNotThrow(() -> RecordWaitTimeUtil.checkFirstRecordWaitTime(normalConfig)); - assertEquals(Optional.of(500), RecordWaitTimeUtil.getFirstRecordWaitSeconds(normalConfig)); - assertEquals(Duration.ofSeconds(500), RecordWaitTimeUtil.getFirstRecordWaitTime(normalConfig)); - - final int tooShortTimeout = (int) MIN_FIRST_RECORD_WAIT_TIME.getSeconds() - 1; - final JsonNode tooShortConfig = Jsons.jsonNode(Map.of("replication_method", - Map.of("method", "CDC", "initial_waiting_seconds", tooShortTimeout))); - assertThrows(IllegalArgumentException.class, () -> RecordWaitTimeUtil.checkFirstRecordWaitTime(tooShortConfig)); - assertEquals(Optional.of(tooShortTimeout), RecordWaitTimeUtil.getFirstRecordWaitSeconds(tooShortConfig)); - assertEquals(MIN_FIRST_RECORD_WAIT_TIME, RecordWaitTimeUtil.getFirstRecordWaitTime(tooShortConfig)); - - final int tooLongTimeout = (int) MAX_FIRST_RECORD_WAIT_TIME.getSeconds() + 1; - final JsonNode tooLongConfig = Jsons.jsonNode(Map.of("replication_method", - Map.of("method", "CDC", "initial_waiting_seconds", tooLongTimeout))); - assertThrows(IllegalArgumentException.class, () -> RecordWaitTimeUtil.checkFirstRecordWaitTime(tooLongConfig)); - assertEquals(Optional.of(tooLongTimeout), RecordWaitTimeUtil.getFirstRecordWaitSeconds(tooLongConfig)); - assertEquals(MAX_FIRST_RECORD_WAIT_TIME, RecordWaitTimeUtil.getFirstRecordWaitTime(tooLongConfig)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/jdbc/DefaultJdbcSourceAcceptanceTest.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/jdbc/DefaultJdbcSourceAcceptanceTest.java deleted file mode 100644 index 5d4dcb3e68d05..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/jdbc/DefaultJdbcSourceAcceptanceTest.java +++ /dev/null @@ -1,201 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.jdbc; - -import static io.airbyte.cdk.integrations.source.jdbc.JdbcDataSourceUtils.assertCustomParametersDontOverwriteDefaultParameters; -import static org.junit.jupiter.api.Assertions.assertThrows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.cdk.db.factory.DatabaseDriver; -import io.airbyte.cdk.db.jdbc.JdbcUtils; -import io.airbyte.cdk.db.jdbc.streaming.AdaptiveStreamingQueryConfig; -import io.airbyte.cdk.integrations.base.IntegrationRunner; -import io.airbyte.cdk.integrations.base.Source; -import io.airbyte.cdk.integrations.source.jdbc.test.JdbcSourceAcceptanceTest; -import io.airbyte.cdk.integrations.util.HostPortResolver; -import io.airbyte.cdk.testutils.TestDatabase; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteStateMessage.AirbyteStateType; -import java.sql.JDBCType; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.stream.Stream; -import org.jooq.SQLDialect; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.testcontainers.containers.PostgreSQLContainer; - -/** - * Runs the acceptance tests in the source-jdbc test module. We want this module to run these tests - * itself as a sanity check. The trade off here is that this class is duplicated from the one used - * in source-postgres. - */ -class DefaultJdbcSourceAcceptanceTest - extends JdbcSourceAcceptanceTest { - - private static PostgreSQLContainer PSQL_CONTAINER; - - @BeforeAll - static void init() { - PSQL_CONTAINER = new PostgreSQLContainer<>("postgres:13-alpine"); - PSQL_CONTAINER.start(); - CREATE_TABLE_WITHOUT_CURSOR_TYPE_QUERY = "CREATE TABLE %s (%s BIT(3) NOT NULL);"; - INSERT_TABLE_WITHOUT_CURSOR_TYPE_QUERY = "INSERT INTO %s VALUES(B'101');"; - } - - @Override - protected JsonNode config() { - return testdb.testConfigBuilder().build(); - } - - @Override - protected PostgresTestSource source() { - return new PostgresTestSource(); - } - - @Override - protected BareBonesTestDatabase createTestDatabase() { - return new BareBonesTestDatabase(PSQL_CONTAINER).initialized(); - } - - @Override - public boolean supportsSchemas() { - return true; - } - - public JsonNode getConfigWithConnectionProperties(final PostgreSQLContainer psqlDb, final String dbName, final String additionalParameters) { - return Jsons.jsonNode(ImmutableMap.builder() - .put(JdbcUtils.HOST_KEY, HostPortResolver.resolveHost(psqlDb)) - .put(JdbcUtils.PORT_KEY, HostPortResolver.resolvePort(psqlDb)) - .put(JdbcUtils.DATABASE_KEY, dbName) - .put(JdbcUtils.SCHEMAS_KEY, List.of(SCHEMA_NAME)) - .put(JdbcUtils.USERNAME_KEY, psqlDb.getUsername()) - .put(JdbcUtils.PASSWORD_KEY, psqlDb.getPassword()) - .put(JdbcUtils.CONNECTION_PROPERTIES_KEY, additionalParameters) - .build()); - } - - @AfterAll - static void cleanUp() { - PSQL_CONTAINER.close(); - } - - public static class PostgresTestSource extends AbstractJdbcSource implements Source { - - private static final Logger LOGGER = LoggerFactory.getLogger(PostgresTestSource.class); - - static final String DRIVER_CLASS = DatabaseDriver.POSTGRESQL.getDriverClassName(); - - public PostgresTestSource() { - super(DRIVER_CLASS, AdaptiveStreamingQueryConfig::new, JdbcUtils.getDefaultSourceOperations()); - } - - @Override - public JsonNode toDatabaseConfig(final JsonNode config) { - final ImmutableMap.Builder configBuilder = ImmutableMap.builder() - .put(JdbcUtils.USERNAME_KEY, config.get(JdbcUtils.USERNAME_KEY).asText()) - .put(JdbcUtils.JDBC_URL_KEY, String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), - config.get(JdbcUtils.HOST_KEY).asText(), - config.get(JdbcUtils.PORT_KEY).asInt(), - config.get(JdbcUtils.DATABASE_KEY).asText())); - - if (config.has(JdbcUtils.PASSWORD_KEY)) { - configBuilder.put(JdbcUtils.PASSWORD_KEY, config.get(JdbcUtils.PASSWORD_KEY).asText()); - } - - return Jsons.jsonNode(configBuilder.build()); - } - - @Override - public Set getExcludedInternalNameSpaces() { - return Set.of("information_schema", "pg_catalog", "pg_internal", "catalog_history"); - } - - @Override - protected AirbyteStateType getSupportedStateType(final JsonNode config) { - return AirbyteStateType.STREAM; - } - - public static void main(final String[] args) throws Exception { - final Source source = new PostgresTestSource(); - LOGGER.info("starting source: {}", PostgresTestSource.class); - new IntegrationRunner(source).run(args); - LOGGER.info("completed source: {}", PostgresTestSource.class); - } - - } - - static protected class BareBonesTestDatabase - extends TestDatabase, BareBonesTestDatabase, BareBonesTestDatabase.BareBonesConfigBuilder> { - - public BareBonesTestDatabase(PostgreSQLContainer container) { - super(container); - } - - @Override - protected Stream> inContainerBootstrapCmd() { - final var sql = Stream.of( - String.format("CREATE DATABASE %s", getDatabaseName()), - String.format("CREATE USER %s PASSWORD '%s'", getUserName(), getPassword()), - String.format("GRANT ALL PRIVILEGES ON DATABASE %s TO %s", getDatabaseName(), getUserName()), - String.format("ALTER USER %s WITH SUPERUSER", getUserName())); - return Stream.of(Stream.concat( - Stream.of("psql", - "-d", getContainer().getDatabaseName(), - "-U", getContainer().getUsername(), - "-v", "ON_ERROR_STOP=1", - "-a"), - sql.flatMap(stmt -> Stream.of("-c", stmt)))); - } - - @Override - protected Stream inContainerUndoBootstrapCmd() { - return Stream.empty(); - } - - @Override - public DatabaseDriver getDatabaseDriver() { - return DatabaseDriver.POSTGRESQL; - } - - @Override - public SQLDialect getSqlDialect() { - return SQLDialect.POSTGRES; - } - - @Override - public BareBonesConfigBuilder configBuilder() { - return new BareBonesConfigBuilder(this); - } - - static protected class BareBonesConfigBuilder extends TestDatabase.ConfigBuilder { - - private BareBonesConfigBuilder(BareBonesTestDatabase testDatabase) { - super(testDatabase); - } - - } - - } - - @Test - void testCustomParametersOverwriteDefaultParametersExpectException() { - final String connectionPropertiesUrl = "ssl=false"; - final JsonNode config = getConfigWithConnectionProperties(PSQL_CONTAINER, testdb.getDatabaseName(), connectionPropertiesUrl); - final Map customParameters = JdbcUtils.parseJdbcParameters(config, JdbcUtils.CONNECTION_PROPERTIES_KEY, "&"); - final Map defaultParameters = Map.of( - "ssl", "true", - "sslmode", "require"); - assertThrows(IllegalArgumentException.class, () -> { - assertCustomParametersDontOverwriteDefaultParameters(customParameters, defaultParameters); - }); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/jdbc/DefaultJdbcStressTest.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/jdbc/DefaultJdbcStressTest.java deleted file mode 100644 index b8d6b88f23efe..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/jdbc/DefaultJdbcStressTest.java +++ /dev/null @@ -1,139 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.jdbc; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.cdk.db.factory.DatabaseDriver; -import io.airbyte.cdk.db.jdbc.JdbcUtils; -import io.airbyte.cdk.db.jdbc.streaming.AdaptiveStreamingQueryConfig; -import io.airbyte.cdk.integrations.base.IntegrationRunner; -import io.airbyte.cdk.integrations.base.Source; -import io.airbyte.cdk.integrations.source.jdbc.test.JdbcStressTest; -import io.airbyte.cdk.testutils.PostgreSQLContainerHelper; -import io.airbyte.commons.io.IOs; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.string.Strings; -import java.sql.JDBCType; -import java.util.Optional; -import java.util.Set; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Disabled; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.testcontainers.containers.PostgreSQLContainer; -import org.testcontainers.utility.MountableFile; - -/** - * Runs the stress tests in the source-jdbc test module. We want this module to run these tests - * itself as a sanity check. The trade off here is that this class is duplicated from the one used - * in source-postgres. - */ -@Disabled -class DefaultJdbcStressTest extends JdbcStressTest { - - private static PostgreSQLContainer PSQL_DB; - - private JsonNode config; - - @BeforeAll - static void init() { - PSQL_DB = new PostgreSQLContainer<>("postgres:13-alpine"); - PSQL_DB.start(); - } - - @BeforeEach - public void setup() throws Exception { - final String dbName = Strings.addRandomSuffix("db", "_", 10); - - config = Jsons.jsonNode(ImmutableMap.of(JdbcUtils.HOST_KEY, "localhost", - JdbcUtils.PORT_KEY, 5432, - JdbcUtils.DATABASE_KEY, "charles", - JdbcUtils.USERNAME_KEY, "postgres", - JdbcUtils.PASSWORD_KEY, "")); - - config = Jsons.jsonNode(ImmutableMap.builder() - .put(JdbcUtils.HOST_KEY, PSQL_DB.getHost()) - .put(JdbcUtils.PORT_KEY, PSQL_DB.getFirstMappedPort()) - .put(JdbcUtils.DATABASE_KEY, dbName) - .put(JdbcUtils.USERNAME_KEY, PSQL_DB.getUsername()) - .put(JdbcUtils.PASSWORD_KEY, PSQL_DB.getPassword()) - .build()); - - final String initScriptName = "init_" + dbName.concat(".sql"); - final String tmpFilePath = IOs.writeFileToRandomTmpDir(initScriptName, "CREATE DATABASE " + dbName + ";"); - PostgreSQLContainerHelper.runSqlScript(MountableFile.forHostPath(tmpFilePath), PSQL_DB); - - super.setup(); - } - - @Override - public Optional getDefaultSchemaName() { - return Optional.of("public"); - } - - @Override - public AbstractJdbcSource getSource() { - return new PostgresTestSource(); - } - - @Override - public JsonNode getConfig() { - return config; - } - - @Override - public String getDriverClass() { - return PostgresTestSource.DRIVER_CLASS; - } - - @AfterAll - static void cleanUp() { - PSQL_DB.close(); - } - - private static class PostgresTestSource extends AbstractJdbcSource implements Source { - - private static final Logger LOGGER = LoggerFactory.getLogger(PostgresTestSource.class); - - static final String DRIVER_CLASS = DatabaseDriver.POSTGRESQL.getDriverClassName(); - - public PostgresTestSource() { - super(DRIVER_CLASS, AdaptiveStreamingQueryConfig::new, JdbcUtils.getDefaultSourceOperations()); - } - - @Override - public JsonNode toDatabaseConfig(final JsonNode config) { - final ImmutableMap.Builder configBuilder = ImmutableMap.builder() - .put(JdbcUtils.USERNAME_KEY, config.get(JdbcUtils.USERNAME_KEY).asText()) - .put(JdbcUtils.JDBC_URL_KEY, String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), - config.get(JdbcUtils.HOST_KEY).asText(), - config.get(JdbcUtils.PORT_KEY).asInt(), - config.get(JdbcUtils.DATABASE_KEY).asText())); - - if (config.has(JdbcUtils.PASSWORD_KEY)) { - configBuilder.put(JdbcUtils.PASSWORD_KEY, config.get(JdbcUtils.PASSWORD_KEY).asText()); - } - - return Jsons.jsonNode(configBuilder.build()); - } - - @Override - public Set getExcludedInternalNameSpaces() { - return Set.of("information_schema", "pg_catalog", "pg_internal", "catalog_history"); - } - - public static void main(final String[] args) throws Exception { - final Source source = new PostgresTestSource(); - LOGGER.info("starting source: {}", PostgresTestSource.class); - new IntegrationRunner(source).run(args); - LOGGER.info("completed source: {}", PostgresTestSource.class); - } - - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/jdbc/JdbcDataSourceUtilsTest.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/jdbc/JdbcDataSourceUtilsTest.java deleted file mode 100644 index 116d122d7d318..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/jdbc/JdbcDataSourceUtilsTest.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.jdbc; - -import static org.assertj.core.api.AssertionsForClassTypes.assertThat; -import static org.junit.Assert.assertTrue; -import static org.junit.jupiter.api.Assertions.assertThrows; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.json.Jsons; -import java.util.List; -import java.util.Map; -import org.junit.jupiter.api.Test; - -public class JdbcDataSourceUtilsTest { - - @Test - void test() { - final String validConfigString = "{\"jdbc_url_params\":\"key1=val1&key3=key3\",\"connection_properties\":\"key1=val1&key2=val2\"}"; - final JsonNode validConfig = Jsons.deserialize(validConfigString); - final Map connectionProperties = JdbcDataSourceUtils.getConnectionProperties(validConfig); - final List validKeys = List.of("key1", "key2", "key3"); - validKeys.forEach(key -> assertTrue(connectionProperties.containsKey(key))); - - // For an invalid config, there is a conflict betweeen the values of keys in jdbc_url_params and - // connection_properties - final String invalidConfigString = "{\"jdbc_url_params\":\"key1=val2&key3=key3\",\"connection_properties\":\"key1=val1&key2=val2\"}"; - final JsonNode invalidConfig = Jsons.deserialize(invalidConfigString); - final Exception exception = assertThrows(IllegalArgumentException.class, () -> { - JdbcDataSourceUtils.getConnectionProperties(invalidConfig); - }); - - final String expectedMessage = "Cannot overwrite default JDBC parameter key1"; - assertThat(expectedMessage.equals(exception.getMessage())); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/jdbc/JdbcSourceStressTest.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/jdbc/JdbcSourceStressTest.java deleted file mode 100644 index 001f0b62b9c40..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/jdbc/JdbcSourceStressTest.java +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.jdbc; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.cdk.db.factory.DatabaseDriver; -import io.airbyte.cdk.db.jdbc.JdbcUtils; -import io.airbyte.cdk.db.jdbc.streaming.AdaptiveStreamingQueryConfig; -import io.airbyte.cdk.integrations.base.IntegrationRunner; -import io.airbyte.cdk.integrations.base.Source; -import io.airbyte.cdk.integrations.source.jdbc.test.JdbcStressTest; -import io.airbyte.cdk.testutils.PostgreSQLContainerHelper; -import io.airbyte.commons.io.IOs; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.string.Strings; -import java.sql.JDBCType; -import java.util.Optional; -import java.util.Set; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Disabled; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.testcontainers.containers.PostgreSQLContainer; -import org.testcontainers.utility.MountableFile; - -/** - * Runs the stress tests in the source-jdbc test module. We want this module to run these tests - * itself as a sanity check. The trade off here is that this class is duplicated from the one used - * in source-postgres. - */ -@Disabled -class JdbcSourceStressTest extends JdbcStressTest { - - private static PostgreSQLContainer PSQL_DB; - - private JsonNode config; - - @BeforeAll - static void init() { - PSQL_DB = new PostgreSQLContainer<>("postgres:13-alpine"); - PSQL_DB.start(); - } - - @BeforeEach - public void setup() throws Exception { - final String schemaName = Strings.addRandomSuffix("db", "_", 10);; - - config = Jsons.jsonNode(ImmutableMap.builder() - .put(JdbcUtils.HOST_KEY, PSQL_DB.getHost()) - .put(JdbcUtils.PORT_KEY, PSQL_DB.getFirstMappedPort()) - .put(JdbcUtils.DATABASE_KEY, schemaName) - .put(JdbcUtils.USERNAME_KEY, PSQL_DB.getUsername()) - .put(JdbcUtils.PASSWORD_KEY, PSQL_DB.getPassword()) - .build()); - - final String initScriptName = "init_" + schemaName.concat(".sql"); - final String tmpFilePath = IOs.writeFileToRandomTmpDir(initScriptName, "CREATE DATABASE " + schemaName + ";"); - PostgreSQLContainerHelper.runSqlScript(MountableFile.forHostPath(tmpFilePath), PSQL_DB); - - super.setup(); - } - - @Override - public Optional getDefaultSchemaName() { - return Optional.of("public"); - } - - @Override - public AbstractJdbcSource getSource() { - return new PostgresTestSource(); - } - - @Override - public JsonNode getConfig() { - return config; - } - - @Override - public String getDriverClass() { - return PostgresTestSource.DRIVER_CLASS; - } - - @AfterAll - static void cleanUp() { - PSQL_DB.close(); - } - - private static class PostgresTestSource extends AbstractJdbcSource implements Source { - - private static final Logger LOGGER = LoggerFactory.getLogger(PostgresTestSource.class); - - static final String DRIVER_CLASS = DatabaseDriver.POSTGRESQL.getDriverClassName(); - - public PostgresTestSource() { - super(DRIVER_CLASS, AdaptiveStreamingQueryConfig::new, JdbcUtils.getDefaultSourceOperations()); - } - - @Override - public JsonNode toDatabaseConfig(final JsonNode config) { - final ImmutableMap.Builder configBuilder = ImmutableMap.builder() - .put(JdbcUtils.USERNAME_KEY, config.get(JdbcUtils.USERNAME_KEY).asText()) - .put(JdbcUtils.JDBC_URL_KEY, String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), - config.get(JdbcUtils.HOST_KEY).asText(), - config.get(JdbcUtils.PORT_KEY).asInt(), - config.get(JdbcUtils.DATABASE_KEY).asText())); - - if (config.has(JdbcUtils.PASSWORD_KEY)) { - configBuilder.put(JdbcUtils.PASSWORD_KEY, config.get(JdbcUtils.PASSWORD_KEY).asText()); - } - - return Jsons.jsonNode(configBuilder.build()); - } - - @Override - public Set getExcludedInternalNameSpaces() { - return Set.of("information_schema", "pg_catalog", "pg_internal", "catalog_history"); - } - - public static void main(final String[] args) throws Exception { - final Source source = new PostgresTestSource(); - LOGGER.info("starting source: {}", PostgresTestSource.class); - new IntegrationRunner(source).run(args); - LOGGER.info("completed source: {}", PostgresTestSource.class); - } - - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/relationaldb/AbstractDbSourceTest.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/relationaldb/AbstractDbSourceTest.java deleted file mode 100644 index 9e7bab7177f28..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/relationaldb/AbstractDbSourceTest.java +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.relationaldb; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.Mockito.CALLS_REAL_METHODS; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.withSettings; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.source.relationaldb.state.StateGeneratorUtils; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import io.airbyte.protocol.models.v0.AirbyteStateMessage.AirbyteStateType; -import java.io.IOException; -import java.util.List; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import uk.org.webcompere.systemstubs.environment.EnvironmentVariables; -import uk.org.webcompere.systemstubs.jupiter.SystemStub; -import uk.org.webcompere.systemstubs.jupiter.SystemStubsExtension; - -/** - * Test suite for the {@link AbstractDbSource} class. - */ -@ExtendWith(SystemStubsExtension.class) -public class AbstractDbSourceTest { - - @SystemStub - private EnvironmentVariables environmentVariables; - - @Test - void testDeserializationOfLegacyState() throws IOException { - final AbstractDbSource dbSource = mock(AbstractDbSource.class, withSettings().useConstructor("").defaultAnswer(CALLS_REAL_METHODS)); - final JsonNode config = mock(JsonNode.class); - - final String legacyStateJson = MoreResources.readResource("states/legacy.json"); - final JsonNode legacyState = Jsons.deserialize(legacyStateJson); - - final List result = StateGeneratorUtils.deserializeInitialState(legacyState, - dbSource.getSupportedStateType(config)); - assertEquals(1, result.size()); - assertEquals(AirbyteStateType.LEGACY, result.get(0).getType()); - } - - @Test - void testDeserializationOfGlobalState() throws IOException { - final AbstractDbSource dbSource = mock(AbstractDbSource.class, withSettings().useConstructor("").defaultAnswer(CALLS_REAL_METHODS)); - final JsonNode config = mock(JsonNode.class); - - final String globalStateJson = MoreResources.readResource("states/global.json"); - final JsonNode globalState = Jsons.deserialize(globalStateJson); - - final List result = - StateGeneratorUtils.deserializeInitialState(globalState, dbSource.getSupportedStateType(config)); - assertEquals(1, result.size()); - assertEquals(AirbyteStateType.GLOBAL, result.get(0).getType()); - } - - @Test - void testDeserializationOfStreamState() throws IOException { - final AbstractDbSource dbSource = mock(AbstractDbSource.class, withSettings().useConstructor("").defaultAnswer(CALLS_REAL_METHODS)); - final JsonNode config = mock(JsonNode.class); - - final String streamStateJson = MoreResources.readResource("states/per_stream.json"); - final JsonNode streamState = Jsons.deserialize(streamStateJson); - - final List result = - StateGeneratorUtils.deserializeInitialState(streamState, dbSource.getSupportedStateType(config)); - assertEquals(2, result.size()); - assertEquals(AirbyteStateType.STREAM, result.get(0).getType()); - } - - @Test - void testDeserializationOfNullState() throws IOException { - final AbstractDbSource dbSource = mock(AbstractDbSource.class, withSettings().useConstructor("").defaultAnswer(CALLS_REAL_METHODS)); - final JsonNode config = mock(JsonNode.class); - - final List result = StateGeneratorUtils.deserializeInitialState(null, dbSource.getSupportedStateType(config)); - assertEquals(1, result.size()); - assertEquals(dbSource.getSupportedStateType(config), result.get(0).getType()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/relationaldb/StateDecoratingIteratorTest.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/relationaldb/StateDecoratingIteratorTest.java deleted file mode 100644 index 7bda248b89aab..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/relationaldb/StateDecoratingIteratorTest.java +++ /dev/null @@ -1,467 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.relationaldb; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; - -import com.fasterxml.jackson.databind.node.ObjectNode; -import io.airbyte.cdk.integrations.source.relationaldb.models.DbState; -import io.airbyte.cdk.integrations.source.relationaldb.models.DbStreamState; -import io.airbyte.cdk.integrations.source.relationaldb.state.StateManager; -import io.airbyte.cdk.integrations.source.relationaldb.state.StreamStateManager; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.util.MoreIterators; -import io.airbyte.protocol.models.JsonSchemaPrimitiveUtil.JsonSchemaPrimitive; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteMessage.Type; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import io.airbyte.protocol.models.v0.AirbyteStateStats; -import io.airbyte.protocol.models.v0.AirbyteStream; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.AirbyteStreamState; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.sql.SQLException; -import java.util.Collections; -import java.util.Iterator; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.testcontainers.shaded.com.google.common.collect.ImmutableMap; - -class StateDecoratingIteratorTest { - - private static final String NAMESPACE = "public"; - private static final String STREAM_NAME = "shoes"; - private static final AirbyteStreamNameNamespacePair NAME_NAMESPACE_PAIR = new AirbyteStreamNameNamespacePair(STREAM_NAME, NAMESPACE); - private static final String UUID_FIELD_NAME = "ascending_inventory_uuid"; - - private static final AirbyteMessage EMPTY_STATE_MESSAGE = createEmptyStateMessage(0.0); - - private static final String RECORD_VALUE_1 = "abc"; - private static final AirbyteMessage RECORD_MESSAGE_1 = createRecordMessage(RECORD_VALUE_1); - - private static final String RECORD_VALUE_2 = "def"; - private static final AirbyteMessage RECORD_MESSAGE_2 = createRecordMessage(RECORD_VALUE_2); - - private static final String RECORD_VALUE_3 = "ghi"; - private static final AirbyteMessage RECORD_MESSAGE_3 = createRecordMessage(RECORD_VALUE_3); - - private static final String RECORD_VALUE_4 = "jkl"; - private static final AirbyteMessage RECORD_MESSAGE_4 = createRecordMessage(RECORD_VALUE_4); - - private static final String RECORD_VALUE_5 = "xyz"; - private static final AirbyteMessage RECORD_MESSAGE_5 = createRecordMessage(RECORD_VALUE_5); - - private static AirbyteMessage createRecordMessage(final String recordValue) { - return new AirbyteMessage() - .withType(Type.RECORD) - .withRecord(new AirbyteRecordMessage() - .withData(Jsons.jsonNode(ImmutableMap.of(UUID_FIELD_NAME, recordValue)))); - } - - private static AirbyteMessage createStateMessage(final String recordValue, final long cursorRecordCount, final double statsRecordCount) { - final DbStreamState dbStreamState = new DbStreamState() - .withCursorField(Collections.singletonList(UUID_FIELD_NAME)) - .withCursor(recordValue) - .withStreamName(STREAM_NAME) - .withStreamNamespace(NAMESPACE); - if (cursorRecordCount > 0) { - dbStreamState.withCursorRecordCount(cursorRecordCount); - } - final DbState dbState = new DbState().withCdc(false).withStreams(Collections.singletonList(dbStreamState)); - return new AirbyteMessage() - .withType(Type.STATE) - .withState(new AirbyteStateMessage() - .withType(AirbyteStateMessage.AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName(STREAM_NAME).withNamespace(NAMESPACE)) - .withStreamState(Jsons.jsonNode(dbStreamState))) - .withData(Jsons.jsonNode(dbState)) - .withSourceStats(new AirbyteStateStats().withRecordCount(statsRecordCount))); - } - - private static AirbyteMessage createEmptyStateMessage(final double statsRecordCount) { - final DbStreamState dbStreamState = new DbStreamState() - .withCursorField(Collections.singletonList(UUID_FIELD_NAME)) - .withStreamName(STREAM_NAME) - .withStreamNamespace(NAMESPACE); - - final DbState dbState = new DbState().withCdc(false).withStreams(Collections.singletonList(dbStreamState)); - return new AirbyteMessage() - .withType(Type.STATE) - .withState(new AirbyteStateMessage() - .withType(AirbyteStateMessage.AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName(STREAM_NAME).withNamespace(NAMESPACE)) - .withStreamState(Jsons.jsonNode(dbStreamState))) - .withData(Jsons.jsonNode(dbState)) - .withSourceStats(new AirbyteStateStats().withRecordCount(statsRecordCount))); - } - - private Iterator createExceptionIterator() { - return new Iterator<>() { - - final Iterator internalMessageIterator = MoreIterators.of(RECORD_MESSAGE_1, RECORD_MESSAGE_2, - RECORD_MESSAGE_2, RECORD_MESSAGE_3); - - @Override - public boolean hasNext() { - return true; - } - - @Override - public AirbyteMessage next() { - if (internalMessageIterator.hasNext()) { - return internalMessageIterator.next(); - } else { - // this line throws a RunTimeException wrapped around a SQLException to mimic the flow of when a - // SQLException is thrown and wrapped in - // StreamingJdbcDatabase#tryAdvance - throw new RuntimeException(new SQLException("Connection marked broken because of SQLSTATE(080006)", "08006")); - } - } - - }; - }; - - private static Iterator messageIterator; - private StateManager stateManager; - - @BeforeEach - void setup() { - final AirbyteStream airbyteStream = new AirbyteStream().withNamespace(NAMESPACE).withName(STREAM_NAME); - final ConfiguredAirbyteStream configuredAirbyteStream = new ConfiguredAirbyteStream() - .withStream(airbyteStream) - .withCursorField(Collections.singletonList(UUID_FIELD_NAME)); - - stateManager = new StreamStateManager(Collections.emptyList(), - new ConfiguredAirbyteCatalog().withStreams(Collections.singletonList(configuredAirbyteStream))); - } - - @Test - void testWithoutInitialCursor() { - messageIterator = MoreIterators.of(RECORD_MESSAGE_1, RECORD_MESSAGE_2); - final StateDecoratingIterator iterator = new StateDecoratingIterator( - messageIterator, - stateManager, - NAME_NAMESPACE_PAIR, - UUID_FIELD_NAME, - null, - JsonSchemaPrimitive.STRING, - 0); - - assertEquals(RECORD_MESSAGE_1, iterator.next()); - assertEquals(RECORD_MESSAGE_2, iterator.next()); - assertEquals(createStateMessage(RECORD_VALUE_2, 1, 2.0), iterator.next()); - assertFalse(iterator.hasNext()); - } - - @Test - void testWithInitialCursor() { - // record 1 and 2 has smaller cursor value, so at the end, the initial cursor is emitted with 0 - // record count - - messageIterator = MoreIterators.of(RECORD_MESSAGE_1, RECORD_MESSAGE_2); - final StateDecoratingIterator iterator = new StateDecoratingIterator( - messageIterator, - stateManager, - NAME_NAMESPACE_PAIR, - UUID_FIELD_NAME, - RECORD_VALUE_5, - JsonSchemaPrimitive.STRING, - 0); - - assertEquals(RECORD_MESSAGE_1, iterator.next()); - assertEquals(RECORD_MESSAGE_2, iterator.next()); - assertEquals(createStateMessage(RECORD_VALUE_5, 0, 2.0), iterator.next()); - assertFalse(iterator.hasNext()); - } - - @Test - void testCursorFieldIsEmpty() { - final AirbyteMessage recordMessage = Jsons.clone(RECORD_MESSAGE_1); - ((ObjectNode) recordMessage.getRecord().getData()).remove(UUID_FIELD_NAME); - final Iterator messageStream = MoreIterators.of(recordMessage); - - final StateDecoratingIterator iterator = new StateDecoratingIterator( - messageStream, - stateManager, - NAME_NAMESPACE_PAIR, - UUID_FIELD_NAME, - null, - JsonSchemaPrimitive.STRING, - 0); - - assertEquals(recordMessage, iterator.next()); - // null because no records with a cursor field were replicated for the stream. - assertEquals(createEmptyStateMessage(1.0), iterator.next()); - assertFalse(iterator.hasNext()); - } - - @Test - void testIteratorCatchesExceptionWhenEmissionFrequencyNonZero() { - final Iterator exceptionIterator = createExceptionIterator(); - final StateDecoratingIterator iterator = new StateDecoratingIterator( - exceptionIterator, - stateManager, - NAME_NAMESPACE_PAIR, - UUID_FIELD_NAME, - RECORD_VALUE_1, - JsonSchemaPrimitive.STRING, - 1); - assertEquals(RECORD_MESSAGE_1, iterator.next()); - assertEquals(RECORD_MESSAGE_2, iterator.next()); - // continues to emit RECORD_MESSAGE_2 since cursorField has not changed thus not satisfying the - // condition of "ready" - assertEquals(RECORD_MESSAGE_2, iterator.next()); - assertEquals(RECORD_MESSAGE_3, iterator.next()); - // emits the first state message since the iterator has changed cursorFields (2 -> 3) and met the - // frequency minimum of 1 record - assertEquals(createStateMessage(RECORD_VALUE_2, 2, 4.0), iterator.next()); - // no further records to read since Exception was caught above and marked iterator as endOfData() - assertFalse(iterator.hasNext()); - } - - @Test - void testIteratorCatchesExceptionWhenEmissionFrequencyZero() { - final Iterator exceptionIterator = createExceptionIterator(); - final StateDecoratingIterator iterator = new StateDecoratingIterator( - exceptionIterator, - stateManager, - NAME_NAMESPACE_PAIR, - UUID_FIELD_NAME, - RECORD_VALUE_1, - JsonSchemaPrimitive.STRING, - 0); - assertEquals(RECORD_MESSAGE_1, iterator.next()); - assertEquals(RECORD_MESSAGE_2, iterator.next()); - assertEquals(RECORD_MESSAGE_2, iterator.next()); - assertEquals(RECORD_MESSAGE_3, iterator.next()); - // since stateEmission is not set to emit frequently, this will catch the error but not emit state - // message since it wasn't in a ready state - // of having a frequency > 0 but will prevent an exception from causing the iterator to fail by - // marking iterator as endOfData() - assertFalse(iterator.hasNext()); - } - - @Test - void testEmptyStream() { - final StateDecoratingIterator iterator = new StateDecoratingIterator( - Collections.emptyIterator(), - stateManager, - NAME_NAMESPACE_PAIR, - UUID_FIELD_NAME, - null, - JsonSchemaPrimitive.STRING, - 0); - - assertEquals(EMPTY_STATE_MESSAGE, iterator.next()); - assertFalse(iterator.hasNext()); - } - - @Test - void testUnicodeNull() { - final String recordValueWithNull = "abc\u0000"; - final AirbyteMessage recordMessageWithNull = createRecordMessage(recordValueWithNull); - - // UTF8 null \u0000 is removed from the cursor value in the state message - - messageIterator = MoreIterators.of(recordMessageWithNull); - - final StateDecoratingIterator iterator = new StateDecoratingIterator( - messageIterator, - stateManager, - NAME_NAMESPACE_PAIR, - UUID_FIELD_NAME, - null, - JsonSchemaPrimitive.STRING, - 0); - - assertEquals(recordMessageWithNull, iterator.next()); - assertEquals(createStateMessage(RECORD_VALUE_1, 1, 1.0), iterator.next()); - assertFalse(iterator.hasNext()); - } - - @Test - // When initial cursor is null, and emit state for every record - void testStateEmissionFrequency1() { - messageIterator = MoreIterators.of(RECORD_MESSAGE_1, RECORD_MESSAGE_2, RECORD_MESSAGE_3, RECORD_MESSAGE_4, RECORD_MESSAGE_5); - final StateDecoratingIterator iterator1 = new StateDecoratingIterator( - messageIterator, - stateManager, - NAME_NAMESPACE_PAIR, - UUID_FIELD_NAME, - null, - JsonSchemaPrimitive.STRING, - 1); - - assertEquals(RECORD_MESSAGE_1, iterator1.next()); - // should emit state 1, but it is unclear whether there will be more - // records with the same cursor value, so no state is ready for emission - assertEquals(RECORD_MESSAGE_2, iterator1.next()); - // emit state 1 because it is the latest state ready for emission - assertEquals(createStateMessage(RECORD_VALUE_1, 1, 2.0), iterator1.next()); - assertEquals(RECORD_MESSAGE_3, iterator1.next()); - assertEquals(createStateMessage(RECORD_VALUE_2, 1, 1.0), iterator1.next()); - assertEquals(RECORD_MESSAGE_4, iterator1.next()); - assertEquals(createStateMessage(RECORD_VALUE_3, 1, 1.0), iterator1.next()); - assertEquals(RECORD_MESSAGE_5, iterator1.next()); - // state 4 is not emitted because there is no more record and only - // the final state should be emitted at this point; also the final - // state should only be emitted once - assertEquals(createStateMessage(RECORD_VALUE_5, 1, 1.0), iterator1.next()); - assertFalse(iterator1.hasNext()); - } - - @Test - // When initial cursor is null, and emit state for every 2 records - void testStateEmissionFrequency2() { - messageIterator = MoreIterators.of(RECORD_MESSAGE_1, RECORD_MESSAGE_2, RECORD_MESSAGE_3, RECORD_MESSAGE_4, RECORD_MESSAGE_5); - final StateDecoratingIterator iterator1 = new StateDecoratingIterator( - messageIterator, - stateManager, - NAME_NAMESPACE_PAIR, - UUID_FIELD_NAME, - null, - JsonSchemaPrimitive.STRING, - 2); - - assertEquals(RECORD_MESSAGE_1, iterator1.next()); - assertEquals(RECORD_MESSAGE_2, iterator1.next()); - // emit state 1 because it is the latest state ready for emission - assertEquals(createStateMessage(RECORD_VALUE_1, 1, 2.0), iterator1.next()); - assertEquals(RECORD_MESSAGE_3, iterator1.next()); - assertEquals(RECORD_MESSAGE_4, iterator1.next()); - // emit state 3 because it is the latest state ready for emission - assertEquals(createStateMessage(RECORD_VALUE_3, 1, 2.0), iterator1.next()); - assertEquals(RECORD_MESSAGE_5, iterator1.next()); - assertEquals(createStateMessage(RECORD_VALUE_5, 1, 1.0), iterator1.next()); - assertFalse(iterator1.hasNext()); - } - - @Test - // When initial cursor is not null - void testStateEmissionWhenInitialCursorIsNotNull() { - messageIterator = MoreIterators.of(RECORD_MESSAGE_2, RECORD_MESSAGE_3, RECORD_MESSAGE_4, RECORD_MESSAGE_5); - final StateDecoratingIterator iterator1 = new StateDecoratingIterator( - messageIterator, - stateManager, - NAME_NAMESPACE_PAIR, - UUID_FIELD_NAME, - RECORD_VALUE_1, - JsonSchemaPrimitive.STRING, - 1); - - assertEquals(RECORD_MESSAGE_2, iterator1.next()); - assertEquals(RECORD_MESSAGE_3, iterator1.next()); - assertEquals(createStateMessage(RECORD_VALUE_2, 1, 2.0), iterator1.next()); - assertEquals(RECORD_MESSAGE_4, iterator1.next()); - assertEquals(createStateMessage(RECORD_VALUE_3, 1, 1.0), iterator1.next()); - assertEquals(RECORD_MESSAGE_5, iterator1.next()); - assertEquals(createStateMessage(RECORD_VALUE_5, 1, 1.0), iterator1.next()); - assertFalse(iterator1.hasNext()); - } - - /** - * Incremental syncs will sort the table with the cursor field, and emit the max cursor for every N - * records. The purpose is to emit the states frequently, so that if any transient failure occurs - * during a long sync, the next run does not need to start from the beginning, but can resume from - * the last successful intermediate state committed on the destination. The next run will start with - * `cursorField > cursor`. However, it is possible that there are multiple records with the same - * cursor value. If the intermediate state is emitted before all these records have been synced to - * the destination, some of these records may be lost. - *

    - * Here is an example: - * - *

    -   * | Record ID | Cursor Field | Other Field | Note                          |
    -   * | --------- | ------------ | ----------- | ----------------------------- |
    -   * | 1         | F1=16        | F2="abc"    |                               |
    -   * | 2         | F1=16        | F2="def"    | <- state emission and failure |
    -   * | 3         | F1=16        | F2="ghi"    |                               |
    -   * 
    - * - * If the intermediate state is emitted for record 2 and the sync fails immediately such that the - * cursor value `16` is committed, but only record 1 and 2 are actually synced, the next run will - * start with `F1 > 16` and skip record 3. - *

    - * So intermediate state emission should only happen when all records with the same cursor value has - * been synced to destination. Reference: - * link - */ - @Test - // When there are multiple records with the same cursor value - void testStateEmissionForRecordsSharingSameCursorValue() { - - messageIterator = MoreIterators.of( - RECORD_MESSAGE_2, RECORD_MESSAGE_2, - RECORD_MESSAGE_3, RECORD_MESSAGE_3, RECORD_MESSAGE_3, - RECORD_MESSAGE_4, - RECORD_MESSAGE_5, RECORD_MESSAGE_5); - final StateDecoratingIterator iterator1 = new StateDecoratingIterator( - messageIterator, - stateManager, - NAME_NAMESPACE_PAIR, - UUID_FIELD_NAME, - RECORD_VALUE_1, - JsonSchemaPrimitive.STRING, - 1); - - assertEquals(RECORD_MESSAGE_2, iterator1.next()); - assertEquals(RECORD_MESSAGE_2, iterator1.next()); - assertEquals(RECORD_MESSAGE_3, iterator1.next()); - // state 2 is the latest state ready for emission because - // all records with the same cursor value have been emitted - assertEquals(createStateMessage(RECORD_VALUE_2, 2, 3.0), iterator1.next()); - assertEquals(RECORD_MESSAGE_3, iterator1.next()); - assertEquals(RECORD_MESSAGE_3, iterator1.next()); - assertEquals(RECORD_MESSAGE_4, iterator1.next()); - assertEquals(createStateMessage(RECORD_VALUE_3, 3, 3.0), iterator1.next()); - assertEquals(RECORD_MESSAGE_5, iterator1.next()); - assertEquals(createStateMessage(RECORD_VALUE_4, 1, 1.0), iterator1.next()); - assertEquals(RECORD_MESSAGE_5, iterator1.next()); - assertEquals(createStateMessage(RECORD_VALUE_5, 2, 1.0), iterator1.next()); - assertFalse(iterator1.hasNext()); - } - - @Test - void testStateEmissionForRecordsSharingSameCursorValueButDifferentStatsCount() { - messageIterator = MoreIterators.of( - RECORD_MESSAGE_2, RECORD_MESSAGE_2, - RECORD_MESSAGE_2, RECORD_MESSAGE_2, - RECORD_MESSAGE_3, RECORD_MESSAGE_3, RECORD_MESSAGE_3, - RECORD_MESSAGE_3, - RECORD_MESSAGE_3, RECORD_MESSAGE_3, RECORD_MESSAGE_3); - final StateDecoratingIterator iterator1 = new StateDecoratingIterator( - messageIterator, - stateManager, - NAME_NAMESPACE_PAIR, - UUID_FIELD_NAME, - RECORD_VALUE_1, - JsonSchemaPrimitive.STRING, - 10); - - assertEquals(RECORD_MESSAGE_2, iterator1.next()); - assertEquals(RECORD_MESSAGE_2, iterator1.next()); - assertEquals(RECORD_MESSAGE_2, iterator1.next()); - assertEquals(RECORD_MESSAGE_2, iterator1.next()); - assertEquals(RECORD_MESSAGE_3, iterator1.next()); - assertEquals(RECORD_MESSAGE_3, iterator1.next()); - assertEquals(RECORD_MESSAGE_3, iterator1.next()); - assertEquals(RECORD_MESSAGE_3, iterator1.next()); - assertEquals(RECORD_MESSAGE_3, iterator1.next()); - assertEquals(RECORD_MESSAGE_3, iterator1.next()); - // state 2 is the latest state ready for emission because - // all records with the same cursor value have been emitted - assertEquals(createStateMessage(RECORD_VALUE_2, 4, 10.0), iterator1.next()); - assertEquals(RECORD_MESSAGE_3, iterator1.next()); - assertEquals(createStateMessage(RECORD_VALUE_3, 7, 1.0), iterator1.next()); - assertFalse(iterator1.hasNext()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/relationaldb/state/CursorManagerTest.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/relationaldb/state/CursorManagerTest.java deleted file mode 100644 index 9f5dccbed7fc3..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/relationaldb/state/CursorManagerTest.java +++ /dev/null @@ -1,159 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.relationaldb.state; - -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateTestConstants.CURSOR; -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateTestConstants.CURSOR_FIELD1; -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateTestConstants.CURSOR_FIELD2; -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateTestConstants.CURSOR_RECORD_COUNT; -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateTestConstants.NAME_NAMESPACE_PAIR1; -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateTestConstants.NAME_NAMESPACE_PAIR2; -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateTestConstants.getCatalog; -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateTestConstants.getState; -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateTestConstants.getStream; -import static org.junit.jupiter.api.Assertions.assertEquals; - -import io.airbyte.cdk.integrations.source.relationaldb.CursorInfo; -import io.airbyte.cdk.integrations.source.relationaldb.models.DbStreamState; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import java.util.Collections; -import java.util.Optional; -import java.util.function.Function; -import org.junit.jupiter.api.Test; - -/** - * Test suite for the {@link CursorManager} class. - */ -public class CursorManagerTest { - - private static final Function CURSOR_RECORD_COUNT_FUNCTION = stream -> { - if (stream.getCursorRecordCount() != null) { - return stream.getCursorRecordCount(); - } else { - return 0L; - } - }; - - @Test - void testCreateCursorInfoCatalogAndStateSameCursorField() { - final CursorManager cursorManager = createCursorManager(CURSOR_FIELD1, CURSOR, NAME_NAMESPACE_PAIR1); - final CursorInfo actual = cursorManager.createCursorInfoForStream( - NAME_NAMESPACE_PAIR1, - getState(CURSOR_FIELD1, CURSOR, CURSOR_RECORD_COUNT), - getStream(CURSOR_FIELD1), - DbStreamState::getCursor, - DbStreamState::getCursorField, - CURSOR_RECORD_COUNT_FUNCTION); - assertEquals(new CursorInfo(CURSOR_FIELD1, CURSOR, CURSOR_RECORD_COUNT, CURSOR_FIELD1, CURSOR, CURSOR_RECORD_COUNT), actual); - } - - @Test - void testCreateCursorInfoCatalogAndStateSameCursorFieldButNoCursor() { - final CursorManager cursorManager = createCursorManager(CURSOR_FIELD1, null, NAME_NAMESPACE_PAIR1); - final CursorInfo actual = cursorManager.createCursorInfoForStream( - NAME_NAMESPACE_PAIR1, - getState(CURSOR_FIELD1, null), - getStream(CURSOR_FIELD1), - DbStreamState::getCursor, - DbStreamState::getCursorField, - CURSOR_RECORD_COUNT_FUNCTION); - assertEquals(new CursorInfo(CURSOR_FIELD1, null, CURSOR_FIELD1, null), actual); - } - - @Test - void testCreateCursorInfoCatalogAndStateChangeInCursorFieldName() { - final CursorManager cursorManager = createCursorManager(CURSOR_FIELD1, CURSOR, NAME_NAMESPACE_PAIR1); - final CursorInfo actual = cursorManager.createCursorInfoForStream( - NAME_NAMESPACE_PAIR1, - getState(CURSOR_FIELD1, CURSOR), - getStream(CURSOR_FIELD2), - DbStreamState::getCursor, - DbStreamState::getCursorField, - CURSOR_RECORD_COUNT_FUNCTION); - assertEquals(new CursorInfo(CURSOR_FIELD1, CURSOR, CURSOR_FIELD2, null), actual); - } - - @Test - void testCreateCursorInfoCatalogAndNoState() { - final CursorManager cursorManager = createCursorManager(CURSOR_FIELD1, CURSOR, NAME_NAMESPACE_PAIR1); - final CursorInfo actual = cursorManager.createCursorInfoForStream( - NAME_NAMESPACE_PAIR1, - Optional.empty(), - getStream(CURSOR_FIELD1), - DbStreamState::getCursor, - DbStreamState::getCursorField, - CURSOR_RECORD_COUNT_FUNCTION); - assertEquals(new CursorInfo(null, null, CURSOR_FIELD1, null), actual); - } - - @Test - void testCreateCursorInfoStateAndNoCatalog() { - final CursorManager cursorManager = createCursorManager(CURSOR_FIELD1, CURSOR, NAME_NAMESPACE_PAIR1); - final CursorInfo actual = cursorManager.createCursorInfoForStream( - NAME_NAMESPACE_PAIR1, - getState(CURSOR_FIELD1, CURSOR), - Optional.empty(), - DbStreamState::getCursor, - DbStreamState::getCursorField, - CURSOR_RECORD_COUNT_FUNCTION); - assertEquals(new CursorInfo(CURSOR_FIELD1, CURSOR, null, null), actual); - } - - // this is what full refresh looks like. - @Test - void testCreateCursorInfoNoCatalogAndNoState() { - final CursorManager cursorManager = createCursorManager(CURSOR_FIELD1, CURSOR, NAME_NAMESPACE_PAIR1); - final CursorInfo actual = cursorManager.createCursorInfoForStream( - NAME_NAMESPACE_PAIR1, - Optional.empty(), - Optional.empty(), - DbStreamState::getCursor, - DbStreamState::getCursorField, - CURSOR_RECORD_COUNT_FUNCTION); - assertEquals(new CursorInfo(null, null, null, null), actual); - } - - @Test - void testCreateCursorInfoStateAndCatalogButNoCursorField() { - final CursorManager cursorManager = createCursorManager(CURSOR_FIELD1, CURSOR, NAME_NAMESPACE_PAIR1); - final CursorInfo actual = cursorManager.createCursorInfoForStream( - NAME_NAMESPACE_PAIR1, - getState(CURSOR_FIELD1, CURSOR), - getStream(null), - DbStreamState::getCursor, - DbStreamState::getCursorField, - CURSOR_RECORD_COUNT_FUNCTION); - assertEquals(new CursorInfo(CURSOR_FIELD1, CURSOR, null, null), actual); - } - - @Test - void testGetters() { - final CursorManager cursorManager = createCursorManager(CURSOR_FIELD1, CURSOR, NAME_NAMESPACE_PAIR1); - final CursorInfo actualCursorInfo = new CursorInfo(CURSOR_FIELD1, CURSOR, null, null); - - assertEquals(Optional.of(actualCursorInfo), cursorManager.getCursorInfo(NAME_NAMESPACE_PAIR1)); - assertEquals(Optional.empty(), cursorManager.getCursorField(NAME_NAMESPACE_PAIR1)); - assertEquals(Optional.empty(), cursorManager.getCursor(NAME_NAMESPACE_PAIR1)); - - assertEquals(Optional.empty(), cursorManager.getCursorInfo(NAME_NAMESPACE_PAIR2)); - assertEquals(Optional.empty(), cursorManager.getCursorField(NAME_NAMESPACE_PAIR2)); - assertEquals(Optional.empty(), cursorManager.getCursor(NAME_NAMESPACE_PAIR2)); - } - - private CursorManager createCursorManager(final String cursorField, - final String cursor, - final AirbyteStreamNameNamespacePair nameNamespacePair) { - final DbStreamState dbStreamState = getState(cursorField, cursor).get(); - return new CursorManager<>( - getCatalog(cursorField).orElse(null), - () -> Collections.singleton(dbStreamState), - DbStreamState::getCursor, - DbStreamState::getCursorField, - CURSOR_RECORD_COUNT_FUNCTION, - s -> nameNamespacePair, - false); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/relationaldb/state/GlobalStateManagerTest.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/relationaldb/state/GlobalStateManagerTest.java deleted file mode 100644 index beee9c73aa897..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/relationaldb/state/GlobalStateManagerTest.java +++ /dev/null @@ -1,247 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.relationaldb.state; - -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateTestConstants.CURSOR_FIELD1; -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateTestConstants.CURSOR_FIELD2; -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateTestConstants.NAMESPACE; -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateTestConstants.NAME_NAMESPACE_PAIR1; -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateTestConstants.STREAM_NAME1; -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateTestConstants.STREAM_NAME2; -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateTestConstants.STREAM_NAME3; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.mock; - -import io.airbyte.cdk.integrations.source.relationaldb.models.CdcState; -import io.airbyte.cdk.integrations.source.relationaldb.models.DbState; -import io.airbyte.cdk.integrations.source.relationaldb.models.DbStreamState; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteGlobalState; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import io.airbyte.protocol.models.v0.AirbyteStateMessage.AirbyteStateType; -import io.airbyte.protocol.models.v0.AirbyteStream; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.AirbyteStreamState; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.util.Collections; -import java.util.Comparator; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.stream.Collectors; -import org.junit.jupiter.api.Disabled; -import org.junit.jupiter.api.Test; - -/** - * Test suite for the {@link GlobalStateManager} class. - */ -public class GlobalStateManagerTest { - - @Test - void testCdcStateManager() { - final ConfiguredAirbyteCatalog catalog = mock(ConfiguredAirbyteCatalog.class); - final CdcState cdcState = new CdcState().withState(Jsons.jsonNode(Map.of("foo", "bar", "baz", 5))); - final AirbyteGlobalState globalState = new AirbyteGlobalState().withSharedState(Jsons.jsonNode(cdcState)) - .withStreamStates(List.of(new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withNamespace("namespace").withName("name")) - .withStreamState(Jsons.jsonNode(new DbStreamState())))); - final StateManager stateManager = - new GlobalStateManager(new AirbyteStateMessage().withType(AirbyteStateType.GLOBAL).withGlobal(globalState), catalog); - assertNotNull(stateManager.getCdcStateManager()); - assertEquals(cdcState, stateManager.getCdcStateManager().getCdcState()); - assertEquals(1, stateManager.getCdcStateManager().getInitialStreamsSynced().size()); - assertTrue(stateManager.getCdcStateManager().getInitialStreamsSynced().contains(new AirbyteStreamNameNamespacePair("name", "namespace"))); - } - - @Test - void testToStateFromLegacyState() { - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog() - .withStreams(List.of( - new ConfiguredAirbyteStream() - .withStream(new AirbyteStream().withName(STREAM_NAME1).withNamespace(NAMESPACE)) - .withCursorField(List.of(CURSOR_FIELD1)), - new ConfiguredAirbyteStream() - .withStream(new AirbyteStream().withName(STREAM_NAME2).withNamespace(NAMESPACE)) - .withCursorField(List.of(CURSOR_FIELD2)), - new ConfiguredAirbyteStream() - .withStream(new AirbyteStream().withName(STREAM_NAME3).withNamespace(NAMESPACE)))); - - final CdcState cdcState = new CdcState().withState(Jsons.jsonNode(Map.of("foo", "bar", "baz", 5))); - final DbState dbState = new DbState() - .withCdc(true) - .withCdcState(cdcState) - .withStreams(List.of( - new DbStreamState() - .withStreamName(STREAM_NAME1) - .withStreamNamespace(NAMESPACE) - .withCursorField(List.of(CURSOR_FIELD1)) - .withCursor("a"), - new DbStreamState() - .withStreamName(STREAM_NAME2) - .withStreamNamespace(NAMESPACE) - .withCursorField(List.of(CURSOR_FIELD2)), - new DbStreamState() - .withStreamName(STREAM_NAME3) - .withStreamNamespace(NAMESPACE)) - .stream().sorted(Comparator.comparing(DbStreamState::getStreamName)).collect(Collectors.toList())); - final StateManager stateManager = new GlobalStateManager(new AirbyteStateMessage().withData(Jsons.jsonNode(dbState)), catalog); - - final long expectedRecordCount = 19L; - final DbState expectedDbState = new DbState() - .withCdc(true) - .withCdcState(cdcState) - .withStreams(List.of( - new DbStreamState() - .withStreamName(STREAM_NAME1) - .withStreamNamespace(NAMESPACE) - .withCursorField(List.of(CURSOR_FIELD1)) - .withCursor("a") - .withCursorRecordCount(expectedRecordCount), - new DbStreamState() - .withStreamName(STREAM_NAME2) - .withStreamNamespace(NAMESPACE) - .withCursorField(List.of(CURSOR_FIELD2)), - new DbStreamState() - .withStreamName(STREAM_NAME3) - .withStreamNamespace(NAMESPACE)) - .stream().sorted(Comparator.comparing(DbStreamState::getStreamName)).collect(Collectors.toList())); - - final AirbyteGlobalState expectedGlobalState = new AirbyteGlobalState() - .withSharedState(Jsons.jsonNode(cdcState)) - .withStreamStates(List.of( - new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName(STREAM_NAME1).withNamespace(NAMESPACE)) - .withStreamState(Jsons.jsonNode(new DbStreamState() - .withStreamName(STREAM_NAME1) - .withStreamNamespace(NAMESPACE) - .withCursorField(List.of(CURSOR_FIELD1)) - .withCursor("a") - .withCursorRecordCount(expectedRecordCount))), - new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName(STREAM_NAME2).withNamespace(NAMESPACE)) - .withStreamState(Jsons.jsonNode(new DbStreamState() - .withStreamName(STREAM_NAME2) - .withStreamNamespace(NAMESPACE) - .withCursorField(List.of(CURSOR_FIELD2)))), - new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName(STREAM_NAME3).withNamespace(NAMESPACE)) - .withStreamState(Jsons.jsonNode(new DbStreamState() - .withStreamName(STREAM_NAME3) - .withStreamNamespace(NAMESPACE)))) - .stream().sorted(Comparator.comparing(o -> o.getStreamDescriptor().getName())).collect(Collectors.toList())); - final AirbyteStateMessage expected = new AirbyteStateMessage() - .withData(Jsons.jsonNode(expectedDbState)) - .withGlobal(expectedGlobalState) - .withType(AirbyteStateType.GLOBAL); - - final AirbyteStateMessage actualFirstEmission = stateManager.updateAndEmit(NAME_NAMESPACE_PAIR1, "a", expectedRecordCount); - assertEquals(expected, actualFirstEmission); - } - - // Discovered during CDK migration. - // Failure is: Could not find cursor information for stream: public_cars - @Disabled("Failing test.") - @Test - void testToState() { - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog() - .withStreams(List.of( - new ConfiguredAirbyteStream() - .withStream(new AirbyteStream().withName(STREAM_NAME1).withNamespace(NAMESPACE)) - .withCursorField(List.of(CURSOR_FIELD1)), - new ConfiguredAirbyteStream() - .withStream(new AirbyteStream().withName(STREAM_NAME2).withNamespace(NAMESPACE)) - .withCursorField(List.of(CURSOR_FIELD2)), - new ConfiguredAirbyteStream() - .withStream(new AirbyteStream().withName(STREAM_NAME3).withNamespace(NAMESPACE)))); - - final CdcState cdcState = new CdcState().withState(Jsons.jsonNode(Map.of("foo", "bar", "baz", 5))); - final AirbyteGlobalState globalState = new AirbyteGlobalState().withSharedState(Jsons.jsonNode(new DbState())).withStreamStates( - List.of(new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor()).withStreamState(Jsons.jsonNode(new DbStreamState())))); - final StateManager stateManager = - new GlobalStateManager(new AirbyteStateMessage().withType(AirbyteStateType.GLOBAL).withGlobal(globalState), catalog); - stateManager.getCdcStateManager().setCdcState(cdcState); - - final DbState expectedDbState = new DbState() - .withCdc(true) - .withCdcState(cdcState) - .withStreams(List.of( - new DbStreamState() - .withStreamName(STREAM_NAME1) - .withStreamNamespace(NAMESPACE) - .withCursorField(List.of(CURSOR_FIELD1)) - .withCursor("a") - .withCursorRecordCount(1L), - new DbStreamState() - .withStreamName(STREAM_NAME2) - .withStreamNamespace(NAMESPACE) - .withCursorField(List.of(CURSOR_FIELD2)), - new DbStreamState() - .withStreamName(STREAM_NAME3) - .withStreamNamespace(NAMESPACE)) - .stream().sorted(Comparator.comparing(DbStreamState::getStreamName)).collect(Collectors.toList())); - - final AirbyteGlobalState expectedGlobalState = new AirbyteGlobalState() - .withSharedState(Jsons.jsonNode(cdcState)) - .withStreamStates(List.of( - new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName(STREAM_NAME1).withNamespace(NAMESPACE)) - .withStreamState(Jsons.jsonNode(new DbStreamState() - .withStreamName(STREAM_NAME1) - .withStreamNamespace(NAMESPACE) - .withCursorField(List.of(CURSOR_FIELD1)) - .withCursor("a") - .withCursorRecordCount(1L))), - new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName(STREAM_NAME2).withNamespace(NAMESPACE)) - .withStreamState(Jsons.jsonNode(new DbStreamState() - .withStreamName(STREAM_NAME2) - .withStreamNamespace(NAMESPACE) - .withCursorField(List.of(CURSOR_FIELD2)))), - new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName(STREAM_NAME3).withNamespace(NAMESPACE)) - .withStreamState(Jsons.jsonNode(new DbStreamState() - .withStreamName(STREAM_NAME3) - .withStreamNamespace(NAMESPACE)))) - .stream().sorted(Comparator.comparing(o -> o.getStreamDescriptor().getName())).collect(Collectors.toList())); - final AirbyteStateMessage expected = new AirbyteStateMessage() - .withData(Jsons.jsonNode(expectedDbState)) - .withGlobal(expectedGlobalState) - .withType(AirbyteStateType.GLOBAL); - - final AirbyteStateMessage actualFirstEmission = stateManager.updateAndEmit(NAME_NAMESPACE_PAIR1, "a", 1L); - assertEquals(expected, actualFirstEmission); - } - - @Test - void testToStateWithNoState() { - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog(); - final StateManager stateManager = - new GlobalStateManager(new AirbyteStateMessage(), catalog); - - final AirbyteStateMessage airbyteStateMessage = stateManager.toState(Optional.empty()); - assertNotNull(airbyteStateMessage); - assertEquals(AirbyteStateType.GLOBAL, airbyteStateMessage.getType()); - assertEquals(0, airbyteStateMessage.getGlobal().getStreamStates().size()); - } - - @Test - void testCdcStateManagerLegacyState() { - final ConfiguredAirbyteCatalog catalog = mock(ConfiguredAirbyteCatalog.class); - final CdcState cdcState = new CdcState().withState(Jsons.jsonNode(Map.of("foo", "bar", "baz", 5))); - final DbState dbState = new DbState().withCdcState(new CdcState().withState(Jsons.jsonNode(cdcState))) - .withStreams(List - .of(new DbStreamState().withStreamName("name").withStreamNamespace("namespace").withCursor("").withCursorField(Collections.emptyList()))) - .withCdc(true); - final StateManager stateManager = - new GlobalStateManager(new AirbyteStateMessage().withType(AirbyteStateType.LEGACY).withData(Jsons.jsonNode(dbState)), catalog); - assertNotNull(stateManager.getCdcStateManager()); - assertEquals(1, stateManager.getCdcStateManager().getInitialStreamsSynced().size()); - assertTrue(stateManager.getCdcStateManager().getInitialStreamsSynced().contains(new AirbyteStreamNameNamespacePair("name", "namespace"))); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/relationaldb/state/LegacyStateManagerTest.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/relationaldb/state/LegacyStateManagerTest.java deleted file mode 100644 index 25214d1c77018..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/relationaldb/state/LegacyStateManagerTest.java +++ /dev/null @@ -1,181 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.relationaldb.state; - -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateTestConstants.CURSOR; -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateTestConstants.CURSOR_FIELD1; -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateTestConstants.CURSOR_FIELD2; -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateTestConstants.NAMESPACE; -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateTestConstants.NAME_NAMESPACE_PAIR1; -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateTestConstants.NAME_NAMESPACE_PAIR2; -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateTestConstants.STREAM_NAME1; -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateTestConstants.STREAM_NAME2; -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateTestConstants.STREAM_NAME3; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.mockito.Mockito.mock; - -import io.airbyte.cdk.integrations.source.relationaldb.models.CdcState; -import io.airbyte.cdk.integrations.source.relationaldb.models.DbState; -import io.airbyte.cdk.integrations.source.relationaldb.models.DbStreamState; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import io.airbyte.protocol.models.v0.AirbyteStateMessage.AirbyteStateType; -import io.airbyte.protocol.models.v0.AirbyteStream; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import java.util.Comparator; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.stream.Collectors; -import org.junit.jupiter.api.Test; - -/** - * Test suite for the {@link LegacyStateManager} class. - */ -public class LegacyStateManagerTest { - - @Test - void testGetters() { - final DbState state = new DbState().withStreams(List.of( - new DbStreamState().withStreamName(STREAM_NAME1).withStreamNamespace(NAMESPACE).withCursorField(List.of(CURSOR_FIELD1)) - .withCursor(CURSOR), - new DbStreamState().withStreamName(STREAM_NAME2).withStreamNamespace(NAMESPACE))); - - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog() - .withStreams(List.of( - new ConfiguredAirbyteStream() - .withStream(new AirbyteStream().withName(STREAM_NAME1).withNamespace(NAMESPACE)) - .withCursorField(List.of(CURSOR_FIELD1)), - new ConfiguredAirbyteStream() - .withStream(new AirbyteStream().withName(STREAM_NAME2).withNamespace(NAMESPACE)))); - - final StateManager stateManager = new LegacyStateManager(state, catalog); - - assertEquals(Optional.of(CURSOR_FIELD1), stateManager.getOriginalCursorField(NAME_NAMESPACE_PAIR1)); - assertEquals(Optional.of(CURSOR), stateManager.getOriginalCursor(NAME_NAMESPACE_PAIR1)); - assertEquals(Optional.of(CURSOR_FIELD1), stateManager.getCursorField(NAME_NAMESPACE_PAIR1)); - assertEquals(Optional.of(CURSOR), stateManager.getCursor(NAME_NAMESPACE_PAIR1)); - - assertEquals(Optional.empty(), stateManager.getOriginalCursorField(NAME_NAMESPACE_PAIR2)); - assertEquals(Optional.empty(), stateManager.getOriginalCursor(NAME_NAMESPACE_PAIR2)); - assertEquals(Optional.empty(), stateManager.getCursorField(NAME_NAMESPACE_PAIR2)); - assertEquals(Optional.empty(), stateManager.getCursor(NAME_NAMESPACE_PAIR2)); - } - - @Test - void testToState() { - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog() - .withStreams(List.of( - new ConfiguredAirbyteStream() - .withStream(new AirbyteStream().withName(STREAM_NAME1).withNamespace(NAMESPACE)) - .withCursorField(List.of(CURSOR_FIELD1)), - new ConfiguredAirbyteStream() - .withStream(new AirbyteStream().withName(STREAM_NAME2).withNamespace(NAMESPACE)) - .withCursorField(List.of(CURSOR_FIELD2)), - new ConfiguredAirbyteStream() - .withStream(new AirbyteStream().withName(STREAM_NAME3).withNamespace(NAMESPACE)))); - - final StateManager stateManager = new LegacyStateManager(new DbState(), catalog); - - final AirbyteStateMessage expectedFirstEmission = new AirbyteStateMessage() - .withType(AirbyteStateType.LEGACY) - .withData(Jsons.jsonNode(new DbState().withStreams(List.of( - new DbStreamState().withStreamName(STREAM_NAME1).withStreamNamespace(NAMESPACE).withCursorField(List.of(CURSOR_FIELD1)) - .withCursor("a"), - new DbStreamState().withStreamName(STREAM_NAME2).withStreamNamespace(NAMESPACE).withCursorField(List.of(CURSOR_FIELD2)), - new DbStreamState().withStreamName(STREAM_NAME3).withStreamNamespace(NAMESPACE)) - .stream().sorted(Comparator.comparing(DbStreamState::getStreamName)).collect(Collectors.toList())) - .withCdc(false))); - final AirbyteStateMessage actualFirstEmission = stateManager.updateAndEmit(NAME_NAMESPACE_PAIR1, "a"); - assertEquals(expectedFirstEmission, actualFirstEmission); - final AirbyteStateMessage expectedSecondEmission = new AirbyteStateMessage() - .withType(AirbyteStateType.LEGACY) - .withData(Jsons.jsonNode(new DbState().withStreams(List.of( - new DbStreamState().withStreamName(STREAM_NAME1).withStreamNamespace(NAMESPACE).withCursorField(List.of(CURSOR_FIELD1)) - .withCursor("a"), - new DbStreamState().withStreamName(STREAM_NAME2).withStreamNamespace(NAMESPACE).withCursorField(List.of(CURSOR_FIELD2)) - .withCursor("b"), - new DbStreamState().withStreamName(STREAM_NAME3).withStreamNamespace(NAMESPACE)) - .stream().sorted(Comparator.comparing(DbStreamState::getStreamName)).collect(Collectors.toList())) - .withCdc(false))); - final AirbyteStateMessage actualSecondEmission = stateManager.updateAndEmit(NAME_NAMESPACE_PAIR2, "b"); - assertEquals(expectedSecondEmission, actualSecondEmission); - } - - @Test - void testToStateNullCursorField() { - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog() - .withStreams(List.of( - new ConfiguredAirbyteStream() - .withStream(new AirbyteStream().withName(STREAM_NAME1).withNamespace(NAMESPACE)) - .withCursorField(List.of(CURSOR_FIELD1)), - new ConfiguredAirbyteStream() - .withStream(new AirbyteStream().withName(STREAM_NAME2).withNamespace(NAMESPACE)))); - final StateManager stateManager = new LegacyStateManager(new DbState(), catalog); - - final AirbyteStateMessage expectedFirstEmission = new AirbyteStateMessage() - .withType(AirbyteStateType.LEGACY) - .withData(Jsons.jsonNode(new DbState().withStreams(List.of( - new DbStreamState().withStreamName(STREAM_NAME1).withStreamNamespace(NAMESPACE).withCursorField(List.of(CURSOR_FIELD1)) - .withCursor("a"), - new DbStreamState().withStreamName(STREAM_NAME2).withStreamNamespace(NAMESPACE)) - .stream().sorted(Comparator.comparing(DbStreamState::getStreamName)).collect(Collectors.toList())) - .withCdc(false))); - - final AirbyteStateMessage actualFirstEmission = stateManager.updateAndEmit(NAME_NAMESPACE_PAIR1, "a"); - assertEquals(expectedFirstEmission, actualFirstEmission); - } - - @Test - void testCursorNotUpdatedForCdc() { - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog() - .withStreams(List.of( - new ConfiguredAirbyteStream() - .withStream(new AirbyteStream().withName(STREAM_NAME1).withNamespace(NAMESPACE)) - .withCursorField(List.of(CURSOR_FIELD1)), - new ConfiguredAirbyteStream() - .withStream(new AirbyteStream().withName(STREAM_NAME2).withNamespace(NAMESPACE)))); - - final DbState state = new DbState(); - state.setCdc(true); - final StateManager stateManager = new LegacyStateManager(state, catalog); - - final AirbyteStateMessage expectedFirstEmission = new AirbyteStateMessage() - .withType(AirbyteStateType.LEGACY) - .withData(Jsons.jsonNode(new DbState().withStreams(List.of( - new DbStreamState().withStreamName(STREAM_NAME1).withStreamNamespace(NAMESPACE).withCursorField(List.of(CURSOR_FIELD1)) - .withCursor(null), - new DbStreamState().withStreamName(STREAM_NAME2).withStreamNamespace(NAMESPACE).withCursorField(List.of())) - .stream().sorted(Comparator.comparing(DbStreamState::getStreamName)).collect(Collectors.toList())) - .withCdc(true))); - final AirbyteStateMessage actualFirstEmission = stateManager.updateAndEmit(NAME_NAMESPACE_PAIR1, "a"); - assertEquals(expectedFirstEmission, actualFirstEmission); - final AirbyteStateMessage expectedSecondEmission = new AirbyteStateMessage() - .withType(AirbyteStateType.LEGACY) - .withData(Jsons.jsonNode(new DbState().withStreams(List.of( - new DbStreamState().withStreamName(STREAM_NAME1).withStreamNamespace(NAMESPACE).withCursorField(List.of(CURSOR_FIELD1)) - .withCursor(null), - new DbStreamState().withStreamName(STREAM_NAME2).withStreamNamespace(NAMESPACE).withCursorField(List.of()) - .withCursor(null)) - .stream().sorted(Comparator.comparing(DbStreamState::getStreamName)).collect(Collectors.toList())) - .withCdc(true))); - final AirbyteStateMessage actualSecondEmission = stateManager.updateAndEmit(NAME_NAMESPACE_PAIR2, "b"); - assertEquals(expectedSecondEmission, actualSecondEmission); - } - - @Test - void testCdcStateManager() { - final ConfiguredAirbyteCatalog catalog = mock(ConfiguredAirbyteCatalog.class); - final CdcState cdcState = new CdcState().withState(Jsons.jsonNode(Map.of("foo", "bar", "baz", 5))); - final DbState dbState = new DbState().withCdcState(cdcState).withStreams(List.of( - new DbStreamState().withStreamNamespace(NAMESPACE).withStreamName(STREAM_NAME1))); - final StateManager stateManager = new LegacyStateManager(dbState, catalog); - assertNotNull(stateManager.getCdcStateManager()); - assertEquals(cdcState, stateManager.getCdcStateManager().getCdcState()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/relationaldb/state/SourceStateIteratorTest.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/relationaldb/state/SourceStateIteratorTest.java deleted file mode 100644 index 626cd52545a48..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/relationaldb/state/SourceStateIteratorTest.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.relationaldb.state; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThrows; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.atLeastOnce; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; - -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteMessage.Type; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import io.airbyte.protocol.models.v0.AirbyteStateStats; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import java.time.Duration; -import java.util.Iterator; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -public class SourceStateIteratorTest { - - SourceStateMessageProducer mockProducer; - Iterator messageIterator; - ConfiguredAirbyteStream stream; - - SourceStateIterator sourceStateIterator; - - @BeforeEach - void setup() { - mockProducer = mock(SourceStateMessageProducer.class); - stream = mock(ConfiguredAirbyteStream.class); - messageIterator = mock(Iterator.class); - StateEmitFrequency stateEmitFrequency = new StateEmitFrequency(1L, Duration.ofSeconds(100L)); - sourceStateIterator = new SourceStateIterator(messageIterator, stream, mockProducer, stateEmitFrequency); - } - - // Provides a way to generate a record message and will verify corresponding spied functions have - // been called. - void processRecordMessage() { - doReturn(true).when(messageIterator).hasNext(); - doReturn(false).when(mockProducer).shouldEmitStateMessage(eq(stream)); - AirbyteMessage message = new AirbyteMessage().withType(Type.RECORD).withRecord(new AirbyteRecordMessage()); - doReturn(message).when(mockProducer).processRecordMessage(eq(stream), any()); - doReturn(message).when(messageIterator).next(); - - assertEquals(message, sourceStateIterator.computeNext()); - verify(mockProducer, atLeastOnce()).processRecordMessage(eq(stream), eq(message)); - } - - @Test - void testShouldProcessRecordMessage() { - processRecordMessage(); - } - - @Test - void testShouldEmitStateMessage() { - processRecordMessage(); - doReturn(true).when(mockProducer).shouldEmitStateMessage(eq(stream)); - final AirbyteStateMessage stateMessage = new AirbyteStateMessage(); - doReturn(stateMessage).when(mockProducer).generateStateMessageAtCheckpoint(stream); - AirbyteMessage expectedMessage = new AirbyteMessage().withType(Type.STATE).withState(stateMessage); - expectedMessage.getState().withSourceStats(new AirbyteStateStats().withRecordCount(1.0)); - assertEquals(expectedMessage, sourceStateIterator.computeNext()); - } - - @Test - void testShouldEmitFinalStateMessage() { - processRecordMessage(); - processRecordMessage(); - doReturn(false).when(messageIterator).hasNext(); - final AirbyteStateMessage stateMessage = new AirbyteStateMessage(); - doReturn(stateMessage).when(mockProducer).createFinalStateMessage(stream); - AirbyteMessage expectedMessage = new AirbyteMessage().withType(Type.STATE).withState(stateMessage); - expectedMessage.getState().withSourceStats(new AirbyteStateStats().withRecordCount(2.0)); - assertEquals(expectedMessage, sourceStateIterator.computeNext()); - } - - @Test - void testShouldSendEndOfData() { - processRecordMessage(); - doReturn(false).when(messageIterator).hasNext(); - doReturn(new AirbyteStateMessage()).when(mockProducer).createFinalStateMessage(stream); - sourceStateIterator.computeNext(); - - // After sending the final state, if iterator was called again, we will return null. - assertEquals(null, sourceStateIterator.computeNext()); - } - - @Test - void testShouldRethrowExceptions() { - processRecordMessage(); - doThrow(new ArrayIndexOutOfBoundsException("unexpected error")).when(messageIterator).hasNext(); - assertThrows(RuntimeException.class, () -> sourceStateIterator.computeNext()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/relationaldb/state/StateGeneratorUtilsTest.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/relationaldb/state/StateGeneratorUtilsTest.java deleted file mode 100644 index 0f65df39d2920..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/relationaldb/state/StateGeneratorUtilsTest.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.relationaldb.state; - -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import io.airbyte.protocol.models.v0.StreamDescriptor; -import org.junit.jupiter.api.Test; - -/** - * Test suite for the {@link StateGeneratorUtils} class. - */ -public class StateGeneratorUtilsTest { - - @Test - void testValidStreamDescriptor() { - final StreamDescriptor streamDescriptor1 = null; - final StreamDescriptor streamDescriptor2 = new StreamDescriptor(); - final StreamDescriptor streamDescriptor3 = new StreamDescriptor().withName("name"); - final StreamDescriptor streamDescriptor4 = new StreamDescriptor().withNamespace("namespace"); - final StreamDescriptor streamDescriptor5 = new StreamDescriptor().withName("name").withNamespace("namespace"); - final StreamDescriptor streamDescriptor6 = new StreamDescriptor().withName("name").withNamespace(""); - final StreamDescriptor streamDescriptor7 = new StreamDescriptor().withName("").withNamespace("namespace"); - final StreamDescriptor streamDescriptor8 = new StreamDescriptor().withName("").withNamespace(""); - - assertFalse(StateGeneratorUtils.isValidStreamDescriptor(streamDescriptor1)); - assertFalse(StateGeneratorUtils.isValidStreamDescriptor(streamDescriptor2)); - assertTrue(StateGeneratorUtils.isValidStreamDescriptor(streamDescriptor3)); - assertFalse(StateGeneratorUtils.isValidStreamDescriptor(streamDescriptor4)); - assertTrue(StateGeneratorUtils.isValidStreamDescriptor(streamDescriptor5)); - assertTrue(StateGeneratorUtils.isValidStreamDescriptor(streamDescriptor6)); - assertTrue(StateGeneratorUtils.isValidStreamDescriptor(streamDescriptor7)); - assertTrue(StateGeneratorUtils.isValidStreamDescriptor(streamDescriptor8)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/relationaldb/state/StateManagerFactoryTest.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/relationaldb/state/StateManagerFactoryTest.java deleted file mode 100644 index 702429adc9997..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/relationaldb/state/StateManagerFactoryTest.java +++ /dev/null @@ -1,187 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.relationaldb.state; - -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import io.airbyte.cdk.integrations.source.relationaldb.models.CdcState; -import io.airbyte.cdk.integrations.source.relationaldb.models.DbState; -import io.airbyte.cdk.integrations.source.relationaldb.models.DbStreamState; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteGlobalState; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import io.airbyte.protocol.models.v0.AirbyteStateMessage.AirbyteStateType; -import io.airbyte.protocol.models.v0.AirbyteStreamState; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.util.List; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -/** - * Test suite for the {@link StateManagerFactory} class. - */ -public class StateManagerFactoryTest { - - private static final String NAMESPACE = "namespace"; - private static final String NAME = "name"; - - @Test - void testNullOrEmptyState() { - final ConfiguredAirbyteCatalog catalog = mock(ConfiguredAirbyteCatalog.class); - - Assertions.assertThrows(IllegalArgumentException.class, () -> { - StateManagerFactory.createStateManager(AirbyteStateType.GLOBAL, null, catalog); - }); - - Assertions.assertThrows(IllegalArgumentException.class, () -> { - StateManagerFactory.createStateManager(AirbyteStateType.GLOBAL, List.of(), catalog); - }); - - Assertions.assertThrows(IllegalArgumentException.class, () -> { - StateManagerFactory.createStateManager(AirbyteStateType.LEGACY, null, catalog); - }); - - Assertions.assertThrows(IllegalArgumentException.class, () -> { - StateManagerFactory.createStateManager(AirbyteStateType.LEGACY, List.of(), catalog); - }); - - Assertions.assertThrows(IllegalArgumentException.class, () -> { - StateManagerFactory.createStateManager(AirbyteStateType.STREAM, null, catalog); - }); - - Assertions.assertThrows(IllegalArgumentException.class, () -> { - StateManagerFactory.createStateManager(AirbyteStateType.STREAM, List.of(), catalog); - }); - } - - @Test - void testLegacyStateManagerCreationFromAirbyteStateMessage() { - final ConfiguredAirbyteCatalog catalog = mock(ConfiguredAirbyteCatalog.class); - final AirbyteStateMessage airbyteStateMessage = mock(AirbyteStateMessage.class); - when(airbyteStateMessage.getData()).thenReturn(Jsons.jsonNode(new DbState())); - - final StateManager stateManager = StateManagerFactory.createStateManager(AirbyteStateType.LEGACY, List.of(airbyteStateMessage), catalog); - - Assertions.assertNotNull(stateManager); - Assertions.assertEquals(LegacyStateManager.class, stateManager.getClass()); - } - - @Test - void testGlobalStateManagerCreation() { - final ConfiguredAirbyteCatalog catalog = mock(ConfiguredAirbyteCatalog.class); - final AirbyteGlobalState globalState = - new AirbyteGlobalState().withSharedState(Jsons.jsonNode(new DbState().withCdcState(new CdcState().withState(Jsons.jsonNode(new DbState()))))) - .withStreamStates(List.of(new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withNamespace(NAMESPACE).withName(NAME)) - .withStreamState(Jsons.jsonNode(new DbStreamState())))); - final AirbyteStateMessage airbyteStateMessage = new AirbyteStateMessage().withType(AirbyteStateType.GLOBAL).withGlobal(globalState); - - final StateManager stateManager = StateManagerFactory.createStateManager(AirbyteStateType.GLOBAL, List.of(airbyteStateMessage), catalog); - - Assertions.assertNotNull(stateManager); - Assertions.assertEquals(GlobalStateManager.class, stateManager.getClass()); - } - - @Test - void testGlobalStateManagerCreationFromLegacyState() { - final ConfiguredAirbyteCatalog catalog = mock(ConfiguredAirbyteCatalog.class); - final CdcState cdcState = new CdcState(); - final DbState dbState = new DbState() - .withCdcState(cdcState) - .withStreams(List.of(new DbStreamState().withStreamName(NAME).withStreamNamespace(NAMESPACE))); - final AirbyteStateMessage airbyteStateMessage = - new AirbyteStateMessage().withType(AirbyteStateType.LEGACY).withData(Jsons.jsonNode(dbState)); - - final StateManager stateManager = StateManagerFactory.createStateManager(AirbyteStateType.GLOBAL, List.of(airbyteStateMessage), catalog); - - Assertions.assertNotNull(stateManager); - Assertions.assertEquals(GlobalStateManager.class, stateManager.getClass()); - } - - @Test - void testGlobalStateManagerCreationFromStreamState() { - final ConfiguredAirbyteCatalog catalog = mock(ConfiguredAirbyteCatalog.class); - final AirbyteStateMessage airbyteStateMessage = new AirbyteStateMessage().withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName(NAME).withNamespace( - NAMESPACE)).withStreamState(Jsons.jsonNode(new DbStreamState()))); - - Assertions.assertThrows(IllegalArgumentException.class, - () -> StateManagerFactory.createStateManager(AirbyteStateType.GLOBAL, List.of(airbyteStateMessage), catalog)); - } - - @Test - void testGlobalStateManagerCreationWithLegacyDataPresent() { - final ConfiguredAirbyteCatalog catalog = mock(ConfiguredAirbyteCatalog.class); - final AirbyteGlobalState globalState = - new AirbyteGlobalState().withSharedState(Jsons.jsonNode(new DbState().withCdcState(new CdcState().withState(Jsons.jsonNode(new DbState()))))) - .withStreamStates(List.of(new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withNamespace(NAMESPACE).withName(NAME)) - .withStreamState(Jsons.jsonNode(new DbStreamState())))); - final AirbyteStateMessage airbyteStateMessage = - new AirbyteStateMessage().withType(AirbyteStateType.GLOBAL).withGlobal(globalState).withData(Jsons.jsonNode(new DbState())); - - final StateManager stateManager = StateManagerFactory.createStateManager(AirbyteStateType.GLOBAL, List.of(airbyteStateMessage), catalog); - - Assertions.assertNotNull(stateManager); - Assertions.assertEquals(GlobalStateManager.class, stateManager.getClass()); - } - - @Test - void testStreamStateManagerCreation() { - final ConfiguredAirbyteCatalog catalog = mock(ConfiguredAirbyteCatalog.class); - final AirbyteStateMessage airbyteStateMessage = new AirbyteStateMessage().withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName(NAME).withNamespace( - NAMESPACE)).withStreamState(Jsons.jsonNode(new DbStreamState()))); - - final StateManager stateManager = StateManagerFactory.createStateManager(AirbyteStateType.STREAM, List.of(airbyteStateMessage), catalog); - - Assertions.assertNotNull(stateManager); - Assertions.assertEquals(StreamStateManager.class, stateManager.getClass()); - } - - @Test - void testStreamStateManagerCreationFromLegacy() { - final ConfiguredAirbyteCatalog catalog = mock(ConfiguredAirbyteCatalog.class); - final CdcState cdcState = new CdcState(); - final DbState dbState = new DbState() - .withCdcState(cdcState) - .withStreams(List.of(new DbStreamState().withStreamName(NAME).withStreamNamespace(NAMESPACE))); - final AirbyteStateMessage airbyteStateMessage = - new AirbyteStateMessage().withType(AirbyteStateType.LEGACY).withData(Jsons.jsonNode(dbState)); - - final StateManager stateManager = StateManagerFactory.createStateManager(AirbyteStateType.STREAM, List.of(airbyteStateMessage), catalog); - - Assertions.assertNotNull(stateManager); - Assertions.assertEquals(StreamStateManager.class, stateManager.getClass()); - } - - @Test - void testStreamStateManagerCreationFromGlobal() { - final ConfiguredAirbyteCatalog catalog = mock(ConfiguredAirbyteCatalog.class); - final AirbyteGlobalState globalState = - new AirbyteGlobalState().withSharedState(Jsons.jsonNode(new DbState().withCdcState(new CdcState().withState(Jsons.jsonNode(new DbState()))))) - .withStreamStates(List.of(new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withNamespace(NAMESPACE).withName(NAME)) - .withStreamState(Jsons.jsonNode(new DbStreamState())))); - final AirbyteStateMessage airbyteStateMessage = new AirbyteStateMessage().withType(AirbyteStateType.GLOBAL).withGlobal(globalState); - - Assertions.assertThrows(IllegalArgumentException.class, - () -> StateManagerFactory.createStateManager(AirbyteStateType.STREAM, List.of(airbyteStateMessage), catalog)); - } - - @Test - void testStreamStateManagerCreationWithLegacyDataPresent() { - final ConfiguredAirbyteCatalog catalog = mock(ConfiguredAirbyteCatalog.class); - final AirbyteStateMessage airbyteStateMessage = new AirbyteStateMessage().withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName(NAME).withNamespace( - NAMESPACE)).withStreamState(Jsons.jsonNode(new DbStreamState()))) - .withData(Jsons.jsonNode(new DbState())); - - final StateManager stateManager = StateManagerFactory.createStateManager(AirbyteStateType.STREAM, List.of(airbyteStateMessage), catalog); - - Assertions.assertNotNull(stateManager); - Assertions.assertEquals(StreamStateManager.class, stateManager.getClass()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/relationaldb/state/StateTestConstants.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/relationaldb/state/StateTestConstants.java deleted file mode 100644 index 0b6d0c4632d40..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/relationaldb/state/StateTestConstants.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.relationaldb.state; - -import io.airbyte.cdk.integrations.source.relationaldb.models.DbStreamState; -import io.airbyte.protocol.models.v0.AirbyteStream; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import java.util.Collections; -import java.util.List; -import java.util.Optional; -import org.testcontainers.shaded.com.google.common.collect.Lists; - -/** - * Collection of constants for use in state management-related tests. - */ -public final class StateTestConstants { - - public static final String NAMESPACE = "public"; - public static final String STREAM_NAME1 = "cars"; - public static final AirbyteStreamNameNamespacePair NAME_NAMESPACE_PAIR1 = new AirbyteStreamNameNamespacePair(STREAM_NAME1, NAMESPACE); - public static final String STREAM_NAME2 = "bicycles"; - public static final AirbyteStreamNameNamespacePair NAME_NAMESPACE_PAIR2 = new AirbyteStreamNameNamespacePair(STREAM_NAME2, NAMESPACE); - public static final String STREAM_NAME3 = "stationary_bicycles"; - public static final String CURSOR_FIELD1 = "year"; - public static final String CURSOR_FIELD2 = "generation"; - public static final String CURSOR = "2000"; - public static final long CURSOR_RECORD_COUNT = 19L; - - private StateTestConstants() {} - - public static Optional getState(final String cursorField, final String cursor) { - return Optional.of(new DbStreamState() - .withStreamName(STREAM_NAME1) - .withCursorField(Lists.newArrayList(cursorField)) - .withCursor(cursor)); - } - - public static Optional getState(final String cursorField, final String cursor, final long cursorRecordCount) { - return Optional.of(new DbStreamState() - .withStreamName(STREAM_NAME1) - .withCursorField(Lists.newArrayList(cursorField)) - .withCursor(cursor) - .withCursorRecordCount(cursorRecordCount)); - } - - public static Optional getCatalog(final String cursorField) { - return Optional.of(new ConfiguredAirbyteCatalog() - .withStreams(List.of(getStream(cursorField).orElse(null)))); - } - - public static Optional getStream(final String cursorField) { - return Optional.of(new ConfiguredAirbyteStream() - .withStream(new AirbyteStream().withName(STREAM_NAME1)) - .withCursorField(cursorField == null ? Collections.emptyList() : Lists.newArrayList(cursorField))); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/relationaldb/state/StreamStateManagerTest.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/relationaldb/state/StreamStateManagerTest.java deleted file mode 100644 index 3ed37ec423085..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/relationaldb/state/StreamStateManagerTest.java +++ /dev/null @@ -1,277 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.relationaldb.state; - -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateTestConstants.CURSOR; -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateTestConstants.CURSOR_FIELD1; -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateTestConstants.CURSOR_FIELD2; -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateTestConstants.NAMESPACE; -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateTestConstants.NAME_NAMESPACE_PAIR1; -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateTestConstants.NAME_NAMESPACE_PAIR2; -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateTestConstants.STREAM_NAME1; -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateTestConstants.STREAM_NAME2; -import static io.airbyte.cdk.integrations.source.relationaldb.state.StateTestConstants.STREAM_NAME3; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.mockito.Mockito.mock; - -import com.google.common.collect.Lists; -import io.airbyte.cdk.integrations.source.relationaldb.models.DbState; -import io.airbyte.cdk.integrations.source.relationaldb.models.DbStreamState; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import io.airbyte.protocol.models.v0.AirbyteStateMessage.AirbyteStateType; -import io.airbyte.protocol.models.v0.AirbyteStream; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.AirbyteStreamState; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import io.airbyte.protocol.models.v0.SyncMode; -import java.util.ArrayList; -import java.util.Comparator; -import java.util.List; -import java.util.Optional; -import java.util.stream.Collectors; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -/** - * Test suite for the {@link StreamStateManager} class. - */ -public class StreamStateManagerTest { - - @Test - void testCreationFromInvalidState() { - final AirbyteStateMessage airbyteStateMessage = new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName(STREAM_NAME1).withNamespace(NAMESPACE)) - .withStreamState(Jsons.jsonNode("Not a state object"))); - final ConfiguredAirbyteCatalog catalog = mock(ConfiguredAirbyteCatalog.class); - - Assertions.assertDoesNotThrow(() -> { - final StateManager stateManager = new StreamStateManager(List.of(airbyteStateMessage), catalog); - assertNotNull(stateManager); - }); - } - - @Test - void testGetters() { - final List state = new ArrayList<>(); - state.add(createStreamState(STREAM_NAME1, NAMESPACE, List.of(CURSOR_FIELD1), CURSOR, 0L)); - state.add(createStreamState(STREAM_NAME2, NAMESPACE, List.of(), null, 0L)); - - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog() - .withStreams(List.of( - new ConfiguredAirbyteStream() - .withStream(new AirbyteStream().withName(STREAM_NAME1).withNamespace(NAMESPACE) - .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH))) - .withCursorField(List.of(CURSOR_FIELD1)), - new ConfiguredAirbyteStream() - .withStream(new AirbyteStream().withName(STREAM_NAME2).withNamespace(NAMESPACE) - .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH))))); - - final StateManager stateManager = new StreamStateManager(state, catalog); - - assertEquals(Optional.of(CURSOR_FIELD1), stateManager.getOriginalCursorField(NAME_NAMESPACE_PAIR1)); - assertEquals(Optional.of(CURSOR), stateManager.getOriginalCursor(NAME_NAMESPACE_PAIR1)); - assertEquals(Optional.of(CURSOR_FIELD1), stateManager.getCursorField(NAME_NAMESPACE_PAIR1)); - assertEquals(Optional.of(CURSOR), stateManager.getCursor(NAME_NAMESPACE_PAIR1)); - - assertEquals(Optional.empty(), stateManager.getOriginalCursorField(NAME_NAMESPACE_PAIR2)); - assertEquals(Optional.empty(), stateManager.getOriginalCursor(NAME_NAMESPACE_PAIR2)); - assertEquals(Optional.empty(), stateManager.getCursorField(NAME_NAMESPACE_PAIR2)); - assertEquals(Optional.empty(), stateManager.getCursor(NAME_NAMESPACE_PAIR2)); - } - - @Test - void testToState() { - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog() - .withStreams(List.of( - new ConfiguredAirbyteStream() - .withStream(new AirbyteStream().withName(STREAM_NAME1).withNamespace(NAMESPACE) - .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH))) - .withCursorField(List.of(CURSOR_FIELD1)), - new ConfiguredAirbyteStream() - .withStream(new AirbyteStream().withName(STREAM_NAME2).withNamespace(NAMESPACE) - .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH))) - .withCursorField(List.of(CURSOR_FIELD2)), - new ConfiguredAirbyteStream() - .withStream(new AirbyteStream().withName(STREAM_NAME3).withNamespace(NAMESPACE) - .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH))))); - - final StateManager stateManager = new StreamStateManager(createDefaultState(), catalog); - - final DbState expectedFirstDbState = new DbState() - .withCdc(false) - .withStreams(List.of( - new DbStreamState() - .withStreamName(STREAM_NAME1) - .withStreamNamespace(NAMESPACE) - .withCursorField(List.of(CURSOR_FIELD1)) - .withCursor("a"), - new DbStreamState() - .withStreamName(STREAM_NAME2) - .withStreamNamespace(NAMESPACE) - .withCursorField(List.of(CURSOR_FIELD2)), - new DbStreamState() - .withStreamName(STREAM_NAME3) - .withStreamNamespace(NAMESPACE)) - .stream().sorted(Comparator.comparing(DbStreamState::getStreamName)).collect(Collectors.toList())); - final AirbyteStateMessage expectedFirstEmission = - createStreamState(STREAM_NAME1, NAMESPACE, List.of(CURSOR_FIELD1), "a", 0L).withData(Jsons.jsonNode(expectedFirstDbState)); - - final AirbyteStateMessage actualFirstEmission = stateManager.updateAndEmit(NAME_NAMESPACE_PAIR1, "a"); - assertEquals(expectedFirstEmission, actualFirstEmission); - - final long expectedRecordCount = 17L; - final DbState expectedSecondDbState = new DbState() - .withCdc(false) - .withStreams(List.of( - new DbStreamState() - .withStreamName(STREAM_NAME1) - .withStreamNamespace(NAMESPACE) - .withCursorField(List.of(CURSOR_FIELD1)) - .withCursor("a"), - new DbStreamState() - .withStreamName(STREAM_NAME2) - .withStreamNamespace(NAMESPACE) - .withCursorField(List.of(CURSOR_FIELD2)) - .withCursor("b") - .withCursorRecordCount(expectedRecordCount), - new DbStreamState() - .withStreamName(STREAM_NAME3) - .withStreamNamespace(NAMESPACE)) - .stream().sorted(Comparator.comparing(DbStreamState::getStreamName)).collect(Collectors.toList())); - final AirbyteStateMessage expectedSecondEmission = - createStreamState(STREAM_NAME2, NAMESPACE, List.of(CURSOR_FIELD2), "b", expectedRecordCount).withData(Jsons.jsonNode(expectedSecondDbState)); - - final AirbyteStateMessage actualSecondEmission = stateManager.updateAndEmit(NAME_NAMESPACE_PAIR2, "b", expectedRecordCount); - assertEquals(expectedSecondEmission, actualSecondEmission); - } - - @Test - void testToStateWithoutCursorInfo() { - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog() - .withStreams(List.of( - new ConfiguredAirbyteStream() - .withStream(new AirbyteStream().withName(STREAM_NAME1).withNamespace(NAMESPACE) - .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH))) - .withCursorField(List.of(CURSOR_FIELD1)), - new ConfiguredAirbyteStream() - .withStream(new AirbyteStream().withName(STREAM_NAME2).withNamespace(NAMESPACE) - .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH))) - .withCursorField(List.of(CURSOR_FIELD2)), - new ConfiguredAirbyteStream() - .withStream(new AirbyteStream().withName(STREAM_NAME3).withNamespace(NAMESPACE) - .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH))))); - final AirbyteStreamNameNamespacePair airbyteStreamNameNamespacePair = new AirbyteStreamNameNamespacePair("other", "other"); - - final StateManager stateManager = new StreamStateManager(createDefaultState(), catalog); - final AirbyteStateMessage airbyteStateMessage = stateManager.toState(Optional.of(airbyteStreamNameNamespacePair)); - assertNotNull(airbyteStateMessage); - assertEquals(AirbyteStateType.STREAM, airbyteStateMessage.getType()); - assertNotNull(airbyteStateMessage.getStream()); - } - - @Test - void testToStateWithoutStreamPair() { - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog() - .withStreams(List.of( - new ConfiguredAirbyteStream() - .withStream(new AirbyteStream().withName(STREAM_NAME1).withNamespace(NAMESPACE) - .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH))) - .withCursorField(List.of(CURSOR_FIELD1)), - new ConfiguredAirbyteStream() - .withStream(new AirbyteStream().withName(STREAM_NAME2).withNamespace(NAMESPACE) - .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH))) - .withCursorField(List.of(CURSOR_FIELD2)), - new ConfiguredAirbyteStream() - .withStream(new AirbyteStream().withName(STREAM_NAME3).withNamespace(NAMESPACE) - .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH))))); - - final StateManager stateManager = new StreamStateManager(createDefaultState(), catalog); - final AirbyteStateMessage airbyteStateMessage = stateManager.toState(Optional.empty()); - assertNotNull(airbyteStateMessage); - assertEquals(AirbyteStateType.STREAM, airbyteStateMessage.getType()); - assertNotNull(airbyteStateMessage.getStream()); - assertNull(airbyteStateMessage.getStream().getStreamState()); - } - - @Test - void testToStateNullCursorField() { - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog() - .withStreams(List.of( - new ConfiguredAirbyteStream() - .withStream(new AirbyteStream().withName(STREAM_NAME1).withNamespace(NAMESPACE) - .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH))) - .withCursorField(List.of(CURSOR_FIELD1)), - new ConfiguredAirbyteStream() - .withStream(new AirbyteStream().withName(STREAM_NAME2).withNamespace(NAMESPACE) - .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH))))); - final StateManager stateManager = new StreamStateManager(createDefaultState(), catalog); - - final DbState expectedFirstDbState = new DbState() - .withCdc(false) - .withStreams(List.of( - new DbStreamState() - .withStreamName(STREAM_NAME1) - .withStreamNamespace(NAMESPACE) - .withCursorField(List.of(CURSOR_FIELD1)) - .withCursor("a"), - new DbStreamState() - .withStreamName(STREAM_NAME2) - .withStreamNamespace(NAMESPACE)) - .stream().sorted(Comparator.comparing(DbStreamState::getStreamName)).collect(Collectors.toList())); - - final AirbyteStateMessage expectedFirstEmission = - createStreamState(STREAM_NAME1, NAMESPACE, List.of(CURSOR_FIELD1), "a", 0L).withData(Jsons.jsonNode(expectedFirstDbState)); - final AirbyteStateMessage actualFirstEmission = stateManager.updateAndEmit(NAME_NAMESPACE_PAIR1, "a"); - assertEquals(expectedFirstEmission, actualFirstEmission); - } - - @Test - void testCdcStateManager() { - final ConfiguredAirbyteCatalog catalog = mock(ConfiguredAirbyteCatalog.class); - final StateManager stateManager = new StreamStateManager( - List.of(new AirbyteStateMessage().withType(AirbyteStateType.STREAM).withStream(new AirbyteStreamState())), catalog); - Assertions.assertThrows(UnsupportedOperationException.class, () -> stateManager.getCdcStateManager()); - } - - private List createDefaultState() { - return List.of(new AirbyteStateMessage().withType(AirbyteStateType.STREAM).withStream(new AirbyteStreamState())); - } - - private AirbyteStateMessage createStreamState(final String name, - final String namespace, - final List cursorFields, - final String cursorValue, - final long cursorRecordCount) { - final DbStreamState dbStreamState = new DbStreamState() - .withStreamName(name) - .withStreamNamespace(namespace); - - if (cursorFields != null && !cursorFields.isEmpty()) { - dbStreamState.withCursorField(cursorFields); - } - - if (cursorValue != null) { - dbStreamState.withCursor(cursorValue); - } - - if (cursorRecordCount > 0L) { - dbStreamState.withCursorRecordCount(cursorRecordCount); - } - - return new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName(name).withNamespace(namespace)) - .withStreamState(Jsons.jsonNode(dbStreamState))); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/test/utils/DatabaseConnectionHelperTest.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/test/utils/DatabaseConnectionHelperTest.java deleted file mode 100644 index 9f7008f5f6c98..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/test/utils/DatabaseConnectionHelperTest.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.testutils; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; - -import com.zaxxer.hikari.HikariDataSource; -import javax.sql.DataSource; -import org.jooq.DSLContext; -import org.jooq.SQLDialect; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; -import org.testcontainers.containers.PostgreSQLContainer; - -class DatabaseConnectionHelperTest { - - private static final String DATABASE_NAME = "airbyte_test_database"; - - protected static PostgreSQLContainer container; - - @BeforeAll - static void dbSetup() { - container = new PostgreSQLContainer<>("postgres:13-alpine") - .withDatabaseName(DATABASE_NAME) - .withUsername("docker") - .withPassword("docker"); - container.start(); - } - - @AfterAll - static void dbDown() { - container.close(); - } - - @Test - void testCreatingFromATestContainer() { - final DataSource dataSource = DatabaseConnectionHelper.createDataSource(container); - assertNotNull(dataSource); - assertEquals(HikariDataSource.class, dataSource.getClass()); - assertEquals(10, ((HikariDataSource) dataSource).getHikariConfigMXBean().getMaximumPoolSize()); - } - - @Test - void testCreatingADslContextFromATestContainer() { - final SQLDialect dialect = SQLDialect.POSTGRES; - final DSLContext dslContext = DatabaseConnectionHelper.createDslContext(container, dialect); - assertNotNull(dslContext); - assertEquals(dialect, dslContext.configuration().dialect()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/debezium/AirbyteDebeziumHandlerTest.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/debezium/AirbyteDebeziumHandlerTest.kt new file mode 100644 index 0000000000000..8732a0a6546e7 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/debezium/AirbyteDebeziumHandlerTest.kt @@ -0,0 +1,74 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.debezium + +import com.google.common.collect.Lists +import io.airbyte.protocol.models.Field +import io.airbyte.protocol.models.JsonSchemaType +import io.airbyte.protocol.models.v0.AirbyteCatalog +import io.airbyte.protocol.models.v0.CatalogHelpers +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import io.airbyte.protocol.models.v0.SyncMode +import java.util.List +import java.util.function.Consumer +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +class AirbyteDebeziumHandlerTest { + @Test + fun shouldUseCdcTestShouldReturnTrue() { + val catalog = + AirbyteCatalog() + .withStreams( + List.of( + CatalogHelpers.createAirbyteStream( + "MODELS_STREAM_NAME", + "MODELS_SCHEMA", + Field.of("COL_ID", JsonSchemaType.NUMBER), + Field.of("COL_MAKE_ID", JsonSchemaType.NUMBER), + Field.of("COL_MODEL", JsonSchemaType.STRING) + ) + .withSupportedSyncModes( + Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL) + ) + .withSourceDefinedPrimaryKey(List.of(listOf("COL_ID"))) + ) + ) + val configuredCatalog = CatalogHelpers.toDefaultConfiguredCatalog(catalog) + // set all streams to incremental. + configuredCatalog.streams.forEach( + Consumer { s: ConfiguredAirbyteStream -> s.syncMode = SyncMode.INCREMENTAL } + ) + + Assertions.assertTrue( + AirbyteDebeziumHandler.isAnyStreamIncrementalSyncMode(configuredCatalog) + ) + } + + @Test + fun shouldUseCdcTestShouldReturnFalse() { + val catalog = + AirbyteCatalog() + .withStreams( + List.of( + CatalogHelpers.createAirbyteStream( + "MODELS_STREAM_NAME", + "MODELS_SCHEMA", + Field.of("COL_ID", JsonSchemaType.NUMBER), + Field.of("COL_MAKE_ID", JsonSchemaType.NUMBER), + Field.of("COL_MODEL", JsonSchemaType.STRING) + ) + .withSupportedSyncModes( + Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL) + ) + .withSourceDefinedPrimaryKey(List.of(listOf("COL_ID"))) + ) + ) + val configuredCatalog = CatalogHelpers.toDefaultConfiguredCatalog(catalog) + + Assertions.assertFalse( + AirbyteDebeziumHandler.isAnyStreamIncrementalSyncMode(configuredCatalog) + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/debezium/AirbyteFileOffsetBackingStoreTest.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/debezium/AirbyteFileOffsetBackingStoreTest.kt new file mode 100644 index 0000000000000..aeba71586adb6 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/debezium/AirbyteFileOffsetBackingStoreTest.kt @@ -0,0 +1,79 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.debezium + +import io.airbyte.cdk.integrations.debezium.internals.AirbyteFileOffsetBackingStore +import io.airbyte.commons.io.IOs +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.resources.MoreResources +import java.io.IOException +import java.nio.file.Files +import java.nio.file.Path +import java.util.* +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class AirbyteFileOffsetBackingStoreTest { + @Test + @Throws(IOException::class) + fun test() { + val testRoot = Files.createTempDirectory(Path.of("/tmp"), "offset-store-test") + + val bytes = MoreResources.readBytes("test_debezium_offset.dat") + val templateFilePath = testRoot.resolve("template_offset.dat") + IOs.writeFile(templateFilePath, bytes) + + val writeFilePath = testRoot.resolve("offset.dat") + val secondWriteFilePath = testRoot.resolve("offset_2.dat") + + val offsetStore = AirbyteFileOffsetBackingStore(templateFilePath, Optional.empty()) + val offset = offsetStore.read() + + val offsetStore2 = AirbyteFileOffsetBackingStore(writeFilePath, Optional.empty()) + offsetStore2.persist(Jsons.jsonNode(offset)) + val stateFromOffsetStore2 = offsetStore2.read() + + val offsetStore3 = AirbyteFileOffsetBackingStore(secondWriteFilePath, Optional.empty()) + offsetStore3.persist(Jsons.jsonNode(stateFromOffsetStore2)) + val stateFromOffsetStore3 = offsetStore3.read() + + // verify that, after a round trip through the offset store, we get back the same data. + Assertions.assertEquals(stateFromOffsetStore2, stateFromOffsetStore3) + // verify that the file written by the offset store is identical to the template file. + Assertions.assertTrue( + com.google.common.io.Files.equal(secondWriteFilePath.toFile(), writeFilePath.toFile()) + ) + } + + @Test + @Throws(IOException::class) + fun test2() { + val testRoot = Files.createTempDirectory(Path.of("/tmp"), "offset-store-test") + + val bytes = MoreResources.readBytes("test_debezium_offset.dat") + val templateFilePath = testRoot.resolve("template_offset.dat") + IOs.writeFile(templateFilePath, bytes) + + val writeFilePath = testRoot.resolve("offset.dat") + val secondWriteFilePath = testRoot.resolve("offset_2.dat") + + val offsetStore = AirbyteFileOffsetBackingStore(templateFilePath, Optional.of("orders")) + val offset = offsetStore.read() + + val offsetStore2 = AirbyteFileOffsetBackingStore(writeFilePath, Optional.of("orders")) + offsetStore2.persist(Jsons.jsonNode(offset)) + val stateFromOffsetStore2 = offsetStore2.read() + + val offsetStore3 = AirbyteFileOffsetBackingStore(secondWriteFilePath, Optional.of("orders")) + offsetStore3.persist(Jsons.jsonNode(stateFromOffsetStore2)) + val stateFromOffsetStore3 = offsetStore3.read() + + // verify that, after a round trip through the offset store, we get back the same data. + Assertions.assertEquals(stateFromOffsetStore2, stateFromOffsetStore3) + // verify that the file written by the offset store is identical to the template file. + Assertions.assertTrue( + com.google.common.io.Files.equal(secondWriteFilePath.toFile(), writeFilePath.toFile()) + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/debezium/DebeziumRecordPublisherTest.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/debezium/DebeziumRecordPublisherTest.kt new file mode 100644 index 0000000000000..8a23f58e748bf --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/debezium/DebeziumRecordPublisherTest.kt @@ -0,0 +1,120 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.debezium + +import com.google.common.collect.ImmutableList +import io.airbyte.cdk.integrations.debezium.internals.RelationalDbDebeziumPropertiesManager +import io.airbyte.protocol.models.Field +import io.airbyte.protocol.models.JsonSchemaType +import io.airbyte.protocol.models.v0.CatalogHelpers +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import io.airbyte.protocol.models.v0.SyncMode +import java.util.regex.Pattern +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class DebeziumRecordPublisherTest { + @Test + fun testTableIncludelistCreation() { + val catalog = + ConfiguredAirbyteCatalog() + .withStreams( + ImmutableList.of( + CatalogHelpers.createConfiguredAirbyteStream("id_and_name", "public") + .withSyncMode(SyncMode.INCREMENTAL), + CatalogHelpers.createConfiguredAirbyteStream("id_,something", "public") + .withSyncMode(SyncMode.INCREMENTAL), + CatalogHelpers.createConfiguredAirbyteStream("n\"aMéS", "public") + .withSyncMode(SyncMode.INCREMENTAL) + ) + ) + + val expectedWhitelist = + "\\Qpublic.id_and_name\\E,\\Qpublic.id_\\,something\\E,\\Qpublic.n\"aMéS\\E" + val actualWhitelist = RelationalDbDebeziumPropertiesManager.getTableIncludelist(catalog) + + Assertions.assertEquals(expectedWhitelist, actualWhitelist) + } + + @Test + fun testTableIncludelistFiltersFullRefresh() { + val catalog = + ConfiguredAirbyteCatalog() + .withStreams( + ImmutableList.of( + CatalogHelpers.createConfiguredAirbyteStream("id_and_name", "public") + .withSyncMode(SyncMode.INCREMENTAL), + CatalogHelpers.createConfiguredAirbyteStream("id_and_name2", "public") + .withSyncMode(SyncMode.FULL_REFRESH) + ) + ) + + val expectedWhitelist = "\\Qpublic.id_and_name\\E" + val actualWhitelist = RelationalDbDebeziumPropertiesManager.getTableIncludelist(catalog) + + Assertions.assertEquals(expectedWhitelist, actualWhitelist) + } + + @Test + fun testColumnIncludelistFiltersFullRefresh() { + val catalog = + ConfiguredAirbyteCatalog() + .withStreams( + ImmutableList.of( + CatalogHelpers.createConfiguredAirbyteStream( + "id_and_name", + "public", + Field.of("fld1", JsonSchemaType.NUMBER), + Field.of("fld2", JsonSchemaType.STRING) + ) + .withSyncMode(SyncMode.INCREMENTAL), + CatalogHelpers.createConfiguredAirbyteStream("id_,something", "public") + .withSyncMode(SyncMode.INCREMENTAL), + CatalogHelpers.createConfiguredAirbyteStream("id_and_name2", "public") + .withSyncMode(SyncMode.FULL_REFRESH), + CatalogHelpers.createConfiguredAirbyteStream("n\"aMéS", "public") + .withSyncMode(SyncMode.INCREMENTAL) + ) + ) + + val expectedWhitelist = + "\\Qpublic.id_and_name\\E\\.(\\Qfld2\\E|\\Qfld1\\E),\\Qpublic.id_\\,something\\E,\\Qpublic.n\"aMéS\\E" + val actualWhitelist = RelationalDbDebeziumPropertiesManager.getColumnIncludeList(catalog) + + Assertions.assertEquals(expectedWhitelist, actualWhitelist) + } + + @Test + fun testColumnIncludeListEscaping() { + // final String a = "public\\.products\\*\\^\\$\\+-\\\\"; + // final String b = "public.products*^$+-\\"; + // final Pattern p = Pattern.compile(a, Pattern.UNIX_LINES); + // assertTrue(p.matcher(b).find()); + // assertTrue(Pattern.compile(Pattern.quote(b)).matcher(b).find()); + + val catalog = + ConfiguredAirbyteCatalog() + .withStreams( + ImmutableList.of( + CatalogHelpers.createConfiguredAirbyteStream( + "id_and_name", + "public", + Field.of("fld1", JsonSchemaType.NUMBER), + Field.of("fld2", JsonSchemaType.STRING) + ) + .withSyncMode(SyncMode.INCREMENTAL) + ) + ) + + val anchored = + "^" + RelationalDbDebeziumPropertiesManager.getColumnIncludeList(catalog) + "$" + val pattern = Pattern.compile(anchored) + + Assertions.assertTrue(pattern.matcher("public.id_and_name.fld1").find()) + Assertions.assertTrue(pattern.matcher("public.id_and_name.fld2").find()) + Assertions.assertFalse(pattern.matcher("ic.id_and_name.fl").find()) + Assertions.assertFalse(pattern.matcher("ppppublic.id_and_name.fld2333").find()) + Assertions.assertFalse(pattern.matcher("public.id_and_name.fld_wrong_wrong").find()) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/debezium/internals/AirbyteSchemaHistoryStorageTest.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/debezium/internals/AirbyteSchemaHistoryStorageTest.kt new file mode 100644 index 0000000000000..217f4d0dffcab --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/debezium/internals/AirbyteSchemaHistoryStorageTest.kt @@ -0,0 +1,113 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.debezium.internals + +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.resources.MoreResources +import java.io.IOException +import java.util.* +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +class AirbyteSchemaHistoryStorageTest { + @Test + @Throws(IOException::class) + fun testForContentBiggerThan1MBLimit() { + val contentReadDirectlyFromFile = + MoreResources.readResource("dbhistory_greater_than_1_mb.dat") + + val schemaHistoryStorageFromUncompressedContent = + AirbyteSchemaHistoryStorage.initializeDBHistory( + AirbyteSchemaHistoryStorage.SchemaHistory( + Optional.of(Jsons.jsonNode(contentReadDirectlyFromFile)), + false + ), + true + ) + val schemaHistoryFromUncompressedContent = + schemaHistoryStorageFromUncompressedContent.read() + + Assertions.assertTrue(schemaHistoryFromUncompressedContent.isCompressed) + Assertions.assertNotNull(schemaHistoryFromUncompressedContent.schema) + Assertions.assertEquals( + contentReadDirectlyFromFile, + schemaHistoryStorageFromUncompressedContent.readUncompressed() + ) + + val schemaHistoryStorageFromCompressedContent = + AirbyteSchemaHistoryStorage.initializeDBHistory( + AirbyteSchemaHistoryStorage.SchemaHistory( + Optional.of(Jsons.jsonNode(schemaHistoryFromUncompressedContent.schema)), + true + ), + true + ) + val schemaHistoryFromCompressedContent = schemaHistoryStorageFromCompressedContent.read() + + Assertions.assertTrue(schemaHistoryFromCompressedContent.isCompressed) + Assertions.assertNotNull(schemaHistoryFromCompressedContent.schema) + Assertions.assertEquals( + schemaHistoryFromUncompressedContent.schema, + schemaHistoryFromCompressedContent.schema + ) + } + + @Test + @Throws(IOException::class) + fun sizeTest() { + Assertions.assertEquals( + 5.881045341491699, + AirbyteSchemaHistoryStorage.calculateSizeOfStringInMB( + MoreResources.readResource("dbhistory_greater_than_1_mb.dat") + ) + ) + Assertions.assertEquals( + 0.0038671493530273438, + AirbyteSchemaHistoryStorage.calculateSizeOfStringInMB( + MoreResources.readResource("dbhistory_less_than_1_mb.dat") + ) + ) + } + + @Test + @Throws(IOException::class) + fun testForContentLessThan1MBLimit() { + val contentReadDirectlyFromFile = MoreResources.readResource("dbhistory_less_than_1_mb.dat") + + val schemaHistoryStorageFromUncompressedContent = + AirbyteSchemaHistoryStorage.initializeDBHistory( + AirbyteSchemaHistoryStorage.SchemaHistory( + Optional.of(Jsons.jsonNode(contentReadDirectlyFromFile)), + false + ), + true + ) + val schemaHistoryFromUncompressedContent = + schemaHistoryStorageFromUncompressedContent.read() + + Assertions.assertFalse(schemaHistoryFromUncompressedContent.isCompressed) + Assertions.assertNotNull(schemaHistoryFromUncompressedContent.schema) + Assertions.assertEquals( + contentReadDirectlyFromFile, + schemaHistoryFromUncompressedContent.schema + ) + + val schemaHistoryStorageFromCompressedContent = + AirbyteSchemaHistoryStorage.initializeDBHistory( + AirbyteSchemaHistoryStorage.SchemaHistory( + Optional.of(Jsons.jsonNode(schemaHistoryFromUncompressedContent.schema)), + false + ), + true + ) + val schemaHistoryFromCompressedContent = schemaHistoryStorageFromCompressedContent.read() + + Assertions.assertFalse(schemaHistoryFromCompressedContent.isCompressed) + Assertions.assertNotNull(schemaHistoryFromCompressedContent.schema) + Assertions.assertEquals( + schemaHistoryFromUncompressedContent.schema, + schemaHistoryFromCompressedContent.schema + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/debezium/internals/DebeziumConverterUtilsTest.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/debezium/internals/DebeziumConverterUtilsTest.kt new file mode 100644 index 0000000000000..0b288c96d8f5b --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/debezium/internals/DebeziumConverterUtilsTest.kt @@ -0,0 +1,99 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.debezium.internals + +import io.debezium.spi.converter.RelationalColumn +import java.sql.Timestamp +import java.time.Duration +import java.time.LocalDate +import java.time.LocalDateTime +import java.time.LocalTime +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Disabled +import org.junit.jupiter.api.Test +import org.mockito.Mockito + +internal class DebeziumConverterUtilsTest { + @Test + fun convertDefaultValueTest() { + val relationalColumn = Mockito.mock(RelationalColumn::class.java) + + Mockito.`when`(relationalColumn.isOptional).thenReturn(true) + var actualColumnDefaultValue = DebeziumConverterUtils.convertDefaultValue(relationalColumn) + Assertions.assertNull( + actualColumnDefaultValue, + "Default value for optional relational column should be null" + ) + + Mockito.`when`(relationalColumn.isOptional).thenReturn(false) + Mockito.`when`(relationalColumn.hasDefaultValue()).thenReturn(false) + actualColumnDefaultValue = DebeziumConverterUtils.convertDefaultValue(relationalColumn) + Assertions.assertNull(actualColumnDefaultValue) + + Mockito.`when`(relationalColumn.isOptional).thenReturn(false) + Mockito.`when`(relationalColumn.hasDefaultValue()).thenReturn(true) + val expectedColumnDefaultValue = "default value" + Mockito.`when`(relationalColumn.defaultValue()).thenReturn(expectedColumnDefaultValue) + actualColumnDefaultValue = DebeziumConverterUtils.convertDefaultValue(relationalColumn) + Assertions.assertEquals(actualColumnDefaultValue, expectedColumnDefaultValue) + } + + @Test + fun convertLocalDate() { + val localDate = LocalDate.of(2021, 1, 1) + + val actual = DebeziumConverterUtils.convertDate(localDate) + Assertions.assertEquals("2021-01-01T00:00:00Z", actual) + } + + @Test + fun convertTLocalTime() { + val localTime = LocalTime.of(8, 1, 1) + val actual = DebeziumConverterUtils.convertDate(localTime) + Assertions.assertEquals("08:01:01", actual) + } + + @Test + fun convertLocalDateTime() { + val localDateTime = LocalDateTime.of(2021, 1, 1, 8, 1, 1) + + val actual = DebeziumConverterUtils.convertDate(localDateTime) + Assertions.assertEquals("2021-01-01T08:01:01Z", actual) + } + + @Test + @Disabled + fun convertDuration() { + val duration = Duration.ofHours(100000) + + val actual = DebeziumConverterUtils.convertDate(duration) + Assertions.assertEquals("1981-05-29T20:00:00Z", actual) + } + + @Test + fun convertTimestamp() { + val localDateTime = LocalDateTime.of(2021, 1, 1, 8, 1, 1) + val timestamp = Timestamp.valueOf(localDateTime) + + val actual = DebeziumConverterUtils.convertDate(timestamp) + Assertions.assertEquals("2021-01-01T08:01:01.000000Z", actual) + } + + @Test + @Disabled + fun convertNumber() { + val number: Number = 100000 + + val actual = DebeziumConverterUtils.convertDate(number) + Assertions.assertEquals("1970-01-01T03:01:40Z", actual) + } + + @Test + fun convertStringDateFormat() { + val stringValue = "2021-01-01T00:00:00Z" + + val actual = DebeziumConverterUtils.convertDate(stringValue) + Assertions.assertEquals("2021-01-01T00:00:00Z", actual) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/debezium/internals/DebeziumMessageProducerTest.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/debezium/internals/DebeziumMessageProducerTest.kt new file mode 100644 index 0000000000000..703e29a45eb97 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/debezium/internals/DebeziumMessageProducerTest.kt @@ -0,0 +1,117 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.debezium.internals + +import io.airbyte.cdk.integrations.debezium.CdcStateHandler +import io.airbyte.cdk.integrations.debezium.CdcTargetPosition +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.AirbyteStateMessage +import java.util.* +import org.junit.Assert +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.mockito.Mockito +import org.mockito.kotlin.any +import org.mockito.kotlin.eq +import org.mockito.kotlin.mock + +class DebeziumMessageProducerTest { + private var producer: DebeziumMessageProducer<*>? = null + + var cdcStateHandler: CdcStateHandler = mock() + var targetPosition: CdcTargetPosition = mock() + var eventConverter: DebeziumEventConverter = mock() + var offsetManager: AirbyteFileOffsetBackingStore = mock() + var schemaHistoryManager: AirbyteSchemaHistoryStorage = mock() + + @BeforeEach + fun setUp() { + cdcStateHandler = Mockito.mock(CdcStateHandler::class.java) + Mockito.`when`(cdcStateHandler.isCdcCheckpointEnabled).thenReturn(true) + targetPosition = mock() + eventConverter = Mockito.mock(DebeziumEventConverter::class.java) + offsetManager = Mockito.mock(AirbyteFileOffsetBackingStore::class.java) + Mockito.`when`>(offsetManager.read()).thenReturn(OFFSET_MANAGER_READ) + schemaHistoryManager = Mockito.mock(AirbyteSchemaHistoryStorage::class.java) + Mockito.`when`(schemaHistoryManager.read()).thenReturn(SCHEMA) + producer = + DebeziumMessageProducer( + cdcStateHandler, + targetPosition, + eventConverter!!, + offsetManager, + Optional.of(schemaHistoryManager) + ) + } + + @Test + fun testProcessRecordMessage() { + val message = Mockito.mock(ChangeEventWithMetadata::class.java) + + Mockito.`when`(targetPosition!!.isSameOffset(any(), any())).thenReturn(true) + producer!!.processRecordMessage(null, message) + Mockito.verify(eventConverter).toAirbyteMessage(message) + Assert.assertFalse(producer!!.shouldEmitStateMessage(null)) + } + + @Test + fun testProcessRecordMessageWithStateMessage() { + val message = Mockito.mock(ChangeEventWithMetadata::class.java) + + Mockito.`when`(targetPosition!!.isSameOffset(any(), any())).thenReturn(false) + Mockito.`when`(targetPosition!!.isEventAheadOffset(OFFSET_MANAGER_READ, message)) + .thenReturn(true) + producer!!.processRecordMessage(null, message) + Mockito.verify(eventConverter!!).toAirbyteMessage(message) + Assert.assertTrue(producer!!.shouldEmitStateMessage(null)) + + Mockito.`when`(cdcStateHandler!!.isCdcCheckpointEnabled).thenReturn(false) + Mockito.`when`(cdcStateHandler!!.saveState(eq(OFFSET_MANAGER_READ), eq(SCHEMA))) + .thenReturn(AirbyteMessage().withState(STATE_MESSAGE)) + + Assert.assertEquals(producer!!.generateStateMessageAtCheckpoint(null), STATE_MESSAGE) + } + + @Test + fun testGenerateFinalMessageNoProgress() { + Mockito.`when`(cdcStateHandler!!.saveState(eq(OFFSET_MANAGER_READ), eq(SCHEMA))) + .thenReturn(AirbyteMessage().withState(STATE_MESSAGE)) + + // initialOffset will be OFFSET_MANAGER_READ, final state would be OFFSET_MANAGER_READ2. + // Mock CDC handler will only accept OFFSET_MANAGER_READ. + Mockito.`when`>(offsetManager!!.read()).thenReturn(OFFSET_MANAGER_READ2) + + Mockito.`when`(targetPosition!!.isSameOffset(OFFSET_MANAGER_READ, OFFSET_MANAGER_READ2)) + .thenReturn(true) + + Assert.assertEquals(producer!!.createFinalStateMessage(null), STATE_MESSAGE) + } + + @Test + fun testGenerateFinalMessageWithProgress() { + Mockito.`when`(cdcStateHandler!!.saveState(eq(OFFSET_MANAGER_READ2), eq(SCHEMA))) + .thenReturn(AirbyteMessage().withState(STATE_MESSAGE)) + + // initialOffset will be OFFSET_MANAGER_READ, final state would be OFFSET_MANAGER_READ2. + // Mock CDC handler will only accept OFFSET_MANAGER_READ2. + Mockito.`when`>(offsetManager!!.read()).thenReturn(OFFSET_MANAGER_READ2) + Mockito.`when`(targetPosition!!.isSameOffset(OFFSET_MANAGER_READ, OFFSET_MANAGER_READ2)) + .thenReturn(false) + + Assert.assertEquals(producer!!.createFinalStateMessage(null), STATE_MESSAGE) + } + + companion object { + private val OFFSET_MANAGER_READ: Map = + HashMap(java.util.Map.of("key", "value")) + private val OFFSET_MANAGER_READ2: Map = + HashMap(java.util.Map.of("key2", "value2")) + + private val SCHEMA: AirbyteSchemaHistoryStorage.SchemaHistory = + AirbyteSchemaHistoryStorage.SchemaHistory("schema", false) + + private val STATE_MESSAGE: AirbyteStateMessage = + AirbyteStateMessage().withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/debezium/internals/DebeziumRecordIteratorTest.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/debezium/internals/DebeziumRecordIteratorTest.kt new file mode 100644 index 0000000000000..00e9d918201b7 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/debezium/internals/DebeziumRecordIteratorTest.kt @@ -0,0 +1,71 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.debezium.internals + +import io.airbyte.cdk.integrations.debezium.CdcTargetPosition +import io.debezium.engine.ChangeEvent +import java.time.Duration +import java.util.* +import org.apache.kafka.connect.source.SourceRecord +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.mockito.Mockito.mock + +class DebeziumRecordIteratorTest { + @Test + fun getHeartbeatPositionTest() { + val debeziumRecordIterator = + DebeziumRecordIterator( + mock(), + object : CdcTargetPosition { + override fun reachedTargetPosition( + changeEventWithMetadata: ChangeEventWithMetadata? + ): Boolean { + return false + } + + override fun extractPositionFromHeartbeatOffset( + sourceOffset: Map + ): Long { + return sourceOffset!!["lsn"] as Long + } + }, + { false }, + mock(), + Duration.ZERO, + Duration.ZERO + ) + val lsn = + debeziumRecordIterator.getHeartbeatPosition( + object : ChangeEvent { + private val sourceRecord = + SourceRecord( + null, + Collections.singletonMap("lsn", 358824993496L), + null, + null, + null + ) + + override fun key(): String? { + return null + } + + override fun value(): String { + return "{\"ts_ms\":1667616934701}" + } + + override fun destination(): String? { + return null + } + + fun sourceRecord(): SourceRecord { + return sourceRecord + } + } + ) + + Assertions.assertEquals(lsn, 358824993496L) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/debezium/internals/DebeziumShutdownProcedureTest.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/debezium/internals/DebeziumShutdownProcedureTest.kt new file mode 100644 index 0000000000000..df7eb675bcc8a --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/debezium/internals/DebeziumShutdownProcedureTest.kt @@ -0,0 +1,50 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.debezium.internals + +import java.util.concurrent.Executors +import java.util.concurrent.LinkedBlockingQueue +import java.util.concurrent.atomic.AtomicInteger +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +class DebeziumShutdownProcedureTest { + @Test + @Throws(InterruptedException::class) + fun test() { + val sourceQueue = LinkedBlockingQueue(10) + val recordsInserted = AtomicInteger() + val executorService = Executors.newSingleThreadExecutor() + val debeziumShutdownProcedure = + DebeziumShutdownProcedure( + sourceQueue, + { executorService.shutdown() }, + { recordsInserted.get() >= 99 } + ) + executorService.execute { + for (i in 0..99) { + try { + sourceQueue.put(i) + recordsInserted.set(i) + } catch (e: InterruptedException) { + throw RuntimeException(e) + } + } + } + + Thread.sleep(1000) + debeziumShutdownProcedure.initiateShutdownProcedure() + + Assertions.assertEquals(99, recordsInserted.get()) + Assertions.assertEquals(0, sourceQueue.size) + Assertions.assertEquals(100, debeziumShutdownProcedure.recordsRemainingAfterShutdown.size) + + for (i in 0..99) { + Assertions.assertEquals( + i, + debeziumShutdownProcedure.recordsRemainingAfterShutdown.poll() + ) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/debezium/internals/RecordWaitTimeUtilTest.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/debezium/internals/RecordWaitTimeUtilTest.kt new file mode 100644 index 0000000000000..19aa9ece08af6 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/debezium/internals/RecordWaitTimeUtilTest.kt @@ -0,0 +1,84 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.debezium.internals + +import io.airbyte.commons.json.Jsons +import java.time.Duration +import java.util.* +import java.util.Map +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +class RecordWaitTimeUtilTest { + @Test + fun testGetFirstRecordWaitTime() { + val emptyConfig = Jsons.jsonNode(emptyMap()) + Assertions.assertDoesNotThrow { RecordWaitTimeUtil.checkFirstRecordWaitTime(emptyConfig) } + Assertions.assertEquals( + Optional.empty(), + RecordWaitTimeUtil.getFirstRecordWaitSeconds(emptyConfig) + ) + Assertions.assertEquals( + RecordWaitTimeUtil.DEFAULT_FIRST_RECORD_WAIT_TIME, + RecordWaitTimeUtil.getFirstRecordWaitTime(emptyConfig) + ) + + val normalConfig = + Jsons.jsonNode( + Map.of( + "replication_method", + Map.of("method", "CDC", "initial_waiting_seconds", 500) + ) + ) + Assertions.assertDoesNotThrow { RecordWaitTimeUtil.checkFirstRecordWaitTime(normalConfig) } + Assertions.assertEquals( + Optional.of(500), + RecordWaitTimeUtil.getFirstRecordWaitSeconds(normalConfig) + ) + Assertions.assertEquals( + Duration.ofSeconds(500), + RecordWaitTimeUtil.getFirstRecordWaitTime(normalConfig) + ) + + val tooShortTimeout = RecordWaitTimeUtil.MIN_FIRST_RECORD_WAIT_TIME.seconds.toInt() - 1 + val tooShortConfig = + Jsons.jsonNode( + Map.of( + "replication_method", + Map.of("method", "CDC", "initial_waiting_seconds", tooShortTimeout) + ) + ) + Assertions.assertThrows(IllegalArgumentException::class.java) { + RecordWaitTimeUtil.checkFirstRecordWaitTime(tooShortConfig) + } + Assertions.assertEquals( + Optional.of(tooShortTimeout), + RecordWaitTimeUtil.getFirstRecordWaitSeconds(tooShortConfig) + ) + Assertions.assertEquals( + RecordWaitTimeUtil.MIN_FIRST_RECORD_WAIT_TIME, + RecordWaitTimeUtil.getFirstRecordWaitTime(tooShortConfig) + ) + + val tooLongTimeout = RecordWaitTimeUtil.MAX_FIRST_RECORD_WAIT_TIME.seconds.toInt() + 1 + val tooLongConfig = + Jsons.jsonNode( + Map.of( + "replication_method", + Map.of("method", "CDC", "initial_waiting_seconds", tooLongTimeout) + ) + ) + Assertions.assertThrows(IllegalArgumentException::class.java) { + RecordWaitTimeUtil.checkFirstRecordWaitTime(tooLongConfig) + } + Assertions.assertEquals( + Optional.of(tooLongTimeout), + RecordWaitTimeUtil.getFirstRecordWaitSeconds(tooLongConfig) + ) + Assertions.assertEquals( + RecordWaitTimeUtil.MAX_FIRST_RECORD_WAIT_TIME, + RecordWaitTimeUtil.getFirstRecordWaitTime(tooLongConfig) + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/jdbc/AbstractDbSourceForTest.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/jdbc/AbstractDbSourceForTest.kt new file mode 100644 index 0000000000000..4a049fd570c33 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/jdbc/AbstractDbSourceForTest.kt @@ -0,0 +1,20 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.source.jdbc + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.db.AbstractDatabase +import io.airbyte.cdk.integrations.source.relationaldb.AbstractDbSource +import io.airbyte.protocol.models.v0.AirbyteStateMessage + +abstract class AbstractDbSourceForTest( + driverClassName: String +) : AbstractDbSource(driverClassName) { + public override fun getSupportedStateType( + config: JsonNode? + ): AirbyteStateMessage.AirbyteStateType { + return super.getSupportedStateType(config) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/jdbc/DefaultJdbcSourceAcceptanceTest.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/jdbc/DefaultJdbcSourceAcceptanceTest.kt new file mode 100644 index 0000000000000..4d9155d412be3 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/jdbc/DefaultJdbcSourceAcceptanceTest.kt @@ -0,0 +1,219 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.jdbc + +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.collect.ImmutableMap +import io.airbyte.cdk.db.factory.DatabaseDriver +import io.airbyte.cdk.db.jdbc.JdbcUtils +import io.airbyte.cdk.db.jdbc.JdbcUtils.parseJdbcParameters +import io.airbyte.cdk.db.jdbc.streaming.AdaptiveStreamingQueryConfig +import io.airbyte.cdk.integrations.base.IntegrationRunner +import io.airbyte.cdk.integrations.base.Source +import io.airbyte.cdk.integrations.source.jdbc.DefaultJdbcSourceAcceptanceTest.BareBonesTestDatabase +import io.airbyte.cdk.integrations.source.jdbc.DefaultJdbcSourceAcceptanceTest.BareBonesTestDatabase.BareBonesConfigBuilder +import io.airbyte.cdk.integrations.source.jdbc.test.JdbcSourceAcceptanceTest +import io.airbyte.cdk.integrations.util.HostPortResolver.resolveHost +import io.airbyte.cdk.integrations.util.HostPortResolver.resolvePort +import io.airbyte.cdk.testutils.TestDatabase +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.v0.AirbyteStateMessage +import java.sql.JDBCType +import java.util.List +import java.util.Map +import java.util.function.Supplier +import java.util.stream.Stream +import org.jooq.SQLDialect +import org.junit.jupiter.api.AfterAll +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.BeforeAll +import org.junit.jupiter.api.Test +import org.slf4j.Logger +import org.slf4j.LoggerFactory +import org.testcontainers.containers.PostgreSQLContainer + +/** + * Runs the acceptance tests in the source-jdbc test module. We want this module to run these tests + * itself as a sanity check. The trade off here is that this class is duplicated from the one used + * in source-postgres. + */ +internal class DefaultJdbcSourceAcceptanceTest : + JdbcSourceAcceptanceTest< + DefaultJdbcSourceAcceptanceTest.PostgresTestSource, BareBonesTestDatabase>() { + override fun config(): JsonNode { + return testdb!!.testConfigBuilder()!!.build() + } + + override fun source(): PostgresTestSource { + return PostgresTestSource() + } + + override fun createTestDatabase(): BareBonesTestDatabase { + return BareBonesTestDatabase(PSQL_CONTAINER).initialized()!! + } + + public override fun supportsSchemas(): Boolean { + return true + } + + fun getConfigWithConnectionProperties( + psqlDb: PostgreSQLContainer<*>, + dbName: String, + additionalParameters: String + ): JsonNode { + return Jsons.jsonNode( + ImmutableMap.builder() + .put(JdbcUtils.HOST_KEY, resolveHost(psqlDb)) + .put(JdbcUtils.PORT_KEY, resolvePort(psqlDb)) + .put(JdbcUtils.DATABASE_KEY, dbName) + .put(JdbcUtils.SCHEMAS_KEY, List.of(SCHEMA_NAME)) + .put(JdbcUtils.USERNAME_KEY, psqlDb.username) + .put(JdbcUtils.PASSWORD_KEY, psqlDb.password) + .put(JdbcUtils.CONNECTION_PROPERTIES_KEY, additionalParameters) + .build() + ) + } + + class PostgresTestSource : + AbstractJdbcSource( + DRIVER_CLASS, + Supplier { AdaptiveStreamingQueryConfig() }, + JdbcUtils.defaultSourceOperations + ), + Source { + override fun toDatabaseConfig(config: JsonNode): JsonNode { + val configBuilder = + ImmutableMap.builder() + .put(JdbcUtils.USERNAME_KEY, config[JdbcUtils.USERNAME_KEY].asText()) + .put( + JdbcUtils.JDBC_URL_KEY, + String.format( + DatabaseDriver.POSTGRESQL.urlFormatString, + config[JdbcUtils.HOST_KEY].asText(), + config[JdbcUtils.PORT_KEY].asInt(), + config[JdbcUtils.DATABASE_KEY].asText() + ) + ) + + if (config.has(JdbcUtils.PASSWORD_KEY)) { + configBuilder.put(JdbcUtils.PASSWORD_KEY, config[JdbcUtils.PASSWORD_KEY].asText()) + } + + return Jsons.jsonNode(configBuilder.build()) + } + + override val excludedInternalNameSpaces = + setOf("information_schema", "pg_catalog", "pg_internal", "catalog_history") + + override fun getSupportedStateType( + config: JsonNode? + ): AirbyteStateMessage.AirbyteStateType { + return AirbyteStateMessage.AirbyteStateType.STREAM + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(PostgresTestSource::class.java) + + val DRIVER_CLASS: String = DatabaseDriver.POSTGRESQL.driverClassName + + @Throws(Exception::class) + @JvmStatic + fun main(args: Array) { + val source: Source = PostgresTestSource() + LOGGER.info("starting source: {}", PostgresTestSource::class.java) + IntegrationRunner(source).run(args) + LOGGER.info("completed source: {}", PostgresTestSource::class.java) + } + } + } + + class BareBonesTestDatabase(container: PostgreSQLContainer<*>) : + TestDatabase, BareBonesTestDatabase, BareBonesConfigBuilder>( + container + ) { + override fun inContainerBootstrapCmd(): Stream> { + val sql = + Stream.of( + String.format("CREATE DATABASE %s", databaseName), + String.format("CREATE USER %s PASSWORD '%s'", userName, password), + String.format( + "GRANT ALL PRIVILEGES ON DATABASE %s TO %s", + databaseName, + userName + ), + String.format("ALTER USER %s WITH SUPERUSER", userName) + ) + return Stream.of( + Stream.concat( + Stream.of( + "psql", + "-d", + container!!.databaseName, + "-U", + container.username, + "-v", + "ON_ERROR_STOP=1", + "-a" + ), + sql.flatMap { stmt: String? -> Stream.of("-c", stmt) } + ) + ) + } + + override fun inContainerUndoBootstrapCmd(): Stream { + return Stream.empty() + } + + override val databaseDriver: DatabaseDriver + get() = DatabaseDriver.POSTGRESQL + + override val sqlDialect: SQLDialect + get() = SQLDialect.POSTGRES + + override fun configBuilder(): BareBonesConfigBuilder { + return BareBonesConfigBuilder(this) + } + + class BareBonesConfigBuilder(testDatabase: BareBonesTestDatabase) : + ConfigBuilder(testDatabase) + } + + @Test + fun testCustomParametersOverwriteDefaultParametersExpectException() { + val connectionPropertiesUrl = "ssl=false" + val config = + getConfigWithConnectionProperties( + PSQL_CONTAINER, + testdb!!.databaseName, + connectionPropertiesUrl + ) + val customParameters = parseJdbcParameters(config, JdbcUtils.CONNECTION_PROPERTIES_KEY, "&") + val defaultParameters = Map.of("ssl", "true", "sslmode", "require") + Assertions.assertThrows(IllegalArgumentException::class.java) { + JdbcDataSourceUtils.assertCustomParametersDontOverwriteDefaultParameters( + customParameters, + defaultParameters + ) + } + } + + companion object { + private lateinit var PSQL_CONTAINER: PostgreSQLContainer<*> + + @JvmStatic + @BeforeAll + fun init(): Unit { + PSQL_CONTAINER = PostgreSQLContainer("postgres:13-alpine") + PSQL_CONTAINER!!.start() + CREATE_TABLE_WITHOUT_CURSOR_TYPE_QUERY = "CREATE TABLE %s (%s BIT(3) NOT NULL);" + INSERT_TABLE_WITHOUT_CURSOR_TYPE_QUERY = "INSERT INTO %s VALUES(B'101');" + } + + @JvmStatic + @AfterAll + fun cleanUp(): Unit { + PSQL_CONTAINER!!.close() + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/jdbc/DefaultJdbcStressTest.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/jdbc/DefaultJdbcStressTest.kt new file mode 100644 index 0000000000000..85902d6915ade --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/jdbc/DefaultJdbcStressTest.kt @@ -0,0 +1,153 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.jdbc + +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.collect.ImmutableMap +import io.airbyte.cdk.db.factory.DatabaseDriver +import io.airbyte.cdk.db.jdbc.JdbcUtils +import io.airbyte.cdk.db.jdbc.streaming.AdaptiveStreamingQueryConfig +import io.airbyte.cdk.integrations.base.IntegrationRunner +import io.airbyte.cdk.integrations.base.Source +import io.airbyte.cdk.integrations.source.jdbc.test.JdbcStressTest +import io.airbyte.cdk.testutils.PostgreSQLContainerHelper.runSqlScript +import io.airbyte.commons.io.IOs +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.string.Strings +import java.sql.JDBCType +import java.util.* +import java.util.function.Supplier +import org.junit.jupiter.api.AfterAll +import org.junit.jupiter.api.BeforeAll +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Disabled +import org.slf4j.Logger +import org.slf4j.LoggerFactory +import org.testcontainers.containers.PostgreSQLContainer +import org.testcontainers.utility.MountableFile + +/** + * Runs the stress tests in the source-jdbc test module. We want this module to run these tests + * itself as a sanity check. The trade off here is that this class is duplicated from the one used + * in source-postgres. + */ +@Disabled +internal class DefaultJdbcStressTest : JdbcStressTest() { + private var config: JsonNode? = null + + @BeforeEach + @Throws(Exception::class) + override fun setup() { + val dbName = Strings.addRandomSuffix("db", "_", 10) + + config = + Jsons.jsonNode( + ImmutableMap.of( + JdbcUtils.HOST_KEY, + "localhost", + JdbcUtils.PORT_KEY, + 5432, + JdbcUtils.DATABASE_KEY, + "charles", + JdbcUtils.USERNAME_KEY, + "postgres", + JdbcUtils.PASSWORD_KEY, + "" + ) + ) + + config = + Jsons.jsonNode( + ImmutableMap.builder() + .put(JdbcUtils.HOST_KEY, PSQL_DB!!.host) + .put(JdbcUtils.PORT_KEY, PSQL_DB!!.firstMappedPort) + .put(JdbcUtils.DATABASE_KEY, dbName) + .put(JdbcUtils.USERNAME_KEY, PSQL_DB!!.username) + .put(JdbcUtils.PASSWORD_KEY, PSQL_DB!!.password) + .build() + ) + + val initScriptName = "init_$dbName.sql" + val tmpFilePath = IOs.writeFileToRandomTmpDir(initScriptName, "CREATE DATABASE $dbName;") + runSqlScript(MountableFile.forHostPath(tmpFilePath), PSQL_DB!!) + + super.setup() + } + + override val defaultSchemaName = Optional.of("public") + + override fun getSource(): AbstractJdbcSource { + return PostgresTestSource() + } + + override fun getConfig(): JsonNode { + return config!! + } + + override val driverClass = PostgresTestSource.DRIVER_CLASS + + private class PostgresTestSource : + AbstractJdbcSource( + DRIVER_CLASS, + Supplier { AdaptiveStreamingQueryConfig() }, + JdbcUtils.defaultSourceOperations + ), + Source { + override fun toDatabaseConfig(config: JsonNode): JsonNode { + val configBuilder = + ImmutableMap.builder() + .put(JdbcUtils.USERNAME_KEY, config[JdbcUtils.USERNAME_KEY].asText()) + .put( + JdbcUtils.JDBC_URL_KEY, + String.format( + DatabaseDriver.POSTGRESQL.urlFormatString, + config[JdbcUtils.HOST_KEY].asText(), + config[JdbcUtils.PORT_KEY].asInt(), + config[JdbcUtils.DATABASE_KEY].asText() + ) + ) + + if (config.has(JdbcUtils.PASSWORD_KEY)) { + configBuilder.put(JdbcUtils.PASSWORD_KEY, config[JdbcUtils.PASSWORD_KEY].asText()) + } + + return Jsons.jsonNode(configBuilder.build()) + } + + public override val excludedInternalNameSpaces = + setOf("information_schema", "pg_catalog", "pg_internal", "catalog_history") + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(PostgresTestSource::class.java) + + val DRIVER_CLASS: String = DatabaseDriver.POSTGRESQL.driverClassName + + @Throws(Exception::class) + @JvmStatic + fun main(args: Array) { + val source: Source = PostgresTestSource() + LOGGER.info("starting source: {}", PostgresTestSource::class.java) + IntegrationRunner(source).run(args) + LOGGER.info("completed source: {}", PostgresTestSource::class.java) + } + } + } + + companion object { + private var PSQL_DB: PostgreSQLContainer? = null + + @BeforeAll + @JvmStatic + fun init() { + PSQL_DB = PostgreSQLContainer("postgres:13-alpine") + PSQL_DB!!.start() + } + + @AfterAll + @JvmStatic + fun cleanUp() { + PSQL_DB!!.close() + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/jdbc/JdbcDataSourceUtilsTest.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/jdbc/JdbcDataSourceUtilsTest.kt new file mode 100644 index 0000000000000..6a8dc1ab3d8b4 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/jdbc/JdbcDataSourceUtilsTest.kt @@ -0,0 +1,39 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.jdbc + +import io.airbyte.commons.json.Jsons +import java.util.function.Consumer +import org.assertj.core.api.AssertionsForClassTypes +import org.junit.Assert +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +class JdbcDataSourceUtilsTest { + @Test + fun test() { + val validConfigString = + "{\"jdbc_url_params\":\"key1=val1&key3=key3\",\"connection_properties\":\"key1=val1&key2=val2\"}" + val validConfig = Jsons.deserialize(validConfigString) + val connectionProperties = JdbcDataSourceUtils.getConnectionProperties(validConfig) + val validKeys = listOf("key1", "key2", "key3") + validKeys.forEach( + Consumer { key: String -> Assert.assertTrue(connectionProperties.containsKey(key)) } + ) + + // For an invalid config, there is a conflict betweeen the values of keys in jdbc_url_params + // and + // connection_properties + val invalidConfigString = + "{\"jdbc_url_params\":\"key1=val2&key3=key3\",\"connection_properties\":\"key1=val1&key2=val2\"}" + val invalidConfig = Jsons.deserialize(invalidConfigString) + val exception: Exception = + Assertions.assertThrows(IllegalArgumentException::class.java) { + JdbcDataSourceUtils.getConnectionProperties(invalidConfig) + } + + val expectedMessage = "Cannot overwrite default JDBC parameter key1" + AssertionsForClassTypes.assertThat(expectedMessage == exception.message) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/jdbc/JdbcSourceStressTest.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/jdbc/JdbcSourceStressTest.kt new file mode 100644 index 0000000000000..a9a5b87afb2cb --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/jdbc/JdbcSourceStressTest.kt @@ -0,0 +1,138 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.jdbc + +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.collect.ImmutableMap +import io.airbyte.cdk.db.factory.DatabaseDriver +import io.airbyte.cdk.db.jdbc.JdbcUtils +import io.airbyte.cdk.db.jdbc.streaming.AdaptiveStreamingQueryConfig +import io.airbyte.cdk.integrations.base.IntegrationRunner +import io.airbyte.cdk.integrations.base.Source +import io.airbyte.cdk.integrations.source.jdbc.test.JdbcStressTest +import io.airbyte.cdk.testutils.PostgreSQLContainerHelper.runSqlScript +import io.airbyte.commons.io.IOs +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.string.Strings +import java.sql.JDBCType +import java.util.* +import java.util.function.Supplier +import org.junit.jupiter.api.AfterAll +import org.junit.jupiter.api.BeforeAll +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Disabled +import org.slf4j.Logger +import org.slf4j.LoggerFactory +import org.testcontainers.containers.PostgreSQLContainer +import org.testcontainers.utility.MountableFile + +/** + * Runs the stress tests in the source-jdbc test module. We want this module to run these tests + * itself as a sanity check. The trade off here is that this class is duplicated from the one used + * in source-postgres. + */ +@Disabled +internal class JdbcSourceStressTest : JdbcStressTest() { + private var config: JsonNode? = null + + @BeforeEach + @Throws(Exception::class) + override fun setup() { + val schemaName = Strings.addRandomSuffix("db", "_", 10) + + config = + Jsons.jsonNode( + ImmutableMap.builder() + .put(JdbcUtils.HOST_KEY, PSQL_DB!!.host) + .put(JdbcUtils.PORT_KEY, PSQL_DB!!.firstMappedPort) + .put(JdbcUtils.DATABASE_KEY, schemaName) + .put(JdbcUtils.USERNAME_KEY, PSQL_DB!!.username) + .put(JdbcUtils.PASSWORD_KEY, PSQL_DB!!.password) + .build() + ) + + val initScriptName = "init_$schemaName.sql" + val tmpFilePath = + IOs.writeFileToRandomTmpDir(initScriptName, "CREATE DATABASE $schemaName;") + runSqlScript(MountableFile.forHostPath(tmpFilePath), PSQL_DB!!) + + super.setup() + } + + override val defaultSchemaName = Optional.of("public") + + override fun getSource(): AbstractJdbcSource { + return PostgresTestSource() + } + + override fun getConfig(): JsonNode { + return config!! + } + + override val driverClass = PostgresTestSource.DRIVER_CLASS + + private class PostgresTestSource : + AbstractJdbcSource( + DRIVER_CLASS, + Supplier { AdaptiveStreamingQueryConfig() }, + JdbcUtils.defaultSourceOperations + ), + Source { + override fun toDatabaseConfig(config: JsonNode): JsonNode { + val configBuilder = + ImmutableMap.builder() + .put(JdbcUtils.USERNAME_KEY, config[JdbcUtils.USERNAME_KEY].asText()) + .put( + JdbcUtils.JDBC_URL_KEY, + String.format( + DatabaseDriver.POSTGRESQL.urlFormatString, + config[JdbcUtils.HOST_KEY].asText(), + config[JdbcUtils.PORT_KEY].asInt(), + config[JdbcUtils.DATABASE_KEY].asText() + ) + ) + + if (config.has(JdbcUtils.PASSWORD_KEY)) { + configBuilder.put(JdbcUtils.PASSWORD_KEY, config[JdbcUtils.PASSWORD_KEY].asText()) + } + + return Jsons.jsonNode(configBuilder.build()) + } + + override val excludedInternalNameSpaces = + setOf("information_schema", "pg_catalog", "pg_internal", "catalog_history") + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(PostgresTestSource::class.java) + + val DRIVER_CLASS: String = DatabaseDriver.POSTGRESQL.driverClassName + + @Throws(Exception::class) + @JvmStatic + fun main(args: Array) { + val source: Source = PostgresTestSource() + LOGGER.info("starting source: {}", PostgresTestSource::class.java) + IntegrationRunner(source).run(args) + LOGGER.info("completed source: {}", PostgresTestSource::class.java) + } + } + } + + companion object { + private lateinit var PSQL_DB: PostgreSQLContainer + + @BeforeAll + @JvmStatic + fun init() { + PSQL_DB = PostgreSQLContainer("postgres:13-alpine") + PSQL_DB!!.start() + } + + @AfterAll + @JvmStatic + fun cleanUp() { + PSQL_DB!!.close() + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/relationaldb/AbstractDbSourceTest.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/relationaldb/AbstractDbSourceTest.kt new file mode 100644 index 0000000000000..a292255725f07 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/relationaldb/AbstractDbSourceTest.kt @@ -0,0 +1,110 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.relationaldb + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.source.jdbc.AbstractDbSourceForTest +import io.airbyte.cdk.integrations.source.relationaldb.state.* +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.resources.MoreResources +import io.airbyte.protocol.models.v0.AirbyteStateMessage +import java.io.IOException +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.extension.ExtendWith +import org.mockito.Mockito +import uk.org.webcompere.systemstubs.environment.EnvironmentVariables +import uk.org.webcompere.systemstubs.jupiter.SystemStub +import uk.org.webcompere.systemstubs.jupiter.SystemStubsExtension + +/** Test suite for the [AbstractDbSource] class. */ +@ExtendWith(SystemStubsExtension::class) +class AbstractDbSourceTest { + @SystemStub private val environmentVariables: EnvironmentVariables? = null + + @Test + @Throws(IOException::class) + fun testDeserializationOfLegacyState() { + val dbSource = + Mockito.mock( + AbstractDbSourceForTest::class.java, + Mockito.withSettings().useConstructor("").defaultAnswer(Mockito.CALLS_REAL_METHODS) + ) + val config = Mockito.mock(JsonNode::class.java) + + val legacyStateJson = MoreResources.readResource("states/legacy.json") + val legacyState = Jsons.deserialize(legacyStateJson) + + val result = + StateGeneratorUtils.deserializeInitialState( + legacyState, + dbSource.getSupportedStateType(config) + ) + Assertions.assertEquals(1, result.size) + Assertions.assertEquals(AirbyteStateMessage.AirbyteStateType.LEGACY, result[0].type) + } + + @Test + @Throws(IOException::class) + fun testDeserializationOfGlobalState() { + val dbSource = + Mockito.mock( + AbstractDbSourceForTest::class.java, + Mockito.withSettings().useConstructor("").defaultAnswer(Mockito.CALLS_REAL_METHODS) + ) + val config = Mockito.mock(JsonNode::class.java) + + val globalStateJson = MoreResources.readResource("states/global.json") + val globalState = Jsons.deserialize(globalStateJson) + + val result = + StateGeneratorUtils.deserializeInitialState( + globalState, + dbSource.getSupportedStateType(config) + ) + Assertions.assertEquals(1, result.size) + Assertions.assertEquals(AirbyteStateMessage.AirbyteStateType.GLOBAL, result[0].type) + } + + @Test + @Throws(IOException::class) + fun testDeserializationOfStreamState() { + val dbSource = + Mockito.mock( + AbstractDbSourceForTest::class.java, + Mockito.withSettings().useConstructor("").defaultAnswer(Mockito.CALLS_REAL_METHODS) + ) + val config = Mockito.mock(JsonNode::class.java) + + val streamStateJson = MoreResources.readResource("states/per_stream.json") + val streamState = Jsons.deserialize(streamStateJson) + + val result = + StateGeneratorUtils.deserializeInitialState( + streamState, + dbSource.getSupportedStateType(config) + ) + Assertions.assertEquals(2, result.size) + Assertions.assertEquals(AirbyteStateMessage.AirbyteStateType.STREAM, result[0].type) + } + + @Test + @Throws(IOException::class) + fun testDeserializationOfNullState() { + val dbSource = + Mockito.mock( + AbstractDbSourceForTest::class.java, + Mockito.withSettings().useConstructor("").defaultAnswer(Mockito.CALLS_REAL_METHODS) + ) + val config = Mockito.mock(JsonNode::class.java) + + val result = + StateGeneratorUtils.deserializeInitialState( + null, + dbSource.getSupportedStateType(config) + ) + Assertions.assertEquals(1, result.size) + Assertions.assertEquals(dbSource.getSupportedStateType(config), result[0].type) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/CursorManagerTest.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/CursorManagerTest.kt new file mode 100644 index 0000000000000..c3905e5043ea1 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/CursorManagerTest.kt @@ -0,0 +1,269 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.relationaldb.state + +import io.airbyte.cdk.integrations.source.relationaldb.CursorInfo +import io.airbyte.cdk.integrations.source.relationaldb.models.DbStreamState +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair +import java.util.* +import java.util.function.Function +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +/** Test suite for the [CursorManager] class. */ +class CursorManagerTest { + @Test + fun testCreateCursorInfoCatalogAndStateSameCursorField() { + val cursorManager = + createCursorManager( + StateTestConstants.CURSOR_FIELD1, + StateTestConstants.CURSOR, + StateTestConstants.NAME_NAMESPACE_PAIR1 + ) + val actual = + cursorManager.createCursorInfoForStream( + StateTestConstants.NAME_NAMESPACE_PAIR1, + StateTestConstants.getState( + StateTestConstants.CURSOR_FIELD1, + StateTestConstants.CURSOR, + StateTestConstants.CURSOR_RECORD_COUNT + ), + StateTestConstants.getStream(StateTestConstants.CURSOR_FIELD1), + { obj: DbStreamState? -> obj!!.cursor }, + { obj: DbStreamState? -> obj!!.cursorField }, + CURSOR_RECORD_COUNT_FUNCTION + ) + Assertions.assertEquals( + CursorInfo( + StateTestConstants.CURSOR_FIELD1, + StateTestConstants.CURSOR, + StateTestConstants.CURSOR_RECORD_COUNT, + StateTestConstants.CURSOR_FIELD1, + StateTestConstants.CURSOR, + StateTestConstants.CURSOR_RECORD_COUNT + ), + actual + ) + } + + @Test + fun testCreateCursorInfoCatalogAndStateSameCursorFieldButNoCursor() { + val cursorManager = + createCursorManager( + StateTestConstants.CURSOR_FIELD1, + null, + StateTestConstants.NAME_NAMESPACE_PAIR1 + ) + val actual = + cursorManager.createCursorInfoForStream( + StateTestConstants.NAME_NAMESPACE_PAIR1, + StateTestConstants.getState(StateTestConstants.CURSOR_FIELD1, null), + StateTestConstants.getStream(StateTestConstants.CURSOR_FIELD1), + { obj: DbStreamState? -> obj!!.cursor }, + { obj: DbStreamState? -> obj!!.cursorField }, + CURSOR_RECORD_COUNT_FUNCTION + ) + Assertions.assertEquals( + CursorInfo( + StateTestConstants.CURSOR_FIELD1, + null, + StateTestConstants.CURSOR_FIELD1, + null + ), + actual + ) + } + + @Test + fun testCreateCursorInfoCatalogAndStateChangeInCursorFieldName() { + val cursorManager = + createCursorManager( + StateTestConstants.CURSOR_FIELD1, + StateTestConstants.CURSOR, + StateTestConstants.NAME_NAMESPACE_PAIR1 + ) + val actual = + cursorManager.createCursorInfoForStream( + StateTestConstants.NAME_NAMESPACE_PAIR1, + StateTestConstants.getState( + StateTestConstants.CURSOR_FIELD1, + StateTestConstants.CURSOR + ), + StateTestConstants.getStream(StateTestConstants.CURSOR_FIELD2), + { obj: DbStreamState? -> obj!!.cursor }, + { obj: DbStreamState? -> obj!!.cursorField }, + CURSOR_RECORD_COUNT_FUNCTION + ) + Assertions.assertEquals( + CursorInfo( + StateTestConstants.CURSOR_FIELD1, + StateTestConstants.CURSOR, + StateTestConstants.CURSOR_FIELD2, + null + ), + actual + ) + } + + @Test + fun testCreateCursorInfoCatalogAndNoState() { + val cursorManager = + createCursorManager( + StateTestConstants.CURSOR_FIELD1, + StateTestConstants.CURSOR, + StateTestConstants.NAME_NAMESPACE_PAIR1 + ) + val actual = + cursorManager.createCursorInfoForStream( + StateTestConstants.NAME_NAMESPACE_PAIR1, + Optional.empty(), + StateTestConstants.getStream(StateTestConstants.CURSOR_FIELD1), + Function { obj: DbStreamState? -> obj!!.cursor }, + Function { obj: DbStreamState? -> obj!!.cursorField }, + CURSOR_RECORD_COUNT_FUNCTION + ) + Assertions.assertEquals( + CursorInfo(null, null, StateTestConstants.CURSOR_FIELD1, null), + actual + ) + } + + @Test + fun testCreateCursorInfoStateAndNoCatalog() { + val cursorManager = + createCursorManager( + StateTestConstants.CURSOR_FIELD1, + StateTestConstants.CURSOR, + StateTestConstants.NAME_NAMESPACE_PAIR1 + ) + val actual = + cursorManager.createCursorInfoForStream( + StateTestConstants.NAME_NAMESPACE_PAIR1, + StateTestConstants.getState( + StateTestConstants.CURSOR_FIELD1, + StateTestConstants.CURSOR + ), + Optional.empty(), + { obj: DbStreamState? -> obj!!.cursor }, + { obj: DbStreamState? -> obj!!.cursorField }, + CURSOR_RECORD_COUNT_FUNCTION + ) + Assertions.assertEquals( + CursorInfo(StateTestConstants.CURSOR_FIELD1, StateTestConstants.CURSOR, null, null), + actual + ) + } + + // this is what full refresh looks like. + @Test + fun testCreateCursorInfoNoCatalogAndNoState() { + val cursorManager = + createCursorManager( + StateTestConstants.CURSOR_FIELD1, + StateTestConstants.CURSOR, + StateTestConstants.NAME_NAMESPACE_PAIR1 + ) + val actual = + cursorManager.createCursorInfoForStream( + StateTestConstants.NAME_NAMESPACE_PAIR1, + Optional.empty(), + Optional.empty(), + Function { obj: DbStreamState? -> obj!!.cursor }, + Function { obj: DbStreamState? -> obj!!.cursorField }, + CURSOR_RECORD_COUNT_FUNCTION + ) + Assertions.assertEquals(CursorInfo(null, null, null, null), actual) + } + + @Test + fun testCreateCursorInfoStateAndCatalogButNoCursorField() { + val cursorManager = + createCursorManager( + StateTestConstants.CURSOR_FIELD1, + StateTestConstants.CURSOR, + StateTestConstants.NAME_NAMESPACE_PAIR1 + ) + val actual = + cursorManager.createCursorInfoForStream( + StateTestConstants.NAME_NAMESPACE_PAIR1, + StateTestConstants.getState( + StateTestConstants.CURSOR_FIELD1, + StateTestConstants.CURSOR + ), + StateTestConstants.getStream(null), + { obj: DbStreamState? -> obj!!.cursor }, + { obj: DbStreamState? -> obj!!.cursorField }, + CURSOR_RECORD_COUNT_FUNCTION + ) + Assertions.assertEquals( + CursorInfo(StateTestConstants.CURSOR_FIELD1, StateTestConstants.CURSOR, null, null), + actual + ) + } + + @Test + fun testGetters() { + val cursorManager: CursorManager<*> = + createCursorManager( + StateTestConstants.CURSOR_FIELD1, + StateTestConstants.CURSOR, + StateTestConstants.NAME_NAMESPACE_PAIR1 + ) + val actualCursorInfo = + CursorInfo(StateTestConstants.CURSOR_FIELD1, StateTestConstants.CURSOR, null, null) + + Assertions.assertEquals( + Optional.of(actualCursorInfo), + cursorManager.getCursorInfo(StateTestConstants.NAME_NAMESPACE_PAIR1) + ) + Assertions.assertEquals( + Optional.empty(), + cursorManager.getCursorField(StateTestConstants.NAME_NAMESPACE_PAIR1) + ) + Assertions.assertEquals( + Optional.empty(), + cursorManager.getCursor(StateTestConstants.NAME_NAMESPACE_PAIR1) + ) + + Assertions.assertEquals( + Optional.empty(), + cursorManager.getCursorInfo(StateTestConstants.NAME_NAMESPACE_PAIR2) + ) + Assertions.assertEquals( + Optional.empty(), + cursorManager.getCursorField(StateTestConstants.NAME_NAMESPACE_PAIR2) + ) + Assertions.assertEquals( + Optional.empty(), + cursorManager.getCursor(StateTestConstants.NAME_NAMESPACE_PAIR2) + ) + } + + private fun createCursorManager( + cursorField: String?, + cursor: String?, + nameNamespacePair: AirbyteStreamNameNamespacePair? + ): CursorManager { + val dbStreamState = StateTestConstants.getState(cursorField, cursor).get() + return CursorManager( + StateTestConstants.getCatalog(cursorField).orElse(null), + { setOf(dbStreamState) }, + { obj: DbStreamState? -> obj!!.cursor }, + { obj: DbStreamState? -> obj!!.cursorField }, + CURSOR_RECORD_COUNT_FUNCTION, + { s: DbStreamState? -> nameNamespacePair }, + false + ) + } + + companion object { + private val CURSOR_RECORD_COUNT_FUNCTION = Function { stream: DbStreamState -> + if (stream!!.cursorRecordCount != null) { + return@Function stream.cursorRecordCount + } else { + return@Function 0L + } + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/CursorStateMessageProducerTest.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/CursorStateMessageProducerTest.kt new file mode 100644 index 0000000000000..996b5e02c5196 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/CursorStateMessageProducerTest.kt @@ -0,0 +1,540 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.relationaldb.state + +import com.fasterxml.jackson.databind.node.ObjectNode +import io.airbyte.cdk.integrations.source.relationaldb.models.DbState +import io.airbyte.cdk.integrations.source.relationaldb.models.DbStreamState +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.util.MoreIterators +import io.airbyte.protocol.models.Field +import io.airbyte.protocol.models.JsonSchemaType +import io.airbyte.protocol.models.v0.* +import java.sql.SQLException +import java.time.Duration +import java.util.* +import java.util.List +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.testcontainers.shaded.com.google.common.collect.ImmutableMap + +internal class CursorStateMessageProducerTest { + private fun createExceptionIterator(): Iterator { + return object : Iterator { + val internalMessageIterator: Iterator = + MoreIterators.of( + RECORD_MESSAGE_1, + RECORD_MESSAGE_2, + RECORD_MESSAGE_2, + RECORD_MESSAGE_3 + ) + + override fun hasNext(): Boolean { + return true + } + + override fun next(): AirbyteMessage { + if (internalMessageIterator.hasNext()) { + return internalMessageIterator.next() + } else { + // this line throws a RunTimeException wrapped around a SQLException to mimic + // the flow of when a + // SQLException is thrown and wrapped in + // StreamingJdbcDatabase#tryAdvance + throw RuntimeException( + SQLException( + "Connection marked broken because of SQLSTATE(080006)", + "08006" + ) + ) + } + } + } + } + + private var stateManager: StateManager? = null + + @BeforeEach + fun setup() { + val airbyteStream = AirbyteStream().withNamespace(NAMESPACE).withName(STREAM_NAME) + val configuredAirbyteStream = + ConfiguredAirbyteStream() + .withStream(airbyteStream) + .withCursorField(listOf(UUID_FIELD_NAME)) + + stateManager = + StreamStateManager( + emptyList(), + ConfiguredAirbyteCatalog().withStreams(listOf(configuredAirbyteStream)) + ) + } + + @Test + fun testWithoutInitialCursor() { + messageIterator = MoreIterators.of(RECORD_MESSAGE_1, RECORD_MESSAGE_2) + + val producer = CursorStateMessageProducer(stateManager, Optional.empty()) + + val iterator: SourceStateIterator<*> = + SourceStateIterator( + messageIterator, + STREAM, + producer, + StateEmitFrequency(0, Duration.ZERO) + ) + + Assertions.assertEquals(RECORD_MESSAGE_1, iterator.next()) + Assertions.assertEquals(RECORD_MESSAGE_2, iterator.next()) + Assertions.assertEquals(createStateMessage(RECORD_VALUE_2, 1, 2.0), iterator.next()) + Assertions.assertFalse(iterator.hasNext()) + } + + @Test + fun testWithInitialCursor() { + // record 1 and 2 has smaller cursor value, so at the end, the initial cursor is emitted + // with 0 + // record count + + messageIterator = MoreIterators.of(RECORD_MESSAGE_1, RECORD_MESSAGE_2) + + val producer = CursorStateMessageProducer(stateManager, Optional.of(RECORD_VALUE_5)) + + val iterator: SourceStateIterator<*> = + SourceStateIterator( + messageIterator, + STREAM, + producer, + StateEmitFrequency(0, Duration.ZERO) + ) + + Assertions.assertEquals(RECORD_MESSAGE_1, iterator.next()) + Assertions.assertEquals(RECORD_MESSAGE_2, iterator.next()) + Assertions.assertEquals(createStateMessage(RECORD_VALUE_5, 0, 2.0), iterator.next()) + Assertions.assertFalse(iterator.hasNext()) + } + + @Test + fun testCursorFieldIsEmpty() { + val recordMessage = Jsons.clone(RECORD_MESSAGE_1) + (recordMessage.record.data as ObjectNode).remove(UUID_FIELD_NAME) + val messageStream = MoreIterators.of(recordMessage) + + val producer = CursorStateMessageProducer(stateManager, Optional.empty()) + + val iterator: SourceStateIterator<*> = + SourceStateIterator( + messageStream, + STREAM, + producer, + StateEmitFrequency(0, Duration.ZERO) + ) + + Assertions.assertEquals(recordMessage, iterator.next()) + // null because no records with a cursor field were replicated for the stream. + Assertions.assertEquals(createEmptyStateMessage(1.0), iterator.next()) + Assertions.assertFalse(iterator.hasNext()) + } + + @Test + fun testIteratorCatchesExceptionWhenEmissionFrequencyNonZero() { + val exceptionIterator = createExceptionIterator() + + val producer = CursorStateMessageProducer(stateManager, Optional.of(RECORD_VALUE_1)) + + val iterator: SourceStateIterator<*> = + SourceStateIterator( + exceptionIterator, + STREAM, + producer, + StateEmitFrequency(1, Duration.ZERO) + ) + + Assertions.assertEquals(RECORD_MESSAGE_1, iterator.next()) + Assertions.assertEquals(RECORD_MESSAGE_2, iterator.next()) + // continues to emit RECORD_MESSAGE_2 since cursorField has not changed thus not satisfying + // the + // condition of "ready" + Assertions.assertEquals(RECORD_MESSAGE_2, iterator.next()) + Assertions.assertEquals(RECORD_MESSAGE_3, iterator.next()) + // emits the first state message since the iterator has changed cursorFields (2 -> 3) and + // met the + // frequency minimum of 1 record + Assertions.assertEquals(createStateMessage(RECORD_VALUE_2, 2, 4.0), iterator.next()) + // no further records to read since Exception was caught above and marked iterator as + // endOfData() + Assertions.assertThrows(FailedRecordIteratorException::class.java) { iterator.hasNext() } + } + + @Test + fun testIteratorCatchesExceptionWhenEmissionFrequencyZero() { + val exceptionIterator = createExceptionIterator() + + val producer = CursorStateMessageProducer(stateManager, Optional.of(RECORD_VALUE_1)) + + val iterator: SourceStateIterator<*> = + SourceStateIterator( + exceptionIterator, + STREAM, + producer, + StateEmitFrequency(0, Duration.ZERO) + ) + + Assertions.assertEquals(RECORD_MESSAGE_1, iterator.next()) + Assertions.assertEquals(RECORD_MESSAGE_2, iterator.next()) + Assertions.assertEquals(RECORD_MESSAGE_2, iterator.next()) + Assertions.assertEquals(RECORD_MESSAGE_3, iterator.next()) + + Assertions.assertThrows(RuntimeException::class.java) { iterator.hasNext() } + } + + @Test + fun testEmptyStream() { + val producer = CursorStateMessageProducer(stateManager, Optional.empty()) + + val iterator: SourceStateIterator<*> = + SourceStateIterator( + Collections.emptyIterator(), + STREAM, + producer, + StateEmitFrequency(1, Duration.ZERO) + ) + + Assertions.assertEquals(EMPTY_STATE_MESSAGE, iterator.next()) + Assertions.assertFalse(iterator.hasNext()) + } + + @Test + fun testUnicodeNull() { + val recordValueWithNull = "abc\u0000" + val recordMessageWithNull = createRecordMessage(recordValueWithNull) + + // UTF8 null \u0000 is removed from the cursor value in the state message + messageIterator = MoreIterators.of(recordMessageWithNull) + + val producer = CursorStateMessageProducer(stateManager, Optional.empty()) + + val iterator: SourceStateIterator<*> = + SourceStateIterator( + messageIterator, + STREAM, + producer, + StateEmitFrequency(0, Duration.ZERO) + ) + + Assertions.assertEquals(recordMessageWithNull, iterator.next()) + Assertions.assertEquals(createStateMessage(RECORD_VALUE_1, 1, 1.0), iterator.next()) + Assertions.assertFalse(iterator.hasNext()) + } + + @Test + fun testStateEmissionFrequency1() { + messageIterator = + MoreIterators.of( + RECORD_MESSAGE_1, + RECORD_MESSAGE_2, + RECORD_MESSAGE_3, + RECORD_MESSAGE_4, + RECORD_MESSAGE_5 + ) + + val producer = CursorStateMessageProducer(stateManager, Optional.empty()) + + val iterator: SourceStateIterator<*> = + SourceStateIterator( + messageIterator, + STREAM, + producer, + StateEmitFrequency(1, Duration.ZERO) + ) + + Assertions.assertEquals(RECORD_MESSAGE_1, iterator.next()) + // should emit state 1, but it is unclear whether there will be more + // records with the same cursor value, so no state is ready for emission + Assertions.assertEquals(RECORD_MESSAGE_2, iterator.next()) + // emit state 1 because it is the latest state ready for emission + Assertions.assertEquals(createStateMessage(RECORD_VALUE_1, 1, 2.0), iterator.next()) + Assertions.assertEquals(RECORD_MESSAGE_3, iterator.next()) + Assertions.assertEquals(createStateMessage(RECORD_VALUE_2, 1, 1.0), iterator.next()) + Assertions.assertEquals(RECORD_MESSAGE_4, iterator.next()) + Assertions.assertEquals(createStateMessage(RECORD_VALUE_3, 1, 1.0), iterator.next()) + Assertions.assertEquals(RECORD_MESSAGE_5, iterator.next()) + // state 4 is not emitted because there is no more record and only + // the final state should be emitted at this point; also the final + // state should only be emitted once + Assertions.assertEquals(createStateMessage(RECORD_VALUE_5, 1, 1.0), iterator.next()) + Assertions.assertFalse(iterator.hasNext()) + } + + @Test + fun testStateEmissionFrequency2() { + messageIterator = + MoreIterators.of( + RECORD_MESSAGE_1, + RECORD_MESSAGE_2, + RECORD_MESSAGE_3, + RECORD_MESSAGE_4, + RECORD_MESSAGE_5 + ) + + val producer = CursorStateMessageProducer(stateManager, Optional.empty()) + + val iterator: SourceStateIterator<*> = + SourceStateIterator( + messageIterator, + STREAM, + producer, + StateEmitFrequency(2, Duration.ZERO) + ) + + Assertions.assertEquals(RECORD_MESSAGE_1, iterator.next()) + Assertions.assertEquals(RECORD_MESSAGE_2, iterator.next()) + // emit state 1 because it is the latest state ready for emission + Assertions.assertEquals(createStateMessage(RECORD_VALUE_1, 1, 2.0), iterator.next()) + Assertions.assertEquals(RECORD_MESSAGE_3, iterator.next()) + Assertions.assertEquals(RECORD_MESSAGE_4, iterator.next()) + // emit state 3 because it is the latest state ready for emission + Assertions.assertEquals(createStateMessage(RECORD_VALUE_3, 1, 2.0), iterator.next()) + Assertions.assertEquals(RECORD_MESSAGE_5, iterator.next()) + Assertions.assertEquals(createStateMessage(RECORD_VALUE_5, 1, 1.0), iterator.next()) + Assertions.assertFalse(iterator.hasNext()) + } + + @Test + fun testStateEmissionWhenInitialCursorIsNotNull() { + messageIterator = + MoreIterators.of(RECORD_MESSAGE_2, RECORD_MESSAGE_3, RECORD_MESSAGE_4, RECORD_MESSAGE_5) + + val producer = CursorStateMessageProducer(stateManager, Optional.of(RECORD_VALUE_1)) + + val iterator: SourceStateIterator<*> = + SourceStateIterator( + messageIterator, + STREAM, + producer, + StateEmitFrequency(1, Duration.ZERO) + ) + + Assertions.assertEquals(RECORD_MESSAGE_2, iterator.next()) + Assertions.assertEquals(RECORD_MESSAGE_3, iterator.next()) + Assertions.assertEquals(createStateMessage(RECORD_VALUE_2, 1, 2.0), iterator.next()) + Assertions.assertEquals(RECORD_MESSAGE_4, iterator.next()) + Assertions.assertEquals(createStateMessage(RECORD_VALUE_3, 1, 1.0), iterator.next()) + Assertions.assertEquals(RECORD_MESSAGE_5, iterator.next()) + Assertions.assertEquals(createStateMessage(RECORD_VALUE_5, 1, 1.0), iterator.next()) + Assertions.assertFalse(iterator.hasNext()) + } + + /** + * Incremental syncs will sort the table with the cursor field, and emit the max cursor for + * every N records. The purpose is to emit the states frequently, so that if any transient + * failure occurs during a long sync, the next run does not need to start from the beginning, + * but can resume from the last successful intermediate state committed on the destination. The + * next run will start with `cursorField > cursor`. However, it is possible that there are + * multiple records with the same cursor value. If the intermediate state is emitted before all + * these records have been synced to the destination, some of these records may be lost. + * + * Here is an example: + * + *

     | Record ID | Cursor Field | Other Field | Note | | --------- | ------------ |
    +     * ----------- | ----------------------------- | | 1 | F1=16 | F2="abc" | | | 2 | F1=16 |
    +     * F2="def" | <- state emission and failure | | 3 | F1=16 | F2="ghi" | | 
    * + * + * If the intermediate state is emitted for record 2 and the sync fails immediately such that + * the cursor value `16` is committed, but only record 1 and 2 are actually synced, the next run + * will start with `F1 > 16` and skip record 3. + * + * So intermediate state emission should only happen when all records with the same cursor value + * has been synced to destination. Reference: + * [link](https://github.com/airbytehq/airbyte/issues/15427) + */ + @Test + fun testStateEmissionForRecordsSharingSameCursorValue() { + messageIterator = + MoreIterators.of( + RECORD_MESSAGE_2, + RECORD_MESSAGE_2, + RECORD_MESSAGE_3, + RECORD_MESSAGE_3, + RECORD_MESSAGE_3, + RECORD_MESSAGE_4, + RECORD_MESSAGE_5, + RECORD_MESSAGE_5 + ) + + val producer = CursorStateMessageProducer(stateManager, Optional.of(RECORD_VALUE_1)) + + val iterator: SourceStateIterator<*> = + SourceStateIterator( + messageIterator, + STREAM, + producer, + StateEmitFrequency(1, Duration.ZERO) + ) + + Assertions.assertEquals(RECORD_MESSAGE_2, iterator.next()) + Assertions.assertEquals(RECORD_MESSAGE_2, iterator.next()) + Assertions.assertEquals(RECORD_MESSAGE_3, iterator.next()) + // state 2 is the latest state ready for emission because + // all records with the same cursor value have been emitted + Assertions.assertEquals(createStateMessage(RECORD_VALUE_2, 2, 3.0), iterator.next()) + Assertions.assertEquals(RECORD_MESSAGE_3, iterator.next()) + Assertions.assertEquals(RECORD_MESSAGE_3, iterator.next()) + Assertions.assertEquals(RECORD_MESSAGE_4, iterator.next()) + Assertions.assertEquals(createStateMessage(RECORD_VALUE_3, 3, 3.0), iterator.next()) + Assertions.assertEquals(RECORD_MESSAGE_5, iterator.next()) + Assertions.assertEquals(createStateMessage(RECORD_VALUE_4, 1, 1.0), iterator.next()) + Assertions.assertEquals(RECORD_MESSAGE_5, iterator.next()) + Assertions.assertEquals(createStateMessage(RECORD_VALUE_5, 2, 1.0), iterator.next()) + Assertions.assertFalse(iterator.hasNext()) + } + + @Test + fun testStateEmissionForRecordsSharingSameCursorValueButDifferentStatsCount() { + messageIterator = + MoreIterators.of( + RECORD_MESSAGE_2, + RECORD_MESSAGE_2, + RECORD_MESSAGE_2, + RECORD_MESSAGE_2, + RECORD_MESSAGE_3, + RECORD_MESSAGE_3, + RECORD_MESSAGE_3, + RECORD_MESSAGE_3, + RECORD_MESSAGE_3, + RECORD_MESSAGE_3, + RECORD_MESSAGE_3 + ) + + val producer = CursorStateMessageProducer(stateManager, Optional.of(RECORD_VALUE_1)) + + val iterator: SourceStateIterator<*> = + SourceStateIterator( + messageIterator, + STREAM, + producer, + StateEmitFrequency(10, Duration.ZERO) + ) + + Assertions.assertEquals(RECORD_MESSAGE_2, iterator.next()) + Assertions.assertEquals(RECORD_MESSAGE_2, iterator.next()) + Assertions.assertEquals(RECORD_MESSAGE_2, iterator.next()) + Assertions.assertEquals(RECORD_MESSAGE_2, iterator.next()) + Assertions.assertEquals(RECORD_MESSAGE_3, iterator.next()) + Assertions.assertEquals(RECORD_MESSAGE_3, iterator.next()) + Assertions.assertEquals(RECORD_MESSAGE_3, iterator.next()) + Assertions.assertEquals(RECORD_MESSAGE_3, iterator.next()) + Assertions.assertEquals(RECORD_MESSAGE_3, iterator.next()) + Assertions.assertEquals(RECORD_MESSAGE_3, iterator.next()) + // state 2 is the latest state ready for emission because + // all records with the same cursor value have been emitted + Assertions.assertEquals(createStateMessage(RECORD_VALUE_2, 4, 10.0), iterator.next()) + Assertions.assertEquals(RECORD_MESSAGE_3, iterator.next()) + Assertions.assertEquals(createStateMessage(RECORD_VALUE_3, 7, 1.0), iterator.next()) + Assertions.assertFalse(iterator.hasNext()) + } + + companion object { + private const val NAMESPACE = "public" + private const val STREAM_NAME = "shoes" + private const val UUID_FIELD_NAME = "ascending_inventory_uuid" + + private val STREAM: ConfiguredAirbyteStream = + CatalogHelpers.createConfiguredAirbyteStream( + STREAM_NAME, + NAMESPACE, + Field.of(UUID_FIELD_NAME, JsonSchemaType.STRING) + ) + .withCursorField(List.of(UUID_FIELD_NAME)) + + private val EMPTY_STATE_MESSAGE = createEmptyStateMessage(0.0) + + private const val RECORD_VALUE_1 = "abc" + private val RECORD_MESSAGE_1 = createRecordMessage(RECORD_VALUE_1) + + private const val RECORD_VALUE_2 = "def" + private val RECORD_MESSAGE_2 = createRecordMessage(RECORD_VALUE_2) + + private const val RECORD_VALUE_3 = "ghi" + private val RECORD_MESSAGE_3 = createRecordMessage(RECORD_VALUE_3) + + private const val RECORD_VALUE_4 = "jkl" + private val RECORD_MESSAGE_4 = createRecordMessage(RECORD_VALUE_4) + + private const val RECORD_VALUE_5 = "xyz" + private val RECORD_MESSAGE_5 = createRecordMessage(RECORD_VALUE_5) + + private fun createRecordMessage(recordValue: String): AirbyteMessage { + return AirbyteMessage() + .withType(AirbyteMessage.Type.RECORD) + .withRecord( + AirbyteRecordMessage() + .withData(Jsons.jsonNode(ImmutableMap.of(UUID_FIELD_NAME, recordValue))) + ) + } + + private fun createStateMessage( + recordValue: String, + cursorRecordCount: Long, + statsRecordCount: Double + ): AirbyteMessage { + val dbStreamState = + DbStreamState() + .withCursorField(listOf(UUID_FIELD_NAME)) + .withCursor(recordValue) + .withStreamName(STREAM_NAME) + .withStreamNamespace(NAMESPACE) + if (cursorRecordCount > 0) { + dbStreamState.withCursorRecordCount(cursorRecordCount) + } + val dbState = DbState().withCdc(false).withStreams(listOf(dbStreamState)) + return AirbyteMessage() + .withType(AirbyteMessage.Type.STATE) + .withState( + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream( + AirbyteStreamState() + .withStreamDescriptor( + StreamDescriptor() + .withName(STREAM_NAME) + .withNamespace(NAMESPACE) + ) + .withStreamState(Jsons.jsonNode(dbStreamState)) + ) + .withData(Jsons.jsonNode(dbState)) + .withSourceStats(AirbyteStateStats().withRecordCount(statsRecordCount)) + ) + } + + private fun createEmptyStateMessage(statsRecordCount: Double): AirbyteMessage { + val dbStreamState = + DbStreamState() + .withCursorField(listOf(UUID_FIELD_NAME)) + .withStreamName(STREAM_NAME) + .withStreamNamespace(NAMESPACE) + + val dbState = DbState().withCdc(false).withStreams(listOf(dbStreamState)) + return AirbyteMessage() + .withType(AirbyteMessage.Type.STATE) + .withState( + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream( + AirbyteStreamState() + .withStreamDescriptor( + StreamDescriptor() + .withName(STREAM_NAME) + .withNamespace(NAMESPACE) + ) + .withStreamState(Jsons.jsonNode(dbStreamState)) + ) + .withData(Jsons.jsonNode(dbState)) + .withSourceStats(AirbyteStateStats().withRecordCount(statsRecordCount)) + ) + } + + private lateinit var messageIterator: Iterator + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/GlobalStateManagerTest.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/GlobalStateManagerTest.kt new file mode 100644 index 0000000000000..ec7521360f37d --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/GlobalStateManagerTest.kt @@ -0,0 +1,408 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.relationaldb.state + +import io.airbyte.cdk.integrations.source.relationaldb.models.CdcState +import io.airbyte.cdk.integrations.source.relationaldb.models.DbState +import io.airbyte.cdk.integrations.source.relationaldb.models.DbStreamState +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.v0.* +import java.util.* +import java.util.List +import java.util.Map +import java.util.stream.Collectors +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Disabled +import org.junit.jupiter.api.Test +import org.mockito.Mockito + +/** Test suite for the [GlobalStateManager] class. */ +class GlobalStateManagerTest { + @Test + fun testCdcStateManager() { + val catalog = Mockito.mock(ConfiguredAirbyteCatalog::class.java) + val cdcState = CdcState().withState(Jsons.jsonNode(Map.of("foo", "bar", "baz", 5))) + val globalState = + AirbyteGlobalState() + .withSharedState(Jsons.jsonNode(cdcState)) + .withStreamStates( + List.of( + AirbyteStreamState() + .withStreamDescriptor( + StreamDescriptor().withNamespace("namespace").withName("name") + ) + .withStreamState(Jsons.jsonNode(DbStreamState())) + ) + ) + val stateManager: StateManager = + GlobalStateManager( + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + .withGlobal(globalState), + catalog + ) + Assertions.assertNotNull(stateManager.cdcStateManager) + Assertions.assertEquals(cdcState, stateManager.cdcStateManager.cdcState) + Assertions.assertEquals(1, stateManager.cdcStateManager.initialStreamsSynced!!.size) + Assertions.assertTrue( + stateManager.cdcStateManager.initialStreamsSynced!!.contains( + AirbyteStreamNameNamespacePair("name", "namespace") + ) + ) + } + + @Test + fun testToStateFromLegacyState() { + val catalog = + ConfiguredAirbyteCatalog() + .withStreams( + List.of( + ConfiguredAirbyteStream() + .withStream( + AirbyteStream() + .withName(StateTestConstants.STREAM_NAME1) + .withNamespace(StateTestConstants.NAMESPACE) + ) + .withCursorField(List.of(StateTestConstants.CURSOR_FIELD1)), + ConfiguredAirbyteStream() + .withStream( + AirbyteStream() + .withName(StateTestConstants.STREAM_NAME2) + .withNamespace(StateTestConstants.NAMESPACE) + ) + .withCursorField(List.of(StateTestConstants.CURSOR_FIELD2)), + ConfiguredAirbyteStream() + .withStream( + AirbyteStream() + .withName(StateTestConstants.STREAM_NAME3) + .withNamespace(StateTestConstants.NAMESPACE) + ) + ) + ) + + val cdcState = CdcState().withState(Jsons.jsonNode(Map.of("foo", "bar", "baz", 5))) + val dbState = + DbState() + .withCdc(true) + .withCdcState(cdcState) + .withStreams( + List.of( + DbStreamState() + .withStreamName(StateTestConstants.STREAM_NAME1) + .withStreamNamespace(StateTestConstants.NAMESPACE) + .withCursorField(List.of(StateTestConstants.CURSOR_FIELD1)) + .withCursor("a"), + DbStreamState() + .withStreamName(StateTestConstants.STREAM_NAME2) + .withStreamNamespace(StateTestConstants.NAMESPACE) + .withCursorField(List.of(StateTestConstants.CURSOR_FIELD2)), + DbStreamState() + .withStreamName(StateTestConstants.STREAM_NAME3) + .withStreamNamespace(StateTestConstants.NAMESPACE) + ) + .stream() + .sorted(Comparator.comparing { obj: DbStreamState -> obj.streamName }) + .collect(Collectors.toList()) + ) + val stateManager: StateManager = + GlobalStateManager(AirbyteStateMessage().withData(Jsons.jsonNode(dbState)), catalog) + + val expectedRecordCount = 19L + val expectedDbState = + DbState() + .withCdc(true) + .withCdcState(cdcState) + .withStreams( + List.of( + DbStreamState() + .withStreamName(StateTestConstants.STREAM_NAME1) + .withStreamNamespace(StateTestConstants.NAMESPACE) + .withCursorField(List.of(StateTestConstants.CURSOR_FIELD1)) + .withCursor("a") + .withCursorRecordCount(expectedRecordCount), + DbStreamState() + .withStreamName(StateTestConstants.STREAM_NAME2) + .withStreamNamespace(StateTestConstants.NAMESPACE) + .withCursorField(List.of(StateTestConstants.CURSOR_FIELD2)), + DbStreamState() + .withStreamName(StateTestConstants.STREAM_NAME3) + .withStreamNamespace(StateTestConstants.NAMESPACE) + ) + .stream() + .sorted(Comparator.comparing { obj: DbStreamState -> obj.streamName }) + .collect(Collectors.toList()) + ) + + val expectedGlobalState = + AirbyteGlobalState() + .withSharedState(Jsons.jsonNode(cdcState)) + .withStreamStates( + List.of( + AirbyteStreamState() + .withStreamDescriptor( + StreamDescriptor() + .withName(StateTestConstants.STREAM_NAME1) + .withNamespace(StateTestConstants.NAMESPACE) + ) + .withStreamState( + Jsons.jsonNode( + DbStreamState() + .withStreamName(StateTestConstants.STREAM_NAME1) + .withStreamNamespace(StateTestConstants.NAMESPACE) + .withCursorField( + List.of(StateTestConstants.CURSOR_FIELD1) + ) + .withCursor("a") + .withCursorRecordCount(expectedRecordCount) + ) + ), + AirbyteStreamState() + .withStreamDescriptor( + StreamDescriptor() + .withName(StateTestConstants.STREAM_NAME2) + .withNamespace(StateTestConstants.NAMESPACE) + ) + .withStreamState( + Jsons.jsonNode( + DbStreamState() + .withStreamName(StateTestConstants.STREAM_NAME2) + .withStreamNamespace(StateTestConstants.NAMESPACE) + .withCursorField( + List.of(StateTestConstants.CURSOR_FIELD2) + ) + ) + ), + AirbyteStreamState() + .withStreamDescriptor( + StreamDescriptor() + .withName(StateTestConstants.STREAM_NAME3) + .withNamespace(StateTestConstants.NAMESPACE) + ) + .withStreamState( + Jsons.jsonNode( + DbStreamState() + .withStreamName(StateTestConstants.STREAM_NAME3) + .withStreamNamespace(StateTestConstants.NAMESPACE) + ) + ) + ) + .stream() + .sorted( + Comparator.comparing { o: AirbyteStreamState -> + o.streamDescriptor.name + } + ) + .collect(Collectors.toList()) + ) + val expected = + AirbyteStateMessage() + .withData(Jsons.jsonNode(expectedDbState)) + .withGlobal(expectedGlobalState) + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + + val actualFirstEmission = + stateManager.updateAndEmit( + StateTestConstants.NAME_NAMESPACE_PAIR1, + "a", + expectedRecordCount + ) + Assertions.assertEquals(expected, actualFirstEmission) + } + + // Discovered during CDK migration. + // Failure is: Could not find cursor information for stream: public_cars + @Disabled("Failing test.") + @Test + fun testToState() { + val catalog = + ConfiguredAirbyteCatalog() + .withStreams( + List.of( + ConfiguredAirbyteStream() + .withStream( + AirbyteStream() + .withName(StateTestConstants.STREAM_NAME1) + .withNamespace(StateTestConstants.NAMESPACE) + ) + .withCursorField(List.of(StateTestConstants.CURSOR_FIELD1)), + ConfiguredAirbyteStream() + .withStream( + AirbyteStream() + .withName(StateTestConstants.STREAM_NAME2) + .withNamespace(StateTestConstants.NAMESPACE) + ) + .withCursorField(List.of(StateTestConstants.CURSOR_FIELD2)), + ConfiguredAirbyteStream() + .withStream( + AirbyteStream() + .withName(StateTestConstants.STREAM_NAME3) + .withNamespace(StateTestConstants.NAMESPACE) + ) + ) + ) + + val cdcState = CdcState().withState(Jsons.jsonNode(Map.of("foo", "bar", "baz", 5))) + val globalState = + AirbyteGlobalState() + .withSharedState(Jsons.jsonNode(DbState())) + .withStreamStates( + List.of( + AirbyteStreamState() + .withStreamDescriptor(StreamDescriptor()) + .withStreamState(Jsons.jsonNode(DbStreamState())) + ) + ) + val stateManager: StateManager = + GlobalStateManager( + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + .withGlobal(globalState), + catalog + ) + stateManager.cdcStateManager.cdcState = cdcState + + val expectedDbState = + DbState() + .withCdc(true) + .withCdcState(cdcState) + .withStreams( + List.of( + DbStreamState() + .withStreamName(StateTestConstants.STREAM_NAME1) + .withStreamNamespace(StateTestConstants.NAMESPACE) + .withCursorField(List.of(StateTestConstants.CURSOR_FIELD1)) + .withCursor("a") + .withCursorRecordCount(1L), + DbStreamState() + .withStreamName(StateTestConstants.STREAM_NAME2) + .withStreamNamespace(StateTestConstants.NAMESPACE) + .withCursorField(List.of(StateTestConstants.CURSOR_FIELD2)), + DbStreamState() + .withStreamName(StateTestConstants.STREAM_NAME3) + .withStreamNamespace(StateTestConstants.NAMESPACE) + ) + .stream() + .sorted(Comparator.comparing { obj: DbStreamState -> obj.streamName }) + .collect(Collectors.toList()) + ) + + val expectedGlobalState = + AirbyteGlobalState() + .withSharedState(Jsons.jsonNode(cdcState)) + .withStreamStates( + List.of( + AirbyteStreamState() + .withStreamDescriptor( + StreamDescriptor() + .withName(StateTestConstants.STREAM_NAME1) + .withNamespace(StateTestConstants.NAMESPACE) + ) + .withStreamState( + Jsons.jsonNode( + DbStreamState() + .withStreamName(StateTestConstants.STREAM_NAME1) + .withStreamNamespace(StateTestConstants.NAMESPACE) + .withCursorField( + List.of(StateTestConstants.CURSOR_FIELD1) + ) + .withCursor("a") + .withCursorRecordCount(1L) + ) + ), + AirbyteStreamState() + .withStreamDescriptor( + StreamDescriptor() + .withName(StateTestConstants.STREAM_NAME2) + .withNamespace(StateTestConstants.NAMESPACE) + ) + .withStreamState( + Jsons.jsonNode( + DbStreamState() + .withStreamName(StateTestConstants.STREAM_NAME2) + .withStreamNamespace(StateTestConstants.NAMESPACE) + .withCursorField( + List.of(StateTestConstants.CURSOR_FIELD2) + ) + ) + ), + AirbyteStreamState() + .withStreamDescriptor( + StreamDescriptor() + .withName(StateTestConstants.STREAM_NAME3) + .withNamespace(StateTestConstants.NAMESPACE) + ) + .withStreamState( + Jsons.jsonNode( + DbStreamState() + .withStreamName(StateTestConstants.STREAM_NAME3) + .withStreamNamespace(StateTestConstants.NAMESPACE) + ) + ) + ) + .stream() + .sorted( + Comparator.comparing { o: AirbyteStreamState -> + o.streamDescriptor.name + } + ) + .collect(Collectors.toList()) + ) + val expected = + AirbyteStateMessage() + .withData(Jsons.jsonNode(expectedDbState)) + .withGlobal(expectedGlobalState) + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + + val actualFirstEmission = + stateManager.updateAndEmit(StateTestConstants.NAME_NAMESPACE_PAIR1, "a", 1L) + Assertions.assertEquals(expected, actualFirstEmission) + } + + @Test + fun testToStateWithNoState() { + val catalog = ConfiguredAirbyteCatalog() + val stateManager: StateManager = GlobalStateManager(AirbyteStateMessage(), catalog) + + val airbyteStateMessage = stateManager.toState(Optional.empty()) + Assertions.assertNotNull(airbyteStateMessage) + Assertions.assertEquals( + AirbyteStateMessage.AirbyteStateType.GLOBAL, + airbyteStateMessage!!.type + ) + Assertions.assertEquals(0, airbyteStateMessage.global.streamStates.size) + } + + @Test + fun testCdcStateManagerLegacyState() { + val catalog = Mockito.mock(ConfiguredAirbyteCatalog::class.java) + val cdcState = CdcState().withState(Jsons.jsonNode(Map.of("foo", "bar", "baz", 5))) + val dbState = + DbState() + .withCdcState(CdcState().withState(Jsons.jsonNode(cdcState))) + .withStreams( + List.of( + DbStreamState() + .withStreamName("name") + .withStreamNamespace("namespace") + .withCursor("") + .withCursorField(emptyList()) + ) + ) + .withCdc(true) + val stateManager: StateManager = + GlobalStateManager( + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.LEGACY) + .withData(Jsons.jsonNode(dbState)), + catalog + ) + Assertions.assertNotNull(stateManager.cdcStateManager) + Assertions.assertEquals(1, stateManager.cdcStateManager.initialStreamsSynced!!.size) + Assertions.assertTrue( + stateManager.cdcStateManager.initialStreamsSynced!!.contains( + AirbyteStreamNameNamespacePair("name", "namespace") + ) + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/LegacyStateManagerTest.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/LegacyStateManagerTest.kt new file mode 100644 index 0000000000000..b6a585713b956 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/LegacyStateManagerTest.kt @@ -0,0 +1,384 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.relationaldb.state + +import io.airbyte.cdk.integrations.source.relationaldb.models.CdcState +import io.airbyte.cdk.integrations.source.relationaldb.models.DbState +import io.airbyte.cdk.integrations.source.relationaldb.models.DbStreamState +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.v0.AirbyteStateMessage +import io.airbyte.protocol.models.v0.AirbyteStream +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import java.util.* +import java.util.List +import java.util.Map +import java.util.stream.Collectors +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.mockito.Mockito + +/** Test suite for the [LegacyStateManager] class. */ +class LegacyStateManagerTest { + @Test + fun testGetters() { + val state = + DbState() + .withStreams( + List.of( + DbStreamState() + .withStreamName(StateTestConstants.STREAM_NAME1) + .withStreamNamespace(StateTestConstants.NAMESPACE) + .withCursorField(List.of(StateTestConstants.CURSOR_FIELD1)) + .withCursor(StateTestConstants.CURSOR), + DbStreamState() + .withStreamName(StateTestConstants.STREAM_NAME2) + .withStreamNamespace(StateTestConstants.NAMESPACE) + ) + ) + + val catalog = + ConfiguredAirbyteCatalog() + .withStreams( + List.of( + ConfiguredAirbyteStream() + .withStream( + AirbyteStream() + .withName(StateTestConstants.STREAM_NAME1) + .withNamespace(StateTestConstants.NAMESPACE) + ) + .withCursorField(List.of(StateTestConstants.CURSOR_FIELD1)), + ConfiguredAirbyteStream() + .withStream( + AirbyteStream() + .withName(StateTestConstants.STREAM_NAME2) + .withNamespace(StateTestConstants.NAMESPACE) + ) + ) + ) + + val stateManager: StateManager = LegacyStateManager(state, catalog) + + Assertions.assertEquals( + Optional.of(StateTestConstants.CURSOR_FIELD1), + stateManager.getOriginalCursorField(StateTestConstants.NAME_NAMESPACE_PAIR1) + ) + Assertions.assertEquals( + Optional.of(StateTestConstants.CURSOR), + stateManager.getOriginalCursor(StateTestConstants.NAME_NAMESPACE_PAIR1) + ) + Assertions.assertEquals( + Optional.of(StateTestConstants.CURSOR_FIELD1), + stateManager.getCursorField(StateTestConstants.NAME_NAMESPACE_PAIR1) + ) + Assertions.assertEquals( + Optional.of(StateTestConstants.CURSOR), + stateManager.getCursor(StateTestConstants.NAME_NAMESPACE_PAIR1) + ) + + Assertions.assertEquals( + Optional.empty(), + stateManager.getOriginalCursorField(StateTestConstants.NAME_NAMESPACE_PAIR2) + ) + Assertions.assertEquals( + Optional.empty(), + stateManager.getOriginalCursor(StateTestConstants.NAME_NAMESPACE_PAIR2) + ) + Assertions.assertEquals( + Optional.empty(), + stateManager.getCursorField(StateTestConstants.NAME_NAMESPACE_PAIR2) + ) + Assertions.assertEquals( + Optional.empty(), + stateManager.getCursor(StateTestConstants.NAME_NAMESPACE_PAIR2) + ) + } + + @Test + fun testToState() { + val catalog = + ConfiguredAirbyteCatalog() + .withStreams( + List.of( + ConfiguredAirbyteStream() + .withStream( + AirbyteStream() + .withName(StateTestConstants.STREAM_NAME1) + .withNamespace(StateTestConstants.NAMESPACE) + ) + .withCursorField(List.of(StateTestConstants.CURSOR_FIELD1)), + ConfiguredAirbyteStream() + .withStream( + AirbyteStream() + .withName(StateTestConstants.STREAM_NAME2) + .withNamespace(StateTestConstants.NAMESPACE) + ) + .withCursorField(List.of(StateTestConstants.CURSOR_FIELD2)), + ConfiguredAirbyteStream() + .withStream( + AirbyteStream() + .withName(StateTestConstants.STREAM_NAME3) + .withNamespace(StateTestConstants.NAMESPACE) + ) + ) + ) + + val stateManager: StateManager = LegacyStateManager(DbState(), catalog) + + val expectedFirstEmission = + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.LEGACY) + .withData( + Jsons.jsonNode( + DbState() + .withStreams( + List.of( + DbStreamState() + .withStreamName(StateTestConstants.STREAM_NAME1) + .withStreamNamespace(StateTestConstants.NAMESPACE) + .withCursorField( + List.of(StateTestConstants.CURSOR_FIELD1) + ) + .withCursor("a"), + DbStreamState() + .withStreamName(StateTestConstants.STREAM_NAME2) + .withStreamNamespace(StateTestConstants.NAMESPACE) + .withCursorField( + List.of(StateTestConstants.CURSOR_FIELD2) + ), + DbStreamState() + .withStreamName(StateTestConstants.STREAM_NAME3) + .withStreamNamespace(StateTestConstants.NAMESPACE) + ) + .stream() + .sorted( + Comparator.comparing { obj: DbStreamState -> + obj.streamName + } + ) + .collect(Collectors.toList()) + ) + .withCdc(false) + ) + ) + val actualFirstEmission = + stateManager.updateAndEmit(StateTestConstants.NAME_NAMESPACE_PAIR1, "a") + Assertions.assertEquals(expectedFirstEmission, actualFirstEmission) + val expectedSecondEmission = + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.LEGACY) + .withData( + Jsons.jsonNode( + DbState() + .withStreams( + List.of( + DbStreamState() + .withStreamName(StateTestConstants.STREAM_NAME1) + .withStreamNamespace(StateTestConstants.NAMESPACE) + .withCursorField( + List.of(StateTestConstants.CURSOR_FIELD1) + ) + .withCursor("a"), + DbStreamState() + .withStreamName(StateTestConstants.STREAM_NAME2) + .withStreamNamespace(StateTestConstants.NAMESPACE) + .withCursorField( + List.of(StateTestConstants.CURSOR_FIELD2) + ) + .withCursor("b"), + DbStreamState() + .withStreamName(StateTestConstants.STREAM_NAME3) + .withStreamNamespace(StateTestConstants.NAMESPACE) + ) + .stream() + .sorted( + Comparator.comparing { obj: DbStreamState -> + obj.streamName + } + ) + .collect(Collectors.toList()) + ) + .withCdc(false) + ) + ) + val actualSecondEmission = + stateManager.updateAndEmit(StateTestConstants.NAME_NAMESPACE_PAIR2, "b") + Assertions.assertEquals(expectedSecondEmission, actualSecondEmission) + } + + @Test + fun testToStateNullCursorField() { + val catalog = + ConfiguredAirbyteCatalog() + .withStreams( + List.of( + ConfiguredAirbyteStream() + .withStream( + AirbyteStream() + .withName(StateTestConstants.STREAM_NAME1) + .withNamespace(StateTestConstants.NAMESPACE) + ) + .withCursorField(List.of(StateTestConstants.CURSOR_FIELD1)), + ConfiguredAirbyteStream() + .withStream( + AirbyteStream() + .withName(StateTestConstants.STREAM_NAME2) + .withNamespace(StateTestConstants.NAMESPACE) + ) + ) + ) + val stateManager: StateManager = LegacyStateManager(DbState(), catalog) + + val expectedFirstEmission = + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.LEGACY) + .withData( + Jsons.jsonNode( + DbState() + .withStreams( + List.of( + DbStreamState() + .withStreamName(StateTestConstants.STREAM_NAME1) + .withStreamNamespace(StateTestConstants.NAMESPACE) + .withCursorField( + List.of(StateTestConstants.CURSOR_FIELD1) + ) + .withCursor("a"), + DbStreamState() + .withStreamName(StateTestConstants.STREAM_NAME2) + .withStreamNamespace(StateTestConstants.NAMESPACE) + ) + .stream() + .sorted( + Comparator.comparing { obj: DbStreamState -> + obj.streamName + } + ) + .collect(Collectors.toList()) + ) + .withCdc(false) + ) + ) + + val actualFirstEmission = + stateManager.updateAndEmit(StateTestConstants.NAME_NAMESPACE_PAIR1, "a") + Assertions.assertEquals(expectedFirstEmission, actualFirstEmission) + } + + @Test + fun testCursorNotUpdatedForCdc() { + val catalog = + ConfiguredAirbyteCatalog() + .withStreams( + List.of( + ConfiguredAirbyteStream() + .withStream( + AirbyteStream() + .withName(StateTestConstants.STREAM_NAME1) + .withNamespace(StateTestConstants.NAMESPACE) + ) + .withCursorField(List.of(StateTestConstants.CURSOR_FIELD1)), + ConfiguredAirbyteStream() + .withStream( + AirbyteStream() + .withName(StateTestConstants.STREAM_NAME2) + .withNamespace(StateTestConstants.NAMESPACE) + ) + ) + ) + + val state = DbState() + state.cdc = true + val stateManager: StateManager = LegacyStateManager(state, catalog) + + val expectedFirstEmission = + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.LEGACY) + .withData( + Jsons.jsonNode( + DbState() + .withStreams( + List.of( + DbStreamState() + .withStreamName(StateTestConstants.STREAM_NAME1) + .withStreamNamespace(StateTestConstants.NAMESPACE) + .withCursorField( + List.of(StateTestConstants.CURSOR_FIELD1) + ) + .withCursor(null), + DbStreamState() + .withStreamName(StateTestConstants.STREAM_NAME2) + .withStreamNamespace(StateTestConstants.NAMESPACE) + .withCursorField(listOf()) + ) + .stream() + .sorted( + Comparator.comparing { obj: DbStreamState -> + obj.streamName + } + ) + .collect(Collectors.toList()) + ) + .withCdc(true) + ) + ) + val actualFirstEmission = + stateManager.updateAndEmit(StateTestConstants.NAME_NAMESPACE_PAIR1, "a") + Assertions.assertEquals(expectedFirstEmission, actualFirstEmission) + val expectedSecondEmission = + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.LEGACY) + .withData( + Jsons.jsonNode( + DbState() + .withStreams( + List.of( + DbStreamState() + .withStreamName(StateTestConstants.STREAM_NAME1) + .withStreamNamespace(StateTestConstants.NAMESPACE) + .withCursorField( + List.of(StateTestConstants.CURSOR_FIELD1) + ) + .withCursor(null), + DbStreamState() + .withStreamName(StateTestConstants.STREAM_NAME2) + .withStreamNamespace(StateTestConstants.NAMESPACE) + .withCursorField(listOf()) + .withCursor(null) + ) + .stream() + .sorted( + Comparator.comparing { obj: DbStreamState -> + obj.streamName + } + ) + .collect(Collectors.toList()) + ) + .withCdc(true) + ) + ) + val actualSecondEmission = + stateManager.updateAndEmit(StateTestConstants.NAME_NAMESPACE_PAIR2, "b") + Assertions.assertEquals(expectedSecondEmission, actualSecondEmission) + } + + @Test + fun testCdcStateManager() { + val catalog = Mockito.mock(ConfiguredAirbyteCatalog::class.java) + val cdcState = CdcState().withState(Jsons.jsonNode(Map.of("foo", "bar", "baz", 5))) + val dbState = + DbState() + .withCdcState(cdcState) + .withStreams( + List.of( + DbStreamState() + .withStreamNamespace(StateTestConstants.NAMESPACE) + .withStreamName(StateTestConstants.STREAM_NAME1) + ) + ) + val stateManager: StateManager = LegacyStateManager(dbState, catalog) + Assertions.assertNotNull(stateManager.cdcStateManager) + Assertions.assertEquals(cdcState, stateManager.cdcStateManager.cdcState) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/SourceStateIteratorForTest.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/SourceStateIteratorForTest.kt new file mode 100644 index 0000000000000..009feab1682c7 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/SourceStateIteratorForTest.kt @@ -0,0 +1,23 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.source.relationaldb.state + +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream + +class SourceStateIteratorForTest( + messageIterator: Iterator, + stream: ConfiguredAirbyteStream?, + sourceStateMessageProducer: SourceStateMessageProducer, + stateEmitFrequency: StateEmitFrequency +) : + SourceStateIterator( + messageIterator, + stream, + sourceStateMessageProducer, + stateEmitFrequency + ) { + public override fun computeNext(): AirbyteMessage? = super.computeNext() +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/SourceStateIteratorTest.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/SourceStateIteratorTest.kt new file mode 100644 index 0000000000000..efe33c0dc01af --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/SourceStateIteratorTest.kt @@ -0,0 +1,103 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.relationaldb.state + +import io.airbyte.protocol.models.v0.* +import java.time.Duration +import org.junit.Assert +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.mockito.ArgumentMatchers +import org.mockito.Mockito +import org.mockito.Mockito.mock +import org.mockito.kotlin.any +import org.mockito.kotlin.eq + +class SourceStateIteratorTest { + var mockProducer: SourceStateMessageProducer = mock() + var messageIterator: Iterator = mock() + var stream: ConfiguredAirbyteStream = mock() + + var sourceStateIterator: SourceStateIteratorForTest<*>? = null + + @BeforeEach + fun setup() { + mockProducer = mock() + stream = mock() + messageIterator = mock() + val stateEmitFrequency = StateEmitFrequency(1L, Duration.ofSeconds(100L)) + sourceStateIterator = + SourceStateIteratorForTest(messageIterator, stream, mockProducer, stateEmitFrequency) + } + + // Provides a way to generate a record message and will verify corresponding spied functions + // have + // been called. + fun processRecordMessage() { + Mockito.doReturn(true).`when`(messageIterator).hasNext() + Mockito.doReturn(false) + .`when`(mockProducer) + .shouldEmitStateMessage(ArgumentMatchers.eq(stream)) + val message = + AirbyteMessage().withType(AirbyteMessage.Type.RECORD).withRecord(AirbyteRecordMessage()) + Mockito.doReturn(message).`when`(mockProducer).processRecordMessage(eq(stream), any()) + Mockito.doReturn(message).`when`(messageIterator).next() + + Assert.assertEquals(message, sourceStateIterator!!.computeNext()) + Mockito.verify(mockProducer, Mockito.atLeastOnce()) + .processRecordMessage(eq(stream), eq(message)) + } + + @Test + fun testShouldProcessRecordMessage() { + processRecordMessage() + } + + @Test + fun testShouldEmitStateMessage() { + processRecordMessage() + Mockito.doReturn(true) + .`when`(mockProducer) + .shouldEmitStateMessage(ArgumentMatchers.eq(stream)) + val stateMessage = AirbyteStateMessage() + Mockito.doReturn(stateMessage).`when`(mockProducer).generateStateMessageAtCheckpoint(stream) + val expectedMessage = + AirbyteMessage().withType(AirbyteMessage.Type.STATE).withState(stateMessage) + expectedMessage.state.withSourceStats(AirbyteStateStats().withRecordCount(1.0)) + Assert.assertEquals(expectedMessage, sourceStateIterator!!.computeNext()) + } + + @Test + fun testShouldEmitFinalStateMessage() { + processRecordMessage() + processRecordMessage() + Mockito.doReturn(false).`when`(messageIterator).hasNext() + val stateMessage = AirbyteStateMessage() + Mockito.doReturn(stateMessage).`when`(mockProducer).createFinalStateMessage(stream) + val expectedMessage = + AirbyteMessage().withType(AirbyteMessage.Type.STATE).withState(stateMessage) + expectedMessage.state.withSourceStats(AirbyteStateStats().withRecordCount(2.0)) + Assert.assertEquals(expectedMessage, sourceStateIterator!!.computeNext()) + } + + @Test + fun testShouldSendEndOfData() { + processRecordMessage() + Mockito.doReturn(false).`when`(messageIterator).hasNext() + Mockito.doReturn(AirbyteStateMessage()).`when`(mockProducer).createFinalStateMessage(stream) + sourceStateIterator!!.computeNext() + + // After sending the final state, if iterator was called again, we will return null. + Assert.assertEquals(null, sourceStateIterator!!.computeNext()) + } + + @Test + fun testShouldRethrowExceptions() { + processRecordMessage() + Mockito.doThrow(ArrayIndexOutOfBoundsException("unexpected error")) + .`when`(messageIterator) + .hasNext() + Assert.assertThrows(RuntimeException::class.java) { sourceStateIterator!!.computeNext() } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/StateGeneratorUtilsTest.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/StateGeneratorUtilsTest.kt new file mode 100644 index 0000000000000..e9334ff081f34 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/StateGeneratorUtilsTest.kt @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.relationaldb.state + +import io.airbyte.protocol.models.v0.StreamDescriptor +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +/** Test suite for the [StateGeneratorUtils] class. */ +class StateGeneratorUtilsTest { + @Test + fun testValidStreamDescriptor() { + val streamDescriptor1: StreamDescriptor? = null + val streamDescriptor2 = StreamDescriptor() + val streamDescriptor3 = StreamDescriptor().withName("name") + val streamDescriptor4 = StreamDescriptor().withNamespace("namespace") + val streamDescriptor5 = StreamDescriptor().withName("name").withNamespace("namespace") + val streamDescriptor6 = StreamDescriptor().withName("name").withNamespace("") + val streamDescriptor7 = StreamDescriptor().withName("").withNamespace("namespace") + val streamDescriptor8 = StreamDescriptor().withName("").withNamespace("") + + Assertions.assertFalse(StateGeneratorUtils.isValidStreamDescriptor(streamDescriptor1)) + Assertions.assertFalse(StateGeneratorUtils.isValidStreamDescriptor(streamDescriptor2)) + Assertions.assertTrue(StateGeneratorUtils.isValidStreamDescriptor(streamDescriptor3)) + Assertions.assertFalse(StateGeneratorUtils.isValidStreamDescriptor(streamDescriptor4)) + Assertions.assertTrue(StateGeneratorUtils.isValidStreamDescriptor(streamDescriptor5)) + Assertions.assertTrue(StateGeneratorUtils.isValidStreamDescriptor(streamDescriptor6)) + Assertions.assertTrue(StateGeneratorUtils.isValidStreamDescriptor(streamDescriptor7)) + Assertions.assertTrue(StateGeneratorUtils.isValidStreamDescriptor(streamDescriptor8)) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/StateManagerFactoryTest.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/StateManagerFactoryTest.kt new file mode 100644 index 0000000000000..ca8c76753b0c2 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/StateManagerFactoryTest.kt @@ -0,0 +1,322 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.relationaldb.state + +import io.airbyte.cdk.integrations.source.relationaldb.models.CdcState +import io.airbyte.cdk.integrations.source.relationaldb.models.DbState +import io.airbyte.cdk.integrations.source.relationaldb.models.DbStreamState +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.v0.* +import java.util.List +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.mockito.Mockito + +/** Test suite for the [StateManagerFactory] class. */ +class StateManagerFactoryTest { + @Test + fun testNullOrEmptyState() { + val catalog = Mockito.mock(ConfiguredAirbyteCatalog::class.java) + + Assertions.assertThrows(IllegalArgumentException::class.java) { + StateManagerFactory.createStateManager( + AirbyteStateMessage.AirbyteStateType.GLOBAL, + null, + catalog + ) + } + + Assertions.assertThrows(IllegalArgumentException::class.java) { + StateManagerFactory.createStateManager( + AirbyteStateMessage.AirbyteStateType.GLOBAL, + listOf(), + catalog + ) + } + + Assertions.assertThrows(IllegalArgumentException::class.java) { + StateManagerFactory.createStateManager( + AirbyteStateMessage.AirbyteStateType.LEGACY, + null, + catalog + ) + } + + Assertions.assertThrows(IllegalArgumentException::class.java) { + StateManagerFactory.createStateManager( + AirbyteStateMessage.AirbyteStateType.LEGACY, + listOf(), + catalog + ) + } + + Assertions.assertThrows(IllegalArgumentException::class.java) { + StateManagerFactory.createStateManager( + AirbyteStateMessage.AirbyteStateType.STREAM, + null, + catalog + ) + } + + Assertions.assertThrows(IllegalArgumentException::class.java) { + StateManagerFactory.createStateManager( + AirbyteStateMessage.AirbyteStateType.STREAM, + listOf(), + catalog + ) + } + } + + @Test + fun testLegacyStateManagerCreationFromAirbyteStateMessage() { + val catalog = Mockito.mock(ConfiguredAirbyteCatalog::class.java) + val airbyteStateMessage = Mockito.mock(AirbyteStateMessage::class.java) + Mockito.`when`(airbyteStateMessage.data).thenReturn(Jsons.jsonNode(DbState())) + + val stateManager = + StateManagerFactory.createStateManager( + AirbyteStateMessage.AirbyteStateType.LEGACY, + List.of(airbyteStateMessage), + catalog + ) + + Assertions.assertNotNull(stateManager) + Assertions.assertEquals(LegacyStateManager::class.java, stateManager.javaClass) + } + + @Test + fun testGlobalStateManagerCreation() { + val catalog = Mockito.mock(ConfiguredAirbyteCatalog::class.java) + val globalState = + AirbyteGlobalState() + .withSharedState( + Jsons.jsonNode( + DbState().withCdcState(CdcState().withState(Jsons.jsonNode(DbState()))) + ) + ) + .withStreamStates( + List.of( + AirbyteStreamState() + .withStreamDescriptor( + StreamDescriptor().withNamespace(NAMESPACE).withName(NAME) + ) + .withStreamState(Jsons.jsonNode(DbStreamState())) + ) + ) + val airbyteStateMessage = + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + .withGlobal(globalState) + + val stateManager = + StateManagerFactory.createStateManager( + AirbyteStateMessage.AirbyteStateType.GLOBAL, + List.of(airbyteStateMessage), + catalog + ) + + Assertions.assertNotNull(stateManager) + Assertions.assertEquals(GlobalStateManager::class.java, stateManager.javaClass) + } + + @Test + fun testGlobalStateManagerCreationFromLegacyState() { + val catalog = Mockito.mock(ConfiguredAirbyteCatalog::class.java) + val cdcState = CdcState() + val dbState = + DbState() + .withCdcState(cdcState) + .withStreams( + List.of(DbStreamState().withStreamName(NAME).withStreamNamespace(NAMESPACE)) + ) + val airbyteStateMessage = + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.LEGACY) + .withData(Jsons.jsonNode(dbState)) + + val stateManager = + StateManagerFactory.createStateManager( + AirbyteStateMessage.AirbyteStateType.GLOBAL, + List.of(airbyteStateMessage), + catalog + ) + + Assertions.assertNotNull(stateManager) + Assertions.assertEquals(GlobalStateManager::class.java, stateManager.javaClass) + } + + @Test + fun testGlobalStateManagerCreationFromStreamState() { + val catalog = Mockito.mock(ConfiguredAirbyteCatalog::class.java) + val airbyteStateMessage = + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream( + AirbyteStreamState() + .withStreamDescriptor( + StreamDescriptor().withName(NAME).withNamespace(NAMESPACE) + ) + .withStreamState(Jsons.jsonNode(DbStreamState())) + ) + + Assertions.assertThrows(IllegalArgumentException::class.java) { + StateManagerFactory.createStateManager( + AirbyteStateMessage.AirbyteStateType.GLOBAL, + List.of(airbyteStateMessage), + catalog + ) + } + } + + @Test + fun testGlobalStateManagerCreationWithLegacyDataPresent() { + val catalog = Mockito.mock(ConfiguredAirbyteCatalog::class.java) + val globalState = + AirbyteGlobalState() + .withSharedState( + Jsons.jsonNode( + DbState().withCdcState(CdcState().withState(Jsons.jsonNode(DbState()))) + ) + ) + .withStreamStates( + List.of( + AirbyteStreamState() + .withStreamDescriptor( + StreamDescriptor().withNamespace(NAMESPACE).withName(NAME) + ) + .withStreamState(Jsons.jsonNode(DbStreamState())) + ) + ) + val airbyteStateMessage = + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + .withGlobal(globalState) + .withData(Jsons.jsonNode(DbState())) + + val stateManager = + StateManagerFactory.createStateManager( + AirbyteStateMessage.AirbyteStateType.GLOBAL, + List.of(airbyteStateMessage), + catalog + ) + + Assertions.assertNotNull(stateManager) + Assertions.assertEquals(GlobalStateManager::class.java, stateManager.javaClass) + } + + @Test + fun testStreamStateManagerCreation() { + val catalog = Mockito.mock(ConfiguredAirbyteCatalog::class.java) + val airbyteStateMessage = + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream( + AirbyteStreamState() + .withStreamDescriptor( + StreamDescriptor().withName(NAME).withNamespace(NAMESPACE) + ) + .withStreamState(Jsons.jsonNode(DbStreamState())) + ) + + val stateManager = + StateManagerFactory.createStateManager( + AirbyteStateMessage.AirbyteStateType.STREAM, + List.of(airbyteStateMessage), + catalog + ) + + Assertions.assertNotNull(stateManager) + Assertions.assertEquals(StreamStateManager::class.java, stateManager.javaClass) + } + + @Test + fun testStreamStateManagerCreationFromLegacy() { + val catalog = Mockito.mock(ConfiguredAirbyteCatalog::class.java) + val cdcState = CdcState() + val dbState = + DbState() + .withCdcState(cdcState) + .withStreams( + List.of(DbStreamState().withStreamName(NAME).withStreamNamespace(NAMESPACE)) + ) + val airbyteStateMessage = + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.LEGACY) + .withData(Jsons.jsonNode(dbState)) + + val stateManager = + StateManagerFactory.createStateManager( + AirbyteStateMessage.AirbyteStateType.STREAM, + List.of(airbyteStateMessage), + catalog + ) + + Assertions.assertNotNull(stateManager) + Assertions.assertEquals(StreamStateManager::class.java, stateManager.javaClass) + } + + @Test + fun testStreamStateManagerCreationFromGlobal() { + val catalog = Mockito.mock(ConfiguredAirbyteCatalog::class.java) + val globalState = + AirbyteGlobalState() + .withSharedState( + Jsons.jsonNode( + DbState().withCdcState(CdcState().withState(Jsons.jsonNode(DbState()))) + ) + ) + .withStreamStates( + List.of( + AirbyteStreamState() + .withStreamDescriptor( + StreamDescriptor().withNamespace(NAMESPACE).withName(NAME) + ) + .withStreamState(Jsons.jsonNode(DbStreamState())) + ) + ) + val airbyteStateMessage = + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + .withGlobal(globalState) + + Assertions.assertThrows(IllegalArgumentException::class.java) { + StateManagerFactory.createStateManager( + AirbyteStateMessage.AirbyteStateType.STREAM, + List.of(airbyteStateMessage), + catalog + ) + } + } + + @Test + fun testStreamStateManagerCreationWithLegacyDataPresent() { + val catalog = Mockito.mock(ConfiguredAirbyteCatalog::class.java) + val airbyteStateMessage = + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream( + AirbyteStreamState() + .withStreamDescriptor( + StreamDescriptor().withName(NAME).withNamespace(NAMESPACE) + ) + .withStreamState(Jsons.jsonNode(DbStreamState())) + ) + .withData(Jsons.jsonNode(DbState())) + + val stateManager = + StateManagerFactory.createStateManager( + AirbyteStateMessage.AirbyteStateType.STREAM, + List.of(airbyteStateMessage), + catalog + ) + + Assertions.assertNotNull(stateManager) + Assertions.assertEquals(StreamStateManager::class.java, stateManager.javaClass) + } + + companion object { + private const val NAMESPACE = "namespace" + private const val NAME = "name" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/StateTestConstants.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/StateTestConstants.kt new file mode 100644 index 0000000000000..3ffd9781e7607 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/StateTestConstants.kt @@ -0,0 +1,68 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.relationaldb.state + +import io.airbyte.cdk.integrations.source.relationaldb.models.DbStreamState +import io.airbyte.protocol.models.v0.AirbyteStream +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import java.util.* +import java.util.List +import org.testcontainers.shaded.com.google.common.collect.Lists + +/** Collection of constants for use in state management-related tests. */ +object StateTestConstants { + const val NAMESPACE: String = "public" + const val STREAM_NAME1: String = "cars" + val NAME_NAMESPACE_PAIR1: AirbyteStreamNameNamespacePair = + AirbyteStreamNameNamespacePair(STREAM_NAME1, NAMESPACE) + const val STREAM_NAME2: String = "bicycles" + val NAME_NAMESPACE_PAIR2: AirbyteStreamNameNamespacePair = + AirbyteStreamNameNamespacePair(STREAM_NAME2, NAMESPACE) + const val STREAM_NAME3: String = "stationary_bicycles" + const val CURSOR_FIELD1: String = "year" + const val CURSOR_FIELD2: String = "generation" + const val CURSOR: String = "2000" + const val CURSOR_RECORD_COUNT: Long = 19L + + fun getState(cursorField: String?, cursor: String?): Optional { + return Optional.of( + DbStreamState() + .withStreamName(STREAM_NAME1) + .withCursorField(Lists.newArrayList(cursorField)) + .withCursor(cursor) + ) + } + + fun getState( + cursorField: String?, + cursor: String?, + cursorRecordCount: Long + ): Optional { + return Optional.of( + DbStreamState() + .withStreamName(STREAM_NAME1) + .withCursorField(Lists.newArrayList(cursorField)) + .withCursor(cursor) + .withCursorRecordCount(cursorRecordCount) + ) + } + + fun getCatalog(cursorField: String?): Optional { + return Optional.of( + ConfiguredAirbyteCatalog().withStreams(List.of(getStream(cursorField).orElse(null))) + ) + } + + fun getStream(cursorField: String?): Optional { + return Optional.of( + ConfiguredAirbyteStream() + .withStream(AirbyteStream().withName(STREAM_NAME1)) + .withCursorField( + if (cursorField == null) emptyList() else Lists.newArrayList(cursorField) + ) + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/StreamStateManagerTest.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/StreamStateManagerTest.kt new file mode 100644 index 0000000000000..6fba4dda3a85e --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/integrations/source/relationaldb/state/StreamStateManagerTest.kt @@ -0,0 +1,473 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.relationaldb.state + +import com.google.common.collect.Lists +import io.airbyte.cdk.integrations.source.relationaldb.models.DbState +import io.airbyte.cdk.integrations.source.relationaldb.models.DbStreamState +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.v0.* +import java.util.* +import java.util.stream.Collectors +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.mockito.Mockito + +/** Test suite for the [StreamStateManager] class. */ +class StreamStateManagerTest { + @Test + fun testCreationFromInvalidState() { + val airbyteStateMessage = + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream( + AirbyteStreamState() + .withStreamDescriptor( + StreamDescriptor() + .withName(StateTestConstants.STREAM_NAME1) + .withNamespace(StateTestConstants.NAMESPACE) + ) + .withStreamState(Jsons.jsonNode("Not a state object")) + ) + val catalog = Mockito.mock(ConfiguredAirbyteCatalog::class.java) + + Assertions.assertDoesNotThrow { + val stateManager: StateManager = + StreamStateManager(java.util.List.of(airbyteStateMessage), catalog) + Assertions.assertNotNull(stateManager) + } + } + + @Test + fun testGetters() { + val state: MutableList = ArrayList() + state.add( + createStreamState( + StateTestConstants.STREAM_NAME1, + StateTestConstants.NAMESPACE, + java.util.List.of(StateTestConstants.CURSOR_FIELD1), + StateTestConstants.CURSOR, + 0L + ) + ) + state.add( + createStreamState( + StateTestConstants.STREAM_NAME2, + StateTestConstants.NAMESPACE, + listOf(), + null, + 0L + ) + ) + + val catalog = + ConfiguredAirbyteCatalog() + .withStreams( + java.util.List.of( + ConfiguredAirbyteStream() + .withStream( + AirbyteStream() + .withName(StateTestConstants.STREAM_NAME1) + .withNamespace(StateTestConstants.NAMESPACE) + .withSupportedSyncModes( + Lists.newArrayList(SyncMode.FULL_REFRESH) + ) + ) + .withCursorField(java.util.List.of(StateTestConstants.CURSOR_FIELD1)), + ConfiguredAirbyteStream() + .withStream( + AirbyteStream() + .withName(StateTestConstants.STREAM_NAME2) + .withNamespace(StateTestConstants.NAMESPACE) + .withSupportedSyncModes( + Lists.newArrayList(SyncMode.FULL_REFRESH) + ) + ) + ) + ) + + val stateManager: StateManager = StreamStateManager(state, catalog) + + Assertions.assertEquals( + Optional.of(StateTestConstants.CURSOR_FIELD1), + stateManager.getOriginalCursorField(StateTestConstants.NAME_NAMESPACE_PAIR1) + ) + Assertions.assertEquals( + Optional.of(StateTestConstants.CURSOR), + stateManager.getOriginalCursor(StateTestConstants.NAME_NAMESPACE_PAIR1) + ) + Assertions.assertEquals( + Optional.of(StateTestConstants.CURSOR_FIELD1), + stateManager.getCursorField(StateTestConstants.NAME_NAMESPACE_PAIR1) + ) + Assertions.assertEquals( + Optional.of(StateTestConstants.CURSOR), + stateManager.getCursor(StateTestConstants.NAME_NAMESPACE_PAIR1) + ) + + Assertions.assertEquals( + Optional.empty(), + stateManager.getOriginalCursorField(StateTestConstants.NAME_NAMESPACE_PAIR2) + ) + Assertions.assertEquals( + Optional.empty(), + stateManager.getOriginalCursor(StateTestConstants.NAME_NAMESPACE_PAIR2) + ) + Assertions.assertEquals( + Optional.empty(), + stateManager.getCursorField(StateTestConstants.NAME_NAMESPACE_PAIR2) + ) + Assertions.assertEquals( + Optional.empty(), + stateManager.getCursor(StateTestConstants.NAME_NAMESPACE_PAIR2) + ) + } + + @Test + fun testToState() { + val catalog = + ConfiguredAirbyteCatalog() + .withStreams( + java.util.List.of( + ConfiguredAirbyteStream() + .withStream( + AirbyteStream() + .withName(StateTestConstants.STREAM_NAME1) + .withNamespace(StateTestConstants.NAMESPACE) + .withSupportedSyncModes( + Lists.newArrayList(SyncMode.FULL_REFRESH) + ) + ) + .withCursorField(java.util.List.of(StateTestConstants.CURSOR_FIELD1)), + ConfiguredAirbyteStream() + .withStream( + AirbyteStream() + .withName(StateTestConstants.STREAM_NAME2) + .withNamespace(StateTestConstants.NAMESPACE) + .withSupportedSyncModes( + Lists.newArrayList(SyncMode.FULL_REFRESH) + ) + ) + .withCursorField(java.util.List.of(StateTestConstants.CURSOR_FIELD2)), + ConfiguredAirbyteStream() + .withStream( + AirbyteStream() + .withName(StateTestConstants.STREAM_NAME3) + .withNamespace(StateTestConstants.NAMESPACE) + .withSupportedSyncModes( + Lists.newArrayList(SyncMode.FULL_REFRESH) + ) + ) + ) + ) + + val stateManager: StateManager = StreamStateManager(createDefaultState(), catalog) + + val expectedFirstDbState = + DbState() + .withCdc(false) + .withStreams( + java.util.List.of( + DbStreamState() + .withStreamName(StateTestConstants.STREAM_NAME1) + .withStreamNamespace(StateTestConstants.NAMESPACE) + .withCursorField( + java.util.List.of(StateTestConstants.CURSOR_FIELD1) + ) + .withCursor("a"), + DbStreamState() + .withStreamName(StateTestConstants.STREAM_NAME2) + .withStreamNamespace(StateTestConstants.NAMESPACE) + .withCursorField( + java.util.List.of(StateTestConstants.CURSOR_FIELD2) + ), + DbStreamState() + .withStreamName(StateTestConstants.STREAM_NAME3) + .withStreamNamespace(StateTestConstants.NAMESPACE) + ) + .stream() + .sorted(Comparator.comparing { obj: DbStreamState -> obj.streamName }) + .collect(Collectors.toList()) + ) + val expectedFirstEmission = + createStreamState( + StateTestConstants.STREAM_NAME1, + StateTestConstants.NAMESPACE, + java.util.List.of(StateTestConstants.CURSOR_FIELD1), + "a", + 0L + ) + .withData(Jsons.jsonNode(expectedFirstDbState)) + + val actualFirstEmission = + stateManager.updateAndEmit(StateTestConstants.NAME_NAMESPACE_PAIR1, "a") + Assertions.assertEquals(expectedFirstEmission, actualFirstEmission) + + val expectedRecordCount = 17L + val expectedSecondDbState = + DbState() + .withCdc(false) + .withStreams( + java.util.List.of( + DbStreamState() + .withStreamName(StateTestConstants.STREAM_NAME1) + .withStreamNamespace(StateTestConstants.NAMESPACE) + .withCursorField( + java.util.List.of(StateTestConstants.CURSOR_FIELD1) + ) + .withCursor("a"), + DbStreamState() + .withStreamName(StateTestConstants.STREAM_NAME2) + .withStreamNamespace(StateTestConstants.NAMESPACE) + .withCursorField( + java.util.List.of(StateTestConstants.CURSOR_FIELD2) + ) + .withCursor("b") + .withCursorRecordCount(expectedRecordCount), + DbStreamState() + .withStreamName(StateTestConstants.STREAM_NAME3) + .withStreamNamespace(StateTestConstants.NAMESPACE) + ) + .stream() + .sorted(Comparator.comparing { obj: DbStreamState -> obj.streamName }) + .collect(Collectors.toList()) + ) + val expectedSecondEmission = + createStreamState( + StateTestConstants.STREAM_NAME2, + StateTestConstants.NAMESPACE, + java.util.List.of(StateTestConstants.CURSOR_FIELD2), + "b", + expectedRecordCount + ) + .withData(Jsons.jsonNode(expectedSecondDbState)) + + val actualSecondEmission = + stateManager.updateAndEmit( + StateTestConstants.NAME_NAMESPACE_PAIR2, + "b", + expectedRecordCount + ) + Assertions.assertEquals(expectedSecondEmission, actualSecondEmission) + } + + @Test + fun testToStateWithoutCursorInfo() { + val catalog = + ConfiguredAirbyteCatalog() + .withStreams( + java.util.List.of( + ConfiguredAirbyteStream() + .withStream( + AirbyteStream() + .withName(StateTestConstants.STREAM_NAME1) + .withNamespace(StateTestConstants.NAMESPACE) + .withSupportedSyncModes( + Lists.newArrayList(SyncMode.FULL_REFRESH) + ) + ) + .withCursorField(java.util.List.of(StateTestConstants.CURSOR_FIELD1)), + ConfiguredAirbyteStream() + .withStream( + AirbyteStream() + .withName(StateTestConstants.STREAM_NAME2) + .withNamespace(StateTestConstants.NAMESPACE) + .withSupportedSyncModes( + Lists.newArrayList(SyncMode.FULL_REFRESH) + ) + ) + .withCursorField(java.util.List.of(StateTestConstants.CURSOR_FIELD2)), + ConfiguredAirbyteStream() + .withStream( + AirbyteStream() + .withName(StateTestConstants.STREAM_NAME3) + .withNamespace(StateTestConstants.NAMESPACE) + .withSupportedSyncModes( + Lists.newArrayList(SyncMode.FULL_REFRESH) + ) + ) + ) + ) + val airbyteStreamNameNamespacePair = AirbyteStreamNameNamespacePair("other", "other") + + val stateManager: StateManager = StreamStateManager(createDefaultState(), catalog) + val airbyteStateMessage = stateManager.toState(Optional.of(airbyteStreamNameNamespacePair)) + Assertions.assertNotNull(airbyteStateMessage) + Assertions.assertEquals( + AirbyteStateMessage.AirbyteStateType.STREAM, + airbyteStateMessage.type + ) + Assertions.assertNotNull(airbyteStateMessage.stream) + } + + @Test + fun testToStateWithoutStreamPair() { + val catalog = + ConfiguredAirbyteCatalog() + .withStreams( + java.util.List.of( + ConfiguredAirbyteStream() + .withStream( + AirbyteStream() + .withName(StateTestConstants.STREAM_NAME1) + .withNamespace(StateTestConstants.NAMESPACE) + .withSupportedSyncModes( + Lists.newArrayList(SyncMode.FULL_REFRESH) + ) + ) + .withCursorField(java.util.List.of(StateTestConstants.CURSOR_FIELD1)), + ConfiguredAirbyteStream() + .withStream( + AirbyteStream() + .withName(StateTestConstants.STREAM_NAME2) + .withNamespace(StateTestConstants.NAMESPACE) + .withSupportedSyncModes( + Lists.newArrayList(SyncMode.FULL_REFRESH) + ) + ) + .withCursorField(java.util.List.of(StateTestConstants.CURSOR_FIELD2)), + ConfiguredAirbyteStream() + .withStream( + AirbyteStream() + .withName(StateTestConstants.STREAM_NAME3) + .withNamespace(StateTestConstants.NAMESPACE) + .withSupportedSyncModes( + Lists.newArrayList(SyncMode.FULL_REFRESH) + ) + ) + ) + ) + + val stateManager: StateManager = StreamStateManager(createDefaultState(), catalog) + val airbyteStateMessage = stateManager.toState(Optional.empty()) + Assertions.assertNotNull(airbyteStateMessage) + Assertions.assertEquals( + AirbyteStateMessage.AirbyteStateType.STREAM, + airbyteStateMessage.type + ) + Assertions.assertNotNull(airbyteStateMessage.stream) + Assertions.assertNull(airbyteStateMessage.stream.streamState) + } + + @Test + fun testToStateNullCursorField() { + val catalog = + ConfiguredAirbyteCatalog() + .withStreams( + java.util.List.of( + ConfiguredAirbyteStream() + .withStream( + AirbyteStream() + .withName(StateTestConstants.STREAM_NAME1) + .withNamespace(StateTestConstants.NAMESPACE) + .withSupportedSyncModes( + Lists.newArrayList(SyncMode.FULL_REFRESH) + ) + ) + .withCursorField(java.util.List.of(StateTestConstants.CURSOR_FIELD1)), + ConfiguredAirbyteStream() + .withStream( + AirbyteStream() + .withName(StateTestConstants.STREAM_NAME2) + .withNamespace(StateTestConstants.NAMESPACE) + .withSupportedSyncModes( + Lists.newArrayList(SyncMode.FULL_REFRESH) + ) + ) + ) + ) + val stateManager: StateManager = StreamStateManager(createDefaultState(), catalog) + + val expectedFirstDbState = + DbState() + .withCdc(false) + .withStreams( + java.util.List.of( + DbStreamState() + .withStreamName(StateTestConstants.STREAM_NAME1) + .withStreamNamespace(StateTestConstants.NAMESPACE) + .withCursorField( + java.util.List.of(StateTestConstants.CURSOR_FIELD1) + ) + .withCursor("a"), + DbStreamState() + .withStreamName(StateTestConstants.STREAM_NAME2) + .withStreamNamespace(StateTestConstants.NAMESPACE) + ) + .stream() + .sorted(Comparator.comparing { obj: DbStreamState -> obj.streamName }) + .collect(Collectors.toList()) + ) + + val expectedFirstEmission = + createStreamState( + StateTestConstants.STREAM_NAME1, + StateTestConstants.NAMESPACE, + java.util.List.of(StateTestConstants.CURSOR_FIELD1), + "a", + 0L + ) + .withData(Jsons.jsonNode(expectedFirstDbState)) + val actualFirstEmission = + stateManager.updateAndEmit(StateTestConstants.NAME_NAMESPACE_PAIR1, "a") + Assertions.assertEquals(expectedFirstEmission, actualFirstEmission) + } + + @Test + fun testCdcStateManager() { + val catalog = Mockito.mock(ConfiguredAirbyteCatalog::class.java) + val stateManager: StateManager = + StreamStateManager( + java.util.List.of( + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream(AirbyteStreamState()) + ), + catalog + ) + Assertions.assertThrows(UnsupportedOperationException::class.java) { + stateManager.cdcStateManager + } + } + + private fun createDefaultState(): List { + return java.util.List.of( + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream(AirbyteStreamState()) + ) + } + + private fun createStreamState( + name: String?, + namespace: String?, + cursorFields: List?, + cursorValue: String?, + cursorRecordCount: Long + ): AirbyteStateMessage { + val dbStreamState = DbStreamState().withStreamName(name).withStreamNamespace(namespace) + + if (cursorFields != null && !cursorFields.isEmpty()) { + dbStreamState.withCursorField(cursorFields) + } + + if (cursorValue != null) { + dbStreamState.withCursor(cursorValue) + } + + if (cursorRecordCount > 0L) { + dbStreamState.withCursorRecordCount(cursorRecordCount) + } + + return AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream( + AirbyteStreamState() + .withStreamDescriptor( + StreamDescriptor().withName(name).withNamespace(namespace) + ) + .withStreamState(Jsons.jsonNode(dbStreamState)) + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/test/utils/DatabaseConnectionHelperTest.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/test/utils/DatabaseConnectionHelperTest.kt new file mode 100644 index 0000000000000..7afe5b674876d --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/kotlin/io/airbyte/cdk/test/utils/DatabaseConnectionHelperTest.kt @@ -0,0 +1,57 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.testutils + +import com.zaxxer.hikari.HikariDataSource +import io.airbyte.cdk.testutils.DatabaseConnectionHelper.createDataSource +import io.airbyte.cdk.testutils.DatabaseConnectionHelper.createDslContext +import org.jooq.SQLDialect +import org.junit.jupiter.api.AfterAll +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.BeforeAll +import org.junit.jupiter.api.Test +import org.testcontainers.containers.PostgreSQLContainer + +internal class DatabaseConnectionHelperTest { + @Test + fun testCreatingFromATestContainer() { + val dataSource = createDataSource(container) + Assertions.assertNotNull(dataSource) + Assertions.assertEquals(HikariDataSource::class.java, dataSource!!.javaClass) + Assertions.assertEquals( + 10, + (dataSource as HikariDataSource?)!!.hikariConfigMXBean.maximumPoolSize + ) + } + + @Test + fun testCreatingADslContextFromATestContainer() { + val dialect = SQLDialect.POSTGRES + val dslContext = createDslContext(container, dialect) + Assertions.assertNotNull(dslContext) + Assertions.assertEquals(dialect, dslContext!!.configuration().dialect()) + } + + companion object { + private const val DATABASE_NAME = "airbyte_test_database" + + protected var container: PostgreSQLContainer<*> = + PostgreSQLContainer("postgres:13-alpine") + .withDatabaseName(DATABASE_NAME) + .withUsername("docker") + .withPassword("docker") + + @BeforeAll + @JvmStatic + fun dbSetup() { + container.start() + } + + @AfterAll + @JvmStatic + fun dbDown() { + container!!.close() + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/debezium/CdcSourceTest.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/debezium/CdcSourceTest.java deleted file mode 100644 index 729d774f33a82..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/debezium/CdcSourceTest.java +++ /dev/null @@ -1,864 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.debezium; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; -import com.google.common.collect.Streams; -import io.airbyte.cdk.integrations.base.Source; -import io.airbyte.cdk.testutils.TestDatabase; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.util.AutoCloseableIterator; -import io.airbyte.commons.util.AutoCloseableIterators; -import io.airbyte.protocol.models.Field; -import io.airbyte.protocol.models.JsonSchemaType; -import io.airbyte.protocol.models.v0.AirbyteCatalog; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteMessage.Type; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import io.airbyte.protocol.models.v0.AirbyteStream; -import io.airbyte.protocol.models.v0.AirbyteStreamState; -import io.airbyte.protocol.models.v0.CatalogHelpers; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import io.airbyte.protocol.models.v0.SyncMode; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public abstract class CdcSourceTest> { - - static private final Logger LOGGER = LoggerFactory.getLogger(CdcSourceTest.class); - - static protected final String MODELS_STREAM_NAME = "models"; - static protected final Set STREAM_NAMES = Set.of(MODELS_STREAM_NAME); - static protected final String COL_ID = "id"; - static protected final String COL_MAKE_ID = "make_id"; - static protected final String COL_MODEL = "model"; - - static protected final List MODEL_RECORDS = ImmutableList.of( - Jsons.jsonNode(ImmutableMap.of(COL_ID, 11, COL_MAKE_ID, 1, COL_MODEL, "Fiesta")), - Jsons.jsonNode(ImmutableMap.of(COL_ID, 12, COL_MAKE_ID, 1, COL_MODEL, "Focus")), - Jsons.jsonNode(ImmutableMap.of(COL_ID, 13, COL_MAKE_ID, 1, COL_MODEL, "Ranger")), - Jsons.jsonNode(ImmutableMap.of(COL_ID, 14, COL_MAKE_ID, 2, COL_MODEL, "GLA")), - Jsons.jsonNode(ImmutableMap.of(COL_ID, 15, COL_MAKE_ID, 2, COL_MODEL, "A 220")), - Jsons.jsonNode(ImmutableMap.of(COL_ID, 16, COL_MAKE_ID, 2, COL_MODEL, "E 350"))); - - static protected final String RANDOM_TABLE_NAME = MODELS_STREAM_NAME + "_random"; - - static protected final List MODEL_RECORDS_RANDOM = MODEL_RECORDS.stream() - .map(r -> Jsons.jsonNode(ImmutableMap.of( - COL_ID + "_random", r.get(COL_ID).asInt() * 1000, - COL_MAKE_ID + "_random", r.get(COL_MAKE_ID), - COL_MODEL + "_random", r.get(COL_MODEL).asText() + "-random"))) - .toList(); - - protected T testdb; - - protected String createTableSqlFmt() { - return "CREATE TABLE %s.%s(%s);"; - } - - protected String createSchemaSqlFmt() { - return "CREATE SCHEMA %s;"; - } - - protected String modelsSchema() { - return "models_schema"; - } - - /** - * The schema of a random table which is used as a new table in snapshot test - */ - protected String randomSchema() { - return "models_schema_random"; - } - - protected AirbyteCatalog getCatalog() { - return new AirbyteCatalog().withStreams(List.of( - CatalogHelpers.createAirbyteStream( - MODELS_STREAM_NAME, - modelsSchema(), - Field.of(COL_ID, JsonSchemaType.INTEGER), - Field.of(COL_MAKE_ID, JsonSchemaType.INTEGER), - Field.of(COL_MODEL, JsonSchemaType.STRING)) - .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) - .withSourceDefinedPrimaryKey(List.of(List.of(COL_ID))))); - } - - protected ConfiguredAirbyteCatalog getConfiguredCatalog() { - final var configuredCatalog = CatalogHelpers.toDefaultConfiguredCatalog(getCatalog()); - configuredCatalog.getStreams().forEach(s -> s.setSyncMode(SyncMode.INCREMENTAL)); - return configuredCatalog; - } - - protected abstract T createTestDatabase(); - - protected abstract S source(); - - protected abstract JsonNode config(); - - protected abstract CdcTargetPosition cdcLatestTargetPosition(); - - protected abstract CdcTargetPosition extractPosition(final JsonNode record); - - protected abstract void assertNullCdcMetaData(final JsonNode data); - - protected abstract void assertCdcMetaData(final JsonNode data, final boolean deletedAtNull); - - protected abstract void removeCDCColumns(final ObjectNode data); - - protected abstract void addCdcMetadataColumns(final AirbyteStream stream); - - protected abstract void addCdcDefaultCursorField(final AirbyteStream stream); - - protected abstract void assertExpectedStateMessages(final List stateMessages); - - // TODO: this assertion should be added into test cases in this class, we will need to implement - // corresponding iterator for other connectors before - // doing so. - protected void assertExpectedStateMessageCountMatches(final List stateMessages, long totalCount) { - // Do nothing. - } - - @BeforeEach - protected void setup() { - testdb = createTestDatabase(); - createTables(); - populateTables(); - } - - protected void createTables() { - // create and populate actual table - final var actualColumns = ImmutableMap.of( - COL_ID, "INTEGER", - COL_MAKE_ID, "INTEGER", - COL_MODEL, "VARCHAR(200)"); - testdb - .with(createSchemaSqlFmt(), modelsSchema()) - .with(createTableSqlFmt(), modelsSchema(), MODELS_STREAM_NAME, columnClause(actualColumns, Optional.of(COL_ID))); - - // Create random table. - // This table is not part of Airbyte sync. It is being created just to make sure the schemas not - // being synced by Airbyte are not causing issues with our debezium logic. - final var randomColumns = ImmutableMap.of( - COL_ID + "_random", "INTEGER", - COL_MAKE_ID + "_random", "INTEGER", - COL_MODEL + "_random", "VARCHAR(200)"); - if (!randomSchema().equals(modelsSchema())) { - testdb.with(createSchemaSqlFmt(), randomSchema()); - } - testdb.with(createTableSqlFmt(), randomSchema(), RANDOM_TABLE_NAME, columnClause(randomColumns, Optional.of(COL_ID + "_random"))); - } - - protected void populateTables() { - for (final JsonNode recordJson : MODEL_RECORDS) { - writeModelRecord(recordJson); - } - - for (final JsonNode recordJson : MODEL_RECORDS_RANDOM) { - writeRecords(recordJson, randomSchema(), RANDOM_TABLE_NAME, - COL_ID + "_random", COL_MAKE_ID + "_random", COL_MODEL + "_random"); - } - } - - @AfterEach - protected void tearDown() { - try { - testdb.close(); - } catch (Throwable e) { - LOGGER.error("exception during teardown", e); - } - } - - protected String columnClause(final Map columnsWithDataType, final Optional primaryKey) { - final StringBuilder columnClause = new StringBuilder(); - int i = 0; - for (final Map.Entry column : columnsWithDataType.entrySet()) { - columnClause.append(column.getKey()); - columnClause.append(" "); - columnClause.append(column.getValue()); - if (i < (columnsWithDataType.size() - 1)) { - columnClause.append(","); - columnClause.append(" "); - } - i++; - } - primaryKey.ifPresent(s -> columnClause.append(", PRIMARY KEY (").append(s).append(")")); - - return columnClause.toString(); - } - - protected void writeModelRecord(final JsonNode recordJson) { - writeRecords(recordJson, modelsSchema(), MODELS_STREAM_NAME, COL_ID, COL_MAKE_ID, COL_MODEL); - } - - protected void writeRecords( - final JsonNode recordJson, - final String dbName, - final String streamName, - final String idCol, - final String makeIdCol, - final String modelCol) { - testdb.with("INSERT INTO %s.%s (%s, %s, %s) VALUES (%s, %s, '%s');", dbName, streamName, - idCol, makeIdCol, modelCol, - recordJson.get(idCol).asInt(), recordJson.get(makeIdCol).asInt(), - recordJson.get(modelCol).asText()); - } - - protected void deleteMessageOnIdCol(final String streamName, final String idCol, final int idValue) { - testdb.with("DELETE FROM %s.%s WHERE %s = %s", modelsSchema(), streamName, idCol, idValue); - } - - protected void deleteCommand(final String streamName) { - testdb.with("DELETE FROM %s.%s", modelsSchema(), streamName); - } - - protected void updateCommand(final String streamName, final String modelCol, final String modelVal, final String idCol, final int idValue) { - testdb.with("UPDATE %s.%s SET %s = '%s' WHERE %s = %s", modelsSchema(), streamName, - modelCol, modelVal, COL_ID, 11); - } - - static protected Set removeDuplicates(final Set messages) { - final Set existingDataRecordsWithoutUpdated = new HashSet<>(); - final Set output = new HashSet<>(); - - for (final AirbyteRecordMessage message : messages) { - final ObjectNode node = message.getData().deepCopy(); - node.remove("_ab_cdc_updated_at"); - - if (existingDataRecordsWithoutUpdated.contains(node)) { - LOGGER.info("Removing duplicate node: " + node); - } else { - output.add(message); - existingDataRecordsWithoutUpdated.add(node); - } - } - - return output; - } - - protected Set extractRecordMessages(final List messages) { - final Map> recordsPerStream = extractRecordMessagesStreamWise(messages); - final Set consolidatedRecords = new HashSet<>(); - recordsPerStream.values().forEach(consolidatedRecords::addAll); - return consolidatedRecords; - } - - protected Map> extractRecordMessagesStreamWise(final List messages) { - final Map> recordsPerStream = new HashMap<>(); - for (final AirbyteMessage message : messages) { - if (message.getType() == Type.RECORD) { - AirbyteRecordMessage recordMessage = message.getRecord(); - recordsPerStream.computeIfAbsent(recordMessage.getStream(), (c) -> new ArrayList<>()).add(recordMessage); - } - } - - final Map> recordsPerStreamWithNoDuplicates = new HashMap<>(); - for (final Map.Entry> element : recordsPerStream.entrySet()) { - final String streamName = element.getKey(); - final List records = element.getValue(); - final Set recordMessageSet = new HashSet<>(records); - assertEquals(records.size(), recordMessageSet.size(), - "Expected no duplicates in airbyte record message output for a single sync."); - recordsPerStreamWithNoDuplicates.put(streamName, recordMessageSet); - } - - return recordsPerStreamWithNoDuplicates; - } - - protected List extractStateMessages(final List messages) { - return messages.stream().filter(r -> r.getType() == Type.STATE).map(AirbyteMessage::getState) - .collect(Collectors.toList()); - } - - protected void assertExpectedRecords(final Set expectedRecords, final Set actualRecords) { - // assume all streams are cdc. - assertExpectedRecords(expectedRecords, actualRecords, actualRecords.stream().map(AirbyteRecordMessage::getStream).collect(Collectors.toSet())); - } - - private void assertExpectedRecords(final Set expectedRecords, - final Set actualRecords, - final Set cdcStreams) { - assertExpectedRecords(expectedRecords, actualRecords, cdcStreams, STREAM_NAMES, modelsSchema()); - } - - protected void assertExpectedRecords(final Set expectedRecords, - final Set actualRecords, - final Set cdcStreams, - final Set streamNames, - final String namespace) { - final Set actualData = actualRecords - .stream() - .map(recordMessage -> { - assertTrue(streamNames.contains(recordMessage.getStream())); - assertNotNull(recordMessage.getEmittedAt()); - - assertEquals(namespace, recordMessage.getNamespace()); - - final JsonNode data = recordMessage.getData(); - - if (cdcStreams.contains(recordMessage.getStream())) { - assertCdcMetaData(data, true); - } else { - assertNullCdcMetaData(data); - } - - removeCDCColumns((ObjectNode) data); - - return data; - }) - .collect(Collectors.toSet()); - - assertEquals(expectedRecords, actualData); - } - - @Test - // On the first sync, produce returns records that exist in the database. - void testExistingData() throws Exception { - final CdcTargetPosition targetPosition = cdcLatestTargetPosition(); - final AutoCloseableIterator read = source().read(config(), getConfiguredCatalog(), null); - final List actualRecords = AutoCloseableIterators.toListAndClose(read); - - final Set recordMessages = extractRecordMessages(actualRecords); - final List stateMessages = extractStateMessages(actualRecords); - - assertNotNull(targetPosition); - recordMessages.forEach(record -> { - compareTargetPositionFromTheRecordsWithTargetPostionGeneratedBeforeSync(targetPosition, record); - }); - - assertExpectedRecords(new HashSet<>(MODEL_RECORDS), recordMessages); - assertExpectedStateMessages(stateMessages); - assertExpectedStateMessageCountMatches(stateMessages, MODEL_RECORDS.size()); - } - - protected void compareTargetPositionFromTheRecordsWithTargetPostionGeneratedBeforeSync(final CdcTargetPosition targetPosition, - final AirbyteRecordMessage record) { - assertEquals(extractPosition(record.getData()), targetPosition); - } - - @Test - // When a record is deleted, produces a deletion record. - public void testDelete() throws Exception { - final AutoCloseableIterator read1 = source() - .read(config(), getConfiguredCatalog(), null); - final List actualRecords1 = AutoCloseableIterators.toListAndClose(read1); - final List stateMessages1 = extractStateMessages(actualRecords1); - assertExpectedStateMessages(stateMessages1); - - deleteMessageOnIdCol(MODELS_STREAM_NAME, COL_ID, 11); - waitForCdcRecords(modelsSchema(), MODELS_STREAM_NAME, 1); - - final JsonNode state = Jsons.jsonNode(Collections.singletonList(stateMessages1.get(stateMessages1.size() - 1))); - final AutoCloseableIterator read2 = source() - .read(config(), getConfiguredCatalog(), state); - final List actualRecords2 = AutoCloseableIterators.toListAndClose(read2); - final List recordMessages2 = new ArrayList<>( - extractRecordMessages(actualRecords2)); - final List stateMessages2 = extractStateMessages(actualRecords2); - assertExpectedStateMessagesFromIncrementalSync(stateMessages2); - assertExpectedStateMessageCountMatches(stateMessages2, 1); - assertEquals(1, recordMessages2.size()); - assertEquals(11, recordMessages2.get(0).getData().get(COL_ID).asInt()); - assertCdcMetaData(recordMessages2.get(0).getData(), false); - } - - protected void assertExpectedStateMessagesFromIncrementalSync(final List stateMessages) { - assertExpectedStateMessages(stateMessages); - } - - @Test - // When a record is updated, produces an update record. - public void testUpdate() throws Exception { - final String updatedModel = "Explorer"; - final AutoCloseableIterator read1 = source() - .read(config(), getConfiguredCatalog(), null); - final List actualRecords1 = AutoCloseableIterators.toListAndClose(read1); - final List stateMessages1 = extractStateMessages(actualRecords1); - assertExpectedStateMessages(stateMessages1); - - updateCommand(MODELS_STREAM_NAME, COL_MODEL, updatedModel, COL_ID, 11); - waitForCdcRecords(modelsSchema(), MODELS_STREAM_NAME, 1); - - final JsonNode state = Jsons.jsonNode(Collections.singletonList(stateMessages1.get(stateMessages1.size() - 1))); - final AutoCloseableIterator read2 = source() - .read(config(), getConfiguredCatalog(), state); - final List actualRecords2 = AutoCloseableIterators.toListAndClose(read2); - final List recordMessages2 = new ArrayList<>( - extractRecordMessages(actualRecords2)); - final List stateMessages2 = extractStateMessages(actualRecords2); - assertExpectedStateMessagesFromIncrementalSync(stateMessages2); - assertEquals(1, recordMessages2.size()); - assertEquals(11, recordMessages2.get(0).getData().get(COL_ID).asInt()); - assertEquals(updatedModel, recordMessages2.get(0).getData().get(COL_MODEL).asText()); - assertCdcMetaData(recordMessages2.get(0).getData(), true); - assertExpectedStateMessageCountMatches(stateMessages2, 1); - } - - @SuppressWarnings({"BusyWait", "CodeBlock2Expr"}) - @Test - // Verify that when data is inserted into the database while a sync is happening and after the first - // sync, it all gets replicated. - protected void testRecordsProducedDuringAndAfterSync() throws Exception { - int recordsCreatedBeforeTestCount = MODEL_RECORDS.size(); - int expectedRecords = recordsCreatedBeforeTestCount; - int expectedRecordsInCdc = 0; - final int recordsToCreate = 20; - // first batch of records. 20 created here and 6 created in setup method. - for (int recordsCreated = 0; recordsCreated < recordsToCreate; recordsCreated++) { - final JsonNode record = - Jsons.jsonNode(ImmutableMap - .of(COL_ID, 100 + recordsCreated, COL_MAKE_ID, 1, COL_MODEL, - "F-" + recordsCreated)); - writeModelRecord(record); - expectedRecords++; - expectedRecordsInCdc++; - } - waitForCdcRecords(modelsSchema(), MODELS_STREAM_NAME, expectedRecordsInCdc); - - final AutoCloseableIterator firstBatchIterator = source() - .read(config(), getConfiguredCatalog(), null); - final List dataFromFirstBatch = AutoCloseableIterators - .toListAndClose(firstBatchIterator); - final List stateAfterFirstBatch = extractStateMessages(dataFromFirstBatch); - assertExpectedStateMessagesForRecordsProducedDuringAndAfterSync(stateAfterFirstBatch); - final Set recordsFromFirstBatch = extractRecordMessages( - dataFromFirstBatch); - assertEquals(expectedRecords, recordsFromFirstBatch.size()); - - // second batch of records again 20 being created - for (int recordsCreated = 0; recordsCreated < recordsToCreate; recordsCreated++) { - final JsonNode record = - Jsons.jsonNode(ImmutableMap - .of(COL_ID, 200 + recordsCreated, COL_MAKE_ID, 1, COL_MODEL, - "F-" + recordsCreated)); - writeModelRecord(record); - expectedRecords++; - expectedRecordsInCdc++; - } - waitForCdcRecords(modelsSchema(), MODELS_STREAM_NAME, expectedRecordsInCdc); - - final JsonNode state = Jsons.jsonNode(Collections.singletonList(stateAfterFirstBatch.get(stateAfterFirstBatch.size() - 1))); - final AutoCloseableIterator secondBatchIterator = source() - .read(config(), getConfiguredCatalog(), state); - final List dataFromSecondBatch = AutoCloseableIterators - .toListAndClose(secondBatchIterator); - - final List stateAfterSecondBatch = extractStateMessages(dataFromSecondBatch); - assertExpectedStateMessagesFromIncrementalSync(stateAfterSecondBatch); - - final Set recordsFromSecondBatch = extractRecordMessages( - dataFromSecondBatch); - assertEquals(recordsToCreate, recordsFromSecondBatch.size(), - "Expected 20 records to be replicated in the second sync."); - - // sometimes there can be more than one of these at the end of the snapshot and just before the - // first incremental. - final Set recordsFromFirstBatchWithoutDuplicates = removeDuplicates( - recordsFromFirstBatch); - final Set recordsFromSecondBatchWithoutDuplicates = removeDuplicates( - recordsFromSecondBatch); - - assertTrue(recordsCreatedBeforeTestCount < recordsFromFirstBatchWithoutDuplicates.size(), - "Expected first sync to include records created while the test was running."); - assertEquals(expectedRecords, - recordsFromFirstBatchWithoutDuplicates.size() + recordsFromSecondBatchWithoutDuplicates - .size()); - } - - protected void assertExpectedStateMessagesForRecordsProducedDuringAndAfterSync(final List stateAfterFirstBatch) { - assertExpectedStateMessages(stateAfterFirstBatch); - } - - @Test - // When both incremental CDC and full refresh are configured for different streams in a sync, the - // data is replicated as expected. - public void testCdcAndFullRefreshInSameSync() throws Exception { - final ConfiguredAirbyteCatalog configuredCatalog = Jsons.clone(getConfiguredCatalog()); - - final List MODEL_RECORDS_2 = ImmutableList.of( - Jsons.jsonNode(ImmutableMap.of(COL_ID, 110, COL_MAKE_ID, 1, COL_MODEL, "Fiesta-2")), - Jsons.jsonNode(ImmutableMap.of(COL_ID, 120, COL_MAKE_ID, 1, COL_MODEL, "Focus-2")), - Jsons.jsonNode(ImmutableMap.of(COL_ID, 130, COL_MAKE_ID, 1, COL_MODEL, "Ranger-2")), - Jsons.jsonNode(ImmutableMap.of(COL_ID, 140, COL_MAKE_ID, 2, COL_MODEL, "GLA-2")), - Jsons.jsonNode(ImmutableMap.of(COL_ID, 150, COL_MAKE_ID, 2, COL_MODEL, "A 220-2")), - Jsons.jsonNode(ImmutableMap.of(COL_ID, 160, COL_MAKE_ID, 2, COL_MODEL, "E 350-2"))); - - final var columns = ImmutableMap.of(COL_ID, "INTEGER", COL_MAKE_ID, "INTEGER", COL_MODEL, "VARCHAR(200)"); - testdb.with(createTableSqlFmt(), modelsSchema(), MODELS_STREAM_NAME + "_2", columnClause(columns, Optional.of(COL_ID))); - - for (final JsonNode recordJson : MODEL_RECORDS_2) { - writeRecords(recordJson, modelsSchema(), MODELS_STREAM_NAME + "_2", COL_ID, COL_MAKE_ID, COL_MODEL); - } - - final ConfiguredAirbyteStream airbyteStream = new ConfiguredAirbyteStream() - .withStream(CatalogHelpers.createAirbyteStream( - MODELS_STREAM_NAME + "_2", - modelsSchema(), - Field.of(COL_ID, JsonSchemaType.INTEGER), - Field.of(COL_MAKE_ID, JsonSchemaType.INTEGER), - Field.of(COL_MODEL, JsonSchemaType.STRING)) - .withSupportedSyncModes( - Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) - .withSourceDefinedPrimaryKey(List.of(List.of(COL_ID)))); - airbyteStream.setSyncMode(SyncMode.FULL_REFRESH); - - final List streams = configuredCatalog.getStreams(); - streams.add(airbyteStream); - configuredCatalog.withStreams(streams); - - final AutoCloseableIterator read1 = source() - .read(config(), configuredCatalog, null); - final List actualRecords1 = AutoCloseableIterators.toListAndClose(read1); - - final Set recordMessages1 = extractRecordMessages(actualRecords1); - final List stateMessages1 = extractStateMessages(actualRecords1); - final HashSet names = new HashSet<>(STREAM_NAMES); - names.add(MODELS_STREAM_NAME + "_2"); - assertExpectedStateMessages(stateMessages1); - // Full refresh does not get any state messages. - assertExpectedStateMessageCountMatches(stateMessages1, MODEL_RECORDS_2.size()); - assertExpectedRecords(Streams.concat(MODEL_RECORDS_2.stream(), MODEL_RECORDS.stream()) - .collect(Collectors.toSet()), - recordMessages1, - Collections.singleton(MODELS_STREAM_NAME), - names, - modelsSchema()); - - final JsonNode puntoRecord = Jsons - .jsonNode(ImmutableMap.of(COL_ID, 100, COL_MAKE_ID, 3, COL_MODEL, "Punto")); - writeModelRecord(puntoRecord); - waitForCdcRecords(modelsSchema(), MODELS_STREAM_NAME, 1); - - final JsonNode state = Jsons.jsonNode(Collections.singletonList(stateMessages1.get(stateMessages1.size() - 1))); - final AutoCloseableIterator read2 = source() - .read(config(), configuredCatalog, state); - final List actualRecords2 = AutoCloseableIterators.toListAndClose(read2); - - final Set recordMessages2 = extractRecordMessages(actualRecords2); - final List stateMessages2 = extractStateMessages(actualRecords2); - assertExpectedStateMessagesFromIncrementalSync(stateMessages2); - assertExpectedStateMessageCountMatches(stateMessages2, 1); - assertExpectedRecords( - Streams.concat(MODEL_RECORDS_2.stream(), Stream.of(puntoRecord)) - .collect(Collectors.toSet()), - recordMessages2, - Collections.singleton(MODELS_STREAM_NAME), - names, - modelsSchema()); - } - - @Test - // When no records exist, no records are returned. - public void testNoData() throws Exception { - - deleteCommand(MODELS_STREAM_NAME); - waitForCdcRecords(modelsSchema(), MODELS_STREAM_NAME, MODEL_RECORDS.size()); - final AutoCloseableIterator read = source().read(config(), getConfiguredCatalog(), null); - final List actualRecords = AutoCloseableIterators.toListAndClose(read); - - final Set recordMessages = extractRecordMessages(actualRecords); - final List stateMessages = extractStateMessages(actualRecords); - assertExpectedRecords(Collections.emptySet(), recordMessages); - assertExpectedStateMessagesForNoData(stateMessages); - assertExpectedStateMessageCountMatches(stateMessages, 0); - } - - protected void assertExpectedStateMessagesForNoData(final List stateMessages) { - assertExpectedStateMessages(stateMessages); - } - - @Test - // When no changes have been made to the database since the previous sync, no records are returned. - public void testNoDataOnSecondSync() throws Exception { - final AutoCloseableIterator read1 = source() - .read(config(), getConfiguredCatalog(), null); - final List actualRecords1 = AutoCloseableIterators.toListAndClose(read1); - final List stateMessagesFromFirstSync = extractStateMessages(actualRecords1); - final JsonNode state = Jsons.jsonNode(Collections.singletonList(stateMessagesFromFirstSync.get(stateMessagesFromFirstSync.size() - 1))); - - final AutoCloseableIterator read2 = source() - .read(config(), getConfiguredCatalog(), state); - final List actualRecords2 = AutoCloseableIterators.toListAndClose(read2); - - final Set recordMessages2 = extractRecordMessages(actualRecords2); - final List stateMessages2 = extractStateMessages(actualRecords2); - - assertExpectedRecords(Collections.emptySet(), recordMessages2); - assertExpectedStateMessagesFromIncrementalSync(stateMessages2); - assertExpectedStateMessageCountMatches(stateMessages2, 0); - } - - @Test - public void testCheck() throws Exception { - final AirbyteConnectionStatus status = source().check(config()); - assertEquals(status.getStatus(), AirbyteConnectionStatus.Status.SUCCEEDED); - } - - @Test - public void testDiscover() throws Exception { - final AirbyteCatalog expectedCatalog = expectedCatalogForDiscover(); - final AirbyteCatalog actualCatalog = source().discover(config()); - - assertEquals( - expectedCatalog.getStreams().stream().sorted(Comparator.comparing(AirbyteStream::getName)) - .collect(Collectors.toList()), - actualCatalog.getStreams().stream().sorted(Comparator.comparing(AirbyteStream::getName)) - .collect(Collectors.toList())); - } - - @Test - public void newTableSnapshotTest() throws Exception { - final AutoCloseableIterator firstBatchIterator = source() - .read(config(), getConfiguredCatalog(), null); - final List dataFromFirstBatch = AutoCloseableIterators - .toListAndClose(firstBatchIterator); - final Set recordsFromFirstBatch = extractRecordMessages( - dataFromFirstBatch); - final List stateAfterFirstBatch = extractStateMessages(dataFromFirstBatch); - assertExpectedStateMessages(stateAfterFirstBatch); - assertExpectedStateMessageCountMatches(stateAfterFirstBatch, MODEL_RECORDS.size()); - - final AirbyteStateMessage stateMessageEmittedAfterFirstSyncCompletion = stateAfterFirstBatch.get(stateAfterFirstBatch.size() - 1); - assertEquals(AirbyteStateMessage.AirbyteStateType.GLOBAL, stateMessageEmittedAfterFirstSyncCompletion.getType()); - assertNotNull(stateMessageEmittedAfterFirstSyncCompletion.getGlobal().getSharedState()); - final Set streamsInStateAfterFirstSyncCompletion = stateMessageEmittedAfterFirstSyncCompletion.getGlobal().getStreamStates() - .stream() - .map(AirbyteStreamState::getStreamDescriptor) - .collect(Collectors.toSet()); - assertEquals(1, streamsInStateAfterFirstSyncCompletion.size()); - assertTrue(streamsInStateAfterFirstSyncCompletion.contains(new StreamDescriptor().withName(MODELS_STREAM_NAME).withNamespace(modelsSchema()))); - assertNotNull(stateMessageEmittedAfterFirstSyncCompletion.getData()); - - assertEquals((MODEL_RECORDS.size()), recordsFromFirstBatch.size()); - assertExpectedRecords(new HashSet<>(MODEL_RECORDS), recordsFromFirstBatch); - - final JsonNode state = stateAfterFirstBatch.get(stateAfterFirstBatch.size() - 1).getData(); - - final ConfiguredAirbyteCatalog newTables = CatalogHelpers - .toDefaultConfiguredCatalog(new AirbyteCatalog().withStreams(List.of( - CatalogHelpers.createAirbyteStream( - RANDOM_TABLE_NAME, - randomSchema(), - Field.of(COL_ID + "_random", JsonSchemaType.NUMBER), - Field.of(COL_MAKE_ID + "_random", JsonSchemaType.NUMBER), - Field.of(COL_MODEL + "_random", JsonSchemaType.STRING)) - .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) - .withSourceDefinedPrimaryKey(List.of(List.of(COL_ID + "_random")))))); - - newTables.getStreams().forEach(s -> s.setSyncMode(SyncMode.INCREMENTAL)); - final List combinedStreams = new ArrayList<>(); - combinedStreams.addAll(getConfiguredCatalog().getStreams()); - combinedStreams.addAll(newTables.getStreams()); - - final ConfiguredAirbyteCatalog updatedCatalog = new ConfiguredAirbyteCatalog().withStreams(combinedStreams); - - /* - * Write 20 records to the existing table - */ - final Set recordsWritten = new HashSet<>(); - for (int recordsCreated = 0; recordsCreated < 20; recordsCreated++) { - final JsonNode record = - Jsons.jsonNode(ImmutableMap - .of(COL_ID, 100 + recordsCreated, COL_MAKE_ID, 1, COL_MODEL, - "F-" + recordsCreated)); - recordsWritten.add(record); - writeModelRecord(record); - } - - final AutoCloseableIterator secondBatchIterator = source() - .read(config(), updatedCatalog, state); - final List dataFromSecondBatch = AutoCloseableIterators - .toListAndClose(secondBatchIterator); - - final List stateAfterSecondBatch = extractStateMessages(dataFromSecondBatch); - assertStateMessagesForNewTableSnapshotTest(stateAfterSecondBatch, stateMessageEmittedAfterFirstSyncCompletion); - - final Map> recordsStreamWise = extractRecordMessagesStreamWise(dataFromSecondBatch); - assertTrue(recordsStreamWise.containsKey(MODELS_STREAM_NAME)); - assertTrue(recordsStreamWise.containsKey(RANDOM_TABLE_NAME)); - - final Set recordsForModelsStreamFromSecondBatch = recordsStreamWise.get(MODELS_STREAM_NAME); - final Set recordsForModelsRandomStreamFromSecondBatch = recordsStreamWise.get(RANDOM_TABLE_NAME); - - assertEquals((MODEL_RECORDS_RANDOM.size()), recordsForModelsRandomStreamFromSecondBatch.size()); - assertEquals(20, recordsForModelsStreamFromSecondBatch.size()); - assertExpectedRecords(new HashSet<>(MODEL_RECORDS_RANDOM), recordsForModelsRandomStreamFromSecondBatch, - recordsForModelsRandomStreamFromSecondBatch.stream().map(AirbyteRecordMessage::getStream).collect( - Collectors.toSet()), - Sets - .newHashSet(RANDOM_TABLE_NAME), - randomSchema()); - assertExpectedRecords(recordsWritten, recordsForModelsStreamFromSecondBatch); - - /* - * Write 20 records to both the tables - */ - final Set recordsWrittenInRandomTable = new HashSet<>(); - recordsWritten.clear(); - for (int recordsCreated = 30; recordsCreated < 50; recordsCreated++) { - final JsonNode record = - Jsons.jsonNode(ImmutableMap - .of(COL_ID, 100 + recordsCreated, COL_MAKE_ID, 1, COL_MODEL, - "F-" + recordsCreated)); - writeModelRecord(record); - recordsWritten.add(record); - - final JsonNode record2 = Jsons - .jsonNode(ImmutableMap - .of(COL_ID + "_random", 11000 + recordsCreated, COL_MAKE_ID + "_random", 1 + recordsCreated, COL_MODEL + "_random", - "Fiesta-random" + recordsCreated)); - writeRecords(record2, randomSchema(), RANDOM_TABLE_NAME, - COL_ID + "_random", COL_MAKE_ID + "_random", COL_MODEL + "_random"); - recordsWrittenInRandomTable.add(record2); - } - - final JsonNode state2 = stateAfterSecondBatch.get(stateAfterSecondBatch.size() - 1).getData(); - final AutoCloseableIterator thirdBatchIterator = source() - .read(config(), updatedCatalog, state2); - final List dataFromThirdBatch = AutoCloseableIterators - .toListAndClose(thirdBatchIterator); - - final List stateAfterThirdBatch = extractStateMessages(dataFromThirdBatch); - assertTrue(stateAfterThirdBatch.size() >= 1); - - final AirbyteStateMessage stateMessageEmittedAfterThirdSyncCompletion = stateAfterThirdBatch.get(stateAfterThirdBatch.size() - 1); - assertEquals(AirbyteStateMessage.AirbyteStateType.GLOBAL, stateMessageEmittedAfterThirdSyncCompletion.getType()); - assertNotEquals(stateMessageEmittedAfterThirdSyncCompletion.getGlobal().getSharedState(), - stateAfterSecondBatch.get(stateAfterSecondBatch.size() - 1).getGlobal().getSharedState()); - final Set streamsInSyncCompletionStateAfterThirdSync = stateMessageEmittedAfterThirdSyncCompletion.getGlobal().getStreamStates() - .stream() - .map(AirbyteStreamState::getStreamDescriptor) - .collect(Collectors.toSet()); - assertTrue( - streamsInSyncCompletionStateAfterThirdSync.contains( - new StreamDescriptor().withName(RANDOM_TABLE_NAME).withNamespace(randomSchema()))); - assertTrue( - streamsInSyncCompletionStateAfterThirdSync.contains(new StreamDescriptor().withName(MODELS_STREAM_NAME).withNamespace(modelsSchema()))); - assertNotNull(stateMessageEmittedAfterThirdSyncCompletion.getData()); - - final Map> recordsStreamWiseFromThirdBatch = extractRecordMessagesStreamWise(dataFromThirdBatch); - assertTrue(recordsStreamWiseFromThirdBatch.containsKey(MODELS_STREAM_NAME)); - assertTrue(recordsStreamWiseFromThirdBatch.containsKey(RANDOM_TABLE_NAME)); - - final Set recordsForModelsStreamFromThirdBatch = recordsStreamWiseFromThirdBatch.get(MODELS_STREAM_NAME); - final Set recordsForModelsRandomStreamFromThirdBatch = recordsStreamWiseFromThirdBatch.get(RANDOM_TABLE_NAME); - - assertEquals(20, recordsForModelsStreamFromThirdBatch.size()); - assertEquals(20, recordsForModelsRandomStreamFromThirdBatch.size()); - assertExpectedRecords(recordsWritten, recordsForModelsStreamFromThirdBatch); - assertExpectedRecords(recordsWrittenInRandomTable, recordsForModelsRandomStreamFromThirdBatch, - recordsForModelsRandomStreamFromThirdBatch.stream().map(AirbyteRecordMessage::getStream).collect( - Collectors.toSet()), - Sets - .newHashSet(RANDOM_TABLE_NAME), - randomSchema()); - } - - protected void assertStateMessagesForNewTableSnapshotTest(final List stateMessages, - final AirbyteStateMessage stateMessageEmittedAfterFirstSyncCompletion) { - assertEquals(2, stateMessages.size()); - final AirbyteStateMessage stateMessageEmittedAfterSnapshotCompletionInSecondSync = stateMessages.get(0); - assertEquals(AirbyteStateMessage.AirbyteStateType.GLOBAL, stateMessageEmittedAfterSnapshotCompletionInSecondSync.getType()); - assertEquals(stateMessageEmittedAfterFirstSyncCompletion.getGlobal().getSharedState(), - stateMessageEmittedAfterSnapshotCompletionInSecondSync.getGlobal().getSharedState()); - final Set streamsInSnapshotState = stateMessageEmittedAfterSnapshotCompletionInSecondSync.getGlobal().getStreamStates() - .stream() - .map(AirbyteStreamState::getStreamDescriptor) - .collect(Collectors.toSet()); - assertEquals(2, streamsInSnapshotState.size()); - assertTrue( - streamsInSnapshotState.contains(new StreamDescriptor().withName(RANDOM_TABLE_NAME).withNamespace(randomSchema()))); - assertTrue(streamsInSnapshotState.contains(new StreamDescriptor().withName(MODELS_STREAM_NAME).withNamespace(modelsSchema()))); - assertNotNull(stateMessageEmittedAfterSnapshotCompletionInSecondSync.getData()); - - final AirbyteStateMessage stateMessageEmittedAfterSecondSyncCompletion = stateMessages.get(1); - assertEquals(AirbyteStateMessage.AirbyteStateType.GLOBAL, stateMessageEmittedAfterSecondSyncCompletion.getType()); - assertNotEquals(stateMessageEmittedAfterFirstSyncCompletion.getGlobal().getSharedState(), - stateMessageEmittedAfterSecondSyncCompletion.getGlobal().getSharedState()); - final Set streamsInSyncCompletionState = stateMessageEmittedAfterSecondSyncCompletion.getGlobal().getStreamStates() - .stream() - .map(AirbyteStreamState::getStreamDescriptor) - .collect(Collectors.toSet()); - assertEquals(2, streamsInSnapshotState.size()); - assertTrue( - streamsInSyncCompletionState.contains( - new StreamDescriptor().withName(RANDOM_TABLE_NAME).withNamespace(randomSchema()))); - assertTrue(streamsInSyncCompletionState.contains(new StreamDescriptor().withName(MODELS_STREAM_NAME).withNamespace(modelsSchema()))); - assertNotNull(stateMessageEmittedAfterSecondSyncCompletion.getData()); - } - - protected AirbyteCatalog expectedCatalogForDiscover() { - final AirbyteCatalog expectedCatalog = Jsons.clone(getCatalog()); - - final var columns = ImmutableMap.of(COL_ID, "INTEGER", COL_MAKE_ID, "INTEGER", COL_MODEL, "VARCHAR(200)"); - testdb.with(createTableSqlFmt(), modelsSchema(), MODELS_STREAM_NAME + "_2", columnClause(columns, Optional.empty())); - - final List streams = expectedCatalog.getStreams(); - // stream with PK - streams.get(0).setSourceDefinedCursor(true); - addCdcMetadataColumns(streams.get(0)); - addCdcDefaultCursorField(streams.get(0)); - - final AirbyteStream streamWithoutPK = CatalogHelpers.createAirbyteStream( - MODELS_STREAM_NAME + "_2", - modelsSchema(), - Field.of(COL_ID, JsonSchemaType.INTEGER), - Field.of(COL_MAKE_ID, JsonSchemaType.INTEGER), - Field.of(COL_MODEL, JsonSchemaType.STRING)); - streamWithoutPK.setSourceDefinedPrimaryKey(Collections.emptyList()); - streamWithoutPK.setSupportedSyncModes(List.of(SyncMode.FULL_REFRESH)); - addCdcDefaultCursorField(streamWithoutPK); - addCdcMetadataColumns(streamWithoutPK); - - final AirbyteStream randomStream = CatalogHelpers.createAirbyteStream( - RANDOM_TABLE_NAME, - randomSchema(), - Field.of(COL_ID + "_random", JsonSchemaType.INTEGER), - Field.of(COL_MAKE_ID + "_random", JsonSchemaType.INTEGER), - Field.of(COL_MODEL + "_random", JsonSchemaType.STRING)) - .withSourceDefinedCursor(true) - .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) - .withSourceDefinedPrimaryKey(List.of(List.of(COL_ID + "_random"))); - - addCdcDefaultCursorField(randomStream); - addCdcMetadataColumns(randomStream); - - streams.add(streamWithoutPK); - streams.add(randomStream); - expectedCatalog.withStreams(streams); - return expectedCatalog; - } - - protected void waitForCdcRecords(String schemaName, String tableName, int recordCount) - throws Exception {} - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/debug/DebugUtil.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/debug/DebugUtil.java deleted file mode 100644 index 836f6cf503472..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/debug/DebugUtil.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.debug; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.ObjectNode; -import io.airbyte.cdk.integrations.base.Source; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.commons.util.AutoCloseableIterator; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.util.Collections; - -/** - * Utility class defined to debug a source. Copy over any relevant configurations, catalogs & state - * in the resources/debug_resources directory. - */ -public class DebugUtil { - - @SuppressWarnings({"unchecked", "deprecation", "resource"}) - public static void debug(final Source debugSource) throws Exception { - final JsonNode debugConfig = DebugUtil.getConfig(); - final ConfiguredAirbyteCatalog configuredAirbyteCatalog = DebugUtil.getCatalog(); - JsonNode state; - try { - state = DebugUtil.getState(); - } catch (final Exception e) { - state = null; - } - - debugSource.check(debugConfig); - debugSource.discover(debugConfig); - - final AutoCloseableIterator messageIterator = debugSource.read(debugConfig, configuredAirbyteCatalog, state); - messageIterator.forEachRemaining(message -> {}); - } - - private static JsonNode getConfig() throws Exception { - final JsonNode originalConfig = new ObjectMapper().readTree(MoreResources.readResource("debug_resources/config.json")); - final JsonNode debugConfig = ((ObjectNode) originalConfig.deepCopy()).put("debug_mode", true); - return debugConfig; - } - - private static ConfiguredAirbyteCatalog getCatalog() throws Exception { - final String catalog = MoreResources.readResource("debug_resources/configured_catalog.json"); - return Jsons.deserialize(catalog, ConfiguredAirbyteCatalog.class); - } - - private static JsonNode getState() throws Exception { - final AirbyteStateMessage message = Jsons.deserialize(MoreResources.readResource("debug_resources/state.json"), AirbyteStateMessage.class); - return Jsons.jsonNode(Collections.singletonList(message)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/source/jdbc/test/JdbcSourceAcceptanceTest.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/source/jdbc/test/JdbcSourceAcceptanceTest.java deleted file mode 100644 index aac25c5d87b02..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/source/jdbc/test/JdbcSourceAcceptanceTest.java +++ /dev/null @@ -1,1108 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.jdbc.test; - -import static io.airbyte.cdk.integrations.source.relationaldb.RelationalDbQueryUtils.enquoteIdentifier; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.doCallRealMethod; -import static org.mockito.Mockito.spy; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; -import io.airbyte.cdk.db.factory.DatabaseDriver; -import io.airbyte.cdk.db.jdbc.JdbcUtils; -import io.airbyte.cdk.integrations.base.Source; -import io.airbyte.cdk.integrations.source.relationaldb.RelationalDbQueryUtils; -import io.airbyte.cdk.integrations.source.relationaldb.models.DbState; -import io.airbyte.cdk.integrations.source.relationaldb.models.DbStreamState; -import io.airbyte.cdk.testutils.TestDatabase; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.commons.util.MoreIterators; -import io.airbyte.protocol.models.Field; -import io.airbyte.protocol.models.JsonSchemaType; -import io.airbyte.protocol.models.v0.AirbyteCatalog; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus.Status; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteMessage.Type; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import io.airbyte.protocol.models.v0.AirbyteStateMessage.AirbyteStateType; -import io.airbyte.protocol.models.v0.AirbyteStateStats; -import io.airbyte.protocol.models.v0.AirbyteStream; -import io.airbyte.protocol.models.v0.AirbyteStreamState; -import io.airbyte.protocol.models.v0.CatalogHelpers; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.v0.ConnectorSpecification; -import io.airbyte.protocol.models.v0.DestinationSyncMode; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import io.airbyte.protocol.models.v0.SyncMode; -import java.math.BigDecimal; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Comparator; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.stream.Collectors; -import org.hamcrest.Matchers; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -/** - * Tests that should be run on all Sources that extend the AbstractJdbcSource. - */ -@SuppressFBWarnings( - value = {"MS_SHOULD_BE_FINAL"}, - justification = "The static variables are updated in subclasses for convenience, and cannot be final.") -abstract public class JdbcSourceAcceptanceTest> { - - static protected String SCHEMA_NAME = "jdbc_integration_test1"; - static protected String SCHEMA_NAME2 = "jdbc_integration_test2"; - static protected Set TEST_SCHEMAS = Set.of(SCHEMA_NAME, SCHEMA_NAME2); - - static protected String TABLE_NAME = "id_and_name"; - static protected String TABLE_NAME_WITH_SPACES = "id and name"; - static protected String TABLE_NAME_WITHOUT_PK = "id_and_name_without_pk"; - static protected String TABLE_NAME_COMPOSITE_PK = "full_name_composite_pk"; - static protected String TABLE_NAME_WITHOUT_CURSOR_TYPE = "table_without_cursor_type"; - static protected String TABLE_NAME_WITH_NULLABLE_CURSOR_TYPE = "table_with_null_cursor_type"; - // this table is used in testing incremental sync with concurrent insertions - static protected String TABLE_NAME_AND_TIMESTAMP = "name_and_timestamp"; - - static protected String COL_ID = "id"; - static protected String COL_NAME = "name"; - static protected String COL_UPDATED_AT = "updated_at"; - static protected String COL_FIRST_NAME = "first_name"; - static protected String COL_LAST_NAME = "last_name"; - static protected String COL_LAST_NAME_WITH_SPACE = "last name"; - static protected String COL_CURSOR = "cursor_field"; - static protected String COL_TIMESTAMP = "timestamp"; - static protected String COL_TIMESTAMP_TYPE = "TIMESTAMP"; - static protected Number ID_VALUE_1 = 1; - static protected Number ID_VALUE_2 = 2; - static protected Number ID_VALUE_3 = 3; - static protected Number ID_VALUE_4 = 4; - static protected Number ID_VALUE_5 = 5; - - static protected String DROP_SCHEMA_QUERY = "DROP SCHEMA IF EXISTS %s CASCADE"; - static protected String COLUMN_CLAUSE_WITH_PK = "id INTEGER, name VARCHAR(200) NOT NULL, updated_at DATE NOT NULL"; - static protected String COLUMN_CLAUSE_WITHOUT_PK = "id INTEGER, name VARCHAR(200) NOT NULL, updated_at DATE NOT NULL"; - static protected String COLUMN_CLAUSE_WITH_COMPOSITE_PK = - "first_name VARCHAR(200) NOT NULL, last_name VARCHAR(200) NOT NULL, updated_at DATE NOT NULL"; - - static protected String CREATE_TABLE_WITHOUT_CURSOR_TYPE_QUERY = "CREATE TABLE %s (%s bit NOT NULL);"; - static protected String INSERT_TABLE_WITHOUT_CURSOR_TYPE_QUERY = "INSERT INTO %s VALUES(0);"; - static protected String CREATE_TABLE_WITH_NULLABLE_CURSOR_TYPE_QUERY = "CREATE TABLE %s (%s VARCHAR(20));"; - static protected String INSERT_TABLE_WITH_NULLABLE_CURSOR_TYPE_QUERY = "INSERT INTO %s VALUES('Hello world :)');"; - static protected String INSERT_TABLE_NAME_AND_TIMESTAMP_QUERY = "INSERT INTO %s (name, timestamp) VALUES ('%s', '%s')"; - - protected T testdb; - - protected String streamName() { - return TABLE_NAME; - } - - /** - * A valid configuration to connect to a test database. - * - * @return config - */ - abstract protected JsonNode config(); - - /** - * An instance of the source that should be tests. - * - * @return abstract jdbc source - */ - abstract protected S source(); - - /** - * Creates a TestDatabase instance to be used in {@link #setup()}. - * - * @return TestDatabase instance to use for test case. - */ - abstract protected T createTestDatabase(); - - /** - * These tests write records without specifying a namespace (schema name). They will be written into - * whatever the default schema is for the database. When they are discovered they will be namespaced - * by the schema name (e.g. .). Thus the source needs to tell the - * tests what that default schema name is. If the database does not support schemas, then database - * name should used instead. - * - * @return name that will be used to namespace the record. - */ - abstract protected boolean supportsSchemas(); - - protected String createTableQuery(final String tableName, final String columnClause, final String primaryKeyClause) { - return String.format("CREATE TABLE %s(%s %s %s)", - tableName, columnClause, primaryKeyClause.equals("") ? "" : ",", primaryKeyClause); - } - - protected String primaryKeyClause(final List columns) { - if (columns.isEmpty()) { - return ""; - } - - final StringBuilder clause = new StringBuilder(); - clause.append("PRIMARY KEY ("); - for (int i = 0; i < columns.size(); i++) { - clause.append(columns.get(i)); - if (i != (columns.size() - 1)) { - clause.append(","); - } - } - clause.append(")"); - return clause.toString(); - } - - @BeforeEach - public void setup() throws Exception { - testdb = createTestDatabase(); - if (supportsSchemas()) { - createSchemas(); - } - if (testdb.getDatabaseDriver().equals(DatabaseDriver.ORACLE)) { - testdb.with("ALTER SESSION SET NLS_DATE_FORMAT = 'YYYY-MM-DD'"); - } - testdb - .with(createTableQuery(getFullyQualifiedTableName(TABLE_NAME), COLUMN_CLAUSE_WITH_PK, primaryKeyClause(Collections.singletonList("id")))) - .with("INSERT INTO %s(id, name, updated_at) VALUES (1, 'picard', '2004-10-19')", getFullyQualifiedTableName(TABLE_NAME)) - .with("INSERT INTO %s(id, name, updated_at) VALUES (2, 'crusher', '2005-10-19')", getFullyQualifiedTableName(TABLE_NAME)) - .with("INSERT INTO %s(id, name, updated_at) VALUES (3, 'vash', '2006-10-19')", getFullyQualifiedTableName(TABLE_NAME)) - .with(createTableQuery(getFullyQualifiedTableName(TABLE_NAME_WITHOUT_PK), COLUMN_CLAUSE_WITHOUT_PK, "")) - .with("INSERT INTO %s(id, name, updated_at) VALUES (1, 'picard', '2004-10-19')", getFullyQualifiedTableName(TABLE_NAME_WITHOUT_PK)) - .with("INSERT INTO %s(id, name, updated_at) VALUES (2, 'crusher', '2005-10-19')", getFullyQualifiedTableName(TABLE_NAME_WITHOUT_PK)) - .with("INSERT INTO %s(id, name, updated_at) VALUES (3, 'vash', '2006-10-19')", getFullyQualifiedTableName(TABLE_NAME_WITHOUT_PK)) - .with(createTableQuery(getFullyQualifiedTableName(TABLE_NAME_COMPOSITE_PK), COLUMN_CLAUSE_WITH_COMPOSITE_PK, - primaryKeyClause(List.of("first_name", "last_name")))) - .with("INSERT INTO %s(first_name, last_name, updated_at) VALUES ('first', 'picard', '2004-10-19')", - getFullyQualifiedTableName(TABLE_NAME_COMPOSITE_PK)) - .with("INSERT INTO %s(first_name, last_name, updated_at) VALUES ('second', 'crusher', '2005-10-19')", - getFullyQualifiedTableName(TABLE_NAME_COMPOSITE_PK)) - .with("INSERT INTO %s(first_name, last_name, updated_at) VALUES ('third', 'vash', '2006-10-19')", - getFullyQualifiedTableName(TABLE_NAME_COMPOSITE_PK)); - } - - protected void maybeSetShorterConnectionTimeout(final JsonNode config) { - // Optionally implement this to speed up test cases which will result in a connection timeout. - } - - @AfterEach - public void tearDown() { - testdb.close(); - } - - @Test - void testSpec() throws Exception { - final ConnectorSpecification actual = source().spec(); - final String resourceString = MoreResources.readResource("spec.json"); - final ConnectorSpecification expected = Jsons.deserialize(resourceString, ConnectorSpecification.class); - - assertEquals(expected, actual); - } - - @Test - void testCheckSuccess() throws Exception { - final AirbyteConnectionStatus actual = source().check(config()); - final AirbyteConnectionStatus expected = new AirbyteConnectionStatus().withStatus(Status.SUCCEEDED); - assertEquals(expected, actual); - } - - @Test - protected void testCheckFailure() throws Exception { - final var config = config(); - maybeSetShorterConnectionTimeout(config); - ((ObjectNode) config).put(JdbcUtils.PASSWORD_KEY, "fake"); - final AirbyteConnectionStatus actual = source().check(config); - assertEquals(Status.FAILED, actual.getStatus()); - } - - @Test - void testDiscover() throws Exception { - final AirbyteCatalog actual = filterOutOtherSchemas(source().discover(config())); - final AirbyteCatalog expected = getCatalog(getDefaultNamespace()); - assertEquals(expected.getStreams().size(), actual.getStreams().size()); - actual.getStreams().forEach(actualStream -> { - final Optional expectedStream = - expected.getStreams().stream() - .filter(stream -> stream.getNamespace().equals(actualStream.getNamespace()) && stream.getName().equals(actualStream.getName())) - .findAny(); - assertTrue(expectedStream.isPresent(), String.format("Unexpected stream %s", actualStream.getName())); - assertEquals(expectedStream.get(), actualStream); - }); - } - - @Test - protected void testDiscoverWithNonCursorFields() throws Exception { - testdb.with(CREATE_TABLE_WITHOUT_CURSOR_TYPE_QUERY, getFullyQualifiedTableName(TABLE_NAME_WITHOUT_CURSOR_TYPE), COL_CURSOR) - .with(INSERT_TABLE_WITHOUT_CURSOR_TYPE_QUERY, getFullyQualifiedTableName(TABLE_NAME_WITHOUT_CURSOR_TYPE)); - final AirbyteCatalog actual = filterOutOtherSchemas(source().discover(config())); - final AirbyteStream stream = - actual.getStreams().stream().filter(s -> s.getName().equalsIgnoreCase(TABLE_NAME_WITHOUT_CURSOR_TYPE)).findFirst().orElse(null); - assertNotNull(stream); - assertEquals(TABLE_NAME_WITHOUT_CURSOR_TYPE.toLowerCase(), stream.getName().toLowerCase()); - assertEquals(1, stream.getSupportedSyncModes().size()); - assertEquals(SyncMode.FULL_REFRESH, stream.getSupportedSyncModes().get(0)); - } - - @Test - protected void testDiscoverWithNullableCursorFields() throws Exception { - testdb.with(CREATE_TABLE_WITH_NULLABLE_CURSOR_TYPE_QUERY, getFullyQualifiedTableName(TABLE_NAME_WITH_NULLABLE_CURSOR_TYPE), COL_CURSOR) - .with(INSERT_TABLE_WITH_NULLABLE_CURSOR_TYPE_QUERY, getFullyQualifiedTableName(TABLE_NAME_WITH_NULLABLE_CURSOR_TYPE)); - final AirbyteCatalog actual = filterOutOtherSchemas(source().discover(config())); - final AirbyteStream stream = - actual.getStreams().stream().filter(s -> s.getName().equalsIgnoreCase(TABLE_NAME_WITH_NULLABLE_CURSOR_TYPE)).findFirst().orElse(null); - assertNotNull(stream); - assertEquals(TABLE_NAME_WITH_NULLABLE_CURSOR_TYPE.toLowerCase(), stream.getName().toLowerCase()); - assertEquals(2, stream.getSupportedSyncModes().size()); - assertTrue(stream.getSupportedSyncModes().contains(SyncMode.FULL_REFRESH)); - assertTrue(stream.getSupportedSyncModes().contains(SyncMode.INCREMENTAL)); - } - - protected AirbyteCatalog filterOutOtherSchemas(final AirbyteCatalog catalog) { - if (supportsSchemas()) { - final AirbyteCatalog filteredCatalog = Jsons.clone(catalog); - filteredCatalog.setStreams(filteredCatalog.getStreams() - .stream() - .filter(stream -> TEST_SCHEMAS.stream().anyMatch(schemaName -> stream.getNamespace().startsWith(schemaName))) - .collect(Collectors.toList())); - return filteredCatalog; - } else { - return catalog; - } - - } - - @Test - protected void testDiscoverWithMultipleSchemas() throws Exception { - // clickhouse and mysql do not have a concept of schemas, so this test does not make sense for them. - switch (testdb.getDatabaseDriver()) { - case MYSQL, CLICKHOUSE, TERADATA: - return; - } - - // add table and data to a separate schema. - testdb.with("CREATE TABLE %s(id VARCHAR(200) NOT NULL, name VARCHAR(200) NOT NULL)", - RelationalDbQueryUtils.getFullyQualifiedTableName(SCHEMA_NAME2, TABLE_NAME)) - .with("INSERT INTO %s(id, name) VALUES ('1','picard')", - RelationalDbQueryUtils.getFullyQualifiedTableName(SCHEMA_NAME2, TABLE_NAME)) - .with("INSERT INTO %s(id, name) VALUES ('2', 'crusher')", - RelationalDbQueryUtils.getFullyQualifiedTableName(SCHEMA_NAME2, TABLE_NAME)) - .with("INSERT INTO %s(id, name) VALUES ('3', 'vash')", - RelationalDbQueryUtils.getFullyQualifiedTableName(SCHEMA_NAME2, TABLE_NAME)); - - final AirbyteCatalog actual = source().discover(config()); - - final AirbyteCatalog expected = getCatalog(getDefaultNamespace()); - final List catalogStreams = new ArrayList<>(); - catalogStreams.addAll(expected.getStreams()); - catalogStreams.add(CatalogHelpers - .createAirbyteStream(TABLE_NAME, - SCHEMA_NAME2, - Field.of(COL_ID, JsonSchemaType.STRING), - Field.of(COL_NAME, JsonSchemaType.STRING)) - .withSupportedSyncModes(List.of(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL))); - expected.setStreams(catalogStreams); - // sort streams by name so that we are comparing lists with the same order. - final Comparator schemaTableCompare = Comparator.comparing(stream -> stream.getNamespace() + "." + stream.getName()); - expected.getStreams().sort(schemaTableCompare); - actual.getStreams().sort(schemaTableCompare); - assertEquals(expected, filterOutOtherSchemas(actual)); - } - - @Test - void testReadSuccess() throws Exception { - final List actualMessages = - MoreIterators.toList( - source().read(config(), getConfiguredCatalogWithOneStream(getDefaultNamespace()), null)); - - setEmittedAtToNull(actualMessages); - final List expectedMessages = getTestMessages(); - assertThat(expectedMessages, Matchers.containsInAnyOrder(actualMessages.toArray())); - assertThat(actualMessages, Matchers.containsInAnyOrder(expectedMessages.toArray())); - } - - @Test - protected void testReadOneColumn() throws Exception { - final ConfiguredAirbyteCatalog catalog = CatalogHelpers - .createConfiguredAirbyteCatalog(streamName(), getDefaultNamespace(), Field.of(COL_ID, JsonSchemaType.NUMBER)); - final List actualMessages = MoreIterators - .toList(source().read(config(), catalog, null)); - - setEmittedAtToNull(actualMessages); - - final List expectedMessages = getAirbyteMessagesReadOneColumn(); - assertEquals(expectedMessages.size(), actualMessages.size()); - assertTrue(expectedMessages.containsAll(actualMessages)); - assertTrue(actualMessages.containsAll(expectedMessages)); - } - - protected List getAirbyteMessagesReadOneColumn() { - final List expectedMessages = getTestMessages().stream() - .map(Jsons::clone) - .peek(m -> { - ((ObjectNode) m.getRecord().getData()).remove(COL_NAME); - ((ObjectNode) m.getRecord().getData()).remove(COL_UPDATED_AT); - ((ObjectNode) m.getRecord().getData()).replace(COL_ID, - convertIdBasedOnDatabase(m.getRecord().getData().get(COL_ID).asInt())); - }) - .collect(Collectors.toList()); - return expectedMessages; - } - - @Test - protected void testReadMultipleTables() throws Exception { - final ConfiguredAirbyteCatalog catalog = getConfiguredCatalogWithOneStream( - getDefaultNamespace()); - final List expectedMessages = new ArrayList<>(getTestMessages()); - - for (int i = 2; i < 10; i++) { - final String streamName2 = streamName() + i; - final String tableName = getFullyQualifiedTableName(TABLE_NAME + i); - testdb.with(createTableQuery(tableName, "id INTEGER, name VARCHAR(200)", "")) - .with("INSERT INTO %s(id, name) VALUES (1,'picard')", tableName) - .with("INSERT INTO %s(id, name) VALUES (2, 'crusher')", tableName) - .with("INSERT INTO %s(id, name) VALUES (3, 'vash')", tableName); - catalog.getStreams().add(CatalogHelpers.createConfiguredAirbyteStream( - streamName2, - getDefaultNamespace(), - Field.of(COL_ID, JsonSchemaType.NUMBER), - Field.of(COL_NAME, JsonSchemaType.STRING))); - - expectedMessages.addAll(getAirbyteMessagesSecondSync(streamName2)); - } - - final List actualMessages = MoreIterators - .toList(source().read(config(), catalog, null)); - - setEmittedAtToNull(actualMessages); - - assertEquals(expectedMessages.size(), actualMessages.size()); - assertTrue(expectedMessages.containsAll(actualMessages)); - assertTrue(actualMessages.containsAll(expectedMessages)); - } - - protected List getAirbyteMessagesSecondSync(final String streamName) { - return getTestMessages() - .stream() - .map(Jsons::clone) - .peek(m -> { - m.getRecord().setStream(streamName); - m.getRecord().setNamespace(getDefaultNamespace()); - ((ObjectNode) m.getRecord().getData()).remove(COL_UPDATED_AT); - ((ObjectNode) m.getRecord().getData()).replace(COL_ID, - convertIdBasedOnDatabase(m.getRecord().getData().get(COL_ID).asInt())); - }) - .collect(Collectors.toList()); - - } - - @Test - protected void testTablesWithQuoting() throws Exception { - final ConfiguredAirbyteStream streamForTableWithSpaces = createTableWithSpaces(); - - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog() - .withStreams(List.of( - getConfiguredCatalogWithOneStream(getDefaultNamespace()).getStreams().get(0), - streamForTableWithSpaces)); - final List actualMessages = MoreIterators - .toList(source().read(config(), catalog, null)); - - setEmittedAtToNull(actualMessages); - - final List expectedMessages = new ArrayList<>(getTestMessages()); - expectedMessages.addAll(getAirbyteMessagesForTablesWithQuoting(streamForTableWithSpaces)); - - assertEquals(expectedMessages.size(), actualMessages.size()); - assertTrue(expectedMessages.containsAll(actualMessages)); - assertTrue(actualMessages.containsAll(expectedMessages)); - } - - protected List getAirbyteMessagesForTablesWithQuoting(final ConfiguredAirbyteStream streamForTableWithSpaces) { - return getTestMessages() - .stream() - .map(Jsons::clone) - .peek(m -> { - m.getRecord().setStream(streamForTableWithSpaces.getStream().getName()); - ((ObjectNode) m.getRecord().getData()).set(COL_LAST_NAME_WITH_SPACE, - ((ObjectNode) m.getRecord().getData()).remove(COL_NAME)); - ((ObjectNode) m.getRecord().getData()).remove(COL_UPDATED_AT); - ((ObjectNode) m.getRecord().getData()).replace(COL_ID, - convertIdBasedOnDatabase(m.getRecord().getData().get(COL_ID).asInt())); - }) - .collect(Collectors.toList()); - } - - @SuppressWarnings("ResultOfMethodCallIgnored") - @Test - void testReadFailure() { - final ConfiguredAirbyteStream spiedAbStream = spy( - getConfiguredCatalogWithOneStream(getDefaultNamespace()).getStreams().get(0)); - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog() - .withStreams(List.of(spiedAbStream)); - doCallRealMethod().doThrow(new RuntimeException()).when(spiedAbStream).getStream(); - - assertThrows(RuntimeException.class, () -> source().read(config(), catalog, null)); - } - - @Test - void testIncrementalNoPreviousState() throws Exception { - incrementalCursorCheck( - COL_ID, - null, - "3", - getTestMessages()); - } - - @Test - void testIncrementalIntCheckCursor() throws Exception { - incrementalCursorCheck( - COL_ID, - "2", - "3", - List.of(getTestMessages().get(2))); - } - - @Test - void testIncrementalStringCheckCursor() throws Exception { - incrementalCursorCheck( - COL_NAME, - "patent", - "vash", - List.of(getTestMessages().get(0), getTestMessages().get(2))); - } - - @Test - void testIncrementalStringCheckCursorSpaceInColumnName() throws Exception { - final ConfiguredAirbyteStream streamWithSpaces = createTableWithSpaces(); - - final List expectedRecordMessages = getAirbyteMessagesCheckCursorSpaceInColumnName(streamWithSpaces); - incrementalCursorCheck( - COL_LAST_NAME_WITH_SPACE, - COL_LAST_NAME_WITH_SPACE, - "patent", - "vash", - expectedRecordMessages, - streamWithSpaces); - } - - protected List getAirbyteMessagesCheckCursorSpaceInColumnName(final ConfiguredAirbyteStream streamWithSpaces) { - final AirbyteMessage firstMessage = getTestMessages().get(0); - firstMessage.getRecord().setStream(streamWithSpaces.getStream().getName()); - ((ObjectNode) firstMessage.getRecord().getData()).remove(COL_UPDATED_AT); - ((ObjectNode) firstMessage.getRecord().getData()).set(COL_LAST_NAME_WITH_SPACE, - ((ObjectNode) firstMessage.getRecord().getData()).remove(COL_NAME)); - - final AirbyteMessage secondMessage = getTestMessages().get(2); - secondMessage.getRecord().setStream(streamWithSpaces.getStream().getName()); - ((ObjectNode) secondMessage.getRecord().getData()).remove(COL_UPDATED_AT); - ((ObjectNode) secondMessage.getRecord().getData()).set(COL_LAST_NAME_WITH_SPACE, - ((ObjectNode) secondMessage.getRecord().getData()).remove(COL_NAME)); - - return List.of(firstMessage, secondMessage); - } - - @Test - void testIncrementalDateCheckCursor() throws Exception { - incrementalDateCheck(); - } - - protected void incrementalDateCheck() throws Exception { - incrementalCursorCheck( - COL_UPDATED_AT, - "2005-10-18", - "2006-10-19", - List.of(getTestMessages().get(1), getTestMessages().get(2))); - } - - @Test - void testIncrementalCursorChanges() throws Exception { - incrementalCursorCheck( - COL_ID, - COL_NAME, - // cheesing this value a little bit. in the correct implementation this initial cursor value should - // be ignored because the cursor field changed. setting it to a value that if used, will cause - // records to (incorrectly) be filtered out. - "data", - "vash", - getTestMessages()); - } - - @Test - protected void testReadOneTableIncrementallyTwice() throws Exception { - final var config = config(); - final String namespace = getDefaultNamespace(); - final ConfiguredAirbyteCatalog configuredCatalog = getConfiguredCatalogWithOneStream(namespace); - configuredCatalog.getStreams().forEach(airbyteStream -> { - airbyteStream.setSyncMode(SyncMode.INCREMENTAL); - airbyteStream.setCursorField(List.of(COL_ID)); - airbyteStream.setDestinationSyncMode(DestinationSyncMode.APPEND); - }); - - final List actualMessagesFirstSync = MoreIterators - .toList(source().read(config, configuredCatalog, createEmptyState(streamName(), namespace))); - - final Optional stateAfterFirstSyncOptional = actualMessagesFirstSync.stream() - .filter(r -> r.getType() == Type.STATE).findFirst(); - assertTrue(stateAfterFirstSyncOptional.isPresent()); - - executeStatementReadIncrementallyTwice(); - - final List actualMessagesSecondSync = MoreIterators - .toList(source().read(config, configuredCatalog, extractState(stateAfterFirstSyncOptional.get()))); - - assertEquals(2, - (int) actualMessagesSecondSync.stream().filter(r -> r.getType() == Type.RECORD).count()); - final List expectedMessages = getExpectedAirbyteMessagesSecondSync(namespace); - - setEmittedAtToNull(actualMessagesSecondSync); - - assertEquals(expectedMessages.size(), actualMessagesSecondSync.size()); - assertTrue(expectedMessages.containsAll(actualMessagesSecondSync)); - assertTrue(actualMessagesSecondSync.containsAll(expectedMessages)); - } - - protected void executeStatementReadIncrementallyTwice() { - testdb - .with("INSERT INTO %s (id, name, updated_at) VALUES (4, 'riker', '2006-10-19')", getFullyQualifiedTableName(TABLE_NAME)) - .with("INSERT INTO %s (id, name, updated_at) VALUES (5, 'data', '2006-10-19')", getFullyQualifiedTableName(TABLE_NAME)); - } - - protected List getExpectedAirbyteMessagesSecondSync(final String namespace) { - final List expectedMessages = new ArrayList<>(); - expectedMessages.add(new AirbyteMessage().withType(Type.RECORD) - .withRecord(new AirbyteRecordMessage().withStream(streamName()).withNamespace(namespace) - .withData(Jsons.jsonNode(Map - .of(COL_ID, ID_VALUE_4, - COL_NAME, "riker", - COL_UPDATED_AT, "2006-10-19"))))); - expectedMessages.add(new AirbyteMessage().withType(Type.RECORD) - .withRecord(new AirbyteRecordMessage().withStream(streamName()).withNamespace(namespace) - .withData(Jsons.jsonNode(Map - .of(COL_ID, ID_VALUE_5, - COL_NAME, "data", - COL_UPDATED_AT, "2006-10-19"))))); - final DbStreamState state = new DbStreamState() - .withStreamName(streamName()) - .withStreamNamespace(namespace) - .withCursorField(List.of(COL_ID)) - .withCursor("5") - .withCursorRecordCount(1L); - expectedMessages.addAll(createExpectedTestMessages(List.of(state), 2L)); - return expectedMessages; - } - - @Test - protected void testReadMultipleTablesIncrementally() throws Exception { - final String tableName2 = TABLE_NAME + 2; - final String streamName2 = streamName() + 2; - final String fqTableName2 = getFullyQualifiedTableName(tableName2); - testdb.with(createTableQuery(fqTableName2, "id INTEGER, name VARCHAR(200)", "")) - .with("INSERT INTO %s(id, name) VALUES (1,'picard')", fqTableName2) - .with("INSERT INTO %s(id, name) VALUES (2, 'crusher')", fqTableName2) - .with("INSERT INTO %s(id, name) VALUES (3, 'vash')", fqTableName2); - - final String namespace = getDefaultNamespace(); - final ConfiguredAirbyteCatalog configuredCatalog = getConfiguredCatalogWithOneStream( - namespace); - configuredCatalog.getStreams().add(CatalogHelpers.createConfiguredAirbyteStream( - streamName2, - namespace, - Field.of(COL_ID, JsonSchemaType.NUMBER), - Field.of(COL_NAME, JsonSchemaType.STRING))); - configuredCatalog.getStreams().forEach(airbyteStream -> { - airbyteStream.setSyncMode(SyncMode.INCREMENTAL); - airbyteStream.setCursorField(List.of(COL_ID)); - airbyteStream.setDestinationSyncMode(DestinationSyncMode.APPEND); - }); - - final List actualMessagesFirstSync = MoreIterators - .toList(source().read(config(), configuredCatalog, createEmptyState(streamName(), namespace))); - - // get last state message. - final Optional stateAfterFirstSyncOptional = actualMessagesFirstSync.stream() - .filter(r -> r.getType() == Type.STATE) - .reduce((first, second) -> second); - assertTrue(stateAfterFirstSyncOptional.isPresent()); - - // we know the second streams messages are the same as the first minus the updated at column. so we - // cheat and generate the expected messages off of the first expected messages. - final List secondStreamExpectedMessages = getAirbyteMessagesSecondStreamWithNamespace(streamName2); - - // Represents the state after the first stream has been updated - final List expectedStateStreams1 = List.of( - new DbStreamState() - .withStreamName(streamName()) - .withStreamNamespace(namespace) - .withCursorField(List.of(COL_ID)) - .withCursor("3") - .withCursorRecordCount(1L), - new DbStreamState() - .withStreamName(streamName2) - .withStreamNamespace(namespace) - .withCursorField(List.of(COL_ID))); - - // Represents the state after both streams have been updated - final List expectedStateStreams2 = List.of( - new DbStreamState() - .withStreamName(streamName()) - .withStreamNamespace(namespace) - .withCursorField(List.of(COL_ID)) - .withCursor("3") - .withCursorRecordCount(1L), - new DbStreamState() - .withStreamName(streamName2) - .withStreamNamespace(namespace) - .withCursorField(List.of(COL_ID)) - .withCursor("3") - .withCursorRecordCount(1L)); - - final List expectedMessagesFirstSync = new ArrayList<>(getTestMessages()); - expectedMessagesFirstSync.add(createStateMessage(expectedStateStreams1.get(0), expectedStateStreams1, 3L)); - expectedMessagesFirstSync.addAll(secondStreamExpectedMessages); - expectedMessagesFirstSync.add(createStateMessage(expectedStateStreams2.get(1), expectedStateStreams2, 3L)); - - setEmittedAtToNull(actualMessagesFirstSync); - - assertEquals(expectedMessagesFirstSync.size(), actualMessagesFirstSync.size()); - assertTrue(expectedMessagesFirstSync.containsAll(actualMessagesFirstSync)); - assertTrue(actualMessagesFirstSync.containsAll(expectedMessagesFirstSync)); - } - - protected List getAirbyteMessagesSecondStreamWithNamespace(final String streamName2) { - return getTestMessages() - .stream() - .map(Jsons::clone) - .peek(m -> { - m.getRecord().setStream(streamName2); - ((ObjectNode) m.getRecord().getData()).remove(COL_UPDATED_AT); - ((ObjectNode) m.getRecord().getData()).replace(COL_ID, - convertIdBasedOnDatabase(m.getRecord().getData().get(COL_ID).asInt())); - }) - .collect(Collectors.toList()); - } - - // when initial and final cursor fields are the same. - protected void incrementalCursorCheck( - final String cursorField, - final String initialCursorValue, - final String endCursorValue, - final List expectedRecordMessages) - throws Exception { - incrementalCursorCheck(cursorField, cursorField, initialCursorValue, endCursorValue, - expectedRecordMessages); - } - - // See https://github.com/airbytehq/airbyte/issues/14732 for rationale and details. - @Test - public void testIncrementalWithConcurrentInsertion() throws Exception { - final String namespace = getDefaultNamespace(); - final String fullyQualifiedTableName = getFullyQualifiedTableName(TABLE_NAME_AND_TIMESTAMP); - final String columnDefinition = String.format("name VARCHAR(200) NOT NULL, %s %s NOT NULL", COL_TIMESTAMP, COL_TIMESTAMP_TYPE); - - // 1st sync - testdb.with(createTableQuery(fullyQualifiedTableName, columnDefinition, "")) - .with(INSERT_TABLE_NAME_AND_TIMESTAMP_QUERY, fullyQualifiedTableName, "a", "2021-01-01 00:00:00") - .with(INSERT_TABLE_NAME_AND_TIMESTAMP_QUERY, fullyQualifiedTableName, "b", "2021-01-01 00:00:00"); - - final ConfiguredAirbyteCatalog configuredCatalog = CatalogHelpers.toDefaultConfiguredCatalog( - new AirbyteCatalog().withStreams(List.of( - CatalogHelpers.createAirbyteStream( - TABLE_NAME_AND_TIMESTAMP, - namespace, - Field.of(COL_NAME, JsonSchemaType.STRING), - Field.of(COL_TIMESTAMP, JsonSchemaType.STRING_TIMESTAMP_WITHOUT_TIMEZONE))))); - - configuredCatalog.getStreams().forEach(airbyteStream -> { - airbyteStream.setSyncMode(SyncMode.INCREMENTAL); - airbyteStream.setCursorField(List.of(COL_TIMESTAMP)); - airbyteStream.setDestinationSyncMode(DestinationSyncMode.APPEND); - }); - - final List firstSyncActualMessages = MoreIterators.toList( - source().read(config(), configuredCatalog, createEmptyState(TABLE_NAME_AND_TIMESTAMP, namespace))); - - // cursor after 1st sync: 2021-01-01 00:00:00, count 2 - final Optional firstSyncStateOptional = firstSyncActualMessages.stream().filter(r -> r.getType() == Type.STATE).findFirst(); - assertTrue(firstSyncStateOptional.isPresent()); - final JsonNode firstSyncState = getStateData(firstSyncStateOptional.get(), TABLE_NAME_AND_TIMESTAMP); - assertEquals(firstSyncState.get("cursor_field").elements().next().asText(), COL_TIMESTAMP); - assertTrue(firstSyncState.get("cursor").asText().contains("2021-01-01")); - assertTrue(firstSyncState.get("cursor").asText().contains("00:00:00")); - assertEquals(2L, firstSyncState.get("cursor_record_count").asLong()); - - final List firstSyncNames = firstSyncActualMessages.stream() - .filter(r -> r.getType() == Type.RECORD) - .map(r -> r.getRecord().getData().get(COL_NAME).asText()) - .toList(); - // some databases don't make insertion order guarantee when equal ordering value - if (testdb.getDatabaseDriver().equals(DatabaseDriver.TERADATA) || testdb.getDatabaseDriver().equals(DatabaseDriver.ORACLE)) { - assertThat(List.of("a", "b"), Matchers.containsInAnyOrder(firstSyncNames.toArray())); - } else { - assertEquals(List.of("a", "b"), firstSyncNames); - } - - // 2nd sync - testdb.with(INSERT_TABLE_NAME_AND_TIMESTAMP_QUERY, fullyQualifiedTableName, "c", "2021-01-02 00:00:00"); - - final List secondSyncActualMessages = MoreIterators.toList( - source().read(config(), configuredCatalog, createState(TABLE_NAME_AND_TIMESTAMP, namespace, firstSyncState))); - - // cursor after 2nd sync: 2021-01-02 00:00:00, count 1 - final Optional secondSyncStateOptional = secondSyncActualMessages.stream().filter(r -> r.getType() == Type.STATE).findFirst(); - assertTrue(secondSyncStateOptional.isPresent()); - final JsonNode secondSyncState = getStateData(secondSyncStateOptional.get(), TABLE_NAME_AND_TIMESTAMP); - assertEquals(secondSyncState.get("cursor_field").elements().next().asText(), COL_TIMESTAMP); - assertTrue(secondSyncState.get("cursor").asText().contains("2021-01-02")); - assertTrue(secondSyncState.get("cursor").asText().contains("00:00:00")); - assertEquals(1L, secondSyncState.get("cursor_record_count").asLong()); - - final List secondSyncNames = secondSyncActualMessages.stream() - .filter(r -> r.getType() == Type.RECORD) - .map(r -> r.getRecord().getData().get(COL_NAME).asText()) - .toList(); - assertEquals(List.of("c"), secondSyncNames); - - // 3rd sync has records with duplicated cursors - testdb.with(INSERT_TABLE_NAME_AND_TIMESTAMP_QUERY, fullyQualifiedTableName, "d", "2021-01-02 00:00:00") - .with(INSERT_TABLE_NAME_AND_TIMESTAMP_QUERY, fullyQualifiedTableName, "e", "2021-01-02 00:00:00") - .with(INSERT_TABLE_NAME_AND_TIMESTAMP_QUERY, fullyQualifiedTableName, "f", "2021-01-03 00:00:00"); - - final List thirdSyncActualMessages = MoreIterators.toList( - source().read(config(), configuredCatalog, createState(TABLE_NAME_AND_TIMESTAMP, namespace, secondSyncState))); - - // Cursor after 3rd sync is: 2021-01-03 00:00:00, count 1. - final Optional thirdSyncStateOptional = thirdSyncActualMessages.stream().filter(r -> r.getType() == Type.STATE).findFirst(); - assertTrue(thirdSyncStateOptional.isPresent()); - final JsonNode thirdSyncState = getStateData(thirdSyncStateOptional.get(), TABLE_NAME_AND_TIMESTAMP); - assertEquals(thirdSyncState.get("cursor_field").elements().next().asText(), COL_TIMESTAMP); - assertTrue(thirdSyncState.get("cursor").asText().contains("2021-01-03")); - assertTrue(thirdSyncState.get("cursor").asText().contains("00:00:00")); - assertEquals(1L, thirdSyncState.get("cursor_record_count").asLong()); - - // The c, d, e, f are duplicated records from this sync, because the cursor - // record count in the database is different from that in the state. - final List thirdSyncExpectedNames = thirdSyncActualMessages.stream() - .filter(r -> r.getType() == Type.RECORD) - .map(r -> r.getRecord().getData().get(COL_NAME).asText()) - .toList(); - - // teradata doesn't make insertion order guarantee when equal ordering value - if (testdb.getDatabaseDriver().equals(DatabaseDriver.TERADATA)) { - assertThat(List.of("c", "d", "e", "f"), Matchers.containsInAnyOrder(thirdSyncExpectedNames.toArray())); - } else { - assertEquals(List.of("c", "d", "e", "f"), thirdSyncExpectedNames); - } - } - - protected JsonNode getStateData(final AirbyteMessage airbyteMessage, final String streamName) { - for (final JsonNode stream : airbyteMessage.getState().getData().get("streams")) { - if (stream.get("stream_name").asText().equals(streamName)) { - return stream; - } - } - throw new IllegalArgumentException("Stream not found in state message: " + streamName); - } - - private void incrementalCursorCheck( - final String initialCursorField, - final String cursorField, - final String initialCursorValue, - final String endCursorValue, - final List expectedRecordMessages) - throws Exception { - incrementalCursorCheck(initialCursorField, cursorField, initialCursorValue, endCursorValue, - expectedRecordMessages, - getConfiguredCatalogWithOneStream(getDefaultNamespace()).getStreams().get(0)); - } - - protected void incrementalCursorCheck( - final String initialCursorField, - final String cursorField, - final String initialCursorValue, - final String endCursorValue, - final List expectedRecordMessages, - final ConfiguredAirbyteStream airbyteStream) - throws Exception { - airbyteStream.setSyncMode(SyncMode.INCREMENTAL); - airbyteStream.setCursorField(List.of(cursorField)); - airbyteStream.setDestinationSyncMode(DestinationSyncMode.APPEND); - - final ConfiguredAirbyteCatalog configuredCatalog = new ConfiguredAirbyteCatalog() - .withStreams(List.of(airbyteStream)); - - final DbStreamState dbStreamState = buildStreamState(airbyteStream, initialCursorField, initialCursorValue); - - final List actualMessages = MoreIterators - .toList(source().read(config(), configuredCatalog, Jsons.jsonNode(createState(List.of(dbStreamState))))); - - setEmittedAtToNull(actualMessages); - - final List expectedStreams = List.of(buildStreamState(airbyteStream, cursorField, endCursorValue)); - - final List expectedMessages = new ArrayList<>(expectedRecordMessages); - expectedMessages.addAll(createExpectedTestMessages(expectedStreams, expectedRecordMessages.size())); - - assertEquals(expectedMessages.size(), actualMessages.size()); - assertTrue(expectedMessages.containsAll(actualMessages)); - assertTrue(actualMessages.containsAll(expectedMessages)); - } - - protected DbStreamState buildStreamState(final ConfiguredAirbyteStream configuredAirbyteStream, - final String cursorField, - final String cursorValue) { - return new DbStreamState() - .withStreamName(configuredAirbyteStream.getStream().getName()) - .withStreamNamespace(configuredAirbyteStream.getStream().getNamespace()) - .withCursorField(List.of(cursorField)) - .withCursor(cursorValue) - .withCursorRecordCount(1L); - } - - // get catalog and perform a defensive copy. - protected ConfiguredAirbyteCatalog getConfiguredCatalogWithOneStream(final String defaultNamespace) { - final ConfiguredAirbyteCatalog catalog = CatalogHelpers.toDefaultConfiguredCatalog(getCatalog(defaultNamespace)); - // Filter to only keep the main stream name as configured stream - catalog.withStreams( - catalog.getStreams().stream().filter(s -> s.getStream().getName().equals(streamName())) - .collect(Collectors.toList())); - return catalog; - } - - protected AirbyteCatalog getCatalog(final String defaultNamespace) { - return new AirbyteCatalog().withStreams(List.of( - CatalogHelpers.createAirbyteStream( - TABLE_NAME, - defaultNamespace, - Field.of(COL_ID, JsonSchemaType.INTEGER), - Field.of(COL_NAME, JsonSchemaType.STRING), - Field.of(COL_UPDATED_AT, JsonSchemaType.STRING)) - .withSupportedSyncModes(List.of(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) - .withSourceDefinedPrimaryKey(List.of(List.of(COL_ID))), - CatalogHelpers.createAirbyteStream( - TABLE_NAME_WITHOUT_PK, - defaultNamespace, - Field.of(COL_ID, JsonSchemaType.INTEGER), - Field.of(COL_NAME, JsonSchemaType.STRING), - Field.of(COL_UPDATED_AT, JsonSchemaType.STRING)) - .withSupportedSyncModes(List.of(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) - .withSourceDefinedPrimaryKey(Collections.emptyList()), - CatalogHelpers.createAirbyteStream( - TABLE_NAME_COMPOSITE_PK, - defaultNamespace, - Field.of(COL_FIRST_NAME, JsonSchemaType.STRING), - Field.of(COL_LAST_NAME, JsonSchemaType.STRING), - Field.of(COL_UPDATED_AT, JsonSchemaType.STRING)) - .withSupportedSyncModes(List.of(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) - .withSourceDefinedPrimaryKey( - List.of(List.of(COL_FIRST_NAME), List.of(COL_LAST_NAME))))); - } - - protected List getTestMessages() { - return List.of( - new AirbyteMessage().withType(Type.RECORD) - .withRecord(new AirbyteRecordMessage().withStream(streamName()).withNamespace(getDefaultNamespace()) - .withData(Jsons.jsonNode(Map - .of(COL_ID, ID_VALUE_1, - COL_NAME, "picard", - COL_UPDATED_AT, "2004-10-19")))), - new AirbyteMessage().withType(Type.RECORD) - .withRecord(new AirbyteRecordMessage().withStream(streamName()).withNamespace(getDefaultNamespace()) - .withData(Jsons.jsonNode(Map - .of(COL_ID, ID_VALUE_2, - COL_NAME, "crusher", - COL_UPDATED_AT, - "2005-10-19")))), - new AirbyteMessage().withType(Type.RECORD) - .withRecord(new AirbyteRecordMessage().withStream(streamName()).withNamespace(getDefaultNamespace()) - .withData(Jsons.jsonNode(Map - .of(COL_ID, ID_VALUE_3, - COL_NAME, "vash", - COL_UPDATED_AT, "2006-10-19"))))); - } - - protected List createExpectedTestMessages(final List states, final long numRecords) { - return states.stream() - .map(s -> new AirbyteMessage().withType(Type.STATE) - .withState( - new AirbyteStateMessage().withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withNamespace(s.getStreamNamespace()).withName(s.getStreamName())) - .withStreamState(Jsons.jsonNode(s))) - .withData(Jsons.jsonNode(new DbState().withCdc(false).withStreams(states))) - .withSourceStats(new AirbyteStateStats().withRecordCount((double) numRecords)))) - .collect( - Collectors.toList()); - } - - protected List createState(final List states) { - return states.stream() - .map(s -> new AirbyteStateMessage().withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withNamespace(s.getStreamNamespace()).withName(s.getStreamName())) - .withStreamState(Jsons.jsonNode(s)))) - .collect( - Collectors.toList()); - } - - protected ConfiguredAirbyteStream createTableWithSpaces() throws SQLException { - final String tableNameWithSpaces = TABLE_NAME_WITH_SPACES + "2"; - final String streamName2 = tableNameWithSpaces; - - try (final var connection = testdb.getDataSource().getConnection()) { - final String identifierQuoteString = connection.getMetaData().getIdentifierQuoteString(); - connection.createStatement() - .execute( - createTableQuery(getFullyQualifiedTableName( - enquoteIdentifier(tableNameWithSpaces, identifierQuoteString)), - "id INTEGER, " + enquoteIdentifier(COL_LAST_NAME_WITH_SPACE, identifierQuoteString) - + " VARCHAR(200)", - "")); - connection.createStatement() - .execute(String.format("INSERT INTO %s(id, %s) VALUES (1,'picard')", - getFullyQualifiedTableName( - enquoteIdentifier(tableNameWithSpaces, identifierQuoteString)), - enquoteIdentifier(COL_LAST_NAME_WITH_SPACE, identifierQuoteString))); - connection.createStatement() - .execute(String.format("INSERT INTO %s(id, %s) VALUES (2, 'crusher')", - getFullyQualifiedTableName( - enquoteIdentifier(tableNameWithSpaces, identifierQuoteString)), - enquoteIdentifier(COL_LAST_NAME_WITH_SPACE, identifierQuoteString))); - connection.createStatement() - .execute(String.format("INSERT INTO %s(id, %s) VALUES (3, 'vash')", - getFullyQualifiedTableName( - enquoteIdentifier(tableNameWithSpaces, identifierQuoteString)), - enquoteIdentifier(COL_LAST_NAME_WITH_SPACE, identifierQuoteString))); - } - - return CatalogHelpers.createConfiguredAirbyteStream( - streamName2, - getDefaultNamespace(), - Field.of(COL_ID, JsonSchemaType.NUMBER), - Field.of(COL_LAST_NAME_WITH_SPACE, JsonSchemaType.STRING)); - } - - public String getFullyQualifiedTableName(final String tableName) { - return RelationalDbQueryUtils.getFullyQualifiedTableName(getDefaultSchemaName(), tableName); - } - - protected void createSchemas() { - if (supportsSchemas()) { - for (final String schemaName : TEST_SCHEMAS) { - testdb.with("CREATE SCHEMA %s;", schemaName); - } - } - } - - private JsonNode convertIdBasedOnDatabase(final int idValue) { - return switch (testdb.getDatabaseDriver()) { - case ORACLE, SNOWFLAKE -> Jsons.jsonNode(BigDecimal.valueOf(idValue)); - default -> Jsons.jsonNode(idValue); - }; - } - - private String getDefaultSchemaName() { - return supportsSchemas() ? SCHEMA_NAME : null; - } - - protected String getDefaultNamespace() { - return switch (testdb.getDatabaseDriver()) { - // mysql does not support schemas, it namespaces using database names instead. - case MYSQL, CLICKHOUSE, TERADATA -> testdb.getDatabaseName(); - default -> SCHEMA_NAME; - }; - } - - protected static void setEmittedAtToNull(final Iterable messages) { - for (final AirbyteMessage actualMessage : messages) { - if (actualMessage.getRecord() != null) { - actualMessage.getRecord().setEmittedAt(null); - } - } - } - - /** - * Creates empty state with the provided stream name and namespace. - * - * @param streamName The stream name. - * @param streamNamespace The stream namespace. - * @return {@link JsonNode} representation of the generated empty state. - */ - protected JsonNode createEmptyState(final String streamName, final String streamNamespace) { - final AirbyteStateMessage airbyteStateMessage = new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName(streamName).withNamespace(streamNamespace))); - return Jsons.jsonNode(List.of(airbyteStateMessage)); - - } - - protected JsonNode createState(final String streamName, final String streamNamespace, final JsonNode stateData) { - final AirbyteStateMessage airbyteStateMessage = new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream( - new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withName(streamName).withNamespace(streamNamespace)) - .withStreamState(stateData)); - return Jsons.jsonNode(List.of(airbyteStateMessage)); - } - - protected JsonNode extractState(final AirbyteMessage airbyteMessage) { - return Jsons.jsonNode(List.of(airbyteMessage.getState())); - } - - protected AirbyteMessage createStateMessage(final DbStreamState dbStreamState, final List legacyStates, final long recordCount) { - return new AirbyteMessage().withType(Type.STATE) - .withState( - new AirbyteStateMessage().withType(AirbyteStateType.STREAM) - .withStream(new AirbyteStreamState() - .withStreamDescriptor(new StreamDescriptor().withNamespace(dbStreamState.getStreamNamespace()) - .withName(dbStreamState.getStreamName())) - .withStreamState(Jsons.jsonNode(dbStreamState))) - .withData(Jsons.jsonNode(new DbState().withCdc(false).withStreams(legacyStates))) - .withSourceStats(new AirbyteStateStats().withRecordCount((double) recordCount))); - } - - protected List extractSpecificFieldFromCombinedMessages(final List messages, - final String streamName, - final String field) { - return extractStateMessage(messages).stream() - .filter(s -> s.getStream().getStreamDescriptor().getName().equals(streamName)) - .map(s -> s.getStream().getStreamState().get(field) != null ? s.getStream().getStreamState().get(field).asText() : "").toList(); - } - - protected List filterRecords(final List messages) { - return messages.stream().filter(r -> r.getType() == Type.RECORD) - .collect(Collectors.toList()); - } - - protected List extractStateMessage(final List messages) { - return messages.stream().filter(r -> r.getType() == Type.STATE).map(AirbyteMessage::getState) - .collect(Collectors.toList()); - } - - protected List extractStateMessage(final List messages, final String streamName) { - return messages.stream().filter(r -> r.getType() == Type.STATE && - r.getState().getStream().getStreamDescriptor().getName().equals(streamName)).map(AirbyteMessage::getState) - .collect(Collectors.toList()); - } - - protected AirbyteMessage createRecord(final String stream, final String namespace, final Map data) { - return new AirbyteMessage().withType(Type.RECORD) - .withRecord(new AirbyteRecordMessage().withData(Jsons.jsonNode(data)).withStream(stream).withNamespace(namespace)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/source/jdbc/test/JdbcStressTest.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/source/jdbc/test/JdbcStressTest.java deleted file mode 100644 index 9c626a9ac911b..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/source/jdbc/test/JdbcStressTest.java +++ /dev/null @@ -1,226 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.jdbc.test; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; -import io.airbyte.cdk.db.factory.DataSourceFactory; -import io.airbyte.cdk.db.jdbc.DefaultJdbcDatabase; -import io.airbyte.cdk.db.jdbc.JdbcDatabase; -import io.airbyte.cdk.db.jdbc.JdbcUtils; -import io.airbyte.cdk.integrations.source.jdbc.AbstractJdbcSource; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.stream.MoreStreams; -import io.airbyte.commons.string.Strings; -import io.airbyte.protocol.models.Field; -import io.airbyte.protocol.models.JsonSchemaType; -import io.airbyte.protocol.models.v0.AirbyteCatalog; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteMessage.Type; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.CatalogHelpers; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.v0.DestinationSyncMode; -import io.airbyte.protocol.models.v0.SyncMode; -import java.math.BigDecimal; -import java.nio.ByteBuffer; -import java.util.ArrayList; -import java.util.BitSet; -import java.util.Collections; -import java.util.Iterator; -import java.util.List; -import java.util.Optional; -import org.junit.jupiter.api.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Runs a "large" amount of data through a JdbcSource to ensure that it streams / chunks records. - */ -// todo (cgardens) - this needs more love and thought. we should be able to test this without having -// to rewrite so much data. it is enough for now to sanity check that our JdbcSources can actually -// handle more data than fits in memory. -@SuppressFBWarnings( - value = {"MS_SHOULD_BE_FINAL"}, - justification = "The static variables are updated in sub classes for convenience, and cannot be final.") -public abstract class JdbcStressTest { - - private static final Logger LOGGER = LoggerFactory.getLogger(JdbcStressTest.class); - - // this will get rounded down to the nearest 1000th. - private static final long TOTAL_RECORDS = 10_000_000L; - private static final int BATCH_SIZE = 1000; - public static String TABLE_NAME = "id_and_name"; - public static String COL_ID = "id"; - public static String COL_NAME = "name"; - public static String COL_ID_TYPE = "BIGINT"; - public static String INSERT_STATEMENT = "(%s,'picard-%s')"; - - private static String streamName; - - private BitSet bitSet; - private JsonNode config; - private AbstractJdbcSource source; - - /** - * These tests write records without specifying a namespace (schema name). They will be written into - * whatever the default schema is for the database. When they are discovered they will be namespaced - * by the schema name (e.g. .). Thus the source needs to tell the - * tests what that default schema name is. If the database does not support schemas, then database - * name should used instead. - * - * @return name that will be used to namespace the record. - */ - public abstract Optional getDefaultSchemaName(); - - /** - * A valid configuration to connect to a test database. - * - * @return config - */ - public abstract JsonNode getConfig(); - - /** - * Full qualified class name of the JDBC driver for the database. - * - * @return driver - */ - public abstract String getDriverClass(); - - /** - * An instance of the source that should be tests. - * - * @return source - */ - public abstract AbstractJdbcSource getSource(); - - protected String createTableQuery(final String tableName, final String columnClause) { - return String.format("CREATE TABLE %s(%s)", - tableName, columnClause); - } - - public void setup() throws Exception { - LOGGER.info("running for driver:" + getDriverClass()); - bitSet = new BitSet((int) TOTAL_RECORDS); - - source = getSource(); - streamName = getDefaultSchemaName().map(val -> val + "." + TABLE_NAME).orElse(TABLE_NAME); - config = getConfig(); - - final JsonNode jdbcConfig = source.toDatabaseConfig(config); - final JdbcDatabase database = new DefaultJdbcDatabase( - DataSourceFactory.create( - jdbcConfig.get(JdbcUtils.USERNAME_KEY).asText(), - jdbcConfig.has(JdbcUtils.PASSWORD_KEY) ? jdbcConfig.get(JdbcUtils.PASSWORD_KEY).asText() : null, - getDriverClass(), - jdbcConfig.get(JdbcUtils.JDBC_URL_KEY).asText())); - - database.execute(connection -> connection.createStatement().execute( - createTableQuery("id_and_name", String.format("id %s, name VARCHAR(200)", COL_ID_TYPE)))); - final long batchCount = TOTAL_RECORDS / BATCH_SIZE; - LOGGER.info("writing {} batches of {}", batchCount, BATCH_SIZE); - for (int i = 0; i < batchCount; i++) { - if (i % 1000 == 0) - LOGGER.info("writing batch: " + i); - final List insert = new ArrayList<>(); - for (int j = 0; j < BATCH_SIZE; j++) { - final int recordNumber = (i * BATCH_SIZE) + j; - insert.add(String.format(INSERT_STATEMENT, recordNumber, recordNumber)); - } - - final String sql = prepareInsertStatement(insert); - database.execute(connection -> connection.createStatement().execute(sql)); - } - - } - - // todo (cgardens) - restructure these tests so that testFullRefresh() and testIncremental() can be - // separate tests. current constrained by only wanting to setup the fixture in the database once, - // but it is not trivial to move them to @BeforeAll because it is static and we are doing - // inheritance. Not impossible, just needs to be done thoughtfully and for all JdbcSources. - @Test - public void stressTest() throws Exception { - testFullRefresh(); - testIncremental(); - } - - private void testFullRefresh() throws Exception { - runTest(getConfiguredCatalogFullRefresh(), "full_refresh"); - } - - private void testIncremental() throws Exception { - runTest(getConfiguredCatalogIncremental(), "incremental"); - } - - private void runTest(final ConfiguredAirbyteCatalog configuredCatalog, final String testName) throws Exception { - LOGGER.info("running stress test for: " + testName); - final Iterator read = source.read(config, configuredCatalog, Jsons.jsonNode(Collections.emptyMap())); - final long actualCount = MoreStreams.toStream(read) - .filter(m -> m.getType() == Type.RECORD) - .peek(m -> { - if (m.getRecord().getData().get(COL_ID).asLong() % 100000 == 0) { - LOGGER.info("reading batch: " + m.getRecord().getData().get(COL_ID).asLong() / 1000); - } - }) - .peek(m -> assertExpectedMessage(m)) - .count(); - ByteBuffer a; - final long expectedRoundedRecordsCount = TOTAL_RECORDS - TOTAL_RECORDS % 1000; - LOGGER.info("expected records count: " + TOTAL_RECORDS); - LOGGER.info("actual records count: " + actualCount); - assertEquals(expectedRoundedRecordsCount, actualCount, "testing: " + testName); - assertEquals(expectedRoundedRecordsCount, bitSet.cardinality(), "testing: " + testName); - } - - // each is roughly 106 bytes. - private void assertExpectedMessage(final AirbyteMessage actualMessage) { - final long recordNumber = actualMessage.getRecord().getData().get(COL_ID).asLong(); - bitSet.set((int) recordNumber); - actualMessage.getRecord().setEmittedAt(null); - - final Number expectedRecordNumber = - getDriverClass().toLowerCase().contains("oracle") ? new BigDecimal(recordNumber) - : recordNumber; - - final AirbyteMessage expectedMessage = new AirbyteMessage().withType(Type.RECORD) - .withRecord(new AirbyteRecordMessage().withStream(streamName) - .withData(Jsons.jsonNode( - ImmutableMap.of(COL_ID, expectedRecordNumber, COL_NAME, "picard-" + recordNumber)))); - assertEquals(expectedMessage, actualMessage); - } - - private static ConfiguredAirbyteCatalog getConfiguredCatalogFullRefresh() { - return CatalogHelpers.toDefaultConfiguredCatalog(getCatalog()); - } - - private static ConfiguredAirbyteCatalog getConfiguredCatalogIncremental() { - return new ConfiguredAirbyteCatalog() - .withStreams(Collections.singletonList(new ConfiguredAirbyteStream().withStream(getCatalog().getStreams().get(0)) - .withCursorField(Collections.singletonList(COL_ID)) - .withSyncMode(SyncMode.INCREMENTAL) - .withDestinationSyncMode(DestinationSyncMode.APPEND))); - } - - private static AirbyteCatalog getCatalog() { - return new AirbyteCatalog().withStreams(Lists.newArrayList(CatalogHelpers.createAirbyteStream( - streamName, - Field.of(COL_ID, JsonSchemaType.NUMBER), - Field.of(COL_NAME, JsonSchemaType.STRING)) - .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)))); - } - - private String prepareInsertStatement(final List inserts) { - if (getDriverClass().toLowerCase().contains("oracle")) { - return String.format("INSERT ALL %s SELECT * FROM dual", Strings.join(inserts, " ")); - } - return String.format("INSERT INTO id_and_name (id, name) VALUES %s", Strings.join(inserts, ", ")); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/AbstractSourceConnectorTest.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/AbstractSourceConnectorTest.java deleted file mode 100644 index 2393c4dcc5950..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/AbstractSourceConnectorTest.java +++ /dev/null @@ -1,291 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.standardtest.source; - -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.api.client.AirbyteApiClient; -import io.airbyte.api.client.generated.SourceApi; -import io.airbyte.api.client.model.generated.DiscoverCatalogResult; -import io.airbyte.api.client.model.generated.SourceDiscoverSchemaWriteRequestBody; -import io.airbyte.commons.features.EnvVariableFeatureFlags; -import io.airbyte.commons.features.FeatureFlags; -import io.airbyte.commons.json.Jsons; -import io.airbyte.configoss.JobGetSpecConfig; -import io.airbyte.configoss.StandardCheckConnectionInput; -import io.airbyte.configoss.StandardCheckConnectionOutput; -import io.airbyte.configoss.StandardDiscoverCatalogInput; -import io.airbyte.configoss.State; -import io.airbyte.configoss.WorkerSourceConfig; -import io.airbyte.protocol.models.v0.AirbyteCatalog; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteMessage.Type; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConnectorSpecification; -import io.airbyte.workers.exception.TestHarnessException; -import io.airbyte.workers.general.DefaultCheckConnectionTestHarness; -import io.airbyte.workers.general.DefaultDiscoverCatalogTestHarness; -import io.airbyte.workers.general.DefaultGetSpecTestHarness; -import io.airbyte.workers.helper.CatalogClientConverters; -import io.airbyte.workers.helper.ConnectorConfigUpdater; -import io.airbyte.workers.helper.EntrypointEnvChecker; -import io.airbyte.workers.internal.AirbyteSource; -import io.airbyte.workers.internal.DefaultAirbyteSource; -import io.airbyte.workers.process.AirbyteIntegrationLauncher; -import io.airbyte.workers.process.DockerProcessFactory; -import io.airbyte.workers.process.ProcessFactory; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.UUID; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.mockito.ArgumentCaptor; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * This abstract class contains helpful functionality and boilerplate for testing a source - * connector. - */ -public abstract class AbstractSourceConnectorTest { - - protected static final Logger LOGGER = LoggerFactory.getLogger(AbstractSourceConnectorTest.class); - private TestDestinationEnv environment; - private Path jobRoot; - protected Path localRoot; - private ProcessFactory processFactory; - - private static final String JOB_ID = String.valueOf(0L); - private static final int JOB_ATTEMPT = 0; - - private static final UUID CATALOG_ID = UUID.randomUUID(); - - private static final UUID SOURCE_ID = UUID.randomUUID(); - - private static final String CPU_REQUEST_FIELD_NAME = "cpuRequest"; - private static final String CPU_LIMIT_FIELD_NAME = "cpuLimit"; - private static final String MEMORY_REQUEST_FIELD_NAME = "memoryRequest"; - private static final String MEMORY_LIMIT_FIELD_NAME = "memoryLimit"; - - /** - * Name of the docker image that the tests will run against. - * - * @return docker image name - */ - protected abstract String getImageName(); - - /** - * Configuration specific to the integration. Will be passed to integration where appropriate in - * each test. Should be valid. - * - * @return integration-specific configuration - */ - protected abstract JsonNode getConfig() throws Exception; - - /** - * Function that performs any setup of external resources required for the test. e.g. instantiate a - * postgres database. This function will be called before EACH test. - * - * @param environment - information about the test environment. - * @throws Exception - can throw any exception, test framework will handle. - */ - protected abstract void setupEnvironment(TestDestinationEnv environment) throws Exception; - - /** - * Function that performs any clean up of external resources required for the test. e.g. delete a - * postgres database. This function will be called after EACH test. It MUST remove all data in the - * destination so that there is no contamination across tests. - * - * @param testEnv - information about the test environment. - * @throws Exception - can throw any exception, test framework will handle. - */ - protected abstract void tearDown(TestDestinationEnv testEnv) throws Exception; - - private AirbyteApiClient mAirbyteApiClient; - - private SourceApi mSourceApi; - - private ConnectorConfigUpdater mConnectorConfigUpdater; - - protected AirbyteCatalog getLastPersistedCatalog() { - return convertProtocolObject( - CatalogClientConverters.toAirbyteProtocol(discoverWriteRequest.getValue().getCatalog()), AirbyteCatalog.class); - } - - private final ArgumentCaptor discoverWriteRequest = - ArgumentCaptor.forClass(SourceDiscoverSchemaWriteRequestBody.class); - - @BeforeEach - public void setUpInternal() throws Exception { - final Path testDir = Path.of("/tmp/airbyte_tests/"); - Files.createDirectories(testDir); - final Path workspaceRoot = Files.createTempDirectory(testDir, "test"); - jobRoot = Files.createDirectories(Path.of(workspaceRoot.toString(), "job")); - localRoot = Files.createTempDirectory(testDir, "output"); - environment = new TestDestinationEnv(localRoot); - setupEnvironment(environment); - mAirbyteApiClient = mock(AirbyteApiClient.class); - mSourceApi = mock(SourceApi.class); - when(mAirbyteApiClient.getSourceApi()).thenReturn(mSourceApi); - when(mSourceApi.writeDiscoverCatalogResult(any())) - .thenReturn(new DiscoverCatalogResult().catalogId(CATALOG_ID)); - mConnectorConfigUpdater = mock(ConnectorConfigUpdater.class); - var envMap = new HashMap<>(new TestEnvConfigs().getJobDefaultEnvMap()); - envMap.put(EnvVariableFeatureFlags.DEPLOYMENT_MODE, featureFlags().deploymentMode()); - processFactory = new DockerProcessFactory( - workspaceRoot, - workspaceRoot.toString(), - localRoot.toString(), - "host", - envMap); - - postSetup(); - } - - /** - * Override this method if you want to do any per-test setup that depends on being able to e.g. - * {@link #runRead(ConfiguredAirbyteCatalog)}. - */ - protected void postSetup() throws Exception {} - - @AfterEach - public void tearDownInternal() throws Exception { - tearDown(environment); - } - - protected FeatureFlags featureFlags() { - return new EnvVariableFeatureFlags(); - } - - protected ConnectorSpecification runSpec() throws TestHarnessException { - final io.airbyte.protocol.models.ConnectorSpecification spec = new DefaultGetSpecTestHarness( - new AirbyteIntegrationLauncher(JOB_ID, JOB_ATTEMPT, getImageName(), processFactory, null, null, false, - featureFlags())) - .run(new JobGetSpecConfig().withDockerImage(getImageName()), jobRoot).getSpec(); - return convertProtocolObject(spec, ConnectorSpecification.class); - } - - protected StandardCheckConnectionOutput runCheck() throws Exception { - return new DefaultCheckConnectionTestHarness( - new AirbyteIntegrationLauncher(JOB_ID, JOB_ATTEMPT, getImageName(), processFactory, null, null, false, - featureFlags()), - mConnectorConfigUpdater) - .run(new StandardCheckConnectionInput().withConnectionConfiguration(getConfig()), jobRoot).getCheckConnection(); - } - - protected String runCheckAndGetStatusAsString(final JsonNode config) throws Exception { - return new DefaultCheckConnectionTestHarness( - new AirbyteIntegrationLauncher(JOB_ID, JOB_ATTEMPT, getImageName(), processFactory, null, null, false, - featureFlags()), - mConnectorConfigUpdater) - .run(new StandardCheckConnectionInput().withConnectionConfiguration(config), jobRoot).getCheckConnection().getStatus().toString(); - } - - protected UUID runDiscover() throws Exception { - final UUID toReturn = new DefaultDiscoverCatalogTestHarness( - mAirbyteApiClient, - new AirbyteIntegrationLauncher(JOB_ID, JOB_ATTEMPT, getImageName(), processFactory, null, null, false, - featureFlags()), - mConnectorConfigUpdater) - .run(new StandardDiscoverCatalogInput().withSourceId(SOURCE_ID.toString()).withConnectionConfiguration(getConfig()), jobRoot) - .getDiscoverCatalogId(); - verify(mSourceApi).writeDiscoverCatalogResult(discoverWriteRequest.capture()); - return toReturn; - } - - protected void checkEntrypointEnvVariable() throws Exception { - final String entrypoint = EntrypointEnvChecker.getEntrypointEnvVariable( - processFactory, - JOB_ID, - JOB_ATTEMPT, - jobRoot, - getImageName()); - - assertNotNull(entrypoint); - assertFalse(entrypoint.isBlank()); - } - - protected List runRead(final ConfiguredAirbyteCatalog configuredCatalog) throws Exception { - return runRead(configuredCatalog, null); - } - - // todo (cgardens) - assume no state since we are all full refresh right now. - protected List runRead(final ConfiguredAirbyteCatalog catalog, final JsonNode state) throws Exception { - final WorkerSourceConfig sourceConfig = new WorkerSourceConfig() - .withSourceConnectionConfiguration(getConfig()) - .withState(state == null ? null : new State().withState(state)) - .withCatalog(convertProtocolObject(catalog, io.airbyte.protocol.models.ConfiguredAirbyteCatalog.class)); - - final AirbyteSource source = new DefaultAirbyteSource( - new AirbyteIntegrationLauncher(JOB_ID, JOB_ATTEMPT, getImageName(), processFactory, null, null, false, - featureFlags()), - featureFlags()); - final List messages = new ArrayList<>(); - source.start(sourceConfig, jobRoot); - while (!source.isFinished()) { - source.attemptRead().ifPresent(m -> messages.add(convertProtocolObject(m, AirbyteMessage.class))); - } - source.close(); - - return messages; - } - - protected Map runReadVerifyNumberOfReceivedMsgs(final ConfiguredAirbyteCatalog catalog, - final JsonNode state, - final Map mapOfExpectedRecordsCount) - throws Exception { - - final WorkerSourceConfig sourceConfig = new WorkerSourceConfig() - .withSourceConnectionConfiguration(getConfig()) - .withState(state == null ? null : new State().withState(state)) - .withCatalog(convertProtocolObject(catalog, io.airbyte.protocol.models.ConfiguredAirbyteCatalog.class)); - - final AirbyteSource source = prepareAirbyteSource(); - source.start(sourceConfig, jobRoot); - - while (!source.isFinished()) { - final Optional airbyteMessageOptional = source.attemptRead().map(m -> convertProtocolObject(m, AirbyteMessage.class)); - if (airbyteMessageOptional.isPresent() && airbyteMessageOptional.get().getType().equals(Type.RECORD)) { - final AirbyteMessage airbyteMessage = airbyteMessageOptional.get(); - final AirbyteRecordMessage record = airbyteMessage.getRecord(); - - final String streamName = record.getStream(); - mapOfExpectedRecordsCount.put(streamName, mapOfExpectedRecordsCount.get(streamName) - 1); - } - } - source.close(); - return mapOfExpectedRecordsCount; - } - - private AirbyteSource prepareAirbyteSource() { - final var integrationLauncher = new AirbyteIntegrationLauncher( - JOB_ID, - JOB_ATTEMPT, - getImageName(), - processFactory, - null, - null, - false, - featureFlags()); - return new DefaultAirbyteSource(integrationLauncher, featureFlags()); - } - - private static V0 convertProtocolObject(final V1 v1, final Class klass) { - return Jsons.object(Jsons.jsonNode(v1), klass); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/AbstractSourceDatabaseTypeTest.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/AbstractSourceDatabaseTypeTest.java deleted file mode 100644 index 9dcb95773cdb5..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/AbstractSourceDatabaseTypeTest.java +++ /dev/null @@ -1,374 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.standardtest.source; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.Lists; -import io.airbyte.cdk.db.Database; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.Field; -import io.airbyte.protocol.models.JsonSchemaType; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteMessage.Type; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import io.airbyte.protocol.models.v0.AirbyteStream; -import io.airbyte.protocol.models.v0.CatalogHelpers; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.v0.DestinationSyncMode; -import io.airbyte.protocol.models.v0.SyncMode; -import java.io.IOException; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; -import org.apache.commons.lang3.StringUtils; -import org.junit.jupiter.api.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * This abstract class contains common helpers and boilerplate for comprehensively testing that all - * data types in a source can be read and handled correctly by the connector and within Airbyte's - * type system. - */ -public abstract class AbstractSourceDatabaseTypeTest extends AbstractSourceConnectorTest { - - private static final Logger LOGGER = LoggerFactory.getLogger(AbstractSourceDatabaseTypeTest.class); - - protected final List testDataHolders = new ArrayList<>(); - protected Database database; - - /** - * The column name will be used for a PK column in the test tables. Override it if default name is - * not valid for your source. - * - * @return Id column name - */ - protected String getIdColumnName() { - return "id"; - } - - /** - * The column name will be used for a test column in the test tables. Override it if default name is - * not valid for your source. - * - * @return Test column name - */ - protected String getTestColumnName() { - return "test_column"; - } - - /** - * Setup the test database. All tables and data described in the registered tests will be put there. - * - * @return configured test database - * @throws Exception - might throw any exception during initialization. - */ - protected abstract Database setupDatabase() throws Exception; - - /** - * Put all required tests here using method {@link #addDataTypeTestData(TestDataHolder)} - */ - protected abstract void initTests(); - - @Override - protected void setupEnvironment(final TestDestinationEnv environment) throws Exception { - database = setupDatabase(); - initTests(); - createTables(); - populateTables(); - } - - /** - * Provide a source namespace. It's allocated place for table creation. It also known ask "Database - * Schema" or "Dataset" - * - * @return source name space - */ - protected abstract String getNameSpace(); - - /** - * Test the 'discover' command. TODO (liren): Some existing databases may fail testDataTypes(), so - * it is turned off by default. It should be enabled for all databases eventually. - */ - protected boolean testCatalog() { - return false; - } - - /** - * The test checks that the types from the catalog matches the ones discovered from the source. This - * test is disabled by default. To enable it you need to overwrite testCatalog() function. - */ - @Test - @SuppressWarnings("unchecked") - public void testDataTypes() throws Exception { - if (testCatalog()) { - runDiscover(); - final Map streams = getLastPersistedCatalog().getStreams().stream() - .collect(Collectors.toMap(AirbyteStream::getName, s -> s)); - - // testDataHolders should be initialized using the `addDataTypeTestData` function - testDataHolders.forEach(testDataHolder -> { - final AirbyteStream airbyteStream = streams.get(testDataHolder.getNameWithTestPrefix()); - final Map jsonSchemaTypeMap = (Map) Jsons.deserialize( - airbyteStream.getJsonSchema().get("properties").get(getTestColumnName()).toString(), Map.class); - assertEquals(testDataHolder.getAirbyteType().getJsonSchemaTypeMap(), jsonSchemaTypeMap, - "Expected column type for " + testDataHolder.getNameWithTestPrefix()); - }); - } - } - - /** - * The test checks that connector can fetch prepared data without failure. It uses a prepared - * catalog and read the source using that catalog. Then makes sure that the expected values are the - * ones inserted in the source. - */ - @Test - public void testDataContent() throws Exception { - // Class used to make easier the error reporting - class MissedRecords { - - // Stream that is missing any value - public String streamName; - // Which are the values that has not being gathered from the source - public List missedValues; - - public MissedRecords(String streamName, List missedValues) { - this.streamName = streamName; - this.missedValues = missedValues; - } - - } - - class UnexpectedRecord { - - public final String streamName; - public final String unexpectedValue; - - public UnexpectedRecord(String streamName, String unexpectedValue) { - this.streamName = streamName; - this.unexpectedValue = unexpectedValue; - } - - } - - final ConfiguredAirbyteCatalog catalog = getConfiguredCatalog(); - final List allMessages = runRead(catalog); - - final List recordMessages = allMessages.stream().filter(m -> m.getType() == Type.RECORD).toList(); - final Map> expectedValues = new HashMap<>(); - final Map> missedValuesByStream = new HashMap<>(); - final Map> unexpectedValuesByStream = new HashMap<>(); - final Map testByName = new HashMap<>(); - - // If there is no expected value in the test set we don't include it in the list to be asserted - // (even if the table contains records) - testDataHolders.forEach(testDataHolder -> { - if (!testDataHolder.getExpectedValues().isEmpty()) { - expectedValues.put(testDataHolder.getNameWithTestPrefix(), testDataHolder.getExpectedValues()); - testByName.put(testDataHolder.getNameWithTestPrefix(), testDataHolder); - } else { - LOGGER.warn("Missing expected values for type: " + testDataHolder.getSourceType()); - } - }); - - for (final AirbyteMessage message : recordMessages) { - final String streamName = message.getRecord().getStream(); - final List expectedValuesForStream = expectedValues.get(streamName); - if (expectedValuesForStream != null) { - final String value = getValueFromJsonNode(message.getRecord().getData().get(getTestColumnName())); - if (!expectedValuesForStream.contains(value)) { - unexpectedValuesByStream.putIfAbsent(streamName, new ArrayList<>()); - unexpectedValuesByStream.get(streamName).add(new UnexpectedRecord(streamName, value)); - } else { - expectedValuesForStream.remove(value); - } - } - } - - // Gather all the missing values, so we don't stop the test in the first missed one - expectedValues.forEach((streamName, values) -> { - if (!values.isEmpty()) { - missedValuesByStream.putIfAbsent(streamName, new ArrayList<>()); - missedValuesByStream.get(streamName).add(new MissedRecords(streamName, values)); - } - }); - - Map> errorsByStream = new HashMap<>(); - for (String streamName : unexpectedValuesByStream.keySet()) { - errorsByStream.putIfAbsent(streamName, new ArrayList<>()); - TestDataHolder test = testByName.get(streamName); - List unexpectedValues = unexpectedValuesByStream.get(streamName); - for (UnexpectedRecord unexpectedValue : unexpectedValues) { - errorsByStream.get(streamName).add( - "The stream '%s' checking type '%s' initialized at %s got unexpected values: %s".formatted(streamName, test.getSourceType(), - test.getDeclarationLocation(), unexpectedValue)); - } - } - - for (String streamName : missedValuesByStream.keySet()) { - errorsByStream.putIfAbsent(streamName, new ArrayList<>()); - TestDataHolder test = testByName.get(streamName); - List missedValues = missedValuesByStream.get(streamName); - for (MissedRecords missedValue : missedValues) { - errorsByStream.get(streamName).add( - "The stream '%s' checking type '%s' initialized at %s is missing values: %s".formatted(streamName, test.getSourceType(), - test.getDeclarationLocation(), missedValue)); - } - } - - List errorStrings = new ArrayList<>(); - for (List errors : errorsByStream.values()) { - errorStrings.add(StringUtils.join(errors, "\n")); - } - - assertTrue(errorsByStream.isEmpty(), StringUtils.join(errorStrings, "\n")); - } - - protected String getValueFromJsonNode(final JsonNode jsonNode) throws IOException { - if (jsonNode != null) { - if (jsonNode.isArray()) { - return jsonNode.toString(); - } - - String value = (jsonNode.isBinary() ? Arrays.toString(jsonNode.binaryValue()) : jsonNode.asText()); - value = (value != null && value.equals("null") ? null : value); - return value; - } - return null; - } - - /** - * Creates all tables and insert data described in the registered data type tests. - * - * @throws Exception might raise exception if configuration goes wrong or tables creation/insert - * scripts failed. - */ - - protected void createTables() throws Exception { - for (final TestDataHolder test : testDataHolders) { - database.query(ctx -> { - ctx.fetch(test.getCreateSqlQuery()); - LOGGER.info("Table {} is created.", test.getNameWithTestPrefix()); - return null; - }); - } - } - - protected void populateTables() throws Exception { - for (final TestDataHolder test : testDataHolders) { - database.query(ctx -> { - test.getInsertSqlQueries().forEach(ctx::fetch); - LOGGER.info("Inserted {} rows in Ttable {}", test.getInsertSqlQueries().size(), test.getNameWithTestPrefix()); - - return null; - }); - } - } - - /** - * Configures streams for all registered data type tests. - * - * @return configured catalog - */ - protected ConfiguredAirbyteCatalog getConfiguredCatalog() { - return new ConfiguredAirbyteCatalog().withStreams( - testDataHolders - .stream() - .map(test -> new ConfiguredAirbyteStream() - .withSyncMode(SyncMode.INCREMENTAL) - .withCursorField(Lists.newArrayList(getIdColumnName())) - .withDestinationSyncMode(DestinationSyncMode.APPEND) - .withStream(CatalogHelpers.createAirbyteStream( - String.format("%s", test.getNameWithTestPrefix()), - String.format("%s", getNameSpace()), - Field.of(getIdColumnName(), JsonSchemaType.INTEGER), - Field.of(getTestColumnName(), test.getAirbyteType())) - .withSourceDefinedCursor(true) - .withSourceDefinedPrimaryKey(List.of(List.of(getIdColumnName()))) - .withSupportedSyncModes( - Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)))) - .collect(Collectors.toList())); - } - - /** - * Register your test in the run scope. For each test will be created a table with one column of - * specified type. Note! If you register more than one test with the same type name, they will be - * run as independent tests with own streams. - * - * @param test comprehensive data type test - */ - public void addDataTypeTestData(final TestDataHolder test) { - testDataHolders.add(test); - test.setTestNumber(testDataHolders.stream().filter(t -> t.getSourceType().equals(test.getSourceType())).count()); - test.setNameSpace(getNameSpace()); - test.setIdColumnName(getIdColumnName()); - test.setTestColumnName(getTestColumnName()); - test.setDeclarationLocation(Thread.currentThread().getStackTrace()); - } - - private String formatCollection(final Collection collection) { - return collection.stream().map(s -> "`" + s + "`").collect(Collectors.joining(", ")); - } - - /** - * Builds a table with all registered test cases with values using Markdown syntax (can be used in - * the github). - * - * @return formatted list of test cases - */ - public String getMarkdownTestTable() { - final StringBuilder table = new StringBuilder() - .append("|**Data Type**|**Insert values**|**Expected values**|**Comment**|**Common test result**|\n") - .append("|----|----|----|----|----|\n"); - - testDataHolders.forEach(test -> table.append(String.format("| %s | %s | %s | %s | %s |\n", - test.getSourceType(), - formatCollection(test.getValues()), - formatCollection(test.getExpectedValues()), - "", - "Ok"))); - return table.toString(); - } - - protected void printMarkdownTestTable() { - LOGGER.info(getMarkdownTestTable()); - } - - protected ConfiguredAirbyteStream createDummyTableWithData(final Database database) throws SQLException { - database.query(ctx -> { - ctx.fetch("CREATE TABLE " + getNameSpace() + ".random_dummy_table(id INTEGER PRIMARY KEY, test_column VARCHAR(63));"); - ctx.fetch("INSERT INTO " + getNameSpace() + ".random_dummy_table VALUES (2, 'Random Data');"); - return null; - }); - - return new ConfiguredAirbyteStream().withSyncMode(SyncMode.INCREMENTAL) - .withCursorField(Lists.newArrayList("id")) - .withDestinationSyncMode(DestinationSyncMode.APPEND) - .withStream(CatalogHelpers.createAirbyteStream( - "random_dummy_table", - getNameSpace(), - Field.of("id", JsonSchemaType.INTEGER), - Field.of("test_column", JsonSchemaType.STRING)) - .withSourceDefinedCursor(true) - .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) - .withSourceDefinedPrimaryKey(List.of(List.of("id")))); - - } - - protected List extractStateMessages(final List messages) { - return messages.stream().filter(r -> r.getType() == Type.STATE).map(AirbyteMessage::getState) - .collect(Collectors.toList()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/PythonSourceAcceptanceTest.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/PythonSourceAcceptanceTest.java deleted file mode 100644 index f5caa2ad99788..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/PythonSourceAcceptanceTest.java +++ /dev/null @@ -1,144 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.standardtest.source; - -import static org.junit.jupiter.api.Assertions.assertTrue; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.Lists; -import com.google.common.collect.Streams; -import io.airbyte.commons.io.IOs; -import io.airbyte.commons.io.LineGobbler; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConnectorSpecification; -import io.airbyte.workers.TestHarnessUtils; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.List; -import java.util.concurrent.TimeUnit; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Extends TestSource such that it can be called using resources pulled from the file system. Will - * also add the ability to execute arbitrary scripts in the next version. - */ -public class PythonSourceAcceptanceTest extends SourceAcceptanceTest { - - private static final Logger LOGGER = LoggerFactory.getLogger(PythonSourceAcceptanceTest.class); - private static final String OUTPUT_FILENAME = "output.json"; - - public static String IMAGE_NAME; - public static String PYTHON_CONTAINER_NAME; - - private Path testRoot; - - @Override - protected String getImageName() { - return IMAGE_NAME; - } - - @Override - protected ConnectorSpecification getSpec() throws IOException { - return runExecutable(Command.GET_SPEC, ConnectorSpecification.class); - } - - @Override - protected JsonNode getConfig() throws IOException { - return runExecutable(Command.GET_CONFIG); - } - - @Override - protected ConfiguredAirbyteCatalog getConfiguredCatalog() throws IOException { - return runExecutable(Command.GET_CONFIGURED_CATALOG, ConfiguredAirbyteCatalog.class); - } - - @Override - protected JsonNode getState() throws IOException { - return runExecutable(Command.GET_STATE); - } - - @Override - protected void assertFullRefreshMessages(final List allMessages) throws IOException { - final List regexTests = Streams.stream(runExecutable(Command.GET_REGEX_TESTS).withArray("tests").elements()) - .map(JsonNode::textValue).toList(); - final List stringMessages = allMessages.stream().map(Jsons::serialize).toList(); - LOGGER.info("Running " + regexTests.size() + " regex tests..."); - regexTests.forEach(regex -> { - LOGGER.info("Looking for [" + regex + "]"); - assertTrue(stringMessages.stream().anyMatch(line -> line.matches(regex)), "Failed to find regex: " + regex); - }); - } - - @Override - protected void setupEnvironment(final TestDestinationEnv environment) throws Exception { - testRoot = Files.createTempDirectory(Files.createDirectories(Path.of("/tmp/standard_test")), "pytest"); - runExecutableVoid(Command.SETUP); - } - - @Override - protected void tearDown(final TestDestinationEnv testEnv) throws Exception { - runExecutableVoid(Command.TEARDOWN); - } - - private enum Command { - GET_SPEC, - GET_CONFIG, - GET_CONFIGURED_CATALOG, - GET_STATE, - GET_REGEX_TESTS, - SETUP, - TEARDOWN - } - - private T runExecutable(final Command cmd, final Class klass) throws IOException { - return Jsons.object(runExecutable(cmd), klass); - } - - private JsonNode runExecutable(final Command cmd) throws IOException { - return Jsons.deserialize(IOs.readFile(runExecutableInternal(cmd), OUTPUT_FILENAME)); - } - - private void runExecutableVoid(final Command cmd) throws IOException { - runExecutableInternal(cmd); - } - - private Path runExecutableInternal(final Command cmd) throws IOException { - LOGGER.info("testRoot = " + testRoot); - final List dockerCmd = - Lists.newArrayList( - "docker", - "run", - "--rm", - "-i", - "-v", - String.format("%s:%s", testRoot, "/test_root"), - "-w", - testRoot.toString(), - "--network", - "host", - PYTHON_CONTAINER_NAME, - cmd.toString().toLowerCase(), - "--out", - "/test_root"); - - final Process process = new ProcessBuilder(dockerCmd).start(); - LineGobbler.gobble(process.getErrorStream(), LOGGER::error); - LineGobbler.gobble(process.getInputStream(), LOGGER::info); - - TestHarnessUtils.gentleClose(process, 1, TimeUnit.MINUTES); - - final int exitCode = process.exitValue(); - if (exitCode != 0) { - throw new RuntimeException("python execution failed"); - } - - return testRoot; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/SourceAcceptanceTest.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/SourceAcceptanceTest.java deleted file mode 100644 index 9e77e0037d35d..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/SourceAcceptanceTest.java +++ /dev/null @@ -1,393 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.standardtest.source; - -import static io.airbyte.protocol.models.v0.SyncMode.FULL_REFRESH; -import static io.airbyte.protocol.models.v0.SyncMode.INCREMENTAL; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.google.common.collect.Iterables; -import com.google.common.collect.Sets; -import io.airbyte.commons.json.Jsons; -import io.airbyte.configoss.StandardCheckConnectionOutput.Status; -import io.airbyte.protocol.models.v0.AirbyteCatalog; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteMessage.Type; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.v0.ConnectorSpecification; -import io.airbyte.protocol.models.v0.DestinationSyncMode; -import io.airbyte.protocol.models.v0.SyncMode; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Objects; -import java.util.Set; -import java.util.UUID; -import java.util.stream.Collectors; -import org.junit.jupiter.api.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public abstract class SourceAcceptanceTest extends AbstractSourceConnectorTest { - - public static final String CDC_LSN = "_ab_cdc_lsn"; - public static final String CDC_UPDATED_AT = "_ab_cdc_updated_at"; - public static final String CDC_DELETED_AT = "_ab_cdc_deleted_at"; - public static final String CDC_LOG_FILE = "_ab_cdc_log_file"; - public static final String CDC_LOG_POS = "_ab_cdc_log_pos"; - public static final String CDC_DEFAULT_CURSOR = "_ab_cdc_cursor"; - public static final String CDC_EVENT_SERIAL_NO = "_ab_cdc_event_serial_no"; - - private static final Logger LOGGER = LoggerFactory.getLogger(SourceAcceptanceTest.class); - - /** - * TODO hack: Various Singer integrations use cursor fields inclusively i.e: they output records - * whose cursor field >= the provided cursor value. This leads to the last record in a sync to - * always be the first record in the next sync. This is a fine assumption from a product POV since - * we offer at-least-once delivery. But for simplicity, the incremental test suite currently assumes - * that the second incremental read should output no records when provided the state from the first - * sync. This works for many integrations but not some Singer ones, so we hardcode the list of - * integrations to skip over when performing those tests. - */ - private final Set IMAGES_TO_SKIP_SECOND_INCREMENTAL_READ = Sets.newHashSet( - "airbyte/source-intercom-singer", - "airbyte/source-exchangeratesapi-singer", - "airbyte/source-hubspot", - "airbyte/source-iterable", - "airbyte/source-marketo-singer", - "airbyte/source-twilio-singer", - "airbyte/source-mixpanel-singer", - "airbyte/source-twilio-singer", - "airbyte/source-braintree-singer", - "airbyte/source-stripe-singer", - "airbyte/source-exchange-rates", - "airbyte/source-stripe", - "airbyte/source-github-singer", - "airbyte/source-gitlab-singer", - "airbyte/source-google-workspace-admin-reports", - "airbyte/source-zendesk-talk", - "airbyte/source-zendesk-support-singer", - "airbyte/source-quickbooks-singer", - "airbyte/source-jira"); - - /** - * FIXME: Some sources can't guarantee that there will be no events between two sequential sync - */ - private final Set IMAGES_TO_SKIP_IDENTICAL_FULL_REFRESHES = Sets.newHashSet( - "airbyte/source-google-workspace-admin-reports", "airbyte/source-kafka"); - - /** - * Specification for integration. Will be passed to integration where appropriate in each test. - * Should be valid. - * - * @return integration-specific configuration - */ - protected abstract ConnectorSpecification getSpec() throws Exception; - - /** - * The catalog to use to validate the output of read operations. This will be used as follows: - *

    - * Full Refresh syncs will be tested on all the input streams which support it Incremental syncs: - - * if the stream declares a source-defined cursor, it will be tested with an incremental sync using - * the default cursor. - if the stream requires a user-defined cursor, it will be tested with the - * input cursor in both cases, the input {@link #getState()} will be used as the input state. - * - * @return - * @throws Exception - */ - protected abstract ConfiguredAirbyteCatalog getConfiguredCatalog() throws Exception; - - /** - * @return a JSON file representing the state file to use when testing incremental syncs - */ - protected abstract JsonNode getState() throws Exception; - - /** - * Verify that a spec operation issued to the connector returns a valid spec. - */ - @Test - public void testGetSpec() throws Exception { - assertEquals(getSpec(), runSpec(), "Expected spec output by integration to be equal to spec provided by test runner"); - } - - /** - * Verify that a check operation issued to the connector with the input config file returns a - * success response. - */ - @Test - public void testCheckConnection() throws Exception { - assertEquals(Status.SUCCEEDED, runCheck().getStatus(), "Expected check connection operation to succeed"); - } - - // /** - // * Verify that when given invalid credentials, that check connection returns a failed response. - // * Assume that the {@link TestSource#getFailCheckConfig()} is invalid. - // */ - // @Test - // public void testCheckConnectionInvalidCredentials() throws Exception { - // final OutputAndStatus output = runCheck(); - // assertTrue(output.getOutput().isPresent()); - // assertEquals(Status.FAILED, output.getOutput().get().getStatus()); - // } - - /** - * Verifies when a discover operation is run on the connector using the given config file, a valid - * catalog is output by the connector. - */ - @Test - public void testDiscover() throws Exception { - final UUID discoverOutput = runDiscover(); - final AirbyteCatalog discoveredCatalog = getLastPersistedCatalog(); - assertNotNull(discoveredCatalog, "Expected discover to produce a catalog"); - verifyCatalog(discoveredCatalog); - } - - /** - * Override this method to check the actual catalog. - */ - protected void verifyCatalog(final AirbyteCatalog catalog) throws Exception { - // do nothing by default - } - - /** - * Configuring all streams in the input catalog to full refresh mode, verifies that a read operation - * produces some RECORD messages. - */ - @Test - public void testFullRefreshRead() throws Exception { - if (!sourceSupportsFullRefresh()) { - LOGGER.info("Test skipped. Source does not support full refresh."); - return; - } - - final ConfiguredAirbyteCatalog catalog = withFullRefreshSyncModes(getConfiguredCatalog()); - final List allMessages = runRead(catalog); - - assertFalse(filterRecords(allMessages).isEmpty(), "Expected a full refresh sync to produce records"); - assertFullRefreshMessages(allMessages); - } - - /** - * Override this method to perform more specific assertion on the messages. - */ - protected void assertFullRefreshMessages(final List allMessages) throws Exception { - // do nothing by default - } - - /** - * Configuring all streams in the input catalog to full refresh mode, performs two read operations - * on all streams which support full refresh syncs. It then verifies that the RECORD messages output - * from both were identical. - */ - @Test - public void testIdenticalFullRefreshes() throws Exception { - if (!sourceSupportsFullRefresh()) { - LOGGER.info("Test skipped. Source does not support full refresh."); - return; - } - - if (IMAGES_TO_SKIP_IDENTICAL_FULL_REFRESHES.contains(getImageName().split(":")[0])) { - return; - } - - final ConfiguredAirbyteCatalog configuredCatalog = withFullRefreshSyncModes(getConfiguredCatalog()); - final List recordMessagesFirstRun = filterRecords(runRead(configuredCatalog)); - final List recordMessagesSecondRun = filterRecords(runRead(configuredCatalog)); - // the worker validates the messages, so we just validate the message, so we do not need to validate - // again (as long as we use the worker, which we will not want to do long term). - assertFalse(recordMessagesFirstRun.isEmpty(), "Expected first full refresh to produce records"); - assertFalse(recordMessagesSecondRun.isEmpty(), "Expected second full refresh to produce records"); - - assertSameRecords(recordMessagesFirstRun, recordMessagesSecondRun, "Expected two full refresh syncs to produce the same records"); - } - - /** - * This test verifies that all streams in the input catalog which support incremental sync can do so - * correctly. It does this by running two read operations on the connector's Docker image: the first - * takes the configured catalog and config provided to this test as input. It then verifies that the - * sync produced a non-zero number of RECORD and STATE messages. - *

    - * The second read takes the same catalog and config used in the first test, plus the last STATE - * message output by the first read operation as the input state file. It verifies that no records - * are produced (since we read all records in the first sync). - *

    - * This test is performed only for streams which support incremental. Streams which do not support - * incremental sync are ignored. If no streams in the input catalog support incremental sync, this - * test is skipped. - */ - @Test - public void testIncrementalSyncWithState() throws Exception { - if (!sourceSupportsIncremental()) { - return; - } - - final ConfiguredAirbyteCatalog configuredCatalog = withSourceDefinedCursors(getConfiguredCatalog()); - // only sync incremental streams - configuredCatalog.setStreams( - configuredCatalog.getStreams().stream().filter(s -> s.getSyncMode() == INCREMENTAL).collect(Collectors.toList())); - - final List airbyteMessages = runRead(configuredCatalog, getState()); - final List recordMessages = filterRecords(airbyteMessages); - final List stateMessages = airbyteMessages - .stream() - .filter(m -> m.getType() == Type.STATE) - .map(AirbyteMessage::getState) - .collect(Collectors.toList()); - assertFalse(recordMessages.isEmpty(), "Expected the first incremental sync to produce records"); - assertFalse(stateMessages.isEmpty(), "Expected incremental sync to produce STATE messages"); - // TODO validate exact records - - if (IMAGES_TO_SKIP_SECOND_INCREMENTAL_READ.contains(getImageName().split(":")[0])) { - return; - } - - // when we run incremental sync again there should be no new records. Run a sync with the latest - // state message and assert no records were emitted. - JsonNode latestState = null; - for (final AirbyteStateMessage stateMessage : stateMessages) { - if (stateMessage.getType().equals(AirbyteStateMessage.AirbyteStateType.STREAM)) { - latestState = Jsons.jsonNode(stateMessages); - break; - } else if (stateMessage.getType().equals(AirbyteStateMessage.AirbyteStateType.GLOBAL)) { - latestState = Jsons.jsonNode(List.of(Iterables.getLast(stateMessages))); - break; - } else { - throw new RuntimeException("Unknown state type " + stateMessage.getType()); - } - } - - assert Objects.nonNull(latestState); - final List secondSyncRecords = filterRecords(runRead(configuredCatalog, latestState)); - assertTrue( - secondSyncRecords.isEmpty(), - "Expected the second incremental sync to produce no records when given the first sync's output state."); - } - - /** - * If the source does not support incremental sync, this test is skipped. - *

    - * Otherwise, this test runs two syncs: one where all streams provided in the input catalog sync in - * full refresh mode, and another where all the streams which in the input catalog which support - * incremental, sync in incremental mode (streams which don't support incremental sync in full - * refresh mode). Then, the test asserts that the two syncs produced the same RECORD messages. Any - * other type of message is disregarded. - */ - @Test - public void testEmptyStateIncrementalIdenticalToFullRefresh() throws Exception { - if (!sourceSupportsIncremental()) { - return; - } - - if (!sourceSupportsFullRefresh()) { - LOGGER.info("Test skipped. Source does not support full refresh."); - return; - } - - final ConfiguredAirbyteCatalog configuredCatalog = getConfiguredCatalog(); - final ConfiguredAirbyteCatalog fullRefreshCatalog = withFullRefreshSyncModes(configuredCatalog); - - final List fullRefreshRecords = filterRecords(runRead(fullRefreshCatalog)); - final List emptyStateRecords = filterRecords(runRead(configuredCatalog, Jsons.jsonNode(new HashMap<>()))); - assertFalse(fullRefreshRecords.isEmpty(), "Expected a full refresh sync to produce records"); - assertFalse(emptyStateRecords.isEmpty(), "Expected state records to not be empty"); - assertSameRecords(fullRefreshRecords, emptyStateRecords, - "Expected a full refresh sync and incremental sync with no input state to produce identical records"); - } - - /** - * In order to launch a source on Kubernetes in a pod, we need to be able to wrap the entrypoint. - * The source connector must specify its entrypoint in the AIRBYTE_ENTRYPOINT variable. This test - * ensures that the entrypoint environment variable is set. - */ - @Test - public void testEntrypointEnvVar() throws Exception { - checkEntrypointEnvVariable(); - } - - protected static List filterRecords(final Collection messages) { - return messages.stream() - .filter(m -> m.getType() == Type.RECORD) - .map(AirbyteMessage::getRecord) - .collect(Collectors.toList()); - } - - protected ConfiguredAirbyteCatalog withSourceDefinedCursors(final ConfiguredAirbyteCatalog catalog) { - final ConfiguredAirbyteCatalog clone = Jsons.clone(catalog); - for (final ConfiguredAirbyteStream configuredStream : clone.getStreams()) { - if (configuredStream.getSyncMode() == INCREMENTAL - && configuredStream.getStream().getSourceDefinedCursor() != null - && configuredStream.getStream().getSourceDefinedCursor()) { - configuredStream.setCursorField(configuredStream.getStream().getDefaultCursorField()); - } - } - return clone; - } - - protected ConfiguredAirbyteCatalog withFullRefreshSyncModes(final ConfiguredAirbyteCatalog catalog) { - final ConfiguredAirbyteCatalog clone = Jsons.clone(catalog); - for (final ConfiguredAirbyteStream configuredStream : clone.getStreams()) { - if (configuredStream.getStream().getSupportedSyncModes().contains(FULL_REFRESH)) { - configuredStream.setSyncMode(FULL_REFRESH); - configuredStream.setDestinationSyncMode(DestinationSyncMode.OVERWRITE); - } - } - return clone; - } - - private boolean sourceSupportsIncremental() throws Exception { - return sourceSupports(INCREMENTAL); - } - - private boolean sourceSupportsFullRefresh() throws Exception { - return sourceSupports(FULL_REFRESH); - } - - private boolean sourceSupports(final SyncMode syncMode) throws Exception { - final ConfiguredAirbyteCatalog catalog = getConfiguredCatalog(); - for (final ConfiguredAirbyteStream stream : catalog.getStreams()) { - if (stream.getStream().getSupportedSyncModes().contains(syncMode)) { - return true; - } - } - return false; - } - - private void assertSameRecords(final List expected, final List actual, final String message) { - final List prunedExpected = expected.stream().map(this::pruneEmittedAt).collect(Collectors.toList()); - final List prunedActual = actual - .stream() - .map(this::pruneEmittedAt) - .map(this::pruneCdcMetadata) - .collect(Collectors.toList()); - assertEquals(prunedExpected.size(), prunedActual.size(), message); - assertTrue(prunedExpected.containsAll(prunedActual), message); - assertTrue(prunedActual.containsAll(prunedExpected), message); - } - - private AirbyteRecordMessage pruneEmittedAt(final AirbyteRecordMessage m) { - return Jsons.clone(m).withEmittedAt(null); - } - - private AirbyteRecordMessage pruneCdcMetadata(final AirbyteRecordMessage m) { - final AirbyteRecordMessage clone = Jsons.clone(m); - ((ObjectNode) clone.getData()).remove(CDC_LSN); - ((ObjectNode) clone.getData()).remove(CDC_LOG_FILE); - ((ObjectNode) clone.getData()).remove(CDC_LOG_POS); - ((ObjectNode) clone.getData()).remove(CDC_UPDATED_AT); - ((ObjectNode) clone.getData()).remove(CDC_DELETED_AT); - ((ObjectNode) clone.getData()).remove(CDC_EVENT_SERIAL_NO); - ((ObjectNode) clone.getData()).remove(CDC_DEFAULT_CURSOR); - return clone; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/TestDataHolder.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/TestDataHolder.java deleted file mode 100644 index 8c8e0b103b306..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/TestDataHolder.java +++ /dev/null @@ -1,244 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.standardtest.source; - -import io.airbyte.protocol.models.JsonSchemaType; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -public class TestDataHolder { - - private static final String DEFAULT_CREATE_TABLE_SQL = "CREATE TABLE %1$s(%2$s INTEGER PRIMARY KEY, %3$s %4$s)"; - private static final String DEFAULT_INSERT_SQL = "INSERT INTO %1$s VALUES (%2$s, %3$s)"; - - private final String sourceType; - private final JsonSchemaType airbyteType; - private final List values; - private final List expectedValues; - private final String createTablePatternSql; - private final String insertPatternSql; - private final String fullSourceDataType; - private String nameSpace; - private long testNumber; - private String idColumnName; - private String testColumnName; - - private StackTraceElement[] declarationLocation; - - TestDataHolder(final String sourceType, - final JsonSchemaType airbyteType, - final List values, - final List expectedValues, - final String createTablePatternSql, - final String insertPatternSql, - final String fullSourceDataType) { - this.sourceType = sourceType; - this.airbyteType = airbyteType; - this.values = values; - this.expectedValues = expectedValues; - this.createTablePatternSql = createTablePatternSql; - this.insertPatternSql = insertPatternSql; - this.fullSourceDataType = fullSourceDataType; - } - - /** - * The builder allows to setup any comprehensive data type test. - * - * @return builder for setup comprehensive test - */ - public static TestDataHolderBuilder builder() { - return new TestDataHolderBuilder(); - } - - public static class TestDataHolderBuilder { - - private String sourceType; - private JsonSchemaType airbyteType; - private final List values = new ArrayList<>(); - private final List expectedValues = new ArrayList<>(); - private String createTablePatternSql; - private String insertPatternSql; - private String fullSourceDataType; - - TestDataHolderBuilder() { - this.createTablePatternSql = DEFAULT_CREATE_TABLE_SQL; - this.insertPatternSql = DEFAULT_INSERT_SQL; - } - - /** - * The name of the source data type. Duplicates by name will be tested independently from each - * others. Note that this name will be used for connector setup and table creation. If source syntax - * requires more details (E.g. "varchar" type requires length "varchar(50)"), you can additionally - * set custom data type syntax by {@link TestDataHolderBuilder#fullSourceDataType(String)} method. - * - * @param sourceType source data type name - * @return builder - */ - public TestDataHolderBuilder sourceType(final String sourceType) { - this.sourceType = sourceType; - if (fullSourceDataType == null) - fullSourceDataType = sourceType; - return this; - } - - /** - * corresponding Airbyte data type. It requires for proper configuration - * {@link ConfiguredAirbyteStream} - * - * @param airbyteType Airbyte data type - * @return builder - */ - public TestDataHolderBuilder airbyteType(final JsonSchemaType airbyteType) { - this.airbyteType = airbyteType; - return this; - } - - /** - * Set custom the create table script pattern. Use it if you source uses untypical table creation - * sql. Default patter described {@link #DEFAULT_CREATE_TABLE_SQL} Note! The patter should contain - * four String place holders for the: - namespace.table name (as one placeholder together) - id - * column name - test column name - test column data type - * - * @param createTablePatternSql creation table sql pattern - * @return builder - */ - public TestDataHolderBuilder createTablePatternSql(final String createTablePatternSql) { - this.createTablePatternSql = createTablePatternSql; - return this; - } - - /** - * Set custom the insert record script pattern. Use it if you source uses untypical insert record - * sql. Default patter described {@link #DEFAULT_INSERT_SQL} Note! The patter should contains two - * String place holders for the table name and value. - * - * @param insertPatternSql creation table sql pattern - * @return builder - */ - public TestDataHolderBuilder insertPatternSql(final String insertPatternSql) { - this.insertPatternSql = insertPatternSql; - return this; - } - - /** - * Allows to set extended data type for the table creation. E.g. The "varchar" type requires in - * MySQL requires length. In this case fullSourceDataType will be "varchar(50)". - * - * @param fullSourceDataType actual string for the column data type description - * @return builder - */ - public TestDataHolderBuilder fullSourceDataType(final String fullSourceDataType) { - this.fullSourceDataType = fullSourceDataType; - return this; - } - - /** - * Adds value(s) to the scope of a corresponding test. The values will be inserted into the created - * table. Note! The value will be inserted into the insert script without any transformations. Make - * sure that the value is in line with the source syntax. - * - * @param insertValue test value - * @return builder - */ - public TestDataHolderBuilder addInsertValues(final String... insertValue) { - this.values.addAll(Arrays.asList(insertValue)); - return this; - } - - /** - * Adds expected value(s) to the test scope. If you add at least one value, it will check that all - * values are provided by corresponding streamer. - * - * @param expectedValue value which should be provided by a streamer - * @return builder - */ - public TestDataHolderBuilder addExpectedValues(final String... expectedValue) { - this.expectedValues.addAll(Arrays.asList(expectedValue)); - return this; - } - - /** - * Add NULL value to the expected value list. If you need to add only one value and it's NULL, you - * have to use this method instead of {@link #addExpectedValues(String...)} - * - * @return builder - */ - public TestDataHolderBuilder addNullExpectedValue() { - this.expectedValues.add(null); - return this; - } - - public TestDataHolder build() { - return new TestDataHolder(sourceType, airbyteType, values, expectedValues, createTablePatternSql, insertPatternSql, fullSourceDataType); - } - - } - - void setNameSpace(final String nameSpace) { - this.nameSpace = nameSpace; - } - - void setTestNumber(final long testNumber) { - this.testNumber = testNumber; - } - - void setIdColumnName(final String idColumnName) { - this.idColumnName = idColumnName; - } - - void setTestColumnName(final String testColumnName) { - this.testColumnName = testColumnName; - } - - public String getSourceType() { - return sourceType; - } - - public JsonSchemaType getAirbyteType() { - return airbyteType; - } - - public List getExpectedValues() { - return expectedValues; - } - - public List getValues() { - return values; - } - - public String getNameSpace() { - return nameSpace; - } - - public String getNameWithTestPrefix() { - // source type may include space (e.g. "character varying") - return nameSpace + "_" + testNumber + "_" + sourceType.replaceAll("\\s", "_"); - } - - public String getCreateSqlQuery() { - return String.format(createTablePatternSql, (nameSpace != null ? nameSpace + "." : "") + getNameWithTestPrefix(), idColumnName, testColumnName, - fullSourceDataType); - } - - void setDeclarationLocation(StackTraceElement[] declarationLocation) { - this.declarationLocation = declarationLocation; - } - - public String getDeclarationLocation() { - return Arrays.asList(declarationLocation).subList(2, 3).toString(); - } - - public List getInsertSqlQueries() { - final List insertSqls = new ArrayList<>(); - int rowId = 1; - for (final String value : values) { - insertSqls.add(String.format(insertPatternSql, (nameSpace != null ? nameSpace + "." : "") + getNameWithTestPrefix(), rowId++, value)); - } - return insertSqls; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/TestDestinationEnv.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/TestDestinationEnv.java deleted file mode 100644 index 451cb4864b8c1..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/TestDestinationEnv.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.standardtest.source; - -import java.nio.file.Path; - -public class TestDestinationEnv { - - private final Path localRoot; - - public TestDestinationEnv(final Path localRoot) { - this.localRoot = localRoot; - } - - public Path getLocalRoot() { - return localRoot; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/TestEnvConfigs.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/TestEnvConfigs.java deleted file mode 100644 index 88992d8da6c46..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/TestEnvConfigs.java +++ /dev/null @@ -1,135 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.standardtest.source; - -import com.google.common.base.Preconditions; -import io.airbyte.commons.lang.Exceptions; -import io.airbyte.commons.map.MoreMaps; -import io.airbyte.commons.version.AirbyteVersion; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Objects; -import java.util.Set; -import java.util.function.Function; -import java.util.function.Supplier; -import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * This class passes environment variable to the DockerProcessFactory that runs the source in the - * SourceAcceptanceTest. - */ -// todo (cgardens) - this cloud_deployment implicit interface is going to bite us. -public class TestEnvConfigs { - - private static final Logger LOGGER = LoggerFactory.getLogger(TestEnvConfigs.class); - - // env variable names - public static final String AIRBYTE_ROLE = "AIRBYTE_ROLE"; - public static final String AIRBYTE_VERSION = "AIRBYTE_VERSION"; - public static final String WORKER_ENVIRONMENT = "WORKER_ENVIRONMENT"; - public static final String DEPLOYMENT_MODE = "DEPLOYMENT_MODE"; - public static final String JOB_DEFAULT_ENV_PREFIX = "JOB_DEFAULT_ENV_"; - - public static final Map> JOB_SHARED_ENVS = Map.of( - AIRBYTE_VERSION, (instance) -> instance.getAirbyteVersion().serialize(), - AIRBYTE_ROLE, TestEnvConfigs::getAirbyteRole, - DEPLOYMENT_MODE, (instance) -> instance.getDeploymentMode().name(), - WORKER_ENVIRONMENT, (instance) -> instance.getWorkerEnvironment().name()); - - enum DeploymentMode { - OSS, - CLOUD - } - - enum WorkerEnvironment { - DOCKER, - KUBERNETES - } - - private final Function getEnv; - private final Supplier> getAllEnvKeys; - - public TestEnvConfigs() { - this(System.getenv()); - } - - private TestEnvConfigs(final Map envMap) { - getEnv = envMap::get; - getAllEnvKeys = envMap::keySet; - } - - // CORE - // General - public String getAirbyteRole() { - return getEnv(AIRBYTE_ROLE); - } - - public AirbyteVersion getAirbyteVersion() { - return new AirbyteVersion(getEnsureEnv(AIRBYTE_VERSION)); - } - - public DeploymentMode getDeploymentMode() { - return getEnvOrDefault(DEPLOYMENT_MODE, DeploymentMode.OSS, s -> { - try { - return DeploymentMode.valueOf(s); - } catch (final IllegalArgumentException e) { - LOGGER.info(s + " not recognized, defaulting to " + DeploymentMode.OSS); - return DeploymentMode.OSS; - } - }); - } - - public WorkerEnvironment getWorkerEnvironment() { - return getEnvOrDefault(WORKER_ENVIRONMENT, WorkerEnvironment.DOCKER, s -> WorkerEnvironment.valueOf(s.toUpperCase())); - } - - /** - * There are two types of environment variables available to the job container: - *

      - *
    • Exclusive variables prefixed with JOB_DEFAULT_ENV_PREFIX
    • - *
    • Shared variables defined in JOB_SHARED_ENVS
    • - *
    - */ - public Map getJobDefaultEnvMap() { - final Map jobPrefixedEnvMap = getAllEnvKeys.get().stream() - .filter(key -> key.startsWith(JOB_DEFAULT_ENV_PREFIX)) - .collect(Collectors.toMap(key -> key.replace(JOB_DEFAULT_ENV_PREFIX, ""), getEnv)); - // This method assumes that these shared env variables are not critical to the execution - // of the jobs, and only serve as metadata. So any exception is swallowed and default to - // an empty string. Change this logic if this assumption no longer holds. - final Map jobSharedEnvMap = JOB_SHARED_ENVS.entrySet().stream().collect(Collectors.toMap( - Entry::getKey, - entry -> Exceptions.swallowWithDefault(() -> Objects.requireNonNullElse(entry.getValue().apply(this), ""), ""))); - return MoreMaps.merge(jobPrefixedEnvMap, jobSharedEnvMap); - } - - public T getEnvOrDefault(final String key, final T defaultValue, final Function parser) { - return getEnvOrDefault(key, defaultValue, parser, false); - } - - public T getEnvOrDefault(final String key, final T defaultValue, final Function parser, final boolean isSecret) { - final String value = getEnv.apply(key); - if (value != null && !value.isEmpty()) { - return parser.apply(value); - } else { - LOGGER.info("Using default value for environment variable {}: '{}'", key, isSecret ? "*****" : defaultValue); - return defaultValue; - } - } - - public String getEnv(final String name) { - return getEnv.apply(name); - } - - public String getEnsureEnv(final String name) { - final String value = getEnv(name); - Preconditions.checkArgument(value != null, "'%s' environment variable cannot be null", name); - - return value; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/TestPythonSourceMain.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/TestPythonSourceMain.java deleted file mode 100644 index f00f0f2a7e194..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/TestPythonSourceMain.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.standardtest.source; - -import net.sourceforge.argparse4j.ArgumentParsers; -import net.sourceforge.argparse4j.inf.ArgumentParser; -import net.sourceforge.argparse4j.inf.ArgumentParserException; -import net.sourceforge.argparse4j.inf.Namespace; - -/** - * Parse command line arguments and inject them into the test class before running the test. Then - * runs the tests. - */ -public class TestPythonSourceMain { - - public static void main(final String[] args) { - final ArgumentParser parser = ArgumentParsers.newFor(TestPythonSourceMain.class.getName()).build() - .defaultHelp(true) - .description("Run standard source tests"); - - parser.addArgument("--imageName") - .help("Name of the integration image"); - - parser.addArgument("--pythonContainerName") - .help("Name of the python integration image"); - - Namespace ns = null; - try { - ns = parser.parseArgs(args); - } catch (final ArgumentParserException e) { - parser.handleError(e); - System.exit(1); - } - - final String imageName = ns.getString("imageName"); - final String pythonContainerName = ns.getString("pythonContainerName"); - - PythonSourceAcceptanceTest.IMAGE_NAME = imageName; - PythonSourceAcceptanceTest.PYTHON_CONTAINER_NAME = pythonContainerName; - - TestRunner.runTestClass(PythonSourceAcceptanceTest.class); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/TestRunner.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/TestRunner.java deleted file mode 100644 index 1f27307421fcb..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/TestRunner.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.standardtest.source; - -import static org.junit.platform.engine.discovery.DiscoverySelectors.selectClass; - -import java.io.PrintWriter; -import java.nio.charset.StandardCharsets; -import org.junit.platform.launcher.Launcher; -import org.junit.platform.launcher.LauncherDiscoveryRequest; -import org.junit.platform.launcher.TestPlan; -import org.junit.platform.launcher.core.LauncherDiscoveryRequestBuilder; -import org.junit.platform.launcher.core.LauncherFactory; -import org.junit.platform.launcher.listeners.SummaryGeneratingListener; - -public class TestRunner { - - public static void runTestClass(final Class testClass) { - final LauncherDiscoveryRequest request = LauncherDiscoveryRequestBuilder.request() - .selectors(selectClass(testClass)) - .build(); - - final TestPlan plan = LauncherFactory.create().discover(request); - final Launcher launcher = LauncherFactory.create(); - - // Register a listener of your choice - final SummaryGeneratingListener listener = new SummaryGeneratingListener(); - - launcher.execute(plan, listener); - - listener.getSummary().printFailuresTo(new PrintWriter(System.out, false, StandardCharsets.UTF_8)); - listener.getSummary().printTo(new PrintWriter(System.out, false, StandardCharsets.UTF_8)); - - if (listener.getSummary().getTestsFailedCount() > 0) { - System.out.println( - "There are failing tests. See https://docs.airbyte.io/contributing-to-airbyte/building-new-connector/standard-source-tests " + - "for more information about the standard source test suite."); - System.exit(1); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/fs/ExecutableTestSource.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/fs/ExecutableTestSource.java deleted file mode 100644 index 9df6e564d945d..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/fs/ExecutableTestSource.java +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.standardtest.source.fs; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.standardtest.source.SourceAcceptanceTest; -import io.airbyte.cdk.integrations.standardtest.source.TestDestinationEnv; -import io.airbyte.commons.io.IOs; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConnectorSpecification; -import java.nio.file.Path; -import javax.annotation.Nullable; - -/** - * Extends TestSource such that it can be called using resources pulled from the file system. Will - * also add the ability to execute arbitrary scripts in the next version. - */ -public class ExecutableTestSource extends SourceAcceptanceTest { - - public static class TestConfig { - - private final String imageName; - private final Path specPath; - private final Path configPath; - private final Path catalogPath; - - private final Path statePath; - - public TestConfig(final String imageName, final Path specPath, final Path configPath, final Path catalogPath, final Path statePath) { - this.imageName = imageName; - this.specPath = specPath; - this.configPath = configPath; - this.catalogPath = catalogPath; - this.statePath = statePath; - } - - public String getImageName() { - return imageName; - } - - public Path getSpecPath() { - return specPath; - } - - public Path getConfigPath() { - return configPath; - } - - public Path getCatalogPath() { - return catalogPath; - } - - @Nullable - public Path getStatePath() { - return statePath; - } - - } - - public static TestConfig TEST_CONFIG; - - @Override - protected ConnectorSpecification getSpec() { - return Jsons.deserialize(IOs.readFile(TEST_CONFIG.getSpecPath()), ConnectorSpecification.class); - } - - @Override - protected String getImageName() { - return TEST_CONFIG.getImageName(); - } - - @Override - protected JsonNode getConfig() { - return Jsons.deserialize(IOs.readFile(TEST_CONFIG.getConfigPath())); - } - - @Override - protected ConfiguredAirbyteCatalog getConfiguredCatalog() { - return Jsons.deserialize(IOs.readFile(TEST_CONFIG.getCatalogPath()), ConfiguredAirbyteCatalog.class); - } - - @Override - protected JsonNode getState() { - if (TEST_CONFIG.getStatePath() != null) { - return Jsons.deserialize(IOs.readFile(TEST_CONFIG.getStatePath())); - } else { - return Jsons.deserialize("{}"); - } - - } - - @Override - protected void setupEnvironment(final TestDestinationEnv environment) throws Exception { - // no-op, for now - } - - @Override - protected void tearDown(final TestDestinationEnv testEnv) throws Exception { - // no-op, for now - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/fs/TestSourceMain.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/fs/TestSourceMain.java deleted file mode 100644 index 7eb5958b424e5..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/fs/TestSourceMain.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.standardtest.source.fs; - -import io.airbyte.cdk.integrations.standardtest.source.TestRunner; -import java.nio.file.Path; -import net.sourceforge.argparse4j.ArgumentParsers; -import net.sourceforge.argparse4j.inf.ArgumentParser; -import net.sourceforge.argparse4j.inf.ArgumentParserException; -import net.sourceforge.argparse4j.inf.Namespace; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Parse command line arguments and inject them into the test class before running the test. Then - * runs the tests. - */ -public class TestSourceMain { - - private static final Logger LOGGER = LoggerFactory.getLogger(TestSourceMain.class); - - public static void main(final String[] args) { - final ArgumentParser parser = ArgumentParsers.newFor(TestSourceMain.class.getName()).build() - .defaultHelp(true) - .description("Run standard source tests"); - - parser.addArgument("--imageName") - .required(true) - .help("Name of the source connector image e.g: airbyte/source-mailchimp"); - - parser.addArgument("--spec") - .required(true) - .help("Path to file that contains spec json"); - - parser.addArgument("--config") - .required(true) - .help("Path to file that contains config json"); - - parser.addArgument("--catalog") - .required(true) - .help("Path to file that contains catalog json"); - - parser.addArgument("--state") - .required(false) - .help("Path to the file containing state"); - - Namespace ns = null; - try { - ns = parser.parseArgs(args); - } catch (final ArgumentParserException e) { - parser.handleError(e); - System.exit(1); - } - - final String imageName = ns.getString("imageName"); - final String specFile = ns.getString("spec"); - final String configFile = ns.getString("config"); - final String catalogFile = ns.getString("catalog"); - final String stateFile = ns.getString("state"); - - ExecutableTestSource.TEST_CONFIG = new ExecutableTestSource.TestConfig( - imageName, - Path.of(specFile), - Path.of(configFile), - Path.of(catalogFile), - stateFile != null ? Path.of(stateFile) : null); - - TestRunner.runTestClass(ExecutableTestSource.class); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/performancetest/AbstractSourceBasePerformanceTest.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/performancetest/AbstractSourceBasePerformanceTest.java deleted file mode 100644 index c8a4ddaa52f9e..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/performancetest/AbstractSourceBasePerformanceTest.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.standardtest.source.performancetest; - -import io.airbyte.cdk.integrations.standardtest.source.AbstractSourceConnectorTest; -import io.airbyte.cdk.integrations.standardtest.source.TestDestinationEnv; - -/** - * This abstract class contains common methods for both steams - Fill Db scripts and Performance - * tests. - */ -public abstract class AbstractSourceBasePerformanceTest extends AbstractSourceConnectorTest { - - private static final String TEST_COLUMN_NAME = "test_column"; - private static final String TEST_STREAM_NAME_TEMPLATE = "test_%S"; - - /** - * The column name will be used for a test column in the test tables. Override it if default name is - * not valid for your source. - * - * @return Test column name - */ - protected String getTestColumnName() { - return TEST_COLUMN_NAME; - } - - /** - * The stream name template will be used for a test tables. Override it if default name is not valid - * for your source. - * - * @return Test steam name template - */ - protected String getTestStreamNameTemplate() { - return TEST_STREAM_NAME_TEMPLATE; - } - - @Override - protected void setupEnvironment(final TestDestinationEnv environment) throws Exception { - // DO NOTHING. Mandatory to override. DB will be setup as part of each test - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/performancetest/AbstractSourceFillDbWithTestData.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/performancetest/AbstractSourceFillDbWithTestData.java deleted file mode 100644 index b8066a7aae8e7..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/performancetest/AbstractSourceFillDbWithTestData.java +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.standardtest.source.performancetest; - -import io.airbyte.cdk.db.Database; -import java.util.StringJoiner; -import java.util.stream.Stream; -import org.junit.jupiter.api.Disabled; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.MethodSource; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * This abstract class contains common methods for Fill Db scripts. - */ -public abstract class AbstractSourceFillDbWithTestData extends AbstractSourceBasePerformanceTest { - - private static final String CREATE_DB_TABLE_TEMPLATE = "CREATE TABLE %s.%s(id INTEGER PRIMARY KEY, %s)"; - private static final String INSERT_INTO_DB_TABLE_QUERY_TEMPLATE = "INSERT INTO %s.%s (%s) VALUES %s"; - private static final String TEST_DB_FIELD_TYPE = "varchar(10)"; - - protected static final Logger c = LoggerFactory.getLogger(AbstractSourceFillDbWithTestData.class); - private static final String TEST_VALUE_TEMPLATE_POSTGRES = "\'Value id_placeholder\'"; - - /** - * Setup the test database. All tables and data described in the registered tests will be put there. - * - * @return configured test database - * @throws Exception - might throw any exception during initialization. - */ - protected abstract Database setupDatabase(String dbName) throws Exception; - - /** - * The test added test data to a new DB. 1. Set DB creds in static variables above 2. Set desired - * number for streams, coolumns and records 3. Run the test - */ - @Disabled - @ParameterizedTest - @MethodSource("provideParameters") - public void addTestData(final String dbName, - final String schemaName, - final int numberOfDummyRecords, - final int numberOfBatches, - final int numberOfColumns, - final int numberOfStreams) - throws Exception { - - final Database database = setupDatabase(dbName); - - database.query(ctx -> { - for (int currentSteamNumber = 0; currentSteamNumber < numberOfStreams; currentSteamNumber++) { - - final String currentTableName = String.format(getTestStreamNameTemplate(), currentSteamNumber); - - ctx.fetch(prepareCreateTableQuery(schemaName, numberOfColumns, currentTableName)); - for (int i = 0; i < numberOfBatches; i++) { - final String insertQueryTemplate = prepareInsertQueryTemplate(schemaName, i, - numberOfColumns, - numberOfDummyRecords); - ctx.fetch(String.format(insertQueryTemplate, currentTableName)); - } - - c.info("Finished processing for stream " + currentSteamNumber); - } - return null; - }); - } - - /** - * This is a data provider for fill DB script,, Each argument's group would be ran as a separate - * test. Set the "testArgs" in test class of your DB in @BeforeTest method. - * - * 1st arg - a name of DB that will be used in jdbc connection string. 2nd arg - a schemaName that - * will be ised as a NameSpace in Configured Airbyte Catalog. 3rd arg - a number of expected records - * retrieved in each stream. 4th arg - a number of columns in each stream\table that will be use for - * Airbyte Cataloq configuration 5th arg - a number of streams to read in configured airbyte - * Catalog. Each stream\table in DB should be names like "test_0", "test_1",..., test_n. - * - * Stream.of( Arguments.of("your_db_name", "your_schema_name", 100, 2, 240, 1000) ); - */ - protected abstract Stream provideParameters(); - - protected String prepareCreateTableQuery(final String dbSchemaName, - final int numberOfColumns, - final String currentTableName) { - - final StringJoiner sj = new StringJoiner(","); - for (int i = 0; i < numberOfColumns; i++) { - sj.add(String.format(" %s%s %s", getTestColumnName(), i, TEST_DB_FIELD_TYPE)); - } - - return String.format(CREATE_DB_TABLE_TEMPLATE, dbSchemaName, currentTableName, sj.toString()); - } - - protected String prepareInsertQueryTemplate(final String dbSchemaName, - final int batchNumber, - final int numberOfColumns, - final int recordsNumber) { - - final StringJoiner fieldsNames = new StringJoiner(","); - fieldsNames.add("id"); - - final StringJoiner baseInsertQuery = new StringJoiner(","); - baseInsertQuery.add("id_placeholder"); - - for (int i = 0; i < numberOfColumns; i++) { - fieldsNames.add(getTestColumnName() + i); - baseInsertQuery.add(TEST_VALUE_TEMPLATE_POSTGRES); - } - - final StringJoiner insertGroupValuesJoiner = new StringJoiner(","); - - final int batchMessages = batchNumber * 100; - - for (int currentRecordNumber = batchMessages; - currentRecordNumber < recordsNumber + batchMessages; - currentRecordNumber++) { - insertGroupValuesJoiner - .add("(" + baseInsertQuery.toString() - .replaceAll("id_placeholder", String.valueOf(currentRecordNumber)) + ")"); - } - - return String - .format(INSERT_INTO_DB_TABLE_QUERY_TEMPLATE, dbSchemaName, "%s", fieldsNames.toString(), - insertGroupValuesJoiner.toString()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/performancetest/AbstractSourcePerformanceTest.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/performancetest/AbstractSourcePerformanceTest.java deleted file mode 100644 index c4279364c5ad9..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/performancetest/AbstractSourcePerformanceTest.java +++ /dev/null @@ -1,170 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.standardtest.source.performancetest; - -import static org.junit.jupiter.api.Assertions.fail; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.Lists; -import io.airbyte.cdk.integrations.standardtest.source.TestDestinationEnv; -import io.airbyte.protocol.models.Field; -import io.airbyte.protocol.models.JsonSchemaType; -import io.airbyte.protocol.models.v0.AirbyteStream; -import io.airbyte.protocol.models.v0.CatalogHelpers; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.v0.DestinationSyncMode; -import io.airbyte.protocol.models.v0.SyncMode; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.MethodSource; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * This abstract class contains common methods for Performance tests. - */ -public abstract class AbstractSourcePerformanceTest extends AbstractSourceBasePerformanceTest { - - protected static final Logger c = LoggerFactory.getLogger(AbstractSourcePerformanceTest.class); - private static final String ID_COLUMN_NAME = "id"; - protected JsonNode config; - - /** - * Setup the test database. All tables and data described in the registered tests will be put there. - * - * @throws Exception - might throw any exception during initialization. - */ - protected abstract void setupDatabase(String dbName) throws Exception; - - @Override - protected JsonNode getConfig() { - return config; - } - - @Override - protected void tearDown(final TestDestinationEnv testEnv) {} - - /** - * This is a data provider for performance tests, Each argument's group would be ran as a separate - * test. Set the "testArgs" in test class of your DB in @BeforeTest method. - * - * 1st arg - a name of DB that will be used in jdbc connection string. 2nd arg - a schemaName that - * will be ised as a NameSpace in Configured Airbyte Catalog. 3rd arg - a number of expected records - * retrieved in each stream. 4th arg - a number of columns in each stream\table that will be used - * for Airbyte Cataloq configuration 5th arg - a number of streams to read in configured airbyte - * Catalog. Each stream\table in DB should be names like "test_0", "test_1",..., test_n. - * - * Example: Stream.of( Arguments.of("test1000tables240columns200recordsDb", "dbo", 200, 240, 1000), - * Arguments.of("test5000tables240columns200recordsDb", "dbo", 200, 240, 1000), - * Arguments.of("newregular25tables50000records", "dbo", 50052, 8, 25), - * Arguments.of("newsmall1000tableswith10000rows", "dbo", 10011, 8, 1000) ); - */ - protected abstract Stream provideParameters(); - - @ParameterizedTest - @MethodSource("provideParameters") - public void testPerformance(final String dbName, - final String schemaName, - final int numberOfDummyRecords, - final int numberOfColumns, - final int numberOfStreams) - throws Exception { - - setupDatabase(dbName); - - final ConfiguredAirbyteCatalog catalog = getConfiguredCatalog(schemaName, numberOfStreams, - numberOfColumns); - final Map mapOfExpectedRecordsCount = prepareMapWithExpectedRecords( - numberOfStreams, numberOfDummyRecords); - final Map checkStatusMap = runReadVerifyNumberOfReceivedMsgs(catalog, null, - mapOfExpectedRecordsCount); - validateNumberOfReceivedMsgs(checkStatusMap); - - } - - /** - * The column name will be used for a PK column in the test tables. Override it if default name is - * not valid for your source. - * - * @return Id column name - */ - protected String getIdColumnName() { - return ID_COLUMN_NAME; - } - - protected void validateNumberOfReceivedMsgs(final Map checkStatusMap) { - // Iterate through all streams map and check for streams where - final Map failedStreamsMap = checkStatusMap.entrySet().stream() - .filter(el -> el.getValue() != 0).collect(Collectors.toMap(Entry::getKey, Entry::getValue)); - - if (!failedStreamsMap.isEmpty()) { - fail("Non all messages were delivered. " + failedStreamsMap.toString()); - } - c.info("Finished all checks, no issues found for {} of streams", checkStatusMap.size()); - } - - protected Map prepareMapWithExpectedRecords(final int streamNumber, - final int expectedRecordsNumberInEachStream) { - final Map resultMap = new HashMap<>(); // streamName&expected records in stream - - for (int currentStream = 0; currentStream < streamNumber; currentStream++) { - final String streamName = String.format(getTestStreamNameTemplate(), currentStream); - resultMap.put(streamName, expectedRecordsNumberInEachStream); - } - return resultMap; - } - - /** - * Configures streams for all registered data type tests. - * - * @return configured catalog - */ - protected ConfiguredAirbyteCatalog getConfiguredCatalog(final String nameSpace, - final int numberOfStreams, - final int numberOfColumns) { - final List streams = new ArrayList<>(); - - for (int currentStream = 0; currentStream < numberOfStreams; currentStream++) { - - // CREATE TABLE test.test_1_int(id INTEGER PRIMARY KEY, test_column int) - final List fields = new ArrayList<>(); - - fields.add(Field.of(getIdColumnName(), JsonSchemaType.NUMBER)); - for (int currentColumnNumber = 0; - currentColumnNumber < numberOfColumns; - currentColumnNumber++) { - fields.add(Field.of(getTestColumnName() + currentColumnNumber, JsonSchemaType.STRING)); - } - - final AirbyteStream airbyteStream = CatalogHelpers - .createAirbyteStream(String.format(getTestStreamNameTemplate(), currentStream), - nameSpace, fields) - .withSourceDefinedCursor(true) - .withSourceDefinedPrimaryKey(List.of(List.of(getIdColumnName()))) - .withSupportedSyncModes( - Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)); - - final ConfiguredAirbyteStream configuredAirbyteStream = new ConfiguredAirbyteStream() - .withSyncMode(SyncMode.INCREMENTAL) - .withCursorField(Lists.newArrayList(getIdColumnName())) - .withDestinationSyncMode(DestinationSyncMode.APPEND) - .withStream(airbyteStream); - - streams.add(configuredAirbyteStream); - - } - - return new ConfiguredAirbyteCatalog().withStreams(streams); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/debezium/CdcSourceTest.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/debezium/CdcSourceTest.kt new file mode 100644 index 0000000000000..383cc6fcb15bb --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/debezium/CdcSourceTest.kt @@ -0,0 +1,1109 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.debezium + +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.node.ObjectNode +import com.google.common.collect.* +import io.airbyte.cdk.integrations.base.Source +import io.airbyte.cdk.testutils.TestDatabase +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.util.AutoCloseableIterators +import io.airbyte.protocol.models.Field +import io.airbyte.protocol.models.JsonSchemaType +import io.airbyte.protocol.models.v0.* +import java.util.* +import java.util.function.Consumer +import java.util.stream.Collectors +import java.util.stream.Stream +import org.junit.jupiter.api.AfterEach +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +abstract class CdcSourceTest> { + @JvmField protected var testdb: T = createTestDatabase() + + protected open fun createTableSqlFmt(): String { + return "CREATE TABLE %s.%s(%s);" + } + + protected open fun createSchemaSqlFmt(): String { + return "CREATE SCHEMA %s;" + } + + protected open fun modelsSchema(): String { + return "models_schema" + } + + /** The schema of a random table which is used as a new table in snapshot test */ + protected open fun randomSchema(): String { + return "models_schema_random" + } + + protected val catalog: AirbyteCatalog + get() = + AirbyteCatalog() + .withStreams( + java.util.List.of( + CatalogHelpers.createAirbyteStream( + MODELS_STREAM_NAME, + modelsSchema(), + Field.of(COL_ID, JsonSchemaType.INTEGER), + Field.of(COL_MAKE_ID, JsonSchemaType.INTEGER), + Field.of(COL_MODEL, JsonSchemaType.STRING) + ) + .withSupportedSyncModes( + Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL) + ) + .withSourceDefinedPrimaryKey( + java.util.List.of(java.util.List.of(COL_ID)) + ) + ) + ) + + protected val configuredCatalog: ConfiguredAirbyteCatalog + get() { + val configuredCatalog = CatalogHelpers.toDefaultConfiguredCatalog(catalog) + configuredCatalog.streams.forEach( + Consumer { s: ConfiguredAirbyteStream -> s.syncMode = SyncMode.INCREMENTAL } + ) + return configuredCatalog + } + + protected abstract fun createTestDatabase(): T + + protected abstract fun source(): S + + protected abstract fun config(): JsonNode? + + protected abstract fun cdcLatestTargetPosition(): CdcTargetPosition<*> + + protected abstract fun extractPosition(record: JsonNode?): CdcTargetPosition<*>? + + protected abstract fun assertNullCdcMetaData(data: JsonNode?) + + protected abstract fun assertCdcMetaData(data: JsonNode?, deletedAtNull: Boolean) + + protected abstract fun removeCDCColumns(data: ObjectNode?) + + protected abstract fun addCdcMetadataColumns(stream: AirbyteStream?) + + protected abstract fun addCdcDefaultCursorField(stream: AirbyteStream?) + + protected abstract fun assertExpectedStateMessages(stateMessages: List) + + // TODO: this assertion should be added into test cases in this class, we will need to implement + // corresponding iterator for other connectors before + // doing so. + protected open fun assertExpectedStateMessageCountMatches( + stateMessages: List, + totalCount: Long + ) { + // Do nothing. + } + + @BeforeEach + protected open fun setup() { + testdb = createTestDatabase() + createTables() + populateTables() + } + + protected fun createTables() { + // create and populate actual table + val actualColumns = + ImmutableMap.of(COL_ID, "INTEGER", COL_MAKE_ID, "INTEGER", COL_MODEL, "VARCHAR(200)") + testdb + .with(createSchemaSqlFmt(), modelsSchema()) + .with( + createTableSqlFmt(), + modelsSchema(), + MODELS_STREAM_NAME, + columnClause(actualColumns, Optional.of(COL_ID)) + ) + + // Create random table. + // This table is not part of Airbyte sync. It is being created just to make sure the schemas + // not + // being synced by Airbyte are not causing issues with our debezium logic. + val randomColumns = + ImmutableMap.of( + COL_ID + "_random", + "INTEGER", + COL_MAKE_ID + "_random", + "INTEGER", + COL_MODEL + "_random", + "VARCHAR(200)" + ) + if (randomSchema() != modelsSchema()) { + testdb!!.with(createSchemaSqlFmt(), randomSchema()) + } + testdb!!.with( + createTableSqlFmt(), + randomSchema(), + RANDOM_TABLE_NAME, + columnClause(randomColumns, Optional.of(COL_ID + "_random")) + ) + } + + protected fun populateTables() { + for (recordJson in MODEL_RECORDS) { + writeModelRecord(recordJson) + } + + for (recordJson in MODEL_RECORDS_RANDOM) { + writeRecords( + recordJson, + randomSchema(), + RANDOM_TABLE_NAME, + COL_ID + "_random", + COL_MAKE_ID + "_random", + COL_MODEL + "_random" + ) + } + } + + @AfterEach + protected open fun tearDown() { + try { + testdb!!.close() + } catch (e: Throwable) { + LOGGER.error("exception during teardown", e) + } + } + + protected open fun columnClause( + columnsWithDataType: Map, + primaryKey: Optional + ): String { + val columnClause = StringBuilder() + var i = 0 + for ((key, value) in columnsWithDataType) { + columnClause.append(key) + columnClause.append(" ") + columnClause.append(value) + if (i < (columnsWithDataType.size - 1)) { + columnClause.append(",") + columnClause.append(" ") + } + i++ + } + primaryKey.ifPresent { s: String? -> + columnClause.append(", PRIMARY KEY (").append(s).append(")") + } + + return columnClause.toString() + } + + protected fun writeModelRecord(recordJson: JsonNode) { + writeRecords(recordJson, modelsSchema(), MODELS_STREAM_NAME, COL_ID, COL_MAKE_ID, COL_MODEL) + } + + protected open fun writeRecords( + recordJson: JsonNode, + dbName: String?, + streamName: String?, + idCol: String?, + makeIdCol: String?, + modelCol: String? + ) { + testdb!!.with( + "INSERT INTO %s.%s (%s, %s, %s) VALUES (%s, %s, '%s');", + dbName, + streamName, + idCol, + makeIdCol, + modelCol, + recordJson[idCol].asInt(), + recordJson[makeIdCol].asInt(), + recordJson[modelCol].asText() + ) + } + + protected open fun deleteMessageOnIdCol(streamName: String?, idCol: String?, idValue: Int) { + testdb!!.with("DELETE FROM %s.%s WHERE %s = %s", modelsSchema(), streamName, idCol, idValue) + } + + protected open fun deleteCommand(streamName: String?) { + testdb!!.with("DELETE FROM %s.%s", modelsSchema(), streamName) + } + + protected open fun updateCommand( + streamName: String?, + modelCol: String?, + modelVal: String?, + idCol: String?, + idValue: Int + ) { + testdb!!.with( + "UPDATE %s.%s SET %s = '%s' WHERE %s = %s", + modelsSchema(), + streamName, + modelCol, + modelVal, + COL_ID, + 11 + ) + } + + protected fun extractRecordMessages(messages: List): Set { + val recordsPerStream = extractRecordMessagesStreamWise(messages) + val consolidatedRecords: MutableSet = HashSet() + recordsPerStream.values.forEach( + Consumer { c: Set? -> consolidatedRecords.addAll(c!!) } + ) + return consolidatedRecords + } + + protected fun extractRecordMessagesStreamWise( + messages: List + ): Map> { + val recordsPerStream: MutableMap> = HashMap() + for (message in messages) { + if (message.type == AirbyteMessage.Type.RECORD) { + val recordMessage = message.record + recordsPerStream + .computeIfAbsent(recordMessage.stream) { c: String? -> ArrayList() } + .add(recordMessage) + } + } + + val recordsPerStreamWithNoDuplicates: MutableMap> = + HashMap() + for ((streamName, records) in recordsPerStream) { + val recordMessageSet: Set = HashSet(records) + Assertions.assertEquals( + records.size, + recordMessageSet.size, + "Expected no duplicates in airbyte record message output for a single sync." + ) + recordsPerStreamWithNoDuplicates[streamName] = recordMessageSet + } + + return recordsPerStreamWithNoDuplicates + } + + protected fun extractStateMessages(messages: List): List { + return messages + .stream() + .filter { r: AirbyteMessage -> r.type == AirbyteMessage.Type.STATE } + .map { obj: AirbyteMessage -> obj.state } + .collect(Collectors.toList()) + } + + protected fun assertExpectedRecords( + expectedRecords: Set, + actualRecords: Set + ) { + // assume all streams are cdc. + assertExpectedRecords( + expectedRecords, + actualRecords, + actualRecords + .stream() + .map { obj: AirbyteRecordMessage -> obj.stream } + .collect(Collectors.toSet()) + ) + } + + private fun assertExpectedRecords( + expectedRecords: Set, + actualRecords: Set, + cdcStreams: Set + ) { + assertExpectedRecords( + expectedRecords, + actualRecords, + cdcStreams, + STREAM_NAMES, + modelsSchema() + ) + } + + protected fun assertExpectedRecords( + expectedRecords: Set?, + actualRecords: Set, + cdcStreams: Set, + streamNames: Set, + namespace: String? + ) { + val actualData = + actualRecords + .stream() + .map { recordMessage: AirbyteRecordMessage -> + Assertions.assertTrue(streamNames.contains(recordMessage.stream)) + Assertions.assertNotNull(recordMessage.emittedAt) + + Assertions.assertEquals(namespace, recordMessage.namespace) + + val data = recordMessage.data + + if (cdcStreams.contains(recordMessage.stream)) { + assertCdcMetaData(data, true) + } else { + assertNullCdcMetaData(data) + } + + removeCDCColumns(data as ObjectNode) + data + } + .collect(Collectors.toSet()) + + Assertions.assertEquals(expectedRecords, actualData) + } + + @Test + @Throws(Exception::class) + fun testExistingData() { + val targetPosition = cdcLatestTargetPosition() + val read = source()!!.read(config()!!, configuredCatalog, null) + val actualRecords = AutoCloseableIterators.toListAndClose(read) + + val recordMessages = extractRecordMessages(actualRecords) + val stateMessages = extractStateMessages(actualRecords) + + Assertions.assertNotNull(targetPosition) + recordMessages.forEach( + Consumer { record: AirbyteRecordMessage -> + compareTargetPositionFromTheRecordsWithTargetPostionGeneratedBeforeSync( + targetPosition, + record + ) + } + ) + + assertExpectedRecords(HashSet(MODEL_RECORDS), recordMessages) + assertExpectedStateMessages(stateMessages) + assertExpectedStateMessageCountMatches(stateMessages, MODEL_RECORDS.size.toLong()) + } + + protected open fun compareTargetPositionFromTheRecordsWithTargetPostionGeneratedBeforeSync( + targetPosition: CdcTargetPosition<*>?, + record: AirbyteRecordMessage + ) { + Assertions.assertEquals(extractPosition(record.data), targetPosition) + } + + @Test // When a record is deleted, produces a deletion record. + @Throws(Exception::class) + fun testDelete() { + val read1 = source().read(config()!!, configuredCatalog, null) + val actualRecords1 = AutoCloseableIterators.toListAndClose(read1) + val stateMessages1 = extractStateMessages(actualRecords1) + assertExpectedStateMessages(stateMessages1) + + deleteMessageOnIdCol(MODELS_STREAM_NAME, COL_ID, 11) + waitForCdcRecords(modelsSchema(), MODELS_STREAM_NAME, 1) + + val state = Jsons.jsonNode(listOf(stateMessages1[stateMessages1.size - 1])) + val read2 = source().read(config()!!, configuredCatalog, state) + val actualRecords2 = AutoCloseableIterators.toListAndClose(read2) + val recordMessages2: List = + ArrayList(extractRecordMessages(actualRecords2)) + val stateMessages2 = extractStateMessages(actualRecords2) + assertExpectedStateMessagesFromIncrementalSync(stateMessages2) + assertExpectedStateMessageCountMatches(stateMessages2, 1) + Assertions.assertEquals(1, recordMessages2.size) + Assertions.assertEquals(11, recordMessages2[0].data[COL_ID].asInt()) + assertCdcMetaData(recordMessages2[0].data, false) + } + + protected open fun assertExpectedStateMessagesFromIncrementalSync( + stateMessages: List + ) { + assertExpectedStateMessages(stateMessages) + } + + @Test // When a record is updated, produces an update record. + @Throws(Exception::class) + fun testUpdate() { + val updatedModel = "Explorer" + val read1 = source().read(config()!!, configuredCatalog, null) + val actualRecords1 = AutoCloseableIterators.toListAndClose(read1) + val stateMessages1 = extractStateMessages(actualRecords1) + assertExpectedStateMessages(stateMessages1) + + updateCommand(MODELS_STREAM_NAME, COL_MODEL, updatedModel, COL_ID, 11) + waitForCdcRecords(modelsSchema(), MODELS_STREAM_NAME, 1) + + val state = Jsons.jsonNode(listOf(stateMessages1[stateMessages1.size - 1])) + val read2 = source().read(config()!!, configuredCatalog, state) + val actualRecords2 = AutoCloseableIterators.toListAndClose(read2) + val recordMessages2: List = + ArrayList(extractRecordMessages(actualRecords2)) + val stateMessages2 = extractStateMessages(actualRecords2) + assertExpectedStateMessagesFromIncrementalSync(stateMessages2) + Assertions.assertEquals(1, recordMessages2.size) + Assertions.assertEquals(11, recordMessages2[0].data[COL_ID].asInt()) + Assertions.assertEquals(updatedModel, recordMessages2[0].data[COL_MODEL].asText()) + assertCdcMetaData(recordMessages2[0].data, true) + assertExpectedStateMessageCountMatches(stateMessages2, 1) + } + + @Test // Verify that when data is inserted into the database while a sync is happening and after + // the first + // sync, it all gets replicated. + @Throws(Exception::class) + protected fun testRecordsProducedDuringAndAfterSync() { + val recordsCreatedBeforeTestCount = MODEL_RECORDS.size + var expectedRecords = recordsCreatedBeforeTestCount + var expectedRecordsInCdc = 0 + val recordsToCreate = 20 + // first batch of records. 20 created here and 6 created in setup method. + for (recordsCreated in 0 until recordsToCreate) { + val record = + Jsons.jsonNode( + ImmutableMap.of( + COL_ID, + 100 + recordsCreated, + COL_MAKE_ID, + 1, + COL_MODEL, + "F-$recordsCreated" + ) + ) + writeModelRecord(record) + expectedRecords++ + expectedRecordsInCdc++ + } + waitForCdcRecords(modelsSchema(), MODELS_STREAM_NAME, expectedRecordsInCdc) + + val firstBatchIterator = source().read(config()!!, configuredCatalog, null) + val dataFromFirstBatch = AutoCloseableIterators.toListAndClose(firstBatchIterator) + val stateAfterFirstBatch = extractStateMessages(dataFromFirstBatch) + assertExpectedStateMessagesForRecordsProducedDuringAndAfterSync(stateAfterFirstBatch) + val recordsFromFirstBatch = extractRecordMessages(dataFromFirstBatch) + Assertions.assertEquals(expectedRecords, recordsFromFirstBatch.size) + + // second batch of records again 20 being created + for (recordsCreated in 0 until recordsToCreate) { + val record = + Jsons.jsonNode( + ImmutableMap.of( + COL_ID, + 200 + recordsCreated, + COL_MAKE_ID, + 1, + COL_MODEL, + "F-$recordsCreated" + ) + ) + writeModelRecord(record) + expectedRecords++ + expectedRecordsInCdc++ + } + waitForCdcRecords(modelsSchema(), MODELS_STREAM_NAME, expectedRecordsInCdc) + + val state = Jsons.jsonNode(listOf(stateAfterFirstBatch[stateAfterFirstBatch.size - 1])) + val secondBatchIterator = source().read(config()!!, configuredCatalog, state) + val dataFromSecondBatch = AutoCloseableIterators.toListAndClose(secondBatchIterator) + + val stateAfterSecondBatch = extractStateMessages(dataFromSecondBatch) + assertExpectedStateMessagesFromIncrementalSync(stateAfterSecondBatch) + + val recordsFromSecondBatch = extractRecordMessages(dataFromSecondBatch) + Assertions.assertEquals( + recordsToCreate, + recordsFromSecondBatch.size, + "Expected 20 records to be replicated in the second sync." + ) + + // sometimes there can be more than one of these at the end of the snapshot and just before + // the + // first incremental. + val recordsFromFirstBatchWithoutDuplicates = removeDuplicates(recordsFromFirstBatch) + val recordsFromSecondBatchWithoutDuplicates = removeDuplicates(recordsFromSecondBatch) + + Assertions.assertTrue( + recordsCreatedBeforeTestCount < recordsFromFirstBatchWithoutDuplicates.size, + "Expected first sync to include records created while the test was running." + ) + Assertions.assertEquals( + expectedRecords, + recordsFromFirstBatchWithoutDuplicates.size + + recordsFromSecondBatchWithoutDuplicates.size + ) + } + + protected open fun assertExpectedStateMessagesForRecordsProducedDuringAndAfterSync( + stateAfterFirstBatch: List + ) { + assertExpectedStateMessages(stateAfterFirstBatch) + } + + @Test // When both incremental CDC and full refresh are configured for different streams in a + // sync, the + // data is replicated as expected. + @Throws(Exception::class) + fun testCdcAndFullRefreshInSameSync() { + val configuredCatalog = Jsons.clone(configuredCatalog) + + val MODEL_RECORDS_2: List = + ImmutableList.of( + Jsons.jsonNode(ImmutableMap.of(COL_ID, 110, COL_MAKE_ID, 1, COL_MODEL, "Fiesta-2")), + Jsons.jsonNode(ImmutableMap.of(COL_ID, 120, COL_MAKE_ID, 1, COL_MODEL, "Focus-2")), + Jsons.jsonNode(ImmutableMap.of(COL_ID, 130, COL_MAKE_ID, 1, COL_MODEL, "Ranger-2")), + Jsons.jsonNode(ImmutableMap.of(COL_ID, 140, COL_MAKE_ID, 2, COL_MODEL, "GLA-2")), + Jsons.jsonNode(ImmutableMap.of(COL_ID, 150, COL_MAKE_ID, 2, COL_MODEL, "A 220-2")), + Jsons.jsonNode(ImmutableMap.of(COL_ID, 160, COL_MAKE_ID, 2, COL_MODEL, "E 350-2")) + ) + + val columns = + ImmutableMap.of(COL_ID, "INTEGER", COL_MAKE_ID, "INTEGER", COL_MODEL, "VARCHAR(200)") + testdb!!.with( + createTableSqlFmt(), + modelsSchema(), + MODELS_STREAM_NAME + "_2", + columnClause(columns, Optional.of(COL_ID)) + ) + + for (recordJson in MODEL_RECORDS_2) { + writeRecords( + recordJson, + modelsSchema(), + MODELS_STREAM_NAME + "_2", + COL_ID, + COL_MAKE_ID, + COL_MODEL + ) + } + + val airbyteStream = + ConfiguredAirbyteStream() + .withStream( + CatalogHelpers.createAirbyteStream( + MODELS_STREAM_NAME + "_2", + modelsSchema(), + Field.of(COL_ID, JsonSchemaType.INTEGER), + Field.of(COL_MAKE_ID, JsonSchemaType.INTEGER), + Field.of(COL_MODEL, JsonSchemaType.STRING) + ) + .withSupportedSyncModes( + Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL) + ) + .withSourceDefinedPrimaryKey(java.util.List.of(java.util.List.of(COL_ID))) + ) + airbyteStream.syncMode = SyncMode.FULL_REFRESH + + val streams = configuredCatalog.streams + streams.add(airbyteStream) + configuredCatalog.withStreams(streams) + + val read1 = source().read(config()!!, configuredCatalog, null) + val actualRecords1 = AutoCloseableIterators.toListAndClose(read1) + + val recordMessages1 = extractRecordMessages(actualRecords1) + val stateMessages1 = extractStateMessages(actualRecords1) + val names = HashSet(STREAM_NAMES) + names.add(MODELS_STREAM_NAME + "_2") + assertExpectedStateMessages(stateMessages1) + // Full refresh does not get any state messages. + assertExpectedStateMessageCountMatches(stateMessages1, MODEL_RECORDS_2.size.toLong()) + assertExpectedRecords( + Streams.concat(MODEL_RECORDS_2.stream(), MODEL_RECORDS.stream()) + .collect(Collectors.toSet()), + recordMessages1, + setOf(MODELS_STREAM_NAME), + names, + modelsSchema() + ) + + val puntoRecord = + Jsons.jsonNode(ImmutableMap.of(COL_ID, 100, COL_MAKE_ID, 3, COL_MODEL, "Punto")) + writeModelRecord(puntoRecord) + waitForCdcRecords(modelsSchema(), MODELS_STREAM_NAME, 1) + + val state = Jsons.jsonNode(listOf(stateMessages1[stateMessages1.size - 1])) + val read2 = source().read(config()!!, configuredCatalog, state) + val actualRecords2 = AutoCloseableIterators.toListAndClose(read2) + + val recordMessages2 = extractRecordMessages(actualRecords2) + val stateMessages2 = extractStateMessages(actualRecords2) + assertExpectedStateMessagesFromIncrementalSync(stateMessages2) + assertExpectedStateMessageCountMatches(stateMessages2, 1) + assertExpectedRecords( + Streams.concat(MODEL_RECORDS_2.stream(), Stream.of(puntoRecord)) + .collect(Collectors.toSet()), + recordMessages2, + setOf(MODELS_STREAM_NAME), + names, + modelsSchema() + ) + } + + @Test // When no records exist, no records are returned. + @Throws(Exception::class) + fun testNoData() { + deleteCommand(MODELS_STREAM_NAME) + waitForCdcRecords(modelsSchema(), MODELS_STREAM_NAME, MODEL_RECORDS.size) + val read = source()!!.read(config()!!, configuredCatalog, null) + val actualRecords = AutoCloseableIterators.toListAndClose(read) + + val recordMessages = extractRecordMessages(actualRecords) + val stateMessages = extractStateMessages(actualRecords) + assertExpectedRecords(emptySet(), recordMessages) + assertExpectedStateMessagesForNoData(stateMessages) + assertExpectedStateMessageCountMatches(stateMessages, 0) + } + + protected open fun assertExpectedStateMessagesForNoData( + stateMessages: List + ) { + assertExpectedStateMessages(stateMessages) + } + + @Test // When no changes have been made to the database since the previous sync, no records are + // returned. + @Throws(Exception::class) + fun testNoDataOnSecondSync() { + val read1 = source().read(config()!!, configuredCatalog, null) + val actualRecords1 = AutoCloseableIterators.toListAndClose(read1) + val stateMessagesFromFirstSync = extractStateMessages(actualRecords1) + val state = + Jsons.jsonNode(listOf(stateMessagesFromFirstSync[stateMessagesFromFirstSync.size - 1])) + + val read2 = source().read(config()!!, configuredCatalog, state) + val actualRecords2 = AutoCloseableIterators.toListAndClose(read2) + + val recordMessages2 = extractRecordMessages(actualRecords2) + val stateMessages2 = extractStateMessages(actualRecords2) + + assertExpectedRecords(emptySet(), recordMessages2) + assertExpectedStateMessagesFromIncrementalSync(stateMessages2) + assertExpectedStateMessageCountMatches(stateMessages2, 0) + } + + @Test + @Throws(Exception::class) + fun testCheck() { + val status = source()!!.check(config()!!) + Assertions.assertEquals(status!!.status, AirbyteConnectionStatus.Status.SUCCEEDED) + } + + @Test + @Throws(Exception::class) + fun testDiscover() { + val expectedCatalog = expectedCatalogForDiscover() + val actualCatalog = source()!!.discover(config()!!) + + Assertions.assertEquals( + expectedCatalog.streams + .stream() + .sorted(Comparator.comparing { obj: AirbyteStream -> obj.name }) + .collect(Collectors.toList()), + actualCatalog!! + .streams + .stream() + .sorted(Comparator.comparing { obj: AirbyteStream -> obj.name }) + .collect(Collectors.toList()) + ) + } + + @Test + @Throws(Exception::class) + open fun newTableSnapshotTest() { + val firstBatchIterator = source().read(config()!!, configuredCatalog, null) + val dataFromFirstBatch = AutoCloseableIterators.toListAndClose(firstBatchIterator) + val recordsFromFirstBatch = extractRecordMessages(dataFromFirstBatch) + val stateAfterFirstBatch = extractStateMessages(dataFromFirstBatch) + assertExpectedStateMessages(stateAfterFirstBatch) + assertExpectedStateMessageCountMatches(stateAfterFirstBatch, MODEL_RECORDS.size.toLong()) + + val stateMessageEmittedAfterFirstSyncCompletion = + stateAfterFirstBatch[stateAfterFirstBatch.size - 1] + Assertions.assertEquals( + AirbyteStateMessage.AirbyteStateType.GLOBAL, + stateMessageEmittedAfterFirstSyncCompletion.type + ) + Assertions.assertNotNull(stateMessageEmittedAfterFirstSyncCompletion.global.sharedState) + val streamsInStateAfterFirstSyncCompletion = + stateMessageEmittedAfterFirstSyncCompletion.global.streamStates + .stream() + .map { obj: AirbyteStreamState -> obj.streamDescriptor } + .collect(Collectors.toSet()) + Assertions.assertEquals(1, streamsInStateAfterFirstSyncCompletion.size) + Assertions.assertTrue( + streamsInStateAfterFirstSyncCompletion.contains( + StreamDescriptor().withName(MODELS_STREAM_NAME).withNamespace(modelsSchema()) + ) + ) + Assertions.assertNotNull(stateMessageEmittedAfterFirstSyncCompletion.data) + + Assertions.assertEquals((MODEL_RECORDS.size), recordsFromFirstBatch.size) + assertExpectedRecords(HashSet(MODEL_RECORDS), recordsFromFirstBatch) + + val state = stateAfterFirstBatch[stateAfterFirstBatch.size - 1].data + + val newTables = + CatalogHelpers.toDefaultConfiguredCatalog( + AirbyteCatalog() + .withStreams( + java.util.List.of( + CatalogHelpers.createAirbyteStream( + RANDOM_TABLE_NAME, + randomSchema(), + Field.of(COL_ID + "_random", JsonSchemaType.NUMBER), + Field.of(COL_MAKE_ID + "_random", JsonSchemaType.NUMBER), + Field.of(COL_MODEL + "_random", JsonSchemaType.STRING) + ) + .withSupportedSyncModes( + Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL) + ) + .withSourceDefinedPrimaryKey( + java.util.List.of(java.util.List.of(COL_ID + "_random")) + ) + ) + ) + ) + + newTables.streams.forEach( + Consumer { s: ConfiguredAirbyteStream -> s.syncMode = SyncMode.INCREMENTAL } + ) + val combinedStreams: MutableList = ArrayList() + combinedStreams.addAll(configuredCatalog.streams) + combinedStreams.addAll(newTables.streams) + + val updatedCatalog = ConfiguredAirbyteCatalog().withStreams(combinedStreams) + + /* + * Write 20 records to the existing table + */ + val recordsWritten: MutableSet = HashSet() + for (recordsCreated in 0..19) { + val record = + Jsons.jsonNode( + ImmutableMap.of( + COL_ID, + 100 + recordsCreated, + COL_MAKE_ID, + 1, + COL_MODEL, + "F-$recordsCreated" + ) + ) + recordsWritten.add(record) + writeModelRecord(record) + } + + val secondBatchIterator = source().read(config()!!, updatedCatalog, state) + val dataFromSecondBatch = AutoCloseableIterators.toListAndClose(secondBatchIterator) + + val stateAfterSecondBatch = extractStateMessages(dataFromSecondBatch) + assertStateMessagesForNewTableSnapshotTest( + stateAfterSecondBatch, + stateMessageEmittedAfterFirstSyncCompletion + ) + + val recordsStreamWise = extractRecordMessagesStreamWise(dataFromSecondBatch) + Assertions.assertTrue(recordsStreamWise.containsKey(MODELS_STREAM_NAME)) + Assertions.assertTrue(recordsStreamWise.containsKey(RANDOM_TABLE_NAME)) + + val recordsForModelsStreamFromSecondBatch = recordsStreamWise[MODELS_STREAM_NAME]!! + val recordsForModelsRandomStreamFromSecondBatch = recordsStreamWise[RANDOM_TABLE_NAME]!! + + Assertions.assertEquals( + (MODEL_RECORDS_RANDOM.size), + recordsForModelsRandomStreamFromSecondBatch.size + ) + Assertions.assertEquals(20, recordsForModelsStreamFromSecondBatch.size) + assertExpectedRecords( + HashSet(MODEL_RECORDS_RANDOM), + recordsForModelsRandomStreamFromSecondBatch, + recordsForModelsRandomStreamFromSecondBatch + .stream() + .map { obj: AirbyteRecordMessage -> obj.stream } + .collect(Collectors.toSet()), + Sets.newHashSet(RANDOM_TABLE_NAME), + randomSchema() + ) + assertExpectedRecords(recordsWritten, recordsForModelsStreamFromSecondBatch) + + /* + * Write 20 records to both the tables + */ + val recordsWrittenInRandomTable: MutableSet = HashSet() + recordsWritten.clear() + for (recordsCreated in 30..49) { + val record = + Jsons.jsonNode( + ImmutableMap.of( + COL_ID, + 100 + recordsCreated, + COL_MAKE_ID, + 1, + COL_MODEL, + "F-$recordsCreated" + ) + ) + writeModelRecord(record) + recordsWritten.add(record) + + val record2 = + Jsons.jsonNode( + ImmutableMap.of( + COL_ID + "_random", + 11000 + recordsCreated, + COL_MAKE_ID + "_random", + 1 + recordsCreated, + COL_MODEL + "_random", + "Fiesta-random$recordsCreated" + ) + ) + writeRecords( + record2, + randomSchema(), + RANDOM_TABLE_NAME, + COL_ID + "_random", + COL_MAKE_ID + "_random", + COL_MODEL + "_random" + ) + recordsWrittenInRandomTable.add(record2) + } + + val state2 = stateAfterSecondBatch[stateAfterSecondBatch.size - 1].data + val thirdBatchIterator = source().read(config()!!, updatedCatalog, state2) + val dataFromThirdBatch = AutoCloseableIterators.toListAndClose(thirdBatchIterator) + + val stateAfterThirdBatch = extractStateMessages(dataFromThirdBatch) + Assertions.assertTrue(stateAfterThirdBatch.size >= 1) + + val stateMessageEmittedAfterThirdSyncCompletion = + stateAfterThirdBatch[stateAfterThirdBatch.size - 1] + Assertions.assertEquals( + AirbyteStateMessage.AirbyteStateType.GLOBAL, + stateMessageEmittedAfterThirdSyncCompletion.type + ) + Assertions.assertNotEquals( + stateMessageEmittedAfterThirdSyncCompletion.global.sharedState, + stateAfterSecondBatch[stateAfterSecondBatch.size - 1].global.sharedState + ) + val streamsInSyncCompletionStateAfterThirdSync = + stateMessageEmittedAfterThirdSyncCompletion.global.streamStates + .stream() + .map { obj: AirbyteStreamState -> obj.streamDescriptor } + .collect(Collectors.toSet()) + Assertions.assertTrue( + streamsInSyncCompletionStateAfterThirdSync.contains( + StreamDescriptor().withName(RANDOM_TABLE_NAME).withNamespace(randomSchema()) + ) + ) + Assertions.assertTrue( + streamsInSyncCompletionStateAfterThirdSync.contains( + StreamDescriptor().withName(MODELS_STREAM_NAME).withNamespace(modelsSchema()) + ) + ) + Assertions.assertNotNull(stateMessageEmittedAfterThirdSyncCompletion.data) + + val recordsStreamWiseFromThirdBatch = extractRecordMessagesStreamWise(dataFromThirdBatch) + Assertions.assertTrue(recordsStreamWiseFromThirdBatch.containsKey(MODELS_STREAM_NAME)) + Assertions.assertTrue(recordsStreamWiseFromThirdBatch.containsKey(RANDOM_TABLE_NAME)) + + val recordsForModelsStreamFromThirdBatch = + recordsStreamWiseFromThirdBatch[MODELS_STREAM_NAME]!! + val recordsForModelsRandomStreamFromThirdBatch = + recordsStreamWiseFromThirdBatch[RANDOM_TABLE_NAME]!! + + Assertions.assertEquals(20, recordsForModelsStreamFromThirdBatch.size) + Assertions.assertEquals(20, recordsForModelsRandomStreamFromThirdBatch.size) + assertExpectedRecords(recordsWritten, recordsForModelsStreamFromThirdBatch) + assertExpectedRecords( + recordsWrittenInRandomTable, + recordsForModelsRandomStreamFromThirdBatch, + recordsForModelsRandomStreamFromThirdBatch + .stream() + .map { obj: AirbyteRecordMessage -> obj.stream } + .collect(Collectors.toSet()), + Sets.newHashSet(RANDOM_TABLE_NAME), + randomSchema() + ) + } + + protected open fun assertStateMessagesForNewTableSnapshotTest( + stateMessages: List, + stateMessageEmittedAfterFirstSyncCompletion: AirbyteStateMessage + ) { + Assertions.assertEquals(2, stateMessages.size) + val stateMessageEmittedAfterSnapshotCompletionInSecondSync = stateMessages[0] + Assertions.assertEquals( + AirbyteStateMessage.AirbyteStateType.GLOBAL, + stateMessageEmittedAfterSnapshotCompletionInSecondSync.type + ) + Assertions.assertEquals( + stateMessageEmittedAfterFirstSyncCompletion.global.sharedState, + stateMessageEmittedAfterSnapshotCompletionInSecondSync.global.sharedState + ) + val streamsInSnapshotState = + stateMessageEmittedAfterSnapshotCompletionInSecondSync.global.streamStates + .stream() + .map { obj: AirbyteStreamState -> obj.streamDescriptor } + .collect(Collectors.toSet()) + Assertions.assertEquals(2, streamsInSnapshotState.size) + Assertions.assertTrue( + streamsInSnapshotState.contains( + StreamDescriptor().withName(RANDOM_TABLE_NAME).withNamespace(randomSchema()) + ) + ) + Assertions.assertTrue( + streamsInSnapshotState.contains( + StreamDescriptor().withName(MODELS_STREAM_NAME).withNamespace(modelsSchema()) + ) + ) + Assertions.assertNotNull(stateMessageEmittedAfterSnapshotCompletionInSecondSync.data) + + val stateMessageEmittedAfterSecondSyncCompletion = stateMessages[1] + Assertions.assertEquals( + AirbyteStateMessage.AirbyteStateType.GLOBAL, + stateMessageEmittedAfterSecondSyncCompletion.type + ) + Assertions.assertNotEquals( + stateMessageEmittedAfterFirstSyncCompletion.global.sharedState, + stateMessageEmittedAfterSecondSyncCompletion.global.sharedState + ) + val streamsInSyncCompletionState = + stateMessageEmittedAfterSecondSyncCompletion.global.streamStates + .stream() + .map { obj: AirbyteStreamState -> obj.streamDescriptor } + .collect(Collectors.toSet()) + Assertions.assertEquals(2, streamsInSnapshotState.size) + Assertions.assertTrue( + streamsInSyncCompletionState.contains( + StreamDescriptor().withName(RANDOM_TABLE_NAME).withNamespace(randomSchema()) + ) + ) + Assertions.assertTrue( + streamsInSyncCompletionState.contains( + StreamDescriptor().withName(MODELS_STREAM_NAME).withNamespace(modelsSchema()) + ) + ) + Assertions.assertNotNull(stateMessageEmittedAfterSecondSyncCompletion.data) + } + + protected fun expectedCatalogForDiscover(): AirbyteCatalog { + val expectedCatalog = Jsons.clone(catalog) + + val columns = + ImmutableMap.of(COL_ID, "INTEGER", COL_MAKE_ID, "INTEGER", COL_MODEL, "VARCHAR(200)") + testdb!!.with( + createTableSqlFmt(), + modelsSchema(), + MODELS_STREAM_NAME + "_2", + columnClause(columns, Optional.empty()) + ) + + val streams = expectedCatalog.streams + // stream with PK + streams[0].sourceDefinedCursor = true + addCdcMetadataColumns(streams[0]) + addCdcDefaultCursorField(streams[0]) + + val streamWithoutPK = + CatalogHelpers.createAirbyteStream( + MODELS_STREAM_NAME + "_2", + modelsSchema(), + Field.of(COL_ID, JsonSchemaType.INTEGER), + Field.of(COL_MAKE_ID, JsonSchemaType.INTEGER), + Field.of(COL_MODEL, JsonSchemaType.STRING) + ) + streamWithoutPK.sourceDefinedPrimaryKey = emptyList() + streamWithoutPK.supportedSyncModes = java.util.List.of(SyncMode.FULL_REFRESH) + addCdcDefaultCursorField(streamWithoutPK) + addCdcMetadataColumns(streamWithoutPK) + + val randomStream = + CatalogHelpers.createAirbyteStream( + RANDOM_TABLE_NAME, + randomSchema(), + Field.of(COL_ID + "_random", JsonSchemaType.INTEGER), + Field.of(COL_MAKE_ID + "_random", JsonSchemaType.INTEGER), + Field.of(COL_MODEL + "_random", JsonSchemaType.STRING) + ) + .withSourceDefinedCursor(true) + .withSupportedSyncModes( + Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL) + ) + .withSourceDefinedPrimaryKey( + java.util.List.of(java.util.List.of(COL_ID + "_random")) + ) + + addCdcDefaultCursorField(randomStream) + addCdcMetadataColumns(randomStream) + + streams.add(streamWithoutPK) + streams.add(randomStream) + expectedCatalog.withStreams(streams) + return expectedCatalog + } + + @Throws(Exception::class) + protected open fun waitForCdcRecords( + schemaName: String?, + tableName: String?, + recordCount: Int + ) {} + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(CdcSourceTest::class.java) + + const val MODELS_STREAM_NAME: String = "models" + @JvmField val STREAM_NAMES: Set = java.util.Set.of(MODELS_STREAM_NAME) + protected const val COL_ID: String = "id" + protected const val COL_MAKE_ID: String = "make_id" + protected const val COL_MODEL: String = "model" + + @JvmField + val MODEL_RECORDS: List = + ImmutableList.of( + Jsons.jsonNode(ImmutableMap.of(COL_ID, 11, COL_MAKE_ID, 1, COL_MODEL, "Fiesta")), + Jsons.jsonNode(ImmutableMap.of(COL_ID, 12, COL_MAKE_ID, 1, COL_MODEL, "Focus")), + Jsons.jsonNode(ImmutableMap.of(COL_ID, 13, COL_MAKE_ID, 1, COL_MODEL, "Ranger")), + Jsons.jsonNode(ImmutableMap.of(COL_ID, 14, COL_MAKE_ID, 2, COL_MODEL, "GLA")), + Jsons.jsonNode(ImmutableMap.of(COL_ID, 15, COL_MAKE_ID, 2, COL_MODEL, "A 220")), + Jsons.jsonNode(ImmutableMap.of(COL_ID, 16, COL_MAKE_ID, 2, COL_MODEL, "E 350")) + ) + + protected const val RANDOM_TABLE_NAME: String = MODELS_STREAM_NAME + "_random" + + @JvmField + val MODEL_RECORDS_RANDOM: List = + MODEL_RECORDS.stream() + .map { r: JsonNode -> + Jsons.jsonNode( + ImmutableMap.of( + COL_ID + "_random", + r[COL_ID].asInt() * 1000, + COL_MAKE_ID + "_random", + r[COL_MAKE_ID], + COL_MODEL + "_random", + r[COL_MODEL].asText() + "-random" + ) + ) + } + .toList() + + @JvmStatic + protected fun removeDuplicates( + messages: Set + ): Set { + val existingDataRecordsWithoutUpdated: MutableSet = HashSet() + val output: MutableSet = HashSet() + + for (message in messages) { + val node = message.data.deepCopy() + node.remove("_ab_cdc_updated_at") + + if (existingDataRecordsWithoutUpdated.contains(node)) { + LOGGER.info("Removing duplicate node: $node") + } else { + output.add(message) + existingDataRecordsWithoutUpdated.add(node) + } + } + + return output + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/debug/DebugUtil.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/debug/DebugUtil.kt new file mode 100644 index 0000000000000..e0d59e0a2fe4b --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/debug/DebugUtil.kt @@ -0,0 +1,68 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.debug + +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.ObjectMapper +import com.fasterxml.jackson.databind.node.ObjectNode +import io.airbyte.cdk.integrations.base.Source +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.resources.MoreResources +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.AirbyteStateMessage +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog + +/** + * Utility class defined to debug a source. Copy over any relevant configurations, catalogs & state + * in the resources/debug_resources directory. + */ +object DebugUtil { + @Suppress("deprecation") + @Throws(Exception::class) + @JvmStatic + fun debug(debugSource: Source) { + val debugConfig = config + val configuredAirbyteCatalog = catalog + var state = + try { + state + } catch (e: Exception) { + null + } + + debugSource.check(debugConfig) + debugSource.discover(debugConfig) + + val messageIterator = debugSource.read(debugConfig, configuredAirbyteCatalog, state) + messageIterator.forEachRemaining { message: AirbyteMessage? -> } + } + + @get:Throws(Exception::class) + private val config: JsonNode + get() { + val originalConfig = + ObjectMapper().readTree(MoreResources.readResource("debug_resources/config.json")) + val debugConfig: JsonNode = + (originalConfig.deepCopy() as ObjectNode).put("debug_mode", true) + return debugConfig + } + + @get:Throws(Exception::class) + private val catalog: ConfiguredAirbyteCatalog + get() { + val catalog = MoreResources.readResource("debug_resources/configured_catalog.json") + return Jsons.deserialize(catalog, ConfiguredAirbyteCatalog::class.java) + } + + @get:Throws(Exception::class) + private val state: JsonNode + get() { + val message = + Jsons.deserialize( + MoreResources.readResource("debug_resources/state.json"), + AirbyteStateMessage::class.java + ) + return Jsons.jsonNode(listOf(message)) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/source/jdbc/test/JdbcSourceAcceptanceTest.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/source/jdbc/test/JdbcSourceAcceptanceTest.kt new file mode 100644 index 0000000000000..74cb0cdc1f159 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/source/jdbc/test/JdbcSourceAcceptanceTest.kt @@ -0,0 +1,1671 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.jdbc.test + +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.node.ObjectNode +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings +import io.airbyte.cdk.db.factory.DatabaseDriver +import io.airbyte.cdk.db.jdbc.JdbcUtils +import io.airbyte.cdk.integrations.base.Source +import io.airbyte.cdk.integrations.source.relationaldb.RelationalDbQueryUtils +import io.airbyte.cdk.integrations.source.relationaldb.models.DbState +import io.airbyte.cdk.integrations.source.relationaldb.models.DbStreamState +import io.airbyte.cdk.testutils.TestDatabase +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.resources.MoreResources +import io.airbyte.commons.util.MoreIterators +import io.airbyte.protocol.models.Field +import io.airbyte.protocol.models.JsonSchemaType +import io.airbyte.protocol.models.v0.* +import java.math.BigDecimal +import java.sql.SQLException +import java.util.* +import java.util.function.Consumer +import java.util.stream.Collectors +import org.hamcrest.MatcherAssert +import org.hamcrest.Matchers +import org.junit.jupiter.api.AfterEach +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.mockito.Mockito + +/** Tests that should be run on all Sources that extend the AbstractJdbcSource. */ +@SuppressFBWarnings( + value = ["MS_SHOULD_BE_FINAL"], + justification = + "The static variables are updated in subclasses for convenience, and cannot be final." +) +abstract class JdbcSourceAcceptanceTest> { + @JvmField protected var testdb: T = createTestDatabase() + + protected fun streamName(): String { + return TABLE_NAME + } + + /** + * A valid configuration to connect to a test database. + * + * @return config + */ + protected abstract fun config(): JsonNode + + /** + * An instance of the source that should be tests. + * + * @return abstract jdbc source + */ + protected abstract fun source(): S + + /** + * Creates a TestDatabase instance to be used in [.setup]. + * + * @return TestDatabase instance to use for test case. + */ + protected abstract fun createTestDatabase(): T + + /** + * These tests write records without specifying a namespace (schema name). They will be written + * into whatever the default schema is for the database. When they are discovered they will be + * namespaced by the schema name (e.g. .). Thus the source + * needs to tell the tests what that default schema name is. If the database does not support + * schemas, then database name should used instead. + * + * @return name that will be used to namespace the record. + */ + protected abstract fun supportsSchemas(): Boolean + + protected fun createTableQuery( + tableName: String?, + columnClause: String?, + primaryKeyClause: String + ): String { + return String.format( + "CREATE TABLE %s(%s %s %s)", + tableName, + columnClause, + if (primaryKeyClause == "") "" else ",", + primaryKeyClause + ) + } + + protected fun primaryKeyClause(columns: List): String { + if (columns.isEmpty()) { + return "" + } + + val clause = StringBuilder() + clause.append("PRIMARY KEY (") + for (i in columns.indices) { + clause.append(columns[i]) + if (i != (columns.size - 1)) { + clause.append(",") + } + } + clause.append(")") + return clause.toString() + } + + @BeforeEach + @Throws(Exception::class) + open fun setup() { + testdb = createTestDatabase() + if (supportsSchemas()) { + createSchemas() + } + if (testdb!!.databaseDriver == DatabaseDriver.ORACLE) { + testdb!!.with("ALTER SESSION SET NLS_DATE_FORMAT = 'YYYY-MM-DD'") + } + testdb + .with( + createTableQuery( + getFullyQualifiedTableName(TABLE_NAME), + COLUMN_CLAUSE_WITH_PK, + primaryKeyClause(listOf("id")) + ) + ) + .with( + "INSERT INTO %s(id, name, updated_at) VALUES (1, 'picard', '2004-10-19')", + getFullyQualifiedTableName(TABLE_NAME) + ) + .with( + "INSERT INTO %s(id, name, updated_at) VALUES (2, 'crusher', '2005-10-19')", + getFullyQualifiedTableName(TABLE_NAME) + ) + .with( + "INSERT INTO %s(id, name, updated_at) VALUES (3, 'vash', '2006-10-19')", + getFullyQualifiedTableName(TABLE_NAME) + ) + .with( + createTableQuery( + getFullyQualifiedTableName(TABLE_NAME_WITHOUT_PK), + COLUMN_CLAUSE_WITHOUT_PK, + "" + ) + ) + .with( + "INSERT INTO %s(id, name, updated_at) VALUES (1, 'picard', '2004-10-19')", + getFullyQualifiedTableName(TABLE_NAME_WITHOUT_PK) + ) + .with( + "INSERT INTO %s(id, name, updated_at) VALUES (2, 'crusher', '2005-10-19')", + getFullyQualifiedTableName(TABLE_NAME_WITHOUT_PK) + ) + .with( + "INSERT INTO %s(id, name, updated_at) VALUES (3, 'vash', '2006-10-19')", + getFullyQualifiedTableName(TABLE_NAME_WITHOUT_PK) + ) + .with( + createTableQuery( + getFullyQualifiedTableName(TABLE_NAME_COMPOSITE_PK), + COLUMN_CLAUSE_WITH_COMPOSITE_PK, + primaryKeyClause(listOf("first_name", "last_name")) + ) + ) + .with( + "INSERT INTO %s(first_name, last_name, updated_at) VALUES ('first', 'picard', '2004-10-19')", + getFullyQualifiedTableName(TABLE_NAME_COMPOSITE_PK) + ) + .with( + "INSERT INTO %s(first_name, last_name, updated_at) VALUES ('second', 'crusher', '2005-10-19')", + getFullyQualifiedTableName(TABLE_NAME_COMPOSITE_PK) + ) + .with( + "INSERT INTO %s(first_name, last_name, updated_at) VALUES ('third', 'vash', '2006-10-19')", + getFullyQualifiedTableName(TABLE_NAME_COMPOSITE_PK) + ) + } + + protected open fun maybeSetShorterConnectionTimeout(config: JsonNode?) { + // Optionally implement this to speed up test cases which will result in a connection + // timeout. + } + + @AfterEach + fun tearDown() { + testdb!!.close() + } + + @Test + @Throws(Exception::class) + open fun testSpec() { + val actual = source()!!.spec() + val resourceString = MoreResources.readResource("spec.json") + val expected = Jsons.deserialize(resourceString, ConnectorSpecification::class.java) + + Assertions.assertEquals(expected, actual) + } + + @Test + @Throws(Exception::class) + fun testCheckSuccess() { + val actual = source()!!.check(config()) + val expected = + AirbyteConnectionStatus().withStatus(AirbyteConnectionStatus.Status.SUCCEEDED) + Assertions.assertEquals(expected, actual) + } + + @Test + @Throws(Exception::class) + protected fun testCheckFailure() { + val config = config() + maybeSetShorterConnectionTimeout(config) + (config as ObjectNode).put(JdbcUtils.PASSWORD_KEY, "fake") + val actual = source()!!.check(config) + Assertions.assertEquals(AirbyteConnectionStatus.Status.FAILED, actual!!.status) + } + + @Test + @Throws(Exception::class) + fun testDiscover() { + val actual = filterOutOtherSchemas(source()!!.discover(config())) + val expected = getCatalog(defaultNamespace) + Assertions.assertEquals(expected.streams.size, actual!!.streams.size) + actual.streams.forEach( + Consumer { actualStream: AirbyteStream -> + val expectedStream = + expected.streams + .stream() + .filter { stream: AirbyteStream -> + stream.namespace == actualStream.namespace && + stream.name == actualStream.name + } + .findAny() + Assertions.assertTrue( + expectedStream.isPresent, + String.format("Unexpected stream %s", actualStream.name) + ) + Assertions.assertEquals(expectedStream.get(), actualStream) + } + ) + } + + @Test + @Throws(Exception::class) + protected fun testDiscoverWithNonCursorFields() { + testdb!! + .with( + CREATE_TABLE_WITHOUT_CURSOR_TYPE_QUERY, + getFullyQualifiedTableName(TABLE_NAME_WITHOUT_CURSOR_TYPE), + COL_CURSOR + ) + .with( + INSERT_TABLE_WITHOUT_CURSOR_TYPE_QUERY, + getFullyQualifiedTableName(TABLE_NAME_WITHOUT_CURSOR_TYPE) + ) + val actual = filterOutOtherSchemas(source()!!.discover(config())) + val stream = + actual!! + .streams + .stream() + .filter { s: AirbyteStream -> + s.name.equals(TABLE_NAME_WITHOUT_CURSOR_TYPE, ignoreCase = true) + } + .findFirst() + .orElse(null) + Assertions.assertNotNull(stream) + Assertions.assertEquals( + TABLE_NAME_WITHOUT_CURSOR_TYPE.lowercase(Locale.getDefault()), + stream.name.lowercase(Locale.getDefault()) + ) + Assertions.assertEquals(1, stream.supportedSyncModes.size) + Assertions.assertEquals(SyncMode.FULL_REFRESH, stream.supportedSyncModes[0]) + } + + @Test + @Throws(Exception::class) + protected fun testDiscoverWithNullableCursorFields() { + testdb!! + .with( + CREATE_TABLE_WITH_NULLABLE_CURSOR_TYPE_QUERY, + getFullyQualifiedTableName(TABLE_NAME_WITH_NULLABLE_CURSOR_TYPE), + COL_CURSOR + ) + .with( + INSERT_TABLE_WITH_NULLABLE_CURSOR_TYPE_QUERY, + getFullyQualifiedTableName(TABLE_NAME_WITH_NULLABLE_CURSOR_TYPE) + ) + val actual = filterOutOtherSchemas(source()!!.discover(config())) + val stream = + actual!! + .streams + .stream() + .filter { s: AirbyteStream -> + s.name.equals(TABLE_NAME_WITH_NULLABLE_CURSOR_TYPE, ignoreCase = true) + } + .findFirst() + .orElse(null) + Assertions.assertNotNull(stream) + Assertions.assertEquals( + TABLE_NAME_WITH_NULLABLE_CURSOR_TYPE.lowercase(Locale.getDefault()), + stream.name.lowercase(Locale.getDefault()) + ) + Assertions.assertEquals(2, stream.supportedSyncModes.size) + Assertions.assertTrue(stream.supportedSyncModes.contains(SyncMode.FULL_REFRESH)) + Assertions.assertTrue(stream.supportedSyncModes.contains(SyncMode.INCREMENTAL)) + } + + protected fun filterOutOtherSchemas(catalog: AirbyteCatalog): AirbyteCatalog { + if (supportsSchemas()) { + val filteredCatalog = Jsons.clone(catalog) + filteredCatalog!!.streams = + filteredCatalog.streams + .stream() + .filter { stream: AirbyteStream -> + TEST_SCHEMAS.stream().anyMatch { schemaName: String -> + stream.namespace.startsWith(schemaName) + } + } + .collect(Collectors.toList()) + return filteredCatalog + } else { + return catalog + } + } + + @Test + @Throws(Exception::class) + protected fun testDiscoverWithMultipleSchemas() { + // clickhouse and mysql do not have a concept of schemas, so this test does not make sense + // for them. + when (testdb!!.databaseDriver) { + DatabaseDriver.MYSQL, + DatabaseDriver.CLICKHOUSE, + DatabaseDriver.TERADATA -> return + else -> {} + } + // add table and data to a separate schema. + testdb!! + .with( + "CREATE TABLE %s(id VARCHAR(200) NOT NULL, name VARCHAR(200) NOT NULL)", + RelationalDbQueryUtils.getFullyQualifiedTableName(SCHEMA_NAME2, TABLE_NAME) + ) + .with( + "INSERT INTO %s(id, name) VALUES ('1','picard')", + RelationalDbQueryUtils.getFullyQualifiedTableName(SCHEMA_NAME2, TABLE_NAME) + ) + .with( + "INSERT INTO %s(id, name) VALUES ('2', 'crusher')", + RelationalDbQueryUtils.getFullyQualifiedTableName(SCHEMA_NAME2, TABLE_NAME) + ) + .with( + "INSERT INTO %s(id, name) VALUES ('3', 'vash')", + RelationalDbQueryUtils.getFullyQualifiedTableName(SCHEMA_NAME2, TABLE_NAME) + ) + + val actual = source()!!.discover(config()) + + val expected = getCatalog(defaultNamespace) + val catalogStreams: MutableList = ArrayList() + catalogStreams.addAll(expected.streams) + catalogStreams.add( + CatalogHelpers.createAirbyteStream( + TABLE_NAME, + SCHEMA_NAME2, + Field.of(COL_ID, JsonSchemaType.STRING), + Field.of(COL_NAME, JsonSchemaType.STRING) + ) + .withSupportedSyncModes( + java.util.List.of(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL) + ) + ) + expected.streams = catalogStreams + // sort streams by name so that we are comparing lists with the same order. + val schemaTableCompare = + Comparator.comparing { stream: AirbyteStream -> stream.namespace + "." + stream.name } + expected.streams.sortWith(schemaTableCompare) + actual!!.streams.sortWith(schemaTableCompare) + Assertions.assertEquals(expected, filterOutOtherSchemas(actual)) + } + + @Test + @Throws(Exception::class) + fun testReadSuccess() { + val actualMessages = + MoreIterators.toList( + source()!!.read(config(), getConfiguredCatalogWithOneStream(defaultNamespace), null) + ) + + setEmittedAtToNull(actualMessages) + val expectedMessages = testMessages + MatcherAssert.assertThat( + expectedMessages, + Matchers.containsInAnyOrder(*actualMessages.toTypedArray()) + ) + MatcherAssert.assertThat( + actualMessages, + Matchers.containsInAnyOrder(*expectedMessages.toTypedArray()) + ) + } + + @Test + @Throws(Exception::class) + protected fun testReadOneColumn() { + val catalog = + CatalogHelpers.createConfiguredAirbyteCatalog( + streamName(), + defaultNamespace, + Field.of(COL_ID, JsonSchemaType.NUMBER) + ) + val actualMessages = MoreIterators.toList(source()!!.read(config(), catalog, null)) + + setEmittedAtToNull(actualMessages) + + val expectedMessages = airbyteMessagesReadOneColumn + Assertions.assertEquals(expectedMessages.size, actualMessages.size) + Assertions.assertTrue(expectedMessages.containsAll(actualMessages)) + Assertions.assertTrue(actualMessages.containsAll(expectedMessages)) + } + + protected open val airbyteMessagesReadOneColumn: List + get() { + val expectedMessages = + testMessages + .stream() + .map { `object`: AirbyteMessage -> Jsons.clone(`object`) } + .peek { m: AirbyteMessage -> + (m.record.data as ObjectNode).remove(COL_NAME) + (m.record.data as ObjectNode).remove(COL_UPDATED_AT) + (m.record.data as ObjectNode).replace( + COL_ID, + convertIdBasedOnDatabase(m.record.data[COL_ID].asInt()) + ) + } + .collect(Collectors.toList()) + return expectedMessages + } + + @Test + @Throws(Exception::class) + protected fun testReadMultipleTables() { + val catalog = getConfiguredCatalogWithOneStream(defaultNamespace) + val expectedMessages: MutableList = ArrayList(testMessages) + + for (i in 2..9) { + val streamName2 = streamName() + i + val tableName = getFullyQualifiedTableName(TABLE_NAME + i) + testdb!! + .with(createTableQuery(tableName, "id INTEGER, name VARCHAR(200)", "")) + .with("INSERT INTO %s(id, name) VALUES (1,'picard')", tableName) + .with("INSERT INTO %s(id, name) VALUES (2, 'crusher')", tableName) + .with("INSERT INTO %s(id, name) VALUES (3, 'vash')", tableName) + catalog.streams.add( + CatalogHelpers.createConfiguredAirbyteStream( + streamName2, + defaultNamespace, + Field.of(COL_ID, JsonSchemaType.NUMBER), + Field.of(COL_NAME, JsonSchemaType.STRING) + ) + ) + + expectedMessages.addAll(getAirbyteMessagesSecondSync(streamName2)) + } + + val actualMessages = MoreIterators.toList(source()!!.read(config(), catalog, null)) + + setEmittedAtToNull(actualMessages) + + Assertions.assertEquals(expectedMessages.size, actualMessages.size) + Assertions.assertTrue(expectedMessages.containsAll(actualMessages)) + Assertions.assertTrue(actualMessages.containsAll(expectedMessages)) + } + + protected open fun getAirbyteMessagesSecondSync(streamName: String?): List { + return testMessages + .stream() + .map { `object`: AirbyteMessage -> Jsons.clone(`object`) } + .peek { m: AirbyteMessage -> + m.record.stream = streamName + m.record.namespace = defaultNamespace + (m.record.data as ObjectNode).remove(COL_UPDATED_AT) + (m.record.data as ObjectNode).replace( + COL_ID, + convertIdBasedOnDatabase(m.record.data[COL_ID].asInt()) + ) + } + .collect(Collectors.toList()) + } + + @Test + @Throws(Exception::class) + protected fun testTablesWithQuoting() { + val streamForTableWithSpaces = createTableWithSpaces() + + val catalog = + ConfiguredAirbyteCatalog() + .withStreams( + java.util.List.of( + getConfiguredCatalogWithOneStream(defaultNamespace).streams[0], + streamForTableWithSpaces + ) + ) + val actualMessages = MoreIterators.toList(source()!!.read(config(), catalog, null)) + + setEmittedAtToNull(actualMessages) + + val expectedMessages: MutableList = ArrayList(testMessages) + expectedMessages.addAll(getAirbyteMessagesForTablesWithQuoting(streamForTableWithSpaces)) + + Assertions.assertEquals(expectedMessages.size, actualMessages.size) + Assertions.assertTrue(expectedMessages.containsAll(actualMessages)) + Assertions.assertTrue(actualMessages.containsAll(expectedMessages)) + } + + protected open fun getAirbyteMessagesForTablesWithQuoting( + streamForTableWithSpaces: ConfiguredAirbyteStream + ): List { + return testMessages + .stream() + .map { `object`: AirbyteMessage -> Jsons.clone(`object`) } + .peek { m: AirbyteMessage -> + m.record.stream = streamForTableWithSpaces.stream.name + (m.record.data as ObjectNode).set( + COL_LAST_NAME_WITH_SPACE, + (m.record.data as ObjectNode).remove(COL_NAME) + ) + (m.record.data as ObjectNode).remove(COL_UPDATED_AT) + (m.record.data as ObjectNode).replace( + COL_ID, + convertIdBasedOnDatabase(m.record.data[COL_ID].asInt()) + ) + } + .collect(Collectors.toList()) + } + + @Test + fun testReadFailure() { + val spiedAbStream = + Mockito.spy(getConfiguredCatalogWithOneStream(defaultNamespace).streams[0]) + val catalog = ConfiguredAirbyteCatalog().withStreams(java.util.List.of(spiedAbStream)) + Mockito.doCallRealMethod().doThrow(RuntimeException()).`when`(spiedAbStream).stream + + Assertions.assertThrows(RuntimeException::class.java) { + source()!!.read(config(), catalog, null) + } + } + + @Test + @Throws(Exception::class) + fun testIncrementalNoPreviousState() { + incrementalCursorCheck(COL_ID, null, "3", testMessages) + } + + @Test + @Throws(Exception::class) + fun testIncrementalIntCheckCursor() { + incrementalCursorCheck(COL_ID, "2", "3", java.util.List.of(testMessages[2])) + } + + @Test + @Throws(Exception::class) + fun testIncrementalStringCheckCursor() { + incrementalCursorCheck( + COL_NAME, + "patent", + "vash", + java.util.List.of(testMessages[0], testMessages[2]) + ) + } + + @Test + @Throws(Exception::class) + fun testIncrementalStringCheckCursorSpaceInColumnName() { + val streamWithSpaces = createTableWithSpaces() + + val expectedRecordMessages = + getAirbyteMessagesCheckCursorSpaceInColumnName(streamWithSpaces) + incrementalCursorCheck( + COL_LAST_NAME_WITH_SPACE, + COL_LAST_NAME_WITH_SPACE, + "patent", + "vash", + expectedRecordMessages, + streamWithSpaces + ) + } + + protected open fun getAirbyteMessagesCheckCursorSpaceInColumnName( + streamWithSpaces: ConfiguredAirbyteStream + ): List { + val firstMessage = testMessages[0] + firstMessage.record.stream = streamWithSpaces.stream.name + (firstMessage.record.data as ObjectNode).remove(COL_UPDATED_AT) + (firstMessage.record.data as ObjectNode).set( + COL_LAST_NAME_WITH_SPACE, + (firstMessage.record.data as ObjectNode).remove(COL_NAME) + ) + + val secondMessage = testMessages[2] + secondMessage.record.stream = streamWithSpaces.stream.name + (secondMessage.record.data as ObjectNode).remove(COL_UPDATED_AT) + (secondMessage.record.data as ObjectNode).set( + COL_LAST_NAME_WITH_SPACE, + (secondMessage.record.data as ObjectNode).remove(COL_NAME) + ) + + return java.util.List.of(firstMessage, secondMessage) + } + + @Test + @Throws(Exception::class) + fun testIncrementalDateCheckCursor() { + incrementalDateCheck() + } + + @Throws(Exception::class) + protected open fun incrementalDateCheck() { + incrementalCursorCheck( + COL_UPDATED_AT, + "2005-10-18", + "2006-10-19", + java.util.List.of(testMessages[1], testMessages[2]) + ) + } + + @Test + @Throws(Exception::class) + fun testIncrementalCursorChanges() { + incrementalCursorCheck( + COL_ID, + COL_NAME, // cheesing this value a little bit. in the correct implementation this + // initial cursor value should + // be ignored because the cursor field changed. setting it to a value that if used, will + // cause + // records to (incorrectly) be filtered out. + "data", + "vash", + testMessages + ) + } + + @Test + @Throws(Exception::class) + protected fun testReadOneTableIncrementallyTwice() { + val config = config() + val namespace = defaultNamespace + val configuredCatalog = getConfiguredCatalogWithOneStream(namespace) + configuredCatalog.streams.forEach( + Consumer { airbyteStream: ConfiguredAirbyteStream -> + airbyteStream.syncMode = SyncMode.INCREMENTAL + airbyteStream.cursorField = java.util.List.of(COL_ID) + airbyteStream.destinationSyncMode = DestinationSyncMode.APPEND + } + ) + + val actualMessagesFirstSync = + MoreIterators.toList( + source()!!.read( + config, + configuredCatalog, + createEmptyState(streamName(), namespace) + ) + ) + + val stateAfterFirstSyncOptional = + actualMessagesFirstSync + .stream() + .filter { r: AirbyteMessage -> r.type == AirbyteMessage.Type.STATE } + .findFirst() + Assertions.assertTrue(stateAfterFirstSyncOptional.isPresent) + + executeStatementReadIncrementallyTwice() + + val actualMessagesSecondSync = + MoreIterators.toList( + source()!!.read( + config, + configuredCatalog, + extractState(stateAfterFirstSyncOptional.get()) + ) + ) + + Assertions.assertEquals( + 2, + actualMessagesSecondSync + .stream() + .filter { r: AirbyteMessage -> r.type == AirbyteMessage.Type.RECORD } + .count() + .toInt() + ) + val expectedMessages = getExpectedAirbyteMessagesSecondSync(namespace) + + setEmittedAtToNull(actualMessagesSecondSync) + + Assertions.assertEquals(expectedMessages.size, actualMessagesSecondSync.size) + Assertions.assertTrue(expectedMessages.containsAll(actualMessagesSecondSync)) + Assertions.assertTrue(actualMessagesSecondSync.containsAll(expectedMessages)) + } + + protected open fun executeStatementReadIncrementallyTwice() { + testdb + .with( + "INSERT INTO %s (id, name, updated_at) VALUES (4, 'riker', '2006-10-19')", + getFullyQualifiedTableName(TABLE_NAME) + ) + .with( + "INSERT INTO %s (id, name, updated_at) VALUES (5, 'data', '2006-10-19')", + getFullyQualifiedTableName(TABLE_NAME) + ) + } + + protected open fun getExpectedAirbyteMessagesSecondSync( + namespace: String? + ): List { + val expectedMessages: MutableList = ArrayList() + expectedMessages.add( + AirbyteMessage() + .withType(AirbyteMessage.Type.RECORD) + .withRecord( + AirbyteRecordMessage() + .withStream(streamName()) + .withNamespace(namespace) + .withData( + Jsons.jsonNode( + java.util.Map.of( + COL_ID, + ID_VALUE_4, + COL_NAME, + "riker", + COL_UPDATED_AT, + "2006-10-19" + ) + ) + ) + ) + ) + expectedMessages.add( + AirbyteMessage() + .withType(AirbyteMessage.Type.RECORD) + .withRecord( + AirbyteRecordMessage() + .withStream(streamName()) + .withNamespace(namespace) + .withData( + Jsons.jsonNode( + java.util.Map.of( + COL_ID, + ID_VALUE_5, + COL_NAME, + "data", + COL_UPDATED_AT, + "2006-10-19" + ) + ) + ) + ) + ) + val state = + DbStreamState() + .withStreamName(streamName()) + .withStreamNamespace(namespace) + .withCursorField(java.util.List.of(COL_ID)) + .withCursor("5") + .withCursorRecordCount(1L) + expectedMessages.addAll(createExpectedTestMessages(java.util.List.of(state), 2L)) + return expectedMessages + } + + @Test + @Throws(Exception::class) + protected open fun testReadMultipleTablesIncrementally() { + val tableName2 = TABLE_NAME + 2 + val streamName2 = streamName() + 2 + val fqTableName2 = getFullyQualifiedTableName(tableName2) + testdb!! + .with(createTableQuery(fqTableName2, "id INTEGER, name VARCHAR(200)", "")) + .with("INSERT INTO %s(id, name) VALUES (1,'picard')", fqTableName2) + .with("INSERT INTO %s(id, name) VALUES (2, 'crusher')", fqTableName2) + .with("INSERT INTO %s(id, name) VALUES (3, 'vash')", fqTableName2) + + val namespace = defaultNamespace + val configuredCatalog = getConfiguredCatalogWithOneStream(namespace) + configuredCatalog.streams.add( + CatalogHelpers.createConfiguredAirbyteStream( + streamName2, + namespace, + Field.of(COL_ID, JsonSchemaType.NUMBER), + Field.of(COL_NAME, JsonSchemaType.STRING) + ) + ) + configuredCatalog.streams.forEach( + Consumer { airbyteStream: ConfiguredAirbyteStream -> + airbyteStream.syncMode = SyncMode.INCREMENTAL + airbyteStream.cursorField = java.util.List.of(COL_ID) + airbyteStream.destinationSyncMode = DestinationSyncMode.APPEND + } + ) + + val actualMessagesFirstSync = + MoreIterators.toList( + source()!!.read( + config(), + configuredCatalog, + createEmptyState(streamName(), namespace) + ) + ) + + // get last state message. + val stateAfterFirstSyncOptional = + actualMessagesFirstSync + .stream() + .filter { r: AirbyteMessage -> r.type == AirbyteMessage.Type.STATE } + .reduce { first: AirbyteMessage?, second: AirbyteMessage -> second } + Assertions.assertTrue(stateAfterFirstSyncOptional.isPresent) + + // we know the second streams messages are the same as the first minus the updated at + // column. so we + // cheat and generate the expected messages off of the first expected messages. + val secondStreamExpectedMessages = getAirbyteMessagesSecondStreamWithNamespace(streamName2) + + // Represents the state after the first stream has been updated + val expectedStateStreams1 = + java.util.List.of( + DbStreamState() + .withStreamName(streamName()) + .withStreamNamespace(namespace) + .withCursorField(java.util.List.of(COL_ID)) + .withCursor("3") + .withCursorRecordCount(1L), + DbStreamState() + .withStreamName(streamName2) + .withStreamNamespace(namespace) + .withCursorField(java.util.List.of(COL_ID)) + ) + + // Represents the state after both streams have been updated + val expectedStateStreams2 = + java.util.List.of( + DbStreamState() + .withStreamName(streamName()) + .withStreamNamespace(namespace) + .withCursorField(java.util.List.of(COL_ID)) + .withCursor("3") + .withCursorRecordCount(1L), + DbStreamState() + .withStreamName(streamName2) + .withStreamNamespace(namespace) + .withCursorField(java.util.List.of(COL_ID)) + .withCursor("3") + .withCursorRecordCount(1L) + ) + + val expectedMessagesFirstSync: MutableList = ArrayList(testMessages) + expectedMessagesFirstSync.add( + createStateMessage(expectedStateStreams1[0], expectedStateStreams1, 3L) + ) + expectedMessagesFirstSync.addAll(secondStreamExpectedMessages) + expectedMessagesFirstSync.add( + createStateMessage(expectedStateStreams2[1], expectedStateStreams2, 3L) + ) + + setEmittedAtToNull(actualMessagesFirstSync) + + Assertions.assertEquals(expectedMessagesFirstSync.size, actualMessagesFirstSync.size) + Assertions.assertTrue(expectedMessagesFirstSync.containsAll(actualMessagesFirstSync)) + Assertions.assertTrue(actualMessagesFirstSync.containsAll(expectedMessagesFirstSync)) + } + + protected open fun getAirbyteMessagesSecondStreamWithNamespace( + streamName2: String? + ): List { + return testMessages + .stream() + .map { `object`: AirbyteMessage -> Jsons.clone(`object`) } + .peek { m: AirbyteMessage -> + m.record.stream = streamName2 + (m.record.data as ObjectNode).remove(COL_UPDATED_AT) + (m.record.data as ObjectNode).replace( + COL_ID, + convertIdBasedOnDatabase(m.record.data[COL_ID].asInt()) + ) + } + .collect(Collectors.toList()) + } + + // when initial and final cursor fields are the same. + @Throws(Exception::class) + protected fun incrementalCursorCheck( + cursorField: String, + initialCursorValue: String?, + endCursorValue: String, + expectedRecordMessages: List + ) { + incrementalCursorCheck( + cursorField, + cursorField, + initialCursorValue, + endCursorValue, + expectedRecordMessages + ) + } + + // See https://github.com/airbytehq/airbyte/issues/14732 for rationale and details. + @Test + @Throws(Exception::class) + fun testIncrementalWithConcurrentInsertion() { + val namespace = defaultNamespace + val fullyQualifiedTableName = getFullyQualifiedTableName(TABLE_NAME_AND_TIMESTAMP) + val columnDefinition = + String.format( + "name VARCHAR(200) NOT NULL, %s %s NOT NULL", + COL_TIMESTAMP, + COL_TIMESTAMP_TYPE + ) + + // 1st sync + testdb!! + .with(createTableQuery(fullyQualifiedTableName, columnDefinition, "")) + .with( + INSERT_TABLE_NAME_AND_TIMESTAMP_QUERY, + fullyQualifiedTableName, + "a", + "2021-01-01 00:00:00" + ) + .with( + INSERT_TABLE_NAME_AND_TIMESTAMP_QUERY, + fullyQualifiedTableName, + "b", + "2021-01-01 00:00:00" + ) + + val configuredCatalog = + CatalogHelpers.toDefaultConfiguredCatalog( + AirbyteCatalog() + .withStreams( + java.util.List.of( + CatalogHelpers.createAirbyteStream( + TABLE_NAME_AND_TIMESTAMP, + namespace, + Field.of(COL_NAME, JsonSchemaType.STRING), + Field.of( + COL_TIMESTAMP, + JsonSchemaType.STRING_TIMESTAMP_WITHOUT_TIMEZONE + ) + ) + ) + ) + ) + + configuredCatalog.streams.forEach( + Consumer { airbyteStream: ConfiguredAirbyteStream -> + airbyteStream.syncMode = SyncMode.INCREMENTAL + airbyteStream.cursorField = java.util.List.of(COL_TIMESTAMP) + airbyteStream.destinationSyncMode = DestinationSyncMode.APPEND + } + ) + + val firstSyncActualMessages = + MoreIterators.toList( + source()!!.read( + config(), + configuredCatalog, + createEmptyState(TABLE_NAME_AND_TIMESTAMP, namespace) + ) + ) + + // cursor after 1st sync: 2021-01-01 00:00:00, count 2 + val firstSyncStateOptional = + firstSyncActualMessages + .stream() + .filter { r: AirbyteMessage -> r.type == AirbyteMessage.Type.STATE } + .findFirst() + Assertions.assertTrue(firstSyncStateOptional.isPresent) + val firstSyncState = getStateData(firstSyncStateOptional.get(), TABLE_NAME_AND_TIMESTAMP) + Assertions.assertEquals( + firstSyncState["cursor_field"].elements().next().asText(), + COL_TIMESTAMP + ) + Assertions.assertTrue(firstSyncState["cursor"].asText().contains("2021-01-01")) + Assertions.assertTrue(firstSyncState["cursor"].asText().contains("00:00:00")) + Assertions.assertEquals(2L, firstSyncState["cursor_record_count"].asLong()) + + val firstSyncNames = + firstSyncActualMessages + .stream() + .filter { r: AirbyteMessage -> r.type == AirbyteMessage.Type.RECORD } + .map { r: AirbyteMessage -> r.record.data[COL_NAME].asText() } + .toList() + // some databases don't make insertion order guarantee when equal ordering value + if ( + testdb!!.databaseDriver == DatabaseDriver.TERADATA || + testdb!!.databaseDriver == DatabaseDriver.ORACLE + ) { + MatcherAssert.assertThat( + listOf("a", "b"), + Matchers.containsInAnyOrder(*firstSyncNames.toTypedArray()) + ) + } else { + Assertions.assertEquals(listOf("a", "b"), firstSyncNames) + } + + // 2nd sync + testdb!!.with( + INSERT_TABLE_NAME_AND_TIMESTAMP_QUERY, + fullyQualifiedTableName, + "c", + "2021-01-02 00:00:00" + ) + + val secondSyncActualMessages = + MoreIterators.toList( + source()!!.read( + config(), + configuredCatalog, + createState(TABLE_NAME_AND_TIMESTAMP, namespace, firstSyncState) + ) + ) + + // cursor after 2nd sync: 2021-01-02 00:00:00, count 1 + val secondSyncStateOptional = + secondSyncActualMessages + .stream() + .filter { r: AirbyteMessage -> r.type == AirbyteMessage.Type.STATE } + .findFirst() + Assertions.assertTrue(secondSyncStateOptional.isPresent) + val secondSyncState = getStateData(secondSyncStateOptional.get(), TABLE_NAME_AND_TIMESTAMP) + Assertions.assertEquals( + secondSyncState["cursor_field"].elements().next().asText(), + COL_TIMESTAMP + ) + Assertions.assertTrue(secondSyncState["cursor"].asText().contains("2021-01-02")) + Assertions.assertTrue(secondSyncState["cursor"].asText().contains("00:00:00")) + Assertions.assertEquals(1L, secondSyncState["cursor_record_count"].asLong()) + + val secondSyncNames = + secondSyncActualMessages + .stream() + .filter { r: AirbyteMessage -> r.type == AirbyteMessage.Type.RECORD } + .map { r: AirbyteMessage -> r.record.data[COL_NAME].asText() } + .toList() + Assertions.assertEquals(listOf("c"), secondSyncNames) + + // 3rd sync has records with duplicated cursors + testdb!! + .with( + INSERT_TABLE_NAME_AND_TIMESTAMP_QUERY, + fullyQualifiedTableName, + "d", + "2021-01-02 00:00:00" + ) + .with( + INSERT_TABLE_NAME_AND_TIMESTAMP_QUERY, + fullyQualifiedTableName, + "e", + "2021-01-02 00:00:00" + ) + .with( + INSERT_TABLE_NAME_AND_TIMESTAMP_QUERY, + fullyQualifiedTableName, + "f", + "2021-01-03 00:00:00" + ) + + val thirdSyncActualMessages = + MoreIterators.toList( + source()!!.read( + config(), + configuredCatalog, + createState(TABLE_NAME_AND_TIMESTAMP, namespace, secondSyncState) + ) + ) + + // Cursor after 3rd sync is: 2021-01-03 00:00:00, count 1. + val thirdSyncStateOptional = + thirdSyncActualMessages + .stream() + .filter { r: AirbyteMessage -> r.type == AirbyteMessage.Type.STATE } + .findFirst() + Assertions.assertTrue(thirdSyncStateOptional.isPresent) + val thirdSyncState = getStateData(thirdSyncStateOptional.get(), TABLE_NAME_AND_TIMESTAMP) + Assertions.assertEquals( + thirdSyncState["cursor_field"].elements().next().asText(), + COL_TIMESTAMP + ) + Assertions.assertTrue(thirdSyncState["cursor"].asText().contains("2021-01-03")) + Assertions.assertTrue(thirdSyncState["cursor"].asText().contains("00:00:00")) + Assertions.assertEquals(1L, thirdSyncState["cursor_record_count"].asLong()) + + // The c, d, e, f are duplicated records from this sync, because the cursor + // record count in the database is different from that in the state. + val thirdSyncExpectedNames = + thirdSyncActualMessages + .stream() + .filter { r: AirbyteMessage -> r.type == AirbyteMessage.Type.RECORD } + .map { r: AirbyteMessage -> r.record.data[COL_NAME].asText() } + .toList() + + // teradata doesn't make insertion order guarantee when equal ordering value + if (testdb!!.databaseDriver == DatabaseDriver.TERADATA) { + MatcherAssert.assertThat( + listOf("c", "d", "e", "f"), + Matchers.containsInAnyOrder(*thirdSyncExpectedNames.toTypedArray()) + ) + } else { + Assertions.assertEquals(listOf("c", "d", "e", "f"), thirdSyncExpectedNames) + } + } + + protected open fun getStateData(airbyteMessage: AirbyteMessage, streamName: String): JsonNode { + for (stream in airbyteMessage.state.data["streams"]) { + if (stream["stream_name"].asText() == streamName) { + return stream + } + } + throw IllegalArgumentException("Stream not found in state message: $streamName") + } + + @Throws(Exception::class) + private fun incrementalCursorCheck( + initialCursorField: String, + cursorField: String, + initialCursorValue: String?, + endCursorValue: String, + expectedRecordMessages: List + ) { + incrementalCursorCheck( + initialCursorField, + cursorField, + initialCursorValue, + endCursorValue, + expectedRecordMessages, + getConfiguredCatalogWithOneStream(defaultNamespace).streams[0] + ) + } + + @Throws(Exception::class) + protected fun incrementalCursorCheck( + initialCursorField: String?, + cursorField: String, + initialCursorValue: String?, + endCursorValue: String?, + expectedRecordMessages: List, + airbyteStream: ConfiguredAirbyteStream + ) { + airbyteStream.syncMode = SyncMode.INCREMENTAL + airbyteStream.cursorField = java.util.List.of(cursorField) + airbyteStream.destinationSyncMode = DestinationSyncMode.APPEND + + val configuredCatalog = + ConfiguredAirbyteCatalog().withStreams(java.util.List.of(airbyteStream)) + + val dbStreamState = buildStreamState(airbyteStream, initialCursorField, initialCursorValue) + + val actualMessages = + MoreIterators.toList( + source()!!.read( + config(), + configuredCatalog, + Jsons.jsonNode(createState(java.util.List.of(dbStreamState))) + ) + ) + + setEmittedAtToNull(actualMessages) + + val expectedStreams = + java.util.List.of(buildStreamState(airbyteStream, cursorField, endCursorValue)) + + val expectedMessages: MutableList = ArrayList(expectedRecordMessages) + expectedMessages.addAll( + createExpectedTestMessages(expectedStreams, expectedRecordMessages.size.toLong()) + ) + + Assertions.assertEquals(expectedMessages.size, actualMessages.size) + Assertions.assertTrue(expectedMessages.containsAll(actualMessages)) + Assertions.assertTrue(actualMessages.containsAll(expectedMessages)) + } + + protected open fun buildStreamState( + configuredAirbyteStream: ConfiguredAirbyteStream, + cursorField: String?, + cursorValue: String? + ): DbStreamState { + return DbStreamState() + .withStreamName(configuredAirbyteStream.stream.name) + .withStreamNamespace(configuredAirbyteStream.stream.namespace) + .withCursorField(java.util.List.of(cursorField)) + .withCursor(cursorValue) + .withCursorRecordCount(1L) + } + + // get catalog and perform a defensive copy. + protected fun getConfiguredCatalogWithOneStream( + defaultNamespace: String? + ): ConfiguredAirbyteCatalog { + val catalog = CatalogHelpers.toDefaultConfiguredCatalog(getCatalog(defaultNamespace)) + // Filter to only keep the main stream name as configured stream + catalog.withStreams( + catalog.streams + .stream() + .filter { s: ConfiguredAirbyteStream -> s.stream.name == streamName() } + .collect(Collectors.toList()) + ) + return catalog + } + + protected open fun getCatalog(defaultNamespace: String?): AirbyteCatalog { + return AirbyteCatalog() + .withStreams( + java.util.List.of( + CatalogHelpers.createAirbyteStream( + TABLE_NAME, + defaultNamespace, + Field.of(COL_ID, JsonSchemaType.INTEGER), + Field.of(COL_NAME, JsonSchemaType.STRING), + Field.of(COL_UPDATED_AT, JsonSchemaType.STRING) + ) + .withSupportedSyncModes( + java.util.List.of(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL) + ) + .withSourceDefinedPrimaryKey(java.util.List.of(java.util.List.of(COL_ID))), + CatalogHelpers.createAirbyteStream( + TABLE_NAME_WITHOUT_PK, + defaultNamespace, + Field.of(COL_ID, JsonSchemaType.INTEGER), + Field.of(COL_NAME, JsonSchemaType.STRING), + Field.of(COL_UPDATED_AT, JsonSchemaType.STRING) + ) + .withSupportedSyncModes( + java.util.List.of(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL) + ) + .withSourceDefinedPrimaryKey(emptyList()), + CatalogHelpers.createAirbyteStream( + TABLE_NAME_COMPOSITE_PK, + defaultNamespace, + Field.of(COL_FIRST_NAME, JsonSchemaType.STRING), + Field.of(COL_LAST_NAME, JsonSchemaType.STRING), + Field.of(COL_UPDATED_AT, JsonSchemaType.STRING) + ) + .withSupportedSyncModes( + java.util.List.of(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL) + ) + .withSourceDefinedPrimaryKey( + java.util.List.of( + java.util.List.of(COL_FIRST_NAME), + java.util.List.of(COL_LAST_NAME) + ) + ) + ) + ) + } + + protected open val testMessages: List + get() = + java.util.List.of( + AirbyteMessage() + .withType(AirbyteMessage.Type.RECORD) + .withRecord( + AirbyteRecordMessage() + .withStream(streamName()) + .withNamespace(defaultNamespace) + .withData( + Jsons.jsonNode( + java.util.Map.of( + COL_ID, + ID_VALUE_1, + COL_NAME, + "picard", + COL_UPDATED_AT, + "2004-10-19" + ) + ) + ) + ), + AirbyteMessage() + .withType(AirbyteMessage.Type.RECORD) + .withRecord( + AirbyteRecordMessage() + .withStream(streamName()) + .withNamespace(defaultNamespace) + .withData( + Jsons.jsonNode( + java.util.Map.of( + COL_ID, + ID_VALUE_2, + COL_NAME, + "crusher", + COL_UPDATED_AT, + "2005-10-19" + ) + ) + ) + ), + AirbyteMessage() + .withType(AirbyteMessage.Type.RECORD) + .withRecord( + AirbyteRecordMessage() + .withStream(streamName()) + .withNamespace(defaultNamespace) + .withData( + Jsons.jsonNode( + java.util.Map.of( + COL_ID, + ID_VALUE_3, + COL_NAME, + "vash", + COL_UPDATED_AT, + "2006-10-19" + ) + ) + ) + ) + ) + + protected open fun createExpectedTestMessages( + states: List, + numRecords: Long + ): List { + return states + .stream() + .map { s: DbStreamState -> + AirbyteMessage() + .withType(AirbyteMessage.Type.STATE) + .withState( + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream( + AirbyteStreamState() + .withStreamDescriptor( + StreamDescriptor() + .withNamespace(s.streamNamespace) + .withName(s.streamName) + ) + .withStreamState(Jsons.jsonNode(s)) + ) + .withData(Jsons.jsonNode(DbState().withCdc(false).withStreams(states))) + .withSourceStats( + AirbyteStateStats().withRecordCount(numRecords.toDouble()) + ) + ) + } + .collect(Collectors.toList()) + } + + protected open fun createState(states: List): List { + return states + .stream() + .map { s: DbStreamState -> + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream( + AirbyteStreamState() + .withStreamDescriptor( + StreamDescriptor() + .withNamespace(s.streamNamespace) + .withName(s.streamName) + ) + .withStreamState(Jsons.jsonNode(s)) + ) + } + .collect(Collectors.toList()) + } + + @Throws(SQLException::class) + protected fun createTableWithSpaces(): ConfiguredAirbyteStream { + val tableNameWithSpaces = TABLE_NAME_WITH_SPACES + "2" + val streamName2 = tableNameWithSpaces + + testdb!!.getDataSource()!!.connection.use { connection -> + val identifierQuoteString = connection.metaData.identifierQuoteString + connection + .createStatement() + .execute( + createTableQuery( + getFullyQualifiedTableName( + RelationalDbQueryUtils.enquoteIdentifier( + tableNameWithSpaces, + identifierQuoteString + ) + ), + "id INTEGER, " + + RelationalDbQueryUtils.enquoteIdentifier( + COL_LAST_NAME_WITH_SPACE, + identifierQuoteString + ) + + " VARCHAR(200)", + "" + ) + ) + connection + .createStatement() + .execute( + String.format( + "INSERT INTO %s(id, %s) VALUES (1,'picard')", + getFullyQualifiedTableName( + RelationalDbQueryUtils.enquoteIdentifier( + tableNameWithSpaces, + identifierQuoteString + ) + ), + RelationalDbQueryUtils.enquoteIdentifier( + COL_LAST_NAME_WITH_SPACE, + identifierQuoteString + ) + ) + ) + connection + .createStatement() + .execute( + String.format( + "INSERT INTO %s(id, %s) VALUES (2, 'crusher')", + getFullyQualifiedTableName( + RelationalDbQueryUtils.enquoteIdentifier( + tableNameWithSpaces, + identifierQuoteString + ) + ), + RelationalDbQueryUtils.enquoteIdentifier( + COL_LAST_NAME_WITH_SPACE, + identifierQuoteString + ) + ) + ) + connection + .createStatement() + .execute( + String.format( + "INSERT INTO %s(id, %s) VALUES (3, 'vash')", + getFullyQualifiedTableName( + RelationalDbQueryUtils.enquoteIdentifier( + tableNameWithSpaces, + identifierQuoteString + ) + ), + RelationalDbQueryUtils.enquoteIdentifier( + COL_LAST_NAME_WITH_SPACE, + identifierQuoteString + ) + ) + ) + } + return CatalogHelpers.createConfiguredAirbyteStream( + streamName2, + defaultNamespace, + Field.of(COL_ID, JsonSchemaType.NUMBER), + Field.of(COL_LAST_NAME_WITH_SPACE, JsonSchemaType.STRING) + ) + } + + fun getFullyQualifiedTableName(tableName: String): String { + return RelationalDbQueryUtils.getFullyQualifiedTableName(defaultSchemaName, tableName) + } + + protected fun createSchemas() { + if (supportsSchemas()) { + for (schemaName in TEST_SCHEMAS) { + testdb!!.with("CREATE SCHEMA %s;", schemaName) + } + } + } + + private fun convertIdBasedOnDatabase(idValue: Int): JsonNode { + return when (testdb!!.databaseDriver) { + DatabaseDriver.ORACLE, + DatabaseDriver.SNOWFLAKE -> Jsons.jsonNode(BigDecimal.valueOf(idValue.toLong())) + else -> Jsons.jsonNode(idValue) + } + } + + private val defaultSchemaName: String? + get() = if (supportsSchemas()) SCHEMA_NAME else null + + protected val defaultNamespace: String + get() = + when (testdb!!.databaseDriver) { + DatabaseDriver.MYSQL, + DatabaseDriver.CLICKHOUSE, + DatabaseDriver.TERADATA -> testdb!!.databaseName!! + else -> SCHEMA_NAME + } + + /** + * Creates empty state with the provided stream name and namespace. + * + * @param streamName The stream name. + * @param streamNamespace The stream namespace. + * @return [JsonNode] representation of the generated empty state. + */ + protected fun createEmptyState(streamName: String?, streamNamespace: String?): JsonNode { + val airbyteStateMessage = + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream( + AirbyteStreamState() + .withStreamDescriptor( + StreamDescriptor().withName(streamName).withNamespace(streamNamespace) + ) + ) + return Jsons.jsonNode(java.util.List.of(airbyteStateMessage)) + } + + protected fun createState( + streamName: String?, + streamNamespace: String?, + stateData: JsonNode? + ): JsonNode { + val airbyteStateMessage = + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream( + AirbyteStreamState() + .withStreamDescriptor( + StreamDescriptor().withName(streamName).withNamespace(streamNamespace) + ) + .withStreamState(stateData) + ) + return Jsons.jsonNode(java.util.List.of(airbyteStateMessage)) + } + + protected fun extractState(airbyteMessage: AirbyteMessage): JsonNode { + return Jsons.jsonNode(java.util.List.of(airbyteMessage.state)) + } + + protected fun createStateMessage( + dbStreamState: DbStreamState, + legacyStates: List?, + recordCount: Long + ): AirbyteMessage { + return AirbyteMessage() + .withType(AirbyteMessage.Type.STATE) + .withState( + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream( + AirbyteStreamState() + .withStreamDescriptor( + StreamDescriptor() + .withNamespace(dbStreamState.streamNamespace) + .withName(dbStreamState.streamName) + ) + .withStreamState(Jsons.jsonNode(dbStreamState)) + ) + .withData(Jsons.jsonNode(DbState().withCdc(false).withStreams(legacyStates))) + .withSourceStats(AirbyteStateStats().withRecordCount(recordCount.toDouble())) + ) + } + + protected fun extractSpecificFieldFromCombinedMessages( + messages: List, + streamName: String, + field: String? + ): List { + return extractStateMessage(messages) + .stream() + .filter { s: AirbyteStateMessage -> s.stream.streamDescriptor.name == streamName } + .map { s: AirbyteStateMessage -> + if (s.stream.streamState[field] != null) s.stream.streamState[field].asText() + else "" + } + .toList() + } + + protected fun filterRecords(messages: List): List { + return messages + .stream() + .filter { r: AirbyteMessage -> r.type == AirbyteMessage.Type.RECORD } + .collect(Collectors.toList()) + } + + protected fun extractStateMessage(messages: List): List { + return messages + .stream() + .filter { r: AirbyteMessage -> r.type == AirbyteMessage.Type.STATE } + .map { obj: AirbyteMessage -> obj.state } + .collect(Collectors.toList()) + } + + protected fun extractStateMessage( + messages: List, + streamName: String + ): List { + return messages + .stream() + .filter { r: AirbyteMessage -> + r.type == AirbyteMessage.Type.STATE && + r.state.stream.streamDescriptor.name == streamName + } + .map { obj: AirbyteMessage -> obj.state } + .collect(Collectors.toList()) + } + + protected fun createRecord( + stream: String?, + namespace: String?, + data: Map + ): AirbyteMessage { + return AirbyteMessage() + .withType(AirbyteMessage.Type.RECORD) + .withRecord( + AirbyteRecordMessage() + .withData(Jsons.jsonNode(data)) + .withStream(stream) + .withNamespace(namespace) + ) + } + + companion object { + @JvmField val SCHEMA_NAME: String = "jdbc_integration_test1" + @JvmField val SCHEMA_NAME2: String = "jdbc_integration_test2" + @JvmField val TEST_SCHEMAS: Set = java.util.Set.of(SCHEMA_NAME, SCHEMA_NAME2) + + @JvmField val TABLE_NAME: String = "id_and_name" + @JvmField val TABLE_NAME_WITH_SPACES: String = "id and name" + @JvmField val TABLE_NAME_WITHOUT_PK: String = "id_and_name_without_pk" + @JvmField val TABLE_NAME_COMPOSITE_PK: String = "full_name_composite_pk" + @JvmField val TABLE_NAME_WITHOUT_CURSOR_TYPE: String = "table_without_cursor_type" + @JvmField val TABLE_NAME_WITH_NULLABLE_CURSOR_TYPE: String = "table_with_null_cursor_type" + + // this table is used in testing incremental sync with concurrent insertions + @JvmField val TABLE_NAME_AND_TIMESTAMP: String = "name_and_timestamp" + + @JvmField val COL_ID: String = "id" + @JvmField val COL_NAME: String = "name" + @JvmField val COL_UPDATED_AT: String = "updated_at" + @JvmField val COL_FIRST_NAME: String = "first_name" + @JvmField val COL_LAST_NAME: String = "last_name" + @JvmField val COL_LAST_NAME_WITH_SPACE: String = "last name" + @JvmField val COL_CURSOR: String = "cursor_field" + @JvmField val COL_TIMESTAMP: String = "timestamp" + @JvmField val ID_VALUE_1: Number = 1 + @JvmField val ID_VALUE_2: Number = 2 + @JvmField val ID_VALUE_3: Number = 3 + @JvmField val ID_VALUE_4: Number = 4 + @JvmField val ID_VALUE_5: Number = 5 + + @JvmField val DROP_SCHEMA_QUERY: String = "DROP SCHEMA IF EXISTS %s CASCADE" + @JvmField + val CREATE_TABLE_WITH_NULLABLE_CURSOR_TYPE_QUERY: String = + "CREATE TABLE %s (%s VARCHAR(20));" + @JvmField + val INSERT_TABLE_WITH_NULLABLE_CURSOR_TYPE_QUERY: String = + "INSERT INTO %s VALUES('Hello world :)');" + @JvmField + val INSERT_TABLE_NAME_AND_TIMESTAMP_QUERY: String = + "INSERT INTO %s (name, timestamp) VALUES ('%s', '%s')" + + @JvmField protected var COL_TIMESTAMP_TYPE: String = "TIMESTAMP" + @JvmField + protected var COLUMN_CLAUSE_WITH_PK: String = + "id INTEGER, name VARCHAR(200) NOT NULL, updated_at DATE NOT NULL" + @JvmField + protected var COLUMN_CLAUSE_WITHOUT_PK: String = + "id INTEGER, name VARCHAR(200) NOT NULL, updated_at DATE NOT NULL" + @JvmField + protected var COLUMN_CLAUSE_WITH_COMPOSITE_PK: String = + "first_name VARCHAR(200) NOT NULL, last_name VARCHAR(200) NOT NULL, updated_at DATE NOT NULL" + + @JvmField + var CREATE_TABLE_WITHOUT_CURSOR_TYPE_QUERY: String = "CREATE TABLE %s (%s bit NOT NULL);" + @JvmField var INSERT_TABLE_WITHOUT_CURSOR_TYPE_QUERY: String = "INSERT INTO %s VALUES(0);" + + @JvmStatic + protected fun setEmittedAtToNull(messages: Iterable) { + for (actualMessage in messages) { + if (actualMessage.record != null) { + actualMessage.record.emittedAt = null + } + } + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/source/jdbc/test/JdbcStressTest.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/source/jdbc/test/JdbcStressTest.kt new file mode 100644 index 0000000000000..e6d10c704129e --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/source/jdbc/test/JdbcStressTest.kt @@ -0,0 +1,274 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.source.jdbc.test + +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.collect.ImmutableMap +import com.google.common.collect.Lists +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings +import io.airbyte.cdk.db.factory.DataSourceFactory.create +import io.airbyte.cdk.db.jdbc.DefaultJdbcDatabase +import io.airbyte.cdk.db.jdbc.JdbcDatabase +import io.airbyte.cdk.db.jdbc.JdbcUtils +import io.airbyte.cdk.integrations.source.jdbc.AbstractJdbcSource +import io.airbyte.commons.functional.CheckedConsumer +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.stream.MoreStreams +import io.airbyte.commons.string.Strings +import io.airbyte.protocol.models.Field +import io.airbyte.protocol.models.JsonSchemaType +import io.airbyte.protocol.models.v0.* +import java.math.BigDecimal +import java.nio.ByteBuffer +import java.sql.Connection +import java.util.* +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * Runs a "large" amount of data through a JdbcSource to ensure that it streams / chunks records. + */ +// todo (cgardens) - this needs more love and thought. we should be able to test this without having +// to rewrite so much data. it is enough for now to sanity check that our JdbcSources can actually +// handle more data than fits in memory. +@SuppressFBWarnings( + value = ["MS_SHOULD_BE_FINAL"], + justification = + "The static variables are updated in sub classes for convenience, and cannot be final." +) +abstract class JdbcStressTest { + private var bitSet: BitSet? = null + private lateinit var config: JsonNode + private var source: AbstractJdbcSource<*>? = null + + /** + * These tests write records without specifying a namespace (schema name). They will be written + * into whatever the default schema is for the database. When they are discovered they will be + * namespaced by the schema name (e.g. .). Thus the source + * needs to tell the tests what that default schema name is. If the database does not support + * schemas, then database name should used instead. + * + * @return name that will be used to namespace the record. + */ + abstract val defaultSchemaName: Optional + + /** + * A valid configuration to connect to a test database. + * + * @return config + */ + abstract fun getConfig(): JsonNode + + /** + * Full qualified class name of the JDBC driver for the database. + * + * @return driver + */ + abstract val driverClass: String + + /** + * An instance of the source that should be tests. + * + * @return source + */ + abstract fun getSource(): AbstractJdbcSource<*>? + + protected fun createTableQuery(tableName: String?, columnClause: String?): String { + return String.format("CREATE TABLE %s(%s)", tableName, columnClause) + } + + @Throws(Exception::class) + open fun setup() { + LOGGER.info("running for driver:" + driverClass) + bitSet = BitSet(TOTAL_RECORDS.toInt()) + + source = getSource() + streamName = + defaultSchemaName.map { `val`: String -> `val` + "." + TABLE_NAME }.orElse(TABLE_NAME) + config = getConfig() + + val jdbcConfig = source!!.toDatabaseConfig(config) + val database: JdbcDatabase = + DefaultJdbcDatabase( + create( + jdbcConfig[JdbcUtils.USERNAME_KEY].asText(), + if (jdbcConfig.has(JdbcUtils.PASSWORD_KEY)) + jdbcConfig[JdbcUtils.PASSWORD_KEY].asText() + else null, + driverClass, + jdbcConfig[JdbcUtils.JDBC_URL_KEY].asText() + ) + ) + + database.execute( + CheckedConsumer { connection: Connection -> + connection + .createStatement() + .execute( + createTableQuery( + "id_and_name", + String.format("id %s, name VARCHAR(200)", COL_ID_TYPE) + ) + ) + } + ) + val batchCount = TOTAL_RECORDS / BATCH_SIZE + LOGGER.info("writing {} batches of {}", batchCount, BATCH_SIZE) + for (i in 0 until batchCount) { + if (i % 1000 == 0L) LOGGER.info("writing batch: $i") + val insert: MutableList = ArrayList() + for (j in 0 until BATCH_SIZE) { + val recordNumber = (i * BATCH_SIZE + j).toInt() + insert.add(String.format(INSERT_STATEMENT, recordNumber, recordNumber)) + } + + val sql = prepareInsertStatement(insert) + database.execute( + CheckedConsumer { connection: Connection -> + connection.createStatement().execute(sql) + } + ) + } + } + + // todo (cgardens) - restructure these tests so that testFullRefresh() and testIncremental() can + // be + // separate tests. current constrained by only wanting to setup the fixture in the database + // once, + // but it is not trivial to move them to @BeforeAll because it is static and we are doing + // inheritance. Not impossible, just needs to be done thoughtfully and for all JdbcSources. + @Test + @Throws(Exception::class) + fun stressTest() { + testFullRefresh() + testIncremental() + } + + @Throws(Exception::class) + private fun testFullRefresh() { + runTest(configuredCatalogFullRefresh, "full_refresh") + } + + @Throws(Exception::class) + private fun testIncremental() { + runTest(configuredCatalogIncremental, "incremental") + } + + @Throws(Exception::class) + private fun runTest(configuredCatalog: ConfiguredAirbyteCatalog, testName: String) { + LOGGER.info("running stress test for: $testName") + val read: Iterator = + source!!.read(config!!, configuredCatalog, Jsons.jsonNode(emptyMap())) + val actualCount = + MoreStreams.toStream(read) + .filter { m: AirbyteMessage -> m.type == AirbyteMessage.Type.RECORD } + .peek { m: AirbyteMessage -> + if (m.record.data[COL_ID].asLong() % 100000 == 0L) { + LOGGER.info("reading batch: " + m.record.data[COL_ID].asLong() / 1000) + } + } + .peek { m: AirbyteMessage -> assertExpectedMessage(m) } + .count() + var a: ByteBuffer + val expectedRoundedRecordsCount = TOTAL_RECORDS - TOTAL_RECORDS % 1000 + LOGGER.info("expected records count: " + TOTAL_RECORDS) + LOGGER.info("actual records count: $actualCount") + Assertions.assertEquals(expectedRoundedRecordsCount, actualCount, "testing: $testName") + Assertions.assertEquals( + expectedRoundedRecordsCount, + bitSet!!.cardinality().toLong(), + "testing: $testName" + ) + } + + // each is roughly 106 bytes. + private fun assertExpectedMessage(actualMessage: AirbyteMessage) { + val recordNumber = actualMessage.record.data[COL_ID].asLong() + bitSet!!.set(recordNumber.toInt()) + actualMessage.record.emittedAt = null + + val expectedRecordNumber: Number = + if (driverClass.lowercase(Locale.getDefault()).contains("oracle")) + BigDecimal(recordNumber) + else recordNumber + + val expectedMessage = + AirbyteMessage() + .withType(AirbyteMessage.Type.RECORD) + .withRecord( + AirbyteRecordMessage() + .withStream(streamName) + .withData( + Jsons.jsonNode( + ImmutableMap.of( + COL_ID, + expectedRecordNumber, + COL_NAME, + "picard-$recordNumber" + ) + ) + ) + ) + Assertions.assertEquals(expectedMessage, actualMessage) + } + + private fun prepareInsertStatement(inserts: List): String { + if (driverClass.lowercase(Locale.getDefault()).contains("oracle")) { + return String.format("INSERT ALL %s SELECT * FROM dual", Strings.join(inserts, " ")) + } + return String.format( + "INSERT INTO id_and_name (id, name) VALUES %s", + Strings.join(inserts, ", ") + ) + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(JdbcStressTest::class.java) + + // this will get rounded down to the nearest 1000th. + private const val TOTAL_RECORDS = 10000000L + private const val BATCH_SIZE = 1000 + var TABLE_NAME: String = "id_and_name" + var COL_ID: String = "id" + var COL_NAME: String = "name" + var COL_ID_TYPE: String = "BIGINT" + var INSERT_STATEMENT: String = "(%s,'picard-%s')" + + private var streamName: String? = null + + private val configuredCatalogFullRefresh: ConfiguredAirbyteCatalog + get() = CatalogHelpers.toDefaultConfiguredCatalog(catalog) + + private val configuredCatalogIncremental: ConfiguredAirbyteCatalog + get() = + ConfiguredAirbyteCatalog() + .withStreams( + listOf( + ConfiguredAirbyteStream() + .withStream(catalog.streams[0]) + .withCursorField(listOf(COL_ID)) + .withSyncMode(SyncMode.INCREMENTAL) + .withDestinationSyncMode(DestinationSyncMode.APPEND) + ) + ) + + private val catalog: AirbyteCatalog + get() = + AirbyteCatalog() + .withStreams( + Lists.newArrayList( + CatalogHelpers.createAirbyteStream( + streamName, + Field.of(COL_ID, JsonSchemaType.NUMBER), + Field.of(COL_NAME, JsonSchemaType.STRING) + ) + .withSupportedSyncModes( + Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL) + ) + ) + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/AbstractSourceConnectorTest.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/AbstractSourceConnectorTest.kt new file mode 100644 index 0000000000000..ab99052d5b947 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/AbstractSourceConnectorTest.kt @@ -0,0 +1,377 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.standardtest.source + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.api.client.AirbyteApiClient +import io.airbyte.api.client.generated.SourceApi +import io.airbyte.api.client.model.generated.DiscoverCatalogResult +import io.airbyte.api.client.model.generated.SourceDiscoverSchemaWriteRequestBody +import io.airbyte.commons.features.EnvVariableFeatureFlags +import io.airbyte.commons.features.FeatureFlags +import io.airbyte.commons.json.Jsons +import io.airbyte.configoss.* +import io.airbyte.protocol.models.v0.AirbyteCatalog +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import io.airbyte.protocol.models.v0.ConnectorSpecification +import io.airbyte.workers.exception.TestHarnessException +import io.airbyte.workers.general.DefaultCheckConnectionTestHarness +import io.airbyte.workers.general.DefaultDiscoverCatalogTestHarness +import io.airbyte.workers.general.DefaultGetSpecTestHarness +import io.airbyte.workers.helper.CatalogClientConverters +import io.airbyte.workers.helper.ConnectorConfigUpdater +import io.airbyte.workers.helper.EntrypointEnvChecker +import io.airbyte.workers.internal.AirbyteSource +import io.airbyte.workers.internal.DefaultAirbyteSource +import io.airbyte.workers.process.AirbyteIntegrationLauncher +import io.airbyte.workers.process.DockerProcessFactory +import io.airbyte.workers.process.ProcessFactory +import java.nio.file.Files +import java.nio.file.Path +import java.util.* +import kotlin.test.assertNotNull +import org.junit.jupiter.api.AfterEach +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.BeforeEach +import org.mockito.ArgumentCaptor +import org.mockito.ArgumentMatchers +import org.mockito.Mockito +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * This abstract class contains helpful functionality and boilerplate for testing a source + * connector. + */ +abstract class AbstractSourceConnectorTest { + private var environment: TestDestinationEnv? = null + private lateinit var jobRoot: Path + protected var localRoot: Path? = null + private lateinit var processFactory: ProcessFactory + + /** Name of the docker image that the tests will run against. */ + protected abstract val imageName: String + + @get:Throws(Exception::class) + protected abstract val config: JsonNode? + /** + * Configuration specific to the integration. Will be passed to integration where + * appropriate in each test. Should be valid. + * + * @return integration-specific configuration + */ + get + + /** + * Function that performs any setup of external resources required for the test. e.g. + * instantiate a postgres database. This function will be called before EACH test. + * + * @param environment + * - information about the test environment. + * @throws Exception + * - can throw any exception, test framework will handle. + */ + @Throws(Exception::class) + protected abstract fun setupEnvironment(environment: TestDestinationEnv?) + + /** + * Function that performs any clean up of external resources required for the test. e.g. delete + * a postgres database. This function will be called after EACH test. It MUST remove all data in + * the destination so that there is no contamination across tests. + * + * @param testEnv + * - information about the test environment. + * @throws Exception + * - can throw any exception, test framework will handle. + */ + @Throws(Exception::class) protected abstract fun tearDown(testEnv: TestDestinationEnv?) + + private lateinit var mAirbyteApiClient: AirbyteApiClient + + private lateinit var mSourceApi: SourceApi + + private lateinit var mConnectorConfigUpdater: ConnectorConfigUpdater + + protected val lastPersistedCatalog: AirbyteCatalog + get() = + convertProtocolObject( + CatalogClientConverters.toAirbyteProtocol(discoverWriteRequest.value.catalog), + AirbyteCatalog::class.java + ) + + private val discoverWriteRequest: ArgumentCaptor = + ArgumentCaptor.forClass(SourceDiscoverSchemaWriteRequestBody::class.java) + + @BeforeEach + @Throws(Exception::class) + fun setUpInternal() { + val testDir = Path.of("/tmp/airbyte_tests/") + Files.createDirectories(testDir) + val workspaceRoot = Files.createTempDirectory(testDir, "test") + jobRoot = Files.createDirectories(Path.of(workspaceRoot.toString(), "job")) + localRoot = Files.createTempDirectory(testDir, "output") + environment = TestDestinationEnv(localRoot) + setupEnvironment(environment) + mAirbyteApiClient = Mockito.mock(AirbyteApiClient::class.java) + mSourceApi = Mockito.mock(SourceApi::class.java) + Mockito.`when`(mAirbyteApiClient.sourceApi).thenReturn(mSourceApi) + Mockito.`when`(mSourceApi.writeDiscoverCatalogResult(ArgumentMatchers.any())) + .thenReturn(DiscoverCatalogResult().catalogId(CATALOG_ID)) + mConnectorConfigUpdater = Mockito.mock(ConnectorConfigUpdater::class.java) + val envMap = HashMap(TestEnvConfigs().jobDefaultEnvMap) + envMap[EnvVariableFeatureFlags.DEPLOYMENT_MODE] = featureFlags().deploymentMode() + processFactory = + DockerProcessFactory( + workspaceRoot, + workspaceRoot.toString(), + localRoot.toString(), + "host", + envMap + ) + + postSetup() + } + + /** + * Override this method if you want to do any per-test setup that depends on being able to e.g. + * [.runRead]. + */ + @Throws(Exception::class) protected open fun postSetup() {} + + @AfterEach + @Throws(Exception::class) + fun tearDownInternal() { + tearDown(environment) + } + + protected open fun featureFlags(): FeatureFlags { + return EnvVariableFeatureFlags() + } + + @Throws(TestHarnessException::class) + protected fun runSpec(): ConnectorSpecification { + val spec = + DefaultGetSpecTestHarness( + AirbyteIntegrationLauncher( + JOB_ID, + JOB_ATTEMPT, + imageName, + processFactory, + null, + null, + false, + featureFlags() + ) + ) + .run(JobGetSpecConfig().withDockerImage(imageName), jobRoot) + .spec + return convertProtocolObject(spec, ConnectorSpecification::class.java) + } + + @Throws(Exception::class) + protected fun runCheck(): StandardCheckConnectionOutput { + return DefaultCheckConnectionTestHarness( + AirbyteIntegrationLauncher( + JOB_ID, + JOB_ATTEMPT, + imageName, + processFactory, + null, + null, + false, + featureFlags() + ), + mConnectorConfigUpdater + ) + .run(StandardCheckConnectionInput().withConnectionConfiguration(config), jobRoot) + .checkConnection + } + + @Throws(Exception::class) + protected fun runCheckAndGetStatusAsString(config: JsonNode?): String { + return DefaultCheckConnectionTestHarness( + AirbyteIntegrationLauncher( + JOB_ID, + JOB_ATTEMPT, + imageName, + processFactory, + null, + null, + false, + featureFlags() + ), + mConnectorConfigUpdater + ) + .run(StandardCheckConnectionInput().withConnectionConfiguration(config), jobRoot) + .checkConnection + .status + .toString() + } + + @Throws(Exception::class) + protected fun runDiscover(): UUID { + val toReturn = + DefaultDiscoverCatalogTestHarness( + mAirbyteApiClient, + AirbyteIntegrationLauncher( + JOB_ID, + JOB_ATTEMPT, + imageName, + processFactory, + null, + null, + false, + featureFlags() + ), + mConnectorConfigUpdater + ) + .run( + StandardDiscoverCatalogInput() + .withSourceId(SOURCE_ID.toString()) + .withConnectionConfiguration(config), + jobRoot + ) + .discoverCatalogId + Mockito.verify(mSourceApi).writeDiscoverCatalogResult(discoverWriteRequest.capture()) + return toReturn + } + + @Throws(Exception::class) + protected fun checkEntrypointEnvVariable() { + val entrypoint = + EntrypointEnvChecker.getEntrypointEnvVariable( + processFactory, + JOB_ID, + JOB_ATTEMPT, + jobRoot, + imageName + ) + assertNotNull(entrypoint) + Assertions.assertFalse(entrypoint.isBlank()) + } + + @Throws(Exception::class) + protected open fun runRead(configuredCatalog: ConfiguredAirbyteCatalog?): List { + return runRead(configuredCatalog, null) + } + + // todo (cgardens) - assume no state since we are all full refresh right now. + @Throws(Exception::class) + protected fun runRead( + catalog: ConfiguredAirbyteCatalog?, + state: JsonNode? + ): List { + val sourceConfig = + WorkerSourceConfig() + .withSourceConnectionConfiguration(config) + .withState(if (state == null) null else State().withState(state)) + .withCatalog( + convertProtocolObject( + catalog, + io.airbyte.protocol.models.ConfiguredAirbyteCatalog::class.java + ) + ) + + val source: AirbyteSource = + DefaultAirbyteSource( + AirbyteIntegrationLauncher( + JOB_ID, + JOB_ATTEMPT, + imageName, + processFactory, + null, + null, + false, + featureFlags() + ), + featureFlags() + ) + val messages: MutableList = ArrayList() + source.start(sourceConfig, jobRoot) + while (!source.isFinished) { + source.attemptRead().ifPresent { m: io.airbyte.protocol.models.AirbyteMessage -> + messages.add(convertProtocolObject(m, AirbyteMessage::class.java)) + } + } + source.close() + + return messages + } + + @Throws(Exception::class) + protected fun runReadVerifyNumberOfReceivedMsgs( + catalog: ConfiguredAirbyteCatalog, + state: JsonNode?, + mapOfExpectedRecordsCount: MutableMap + ): Map { + val sourceConfig = + WorkerSourceConfig() + .withSourceConnectionConfiguration(config) + .withState(if (state == null) null else State().withState(state)) + .withCatalog( + convertProtocolObject( + catalog, + io.airbyte.protocol.models.ConfiguredAirbyteCatalog::class.java + ) + ) + + val source = prepareAirbyteSource() + source.start(sourceConfig, jobRoot) + + while (!source.isFinished) { + val airbyteMessageOptional = + source.attemptRead().map { m: io.airbyte.protocol.models.AirbyteMessage -> + convertProtocolObject(m, AirbyteMessage::class.java) + } + if ( + airbyteMessageOptional.isPresent && + airbyteMessageOptional.get().type == AirbyteMessage.Type.RECORD + ) { + val airbyteMessage = airbyteMessageOptional.get() + val record = airbyteMessage.record + + val streamName = record.stream + mapOfExpectedRecordsCount[streamName] = mapOfExpectedRecordsCount[streamName]!! - 1 + } + } + source.close() + return mapOfExpectedRecordsCount + } + + private fun prepareAirbyteSource(): AirbyteSource { + val integrationLauncher = + AirbyteIntegrationLauncher( + JOB_ID, + JOB_ATTEMPT, + imageName, + processFactory, + null, + null, + false, + featureFlags() + ) + return DefaultAirbyteSource(integrationLauncher, featureFlags()) + } + + companion object { + protected val LOGGER: Logger = + LoggerFactory.getLogger(AbstractSourceConnectorTest::class.java) + private const val JOB_ID = 0L.toString() + private const val JOB_ATTEMPT = 0 + + private val CATALOG_ID: UUID = UUID.randomUUID() + + private val SOURCE_ID: UUID = UUID.randomUUID() + + private const val CPU_REQUEST_FIELD_NAME = "cpuRequest" + private const val CPU_LIMIT_FIELD_NAME = "cpuLimit" + private const val MEMORY_REQUEST_FIELD_NAME = "memoryRequest" + private const val MEMORY_LIMIT_FIELD_NAME = "memoryLimit" + + private fun convertProtocolObject(v1: V1, klass: Class): V0 { + return Jsons.`object`(Jsons.jsonNode(v1), klass) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/AbstractSourceDatabaseTypeTest.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/AbstractSourceDatabaseTypeTest.kt new file mode 100644 index 0000000000000..d5dc3959aed61 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/AbstractSourceDatabaseTypeTest.kt @@ -0,0 +1,426 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.standardtest.source + +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.collect.Lists +import io.airbyte.cdk.db.Database +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.Field +import io.airbyte.protocol.models.JsonSchemaType +import io.airbyte.protocol.models.v0.* +import java.io.IOException +import java.sql.SQLException +import java.util.function.Consumer +import java.util.function.Function +import java.util.stream.Collectors +import org.apache.commons.lang3.StringUtils +import org.jooq.DSLContext +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * This abstract class contains common helpers and boilerplate for comprehensively testing that all + * data types in a source can be read and handled correctly by the connector and within Airbyte's + * type system. + */ +abstract class AbstractSourceDatabaseTypeTest : AbstractSourceConnectorTest() { + @JvmField protected val testDataHolders: MutableList = ArrayList() + @JvmField protected var database: Database? = null + + protected val idColumnName: String + /** + * The column name will be used for a PK column in the test tables. Override it if default + * name is not valid for your source. + * + * @return Id column name + */ + get() = "id" + + protected val testColumnName: String + /** + * The column name will be used for a test column in the test tables. Override it if default + * name is not valid for your source. + * + * @return Test column name + */ + get() = "test_column" + + /** + * Setup the test database. All tables and data described in the registered tests will be put + * there. + * + * @return configured test database + * @throws Exception + * - might throw any exception during initialization. + */ + @Throws(Exception::class) protected abstract fun setupDatabase(): Database? + + /** Put all required tests here using method [.addDataTypeTestData] */ + protected abstract fun initTests() + + @Throws(Exception::class) + override fun setupEnvironment(environment: TestDestinationEnv?) { + database = setupDatabase() + initTests() + createTables() + populateTables() + } + + protected abstract val nameSpace: String + /** + * Provide a source namespace. It's allocated place for table creation. It also known ask + * "Database Schema" or "Dataset" + * + * @return source name space + */ + get + + /** + * Test the 'discover' command. TODO (liren): Some existing databases may fail testDataTypes(), + * so it is turned off by default. It should be enabled for all databases eventually. + */ + protected open fun testCatalog(): Boolean { + return false + } + + /** + * The test checks that the types from the catalog matches the ones discovered from the source. + * This test is disabled by default. To enable it you need to overwrite testCatalog() function. + */ + @Test + @Throws(Exception::class) + fun testDataTypes() { + if (testCatalog()) { + runDiscover() + val streams = + lastPersistedCatalog.streams + .stream() + .collect( + Collectors.toMap( + Function { obj: AirbyteStream -> obj.name }, + Function { s: AirbyteStream? -> s } + ) + ) + + // testDataHolders should be initialized using the `addDataTypeTestData` function + testDataHolders.forEach( + Consumer { testDataHolder: TestDataHolder -> + val airbyteStream = streams[testDataHolder.nameWithTestPrefix] + val jsonSchemaTypeMap = + Jsons.deserialize( + airbyteStream!!.jsonSchema["properties"][testColumnName].toString(), + MutableMap::class.java + ) as Map + Assertions.assertEquals( + testDataHolder.airbyteType.jsonSchemaTypeMap, + jsonSchemaTypeMap, + "Expected column type for " + testDataHolder.nameWithTestPrefix + ) + } + ) + } + } + + /** + * The test checks that connector can fetch prepared data without failure. It uses a prepared + * catalog and read the source using that catalog. Then makes sure that the expected values are + * the ones inserted in the source. + */ + @Test + @Throws(Exception::class) + open fun testDataContent() { + // Class used to make easier the error reporting + class MissedRecords( // Stream that is missing any value + var streamName: + String?, // Which are the values that has not being gathered from the source + var missedValues: List? + ) + + class UnexpectedRecord(val streamName: String, val unexpectedValue: String?) + + val catalog = configuredCatalog + val allMessages = runRead(catalog) + + val recordMessages = + allMessages!! + .stream() + .filter { m: AirbyteMessage -> m.type == AirbyteMessage.Type.RECORD } + .toList() + val expectedValues: MutableMap?> = HashMap() + val missedValuesByStream: MutableMap> = HashMap() + val unexpectedValuesByStream: MutableMap> = HashMap() + val testByName: MutableMap = HashMap() + + // If there is no expected value in the test set we don't include it in the list to be + // asserted + // (even if the table contains records) + testDataHolders.forEach( + Consumer { testDataHolder: TestDataHolder -> + if (!testDataHolder.expectedValues.isEmpty()) { + expectedValues[testDataHolder.nameWithTestPrefix] = + testDataHolder.expectedValues + testByName[testDataHolder.nameWithTestPrefix] = testDataHolder + } else { + LOGGER.warn("Missing expected values for type: " + testDataHolder.sourceType) + } + } + ) + + for (message in recordMessages) { + val streamName = message!!.record.stream + val expectedValuesForStream = expectedValues[streamName] + if (expectedValuesForStream != null) { + val value = getValueFromJsonNode(message.record.data[testColumnName]) + if (!expectedValuesForStream.contains(value)) { + unexpectedValuesByStream.putIfAbsent(streamName, ArrayList()) + unexpectedValuesByStream[streamName]!!.add(UnexpectedRecord(streamName, value)) + } else { + expectedValuesForStream.remove(value) + } + } + } + + // Gather all the missing values, so we don't stop the test in the first missed one + expectedValues.forEach { (streamName: String?, values: List?) -> + if (!values!!.isEmpty()) { + missedValuesByStream.putIfAbsent(streamName, ArrayList()) + missedValuesByStream[streamName]!!.add(MissedRecords(streamName, values)) + } + } + + val errorsByStream: MutableMap> = HashMap() + for (streamName in unexpectedValuesByStream.keys) { + errorsByStream.putIfAbsent(streamName, ArrayList()) + val test = testByName.getValue(streamName) + val unexpectedValues: List = unexpectedValuesByStream[streamName]!! + for (unexpectedValue in unexpectedValues) { + errorsByStream[streamName]!!.add( + "The stream '%s' checking type '%s' initialized at %s got unexpected values: %s".formatted( + streamName, + test.sourceType, + test!!.declarationLocation, + unexpectedValue + ) + ) + } + } + + for (streamName in missedValuesByStream.keys) { + errorsByStream.putIfAbsent(streamName, ArrayList()) + val test = testByName.getValue(streamName) + val missedValues: List = missedValuesByStream[streamName]!! + for (missedValue in missedValues) { + errorsByStream[streamName]!!.add( + "The stream '%s' checking type '%s' initialized at %s is missing values: %s".formatted( + streamName, + test.sourceType, + test!!.declarationLocation, + missedValue + ) + ) + } + } + + val errorStrings: MutableList = ArrayList() + for (errors in errorsByStream.values) { + errorStrings.add(StringUtils.join(errors, "\n")) + } + + Assertions.assertTrue(errorsByStream.isEmpty(), StringUtils.join(errorStrings, "\n")) + } + + @Throws(IOException::class) + protected fun getValueFromJsonNode(jsonNode: JsonNode?): String? { + if (jsonNode != null) { + if (jsonNode.isArray) { + return jsonNode.toString() + } + + var value = + (if (jsonNode.isBinary) jsonNode.binaryValue().contentToString() + else jsonNode.asText()) + value = (if (value != null && value == "null") null else value) + return value + } + return null + } + + /** + * Creates all tables and insert data described in the registered data type tests. + * + * @throws Exception might raise exception if configuration goes wrong or tables creation/insert + * scripts failed. + */ + @Throws(Exception::class) + protected open fun createTables() { + for (test in testDataHolders) { + database!!.query { ctx: DSLContext? -> + ctx!!.fetch(test.createSqlQuery) + LOGGER.info("Table {} is created.", test.nameWithTestPrefix) + null + } + } + } + + @Throws(Exception::class) + protected open fun populateTables() { + for (test in testDataHolders) { + database!!.query { ctx: DSLContext? -> + test.insertSqlQueries.forEach(Consumer { sql: String? -> ctx!!.fetch(sql) }) + LOGGER.info( + "Inserted {} rows in Ttable {}", + test.insertSqlQueries.size, + test.nameWithTestPrefix + ) + null + } + } + } + + protected val configuredCatalog: ConfiguredAirbyteCatalog + /** + * Configures streams for all registered data type tests. + * + * @return configured catalog + */ + get() = + ConfiguredAirbyteCatalog() + .withStreams( + testDataHolders + .stream() + .map { test: TestDataHolder -> + ConfiguredAirbyteStream() + .withSyncMode(SyncMode.INCREMENTAL) + .withCursorField(Lists.newArrayList(idColumnName)) + .withDestinationSyncMode(DestinationSyncMode.APPEND) + .withStream( + CatalogHelpers.createAirbyteStream( + String.format("%s", test.nameWithTestPrefix), + String.format("%s", nameSpace), + Field.of(idColumnName, JsonSchemaType.INTEGER), + Field.of(testColumnName, test.airbyteType) + ) + .withSourceDefinedCursor(true) + .withSourceDefinedPrimaryKey( + java.util.List.of(java.util.List.of(idColumnName)) + ) + .withSupportedSyncModes( + Lists.newArrayList( + SyncMode.FULL_REFRESH, + SyncMode.INCREMENTAL + ) + ) + ) + } + .collect(Collectors.toList()) + ) + + /** + * Register your test in the run scope. For each test will be created a table with one column of + * specified type. Note! If you register more than one test with the same type name, they will + * be run as independent tests with own streams. + * + * @param test comprehensive data type test + */ + fun addDataTypeTestData(test: TestDataHolder) { + testDataHolders.add(test) + test.setTestNumber( + testDataHolders + .stream() + .filter { t: TestDataHolder -> t.sourceType == test.sourceType } + .count() + ) + test.nameSpace = nameSpace + test.setIdColumnName(idColumnName) + test.setTestColumnName(testColumnName) + test.setDeclarationLocation(Thread.currentThread().stackTrace) + } + + private fun formatCollection(collection: Collection?): String { + return collection!!.stream().map { s: String? -> "`$s`" }.collect(Collectors.joining(", ")) + } + + val markdownTestTable: String + /** + * Builds a table with all registered test cases with values using Markdown syntax (can be + * used in the github). + * + * @return formatted list of test cases + */ + get() { + val table = + StringBuilder() + .append( + "|**Data Type**|**Insert values**|**Expected values**|**Comment**|**Common test result**|\n" + ) + .append("|----|----|----|----|----|\n") + + testDataHolders.forEach( + Consumer { test: TestDataHolder -> + table.append( + String.format( + "| %s | %s | %s | %s | %s |\n", + test.sourceType, + formatCollection(test.values), + formatCollection(test.expectedValues), + "", + "Ok" + ) + ) + } + ) + return table.toString() + } + + protected fun printMarkdownTestTable() { + LOGGER.info(markdownTestTable) + } + + @Throws(SQLException::class) + protected fun createDummyTableWithData(database: Database): ConfiguredAirbyteStream { + database.query { ctx: DSLContext? -> + ctx!!.fetch( + "CREATE TABLE " + + nameSpace + + ".random_dummy_table(id INTEGER PRIMARY KEY, test_column VARCHAR(63));" + ) + ctx.fetch("INSERT INTO " + nameSpace + ".random_dummy_table VALUES (2, 'Random Data');") + null + } + + return ConfiguredAirbyteStream() + .withSyncMode(SyncMode.INCREMENTAL) + .withCursorField(Lists.newArrayList("id")) + .withDestinationSyncMode(DestinationSyncMode.APPEND) + .withStream( + CatalogHelpers.createAirbyteStream( + "random_dummy_table", + nameSpace, + Field.of("id", JsonSchemaType.INTEGER), + Field.of("test_column", JsonSchemaType.STRING) + ) + .withSourceDefinedCursor(true) + .withSupportedSyncModes( + Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL) + ) + .withSourceDefinedPrimaryKey(java.util.List.of(listOf("id"))) + ) + } + + protected fun extractStateMessages(messages: List): List { + return messages + .stream() + .filter { r: AirbyteMessage -> r.type == AirbyteMessage.Type.STATE } + .map { obj: AirbyteMessage -> obj.state } + .collect(Collectors.toList()) + } + + companion object { + private val LOGGER: Logger = + LoggerFactory.getLogger(AbstractSourceDatabaseTypeTest::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/PythonSourceAcceptanceTest.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/PythonSourceAcceptanceTest.kt new file mode 100644 index 0000000000000..c91023c8a095f --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/PythonSourceAcceptanceTest.kt @@ -0,0 +1,161 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.standardtest.source + +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.collect.Lists +import com.google.common.collect.Streams +import io.airbyte.commons.io.IOs +import io.airbyte.commons.io.LineGobbler +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import io.airbyte.protocol.models.v0.ConnectorSpecification +import io.airbyte.workers.TestHarnessUtils +import java.io.IOException +import java.nio.file.Files +import java.nio.file.Path +import java.util.* +import java.util.concurrent.TimeUnit +import java.util.function.Consumer +import org.junit.jupiter.api.Assertions +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * Extends TestSource such that it can be called using resources pulled from the file system. Will + * also add the ability to execute arbitrary scripts in the next version. + */ +class PythonSourceAcceptanceTest : SourceAcceptanceTest() { + private lateinit var testRoot: Path + + @get:Throws(IOException::class) + override val spec: ConnectorSpecification + get() = runExecutable(Command.GET_SPEC, ConnectorSpecification::class.java) + + @get:Throws(IOException::class) + override val config: JsonNode? + get() = runExecutable(Command.GET_CONFIG) + + @get:Throws(IOException::class) + override val configuredCatalog: ConfiguredAirbyteCatalog + get() = runExecutable(Command.GET_CONFIGURED_CATALOG, ConfiguredAirbyteCatalog::class.java) + + @get:Throws(IOException::class) + override val state: JsonNode? + get() = runExecutable(Command.GET_STATE) + + @Throws(IOException::class) + override fun assertFullRefreshMessages(allMessages: List) { + val regexTests = + Streams.stream( + runExecutable(Command.GET_REGEX_TESTS).withArray("tests").elements() + ) + .map { obj: JsonNode -> obj.textValue() } + .toList() + val stringMessages = + allMessages!! + .stream() + .map { `object`: AirbyteMessage -> Jsons.serialize(`object`) } + .toList() + LOGGER.info("Running " + regexTests.size + " regex tests...") + regexTests.forEach( + Consumer { regex: String -> + LOGGER.info("Looking for [$regex]") + Assertions.assertTrue( + stringMessages.stream().anyMatch { line: String -> + line.matches(regex.toRegex()) + }, + "Failed to find regex: $regex" + ) + } + ) + } + + override val imageName: String + get() = IMAGE_NAME + + @Throws(Exception::class) + override fun setupEnvironment(environment: TestDestinationEnv?) { + testRoot = + Files.createTempDirectory( + Files.createDirectories(Path.of("/tmp/standard_test")), + "pytest" + ) + runExecutableVoid(Command.SETUP) + } + + @Throws(Exception::class) + override fun tearDown(testEnv: TestDestinationEnv?) { + runExecutableVoid(Command.TEARDOWN) + } + + private enum class Command { + GET_SPEC, + GET_CONFIG, + GET_CONFIGURED_CATALOG, + GET_STATE, + GET_REGEX_TESTS, + SETUP, + TEARDOWN + } + + @Throws(IOException::class) + private fun runExecutable(cmd: Command, klass: Class): T { + return Jsons.`object`(runExecutable(cmd), klass) + } + + @Throws(IOException::class) + private fun runExecutable(cmd: Command): JsonNode { + return Jsons.deserialize(IOs.readFile(runExecutableInternal(cmd), OUTPUT_FILENAME)) + } + + @Throws(IOException::class) + private fun runExecutableVoid(cmd: Command) { + runExecutableInternal(cmd) + } + + @Throws(IOException::class) + private fun runExecutableInternal(cmd: Command): Path { + LOGGER.info("testRoot = $testRoot") + val dockerCmd: List = + Lists.newArrayList( + "docker", + "run", + "--rm", + "-i", + "-v", + String.format("%s:%s", testRoot, "/test_root"), + "-w", + testRoot.toString(), + "--network", + "host", + PYTHON_CONTAINER_NAME, + cmd.toString().lowercase(Locale.getDefault()), + "--out", + "/test_root" + ) + + val process = ProcessBuilder(dockerCmd).start() + LineGobbler.gobble(process.errorStream, { msg: String? -> LOGGER.error(msg) }) + LineGobbler.gobble(process.inputStream, { msg: String? -> LOGGER.info(msg) }) + + TestHarnessUtils.gentleClose(process, 1, TimeUnit.MINUTES) + + val exitCode = process.exitValue() + if (exitCode != 0) { + throw RuntimeException("python execution failed") + } + + return testRoot + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(PythonSourceAcceptanceTest::class.java) + private const val OUTPUT_FILENAME = "output.json" + + var IMAGE_NAME: String = "dummy_image_name" + var PYTHON_CONTAINER_NAME: String? = null + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/SourceAcceptanceTest.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/SourceAcceptanceTest.kt new file mode 100644 index 0000000000000..66b1dc8a1fa71 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/SourceAcceptanceTest.kt @@ -0,0 +1,476 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.standardtest.source + +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.node.ObjectNode +import com.google.common.collect.Iterables +import com.google.common.collect.Sets +import io.airbyte.commons.json.Jsons +import io.airbyte.configoss.StandardCheckConnectionOutput +import io.airbyte.protocol.models.v0.* +import java.util.* +import java.util.stream.Collectors +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +abstract class SourceAcceptanceTest : AbstractSourceConnectorTest() { + /** + * TODO hack: Various Singer integrations use cursor fields inclusively i.e: they output records + * whose cursor field >= the provided cursor value. This leads to the last record in a sync to + * always be the first record in the next sync. This is a fine assumption from a product POV + * since we offer at-least-once delivery. But for simplicity, the incremental test suite + * currently assumes that the second incremental read should output no records when provided the + * state from the first sync. This works for many integrations but not some Singer ones, so we + * hardcode the list of integrations to skip over when performing those tests. + */ + private val IMAGES_TO_SKIP_SECOND_INCREMENTAL_READ: Set = + Sets.newHashSet( + "airbyte/source-intercom-singer", + "airbyte/source-exchangeratesapi-singer", + "airbyte/source-hubspot", + "airbyte/source-iterable", + "airbyte/source-marketo-singer", + "airbyte/source-twilio-singer", + "airbyte/source-mixpanel-singer", + "airbyte/source-twilio-singer", + "airbyte/source-braintree-singer", + "airbyte/source-stripe-singer", + "airbyte/source-exchange-rates", + "airbyte/source-stripe", + "airbyte/source-github-singer", + "airbyte/source-gitlab-singer", + "airbyte/source-google-workspace-admin-reports", + "airbyte/source-zendesk-talk", + "airbyte/source-zendesk-support-singer", + "airbyte/source-quickbooks-singer", + "airbyte/source-jira" + ) + + /** + * FIXME: Some sources can't guarantee that there will be no events between two sequential sync + */ + private val IMAGES_TO_SKIP_IDENTICAL_FULL_REFRESHES: Set = + Sets.newHashSet("airbyte/source-google-workspace-admin-reports", "airbyte/source-kafka") + + @get:Throws(Exception::class) + protected abstract val spec: ConnectorSpecification + /** + * Specification for integration. Will be passed to integration where appropriate in each + * test. Should be valid. + * + * @return integration-specific configuration + */ + get + + @get:Throws(Exception::class) + protected abstract val configuredCatalog: ConfiguredAirbyteCatalog + /** + * The catalog to use to validate the output of read operations. This will be used as + * follows: + * + * Full Refresh syncs will be tested on all the input streams which support it Incremental + * syncs: - if the stream declares a source-defined cursor, it will be tested with an + * incremental sync using the default cursor. - if the stream requires a user-defined + * cursor, it will be tested with the input cursor in both cases, the input [.getState] will + * be used as the input state. + * + * @return + * @throws Exception + */ + get + + @get:Throws(Exception::class) + protected abstract val state: JsonNode? + /** @return a JSON file representing the state file to use when testing incremental syncs */ + get + + /** Verify that a spec operation issued to the connector returns a valid spec. */ + @Test + @Throws(Exception::class) + fun testGetSpec() { + Assertions.assertEquals( + spec, + runSpec(), + "Expected spec output by integration to be equal to spec provided by test runner" + ) + } + + /** + * Verify that a check operation issued to the connector with the input config file returns a + * success response. + */ + @Test + @Throws(Exception::class) + fun testCheckConnection() { + Assertions.assertEquals( + StandardCheckConnectionOutput.Status.SUCCEEDED, + runCheck().status, + "Expected check connection operation to succeed" + ) + } + + // /** + // * Verify that when given invalid credentials, that check connection returns a failed + // response. + // * Assume that the {@link TestSource#getFailCheckConfig()} is invalid. + // */ + // @Test + // public void testCheckConnectionInvalidCredentials() throws Exception { + // final OutputAndStatus output = runCheck(); + // assertTrue(output.getOutput().isPresent()); + // assertEquals(Status.FAILED, output.getOutput().get().getStatus()); + // } + /** + * Verifies when a discover operation is run on the connector using the given config file, a + * valid catalog is output by the connector. + */ + @Test + @Throws(Exception::class) + fun testDiscover() { + runDiscover() + val discoveredCatalog = lastPersistedCatalog + Assertions.assertNotNull(discoveredCatalog, "Expected discover to produce a catalog") + verifyCatalog(discoveredCatalog) + } + + /** Override this method to check the actual catalog. */ + @Throws(Exception::class) + protected open fun verifyCatalog(catalog: AirbyteCatalog?) { + // do nothing by default + } + + /** + * Configuring all streams in the input catalog to full refresh mode, verifies that a read + * operation produces some RECORD messages. + */ + @Test + @Throws(Exception::class) + fun testFullRefreshRead() { + if (!sourceSupportsFullRefresh()) { + LOGGER.info("Test skipped. Source does not support full refresh.") + return + } + + val catalog = withFullRefreshSyncModes(configuredCatalog) + val allMessages = runRead(catalog) + + Assertions.assertFalse( + filterRecords(allMessages).isEmpty(), + "Expected a full refresh sync to produce records" + ) + assertFullRefreshMessages(allMessages) + } + + /** Override this method to perform more specific assertion on the messages. */ + @Throws(Exception::class) + protected open fun assertFullRefreshMessages(allMessages: List) { + // do nothing by default + } + + /** + * Configuring all streams in the input catalog to full refresh mode, performs two read + * operations on all streams which support full refresh syncs. It then verifies that the RECORD + * messages output from both were identical. + */ + @Test + @Throws(Exception::class) + fun testIdenticalFullRefreshes() { + if (!sourceSupportsFullRefresh()) { + LOGGER.info("Test skipped. Source does not support full refresh.") + return + } + + if ( + IMAGES_TO_SKIP_IDENTICAL_FULL_REFRESHES.contains( + imageName.split(":".toRegex()).dropLastWhile { it.isEmpty() }.toTypedArray()[0] + ) + ) { + return + } + + val configuredCatalog = withFullRefreshSyncModes(configuredCatalog) + val recordMessagesFirstRun = filterRecords(runRead(configuredCatalog)) + val recordMessagesSecondRun = filterRecords(runRead(configuredCatalog)) + // the worker validates the messages, so we just validate the message, so we do not need to + // validate + // again (as long as we use the worker, which we will not want to do long term). + Assertions.assertFalse( + recordMessagesFirstRun.isEmpty(), + "Expected first full refresh to produce records" + ) + Assertions.assertFalse( + recordMessagesSecondRun.isEmpty(), + "Expected second full refresh to produce records" + ) + + assertSameRecords( + recordMessagesFirstRun, + recordMessagesSecondRun, + "Expected two full refresh syncs to produce the same records" + ) + } + + /** + * This test verifies that all streams in the input catalog which support incremental sync can + * do so correctly. It does this by running two read operations on the connector's Docker image: + * the first takes the configured catalog and config provided to this test as input. It then + * verifies that the sync produced a non-zero number of RECORD and STATE messages. + * + * The second read takes the same catalog and config used in the first test, plus the last STATE + * message output by the first read operation as the input state file. It verifies that no + * records are produced (since we read all records in the first sync). + * + * This test is performed only for streams which support incremental. Streams which do not + * support incremental sync are ignored. If no streams in the input catalog support incremental + * sync, this test is skipped. + */ + @Test + @Throws(Exception::class) + fun testIncrementalSyncWithState() { + if (!sourceSupportsIncremental()) { + return + } + + val configuredCatalog = withSourceDefinedCursors(configuredCatalog) + // only sync incremental streams + configuredCatalog.streams = + configuredCatalog.streams + .stream() + .filter { s: ConfiguredAirbyteStream -> s.syncMode == SyncMode.INCREMENTAL } + .collect(Collectors.toList()) + + val airbyteMessages = runRead(configuredCatalog, state) + val recordMessages = filterRecords(airbyteMessages) + val stateMessages = + airbyteMessages + .stream() + .filter { m: AirbyteMessage -> m.type == AirbyteMessage.Type.STATE } + .map { obj: AirbyteMessage -> obj.state } + .collect(Collectors.toList()) + Assertions.assertFalse( + recordMessages.isEmpty(), + "Expected the first incremental sync to produce records" + ) + Assertions.assertFalse( + stateMessages.isEmpty(), + "Expected incremental sync to produce STATE messages" + ) + + // TODO validate exact records + if ( + IMAGES_TO_SKIP_SECOND_INCREMENTAL_READ.contains( + imageName.split(":".toRegex()).dropLastWhile { it.isEmpty() }.toTypedArray()[0] + ) + ) { + return + } + + // when we run incremental sync again there should be no new records. Run a sync with the + // latest + // state message and assert no records were emitted. + var latestState: JsonNode? = null + for (stateMessage in stateMessages) { + if (stateMessage.type == AirbyteStateMessage.AirbyteStateType.STREAM) { + latestState = Jsons.jsonNode(stateMessages) + break + } else if (stateMessage.type == AirbyteStateMessage.AirbyteStateType.GLOBAL) { + latestState = Jsons.jsonNode(java.util.List.of(Iterables.getLast(stateMessages))) + break + } else { + throw RuntimeException("Unknown state type " + stateMessage.type) + } + } + + assert(Objects.nonNull(latestState)) + val secondSyncRecords = filterRecords(runRead(configuredCatalog, latestState)) + Assertions.assertTrue( + secondSyncRecords.isEmpty(), + "Expected the second incremental sync to produce no records when given the first sync's output state." + ) + } + + /** + * If the source does not support incremental sync, this test is skipped. + * + * Otherwise, this test runs two syncs: one where all streams provided in the input catalog sync + * in full refresh mode, and another where all the streams which in the input catalog which + * support incremental, sync in incremental mode (streams which don't support incremental sync + * in full refresh mode). Then, the test asserts that the two syncs produced the same RECORD + * messages. Any other type of message is disregarded. + */ + @Test + @Throws(Exception::class) + fun testEmptyStateIncrementalIdenticalToFullRefresh() { + if (!sourceSupportsIncremental()) { + return + } + + if (!sourceSupportsFullRefresh()) { + LOGGER.info("Test skipped. Source does not support full refresh.") + return + } + + val configuredCatalog = configuredCatalog + val fullRefreshCatalog = withFullRefreshSyncModes(configuredCatalog) + + val fullRefreshRecords = filterRecords(runRead(fullRefreshCatalog)) + val emptyStateRecords = + filterRecords(runRead(configuredCatalog, Jsons.jsonNode(HashMap()))) + Assertions.assertFalse( + fullRefreshRecords.isEmpty(), + "Expected a full refresh sync to produce records" + ) + Assertions.assertFalse( + emptyStateRecords.isEmpty(), + "Expected state records to not be empty" + ) + assertSameRecords( + fullRefreshRecords, + emptyStateRecords, + "Expected a full refresh sync and incremental sync with no input state to produce identical records" + ) + } + + /** + * In order to launch a source on Kubernetes in a pod, we need to be able to wrap the + * entrypoint. The source connector must specify its entrypoint in the AIRBYTE_ENTRYPOINT + * variable. This test ensures that the entrypoint environment variable is set. + */ + @Test + @Throws(Exception::class) + fun testEntrypointEnvVar() { + checkEntrypointEnvVariable() + } + + protected fun withSourceDefinedCursors( + catalog: ConfiguredAirbyteCatalog + ): ConfiguredAirbyteCatalog { + val clone = Jsons.clone(catalog) + for (configuredStream in clone.streams) { + if ( + configuredStream.syncMode == SyncMode.INCREMENTAL && + configuredStream.stream.sourceDefinedCursor != null && + configuredStream.stream.sourceDefinedCursor + ) { + configuredStream.cursorField = configuredStream.stream.defaultCursorField + } + } + return clone + } + + protected fun withFullRefreshSyncModes( + catalog: ConfiguredAirbyteCatalog + ): ConfiguredAirbyteCatalog { + val clone = Jsons.clone(catalog) + for (configuredStream in clone.streams) { + if (configuredStream.stream.supportedSyncModes.contains(SyncMode.FULL_REFRESH)) { + configuredStream.syncMode = SyncMode.FULL_REFRESH + configuredStream.destinationSyncMode = DestinationSyncMode.OVERWRITE + } + } + return clone + } + + @Throws(Exception::class) + private fun sourceSupportsIncremental(): Boolean { + return sourceSupports(SyncMode.INCREMENTAL) + } + + @Throws(Exception::class) + private fun sourceSupportsFullRefresh(): Boolean { + return sourceSupports(SyncMode.FULL_REFRESH) + } + + @Throws(Exception::class) + private fun sourceSupports(syncMode: SyncMode): Boolean { + val catalog = configuredCatalog + for (stream in catalog.streams) { + if (stream.stream.supportedSyncModes.contains(syncMode)) { + return true + } + } + return false + } + + private fun assertSameRecords( + expected: List, + actual: List, + message: String + ) { + val prunedExpected = + expected + .stream() + .map { m: AirbyteRecordMessage -> this.pruneEmittedAt(m) } + .collect(Collectors.toList()) + val prunedActual = + actual + .stream() + .map { m: AirbyteRecordMessage -> this.pruneEmittedAt(m) } + .map { m: AirbyteRecordMessage -> this.pruneCdcMetadata(m) } + .collect(Collectors.toList()) + Assertions.assertEquals(prunedExpected.size, prunedActual.size, message) + Assertions.assertTrue(prunedExpected.containsAll(prunedActual), message) + Assertions.assertTrue(prunedActual.containsAll(prunedExpected), message) + } + + private fun pruneEmittedAt(m: AirbyteRecordMessage): AirbyteRecordMessage { + return Jsons.clone(m).withEmittedAt(null) + } + + private fun pruneCdcMetadata(m: AirbyteRecordMessage): AirbyteRecordMessage { + val clone = Jsons.clone(m) + (clone.data as ObjectNode).remove(CDC_LSN) + (clone.data as ObjectNode).remove(CDC_LOG_FILE) + (clone.data as ObjectNode).remove(CDC_LOG_POS) + (clone.data as ObjectNode).remove(CDC_UPDATED_AT) + (clone.data as ObjectNode).remove(CDC_DELETED_AT) + (clone.data as ObjectNode).remove(CDC_EVENT_SERIAL_NO) + (clone.data as ObjectNode).remove(CDC_DEFAULT_CURSOR) + return clone + } + + companion object { + const val CDC_LSN: String = "_ab_cdc_lsn" + const val CDC_UPDATED_AT: String = "_ab_cdc_updated_at" + const val CDC_DELETED_AT: String = "_ab_cdc_deleted_at" + const val CDC_LOG_FILE: String = "_ab_cdc_log_file" + const val CDC_LOG_POS: String = "_ab_cdc_log_pos" + const val CDC_DEFAULT_CURSOR: String = "_ab_cdc_cursor" + const val CDC_EVENT_SERIAL_NO: String = "_ab_cdc_event_serial_no" + + private val LOGGER: Logger = LoggerFactory.getLogger(SourceAcceptanceTest::class.java) + + @JvmStatic + protected fun filterRecords( + messages: Collection + ): List { + return messages + .stream() + .filter { m: AirbyteMessage -> m.type == AirbyteMessage.Type.RECORD } + .map { obj: AirbyteMessage -> obj.record } + .collect(Collectors.toList()) + } + + @JvmStatic + public fun extractLatestState(stateMessages: List): JsonNode? { + var latestState: JsonNode? = null + for (stateMessage in stateMessages) { + if (stateMessage.type == AirbyteStateMessage.AirbyteStateType.STREAM) { + latestState = Jsons.jsonNode(stateMessages) + break + } else if (stateMessage.type == AirbyteStateMessage.AirbyteStateType.GLOBAL) { + latestState = + Jsons.jsonNode(java.util.List.of(Iterables.getLast(stateMessages))) + break + } else { + throw RuntimeException("Unknown state type " + stateMessage.type) + } + } + return latestState + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/TestDataHolder.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/TestDataHolder.kt new file mode 100644 index 0000000000000..e0f137b2547f9 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/TestDataHolder.kt @@ -0,0 +1,219 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.standardtest.source + +import io.airbyte.protocol.models.JsonSchemaType +import java.util.* + +class TestDataHolder +internal constructor( + val sourceType: String?, + val airbyteType: JsonSchemaType, + val values: List, + val expectedValues: MutableList, + private val createTablePatternSql: String, + private val insertPatternSql: String, + private val fullSourceDataType: String? +) { + var nameSpace: String? = null + private var testNumber: Long = 0 + private var idColumnName: String? = null + private var testColumnName: String? = null + + var declarationLocation: String = "" + private set + + class TestDataHolderBuilder internal constructor() { + private var sourceType: String? = null + private lateinit var airbyteType: JsonSchemaType + private val values: MutableList = ArrayList() + private val expectedValues: MutableList = ArrayList() + private var createTablePatternSql: String + private var insertPatternSql: String + private var fullSourceDataType: String? = null + + init { + this.createTablePatternSql = DEFAULT_CREATE_TABLE_SQL + this.insertPatternSql = DEFAULT_INSERT_SQL + } + + /** + * The name of the source data type. Duplicates by name will be tested independently from + * each others. Note that this name will be used for connector setup and table creation. If + * source syntax requires more details (E.g. "varchar" type requires length "varchar(50)"), + * you can additionally set custom data type syntax by + * [TestDataHolderBuilder.fullSourceDataType] method. + * + * @param sourceType source data type name + * @return builder + */ + fun sourceType(sourceType: String?): TestDataHolderBuilder { + this.sourceType = sourceType + if (fullSourceDataType == null) fullSourceDataType = sourceType + return this + } + + /** + * corresponding Airbyte data type. It requires for proper configuration + * [ConfiguredAirbyteStream] + * + * @param airbyteType Airbyte data type + * @return builder + */ + fun airbyteType(airbyteType: JsonSchemaType): TestDataHolderBuilder { + this.airbyteType = airbyteType + return this + } + + /** + * Set custom the create table script pattern. Use it if you source uses untypical table + * creation sql. Default patter described [.DEFAULT_CREATE_TABLE_SQL] Note! The patter + * should contain four String place holders for the: - namespace.table name (as one + * placeholder together) - id column name - test column name - test column data type + * + * @param createTablePatternSql creation table sql pattern + * @return builder + */ + fun createTablePatternSql(createTablePatternSql: String): TestDataHolderBuilder { + this.createTablePatternSql = createTablePatternSql + return this + } + + /** + * Set custom the insert record script pattern. Use it if you source uses untypical insert + * record sql. Default patter described [.DEFAULT_INSERT_SQL] Note! The patter should + * contains two String place holders for the table name and value. + * + * @param insertPatternSql creation table sql pattern + * @return builder + */ + fun insertPatternSql(insertPatternSql: String): TestDataHolderBuilder { + this.insertPatternSql = insertPatternSql + return this + } + + /** + * Allows to set extended data type for the table creation. E.g. The "varchar" type requires + * in MySQL requires length. In this case fullSourceDataType will be "varchar(50)". + * + * @param fullSourceDataType actual string for the column data type description + * @return builder + */ + fun fullSourceDataType(fullSourceDataType: String?): TestDataHolderBuilder { + this.fullSourceDataType = fullSourceDataType + return this + } + + /** + * Adds value(s) to the scope of a corresponding test. The values will be inserted into the + * created table. Note! The value will be inserted into the insert script without any + * transformations. Make sure that the value is in line with the source syntax. + * + * @param insertValue test value + * @return builder + */ + fun addInsertValues(vararg insertValue: String): TestDataHolderBuilder { + values.addAll(Arrays.asList(*insertValue)) + return this + } + + /** + * Adds expected value(s) to the test scope. If you add at least one value, it will check + * that all values are provided by corresponding streamer. + * + * @param expectedValue value which should be provided by a streamer + * @return builder + */ + fun addExpectedValues(vararg expectedValue: String?): TestDataHolderBuilder { + expectedValues.addAll(Arrays.asList(*expectedValue)) + return this + } + + /** + * Add NULL value to the expected value list. If you need to add only one value and it's + * NULL, you have to use this method instead of [.addExpectedValues] + * + * @return builder + */ + fun addNullExpectedValue(): TestDataHolderBuilder { + expectedValues.add(null) + return this + } + + fun build(): TestDataHolder { + return TestDataHolder( + sourceType, + airbyteType, + values, + expectedValues, + createTablePatternSql, + insertPatternSql, + fullSourceDataType + ) + } + } + + fun setTestNumber(testNumber: Long) { + this.testNumber = testNumber + } + + fun setIdColumnName(idColumnName: String?) { + this.idColumnName = idColumnName + } + + fun setTestColumnName(testColumnName: String?) { + this.testColumnName = testColumnName + } + + val nameWithTestPrefix: String + get() = // source type may include space (e.g. "character varying") + nameSpace + "_" + testNumber + "_" + sourceType!!.replace("\\s".toRegex(), "_") + + val createSqlQuery: String + get() = + String.format( + createTablePatternSql, + (if (nameSpace != null) "$nameSpace." else "") + this.nameWithTestPrefix, + idColumnName, + testColumnName, + fullSourceDataType + ) + + fun setDeclarationLocation(declarationLocation: Array) { + this.declarationLocation = Arrays.asList(*declarationLocation).subList(2, 3).toString() + } + + val insertSqlQueries: List + get() { + val insertSqls: MutableList = ArrayList() + var rowId = 1 + for (value in values) { + insertSqls.add( + String.format( + insertPatternSql, + (if (nameSpace != null) "$nameSpace." else "") + this.nameWithTestPrefix, + rowId++, + value + ) + ) + } + return insertSqls + } + + companion object { + private const val DEFAULT_CREATE_TABLE_SQL = + "CREATE TABLE %1\$s(%2\$s INTEGER PRIMARY KEY, %3\$s %4\$s)" + private const val DEFAULT_INSERT_SQL = "INSERT INTO %1\$s VALUES (%2\$s, %3\$s)" + + /** + * The builder allows to setup any comprehensive data type test. + * + * @return builder for setup comprehensive test + */ + @JvmStatic + fun builder(): TestDataHolderBuilder { + return TestDataHolderBuilder() + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/TestDestinationEnv.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/TestDestinationEnv.kt new file mode 100644 index 0000000000000..c58b6150f12a8 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/TestDestinationEnv.kt @@ -0,0 +1,8 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.standardtest.source + +import java.nio.file.Path + +class TestDestinationEnv(val localRoot: Path?) diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/TestEnvConfigs.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/TestEnvConfigs.kt new file mode 100644 index 0000000000000..d8fb72eace198 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/TestEnvConfigs.kt @@ -0,0 +1,158 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.standardtest.source + +import io.airbyte.commons.lang.Exceptions +import io.airbyte.commons.map.MoreMaps +import io.airbyte.commons.version.AirbyteVersion +import java.util.* +import java.util.function.Function +import java.util.function.Supplier +import java.util.stream.Collectors +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * This class passes environment variable to the DockerProcessFactory that runs the source in the + * SourceAcceptanceTest. + */ +// todo (cgardens) - this cloud_deployment implicit interface is going to bite us. +class TestEnvConfigs private constructor(envMap: Map) { + enum class DeploymentMode { + OSS, + CLOUD + } + + enum class WorkerEnvironment { + DOCKER, + KUBERNETES + } + + private val getEnv = Function { key: String -> envMap.getValue(key) } + private val getAllEnvKeys = Supplier { envMap.keys } + + constructor() : this(System.getenv()) + + val airbyteRole: String + // CORE + get() = getEnv(AIRBYTE_ROLE) + + val airbyteVersion: AirbyteVersion + get() = AirbyteVersion(getEnsureEnv(AIRBYTE_VERSION)) + + val deploymentMode: DeploymentMode + get() = + getEnvOrDefault(DEPLOYMENT_MODE, DeploymentMode.OSS) { s: String -> + try { + return@getEnvOrDefault DeploymentMode.valueOf(s) + } catch (e: IllegalArgumentException) { + LOGGER.info(s + " not recognized, defaulting to " + DeploymentMode.OSS) + return@getEnvOrDefault DeploymentMode.OSS + } + } + + val workerEnvironment: WorkerEnvironment + get() = + getEnvOrDefault(WORKER_ENVIRONMENT, WorkerEnvironment.DOCKER) { s: String -> + WorkerEnvironment.valueOf(s.uppercase(Locale.getDefault())) + } + + val jobDefaultEnvMap: Map + /** + * There are two types of environment variables available to the job container: + * + * * Exclusive variables prefixed with JOB_DEFAULT_ENV_PREFIX + * * Shared variables defined in JOB_SHARED_ENVS + */ + get() { + val jobPrefixedEnvMap = + getAllEnvKeys + .get() + .stream() + .filter { key: String -> key.startsWith(JOB_DEFAULT_ENV_PREFIX) } + .collect( + Collectors.toMap( + Function { key: String -> key.replace(JOB_DEFAULT_ENV_PREFIX, "") }, + getEnv + ) + ) + // This method assumes that these shared env variables are not critical to the execution + // of the jobs, and only serve as metadata. So any exception is swallowed and default to + // an empty string. Change this logic if this assumption no longer holds. + val jobSharedEnvMap = + JOB_SHARED_ENVS.entries + .stream() + .collect( + Collectors.toMap( + Function { obj: Map.Entry> -> + obj.key + }, + Function { entry: Map.Entry> -> + Exceptions.swallowWithDefault( + { Objects.requireNonNullElse(entry.value.apply(this), "") }, + "" + ) + } + ) + ) + return MoreMaps.merge(jobPrefixedEnvMap, jobSharedEnvMap) + } + + fun getEnvOrDefault(key: String, defaultValue: T, parser: Function): T { + return getEnvOrDefault(key, defaultValue, parser, false) + } + + fun getEnvOrDefault( + key: String, + defaultValue: T, + parser: Function, + isSecret: Boolean + ): T { + val value = getEnv.apply(key) + if (value != null && !value.isEmpty()) { + return parser.apply(value) + } else { + LOGGER.info( + "Using default value for environment variable {}: '{}'", + key, + if (isSecret) "*****" else defaultValue + ) + return defaultValue + } + } + + fun getEnv(name: String): String { + return getEnv.apply(name) + } + + fun getEnsureEnv(name: String): String { + val value = getEnv(name) + checkNotNull(value != null) { "$name environment variable cannot be null" } + + return value!! + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(TestEnvConfigs::class.java) + + // env variable names + const val AIRBYTE_ROLE: String = "AIRBYTE_ROLE" + const val AIRBYTE_VERSION: String = "AIRBYTE_VERSION" + const val WORKER_ENVIRONMENT: String = "WORKER_ENVIRONMENT" + const val DEPLOYMENT_MODE: String = "DEPLOYMENT_MODE" + const val JOB_DEFAULT_ENV_PREFIX: String = "JOB_DEFAULT_ENV_" + + val JOB_SHARED_ENVS: Map> = + java.util.Map.of( + AIRBYTE_VERSION, + Function { instance: TestEnvConfigs -> instance.airbyteVersion.serialize() }, + AIRBYTE_ROLE, + Function { obj: TestEnvConfigs -> obj.airbyteRole }, + DEPLOYMENT_MODE, + Function { instance: TestEnvConfigs -> instance.deploymentMode.name }, + WORKER_ENVIRONMENT, + Function { instance: TestEnvConfigs -> instance.workerEnvironment.name } + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/TestPythonSourceMain.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/TestPythonSourceMain.kt new file mode 100644 index 0000000000000..9a9c0b90ee52e --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/TestPythonSourceMain.kt @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.standardtest.source + +import net.sourceforge.argparse4j.ArgumentParsers +import net.sourceforge.argparse4j.inf.ArgumentParserException +import net.sourceforge.argparse4j.inf.Namespace + +/** + * Parse command line arguments and inject them into the test class before running the test. Then + * runs the tests. + */ +object TestPythonSourceMain { + @JvmStatic + fun main(args: Array) { + val parser = + ArgumentParsers.newFor(TestPythonSourceMain::class.java.name) + .build() + .defaultHelp(true) + .description("Run standard source tests") + + parser.addArgument("--imageName").help("Name of the integration image") + + parser.addArgument("--pythonContainerName").help("Name of the python integration image") + + var ns: Namespace? = null + try { + ns = parser.parseArgs(args) + } catch (e: ArgumentParserException) { + parser.handleError(e) + System.exit(1) + } + + val imageName = ns!!.getString("imageName") + val pythonContainerName = ns.getString("pythonContainerName") + + PythonSourceAcceptanceTest.Companion.IMAGE_NAME = imageName + PythonSourceAcceptanceTest.Companion.PYTHON_CONTAINER_NAME = pythonContainerName + + TestRunner.runTestClass(PythonSourceAcceptanceTest::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/TestRunner.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/TestRunner.kt new file mode 100644 index 0000000000000..c28e8c07b99e2 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/TestRunner.kt @@ -0,0 +1,39 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.standardtest.source + +import java.io.PrintWriter +import java.nio.charset.StandardCharsets +import org.junit.platform.engine.discovery.DiscoverySelectors +import org.junit.platform.launcher.core.LauncherDiscoveryRequestBuilder +import org.junit.platform.launcher.core.LauncherFactory +import org.junit.platform.launcher.listeners.SummaryGeneratingListener + +object TestRunner { + fun runTestClass(testClass: Class<*>?) { + val request = + LauncherDiscoveryRequestBuilder.request() + .selectors(DiscoverySelectors.selectClass(testClass)) + .build() + + val plan = LauncherFactory.create().discover(request) + val launcher = LauncherFactory.create() + + // Register a listener of your choice + val listener = SummaryGeneratingListener() + + launcher.execute(plan, listener) + + listener.summary.printFailuresTo(PrintWriter(System.out, false, StandardCharsets.UTF_8)) + listener.summary.printTo(PrintWriter(System.out, false, StandardCharsets.UTF_8)) + + if (listener.summary.testsFailedCount > 0) { + println( + "There are failing tests. See https://docs.airbyte.io/contributing-to-airbyte/building-new-connector/standard-source-tests " + + "for more information about the standard source test suite." + ) + System.exit(1) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/fs/ExecutableTestSource.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/fs/ExecutableTestSource.kt new file mode 100644 index 0000000000000..6b79f0863bc56 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/fs/ExecutableTestSource.kt @@ -0,0 +1,69 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.standardtest.source.fs + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.standardtest.source.SourceAcceptanceTest +import io.airbyte.cdk.integrations.standardtest.source.TestDestinationEnv +import io.airbyte.commons.io.IOs +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import io.airbyte.protocol.models.v0.ConnectorSpecification +import java.nio.file.Path + +/** + * Extends TestSource such that it can be called using resources pulled from the file system. Will + * also add the ability to execute arbitrary scripts in the next version. + */ +class ExecutableTestSource : SourceAcceptanceTest() { + class TestConfig( + val imageName: String, + val specPath: Path, + val configPath: Path, + val catalogPath: Path, + val statePath: Path? + ) + + override val spec: ConnectorSpecification + get() = + Jsons.deserialize( + IOs.readFile(TEST_CONFIG!!.specPath), + ConnectorSpecification::class.java + ) + + override val imageName: String + get() = TEST_CONFIG!!.imageName + + override val config: JsonNode? + get() = Jsons.deserialize(IOs.readFile(TEST_CONFIG!!.configPath)) + + override val configuredCatalog: ConfiguredAirbyteCatalog + get() = + Jsons.deserialize( + IOs.readFile(TEST_CONFIG!!.catalogPath), + ConfiguredAirbyteCatalog::class.java + ) + + override val state: JsonNode? + get() = + if (TEST_CONFIG!!.statePath != null) { + Jsons.deserialize(IOs.readFile(TEST_CONFIG!!.statePath)) + } else { + Jsons.deserialize("{}") + } + + @Throws(Exception::class) + override fun setupEnvironment(environment: TestDestinationEnv?) { + // no-op, for now + } + + @Throws(Exception::class) + override fun tearDown(testEnv: TestDestinationEnv?) { + // no-op, for now + } + + companion object { + var TEST_CONFIG: TestConfig? = null + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/fs/TestSourceMain.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/fs/TestSourceMain.kt new file mode 100644 index 0000000000000..b1552e38d7c2c --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/fs/TestSourceMain.kt @@ -0,0 +1,70 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.standardtest.source.fs + +import io.airbyte.cdk.integrations.standardtest.source.TestRunner +import java.nio.file.Path +import net.sourceforge.argparse4j.ArgumentParsers +import net.sourceforge.argparse4j.inf.ArgumentParserException +import net.sourceforge.argparse4j.inf.Namespace +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * Parse command line arguments and inject them into the test class before running the test. Then + * runs the tests. + */ +object TestSourceMain { + private val LOGGER: Logger = LoggerFactory.getLogger(TestSourceMain::class.java) + + @JvmStatic + fun main(args: Array) { + val parser = + ArgumentParsers.newFor(TestSourceMain::class.java.name) + .build() + .defaultHelp(true) + .description("Run standard source tests") + + parser + .addArgument("--imageName") + .required(true) + .help("Name of the source connector image e.g: airbyte/source-mailchimp") + + parser.addArgument("--spec").required(true).help("Path to file that contains spec json") + + parser.addArgument("--config").required(true).help("Path to file that contains config json") + + parser + .addArgument("--catalog") + .required(true) + .help("Path to file that contains catalog json") + + parser.addArgument("--state").required(false).help("Path to the file containing state") + + var ns: Namespace? = null + try { + ns = parser.parseArgs(args) + } catch (e: ArgumentParserException) { + parser.handleError(e) + System.exit(1) + } + + val imageName = ns!!.getString("imageName") + val specFile = ns.getString("spec") + val configFile = ns.getString("config") + val catalogFile = ns.getString("catalog") + val stateFile = ns.getString("state") + + ExecutableTestSource.Companion.TEST_CONFIG = + ExecutableTestSource.TestConfig( + imageName, + Path.of(specFile), + Path.of(configFile), + Path.of(catalogFile), + if (stateFile != null) Path.of(stateFile) else null + ) + + TestRunner.runTestClass(ExecutableTestSource::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/performancetest/AbstractSourceBasePerformanceTest.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/performancetest/AbstractSourceBasePerformanceTest.kt new file mode 100644 index 0000000000000..6f94ccff21e84 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/performancetest/AbstractSourceBasePerformanceTest.kt @@ -0,0 +1,35 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.standardtest.source.performancetest + +import io.airbyte.cdk.integrations.standardtest.source.AbstractSourceConnectorTest +import io.airbyte.cdk.integrations.standardtest.source.TestDestinationEnv + +/** + * This abstract class contains common methods for both steams - Fill Db scripts and Performance + * tests. + */ +abstract class AbstractSourceBasePerformanceTest : AbstractSourceConnectorTest() { + /** + * The column name will be used for a test column in the test tables. Override it if default + * name is not valid for your source. + */ + protected val testColumnName + get() = TEST_COLUMN_NAME + /** + * The stream name template will be used for a test tables. Override it if default name is not + * valid for your source. + */ + protected val testStreamNameTemplate + get() = TEST_STREAM_NAME_TEMPLATE + @Throws(Exception::class) + override fun setupEnvironment(environment: TestDestinationEnv?) { + // DO NOTHING. Mandatory to override. DB will be setup as part of each test + } + + companion object { + protected const val TEST_COLUMN_NAME: String = "test_column" + protected const val TEST_STREAM_NAME_TEMPLATE: String = "test_%S" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/performancetest/AbstractSourceFillDbWithTestData.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/performancetest/AbstractSourceFillDbWithTestData.kt new file mode 100644 index 0000000000000..37e88b9e02693 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/performancetest/AbstractSourceFillDbWithTestData.kt @@ -0,0 +1,154 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.standardtest.source.performancetest + +import io.airbyte.cdk.db.Database +import java.util.* +import java.util.stream.Stream +import org.jooq.DSLContext +import org.junit.jupiter.api.Disabled +import org.junit.jupiter.api.TestInstance +import org.junit.jupiter.params.ParameterizedTest +import org.junit.jupiter.params.provider.Arguments +import org.junit.jupiter.params.provider.MethodSource +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** This abstract class contains common methods for Fill Db scripts. */ +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +abstract class AbstractSourceFillDbWithTestData : AbstractSourceBasePerformanceTest() { + /** + * Setup the test database. All tables and data described in the registered tests will be put + * there. + * + * @return configured test database + * @throws Exception + * - might throw any exception during initialization. + */ + @Throws(Exception::class) protected abstract fun setupDatabase(dbName: String?): Database + + /** + * The test added test data to a new DB. 1. Set DB creds in static variables above 2. Set + * desired number for streams, coolumns and records 3. Run the test + */ + @Disabled + @ParameterizedTest + @MethodSource("provideParameters") + @Throws(Exception::class) + fun addTestData( + dbName: String?, + schemaName: String?, + numberOfDummyRecords: Int, + numberOfBatches: Int, + numberOfColumns: Int, + numberOfStreams: Int + ) { + val database = setupDatabase(dbName) + + database.query { ctx: DSLContext? -> + for (currentSteamNumber in 0 until numberOfStreams) { + val currentTableName = String.format(testStreamNameTemplate, currentSteamNumber) + + ctx!!.fetch(prepareCreateTableQuery(schemaName, numberOfColumns, currentTableName)) + for (i in 0 until numberOfBatches) { + val insertQueryTemplate = + prepareInsertQueryTemplate( + schemaName, + i, + numberOfColumns, + numberOfDummyRecords + ) + ctx.fetch(String.format(insertQueryTemplate, currentTableName)) + } + + c.info("Finished processing for stream $currentSteamNumber") + } + null + } + } + + /** + * This is a data provider for fill DB script,, Each argument's group would be ran as a separate + * test. Set the "testArgs" in test class of your DB in @BeforeTest method. + * + * 1st arg - a name of DB that will be used in jdbc connection string. 2nd arg - a schemaName + * that will be ised as a NameSpace in Configured Airbyte Catalog. 3rd arg - a number of + * expected records retrieved in each stream. 4th arg - a number of columns in each stream\table + * that will be use for Airbyte Cataloq configuration 5th arg - a number of streams to read in + * configured airbyte Catalog. Each stream\table in DB should be names like "test_0", + * "test_1",..., test_n. + * + * Stream.of( Arguments.of("your_db_name", "your_schema_name", 100, 2, 240, 1000) ); + */ + protected abstract fun provideParameters(): Stream? + + protected fun prepareCreateTableQuery( + dbSchemaName: String?, + numberOfColumns: Int, + currentTableName: String? + ): String { + val sj = StringJoiner(",") + for (i in 0 until numberOfColumns) { + sj.add(String.format(" %s%s %s", testColumnName, i, TEST_DB_FIELD_TYPE)) + } + + return String.format( + CREATE_DB_TABLE_TEMPLATE, + dbSchemaName, + currentTableName, + sj.toString() + ) + } + + protected fun prepareInsertQueryTemplate( + dbSchemaName: String?, + batchNumber: Int, + numberOfColumns: Int, + recordsNumber: Int + ): String { + val fieldsNames = StringJoiner(",") + fieldsNames.add("id") + + val baseInsertQuery = StringJoiner(",") + baseInsertQuery.add("id_placeholder") + + for (i in 0 until numberOfColumns) { + fieldsNames.add(testColumnName + i) + baseInsertQuery.add(TEST_VALUE_TEMPLATE_POSTGRES) + } + + val insertGroupValuesJoiner = StringJoiner(",") + + val batchMessages = batchNumber * 100 + + for (currentRecordNumber in batchMessages until recordsNumber + batchMessages) { + insertGroupValuesJoiner.add( + "(" + + baseInsertQuery + .toString() + .replace("id_placeholder".toRegex(), currentRecordNumber.toString()) + + ")" + ) + } + + return String.format( + INSERT_INTO_DB_TABLE_QUERY_TEMPLATE, + dbSchemaName, + "%s", + fieldsNames.toString(), + insertGroupValuesJoiner.toString() + ) + } + + companion object { + private const val CREATE_DB_TABLE_TEMPLATE = + "CREATE TABLE %s.%s(id INTEGER PRIMARY KEY, %s)" + private const val INSERT_INTO_DB_TABLE_QUERY_TEMPLATE = "INSERT INTO %s.%s (%s) VALUES %s" + private const val TEST_DB_FIELD_TYPE = "varchar(10)" + + protected val c: Logger = + LoggerFactory.getLogger(AbstractSourceFillDbWithTestData::class.java) + private const val TEST_VALUE_TEMPLATE_POSTGRES = "\'Value id_placeholder\'" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/performancetest/AbstractSourcePerformanceTest.kt b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/performancetest/AbstractSourcePerformanceTest.kt new file mode 100644 index 0000000000000..6e7848e02d47c --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/kotlin/io/airbyte/cdk/integrations/standardtest/source/performancetest/AbstractSourcePerformanceTest.kt @@ -0,0 +1,168 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.standardtest.source.performancetest + +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.collect.Lists +import io.airbyte.cdk.integrations.standardtest.source.TestDestinationEnv +import io.airbyte.protocol.models.Field +import io.airbyte.protocol.models.JsonSchemaType +import io.airbyte.protocol.models.v0.* +import java.util.function.Function +import java.util.stream.Collectors +import java.util.stream.Stream +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.TestInstance +import org.junit.jupiter.params.ParameterizedTest +import org.junit.jupiter.params.provider.Arguments +import org.junit.jupiter.params.provider.MethodSource +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** This abstract class contains common methods for Performance tests. */ +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +abstract class AbstractSourcePerformanceTest : AbstractSourceBasePerformanceTest() { + override var config: JsonNode? = null + /** + * The column name will be used for a PK column in the test tables. Override it if default name + * is not valid for your source. + */ + protected val idColumnName: String = "id" + + /** + * Setup the test database. All tables and data described in the registered tests will be put + * there. + * + * @throws Exception + * - might throw any exception during initialization. + */ + @Throws(Exception::class) protected abstract fun setupDatabase(dbName: String?) + + override fun tearDown(testEnv: TestDestinationEnv?) {} + + /** + * This is a data provider for performance tests, Each argument's group would be ran as a + * separate test. Set the "testArgs" in test class of your DB in @BeforeTest method. + * + * 1st arg - a name of DB that will be used in jdbc connection string. 2nd arg - a schemaName + * that will be ised as a NameSpace in Configured Airbyte Catalog. 3rd arg - a number of + * expected records retrieved in each stream. 4th arg - a number of columns in each stream\table + * that will be used for Airbyte Cataloq configuration 5th arg - a number of streams to read in + * configured airbyte Catalog. Each stream\table in DB should be names like "test_0", + * "test_1",..., test_n. + * + * Example: Stream.of( Arguments.of("test1000tables240columns200recordsDb", "dbo", 200, 240, + * 1000), Arguments.of("test5000tables240columns200recordsDb", "dbo", 200, 240, 1000), + * Arguments.of("newregular25tables50000records", "dbo", 50052, 8, 25), + * Arguments.of("newsmall1000tableswith10000rows", "dbo", 10011, 8, 1000) ); + */ + protected abstract fun provideParameters(): Stream? + + @ParameterizedTest + @MethodSource("provideParameters") + @Throws(Exception::class) + fun testPerformance( + dbName: String?, + schemaName: String?, + numberOfDummyRecords: Int, + numberOfColumns: Int, + numberOfStreams: Int + ) { + setupDatabase(dbName) + + val catalog = getConfiguredCatalog(schemaName, numberOfStreams, numberOfColumns) + val mapOfExpectedRecordsCount = + prepareMapWithExpectedRecords(numberOfStreams, numberOfDummyRecords) + val checkStatusMap = + runReadVerifyNumberOfReceivedMsgs(catalog, null, mapOfExpectedRecordsCount) + validateNumberOfReceivedMsgs(checkStatusMap) + } + + protected fun validateNumberOfReceivedMsgs(checkStatusMap: Map) { + // Iterate through all streams map and check for streams where + val failedStreamsMap = + checkStatusMap.entries + .stream() + .filter { el: Map.Entry -> el.value != 0 } + .collect( + Collectors.toMap( + Function { obj: Map.Entry -> obj.key }, + Function { obj: Map.Entry -> obj.value } + ) + ) + + if (failedStreamsMap.isNotEmpty()) { + Assertions.fail("Non all messages were delivered. $failedStreamsMap") + } + c.info("Finished all checks, no issues found for {} of streams", checkStatusMap.size) + } + + protected fun prepareMapWithExpectedRecords( + streamNumber: Int, + expectedRecordsNumberInEachStream: Int + ): MutableMap { + val resultMap: MutableMap = HashMap() // streamName&expected records in stream + + for (currentStream in 0 until streamNumber) { + val streamName = String.format(testStreamNameTemplate, currentStream) + resultMap[streamName] = expectedRecordsNumberInEachStream + } + return resultMap + } + + /** + * Configures streams for all registered data type tests. + * + * @return configured catalog + */ + protected fun getConfiguredCatalog( + nameSpace: String?, + numberOfStreams: Int, + numberOfColumns: Int + ): ConfiguredAirbyteCatalog { + val streams: MutableList = ArrayList() + + for (currentStream in 0 until numberOfStreams) { + // CREATE TABLE test.test_1_int(id INTEGER PRIMARY KEY, test_column int) + + val fields: MutableList = ArrayList() + + fields.add(Field.of(this.idColumnName, JsonSchemaType.NUMBER)) + for (currentColumnNumber in 0 until numberOfColumns) { + fields.add(Field.of(testColumnName + currentColumnNumber, JsonSchemaType.STRING)) + } + + val airbyteStream = + CatalogHelpers.createAirbyteStream( + String.format(testStreamNameTemplate, currentStream), + nameSpace, + fields + ) + .withSourceDefinedCursor(true) + .withSourceDefinedPrimaryKey( + java.util.List.of>( + java.util.List.of(this.idColumnName) + ) + ) + .withSupportedSyncModes( + Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL) + ) + + val configuredAirbyteStream = + ConfiguredAirbyteStream() + .withSyncMode(SyncMode.INCREMENTAL) + .withCursorField(Lists.newArrayList(this.idColumnName)) + .withDestinationSyncMode(DestinationSyncMode.APPEND) + .withStream(airbyteStream) + + streams.add(configuredAirbyteStream) + } + + return ConfiguredAirbyteCatalog().withStreams(streams) + } + + companion object { + protected val c: Logger = LoggerFactory.getLogger(AbstractSourcePerformanceTest::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/build.gradle b/airbyte-cdk/java/airbyte-cdk/dependencies/build.gradle index 786d31082c794..62895180e54fe 100644 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/build.gradle +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/build.gradle @@ -1,10 +1,6 @@ -import org.openapitools.generator.gradle.plugin.tasks.GenerateTask - plugins { id "com.github.eirnym.js2p" version "1.0" - id "de.undercouch.download" version "5.4.0" id "java-library" - id "org.openapi.generator" version "6.2.1" } java { @@ -19,128 +15,13 @@ java { options.compilerArgs += "-Xlint:-try" } } +compileKotlin.compilerOptions.allWarningsAsErrors = false +compileTestFixturesKotlin.compilerOptions.allWarningsAsErrors = false +compileTestKotlin.compilerOptions.allWarningsAsErrors = false -String specFile = "$projectDir/src/main/openapi/config.yaml" -String serverOutputDir = "$buildDir/generated/api/server" -String clientOutputDir = "$buildDir/generated/api/client" -String docsOutputDir = "$buildDir/generated/api/docs" -Map schemaMappingsValue = [ - 'OAuthConfiguration' : 'com.fasterxml.jackson.databind.JsonNode', - 'SourceDefinitionSpecification' : 'com.fasterxml.jackson.databind.JsonNode', - 'SourceConfiguration' : 'com.fasterxml.jackson.databind.JsonNode', - 'DestinationDefinitionSpecification': 'com.fasterxml.jackson.databind.JsonNode', - 'DestinationConfiguration' : 'com.fasterxml.jackson.databind.JsonNode', - 'StreamJsonSchema' : 'com.fasterxml.jackson.databind.JsonNode', - 'StateBlob' : 'com.fasterxml.jackson.databind.JsonNode', - 'FieldSchema' : 'com.fasterxml.jackson.databind.JsonNode', -] def generate = tasks.register('generate') -def generateApiServer = tasks.register('generateApiServer', GenerateTask) { - - inputs.file specFile - outputs.dir serverOutputDir - - generatorName = "jaxrs-spec" - inputSpec = specFile - outputDir = serverOutputDir - - apiPackage = "io.airbyte.api.generated" - invokerPackage = "io.airbyte.api.invoker.generated" - modelPackage = "io.airbyte.api.model.generated" - - schemaMappings.set(schemaMappingsValue) - - generateApiDocumentation = false - - configOptions.set([ - dateLibrary : "java8", - generatePom : "false", - interfaceOnly: "true", - /* - JAX-RS generator does not respect nullable properties defined in the OpenApi Spec. - It means that if a field is not nullable but not set it is still returning a null value for this field in the serialized json. - The below Jackson annotation is made to only keep non null values in serialized json. - We are not yet using nullable=true properties in our OpenApi so this is a valid workaround at the moment to circumvent the default JAX-RS behavior described above. - Feel free to read the conversation on https://github.com/airbytehq/airbyte/pull/13370 for more details. - */ - additionalModelTypeAnnotations: "\n@com.fasterxml.jackson.annotation.JsonInclude(com.fasterxml.jackson.annotation.JsonInclude.Include.NON_NULL)", - ]) -} -generate.configure { - dependsOn generateApiServer -} - -def generateApiClient = tasks.register('generateApiClient', GenerateTask) { - - inputs.file specFile - outputs.dir clientOutputDir - - generatorName = "java" - inputSpec = specFile - outputDir = clientOutputDir - - apiPackage = "io.airbyte.api.client.generated" - invokerPackage = "io.airbyte.api.client.invoker.generated" - modelPackage = "io.airbyte.api.client.model.generated" - - schemaMappings.set(schemaMappingsValue) - - library = "native" - - generateApiDocumentation = false - - configOptions.set([ - dateLibrary : "java8", - generatePom : "false", - interfaceOnly: "true" - ]) -} -generate.configure { - dependsOn generateApiClient -} - -def generateApiDocs = tasks.register('generateApiDocs', GenerateTask) { - - generatorName = "html" - inputSpec = specFile - outputDir = docsOutputDir - - apiPackage = "io.airbyte.api.client.generated" - invokerPackage = "io.airbyte.api.client.invoker.generated" - modelPackage = "io.airbyte.api.client.model.generated" - - schemaMappings.set(schemaMappingsValue) - - generateApiDocumentation = false - - configOptions.set([ - dateLibrary : "java8", - generatePom : "false", - interfaceOnly: "true" - ]) -} -def deleteExistingDocs = tasks.register('deleteOldApiDocs', Delete) { - delete rootProject.file("docs/reference/api/generated-api-html") -} -deleteExistingDocs.configure { - dependsOn generateApiDocs -} -def copyApiDocs = tasks.register('copyApiDocs', Copy) { - from(docsOutputDir) { - include "**/*.html" - } - into rootProject.file("docs/reference/api/generated-api-html") - includeEmptyDirs = false -} -copyApiDocs.configure { - dependsOn deleteExistingDocs -} -generate.configure { - dependsOn copyApiDocs -} - dependencies { api platform('com.fasterxml.jackson:jackson-bom:2.15.2') api 'com.fasterxml.jackson.core:jackson-annotations' @@ -149,7 +30,7 @@ dependencies { api 'com.fasterxml.jackson.datatype:jackson-datatype-jsr310' api 'com.google.guava:guava:33.0.0-jre' api 'commons-io:commons-io:2.15.1' - api ('io.airbyte.airbyte-protocol:protocol-models:0.5.0') { exclude group: 'com.google.api-client', module: 'google-api-client' } + api ('io.airbyte.airbyte-protocol:protocol-models:0.7.0') { exclude group: 'com.google.api-client', module: 'google-api-client' } api 'javax.annotation:javax.annotation-api:1.3.2' api 'org.apache.commons:commons-compress:1.25.0' api 'org.apache.commons:commons-lang3:3.14.0' @@ -159,6 +40,8 @@ dependencies { api 'org.apache.logging.log4j:log4j-slf4j2-impl:2.21.1' api 'org.slf4j:log4j-over-slf4j:2.0.11' api 'org.slf4j:slf4j-api:2.0.11' + api 'io.github.oshai:kotlin-logging-jvm:5.1.0' + implementation 'com.jayway.jsonpath:json-path:2.7.0' implementation 'com.networknt:json-schema-validator:1.0.72' @@ -169,10 +52,16 @@ dependencies { implementation 'me.andrz.jackson:jackson-json-reference-core:0.3.2' // needed so that we can follow $ref when parsing json implementation 'org.openapitools:jackson-databind-nullable:0.2.1' + testFixturesApi testFixtures(project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-core')) + + testFixturesApi ('io.airbyte:airbyte-api:0.55.2') { transitive = false } + testFixturesApi 'org.jetbrains.kotlin:kotlin-test' + testFixturesImplementation 'io.swagger:swagger-annotations:1.6.2' testFixturesImplementation 'org.apache.ant:ant:1.10.11' testImplementation 'com.squareup.okhttp3:mockwebserver:4.9.1' + testImplementation 'org.mockito.kotlin:mockito-kotlin:5.2.1' } @@ -196,23 +85,14 @@ generate.configure { dependsOn tasks.named('generateJsonSchema2Pojo') } - -sourceSets { - main { - java { - srcDirs([ - "$projectDir/src/main/java", - "${serverOutputDir}/src/gen/java", - "${clientOutputDir}/src/main/java", - ]) - } - resources { - srcDir "$projectDir/src/main/openapi/" - } - } +tasks.named('compileJava').configure { + dependsOn generate +} +tasks.named('compileTestJava').configure { + dependsOn generate } -tasks.named('compileJava').configure { +tasks.named('compileKotlin').configure { dependsOn generate } tasks.named('compileTestJava').configure { diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/api/client/AirbyteApiClient.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/api/client/AirbyteApiClient.java deleted file mode 100644 index 0dfb1d94403c3..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/api/client/AirbyteApiClient.java +++ /dev/null @@ -1,199 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.api.client; - -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.api.client.generated.AttemptApi; -import io.airbyte.api.client.generated.ConnectionApi; -import io.airbyte.api.client.generated.DestinationApi; -import io.airbyte.api.client.generated.DestinationDefinitionApi; -import io.airbyte.api.client.generated.DestinationDefinitionSpecificationApi; -import io.airbyte.api.client.generated.HealthApi; -import io.airbyte.api.client.generated.JobsApi; -import io.airbyte.api.client.generated.OperationApi; -import io.airbyte.api.client.generated.SourceApi; -import io.airbyte.api.client.generated.SourceDefinitionApi; -import io.airbyte.api.client.generated.SourceDefinitionSpecificationApi; -import io.airbyte.api.client.generated.StateApi; -import io.airbyte.api.client.generated.WorkspaceApi; -import io.airbyte.api.client.invoker.generated.ApiClient; -import java.util.Random; -import java.util.concurrent.Callable; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * This class is meant to consolidate all our API endpoints into a fluent-ish client. Currently, all - * open API generators create a separate class per API "root-route". For example, if our API has two - * routes "/v1/First/get" and "/v1/Second/get", OpenAPI generates (essentially) the following files: - *

    - * ApiClient.java, FirstApi.java, SecondApi.java - *

    - * To call the API type-safely, we'd do new FirstApi(new ApiClient()).get() or new SecondApi(new - * ApiClient()).get(), which can get cumbersome if we're interacting with many pieces of the API. - *

    - * This is currently manually maintained. We could look into autogenerating it if needed. - */ -public class AirbyteApiClient { - - private static final Logger LOGGER = LoggerFactory.getLogger(AirbyteApiClient.class); - private static final Random RANDOM = new Random(); - - public static final int DEFAULT_MAX_RETRIES = 4; - public static final int DEFAULT_RETRY_INTERVAL_SECS = 10; - public static final int DEFAULT_FINAL_INTERVAL_SECS = 10 * 60; - - private final ConnectionApi connectionApi; - private final DestinationDefinitionApi destinationDefinitionApi; - private final DestinationApi destinationApi; - private final DestinationDefinitionSpecificationApi destinationSpecificationApi; - private final JobsApi jobsApi; - private final PatchedLogsApi logsApi; - private final OperationApi operationApi; - private final SourceDefinitionApi sourceDefinitionApi; - private final SourceApi sourceApi; - private final SourceDefinitionSpecificationApi sourceDefinitionSpecificationApi; - private final WorkspaceApi workspaceApi; - private final HealthApi healthApi; - private final AttemptApi attemptApi; - private final StateApi stateApi; - - public AirbyteApiClient(final ApiClient apiClient) { - connectionApi = new ConnectionApi(apiClient); - destinationDefinitionApi = new DestinationDefinitionApi(apiClient); - destinationApi = new DestinationApi(apiClient); - destinationSpecificationApi = new DestinationDefinitionSpecificationApi(apiClient); - jobsApi = new JobsApi(apiClient); - logsApi = new PatchedLogsApi(apiClient); - operationApi = new OperationApi(apiClient); - sourceDefinitionApi = new SourceDefinitionApi(apiClient); - sourceApi = new SourceApi(apiClient); - sourceDefinitionSpecificationApi = new SourceDefinitionSpecificationApi(apiClient); - workspaceApi = new WorkspaceApi(apiClient); - healthApi = new HealthApi(apiClient); - attemptApi = new AttemptApi(apiClient); - stateApi = new StateApi(apiClient); - } - - public ConnectionApi getConnectionApi() { - return connectionApi; - } - - public DestinationDefinitionApi getDestinationDefinitionApi() { - return destinationDefinitionApi; - } - - public DestinationApi getDestinationApi() { - return destinationApi; - } - - public DestinationDefinitionSpecificationApi getDestinationDefinitionSpecificationApi() { - return destinationSpecificationApi; - } - - public JobsApi getJobsApi() { - return jobsApi; - } - - public SourceDefinitionApi getSourceDefinitionApi() { - return sourceDefinitionApi; - } - - public SourceApi getSourceApi() { - return sourceApi; - } - - public SourceDefinitionSpecificationApi getSourceDefinitionSpecificationApi() { - return sourceDefinitionSpecificationApi; - } - - public WorkspaceApi getWorkspaceApi() { - return workspaceApi; - } - - public PatchedLogsApi getLogsApi() { - return logsApi; - } - - public OperationApi getOperationApi() { - return operationApi; - } - - public HealthApi getHealthApi() { - return healthApi; - } - - public AttemptApi getAttemptApi() { - return attemptApi; - } - - public StateApi getStateApi() { - return stateApi; - } - - /** - * Default to 4 retries with a randomised 1 - 10 seconds interval between the first two retries and - * an 10-minute wait for the last retry. - */ - public static T retryWithJitter(final Callable call, final String desc) { - return retryWithJitter(call, desc, DEFAULT_RETRY_INTERVAL_SECS, DEFAULT_FINAL_INTERVAL_SECS, DEFAULT_MAX_RETRIES); - } - - /** - * Provides a simple retry wrapper for api calls. This retry behaviour is slightly different from - * generally available retries libraries - the last retry is able to wait an interval inconsistent - * with regular intervals/exponential backoff. - *

    - * Since the primary retries use case is long-running workflows, the benefit of waiting a couple of - * minutes as a last ditch effort to outlast networking disruption outweighs the cost of slightly - * longer jobs. - * - * @param call method to execute - * @param desc short readable explanation of why this method is executed - * @param jitterMaxIntervalSecs upper limit of the randomised retry interval. Minimum value is 1. - * @param finalIntervalSecs retry interval before the last retry. - */ - @VisibleForTesting - // This is okay since we are logging the stack trace, which PMD is not detecting. - @SuppressWarnings("PMD.PreserveStackTrace") - public static T retryWithJitter(final Callable call, - final String desc, - final int jitterMaxIntervalSecs, - final int finalIntervalSecs, - final int maxTries) { - int currRetries = 0; - boolean keepTrying = true; - - T data = null; - while (keepTrying && currRetries < maxTries) { - try { - LOGGER.info("Attempt {} to {}", currRetries, desc); - data = call.call(); - - keepTrying = false; - } catch (final Exception e) { - LOGGER.info("Attempt {} to {} error: {}", currRetries, desc, e); - currRetries++; - - // Sleep anywhere from 1 to jitterMaxIntervalSecs seconds. - final var backoffTimeSecs = Math.max(RANDOM.nextInt(jitterMaxIntervalSecs + 1), 1); - var backoffTimeMs = backoffTimeSecs * 1000; - - if (currRetries == maxTries - 1) { - // sleep for finalIntervalMins on the last attempt. - backoffTimeMs = finalIntervalSecs * 1000; - } - - try { - Thread.sleep(backoffTimeMs); - } catch (final InterruptedException ex) { - throw new RuntimeException(ex); - } - } - } - return data; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/api/client/PatchedLogsApi.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/api/client/PatchedLogsApi.java deleted file mode 100644 index dde17aeeb4c40..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/api/client/PatchedLogsApi.java +++ /dev/null @@ -1,138 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.api.client; - -import com.fasterxml.jackson.databind.ObjectMapper; -import io.airbyte.api.client.invoker.generated.ApiClient; -import io.airbyte.api.client.invoker.generated.ApiException; -import io.airbyte.api.client.invoker.generated.ApiResponse; -import io.airbyte.api.client.model.generated.LogsRequestBody; -import java.io.File; -import java.io.IOException; -import java.io.InputStream; -import java.net.URI; -import java.net.http.HttpClient; -import java.net.http.HttpRequest; -import java.net.http.HttpResponse; -import java.time.Duration; -import java.util.function.Consumer; -import org.apache.commons.io.FileUtils; - -/** - * This class is a copy of {@link io.airbyte.api.client.generated.LogsApi} except it allows Accept: - * text/plain. Without this modification, {@link io.airbyte.api.client.generated.LogsApi} returns a - * 406 because the generated code requests the wrong response type. - */ -public class PatchedLogsApi { - - private final HttpClient memberVarHttpClient; - private final ObjectMapper memberVarObjectMapper; - private final String memberVarBaseUri; - private final Consumer memberVarInterceptor; - private final Duration memberVarReadTimeout; - private final Consumer> memberVarResponseInterceptor; - - public PatchedLogsApi() { - this(new ApiClient()); - } - - public PatchedLogsApi(final ApiClient apiClient) { - memberVarHttpClient = apiClient.getHttpClient(); - memberVarObjectMapper = apiClient.getObjectMapper(); - memberVarBaseUri = apiClient.getBaseUri(); - memberVarInterceptor = apiClient.getRequestInterceptor(); - memberVarReadTimeout = apiClient.getReadTimeout(); - memberVarResponseInterceptor = apiClient.getResponseInterceptor(); - } - - /** - * Get logs - * - * @param logsRequestBody (required) - * @return File - * @throws ApiException if fails to make API call - */ - public File getLogs(final LogsRequestBody logsRequestBody) throws ApiException { - final ApiResponse localVarResponse = getLogsWithHttpInfo(logsRequestBody); - return localVarResponse.getData(); - } - - /** - * Get logs - * - * @param logsRequestBody (required) - * @return ApiResponse<File> - * @throws ApiException if fails to make API call - */ - public ApiResponse getLogsWithHttpInfo(final LogsRequestBody logsRequestBody) throws ApiException { - final HttpRequest.Builder localVarRequestBuilder = getLogsRequestBuilder(logsRequestBody); - try { - final HttpResponse localVarResponse = memberVarHttpClient.send( - localVarRequestBuilder.build(), - HttpResponse.BodyHandlers.ofInputStream()); - if (memberVarResponseInterceptor != null) { - memberVarResponseInterceptor.accept(localVarResponse); - } - if (isErrorResponse(localVarResponse)) { - throw new ApiException(localVarResponse.statusCode(), - "getLogs call received non-success response", - localVarResponse.headers(), - localVarResponse.body() == null ? null : new String(localVarResponse.body().readAllBytes())); - } - - final File tmpFile = File.createTempFile("patched-logs-api", "response"); // CHANGED - tmpFile.deleteOnExit(); // CHANGED - - FileUtils.copyInputStreamToFile(localVarResponse.body(), tmpFile); // CHANGED - - return new ApiResponse( - localVarResponse.statusCode(), - localVarResponse.headers().map(), - tmpFile // CHANGED - ); - } catch (final IOException e) { - throw new ApiException(e); - } catch (final InterruptedException e) { - Thread.currentThread().interrupt(); - throw new ApiException(e); - } - } - - private Boolean isErrorResponse(final HttpResponse httpResponse) { - return httpResponse.statusCode() / 100 != 2; - } - - private HttpRequest.Builder getLogsRequestBuilder(final LogsRequestBody logsRequestBody) throws ApiException { - // verify the required parameter 'logsRequestBody' is set - if (logsRequestBody == null) { - throw new ApiException(400, "Missing the required parameter 'logsRequestBody' when calling getLogs"); - } - - final HttpRequest.Builder localVarRequestBuilder = HttpRequest.newBuilder(); - - final String localVarPath = "/v1/logs/get"; - - localVarRequestBuilder.uri(URI.create(memberVarBaseUri + localVarPath)); - - localVarRequestBuilder.header("Content-Type", "application/json"); - - localVarRequestBuilder.header("Accept", "text/plain"); // CHANGED - - try { - final byte[] localVarPostBody = memberVarObjectMapper.writeValueAsBytes(logsRequestBody); - localVarRequestBuilder.method("POST", HttpRequest.BodyPublishers.ofByteArray(localVarPostBody)); - } catch (final IOException e) { - throw new ApiException(e); - } - if (memberVarReadTimeout != null) { - localVarRequestBuilder.timeout(memberVarReadTimeout); - } - if (memberVarInterceptor != null) { - memberVarInterceptor.accept(localVarRequestBuilder); - } - return localVarRequestBuilder; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/cli/Clis.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/cli/Clis.java deleted file mode 100644 index 5fde35eb08970..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/cli/Clis.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.cli; - -import java.util.ArrayList; -import java.util.List; -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.CommandLineParser; -import org.apache.commons.cli.DefaultParser; -import org.apache.commons.cli.HelpFormatter; -import org.apache.commons.cli.Options; -import org.apache.commons.cli.ParseException; - -public class Clis { - - /** - * Parse an options object - * - * @param args - command line args - * @param options - expected options - * @return object with parsed values. - */ - public static CommandLine parse(final String[] args, final Options options, final CommandLineParser parser, final String commandLineSyntax) { - final HelpFormatter helpFormatter = new HelpFormatter(); - - try { - return parser.parse(options, args); - } catch (final ParseException e) { - if (commandLineSyntax != null && !commandLineSyntax.isEmpty()) { - helpFormatter.printHelp(commandLineSyntax, options); - } - throw new IllegalArgumentException(e); - } - } - - public static CommandLine parse(final String[] args, final Options options, final String commandLineSyntax) { - return parse(args, options, new DefaultParser(), commandLineSyntax); - } - - public static CommandLine parse(final String[] args, final Options options, final CommandLineParser parser) { - return parse(args, options, parser, null); - } - - public static CommandLine parse(final String[] args, final Options options) { - return parse(args, options, new DefaultParser()); - } - - public static CommandLineParser getRelaxedParser() { - return new RelaxedParser(); - } - - // https://stackoverflow.com/questions/33874902/apache-commons-cli-1-3-1-how-to-ignore-unknown-arguments - private static class RelaxedParser extends DefaultParser { - - @Override - public CommandLine parse(final Options options, final String[] arguments) throws ParseException { - final List knownArgs = new ArrayList<>(); - for (int i = 0; i < arguments.length; i++) { - if (options.hasOption(arguments[i])) { - knownArgs.add(arguments[i]); - if (i + 1 < arguments.length && options.getOption(arguments[i]).hasArg()) { - knownArgs.add(arguments[i + 1]); - } - } - } - return super.parse(options, knownArgs.toArray(new String[0])); - } - - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/concurrency/CompletableFutures.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/concurrency/CompletableFutures.java deleted file mode 100644 index d7330332ca57d..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/concurrency/CompletableFutures.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.concurrency; - -import io.airbyte.commons.functional.Either; -import java.lang.reflect.Array; -import java.util.Arrays; -import java.util.List; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.CompletionStage; -import java.util.concurrent.atomic.AtomicInteger; - -public class CompletableFutures { - - /** - * Non-blocking implementation which does not use join. and returns an aggregated future. The order - * of results is preserved from the original list of futures. - * - * @param futures list of futures - * @param type of result - * @return a future that completes when all the input futures have completed - */ - public static CompletionStage>> allOf(final List> futures) { - CompletableFuture>> result = new CompletableFuture<>(); - final int size = futures.size(); - final AtomicInteger counter = new AtomicInteger(); - @SuppressWarnings("unchecked") - final Either[] results = (Either[]) Array.newInstance(Either.class, size); - // attach a whenComplete to all futures - for (int i = 0; i < size; i++) { - final int currentIndex = i; - futures.get(i).whenComplete((value, exception) -> { - // if exception is null, then the future completed successfully - // maybe synchronization is unnecessary here, but it's better to be safe - synchronized (results) { - if (exception == null) { - results[currentIndex] = Either.right(value); - } else { - if (exception instanceof Exception) { - results[currentIndex] = Either.left((Exception) exception); - } else { - // this should never happen - throw new RuntimeException("Unexpected exception in a future completion.", exception); - } - } - } - int completedCount = counter.incrementAndGet(); - if (completedCount == size) { - result.complete(Arrays.asList(results)); - } - }); - } - return result; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/concurrency/VoidCallable.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/concurrency/VoidCallable.java deleted file mode 100644 index c0ad3cc94053f..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/concurrency/VoidCallable.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.concurrency; - -import java.util.concurrent.Callable; - -@FunctionalInterface -public interface VoidCallable extends Callable { - - VoidCallable NOOP = () -> {}; - - default @Override Void call() throws Exception { - voidCall(); - return null; - } - - void voidCall() throws Exception; - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/concurrency/WaitingUtils.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/concurrency/WaitingUtils.java deleted file mode 100644 index a4e85f54d332b..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/concurrency/WaitingUtils.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.concurrency; - -import static java.lang.Thread.sleep; - -import java.time.Duration; -import java.util.function.Supplier; - -public class WaitingUtils { - - /** - * Wait for a condition or timeout. - * - * @param interval - frequency with which condition and timeout should be checked. - * @param timeout - how long to wait in total - * @param condition - supplier that returns whether the condition has been met. - * @return true if condition was met before the timeout was reached, otherwise false. - */ - @SuppressWarnings("BusyWait") - public static boolean waitForCondition(final Duration interval, final Duration timeout, final Supplier condition) { - Duration timeWaited = Duration.ZERO; - while (true) { - if (condition.get()) { - return true; - } - - if (timeout.minus(timeWaited).isNegative()) { - return false; - } - - try { - sleep(interval.toMillis()); - } catch (final InterruptedException e) { - throw new RuntimeException(e); - } - - timeWaited = timeWaited.plus(interval); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/constants/AirbyteSecretConstants.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/constants/AirbyteSecretConstants.java deleted file mode 100644 index 9788f565fd95c..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/constants/AirbyteSecretConstants.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.constants; - -/** - * Collection of constants related to Airbyte secrets defined in connector configurations. - */ -public final class AirbyteSecretConstants { - - /** - * The name of a configuration property field that has been identified as a secret. - */ - public static final String AIRBYTE_SECRET_FIELD = "airbyte_secret"; - - /** - * Mask value that is displayed in place of a value associated with an airbyte secret. - */ - public static final String SECRETS_MASK = "**********"; - - private AirbyteSecretConstants() { - // Private constructor to prevent instantiation - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/enums/Enums.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/enums/Enums.java deleted file mode 100644 index 5033339deed4a..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/enums/Enums.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.enums; - -import com.google.common.base.Preconditions; -import com.google.common.collect.Maps; -import com.google.common.collect.Sets; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.stream.Collectors; - -public class Enums { - - public static , T2 extends Enum> T2 convertTo(final T1 ie, final Class oe) { - if (ie == null) { - return null; - } - - return Enum.valueOf(oe, ie.name()); - } - - public static , T2 extends Enum> List convertListTo(final List ies, final Class oe) { - return ies - .stream() - .map(ie -> convertTo(ie, oe)) - .collect(Collectors.toList()); - } - - public static , T2 extends Enum> boolean isCompatible(final Class c1, - final Class c2) { - Preconditions.checkArgument(c1.isEnum()); - Preconditions.checkArgument(c2.isEnum()); - return c1.getEnumConstants().length == c2.getEnumConstants().length - && Sets.difference( - Arrays.stream(c1.getEnumConstants()).map(Enum::name).collect(Collectors.toSet()), - Arrays.stream(c2.getEnumConstants()).map(Enum::name).collect(Collectors.toSet())) - .isEmpty(); - } - - private static final Map, Map> NORMALIZED_ENUMS = Maps.newConcurrentMap(); - - @SuppressWarnings("unchecked") - public static > Optional toEnum(final String value, final Class enumClass) { - Preconditions.checkArgument(enumClass.isEnum()); - - if (!NORMALIZED_ENUMS.containsKey(enumClass)) { - final T[] values = enumClass.getEnumConstants(); - final Map mappings = Maps.newHashMapWithExpectedSize(values.length); - for (final T t : values) { - mappings.put(normalizeName(t.name()), t); - } - NORMALIZED_ENUMS.put(enumClass, mappings); - } - - return Optional.ofNullable((T) NORMALIZED_ENUMS.get(enumClass).get(normalizeName(value))); - } - - private static String normalizeName(final String name) { - return name.toLowerCase().replaceAll("[^a-zA-Z0-9]", ""); - } - - public static > Set valuesAsStrings(final Class e) { - Preconditions.checkArgument(e.isEnum()); - return Arrays.stream(e.getEnumConstants()).map(Enum::name).collect(Collectors.toSet()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/exceptions/ConfigErrorException.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/exceptions/ConfigErrorException.java deleted file mode 100644 index 144a5d12b554f..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/exceptions/ConfigErrorException.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.exceptions; - -/** - * An exception that indicates that there is something wrong with the user's connector setup. This - * exception is caught and emits an AirbyteTraceMessage. - */ -public class ConfigErrorException extends RuntimeException { - - private final String displayMessage; - - public ConfigErrorException(final String displayMessage) { - super(displayMessage); - this.displayMessage = displayMessage; - } - - public ConfigErrorException(final String displayMessage, final Throwable exception) { - super(displayMessage, exception); - this.displayMessage = displayMessage; - } - - public String getDisplayMessage() { - return displayMessage; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/exceptions/ConnectionErrorException.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/exceptions/ConnectionErrorException.java deleted file mode 100644 index 530eba0b5f1f3..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/exceptions/ConnectionErrorException.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.exceptions; - -public class ConnectionErrorException extends RuntimeException { - - private String stateCode; - private int errorCode; - private String exceptionMessage; - - public ConnectionErrorException(final String exceptionMessage) { - super(exceptionMessage); - } - - public ConnectionErrorException(final String stateCode, final Throwable exception) { - super(exception); - this.stateCode = stateCode; - this.exceptionMessage = exception.getMessage(); - } - - public ConnectionErrorException(final String stateCode, - final String exceptionMessage, - final Throwable exception) { - super(exception); - this.stateCode = stateCode; - this.exceptionMessage = exceptionMessage; - } - - public ConnectionErrorException(final String stateCode, - final int errorCode, - final String exceptionMessage, - final Throwable exception) { - super(exception); - this.stateCode = stateCode; - this.errorCode = errorCode; - this.exceptionMessage = exceptionMessage; - } - - public String getStateCode() { - return this.stateCode; - } - - public int getErrorCode() { - return errorCode; - } - - public String getExceptionMessage() { - return exceptionMessage; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/exceptions/SQLRuntimeException.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/exceptions/SQLRuntimeException.java deleted file mode 100644 index 02d8f8311c2ac..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/exceptions/SQLRuntimeException.java +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.exceptions; - -import java.sql.SQLException; - -/** - * Wrapper unchecked exception for {@link SQLException}. This can be used in functional interfaces - * that do not allow checked exceptions without the generic RuntimeException. - */ -public class SQLRuntimeException extends RuntimeException { - - public SQLRuntimeException(final SQLException cause) { - super(cause); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/features/EnvVariableFeatureFlags.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/features/EnvVariableFeatureFlags.java deleted file mode 100644 index e2e86d1c26881..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/features/EnvVariableFeatureFlags.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.features; - -import java.util.function.Function; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class EnvVariableFeatureFlags implements FeatureFlags { - - private static final Logger log = LoggerFactory.getLogger(EnvVariableFeatureFlags.class); - - public static final String AUTO_DETECT_SCHEMA = "AUTO_DETECT_SCHEMA"; - // Set this value to true to see all messages from the source to destination, set to one second - // emission - public static final String LOG_CONNECTOR_MESSAGES = "LOG_CONNECTOR_MESSAGES"; - public static final String APPLY_FIELD_SELECTION = "APPLY_FIELD_SELECTION"; - public static final String FIELD_SELECTION_WORKSPACES = "FIELD_SELECTION_WORKSPACES"; - public static final String CONCURRENT_SOURCE_STREAM_READ = "CONCURRENT_SOURCE_STREAM_READ"; - public static final String STRICT_COMPARISON_NORMALIZATION_WORKSPACES = "STRICT_COMPARISON_NORMALIZATION_WORKSPACES"; - public static final String STRICT_COMPARISON_NORMALIZATION_TAG = "STRICT_COMPARISON_NORMALIZATION_TAG"; - public static final String DEPLOYMENT_MODE = "DEPLOYMENT_MODE"; - - @Override - public boolean autoDetectSchema() { - return getEnvOrDefault(AUTO_DETECT_SCHEMA, true, Boolean::parseBoolean); - } - - @Override - public boolean logConnectorMessages() { - return getEnvOrDefault(LOG_CONNECTOR_MESSAGES, false, Boolean::parseBoolean); - } - - @Override - public boolean concurrentSourceStreamRead() { - return getEnvOrDefault(CONCURRENT_SOURCE_STREAM_READ, false, Boolean::parseBoolean); - } - - @Override - public boolean applyFieldSelection() { - return getEnvOrDefault(APPLY_FIELD_SELECTION, false, Boolean::parseBoolean); - } - - @Override - public String fieldSelectionWorkspaces() { - return getEnvOrDefault(FIELD_SELECTION_WORKSPACES, "", (arg) -> arg); - } - - @Override - public String strictComparisonNormalizationWorkspaces() { - return getEnvOrDefault(STRICT_COMPARISON_NORMALIZATION_WORKSPACES, "", (arg) -> arg); - } - - @Override - public String strictComparisonNormalizationTag() { - return getEnvOrDefault(STRICT_COMPARISON_NORMALIZATION_TAG, "strict_comparison2", (arg) -> arg); - } - - @Override - public String deploymentMode() { - return getEnvOrDefault(DEPLOYMENT_MODE, "", (arg) -> arg); - } - - // TODO: refactor in order to use the same method than the ones in EnvConfigs.java - public T getEnvOrDefault(final String key, final T defaultValue, final Function parser) { - final String value = System.getenv(key); - if (value != null && !value.isEmpty()) { - return parser.apply(value); - } else { - log.debug("Using default value for environment variable {}: '{}'", key, defaultValue); - return defaultValue; - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/features/FeatureFlagHelper.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/features/FeatureFlagHelper.java deleted file mode 100644 index 8b47c576f61b0..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/features/FeatureFlagHelper.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.features; - -import com.google.common.annotations.VisibleForTesting; -import java.util.HashSet; -import java.util.Set; -import java.util.UUID; -import java.util.function.Function; -import lombok.extern.slf4j.Slf4j; - -@Slf4j -public class FeatureFlagHelper { - - public static boolean isFieldSelectionEnabledForWorkspace(final FeatureFlags featureFlags, final UUID workspaceId) { - return isWorkspaceIncludedInFlag(featureFlags, FeatureFlags::fieldSelectionWorkspaces, workspaceId, "field selection") - || featureFlags.applyFieldSelection(); - } - - @VisibleForTesting - static boolean isWorkspaceIncludedInFlag(final FeatureFlags featureFlags, - final Function flagRetriever, - final UUID workspaceId, - final String context) { - final String workspaceIdsString = flagRetriever.apply(featureFlags); - final Set workspaceIds = new HashSet<>(); - if (workspaceIdsString != null && !workspaceIdsString.isEmpty()) { - for (final String id : workspaceIdsString.split(",")) { - try { - workspaceIds.add(UUID.fromString(id)); - } catch (final IllegalArgumentException e) { - log.warn("Malformed workspace id for {}: {}", context, id); - } - } - } - return workspaceId != null && workspaceIds.contains(workspaceId); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/features/FeatureFlags.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/features/FeatureFlags.java deleted file mode 100644 index fa55fbd9484c3..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/features/FeatureFlags.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.features; - -/** - * Interface that describe which features are activated in airbyte. Currently, the only - * implementation relies on env. Ideally it should be on some DB. - */ -public interface FeatureFlags { - - boolean autoDetectSchema(); - - boolean logConnectorMessages(); - - boolean concurrentSourceStreamRead(); - - /** - * Return true if field selection should be applied. See also fieldSelectionWorkspaces. - * - * @return whether field selection should be applied - */ - boolean applyFieldSelection(); - - /** - * Get the workspaces allow-listed for field selection. This should take precedence over - * applyFieldSelection. - * - * @return a comma-separated list of workspace ids where field selection should be enabled. - */ - String fieldSelectionWorkspaces(); - - /** - * Get the workspaces allow-listed for strict incremental comparison in normalization. This takes - * precedence over the normalization version in destination_definitions.yaml. - * - * @return a comma-separated list of workspace ids where strict incremental comparison should be - * enabled in normalization. - */ - String strictComparisonNormalizationWorkspaces(); - - /** - * Get the Docker image tag representing the normalization version with strict-comparison. - * - * @return The Docker image tag representing the normalization version with strict-comparison - */ - String strictComparisonNormalizationTag(); - - /** - * Get the deployment mode used to deploy a connector. - * - * @return empty string for the default deployment mode, "CLOUD" for cloud deployment mode. - */ - String deploymentMode(); - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/features/FeatureFlagsWrapper.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/features/FeatureFlagsWrapper.java deleted file mode 100644 index 17cdfa91dcbfb..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/features/FeatureFlagsWrapper.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.features; - -public class FeatureFlagsWrapper implements FeatureFlags { - - /** - * Overrides the {@link FeatureFlags#deploymentMode} method in the feature flags. - */ - static public FeatureFlags overridingDeploymentMode( - final FeatureFlags wrapped, - final String deploymentMode) { - return new FeatureFlagsWrapper(wrapped) { - - @Override - public String deploymentMode() { - return deploymentMode; - } - - }; - } - - private final FeatureFlags wrapped; - - public FeatureFlagsWrapper(FeatureFlags wrapped) { - this.wrapped = wrapped; - } - - @Override - public boolean autoDetectSchema() { - return wrapped.autoDetectSchema(); - } - - @Override - public boolean logConnectorMessages() { - return wrapped.logConnectorMessages(); - } - - @Override - public boolean concurrentSourceStreamRead() { - return wrapped.concurrentSourceStreamRead(); - } - - @Override - public boolean applyFieldSelection() { - return wrapped.applyFieldSelection(); - } - - @Override - public String fieldSelectionWorkspaces() { - return wrapped.fieldSelectionWorkspaces(); - } - - @Override - public String strictComparisonNormalizationWorkspaces() { - return wrapped.strictComparisonNormalizationWorkspaces(); - } - - @Override - public String strictComparisonNormalizationTag() { - return wrapped.strictComparisonNormalizationTag(); - } - - @Override - public String deploymentMode() { - return wrapped.deploymentMode(); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/functional/CheckedBiConsumer.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/functional/CheckedBiConsumer.java deleted file mode 100644 index d2c1d08aaf29b..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/functional/CheckedBiConsumer.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.functional; - -@FunctionalInterface -public interface CheckedBiConsumer { - - void accept(T t, R r) throws E; - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/functional/CheckedBiFunction.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/functional/CheckedBiFunction.java deleted file mode 100644 index b23857d85eea0..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/functional/CheckedBiFunction.java +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.functional; - -public interface CheckedBiFunction { - - Result apply(First first, Second second) throws E; - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/functional/CheckedConsumer.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/functional/CheckedConsumer.java deleted file mode 100644 index 5e3fe89652eff..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/functional/CheckedConsumer.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.functional; - -@FunctionalInterface -public interface CheckedConsumer { - - void accept(T t) throws E; - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/functional/CheckedFunction.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/functional/CheckedFunction.java deleted file mode 100644 index cb3d85917eefc..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/functional/CheckedFunction.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.functional; - -@FunctionalInterface -public interface CheckedFunction { - - R apply(T t) throws E; - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/functional/CheckedSupplier.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/functional/CheckedSupplier.java deleted file mode 100644 index 57be08def307d..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/functional/CheckedSupplier.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.functional; - -@FunctionalInterface -public interface CheckedSupplier { - - T get() throws E; - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/functional/Either.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/functional/Either.java deleted file mode 100644 index 187b109e42f2c..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/functional/Either.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.functional; - -import java.util.Objects; - -/** - * A class that represents a value of one of two possible types (a disjoint union). An instance of - * Either is an instance of Left or Right. - * - * A common use of Either is for error handling in functional programming. By convention, Left is - * failure and Right is success. - * - * @param the type of the left value - * @param the type of the right value - */ -public class Either { - - private final Error left; - private final Result right; - - private Either(Error left, Result right) { - this.left = left; - this.right = right; - } - - public boolean isLeft() { - return left != null; - } - - public boolean isRight() { - return right != null; - } - - public Error getLeft() { - return left; - } - - public Result getRight() { - return right; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - Either either = (Either) o; - return Objects.equals(left, either.left) && Objects.equals(right, either.right); - } - - @Override - public int hashCode() { - return Objects.hash(left, right); - } - - public static Either left(Error error) { - return new Either<>(error, null); - } - - public static Either right(Result result) { - return new Either<>(null, result); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/io/IOs.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/io/IOs.java deleted file mode 100644 index 5c929a8c9d126..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/io/IOs.java +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.io; - -import com.google.common.base.Charsets; -import java.io.BufferedReader; -import java.io.Closeable; -import java.io.File; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.UUID; -import org.apache.commons.io.input.ReversedLinesFileReader; - -public class IOs { - - public static Path writeFile(final Path path, final String fileName, final String contents) { - final Path filePath = path.resolve(fileName); - return writeFile(filePath, contents); - } - - public static Path writeFile(final Path filePath, final byte[] contents) { - try { - Files.write(filePath, contents); - return filePath; - } catch (final IOException e) { - throw new RuntimeException(e); - } - } - - public static Path writeFile(final Path filePath, final String contents) { - try { - Files.writeString(filePath, contents, StandardCharsets.UTF_8); - return filePath; - } catch (final IOException e) { - throw new RuntimeException(e); - } - } - - /** - * Writes a file to a random directory in the /tmp folder. Useful as a staging group for test - * resources. - */ - public static String writeFileToRandomTmpDir(final String filename, final String contents) { - final Path source = Paths.get("/tmp", UUID.randomUUID().toString()); - try { - final Path tmpFile = source.resolve(filename); - Files.deleteIfExists(tmpFile); - Files.createDirectory(source); - writeFile(tmpFile, contents); - return tmpFile.toString(); - } catch (final IOException e) { - throw new RuntimeException(e); - } - } - - public static String readFile(final Path path, final String fileName) { - return readFile(path.resolve(fileName)); - } - - public static String readFile(final Path fullpath) { - try { - return Files.readString(fullpath, StandardCharsets.UTF_8); - } catch (final IOException e) { - throw new RuntimeException(e); - } - } - - public static List getTail(final int numLines, final Path path) throws IOException { - if (path == null) { - return Collections.emptyList(); - } - - final File file = path.toFile(); - if (!file.exists()) { - return Collections.emptyList(); - } - - try (final ReversedLinesFileReader fileReader = new ReversedLinesFileReader(file, Charsets.UTF_8)) { - final List lines = new ArrayList<>(); - - String line = fileReader.readLine(); - while (line != null && lines.size() < numLines) { - lines.add(line); - line = fileReader.readLine(); - } - - Collections.reverse(lines); - - return lines; - } - } - - public static InputStream inputStream(final Path path) { - try { - return Files.newInputStream(path); - } catch (final IOException e) { - throw new RuntimeException(e); - } - } - - public static void silentClose(final Closeable closeable) { - try { - closeable.close(); - } catch (final IOException e) { - throw new RuntimeException(e); - } - } - - public static BufferedReader newBufferedReader(final InputStream inputStream) { - return new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/io/LineGobbler.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/io/LineGobbler.java deleted file mode 100644 index 6fa279805b604..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/io/LineGobbler.java +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.io; - -import io.airbyte.commons.concurrency.VoidCallable; -import io.airbyte.commons.logging.MdcScope; -import java.io.BufferedReader; -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.nio.charset.StandardCharsets; -import java.util.Map; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.function.Consumer; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.slf4j.MDC; - -public class LineGobbler implements VoidCallable { - - private final static Logger LOGGER = LoggerFactory.getLogger(LineGobbler.class); - private final static String GENERIC = "generic"; - - public static void gobble(final InputStream is, final Consumer consumer) { - gobble(is, consumer, GENERIC, MdcScope.DEFAULT_BUILDER); - } - - public static void gobble(final String message, final Consumer consumer) { - final InputStream stringAsSteam = new ByteArrayInputStream(message.getBytes(StandardCharsets.UTF_8)); - gobble(stringAsSteam, consumer); - } - - public static void gobble(final String message) { - gobble(message, LOGGER::info); - } - - /** - * Used to emit a visual separator in the user-facing logs indicating a start of a meaningful - * temporal activity - * - * @param message - */ - public static void startSection(final String message) { - gobble("\r\n----- START " + message + " -----\r\n\r\n"); - } - - /** - * Used to emit a visual separator in the user-facing logs indicating a end of a meaningful temporal - * activity - * - * @param message - */ - public static void endSection(final String message) { - gobble("\r\n----- END " + message + " -----\r\n\r\n"); - } - - public static void gobble(final InputStream is, final Consumer consumer, final MdcScope.Builder mdcScopeBuilder) { - gobble(is, consumer, GENERIC, mdcScopeBuilder); - } - - public static void gobble(final InputStream is, final Consumer consumer, final String caller, final MdcScope.Builder mdcScopeBuilder) { - final ExecutorService executor = Executors.newSingleThreadExecutor(); - final Map mdc = MDC.getCopyOfContextMap(); - final var gobbler = new LineGobbler(is, consumer, executor, mdc, caller, mdcScopeBuilder); - executor.submit(gobbler); - } - - private final BufferedReader is; - private final Consumer consumer; - private final ExecutorService executor; - private final Map mdc; - private final String caller; - private final MdcScope.Builder containerLogMdcBuilder; - - LineGobbler(final InputStream is, - final Consumer consumer, - final ExecutorService executor, - final Map mdc) { - this(is, consumer, executor, mdc, GENERIC, MdcScope.DEFAULT_BUILDER); - } - - LineGobbler(final InputStream is, - final Consumer consumer, - final ExecutorService executor, - final Map mdc, - final MdcScope.Builder mdcScopeBuilder) { - this(is, consumer, executor, mdc, GENERIC, mdcScopeBuilder); - } - - LineGobbler(final InputStream is, - final Consumer consumer, - final ExecutorService executor, - final Map mdc, - final String caller, - final MdcScope.Builder mdcScopeBuilder) { - this.is = IOs.newBufferedReader(is); - this.consumer = consumer; - this.executor = executor; - this.mdc = mdc; - this.caller = caller; - this.containerLogMdcBuilder = mdcScopeBuilder; - } - - @Override - public void voidCall() { - MDC.setContextMap(mdc); - try { - String line = is.readLine(); - while (line != null) { - try (final var mdcScope = containerLogMdcBuilder.build()) { - consumer.accept(line); - } - line = is.readLine(); - } - } catch (final IOException i) { - LOGGER.warn("{} gobbler IOException: {}. Typically happens when cancelling a job.", caller, i.getMessage()); - } catch (final Exception e) { - LOGGER.error("{} gobbler error when reading stream", caller, e); - } finally { - executor.shutdown(); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/jackson/MoreMappers.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/jackson/MoreMappers.java deleted file mode 100644 index d5f1720294bb6..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/jackson/MoreMappers.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.jackson; - -import com.fasterxml.jackson.core.JsonGenerator.Feature; -import com.fasterxml.jackson.databind.DeserializationFeature; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; -import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; - -/** - * The {@link JavaTimeModule} allows mappers to accommodate different varieties of serialised date - * time strings. - * - * All jackson mapper creation should use the following methods for instantiation. - */ -public class MoreMappers { - - public static ObjectMapper initMapper() { - final ObjectMapper result = new ObjectMapper().registerModule(new JavaTimeModule()); - result.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); - result.configure(Feature.WRITE_BIGDECIMAL_AS_PLAIN, true); - return result; - } - - public static ObjectMapper initYamlMapper(final YAMLFactory factory) { - return new ObjectMapper(factory).registerModule(new JavaTimeModule()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/json/JsonPaths.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/json/JsonPaths.java deleted file mode 100644 index 7041f33254791..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/json/JsonPaths.java +++ /dev/null @@ -1,311 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.json; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ArrayNode; -import com.google.common.base.Preconditions; -import com.jayway.jsonpath.Configuration; -import com.jayway.jsonpath.JsonPath; -import com.jayway.jsonpath.Option; -import com.jayway.jsonpath.PathNotFoundException; -import com.jayway.jsonpath.spi.json.JacksonJsonNodeJsonProvider; -import com.jayway.jsonpath.spi.json.JsonProvider; -import com.jayway.jsonpath.spi.mapper.JacksonMappingProvider; -import com.jayway.jsonpath.spi.mapper.MappingProvider; -import io.airbyte.commons.json.JsonSchemas.FieldNameOrList; -import io.airbyte.commons.util.MoreIterators; -import java.util.Collections; -import java.util.EnumSet; -import java.util.List; -import java.util.Optional; -import java.util.Set; -import java.util.function.BiFunction; -import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * JSONPath is specification for querying JSON objects. More information about the specification can - * be found here: https://goessner.net/articles/JsonPath/. For those familiar with jq, JSONPath will - * be most recognizable as "that DSL that jq uses". - * - * We use a java implementation of this specification (repo: https://github.com/json-path/JsonPath). - * This class wraps that implementation to make it easier to leverage this tool internally. - * - * GOTCHA: Keep in mind with JSONPath, depending on the query, 0, 1, or N values may be returned. - * The pattern for handling return values is very much like writing SQL queries. When using it, you - * must consider what the number of return values for your query might be. e.g. for this object: { - * "alpha": [1, 2, 3] }, this JSONPath "$.alpha[*]", would return: [1, 2, 3], but this one - * "$.alpha[0]" would return: [1]. The Java interface we place over this query system defaults to - * returning a list for query results. In addition, we provide helper functions that will just - * return a single value (see: {@link JsonPaths#getSingleValue(JsonNode, String)}). These should - * only be used if it is not possible for a query to return more than one value. - */ -public class JsonPaths { - - private static final Logger LOGGER = LoggerFactory.getLogger(JsonPaths.class); - - static final String JSON_PATH_START_CHARACTER = "$"; - static final String JSON_PATH_LIST_SPLAT = "[*]"; - static final String JSON_PATH_FIELD_SEPARATOR = "."; - - // set default configurations at start up to match our JSON setup. - static { - Configuration.setDefaults(new Configuration.Defaults() { - - // allows us to pass in Jackson JsonNode - private static final JsonProvider jsonProvider = new JacksonJsonNodeJsonProvider(); - private static final MappingProvider mappingProvider = new JacksonMappingProvider(); - - @Override - public JsonProvider jsonProvider() { - return jsonProvider; - } - - @Override - public MappingProvider mappingProvider() { - return mappingProvider; - } - - @Override - public Set

    - * This class is helpful in the case where fields can be any UTF-8 string, so the only simple way to - * keep track of the different parts of a path without going crazy with escape characters is to keep - * it in a list with list set aside as a special case. - *

    - * We prefer using this scheme instead of JSONPath in the tree traversal because, it is easier to - * decompose a path in this scheme than it is in JSONPath. Some callers of the traversal logic want - * to isolate parts of the path easily without the need for complex regex (that would be required if - * we used JSONPath). - */ - public static class FieldNameOrList { - - private final String fieldName; - private final boolean isList; - - public static FieldNameOrList fieldName(final String fieldName) { - return new FieldNameOrList(fieldName); - } - - public static FieldNameOrList list() { - return new FieldNameOrList(null); - } - - private FieldNameOrList(final String fieldName) { - isList = fieldName == null; - this.fieldName = fieldName; - } - - public String getFieldName() { - Preconditions.checkState(!isList, "cannot return field name, is list node"); - return fieldName; - } - - public boolean isList() { - return isList; - } - - @Override - public boolean equals(final Object o) { - if (this == o) { - return true; - } - if (!(o instanceof FieldNameOrList)) { - return false; - } - final FieldNameOrList that = (FieldNameOrList) o; - return isList == that.isList && Objects.equals(fieldName, that.fieldName); - } - - @Override - public int hashCode() { - return Objects.hash(fieldName, isList); - } - - @Override - public String toString() { - return "FieldNameOrList{" + - "fieldName='" + fieldName + '\'' + - ", isList=" + isList + - '}'; - } - - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/json/Jsons.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/json/Jsons.java deleted file mode 100644 index e6475509b5aa8..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/json/Jsons.java +++ /dev/null @@ -1,428 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.json; - -import static java.util.Collections.singletonMap; -import static java.util.stream.Collectors.toMap; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.core.util.DefaultPrettyPrinter; -import com.fasterxml.jackson.core.util.Separators; -import com.fasterxml.jackson.databind.DeserializationFeature; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.ObjectWriter; -import com.fasterxml.jackson.databind.node.ArrayNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; -import com.google.common.base.Charsets; -import com.google.common.base.Preconditions; -import io.airbyte.commons.jackson.MoreMappers; -import io.airbyte.commons.stream.MoreStreams; -import java.io.File; -import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Optional; -import java.util.Set; -import java.util.function.BiConsumer; -import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@SuppressWarnings({"PMD.AvoidReassigningParameters", "PMD.AvoidCatchingThrowable"}) -public class Jsons { - - private static final Logger LOGGER = LoggerFactory.getLogger(Jsons.class); - - // Object Mapper is thread-safe - private static final ObjectMapper OBJECT_MAPPER = MoreMappers.initMapper(); - // sort of a hotfix; I don't know how bad the performance hit is so not turning this on by default - // at time of writing (2023-08-18) this is only used in tests, so we don't care. - private static final ObjectMapper OBJECT_MAPPER_EXACT; - static { - OBJECT_MAPPER_EXACT = MoreMappers.initMapper(); - OBJECT_MAPPER_EXACT.enable(DeserializationFeature.USE_BIG_DECIMAL_FOR_FLOATS); - } - - private static final ObjectMapper YAML_OBJECT_MAPPER = MoreMappers.initYamlMapper(new YAMLFactory()); - private static final ObjectWriter OBJECT_WRITER = OBJECT_MAPPER.writer(new JsonPrettyPrinter()); - - public static String serialize(final T object) { - try { - return OBJECT_MAPPER.writeValueAsString(object); - } catch (final JsonProcessingException e) { - throw new RuntimeException(e); - } - } - - public static T deserialize(final String jsonString, final Class klass) { - try { - return OBJECT_MAPPER.readValue(jsonString, klass); - } catch (final IOException e) { - throw new RuntimeException(e); - } - } - - public static T deserialize(final String jsonString, final TypeReference valueTypeRef) { - try { - return OBJECT_MAPPER.readValue(jsonString, valueTypeRef); - } catch (final IOException e) { - throw new RuntimeException(e); - } - } - - public static T deserialize(final File file, final Class klass) { - try { - return OBJECT_MAPPER.readValue(file, klass); - } catch (final IOException e) { - throw new RuntimeException(e); - } - } - - public static T deserialize(final File file, final TypeReference valueTypeRef) { - try { - return OBJECT_MAPPER.readValue(file, valueTypeRef); - } catch (final IOException e) { - throw new RuntimeException(e); - } - } - - public static T convertValue(final Object object, final Class klass) { - return OBJECT_MAPPER.convertValue(object, klass); - } - - public static JsonNode deserialize(final String jsonString) { - try { - return OBJECT_MAPPER.readTree(jsonString); - } catch (final IOException e) { - throw new RuntimeException(e); - } - } - - public static JsonNode deserializeExact(final String jsonString) { - try { - return OBJECT_MAPPER_EXACT.readTree(jsonString); - } catch (final IOException e) { - throw new RuntimeException(e); - } - } - - public static Optional tryDeserialize(final String jsonString, final Class klass) { - try { - return Optional.of(OBJECT_MAPPER.readValue(jsonString, klass)); - } catch (final Throwable e) { - return handleDeserThrowable(e); - } - } - - public static Optional tryDeserializeExact(final String jsonString, final Class klass) { - try { - return Optional.of(OBJECT_MAPPER_EXACT.readValue(jsonString, klass)); - } catch (final Throwable e) { - return handleDeserThrowable(e); - } - } - - public static Optional tryDeserialize(final String jsonString) { - try { - return Optional.of(OBJECT_MAPPER.readTree(jsonString)); - } catch (final Throwable e) { - return handleDeserThrowable(e); - } - } - - /** - * This method does not generate deserialization warn log on why serialization failed. See also - * {@link #tryDeserialize(String)}. - * - * @param jsonString - * @return - */ - public static Optional tryDeserializeWithoutWarn(final String jsonString) { - try { - return Optional.of(OBJECT_MAPPER.readTree(jsonString)); - } catch (final Throwable e) { - return Optional.empty(); - } - } - - public static JsonNode jsonNode(final T object) { - return OBJECT_MAPPER.valueToTree(object); - } - - public static JsonNode jsonNodeFromFile(final File file) throws IOException { - return YAML_OBJECT_MAPPER.readTree(file); - } - - public static JsonNode emptyObject() { - return jsonNode(Collections.emptyMap()); - } - - public static ArrayNode arrayNode() { - return OBJECT_MAPPER.createArrayNode(); - } - - public static T object(final JsonNode jsonNode, final Class klass) { - return OBJECT_MAPPER.convertValue(jsonNode, klass); - } - - public static T object(final JsonNode jsonNode, final TypeReference typeReference) { - return OBJECT_MAPPER.convertValue(jsonNode, typeReference); - } - - public static Optional tryObject(final JsonNode jsonNode, final Class klass) { - try { - return Optional.of(OBJECT_MAPPER.convertValue(jsonNode, klass)); - } catch (final Exception e) { - return Optional.empty(); - } - } - - public static Optional tryObject(final JsonNode jsonNode, final TypeReference typeReference) { - try { - return Optional.of(OBJECT_MAPPER.convertValue(jsonNode, typeReference)); - } catch (final Exception e) { - return Optional.empty(); - } - } - - @SuppressWarnings("unchecked") - public static T clone(final T object) { - return (T) deserialize(serialize(object), object.getClass()); - } - - public static byte[] toBytes(final JsonNode jsonNode) { - return serialize(jsonNode).getBytes(Charsets.UTF_8); - } - - /** - * Use string length as an estimation for byte size, because all ASCII characters are one byte long - * in UTF-8, and ASCII characters cover most of the use cases. To be more precise, we can convert - * the string to byte[] and use the length of the byte[]. However, this conversion is expensive in - * memory consumption. Given that the byte size of the serialized JSON is already an estimation of - * the actual size of the JSON object, using a cheap operation seems an acceptable compromise. - */ - public static int getEstimatedByteSize(final JsonNode jsonNode) { - return serialize(jsonNode).length(); - } - - public static Set keys(final JsonNode jsonNode) { - if (jsonNode.isObject()) { - return Jsons.object(jsonNode, new TypeReference>() {}).keySet(); - } else { - return new HashSet<>(); - } - } - - public static List children(final JsonNode jsonNode) { - return MoreStreams.toStream(jsonNode.elements()).collect(Collectors.toList()); - } - - public static String toPrettyString(final JsonNode jsonNode) { - try { - return OBJECT_WRITER.writeValueAsString(jsonNode) + "\n"; - } catch (final JsonProcessingException e) { - throw new RuntimeException(e); - } - } - - public static JsonNode navigateTo(JsonNode node, final List keys) { - for (final String key : keys) { - node = node.get(key); - } - return node; - } - - public static void replaceNestedValue(final JsonNode json, final List keys, final JsonNode replacement) { - replaceNested(json, keys, (node, finalKey) -> node.put(finalKey, replacement)); - } - - public static void replaceNestedString(final JsonNode json, final List keys, final String replacement) { - replaceNested(json, keys, (node, finalKey) -> node.put(finalKey, replacement)); - } - - public static void replaceNestedInt(final JsonNode json, final List keys, final int replacement) { - replaceNested(json, keys, (node, finalKey) -> node.put(finalKey, replacement)); - } - - private static void replaceNested(final JsonNode json, final List keys, final BiConsumer typedReplacement) { - Preconditions.checkArgument(!keys.isEmpty(), "Must pass at least one key"); - final JsonNode nodeContainingFinalKey = navigateTo(json, keys.subList(0, keys.size() - 1)); - typedReplacement.accept((ObjectNode) nodeContainingFinalKey, keys.get(keys.size() - 1)); - } - - public static Optional getOptional(final JsonNode json, final String... keys) { - return getOptional(json, Arrays.asList(keys)); - } - - public static Optional getOptional(JsonNode json, final List keys) { - for (final String key : keys) { - if (json == null) { - return Optional.empty(); - } - - json = json.get(key); - } - - return Optional.ofNullable(json); - } - - public static String getStringOrNull(final JsonNode json, final String... keys) { - return getStringOrNull(json, Arrays.asList(keys)); - } - - public static String getStringOrNull(final JsonNode json, final List keys) { - final Optional optional = getOptional(json, keys); - return optional.map(JsonNode::asText).orElse(null); - } - - public static int getIntOrZero(final JsonNode json, final String... keys) { - return getIntOrZero(json, Arrays.asList(keys)); - } - - public static int getIntOrZero(final JsonNode json, final List keys) { - final Optional optional = getOptional(json, keys); - return optional.map(JsonNode::asInt).orElse(0); - } - - /** - * Flattens an ObjectNode, or dumps it into a {null: value} map if it's not an object. When - * applyFlattenToArray is true, each element in the array will be one entry in the returned map. - * This behavior is used in the Redshift SUPER type. When it is false, the whole array will be one - * entry. This is used in the JobTracker. - */ - @SuppressWarnings("PMD.ForLoopCanBeForeach") - public static Map flatten(final JsonNode node, final Boolean applyFlattenToArray) { - if (node.isObject()) { - final Map output = new HashMap<>(); - for (final Iterator> it = node.fields(); it.hasNext();) { - final Entry entry = it.next(); - final String field = entry.getKey(); - final JsonNode value = entry.getValue(); - mergeMaps(output, field, flatten(value, applyFlattenToArray)); - } - return output; - } else if (node.isArray() && applyFlattenToArray) { - final Map output = new HashMap<>(); - final int arrayLen = node.size(); - for (int i = 0; i < arrayLen; i++) { - final String field = String.format("[%d]", i); - final JsonNode value = node.get(i); - mergeMaps(output, field, flatten(value, applyFlattenToArray)); - } - return output; - } else { - final Object value; - if (node.isBoolean()) { - value = node.asBoolean(); - } else if (node.isLong()) { - value = node.asLong(); - } else if (node.isInt()) { - value = node.asInt(); - } else if (node.isDouble()) { - value = node.asDouble(); - } else if (node.isValueNode() && !node.isNull()) { - value = node.asText(); - } else { - // Fallback handling for e.g. arrays - value = node.toString(); - } - return singletonMap(null, value); - } - } - - /** - * Flattens an ObjectNode, or dumps it into a {null: value} map if it's not an object. New usage of - * this function is best to explicitly declare the intended array mode. This version is provided for - * backward compatibility. - */ - public static Map flatten(final JsonNode node) { - return flatten(node, false); - } - - /** - * Prepend all keys in subMap with prefix, then merge that map into originalMap. - *

    - * If subMap contains a null key, then instead it is replaced with prefix. I.e. {null: value} is - * treated as {prefix: value} when merging into originalMap. - */ - public static void mergeMaps(final Map originalMap, final String prefix, final Map subMap) { - originalMap.putAll(subMap.entrySet().stream().collect(toMap( - e -> { - final String key = e.getKey(); - if (key != null) { - return prefix + "." + key; - } else { - return prefix; - } - }, - Entry::getValue))); - } - - public static Map deserializeToStringMap(final JsonNode json) { - return OBJECT_MAPPER.convertValue(json, new TypeReference<>() {}); - } - - /** - * By the Jackson DefaultPrettyPrinter prints objects with an extra space as follows: {"name" : - * "airbyte"}. We prefer {"name": "airbyte"}. - */ - private static class JsonPrettyPrinter extends DefaultPrettyPrinter { - - // this method has to be overridden because in the superclass it checks that it is an instance of - // DefaultPrettyPrinter (which is no longer the case in this inherited class). - @Override - public DefaultPrettyPrinter createInstance() { - return new DefaultPrettyPrinter(this); - } - - // override the method that inserts the extra space. - @Override - public DefaultPrettyPrinter withSeparators(final Separators separators) { - _separators = separators; - _objectFieldValueSeparatorWithSpaces = separators.getObjectFieldValueSeparator() + " "; - return this; - } - - } - - /** - * Simple utility method to log a semi-useful message when deserialization fails. Intentionally - * don't log the actual exception object, because it probably contains some/all of the inputString - * (e.g. `[Source: (String)"{"foo": "bar"; line: 1, column: 13]`). Logging the class name - * can at least help narrow down the problem, without leaking potentially-sensitive information. - */ - private static Optional handleDeserThrowable(Throwable t) { - // Manually build the stacktrace, excluding the top-level exception object - // so that we don't accidentally include the exception message. - // Otherwise we could just do ExceptionUtils.getStackTrace(t). - final StringBuilder sb = new StringBuilder(); - sb.append(t.getClass()); - for (final StackTraceElement traceElement : t.getStackTrace()) { - sb.append("\n\tat "); - sb.append(traceElement.toString()); - } - while (t.getCause() != null) { - t = t.getCause(); - sb.append("\nCaused by "); - sb.append(t.getClass()); - for (final StackTraceElement traceElement : t.getStackTrace()) { - sb.append("\n\tat "); - sb.append(traceElement.toString()); - } - } - LOGGER.warn("Failed to deserialize json due to {}", sb); - return Optional.empty(); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/lang/CloseableConsumer.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/lang/CloseableConsumer.java deleted file mode 100644 index dd6084ce2c9d7..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/lang/CloseableConsumer.java +++ /dev/null @@ -1,9 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.lang; - -import java.util.function.Consumer; - -public interface CloseableConsumer extends Consumer, AutoCloseable {} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/lang/CloseableQueue.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/lang/CloseableQueue.java deleted file mode 100644 index a234f8c007348..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/lang/CloseableQueue.java +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.lang; - -import java.util.Queue; - -public interface CloseableQueue extends Queue, AutoCloseable { - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/lang/CloseableShutdownHook.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/lang/CloseableShutdownHook.java deleted file mode 100644 index 18d3cf6209700..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/lang/CloseableShutdownHook.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.lang; - -import com.google.common.annotations.VisibleForTesting; -import java.io.Closeable; -import java.util.Collection; -import java.util.stream.Stream; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Registers a shutdown hook that calls the close method of the provided objects. If an object does - * not support either the {@link AutoCloseable} or {@link Closeable} interface, it will be ignored. - *

    - * This is a temporary class that is being provided to ensure that resources created by each - * application are properly closed on shutdown. This logic will no longer be necessary once an - * application framework is introduced to the project that can provide object lifecycle management. - */ -public class CloseableShutdownHook { - - private static final Logger log = LoggerFactory.getLogger(CloseableShutdownHook.class); - - /** - * Registers a runtime shutdown hook with the application for each provided closeable object. - * - * @param objects An array of objects to be closed on application shutdown. - */ - public static void registerRuntimeShutdownHook(final Object... objects) { - Runtime.getRuntime().addShutdownHook(buildShutdownHookThread(objects)); - } - - /** - * Builds the {@link Thread} that will be registered as an application shutdown hook. - * - * @param objects An array of objects to be closed on application shutdown. - * @return The application shutdown hook {@link Thread}. - */ - @VisibleForTesting - static Thread buildShutdownHookThread(final Object... objects) { - final Collection autoCloseables = Stream.of(objects) - .filter(o -> o instanceof AutoCloseable) - .map(AutoCloseable.class::cast) - .toList(); - - return new Thread(() -> { - autoCloseables.forEach(CloseableShutdownHook::close); - }); - } - - private static void close(final AutoCloseable autoCloseable) { - try { - autoCloseable.close(); - } catch (final Exception e) { - log.error("Unable to close object {}.", autoCloseable.getClass().getName(), e); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/lang/Exceptions.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/lang/Exceptions.java deleted file mode 100644 index 7d66bb0483e81..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/lang/Exceptions.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.lang; - -import java.lang.invoke.MethodHandles; -import java.util.concurrent.Callable; -import java.util.function.Function; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class Exceptions { - - private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - - /** - * Catch a checked exception and rethrow as a {@link RuntimeException} - * - * @param callable - function that throws a checked exception. - * @param - return type of the function. - * @return object that the function returns. - */ - public static T toRuntime(final Callable callable) { - try { - return callable.call(); - } catch (final RuntimeException e) { - throw e; - } catch (final Exception e) { - throw new RuntimeException(e); - } - } - - /** - * Catch a checked exception and rethrow as a {@link RuntimeException}. - * - * @param voidCallable - function that throws a checked exception. - */ - public static void toRuntime(final Procedure voidCallable) { - castCheckedToRuntime(voidCallable, RuntimeException::new); - } - - private static void castCheckedToRuntime(final Procedure voidCallable, final Function exceptionFactory) { - try { - voidCallable.call(); - } catch (final RuntimeException e) { - throw e; - } catch (final Exception e) { - throw exceptionFactory.apply(e); - } - } - - public static void swallow(final Procedure procedure) { - try { - procedure.call(); - } catch (final Exception e) { - log.error("Swallowed error.", e); - } - } - - public interface Procedure { - - void call() throws Exception; - - } - - public static T swallowWithDefault(final Callable procedure, final T defaultValue) { - try { - return procedure.call(); - } catch (final Exception e) { - return defaultValue; - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/lang/MoreBooleans.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/lang/MoreBooleans.java deleted file mode 100644 index 3522b7a3cf9ab..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/lang/MoreBooleans.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.lang; - -public class MoreBooleans { - - /** - * Safely handles converting boxed Booleans to booleans, even when they are null. Evaluates null as - * false. - * - * @param bool boxed - * @return unboxed - */ - public static boolean isTruthy(final Boolean bool) { - return bool != null && bool; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/logging/LoggingHelper.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/logging/LoggingHelper.java deleted file mode 100644 index d9cca1b780899..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/logging/LoggingHelper.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.logging; - -import com.google.common.annotations.VisibleForTesting; - -public class LoggingHelper { - - public enum Color { - - BLACK("\u001b[30m"), - RED("\u001b[31m"), - GREEN("\u001b[32m"), - YELLOW("\u001b[33m"), - BLUE("\u001b[34m"), - MAGENTA("\u001b[35m"), - CYAN("\u001b[36m"), - WHITE("\u001b[37m"), - BLUE_BACKGROUND("\u001b[44m"), // source - YELLOW_BACKGROUND("\u001b[43m"), // destination - GREEN_BACKGROUND("\u001b[42m"), // normalization - CYAN_BACKGROUND("\u001b[46m"), // container runner - RED_BACKGROUND("\u001b[41m"), // testcontainers - PURPLE_BACKGROUND("\u001b[45m"); // dbt - - private final String ansi; - - Color(final String ansiCode) { - this.ansi = ansiCode; - } - - public String getCode() { - return ansi; - } - - } - - public static final String LOG_SOURCE_MDC_KEY = "log_source"; - - @VisibleForTesting - public static final String RESET = "\u001B[0m"; - public static final String PREPARE_COLOR_CHAR = "\u001b[m"; - - public static String applyColor(final Color color, final String msg) { - return PREPARE_COLOR_CHAR + color.getCode() + msg + PREPARE_COLOR_CHAR + RESET; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/logging/MdcScope.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/logging/MdcScope.java deleted file mode 100644 index 5f49f00b1251c..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/logging/MdcScope.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.logging; - -import io.airbyte.commons.logging.LoggingHelper.Color; -import java.util.HashMap; -import java.util.Map; -import java.util.Optional; -import java.util.function.BiConsumer; -import org.slf4j.MDC; - -/** - * This class is an autoClosable class that will add some specific values into the log MDC. When - * being close, it will restore the original MDC. It is advised to use it like that: - * - *

    - *   
    - *     try(final ScopedMDCChange scopedMDCChange = new ScopedMDCChange(
    - *      new HashMap<String, String>() {{
    - *        put("my", "value");
    - *      }}
    - *     )) {
    - *        ...
    - *     }
    - *   
    - * 
    - */ -public class MdcScope implements AutoCloseable { - - public final static Builder DEFAULT_BUILDER = new Builder(); - - private final Map originalContextMap; - - public MdcScope(final Map keyValuesToAdd) { - originalContextMap = MDC.getCopyOfContextMap(); - - keyValuesToAdd.forEach(MDC::put); - } - - @Override - public void close() { - MDC.setContextMap(originalContextMap); - } - - public static class Builder { - - private Optional maybeLogPrefix = Optional.empty(); - private Optional maybePrefixColor = Optional.empty(); - private boolean simple = true; - - public Builder setLogPrefix(final String logPrefix) { - this.maybeLogPrefix = Optional.ofNullable(logPrefix); - - return this; - } - - public Builder setPrefixColor(final Color color) { - this.maybePrefixColor = Optional.ofNullable(color); - - return this; - } - - // Use this to disable simple logging for things in an MdcScope. - // If you're using this, you're probably starting to use MdcScope outside of container labelling. - // If so, consider changing the defaults / builder / naming. - public Builder setSimple(final boolean simple) { - this.simple = simple; - - return this; - } - - public void produceMappings(final BiConsumer mdcConsumer) { - maybeLogPrefix.ifPresent(logPrefix -> { - final String potentiallyColoredLog = maybePrefixColor - .map(color -> LoggingHelper.applyColor(color, logPrefix)) - .orElse(logPrefix); - - mdcConsumer.accept(LoggingHelper.LOG_SOURCE_MDC_KEY, potentiallyColoredLog); - - if (simple) { - // outputs much less information for this line. see log4j2.xml to see exactly what this does - mdcConsumer.accept("simple", "true"); - } - }); - } - - public MdcScope build() { - final Map extraMdcEntries = new HashMap<>(); - produceMappings(extraMdcEntries::put); - return new MdcScope(extraMdcEntries); - } - - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/map/MoreMaps.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/map/MoreMaps.java deleted file mode 100644 index 6bd1ad7d479a0..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/map/MoreMaps.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.map; - -import com.google.common.base.Preconditions; -import java.util.HashMap; -import java.util.Map; - -public class MoreMaps { - - @SafeVarargs - public static Map merge(final Map... maps) { - final Map outputMap = new HashMap<>(); - - for (final Map map : maps) { - Preconditions.checkNotNull(map); - outputMap.putAll(map); - } - - return outputMap; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/protocol/DefaultProtocolSerializer.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/protocol/DefaultProtocolSerializer.java deleted file mode 100644 index dc363fb91f5c1..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/protocol/DefaultProtocolSerializer.java +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.protocol; - -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; - -public class DefaultProtocolSerializer implements ProtocolSerializer { - - @Override - public String serialize(ConfiguredAirbyteCatalog configuredAirbyteCatalog) { - return Jsons.serialize(configuredAirbyteCatalog); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/protocol/ProtocolSerializer.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/protocol/ProtocolSerializer.java deleted file mode 100644 index e619c3ac3f4c2..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/protocol/ProtocolSerializer.java +++ /dev/null @@ -1,13 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.protocol; - -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; - -public interface ProtocolSerializer { - - String serialize(final ConfiguredAirbyteCatalog configuredAirbyteCatalog); - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/resources/MoreResources.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/resources/MoreResources.java deleted file mode 100644 index 44af8251fede9..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/resources/MoreResources.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.resources; - -import com.google.common.base.Preconditions; -import com.google.common.io.Resources; -import io.airbyte.commons.lang.Exceptions; -import java.io.File; -import java.io.IOException; -import java.net.URISyntaxException; -import java.net.URL; -import java.nio.charset.StandardCharsets; -import java.nio.file.FileSystem; -import java.nio.file.FileSystems; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.Collections; -import java.util.stream.Stream; - -public class MoreResources { - - private static final String UNSTABLE_API_USAGE = "UnstableApiUsage"; - - @SuppressWarnings(UNSTABLE_API_USAGE) - public static String readResource(final String name) throws IOException { - final URL resource = Resources.getResource(name); - return Resources.toString(resource, StandardCharsets.UTF_8); - } - - @SuppressWarnings(UNSTABLE_API_USAGE) - public static String readResource(final Class klass, final String name) throws IOException { - final String rootedName = !name.startsWith("/") ? String.format("/%s", name) : name; - final URL url = Resources.getResource(klass, rootedName); - return Resources.toString(url, StandardCharsets.UTF_8); - } - - @SuppressWarnings(UNSTABLE_API_USAGE) - public static File readResourceAsFile(final String name) throws URISyntaxException { - return new File(Resources.getResource(name).toURI()); - } - - @SuppressWarnings(UNSTABLE_API_USAGE) - public static byte[] readBytes(final String name) throws IOException { - final URL resource = Resources.getResource(name); - return Resources.toByteArray(resource); - } - - /** - * This class is a bit of a hack. Might have unexpected behavior. - * - * @param klass class whose resources will be access - * @param name path to directory in resources list - * @return stream of paths to each resource file. THIS STREAM MUST BE CLOSED. - * @throws IOException you never know when you IO. - */ - public static Stream listResources(final Class klass, final String name) throws IOException { - Preconditions.checkNotNull(klass); - Preconditions.checkNotNull(name); - Preconditions.checkArgument(!name.isBlank()); - - try { - final String rootedResourceDir = !name.startsWith("/") ? String.format("/%s", name) : name; - final URL url = klass.getResource(rootedResourceDir); - // noinspection ConstantConditions - Preconditions.checkNotNull(url, "Could not find resource."); - - final Path searchPath; - if (url.toString().startsWith("jar")) { - final FileSystem fileSystem = FileSystems.newFileSystem(url.toURI(), Collections.emptyMap()); - searchPath = fileSystem.getPath(rootedResourceDir); - return Files.walk(searchPath, 1).onClose(() -> Exceptions.toRuntime(fileSystem::close)); - } else { - searchPath = Path.of(url.toURI()); - return Files.walk(searchPath, 1); - } - - } catch (final URISyntaxException e) { - throw new RuntimeException(e); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/stream/AirbyteStreamStatusHolder.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/stream/AirbyteStreamStatusHolder.java deleted file mode 100644 index 4aa3eb55e666f..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/stream/AirbyteStreamStatusHolder.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.stream; - -import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.AirbyteStreamStatusTraceMessage; -import io.airbyte.protocol.models.v0.AirbyteStreamStatusTraceMessage.AirbyteStreamStatus; -import io.airbyte.protocol.models.v0.AirbyteTraceMessage; -import io.airbyte.protocol.models.v0.StreamDescriptor; - -/** - * Represents the current status of a stream provided by a source. - */ -public class AirbyteStreamStatusHolder { - - private final AirbyteStreamNameNamespacePair airbyteStream; - - private final AirbyteStreamStatus airbyteStreamStatus; - - public AirbyteStreamStatusHolder(final AirbyteStreamNameNamespacePair airbyteStream, - final AirbyteStreamStatus airbyteStreamStatus) { - this.airbyteStream = airbyteStream; - this.airbyteStreamStatus = airbyteStreamStatus; - } - - public AirbyteTraceMessage toTraceMessage() { - final AirbyteTraceMessage traceMessage = new AirbyteTraceMessage(); - final AirbyteStreamStatusTraceMessage streamStatusTraceMessage = new AirbyteStreamStatusTraceMessage() - .withStreamDescriptor(new StreamDescriptor().withName(airbyteStream.getName()).withNamespace(airbyteStream.getNamespace())) - .withStatus(airbyteStreamStatus); - return traceMessage.withEmittedAt(Long.valueOf(System.currentTimeMillis()).doubleValue()) - .withStreamStatus(streamStatusTraceMessage) - .withType(AirbyteTraceMessage.Type.STREAM_STATUS); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/stream/AirbyteStreamUtils.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/stream/AirbyteStreamUtils.java deleted file mode 100644 index edddb37be98ef..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/stream/AirbyteStreamUtils.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.stream; - -import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.AirbyteStream; - -/** - * Collection of utility methods used to convert objects to {@link AirbyteStreamNameNamespacePair} - * objects. - */ -public class AirbyteStreamUtils { - - /** - * Converts an {@link AirbyteStream} to a {@link AirbyteStreamNameNamespacePair}. - * - * @param airbyteStream The {@link AirbyteStream} to convert. - * @return The {@link AirbyteStreamNameNamespacePair}. - */ - public static AirbyteStreamNameNamespacePair convertFromAirbyteStream(final AirbyteStream airbyteStream) { - return new AirbyteStreamNameNamespacePair(airbyteStream.getName(), airbyteStream.getNamespace()); - } - - /** - * Converts a stream name and namespace into a {@link AirbyteStreamNameNamespacePair}. - * - * @param name The name of the stream. - * @param namespace The namespace of the stream. - * @return The {@link AirbyteStreamNameNamespacePair}. - */ - public static AirbyteStreamNameNamespacePair convertFromNameAndNamespace(final String name, final String namespace) { - return new AirbyteStreamNameNamespacePair(name, namespace); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/stream/MoreStreams.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/stream/MoreStreams.java deleted file mode 100644 index b8ee9dd5dfaef..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/stream/MoreStreams.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.stream; - -import java.util.Iterator; -import java.util.Spliterator; -import java.util.Spliterators; -import java.util.stream.Stream; -import java.util.stream.StreamSupport; - -public class MoreStreams { - - public static Stream toStream(final Iterator iterator) { - return StreamSupport.stream(Spliterators.spliteratorUnknownSize(iterator, Spliterator.ORDERED), false); - } - - public static Stream toStream(final Iterable iterable) { - return toStream(iterable.iterator()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/stream/StreamStatusUtils.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/stream/StreamStatusUtils.java deleted file mode 100644 index f4f748bef9930..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/stream/StreamStatusUtils.java +++ /dev/null @@ -1,226 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.stream; - -import io.airbyte.commons.util.AirbyteStreamAware; -import io.airbyte.commons.util.AutoCloseableIterator; -import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteStreamStatusTraceMessage.AirbyteStreamStatus; -import java.util.Optional; -import java.util.function.Consumer; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Collection of utility methods that support the generation of stream status updates. - */ -public class StreamStatusUtils { - - private static final Logger LOGGER = LoggerFactory.getLogger(StreamStatusUtils.class); - - /** - * Creates a new {@link Consumer} that wraps the provided {@link Consumer} with stream status - * reporting capabilities. Specifically, this consumer will emit an - * {@link AirbyteStreamStatus#RUNNING} status after the first message is consumed by the delegated - * {@link Consumer}. - * - * @param stream The stream from which the delegating {@link Consumer} will consume messages for - * processing. - * @param delegateRecordCollector The delegated {@link Consumer} that will be called when this - * consumer accepts a message for processing. - * @param streamStatusEmitter The optional {@link Consumer} that will be used to emit stream status - * updates. - * @return A wrapping {@link Consumer} that provides stream status updates when the provided - * delegate {@link Consumer} is invoked. - */ - public static Consumer statusTrackingRecordCollector(final AutoCloseableIterator stream, - final Consumer delegateRecordCollector, - final Optional> streamStatusEmitter) { - return new Consumer<>() { - - private boolean firstRead = true; - - @Override - public void accept(final AirbyteMessage airbyteMessage) { - try { - delegateRecordCollector.accept(airbyteMessage); - } finally { - if (firstRead) { - emitRunningStreamStatus(stream, streamStatusEmitter); - firstRead = false; - } - } - } - - }; - } - - /** - * Emits a {@link AirbyteStreamStatus#RUNNING} stream status for the provided stream. - * - * @param airbyteStream The stream that should be associated with the stream status. - * @param statusEmitter The {@link Optional} stream status emitter. - */ - public static void emitRunningStreamStatus(final AutoCloseableIterator airbyteStream, - final Optional> statusEmitter) { - if (airbyteStream instanceof AirbyteStreamAware) { - emitRunningStreamStatus((AirbyteStreamAware) airbyteStream, statusEmitter); - } - } - - /** - * Emits a {@link AirbyteStreamStatus#RUNNING} stream status for the provided stream. - * - * @param airbyteStream The stream that should be associated with the stream status. - * @param statusEmitter The {@link Optional} stream status emitter. - */ - public static void emitRunningStreamStatus(final AirbyteStreamAware airbyteStream, - final Optional> statusEmitter) { - emitRunningStreamStatus(airbyteStream.getAirbyteStream(), statusEmitter); - } - - /** - * Emits a {@link AirbyteStreamStatus#RUNNING} stream status for the provided stream. - * - * @param airbyteStream The stream that should be associated with the stream status. - * @param statusEmitter The {@link Optional} stream status emitter. - */ - public static void emitRunningStreamStatus(final Optional airbyteStream, - final Optional> statusEmitter) { - airbyteStream.ifPresent(s -> { - LOGGER.debug("RUNNING -> {}", s); - emitStreamStatus(s, AirbyteStreamStatus.RUNNING, statusEmitter); - }); - } - - /** - * Emits a {@link AirbyteStreamStatus#STARTED} stream status for the provided stream. - * - * @param airbyteStream The stream that should be associated with the stream status. - * @param statusEmitter The {@link Optional} stream status emitter. - */ - public static void emitStartStreamStatus(final AutoCloseableIterator airbyteStream, - final Optional> statusEmitter) { - if (airbyteStream instanceof AirbyteStreamAware) { - emitStartStreamStatus((AirbyteStreamAware) airbyteStream, statusEmitter); - } - } - - /** - * Emits a {@link AirbyteStreamStatus#STARTED} stream status for the provided stream. - * - * @param airbyteStream The stream that should be associated with the stream status. - * @param statusEmitter The {@link Optional} stream status emitter. - */ - public static void emitStartStreamStatus(final AirbyteStreamAware airbyteStream, - final Optional> statusEmitter) { - emitStartStreamStatus(airbyteStream.getAirbyteStream(), statusEmitter); - } - - /** - * Emits a {@link AirbyteStreamStatus#STARTED} stream status for the provided stream. - * - * @param airbyteStream The stream that should be associated with the stream status. - * @param statusEmitter The {@link Optional} stream status emitter. - */ - public static void emitStartStreamStatus(final Optional airbyteStream, - final Optional> statusEmitter) { - airbyteStream.ifPresent(s -> { - LOGGER.debug("STARTING -> {}", s); - emitStreamStatus(s, AirbyteStreamStatus.STARTED, statusEmitter); - }); - } - - /** - * Emits a {@link AirbyteStreamStatus#COMPLETE} stream status for the provided stream. - * - * @param airbyteStream The stream that should be associated with the stream status. - * @param statusEmitter The {@link Optional} stream status emitter. - */ - public static void emitCompleteStreamStatus(final AutoCloseableIterator airbyteStream, - final Optional> statusEmitter) { - if (airbyteStream instanceof AirbyteStreamAware) { - emitCompleteStreamStatus((AirbyteStreamAware) airbyteStream, statusEmitter); - } - } - - /** - * Emits a {@link AirbyteStreamStatus#COMPLETE} stream status for the provided stream. - * - * @param airbyteStream The stream that should be associated with the stream status. - * @param statusEmitter The {@link Optional} stream status emitter. - */ - public static void emitCompleteStreamStatus(final AirbyteStreamAware airbyteStream, - final Optional> statusEmitter) { - emitCompleteStreamStatus(airbyteStream.getAirbyteStream(), statusEmitter); - } - - /** - * Emits a {@link AirbyteStreamStatus#COMPLETE} stream status for the provided stream. - * - * @param airbyteStream The stream that should be associated with the stream status. - * @param statusEmitter The {@link Optional} stream status emitter. - */ - public static void emitCompleteStreamStatus(final Optional airbyteStream, - final Optional> statusEmitter) { - airbyteStream.ifPresent(s -> { - LOGGER.debug("COMPLETE -> {}", s); - emitStreamStatus(s, AirbyteStreamStatus.COMPLETE, statusEmitter); - }); - } - - /** - * Emits a {@link AirbyteStreamStatus#INCOMPLETE} stream status for the provided stream. - * - * @param airbyteStream The stream that should be associated with the stream status. - * @param statusEmitter The {@link Optional} stream status emitter. - */ - public static void emitIncompleteStreamStatus(final AutoCloseableIterator airbyteStream, - final Optional> statusEmitter) { - if (airbyteStream instanceof AirbyteStreamAware) { - emitIncompleteStreamStatus((AirbyteStreamAware) airbyteStream, statusEmitter); - } - } - - /** - * Emits a {@link AirbyteStreamStatus#INCOMPLETE} stream status for the provided stream. - * - * @param airbyteStream The stream that should be associated with the stream status. - * @param statusEmitter The {@link Optional} stream status emitter. - */ - public static void emitIncompleteStreamStatus(final AirbyteStreamAware airbyteStream, - final Optional> statusEmitter) { - emitIncompleteStreamStatus(airbyteStream.getAirbyteStream(), statusEmitter); - } - - /** - * Emits a {@link AirbyteStreamStatus#INCOMPLETE} stream status for the provided stream. - * - * @param airbyteStream The stream that should be associated with the stream status. - * @param statusEmitter The {@link Optional} stream status emitter. - */ - public static void emitIncompleteStreamStatus(final Optional airbyteStream, - final Optional> statusEmitter) { - airbyteStream.ifPresent(s -> { - LOGGER.debug("INCOMPLETE -> {}", s); - emitStreamStatus(s, AirbyteStreamStatus.INCOMPLETE, statusEmitter); - }); - } - - /** - * Emits a stream status for the provided stream. - * - * @param airbyteStreamNameNamespacePair The stream identifier. - * @param airbyteStreamStatus The status update. - * @param statusEmitter The {@link Optional} stream status emitter. - */ - private static void emitStreamStatus(final AirbyteStreamNameNamespacePair airbyteStreamNameNamespacePair, - final AirbyteStreamStatus airbyteStreamStatus, - final Optional> statusEmitter) { - statusEmitter.ifPresent(consumer -> consumer.accept(new AirbyteStreamStatusHolder(airbyteStreamNameNamespacePair, airbyteStreamStatus))); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/string/Strings.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/string/Strings.java deleted file mode 100644 index 346482a8248d4..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/string/Strings.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.string; - -import com.google.common.collect.Streams; -import java.util.stream.Collectors; -import org.apache.commons.lang3.RandomStringUtils; - -public class Strings { - - public static String join(final Iterable iterable, final CharSequence separator) { - return Streams.stream(iterable) - .map(Object::toString) - .collect(Collectors.joining(separator)); - } - - public static String addRandomSuffix(final String base, final String separator, final int suffixLength) { - return base + separator + RandomStringUtils.randomAlphabetic(suffixLength).toLowerCase(); - } - - public static String safeTrim(final String string) { - return string == null ? null : string.trim(); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/text/Names.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/text/Names.java deleted file mode 100644 index 32fc8abe3d54b..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/text/Names.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.text; - -import com.google.common.base.Preconditions; -import java.text.Normalizer; - -public class Names { - - public static final String NON_ALPHANUMERIC_AND_UNDERSCORE_PATTERN = "[^\\p{Alnum}_]"; - - /** - * Converts any UTF8 string to a string with only alphanumeric and _ characters without preserving - * accent characters. - * - * @param s string to convert - * @return cleaned string - */ - public static String toAlphanumericAndUnderscore(final String s) { - return Normalizer.normalize(s, Normalizer.Form.NFKD) - .replaceAll("\\p{M}", "") // P{M} matches a code point that is not a combining mark (unicode) - .replaceAll("\\s+", "_") - .replaceAll(NON_ALPHANUMERIC_AND_UNDERSCORE_PATTERN, "_"); - } - - public static String doubleQuote(final String value) { - return internalQuote(value, '"'); - } - - public static String singleQuote(final String value) { - return internalQuote(value, '\''); - } - - private static String internalQuote(final String value, final char quoteChar) { - Preconditions.checkNotNull(value); - - final boolean startsWithChar = value.charAt(0) == quoteChar; - final boolean endsWithChar = value.charAt(value.length() - 1) == quoteChar; - - Preconditions.checkState(startsWithChar == endsWithChar, "Invalid value: %s", value); - - if (startsWithChar) { - return value; - } else { - return String.format("%c%s%c", quoteChar, value, quoteChar); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/text/Sqls.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/text/Sqls.java deleted file mode 100644 index 8c267f1aa44ca..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/text/Sqls.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.text; - -import java.util.Collection; -import java.util.Set; -import java.util.stream.Collectors; -import java.util.stream.StreamSupport; - -public class Sqls { - - public static > String toSqlName(final T value) { - return value.name().toLowerCase(); - } - - public static > Set toSqlNames(final Collection values) { - return values.stream().map(Sqls::toSqlName).collect(Collectors.toSet()); - } - - /** - * Generate a string fragment that can be put in the IN clause of a SQL statement. eg. column IN - * (value1, value2) - * - * @param values to encode - * @param enum type - * @return "'value1', 'value2', 'value3'" - */ - public static > String toSqlInFragment(final Iterable values) { - return StreamSupport.stream(values.spliterator(), false).map(Sqls::toSqlName).map(Names::singleQuote) - .collect(Collectors.joining(",", "(", ")")); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/util/AirbyteStreamAware.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/util/AirbyteStreamAware.java deleted file mode 100644 index 4fb6efd6c4ee6..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/util/AirbyteStreamAware.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.util; - -import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; -import java.util.Optional; - -/** - * Interface that indicates that an object exposes information used to identify an Airbyte stream. - */ -public interface AirbyteStreamAware { - - /** - * Returns the {@link AirbyteStreamNameNamespacePair} identifying the Airbyte stream associated with - * the object. - * - * @return The {@link AirbyteStreamNameNamespacePair} identifying the Airbyte stream (may be empty). - */ - default Optional getAirbyteStream() { - return Optional.empty(); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/util/AutoCloseableIterator.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/util/AutoCloseableIterator.java deleted file mode 100644 index ccbc11e10a111..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/util/AutoCloseableIterator.java +++ /dev/null @@ -1,15 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.util; - -import java.util.Iterator; - -/** - * If you operate on this iterator, you better close it. {@link AutoCloseableIterator#close} must be - * idempotent. The contract on this interface is that it may be called MANY times. - * - * @param type - */ -public interface AutoCloseableIterator extends Iterator, AutoCloseable, AirbyteStreamAware {} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/util/AutoCloseableIterators.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/util/AutoCloseableIterators.java deleted file mode 100644 index 9423f54c5eb9b..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/util/AutoCloseableIterators.java +++ /dev/null @@ -1,212 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.util; - -import com.google.common.collect.Iterators; -import io.airbyte.commons.concurrency.VoidCallable; -import io.airbyte.commons.stream.AirbyteStreamStatusHolder; -import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; -import java.util.Iterator; -import java.util.List; -import java.util.function.Consumer; -import java.util.function.Function; -import java.util.function.Supplier; -import java.util.stream.Stream; - -public class AutoCloseableIterators { - - /** - * Coerces a vanilla {@link Iterator} into a {@link AutoCloseableIterator} by adding a no op close - * function. - * - * @param iterator iterator to convert - * @param type - * @return closeable iterator - */ - public static AutoCloseableIterator fromIterator(final Iterator iterator) { - return new DefaultAutoCloseableIterator<>(iterator, VoidCallable.NOOP, null); - } - - /** - * Coerces a vanilla {@link Iterator} into a {@link AutoCloseableIterator} by adding a no op close - * function. - * - * @param iterator iterator to convert - * @param type - * @return closeable iterator - */ - public static AutoCloseableIterator fromIterator(final Iterator iterator, final AirbyteStreamNameNamespacePair airbyteStream) { - return new DefaultAutoCloseableIterator<>(iterator, VoidCallable.NOOP, airbyteStream); - } - - /** - * Coerces a vanilla {@link Iterator} into a {@link AutoCloseableIterator}. The provided onClose - * function will be called at most one time. - * - * @param iterator autocloseable iterator to add another close to - * @param onClose the function that will be called on close - * @param type - * @return new autocloseable iterator with the close function appended - */ - public static AutoCloseableIterator fromIterator(final Iterator iterator, - final VoidCallable onClose, - final AirbyteStreamNameNamespacePair airbyteStream) { - return new DefaultAutoCloseableIterator<>(iterator, onClose::call, airbyteStream); - } - - /** - * Wraps a {@link Stream} in a {@link AutoCloseableIterator}. The first time - * {@link AutoCloseableIterator#close()} is called, {@link Stream#close()} will be called. It will - * not be called again subsequently. - * - * @param stream stream to wrap - * @param type - * @return autocloseable iterator - */ - public static AutoCloseableIterator fromStream(final Stream stream, final AirbyteStreamNameNamespacePair airbyteStream) { - return new DefaultAutoCloseableIterator<>(stream.iterator(), stream::close, airbyteStream); - } - - /** - * Consumes entire iterator and collect it into a list. Then it closes the iterator. - */ - public static List toListAndClose(final AutoCloseableIterator iterator) throws Exception { - try (iterator) { - return MoreIterators.toList(iterator); - } - } - - /** - * Returns a {@link AutoCloseableIterator} that will call the provided supplier ONE time when - * {@link AutoCloseableIterator#hasNext()} is called the first time. The supplier returns a stream - * that will be exposed as an iterator. - * - * @param iteratorSupplier supplier that provides a autocloseable iterator that will be invoked - * lazily - * @param type - * @return autocloseable iterator - */ - public static AutoCloseableIterator lazyIterator(final Supplier> iteratorSupplier, - final AirbyteStreamNameNamespacePair airbyteStream) { - return new LazyAutoCloseableIterator<>(iteratorSupplier, airbyteStream); - } - - /** - * Append a function to be called on {@link AutoCloseableIterator#close}. - * - * @param autoCloseableIterator autocloseable iterator to add another close to - * @param voidCallable the function that will be called on close - * @param type - * @return new autocloseable iterator with the close function appended - */ - public static AutoCloseableIterator appendOnClose(final AutoCloseableIterator autoCloseableIterator, final VoidCallable voidCallable) { - return new DefaultAutoCloseableIterator<>(autoCloseableIterator, () -> { - autoCloseableIterator.close(); - voidCallable.call(); - }, null); - } - - /** - * Append a function to be called on {@link AutoCloseableIterator#close}. - * - * @param autoCloseableIterator autocloseable iterator to add another close to - * @param voidCallable the function that will be called on close - * @param type - * @return new autocloseable iterator with the close function appended - */ - public static AutoCloseableIterator appendOnClose(final AutoCloseableIterator autoCloseableIterator, - final VoidCallable voidCallable, - final AirbyteStreamNameNamespacePair airbyteStream) { - return new DefaultAutoCloseableIterator<>(autoCloseableIterator, () -> { - autoCloseableIterator.close(); - voidCallable.call(); - }, - airbyteStream); - } - - /** - * Lift and shift of Guava's {@link Iterators#transform} using the {@link AutoCloseableIterator} - * interface. - * - * @param fromIterator input autocloseable iterator - * @param function map function - * @param input type - * @param output type - * @return mapped autocloseable iterator - */ - public static AutoCloseableIterator transform(final AutoCloseableIterator fromIterator, - final Function function) { - return new DefaultAutoCloseableIterator<>(Iterators.transform(fromIterator, function::apply), fromIterator::close, null); - } - - /** - * Lift and shift of Guava's {@link Iterators#transform} using the {@link AutoCloseableIterator} - * interface. - * - * @param fromIterator input autocloseable iterator - * @param function map function - * @param input type - * @param output type - * @return mapped autocloseable iterator - */ - public static AutoCloseableIterator transform(final AutoCloseableIterator fromIterator, - final AirbyteStreamNameNamespacePair airbyteStream, - final Function function) { - return new DefaultAutoCloseableIterator<>(Iterators.transform(fromIterator, function::apply), fromIterator::close, airbyteStream); - } - - /** - * Map over a {@link AutoCloseableIterator} using a vanilla {@link Iterator} while retaining all of - * the Resource behavior of the input {@link AutoCloseableIterator}. - * - * @param iteratorCreator function that takes in a autocloseable iterator and uses it to create a - * vanilla iterator - * @param autoCloseableIterator input autocloseable iterator - * @param type - * @return autocloseable iterator that still has the close functionality of the original input - * iterator but is transformed by the iterator output by the iteratorCreator - */ - public static AutoCloseableIterator transform(final Function, Iterator> iteratorCreator, - final AutoCloseableIterator autoCloseableIterator, - final AirbyteStreamNameNamespacePair airbyteStream) { - return new DefaultAutoCloseableIterator<>(iteratorCreator.apply(autoCloseableIterator), autoCloseableIterator::close, airbyteStream); - } - - public static AutoCloseableIterator transformIterator(final Function, Iterator> iteratorCreator, - final AutoCloseableIterator autoCloseableIterator, - final AirbyteStreamNameNamespacePair airbyteStream) { - return new DefaultAutoCloseableIterator(iteratorCreator.apply(autoCloseableIterator), autoCloseableIterator::close, airbyteStream); - } - - @SafeVarargs - public static CompositeIterator concatWithEagerClose(final Consumer airbyteStreamStatusConsumer, - final AutoCloseableIterator... iterators) { - return concatWithEagerClose(List.of(iterators), airbyteStreamStatusConsumer); - } - - @SafeVarargs - public static CompositeIterator concatWithEagerClose(final AutoCloseableIterator... iterators) { - return concatWithEagerClose(List.of(iterators), null); - } - - /** - * Creates a {@link CompositeIterator} that reads from the provided iterators in a serial fashion. - * - * @param iterators The list of iterators to be used in a serial fashion. - * @param airbyteStreamStatusConsumer The stream status consumer used to report stream status during - * iteration. - * @return A {@link CompositeIterator}. - * @param The type of data contained in each iterator. - */ - public static CompositeIterator concatWithEagerClose(final List> iterators, - final Consumer airbyteStreamStatusConsumer) { - return new CompositeIterator<>(iterators, airbyteStreamStatusConsumer); - } - - public static CompositeIterator concatWithEagerClose(final List> iterators) { - return concatWithEagerClose(iterators, null); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/util/CompositeIterator.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/util/CompositeIterator.java deleted file mode 100644 index c8a3030bb92d5..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/util/CompositeIterator.java +++ /dev/null @@ -1,149 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.util; - -import com.google.common.base.Preconditions; -import com.google.common.collect.AbstractIterator; -import io.airbyte.commons.stream.AirbyteStreamStatusHolder; -import io.airbyte.commons.stream.StreamStatusUtils; -import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Optional; -import java.util.Set; -import java.util.function.Consumer; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Composes multiple {@link AutoCloseableIterator}s. For each internal iterator, after the first - * time its {@link Iterator#hasNext} function returns false, the composite iterator will call - * {@link AutoCloseableIterator#close()} on that internal iterator. - * - *

    - * {@link CompositeIterator}s should be closed. Calling {@link CompositeIterator#close()} will - * attempt to close each internal iterator as well. Thus the close method on each internal iterator - * should be idempotent as it is will likely be called multiple times. - *

    - *

    - * {@link CompositeIterator#close()} gives the guarantee that it will call close on each internal - * iterator once (even if any of the iterators throw an exception). After it has attempted to close - * each one once, {@link CompositeIterator} will rethrow the _first_ exception that it encountered - * while closing internal iterators. If multiple internal iterators throw exceptions, only the first - * exception will be rethrown, though the others will be logged. - *

    - * - * @param type - */ -public final class CompositeIterator extends AbstractIterator implements AutoCloseableIterator { - - private static final Logger LOGGER = LoggerFactory.getLogger(CompositeIterator.class); - - private final Optional> airbyteStreamStatusConsumer; - private final List> iterators; - - private int i; - private final Set> seenIterators; - private boolean hasClosed; - - CompositeIterator(final List> iterators, final Consumer airbyteStreamStatusConsumer) { - Preconditions.checkNotNull(iterators); - - this.airbyteStreamStatusConsumer = Optional.ofNullable(airbyteStreamStatusConsumer); - this.iterators = iterators; - this.i = 0; - this.seenIterators = new HashSet>(); - this.hasClosed = false; - } - - @Override - protected T computeNext() { - assertHasNotClosed(); - - if (iterators.isEmpty()) { - return endOfData(); - } - - // 1. search for an iterator that hasNext. - // 2. close each iterator we encounter those that do not. - // 3. if there are none, we are done. - while (!currentIterator().hasNext()) { - try { - currentIterator().close(); - emitStartStreamStatus(currentIterator().getAirbyteStream()); - StreamStatusUtils.emitCompleteStreamStatus(getAirbyteStream(), airbyteStreamStatusConsumer); - } catch (final Exception e) { - StreamStatusUtils.emitIncompleteStreamStatus(getAirbyteStream(), airbyteStreamStatusConsumer); - throw new RuntimeException(e); - } - - if (i + 1 < iterators.size()) { - i++; - } else { - return endOfData(); - } - } - - try { - final boolean isFirstRun = emitStartStreamStatus(currentIterator().getAirbyteStream()); - final T next = currentIterator().next(); - if (isFirstRun) { - StreamStatusUtils.emitRunningStreamStatus(getAirbyteStream(), airbyteStreamStatusConsumer); - } - return next; - } catch (final RuntimeException e) { - StreamStatusUtils.emitIncompleteStreamStatus(getAirbyteStream(), airbyteStreamStatusConsumer); - throw e; - } - } - - private AutoCloseableIterator currentIterator() { - return iterators.get(i); - } - - private boolean emitStartStreamStatus(final Optional airbyteStream) { - if (airbyteStream.isPresent() && !seenIterators.contains(airbyteStream)) { - seenIterators.add(airbyteStream); - StreamStatusUtils.emitStartStreamStatus(airbyteStream, airbyteStreamStatusConsumer); - return true; - } - return false; - } - - @Override - public void close() throws Exception { - hasClosed = true; - - final List exceptions = new ArrayList<>(); - for (final AutoCloseableIterator iterator : iterators) { - try { - iterator.close(); - } catch (final Exception e) { - LOGGER.error("exception while closing", e); - exceptions.add(e); - } - } - - if (!exceptions.isEmpty()) { - throw exceptions.get(0); - } - } - - @Override - public Optional getAirbyteStream() { - if (currentIterator() instanceof AirbyteStreamAware) { - return AirbyteStreamAware.class.cast(currentIterator()).getAirbyteStream(); - } else { - return Optional.empty(); - } - } - - private void assertHasNotClosed() { - Preconditions.checkState(!hasClosed); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/util/DefaultAutoCloseableIterator.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/util/DefaultAutoCloseableIterator.java deleted file mode 100644 index effd09566e372..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/util/DefaultAutoCloseableIterator.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.util; - -import com.google.common.base.Preconditions; -import com.google.common.collect.AbstractIterator; -import io.airbyte.commons.concurrency.VoidCallable; -import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; -import java.util.Iterator; -import java.util.Optional; - -/** - * The canonical {@link AutoCloseableIterator}. The default behavior guarantees that the provided - * close functional will be called no more than one time. - * - * @param type - */ -class DefaultAutoCloseableIterator extends AbstractIterator implements AutoCloseableIterator { - - private final AirbyteStreamNameNamespacePair airbyteStream; - private final Iterator iterator; - private final VoidCallable onClose; - - private boolean hasClosed; - - public DefaultAutoCloseableIterator(final Iterator iterator, final VoidCallable onClose, final AirbyteStreamNameNamespacePair airbyteStream) { - Preconditions.checkNotNull(iterator); - Preconditions.checkNotNull(onClose); - - this.airbyteStream = airbyteStream; - this.iterator = iterator; - this.onClose = onClose; - this.hasClosed = false; - } - - @Override - protected T computeNext() { - assertHasNotClosed(); - - if (iterator.hasNext()) { - return iterator.next(); - } else { - return endOfData(); - } - } - - @Override - public void close() throws Exception { - if (!hasClosed) { - hasClosed = true; - onClose.call(); - } - } - - @Override - public Optional getAirbyteStream() { - return Optional.ofNullable(airbyteStream); - } - - private void assertHasNotClosed() { - Preconditions.checkState(!hasClosed); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/util/LazyAutoCloseableIterator.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/util/LazyAutoCloseableIterator.java deleted file mode 100644 index 77fcbeb513082..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/util/LazyAutoCloseableIterator.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.util; - -import com.google.common.base.Preconditions; -import com.google.common.collect.AbstractIterator; -import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; -import java.util.Optional; -import java.util.function.Supplier; - -/** - * A {@link AutoCloseableIterator} that calls the provided supplier the first time - * {@link AutoCloseableIterator#hasNext} or {@link AutoCloseableIterator#next} is called. If - * {@link AutoCloseableIterator#hasNext} or {@link AutoCloseableIterator#next} are never called, - * then the supplier will never be called. This means if the iterator is closed in this state, the - * close function on the input iterator will not be called. The assumption here is that if nothing - * is ever supplied, then there is nothing to close. - * - * @param type - */ -class LazyAutoCloseableIterator extends AbstractIterator implements AutoCloseableIterator { - - private final Supplier> iteratorSupplier; - - private final AirbyteStreamNameNamespacePair airbyteStream; - private boolean hasSupplied; - private AutoCloseableIterator internalIterator; - - public LazyAutoCloseableIterator(final Supplier> iteratorSupplier, final AirbyteStreamNameNamespacePair airbyteStream) { - Preconditions.checkNotNull(iteratorSupplier); - this.airbyteStream = airbyteStream; - this.iteratorSupplier = iteratorSupplier; - this.hasSupplied = false; - } - - @Override - protected T computeNext() { - if (!hasSupplied) { - internalIterator = iteratorSupplier.get(); - Preconditions.checkNotNull(internalIterator, "Supplied iterator was null."); - hasSupplied = true; - } - - if (internalIterator.hasNext()) { - return internalIterator.next(); - } else { - return endOfData(); - } - } - - @Override - public void close() throws Exception { - if (internalIterator != null) { - internalIterator.close(); - } - } - - @Override - public Optional getAirbyteStream() { - return Optional.ofNullable(airbyteStream); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/util/MoreIterators.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/util/MoreIterators.java deleted file mode 100644 index b1924e2ecface..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/util/MoreIterators.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.util; - -import com.google.common.collect.AbstractIterator; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Set; -import java.util.function.Supplier; - -public class MoreIterators { - - /** - * Create an iterator from elements - * - * @param elements element to put in iterator - * @param type - * @return iterator with all elements - */ - @SafeVarargs - public static Iterator of(final T... elements) { - return Arrays.asList(elements).iterator(); - } - - /** - * Create a list from an iterator - * - * @param iterator iterator to convert - * @param type - * @return list - */ - public static List toList(final Iterator iterator) { - final List list = new ArrayList<>(); - while (iterator.hasNext()) { - list.add(iterator.next()); - } - return list; - } - - /** - * Create a set from an iterator - * - * @param iterator iterator to convert - * @param type - * @return set - */ - public static Set toSet(final Iterator iterator) { - final Set set = new HashSet<>(); - while (iterator.hasNext()) { - set.add(iterator.next()); - } - return set; - } - - public static Iterator singletonIteratorFromSupplier(final Supplier supplier) { - return new AbstractIterator() { - - private boolean hasSupplied = false; - - @Override - protected T computeNext() { - if (!hasSupplied) { - hasSupplied = true; - return supplier.get(); - } else { - return endOfData(); - } - } - - }; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/version/AirbyteProtocolVersion.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/version/AirbyteProtocolVersion.java deleted file mode 100644 index 0dad1bf361d6d..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/version/AirbyteProtocolVersion.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.version; - -public class AirbyteProtocolVersion { - - public final static Version DEFAULT_AIRBYTE_PROTOCOL_VERSION = new Version("0.2.0"); - public final static Version V0 = new Version("0.3.0"); - public final static Version V1 = new Version("1.0.0"); - - public final static String AIRBYTE_PROTOCOL_VERSION_MAX_KEY_NAME = "airbyte_protocol_version_max"; - public final static String AIRBYTE_PROTOCOL_VERSION_MIN_KEY_NAME = "airbyte_protocol_version_min"; - - public static Version getWithDefault(final String version) { - if (version == null || version.isEmpty() || version.isBlank()) { - return DEFAULT_AIRBYTE_PROTOCOL_VERSION; - } else { - return new Version(version); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/version/AirbyteVersion.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/version/AirbyteVersion.java deleted file mode 100644 index 7284901de7802..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/version/AirbyteVersion.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.version; - -/** - * The AirbyteVersion identifies the version of the database used internally by Airbyte services. - */ -public class AirbyteVersion extends Version { - - public static final String AIRBYTE_VERSION_KEY_NAME = "airbyte_version"; - - public AirbyteVersion(final String version) { - super(version); - } - - public AirbyteVersion(final String major, final String minor, final String patch) { - super(major, minor, patch); - } - - public static void assertIsCompatible(final AirbyteVersion version1, final AirbyteVersion version2) throws IllegalStateException { - if (!isCompatible(version1, version2)) { - throw new IllegalStateException(getErrorMessage(version1, version2)); - } - } - - public static String getErrorMessage(final AirbyteVersion version1, final AirbyteVersion version2) { - return String.format( - "Version mismatch between %s and %s.\n" + - "Please upgrade or reset your Airbyte Database, see more at https://docs.airbyte.io/operator-guides/upgrading-airbyte", - version1.serialize(), version2.serialize()); - } - - @Override - public String toString() { - return "AirbyteVersion{" + - "version='" + version + '\'' + - ", major='" + major + '\'' + - ", minor='" + minor + '\'' + - ", patch='" + patch + '\'' + - '}'; - } - - public static AirbyteVersion versionWithoutPatch(final AirbyteVersion airbyteVersion) { - final String versionWithoutPatch = "" + airbyteVersion.getMajorVersion() - + "." - + airbyteVersion.getMinorVersion() - + ".0-" - + airbyteVersion.serialize().replace("\n", "").strip().split("-")[1]; - return new AirbyteVersion(versionWithoutPatch); - } - - public static AirbyteVersion versionWithoutPatch(final String airbyteVersion) { - return versionWithoutPatch(new AirbyteVersion(airbyteVersion)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/version/Version.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/version/Version.java deleted file mode 100644 index 2e289393048bb..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/version/Version.java +++ /dev/null @@ -1,184 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.version; - -import com.fasterxml.jackson.databind.annotation.JsonDeserialize; -import com.fasterxml.jackson.databind.annotation.JsonSerialize; -import com.google.common.base.Preconditions; -import java.util.Objects; - -/** - * A semVer Version class that allows "dev" as a version. - */ -@SuppressWarnings({"PMD.AvoidFieldNameMatchingTypeName", "PMD.ConstructorCallsOverridableMethod"}) -@JsonDeserialize(using = VersionDeserializer.class) -@JsonSerialize(using = VersionSerializer.class) -public class Version { - - public static final String DEV_VERSION_PREFIX = "dev"; - - protected final String version; - protected final String major; - protected final String minor; - protected final String patch; - - public Version(final String version) { - Preconditions.checkNotNull(version); - this.version = version; - final String[] parsedVersion = version.replace("\n", "").strip().split("-")[0].split("\\."); - - if (isDev()) { - this.major = null; - this.minor = null; - this.patch = null; - } else { - Preconditions.checkArgument(parsedVersion.length >= 3, "Invalid version string: " + version); - this.major = parsedVersion[0]; - this.minor = parsedVersion[1]; - this.patch = parsedVersion[2]; - } - } - - public Version(final String major, final String minor, final String patch) { - this.version = String.format("%s.%s.%s", major, minor, patch); - this.major = major; - this.minor = minor; - this.patch = patch; - } - - public String serialize() { - return version; - } - - public String getMajorVersion() { - return major; - } - - public String getMinorVersion() { - return minor; - } - - public String getPatchVersion() { - return patch; - } - - /** - * Compares two Version to check if they are equivalent. - * - * Only the major and minor part of the Version is taken into account. - */ - public int compatibleVersionCompareTo(final Version another) { - if (isDev() || another.isDev()) - return 0; - final int majorDiff = compareVersion(major, another.major); - if (majorDiff != 0) { - return majorDiff; - } - return compareVersion(minor, another.minor); - } - - /** - * @return true if this is greater than other. otherwise false. - */ - public boolean greaterThan(final Version other) { - return patchVersionCompareTo(other) > 0; - } - - /** - * @return true if this is greater than or equal toother. otherwise false. - */ - public boolean greaterThanOrEqualTo(final Version other) { - return patchVersionCompareTo(other) >= 0; - } - - /** - * @return true if this is less than other. otherwise false. - */ - public boolean lessThan(final Version other) { - return patchVersionCompareTo(other) < 0; - } - - /** - * Compares two Version to check if they are equivalent (including patch version). - */ - public int patchVersionCompareTo(final Version another) { - if (isDev() || another.isDev()) { - return 0; - } - final int majorDiff = compareVersion(major, another.major); - if (majorDiff != 0) { - return majorDiff; - } - final int minorDiff = compareVersion(minor, another.minor); - if (minorDiff != 0) { - return minorDiff; - } - return compareVersion(patch, another.patch); - } - - /** - * Compares two Version to check if only the patch version was updated. - */ - public boolean checkOnlyPatchVersionIsUpdatedComparedTo(final Version another) { - if (isDev() || another.isDev()) { - return false; - } - final int majorDiff = compareVersion(major, another.major); - if (majorDiff > 0) { - return false; - } - final int minorDiff = compareVersion(minor, another.minor); - if (minorDiff > 0) { - return false; - } - return compareVersion(patch, another.patch) > 0; - } - - public boolean isDev() { - return version.startsWith(DEV_VERSION_PREFIX); - } - - /** - * Version string needs to be converted to integer for comparison, because string comparison does - * not handle version string with different digits correctly. For example: - * {@code "11".compare("3") < 0}, while {@code Integer.compare(11, 3) > 0}. - */ - private static int compareVersion(final String v1, final String v2) { - return Integer.compare(Integer.parseInt(v1), Integer.parseInt(v2)); - } - - public static boolean isCompatible(final Version v1, final Version v2) { - return v1.compatibleVersionCompareTo(v2) == 0; - } - - @Override - public String toString() { - return "Version{" + - "version='" + version + '\'' + - ", major='" + major + '\'' + - ", minor='" + minor + '\'' + - ", patch='" + patch + '\'' + - '}'; - } - - @Override - public boolean equals(final Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - final Version that = (Version) o; - return Objects.equals(version, that.version) && Objects.equals(major, that.major) && Objects.equals(minor, that.minor) - && Objects.equals(patch, that.patch); - } - - @Override - public int hashCode() { - return Objects.hash(version, major, minor, patch); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/version/VersionDeserializer.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/version/VersionDeserializer.java deleted file mode 100644 index ec492174b02e5..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/version/VersionDeserializer.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.version; - -import com.fasterxml.jackson.core.JacksonException; -import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.databind.DeserializationContext; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.deser.std.StdDeserializer; -import java.io.IOException; - -public class VersionDeserializer extends StdDeserializer { - - public VersionDeserializer() { - this(null); - } - - public VersionDeserializer(Class vc) { - super(vc); - } - - @Override - public Version deserialize(JsonParser p, DeserializationContext ctxt) throws IOException, JacksonException { - final JsonNode node = p.getCodec().readTree(p); - final String v = node.get("version").asText(); - return new Version(v); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/version/VersionSerializer.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/version/VersionSerializer.java deleted file mode 100644 index 6d54764981a21..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/version/VersionSerializer.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.version; - -import com.fasterxml.jackson.core.JsonGenerator; -import com.fasterxml.jackson.databind.SerializerProvider; -import com.fasterxml.jackson.databind.ser.std.StdSerializer; -import java.io.IOException; - -public class VersionSerializer extends StdSerializer { - - public VersionSerializer() { - this(null); - } - - public VersionSerializer(Class t) { - super(t); - } - - @Override - public void serialize(Version value, JsonGenerator gen, SerializerProvider provider) throws IOException { - gen.writeStartObject(); - gen.writeStringField("version", value.version); - gen.writeEndObject(); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/yaml/Yamls.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/yaml/Yamls.java deleted file mode 100644 index a8c930d9a54b7..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/yaml/Yamls.java +++ /dev/null @@ -1,159 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.yaml; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.core.JsonToken; -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.SequenceWriter; -import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; -import com.fasterxml.jackson.dataformat.yaml.YAMLGenerator; -import com.fasterxml.jackson.dataformat.yaml.YAMLParser; -import com.google.common.collect.AbstractIterator; -import io.airbyte.commons.jackson.MoreMappers; -import io.airbyte.commons.lang.CloseableConsumer; -import io.airbyte.commons.lang.Exceptions; -import io.airbyte.commons.util.AutoCloseableIterator; -import io.airbyte.commons.util.AutoCloseableIterators; -import java.io.IOException; -import java.io.InputStream; -import java.io.Writer; -import java.util.Iterator; - -@SuppressWarnings("PMD.AvoidBranchingStatementAsLastInLoop") -public class Yamls { - - private static final YAMLFactory YAML_FACTORY = new YAMLFactory(); - private static final ObjectMapper OBJECT_MAPPER = MoreMappers.initYamlMapper(YAML_FACTORY); - - private static final YAMLFactory YAML_FACTORY_WITHOUT_QUOTES = new YAMLFactory().enable(YAMLGenerator.Feature.MINIMIZE_QUOTES); - private static final ObjectMapper OBJECT_MAPPER_WITHOUT_QUOTES = MoreMappers.initYamlMapper(YAML_FACTORY_WITHOUT_QUOTES); - - /** - * Serialize object to YAML string. String values WILL be wrapped in double quotes. - * - * @param object - object to serialize - * @return YAML string version of object - */ - public static String serialize(final T object) { - try { - return OBJECT_MAPPER.writeValueAsString(object); - } catch (final JsonProcessingException e) { - throw new RuntimeException(e); - } - } - - /** - * Serialize object to YAML string. String values will NOT be wrapped in double quotes. - * - * @param object - object to serialize - * @return YAML string version of object - */ - public static String serializeWithoutQuotes(final Object object) { - try { - return OBJECT_MAPPER_WITHOUT_QUOTES.writeValueAsString(object); - } catch (final JsonProcessingException e) { - throw new RuntimeException(e); - } - } - - public static T deserialize(final String yamlString, final Class klass) { - try { - return OBJECT_MAPPER.readValue(yamlString, klass); - } catch (final IOException e) { - throw new RuntimeException(e); - } - } - - public static T deserialize(final String yamlString, final TypeReference typeReference) { - try { - return OBJECT_MAPPER.readValue(yamlString, typeReference); - } catch (final IOException e) { - throw new RuntimeException(e); - } - } - - public static JsonNode deserialize(final String yamlString) { - try { - return OBJECT_MAPPER.readTree(yamlString); - } catch (final IOException e) { - throw new RuntimeException(e); - } - } - - public static AutoCloseableIterator deserializeArray(final InputStream stream) { - try { - final YAMLParser parser = YAML_FACTORY.createParser(stream); - - // Check the first token - if (parser.nextToken() != JsonToken.START_ARRAY) { - throw new IllegalStateException("Expected content to be an array"); - } - - final Iterator iterator = new AbstractIterator<>() { - - @Override - protected JsonNode computeNext() { - try { - while (parser.nextToken() != JsonToken.END_ARRAY) { - return parser.readValueAsTree(); - } - } catch (final IOException e) { - throw new RuntimeException(e); - } - return endOfData(); - } - - }; - - return AutoCloseableIterators.fromIterator(iterator, parser::close, null); - - } catch (final IOException e) { - throw new RuntimeException(e); - } - } - - // todo (cgardens) - share this with Jsons if ever needed. - - /** - * Creates a consumer that writes list items to the writer in a streaming fashion. - * - * @param writer writer to write to - * @param type of items being written - * @return consumer that is able to write element to a list element by element. must be closed! - */ - public static CloseableConsumer listWriter(final Writer writer) { - return new YamlConsumer<>(writer, OBJECT_MAPPER); - } - - public static class YamlConsumer implements CloseableConsumer { - - private final SequenceWriter sequenceWriter; - - public YamlConsumer(final Writer writer, final ObjectMapper objectMapper) { - this.sequenceWriter = Exceptions.toRuntime(() -> objectMapper.writer().writeValuesAsArray(writer)); - - } - - @Override - public void accept(final T t) { - try { - sequenceWriter.write(t); - } catch (final IOException e) { - throw new RuntimeException(e); - } - } - - @Override - public void close() throws Exception { - // closing the SequenceWriter closes the Writer that it wraps. - sequenceWriter.close(); - } - - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/configoss/AirbyteConfig.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/configoss/AirbyteConfig.java deleted file mode 100644 index bbfe8e63fa727..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/configoss/AirbyteConfig.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.configoss; - -import java.io.File; - -/** - * This interface represents configuration objects used by Airbyte and Airbyte cloud - */ -public interface AirbyteConfig { - - String name(); - - /** - * @return the name of the field storing the id for the configuration object - */ - String getIdFieldName(); - - /** - * @return the actual id of the configuration object - */ - String getId(T config); - - /** - * @return the path to the yaml file that defines the schema of the configuration object - */ - File getConfigSchemaFile(); - - Class getClassName(); - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/configoss/AirbyteConfigValidator.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/configoss/AirbyteConfigValidator.java deleted file mode 100644 index 9c56d56454fd9..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/configoss/AirbyteConfigValidator.java +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.configoss; - -import io.airbyte.validation.json.AbstractSchemaValidator; -import java.nio.file.Path; - -public class AirbyteConfigValidator extends AbstractSchemaValidator { - - final public static AirbyteConfigValidator AIRBYTE_CONFIG_VALIDATOR = new AirbyteConfigValidator(); - - @Override - public Path getSchemaPath(final ConfigSchema configType) { - return configType.getConfigSchemaFile().toPath(); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/configoss/CatalogDefinitionsConfig.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/configoss/CatalogDefinitionsConfig.java deleted file mode 100644 index 87807f93f180f..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/configoss/CatalogDefinitionsConfig.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.configoss; - -import java.util.Optional; - -public class CatalogDefinitionsConfig { - - private static final String SEED_SUBDIRECTORY = "seed/"; - private static final String ICON_SUBDIRECTORY = "icons/"; - private static final String LOCAL_CONNECTOR_CATALOG_FILE_NAME = "oss_registry.json"; - private static final String DEFAULT_LOCAL_CONNECTOR_CATALOG_PATH = - SEED_SUBDIRECTORY + LOCAL_CONNECTOR_CATALOG_FILE_NAME; - - public static String getLocalConnectorCatalogPath() { - final Optional customCatalogPath = new EnvConfigs().getLocalCatalogPath(); - if (customCatalogPath.isPresent()) { - return customCatalogPath.get(); - } - - return DEFAULT_LOCAL_CONNECTOR_CATALOG_PATH; - - } - - public static String getIconSubdirectory() { - return ICON_SUBDIRECTORY; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/configoss/ConfigSchema.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/configoss/ConfigSchema.java deleted file mode 100644 index 913acf46b95db..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/configoss/ConfigSchema.java +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.configoss; - -import io.airbyte.commons.json.JsonSchemas; -import java.io.File; -import java.nio.file.Path; -import java.util.function.Function; - -@SuppressWarnings({"PMD.AvoidThrowingRawExceptionTypes", "PMD.NullAssignment"}) -public enum ConfigSchema implements AirbyteConfig { - - // workspace - WORKSPACE_WEBHOOK_OPERATION_CONFIGS("WebhookOperationConfigs.yaml", - WebhookOperationConfigs.class), - - // source - STANDARD_SOURCE_DEFINITION("StandardSourceDefinition.yaml", - StandardSourceDefinition.class, - standardSourceDefinition -> standardSourceDefinition.getSourceDefinitionId().toString(), - "sourceDefinitionId"), - SOURCE_CONNECTION("SourceConnection.yaml", - SourceConnection.class, - sourceConnection -> sourceConnection.getSourceId().toString(), - "sourceId"), - - // destination - STANDARD_DESTINATION_DEFINITION("StandardDestinationDefinition.yaml", - StandardDestinationDefinition.class, - standardDestinationDefinition -> standardDestinationDefinition.getDestinationDefinitionId().toString(), - "destinationDefinitionId"), - DESTINATION_CONNECTION("DestinationConnection.yaml", - DestinationConnection.class, - destinationConnection -> destinationConnection.getDestinationId().toString(), - "destinationId"), - - STANDARD_SYNC_OPERATION("StandardSyncOperation.yaml", - StandardSyncOperation.class, - standardSyncOperation -> standardSyncOperation.getOperationId().toString(), - "operationId"), - - SOURCE_OAUTH_PARAM("SourceOAuthParameter.yaml", SourceOAuthParameter.class, - sourceOAuthParameter -> sourceOAuthParameter.getOauthParameterId().toString(), - "oauthParameterId"), - DESTINATION_OAUTH_PARAM("DestinationOAuthParameter.yaml", DestinationOAuthParameter.class, - destinationOAuthParameter -> destinationOAuthParameter.getOauthParameterId().toString(), - "oauthParameterId"), - - // worker - STANDARD_SYNC_INPUT("StandardSyncInput.yaml", StandardSyncInput.class), - STATE("State.yaml", State.class); - - static final Path KNOWN_SCHEMAS_ROOT = JsonSchemas.prepareSchemas("types", ConfigSchema.class); - - private final String schemaFilename; - private final Class className; - private final Function extractId; - private final String idFieldName; - - ConfigSchema(final String schemaFilename, - final Class className, - final Function extractId, - final String idFieldName) { - this.schemaFilename = schemaFilename; - this.className = className; - this.extractId = extractId; - this.idFieldName = idFieldName; - } - - ConfigSchema(final String schemaFilename, - final Class className) { - this.schemaFilename = schemaFilename; - this.className = className; - extractId = object -> { - throw new RuntimeException(className.getSimpleName() + " doesn't have an id"); - }; - idFieldName = null; - } - - @Override - public File getConfigSchemaFile() { - return KNOWN_SCHEMAS_ROOT.resolve(schemaFilename).toFile(); - } - - @Override - public Class getClassName() { - return (Class) className; - } - - @Override - public String getId(final T object) { - if (getClassName().isInstance(object)) { - return ((Function) extractId).apply(object); - } - throw new RuntimeException("Object: " + object + " is not instance of class " + getClassName().getName()); - } - - @Override - public String getIdFieldName() { - return idFieldName; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/configoss/Configs.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/configoss/Configs.java deleted file mode 100644 index 5fd453dbc29e9..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/configoss/Configs.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.configoss; - -/** - * This interface defines the general variables for configuring Airbyte. - *

    - * Please update the configuring-airbyte.md document when modifying this file. - *

    - * Please also add one of the following tags to the env var accordingly: - *

    - * 1. 'Internal-use only' if a var is mainly for Airbyte-only configuration. e.g. tracking, test or - * Cloud related etc. - *

    - * 2. 'Alpha support' if a var does not have proper support and should be used with care. - */ - -@SuppressWarnings("PMD.BooleanGetMethodName") -public interface Configs { - - /** - * Defines the bucket for caching specs. This immensely speeds up spec operations. This is updated - * when new versions are published. - */ - String getSpecCacheBucket(); - - enum DeploymentMode { - OSS, - CLOUD - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/configoss/EnvConfigs.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/configoss/EnvConfigs.java deleted file mode 100644 index a8e5b6261cbb2..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/configoss/EnvConfigs.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.configoss; - -import java.util.Map; -import java.util.Optional; -import java.util.function.Function; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@SuppressWarnings({"PMD.LongVariable", "PMD.CyclomaticComplexity", "PMD.AvoidReassigningParameters", "PMD.ConstructorCallsOverridableMethod"}) -public class EnvConfigs implements Configs { - - private static final Logger LOGGER = LoggerFactory.getLogger(EnvConfigs.class); - - // env variable names - public static final String SPEC_CACHE_BUCKET = "SPEC_CACHE_BUCKET"; - public static final String LOCAL_CONNECTOR_CATALOG_PATH = "LOCAL_CONNECTOR_CATALOG_PATH"; - - // defaults - private static final String DEFAULT_SPEC_CACHE_BUCKET = "io-airbyte-cloud-spec-cache"; - - private final Function getEnv; - - /** - * Constructs {@link EnvConfigs} from actual environment variables. - */ - public EnvConfigs() { - this(System.getenv()); - } - - /** - * Constructs {@link EnvConfigs} from a provided map. This can be used for testing or getting - * variables from a non-envvar source. - */ - public EnvConfigs(final Map envMap) { - getEnv = envMap::get; - } - - @Override - public String getSpecCacheBucket() { - return getEnvOrDefault(SPEC_CACHE_BUCKET, DEFAULT_SPEC_CACHE_BUCKET); - } - - public Optional getLocalCatalogPath() { - return Optional.ofNullable(getEnv(LOCAL_CONNECTOR_CATALOG_PATH)); - } - - // Worker - Data plane - - // Helpers - public String getEnvOrDefault(final String key, final String defaultValue) { - return getEnvOrDefault(key, defaultValue, Function.identity(), false); - } - - public T getEnvOrDefault(final String key, final T defaultValue, final Function parser, final boolean isSecret) { - final String value = getEnv.apply(key); - if (value != null && !value.isEmpty()) { - return parser.apply(value); - } else { - LOGGER.info("Using default value for environment variable {}: '{}'", key, isSecret ? "*****" : defaultValue); - return defaultValue; - } - } - - public String getEnv(final String name) { - return getEnv.apply(name); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/configoss/helpers/StateMessageHelper.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/configoss/helpers/StateMessageHelper.java deleted file mode 100644 index 22274b2dadff0..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/configoss/helpers/StateMessageHelper.java +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.configoss.helpers; - -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.json.Jsons; -import io.airbyte.configoss.State; -import io.airbyte.configoss.StateType; -import io.airbyte.configoss.StateWrapper; -import io.airbyte.protocol.models.AirbyteStateMessage; -import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; -import java.util.List; -import java.util.Optional; -import javax.annotation.Nullable; - -public class StateMessageHelper { - - public static class AirbyteStateMessageListTypeReference extends TypeReference> {} - - /** - * This a takes a json blob state and tries return either a legacy state in the format of a json - * object or a state message with the new format which is a list of airbyte state message. - * - * @param state - a blob representing the state - * @return An optional state wrapper, if there is no state an empty optional will be returned - */ - @SuppressWarnings("PMD.AvoidLiteralsInIfCondition") - public static Optional getTypedState(final JsonNode state) { - if (state == null) { - return Optional.empty(); - } else { - final List stateMessages; - try { - stateMessages = Jsons.object(state, new AirbyteStateMessageListTypeReference()); - } catch (final IllegalArgumentException e) { - return Optional.of(getLegacyStateWrapper(state)); - } - if (stateMessages.isEmpty()) { - return Optional.empty(); - } - - if (stateMessages.size() == 1) { - if (stateMessages.get(0).getType() == null) { - return Optional.of(getLegacyStateWrapper(state)); - } else { - switch (stateMessages.get(0).getType()) { - case GLOBAL -> { - return Optional.of(provideGlobalState(stateMessages.get(0))); - } - case STREAM -> { - return Optional.of(provideStreamState(stateMessages)); - } - case LEGACY -> { - return Optional.of(getLegacyStateWrapper(stateMessages.get(0).getData())); - } - default -> { - // Should not be reachable. - throw new IllegalStateException("Unexpected state type"); - } - } - } - } else { - if (stateMessages.stream().allMatch(stateMessage -> stateMessage.getType() == AirbyteStateType.STREAM)) { - return Optional.of(provideStreamState(stateMessages)); - } - if (stateMessages.stream().allMatch(stateMessage -> stateMessage.getType() == null)) { - return Optional.of(getLegacyStateWrapper(state)); - } - - throw new IllegalStateException("Unexpected state blob, the state contains either multiple global or conflicting state type."); - - } - } - } - - /** - * Converts a StateWrapper to a State - * - * LegacyStates are directly serialized into the state. GlobalStates and StreamStates are serialized - * as a list of AirbyteStateMessage in the state attribute. - * - * @param stateWrapper the StateWrapper to convert - * @return the Converted State - */ - @SuppressWarnings("UnnecessaryDefault") - public static State getState(final StateWrapper stateWrapper) { - return switch (stateWrapper.getStateType()) { - case LEGACY -> new State().withState(stateWrapper.getLegacyState()); - case STREAM -> new State().withState(Jsons.jsonNode(stateWrapper.getStateMessages())); - case GLOBAL -> new State().withState(Jsons.jsonNode(List.of(stateWrapper.getGlobal()))); - default -> throw new RuntimeException("Unexpected StateType " + stateWrapper.getStateType()); - }; - } - - public static Boolean isMigration(final StateType currentStateType, final Optional previousState) { - return previousState.isPresent() && isMigration(currentStateType, previousState.get().getStateType()); - } - - public static Boolean isMigration(final StateType currentStateType, final @Nullable StateType previousStateType) { - return previousStateType == StateType.LEGACY && currentStateType != StateType.LEGACY; - } - - private static StateWrapper provideGlobalState(final AirbyteStateMessage stateMessages) { - return new StateWrapper() - .withStateType(StateType.GLOBAL) - .withGlobal(stateMessages); - } - - /** - * This is returning a wrapped state, it assumes that the state messages are ordered. - * - * @param stateMessages - an ordered list of state message - * @param useStreamCapableState - a flag that indicates whether to return the new format - * @return a wrapped state - */ - private static StateWrapper provideStreamState(final List stateMessages) { - return new StateWrapper() - .withStateType(StateType.STREAM) - .withStateMessages(stateMessages); - - } - - private static StateWrapper getLegacyStateWrapper(final JsonNode state) { - return new StateWrapper() - .withStateType(StateType.LEGACY) - .withLegacyState(state); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/configoss/helpers/YamlListToStandardDefinitions.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/configoss/helpers/YamlListToStandardDefinitions.java deleted file mode 100644 index 4e1b4f7bf1f24..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/configoss/helpers/YamlListToStandardDefinitions.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.configoss.helpers; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.util.ClassUtil; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.yaml.Yamls; -import io.airbyte.configoss.StandardDestinationDefinition; -import io.airbyte.configoss.StandardSourceDefinition; -import java.util.AbstractMap.SimpleImmutableEntry; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -/** - * This is a convenience class for the conversion of a list of source/destination definitions from - * human-friendly yaml to processing friendly formats i.e. Java models or JSON. As this class - * performs validation, it is recommended to use this class to deal with plain lists. An example of - * such lists are Airbyte's master definition lists, which can be seen in the resources folder of - * the airbyte-config-oss/seed module. - * - * In addition to usual deserialization validations, we check: 1) The given list contains no - * duplicate names. 2) The given list contains no duplicate ids. - * - * Methods in these class throw Runtime exceptions upon validation failure. - */ -@SuppressWarnings("PMD.ShortVariable") -public class YamlListToStandardDefinitions { - - private static final Map CLASS_NAME_TO_ID_NAME = Map.ofEntries( - new SimpleImmutableEntry<>(StandardDestinationDefinition.class.getCanonicalName(), "destinationDefinitionId"), - new SimpleImmutableEntry<>(StandardSourceDefinition.class.getCanonicalName(), "sourceDefinitionId")); - - public static List toStandardSourceDefinitions(final String yamlStr) { - return verifyAndConvertToModelList(StandardSourceDefinition.class, yamlStr); - } - - public static List toStandardDestinationDefinitions(final String yamlStr) { - return verifyAndConvertToModelList(StandardDestinationDefinition.class, yamlStr); - } - - public static JsonNode verifyAndConvertToJsonNode(final String idName, final String yamlStr) { - final var jsonNode = Yamls.deserialize(yamlStr); - checkYamlIsPresentWithNoDuplicates(jsonNode, idName); - return jsonNode; - } - - @VisibleForTesting - static List verifyAndConvertToModelList(final Class klass, final String yamlStr) { - final var jsonNode = Yamls.deserialize(yamlStr); - final var idName = CLASS_NAME_TO_ID_NAME.get(klass.getCanonicalName()); - checkYamlIsPresentWithNoDuplicates(jsonNode, idName); - return toStandardXDefinitions(jsonNode.elements(), klass); - } - - private static void checkYamlIsPresentWithNoDuplicates(final JsonNode deserialize, final String idName) { - final var presentDestList = !deserialize.elements().equals(ClassUtil.emptyIterator()); - Preconditions.checkState(presentDestList, "Definition list is empty"); - checkNoDuplicateNames(deserialize.elements()); - checkNoDuplicateIds(deserialize.elements(), idName); - } - - private static void checkNoDuplicateNames(final Iterator iter) { - final var names = new HashSet(); - while (iter.hasNext()) { - final var element = Jsons.clone(iter.next()); - final var name = element.get("name").asText(); - if (names.contains(name)) { - throw new IllegalArgumentException("Multiple records have the name: " + name); - } - names.add(name); - } - } - - private static void checkNoDuplicateIds(final Iterator fileIterator, final String idName) { - final var ids = new HashSet(); - while (fileIterator.hasNext()) { - final var element = Jsons.clone(fileIterator.next()); - final var id = element.get(idName).asText(); - if (ids.contains(id)) { - throw new IllegalArgumentException("Multiple records have the id: " + id); - } - ids.add(id); - } - } - - private static List toStandardXDefinitions(final Iterator iter, final Class c) { - final Iterable iterable = () -> iter; - final var defList = new ArrayList(); - for (final JsonNode n : iterable) { - final var def = Jsons.object(n, c); - defList.add(def); - } - return defList; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/validation/json/AbstractSchemaValidator.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/validation/json/AbstractSchemaValidator.java deleted file mode 100644 index 9674175614e9d..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/validation/json/AbstractSchemaValidator.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.validation.json; - -import com.fasterxml.jackson.databind.JsonNode; -import java.nio.file.Path; -import java.util.Set; - -public abstract class AbstractSchemaValidator> implements ConfigSchemaValidator { - - private final JsonSchemaValidator jsonSchemaValidator; - - public AbstractSchemaValidator() { - this(new JsonSchemaValidator()); - } - - public AbstractSchemaValidator(final JsonSchemaValidator jsonSchemaValidator) { - this.jsonSchemaValidator = jsonSchemaValidator; - } - - public abstract Path getSchemaPath(T configType); - - private JsonNode getSchemaJson(final T configType) { - return JsonSchemaValidator.getSchema(getSchemaPath(configType).toFile()); - } - - @Override - public final Set validate(final T configType, final JsonNode objectJson) { - return jsonSchemaValidator.validate(getSchemaJson(configType), objectJson); - } - - @Override - public final boolean test(final T configType, final JsonNode objectJson) { - return jsonSchemaValidator.test(getSchemaJson(configType), objectJson); - } - - @Override - public final void ensure(final T configType, final JsonNode objectJson) throws JsonValidationException { - jsonSchemaValidator.ensure(getSchemaJson(configType), objectJson); - } - - @Override - public final void ensureAsRuntime(final T configType, final JsonNode objectJson) { - jsonSchemaValidator.ensureAsRuntime(getSchemaJson(configType), objectJson); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/validation/json/ConfigSchemaValidator.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/validation/json/ConfigSchemaValidator.java deleted file mode 100644 index a163f71c82d84..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/validation/json/ConfigSchemaValidator.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.validation.json; - -import com.fasterxml.jackson.databind.JsonNode; -import java.util.Set; - -public interface ConfigSchemaValidator> { - - Set validate(T configType, JsonNode objectJson); - - boolean test(T configType, JsonNode objectJson); - - void ensure(T configType, JsonNode objectJson) throws JsonValidationException; - - void ensureAsRuntime(T configType, JsonNode objectJson); - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/validation/json/JsonSchemaValidator.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/validation/json/JsonSchemaValidator.java deleted file mode 100644 index 7f62079c61212..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/validation/json/JsonSchemaValidator.java +++ /dev/null @@ -1,256 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.validation.json; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; -import com.networknt.schema.JsonMetaSchema; -import com.networknt.schema.JsonSchema; -import com.networknt.schema.JsonSchemaFactory; -import com.networknt.schema.SpecVersion; -import com.networknt.schema.ValidationContext; -import com.networknt.schema.ValidationMessage; -import io.airbyte.commons.string.Strings; -import java.io.File; -import java.io.IOException; -import java.net.URI; -import java.net.URISyntaxException; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.stream.Collectors; -import me.andrz.jackson.JsonContext; -import me.andrz.jackson.JsonReferenceException; -import me.andrz.jackson.JsonReferenceProcessor; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class JsonSchemaValidator { - - private static final Logger LOGGER = LoggerFactory.getLogger(JsonSchemaValidator.class); - // This URI just needs to point at any path in the same directory as /app/WellKnownTypes.json - // It's required for the JsonSchema#validate method to resolve $ref correctly. - private static final URI DEFAULT_BASE_URI; - - static { - try { - DEFAULT_BASE_URI = new URI("file:///app/nonexistent_file.json"); - } catch (final URISyntaxException e) { - throw new RuntimeException(e); - } - } - - private final JsonSchemaFactory jsonSchemaFactory; - private final URI baseUri; - private final Map schemaToValidators = new HashMap<>(); - - public JsonSchemaValidator() { - this(DEFAULT_BASE_URI); - } - - /** - * The public constructor hardcodes a URL with access to WellKnownTypes.json. This method allows - * tests to override that URI - * - * Required to resolve $ref schemas using WellKnownTypes.json - * - * @param baseUri The base URI for schema resolution - */ - @VisibleForTesting - public JsonSchemaValidator(final URI baseUri) { - this.jsonSchemaFactory = JsonSchemaFactory.getInstance(SpecVersion.VersionFlag.V7); - this.baseUri = baseUri; - } - - /** - * Create and cache a schema validator for a particular schema. This validator is used when - * {@link #testInitializedSchema(String, JsonNode)} and - * {@link #ensureInitializedSchema(String, JsonNode)} is called. - */ - public void initializeSchemaValidator(final String schemaName, final JsonNode schemaJson) { - schemaToValidators.put(schemaName, getSchemaValidator(schemaJson)); - } - - /** - * Returns true if the object adheres to the given schema and false otherwise. - */ - public boolean testInitializedSchema(final String schemaName, final JsonNode objectJson) { - final var schema = schemaToValidators.get(schemaName); - Preconditions.checkNotNull(schema, schemaName + " needs to be initialised before calling this method"); - - final var validate = schema.validate(objectJson); - return validate.isEmpty(); - } - - /** - * Throws an exception if the object does not adhere to the given schema. - */ - public void ensureInitializedSchema(final String schemaName, final JsonNode objectNode) throws JsonValidationException { - final var schema = schemaToValidators.get(schemaName); - Preconditions.checkNotNull(schema, schemaName + " needs to be initialised before calling this method"); - - final Set validationMessages = schema.validate(objectNode); - if (validationMessages.isEmpty()) { - return; - } - - throw new JsonValidationException( - String.format( - "json schema validation failed when comparing the data to the json schema. \nErrors: %s \nSchema: \n%s", Strings.join(validationMessages, - ", "), - schemaName)); - } - - /** - * WARNING - *

    - * The following methods perform JSON validation **by re-creating a validator each time**. This is - * both CPU and GC expensive, and should be used carefully. - */ - - // todo(davin): Rewrite this section to cache schemas. - public boolean test(final JsonNode schemaJson, final JsonNode objectJson) { - final Set validationMessages = validateInternal(schemaJson, objectJson); - - if (!validationMessages.isEmpty()) { - LOGGER.info("JSON schema validation failed. \nerrors: {}", Strings.join(validationMessages, ", ")); - } - - return validationMessages.isEmpty(); - } - - public Set validate(final JsonNode schemaJson, final JsonNode objectJson) { - return validateInternal(schemaJson, objectJson) - .stream() - .map(ValidationMessage::getMessage) - .collect(Collectors.toSet()); - } - - public List getValidationMessageArgs(final JsonNode schemaJson, final JsonNode objectJson) { - return validateInternal(schemaJson, objectJson) - .stream() - .map(ValidationMessage::getArguments) - .collect(Collectors.toList()); - } - - public List getValidationMessagePaths(final JsonNode schemaJson, final JsonNode objectJson) { - return validateInternal(schemaJson, objectJson) - .stream() - .map(ValidationMessage::getPath) - .collect(Collectors.toList()); - } - - public void ensure(final JsonNode schemaJson, final JsonNode objectJson) throws JsonValidationException { - final Set validationMessages = validateInternal(schemaJson, objectJson); - if (validationMessages.isEmpty()) { - return; - } - - throw new JsonValidationException(String.format( - "json schema validation failed when comparing the data to the json schema. \nErrors: %s \nSchema: \n%s", - Strings.join(validationMessages, ", "), - schemaJson.toPrettyString())); - } - - public void ensureAsRuntime(final JsonNode schemaJson, final JsonNode objectJson) { - try { - ensure(schemaJson, objectJson); - } catch (final JsonValidationException e) { - throw new RuntimeException(e); - } - } - - // keep this internal as it returns a type specific to the wrapped library. - private Set validateInternal(final JsonNode schemaJson, final JsonNode objectJson) { - Preconditions.checkNotNull(schemaJson); - Preconditions.checkNotNull(objectJson); - - final JsonSchema schema = getSchemaValidator(schemaJson); - return schema.validate(objectJson); - } - - /** - * Return a schema validator for a json schema, defaulting to the V7 Json schema. - */ - private JsonSchema getSchemaValidator(JsonNode schemaJson) { - // Default to draft-07, but have handling for the other metaschemas that networknt supports - final JsonMetaSchema metaschema; - final JsonNode metaschemaNode = schemaJson.get("$schema"); - if (metaschemaNode == null || metaschemaNode.asText() == null || metaschemaNode.asText().isEmpty()) { - metaschema = JsonMetaSchema.getV7(); - } else { - final String metaschemaString = metaschemaNode.asText(); - // We're not using "http://....".equals(), because we want to avoid weirdness with https, etc. - if (metaschemaString.contains("json-schema.org/draft-04")) { - metaschema = JsonMetaSchema.getV4(); - } else if (metaschemaString.contains("json-schema.org/draft-06")) { - metaschema = JsonMetaSchema.getV6(); - } else if (metaschemaString.contains("json-schema.org/draft/2019-09")) { - metaschema = JsonMetaSchema.getV201909(); - } else if (metaschemaString.contains("json-schema.org/draft/2020-12")) { - metaschema = JsonMetaSchema.getV202012(); - } else { - metaschema = JsonMetaSchema.getV7(); - } - } - - final ValidationContext context = new ValidationContext( - jsonSchemaFactory.getUriFactory(), - null, - metaschema, - jsonSchemaFactory, - null); - final JsonSchema schema = new JsonSchema( - context, - baseUri, - schemaJson); - return schema; - } - - /** - * Get JsonNode for an object defined as the main object in a JsonSchema file. Able to create the - * JsonNode even if the the JsonSchema refers to objects in other files. - * - * @param schemaFile - the schema file - * @return schema object processed from across all dependency files. - */ - public static JsonNode getSchema(final File schemaFile) { - try { - return getProcessor().process(schemaFile); - } catch (final IOException | JsonReferenceException e) { - throw new RuntimeException(e); - } - } - - /** - * Get JsonNode for an object defined in the "definitions" section of a JsonSchema file. Able to - * create the JsonNode even if the the JsonSchema refers to objects in other files. - * - * @param schemaFile - the schema file - * @param definitionStructName - get the schema from a struct defined in the "definitions" section - * of a JsonSchema file (instead of the main object in that file). - * @return schema object processed from across all dependency files. - */ - public static JsonNode getSchema(final File schemaFile, final String definitionStructName) { - try { - final JsonContext jsonContext = new JsonContext(schemaFile); - return getProcessor().process(jsonContext, jsonContext.getDocument().get("definitions").get(definitionStructName)); - } catch (final IOException | JsonReferenceException e) { - throw new RuntimeException(e); - } - } - - private static JsonReferenceProcessor getProcessor() { - // JsonReferenceProcessor follows $ref in json objects. Jackson does not natively support - // this. - final JsonReferenceProcessor jsonReferenceProcessor = new JsonReferenceProcessor(); - jsonReferenceProcessor.setMaxDepth(-1); // no max. - - return jsonReferenceProcessor; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/validation/json/JsonValidationException.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/validation/json/JsonValidationException.java deleted file mode 100644 index 187777daafaae..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/validation/json/JsonValidationException.java +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.validation.json; - -public class JsonValidationException extends Exception { - - public JsonValidationException(final String message) { - super(message); - } - - public JsonValidationException(final String message, final Throwable cause) { - super(message, cause); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/cli/Clis.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/cli/Clis.kt new file mode 100644 index 0000000000000..432ac0995a913 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/cli/Clis.kt @@ -0,0 +1,59 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.cli + +import org.apache.commons.cli.* + +object Clis { + /** + * Parse an options object + * + * @param args + * - command line args + * @param options + * - expected options + * @return object with parsed values. + */ + @JvmOverloads + fun parse( + args: Array, + options: Options, + parser: CommandLineParser = DefaultParser(), + commandLineSyntax: String? = null + ): CommandLine { + val helpFormatter = HelpFormatter() + + try { + return parser.parse(options, args) + } catch (e: ParseException) { + if (!commandLineSyntax.isNullOrEmpty()) { + helpFormatter.printHelp(commandLineSyntax, options) + } + throw IllegalArgumentException(e) + } + } + + fun parse(args: Array, options: Options, commandLineSyntax: String?): CommandLine { + return parse(args, options, DefaultParser(), commandLineSyntax) + } + + fun getRelaxedParser(): CommandLineParser = RelaxedParser() + + // https://stackoverflow.com/questions/33874902/apache-commons-cli-1-3-1-how-to-ignore-unknown-arguments + private class RelaxedParser : DefaultParser() { + @Throws(ParseException::class) + override fun parse(options: Options, arguments: Array): CommandLine { + val knownArgs: MutableList = ArrayList() + for (i in arguments.indices) { + if (options.hasOption(arguments[i])) { + knownArgs.add(arguments[i]) + if (i + 1 < arguments.size && options.getOption(arguments[i]).hasArg()) { + knownArgs.add(arguments[i + 1]) + } + } + } + return super.parse(options, knownArgs.toTypedArray()) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/concurrency/CompletableFutures.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/concurrency/CompletableFutures.kt new file mode 100644 index 0000000000000..d7388390fdd8c --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/concurrency/CompletableFutures.kt @@ -0,0 +1,59 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.concurrency + +import io.airbyte.commons.functional.Either +import java.util.* +import java.util.concurrent.CompletableFuture +import java.util.concurrent.CompletionStage +import java.util.concurrent.atomic.AtomicInteger + +object CompletableFutures { + /** + * Non-blocking implementation which does not use join. and returns an aggregated future. The + * order of results is preserved from the original list of futures. + * + * @param futures list of futures + * @param type of result + * @return a future that completes when all the input futures have completed + */ + fun allOf( + futures: List> + ): CompletionStage>> { + val result = CompletableFuture>>() + val size = futures.size + val counter = AtomicInteger() + val results = + java.lang.reflect.Array.newInstance(Either::class.java, size) + as Array> + // attach a whenComplete to all futures + for (i in 0 until size) { + val currentIndex = i + futures[i].whenComplete { value: Result, exception: Throwable? -> + // if exception is null, then the future completed successfully + // maybe synchronization is unnecessary here, but it's better to be safe + synchronized(results) { + if (exception == null) { + results[currentIndex] = Either.right(value) + } else { + if (exception is Exception) { + results[currentIndex] = Either.left(exception) + } else { + // this should never happen + throw RuntimeException( + "Unexpected exception in a future completion.", + exception + ) + } + } + } + val completedCount = counter.incrementAndGet() + if (completedCount == size) { + result.complete(Arrays.asList(*results)) + } + } + } + return result + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/concurrency/VoidCallable.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/concurrency/VoidCallable.kt new file mode 100644 index 0000000000000..4a109be96f108 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/concurrency/VoidCallable.kt @@ -0,0 +1,21 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.concurrency + +import java.util.concurrent.Callable + +@FunctionalInterface +fun interface VoidCallable : Callable { + @Throws(Exception::class) + override fun call(): Void? { + voidCall() + return null + } + + @Throws(Exception::class) fun voidCall() + + companion object { + @JvmField val NOOP: VoidCallable = VoidCallable {} + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/concurrency/WaitingUtils.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/concurrency/WaitingUtils.kt new file mode 100644 index 0000000000000..740fa3967d3ea --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/concurrency/WaitingUtils.kt @@ -0,0 +1,45 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.concurrency + +import java.time.Duration +import java.util.function.Supplier + +object WaitingUtils { + /** + * Wait for a condition or timeout. + * + * @param interval + * - frequency with which condition and timeout should be checked. + * @param timeout + * - how long to wait in total + * @param condition + * - supplier that returns whether the condition has been met. + * @return true if condition was met before the timeout was reached, otherwise false. + */ + fun waitForCondition( + interval: Duration, + timeout: Duration, + condition: Supplier + ): Boolean { + var timeWaited = Duration.ZERO + while (true) { + if (condition.get()) { + return true + } + + if (timeout.minus(timeWaited).isNegative) { + return false + } + + try { + Thread.sleep(interval.toMillis()) + } catch (e: InterruptedException) { + throw RuntimeException(e) + } + + timeWaited = timeWaited.plus(interval) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/constants/AirbyteSecretConstants.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/constants/AirbyteSecretConstants.kt new file mode 100644 index 0000000000000..2a9aa4c88a01d --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/constants/AirbyteSecretConstants.kt @@ -0,0 +1,13 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.constants + +/** Collection of constants related to Airbyte secrets defined in connector configurations. */ +object AirbyteSecretConstants { + /** The name of a configuration property field that has been identified as a secret. */ + const val AIRBYTE_SECRET_FIELD: String = "airbyte_secret" + + /** Mask value that is displayed in place of a value associated with an airbyte secret. */ + const val SECRETS_MASK: String = "**********" +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/enums/Enums.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/enums/Enums.kt new file mode 100644 index 0000000000000..8a2295d580fa1 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/enums/Enums.kt @@ -0,0 +1,73 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.commons.enums + +import com.google.common.base.Preconditions +import com.google.common.collect.Maps +import com.google.common.collect.Sets +import java.util.Arrays +import java.util.Locale +import java.util.Optional +import java.util.concurrent.ConcurrentMap +import java.util.stream.Collectors + +class Enums { + companion object { + inline fun , reified T2 : Enum> convertTo(ie: T1?, oe: Class): T2? { + if (ie == null) { + return null + } + + return enumValueOf(ie.name) + } + + private fun normalizeName(name: String): String { + return name.lowercase(Locale.getDefault()).replace("[^a-zA-Z0-9]".toRegex(), "") + } + + @Suppress("UNCHECKED_CAST") + fun > toEnum(value: String, enumClass: Class): Optional { + Preconditions.checkArgument(enumClass.isEnum) + + if (!NORMALIZED_ENUMS.containsKey(enumClass)) { + val values = enumClass.enumConstants + val mappings: MutableMap = Maps.newHashMapWithExpectedSize(values.size) + for (t in values) { + mappings[normalizeName(t!!.name)] = t + } + NORMALIZED_ENUMS[enumClass] = mappings + } + + return Optional.ofNullable( + NORMALIZED_ENUMS.getValue(enumClass)[normalizeName(value)] as T?, + ) + } + + private val NORMALIZED_ENUMS: ConcurrentMap, Map> = + Maps.newConcurrentMap() + + fun ?, T2 : Enum?> isCompatible(c1: Class, c2: Class): Boolean { + Preconditions.checkArgument(c1.isEnum) + Preconditions.checkArgument(c2.isEnum) + return (c1.enumConstants.size == c2.enumConstants.size && + Sets.difference( + Arrays.stream(c1.enumConstants) + .map { obj: T1 -> obj!!.name } + .collect(Collectors.toSet()), + Arrays.stream(c2.enumConstants) + .map { obj: T2 -> obj!!.name } + .collect(Collectors.toSet()), + ) + .isEmpty()) + } + + inline fun , reified T2 : Enum> convertListTo( + ies: List, + oe: Class + ): List { + return ies.map { convertTo(it, oe) }.toList() + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/exceptions/ConfigErrorException.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/exceptions/ConfigErrorException.kt new file mode 100644 index 0000000000000..c25e8be1e1361 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/exceptions/ConfigErrorException.kt @@ -0,0 +1,20 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.exceptions + +/** + * An exception that indicates that there is something wrong with the user's connector setup. This + * exception is caught and emits an AirbyteTraceMessage. + */ +class ConfigErrorException : RuntimeException { + val displayMessage: String + + constructor(displayMessage: String) : super(displayMessage) { + this.displayMessage = displayMessage + } + + constructor(displayMessage: String, exception: Throwable?) : super(displayMessage, exception) { + this.displayMessage = displayMessage + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/exceptions/ConnectionErrorException.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/exceptions/ConnectionErrorException.kt new file mode 100644 index 0000000000000..52f822e29cccf --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/exceptions/ConnectionErrorException.kt @@ -0,0 +1,40 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.exceptions + +class ConnectionErrorException : RuntimeException { + var stateCode: String? = null + private set + var errorCode: Int = 0 + private set + var exceptionMessage: String? = null + private set + + constructor(exceptionMessage: String?) : super(exceptionMessage) + + constructor(stateCode: String?, exception: Throwable) : super(exception) { + this.stateCode = stateCode + this.exceptionMessage = exception.message + } + + constructor( + stateCode: String?, + exceptionMessage: String?, + exception: Throwable? + ) : super(exception) { + this.stateCode = stateCode + this.exceptionMessage = exceptionMessage + } + + constructor( + stateCode: String?, + errorCode: Int, + exceptionMessage: String?, + exception: Throwable? + ) : super(exception) { + this.stateCode = stateCode + this.errorCode = errorCode + this.exceptionMessage = exceptionMessage + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/exceptions/SQLRuntimeException.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/exceptions/SQLRuntimeException.kt new file mode 100644 index 0000000000000..19519cae2af87 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/exceptions/SQLRuntimeException.kt @@ -0,0 +1,12 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.exceptions + +import java.sql.SQLException + +/** + * Wrapper unchecked exception for [SQLException]. This can be used in functional interfaces that do + * not allow checked exceptions without the generic RuntimeException. + */ +class SQLRuntimeException(cause: SQLException?) : RuntimeException(cause) diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/features/EnvVariableFeatureFlags.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/features/EnvVariableFeatureFlags.kt new file mode 100644 index 0000000000000..9dc84fbc51a4b --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/features/EnvVariableFeatureFlags.kt @@ -0,0 +1,77 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.features + +import java.util.function.Function +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +class EnvVariableFeatureFlags : FeatureFlags { + override fun autoDetectSchema(): Boolean { + return getEnvOrDefault(AUTO_DETECT_SCHEMA, true) { s: String -> s.toBoolean() } + } + + override fun logConnectorMessages(): Boolean { + return getEnvOrDefault(LOG_CONNECTOR_MESSAGES, false) { s: String -> s.toBoolean() } + } + + override fun concurrentSourceStreamRead(): Boolean { + return getEnvOrDefault(CONCURRENT_SOURCE_STREAM_READ, false) { s: String -> s.toBoolean() } + } + + override fun applyFieldSelection(): Boolean { + return getEnvOrDefault(APPLY_FIELD_SELECTION, false) { s: String -> s.toBoolean() } + } + + override fun fieldSelectionWorkspaces(): String? { + return getEnvOrDefault(FIELD_SELECTION_WORKSPACES, "") { arg: String? -> arg } + } + + override fun strictComparisonNormalizationWorkspaces(): String? { + return getEnvOrDefault(STRICT_COMPARISON_NORMALIZATION_WORKSPACES, "") { arg: String? -> + arg + } + } + + override fun strictComparisonNormalizationTag(): String? { + return getEnvOrDefault(STRICT_COMPARISON_NORMALIZATION_TAG, "strict_comparison2") { + arg: String? -> + arg + } + } + + override fun deploymentMode(): String? { + return getEnvOrDefault(DEPLOYMENT_MODE, "") { arg: String? -> arg } + } + + // TODO: refactor in order to use the same method than the ones in EnvConfigs.java + fun getEnvOrDefault(key: String?, defaultValue: T, parser: Function): T { + val value = System.getenv(key) + if (value != null && !value.isEmpty()) { + return parser.apply(value) + } else { + log.debug("Using default value for environment variable {}: '{}'", key, defaultValue) + return defaultValue + } + } + + companion object { + private val log: Logger = LoggerFactory.getLogger(EnvVariableFeatureFlags::class.java) + + const val AUTO_DETECT_SCHEMA: String = "AUTO_DETECT_SCHEMA" + + // Set this value to true to see all messages from the source to destination, set to one + // second + // emission + const val LOG_CONNECTOR_MESSAGES: String = "LOG_CONNECTOR_MESSAGES" + const val APPLY_FIELD_SELECTION: String = "APPLY_FIELD_SELECTION" + const val FIELD_SELECTION_WORKSPACES: String = "FIELD_SELECTION_WORKSPACES" + const val CONCURRENT_SOURCE_STREAM_READ: String = "CONCURRENT_SOURCE_STREAM_READ" + const val STRICT_COMPARISON_NORMALIZATION_WORKSPACES: String = + "STRICT_COMPARISON_NORMALIZATION_WORKSPACES" + const val STRICT_COMPARISON_NORMALIZATION_TAG: String = + "STRICT_COMPARISON_NORMALIZATION_TAG" + const val DEPLOYMENT_MODE: String = "DEPLOYMENT_MODE" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/features/FeatureFlagHelper.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/features/FeatureFlagHelper.kt new file mode 100644 index 0000000000000..8f799d20dc747 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/features/FeatureFlagHelper.kt @@ -0,0 +1,50 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.features + +import com.google.common.annotations.VisibleForTesting +import io.github.oshai.kotlinlogging.KotlinLogging +import java.util.* +import java.util.function.Function + +private val log = KotlinLogging.logger {} + +object FeatureFlagHelper { + fun isFieldSelectionEnabledForWorkspace( + featureFlags: FeatureFlags, + workspaceId: UUID? + ): Boolean { + return (isWorkspaceIncludedInFlag( + featureFlags, + { obj: FeatureFlags -> obj.fieldSelectionWorkspaces() }, + workspaceId, + "field selection" + ) || featureFlags.applyFieldSelection()) + } + + @VisibleForTesting + fun isWorkspaceIncludedInFlag( + featureFlags: FeatureFlags, + flagRetriever: Function, + workspaceId: UUID?, + context: String? + ): Boolean { + val workspaceIdsString = flagRetriever.apply(featureFlags) + val workspaceIds: MutableSet = HashSet() + if (workspaceIdsString != null && !workspaceIdsString.isEmpty()) { + for (id in + workspaceIdsString + .split(",".toRegex()) + .dropLastWhile { it.isEmpty() } + .toTypedArray()) { + try { + workspaceIds.add(UUID.fromString(id)) + } catch (e: IllegalArgumentException) { + log.warn("Malformed workspace id for {}: {}", context, id) + } + } + } + return workspaceId != null && workspaceIds.contains(workspaceId) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/features/FeatureFlags.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/features/FeatureFlags.kt new file mode 100644 index 0000000000000..cca688469c082 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/features/FeatureFlags.kt @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.features + +/** + * Interface that describe which features are activated in airbyte. Currently, the only + * implementation relies on env. Ideally it should be on some DB. + */ +interface FeatureFlags { + fun autoDetectSchema(): Boolean + + fun logConnectorMessages(): Boolean + + fun concurrentSourceStreamRead(): Boolean + + /** + * Return true if field selection should be applied. See also fieldSelectionWorkspaces. + * + * @return whether field selection should be applied + */ + fun applyFieldSelection(): Boolean + + /** + * Get the workspaces allow-listed for field selection. This should take precedence over + * applyFieldSelection. + * + * @return a comma-separated list of workspace ids where field selection should be enabled. + */ + fun fieldSelectionWorkspaces(): String? + + /** + * Get the workspaces allow-listed for strict incremental comparison in normalization. This + * takes precedence over the normalization version in destination_definitions.yaml. + * + * @return a comma-separated list of workspace ids where strict incremental comparison should be + * enabled in normalization. + */ + fun strictComparisonNormalizationWorkspaces(): String? + + /** + * Get the Docker image tag representing the normalization version with strict-comparison. + * + * @return The Docker image tag representing the normalization version with strict-comparison + */ + fun strictComparisonNormalizationTag(): String? + + /** + * Get the deployment mode used to deploy a connector. + * + * @return empty string for the default deployment mode, "CLOUD" for cloud deployment mode. + */ + fun deploymentMode(): String? +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/features/FeatureFlagsWrapper.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/features/FeatureFlagsWrapper.kt new file mode 100644 index 0000000000000..e32b0e5533f84 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/features/FeatureFlagsWrapper.kt @@ -0,0 +1,50 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.features + +open class FeatureFlagsWrapper(private val wrapped: FeatureFlags) : FeatureFlags { + override fun autoDetectSchema(): Boolean { + return wrapped.autoDetectSchema() + } + + override fun logConnectorMessages(): Boolean { + return wrapped.logConnectorMessages() + } + + override fun concurrentSourceStreamRead(): Boolean { + return wrapped.concurrentSourceStreamRead() + } + + override fun applyFieldSelection(): Boolean { + return wrapped.applyFieldSelection() + } + + override fun fieldSelectionWorkspaces(): String? { + return wrapped.fieldSelectionWorkspaces() + } + + override fun strictComparisonNormalizationWorkspaces(): String? { + return wrapped.strictComparisonNormalizationWorkspaces() + } + + override fun strictComparisonNormalizationTag(): String? { + return wrapped.strictComparisonNormalizationTag() + } + + override fun deploymentMode(): String? { + return wrapped.deploymentMode() + } + + companion object { + /** Overrides the [FeatureFlags.deploymentMode] method in the feature flags. */ + @JvmStatic + fun overridingDeploymentMode(wrapped: FeatureFlags, deploymentMode: String?): FeatureFlags { + return object : FeatureFlagsWrapper(wrapped) { + override fun deploymentMode(): String? { + return deploymentMode + } + } + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/functional/CheckedBiConsumer.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/functional/CheckedBiConsumer.kt new file mode 100644 index 0000000000000..1feb924269629 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/functional/CheckedBiConsumer.kt @@ -0,0 +1,8 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.functional + +import org.apache.commons.lang3.function.FailableBiConsumer + +fun interface CheckedBiConsumer : FailableBiConsumer diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/functional/CheckedBiFunction.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/functional/CheckedBiFunction.kt new file mode 100644 index 0000000000000..a7dc0678ed0b1 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/functional/CheckedBiFunction.kt @@ -0,0 +1,9 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.functional + +import org.apache.commons.lang3.function.FailableBiFunction + +fun interface CheckedBiFunction : + FailableBiFunction diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/functional/CheckedConsumer.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/functional/CheckedConsumer.kt new file mode 100644 index 0000000000000..9a604abed590b --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/functional/CheckedConsumer.kt @@ -0,0 +1,8 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.functional + +import org.apache.commons.lang3.function.FailableConsumer + +fun interface CheckedConsumer : FailableConsumer diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/functional/CheckedFunction.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/functional/CheckedFunction.kt new file mode 100644 index 0000000000000..abaf28d85b148 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/functional/CheckedFunction.kt @@ -0,0 +1,8 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.functional + +import org.apache.commons.lang3.function.FailableFunction + +fun interface CheckedFunction : FailableFunction diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/functional/CheckedSupplier.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/functional/CheckedSupplier.kt new file mode 100644 index 0000000000000..825bb0ade2833 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/functional/CheckedSupplier.kt @@ -0,0 +1,8 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.functional + +import org.apache.commons.lang3.function.FailableSupplier + +fun interface CheckedSupplier : FailableSupplier diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/functional/Either.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/functional/Either.kt new file mode 100644 index 0000000000000..50282726d33f0 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/functional/Either.kt @@ -0,0 +1,65 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.functional + +import io.github.oshai.kotlinlogging.KotlinLogging +import java.util.* + +private val LOGGER = KotlinLogging.logger {} +/** + * A class that represents a value of one of two possible types (a disjoint union). An instance of + * Either is an instance of Left or Right. + * + * A common use of Either is for error handling in functional programming. By convention, Left is + * failure and Right is success. + * + * @param the type of the left value + * @param the type of the right value + */ +class Either private constructor(left: Error?, right: Result?) { + val left: Error? = left + val right: Result? = right + + fun isLeft(): Boolean { + return left != null + } + + fun isRight(): Boolean { + return right != null + } + + override fun equals(o: Any?): Boolean { + if (this === o) { + return true + } + if (o == null || javaClass != o.javaClass) { + return false + } + val either = o as Either<*, *> + return left == either.left && right == either.right + } + + override fun hashCode(): Int { + return Objects.hash(left, right) + } + + companion object { + fun left(error: Error): Either { + if (error == null) { + LOGGER.warn("Either.left called with a null!") + } + return Either(error!!, null) + } + + fun right(result: Result): Either { + if (result == null) { + // I ran into this when declaring a functional class of . + // In java, we must return null because Void has no instanciation + // In kotlin, though, we should change the return type to and return Unit + LOGGER.warn { "Either.right called with a null!" } + } + return Either(null, result) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/io/IOs.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/io/IOs.kt new file mode 100644 index 0000000000000..33bcfce0ede8c --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/io/IOs.kt @@ -0,0 +1,121 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.io + +import com.google.common.base.Charsets +import java.io.* +import java.nio.charset.StandardCharsets +import java.nio.file.Files +import java.nio.file.Path +import java.nio.file.Paths +import java.util.* +import org.apache.commons.io.input.ReversedLinesFileReader + +object IOs { + @JvmStatic + fun writeFile(path: Path, fileName: String?, contents: String?): Path { + val filePath = path.resolve(fileName) + return writeFile(filePath, contents) + } + + @JvmStatic + fun writeFile(filePath: Path, contents: ByteArray): Path { + try { + Files.write(filePath, contents) + return filePath + } catch (e: IOException) { + throw RuntimeException(e) + } + } + + @JvmStatic + fun writeFile(filePath: Path, contents: String?): Path { + try { + Files.writeString(filePath, contents, StandardCharsets.UTF_8) + return filePath + } catch (e: IOException) { + throw RuntimeException(e) + } + } + + /** + * Writes a file to a random directory in the /tmp folder. Useful as a staging group for test + * resources. + */ + @JvmStatic + fun writeFileToRandomTmpDir(filename: String?, contents: String?): String { + val source = Paths.get("/tmp", UUID.randomUUID().toString()) + try { + val tmpFile = source.resolve(filename) + Files.deleteIfExists(tmpFile) + Files.createDirectory(source) + writeFile(tmpFile, contents) + return tmpFile.toString() + } catch (e: IOException) { + throw RuntimeException(e) + } + } + + @JvmStatic + fun readFile(path: Path, fileName: String?): String { + return readFile(path.resolve(fileName)) + } + + @JvmStatic + fun readFile(fullpath: Path?): String { + try { + return Files.readString(fullpath, StandardCharsets.UTF_8) + } catch (e: IOException) { + throw RuntimeException(e) + } + } + + @Throws(IOException::class) + @JvmStatic + fun getTail(numLines: Int, path: Path?): List { + if (path == null) { + return emptyList() + } + + val file = path.toFile() + if (!file.exists()) { + return emptyList() + } + + ReversedLinesFileReader(file, Charsets.UTF_8).use { fileReader -> + val lines: MutableList = ArrayList() + var line = fileReader.readLine() + while (line != null && lines.size < numLines) { + lines.add(line) + line = fileReader.readLine() + } + + Collections.reverse(lines) + return lines + } + } + + @JvmStatic + fun inputStream(path: Path): InputStream { + try { + return Files.newInputStream(path) + } catch (e: IOException) { + throw RuntimeException(e) + } + } + + @JvmStatic + fun silentClose(closeable: Closeable) { + try { + closeable.close() + } catch (e: IOException) { + throw RuntimeException(e) + } + } + + @JvmStatic + fun newBufferedReader(inputStream: InputStream): BufferedReader { + return BufferedReader(InputStreamReader(inputStream, StandardCharsets.UTF_8)) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/io/LineGobbler.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/io/LineGobbler.kt new file mode 100644 index 0000000000000..869e51cb186ae --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/io/LineGobbler.kt @@ -0,0 +1,116 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.io + +import io.airbyte.commons.concurrency.VoidCallable +import io.airbyte.commons.logging.MdcScope +import java.io.BufferedReader +import java.io.ByteArrayInputStream +import java.io.IOException +import java.io.InputStream +import java.nio.charset.StandardCharsets +import java.util.concurrent.ExecutorService +import java.util.concurrent.Executors +import java.util.function.Consumer +import org.slf4j.Logger +import org.slf4j.LoggerFactory +import org.slf4j.MDC + +class LineGobbler +@JvmOverloads +internal constructor( + `is`: InputStream, + private val consumer: Consumer, + private val executor: ExecutorService, + private val mdc: Map?, + private val caller: String = GENERIC, + private val containerLogMdcBuilder: MdcScope.Builder = MdcScope.Companion.DEFAULT_BUILDER +) : VoidCallable { + private val `is`: BufferedReader? = IOs.newBufferedReader(`is`) + + internal constructor( + `is`: InputStream, + consumer: Consumer, + executor: ExecutorService, + mdc: Map?, + mdcScopeBuilder: MdcScope.Builder + ) : this(`is`, consumer, executor, mdc, GENERIC, mdcScopeBuilder) + + override fun voidCall() { + MDC.setContextMap(mdc) + try { + var line = `is`!!.readLine() + while (line != null) { + containerLogMdcBuilder.build().use { mdcScope -> consumer.accept(line) } + line = `is`.readLine() + } + } catch (i: IOException) { + LOGGER.warn( + "{} gobbler IOException: {}. Typically happens when cancelling a job.", + caller, + i.message + ) + } catch (e: Exception) { + LOGGER.error("{} gobbler error when reading stream", caller, e) + } finally { + executor.shutdown() + } + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(LineGobbler::class.java) + private const val GENERIC = "generic" + + @JvmOverloads + fun gobble( + message: String, + consumer: Consumer = Consumer { msg: String -> LOGGER.info(msg) } + ) { + val stringAsSteam: InputStream = + ByteArrayInputStream(message.toByteArray(StandardCharsets.UTF_8)) + gobble(stringAsSteam, consumer) + } + + /** + * Used to emit a visual separator in the user-facing logs indicating a start of a + * meaningful temporal activity + * + * @param message + */ + fun startSection(message: String) { + gobble("\r\n----- START $message -----\r\n\r\n") + } + + /** + * Used to emit a visual separator in the user-facing logs indicating a end of a meaningful + * temporal activity + * + * @param message + */ + fun endSection(message: String) { + gobble("\r\n----- END $message -----\r\n\r\n") + } + + fun gobble( + `is`: InputStream, + consumer: Consumer, + mdcScopeBuilder: MdcScope.Builder + ) { + gobble(`is`, consumer, GENERIC, mdcScopeBuilder) + } + + @JvmOverloads + fun gobble( + `is`: InputStream, + consumer: Consumer, + caller: String = GENERIC, + mdcScopeBuilder: MdcScope.Builder = MdcScope.Companion.DEFAULT_BUILDER + ) { + val executor = Executors.newSingleThreadExecutor() + val mdc = MDC.getCopyOfContextMap() + val gobbler = LineGobbler(`is`, consumer, executor, mdc, caller, mdcScopeBuilder) + executor.submit(gobbler) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/jackson/MoreMappers.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/jackson/MoreMappers.kt new file mode 100644 index 0000000000000..93d274d622b04 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/jackson/MoreMappers.kt @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.jackson + +import com.fasterxml.jackson.core.JsonGenerator +import com.fasterxml.jackson.databind.DeserializationFeature +import com.fasterxml.jackson.databind.ObjectMapper +import com.fasterxml.jackson.dataformat.yaml.YAMLFactory +import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule + +/** + * The [JavaTimeModule] allows mappers to accommodate different varieties of serialised date time + * strings. + * + * All jackson mapper creation should use the following methods for instantiation. + */ +object MoreMappers { + @JvmStatic + fun initMapper(): ObjectMapper { + val result = ObjectMapper().registerModule(JavaTimeModule()) + result.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false) + result.configure(JsonGenerator.Feature.WRITE_BIGDECIMAL_AS_PLAIN, true) + return result + } + + @JvmStatic + fun initYamlMapper(factory: YAMLFactory?): ObjectMapper { + return ObjectMapper(factory).registerModule(JavaTimeModule()) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/json/JsonPaths.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/json/JsonPaths.kt new file mode 100644 index 0000000000000..3761fbe2a2418 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/json/JsonPaths.kt @@ -0,0 +1,358 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.json + +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.node.ArrayNode +import com.google.common.base.Preconditions +import com.jayway.jsonpath.Configuration +import com.jayway.jsonpath.JsonPath +import com.jayway.jsonpath.Option +import com.jayway.jsonpath.PathNotFoundException +import com.jayway.jsonpath.spi.json.JacksonJsonNodeJsonProvider +import com.jayway.jsonpath.spi.json.JsonProvider +import com.jayway.jsonpath.spi.mapper.JacksonMappingProvider +import com.jayway.jsonpath.spi.mapper.MappingProvider +import io.airbyte.commons.util.MoreIterators +import java.util.* +import java.util.function.BiFunction +import java.util.stream.Collectors +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * JSONPath is specification for querying JSON objects. More information about the specification can + * be found here: https://goessner.net/articles/JsonPath/. For those familiar with jq, JSONPath will + * be most recognizable as "that DSL that jq uses". + * + * We use a java implementation of this specification (repo: https://github.com/json-path/JsonPath). + * This class wraps that implementation to make it easier to leverage this tool internally. + * + * GOTCHA: Keep in mind with JSONPath, depending on the query, 0, 1, or N values may be returned. + * The pattern for handling return values is very much like writing SQL queries. When using it, you + * must consider what the number of return values for your query might be. e.g. for this object: { + * "alpha": [1, 2, 3] }, this JSONPath "$.alpha[*]", would return: [1, 2, 3], but this one + * "$.alpha[0]" would return: [1]. The Java interface we place over this query system defaults to + * returning a list for query results. In addition, we provide helper functions that will just + * return a single value (see: [JsonPaths.getSingleValue]). These should only be used if it is not + * possible for a query to return more than one value. + */ +object JsonPaths { + private val LOGGER: Logger = LoggerFactory.getLogger(JsonPaths::class.java) + + const val JSON_PATH_START_CHARACTER: String = "$" + const val JSON_PATH_LIST_SPLAT: String = "[*]" + const val JSON_PATH_FIELD_SEPARATOR: String = "." + + // set default configurations at start up to match our JSON setup. + init { + Configuration.setDefaults( + object : Configuration.Defaults { + // allows us to pass in Jackson JsonNode + private val jsonProvider: JsonProvider = JacksonJsonNodeJsonProvider() + private val mappingProvider: MappingProvider = JacksonMappingProvider() + + override fun jsonProvider(): JsonProvider { + return jsonProvider + } + + override fun mappingProvider(): MappingProvider { + return mappingProvider + } + + override fun options(): Set

     ` try(final ScopedMDCChange scopedMDCChange = new ScopedMDCChange( new HashMap() {{ put("my", "value"); }} )) { ... } ` * 
    * + */ +class MdcScope(keyValuesToAdd: Map) : AutoCloseable { + private val originalContextMap: Map = MDC.getCopyOfContextMap() + + init { + keyValuesToAdd.forEach { (key: String?, `val`: String?) -> MDC.put(key, `val`) } + } + + override fun close() { + MDC.setContextMap(originalContextMap) + } + + class Builder { + private var maybeLogPrefix = Optional.empty() + private var maybePrefixColor = Optional.empty() + private var simple = true + + fun setLogPrefix(logPrefix: String?): Builder { + this.maybeLogPrefix = Optional.ofNullable(logPrefix) + + return this + } + + fun setPrefixColor(color: LoggingHelper.Color?): Builder { + this.maybePrefixColor = Optional.ofNullable(color) + + return this + } + + // Use this to disable simple logging for things in an MdcScope. + // If you're using this, you're probably starting to use MdcScope outside of container + // labelling. + // If so, consider changing the defaults / builder / naming. + fun setSimple(simple: Boolean): Builder { + this.simple = simple + + return this + } + + fun produceMappings(mdcConsumer: BiConsumer) { + maybeLogPrefix.ifPresent { logPrefix: String -> + val potentiallyColoredLog = + maybePrefixColor + .map { color: LoggingHelper.Color -> + LoggingHelper.applyColor(color, logPrefix) + } + .orElse(logPrefix) + mdcConsumer.accept(LoggingHelper.LOG_SOURCE_MDC_KEY, potentiallyColoredLog) + if (simple) { + // outputs much less information for this line. see log4j2.xml to see exactly + // what this does + mdcConsumer.accept("simple", "true") + } + } + } + + fun build(): MdcScope { + val extraMdcEntries: MutableMap = HashMap() + produceMappings { key: String, value: String -> extraMdcEntries[key] = value } + return MdcScope(extraMdcEntries) + } + } + + companion object { + val DEFAULT_BUILDER: Builder = Builder() + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/map/MoreMaps.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/map/MoreMaps.kt new file mode 100644 index 0000000000000..6aa4db06d2b57 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/map/MoreMaps.kt @@ -0,0 +1,21 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.map + +import com.google.common.base.Preconditions + +object MoreMaps { + @SafeVarargs + @JvmStatic + fun merge(vararg maps: Map): Map { + val outputMap: MutableMap = HashMap() + + for (map in maps) { + Preconditions.checkNotNull(map) + outputMap.putAll(map) + } + + return outputMap + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/protocol/DefaultProtocolSerializer.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/protocol/DefaultProtocolSerializer.kt new file mode 100644 index 0000000000000..8c828f37a3e41 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/protocol/DefaultProtocolSerializer.kt @@ -0,0 +1,13 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.protocol + +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog + +class DefaultProtocolSerializer : ProtocolSerializer { + override fun serialize(configuredAirbyteCatalog: ConfiguredAirbyteCatalog): String { + return Jsons.serialize(configuredAirbyteCatalog) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/protocol/ProtocolSerializer.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/protocol/ProtocolSerializer.kt new file mode 100644 index 0000000000000..6f599a0758304 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/protocol/ProtocolSerializer.kt @@ -0,0 +1,10 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.protocol + +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog + +interface ProtocolSerializer { + fun serialize(configuredAirbyteCatalog: ConfiguredAirbyteCatalog): String +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/resources/MoreResources.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/resources/MoreResources.kt new file mode 100644 index 0000000000000..b6ccbfc7bc74d --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/resources/MoreResources.kt @@ -0,0 +1,81 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.resources + +import com.google.common.base.Preconditions +import com.google.common.io.Resources +import io.airbyte.commons.lang.Exceptions +import java.io.File +import java.io.IOException +import java.net.URISyntaxException +import java.nio.charset.StandardCharsets +import java.nio.file.FileSystems +import java.nio.file.Files +import java.nio.file.Path +import java.util.stream.Stream + +object MoreResources { + private const val UNSTABLE_API_USAGE = "UnstableApiUsage" + + @JvmStatic + @Throws(IOException::class) + fun readResource(name: String): String { + val resource = Resources.getResource(name) + return Resources.toString(resource, StandardCharsets.UTF_8) + } + + @Throws(IOException::class) + fun readResource(klass: Class<*>, name: String): String { + val rootedName = if (!name.startsWith("/")) String.format("/%s", name) else name + val url = Resources.getResource(klass, rootedName) + return Resources.toString(url, StandardCharsets.UTF_8) + } + + @Throws(URISyntaxException::class) + fun readResourceAsFile(name: String): File { + return File(Resources.getResource(name).toURI()) + } + + @Throws(IOException::class) + fun readBytes(name: String): ByteArray { + val resource = Resources.getResource(name) + return Resources.toByteArray(resource) + } + + /** + * This class is a bit of a hack. Might have unexpected behavior. + * + * @param klass class whose resources will be access + * @param name path to directory in resources list + * @return stream of paths to each resource file. THIS STREAM MUST BE CLOSED. + * @throws IOException you never know when you IO. + */ + @Throws(IOException::class) + fun listResources(klass: Class<*>, name: String): Stream { + Preconditions.checkNotNull(klass) + Preconditions.checkNotNull(name) + Preconditions.checkArgument(!name.isBlank()) + + try { + val rootedResourceDir = if (!name.startsWith("/")) String.format("/%s", name) else name + val url = klass.getResource(rootedResourceDir) + // noinspection ConstantConditions + Preconditions.checkNotNull(url, "Could not find resource.") + + val searchPath: Path + if (url.toString().startsWith("jar")) { + val fileSystem = FileSystems.newFileSystem(url.toURI(), emptyMap()) + searchPath = fileSystem.getPath(rootedResourceDir) + return Files.walk(searchPath, 1).onClose { + Exceptions.toRuntime { fileSystem.close() } + } + } else { + searchPath = Path.of(url.toURI()) + return Files.walk(searchPath, 1) + } + } catch (e: URISyntaxException) { + throw RuntimeException(e) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/stream/AirbyteStreamStatusHolder.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/stream/AirbyteStreamStatusHolder.kt new file mode 100644 index 0000000000000..14a992241ed4f --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/stream/AirbyteStreamStatusHolder.kt @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.stream + +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair +import io.airbyte.protocol.models.v0.AirbyteStreamStatusTraceMessage +import io.airbyte.protocol.models.v0.AirbyteTraceMessage +import io.airbyte.protocol.models.v0.StreamDescriptor + +/** Represents the current status of a stream provided by a source. */ +class AirbyteStreamStatusHolder( + private val airbyteStream: AirbyteStreamNameNamespacePair?, + private val airbyteStreamStatus: AirbyteStreamStatusTraceMessage.AirbyteStreamStatus +) { + fun toTraceMessage(): AirbyteTraceMessage { + val traceMessage = AirbyteTraceMessage() + val streamStatusTraceMessage = + AirbyteStreamStatusTraceMessage() + .withStreamDescriptor( + StreamDescriptor() + .withName(airbyteStream!!.name) + .withNamespace(airbyteStream.namespace) + ) + .withStatus(airbyteStreamStatus) + return traceMessage + .withEmittedAt(System.currentTimeMillis().toDouble()) + .withStreamStatus(streamStatusTraceMessage) + .withType(AirbyteTraceMessage.Type.STREAM_STATUS) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/stream/AirbyteStreamUtils.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/stream/AirbyteStreamUtils.kt new file mode 100644 index 0000000000000..1d7d9df582f5a --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/stream/AirbyteStreamUtils.kt @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.stream + +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair +import io.airbyte.protocol.models.v0.AirbyteStream + +/** + * Collection of utility methods used to convert objects to [AirbyteStreamNameNamespacePair] + * objects. + */ +object AirbyteStreamUtils { + /** + * Converts an [AirbyteStream] to a [AirbyteStreamNameNamespacePair]. + * + * @param airbyteStream The [AirbyteStream] to convert. + * @return The [AirbyteStreamNameNamespacePair]. + */ + fun convertFromAirbyteStream(airbyteStream: AirbyteStream): AirbyteStreamNameNamespacePair { + return AirbyteStreamNameNamespacePair(airbyteStream.name, airbyteStream.namespace) + } + + /** + * Converts a stream name and namespace into a [AirbyteStreamNameNamespacePair]. + * + * @param name The name of the stream. + * @param namespace The namespace of the stream. + * @return The [AirbyteStreamNameNamespacePair]. + */ + @JvmStatic + fun convertFromNameAndNamespace( + name: String?, + namespace: String? + ): AirbyteStreamNameNamespacePair { + return AirbyteStreamNameNamespacePair(name, namespace) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/stream/MoreStreams.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/stream/MoreStreams.kt new file mode 100644 index 0000000000000..e235fb5454da6 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/stream/MoreStreams.kt @@ -0,0 +1,21 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.stream + +import java.util.* +import java.util.stream.Stream +import java.util.stream.StreamSupport + +object MoreStreams { + fun toStream(iterator: Iterator?): Stream { + return StreamSupport.stream( + Spliterators.spliteratorUnknownSize(iterator, Spliterator.ORDERED), + false + ) + } + + fun toStream(iterable: Iterable): Stream { + return toStream(iterable.iterator()) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/stream/StreamStatusUtils.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/stream/StreamStatusUtils.kt new file mode 100644 index 0000000000000..e06503ecf4176 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/stream/StreamStatusUtils.kt @@ -0,0 +1,265 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.stream + +import io.airbyte.commons.util.AirbyteStreamAware +import io.airbyte.commons.util.AutoCloseableIterator +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.AirbyteStreamStatusTraceMessage +import java.util.* +import java.util.function.Consumer +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** Collection of utility methods that support the generation of stream status updates. */ +object StreamStatusUtils { + private val LOGGER: Logger = LoggerFactory.getLogger(StreamStatusUtils::class.java) + + /** + * Creates a new [Consumer] that wraps the provided [Consumer] with stream status reporting + * capabilities. Specifically, this consumer will emit an [AirbyteStreamStatus.RUNNING] status + * after the first message is consumed by the delegated [Consumer]. + * + * @param stream The stream from which the delegating [Consumer] will consume messages for + * processing. + * @param delegateRecordCollector The delegated [Consumer] that will be called when this + * consumer accepts a message for processing. + * @param streamStatusEmitter The optional [Consumer] that will be used to emit stream status + * updates. + * @return A wrapping [Consumer] that provides stream status updates when the provided delegate + * [Consumer] is invoked. + */ + fun statusTrackingRecordCollector( + stream: AutoCloseableIterator, + delegateRecordCollector: Consumer, + streamStatusEmitter: Optional> + ): Consumer { + return object : Consumer { + private var firstRead = true + + override fun accept(airbyteMessage: AirbyteMessage) { + try { + delegateRecordCollector.accept(airbyteMessage) + } finally { + if (firstRead) { + emitRunningStreamStatus(stream, streamStatusEmitter) + firstRead = false + } + } + } + } + } + + /** + * Emits a [AirbyteStreamStatus.RUNNING] stream status for the provided stream. + * + * @param airbyteStream The stream that should be associated with the stream status. + * @param statusEmitter The [Optional] stream status emitter. + */ + fun emitRunningStreamStatus( + airbyteStream: AutoCloseableIterator, + statusEmitter: Optional> + ) { + if (airbyteStream is AirbyteStreamAware) { + emitRunningStreamStatus(airbyteStream as AirbyteStreamAware, statusEmitter) + } + } + + /** + * Emits a [AirbyteStreamStatus.RUNNING] stream status for the provided stream. + * + * @param airbyteStream The stream that should be associated with the stream status. + * @param statusEmitter The [Optional] stream status emitter. + */ + fun emitRunningStreamStatus( + airbyteStream: AirbyteStreamAware, + statusEmitter: Optional> + ) { + emitRunningStreamStatus(airbyteStream.airbyteStream, statusEmitter) + } + + /** + * Emits a [AirbyteStreamStatus.RUNNING] stream status for the provided stream. + * + * @param airbyteStream The stream that should be associated with the stream status. + * @param statusEmitter The [Optional] stream status emitter. + */ + fun emitRunningStreamStatus( + airbyteStream: Optional, + statusEmitter: Optional> + ) { + airbyteStream!!.ifPresent { s: AirbyteStreamNameNamespacePair? -> + LOGGER.debug("RUNNING -> {}", s) + emitStreamStatus( + s, + AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.RUNNING, + statusEmitter + ) + } + } + + /** + * Emits a [AirbyteStreamStatus.STARTED] stream status for the provided stream. + * + * @param airbyteStream The stream that should be associated with the stream status. + * @param statusEmitter The [Optional] stream status emitter. + */ + fun emitStartStreamStatus( + airbyteStream: AutoCloseableIterator, + statusEmitter: Optional> + ) { + if (airbyteStream is AirbyteStreamAware) { + emitStartStreamStatus(airbyteStream as AirbyteStreamAware, statusEmitter) + } + } + + /** + * Emits a [AirbyteStreamStatus.STARTED] stream status for the provided stream. + * + * @param airbyteStream The stream that should be associated with the stream status. + * @param statusEmitter The [Optional] stream status emitter. + */ + fun emitStartStreamStatus( + airbyteStream: AirbyteStreamAware, + statusEmitter: Optional> + ) { + emitStartStreamStatus(airbyteStream.airbyteStream, statusEmitter) + } + + /** + * Emits a [AirbyteStreamStatus.STARTED] stream status for the provided stream. + * + * @param airbyteStream The stream that should be associated with the stream status. + * @param statusEmitter The [Optional] stream status emitter. + */ + fun emitStartStreamStatus( + airbyteStream: Optional, + statusEmitter: Optional> + ) { + airbyteStream!!.ifPresent { s: AirbyteStreamNameNamespacePair? -> + LOGGER.debug("STARTING -> {}", s) + emitStreamStatus( + s, + AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.STARTED, + statusEmitter + ) + } + } + + /** + * Emits a [AirbyteStreamStatus.COMPLETE] stream status for the provided stream. + * + * @param airbyteStream The stream that should be associated with the stream status. + * @param statusEmitter The [Optional] stream status emitter. + */ + fun emitCompleteStreamStatus( + airbyteStream: AutoCloseableIterator, + statusEmitter: Optional> + ) { + if (airbyteStream is AirbyteStreamAware) { + emitCompleteStreamStatus(airbyteStream as AirbyteStreamAware, statusEmitter) + } + } + + /** + * Emits a [AirbyteStreamStatus.COMPLETE] stream status for the provided stream. + * + * @param airbyteStream The stream that should be associated with the stream status. + * @param statusEmitter The [Optional] stream status emitter. + */ + fun emitCompleteStreamStatus( + airbyteStream: AirbyteStreamAware, + statusEmitter: Optional> + ) { + emitCompleteStreamStatus(airbyteStream.airbyteStream, statusEmitter) + } + + /** + * Emits a [AirbyteStreamStatus.COMPLETE] stream status for the provided stream. + * + * @param airbyteStream The stream that should be associated with the stream status. + * @param statusEmitter The [Optional] stream status emitter. + */ + fun emitCompleteStreamStatus( + airbyteStream: Optional, + statusEmitter: Optional> + ) { + airbyteStream!!.ifPresent { s: AirbyteStreamNameNamespacePair? -> + LOGGER.debug("COMPLETE -> {}", s) + emitStreamStatus( + s, + AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.COMPLETE, + statusEmitter + ) + } + } + + /** + * Emits a [AirbyteStreamStatus.INCOMPLETE] stream status for the provided stream. + * + * @param airbyteStream The stream that should be associated with the stream status. + * @param statusEmitter The [Optional] stream status emitter. + */ + fun emitIncompleteStreamStatus( + airbyteStream: AutoCloseableIterator, + statusEmitter: Optional> + ) { + if (airbyteStream is AirbyteStreamAware) { + emitIncompleteStreamStatus(airbyteStream as AirbyteStreamAware, statusEmitter) + } + } + + /** + * Emits a [AirbyteStreamStatus.INCOMPLETE] stream status for the provided stream. + * + * @param airbyteStream The stream that should be associated with the stream status. + * @param statusEmitter The [Optional] stream status emitter. + */ + fun emitIncompleteStreamStatus( + airbyteStream: AirbyteStreamAware, + statusEmitter: Optional> + ) { + emitIncompleteStreamStatus(airbyteStream.airbyteStream, statusEmitter) + } + + /** + * Emits a [AirbyteStreamStatus.INCOMPLETE] stream status for the provided stream. + * + * @param airbyteStream The stream that should be associated with the stream status. + * @param statusEmitter The [Optional] stream status emitter. + */ + fun emitIncompleteStreamStatus( + airbyteStream: Optional, + statusEmitter: Optional> + ) { + airbyteStream!!.ifPresent { s: AirbyteStreamNameNamespacePair? -> + LOGGER.debug("INCOMPLETE -> {}", s) + emitStreamStatus( + s, + AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.INCOMPLETE, + statusEmitter + ) + } + } + + /** + * Emits a stream status for the provided stream. + * + * @param airbyteStreamNameNamespacePair The stream identifier. + * @param airbyteStreamStatus The status update. + * @param statusEmitter The [Optional] stream status emitter. + */ + private fun emitStreamStatus( + airbyteStreamNameNamespacePair: AirbyteStreamNameNamespacePair?, + airbyteStreamStatus: AirbyteStreamStatusTraceMessage.AirbyteStreamStatus, + statusEmitter: Optional> + ) { + statusEmitter.ifPresent { + it.accept( + AirbyteStreamStatusHolder(airbyteStreamNameNamespacePair, airbyteStreamStatus) + ) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/string/Strings.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/string/Strings.kt new file mode 100644 index 0000000000000..b5f53c462570c --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/string/Strings.kt @@ -0,0 +1,26 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.string + +import java.util.* +import org.apache.commons.lang3.RandomStringUtils + +object Strings { + @JvmStatic + fun join(iterable: Iterable, separator: CharSequence): String { + return iterable.joinToString(separator) { it.toString() } + } + + @JvmStatic + fun addRandomSuffix(base: String, separator: String, suffixLength: Int): String { + return base + + separator + + RandomStringUtils.randomAlphabetic(suffixLength).lowercase(Locale.getDefault()) + } + + @JvmStatic + fun safeTrim(string: String?): String? { + return string?.trim { it <= ' ' } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/text/Names.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/text/Names.kt new file mode 100644 index 0000000000000..4f764ddd4c3c5 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/text/Names.kt @@ -0,0 +1,52 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.text + +import com.google.common.base.Preconditions +import java.text.Normalizer + +object Names { + const val NON_ALPHANUMERIC_AND_UNDERSCORE_PATTERN: String = "[^\\p{Alnum}_]" + + /** + * Converts any UTF8 string to a string with only alphanumeric and _ characters without + * preserving accent characters. + * + * @param s string to convert + * @return cleaned string + */ + @JvmStatic + fun toAlphanumericAndUnderscore(s: String): String { + return Normalizer.normalize(s, Normalizer.Form.NFKD) + .replace( + "\\p{M}".toRegex(), + "" + ) // P{M} matches a code point that is not a combining mark (unicode) + .replace("\\s+".toRegex(), "_") + .replace(NON_ALPHANUMERIC_AND_UNDERSCORE_PATTERN.toRegex(), "_") + } + + fun doubleQuote(value: String): String { + return internalQuote(value, '"') + } + + fun singleQuote(value: String): String { + return internalQuote(value, '\'') + } + + private fun internalQuote(value: String, quoteChar: Char): String { + Preconditions.checkNotNull(value) + + val startsWithChar = value[0] == quoteChar + val endsWithChar = value[value.length - 1] == quoteChar + + Preconditions.checkState(startsWithChar == endsWithChar, "Invalid value: %s", value) + + return if (startsWithChar) { + value + } else { + String.format("%c%s%c", quoteChar, value, quoteChar) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/text/Sqls.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/text/Sqls.kt new file mode 100644 index 0000000000000..f0a79783dfb45 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/text/Sqls.kt @@ -0,0 +1,28 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.text + +import java.util.* + +object Sqls { + fun ?> toSqlName(value: T): String { + return value!!.name.lowercase(Locale.getDefault()) + } + + fun ?> toSqlNames(values: Collection): Set { + return values.map { toSqlName(it) }.toSet() + } + + /** + * Generate a string fragment that can be put in the IN clause of a SQL statement. eg. column IN + * (value1, value2) + * + * @param values to encode + * @param enum type + * @return "'value1', 'value2', 'value3'" + */ + fun ?> toSqlInFragment(values: Iterable): String { + return values.map { toSqlName(it) }.joinToString(",", "(", ")") { Names.singleQuote(it) } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/util/AirbyteStreamAware.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/util/AirbyteStreamAware.kt new file mode 100644 index 0000000000000..d38d0c9e59c6e --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/util/AirbyteStreamAware.kt @@ -0,0 +1,19 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.util + +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair +import java.util.* + +/** + * Interface that indicates that an object exposes information used to identify an Airbyte stream. + */ +interface AirbyteStreamAware { + /** + * Returns the [AirbyteStreamNameNamespacePair] identifying the Airbyte stream associated with + * the object. + */ + val airbyteStream: Optional + get() = Optional.empty() +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/util/AutoCloseableIterator.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/util/AutoCloseableIterator.kt new file mode 100644 index 0000000000000..fbb182e2e6123 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/util/AutoCloseableIterator.kt @@ -0,0 +1,12 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.util + +/** + * If you operate on this iterator, you better close it. [AutoCloseableIterator.close] must be + * idempotent. The contract on this interface is that it may be called MANY times. + * + * @param type + */ +interface AutoCloseableIterator : MutableIterator, AutoCloseable, AirbyteStreamAware diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/util/AutoCloseableIterators.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/util/AutoCloseableIterators.kt new file mode 100644 index 0000000000000..5ef441c6e6e6c --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/util/AutoCloseableIterators.kt @@ -0,0 +1,263 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.util + +import com.google.common.collect.Iterators +import io.airbyte.commons.concurrency.VoidCallable +import io.airbyte.commons.stream.AirbyteStreamStatusHolder +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair +import java.util.function.Consumer +import java.util.function.Function +import java.util.function.Supplier +import java.util.stream.Stream + +object AutoCloseableIterators { + /** + * Coerces a vanilla [Iterator] into a [AutoCloseableIterator] by adding a no op close function. + * + * @param iterator iterator to convert + * @param type + * @return closeable iterator + */ + @JvmStatic + fun fromIterator(iterator: Iterator): AutoCloseableIterator { + return DefaultAutoCloseableIterator(iterator, VoidCallable.NOOP, null) + } + + /** + * Coerces a vanilla [Iterator] into a [AutoCloseableIterator] by adding a no op close function. + * + * @param iterator iterator to convert + * @param type + * @return closeable iterator + */ + @JvmStatic + fun fromIterator( + iterator: Iterator, + airbyteStream: AirbyteStreamNameNamespacePair? + ): AutoCloseableIterator { + return DefaultAutoCloseableIterator(iterator, VoidCallable.NOOP, airbyteStream) + } + + /** + * Coerces a vanilla [Iterator] into a [AutoCloseableIterator]. The provided onClose function + * will be called at most one time. + * + * @param iterator autocloseable iterator to add another close to + * @param onClose the function that will be called on close + * @param type + * @return new autocloseable iterator with the close function appended + */ + @JvmStatic + fun fromIterator( + iterator: Iterator, + onClose: VoidCallable, + airbyteStream: AirbyteStreamNameNamespacePair? + ): AutoCloseableIterator { + return DefaultAutoCloseableIterator(iterator, onClose, airbyteStream) + } + + /** + * Wraps a [Stream] in a [AutoCloseableIterator]. The first time [AutoCloseableIterator.close] + * is called, [Stream.close] will be called. It will not be called again subsequently. + * + * @param stream stream to wrap + * @param type + * @return autocloseable iterator + */ + @JvmStatic + fun fromStream( + stream: Stream, + airbyteStream: AirbyteStreamNameNamespacePair? + ): AutoCloseableIterator { + return DefaultAutoCloseableIterator(stream.iterator(), { stream.close() }, airbyteStream) + } + + /** Consumes entire iterator and collect it into a list. Then it closes the iterator. */ + @Throws(Exception::class) + @JvmStatic + fun toListAndClose(iterator: AutoCloseableIterator): List { + iterator.use { + return MoreIterators.toList(iterator) + } + } + + /** + * Returns a [AutoCloseableIterator] that will call the provided supplier ONE time when + * [AutoCloseableIterator.hasNext] is called the first time. The supplier returns a stream that + * will be exposed as an iterator. + * + * @param iteratorSupplier supplier that provides a autocloseable iterator that will be invoked + * lazily + * @param type + * @return autocloseable iterator + */ + @JvmStatic + fun lazyIterator( + iteratorSupplier: Supplier>, + airbyteStream: AirbyteStreamNameNamespacePair? + ): AutoCloseableIterator { + return LazyAutoCloseableIterator(iteratorSupplier, airbyteStream) + } + + /** + * Append a function to be called on [AutoCloseableIterator.close]. + * + * @param autoCloseableIterator autocloseable iterator to add another close to + * @param voidCallable the function that will be called on close + * @param type + * @return new autocloseable iterator with the close function appended + */ + @JvmStatic + fun appendOnClose( + autoCloseableIterator: AutoCloseableIterator, + voidCallable: VoidCallable + ): AutoCloseableIterator { + return DefaultAutoCloseableIterator( + autoCloseableIterator, + { + autoCloseableIterator.close() + voidCallable.call() + }, + null + ) + } + + /** + * Append a function to be called on [AutoCloseableIterator.close]. + * + * @param autoCloseableIterator autocloseable iterator to add another close to + * @param voidCallable the function that will be called on close + * @param type + * @return new autocloseable iterator with the close function appended + */ + fun appendOnClose( + autoCloseableIterator: AutoCloseableIterator, + voidCallable: VoidCallable, + airbyteStream: AirbyteStreamNameNamespacePair? + ): AutoCloseableIterator { + return DefaultAutoCloseableIterator( + autoCloseableIterator, + { + autoCloseableIterator.close() + voidCallable.call() + }, + airbyteStream + ) + } + + /** + * Lift and shift of Guava's [Iterators.transform] using the [AutoCloseableIterator] interface. + * + * @param fromIterator input autocloseable iterator + * @param function map function + * @param input type + * @param output type + * @return mapped autocloseable iterator + */ + @JvmStatic + fun transform( + fromIterator: AutoCloseableIterator, + function: Function + ): AutoCloseableIterator { + val transformed = Iterators.transform(fromIterator) { function.apply(it) } + return DefaultAutoCloseableIterator(transformed, fromIterator::close, null) + } + + /** + * Lift and shift of Guava's [Iterators.transform] using the [AutoCloseableIterator] interface. + * + * @param fromIterator input autocloseable iterator + * @param function map function + * @param input type + * @param output type + * @return mapped autocloseable iterator + */ + @JvmStatic + fun transform( + fromIterator: AutoCloseableIterator, + airbyteStream: AirbyteStreamNameNamespacePair?, + function: Function + ): AutoCloseableIterator { + return DefaultAutoCloseableIterator( + Iterators.transform(fromIterator) { t: F -> function.apply(t) }, + { fromIterator.close() }, + airbyteStream + ) + } + + /** + * Map over a [AutoCloseableIterator] using a vanilla [Iterator] while retaining all of the + * Resource behavior of the input [AutoCloseableIterator]. + * + * @param iteratorCreator function that takes in a autocloseable iterator and uses it to create + * a vanilla iterator + * @param autoCloseableIterator input autocloseable iterator + * @param type + * @return autocloseable iterator that still has the close functionality of the original input + * iterator but is transformed by the iterator output by the iteratorCreator + */ + @JvmStatic + fun transform( + iteratorCreator: Function, Iterator>, + autoCloseableIterator: AutoCloseableIterator, + airbyteStream: AirbyteStreamNameNamespacePair? + ): AutoCloseableIterator { + return DefaultAutoCloseableIterator( + iteratorCreator.apply(autoCloseableIterator), + { autoCloseableIterator.close() }, + airbyteStream + ) + } + + @JvmStatic + fun transformIterator( + iteratorCreator: Function, Iterator>, + autoCloseableIterator: AutoCloseableIterator, + airbyteStream: AirbyteStreamNameNamespacePair? + ): AutoCloseableIterator { + return DefaultAutoCloseableIterator( + iteratorCreator.apply(autoCloseableIterator), + { autoCloseableIterator.close() }, + airbyteStream + ) + } + + @SafeVarargs + @JvmStatic + fun concatWithEagerClose( + airbyteStreamStatusConsumer: Consumer?, + vararg iterators: AutoCloseableIterator + ): CompositeIterator { + return concatWithEagerClose(java.util.List.of(*iterators), airbyteStreamStatusConsumer) + } + + @SafeVarargs + @JvmStatic + fun concatWithEagerClose(vararg iterators: AutoCloseableIterator): CompositeIterator { + return concatWithEagerClose(java.util.List.of(*iterators), null) + } + + /** + * Creates a [CompositeIterator] that reads from the provided iterators in a serial fashion. + * + * @param iterators The list of iterators to be used in a serial fashion. + * @param airbyteStreamStatusConsumer The stream status consumer used to report stream status + * during iteration. + * @return A [CompositeIterator]. + * @param The type of data contained in each iterator. + */ + @JvmStatic + fun concatWithEagerClose( + iterators: List>, + airbyteStreamStatusConsumer: Consumer? + ): CompositeIterator { + return CompositeIterator(iterators, airbyteStreamStatusConsumer) + } + + @JvmStatic + fun concatWithEagerClose(iterators: List>): CompositeIterator { + return concatWithEagerClose(iterators, null) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/util/CompositeIterator.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/util/CompositeIterator.kt new file mode 100644 index 0000000000000..3b6c989995d8c --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/util/CompositeIterator.kt @@ -0,0 +1,153 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.util + +import com.google.common.base.Preconditions +import com.google.common.collect.AbstractIterator +import io.airbyte.commons.stream.AirbyteStreamStatusHolder +import io.airbyte.commons.stream.StreamStatusUtils +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair +import java.util.* +import java.util.function.Consumer +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * Composes multiple [AutoCloseableIterator]s. For each internal iterator, after the first time its + * [Iterator.hasNext] function returns false, the composite iterator will call + * [AutoCloseableIterator.close] on that internal iterator. + * + * [CompositeIterator]s should be closed. Calling [CompositeIterator.close] will attempt to close + * each internal iterator as well. Thus the close method on each internal iterator should be + * idempotent as it is will likely be called multiple times. + * + * [CompositeIterator.close] gives the guarantee that it will call close on each internal iterator + * once (even if any of the iterators throw an exception). After it has attempted to close each one + * once, [CompositeIterator] will rethrow the _first_ exception that it encountered while closing + * internal iterators. If multiple internal iterators throw exceptions, only the first exception + * will be rethrown, though the others will be logged. + * + * @param type + */ +class CompositeIterator +internal constructor( + iterators: List>, + airbyteStreamStatusConsumer: Consumer? +) : AbstractIterator(), AutoCloseableIterator { + private val airbyteStreamStatusConsumer: Optional> + private val iterators: List> + + private var i: Int + private val seenIterators: MutableSet> + private var hasClosed: Boolean + + init { + Preconditions.checkNotNull(iterators) + + this.airbyteStreamStatusConsumer = Optional.ofNullable(airbyteStreamStatusConsumer) + this.iterators = iterators + this.i = 0 + this.seenIterators = HashSet() + this.hasClosed = false + } + + override fun computeNext(): T? { + assertHasNotClosed() + + if (iterators.isEmpty()) { + return endOfData() + } + + // 1. search for an iterator that hasNext. + // 2. close each iterator we encounter those that do not. + // 3. if there are none, we are done. + while (!currentIterator().hasNext()) { + try { + currentIterator().close() + emitStartStreamStatus(currentIterator().airbyteStream) + StreamStatusUtils.emitCompleteStreamStatus( + airbyteStream, + airbyteStreamStatusConsumer + ) + } catch (e: Exception) { + StreamStatusUtils.emitIncompleteStreamStatus( + airbyteStream, + airbyteStreamStatusConsumer + ) + throw RuntimeException(e) + } + + if (i + 1 < iterators.size) { + i++ + } else { + return endOfData() + } + } + + try { + val isFirstRun = emitStartStreamStatus(currentIterator().airbyteStream) + val next = currentIterator().next() + if (isFirstRun) { + StreamStatusUtils.emitRunningStreamStatus( + airbyteStream, + airbyteStreamStatusConsumer + ) + } + return next + } catch (e: RuntimeException) { + StreamStatusUtils.emitIncompleteStreamStatus(airbyteStream, airbyteStreamStatusConsumer) + throw e + } + } + + private fun currentIterator(): AutoCloseableIterator { + return iterators[i] + } + + private fun emitStartStreamStatus( + airbyteStream: Optional + ): Boolean { + if (airbyteStream!!.isPresent && !seenIterators.contains(airbyteStream)) { + seenIterators.add(airbyteStream) + StreamStatusUtils.emitStartStreamStatus(airbyteStream, airbyteStreamStatusConsumer) + return true + } + return false + } + + @Throws(Exception::class) + override fun close() { + hasClosed = true + + val exceptions: MutableList = ArrayList() + for (iterator in iterators) { + try { + iterator.close() + } catch (e: Exception) { + LOGGER.error("exception while closing", e) + exceptions.add(e) + } + } + + if (!exceptions.isEmpty()) { + throw exceptions[0] + } + } + + override val airbyteStream: Optional + get() = + if (currentIterator() is AirbyteStreamAware) { + AirbyteStreamAware::class.java.cast(currentIterator()).airbyteStream + } else { + Optional.empty() + } + + private fun assertHasNotClosed() { + Preconditions.checkState(!hasClosed) + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(CompositeIterator::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/util/DefaultAutoCloseableIterator.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/util/DefaultAutoCloseableIterator.kt new file mode 100644 index 0000000000000..ce00340d5bea5 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/util/DefaultAutoCloseableIterator.kt @@ -0,0 +1,60 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.util + +import com.google.common.base.Preconditions +import com.google.common.collect.AbstractIterator +import io.airbyte.commons.concurrency.VoidCallable +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair +import java.util.* + +/** + * The canonical [AutoCloseableIterator]. The default behavior guarantees that the provided close + * functional will be called no more than one time. + * + * @param type + */ +internal class DefaultAutoCloseableIterator( + iterator: Iterator, + onClose: VoidCallable, + airbyteStream: AirbyteStreamNameNamespacePair? +) : AbstractIterator(), AutoCloseableIterator { + override val airbyteStream: Optional + private val iterator: Iterator + private val onClose: VoidCallable + + private var hasClosed: Boolean + + init { + Preconditions.checkNotNull(iterator) + Preconditions.checkNotNull(onClose) + + this.airbyteStream = Optional.ofNullable(airbyteStream) + this.iterator = iterator + this.onClose = onClose + this.hasClosed = false + } + + override fun computeNext(): T? { + assertHasNotClosed() + + return if (iterator.hasNext()) { + iterator.next() + } else { + endOfData() + } + } + + @Throws(Exception::class) + override fun close() { + if (!hasClosed) { + hasClosed = true + onClose.call() + } + } + + private fun assertHasNotClosed() { + Preconditions.checkState(!hasClosed) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/util/LazyAutoCloseableIterator.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/util/LazyAutoCloseableIterator.kt new file mode 100644 index 0000000000000..7e63fdd140362 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/util/LazyAutoCloseableIterator.kt @@ -0,0 +1,58 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.util + +import com.google.common.base.Preconditions +import com.google.common.collect.AbstractIterator +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair +import java.util.* +import java.util.function.Supplier + +/** + * A [AutoCloseableIterator] that calls the provided supplier the first time + * [AutoCloseableIterator.hasNext] or [AutoCloseableIterator.next] is called. If + * [AutoCloseableIterator.hasNext] or [AutoCloseableIterator.next] are never called, then the + * supplier will never be called. This means if the iterator is closed in this state, the close + * function on the input iterator will not be called. The assumption here is that if nothing is ever + * supplied, then there is nothing to close. + * + * @param type + */ +internal class LazyAutoCloseableIterator( + iteratorSupplier: Supplier>, + airbyteStream: AirbyteStreamNameNamespacePair? +) : AbstractIterator(), AutoCloseableIterator { + private val iteratorSupplier: Supplier> + override val airbyteStream: Optional + private var hasSupplied: Boolean + private var internalIterator: AutoCloseableIterator? = null + + init { + Preconditions.checkNotNull(iteratorSupplier) + this.airbyteStream = Optional.ofNullable(airbyteStream) + this.iteratorSupplier = iteratorSupplier + this.hasSupplied = false + } + + override fun computeNext(): T? { + if (!hasSupplied) { + internalIterator = iteratorSupplier.get() + Preconditions.checkNotNull(internalIterator, "Supplied iterator was null.") + hasSupplied = true + } + + return if (internalIterator!!.hasNext()) { + internalIterator!!.next() + } else { + endOfData() + } + } + + @Throws(Exception::class) + override fun close() { + if (internalIterator != null) { + internalIterator!!.close() + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/util/MoreIterators.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/util/MoreIterators.kt new file mode 100644 index 0000000000000..cd1e0db241783 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/util/MoreIterators.kt @@ -0,0 +1,71 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.util + +import com.google.common.collect.AbstractIterator +import java.util.* +import java.util.function.Supplier + +object MoreIterators { + /** + * Create an iterator from elements + * + * @param elements element to put in iterator + * @param type + * @return iterator with all elements + */ + @SafeVarargs + @JvmStatic + fun of(vararg elements: T): Iterator { + return Arrays.asList(*elements).iterator() + } + + /** + * Create a list from an iterator + * + * @param iterator iterator to convert + * @param type + * @return list + */ + @JvmStatic + fun toList(iterator: Iterator): List { + val list: MutableList = ArrayList() + while (iterator.hasNext()) { + list.add(iterator.next()) + } + return list + } + + /** + * Create a set from an iterator + * + * @param iterator iterator to convert + * @param type + * @return set + */ + @JvmStatic + fun toSet(iterator: Iterator): Set { + val set: MutableSet = HashSet() + while (iterator.hasNext()) { + set.add(iterator.next()) + } + return set + } + + @JvmStatic + fun singletonIteratorFromSupplier(supplier: Supplier): Iterator { + return object : AbstractIterator() { + private var hasSupplied = false + + override fun computeNext(): T? { + if (!hasSupplied) { + hasSupplied = true + return supplier.get() + } else { + return endOfData() + } + } + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/version/AirbyteProtocolVersion.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/version/AirbyteProtocolVersion.kt new file mode 100644 index 0000000000000..59c0c29943857 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/version/AirbyteProtocolVersion.kt @@ -0,0 +1,21 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.version + +object AirbyteProtocolVersion { + val DEFAULT_AIRBYTE_PROTOCOL_VERSION: Version = Version("0.2.0") + val V0: Version = Version("0.3.0") + val V1: Version = Version("1.0.0") + + const val AIRBYTE_PROTOCOL_VERSION_MAX_KEY_NAME: String = "airbyte_protocol_version_max" + const val AIRBYTE_PROTOCOL_VERSION_MIN_KEY_NAME: String = "airbyte_protocol_version_min" + + fun getWithDefault(version: String?): Version { + return if (version == null || version.isEmpty() || version.isBlank()) { + DEFAULT_AIRBYTE_PROTOCOL_VERSION + } else { + Version(version) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/version/AirbyteVersion.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/version/AirbyteVersion.kt new file mode 100644 index 0000000000000..b8d6c1af1b12a --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/version/AirbyteVersion.kt @@ -0,0 +1,73 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.version + +/** + * The AirbyteVersion identifies the version of the database used internally by Airbyte services. + */ +class AirbyteVersion : Version { + constructor(version: String) : super(version) + + constructor(major: String?, minor: String?, patch: String?) : super(major, minor, patch) + + override fun toString(): String { + return "AirbyteVersion{" + + "version='" + + version + + '\'' + + ", major='" + + this.major + + '\'' + + ", minor='" + + minor + + '\'' + + ", patch='" + + patch + + '\'' + + '}' + } + + companion object { + const val AIRBYTE_VERSION_KEY_NAME: String = "airbyte_version" + + @Throws(IllegalStateException::class) + fun assertIsCompatible(version1: AirbyteVersion, version2: AirbyteVersion) { + check(Version.Companion.isCompatible(version1, version2)) { + getErrorMessage(version1, version2) + } + } + + fun getErrorMessage(version1: AirbyteVersion, version2: AirbyteVersion): String { + return String.format( + """ + Version mismatch between %s and %s. + Please upgrade or reset your Airbyte Database, see more at https://docs.airbyte.io/operator-guides/upgrading-airbyte + """.trimIndent(), + version1.serialize(), + version2.serialize() + ) + } + + fun versionWithoutPatch(airbyteVersion: AirbyteVersion): AirbyteVersion { + val versionWithoutPatch = + ("" + + airbyteVersion.major + + "." + + airbyteVersion.minor + + ".0-" + + airbyteVersion + .serialize() + .replace("\n", "") + .trim() + .split("-".toRegex()) + .dropLastWhile { it.isEmpty() } + .toTypedArray()[1]) + return AirbyteVersion(versionWithoutPatch) + } + + fun versionWithoutPatch(airbyteVersion: String): AirbyteVersion { + return versionWithoutPatch(AirbyteVersion(airbyteVersion)) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/version/Version.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/version/Version.kt new file mode 100644 index 0000000000000..14ae34b61f82d --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/version/Version.kt @@ -0,0 +1,179 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.version + +import com.fasterxml.jackson.databind.annotation.JsonDeserialize +import com.fasterxml.jackson.databind.annotation.JsonSerialize +import com.google.common.base.Preconditions +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings +import java.util.* + +/** A semVer Version class that allows "dev" as a version. */ +@JsonDeserialize(using = VersionDeserializer::class) +@JsonSerialize(using = VersionSerializer::class) +open class Version { + // We really should have 2 private subtypes: One for dev, and one for standard version, where + // all the fields are non nullable + val version: String + val major: String? + val minor: String? + val patch: String? + + constructor(version: String) { + Preconditions.checkNotNull(version) + this.version = version + val parsedVersion = + version + .replace("\n", "") + .trim() + .split("-".toRegex()) + .dropLastWhile { it.isEmpty() } + .toTypedArray()[0] + .split("\\.".toRegex()) + .dropLastWhile { it.isEmpty() } + .toTypedArray() + + if (isDev) { + this.major = null + this.minor = null + this.patch = null + } else { + Preconditions.checkArgument(parsedVersion.size >= 3, "Invalid version string: $version") + this.major = parsedVersion[0] + this.minor = parsedVersion[1] + this.patch = parsedVersion[2] + } + } + + constructor(major: String?, minor: String?, patch: String?) { + this.version = String.format("%s.%s.%s", major, minor, patch) + this.major = major + this.minor = minor + this.patch = patch + } + + fun serialize(): String { + return version + } + + /** + * Compares two Version to check if they are equivalent. + * + * Only the major and minor part of the Version is taken into account. + */ + fun compatibleVersionCompareTo(another: Version): Int { + if (isDev || another.isDev) return 0 + val majorDiff = compareVersion(major!!, another.major!!) + if (majorDiff != 0) { + return majorDiff + } + return compareVersion(minor!!, another.minor!!) + } + + /** @return true if this is greater than other. otherwise false. */ + fun greaterThan(other: Version): Boolean { + return patchVersionCompareTo(other) > 0 + } + + /** @return true if this is greater than or equal toother. otherwise false. */ + fun greaterThanOrEqualTo(other: Version): Boolean { + return patchVersionCompareTo(other) >= 0 + } + + /** @return true if this is less than other. otherwise false. */ + fun lessThan(other: Version): Boolean { + return patchVersionCompareTo(other) < 0 + } + + /** Compares two Version to check if they are equivalent (including patch version). */ + fun patchVersionCompareTo(another: Version): Int { + if (isDev || another.isDev) { + return 0 + } + val majorDiff = compareVersion(major!!, another.major!!) + if (majorDiff != 0) { + return majorDiff + } + val minorDiff = compareVersion(minor!!, another.minor!!) + if (minorDiff != 0) { + return minorDiff + } + return compareVersion(patch!!, another.patch!!) + } + + /** Compares two Version to check if only the patch version was updated. */ + fun checkOnlyPatchVersionIsUpdatedComparedTo(another: Version): Boolean { + if (isDev || another.isDev) { + return false + } + val majorDiff = compareVersion(major!!, another.major!!) + if (majorDiff > 0) { + return false + } + val minorDiff = compareVersion(minor!!, another.minor!!) + if (minorDiff > 0) { + return false + } + return compareVersion(patch!!, another.patch!!) > 0 + } + + val isDev: Boolean + get() = version.startsWith(DEV_VERSION_PREFIX) + + override fun toString(): String { + return "Version{" + + "version='" + + version + + '\'' + + ", major='" + + major + + '\'' + + ", minor='" + + minor + + '\'' + + ", patch='" + + patch + + '\'' + + '}' + } + + override fun equals(o: Any?): Boolean { + if (this === o) { + return true + } + if (o == null || javaClass != o.javaClass) { + return false + } + val that = o as Version + return version == that.version && + major == that.major && + minor == that.minor && + patch == that.patch + } + + override fun hashCode(): Int { + return Objects.hash(version, major, minor, patch) + } + + companion object { + const val DEV_VERSION_PREFIX: String = "dev" + + /** + * Version string needs to be converted to integer for comparison, because string comparison + * does not handle version string with different digits correctly. For example: + * `"11".compare("3") < 0`, while `Integer.compare(11, 3) > 0`. + */ + @SuppressFBWarnings( + "NP_NULL_PARAM_DEREF" + ) // We really should have 2 different subtypes of version, one for dev, and for standard + // versions + private fun compareVersion(v1: String?, v2: String?): Int { + return Integer.compare(v1!!.toInt(), v2!!.toInt()) + } + + fun isCompatible(v1: Version, v2: Version): Boolean { + return v1.compatibleVersionCompareTo(v2) == 0 + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/version/VersionDeserializer.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/version/VersionDeserializer.kt new file mode 100644 index 0000000000000..a3e92a82f6e84 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/version/VersionDeserializer.kt @@ -0,0 +1,21 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.version + +import com.fasterxml.jackson.core.JacksonException +import com.fasterxml.jackson.core.JsonParser +import com.fasterxml.jackson.databind.DeserializationContext +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.deser.std.StdDeserializer +import java.io.IOException + +class VersionDeserializer @JvmOverloads constructor(vc: Class<*>? = null) : + StdDeserializer(vc) { + @Throws(IOException::class, JacksonException::class) + override fun deserialize(p: JsonParser, ctxt: DeserializationContext): Version { + val node = p.codec.readTree(p) + val v = node["version"].asText() + return Version(v) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/version/VersionSerializer.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/version/VersionSerializer.kt new file mode 100644 index 0000000000000..c90d270c450b7 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/version/VersionSerializer.kt @@ -0,0 +1,19 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.version + +import com.fasterxml.jackson.core.JsonGenerator +import com.fasterxml.jackson.databind.SerializerProvider +import com.fasterxml.jackson.databind.ser.std.StdSerializer +import java.io.IOException + +class VersionSerializer @JvmOverloads constructor(t: Class? = null) : + StdSerializer(t) { + @Throws(IOException::class) + override fun serialize(value: Version, gen: JsonGenerator, provider: SerializerProvider) { + gen.writeStartObject() + gen.writeStringField("version", value.version) + gen.writeEndObject() + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/yaml/Yamls.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/yaml/Yamls.kt new file mode 100644 index 0000000000000..2b1a5a848ef0c --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/commons/yaml/Yamls.kt @@ -0,0 +1,150 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.yaml + +import com.fasterxml.jackson.core.JsonProcessingException +import com.fasterxml.jackson.core.JsonToken +import com.fasterxml.jackson.core.type.TypeReference +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.ObjectMapper +import com.fasterxml.jackson.databind.SequenceWriter +import com.fasterxml.jackson.dataformat.yaml.YAMLFactory +import com.fasterxml.jackson.dataformat.yaml.YAMLGenerator +import com.google.common.collect.AbstractIterator +import io.airbyte.commons.concurrency.VoidCallable +import io.airbyte.commons.jackson.MoreMappers.initYamlMapper +import io.airbyte.commons.lang.CloseableConsumer +import io.airbyte.commons.lang.Exceptions.toRuntime +import io.airbyte.commons.util.AutoCloseableIterator +import io.airbyte.commons.util.AutoCloseableIterators +import java.io.IOException +import java.io.InputStream +import java.io.Writer + +object Yamls { + private val YAML_FACTORY = YAMLFactory() + private val OBJECT_MAPPER = initYamlMapper(YAML_FACTORY) + + private val YAML_FACTORY_WITHOUT_QUOTES: YAMLFactory = + YAMLFactory().enable(YAMLGenerator.Feature.MINIMIZE_QUOTES) + private val OBJECT_MAPPER_WITHOUT_QUOTES = initYamlMapper(YAML_FACTORY_WITHOUT_QUOTES) + + /** + * Serialize object to YAML string. String values WILL be wrapped in double quotes. + * + * @param object + * - object to serialize + * @return YAML string version of object + */ + fun serialize(`object`: T): String { + try { + return OBJECT_MAPPER.writeValueAsString(`object`) + } catch (e: JsonProcessingException) { + throw RuntimeException(e) + } + } + + /** + * Serialize object to YAML string. String values will NOT be wrapped in double quotes. + * + * @param object + * - object to serialize + * @return YAML string version of object + */ + fun serializeWithoutQuotes(`object`: Any?): String { + try { + return OBJECT_MAPPER_WITHOUT_QUOTES.writeValueAsString(`object`) + } catch (e: JsonProcessingException) { + throw RuntimeException(e) + } + } + + fun deserialize(yamlString: String?, klass: Class?): T { + try { + return OBJECT_MAPPER.readValue(yamlString, klass) + } catch (e: IOException) { + throw RuntimeException(e) + } + } + + fun deserialize(yamlString: String?, typeReference: TypeReference?): T { + try { + return OBJECT_MAPPER.readValue(yamlString, typeReference) + } catch (e: IOException) { + throw RuntimeException(e) + } + } + + @JvmStatic + fun deserialize(yamlString: String?): JsonNode { + try { + return OBJECT_MAPPER.readTree(yamlString) + } catch (e: IOException) { + throw RuntimeException(e) + } + } + + fun deserializeArray(stream: InputStream?): AutoCloseableIterator { + try { + val parser = YAML_FACTORY.createParser(stream) + + // Check the first token + check(parser.nextToken() == JsonToken.START_ARRAY) { "Expected content to be an array" } + + val iterator: Iterator = + object : AbstractIterator() { + override fun computeNext(): JsonNode? { + try { + while (parser.nextToken() != JsonToken.END_ARRAY) { + return parser.readValueAsTree() + } + } catch (e: IOException) { + throw RuntimeException(e) + } + return endOfData() + } + } + + return AutoCloseableIterators.fromIterator( + iterator, + VoidCallable { parser.close() }, + null + )!! + } catch (e: IOException) { + throw RuntimeException(e) + } + } + + // todo (cgardens) - share this with Jsons if ever needed. + /** + * Creates a consumer that writes list items to the writer in a streaming fashion. + * + * @param writer writer to write to + * @param type of items being written + * @return consumer that is able to write element to a list element by element. must be closed! + * + */ + fun listWriter(writer: Writer?): CloseableConsumer { + return YamlConsumer(writer, OBJECT_MAPPER) + } + + class YamlConsumer(writer: Writer?, objectMapper: ObjectMapper) : CloseableConsumer { + private val sequenceWriter: SequenceWriter = + toRuntime(callable = { objectMapper.writer().writeValuesAsArray(writer) }) + + override fun accept(t: T) { + try { + sequenceWriter.write(t) + } catch (e: IOException) { + throw RuntimeException(e) + } + } + + @Throws(Exception::class) + override fun close() { + // closing the SequenceWriter closes the Writer that it wraps. + sequenceWriter.close() + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/configoss/AirbyteConfig.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/configoss/AirbyteConfig.kt new file mode 100644 index 0000000000000..02bf6472a4003 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/configoss/AirbyteConfig.kt @@ -0,0 +1,22 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.configoss + +import java.io.File + +/** This interface represents configuration objects used by Airbyte and Airbyte cloud */ +interface AirbyteConfig { + fun getName(): String? + + /** @return the name of the field storing the id for the configuration object */ + val idFieldName: String? + + /** @return the actual id of the configuration object */ + fun getId(config: T): String + + /** @return the path to the yaml file that defines the schema of the configuration object */ + val configSchemaFile: File + + fun getClassName(): Class +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/configoss/AirbyteConfigValidator.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/configoss/AirbyteConfigValidator.kt new file mode 100644 index 0000000000000..d0cb7b54f2e28 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/configoss/AirbyteConfigValidator.kt @@ -0,0 +1,17 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.configoss + +import io.airbyte.validation.json.AbstractSchemaValidator +import java.nio.file.Path + +class AirbyteConfigValidator : AbstractSchemaValidator() { + override fun getSchemaPath(configType: ConfigSchema): Path { + return configType.configSchemaFile.toPath() + } + + companion object { + val AIRBYTE_CONFIG_VALIDATOR: AirbyteConfigValidator = AirbyteConfigValidator() + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/configoss/CatalogDefinitionsConfig.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/configoss/CatalogDefinitionsConfig.kt new file mode 100644 index 0000000000000..aa95fbf367534 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/configoss/CatalogDefinitionsConfig.kt @@ -0,0 +1,22 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.configoss + +object CatalogDefinitionsConfig { + private const val SEED_SUBDIRECTORY = "seed/" + const val iconSubdirectory: String = "icons/" + private const val LOCAL_CONNECTOR_CATALOG_FILE_NAME = "oss_registry.json" + private const val DEFAULT_LOCAL_CONNECTOR_CATALOG_PATH = + SEED_SUBDIRECTORY + LOCAL_CONNECTOR_CATALOG_FILE_NAME + + val localConnectorCatalogPath: String + get() { + val customCatalogPath = EnvConfigs().localCatalogPath + if (customCatalogPath!!.isPresent) { + return customCatalogPath.get() + } + + return DEFAULT_LOCAL_CONNECTOR_CATALOG_PATH + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/configoss/ConfigSchema.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/configoss/ConfigSchema.kt new file mode 100644 index 0000000000000..d003618dbc885 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/configoss/ConfigSchema.kt @@ -0,0 +1,134 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.configoss + +import io.airbyte.commons.json.JsonSchemas.prepareSchemas +import java.io.File +import java.nio.file.Path +import java.util.function.Function + +enum class ConfigSchema : AirbyteConfig { + // workspace + WORKSPACE_WEBHOOK_OPERATION_CONFIGS( + "WebhookOperationConfigs.yaml", + WebhookOperationConfigs::class.java + ), + + // source + STANDARD_SOURCE_DEFINITION( + "StandardSourceDefinition.yaml", + StandardSourceDefinition::class.java, + Function { + standardSourceDefinition: StandardSourceDefinition -> + standardSourceDefinition.sourceDefinitionId.toString() + }, + "sourceDefinitionId" + ), + SOURCE_CONNECTION( + "SourceConnection.yaml", + SourceConnection::class.java, + Function { sourceConnection: SourceConnection -> + sourceConnection.sourceId.toString() + }, + "sourceId" + ), + + // destination + STANDARD_DESTINATION_DEFINITION( + "StandardDestinationDefinition.yaml", + StandardDestinationDefinition::class.java, + Function { + standardDestinationDefinition: StandardDestinationDefinition -> + standardDestinationDefinition.destinationDefinitionId.toString() + }, + "destinationDefinitionId" + ), + DESTINATION_CONNECTION( + "DestinationConnection.yaml", + DestinationConnection::class.java, + Function { destinationConnection: DestinationConnection -> + destinationConnection.destinationId.toString() + }, + "destinationId" + ), + STANDARD_SYNC_OPERATION( + "StandardSyncOperation.yaml", + StandardSyncOperation::class.java, + Function { standardSyncOperation: StandardSyncOperation -> + standardSyncOperation.operationId.toString() + }, + "operationId" + ), + SOURCE_OAUTH_PARAM( + "SourceOAuthParameter.yaml", + SourceOAuthParameter::class.java, + Function { sourceOAuthParameter: SourceOAuthParameter -> + sourceOAuthParameter.oauthParameterId.toString() + }, + "oauthParameterId" + ), + DESTINATION_OAUTH_PARAM( + "DestinationOAuthParameter.yaml", + DestinationOAuthParameter::class.java, + Function { + destinationOAuthParameter: DestinationOAuthParameter -> + destinationOAuthParameter.oauthParameterId.toString() + }, + "oauthParameterId" + ), + + // worker + STANDARD_SYNC_INPUT("StandardSyncInput.yaml", StandardSyncInput::class.java), + STATE("State.yaml", State::class.java); + + private val schemaFilename: String + private val className: Class<*> + private val extractId: Function<*, String> + override val idFieldName: String? + + constructor( + schemaFilename: String, + className: Class<*>, + extractId: Function<*, String>, + idFieldName: String + ) { + this.schemaFilename = schemaFilename + this.className = className + this.extractId = extractId + this.idFieldName = idFieldName + } + + constructor(schemaFilename: String, className: Class<*>) { + this.schemaFilename = schemaFilename + this.className = className + extractId = Function { `object`: Any? -> + throw RuntimeException(className.getSimpleName() + " doesn't have an id") + } + idFieldName = null + } + + override val configSchemaFile: File + get() = KNOWN_SCHEMAS_ROOT.resolve(schemaFilename).toFile() + + override fun getClassName(): Class { + return className as Class + } + + override fun getId(`object`: T): String { + if (getClassName().isInstance(`object`)) { + return (extractId as Function).apply(`object`) + } + throw RuntimeException( + "Object: " + `object` + " is not instance of class " + getClassName().name + ) + } + + override fun getName(): String { + return name + } + + companion object { + val KNOWN_SCHEMAS_ROOT: Path = prepareSchemas("types", ConfigSchema::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/configoss/Configs.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/configoss/Configs.kt new file mode 100644 index 0000000000000..4fb4944bdb92a --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/configoss/Configs.kt @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.configoss + +/** + * This interface defines the general variables for configuring Airbyte. + * + * Please update the configuring-airbyte.md document when modifying this file. + * + * Please also add one of the following tags to the env var accordingly: + * + * 1. 'Internal-use only' if a var is mainly for Airbyte-only configuration. e.g. tracking, test or + * Cloud related etc. + * + * 2. 'Alpha support' if a var does not have proper support and should be used with care. + */ +interface Configs { + /** + * Defines the bucket for caching specs. This immensely speeds up spec operations. This is + * updated when new versions are published. + */ + val specCacheBucket: String + + enum class DeploymentMode { + OSS, + CLOUD + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/configoss/EnvConfigs.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/configoss/EnvConfigs.kt new file mode 100644 index 0000000000000..c1928e9fea24d --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/configoss/EnvConfigs.kt @@ -0,0 +1,64 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.configoss + +import java.util.* +import java.util.function.Function +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +class EnvConfigs @JvmOverloads constructor(envMap: Map = System.getenv()) : + Configs { + private val getEnv = Function { key: String? -> envMap[key] } + + /** + * Constructs [EnvConfigs] from a provided map. This can be used for testing or getting + * variables from a non-envvar source. + */ + override val specCacheBucket: String + get() = getEnvOrDefault(SPEC_CACHE_BUCKET, DEFAULT_SPEC_CACHE_BUCKET) + + val localCatalogPath: Optional + get() = Optional.ofNullable(getEnv(LOCAL_CONNECTOR_CATALOG_PATH)) + + // Worker - Data plane + // Helpers + fun getEnvOrDefault(key: String, defaultValue: String): String { + return getEnvOrDefault(key, defaultValue, Function.identity(), false) + } + + fun getEnvOrDefault( + key: String, + defaultValue: T, + parser: Function, + isSecret: Boolean + ): T { + val value = getEnv.apply(key) + if (value != null && !value.isEmpty()) { + return parser.apply(value) + } else { + LOGGER.info( + "Using default value for environment variable {}: '{}'", + key, + if (isSecret) "*****" else defaultValue + ) + return defaultValue + } + } + + fun getEnv(name: String): String? { + return getEnv.apply(name) + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(EnvConfigs::class.java) + + // env variable names + const val SPEC_CACHE_BUCKET: String = "SPEC_CACHE_BUCKET" + const val LOCAL_CONNECTOR_CATALOG_PATH: String = "LOCAL_CONNECTOR_CATALOG_PATH" + + // defaults + private const val DEFAULT_SPEC_CACHE_BUCKET = "io-airbyte-cloud-spec-cache" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/configoss/helpers/StateMessageHelper.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/configoss/helpers/StateMessageHelper.kt new file mode 100644 index 0000000000000..da986dbf15448 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/configoss/helpers/StateMessageHelper.kt @@ -0,0 +1,135 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.configoss.helpers + +import com.fasterxml.jackson.core.type.TypeReference +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.commons.json.Jsons.jsonNode +import io.airbyte.commons.json.Jsons.`object` +import io.airbyte.configoss.State +import io.airbyte.configoss.StateType +import io.airbyte.configoss.StateWrapper +import io.airbyte.protocol.models.AirbyteStateMessage +import java.util.* + +object StateMessageHelper { + /** + * This a takes a json blob state and tries return either a legacy state in the format of a json + * object or a state message with the new format which is a list of airbyte state message. + * + * @param state + * - a blob representing the state + * @return An optional state wrapper, if there is no state an empty optional will be returned + */ + fun getTypedState(state: JsonNode?): Optional { + if (state == null) { + return Optional.empty() + } else { + val stateMessages: List + try { + stateMessages = + `object`>( + state, + AirbyteStateMessageListTypeReference() + ) + } catch (e: IllegalArgumentException) { + return Optional.of(getLegacyStateWrapper(state)) + } + if (stateMessages.isEmpty()) { + return Optional.empty() + } + + if (stateMessages.size == 1) { + return if (stateMessages[0].type == null) { + Optional.of(getLegacyStateWrapper(state)) + } else { + when (stateMessages[0].type) { + AirbyteStateMessage.AirbyteStateType.GLOBAL -> { + Optional.of(provideGlobalState(stateMessages[0])) + } + AirbyteStateMessage.AirbyteStateType.STREAM -> { + Optional.of(provideStreamState(stateMessages)) + } + AirbyteStateMessage.AirbyteStateType.LEGACY -> { + Optional.of(getLegacyStateWrapper(stateMessages[0].data)) + } + else -> { + // Should not be reachable. + throw IllegalStateException("Unexpected state type") + } + } + } + } else { + if ( + stateMessages.stream().allMatch { stateMessage: AirbyteStateMessage -> + stateMessage.type == AirbyteStateMessage.AirbyteStateType.STREAM + } + ) { + return Optional.of(provideStreamState(stateMessages)) + } + if ( + stateMessages.stream().allMatch { stateMessage: AirbyteStateMessage -> + stateMessage.type == null + } + ) { + return Optional.of(getLegacyStateWrapper(state)) + } + + throw IllegalStateException( + "Unexpected state blob, the state contains either multiple global or conflicting state type." + ) + } + } + } + + /** + * Converts a StateWrapper to a State + * + * LegacyStates are directly serialized into the state. GlobalStates and StreamStates are + * serialized as a list of AirbyteStateMessage in the state attribute. + * + * @param stateWrapper the StateWrapper to convert + * @return the Converted State + */ + fun getState(stateWrapper: StateWrapper): State { + return when (stateWrapper.stateType) { + StateType.LEGACY -> State().withState(stateWrapper.legacyState) + StateType.STREAM -> State().withState(jsonNode(stateWrapper.stateMessages)) + StateType.GLOBAL -> State().withState(jsonNode(java.util.List.of(stateWrapper.global))) + else -> throw RuntimeException("Unexpected StateType " + stateWrapper.stateType) + } + } + + fun isMigration(currentStateType: StateType, previousState: Optional): Boolean { + return previousState.isPresent && + isMigration(currentStateType, previousState.get().stateType) + } + + fun isMigration(currentStateType: StateType, previousStateType: StateType?): Boolean { + return previousStateType == StateType.LEGACY && currentStateType != StateType.LEGACY + } + + private fun provideGlobalState(stateMessages: AirbyteStateMessage): StateWrapper { + return StateWrapper().withStateType(StateType.GLOBAL).withGlobal(stateMessages) + } + + /** + * This is returning a wrapped state, it assumes that the state messages are ordered. + * + * @param stateMessages + * - an ordered list of state message + * @param useStreamCapableState + * - a flag that indicates whether to return the new format + * @return a wrapped state + */ + private fun provideStreamState(stateMessages: List): StateWrapper { + return StateWrapper().withStateType(StateType.STREAM).withStateMessages(stateMessages) + } + + private fun getLegacyStateWrapper(state: JsonNode): StateWrapper { + return StateWrapper().withStateType(StateType.LEGACY).withLegacyState(state) + } + + class AirbyteStateMessageListTypeReference : TypeReference>() +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/configoss/helpers/YamlListToStandardDefinitions.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/configoss/helpers/YamlListToStandardDefinitions.kt new file mode 100644 index 0000000000000..92a7110a7483a --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/configoss/helpers/YamlListToStandardDefinitions.kt @@ -0,0 +1,100 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.configoss.helpers + +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.util.ClassUtil +import com.google.common.annotations.VisibleForTesting +import com.google.common.base.Preconditions +import io.airbyte.commons.json.Jsons.clone +import io.airbyte.commons.json.Jsons.`object` +import io.airbyte.commons.yaml.Yamls.deserialize +import io.airbyte.configoss.StandardDestinationDefinition +import io.airbyte.configoss.StandardSourceDefinition +import java.util.* + +/** + * This is a convenience class for the conversion of a list of source/destination definitions from + * human-friendly yaml to processing friendly formats i.e. Java models or JSON. As this class + * performs validation, it is recommended to use this class to deal with plain lists. An example of + * such lists are Airbyte's master definition lists, which can be seen in the resources folder of + * the airbyte-config-oss/seed module. + * + * In addition to usual deserialization validations, we check: 1) The given list contains no + * duplicate names. 2) The given list contains no duplicate ids. + * + * Methods in these class throw Runtime exceptions upon validation failure. + */ +object YamlListToStandardDefinitions { + private val CLASS_NAME_TO_ID_NAME: Map = + java.util.Map.ofEntries( + AbstractMap.SimpleImmutableEntry( + StandardDestinationDefinition::class.java.canonicalName, + "destinationDefinitionId" + ), + AbstractMap.SimpleImmutableEntry( + StandardSourceDefinition::class.java.canonicalName, + "sourceDefinitionId" + ) + ) + + fun toStandardSourceDefinitions(yamlStr: String?): List { + return verifyAndConvertToModelList(StandardSourceDefinition::class.java, yamlStr) + } + + fun toStandardDestinationDefinitions(yamlStr: String?): List { + return verifyAndConvertToModelList(StandardDestinationDefinition::class.java, yamlStr) + } + + fun verifyAndConvertToJsonNode(idName: String?, yamlStr: String?): JsonNode { + val jsonNode = deserialize(yamlStr) + checkYamlIsPresentWithNoDuplicates(jsonNode, idName) + return jsonNode + } + + @VisibleForTesting + fun verifyAndConvertToModelList(klass: Class, yamlStr: String?): List { + val jsonNode = deserialize(yamlStr) + val idName = CLASS_NAME_TO_ID_NAME[klass.canonicalName] + checkYamlIsPresentWithNoDuplicates(jsonNode, idName) + return toStandardXDefinitions(jsonNode.elements(), klass) + } + + private fun checkYamlIsPresentWithNoDuplicates(deserialize: JsonNode, idName: String?) { + val presentDestList = deserialize.elements() != ClassUtil.emptyIterator() + Preconditions.checkState(presentDestList, "Definition list is empty") + checkNoDuplicateNames(deserialize.elements()) + checkNoDuplicateIds(deserialize.elements(), idName) + } + + private fun checkNoDuplicateNames(iter: Iterator) { + val names = HashSet() + while (iter.hasNext()) { + val element = clone(iter.next()) + val name = element["name"].asText() + require(!names.contains(name)) { "Multiple records have the name: $name" } + names.add(name) + } + } + + private fun checkNoDuplicateIds(fileIterator: Iterator, idName: String?) { + val ids = HashSet() + while (fileIterator.hasNext()) { + val element = clone(fileIterator.next()) + val id = element[idName].asText() + require(!ids.contains(id)) { "Multiple records have the id: $id" } + ids.add(id) + } + } + + private fun toStandardXDefinitions(iter: Iterator, c: Class): List { + val iterable = Iterable { iter } + val defList = ArrayList() + for (n in iterable) { + val def = `object`(n, c) + defList.add(def) + } + return defList + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/validation/json/AbstractSchemaValidator.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/validation/json/AbstractSchemaValidator.kt new file mode 100644 index 0000000000000..8b3d0cfb1d39e --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/validation/json/AbstractSchemaValidator.kt @@ -0,0 +1,35 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.validation.json + +import com.fasterxml.jackson.databind.JsonNode +import java.nio.file.Path + +abstract class AbstractSchemaValidator> +@JvmOverloads +constructor(private val jsonSchemaValidator: JsonSchemaValidator = JsonSchemaValidator()) : + ConfigSchemaValidator { + abstract fun getSchemaPath(configType: T): Path + + private fun getSchemaJson(configType: T): JsonNode { + return JsonSchemaValidator.Companion.getSchema(getSchemaPath(configType).toFile()) + } + + override fun validate(configType: T, objectJson: JsonNode): Set? { + return jsonSchemaValidator.validate(getSchemaJson(configType), objectJson) + } + + override fun test(configType: T, objectJson: JsonNode): Boolean { + return jsonSchemaValidator.test(getSchemaJson(configType), objectJson) + } + + @Throws(JsonValidationException::class) + override fun ensure(configType: T, objectJson: JsonNode) { + jsonSchemaValidator.ensure(getSchemaJson(configType), objectJson) + } + + override fun ensureAsRuntime(configType: T, objectJson: JsonNode) { + jsonSchemaValidator.ensureAsRuntime(getSchemaJson(configType), objectJson) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/validation/json/ConfigSchemaValidator.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/validation/json/ConfigSchemaValidator.kt new file mode 100644 index 0000000000000..68cbb19c61f60 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/validation/json/ConfigSchemaValidator.kt @@ -0,0 +1,16 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.validation.json + +import com.fasterxml.jackson.databind.JsonNode + +interface ConfigSchemaValidator> { + fun validate(configType: T, objectJson: JsonNode): Set? + + fun test(configType: T, objectJson: JsonNode): Boolean + + @Throws(JsonValidationException::class) fun ensure(configType: T, objectJson: JsonNode) + + fun ensureAsRuntime(configType: T, objectJson: JsonNode) +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/validation/json/JsonSchemaValidator.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/validation/json/JsonSchemaValidator.kt new file mode 100644 index 0000000000000..185a614e67d12 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/validation/json/JsonSchemaValidator.kt @@ -0,0 +1,255 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.validation.json + +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.annotations.VisibleForTesting +import com.google.common.base.Preconditions +import com.networknt.schema.* +import java.io.File +import java.io.IOException +import java.net.URI +import java.net.URISyntaxException +import java.util.stream.Collectors +import me.andrz.jackson.JsonContext +import me.andrz.jackson.JsonReferenceException +import me.andrz.jackson.JsonReferenceProcessor +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +class JsonSchemaValidator @VisibleForTesting constructor(private val baseUri: URI?) { + private val jsonSchemaFactory: JsonSchemaFactory = + JsonSchemaFactory.getInstance(SpecVersion.VersionFlag.V7) + private val schemaToValidators: MutableMap = HashMap() + + constructor() : this(DEFAULT_BASE_URI) + + /** + * Create and cache a schema validator for a particular schema. This validator is used when + * [.testInitializedSchema] and [.ensureInitializedSchema] is called. + */ + fun initializeSchemaValidator(schemaName: String, schemaJson: JsonNode) { + schemaToValidators[schemaName] = getSchemaValidator(schemaJson) + } + + /** Returns true if the object adheres to the given schema and false otherwise. */ + fun testInitializedSchema(schemaName: String, objectJson: JsonNode?): Boolean { + val schema = schemaToValidators[schemaName] + Preconditions.checkNotNull( + schema, + "$schemaName needs to be initialised before calling this method" + ) + + val validate = schema!!.validate(objectJson) + return validate.isEmpty() + } + + /** Throws an exception if the object does not adhere to the given schema. */ + @Throws(JsonValidationException::class) + fun ensureInitializedSchema(schemaName: String, objectNode: JsonNode?) { + val schema = schemaToValidators[schemaName] + Preconditions.checkNotNull( + schema, + "$schemaName needs to be initialised before calling this method" + ) + + val validationMessages = schema!!.validate(objectNode) + if (validationMessages.isEmpty()) { + return + } + throw JsonValidationException( + String.format( + "json schema validation failed when comparing the data to the json schema. \nErrors: %s \nSchema: \n%s", + validationMessages.joinToString(", "), + schemaName + ) + ) + } + + /** + * WARNING + * + * The following methods perform JSON validation **by re-creating a validator each time**. This + * is both CPU and GC expensive, and should be used carefully. + */ + // todo(davin): Rewrite this section to cache schemas. + fun test(schemaJson: JsonNode, objectJson: JsonNode): Boolean { + val validationMessages = validateInternal(schemaJson, objectJson) + + if (!validationMessages.isEmpty()) { + LOGGER.info( + "JSON schema validation failed. \nerrors: {}", + validationMessages.joinToString(", ") + ) + } + + return validationMessages.isEmpty() + } + + fun validate(schemaJson: JsonNode, objectJson: JsonNode): Set { + return validateInternal(schemaJson, objectJson) + .stream() + .map { obj: ValidationMessage -> obj.message } + .collect(Collectors.toSet()) + } + + fun getValidationMessageArgs(schemaJson: JsonNode, objectJson: JsonNode): List> { + return validateInternal(schemaJson, objectJson) + .stream() + .map { obj: ValidationMessage -> obj.arguments } + .collect(Collectors.toList()) + } + + fun getValidationMessagePaths(schemaJson: JsonNode, objectJson: JsonNode): List { + return validateInternal(schemaJson, objectJson) + .stream() + .map { obj: ValidationMessage -> obj.path } + .collect(Collectors.toList()) + } + + @Throws(JsonValidationException::class) + fun ensure(schemaJson: JsonNode, objectJson: JsonNode) { + val validationMessages = validateInternal(schemaJson, objectJson) + if (validationMessages.isEmpty()) { + return + } + + throw JsonValidationException( + String.format( + "json schema validation failed when comparing the data to the json schema. \nErrors: %s \nSchema: \n%s", + validationMessages.joinToString(", "), + schemaJson.toPrettyString() + ) + ) + } + + fun ensureAsRuntime(schemaJson: JsonNode, objectJson: JsonNode) { + try { + ensure(schemaJson, objectJson) + } catch (e: JsonValidationException) { + throw RuntimeException(e) + } + } + + // keep this internal as it returns a type specific to the wrapped library. + private fun validateInternal( + schemaJson: JsonNode, + objectJson: JsonNode + ): Set { + Preconditions.checkNotNull(schemaJson) + Preconditions.checkNotNull(objectJson) + + val schema = getSchemaValidator(schemaJson) + return schema.validate(objectJson) + } + + /** Return a schema validator for a json schema, defaulting to the V7 Json schema. */ + private fun getSchemaValidator(schemaJson: JsonNode): JsonSchema { + // Default to draft-07, but have handling for the other metaschemas that networknt supports + val metaschema: JsonMetaSchema + val metaschemaNode = schemaJson["\$schema"] + if (metaschemaNode?.asText() == null || metaschemaNode.asText().isEmpty()) { + metaschema = JsonMetaSchema.getV7() + } else { + val metaschemaString = metaschemaNode.asText() + // We're not using "http://....".equals(), because we want to avoid weirdness with + // https, etc. + metaschema = + if (metaschemaString.contains("json-schema.org/draft-04")) { + JsonMetaSchema.getV4() + } else if (metaschemaString.contains("json-schema.org/draft-06")) { + JsonMetaSchema.getV6() + } else if (metaschemaString.contains("json-schema.org/draft/2019-09")) { + JsonMetaSchema.getV201909() + } else if (metaschemaString.contains("json-schema.org/draft/2020-12")) { + JsonMetaSchema.getV202012() + } else { + JsonMetaSchema.getV7() + } + } + + val context = + ValidationContext( + jsonSchemaFactory.uriFactory, + null, + metaschema, + jsonSchemaFactory, + null + ) + val schema = JsonSchema(context, baseUri, schemaJson) + return schema + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(JsonSchemaValidator::class.java) + + // This URI just needs to point at any path in the same directory as + // /app/WellKnownTypes.json + // It's required for the JsonSchema#validate method to resolve $ref correctly. + private var DEFAULT_BASE_URI: URI? = null + + init { + try { + DEFAULT_BASE_URI = URI("file:///app/nonexistent_file.json") + } catch (e: URISyntaxException) { + throw RuntimeException(e) + } + } + + /** + * Get JsonNode for an object defined as the main object in a JsonSchema file. Able to + * create the JsonNode even if the the JsonSchema refers to objects in other files. + * + * @param schemaFile + * - the schema file + * @return schema object processed from across all dependency files. + */ + @JvmStatic + fun getSchema(schemaFile: File?): JsonNode { + try { + return processor.process(schemaFile) + } catch (e: IOException) { + throw RuntimeException(e) + } catch (e: JsonReferenceException) { + throw RuntimeException(e) + } + } + + /** + * Get JsonNode for an object defined in the "definitions" section of a JsonSchema file. + * Able to create the JsonNode even if the the JsonSchema refers to objects in other files. + * + * @param schemaFile + * - the schema file + * @param definitionStructName + * - get the schema from a struct defined in the "definitions" section of a JsonSchema file + * (instead of the main object in that file). + * @return schema object processed from across all dependency files. + */ + fun getSchema(schemaFile: File?, definitionStructName: String?): JsonNode { + try { + val jsonContext = JsonContext(schemaFile) + return processor.process( + jsonContext, + jsonContext.document["definitions"][definitionStructName] + ) + } catch (e: IOException) { + throw RuntimeException(e) + } catch (e: JsonReferenceException) { + throw RuntimeException(e) + } + } + + private val processor: JsonReferenceProcessor + get() { + // JsonReferenceProcessor follows $ref in json objects. Jackson does not natively + // support + // this. + val jsonReferenceProcessor = JsonReferenceProcessor() + jsonReferenceProcessor.maxDepth = -1 // no max. + + return jsonReferenceProcessor + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/validation/json/JsonValidationException.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/validation/json/JsonValidationException.kt new file mode 100644 index 0000000000000..88715fdccf462 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/kotlin/io/airbyte/validation/json/JsonValidationException.kt @@ -0,0 +1,10 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.validation.json + +class JsonValidationException : Exception { + constructor(message: String?) : super(message) + + constructor(message: String?, cause: Throwable?) : super(message, cause) +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/api/client/AirbyteApiClientTest.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/api/client/AirbyteApiClientTest.java deleted file mode 100644 index f344d96a28e00..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/api/client/AirbyteApiClientTest.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.api.client; - -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import java.util.concurrent.Callable; -import org.junit.jupiter.api.Nested; -import org.junit.jupiter.api.Test; -import org.mockito.Mock; - -public class AirbyteApiClientTest { - - // These set of configurations are so each test case takes ~3 secs. - private static final int TEST_JITTER_INTERVAL_SECS = 1; - private static final int TEST_FINAL_INTERVAL_SECS = 1; - private static final int TEST_MAX_RETRIES = 2; - @Mock - private Callable mockCallable; - - @Nested - class RetryWithJitter { - - @Test - // Should not retry on success - void ifSucceedShouldNotRetry() throws Exception { - mockCallable = mock(Callable.class); - when(mockCallable.call()).thenReturn("Success!"); - - AirbyteApiClient.retryWithJitter(mockCallable, "test", TEST_JITTER_INTERVAL_SECS, TEST_FINAL_INTERVAL_SECS, TEST_MAX_RETRIES); - - verify(mockCallable, times(1)).call(); - } - - @Test - // Should retry up to the configured max retries on continued errors - void onlyRetryTillMaxRetries() throws Exception { - mockCallable = mock(Callable.class); - when(mockCallable.call()).thenThrow(new RuntimeException("Bomb!")); - - AirbyteApiClient.retryWithJitter(mockCallable, "test", TEST_JITTER_INTERVAL_SECS, TEST_FINAL_INTERVAL_SECS, TEST_MAX_RETRIES); - - verify(mockCallable, times(TEST_MAX_RETRIES)).call(); - - } - - @Test - // Should retry only if there are errors - void onlyRetryOnErrors() throws Exception { - mockCallable = mock(Callable.class); - // Because we succeed on the second try, we should only call the method twice. - when(mockCallable.call()) - .thenThrow(new RuntimeException("Bomb!")) - .thenReturn("Success!"); - - AirbyteApiClient.retryWithJitter(mockCallable, "test", TEST_JITTER_INTERVAL_SECS, TEST_FINAL_INTERVAL_SECS, 3); - - verify(mockCallable, times(2)).call(); - - } - - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/cli/ClisTest.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/cli/ClisTest.java deleted file mode 100644 index fa89a64d23d50..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/cli/ClisTest.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.cli; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; - -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.DefaultParser; -import org.apache.commons.cli.Option; -import org.apache.commons.cli.Options; -import org.junit.jupiter.api.Test; - -class ClisTest { - - private static final String ALPHA = "alpha"; - private static final String BETA = "beta"; - - @Test - void testParse() { - final Option optionA = Option.builder("a").required(true).hasArg(true).build(); - final Option optionB = Option.builder("b").required(true).hasArg(true).build(); - final Options options = new Options().addOption(optionA).addOption(optionB); - final String[] args = {"-a", ALPHA, "-b", BETA}; - final CommandLine parsed = Clis.parse(args, options, new DefaultParser()); - assertEquals(ALPHA, parsed.getOptions()[0].getValue()); - assertEquals(BETA, parsed.getOptions()[1].getValue()); - } - - @Test - void testParseNonConforming() { - final Option optionA = Option.builder("a").required(true).hasArg(true).build(); - final Option optionB = Option.builder("b").required(true).hasArg(true).build(); - final Options options = new Options().addOption(optionA).addOption(optionB); - final String[] args = {"-a", ALPHA, "-b", BETA, "-c", "charlie"}; - assertThrows(IllegalArgumentException.class, () -> Clis.parse(args, options, new DefaultParser())); - } - - @Test - void testParseNonConformingWithSyntax() { - final Option optionA = Option.builder("a").required(true).hasArg(true).build(); - final Option optionB = Option.builder("b").required(true).hasArg(true).build(); - final Options options = new Options().addOption(optionA).addOption(optionB); - final String[] args = {"-a", ALPHA, "-b", BETA, "-c", "charlie"}; - assertThrows(IllegalArgumentException.class, () -> Clis.parse(args, options, new DefaultParser(), "search")); - } - - @Test - void testRelaxedParser() { - final Option optionA = Option.builder("a").required(true).hasArg(true).build(); - final Option optionB = Option.builder("b").required(true).hasArg(true).build(); - final Options options = new Options().addOption(optionA).addOption(optionB); - final String[] args = {"-a", ALPHA, "-b", BETA, "-c", "charlie"}; - final CommandLine parsed = Clis.parse(args, options, Clis.getRelaxedParser()); - assertEquals(ALPHA, parsed.getOptions()[0].getValue()); - assertEquals(BETA, parsed.getOptions()[1].getValue()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/concurrency/CompletableFuturesTest.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/concurrency/CompletableFuturesTest.java deleted file mode 100644 index def67f8e5916f..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/concurrency/CompletableFuturesTest.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.concurrency; - -import static org.junit.jupiter.api.Assertions.*; - -import io.airbyte.commons.functional.Either; -import java.util.Arrays; -import java.util.List; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.CompletionStage; -import org.junit.jupiter.api.Test; - -class CompletableFuturesTest { - - @Test - public void testAllOf() { - // Complete in random order - final List> futures = Arrays.asList( - returnSuccessWithDelay(1, 2000), - returnSuccessWithDelay(2, 200), - returnSuccessWithDelay(3, 500), - returnSuccessWithDelay(4, 100), - returnFailureWithDelay("Fail 5", 2000), - returnFailureWithDelay("Fail 6", 300)); - - final CompletableFuture>> allOfResult = CompletableFutures.allOf(futures).toCompletableFuture(); - final List> result = allOfResult.join(); - List> success = result.stream().filter(Either::isRight).toList(); - assertEquals(success, Arrays.asList( - Either.right(1), - Either.right(2), - Either.right(3), - Either.right(4))); - // Extract wrapped CompletionException messages. - final List failureMessages = result.stream().filter(Either::isLeft).map(either -> either.getLeft().getCause().getMessage()).toList(); - assertEquals(failureMessages, Arrays.asList("Fail 5", "Fail 6")); - } - - private CompletableFuture returnSuccessWithDelay(final int value, final long delayMs) { - return CompletableFuture.supplyAsync(() -> { - try { - Thread.sleep(delayMs); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - return value; - }); - } - - private CompletableFuture returnFailureWithDelay(final String message, final long delayMs) { - return CompletableFuture.supplyAsync(() -> { - try { - Thread.sleep(delayMs); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - throw new RuntimeException(message); - }); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/concurrency/WaitingUtilsTest.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/concurrency/WaitingUtilsTest.java deleted file mode 100644 index f3b6f655f97b4..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/concurrency/WaitingUtilsTest.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.concurrency; - -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import java.time.Duration; -import java.util.function.Supplier; -import org.junit.jupiter.api.Test; - -class WaitingUtilsTest { - - @SuppressWarnings("unchecked") - @Test - void testWaitForConditionConditionMet() { - final Supplier condition = mock(Supplier.class); - when(condition.get()) - .thenReturn(false) - .thenReturn(false) - .thenReturn(true); - assertTrue(WaitingUtils.waitForCondition(Duration.ofMillis(1), Duration.ofMillis(5), condition)); - } - - @SuppressWarnings("unchecked") - @Test - void testWaitForConditionTimeout() { - final Supplier condition = mock(Supplier.class); - when(condition.get()).thenReturn(false); - assertFalse(WaitingUtils.waitForCondition(Duration.ofMillis(1), Duration.ofMillis(5), condition)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/enums/EnumsTest.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/enums/EnumsTest.java deleted file mode 100644 index 2f16974524c34..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/enums/EnumsTest.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.enums; - -import static io.airbyte.commons.enums.Enums.convertTo; -import static io.airbyte.commons.enums.Enums.isCompatible; -import static io.airbyte.commons.enums.Enums.toEnum; - -import java.util.Optional; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -class EnumsTest { - - enum E1 { - TEST, - TEST2 - } - - enum E2 { - TEST - } - - enum E3 { - TEST, - TEST2 - } - - enum E4 { - TEST, - TEST3 - } - - @Test - void testConversion() { - Assertions.assertEquals(E2.TEST, convertTo(E1.TEST, E2.class)); - } - - @Test - void testConversionFails() { - Assertions.assertThrows(IllegalArgumentException.class, () -> convertTo(E1.TEST2, E2.class)); - } - - @Test - void testSelfCompatible() { - Assertions.assertTrue(isCompatible(E1.class, E1.class)); - } - - @Test - void testIsCompatible() { - Assertions.assertTrue(isCompatible(E1.class, E3.class)); - } - - @Test - void testNotCompatibleDifferentNames() { - Assertions.assertFalse(isCompatible(E1.class, E4.class)); - } - - @Test - void testNotCompatibleDifferentLength() { - Assertions.assertFalse(isCompatible(E1.class, E4.class)); - } - - @Test - void testNotCompatibleDifferentLength2() { - Assertions.assertFalse(isCompatible(E4.class, E1.class)); - } - - enum E5 { - VALUE_1, - VALUE_TWO, - value_three, - value_4 - } - - @Test - void testToEnum() { - Assertions.assertEquals(Optional.of(E1.TEST), toEnum("test", E1.class)); - Assertions.assertEquals(Optional.of(E5.VALUE_1), toEnum("VALUE_1", E5.class)); - Assertions.assertEquals(Optional.of(E5.VALUE_1), toEnum("value_1", E5.class)); - Assertions.assertEquals(Optional.of(E5.VALUE_TWO), toEnum("VALUE_TWO", E5.class)); - Assertions.assertEquals(Optional.of(E5.VALUE_TWO), toEnum("valuetwo", E5.class)); - Assertions.assertEquals(Optional.of(E5.VALUE_TWO), toEnum("valueTWO", E5.class)); - Assertions.assertEquals(Optional.of(E5.VALUE_TWO), toEnum("valueTWO$", E5.class)); - Assertions.assertEquals(Optional.of(E5.VALUE_TWO), toEnum("___valueTWO___", E5.class)); - Assertions.assertEquals(Optional.of(E5.value_three), toEnum("VALUE_THREE", E5.class)); - Assertions.assertEquals(Optional.of(E5.value_4), toEnum("VALUE_4", E5.class)); - Assertions.assertEquals(Optional.empty(), toEnum("VALUE_5", E5.class)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/features/FeatureFlagHelperTest.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/features/FeatureFlagHelperTest.java deleted file mode 100644 index 6d000e86f8f12..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/features/FeatureFlagHelperTest.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.features; - -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class FeatureFlagHelperTest { - - FeatureFlags featureFlags; - - @BeforeEach - void beforeEach() { - featureFlags = mock(FeatureFlags.class); - } - - @Test - void isFieldSelectionEnabledForWorkspaceWithEmptyString() { - when(featureFlags.fieldSelectionWorkspaces()).thenReturn(""); - - assertFalse(FeatureFlagHelper.isWorkspaceIncludedInFlag(featureFlags, FeatureFlags::fieldSelectionWorkspaces, UUID.randomUUID(), null)); - } - - @Test - void isFieldSelectionEnabledForNullWorkspaceWithEmptyString() { - when(featureFlags.fieldSelectionWorkspaces()).thenReturn(""); - - assertFalse(FeatureFlagHelper.isWorkspaceIncludedInFlag(featureFlags, FeatureFlags::fieldSelectionWorkspaces, null, null)); - } - - @Test - void isFieldSelectionEnabledForWorkspaceWithSpaceString() { - when(featureFlags.fieldSelectionWorkspaces()).thenReturn(" "); - - assertFalse(FeatureFlagHelper.isWorkspaceIncludedInFlag(featureFlags, FeatureFlags::fieldSelectionWorkspaces, UUID.randomUUID(), null)); - } - - @Test - void isFieldSelectionEnabledForWorkspaceWithNullString() { - when(featureFlags.fieldSelectionWorkspaces()).thenReturn(null); - - assertFalse(FeatureFlagHelper.isWorkspaceIncludedInFlag(featureFlags, FeatureFlags::fieldSelectionWorkspaces, UUID.randomUUID(), null)); - } - - @Test - void isFieldSelectionEnabledForWorkspaceWithSomeIdsAndAMatch() { - final UUID workspaceId = UUID.randomUUID(); - final UUID randomId = UUID.randomUUID(); - when(featureFlags.fieldSelectionWorkspaces()).thenReturn(randomId + "," + workspaceId); - - assertTrue(FeatureFlagHelper.isWorkspaceIncludedInFlag(featureFlags, FeatureFlags::fieldSelectionWorkspaces, workspaceId, null)); - } - - @Test - void isFieldSelectionEnabledForWorkspaceWithSomeIdsAndNoMatch() { - final UUID workspaceId = UUID.randomUUID(); - final UUID randomId1 = UUID.randomUUID(); - final UUID randomId2 = UUID.randomUUID(); - when(featureFlags.fieldSelectionWorkspaces()).thenReturn(randomId1 + "," + randomId2); - - assertFalse(FeatureFlagHelper.isWorkspaceIncludedInFlag(featureFlags, FeatureFlags::fieldSelectionWorkspaces, workspaceId, null)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/io/IOsTest.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/io/IOsTest.java deleted file mode 100644 index 8e6a5ceb9870d..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/io/IOsTest.java +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.io; - -import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; - -import com.google.common.collect.Iterables; -import java.io.BufferedWriter; -import java.io.Closeable; -import java.io.FileWriter; -import java.io.IOException; -import java.io.Writer; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.Collections; -import java.util.List; -import org.apache.commons.lang3.RandomStringUtils; -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; - -class IOsTest { - - private static final String ABC = "abc"; - private static final String FILE = "file"; - - @Test - void testReadWrite() throws IOException { - final Path path = Files.createTempDirectory("tmp"); - - final Path filePath = IOs.writeFile(path, FILE, ABC); - - assertEquals(path.resolve(FILE), filePath); - assertEquals(ABC, IOs.readFile(path, FILE)); - assertEquals(ABC, IOs.readFile(path.resolve(FILE))); - } - - @Test - void testWriteBytes() throws IOException { - final Path path = Files.createTempDirectory("tmp"); - - final Path filePath = IOs.writeFile(path.resolve(FILE), ABC.getBytes(StandardCharsets.UTF_8)); - - assertEquals(path.resolve(FILE), filePath); - assertEquals(ABC, IOs.readFile(path, FILE)); - } - - @Test - void testWriteFileToRandomDir() throws IOException { - final String contents = "something to remember"; - final String tmpFilePath = IOs.writeFileToRandomTmpDir("file.txt", contents); - assertEquals(contents, Files.readString(Path.of(tmpFilePath))); - } - - @Test - void testGetTailDoesNotExist() throws IOException { - final List tail = IOs.getTail(100, Path.of(RandomStringUtils.randomAlphanumeric(100))); - assertEquals(Collections.emptyList(), tail); - } - - @Test - void testGetTailExists() throws IOException { - final Path stdoutFile = Files.createTempFile("job-history-handler-test", "stdout"); - - final List head = List.of( - "line1", - "line2", - "line3", - "line4"); - - final List expectedTail = List.of( - "line5", - "line6", - "line7", - "line8"); - - final Writer writer = new BufferedWriter(new FileWriter(stdoutFile.toString(), StandardCharsets.UTF_8, true)); - - for (final String line : Iterables.concat(head, expectedTail)) { - writer.write(line + "\n"); - } - - writer.close(); - - final List tail = IOs.getTail(expectedTail.size(), stdoutFile); - assertEquals(expectedTail, tail); - } - - @Test - void testInputStream() { - assertThrows(RuntimeException.class, () -> { - IOs.inputStream(Path.of("idontexist")); - }); - } - - @Test - void testSilentClose() throws IOException { - final Closeable closeable = Mockito.mock(Closeable.class); - - assertDoesNotThrow(() -> IOs.silentClose(closeable)); - - Mockito.doThrow(new IOException()).when(closeable).close(); - assertThrows(RuntimeException.class, () -> IOs.silentClose(closeable)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/io/LineGobblerTest.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/io/LineGobblerTest.java deleted file mode 100644 index ec040173b36f4..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/io/LineGobblerTest.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.io; - -import static org.mockito.ArgumentMatchers.*; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.verify; - -import com.google.common.collect.ImmutableMap; -import com.google.common.util.concurrent.MoreExecutors; -import java.io.ByteArrayInputStream; -import java.io.InputStream; -import java.nio.charset.StandardCharsets; -import java.util.concurrent.ExecutorService; -import java.util.function.Consumer; -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; - -class LineGobblerTest { - - @Test - @SuppressWarnings("unchecked") - void readAllLines() { - final Consumer consumer = Mockito.mock(Consumer.class); - final InputStream is = new ByteArrayInputStream("test\ntest2\n".getBytes(StandardCharsets.UTF_8)); - final ExecutorService executor = spy(MoreExecutors.newDirectExecutorService()); - - executor.submit(new LineGobbler(is, consumer, executor, ImmutableMap.of())); - - Mockito.verify(consumer).accept("test"); - Mockito.verify(consumer).accept("test2"); - Mockito.verify(executor).shutdown(); - } - - @Test - @SuppressWarnings("unchecked") - void shutdownOnSuccess() { - final Consumer consumer = Mockito.mock(Consumer.class); - final InputStream is = new ByteArrayInputStream("test\ntest2\n".getBytes(StandardCharsets.UTF_8)); - final ExecutorService executor = spy(MoreExecutors.newDirectExecutorService()); - - executor.submit(new LineGobbler(is, consumer, executor, ImmutableMap.of())); - - Mockito.verify(consumer, Mockito.times(2)).accept(anyString()); - Mockito.verify(executor).shutdown(); - } - - @Test - @SuppressWarnings("unchecked") - void shutdownOnError() { - final Consumer consumer = Mockito.mock(Consumer.class); - Mockito.doThrow(RuntimeException.class).when(consumer).accept(anyString()); - final InputStream is = new ByteArrayInputStream("test\ntest2\n".getBytes(StandardCharsets.UTF_8)); - final ExecutorService executor = spy(MoreExecutors.newDirectExecutorService()); - - executor.submit(new LineGobbler(is, consumer, executor, ImmutableMap.of())); - - verify(consumer).accept(anyString()); - Mockito.verify(executor).shutdown(); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/json/JsonPathsTest.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/json/JsonPathsTest.java deleted file mode 100644 index 77cc2d685c222..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/json/JsonPathsTest.java +++ /dev/null @@ -1,229 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.json; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ArrayNode; -import com.jayway.jsonpath.PathNotFoundException; -import java.util.Collections; -import java.util.List; -import java.util.stream.Collectors; -import org.junit.jupiter.api.Test; - -class JsonPathsTest { - - private static final String JSON = """ - { - "one": [0,1,2], - "two": { "nested": 10} - }"""; - private static final JsonNode JSON_NODE = Jsons.deserialize(JSON); - private static final String LIST_ALL_QUERY = "$.one[*]"; - private static final String LIST_ONE_QUERY = "$.one[1]"; - private static final String NESTED_FIELD_QUERY = "$.two.nested"; - private static final String JSON_OBJECT_QUERY = "$.two"; - private static final String EMPTY_RETURN_QUERY = "$.three"; - private static final String REPLACEMENT_STRING = "replaced"; - private static final JsonNode REPLACEMENT_JSON = Jsons.deserialize("{ \"replacement\": \"replaced\" }"); - private static final String ONE = "one"; - - @Test - void testGetValues() { - assertEquals(List.of(0, 1, 2), JsonPaths.getValues(JSON_NODE, LIST_ALL_QUERY).stream().map(JsonNode::asInt).collect(Collectors.toList())); - assertEquals(List.of(1), JsonPaths.getValues(JSON_NODE, LIST_ONE_QUERY).stream().map(JsonNode::asInt).collect(Collectors.toList())); - assertEquals(List.of(10), JsonPaths.getValues(JSON_NODE, NESTED_FIELD_QUERY).stream().map(JsonNode::asInt).collect(Collectors.toList())); - assertEquals(JSON_NODE.get("two"), JsonPaths.getValues(JSON_NODE, JSON_OBJECT_QUERY).stream().findFirst().orElse(null)); - assertEquals(Collections.emptyList(), JsonPaths.getValues(JSON_NODE, EMPTY_RETURN_QUERY)); - } - - @Test - void testGetSingleValue() { - assertThrows(IllegalArgumentException.class, () -> JsonPaths.getSingleValue(JSON_NODE, LIST_ALL_QUERY)); - assertEquals(1, JsonPaths.getSingleValue(JSON_NODE, LIST_ONE_QUERY).map(JsonNode::asInt).orElse(null)); - assertEquals(10, JsonPaths.getSingleValue(JSON_NODE, NESTED_FIELD_QUERY).map(JsonNode::asInt).orElse(null)); - assertEquals(JSON_NODE.get("two"), JsonPaths.getSingleValue(JSON_NODE, JSON_OBJECT_QUERY).orElse(null)); - assertNull(JsonPaths.getSingleValue(JSON_NODE, EMPTY_RETURN_QUERY).orElse(null)); - } - - @Test - void testGetPaths() { - assertEquals(List.of("$['one'][0]", "$['one'][1]", "$['one'][2]"), JsonPaths.getPaths(JSON_NODE, LIST_ALL_QUERY)); - assertEquals(List.of("$['one'][1]"), JsonPaths.getPaths(JSON_NODE, LIST_ONE_QUERY)); - assertEquals(List.of("$['two']['nested']"), JsonPaths.getPaths(JSON_NODE, NESTED_FIELD_QUERY)); - assertEquals(List.of("$['two']"), JsonPaths.getPaths(JSON_NODE, JSON_OBJECT_QUERY)); - assertEquals(Collections.emptyList(), JsonPaths.getPaths(JSON_NODE, EMPTY_RETURN_QUERY)); - } - - @Test - void testIsPathPresent() { - assertThrows(IllegalArgumentException.class, () -> JsonPaths.isPathPresent(JSON_NODE, LIST_ALL_QUERY)); - assertTrue(JsonPaths.isPathPresent(JSON_NODE, LIST_ONE_QUERY)); - assertTrue(JsonPaths.isPathPresent(JSON_NODE, NESTED_FIELD_QUERY)); - assertTrue(JsonPaths.isPathPresent(JSON_NODE, JSON_OBJECT_QUERY)); - assertFalse(JsonPaths.isPathPresent(JSON_NODE, EMPTY_RETURN_QUERY)); - } - - @Test - void testReplaceAtStringLoud() { - assertOriginalObjectNotModified(JSON_NODE, () -> { - final JsonNode expected = Jsons.clone(JSON_NODE); - ((ArrayNode) expected.get(ONE)).set(1, REPLACEMENT_STRING); - - final JsonNode actual = JsonPaths.replaceAtStringLoud(JSON_NODE, LIST_ONE_QUERY, REPLACEMENT_STRING); - assertEquals(expected, actual); - }); - } - - @SuppressWarnings("CodeBlock2Expr") - @Test - void testReplaceAtStringLoudEmptyPathThrows() { - assertOriginalObjectNotModified(JSON_NODE, () -> { - assertThrows(PathNotFoundException.class, () -> JsonPaths.replaceAtStringLoud(JSON_NODE, EMPTY_RETURN_QUERY, REPLACEMENT_STRING)); - }); - } - - @Test - void testReplaceAtString() { - assertOriginalObjectNotModified(JSON_NODE, () -> { - final JsonNode expected = Jsons.clone(JSON_NODE); - ((ArrayNode) expected.get(ONE)).set(1, REPLACEMENT_STRING); - - final JsonNode actual = JsonPaths.replaceAtString(JSON_NODE, LIST_ONE_QUERY, REPLACEMENT_STRING); - assertEquals(expected, actual); - }); - } - - @Test - void testReplaceAtStringEmptyReturnNoOp() { - assertOriginalObjectNotModified(JSON_NODE, () -> { - final JsonNode expected = Jsons.clone(JSON_NODE); - - final JsonNode actual = JsonPaths.replaceAtString(JSON_NODE, EMPTY_RETURN_QUERY, REPLACEMENT_STRING); - assertEquals(expected, actual); - }); - } - - @Test - void testReplaceAtJsonNodeLoud() { - assertOriginalObjectNotModified(JSON_NODE, () -> { - final JsonNode expected = Jsons.clone(JSON_NODE); - ((ArrayNode) expected.get(ONE)).set(1, REPLACEMENT_JSON); - - final JsonNode actual = JsonPaths.replaceAtJsonNodeLoud(JSON_NODE, LIST_ONE_QUERY, REPLACEMENT_JSON); - assertEquals(expected, actual); - }); - } - - @SuppressWarnings("CodeBlock2Expr") - @Test - void testReplaceAtJsonNodeLoudEmptyPathThrows() { - assertOriginalObjectNotModified(JSON_NODE, () -> { - assertThrows(PathNotFoundException.class, () -> JsonPaths.replaceAtJsonNodeLoud(JSON_NODE, EMPTY_RETURN_QUERY, REPLACEMENT_JSON)); - }); - } - - @Test - void testReplaceAtJsonNodeLoudMultipleReplace() { - assertOriginalObjectNotModified(JSON_NODE, () -> { - final JsonNode expected = Jsons.clone(JSON_NODE); - ((ArrayNode) expected.get(ONE)).set(0, REPLACEMENT_JSON); - ((ArrayNode) expected.get(ONE)).set(1, REPLACEMENT_JSON); - ((ArrayNode) expected.get(ONE)).set(2, REPLACEMENT_JSON); - - final JsonNode actual = JsonPaths.replaceAtJsonNodeLoud(JSON_NODE, LIST_ALL_QUERY, REPLACEMENT_JSON); - assertEquals(expected, actual); - }); - } - - // todo (cgardens) - this behavior is a little unintuitive, but based on the docs, there's not an - // obvious workaround. in this case, i would expect this to silently do nothing instead of throwing. - // for now just documenting it with a test. to avoid this, use the non-loud version of this method. - @SuppressWarnings("CodeBlock2Expr") - @Test - void testReplaceAtJsonNodeLoudMultipleReplaceSplatInEmptyArrayThrows() { - final JsonNode expected = Jsons.clone(JSON_NODE); - ((ArrayNode) expected.get(ONE)).removeAll(); - - assertOriginalObjectNotModified(expected, () -> { - assertThrows(PathNotFoundException.class, () -> JsonPaths.replaceAtJsonNodeLoud(expected, "$.one[*]", REPLACEMENT_JSON)); - }); - } - - @Test - void testReplaceAtJsonNode() { - assertOriginalObjectNotModified(JSON_NODE, () -> { - final JsonNode expected = Jsons.clone(JSON_NODE); - ((ArrayNode) expected.get(ONE)).set(1, REPLACEMENT_JSON); - - final JsonNode actual = JsonPaths.replaceAtJsonNode(JSON_NODE, LIST_ONE_QUERY, REPLACEMENT_JSON); - assertEquals(expected, actual); - }); - } - - @Test - void testReplaceAtJsonNodeEmptyReturnNoOp() { - assertOriginalObjectNotModified(JSON_NODE, () -> { - final JsonNode expected = Jsons.clone(JSON_NODE); - - final JsonNode actual = JsonPaths.replaceAtJsonNode(JSON_NODE, EMPTY_RETURN_QUERY, REPLACEMENT_JSON); - assertEquals(expected, actual); - }); - } - - @Test - void testReplaceAt() { - assertOriginalObjectNotModified(JSON_NODE, () -> { - final JsonNode expected = Jsons.clone(JSON_NODE); - ((ArrayNode) expected.get(ONE)).set(1, "1-$['one'][1]"); - - final JsonNode actual = JsonPaths.replaceAt(JSON_NODE, LIST_ONE_QUERY, (node, path) -> Jsons.jsonNode(node + "-" + path)); - assertEquals(expected, actual); - }); - } - - @Test - void testReplaceAtMultiple() { - assertOriginalObjectNotModified(JSON_NODE, () -> { - final JsonNode expected = Jsons.clone(JSON_NODE); - ((ArrayNode) expected.get(ONE)).set(0, "0-$['one'][0]"); - ((ArrayNode) expected.get(ONE)).set(1, "1-$['one'][1]"); - ((ArrayNode) expected.get(ONE)).set(2, "2-$['one'][2]"); - - final JsonNode actual = JsonPaths.replaceAt(JSON_NODE, LIST_ALL_QUERY, (node, path) -> Jsons.jsonNode(node + "-" + path)); - assertEquals(expected, actual); - }); - } - - @Test - void testReplaceAtEmptyReturnNoOp() { - assertOriginalObjectNotModified(JSON_NODE, () -> { - final JsonNode expected = Jsons.clone(JSON_NODE); - - final JsonNode actual = JsonPaths.replaceAt(JSON_NODE, EMPTY_RETURN_QUERY, (node, path) -> Jsons.jsonNode(node + "-" + path)); - assertEquals(expected, actual); - }); - } - - /** - * For all replacement functions, they should NOT mutate in place. Helper assertion to verify that - * invariant. - * - * @param json - json object used for testing - * @param runnable - the rest of the test code that does the replacement - */ - private static void assertOriginalObjectNotModified(final JsonNode json, final Runnable runnable) { - final JsonNode originalJsonNode = Jsons.clone(json); - runnable.run(); - // verify the original object was not mutated. - assertEquals(originalJsonNode, json); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/json/JsonSchemasTest.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/json/JsonSchemasTest.java deleted file mode 100644 index f4d71c297cec6..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/json/JsonSchemasTest.java +++ /dev/null @@ -1,153 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.json; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.Mockito.mock; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.json.JsonSchemas.FieldNameOrList; -import io.airbyte.commons.resources.MoreResources; -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.function.BiConsumer; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.ValueSource; -import org.mockito.InOrder; -import org.mockito.Mockito; - -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class JsonSchemasTest { - - private static final String UNCHECKED = "unchecked"; - private static final String NAME = "name"; - private static final String PROPERTIES = "properties"; - private static final String PETS = "pets"; - private static final String COMPANY = "company"; - private static final String ITEMS = "items"; - private static final String USER = "user"; - - @Test - void testMutateTypeToArrayStandard() { - final JsonNode expectedWithoutType = Jsons.deserialize("{\"test\":\"abc\"}"); - final JsonNode actualWithoutType = Jsons.clone(expectedWithoutType); - JsonSchemas.mutateTypeToArrayStandard(expectedWithoutType); - assertEquals(expectedWithoutType, actualWithoutType); - - final JsonNode expectedWithArrayType = Jsons.deserialize("{\"test\":\"abc\", \"type\":[\"object\"]}"); - final JsonNode actualWithArrayType = Jsons.clone(expectedWithArrayType); - JsonSchemas.mutateTypeToArrayStandard(actualWithArrayType); - assertEquals(expectedWithoutType, actualWithoutType); - - final JsonNode expectedWithoutArrayType = Jsons.deserialize("{\"test\":\"abc\", \"type\":[\"object\"]}"); - final JsonNode actualWithStringType = Jsons.deserialize("{\"test\":\"abc\", \"type\":\"object\"}"); - JsonSchemas.mutateTypeToArrayStandard(actualWithStringType); - assertEquals(expectedWithoutArrayType, actualWithStringType); - } - - @SuppressWarnings(UNCHECKED) - @Test - void testTraverse() throws IOException { - final JsonNode jsonWithAllTypes = Jsons.deserialize(MoreResources.readResource("json_schemas/json_with_all_types.json")); - final BiConsumer> mock = mock(BiConsumer.class); - - JsonSchemas.traverseJsonSchema(jsonWithAllTypes, mock); - final InOrder inOrder = Mockito.inOrder(mock); - inOrder.verify(mock).accept(jsonWithAllTypes, Collections.emptyList()); - inOrder.verify(mock).accept(jsonWithAllTypes.get(PROPERTIES).get(NAME), List.of(FieldNameOrList.fieldName(NAME))); - inOrder.verify(mock).accept(jsonWithAllTypes.get(PROPERTIES).get(NAME).get(PROPERTIES).get("first"), - List.of(FieldNameOrList.fieldName(NAME), FieldNameOrList.fieldName("first"))); - inOrder.verify(mock).accept(jsonWithAllTypes.get(PROPERTIES).get(NAME).get(PROPERTIES).get("last"), - List.of(FieldNameOrList.fieldName(NAME), FieldNameOrList.fieldName("last"))); - inOrder.verify(mock).accept(jsonWithAllTypes.get(PROPERTIES).get(COMPANY), List.of(FieldNameOrList.fieldName(COMPANY))); - inOrder.verify(mock).accept(jsonWithAllTypes.get(PROPERTIES).get(PETS), List.of(FieldNameOrList.fieldName(PETS))); - inOrder.verify(mock).accept(jsonWithAllTypes.get(PROPERTIES).get(PETS).get(ITEMS), - List.of(FieldNameOrList.fieldName(PETS), FieldNameOrList.list())); - inOrder.verify(mock).accept(jsonWithAllTypes.get(PROPERTIES).get(PETS).get(ITEMS).get(PROPERTIES).get("type"), - List.of(FieldNameOrList.fieldName(PETS), FieldNameOrList.list(), FieldNameOrList.fieldName("type"))); - inOrder.verify(mock).accept(jsonWithAllTypes.get(PROPERTIES).get(PETS).get(ITEMS).get(PROPERTIES).get("number"), - List.of(FieldNameOrList.fieldName(PETS), FieldNameOrList.list(), FieldNameOrList.fieldName("number"))); - inOrder.verifyNoMoreInteractions(); - } - - @SuppressWarnings(UNCHECKED) - @ValueSource(strings = { - "anyOf", - "oneOf", - "allOf" - }) - @ParameterizedTest - void testTraverseComposite(final String compositeKeyword) throws IOException { - final String jsonSchemaString = MoreResources.readResource("json_schemas/composite_json_schema.json") - .replaceAll("", compositeKeyword); - final JsonNode jsonWithAllTypes = Jsons.deserialize(jsonSchemaString); - final BiConsumer> mock = mock(BiConsumer.class); - - JsonSchemas.traverseJsonSchema(jsonWithAllTypes, mock); - - final InOrder inOrder = Mockito.inOrder(mock); - inOrder.verify(mock).accept(jsonWithAllTypes, Collections.emptyList()); - inOrder.verify(mock).accept(jsonWithAllTypes.get(compositeKeyword).get(0), Collections.emptyList()); - inOrder.verify(mock).accept(jsonWithAllTypes.get(compositeKeyword).get(1), Collections.emptyList()); - inOrder.verify(mock).accept(jsonWithAllTypes.get(compositeKeyword).get(1).get(PROPERTIES).get("prop1"), - List.of(FieldNameOrList.fieldName("prop1"))); - inOrder.verify(mock).accept(jsonWithAllTypes.get(compositeKeyword).get(2), Collections.emptyList()); - inOrder.verify(mock).accept(jsonWithAllTypes.get(compositeKeyword).get(2).get(ITEMS), List.of(FieldNameOrList.list())); - inOrder.verify(mock).accept(jsonWithAllTypes.get(compositeKeyword).get(3).get(compositeKeyword).get(0), Collections.emptyList()); - inOrder.verify(mock).accept(jsonWithAllTypes.get(compositeKeyword).get(3).get(compositeKeyword).get(1), Collections.emptyList()); - inOrder.verify(mock).accept(jsonWithAllTypes.get(compositeKeyword).get(3).get(compositeKeyword).get(1).get(ITEMS), - List.of(FieldNameOrList.list())); - inOrder.verifyNoMoreInteractions(); - } - - @SuppressWarnings(UNCHECKED) - @Test - void testTraverseMultiType() throws IOException { - final JsonNode jsonWithAllTypes = Jsons.deserialize(MoreResources.readResource("json_schemas/json_with_array_type_fields.json")); - final BiConsumer> mock = mock(BiConsumer.class); - - JsonSchemas.traverseJsonSchema(jsonWithAllTypes, mock); - final InOrder inOrder = Mockito.inOrder(mock); - inOrder.verify(mock).accept(jsonWithAllTypes, Collections.emptyList()); - inOrder.verify(mock).accept(jsonWithAllTypes.get(PROPERTIES).get(COMPANY), List.of(FieldNameOrList.fieldName(COMPANY))); - inOrder.verify(mock).accept(jsonWithAllTypes.get(ITEMS), List.of(FieldNameOrList.list())); - inOrder.verify(mock).accept(jsonWithAllTypes.get(ITEMS).get(PROPERTIES).get(USER), - List.of(FieldNameOrList.list(), FieldNameOrList.fieldName(USER))); - inOrder.verifyNoMoreInteractions(); - } - - @SuppressWarnings(UNCHECKED) - @Test - void testTraverseMultiTypeComposite() throws IOException { - final String compositeKeyword = "anyOf"; - final JsonNode jsonWithAllTypes = Jsons.deserialize(MoreResources.readResource("json_schemas/json_with_array_type_fields_with_composites.json")); - final BiConsumer> mock = mock(BiConsumer.class); - - JsonSchemas.traverseJsonSchema(jsonWithAllTypes, mock); - - final InOrder inOrder = Mockito.inOrder(mock); - inOrder.verify(mock).accept(jsonWithAllTypes, Collections.emptyList()); - inOrder.verify(mock).accept(jsonWithAllTypes.get(compositeKeyword).get(0).get(PROPERTIES).get(COMPANY), - List.of(FieldNameOrList.fieldName(COMPANY))); - inOrder.verify(mock).accept(jsonWithAllTypes.get(compositeKeyword).get(1).get(PROPERTIES).get("organization"), - List.of(FieldNameOrList.fieldName("organization"))); - inOrder.verify(mock).accept(jsonWithAllTypes.get(ITEMS), List.of(FieldNameOrList.list())); - inOrder.verify(mock).accept(jsonWithAllTypes.get(ITEMS).get(PROPERTIES).get(USER), - List.of(FieldNameOrList.list(), FieldNameOrList.fieldName("user"))); - inOrder.verifyNoMoreInteractions(); - } - - @SuppressWarnings(UNCHECKED) - @Test - void testTraverseArrayTypeWithNoItemsDoNotThrowsException() throws IOException { - final JsonNode jsonWithAllTypes = Jsons.deserialize(MoreResources.readResource("json_schemas/json_with_array_type_fields_no_items.json")); - final BiConsumer> mock = mock(BiConsumer.class); - - JsonSchemas.traverseJsonSchema(jsonWithAllTypes, mock); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/json/JsonsTest.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/json/JsonsTest.java deleted file mode 100644 index 4ab0edf72857c..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/json/JsonsTest.java +++ /dev/null @@ -1,376 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.json; - -import static org.junit.jupiter.api.Assertions.assertArrayEquals; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotSame; -import static org.junit.jupiter.api.Assertions.assertNull; - -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.BinaryNode; -import com.google.common.base.Charsets; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; -import java.nio.charset.StandardCharsets; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Optional; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import org.junit.jupiter.api.Test; - -class JsonsTest { - - private static final String SERIALIZED_JSON = "{\"str\":\"abc\",\"num\":999,\"numLong\":888}"; - private static final String SERIALIZED_JSON2 = "{\"str\":\"abc\"}"; - private static final String ABC = "abc"; - private static final String DEF = "def"; - private static final String GHI = "ghi"; - private static final String JKL = "jkl"; - private static final String MNO = "mno"; - private static final String PQR = "pqr"; - private static final String STU = "stu"; - private static final String TEST = "test"; - private static final String TEST2 = "test2"; - private static final String XYZ = "xyz"; - - @Test - void testSerialize() { - assertEquals( - SERIALIZED_JSON, - Jsons.serialize(new ToClass(ABC, 999, 888L))); - - assertEquals( - "{\"test\":\"abc\",\"test2\":\"def\"}", - Jsons.serialize( - ImmutableMap.of( - TEST, ABC, - TEST2, DEF))); - } - - @Test - void testSerializeJsonNode() { - assertEquals( - SERIALIZED_JSON, - Jsons.serialize(Jsons.jsonNode(new ToClass(ABC, 999, 888L)))); - - assertEquals( - "{\"test\":\"abc\",\"test2\":\"def\"}", - Jsons.serialize(Jsons.jsonNode(ImmutableMap.of( - TEST, ABC, - TEST2, DEF)))); - // issue: 5878 add test for binary node serialization, binary data are - // serialized into base64 - assertEquals( - "{\"test\":\"dGVzdA==\"}", - Jsons.serialize(Jsons.jsonNode(ImmutableMap.of( - TEST, new BinaryNode("test".getBytes(StandardCharsets.UTF_8)))))); - } - - @Test - void testDeserialize() { - assertEquals( - new ToClass(ABC, 999, 888L), - Jsons.deserialize("{\"str\":\"abc\", \"num\": 999, \"numLong\": 888}", ToClass.class)); - } - - @Test - void testDeserializeToJsonNode() { - assertEquals( - SERIALIZED_JSON2, - Jsons.deserialize(SERIALIZED_JSON2).toString()); - - assertEquals( - "[{\"str\":\"abc\"},{\"str\":\"abc\"}]", - Jsons.deserialize("[{\"str\":\"abc\"},{\"str\":\"abc\"}]").toString()); - // issue: 5878 add test for binary node deserialization, for now should be - // base64 string - assertEquals( - "{\"test\":\"dGVzdA==\"}", - Jsons.deserialize("{\"test\":\"dGVzdA==\"}").toString()); - } - - @Test - void testTryDeserialize() { - assertEquals( - Optional.of(new ToClass(ABC, 999, 888L)), - Jsons.tryDeserialize("{\"str\":\"abc\", \"num\": 999, \"numLong\": 888}", ToClass.class)); - - assertEquals( - Optional.of(new ToClass(ABC, 999, 0L)), - Jsons.tryDeserialize("{\"str\":\"abc\", \"num\": 999, \"test\": 888}", ToClass.class)); - } - - @Test - void testTryDeserializeToJsonNode() { - assertEquals( - Optional.of(Jsons.deserialize(SERIALIZED_JSON2)), - Jsons.tryDeserialize(SERIALIZED_JSON2)); - - assertEquals( - Optional.empty(), - Jsons.tryDeserialize("{\"str\":\"abc\", \"num\": 999, \"test}")); - } - - @Test - void testToJsonNode() { - assertEquals( - SERIALIZED_JSON, - Jsons.jsonNode(new ToClass(ABC, 999, 888L)).toString()); - - assertEquals( - "{\"test\":\"abc\",\"test2\":\"def\"}", - Jsons.jsonNode( - ImmutableMap.of( - TEST, ABC, - TEST2, DEF)) - .toString()); - - assertEquals( - "{\"test\":\"abc\",\"test2\":{\"inner\":1}}", - Jsons.jsonNode( - ImmutableMap.of( - TEST, ABC, - TEST2, ImmutableMap.of("inner", 1))) - .toString()); - - assertEquals( - Jsons.jsonNode(new ToClass(ABC, 999, 888L)), - Jsons.jsonNode(Jsons.jsonNode(new ToClass(ABC, 999, 888L)))); - } - - @Test - void testEmptyObject() { - assertEquals(Jsons.deserialize("{}"), Jsons.emptyObject()); - } - - @Test - void testArrayNode() { - assertEquals(Jsons.deserialize("[]"), Jsons.arrayNode()); - } - - @Test - void testToObject() { - final ToClass expected = new ToClass(ABC, 999, 888L); - assertEquals( - expected, - Jsons.object(Jsons.jsonNode(expected), ToClass.class)); - - assertEquals( - Lists.newArrayList(expected), - Jsons.object(Jsons.jsonNode(Lists.newArrayList(expected)), new TypeReference>() {})); - - assertEquals( - new ToClass(), - Jsons.object(Jsons.deserialize("{\"a\":1}"), ToClass.class)); - } - - @Test - void testTryToObject() { - final ToClass expected = new ToClass(ABC, 999, 888L); - assertEquals( - Optional.of(expected), - Jsons.tryObject(Jsons.deserialize(SERIALIZED_JSON), ToClass.class)); - - assertEquals( - Optional.of(expected), - Jsons.tryObject(Jsons.deserialize(SERIALIZED_JSON), new TypeReference() {})); - - final ToClass emptyExpected = new ToClass(); - assertEquals( - Optional.of(emptyExpected), - Jsons.tryObject(Jsons.deserialize("{\"str1\":\"abc\"}"), ToClass.class)); - - assertEquals( - Optional.of(emptyExpected), - Jsons.tryObject(Jsons.deserialize("{\"str1\":\"abc\"}"), new TypeReference() {})); - - } - - @Test - void testClone() { - final ToClass expected = new ToClass("abc", 999, 888L); - final ToClass actual = Jsons.clone(expected); - assertNotSame(expected, actual); - assertEquals(expected, actual); - } - - @Test - void testToBytes() { - final String jsonString = "{\"test\":\"abc\",\"type\":[\"object\"]}"; - assertArrayEquals(jsonString.getBytes(Charsets.UTF_8), Jsons.toBytes(Jsons.deserialize(jsonString))); - } - - @Test - void testKeys() { - // test object json node - final JsonNode jsonNode = Jsons.jsonNode(ImmutableMap.of(TEST, ABC, TEST2, DEF)); - assertEquals(Sets.newHashSet(TEST, TEST2), Jsons.keys(jsonNode)); - - // test literal jsonNode - assertEquals(Collections.emptySet(), Jsons.keys(jsonNode.get("test"))); - - // test nested object json node. should only return top-level keys. - final JsonNode nestedJsonNode = Jsons.jsonNode(ImmutableMap.of(TEST, ABC, TEST2, ImmutableMap.of("test3", "def"))); - assertEquals(Sets.newHashSet(TEST, TEST2), Jsons.keys(nestedJsonNode)); - - // test array json node - final JsonNode arrayJsonNode = Jsons.jsonNode(ImmutableList.of(ImmutableMap.of(TEST, ABC, TEST2, DEF))); - assertEquals(Collections.emptySet(), Jsons.keys(arrayJsonNode)); - } - - @Test - void testToPrettyString() { - final JsonNode jsonNode = Jsons.jsonNode(ImmutableMap.of(TEST, ABC)); - final String expectedOutput = "" - + "{\n" - + " \"test\": \"abc\"\n" - + "}\n"; - assertEquals(expectedOutput, Jsons.toPrettyString(jsonNode)); - } - - @Test - void testGetOptional() { - final JsonNode json = Jsons - .deserialize("{ \"abc\": { \"def\": \"ghi\" }, \"jkl\": {}, \"mno\": \"pqr\", \"stu\": null }"); - - assertEquals(Optional.of(Jsons.jsonNode(GHI)), Jsons.getOptional(json, ABC, DEF)); - assertEquals(Optional.of(Jsons.emptyObject()), Jsons.getOptional(json, JKL)); - assertEquals(Optional.of(Jsons.jsonNode(PQR)), Jsons.getOptional(json, MNO)); - assertEquals(Optional.of(Jsons.jsonNode(null)), Jsons.getOptional(json, STU)); - assertEquals(Optional.empty(), Jsons.getOptional(json, XYZ)); - assertEquals(Optional.empty(), Jsons.getOptional(json, ABC, XYZ)); - assertEquals(Optional.empty(), Jsons.getOptional(json, ABC, DEF, XYZ)); - assertEquals(Optional.empty(), Jsons.getOptional(json, ABC, JKL, XYZ)); - assertEquals(Optional.empty(), Jsons.getOptional(json, STU, XYZ)); - } - - @Test - void testGetStringOrNull() { - final JsonNode json = Jsons.deserialize("{ \"abc\": { \"def\": \"ghi\" }, \"jkl\": \"mno\", \"pqr\": 1 }"); - - assertEquals(GHI, Jsons.getStringOrNull(json, ABC, DEF)); - assertEquals(MNO, Jsons.getStringOrNull(json, JKL)); - assertEquals("1", Jsons.getStringOrNull(json, PQR)); - assertNull(Jsons.getStringOrNull(json, ABC, DEF, XYZ)); - assertNull(Jsons.getStringOrNull(json, XYZ)); - } - - @Test - void testGetEstimatedByteSize() { - final JsonNode json = Jsons.deserialize("{\"string_key\":\"abc\",\"array_key\":[\"item1\", \"item2\"]}"); - assertEquals(Jsons.toBytes(json).length, Jsons.getEstimatedByteSize(json)); - } - - @Test - void testFlatten__noArrays() { - final JsonNode json = Jsons.deserialize("{ \"abc\": { \"def\": \"ghi\" }, \"jkl\": true, \"pqr\": 1 }"); - Map expected = Stream.of(new Object[][] { - {"abc.def", GHI}, - {JKL, true}, - {PQR, 1}, - }).collect(Collectors.toMap(data -> (String) data[0], data -> data[1])); - assertEquals(expected, Jsons.flatten(json, false)); - } - - @Test - void testFlatten__withArraysNoApplyFlatten() { - final JsonNode json = Jsons - .deserialize("{ \"abc\": [{ \"def\": \"ghi\" }, { \"fed\": \"ihg\" }], \"jkl\": true, \"pqr\": 1 }"); - Map expected = Stream.of(new Object[][] { - {ABC, "[{\"def\":\"ghi\"},{\"fed\":\"ihg\"}]"}, - {JKL, true}, - {PQR, 1}, - }).collect(Collectors.toMap(data -> (String) data[0], data -> data[1])); - assertEquals(expected, Jsons.flatten(json, false)); - } - - @Test - void testFlatten__checkBackwardCompatiblity() { - final JsonNode json = Jsons - .deserialize("{ \"abc\": [{ \"def\": \"ghi\" }, { \"fed\": \"ihg\" }], \"jkl\": true, \"pqr\": 1 }"); - Map expected = Stream.of(new Object[][] { - {ABC, "[{\"def\":\"ghi\"},{\"fed\":\"ihg\"}]"}, - {JKL, true}, - {PQR, 1}, - }).collect(Collectors.toMap(data -> (String) data[0], data -> data[1])); - assertEquals(expected, Jsons.flatten(json)); - } - - @Test - void testFlatten__withArraysApplyFlatten() { - final JsonNode json = Jsons - .deserialize("{ \"abc\": [{ \"def\": \"ghi\" }, { \"fed\": \"ihg\" }], \"jkl\": true, \"pqr\": 1 }"); - Map expected = Stream.of(new Object[][] { - {"abc.[0].def", "ghi"}, - {"abc.[1].fed", "ihg"}, - {JKL, true}, - {PQR, 1}, - }).collect(Collectors.toMap(data -> (String) data[0], data -> data[1])); - assertEquals(expected, Jsons.flatten(json, true)); - } - - @Test - void testFlatten__withArraysApplyFlattenNested() { - final JsonNode json = Jsons - .deserialize( - "{ \"abc\": [{ \"def\": {\"ghi\": [\"xyz\"] }}, { \"fed\": \"ihg\" }], \"jkl\": true, \"pqr\": 1 }"); - Map expected = Stream.of(new Object[][] { - {"abc.[0].def.ghi.[0]", "xyz"}, - {"abc.[1].fed", "ihg"}, - {JKL, true}, - {PQR, 1}, - }).collect(Collectors.toMap(data -> (String) data[0], data -> data[1])); - assertEquals(expected, Jsons.flatten(json, true)); - } - - private static class ToClass { - - @JsonProperty("str") - String str; - - @JsonProperty("num") - Integer num; - - @JsonProperty("numLong") - long numLong; - - public ToClass() {} - - public ToClass(final String str, final Integer num, final long numLong) { - this.str = str; - this.num = num; - this.numLong = numLong; - } - - @Override - public boolean equals(final Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - final ToClass toClass = (ToClass) o; - return numLong == toClass.numLong - && Objects.equals(str, toClass.str) - && Objects.equals(num, toClass.num); - } - - @Override - public int hashCode() { - return Objects.hash(str, num, numLong); - } - - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/lang/CloseableShutdownHookTest.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/lang/CloseableShutdownHookTest.java deleted file mode 100644 index 9ec099d1595c1..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/lang/CloseableShutdownHookTest.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.lang; - -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; - -import java.io.InputStream; -import org.junit.jupiter.api.Test; - -class CloseableShutdownHookTest { - - @Test - void testRegisteringShutdownHook() throws Exception { - final InputStream closeable = mock(InputStream.class); - final CloseableQueue autoCloseable = mock(CloseableQueue.class); - final String notCloseable = "Not closeable"; - - final Thread thread = CloseableShutdownHook.buildShutdownHookThread(closeable, autoCloseable, notCloseable, null); - thread.run(); - - verify(closeable, times(1)).close(); - verify(autoCloseable, times(1)).close(); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/lang/ExceptionsTest.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/lang/ExceptionsTest.java deleted file mode 100644 index 0f4b6da55a928..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/lang/ExceptionsTest.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.lang; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import org.junit.jupiter.api.Test; - -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class ExceptionsTest { - - @Test - void testToRuntime() { - assertEquals("hello", Exceptions.toRuntime(() -> callable("hello", false))); - assertThrows(RuntimeException.class, () -> Exceptions.toRuntime(() -> callable("goodbye", true))); - } - - @Test - void testToRuntimeVoid() { - final List list = new ArrayList<>(); - assertThrows(RuntimeException.class, () -> Exceptions.toRuntime(() -> voidCallable(list, "hello", true))); - assertEquals(0, list.size()); - - Exceptions.toRuntime(() -> voidCallable(list, "goodbye", false)); - assertEquals(1, list.size()); - assertEquals("goodbye", list.get(0)); - } - - @Test - void testSwallow() { - Exceptions.swallow(() -> { - throw new RuntimeException(); - }); - Exceptions.swallow(() -> { - throw new Exception(); - }); - } - - private String callable(final String input, final boolean shouldThrow) throws IOException { - if (shouldThrow) { - throw new IOException(); - } else { - return input; - } - } - - private void voidCallable(final List list, final String input, final boolean shouldThrow) throws IOException { - if (shouldThrow) { - throw new IOException(); - } else { - list.add(input); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/lang/MoreBooleansTest.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/lang/MoreBooleansTest.java deleted file mode 100644 index eb0ca475a65a8..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/lang/MoreBooleansTest.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.lang; - -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import org.junit.jupiter.api.Test; - -class MoreBooleansTest { - - @SuppressWarnings("ConstantConditions") - @Test - void evaluateNullAsFalse() { - assertTrue(MoreBooleans.isTruthy(Boolean.TRUE)); - assertFalse(MoreBooleans.isTruthy(Boolean.FALSE)); - assertFalse(MoreBooleans.isTruthy(null)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/logging/MdcScopeTest.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/logging/MdcScopeTest.java deleted file mode 100644 index d545f1d4eedfb..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/logging/MdcScopeTest.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.logging; - -import java.util.Map; -import org.assertj.core.api.Assertions; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.slf4j.MDC; - -class MdcScopeTest { - - private static final Map originalMap = Map.of("test", "entry", "testOverride", "should be overrided"); - - private static final Map modificationInMDC = Map.of("new", "will be added", "testOverride", "will override"); - - @BeforeEach - void init() { - MDC.setContextMap(originalMap); - } - - @Test - // The MDC context is properly overrided - void testMDCModified() { - try (final MdcScope ignored = new MdcScope(modificationInMDC)) { - final Map mdcState = MDC.getCopyOfContextMap(); - - Assertions.assertThat(mdcState).containsExactlyInAnyOrderEntriesOf( - Map.of("test", "entry", "new", "will be added", "testOverride", "will override")); - } - } - - @Test - // The MDC context is properly restored - void testMDCRestore() { - try (final MdcScope ignored = new MdcScope(modificationInMDC)) {} - - final Map mdcState = MDC.getCopyOfContextMap(); - - Assertions.assertThat(mdcState).containsAllEntriesOf(originalMap); - Assertions.assertThat(mdcState).doesNotContainKey("new"); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/map/MoreMapsTest.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/map/MoreMapsTest.java deleted file mode 100644 index f9117a0641ae3..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/map/MoreMapsTest.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.map; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import com.google.common.collect.ImmutableMap; -import java.util.Map; -import org.junit.jupiter.api.Test; - -class MoreMapsTest { - - @Test - void testMerge() { - final Map map1 = ImmutableMap.of("a", 3, "b", 2); - final Map map2 = ImmutableMap.of("a", 1); - - assertEquals(ImmutableMap.of("a", 1, "b", 2), MoreMaps.merge(map1, map2)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/resources/MoreResourcesTest.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/resources/MoreResourcesTest.java deleted file mode 100644 index 9504ac6c33740..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/resources/MoreResourcesTest.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.resources; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; - -import com.google.common.collect.Sets; -import io.airbyte.commons.io.IOs; -import java.io.File; -import java.io.IOException; -import java.net.URISyntaxException; -import java.nio.charset.StandardCharsets; -import java.nio.file.Path; -import java.util.stream.Collectors; -import org.junit.jupiter.api.Test; - -class MoreResourcesTest { - - private static final String CONTENT_1 = "content1\n"; - private static final String CONTENT_2 = "content2\n"; - private static final String RESOURCE_TEST = "resource_test"; - - @Test - void testResourceRead() throws IOException { - assertEquals(CONTENT_1, MoreResources.readResource(RESOURCE_TEST)); - assertEquals(CONTENT_2, MoreResources.readResource("subdir/resource_test_sub")); - - assertThrows(IllegalArgumentException.class, () -> MoreResources.readResource("invalid")); - } - - @Test - void testResourceReadWithClass() throws IOException { - assertEquals(CONTENT_1, MoreResources.readResource(MoreResourcesTest.class, RESOURCE_TEST)); - assertEquals(CONTENT_2, MoreResources.readResource(MoreResourcesTest.class, "subdir/resource_test_sub")); - - assertEquals(CONTENT_1, MoreResources.readResource(MoreResourcesTest.class, "/resource_test")); - assertEquals(CONTENT_2, MoreResources.readResource(MoreResourcesTest.class, "/subdir/resource_test_sub")); - - assertThrows(IllegalArgumentException.class, () -> MoreResources.readResource(MoreResourcesTest.class, "invalid")); - } - - @Test - void testReadResourceAsFile() throws URISyntaxException { - final File file = MoreResources.readResourceAsFile(RESOURCE_TEST); - assertEquals(CONTENT_1, IOs.readFile(file.toPath())); - } - - @Test - void testReadBytes() throws IOException { - assertEquals(CONTENT_1, new String(MoreResources.readBytes(RESOURCE_TEST), StandardCharsets.UTF_8)); - assertEquals(CONTENT_2, new String(MoreResources.readBytes("subdir/resource_test_sub"), StandardCharsets.UTF_8)); - - assertThrows(IllegalArgumentException.class, () -> MoreResources.readBytes("invalid")); - } - - @Test - void testResourceReadDuplicateName() throws IOException { - assertEquals(CONTENT_1, MoreResources.readResource("resource_test_a")); - assertEquals(CONTENT_2, MoreResources.readResource("subdir/resource_test_a")); - } - - @Test - void testListResource() throws IOException { - assertEquals( - Sets.newHashSet("subdir", "resource_test_sub", "resource_test_sub_2", "resource_test_a"), - MoreResources.listResources(MoreResourcesTest.class, "subdir") - .map(Path::getFileName) - .map(Path::toString) - .collect(Collectors.toSet())); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/stream/AirbyteStreamStatusHolderTest.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/stream/AirbyteStreamStatusHolderTest.java deleted file mode 100644 index 756d4fa42a261..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/stream/AirbyteStreamStatusHolderTest.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.stream; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.AirbyteStreamStatusTraceMessage.AirbyteStreamStatus; -import io.airbyte.protocol.models.v0.AirbyteTraceMessage; -import io.airbyte.protocol.models.v0.AirbyteTraceMessage.Type; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import org.junit.jupiter.api.Test; - -/** - * Test suite for the {@link AirbyteStreamStatusHolder} class. - */ -class AirbyteStreamStatusHolderTest { - - @Test - void testToTraceMessage() { - final Double startTime = Long.valueOf(System.currentTimeMillis()).doubleValue(); - final AirbyteStreamNameNamespacePair airbyteStreamNameAndNamespacePair = new AirbyteStreamNameNamespacePair("name", "namespace"); - final AirbyteStreamStatus streamStatus = AirbyteStreamStatus.RUNNING; - final AirbyteStreamStatusHolder holder = new AirbyteStreamStatusHolder(airbyteStreamNameAndNamespacePair, streamStatus); - - final AirbyteTraceMessage traceMessage = holder.toTraceMessage(); - assertTrue(traceMessage.getEmittedAt() >= startTime); - assertEquals(Type.STREAM_STATUS, traceMessage.getType()); - assertEquals(streamStatus, traceMessage.getStreamStatus().getStatus()); - assertEquals(new StreamDescriptor() - .withName(airbyteStreamNameAndNamespacePair.getName()) - .withNamespace(airbyteStreamNameAndNamespacePair.getNamespace()), traceMessage.getStreamStatus().getStreamDescriptor()); - } - - @Test - void testToTraceMessageWithOptionalData() { - final Double startTime = Long.valueOf(System.currentTimeMillis()).doubleValue(); - final AirbyteStreamNameNamespacePair airbyteStreamNameAndNamespacePair = new AirbyteStreamNameNamespacePair("name", "namespace"); - final AirbyteStreamStatus streamStatus = AirbyteStreamStatus.COMPLETE; - final AirbyteStreamStatusHolder holder = new AirbyteStreamStatusHolder(airbyteStreamNameAndNamespacePair, streamStatus); - - final AirbyteTraceMessage traceMessage = holder.toTraceMessage(); - assertTrue(traceMessage.getEmittedAt() >= startTime); - assertEquals(Type.STREAM_STATUS, traceMessage.getType()); - assertEquals(streamStatus, traceMessage.getStreamStatus().getStatus()); - assertEquals(new StreamDescriptor() - .withName(airbyteStreamNameAndNamespacePair.getName()) - .withNamespace(airbyteStreamNameAndNamespacePair.getNamespace()), traceMessage.getStreamStatus().getStreamDescriptor()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/stream/StreamStatusUtilsTest.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/stream/StreamStatusUtilsTest.java deleted file mode 100644 index 5ddbbd2ed2889..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/stream/StreamStatusUtilsTest.java +++ /dev/null @@ -1,498 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.stream; - -import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotEquals; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import io.airbyte.commons.util.AirbyteStreamAware; -import io.airbyte.commons.util.AutoCloseableIterator; -import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteStreamStatusTraceMessage.AirbyteStreamStatus; -import java.util.Optional; -import java.util.function.Consumer; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.ArgumentCaptor; -import org.mockito.Captor; -import org.mockito.junit.jupiter.MockitoExtension; - -/** - * Test suite for the {@link StreamStatusUtils} class. - */ -@ExtendWith(MockitoExtension.class) -class StreamStatusUtilsTest { - - private static final String NAME = "name"; - private static final String NAMESPACE = "namespace"; - - @Captor - private ArgumentCaptor airbyteStreamStatusHolderArgumentCaptor; - - @Test - void testCreateStreamStatusConsumerWrapper() { - final AutoCloseableIterator stream = mock(AutoCloseableIterator.class); - final Optional> streamStatusEmitter = Optional.empty(); - final Consumer messageConsumer = mock(Consumer.class); - - final Consumer wrappedMessageConsumer = - StreamStatusUtils.statusTrackingRecordCollector(stream, messageConsumer, streamStatusEmitter); - - assertNotEquals(messageConsumer, wrappedMessageConsumer); - } - - @Test - void testStreamStatusConsumerWrapperProduceStreamStatus() { - final AirbyteStreamNameNamespacePair airbyteStream = new AirbyteStreamNameNamespacePair(NAME, NAMESPACE); - final AutoCloseableIterator stream = mock(AutoCloseableIterator.class); - final Consumer statusEmitter = mock(Consumer.class); - final Optional> streamStatusEmitter = Optional.of(statusEmitter); - final Consumer messageConsumer = mock(Consumer.class); - final AirbyteMessage airbyteMessage = mock(AirbyteMessage.class); - - when(stream.getAirbyteStream()).thenReturn(Optional.of(airbyteStream)); - - final Consumer wrappedMessageConsumer = - StreamStatusUtils.statusTrackingRecordCollector(stream, messageConsumer, streamStatusEmitter); - - assertNotEquals(messageConsumer, wrappedMessageConsumer); - - wrappedMessageConsumer.accept(airbyteMessage); - wrappedMessageConsumer.accept(airbyteMessage); - wrappedMessageConsumer.accept(airbyteMessage); - - verify(messageConsumer, times(3)).accept(any()); - verify(statusEmitter, times(1)).accept(airbyteStreamStatusHolderArgumentCaptor.capture()); - assertEquals(AirbyteStreamStatus.RUNNING, airbyteStreamStatusHolderArgumentCaptor.getValue().toTraceMessage().getStreamStatus().getStatus()); - } - - @Test - void testEmitRunningStreamStatusIterator() { - final AirbyteStreamNameNamespacePair airbyteStream = new AirbyteStreamNameNamespacePair(NAME, NAMESPACE); - final AutoCloseableIterator stream = mock(AutoCloseableIterator.class); - final Consumer statusEmitter = mock(Consumer.class); - final Optional> streamStatusEmitter = Optional.of(statusEmitter); - - when(stream.getAirbyteStream()).thenReturn(Optional.of(airbyteStream)); - - StreamStatusUtils.emitRunningStreamStatus(stream, streamStatusEmitter); - - verify(statusEmitter, times(1)).accept(airbyteStreamStatusHolderArgumentCaptor.capture()); - assertEquals(AirbyteStreamStatus.RUNNING, airbyteStreamStatusHolderArgumentCaptor.getValue().toTraceMessage().getStreamStatus().getStatus()); - } - - @Test - void testEmitRunningStreamStatusIteratorEmptyAirbyteStream() { - final AutoCloseableIterator stream = mock(AutoCloseableIterator.class); - final Consumer statusEmitter = mock(Consumer.class); - final Optional> streamStatusEmitter = Optional.of(statusEmitter); - - when(stream.getAirbyteStream()).thenReturn(Optional.empty()); - - assertDoesNotThrow(() -> StreamStatusUtils.emitRunningStreamStatus(stream, streamStatusEmitter)); - verify(statusEmitter, times(0)).accept(airbyteStreamStatusHolderArgumentCaptor.capture()); - } - - @Test - void testEmitRunningStreamStatusIteratorEmptyStatusEmitter() { - final AirbyteStreamNameNamespacePair airbyteStream = new AirbyteStreamNameNamespacePair(NAME, NAMESPACE); - final AutoCloseableIterator stream = mock(AutoCloseableIterator.class); - final Optional> streamStatusEmitter = Optional.empty(); - - when(stream.getAirbyteStream()).thenReturn(Optional.of(airbyteStream)); - - assertDoesNotThrow(() -> StreamStatusUtils.emitRunningStreamStatus(stream, streamStatusEmitter)); - } - - @Test - void testEmitRunningStreamStatusAirbyteStreamAware() { - final AirbyteStreamNameNamespacePair airbyteStream = new AirbyteStreamNameNamespacePair(NAME, NAMESPACE); - final AirbyteStreamAware stream = mock(AirbyteStreamAware.class); - final Consumer statusEmitter = mock(Consumer.class); - final Optional> streamStatusEmitter = Optional.of(statusEmitter); - - when(stream.getAirbyteStream()).thenReturn(Optional.of(airbyteStream)); - - StreamStatusUtils.emitRunningStreamStatus(stream, streamStatusEmitter); - - verify(statusEmitter, times(1)).accept(airbyteStreamStatusHolderArgumentCaptor.capture()); - assertEquals(AirbyteStreamStatus.RUNNING, airbyteStreamStatusHolderArgumentCaptor.getValue().toTraceMessage().getStreamStatus().getStatus()); - } - - @Test - void testEmitRunningStreamStatusAirbyteStreamAwareEmptyStream() { - final AirbyteStreamAware stream = mock(AirbyteStreamAware.class); - final Consumer statusEmitter = mock(Consumer.class); - final Optional> streamStatusEmitter = Optional.of(statusEmitter); - - when(stream.getAirbyteStream()).thenReturn(Optional.empty()); - - assertDoesNotThrow(() -> StreamStatusUtils.emitRunningStreamStatus(stream, streamStatusEmitter)); - verify(statusEmitter, times(0)).accept(airbyteStreamStatusHolderArgumentCaptor.capture()); - } - - @Test - void testEmitRunningStreamStatusAirbyteStreamAwareEmptyStatusEmitter() { - final AirbyteStreamNameNamespacePair airbyteStream = new AirbyteStreamNameNamespacePair(NAME, NAMESPACE); - final AirbyteStreamAware stream = mock(AirbyteStreamAware.class); - final Optional> streamStatusEmitter = Optional.empty(); - - when(stream.getAirbyteStream()).thenReturn(Optional.of(airbyteStream)); - - assertDoesNotThrow(() -> StreamStatusUtils.emitRunningStreamStatus(stream, streamStatusEmitter)); - } - - @Test - void testEmitRunningStreamStatusAirbyteStream() { - final AirbyteStreamNameNamespacePair airbyteStream = new AirbyteStreamNameNamespacePair(NAME, NAMESPACE); - final Consumer statusEmitter = mock(Consumer.class); - final Optional> streamStatusEmitter = Optional.of(statusEmitter); - - StreamStatusUtils.emitRunningStreamStatus(Optional.of(airbyteStream), streamStatusEmitter); - - verify(statusEmitter, times(1)).accept(airbyteStreamStatusHolderArgumentCaptor.capture()); - assertEquals(AirbyteStreamStatus.RUNNING, airbyteStreamStatusHolderArgumentCaptor.getValue().toTraceMessage().getStreamStatus().getStatus()); - } - - @Test - void testEmitRunningStreamStatusEmptyAirbyteStream() { - final Consumer statusEmitter = mock(Consumer.class); - final Optional> streamStatusEmitter = Optional.of(statusEmitter); - - assertDoesNotThrow(() -> StreamStatusUtils.emitRunningStreamStatus(Optional.empty(), streamStatusEmitter)); - verify(statusEmitter, times(0)).accept(airbyteStreamStatusHolderArgumentCaptor.capture()); - } - - @Test - void testEmitRunningStreamStatusAirbyteStreamEmptyStatusEmitter() { - final AirbyteStreamNameNamespacePair airbyteStream = new AirbyteStreamNameNamespacePair(NAME, NAMESPACE); - final Optional> streamStatusEmitter = Optional.empty(); - - assertDoesNotThrow(() -> StreamStatusUtils.emitRunningStreamStatus(Optional.of(airbyteStream), streamStatusEmitter)); - } - - @Test - void testEmitStartedStreamStatusIterator() { - final AirbyteStreamNameNamespacePair airbyteStream = new AirbyteStreamNameNamespacePair(NAME, NAMESPACE); - final AutoCloseableIterator stream = mock(AutoCloseableIterator.class); - final Consumer statusEmitter = mock(Consumer.class); - final Optional> streamStatusEmitter = Optional.of(statusEmitter); - - when(stream.getAirbyteStream()).thenReturn(Optional.of(airbyteStream)); - - StreamStatusUtils.emitStartStreamStatus(stream, streamStatusEmitter); - - verify(statusEmitter, times(1)).accept(airbyteStreamStatusHolderArgumentCaptor.capture()); - assertEquals(AirbyteStreamStatus.STARTED, airbyteStreamStatusHolderArgumentCaptor.getValue().toTraceMessage().getStreamStatus().getStatus()); - } - - @Test - void testEmitStartedStreamStatusIteratorEmptyAirbyteStream() { - final AutoCloseableIterator stream = mock(AutoCloseableIterator.class); - final Consumer statusEmitter = mock(Consumer.class); - final Optional> streamStatusEmitter = Optional.of(statusEmitter); - - when(stream.getAirbyteStream()).thenReturn(Optional.empty()); - - assertDoesNotThrow(() -> StreamStatusUtils.emitStartStreamStatus(stream, streamStatusEmitter)); - verify(statusEmitter, times(0)).accept(airbyteStreamStatusHolderArgumentCaptor.capture()); - } - - @Test - void testEmitStartedStreamStatusIteratorEmptyStatusEmitter() { - final AirbyteStreamNameNamespacePair airbyteStream = new AirbyteStreamNameNamespacePair(NAME, NAMESPACE); - final AutoCloseableIterator stream = mock(AutoCloseableIterator.class); - final Optional> streamStatusEmitter = Optional.empty(); - - when(stream.getAirbyteStream()).thenReturn(Optional.of(airbyteStream)); - - assertDoesNotThrow(() -> StreamStatusUtils.emitStartStreamStatus(stream, streamStatusEmitter)); - } - - @Test - void testEmitStartedStreamStatusAirbyteStreamAware() { - final AirbyteStreamNameNamespacePair airbyteStream = new AirbyteStreamNameNamespacePair(NAME, NAMESPACE); - final AirbyteStreamAware stream = mock(AirbyteStreamAware.class); - final Consumer statusEmitter = mock(Consumer.class); - final Optional> streamStatusEmitter = Optional.of(statusEmitter); - - when(stream.getAirbyteStream()).thenReturn(Optional.of(airbyteStream)); - - StreamStatusUtils.emitStartStreamStatus(stream, streamStatusEmitter); - - verify(statusEmitter, times(1)).accept(airbyteStreamStatusHolderArgumentCaptor.capture()); - assertEquals(AirbyteStreamStatus.STARTED, airbyteStreamStatusHolderArgumentCaptor.getValue().toTraceMessage().getStreamStatus().getStatus()); - } - - @Test - void testEmitStartedStreamStatusAirbyteStreamAwareEmptyStream() { - final AirbyteStreamAware stream = mock(AirbyteStreamAware.class); - final Consumer statusEmitter = mock(Consumer.class); - final Optional> streamStatusEmitter = Optional.of(statusEmitter); - - when(stream.getAirbyteStream()).thenReturn(Optional.empty()); - - assertDoesNotThrow(() -> StreamStatusUtils.emitStartStreamStatus(stream, streamStatusEmitter)); - verify(statusEmitter, times(0)).accept(airbyteStreamStatusHolderArgumentCaptor.capture()); - } - - @Test - void testEmitStartedStreamStatusAirbyteStreamAwareEmptyStatusEmitter() { - final AirbyteStreamNameNamespacePair airbyteStream = new AirbyteStreamNameNamespacePair(NAME, NAMESPACE); - final AirbyteStreamAware stream = mock(AirbyteStreamAware.class); - final Optional> streamStatusEmitter = Optional.empty(); - - when(stream.getAirbyteStream()).thenReturn(Optional.of(airbyteStream)); - - assertDoesNotThrow(() -> StreamStatusUtils.emitStartStreamStatus(stream, streamStatusEmitter)); - } - - @Test - void testEmitStartedStreamStatusAirbyteStream() { - final AirbyteStreamNameNamespacePair airbyteStream = new AirbyteStreamNameNamespacePair(NAME, NAMESPACE); - final Consumer statusEmitter = mock(Consumer.class); - final Optional> streamStatusEmitter = Optional.of(statusEmitter); - - StreamStatusUtils.emitStartStreamStatus(Optional.of(airbyteStream), streamStatusEmitter); - - verify(statusEmitter, times(1)).accept(airbyteStreamStatusHolderArgumentCaptor.capture()); - assertEquals(AirbyteStreamStatus.STARTED, airbyteStreamStatusHolderArgumentCaptor.getValue().toTraceMessage().getStreamStatus().getStatus()); - } - - @Test - void testEmitStartedStreamStatusEmptyAirbyteStream() { - final Consumer statusEmitter = mock(Consumer.class); - final Optional> streamStatusEmitter = Optional.of(statusEmitter); - - assertDoesNotThrow(() -> StreamStatusUtils.emitStartStreamStatus(Optional.empty(), streamStatusEmitter)); - verify(statusEmitter, times(0)).accept(airbyteStreamStatusHolderArgumentCaptor.capture()); - } - - @Test - void testEmitStartedStreamStatusAirbyteStreamEmptyStatusEmitter() { - final AirbyteStreamNameNamespacePair airbyteStream = new AirbyteStreamNameNamespacePair(NAME, NAMESPACE); - final Optional> streamStatusEmitter = Optional.empty(); - - assertDoesNotThrow(() -> StreamStatusUtils.emitStartStreamStatus(Optional.of(airbyteStream), streamStatusEmitter)); - } - - @Test - void testEmitCompleteStreamStatusIterator() { - final AirbyteStreamNameNamespacePair airbyteStream = new AirbyteStreamNameNamespacePair(NAME, NAMESPACE); - final AutoCloseableIterator stream = mock(AutoCloseableIterator.class); - final Consumer statusEmitter = mock(Consumer.class); - final Optional> streamStatusEmitter = Optional.of(statusEmitter); - - when(stream.getAirbyteStream()).thenReturn(Optional.of(airbyteStream)); - - StreamStatusUtils.emitCompleteStreamStatus(stream, streamStatusEmitter); - - verify(statusEmitter, times(1)).accept(airbyteStreamStatusHolderArgumentCaptor.capture()); - assertEquals(AirbyteStreamStatus.COMPLETE, airbyteStreamStatusHolderArgumentCaptor.getValue().toTraceMessage().getStreamStatus().getStatus()); - } - - @Test - void testEmitCompleteStreamStatusIteratorEmptyAirbyteStream() { - final AutoCloseableIterator stream = mock(AutoCloseableIterator.class); - final Consumer statusEmitter = mock(Consumer.class); - final Optional> streamStatusEmitter = Optional.of(statusEmitter); - - when(stream.getAirbyteStream()).thenReturn(Optional.empty()); - - assertDoesNotThrow(() -> StreamStatusUtils.emitCompleteStreamStatus(stream, streamStatusEmitter)); - verify(statusEmitter, times(0)).accept(airbyteStreamStatusHolderArgumentCaptor.capture()); - } - - @Test - void testEmitCompleteStreamStatusIteratorEmptyStatusEmitter() { - final AirbyteStreamNameNamespacePair airbyteStream = new AirbyteStreamNameNamespacePair(NAME, NAMESPACE); - final AutoCloseableIterator stream = mock(AutoCloseableIterator.class); - final Optional> streamStatusEmitter = Optional.empty(); - - when(stream.getAirbyteStream()).thenReturn(Optional.of(airbyteStream)); - - assertDoesNotThrow(() -> StreamStatusUtils.emitCompleteStreamStatus(stream, streamStatusEmitter)); - } - - @Test - void testEmitCompleteStreamStatusAirbyteStreamAware() { - final AirbyteStreamNameNamespacePair airbyteStream = new AirbyteStreamNameNamespacePair(NAME, NAMESPACE); - final AirbyteStreamAware stream = mock(AirbyteStreamAware.class); - final Consumer statusEmitter = mock(Consumer.class); - final Optional> streamStatusEmitter = Optional.of(statusEmitter); - - when(stream.getAirbyteStream()).thenReturn(Optional.of(airbyteStream)); - - StreamStatusUtils.emitCompleteStreamStatus(stream, streamStatusEmitter); - - verify(statusEmitter, times(1)).accept(airbyteStreamStatusHolderArgumentCaptor.capture()); - assertEquals(AirbyteStreamStatus.COMPLETE, airbyteStreamStatusHolderArgumentCaptor.getValue().toTraceMessage().getStreamStatus().getStatus()); - } - - @Test - void testEmitCompleteStreamStatusAirbyteStreamAwareEmptyStream() { - final AirbyteStreamAware stream = mock(AirbyteStreamAware.class); - final Consumer statusEmitter = mock(Consumer.class); - final Optional> streamStatusEmitter = Optional.of(statusEmitter); - - when(stream.getAirbyteStream()).thenReturn(Optional.empty()); - - assertDoesNotThrow(() -> StreamStatusUtils.emitCompleteStreamStatus(stream, streamStatusEmitter)); - verify(statusEmitter, times(0)).accept(airbyteStreamStatusHolderArgumentCaptor.capture()); - } - - @Test - void testEmitCompleteStreamStatusAirbyteStreamAwareEmptyStatusEmitter() { - final AirbyteStreamNameNamespacePair airbyteStream = new AirbyteStreamNameNamespacePair(NAME, NAMESPACE); - final AirbyteStreamAware stream = mock(AirbyteStreamAware.class); - final Optional> streamStatusEmitter = Optional.empty(); - - when(stream.getAirbyteStream()).thenReturn(Optional.of(airbyteStream)); - - assertDoesNotThrow(() -> StreamStatusUtils.emitCompleteStreamStatus(stream, streamStatusEmitter)); - } - - @Test - void testEmitCompleteStreamStatusAirbyteStream() { - final AirbyteStreamNameNamespacePair airbyteStream = new AirbyteStreamNameNamespacePair(NAME, NAMESPACE); - final Consumer statusEmitter = mock(Consumer.class); - final Optional> streamStatusEmitter = Optional.of(statusEmitter); - - StreamStatusUtils.emitCompleteStreamStatus(Optional.of(airbyteStream), streamStatusEmitter); - - verify(statusEmitter, times(1)).accept(airbyteStreamStatusHolderArgumentCaptor.capture()); - assertEquals(AirbyteStreamStatus.COMPLETE, airbyteStreamStatusHolderArgumentCaptor.getValue().toTraceMessage().getStreamStatus().getStatus()); - } - - @Test - void testEmitCompleteStreamStatusEmptyAirbyteStream() { - final Consumer statusEmitter = mock(Consumer.class); - final Optional> streamStatusEmitter = Optional.of(statusEmitter); - - assertDoesNotThrow(() -> StreamStatusUtils.emitCompleteStreamStatus(Optional.empty(), streamStatusEmitter)); - verify(statusEmitter, times(0)).accept(airbyteStreamStatusHolderArgumentCaptor.capture()); - } - - @Test - void testEmitCompleteStreamStatusAirbyteStreamEmptyStatusEmitter() { - final AirbyteStreamNameNamespacePair airbyteStream = new AirbyteStreamNameNamespacePair(NAME, NAMESPACE); - final Optional> streamStatusEmitter = Optional.empty(); - - assertDoesNotThrow(() -> StreamStatusUtils.emitCompleteStreamStatus(Optional.of(airbyteStream), streamStatusEmitter)); - } - - @Test - void testEmitIncompleteStreamStatusIterator() { - final AirbyteStreamNameNamespacePair airbyteStream = new AirbyteStreamNameNamespacePair(NAME, NAMESPACE); - final AutoCloseableIterator stream = mock(AutoCloseableIterator.class); - final Consumer statusEmitter = mock(Consumer.class); - final Optional> streamStatusEmitter = Optional.of(statusEmitter); - - when(stream.getAirbyteStream()).thenReturn(Optional.of(airbyteStream)); - - StreamStatusUtils.emitIncompleteStreamStatus(stream, streamStatusEmitter); - - verify(statusEmitter, times(1)).accept(airbyteStreamStatusHolderArgumentCaptor.capture()); - assertEquals(AirbyteStreamStatus.INCOMPLETE, airbyteStreamStatusHolderArgumentCaptor.getValue().toTraceMessage().getStreamStatus().getStatus()); - } - - @Test - void testEmitIncompleteStreamStatusIteratorEmptyAirbyteStream() { - final AutoCloseableIterator stream = mock(AutoCloseableIterator.class); - final Consumer statusEmitter = mock(Consumer.class); - final Optional> streamStatusEmitter = Optional.of(statusEmitter); - - when(stream.getAirbyteStream()).thenReturn(Optional.empty()); - - assertDoesNotThrow(() -> StreamStatusUtils.emitIncompleteStreamStatus(stream, streamStatusEmitter)); - verify(statusEmitter, times(0)).accept(airbyteStreamStatusHolderArgumentCaptor.capture()); - } - - @Test - void testEmitIncompleteStreamStatusIteratorEmptyStatusEmitter() { - final AirbyteStreamNameNamespacePair airbyteStream = new AirbyteStreamNameNamespacePair(NAME, NAMESPACE); - final AutoCloseableIterator stream = mock(AutoCloseableIterator.class); - final Optional> streamStatusEmitter = Optional.empty(); - - when(stream.getAirbyteStream()).thenReturn(Optional.of(airbyteStream)); - - assertDoesNotThrow(() -> StreamStatusUtils.emitIncompleteStreamStatus(stream, streamStatusEmitter)); - } - - @Test - void testEmitIncompleteStreamStatusAirbyteStreamAware() { - final AirbyteStreamNameNamespacePair airbyteStream = new AirbyteStreamNameNamespacePair(NAME, NAMESPACE); - final AirbyteStreamAware stream = mock(AirbyteStreamAware.class); - final Consumer statusEmitter = mock(Consumer.class); - final Optional> streamStatusEmitter = Optional.of(statusEmitter); - - when(stream.getAirbyteStream()).thenReturn(Optional.of(airbyteStream)); - - StreamStatusUtils.emitIncompleteStreamStatus(stream, streamStatusEmitter); - - verify(statusEmitter, times(1)).accept(airbyteStreamStatusHolderArgumentCaptor.capture()); - assertEquals(AirbyteStreamStatus.INCOMPLETE, airbyteStreamStatusHolderArgumentCaptor.getValue().toTraceMessage().getStreamStatus().getStatus()); - } - - @Test - void testEmitIncompleteStreamStatusAirbyteStreamAwareEmptyStream() { - final AirbyteStreamAware stream = mock(AirbyteStreamAware.class); - final Consumer statusEmitter = mock(Consumer.class); - final Optional> streamStatusEmitter = Optional.of(statusEmitter); - - when(stream.getAirbyteStream()).thenReturn(Optional.empty()); - - assertDoesNotThrow(() -> StreamStatusUtils.emitIncompleteStreamStatus(stream, streamStatusEmitter)); - verify(statusEmitter, times(0)).accept(airbyteStreamStatusHolderArgumentCaptor.capture()); - } - - @Test - void testEmitIncompleteStreamStatusAirbyteStreamAwareEmptyStatusEmitter() { - final AirbyteStreamNameNamespacePair airbyteStream = new AirbyteStreamNameNamespacePair(NAME, NAMESPACE); - final AirbyteStreamAware stream = mock(AirbyteStreamAware.class); - final Optional> streamStatusEmitter = Optional.empty(); - - when(stream.getAirbyteStream()).thenReturn(Optional.of(airbyteStream)); - - assertDoesNotThrow(() -> StreamStatusUtils.emitIncompleteStreamStatus(stream, streamStatusEmitter)); - } - - @Test - void testEmitIncompleteStreamStatusAirbyteStream() { - final AirbyteStreamNameNamespacePair airbyteStream = new AirbyteStreamNameNamespacePair(NAME, NAMESPACE); - final Consumer statusEmitter = mock(Consumer.class); - final Optional> streamStatusEmitter = Optional.of(statusEmitter); - - StreamStatusUtils.emitIncompleteStreamStatus(Optional.of(airbyteStream), streamStatusEmitter); - - verify(statusEmitter, times(1)).accept(airbyteStreamStatusHolderArgumentCaptor.capture()); - assertEquals(AirbyteStreamStatus.INCOMPLETE, airbyteStreamStatusHolderArgumentCaptor.getValue().toTraceMessage().getStreamStatus().getStatus()); - } - - @Test - void testEmitIncompleteStreamStatusEmptyAirbyteStream() { - final Consumer statusEmitter = mock(Consumer.class); - final Optional> streamStatusEmitter = Optional.of(statusEmitter); - - assertDoesNotThrow(() -> StreamStatusUtils.emitIncompleteStreamStatus(Optional.empty(), streamStatusEmitter)); - verify(statusEmitter, times(0)).accept(airbyteStreamStatusHolderArgumentCaptor.capture()); - } - - @Test - void testEmitIncompleteStreamStatusAirbyteStreamEmptyStatusEmitter() { - final AirbyteStreamNameNamespacePair airbyteStream = new AirbyteStreamNameNamespacePair(NAME, NAMESPACE); - final Optional> streamStatusEmitter = Optional.empty(); - - assertDoesNotThrow(() -> StreamStatusUtils.emitIncompleteStreamStatus(Optional.of(airbyteStream), streamStatusEmitter)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/string/StringsTest.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/string/StringsTest.java deleted file mode 100644 index 49805da19e9ea..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/string/StringsTest.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.string; - -import com.google.common.collect.Lists; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -class StringsTest { - - private static class JoinClass { - - private final int id; - - public JoinClass(final int id) { - this.id = id; - } - - @Override - public String toString() { - return "id = " + id; - } - - } - - @Test - void testJoin() { - Assertions.assertEquals( - "1, 2, 3, 4, 5", - Strings.join(Lists.newArrayList(1, 2, 3, 4, 5), ", ")); - - Assertions.assertEquals( - "id = 1, id = 2, id = 3", - Strings.join(Lists.newArrayList(new JoinClass(1), new JoinClass(2), new JoinClass(3)), ", ")); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/text/NamesTest.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/text/NamesTest.java deleted file mode 100644 index 1c8b469bd78e4..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/text/NamesTest.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.text; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; - -import org.junit.jupiter.api.Test; - -class NamesTest { - - @Test - void testToAlphanumericAndUnderscore() { - assertEquals("users", Names.toAlphanumericAndUnderscore("users")); - assertEquals("users123", Names.toAlphanumericAndUnderscore("users123")); - assertEquals("UsErS", Names.toAlphanumericAndUnderscore("UsErS")); - assertEquals("users_USE_special_____", Names.toAlphanumericAndUnderscore("users USE special !@#$")); - } - - @Test - void testDoubleQuote() { - assertEquals("\"abc\"", Names.doubleQuote("abc")); - assertEquals("\"abc\"", Names.doubleQuote("\"abc\"")); - assertThrows(IllegalStateException.class, () -> Names.doubleQuote("\"abc")); - assertThrows(IllegalStateException.class, () -> Names.doubleQuote("abc\"")); - } - - @Test - void testSimpleQuote() { - assertEquals("'abc'", Names.singleQuote("abc")); - assertEquals("'abc'", Names.singleQuote("'abc'")); - assertThrows(IllegalStateException.class, () -> Names.singleQuote("'abc")); - assertThrows(IllegalStateException.class, () -> Names.singleQuote("abc'")); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/text/SqlsTest.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/text/SqlsTest.java deleted file mode 100644 index f77869144e42c..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/text/SqlsTest.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.text; - -import static io.airbyte.commons.text.Sqls.toSqlName; - -import com.google.common.collect.Lists; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -class SqlsTest { - - enum E1 { - VALUE_1, - VALUE_TWO, - value_three, - } - - @Test - void testToSqlName() { - Assertions.assertEquals("value_1", toSqlName(E1.VALUE_1)); - Assertions.assertEquals("value_two", toSqlName(E1.VALUE_TWO)); - Assertions.assertEquals("value_three", toSqlName(E1.value_three)); - } - - @Test - void testInFragment() { - Assertions.assertEquals("('value_two','value_three')", Sqls.toSqlInFragment(Lists.newArrayList(E1.VALUE_TWO, E1.value_three))); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/util/AutoCloseableIteratorsTest.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/util/AutoCloseableIteratorsTest.java deleted file mode 100644 index 3fbb86b2d9507..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/util/AutoCloseableIteratorsTest.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.util; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; - -import com.google.common.collect.Iterators; -import io.airbyte.commons.concurrency.VoidCallable; -import java.util.Iterator; -import java.util.List; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.stream.Stream; -import org.junit.jupiter.api.Test; - -class AutoCloseableIteratorsTest { - - @Test - void testFromIterator() throws Exception { - final VoidCallable onClose = mock(VoidCallable.class); - final AutoCloseableIterator iterator = AutoCloseableIterators.fromIterator(MoreIterators.of("a", "b", "c"), onClose, null); - - assertNext(iterator, "a"); - assertNext(iterator, "b"); - assertNext(iterator, "c"); - iterator.close(); - - verify(onClose).call(); - } - - @Test - void testFromStream() throws Exception { - final AtomicBoolean isClosed = new AtomicBoolean(false); - final Stream stream = Stream.of("a", "b", "c"); - stream.onClose(() -> isClosed.set(true)); - - final AutoCloseableIterator iterator = AutoCloseableIterators.fromStream(stream, null); - - assertNext(iterator, "a"); - assertNext(iterator, "b"); - assertNext(iterator, "c"); - iterator.close(); - - assertTrue(isClosed.get()); - } - - private void assertNext(final Iterator iterator, final String value) { - assertTrue(iterator.hasNext()); - assertEquals(value, iterator.next()); - } - - @Test - void testAppendOnClose() throws Exception { - final VoidCallable onClose1 = mock(VoidCallable.class); - final VoidCallable onClose2 = mock(VoidCallable.class); - - final AutoCloseableIterator iterator = AutoCloseableIterators.fromIterator(MoreIterators.of(1, 2, 3), onClose1, null); - final AutoCloseableIterator iteratorWithExtraClose = AutoCloseableIterators.appendOnClose(iterator, onClose2); - - iteratorWithExtraClose.close(); - verify(onClose1).call(); - verify(onClose2).call(); - } - - @Test - void testTransform() { - final Iterator transform = Iterators.transform(MoreIterators.of(1, 2, 3), i -> i + 1); - assertEquals(List.of(2, 3, 4), MoreIterators.toList(transform)); - } - - @Test - void testConcatWithEagerClose() throws Exception { - final VoidCallable onClose1 = mock(VoidCallable.class); - final VoidCallable onClose2 = mock(VoidCallable.class); - - final AutoCloseableIterator iterator = new CompositeIterator<>(List.of( - AutoCloseableIterators.fromIterator(MoreIterators.of("a", "b"), onClose1, null), - AutoCloseableIterators.fromIterator(MoreIterators.of("d"), onClose2, null)), null); - - assertOnCloseInvocations(List.of(), List.of(onClose1, onClose2)); - assertNext(iterator, "a"); - assertNext(iterator, "b"); - assertNext(iterator, "d"); - assertOnCloseInvocations(List.of(onClose1), List.of(onClose2)); - assertFalse(iterator.hasNext()); - assertOnCloseInvocations(List.of(onClose1, onClose2), List.of()); - - iterator.close(); - - verify(onClose1, times(1)).call(); - verify(onClose2, times(1)).call(); - } - - private void assertOnCloseInvocations(final List haveClosed, final List haveNotClosed) throws Exception { - for (final VoidCallable voidCallable : haveClosed) { - verify(voidCallable).call(); - } - - for (final VoidCallable voidCallable : haveNotClosed) { - verify(voidCallable, never()).call(); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/util/CompositeIteratorTest.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/util/CompositeIteratorTest.java deleted file mode 100644 index 478c4a499305f..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/util/CompositeIteratorTest.java +++ /dev/null @@ -1,157 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.util; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; - -import com.google.common.collect.ImmutableList; -import io.airbyte.commons.concurrency.VoidCallable; -import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; -import java.util.Collections; -import java.util.Iterator; -import java.util.List; -import java.util.function.Consumer; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class CompositeIteratorTest { - - private VoidCallable onClose1; - private VoidCallable onClose2; - private VoidCallable onClose3; - private Consumer airbyteStreamStatusConsumer; - private AirbyteStreamNameNamespacePair airbyteStream1; - private AirbyteStreamNameNamespacePair airbyteStream2; - private AirbyteStreamNameNamespacePair airbyteStream3; - - @BeforeEach - void setup() { - onClose1 = mock(VoidCallable.class); - onClose2 = mock(VoidCallable.class); - onClose3 = mock(VoidCallable.class); - airbyteStreamStatusConsumer = mock(Consumer.class); - airbyteStream1 = new AirbyteStreamNameNamespacePair("stream1", "namespace"); - airbyteStream2 = new AirbyteStreamNameNamespacePair("stream2", "namespace"); - airbyteStream3 = new AirbyteStreamNameNamespacePair("stream3", "namespace"); - } - - @Test - void testNullInput() { - assertThrows(NullPointerException.class, () -> new CompositeIterator<>(null, airbyteStreamStatusConsumer)); - verify(airbyteStreamStatusConsumer, times(0)).accept(any()); - } - - @Test - void testEmptyInput() { - final AutoCloseableIterator iterator = new CompositeIterator<>(Collections.emptyList(), airbyteStreamStatusConsumer); - assertFalse(iterator.hasNext()); - verify(airbyteStreamStatusConsumer, times(0)).accept(any()); - } - - @Test - void testMultipleIterators() throws Exception { - final AutoCloseableIterator iterator = new CompositeIterator<>(ImmutableList.of( - AutoCloseableIterators.fromIterator(MoreIterators.of("a", "b", "c"), onClose1, airbyteStream1), - AutoCloseableIterators.fromIterator(MoreIterators.of("d", "e", "f"), onClose2, airbyteStream2), - AutoCloseableIterators.fromIterator(MoreIterators.of("g", "h", "i"), onClose3, airbyteStream3)), airbyteStreamStatusConsumer); - - assertOnCloseInvocations(ImmutableList.of(), ImmutableList.of(onClose1, onClose2, onClose3)); - assertNext(iterator, "a"); - assertNext(iterator, "b"); - assertNext(iterator, "c"); - assertNext(iterator, "d"); - assertOnCloseInvocations(ImmutableList.of(onClose1), ImmutableList.of(onClose2, onClose3)); - assertNext(iterator, "e"); - assertNext(iterator, "f"); - assertNext(iterator, "g"); - assertOnCloseInvocations(ImmutableList.of(onClose1, onClose2), ImmutableList.of(onClose3)); - assertNext(iterator, "h"); - assertNext(iterator, "i"); - assertOnCloseInvocations(ImmutableList.of(onClose1, onClose2), ImmutableList.of(onClose3)); - assertFalse(iterator.hasNext()); - assertOnCloseInvocations(ImmutableList.of(onClose1, onClose2, onClose3), ImmutableList.of()); - - iterator.close(); - - verify(onClose1, times(1)).call(); - verify(onClose2, times(1)).call(); - verify(onClose3, times(1)).call(); - verify(airbyteStreamStatusConsumer, times(9)).accept(any()); - } - - @Test - void testWithEmptyIterators() throws Exception { - final AutoCloseableIterator iterator = new CompositeIterator<>(ImmutableList.of( - AutoCloseableIterators.fromIterator(MoreIterators.of("a", "b", "c"), onClose1, airbyteStream1), - AutoCloseableIterators.fromIterator(MoreIterators.of(), onClose2, airbyteStream2), - AutoCloseableIterators.fromIterator(MoreIterators.of("g", "h", "i"), onClose3, airbyteStream3)), airbyteStreamStatusConsumer); - - assertOnCloseInvocations(ImmutableList.of(), ImmutableList.of(onClose1, onClose2, onClose3)); - assertNext(iterator, "a"); - assertNext(iterator, "b"); - assertNext(iterator, "c"); - assertNext(iterator, "g"); - assertOnCloseInvocations(ImmutableList.of(onClose1, onClose2), ImmutableList.of(onClose3)); - assertNext(iterator, "h"); - assertNext(iterator, "i"); - assertFalse(iterator.hasNext()); - assertOnCloseInvocations(ImmutableList.of(onClose1, onClose2, onClose3), ImmutableList.of()); - verify(airbyteStreamStatusConsumer, times(8)).accept(any()); - } - - @Test - void testCloseBeforeUsingItUp() throws Exception { - final AutoCloseableIterator iterator = new CompositeIterator<>(ImmutableList.of( - AutoCloseableIterators.fromIterator(MoreIterators.of("a", "b", "c"), onClose1, airbyteStream1)), airbyteStreamStatusConsumer); - - assertOnCloseInvocations(ImmutableList.of(), ImmutableList.of(onClose1)); - assertNext(iterator, "a"); - assertNext(iterator, "b"); - assertOnCloseInvocations(ImmutableList.of(), ImmutableList.of(onClose1)); - iterator.close(); - assertOnCloseInvocations(ImmutableList.of(onClose1), ImmutableList.of()); - verify(airbyteStreamStatusConsumer, times(2)).accept(any()); - } - - @SuppressWarnings("ResultOfMethodCallIgnored") - @Test - void testCannotOperateAfterClosing() throws Exception { - final AutoCloseableIterator iterator = new CompositeIterator<>(ImmutableList.of( - AutoCloseableIterators.fromIterator(MoreIterators.of("a", "b", "c"), onClose1, airbyteStream1)), airbyteStreamStatusConsumer); - - assertOnCloseInvocations(ImmutableList.of(), ImmutableList.of(onClose1)); - assertNext(iterator, "a"); - assertNext(iterator, "b"); - iterator.close(); - assertThrows(IllegalStateException.class, iterator::hasNext); - assertThrows(IllegalStateException.class, iterator::next); - iterator.close(); // still allowed to close again. - verify(airbyteStreamStatusConsumer, times(2)).accept(any()); - } - - private void assertNext(final Iterator iterator, final String value) { - assertTrue(iterator.hasNext()); - assertEquals(value, iterator.next()); - } - - private void assertOnCloseInvocations(final List haveClosed, final List haveNotClosed) throws Exception { - for (final VoidCallable voidCallable : haveClosed) { - verify(voidCallable).call(); - } - - for (final VoidCallable voidCallable : haveNotClosed) { - verify(voidCallable, never()).call(); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/util/DefaultAutoCloseableIteratorTest.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/util/DefaultAutoCloseableIteratorTest.java deleted file mode 100644 index 18d1046ebff80..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/util/DefaultAutoCloseableIteratorTest.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.util; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; - -import io.airbyte.commons.concurrency.VoidCallable; -import java.util.Collections; -import java.util.Iterator; -import org.junit.jupiter.api.Test; - -class DefaultAutoCloseableIteratorTest { - - @Test - void testNullInput() { - final VoidCallable onClose = mock(VoidCallable.class); - assertThrows(NullPointerException.class, () -> new DefaultAutoCloseableIterator<>(null, onClose, null)); - assertThrows(NullPointerException.class, () -> new DefaultAutoCloseableIterator<>(Collections.emptyIterator(), null, null)); - assertThrows(NullPointerException.class, () -> new DefaultAutoCloseableIterator<>(null, null, null)); - } - - @Test - void testEmptyInput() throws Exception { - final VoidCallable onClose = mock(VoidCallable.class); - final AutoCloseableIterator iterator = new DefaultAutoCloseableIterator<>(Collections.emptyIterator(), onClose, null); - assertFalse(iterator.hasNext()); - iterator.close(); - verify(onClose).call(); - } - - @Test - void test() throws Exception { - final VoidCallable onClose = mock(VoidCallable.class); - final AutoCloseableIterator iterator = new DefaultAutoCloseableIterator<>(MoreIterators.of("a", "b", "c"), onClose, null); - - assertNext(iterator, "a"); - assertNext(iterator, "b"); - assertNext(iterator, "c"); - iterator.close(); - - verify(onClose).call(); - } - - @Test - void testCannotOperateAfterClosing() throws Exception { - final VoidCallable onClose = mock(VoidCallable.class); - final AutoCloseableIterator iterator = new DefaultAutoCloseableIterator<>(MoreIterators.of("a", "b", "c"), onClose, null); - - assertNext(iterator, "a"); - assertNext(iterator, "b"); - iterator.close(); - assertThrows(IllegalStateException.class, iterator::hasNext); - assertThrows(IllegalStateException.class, iterator::next); - iterator.close(); // still allowed to close again. - } - - private void assertNext(final Iterator iterator, final String value) { - assertTrue(iterator.hasNext()); - assertEquals(value, iterator.next()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/util/LazyAutoCloseableIteratorTest.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/util/LazyAutoCloseableIteratorTest.java deleted file mode 100644 index cf3c431b57259..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/util/LazyAutoCloseableIteratorTest.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.util; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoMoreInteractions; -import static org.mockito.Mockito.when; - -import java.util.Collections; -import java.util.Iterator; -import java.util.function.Supplier; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class LazyAutoCloseableIteratorTest { - - private AutoCloseableIterator internalIterator; - private Supplier> iteratorSupplier; - - @SuppressWarnings("unchecked") - @BeforeEach - void setup() { - internalIterator = (AutoCloseableIterator) mock(AutoCloseableIterator.class); - iteratorSupplier = mock(Supplier.class); - when(iteratorSupplier.get()).thenReturn(internalIterator); - } - - @Test - void testNullInput() { - assertThrows(NullPointerException.class, () -> new LazyAutoCloseableIterator<>(null, null)); - final AutoCloseableIterator iteratorWithNullSupplier = new LazyAutoCloseableIterator<>(() -> null, null); - assertThrows(NullPointerException.class, iteratorWithNullSupplier::next); - } - - @Test - void testEmptyInput() throws Exception { - mockInternalIteratorWith(Collections.emptyIterator()); - final AutoCloseableIterator iterator = new LazyAutoCloseableIterator<>(iteratorSupplier, null); - - assertFalse(iterator.hasNext()); - iterator.close(); - verify(internalIterator).close(); - } - - @Test - void test() throws Exception { - mockInternalIteratorWith(MoreIterators.of("a", "b", "c")); - - final AutoCloseableIterator iterator = new LazyAutoCloseableIterator<>(iteratorSupplier, null); - verify(iteratorSupplier, never()).get(); - assertNext(iterator, "a"); - verify(iteratorSupplier).get(); - verifyNoMoreInteractions(iteratorSupplier); - assertNext(iterator, "b"); - assertNext(iterator, "c"); - iterator.close(); - verify(internalIterator).close(); - } - - @Test - void testCloseBeforeSupply() throws Exception { - mockInternalIteratorWith(MoreIterators.of("a", "b", "c")); - final AutoCloseableIterator iterator = new LazyAutoCloseableIterator<>(iteratorSupplier, null); - iterator.close(); - verify(iteratorSupplier, never()).get(); - } - - private void mockInternalIteratorWith(final Iterator iterator) { - when(internalIterator.hasNext()).then((a) -> iterator.hasNext()); - when(internalIterator.next()).then((a) -> iterator.next()); - } - - private void assertNext(final Iterator iterator, final String value) { - assertTrue(iterator.hasNext()); - assertEquals(value, iterator.next()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/version/AirbyteVersionTest.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/version/AirbyteVersionTest.java deleted file mode 100644 index 00c157de431d2..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/version/AirbyteVersionTest.java +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.version; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import org.junit.jupiter.api.Test; - -class AirbyteVersionTest { - - private static final String VERSION_678 = "6.7.8"; - private static final String VERSION_678_OMEGA = "6.7.8-omega"; - private static final String VERSION_678_ALPHA = "6.7.8-alpha"; - private static final String VERSION_678_GAMMA = "6.7.8-gamma"; - private static final String VERSION_679_ALPHA = "6.7.9-alpha"; - private static final String VERSION_680_ALPHA = "6.8.0-alpha"; - private static final String VERSION_6110_ALPHA = "6.11.0-alpha"; - private static final String VERSION_123_PROD = "1.2.3-prod"; - private static final String DEV = "dev"; - private static final String VERSION_380_ALPHA = "3.8.0-alpha"; - - @Test - void testParseVersion() { - final AirbyteVersion version = new AirbyteVersion(VERSION_678); - assertEquals("6", version.getMajorVersion()); - assertEquals("7", version.getMinorVersion()); - assertEquals("8", version.getPatchVersion()); - } - - @Test - void testParseVersionWithLabel() { - final AirbyteVersion version = new AirbyteVersion(VERSION_678_OMEGA); - assertEquals("6", version.getMajorVersion()); - assertEquals("7", version.getMinorVersion()); - assertEquals("8", version.getPatchVersion()); - } - - @Test - void testCompatibleVersionCompareTo() { - assertEquals(0, new AirbyteVersion(VERSION_678_OMEGA).compatibleVersionCompareTo(new AirbyteVersion(VERSION_678_GAMMA))); - assertEquals(0, new AirbyteVersion(VERSION_678_ALPHA).compatibleVersionCompareTo(new AirbyteVersion(VERSION_679_ALPHA))); - assertTrue(0 < new AirbyteVersion(VERSION_680_ALPHA).compatibleVersionCompareTo(new AirbyteVersion(VERSION_678_ALPHA))); - assertTrue(0 < new AirbyteVersion("11.8.0-alpha").compatibleVersionCompareTo(new AirbyteVersion(VERSION_678_ALPHA))); - assertTrue(0 < new AirbyteVersion(VERSION_6110_ALPHA).compatibleVersionCompareTo(new AirbyteVersion(VERSION_678_ALPHA))); - assertTrue(0 > new AirbyteVersion("0.8.0-alpha").compatibleVersionCompareTo(new AirbyteVersion(VERSION_678_ALPHA))); - assertEquals(0, new AirbyteVersion(VERSION_123_PROD).compatibleVersionCompareTo(new AirbyteVersion(DEV))); - assertEquals(0, new AirbyteVersion(DEV).compatibleVersionCompareTo(new AirbyteVersion(VERSION_123_PROD))); - } - - @Test - void testPatchVersionCompareTo() { - assertEquals(0, new AirbyteVersion(VERSION_678_OMEGA).patchVersionCompareTo(new AirbyteVersion(VERSION_678_GAMMA))); - assertTrue(0 > new AirbyteVersion(VERSION_678_ALPHA).patchVersionCompareTo(new AirbyteVersion(VERSION_679_ALPHA))); - assertTrue(0 > new AirbyteVersion(VERSION_678_ALPHA).patchVersionCompareTo(new AirbyteVersion("6.7.11-alpha"))); - assertTrue(0 < new AirbyteVersion(VERSION_680_ALPHA).patchVersionCompareTo(new AirbyteVersion(VERSION_678_ALPHA))); - assertTrue(0 < new AirbyteVersion(VERSION_6110_ALPHA).patchVersionCompareTo(new AirbyteVersion(VERSION_678_ALPHA))); - assertTrue(0 > new AirbyteVersion(VERSION_380_ALPHA).patchVersionCompareTo(new AirbyteVersion(VERSION_678_ALPHA))); - assertTrue(0 > new AirbyteVersion(VERSION_380_ALPHA).patchVersionCompareTo(new AirbyteVersion("11.7.8-alpha"))); - assertEquals(0, new AirbyteVersion(VERSION_123_PROD).patchVersionCompareTo(new AirbyteVersion(DEV))); - assertEquals(0, new AirbyteVersion(DEV).patchVersionCompareTo(new AirbyteVersion(VERSION_123_PROD))); - } - - @Test - void testGreaterThan() { - assertFalse(new AirbyteVersion(VERSION_678_OMEGA).greaterThan(new AirbyteVersion(VERSION_678_GAMMA))); - assertFalse(new AirbyteVersion(VERSION_678_ALPHA).greaterThan(new AirbyteVersion(VERSION_679_ALPHA))); - assertFalse(new AirbyteVersion(VERSION_678_ALPHA).greaterThan(new AirbyteVersion("6.7.11-alpha"))); - assertTrue(new AirbyteVersion(VERSION_680_ALPHA).greaterThan(new AirbyteVersion(VERSION_678_ALPHA))); - assertTrue(new AirbyteVersion(VERSION_6110_ALPHA).greaterThan(new AirbyteVersion(VERSION_678_ALPHA))); - assertFalse(new AirbyteVersion(VERSION_380_ALPHA).greaterThan(new AirbyteVersion(VERSION_678_ALPHA))); - assertFalse(new AirbyteVersion(VERSION_380_ALPHA).greaterThan(new AirbyteVersion("11.7.8-alpha"))); - assertFalse(new AirbyteVersion(VERSION_123_PROD).greaterThan(new AirbyteVersion(DEV))); - assertFalse(new AirbyteVersion(DEV).greaterThan(new AirbyteVersion(VERSION_123_PROD))); - } - - @Test - void testLessThan() { - assertFalse(new AirbyteVersion(VERSION_678_OMEGA).lessThan(new AirbyteVersion(VERSION_678_GAMMA))); - assertTrue(new AirbyteVersion(VERSION_678_ALPHA).lessThan(new AirbyteVersion(VERSION_679_ALPHA))); - assertTrue(new AirbyteVersion(VERSION_678_ALPHA).lessThan(new AirbyteVersion("6.7.11-alpha"))); - assertFalse(new AirbyteVersion(VERSION_680_ALPHA).lessThan(new AirbyteVersion(VERSION_678_ALPHA))); - assertFalse(new AirbyteVersion(VERSION_6110_ALPHA).lessThan(new AirbyteVersion(VERSION_678_ALPHA))); - assertTrue(new AirbyteVersion(VERSION_380_ALPHA).lessThan(new AirbyteVersion(VERSION_678_ALPHA))); - assertTrue(new AirbyteVersion(VERSION_380_ALPHA).lessThan(new AirbyteVersion("11.7.8-alpha"))); - assertFalse(new AirbyteVersion(VERSION_123_PROD).lessThan(new AirbyteVersion(DEV))); - assertFalse(new AirbyteVersion(DEV).lessThan(new AirbyteVersion(VERSION_123_PROD))); - } - - @Test - void testInvalidVersions() { - assertThrows(NullPointerException.class, () -> new AirbyteVersion(null)); - assertThrows(IllegalArgumentException.class, () -> new AirbyteVersion("0.6")); - } - - @Test - void testSerialize() { - assertEquals(DEV, new AirbyteVersion(DEV).serialize()); - - final var nonDevVersion = "0.1.2-alpha"; - assertEquals(nonDevVersion, new AirbyteVersion(nonDevVersion).serialize()); - } - - @Test - void testCheckVersion() { - AirbyteVersion.assertIsCompatible(new AirbyteVersion("3.2.1"), new AirbyteVersion("3.2.1")); - assertThrows(IllegalStateException.class, () -> AirbyteVersion.assertIsCompatible(new AirbyteVersion("1.2.3"), new AirbyteVersion("3.2.1"))); - } - - @Test - void testCheckOnlyPatchVersion() { - assertFalse(new AirbyteVersion(VERSION_678).checkOnlyPatchVersionIsUpdatedComparedTo(new AirbyteVersion(VERSION_678))); - assertFalse(new AirbyteVersion("6.9.8").checkOnlyPatchVersionIsUpdatedComparedTo(new AirbyteVersion("6.8.9"))); - assertFalse(new AirbyteVersion("7.7.8").checkOnlyPatchVersionIsUpdatedComparedTo(new AirbyteVersion("6.7.11"))); - assertTrue(new AirbyteVersion("6.7.9").checkOnlyPatchVersionIsUpdatedComparedTo(new AirbyteVersion(VERSION_678))); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/version/VersionTest.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/version/VersionTest.java deleted file mode 100644 index a620af01f2ad2..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/version/VersionTest.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.version; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import io.airbyte.commons.json.Jsons; -import org.junit.jupiter.api.Test; - -class VersionTest { - - @Test - void testJsonSerializationDeserialization() { - final String jsonString = """ - {"version": "1.2.3"} - """; - final Version expectedVersion = new Version("1.2.3"); - - final Version deserializedVersion = Jsons.deserialize(jsonString, Version.class); - assertEquals(expectedVersion, deserializedVersion); - - final Version deserializedVersionLoop = Jsons.deserialize(Jsons.serialize(deserializedVersion), Version.class); - assertEquals(expectedVersion, deserializedVersionLoop); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/yaml/YamlsTest.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/yaml/YamlsTest.java deleted file mode 100644 index 81cefc795bdae..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/commons/yaml/YamlsTest.java +++ /dev/null @@ -1,196 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.yaml; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.fail; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.verify; - -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.lang.CloseableConsumer; -import io.airbyte.commons.stream.MoreStreams; -import io.airbyte.commons.util.AutoCloseableIterator; -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.StringWriter; -import java.nio.charset.StandardCharsets; -import java.util.List; -import java.util.Objects; -import java.util.stream.Collectors; -import org.junit.jupiter.api.Test; - -class YamlsTest { - - private static final String LINE_BREAK = "---\n"; - private static final String STR_ABC = "str: \"abc\"\n"; - private static final String ABC = "abc"; - - @Test - void testSerialize() { - assertEquals( - LINE_BREAK - + STR_ABC - + "num: 999\n" - + "numLong: 888\n", - Yamls.serialize(new ToClass(ABC, 999, 888L))); - - assertEquals( - LINE_BREAK - + "test: \"abc\"\n" - + "test2: \"def\"\n", - Yamls.serialize( - ImmutableMap.of( - "test", ABC, - "test2", "def"))); - } - - @Test - void testSerializeWithoutQuotes() { - assertEquals( - LINE_BREAK - + "str: abc\n" - + "num: 999\n" - + "numLong: 888\n", - Yamls.serializeWithoutQuotes(new ToClass(ABC, 999, 888L))); - - assertEquals( - LINE_BREAK - + "test: abc\n" - + "test2: def\n", - Yamls.serializeWithoutQuotes( - ImmutableMap.of( - "test", ABC, - "test2", "def"))); - } - - @Test - void testSerializeJsonNode() { - assertEquals( - LINE_BREAK - + STR_ABC - + "num: 999\n" - + "numLong: 888\n", - Yamls.serialize(Jsons.jsonNode(new ToClass(ABC, 999, 888L)))); - - assertEquals( - LINE_BREAK - + "test: \"abc\"\n" - + "test2: \"def\"\n", - Yamls.serialize(Jsons.jsonNode(ImmutableMap.of( - "test", ABC, - "test2", "def")))); - } - - @Test - void testDeserialize() { - assertEquals( - new ToClass(ABC, 999, 888L), - Yamls.deserialize( - LINE_BREAK - + STR_ABC - + "num: \"999\"\n" - + "numLong: \"888\"\n", - ToClass.class)); - } - - @Test - void testDeserializeToJsonNode() { - assertEquals( - "{\"str\":\"abc\"}", - Yamls.deserialize( - LINE_BREAK - + STR_ABC) - .toString()); - - assertEquals( - "[{\"str\":\"abc\"},{\"str\":\"abc\"}]", - Yamls.deserialize( - LINE_BREAK - + "- str: \"abc\"\n" - + "- str: \"abc\"\n") - .toString()); - } - - @Test - void testListWriter() throws Exception { - final List values = Lists.newArrayList(1, 2, 3); - final StringWriter writer = spy(new StringWriter()); - final CloseableConsumer consumer = Yamls.listWriter(writer); - values.forEach(consumer); - consumer.close(); - - verify(writer).close(); - - final List deserialize = Yamls.deserialize(writer.toString(), List.class); - assertEquals(values, deserialize); - } - - @Test - void testStreamRead() throws IOException { - final List classes = Lists.newArrayList( - new ToClass("1", 1, 1), - new ToClass("2", 2, 2), - new ToClass("3", 3, 3)); - final ByteArrayInputStream input = spy(new ByteArrayInputStream(Yamls.serialize(classes).getBytes(StandardCharsets.UTF_8))); - - try (final AutoCloseableIterator iterator = Yamls.deserializeArray(input)) { - assertEquals( - classes, - MoreStreams.toStream(iterator) - .map(e -> Jsons.object(e, ToClass.class)) - .collect(Collectors.toList())); - } catch (final Exception e) { - fail(); - } - - verify(input).close(); - } - - private static class ToClass { - - @JsonProperty("str") - String str; - - @JsonProperty("num") - Integer num; - - @JsonProperty("numLong") - long numLong; - - public ToClass() {} - - public ToClass(final String str, final Integer num, final long numLong) { - this.str = str; - this.num = num; - this.numLong = numLong; - } - - @Override - public boolean equals(final Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - final ToClass toClass = (ToClass) o; - return numLong == toClass.numLong - && Objects.equals(str, toClass.str) - && Objects.equals(num, toClass.num); - } - - @Override - public int hashCode() { - return Objects.hash(str, num, numLong); - } - - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/configoss/ConfigSchemaTest.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/configoss/ConfigSchemaTest.java deleted file mode 100644 index 19ec815c4234d..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/configoss/ConfigSchemaTest.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.configoss; - -import static org.junit.jupiter.api.Assertions.assertTrue; - -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import org.junit.jupiter.api.Test; - -class ConfigSchemaTest { - - @Test - void testFile() throws IOException { - final String schema = Files.readString(ConfigSchema.STATE.getConfigSchemaFile().toPath(), StandardCharsets.UTF_8); - assertTrue(schema.contains("title")); - } - - @Test - void testPrepareKnownSchemas() { - for (final ConfigSchema value : ConfigSchema.values()) { - assertTrue(Files.exists(value.getConfigSchemaFile().toPath()), value.getConfigSchemaFile().toPath().toString() + " does not exist"); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/configoss/DataTypeEnumTest.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/configoss/DataTypeEnumTest.java deleted file mode 100644 index 9bd09dc411c36..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/configoss/DataTypeEnumTest.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.configoss; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; - -import io.airbyte.protocol.models.JsonSchemaPrimitiveUtil.JsonSchemaPrimitive; -import org.junit.jupiter.api.Test; - -class DataTypeEnumTest { - - // We use JsonSchemaPrimitive in tests to construct schemas. We want to verify that their are valid - // conversions between JsonSchemaPrimitive to DataType so that if anything changes we won't have - // hard-to-decipher errors in our tests. Once we get rid of Schema, we can can drop this test. - @Test - void testConversionFromJsonSchemaPrimitiveToDataType() { - assertEquals(5, DataType.class.getEnumConstants().length); - assertEquals(17, JsonSchemaPrimitive.class.getEnumConstants().length); - - assertEquals(DataType.STRING, DataType.fromValue(JsonSchemaPrimitive.STRING.toString().toLowerCase())); - assertEquals(DataType.NUMBER, DataType.fromValue(JsonSchemaPrimitive.NUMBER.toString().toLowerCase())); - assertEquals(DataType.BOOLEAN, DataType.fromValue(JsonSchemaPrimitive.BOOLEAN.toString().toLowerCase())); - assertEquals(DataType.ARRAY, DataType.fromValue(JsonSchemaPrimitive.ARRAY.toString().toLowerCase())); - assertEquals(DataType.OBJECT, DataType.fromValue(JsonSchemaPrimitive.OBJECT.toString().toLowerCase())); - assertThrows(IllegalArgumentException.class, () -> DataType.fromValue(JsonSchemaPrimitive.NULL.toString().toLowerCase())); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/configoss/helpers/YamlListToStandardDefinitionsTest.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/configoss/helpers/YamlListToStandardDefinitionsTest.java deleted file mode 100644 index 869d1159e3fd2..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/configoss/helpers/YamlListToStandardDefinitionsTest.java +++ /dev/null @@ -1,144 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.configoss.helpers; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; -import io.airbyte.commons.jackson.MoreMappers; -import io.airbyte.configoss.StandardDestinationDefinition; -import org.junit.jupiter.api.Nested; -import org.junit.jupiter.api.Test; - -class YamlListToStandardDefinitionsTest { - - private static final String DESTINATION_DEFINITION_ID = "- destinationDefinitionId: a625d593-bba5-4a1c-a53d-2d246268a816\n"; - private static final String DESTINATION_NAME = " name: Local JSON\n"; - private static final String DOCKER_REPO = " dockerRepository: airbyte/destination-local-json\n"; - private static final String DOCKER_IMAGE_TAG = " dockerImageTag: 0.1.4\n"; - private static final String GOOD_DES_DEF_YAML = - DESTINATION_DEFINITION_ID - + DESTINATION_NAME - + DOCKER_REPO - + DOCKER_IMAGE_TAG - + " documentationUrl: https://docs.airbyte.io/integrations/destinations/local-json"; - private static final String DUPLICATE_ID = - DESTINATION_DEFINITION_ID - + DESTINATION_NAME - + DOCKER_REPO - + DOCKER_IMAGE_TAG - + " documentationUrl: https://docs.airbyte.io/integrations/destinations/local-json" - + DESTINATION_DEFINITION_ID - + " name: JSON 2\n" - + DOCKER_REPO - + DOCKER_IMAGE_TAG - + " documentationUrl: https://docs.airbyte.io/integrations/destinations/local-json"; - private static final String DUPLICATE_NAME = - DESTINATION_DEFINITION_ID - + DESTINATION_NAME - + DOCKER_REPO - + DOCKER_IMAGE_TAG - + " documentationUrl: https://docs.airbyte.io/integrations/destinations/local-json\n" - + "- destinationDefinitionId: 8be1cf83-fde1-477f-a4ad-318d23c9f3c6\n" - + DESTINATION_NAME - + " dockerRepository: airbyte/destination-csv\n" - + " dockerImageTag: 0.1.8\n" - + " documentationUrl: https://docs.airbyte.io/integrations/destinations/local-csv"; - private static final String BAD_DATA = - DESTINATION_DEFINITION_ID - + DESTINATION_NAME - + DOCKER_REPO - + " dockerImageTag: 0.1.8\n" - + " documentationUrl"; - - @Nested - // vertifyAndConvertToJsonNode - class VerifyAndConvertToJsonNode { - - private static final String ID_NAME = "destinationDefinitionId"; - - private final ObjectMapper mapper = MoreMappers.initMapper(); - - @Test - // should correctly read yaml file - void correctlyReadTest() throws JsonProcessingException { - final var jsonDefs = YamlListToStandardDefinitions.verifyAndConvertToJsonNode(ID_NAME, GOOD_DES_DEF_YAML); - final var defList = mapper.treeToValue(jsonDefs, StandardDestinationDefinition[].class); - assertEquals(1, defList.length); - assertEquals("Local JSON", defList[0].getName()); - } - - @Test - // should error out on duplicate id - void duplicateIdTest() { - assertThrows(RuntimeException.class, () -> YamlListToStandardDefinitions.verifyAndConvertToJsonNode(ID_NAME, DUPLICATE_ID)); - } - - @Test - // should error out on duplicate name - void duplicateNameTest() { - assertThrows(RuntimeException.class, () -> YamlListToStandardDefinitions.verifyAndConvertToJsonNode(ID_NAME, DUPLICATE_NAME)); - } - - @Test - // should error out on empty file - void emptyFileTest() { - assertThrows(RuntimeException.class, () -> YamlListToStandardDefinitions.verifyAndConvertToJsonNode(ID_NAME, "")); - } - - @Test - // should error out on bad data - void badDataTest() { - assertThrows(RuntimeException.class, () -> YamlListToStandardDefinitions.verifyAndConvertToJsonNode(ID_NAME, BAD_DATA)); - } - - } - - @Nested - // verifyAndConvertToModelList - class VerifyAndConvertToModelList { - - @Test - // should correctly read yaml file - void correctlyReadTest() { - final var defs = YamlListToStandardDefinitions - .verifyAndConvertToModelList(StandardDestinationDefinition.class, GOOD_DES_DEF_YAML); - assertEquals(1, defs.size()); - assertEquals("Local JSON", defs.get(0).getName()); - } - - @Test - // should error out on duplicate id - void duplicateIdTest() { - assertThrows(RuntimeException.class, - () -> YamlListToStandardDefinitions.verifyAndConvertToModelList(StandardDestinationDefinition.class, DUPLICATE_ID)); - } - - @Test - // should error out on duplicate name - void duplicateNameTest() { - assertThrows(RuntimeException.class, - () -> YamlListToStandardDefinitions.verifyAndConvertToModelList(StandardDestinationDefinition.class, DUPLICATE_NAME)); - } - - @Test - // should error out on empty file - void emptyFileTest() { - assertThrows(RuntimeException.class, - () -> YamlListToStandardDefinitions.verifyAndConvertToModelList(StandardDestinationDefinition.class, "")); - } - - @Test - // should error out on bad data - void badDataTest() { - assertThrows(RuntimeException.class, - () -> YamlListToStandardDefinitions.verifyAndConvertToModelList(StandardDestinationDefinition.class, BAD_DATA)); - } - - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/validation/json/JsonSchemaValidatorTest.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/validation/json/JsonSchemaValidatorTest.java deleted file mode 100644 index 4cdf4cc89e56e..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/validation/json/JsonSchemaValidatorTest.java +++ /dev/null @@ -1,167 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.validation.json; - -import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.io.IOs; -import io.airbyte.commons.json.Jsons; -import java.io.File; -import java.io.IOException; -import java.net.URI; -import java.net.URISyntaxException; -import java.nio.file.Files; -import java.util.Set; -import org.junit.jupiter.api.Test; - -class JsonSchemaValidatorTest { - - private static final String PROPERTIES = "properties"; - - private static final JsonNode VALID_SCHEMA = Jsons.deserialize( - "{\n" + - " \"$schema\": \"http://json-schema.org/draft-07/schema#\",\n" + - " \"title\": \"test\",\n" + - " \"type\": \"object\",\n" + - " \"required\": [\"host\"],\n" + - " \"additionalProperties\": false,\n" + - " \"properties\": {\n" + - " \"host\": {\n" + - " \"type\": \"string\"\n" + - " },\n" + - " \"port\": {\n" + - " \"type\": \"integer\",\n" + - " \"minimum\": 0,\n" + - " \"maximum\": 65536\n" + - " }" + - " }\n" + - " }"); - - @Test - void testValidateSuccess() { - final JsonSchemaValidator validator = new JsonSchemaValidator(); - - final JsonNode object1 = Jsons.deserialize("{\"host\":\"abc\"}"); - assertTrue(validator.validate(VALID_SCHEMA, object1).isEmpty()); - assertDoesNotThrow(() -> validator.ensure(VALID_SCHEMA, object1)); - - final JsonNode object2 = Jsons.deserialize("{\"host\":\"abc\", \"port\":1}"); - assertTrue(validator.validate(VALID_SCHEMA, object2).isEmpty()); - assertDoesNotThrow(() -> validator.ensure(VALID_SCHEMA, object2)); - } - - @Test - void testValidateFail() { - final JsonSchemaValidator validator = new JsonSchemaValidator(); - - final JsonNode object1 = Jsons.deserialize("{}"); - assertFalse(validator.validate(VALID_SCHEMA, object1).isEmpty()); - assertThrows(JsonValidationException.class, () -> validator.ensure(VALID_SCHEMA, object1)); - - final JsonNode object2 = Jsons.deserialize("{\"host\":\"abc\", \"port\":9999999}"); - assertFalse(validator.validate(VALID_SCHEMA, object2).isEmpty()); - assertThrows(JsonValidationException.class, () -> validator.ensure(VALID_SCHEMA, object2)); - } - - @Test - void test() throws IOException { - final String schema = "{\n" - + " \"$schema\": \"http://json-schema.org/draft-07/schema#\",\n" - + " \"title\": \"OuterObject\",\n" - + " \"type\": \"object\",\n" - + " \"properties\": {\n" - + " \"field1\": {\n" - + " \"type\": \"string\"\n" - + " }\n" - + " },\n" - + " \"definitions\": {\n" - + " \"InnerObject\": {\n" - + " \"type\": \"object\",\n" - + " \"properties\": {\n" - + " \"field2\": {\n" - + " \"type\": \"string\"\n" - + " }\n" - + " }\n" - + " }\n" - + " }\n" - + "}\n"; - - final File schemaFile = IOs.writeFile(Files.createTempDirectory("test"), "schema.json", schema).toFile(); - - // outer object - assertTrue(JsonSchemaValidator.getSchema(schemaFile).get(PROPERTIES).has("field1")); - assertFalse(JsonSchemaValidator.getSchema(schemaFile).get(PROPERTIES).has("field2")); - // inner object - assertTrue(JsonSchemaValidator.getSchema(schemaFile, "InnerObject").get(PROPERTIES).has("field2")); - assertFalse(JsonSchemaValidator.getSchema(schemaFile, "InnerObject").get(PROPERTIES).has("field1")); - // non-existent object - assertNull(JsonSchemaValidator.getSchema(schemaFile, "NonExistentObject")); - } - - @Test - void testResolveReferences() throws IOException, URISyntaxException { - String referencableSchemas = """ - { - "definitions": { - "ref1": {"type": "string"}, - "ref2": {"type": "boolean"} - } - } - """; - final File schemaFile = IOs.writeFile(Files.createTempDirectory("test"), "WellKnownTypes.json", referencableSchemas).toFile(); - JsonSchemaValidator jsonSchemaValidator = - new JsonSchemaValidator(new URI("file://" + schemaFile.getParentFile().getAbsolutePath() + "/foo.json")); - - Set validationResult = jsonSchemaValidator.validate( - Jsons.deserialize(""" - { - "type": "object", - "properties": { - "prop1": {"$ref": "WellKnownTypes.json#/definitions/ref1"}, - "prop2": {"$ref": "WellKnownTypes.json#/definitions/ref2"} - } - } - """), - Jsons.deserialize(""" - { - "prop1": "foo", - "prop2": "false" - } - """)); - - assertEquals(Set.of("$.prop2: string found, boolean expected"), validationResult); - } - - @Test - void testIntializedMethodsShouldErrorIfNotInitialised() { - final var validator = new JsonSchemaValidator(); - - assertThrows(NullPointerException.class, () -> validator.testInitializedSchema("uninitialised", Jsons.deserialize("{}"))); - assertThrows(NullPointerException.class, () -> validator.ensureInitializedSchema("uninitialised", Jsons.deserialize("{}"))); - } - - @Test - void testIntializedMethodsShouldValidateIfInitialised() { - final JsonSchemaValidator validator = new JsonSchemaValidator(); - final var schemaName = "schema_name"; - final JsonNode goodJson = Jsons.deserialize("{\"host\":\"abc\"}"); - - validator.initializeSchemaValidator(schemaName, VALID_SCHEMA); - - assertTrue(validator.testInitializedSchema(schemaName, goodJson)); - assertDoesNotThrow(() -> validator.ensureInitializedSchema(schemaName, goodJson)); - - final JsonNode badJson = Jsons.deserialize("{\"host\":1}"); - assertFalse(validator.testInitializedSchema(schemaName, badJson)); - assertThrows(JsonValidationException.class, () -> validator.ensureInitializedSchema(schemaName, badJson)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/workers/TestHarnessUtilsTest.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/workers/TestHarnessUtilsTest.java deleted file mode 100644 index 793f96e44398c..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/workers/TestHarnessUtilsTest.java +++ /dev/null @@ -1,182 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers; - -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoInteractions; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.configoss.StandardSyncInput; -import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; -import io.airbyte.workers.internal.HeartbeatMonitor; -import io.airbyte.workers.test_utils.TestConfigHelpers; -import java.time.Duration; -import java.time.temporal.ChronoUnit; -import java.util.Map; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.function.BiConsumer; -import org.apache.commons.lang3.tuple.ImmutablePair; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Nested; -import org.junit.jupiter.api.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class TestHarnessUtilsTest { - - private static final Logger LOGGER = LoggerFactory.getLogger(GentleCloseWithHeartbeat.class); - - @Nested - class GentleCloseWithHeartbeat { - - private final Duration CHECK_HEARTBEAT_DURATION = Duration.of(10, ChronoUnit.MILLIS); - - private final Duration SHUTDOWN_TIME_DURATION = Duration.of(100, ChronoUnit.MILLIS); - - private Process process; - private HeartbeatMonitor heartbeatMonitor; - private BiConsumer forceShutdown; - - @SuppressWarnings("unchecked") - @BeforeEach - void setup() { - process = mock(Process.class); - heartbeatMonitor = mock(HeartbeatMonitor.class); - forceShutdown = mock(BiConsumer.class); - } - - private void runShutdown() { - gentleCloseWithHeartbeat( - process, - heartbeatMonitor, - SHUTDOWN_TIME_DURATION, - CHECK_HEARTBEAT_DURATION, - SHUTDOWN_TIME_DURATION, - forceShutdown); - } - - @SuppressWarnings("BusyWait") - // Verify that shutdown waits indefinitely when heartbeat and process are healthy. - @Test - void testStartsWait() throws InterruptedException { - when(process.isAlive()).thenReturn(true); - final AtomicInteger recordedBeats = new AtomicInteger(0); - doAnswer((ignored) -> { - recordedBeats.incrementAndGet(); - return true; - }).when(heartbeatMonitor).isBeating(); - - final Thread thread = new Thread(this::runShutdown); - - thread.start(); - - // block until the loop is running. - while (recordedBeats.get() < 3) { - Thread.sleep(10); - } - } - - @Test - // Test heartbeat ends and graceful shutdown. - void testGracefulShutdown() { - when(heartbeatMonitor.isBeating()).thenReturn(false); - when(process.isAlive()).thenReturn(false); - - runShutdown(); - - verifyNoInteractions(forceShutdown); - } - - @Test - // Test heartbeat ends and shutdown is forced. - void testForcedShutdown() { - when(heartbeatMonitor.isBeating()).thenReturn(false); - when(process.isAlive()).thenReturn(true); - - runShutdown(); - - verify(forceShutdown).accept(process, SHUTDOWN_TIME_DURATION); - } - - @Test - // Test process dies. - void testProcessDies() { - when(heartbeatMonitor.isBeating()).thenReturn(true); - when(process.isAlive()).thenReturn(false); - runShutdown(); - - verifyNoInteractions(forceShutdown); - } - - } - - @Test - void testMapStreamNamesToSchemasWithNullNamespace() { - final ImmutablePair syncPair = TestConfigHelpers.createSyncConfig(); - final StandardSyncInput syncInput = syncPair.getValue(); - final Map mapOutput = TestHarnessUtils.mapStreamNamesToSchemas(syncInput); - assertNotNull(mapOutput.get(new AirbyteStreamNameNamespacePair("user_preferences", null))); - } - - @Test - void testMapStreamNamesToSchemasWithMultipleNamespaces() { - final ImmutablePair syncPair = TestConfigHelpers.createSyncConfig(true); - final StandardSyncInput syncInput = syncPair.getValue(); - final Map mapOutput = TestHarnessUtils.mapStreamNamesToSchemas(syncInput); - assertNotNull(mapOutput.get(new AirbyteStreamNameNamespacePair("user_preferences", "namespace"))); - assertNotNull(mapOutput.get(new AirbyteStreamNameNamespacePair("user_preferences", "namespace2"))); - } - - /** - * As long as the the heartbeatMonitor detects a heartbeat, the process will be allowed to continue. - * This method checks the heartbeat once every minute. Once there is no heartbeat detected, if the - * process has ended, then the method returns. If the process is still running it is given a grace - * period of the timeout arguments passed into the method. Once those expire the process is killed - * forcibly. If the process cannot be killed, this method will log that this is the case, but then - * returns. - * - * @param process - process to monitor. - * @param heartbeatMonitor - tracks if the heart is still beating for the given process. - * @param gracefulShutdownDuration - grace period to give the process to die after its heart stops - * beating. - * @param checkHeartbeatDuration - frequency with which the heartbeat of the process is checked. - * @param forcedShutdownDuration - amount of time to wait if a process needs to be destroyed - * forcibly. - */ - static void gentleCloseWithHeartbeat(final Process process, - final HeartbeatMonitor heartbeatMonitor, - final Duration gracefulShutdownDuration, - final Duration checkHeartbeatDuration, - final Duration forcedShutdownDuration, - final BiConsumer forceShutdown) { - while (process.isAlive() && heartbeatMonitor.isBeating()) { - try { - process.waitFor(checkHeartbeatDuration.toMillis(), TimeUnit.MILLISECONDS); - } catch (final InterruptedException e) { - LOGGER.error("Exception while waiting for process to finish", e); - } - } - - if (process.isAlive()) { - try { - process.waitFor(gracefulShutdownDuration.toMillis(), TimeUnit.MILLISECONDS); - } catch (final InterruptedException e) { - LOGGER.error("Exception during grace period for process to finish. This can happen when cancelling jobs."); - } - } - - // if we were unable to exist gracefully, force shutdown... - if (process.isAlive()) { - forceShutdown.accept(process, forcedShutdownDuration); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/workers/helper/CatalogClientConvertersTest.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/workers/helper/CatalogClientConvertersTest.java deleted file mode 100644 index b2a009c5e6434..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/workers/helper/CatalogClientConvertersTest.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.helper; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import com.google.common.collect.Lists; -import io.airbyte.commons.text.Names; -import io.airbyte.protocol.models.AirbyteCatalog; -import io.airbyte.protocol.models.AirbyteStream; -import io.airbyte.protocol.models.CatalogHelpers; -import io.airbyte.protocol.models.Field; -import io.airbyte.protocol.models.JsonSchemaType; -import io.airbyte.protocol.models.SyncMode; -import java.util.Collections; -import java.util.List; -import org.junit.jupiter.api.Test; - -class CatalogClientConvertersTest { - - public static final String ID_FIELD_NAME = "id"; - private static final String STREAM_NAME = "users-data"; - private static final AirbyteStream STREAM = new AirbyteStream() - .withName(STREAM_NAME) - .withJsonSchema( - CatalogHelpers.fieldsToJsonSchema(Field.of(ID_FIELD_NAME, JsonSchemaType.STRING))) - .withDefaultCursorField(Lists.newArrayList(ID_FIELD_NAME)) - .withSourceDefinedCursor(false) - .withSourceDefinedPrimaryKey(Collections.emptyList()) - .withSupportedSyncModes(List.of(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)); - - private static final io.airbyte.api.client.model.generated.AirbyteStream CLIENT_STREAM = - new io.airbyte.api.client.model.generated.AirbyteStream() - .name(STREAM_NAME) - .jsonSchema(CatalogHelpers.fieldsToJsonSchema(Field.of(ID_FIELD_NAME, JsonSchemaType.STRING))) - .defaultCursorField(Lists.newArrayList(ID_FIELD_NAME)) - .sourceDefinedCursor(false) - .sourceDefinedPrimaryKey(Collections.emptyList()) - .supportedSyncModes(List.of(io.airbyte.api.client.model.generated.SyncMode.FULL_REFRESH, - io.airbyte.api.client.model.generated.SyncMode.INCREMENTAL)); - private static final io.airbyte.api.client.model.generated.AirbyteStreamConfiguration CLIENT_DEFAULT_STREAM_CONFIGURATION = - new io.airbyte.api.client.model.generated.AirbyteStreamConfiguration() - .syncMode(io.airbyte.api.client.model.generated.SyncMode.FULL_REFRESH) - .cursorField(Lists.newArrayList(ID_FIELD_NAME)) - .destinationSyncMode(io.airbyte.api.client.model.generated.DestinationSyncMode.APPEND) - .primaryKey(Collections.emptyList()) - .aliasName(Names.toAlphanumericAndUnderscore(STREAM_NAME)) - .selected(true); - - private static final AirbyteCatalog BASIC_MODEL_CATALOG = new AirbyteCatalog().withStreams( - Lists.newArrayList(STREAM)); - - private static final io.airbyte.api.client.model.generated.AirbyteCatalog EXPECTED_CLIENT_CATALOG = - new io.airbyte.api.client.model.generated.AirbyteCatalog() - .streams(Lists.newArrayList( - new io.airbyte.api.client.model.generated.AirbyteStreamAndConfiguration() - .stream(CLIENT_STREAM) - .config(CLIENT_DEFAULT_STREAM_CONFIGURATION))); - - @Test - void testConvertToClientAPI() { - assertEquals(EXPECTED_CLIENT_CATALOG, - CatalogClientConverters.toAirbyteCatalogClientApi(BASIC_MODEL_CATALOG)); - } - - @Test - void testConvertToProtocol() { - assertEquals(BASIC_MODEL_CATALOG, - CatalogClientConverters.toAirbyteProtocol(EXPECTED_CLIENT_CATALOG)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/workers/helper/ConnectorConfigUpdaterTest.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/workers/helper/ConnectorConfigUpdaterTest.java deleted file mode 100644 index 77c1e408fad80..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/workers/helper/ConnectorConfigUpdaterTest.java +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.helper; - -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.api.client.generated.DestinationApi; -import io.airbyte.api.client.generated.SourceApi; -import io.airbyte.api.client.invoker.generated.ApiException; -import io.airbyte.api.client.model.generated.DestinationIdRequestBody; -import io.airbyte.api.client.model.generated.DestinationRead; -import io.airbyte.api.client.model.generated.DestinationUpdate; -import io.airbyte.api.client.model.generated.SourceIdRequestBody; -import io.airbyte.api.client.model.generated.SourceRead; -import io.airbyte.api.client.model.generated.SourceUpdate; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.Config; -import java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; - -class ConnectorConfigUpdaterTest { - - private static final UUID SOURCE_ID = UUID.randomUUID(); - private static final String SOURCE_NAME = "source-stripe"; - private static final UUID DESTINATION_ID = UUID.randomUUID(); - private static final String DESTINATION_NAME = "destination-google-sheets"; - - private final SourceApi mSourceApi = mock(SourceApi.class); - private final DestinationApi mDestinationApi = mock(DestinationApi.class); - - private ConnectorConfigUpdater connectorConfigUpdater; - - @BeforeEach - void setUp() throws ApiException { - when(mSourceApi.getSource(new SourceIdRequestBody() - .sourceId(SOURCE_ID))).thenReturn(new SourceRead() - .sourceId(SOURCE_ID) - .name(SOURCE_NAME)); - - when(mDestinationApi.getDestination(new DestinationIdRequestBody() - .destinationId(DESTINATION_ID))).thenReturn(new DestinationRead() - .destinationId(DESTINATION_ID) - .name(DESTINATION_NAME)); - - connectorConfigUpdater = new ConnectorConfigUpdater(mSourceApi, mDestinationApi); - } - - @Test - void testPersistSourceConfig() throws ApiException { - final Config newConfiguration = new Config().withAdditionalProperty("key", "new_value"); - final JsonNode configJson = Jsons.jsonNode(newConfiguration.getAdditionalProperties()); - - final SourceUpdate expectedSourceUpdate = new SourceUpdate() - .sourceId(SOURCE_ID) - .name(SOURCE_NAME) - .connectionConfiguration(configJson); - - when(mSourceApi.updateSource(Mockito.any())).thenReturn(new SourceRead().connectionConfiguration(configJson)); - - connectorConfigUpdater.updateSource(SOURCE_ID, newConfiguration); - verify(mSourceApi).updateSource(expectedSourceUpdate); - } - - @Test - void testPersistDestinationConfig() throws ApiException { - final Config newConfiguration = new Config().withAdditionalProperty("key", "new_value"); - final JsonNode configJson = Jsons.jsonNode(newConfiguration.getAdditionalProperties()); - - final DestinationUpdate expectedDestinationUpdate = new DestinationUpdate() - .destinationId(DESTINATION_ID) - .name(DESTINATION_NAME) - .connectionConfiguration(configJson); - - when(mDestinationApi.updateDestination(Mockito.any())).thenReturn(new DestinationRead().connectionConfiguration(configJson)); - - connectorConfigUpdater.updateDestination(DESTINATION_ID, newConfiguration); - verify(mDestinationApi).updateDestination(expectedDestinationUpdate); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/workers/helper/FailureHelperTest.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/workers/helper/FailureHelperTest.java deleted file mode 100644 index 4baf66fb51f25..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/workers/helper/FailureHelperTest.java +++ /dev/null @@ -1,192 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.helper; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNull; - -import io.airbyte.configoss.FailureReason; -import io.airbyte.configoss.FailureReason.FailureOrigin; -import io.airbyte.configoss.FailureReason.FailureType; -import io.airbyte.configoss.Metadata; -import io.airbyte.protocol.models.AirbyteErrorTraceMessage; -import io.airbyte.protocol.models.AirbyteTraceMessage; -import io.airbyte.workers.helper.FailureHelper.ConnectorCommand; -import io.airbyte.workers.test_utils.AirbyteMessageUtils; -import java.util.List; -import java.util.Map; -import java.util.Set; -import org.junit.jupiter.api.Test; - -class FailureHelperTest { - - private static final String FROM_TRACE_MESSAGE_KEY = "from_trace_message"; - private static final String CONNECTOR_COMMAND_KEY = "connector_command"; - private static final String JOB_ID_KEY = "jobId"; - private static final String ATTEMPT_NUMBER_KEY = "attemptNumber"; - - private static final FailureReason TRACE_FAILURE_REASON = new FailureReason() - .withInternalMessage("internal message") - .withStacktrace("stack trace") - .withTimestamp(Long.valueOf(1111112)) - .withMetadata(new Metadata() - .withAdditionalProperty(JOB_ID_KEY, 12345) - .withAdditionalProperty(ATTEMPT_NUMBER_KEY, 1) - .withAdditionalProperty(FROM_TRACE_MESSAGE_KEY, true)); - - private static final FailureReason TRACE_FAILURE_REASON_2 = new FailureReason() - .withInternalMessage("internal message") - .withStacktrace("stack trace") - .withTimestamp(Long.valueOf(1111113)) - .withMetadata(new Metadata() - .withAdditionalProperty(JOB_ID_KEY, 12345) - .withAdditionalProperty(ATTEMPT_NUMBER_KEY, 1) - .withAdditionalProperty(FROM_TRACE_MESSAGE_KEY, true)); - - private static final FailureReason EXCEPTION_FAILURE_REASON = new FailureReason() - .withInternalMessage("internal message") - .withStacktrace("stack trace") - .withTimestamp(Long.valueOf(1111111)) - .withMetadata(new Metadata() - .withAdditionalProperty(JOB_ID_KEY, 12345) - .withAdditionalProperty(ATTEMPT_NUMBER_KEY, 1)); - - private static final AirbyteTraceMessage TRACE_MESSAGE = AirbyteMessageUtils.createErrorTraceMessage( - "trace message error", - Double.valueOf(123), - AirbyteErrorTraceMessage.FailureType.SYSTEM_ERROR); - - @Test - void testGenericFailureFromTrace() throws Exception { - final AirbyteTraceMessage traceMessage = AirbyteMessageUtils.createErrorTraceMessage("trace message error", Double.valueOf(123), - AirbyteErrorTraceMessage.FailureType.CONFIG_ERROR); - final FailureReason failureReason = FailureHelper.genericFailure(traceMessage, Long.valueOf(12345), 1); - assertEquals(FailureType.CONFIG_ERROR, failureReason.getFailureType()); - } - - @Test - void testGenericFailureFromTraceNoFailureType() throws Exception { - final FailureReason failureReason = FailureHelper.genericFailure(TRACE_MESSAGE, Long.valueOf(12345), 1); - assertEquals(failureReason.getFailureType(), FailureType.SYSTEM_ERROR); - } - - @Test - void testConnectorCommandFailure() { - final Throwable t = new RuntimeException(); - final Long jobId = 12345L; - final Integer attemptNumber = 1; - final FailureReason failureReason = FailureHelper.connectorCommandFailure(t, jobId, attemptNumber, ConnectorCommand.CHECK); - - final Map metadata = failureReason.getMetadata().getAdditionalProperties(); - assertEquals("check", metadata.get(CONNECTOR_COMMAND_KEY)); - assertNull(metadata.get(FROM_TRACE_MESSAGE_KEY)); - assertEquals(jobId, metadata.get(JOB_ID_KEY)); - assertEquals(attemptNumber, metadata.get(ATTEMPT_NUMBER_KEY)); - } - - @Test - void testConnectorCommandFailureFromTrace() { - final Long jobId = 12345L; - final Integer attemptNumber = 1; - final FailureReason failureReason = FailureHelper.connectorCommandFailure(TRACE_MESSAGE, jobId, attemptNumber, ConnectorCommand.DISCOVER); - - final Map metadata = failureReason.getMetadata().getAdditionalProperties(); - assertEquals("discover", metadata.get(CONNECTOR_COMMAND_KEY)); - assertEquals(true, metadata.get(FROM_TRACE_MESSAGE_KEY)); - assertEquals(jobId, metadata.get(JOB_ID_KEY)); - assertEquals(attemptNumber, metadata.get(ATTEMPT_NUMBER_KEY)); - } - - @Test - void testSourceFailure() { - final Throwable t = new RuntimeException(); - final Long jobId = 12345L; - final Integer attemptNumber = 1; - final FailureReason failureReason = FailureHelper.sourceFailure(t, jobId, attemptNumber); - assertEquals(FailureOrigin.SOURCE, failureReason.getFailureOrigin()); - - final Map metadata = failureReason.getMetadata().getAdditionalProperties(); - assertEquals("read", metadata.get(CONNECTOR_COMMAND_KEY)); - assertNull(metadata.get(FROM_TRACE_MESSAGE_KEY)); - assertEquals(jobId, metadata.get(JOB_ID_KEY)); - assertEquals(attemptNumber, metadata.get(ATTEMPT_NUMBER_KEY)); - } - - @Test - void testSourceFailureFromTrace() { - final Long jobId = 12345L; - final Integer attemptNumber = 1; - final FailureReason failureReason = FailureHelper.sourceFailure(TRACE_MESSAGE, jobId, attemptNumber); - assertEquals(FailureOrigin.SOURCE, failureReason.getFailureOrigin()); - - final Map metadata = failureReason.getMetadata().getAdditionalProperties(); - assertEquals("read", metadata.get(CONNECTOR_COMMAND_KEY)); - assertEquals(true, metadata.get(FROM_TRACE_MESSAGE_KEY)); - assertEquals(jobId, metadata.get(JOB_ID_KEY)); - assertEquals(attemptNumber, metadata.get(ATTEMPT_NUMBER_KEY)); - } - - @Test - void testDestinationFailure() { - final Throwable t = new RuntimeException(); - final Long jobId = 12345L; - final Integer attemptNumber = 1; - final FailureReason failureReason = FailureHelper.destinationFailure(t, jobId, attemptNumber); - assertEquals(FailureOrigin.DESTINATION, failureReason.getFailureOrigin()); - - final Map metadata = failureReason.getMetadata().getAdditionalProperties(); - assertEquals("write", metadata.get(CONNECTOR_COMMAND_KEY)); - assertNull(metadata.get(FROM_TRACE_MESSAGE_KEY)); - assertEquals(jobId, metadata.get(JOB_ID_KEY)); - assertEquals(attemptNumber, metadata.get(ATTEMPT_NUMBER_KEY)); - } - - @Test - void testDestinationFailureFromTrace() { - final Long jobId = 12345L; - final Integer attemptNumber = 1; - final FailureReason failureReason = FailureHelper.destinationFailure(TRACE_MESSAGE, jobId, attemptNumber); - assertEquals(FailureOrigin.DESTINATION, failureReason.getFailureOrigin()); - - final Map metadata = failureReason.getMetadata().getAdditionalProperties(); - assertEquals("write", metadata.get(CONNECTOR_COMMAND_KEY)); - assertEquals(true, metadata.get(FROM_TRACE_MESSAGE_KEY)); - assertEquals(jobId, metadata.get(JOB_ID_KEY)); - assertEquals(attemptNumber, metadata.get(ATTEMPT_NUMBER_KEY)); - } - - @Test - void testCheckFailure() { - final Throwable t = new RuntimeException(); - final Long jobId = 12345L; - final Integer attemptNumber = 1; - final FailureReason failureReason = FailureHelper.checkFailure(t, jobId, attemptNumber, FailureOrigin.DESTINATION); - assertEquals(FailureOrigin.DESTINATION, failureReason.getFailureOrigin()); - - final Map metadata = failureReason.getMetadata().getAdditionalProperties(); - assertEquals("check", metadata.get(CONNECTOR_COMMAND_KEY)); - assertNull(metadata.get(FROM_TRACE_MESSAGE_KEY)); - assertEquals(jobId, metadata.get(JOB_ID_KEY)); - assertEquals(attemptNumber, metadata.get(ATTEMPT_NUMBER_KEY)); - } - - @Test - void testOrderedFailures() throws Exception { - final List failureReasonList = - FailureHelper.orderedFailures(Set.of(TRACE_FAILURE_REASON_2, TRACE_FAILURE_REASON, EXCEPTION_FAILURE_REASON)); - assertEquals(failureReasonList.get(0), TRACE_FAILURE_REASON); - } - - @Test - void testUnknownOriginFailure() { - final Throwable t = new RuntimeException(); - final Long jobId = 12345L; - final Integer attemptNumber = 1; - final FailureReason failureReason = FailureHelper.unknownOriginFailure(t, jobId, attemptNumber); - assertEquals(FailureOrigin.UNKNOWN, failureReason.getFailureOrigin()); - assertEquals("An unknown failure occurred", failureReason.getExternalMessage()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/workers/internal/DefaultAirbyteStreamFactoryTest.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/workers/internal/DefaultAirbyteStreamFactoryTest.java deleted file mode 100644 index 1eabc015de38e..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/java/io/airbyte/workers/internal/DefaultAirbyteStreamFactoryTest.java +++ /dev/null @@ -1,144 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.internal; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoInteractions; -import static org.mockito.Mockito.verifyNoMoreInteractions; -import static org.mockito.Mockito.when; - -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.logging.MdcScope.Builder; -import io.airbyte.protocol.models.AirbyteLogMessage; -import io.airbyte.protocol.models.AirbyteMessage; -import io.airbyte.workers.test_utils.AirbyteMessageUtils; -import java.io.BufferedReader; -import java.io.ByteArrayInputStream; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.nio.charset.StandardCharsets; -import java.util.Collections; -import java.util.Optional; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Disabled; -import org.junit.jupiter.api.Test; -import org.slf4j.Logger; - -class DefaultAirbyteStreamFactoryTest { - - private static final String STREAM_NAME = "user_preferences"; - private static final String FIELD_NAME = "favorite_color"; - - private AirbyteProtocolPredicate protocolPredicate; - private Logger logger; - - @BeforeEach - void setup() { - protocolPredicate = mock(AirbyteProtocolPredicate.class); - when(protocolPredicate.test(any())).thenReturn(true); - logger = mock(Logger.class); - } - - @Test - void testValid() { - final AirbyteMessage record1 = AirbyteMessageUtils.createRecordMessage(STREAM_NAME, FIELD_NAME, "green"); - - final Stream messageStream = stringToMessageStream(Jsons.serialize(record1)); - final Stream expectedStream = Stream.of(record1); - - assertEquals(expectedStream.collect(Collectors.toList()), messageStream.collect(Collectors.toList())); - verifyNoInteractions(logger); - } - - @Test - void testLoggingLine() { - final String invalidRecord = "invalid line"; - - final Stream messageStream = stringToMessageStream(invalidRecord); - - assertEquals(Collections.emptyList(), messageStream.collect(Collectors.toList())); - verify(logger).info(anyString()); - verifyNoMoreInteractions(logger); - } - - @Test - void testLoggingLevel() { - final AirbyteMessage logMessage = AirbyteMessageUtils.createLogMessage(AirbyteLogMessage.Level.WARN, "warning"); - - final Stream messageStream = stringToMessageStream(Jsons.serialize(logMessage)); - - assertEquals(Collections.emptyList(), messageStream.collect(Collectors.toList())); - verify(logger).warn("warning"); - verifyNoMoreInteractions(logger); - } - - @Test - void testFailValidation() { - final String invalidRecord = "{ \"fish\": \"tuna\"}"; - - when(protocolPredicate.test(Jsons.deserialize(invalidRecord))).thenReturn(false); - - final Stream messageStream = stringToMessageStream(invalidRecord); - - assertEquals(Collections.emptyList(), messageStream.collect(Collectors.toList())); - verify(logger).error(anyString(), anyString()); - verifyNoMoreInteractions(logger); - } - - @Test - void testFailDeserialization() { - final String invalidRecord = "{ \"type\": \"abc\"}"; - - when(protocolPredicate.test(Jsons.deserialize(invalidRecord))).thenReturn(true); - - final Stream messageStream = stringToMessageStream(invalidRecord); - - assertEquals(Collections.emptyList(), messageStream.collect(Collectors.toList())); - verify(logger).error(anyString(), anyString()); - verifyNoMoreInteractions(logger); - } - - @Test - void testFailsSize() { - final AirbyteMessage record1 = AirbyteMessageUtils.createRecordMessage(STREAM_NAME, FIELD_NAME, "green"); - - final InputStream inputStream = new ByteArrayInputStream(record1.toString().getBytes(StandardCharsets.UTF_8)); - final BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8)); - - final Stream messageStream = - new DefaultAirbyteStreamFactory(protocolPredicate, logger, new Builder(), Optional.of(RuntimeException.class), 1l).create(bufferedReader); - - assertThrows(RuntimeException.class, () -> messageStream.toList()); - } - - @Test - @Disabled - void testMissingNewLineBetweenValidRecords() { - final AirbyteMessage record1 = AirbyteMessageUtils.createRecordMessage(STREAM_NAME, FIELD_NAME, "green"); - final AirbyteMessage record2 = AirbyteMessageUtils.createRecordMessage(STREAM_NAME, FIELD_NAME, "yellow"); - - final String inputString = Jsons.serialize(record1) + Jsons.serialize(record2); - - final Stream messageStream = stringToMessageStream(inputString); - - assertEquals(Collections.emptyList(), messageStream.collect(Collectors.toList())); - verify(logger).error(anyString(), anyString()); - verifyNoMoreInteractions(logger); - } - - private Stream stringToMessageStream(final String inputString) { - final InputStream inputStream = new ByteArrayInputStream(inputString.getBytes(StandardCharsets.UTF_8)); - final BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8)); - return new DefaultAirbyteStreamFactory(protocolPredicate, logger, new Builder(), Optional.empty()).create(bufferedReader); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/cli/ClisTest.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/cli/ClisTest.kt new file mode 100644 index 0000000000000..4b823ecc1b552 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/cli/ClisTest.kt @@ -0,0 +1,61 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.cli + +import org.apache.commons.cli.DefaultParser +import org.apache.commons.cli.Option +import org.apache.commons.cli.Options +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class ClisTest { + @Test + fun testParse() { + val optionA = Option.builder("a").required(true).hasArg(true).build() + val optionB = Option.builder("b").required(true).hasArg(true).build() + val options = Options().addOption(optionA).addOption(optionB) + val args = arrayOf("-a", ALPHA, "-b", BETA) + val parsed = Clis.parse(args, options, DefaultParser()) + Assertions.assertEquals(ALPHA, parsed.options[0].value) + Assertions.assertEquals(BETA, parsed.options[1].value) + } + + @Test + fun testParseNonConforming() { + val optionA = Option.builder("a").required(true).hasArg(true).build() + val optionB = Option.builder("b").required(true).hasArg(true).build() + val options = Options().addOption(optionA).addOption(optionB) + val args = arrayOf("-a", ALPHA, "-b", BETA, "-c", "charlie") + Assertions.assertThrows(IllegalArgumentException::class.java) { + Clis.parse(args, options, DefaultParser()) + } + } + + @Test + fun testParseNonConformingWithSyntax() { + val optionA = Option.builder("a").required(true).hasArg(true).build() + val optionB = Option.builder("b").required(true).hasArg(true).build() + val options = Options().addOption(optionA).addOption(optionB) + val args = arrayOf("-a", ALPHA, "-b", BETA, "-c", "charlie") + Assertions.assertThrows(IllegalArgumentException::class.java) { + Clis.parse(args, options, DefaultParser(), "search") + } + } + + @Test + fun testRelaxedParser() { + val optionA = Option.builder("a").required(true).hasArg(true).build() + val optionB = Option.builder("b").required(true).hasArg(true).build() + val options = Options().addOption(optionA).addOption(optionB) + val args = arrayOf("-a", ALPHA, "-b", BETA, "-c", "charlie") + val parsed = Clis.parse(args, options, Clis.getRelaxedParser()) + Assertions.assertEquals(ALPHA, parsed.options[0].value) + Assertions.assertEquals(BETA, parsed.options[1].value) + } + + companion object { + private const val ALPHA = "alpha" + private const val BETA = "beta" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/concurrency/CompletableFuturesTest.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/concurrency/CompletableFuturesTest.kt new file mode 100644 index 0000000000000..e2b4a06546664 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/concurrency/CompletableFuturesTest.kt @@ -0,0 +1,71 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.concurrency + +import io.airbyte.commons.functional.Either +import java.util.* +import java.util.concurrent.CompletableFuture +import java.util.concurrent.CompletionStage +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class CompletableFuturesTest { + @Test + fun testAllOf() { + // Complete in random order + val futures = + Arrays.asList>( + returnSuccessWithDelay(1, 2000), + returnSuccessWithDelay(2, 200), + returnSuccessWithDelay(3, 500), + returnSuccessWithDelay(4, 100), + returnFailureWithDelay("Fail 5", 2000), + returnFailureWithDelay("Fail 6", 300) + ) + + val allOfResult = CompletableFutures.allOf(futures).toCompletableFuture() + val result = allOfResult.join() + val success = + result.stream().filter { obj: Either -> obj.isRight() }.toList() + Assertions.assertEquals( + success, + Arrays.asList( + Either.right(1), + Either.right(2), + Either.right(3), + Either.right(4) + ) + ) + // Extract wrapped CompletionException messages. + val failureMessages = + result + .stream() + .filter { obj: Either -> obj.isLeft() } + .map { either: Either -> either.left!!.cause!!.message } + .toList() + Assertions.assertEquals(failureMessages, mutableListOf("Fail 5", "Fail 6")) + } + + private fun returnSuccessWithDelay(value: Int, delayMs: Long): CompletableFuture { + return CompletableFuture.supplyAsync { + try { + Thread.sleep(delayMs) + } catch (e: InterruptedException) { + throw RuntimeException(e) + } + value + } + } + + private fun returnFailureWithDelay(message: String, delayMs: Long): CompletableFuture { + return CompletableFuture.supplyAsync { + try { + Thread.sleep(delayMs) + } catch (e: InterruptedException) { + throw RuntimeException(e) + } + throw RuntimeException(message) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/concurrency/WaitingUtilsTest.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/concurrency/WaitingUtilsTest.kt new file mode 100644 index 0000000000000..4e9f2c3eaf248 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/concurrency/WaitingUtilsTest.kt @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.concurrency + +import java.time.Duration +import java.util.function.Supplier +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.mockito.Mockito +import org.mockito.kotlin.mock + +internal class WaitingUtilsTest { + @Test + fun testWaitForConditionConditionMet() { + val condition: Supplier = mock() + Mockito.`when`(condition.get()).thenReturn(false).thenReturn(false).thenReturn(true) + Assertions.assertTrue( + WaitingUtils.waitForCondition(Duration.ofMillis(1), Duration.ofMillis(5), condition) + ) + } + + @Test + fun testWaitForConditionTimeout() { + val condition: Supplier = mock() + Mockito.`when`(condition.get()).thenReturn(false) + Assertions.assertFalse( + WaitingUtils.waitForCondition(Duration.ofMillis(1), Duration.ofMillis(5), condition) + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/enums/EnumsTest.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/enums/EnumsTest.kt new file mode 100644 index 0000000000000..f61ddf9435618 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/enums/EnumsTest.kt @@ -0,0 +1,100 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.enums + +import java.util.* +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class EnumsTest { + internal enum class E1 { + TEST, + TEST2 + } + + internal enum class E2 { + TEST + } + + internal enum class E3 { + TEST, + TEST2 + } + + internal enum class E4 { + TEST, + TEST3 + } + + @Test + fun testConversion() { + Assertions.assertEquals(E2.TEST, Enums.convertTo(E1.TEST, E2::class.java)) + } + + @Test + fun testConversionFails() { + Assertions.assertThrows(IllegalArgumentException::class.java) { + Enums.convertTo(E1.TEST2, E2::class.java) + } + } + + @Test + fun testSelfCompatible() { + Assertions.assertTrue(Enums.isCompatible(E1::class.java, E1::class.java)) + } + + @Test + fun testIsCompatible() { + Assertions.assertTrue(Enums.isCompatible(E1::class.java, E3::class.java)) + } + + @Test + fun testNotCompatibleDifferentNames() { + Assertions.assertFalse(Enums.isCompatible(E1::class.java, E4::class.java)) + } + + @Test + fun testNotCompatibleDifferentLength() { + Assertions.assertFalse(Enums.isCompatible(E1::class.java, E4::class.java)) + } + + @Test + fun testNotCompatibleDifferentLength2() { + Assertions.assertFalse(Enums.isCompatible(E4::class.java, E1::class.java)) + } + + internal enum class E5 { + VALUE_1, + VALUE_TWO, + value_three, + value_4 + } + + @Test + fun testToEnum() { + Assertions.assertEquals(Optional.of(E1.TEST), Enums.toEnum("test", E1::class.java)) + Assertions.assertEquals(Optional.of(E5.VALUE_1), Enums.toEnum("VALUE_1", E5::class.java)) + Assertions.assertEquals(Optional.of(E5.VALUE_1), Enums.toEnum("value_1", E5::class.java)) + Assertions.assertEquals( + Optional.of(E5.VALUE_TWO), + Enums.toEnum("VALUE_TWO", E5::class.java) + ) + Assertions.assertEquals(Optional.of(E5.VALUE_TWO), Enums.toEnum("valuetwo", E5::class.java)) + Assertions.assertEquals(Optional.of(E5.VALUE_TWO), Enums.toEnum("valueTWO", E5::class.java)) + Assertions.assertEquals( + Optional.of(E5.VALUE_TWO), + Enums.toEnum("valueTWO$", E5::class.java) + ) + Assertions.assertEquals( + Optional.of(E5.VALUE_TWO), + Enums.toEnum("___valueTWO___", E5::class.java) + ) + Assertions.assertEquals( + Optional.of(E5.value_three), + Enums.toEnum("VALUE_THREE", E5::class.java) + ) + Assertions.assertEquals(Optional.of(E5.value_4), Enums.toEnum("VALUE_4", E5::class.java)) + Assertions.assertEquals(Optional.empty(), Enums.toEnum("VALUE_5", E5::class.java)) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/features/FeatureFlagHelperTest.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/features/FeatureFlagHelperTest.kt new file mode 100644 index 0000000000000..d05280e024724 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/features/FeatureFlagHelperTest.kt @@ -0,0 +1,110 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.features + +import java.util.* +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.mockito.Mockito + +internal class FeatureFlagHelperTest { + private lateinit var featureFlags: FeatureFlags + + @BeforeEach + fun beforeEach() { + featureFlags = Mockito.mock(FeatureFlags::class.java) + } + + @Test + fun isFieldSelectionEnabledForWorkspaceWithEmptyString() { + Mockito.`when`(featureFlags!!.fieldSelectionWorkspaces()).thenReturn("") + + Assertions.assertFalse( + FeatureFlagHelper.isWorkspaceIncludedInFlag( + featureFlags, + { obj: FeatureFlags -> obj.fieldSelectionWorkspaces() }, + UUID.randomUUID(), + null + ) + ) + } + + @Test + fun isFieldSelectionEnabledForNullWorkspaceWithEmptyString() { + Mockito.`when`(featureFlags!!.fieldSelectionWorkspaces()).thenReturn("") + + Assertions.assertFalse( + FeatureFlagHelper.isWorkspaceIncludedInFlag( + featureFlags, + { obj: FeatureFlags -> obj.fieldSelectionWorkspaces() }, + null, + null + ) + ) + } + + @Test + fun isFieldSelectionEnabledForWorkspaceWithSpaceString() { + Mockito.`when`(featureFlags!!.fieldSelectionWorkspaces()).thenReturn(" ") + + Assertions.assertFalse( + FeatureFlagHelper.isWorkspaceIncludedInFlag( + featureFlags, + { obj: FeatureFlags -> obj.fieldSelectionWorkspaces() }, + UUID.randomUUID(), + null + ) + ) + } + + @Test + fun isFieldSelectionEnabledForWorkspaceWithNullString() { + Mockito.`when`(featureFlags!!.fieldSelectionWorkspaces()).thenReturn(null) + + Assertions.assertFalse( + FeatureFlagHelper.isWorkspaceIncludedInFlag( + featureFlags, + { obj: FeatureFlags -> obj.fieldSelectionWorkspaces() }, + UUID.randomUUID(), + null + ) + ) + } + + @Test + fun isFieldSelectionEnabledForWorkspaceWithSomeIdsAndAMatch() { + val workspaceId = UUID.randomUUID() + val randomId = UUID.randomUUID() + Mockito.`when`(featureFlags!!.fieldSelectionWorkspaces()) + .thenReturn("$randomId,$workspaceId") + + Assertions.assertTrue( + FeatureFlagHelper.isWorkspaceIncludedInFlag( + featureFlags, + { obj: FeatureFlags -> obj.fieldSelectionWorkspaces() }, + workspaceId, + null + ) + ) + } + + @Test + fun isFieldSelectionEnabledForWorkspaceWithSomeIdsAndNoMatch() { + val workspaceId = UUID.randomUUID() + val randomId1 = UUID.randomUUID() + val randomId2 = UUID.randomUUID() + Mockito.`when`(featureFlags!!.fieldSelectionWorkspaces()) + .thenReturn("$randomId1,$randomId2") + + Assertions.assertFalse( + FeatureFlagHelper.isWorkspaceIncludedInFlag( + featureFlags, + { obj: FeatureFlags -> obj.fieldSelectionWorkspaces() }, + workspaceId, + null + ) + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/io/IOsTest.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/io/IOsTest.kt new file mode 100644 index 0000000000000..7e5891651acaf --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/io/IOsTest.kt @@ -0,0 +1,99 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.io + +import com.google.common.collect.Iterables +import java.io.* +import java.nio.charset.StandardCharsets +import java.nio.file.Files +import java.nio.file.Path +import org.apache.commons.lang3.RandomStringUtils +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.mockito.Mockito + +internal class IOsTest { + @Test + @Throws(IOException::class) + fun testReadWrite() { + val path = Files.createTempDirectory("tmp") + + val filePath = IOs.writeFile(path, FILE, ABC) + + Assertions.assertEquals(path.resolve(FILE), filePath) + Assertions.assertEquals(ABC, IOs.readFile(path, FILE)) + Assertions.assertEquals(ABC, IOs.readFile(path.resolve(FILE))) + } + + @Test + @Throws(IOException::class) + fun testWriteBytes() { + val path = Files.createTempDirectory("tmp") + + val filePath = IOs.writeFile(path.resolve(FILE), ABC.toByteArray(StandardCharsets.UTF_8)) + + Assertions.assertEquals(path.resolve(FILE), filePath) + Assertions.assertEquals(ABC, IOs.readFile(path, FILE)) + } + + @Test + @Throws(IOException::class) + fun testWriteFileToRandomDir() { + val contents = "something to remember" + val tmpFilePath = IOs.writeFileToRandomTmpDir("file.txt", contents) + Assertions.assertEquals(contents, Files.readString(Path.of(tmpFilePath))) + } + + @Test + @Throws(IOException::class) + fun testGetTailDoesNotExist() { + val tail = IOs.getTail(100, Path.of(RandomStringUtils.randomAlphanumeric(100))) + Assertions.assertEquals(emptyList(), tail) + } + + @Test + @Throws(IOException::class) + fun testGetTailExists() { + val stdoutFile = Files.createTempFile("job-history-handler-test", "stdout") + + val head = listOf("line1", "line2", "line3", "line4") + + val expectedTail = listOf("line5", "line6", "line7", "line8") + + val writer: Writer = + BufferedWriter(FileWriter(stdoutFile.toString(), StandardCharsets.UTF_8, true)) + + for (line in Iterables.concat(head, expectedTail)) { + writer.write(line + "\n") + } + + writer.close() + + val tail = IOs.getTail(expectedTail.size, stdoutFile) + Assertions.assertEquals(expectedTail, tail) + } + + @Test + fun testInputStream() { + Assertions.assertThrows(RuntimeException::class.java) { + IOs.inputStream(Path.of("idontexist")) + } + } + + @Test + @Throws(IOException::class) + fun testSilentClose() { + val closeable = Mockito.mock(Closeable::class.java) + + Assertions.assertDoesNotThrow { IOs.silentClose(closeable) } + + Mockito.doThrow(IOException()).`when`(closeable).close() + Assertions.assertThrows(RuntimeException::class.java) { IOs.silentClose(closeable) } + } + + companion object { + private const val ABC = "abc" + private const val FILE = "file" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/io/LineGobblerTest.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/io/LineGobblerTest.kt new file mode 100644 index 0000000000000..95c563d483fd9 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/io/LineGobblerTest.kt @@ -0,0 +1,61 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.io + +import com.google.common.collect.ImmutableMap +import com.google.common.util.concurrent.MoreExecutors +import java.io.ByteArrayInputStream +import java.io.InputStream +import java.nio.charset.StandardCharsets +import java.util.concurrent.ExecutorService +import java.util.function.Consumer +import org.junit.jupiter.api.Test +import org.mockito.ArgumentMatchers +import org.mockito.Mockito +import org.mockito.kotlin.mock + +internal class LineGobblerTest { + @Test + fun readAllLines() { + val consumer: Consumer = mock() + val `is`: InputStream = + ByteArrayInputStream("test\ntest2\n".toByteArray(StandardCharsets.UTF_8)) + val executor: ExecutorService = Mockito.spy(MoreExecutors.newDirectExecutorService()) + + executor.submit(LineGobbler(`is`, consumer, executor, ImmutableMap.of())) + + Mockito.verify(consumer).accept("test") + Mockito.verify(consumer).accept("test2") + Mockito.verify(executor).shutdown() + } + + @Test + fun shutdownOnSuccess() { + val consumer: Consumer = mock() + val `is`: InputStream = + ByteArrayInputStream("test\ntest2\n".toByteArray(StandardCharsets.UTF_8)) + val executor: ExecutorService = Mockito.spy(MoreExecutors.newDirectExecutorService()) + + executor.submit(LineGobbler(`is`, consumer, executor, ImmutableMap.of())) + + Mockito.verify(consumer, Mockito.times(2)).accept(ArgumentMatchers.anyString()) + Mockito.verify(executor).shutdown() + } + + @Test + fun shutdownOnError() { + val consumer: Consumer = mock() + Mockito.doThrow(RuntimeException::class.java) + .`when`(consumer) + .accept(ArgumentMatchers.anyString()) + val `is`: InputStream = + ByteArrayInputStream("test\ntest2\n".toByteArray(StandardCharsets.UTF_8)) + val executor: ExecutorService = Mockito.spy(MoreExecutors.newDirectExecutorService()) + + executor.submit(LineGobbler(`is`, consumer, executor, ImmutableMap.of())) + + Mockito.verify(consumer).accept(ArgumentMatchers.anyString()) + Mockito.verify(executor).shutdown() + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/json/JsonPathsTest.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/json/JsonPathsTest.kt new file mode 100644 index 0000000000000..2897598619746 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/json/JsonPathsTest.kt @@ -0,0 +1,306 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.json + +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.node.ArrayNode +import com.jayway.jsonpath.PathNotFoundException +import java.util.stream.Collectors +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.function.Executable + +internal class JsonPathsTest { + @Test + fun testGetValues() { + Assertions.assertEquals( + listOf(0, 1, 2), + JsonPaths.getValues(JSON_NODE, LIST_ALL_QUERY) + .stream() + .map { obj: JsonNode -> obj.asInt() } + .collect(Collectors.toList()) + ) + Assertions.assertEquals( + listOf(1), + JsonPaths.getValues(JSON_NODE, LIST_ONE_QUERY) + .stream() + .map { obj: JsonNode -> obj.asInt() } + .collect(Collectors.toList()) + ) + Assertions.assertEquals( + listOf(10), + JsonPaths.getValues(JSON_NODE, NESTED_FIELD_QUERY) + .stream() + .map { obj: JsonNode -> obj.asInt() } + .collect(Collectors.toList()) + ) + Assertions.assertEquals( + JSON_NODE["two"], + JsonPaths.getValues(JSON_NODE, JSON_OBJECT_QUERY).stream().findFirst().orElse(null) + ) + Assertions.assertEquals( + emptyList(), + JsonPaths.getValues(JSON_NODE, EMPTY_RETURN_QUERY) + ) + } + + @Test + fun testGetSingleValue() { + Assertions.assertThrows(IllegalArgumentException::class.java) { + JsonPaths.getSingleValue(JSON_NODE, LIST_ALL_QUERY) + } + Assertions.assertEquals( + 1, + JsonPaths.getSingleValue(JSON_NODE, LIST_ONE_QUERY) + .map { obj: JsonNode -> obj.asInt() } + .orElse(null) + ) + Assertions.assertEquals( + 10, + JsonPaths.getSingleValue(JSON_NODE, NESTED_FIELD_QUERY) + .map { obj: JsonNode -> obj.asInt() } + .orElse(null) + ) + Assertions.assertEquals( + JSON_NODE["two"], + JsonPaths.getSingleValue(JSON_NODE, JSON_OBJECT_QUERY).orElse(null) + ) + Assertions.assertNull(JsonPaths.getSingleValue(JSON_NODE, EMPTY_RETURN_QUERY).orElse(null)) + } + + @Test + fun testGetPaths() { + Assertions.assertEquals( + listOf("$['one'][0]", "$['one'][1]", "$['one'][2]"), + JsonPaths.getPaths(JSON_NODE, LIST_ALL_QUERY) + ) + Assertions.assertEquals( + listOf("$['one'][1]"), + JsonPaths.getPaths(JSON_NODE, LIST_ONE_QUERY) + ) + Assertions.assertEquals( + listOf("$['two']['nested']"), + JsonPaths.getPaths(JSON_NODE, NESTED_FIELD_QUERY) + ) + Assertions.assertEquals( + listOf("$['two']"), + JsonPaths.getPaths(JSON_NODE, JSON_OBJECT_QUERY) + ) + Assertions.assertEquals(emptyList(), JsonPaths.getPaths(JSON_NODE, EMPTY_RETURN_QUERY)) + } + + @Test + fun testIsPathPresent() { + Assertions.assertThrows(IllegalArgumentException::class.java) { + JsonPaths.isPathPresent(JSON_NODE, LIST_ALL_QUERY) + } + Assertions.assertTrue(JsonPaths.isPathPresent(JSON_NODE, LIST_ONE_QUERY)) + Assertions.assertTrue(JsonPaths.isPathPresent(JSON_NODE, NESTED_FIELD_QUERY)) + Assertions.assertTrue(JsonPaths.isPathPresent(JSON_NODE, JSON_OBJECT_QUERY)) + Assertions.assertFalse(JsonPaths.isPathPresent(JSON_NODE, EMPTY_RETURN_QUERY)) + } + + @Test + fun testReplaceAtStringLoud() { + assertOriginalObjectNotModified(JSON_NODE) { + val expected = Jsons.clone(JSON_NODE) + (expected[ONE] as ArrayNode)[1] = REPLACEMENT_STRING + + val actual = + JsonPaths.replaceAtStringLoud(JSON_NODE, LIST_ONE_QUERY, REPLACEMENT_STRING) + Assertions.assertEquals(expected, actual) + } + } + + @Test + fun testReplaceAtStringLoudEmptyPathThrows() { + assertOriginalObjectNotModified(JSON_NODE) { + Assertions.assertThrows( + PathNotFoundException::class.java, + Executable { + JsonPaths.replaceAtStringLoud(JSON_NODE, EMPTY_RETURN_QUERY, REPLACEMENT_STRING) + } + ) + } + } + + @Test + fun testReplaceAtString() { + assertOriginalObjectNotModified(JSON_NODE) { + val expected = Jsons.clone(JSON_NODE) + (expected[ONE] as ArrayNode)[1] = REPLACEMENT_STRING + + val actual = JsonPaths.replaceAtString(JSON_NODE, LIST_ONE_QUERY, REPLACEMENT_STRING) + Assertions.assertEquals(expected, actual) + } + } + + @Test + fun testReplaceAtStringEmptyReturnNoOp() { + assertOriginalObjectNotModified(JSON_NODE) { + val expected = Jsons.clone(JSON_NODE) + val actual = + JsonPaths.replaceAtString(JSON_NODE, EMPTY_RETURN_QUERY, REPLACEMENT_STRING) + Assertions.assertEquals(expected, actual) + } + } + + @Test + fun testReplaceAtJsonNodeLoud() { + assertOriginalObjectNotModified(JSON_NODE) { + val expected = Jsons.clone(JSON_NODE) + (expected[ONE] as ArrayNode)[1] = REPLACEMENT_JSON + + val actual = + JsonPaths.replaceAtJsonNodeLoud(JSON_NODE, LIST_ONE_QUERY, REPLACEMENT_JSON) + Assertions.assertEquals(expected, actual) + } + } + + @Test + fun testReplaceAtJsonNodeLoudEmptyPathThrows() { + assertOriginalObjectNotModified(JSON_NODE) { + Assertions.assertThrows( + PathNotFoundException::class.java, + Executable { + JsonPaths.replaceAtJsonNodeLoud(JSON_NODE, EMPTY_RETURN_QUERY, REPLACEMENT_JSON) + } + ) + } + } + + @Test + fun testReplaceAtJsonNodeLoudMultipleReplace() { + assertOriginalObjectNotModified(JSON_NODE) { + val expected = Jsons.clone(JSON_NODE) + (expected[ONE] as ArrayNode)[0] = REPLACEMENT_JSON + (expected[ONE] as ArrayNode)[1] = REPLACEMENT_JSON + (expected[ONE] as ArrayNode)[2] = REPLACEMENT_JSON + + val actual = + JsonPaths.replaceAtJsonNodeLoud(JSON_NODE, LIST_ALL_QUERY, REPLACEMENT_JSON) + Assertions.assertEquals(expected, actual) + } + } + + // todo (cgardens) - this behavior is a little unintuitive, but based on the docs, there's not + // an + // obvious workaround. in this case, i would expect this to silently do nothing instead of + // throwing. + // for now just documenting it with a test. to avoid this, use the non-loud version of this + // method. + @Test + fun testReplaceAtJsonNodeLoudMultipleReplaceSplatInEmptyArrayThrows() { + val expected = Jsons.clone(JSON_NODE) + (expected[ONE] as ArrayNode).removeAll() + + assertOriginalObjectNotModified(expected) { + Assertions.assertThrows( + PathNotFoundException::class.java, + Executable { + JsonPaths.replaceAtJsonNodeLoud(expected, "$.one[*]", REPLACEMENT_JSON) + } + ) + } + } + + @Test + fun testReplaceAtJsonNode() { + assertOriginalObjectNotModified(JSON_NODE) { + val expected = Jsons.clone(JSON_NODE) + (expected[ONE] as ArrayNode)[1] = REPLACEMENT_JSON + + val actual = JsonPaths.replaceAtJsonNode(JSON_NODE, LIST_ONE_QUERY, REPLACEMENT_JSON) + Assertions.assertEquals(expected, actual) + } + } + + @Test + fun testReplaceAtJsonNodeEmptyReturnNoOp() { + assertOriginalObjectNotModified(JSON_NODE) { + val expected = Jsons.clone(JSON_NODE) + val actual = + JsonPaths.replaceAtJsonNode(JSON_NODE, EMPTY_RETURN_QUERY, REPLACEMENT_JSON) + Assertions.assertEquals(expected, actual) + } + } + + @Test + fun testReplaceAt() { + assertOriginalObjectNotModified(JSON_NODE) { + val expected = Jsons.clone(JSON_NODE) + (expected[ONE] as ArrayNode)[1] = "1-$['one'][1]" + + val actual = + JsonPaths.replaceAt(JSON_NODE, LIST_ONE_QUERY) { node: JsonNode, path: String -> + Jsons.jsonNode("$node-$path") + } + Assertions.assertEquals(expected, actual) + } + } + + @Test + fun testReplaceAtMultiple() { + assertOriginalObjectNotModified(JSON_NODE) { + val expected = Jsons.clone(JSON_NODE) + (expected[ONE] as ArrayNode)[0] = "0-$['one'][0]" + (expected[ONE] as ArrayNode)[1] = "1-$['one'][1]" + (expected[ONE] as ArrayNode)[2] = "2-$['one'][2]" + + val actual = + JsonPaths.replaceAt(JSON_NODE, LIST_ALL_QUERY) { node: JsonNode, path: String -> + Jsons.jsonNode("$node-$path") + } + Assertions.assertEquals(expected, actual) + } + } + + @Test + fun testReplaceAtEmptyReturnNoOp() { + assertOriginalObjectNotModified(JSON_NODE) { + val expected = Jsons.clone(JSON_NODE) + val actual = + JsonPaths.replaceAt(JSON_NODE, EMPTY_RETURN_QUERY) { node: JsonNode, path: String -> + Jsons.jsonNode("$node-$path") + } + Assertions.assertEquals(expected, actual) + } + } + + companion object { + private val JSON = + """ + { + "one": [0,1,2], + "two": { "nested": 10} + } + """.trimIndent() + private val JSON_NODE: JsonNode = Jsons.deserialize(JSON) + private const val LIST_ALL_QUERY = "$.one[*]" + private const val LIST_ONE_QUERY = "$.one[1]" + private const val NESTED_FIELD_QUERY = "$.two.nested" + private const val JSON_OBJECT_QUERY = "$.two" + private const val EMPTY_RETURN_QUERY = "$.three" + private const val REPLACEMENT_STRING = "replaced" + private val REPLACEMENT_JSON: JsonNode = + Jsons.deserialize("{ \"replacement\": \"replaced\" }") + private const val ONE = "one" + + /** + * For all replacement functions, they should NOT mutate in place. Helper assertion to + * verify that invariant. + * + * @param json + * - json object used for testing + * @param runnable + * - the rest of the test code that does the replacement + */ + private fun assertOriginalObjectNotModified(json: JsonNode, runnable: Runnable) { + val originalJsonNode = Jsons.clone(json) + runnable.run() + // verify the original object was not mutated. + Assertions.assertEquals(originalJsonNode, json) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/json/JsonSchemasTest.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/json/JsonSchemasTest.kt new file mode 100644 index 0000000000000..fa88d822a9fb5 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/json/JsonSchemasTest.kt @@ -0,0 +1,256 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.json + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.commons.resources.MoreResources +import java.io.IOException +import java.util.function.BiConsumer +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.junit.jupiter.params.ParameterizedTest +import org.junit.jupiter.params.provider.ValueSource +import org.mockito.Mockito +import org.mockito.kotlin.mock + +internal class JsonSchemasTest { + @Test + fun testMutateTypeToArrayStandard() { + val expectedWithoutType = Jsons.deserialize("{\"test\":\"abc\"}") + val actualWithoutType = Jsons.clone(expectedWithoutType) + JsonSchemas.mutateTypeToArrayStandard(expectedWithoutType) + Assertions.assertEquals(expectedWithoutType, actualWithoutType) + + val expectedWithArrayType = Jsons.deserialize("{\"test\":\"abc\", \"type\":[\"object\"]}") + val actualWithArrayType = Jsons.clone(expectedWithArrayType) + JsonSchemas.mutateTypeToArrayStandard(actualWithArrayType) + Assertions.assertEquals(expectedWithoutType, actualWithoutType) + + val expectedWithoutArrayType = + Jsons.deserialize("{\"test\":\"abc\", \"type\":[\"object\"]}") + val actualWithStringType = Jsons.deserialize("{\"test\":\"abc\", \"type\":\"object\"}") + JsonSchemas.mutateTypeToArrayStandard(actualWithStringType) + Assertions.assertEquals(expectedWithoutArrayType, actualWithStringType) + } + + @Test + @Throws(IOException::class) + fun testTraverse() { + val jsonWithAllTypes = + Jsons.deserialize(MoreResources.readResource("json_schemas/json_with_all_types.json")) + val mock: BiConsumer> = mock() + + JsonSchemas.traverseJsonSchema(jsonWithAllTypes, mock) + val inOrder = Mockito.inOrder(mock) + inOrder.verify(mock).accept(jsonWithAllTypes, emptyList()) + inOrder + .verify(mock) + .accept( + jsonWithAllTypes[PROPERTIES][NAME], + java.util.List.of(JsonSchemas.FieldNameOrList.fieldName(NAME)) + ) + inOrder + .verify(mock) + .accept( + jsonWithAllTypes[PROPERTIES][NAME][PROPERTIES]["first"], + java.util.List.of( + JsonSchemas.FieldNameOrList.fieldName(NAME), + JsonSchemas.FieldNameOrList.fieldName("first") + ) + ) + inOrder + .verify(mock) + .accept( + jsonWithAllTypes[PROPERTIES][NAME][PROPERTIES]["last"], + java.util.List.of( + JsonSchemas.FieldNameOrList.fieldName(NAME), + JsonSchemas.FieldNameOrList.fieldName("last") + ) + ) + inOrder + .verify(mock) + .accept( + jsonWithAllTypes[PROPERTIES][COMPANY], + java.util.List.of(JsonSchemas.FieldNameOrList.fieldName(COMPANY)) + ) + inOrder + .verify(mock) + .accept( + jsonWithAllTypes[PROPERTIES][PETS], + java.util.List.of(JsonSchemas.FieldNameOrList.fieldName(PETS)) + ) + inOrder + .verify(mock) + .accept( + jsonWithAllTypes[PROPERTIES][PETS][ITEMS], + java.util.List.of( + JsonSchemas.FieldNameOrList.fieldName(PETS), + JsonSchemas.FieldNameOrList.list() + ) + ) + inOrder + .verify(mock) + .accept( + jsonWithAllTypes[PROPERTIES][PETS][ITEMS][PROPERTIES]["type"], + java.util.List.of( + JsonSchemas.FieldNameOrList.fieldName(PETS), + JsonSchemas.FieldNameOrList.list(), + JsonSchemas.FieldNameOrList.fieldName("type") + ) + ) + inOrder + .verify(mock) + .accept( + jsonWithAllTypes[PROPERTIES][PETS][ITEMS][PROPERTIES]["number"], + java.util.List.of( + JsonSchemas.FieldNameOrList.fieldName(PETS), + JsonSchemas.FieldNameOrList.list(), + JsonSchemas.FieldNameOrList.fieldName("number") + ) + ) + inOrder.verifyNoMoreInteractions() + } + + @ValueSource(strings = ["anyOf", "oneOf", "allOf"]) + @ParameterizedTest + @Throws(IOException::class) + fun testTraverseComposite(compositeKeyword: String) { + val jsonSchemaString = + MoreResources.readResource("json_schemas/composite_json_schema.json") + .replace("".toRegex(), compositeKeyword) + val jsonWithAllTypes = Jsons.deserialize(jsonSchemaString) + val mock: BiConsumer> = mock() + + JsonSchemas.traverseJsonSchema(jsonWithAllTypes, mock) + + val inOrder = Mockito.inOrder(mock) + inOrder.verify(mock).accept(jsonWithAllTypes, emptyList()) + inOrder.verify(mock).accept(jsonWithAllTypes[compositeKeyword][0], emptyList()) + inOrder.verify(mock).accept(jsonWithAllTypes[compositeKeyword][1], emptyList()) + inOrder + .verify(mock) + .accept( + jsonWithAllTypes[compositeKeyword][1][PROPERTIES]["prop1"], + java.util.List.of(JsonSchemas.FieldNameOrList.fieldName("prop1")) + ) + inOrder.verify(mock).accept(jsonWithAllTypes[compositeKeyword][2], emptyList()) + inOrder + .verify(mock) + .accept( + jsonWithAllTypes[compositeKeyword][2][ITEMS], + java.util.List.of(JsonSchemas.FieldNameOrList.list()) + ) + inOrder + .verify(mock) + .accept(jsonWithAllTypes[compositeKeyword][3][compositeKeyword][0], emptyList()) + inOrder + .verify(mock) + .accept(jsonWithAllTypes[compositeKeyword][3][compositeKeyword][1], emptyList()) + inOrder + .verify(mock) + .accept( + jsonWithAllTypes[compositeKeyword][3][compositeKeyword][1][ITEMS], + java.util.List.of(JsonSchemas.FieldNameOrList.list()) + ) + inOrder.verifyNoMoreInteractions() + } + + @Test + @Throws(IOException::class) + fun testTraverseMultiType() { + val jsonWithAllTypes = + Jsons.deserialize( + MoreResources.readResource("json_schemas/json_with_array_type_fields.json") + ) + val mock: BiConsumer> = mock() + + JsonSchemas.traverseJsonSchema(jsonWithAllTypes, mock) + val inOrder = Mockito.inOrder(mock) + inOrder.verify(mock).accept(jsonWithAllTypes, emptyList()) + inOrder + .verify(mock) + .accept( + jsonWithAllTypes[PROPERTIES][COMPANY], + java.util.List.of(JsonSchemas.FieldNameOrList.fieldName(COMPANY)) + ) + inOrder + .verify(mock) + .accept(jsonWithAllTypes[ITEMS], java.util.List.of(JsonSchemas.FieldNameOrList.list())) + inOrder + .verify(mock) + .accept( + jsonWithAllTypes[ITEMS][PROPERTIES][USER], + java.util.List.of( + JsonSchemas.FieldNameOrList.list(), + JsonSchemas.FieldNameOrList.fieldName(USER) + ) + ) + inOrder.verifyNoMoreInteractions() + } + + @Test + @Throws(IOException::class) + fun testTraverseMultiTypeComposite() { + val compositeKeyword = "anyOf" + val jsonWithAllTypes = + Jsons.deserialize( + MoreResources.readResource( + "json_schemas/json_with_array_type_fields_with_composites.json" + ) + ) + val mock: BiConsumer> = mock() + + JsonSchemas.traverseJsonSchema(jsonWithAllTypes, mock) + + val inOrder = Mockito.inOrder(mock) + inOrder.verify(mock).accept(jsonWithAllTypes, emptyList()) + inOrder + .verify(mock) + .accept( + jsonWithAllTypes[compositeKeyword][0][PROPERTIES][COMPANY], + java.util.List.of(JsonSchemas.FieldNameOrList.fieldName(COMPANY)) + ) + inOrder + .verify(mock) + .accept( + jsonWithAllTypes[compositeKeyword][1][PROPERTIES]["organization"], + java.util.List.of(JsonSchemas.FieldNameOrList.fieldName("organization")) + ) + inOrder + .verify(mock) + .accept(jsonWithAllTypes[ITEMS], java.util.List.of(JsonSchemas.FieldNameOrList.list())) + inOrder + .verify(mock) + .accept( + jsonWithAllTypes[ITEMS][PROPERTIES][USER], + java.util.List.of( + JsonSchemas.FieldNameOrList.list(), + JsonSchemas.FieldNameOrList.fieldName("user") + ) + ) + inOrder.verifyNoMoreInteractions() + } + + @Test + @Throws(IOException::class) + fun testTraverseArrayTypeWithNoItemsDoNotThrowsException() { + val jsonWithAllTypes = + Jsons.deserialize( + MoreResources.readResource("json_schemas/json_with_array_type_fields_no_items.json") + ) + val mock: BiConsumer> = mock() + + JsonSchemas.traverseJsonSchema(jsonWithAllTypes, mock) + } + + companion object { + private const val UNCHECKED = "unchecked" + private const val NAME = "name" + private const val PROPERTIES = "properties" + private const val PETS = "pets" + private const val COMPANY = "company" + private const val ITEMS = "items" + private const val USER = "user" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/json/JsonsTest.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/json/JsonsTest.kt new file mode 100644 index 0000000000000..add78013093bc --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/json/JsonsTest.kt @@ -0,0 +1,444 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.json + +import com.fasterxml.jackson.annotation.JsonProperty +import com.fasterxml.jackson.core.type.TypeReference +import com.fasterxml.jackson.databind.node.BinaryNode +import com.google.common.base.Charsets +import com.google.common.collect.ImmutableList +import com.google.common.collect.ImmutableMap +import com.google.common.collect.Lists +import com.google.common.collect.Sets +import java.nio.charset.StandardCharsets +import java.util.* +import java.util.function.Function +import java.util.stream.Collectors +import java.util.stream.Stream +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class JsonsTest { + @Test + fun testSerialize() { + Assertions.assertEquals(SERIALIZED_JSON, Jsons.serialize(ToClass(ABC, 999, 888L))) + + Assertions.assertEquals( + "{\"test\":\"abc\",\"test2\":\"def\"}", + Jsons.serialize(ImmutableMap.of(TEST, ABC, TEST2, DEF)) + ) + } + + @Test + fun testSerializeJsonNode() { + Assertions.assertEquals( + SERIALIZED_JSON, + Jsons.serialize(Jsons.jsonNode(ToClass(ABC, 999, 888L))) + ) + + Assertions.assertEquals( + "{\"test\":\"abc\",\"test2\":\"def\"}", + Jsons.serialize(Jsons.jsonNode(ImmutableMap.of(TEST, ABC, TEST2, DEF))) + ) + // issue: 5878 add test for binary node serialization, binary data are + // serialized into base64 + Assertions.assertEquals( + "{\"test\":\"dGVzdA==\"}", + Jsons.serialize( + Jsons.jsonNode( + ImmutableMap.of(TEST, BinaryNode("test".toByteArray(StandardCharsets.UTF_8))) + ) + ) + ) + } + + @Test + fun testDeserialize() { + Assertions.assertEquals( + ToClass(ABC, 999, 888L), + Jsons.deserialize( + "{\"str\":\"abc\", \"num\": 999, \"numLong\": 888}", + ToClass::class.java + ) + ) + } + + @Test + fun testDeserializeToJsonNode() { + Assertions.assertEquals(SERIALIZED_JSON2, Jsons.deserialize(SERIALIZED_JSON2).toString()) + + Assertions.assertEquals( + "[{\"str\":\"abc\"},{\"str\":\"abc\"}]", + Jsons.deserialize("[{\"str\":\"abc\"},{\"str\":\"abc\"}]").toString() + ) + // issue: 5878 add test for binary node deserialization, for now should be + // base64 string + Assertions.assertEquals( + "{\"test\":\"dGVzdA==\"}", + Jsons.deserialize("{\"test\":\"dGVzdA==\"}").toString() + ) + } + + @Test + fun testTryDeserialize() { + Assertions.assertEquals( + Optional.of(ToClass(ABC, 999, 888L)), + Jsons.tryDeserialize( + "{\"str\":\"abc\", \"num\": 999, \"numLong\": 888}", + ToClass::class.java + ) + ) + + Assertions.assertEquals( + Optional.of(ToClass(ABC, 999, 0L)), + Jsons.tryDeserialize( + "{\"str\":\"abc\", \"num\": 999, \"test\": 888}", + ToClass::class.java + ) + ) + } + + @Test + fun testTryDeserializeToJsonNode() { + Assertions.assertEquals( + Optional.of(Jsons.deserialize(SERIALIZED_JSON2)), + Jsons.tryDeserialize(SERIALIZED_JSON2) + ) + + Assertions.assertEquals( + Optional.empty(), + Jsons.tryDeserialize("{\"str\":\"abc\", \"num\": 999, \"test}") + ) + } + + @Test + fun testToJsonNode() { + Assertions.assertEquals(SERIALIZED_JSON, Jsons.jsonNode(ToClass(ABC, 999, 888L)).toString()) + + Assertions.assertEquals( + "{\"test\":\"abc\",\"test2\":\"def\"}", + Jsons.jsonNode(ImmutableMap.of(TEST, ABC, TEST2, DEF)).toString() + ) + + Assertions.assertEquals( + "{\"test\":\"abc\",\"test2\":{\"inner\":1}}", + Jsons.jsonNode(ImmutableMap.of(TEST, ABC, TEST2, ImmutableMap.of("inner", 1))) + .toString() + ) + + Assertions.assertEquals( + Jsons.jsonNode(ToClass(ABC, 999, 888L)), + Jsons.jsonNode(Jsons.jsonNode(ToClass(ABC, 999, 888L))) + ) + } + + @Test + fun testEmptyObject() { + Assertions.assertEquals(Jsons.deserialize("{}"), Jsons.emptyObject()) + } + + @Test + fun testArrayNode() { + Assertions.assertEquals(Jsons.deserialize("[]"), Jsons.arrayNode()) + } + + @Test + fun testToObject() { + val expected = ToClass(ABC, 999, 888L) + Assertions.assertEquals( + expected, + Jsons.`object`(Jsons.jsonNode(expected), ToClass::class.java) + ) + + Assertions.assertEquals( + Lists.newArrayList(expected), + Jsons.`object`>( + Jsons.jsonNode(Lists.newArrayList(expected)), + object : TypeReference>() {} + ) + ) + + Assertions.assertEquals( + ToClass(), + Jsons.`object`(Jsons.deserialize("{\"a\":1}"), ToClass::class.java) + ) + } + + @Test + fun testTryToObject() { + val expected = ToClass(ABC, 999, 888L) + Assertions.assertEquals( + Optional.of(expected), + Jsons.tryObject(Jsons.deserialize(SERIALIZED_JSON), ToClass::class.java) + ) + + Assertions.assertEquals( + Optional.of(expected), + Jsons.tryObject( + Jsons.deserialize(SERIALIZED_JSON), + object : TypeReference() {} + ) + ) + + val emptyExpected = ToClass() + Assertions.assertEquals( + Optional.of(emptyExpected), + Jsons.tryObject(Jsons.deserialize("{\"str1\":\"abc\"}"), ToClass::class.java) + ) + + Assertions.assertEquals( + Optional.of(emptyExpected), + Jsons.tryObject( + Jsons.deserialize("{\"str1\":\"abc\"}"), + object : TypeReference() {} + ) + ) + } + + @Test + fun testClone() { + val expected = ToClass("abc", 999, 888L) + val actual = Jsons.clone(expected) + Assertions.assertNotSame(expected, actual) + Assertions.assertEquals(expected, actual) + } + + @Test + fun testToBytes() { + val jsonString = "{\"test\":\"abc\",\"type\":[\"object\"]}" + Assertions.assertArrayEquals( + jsonString.toByteArray(Charsets.UTF_8), + Jsons.toBytes(Jsons.deserialize(jsonString)) + ) + } + + @Test + fun testKeys() { + // test object json node + val jsonNode = Jsons.jsonNode(ImmutableMap.of(TEST, ABC, TEST2, DEF)) + Assertions.assertEquals(Sets.newHashSet(TEST, TEST2), Jsons.keys(jsonNode)) + + // test literal jsonNode + Assertions.assertEquals(emptySet(), Jsons.keys(jsonNode["test"])) + + // test nested object json node. should only return top-level keys. + val nestedJsonNode = + Jsons.jsonNode(ImmutableMap.of(TEST, ABC, TEST2, ImmutableMap.of("test3", "def"))) + Assertions.assertEquals(Sets.newHashSet(TEST, TEST2), Jsons.keys(nestedJsonNode)) + + // test array json node + val arrayJsonNode = Jsons.jsonNode(ImmutableList.of(ImmutableMap.of(TEST, ABC, TEST2, DEF))) + Assertions.assertEquals(emptySet(), Jsons.keys(arrayJsonNode)) + } + + @Test + fun testToPrettyString() { + val jsonNode = Jsons.jsonNode(ImmutableMap.of(TEST, ABC)) + val expectedOutput = """{ + "test": "abc" +} +""" + Assertions.assertEquals(expectedOutput, Jsons.toPrettyString(jsonNode)) + } + + @Test + fun testGetOptional() { + val json = + Jsons.deserialize( + "{ \"abc\": { \"def\": \"ghi\" }, \"jkl\": {}, \"mno\": \"pqr\", \"stu\": null }" + ) + + Assertions.assertEquals(Optional.of(Jsons.jsonNode(GHI)), Jsons.getOptional(json, ABC, DEF)) + Assertions.assertEquals(Optional.of(Jsons.emptyObject()), Jsons.getOptional(json, JKL)) + Assertions.assertEquals(Optional.of(Jsons.jsonNode(PQR)), Jsons.getOptional(json, MNO)) + Assertions.assertEquals( + Optional.of(Jsons.jsonNode(null)), + Jsons.getOptional(json, STU) + ) + Assertions.assertEquals(Optional.empty(), Jsons.getOptional(json, XYZ)) + Assertions.assertEquals(Optional.empty(), Jsons.getOptional(json, ABC, XYZ)) + Assertions.assertEquals(Optional.empty(), Jsons.getOptional(json, ABC, DEF, XYZ)) + Assertions.assertEquals(Optional.empty(), Jsons.getOptional(json, ABC, JKL, XYZ)) + Assertions.assertEquals(Optional.empty(), Jsons.getOptional(json, STU, XYZ)) + } + + @Test + fun testGetStringOrNull() { + val json = + Jsons.deserialize("{ \"abc\": { \"def\": \"ghi\" }, \"jkl\": \"mno\", \"pqr\": 1 }") + + Assertions.assertEquals(GHI, Jsons.getStringOrNull(json, ABC, DEF)) + Assertions.assertEquals(MNO, Jsons.getStringOrNull(json, JKL)) + Assertions.assertEquals("1", Jsons.getStringOrNull(json, PQR)) + Assertions.assertNull(Jsons.getStringOrNull(json, ABC, DEF, XYZ)) + Assertions.assertNull(Jsons.getStringOrNull(json, XYZ)) + } + + @Test + fun testGetEstimatedByteSize() { + val json = + Jsons.deserialize("{\"string_key\":\"abc\",\"array_key\":[\"item1\", \"item2\"]}") + Assertions.assertEquals(Jsons.toBytes(json).size, Jsons.getEstimatedByteSize(json)) + } + + @Test + fun testFlatten__noArrays() { + val json = Jsons.deserialize("{ \"abc\": { \"def\": \"ghi\" }, \"jkl\": true, \"pqr\": 1 }") + val expected = + Stream.of( + *arrayOf( + arrayOf("abc.def", GHI), + arrayOf(JKL, true), + arrayOf(PQR, 1), + ) + ) + .collect( + Collectors.toMap( + Function { data: Array -> data[0] as String }, + Function { data: Array -> data[1] } + ) + ) + Assertions.assertEquals(expected, Jsons.flatten(json, false)) + } + + @Test + fun testFlatten__withArraysNoApplyFlatten() { + val json = + Jsons.deserialize( + "{ \"abc\": [{ \"def\": \"ghi\" }, { \"fed\": \"ihg\" }], \"jkl\": true, \"pqr\": 1 }" + ) + val expected = + Stream.of( + *arrayOf( + arrayOf(ABC, "[{\"def\":\"ghi\"},{\"fed\":\"ihg\"}]"), + arrayOf(JKL, true), + arrayOf(PQR, 1), + ) + ) + .collect( + Collectors.toMap( + Function { data: Array -> data[0] as String }, + Function { data: Array -> data[1] } + ) + ) + Assertions.assertEquals(expected, Jsons.flatten(json, false)) + } + + @Test + fun testFlatten__checkBackwardCompatiblity() { + val json = + Jsons.deserialize( + "{ \"abc\": [{ \"def\": \"ghi\" }, { \"fed\": \"ihg\" }], \"jkl\": true, \"pqr\": 1 }" + ) + val expected = + Stream.of( + *arrayOf( + arrayOf(ABC, "[{\"def\":\"ghi\"},{\"fed\":\"ihg\"}]"), + arrayOf(JKL, true), + arrayOf(PQR, 1), + ) + ) + .collect( + Collectors.toMap( + Function { data: Array -> data[0] as String }, + Function { data: Array -> data[1] } + ) + ) + Assertions.assertEquals(expected, Jsons.flatten(json)) + } + + @Test + fun testFlatten__withArraysApplyFlatten() { + val json = + Jsons.deserialize( + "{ \"abc\": [{ \"def\": \"ghi\" }, { \"fed\": \"ihg\" }], \"jkl\": true, \"pqr\": 1 }" + ) + val expected = + Stream.of( + *arrayOf( + arrayOf("abc.[0].def", "ghi"), + arrayOf("abc.[1].fed", "ihg"), + arrayOf(JKL, true), + arrayOf(PQR, 1), + ) + ) + .collect( + Collectors.toMap( + Function { data: Array -> data[0] as String }, + Function { data: Array -> data[1] } + ) + ) + Assertions.assertEquals(expected, Jsons.flatten(json, true)) + } + + @Test + fun testFlatten__withArraysApplyFlattenNested() { + val json = + Jsons.deserialize( + "{ \"abc\": [{ \"def\": {\"ghi\": [\"xyz\"] }}, { \"fed\": \"ihg\" }], \"jkl\": true, \"pqr\": 1 }" + ) + val expected = + Stream.of( + *arrayOf( + arrayOf("abc.[0].def.ghi.[0]", "xyz"), + arrayOf("abc.[1].fed", "ihg"), + arrayOf(JKL, true), + arrayOf(PQR, 1), + ) + ) + .collect( + Collectors.toMap( + Function { data: Array -> data[0] as String }, + Function { data: Array -> data[1] } + ) + ) + Assertions.assertEquals(expected, Jsons.flatten(json, true)) + } + + private class ToClass { + @JsonProperty("str") var str: String? = null + + @JsonProperty("num") var num: Int? = null + + @JsonProperty("numLong") var numLong: Long = 0 + + constructor() + + constructor(str: String?, num: Int?, numLong: Long) { + this.str = str + this.num = num + this.numLong = numLong + } + + override fun equals(o: Any?): Boolean { + if (this === o) { + return true + } + if (o == null || javaClass != o.javaClass) { + return false + } + val toClass = o as ToClass + return numLong == toClass.numLong && str == toClass.str && num == toClass.num + } + + override fun hashCode(): Int { + return Objects.hash(str, num, numLong) + } + } + + companion object { + private const val SERIALIZED_JSON = "{\"str\":\"abc\",\"num\":999,\"numLong\":888}" + private const val SERIALIZED_JSON2 = "{\"str\":\"abc\"}" + private const val ABC = "abc" + private const val DEF = "def" + private const val GHI = "ghi" + private const val JKL = "jkl" + private const val MNO = "mno" + private const val PQR = "pqr" + private const val STU = "stu" + private const val TEST = "test" + private const val TEST2 = "test2" + private const val XYZ = "xyz" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/lang/CloseableShutdownHookTest.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/lang/CloseableShutdownHookTest.kt new file mode 100644 index 0000000000000..65b3597389fdb --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/lang/CloseableShutdownHookTest.kt @@ -0,0 +1,30 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.lang + +import java.io.InputStream +import org.junit.jupiter.api.Test +import org.mockito.Mockito + +internal class CloseableShutdownHookTest { + @Test + @Throws(Exception::class) + fun testRegisteringShutdownHook() { + val closeable = Mockito.mock(InputStream::class.java) + val autoCloseable = Mockito.mock(CloseableQueue::class.java) + val notCloseable = "Not closeable" + + val thread = + CloseableShutdownHook.buildShutdownHookThread( + closeable, + autoCloseable, + notCloseable, + null + ) + thread.run() + + Mockito.verify(closeable, Mockito.times(1)).close() + Mockito.verify(autoCloseable, Mockito.times(1)).close() + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/lang/ExceptionsTest.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/lang/ExceptionsTest.kt new file mode 100644 index 0000000000000..9855d0a3080d0 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/lang/ExceptionsTest.kt @@ -0,0 +1,56 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.lang + +import java.io.IOException +import java.util.concurrent.Callable +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class ExceptionsTest { + @Test + fun testToRuntime() { + Assertions.assertEquals("hello", Exceptions.toRuntime { callable("hello", false) }) + Assertions.assertThrows(RuntimeException::class.java) { + Exceptions.toRuntime(Callable { callable("goodbye", true) }) + } + } + + @Test + fun testToRuntimeVoid() { + val list: MutableList = ArrayList() + Assertions.assertThrows(RuntimeException::class.java) { + Exceptions.toRuntime { voidCallable(list, "hello", true) } + } + Assertions.assertEquals(0, list.size) + + Exceptions.toRuntime { voidCallable(list, "goodbye", false) } + Assertions.assertEquals(1, list.size) + Assertions.assertEquals("goodbye", list[0]) + } + + @Test + fun testSwallow() { + Exceptions.swallow { throw RuntimeException() } + Exceptions.swallow { throw Exception() } + } + + @Throws(IOException::class) + private fun callable(input: String, shouldThrow: Boolean): String { + if (shouldThrow) { + throw IOException() + } else { + return input + } + } + + @Throws(IOException::class) + private fun voidCallable(list: MutableList, input: String, shouldThrow: Boolean) { + if (shouldThrow) { + throw IOException() + } else { + list.add(input) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/lang/MoreBooleansTest.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/lang/MoreBooleansTest.kt new file mode 100644 index 0000000000000..fa7e2333c5457 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/lang/MoreBooleansTest.kt @@ -0,0 +1,17 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.lang + +import java.lang.Boolean +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class MoreBooleansTest { + @Test + fun evaluateNullAsFalse() { + Assertions.assertTrue(MoreBooleans.isTruthy(Boolean.TRUE)) + Assertions.assertFalse(MoreBooleans.isTruthy(Boolean.FALSE)) + Assertions.assertFalse(MoreBooleans.isTruthy(null)) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/logging/MdcScopeTest.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/logging/MdcScopeTest.kt new file mode 100644 index 0000000000000..58e468d06c0c3 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/logging/MdcScopeTest.kt @@ -0,0 +1,51 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.logging + +import org.assertj.core.api.Assertions +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.slf4j.MDC + +internal class MdcScopeTest { + @BeforeEach + fun init() { + MDC.setContextMap(originalMap) + } + + @Test + fun testMDCModified() { + MdcScope(modificationInMDC).use { ignored -> + val mdcState = MDC.getCopyOfContextMap() + Assertions.assertThat(mdcState) + .containsExactlyInAnyOrderEntriesOf( + java.util.Map.of( + "test", + "entry", + "new", + "will be added", + "testOverride", + "will override" + ) + ) + } + } + + @Test + fun testMDCRestore() { + MdcScope(modificationInMDC).use { ignored -> } + val mdcState = MDC.getCopyOfContextMap() + + Assertions.assertThat(mdcState).containsAllEntriesOf(originalMap) + Assertions.assertThat(mdcState).doesNotContainKey("new") + } + + companion object { + private val originalMap: Map = + java.util.Map.of("test", "entry", "testOverride", "should be overrided") + + private val modificationInMDC: Map = + java.util.Map.of("new", "will be added", "testOverride", "will override") + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/map/MoreMapsTest.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/map/MoreMapsTest.kt new file mode 100644 index 0000000000000..dafa2f04f26f7 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/map/MoreMapsTest.kt @@ -0,0 +1,18 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.map + +import com.google.common.collect.ImmutableMap +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class MoreMapsTest { + @Test + fun testMerge() { + val map1: Map = ImmutableMap.of("a", 3, "b", 2) + val map2: Map = ImmutableMap.of("a", 1) + + Assertions.assertEquals(ImmutableMap.of("a", 1, "b", 2), MoreMaps.merge(map1, map2)) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/resources/MoreResourcesTest.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/resources/MoreResourcesTest.kt new file mode 100644 index 0000000000000..b930f8f7c62d5 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/resources/MoreResourcesTest.kt @@ -0,0 +1,107 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.resources + +import com.google.common.collect.Sets +import io.airbyte.commons.io.IOs +import java.io.IOException +import java.net.URISyntaxException +import java.nio.charset.StandardCharsets +import java.nio.file.Path +import java.util.stream.Collectors +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class MoreResourcesTest { + @Test + @Throws(IOException::class) + fun testResourceRead() { + Assertions.assertEquals(CONTENT_1, MoreResources.readResource(RESOURCE_TEST)) + Assertions.assertEquals(CONTENT_2, MoreResources.readResource("subdir/resource_test_sub")) + + Assertions.assertThrows(IllegalArgumentException::class.java) { + MoreResources.readResource("invalid") + } + } + + @Test + @Throws(IOException::class) + fun testResourceReadWithClass() { + Assertions.assertEquals( + CONTENT_1, + MoreResources.readResource(MoreResourcesTest::class.java, RESOURCE_TEST) + ) + Assertions.assertEquals( + CONTENT_2, + MoreResources.readResource(MoreResourcesTest::class.java, "subdir/resource_test_sub") + ) + + Assertions.assertEquals( + CONTENT_1, + MoreResources.readResource(MoreResourcesTest::class.java, "/resource_test") + ) + Assertions.assertEquals( + CONTENT_2, + MoreResources.readResource(MoreResourcesTest::class.java, "/subdir/resource_test_sub") + ) + + Assertions.assertThrows(IllegalArgumentException::class.java) { + MoreResources.readResource(MoreResourcesTest::class.java, "invalid") + } + } + + @Test + @Throws(URISyntaxException::class) + fun testReadResourceAsFile() { + val file = MoreResources.readResourceAsFile(RESOURCE_TEST) + Assertions.assertEquals(CONTENT_1, IOs.readFile(file.toPath())) + } + + @Test + @Throws(IOException::class) + fun testReadBytes() { + Assertions.assertEquals( + CONTENT_1, + String(MoreResources.readBytes(RESOURCE_TEST), StandardCharsets.UTF_8) + ) + Assertions.assertEquals( + CONTENT_2, + String(MoreResources.readBytes("subdir/resource_test_sub"), StandardCharsets.UTF_8) + ) + + Assertions.assertThrows(IllegalArgumentException::class.java) { + MoreResources.readBytes("invalid") + } + } + + @Test + @Throws(IOException::class) + fun testResourceReadDuplicateName() { + Assertions.assertEquals(CONTENT_1, MoreResources.readResource("resource_test_a")) + Assertions.assertEquals(CONTENT_2, MoreResources.readResource("subdir/resource_test_a")) + } + + @Test + @Throws(IOException::class) + fun testListResource() { + Assertions.assertEquals( + Sets.newHashSet( + "subdir", + "resource_test_sub", + "resource_test_sub_2", + "resource_test_a" + ), + MoreResources.listResources(MoreResourcesTest::class.java, "subdir") + .map { obj: Path -> obj.fileName } + .map { obj: Path -> obj.toString() } + .collect(Collectors.toSet()) + ) + } + + companion object { + private const val CONTENT_1 = "content1\n" + private const val CONTENT_2 = "content2\n" + private const val RESOURCE_TEST = "resource_test" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/stream/AirbyteStreamStatusHolderTest.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/stream/AirbyteStreamStatusHolderTest.kt new file mode 100644 index 0000000000000..a36f868d8328a --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/stream/AirbyteStreamStatusHolderTest.kt @@ -0,0 +1,52 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.stream + +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair +import io.airbyte.protocol.models.v0.AirbyteStreamStatusTraceMessage +import io.airbyte.protocol.models.v0.AirbyteTraceMessage +import io.airbyte.protocol.models.v0.StreamDescriptor +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +/** Test suite for the [AirbyteStreamStatusHolder] class. */ +internal class AirbyteStreamStatusHolderTest { + @Test + fun testToTraceMessage() { + val startTime = System.currentTimeMillis().toDouble() + val airbyteStreamNameAndNamespacePair = AirbyteStreamNameNamespacePair("name", "namespace") + val streamStatus = AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.RUNNING + val holder = AirbyteStreamStatusHolder(airbyteStreamNameAndNamespacePair, streamStatus) + + val traceMessage = holder.toTraceMessage() + Assertions.assertTrue(traceMessage.emittedAt >= startTime) + Assertions.assertEquals(AirbyteTraceMessage.Type.STREAM_STATUS, traceMessage.type) + Assertions.assertEquals(streamStatus, traceMessage.streamStatus.status) + Assertions.assertEquals( + StreamDescriptor() + .withName(airbyteStreamNameAndNamespacePair.name) + .withNamespace(airbyteStreamNameAndNamespacePair.namespace), + traceMessage.streamStatus.streamDescriptor + ) + } + + @Test + fun testToTraceMessageWithOptionalData() { + val startTime = System.currentTimeMillis().toDouble() + val airbyteStreamNameAndNamespacePair = AirbyteStreamNameNamespacePair("name", "namespace") + val streamStatus = AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.COMPLETE + val holder = AirbyteStreamStatusHolder(airbyteStreamNameAndNamespacePair, streamStatus) + + val traceMessage = holder.toTraceMessage() + Assertions.assertTrue(traceMessage.emittedAt >= startTime) + Assertions.assertEquals(AirbyteTraceMessage.Type.STREAM_STATUS, traceMessage.type) + Assertions.assertEquals(streamStatus, traceMessage.streamStatus.status) + Assertions.assertEquals( + StreamDescriptor() + .withName(airbyteStreamNameAndNamespacePair.name) + .withNamespace(airbyteStreamNameAndNamespacePair.namespace), + traceMessage.streamStatus.streamDescriptor + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/stream/StreamStatusUtilsTest.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/stream/StreamStatusUtilsTest.kt new file mode 100644 index 0000000000000..58e8678722a89 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/stream/StreamStatusUtilsTest.kt @@ -0,0 +1,624 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.stream + +import io.airbyte.commons.util.AirbyteStreamAware +import io.airbyte.commons.util.AutoCloseableIterator +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.AirbyteStreamStatusTraceMessage +import java.util.* +import java.util.function.Consumer +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.extension.ExtendWith +import org.mockito.ArgumentCaptor +import org.mockito.ArgumentMatchers +import org.mockito.Captor +import org.mockito.Mockito +import org.mockito.junit.jupiter.MockitoExtension +import org.mockito.kotlin.mock + +/** Test suite for the [StreamStatusUtils] class. */ +@ExtendWith(MockitoExtension::class) +internal class StreamStatusUtilsTest { + @Captor + private val airbyteStreamStatusHolderArgumentCaptor: + ArgumentCaptor? = + null + + @Test + fun testCreateStreamStatusConsumerWrapper() { + val stream: AutoCloseableIterator = mock() + val streamStatusEmitter = Optional.empty>() + val messageConsumer: Consumer = mock() + + val wrappedMessageConsumer = + StreamStatusUtils.statusTrackingRecordCollector( + stream, + messageConsumer, + streamStatusEmitter + ) + + Assertions.assertNotEquals(messageConsumer, wrappedMessageConsumer) + } + + @Test + fun testStreamStatusConsumerWrapperProduceStreamStatus() { + val airbyteStream = AirbyteStreamNameNamespacePair(NAME, NAMESPACE) + val stream: AutoCloseableIterator = mock() + val statusEmitter: Consumer = mock() + val streamStatusEmitter = Optional.of(statusEmitter) + val messageConsumer: Consumer = mock() + val airbyteMessage = Mockito.mock(AirbyteMessage::class.java) + + Mockito.`when`(stream.airbyteStream).thenReturn(Optional.of(airbyteStream)) + + val wrappedMessageConsumer = + StreamStatusUtils.statusTrackingRecordCollector( + stream, + messageConsumer, + streamStatusEmitter + ) + + Assertions.assertNotEquals(messageConsumer, wrappedMessageConsumer) + + wrappedMessageConsumer.accept(airbyteMessage) + wrappedMessageConsumer.accept(airbyteMessage) + wrappedMessageConsumer.accept(airbyteMessage) + + Mockito.verify(messageConsumer, Mockito.times(3)).accept(ArgumentMatchers.any()) + Mockito.verify(statusEmitter, Mockito.times(1)) + .accept(airbyteStreamStatusHolderArgumentCaptor!!.capture()) + Assertions.assertEquals( + AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.RUNNING, + airbyteStreamStatusHolderArgumentCaptor.value.toTraceMessage().streamStatus.status + ) + } + + @Test + fun testEmitRunningStreamStatusIterator() { + val airbyteStream = AirbyteStreamNameNamespacePair(NAME, NAMESPACE) + val stream: AutoCloseableIterator = mock() + val statusEmitter: Consumer = mock() + val streamStatusEmitter = Optional.of(statusEmitter) + + Mockito.`when`(stream.airbyteStream).thenReturn(Optional.of(airbyteStream)) + + StreamStatusUtils.emitRunningStreamStatus(stream, streamStatusEmitter) + + Mockito.verify(statusEmitter, Mockito.times(1)) + .accept(airbyteStreamStatusHolderArgumentCaptor!!.capture()) + Assertions.assertEquals( + AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.RUNNING, + airbyteStreamStatusHolderArgumentCaptor.value.toTraceMessage().streamStatus.status + ) + } + + @Test + fun testEmitRunningStreamStatusIteratorEmptyAirbyteStream() { + val stream: AutoCloseableIterator = mock() + val statusEmitter: Consumer = mock() + val streamStatusEmitter = Optional.of(statusEmitter) + + Mockito.`when`(stream.airbyteStream).thenReturn(Optional.empty()) + + Assertions.assertDoesNotThrow { + StreamStatusUtils.emitRunningStreamStatus(stream, streamStatusEmitter) + } + Mockito.verify(statusEmitter, Mockito.times(0)) + .accept(airbyteStreamStatusHolderArgumentCaptor!!.capture()) + } + + @Test + fun testEmitRunningStreamStatusIteratorEmptyStatusEmitter() { + val airbyteStream = AirbyteStreamNameNamespacePair(NAME, NAMESPACE) + val stream: AutoCloseableIterator = mock() + val streamStatusEmitter = Optional.empty>() + + Mockito.`when`(stream.airbyteStream).thenReturn(Optional.of(airbyteStream)) + + Assertions.assertDoesNotThrow { + StreamStatusUtils.emitRunningStreamStatus(stream, streamStatusEmitter) + } + } + + @Test + fun testEmitRunningStreamStatusAirbyteStreamAware() { + val airbyteStream = AirbyteStreamNameNamespacePair(NAME, NAMESPACE) + val stream = Mockito.mock(AirbyteStreamAware::class.java) + val statusEmitter: Consumer = mock() + val streamStatusEmitter = Optional.of(statusEmitter) + + Mockito.`when`(stream.airbyteStream).thenReturn(Optional.of(airbyteStream)) + + StreamStatusUtils.emitRunningStreamStatus(stream, streamStatusEmitter) + + Mockito.verify(statusEmitter, Mockito.times(1)) + .accept(airbyteStreamStatusHolderArgumentCaptor!!.capture()) + Assertions.assertEquals( + AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.RUNNING, + airbyteStreamStatusHolderArgumentCaptor.value.toTraceMessage().streamStatus.status + ) + } + + @Test + fun testEmitRunningStreamStatusAirbyteStreamAwareEmptyStream() { + val stream = Mockito.mock(AirbyteStreamAware::class.java) + val statusEmitter: Consumer = mock() + val streamStatusEmitter = Optional.of(statusEmitter) + + Mockito.`when`(stream.airbyteStream).thenReturn(Optional.empty()) + + Assertions.assertDoesNotThrow { + StreamStatusUtils.emitRunningStreamStatus(stream, streamStatusEmitter) + } + Mockito.verify(statusEmitter, Mockito.times(0)) + .accept(airbyteStreamStatusHolderArgumentCaptor!!.capture()) + } + + @Test + fun testEmitRunningStreamStatusAirbyteStreamAwareEmptyStatusEmitter() { + val airbyteStream = AirbyteStreamNameNamespacePair(NAME, NAMESPACE) + val stream = Mockito.mock(AirbyteStreamAware::class.java) + val streamStatusEmitter = Optional.empty>() + + Mockito.`when`(stream.airbyteStream).thenReturn(Optional.of(airbyteStream)) + + Assertions.assertDoesNotThrow { + StreamStatusUtils.emitRunningStreamStatus(stream, streamStatusEmitter) + } + } + + @Test + fun testEmitRunningStreamStatusAirbyteStream() { + val airbyteStream = AirbyteStreamNameNamespacePair(NAME, NAMESPACE) + val statusEmitter: Consumer = mock() + val streamStatusEmitter = Optional.of(statusEmitter) + + StreamStatusUtils.emitRunningStreamStatus(Optional.of(airbyteStream), streamStatusEmitter) + + Mockito.verify(statusEmitter, Mockito.times(1)) + .accept(airbyteStreamStatusHolderArgumentCaptor!!.capture()) + Assertions.assertEquals( + AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.RUNNING, + airbyteStreamStatusHolderArgumentCaptor.value.toTraceMessage().streamStatus.status + ) + } + + @Test + fun testEmitRunningStreamStatusEmptyAirbyteStream() { + val statusEmitter: Consumer = mock() + val streamStatusEmitter = Optional.of(statusEmitter) + + Assertions.assertDoesNotThrow { + StreamStatusUtils.emitRunningStreamStatus(Optional.empty(), streamStatusEmitter) + } + Mockito.verify(statusEmitter, Mockito.times(0)) + .accept(airbyteStreamStatusHolderArgumentCaptor!!.capture()) + } + + @Test + fun testEmitRunningStreamStatusAirbyteStreamEmptyStatusEmitter() { + val airbyteStream = AirbyteStreamNameNamespacePair(NAME, NAMESPACE) + val streamStatusEmitter = Optional.empty>() + + Assertions.assertDoesNotThrow { + StreamStatusUtils.emitRunningStreamStatus( + Optional.of(airbyteStream), + streamStatusEmitter + ) + } + } + + @Test + fun testEmitStartedStreamStatusIterator() { + val airbyteStream = AirbyteStreamNameNamespacePair(NAME, NAMESPACE) + val stream: AutoCloseableIterator = mock() + val statusEmitter: Consumer = mock() + val streamStatusEmitter = Optional.of(statusEmitter) + + Mockito.`when`(stream.airbyteStream).thenReturn(Optional.of(airbyteStream)) + + StreamStatusUtils.emitStartStreamStatus(stream, streamStatusEmitter) + + Mockito.verify(statusEmitter, Mockito.times(1)) + .accept(airbyteStreamStatusHolderArgumentCaptor!!.capture()) + Assertions.assertEquals( + AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.STARTED, + airbyteStreamStatusHolderArgumentCaptor.value.toTraceMessage().streamStatus.status + ) + } + + @Test + fun testEmitStartedStreamStatusIteratorEmptyAirbyteStream() { + val stream: AutoCloseableIterator = mock() + val statusEmitter: Consumer = mock() + val streamStatusEmitter = Optional.of(statusEmitter) + + Mockito.`when`(stream.airbyteStream).thenReturn(Optional.empty()) + + Assertions.assertDoesNotThrow { + StreamStatusUtils.emitStartStreamStatus(stream, streamStatusEmitter) + } + Mockito.verify(statusEmitter, Mockito.times(0)) + .accept(airbyteStreamStatusHolderArgumentCaptor!!.capture()) + } + + @Test + fun testEmitStartedStreamStatusIteratorEmptyStatusEmitter() { + val airbyteStream = AirbyteStreamNameNamespacePair(NAME, NAMESPACE) + val stream: AutoCloseableIterator = mock() + val streamStatusEmitter = Optional.empty>() + + Mockito.`when`(stream.airbyteStream).thenReturn(Optional.of(airbyteStream)) + + Assertions.assertDoesNotThrow { + StreamStatusUtils.emitStartStreamStatus(stream, streamStatusEmitter) + } + } + + @Test + fun testEmitStartedStreamStatusAirbyteStreamAware() { + val airbyteStream = AirbyteStreamNameNamespacePair(NAME, NAMESPACE) + val stream = Mockito.mock(AirbyteStreamAware::class.java) + val statusEmitter: Consumer = mock() + val streamStatusEmitter = Optional.of(statusEmitter) + + Mockito.`when`(stream.airbyteStream).thenReturn(Optional.of(airbyteStream)) + + StreamStatusUtils.emitStartStreamStatus(stream, streamStatusEmitter) + + Mockito.verify(statusEmitter, Mockito.times(1)) + .accept(airbyteStreamStatusHolderArgumentCaptor!!.capture()) + Assertions.assertEquals( + AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.STARTED, + airbyteStreamStatusHolderArgumentCaptor.value.toTraceMessage().streamStatus.status + ) + } + + @Test + fun testEmitStartedStreamStatusAirbyteStreamAwareEmptyStream() { + val stream = Mockito.mock(AirbyteStreamAware::class.java) + val statusEmitter: Consumer = mock() + val streamStatusEmitter = Optional.of(statusEmitter) + + Mockito.`when`(stream.airbyteStream).thenReturn(Optional.empty()) + + Assertions.assertDoesNotThrow { + StreamStatusUtils.emitStartStreamStatus(stream, streamStatusEmitter) + } + Mockito.verify(statusEmitter, Mockito.times(0)) + .accept(airbyteStreamStatusHolderArgumentCaptor!!.capture()) + } + + @Test + fun testEmitStartedStreamStatusAirbyteStreamAwareEmptyStatusEmitter() { + val airbyteStream = AirbyteStreamNameNamespacePair(NAME, NAMESPACE) + val stream = Mockito.mock(AirbyteStreamAware::class.java) + val streamStatusEmitter = Optional.empty>() + + Mockito.`when`(stream.airbyteStream).thenReturn(Optional.of(airbyteStream)) + + Assertions.assertDoesNotThrow { + StreamStatusUtils.emitStartStreamStatus(stream, streamStatusEmitter) + } + } + + @Test + fun testEmitStartedStreamStatusAirbyteStream() { + val airbyteStream = AirbyteStreamNameNamespacePair(NAME, NAMESPACE) + val statusEmitter: Consumer = mock() + val streamStatusEmitter = Optional.of(statusEmitter) + + StreamStatusUtils.emitStartStreamStatus(Optional.of(airbyteStream), streamStatusEmitter) + + Mockito.verify(statusEmitter, Mockito.times(1)) + .accept(airbyteStreamStatusHolderArgumentCaptor!!.capture()) + Assertions.assertEquals( + AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.STARTED, + airbyteStreamStatusHolderArgumentCaptor.value.toTraceMessage().streamStatus.status + ) + } + + @Test + fun testEmitStartedStreamStatusEmptyAirbyteStream() { + val statusEmitter: Consumer = mock() + val streamStatusEmitter = Optional.of(statusEmitter) + + Assertions.assertDoesNotThrow { + StreamStatusUtils.emitStartStreamStatus(Optional.empty(), streamStatusEmitter) + } + Mockito.verify(statusEmitter, Mockito.times(0)) + .accept(airbyteStreamStatusHolderArgumentCaptor!!.capture()) + } + + @Test + fun testEmitStartedStreamStatusAirbyteStreamEmptyStatusEmitter() { + val airbyteStream = AirbyteStreamNameNamespacePair(NAME, NAMESPACE) + val streamStatusEmitter = Optional.empty>() + + Assertions.assertDoesNotThrow { + StreamStatusUtils.emitStartStreamStatus(Optional.of(airbyteStream), streamStatusEmitter) + } + } + + @Test + fun testEmitCompleteStreamStatusIterator() { + val airbyteStream = AirbyteStreamNameNamespacePair(NAME, NAMESPACE) + val stream: AutoCloseableIterator = mock() + val statusEmitter: Consumer = mock() + val streamStatusEmitter = Optional.of(statusEmitter) + + Mockito.`when`(stream.airbyteStream).thenReturn(Optional.of(airbyteStream)) + + StreamStatusUtils.emitCompleteStreamStatus(stream, streamStatusEmitter) + + Mockito.verify(statusEmitter, Mockito.times(1)) + .accept(airbyteStreamStatusHolderArgumentCaptor!!.capture()) + Assertions.assertEquals( + AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.COMPLETE, + airbyteStreamStatusHolderArgumentCaptor.value.toTraceMessage().streamStatus.status + ) + } + + @Test + fun testEmitCompleteStreamStatusIteratorEmptyAirbyteStream() { + val stream: AutoCloseableIterator = mock() + val statusEmitter: Consumer = mock() + val streamStatusEmitter = Optional.of(statusEmitter) + + Mockito.`when`(stream.airbyteStream).thenReturn(Optional.empty()) + + Assertions.assertDoesNotThrow { + StreamStatusUtils.emitCompleteStreamStatus(stream, streamStatusEmitter) + } + Mockito.verify(statusEmitter, Mockito.times(0)) + .accept(airbyteStreamStatusHolderArgumentCaptor!!.capture()) + } + + @Test + fun testEmitCompleteStreamStatusIteratorEmptyStatusEmitter() { + val airbyteStream = AirbyteStreamNameNamespacePair(NAME, NAMESPACE) + val stream: AutoCloseableIterator = mock() + val streamStatusEmitter = Optional.empty>() + + Mockito.`when`(stream.airbyteStream).thenReturn(Optional.of(airbyteStream)) + + Assertions.assertDoesNotThrow { + StreamStatusUtils.emitCompleteStreamStatus(stream, streamStatusEmitter) + } + } + + @Test + fun testEmitCompleteStreamStatusAirbyteStreamAware() { + val airbyteStream = AirbyteStreamNameNamespacePair(NAME, NAMESPACE) + val stream = Mockito.mock(AirbyteStreamAware::class.java) + val statusEmitter: Consumer = mock() + val streamStatusEmitter = Optional.of(statusEmitter) + + Mockito.`when`(stream.airbyteStream).thenReturn(Optional.of(airbyteStream)) + + StreamStatusUtils.emitCompleteStreamStatus(stream, streamStatusEmitter) + + Mockito.verify(statusEmitter, Mockito.times(1)) + .accept(airbyteStreamStatusHolderArgumentCaptor!!.capture()) + Assertions.assertEquals( + AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.COMPLETE, + airbyteStreamStatusHolderArgumentCaptor.value.toTraceMessage().streamStatus.status + ) + } + + @Test + fun testEmitCompleteStreamStatusAirbyteStreamAwareEmptyStream() { + val stream = Mockito.mock(AirbyteStreamAware::class.java) + val statusEmitter: Consumer = mock() + val streamStatusEmitter = Optional.of(statusEmitter) + + Mockito.`when`(stream.airbyteStream).thenReturn(Optional.empty()) + + Assertions.assertDoesNotThrow { + StreamStatusUtils.emitCompleteStreamStatus(stream, streamStatusEmitter) + } + Mockito.verify(statusEmitter, Mockito.times(0)) + .accept(airbyteStreamStatusHolderArgumentCaptor!!.capture()) + } + + @Test + fun testEmitCompleteStreamStatusAirbyteStreamAwareEmptyStatusEmitter() { + val airbyteStream = AirbyteStreamNameNamespacePair(NAME, NAMESPACE) + val stream = Mockito.mock(AirbyteStreamAware::class.java) + val streamStatusEmitter = Optional.empty>() + + Mockito.`when`(stream.airbyteStream).thenReturn(Optional.of(airbyteStream)) + + Assertions.assertDoesNotThrow { + StreamStatusUtils.emitCompleteStreamStatus(stream, streamStatusEmitter) + } + } + + @Test + fun testEmitCompleteStreamStatusAirbyteStream() { + val airbyteStream = AirbyteStreamNameNamespacePair(NAME, NAMESPACE) + val statusEmitter: Consumer = mock() + val streamStatusEmitter = Optional.of(statusEmitter) + + StreamStatusUtils.emitCompleteStreamStatus(Optional.of(airbyteStream), streamStatusEmitter) + + Mockito.verify(statusEmitter, Mockito.times(1)) + .accept(airbyteStreamStatusHolderArgumentCaptor!!.capture()) + Assertions.assertEquals( + AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.COMPLETE, + airbyteStreamStatusHolderArgumentCaptor.value.toTraceMessage().streamStatus.status + ) + } + + @Test + fun testEmitCompleteStreamStatusEmptyAirbyteStream() { + val statusEmitter: Consumer = mock() + val streamStatusEmitter = Optional.of(statusEmitter) + + Assertions.assertDoesNotThrow { + StreamStatusUtils.emitCompleteStreamStatus(Optional.empty(), streamStatusEmitter) + } + Mockito.verify(statusEmitter, Mockito.times(0)) + .accept(airbyteStreamStatusHolderArgumentCaptor!!.capture()) + } + + @Test + fun testEmitCompleteStreamStatusAirbyteStreamEmptyStatusEmitter() { + val airbyteStream = AirbyteStreamNameNamespacePair(NAME, NAMESPACE) + val streamStatusEmitter = Optional.empty>() + + Assertions.assertDoesNotThrow { + StreamStatusUtils.emitCompleteStreamStatus( + Optional.of(airbyteStream), + streamStatusEmitter + ) + } + } + + @Test + fun testEmitIncompleteStreamStatusIterator() { + val airbyteStream = AirbyteStreamNameNamespacePair(NAME, NAMESPACE) + val stream: AutoCloseableIterator = mock() + val statusEmitter: Consumer = mock() + val streamStatusEmitter = Optional.of(statusEmitter) + + Mockito.`when`(stream.airbyteStream).thenReturn(Optional.of(airbyteStream)) + + StreamStatusUtils.emitIncompleteStreamStatus(stream, streamStatusEmitter) + + Mockito.verify(statusEmitter, Mockito.times(1)) + .accept(airbyteStreamStatusHolderArgumentCaptor!!.capture()) + Assertions.assertEquals( + AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.INCOMPLETE, + airbyteStreamStatusHolderArgumentCaptor.value.toTraceMessage().streamStatus.status + ) + } + + @Test + fun testEmitIncompleteStreamStatusIteratorEmptyAirbyteStream() { + val stream: AutoCloseableIterator = mock() + val statusEmitter: Consumer = mock() + val streamStatusEmitter = Optional.of(statusEmitter) + + Mockito.`when`(stream.airbyteStream).thenReturn(Optional.empty()) + + Assertions.assertDoesNotThrow { + StreamStatusUtils.emitIncompleteStreamStatus(stream, streamStatusEmitter) + } + Mockito.verify(statusEmitter, Mockito.times(0)) + .accept(airbyteStreamStatusHolderArgumentCaptor!!.capture()) + } + + @Test + fun testEmitIncompleteStreamStatusIteratorEmptyStatusEmitter() { + val airbyteStream = AirbyteStreamNameNamespacePair(NAME, NAMESPACE) + val stream: AutoCloseableIterator = mock() + val streamStatusEmitter = Optional.empty>() + + Mockito.`when`(stream.airbyteStream).thenReturn(Optional.of(airbyteStream)) + + Assertions.assertDoesNotThrow { + StreamStatusUtils.emitIncompleteStreamStatus(stream, streamStatusEmitter) + } + } + + @Test + fun testEmitIncompleteStreamStatusAirbyteStreamAware() { + val airbyteStream = AirbyteStreamNameNamespacePair(NAME, NAMESPACE) + val stream = Mockito.mock(AirbyteStreamAware::class.java) + val statusEmitter: Consumer = mock() + val streamStatusEmitter = Optional.of(statusEmitter) + + Mockito.`when`(stream.airbyteStream).thenReturn(Optional.of(airbyteStream)) + + StreamStatusUtils.emitIncompleteStreamStatus(stream, streamStatusEmitter) + + Mockito.verify(statusEmitter, Mockito.times(1)) + .accept(airbyteStreamStatusHolderArgumentCaptor!!.capture()) + Assertions.assertEquals( + AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.INCOMPLETE, + airbyteStreamStatusHolderArgumentCaptor.value.toTraceMessage().streamStatus.status + ) + } + + @Test + fun testEmitIncompleteStreamStatusAirbyteStreamAwareEmptyStream() { + val stream = Mockito.mock(AirbyteStreamAware::class.java) + val statusEmitter: Consumer = mock() + val streamStatusEmitter = Optional.of(statusEmitter) + + Mockito.`when`(stream.airbyteStream).thenReturn(Optional.empty()) + + Assertions.assertDoesNotThrow { + StreamStatusUtils.emitIncompleteStreamStatus(stream, streamStatusEmitter) + } + Mockito.verify(statusEmitter, Mockito.times(0)) + .accept(airbyteStreamStatusHolderArgumentCaptor!!.capture()) + } + + @Test + fun testEmitIncompleteStreamStatusAirbyteStreamAwareEmptyStatusEmitter() { + val airbyteStream = AirbyteStreamNameNamespacePair(NAME, NAMESPACE) + val stream = Mockito.mock(AirbyteStreamAware::class.java) + val streamStatusEmitter = Optional.empty>() + + Mockito.`when`(stream.airbyteStream).thenReturn(Optional.of(airbyteStream)) + + Assertions.assertDoesNotThrow { + StreamStatusUtils.emitIncompleteStreamStatus(stream, streamStatusEmitter) + } + } + + @Test + fun testEmitIncompleteStreamStatusAirbyteStream() { + val airbyteStream = AirbyteStreamNameNamespacePair(NAME, NAMESPACE) + val statusEmitter: Consumer = mock() + val streamStatusEmitter = Optional.of(statusEmitter) + + StreamStatusUtils.emitIncompleteStreamStatus( + Optional.of(airbyteStream), + streamStatusEmitter + ) + + Mockito.verify(statusEmitter, Mockito.times(1)) + .accept(airbyteStreamStatusHolderArgumentCaptor!!.capture()) + Assertions.assertEquals( + AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.INCOMPLETE, + airbyteStreamStatusHolderArgumentCaptor.value.toTraceMessage().streamStatus.status + ) + } + + @Test + fun testEmitIncompleteStreamStatusEmptyAirbyteStream() { + val statusEmitter: Consumer = mock() + val streamStatusEmitter = Optional.of(statusEmitter) + + Assertions.assertDoesNotThrow { + StreamStatusUtils.emitIncompleteStreamStatus(Optional.empty(), streamStatusEmitter) + } + Mockito.verify(statusEmitter, Mockito.times(0)) + .accept(airbyteStreamStatusHolderArgumentCaptor!!.capture()) + } + + @Test + fun testEmitIncompleteStreamStatusAirbyteStreamEmptyStatusEmitter() { + val airbyteStream = AirbyteStreamNameNamespacePair(NAME, NAMESPACE) + val streamStatusEmitter = Optional.empty>() + + Assertions.assertDoesNotThrow { + StreamStatusUtils.emitIncompleteStreamStatus( + Optional.of(airbyteStream), + streamStatusEmitter + ) + } + } + + companion object { + private const val NAME = "name" + private const val NAMESPACE = "namespace" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/string/StringsTest.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/string/StringsTest.kt new file mode 100644 index 0000000000000..b663450f03481 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/string/StringsTest.kt @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.string + +import com.google.common.collect.Lists +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class StringsTest { + private class JoinClass(private val id: Int) { + override fun toString(): String { + return "id = $id" + } + } + + @Test + fun testJoin() { + Assertions.assertEquals( + "1, 2, 3, 4, 5", + Strings.join(Lists.newArrayList(1, 2, 3, 4, 5), ", ") + ) + + Assertions.assertEquals( + "id = 1, id = 2, id = 3", + Strings.join(Lists.newArrayList(JoinClass(1), JoinClass(2), JoinClass(3)), ", ") + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/text/NamesTest.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/text/NamesTest.kt new file mode 100644 index 0000000000000..aa3d92efac39b --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/text/NamesTest.kt @@ -0,0 +1,36 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.text + +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class NamesTest { + @Test + fun testToAlphanumericAndUnderscore() { + Assertions.assertEquals("users", Names.toAlphanumericAndUnderscore("users")) + Assertions.assertEquals("users123", Names.toAlphanumericAndUnderscore("users123")) + Assertions.assertEquals("UsErS", Names.toAlphanumericAndUnderscore("UsErS")) + Assertions.assertEquals( + "users_USE_special_____", + Names.toAlphanumericAndUnderscore("users USE special !@#$") + ) + } + + @Test + fun testDoubleQuote() { + Assertions.assertEquals("\"abc\"", Names.doubleQuote("abc")) + Assertions.assertEquals("\"abc\"", Names.doubleQuote("\"abc\"")) + Assertions.assertThrows(IllegalStateException::class.java) { Names.doubleQuote("\"abc") } + Assertions.assertThrows(IllegalStateException::class.java) { Names.doubleQuote("abc\"") } + } + + @Test + fun testSimpleQuote() { + Assertions.assertEquals("'abc'", Names.singleQuote("abc")) + Assertions.assertEquals("'abc'", Names.singleQuote("'abc'")) + Assertions.assertThrows(IllegalStateException::class.java) { Names.singleQuote("'abc") } + Assertions.assertThrows(IllegalStateException::class.java) { Names.singleQuote("abc'") } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/text/SqlsTest.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/text/SqlsTest.kt new file mode 100644 index 0000000000000..a5b87eb066c59 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/text/SqlsTest.kt @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.text + +import com.google.common.collect.Lists +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class SqlsTest { + internal enum class E1 { + VALUE_1, + VALUE_TWO, + value_three, + } + + @Test + fun testToSqlName() { + Assertions.assertEquals("value_1", Sqls.toSqlName(E1.VALUE_1)) + Assertions.assertEquals("value_two", Sqls.toSqlName(E1.VALUE_TWO)) + Assertions.assertEquals("value_three", Sqls.toSqlName(E1.value_three)) + } + + @Test + fun testInFragment() { + Assertions.assertEquals( + "('value_two','value_three')", + Sqls.toSqlInFragment(Lists.newArrayList(E1.VALUE_TWO, E1.value_three)) + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/util/AutoCloseableIteratorsTest.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/util/AutoCloseableIteratorsTest.kt new file mode 100644 index 0000000000000..374c61595e918 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/util/AutoCloseableIteratorsTest.kt @@ -0,0 +1,115 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.util + +import com.google.common.collect.Iterators +import io.airbyte.commons.concurrency.VoidCallable +import java.util.concurrent.atomic.AtomicBoolean +import java.util.stream.Stream +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.mockito.Mockito + +internal class AutoCloseableIteratorsTest { + @Test + @Throws(Exception::class) + fun testFromIterator() { + val onClose = Mockito.mock(VoidCallable::class.java) + val iterator = + AutoCloseableIterators.fromIterator(MoreIterators.of("a", "b", "c"), onClose, null) + + assertNext(iterator, "a") + assertNext(iterator, "b") + assertNext(iterator, "c") + iterator.close() + + Mockito.verify(onClose).call() + } + + @Test + @Throws(Exception::class) + fun testFromStream() { + val isClosed = AtomicBoolean(false) + val stream = Stream.of("a", "b", "c") + stream.onClose { isClosed.set(true) } + + val iterator = AutoCloseableIterators.fromStream(stream, null) + + assertNext(iterator, "a") + assertNext(iterator, "b") + assertNext(iterator, "c") + iterator.close() + + Assertions.assertTrue(isClosed.get()) + } + + private fun assertNext(iterator: Iterator, value: String) { + Assertions.assertTrue(iterator.hasNext()) + Assertions.assertEquals(value, iterator.next()) + } + + @Test + @Throws(Exception::class) + fun testAppendOnClose() { + val onClose1 = Mockito.mock(VoidCallable::class.java) + val onClose2 = Mockito.mock(VoidCallable::class.java) + + val iterator = + AutoCloseableIterators.fromIterator(MoreIterators.of(1, 2, 3), onClose1, null) + val iteratorWithExtraClose = AutoCloseableIterators.appendOnClose(iterator, onClose2) + + iteratorWithExtraClose.close() + Mockito.verify(onClose1).call() + Mockito.verify(onClose2).call() + } + + @Test + fun testTransform() { + val transform = Iterators.transform(MoreIterators.of(1, 2, 3)) { i: Int -> i + 1 } + Assertions.assertEquals(listOf(2, 3, 4), MoreIterators.toList(transform)) + } + + @Test + @Throws(Exception::class) + fun testConcatWithEagerClose() { + val onClose1 = Mockito.mock(VoidCallable::class.java) + val onClose2 = Mockito.mock(VoidCallable::class.java) + + val iterator: AutoCloseableIterator = + CompositeIterator( + java.util.List.of( + AutoCloseableIterators.fromIterator(MoreIterators.of("a", "b"), onClose1, null), + AutoCloseableIterators.fromIterator(MoreIterators.of("d"), onClose2, null) + ), + null + ) + + assertOnCloseInvocations(listOf(), java.util.List.of(onClose1, onClose2)) + assertNext(iterator, "a") + assertNext(iterator, "b") + assertNext(iterator, "d") + assertOnCloseInvocations(java.util.List.of(onClose1), java.util.List.of(onClose2)) + Assertions.assertFalse(iterator.hasNext()) + assertOnCloseInvocations(java.util.List.of(onClose1, onClose2), listOf()) + + iterator.close() + + Mockito.verify(onClose1, Mockito.times(1)).call() + Mockito.verify(onClose2, Mockito.times(1)).call() + } + + @Throws(Exception::class) + private fun assertOnCloseInvocations( + haveClosed: List, + haveNotClosed: List + ) { + for (voidCallable in haveClosed) { + Mockito.verify(voidCallable).call() + } + + for (voidCallable in haveNotClosed) { + Mockito.verify(voidCallable, Mockito.never()).call() + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/util/CompositeIteratorTest.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/util/CompositeIteratorTest.kt new file mode 100644 index 0000000000000..ede1ea15acd99 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/util/CompositeIteratorTest.kt @@ -0,0 +1,208 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.util + +import com.google.common.collect.ImmutableList +import io.airbyte.commons.concurrency.VoidCallable +import io.airbyte.commons.stream.AirbyteStreamStatusHolder +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair +import java.util.function.Consumer +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.mockito.Mockito +import org.mockito.kotlin.any +import org.mockito.kotlin.mock + +internal class CompositeIteratorTest { + private lateinit var onClose1: VoidCallable + private lateinit var onClose2: VoidCallable + private lateinit var onClose3: VoidCallable + private lateinit var airbyteStreamStatusConsumer: Consumer + private var airbyteStream1: AirbyteStreamNameNamespacePair? = null + private var airbyteStream2: AirbyteStreamNameNamespacePair? = null + private var airbyteStream3: AirbyteStreamNameNamespacePair? = null + + @BeforeEach + fun setup() { + onClose1 = Mockito.mock(VoidCallable::class.java) + onClose2 = Mockito.mock(VoidCallable::class.java) + onClose3 = Mockito.mock(VoidCallable::class.java) + airbyteStreamStatusConsumer = mock() + airbyteStream1 = AirbyteStreamNameNamespacePair("stream1", "namespace") + airbyteStream2 = AirbyteStreamNameNamespacePair("stream2", "namespace") + airbyteStream3 = AirbyteStreamNameNamespacePair("stream3", "namespace") + } + + @Test + fun testNullInput() { + Mockito.verify(airbyteStreamStatusConsumer, Mockito.times(0)).accept(any()) + } + + @Test + fun testEmptyInput() { + val iterator: AutoCloseableIterator = + CompositeIterator( + emptyList>(), + airbyteStreamStatusConsumer + ) + Assertions.assertFalse(iterator.hasNext()) + Mockito.verify(airbyteStreamStatusConsumer, Mockito.times(0)).accept(any()) + } + + @Test + @Throws(Exception::class) + fun testMultipleIterators() { + val iterator: AutoCloseableIterator = + CompositeIterator( + ImmutableList.of( + AutoCloseableIterators.fromIterator( + MoreIterators.of("a", "b", "c"), + onClose1, + airbyteStream1 + ), + AutoCloseableIterators.fromIterator( + MoreIterators.of("d", "e", "f"), + onClose2, + airbyteStream2 + ), + AutoCloseableIterators.fromIterator( + MoreIterators.of("g", "h", "i"), + onClose3, + airbyteStream3 + ) + ), + airbyteStreamStatusConsumer + ) + + assertOnCloseInvocations(ImmutableList.of(), ImmutableList.of(onClose1, onClose2, onClose3)) + assertNext(iterator, "a") + assertNext(iterator, "b") + assertNext(iterator, "c") + assertNext(iterator, "d") + assertOnCloseInvocations(ImmutableList.of(onClose1), ImmutableList.of(onClose2, onClose3)) + assertNext(iterator, "e") + assertNext(iterator, "f") + assertNext(iterator, "g") + assertOnCloseInvocations(ImmutableList.of(onClose1, onClose2), ImmutableList.of(onClose3)) + assertNext(iterator, "h") + assertNext(iterator, "i") + assertOnCloseInvocations(ImmutableList.of(onClose1, onClose2), ImmutableList.of(onClose3)) + Assertions.assertFalse(iterator.hasNext()) + assertOnCloseInvocations(ImmutableList.of(onClose1, onClose2, onClose3), ImmutableList.of()) + + iterator.close() + + Mockito.verify(onClose1, Mockito.times(1)).call() + Mockito.verify(onClose2, Mockito.times(1)).call() + Mockito.verify(onClose3, Mockito.times(1)).call() + Mockito.verify(airbyteStreamStatusConsumer, Mockito.times(9)).accept(any()) + } + + @Test + @Throws(Exception::class) + fun testWithEmptyIterators() { + val iterator: AutoCloseableIterator = + CompositeIterator( + ImmutableList.of( + AutoCloseableIterators.fromIterator( + MoreIterators.of("a", "b", "c"), + onClose1, + airbyteStream1 + ), + AutoCloseableIterators.fromIterator( + MoreIterators.of(), + onClose2, + airbyteStream2 + ), + AutoCloseableIterators.fromIterator( + MoreIterators.of("g", "h", "i"), + onClose3, + airbyteStream3 + ) + ), + airbyteStreamStatusConsumer + ) + + assertOnCloseInvocations(ImmutableList.of(), ImmutableList.of(onClose1, onClose2, onClose3)) + assertNext(iterator, "a") + assertNext(iterator, "b") + assertNext(iterator, "c") + assertNext(iterator, "g") + assertOnCloseInvocations(ImmutableList.of(onClose1, onClose2), ImmutableList.of(onClose3)) + assertNext(iterator, "h") + assertNext(iterator, "i") + Assertions.assertFalse(iterator.hasNext()) + assertOnCloseInvocations(ImmutableList.of(onClose1, onClose2, onClose3), ImmutableList.of()) + Mockito.verify(airbyteStreamStatusConsumer, Mockito.times(8)).accept(any()) + } + + @Test + @Throws(Exception::class) + fun testCloseBeforeUsingItUp() { + val iterator: AutoCloseableIterator = + CompositeIterator( + ImmutableList.of( + AutoCloseableIterators.fromIterator( + MoreIterators.of("a", "b", "c"), + onClose1, + airbyteStream1 + ) + ), + airbyteStreamStatusConsumer + ) + + assertOnCloseInvocations(ImmutableList.of(), ImmutableList.of(onClose1)) + assertNext(iterator, "a") + assertNext(iterator, "b") + assertOnCloseInvocations(ImmutableList.of(), ImmutableList.of(onClose1)) + iterator.close() + assertOnCloseInvocations(ImmutableList.of(onClose1), ImmutableList.of()) + Mockito.verify(airbyteStreamStatusConsumer, Mockito.times(2)).accept(any()) + } + + @Test + @Throws(Exception::class) + fun testCannotOperateAfterClosing() { + val iterator: AutoCloseableIterator = + CompositeIterator( + ImmutableList.of( + AutoCloseableIterators.fromIterator( + MoreIterators.of("a", "b", "c"), + onClose1, + airbyteStream1 + ) + ), + airbyteStreamStatusConsumer + ) + + assertOnCloseInvocations(ImmutableList.of(), ImmutableList.of(onClose1)) + assertNext(iterator, "a") + assertNext(iterator, "b") + iterator.close() + Assertions.assertThrows(IllegalStateException::class.java) { iterator.hasNext() } + Assertions.assertThrows(IllegalStateException::class.java) { iterator.next() } + iterator.close() // still allowed to close again. + Mockito.verify(airbyteStreamStatusConsumer, Mockito.times(2)).accept(any()) + } + + private fun assertNext(iterator: Iterator, value: String) { + Assertions.assertTrue(iterator.hasNext()) + Assertions.assertEquals(value, iterator.next()) + } + + @Throws(Exception::class) + private fun assertOnCloseInvocations( + haveClosed: List, + haveNotClosed: List + ) { + for (voidCallable in haveClosed) { + Mockito.verify(voidCallable).call() + } + + for (voidCallable in haveNotClosed) { + Mockito.verify(voidCallable, Mockito.never()).call() + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/util/DefaultAutoCloseableIteratorTest.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/util/DefaultAutoCloseableIteratorTest.kt new file mode 100644 index 0000000000000..46f9f14ecf50c --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/util/DefaultAutoCloseableIteratorTest.kt @@ -0,0 +1,59 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.util + +import io.airbyte.commons.concurrency.VoidCallable +import java.util.* +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.mockito.Mockito + +internal class DefaultAutoCloseableIteratorTest { + + @Test + @Throws(Exception::class) + fun testEmptyInput() { + val onClose = Mockito.mock(VoidCallable::class.java) + val iterator: AutoCloseableIterator = + DefaultAutoCloseableIterator(Collections.emptyIterator(), onClose, null) + Assertions.assertFalse(iterator.hasNext()) + iterator.close() + Mockito.verify(onClose).call() + } + + @Test + @Throws(Exception::class) + fun test() { + val onClose = Mockito.mock(VoidCallable::class.java) + val iterator: AutoCloseableIterator = + DefaultAutoCloseableIterator(MoreIterators.of("a", "b", "c"), onClose, null) + + assertNext(iterator, "a") + assertNext(iterator, "b") + assertNext(iterator, "c") + iterator.close() + + Mockito.verify(onClose).call() + } + + @Test + @Throws(Exception::class) + fun testCannotOperateAfterClosing() { + val onClose = Mockito.mock(VoidCallable::class.java) + val iterator: AutoCloseableIterator = + DefaultAutoCloseableIterator(MoreIterators.of("a", "b", "c"), onClose, null) + + assertNext(iterator, "a") + assertNext(iterator, "b") + iterator.close() + Assertions.assertThrows(IllegalStateException::class.java) { iterator.hasNext() } + Assertions.assertThrows(IllegalStateException::class.java) { iterator.next() } + iterator.close() // still allowed to close again. + } + + private fun assertNext(iterator: Iterator, value: String) { + Assertions.assertTrue(iterator.hasNext()) + Assertions.assertEquals(value, iterator.next()) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/util/LazyAutoCloseableIteratorTest.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/util/LazyAutoCloseableIteratorTest.kt new file mode 100644 index 0000000000000..6b3f85caf0174 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/util/LazyAutoCloseableIteratorTest.kt @@ -0,0 +1,76 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.util + +import java.util.* +import java.util.function.Supplier +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.mockito.Mockito +import org.mockito.invocation.InvocationOnMock +import org.mockito.kotlin.mock + +internal class LazyAutoCloseableIteratorTest { + private var internalIterator: AutoCloseableIterator = mock() + private var iteratorSupplier: Supplier> = mock() + + @BeforeEach + fun setup() { + internalIterator = mock() + iteratorSupplier = mock() + Mockito.`when`(iteratorSupplier.get()).thenReturn(internalIterator) + } + + @Test + @Throws(Exception::class) + fun testEmptyInput() { + mockInternalIteratorWith(Collections.emptyIterator()) + val iterator: AutoCloseableIterator = + LazyAutoCloseableIterator(iteratorSupplier, null) + + Assertions.assertFalse(iterator.hasNext()) + iterator.close() + Mockito.verify(internalIterator).close() + } + + @Test + @Throws(Exception::class) + fun test() { + mockInternalIteratorWith(MoreIterators.of("a", "b", "c")) + + val iterator: AutoCloseableIterator = + LazyAutoCloseableIterator(iteratorSupplier, null) + Mockito.verify(iteratorSupplier, Mockito.never()).get() + assertNext(iterator, "a") + Mockito.verify(iteratorSupplier).get() + Mockito.verifyNoMoreInteractions(iteratorSupplier) + assertNext(iterator, "b") + assertNext(iterator, "c") + iterator.close() + Mockito.verify(internalIterator).close() + } + + @Test + @Throws(Exception::class) + fun testCloseBeforeSupply() { + mockInternalIteratorWith(MoreIterators.of("a", "b", "c")) + val iterator: AutoCloseableIterator = + LazyAutoCloseableIterator(iteratorSupplier, null) + iterator.close() + Mockito.verify(iteratorSupplier, Mockito.never()).get() + } + + private fun mockInternalIteratorWith(iterator: Iterator) { + Mockito.`when`(internalIterator!!.hasNext()).then { a: InvocationOnMock? -> + iterator.hasNext() + } + Mockito.`when`(internalIterator!!.next()).then { a: InvocationOnMock? -> iterator.next() } + } + + private fun assertNext(iterator: Iterator, value: String) { + Assertions.assertTrue(iterator.hasNext()) + Assertions.assertEquals(value, iterator.next()) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/version/AirbyteVersionTest.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/version/AirbyteVersionTest.kt new file mode 100644 index 0000000000000..4e5b874753734 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/version/AirbyteVersionTest.kt @@ -0,0 +1,222 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.version + +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class AirbyteVersionTest { + @Test + fun testParseVersion() { + val version = AirbyteVersion(VERSION_678) + Assertions.assertEquals("6", version.major) + Assertions.assertEquals("7", version.minor) + Assertions.assertEquals("8", version.patch) + } + + @Test + fun testParseVersionWithLabel() { + val version = AirbyteVersion(VERSION_678_OMEGA) + Assertions.assertEquals("6", version.major) + Assertions.assertEquals("7", version.minor) + Assertions.assertEquals("8", version.patch) + } + + @Test + fun testCompatibleVersionCompareTo() { + Assertions.assertEquals( + 0, + AirbyteVersion(VERSION_678_OMEGA) + .compatibleVersionCompareTo(AirbyteVersion(VERSION_678_GAMMA)) + ) + Assertions.assertEquals( + 0, + AirbyteVersion(VERSION_678_ALPHA) + .compatibleVersionCompareTo(AirbyteVersion(VERSION_679_ALPHA)) + ) + Assertions.assertTrue( + 0 < + AirbyteVersion(VERSION_680_ALPHA) + .compatibleVersionCompareTo(AirbyteVersion(VERSION_678_ALPHA)) + ) + Assertions.assertTrue( + 0 < + AirbyteVersion("11.8.0-alpha") + .compatibleVersionCompareTo(AirbyteVersion(VERSION_678_ALPHA)) + ) + Assertions.assertTrue( + 0 < + AirbyteVersion(VERSION_6110_ALPHA) + .compatibleVersionCompareTo(AirbyteVersion(VERSION_678_ALPHA)) + ) + Assertions.assertTrue( + 0 > + AirbyteVersion("0.8.0-alpha") + .compatibleVersionCompareTo(AirbyteVersion(VERSION_678_ALPHA)) + ) + Assertions.assertEquals( + 0, + AirbyteVersion(VERSION_123_PROD).compatibleVersionCompareTo(AirbyteVersion(DEV)) + ) + Assertions.assertEquals( + 0, + AirbyteVersion(DEV).compatibleVersionCompareTo(AirbyteVersion(VERSION_123_PROD)) + ) + } + + @Test + fun testPatchVersionCompareTo() { + Assertions.assertEquals( + 0, + AirbyteVersion(VERSION_678_OMEGA) + .patchVersionCompareTo(AirbyteVersion(VERSION_678_GAMMA)) + ) + Assertions.assertTrue( + 0 > + AirbyteVersion(VERSION_678_ALPHA) + .patchVersionCompareTo(AirbyteVersion(VERSION_679_ALPHA)) + ) + Assertions.assertTrue( + 0 > + AirbyteVersion(VERSION_678_ALPHA) + .patchVersionCompareTo(AirbyteVersion("6.7.11-alpha")) + ) + Assertions.assertTrue( + 0 < + AirbyteVersion(VERSION_680_ALPHA) + .patchVersionCompareTo(AirbyteVersion(VERSION_678_ALPHA)) + ) + Assertions.assertTrue( + 0 < + AirbyteVersion(VERSION_6110_ALPHA) + .patchVersionCompareTo(AirbyteVersion(VERSION_678_ALPHA)) + ) + Assertions.assertTrue( + 0 > + AirbyteVersion(VERSION_380_ALPHA) + .patchVersionCompareTo(AirbyteVersion(VERSION_678_ALPHA)) + ) + Assertions.assertTrue( + 0 > + AirbyteVersion(VERSION_380_ALPHA) + .patchVersionCompareTo(AirbyteVersion("11.7.8-alpha")) + ) + Assertions.assertEquals( + 0, + AirbyteVersion(VERSION_123_PROD).patchVersionCompareTo(AirbyteVersion(DEV)) + ) + Assertions.assertEquals( + 0, + AirbyteVersion(DEV).patchVersionCompareTo(AirbyteVersion(VERSION_123_PROD)) + ) + } + + @Test + fun testGreaterThan() { + Assertions.assertFalse( + AirbyteVersion(VERSION_678_OMEGA).greaterThan(AirbyteVersion(VERSION_678_GAMMA)) + ) + Assertions.assertFalse( + AirbyteVersion(VERSION_678_ALPHA).greaterThan(AirbyteVersion(VERSION_679_ALPHA)) + ) + Assertions.assertFalse( + AirbyteVersion(VERSION_678_ALPHA).greaterThan(AirbyteVersion("6.7.11-alpha")) + ) + Assertions.assertTrue( + AirbyteVersion(VERSION_680_ALPHA).greaterThan(AirbyteVersion(VERSION_678_ALPHA)) + ) + Assertions.assertTrue( + AirbyteVersion(VERSION_6110_ALPHA).greaterThan(AirbyteVersion(VERSION_678_ALPHA)) + ) + Assertions.assertFalse( + AirbyteVersion(VERSION_380_ALPHA).greaterThan(AirbyteVersion(VERSION_678_ALPHA)) + ) + Assertions.assertFalse( + AirbyteVersion(VERSION_380_ALPHA).greaterThan(AirbyteVersion("11.7.8-alpha")) + ) + Assertions.assertFalse(AirbyteVersion(VERSION_123_PROD).greaterThan(AirbyteVersion(DEV))) + Assertions.assertFalse(AirbyteVersion(DEV).greaterThan(AirbyteVersion(VERSION_123_PROD))) + } + + @Test + fun testLessThan() { + Assertions.assertFalse( + AirbyteVersion(VERSION_678_OMEGA).lessThan(AirbyteVersion(VERSION_678_GAMMA)) + ) + Assertions.assertTrue( + AirbyteVersion(VERSION_678_ALPHA).lessThan(AirbyteVersion(VERSION_679_ALPHA)) + ) + Assertions.assertTrue( + AirbyteVersion(VERSION_678_ALPHA).lessThan(AirbyteVersion("6.7.11-alpha")) + ) + Assertions.assertFalse( + AirbyteVersion(VERSION_680_ALPHA).lessThan(AirbyteVersion(VERSION_678_ALPHA)) + ) + Assertions.assertFalse( + AirbyteVersion(VERSION_6110_ALPHA).lessThan(AirbyteVersion(VERSION_678_ALPHA)) + ) + Assertions.assertTrue( + AirbyteVersion(VERSION_380_ALPHA).lessThan(AirbyteVersion(VERSION_678_ALPHA)) + ) + Assertions.assertTrue( + AirbyteVersion(VERSION_380_ALPHA).lessThan(AirbyteVersion("11.7.8-alpha")) + ) + Assertions.assertFalse(AirbyteVersion(VERSION_123_PROD).lessThan(AirbyteVersion(DEV))) + Assertions.assertFalse(AirbyteVersion(DEV).lessThan(AirbyteVersion(VERSION_123_PROD))) + } + + @Test + fun testInvalidVersions() { + Assertions.assertThrows(IllegalArgumentException::class.java) { AirbyteVersion("0.6") } + } + + @Test + fun testSerialize() { + Assertions.assertEquals(DEV, AirbyteVersion(DEV).serialize()) + + val nonDevVersion = "0.1.2-alpha" + Assertions.assertEquals(nonDevVersion, AirbyteVersion(nonDevVersion).serialize()) + } + + @Test + fun testCheckVersion() { + AirbyteVersion.assertIsCompatible(AirbyteVersion("3.2.1"), AirbyteVersion("3.2.1")) + Assertions.assertThrows(IllegalStateException::class.java) { + AirbyteVersion.assertIsCompatible(AirbyteVersion("1.2.3"), AirbyteVersion("3.2.1")) + } + } + + @Test + fun testCheckOnlyPatchVersion() { + Assertions.assertFalse( + AirbyteVersion(VERSION_678) + .checkOnlyPatchVersionIsUpdatedComparedTo(AirbyteVersion(VERSION_678)) + ) + Assertions.assertFalse( + AirbyteVersion("6.9.8") + .checkOnlyPatchVersionIsUpdatedComparedTo(AirbyteVersion("6.8.9")) + ) + Assertions.assertFalse( + AirbyteVersion("7.7.8") + .checkOnlyPatchVersionIsUpdatedComparedTo(AirbyteVersion("6.7.11")) + ) + Assertions.assertTrue( + AirbyteVersion("6.7.9") + .checkOnlyPatchVersionIsUpdatedComparedTo(AirbyteVersion(VERSION_678)) + ) + } + + companion object { + private const val VERSION_678 = "6.7.8" + private const val VERSION_678_OMEGA = "6.7.8-omega" + private const val VERSION_678_ALPHA = "6.7.8-alpha" + private const val VERSION_678_GAMMA = "6.7.8-gamma" + private const val VERSION_679_ALPHA = "6.7.9-alpha" + private const val VERSION_680_ALPHA = "6.8.0-alpha" + private const val VERSION_6110_ALPHA = "6.11.0-alpha" + private const val VERSION_123_PROD = "1.2.3-prod" + private const val DEV = "dev" + private const val VERSION_380_ALPHA = "3.8.0-alpha" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/version/VersionTest.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/version/VersionTest.kt new file mode 100644 index 0000000000000..db343117c86ff --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/version/VersionTest.kt @@ -0,0 +1,27 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.version + +import io.airbyte.commons.json.Jsons +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class VersionTest { + @Test + fun testJsonSerializationDeserialization() { + val jsonString = + """ + {"version": "1.2.3"} + + """.trimIndent() + val expectedVersion = Version("1.2.3") + + val deserializedVersion = Jsons.deserialize(jsonString, Version::class.java) + Assertions.assertEquals(expectedVersion, deserializedVersion) + + val deserializedVersionLoop = + Jsons.deserialize(Jsons.serialize(deserializedVersion), Version::class.java) + Assertions.assertEquals(expectedVersion, deserializedVersionLoop) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/yaml/YamlsTest.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/yaml/YamlsTest.kt new file mode 100644 index 0000000000000..bc6c67d11b08b --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/commons/yaml/YamlsTest.kt @@ -0,0 +1,161 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.yaml + +import com.fasterxml.jackson.annotation.JsonProperty +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.collect.ImmutableMap +import com.google.common.collect.Lists +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.stream.MoreStreams +import java.io.* +import java.nio.charset.StandardCharsets +import java.util.* +import java.util.stream.Collectors +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.mockito.Mockito + +internal class YamlsTest { + @Test + fun testSerialize() { + Assertions.assertEquals( + "$LINE_BREAK${STR_ABC}num: 999\nnumLong: 888\n", + Yamls.serialize(ToClass(ABC, 999, 888L)) + ) + + Assertions.assertEquals( + "${LINE_BREAK}test: \"abc\"\ntest2: \"def\"\n", + Yamls.serialize(ImmutableMap.of("test", ABC, "test2", "def")) + ) + } + + @Test + fun testSerializeWithoutQuotes() { + Assertions.assertEquals( + "${LINE_BREAK}str: abc\nnum: 999\nnumLong: 888\n", + Yamls.serializeWithoutQuotes(ToClass(ABC, 999, 888L)) + ) + + Assertions.assertEquals( + "${LINE_BREAK}test: abc\ntest2: def\n", + Yamls.serializeWithoutQuotes(ImmutableMap.of("test", ABC, "test2", "def")) + ) + } + + @Test + fun testSerializeJsonNode() { + Assertions.assertEquals( + "$LINE_BREAK${STR_ABC}num: 999\nnumLong: 888\n", + Yamls.serialize(Jsons.jsonNode(ToClass(ABC, 999, 888L))) + ) + + Assertions.assertEquals( + "${LINE_BREAK}test: \"abc\"\ntest2: \"def\"\n", + Yamls.serialize(Jsons.jsonNode(ImmutableMap.of("test", ABC, "test2", "def"))) + ) + } + + @Test + fun testDeserialize() { + Assertions.assertEquals( + ToClass(ABC, 999, 888L), + Yamls.deserialize( + "$LINE_BREAK${STR_ABC}num: \"999\"\nnumLong: \"888\"\n", + ToClass::class.java + ) + ) + } + + @Test + fun testDeserializeToJsonNode() { + Assertions.assertEquals( + "{\"str\":\"abc\"}", + Yamls.deserialize(LINE_BREAK + STR_ABC).toString() + ) + + Assertions.assertEquals( + "[{\"str\":\"abc\"},{\"str\":\"abc\"}]", + Yamls.deserialize("$LINE_BREAK- str: \"abc\"\n- str: \"abc\"\n").toString() + ) + } + + @Test + @Throws(Exception::class) + fun testListWriter() { + val values: List = Lists.newArrayList(1, 2, 3) + val writer = Mockito.spy(StringWriter()) + val consumer = Yamls.listWriter(writer) + values.forEach(consumer) + consumer.close() + + Mockito.verify(writer).close() + + val deserialize: List<*> = Yamls.deserialize(writer.toString(), MutableList::class.java) + Assertions.assertEquals(values, deserialize) + } + + @Test + @Throws(IOException::class) + fun testStreamRead() { + val classes: List = + Lists.newArrayList(ToClass("1", 1, 1), ToClass("2", 2, 2), ToClass("3", 3, 3)) + val input = + Mockito.spy( + ByteArrayInputStream(Yamls.serialize(classes).toByteArray(StandardCharsets.UTF_8)) + ) + + try { + Yamls.deserializeArray(input).use { iterator -> + Assertions.assertEquals( + classes, + MoreStreams.toStream(iterator) + .map { e: JsonNode? -> Jsons.`object`(e, ToClass::class.java) } + .collect(Collectors.toList()) + ) + } + } catch (e: Exception) { + Assertions.fail() + } + + Mockito.verify(input).close() + } + + private class ToClass { + @JsonProperty("str") var str: String? = null + + @JsonProperty("num") var num: Int? = null + + @JsonProperty("numLong") var numLong: Long = 0 + + constructor() + + constructor(str: String?, num: Int?, numLong: Long) { + this.str = str + this.num = num + this.numLong = numLong + } + + override fun equals(o: Any?): Boolean { + if (this === o) { + return true + } + if (o == null || javaClass != o.javaClass) { + return false + } + val toClass = o as ToClass + return numLong == toClass.numLong && str == toClass.str && num == toClass.num + } + + override fun hashCode(): Int { + return Objects.hash(str, num, numLong) + } + } + + companion object { + private const val LINE_BREAK = "---\n" + private const val STR_ABC = "str: \"abc\"\n" + private const val ABC = "abc" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/configoss/ConfigSchemaTest.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/configoss/ConfigSchemaTest.kt new file mode 100644 index 0000000000000..a93e59526305a --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/configoss/ConfigSchemaTest.kt @@ -0,0 +1,30 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.configoss + +import java.io.IOException +import java.nio.charset.StandardCharsets +import java.nio.file.Files +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class ConfigSchemaTest { + @Test + @Throws(IOException::class) + fun testFile() { + val schema = + Files.readString(ConfigSchema.STATE.configSchemaFile.toPath(), StandardCharsets.UTF_8) + Assertions.assertTrue(schema.contains("title")) + } + + @Test + fun testPrepareKnownSchemas() { + for (value in ConfigSchema.entries) { + Assertions.assertTrue( + Files.exists(value.configSchemaFile.toPath()), + value.configSchemaFile.toPath().toString() + " does not exist" + ) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/configoss/DataTypeEnumTest.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/configoss/DataTypeEnumTest.kt new file mode 100644 index 0000000000000..742181abf0351 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/configoss/DataTypeEnumTest.kt @@ -0,0 +1,66 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.configoss + +import io.airbyte.protocol.models.JsonSchemaPrimitiveUtil +import java.util.* +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class DataTypeEnumTest { + // We use JsonSchemaPrimitive in tests to construct schemas. We want to verify that their are + // valid + // conversions between JsonSchemaPrimitive to DataType so that if anything changes we won't have + // hard-to-decipher errors in our tests. Once we get rid of Schema, we can can drop this test. + @Test + fun testConversionFromJsonSchemaPrimitiveToDataType() { + Assertions.assertEquals(5, DataType::class.java.enumConstants.size) + Assertions.assertEquals( + 17, + JsonSchemaPrimitiveUtil.JsonSchemaPrimitive::class.java.enumConstants.size + ) + + Assertions.assertEquals( + DataType.STRING, + DataType.fromValue( + JsonSchemaPrimitiveUtil.JsonSchemaPrimitive.STRING.toString() + .lowercase(Locale.getDefault()) + ) + ) + Assertions.assertEquals( + DataType.NUMBER, + DataType.fromValue( + JsonSchemaPrimitiveUtil.JsonSchemaPrimitive.NUMBER.toString() + .lowercase(Locale.getDefault()) + ) + ) + Assertions.assertEquals( + DataType.BOOLEAN, + DataType.fromValue( + JsonSchemaPrimitiveUtil.JsonSchemaPrimitive.BOOLEAN.toString() + .lowercase(Locale.getDefault()) + ) + ) + Assertions.assertEquals( + DataType.ARRAY, + DataType.fromValue( + JsonSchemaPrimitiveUtil.JsonSchemaPrimitive.ARRAY.toString() + .lowercase(Locale.getDefault()) + ) + ) + Assertions.assertEquals( + DataType.OBJECT, + DataType.fromValue( + JsonSchemaPrimitiveUtil.JsonSchemaPrimitive.OBJECT.toString() + .lowercase(Locale.getDefault()) + ) + ) + Assertions.assertThrows(IllegalArgumentException::class.java) { + DataType.fromValue( + JsonSchemaPrimitiveUtil.JsonSchemaPrimitive.NULL.toString() + .lowercase(Locale.getDefault()) + ) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/configoss/helpers/YamlListToStandardDefinitionsTest.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/configoss/helpers/YamlListToStandardDefinitionsTest.kt new file mode 100644 index 0000000000000..8e064a8ab4517 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/configoss/helpers/YamlListToStandardDefinitionsTest.kt @@ -0,0 +1,139 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.configoss.helpers + +import com.fasterxml.jackson.core.JsonProcessingException +import com.fasterxml.jackson.databind.ObjectMapper +import io.airbyte.commons.jackson.MoreMappers +import io.airbyte.configoss.StandardDestinationDefinition +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Nested +import org.junit.jupiter.api.Test + +internal class YamlListToStandardDefinitionsTest { + @Nested + internal inner class VerifyAndConvertToJsonNode { + private val mapper: ObjectMapper = MoreMappers.initMapper() + + @Test + @Throws(JsonProcessingException::class) + fun correctlyReadTest() { + val jsonDefs = + YamlListToStandardDefinitions.verifyAndConvertToJsonNode(ID_NAME, GOOD_DES_DEF_YAML) + val defList = + mapper.treeToValue(jsonDefs, Array::class.java) + Assertions.assertEquals(1, defList.size) + Assertions.assertEquals("Local JSON", defList[0].name) + } + + @Test + fun duplicateIdTest() { + Assertions.assertThrows(RuntimeException::class.java) { + YamlListToStandardDefinitions.verifyAndConvertToJsonNode(ID_NAME, DUPLICATE_ID) + } + } + + @Test + fun duplicateNameTest() { + Assertions.assertThrows(RuntimeException::class.java) { + YamlListToStandardDefinitions.verifyAndConvertToJsonNode(ID_NAME, DUPLICATE_NAME) + } + } + + @Test + fun emptyFileTest() { + Assertions.assertThrows(RuntimeException::class.java) { + YamlListToStandardDefinitions.verifyAndConvertToJsonNode(ID_NAME, "") + } + } + + @Test + fun badDataTest() { + Assertions.assertThrows(RuntimeException::class.java) { + YamlListToStandardDefinitions.verifyAndConvertToJsonNode(ID_NAME, BAD_DATA) + } + } + } + + @Nested + internal inner class VerifyAndConvertToModelList { + @Test + fun correctlyReadTest() { + val defs = + YamlListToStandardDefinitions.verifyAndConvertToModelList( + StandardDestinationDefinition::class.java, + GOOD_DES_DEF_YAML + ) + Assertions.assertEquals(1, defs.size) + Assertions.assertEquals("Local JSON", defs[0].name) + } + + @Test + fun duplicateIdTest() { + Assertions.assertThrows(RuntimeException::class.java) { + YamlListToStandardDefinitions.verifyAndConvertToModelList( + StandardDestinationDefinition::class.java, + DUPLICATE_ID + ) + } + } + + @Test + fun duplicateNameTest() { + Assertions.assertThrows(RuntimeException::class.java) { + YamlListToStandardDefinitions.verifyAndConvertToModelList( + StandardDestinationDefinition::class.java, + DUPLICATE_NAME + ) + } + } + + @Test + fun emptyFileTest() { + Assertions.assertThrows(RuntimeException::class.java) { + YamlListToStandardDefinitions.verifyAndConvertToModelList( + StandardDestinationDefinition::class.java, + "" + ) + } + } + + @Test + fun badDataTest() { + Assertions.assertThrows(RuntimeException::class.java) { + YamlListToStandardDefinitions.verifyAndConvertToModelList( + StandardDestinationDefinition::class.java, + BAD_DATA + ) + } + } + } + + companion object { + private const val DESTINATION_DEFINITION_ID = + "- destinationDefinitionId: a625d593-bba5-4a1c-a53d-2d246268a816\n" + private const val DESTINATION_NAME = " name: Local JSON\n" + private const val DOCKER_REPO = " dockerRepository: airbyte/destination-local-json\n" + private const val DOCKER_IMAGE_TAG = " dockerImageTag: 0.1.4\n" + private const val GOOD_DES_DEF_YAML = + (DESTINATION_DEFINITION_ID + + DESTINATION_NAME + + DOCKER_REPO + + DOCKER_IMAGE_TAG + + " documentationUrl: https://docs.airbyte.io/integrations/destinations/local-json") + private const val DUPLICATE_ID = + """$DESTINATION_DEFINITION_ID$DESTINATION_NAME$DOCKER_REPO$DOCKER_IMAGE_TAG documentationUrl: https://docs.airbyte.io/integrations/destinations/local-json$DESTINATION_DEFINITION_ID name: JSON 2 +$DOCKER_REPO$DOCKER_IMAGE_TAG documentationUrl: https://docs.airbyte.io/integrations/destinations/local-json""" + private const val DUPLICATE_NAME = + """$DESTINATION_DEFINITION_ID$DESTINATION_NAME$DOCKER_REPO$DOCKER_IMAGE_TAG documentationUrl: https://docs.airbyte.io/integrations/destinations/local-json +- destinationDefinitionId: 8be1cf83-fde1-477f-a4ad-318d23c9f3c6 +$DESTINATION_NAME dockerRepository: airbyte/destination-csv + dockerImageTag: 0.1.8 + documentationUrl: https://docs.airbyte.io/integrations/destinations/local-csv""" + private const val BAD_DATA = + """$DESTINATION_DEFINITION_ID$DESTINATION_NAME$DOCKER_REPO dockerImageTag: 0.1.8 + documentationUrl""" + private const val ID_NAME = "destinationDefinitionId" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/validation/json/JsonSchemaValidatorTest.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/validation/json/JsonSchemaValidatorTest.kt new file mode 100644 index 0000000000000..8ddb8b6a6a230 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/validation/json/JsonSchemaValidatorTest.kt @@ -0,0 +1,196 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.validation.json + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.commons.io.IOs +import io.airbyte.commons.json.Jsons +import java.io.IOException +import java.net.URI +import java.net.URISyntaxException +import java.nio.file.Files +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class JsonSchemaValidatorTest { + @Test + fun testValidateSuccess() { + val validator = JsonSchemaValidator() + + val object1 = Jsons.deserialize("{\"host\":\"abc\"}") + Assertions.assertTrue(validator.validate(VALID_SCHEMA, object1).isEmpty()) + Assertions.assertDoesNotThrow { validator.ensure(VALID_SCHEMA, object1) } + + val object2 = Jsons.deserialize("{\"host\":\"abc\", \"port\":1}") + Assertions.assertTrue(validator.validate(VALID_SCHEMA, object2).isEmpty()) + Assertions.assertDoesNotThrow { validator.ensure(VALID_SCHEMA, object2) } + } + + @Test + fun testValidateFail() { + val validator = JsonSchemaValidator() + + val object1 = Jsons.deserialize("{}") + Assertions.assertFalse(validator.validate(VALID_SCHEMA, object1).isEmpty()) + Assertions.assertThrows(JsonValidationException::class.java) { + validator.ensure(VALID_SCHEMA, object1) + } + + val object2 = Jsons.deserialize("{\"host\":\"abc\", \"port\":9999999}") + Assertions.assertFalse(validator.validate(VALID_SCHEMA, object2).isEmpty()) + Assertions.assertThrows(JsonValidationException::class.java) { + validator.ensure(VALID_SCHEMA, object2) + } + } + + @Test + @Throws(IOException::class) + fun test() { + val schema = + """{ + "${"$"}schema": "http://json-schema.org/draft-07/schema#", + "title": "OuterObject", + "type": "object", + "properties": { + "field1": { + "type": "string" + } + }, + "definitions": { + "InnerObject": { + "type": "object", + "properties": { + "field2": { + "type": "string" + } + } + } + } +} +""" + + val schemaFile = + IOs.writeFile(Files.createTempDirectory("test"), "schema.json", schema).toFile() + + // outer object + Assertions.assertTrue(JsonSchemaValidator.getSchema(schemaFile)[PROPERTIES].has("field1")) + Assertions.assertFalse(JsonSchemaValidator.getSchema(schemaFile)[PROPERTIES].has("field2")) + // inner object + Assertions.assertTrue( + JsonSchemaValidator.getSchema(schemaFile, "InnerObject")[PROPERTIES].has("field2") + ) + Assertions.assertFalse( + JsonSchemaValidator.getSchema(schemaFile, "InnerObject")[PROPERTIES].has("field1") + ) + // non-existent object + Assertions.assertThrows(NullPointerException::class.java) { + JsonSchemaValidator.getSchema(schemaFile, "NonExistentObject") + } + } + + @Test + @Throws(IOException::class, URISyntaxException::class) + fun testResolveReferences() { + val referencableSchemas = + """ + { + "definitions": { + "ref1": {"type": "string"}, + "ref2": {"type": "boolean"} + } + } + + """.trimIndent() + val schemaFile = + IOs.writeFile( + Files.createTempDirectory("test"), + "WellKnownTypes.json", + referencableSchemas + ) + .toFile() + val jsonSchemaValidator = + JsonSchemaValidator(URI("file://" + schemaFile.parentFile.absolutePath + "/foo.json")) + + val validationResult = + jsonSchemaValidator.validate( + Jsons.deserialize( + """ + { + "type": "object", + "properties": { + "prop1": {"${'$'}ref": "WellKnownTypes.json#/definitions/ref1"}, + "prop2": {"${'$'}ref": "WellKnownTypes.json#/definitions/ref2"} + } + } + + """.trimIndent() + ), + Jsons.deserialize( + """ + { + "prop1": "foo", + "prop2": "false" + } + + """.trimIndent() + ) + ) + + Assertions.assertEquals(setOf("$.prop2: string found, boolean expected"), validationResult) + } + + @Test + fun testIntializedMethodsShouldErrorIfNotInitialised() { + val validator = JsonSchemaValidator() + + Assertions.assertThrows(NullPointerException::class.java) { + validator.testInitializedSchema("uninitialised", Jsons.deserialize("{}")) + } + Assertions.assertThrows(NullPointerException::class.java) { + validator.ensureInitializedSchema("uninitialised", Jsons.deserialize("{}")) + } + } + + @Test + fun testIntializedMethodsShouldValidateIfInitialised() { + val validator = JsonSchemaValidator() + val schemaName = "schema_name" + val goodJson = Jsons.deserialize("{\"host\":\"abc\"}") + + validator.initializeSchemaValidator(schemaName, VALID_SCHEMA) + + Assertions.assertTrue(validator.testInitializedSchema(schemaName, goodJson)) + Assertions.assertDoesNotThrow { validator.ensureInitializedSchema(schemaName, goodJson) } + + val badJson = Jsons.deserialize("{\"host\":1}") + Assertions.assertFalse(validator.testInitializedSchema(schemaName, badJson)) + Assertions.assertThrows(JsonValidationException::class.java) { + validator.ensureInitializedSchema(schemaName, badJson) + } + } + + companion object { + private const val PROPERTIES = "properties" + + private val VALID_SCHEMA: JsonNode = + Jsons.deserialize( + """{ + "${"$"}schema": "http://json-schema.org/draft-07/schema#", + "title": "test", + "type": "object", + "required": ["host"], + "additionalProperties": false, + "properties": { + "host": { + "type": "string" + }, + "port": { + "type": "integer", + "minimum": 0, + "maximum": 65536 + } } + }""" + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/workers/TestHarnessUtilsTest.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/workers/TestHarnessUtilsTest.kt new file mode 100644 index 0000000000000..56bc423d5f532 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/workers/TestHarnessUtilsTest.kt @@ -0,0 +1,183 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers + +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair +import io.airbyte.workers.internal.HeartbeatMonitor +import io.airbyte.workers.test_utils.TestConfigHelpers +import java.time.Duration +import java.time.temporal.ChronoUnit +import java.util.concurrent.TimeUnit +import java.util.concurrent.atomic.AtomicInteger +import java.util.function.BiConsumer +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Nested +import org.junit.jupiter.api.Test +import org.mockito.Mockito +import org.mockito.invocation.InvocationOnMock +import org.mockito.kotlin.mock +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +internal class TestHarnessUtilsTest { + @Nested + internal inner class GentleCloseWithHeartbeat { + private val CHECK_HEARTBEAT_DURATION: Duration = Duration.of(10, ChronoUnit.MILLIS) + + private val SHUTDOWN_TIME_DURATION: Duration = Duration.of(100, ChronoUnit.MILLIS) + + private var process: Process = mock() + private var heartbeatMonitor: HeartbeatMonitor = mock() + private var forceShutdown: BiConsumer = mock() + + @BeforeEach + fun setup() { + process = Mockito.mock(Process::class.java) + heartbeatMonitor = Mockito.mock(HeartbeatMonitor::class.java) + forceShutdown = mock() + } + + private fun runShutdown() { + gentleCloseWithHeartbeat( + process, + heartbeatMonitor, + SHUTDOWN_TIME_DURATION, + CHECK_HEARTBEAT_DURATION, + SHUTDOWN_TIME_DURATION, + forceShutdown + ) + } + + // Verify that shutdown waits indefinitely when heartbeat and process are healthy. + @Test + @Throws(InterruptedException::class) + fun testStartsWait() { + Mockito.`when`(process.isAlive).thenReturn(true) + val recordedBeats = AtomicInteger(0) + Mockito.doAnswer { ignored: InvocationOnMock? -> + recordedBeats.incrementAndGet() + true + } + .`when`(heartbeatMonitor) + .isBeating + + val thread = Thread { this.runShutdown() } + + thread.start() + + // block until the loop is running. + while (recordedBeats.get() < 3) { + Thread.sleep(10) + } + } + + @Test + fun testGracefulShutdown() { + Mockito.`when`(heartbeatMonitor.isBeating).thenReturn(false) + Mockito.`when`(process.isAlive).thenReturn(false) + + runShutdown() + + Mockito.verifyNoInteractions(forceShutdown) + } + + @Test + fun testForcedShutdown() { + Mockito.`when`(heartbeatMonitor.isBeating).thenReturn(false) + Mockito.`when`(process.isAlive).thenReturn(true) + + runShutdown() + + Mockito.verify(forceShutdown).accept(process, SHUTDOWN_TIME_DURATION) + } + + @Test + fun testProcessDies() { + Mockito.`when`(heartbeatMonitor.isBeating).thenReturn(true) + Mockito.`when`(process.isAlive).thenReturn(false) + runShutdown() + + Mockito.verifyNoInteractions(forceShutdown) + } + } + + @Test + fun testMapStreamNamesToSchemasWithNullNamespace() { + val syncPair = TestConfigHelpers.createSyncConfig() + val syncInput = syncPair.value + val mapOutput = TestHarnessUtils.mapStreamNamesToSchemas(syncInput) + Assertions.assertNotNull( + mapOutput[AirbyteStreamNameNamespacePair("user_preferences", null)] + ) + } + + @Test + fun testMapStreamNamesToSchemasWithMultipleNamespaces() { + val syncPair = TestConfigHelpers.createSyncConfig(true) + val syncInput = syncPair.value + val mapOutput = TestHarnessUtils.mapStreamNamesToSchemas(syncInput) + Assertions.assertNotNull( + mapOutput[AirbyteStreamNameNamespacePair("user_preferences", "namespace")] + ) + Assertions.assertNotNull( + mapOutput[AirbyteStreamNameNamespacePair("user_preferences", "namespace2")] + ) + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(GentleCloseWithHeartbeat::class.java) + + /** + * As long as the the heartbeatMonitor detects a heartbeat, the process will be allowed to + * continue. This method checks the heartbeat once every minute. Once there is no heartbeat + * detected, if the process has ended, then the method returns. If the process is still + * running it is given a grace period of the timeout arguments passed into the method. Once + * those expire the process is killed forcibly. If the process cannot be killed, this method + * will log that this is the case, but then returns. + * + * @param process + * - process to monitor. + * @param heartbeatMonitor + * - tracks if the heart is still beating for the given process. + * @param gracefulShutdownDuration + * - grace period to give the process to die after its heart stops beating. + * @param checkHeartbeatDuration + * - frequency with which the heartbeat of the process is checked. + * @param forcedShutdownDuration + * - amount of time to wait if a process needs to be destroyed forcibly. + */ + fun gentleCloseWithHeartbeat( + process: Process, + heartbeatMonitor: HeartbeatMonitor, + gracefulShutdownDuration: Duration, + checkHeartbeatDuration: Duration, + forcedShutdownDuration: Duration, + forceShutdown: BiConsumer + ) { + while (process.isAlive && heartbeatMonitor.isBeating) { + try { + process.waitFor(checkHeartbeatDuration.toMillis(), TimeUnit.MILLISECONDS) + } catch (e: InterruptedException) { + LOGGER.error("Exception while waiting for process to finish", e) + } + } + + if (process.isAlive) { + try { + process.waitFor(gracefulShutdownDuration.toMillis(), TimeUnit.MILLISECONDS) + } catch (e: InterruptedException) { + LOGGER.error( + "Exception during grace period for process to finish. This can happen when cancelling jobs." + ) + } + } + + // if we were unable to exist gracefully, force shutdown... + if (process.isAlive) { + forceShutdown.accept(process, forcedShutdownDuration) + } + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/workers/helper/CatalogClientConvertersTest.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/workers/helper/CatalogClientConvertersTest.kt new file mode 100644 index 0000000000000..7869f72d46b05 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/workers/helper/CatalogClientConvertersTest.kt @@ -0,0 +1,91 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.helper + +import com.google.common.collect.Lists +import io.airbyte.api.client.model.generated.AirbyteStreamAndConfiguration +import io.airbyte.api.client.model.generated.AirbyteStreamConfiguration +import io.airbyte.api.client.model.generated.DestinationSyncMode +import io.airbyte.api.client.model.generated.SyncMode +import io.airbyte.commons.text.Names +import io.airbyte.protocol.models.* +import java.util.List +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class CatalogClientConvertersTest { + @Test + fun testConvertToClientAPI() { + Assertions.assertEquals( + EXPECTED_CLIENT_CATALOG, + CatalogClientConverters.toAirbyteCatalogClientApi(BASIC_MODEL_CATALOG) + ) + } + + @Test + fun testConvertToProtocol() { + Assertions.assertEquals( + BASIC_MODEL_CATALOG, + CatalogClientConverters.toAirbyteProtocol(EXPECTED_CLIENT_CATALOG) + ) + } + + companion object { + const val ID_FIELD_NAME: String = "id" + private const val STREAM_NAME = "users-data" + private val STREAM: AirbyteStream = + AirbyteStream() + .withName(STREAM_NAME) + .withJsonSchema( + CatalogHelpers.fieldsToJsonSchema( + Field.of(ID_FIELD_NAME, JsonSchemaType.STRING) + ) + ) + .withDefaultCursorField(Lists.newArrayList(ID_FIELD_NAME)) + .withSourceDefinedCursor(false) + .withSourceDefinedPrimaryKey(emptyList()) + .withSupportedSyncModes( + List.of( + io.airbyte.protocol.models.SyncMode.FULL_REFRESH, + io.airbyte.protocol.models.SyncMode.INCREMENTAL + ) + ) + + private val CLIENT_STREAM: io.airbyte.api.client.model.generated.AirbyteStream = + io.airbyte.api.client.model.generated + .AirbyteStream() + .name(STREAM_NAME) + .jsonSchema( + CatalogHelpers.fieldsToJsonSchema( + Field.of(ID_FIELD_NAME, JsonSchemaType.STRING) + ) + ) + .defaultCursorField(Lists.newArrayList(ID_FIELD_NAME)) + .sourceDefinedCursor(false) + .sourceDefinedPrimaryKey(emptyList()) + .supportedSyncModes(List.of(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) + private val CLIENT_DEFAULT_STREAM_CONFIGURATION: AirbyteStreamConfiguration = + AirbyteStreamConfiguration() + .syncMode(SyncMode.FULL_REFRESH) + .cursorField(Lists.newArrayList(ID_FIELD_NAME)) + .destinationSyncMode(DestinationSyncMode.APPEND) + .primaryKey(emptyList()) + .aliasName(Names.toAlphanumericAndUnderscore(STREAM_NAME)) + .selected(true) + + private val BASIC_MODEL_CATALOG: AirbyteCatalog = + AirbyteCatalog().withStreams(Lists.newArrayList(STREAM)) + + private val EXPECTED_CLIENT_CATALOG: io.airbyte.api.client.model.generated.AirbyteCatalog = + io.airbyte.api.client.model.generated + .AirbyteCatalog() + .streams( + Lists.newArrayList( + AirbyteStreamAndConfiguration() + .stream(CLIENT_STREAM) + .config(CLIENT_DEFAULT_STREAM_CONFIGURATION) + ) + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/workers/helper/ConnectorConfigUpdaterTest.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/workers/helper/ConnectorConfigUpdaterTest.kt new file mode 100644 index 0000000000000..12c3f0aa726c7 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/workers/helper/ConnectorConfigUpdaterTest.kt @@ -0,0 +1,80 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.helper + +import io.airbyte.api.client.generated.DestinationApi +import io.airbyte.api.client.generated.SourceApi +import io.airbyte.api.client.invoker.generated.ApiException +import io.airbyte.api.client.model.generated.* +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.Config +import java.util.* +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.mockito.Mockito + +internal class ConnectorConfigUpdaterTest { + private val mSourceApi: SourceApi = Mockito.mock(SourceApi::class.java) + private val mDestinationApi: DestinationApi = Mockito.mock(DestinationApi::class.java) + + private var connectorConfigUpdater: ConnectorConfigUpdater? = null + + @BeforeEach + @Throws(ApiException::class) + fun setUp() { + Mockito.`when`(mSourceApi.getSource(SourceIdRequestBody().sourceId(SOURCE_ID))) + .thenReturn(SourceRead().sourceId(SOURCE_ID).name(SOURCE_NAME)) + + Mockito.`when`( + mDestinationApi.getDestination( + DestinationIdRequestBody().destinationId(DESTINATION_ID) + ) + ) + .thenReturn(DestinationRead().destinationId(DESTINATION_ID).name(DESTINATION_NAME)) + + connectorConfigUpdater = ConnectorConfigUpdater(mSourceApi, mDestinationApi) + } + + @Test + @Throws(ApiException::class) + fun testPersistSourceConfig() { + val newConfiguration = Config().withAdditionalProperty("key", "new_value") + val configJson = Jsons.jsonNode(newConfiguration.additionalProperties) + + val expectedSourceUpdate = + SourceUpdate().sourceId(SOURCE_ID).name(SOURCE_NAME).connectionConfiguration(configJson) + + Mockito.`when`(mSourceApi.updateSource(Mockito.any())) + .thenReturn(SourceRead().connectionConfiguration(configJson)) + + connectorConfigUpdater!!.updateSource(SOURCE_ID, newConfiguration) + Mockito.verify(mSourceApi).updateSource(expectedSourceUpdate) + } + + @Test + @Throws(ApiException::class) + fun testPersistDestinationConfig() { + val newConfiguration = Config().withAdditionalProperty("key", "new_value") + val configJson = Jsons.jsonNode(newConfiguration.additionalProperties) + + val expectedDestinationUpdate = + DestinationUpdate() + .destinationId(DESTINATION_ID) + .name(DESTINATION_NAME) + .connectionConfiguration(configJson) + + Mockito.`when`(mDestinationApi.updateDestination(Mockito.any())) + .thenReturn(DestinationRead().connectionConfiguration(configJson)) + + connectorConfigUpdater!!.updateDestination(DESTINATION_ID, newConfiguration) + Mockito.verify(mDestinationApi).updateDestination(expectedDestinationUpdate) + } + + companion object { + private val SOURCE_ID: UUID = UUID.randomUUID() + private const val SOURCE_NAME = "source-stripe" + private val DESTINATION_ID: UUID = UUID.randomUUID() + private const val DESTINATION_NAME = "destination-google-sheets" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/workers/helper/FailureHelperTest.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/workers/helper/FailureHelperTest.kt new file mode 100644 index 0000000000000..7f5c7766c5a32 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/workers/helper/FailureHelperTest.kt @@ -0,0 +1,231 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.helper + +import io.airbyte.configoss.FailureReason +import io.airbyte.configoss.Metadata +import io.airbyte.protocol.models.AirbyteErrorTraceMessage +import io.airbyte.protocol.models.AirbyteTraceMessage +import io.airbyte.workers.test_utils.AirbyteMessageUtils +import java.util.Set +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class FailureHelperTest { + @Test + @Throws(Exception::class) + fun testGenericFailureFromTrace() { + val traceMessage = + AirbyteMessageUtils.createErrorTraceMessage( + "trace message error", + 123.0, + AirbyteErrorTraceMessage.FailureType.CONFIG_ERROR + ) + val failureReason = FailureHelper.genericFailure(traceMessage, 12345, 1) + Assertions.assertEquals(FailureReason.FailureType.CONFIG_ERROR, failureReason.failureType) + } + + @Test + @Throws(Exception::class) + fun testGenericFailureFromTraceNoFailureType() { + val failureReason = FailureHelper.genericFailure(TRACE_MESSAGE, 12345, 1) + Assertions.assertEquals(failureReason.failureType, FailureReason.FailureType.SYSTEM_ERROR) + } + + @Test + fun testConnectorCommandFailure() { + val t: Throwable = RuntimeException() + val jobId = 12345L + val attemptNumber = 1 + val failureReason = + FailureHelper.connectorCommandFailure( + t, + jobId, + attemptNumber, + FailureHelper.ConnectorCommand.CHECK + ) + + val metadata = failureReason.metadata.additionalProperties + Assertions.assertEquals("check", metadata[CONNECTOR_COMMAND_KEY]) + Assertions.assertNull(metadata[FROM_TRACE_MESSAGE_KEY]) + Assertions.assertEquals(jobId, metadata[JOB_ID_KEY]) + Assertions.assertEquals(attemptNumber, metadata[ATTEMPT_NUMBER_KEY]) + } + + @Test + fun testConnectorCommandFailureFromTrace() { + val jobId = 12345L + val attemptNumber = 1 + val failureReason = + FailureHelper.connectorCommandFailure( + TRACE_MESSAGE, + jobId, + attemptNumber, + FailureHelper.ConnectorCommand.DISCOVER + ) + + val metadata = failureReason.metadata.additionalProperties + Assertions.assertEquals("discover", metadata[CONNECTOR_COMMAND_KEY]) + Assertions.assertEquals(true, metadata[FROM_TRACE_MESSAGE_KEY]) + Assertions.assertEquals(jobId, metadata[JOB_ID_KEY]) + Assertions.assertEquals(attemptNumber, metadata[ATTEMPT_NUMBER_KEY]) + } + + @Test + fun testSourceFailure() { + val t: Throwable = RuntimeException() + val jobId = 12345L + val attemptNumber = 1 + val failureReason = FailureHelper.sourceFailure(t, jobId, attemptNumber) + Assertions.assertEquals(FailureReason.FailureOrigin.SOURCE, failureReason.failureOrigin) + + val metadata = failureReason.metadata.additionalProperties + Assertions.assertEquals("read", metadata[CONNECTOR_COMMAND_KEY]) + Assertions.assertNull(metadata[FROM_TRACE_MESSAGE_KEY]) + Assertions.assertEquals(jobId, metadata[JOB_ID_KEY]) + Assertions.assertEquals(attemptNumber, metadata[ATTEMPT_NUMBER_KEY]) + } + + @Test + fun testSourceFailureFromTrace() { + val jobId = 12345L + val attemptNumber = 1 + val failureReason = FailureHelper.sourceFailure(TRACE_MESSAGE, jobId, attemptNumber) + Assertions.assertEquals(FailureReason.FailureOrigin.SOURCE, failureReason.failureOrigin) + + val metadata = failureReason.metadata.additionalProperties + Assertions.assertEquals("read", metadata[CONNECTOR_COMMAND_KEY]) + Assertions.assertEquals(true, metadata[FROM_TRACE_MESSAGE_KEY]) + Assertions.assertEquals(jobId, metadata[JOB_ID_KEY]) + Assertions.assertEquals(attemptNumber, metadata[ATTEMPT_NUMBER_KEY]) + } + + @Test + fun testDestinationFailure() { + val t: Throwable = RuntimeException() + val jobId = 12345L + val attemptNumber = 1 + val failureReason = FailureHelper.destinationFailure(t, jobId, attemptNumber) + Assertions.assertEquals( + FailureReason.FailureOrigin.DESTINATION, + failureReason.failureOrigin + ) + + val metadata = failureReason.metadata.additionalProperties + Assertions.assertEquals("write", metadata[CONNECTOR_COMMAND_KEY]) + Assertions.assertNull(metadata[FROM_TRACE_MESSAGE_KEY]) + Assertions.assertEquals(jobId, metadata[JOB_ID_KEY]) + Assertions.assertEquals(attemptNumber, metadata[ATTEMPT_NUMBER_KEY]) + } + + @Test + fun testDestinationFailureFromTrace() { + val jobId = 12345L + val attemptNumber = 1 + val failureReason = FailureHelper.destinationFailure(TRACE_MESSAGE, jobId, attemptNumber) + Assertions.assertEquals( + FailureReason.FailureOrigin.DESTINATION, + failureReason.failureOrigin + ) + + val metadata = failureReason.metadata.additionalProperties + Assertions.assertEquals("write", metadata[CONNECTOR_COMMAND_KEY]) + Assertions.assertEquals(true, metadata[FROM_TRACE_MESSAGE_KEY]) + Assertions.assertEquals(jobId, metadata[JOB_ID_KEY]) + Assertions.assertEquals(attemptNumber, metadata[ATTEMPT_NUMBER_KEY]) + } + + @Test + fun testCheckFailure() { + val t: Throwable = RuntimeException() + val jobId = 12345L + val attemptNumber = 1 + val failureReason = + FailureHelper.checkFailure( + t, + jobId, + attemptNumber, + FailureReason.FailureOrigin.DESTINATION + ) + Assertions.assertEquals( + FailureReason.FailureOrigin.DESTINATION, + failureReason.failureOrigin + ) + + val metadata = failureReason.metadata.additionalProperties + Assertions.assertEquals("check", metadata[CONNECTOR_COMMAND_KEY]) + Assertions.assertNull(metadata[FROM_TRACE_MESSAGE_KEY]) + Assertions.assertEquals(jobId, metadata[JOB_ID_KEY]) + Assertions.assertEquals(attemptNumber, metadata[ATTEMPT_NUMBER_KEY]) + } + + @Test + @Throws(Exception::class) + fun testOrderedFailures() { + val failureReasonList = + FailureHelper.orderedFailures( + Set.of(TRACE_FAILURE_REASON_2, TRACE_FAILURE_REASON, EXCEPTION_FAILURE_REASON) + ) + Assertions.assertEquals(failureReasonList[0], TRACE_FAILURE_REASON) + } + + @Test + fun testUnknownOriginFailure() { + val t: Throwable = RuntimeException() + val jobId = 12345L + val attemptNumber = 1 + val failureReason = FailureHelper.unknownOriginFailure(t, jobId, attemptNumber) + Assertions.assertEquals(FailureReason.FailureOrigin.UNKNOWN, failureReason.failureOrigin) + Assertions.assertEquals("An unknown failure occurred", failureReason.externalMessage) + } + + companion object { + private const val FROM_TRACE_MESSAGE_KEY = "from_trace_message" + private const val CONNECTOR_COMMAND_KEY = "connector_command" + private const val JOB_ID_KEY = "jobId" + private const val ATTEMPT_NUMBER_KEY = "attemptNumber" + + private val TRACE_FAILURE_REASON: FailureReason = + FailureReason() + .withInternalMessage("internal message") + .withStacktrace("stack trace") + .withTimestamp(1111112) + .withMetadata( + Metadata() + .withAdditionalProperty(JOB_ID_KEY, 12345) + .withAdditionalProperty(ATTEMPT_NUMBER_KEY, 1) + .withAdditionalProperty(FROM_TRACE_MESSAGE_KEY, true) + ) + + private val TRACE_FAILURE_REASON_2: FailureReason = + FailureReason() + .withInternalMessage("internal message") + .withStacktrace("stack trace") + .withTimestamp(1111113) + .withMetadata( + Metadata() + .withAdditionalProperty(JOB_ID_KEY, 12345) + .withAdditionalProperty(ATTEMPT_NUMBER_KEY, 1) + .withAdditionalProperty(FROM_TRACE_MESSAGE_KEY, true) + ) + + private val EXCEPTION_FAILURE_REASON: FailureReason = + FailureReason() + .withInternalMessage("internal message") + .withStacktrace("stack trace") + .withTimestamp(1111111) + .withMetadata( + Metadata() + .withAdditionalProperty(JOB_ID_KEY, 12345) + .withAdditionalProperty(ATTEMPT_NUMBER_KEY, 1) + ) + + private val TRACE_MESSAGE: AirbyteTraceMessage = + AirbyteMessageUtils.createErrorTraceMessage( + "trace message error", + 123.0, + AirbyteErrorTraceMessage.FailureType.SYSTEM_ERROR + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/workers/internal/DefaultAirbyteStreamFactoryTest.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/workers/internal/DefaultAirbyteStreamFactoryTest.kt new file mode 100644 index 0000000000000..96e1983ebbb2b --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/test/kotlin/io/airbyte/workers/internal/DefaultAirbyteStreamFactoryTest.kt @@ -0,0 +1,154 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.internal + +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.logging.MdcScope +import io.airbyte.protocol.models.AirbyteLogMessage +import io.airbyte.protocol.models.AirbyteMessage +import io.airbyte.workers.test_utils.AirbyteMessageUtils +import java.io.BufferedReader +import java.io.ByteArrayInputStream +import java.io.InputStream +import java.io.InputStreamReader +import java.nio.charset.StandardCharsets +import java.util.* +import java.util.stream.Collectors +import java.util.stream.Stream +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Disabled +import org.junit.jupiter.api.Test +import org.mockito.ArgumentMatchers +import org.mockito.Mockito +import org.slf4j.Logger + +internal class DefaultAirbyteStreamFactoryTest { + private lateinit var protocolPredicate: AirbyteProtocolPredicate + private lateinit var logger: Logger + + @BeforeEach + fun setup() { + protocolPredicate = Mockito.mock(AirbyteProtocolPredicate::class.java) + Mockito.`when`(protocolPredicate.test(ArgumentMatchers.any())).thenReturn(true) + logger = Mockito.mock(Logger::class.java) + } + + @Test + fun testValid() { + val record1 = AirbyteMessageUtils.createRecordMessage(STREAM_NAME, FIELD_NAME, "green") + + val messageStream = stringToMessageStream(Jsons.serialize(record1)) + val expectedStream = Stream.of(record1) + + Assertions.assertEquals( + expectedStream.collect(Collectors.toList()), + messageStream.collect(Collectors.toList()) + ) + Mockito.verifyNoInteractions(logger) + } + + @Test + fun testLoggingLine() { + val invalidRecord = "invalid line" + + val messageStream = stringToMessageStream(invalidRecord) + + Assertions.assertEquals(emptyList(), messageStream.collect(Collectors.toList())) + Mockito.verify(logger).info(ArgumentMatchers.anyString()) + Mockito.verifyNoMoreInteractions(logger) + } + + @Test + fun testLoggingLevel() { + val logMessage = + AirbyteMessageUtils.createLogMessage(AirbyteLogMessage.Level.WARN, "warning") + + val messageStream = stringToMessageStream(Jsons.serialize(logMessage)) + + Assertions.assertEquals(emptyList(), messageStream.collect(Collectors.toList())) + Mockito.verify(logger).warn("warning") + Mockito.verifyNoMoreInteractions(logger) + } + + @Test + fun testFailValidation() { + val invalidRecord = "{ \"fish\": \"tuna\"}" + + Mockito.`when`(protocolPredicate!!.test(Jsons.deserialize(invalidRecord))).thenReturn(false) + + val messageStream = stringToMessageStream(invalidRecord) + + Assertions.assertEquals(emptyList(), messageStream.collect(Collectors.toList())) + Mockito.verify(logger).error(ArgumentMatchers.anyString(), ArgumentMatchers.anyString()) + Mockito.verifyNoMoreInteractions(logger) + } + + @Test + fun testFailDeserialization() { + val invalidRecord = "{ \"type\": \"abc\"}" + + Mockito.`when`(protocolPredicate!!.test(Jsons.deserialize(invalidRecord))).thenReturn(true) + + val messageStream = stringToMessageStream(invalidRecord) + + Assertions.assertEquals(emptyList(), messageStream.collect(Collectors.toList())) + Mockito.verify(logger).error(ArgumentMatchers.anyString(), ArgumentMatchers.anyString()) + Mockito.verifyNoMoreInteractions(logger) + } + + @Test + fun testFailsSize() { + val record1 = AirbyteMessageUtils.createRecordMessage(STREAM_NAME, FIELD_NAME, "green") + + val inputStream: InputStream = + ByteArrayInputStream(record1.toString().toByteArray(StandardCharsets.UTF_8)) + val bufferedReader = BufferedReader(InputStreamReader(inputStream, StandardCharsets.UTF_8)) + + val messageStream = + DefaultAirbyteStreamFactory( + protocolPredicate, + logger, + MdcScope.Builder(), + Optional.of(RuntimeException::class.java), + 1L + ) + .create(bufferedReader) + + Assertions.assertThrows(RuntimeException::class.java) { messageStream.toList() } + } + + @Test + @Disabled + fun testMissingNewLineBetweenValidRecords() { + val record1 = AirbyteMessageUtils.createRecordMessage(STREAM_NAME, FIELD_NAME, "green") + val record2 = AirbyteMessageUtils.createRecordMessage(STREAM_NAME, FIELD_NAME, "yellow") + + val inputString = Jsons.serialize(record1) + Jsons.serialize(record2) + + val messageStream = stringToMessageStream(inputString) + + Assertions.assertEquals(emptyList(), messageStream.collect(Collectors.toList())) + Mockito.verify(logger).error(ArgumentMatchers.anyString(), ArgumentMatchers.anyString()) + Mockito.verifyNoMoreInteractions(logger) + } + + private fun stringToMessageStream(inputString: String): Stream { + val inputStream: InputStream = + ByteArrayInputStream(inputString.toByteArray(StandardCharsets.UTF_8)) + val bufferedReader = BufferedReader(InputStreamReader(inputStream, StandardCharsets.UTF_8)) + return DefaultAirbyteStreamFactory( + protocolPredicate, + logger, + MdcScope.Builder(), + Optional.empty() + ) + .create(bufferedReader) + } + + companion object { + private const val STREAM_NAME = "user_preferences" + private const val FIELD_NAME = "favorite_color" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/TestHarness.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/TestHarness.java deleted file mode 100644 index 24f6d3fdbbd25..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/TestHarness.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers; - -import io.airbyte.workers.exception.TestHarnessException; -import java.nio.file.Path; - -public interface TestHarness { - - /** - * Blocking call to run the worker's workflow. Once this is complete, getStatus should return either - * COMPLETE, FAILED, or CANCELLED. - */ - OutputType run(InputType inputType, Path jobRoot) throws TestHarnessException; - - /** - * Cancels in-progress workers. Although all workers support cancel, in reality only the - * asynchronous {@link DefaultReplicationWorker}'s cancel is used. - */ - void cancel(); - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/TestHarnessUtils.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/TestHarnessUtils.java deleted file mode 100644 index 524624685034f..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/TestHarnessUtils.java +++ /dev/null @@ -1,204 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.io.IOs; -import io.airbyte.commons.json.Jsons; -import io.airbyte.configoss.ConnectorJobOutput.OutputType; -import io.airbyte.configoss.FailureReason; -import io.airbyte.configoss.StandardSyncInput; -import io.airbyte.configoss.WorkerDestinationConfig; -import io.airbyte.configoss.WorkerSourceConfig; -import io.airbyte.protocol.models.AirbyteControlConnectorConfigMessage; -import io.airbyte.protocol.models.AirbyteControlMessage; -import io.airbyte.protocol.models.AirbyteMessage; -import io.airbyte.protocol.models.AirbyteMessage.Type; -import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.AirbyteTraceMessage; -import io.airbyte.protocol.models.Config; -import io.airbyte.workers.exception.TestHarnessException; -import io.airbyte.workers.helper.FailureHelper; -import io.airbyte.workers.helper.FailureHelper.ConnectorCommand; -import io.airbyte.workers.internal.AirbyteStreamFactory; -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.nio.charset.StandardCharsets; -import java.time.Duration; -import java.time.temporal.ChronoUnit; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -// TODO:(Issue-4824): Figure out how to log Docker process information. -public class TestHarnessUtils { - - private static final Logger LOGGER = LoggerFactory.getLogger(TestHarnessUtils.class); - - public static void gentleClose(final Process process, final long timeout, final TimeUnit timeUnit) { - - if (process == null) { - return; - } - - if (process.info() != null) { - process.info().commandLine().ifPresent(commandLine -> LOGGER.debug("Gently closing process {}", commandLine)); - } - - try { - if (process.isAlive()) { - process.waitFor(timeout, timeUnit); - } - } catch (final InterruptedException e) { - LOGGER.error("Exception while while waiting for process to finish", e); - } - - if (process.isAlive()) { - closeProcess(process, Duration.of(1, ChronoUnit.MINUTES)); - } - } - - public static void closeProcess(final Process process, final Duration lastChanceDuration) { - if (process == null) { - return; - } - try { - process.destroy(); - process.waitFor(lastChanceDuration.toMillis(), TimeUnit.MILLISECONDS); - if (process.isAlive()) { - LOGGER.warn("Process is still alive after calling destroy. Attempting to destroy forcibly..."); - process.destroyForcibly(); - } - } catch (final InterruptedException e) { - LOGGER.error("Exception when closing process.", e); - } - } - - public static void wait(final Process process) { - try { - process.waitFor(); - } catch (final InterruptedException e) { - LOGGER.error("Exception while while waiting for process to finish", e); - } - } - - public static void cancelProcess(final Process process) { - closeProcess(process, Duration.of(10, ChronoUnit.SECONDS)); - } - - /** - * Translates a StandardSyncInput into a WorkerSourceConfig. WorkerSourceConfig is a subset of - * StandardSyncInput. - */ - public static WorkerSourceConfig syncToWorkerSourceConfig(final StandardSyncInput sync) { - return new WorkerSourceConfig() - .withSourceId(sync.getSourceId()) - .withSourceConnectionConfiguration(sync.getSourceConfiguration()) - .withCatalog(sync.getCatalog()) - .withState(sync.getState()); - } - - /** - * Translates a StandardSyncInput into a WorkerDestinationConfig. WorkerDestinationConfig is a - * subset of StandardSyncInput. - */ - public static WorkerDestinationConfig syncToWorkerDestinationConfig(final StandardSyncInput sync) { - return new WorkerDestinationConfig() - .withDestinationId(sync.getDestinationId()) - .withDestinationConnectionConfiguration(sync.getDestinationConfiguration()) - .withCatalog(sync.getCatalog()) - .withState(sync.getState()); - } - - private static ConnectorCommand getConnectorCommandFromOutputType(final OutputType outputType) { - return switch (outputType) { - case SPEC -> ConnectorCommand.SPEC; - case CHECK_CONNECTION -> ConnectorCommand.CHECK; - case DISCOVER_CATALOG_ID -> ConnectorCommand.DISCOVER; - }; - } - - public static Optional getMostRecentConfigControlMessage(final Map> messagesByType) { - return messagesByType.getOrDefault(Type.CONTROL, new ArrayList<>()).stream() - .map(AirbyteMessage::getControl) - .filter(control -> control.getType() == AirbyteControlMessage.Type.CONNECTOR_CONFIG) - .map(AirbyteControlMessage::getConnectorConfig) - .reduce((first, second) -> second); - } - - private static Optional getTraceMessageFromMessagesByType(final Map> messagesByType) { - return messagesByType.getOrDefault(Type.TRACE, new ArrayList<>()).stream() - .map(AirbyteMessage::getTrace) - .filter(trace -> trace.getType() == AirbyteTraceMessage.Type.ERROR) - .findFirst(); - } - - public static Boolean getDidControlMessageChangeConfig(final JsonNode initialConfigJson, final AirbyteControlConnectorConfigMessage configMessage) { - final Config newConfig = configMessage.getConfig(); - final JsonNode newConfigJson = Jsons.jsonNode(newConfig); - return !initialConfigJson.equals(newConfigJson); - } - - public static Map> getMessagesByType(final Process process, final AirbyteStreamFactory streamFactory, final int timeOut) - throws IOException { - final Map> messagesByType; - try (final InputStream stdout = process.getInputStream()) { - messagesByType = streamFactory.create(IOs.newBufferedReader(stdout)) - .collect(Collectors.groupingBy(AirbyteMessage::getType)); - - TestHarnessUtils.gentleClose(process, timeOut, TimeUnit.MINUTES); - return messagesByType; - } - } - - public static Optional getJobFailureReasonFromMessages(final OutputType outputType, - final Map> messagesByType) { - final Optional traceMessage = getTraceMessageFromMessagesByType(messagesByType); - if (traceMessage.isPresent()) { - final ConnectorCommand connectorCommand = getConnectorCommandFromOutputType(outputType); - return Optional.of(FailureHelper.connectorCommandFailure(traceMessage.get(), null, null, connectorCommand)); - } else { - return Optional.empty(); - } - - } - - public static Map mapStreamNamesToSchemas(final StandardSyncInput syncInput) { - return syncInput.getCatalog().getStreams().stream().collect( - Collectors.toMap( - k -> AirbyteStreamNameNamespacePair.fromAirbyteStream(k.getStream()), - v -> v.getStream().getJsonSchema())); - - } - - public static String getStdErrFromErrorStream(final InputStream errorStream) throws IOException { - final BufferedReader reader = new BufferedReader(new InputStreamReader(errorStream, StandardCharsets.UTF_8)); - final StringBuilder errorOutput = new StringBuilder(); - String line; - while ((line = reader.readLine()) != null) { - errorOutput.append(line); - errorOutput.append(System.lineSeparator()); - } - return errorOutput.toString(); - } - - public static void throwWorkerException(final String errorMessage, final Process process) - throws TestHarnessException, IOException { - final String stderr = getStdErrFromErrorStream(process.getErrorStream()); - if (stderr.isEmpty()) { - throw new TestHarnessException(errorMessage); - } else { - throw new TestHarnessException(errorMessage + ": \n" + stderr); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/WorkerConstants.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/WorkerConstants.java deleted file mode 100644 index df65779fbfcbb..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/WorkerConstants.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers; - -public class WorkerConstants { - - public static final String SOURCE_CONFIG_JSON_FILENAME = "source_config.json"; - public static final String DESTINATION_CONFIG_JSON_FILENAME = "destination_config.json"; - - public static final String SOURCE_CATALOG_JSON_FILENAME = "source_catalog.json"; - public static final String DESTINATION_CATALOG_JSON_FILENAME = "destination_catalog.json"; - public static final String INPUT_STATE_JSON_FILENAME = "input_state.json"; - - public static final String RESET_JOB_SOURCE_DOCKER_IMAGE_STUB = "airbyte_empty"; - - public static final String WORKER_ENVIRONMENT = "WORKER_ENVIRONMENT"; - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/exception/TestHarnessException.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/exception/TestHarnessException.java deleted file mode 100644 index d86e21880a3a1..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/exception/TestHarnessException.java +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.exception; - -public class TestHarnessException extends Exception { - - public TestHarnessException(final String message) { - super(message); - } - - public TestHarnessException(final String message, final Throwable cause) { - super(message, cause); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/general/CheckConnectionTestHarness.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/general/CheckConnectionTestHarness.java deleted file mode 100644 index 66133566620d5..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/general/CheckConnectionTestHarness.java +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.general; - -import io.airbyte.configoss.ConnectorJobOutput; -import io.airbyte.configoss.StandardCheckConnectionInput; -import io.airbyte.workers.TestHarness; - -public interface CheckConnectionTestHarness extends TestHarness {} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/general/DbtTransformationRunner.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/general/DbtTransformationRunner.java deleted file mode 100644 index 29d72014dd435..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/general/DbtTransformationRunner.java +++ /dev/null @@ -1,145 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.general; - -import static io.airbyte.workers.process.Metadata.CUSTOM_STEP; -import static io.airbyte.workers.process.Metadata.JOB_TYPE_KEY; -import static io.airbyte.workers.process.Metadata.SYNC_JOB; -import static io.airbyte.workers.process.Metadata.SYNC_STEP_KEY; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.base.Strings; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.io.LineGobbler; -import io.airbyte.commons.logging.LoggingHelper.Color; -import io.airbyte.commons.logging.MdcScope.Builder; -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.configoss.OperatorDbt; -import io.airbyte.configoss.ResourceRequirements; -import io.airbyte.workers.TestHarnessUtils; -import io.airbyte.workers.exception.TestHarnessException; -import io.airbyte.workers.normalization.NormalizationRunner; -import io.airbyte.workers.process.ProcessFactory; -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.concurrent.TimeUnit; -import org.apache.tools.ant.types.Commandline; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class DbtTransformationRunner implements AutoCloseable { - - private static final Logger LOGGER = LoggerFactory.getLogger(DbtTransformationRunner.class); - private static final String DBT_ENTRYPOINT_SH = "entrypoint.sh"; - private static final Builder CONTAINER_LOG_MDC_BUILDER = new Builder() - .setLogPrefix("dbt") - .setPrefixColor(Color.PURPLE_BACKGROUND); - - private final ProcessFactory processFactory; - private final NormalizationRunner normalizationRunner; - private Process process = null; - - public DbtTransformationRunner(final ProcessFactory processFactory, - final NormalizationRunner normalizationRunner) { - this.processFactory = processFactory; - this.normalizationRunner = normalizationRunner; - } - - public void start() throws Exception { - normalizationRunner.start(); - } - - /** - * The docker image used by the DbtTransformationRunner is provided by the User, so we can't ensure - * to have the right python, dbt, dependencies etc software installed to successfully run our - * transform-config scripts (to translate Airbyte Catalogs into Dbt profiles file). Thus, we depend - * on the NormalizationRunner to configure the dbt project with the appropriate destination settings - * and pull the custom git repository into the workspace. - *

    - * Once the workspace folder/files is setup to run, we invoke the custom transformation command as - * provided by the user to execute whatever extra transformation has been implemented. - */ - public boolean run(final String jobId, - final int attempt, - final Path jobRoot, - final JsonNode config, - final ResourceRequirements resourceRequirements, - final OperatorDbt dbtConfig) - throws Exception { - if (!normalizationRunner.configureDbt(jobId, attempt, jobRoot, config, resourceRequirements, dbtConfig)) { - return false; - } - return transform(jobId, attempt, jobRoot, config, resourceRequirements, dbtConfig); - } - - public boolean transform(final String jobId, - final int attempt, - final Path jobRoot, - final JsonNode config, - final ResourceRequirements resourceRequirements, - final OperatorDbt dbtConfig) - throws Exception { - try { - final Map files = ImmutableMap.of( - DBT_ENTRYPOINT_SH, MoreResources.readResource("dbt_transformation_entrypoint.sh"), - "sshtunneling.sh", MoreResources.readResource("sshtunneling.sh")); - final List dbtArguments = new ArrayList<>(); - dbtArguments.add(DBT_ENTRYPOINT_SH); - if (Strings.isNullOrEmpty(dbtConfig.getDbtArguments())) { - throw new TestHarnessException("Dbt Arguments are required"); - } - Collections.addAll(dbtArguments, Commandline.translateCommandline(dbtConfig.getDbtArguments())); - process = - processFactory.create( - CUSTOM_STEP, - jobId, - attempt, - jobRoot, - dbtConfig.getDockerImage(), - false, - false, - files, - "/bin/bash", - resourceRequirements, - null, - Map.of(JOB_TYPE_KEY, SYNC_JOB, SYNC_STEP_KEY, CUSTOM_STEP), - Collections.emptyMap(), - Collections.emptyMap(), - Collections.emptyMap(), - dbtArguments.toArray(new String[0])); - LineGobbler.gobble(process.getInputStream(), LOGGER::info, CONTAINER_LOG_MDC_BUILDER); - LineGobbler.gobble(process.getErrorStream(), LOGGER::error, CONTAINER_LOG_MDC_BUILDER); - - TestHarnessUtils.wait(process); - - return process.exitValue() == 0; - } catch (final Exception e) { - // make sure we kill the process on failure to avoid zombies. - if (process != null) { - TestHarnessUtils.cancelProcess(process); - } - throw e; - } - } - - @Override - public void close() throws Exception { - normalizationRunner.close(); - - if (process == null) { - return; - } - - LOGGER.debug("Closing dbt transformation process"); - TestHarnessUtils.gentleClose(process, 1, TimeUnit.MINUTES); - if (process.isAlive() || process.exitValue() != 0) { - throw new TestHarnessException("Dbt transformation process wasn't successful"); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/general/DefaultCheckConnectionTestHarness.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/general/DefaultCheckConnectionTestHarness.java deleted file mode 100644 index 1653659f1f63f..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/general/DefaultCheckConnectionTestHarness.java +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.general; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.enums.Enums; -import io.airbyte.commons.io.LineGobbler; -import io.airbyte.commons.json.Jsons; -import io.airbyte.configoss.ConnectorJobOutput; -import io.airbyte.configoss.ConnectorJobOutput.OutputType; -import io.airbyte.configoss.FailureReason; -import io.airbyte.configoss.StandardCheckConnectionInput; -import io.airbyte.configoss.StandardCheckConnectionOutput; -import io.airbyte.configoss.StandardCheckConnectionOutput.Status; -import io.airbyte.protocol.models.AirbyteConnectionStatus; -import io.airbyte.protocol.models.AirbyteControlConnectorConfigMessage; -import io.airbyte.protocol.models.AirbyteMessage; -import io.airbyte.protocol.models.AirbyteMessage.Type; -import io.airbyte.workers.TestHarnessUtils; -import io.airbyte.workers.WorkerConstants; -import io.airbyte.workers.exception.TestHarnessException; -import io.airbyte.workers.helper.ConnectorConfigUpdater; -import io.airbyte.workers.internal.AirbyteStreamFactory; -import io.airbyte.workers.internal.DefaultAirbyteStreamFactory; -import io.airbyte.workers.process.IntegrationLauncher; -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class DefaultCheckConnectionTestHarness implements CheckConnectionTestHarness { - - private static final Logger LOGGER = LoggerFactory.getLogger(DefaultCheckConnectionTestHarness.class); - - private final IntegrationLauncher integrationLauncher; - private final ConnectorConfigUpdater connectorConfigUpdater; - private final AirbyteStreamFactory streamFactory; - - private Process process; - - public DefaultCheckConnectionTestHarness(final IntegrationLauncher integrationLauncher, - final ConnectorConfigUpdater connectorConfigUpdater, - final AirbyteStreamFactory streamFactory) { - this.integrationLauncher = integrationLauncher; - this.connectorConfigUpdater = connectorConfigUpdater; - this.streamFactory = streamFactory; - } - - public DefaultCheckConnectionTestHarness(final IntegrationLauncher integrationLauncher, final ConnectorConfigUpdater connectorConfigUpdater) { - this(integrationLauncher, connectorConfigUpdater, new DefaultAirbyteStreamFactory()); - } - - @Override - public ConnectorJobOutput run(final StandardCheckConnectionInput input, final Path jobRoot) throws TestHarnessException { - LineGobbler.startSection("CHECK"); - - try { - final JsonNode inputConfig = input.getConnectionConfiguration(); - process = integrationLauncher.check( - jobRoot, - WorkerConstants.SOURCE_CONFIG_JSON_FILENAME, - Jsons.serialize(inputConfig)); - - final ConnectorJobOutput jobOutput = new ConnectorJobOutput() - .withOutputType(OutputType.CHECK_CONNECTION); - - LineGobbler.gobble(process.getErrorStream(), LOGGER::error); - - final Map> messagesByType = TestHarnessUtils.getMessagesByType(process, streamFactory, 30); - final Optional connectionStatus = messagesByType - .getOrDefault(Type.CONNECTION_STATUS, new ArrayList<>()).stream() - .map(AirbyteMessage::getConnectionStatus) - .findFirst(); - - if (input.getActorId() != null && input.getActorType() != null) { - final Optional optionalConfigMsg = TestHarnessUtils.getMostRecentConfigControlMessage(messagesByType); - if (optionalConfigMsg.isPresent() && TestHarnessUtils.getDidControlMessageChangeConfig(inputConfig, optionalConfigMsg.get())) { - switch (input.getActorType()) { - case SOURCE -> connectorConfigUpdater.updateSource( - input.getActorId(), - optionalConfigMsg.get().getConfig()); - case DESTINATION -> connectorConfigUpdater.updateDestination( - input.getActorId(), - optionalConfigMsg.get().getConfig()); - } - jobOutput.setConnectorConfigurationUpdated(true); - } - } - - final Optional failureReason = TestHarnessUtils.getJobFailureReasonFromMessages(OutputType.CHECK_CONNECTION, messagesByType); - failureReason.ifPresent(jobOutput::setFailureReason); - - final int exitCode = process.exitValue(); - if (exitCode != 0) { - LOGGER.warn("Check connection job subprocess finished with exit code {}", exitCode); - } - - if (connectionStatus.isPresent()) { - final StandardCheckConnectionOutput output = new StandardCheckConnectionOutput() - .withStatus(Enums.convertTo(connectionStatus.get().getStatus(), Status.class)) - .withMessage(connectionStatus.get().getMessage()); - LOGGER.info("Check connection job received output: {}", output); - jobOutput.setCheckConnection(output); - } else if (failureReason.isEmpty()) { - TestHarnessUtils.throwWorkerException("Error checking connection status: no status nor failure reason were outputted", process); - } - LineGobbler.endSection("CHECK"); - return jobOutput; - - } catch (final Exception e) { - LOGGER.error("Unexpected error while checking connection: ", e); - LineGobbler.endSection("CHECK"); - throw new TestHarnessException("Unexpected error while getting checking connection.", e); - } - } - - @Override - public void cancel() { - TestHarnessUtils.cancelProcess(process); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/general/DefaultDiscoverCatalogTestHarness.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/general/DefaultDiscoverCatalogTestHarness.java deleted file mode 100644 index be8d006136649..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/general/DefaultDiscoverCatalogTestHarness.java +++ /dev/null @@ -1,137 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.general; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.api.client.AirbyteApiClient; -import io.airbyte.api.client.model.generated.DiscoverCatalogResult; -import io.airbyte.api.client.model.generated.SourceDiscoverSchemaWriteRequestBody; -import io.airbyte.commons.io.LineGobbler; -import io.airbyte.commons.json.Jsons; -import io.airbyte.configoss.ConnectorJobOutput; -import io.airbyte.configoss.ConnectorJobOutput.OutputType; -import io.airbyte.configoss.FailureReason; -import io.airbyte.configoss.StandardDiscoverCatalogInput; -import io.airbyte.protocol.models.AirbyteCatalog; -import io.airbyte.protocol.models.AirbyteControlConnectorConfigMessage; -import io.airbyte.protocol.models.AirbyteMessage; -import io.airbyte.protocol.models.AirbyteMessage.Type; -import io.airbyte.workers.TestHarnessUtils; -import io.airbyte.workers.WorkerConstants; -import io.airbyte.workers.exception.TestHarnessException; -import io.airbyte.workers.helper.CatalogClientConverters; -import io.airbyte.workers.helper.ConnectorConfigUpdater; -import io.airbyte.workers.internal.AirbyteStreamFactory; -import io.airbyte.workers.internal.DefaultAirbyteStreamFactory; -import io.airbyte.workers.process.IntegrationLauncher; -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.UUID; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class DefaultDiscoverCatalogTestHarness implements DiscoverCatalogTestHarness { - - private static final Logger LOGGER = LoggerFactory.getLogger(DefaultDiscoverCatalogTestHarness.class); - private static final String WRITE_DISCOVER_CATALOG_LOGS_TAG = "call to write discover schema result"; - - private final IntegrationLauncher integrationLauncher; - private final AirbyteStreamFactory streamFactory; - private final ConnectorConfigUpdater connectorConfigUpdater; - private final AirbyteApiClient airbyteApiClient; - private volatile Process process; - - public DefaultDiscoverCatalogTestHarness(final AirbyteApiClient airbyteApiClient, - final IntegrationLauncher integrationLauncher, - final ConnectorConfigUpdater connectorConfigUpdater, - final AirbyteStreamFactory streamFactory) { - this.airbyteApiClient = airbyteApiClient; - this.integrationLauncher = integrationLauncher; - this.streamFactory = streamFactory; - this.connectorConfigUpdater = connectorConfigUpdater; - } - - public DefaultDiscoverCatalogTestHarness(final AirbyteApiClient airbyteApiClient, - final IntegrationLauncher integrationLauncher, - final ConnectorConfigUpdater connectorConfigUpdater) { - this(airbyteApiClient, integrationLauncher, connectorConfigUpdater, new DefaultAirbyteStreamFactory()); - } - - @Override - public ConnectorJobOutput run(final StandardDiscoverCatalogInput discoverSchemaInput, final Path jobRoot) throws TestHarnessException { - try { - final JsonNode inputConfig = discoverSchemaInput.getConnectionConfiguration(); - process = integrationLauncher.discover( - jobRoot, - WorkerConstants.SOURCE_CONFIG_JSON_FILENAME, - Jsons.serialize(inputConfig)); - - final ConnectorJobOutput jobOutput = new ConnectorJobOutput() - .withOutputType(OutputType.DISCOVER_CATALOG_ID); - - LineGobbler.gobble(process.getErrorStream(), LOGGER::error); - - final Map> messagesByType = TestHarnessUtils.getMessagesByType(process, streamFactory, 30); - - final Optional catalog = messagesByType - .getOrDefault(Type.CATALOG, new ArrayList<>()).stream() - .map(AirbyteMessage::getCatalog) - .findFirst(); - - final Optional optionalConfigMsg = TestHarnessUtils.getMostRecentConfigControlMessage(messagesByType); - if (optionalConfigMsg.isPresent() && TestHarnessUtils.getDidControlMessageChangeConfig(inputConfig, optionalConfigMsg.get())) { - connectorConfigUpdater.updateSource( - UUID.fromString(discoverSchemaInput.getSourceId()), - optionalConfigMsg.get().getConfig()); - jobOutput.setConnectorConfigurationUpdated(true); - } - - final Optional failureReason = TestHarnessUtils.getJobFailureReasonFromMessages(OutputType.DISCOVER_CATALOG_ID, messagesByType); - failureReason.ifPresent(jobOutput::setFailureReason); - - final int exitCode = process.exitValue(); - if (exitCode != 0) { - LOGGER.warn("Discover job subprocess finished with exit codee {}", exitCode); - } - - if (catalog.isPresent()) { - final DiscoverCatalogResult result = - AirbyteApiClient.retryWithJitter(() -> airbyteApiClient.getSourceApi() - .writeDiscoverCatalogResult(buildSourceDiscoverSchemaWriteRequestBody(discoverSchemaInput, catalog.get())), - WRITE_DISCOVER_CATALOG_LOGS_TAG); - jobOutput.setDiscoverCatalogId(result.getCatalogId()); - } else if (failureReason.isEmpty()) { - TestHarnessUtils.throwWorkerException("Integration failed to output a catalog struct and did not output a failure reason", process); - } - return jobOutput; - } catch (final TestHarnessException e) { - throw e; - } catch (final Exception e) { - throw new TestHarnessException("Error while discovering schema", e); - } - } - - private SourceDiscoverSchemaWriteRequestBody buildSourceDiscoverSchemaWriteRequestBody(final StandardDiscoverCatalogInput discoverSchemaInput, - final AirbyteCatalog catalog) { - return new SourceDiscoverSchemaWriteRequestBody().catalog( - CatalogClientConverters.toAirbyteCatalogClientApi(catalog)).sourceId( - // NOTE: sourceId is marked required in the OpenAPI config but the code generator doesn't enforce - // it, so we check again here. - discoverSchemaInput.getSourceId() == null ? null : UUID.fromString(discoverSchemaInput.getSourceId())) - .connectorVersion( - discoverSchemaInput.getConnectorVersion()) - .configurationHash( - discoverSchemaInput.getConfigHash()); - } - - @Override - public void cancel() { - TestHarnessUtils.cancelProcess(process); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/general/DefaultGetSpecTestHarness.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/general/DefaultGetSpecTestHarness.java deleted file mode 100644 index c6413132b86ca..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/general/DefaultGetSpecTestHarness.java +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.general; - -import io.airbyte.commons.io.LineGobbler; -import io.airbyte.configoss.ConnectorJobOutput; -import io.airbyte.configoss.ConnectorJobOutput.OutputType; -import io.airbyte.configoss.FailureReason; -import io.airbyte.configoss.JobGetSpecConfig; -import io.airbyte.protocol.models.AirbyteMessage; -import io.airbyte.protocol.models.AirbyteMessage.Type; -import io.airbyte.protocol.models.ConnectorSpecification; -import io.airbyte.workers.TestHarnessUtils; -import io.airbyte.workers.exception.TestHarnessException; -import io.airbyte.workers.internal.AirbyteStreamFactory; -import io.airbyte.workers.internal.DefaultAirbyteStreamFactory; -import io.airbyte.workers.process.IntegrationLauncher; -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class DefaultGetSpecTestHarness implements GetSpecTestHarness { - - private static final Logger LOGGER = LoggerFactory.getLogger(DefaultGetSpecTestHarness.class); - - private final IntegrationLauncher integrationLauncher; - private final AirbyteStreamFactory streamFactory; - private Process process; - - public DefaultGetSpecTestHarness(final IntegrationLauncher integrationLauncher, - final AirbyteStreamFactory streamFactory) { - this.integrationLauncher = integrationLauncher; - this.streamFactory = streamFactory; - } - - public DefaultGetSpecTestHarness(final IntegrationLauncher integrationLauncher) { - this(integrationLauncher, new DefaultAirbyteStreamFactory()); - } - - @Override - public ConnectorJobOutput run(final JobGetSpecConfig config, final Path jobRoot) throws TestHarnessException { - try { - process = integrationLauncher.spec(jobRoot); - - final ConnectorJobOutput jobOutput = new ConnectorJobOutput().withOutputType(OutputType.SPEC); - LineGobbler.gobble(process.getErrorStream(), LOGGER::error); - - final Map> messagesByType = TestHarnessUtils.getMessagesByType(process, streamFactory, 30); - - final Optional spec = messagesByType - .getOrDefault(Type.SPEC, new ArrayList<>()).stream() - .map(AirbyteMessage::getSpec) - .findFirst(); - - final Optional failureReason = TestHarnessUtils.getJobFailureReasonFromMessages(OutputType.SPEC, messagesByType); - failureReason.ifPresent(jobOutput::setFailureReason); - - final int exitCode = process.exitValue(); - if (exitCode != 0) { - LOGGER.warn("Spec job subprocess finished with exit code {}", exitCode); - } - - if (spec.isPresent()) { - jobOutput.setSpec(spec.get()); - } else if (failureReason.isEmpty()) { - TestHarnessUtils.throwWorkerException("Integration failed to output a spec struct and did not output a failure reason", process); - } - - return jobOutput; - } catch (final Exception e) { - throw new TestHarnessException(String.format("Error while getting spec from image %s", config.getDockerImage()), e); - } - - } - - @Override - public void cancel() { - TestHarnessUtils.cancelProcess(process); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/general/DiscoverCatalogTestHarness.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/general/DiscoverCatalogTestHarness.java deleted file mode 100644 index 25d887a832c82..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/general/DiscoverCatalogTestHarness.java +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.general; - -import io.airbyte.configoss.ConnectorJobOutput; -import io.airbyte.configoss.StandardDiscoverCatalogInput; -import io.airbyte.workers.TestHarness; - -public interface DiscoverCatalogTestHarness extends TestHarness {} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/general/GetSpecTestHarness.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/general/GetSpecTestHarness.java deleted file mode 100644 index 9e63c6d2686ef..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/general/GetSpecTestHarness.java +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.general; - -import io.airbyte.configoss.ConnectorJobOutput; -import io.airbyte.configoss.JobGetSpecConfig; -import io.airbyte.workers.TestHarness; - -public interface GetSpecTestHarness extends TestHarness {} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/helper/CatalogClientConverters.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/helper/CatalogClientConverters.java deleted file mode 100644 index 99fad0c29e184..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/helper/CatalogClientConverters.java +++ /dev/null @@ -1,155 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.helper; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import io.airbyte.api.client.model.generated.AirbyteStreamConfiguration; -import io.airbyte.api.client.model.generated.DestinationSyncMode; -import io.airbyte.api.client.model.generated.SyncMode; -import io.airbyte.commons.enums.Enums; -import io.airbyte.commons.text.Names; -import io.airbyte.protocol.models.AirbyteStream; -import io.airbyte.validation.json.JsonValidationException; -import java.util.Collections; -import java.util.List; -import java.util.Optional; -import java.util.Set; -import java.util.stream.Collectors; - -/** - * Utilities to convert Catalog protocol to Catalog API client. This class was similar to existing - * logic in CatalogConverter.java; But code can't be shared because the protocol model is - * essentially converted to two different api models. Thus, if we need to change logic on either - * place we have to take care of the other one too. - */ -public class CatalogClientConverters { - - /** - * - * @param catalog - * @return - */ - public static io.airbyte.protocol.models.AirbyteCatalog toAirbyteProtocol(final io.airbyte.api.client.model.generated.AirbyteCatalog catalog) { - - io.airbyte.protocol.models.AirbyteCatalog protoCatalog = - new io.airbyte.protocol.models.AirbyteCatalog(); - var airbyteStream = catalog.getStreams().stream().map(stream -> { - try { - return toConfiguredProtocol(stream.getStream(), stream.getConfig()); - } catch (JsonValidationException e) { - return null; - } - }).collect(Collectors.toList()); - - protoCatalog.withStreams(airbyteStream); - return protoCatalog; - } - - @SuppressWarnings("PMD.AvoidLiteralsInIfCondition") - private static AirbyteStream toConfiguredProtocol(final io.airbyte.api.client.model.generated.AirbyteStream stream, - AirbyteStreamConfiguration config) - throws JsonValidationException { - if (config.getFieldSelectionEnabled() != null && config.getFieldSelectionEnabled()) { - // Validate the selected field paths. - if (config.getSelectedFields() == null) { - throw new JsonValidationException("Requested field selection but no selected fields provided"); - } - final JsonNode properties = stream.getJsonSchema().findValue("properties"); - if (properties == null || !properties.isObject()) { - throw new JsonValidationException("Requested field selection but no properties node found"); - } - for (final var selectedFieldInfo : config.getSelectedFields()) { - if (selectedFieldInfo.getFieldPath() == null || selectedFieldInfo.getFieldPath().isEmpty()) { - throw new JsonValidationException("Selected field path cannot be empty"); - } - if (selectedFieldInfo.getFieldPath().size() > 1) { - // TODO(mfsiega-airbyte): support nested fields. - throw new UnsupportedOperationException("Nested field selection not supported"); - } - } - // Only include the selected fields. - // NOTE: we verified above that each selected field has at least one element in the field path. - final Set selectedFieldNames = - config.getSelectedFields().stream().map((field) -> field.getFieldPath().get(0)).collect(Collectors.toSet()); - // TODO(mfsiega-airbyte): we only check the top level of the cursor/primary key fields because we - // don't support filtering nested fields yet. - if (config.getSyncMode().equals(SyncMode.INCREMENTAL) // INCREMENTAL sync mode, AND - && !config.getCursorField().isEmpty() // There is a cursor configured, AND - && !selectedFieldNames.contains(config.getCursorField().get(0))) { // The cursor isn't in the selected fields. - throw new JsonValidationException("Cursor field cannot be de-selected in INCREMENTAL syncs"); - } - if (config.getDestinationSyncMode().equals(DestinationSyncMode.APPEND_DEDUP)) { - for (final List primaryKeyComponent : config.getPrimaryKey()) { - if (!selectedFieldNames.contains(primaryKeyComponent.get(0))) { - throw new JsonValidationException("Primary key field cannot be de-selected in DEDUP mode"); - } - } - } - for (final String selectedFieldName : selectedFieldNames) { - if (!properties.has(selectedFieldName)) { - throw new JsonValidationException(String.format("Requested selected field %s not found in JSON schema", selectedFieldName)); - } - } - ((ObjectNode) properties).retain(selectedFieldNames); - } - return new AirbyteStream() - .withName(stream.getName()) - .withJsonSchema(stream.getJsonSchema()) - .withSupportedSyncModes(Enums.convertListTo(stream.getSupportedSyncModes(), io.airbyte.protocol.models.SyncMode.class)) - .withSourceDefinedCursor(stream.getSourceDefinedCursor()) - .withDefaultCursorField(stream.getDefaultCursorField()) - .withSourceDefinedPrimaryKey( - Optional.ofNullable(stream.getSourceDefinedPrimaryKey()).orElse(Collections.emptyList())) - .withNamespace(stream.getNamespace()); - } - - /** - * Converts a protocol AirbyteCatalog to an OpenAPI client versioned AirbyteCatalog. - */ - public static io.airbyte.api.client.model.generated.AirbyteCatalog toAirbyteCatalogClientApi( - final io.airbyte.protocol.models.AirbyteCatalog catalog) { - return new io.airbyte.api.client.model.generated.AirbyteCatalog() - .streams(catalog.getStreams() - .stream() - .map(stream -> toAirbyteStreamClientApi(stream)) - .map(s -> new io.airbyte.api.client.model.generated.AirbyteStreamAndConfiguration() - .stream(s) - .config(generateDefaultConfiguration(s))) - .collect(Collectors.toList())); - } - - private static AirbyteStreamConfiguration generateDefaultConfiguration( - final io.airbyte.api.client.model.generated.AirbyteStream stream) { - final AirbyteStreamConfiguration result = - new AirbyteStreamConfiguration() - .aliasName(Names.toAlphanumericAndUnderscore(stream.getName())) - .cursorField(stream.getDefaultCursorField()) - .destinationSyncMode(DestinationSyncMode.APPEND) - .primaryKey(stream.getSourceDefinedPrimaryKey()) - .selected(true); - if (stream.getSupportedSyncModes().size() > 0) { - result.setSyncMode(Enums.convertTo(stream.getSupportedSyncModes().get(0), - SyncMode.class)); - } else { - result.setSyncMode(SyncMode.INCREMENTAL); - } - return result; - } - - private static io.airbyte.api.client.model.generated.AirbyteStream toAirbyteStreamClientApi( - final AirbyteStream stream) { - return new io.airbyte.api.client.model.generated.AirbyteStream() - .name(stream.getName()) - .jsonSchema(stream.getJsonSchema()) - .supportedSyncModes(Enums.convertListTo(stream.getSupportedSyncModes(), - SyncMode.class)) - .sourceDefinedCursor(stream.getSourceDefinedCursor()) - .defaultCursorField(stream.getDefaultCursorField()) - .sourceDefinedPrimaryKey(stream.getSourceDefinedPrimaryKey()) - .namespace(stream.getNamespace()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/helper/ConnectorConfigUpdater.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/helper/ConnectorConfigUpdater.java deleted file mode 100644 index 440e659dcb3fd..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/helper/ConnectorConfigUpdater.java +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.helper; - -import com.google.common.hash.Hashing; -import io.airbyte.api.client.AirbyteApiClient; -import io.airbyte.api.client.generated.DestinationApi; -import io.airbyte.api.client.generated.SourceApi; -import io.airbyte.api.client.model.generated.DestinationIdRequestBody; -import io.airbyte.api.client.model.generated.DestinationRead; -import io.airbyte.api.client.model.generated.DestinationUpdate; -import io.airbyte.api.client.model.generated.SourceIdRequestBody; -import io.airbyte.api.client.model.generated.SourceRead; -import io.airbyte.api.client.model.generated.SourceUpdate; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.Config; -import java.nio.charset.StandardCharsets; -import java.util.UUID; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Helper class for workers to persist updates to Source/Destination configs emitted from - * AirbyteControlMessages. - * - * This is in order to support connectors updating configs when running commands, which is specially - * useful for migrating configuration to a new version or for enabling connectors that require - * single-use or short-lived OAuth tokens. - */ -public class ConnectorConfigUpdater { - - private static final Logger LOGGER = LoggerFactory.getLogger(ConnectorConfigUpdater.class); - - private final SourceApi sourceApi; - private final DestinationApi destinationApi; - - public ConnectorConfigUpdater(final SourceApi sourceApi, final DestinationApi destinationApi) { - this.sourceApi = sourceApi; - this.destinationApi = destinationApi; - } - - /** - * Updates the Source from a sync job ID with the provided Configuration. Secrets and OAuth - * parameters will be masked when saving. - */ - public void updateSource(final UUID sourceId, final Config config) { - final SourceRead source = AirbyteApiClient.retryWithJitter( - () -> sourceApi.getSource(new SourceIdRequestBody().sourceId(sourceId)), - "get source"); - - final SourceRead updatedSource = AirbyteApiClient.retryWithJitter( - () -> sourceApi - .updateSource(new SourceUpdate() - .sourceId(sourceId) - .name(source.getName()) - .connectionConfiguration(Jsons.jsonNode(config.getAdditionalProperties()))), - "update source"); - - LOGGER.info("Persisted updated configuration for source {}. New config hash: {}.", sourceId, - Hashing.sha256().hashString(updatedSource.getConnectionConfiguration().asText(), StandardCharsets.UTF_8)); - - } - - /** - * Updates the Destination from a sync job ID with the provided Configuration. Secrets and OAuth - * parameters will be masked when saving. - */ - public void updateDestination(final UUID destinationId, final Config config) { - final DestinationRead destination = AirbyteApiClient.retryWithJitter( - () -> destinationApi.getDestination(new DestinationIdRequestBody().destinationId(destinationId)), - "get destination"); - - final DestinationRead updatedDestination = AirbyteApiClient.retryWithJitter( - () -> destinationApi - .updateDestination(new DestinationUpdate() - .destinationId(destinationId) - .name(destination.getName()) - .connectionConfiguration(Jsons.jsonNode(config.getAdditionalProperties()))), - "update destination"); - - LOGGER.info("Persisted updated configuration for destination {}. New config hash: {}.", destinationId, - Hashing.sha256().hashString(updatedDestination.getConnectionConfiguration().asText(), StandardCharsets.UTF_8)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/helper/EntrypointEnvChecker.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/helper/EntrypointEnvChecker.java deleted file mode 100644 index 30c860365f1cb..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/helper/EntrypointEnvChecker.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.helper; - -import io.airbyte.workers.exception.TestHarnessException; -import io.airbyte.workers.process.ProcessFactory; -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStreamReader; -import java.nio.charset.StandardCharsets; -import java.nio.file.Path; -import java.util.Collections; - -/** - * Should only be used by connector testing. - */ -public class EntrypointEnvChecker { - - /** - * @param processFactory any process factory - * @param jobId used as input to processFactory.create - * @param jobAttempt used as input to processFactory.create - * @param jobRoot used as input to processFactory.create - * @param imageName used as input to processFactory.create - * @return the entrypoint in the env variable AIRBYTE_ENTRYPOINT - * @throws RuntimeException if there is ambiguous output from the container - */ - @SuppressWarnings("PMD.AvoidLiteralsInIfCondition") - public static String getEntrypointEnvVariable(final ProcessFactory processFactory, - final String jobId, - final int jobAttempt, - final Path jobRoot, - final String imageName) - throws IOException, InterruptedException, TestHarnessException { - final Process process = processFactory.create( - "entrypoint-checker", - jobId, - jobAttempt, - jobRoot, - imageName, - false, - false, - Collections.emptyMap(), - "printenv", - null, - null, - Collections.emptyMap(), - Collections.emptyMap(), - Collections.emptyMap(), - Collections.emptyMap()); - - final BufferedReader stdout = new BufferedReader(new InputStreamReader(process.getInputStream(), StandardCharsets.UTF_8)); - - String outputLine = null; - - String line = stdout.readLine(); - while ((line != null) && outputLine == null) { - if (line.contains("AIRBYTE_ENTRYPOINT")) { - outputLine = line; - } - line = stdout.readLine(); - } - - process.waitFor(); - - if (outputLine != null) { - final String[] splits = outputLine.split("=", 2); - if (splits.length != 2) { - throw new RuntimeException("String could not be split into multiple segments: " + outputLine); - } else { - return splits[1].strip(); - } - } else { - return null; - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/helper/FailureHelper.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/helper/FailureHelper.java deleted file mode 100644 index a518a98d4ff5e..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/helper/FailureHelper.java +++ /dev/null @@ -1,174 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.helper; - -import com.fasterxml.jackson.annotation.JsonValue; -import io.airbyte.configoss.FailureReason; -import io.airbyte.configoss.FailureReason.FailureOrigin; -import io.airbyte.configoss.FailureReason.FailureType; -import io.airbyte.configoss.Metadata; -import io.airbyte.protocol.models.AirbyteTraceMessage; -import java.util.Comparator; -import java.util.List; -import java.util.Set; -import org.apache.commons.lang3.exception.ExceptionUtils; - -public class FailureHelper { - - private static final String JOB_ID_METADATA_KEY = "jobId"; - private static final String ATTEMPT_NUMBER_METADATA_KEY = "attemptNumber"; - private static final String TRACE_MESSAGE_METADATA_KEY = "from_trace_message"; - private static final String CONNECTOR_COMMAND_METADATA_KEY = "connector_command"; - - public enum ConnectorCommand { - - SPEC("spec"), - CHECK("check"), - DISCOVER("discover"), - WRITE("write"), - READ("read"); - - private final String value; - - ConnectorCommand(final String value) { - this.value = value; - } - - @Override - @JsonValue - public String toString() { - return String.valueOf(value); - } - - } - - public static FailureReason genericFailure(final Throwable t, final Long jobId, final Integer attemptNumber) { - return new FailureReason() - .withInternalMessage(t.getMessage()) - .withStacktrace(ExceptionUtils.getStackTrace(t)) - .withTimestamp(System.currentTimeMillis()) - .withMetadata(jobAndAttemptMetadata(jobId, attemptNumber)); - } - - // Generate a FailureReason from an AirbyteTraceMessage. - // The FailureReason.failureType enum value is taken from the - // AirbyteErrorTraceMessage.failureType enum value, so the same enum value - // must exist on both Enums in order to be applied correctly to the FailureReason - public static FailureReason genericFailure(final AirbyteTraceMessage m, final Long jobId, final Integer attemptNumber) { - FailureType failureType; - if (m.getError().getFailureType() == null) { - // default to system_error when no failure type is set - failureType = FailureType.SYSTEM_ERROR; - } else { - try { - final String traceMessageError = m.getError().getFailureType().toString(); - failureType = FailureType.fromValue(traceMessageError); - } catch (final IllegalArgumentException e) { - // the trace message error does not exist as a FailureReason failure type, - // so set the failure type to null - failureType = FailureType.SYSTEM_ERROR; - } - } - return new FailureReason() - .withInternalMessage(m.getError().getInternalMessage()) - .withExternalMessage(m.getError().getMessage()) - .withStacktrace(m.getError().getStackTrace()) - .withTimestamp(m.getEmittedAt().longValue()) - .withFailureType(failureType) - .withMetadata(traceMessageMetadata(jobId, attemptNumber)); - } - - public static FailureReason connectorCommandFailure(final AirbyteTraceMessage m, - final Long jobId, - final Integer attemptNumber, - final ConnectorCommand connectorCommand) { - final Metadata metadata = traceMessageMetadata(jobId, attemptNumber); - metadata.withAdditionalProperty(CONNECTOR_COMMAND_METADATA_KEY, connectorCommand.toString()); - return genericFailure(m, jobId, attemptNumber) - .withMetadata(metadata); - } - - public static FailureReason connectorCommandFailure(final Throwable t, - final Long jobId, - final Integer attemptNumber, - final ConnectorCommand connectorCommand) { - final Metadata metadata = jobAndAttemptMetadata(jobId, attemptNumber); - metadata.withAdditionalProperty(CONNECTOR_COMMAND_METADATA_KEY, connectorCommand.toString()); - return genericFailure(t, jobId, attemptNumber) - .withMetadata(metadata); - } - - public static FailureReason sourceFailure(final Throwable t, final Long jobId, final Integer attemptNumber) { - return connectorCommandFailure(t, jobId, attemptNumber, ConnectorCommand.READ) - .withFailureOrigin(FailureOrigin.SOURCE) - .withExternalMessage("Something went wrong within the source connector"); - } - - public static FailureReason sourceFailure(final AirbyteTraceMessage m, final Long jobId, final Integer attemptNumber) { - return connectorCommandFailure(m, jobId, attemptNumber, ConnectorCommand.READ) - .withFailureOrigin(FailureOrigin.SOURCE); - } - - public static FailureReason destinationFailure(final Throwable t, final Long jobId, final Integer attemptNumber) { - return connectorCommandFailure(t, jobId, attemptNumber, ConnectorCommand.WRITE) - .withFailureOrigin(FailureOrigin.DESTINATION) - .withExternalMessage("Something went wrong within the destination connector"); - } - - public static FailureReason destinationFailure(final AirbyteTraceMessage m, final Long jobId, final Integer attemptNumber) { - return connectorCommandFailure(m, jobId, attemptNumber, ConnectorCommand.WRITE) - .withFailureOrigin(FailureOrigin.DESTINATION); - } - - public static FailureReason checkFailure(final Throwable t, - final Long jobId, - final Integer attemptNumber, - final FailureOrigin origin) { - return connectorCommandFailure(t, jobId, attemptNumber, ConnectorCommand.CHECK) - .withFailureOrigin(origin) - .withFailureType(FailureType.CONFIG_ERROR) - .withRetryable(false) - .withExternalMessage(String - .format("Checking %s connection failed - please review this connection's configuration to prevent future syncs from failing", origin)); - } - - public static FailureReason unknownOriginFailure(final Throwable t, final Long jobId, final Integer attemptNumber) { - return genericFailure(t, jobId, attemptNumber) - .withFailureOrigin(FailureOrigin.UNKNOWN) - .withExternalMessage("An unknown failure occurred"); - } - - private static Metadata jobAndAttemptMetadata(final Long jobId, final Integer attemptNumber) { - return new Metadata() - .withAdditionalProperty(JOB_ID_METADATA_KEY, jobId) - .withAdditionalProperty(ATTEMPT_NUMBER_METADATA_KEY, attemptNumber); - } - - private static Metadata traceMessageMetadata(final Long jobId, final Integer attemptNumber) { - return new Metadata() - .withAdditionalProperty(JOB_ID_METADATA_KEY, jobId) - .withAdditionalProperty(ATTEMPT_NUMBER_METADATA_KEY, attemptNumber) - .withAdditionalProperty(TRACE_MESSAGE_METADATA_KEY, true); - } - - /** - * Orders failures by putting errors from trace messages first, and then orders by timestamp, so - * that earlier failures come first. - */ - public static List orderedFailures(final Set failures) { - final Comparator compareByIsTrace = Comparator.comparing(failureReason -> { - final Object metadata = failureReason.getMetadata(); - if (metadata != null) { - return failureReason.getMetadata().getAdditionalProperties().containsKey(TRACE_MESSAGE_METADATA_KEY) ? 0 : 1; - } else { - return 1; - } - }); - final Comparator compareByTimestamp = Comparator.comparing(FailureReason::getTimestamp); - final Comparator compareByTraceAndTimestamp = compareByIsTrace.thenComparing(compareByTimestamp); - return failures.stream().sorted(compareByTraceAndTimestamp).toList(); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/AirbyteDestination.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/AirbyteDestination.java deleted file mode 100644 index caf9d2e2268a7..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/AirbyteDestination.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.internal; - -import io.airbyte.commons.functional.CheckedConsumer; -import io.airbyte.configoss.WorkerDestinationConfig; -import io.airbyte.protocol.models.AirbyteMessage; -import java.nio.file.Path; -import java.util.Map; -import java.util.Optional; - -/** - * This interface provides a java interface over all interactions with a Destination from the POV of - * the platform. It encapsulates the full lifecycle of the Destination as well as any inputs and - * outputs. - */ -public interface AirbyteDestination extends CheckedConsumer, AutoCloseable { - - /** - * Starts the Destination container. It instantiates a writer to write to STDIN on that container. - * It also instantiates a reader to listen on STDOUT. - * - * @param destinationConfig - contains the arguments that must be passed to the write method of the - * Destination. - * @param jobRoot - directory where the job can write data. - * @param additionalEnvironmentVariables - * @throws Exception - throws if there is any failure in startup. - */ - void start(WorkerDestinationConfig destinationConfig, Path jobRoot, Map additionalEnvironmentVariables) throws Exception; - - /** - * Accepts an AirbyteMessage and writes it to STDIN of the Destination. Blocks if STDIN's buffer is - * full. - * - * @param message message to send to destination. - * @throws Exception - throws if there is any failure in writing to Destination. - */ - @Override - void accept(AirbyteMessage message) throws Exception; - - /** - * This method is a flush to make sure all data that should be written to the Destination is - * written. Any messages that have already been accepted - * ({@link AirbyteDestination#accept(AirbyteMessage)} ()}) will be flushed. Any additional messages - * sent to accept will not be flushed. In fact, flush should fail if the caller attempts to send it - * additional messages after calling this method. - * - * (Potentially should just rename it to flush) - * - * @throws Exception - throws if there is any failure when flushing. - */ - void notifyEndOfInput() throws Exception; - - /** - * Means no more data will be emitted by the Destination. This may be because all data has already - * been emitted or because the Destination container has exited. - * - * @return true, if no more data will be emitted. otherwise, false. - */ - boolean isFinished(); - - /** - * Gets the exit value of the destination process. This should only be called after the destination - * process has finished. - * - * @return exit code of the destination process - * @throws IllegalStateException if the destination process has not exited - */ - int getExitValue(); - - /** - * Attempts to read an AirbyteMessage from the Destination. - * - * @return returns an AirbyteMessage if the Destination emits one. Otherwise, empty. This method - * BLOCKS on waiting for the Destination to emit data to STDOUT. - */ - Optional attemptRead(); - - /** - * Attempts to shut down the Destination's container. Waits for a graceful shutdown, capped by a - * timeout. - * - * @throws Exception - throws if there is any failure in shutdown. - */ - @Override - void close() throws Exception; - - /** - * Attempt to shut down the Destination's container quickly. - * - * @throws Exception - throws if there is any failure in shutdown. - */ - void cancel() throws Exception; - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/AirbyteMessageBufferedWriter.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/AirbyteMessageBufferedWriter.java deleted file mode 100644 index 2a9e14a226f0b..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/AirbyteMessageBufferedWriter.java +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.internal; - -import io.airbyte.protocol.models.AirbyteMessage; -import java.io.IOException; - -public interface AirbyteMessageBufferedWriter { - - void write(AirbyteMessage message) throws IOException; - - void flush() throws IOException; - - void close() throws IOException; - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/AirbyteMessageBufferedWriterFactory.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/AirbyteMessageBufferedWriterFactory.java deleted file mode 100644 index 17f1c250eaebd..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/AirbyteMessageBufferedWriterFactory.java +++ /dev/null @@ -1,13 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.internal; - -import java.io.BufferedWriter; - -public interface AirbyteMessageBufferedWriterFactory { - - AirbyteMessageBufferedWriter createWriter(BufferedWriter bufferedWriter); - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/AirbyteProtocolPredicate.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/AirbyteProtocolPredicate.java deleted file mode 100644 index 1db8c67c8530f..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/AirbyteProtocolPredicate.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.internal; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.protocol.models.AirbyteProtocolSchema; -import io.airbyte.validation.json.JsonSchemaValidator; -import java.util.function.Predicate; - -/** - * Verify that the provided JsonNode is a valid AirbyteMessage. Any AirbyteMessage type is allowed - * (e.g. Record, State, Log, etc). - */ -public class AirbyteProtocolPredicate implements Predicate { - - private static final String PROTOCOL_SCHEMA_NAME = "protocol schema"; - private final JsonSchemaValidator jsonSchemaValidator; - - public AirbyteProtocolPredicate() { - jsonSchemaValidator = new JsonSchemaValidator(); - final JsonNode schema = JsonSchemaValidator.getSchema(AirbyteProtocolSchema.PROTOCOL.getFile(), "AirbyteMessage"); - jsonSchemaValidator.initializeSchemaValidator(PROTOCOL_SCHEMA_NAME, schema); - } - - @Override - public boolean test(final JsonNode s) { - return jsonSchemaValidator.testInitializedSchema(PROTOCOL_SCHEMA_NAME, s); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/AirbyteSource.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/AirbyteSource.java deleted file mode 100644 index 536084390fadc..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/AirbyteSource.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.internal; - -import io.airbyte.configoss.WorkerSourceConfig; -import io.airbyte.protocol.models.AirbyteMessage; -import java.nio.file.Path; -import java.util.Optional; - -/** - * This interface provides a java interface over all interactions with a Source from the POV of the - * platform. It encapsulates the full lifecycle of the Source as well as any outputs. - */ -public interface AirbyteSource extends AutoCloseable { - - /** - * Starts the Source container and opens a connection to STDOUT on that container. - * - * @param sourceConfig - contains the arguments that must be passed to the read method of the - * Source. - * @param jobRoot - directory where the job can write data. - * @throws Exception - throws if there is any failure in startup. - */ - void start(WorkerSourceConfig sourceConfig, Path jobRoot) throws Exception; - - /** - * Means no more data will be emitted by the Source. This may be because all data has already been - * emitted or because the Source container has exited. - * - * @return true, if no more data will be emitted. otherwise, false. - */ - boolean isFinished(); - - /** - * Gets the exit value of the source process. This should only be called after the source process - * has finished. - * - * @return exit code of the source process - * @throws IllegalStateException if the source process has not exited - */ - int getExitValue(); - - /** - * Attempts to read an AirbyteMessage from the Source. - * - * @return returns an AirbyteMessage is the Source emits one. Otherwise, empty. This method BLOCKS - * on waiting for the Source to emit data to STDOUT. - */ - Optional attemptRead(); - - /** - * Attempts to shut down the Source's container. Waits for a graceful shutdown, capped by a timeout. - * - * @throws Exception - throws if there is any failure in shutdown. - */ - @Override - void close() throws Exception; - - /** - * Attempt to shut down the Source's container quickly. - * - * @throws Exception - throws if there is any failure in shutdown. - */ - void cancel() throws Exception; - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/AirbyteStreamFactory.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/AirbyteStreamFactory.java deleted file mode 100644 index 06463b290d87b..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/AirbyteStreamFactory.java +++ /dev/null @@ -1,15 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.internal; - -import io.airbyte.protocol.models.AirbyteMessage; -import java.io.BufferedReader; -import java.util.stream.Stream; - -public interface AirbyteStreamFactory { - - Stream create(BufferedReader bufferedReader); - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/DefaultAirbyteDestination.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/DefaultAirbyteDestination.java deleted file mode 100644 index 4d5b07bd873ed..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/DefaultAirbyteDestination.java +++ /dev/null @@ -1,178 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.internal; - -import com.google.common.base.Charsets; -import com.google.common.base.Preconditions; -import io.airbyte.commons.io.IOs; -import io.airbyte.commons.io.LineGobbler; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.logging.LoggingHelper.Color; -import io.airbyte.commons.logging.MdcScope.Builder; -import io.airbyte.commons.protocol.DefaultProtocolSerializer; -import io.airbyte.commons.protocol.ProtocolSerializer; -import io.airbyte.configoss.WorkerDestinationConfig; -import io.airbyte.protocol.models.AirbyteMessage; -import io.airbyte.protocol.models.AirbyteMessage.Type; -import io.airbyte.workers.TestHarnessUtils; -import io.airbyte.workers.WorkerConstants; -import io.airbyte.workers.exception.TestHarnessException; -import io.airbyte.workers.process.IntegrationLauncher; -import java.io.BufferedWriter; -import java.io.IOException; -import java.io.OutputStreamWriter; -import java.nio.file.Path; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class DefaultAirbyteDestination implements AirbyteDestination { - - private static final Logger LOGGER = LoggerFactory.getLogger(DefaultAirbyteDestination.class); - public static final Builder CONTAINER_LOG_MDC_BUILDER = new Builder() - .setLogPrefix("destination") - .setPrefixColor(Color.YELLOW_BACKGROUND); - static final Set IGNORED_EXIT_CODES = Set.of( - 0, // Normal exit - 143 // SIGTERM - ); - - private final IntegrationLauncher integrationLauncher; - private final AirbyteStreamFactory streamFactory; - private final AirbyteMessageBufferedWriterFactory messageWriterFactory; - private final ProtocolSerializer protocolSerializer; - - private final AtomicBoolean inputHasEnded = new AtomicBoolean(false); - - private Process destinationProcess = null; - private AirbyteMessageBufferedWriter writer = null; - private Iterator messageIterator = null; - private Integer exitValue = null; - - public DefaultAirbyteDestination(final IntegrationLauncher integrationLauncher) { - this(integrationLauncher, new DefaultAirbyteStreamFactory(CONTAINER_LOG_MDC_BUILDER), new DefaultAirbyteMessageBufferedWriterFactory(), - new DefaultProtocolSerializer()); - - } - - public DefaultAirbyteDestination(final IntegrationLauncher integrationLauncher, - final AirbyteStreamFactory streamFactory, - final AirbyteMessageBufferedWriterFactory messageWriterFactory, - final ProtocolSerializer protocolSerializer) { - this.integrationLauncher = integrationLauncher; - this.streamFactory = streamFactory; - this.messageWriterFactory = messageWriterFactory; - this.protocolSerializer = protocolSerializer; - } - - @Override - public void start(final WorkerDestinationConfig destinationConfig, final Path jobRoot, final Map additionalEnvironmentVariables) - throws IOException, TestHarnessException { - Preconditions.checkState(destinationProcess == null); - - LOGGER.info("Running destination..."); - destinationProcess = integrationLauncher.write( - jobRoot, - WorkerConstants.DESTINATION_CONFIG_JSON_FILENAME, - Jsons.serialize(destinationConfig.getDestinationConnectionConfiguration()), - WorkerConstants.DESTINATION_CATALOG_JSON_FILENAME, - protocolSerializer.serialize(destinationConfig.getCatalog()), - additionalEnvironmentVariables); - // stdout logs are logged elsewhere since stdout also contains data - LineGobbler.gobble(destinationProcess.getErrorStream(), LOGGER::error, "airbyte-destination", CONTAINER_LOG_MDC_BUILDER); - - writer = messageWriterFactory.createWriter(new BufferedWriter(new OutputStreamWriter(destinationProcess.getOutputStream(), Charsets.UTF_8))); - - final List acceptedMessageTypes = List.of(Type.STATE, Type.TRACE, Type.CONTROL); - messageIterator = streamFactory.create(IOs.newBufferedReader(destinationProcess.getInputStream())) - .filter(message -> acceptedMessageTypes.contains(message.getType())) - .iterator(); - } - - @Override - public void accept(final AirbyteMessage message) throws IOException { - Preconditions.checkState(destinationProcess != null && !inputHasEnded.get()); - - writer.write(message); - } - - @Override - public void notifyEndOfInput() throws IOException { - Preconditions.checkState(destinationProcess != null && !inputHasEnded.get()); - - writer.flush(); - writer.close(); - inputHasEnded.set(true); - } - - @Override - public void close() throws Exception { - if (destinationProcess == null) { - LOGGER.debug("Destination process already exited"); - return; - } - - if (!inputHasEnded.get()) { - notifyEndOfInput(); - } - - LOGGER.debug("Closing destination process"); - TestHarnessUtils.gentleClose(destinationProcess, 1, TimeUnit.MINUTES); - if (destinationProcess.isAlive() || !IGNORED_EXIT_CODES.contains(getExitValue())) { - final String message = - destinationProcess.isAlive() ? "Destination has not terminated " : "Destination process exit with code " + getExitValue(); - throw new TestHarnessException(message + ". This warning is normal if the job was cancelled."); - } - } - - @Override - public void cancel() throws Exception { - LOGGER.info("Attempting to cancel destination process..."); - - if (destinationProcess == null) { - LOGGER.info("Destination process no longer exists, cancellation is a no-op."); - } else { - LOGGER.info("Destination process exists, cancelling..."); - TestHarnessUtils.cancelProcess(destinationProcess); - LOGGER.info("Cancelled destination process!"); - } - } - - @Override - public boolean isFinished() { - Preconditions.checkState(destinationProcess != null); - /* - * As this check is done on every message read, it is important for this operation to be efficient. - * Short circuit early to avoid checking the underlying process. Note: hasNext is blocking. - */ - return !messageIterator.hasNext() && !destinationProcess.isAlive(); - } - - @Override - public int getExitValue() { - Preconditions.checkState(destinationProcess != null, "Destination process is null, cannot retrieve exit value."); - Preconditions.checkState(!destinationProcess.isAlive(), "Destination process is still alive, cannot retrieve exit value."); - - if (exitValue == null) { - exitValue = destinationProcess.exitValue(); - } - - return exitValue; - } - - @Override - public Optional attemptRead() { - Preconditions.checkState(destinationProcess != null); - - return Optional.ofNullable(messageIterator.hasNext() ? messageIterator.next() : null); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/DefaultAirbyteMessageBufferedWriter.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/DefaultAirbyteMessageBufferedWriter.java deleted file mode 100644 index d6f5fc436b7bb..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/DefaultAirbyteMessageBufferedWriter.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.internal; - -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.AirbyteMessage; -import java.io.BufferedWriter; -import java.io.IOException; - -public class DefaultAirbyteMessageBufferedWriter implements AirbyteMessageBufferedWriter { - - protected final BufferedWriter writer; - - public DefaultAirbyteMessageBufferedWriter(final BufferedWriter writer) { - this.writer = writer; - } - - @Override - public void write(final AirbyteMessage message) throws IOException { - writer.write(Jsons.serialize(message)); - writer.newLine(); - } - - @Override - public void flush() throws IOException { - writer.flush(); - } - - @Override - public void close() throws IOException { - writer.close(); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/DefaultAirbyteMessageBufferedWriterFactory.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/DefaultAirbyteMessageBufferedWriterFactory.java deleted file mode 100644 index fce1c2ff3ee7e..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/DefaultAirbyteMessageBufferedWriterFactory.java +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.internal; - -import java.io.BufferedWriter; - -public class DefaultAirbyteMessageBufferedWriterFactory implements AirbyteMessageBufferedWriterFactory { - - @Override - public AirbyteMessageBufferedWriter createWriter(BufferedWriter writer) { - return new DefaultAirbyteMessageBufferedWriter(writer); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/DefaultAirbyteSource.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/DefaultAirbyteSource.java deleted file mode 100644 index 269841619bfaa..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/DefaultAirbyteSource.java +++ /dev/null @@ -1,182 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.internal; - -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; -import io.airbyte.commons.features.FeatureFlags; -import io.airbyte.commons.io.IOs; -import io.airbyte.commons.io.LineGobbler; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.logging.LoggingHelper.Color; -import io.airbyte.commons.logging.MdcScope.Builder; -import io.airbyte.commons.protocol.DefaultProtocolSerializer; -import io.airbyte.commons.protocol.ProtocolSerializer; -import io.airbyte.configoss.WorkerSourceConfig; -import io.airbyte.protocol.models.AirbyteMessage; -import io.airbyte.protocol.models.AirbyteMessage.Type; -import io.airbyte.workers.TestHarnessUtils; -import io.airbyte.workers.WorkerConstants; -import io.airbyte.workers.process.IntegrationLauncher; -import java.nio.file.Path; -import java.time.Duration; -import java.time.temporal.ChronoUnit; -import java.util.Iterator; -import java.util.List; -import java.util.Optional; -import java.util.Set; -import java.util.concurrent.TimeUnit; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class DefaultAirbyteSource implements AirbyteSource { - - private static final Logger LOGGER = LoggerFactory.getLogger(DefaultAirbyteSource.class); - - private static final Duration HEARTBEAT_FRESH_DURATION = Duration.of(5, ChronoUnit.MINUTES); - private static final Duration GRACEFUL_SHUTDOWN_DURATION = Duration.of(1, ChronoUnit.MINUTES); - static final Set IGNORED_EXIT_CODES = Set.of( - 0, // Normal exit - 143 // SIGTERM - ); - - public static final Builder CONTAINER_LOG_MDC_BUILDER = new Builder() - .setLogPrefix("source") - .setPrefixColor(Color.BLUE_BACKGROUND); - - private final IntegrationLauncher integrationLauncher; - private final AirbyteStreamFactory streamFactory; - private final ProtocolSerializer protocolSerializer; - private final HeartbeatMonitor heartbeatMonitor; - - private Process sourceProcess = null; - private Iterator messageIterator = null; - private Integer exitValue = null; - private final boolean featureFlagLogConnectorMsgs; - - public DefaultAirbyteSource(final IntegrationLauncher integrationLauncher, final FeatureFlags featureFlags) { - this(integrationLauncher, new DefaultAirbyteStreamFactory(CONTAINER_LOG_MDC_BUILDER), new DefaultProtocolSerializer(), featureFlags); - } - - public DefaultAirbyteSource(final IntegrationLauncher integrationLauncher, - final AirbyteStreamFactory streamFactory, - final ProtocolSerializer protocolSerializer, - final FeatureFlags featureFlags) { - this(integrationLauncher, streamFactory, new HeartbeatMonitor(HEARTBEAT_FRESH_DURATION), protocolSerializer, featureFlags); - } - - @VisibleForTesting - DefaultAirbyteSource(final IntegrationLauncher integrationLauncher, - final AirbyteStreamFactory streamFactory, - final HeartbeatMonitor heartbeatMonitor, - final ProtocolSerializer protocolSerializer, - final FeatureFlags featureFlags) { - this.integrationLauncher = integrationLauncher; - this.streamFactory = streamFactory; - this.protocolSerializer = protocolSerializer; - this.heartbeatMonitor = heartbeatMonitor; - featureFlagLogConnectorMsgs = featureFlags.logConnectorMessages(); - } - - @Override - public void start(final WorkerSourceConfig sourceConfig, final Path jobRoot) throws Exception { - Preconditions.checkState(sourceProcess == null); - - sourceProcess = integrationLauncher.read(jobRoot, - WorkerConstants.SOURCE_CONFIG_JSON_FILENAME, - Jsons.serialize(sourceConfig.getSourceConnectionConfiguration()), - WorkerConstants.SOURCE_CATALOG_JSON_FILENAME, - protocolSerializer.serialize(sourceConfig.getCatalog()), - sourceConfig.getState() == null ? null : WorkerConstants.INPUT_STATE_JSON_FILENAME, - // TODO We should be passing a typed state here and use the protocolSerializer - sourceConfig.getState() == null ? null : Jsons.serialize(sourceConfig.getState().getState())); - // stdout logs are logged elsewhere since stdout also contains data - LineGobbler.gobble(sourceProcess.getErrorStream(), LOGGER::error, "airbyte-source", CONTAINER_LOG_MDC_BUILDER); - - logInitialStateAsJSON(sourceConfig); - - final List acceptedMessageTypes = List.of(Type.RECORD, Type.STATE, Type.TRACE, Type.CONTROL); - messageIterator = streamFactory.create(IOs.newBufferedReader(sourceProcess.getInputStream())) - .peek(message -> heartbeatMonitor.beat()) - .filter(message -> acceptedMessageTypes.contains(message.getType())) - .iterator(); - } - - @Override - public boolean isFinished() { - Preconditions.checkState(sourceProcess != null); - - /* - * As this check is done on every message read, it is important for this operation to be efficient. - * Short circuit early to avoid checking the underlying process. note: hasNext is blocking. - */ - return !messageIterator.hasNext() && !sourceProcess.isAlive(); - } - - @Override - public int getExitValue() throws IllegalStateException { - Preconditions.checkState(sourceProcess != null, "Source process is null, cannot retrieve exit value."); - Preconditions.checkState(!sourceProcess.isAlive(), "Source process is still alive, cannot retrieve exit value."); - - if (exitValue == null) { - exitValue = sourceProcess.exitValue(); - } - - return exitValue; - } - - @Override - public Optional attemptRead() { - Preconditions.checkState(sourceProcess != null); - - return Optional.ofNullable(messageIterator.hasNext() ? messageIterator.next() : null); - } - - @Override - public void close() throws Exception { - if (sourceProcess == null) { - LOGGER.debug("Source process already exited"); - return; - } - - LOGGER.debug("Closing source process"); - TestHarnessUtils.gentleClose( - sourceProcess, - GRACEFUL_SHUTDOWN_DURATION.toMillis(), - TimeUnit.MILLISECONDS); - - if (sourceProcess.isAlive() || !IGNORED_EXIT_CODES.contains(getExitValue())) { - final String message = sourceProcess.isAlive() ? "Source has not terminated " : "Source process exit with code " + getExitValue(); - LOGGER.warn(message + ". This warning is normal if the job was cancelled."); - } - } - - @Override - public void cancel() throws Exception { - LOGGER.info("Attempting to cancel source process..."); - - if (sourceProcess == null) { - LOGGER.info("Source process no longer exists, cancellation is a no-op."); - } else { - LOGGER.info("Source process exists, cancelling..."); - TestHarnessUtils.cancelProcess(sourceProcess); - LOGGER.info("Cancelled source process!"); - } - } - - private void logInitialStateAsJSON(final WorkerSourceConfig sourceConfig) { - if (!featureFlagLogConnectorMsgs) { - return; - } - - if (sourceConfig.getState() == null) { - LOGGER.info("source starting state | empty"); - return; - } - - LOGGER.info("source starting state | " + Jsons.serialize(sourceConfig.getState().getState())); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/DefaultAirbyteStreamFactory.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/DefaultAirbyteStreamFactory.java deleted file mode 100644 index 2badef87b11ad..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/DefaultAirbyteStreamFactory.java +++ /dev/null @@ -1,178 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.internal; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.logging.MdcScope; -import io.airbyte.protocol.models.AirbyteLogMessage; -import io.airbyte.protocol.models.AirbyteMessage; -import java.io.BufferedReader; -import java.lang.reflect.InvocationTargetException; -import java.nio.charset.StandardCharsets; -import java.text.CharacterIterator; -import java.text.StringCharacterIterator; -import java.time.Instant; -import java.util.Optional; -import java.util.stream.Stream; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Creates a stream from an input stream. The produced stream attempts to parse each line of the - * InputStream into a AirbyteMessage. If the line cannot be parsed into a AirbyteMessage it is - * dropped. Each record MUST be new line separated. - * - *

    - * If a line starts with a AirbyteMessage and then has other characters after it, that - * AirbyteMessage will still be parsed. If there are multiple AirbyteMessage records on the same - * line, only the first will be parsed. - */ -@SuppressWarnings("PMD.MoreThanOneLogger") -public class DefaultAirbyteStreamFactory implements AirbyteStreamFactory { - - private static final Logger LOGGER = LoggerFactory.getLogger(DefaultAirbyteStreamFactory.class); - private final double MAX_SIZE_RATIO = 0.8; - - private final MdcScope.Builder containerLogMdcBuilder; - private final AirbyteProtocolPredicate protocolValidator; - protected final Logger logger; - private final long maxMemory; - private final Optional> exceptionClass; - - public DefaultAirbyteStreamFactory() { - this(MdcScope.DEFAULT_BUILDER); - } - - public DefaultAirbyteStreamFactory(final MdcScope.Builder containerLogMdcBuilder) { - this(new AirbyteProtocolPredicate(), LOGGER, containerLogMdcBuilder, Optional.empty()); - } - - /** - * Create a default airbyte stream, if a `messageSizeExceptionClass` is not empty, the message size - * will be checked and if it more than the available memory * MAX_SIZE_RATIO the sync will be failed - * by throwing the exception provided. The exception must have a constructor that accept a string. - */ - DefaultAirbyteStreamFactory(final AirbyteProtocolPredicate protocolPredicate, - final Logger logger, - final MdcScope.Builder containerLogMdcBuilder, - final Optional> messageSizeExceptionClass) { - protocolValidator = protocolPredicate; - this.logger = logger; - this.containerLogMdcBuilder = containerLogMdcBuilder; - this.exceptionClass = messageSizeExceptionClass; - this.maxMemory = Runtime.getRuntime().maxMemory(); - } - - @VisibleForTesting - DefaultAirbyteStreamFactory(final AirbyteProtocolPredicate protocolPredicate, - final Logger logger, - final MdcScope.Builder containerLogMdcBuilder, - final Optional> messageSizeExceptionClass, - final long maxMemory) { - protocolValidator = protocolPredicate; - this.logger = logger; - this.containerLogMdcBuilder = containerLogMdcBuilder; - this.exceptionClass = messageSizeExceptionClass; - this.maxMemory = maxMemory; - } - - @Override - public Stream create(final BufferedReader bufferedReader) { - return bufferedReader - .lines() - .peek(str -> { - if (exceptionClass.isPresent()) { - final long messageSize = str.getBytes(StandardCharsets.UTF_8).length; - if (messageSize > maxMemory * MAX_SIZE_RATIO) { - try { - final String errorMessage = String.format( - "Airbyte has received a message at %s UTC which is larger than %s (size: %s). The sync has been failed to prevent running out of memory.", - Instant.now(), - humanReadableByteCountSI(maxMemory), - humanReadableByteCountSI(messageSize)); - throw exceptionClass.get().getConstructor(String.class).newInstance(errorMessage); - } catch (final InstantiationException | IllegalAccessException | InvocationTargetException | NoSuchMethodException e) { - throw new RuntimeException(e); - } - } - } - }) - .flatMap(this::parseJson) - .filter(this::validate) - .flatMap(this::toAirbyteMessage) - .filter(this::filterLog); - } - - protected Stream parseJson(final String line) { - final Optional jsonLine = Jsons.tryDeserializeWithoutWarn(line); - if (jsonLine.isEmpty()) { - // we log as info all the lines that are not valid json - // some sources actually log their process on stdout, we - // want to make sure this info is available in the logs. - try (final var mdcScope = containerLogMdcBuilder.build()) { - logger.info(line); - } - } - return jsonLine.stream(); - } - - protected boolean validate(final JsonNode json) { - final boolean res = protocolValidator.test(json); - if (!res) { - logger.error("Validation failed: {}", Jsons.serialize(json)); - } - return res; - } - - protected Stream toAirbyteMessage(final JsonNode json) { - final Optional m = Jsons.tryObject(json, AirbyteMessage.class); - if (m.isEmpty()) { - logger.error("Deserialization failed: {}", Jsons.serialize(json)); - } - return m.stream(); - } - - protected boolean filterLog(final AirbyteMessage message) { - final boolean isLog = message.getType() == AirbyteMessage.Type.LOG; - if (isLog) { - try (final var mdcScope = containerLogMdcBuilder.build()) { - internalLog(message.getLog()); - } - } - return !isLog; - } - - protected void internalLog(final AirbyteLogMessage logMessage) { - final String combinedMessage = - logMessage.getMessage() + (logMessage.getStackTrace() != null ? (System.lineSeparator() - + "Stack Trace: " + logMessage.getStackTrace()) : ""); - - switch (logMessage.getLevel()) { - case FATAL, ERROR -> logger.error(combinedMessage); - case WARN -> logger.warn(combinedMessage); - case DEBUG -> logger.debug(combinedMessage); - case TRACE -> logger.trace(combinedMessage); - default -> logger.info(combinedMessage); - } - } - - // Human-readable byte size from - // https://stackoverflow.com/questions/3758606/how-can-i-convert-byte-size-into-a-human-readable-format-in-java - @SuppressWarnings("PMD.AvoidReassigningParameters") - private String humanReadableByteCountSI(long bytes) { - if (-1000 < bytes && bytes < 1000) { - return bytes + " B"; - } - final CharacterIterator ci = new StringCharacterIterator("kMGTPE"); - while (bytes <= -999_950 || bytes >= 999_950) { - bytes /= 1000; - ci.next(); - } - return String.format("%.1f %cB", bytes / 1000.0, ci.current()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/HeartbeatMonitor.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/HeartbeatMonitor.java deleted file mode 100644 index ed34ed97481f1..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/HeartbeatMonitor.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.internal; - -import com.google.common.annotations.VisibleForTesting; -import java.time.Duration; -import java.time.Instant; -import java.util.concurrent.atomic.AtomicReference; -import java.util.function.Supplier; - -/** - * Tracks heartbeats and, when asked, says if it has been too long since the last heartbeat. He's - * dead Jim! - * - * It is ThreadSafe. - */ -public class HeartbeatMonitor { - - private final Duration heartBeatFreshDuration; - private final Supplier nowSupplier; - private final AtomicReference lastBeat; - - public HeartbeatMonitor(final Duration heartBeatFreshDuration) { - this(heartBeatFreshDuration, Instant::now); - } - - @VisibleForTesting - public HeartbeatMonitor(final Duration heartBeatFreshDuration, final Supplier nowSupplier) { - this.heartBeatFreshDuration = heartBeatFreshDuration; - this.nowSupplier = nowSupplier; - this.lastBeat = new AtomicReference<>(null); - } - - /** - * Register a heartbeat - */ - public void beat() { - lastBeat.set(nowSupplier.get()); - } - - /** - * - * @return true if the last heartbeat is still "fresh". i.e. time since last heartbeat is less than - * heartBeatFreshDuration. otherwise, false. - */ - public boolean isBeating() { - final Instant instantFetched = lastBeat.get(); - final Instant now = nowSupplier.get(); - return instantFetched != null && instantFetched.plus(heartBeatFreshDuration).isAfter(now); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/normalization/DefaultNormalizationRunner.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/normalization/DefaultNormalizationRunner.java deleted file mode 100644 index c85be16f5738c..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/normalization/DefaultNormalizationRunner.java +++ /dev/null @@ -1,213 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.normalization; - -import static io.airbyte.workers.process.Metadata.JOB_TYPE_KEY; -import static io.airbyte.workers.process.Metadata.NORMALIZE_STEP; -import static io.airbyte.workers.process.Metadata.SYNC_JOB; -import static io.airbyte.workers.process.Metadata.SYNC_STEP_KEY; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.base.Strings; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.io.IOs; -import io.airbyte.commons.io.LineGobbler; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.logging.LoggingHelper.Color; -import io.airbyte.commons.logging.MdcScope.Builder; -import io.airbyte.configoss.OperatorDbt; -import io.airbyte.configoss.ResourceRequirements; -import io.airbyte.protocol.models.AirbyteErrorTraceMessage; -import io.airbyte.protocol.models.AirbyteErrorTraceMessage.FailureType; -import io.airbyte.protocol.models.AirbyteMessage; -import io.airbyte.protocol.models.AirbyteMessage.Type; -import io.airbyte.protocol.models.AirbyteTraceMessage; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import io.airbyte.workers.TestHarnessUtils; -import io.airbyte.workers.WorkerConstants; -import io.airbyte.workers.exception.TestHarnessException; -import io.airbyte.workers.process.ProcessFactory; -import java.io.InputStream; -import java.nio.file.Path; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class DefaultNormalizationRunner implements NormalizationRunner { - - private static final Logger LOGGER = LoggerFactory.getLogger(DefaultNormalizationRunner.class); - private static final Builder CONTAINER_LOG_MDC_BUILDER = new Builder() - .setLogPrefix("normalization") - .setPrefixColor(Color.GREEN_BACKGROUND); - - private final String normalizationIntegrationType; - private final ProcessFactory processFactory; - private final String normalizationImageName; - private final NormalizationAirbyteStreamFactory streamFactory = new NormalizationAirbyteStreamFactory(CONTAINER_LOG_MDC_BUILDER); - private Map> airbyteMessagesByType; - private String dbtErrorStack; - - private Process process = null; - - public DefaultNormalizationRunner(final ProcessFactory processFactory, - final String normalizationImage, - final String normalizationIntegrationType) { - this.processFactory = processFactory; - normalizationImageName = normalizationImage; - this.normalizationIntegrationType = normalizationIntegrationType; - } - - @Override - public boolean configureDbt(final String jobId, - final int attempt, - final Path jobRoot, - final JsonNode config, - final ResourceRequirements resourceRequirements, - final OperatorDbt dbtConfig) - throws Exception { - final Map files = ImmutableMap.of( - WorkerConstants.DESTINATION_CONFIG_JSON_FILENAME, Jsons.serialize(config)); - final String gitRepoUrl = dbtConfig.getGitRepoUrl(); - if (Strings.isNullOrEmpty(gitRepoUrl)) { - throw new TestHarnessException("Git Repo Url is required"); - } - final String gitRepoBranch = dbtConfig.getGitRepoBranch(); - if (Strings.isNullOrEmpty(gitRepoBranch)) { - return runProcess(jobId, attempt, jobRoot, files, resourceRequirements, "configure-dbt", - "--integration-type", normalizationIntegrationType.toLowerCase(), - "--config", WorkerConstants.DESTINATION_CONFIG_JSON_FILENAME, - "--git-repo", gitRepoUrl); - } else { - return runProcess(jobId, attempt, jobRoot, files, resourceRequirements, "configure-dbt", - "--integration-type", normalizationIntegrationType.toLowerCase(), - "--config", WorkerConstants.DESTINATION_CONFIG_JSON_FILENAME, - "--git-repo", gitRepoUrl, - "--git-branch", gitRepoBranch); - } - } - - @Override - public boolean normalize(final String jobId, - final int attempt, - final Path jobRoot, - final JsonNode config, - final ConfiguredAirbyteCatalog catalog, - final ResourceRequirements resourceRequirements) - throws Exception { - final Map files = ImmutableMap.of( - WorkerConstants.DESTINATION_CONFIG_JSON_FILENAME, Jsons.serialize(config), - WorkerConstants.DESTINATION_CATALOG_JSON_FILENAME, Jsons.serialize(catalog)); - - return runProcess(jobId, attempt, jobRoot, files, resourceRequirements, "run", - "--integration-type", normalizationIntegrationType.toLowerCase(), - "--config", WorkerConstants.DESTINATION_CONFIG_JSON_FILENAME, - "--catalog", WorkerConstants.DESTINATION_CATALOG_JSON_FILENAME); - } - - @SuppressWarnings("PMD.AvoidLiteralsInIfCondition") - private boolean runProcess(final String jobId, - final int attempt, - final Path jobRoot, - final Map files, - final ResourceRequirements resourceRequirements, - final String... args) - throws Exception { - try { - LOGGER.info("Running with normalization version: {}", normalizationImageName); - process = processFactory.create( - NORMALIZE_STEP, - jobId, - attempt, - jobRoot, - normalizationImageName, - // custom connector does not use normalization - false, - false, files, - null, - resourceRequirements, - null, - Map.of(JOB_TYPE_KEY, SYNC_JOB, SYNC_STEP_KEY, NORMALIZE_STEP), - Collections.emptyMap(), - Collections.emptyMap(), - Collections.emptyMap(), - args); - - try (final InputStream stdout = process.getInputStream()) { - // finds and collects any AirbyteMessages from stdout - // also builds a list of raw dbt errors and stores in streamFactory - airbyteMessagesByType = streamFactory.create(IOs.newBufferedReader(stdout)) - .collect(Collectors.groupingBy(AirbyteMessage::getType)); - - // picks up error logs from dbt - dbtErrorStack = String.join("\n", streamFactory.getDbtErrors()); - - if (!"".equals(dbtErrorStack)) { - final AirbyteMessage dbtTraceMessage = new AirbyteMessage() - .withType(Type.TRACE) - .withTrace(new AirbyteTraceMessage() - .withType(AirbyteTraceMessage.Type.ERROR) - .withEmittedAt((double) System.currentTimeMillis()) - .withError(new AirbyteErrorTraceMessage() - .withFailureType(FailureType.SYSTEM_ERROR) // TODO: decide on best FailureType for this - .withMessage("Normalization failed during the dbt run. This may indicate a problem with the data itself.") - // due to the lack of consistent defining features in dbt errors we're injecting a breadcrumb to the - // stacktrace so we can confidently identify all dbt errors when parsing and sending to Sentry - // see dbt error examples: https://docs.getdbt.com/guides/legacy/debugging-errors for more context - .withStackTrace("AirbyteDbtError: \n".concat(dbtErrorStack)))); - - airbyteMessagesByType.putIfAbsent(Type.TRACE, List.of(dbtTraceMessage)); - } - } - LineGobbler.gobble(process.getErrorStream(), LOGGER::error, CONTAINER_LOG_MDC_BUILDER); - - TestHarnessUtils.wait(process); - - return process.exitValue() == 0; - } catch (final Exception e) { - // make sure we kill the process on failure to avoid zombies. - if (process != null) { - TestHarnessUtils.cancelProcess(process); - } - throw e; - } - } - - @Override - public void close() throws Exception { - if (process == null) { - return; - } - - LOGGER.info("Terminating normalization process..."); - TestHarnessUtils.gentleClose(process, 1, TimeUnit.MINUTES); - - /* - * After attempting to close the process check the following: - * - * Did the process actually terminate? If "yes", did it do so nominally? - */ - if (process.isAlive()) { - throw new TestHarnessException("Normalization process did not terminate after 1 minute."); - } else if (process.exitValue() != 0) { - throw new TestHarnessException("Normalization process did not terminate normally (exit code: " + process.exitValue() + ")"); - } else { - LOGGER.info("Normalization process successfully terminated."); - } - } - - @Override - public Stream getTraceMessages() { - if (airbyteMessagesByType != null && airbyteMessagesByType.get(Type.TRACE) != null) { - return airbyteMessagesByType.get(Type.TRACE).stream().map(AirbyteMessage::getTrace); - } - return Stream.empty(); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/normalization/NormalizationAirbyteStreamFactory.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/normalization/NormalizationAirbyteStreamFactory.java deleted file mode 100644 index c9f2cbf60a525..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/normalization/NormalizationAirbyteStreamFactory.java +++ /dev/null @@ -1,129 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.normalization; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.JsonNodeType; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.logging.MdcScope; -import io.airbyte.protocol.models.AirbyteLogMessage; -import io.airbyte.protocol.models.AirbyteMessage; -import io.airbyte.workers.internal.AirbyteStreamFactory; -import java.io.BufferedReader; -import java.util.ArrayList; -import java.util.List; -import java.util.Optional; -import java.util.stream.Stream; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Creates a stream from an input stream. The produced stream attempts to parse each line of the - * InputStream into a AirbyteMessage. If the line cannot be parsed into a AirbyteMessage it is - * assumed to be from dbt. dbt [error] messages are also parsed - * - *

    - * If a line starts with a AirbyteMessage and then has other characters after it, that - * AirbyteMessage will still be parsed. If there are multiple AirbyteMessage records on the same - * line, only the first will be parsed. - */ -@SuppressWarnings("PMD.MoreThanOneLogger") -public class NormalizationAirbyteStreamFactory implements AirbyteStreamFactory { - - private static final Logger LOGGER = LoggerFactory.getLogger(NormalizationAirbyteStreamFactory.class); - - private final MdcScope.Builder containerLogMdcBuilder; - private final Logger logger; - private final List dbtErrors = new ArrayList<>(); - - public NormalizationAirbyteStreamFactory(final MdcScope.Builder containerLogMdcBuilder) { - this(LOGGER, containerLogMdcBuilder); - } - - NormalizationAirbyteStreamFactory(final Logger logger, final MdcScope.Builder containerLogMdcBuilder) { - this.logger = logger; - this.containerLogMdcBuilder = containerLogMdcBuilder; - } - - @Override - public Stream create(final BufferedReader bufferedReader) { - return bufferedReader - .lines() - .flatMap(this::filterOutAndHandleNonJsonLines) - .flatMap(this::filterOutAndHandleNonAirbyteMessageLines) - // so now we are just left with AirbyteMessages - .filter(airbyteMessage -> { - final boolean isLog = airbyteMessage.getType() == AirbyteMessage.Type.LOG; - if (isLog) { - try (final var mdcScope = containerLogMdcBuilder.build()) { - internalLog(airbyteMessage.getLog()); - } - } - return !isLog; - }); - } - - private Stream filterOutAndHandleNonJsonLines(final String line) { - final Optional jsonLine = Jsons.tryDeserialize(line); - if (jsonLine.isEmpty()) { - // we log as info all the lines that are not valid json. - try (final var mdcScope = containerLogMdcBuilder.build()) { - logger.info(line); - // this is really hacky and vulnerable to picking up lines we don't want, - // however it is only for destinations that are using dbt version < 1.0. - // For v1 + we switch on JSON logging and parse those in the next block. - if (line.contains("[error]")) { - dbtErrors.add(line); - } - } - } - return jsonLine.stream(); - } - - private Stream filterOutAndHandleNonAirbyteMessageLines(final JsonNode jsonLine) { - final Optional m = Jsons.tryObject(jsonLine, AirbyteMessage.class); - if (m.isEmpty()) { - // valid JSON but not an AirbyteMessage, so we assume this is a dbt json log - try { - final String logLevel = (jsonLine.getNodeType() == JsonNodeType.NULL || jsonLine.get("level").isNull()) - ? "" - : jsonLine.get("level").asText(); - final String logMsg = jsonLine.get("msg").isNull() ? "" : jsonLine.get("msg").asText(); - try (final var mdcScope = containerLogMdcBuilder.build()) { - switch (logLevel) { - case "debug" -> logger.debug(logMsg); - case "info" -> logger.info(logMsg); - case "warn" -> logger.warn(logMsg); - case "error" -> logAndCollectErrorMessage(logMsg); - default -> logger.info(jsonLine.toPrettyString()); // this shouldn't happen but logging it to avoid hiding unexpected lines. - } - } - } catch (final Exception e) { - logger.info(jsonLine.toPrettyString()); - } - } - return m.stream(); - } - - private void logAndCollectErrorMessage(final String logMsg) { - logger.error(logMsg); - dbtErrors.add(logMsg); - } - - public List getDbtErrors() { - return dbtErrors; - } - - private void internalLog(final AirbyteLogMessage logMessage) { - switch (logMessage.getLevel()) { - case FATAL, ERROR -> logger.error(logMessage.getMessage()); - case WARN -> logger.warn(logMessage.getMessage()); - case DEBUG -> logger.debug(logMessage.getMessage()); - case TRACE -> logger.trace(logMessage.getMessage()); - default -> logger.info(logMessage.getMessage()); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/normalization/NormalizationRunner.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/normalization/NormalizationRunner.java deleted file mode 100644 index 5b8a13683b249..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/normalization/NormalizationRunner.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.normalization; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.configoss.OperatorDbt; -import io.airbyte.configoss.ResourceRequirements; -import io.airbyte.protocol.models.AirbyteTraceMessage; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import java.nio.file.Path; -import java.util.stream.Stream; - -public interface NormalizationRunner extends AutoCloseable { - - /** - * After this method is called, the caller must call close. Previous to this method being called a - * NormalizationRunner can be instantiated and not worry about close being called. - * - * @throws Exception - any exception thrown from normalization will be handled gracefully by the - * caller. - */ - default void start() throws Exception { - // no-op. - } - - /** - * Prepare a configured folder to run dbt commands from (similar to what is required by - * normalization models) However, this does not run the normalization file generation process or dbt - * at all. This is pulling files from a distant git repository instead of the dbt-project-template. - * - * @return true if configuration succeeded. otherwise false. - * @throws Exception - any exception thrown from configuration will be handled gracefully by the - * caller. - */ - boolean configureDbt(String jobId, - int attempt, - Path jobRoot, - JsonNode config, - ResourceRequirements resourceRequirements, - OperatorDbt dbtConfig) - throws Exception; - - /** - * Executes normalization of the data in the destination. - * - * @param jobId - id of the job that launched normalization - * @param attempt - current attempt - * @param jobRoot - root dir available for the runner to use. - * @param config - configuration for connecting to the destination - * @param catalog - the schema of the json blob in the destination. it is used normalize the blob - * into typed columns. - * @param resourceRequirements - * @return true of normalization succeeded. otherwise false. - * @throws Exception - any exception thrown from normalization will be handled gracefully by the - * caller. - */ - boolean normalize(String jobId, - int attempt, - Path jobRoot, - JsonNode config, - ConfiguredAirbyteCatalog catalog, - ResourceRequirements resourceRequirements) - throws Exception; - - Stream getTraceMessages(); - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/process/AirbyteIntegrationLauncher.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/process/AirbyteIntegrationLauncher.java deleted file mode 100644 index b8592a8e6c195..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/process/AirbyteIntegrationLauncher.java +++ /dev/null @@ -1,225 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.process; - -import static io.airbyte.workers.process.Metadata.CHECK_JOB; -import static io.airbyte.workers.process.Metadata.DISCOVER_JOB; -import static io.airbyte.workers.process.Metadata.JOB_TYPE_KEY; -import static io.airbyte.workers.process.Metadata.READ_STEP; -import static io.airbyte.workers.process.Metadata.SPEC_JOB; -import static io.airbyte.workers.process.Metadata.SYNC_JOB; -import static io.airbyte.workers.process.Metadata.SYNC_STEP_KEY; -import static io.airbyte.workers.process.Metadata.WRITE_STEP; - -import com.google.common.base.Preconditions; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import io.airbyte.commons.features.EnvVariableFeatureFlags; -import io.airbyte.commons.features.FeatureFlags; -import io.airbyte.configoss.AllowedHosts; -import io.airbyte.configoss.ResourceRequirements; -import io.airbyte.workers.exception.TestHarnessException; -import java.nio.file.Path; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -public class AirbyteIntegrationLauncher implements IntegrationLauncher { - - private static final String CONFIG = "--config"; - - private final String jobId; - private final int attempt; - private final String imageName; - private final ProcessFactory processFactory; - private final ResourceRequirements resourceRequirement; - private final FeatureFlags featureFlags; - - /** - * If true, launcher will use a separated isolated pool to run the job. - *

    - * At this moment, we put custom connector jobs into an isolated pool. - */ - private final boolean useIsolatedPool; - private final AllowedHosts allowedHosts; - - public AirbyteIntegrationLauncher(final String jobId, - final int attempt, - final String imageName, - final ProcessFactory processFactory, - final ResourceRequirements resourceRequirement, - final AllowedHosts allowedHosts, - final boolean useIsolatedPool, - final FeatureFlags featureFlags) { - this.jobId = jobId; - this.attempt = attempt; - this.imageName = imageName; - this.processFactory = processFactory; - this.resourceRequirement = resourceRequirement; - this.allowedHosts = allowedHosts; - this.featureFlags = featureFlags; - this.useIsolatedPool = useIsolatedPool; - } - - @Override - public Process spec(final Path jobRoot) throws TestHarnessException { - return processFactory.create( - SPEC_JOB, - jobId, - attempt, - jobRoot, - imageName, - useIsolatedPool, - false, - Collections.emptyMap(), - null, - resourceRequirement, - allowedHosts, - Map.of(JOB_TYPE_KEY, SPEC_JOB), - getWorkerMetadata(), - Collections.emptyMap(), - Collections.emptyMap(), - "spec"); - } - - @Override - public Process check(final Path jobRoot, final String configFilename, final String configContents) throws TestHarnessException { - return processFactory.create( - CHECK_JOB, - jobId, - attempt, - jobRoot, - imageName, - useIsolatedPool, - false, - ImmutableMap.of(configFilename, configContents), - null, - resourceRequirement, - allowedHosts, - Map.of(JOB_TYPE_KEY, CHECK_JOB), - getWorkerMetadata(), - Collections.emptyMap(), - Collections.emptyMap(), - "check", - CONFIG, configFilename); - } - - @Override - public Process discover(final Path jobRoot, final String configFilename, final String configContents) throws TestHarnessException { - return processFactory.create( - DISCOVER_JOB, - jobId, - attempt, - jobRoot, - imageName, - useIsolatedPool, - false, - ImmutableMap.of(configFilename, configContents), - null, - resourceRequirement, - allowedHosts, - Map.of(JOB_TYPE_KEY, DISCOVER_JOB), - getWorkerMetadata(), - Collections.emptyMap(), - Collections.emptyMap(), - "discover", - CONFIG, configFilename); - } - - @Override - public Process read(final Path jobRoot, - final String configFilename, - final String configContents, - final String catalogFilename, - final String catalogContents, - final String stateFilename, - final String stateContents) - throws TestHarnessException { - final List arguments = Lists.newArrayList( - "read", - CONFIG, configFilename, - "--catalog", catalogFilename); - - final Map files = new HashMap<>(); - files.put(configFilename, configContents); - files.put(catalogFilename, catalogContents); - - if (stateFilename != null) { - arguments.add("--state"); - arguments.add(stateFilename); - - Preconditions.checkNotNull(stateContents); - files.put(stateFilename, stateContents); - } - - return processFactory.create( - READ_STEP, - jobId, - attempt, - jobRoot, - imageName, - useIsolatedPool, - false, - files, - null, - resourceRequirement, - allowedHosts, - Map.of(JOB_TYPE_KEY, SYNC_JOB, SYNC_STEP_KEY, READ_STEP), - getWorkerMetadata(), - Collections.emptyMap(), - Collections.emptyMap(), - arguments.toArray(new String[arguments.size()])); - } - - @Override - public Process write(final Path jobRoot, - final String configFilename, - final String configContents, - final String catalogFilename, - final String catalogContents, - final Map additionalEnvironmentVariables) - throws TestHarnessException { - final Map files = ImmutableMap.of( - configFilename, configContents, - catalogFilename, catalogContents); - - return processFactory.create( - WRITE_STEP, - jobId, - attempt, - jobRoot, - imageName, - useIsolatedPool, - true, - files, - null, - resourceRequirement, - allowedHosts, - Map.of(JOB_TYPE_KEY, SYNC_JOB, SYNC_STEP_KEY, WRITE_STEP), - getWorkerMetadata(), - Collections.emptyMap(), - additionalEnvironmentVariables, - "write", - CONFIG, configFilename, - "--catalog", catalogFilename); - } - - private Map getWorkerMetadata() { - // We've managed to exceed the maximum number of parameters for Map.of(), so use a builder + convert - // back to hashmap - return Maps.newHashMap( - ImmutableMap.builder() - .put("WORKER_CONNECTOR_IMAGE", imageName) - .put("WORKER_JOB_ID", jobId) - .put("WORKER_JOB_ATTEMPT", String.valueOf(attempt)) - .put(EnvVariableFeatureFlags.AUTO_DETECT_SCHEMA, String.valueOf(featureFlags.autoDetectSchema())) - .put(EnvVariableFeatureFlags.APPLY_FIELD_SELECTION, String.valueOf(featureFlags.applyFieldSelection())) - .put(EnvVariableFeatureFlags.FIELD_SELECTION_WORKSPACES, featureFlags.fieldSelectionWorkspaces()) - .build()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/process/DockerProcessFactory.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/process/DockerProcessFactory.java deleted file mode 100644 index 115967bca1eab..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/process/DockerProcessFactory.java +++ /dev/null @@ -1,228 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.process; - -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Joiner; -import com.google.common.base.Strings; -import com.google.common.collect.Lists; -import io.airbyte.commons.io.IOs; -import io.airbyte.commons.io.LineGobbler; -import io.airbyte.commons.map.MoreMaps; -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.configoss.AllowedHosts; -import io.airbyte.configoss.ResourceRequirements; -import io.airbyte.workers.TestHarnessUtils; -import io.airbyte.workers.exception.TestHarnessException; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.concurrent.TimeUnit; -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class DockerProcessFactory implements ProcessFactory { - - private static final Logger LOGGER = LoggerFactory.getLogger(DockerProcessFactory.class); - private static final int DOCKER_NAME_LEN_LIMIT = 128; - - private static final Path DATA_MOUNT_DESTINATION = Path.of("/data"); - private static final Path LOCAL_MOUNT_DESTINATION = Path.of("/local"); - private static final String IMAGE_EXISTS_SCRIPT = "image_exists.sh"; - - private final String workspaceMountSource; - private final Path workspaceRoot; - private final String localMountSource; - private final String networkName; - private final Map envMap; - private final Path imageExistsScriptPath; - - /** - * Used to construct a Docker process. - * - * @param workspaceRoot real root of workspace - * @param workspaceMountSource workspace volume - * @param localMountSource local volume - * @param networkName docker network - * @param envMap - */ - public DockerProcessFactory(final Path workspaceRoot, - final String workspaceMountSource, - final String localMountSource, - final String networkName, - final Map envMap) { - this.workspaceRoot = workspaceRoot; - this.workspaceMountSource = workspaceMountSource; - this.localMountSource = localMountSource; - this.networkName = networkName; - this.envMap = envMap; - imageExistsScriptPath = prepareImageExistsScript(); - } - - private static Path prepareImageExistsScript() { - try { - final Path basePath = Files.createTempDirectory("scripts"); - final String scriptContents = MoreResources.readResource(IMAGE_EXISTS_SCRIPT); - final Path scriptPath = IOs.writeFile(basePath, IMAGE_EXISTS_SCRIPT, scriptContents); - if (!scriptPath.toFile().setExecutable(true)) { - throw new RuntimeException(String.format("Could not set %s to executable", scriptPath)); - } - return scriptPath; - } catch (final IOException e) { - throw new RuntimeException(e); - } - } - - @Override - public Process create(final String jobType, - final String jobId, - final int attempt, - final Path jobRoot, - final String imageName, - final boolean usesIsolatedPool, - final boolean usesStdin, - final Map files, - final String entrypoint, - final ResourceRequirements resourceRequirements, - final AllowedHosts allowedHosts, - final Map labels, - final Map jobMetadata, - final Map internalToExternalPorts, - final Map additionalEnvironmentVariables, - final String... args) - throws TestHarnessException { - try { - if (!checkImageExists(imageName)) { - throw new TestHarnessException("Could not find image: " + imageName); - } - - if (!jobRoot.toFile().exists()) { - Files.createDirectory(jobRoot); - } - - for (final Map.Entry file : files.entrySet()) { - IOs.writeFile(jobRoot, file.getKey(), file.getValue()); - } - - final List cmd = Lists.newArrayList( - "docker", - "run", - "--rm", - "--init", - "-i", - "-w", - rebasePath(jobRoot).toString(), // rebases the job root on the job data mount - "--log-driver", - "none"); - final String containerName = ProcessFactory.createProcessName(imageName, jobType, jobId, attempt, DOCKER_NAME_LEN_LIMIT); - LOGGER.info("Creating docker container = {} with resources {} and allowedHosts {}", containerName, resourceRequirements, allowedHosts); - cmd.add("--name"); - cmd.add(containerName); - cmd.addAll(localDebuggingOptions(containerName)); - - if (networkName != null) { - cmd.add("--network"); - cmd.add(networkName); - } - - if (workspaceMountSource != null) { - cmd.add("-v"); - cmd.add(String.format("%s:%s", workspaceMountSource, DATA_MOUNT_DESTINATION)); - } - - if (localMountSource != null) { - cmd.add("-v"); - cmd.add(String.format("%s:%s", localMountSource, LOCAL_MOUNT_DESTINATION)); - } - - final Map allEnvMap = MoreMaps.merge(jobMetadata, envMap, additionalEnvironmentVariables); - for (final Map.Entry envEntry : allEnvMap.entrySet()) { - cmd.add("-e"); - cmd.add(envEntry.getKey() + "=" + envEntry.getValue()); - } - - if (!Strings.isNullOrEmpty(entrypoint)) { - cmd.add("--entrypoint"); - cmd.add(entrypoint); - } - if (resourceRequirements != null) { - if (!Strings.isNullOrEmpty(resourceRequirements.getCpuLimit())) { - cmd.add(String.format("--cpus=%s", resourceRequirements.getCpuLimit())); - } - if (!Strings.isNullOrEmpty(resourceRequirements.getMemoryRequest())) { - cmd.add(String.format("--memory-reservation=%s", resourceRequirements.getMemoryRequest())); - } - if (!Strings.isNullOrEmpty(resourceRequirements.getMemoryLimit())) { - cmd.add(String.format("--memory=%s", resourceRequirements.getMemoryLimit())); - } - } - - cmd.add(imageName); - cmd.addAll(Arrays.asList(args)); - - LOGGER.info("Preparing command: {}", Joiner.on(" ").join(cmd)); - - return new ProcessBuilder(cmd).start(); - } catch (final IOException e) { - throw new TestHarnessException(e.getMessage(), e); - } - } - - /** - * !! ONLY FOR DEBUGGING, SHOULD NOT BE USED IN PRODUCTION !! If you set the DEBUG_CONTAINER_IMAGE - * environment variable, and it matches the image name of a spawned container, this method will add - * the necessary params to connect a debugger. For example, to enable this for - * `destination-bigquery` start the services locally with: ``` VERSION="dev" - * DEBUG_CONTAINER_IMAGE="destination-bigquery" docker compose -f docker-compose.yaml -f - * docker-compose.debug.yaml up ``` Additionally you may have to update the image version of your - * target image to 'dev' in the UI of your local airbyte platform. See the - * `docker-compose.debug.yaml` file for more context. - * - * @param containerName the name of the container which could be debugged. - * @return A list with debugging arguments or an empty list - */ - static List localDebuggingOptions(final String containerName) { - final boolean shouldAddDebuggerOptions = - Optional.ofNullable(System.getenv("DEBUG_CONTAINER_IMAGE")).filter(StringUtils::isNotEmpty) - .map(imageName -> ProcessFactory.extractShortImageName(containerName).startsWith(imageName)).orElse(false) - && Optional.ofNullable(System.getenv("DEBUG_CONTAINER_JAVA_OPTS")).isPresent(); - if (shouldAddDebuggerOptions) { - return List.of("-e", "JAVA_TOOL_OPTIONS=" + System.getenv("DEBUG_CONTAINER_JAVA_OPTS"), "-p5005:5005"); - } else { - return Collections.emptyList(); - } - } - - private Path rebasePath(final Path jobRoot) { - final Path relativePath = workspaceRoot.relativize(jobRoot); - return DATA_MOUNT_DESTINATION.resolve(relativePath); - } - - @VisibleForTesting - boolean checkImageExists(final String imageName) throws TestHarnessException { - try { - final Process process = new ProcessBuilder(imageExistsScriptPath.toString(), imageName).start(); - LineGobbler.gobble(process.getErrorStream(), LOGGER::error); - LineGobbler.gobble(process.getInputStream(), LOGGER::info); - - TestHarnessUtils.gentleClose(process, 10, TimeUnit.MINUTES); - - if (process.isAlive()) { - throw new TestHarnessException("Process to check if image exists is stuck. Exiting."); - } else { - return process.exitValue() == 0; - } - } catch (final IOException e) { - throw new RuntimeException(e); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/process/IntegrationLauncher.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/process/IntegrationLauncher.java deleted file mode 100644 index c2f11905dc73b..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/process/IntegrationLauncher.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.process; - -import io.airbyte.workers.exception.TestHarnessException; -import java.nio.file.Path; -import java.util.Map; - -/** - * This interface provides an abstraction for launching a container that implements the Airbyte - * Protocol. Such containers implement each method that is defined in the Protocol. This class, - * provides java methods to invoke the methods on these containers. - * - * Each method takes in a jobRoot that is a directory where the worker that runs the method can use - * as temporary file system storage. - */ -public interface IntegrationLauncher { - - Process spec(final Path jobRoot) throws TestHarnessException; - - Process check(final Path jobRoot, final String configFilename, final String configContents) throws TestHarnessException; - - Process discover(final Path jobRoot, final String configFilename, final String configContents) throws TestHarnessException; - - Process read(final Path jobRoot, - final String configFilename, - final String configContents, - final String catalogFilename, - final String catalogContents, - final String stateFilename, - final String stateContents) - throws TestHarnessException; - - default Process read(final Path jobRoot, - final String configFilename, - final String configContents, - final String catalogFilename, - final String catalogContents) - throws TestHarnessException { - return read(jobRoot, configFilename, configContents, catalogFilename, catalogContents, null, null); - } - - Process write(final Path jobRoot, - final String configFilename, - final String configContents, - final String catalogFilename, - final String catalogContents, - final Map additionalEnvironmentVariables) - throws TestHarnessException; - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/process/Metadata.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/process/Metadata.java deleted file mode 100644 index 6743c5ad8653e..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/process/Metadata.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.process; - -/** - * The following variables help, either via names or labels, add metadata to processes actually - * running operations to ease operations. - */ -public final class Metadata { - - /** - * General Metadata - */ - static final String JOB_LABEL_KEY = "job_id"; - static final String ATTEMPT_LABEL_KEY = "attempt_id"; - static final String WORKER_POD_LABEL_KEY = "airbyte"; - static final String WORKER_POD_LABEL_VALUE = "job-pod"; - public static final String CONNECTION_ID_LABEL_KEY = "connection_id"; - - /** - * These are more readable forms of {@link io.airbyte.config.JobTypeResourceLimit.JobType}. - */ - public static final String JOB_TYPE_KEY = "job_type"; - public static final String SYNC_JOB = "sync"; - public static final String SPEC_JOB = "spec"; - public static final String CHECK_JOB = "check"; - public static final String DISCOVER_JOB = "discover"; - - /** - * A sync job can actually be broken down into the following steps. Try to be as precise as possible - * with naming/labels to help operations. - */ - public static final String SYNC_STEP_KEY = "sync_step"; - public static final String READ_STEP = "read"; - public static final String WRITE_STEP = "write"; - public static final String NORMALIZE_STEP = "normalize"; - public static final String CUSTOM_STEP = "custom"; - public static final String ORCHESTRATOR_STEP = "orchestrator"; - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/process/ProcessFactory.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/process/ProcessFactory.java deleted file mode 100644 index 6b88927abf9f6..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/process/ProcessFactory.java +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.process; - -import io.airbyte.configoss.AllowedHosts; -import io.airbyte.configoss.ResourceRequirements; -import io.airbyte.workers.exception.TestHarnessException; -import java.nio.file.Path; -import java.util.Map; -import java.util.regex.Matcher; -import java.util.regex.Pattern; -import org.apache.commons.lang3.RandomStringUtils; - -public interface ProcessFactory { - - String VERSION_DELIMITER = ":"; - String DOCKER_DELIMITER = "/"; - Pattern ALPHABETIC = Pattern.compile("[a-zA-Z]+"); - - /** - * Creates a ProcessBuilder to run a program in a new Process. - * - * @param jobType type of job to add to name for easier operational processes. - * @param jobId job Id - * @param attempt attempt Id - * @param jobPath Workspace directory to run the process from. - * @param imageName Docker image name to start the process from. - * @param usesIsolatedPool whether to use isolated pool to run the jobs. - * @param files File name to contents map that will be written into the working dir of the process - * prior to execution. - * @param entrypoint If not null, the default entrypoint program of the docker image can be changed - * by this argument. - * @param resourceRequirements CPU and RAM to assign to the created process. - * @param labels Labels to assign to the created Kube pod, if any. Ignore for docker. - * @param jobMetadata Job metadata that will be passed to the created process as environment - * variables. - * @param additionalEnvironmentVariables - * @param args Arguments to pass to the docker image being run in the new process. - * @return ProcessBuilder object to run the process. - * @throws TestHarnessException - */ - Process create(String jobType, - String jobId, - int attempt, - final Path jobPath, - final String imageName, - final boolean usesIsolatedPool, - final boolean usesStdin, - final Map files, - final String entrypoint, - final ResourceRequirements resourceRequirements, - final AllowedHosts allowedHosts, - final Map labels, - final Map jobMetadata, - final Map portMapping, - final Map additionalEnvironmentVariables, - final String... args) - throws TestHarnessException; - - /** - * Docker image names are by convention separated by slashes. The last portion is the image's name. - * This is followed by a colon and a version number. e.g. airbyte/scheduler:v1 or - * gcr.io/my-project/image-name:v2. - * - * With these two facts, attempt to construct a unique process name with the image name present that - * can be used by the factories implementing this interface for easier operations. - */ - static String createProcessName(final String fullImagePath, final String jobType, final String jobId, final int attempt, final int lenLimit) { - - var imageName = extractShortImageName(fullImagePath); - final var randSuffix = RandomStringUtils.randomAlphabetic(5).toLowerCase(); - final String suffix = jobType + "-" + jobId + "-" + attempt + "-" + randSuffix; - - var processName = imageName + "-" + suffix; - if (processName.length() > lenLimit) { - final var extra = processName.length() - lenLimit; - imageName = imageName.substring(extra); - processName = imageName + "-" + suffix; - } - - // Kubernetes pod names must start with an alphabetic character while Docker names accept - // alphanumeric. - // Use the stricter convention for simplicity. - final Matcher m = ALPHABETIC.matcher(processName); - // Since we add sync-UUID as a suffix a couple of lines up, there will always be a substring - // starting with an alphabetic character. - // If the image name is a no-op, this function should always return `sync-UUID` at the minimum. - m.find(); - return processName.substring(m.start()); - } - - /** - * Docker image names are by convention separated by slashes. The last portion is the image's name. - * This is followed by a colon and a version number. e.g. airbyte/scheduler:v1 or - * gcr.io/my-project/my-project:v2. - * - * @param fullImagePath the image name with repository and version ex - * gcr.io/my-project/image-name:v2 - * @return the image name without the repo and version, ex. image-name - */ - static String extractShortImageName(final String fullImagePath) { - final var noVersion = fullImagePath.split(VERSION_DELIMITER)[0]; - - final var nameParts = noVersion.split(DOCKER_DELIMITER); - return nameParts[nameParts.length - 1]; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/test_utils/AirbyteMessageUtils.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/test_utils/AirbyteMessageUtils.java deleted file mode 100644 index 15c20de236bae..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/test_utils/AirbyteMessageUtils.java +++ /dev/null @@ -1,175 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.test_utils; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.AirbyteControlConnectorConfigMessage; -import io.airbyte.protocol.models.AirbyteControlMessage; -import io.airbyte.protocol.models.AirbyteErrorTraceMessage; -import io.airbyte.protocol.models.AirbyteEstimateTraceMessage; -import io.airbyte.protocol.models.AirbyteGlobalState; -import io.airbyte.protocol.models.AirbyteLogMessage; -import io.airbyte.protocol.models.AirbyteMessage; -import io.airbyte.protocol.models.AirbyteMessage.Type; -import io.airbyte.protocol.models.AirbyteRecordMessage; -import io.airbyte.protocol.models.AirbyteStateMessage; -import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; -import io.airbyte.protocol.models.AirbyteStreamState; -import io.airbyte.protocol.models.AirbyteTraceMessage; -import io.airbyte.protocol.models.Config; -import io.airbyte.protocol.models.StreamDescriptor; -import java.time.Instant; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -public class AirbyteMessageUtils { - - public static AirbyteMessage createRecordMessage(final String tableName, - final JsonNode record, - final Instant timeExtracted) { - - return new AirbyteMessage() - .withType(Type.RECORD) - .withRecord(new AirbyteRecordMessage() - .withData(record) - .withStream(tableName) - .withEmittedAt(timeExtracted.getEpochSecond())); - } - - public static AirbyteMessage createLogMessage(final AirbyteLogMessage.Level level, - final String message) { - - return new AirbyteMessage() - .withType(Type.LOG) - .withLog(new AirbyteLogMessage() - .withLevel(level) - .withMessage(message)); - } - - public static AirbyteMessage createRecordMessage(final String tableName, - final String key, - final String value) { - return createRecordMessage(tableName, ImmutableMap.of(key, value)); - } - - public static AirbyteMessage createRecordMessage(final String tableName, - final String key, - final Integer value) { - return createRecordMessage(tableName, ImmutableMap.of(key, value)); - } - - public static AirbyteMessage createRecordMessage(final String tableName, - final Map record) { - return createRecordMessage(tableName, Jsons.jsonNode(record), Instant.EPOCH); - } - - public static AirbyteMessage createRecordMessage(final String streamName, final int recordData) { - return new AirbyteMessage() - .withType(Type.RECORD) - .withRecord(new AirbyteRecordMessage().withStream(streamName).withData(Jsons.jsonNode(recordData))); - } - - public static AirbyteMessage createStateMessage(final int stateData) { - return new AirbyteMessage() - .withType(Type.STATE) - .withState(new AirbyteStateMessage().withData(Jsons.jsonNode(stateData))); - } - - public static AirbyteMessage createStateMessage(final String key, final String value) { - return new AirbyteMessage() - .withType(Type.STATE) - .withState(new AirbyteStateMessage().withData(Jsons.jsonNode(ImmutableMap.of(key, value)))); - } - - public static AirbyteStateMessage createStreamStateMessage(final String streamName, final int stateData) { - return new AirbyteStateMessage() - .withType(AirbyteStateType.STREAM) - .withStream(createStreamState(streamName).withStreamState(Jsons.jsonNode(stateData))); - } - - public static AirbyteMessage createGlobalStateMessage(final int stateData, final String... streamNames) { - final List streamStates = new ArrayList<>(); - for (final String streamName : streamNames) { - streamStates.add(createStreamState(streamName).withStreamState(Jsons.jsonNode(stateData))); - } - return new AirbyteMessage() - .withType(Type.STATE) - .withState(new AirbyteStateMessage().withType(AirbyteStateType.GLOBAL).withGlobal(new AirbyteGlobalState().withStreamStates(streamStates))); - } - - public static AirbyteStreamState createStreamState(final String streamName) { - return new AirbyteStreamState().withStreamDescriptor(new StreamDescriptor().withName(streamName)); - } - - public static AirbyteMessage createStreamEstimateMessage(final String name, final String namespace, final long byteEst, final long rowEst) { - return createEstimateMessage(AirbyteEstimateTraceMessage.Type.STREAM, name, namespace, byteEst, rowEst); - } - - public static AirbyteMessage createSyncEstimateMessage(final long byteEst, final long rowEst) { - return createEstimateMessage(AirbyteEstimateTraceMessage.Type.SYNC, null, null, byteEst, rowEst); - } - - public static AirbyteMessage createEstimateMessage(AirbyteEstimateTraceMessage.Type type, - final String name, - final String namespace, - final long byteEst, - final long rowEst) { - final var est = new AirbyteEstimateTraceMessage() - .withType(type) - .withByteEstimate(byteEst) - .withRowEstimate(rowEst); - - if (name != null) { - est.withName(name); - } - if (namespace != null) { - est.withNamespace(namespace); - } - - return new AirbyteMessage() - .withType(Type.TRACE) - .withTrace(new AirbyteTraceMessage().withType(AirbyteTraceMessage.Type.ESTIMATE) - .withEstimate(est)); - } - - public static AirbyteMessage createErrorMessage(final String message, final Double emittedAt) { - return new AirbyteMessage() - .withType(Type.TRACE) - .withTrace(createErrorTraceMessage(message, emittedAt)); - } - - public static AirbyteTraceMessage createErrorTraceMessage(final String message, final Double emittedAt) { - return createErrorTraceMessage(message, emittedAt, null); - } - - public static AirbyteTraceMessage createErrorTraceMessage(final String message, - final Double emittedAt, - final AirbyteErrorTraceMessage.FailureType failureType) { - final var msg = new AirbyteTraceMessage() - .withType(AirbyteTraceMessage.Type.ERROR) - .withError(new AirbyteErrorTraceMessage().withMessage(message)) - .withEmittedAt(emittedAt); - - if (failureType != null) { - msg.getError().withFailureType(failureType); - } - - return msg; - } - - public static AirbyteMessage createConfigControlMessage(final Config config, final Double emittedAt) { - return new AirbyteMessage() - .withType(Type.CONTROL) - .withControl(new AirbyteControlMessage() - .withEmittedAt(emittedAt) - .withType(AirbyteControlMessage.Type.CONNECTOR_CONFIG) - .withConnectorConfig(new AirbyteControlConnectorConfigMessage() - .withConfig(config))); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/test_utils/TestConfigHelpers.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/test_utils/TestConfigHelpers.java deleted file mode 100644 index 338ee87b17118..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/test_utils/TestConfigHelpers.java +++ /dev/null @@ -1,129 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.test_utils; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.json.Jsons; -import io.airbyte.configoss.DestinationConnection; -import io.airbyte.configoss.JobSyncConfig.NamespaceDefinitionType; -import io.airbyte.configoss.OperatorDbt; -import io.airbyte.configoss.OperatorNormalization; -import io.airbyte.configoss.OperatorNormalization.Option; -import io.airbyte.configoss.SourceConnection; -import io.airbyte.configoss.StandardSyncInput; -import io.airbyte.configoss.StandardSyncOperation; -import io.airbyte.configoss.StandardSyncOperation.OperatorType; -import io.airbyte.configoss.State; -import io.airbyte.protocol.models.CatalogHelpers; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.Field; -import io.airbyte.protocol.models.JsonSchemaType; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import org.apache.commons.lang3.tuple.ImmutablePair; - -public class TestConfigHelpers { - - private static final String CONNECTION_NAME = "favorite_color_pipe"; - private static final String STREAM_NAME = "user_preferences"; - private static final String FIELD_NAME = "favorite_color"; - private static final long LAST_SYNC_TIME = 1598565106; - - public static ImmutablePair createSyncConfig() { - return createSyncConfig(false); - } - - public static ImmutablePair createSyncConfig(final Boolean multipleNamespaces) { - final UUID workspaceId = UUID.randomUUID(); - final UUID sourceDefinitionId = UUID.randomUUID(); - final UUID sourceId = UUID.randomUUID(); - final UUID destinationDefinitionId = UUID.randomUUID(); - final UUID destinationId = UUID.randomUUID(); - final UUID normalizationOperationId = UUID.randomUUID(); - final UUID dbtOperationId = UUID.randomUUID(); - - final JsonNode sourceConnection = - Jsons.jsonNode( - Map.of( - "apiKey", "123", - "region", "us-east")); - - final JsonNode destinationConnection = - Jsons.jsonNode( - Map.of( - "username", "airbyte", - "token", "anau81b")); - - final SourceConnection sourceConnectionConfig = new SourceConnection() - .withConfiguration(sourceConnection) - .withWorkspaceId(workspaceId) - .withSourceDefinitionId(sourceDefinitionId) - .withSourceId(sourceId) - .withTombstone(false); - - final DestinationConnection destinationConnectionConfig = new DestinationConnection() - .withConfiguration(destinationConnection) - .withWorkspaceId(workspaceId) - .withDestinationDefinitionId(destinationDefinitionId) - .withDestinationId(destinationId) - .withTombstone(false); - - final StandardSyncOperation normalizationOperation = new StandardSyncOperation() - .withOperationId(normalizationOperationId) - .withName("Normalization") - .withOperatorType(OperatorType.NORMALIZATION) - .withOperatorNormalization(new OperatorNormalization().withOption(Option.BASIC)) - .withTombstone(false); - - final StandardSyncOperation customDbtOperation = new StandardSyncOperation() - .withOperationId(dbtOperationId) - .withName("Custom Transformation") - .withOperatorType(OperatorType.DBT) - .withOperatorDbt(new OperatorDbt() - .withDockerImage("docker") - .withDbtArguments("--help") - .withGitRepoUrl("git url") - .withGitRepoBranch("git url")) - .withTombstone(false); - - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog(); - if (multipleNamespaces) { - final ConfiguredAirbyteStream streamOne = new ConfiguredAirbyteStream() - .withStream(CatalogHelpers.createAirbyteStream(STREAM_NAME, "namespace", Field.of(FIELD_NAME, JsonSchemaType.STRING))); - final ConfiguredAirbyteStream streamTwo = new ConfiguredAirbyteStream() - .withStream(CatalogHelpers.createAirbyteStream(STREAM_NAME, "namespace2", Field.of(FIELD_NAME, JsonSchemaType.STRING))); - - final List streams = List.of(streamOne, streamTwo); - catalog.withStreams(streams); - - } else { - final ConfiguredAirbyteStream stream = new ConfiguredAirbyteStream() - .withStream(CatalogHelpers.createAirbyteStream(STREAM_NAME, Field.of(FIELD_NAME, JsonSchemaType.STRING))); - catalog.withStreams(Collections.singletonList(stream)); - } - - final String stateValue = Jsons.serialize(Map.of("lastSync", String.valueOf(LAST_SYNC_TIME))); - - final State state = new State().withState(Jsons.jsonNode(stateValue)); - - final StandardSyncInput syncInput = new StandardSyncInput() - .withNamespaceDefinition(NamespaceDefinitionType.SOURCE) - .withPrefix(CONNECTION_NAME) - .withSourceId(sourceId) - .withDestinationId(destinationId) - .withDestinationConfiguration(destinationConnectionConfig.getConfiguration()) - .withCatalog(catalog) - .withSourceConfiguration(sourceConnectionConfig.getConfiguration()) - .withState(state) - .withOperationSequence(List.of(normalizationOperation, customDbtOperation)) - .withWorkspaceId(workspaceId); - - return new ImmutablePair<>(null, syncInput); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/TestHarness.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/TestHarness.kt new file mode 100644 index 0000000000000..6d57c4c397f63 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/TestHarness.kt @@ -0,0 +1,21 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers + +import io.airbyte.workers.exception.TestHarnessException +import java.nio.file.Path + +interface TestHarness { + /** + * Blocking call to run the worker's workflow. Once this is complete, getStatus should return + * either COMPLETE, FAILED, or CANCELLED. + */ + @Throws(TestHarnessException::class) fun run(inputType: InputType, jobRoot: Path): OutputType + + /** + * Cancels in-progress workers. Although all workers support cancel, in reality only the + * asynchronous [DefaultReplicationWorker]'s cancel is used. + */ + fun cancel() +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/TestHarnessUtils.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/TestHarnessUtils.kt new file mode 100644 index 0000000000000..f8672a0efbb68 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/TestHarnessUtils.kt @@ -0,0 +1,221 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.commons.io.IOs +import io.airbyte.commons.json.Jsons +import io.airbyte.configoss.* +import io.airbyte.protocol.models.* +import io.airbyte.workers.exception.TestHarnessException +import io.airbyte.workers.helper.FailureHelper +import io.airbyte.workers.internal.AirbyteStreamFactory +import java.io.* +import java.nio.charset.StandardCharsets +import java.time.Duration +import java.time.temporal.ChronoUnit +import java.util.* +import java.util.concurrent.TimeUnit +import java.util.stream.Collectors +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +// TODO:(Issue-4824): Figure out how to log Docker process information. +object TestHarnessUtils { + private val LOGGER: Logger = LoggerFactory.getLogger(TestHarnessUtils::class.java) + + fun gentleClose(process: Process?, timeout: Long, timeUnit: TimeUnit?) { + if (process == null) { + return + } + + if (process.info() != null) { + process.info().commandLine().ifPresent { commandLine: String? -> + LOGGER.debug("Gently closing process {}", commandLine) + } + } + + try { + if (process.isAlive) { + process.waitFor(timeout, timeUnit) + } + } catch (e: InterruptedException) { + LOGGER.error("Exception while while waiting for process to finish", e) + } + + if (process.isAlive) { + closeProcess(process, Duration.of(1, ChronoUnit.MINUTES)) + } + } + + fun closeProcess(process: Process?, lastChanceDuration: Duration) { + if (process == null) { + return + } + try { + process.destroy() + process.waitFor(lastChanceDuration.toMillis(), TimeUnit.MILLISECONDS) + if (process.isAlive) { + LOGGER.warn( + "Process is still alive after calling destroy. Attempting to destroy forcibly..." + ) + process.destroyForcibly() + } + } catch (e: InterruptedException) { + LOGGER.error("Exception when closing process.", e) + } + } + + fun wait(process: Process) { + try { + process.waitFor() + } catch (e: InterruptedException) { + LOGGER.error("Exception while while waiting for process to finish", e) + } + } + + fun cancelProcess(process: Process?) { + closeProcess(process, Duration.of(10, ChronoUnit.SECONDS)) + } + + /** + * Translates a StandardSyncInput into a WorkerSourceConfig. WorkerSourceConfig is a subset of + * StandardSyncInput. + */ + fun syncToWorkerSourceConfig(sync: StandardSyncInput): WorkerSourceConfig { + return WorkerSourceConfig() + .withSourceId(sync.sourceId) + .withSourceConnectionConfiguration(sync.sourceConfiguration) + .withCatalog(sync.catalog) + .withState(sync.state) + } + + /** + * Translates a StandardSyncInput into a WorkerDestinationConfig. WorkerDestinationConfig is a + * subset of StandardSyncInput. + */ + fun syncToWorkerDestinationConfig(sync: StandardSyncInput): WorkerDestinationConfig { + return WorkerDestinationConfig() + .withDestinationId(sync.destinationId) + .withDestinationConnectionConfiguration(sync.destinationConfiguration) + .withCatalog(sync.catalog) + .withState(sync.state) + } + + private fun getConnectorCommandFromOutputType( + outputType: ConnectorJobOutput.OutputType + ): FailureHelper.ConnectorCommand { + return when (outputType) { + ConnectorJobOutput.OutputType.SPEC -> FailureHelper.ConnectorCommand.SPEC + ConnectorJobOutput.OutputType.CHECK_CONNECTION -> FailureHelper.ConnectorCommand.CHECK + ConnectorJobOutput.OutputType.DISCOVER_CATALOG_ID -> + FailureHelper.ConnectorCommand.DISCOVER + } + } + + fun getMostRecentConfigControlMessage( + messagesByType: Map> + ): Optional { + return messagesByType + .getOrDefault(AirbyteMessage.Type.CONTROL, ArrayList()) + .stream() + .map { obj: AirbyteMessage -> obj.control } + .filter { control: AirbyteControlMessage -> + control.type == AirbyteControlMessage.Type.CONNECTOR_CONFIG + } + .map { obj: AirbyteControlMessage -> obj.connectorConfig } + .reduce { + first: AirbyteControlConnectorConfigMessage?, + second: AirbyteControlConnectorConfigMessage -> + second + } + } + + private fun getTraceMessageFromMessagesByType( + messagesByType: Map> + ): Optional { + return messagesByType + .getOrDefault(AirbyteMessage.Type.TRACE, ArrayList()) + .stream() + .map { obj: AirbyteMessage -> obj.trace } + .filter { trace: AirbyteTraceMessage -> trace.type == AirbyteTraceMessage.Type.ERROR } + .findFirst() + } + + fun getDidControlMessageChangeConfig( + initialConfigJson: JsonNode, + configMessage: AirbyteControlConnectorConfigMessage + ): Boolean { + val newConfig = configMessage.config + val newConfigJson = Jsons.jsonNode(newConfig) + return initialConfigJson != newConfigJson + } + + @Throws(IOException::class) + fun getMessagesByType( + process: Process, + streamFactory: AirbyteStreamFactory, + timeOut: Int + ): Map> { + val messagesByType: Map> + process.inputStream.use { stdout -> + messagesByType = + streamFactory + .create(IOs.newBufferedReader(stdout)) + .collect(Collectors.groupingBy { obj: AirbyteMessage -> obj.type }) + gentleClose(process, timeOut.toLong(), TimeUnit.MINUTES) + return messagesByType + } + } + + fun getJobFailureReasonFromMessages( + outputType: ConnectorJobOutput.OutputType, + messagesByType: Map> + ): Optional { + val traceMessage = getTraceMessageFromMessagesByType(messagesByType) + if (traceMessage.isPresent) { + val connectorCommand = getConnectorCommandFromOutputType(outputType) + return Optional.of( + FailureHelper.connectorCommandFailure( + traceMessage.get(), + null, + null, + connectorCommand + ) + ) + } else { + return Optional.empty() + } + } + + fun mapStreamNamesToSchemas( + syncInput: StandardSyncInput + ): Map { + return syncInput.catalog.streams.associate { + AirbyteStreamNameNamespacePair.fromAirbyteStream(it.stream) to it.stream.jsonSchema + } + } + + @Throws(IOException::class) + fun getStdErrFromErrorStream(errorStream: InputStream): String { + val reader = BufferedReader(InputStreamReader(errorStream, StandardCharsets.UTF_8)) + val errorOutput = StringBuilder() + var line: String? + while ((reader.readLine().also { line = it }) != null) { + errorOutput.append(line) + errorOutput.append(System.lineSeparator()) + } + return errorOutput.toString() + } + + @Throws(TestHarnessException::class, IOException::class) + fun throwWorkerException(errorMessage: String, process: Process) { + val stderr = getStdErrFromErrorStream(process.errorStream) + if (stderr.isEmpty()) { + throw TestHarnessException(errorMessage) + } else { + throw TestHarnessException("$errorMessage: \n$stderr") + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/WorkerConstants.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/WorkerConstants.kt new file mode 100644 index 0000000000000..8192437d7e43d --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/WorkerConstants.kt @@ -0,0 +1,17 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers + +object WorkerConstants { + const val SOURCE_CONFIG_JSON_FILENAME: String = "source_config.json" + const val DESTINATION_CONFIG_JSON_FILENAME: String = "destination_config.json" + + const val SOURCE_CATALOG_JSON_FILENAME: String = "source_catalog.json" + const val DESTINATION_CATALOG_JSON_FILENAME: String = "destination_catalog.json" + const val INPUT_STATE_JSON_FILENAME: String = "input_state.json" + + const val RESET_JOB_SOURCE_DOCKER_IMAGE_STUB: String = "airbyte_empty" + + const val WORKER_ENVIRONMENT: String = "WORKER_ENVIRONMENT" +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/exception/TestHarnessException.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/exception/TestHarnessException.kt new file mode 100644 index 0000000000000..15adcfdd2a068 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/exception/TestHarnessException.kt @@ -0,0 +1,10 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.exception + +class TestHarnessException : Exception { + constructor(message: String?) : super(message) + + constructor(message: String?, cause: Throwable?) : super(message, cause) +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/general/CheckConnectionTestHarness.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/general/CheckConnectionTestHarness.kt new file mode 100644 index 0000000000000..1015d2db5aebc --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/general/CheckConnectionTestHarness.kt @@ -0,0 +1,11 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.general + +import io.airbyte.configoss.ConnectorJobOutput +import io.airbyte.configoss.StandardCheckConnectionInput +import io.airbyte.workers.TestHarness + +interface CheckConnectionTestHarness : + TestHarness diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/general/DbtTransformationRunner.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/general/DbtTransformationRunner.kt new file mode 100644 index 0000000000000..3f78b196fc2ed --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/general/DbtTransformationRunner.kt @@ -0,0 +1,167 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.general + +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.base.Strings +import com.google.common.collect.ImmutableMap +import io.airbyte.commons.io.LineGobbler +import io.airbyte.commons.logging.LoggingHelper +import io.airbyte.commons.logging.MdcScope +import io.airbyte.commons.resources.MoreResources +import io.airbyte.configoss.OperatorDbt +import io.airbyte.configoss.ResourceRequirements +import io.airbyte.workers.TestHarnessUtils +import io.airbyte.workers.exception.TestHarnessException +import io.airbyte.workers.normalization.NormalizationRunner +import io.airbyte.workers.process.Metadata +import io.airbyte.workers.process.ProcessFactory +import java.nio.file.Path +import java.util.* +import java.util.concurrent.TimeUnit +import org.apache.tools.ant.types.Commandline +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +class DbtTransformationRunner( + private val processFactory: ProcessFactory, + private val normalizationRunner: NormalizationRunner +) : AutoCloseable { + private lateinit var process: Process + + @Throws(Exception::class) + fun start() { + normalizationRunner.start() + } + + /** + * The docker image used by the DbtTransformationRunner is provided by the User, so we can't + * ensure to have the right python, dbt, dependencies etc software installed to successfully run + * our transform-config scripts (to translate Airbyte Catalogs into Dbt profiles file). Thus, we + * depend on the NormalizationRunner to configure the dbt project with the appropriate + * destination settings and pull the custom git repository into the workspace. + * + * Once the workspace folder/files is setup to run, we invoke the custom transformation command + * as provided by the user to execute whatever extra transformation has been implemented. + */ + @Throws(Exception::class) + fun run( + jobId: String, + attempt: Int, + jobRoot: Path, + config: JsonNode?, + resourceRequirements: ResourceRequirements?, + dbtConfig: OperatorDbt + ): Boolean { + if ( + !normalizationRunner.configureDbt( + jobId, + attempt, + jobRoot, + config, + resourceRequirements, + dbtConfig + ) + ) { + return false + } + return transform(jobId, attempt, jobRoot, config, resourceRequirements, dbtConfig) + } + + @Throws(Exception::class) + fun transform( + jobId: String, + attempt: Int, + jobRoot: Path, + config: JsonNode?, + resourceRequirements: ResourceRequirements?, + dbtConfig: OperatorDbt + ): Boolean { + try { + val files: Map = + ImmutableMap.of( + DBT_ENTRYPOINT_SH, + MoreResources.readResource("dbt_transformation_entrypoint.sh"), + "sshtunneling.sh", + MoreResources.readResource("sshtunneling.sh") + ) + val dbtArguments: MutableList = ArrayList() + dbtArguments.add(DBT_ENTRYPOINT_SH) + if (Strings.isNullOrEmpty(dbtConfig.dbtArguments)) { + throw TestHarnessException("Dbt Arguments are required") + } + Collections.addAll( + dbtArguments, + *Commandline.translateCommandline(dbtConfig.dbtArguments) + ) + val process = + processFactory.create( + Metadata.CUSTOM_STEP, + jobId, + attempt, + jobRoot, + dbtConfig.dockerImage, + false, + false, + files, + "/bin/bash", + resourceRequirements, + null, + java.util.Map.of( + Metadata.JOB_TYPE_KEY, + Metadata.SYNC_JOB, + Metadata.SYNC_STEP_KEY, + Metadata.CUSTOM_STEP + ), + emptyMap(), + emptyMap(), + emptyMap(), + *dbtArguments.toTypedArray() + ) + this.process = process + LineGobbler.gobble( + process.inputStream, + { msg: String? -> LOGGER.info(msg) }, + CONTAINER_LOG_MDC_BUILDER + ) + LineGobbler.gobble( + process.errorStream, + { msg: String? -> LOGGER.error(msg) }, + CONTAINER_LOG_MDC_BUILDER + ) + + TestHarnessUtils.wait(process) + + return process.exitValue() == 0 + } catch (e: Exception) { + // make sure we kill the process on failure to avoid zombies. + process?.let { TestHarnessUtils.cancelProcess(process) } + throw e + } + } + + @Throws(Exception::class) + override fun close() { + normalizationRunner.close() + + if (process == null) { + return + } + + LOGGER.debug("Closing dbt transformation process") + TestHarnessUtils.gentleClose(process, 1, TimeUnit.MINUTES) + if (process!!.isAlive || process!!.exitValue() != 0) { + throw TestHarnessException("Dbt transformation process wasn't successful") + } + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(DbtTransformationRunner::class.java) + private const val DBT_ENTRYPOINT_SH = "entrypoint.sh" + private val CONTAINER_LOG_MDC_BUILDER: MdcScope.Builder = + MdcScope.Builder() + .setLogPrefix("dbt") + .setPrefixColor(LoggingHelper.Color.PURPLE_BACKGROUND) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/general/DefaultCheckConnectionTestHarness.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/general/DefaultCheckConnectionTestHarness.kt new file mode 100644 index 0000000000000..603c267b168c3 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/general/DefaultCheckConnectionTestHarness.kt @@ -0,0 +1,133 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.general + +import io.airbyte.commons.enums.Enums +import io.airbyte.commons.io.LineGobbler +import io.airbyte.commons.json.Jsons +import io.airbyte.configoss.* +import io.airbyte.protocol.models.AirbyteMessage +import io.airbyte.workers.TestHarnessUtils +import io.airbyte.workers.WorkerConstants +import io.airbyte.workers.exception.TestHarnessException +import io.airbyte.workers.helper.ConnectorConfigUpdater +import io.airbyte.workers.internal.AirbyteStreamFactory +import io.airbyte.workers.internal.DefaultAirbyteStreamFactory +import io.airbyte.workers.process.IntegrationLauncher +import java.nio.file.Path +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +class DefaultCheckConnectionTestHarness +@JvmOverloads +constructor( + private val integrationLauncher: IntegrationLauncher, + private val connectorConfigUpdater: ConnectorConfigUpdater, + private val streamFactory: AirbyteStreamFactory = DefaultAirbyteStreamFactory() +) : CheckConnectionTestHarness { + private lateinit var process: Process + + @Throws(TestHarnessException::class) + override fun run(input: StandardCheckConnectionInput, jobRoot: Path): ConnectorJobOutput { + LineGobbler.startSection("CHECK") + + try { + val inputConfig = input.connectionConfiguration + val process = + integrationLauncher.check( + jobRoot, + WorkerConstants.SOURCE_CONFIG_JSON_FILENAME, + Jsons.serialize(inputConfig) + ) + this.process = process + + val jobOutput = + ConnectorJobOutput().withOutputType(ConnectorJobOutput.OutputType.CHECK_CONNECTION) + + LineGobbler.gobble(process.errorStream, { msg: String? -> LOGGER.error(msg) }) + + val messagesByType = TestHarnessUtils.getMessagesByType(process, streamFactory, 30) + val connectionStatus = + messagesByType + .getOrDefault(AirbyteMessage.Type.CONNECTION_STATUS, ArrayList()) + .stream() + .map { obj: AirbyteMessage -> obj.connectionStatus } + .findFirst() + + if (input.actorId != null && input.actorType != null) { + val optionalConfigMsg = + TestHarnessUtils.getMostRecentConfigControlMessage(messagesByType) + if ( + optionalConfigMsg.isPresent && + TestHarnessUtils.getDidControlMessageChangeConfig( + inputConfig, + optionalConfigMsg.get() + ) + ) { + when (input.actorType) { + ActorType.SOURCE -> + connectorConfigUpdater.updateSource( + input.actorId, + optionalConfigMsg.get().config + ) + ActorType.DESTINATION -> + connectorConfigUpdater.updateDestination( + input.actorId, + optionalConfigMsg.get().config + ) + } + jobOutput.connectorConfigurationUpdated = true + } + } + + val failureReason = + TestHarnessUtils.getJobFailureReasonFromMessages( + ConnectorJobOutput.OutputType.CHECK_CONNECTION, + messagesByType + ) + failureReason.ifPresent { failureReason: FailureReason? -> + jobOutput.failureReason = failureReason + } + + val exitCode = process.exitValue() + if (exitCode != 0) { + LOGGER.warn("Check connection job subprocess finished with exit code {}", exitCode) + } + + if (connectionStatus.isPresent) { + val output = + StandardCheckConnectionOutput() + .withStatus( + Enums.convertTo( + connectionStatus.get().status, + StandardCheckConnectionOutput.Status::class.java + ) + ) + .withMessage(connectionStatus.get().message) + LOGGER.info("Check connection job received output: {}", output) + jobOutput.checkConnection = output + } else if (failureReason.isEmpty) { + TestHarnessUtils.throwWorkerException( + "Error checking connection status: no status nor failure reason were outputted", + process + ) + } + LineGobbler.endSection("CHECK") + return jobOutput + } catch (e: Exception) { + LOGGER.error("Unexpected error while checking connection: ", e) + LineGobbler.endSection("CHECK") + throw TestHarnessException("Unexpected error while getting checking connection.", e) + } + } + + override fun cancel() { + TestHarnessUtils.cancelProcess(process) + } + + companion object { + private val LOGGER: Logger = + LoggerFactory.getLogger(DefaultCheckConnectionTestHarness::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/general/DefaultDiscoverCatalogTestHarness.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/general/DefaultDiscoverCatalogTestHarness.kt new file mode 100644 index 0000000000000..e3075c6cebde5 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/general/DefaultDiscoverCatalogTestHarness.kt @@ -0,0 +1,148 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.general + +import io.airbyte.api.client.AirbyteApiClient +import io.airbyte.api.client.model.generated.SourceDiscoverSchemaWriteRequestBody +import io.airbyte.commons.io.LineGobbler +import io.airbyte.commons.json.Jsons +import io.airbyte.configoss.ConnectorJobOutput +import io.airbyte.configoss.StandardDiscoverCatalogInput +import io.airbyte.protocol.models.AirbyteCatalog +import io.airbyte.protocol.models.AirbyteMessage +import io.airbyte.workers.TestHarnessUtils +import io.airbyte.workers.WorkerConstants +import io.airbyte.workers.exception.TestHarnessException +import io.airbyte.workers.helper.CatalogClientConverters +import io.airbyte.workers.helper.ConnectorConfigUpdater +import io.airbyte.workers.internal.AirbyteStreamFactory +import io.airbyte.workers.internal.DefaultAirbyteStreamFactory +import io.airbyte.workers.process.IntegrationLauncher +import java.nio.file.Path +import java.util.* +import kotlin.concurrent.Volatile +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +class DefaultDiscoverCatalogTestHarness +@JvmOverloads +constructor( + private val airbyteApiClient: AirbyteApiClient, + private val integrationLauncher: IntegrationLauncher, + private val connectorConfigUpdater: ConnectorConfigUpdater, + private val streamFactory: AirbyteStreamFactory = DefaultAirbyteStreamFactory() +) : DiscoverCatalogTestHarness { + @Volatile private lateinit var process: Process + + @Throws(TestHarnessException::class) + override fun run( + discoverSchemaInput: StandardDiscoverCatalogInput, + jobRoot: Path + ): ConnectorJobOutput { + try { + val inputConfig = discoverSchemaInput.connectionConfiguration + process = + integrationLauncher.discover( + jobRoot, + WorkerConstants.SOURCE_CONFIG_JSON_FILENAME, + Jsons.serialize(inputConfig) + ) + + val jobOutput = + ConnectorJobOutput() + .withOutputType(ConnectorJobOutput.OutputType.DISCOVER_CATALOG_ID) + + LineGobbler.gobble(process.errorStream, { msg: String? -> LOGGER.error(msg) }) + + val messagesByType = TestHarnessUtils.getMessagesByType(process, streamFactory, 30) + + val catalog = + messagesByType + .getOrDefault(AirbyteMessage.Type.CATALOG, ArrayList()) + .stream() + .map { obj: AirbyteMessage -> obj.catalog } + .findFirst() + + val optionalConfigMsg = + TestHarnessUtils.getMostRecentConfigControlMessage(messagesByType) + if ( + optionalConfigMsg.isPresent && + TestHarnessUtils.getDidControlMessageChangeConfig( + inputConfig, + optionalConfigMsg.get() + ) + ) { + connectorConfigUpdater.updateSource( + UUID.fromString(discoverSchemaInput.sourceId), + optionalConfigMsg.get().config + ) + jobOutput.connectorConfigurationUpdated = true + } + + val failureReason = + TestHarnessUtils.getJobFailureReasonFromMessages( + ConnectorJobOutput.OutputType.DISCOVER_CATALOG_ID, + messagesByType + ) + failureReason.ifPresent { jobOutput.failureReason = it } + + val exitCode = process.exitValue() + if (exitCode != 0) { + LOGGER.warn("Discover job subprocess finished with exit codee {}", exitCode) + } + + if (catalog.isPresent) { + val result = + AirbyteApiClient.retryWithJitter( + { + airbyteApiClient.sourceApi.writeDiscoverCatalogResult( + buildSourceDiscoverSchemaWriteRequestBody( + discoverSchemaInput, + catalog.get() + ) + ) + }, + WRITE_DISCOVER_CATALOG_LOGS_TAG + )!! + jobOutput.discoverCatalogId = result.catalogId + } else if (failureReason.isEmpty) { + TestHarnessUtils.throwWorkerException( + "Integration failed to output a catalog struct and did not output a failure reason", + process + ) + } + return jobOutput + } catch (e: TestHarnessException) { + throw e + } catch (e: Exception) { + throw TestHarnessException("Error while discovering schema", e) + } + } + + private fun buildSourceDiscoverSchemaWriteRequestBody( + discoverSchemaInput: StandardDiscoverCatalogInput, + catalog: AirbyteCatalog + ): SourceDiscoverSchemaWriteRequestBody { + return SourceDiscoverSchemaWriteRequestBody() + .catalog(CatalogClientConverters.toAirbyteCatalogClientApi(catalog)) + .sourceId( // NOTE: sourceId is marked required in the OpenAPI config but the code + // generator doesn't enforce + // it, so we check again here. + if (discoverSchemaInput.sourceId == null) null + else UUID.fromString(discoverSchemaInput.sourceId) + ) + .connectorVersion(discoverSchemaInput.connectorVersion) + .configurationHash(discoverSchemaInput.configHash) + } + + override fun cancel() { + TestHarnessUtils.cancelProcess(process) + } + + companion object { + private val LOGGER: Logger = + LoggerFactory.getLogger(DefaultDiscoverCatalogTestHarness::class.java) + private const val WRITE_DISCOVER_CATALOG_LOGS_TAG = "call to write discover schema result" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/general/DefaultGetSpecTestHarness.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/general/DefaultGetSpecTestHarness.kt new file mode 100644 index 0000000000000..61f554a6f1175 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/general/DefaultGetSpecTestHarness.kt @@ -0,0 +1,85 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.general + +import io.airbyte.commons.io.LineGobbler +import io.airbyte.configoss.ConnectorJobOutput +import io.airbyte.configoss.FailureReason +import io.airbyte.configoss.JobGetSpecConfig +import io.airbyte.protocol.models.AirbyteMessage +import io.airbyte.workers.TestHarnessUtils +import io.airbyte.workers.exception.TestHarnessException +import io.airbyte.workers.internal.AirbyteStreamFactory +import io.airbyte.workers.internal.DefaultAirbyteStreamFactory +import io.airbyte.workers.process.IntegrationLauncher +import java.nio.file.Path +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +class DefaultGetSpecTestHarness +@JvmOverloads +constructor( + private val integrationLauncher: IntegrationLauncher, + private val streamFactory: AirbyteStreamFactory = DefaultAirbyteStreamFactory() +) : GetSpecTestHarness { + private lateinit var process: Process + + @Throws(TestHarnessException::class) + override fun run(config: JobGetSpecConfig, jobRoot: Path): ConnectorJobOutput { + try { + val process = integrationLauncher.spec(jobRoot) + this.process = process + + val jobOutput = ConnectorJobOutput().withOutputType(ConnectorJobOutput.OutputType.SPEC) + LineGobbler.gobble(process!!.errorStream, { msg: String? -> LOGGER.error(msg) }) + + val messagesByType = TestHarnessUtils.getMessagesByType(process, streamFactory, 30) + + val spec = + messagesByType + .getOrDefault(AirbyteMessage.Type.SPEC, ArrayList())!! + .stream() + .map { obj: AirbyteMessage -> obj.spec } + .findFirst() + + val failureReason = + TestHarnessUtils.getJobFailureReasonFromMessages( + ConnectorJobOutput.OutputType.SPEC, + messagesByType + ) + failureReason!!.ifPresent { failureReason: FailureReason? -> + jobOutput.failureReason = failureReason + } + + val exitCode = process!!.exitValue() + if (exitCode != 0) { + LOGGER.warn("Spec job subprocess finished with exit code {}", exitCode) + } + + if (spec.isPresent) { + jobOutput.spec = spec.get() + } else if (failureReason.isEmpty) { + TestHarnessUtils.throwWorkerException( + "Integration failed to output a spec struct and did not output a failure reason", + process + ) + } + + return jobOutput + } catch (e: Exception) { + throw TestHarnessException( + String.format("Error while getting spec from image %s", config.dockerImage), + e + ) + } + } + + override fun cancel() { + TestHarnessUtils.cancelProcess(process) + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(DefaultGetSpecTestHarness::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/general/DiscoverCatalogTestHarness.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/general/DiscoverCatalogTestHarness.kt new file mode 100644 index 0000000000000..8ae60fbfc2aca --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/general/DiscoverCatalogTestHarness.kt @@ -0,0 +1,11 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.general + +import io.airbyte.configoss.ConnectorJobOutput +import io.airbyte.configoss.StandardDiscoverCatalogInput +import io.airbyte.workers.TestHarness + +interface DiscoverCatalogTestHarness : + TestHarness diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/general/GetSpecTestHarness.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/general/GetSpecTestHarness.kt new file mode 100644 index 0000000000000..73e06f6e4e040 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/general/GetSpecTestHarness.kt @@ -0,0 +1,10 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.general + +import io.airbyte.configoss.ConnectorJobOutput +import io.airbyte.configoss.JobGetSpecConfig +import io.airbyte.workers.TestHarness + +interface GetSpecTestHarness : TestHarness diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/helper/CatalogClientConverters.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/helper/CatalogClientConverters.kt new file mode 100644 index 0000000000000..ea2da8e967730 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/helper/CatalogClientConverters.kt @@ -0,0 +1,189 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.helper + +import com.fasterxml.jackson.databind.node.ObjectNode +import io.airbyte.api.client.model.generated.* +import io.airbyte.commons.enums.Enums +import io.airbyte.commons.text.Names +import io.airbyte.protocol.models.SyncMode +import io.airbyte.validation.json.JsonValidationException +import java.util.* +import java.util.stream.Collectors + +/** + * Utilities to convert Catalog protocol to Catalog API client. This class was similar to existing + * logic in CatalogConverter.java; But code can't be shared because the protocol model is + * essentially converted to two different api models. Thus, if we need to change logic on either + * place we have to take care of the other one too. + */ +object CatalogClientConverters { + /** + * + * @param catalog + * @return + */ + fun toAirbyteProtocol(catalog: AirbyteCatalog): io.airbyte.protocol.models.AirbyteCatalog { + val protoCatalog = io.airbyte.protocol.models.AirbyteCatalog() + val airbyteStream = + catalog.streams + .stream() + .map { stream: AirbyteStreamAndConfiguration -> + try { + return@map toConfiguredProtocol(stream.stream, stream.config) + } catch (e: JsonValidationException) { + return@map null + } + } + .collect(Collectors.toList()) + + protoCatalog.withStreams(airbyteStream) + return protoCatalog + } + + @Throws(JsonValidationException::class) + private fun toConfiguredProtocol( + stream: AirbyteStream?, + config: AirbyteStreamConfiguration? + ): io.airbyte.protocol.models.AirbyteStream { + if (config!!.fieldSelectionEnabled != null && config.fieldSelectionEnabled!!) { + // Validate the selected field paths. + if (config.selectedFields == null) { + throw JsonValidationException( + "Requested field selection but no selected fields provided" + ) + } + val properties = stream!!.jsonSchema!!.findValue("properties") + if (properties == null || !properties.isObject) { + throw JsonValidationException( + "Requested field selection but no properties node found" + ) + } + for (selectedFieldInfo in config.selectedFields!!) { + if ( + selectedFieldInfo.fieldPath == null || selectedFieldInfo.fieldPath!!.isEmpty() + ) { + throw JsonValidationException("Selected field path cannot be empty") + } + if (selectedFieldInfo.fieldPath!!.size > 1) { + // TODO(mfsiega-airbyte): support nested fields. + throw UnsupportedOperationException("Nested field selection not supported") + } + } + // Only include the selected fields. + // NOTE: we verified above that each selected field has at least one element in the + // field path. + val selectedFieldNames = + config.selectedFields!! + .stream() + .map { field: SelectedFieldInfo -> field.fieldPath!![0] } + .collect(Collectors.toSet()) + // TODO(mfsiega-airbyte): we only check the top level of the cursor/primary key fields + // because we + // don't support filtering nested fields yet. + if ( + config.syncMode == io.airbyte.api.client.model.generated.SyncMode.INCREMENTAL && + !config.cursorField!!.isEmpty() // There is a cursor configured, AND + && + !selectedFieldNames.contains(config.cursorField!![0]) + ) { // The cursor isn't in the selected fields. + throw JsonValidationException( + "Cursor field cannot be de-selected in INCREMENTAL syncs" + ) + } + if (config.destinationSyncMode == DestinationSyncMode.APPEND_DEDUP) { + for (primaryKeyComponent in config.primaryKey!!) { + if (!selectedFieldNames.contains(primaryKeyComponent[0])) { + throw JsonValidationException( + "Primary key field cannot be de-selected in DEDUP mode" + ) + } + } + } + for (selectedFieldName in selectedFieldNames) { + if (!properties.has(selectedFieldName)) { + throw JsonValidationException( + String.format( + "Requested selected field %s not found in JSON schema", + selectedFieldName + ) + ) + } + } + (properties as ObjectNode).retain(selectedFieldNames) + } + return io.airbyte.protocol.models + .AirbyteStream() + .withName(stream!!.name) + .withJsonSchema(stream.jsonSchema) + .withSupportedSyncModes( + Enums.convertListTo(stream.supportedSyncModes!!, SyncMode::class.java) + ) + .withSourceDefinedCursor(stream.sourceDefinedCursor) + .withDefaultCursorField(stream.defaultCursorField) + .withSourceDefinedPrimaryKey( + Optional.ofNullable(stream.sourceDefinedPrimaryKey).orElse(emptyList()) + ) + .withNamespace(stream.namespace) + } + + /** Converts a protocol AirbyteCatalog to an OpenAPI client versioned AirbyteCatalog. */ + fun toAirbyteCatalogClientApi( + catalog: io.airbyte.protocol.models.AirbyteCatalog + ): AirbyteCatalog { + return AirbyteCatalog() + .streams( + catalog.streams + .stream() + .map { stream: io.airbyte.protocol.models.AirbyteStream -> + toAirbyteStreamClientApi(stream) + } + .map { s: AirbyteStream -> + AirbyteStreamAndConfiguration() + .stream(s) + .config(generateDefaultConfiguration(s)) + } + .collect(Collectors.toList()) + ) + } + + private fun generateDefaultConfiguration(stream: AirbyteStream): AirbyteStreamConfiguration { + val result = + AirbyteStreamConfiguration() + .aliasName(Names.toAlphanumericAndUnderscore(stream.name)) + .cursorField(stream.defaultCursorField) + .destinationSyncMode(DestinationSyncMode.APPEND) + .primaryKey(stream.sourceDefinedPrimaryKey) + .selected(true) + if (stream.supportedSyncModes!!.size > 0) { + result.setSyncMode( + Enums.convertTo( + stream.supportedSyncModes!![0], + io.airbyte.api.client.model.generated.SyncMode::class.java + ) + ) + } else { + result.syncMode = io.airbyte.api.client.model.generated.SyncMode.INCREMENTAL + } + return result + } + + private fun toAirbyteStreamClientApi( + stream: io.airbyte.protocol.models.AirbyteStream + ): AirbyteStream { + return AirbyteStream() + .name(stream.name) + .jsonSchema(stream.jsonSchema) + .supportedSyncModes( + Enums.convertListTo( + stream.supportedSyncModes, + io.airbyte.api.client.model.generated.SyncMode::class.java + ) + ) + .sourceDefinedCursor(stream.sourceDefinedCursor) + .defaultCursorField(stream.defaultCursorField) + .sourceDefinedPrimaryKey(stream.sourceDefinedPrimaryKey) + .namespace(stream.namespace) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/helper/ConnectorConfigUpdater.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/helper/ConnectorConfigUpdater.kt new file mode 100644 index 0000000000000..be819be4c67aa --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/helper/ConnectorConfigUpdater.kt @@ -0,0 +1,107 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.helper + +import com.google.common.hash.Hashing +import io.airbyte.api.client.AirbyteApiClient +import io.airbyte.api.client.generated.DestinationApi +import io.airbyte.api.client.generated.SourceApi +import io.airbyte.api.client.model.generated.DestinationIdRequestBody +import io.airbyte.api.client.model.generated.DestinationUpdate +import io.airbyte.api.client.model.generated.SourceIdRequestBody +import io.airbyte.api.client.model.generated.SourceUpdate +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.Config +import java.nio.charset.StandardCharsets +import java.util.* +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * Helper class for workers to persist updates to Source/Destination configs emitted from + * AirbyteControlMessages. + * + * This is in order to support connectors updating configs when running commands, which is specially + * useful for migrating configuration to a new version or for enabling connectors that require + * single-use or short-lived OAuth tokens. + */ +class ConnectorConfigUpdater( + private val sourceApi: SourceApi, + private val destinationApi: DestinationApi +) { + /** + * Updates the Source from a sync job ID with the provided Configuration. Secrets and OAuth + * parameters will be masked when saving. + */ + fun updateSource(sourceId: UUID?, config: Config) { + val source = + AirbyteApiClient.retryWithJitter( + { sourceApi.getSource(SourceIdRequestBody().sourceId(sourceId)) }, + "get source" + )!! + + val updatedSource = + AirbyteApiClient.retryWithJitter( + { + sourceApi.updateSource( + SourceUpdate() + .sourceId(sourceId) + .name(source.name) + .connectionConfiguration(Jsons.jsonNode(config.additionalProperties)) + ) + }, + "update source" + )!! + + LOGGER.info( + "Persisted updated configuration for source {}. New config hash: {}.", + sourceId, + Hashing.sha256() + .hashString(updatedSource.connectionConfiguration.asText(), StandardCharsets.UTF_8) + ) + } + + /** + * Updates the Destination from a sync job ID with the provided Configuration. Secrets and OAuth + * parameters will be masked when saving. + */ + fun updateDestination(destinationId: UUID?, config: Config) { + val destination = + AirbyteApiClient.retryWithJitter( + { + destinationApi.getDestination( + DestinationIdRequestBody().destinationId(destinationId) + ) + }, + "get destination" + )!! + + val updatedDestination = + AirbyteApiClient.retryWithJitter( + { + destinationApi.updateDestination( + DestinationUpdate() + .destinationId(destinationId) + .name(destination.name) + .connectionConfiguration(Jsons.jsonNode(config.additionalProperties)) + ) + }, + "update destination" + )!! + + LOGGER.info( + "Persisted updated configuration for destination {}. New config hash: {}.", + destinationId, + Hashing.sha256() + .hashString( + updatedDestination.connectionConfiguration.asText(), + StandardCharsets.UTF_8 + ) + ) + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(ConnectorConfigUpdater::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/helper/EntrypointEnvChecker.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/helper/EntrypointEnvChecker.kt new file mode 100644 index 0000000000000..b31da22439ac7 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/helper/EntrypointEnvChecker.kt @@ -0,0 +1,80 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.helper + +import io.airbyte.workers.exception.TestHarnessException +import io.airbyte.workers.process.ProcessFactory +import java.io.BufferedReader +import java.io.IOException +import java.io.InputStreamReader +import java.nio.charset.StandardCharsets +import java.nio.file.Path + +/** Should only be used by connector testing. */ +object EntrypointEnvChecker { + /** + * @param processFactory any process factory + * @param jobId used as input to processFactory.create + * @param jobAttempt used as input to processFactory.create + * @param jobRoot used as input to processFactory.create + * @param imageName used as input to processFactory.create + * @return the entrypoint in the env variable AIRBYTE_ENTRYPOINT + * @throws RuntimeException if there is ambiguous output from the container + */ + @Throws(IOException::class, InterruptedException::class, TestHarnessException::class) + fun getEntrypointEnvVariable( + processFactory: ProcessFactory, + jobId: String, + jobAttempt: Int, + jobRoot: Path, + imageName: String + ): String? { + val process = + processFactory.create( + "entrypoint-checker", + jobId, + jobAttempt, + jobRoot, + imageName, + false, + false, + emptyMap(), + "printenv", + null, + null, + emptyMap(), + emptyMap(), + emptyMap(), + emptyMap() + ) + + val stdout = + BufferedReader(InputStreamReader(process!!.inputStream, StandardCharsets.UTF_8)) + + var outputLine: String? = null + + var line = stdout.readLine() + while ((line != null) && outputLine == null) { + if (line.contains("AIRBYTE_ENTRYPOINT")) { + outputLine = line + } + line = stdout.readLine() + } + + process.waitFor() + + return if (outputLine != null) { + val splits = outputLine.split("=".toRegex(), limit = 2).toTypedArray() + if (splits.size != 2) { + throw RuntimeException( + "String could not be split into multiple segments: $outputLine" + ) + } else { + splits[1].trim() + } + } else { + null + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/helper/FailureHelper.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/helper/FailureHelper.kt new file mode 100644 index 0000000000000..9ba16639e1fc9 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/helper/FailureHelper.kt @@ -0,0 +1,176 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.helper + +import com.fasterxml.jackson.annotation.JsonValue +import io.airbyte.configoss.FailureReason +import io.airbyte.configoss.Metadata +import io.airbyte.protocol.models.AirbyteTraceMessage +import org.apache.commons.lang3.exception.ExceptionUtils + +object FailureHelper { + private const val JOB_ID_METADATA_KEY = "jobId" + private const val ATTEMPT_NUMBER_METADATA_KEY = "attemptNumber" + private const val TRACE_MESSAGE_METADATA_KEY = "from_trace_message" + private const val CONNECTOR_COMMAND_METADATA_KEY = "connector_command" + + fun genericFailure(t: Throwable, jobId: Long, attemptNumber: Int): FailureReason { + return FailureReason() + .withInternalMessage(t.message) + .withStacktrace(ExceptionUtils.getStackTrace(t)) + .withTimestamp(System.currentTimeMillis()) + .withMetadata(jobAndAttemptMetadata(jobId, attemptNumber)) + } + + // Generate a FailureReason from an AirbyteTraceMessage. + // The FailureReason.failureType enum value is taken from the + // AirbyteErrorTraceMessage.failureType enum value, so the same enum value + // must exist on both Enums in order to be applied correctly to the FailureReason + fun genericFailure(m: AirbyteTraceMessage, jobId: Long?, attemptNumber: Int?): FailureReason { + var failureType: FailureReason.FailureType? + if (m.error.failureType == null) { + // default to system_error when no failure type is set + failureType = FailureReason.FailureType.SYSTEM_ERROR + } else { + try { + val traceMessageError = m.error.failureType.toString() + failureType = FailureReason.FailureType.fromValue(traceMessageError) + } catch (e: IllegalArgumentException) { + // the trace message error does not exist as a FailureReason failure type, + // so set the failure type to null + failureType = FailureReason.FailureType.SYSTEM_ERROR + } + } + return FailureReason() + .withInternalMessage(m.error.internalMessage) + .withExternalMessage(m.error.message) + .withStacktrace(m.error.stackTrace) + .withTimestamp(m.emittedAt.toLong()) + .withFailureType(failureType) + .withMetadata(traceMessageMetadata(jobId, attemptNumber)) + } + + fun connectorCommandFailure( + m: AirbyteTraceMessage, + jobId: Long?, + attemptNumber: Int?, + connectorCommand: ConnectorCommand + ): FailureReason { + val metadata = traceMessageMetadata(jobId, attemptNumber) + metadata.withAdditionalProperty(CONNECTOR_COMMAND_METADATA_KEY, connectorCommand.toString()) + return genericFailure(m, jobId, attemptNumber).withMetadata(metadata) + } + + fun connectorCommandFailure( + t: Throwable, + jobId: Long, + attemptNumber: Int, + connectorCommand: ConnectorCommand + ): FailureReason { + val metadata = jobAndAttemptMetadata(jobId, attemptNumber) + metadata.withAdditionalProperty(CONNECTOR_COMMAND_METADATA_KEY, connectorCommand.toString()) + return genericFailure(t, jobId, attemptNumber).withMetadata(metadata) + } + + fun sourceFailure(t: Throwable, jobId: Long, attemptNumber: Int): FailureReason { + return connectorCommandFailure(t, jobId, attemptNumber, ConnectorCommand.READ) + .withFailureOrigin(FailureReason.FailureOrigin.SOURCE) + .withExternalMessage("Something went wrong within the source connector") + } + + fun sourceFailure(m: AirbyteTraceMessage, jobId: Long?, attemptNumber: Int?): FailureReason { + return connectorCommandFailure(m, jobId, attemptNumber, ConnectorCommand.READ) + .withFailureOrigin(FailureReason.FailureOrigin.SOURCE) + } + + fun destinationFailure(t: Throwable, jobId: Long, attemptNumber: Int): FailureReason { + return connectorCommandFailure(t, jobId, attemptNumber, ConnectorCommand.WRITE) + .withFailureOrigin(FailureReason.FailureOrigin.DESTINATION) + .withExternalMessage("Something went wrong within the destination connector") + } + + fun destinationFailure( + m: AirbyteTraceMessage, + jobId: Long?, + attemptNumber: Int? + ): FailureReason { + return connectorCommandFailure(m, jobId, attemptNumber, ConnectorCommand.WRITE) + .withFailureOrigin(FailureReason.FailureOrigin.DESTINATION) + } + + fun checkFailure( + t: Throwable, + jobId: Long, + attemptNumber: Int, + origin: FailureReason.FailureOrigin? + ): FailureReason { + return connectorCommandFailure(t, jobId, attemptNumber, ConnectorCommand.CHECK) + .withFailureOrigin(origin) + .withFailureType(FailureReason.FailureType.CONFIG_ERROR) + .withRetryable(false) + .withExternalMessage( + String.format( + "Checking %s connection failed - please review this connection's configuration to prevent future syncs from failing", + origin + ) + ) + } + + fun unknownOriginFailure(t: Throwable, jobId: Long, attemptNumber: Int): FailureReason { + return genericFailure(t, jobId, attemptNumber) + .withFailureOrigin(FailureReason.FailureOrigin.UNKNOWN) + .withExternalMessage("An unknown failure occurred") + } + + private fun jobAndAttemptMetadata(jobId: Long, attemptNumber: Int): Metadata { + return Metadata() + .withAdditionalProperty(JOB_ID_METADATA_KEY, jobId) + .withAdditionalProperty(ATTEMPT_NUMBER_METADATA_KEY, attemptNumber) + } + + private fun traceMessageMetadata(jobId: Long?, attemptNumber: Int?): Metadata { + return Metadata() + .withAdditionalProperty(JOB_ID_METADATA_KEY, jobId) + .withAdditionalProperty(ATTEMPT_NUMBER_METADATA_KEY, attemptNumber) + .withAdditionalProperty(TRACE_MESSAGE_METADATA_KEY, true) + } + + /** + * Orders failures by putting errors from trace messages first, and then orders by timestamp, so + * that earlier failures come first. + */ + fun orderedFailures(failures: Set): List { + val compareByIsTrace = + Comparator.comparing { failureReason: FailureReason -> + val metadata: Any? = failureReason.metadata + if (metadata != null) { + return@comparing if ( + failureReason.metadata.additionalProperties.containsKey( + TRACE_MESSAGE_METADATA_KEY + ) + ) + 0 + else 1 + } else { + return@comparing 1 + } + } + val compareByTimestamp = Comparator.comparing { obj: FailureReason -> obj.timestamp } + val compareByTraceAndTimestamp = compareByIsTrace.thenComparing(compareByTimestamp) + return failures.stream().sorted(compareByTraceAndTimestamp).toList() + } + + enum class ConnectorCommand(private val value: String) { + SPEC("spec"), + CHECK("check"), + DISCOVER("discover"), + WRITE("write"), + READ("read"); + + @JsonValue + override fun toString(): String { + return value.toString() + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/internal/AirbyteDestination.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/internal/AirbyteDestination.kt new file mode 100644 index 0000000000000..2ae0f0d25b13b --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/internal/AirbyteDestination.kt @@ -0,0 +1,101 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.internal + +import io.airbyte.commons.functional.CheckedConsumer +import io.airbyte.configoss.WorkerDestinationConfig +import io.airbyte.protocol.models.AirbyteMessage +import java.nio.file.Path +import java.util.* + +/** + * This interface provides a java interface over all interactions with a Destination from the POV of + * the platform. It encapsulates the full lifecycle of the Destination as well as any inputs and + * outputs. + */ +interface AirbyteDestination : CheckedConsumer, AutoCloseable { + /** + * Starts the Destination container. It instantiates a writer to write to STDIN on that + * container. It also instantiates a reader to listen on STDOUT. + * + * @param destinationConfig + * - contains the arguments that must be passed to the write method of the Destination. + * @param jobRoot + * - directory where the job can write data. + * @param additionalEnvironmentVariables + * @throws Exception + * - throws if there is any failure in startup. + */ + @Throws(Exception::class) + fun start( + destinationConfig: WorkerDestinationConfig, + jobRoot: Path, + additionalEnvironmentVariables: Map + ) + + /** + * Accepts an AirbyteMessage and writes it to STDIN of the Destination. Blocks if STDIN's buffer + * is full. + * + * @param message message to send to destination. + * @throws Exception + * - throws if there is any failure in writing to Destination. + */ + @Throws(Exception::class) override fun accept(message: AirbyteMessage) + + /** + * This method is a flush to make sure all data that should be written to the Destination is + * written. Any messages that have already been accepted ([AirbyteDestination.accept] ()}) will + * be flushed. Any additional messages sent to accept will not be flushed. In fact, flush should + * fail if the caller attempts to send it additional messages after calling this method. + * + * (Potentially should just rename it to flush) + * + * @throws Exception + * - throws if there is any failure when flushing. + */ + @Throws(Exception::class) fun notifyEndOfInput() + + /** + * Means no more data will be emitted by the Destination. This may be because all data has + * already been emitted or because the Destination container has exited. + * + * @return true, if no more data will be emitted. otherwise, false. + */ + fun isFinished(): Boolean + + /** + * Gets the exit value of the destination process. This should only be called after the + * destination process has finished. + * + * @return exit code of the destination process + * @throws IllegalStateException if the destination process has not exited + */ + fun getExitValue(): Int + + /** + * Attempts to read an AirbyteMessage from the Destination. + * + * @return returns an AirbyteMessage if the Destination emits one. Otherwise, empty. This method + * BLOCKS on waiting for the Destination to emit data to STDOUT. + */ + fun attemptRead(): Optional + + /** + * Attempts to shut down the Destination's container. Waits for a graceful shutdown, capped by a + * timeout. + * + * @throws Exception + * - throws if there is any failure in shutdown. + */ + @Throws(Exception::class) override fun close() + + /** + * Attempt to shut down the Destination's container quickly. + * + * @throws Exception + * - throws if there is any failure in shutdown. + */ + @Throws(Exception::class) fun cancel() +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/internal/AirbyteMessageBufferedWriter.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/internal/AirbyteMessageBufferedWriter.kt new file mode 100644 index 0000000000000..70e5c6b61c5f9 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/internal/AirbyteMessageBufferedWriter.kt @@ -0,0 +1,15 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.internal + +import io.airbyte.protocol.models.AirbyteMessage +import java.io.IOException + +interface AirbyteMessageBufferedWriter { + @Throws(IOException::class) fun write(message: AirbyteMessage) + + @Throws(IOException::class) fun flush() + + @Throws(IOException::class) fun close() +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/internal/AirbyteMessageBufferedWriterFactory.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/internal/AirbyteMessageBufferedWriterFactory.kt new file mode 100644 index 0000000000000..329c05e59d96e --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/internal/AirbyteMessageBufferedWriterFactory.kt @@ -0,0 +1,10 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.internal + +import java.io.BufferedWriter + +interface AirbyteMessageBufferedWriterFactory { + fun createWriter(bufferedWriter: BufferedWriter): AirbyteMessageBufferedWriter +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/internal/AirbyteProtocolPredicate.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/internal/AirbyteProtocolPredicate.kt new file mode 100644 index 0000000000000..2a616ffb9a8cc --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/internal/AirbyteProtocolPredicate.kt @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.internal + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.protocol.models.AirbyteProtocolSchema +import io.airbyte.validation.json.JsonSchemaValidator +import java.util.function.Predicate + +/** + * Verify that the provided JsonNode is a valid AirbyteMessage. Any AirbyteMessage type is allowed + * (e.g. Record, State, Log, etc). + */ +class AirbyteProtocolPredicate : Predicate { + private val jsonSchemaValidator = JsonSchemaValidator() + + init { + val schema = + JsonSchemaValidator.getSchema(AirbyteProtocolSchema.PROTOCOL.file, "AirbyteMessage") + jsonSchemaValidator.initializeSchemaValidator(PROTOCOL_SCHEMA_NAME, schema) + } + + override fun test(s: JsonNode?): Boolean { + return jsonSchemaValidator.testInitializedSchema(PROTOCOL_SCHEMA_NAME, s) + } + + companion object { + private const val PROTOCOL_SCHEMA_NAME = "protocol schema" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/internal/AirbyteSource.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/internal/AirbyteSource.kt new file mode 100644 index 0000000000000..4c7c40b3137ca --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/internal/AirbyteSource.kt @@ -0,0 +1,69 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.internal + +import io.airbyte.configoss.WorkerSourceConfig +import io.airbyte.protocol.models.AirbyteMessage +import java.nio.file.Path +import java.util.* + +/** + * This interface provides a java interface over all interactions with a Source from the POV of the + * platform. It encapsulates the full lifecycle of the Source as well as any outputs. + */ +interface AirbyteSource : AutoCloseable { + /** + * Starts the Source container and opens a connection to STDOUT on that container. + * + * @param sourceConfig + * - contains the arguments that must be passed to the read method of the Source. + * @param jobRoot + * - directory where the job can write data. + * @throws Exception + * - throws if there is any failure in startup. + */ + @Throws(Exception::class) fun start(sourceConfig: WorkerSourceConfig, jobRoot: Path) + + /** + * Means no more data will be emitted by the Source. This may be because all data has already + * been emitted or because the Source container has exited. + * + * @return true, if no more data will be emitted. otherwise, false. + */ + val isFinished: Boolean + + /** + * Gets the exit value of the source process. This should only be called after the source + * process has finished. + * + * @return exit code of the source process + * @throws IllegalStateException if the source process has not exited + */ + val exitValue: Int + + /** + * Attempts to read an AirbyteMessage from the Source. + * + * @return returns an AirbyteMessage is the Source emits one. Otherwise, empty. This method + * BLOCKS on waiting for the Source to emit data to STDOUT. + */ + fun attemptRead(): Optional + + /** + * Attempts to shut down the Source's container. Waits for a graceful shutdown, capped by a + * timeout. + * + * @throws Exception + * - throws if there is any failure in shutdown. + */ + @Throws(Exception::class) override fun close() + + /** + * Attempt to shut down the Source's container quickly. + * + * @throws Exception + * - throws if there is any failure in shutdown. + */ + @Throws(Exception::class) fun cancel() +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/internal/AirbyteStreamFactory.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/internal/AirbyteStreamFactory.kt new file mode 100644 index 0000000000000..528f5288b29b6 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/internal/AirbyteStreamFactory.kt @@ -0,0 +1,12 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.internal + +import io.airbyte.protocol.models.AirbyteMessage +import java.io.BufferedReader +import java.util.stream.Stream + +interface AirbyteStreamFactory { + fun create(bufferedReader: BufferedReader): Stream +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/internal/DefaultAirbyteDestination.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/internal/DefaultAirbyteDestination.kt new file mode 100644 index 0000000000000..cb1e62e3d15b8 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/internal/DefaultAirbyteDestination.kt @@ -0,0 +1,198 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.internal + +import com.google.common.base.Charsets +import com.google.common.base.Preconditions +import io.airbyte.commons.io.IOs +import io.airbyte.commons.io.LineGobbler +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.logging.LoggingHelper +import io.airbyte.commons.logging.MdcScope +import io.airbyte.commons.protocol.DefaultProtocolSerializer +import io.airbyte.commons.protocol.ProtocolSerializer +import io.airbyte.configoss.WorkerDestinationConfig +import io.airbyte.protocol.models.AirbyteMessage +import io.airbyte.workers.TestHarnessUtils +import io.airbyte.workers.WorkerConstants +import io.airbyte.workers.exception.TestHarnessException +import io.airbyte.workers.process.IntegrationLauncher +import java.io.BufferedWriter +import java.io.IOException +import java.io.OutputStreamWriter +import java.nio.file.Path +import java.util.* +import java.util.List +import java.util.concurrent.TimeUnit +import java.util.concurrent.atomic.AtomicBoolean +import kotlin.collections.Iterator +import kotlin.collections.Map +import kotlin.collections.Set +import kotlin.collections.contains +import kotlin.collections.setOf +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +class DefaultAirbyteDestination +@JvmOverloads +constructor( + private val integrationLauncher: IntegrationLauncher, + private val streamFactory: AirbyteStreamFactory = + DefaultAirbyteStreamFactory(CONTAINER_LOG_MDC_BUILDER), + private val messageWriterFactory: AirbyteMessageBufferedWriterFactory = + DefaultAirbyteMessageBufferedWriterFactory(), + private val protocolSerializer: ProtocolSerializer = DefaultProtocolSerializer() +) : AirbyteDestination { + private val inputHasEnded = AtomicBoolean(false) + + private var destinationProcess: Process? = null + private var writer: AirbyteMessageBufferedWriter? = null + private var messageIterator: Iterator? = null + + private var exitValueIsSet = false + private var exitValue: Int = 0 + override fun getExitValue(): Int { + Preconditions.checkState( + destinationProcess != null, + "Destination process is null, cannot retrieve exit value." + ) + Preconditions.checkState( + !destinationProcess!!.isAlive, + "Destination process is still alive, cannot retrieve exit value." + ) + + if (!exitValueIsSet) { + exitValueIsSet = true + exitValue = destinationProcess!!.exitValue() + } + + return exitValue + } + + @Throws(IOException::class, TestHarnessException::class) + override fun start( + destinationConfig: WorkerDestinationConfig, + jobRoot: Path, + additionalEnvironmentVariables: Map + ) { + Preconditions.checkState(destinationProcess == null) + + LOGGER.info("Running destination...") + destinationProcess = + integrationLauncher.write( + jobRoot, + WorkerConstants.DESTINATION_CONFIG_JSON_FILENAME, + Jsons.serialize(destinationConfig.destinationConnectionConfiguration), + WorkerConstants.DESTINATION_CATALOG_JSON_FILENAME, + protocolSerializer.serialize(destinationConfig.catalog), + additionalEnvironmentVariables + ) + // stdout logs are logged elsewhere since stdout also contains data + LineGobbler.gobble( + destinationProcess!!.errorStream, + { msg: String? -> LOGGER.error(msg) }, + "airbyte-destination", + CONTAINER_LOG_MDC_BUILDER + ) + + writer = + messageWriterFactory.createWriter( + BufferedWriter( + OutputStreamWriter(destinationProcess!!.outputStream, Charsets.UTF_8) + ) + ) + + val acceptedMessageTypes = + List.of( + AirbyteMessage.Type.STATE, + AirbyteMessage.Type.TRACE, + AirbyteMessage.Type.CONTROL + ) + messageIterator = + streamFactory + .create(IOs.newBufferedReader(destinationProcess!!.inputStream)) + .filter { message: AirbyteMessage -> acceptedMessageTypes.contains(message.type) } + .iterator() + } + + @Throws(IOException::class) + override fun accept(message: AirbyteMessage) { + Preconditions.checkState(destinationProcess != null && !inputHasEnded.get()) + + writer!!.write(message) + } + + @Throws(IOException::class) + override fun notifyEndOfInput() { + Preconditions.checkState(destinationProcess != null && !inputHasEnded.get()) + + writer!!.flush() + writer!!.close() + inputHasEnded.set(true) + } + + @Throws(Exception::class) + override fun close() { + if (destinationProcess == null) { + LOGGER.debug("Destination process already exited") + return + } + + if (!inputHasEnded.get()) { + notifyEndOfInput() + } + + LOGGER.debug("Closing destination process") + TestHarnessUtils.gentleClose(destinationProcess, 1, TimeUnit.MINUTES) + if (destinationProcess!!.isAlive || !IGNORED_EXIT_CODES.contains(exitValue)) { + val message = + if (destinationProcess!!.isAlive) "Destination has not terminated " + else "Destination process exit with code " + exitValue + throw TestHarnessException("$message. This warning is normal if the job was cancelled.") + } + } + + @Throws(Exception::class) + override fun cancel() { + LOGGER.info("Attempting to cancel destination process...") + + if (destinationProcess == null) { + LOGGER.info("Destination process no longer exists, cancellation is a no-op.") + } else { + LOGGER.info("Destination process exists, cancelling...") + TestHarnessUtils.cancelProcess(destinationProcess) + LOGGER.info("Cancelled destination process!") + } + } + + override fun isFinished(): Boolean { + Preconditions.checkState(destinationProcess != null) + /* + * As this check is done on every message read, it is important for this operation to be efficient. + * Short circuit early to avoid checking the underlying process. Note: hasNext is blocking. + */ + return !messageIterator!!.hasNext() && !destinationProcess!!.isAlive + } + + override fun attemptRead(): Optional { + Preconditions.checkState(destinationProcess != null) + + return Optional.ofNullable( + if (messageIterator!!.hasNext()) messageIterator!!.next() else null + ) + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(DefaultAirbyteDestination::class.java) + val CONTAINER_LOG_MDC_BUILDER: MdcScope.Builder = + MdcScope.Builder() + .setLogPrefix("destination") + .setPrefixColor(LoggingHelper.Color.YELLOW_BACKGROUND) + val IGNORED_EXIT_CODES: Set = + setOf( + 0, // Normal exit + 143 // SIGTERM + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/internal/DefaultAirbyteMessageBufferedWriter.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/internal/DefaultAirbyteMessageBufferedWriter.kt new file mode 100644 index 0000000000000..7f200fcbedcf8 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/internal/DefaultAirbyteMessageBufferedWriter.kt @@ -0,0 +1,28 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.internal + +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.AirbyteMessage +import java.io.BufferedWriter +import java.io.IOException + +class DefaultAirbyteMessageBufferedWriter(protected val writer: BufferedWriter) : + AirbyteMessageBufferedWriter { + @Throws(IOException::class) + override fun write(message: AirbyteMessage) { + writer.write(Jsons.serialize(message)) + writer.newLine() + } + + @Throws(IOException::class) + override fun flush() { + writer.flush() + } + + @Throws(IOException::class) + override fun close() { + writer.close() + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/internal/DefaultAirbyteMessageBufferedWriterFactory.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/internal/DefaultAirbyteMessageBufferedWriterFactory.kt new file mode 100644 index 0000000000000..17f7b944988f0 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/internal/DefaultAirbyteMessageBufferedWriterFactory.kt @@ -0,0 +1,12 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.internal + +import java.io.BufferedWriter + +class DefaultAirbyteMessageBufferedWriterFactory : AirbyteMessageBufferedWriterFactory { + override fun createWriter(writer: BufferedWriter): AirbyteMessageBufferedWriter { + return DefaultAirbyteMessageBufferedWriter(writer) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/internal/DefaultAirbyteSource.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/internal/DefaultAirbyteSource.kt new file mode 100644 index 0000000000000..8018576ba8c74 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/internal/DefaultAirbyteSource.kt @@ -0,0 +1,218 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.internal + +import com.google.common.annotations.VisibleForTesting +import com.google.common.base.Preconditions +import io.airbyte.commons.features.FeatureFlags +import io.airbyte.commons.io.IOs +import io.airbyte.commons.io.LineGobbler +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.logging.LoggingHelper +import io.airbyte.commons.logging.MdcScope +import io.airbyte.commons.protocol.DefaultProtocolSerializer +import io.airbyte.commons.protocol.ProtocolSerializer +import io.airbyte.configoss.WorkerSourceConfig +import io.airbyte.protocol.models.AirbyteMessage +import io.airbyte.workers.TestHarnessUtils +import io.airbyte.workers.WorkerConstants +import io.airbyte.workers.process.IntegrationLauncher +import java.nio.file.Path +import java.time.Duration +import java.time.temporal.ChronoUnit +import java.util.* +import java.util.List +import java.util.concurrent.TimeUnit +import kotlin.collections.Iterator +import kotlin.collections.Set +import kotlin.collections.contains +import kotlin.collections.setOf +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +class DefaultAirbyteSource +@VisibleForTesting +internal constructor( + private val integrationLauncher: IntegrationLauncher, + private val streamFactory: AirbyteStreamFactory, + private val heartbeatMonitor: HeartbeatMonitor, + private val protocolSerializer: ProtocolSerializer, + featureFlags: FeatureFlags +) : AirbyteSource { + private var sourceProcess: Process? = null + private var messageIterator: Iterator? = null + + private var exitValueIsSet = false + @get:Throws(IllegalStateException::class) + override var exitValue: Int = 0 + get() { + Preconditions.checkState( + sourceProcess != null, + "Source process is null, cannot retrieve exit value." + ) + Preconditions.checkState( + !sourceProcess!!.isAlive, + "Source process is still alive, cannot retrieve exit value." + ) + + if (!exitValueIsSet) { + exitValueIsSet = true + field = sourceProcess!!.exitValue() + } + + return field + } + private set + private val featureFlagLogConnectorMsgs = featureFlags.logConnectorMessages() + + constructor( + integrationLauncher: IntegrationLauncher, + featureFlags: FeatureFlags + ) : this( + integrationLauncher, + DefaultAirbyteStreamFactory(CONTAINER_LOG_MDC_BUILDER), + DefaultProtocolSerializer(), + featureFlags + ) + + constructor( + integrationLauncher: IntegrationLauncher, + streamFactory: AirbyteStreamFactory, + protocolSerializer: ProtocolSerializer, + featureFlags: FeatureFlags + ) : this( + integrationLauncher, + streamFactory, + HeartbeatMonitor(HEARTBEAT_FRESH_DURATION), + protocolSerializer, + featureFlags + ) + + @Throws(Exception::class) + override fun start(sourceConfig: WorkerSourceConfig, jobRoot: Path) { + Preconditions.checkState(sourceProcess == null) + + sourceProcess = + integrationLauncher.read( + jobRoot, + WorkerConstants.SOURCE_CONFIG_JSON_FILENAME, + Jsons.serialize(sourceConfig.sourceConnectionConfiguration), + WorkerConstants.SOURCE_CATALOG_JSON_FILENAME, + protocolSerializer.serialize(sourceConfig.catalog), + if (sourceConfig.state == null) null + else + WorkerConstants + .INPUT_STATE_JSON_FILENAME, // TODO We should be passing a typed state here + // and use the protocolSerializer + if (sourceConfig.state == null) null else Jsons.serialize(sourceConfig.state.state) + ) + // stdout logs are logged elsewhere since stdout also contains data + LineGobbler.gobble( + sourceProcess!!.errorStream, + { msg: String? -> LOGGER.error(msg) }, + "airbyte-source", + CONTAINER_LOG_MDC_BUILDER + ) + + logInitialStateAsJSON(sourceConfig) + + val acceptedMessageTypes = + List.of( + AirbyteMessage.Type.RECORD, + AirbyteMessage.Type.STATE, + AirbyteMessage.Type.TRACE, + AirbyteMessage.Type.CONTROL + ) + messageIterator = + streamFactory + .create(IOs.newBufferedReader(sourceProcess!!.inputStream)) + .peek { message: AirbyteMessage? -> heartbeatMonitor.beat() } + .filter { message: AirbyteMessage -> acceptedMessageTypes.contains(message.type) } + .iterator() + } + + override val isFinished: Boolean + get() { + Preconditions.checkState(sourceProcess != null) + + /* + * As this check is done on every message read, it is important for this operation to be efficient. + * Short circuit early to avoid checking the underlying process. note: hasNext is blocking. + */ + return !messageIterator!!.hasNext() && !sourceProcess!!.isAlive + } + + override fun attemptRead(): Optional { + Preconditions.checkState(sourceProcess != null) + + return Optional.ofNullable( + if (messageIterator!!.hasNext()) messageIterator!!.next() else null + ) + } + + @Throws(Exception::class) + override fun close() { + if (sourceProcess == null) { + LOGGER.debug("Source process already exited") + return + } + + LOGGER.debug("Closing source process") + TestHarnessUtils.gentleClose( + sourceProcess, + GRACEFUL_SHUTDOWN_DURATION.toMillis(), + TimeUnit.MILLISECONDS + ) + + if (sourceProcess!!.isAlive || !IGNORED_EXIT_CODES.contains(exitValue)) { + val message = + if (sourceProcess!!.isAlive) "Source has not terminated " + else "Source process exit with code " + exitValue + LOGGER.warn("$message. This warning is normal if the job was cancelled.") + } + } + + @Throws(Exception::class) + override fun cancel() { + LOGGER.info("Attempting to cancel source process...") + + if (sourceProcess == null) { + LOGGER.info("Source process no longer exists, cancellation is a no-op.") + } else { + LOGGER.info("Source process exists, cancelling...") + TestHarnessUtils.cancelProcess(sourceProcess) + LOGGER.info("Cancelled source process!") + } + } + + private fun logInitialStateAsJSON(sourceConfig: WorkerSourceConfig) { + if (!featureFlagLogConnectorMsgs) { + return + } + + if (sourceConfig.state == null) { + LOGGER.info("source starting state | empty") + return + } + + LOGGER.info("source starting state | " + Jsons.serialize(sourceConfig.state.state)) + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(DefaultAirbyteSource::class.java) + + private val HEARTBEAT_FRESH_DURATION: Duration = Duration.of(5, ChronoUnit.MINUTES) + private val GRACEFUL_SHUTDOWN_DURATION: Duration = Duration.of(1, ChronoUnit.MINUTES) + val IGNORED_EXIT_CODES: Set = + setOf( + 0, // Normal exit + 143 // SIGTERM + ) + + val CONTAINER_LOG_MDC_BUILDER: MdcScope.Builder = + MdcScope.Builder() + .setLogPrefix("source") + .setPrefixColor(LoggingHelper.Color.BLUE_BACKGROUND) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/internal/DefaultAirbyteStreamFactory.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/internal/DefaultAirbyteStreamFactory.kt new file mode 100644 index 0000000000000..6f13fc7c1cba1 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/internal/DefaultAirbyteStreamFactory.kt @@ -0,0 +1,193 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.internal + +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.annotations.VisibleForTesting +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.logging.MdcScope +import io.airbyte.protocol.models.AirbyteLogMessage +import io.airbyte.protocol.models.AirbyteMessage +import java.io.BufferedReader +import java.lang.reflect.InvocationTargetException +import java.nio.charset.StandardCharsets +import java.text.CharacterIterator +import java.text.StringCharacterIterator +import java.time.Instant +import java.util.* +import java.util.stream.Stream +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * Creates a stream from an input stream. The produced stream attempts to parse each line of the + * InputStream into a AirbyteMessage. If the line cannot be parsed into a AirbyteMessage it is + * dropped. Each record MUST be new line separated. + * + * If a line starts with a AirbyteMessage and then has other characters after it, that + * AirbyteMessage will still be parsed. If there are multiple AirbyteMessage records on the same + * line, only the first will be parsed. + */ +class DefaultAirbyteStreamFactory : AirbyteStreamFactory { + private val MAX_SIZE_RATIO = 0.8 + + private val containerLogMdcBuilder: MdcScope.Builder + private val protocolValidator: AirbyteProtocolPredicate + protected val logger: Logger + private val maxMemory: Long + private val exceptionClass: Optional> + + @JvmOverloads + constructor( + containerLogMdcBuilder: MdcScope.Builder = MdcScope.DEFAULT_BUILDER + ) : this( + AirbyteProtocolPredicate(), + LOGGER, + containerLogMdcBuilder, + Optional.empty>() + ) + + /** + * Create a default airbyte stream, if a `messageSizeExceptionClass` is not empty, the message + * size will be checked and if it more than the available memory * MAX_SIZE_RATIO the sync will + * be failed by throwing the exception provided. The exception must have a constructor that + * accept a string. + */ + internal constructor( + protocolPredicate: AirbyteProtocolPredicate, + logger: Logger, + containerLogMdcBuilder: MdcScope.Builder, + messageSizeExceptionClass: Optional> + ) { + protocolValidator = protocolPredicate + this.logger = logger + this.containerLogMdcBuilder = containerLogMdcBuilder + this.exceptionClass = messageSizeExceptionClass + this.maxMemory = Runtime.getRuntime().maxMemory() + } + + @VisibleForTesting + internal constructor( + protocolPredicate: AirbyteProtocolPredicate, + logger: Logger, + containerLogMdcBuilder: MdcScope.Builder, + messageSizeExceptionClass: Optional>, + maxMemory: Long + ) { + protocolValidator = protocolPredicate + this.logger = logger + this.containerLogMdcBuilder = containerLogMdcBuilder + this.exceptionClass = messageSizeExceptionClass + this.maxMemory = maxMemory + } + + override fun create(bufferedReader: BufferedReader): Stream { + return bufferedReader + .lines() + .peek { str: String -> + if (exceptionClass.isPresent) { + val messageSize = str.toByteArray(StandardCharsets.UTF_8).size.toLong() + if (messageSize > maxMemory * MAX_SIZE_RATIO) { + try { + val errorMessage = + String.format( + "Airbyte has received a message at %s UTC which is larger than %s (size: %s). The sync has been failed to prevent running out of memory.", + Instant.now(), + humanReadableByteCountSI(maxMemory), + humanReadableByteCountSI(messageSize) + ) + throw exceptionClass + .get() + .getConstructor(String::class.java) + .newInstance(errorMessage)!! + } catch (e: InstantiationException) { + throw RuntimeException(e) + } catch (e: IllegalAccessException) { + throw RuntimeException(e) + } catch (e: InvocationTargetException) { + throw RuntimeException(e) + } catch (e: NoSuchMethodException) { + throw RuntimeException(e) + } + } + } + } + .flatMap { line: String? -> this.parseJson(line) } + .filter { json: JsonNode? -> this.validate(json) } + .flatMap { json: JsonNode? -> this.toAirbyteMessage(json) } + .filter { message: AirbyteMessage -> this.filterLog(message) } + } + + protected fun parseJson(line: String?): Stream { + val jsonLine = Jsons.tryDeserializeWithoutWarn(line) + if (jsonLine.isEmpty) { + // we log as info all the lines that are not valid json + // some sources actually log their process on stdout, we + // want to make sure this info is available in the logs. + containerLogMdcBuilder.build().use { mdcScope -> logger.info(line) } + } + return jsonLine.stream() + } + + protected fun validate(json: JsonNode?): Boolean { + val res = protocolValidator.test(json) + if (!res) { + logger.error("Validation failed: {}", Jsons.serialize(json)) + } + return res + } + + protected fun toAirbyteMessage(json: JsonNode?): Stream { + val m = Jsons.tryObject(json, AirbyteMessage::class.java) + if (m.isEmpty) { + logger.error("Deserialization failed: {}", Jsons.serialize(json)) + } + return m.stream() + } + + protected fun filterLog(message: AirbyteMessage): Boolean { + val isLog = message.type == AirbyteMessage.Type.LOG + if (isLog) { + containerLogMdcBuilder.build().use { mdcScope -> internalLog(message.log) } + } + return !isLog + } + + protected fun internalLog(logMessage: AirbyteLogMessage) { + val combinedMessage = + logMessage.message + + (if (logMessage.stackTrace != null) + (System.lineSeparator() + "Stack Trace: " + logMessage.stackTrace) + else "") + + when (logMessage.level) { + AirbyteLogMessage.Level.FATAL, + AirbyteLogMessage.Level.ERROR -> logger.error(combinedMessage) + AirbyteLogMessage.Level.WARN -> logger.warn(combinedMessage) + AirbyteLogMessage.Level.DEBUG -> logger.debug(combinedMessage) + AirbyteLogMessage.Level.TRACE -> logger.trace(combinedMessage) + else -> logger.info(combinedMessage) + } + } + + // Human-readable byte size from + // https://stackoverflow.com/questions/3758606/how-can-i-convert-byte-size-into-a-human-readable-format-in-java + private fun humanReadableByteCountSI(bytes: Long): String { + var bytes = bytes + if (-1000 < bytes && bytes < 1000) { + return "$bytes B" + } + val ci: CharacterIterator = StringCharacterIterator("kMGTPE") + while (bytes <= -999950 || bytes >= 999950) { + bytes /= 1000 + ci.next() + } + return String.format("%.1f %cB", bytes / 1000.0, ci.current()) + } + + companion object { + private val LOGGER: Logger = + LoggerFactory.getLogger(DefaultAirbyteStreamFactory::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/internal/HeartbeatMonitor.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/internal/HeartbeatMonitor.kt new file mode 100644 index 0000000000000..c0f328dd1945f --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/internal/HeartbeatMonitor.kt @@ -0,0 +1,47 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.internal + +import com.google.common.annotations.VisibleForTesting +import java.time.Duration +import java.time.Instant +import java.util.concurrent.atomic.AtomicReference +import java.util.function.Supplier + +/** + * Tracks heartbeats and, when asked, says if it has been too long since the last heartbeat. He's + * dead Jim! + * + * It is ThreadSafe. + */ +class HeartbeatMonitor +@VisibleForTesting +constructor( + private val heartBeatFreshDuration: Duration?, + private val nowSupplier: Supplier +) { + private val lastBeat = AtomicReference(null) + + constructor( + heartBeatFreshDuration: Duration? + ) : this(heartBeatFreshDuration, Supplier { Instant.now() }) + + /** Register a heartbeat */ + fun beat() { + lastBeat.set(nowSupplier.get()) + } + + val isBeating: Boolean + /** + * + * @return true if the last heartbeat is still "fresh". i.e. time since last heartbeat is + * less than heartBeatFreshDuration. otherwise, false. + */ + get() { + val instantFetched = lastBeat.get() + val now = nowSupplier.get() + return instantFetched != null && + instantFetched.plus(heartBeatFreshDuration).isAfter(now) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/normalization/DefaultNormalizationRunner.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/normalization/DefaultNormalizationRunner.kt new file mode 100644 index 0000000000000..87ec510ea110a --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/normalization/DefaultNormalizationRunner.kt @@ -0,0 +1,277 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.normalization + +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.base.Strings +import com.google.common.collect.ImmutableMap +import io.airbyte.commons.io.IOs +import io.airbyte.commons.io.LineGobbler +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.logging.LoggingHelper +import io.airbyte.commons.logging.MdcScope +import io.airbyte.configoss.OperatorDbt +import io.airbyte.configoss.ResourceRequirements +import io.airbyte.protocol.models.AirbyteErrorTraceMessage +import io.airbyte.protocol.models.AirbyteMessage +import io.airbyte.protocol.models.AirbyteTraceMessage +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog +import io.airbyte.workers.TestHarnessUtils +import io.airbyte.workers.WorkerConstants +import io.airbyte.workers.exception.TestHarnessException +import io.airbyte.workers.process.Metadata +import io.airbyte.workers.process.ProcessFactory +import java.nio.file.Path +import java.util.* +import java.util.concurrent.TimeUnit +import java.util.function.Function +import java.util.stream.Collectors +import java.util.stream.Stream +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +class DefaultNormalizationRunner( + private val processFactory: ProcessFactory, + private val normalizationImageName: String?, + private val normalizationIntegrationType: String? +) : NormalizationRunner { + private val streamFactory = NormalizationAirbyteStreamFactory(CONTAINER_LOG_MDC_BUILDER) + private var airbyteMessagesByType: MutableMap> = + HashMap() + private var dbtErrorStack: String? = null + + private var process: Process? = null + + @Throws(Exception::class) + override fun configureDbt( + jobId: String, + attempt: Int, + jobRoot: Path, + config: JsonNode?, + resourceRequirements: ResourceRequirements?, + dbtConfig: OperatorDbt + ): Boolean { + val files: Map = + ImmutableMap.of( + WorkerConstants.DESTINATION_CONFIG_JSON_FILENAME, + Jsons.serialize(config) + ) + val gitRepoUrl = dbtConfig.gitRepoUrl + if (Strings.isNullOrEmpty(gitRepoUrl)) { + throw TestHarnessException("Git Repo Url is required") + } + val gitRepoBranch = dbtConfig.gitRepoBranch + return if (Strings.isNullOrEmpty(gitRepoBranch)) { + runProcess( + jobId, + attempt, + jobRoot, + files, + resourceRequirements, + "configure-dbt", + "--integration-type", + normalizationIntegrationType!!.lowercase(Locale.getDefault()), + "--config", + WorkerConstants.DESTINATION_CONFIG_JSON_FILENAME, + "--git-repo", + gitRepoUrl + ) + } else { + runProcess( + jobId, + attempt, + jobRoot, + files, + resourceRequirements, + "configure-dbt", + "--integration-type", + normalizationIntegrationType!!.lowercase(Locale.getDefault()), + "--config", + WorkerConstants.DESTINATION_CONFIG_JSON_FILENAME, + "--git-repo", + gitRepoUrl, + "--git-branch", + gitRepoBranch + ) + } + } + + @Throws(Exception::class) + override fun normalize( + jobId: String, + attempt: Int, + jobRoot: Path, + config: JsonNode, + catalog: ConfiguredAirbyteCatalog, + resourceRequirements: ResourceRequirements? + ): Boolean { + val files: Map = + ImmutableMap.of( + WorkerConstants.DESTINATION_CONFIG_JSON_FILENAME, + Jsons.serialize(config), + WorkerConstants.DESTINATION_CATALOG_JSON_FILENAME, + Jsons.serialize(catalog) + ) + + return runProcess( + jobId, + attempt, + jobRoot, + files, + resourceRequirements, + "run", + "--integration-type", + normalizationIntegrationType!!.lowercase(Locale.getDefault()), + "--config", + WorkerConstants.DESTINATION_CONFIG_JSON_FILENAME, + "--catalog", + WorkerConstants.DESTINATION_CATALOG_JSON_FILENAME + ) + } + + @Throws(Exception::class) + private fun runProcess( + jobId: String, + attempt: Int, + jobRoot: Path, + files: Map, + resourceRequirements: ResourceRequirements?, + vararg args: String + ): Boolean { + try { + LOGGER.info("Running with normalization version: {}", normalizationImageName) + var process = + processFactory.create( + Metadata.NORMALIZE_STEP, + jobId, + attempt, + jobRoot, + normalizationImageName!!, // custom connector does not use normalization + false, + false, + files, + null, + resourceRequirements, + null, + java.util.Map.of( + Metadata.JOB_TYPE_KEY, + Metadata.SYNC_JOB, + Metadata.SYNC_STEP_KEY, + Metadata.NORMALIZE_STEP + ), + emptyMap(), + emptyMap(), + emptyMap(), + *args + ) + this.process = process + + process.inputStream.use { stdout -> + // finds and collects any AirbyteMessages from stdout + // also builds a list of raw dbt errors and stores in streamFactory + airbyteMessagesByType = + streamFactory + .create(IOs.newBufferedReader(stdout)) + .collect( + Collectors.groupingBy(Function { obj: AirbyteMessage -> obj.type }) + ) + + // picks up error logs from dbt + dbtErrorStack = java.lang.String.join("\n", streamFactory.dbtErrors) + if ("" != dbtErrorStack) { + val dbtTraceMessage = + AirbyteMessage() + .withType(AirbyteMessage.Type.TRACE) + .withTrace( + AirbyteTraceMessage() + .withType(AirbyteTraceMessage.Type.ERROR) + .withEmittedAt(System.currentTimeMillis().toDouble()) + .withError( + AirbyteErrorTraceMessage() + .withFailureType( + AirbyteErrorTraceMessage.FailureType.SYSTEM_ERROR + ) // TODO: decide on best FailureType for this + .withMessage( + "Normalization failed during the dbt run. This may indicate a problem with the data itself." + ) // due to the lack of consistent defining features in + // dbt errors we're injecting a breadcrumb to the + // stacktrace so we can confidently identify all dbt + // errors when parsing and sending to Sentry + // see dbt error examples: + // https://docs.getdbt.com/guides/legacy/debugging-errors for more context + .withStackTrace("AirbyteDbtError: \n$dbtErrorStack") + ) + ) + + airbyteMessagesByType.putIfAbsent( + AirbyteMessage.Type.TRACE, + java.util.List.of(dbtTraceMessage) + ) + } + } + LineGobbler.gobble( + process.errorStream, + { msg: String? -> LOGGER.error(msg) }, + CONTAINER_LOG_MDC_BUILDER + ) + + TestHarnessUtils.wait(process) + + return process.exitValue() == 0 + } catch (e: Exception) { + // make sure we kill the process on failure to avoid zombies. + process?.let { TestHarnessUtils.cancelProcess(process) } + throw e + } + } + + @Throws(Exception::class) + override fun close() { + process?.let { + LOGGER.info("Terminating normalization process...") + TestHarnessUtils.gentleClose(it, 1, TimeUnit.MINUTES) + + /* + * After attempting to close the process check the following: + * + * Did the process actually terminate? If "yes", did it do so nominally? + */ + if (it.isAlive) { + throw TestHarnessException( + "Normalization process did not terminate after 1 minute." + ) + } else if (it.exitValue() != 0) { + throw TestHarnessException( + "Normalization process did not terminate normally (exit code: " + + it.exitValue() + + ")" + ) + } else { + LOGGER.info("Normalization process successfully terminated.") + } + } + } + + override val traceMessages: Stream + get() { + if ( + airbyteMessagesByType != null && + airbyteMessagesByType!![AirbyteMessage.Type.TRACE] != null + ) { + return airbyteMessagesByType!![AirbyteMessage.Type.TRACE]!!.stream().map { + obj: AirbyteMessage -> + obj.trace + } + } + return Stream.empty() + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(DefaultNormalizationRunner::class.java) + private val CONTAINER_LOG_MDC_BUILDER: MdcScope.Builder = + MdcScope.Builder() + .setLogPrefix("normalization") + .setPrefixColor(LoggingHelper.Color.GREEN_BACKGROUND) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/normalization/NormalizationAirbyteStreamFactory.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/normalization/NormalizationAirbyteStreamFactory.kt new file mode 100644 index 0000000000000..a2cc295a6ae19 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/normalization/NormalizationAirbyteStreamFactory.kt @@ -0,0 +1,118 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.normalization + +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.node.JsonNodeType +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.logging.MdcScope +import io.airbyte.protocol.models.AirbyteLogMessage +import io.airbyte.protocol.models.AirbyteMessage +import io.airbyte.workers.internal.AirbyteStreamFactory +import java.io.BufferedReader +import java.util.stream.Stream +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * Creates a stream from an input stream. The produced stream attempts to parse each line of the + * InputStream into a AirbyteMessage. If the line cannot be parsed into a AirbyteMessage it is + * assumed to be from dbt. dbt [error] messages are also parsed + * + * If a line starts with a AirbyteMessage and then has other characters after it, that + * AirbyteMessage will still be parsed. If there are multiple AirbyteMessage records on the same + * line, only the first will be parsed. + */ +class NormalizationAirbyteStreamFactory +internal constructor( + private val logger: Logger, + private val containerLogMdcBuilder: MdcScope.Builder +) : AirbyteStreamFactory { + val dbtErrors: MutableList = ArrayList() + + constructor(containerLogMdcBuilder: MdcScope.Builder) : this(LOGGER, containerLogMdcBuilder) + + override fun create(bufferedReader: BufferedReader): Stream { + return bufferedReader + .lines() + .flatMap { line: String -> this.filterOutAndHandleNonJsonLines(line) } + .flatMap { jsonLine: JsonNode -> + this.filterOutAndHandleNonAirbyteMessageLines(jsonLine) + } // so now we are just left with AirbyteMessages + .filter { airbyteMessage: AirbyteMessage -> + val isLog = airbyteMessage!!.type == AirbyteMessage.Type.LOG + if (isLog) { + containerLogMdcBuilder.build().use { mdcScope -> + internalLog(airbyteMessage.log) + } + } + !isLog + } + } + + private fun filterOutAndHandleNonJsonLines(line: String): Stream { + val jsonLine = Jsons.tryDeserialize(line) + if (jsonLine.isEmpty) { + // we log as info all the lines that are not valid json. + containerLogMdcBuilder.build().use { mdcScope -> + logger.info(line) + // this is really hacky and vulnerable to picking up lines we don't want, + // however it is only for destinations that are using dbt version < 1.0. + // For v1 + we switch on JSON logging and parse those in the next block. + if (line.contains("[error]")) { + dbtErrors.add(line) + } + } + } + return jsonLine.stream() + } + + private fun filterOutAndHandleNonAirbyteMessageLines( + jsonLine: JsonNode + ): Stream { + val m = Jsons.tryObject(jsonLine, AirbyteMessage::class.java) + if (m.isEmpty) { + // valid JSON but not an AirbyteMessage, so we assume this is a dbt json log + try { + val logLevel = + if ((jsonLine.nodeType == JsonNodeType.NULL || jsonLine["level"].isNull)) "" + else jsonLine["level"].asText() + val logMsg = if (jsonLine["msg"].isNull) "" else jsonLine["msg"].asText() + containerLogMdcBuilder.build().use { mdcScope -> + when (logLevel) { + "debug" -> logger.debug(logMsg) + "info" -> logger.info(logMsg) + "warn" -> logger.warn(logMsg) + "error" -> logAndCollectErrorMessage(logMsg) + else -> logger.info(jsonLine.toPrettyString()) + } + } + } catch (e: Exception) { + logger.info(jsonLine.toPrettyString()) + } + } + return m.stream() + } + + private fun logAndCollectErrorMessage(logMsg: String) { + logger.error(logMsg) + dbtErrors.add(logMsg) + } + + private fun internalLog(logMessage: AirbyteLogMessage) { + when (logMessage.level) { + AirbyteLogMessage.Level.FATAL, + AirbyteLogMessage.Level.ERROR -> logger.error(logMessage.message) + AirbyteLogMessage.Level.WARN -> logger.warn(logMessage.message) + AirbyteLogMessage.Level.DEBUG -> logger.debug(logMessage.message) + AirbyteLogMessage.Level.TRACE -> logger.trace(logMessage.message) + else -> logger.info(logMessage.message) + } + } + + companion object { + private val LOGGER: Logger = + LoggerFactory.getLogger(NormalizationAirbyteStreamFactory::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/normalization/NormalizationRunner.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/normalization/NormalizationRunner.kt new file mode 100644 index 0000000000000..1ae93cda1b653 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/normalization/NormalizationRunner.kt @@ -0,0 +1,77 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.normalization + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.configoss.OperatorDbt +import io.airbyte.configoss.ResourceRequirements +import io.airbyte.protocol.models.AirbyteTraceMessage +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog +import java.nio.file.Path +import java.util.stream.Stream + +interface NormalizationRunner : AutoCloseable { + /** + * After this method is called, the caller must call close. Previous to this method being called + * a NormalizationRunner can be instantiated and not worry about close being called. + * + * @throws Exception + * - any exception thrown from normalization will be handled gracefully by the caller. + */ + @Throws(Exception::class) + fun start() { + // no-op. + } + + /** + * Prepare a configured folder to run dbt commands from (similar to what is required by + * normalization models) However, this does not run the normalization file generation process or + * dbt at all. This is pulling files from a distant git repository instead of the + * dbt-project-template. + * + * @return true if configuration succeeded. otherwise false. + * @throws Exception + * - any exception thrown from configuration will be handled gracefully by the caller. + */ + @Throws(Exception::class) + fun configureDbt( + jobId: String, + attempt: Int, + jobRoot: Path, + config: JsonNode?, + resourceRequirements: ResourceRequirements?, + dbtConfig: OperatorDbt + ): Boolean + + /** + * Executes normalization of the data in the destination. + * + * @param jobId + * - id of the job that launched normalization + * @param attempt + * - current attempt + * @param jobRoot + * - root dir available for the runner to use. + * @param config + * - configuration for connecting to the destination + * @param catalog + * - the schema of the json blob in the destination. it is used normalize the blob into typed + * columns. + * @param resourceRequirements + * @return true of normalization succeeded. otherwise false. + * @throws Exception + * - any exception thrown from normalization will be handled gracefully by the caller. + */ + @Throws(Exception::class) + fun normalize( + jobId: String, + attempt: Int, + jobRoot: Path, + config: JsonNode, + catalog: ConfiguredAirbyteCatalog, + resourceRequirements: ResourceRequirements? + ): Boolean + + val traceMessages: Stream +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/process/AirbyteIntegrationLauncher.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/process/AirbyteIntegrationLauncher.kt new file mode 100644 index 0000000000000..f429bbc6604ef --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/process/AirbyteIntegrationLauncher.kt @@ -0,0 +1,220 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.process + +import com.google.common.base.Preconditions +import com.google.common.collect.ImmutableMap +import com.google.common.collect.Lists +import com.google.common.collect.Maps +import io.airbyte.commons.features.EnvVariableFeatureFlags +import io.airbyte.commons.features.FeatureFlags +import io.airbyte.configoss.AllowedHosts +import io.airbyte.configoss.ResourceRequirements +import io.airbyte.workers.exception.TestHarnessException +import java.nio.file.Path + +class AirbyteIntegrationLauncher( + private val jobId: String, + private val attempt: Int, + private val imageName: String, + private val processFactory: ProcessFactory, + private val resourceRequirement: ResourceRequirements?, + private val allowedHosts: AllowedHosts?, + /** + * If true, launcher will use a separated isolated pool to run the job. + * + * At this moment, we put custom connector jobs into an isolated pool. + */ + private val useIsolatedPool: Boolean, + private val featureFlags: FeatureFlags +) : IntegrationLauncher { + @Throws(TestHarnessException::class) + override fun spec(jobRoot: Path): Process { + return processFactory.create( + Metadata.SPEC_JOB, + jobId, + attempt, + jobRoot, + imageName, + useIsolatedPool, + false, + emptyMap(), + null, + resourceRequirement, + allowedHosts, + java.util.Map.of(Metadata.JOB_TYPE_KEY, Metadata.SPEC_JOB), + workerMetadata, + emptyMap(), + emptyMap(), + "spec" + ) + } + + @Throws(TestHarnessException::class) + override fun check(jobRoot: Path, configFilename: String, configContents: String): Process { + return processFactory.create( + Metadata.CHECK_JOB, + jobId, + attempt, + jobRoot, + imageName, + useIsolatedPool, + false, + ImmutableMap.of(configFilename, configContents), + null, + resourceRequirement, + allowedHosts, + java.util.Map.of(Metadata.JOB_TYPE_KEY, Metadata.CHECK_JOB), + workerMetadata, + emptyMap(), + emptyMap(), + "check", + CONFIG, + configFilename + ) + } + + @Throws(TestHarnessException::class) + override fun discover(jobRoot: Path, configFilename: String, configContents: String): Process { + return processFactory.create( + Metadata.DISCOVER_JOB, + jobId, + attempt, + jobRoot, + imageName, + useIsolatedPool, + false, + ImmutableMap.of(configFilename, configContents), + null, + resourceRequirement, + allowedHosts, + java.util.Map.of(Metadata.JOB_TYPE_KEY, Metadata.DISCOVER_JOB), + workerMetadata, + emptyMap(), + emptyMap(), + "discover", + CONFIG, + configFilename + ) + } + + @Throws(TestHarnessException::class) + override fun read( + jobRoot: Path, + configFilename: String?, + configContents: String?, + catalogFilename: String?, + catalogContents: String?, + stateFilename: String?, + stateContents: String? + ): Process? { + val arguments: MutableList = + Lists.newArrayList("read", CONFIG, configFilename, "--catalog", catalogFilename) + + val files: MutableMap = HashMap() + files[configFilename] = configContents + files[catalogFilename] = catalogContents + + if (stateFilename != null) { + arguments.add("--state") + arguments.add(stateFilename) + + Preconditions.checkNotNull(stateContents) + files[stateFilename] = stateContents + } + + return processFactory.create( + Metadata.READ_STEP, + jobId, + attempt, + jobRoot, + imageName, + useIsolatedPool, + false, + files, + null, + resourceRequirement, + allowedHosts, + java.util.Map.of( + Metadata.JOB_TYPE_KEY, + Metadata.SYNC_JOB, + Metadata.SYNC_STEP_KEY, + Metadata.READ_STEP + ), + workerMetadata, + emptyMap(), + emptyMap(), + *arguments.toTypedArray() + ) + } + + @Throws(TestHarnessException::class) + override fun write( + jobRoot: Path, + configFilename: String, + configContents: String, + catalogFilename: String, + catalogContents: String, + additionalEnvironmentVariables: Map + ): Process? { + val files: Map = + ImmutableMap.of(configFilename, configContents, catalogFilename, catalogContents) + + return processFactory.create( + Metadata.WRITE_STEP, + jobId, + attempt, + jobRoot, + imageName, + useIsolatedPool, + true, + files, + null, + resourceRequirement, + allowedHosts, + java.util.Map.of( + Metadata.JOB_TYPE_KEY, + Metadata.SYNC_JOB, + Metadata.SYNC_STEP_KEY, + Metadata.WRITE_STEP + ), + workerMetadata, + emptyMap(), + additionalEnvironmentVariables, + "write", + CONFIG, + configFilename, + "--catalog", + catalogFilename + ) + } + + private val workerMetadata: Map + get() = // We've managed to exceed the maximum number of parameters for Map.of(), so use a + // builder + convert + // back to hashmap + Maps.newHashMap( + ImmutableMap.builder() + .put("WORKER_CONNECTOR_IMAGE", imageName) + .put("WORKER_JOB_ID", jobId) + .put("WORKER_JOB_ATTEMPT", attempt.toString()) + .put( + EnvVariableFeatureFlags.AUTO_DETECT_SCHEMA, + featureFlags.autoDetectSchema().toString() + ) + .put( + EnvVariableFeatureFlags.APPLY_FIELD_SELECTION, + featureFlags.applyFieldSelection().toString() + ) + .put( + EnvVariableFeatureFlags.FIELD_SELECTION_WORKSPACES, + featureFlags.fieldSelectionWorkspaces() + ) + .build() + ) + + companion object { + private const val CONFIG = "--config" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/process/DockerProcessFactory.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/process/DockerProcessFactory.kt new file mode 100644 index 0000000000000..10edca57ab7dc --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/process/DockerProcessFactory.kt @@ -0,0 +1,253 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.process + +import com.google.common.annotations.VisibleForTesting +import com.google.common.base.Joiner +import com.google.common.base.Strings +import com.google.common.collect.Lists +import io.airbyte.commons.io.IOs +import io.airbyte.commons.io.LineGobbler +import io.airbyte.commons.map.MoreMaps +import io.airbyte.commons.resources.MoreResources +import io.airbyte.configoss.AllowedHosts +import io.airbyte.configoss.ResourceRequirements +import io.airbyte.workers.TestHarnessUtils +import io.airbyte.workers.exception.TestHarnessException +import java.io.IOException +import java.nio.file.Files +import java.nio.file.Path +import java.util.* +import java.util.concurrent.TimeUnit +import java.util.function.Function +import org.apache.commons.lang3.StringUtils +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +class DockerProcessFactory( + private val workspaceRoot: Path, + private val workspaceMountSource: String?, + private val localMountSource: String?, + private val networkName: String?, + private val envMap: Map +) : ProcessFactory { + private val imageExistsScriptPath: Path + + /** + * Used to construct a Docker process. + * + * @param workspaceRoot real root of workspace + * @param workspaceMountSource workspace volume + * @param localMountSource local volume + * @param networkName docker network + * @param envMap + */ + init { + imageExistsScriptPath = prepareImageExistsScript() + } + + @Throws(TestHarnessException::class) + override fun create( + jobType: String?, + jobId: String, + attempt: Int, + jobRoot: Path, + imageName: String, + usesIsolatedPool: Boolean, + usesStdin: Boolean, + files: Map, + entrypoint: String?, + resourceRequirements: ResourceRequirements?, + allowedHosts: AllowedHosts?, + labels: Map?, + jobMetadata: Map, + internalToExternalPorts: Map?, + additionalEnvironmentVariables: Map, + vararg args: String? + ): Process { + try { + if (!checkImageExists(imageName)) { + throw TestHarnessException("Could not find image: $imageName") + } + + if (!jobRoot.toFile().exists()) { + Files.createDirectory(jobRoot) + } + + for ((key, value) in files) { + IOs.writeFile(jobRoot, key, value) + } + + val cmd: MutableList = + Lists.newArrayList( + "docker", + "run", + "--rm", + "--init", + "-i", + "-w", + rebasePath(jobRoot).toString(), // rebases the job root on the job data mount + "--log-driver", + "none" + ) + val containerName: String = + ProcessFactory.Companion.createProcessName( + imageName, + jobType, + jobId, + attempt, + DOCKER_NAME_LEN_LIMIT + ) + LOGGER.info( + "Creating docker container = {} with resources {} and allowedHosts {}", + containerName, + resourceRequirements, + allowedHosts + ) + cmd.add("--name") + cmd.add(containerName) + cmd.addAll(localDebuggingOptions(containerName)) + + if (networkName != null) { + cmd.add("--network") + cmd.add(networkName) + } + + if (workspaceMountSource != null) { + cmd.add("-v") + cmd.add(String.format("%s:%s", workspaceMountSource, DATA_MOUNT_DESTINATION)) + } + + if (localMountSource != null) { + cmd.add("-v") + cmd.add(String.format("%s:%s", localMountSource, LOCAL_MOUNT_DESTINATION)) + } + + val allEnvMap = MoreMaps.merge(jobMetadata, envMap, additionalEnvironmentVariables) + for ((key, value) in allEnvMap) { + cmd.add("-e") + cmd.add("$key=$value") + } + + if (!Strings.isNullOrEmpty(entrypoint)) { + cmd.add("--entrypoint") + cmd.add(entrypoint) + } + if (resourceRequirements != null) { + if (!Strings.isNullOrEmpty(resourceRequirements.cpuLimit)) { + cmd.add(String.format("--cpus=%s", resourceRequirements.cpuLimit)) + } + if (!Strings.isNullOrEmpty(resourceRequirements.memoryRequest)) { + cmd.add( + String.format("--memory-reservation=%s", resourceRequirements.memoryRequest) + ) + } + if (!Strings.isNullOrEmpty(resourceRequirements.memoryLimit)) { + cmd.add(String.format("--memory=%s", resourceRequirements.memoryLimit)) + } + } + + cmd.add(imageName) + cmd.addAll(Arrays.asList(*args)) + + LOGGER.info("Preparing command: {}", Joiner.on(" ").join(cmd)) + + return ProcessBuilder(cmd).start() + } catch (e: IOException) { + throw TestHarnessException(e.message, e) + } + } + + private fun rebasePath(jobRoot: Path): Path { + val relativePath = workspaceRoot.relativize(jobRoot) + return DATA_MOUNT_DESTINATION.resolve(relativePath) + } + + @VisibleForTesting + @Throws(TestHarnessException::class) + fun checkImageExists(imageName: String?): Boolean { + try { + val process = ProcessBuilder(imageExistsScriptPath.toString(), imageName).start() + LineGobbler.gobble(process.errorStream, { msg: String? -> LOGGER.error(msg) }) + LineGobbler.gobble(process.inputStream, { msg: String? -> LOGGER.info(msg) }) + + TestHarnessUtils.gentleClose(process, 10, TimeUnit.MINUTES) + + if (process.isAlive) { + throw TestHarnessException("Process to check if image exists is stuck. Exiting.") + } else { + return process.exitValue() == 0 + } + } catch (e: IOException) { + throw RuntimeException(e) + } + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(DockerProcessFactory::class.java) + private const val DOCKER_NAME_LEN_LIMIT = 128 + + private val DATA_MOUNT_DESTINATION: Path = Path.of("/data") + private val LOCAL_MOUNT_DESTINATION: Path = Path.of("/local") + private const val IMAGE_EXISTS_SCRIPT = "image_exists.sh" + + private fun prepareImageExistsScript(): Path { + try { + val basePath = Files.createTempDirectory("scripts") + val scriptContents = MoreResources.readResource(IMAGE_EXISTS_SCRIPT) + val scriptPath = IOs.writeFile(basePath, IMAGE_EXISTS_SCRIPT, scriptContents) + if (!scriptPath.toFile().setExecutable(true)) { + throw RuntimeException( + String.format("Could not set %s to executable", scriptPath) + ) + } + return scriptPath + } catch (e: IOException) { + throw RuntimeException(e) + } + } + + /** + * !! ONLY FOR DEBUGGING, SHOULD NOT BE USED IN PRODUCTION !! If you set the + * DEBUG_CONTAINER_IMAGE environment variable, and it matches the image name of a spawned + * container, this method will add the necessary params to connect a debugger. For example, + * to enable this for `destination-bigquery` start the services locally with: + * ``` + * ``` + * VERSION="dev" + * ``` + * DEBUG_CONTAINER_IMAGE="destination-bigquery" docker compose -f docker-compose.yaml -f + * docker-compose.debug.yaml up ``` Additionally you may have to update the image version of your + * target image to 'dev' in the UI of your local airbyte platform. See the + * `docker-compose.debug.yaml` file for more context. + * + * @param containerName the name of the container which could be debugged. + * @return A list with debugging arguments or an empty list + * ``` + */ + fun localDebuggingOptions(containerName: String): List { + val shouldAddDebuggerOptions = + (Optional.ofNullable(System.getenv("DEBUG_CONTAINER_IMAGE")) + .filter { cs: String? -> StringUtils.isNotEmpty(cs) } + .map( + Function { imageName: String? -> + ProcessFactory.Companion.extractShortImageName(containerName) + .startsWith(imageName!!) + } + ) + .orElse(false) && + Optional.ofNullable(System.getenv("DEBUG_CONTAINER_JAVA_OPTS")) + .isPresent) + return if (shouldAddDebuggerOptions) { + java.util.List.of( + "-e", + "JAVA_TOOL_OPTIONS=" + System.getenv("DEBUG_CONTAINER_JAVA_OPTS"), + "-p5005:5005" + ) + } else { + emptyList() + } + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/process/IntegrationLauncher.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/process/IntegrationLauncher.kt new file mode 100644 index 0000000000000..1489860e2e769 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/process/IntegrationLauncher.kt @@ -0,0 +1,65 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.process + +import io.airbyte.workers.exception.TestHarnessException +import java.nio.file.Path + +/** + * This interface provides an abstraction for launching a container that implements the Airbyte + * Protocol. Such containers implement each method that is defined in the Protocol. This class, + * provides java methods to invoke the methods on these containers. + * + * Each method takes in a jobRoot that is a directory where the worker that runs the method can use + * as temporary file system storage. + */ +interface IntegrationLauncher { + @Throws(TestHarnessException::class) fun spec(jobRoot: Path): Process + + @Throws(TestHarnessException::class) + fun check(jobRoot: Path, configFilename: String, configContents: String): Process + + @Throws(TestHarnessException::class) + fun discover(jobRoot: Path, configFilename: String, configContents: String): Process + + @Throws(TestHarnessException::class) + fun read( + jobRoot: Path, + configFilename: String?, + configContents: String?, + catalogFilename: String?, + catalogContents: String?, + stateFilename: String?, + stateContents: String? + ): Process? + + @Throws(TestHarnessException::class) + fun read( + jobRoot: Path, + configFilename: String?, + configContents: String?, + catalogFilename: String?, + catalogContents: String? + ): Process? { + return read( + jobRoot, + configFilename, + configContents, + catalogFilename, + catalogContents, + null, + null + ) + } + + @Throws(TestHarnessException::class) + fun write( + jobRoot: Path, + configFilename: String, + configContents: String, + catalogFilename: String, + catalogContents: String, + additionalEnvironmentVariables: Map + ): Process? +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/process/Metadata.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/process/Metadata.kt new file mode 100644 index 0000000000000..808169ba67e4c --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/process/Metadata.kt @@ -0,0 +1,35 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.process + +/** + * The following variables help, either via names or labels, add metadata to processes actually + * running operations to ease operations. + */ +object Metadata { + /** General Metadata */ + const val JOB_LABEL_KEY: String = "job_id" + const val ATTEMPT_LABEL_KEY: String = "attempt_id" + const val WORKER_POD_LABEL_KEY: String = "airbyte" + const val WORKER_POD_LABEL_VALUE: String = "job-pod" + const val CONNECTION_ID_LABEL_KEY: String = "connection_id" + + /** These are more readable forms of [io.airbyte.config.JobTypeResourceLimit.JobType]. */ + const val JOB_TYPE_KEY: String = "job_type" + const val SYNC_JOB: String = "sync" + const val SPEC_JOB: String = "spec" + const val CHECK_JOB: String = "check" + const val DISCOVER_JOB: String = "discover" + + /** + * A sync job can actually be broken down into the following steps. Try to be as precise as + * possible with naming/labels to help operations. + */ + const val SYNC_STEP_KEY: String = "sync_step" + const val READ_STEP: String = "read" + const val WRITE_STEP: String = "write" + const val NORMALIZE_STEP: String = "normalize" + const val CUSTOM_STEP: String = "custom" + const val ORCHESTRATOR_STEP: String = "orchestrator" +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/process/ProcessFactory.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/process/ProcessFactory.kt new file mode 100644 index 0000000000000..0ced27e656e15 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/process/ProcessFactory.kt @@ -0,0 +1,127 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.process + +import io.airbyte.configoss.AllowedHosts +import io.airbyte.configoss.ResourceRequirements +import io.airbyte.workers.exception.TestHarnessException +import java.nio.file.Path +import java.util.* +import java.util.regex.Pattern +import org.apache.commons.lang3.RandomStringUtils + +interface ProcessFactory { + /** + * Creates a ProcessBuilder to run a program in a new Process. + * + * @param jobType type of job to add to name for easier operational processes. + * @param jobId job Id + * @param attempt attempt Id + * @param jobPath Workspace directory to run the process from. + * @param imageName Docker image name to start the process from. + * @param usesIsolatedPool whether to use isolated pool to run the jobs. + * @param files File name to contents map that will be written into the working dir of the + * process prior to execution. + * @param entrypoint If not null, the default entrypoint program of the docker image can be + * changed by this argument. + * @param resourceRequirements CPU and RAM to assign to the created process. + * @param labels Labels to assign to the created Kube pod, if any. Ignore for docker. + * @param jobMetadata Job metadata that will be passed to the created process as environment + * variables. + * @param additionalEnvironmentVariables + * @param args Arguments to pass to the docker image being run in the new process. + * @return ProcessBuilder object to run the process. + * @throws TestHarnessException + */ + @Throws(TestHarnessException::class) + fun create( + jobType: String?, + jobId: String, + attempt: Int, + jobPath: Path, + imageName: String, + usesIsolatedPool: Boolean, + usesStdin: Boolean, + files: Map, + entrypoint: String?, + resourceRequirements: ResourceRequirements?, + allowedHosts: AllowedHosts?, + labels: Map?, + jobMetadata: Map, + portMapping: Map?, + additionalEnvironmentVariables: Map, + vararg args: String? + ): Process + + companion object { + /** + * Docker image names are by convention separated by slashes. The last portion is the + * image's name. This is followed by a colon and a version number. e.g. airbyte/scheduler:v1 + * or gcr.io/my-project/image-name:v2. + * + * With these two facts, attempt to construct a unique process name with the image name + * present that can be used by the factories implementing this interface for easier + * operations. + */ + fun createProcessName( + fullImagePath: String, + jobType: String?, + jobId: String, + attempt: Int, + lenLimit: Int + ): String { + var imageName = extractShortImageName(fullImagePath) + val randSuffix = RandomStringUtils.randomAlphabetic(5).lowercase(Locale.getDefault()) + val suffix = "$jobType-$jobId-$attempt-$randSuffix" + + var processName = "$imageName-$suffix" + if (processName.length > lenLimit) { + val extra = processName.length - lenLimit + imageName = imageName.substring(extra) + processName = "$imageName-$suffix" + } + + // Kubernetes pod names must start with an alphabetic character while Docker names + // accept + // alphanumeric. + // Use the stricter convention for simplicity. + val m = ALPHABETIC.matcher(processName) + // Since we add sync-UUID as a suffix a couple of lines up, there will always be a + // substring + // starting with an alphabetic character. + // If the image name is a no-op, this function should always return `sync-UUID` at the + // minimum. + m.find() + return processName.substring(m.start()) + } + + /** + * Docker image names are by convention separated by slashes. The last portion is the + * image's name. This is followed by a colon and a version number. e.g. airbyte/scheduler:v1 + * or gcr.io/my-project/my-project:v2. + * + * @param fullImagePath the image name with repository and version ex + * gcr.io/my-project/image-name:v2 + * @return the image name without the repo and version, ex. image-name + */ + fun extractShortImageName(fullImagePath: String): String { + val noVersion = + fullImagePath + .split(VERSION_DELIMITER.toRegex()) + .dropLastWhile { it.isEmpty() } + .toTypedArray()[0] + + val nameParts = + noVersion + .split(DOCKER_DELIMITER.toRegex()) + .dropLastWhile { it.isEmpty() } + .toTypedArray() + return nameParts[nameParts.size - 1] + } + + const val VERSION_DELIMITER: String = ":" + const val DOCKER_DELIMITER: String = "/" + val ALPHABETIC: Pattern = Pattern.compile("[a-zA-Z]+") + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/test_utils/AirbyteMessageUtils.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/test_utils/AirbyteMessageUtils.kt new file mode 100644 index 0000000000000..c16ffdabd392c --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/test_utils/AirbyteMessageUtils.kt @@ -0,0 +1,179 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.test_utils + +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.collect.ImmutableMap +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.* +import java.time.Instant + +object AirbyteMessageUtils { + fun createRecordMessage( + tableName: String?, + record: JsonNode?, + timeExtracted: Instant + ): AirbyteMessage { + return AirbyteMessage() + .withType(AirbyteMessage.Type.RECORD) + .withRecord( + AirbyteRecordMessage() + .withData(record) + .withStream(tableName) + .withEmittedAt(timeExtracted.epochSecond) + ) + } + + fun createLogMessage(level: AirbyteLogMessage.Level?, message: String?): AirbyteMessage { + return AirbyteMessage() + .withType(AirbyteMessage.Type.LOG) + .withLog(AirbyteLogMessage().withLevel(level).withMessage(message)) + } + + fun createRecordMessage(tableName: String?, key: String, value: String): AirbyteMessage { + return createRecordMessage(tableName, ImmutableMap.of(key, value)) + } + + fun createRecordMessage(tableName: String?, key: String, value: Int): AirbyteMessage { + return createRecordMessage(tableName, ImmutableMap.of(key, value)) + } + + fun createRecordMessage(tableName: String?, record: Map?): AirbyteMessage { + return createRecordMessage(tableName, Jsons.jsonNode(record), Instant.EPOCH) + } + + fun createRecordMessage(streamName: String?, recordData: Int): AirbyteMessage { + return AirbyteMessage() + .withType(AirbyteMessage.Type.RECORD) + .withRecord( + AirbyteRecordMessage().withStream(streamName).withData(Jsons.jsonNode(recordData)) + ) + } + + fun createStateMessage(stateData: Int): AirbyteMessage { + return AirbyteMessage() + .withType(AirbyteMessage.Type.STATE) + .withState(AirbyteStateMessage().withData(Jsons.jsonNode(stateData))) + } + + fun createStateMessage(key: String, value: String): AirbyteMessage { + return AirbyteMessage() + .withType(AirbyteMessage.Type.STATE) + .withState(AirbyteStateMessage().withData(Jsons.jsonNode(ImmutableMap.of(key, value)))) + } + + fun createStreamStateMessage(streamName: String?, stateData: Int): AirbyteStateMessage { + return AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream(createStreamState(streamName).withStreamState(Jsons.jsonNode(stateData))) + } + + fun createGlobalStateMessage(stateData: Int, vararg streamNames: String?): AirbyteMessage { + val streamStates: MutableList = ArrayList() + for (streamName in streamNames) { + streamStates.add( + createStreamState(streamName).withStreamState(Jsons.jsonNode(stateData)) + ) + } + return AirbyteMessage() + .withType(AirbyteMessage.Type.STATE) + .withState( + AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + .withGlobal(AirbyteGlobalState().withStreamStates(streamStates)) + ) + } + + fun createStreamState(streamName: String?): AirbyteStreamState { + return AirbyteStreamState().withStreamDescriptor(StreamDescriptor().withName(streamName)) + } + + fun createStreamEstimateMessage( + name: String?, + namespace: String?, + byteEst: Long, + rowEst: Long + ): AirbyteMessage { + return createEstimateMessage( + AirbyteEstimateTraceMessage.Type.STREAM, + name, + namespace, + byteEst, + rowEst + ) + } + + fun createSyncEstimateMessage(byteEst: Long, rowEst: Long): AirbyteMessage { + return createEstimateMessage( + AirbyteEstimateTraceMessage.Type.SYNC, + null, + null, + byteEst, + rowEst + ) + } + + fun createEstimateMessage( + type: AirbyteEstimateTraceMessage.Type?, + name: String?, + namespace: String?, + byteEst: Long, + rowEst: Long + ): AirbyteMessage { + val est = + AirbyteEstimateTraceMessage() + .withType(type) + .withByteEstimate(byteEst) + .withRowEstimate(rowEst) + + if (name != null) { + est.withName(name) + } + if (namespace != null) { + est.withNamespace(namespace) + } + + return AirbyteMessage() + .withType(AirbyteMessage.Type.TRACE) + .withTrace( + AirbyteTraceMessage().withType(AirbyteTraceMessage.Type.ESTIMATE).withEstimate(est) + ) + } + + fun createErrorMessage(message: String?, emittedAt: Double?): AirbyteMessage { + return AirbyteMessage() + .withType(AirbyteMessage.Type.TRACE) + .withTrace(createErrorTraceMessage(message, emittedAt)) + } + + @JvmOverloads + fun createErrorTraceMessage( + message: String?, + emittedAt: Double?, + failureType: AirbyteErrorTraceMessage.FailureType? = null + ): AirbyteTraceMessage { + val msg = + AirbyteTraceMessage() + .withType(AirbyteTraceMessage.Type.ERROR) + .withError(AirbyteErrorTraceMessage().withMessage(message)) + .withEmittedAt(emittedAt) + + if (failureType != null) { + msg.error.withFailureType(failureType) + } + + return msg + } + + fun createConfigControlMessage(config: Config?, emittedAt: Double?): AirbyteMessage { + return AirbyteMessage() + .withType(AirbyteMessage.Type.CONTROL) + .withControl( + AirbyteControlMessage() + .withEmittedAt(emittedAt) + .withType(AirbyteControlMessage.Type.CONNECTOR_CONFIG) + .withConnectorConfig(AirbyteControlConnectorConfigMessage().withConfig(config)) + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/test_utils/TestConfigHelpers.kt b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/test_utils/TestConfigHelpers.kt new file mode 100644 index 0000000000000..1a856f4f75e81 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/kotlin/io/airbyte/workers/test_utils/TestConfigHelpers.kt @@ -0,0 +1,131 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.test_utils + +import io.airbyte.commons.json.Jsons +import io.airbyte.configoss.* +import io.airbyte.protocol.models.* +import java.util.* +import java.util.List +import java.util.Map +import org.apache.commons.lang3.tuple.ImmutablePair + +object TestConfigHelpers { + private const val CONNECTION_NAME = "favorite_color_pipe" + private const val STREAM_NAME = "user_preferences" + private const val FIELD_NAME = "favorite_color" + private const val LAST_SYNC_TIME: Long = 1598565106 + + @JvmOverloads + fun createSyncConfig( + multipleNamespaces: Boolean = false + ): ImmutablePair { + val workspaceId = UUID.randomUUID() + val sourceDefinitionId = UUID.randomUUID() + val sourceId = UUID.randomUUID() + val destinationDefinitionId = UUID.randomUUID() + val destinationId = UUID.randomUUID() + val normalizationOperationId = UUID.randomUUID() + val dbtOperationId = UUID.randomUUID() + + val sourceConnection = Jsons.jsonNode(Map.of("apiKey", "123", "region", "us-east")) + + val destinationConnection = + Jsons.jsonNode(Map.of("username", "airbyte", "token", "anau81b")) + + val sourceConnectionConfig = + SourceConnection() + .withConfiguration(sourceConnection) + .withWorkspaceId(workspaceId) + .withSourceDefinitionId(sourceDefinitionId) + .withSourceId(sourceId) + .withTombstone(false) + + val destinationConnectionConfig = + DestinationConnection() + .withConfiguration(destinationConnection) + .withWorkspaceId(workspaceId) + .withDestinationDefinitionId(destinationDefinitionId) + .withDestinationId(destinationId) + .withTombstone(false) + + val normalizationOperation = + StandardSyncOperation() + .withOperationId(normalizationOperationId) + .withName("Normalization") + .withOperatorType(StandardSyncOperation.OperatorType.NORMALIZATION) + .withOperatorNormalization( + OperatorNormalization().withOption(OperatorNormalization.Option.BASIC) + ) + .withTombstone(false) + + val customDbtOperation = + StandardSyncOperation() + .withOperationId(dbtOperationId) + .withName("Custom Transformation") + .withOperatorType(StandardSyncOperation.OperatorType.DBT) + .withOperatorDbt( + OperatorDbt() + .withDockerImage("docker") + .withDbtArguments("--help") + .withGitRepoUrl("git url") + .withGitRepoBranch("git url") + ) + .withTombstone(false) + + val catalog = ConfiguredAirbyteCatalog() + if (multipleNamespaces) { + val streamOne = + ConfiguredAirbyteStream() + .withStream( + CatalogHelpers.createAirbyteStream( + STREAM_NAME, + "namespace", + Field.of(FIELD_NAME, JsonSchemaType.STRING) + ) + ) + val streamTwo = + ConfiguredAirbyteStream() + .withStream( + CatalogHelpers.createAirbyteStream( + STREAM_NAME, + "namespace2", + Field.of(FIELD_NAME, JsonSchemaType.STRING) + ) + ) + + val streams = List.of(streamOne, streamTwo) + catalog.withStreams(streams) + } else { + val stream = + ConfiguredAirbyteStream() + .withStream( + CatalogHelpers.createAirbyteStream( + STREAM_NAME, + Field.of(FIELD_NAME, JsonSchemaType.STRING) + ) + ) + catalog.withStreams(listOf(stream)) + } + + val stateValue = Jsons.serialize(Map.of("lastSync", LAST_SYNC_TIME.toString())) + + val state = State().withState(Jsons.jsonNode(stateValue)) + + val syncInput = + StandardSyncInput() + .withNamespaceDefinition(JobSyncConfig.NamespaceDefinitionType.SOURCE) + .withPrefix(CONNECTION_NAME) + .withSourceId(sourceId) + .withDestinationId(destinationId) + .withDestinationConfiguration(destinationConnectionConfig.configuration) + .withCatalog(catalog) + .withSourceConfiguration(sourceConnectionConfig.configuration) + .withState(state) + .withOperationSequence(List.of(normalizationOperation, customDbtOperation)) + .withWorkspaceId(workspaceId) + + return ImmutablePair(null, syncInput) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/build.gradle b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/build.gradle index 2f86620d7ec63..dbb42326adea9 100644 --- a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/build.gradle +++ b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/build.gradle @@ -8,13 +8,26 @@ java { } } +compileKotlin { + compilerOptions { + allWarningsAsErrors = false + } +} + +compileTestFixturesKotlin { + compilerOptions { + allWarningsAsErrors = false + } +} + dependencies { - implementation project(':airbyte-cdk:java:airbyte-cdk:dependencies') - implementation project(':airbyte-cdk:java:airbyte-cdk:core') - api project(':airbyte-cdk:java:airbyte-cdk:s3-destinations') + implementation project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-dependencies') + implementation project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-core') + api project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-s3-destinations') api 'com.google.cloud:google-cloud-storage:2.32.1' - testFixturesApi project(':airbyte-cdk:java:airbyte-cdk:s3-destinations') - testFixturesApi testFixtures(project(':airbyte-cdk:java:airbyte-cdk:s3-destinations')) + testFixturesApi testFixtures(project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-core')) + testFixturesApi project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-s3-destinations') + testFixturesApi testFixtures(project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-s3-destinations')) } diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/BaseGcsDestination.java b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/BaseGcsDestination.java deleted file mode 100644 index 4dedbba45ab0a..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/BaseGcsDestination.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.gcs; - -import static io.airbyte.cdk.integrations.base.errors.messages.ErrorMessage.getErrorMessage; - -import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.model.AmazonS3Exception; -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.BaseConnector; -import io.airbyte.cdk.integrations.base.AirbyteMessageConsumer; -import io.airbyte.cdk.integrations.base.AirbyteTraceMessageUtility; -import io.airbyte.cdk.integrations.base.Destination; -import io.airbyte.cdk.integrations.destination.NamingConventionTransformer; -import io.airbyte.cdk.integrations.destination.record_buffer.FileBuffer; -import io.airbyte.cdk.integrations.destination.s3.S3BaseChecks; -import io.airbyte.cdk.integrations.destination.s3.S3ConsumerFactory; -import io.airbyte.cdk.integrations.destination.s3.SerializedBufferFactory; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus.Status; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.util.function.Consumer; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public abstract class BaseGcsDestination extends BaseConnector implements Destination { - - private static final Logger LOGGER = LoggerFactory.getLogger(BaseGcsDestination.class); - public static final String EXPECTED_ROLES = "storage.multipartUploads.abort, storage.multipartUploads.create, " - + "storage.objects.create, storage.objects.delete, storage.objects.get, storage.objects.list"; - - private final NamingConventionTransformer nameTransformer; - - public BaseGcsDestination() { - this.nameTransformer = new GcsNameTransformer(); - } - - @Override - public AirbyteConnectionStatus check(final JsonNode config) { - try { - final GcsDestinationConfig destinationConfig = GcsDestinationConfig.getGcsDestinationConfig(config); - final AmazonS3 s3Client = destinationConfig.getS3Client(); - - // Test single upload (for small files) permissions - S3BaseChecks.testSingleUpload(s3Client, destinationConfig.getBucketName(), destinationConfig.getBucketPath()); - - // Test multipart upload with stream transfer manager - S3BaseChecks.testMultipartUpload(s3Client, destinationConfig.getBucketName(), destinationConfig.getBucketPath()); - - return new AirbyteConnectionStatus().withStatus(Status.SUCCEEDED); - } catch (final AmazonS3Exception e) { - LOGGER.error("Exception attempting to access the Gcs bucket", e); - final String message = getErrorMessage(e.getErrorCode(), 0, e.getMessage(), e); - AirbyteTraceMessageUtility.emitConfigErrorTrace(e, message); - return new AirbyteConnectionStatus() - .withStatus(Status.FAILED) - .withMessage(message); - } catch (final Exception e) { - LOGGER.error("Exception attempting to access the Gcs bucket: {}. Please make sure you account has all of these roles: " + EXPECTED_ROLES, e); - AirbyteTraceMessageUtility.emitConfigErrorTrace(e, e.getMessage()); - return new AirbyteConnectionStatus() - .withStatus(Status.FAILED) - .withMessage("Could not connect to the Gcs bucket with the provided configuration. \n" + e - .getMessage()); - } - } - - @Override - public AirbyteMessageConsumer getConsumer(final JsonNode config, - final ConfiguredAirbyteCatalog configuredCatalog, - final Consumer outputRecordCollector) { - final GcsDestinationConfig gcsConfig = GcsDestinationConfig.getGcsDestinationConfig(config); - return new S3ConsumerFactory().create( - outputRecordCollector, - new GcsStorageOperations(nameTransformer, gcsConfig.getS3Client(), gcsConfig), - nameTransformer, - SerializedBufferFactory.getCreateFunction(gcsConfig, FileBuffer::new), - gcsConfig, - configuredCatalog); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/GcsDestinationConfig.java b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/GcsDestinationConfig.java deleted file mode 100644 index 2f7865057584f..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/GcsDestinationConfig.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.gcs; - -import com.amazonaws.auth.AWSStaticCredentialsProvider; -import com.amazonaws.auth.BasicAWSCredentials; -import com.amazonaws.client.builder.AwsClientBuilder; -import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.AmazonS3ClientBuilder; -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.destination.gcs.credential.GcsCredentialConfig; -import io.airbyte.cdk.integrations.destination.gcs.credential.GcsCredentialConfigs; -import io.airbyte.cdk.integrations.destination.gcs.credential.GcsHmacKeyCredentialConfig; -import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfig; -import io.airbyte.cdk.integrations.destination.s3.S3DestinationConstants; -import io.airbyte.cdk.integrations.destination.s3.S3FormatConfig; -import io.airbyte.cdk.integrations.destination.s3.S3FormatConfigs; -import io.airbyte.cdk.integrations.destination.s3.S3StorageOperations; - -/** - * Currently we always reuse the S3 client for GCS. So the GCS config extends from the S3 config. - * This may change in the future. - */ -public class GcsDestinationConfig extends S3DestinationConfig { - - private static final String GCS_ENDPOINT = "https://storage.googleapis.com"; - - private final GcsCredentialConfig credentialConfig; - - public GcsDestinationConfig(final String bucketName, - final String bucketPath, - final String bucketRegion, - final GcsCredentialConfig credentialConfig, - final S3FormatConfig formatConfig) { - - super(GCS_ENDPOINT, - bucketName, - bucketPath, - bucketRegion, - S3DestinationConstants.DEFAULT_PATH_FORMAT, - credentialConfig.getS3CredentialConfig().orElseThrow(), - formatConfig, - null, - null, - false, - S3StorageOperations.DEFAULT_UPLOAD_THREADS); - - this.credentialConfig = credentialConfig; - } - - public static GcsDestinationConfig getGcsDestinationConfig(final JsonNode config) { - return new GcsDestinationConfig( - config.get("gcs_bucket_name").asText(), - config.get("gcs_bucket_path").asText(), - config.get("gcs_bucket_region").asText(), - GcsCredentialConfigs.getCredentialConfig(config), - S3FormatConfigs.getS3FormatConfig(config)); - } - - @Override - protected AmazonS3 createS3Client() { - switch (credentialConfig.getCredentialType()) { - case HMAC_KEY -> { - final GcsHmacKeyCredentialConfig hmacKeyCredential = (GcsHmacKeyCredentialConfig) credentialConfig; - final BasicAWSCredentials awsCreds = new BasicAWSCredentials(hmacKeyCredential.getHmacKeyAccessId(), hmacKeyCredential.getHmacKeySecret()); - - return AmazonS3ClientBuilder.standard() - .withEndpointConfiguration( - new AwsClientBuilder.EndpointConfiguration(GCS_ENDPOINT, getBucketRegion())) - .withCredentials(new AWSStaticCredentialsProvider(awsCreds)) - .build(); - } - default -> throw new IllegalArgumentException("Unsupported credential type: " + credentialConfig.getCredentialType().name()); - } - } - - public GcsCredentialConfig getGcsCredentialConfig() { - return credentialConfig; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/GcsNameTransformer.java b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/GcsNameTransformer.java deleted file mode 100644 index c2cce517070ce..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/GcsNameTransformer.java +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.gcs; - -import io.airbyte.cdk.integrations.destination.s3.util.S3NameTransformer; - -public class GcsNameTransformer extends S3NameTransformer { - -} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/GcsStorageOperations.java b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/GcsStorageOperations.java deleted file mode 100644 index 7233ac4fdb69f..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/GcsStorageOperations.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.gcs; - -import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.model.DeleteObjectsRequest.KeyVersion; -import io.airbyte.cdk.integrations.destination.NamingConventionTransformer; -import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfig; -import io.airbyte.cdk.integrations.destination.s3.S3StorageOperations; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class GcsStorageOperations extends S3StorageOperations { - - private static final Logger LOGGER = LoggerFactory.getLogger(GcsStorageOperations.class); - - public GcsStorageOperations(final NamingConventionTransformer nameTransformer, - final AmazonS3 s3Client, - final S3DestinationConfig s3Config) { - super(nameTransformer, s3Client, s3Config); - } - - /** - * GCS only supports the legacy AmazonS3#doesBucketExist method. - */ - @Override - protected boolean doesBucketExist(final String bucket) { - return s3Client.doesBucketExist(bucket); - } - - /** - * This method is overridden because GCS doesn't accept request to delete multiple objects. The only - * difference is that the AmazonS3#deleteObjects method is replaced with AmazonS3#deleteObject. - */ - @Override - protected void cleanUpObjects(final String bucket, final List keysToDelete) { - for (final KeyVersion keyToDelete : keysToDelete) { - LOGGER.info("Deleting object {}", keyToDelete.getKey()); - s3Client.deleteObject(bucket, keyToDelete.getKey()); - } - } - - @Override - protected Map getMetadataMapping() { - return new HashMap<>(); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/avro/GcsAvroWriter.java b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/avro/GcsAvroWriter.java deleted file mode 100644 index bd3b5a1813ded..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/avro/GcsAvroWriter.java +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.gcs.avro; - -import static io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory.DEFAULT_PART_SIZE_MB; - -import alex.mojaki.s3upload.MultiPartOutputStream; -import alex.mojaki.s3upload.StreamTransferManager; -import com.amazonaws.services.s3.AmazonS3; -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.destination.gcs.GcsDestinationConfig; -import io.airbyte.cdk.integrations.destination.gcs.util.GcsUtils; -import io.airbyte.cdk.integrations.destination.gcs.writer.BaseGcsWriter; -import io.airbyte.cdk.integrations.destination.s3.S3Format; -import io.airbyte.cdk.integrations.destination.s3.avro.AvroRecordFactory; -import io.airbyte.cdk.integrations.destination.s3.avro.JsonToAvroSchemaConverter; -import io.airbyte.cdk.integrations.destination.s3.avro.S3AvroFormatConfig; -import io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory; -import io.airbyte.cdk.integrations.destination.s3.writer.DestinationFileWriter; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import java.io.IOException; -import java.sql.Timestamp; -import java.util.UUID; -import org.apache.avro.Schema; -import org.apache.avro.file.DataFileWriter; -import org.apache.avro.generic.GenericData.Record; -import org.apache.avro.generic.GenericDatumWriter; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import tech.allegro.schema.json2avro.converter.JsonAvroConverter; - -public class GcsAvroWriter extends BaseGcsWriter implements DestinationFileWriter { - - protected static final Logger LOGGER = LoggerFactory.getLogger(GcsAvroWriter.class); - - private final AvroRecordFactory avroRecordFactory; - private final StreamTransferManager uploadManager; - private final MultiPartOutputStream outputStream; - private final DataFileWriter dataFileWriter; - private final String gcsFileLocation; - private final String objectKey; - - public GcsAvroWriter(final GcsDestinationConfig config, - final AmazonS3 s3Client, - final ConfiguredAirbyteStream configuredStream, - final Timestamp uploadTimestamp, - final JsonAvroConverter converter) - throws IOException { - this(config, s3Client, configuredStream, uploadTimestamp, converter, null); - } - - public GcsAvroWriter(final GcsDestinationConfig config, - final AmazonS3 s3Client, - final ConfiguredAirbyteStream configuredStream, - final Timestamp uploadTimestamp, - final JsonAvroConverter converter, - final JsonNode jsonSchema) - throws IOException { - super(config, s3Client, configuredStream); - - final Schema schema = jsonSchema == null - ? GcsUtils.getDefaultAvroSchema(stream.getName(), stream.getNamespace(), true, false) - : new JsonToAvroSchemaConverter().getAvroSchema(jsonSchema, stream.getName(), - stream.getNamespace(), true, false, false, true); - LOGGER.info("Avro schema for stream {}: {}", stream.getName(), schema.toString(false)); - - final String outputFilename = BaseGcsWriter.getOutputFilename(uploadTimestamp, S3Format.AVRO); - objectKey = String.join("/", outputPrefix, outputFilename); - gcsFileLocation = String.format("gs://%s/%s", config.getBucketName(), objectKey); - - LOGGER.info("Full GCS path for stream '{}': {}/{}", stream.getName(), config.getBucketName(), - objectKey); - - this.avroRecordFactory = new AvroRecordFactory(schema, converter); - this.uploadManager = StreamTransferManagerFactory - .create(config.getBucketName(), objectKey, s3Client) - .setPartSize((long) DEFAULT_PART_SIZE_MB) - .get(); - // We only need one output stream as we only have one input stream. This is reasonably performant. - this.outputStream = uploadManager.getMultiPartOutputStreams().get(0); - - final S3AvroFormatConfig formatConfig = (S3AvroFormatConfig) config.getFormatConfig(); - // The DataFileWriter always uses binary encoding. - // If json encoding is needed in the future, use the GenericDatumWriter directly. - this.dataFileWriter = new DataFileWriter<>(new GenericDatumWriter()) - .setCodec(formatConfig.getCodecFactory()) - .create(schema, outputStream); - } - - @Override - public void write(final UUID id, final AirbyteRecordMessage recordMessage) throws IOException { - dataFileWriter.append(avroRecordFactory.getAvroRecord(id, recordMessage)); - } - - @Override - public void write(final JsonNode formattedData) throws IOException { - final Record record = avroRecordFactory.getAvroRecord(formattedData); - dataFileWriter.append(record); - } - - @Override - public String getFileLocation() { - return gcsFileLocation; - } - - @Override - protected void closeWhenSucceed() throws IOException { - dataFileWriter.close(); - outputStream.close(); - uploadManager.complete(); - } - - @Override - protected void closeWhenFail() throws IOException { - dataFileWriter.close(); - outputStream.close(); - uploadManager.abort(); - } - - @Override - public S3Format getFileFormat() { - return S3Format.AVRO; - } - - @Override - public String getOutputPath() { - return objectKey; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/credential/GcsCredentialConfig.java b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/credential/GcsCredentialConfig.java deleted file mode 100644 index 0166337f33c68..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/credential/GcsCredentialConfig.java +++ /dev/null @@ -1,15 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.gcs.credential; - -import io.airbyte.cdk.integrations.destination.s3.credential.BlobStorageCredentialConfig; -import io.airbyte.cdk.integrations.destination.s3.credential.S3CredentialConfig; -import java.util.Optional; - -public interface GcsCredentialConfig extends BlobStorageCredentialConfig { - - Optional getS3CredentialConfig(); - -} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/credential/GcsCredentialConfigs.java b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/credential/GcsCredentialConfigs.java deleted file mode 100644 index 9241a6af94f22..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/credential/GcsCredentialConfigs.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.gcs.credential; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.json.Jsons; - -public class GcsCredentialConfigs { - - public static GcsCredentialConfig getCredentialConfig(final JsonNode config) { - final JsonNode credentialConfig = config.get("credential"); - final GcsCredentialType credentialType = GcsCredentialType.valueOf(credentialConfig.get("credential_type").asText().toUpperCase()); - - if (credentialType == GcsCredentialType.HMAC_KEY) { - return new GcsHmacKeyCredentialConfig(credentialConfig); - } - throw new RuntimeException("Unexpected credential: " + Jsons.serialize(credentialConfig)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/credential/GcsCredentialType.java b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/credential/GcsCredentialType.java deleted file mode 100644 index a44f77241e39a..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/credential/GcsCredentialType.java +++ /dev/null @@ -1,9 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.gcs.credential; - -public enum GcsCredentialType { - HMAC_KEY -} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/credential/GcsHmacKeyCredentialConfig.java b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/credential/GcsHmacKeyCredentialConfig.java deleted file mode 100644 index 18bc1da6df61d..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/credential/GcsHmacKeyCredentialConfig.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.gcs.credential; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.destination.s3.credential.S3AccessKeyCredentialConfig; -import io.airbyte.cdk.integrations.destination.s3.credential.S3CredentialConfig; -import java.util.Optional; - -public class GcsHmacKeyCredentialConfig implements GcsCredentialConfig { - - private final String hmacKeyAccessId; - private final String hmacKeySecret; - - public GcsHmacKeyCredentialConfig(final JsonNode credentialConfig) { - this.hmacKeyAccessId = credentialConfig.get("hmac_key_access_id").asText(); - this.hmacKeySecret = credentialConfig.get("hmac_key_secret").asText(); - } - - public GcsHmacKeyCredentialConfig(final String hmacKeyAccessId, final String hmacKeySecret) { - this.hmacKeyAccessId = hmacKeyAccessId; - this.hmacKeySecret = hmacKeySecret; - } - - public String getHmacKeyAccessId() { - return hmacKeyAccessId; - } - - public String getHmacKeySecret() { - return hmacKeySecret; - } - - @Override - public GcsCredentialType getCredentialType() { - return GcsCredentialType.HMAC_KEY; - } - - @Override - public Optional getS3CredentialConfig() { - return Optional.of(new S3AccessKeyCredentialConfig(hmacKeyAccessId, hmacKeySecret)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/csv/GcsCsvWriter.java b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/csv/GcsCsvWriter.java deleted file mode 100644 index 097c457e7d2dd..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/csv/GcsCsvWriter.java +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.gcs.csv; - -import static io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory.DEFAULT_PART_SIZE_MB; - -import alex.mojaki.s3upload.MultiPartOutputStream; -import alex.mojaki.s3upload.StreamTransferManager; -import com.amazonaws.services.s3.AmazonS3; -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.destination.gcs.GcsDestinationConfig; -import io.airbyte.cdk.integrations.destination.gcs.writer.BaseGcsWriter; -import io.airbyte.cdk.integrations.destination.s3.S3Format; -import io.airbyte.cdk.integrations.destination.s3.csv.CsvSheetGenerator; -import io.airbyte.cdk.integrations.destination.s3.csv.S3CsvFormatConfig; -import io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory; -import io.airbyte.cdk.integrations.destination.s3.writer.DestinationFileWriter; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import java.io.IOException; -import java.io.PrintWriter; -import java.nio.charset.StandardCharsets; -import java.sql.Timestamp; -import java.util.UUID; -import org.apache.commons.csv.CSVFormat; -import org.apache.commons.csv.CSVPrinter; -import org.apache.commons.csv.QuoteMode; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class GcsCsvWriter extends BaseGcsWriter implements DestinationFileWriter { - - private static final Logger LOGGER = LoggerFactory.getLogger(GcsCsvWriter.class); - - private final CsvSheetGenerator csvSheetGenerator; - private final StreamTransferManager uploadManager; - private final MultiPartOutputStream outputStream; - private final CSVPrinter csvPrinter; - private final String gcsFileLocation; - private final String objectKey; - - public GcsCsvWriter(final GcsDestinationConfig config, - final AmazonS3 s3Client, - final ConfiguredAirbyteStream configuredStream, - final Timestamp uploadTimestamp) - throws IOException { - super(config, s3Client, configuredStream); - - final S3CsvFormatConfig formatConfig = (S3CsvFormatConfig) config.getFormatConfig(); - this.csvSheetGenerator = CsvSheetGenerator.Factory.create(configuredStream.getStream().getJsonSchema(), formatConfig); - - final String outputFilename = BaseGcsWriter.getOutputFilename(uploadTimestamp, S3Format.CSV); - objectKey = String.join("/", outputPrefix, outputFilename); - gcsFileLocation = String.format("gs://%s/%s", config.getBucketName(), objectKey); - - LOGGER.info("Full GCS path for stream '{}': {}/{}", stream.getName(), config.getBucketName(), - objectKey); - - this.uploadManager = StreamTransferManagerFactory - .create(config.getBucketName(), objectKey, s3Client) - .setPartSize((long) DEFAULT_PART_SIZE_MB) - .get(); - // We only need one output stream as we only have one input stream. This is reasonably performant. - this.outputStream = uploadManager.getMultiPartOutputStreams().get(0); - this.csvPrinter = new CSVPrinter(new PrintWriter(outputStream, true, StandardCharsets.UTF_8), - CSVFormat.DEFAULT.withQuoteMode(QuoteMode.ALL) - .withHeader(csvSheetGenerator.getHeaderRow().toArray(new String[0]))); - } - - @Override - public void write(final UUID id, final AirbyteRecordMessage recordMessage) throws IOException { - csvPrinter.printRecord(csvSheetGenerator.getDataRow(id, recordMessage)); - } - - @Override - public void write(final JsonNode formattedData) throws IOException { - csvPrinter.printRecord(csvSheetGenerator.getDataRow(formattedData)); - } - - @Override - protected void closeWhenSucceed() throws IOException { - csvPrinter.close(); - outputStream.close(); - uploadManager.complete(); - } - - @Override - protected void closeWhenFail() throws IOException { - csvPrinter.close(); - outputStream.close(); - uploadManager.abort(); - } - - @Override - public String getFileLocation() { - return gcsFileLocation; - } - - public CSVPrinter getCsvPrinter() { - return csvPrinter; - } - - @Override - public S3Format getFileFormat() { - return S3Format.CSV; - } - - @Override - public String getOutputPath() { - return objectKey; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/jsonl/GcsJsonlWriter.java b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/jsonl/GcsJsonlWriter.java deleted file mode 100644 index 23113125f0f56..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/jsonl/GcsJsonlWriter.java +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.gcs.jsonl; - -import alex.mojaki.s3upload.MultiPartOutputStream; -import alex.mojaki.s3upload.StreamTransferManager; -import com.amazonaws.services.s3.AmazonS3; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.ObjectNode; -import io.airbyte.cdk.integrations.base.JavaBaseConstants; -import io.airbyte.cdk.integrations.destination.gcs.GcsDestinationConfig; -import io.airbyte.cdk.integrations.destination.gcs.writer.BaseGcsWriter; -import io.airbyte.cdk.integrations.destination.s3.S3Format; -import io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory; -import io.airbyte.cdk.integrations.destination.s3.writer.DestinationFileWriter; -import io.airbyte.commons.jackson.MoreMappers; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import java.io.IOException; -import java.io.PrintWriter; -import java.nio.charset.StandardCharsets; -import java.sql.Timestamp; -import java.util.UUID; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class GcsJsonlWriter extends BaseGcsWriter implements DestinationFileWriter { - - protected static final Logger LOGGER = LoggerFactory.getLogger(GcsJsonlWriter.class); - - private static final ObjectMapper MAPPER = MoreMappers.initMapper(); - - private final StreamTransferManager uploadManager; - private final MultiPartOutputStream outputStream; - private final PrintWriter printWriter; - private final String gcsFileLocation; - private final String objectKey; - - public GcsJsonlWriter(final GcsDestinationConfig config, - final AmazonS3 s3Client, - final ConfiguredAirbyteStream configuredStream, - final Timestamp uploadTimestamp) { - super(config, s3Client, configuredStream); - - final String outputFilename = BaseGcsWriter.getOutputFilename(uploadTimestamp, S3Format.JSONL); - objectKey = String.join("/", outputPrefix, outputFilename); - - gcsFileLocation = String.format("gs://%s/%s", config.getBucketName(), objectKey); - LOGGER.info("Full GCS path for stream '{}': {}/{}", stream.getName(), config.getBucketName(), objectKey); - - this.uploadManager = StreamTransferManagerFactory - .create(config.getBucketName(), objectKey, s3Client) - .get(); - - // We only need one output stream as we only have one input stream. This is reasonably performant. - this.outputStream = uploadManager.getMultiPartOutputStreams().get(0); - this.printWriter = new PrintWriter(outputStream, true, StandardCharsets.UTF_8); - } - - @Override - public void write(final UUID id, final AirbyteRecordMessage recordMessage) { - final ObjectNode json = MAPPER.createObjectNode(); - json.put(JavaBaseConstants.COLUMN_NAME_AB_ID, id.toString()); - json.put(JavaBaseConstants.COLUMN_NAME_EMITTED_AT, recordMessage.getEmittedAt()); - json.set(JavaBaseConstants.COLUMN_NAME_DATA, recordMessage.getData()); - printWriter.println(Jsons.serialize(json)); - } - - @Override - public void write(final JsonNode formattedData) throws IOException { - printWriter.println(Jsons.serialize(formattedData)); - } - - @Override - protected void closeWhenSucceed() { - printWriter.close(); - outputStream.close(); - uploadManager.complete(); - } - - @Override - protected void closeWhenFail() { - printWriter.close(); - outputStream.close(); - uploadManager.abort(); - } - - @Override - public String getFileLocation() { - return gcsFileLocation; - } - - @Override - public S3Format getFileFormat() { - return S3Format.JSONL; - } - - @Override - public String getOutputPath() { - return objectKey; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/parquet/GcsParquetWriter.java b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/parquet/GcsParquetWriter.java deleted file mode 100644 index 0223556617728..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/parquet/GcsParquetWriter.java +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.gcs.parquet; - -import com.amazonaws.services.s3.AmazonS3; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import io.airbyte.cdk.integrations.destination.gcs.GcsDestinationConfig; -import io.airbyte.cdk.integrations.destination.gcs.credential.GcsHmacKeyCredentialConfig; -import io.airbyte.cdk.integrations.destination.gcs.util.GcsS3FileSystem; -import io.airbyte.cdk.integrations.destination.gcs.writer.BaseGcsWriter; -import io.airbyte.cdk.integrations.destination.s3.S3Format; -import io.airbyte.cdk.integrations.destination.s3.avro.AvroRecordFactory; -import io.airbyte.cdk.integrations.destination.s3.parquet.S3ParquetFormatConfig; -import io.airbyte.cdk.integrations.destination.s3.writer.DestinationFileWriter; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import java.io.IOException; -import java.net.URI; -import java.net.URISyntaxException; -import java.sql.Timestamp; -import java.util.UUID; -import org.apache.avro.Schema; -import org.apache.avro.generic.GenericData.Record; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.Path; -import org.apache.parquet.avro.AvroParquetWriter; -import org.apache.parquet.hadoop.ParquetWriter; -import org.apache.parquet.hadoop.util.HadoopOutputFile; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import tech.allegro.schema.json2avro.converter.JsonAvroConverter; - -public class GcsParquetWriter extends BaseGcsWriter implements DestinationFileWriter { - - private static final Logger LOGGER = LoggerFactory.getLogger(GcsParquetWriter.class); - private static final ObjectMapper MAPPER = new ObjectMapper(); - - private final ParquetWriter parquetWriter; - private final AvroRecordFactory avroRecordFactory; - private final String gcsFileLocation; - private final String objectKey; - - public GcsParquetWriter(final GcsDestinationConfig config, - final AmazonS3 s3Client, - final ConfiguredAirbyteStream configuredStream, - final Timestamp uploadTimestamp, - final Schema schema, - final JsonAvroConverter converter) - throws URISyntaxException, IOException { - super(config, s3Client, configuredStream); - - final String outputFilename = BaseGcsWriter.getOutputFilename(uploadTimestamp, S3Format.PARQUET); - objectKey = String.join("/", outputPrefix, outputFilename); - LOGGER.info("Storage path for stream '{}': {}/{}", stream.getName(), config.getBucketName(), objectKey); - - gcsFileLocation = String.format("s3a://%s/%s/%s", config.getBucketName(), outputPrefix, outputFilename); - final URI uri = new URI(gcsFileLocation); - final Path path = new Path(uri); - - LOGGER.info("Full GCS path for stream '{}': {}", stream.getName(), path); - - final S3ParquetFormatConfig formatConfig = (S3ParquetFormatConfig) config.getFormatConfig(); - final Configuration hadoopConfig = getHadoopConfig(config); - this.parquetWriter = AvroParquetWriter.builder(HadoopOutputFile.fromPath(path, hadoopConfig)) - .withSchema(schema) - .withCompressionCodec(formatConfig.getCompressionCodec()) - .withRowGroupSize(formatConfig.getBlockSize()) - .withMaxPaddingSize(formatConfig.getMaxPaddingSize()) - .withPageSize(formatConfig.getPageSize()) - .withDictionaryPageSize(formatConfig.getDictionaryPageSize()) - .withDictionaryEncoding(formatConfig.isDictionaryEncoding()) - .build(); - this.avroRecordFactory = new AvroRecordFactory(schema, converter); - } - - public static Configuration getHadoopConfig(final GcsDestinationConfig config) { - final GcsHmacKeyCredentialConfig hmacKeyCredential = (GcsHmacKeyCredentialConfig) config.getGcsCredentialConfig(); - final Configuration hadoopConfig = new Configuration(); - - // the default org.apache.hadoop.fs.s3a.S3AFileSystem does not work for GCS - hadoopConfig.set("fs.s3a.impl", GcsS3FileSystem.class.getCanonicalName()); - - // https://stackoverflow.com/questions/64141204/process-data-in-google-storage-on-an-aws-emr-cluster-in-spark - hadoopConfig.set("fs.s3a.access.key", hmacKeyCredential.getHmacKeyAccessId()); - hadoopConfig.set("fs.s3a.secret.key", hmacKeyCredential.getHmacKeySecret()); - hadoopConfig.setBoolean("fs.s3a.path.style.access", true); - hadoopConfig.set("fs.s3a.endpoint", "storage.googleapis.com"); - hadoopConfig.setInt("fs.s3a.list.version", 1); - - return hadoopConfig; - } - - @Override - public void write(final UUID id, final AirbyteRecordMessage recordMessage) throws IOException { - parquetWriter.write(avroRecordFactory.getAvroRecord(id, recordMessage)); - } - - @Override - public void write(final JsonNode formattedData) throws IOException { - parquetWriter.write(avroRecordFactory.getAvroRecord(formattedData)); - } - - @Override - public void close(final boolean hasFailed) throws IOException { - if (hasFailed) { - LOGGER.warn("Failure detected. Aborting upload of stream '{}'...", stream.getName()); - parquetWriter.close(); - LOGGER.warn("Upload of stream '{}' aborted.", stream.getName()); - } else { - LOGGER.info("Uploading remaining data for stream '{}'.", stream.getName()); - parquetWriter.close(); - LOGGER.info("Upload completed for stream '{}'.", stream.getName()); - } - } - - @Override - public String getOutputPath() { - return objectKey; - } - - @Override - public String getFileLocation() { - return gcsFileLocation; - } - - @Override - public S3Format getFileFormat() { - return S3Format.PARQUET; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/util/GcsS3FileSystem.java b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/util/GcsS3FileSystem.java deleted file mode 100644 index eb0978cb2c06e..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/util/GcsS3FileSystem.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.gcs.util; - -import java.io.IOException; -import org.apache.hadoop.fs.s3a.Retries; -import org.apache.hadoop.fs.s3a.S3AFileSystem; - -/** - * Patch {@link S3AFileSystem} to make it work for GCS. - */ -public class GcsS3FileSystem extends S3AFileSystem { - - /** - * Method {@code doesBucketExistV2} used in the {@link S3AFileSystem#verifyBucketExistsV2} does not - * work for GCS. - */ - @Override - @Retries.RetryTranslated - protected void verifyBucketExistsV2() throws IOException { - super.verifyBucketExists(); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/util/GcsUtils.java b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/util/GcsUtils.java deleted file mode 100644 index 1bb7606d096e8..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/util/GcsUtils.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.gcs.util; - -import io.airbyte.cdk.integrations.base.JavaBaseConstants; -import io.airbyte.cdk.integrations.destination.s3.avro.AvroConstants; -import javax.annotation.Nullable; -import org.apache.avro.LogicalTypes; -import org.apache.avro.Schema; -import org.apache.avro.SchemaBuilder; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class GcsUtils { - - private static final Logger LOGGER = LoggerFactory.getLogger(GcsUtils.class); - private static final Schema UUID_SCHEMA = LogicalTypes.uuid().addToSchema(Schema.create(Schema.Type.STRING)); - private static final Schema TIMESTAMP_MILLIS_SCHEMA = LogicalTypes.timestampMillis().addToSchema(Schema.create(Schema.Type.LONG)); - private static final Schema NULLABLE_TIMESTAMP_MILLIS = SchemaBuilder.builder().unionOf().nullType().and().type(TIMESTAMP_MILLIS_SCHEMA).endUnion(); - - public static Schema getDefaultAvroSchema(final String name, - @Nullable final String namespace, - final boolean appendAirbyteFields, - final boolean useDestinationsV2Columns) { - LOGGER.info("Default schema."); - final String stdName = AvroConstants.NAME_TRANSFORMER.getIdentifier(name); - final String stdNamespace = AvroConstants.NAME_TRANSFORMER.getNamespace(namespace); - SchemaBuilder.RecordBuilder builder = SchemaBuilder.record(stdName); - - if (stdNamespace != null) { - builder = builder.namespace(stdNamespace); - } - if (useDestinationsV2Columns) { - builder.namespace("airbyte"); - } - - SchemaBuilder.FieldAssembler assembler = builder.fields(); - if (useDestinationsV2Columns) { - if (appendAirbyteFields) { - assembler = assembler.name(JavaBaseConstants.COLUMN_NAME_AB_RAW_ID).type(UUID_SCHEMA).noDefault(); - assembler = assembler.name(JavaBaseConstants.COLUMN_NAME_AB_EXTRACTED_AT).type(TIMESTAMP_MILLIS_SCHEMA).noDefault(); - assembler = assembler.name(JavaBaseConstants.COLUMN_NAME_AB_LOADED_AT).type(NULLABLE_TIMESTAMP_MILLIS).withDefault(null); - } - } else { - if (appendAirbyteFields) { - assembler = assembler.name(JavaBaseConstants.COLUMN_NAME_AB_ID).type(UUID_SCHEMA).noDefault(); - assembler = assembler.name(JavaBaseConstants.COLUMN_NAME_EMITTED_AT).type(TIMESTAMP_MILLIS_SCHEMA).noDefault(); - } - } - assembler = assembler.name(JavaBaseConstants.COLUMN_NAME_DATA).type().stringType().noDefault(); - - return assembler.endRecord(); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/writer/BaseGcsWriter.java b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/writer/BaseGcsWriter.java deleted file mode 100644 index fdac8772e48c4..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/gcs/writer/BaseGcsWriter.java +++ /dev/null @@ -1,151 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.gcs.writer; - -import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.model.DeleteObjectsRequest.KeyVersion; -import com.amazonaws.services.s3.model.HeadBucketRequest; -import com.amazonaws.services.s3.model.S3ObjectSummary; -import io.airbyte.cdk.integrations.destination.gcs.GcsDestinationConfig; -import io.airbyte.cdk.integrations.destination.s3.S3DestinationConstants; -import io.airbyte.cdk.integrations.destination.s3.S3Format; -import io.airbyte.cdk.integrations.destination.s3.util.S3OutputPathHelper; -import io.airbyte.cdk.integrations.destination.s3.writer.DestinationFileWriter; -import io.airbyte.protocol.models.v0.AirbyteStream; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.v0.DestinationSyncMode; -import java.io.IOException; -import java.sql.Timestamp; -import java.text.DateFormat; -import java.text.SimpleDateFormat; -import java.util.LinkedList; -import java.util.List; -import java.util.TimeZone; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * The base implementation takes care of the following: - *

      - *
    • Create shared instance variables.
    • - *
    • Create the bucket and prepare the bucket path.
    • - *
    - */ -public abstract class BaseGcsWriter implements DestinationFileWriter { - - private static final Logger LOGGER = LoggerFactory.getLogger(BaseGcsWriter.class); - - protected final GcsDestinationConfig config; - protected final AmazonS3 s3Client; - protected final AirbyteStream stream; - protected final DestinationSyncMode syncMode; - protected final String outputPrefix; - - protected BaseGcsWriter(final GcsDestinationConfig config, - final AmazonS3 s3Client, - final ConfiguredAirbyteStream configuredStream) { - this.config = config; - this.s3Client = s3Client; - this.stream = configuredStream.getStream(); - this.syncMode = configuredStream.getDestinationSyncMode(); - this.outputPrefix = S3OutputPathHelper.getOutputPrefix(config.getBucketPath(), stream); - } - - /** - *
      - *
    • 1. Create bucket if necessary.
    • - *
    • 2. Under OVERWRITE mode, delete all objects with the output prefix.
    • - *
    - */ - @Override - public void initialize() throws IOException { - try { - final String bucket = config.getBucketName(); - if (!gcsBucketExist(s3Client, bucket)) { - LOGGER.info("Bucket {} does not exist; creating...", bucket); - s3Client.createBucket(bucket); - LOGGER.info("Bucket {} has been created.", bucket); - } - - if (syncMode == DestinationSyncMode.OVERWRITE) { - LOGGER.info("Overwrite mode"); - final List keysToDelete = new LinkedList<>(); - final List objects = s3Client.listObjects(bucket, outputPrefix) - .getObjectSummaries(); - for (final S3ObjectSummary object : objects) { - keysToDelete.add(new KeyVersion(object.getKey())); - } - - if (keysToDelete.size() > 0) { - LOGGER.info("Purging non-empty output path for stream '{}' under OVERWRITE mode...", stream.getName()); - // Google Cloud Storage doesn't accept request to delete multiple objects - for (final KeyVersion keyToDelete : keysToDelete) { - s3Client.deleteObject(bucket, keyToDelete.getKey()); - } - LOGGER.info("Deleted {} file(s) for stream '{}'.", keysToDelete.size(), - stream.getName()); - } - LOGGER.info("Overwrite is finished"); - } - } catch (Exception e) { - LOGGER.error("Failed to initialize: ", e); - closeWhenFail(); - throw e; - } - } - - /** - * {@link AmazonS3#doesBucketExistV2} should be used to check the bucket existence. However, this - * method does not work for GCS. So we use {@link AmazonS3#headBucket} instead, which will throw an - * exception if the bucket does not exist, or there is no permission to access it. - */ - public boolean gcsBucketExist(final AmazonS3 s3Client, final String bucket) { - try { - s3Client.headBucket(new HeadBucketRequest(bucket)); - return true; - } catch (final Exception e) { - return false; - } - } - - @Override - public void close(final boolean hasFailed) throws IOException { - if (hasFailed) { - LOGGER.warn("Failure detected. Aborting upload of stream '{}'...", stream.getName()); - closeWhenFail(); - LOGGER.warn("Upload of stream '{}' aborted.", stream.getName()); - } else { - LOGGER.info("Uploading remaining data for stream '{}'.", stream.getName()); - closeWhenSucceed(); - LOGGER.info("Upload completed for stream '{}'.", stream.getName()); - } - } - - /** - * Operations that will run when the write succeeds. - */ - protected void closeWhenSucceed() throws IOException { - // Do nothing by default - } - - /** - * Operations that will run when the write fails. - */ - protected void closeWhenFail() throws IOException { - // Do nothing by default - } - - // Filename: __0. - public static String getOutputFilename(final Timestamp timestamp, final S3Format format) { - final DateFormat formatter = new SimpleDateFormat(S3DestinationConstants.YYYY_MM_DD_FORMAT_STRING); - formatter.setTimeZone(TimeZone.getTimeZone("UTC")); - return String.format( - "%s_%d_0.%s", - formatter.format(timestamp), - timestamp.getTime(), - format.getFileExtension()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/copy/gcs/GcsConfig.java b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/copy/gcs/GcsConfig.java deleted file mode 100644 index fc5ddfb908019..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/copy/gcs/GcsConfig.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.jdbc.copy.gcs; - -import com.fasterxml.jackson.databind.JsonNode; - -public class GcsConfig { - - private final String projectId; - private final String bucketName; - private final String credentialsJson; - - public GcsConfig(final String projectId, final String bucketName, final String credentialsJson) { - this.projectId = projectId; - this.bucketName = bucketName; - this.credentialsJson = credentialsJson; - } - - public String getProjectId() { - return projectId; - } - - public String getBucketName() { - return bucketName; - } - - public String getCredentialsJson() { - return credentialsJson; - } - - public static GcsConfig getGcsConfig(final JsonNode config) { - return new GcsConfig( - config.get("loading_method").get("project_id").asText(), - config.get("loading_method").get("bucket_name").asText(), - config.get("loading_method").get("credentials_json").asText()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/copy/gcs/GcsStreamCopier.java b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/copy/gcs/GcsStreamCopier.java deleted file mode 100644 index 0c74a3853b2db..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/copy/gcs/GcsStreamCopier.java +++ /dev/null @@ -1,254 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.jdbc.copy.gcs; - -import com.google.auth.oauth2.GoogleCredentials; -import com.google.cloud.WriteChannel; -import com.google.cloud.storage.BlobId; -import com.google.cloud.storage.BlobInfo; -import com.google.cloud.storage.Storage; -import com.google.cloud.storage.StorageOptions; -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.cdk.db.jdbc.JdbcDatabase; -import io.airbyte.cdk.integrations.destination.StandardNameTransformer; -import io.airbyte.cdk.integrations.destination.jdbc.SqlOperations; -import io.airbyte.cdk.integrations.destination.jdbc.StagingFilenameGenerator; -import io.airbyte.cdk.integrations.destination.jdbc.constants.GlobalDataSizeConstants; -import io.airbyte.cdk.integrations.destination.jdbc.copy.StreamCopier; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.DestinationSyncMode; -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.io.PrintWriter; -import java.nio.channels.Channels; -import java.nio.charset.StandardCharsets; -import java.sql.SQLException; -import java.sql.Timestamp; -import java.time.Instant; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Set; -import java.util.UUID; -import org.apache.commons.csv.CSVFormat; -import org.apache.commons.csv.CSVPrinter; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public abstract class GcsStreamCopier implements StreamCopier { - - private static final Logger LOGGER = LoggerFactory.getLogger(GcsStreamCopier.class); - // It is optimal to write every 10,000,000 records (BATCH_SIZE * MAX_PER_FILE_PART_COUNT) to a new - // file. - // The BATCH_SIZE is defined in CopyConsumerFactory. - // The average size of such a file will be about 1 GB. - // This will make it easier to work with files and speed up the recording of large amounts of data. - // In addition, for a large number of records, we will not get a drop in the copy request to - // QUERY_TIMEOUT when - // the records from the file are copied to the staging table. - public static final int MAX_PARTS_PER_FILE = 1000; - protected final GcsConfig gcsConfig; - protected final String tmpTableName; - protected final String schemaName; - protected final String streamName; - protected final JdbcDatabase db; - protected final Set gcsStagingFiles = new HashSet<>(); - protected final String stagingFolder; - protected StagingFilenameGenerator filenameGenerator; - private final Storage storageClient; - private final DestinationSyncMode destSyncMode; - private final StandardNameTransformer nameTransformer; - private final SqlOperations sqlOperations; - private final HashMap channels = new HashMap<>(); - private final HashMap csvPrinters = new HashMap<>(); - - public GcsStreamCopier(final String stagingFolder, - final DestinationSyncMode destSyncMode, - final String schema, - final String streamName, - final Storage storageClient, - final JdbcDatabase db, - final GcsConfig gcsConfig, - final StandardNameTransformer nameTransformer, - final SqlOperations sqlOperations) { - this.destSyncMode = destSyncMode; - this.schemaName = schema; - this.streamName = streamName; - this.stagingFolder = stagingFolder; - this.db = db; - this.nameTransformer = nameTransformer; - this.sqlOperations = sqlOperations; - this.tmpTableName = nameTransformer.getTmpTableName(streamName); - this.storageClient = storageClient; - this.gcsConfig = gcsConfig; - this.filenameGenerator = new StagingFilenameGenerator(streamName, GlobalDataSizeConstants.DEFAULT_MAX_BATCH_SIZE_BYTES); - } - - private String prepareGcsStagingFile() { - return String.join("/", stagingFolder, schemaName, filenameGenerator.getStagingFilename()); - } - - @Override - public String prepareStagingFile() { - final var name = prepareGcsStagingFile(); - if (!gcsStagingFiles.contains(name)) { - gcsStagingFiles.add(name); - final var blobId = BlobId.of(gcsConfig.getBucketName(), name); - final var blobInfo = BlobInfo.newBuilder(blobId).build(); - final var blob = storageClient.create(blobInfo); - final var channel = blob.writer(); - channels.put(name, channel); - final OutputStream outputStream = Channels.newOutputStream(channel); - - final var writer = new PrintWriter(outputStream, true, StandardCharsets.UTF_8); - try { - csvPrinters.put(name, new CSVPrinter(writer, CSVFormat.DEFAULT)); - } catch (final IOException e) { - throw new RuntimeException(e); - } - } - return name; - } - - @Override - public void write(final UUID id, final AirbyteRecordMessage recordMessage, final String gcsFileName) throws Exception { - if (csvPrinters.containsKey(gcsFileName)) { - csvPrinters.get(gcsFileName).printRecord(id, - Jsons.serialize(recordMessage.getData()), - Timestamp.from(Instant.ofEpochMilli(recordMessage.getEmittedAt()))); - } - } - - @Override - public void closeNonCurrentStagingFileWriters() throws Exception { - // TODO need to update this method when updating whole class for using GcsWriter - } - - @Override - public void closeStagingUploader(final boolean hasFailed) throws Exception { - LOGGER.info("Uploading remaining data for {} stream.", streamName); - for (final var csvPrinter : csvPrinters.values()) { - csvPrinter.close(); - } - for (final var channel : channels.values()) { - channel.close(); - } - LOGGER.info("All data for {} stream uploaded.", streamName); - } - - @Override - public void copyStagingFileToTemporaryTable() throws Exception { - LOGGER.info("Starting copy to tmp table: {} in destination for stream: {}, schema: {}.", tmpTableName, streamName, schemaName); - for (final var gcsStagingFile : gcsStagingFiles) { - copyGcsCsvFileIntoTable(db, getFullGcsPath(gcsConfig.getBucketName(), gcsStagingFile), schemaName, tmpTableName, gcsConfig); - } - LOGGER.info("Copy to tmp table {} in destination for stream {} complete.", tmpTableName, streamName); - } - - @Override - public void removeFileAndDropTmpTable() throws Exception { - for (final var gcsStagingFile : gcsStagingFiles) { - LOGGER.info("Begin cleaning gcs staging file {}.", gcsStagingFile); - final var blobId = BlobId.of(gcsConfig.getBucketName(), gcsStagingFile); - if (storageClient.get(blobId).exists()) { - storageClient.delete(blobId); - } - LOGGER.info("GCS staging file {} cleaned.", gcsStagingFile); - } - - LOGGER.info("Begin cleaning {} tmp table in destination.", tmpTableName); - sqlOperations.dropTableIfExists(db, schemaName, tmpTableName); - LOGGER.info("{} tmp table in destination cleaned.", tmpTableName); - } - - @Override - public void createDestinationSchema() throws Exception { - LOGGER.info("Creating schema in destination if it doesn't exist: {}", schemaName); - sqlOperations.createSchemaIfNotExists(db, schemaName); - } - - @Override - public void createTemporaryTable() throws Exception { - LOGGER.info("Preparing tmp table in destination for stream: {}, schema: {}, tmp table name: {}.", streamName, schemaName, tmpTableName); - sqlOperations.createTableIfNotExists(db, schemaName, tmpTableName); - } - - @Override - public String createDestinationTable() throws Exception { - final var destTableName = nameTransformer.getRawTableName(streamName); - LOGGER.info("Preparing table {} in destination.", destTableName); - sqlOperations.createTableIfNotExists(db, schemaName, destTableName); - LOGGER.info("Table {} in destination prepared.", tmpTableName); - - return destTableName; - } - - @Override - public String generateMergeStatement(final String destTableName) throws Exception { - LOGGER.info("Preparing to merge tmp table {} to dest table: {}, schema: {}, in destination.", tmpTableName, destTableName, schemaName); - final var queries = new StringBuilder(); - if (destSyncMode.equals(DestinationSyncMode.OVERWRITE)) { - queries.append(sqlOperations.truncateTableQuery(db, schemaName, destTableName)); - LOGGER.info("Destination OVERWRITE mode detected. Dest table: {}, schema: {}, will be truncated.", destTableName, schemaName); - } - queries.append(sqlOperations.insertTableQuery(db, schemaName, tmpTableName, destTableName)); - return queries.toString(); - } - - @Override - public String getCurrentFile() { - // TODO need to update this method when updating whole class for using GcsWriter - return null; - } - - private static String getFullGcsPath(final String bucketName, final String stagingFile) { - // this is intentionally gcs:/ not gcs:// since the join adds the additional slash - return String.join("/", "gcs:/", bucketName, stagingFile); - } - - public static void attemptWriteToPersistence(final GcsConfig gcsConfig) throws IOException { - final String outputTableName = "_airbyte_connection_test_" + UUID.randomUUID().toString().replaceAll("-", ""); - attemptWriteAndDeleteGcsObject(gcsConfig, outputTableName); - } - - private static void attemptWriteAndDeleteGcsObject(final GcsConfig gcsConfig, final String outputTableName) throws IOException { - final var storage = getStorageClient(gcsConfig); - final var blobId = BlobId.of(gcsConfig.getBucketName(), "check-content/" + outputTableName); - final var blobInfo = BlobInfo.newBuilder(blobId).build(); - - storage.create(blobInfo, "".getBytes(StandardCharsets.UTF_8)); - storage.delete(blobId); - } - - public static Storage getStorageClient(final GcsConfig gcsConfig) throws IOException { - final InputStream credentialsInputStream = new ByteArrayInputStream(gcsConfig.getCredentialsJson().getBytes(StandardCharsets.UTF_8)); - final GoogleCredentials credentials = GoogleCredentials.fromStream(credentialsInputStream); - return StorageOptions.newBuilder() - .setCredentials(credentials) - .setProjectId(gcsConfig.getProjectId()) - .build() - .getService(); - } - - @VisibleForTesting - public String getTmpTableName() { - return tmpTableName; - } - - @VisibleForTesting - public Set getGcsStagingFiles() { - return gcsStagingFiles; - } - - public abstract void copyGcsCsvFileIntoTable(JdbcDatabase database, - String gcsFileLocation, - String schema, - String tableName, - GcsConfig gcsConfig) - throws SQLException; - -} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/copy/gcs/GcsStreamCopierFactory.java b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/copy/gcs/GcsStreamCopierFactory.java deleted file mode 100644 index 3247a5c8fbf3d..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/copy/gcs/GcsStreamCopierFactory.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.jdbc.copy.gcs; - -import com.google.auth.oauth2.GoogleCredentials; -import com.google.cloud.storage.Storage; -import com.google.cloud.storage.StorageOptions; -import io.airbyte.cdk.db.jdbc.JdbcDatabase; -import io.airbyte.cdk.integrations.destination.StandardNameTransformer; -import io.airbyte.cdk.integrations.destination.jdbc.SqlOperations; -import io.airbyte.cdk.integrations.destination.jdbc.copy.StreamCopier; -import io.airbyte.cdk.integrations.destination.jdbc.copy.StreamCopierFactory; -import io.airbyte.protocol.models.v0.AirbyteStream; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.v0.DestinationSyncMode; -import java.io.ByteArrayInputStream; -import java.io.InputStream; -import java.nio.charset.StandardCharsets; - -public abstract class GcsStreamCopierFactory implements StreamCopierFactory { - - /** - * Used by the copy consumer. - */ - @Override - public StreamCopier create(final String configuredSchema, - final GcsConfig gcsConfig, - final String stagingFolder, - final ConfiguredAirbyteStream configuredStream, - final StandardNameTransformer nameTransformer, - final JdbcDatabase db, - final SqlOperations sqlOperations) { - try { - final AirbyteStream stream = configuredStream.getStream(); - final DestinationSyncMode syncMode = configuredStream.getDestinationSyncMode(); - final String schema = StreamCopierFactory.getSchema(stream.getNamespace(), configuredSchema, nameTransformer); - - final InputStream credentialsInputStream = new ByteArrayInputStream(gcsConfig.getCredentialsJson().getBytes(StandardCharsets.UTF_8)); - final GoogleCredentials credentials = GoogleCredentials.fromStream(credentialsInputStream); - final Storage storageClient = StorageOptions.newBuilder() - .setCredentials(credentials) - .setProjectId(gcsConfig.getProjectId()) - .build() - .getService(); - - return create(stagingFolder, syncMode, schema, stream.getName(), storageClient, db, gcsConfig, nameTransformer, sqlOperations); - } catch (final Exception e) { - throw new RuntimeException(e); - } - } - - /** - * For specific copier suppliers to implement. - */ - public abstract StreamCopier create(String stagingFolder, - DestinationSyncMode syncMode, - String schema, - String streamName, - Storage storageClient, - JdbcDatabase db, - GcsConfig gcsConfig, - StandardNameTransformer nameTransformer, - SqlOperations sqlOperations) - throws Exception; - -} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/BaseGcsDestination.kt b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/BaseGcsDestination.kt new file mode 100644 index 0000000000000..8855f7fafc76f --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/BaseGcsDestination.kt @@ -0,0 +1,100 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.gcs + +import com.amazonaws.services.s3.model.AmazonS3Exception +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.BaseConnector +import io.airbyte.cdk.integrations.base.AirbyteMessageConsumer +import io.airbyte.cdk.integrations.base.AirbyteTraceMessageUtility.emitConfigErrorTrace +import io.airbyte.cdk.integrations.base.Destination +import io.airbyte.cdk.integrations.base.errors.messages.ErrorMessage.getErrorMessage +import io.airbyte.cdk.integrations.destination.NamingConventionTransformer +import io.airbyte.cdk.integrations.destination.record_buffer.BufferStorage +import io.airbyte.cdk.integrations.destination.record_buffer.FileBuffer +import io.airbyte.cdk.integrations.destination.s3.S3BaseChecks.testMultipartUpload +import io.airbyte.cdk.integrations.destination.s3.S3BaseChecks.testSingleUpload +import io.airbyte.cdk.integrations.destination.s3.S3ConsumerFactory +import io.airbyte.cdk.integrations.destination.s3.SerializedBufferFactory.Companion.getCreateFunction +import io.airbyte.protocol.models.v0.AirbyteConnectionStatus +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import java.util.function.Consumer +import java.util.function.Function +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +abstract class BaseGcsDestination : BaseConnector(), Destination { + private val nameTransformer: NamingConventionTransformer = GcsNameTransformer() + + override fun check(config: JsonNode): AirbyteConnectionStatus? { + try { + val destinationConfig: GcsDestinationConfig = + GcsDestinationConfig.Companion.getGcsDestinationConfig(config) + val s3Client = destinationConfig.getS3Client() + + // Test single upload (for small files) permissions + testSingleUpload(s3Client, destinationConfig.bucketName, destinationConfig.bucketPath!!) + + // Test multipart upload with stream transfer manager + testMultipartUpload( + s3Client, + destinationConfig.bucketName, + destinationConfig.bucketPath!! + ) + + return AirbyteConnectionStatus().withStatus(AirbyteConnectionStatus.Status.SUCCEEDED) + } catch (e: AmazonS3Exception) { + LOGGER.error("Exception attempting to access the Gcs bucket", e) + val message = getErrorMessage(e.errorCode, 0, e.message, e) + emitConfigErrorTrace(e, message) + return AirbyteConnectionStatus() + .withStatus(AirbyteConnectionStatus.Status.FAILED) + .withMessage(message) + } catch (e: Exception) { + LOGGER.error( + "Exception attempting to access the Gcs bucket: {}. Please make sure you account has all of these roles: " + + EXPECTED_ROLES, + e + ) + emitConfigErrorTrace(e, e.message) + return AirbyteConnectionStatus() + .withStatus(AirbyteConnectionStatus.Status.FAILED) + .withMessage( + "Could not connect to the Gcs bucket with the provided configuration. \n" + + e.message + ) + } + } + + override fun getConsumer( + config: JsonNode, + configuredCatalog: ConfiguredAirbyteCatalog, + outputRecordCollector: Consumer + ): AirbyteMessageConsumer? { + val gcsConfig: GcsDestinationConfig = + GcsDestinationConfig.Companion.getGcsDestinationConfig(config) + return S3ConsumerFactory() + .create( + outputRecordCollector, + GcsStorageOperations(nameTransformer, gcsConfig.getS3Client(), gcsConfig), + nameTransformer, + getCreateFunction( + gcsConfig, + Function { fileExtension: String -> + FileBuffer(fileExtension) + } + ), + gcsConfig, + configuredCatalog + ) + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(BaseGcsDestination::class.java) + const val EXPECTED_ROLES: String = + ("storage.multipartUploads.abort, storage.multipartUploads.create, " + + "storage.objects.create, storage.objects.delete, storage.objects.get, storage.objects.list") + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsDestinationConfig.kt b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsDestinationConfig.kt new file mode 100644 index 0000000000000..77795cdf06c55 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsDestinationConfig.kt @@ -0,0 +1,83 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.gcs + +import com.amazonaws.auth.AWSStaticCredentialsProvider +import com.amazonaws.auth.BasicAWSCredentials +import com.amazonaws.client.builder.AwsClientBuilder +import com.amazonaws.services.s3.AmazonS3 +import com.amazonaws.services.s3.AmazonS3ClientBuilder +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.destination.gcs.credential.GcsCredentialConfig +import io.airbyte.cdk.integrations.destination.gcs.credential.GcsCredentialConfigs +import io.airbyte.cdk.integrations.destination.gcs.credential.GcsCredentialType +import io.airbyte.cdk.integrations.destination.gcs.credential.GcsHmacKeyCredentialConfig +import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfig +import io.airbyte.cdk.integrations.destination.s3.S3DestinationConstants +import io.airbyte.cdk.integrations.destination.s3.S3FormatConfig +import io.airbyte.cdk.integrations.destination.s3.S3FormatConfigs.getS3FormatConfig +import io.airbyte.cdk.integrations.destination.s3.S3StorageOperations + +/** + * Currently we always reuse the S3 client for GCS. So the GCS config extends from the S3 config. + * This may change in the future. + */ +class GcsDestinationConfig( + bucketName: String, + bucketPath: String, + bucketRegion: String?, + val gcsCredentialConfig: GcsCredentialConfig, + formatConfig: S3FormatConfig +) : + S3DestinationConfig( + GCS_ENDPOINT, + bucketName!!, + bucketPath!!, + bucketRegion, + S3DestinationConstants.DEFAULT_PATH_FORMAT, + gcsCredentialConfig.s3CredentialConfig.orElseThrow(), + formatConfig!!, + null, + null, + false, + S3StorageOperations.DEFAULT_UPLOAD_THREADS + ) { + override fun createS3Client(): AmazonS3 { + when (gcsCredentialConfig.credentialType) { + GcsCredentialType.HMAC_KEY -> { + val hmacKeyCredential = gcsCredentialConfig as GcsHmacKeyCredentialConfig + val awsCreds = + BasicAWSCredentials( + hmacKeyCredential.hmacKeyAccessId, + hmacKeyCredential.hmacKeySecret + ) + + return AmazonS3ClientBuilder.standard() + .withEndpointConfiguration( + AwsClientBuilder.EndpointConfiguration(GCS_ENDPOINT, bucketRegion) + ) + .withCredentials(AWSStaticCredentialsProvider(awsCreds)) + .build() + } + else -> + throw IllegalArgumentException( + "Unsupported credential type: " + gcsCredentialConfig.credentialType!!.name + ) + } + } + + companion object { + private const val GCS_ENDPOINT = "https://storage.googleapis.com" + + fun getGcsDestinationConfig(config: JsonNode): GcsDestinationConfig { + return GcsDestinationConfig( + config["gcs_bucket_name"].asText(), + config["gcs_bucket_path"].asText(), + config["gcs_bucket_region"].asText(), + GcsCredentialConfigs.getCredentialConfig(config), + getS3FormatConfig(config) + ) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsNameTransformer.kt b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsNameTransformer.kt new file mode 100644 index 0000000000000..3e593786cac60 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsNameTransformer.kt @@ -0,0 +1,8 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.gcs + +import io.airbyte.cdk.integrations.destination.s3.util.S3NameTransformer + +class GcsNameTransformer : S3NameTransformer() diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsStorageOperations.kt b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsStorageOperations.kt new file mode 100644 index 0000000000000..c1948134cba61 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsStorageOperations.kt @@ -0,0 +1,46 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.gcs + +import com.amazonaws.services.s3.AmazonS3 +import com.amazonaws.services.s3.model.DeleteObjectsRequest +import io.airbyte.cdk.integrations.destination.NamingConventionTransformer +import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfig +import io.airbyte.cdk.integrations.destination.s3.S3StorageOperations +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +class GcsStorageOperations( + nameTransformer: NamingConventionTransformer, + s3Client: AmazonS3, + s3Config: S3DestinationConfig +) : S3StorageOperations(nameTransformer!!, s3Client!!, s3Config!!) { + /** GCS only supports the legacy AmazonS3#doesBucketExist method. */ + override fun doesBucketExist(bucket: String?): Boolean { + return s3Client.doesBucketExist(bucket) + } + + /** + * This method is overridden because GCS doesn't accept request to delete multiple objects. The + * only difference is that the AmazonS3#deleteObjects method is replaced with + * AmazonS3#deleteObject. + */ + override fun cleanUpObjects( + bucket: String?, + keysToDelete: List + ) { + for (keyToDelete in keysToDelete) { + LOGGER.info("Deleting object {}", keyToDelete.key) + s3Client.deleteObject(bucket, keyToDelete.key) + } + } + + override fun getMetadataMapping(): Map { + return HashMap() + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(GcsStorageOperations::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/avro/GcsAvroWriter.kt b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/avro/GcsAvroWriter.kt new file mode 100644 index 0000000000000..c22c96e32697f --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/avro/GcsAvroWriter.kt @@ -0,0 +1,127 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.gcs.avro + +import alex.mojaki.s3upload.MultiPartOutputStream +import alex.mojaki.s3upload.StreamTransferManager +import com.amazonaws.services.s3.AmazonS3 +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.destination.gcs.GcsDestinationConfig +import io.airbyte.cdk.integrations.destination.gcs.util.GcsUtils +import io.airbyte.cdk.integrations.destination.gcs.writer.BaseGcsWriter +import io.airbyte.cdk.integrations.destination.s3.S3Format +import io.airbyte.cdk.integrations.destination.s3.avro.AvroRecordFactory +import io.airbyte.cdk.integrations.destination.s3.avro.JsonToAvroSchemaConverter +import io.airbyte.cdk.integrations.destination.s3.avro.S3AvroFormatConfig +import io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory +import io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory.create +import io.airbyte.cdk.integrations.destination.s3.writer.DestinationFileWriter +import io.airbyte.protocol.models.v0.AirbyteRecordMessage +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import java.io.IOException +import java.sql.Timestamp +import java.util.* +import org.apache.avro.file.DataFileWriter +import org.apache.avro.generic.GenericData +import org.apache.avro.generic.GenericDatumWriter +import org.slf4j.Logger +import org.slf4j.LoggerFactory +import tech.allegro.schema.json2avro.converter.JsonAvroConverter + +class GcsAvroWriter +@JvmOverloads +constructor( + config: GcsDestinationConfig, + s3Client: AmazonS3, + configuredStream: ConfiguredAirbyteStream, + uploadTimestamp: Timestamp, + converter: JsonAvroConverter?, + jsonSchema: JsonNode? = null +) : BaseGcsWriter(config, s3Client, configuredStream), DestinationFileWriter { + private val avroRecordFactory: AvroRecordFactory + private val uploadManager: StreamTransferManager + private val outputStream: MultiPartOutputStream + private val dataFileWriter: DataFileWriter + override val fileLocation: String + override val outputPath: String + + init { + val schema = + if (jsonSchema == null) + GcsUtils.getDefaultAvroSchema(stream.name, stream.namespace, true, false) + else + JsonToAvroSchemaConverter() + .getAvroSchema( + jsonSchema, + stream.name, + stream.namespace, + true, + false, + false, + true + ) + LOGGER.info("Avro schema for stream {}: {}", stream.name, schema!!.toString(false)) + + val outputFilename: String = + BaseGcsWriter.Companion.getOutputFilename(uploadTimestamp, S3Format.AVRO) + outputPath = java.lang.String.join("/", outputPrefix, outputFilename) + fileLocation = String.format("gs://%s/%s", config.bucketName, outputPath) + + LOGGER.info( + "Full GCS path for stream '{}': {}/{}", + stream.name, + config.bucketName, + outputPath + ) + + this.avroRecordFactory = AvroRecordFactory(schema, converter) + this.uploadManager = + create(config.bucketName, outputPath, s3Client) + .setPartSize(StreamTransferManagerFactory.DEFAULT_PART_SIZE_MB.toLong()) + .get() + // We only need one output stream as we only have one input stream. This is reasonably + // performant. + this.outputStream = uploadManager.multiPartOutputStreams[0] + + val formatConfig = config.formatConfig as S3AvroFormatConfig + // The DataFileWriter always uses binary encoding. + // If json encoding is needed in the future, use the GenericDatumWriter directly. + this.dataFileWriter = + DataFileWriter(GenericDatumWriter()) + .setCodec(formatConfig.codecFactory) + .create(schema, outputStream) + } + + @Throws(IOException::class) + override fun write(id: UUID, recordMessage: AirbyteRecordMessage) { + dataFileWriter.append(avroRecordFactory.getAvroRecord(id, recordMessage)) + } + + @Throws(IOException::class) + override fun write(formattedData: JsonNode) { + val record = avroRecordFactory.getAvroRecord(formattedData) + dataFileWriter.append(record) + } + + @Throws(IOException::class) + override fun closeWhenSucceed() { + dataFileWriter.close() + outputStream.close() + uploadManager.complete() + } + + @Throws(IOException::class) + override fun closeWhenFail() { + dataFileWriter.close() + outputStream.close() + uploadManager.abort() + } + + override val fileFormat: S3Format + get() = S3Format.AVRO + + companion object { + protected val LOGGER: Logger = LoggerFactory.getLogger(GcsAvroWriter::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/credential/GcsCredentialConfig.kt b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/credential/GcsCredentialConfig.kt new file mode 100644 index 0000000000000..1899cb84b52a6 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/credential/GcsCredentialConfig.kt @@ -0,0 +1,12 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.gcs.credential + +import io.airbyte.cdk.integrations.destination.s3.credential.BlobStorageCredentialConfig +import io.airbyte.cdk.integrations.destination.s3.credential.S3CredentialConfig +import java.util.* + +interface GcsCredentialConfig : BlobStorageCredentialConfig { + val s3CredentialConfig: Optional +} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/credential/GcsCredentialConfigs.kt b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/credential/GcsCredentialConfigs.kt new file mode 100644 index 0000000000000..62a2349df970b --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/credential/GcsCredentialConfigs.kt @@ -0,0 +1,23 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.gcs.credential + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.commons.json.Jsons +import java.util.* + +object GcsCredentialConfigs { + fun getCredentialConfig(config: JsonNode): GcsCredentialConfig { + val credentialConfig = config["credential"] + val credentialType = + GcsCredentialType.valueOf( + credentialConfig["credential_type"].asText().uppercase(Locale.getDefault()) + ) + + if (credentialType == GcsCredentialType.HMAC_KEY) { + return GcsHmacKeyCredentialConfig(credentialConfig) + } + throw RuntimeException("Unexpected credential: " + Jsons.serialize(credentialConfig)) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/credential/GcsCredentialType.kt b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/credential/GcsCredentialType.kt new file mode 100644 index 0000000000000..1161593eefb86 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/credential/GcsCredentialType.kt @@ -0,0 +1,8 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.gcs.credential + +enum class GcsCredentialType { + HMAC_KEY +} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/credential/GcsHmacKeyCredentialConfig.kt b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/credential/GcsHmacKeyCredentialConfig.kt new file mode 100644 index 0000000000000..cbe8b32f6541e --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/credential/GcsHmacKeyCredentialConfig.kt @@ -0,0 +1,30 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.gcs.credential + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.destination.s3.credential.S3AccessKeyCredentialConfig +import io.airbyte.cdk.integrations.destination.s3.credential.S3CredentialConfig +import java.util.* + +class GcsHmacKeyCredentialConfig : GcsCredentialConfig { + val hmacKeyAccessId: String + val hmacKeySecret: String + + constructor(credentialConfig: JsonNode) { + this.hmacKeyAccessId = credentialConfig["hmac_key_access_id"].asText() + this.hmacKeySecret = credentialConfig["hmac_key_secret"].asText() + } + + constructor(hmacKeyAccessId: String, hmacKeySecret: String) { + this.hmacKeyAccessId = hmacKeyAccessId + this.hmacKeySecret = hmacKeySecret + } + + override val credentialType: GcsCredentialType + get() = GcsCredentialType.HMAC_KEY + + override val s3CredentialConfig: Optional + get() = Optional.of(S3AccessKeyCredentialConfig(hmacKeyAccessId, hmacKeySecret)) +} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/csv/GcsCsvWriter.kt b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/csv/GcsCsvWriter.kt new file mode 100644 index 0000000000000..ac85087a0ea22 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/csv/GcsCsvWriter.kt @@ -0,0 +1,106 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.gcs.csv + +import alex.mojaki.s3upload.MultiPartOutputStream +import alex.mojaki.s3upload.StreamTransferManager +import com.amazonaws.services.s3.AmazonS3 +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.destination.gcs.GcsDestinationConfig +import io.airbyte.cdk.integrations.destination.gcs.writer.BaseGcsWriter +import io.airbyte.cdk.integrations.destination.s3.S3Format +import io.airbyte.cdk.integrations.destination.s3.csv.CsvSheetGenerator +import io.airbyte.cdk.integrations.destination.s3.csv.CsvSheetGenerator.Factory.create +import io.airbyte.cdk.integrations.destination.s3.csv.S3CsvFormatConfig +import io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory +import io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory.create +import io.airbyte.cdk.integrations.destination.s3.writer.DestinationFileWriter +import io.airbyte.protocol.models.v0.AirbyteRecordMessage +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import java.io.IOException +import java.io.PrintWriter +import java.nio.charset.StandardCharsets +import java.sql.Timestamp +import java.util.* +import org.apache.commons.csv.CSVFormat +import org.apache.commons.csv.CSVPrinter +import org.apache.commons.csv.QuoteMode +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +class GcsCsvWriter( + config: GcsDestinationConfig, + s3Client: AmazonS3, + configuredStream: ConfiguredAirbyteStream, + uploadTimestamp: Timestamp +) : BaseGcsWriter(config, s3Client, configuredStream), DestinationFileWriter { + private val csvSheetGenerator: CsvSheetGenerator + private val uploadManager: StreamTransferManager + private val outputStream: MultiPartOutputStream + val csvPrinter: CSVPrinter + override val fileLocation: String + override val outputPath: String + + init { + val formatConfig = config.formatConfig as S3CsvFormatConfig + this.csvSheetGenerator = create(configuredStream.stream.jsonSchema, formatConfig) + + val outputFilename: String = + BaseGcsWriter.Companion.getOutputFilename(uploadTimestamp, S3Format.CSV) + outputPath = java.lang.String.join("/", outputPrefix, outputFilename) + fileLocation = String.format("gs://%s/%s", config.bucketName, outputPath) + + LOGGER.info( + "Full GCS path for stream '{}': {}/{}", + stream.name, + config.bucketName, + outputPath + ) + + this.uploadManager = + create(config.bucketName, outputPath, s3Client) + .setPartSize(StreamTransferManagerFactory.DEFAULT_PART_SIZE_MB.toLong()) + .get() + // We only need one output stream as we only have one input stream. This is reasonably + // performant. + this.outputStream = uploadManager.multiPartOutputStreams[0] + this.csvPrinter = + CSVPrinter( + PrintWriter(outputStream, true, StandardCharsets.UTF_8), + CSVFormat.DEFAULT.withQuoteMode(QuoteMode.ALL) + .withHeader(*csvSheetGenerator.getHeaderRow().toTypedArray()) + ) + } + + @Throws(IOException::class) + override fun write(id: UUID, recordMessage: AirbyteRecordMessage) { + csvPrinter.printRecord(csvSheetGenerator.getDataRow(id, recordMessage)) + } + + @Throws(IOException::class) + override fun write(formattedData: JsonNode) { + csvPrinter.printRecord(csvSheetGenerator.getDataRow(formattedData)) + } + + @Throws(IOException::class) + override fun closeWhenSucceed() { + csvPrinter.close() + outputStream.close() + uploadManager.complete() + } + + @Throws(IOException::class) + override fun closeWhenFail() { + csvPrinter.close() + outputStream.close() + uploadManager.abort() + } + + override val fileFormat: S3Format + get() = S3Format.CSV + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(GcsCsvWriter::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/jsonl/GcsJsonlWriter.kt b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/jsonl/GcsJsonlWriter.kt new file mode 100644 index 0000000000000..0cd765543d46a --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/jsonl/GcsJsonlWriter.kt @@ -0,0 +1,95 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.gcs.jsonl + +import alex.mojaki.s3upload.MultiPartOutputStream +import alex.mojaki.s3upload.StreamTransferManager +import com.amazonaws.services.s3.AmazonS3 +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.ObjectMapper +import io.airbyte.cdk.integrations.base.JavaBaseConstants +import io.airbyte.cdk.integrations.destination.gcs.GcsDestinationConfig +import io.airbyte.cdk.integrations.destination.gcs.writer.BaseGcsWriter +import io.airbyte.cdk.integrations.destination.s3.S3Format +import io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory.create +import io.airbyte.cdk.integrations.destination.s3.writer.DestinationFileWriter +import io.airbyte.commons.jackson.MoreMappers +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.v0.AirbyteRecordMessage +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import java.io.IOException +import java.io.PrintWriter +import java.nio.charset.StandardCharsets +import java.sql.Timestamp +import java.util.* +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +class GcsJsonlWriter( + config: GcsDestinationConfig, + s3Client: AmazonS3, + configuredStream: ConfiguredAirbyteStream, + uploadTimestamp: Timestamp +) : BaseGcsWriter(config, s3Client, configuredStream), DestinationFileWriter { + private val uploadManager: StreamTransferManager + private val outputStream: MultiPartOutputStream + private val printWriter: PrintWriter + override val fileLocation: String + override val outputPath: String + + init { + val outputFilename: String = + BaseGcsWriter.Companion.getOutputFilename(uploadTimestamp, S3Format.JSONL) + outputPath = java.lang.String.join("/", outputPrefix, outputFilename) + + fileLocation = String.format("gs://%s/%s", config.bucketName, outputPath) + LOGGER.info( + "Full GCS path for stream '{}': {}/{}", + stream.name, + config.bucketName, + outputPath + ) + + this.uploadManager = create(config.bucketName, outputPath, s3Client).get() + + // We only need one output stream as we only have one input stream. This is reasonably + // performant. + this.outputStream = uploadManager.multiPartOutputStreams[0] + this.printWriter = PrintWriter(outputStream, true, StandardCharsets.UTF_8) + } + + override fun write(id: UUID, recordMessage: AirbyteRecordMessage) { + val json = MAPPER.createObjectNode() + json.put(JavaBaseConstants.COLUMN_NAME_AB_ID, id.toString()) + json.put(JavaBaseConstants.COLUMN_NAME_EMITTED_AT, recordMessage.emittedAt) + json.set(JavaBaseConstants.COLUMN_NAME_DATA, recordMessage.data) + printWriter.println(Jsons.serialize(json)) + } + + @Throws(IOException::class) + override fun write(formattedData: JsonNode) { + printWriter.println(Jsons.serialize(formattedData)) + } + + override fun closeWhenSucceed() { + printWriter.close() + outputStream.close() + uploadManager.complete() + } + + override fun closeWhenFail() { + printWriter.close() + outputStream.close() + uploadManager.abort() + } + + override val fileFormat: S3Format + get() = S3Format.JSONL + + companion object { + protected val LOGGER: Logger = LoggerFactory.getLogger(GcsJsonlWriter::class.java) + + private val MAPPER: ObjectMapper = MoreMappers.initMapper() + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/parquet/GcsParquetWriter.kt b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/parquet/GcsParquetWriter.kt new file mode 100644 index 0000000000000..1cc78d4f7511f --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/parquet/GcsParquetWriter.kt @@ -0,0 +1,129 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.gcs.parquet + +import com.amazonaws.services.s3.AmazonS3 +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.ObjectMapper +import io.airbyte.cdk.integrations.destination.gcs.GcsDestinationConfig +import io.airbyte.cdk.integrations.destination.gcs.credential.GcsHmacKeyCredentialConfig +import io.airbyte.cdk.integrations.destination.gcs.util.GcsS3FileSystem +import io.airbyte.cdk.integrations.destination.gcs.writer.BaseGcsWriter +import io.airbyte.cdk.integrations.destination.s3.S3Format +import io.airbyte.cdk.integrations.destination.s3.avro.AvroRecordFactory +import io.airbyte.cdk.integrations.destination.s3.parquet.S3ParquetFormatConfig +import io.airbyte.cdk.integrations.destination.s3.writer.DestinationFileWriter +import io.airbyte.protocol.models.v0.AirbyteRecordMessage +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import java.io.IOException +import java.net.URI +import java.sql.Timestamp +import java.util.* +import org.apache.avro.Schema +import org.apache.avro.generic.GenericData +import org.apache.hadoop.conf.Configuration +import org.apache.hadoop.fs.Path +import org.apache.parquet.avro.AvroParquetWriter +import org.apache.parquet.hadoop.ParquetWriter +import org.apache.parquet.hadoop.util.HadoopOutputFile +import org.slf4j.Logger +import org.slf4j.LoggerFactory +import tech.allegro.schema.json2avro.converter.JsonAvroConverter + +class GcsParquetWriter( + config: GcsDestinationConfig, + s3Client: AmazonS3, + configuredStream: ConfiguredAirbyteStream, + uploadTimestamp: Timestamp, + schema: Schema?, + converter: JsonAvroConverter? +) : BaseGcsWriter(config, s3Client, configuredStream), DestinationFileWriter { + private val parquetWriter: ParquetWriter + private val avroRecordFactory: AvroRecordFactory + override val fileLocation: String + override val outputPath: String + + init { + val outputFilename: String = + BaseGcsWriter.Companion.getOutputFilename(uploadTimestamp, S3Format.PARQUET) + outputPath = java.lang.String.join("/", outputPrefix, outputFilename) + LOGGER.info( + "Storage path for stream '{}': {}/{}", + stream.name, + config.bucketName, + outputPath + ) + + fileLocation = + String.format("s3a://%s/%s/%s", config.bucketName, outputPrefix, outputFilename) + val uri = URI(fileLocation) + val path = Path(uri) + + LOGGER.info("Full GCS path for stream '{}': {}", stream.name, path) + + val formatConfig = config.formatConfig as S3ParquetFormatConfig + val hadoopConfig = getHadoopConfig(config) + this.parquetWriter = + AvroParquetWriter.builder( + HadoopOutputFile.fromPath(path, hadoopConfig) + ) + .withSchema(schema) + .withCompressionCodec(formatConfig.compressionCodec) + .withRowGroupSize(formatConfig.blockSize) + .withMaxPaddingSize(formatConfig.maxPaddingSize) + .withPageSize(formatConfig.pageSize) + .withDictionaryPageSize(formatConfig.dictionaryPageSize) + .withDictionaryEncoding(formatConfig.isDictionaryEncoding) + .build() + this.avroRecordFactory = AvroRecordFactory(schema, converter) + } + + @Throws(IOException::class) + override fun write(id: UUID, recordMessage: AirbyteRecordMessage) { + parquetWriter.write(avroRecordFactory.getAvroRecord(id, recordMessage)) + } + + @Throws(IOException::class) + override fun write(formattedData: JsonNode) { + parquetWriter.write(avroRecordFactory.getAvroRecord(formattedData)) + } + + @Throws(IOException::class) + override fun close(hasFailed: Boolean) { + if (hasFailed) { + LOGGER.warn("Failure detected. Aborting upload of stream '{}'...", stream.name) + parquetWriter.close() + LOGGER.warn("Upload of stream '{}' aborted.", stream.name) + } else { + LOGGER.info("Uploading remaining data for stream '{}'.", stream.name) + parquetWriter.close() + LOGGER.info("Upload completed for stream '{}'.", stream.name) + } + } + + override val fileFormat: S3Format + get() = S3Format.PARQUET + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(GcsParquetWriter::class.java) + private val MAPPER = ObjectMapper() + + fun getHadoopConfig(config: GcsDestinationConfig): Configuration { + val hmacKeyCredential = config.gcsCredentialConfig as GcsHmacKeyCredentialConfig + val hadoopConfig = Configuration() + + // the default org.apache.hadoop.fs.s3a.S3AFileSystem does not work for GCS + hadoopConfig["fs.s3a.impl"] = GcsS3FileSystem::class.java.canonicalName + + // https://stackoverflow.com/questions/64141204/process-data-in-google-storage-on-an-aws-emr-cluster-in-spark + hadoopConfig["fs.s3a.access.key"] = hmacKeyCredential.hmacKeyAccessId + hadoopConfig["fs.s3a.secret.key"] = hmacKeyCredential.hmacKeySecret + hadoopConfig.setBoolean("fs.s3a.path.style.access", true) + hadoopConfig["fs.s3a.endpoint"] = "storage.googleapis.com" + hadoopConfig.setInt("fs.s3a.list.version", 1) + + return hadoopConfig + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/util/GcsS3FileSystem.kt b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/util/GcsS3FileSystem.kt new file mode 100644 index 0000000000000..8c2ad5ae911cb --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/util/GcsS3FileSystem.kt @@ -0,0 +1,21 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.gcs.util + +import java.io.IOException +import org.apache.hadoop.fs.s3a.Retries +import org.apache.hadoop.fs.s3a.S3AFileSystem + +/** Patch [S3AFileSystem] to make it work for GCS. */ +class GcsS3FileSystem : S3AFileSystem() { + /** + * Method `doesBucketExistV2` used in the [S3AFileSystem.verifyBucketExistsV2] does not work for + * GCS. + */ + @Retries.RetryTranslated + @Throws(IOException::class) + override fun verifyBucketExistsV2() { + super.verifyBucketExists() + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/util/GcsUtils.kt b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/util/GcsUtils.kt new file mode 100644 index 0000000000000..fb65d0b98f83b --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/util/GcsUtils.kt @@ -0,0 +1,79 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.gcs.util + +import io.airbyte.cdk.integrations.base.JavaBaseConstants +import io.airbyte.cdk.integrations.destination.s3.avro.AvroConstants +import org.apache.avro.LogicalTypes +import org.apache.avro.Schema +import org.apache.avro.SchemaBuilder +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +object GcsUtils { + private val LOGGER: Logger = LoggerFactory.getLogger(GcsUtils::class.java) + private val UUID_SCHEMA: Schema = + LogicalTypes.uuid().addToSchema(Schema.create(Schema.Type.STRING)) + private val TIMESTAMP_MILLIS_SCHEMA: Schema = + LogicalTypes.timestampMillis().addToSchema(Schema.create(Schema.Type.LONG)) + private val NULLABLE_TIMESTAMP_MILLIS: Schema = + SchemaBuilder.builder().unionOf().nullType().and().type(TIMESTAMP_MILLIS_SCHEMA).endUnion() + + fun getDefaultAvroSchema( + name: String, + namespace: String, + appendAirbyteFields: Boolean, + useDestinationsV2Columns: Boolean + ): Schema? { + LOGGER.info("Default schema.") + val stdName = AvroConstants.NAME_TRANSFORMER.getIdentifier(name!!) + val stdNamespace = AvroConstants.NAME_TRANSFORMER.getNamespace(namespace!!) + var builder = SchemaBuilder.record(stdName) + + if (stdNamespace != null) { + builder = builder.namespace(stdNamespace) + } + if (useDestinationsV2Columns) { + builder.namespace("airbyte") + } + + var assembler = builder.fields() + if (useDestinationsV2Columns) { + if (appendAirbyteFields) { + assembler = + assembler + .name(JavaBaseConstants.COLUMN_NAME_AB_RAW_ID) + .type(UUID_SCHEMA) + .noDefault() + assembler = + assembler + .name(JavaBaseConstants.COLUMN_NAME_AB_EXTRACTED_AT) + .type(TIMESTAMP_MILLIS_SCHEMA) + .noDefault() + assembler = + assembler + .name(JavaBaseConstants.COLUMN_NAME_AB_LOADED_AT) + .type(NULLABLE_TIMESTAMP_MILLIS) + .withDefault(null) + } + } else { + if (appendAirbyteFields) { + assembler = + assembler + .name(JavaBaseConstants.COLUMN_NAME_AB_ID) + .type(UUID_SCHEMA) + .noDefault() + assembler = + assembler + .name(JavaBaseConstants.COLUMN_NAME_EMITTED_AT) + .type(TIMESTAMP_MILLIS_SCHEMA) + .noDefault() + } + } + assembler = + assembler.name(JavaBaseConstants.COLUMN_NAME_DATA).type().stringType().noDefault() + + return assembler.endRecord() + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/writer/BaseGcsWriter.kt b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/writer/BaseGcsWriter.kt new file mode 100644 index 0000000000000..636345ece2fa3 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/gcs/writer/BaseGcsWriter.kt @@ -0,0 +1,143 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.gcs.writer + +import com.amazonaws.services.s3.AmazonS3 +import com.amazonaws.services.s3.model.DeleteObjectsRequest +import com.amazonaws.services.s3.model.HeadBucketRequest +import io.airbyte.cdk.integrations.destination.gcs.GcsDestinationConfig +import io.airbyte.cdk.integrations.destination.s3.S3DestinationConstants +import io.airbyte.cdk.integrations.destination.s3.S3Format +import io.airbyte.cdk.integrations.destination.s3.util.S3OutputPathHelper.getOutputPrefix +import io.airbyte.cdk.integrations.destination.s3.writer.DestinationFileWriter +import io.airbyte.protocol.models.v0.AirbyteStream +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import io.airbyte.protocol.models.v0.DestinationSyncMode +import java.io.IOException +import java.sql.Timestamp +import java.text.DateFormat +import java.text.SimpleDateFormat +import java.util.* +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * The base implementation takes care of the following: + * + * * Create shared instance variables. + * * Create the bucket and prepare the bucket path. + */ +abstract class BaseGcsWriter +protected constructor( + protected val config: GcsDestinationConfig, + protected val s3Client: AmazonS3, + configuredStream: ConfiguredAirbyteStream +) : DestinationFileWriter { + protected val stream: AirbyteStream = configuredStream.stream + protected val syncMode: DestinationSyncMode? = configuredStream.destinationSyncMode + protected val outputPrefix: String = getOutputPrefix(config.bucketPath, stream) + + /** + * + * * 1. Create bucket if necessary. + * * 2. Under OVERWRITE mode, delete all objects with the output prefix. + */ + @Throws(IOException::class) + override fun initialize() { + try { + val bucket = config.bucketName + if (!gcsBucketExist(s3Client, bucket)) { + LOGGER.info("Bucket {} does not exist; creating...", bucket) + s3Client.createBucket(bucket) + LOGGER.info("Bucket {} has been created.", bucket) + } + + if (syncMode == DestinationSyncMode.OVERWRITE) { + LOGGER.info("Overwrite mode") + val keysToDelete: MutableList = LinkedList() + val objects = s3Client.listObjects(bucket, outputPrefix).objectSummaries + for (`object` in objects) { + keysToDelete.add(DeleteObjectsRequest.KeyVersion(`object`.key)) + } + + if (keysToDelete.size > 0) { + LOGGER.info( + "Purging non-empty output path for stream '{}' under OVERWRITE mode...", + stream.name + ) + // Google Cloud Storage doesn't accept request to delete multiple objects + for (keyToDelete in keysToDelete) { + s3Client.deleteObject(bucket, keyToDelete.key) + } + LOGGER.info( + "Deleted {} file(s) for stream '{}'.", + keysToDelete.size, + stream.name + ) + } + LOGGER.info("Overwrite is finished") + } + } catch (e: Exception) { + LOGGER.error("Failed to initialize: ", e) + closeWhenFail() + throw e + } + } + + /** + * [AmazonS3.doesBucketExistV2] should be used to check the bucket existence. However, this + * method does not work for GCS. So we use [AmazonS3.headBucket] instead, which will throw an + * exception if the bucket does not exist, or there is no permission to access it. + */ + fun gcsBucketExist(s3Client: AmazonS3, bucket: String?): Boolean { + try { + s3Client.headBucket(HeadBucketRequest(bucket)) + return true + } catch (e: Exception) { + return false + } + } + + @Throws(IOException::class) + override fun close(hasFailed: Boolean) { + if (hasFailed) { + LOGGER.warn("Failure detected. Aborting upload of stream '{}'...", stream.name) + closeWhenFail() + LOGGER.warn("Upload of stream '{}' aborted.", stream.name) + } else { + LOGGER.info("Uploading remaining data for stream '{}'.", stream.name) + closeWhenSucceed() + LOGGER.info("Upload completed for stream '{}'.", stream.name) + } + } + + /** Operations that will run when the write succeeds. */ + @Throws(IOException::class) + protected open fun closeWhenSucceed() { + // Do nothing by default + } + + /** Operations that will run when the write fails. */ + @Throws(IOException::class) + protected open fun closeWhenFail() { + // Do nothing by default + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(BaseGcsWriter::class.java) + + // Filename: __0. + fun getOutputFilename(timestamp: Timestamp, format: S3Format): String { + val formatter: DateFormat = + SimpleDateFormat(S3DestinationConstants.YYYY_MM_DD_FORMAT_STRING) + formatter.timeZone = TimeZone.getTimeZone("UTC") + return String.format( + "%s_%d_0.%s", + formatter.format(timestamp), + timestamp.time, + format.fileExtension + ) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/jdbc/copy/gcs/GcsConfig.kt b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/jdbc/copy/gcs/GcsConfig.kt new file mode 100644 index 0000000000000..f33bf8266d0d4 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/jdbc/copy/gcs/GcsConfig.kt @@ -0,0 +1,18 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.jdbc.copy.gcs + +import com.fasterxml.jackson.databind.JsonNode + +class GcsConfig(val projectId: String, val bucketName: String, val credentialsJson: String) { + companion object { + fun getGcsConfig(config: JsonNode): GcsConfig { + return GcsConfig( + config["loading_method"]["project_id"].asText(), + config["loading_method"]["bucket_name"].asText(), + config["loading_method"]["credentials_json"].asText() + ) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/jdbc/copy/gcs/GcsStreamCopier.kt b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/jdbc/copy/gcs/GcsStreamCopier.kt new file mode 100644 index 0000000000000..c2ebd0eac01a9 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/jdbc/copy/gcs/GcsStreamCopier.kt @@ -0,0 +1,266 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.jdbc.copy.gcs + +import com.google.auth.oauth2.GoogleCredentials +import com.google.cloud.WriteChannel +import com.google.cloud.storage.BlobId +import com.google.cloud.storage.BlobInfo +import com.google.cloud.storage.Storage +import com.google.cloud.storage.StorageOptions +import com.google.common.annotations.VisibleForTesting +import io.airbyte.cdk.db.jdbc.JdbcDatabase +import io.airbyte.cdk.integrations.destination.StandardNameTransformer +import io.airbyte.cdk.integrations.destination.jdbc.SqlOperations +import io.airbyte.cdk.integrations.destination.jdbc.StagingFilenameGenerator +import io.airbyte.cdk.integrations.destination.jdbc.constants.GlobalDataSizeConstants +import io.airbyte.cdk.integrations.destination.jdbc.copy.StreamCopier +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.v0.AirbyteRecordMessage +import io.airbyte.protocol.models.v0.DestinationSyncMode +import java.io.ByteArrayInputStream +import java.io.IOException +import java.io.InputStream +import java.io.PrintWriter +import java.nio.channels.Channels +import java.nio.charset.StandardCharsets +import java.sql.SQLException +import java.sql.Timestamp +import java.time.Instant +import java.util.* +import org.apache.commons.csv.CSVFormat +import org.apache.commons.csv.CSVPrinter +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +abstract class GcsStreamCopier( + protected val stagingFolder: String, + private val destSyncMode: DestinationSyncMode, + protected val schemaName: String, + protected val streamName: String, + private val storageClient: Storage, + protected val db: JdbcDatabase, + protected val gcsConfig: GcsConfig, + private val nameTransformer: StandardNameTransformer, + private val sqlOperations: SqlOperations +) : StreamCopier { + @get:VisibleForTesting val tmpTableName: String = nameTransformer.getTmpTableName(streamName) + protected val gcsStagingFiles: MutableSet = HashSet() + protected var filenameGenerator: StagingFilenameGenerator = + StagingFilenameGenerator( + streamName, + GlobalDataSizeConstants.DEFAULT_MAX_BATCH_SIZE_BYTES.toLong() + ) + private val channels = HashMap() + private val csvPrinters = HashMap() + + private fun prepareGcsStagingFile(): String { + return java.lang.String.join( + "/", + stagingFolder, + schemaName, + filenameGenerator.stagingFilename + ) + } + + override fun prepareStagingFile(): String? { + val name = prepareGcsStagingFile() + if (!gcsStagingFiles.contains(name)) { + gcsStagingFiles.add(name) + val blobId = BlobId.of(gcsConfig.bucketName, name) + val blobInfo = BlobInfo.newBuilder(blobId).build() + val blob = storageClient.create(blobInfo) + val channel = blob.writer() + channels[name] = channel + val outputStream = Channels.newOutputStream(channel) + + val writer = PrintWriter(outputStream, true, StandardCharsets.UTF_8) + try { + csvPrinters[name] = CSVPrinter(writer, CSVFormat.DEFAULT) + } catch (e: IOException) { + throw RuntimeException(e) + } + } + return name + } + + @Throws(Exception::class) + override fun write(id: UUID?, recordMessage: AirbyteRecordMessage?, gcsFileName: String?) { + if (csvPrinters.containsKey(gcsFileName)) { + csvPrinters[gcsFileName]!!.printRecord( + id, + Jsons.serialize(recordMessage!!.data), + Timestamp.from(Instant.ofEpochMilli(recordMessage.emittedAt)) + ) + } + } + + @Throws(Exception::class) + override fun closeNonCurrentStagingFileWriters() { + // TODO need to update this method when updating whole class for using GcsWriter + } + + @Throws(Exception::class) + override fun closeStagingUploader(hasFailed: Boolean) { + LOGGER.info("Uploading remaining data for {} stream.", streamName) + for (csvPrinter in csvPrinters.values) { + csvPrinter.close() + } + for (channel in channels.values) { + channel.close() + } + LOGGER.info("All data for {} stream uploaded.", streamName) + } + + @Throws(Exception::class) + override fun copyStagingFileToTemporaryTable() { + LOGGER.info( + "Starting copy to tmp table: {} in destination for stream: {}, schema: {}.", + tmpTableName, + streamName, + schemaName + ) + for (gcsStagingFile in gcsStagingFiles) { + copyGcsCsvFileIntoTable( + db, + getFullGcsPath(gcsConfig.bucketName, gcsStagingFile), + schemaName, + tmpTableName, + gcsConfig + ) + } + LOGGER.info( + "Copy to tmp table {} in destination for stream {} complete.", + tmpTableName, + streamName + ) + } + + @Throws(Exception::class) + override fun removeFileAndDropTmpTable() { + for (gcsStagingFile in gcsStagingFiles) { + LOGGER.info("Begin cleaning gcs staging file {}.", gcsStagingFile) + val blobId = BlobId.of(gcsConfig.bucketName, gcsStagingFile) + if (storageClient[blobId].exists()) { + storageClient.delete(blobId) + } + LOGGER.info("GCS staging file {} cleaned.", gcsStagingFile) + } + + LOGGER.info("Begin cleaning {} tmp table in destination.", tmpTableName) + sqlOperations.dropTableIfExists(db, schemaName, tmpTableName) + LOGGER.info("{} tmp table in destination cleaned.", tmpTableName) + } + + @Throws(Exception::class) + override fun createDestinationSchema() { + LOGGER.info("Creating schema in destination if it doesn't exist: {}", schemaName) + sqlOperations.createSchemaIfNotExists(db, schemaName) + } + + @Throws(Exception::class) + override fun createTemporaryTable() { + LOGGER.info( + "Preparing tmp table in destination for stream: {}, schema: {}, tmp table name: {}.", + streamName, + schemaName, + tmpTableName + ) + sqlOperations.createTableIfNotExists(db, schemaName, tmpTableName) + } + + @Throws(Exception::class) + override fun createDestinationTable(): String? { + val destTableName = nameTransformer.getRawTableName(streamName) + LOGGER.info("Preparing table {} in destination.", destTableName) + sqlOperations.createTableIfNotExists(db, schemaName, destTableName) + LOGGER.info("Table {} in destination prepared.", tmpTableName) + + return destTableName + } + + @Throws(Exception::class) + override fun generateMergeStatement(destTableName: String?): String { + LOGGER.info( + "Preparing to merge tmp table {} to dest table: {}, schema: {}, in destination.", + tmpTableName, + destTableName, + schemaName + ) + val queries = StringBuilder() + if (destSyncMode == DestinationSyncMode.OVERWRITE) { + queries.append(sqlOperations.truncateTableQuery(db, schemaName, destTableName)) + LOGGER.info( + "Destination OVERWRITE mode detected. Dest table: {}, schema: {}, will be truncated.", + destTableName, + schemaName + ) + } + queries.append(sqlOperations.insertTableQuery(db, schemaName, tmpTableName, destTableName)) + return queries.toString() + } + + override val currentFile: String? + get() = // TODO need to update this method when updating whole class for using GcsWriter + null + + @Throws(SQLException::class) + abstract fun copyGcsCsvFileIntoTable( + database: JdbcDatabase?, + gcsFileLocation: String?, + schema: String?, + tableName: String?, + gcsConfig: GcsConfig? + ) + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(GcsStreamCopier::class.java) + + // It is optimal to write every 10,000,000 records (BATCH_SIZE * MAX_PER_FILE_PART_COUNT) to + // a new + // file. + // The BATCH_SIZE is defined in CopyConsumerFactory. + // The average size of such a file will be about 1 GB. + // This will make it easier to work with files and speed up the recording of large amounts + // of data. + // In addition, for a large number of records, we will not get a drop in the copy request to + // QUERY_TIMEOUT when + // the records from the file are copied to the staging table. + const val MAX_PARTS_PER_FILE: Int = 1000 + private fun getFullGcsPath(bucketName: String?, stagingFile: String): String { + // this is intentionally gcs:/ not gcs:// since the join adds the additional slash + return java.lang.String.join("/", "gcs:/", bucketName, stagingFile) + } + + @Throws(IOException::class) + fun attemptWriteToPersistence(gcsConfig: GcsConfig) { + val outputTableName = + "_airbyte_connection_test_" + + UUID.randomUUID().toString().replace("-".toRegex(), "") + attemptWriteAndDeleteGcsObject(gcsConfig, outputTableName) + } + + @Throws(IOException::class) + private fun attemptWriteAndDeleteGcsObject(gcsConfig: GcsConfig, outputTableName: String) { + val storage = getStorageClient(gcsConfig) + val blobId = BlobId.of(gcsConfig.bucketName, "check-content/$outputTableName") + val blobInfo = BlobInfo.newBuilder(blobId).build() + + storage.create(blobInfo, "".toByteArray(StandardCharsets.UTF_8)) + storage.delete(blobId) + } + + @Throws(IOException::class) + fun getStorageClient(gcsConfig: GcsConfig): Storage { + val credentialsInputStream: InputStream = + ByteArrayInputStream(gcsConfig.credentialsJson.toByteArray(StandardCharsets.UTF_8)) + val credentials = GoogleCredentials.fromStream(credentialsInputStream) + return StorageOptions.newBuilder() + .setCredentials(credentials) + .setProjectId(gcsConfig.projectId) + .build() + .service + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/jdbc/copy/gcs/GcsStreamCopierFactory.kt b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/jdbc/copy/gcs/GcsStreamCopierFactory.kt new file mode 100644 index 0000000000000..e397e70227497 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/jdbc/copy/gcs/GcsStreamCopierFactory.kt @@ -0,0 +1,76 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.jdbc.copy.gcs + +import com.google.auth.oauth2.GoogleCredentials +import com.google.cloud.storage.Storage +import com.google.cloud.storage.StorageOptions +import io.airbyte.cdk.db.jdbc.JdbcDatabase +import io.airbyte.cdk.integrations.destination.StandardNameTransformer +import io.airbyte.cdk.integrations.destination.jdbc.SqlOperations +import io.airbyte.cdk.integrations.destination.jdbc.copy.StreamCopier +import io.airbyte.cdk.integrations.destination.jdbc.copy.StreamCopierFactory +import io.airbyte.cdk.integrations.destination.jdbc.copy.StreamCopierFactory.Companion.getSchema +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import io.airbyte.protocol.models.v0.DestinationSyncMode +import java.io.ByteArrayInputStream +import java.io.InputStream +import java.nio.charset.StandardCharsets + +abstract class GcsStreamCopierFactory : StreamCopierFactory { + /** Used by the copy consumer. */ + fun create( + configuredSchema: String?, + gcsConfig: GcsConfig, + stagingFolder: String?, + configuredStream: ConfiguredAirbyteStream?, + nameTransformer: StandardNameTransformer?, + db: JdbcDatabase?, + sqlOperations: SqlOperations? + ): StreamCopier? { + try { + val stream = configuredStream!!.stream + val syncMode = configuredStream.destinationSyncMode + val schema = getSchema(stream.namespace, configuredSchema!!, nameTransformer!!) + + val credentialsInputStream: InputStream = + ByteArrayInputStream(gcsConfig.credentialsJson.toByteArray(StandardCharsets.UTF_8)) + val credentials = GoogleCredentials.fromStream(credentialsInputStream) + val storageClient = + StorageOptions.newBuilder() + .setCredentials(credentials) + .setProjectId(gcsConfig.projectId) + .build() + .service + + return create( + stagingFolder, + syncMode, + schema, + stream.name, + storageClient, + db, + gcsConfig, + nameTransformer, + sqlOperations + ) + } catch (e: Exception) { + throw RuntimeException(e) + } + } + + /** For specific copier suppliers to implement. */ + @Throws(Exception::class) + abstract fun create( + stagingFolder: String?, + syncMode: DestinationSyncMode?, + schema: String?, + streamName: String?, + storageClient: Storage?, + db: JdbcDatabase?, + gcsConfig: GcsConfig?, + nameTransformer: StandardNameTransformer?, + sqlOperations: SqlOperations? + ): StreamCopier? +} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/test/java/io/airbyte/cdk/integrations/destination/gcs/GcsDestinationConfigTest.java b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/test/java/io/airbyte/cdk/integrations/destination/gcs/GcsDestinationConfigTest.java deleted file mode 100644 index d4252764384e7..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/test/java/io/airbyte/cdk/integrations/destination/gcs/GcsDestinationConfigTest.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.gcs; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.destination.gcs.credential.GcsCredentialConfig; -import io.airbyte.cdk.integrations.destination.gcs.credential.GcsHmacKeyCredentialConfig; -import io.airbyte.cdk.integrations.destination.s3.S3FormatConfig; -import io.airbyte.cdk.integrations.destination.s3.avro.S3AvroFormatConfig; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.resources.MoreResources; -import java.io.IOException; -import org.junit.jupiter.api.Test; - -class GcsDestinationConfigTest { - - @Test - public void testGetGcsDestinationConfig() throws IOException { - final JsonNode configJson = Jsons.deserialize(MoreResources.readResource("test_config.json")); - - final GcsDestinationConfig config = GcsDestinationConfig.getGcsDestinationConfig(configJson); - assertEquals("test_bucket", config.getBucketName()); - assertEquals("test_path", config.getBucketPath()); - assertEquals("us-west1", config.getBucketRegion()); - - final GcsCredentialConfig credentialConfig = config.getGcsCredentialConfig(); - assertTrue(credentialConfig instanceof GcsHmacKeyCredentialConfig); - - final GcsHmacKeyCredentialConfig hmacKeyConfig = (GcsHmacKeyCredentialConfig) credentialConfig; - assertEquals("test_access_id", hmacKeyConfig.getHmacKeyAccessId()); - assertEquals("test_secret", hmacKeyConfig.getHmacKeySecret()); - - final S3FormatConfig formatConfig = config.getFormatConfig(); - assertTrue(formatConfig instanceof S3AvroFormatConfig); - - final S3AvroFormatConfig avroFormatConfig = (S3AvroFormatConfig) formatConfig; - assertEquals("deflate-5", avroFormatConfig.getCodecFactory().toString()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/test/java/io/airbyte/cdk/integrations/destination/gcs/avro/GcsAvroFormatConfigTest.java b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/test/java/io/airbyte/cdk/integrations/destination/gcs/avro/GcsAvroFormatConfigTest.java deleted file mode 100644 index a535a4679c567..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/test/java/io/airbyte/cdk/integrations/destination/gcs/avro/GcsAvroFormatConfigTest.java +++ /dev/null @@ -1,144 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.gcs.avro; - -import static com.amazonaws.services.s3.internal.Constants.MB; -import static io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory.DEFAULT_PART_SIZE_MB; -import static org.junit.jupiter.api.Assertions.assertEquals; - -import alex.mojaki.s3upload.StreamTransferManager; -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.Lists; -import io.airbyte.cdk.integrations.destination.gcs.GcsDestinationConfig; -import io.airbyte.cdk.integrations.destination.gcs.util.ConfigTestUtils; -import io.airbyte.cdk.integrations.destination.s3.S3FormatConfig; -import io.airbyte.cdk.integrations.destination.s3.avro.S3AvroFormatConfig; -import io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory; -import io.airbyte.commons.json.Jsons; -import java.util.List; -import org.apache.avro.file.CodecFactory; -import org.apache.avro.file.DataFileConstants; -import org.apache.commons.lang3.reflect.FieldUtils; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -class GcsAvroFormatConfigTest { - - @Test - public void testParseCodecConfigNull() { - final List nullConfigs = Lists.newArrayList("{}", "{ \"codec\": \"no compression\" }"); - for (final String nullConfig : nullConfigs) { - assertEquals( - DataFileConstants.NULL_CODEC, - S3AvroFormatConfig.parseCodecConfig(Jsons.deserialize(nullConfig)).toString()); - } - } - - @Test - public void testParseCodecConfigDeflate() { - // default compression level 0 - final CodecFactory codecFactory1 = S3AvroFormatConfig.parseCodecConfig( - Jsons.deserialize("{ \"codec\": \"deflate\" }")); - assertEquals("deflate-0", codecFactory1.toString()); - - // compression level 5 - final CodecFactory codecFactory2 = S3AvroFormatConfig.parseCodecConfig( - Jsons.deserialize("{ \"codec\": \"deflate\", \"compression_level\": 5 }")); - assertEquals("deflate-5", codecFactory2.toString()); - } - - @Test - public void testParseCodecConfigBzip2() { - final JsonNode bzip2Config = Jsons.deserialize("{ \"codec\": \"bzip2\" }"); - final CodecFactory codecFactory = S3AvroFormatConfig.parseCodecConfig(bzip2Config); - assertEquals(DataFileConstants.BZIP2_CODEC, codecFactory.toString()); - } - - @Test - public void testParseCodecConfigXz() { - // default compression level 6 - final CodecFactory codecFactory1 = S3AvroFormatConfig.parseCodecConfig( - Jsons.deserialize("{ \"codec\": \"xz\" }")); - assertEquals("xz-6", codecFactory1.toString()); - - // compression level 7 - final CodecFactory codecFactory2 = S3AvroFormatConfig.parseCodecConfig( - Jsons.deserialize("{ \"codec\": \"xz\", \"compression_level\": 7 }")); - assertEquals("xz-7", codecFactory2.toString()); - } - - @Test - public void testParseCodecConfigZstandard() { - // default compression level 3 - final CodecFactory codecFactory1 = S3AvroFormatConfig.parseCodecConfig( - Jsons.deserialize("{ \"codec\": \"zstandard\" }")); - // There is no way to verify the checksum; all relevant methods are private or protected... - assertEquals("zstandard[3]", codecFactory1.toString()); - - // compression level 20 - final CodecFactory codecFactory2 = S3AvroFormatConfig.parseCodecConfig( - Jsons.deserialize( - "{ \"codec\": \"zstandard\", \"compression_level\": 20, \"include_checksum\": true }")); - // There is no way to verify the checksum; all relevant methods are private or protected... - assertEquals("zstandard[20]", codecFactory2.toString()); - } - - @Test - public void testParseCodecConfigSnappy() { - final JsonNode snappyConfig = Jsons.deserialize("{ \"codec\": \"snappy\" }"); - final CodecFactory codecFactory = S3AvroFormatConfig.parseCodecConfig(snappyConfig); - assertEquals(DataFileConstants.SNAPPY_CODEC, codecFactory.toString()); - } - - @Test - public void testParseCodecConfigInvalid() { - Assertions.assertThrows(IllegalArgumentException.class, () -> { - final JsonNode invalidConfig = Jsons.deserialize("{ \"codec\": \"bi-directional-bfs\" }"); - S3AvroFormatConfig.parseCodecConfig(invalidConfig); - }); - } - - @Test - public void testHandlePartSizeConfig() throws IllegalAccessException { - - final JsonNode config = ConfigTestUtils.getBaseConfig(Jsons.deserialize("{\n" - + " \"format_type\": \"AVRO\"\n" - + "}")); - - final GcsDestinationConfig gcsDestinationConfig = GcsDestinationConfig - .getGcsDestinationConfig(config); - ConfigTestUtils.assertBaseConfig(gcsDestinationConfig); - - final S3FormatConfig formatConfig = gcsDestinationConfig.getFormatConfig(); - assertEquals("AVRO", formatConfig.getFormat().name()); - // Assert that is set properly in config - final StreamTransferManager streamTransferManager = StreamTransferManagerFactory - .create(gcsDestinationConfig.getBucketName(), "objectKey", null) - .get(); - - final Integer partSizeBytes = (Integer) FieldUtils.readField(streamTransferManager, "partSize", true); - assertEquals(MB * DEFAULT_PART_SIZE_MB, partSizeBytes); - } - - @Test - public void testHandleAbsenceOfPartSizeConfig() throws IllegalAccessException { - - final JsonNode config = ConfigTestUtils.getBaseConfig(Jsons.deserialize("{\n" - + " \"format_type\": \"AVRO\"\n" - + "}")); - - final GcsDestinationConfig gcsDestinationConfig = GcsDestinationConfig - .getGcsDestinationConfig(config); - ConfigTestUtils.assertBaseConfig(gcsDestinationConfig); - - final StreamTransferManager streamTransferManager = StreamTransferManagerFactory - .create(gcsDestinationConfig.getBucketName(), "objectKey", null) - .get(); - - final Integer partSizeBytes = (Integer) FieldUtils.readField(streamTransferManager, "partSize", true); - assertEquals(MB * DEFAULT_PART_SIZE_MB, partSizeBytes); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/test/java/io/airbyte/cdk/integrations/destination/gcs/avro/GcsAvroWriterTest.java b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/test/java/io/airbyte/cdk/integrations/destination/gcs/avro/GcsAvroWriterTest.java deleted file mode 100644 index dca5629d5319f..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/test/java/io/airbyte/cdk/integrations/destination/gcs/avro/GcsAvroWriterTest.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.gcs.avro; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.Mockito.RETURNS_DEEP_STUBS; -import static org.mockito.Mockito.mock; - -import com.amazonaws.services.s3.AmazonS3; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.Lists; -import io.airbyte.cdk.integrations.base.DestinationConfig; -import io.airbyte.cdk.integrations.destination.gcs.GcsDestinationConfig; -import io.airbyte.cdk.integrations.destination.gcs.credential.GcsHmacKeyCredentialConfig; -import io.airbyte.cdk.integrations.destination.s3.avro.S3AvroFormatConfig; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteStream; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.v0.SyncMode; -import java.io.IOException; -import java.sql.Timestamp; -import java.time.Instant; -import org.junit.jupiter.api.Test; - -class GcsAvroWriterTest { - - @Test - public void generatesCorrectObjectPath() throws IOException { - DestinationConfig.initialize(Jsons.deserialize("{}")); - - final GcsAvroWriter writer = new GcsAvroWriter( - new GcsDestinationConfig( - "fake-bucket", - "fake-bucketPath", - "fake-bucketRegion", - new GcsHmacKeyCredentialConfig("fake-access-id", "fake-secret"), - new S3AvroFormatConfig(new ObjectMapper().createObjectNode())), - mock(AmazonS3.class, RETURNS_DEEP_STUBS), - new ConfiguredAirbyteStream() - .withStream(new AirbyteStream() - .withNamespace("fake-namespace") - .withName("fake-stream").withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH))), - Timestamp.from(Instant.ofEpochMilli(1234)), - null); - - assertEquals("fake-bucketPath/fake-namespace/fake-stream/1970_01_01_1234_0.avro", writer.getOutputPath()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/test/java/io/airbyte/cdk/integrations/destination/gcs/csv/GcsCsvFormatConfigTest.java b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/test/java/io/airbyte/cdk/integrations/destination/gcs/csv/GcsCsvFormatConfigTest.java deleted file mode 100644 index 9b58552db7e14..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/test/java/io/airbyte/cdk/integrations/destination/gcs/csv/GcsCsvFormatConfigTest.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.gcs.csv; - -import static com.amazonaws.services.s3.internal.Constants.MB; -import static io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory.DEFAULT_PART_SIZE_MB; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import alex.mojaki.s3upload.StreamTransferManager; -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.destination.gcs.GcsDestinationConfig; -import io.airbyte.cdk.integrations.destination.gcs.util.ConfigTestUtils; -import io.airbyte.cdk.integrations.destination.s3.S3FormatConfig; -import io.airbyte.cdk.integrations.destination.s3.util.Flattening; -import io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory; -import io.airbyte.commons.json.Jsons; -import org.apache.commons.lang3.reflect.FieldUtils; -import org.junit.jupiter.api.Test; - -// GcsCsvFormatConfig -public class GcsCsvFormatConfigTest { - - @Test - // Flattening enums can be created from value string - public void testFlatteningCreationFromString() { - assertEquals(Flattening.NO, Flattening.fromValue("no flattening")); - assertEquals(Flattening.ROOT_LEVEL, Flattening.fromValue("root level flattening")); - try { - Flattening.fromValue("invalid flattening value"); - } catch (final Exception e) { - assertTrue(e instanceof IllegalArgumentException); - } - } - - @Test - public void testHandlePartSizeConfig() throws IllegalAccessException { - - final JsonNode config = ConfigTestUtils.getBaseConfig(Jsons.deserialize("{\n" - + " \"format_type\": \"CSV\",\n" - + " \"flattening\": \"Root level flattening\"\n" - + "}")); - - final GcsDestinationConfig gcsDestinationConfig = GcsDestinationConfig.getGcsDestinationConfig(config); - ConfigTestUtils.assertBaseConfig(gcsDestinationConfig); - - final S3FormatConfig formatConfig = gcsDestinationConfig.getFormatConfig(); - assertEquals("CSV", formatConfig.getFormat().name()); - // Assert that is set properly in config - final StreamTransferManager streamTransferManager = StreamTransferManagerFactory - .create(gcsDestinationConfig.getBucketName(), "objectKey", null) - .get(); - - final Integer partSizeBytes = (Integer) FieldUtils.readField(streamTransferManager, "partSize", true); - assertEquals(MB * DEFAULT_PART_SIZE_MB, partSizeBytes); - } - - @Test - public void testHandleAbsenceOfPartSizeConfig() throws IllegalAccessException { - - final JsonNode config = ConfigTestUtils.getBaseConfig(Jsons.deserialize("{\n" - + " \"format_type\": \"CSV\",\n" - + " \"flattening\": \"Root level flattening\"\n" - + "}")); - - final GcsDestinationConfig gcsDestinationConfig = GcsDestinationConfig.getGcsDestinationConfig(config); - ConfigTestUtils.assertBaseConfig(gcsDestinationConfig); - - final StreamTransferManager streamTransferManager = StreamTransferManagerFactory - .create(gcsDestinationConfig.getBucketName(), "objectKey", null) - .get(); - - final Integer partSizeBytes = (Integer) FieldUtils.readField(streamTransferManager, "partSize", true); - assertEquals(MB * DEFAULT_PART_SIZE_MB, partSizeBytes); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/test/java/io/airbyte/cdk/integrations/destination/gcs/jsonl/GcsJsonlFormatConfigTest.java b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/test/java/io/airbyte/cdk/integrations/destination/gcs/jsonl/GcsJsonlFormatConfigTest.java deleted file mode 100644 index 10ee1f187db31..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/test/java/io/airbyte/cdk/integrations/destination/gcs/jsonl/GcsJsonlFormatConfigTest.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.gcs.jsonl; - -import static com.amazonaws.services.s3.internal.Constants.MB; -import static io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory.DEFAULT_PART_SIZE_MB; -import static org.junit.jupiter.api.Assertions.assertEquals; - -import alex.mojaki.s3upload.StreamTransferManager; -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.destination.gcs.GcsDestinationConfig; -import io.airbyte.cdk.integrations.destination.gcs.util.ConfigTestUtils; -import io.airbyte.cdk.integrations.destination.s3.S3FormatConfig; -import io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory; -import io.airbyte.commons.json.Jsons; -import org.apache.commons.lang3.reflect.FieldUtils; -import org.junit.jupiter.api.Test; - -// GcsJsonlFormatConfig -public class GcsJsonlFormatConfigTest { - - @Test - public void testHandlePartSizeConfig() throws IllegalAccessException { - - final JsonNode config = ConfigTestUtils.getBaseConfig(Jsons.deserialize("{\n" - + " \"format_type\": \"JSONL\"\n" - + "}")); - - final GcsDestinationConfig gcsDestinationConfig = GcsDestinationConfig - .getGcsDestinationConfig(config); - ConfigTestUtils.assertBaseConfig(gcsDestinationConfig); - - final S3FormatConfig formatConfig = gcsDestinationConfig.getFormatConfig(); - assertEquals("JSONL", formatConfig.getFormat().name()); - - // Assert that is set properly in config - final StreamTransferManager streamTransferManager = StreamTransferManagerFactory - .create(gcsDestinationConfig.getBucketName(), "objectKey", null) - .get(); - - final Integer partSizeBytes = (Integer) FieldUtils.readField(streamTransferManager, "partSize", true); - assertEquals(MB * DEFAULT_PART_SIZE_MB, partSizeBytes); - } - - @Test - public void testHandleAbsenceOfPartSizeConfig() throws IllegalAccessException { - - final JsonNode config = ConfigTestUtils.getBaseConfig(Jsons.deserialize("{\n" - + " \"format_type\": \"JSONL\"\n" - + "}")); - - final GcsDestinationConfig gcsDestinationConfig = GcsDestinationConfig - .getGcsDestinationConfig(config); - ConfigTestUtils.assertBaseConfig(gcsDestinationConfig); - - final StreamTransferManager streamTransferManager = StreamTransferManagerFactory - .create(gcsDestinationConfig.getBucketName(), "objectKey", null) - .get(); - - final Integer partSizeBytes = (Integer) FieldUtils.readField(streamTransferManager, "partSize", true); - assertEquals(MB * DEFAULT_PART_SIZE_MB, partSizeBytes); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/test/java/io/airbyte/cdk/integrations/destination/gcs/util/ConfigTestUtils.java b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/test/java/io/airbyte/cdk/integrations/destination/gcs/util/ConfigTestUtils.java deleted file mode 100644 index ef47148b802fd..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/test/java/io/airbyte/cdk/integrations/destination/gcs/util/ConfigTestUtils.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.gcs.util; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.destination.gcs.GcsDestinationConfig; -import io.airbyte.commons.json.Jsons; - -public class ConfigTestUtils { - - public static JsonNode getBaseConfig(final JsonNode formatConfig) { - return Jsons.deserialize("{\n" - + " \"gcs_bucket_name\": \"test-bucket-name\",\n" - + " \"gcs_bucket_path\": \"test_path\",\n" - + " \"gcs_bucket_region\": \"us-east-2\"," - + " \"credential\": {\n" - + " \"credential_type\": \"HMAC_KEY\",\n" - + " \"hmac_key_access_id\": \"some_hmac_key\",\n" - + " \"hmac_key_secret\": \"some_key_secret\"\n" - + " }," - + " \"format\": " + formatConfig - + "}"); - - } - - public static void assertBaseConfig(final GcsDestinationConfig gcsDestinationConfig) { - assertEquals("test-bucket-name", gcsDestinationConfig.getBucketName()); - assertEquals("test_path", gcsDestinationConfig.getBucketPath()); - assertEquals("us-east-2", gcsDestinationConfig.getBucketRegion()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsDestinationConfigTest.kt b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsDestinationConfigTest.kt new file mode 100644 index 0000000000000..7ced8b69ac8ea --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsDestinationConfigTest.kt @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.gcs + +import io.airbyte.cdk.integrations.destination.gcs.credential.GcsHmacKeyCredentialConfig +import io.airbyte.cdk.integrations.destination.s3.avro.S3AvroFormatConfig +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.resources.MoreResources +import java.io.IOException +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class GcsDestinationConfigTest { + @Test + @Throws(IOException::class) + fun testGetGcsDestinationConfig() { + val configJson = Jsons.deserialize(MoreResources.readResource("test_config.json")) + + val config = GcsDestinationConfig.getGcsDestinationConfig(configJson) + Assertions.assertEquals("test_bucket", config.bucketName) + Assertions.assertEquals("test_path", config.bucketPath) + Assertions.assertEquals("us-west1", config.bucketRegion) + + val credentialConfig = config.gcsCredentialConfig + Assertions.assertTrue(credentialConfig is GcsHmacKeyCredentialConfig) + + val hmacKeyConfig = credentialConfig as GcsHmacKeyCredentialConfig + Assertions.assertEquals("test_access_id", hmacKeyConfig.hmacKeyAccessId) + Assertions.assertEquals("test_secret", hmacKeyConfig.hmacKeySecret) + + val formatConfig = config.formatConfig + Assertions.assertTrue(formatConfig is S3AvroFormatConfig) + + val avroFormatConfig = formatConfig as S3AvroFormatConfig + Assertions.assertEquals("deflate-5", avroFormatConfig.codecFactory.toString()) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/gcs/avro/GcsAvroFormatConfigTest.kt b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/gcs/avro/GcsAvroFormatConfigTest.kt new file mode 100644 index 0000000000000..0f8713a11362c --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/gcs/avro/GcsAvroFormatConfigTest.kt @@ -0,0 +1,140 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.gcs.avro + +import com.amazonaws.services.s3.internal.Constants +import com.google.common.collect.Lists +import io.airbyte.cdk.integrations.destination.gcs.GcsDestinationConfig +import io.airbyte.cdk.integrations.destination.gcs.util.ConfigTestUtils +import io.airbyte.cdk.integrations.destination.s3.avro.S3AvroFormatConfig.Companion.parseCodecConfig +import io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory +import io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory.create +import io.airbyte.commons.json.Jsons +import org.apache.avro.file.DataFileConstants +import org.apache.commons.lang3.reflect.FieldUtils +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class GcsAvroFormatConfigTest { + @Test + fun testParseCodecConfigNull() { + val nullConfigs: List = + Lists.newArrayList("{}", "{ \"codec\": \"no compression\" }") + for (nullConfig in nullConfigs) { + Assertions.assertEquals( + DataFileConstants.NULL_CODEC, + parseCodecConfig(Jsons.deserialize(nullConfig)).toString() + ) + } + } + + @Test + fun testParseCodecConfigDeflate() { + // default compression level 0 + val codecFactory1 = parseCodecConfig(Jsons.deserialize("{ \"codec\": \"deflate\" }")) + Assertions.assertEquals("deflate-0", codecFactory1.toString()) + + // compression level 5 + val codecFactory2 = + parseCodecConfig( + Jsons.deserialize("{ \"codec\": \"deflate\", \"compression_level\": 5 }") + ) + Assertions.assertEquals("deflate-5", codecFactory2.toString()) + } + + @Test + fun testParseCodecConfigBzip2() { + val bzip2Config = Jsons.deserialize("{ \"codec\": \"bzip2\" }") + val codecFactory = parseCodecConfig(bzip2Config) + Assertions.assertEquals(DataFileConstants.BZIP2_CODEC, codecFactory.toString()) + } + + @Test + fun testParseCodecConfigXz() { + // default compression level 6 + val codecFactory1 = parseCodecConfig(Jsons.deserialize("{ \"codec\": \"xz\" }")) + Assertions.assertEquals("xz-6", codecFactory1.toString()) + + // compression level 7 + val codecFactory2 = + parseCodecConfig(Jsons.deserialize("{ \"codec\": \"xz\", \"compression_level\": 7 }")) + Assertions.assertEquals("xz-7", codecFactory2.toString()) + } + + @Test + fun testParseCodecConfigZstandard() { + // default compression level 3 + val codecFactory1 = parseCodecConfig(Jsons.deserialize("{ \"codec\": \"zstandard\" }")) + // There is no way to verify the checksum; all relevant methods are private or protected... + Assertions.assertEquals("zstandard[3]", codecFactory1.toString()) + + // compression level 20 + val codecFactory2 = + parseCodecConfig( + Jsons.deserialize( + "{ \"codec\": \"zstandard\", \"compression_level\": 20, \"include_checksum\": true }" + ) + ) + // There is no way to verify the checksum; all relevant methods are private or protected... + Assertions.assertEquals("zstandard[20]", codecFactory2.toString()) + } + + @Test + fun testParseCodecConfigSnappy() { + val snappyConfig = Jsons.deserialize("{ \"codec\": \"snappy\" }") + val codecFactory = parseCodecConfig(snappyConfig) + Assertions.assertEquals(DataFileConstants.SNAPPY_CODEC, codecFactory.toString()) + } + + @Test + fun testParseCodecConfigInvalid() { + Assertions.assertThrows(IllegalArgumentException::class.java) { + val invalidConfig = Jsons.deserialize("{ \"codec\": \"bi-directional-bfs\" }") + parseCodecConfig(invalidConfig) + } + } + + @Test + @Throws(IllegalAccessException::class) + fun testHandlePartSizeConfig() { + val config = + ConfigTestUtils.getBaseConfig(Jsons.deserialize("""{ + "format_type": "AVRO" +}""")) + + val gcsDestinationConfig = GcsDestinationConfig.getGcsDestinationConfig(config) + ConfigTestUtils.assertBaseConfig(gcsDestinationConfig) + + val formatConfig = gcsDestinationConfig.formatConfig!! + Assertions.assertEquals("AVRO", formatConfig.format.name) + // Assert that is set properly in config + val streamTransferManager = create(gcsDestinationConfig.bucketName, "objectKey", null).get() + + val partSizeBytes = FieldUtils.readField(streamTransferManager, "partSize", true) as Int + Assertions.assertEquals( + Constants.MB * StreamTransferManagerFactory.DEFAULT_PART_SIZE_MB, + partSizeBytes + ) + } + + @Test + @Throws(IllegalAccessException::class) + fun testHandleAbsenceOfPartSizeConfig() { + val config = + ConfigTestUtils.getBaseConfig(Jsons.deserialize("""{ + "format_type": "AVRO" +}""")) + + val gcsDestinationConfig = GcsDestinationConfig.getGcsDestinationConfig(config) + ConfigTestUtils.assertBaseConfig(gcsDestinationConfig) + + val streamTransferManager = create(gcsDestinationConfig.bucketName, "objectKey", null).get() + + val partSizeBytes = FieldUtils.readField(streamTransferManager, "partSize", true) as Int + Assertions.assertEquals( + Constants.MB * StreamTransferManagerFactory.DEFAULT_PART_SIZE_MB, + partSizeBytes + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/gcs/avro/GcsAvroWriterTest.kt b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/gcs/avro/GcsAvroWriterTest.kt new file mode 100644 index 0000000000000..c5473698ef905 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/gcs/avro/GcsAvroWriterTest.kt @@ -0,0 +1,56 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.gcs.avro + +import com.amazonaws.services.s3.AmazonS3 +import com.fasterxml.jackson.databind.ObjectMapper +import com.google.common.collect.Lists +import io.airbyte.cdk.integrations.base.DestinationConfig.Companion.initialize +import io.airbyte.cdk.integrations.destination.gcs.GcsDestinationConfig +import io.airbyte.cdk.integrations.destination.gcs.credential.GcsHmacKeyCredentialConfig +import io.airbyte.cdk.integrations.destination.s3.avro.S3AvroFormatConfig +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.v0.AirbyteStream +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import io.airbyte.protocol.models.v0.SyncMode +import java.io.IOException +import java.sql.Timestamp +import java.time.Instant +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.mockito.Mockito + +internal class GcsAvroWriterTest { + @Test + @Throws(IOException::class) + fun generatesCorrectObjectPath() { + initialize(Jsons.deserialize("{}")) + + val writer = + GcsAvroWriter( + GcsDestinationConfig( + "fake-bucket", + "fake-bucketPath", + "fake-bucketRegion", + GcsHmacKeyCredentialConfig("fake-access-id", "fake-secret"), + S3AvroFormatConfig(ObjectMapper().createObjectNode()) + ), + Mockito.mock(AmazonS3::class.java, Mockito.RETURNS_DEEP_STUBS), + ConfiguredAirbyteStream() + .withStream( + AirbyteStream() + .withNamespace("fake-namespace") + .withName("fake-stream") + .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH)) + ), + Timestamp.from(Instant.ofEpochMilli(1234)), + null + ) + + Assertions.assertEquals( + "fake-bucketPath/fake-namespace/fake-stream/1970_01_01_1234_0.avro", + writer.outputPath + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/gcs/csv/GcsCsvFormatConfigTest.kt b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/gcs/csv/GcsCsvFormatConfigTest.kt new file mode 100644 index 0000000000000..74abb6ee1925b --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/gcs/csv/GcsCsvFormatConfigTest.kt @@ -0,0 +1,83 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.gcs.csv + +import com.amazonaws.services.s3.internal.Constants +import io.airbyte.cdk.integrations.destination.gcs.GcsDestinationConfig +import io.airbyte.cdk.integrations.destination.gcs.util.ConfigTestUtils +import io.airbyte.cdk.integrations.destination.s3.util.Flattening +import io.airbyte.cdk.integrations.destination.s3.util.Flattening.Companion.fromValue +import io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory +import io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory.create +import io.airbyte.commons.json.Jsons +import org.apache.commons.lang3.reflect.FieldUtils +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +// GcsCsvFormatConfig +class GcsCsvFormatConfigTest { + @Test // Flattening enums can be created from value string + fun testFlatteningCreationFromString() { + Assertions.assertEquals(Flattening.NO, fromValue("no flattening")) + Assertions.assertEquals(Flattening.ROOT_LEVEL, fromValue("root level flattening")) + try { + fromValue("invalid flattening value") + } catch (e: Exception) { + Assertions.assertTrue(e is IllegalArgumentException) + } + } + + @Test + @Throws(IllegalAccessException::class) + fun testHandlePartSizeConfig() { + val config = + ConfigTestUtils.getBaseConfig( + Jsons.deserialize( + """{ + "format_type": "CSV", + "flattening": "Root level flattening" +}""" + ) + ) + + val gcsDestinationConfig = GcsDestinationConfig.getGcsDestinationConfig(config) + ConfigTestUtils.assertBaseConfig(gcsDestinationConfig) + + val formatConfig = gcsDestinationConfig.formatConfig!! + Assertions.assertEquals("CSV", formatConfig.format.name) + // Assert that is set properly in config + val streamTransferManager = create(gcsDestinationConfig.bucketName, "objectKey", null).get() + + val partSizeBytes = FieldUtils.readField(streamTransferManager, "partSize", true) as Int + Assertions.assertEquals( + Constants.MB * StreamTransferManagerFactory.DEFAULT_PART_SIZE_MB, + partSizeBytes + ) + } + + @Test + @Throws(IllegalAccessException::class) + fun testHandleAbsenceOfPartSizeConfig() { + val config = + ConfigTestUtils.getBaseConfig( + Jsons.deserialize( + """{ + "format_type": "CSV", + "flattening": "Root level flattening" +}""" + ) + ) + + val gcsDestinationConfig = GcsDestinationConfig.getGcsDestinationConfig(config) + ConfigTestUtils.assertBaseConfig(gcsDestinationConfig) + + val streamTransferManager = create(gcsDestinationConfig.bucketName, "objectKey", null).get() + + val partSizeBytes = FieldUtils.readField(streamTransferManager, "partSize", true) as Int + Assertions.assertEquals( + Constants.MB * StreamTransferManagerFactory.DEFAULT_PART_SIZE_MB, + partSizeBytes + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/gcs/jsonl/GcsJsonlFormatConfigTest.kt b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/gcs/jsonl/GcsJsonlFormatConfigTest.kt new file mode 100644 index 0000000000000..0b0bc3c122478 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/gcs/jsonl/GcsJsonlFormatConfigTest.kt @@ -0,0 +1,61 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.gcs.jsonl + +import com.amazonaws.services.s3.internal.Constants +import io.airbyte.cdk.integrations.destination.gcs.GcsDestinationConfig +import io.airbyte.cdk.integrations.destination.gcs.util.ConfigTestUtils +import io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory +import io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory.create +import io.airbyte.commons.json.Jsons +import org.apache.commons.lang3.reflect.FieldUtils +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +// GcsJsonlFormatConfig +class GcsJsonlFormatConfigTest { + @Test + @Throws(IllegalAccessException::class) + fun testHandlePartSizeConfig() { + val config = + ConfigTestUtils.getBaseConfig(Jsons.deserialize("""{ + "format_type": "JSONL" +}""")) + + val gcsDestinationConfig = GcsDestinationConfig.getGcsDestinationConfig(config) + ConfigTestUtils.assertBaseConfig(gcsDestinationConfig) + + val formatConfig = gcsDestinationConfig.formatConfig!! + Assertions.assertEquals("JSONL", formatConfig.format.name) + + // Assert that is set properly in config + val streamTransferManager = create(gcsDestinationConfig.bucketName, "objectKey", null).get() + + val partSizeBytes = FieldUtils.readField(streamTransferManager, "partSize", true) as Int + Assertions.assertEquals( + Constants.MB * StreamTransferManagerFactory.DEFAULT_PART_SIZE_MB, + partSizeBytes + ) + } + + @Test + @Throws(IllegalAccessException::class) + fun testHandleAbsenceOfPartSizeConfig() { + val config = + ConfigTestUtils.getBaseConfig(Jsons.deserialize("""{ + "format_type": "JSONL" +}""")) + + val gcsDestinationConfig = GcsDestinationConfig.getGcsDestinationConfig(config) + ConfigTestUtils.assertBaseConfig(gcsDestinationConfig) + + val streamTransferManager = create(gcsDestinationConfig.bucketName, "objectKey", null).get() + + val partSizeBytes = FieldUtils.readField(streamTransferManager, "partSize", true) as Int + Assertions.assertEquals( + Constants.MB * StreamTransferManagerFactory.DEFAULT_PART_SIZE_MB, + partSizeBytes + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/gcs/util/ConfigTestUtils.kt b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/gcs/util/ConfigTestUtils.kt new file mode 100644 index 0000000000000..85e00456c1345 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/gcs/util/ConfigTestUtils.kt @@ -0,0 +1,30 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.gcs.util + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.destination.gcs.GcsDestinationConfig +import io.airbyte.commons.json.Jsons +import org.junit.jupiter.api.Assertions + +object ConfigTestUtils { + fun getBaseConfig(formatConfig: JsonNode): JsonNode { + return Jsons.deserialize( + """{ + "gcs_bucket_name": "test-bucket-name", + "gcs_bucket_path": "test_path", + "gcs_bucket_region": "us-east-2", "credential": { + "credential_type": "HMAC_KEY", + "hmac_key_access_id": "some_hmac_key", + "hmac_key_secret": "some_key_secret" + }, "format": $formatConfig}""" + ) + } + + fun assertBaseConfig(gcsDestinationConfig: GcsDestinationConfig) { + Assertions.assertEquals("test-bucket-name", gcsDestinationConfig.bucketName) + Assertions.assertEquals("test_path", gcsDestinationConfig.bucketPath) + Assertions.assertEquals("us-east-2", gcsDestinationConfig.bucketRegion) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/gcs/GcsAvroParquetDestinationAcceptanceTest.java b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/gcs/GcsAvroParquetDestinationAcceptanceTest.java deleted file mode 100644 index f34a2d21b77f1..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/gcs/GcsAvroParquetDestinationAcceptanceTest.java +++ /dev/null @@ -1,153 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.gcs; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.destination.s3.S3Format; -import io.airbyte.cdk.integrations.destination.s3.avro.JsonSchemaType; -import io.airbyte.cdk.integrations.standardtest.destination.ProtocolVersion; -import io.airbyte.cdk.integrations.standardtest.destination.argproviders.NumberDataTypeTestArgumentProvider; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.protocol.models.v0.AirbyteCatalog; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteStream; -import io.airbyte.protocol.models.v0.CatalogHelpers; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.io.IOException; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; -import java.util.function.Function; -import java.util.stream.Collectors; -import java.util.stream.StreamSupport; -import org.apache.avro.Schema; -import org.apache.avro.Schema.Field; -import org.apache.avro.Schema.Type; -import org.apache.avro.generic.GenericData.Record; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.ArgumentsSource; - -public abstract class GcsAvroParquetDestinationAcceptanceTest extends GcsDestinationAcceptanceTest { - - public GcsAvroParquetDestinationAcceptanceTest(final S3Format s3Format) { - super(s3Format); - } - - @Override - public ProtocolVersion getProtocolVersion() { - return ProtocolVersion.V1; - } - - @ParameterizedTest - @ArgumentsSource(NumberDataTypeTestArgumentProvider.class) - public void testNumberDataType(final String catalogFileName, final String messagesFileName) throws Exception { - final AirbyteCatalog catalog = readCatalogFromFile(catalogFileName); - final List messages = readMessagesFromFile(messagesFileName); - - final JsonNode config = getConfig(); - final String defaultSchema = getDefaultSchema(config); - final ConfiguredAirbyteCatalog configuredCatalog = CatalogHelpers.toDefaultConfiguredCatalog(catalog); - runSyncAndVerifyStateOutput(config, messages, configuredCatalog, false); - - for (final AirbyteStream stream : catalog.getStreams()) { - final String streamName = stream.getName(); - final String schema = stream.getNamespace() != null ? stream.getNamespace() : defaultSchema; - - final Map> actualSchemaTypes = retrieveDataTypesFromPersistedFiles(streamName, schema); - final Map> expectedSchemaTypes = retrieveExpectedDataTypes(stream); - - assertEquals(expectedSchemaTypes, actualSchemaTypes); - } - } - - private Map> retrieveExpectedDataTypes(final AirbyteStream stream) { - final Iterable iterableNames = () -> stream.getJsonSchema().get("properties").fieldNames(); - final Map nameToNode = StreamSupport.stream(iterableNames.spliterator(), false) - .collect(Collectors.toMap( - Function.identity(), - name -> getJsonNode(stream, name))); - - return nameToNode - .entrySet() - .stream() - .collect(Collectors.toMap( - Entry::getKey, - entry -> getExpectedSchemaType(entry.getValue()))); - } - - private JsonNode getJsonNode(final AirbyteStream stream, final String name) { - final JsonNode properties = stream.getJsonSchema().get("properties"); - if (properties.size() == 1) { - return properties.get("data"); - } - return properties.get(name).get("items"); - } - - private Set getExpectedSchemaType(final JsonNode fieldDefinition) { - // The $ref is a migration to V1 data type protocol see well_known_types.yaml - final JsonNode typeProperty = fieldDefinition.get("type") == null ? fieldDefinition.get("$ref") : fieldDefinition.get("type"); - final JsonNode airbyteTypeProperty = fieldDefinition.get("airbyte_type"); - final String airbyteTypePropertyText = airbyteTypeProperty == null ? null : airbyteTypeProperty.asText(); - return Arrays.stream(JsonSchemaType.values()) - .filter( - value -> value.getJsonSchemaType().equals(typeProperty.asText()) && compareAirbyteTypes(airbyteTypePropertyText, value)) - .map(JsonSchemaType::getAvroType) - .collect(Collectors.toSet()); - } - - private boolean compareAirbyteTypes(final String airbyteTypePropertyText, final JsonSchemaType value) { - if (airbyteTypePropertyText == null) { - return value.getJsonSchemaAirbyteType() == null; - } - return airbyteTypePropertyText.equals(value.getJsonSchemaAirbyteType()); - } - - private AirbyteCatalog readCatalogFromFile(final String catalogFilename) throws IOException { - return Jsons.deserialize(MoreResources.readResource(catalogFilename), AirbyteCatalog.class); - } - - private List readMessagesFromFile(final String messagesFilename) throws IOException { - return MoreResources.readResource(messagesFilename).lines() - .map(record -> Jsons.deserialize(record, AirbyteMessage.class)).collect(Collectors.toList()); - } - - protected abstract Map> retrieveDataTypesFromPersistedFiles(final String streamName, final String namespace) throws Exception; - - protected Map> getTypes(final Record record) { - - final List fieldList = record - .getSchema() - .getFields() - .stream() - .filter(field -> !field.name().startsWith("_airbyte")) - .toList(); - - if (fieldList.size() == 1) { - return fieldList - .stream() - .collect( - Collectors.toMap( - Field::name, - field -> field.schema().getTypes().stream().map(Schema::getType).filter(type -> !type.equals(Type.NULL)) - .collect(Collectors.toSet()))); - } else { - return fieldList - .stream() - .collect( - Collectors.toMap( - Field::name, - field -> field.schema().getTypes() - .stream().filter(type -> !type.getType().equals(Type.NULL)) - .flatMap(type -> type.getElementType().getTypes().stream()).map(Schema::getType).filter(type -> !type.equals(Type.NULL)) - .collect(Collectors.toSet()))); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/gcs/GcsAvroTestDataComparator.java b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/gcs/GcsAvroTestDataComparator.java deleted file mode 100644 index 97793a57758d1..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/gcs/GcsAvroTestDataComparator.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.gcs; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; -import java.nio.charset.StandardCharsets; -import java.time.*; -import java.time.format.DateTimeFormatter; -import java.util.Base64; - -public class GcsAvroTestDataComparator extends AdvancedTestDataComparator { - - @Override - protected boolean compareDateValues(String expectedValue, String actualValue) { - LocalDate destinationDate = LocalDate.ofEpochDay(Long.parseLong(actualValue)); - LocalDate expectedDate = LocalDate.parse(expectedValue, DateTimeFormatter.ofPattern(AIRBYTE_DATE_FORMAT)); - return expectedDate.equals(destinationDate); - } - - private Instant getInstantFromEpoch(String epochValue) { - return Instant.ofEpochMilli(Long.parseLong(epochValue) / 1000); - } - - @Override - protected ZonedDateTime parseDestinationDateWithTz(String destinationValue) { - return ZonedDateTime.ofInstant(getInstantFromEpoch(destinationValue), ZoneOffset.UTC); - } - - @Override - protected boolean compareDateTimeValues(String airbyteMessageValue, String destinationValue) { - DateTimeFormatter format = DateTimeFormatter.ofPattern(AIRBYTE_DATETIME_FORMAT); - LocalDateTime dateTime = LocalDateTime.ofInstant(getInstantFromEpoch(destinationValue), ZoneOffset.UTC); - return super.compareDateTimeValues(airbyteMessageValue, format.format(dateTime)); - } - - @Override - protected boolean compareTimeWithoutTimeZone(final String airbyteMessageValue, final String destinationValue) { - LocalTime destinationDate = LocalTime.ofInstant(getInstantFromEpoch(destinationValue), ZoneOffset.UTC); - LocalTime expectedDate = LocalTime.parse(airbyteMessageValue, DateTimeFormatter.ISO_TIME); - return expectedDate.equals(destinationDate); - } - - @Override - protected boolean compareString(final JsonNode expectedValue, final JsonNode actualValue) { - // to handle base64 encoded strings - return expectedValue.asText().equals(actualValue.asText()) - || decodeBase64(expectedValue.asText()).equals(actualValue.asText()); - } - - private String decodeBase64(String string) { - byte[] decoded = Base64.getDecoder().decode(string); - return new String(decoded, StandardCharsets.UTF_8); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/gcs/GcsBaseAvroDestinationAcceptanceTest.java b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/gcs/GcsBaseAvroDestinationAcceptanceTest.java deleted file mode 100644 index f855843de3ebf..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/gcs/GcsBaseAvroDestinationAcceptanceTest.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.gcs; - -import com.amazonaws.services.s3.model.S3Object; -import com.amazonaws.services.s3.model.S3ObjectSummary; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectReader; -import io.airbyte.cdk.integrations.destination.s3.S3Format; -import io.airbyte.cdk.integrations.destination.s3.avro.AvroConstants; -import io.airbyte.cdk.integrations.destination.s3.avro.JsonFieldNameUpdater; -import io.airbyte.cdk.integrations.destination.s3.util.AvroRecordHelper; -import io.airbyte.cdk.integrations.standardtest.destination.ProtocolVersion; -import io.airbyte.cdk.integrations.standardtest.destination.comparator.TestDataComparator; -import io.airbyte.commons.json.Jsons; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Set; -import org.apache.avro.Schema.Type; -import org.apache.avro.file.DataFileReader; -import org.apache.avro.file.SeekableByteArrayInput; -import org.apache.avro.generic.GenericData.Record; -import org.apache.avro.generic.GenericDatumReader; - -public abstract class GcsBaseAvroDestinationAcceptanceTest extends GcsAvroParquetDestinationAcceptanceTest { - - public GcsBaseAvroDestinationAcceptanceTest() { - super(S3Format.AVRO); - } - - @Override - protected JsonNode getFormatConfig() { - return Jsons.deserialize("{\n" - + " \"format_type\": \"Avro\",\n" - + " \"compression_codec\": { \"codec\": \"no compression\", \"compression_level\": 5, \"include_checksum\": true }\n" - + "}"); - } - - @Override - protected TestDataComparator getTestDataComparator() { - return new GcsAvroTestDataComparator(); - } - - @Override - protected List retrieveRecords(final TestDestinationEnv testEnv, - final String streamName, - final String namespace, - final JsonNode streamSchema) - throws Exception { - final JsonFieldNameUpdater nameUpdater = AvroRecordHelper.getFieldNameUpdater(streamName, namespace, streamSchema); - - final List objectSummaries = getAllSyncedObjects(streamName, namespace); - final List jsonRecords = new LinkedList<>(); - - for (final S3ObjectSummary objectSummary : objectSummaries) { - final S3Object object = s3Client.getObject(objectSummary.getBucketName(), objectSummary.getKey()); - try (final DataFileReader dataFileReader = new DataFileReader<>( - new SeekableByteArrayInput(object.getObjectContent().readAllBytes()), - new GenericDatumReader<>())) { - final ObjectReader jsonReader = MAPPER.reader(); - while (dataFileReader.hasNext()) { - final Record record = dataFileReader.next(); - final byte[] jsonBytes = AvroConstants.JSON_CONVERTER.convertToJson(record); - JsonNode jsonRecord = jsonReader.readTree(jsonBytes); - jsonRecord = nameUpdater.getJsonWithOriginalFieldNames(jsonRecord); - jsonRecords.add(AvroRecordHelper.pruneAirbyteJson(jsonRecord)); - } - } - } - - return jsonRecords; - } - - @Override - protected Map> retrieveDataTypesFromPersistedFiles(final String streamName, final String namespace) throws Exception { - - final List objectSummaries = getAllSyncedObjects(streamName, namespace); - final Map> resultDataTypes = new HashMap<>(); - - for (final S3ObjectSummary objectSummary : objectSummaries) { - final S3Object object = s3Client.getObject(objectSummary.getBucketName(), objectSummary.getKey()); - try (final DataFileReader dataFileReader = new DataFileReader<>( - new SeekableByteArrayInput(object.getObjectContent().readAllBytes()), - new GenericDatumReader<>())) { - while (dataFileReader.hasNext()) { - final Record record = dataFileReader.next(); - final Map> actualDataTypes = getTypes(record); - resultDataTypes.putAll(actualDataTypes); - } - } - } - return resultDataTypes; - } - - @Override - public ProtocolVersion getProtocolVersion() { - return ProtocolVersion.V1; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/gcs/GcsBaseCsvDestinationAcceptanceTest.java b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/gcs/GcsBaseCsvDestinationAcceptanceTest.java deleted file mode 100644 index 5b55c80c71912..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/gcs/GcsBaseCsvDestinationAcceptanceTest.java +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.gcs; - -import com.amazonaws.services.s3.model.S3Object; -import com.amazonaws.services.s3.model.S3ObjectSummary; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import io.airbyte.cdk.integrations.base.JavaBaseConstants; -import io.airbyte.cdk.integrations.destination.s3.S3Format; -import io.airbyte.cdk.integrations.destination.s3.util.Flattening; -import io.airbyte.cdk.integrations.standardtest.destination.ProtocolVersion; -import io.airbyte.commons.json.Jsons; -import java.io.IOException; -import java.io.InputStreamReader; -import java.io.Reader; -import java.nio.charset.StandardCharsets; -import java.util.HashMap; -import java.util.Iterator; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.stream.StreamSupport; -import org.apache.commons.csv.CSVFormat; -import org.apache.commons.csv.CSVRecord; -import org.apache.commons.csv.QuoteMode; - -public abstract class GcsBaseCsvDestinationAcceptanceTest extends GcsDestinationAcceptanceTest { - - public GcsBaseCsvDestinationAcceptanceTest() { - super(S3Format.CSV); - } - - @Override - public ProtocolVersion getProtocolVersion() { - return ProtocolVersion.V1; - } - - @Override - protected JsonNode getFormatConfig() { - return Jsons.jsonNode(Map.of( - "format_type", outputFormat, - "flattening", Flattening.ROOT_LEVEL.getValue(), - "compression", Jsons.jsonNode(Map.of("compression_type", "No Compression")))); - } - - /** - * Convert json_schema to a map from field name to field types. - */ - private static Map getFieldTypes(final JsonNode streamSchema) { - final Map fieldTypes = new HashMap<>(); - final JsonNode fieldDefinitions = streamSchema.get("properties"); - final Iterator> iterator = fieldDefinitions.fields(); - while (iterator.hasNext()) { - final Entry entry = iterator.next(); - JsonNode fieldValue = entry.getValue(); - JsonNode typeValue = fieldValue.get("type") == null ? fieldValue.get("$ref") : fieldValue.get("type"); - fieldTypes.put(entry.getKey(), typeValue.asText()); - } - return fieldTypes; - } - - private static JsonNode getJsonNode(final Map input, final Map fieldTypes) { - final ObjectNode json = MAPPER.createObjectNode(); - - if (input.containsKey(JavaBaseConstants.COLUMN_NAME_DATA)) { - return Jsons.deserialize(input.get(JavaBaseConstants.COLUMN_NAME_DATA)); - } - - for (final Entry entry : input.entrySet()) { - final String key = entry.getKey(); - if (key.equals(JavaBaseConstants.COLUMN_NAME_AB_ID) || key - .equals(JavaBaseConstants.COLUMN_NAME_EMITTED_AT)) { - continue; - } - final String value = entry.getValue(); - if (value == null || value.equals("")) { - continue; - } - final String type = fieldTypes.get(key); - switch (type) { - case "boolean" -> json.put(key, Boolean.valueOf(value)); - case "integer" -> json.put(key, Integer.valueOf(value)); - case "number" -> json.put(key, Double.valueOf(value)); - case "" -> addNoTypeValue(json, key, value); - default -> json.put(key, value); - } - } - return json; - } - - private static void addNoTypeValue(final ObjectNode json, final String key, final String value) { - if (value != null && (value.matches("^\\[.*\\]$")) || value.matches("^\\{.*\\}$")) { - final var newNode = Jsons.deserialize(value); - json.set(key, newNode); - } else { - json.put(key, value); - } - } - - @Override - protected List retrieveRecords(final TestDestinationEnv testEnv, - final String streamName, - final String namespace, - final JsonNode streamSchema) - throws IOException { - final List objectSummaries = getAllSyncedObjects(streamName, namespace); - - final Map fieldTypes = getFieldTypes(streamSchema); - final List jsonRecords = new LinkedList<>(); - - for (final S3ObjectSummary objectSummary : objectSummaries) { - try (final S3Object object = s3Client.getObject(objectSummary.getBucketName(), objectSummary.getKey()); - final Reader in = getReader(object)) { - final Iterable records = CSVFormat.DEFAULT - .withQuoteMode(QuoteMode.NON_NUMERIC) - .withFirstRecordAsHeader() - .parse(in); - StreamSupport.stream(records.spliterator(), false) - .forEach(r -> jsonRecords.add(getJsonNode(r.toMap(), fieldTypes))); - } - } - - return jsonRecords; - } - - protected Reader getReader(final S3Object s3Object) throws IOException { - return new InputStreamReader(s3Object.getObjectContent(), StandardCharsets.UTF_8); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/gcs/GcsBaseCsvGzipDestinationAcceptanceTest.java b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/gcs/GcsBaseCsvGzipDestinationAcceptanceTest.java deleted file mode 100644 index 6bf3d81ea48d6..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/gcs/GcsBaseCsvGzipDestinationAcceptanceTest.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.gcs; - -import com.amazonaws.services.s3.model.S3Object; -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.destination.s3.util.Flattening; -import io.airbyte.cdk.integrations.standardtest.destination.ProtocolVersion; -import io.airbyte.commons.json.Jsons; -import java.io.IOException; -import java.io.InputStreamReader; -import java.io.Reader; -import java.nio.charset.StandardCharsets; -import java.util.Map; -import java.util.zip.GZIPInputStream; - -public abstract class GcsBaseCsvGzipDestinationAcceptanceTest extends GcsBaseCsvDestinationAcceptanceTest { - - @Override - public ProtocolVersion getProtocolVersion() { - return ProtocolVersion.V1; - } - - @Override - protected JsonNode getFormatConfig() { - // config without compression defaults to GZIP - return Jsons.jsonNode(Map.of( - "format_type", outputFormat, - "flattening", Flattening.ROOT_LEVEL.getValue())); - } - - protected Reader getReader(final S3Object s3Object) throws IOException { - return new InputStreamReader(new GZIPInputStream(s3Object.getObjectContent()), StandardCharsets.UTF_8); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/gcs/GcsBaseJsonlDestinationAcceptanceTest.java b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/gcs/GcsBaseJsonlDestinationAcceptanceTest.java deleted file mode 100644 index 40e7bae0051f4..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/gcs/GcsBaseJsonlDestinationAcceptanceTest.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.gcs; - -import com.amazonaws.services.s3.model.S3Object; -import com.amazonaws.services.s3.model.S3ObjectSummary; -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.base.JavaBaseConstants; -import io.airbyte.cdk.integrations.destination.s3.S3Format; -import io.airbyte.cdk.integrations.standardtest.destination.ProtocolVersion; -import io.airbyte.commons.json.Jsons; -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStreamReader; -import java.nio.charset.StandardCharsets; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; - -public abstract class GcsBaseJsonlDestinationAcceptanceTest extends GcsDestinationAcceptanceTest { - - public GcsBaseJsonlDestinationAcceptanceTest() { - super(S3Format.JSONL); - } - - @Override - public ProtocolVersion getProtocolVersion() { - return ProtocolVersion.V1; - } - - @Override - protected JsonNode getFormatConfig() { - return Jsons.jsonNode(Map.of( - "format_type", outputFormat, - "compression", Jsons.jsonNode(Map.of("compression_type", "No Compression")))); - } - - @Override - protected List retrieveRecords(final TestDestinationEnv testEnv, - final String streamName, - final String namespace, - final JsonNode streamSchema) - throws IOException { - final List objectSummaries = getAllSyncedObjects(streamName, namespace); - final List jsonRecords = new LinkedList<>(); - - for (final S3ObjectSummary objectSummary : objectSummaries) { - final S3Object object = s3Client.getObject(objectSummary.getBucketName(), objectSummary.getKey()); - try (final BufferedReader reader = getReader(object)) { - String line; - while ((line = reader.readLine()) != null) { - jsonRecords.add(Jsons.deserialize(line).get(JavaBaseConstants.COLUMN_NAME_DATA)); - } - } - } - - return jsonRecords; - } - - protected BufferedReader getReader(final S3Object s3Object) throws IOException { - return new BufferedReader(new InputStreamReader(s3Object.getObjectContent(), StandardCharsets.UTF_8)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/gcs/GcsBaseJsonlGzipDestinationAcceptanceTest.java b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/gcs/GcsBaseJsonlGzipDestinationAcceptanceTest.java deleted file mode 100644 index 2924aecc8d1e9..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/gcs/GcsBaseJsonlGzipDestinationAcceptanceTest.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.gcs; - -import com.amazonaws.services.s3.model.S3Object; -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.standardtest.destination.ProtocolVersion; -import io.airbyte.commons.json.Jsons; -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStreamReader; -import java.nio.charset.StandardCharsets; -import java.util.Map; -import java.util.zip.GZIPInputStream; - -public abstract class GcsBaseJsonlGzipDestinationAcceptanceTest extends GcsBaseJsonlDestinationAcceptanceTest { - - @Override - public ProtocolVersion getProtocolVersion() { - return ProtocolVersion.V1; - } - - @Override - protected JsonNode getFormatConfig() { - // config without compression defaults to GZIP - return Jsons.jsonNode(Map.of("format_type", outputFormat)); - } - - protected BufferedReader getReader(final S3Object s3Object) throws IOException { - return new BufferedReader(new InputStreamReader(new GZIPInputStream(s3Object.getObjectContent()), StandardCharsets.UTF_8)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/gcs/GcsBaseParquetDestinationAcceptanceTest.java b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/gcs/GcsBaseParquetDestinationAcceptanceTest.java deleted file mode 100644 index 3725e76e02eaf..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/gcs/GcsBaseParquetDestinationAcceptanceTest.java +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.gcs; - -import com.amazonaws.services.s3.model.S3Object; -import com.amazonaws.services.s3.model.S3ObjectSummary; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectReader; -import io.airbyte.cdk.integrations.destination.gcs.parquet.GcsParquetWriter; -import io.airbyte.cdk.integrations.destination.s3.S3Format; -import io.airbyte.cdk.integrations.destination.s3.avro.AvroConstants; -import io.airbyte.cdk.integrations.destination.s3.avro.JsonFieldNameUpdater; -import io.airbyte.cdk.integrations.destination.s3.parquet.S3ParquetWriter; -import io.airbyte.cdk.integrations.destination.s3.util.AvroRecordHelper; -import io.airbyte.cdk.integrations.standardtest.destination.ProtocolVersion; -import io.airbyte.cdk.integrations.standardtest.destination.comparator.TestDataComparator; -import io.airbyte.commons.json.Jsons; -import java.io.IOException; -import java.net.URI; -import java.net.URISyntaxException; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Set; -import org.apache.avro.Schema.Type; -import org.apache.avro.generic.GenericData.Record; -import org.apache.hadoop.conf.Configuration; -import org.apache.parquet.avro.AvroReadSupport; -import org.apache.parquet.hadoop.ParquetReader; - -public abstract class GcsBaseParquetDestinationAcceptanceTest extends GcsAvroParquetDestinationAcceptanceTest { - - public GcsBaseParquetDestinationAcceptanceTest() { - super(S3Format.PARQUET); - } - - @Override - public ProtocolVersion getProtocolVersion() { - return ProtocolVersion.V1; - } - - @Override - protected JsonNode getFormatConfig() { - return Jsons.jsonNode(Map.of( - "format_type", "Parquet", - "compression_codec", "GZIP")); - } - - @Override - protected TestDataComparator getTestDataComparator() { - return new GcsAvroTestDataComparator(); - } - - @Override - protected List retrieveRecords(final TestDestinationEnv testEnv, - final String streamName, - final String namespace, - final JsonNode streamSchema) - throws IOException, URISyntaxException { - final JsonFieldNameUpdater nameUpdater = AvroRecordHelper.getFieldNameUpdater(streamName, namespace, streamSchema); - - final List objectSummaries = getAllSyncedObjects(streamName, namespace); - final List jsonRecords = new LinkedList<>(); - - for (final S3ObjectSummary objectSummary : objectSummaries) { - final S3Object object = s3Client.getObject(objectSummary.getBucketName(), objectSummary.getKey()); - final URI uri = new URI(String.format("s3a://%s/%s", object.getBucketName(), object.getKey())); - final var path = new org.apache.hadoop.fs.Path(uri); - final Configuration hadoopConfig = GcsParquetWriter.getHadoopConfig(config); - - try (final ParquetReader parquetReader = ParquetReader.builder(new AvroReadSupport<>(), path) - .withConf(hadoopConfig) - .build()) { - final ObjectReader jsonReader = MAPPER.reader(); - Record record; - while ((record = parquetReader.read()) != null) { - final byte[] jsonBytes = AvroConstants.JSON_CONVERTER.convertToJson(record); - JsonNode jsonRecord = jsonReader.readTree(jsonBytes); - jsonRecord = nameUpdater.getJsonWithOriginalFieldNames(jsonRecord); - jsonRecords.add(AvroRecordHelper.pruneAirbyteJson(jsonRecord)); - } - } - } - - return jsonRecords; - } - - @Override - protected Map> retrieveDataTypesFromPersistedFiles(final String streamName, final String namespace) throws Exception { - - final List objectSummaries = getAllSyncedObjects(streamName, namespace); - final Map> resultDataTypes = new HashMap<>(); - - for (final S3ObjectSummary objectSummary : objectSummaries) { - final S3Object object = s3Client.getObject(objectSummary.getBucketName(), objectSummary.getKey()); - final URI uri = new URI(String.format("s3a://%s/%s", object.getBucketName(), object.getKey())); - final var path = new org.apache.hadoop.fs.Path(uri); - final Configuration hadoopConfig = S3ParquetWriter.getHadoopConfig(config); - - try (final ParquetReader parquetReader = ParquetReader.builder(new AvroReadSupport<>(), path) - .withConf(hadoopConfig) - .build()) { - Record record; - while ((record = parquetReader.read()) != null) { - final Map> actualDataTypes = getTypes(record); - resultDataTypes.putAll(actualDataTypes); - } - } - } - - return resultDataTypes; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/gcs/GcsDestinationAcceptanceTest.java b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/gcs/GcsDestinationAcceptanceTest.java deleted file mode 100644 index a92ee08a4e159..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/gcs/GcsDestinationAcceptanceTest.java +++ /dev/null @@ -1,273 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.gcs; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.model.DeleteObjectsRequest.KeyVersion; -import com.amazonaws.services.s3.model.S3ObjectSummary; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.cdk.integrations.destination.NamingConventionTransformer; -import io.airbyte.cdk.integrations.destination.s3.S3Format; -import io.airbyte.cdk.integrations.destination.s3.S3FormatConfig; -import io.airbyte.cdk.integrations.destination.s3.S3StorageOperations; -import io.airbyte.cdk.integrations.standardtest.destination.DestinationAcceptanceTest; -import io.airbyte.cdk.integrations.standardtest.destination.ProtocolVersion; -import io.airbyte.cdk.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; -import io.airbyte.cdk.integrations.standardtest.destination.comparator.TestDataComparator; -import io.airbyte.commons.io.IOs; -import io.airbyte.commons.jackson.MoreMappers; -import io.airbyte.commons.json.Jsons; -import io.airbyte.configoss.StandardCheckConnectionOutput.Status; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; -import java.nio.file.Path; -import java.util.Comparator; -import java.util.HashSet; -import java.util.LinkedList; -import java.util.List; -import java.util.Locale; -import java.util.stream.Collectors; -import org.apache.commons.lang3.RandomStringUtils; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.junit.jupiter.api.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * When adding a new GCS destination acceptance test, extend this class and do the following: - *
  • Implement {@link #getFormatConfig} that returns a {@link S3FormatConfig}
  • - *
  • Implement {@link #retrieveRecords} that returns the Json records for the test
  • - * - * Under the hood, a {@link GcsDestinationConfig} is constructed as follows: - *
  • Retrieve the secrets from "secrets/config.json"
  • - *
  • Get the GCS bucket path from the constructor
  • - *
  • Get the format config from {@link #getFormatConfig}
  • - */ -public abstract class GcsDestinationAcceptanceTest extends DestinationAcceptanceTest { - - protected static final Logger LOGGER = LoggerFactory.getLogger(GcsDestinationAcceptanceTest.class); - protected static final ObjectMapper MAPPER = MoreMappers.initMapper(); - - protected static final String SECRET_FILE_PATH = "secrets/config.json"; - protected static final String SECRET_FILE_PATH_INSUFFICIENT_ROLES = "secrets/insufficient_roles_config.json"; - protected final S3Format outputFormat; - protected JsonNode configJson; - protected GcsDestinationConfig config; - protected AmazonS3 s3Client; - protected NamingConventionTransformer nameTransformer; - protected S3StorageOperations s3StorageOperations; - - public GcsDestinationAcceptanceTest(final S3Format outputFormat) { - this.outputFormat = outputFormat; - } - - protected JsonNode getBaseConfigJson() { - return Jsons.deserialize(IOs.readFile(Path.of(SECRET_FILE_PATH))); - } - - @Override - public ProtocolVersion getProtocolVersion() { - return ProtocolVersion.V1; - } - - @Override - protected JsonNode getConfig() { - return configJson; - } - - @Override - protected String getDefaultSchema(final JsonNode config) { - if (config.has("gcs_bucket_path")) { - return config.get("gcs_bucket_path").asText(); - } - return null; - } - - @Override - protected boolean supportBasicDataTypeTest() { - return true; - } - - @Override - protected boolean supportArrayDataTypeTest() { - return true; - } - - @Override - protected boolean supportObjectDataTypeTest() { - return true; - } - - @Override - protected TestDataComparator getTestDataComparator() { - return new AdvancedTestDataComparator(); - } - - @Override - protected JsonNode getFailCheckConfig() { - final JsonNode baseJson = getBaseConfigJson(); - final JsonNode failCheckJson = Jsons.clone(baseJson); - // invalid credential - ((ObjectNode) failCheckJson).put("hmac_key_access_id", "fake-key"); - ((ObjectNode) failCheckJson).put("hmac_key_secret", "fake-secret"); - return failCheckJson; - } - - /** - * Helper method to retrieve all synced objects inside the configured bucket path. - */ - protected List getAllSyncedObjects(final String streamName, final String namespace) { - final String namespaceStr = nameTransformer.getNamespace(namespace); - final String streamNameStr = nameTransformer.getIdentifier(streamName); - final String outputPrefix = s3StorageOperations.getBucketObjectPath( - namespaceStr, - streamNameStr, - DateTime.now(DateTimeZone.UTC), - config.getPathFormat()); - // the child folder contains a non-deterministic epoch timestamp, so use the parent folder - final String parentFolder = outputPrefix.substring(0, outputPrefix.lastIndexOf("/") + 1); - final List objectSummaries = s3Client - .listObjects(config.getBucketName(), parentFolder) - .getObjectSummaries() - .stream() - .filter(o -> o.getKey().contains(streamNameStr + "/")) - .sorted(Comparator.comparingLong(o -> o.getLastModified().getTime())) - .collect(Collectors.toList()); - LOGGER.info( - "All objects: {}", - objectSummaries.stream().map(o -> String.format("%s/%s", o.getBucketName(), o.getKey())).collect(Collectors.toList())); - return objectSummaries; - } - - protected abstract JsonNode getFormatConfig(); - - /** - * This method does the following: - *
  • Construct the GCS destination config.
  • - *
  • Construct the GCS client.
  • - */ - @Override - protected void setup(final TestDestinationEnv testEnv, final HashSet TEST_SCHEMAS) { - final JsonNode baseConfigJson = getBaseConfigJson(); - // Set a random GCS bucket path for each integration test - final JsonNode configJson = Jsons.clone(baseConfigJson); - final String testBucketPath = String.format( - "%s_test_%s", - outputFormat.name().toLowerCase(Locale.ROOT), - RandomStringUtils.randomAlphanumeric(5)); - ((ObjectNode) configJson) - .put("gcs_bucket_path", testBucketPath) - .set("format", getFormatConfig()); - this.configJson = configJson; - this.config = GcsDestinationConfig.getGcsDestinationConfig(configJson); - LOGGER.info("Test full path: {}/{}", config.getBucketName(), config.getBucketPath()); - - this.s3Client = config.getS3Client(); - this.nameTransformer = new GcsNameTransformer(); - this.s3StorageOperations = new S3StorageOperations(nameTransformer, s3Client, config); - } - - /** - * Remove all the S3 output from the tests. - */ - @Override - protected void tearDown(final TestDestinationEnv testEnv) { - final List keysToDelete = new LinkedList<>(); - final List objects = s3Client - .listObjects(config.getBucketName(), config.getBucketPath()) - .getObjectSummaries(); - for (final S3ObjectSummary object : objects) { - keysToDelete.add(new KeyVersion(object.getKey())); - } - - if (keysToDelete.size() > 0) { - LOGGER.info("Tearing down test bucket path: {}/{}", config.getBucketName(), - config.getBucketPath()); - // Google Cloud Storage doesn't accept request to delete multiple objects - for (final KeyVersion keyToDelete : keysToDelete) { - s3Client.deleteObject(config.getBucketName(), keyToDelete.getKey()); - } - LOGGER.info("Deleted {} file(s).", keysToDelete.size()); - } - } - - /** - * Verify that when given user with no Multipart Upload Roles, that check connection returns a - * failed response. Assume that the #getInsufficientRolesFailCheckConfig() returns the service - * account has storage.objects.create permission but not storage.multipartUploads.create. - */ - @Test - public void testCheckConnectionInsufficientRoles() throws Exception { - final JsonNode baseConfigJson = Jsons.deserialize(IOs.readFile(Path.of( - SECRET_FILE_PATH_INSUFFICIENT_ROLES))); - - // Set a random GCS bucket path for each integration test - final JsonNode configJson = Jsons.clone(baseConfigJson); - final String testBucketPath = String.format( - "%s_test_%s", - outputFormat.name().toLowerCase(Locale.ROOT), - RandomStringUtils.randomAlphanumeric(5)); - ((ObjectNode) configJson) - .put("gcs_bucket_path", testBucketPath) - .set("format", getFormatConfig()); - - assertEquals(Status.FAILED, runCheck(configJson).getStatus()); - } - - @Test - public void testCheckIncorrectHmacKeyAccessIdCredential() { - final JsonNode baseJson = getBaseConfigJson(); - final JsonNode credential = Jsons.jsonNode(ImmutableMap.builder() - .put("credential_type", "HMAC_KEY") - .put("hmac_key_access_id", "fake-key") - .put("hmac_key_secret", baseJson.get("credential").get("hmac_key_secret").asText()) - .build()); - - ((ObjectNode) baseJson).put("credential", credential); - ((ObjectNode) baseJson).set("format", getFormatConfig()); - - final BaseGcsDestination destination = new BaseGcsDestination() {}; - final AirbyteConnectionStatus status = destination.check(baseJson); - assertEquals(AirbyteConnectionStatus.Status.FAILED, status.getStatus()); - assertTrue(status.getMessage().contains("State code: SignatureDoesNotMatch;")); - } - - @Test - public void testCheckIncorrectHmacKeySecretCredential() { - final JsonNode baseJson = getBaseConfigJson(); - final JsonNode credential = Jsons.jsonNode(ImmutableMap.builder() - .put("credential_type", "HMAC_KEY") - .put("hmac_key_access_id", baseJson.get("credential").get("hmac_key_access_id").asText()) - .put("hmac_key_secret", "fake-secret") - .build()); - - ((ObjectNode) baseJson).put("credential", credential); - ((ObjectNode) baseJson).set("format", getFormatConfig()); - - final BaseGcsDestination destination = new BaseGcsDestination() {}; - final AirbyteConnectionStatus status = destination.check(baseJson); - assertEquals(AirbyteConnectionStatus.Status.FAILED, status.getStatus()); - assertTrue(status.getMessage().contains("State code: SignatureDoesNotMatch;")); - } - - @Test - public void testCheckIncorrectBucketCredential() { - final JsonNode baseJson = getBaseConfigJson(); - ((ObjectNode) baseJson).put("gcs_bucket_name", "fake_bucket"); - ((ObjectNode) baseJson).set("format", getFormatConfig()); - - final BaseGcsDestination destination = new BaseGcsDestination() {}; - final AirbyteConnectionStatus status = destination.check(baseJson); - assertEquals(AirbyteConnectionStatus.Status.FAILED, status.getStatus()); - assertTrue(status.getMessage().contains("State code: NoSuchKey;")); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsAvroParquetDestinationAcceptanceTest.kt b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsAvroParquetDestinationAcceptanceTest.kt new file mode 100644 index 0000000000000..f1cd685917e76 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsAvroParquetDestinationAcceptanceTest.kt @@ -0,0 +1,179 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.gcs + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.destination.s3.S3Format +import io.airbyte.cdk.integrations.destination.s3.avro.JsonSchemaType +import io.airbyte.cdk.integrations.standardtest.destination.ProtocolVersion +import io.airbyte.cdk.integrations.standardtest.destination.argproviders.NumberDataTypeTestArgumentProvider +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.resources.MoreResources +import io.airbyte.protocol.models.v0.AirbyteCatalog +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.AirbyteStream +import io.airbyte.protocol.models.v0.CatalogHelpers +import java.io.IOException +import java.util.* +import java.util.function.Function +import java.util.stream.Collectors +import java.util.stream.StreamSupport +import org.apache.avro.Schema +import org.apache.avro.generic.GenericData +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.params.ParameterizedTest +import org.junit.jupiter.params.provider.ArgumentsSource + +abstract class GcsAvroParquetDestinationAcceptanceTest(s3Format: S3Format) : + GcsDestinationAcceptanceTest(s3Format) { + override fun getProtocolVersion() = ProtocolVersion.V1 + + @ParameterizedTest + @ArgumentsSource(NumberDataTypeTestArgumentProvider::class) + @Throws(Exception::class) + fun testNumberDataType(catalogFileName: String, messagesFileName: String) { + val catalog = readCatalogFromFile(catalogFileName) + val messages = readMessagesFromFile(messagesFileName) + + val config = this.getConfig() + val defaultSchema = getDefaultSchema(config) + val configuredCatalog = CatalogHelpers.toDefaultConfiguredCatalog(catalog) + runSyncAndVerifyStateOutput(config, messages, configuredCatalog, false) + + for (stream in catalog.streams) { + val streamName = stream.name + val schema = if (stream.namespace != null) stream.namespace else defaultSchema!! + + val actualSchemaTypes = retrieveDataTypesFromPersistedFiles(streamName, schema) + val expectedSchemaTypes = retrieveExpectedDataTypes(stream) + + Assertions.assertEquals(expectedSchemaTypes, actualSchemaTypes) + } + } + + private fun retrieveExpectedDataTypes(stream: AirbyteStream): Map> { + val iterableNames = Iterable { stream.jsonSchema["properties"].fieldNames() } + val nameToNode = + StreamSupport.stream(iterableNames.spliterator(), false) + .collect( + Collectors.toMap( + Function.identity(), + Function { name: String -> getJsonNode(stream, name) } + ) + ) + + return nameToNode.entries + .stream() + .collect( + Collectors.toMap( + Function { obj: Map.Entry -> obj.key }, + Function { entry: Map.Entry -> + getExpectedSchemaType(entry.value) + } + ) + ) + } + + private fun getJsonNode(stream: AirbyteStream, name: String): JsonNode { + val properties = stream.jsonSchema["properties"] + if (properties.size() == 1) { + return properties["data"] + } + return properties[name]["items"] + } + + private fun getExpectedSchemaType(fieldDefinition: JsonNode): Set { + // The $ref is a migration to V1 data type protocol see well_known_types.yaml + val typeProperty = + if (fieldDefinition["type"] == null) fieldDefinition["\$ref"] + else fieldDefinition["type"] + val airbyteTypeProperty = fieldDefinition["airbyte_type"] + val airbyteTypePropertyText = airbyteTypeProperty?.asText() + return JsonSchemaType.entries + .toTypedArray() + .filter { value: JsonSchemaType -> + value.jsonSchemaType == typeProperty.asText() && + compareAirbyteTypes(airbyteTypePropertyText, value) + } + .map { it.avroType } + .toSet() + } + + private fun compareAirbyteTypes( + airbyteTypePropertyText: String?, + value: JsonSchemaType + ): Boolean { + if (airbyteTypePropertyText == null) { + return value.jsonSchemaAirbyteType == null + } + return airbyteTypePropertyText == value.jsonSchemaAirbyteType + } + + @Throws(IOException::class) + private fun readCatalogFromFile(catalogFilename: String): AirbyteCatalog { + return Jsons.deserialize( + MoreResources.readResource(catalogFilename), + AirbyteCatalog::class.java + ) + } + + @Throws(IOException::class) + private fun readMessagesFromFile(messagesFilename: String): List { + return MoreResources.readResource(messagesFilename).lines().map { + Jsons.deserialize(it, AirbyteMessage::class.java) + } + } + + @Throws(Exception::class) + protected abstract fun retrieveDataTypesFromPersistedFiles( + streamName: String, + namespace: String + ): Map?> + + protected fun getTypes(record: GenericData.Record): Map> { + val fieldList = + record.schema.fields + .stream() + .filter { field: Schema.Field -> !field.name().startsWith("_airbyte") } + .toList() + + return if (fieldList.size == 1) { + fieldList + .stream() + .collect( + Collectors.toMap( + Function { obj: Schema.Field -> obj.name() }, + Function { field: Schema.Field -> + field + .schema() + .types + .stream() + .map { obj: Schema -> obj.type } + .filter { type: Schema.Type -> type != Schema.Type.NULL } + .collect(Collectors.toSet()) + } + ) + ) + } else { + fieldList + .stream() + .collect( + Collectors.toMap( + Function { obj: Schema.Field -> obj.name() }, + Function { field: Schema.Field -> + field + .schema() + .types + .stream() + .filter { type: Schema -> type.type != Schema.Type.NULL } + .flatMap { type: Schema -> type.elementType.types.stream() } + .map { obj: Schema -> obj.type } + .filter { type: Schema.Type -> type != Schema.Type.NULL } + .collect(Collectors.toSet()) + } + ) + ) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsAvroTestDataComparator.kt b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsAvroTestDataComparator.kt new file mode 100644 index 0000000000000..7792e2c88996b --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsAvroTestDataComparator.kt @@ -0,0 +1,59 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.gcs + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.standardtest.destination.comparator.AdvancedTestDataComparator +import java.nio.charset.StandardCharsets +import java.time.* +import java.time.format.DateTimeFormatter +import java.util.* + +class GcsAvroTestDataComparator : AdvancedTestDataComparator() { + override fun compareDateValues(expectedValue: String, actualValue: String): Boolean { + val destinationDate = LocalDate.ofEpochDay(actualValue.toLong()) + val expectedDate = + LocalDate.parse(expectedValue, DateTimeFormatter.ofPattern(AIRBYTE_DATE_FORMAT)) + return expectedDate == destinationDate + } + + private fun getInstantFromEpoch(epochValue: String): Instant { + return Instant.ofEpochMilli(epochValue.toLong() / 1000) + } + + override fun parseDestinationDateWithTz(destinationValue: String): ZonedDateTime { + return ZonedDateTime.ofInstant(getInstantFromEpoch(destinationValue), ZoneOffset.UTC) + } + + override fun compareDateTimeValues( + airbyteMessageValue: String, + destinationValue: String + ): Boolean { + val format = DateTimeFormatter.ofPattern(AIRBYTE_DATETIME_FORMAT) + val dateTime = + LocalDateTime.ofInstant(getInstantFromEpoch(destinationValue), ZoneOffset.UTC) + return super.compareDateTimeValues(airbyteMessageValue, format.format(dateTime)) + } + + override fun compareTimeWithoutTimeZone( + airbyteMessageValue: String, + destinationValue: String + ): Boolean { + val destinationDate = + LocalTime.ofInstant(getInstantFromEpoch(destinationValue), ZoneOffset.UTC) + val expectedDate = LocalTime.parse(airbyteMessageValue, DateTimeFormatter.ISO_TIME) + return expectedDate == destinationDate + } + + override fun compareString(expectedValue: JsonNode, actualValue: JsonNode): Boolean { + // to handle base64 encoded strings + return expectedValue.asText() == actualValue.asText() || + decodeBase64(expectedValue.asText()) == actualValue.asText() + } + + private fun decodeBase64(string: String): String { + val decoded = Base64.getDecoder().decode(string) + return String(decoded, StandardCharsets.UTF_8) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsBaseAvroDestinationAcceptanceTest.kt b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsBaseAvroDestinationAcceptanceTest.kt new file mode 100644 index 0000000000000..878d79abc9906 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsBaseAvroDestinationAcceptanceTest.kt @@ -0,0 +1,94 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.gcs + +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.ObjectReader +import io.airbyte.cdk.integrations.destination.s3.S3Format +import io.airbyte.cdk.integrations.destination.s3.avro.AvroConstants +import io.airbyte.cdk.integrations.destination.s3.util.AvroRecordHelper.getFieldNameUpdater +import io.airbyte.cdk.integrations.destination.s3.util.AvroRecordHelper.pruneAirbyteJson +import io.airbyte.cdk.integrations.standardtest.destination.ProtocolVersion +import io.airbyte.cdk.integrations.standardtest.destination.comparator.TestDataComparator +import io.airbyte.commons.json.Jsons +import java.util.* +import org.apache.avro.Schema +import org.apache.avro.file.DataFileReader +import org.apache.avro.file.SeekableByteArrayInput +import org.apache.avro.generic.GenericData +import org.apache.avro.generic.GenericDatumReader + +abstract class GcsBaseAvroDestinationAcceptanceTest : + GcsAvroParquetDestinationAcceptanceTest(S3Format.AVRO) { + override val formatConfig: JsonNode? + get() = + Jsons.deserialize( + """{ + "format_type": "Avro", + "compression_codec": { "codec": "no compression", "compression_level": 5, "include_checksum": true } +}""" + ) + + override fun getTestDataComparator(): TestDataComparator = GcsAvroTestDataComparator() + + @Throws(Exception::class) + override fun retrieveRecords( + testEnv: TestDestinationEnv?, + streamName: String, + namespace: String, + streamSchema: JsonNode + ): List { + val nameUpdater = getFieldNameUpdater(streamName, namespace, streamSchema) + + val objectSummaries = getAllSyncedObjects(streamName, namespace!!) + val jsonRecords: MutableList = LinkedList() + + for (objectSummary in objectSummaries) { + val `object` = s3Client.getObject(objectSummary.bucketName, objectSummary.key) + DataFileReader( + SeekableByteArrayInput(`object`.objectContent.readAllBytes()), + GenericDatumReader() + ) + .use { dataFileReader -> + val jsonReader: ObjectReader = MAPPER.reader() + while (dataFileReader.hasNext()) { + val record = dataFileReader.next() + val jsonBytes = AvroConstants.JSON_CONVERTER.convertToJson(record) + var jsonRecord = jsonReader.readTree(jsonBytes) + jsonRecord = nameUpdater.getJsonWithOriginalFieldNames(jsonRecord!!) + jsonRecords.add(pruneAirbyteJson(jsonRecord)) + } + } + } + + return jsonRecords + } + + @Throws(Exception::class) + override fun retrieveDataTypesFromPersistedFiles( + streamName: String, + namespace: String + ): Map?> { + val objectSummaries = getAllSyncedObjects(streamName, namespace) + val resultDataTypes: MutableMap?> = HashMap() + + for (objectSummary in objectSummaries!!) { + val `object` = s3Client!!.getObject(objectSummary!!.bucketName, objectSummary.key) + DataFileReader( + SeekableByteArrayInput(`object`.objectContent.readAllBytes()), + GenericDatumReader() + ) + .use { dataFileReader -> + while (dataFileReader.hasNext()) { + val record = dataFileReader.next() + val actualDataTypes = getTypes(record) + resultDataTypes.putAll(actualDataTypes!!) + } + } + } + return resultDataTypes + } + + override fun getProtocolVersion() = ProtocolVersion.V1 +} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsBaseCsvDestinationAcceptanceTest.kt b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsBaseCsvDestinationAcceptanceTest.kt new file mode 100644 index 0000000000000..55f4767e4da90 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsBaseCsvDestinationAcceptanceTest.kt @@ -0,0 +1,134 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.gcs + +import com.amazonaws.services.s3.model.S3Object +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.node.ObjectNode +import io.airbyte.cdk.integrations.base.JavaBaseConstants +import io.airbyte.cdk.integrations.destination.s3.S3Format +import io.airbyte.cdk.integrations.destination.s3.util.Flattening +import io.airbyte.cdk.integrations.standardtest.destination.ProtocolVersion +import io.airbyte.commons.json.Jsons +import java.io.IOException +import java.io.InputStreamReader +import java.io.Reader +import java.nio.charset.StandardCharsets +import java.util.* +import java.util.stream.StreamSupport +import org.apache.commons.csv.CSVFormat +import org.apache.commons.csv.CSVRecord +import org.apache.commons.csv.QuoteMode + +abstract class GcsBaseCsvDestinationAcceptanceTest : GcsDestinationAcceptanceTest(S3Format.CSV) { + override fun getProtocolVersion() = ProtocolVersion.V1 + + override val formatConfig: JsonNode? + get() = + Jsons.jsonNode( + java.util.Map.of( + "format_type", + outputFormat, + "flattening", + Flattening.ROOT_LEVEL.value, + "compression", + Jsons.jsonNode(java.util.Map.of("compression_type", "No Compression")) + ) + ) + + @Throws(IOException::class) + override fun retrieveRecords( + testEnv: TestDestinationEnv?, + streamName: String, + namespace: String, + streamSchema: JsonNode + ): List { + val objectSummaries = getAllSyncedObjects(streamName, namespace) + + val fieldTypes = getFieldTypes(streamSchema) + val jsonRecords: MutableList = LinkedList() + + for (objectSummary in objectSummaries!!) { + s3Client!!.getObject(objectSummary!!.bucketName, objectSummary.key).use { `object` -> + getReader(`object`).use { `in` -> + val records: Iterable = + CSVFormat.DEFAULT.withQuoteMode(QuoteMode.NON_NUMERIC) + .withFirstRecordAsHeader() + .parse(`in`) + StreamSupport.stream(records.spliterator(), false).forEach { r: CSVRecord -> + jsonRecords.add(getJsonNode(r.toMap(), fieldTypes)) + } + } + } + } + + return jsonRecords + } + + @Throws(IOException::class) + protected open fun getReader(s3Object: S3Object): Reader { + return InputStreamReader(s3Object.objectContent, StandardCharsets.UTF_8) + } + + companion object { + /** Convert json_schema to a map from field name to field types. */ + private fun getFieldTypes(streamSchema: JsonNode): Map { + val fieldTypes: MutableMap = HashMap() + val fieldDefinitions = streamSchema["properties"] + val iterator = fieldDefinitions.fields() + while (iterator.hasNext()) { + val entry = iterator.next() + val fieldValue = entry.value + val typeValue = + if (fieldValue["type"] == null) fieldValue["\$ref"] else fieldValue["type"] + fieldTypes[entry.key] = typeValue.asText() + } + return fieldTypes + } + + private fun getJsonNode( + input: Map, + fieldTypes: Map + ): JsonNode { + val json: ObjectNode = MAPPER.createObjectNode() + + if (input.containsKey(JavaBaseConstants.COLUMN_NAME_DATA)) { + return Jsons.deserialize(input[JavaBaseConstants.COLUMN_NAME_DATA]) + } + + for ((key, value) in input) { + if ( + key == JavaBaseConstants.COLUMN_NAME_AB_ID || + (key == JavaBaseConstants.COLUMN_NAME_EMITTED_AT) + ) { + continue + } + if (value == null || value == "") { + continue + } + val type = fieldTypes[key] + when (type) { + "boolean" -> json.put(key, value.toBoolean()) + "integer" -> json.put(key, value.toInt()) + "number" -> json.put(key, value.toDouble()) + "" -> addNoTypeValue(json, key, value) + else -> json.put(key, value) + } + } + return json + } + + private fun addNoTypeValue(json: ObjectNode, key: String, value: String?) { + if ( + value != null && (value.matches("^\\[.*\\]$".toRegex())) || + value!!.matches("^\\{.*\\}$".toRegex()) + ) { + val newNode = Jsons.deserialize(value) + json.set(key, newNode) + } else { + json.put(key, value) + } + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsBaseCsvGzipDestinationAcceptanceTest.kt b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsBaseCsvGzipDestinationAcceptanceTest.kt new file mode 100644 index 0000000000000..6b9347abd1f3a --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsBaseCsvGzipDestinationAcceptanceTest.kt @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.gcs + +import com.amazonaws.services.s3.model.S3Object +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.destination.s3.util.Flattening +import io.airbyte.cdk.integrations.standardtest.destination.ProtocolVersion +import io.airbyte.commons.json.Jsons +import java.io.IOException +import java.io.InputStreamReader +import java.io.Reader +import java.nio.charset.StandardCharsets +import java.util.Map +import java.util.zip.GZIPInputStream + +abstract class GcsBaseCsvGzipDestinationAcceptanceTest : GcsBaseCsvDestinationAcceptanceTest() { + override fun getProtocolVersion() = ProtocolVersion.V1 + + override val formatConfig: JsonNode? + get() = // config without compression defaults to GZIP + Jsons.jsonNode( + Map.of("format_type", outputFormat, "flattening", Flattening.ROOT_LEVEL.value) + ) + + @Throws(IOException::class) + override fun getReader(s3Object: S3Object): Reader { + return InputStreamReader(GZIPInputStream(s3Object.objectContent), StandardCharsets.UTF_8) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsBaseJsonlDestinationAcceptanceTest.kt b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsBaseJsonlDestinationAcceptanceTest.kt new file mode 100644 index 0000000000000..4627425e4c9a8 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsBaseJsonlDestinationAcceptanceTest.kt @@ -0,0 +1,63 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.gcs + +import com.amazonaws.services.s3.model.S3Object +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.base.JavaBaseConstants +import io.airbyte.cdk.integrations.destination.s3.S3Format +import io.airbyte.cdk.integrations.standardtest.destination.ProtocolVersion +import io.airbyte.commons.json.Jsons +import java.io.BufferedReader +import java.io.IOException +import java.io.InputStreamReader +import java.nio.charset.StandardCharsets +import java.util.* +import java.util.Map +import kotlin.collections.List +import kotlin.collections.MutableList + +abstract class GcsBaseJsonlDestinationAcceptanceTest : + GcsDestinationAcceptanceTest(S3Format.JSONL) { + override fun getProtocolVersion() = ProtocolVersion.V1 + + override val formatConfig: JsonNode? + get() = + Jsons.jsonNode( + Map.of( + "format_type", + outputFormat, + "compression", + Jsons.jsonNode(Map.of("compression_type", "No Compression")) + ) + ) + + @Throws(IOException::class) + override fun retrieveRecords( + testEnv: TestDestinationEnv?, + streamName: String, + namespace: String, + streamSchema: JsonNode + ): List { + val objectSummaries = getAllSyncedObjects(streamName, namespace) + val jsonRecords: MutableList = LinkedList() + + for (objectSummary in objectSummaries!!) { + val `object` = s3Client!!.getObject(objectSummary!!.bucketName, objectSummary.key) + getReader(`object`).use { reader -> + var line: String? + while ((reader.readLine().also { line = it }) != null) { + jsonRecords.add(Jsons.deserialize(line)[JavaBaseConstants.COLUMN_NAME_DATA]) + } + } + } + + return jsonRecords + } + + @Throws(IOException::class) + protected open fun getReader(s3Object: S3Object): BufferedReader { + return BufferedReader(InputStreamReader(s3Object.objectContent, StandardCharsets.UTF_8)) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsBaseJsonlGzipDestinationAcceptanceTest.kt b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsBaseJsonlGzipDestinationAcceptanceTest.kt new file mode 100644 index 0000000000000..746e37e32617a --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsBaseJsonlGzipDestinationAcceptanceTest.kt @@ -0,0 +1,30 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.gcs + +import com.amazonaws.services.s3.model.S3Object +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.standardtest.destination.ProtocolVersion +import io.airbyte.commons.json.Jsons +import java.io.BufferedReader +import java.io.IOException +import java.io.InputStreamReader +import java.nio.charset.StandardCharsets +import java.util.Map +import java.util.zip.GZIPInputStream + +abstract class GcsBaseJsonlGzipDestinationAcceptanceTest : GcsBaseJsonlDestinationAcceptanceTest() { + override fun getProtocolVersion() = ProtocolVersion.V1 + + override val formatConfig: JsonNode? + get() = // config without compression defaults to GZIP + Jsons.jsonNode(Map.of("format_type", outputFormat)) + + @Throws(IOException::class) + override fun getReader(s3Object: S3Object): BufferedReader { + return BufferedReader( + InputStreamReader(GZIPInputStream(s3Object.objectContent), StandardCharsets.UTF_8) + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsBaseParquetDestinationAcceptanceTest.kt b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsBaseParquetDestinationAcceptanceTest.kt new file mode 100644 index 0000000000000..004fc3ceed6c2 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsBaseParquetDestinationAcceptanceTest.kt @@ -0,0 +1,102 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.gcs + +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.ObjectReader +import io.airbyte.cdk.integrations.destination.gcs.parquet.GcsParquetWriter +import io.airbyte.cdk.integrations.destination.s3.S3Format +import io.airbyte.cdk.integrations.destination.s3.avro.AvroConstants +import io.airbyte.cdk.integrations.destination.s3.parquet.S3ParquetWriter.Companion.getHadoopConfig +import io.airbyte.cdk.integrations.destination.s3.util.AvroRecordHelper.getFieldNameUpdater +import io.airbyte.cdk.integrations.destination.s3.util.AvroRecordHelper.pruneAirbyteJson +import io.airbyte.cdk.integrations.standardtest.destination.ProtocolVersion +import io.airbyte.cdk.integrations.standardtest.destination.comparator.TestDataComparator +import io.airbyte.commons.json.Jsons +import java.io.IOException +import java.net.URI +import java.net.URISyntaxException +import java.util.* +import org.apache.avro.Schema +import org.apache.avro.generic.GenericData +import org.apache.hadoop.fs.Path +import org.apache.parquet.avro.AvroReadSupport +import org.apache.parquet.hadoop.ParquetReader + +abstract class GcsBaseParquetDestinationAcceptanceTest : + GcsAvroParquetDestinationAcceptanceTest(S3Format.PARQUET) { + override fun getProtocolVersion() = ProtocolVersion.V1 + + override val formatConfig: JsonNode? + get() = + Jsons.jsonNode(java.util.Map.of("format_type", "Parquet", "compression_codec", "GZIP")) + + override fun getTestDataComparator(): TestDataComparator = GcsAvroTestDataComparator() + + @Throws(IOException::class, URISyntaxException::class) + override fun retrieveRecords( + testEnv: TestDestinationEnv?, + streamName: String, + namespace: String, + streamSchema: JsonNode + ): List { + val nameUpdater = getFieldNameUpdater(streamName, namespace, streamSchema) + + val objectSummaries = getAllSyncedObjects(streamName, namespace) + val jsonRecords: MutableList = LinkedList() + + for (objectSummary in objectSummaries) { + val `object` = s3Client!!.getObject(objectSummary.bucketName, objectSummary.key) + val uri = URI(String.format("s3a://%s/%s", `object`.bucketName, `object`.key)) + val path = Path(uri) + val hadoopConfig = GcsParquetWriter.getHadoopConfig(config) + + ParquetReader.builder(AvroReadSupport(), path) + .withConf(hadoopConfig) + .build() + .use { parquetReader -> + val jsonReader: ObjectReader = + GcsDestinationAcceptanceTest.Companion.MAPPER.reader() + var record: GenericData.Record? + while ((parquetReader.read().also { record = it }) != null) { + val jsonBytes = AvroConstants.JSON_CONVERTER.convertToJson(record) + var jsonRecord = jsonReader.readTree(jsonBytes) + jsonRecord = nameUpdater.getJsonWithOriginalFieldNames(jsonRecord!!) + jsonRecords.add(pruneAirbyteJson(jsonRecord)) + } + } + } + + return jsonRecords + } + + @Throws(Exception::class) + override fun retrieveDataTypesFromPersistedFiles( + streamName: String, + namespace: String + ): Map?> { + val objectSummaries = getAllSyncedObjects(streamName, namespace) + val resultDataTypes: MutableMap?> = HashMap() + + for (objectSummary in objectSummaries) { + val `object` = s3Client!!.getObject(objectSummary.bucketName, objectSummary.key) + val uri = URI(String.format("s3a://%s/%s", `object`.bucketName, `object`.key)) + val path = Path(uri) + val hadoopConfig = getHadoopConfig(config) + + ParquetReader.builder(AvroReadSupport(), path) + .withConf(hadoopConfig) + .build() + .use { parquetReader -> + var record: GenericData.Record? + while ((parquetReader.read().also { record = it }) != null) { + val actualDataTypes = getTypes(record!!) + resultDataTypes.putAll(actualDataTypes!!) + } + } + } + + return resultDataTypes + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsDestinationAcceptanceTest.kt b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsDestinationAcceptanceTest.kt new file mode 100644 index 0000000000000..92a18d74d4613 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/gcs-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/gcs/GcsDestinationAcceptanceTest.kt @@ -0,0 +1,280 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.gcs + +import com.amazonaws.services.s3.AmazonS3 +import com.amazonaws.services.s3.model.DeleteObjectsRequest +import com.amazonaws.services.s3.model.S3ObjectSummary +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.ObjectMapper +import com.fasterxml.jackson.databind.node.ObjectNode +import com.google.common.collect.ImmutableMap +import io.airbyte.cdk.integrations.destination.NamingConventionTransformer +import io.airbyte.cdk.integrations.destination.s3.S3Format +import io.airbyte.cdk.integrations.destination.s3.S3StorageOperations +import io.airbyte.cdk.integrations.standardtest.destination.DestinationAcceptanceTest +import io.airbyte.cdk.integrations.standardtest.destination.ProtocolVersion +import io.airbyte.cdk.integrations.standardtest.destination.comparator.AdvancedTestDataComparator +import io.airbyte.cdk.integrations.standardtest.destination.comparator.TestDataComparator +import io.airbyte.commons.io.IOs +import io.airbyte.commons.jackson.MoreMappers +import io.airbyte.commons.json.Jsons +import io.airbyte.configoss.StandardCheckConnectionOutput +import io.airbyte.protocol.models.v0.AirbyteConnectionStatus +import java.nio.file.Path +import java.util.* +import java.util.stream.Collectors +import org.apache.commons.lang3.RandomStringUtils +import org.joda.time.DateTime +import org.joda.time.DateTimeZone +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.mockito.Mockito.mock +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * When adding a new GCS destination acceptance test, extend this class and do the following: + * * Implement [.getFormatConfig] that returns a [S3FormatConfig] + * * Implement [.retrieveRecords] that returns the Json records for the test + * + * Under the hood, a [GcsDestinationConfig] is constructed as follows: + * * Retrieve the secrets from "secrets/config.json" + * * Get the GCS bucket path from the constructor + * * Get the format config from [.getFormatConfig] + */ +abstract class GcsDestinationAcceptanceTest(protected val outputFormat: S3Format) : + DestinationAcceptanceTest() { + protected var configJson: JsonNode? = null + // Not a big fan of those mocks(). Here to make spotbugs happy + protected var config: GcsDestinationConfig = mock() + protected var s3Client: AmazonS3 = mock() + protected var nameTransformer: NamingConventionTransformer = mock() + protected var s3StorageOperations: S3StorageOperations? = null + + protected val baseConfigJson: JsonNode + get() = Jsons.deserialize(IOs.readFile(Path.of(SECRET_FILE_PATH))) + + override fun getProtocolVersion(): ProtocolVersion { + return ProtocolVersion.V1 + } + + override fun getConfig(): JsonNode { + return configJson!! + } + + override fun getDefaultSchema(config: JsonNode): String? { + if (config.has("gcs_bucket_path")) { + return config["gcs_bucket_path"].asText() + } + return null + } + + override fun supportBasicDataTypeTest(): Boolean { + return true + } + + override fun supportArrayDataTypeTest(): Boolean { + return true + } + + override fun supportObjectDataTypeTest(): Boolean { + return true + } + + override fun getTestDataComparator(): TestDataComparator { + return AdvancedTestDataComparator() + } + + override fun getFailCheckConfig(): JsonNode { + val baseJson = baseConfigJson + val failCheckJson = Jsons.clone(baseJson) + // invalid credential + (failCheckJson as ObjectNode).put("hmac_key_access_id", "fake-key") + failCheckJson.put("hmac_key_secret", "fake-secret") + return failCheckJson + } + + /** Helper method to retrieve all synced objects inside the configured bucket path. */ + protected fun getAllSyncedObjects( + streamName: String, + namespace: String + ): List { + val namespaceStr = nameTransformer.getNamespace(namespace) + val streamNameStr = nameTransformer.getIdentifier(streamName) + val outputPrefix = + s3StorageOperations!!.getBucketObjectPath( + namespaceStr, + streamNameStr, + DateTime.now(DateTimeZone.UTC), + config.pathFormat!! + ) + // the child folder contains a non-deterministic epoch timestamp, so use the parent folder + val parentFolder = outputPrefix.substring(0, outputPrefix.lastIndexOf("/") + 1) + val objectSummaries = + s3Client + .listObjects(config.bucketName, parentFolder) + .objectSummaries + .stream() + .filter { o: S3ObjectSummary -> o.key.contains("$streamNameStr/") } + .sorted(Comparator.comparingLong { o: S3ObjectSummary -> o.lastModified.time }) + .collect(Collectors.toList()) + LOGGER.info( + "All objects: {}", + objectSummaries + .stream() + .map { o: S3ObjectSummary -> String.format("%s/%s", o.bucketName, o.key) } + .collect(Collectors.toList()) + ) + return objectSummaries + } + + protected abstract val formatConfig: JsonNode? + get + + /** + * This method does the following: + * * Construct the GCS destination config. + * * Construct the GCS client. + */ + override fun setup(testEnv: TestDestinationEnv, TEST_SCHEMAS: HashSet) { + val baseConfigJson = baseConfigJson + // Set a random GCS bucket path for each integration test + val configJson = Jsons.clone(baseConfigJson) + val testBucketPath = + String.format( + "%s_test_%s", + outputFormat.name.lowercase(), + RandomStringUtils.randomAlphanumeric(5) + ) + (configJson as ObjectNode) + .put("gcs_bucket_path", testBucketPath) + .set("format", formatConfig) + this.configJson = configJson + this.config = GcsDestinationConfig.getGcsDestinationConfig(configJson) + LOGGER.info("Test full path: {}/{}", config.bucketName, config.bucketPath) + + this.s3Client = config.getS3Client() + this.nameTransformer = GcsNameTransformer() + this.s3StorageOperations = S3StorageOperations(nameTransformer, s3Client!!, config) + } + + /** Remove all the S3 output from the tests. */ + override fun tearDown(testEnv: TestDestinationEnv) { + val keysToDelete: MutableList = LinkedList() + val objects = s3Client.listObjects(config!!.bucketName, config!!.bucketPath).objectSummaries + for (`object` in objects) { + keysToDelete.add(DeleteObjectsRequest.KeyVersion(`object`.key)) + } + + if (keysToDelete.size > 0) { + LOGGER.info( + "Tearing down test bucket path: {}/{}", + config!!.bucketName, + config!!.bucketPath + ) + // Google Cloud Storage doesn't accept request to delete multiple objects + for (keyToDelete in keysToDelete) { + s3Client!!.deleteObject(config!!.bucketName, keyToDelete.key) + } + LOGGER.info("Deleted {} file(s).", keysToDelete.size) + } + } + + /** + * Verify that when given user with no Multipart Upload Roles, that check connection returns a + * failed response. Assume that the #getInsufficientRolesFailCheckConfig() returns the service + * account has storage.objects.create permission but not storage.multipartUploads.create. + */ + @Test + @Throws(Exception::class) + fun testCheckConnectionInsufficientRoles() { + val baseConfigJson = + Jsons.deserialize(IOs.readFile(Path.of(SECRET_FILE_PATH_INSUFFICIENT_ROLES))) + + // Set a random GCS bucket path for each integration test + val configJson = Jsons.clone(baseConfigJson) + val testBucketPath = + String.format( + "%s_test_%s", + outputFormat.name.lowercase(), + RandomStringUtils.randomAlphanumeric(5) + ) + (configJson as ObjectNode) + .put("gcs_bucket_path", testBucketPath) + .set("format", formatConfig) + + Assertions.assertEquals( + StandardCheckConnectionOutput.Status.FAILED, + runCheck(configJson).status + ) + } + + @Test + fun testCheckIncorrectHmacKeyAccessIdCredential() { + val baseJson = baseConfigJson + val credential = + Jsons.jsonNode( + ImmutableMap.builder() + .put("credential_type", "HMAC_KEY") + .put("hmac_key_access_id", "fake-key") + .put("hmac_key_secret", baseJson["credential"]["hmac_key_secret"].asText()) + .build() + ) + + (baseJson as ObjectNode).put("credential", credential) + baseJson.set("format", formatConfig) + + val destination: BaseGcsDestination = object : BaseGcsDestination() {} + val status = destination.check(baseJson) + Assertions.assertEquals(AirbyteConnectionStatus.Status.FAILED, status!!.status) + Assertions.assertTrue(status.message.contains("State code: SignatureDoesNotMatch;")) + } + + @Test + fun testCheckIncorrectHmacKeySecretCredential() { + val baseJson = baseConfigJson + val credential = + Jsons.jsonNode( + ImmutableMap.builder() + .put("credential_type", "HMAC_KEY") + .put( + "hmac_key_access_id", + baseJson["credential"]["hmac_key_access_id"].asText() + ) + .put("hmac_key_secret", "fake-secret") + .build() + ) + + (baseJson as ObjectNode).put("credential", credential) + baseJson.set("format", formatConfig) + + val destination: BaseGcsDestination = object : BaseGcsDestination() {} + val status = destination.check(baseJson) + Assertions.assertEquals(AirbyteConnectionStatus.Status.FAILED, status!!.status) + Assertions.assertTrue(status.message.contains("State code: SignatureDoesNotMatch;")) + } + + @Test + fun testCheckIncorrectBucketCredential() { + val baseJson = baseConfigJson + (baseJson as ObjectNode).put("gcs_bucket_name", "fake_bucket") + baseJson.set("format", formatConfig) + + val destination: BaseGcsDestination = object : BaseGcsDestination() {} + val status = destination.check(baseJson) + Assertions.assertEquals(AirbyteConnectionStatus.Status.FAILED, status!!.status) + Assertions.assertTrue(status.message.contains("State code: NoSuchKey;")) + } + + companion object { + protected val LOGGER: Logger = + LoggerFactory.getLogger(GcsDestinationAcceptanceTest::class.java) + @JvmStatic protected val MAPPER: ObjectMapper = MoreMappers.initMapper() + + protected const val SECRET_FILE_PATH: String = "secrets/config.json" + protected const val SECRET_FILE_PATH_INSUFFICIENT_ROLES: String = + "secrets/insufficient_roles_config.json" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/build.gradle b/airbyte-cdk/java/airbyte-cdk/s3-destinations/build.gradle index 946b011adba37..93631c1aa4adb 100644 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/build.gradle +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/build.gradle @@ -1,3 +1,6 @@ +import org.jetbrains.kotlin.gradle.dsl.JvmTarget +import org.jetbrains.kotlin.gradle.dsl.KotlinVersion + java { // TODO: rewrite code to avoid javac wornings in the first place compileJava { @@ -11,11 +14,29 @@ java { } } +compileTestFixturesKotlin { + compilerOptions { + allWarningsAsErrors = false + } +} + +compileTestKotlin { + compilerOptions { + allWarningsAsErrors = false + } +} + +compileKotlin { + compilerOptions { + allWarningsAsErrors = false + } +} + dependencies { - implementation project(':airbyte-cdk:java:airbyte-cdk:dependencies') - implementation project(':airbyte-cdk:java:airbyte-cdk:core') - implementation project(':airbyte-cdk:java:airbyte-cdk:typing-deduping') - implementation project(':airbyte-cdk:java:airbyte-cdk:db-destinations') + implementation project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-dependencies') + implementation project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-core') + implementation project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-typing-deduping') + implementation project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-db-destinations') // Re-export dependencies for gcs-destinations. api 'com.amazonaws:aws-java-sdk-s3:1.12.647' @@ -30,14 +51,14 @@ dependencies { api 'org.apache.parquet:parquet-avro:1.13.1' runtimeOnly 'com.hadoop.gplcompression:hadoop-lzo:0.4.20' - testImplementation 'org.mockito:mockito-inline:5.2.0' + testFixturesApi project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-dependencies') + testFixturesApi testFixtures(project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-dependencies')) + testFixturesApi project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-core') + testFixturesApi testFixtures(project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-core')) + testFixturesApi project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-typing-deduping') + testFixturesApi testFixtures(project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-typing-deduping')) + testFixturesApi project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-db-destinations') + testFixturesApi testFixtures(project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-db-destinations')) - testFixturesApi project(':airbyte-cdk:java:airbyte-cdk:dependencies') - testFixturesApi testFixtures(project(':airbyte-cdk:java:airbyte-cdk:dependencies')) - testFixturesApi project(':airbyte-cdk:java:airbyte-cdk:core') - testFixturesApi testFixtures(project(':airbyte-cdk:java:airbyte-cdk:core')) - testFixturesApi project(':airbyte-cdk:java:airbyte-cdk:typing-deduping') - testFixturesApi testFixtures(project(':airbyte-cdk:java:airbyte-cdk:typing-deduping')) - testFixturesApi project(':airbyte-cdk:java:airbyte-cdk:db-destinations') - testFixturesApi testFixtures(project(':airbyte-cdk:java:airbyte-cdk:db-destinations')) + testImplementation 'org.mockito:mockito-inline:5.2.0' } diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/copy/s3/S3CopyConfig.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/copy/s3/S3CopyConfig.java deleted file mode 100644 index ca25e43f0ca21..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/copy/s3/S3CopyConfig.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.jdbc.copy.s3; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfig; - -/** - * S3 copy destinations need an S3DestinationConfig to configure the basic upload behavior. We also - * want additional flags to configure behavior that only applies to the copy-to-S3 + - * load-into-warehouse portion. Currently this is just purgeStagingData, but this may expand. - */ -public record S3CopyConfig(boolean purgeStagingData, S3DestinationConfig s3Config) { - - public static boolean shouldPurgeStagingData(final JsonNode config) { - if (config.get("purge_staging_data") == null) { - return true; - } else { - return config.get("purge_staging_data").asBoolean(); - } - } - - public static S3CopyConfig getS3CopyConfig(final JsonNode config) { - return new S3CopyConfig(S3CopyConfig.shouldPurgeStagingData(config), - S3DestinationConfig.getS3DestinationConfig(config)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/AesCbcEnvelopeEncryption.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/AesCbcEnvelopeEncryption.java deleted file mode 100644 index cff03ce937890..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/AesCbcEnvelopeEncryption.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3; - -import com.fasterxml.jackson.databind.JsonNode; -import java.security.NoSuchAlgorithmException; -import java.util.Arrays; -import javax.annotation.Nonnull; -import javax.crypto.KeyGenerator; -import org.apache.commons.lang3.StringUtils; - -/** - * @param key The key to use for encryption. - * @param keyType Where the key came from. - */ -public record AesCbcEnvelopeEncryption(@Nonnull byte[] key, @Nonnull KeyType keyType) implements EncryptionConfig { - - public enum KeyType { - EPHEMERAL, - USER_PROVIDED - } - - public static AesCbcEnvelopeEncryption fromJson(final JsonNode encryptionNode) { - if (!encryptionNode.has("key_encrypting_key")) { - return encryptionWithRandomKey(); - } - final String kek = encryptionNode.get("key_encrypting_key").asText(); - if (StringUtils.isEmpty(kek)) { - return encryptionWithRandomKey(); - } else { - return new AesCbcEnvelopeEncryption(BASE64_DECODER.decode(kek), KeyType.USER_PROVIDED); - } - } - - private static AesCbcEnvelopeEncryption encryptionWithRandomKey() { - try { - final KeyGenerator kekGenerator = KeyGenerator.getInstance(AesCbcEnvelopeEncryptionBlobDecorator.KEY_ENCRYPTING_ALGO); - kekGenerator.init(AesCbcEnvelopeEncryptionBlobDecorator.AES_KEY_SIZE_BITS); - return new AesCbcEnvelopeEncryption(kekGenerator.generateKey().getEncoded(), KeyType.EPHEMERAL); - } catch (final NoSuchAlgorithmException e) { - throw new RuntimeException(e); - } - } - - @Override - public boolean equals(final Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - final AesCbcEnvelopeEncryption that = (AesCbcEnvelopeEncryption) o; - - if (!Arrays.equals(key, that.key)) { - return false; - } - return keyType == that.keyType; - } - - @Override - public int hashCode() { - int result = Arrays.hashCode(key); - result = 31 * result + keyType.hashCode(); - return result; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/AesCbcEnvelopeEncryptionBlobDecorator.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/AesCbcEnvelopeEncryptionBlobDecorator.java deleted file mode 100644 index d8d9113d220b9..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/AesCbcEnvelopeEncryptionBlobDecorator.java +++ /dev/null @@ -1,144 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3; - -import com.google.common.annotations.VisibleForTesting; -import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; -import java.io.OutputStream; -import java.security.InvalidAlgorithmParameterException; -import java.security.InvalidKeyException; -import java.security.NoSuchAlgorithmException; -import java.security.SecureRandom; -import java.util.Base64; -import java.util.Base64.Encoder; -import java.util.Map; -import javax.crypto.BadPaddingException; -import javax.crypto.Cipher; -import javax.crypto.CipherOutputStream; -import javax.crypto.IllegalBlockSizeException; -import javax.crypto.KeyGenerator; -import javax.crypto.NoSuchPaddingException; -import javax.crypto.SecretKey; -import javax.crypto.spec.IvParameterSpec; -import javax.crypto.spec.SecretKeySpec; - -/** - * This class implements the envelope encryption that Redshift and Snowflake use when loading - * encrypted files from S3 (or other blob stores): - *
      - *
    • A content-encrypting-key (CEK) is used to encrypt the actual data (i.e. the CSV file)
    • - *
    • A key-encrypting-key (KEK) is used to encrypt the CEK
    • - *
    • The encrypted CEK is stored in the S3 object metadata, along with the plaintext - * initialization vector
    • - *
    • The COPY command includes the KEK (in plaintext). Redshift/Snowflake will use it to decrypt - * the CEK, which it then uses to decrypt the CSV file.
    • - *
    - *

    - * A new CEK is generated for each S3 object, but each sync uses a single KEK. The KEK may be either - * user-provided (if the user wants to keep the data for further use), or generated per-sync (if - * they simply want to add additional security around their COPY operation). - *

    - * Redshift does not support loading directly from GCS or Azure Blob Storage. - *

    - * Snowflake only supports client-side encryption in S3 and Azure Storage; it does not support this - * feature in GCS (https://docs.snowflake.com/en/sql-reference/sql/copy-into-table.html). Azure - * Storage uses a similar envelope encryption technique to S3 - * (https://docs.microsoft.com/en-us/azure/storage/common/storage-client-side-encryption?tabs=dotnet#encryption-via-the-envelope-technique). - */ -public class AesCbcEnvelopeEncryptionBlobDecorator implements BlobDecorator { - - public static final String ENCRYPTED_CONTENT_ENCRYPTING_KEY = "aes_cbc_envelope_encryption-content-encrypting-key"; - public static final String INITIALIZATION_VECTOR = "aes_cbc_envelope_encryption-initialization-vector"; - - public static final int AES_KEY_SIZE_BITS = 256; - private static final int AES_CBC_INITIALIZATION_VECTOR_SIZE_BYTES = 16; - private static final Encoder BASE64_ENCODER = Base64.getEncoder(); - private static final SecureRandom SECURE_RANDOM = new SecureRandom(); - - public static final String KEY_ENCRYPTING_ALGO = "AES"; - - // There's no specific KeyGenerator for AES/CBC/PKCS5Padding, so we just use a normal AES - // KeyGenerator - private static final String CONTENT_ENCRYPTING_KEY_ALGO = "AES"; - // Redshift's UNLOAD command uses this cipher mode, so we'll use it here as well. - // TODO If we eventually want to expose client-side encryption in destination-s3, we should probably - // also implement - // AES-GCM, since it's mostly superior to CBC mode. (if we do that: make sure that the output is - // compatible with - // aws-java-sdk's AmazonS3EncryptionV2Client, which requires a slightly different set of metadata) - private static final String CONTENT_ENCRYPTING_CIPHER_ALGO = "AES/CBC/PKCS5Padding"; - - // The real "secret key". Should be handled with great care. - private final SecretKey keyEncryptingKey; - // A random key generated for each file. Also should be handled with care. - private final SecretKey contentEncryptingKey; - // Arbitrary bytes required by the CBC algorithm. Not a sensitive value. - // The only requirement is that we never reuse an (IV, CEK) pair. - private final byte[] initializationVector; - - public AesCbcEnvelopeEncryptionBlobDecorator(final SecretKey keyEncryptingKey) { - this(keyEncryptingKey, randomContentEncryptingKey(), randomInitializationVector()); - } - - public AesCbcEnvelopeEncryptionBlobDecorator(final byte[] keyEncryptingKey) { - this(new SecretKeySpec(keyEncryptingKey, KEY_ENCRYPTING_ALGO)); - } - - @VisibleForTesting - AesCbcEnvelopeEncryptionBlobDecorator(final SecretKey keyEncryptingKey, final SecretKey contentEncryptingKey, final byte[] initializationVector) { - this.keyEncryptingKey = keyEncryptingKey; - this.contentEncryptingKey = contentEncryptingKey; - - this.initializationVector = initializationVector; - } - - @SuppressFBWarnings( - value = {"PADORA", "CIPINT"}, - justification = "We're using this cipher for compatibility with Redshift/Snowflake.") - @Override - public OutputStream wrap(final OutputStream stream) { - try { - final Cipher dataCipher = Cipher.getInstance(CONTENT_ENCRYPTING_CIPHER_ALGO); - dataCipher.init(Cipher.ENCRYPT_MODE, contentEncryptingKey, new IvParameterSpec(initializationVector)); - return new CipherOutputStream(stream, dataCipher); - } catch (final InvalidAlgorithmParameterException | NoSuchPaddingException | NoSuchAlgorithmException | InvalidKeyException e) { - throw new RuntimeException(e); - } - } - - @SuppressFBWarnings( - value = {"CIPINT", "SECECB"}, - justification = "We're using this cipher for compatibility with Redshift/Snowflake.") - @Override - public void updateMetadata(final Map metadata, final Map metadataKeyMapping) { - try { - final Cipher keyCipher = Cipher.getInstance(KEY_ENCRYPTING_ALGO); - keyCipher.init(Cipher.ENCRYPT_MODE, keyEncryptingKey); - final byte[] encryptedCekBytes = keyCipher.doFinal(contentEncryptingKey.getEncoded()); - - BlobDecorator.insertMetadata(metadata, metadataKeyMapping, ENCRYPTED_CONTENT_ENCRYPTING_KEY, BASE64_ENCODER.encodeToString(encryptedCekBytes)); - BlobDecorator.insertMetadata(metadata, metadataKeyMapping, INITIALIZATION_VECTOR, BASE64_ENCODER.encodeToString(initializationVector)); - } catch (final NoSuchPaddingException | NoSuchAlgorithmException | InvalidKeyException | IllegalBlockSizeException | BadPaddingException e) { - throw new RuntimeException(e); - } - } - - private static SecretKey randomContentEncryptingKey() { - try { - final KeyGenerator cekGenerator = KeyGenerator.getInstance(CONTENT_ENCRYPTING_KEY_ALGO); - cekGenerator.init(AES_KEY_SIZE_BITS); - return cekGenerator.generateKey(); - } catch (final NoSuchAlgorithmException e) { - throw new RuntimeException(e); - } - } - - private static byte[] randomInitializationVector() { - final byte[] initializationVector = new byte[AES_CBC_INITIALIZATION_VECTOR_SIZE_BYTES]; - SECURE_RANDOM.nextBytes(initializationVector); - return initializationVector; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/BaseS3Destination.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/BaseS3Destination.java deleted file mode 100644 index fab68a7a8d5ab..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/BaseS3Destination.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3; - -import com.amazonaws.services.s3.AmazonS3; -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.BaseConnector; -import io.airbyte.cdk.integrations.base.AirbyteMessageConsumer; -import io.airbyte.cdk.integrations.base.Destination; -import io.airbyte.cdk.integrations.destination.NamingConventionTransformer; -import io.airbyte.cdk.integrations.destination.record_buffer.FileBuffer; -import io.airbyte.cdk.integrations.destination.s3.util.S3NameTransformer; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus.Status; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.util.function.Consumer; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public abstract class BaseS3Destination extends BaseConnector implements Destination { - - private static final Logger LOGGER = LoggerFactory.getLogger(BaseS3Destination.class); - - protected final S3DestinationConfigFactory configFactory; - - private final NamingConventionTransformer nameTransformer; - - protected BaseS3Destination() { - this(new S3DestinationConfigFactory()); - } - - protected BaseS3Destination(final S3DestinationConfigFactory configFactory) { - this.configFactory = configFactory; - this.nameTransformer = new S3NameTransformer(); - } - - @Override - public AirbyteConnectionStatus check(final JsonNode config) { - try { - final S3DestinationConfig destinationConfig = configFactory.getS3DestinationConfig(config, storageProvider()); - final AmazonS3 s3Client = destinationConfig.getS3Client(); - - S3BaseChecks.testIAMUserHasListObjectPermission(s3Client, destinationConfig.getBucketName()); - S3BaseChecks.testSingleUpload(s3Client, destinationConfig.getBucketName(), destinationConfig.getBucketPath()); - S3BaseChecks.testMultipartUpload(s3Client, destinationConfig.getBucketName(), destinationConfig.getBucketPath()); - - return new AirbyteConnectionStatus().withStatus(Status.SUCCEEDED); - } catch (final Exception e) { - LOGGER.error("Exception attempting to access the S3 bucket: ", e); - return new AirbyteConnectionStatus() - .withStatus(AirbyteConnectionStatus.Status.FAILED) - .withMessage("Could not connect to the S3 bucket with the provided configuration. \n" + e - .getMessage()); - } - } - - @Override - public AirbyteMessageConsumer getConsumer(final JsonNode config, - final ConfiguredAirbyteCatalog catalog, - final Consumer outputRecordCollector) { - final S3DestinationConfig s3Config = configFactory.getS3DestinationConfig(config, storageProvider()); - return new S3ConsumerFactory().create( - outputRecordCollector, - new S3StorageOperations(nameTransformer, s3Config.getS3Client(), s3Config), - nameTransformer, - SerializedBufferFactory.getCreateFunction(s3Config, FileBuffer::new), - s3Config, - catalog); - } - - public abstract StorageProvider storageProvider(); - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/BlobDecorator.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/BlobDecorator.java deleted file mode 100644 index 61831fd995f22..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/BlobDecorator.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3; - -import com.google.common.annotations.VisibleForTesting; -import java.io.OutputStream; -import java.util.Map; - -/** - * Represents the ability to modify how a blob is stored, by modifying the data being written and/or - * the blob's metadata. - */ -public interface BlobDecorator { - - OutputStream wrap(OutputStream stream); - - /** - * Modifies the blob's metadata. - *

    - * In the most common case, BlobDecorator implementations will insert new entries into the metadata - * map. These entries may be vendor-specific. The metadataKeyMapping parameter defines a mapping - * from the "canonical" keys to the vendor-specific keys. See - * {@link S3StorageOperations#getMetadataMapping()} for an example. - *

    - * If a key is not defined in metadataKeyMapping, it will not be inserted into the metadata. - * - * @param metadata The blob's metadata - * @param metadataKeyMapping The mapping from canonical to vendor-specific key names - */ - void updateMetadata(Map metadata, Map metadataKeyMapping); - - /** - * A convenience method for subclasses. Handles inserting new metadata entries according to the - * metadataKeyMapping. - */ - @VisibleForTesting - static void insertMetadata(final Map metadata, - final Map metadataKeyMapping, - final String key, - final String value) { - if (metadataKeyMapping.containsKey(key)) { - metadata.put(metadataKeyMapping.get(key), value); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/BlobStorageOperations.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/BlobStorageOperations.java deleted file mode 100644 index 9df281e9e19b2..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/BlobStorageOperations.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.destination.record_buffer.SerializableBuffer; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import org.joda.time.DateTime; - -public abstract class BlobStorageOperations { - - protected final List blobDecorators; - - protected BlobStorageOperations() { - this.blobDecorators = new ArrayList<>(); - } - - public abstract String getBucketObjectPath(String namespace, String streamName, DateTime writeDatetime, String customFormat); - - /** - * Ensure that the bucket specified in the config exists - */ - public abstract void createBucketIfNotExists() throws Exception; - - /** - * Upload the data files into the storage area. - * - * @return the name of the file that was uploaded. - */ - public abstract String uploadRecordsToBucket(SerializableBuffer recordsData, String namespace, String objectPath) - throws Exception; - - /** - * Remove files that were just stored in the bucket - */ - public abstract void cleanUpBucketObject(String objectPath, List stagedFiles) throws Exception; - - /** - * Deletes all the bucket objects for the specified bucket path - * - * @param namespace Optional source-defined namespace name - * @param streamName Name of the stream - * @param objectPath file path to where staging files are stored - * @param pathFormat formatted string for the path - */ - public abstract void cleanUpBucketObject(String namespace, String streamName, String objectPath, String pathFormat); - - public abstract void dropBucketObject(String objectPath); - - public abstract boolean isValidData(JsonNode jsonNode); - - protected abstract Map getMetadataMapping(); - - public void addBlobDecorator(final BlobDecorator blobDecorator) { - blobDecorators.add(blobDecorator); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/EncryptionConfig.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/EncryptionConfig.java deleted file mode 100644 index ecd02c1e6da33..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/EncryptionConfig.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3; - -import com.fasterxml.jackson.databind.JsonNode; -import java.util.Base64; -import java.util.Base64.Decoder; - -public sealed interface EncryptionConfig permits AesCbcEnvelopeEncryption,NoEncryption { - - Decoder BASE64_DECODER = Base64.getDecoder(); - - static EncryptionConfig fromJson(final JsonNode encryptionNode) { - // For backwards-compatibility. Preexisting configs which don't contain the "encryption" key will - // pass a null JsonNode into this method. - if (encryptionNode == null) { - return new NoEncryption(); - } - - final String encryptionType = encryptionNode.get("encryption_type").asText(); - return switch (encryptionType) { - case "none" -> new NoEncryption(); - case "aes_cbc_envelope" -> AesCbcEnvelopeEncryption.fromJson(encryptionNode); - default -> throw new IllegalArgumentException("Invalid encryption type: " + encryptionType); - }; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/NoEncryption.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/NoEncryption.java deleted file mode 100644 index 7dcbd6669195f..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/NoEncryption.java +++ /dev/null @@ -1,9 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3; - -public final class NoEncryption implements EncryptionConfig { - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/S3BaseChecks.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/S3BaseChecks.java deleted file mode 100644 index 81219eb00a50f..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/S3BaseChecks.java +++ /dev/null @@ -1,137 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3; - -import alex.mojaki.s3upload.MultiPartOutputStream; -import alex.mojaki.s3upload.StreamTransferManager; -import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.model.ListObjectsRequest; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Strings; -import io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory; -import java.io.IOException; -import java.io.PrintWriter; -import java.nio.charset.StandardCharsets; -import java.util.UUID; -import org.apache.commons.csv.CSVFormat; -import org.apache.commons.csv.CSVPrinter; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public final class S3BaseChecks { - - private static final Logger LOGGER = LoggerFactory.getLogger(S3BaseChecks.class); - - private S3BaseChecks() {} - - /** - * Note that this method completely ignores s3Config.getBucketPath(), in favor of the bucketPath - * parameter. - */ - public static void attemptS3WriteAndDelete(final S3StorageOperations storageOperations, - final S3DestinationConfig s3Config, - final String bucketPath) { - attemptS3WriteAndDelete(storageOperations, s3Config, bucketPath, s3Config.getS3Client()); - } - - public static void testSingleUpload(final AmazonS3 s3Client, final String bucketName, final String bucketPath) { - LOGGER.info("Started testing if all required credentials assigned to user for single file uploading"); - final var prefix = bucketPath.endsWith("/") ? bucketPath : bucketPath + "/"; - final String testFile = prefix + "test_" + System.currentTimeMillis(); - try { - s3Client.putObject(bucketName, testFile, "this is a test file"); - } finally { - s3Client.deleteObject(bucketName, testFile); - } - LOGGER.info("Finished checking for normal upload mode"); - } - - public static void testMultipartUpload(final AmazonS3 s3Client, final String bucketName, final String bucketPath) throws IOException { - LOGGER.info("Started testing if all required credentials assigned to user for multipart upload"); - final var prefix = bucketPath.endsWith("/") ? bucketPath : bucketPath + "/"; - final String testFile = prefix + "test_" + System.currentTimeMillis(); - final StreamTransferManager manager = StreamTransferManagerFactory.create(bucketName, testFile, s3Client).get(); - boolean success = false; - try (final MultiPartOutputStream outputStream = manager.getMultiPartOutputStreams().get(0); - final CSVPrinter csvPrinter = new CSVPrinter(new PrintWriter(outputStream, true, StandardCharsets.UTF_8), CSVFormat.DEFAULT)) { - final String oneMegaByteString = "a".repeat(500_000); - // write a file larger than the 5 MB, which is the default part size, to make sure it is a multipart - // upload - for (int i = 0; i < 7; ++i) { - csvPrinter.printRecord(System.currentTimeMillis(), oneMegaByteString); - } - success = true; - } finally { - if (success) { - manager.complete(); - } else { - manager.abort(); - } - s3Client.deleteObject(bucketName, testFile); - } - LOGGER.info("Finished verification for multipart upload mode"); - } - - /** - * Checks that S3 custom endpoint uses a variant that only uses HTTPS - * - * @param endpoint URL string representing an accessible S3 bucket - */ - public static boolean testCustomEndpointSecured(final String endpoint) { - // if user does not use a custom endpoint, do not fail - if (endpoint == null || endpoint.length() == 0) { - return true; - } else { - return endpoint.startsWith("https://"); - } - } - - @VisibleForTesting - static void attemptS3WriteAndDelete(final S3StorageOperations storageOperations, - final S3DestinationConfig s3Config, - final String bucketPath, - final AmazonS3 s3) { - final String prefix; - if (Strings.isNullOrEmpty(bucketPath)) { - prefix = ""; - } else if (bucketPath.endsWith("/")) { - prefix = bucketPath; - } else { - prefix = bucketPath + "/"; - } - - final String outputTableName = prefix + "_airbyte_connection_test_" + UUID.randomUUID().toString().replaceAll("-", ""); - attemptWriteAndDeleteS3Object(storageOperations, s3Config, outputTableName, s3); - } - - /** - * Runs some permissions checks: 1. Check whether the bucket exists; create it if not 2. Check - * whether s3://bucketName/bucketPath/ exists; create it (with empty contents) if not. (if - * bucketPath is null/empty-string, then skip this step) 3. Attempt to create and delete - * s3://bucketName/outputTableName 4. Attempt to list all objects in the bucket - */ - private static void attemptWriteAndDeleteS3Object(final S3StorageOperations storageOperations, - final S3DestinationConfig s3Config, - final String outputTableName, - final AmazonS3 s3) { - final var s3Bucket = s3Config.getBucketName(); - final var bucketPath = s3Config.getBucketPath(); - - if (!Strings.isNullOrEmpty(bucketPath)) { - storageOperations.createBucketIfNotExists(); - } - s3.putObject(s3Bucket, outputTableName, "check-content"); - testIAMUserHasListObjectPermission(s3, s3Bucket); - s3.deleteObject(s3Bucket, outputTableName); - } - - public static void testIAMUserHasListObjectPermission(final AmazonS3 s3, final String bucketName) { - LOGGER.info("Started testing if IAM user can call listObjects on the destination bucket"); - final ListObjectsRequest request = new ListObjectsRequest().withBucketName(bucketName).withMaxKeys(1); - s3.listObjects(request); - LOGGER.info("Finished checking for listObjects permission"); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/S3ConsumerFactory.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/S3ConsumerFactory.java deleted file mode 100644 index 38068dbf38c1c..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/S3ConsumerFactory.java +++ /dev/null @@ -1,153 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3; - -import com.google.common.base.Preconditions; -import io.airbyte.cdk.integrations.base.AirbyteMessageConsumer; -import io.airbyte.cdk.integrations.destination.NamingConventionTransformer; -import io.airbyte.cdk.integrations.destination.buffered_stream_consumer.BufferedStreamConsumer; -import io.airbyte.cdk.integrations.destination.buffered_stream_consumer.OnCloseFunction; -import io.airbyte.cdk.integrations.destination.buffered_stream_consumer.OnStartFunction; -import io.airbyte.cdk.integrations.destination.record_buffer.BufferCreateFunction; -import io.airbyte.cdk.integrations.destination.record_buffer.FlushBufferFunction; -import io.airbyte.cdk.integrations.destination.record_buffer.SerializedBufferingStrategy; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteStream; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.v0.DestinationSyncMode; -import java.util.List; -import java.util.Map; -import java.util.function.Consumer; -import java.util.function.Function; -import java.util.stream.Collectors; -import org.apache.commons.io.FileUtils; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class S3ConsumerFactory { - - private static final Logger LOGGER = LoggerFactory.getLogger(S3ConsumerFactory.class); - private static final DateTime SYNC_DATETIME = DateTime.now(DateTimeZone.UTC); - - public AirbyteMessageConsumer create(final Consumer outputRecordCollector, - final BlobStorageOperations storageOperations, - final NamingConventionTransformer namingResolver, - final BufferCreateFunction onCreateBuffer, - final S3DestinationConfig s3Config, - final ConfiguredAirbyteCatalog catalog) { - final List writeConfigs = createWriteConfigs(storageOperations, namingResolver, s3Config, catalog); - return new BufferedStreamConsumer( - outputRecordCollector, - onStartFunction(storageOperations, writeConfigs), - new SerializedBufferingStrategy( - onCreateBuffer, - catalog, - flushBufferFunction(storageOperations, writeConfigs, catalog)), - onCloseFunction(storageOperations, writeConfigs), - catalog, - storageOperations::isValidData); - } - - private static List createWriteConfigs(final BlobStorageOperations storageOperations, - final NamingConventionTransformer namingResolver, - final S3DestinationConfig config, - final ConfiguredAirbyteCatalog catalog) { - return catalog.getStreams() - .stream() - .map(toWriteConfig(storageOperations, namingResolver, config)) - .collect(Collectors.toList()); - } - - private static Function toWriteConfig(final BlobStorageOperations storageOperations, - final NamingConventionTransformer namingResolver, - final S3DestinationConfig s3Config) { - return stream -> { - Preconditions.checkNotNull(stream.getDestinationSyncMode(), "Undefined destination sync mode"); - final AirbyteStream abStream = stream.getStream(); - final String namespace = abStream.getNamespace(); - final String streamName = abStream.getName(); - final String bucketPath = s3Config.getBucketPath(); - final String customOutputFormat = String.join("/", bucketPath, s3Config.getPathFormat()); - final String fullOutputPath = storageOperations.getBucketObjectPath(namespace, streamName, SYNC_DATETIME, customOutputFormat); - final DestinationSyncMode syncMode = stream.getDestinationSyncMode(); - final WriteConfig writeConfig = new WriteConfig(namespace, streamName, bucketPath, customOutputFormat, fullOutputPath, syncMode); - LOGGER.info("Write config: {}", writeConfig); - return writeConfig; - }; - } - - private OnStartFunction onStartFunction(final BlobStorageOperations storageOperations, final List writeConfigs) { - return () -> { - LOGGER.info("Preparing bucket in destination started for {} streams", writeConfigs.size()); - for (final WriteConfig writeConfig : writeConfigs) { - if (writeConfig.getSyncMode().equals(DestinationSyncMode.OVERWRITE)) { - final String namespace = writeConfig.getNamespace(); - final String stream = writeConfig.getStreamName(); - final String outputBucketPath = writeConfig.getOutputBucketPath(); - final String pathFormat = writeConfig.getPathFormat(); - LOGGER.info("Clearing storage area in destination started for namespace {} stream {} bucketObject {} pathFormat {}", - namespace, stream, outputBucketPath, pathFormat); - storageOperations.cleanUpBucketObject(namespace, stream, outputBucketPath, pathFormat); - LOGGER.info("Clearing storage area in destination completed for namespace {} stream {} bucketObject {}", namespace, stream, - outputBucketPath); - } - } - LOGGER.info("Preparing storage area in destination completed."); - }; - } - - private static AirbyteStreamNameNamespacePair toNameNamespacePair(final WriteConfig config) { - return new AirbyteStreamNameNamespacePair(config.getStreamName(), config.getNamespace()); - } - - private FlushBufferFunction flushBufferFunction(final BlobStorageOperations storageOperations, - final List writeConfigs, - final ConfiguredAirbyteCatalog catalog) { - final Map pairToWriteConfig = - writeConfigs.stream() - .collect(Collectors.toUnmodifiableMap( - S3ConsumerFactory::toNameNamespacePair, Function.identity())); - - return (pair, writer) -> { - LOGGER.info("Flushing buffer for stream {} ({}) to storage", pair.getName(), FileUtils.byteCountToDisplaySize(writer.getByteCount())); - if (!pairToWriteConfig.containsKey(pair)) { - throw new IllegalArgumentException( - String.format("Message contained record from a stream %s that was not in the catalog. \ncatalog: %s", pair, Jsons.serialize(catalog))); - } - - final WriteConfig writeConfig = pairToWriteConfig.get(pair); - try (writer) { - writer.flush(); - writeConfig.addStoredFile(storageOperations.uploadRecordsToBucket( - writer, - writeConfig.getNamespace(), - writeConfig.getFullOutputPath())); - } catch (final Exception e) { - LOGGER.error("Failed to flush and upload buffer to storage:", e); - throw new RuntimeException("Failed to upload buffer to storage", e); - } - }; - } - - private OnCloseFunction onCloseFunction(final BlobStorageOperations storageOperations, - final List writeConfigs) { - return (hasFailed, streamSyncSummaries) -> { - if (hasFailed) { - LOGGER.info("Cleaning up destination started for {} streams", writeConfigs.size()); - for (final WriteConfig writeConfig : writeConfigs) { - storageOperations.cleanUpBucketObject(writeConfig.getFullOutputPath(), writeConfig.getStoredFiles()); - writeConfig.clearStoredFiles(); - } - LOGGER.info("Cleaning up destination completed."); - } - }; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/S3DestinationConfig.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/S3DestinationConfig.java deleted file mode 100644 index 6694c0484dd3d..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/S3DestinationConfig.java +++ /dev/null @@ -1,394 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3; - -import static io.airbyte.cdk.integrations.destination.s3.constant.S3Constants.ACCESS_KEY_ID; -import static io.airbyte.cdk.integrations.destination.s3.constant.S3Constants.ACCOUNT_ID; -import static io.airbyte.cdk.integrations.destination.s3.constant.S3Constants.FILE_NAME_PATTERN; -import static io.airbyte.cdk.integrations.destination.s3.constant.S3Constants.SECRET_ACCESS_KEY; -import static io.airbyte.cdk.integrations.destination.s3.constant.S3Constants.S_3_BUCKET_NAME; -import static io.airbyte.cdk.integrations.destination.s3.constant.S3Constants.S_3_BUCKET_PATH; -import static io.airbyte.cdk.integrations.destination.s3.constant.S3Constants.S_3_BUCKET_REGION; -import static io.airbyte.cdk.integrations.destination.s3.constant.S3Constants.S_3_ENDPOINT; -import static io.airbyte.cdk.integrations.destination.s3.constant.S3Constants.S_3_PATH_FORMAT; - -import com.amazonaws.ClientConfiguration; -import com.amazonaws.Protocol; -import com.amazonaws.auth.AWSCredentialsProvider; -import com.amazonaws.client.builder.AwsClientBuilder; -import com.amazonaws.retry.RetryMode; -import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.AmazonS3ClientBuilder; -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.destination.s3.credential.S3AWSDefaultProfileCredentialConfig; -import io.airbyte.cdk.integrations.destination.s3.credential.S3AccessKeyCredentialConfig; -import io.airbyte.cdk.integrations.destination.s3.credential.S3CredentialConfig; -import io.airbyte.cdk.integrations.destination.s3.credential.S3CredentialType; -import java.util.Objects; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * An S3 configuration. Typical usage sets at most one of {@code bucketPath} (necessary for more - * delicate data syncing to S3) - */ -public class S3DestinationConfig { - - private static final Logger LOGGER = LoggerFactory.getLogger(S3DestinationConfig.class); - private static final String R2_INSTANCE_URL = "https://%s.r2.cloudflarestorage.com"; - - private final String endpoint; - private final String bucketName; - private final String bucketPath; - private final String bucketRegion; - private final String pathFormat; - private final S3CredentialConfig credentialConfig; - private final S3FormatConfig formatConfig; - private String fileNamePattern; - - private final Object lock = new Object(); - private AmazonS3 s3Client; - - private boolean checkIntegrity = true; - - private int uploadThreadsCount = S3StorageOperations.DEFAULT_UPLOAD_THREADS; - - public S3DestinationConfig(final String endpoint, - final String bucketName, - final String bucketPath, - final String bucketRegion, - final String pathFormat, - final S3CredentialConfig credentialConfig, - final S3FormatConfig formatConfig, - final AmazonS3 s3Client) { - this.endpoint = endpoint; - this.bucketName = bucketName; - this.bucketPath = bucketPath; - this.bucketRegion = bucketRegion; - this.pathFormat = pathFormat; - this.credentialConfig = credentialConfig; - this.formatConfig = formatConfig; - this.s3Client = s3Client; - } - - public S3DestinationConfig(final String endpoint, - final String bucketName, - final String bucketPath, - final String bucketRegion, - final String pathFormat, - final S3CredentialConfig credentialConfig, - final S3FormatConfig formatConfig, - final AmazonS3 s3Client, - final String fileNamePattern, - final boolean checkIntegrity, - final int uploadThreadsCount) { - this.endpoint = endpoint; - this.bucketName = bucketName; - this.bucketPath = bucketPath; - this.bucketRegion = bucketRegion; - this.pathFormat = pathFormat; - this.credentialConfig = credentialConfig; - this.formatConfig = formatConfig; - this.s3Client = s3Client; - this.fileNamePattern = fileNamePattern; - this.checkIntegrity = checkIntegrity; - this.uploadThreadsCount = uploadThreadsCount; - } - - public static Builder create(final String bucketName, final String bucketPath, final String bucketRegion) { - return new Builder(bucketName, bucketPath, bucketRegion); - } - - public static Builder create(final S3DestinationConfig config) { - return new Builder(config.getBucketName(), config.getBucketPath(), config.getBucketRegion()) - .withEndpoint(config.getEndpoint()) - .withCredentialConfig(config.getS3CredentialConfig()) - .withFormatConfig(config.getFormatConfig()); - } - - public static S3DestinationConfig getS3DestinationConfig(@Nonnull final JsonNode config) { - return getS3DestinationConfig(config, StorageProvider.AWS_S3); - } - - public static S3DestinationConfig getS3DestinationConfig(@Nonnull final JsonNode config, @Nonnull final StorageProvider storageProvider) { - Builder builder = create( - getProperty(config, S_3_BUCKET_NAME), - "", - getProperty(config, S_3_BUCKET_REGION)); - - if (config.has(S_3_BUCKET_PATH)) { - builder = builder.withBucketPath(config.get(S_3_BUCKET_PATH).asText()); - } - - if (config.has(FILE_NAME_PATTERN)) { - builder = builder.withFileNamePattern(config.get(FILE_NAME_PATTERN).asText()); - } - - if (config.has(S_3_PATH_FORMAT)) { - builder = builder.withPathFormat(config.get(S_3_PATH_FORMAT).asText()); - } - - switch (storageProvider) { - case CF_R2 -> { - if (config.has(ACCOUNT_ID)) { - final String endpoint = String.format(R2_INSTANCE_URL, getProperty(config, ACCOUNT_ID)); - builder = builder.withEndpoint(endpoint); - } - builder = builder.withCheckIntegrity(false) - // https://developers.cloudflare.com/r2/platform/s3-compatibility/api/#implemented-object-level-operations - // 3 or less - .withUploadThreadsCount(S3StorageOperations.R2_UPLOAD_THREADS); - } - default -> { - if (config.has(S_3_ENDPOINT)) { - builder = builder.withEndpoint(config.get(S_3_ENDPOINT).asText()); - } - } - } - - final S3CredentialConfig credentialConfig; - if (config.has(ACCESS_KEY_ID)) { - credentialConfig = new S3AccessKeyCredentialConfig(getProperty(config, ACCESS_KEY_ID), getProperty(config, SECRET_ACCESS_KEY)); - } else { - credentialConfig = new S3AWSDefaultProfileCredentialConfig(); - } - builder = builder.withCredentialConfig(credentialConfig); - - // In the "normal" S3 destination, this is never null. However, the Redshift and Snowflake copy - // destinations don't set a Format config. - if (config.has("format")) { - builder = builder.withFormatConfig(S3FormatConfigs.getS3FormatConfig(config)); - } - - return builder.get(); - } - - @Nullable - private static String getProperty(@Nonnull final JsonNode config, @Nonnull final String key) { - final JsonNode node = config.get(key); - if (node == null) { - return null; - } - return node.asText(); - } - - public String getEndpoint() { - return endpoint; - } - - public String getBucketName() { - return bucketName; - } - - public String getBucketPath() { - return bucketPath; - } - - public String getPathFormat() { - return pathFormat; - } - - public String getBucketRegion() { - return bucketRegion; - } - - public String getFileNamePattern() { - return fileNamePattern; - } - - public S3CredentialConfig getS3CredentialConfig() { - return credentialConfig; - } - - public S3FormatConfig getFormatConfig() { - return formatConfig; - } - - public boolean isCheckIntegrity() { - return checkIntegrity; - } - - public int getUploadThreadsCount() { - return uploadThreadsCount; - } - - public AmazonS3 getS3Client() { - synchronized (lock) { - if (s3Client == null) { - return resetS3Client(); - } - return s3Client; - } - } - - AmazonS3 resetS3Client() { - synchronized (lock) { - if (s3Client != null) { - s3Client.shutdown(); - } - s3Client = createS3Client(); - return s3Client; - } - } - - protected AmazonS3 createS3Client() { - LOGGER.info("Creating S3 client..."); - - final AWSCredentialsProvider credentialsProvider = credentialConfig.getS3CredentialsProvider(); - final S3CredentialType credentialType = credentialConfig.getCredentialType(); - - if (S3CredentialType.DEFAULT_PROFILE == credentialType) { - return AmazonS3ClientBuilder.standard() - .withRegion(bucketRegion) - .withCredentials(credentialsProvider) - // the SDK defaults to RetryMode.LEGACY - // (https://docs.aws.amazon.com/sdkref/latest/guide/feature-retry-behavior.html) - // this _can_ be configured via environment variable, but it seems more reliable to configure it - // programmatically - .withClientConfiguration(new ClientConfiguration().withRetryMode(RetryMode.STANDARD)) - .build(); - } - - if (null == endpoint || endpoint.isEmpty()) { - return AmazonS3ClientBuilder.standard() - .withCredentials(credentialsProvider) - .withRegion(bucketRegion) - .build(); - } - - final ClientConfiguration clientConfiguration = new ClientConfiguration().withProtocol(Protocol.HTTPS); - clientConfiguration.setSignerOverride("AWSS3V4SignerType"); - - return AmazonS3ClientBuilder.standard() - .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(endpoint, bucketRegion)) - .withPathStyleAccessEnabled(true) - .withClientConfiguration(clientConfiguration) - .withCredentials(credentialsProvider) - .build(); - } - - @Override - public boolean equals(final Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - final S3DestinationConfig that = (S3DestinationConfig) o; - return Objects.equals(endpoint, that.endpoint) && Objects.equals(bucketName, that.bucketName) && Objects.equals( - bucketPath, that.bucketPath) && Objects.equals(bucketRegion, that.bucketRegion) - && Objects.equals(credentialConfig, that.credentialConfig) - && Objects.equals(formatConfig, that.formatConfig); - } - - @Override - public int hashCode() { - return Objects.hash(endpoint, bucketName, bucketPath, bucketRegion, credentialConfig, formatConfig); - } - - public static class Builder { - - private String endpoint = ""; - private String pathFormat = S3DestinationConstants.DEFAULT_PATH_FORMAT; - - private String bucketName; - private String bucketPath; - private String bucketRegion; - private S3CredentialConfig credentialConfig; - private S3FormatConfig formatConfig; - private AmazonS3 s3Client; - private String fileNamePattern; - - private boolean checkIntegrity = true; - - private int uploadThreadsCount = S3StorageOperations.DEFAULT_UPLOAD_THREADS; - - protected Builder(final String bucketName, final String bucketPath, final String bucketRegion) { - this.bucketName = bucketName; - this.bucketPath = bucketPath; - this.bucketRegion = bucketRegion; - } - - public Builder withBucketName(final String bucketName) { - this.bucketName = bucketName; - return this; - } - - public Builder withFileNamePattern(final String fileNamePattern) { - this.fileNamePattern = fileNamePattern; - return this; - } - - public Builder withBucketPath(final String bucketPath) { - this.bucketPath = bucketPath; - return this; - } - - public Builder withBucketRegion(final String bucketRegion) { - this.bucketRegion = bucketRegion; - return this; - } - - public Builder withPathFormat(final String pathFormat) { - this.pathFormat = pathFormat; - return this; - } - - public Builder withEndpoint(final String endpoint) { - this.endpoint = endpoint; - return this; - } - - public Builder withFormatConfig(final S3FormatConfig formatConfig) { - this.formatConfig = formatConfig; - return this; - } - - public Builder withAccessKeyCredential(final String accessKeyId, final String secretAccessKey) { - this.credentialConfig = new S3AccessKeyCredentialConfig(accessKeyId, secretAccessKey); - return this; - } - - public Builder withCredentialConfig(final S3CredentialConfig credentialConfig) { - this.credentialConfig = credentialConfig; - return this; - } - - public Builder withS3Client(final AmazonS3 s3Client) { - this.s3Client = s3Client; - return this; - } - - public Builder withCheckIntegrity(final boolean checkIntegrity) { - this.checkIntegrity = checkIntegrity; - return this; - } - - public Builder withUploadThreadsCount(final int uploadThreadsCount) { - this.uploadThreadsCount = uploadThreadsCount; - return this; - } - - public S3DestinationConfig get() { - return new S3DestinationConfig( - endpoint, - bucketName, - bucketPath, - bucketRegion, - pathFormat, - credentialConfig, - formatConfig, - s3Client, - fileNamePattern, - checkIntegrity, - uploadThreadsCount); - } - - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/S3DestinationConfigFactory.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/S3DestinationConfigFactory.java deleted file mode 100644 index 6dbbe4cd97725..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/S3DestinationConfigFactory.java +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3; - -import com.fasterxml.jackson.databind.JsonNode; -import javax.annotation.Nonnull; - -public class S3DestinationConfigFactory { - - public S3DestinationConfig getS3DestinationConfig(final JsonNode config, @Nonnull final StorageProvider storageProvider) { - return S3DestinationConfig.getS3DestinationConfig(config, storageProvider); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/S3DestinationConstants.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/S3DestinationConstants.java deleted file mode 100644 index 82585016f7c44..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/S3DestinationConstants.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3; - -import io.airbyte.cdk.integrations.destination.s3.util.CompressionType; -import io.airbyte.cdk.integrations.destination.s3.util.S3NameTransformer; - -public final class S3DestinationConstants { - - public static final String YYYY_MM_DD_FORMAT_STRING = "yyyy_MM_dd"; - public static final S3NameTransformer NAME_TRANSFORMER = new S3NameTransformer(); - public static final String DEFAULT_PATH_FORMAT = "${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_"; - - // gzip compression for CSV and JSONL - public static final String COMPRESSION_ARG_NAME = "compression"; - public static final String COMPRESSION_TYPE_ARG_NAME = "compression_type"; - public static final CompressionType DEFAULT_COMPRESSION_TYPE = CompressionType.GZIP; - - // Flattening for CSV and JSONL - public static final String FLATTENING_ARG_NAME = "flattening"; - - private S3DestinationConstants() {} - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/S3Format.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/S3Format.java deleted file mode 100644 index 319dbd7ef5454..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/S3Format.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3; - -public enum S3Format { - - AVRO("avro"), - CSV("csv"), - JSONL("jsonl"), - PARQUET("parquet"); - - private final String fileExtension; - - S3Format(final String fileExtension) { - this.fileExtension = fileExtension; - } - - public String getFileExtension() { - return fileExtension; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/S3FormatConfig.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/S3FormatConfig.java deleted file mode 100644 index f4852b09be2ac..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/S3FormatConfig.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3; - -import com.fasterxml.jackson.databind.JsonNode; - -public interface S3FormatConfig { - - S3Format getFormat(); - - String getFileExtension(); - - static String withDefault(final JsonNode config, final String property, final String defaultValue) { - final JsonNode value = config.get(property); - if (value == null || value.isNull()) { - return defaultValue; - } - return value.asText(); - } - - static int withDefault(final JsonNode config, final String property, final int defaultValue) { - final JsonNode value = config.get(property); - if (value == null || value.isNull()) { - return defaultValue; - } - return value.asInt(); - } - - static boolean withDefault(final JsonNode config, final String property, final boolean defaultValue) { - final JsonNode value = config.get(property); - if (value == null || value.isNull()) { - return defaultValue; - } - return value.asBoolean(); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/S3FormatConfigs.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/S3FormatConfigs.java deleted file mode 100644 index 8a64fad378dad..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/S3FormatConfigs.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.destination.s3.avro.S3AvroFormatConfig; -import io.airbyte.cdk.integrations.destination.s3.csv.S3CsvFormatConfig; -import io.airbyte.cdk.integrations.destination.s3.jsonl.S3JsonlFormatConfig; -import io.airbyte.cdk.integrations.destination.s3.parquet.S3ParquetFormatConfig; -import io.airbyte.commons.json.Jsons; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class S3FormatConfigs { - - protected static final Logger LOGGER = LoggerFactory.getLogger(S3FormatConfigs.class); - - public static S3FormatConfig getS3FormatConfig(final JsonNode config) { - final JsonNode formatConfig = config.get("format"); - LOGGER.info("S3 format config: {}", formatConfig.toString()); - final S3Format formatType = S3Format.valueOf(formatConfig.get("format_type").asText().toUpperCase()); - - switch (formatType) { - case AVRO -> { - return new S3AvroFormatConfig(formatConfig); - } - case CSV -> { - return new S3CsvFormatConfig(formatConfig); - } - case JSONL -> { - return new S3JsonlFormatConfig(formatConfig); - } - case PARQUET -> { - return new S3ParquetFormatConfig(formatConfig); - } - default -> { - throw new RuntimeException("Unexpected output format: " + Jsons.serialize(config)); - } - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/S3StorageOperations.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/S3StorageOperations.java deleted file mode 100644 index 9db0d0d4994af..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/S3StorageOperations.java +++ /dev/null @@ -1,365 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3; - -import static org.apache.logging.log4j.util.Strings.isNotBlank; - -import alex.mojaki.s3upload.StreamTransferManager; -import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.model.AmazonS3Exception; -import com.amazonaws.services.s3.model.DeleteObjectsRequest; -import com.amazonaws.services.s3.model.DeleteObjectsRequest.KeyVersion; -import com.amazonaws.services.s3.model.ListObjectsRequest; -import com.amazonaws.services.s3.model.ObjectListing; -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ImmutableMap; -import io.airbyte.cdk.integrations.destination.NamingConventionTransformer; -import io.airbyte.cdk.integrations.destination.record_buffer.SerializableBuffer; -import io.airbyte.cdk.integrations.destination.s3.template.S3FilenameTemplateManager; -import io.airbyte.cdk.integrations.destination.s3.template.S3FilenameTemplateParameterObject; -import io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory; -import io.airbyte.cdk.integrations.util.ConnectorExceptionUtil; -import io.airbyte.commons.exceptions.ConfigErrorException; -import io.airbyte.commons.string.Strings; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.regex.Pattern; -import org.apache.commons.io.FilenameUtils; -import org.apache.commons.lang3.StringUtils; -import org.joda.time.DateTime; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class S3StorageOperations extends BlobStorageOperations { - - private static final Logger LOGGER = LoggerFactory.getLogger(S3StorageOperations.class); - - private final S3FilenameTemplateManager s3FilenameTemplateManager = new S3FilenameTemplateManager(); - - public static final int DEFAULT_UPLOAD_THREADS = 10; // The S3 cli uses 10 threads by default. - - public static final int R2_UPLOAD_THREADS = 3; - - private static final int DEFAULT_QUEUE_CAPACITY = DEFAULT_UPLOAD_THREADS; - private static final int DEFAULT_PART_SIZE = 10; - private static final int UPLOAD_RETRY_LIMIT = 3; - - private static final String FORMAT_VARIABLE_NAMESPACE = "${NAMESPACE}"; - private static final String FORMAT_VARIABLE_STREAM_NAME = "${STREAM_NAME}"; - private static final String FORMAT_VARIABLE_YEAR = "${YEAR}"; - private static final String FORMAT_VARIABLE_MONTH = "${MONTH}"; - private static final String FORMAT_VARIABLE_DAY = "${DAY}"; - private static final String FORMAT_VARIABLE_HOUR = "${HOUR}"; - private static final String FORMAT_VARIABLE_MINUTE = "${MINUTE}"; - private static final String FORMAT_VARIABLE_SECOND = "${SECOND}"; - private static final String FORMAT_VARIABLE_MILLISECOND = "${MILLISECOND}"; - private static final String FORMAT_VARIABLE_EPOCH = "${EPOCH}"; - private static final String FORMAT_VARIABLE_UUID = "${UUID}"; - private static final String GZ_FILE_EXTENSION = "gz"; - private final ConcurrentMap partCounts = new ConcurrentHashMap<>(); - - private final NamingConventionTransformer nameTransformer; - protected final S3DestinationConfig s3Config; - protected AmazonS3 s3Client; - - public S3StorageOperations(final NamingConventionTransformer nameTransformer, final AmazonS3 s3Client, final S3DestinationConfig s3Config) { - this.nameTransformer = nameTransformer; - this.s3Client = s3Client; - this.s3Config = s3Config; - } - - @Override - public String getBucketObjectPath(final String namespace, final String streamName, final DateTime writeDatetime, final String customPathFormat) { - final String namespaceStr = nameTransformer.getNamespace(isNotBlank(namespace) ? namespace : ""); - final String streamNameStr = nameTransformer.getIdentifier(streamName); - return nameTransformer.applyDefaultCase( - customPathFormat - .replaceAll(Pattern.quote(FORMAT_VARIABLE_NAMESPACE), namespaceStr) - .replaceAll(Pattern.quote(FORMAT_VARIABLE_STREAM_NAME), streamNameStr) - .replaceAll(Pattern.quote(FORMAT_VARIABLE_YEAR), String.format("%s", writeDatetime.year().get())) - .replaceAll(Pattern.quote(FORMAT_VARIABLE_MONTH), String.format("%02d", writeDatetime.monthOfYear().get())) - .replaceAll(Pattern.quote(FORMAT_VARIABLE_DAY), String.format("%02d", writeDatetime.dayOfMonth().get())) - .replaceAll(Pattern.quote(FORMAT_VARIABLE_HOUR), String.format("%02d", writeDatetime.hourOfDay().get())) - .replaceAll(Pattern.quote(FORMAT_VARIABLE_MINUTE), String.format("%02d", writeDatetime.minuteOfHour().get())) - .replaceAll(Pattern.quote(FORMAT_VARIABLE_SECOND), String.format("%02d", writeDatetime.secondOfMinute().get())) - .replaceAll(Pattern.quote(FORMAT_VARIABLE_MILLISECOND), String.format("%04d", writeDatetime.millisOfSecond().get())) - .replaceAll(Pattern.quote(FORMAT_VARIABLE_EPOCH), String.format("%d", writeDatetime.getMillis())) - .replaceAll(Pattern.quote(FORMAT_VARIABLE_UUID), String.format("%s", UUID.randomUUID())) - .replaceAll("/+", "/")); - } - - /** - * Create a directory object at the specified location. Creates the bucket if necessary. - */ - @Override - public void createBucketIfNotExists() { - final String bucket = s3Config.getBucketName(); - if (!doesBucketExist(bucket)) { - LOGGER.info("Bucket {} does not exist; creating...", bucket); - s3Client.createBucket(bucket); - LOGGER.info("Bucket {} has been created.", bucket); - } - } - - protected boolean doesBucketExist(final String bucket) { - return s3Client.doesBucketExistV2(bucket); - } - - @Override - public String uploadRecordsToBucket(final SerializableBuffer recordsData, - final String namespace, - final String objectPath) { - final List exceptionsThrown = new ArrayList<>(); - while (exceptionsThrown.size() < UPLOAD_RETRY_LIMIT) { - if (!exceptionsThrown.isEmpty()) { - LOGGER.info("Retrying to upload records into storage {} ({}/{}})", objectPath, exceptionsThrown.size(), UPLOAD_RETRY_LIMIT); - // Force a reconnection before retrying in case error was due to network issues... - s3Client = s3Config.resetS3Client(); - } - - try { - final String fileName = loadDataIntoBucket(objectPath, recordsData); - LOGGER.info("Successfully loaded records to stage {} with {} re-attempt(s)", objectPath, exceptionsThrown.size()); - return fileName; - } catch (final Exception e) { - LOGGER.error("Failed to upload records into storage {}", objectPath, e); - exceptionsThrown.add(e); - } - } - // Verifying that ALL exceptions are authentication related before assuming this is a configuration - // issue reduces risk of misidentifying errors or reporting a transient error. - final boolean areAllExceptionsAuthExceptions = exceptionsThrown.stream().filter(e -> e instanceof AmazonS3Exception) - .map(s3e -> ((AmazonS3Exception) s3e).getStatusCode()) - .filter(ConnectorExceptionUtil.HTTP_AUTHENTICATION_ERROR_CODES::contains) - .count() == exceptionsThrown.size(); - if (areAllExceptionsAuthExceptions) { - throw new ConfigErrorException(exceptionsThrown.get(0).getMessage(), exceptionsThrown.get(0)); - } else { - throw new RuntimeException(String.format("Exceptions thrown while uploading records into storage: %s", Strings.join(exceptionsThrown, "\n"))); - } - } - - /** - * Upload the file from {@code recordsData} to S3 and simplify the filename as .. - * - * @return the uploaded filename, which is different from the serialized buffer filename - */ - private String loadDataIntoBucket(final String objectPath, final SerializableBuffer recordsData) throws IOException { - final long partSize = DEFAULT_PART_SIZE; - final String bucket = s3Config.getBucketName(); - final String partId = getPartId(objectPath); - final String fileExtension = getExtension(recordsData.getFilename()); - final String fullObjectKey; - if (StringUtils.isNotBlank(s3Config.getFileNamePattern())) { - fullObjectKey = s3FilenameTemplateManager - .applyPatternToFilename( - S3FilenameTemplateParameterObject - .builder() - .partId(partId) - .recordsData(recordsData) - .objectPath(objectPath) - .fileExtension(fileExtension) - .fileNamePattern(s3Config.getFileNamePattern()) - .build()); - } else { - fullObjectKey = objectPath + partId + fileExtension; - } - final Map metadata = new HashMap<>(); - for (final BlobDecorator blobDecorator : blobDecorators) { - blobDecorator.updateMetadata(metadata, getMetadataMapping()); - } - final StreamTransferManager uploadManager = StreamTransferManagerFactory.create(bucket, fullObjectKey, s3Client) - .setPartSize(partSize) - .setUserMetadata(metadata) - .get() - .checkIntegrity(s3Config.isCheckIntegrity()) - .numUploadThreads(s3Config.getUploadThreadsCount()) - .queueCapacity(DEFAULT_QUEUE_CAPACITY); - boolean succeeded = false; - - // Wrap output stream in decorators - OutputStream rawOutputStream = uploadManager.getMultiPartOutputStreams().get(0); - for (final BlobDecorator blobDecorator : blobDecorators) { - rawOutputStream = blobDecorator.wrap(rawOutputStream); - } - - try (final OutputStream outputStream = rawOutputStream; - final InputStream dataStream = recordsData.getInputStream()) { - dataStream.transferTo(outputStream); - succeeded = true; - } catch (final Exception e) { - LOGGER.error("Failed to load data into storage {}", objectPath, e); - throw new RuntimeException(e); - } finally { - if (!succeeded) { - uploadManager.abort(); - } else { - uploadManager.complete(); - } - } - if (!s3Client.doesObjectExist(bucket, fullObjectKey)) { - LOGGER.error("Failed to upload data into storage, object {} not found", fullObjectKey); - throw new RuntimeException("Upload failed"); - } - final String newFilename = getFilename(fullObjectKey); - LOGGER.info("Uploaded buffer file to storage: {} -> {} (filename: {})", recordsData.getFilename(), fullObjectKey, newFilename); - return newFilename; - } - - /** - * Users want deterministic file names (e.g. the first file part is really foo-0.csv). Using UUIDs - * (previous approach) doesn't allow that. However, using pure integers could lead to a collision - * with an upload from another thread. We also want to be able to continue the same offset between - * attempts. So, we'll count up the existing files in the directory and use that as a lazy-offset, - * assuming airbyte manages the dir and has similar naming conventions. `getPartId` will be - * 0-indexed. - */ - @VisibleForTesting - synchronized String getPartId(String objectPath) { - final AtomicInteger partCount = partCounts.computeIfAbsent(objectPath, k -> new AtomicInteger(0)); - - if (partCount.get() == 0) { - ObjectListing objects; - int objectCount = 0; - - final String bucket = s3Config.getBucketName(); - objects = s3Client.listObjects(bucket, objectPath); - - if (objects != null) { - objectCount = objectCount + objects.getObjectSummaries().size(); - while (objects != null && objects.getNextMarker() != null) { - objects = s3Client.listObjects(new ListObjectsRequest().withBucketName(bucket).withPrefix(objectPath).withMarker(objects.getNextMarker())); - if (objects != null) { - objectCount = objectCount + objects.getObjectSummaries().size(); - } - } - } - - partCount.set(objectCount); - } - - return Integer.toString(partCount.getAndIncrement()); - } - - @VisibleForTesting - static String getFilename(final String fullPath) { - return fullPath.substring(fullPath.lastIndexOf("/") + 1); - } - - protected static String getExtension(final String filename) { - final String result = FilenameUtils.getExtension(filename); - if (result.isBlank()) { - return result; - } else if (GZ_FILE_EXTENSION.equals(result)) { - return getExtension(filename.substring(0, filename.length() - 3)) + "." + GZ_FILE_EXTENSION; - } - return "." + result; - } - - @Override - public void dropBucketObject(final String objectPath) { - cleanUpBucketObject(objectPath, List.of()); - } - - @Override - public void cleanUpBucketObject(final String namespace, final String streamName, final String objectPath, final String pathFormat) { - final String bucket = s3Config.getBucketName(); - ObjectListing objects = s3Client.listObjects(new ListObjectsRequest() - .withBucketName(bucket) - .withPrefix(objectPath) - // pathFormat may use subdirectories under the objectPath to organize files - // so we need to recursively list them and filter files matching the pathFormat - .withDelimiter("")); - final Pattern regexFormat = Pattern.compile(getRegexFormat(namespace, streamName, pathFormat)); - while (objects.getObjectSummaries().size() > 0) { - final List keysToDelete = objects.getObjectSummaries() - .stream() - .filter(obj -> regexFormat.matcher(obj.getKey()).matches()) - .map(obj -> new KeyVersion(obj.getKey())) - .toList(); - cleanUpObjects(bucket, keysToDelete); - LOGGER.info("Storage bucket {} has been cleaned-up ({} objects matching {} were deleted)...", objectPath, keysToDelete.size(), regexFormat); - if (objects.isTruncated()) { - objects = s3Client.listNextBatchOfObjects(objects); - } else { - break; - } - } - } - - protected String getRegexFormat(final String namespace, final String streamName, final String pathFormat) { - final String namespaceStr = nameTransformer.getNamespace(isNotBlank(namespace) ? namespace : ""); - final String streamNameStr = nameTransformer.getIdentifier(streamName); - return nameTransformer.applyDefaultCase(pathFormat - .replaceAll(Pattern.quote(FORMAT_VARIABLE_NAMESPACE), namespaceStr) - .replaceAll(Pattern.quote(FORMAT_VARIABLE_STREAM_NAME), streamNameStr) - .replaceAll(Pattern.quote(FORMAT_VARIABLE_YEAR), "[0-9]{4}") - .replaceAll(Pattern.quote(FORMAT_VARIABLE_MONTH), "[0-9]{2}") - .replaceAll(Pattern.quote(FORMAT_VARIABLE_DAY), "[0-9]{2}") - .replaceAll(Pattern.quote(FORMAT_VARIABLE_HOUR), "[0-9]{2}") - .replaceAll(Pattern.quote(FORMAT_VARIABLE_MINUTE), "[0-9]{2}") - .replaceAll(Pattern.quote(FORMAT_VARIABLE_SECOND), "[0-9]{2}") - .replaceAll(Pattern.quote(FORMAT_VARIABLE_MILLISECOND), "[0-9]{4}") - .replaceAll(Pattern.quote(FORMAT_VARIABLE_EPOCH), "[0-9]+") - .replaceAll(Pattern.quote(FORMAT_VARIABLE_UUID), ".*") - .replaceAll("/+", "/") - // match part_id and extension at the end - + ".*"); - } - - @Override - public void cleanUpBucketObject(final String objectPath, final List stagedFiles) { - final String bucket = s3Config.getBucketName(); - ObjectListing objects = s3Client.listObjects(bucket, objectPath); - while (objects.getObjectSummaries().size() > 0) { - final List keysToDelete = objects.getObjectSummaries() - .stream() - .filter(obj -> stagedFiles.isEmpty() || stagedFiles.contains(obj.getKey())) - .map(obj -> new KeyVersion(obj.getKey())) - .toList(); - cleanUpObjects(bucket, keysToDelete); - LOGGER.info("Storage bucket {} has been cleaned-up ({} objects were deleted)...", objectPath, keysToDelete.size()); - if (objects.isTruncated()) { - objects = s3Client.listNextBatchOfObjects(objects); - } else { - break; - } - } - } - - protected void cleanUpObjects(final String bucket, final List keysToDelete) { - if (!keysToDelete.isEmpty()) { - LOGGER.info("Deleting objects {}", String.join(", ", keysToDelete.stream().map(KeyVersion::getKey).toList())); - s3Client.deleteObjects(new DeleteObjectsRequest(bucket).withKeys(keysToDelete)); - } - } - - @Override - public boolean isValidData(final JsonNode jsonNode) { - return true; - } - - @Override - protected Map getMetadataMapping() { - return ImmutableMap.of( - AesCbcEnvelopeEncryptionBlobDecorator.ENCRYPTED_CONTENT_ENCRYPTING_KEY, "x-amz-key", - AesCbcEnvelopeEncryptionBlobDecorator.INITIALIZATION_VECTOR, "x-amz-iv"); - } - - public void uploadManifest(final String bucketName, final String manifestFilePath, final String manifestContents) { - s3Client.putObject(s3Config.getBucketName(), manifestFilePath, manifestContents); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/SerializedBufferFactory.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/SerializedBufferFactory.java deleted file mode 100644 index a4deb0aa57061..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/SerializedBufferFactory.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3; - -import io.airbyte.cdk.integrations.destination.record_buffer.BufferCreateFunction; -import io.airbyte.cdk.integrations.destination.record_buffer.BufferStorage; -import io.airbyte.cdk.integrations.destination.record_buffer.SerializableBuffer; -import io.airbyte.cdk.integrations.destination.s3.avro.AvroSerializedBuffer; -import io.airbyte.cdk.integrations.destination.s3.avro.S3AvroFormatConfig; -import io.airbyte.cdk.integrations.destination.s3.csv.CsvSerializedBuffer; -import io.airbyte.cdk.integrations.destination.s3.csv.S3CsvFormatConfig; -import io.airbyte.cdk.integrations.destination.s3.jsonl.JsonLSerializedBuffer; -import io.airbyte.cdk.integrations.destination.s3.jsonl.S3JsonlFormatConfig; -import io.airbyte.cdk.integrations.destination.s3.parquet.ParquetSerializedBuffer; -import io.airbyte.commons.json.Jsons; -import java.util.concurrent.Callable; -import java.util.function.Function; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class SerializedBufferFactory { - - protected static final Logger LOGGER = LoggerFactory.getLogger(SerializedBufferFactory.class); - - /** - * When running a - * {@link io.airbyte.cdk.integrations.destination.record_buffer.SerializedBufferingStrategy}, it - * would usually need to instantiate new buffers when flushing data or when it receives data for a - * brand-new stream. This factory fills this need and @return the function to be called on such - * events. - *

    - * The factory is responsible for choosing the correct constructor function for a new - * {@link SerializableBuffer} that handles the correct serialized format of the data. It is - * configured by composition with another function to create a new {@link BufferStorage} where to - * store it. - *

    - * This factory determines which {@link S3FormatConfig} to use depending on the user provided @param - * config, The @param createStorageFunctionWithoutExtension is the constructor function to call when - * creating a new buffer where to store data. Note that we typically associate which format is being - * stored in the storage object thanks to its file extension. - */ - public static BufferCreateFunction getCreateFunction(final S3DestinationConfig config, - final Function createStorageFunctionWithoutExtension) { - final S3FormatConfig formatConfig = config.getFormatConfig(); - LOGGER.info("S3 format config: {}", formatConfig.toString()); - switch (formatConfig.getFormat()) { - case AVRO -> { - final Callable createStorageFunctionWithExtension = - () -> createStorageFunctionWithoutExtension.apply(formatConfig.getFileExtension()); - return AvroSerializedBuffer.createFunction((S3AvroFormatConfig) formatConfig, createStorageFunctionWithExtension); - } - case CSV -> { - final Callable createStorageFunctionWithExtension = - () -> createStorageFunctionWithoutExtension.apply(formatConfig.getFileExtension()); - return CsvSerializedBuffer.createFunction((S3CsvFormatConfig) formatConfig, createStorageFunctionWithExtension); - } - case JSONL -> { - final Callable createStorageFunctionWithExtension = - () -> createStorageFunctionWithoutExtension.apply(formatConfig.getFileExtension()); - return JsonLSerializedBuffer.createBufferFunction((S3JsonlFormatConfig) formatConfig, createStorageFunctionWithExtension); - } - case PARQUET -> { - // we can't choose the type of buffer storage with parquet because of how the underlying hadoop - // library is imposing file usage. - return ParquetSerializedBuffer.createFunction(config); - } - default -> { - throw new RuntimeException("Unexpected output format: " + Jsons.serialize(config)); - } - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/StorageProvider.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/StorageProvider.java deleted file mode 100644 index c21ad66667f03..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/StorageProvider.java +++ /dev/null @@ -1,13 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3; - -/** - * Represents storage provider type - */ -public enum StorageProvider { - AWS_S3, - CF_R2; -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/WriteConfig.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/WriteConfig.java deleted file mode 100644 index be4bdc2e0f408..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/WriteConfig.java +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3; - -import io.airbyte.protocol.models.v0.DestinationSyncMode; -import java.util.ArrayList; -import java.util.List; - -/** - * Write configuration POJO for blob storage destinations - */ -public class WriteConfig { - - private final String namespace; - private final String streamName; - private final String outputBucketPath; - private final String pathFormat; - private final String fullOutputPath; - private final DestinationSyncMode syncMode; - private final List storedFiles; - - public WriteConfig(final String namespace, - final String streamName, - final String outputBucketPath, - final String pathFormat, - final String fullOutputPath, - final DestinationSyncMode syncMode) { - this.namespace = namespace; - this.streamName = streamName; - this.outputBucketPath = outputBucketPath; - this.pathFormat = pathFormat; - this.fullOutputPath = fullOutputPath; - this.syncMode = syncMode; - this.storedFiles = new ArrayList<>(); - } - - public String getNamespace() { - return namespace; - } - - public String getStreamName() { - return streamName; - } - - public String getOutputBucketPath() { - return outputBucketPath; - } - - public String getPathFormat() { - return pathFormat; - } - - public String getFullOutputPath() { - return fullOutputPath; - } - - public DestinationSyncMode getSyncMode() { - return syncMode; - } - - public List getStoredFiles() { - return storedFiles; - } - - public void addStoredFile(final String file) { - storedFiles.add(file); - } - - public void clearStoredFiles() { - storedFiles.clear(); - } - - @Override - public String toString() { - return "WriteConfig{" + - "streamName=" + streamName + - ", namespace=" + namespace + - ", outputBucketPath=" + outputBucketPath + - ", pathFormat=" + pathFormat + - ", fullOutputPath=" + fullOutputPath + - ", syncMode=" + syncMode + - '}'; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/avro/AvroConstants.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/avro/AvroConstants.java deleted file mode 100644 index 46455961059b3..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/avro/AvroConstants.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.avro; - -import java.util.Set; -import tech.allegro.schema.json2avro.converter.JsonAvroConverter; - -public class AvroConstants { - - // Field name with special character - public static final String DOC_KEY_VALUE_DELIMITER = ":"; - public static final String DOC_KEY_ORIGINAL_NAME = "_airbyte_original_name"; - - public static final String AVRO_EXTRA_PROPS_FIELD = "_airbyte_additional_properties"; - // This set must include _ab_additional_col in source_s3/source_files_abstract/stream.py - public static final Set JSON_EXTRA_PROPS_FIELDS = Set.of("_ab_additional_properties", AVRO_EXTRA_PROPS_FIELD); - public static final AvroNameTransformer NAME_TRANSFORMER = new AvroNameTransformer(); - public static final JsonAvroConverter JSON_CONVERTER = JsonAvroConverter.builder() - .setNameTransformer(NAME_TRANSFORMER::getIdentifier) - .setJsonAdditionalPropsFieldNames(JSON_EXTRA_PROPS_FIELDS) - .setAvroAdditionalPropsFieldName(AVRO_EXTRA_PROPS_FIELD) - .build(); - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/avro/AvroNameTransformer.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/avro/AvroNameTransformer.java deleted file mode 100644 index 8521b687e6df9..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/avro/AvroNameTransformer.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.avro; - -import io.airbyte.cdk.integrations.destination.StandardNameTransformer; -import java.util.Arrays; - -/** - *

      - *
    • An Avro name starts with [A-Za-z_], followed by [A-Za-z0-9_].
    • - *
    • An Avro namespace is a dot-separated sequence of such names.
    • - *
    • Reference: https://avro.apache.org/docs/current/spec.html#names
    • - *
    - */ -public class AvroNameTransformer extends StandardNameTransformer { - - @Override - public String applyDefaultCase(final String input) { - return super.convertStreamName(input).toLowerCase(); - } - - @Override - public String convertStreamName(final String input) { - if (input == null) { - return null; - } else if (input.isBlank()) { - return input; - } - - final String normalizedName = super.convertStreamName(input); - if (normalizedName.substring(0, 1).matches("[A-Za-z_]")) { - return normalizedName; - } else { - return "_" + normalizedName; - } - } - - @Override - public String getNamespace(final String input) { - if (input == null) { - return null; - } - - final String[] tokens = input.split("\\."); - return String.join(".", Arrays.stream(tokens).map(this::getIdentifier).toList()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/avro/AvroRecordFactory.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/avro/AvroRecordFactory.java deleted file mode 100644 index 8115d9f98357c..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/avro/AvroRecordFactory.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.avro; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.ObjectWriter; -import com.fasterxml.jackson.databind.node.ObjectNode; -import io.airbyte.cdk.integrations.base.JavaBaseConstants; -import io.airbyte.commons.jackson.MoreMappers; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import java.util.UUID; -import org.apache.avro.Schema; -import org.apache.avro.generic.GenericData; -import tech.allegro.schema.json2avro.converter.JsonAvroConverter; - -public class AvroRecordFactory { - - private static final ObjectMapper MAPPER = MoreMappers.initMapper(); - private static final ObjectWriter WRITER = MAPPER.writer(); - - private final Schema schema; - private final JsonAvroConverter converter; - - public AvroRecordFactory(final Schema schema, final JsonAvroConverter converter) { - this.schema = schema; - this.converter = converter; - } - - public GenericData.Record getAvroRecord(final UUID id, final AirbyteRecordMessage recordMessage) throws JsonProcessingException { - final ObjectNode jsonRecord = MAPPER.createObjectNode(); - jsonRecord.put(JavaBaseConstants.COLUMN_NAME_AB_ID, id.toString()); - jsonRecord.put(JavaBaseConstants.COLUMN_NAME_EMITTED_AT, recordMessage.getEmittedAt()); - jsonRecord.setAll((ObjectNode) recordMessage.getData()); - - return converter.convertToGenericDataRecord(WRITER.writeValueAsBytes(jsonRecord), schema); - } - - public GenericData.Record getAvroRecord(final JsonNode formattedData) throws JsonProcessingException { - final var bytes = WRITER.writeValueAsBytes(formattedData); - return converter.convertToGenericDataRecord(bytes, schema); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/avro/AvroSerializedBuffer.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/avro/AvroSerializedBuffer.java deleted file mode 100644 index bcfcb9ac9e478..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/avro/AvroSerializedBuffer.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.avro; - -import io.airbyte.cdk.integrations.destination.record_buffer.BaseSerializedBuffer; -import io.airbyte.cdk.integrations.destination.record_buffer.BufferCreateFunction; -import io.airbyte.cdk.integrations.destination.record_buffer.BufferStorage; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.io.IOException; -import java.io.OutputStream; -import java.util.UUID; -import java.util.concurrent.Callable; -import org.apache.avro.Schema; -import org.apache.avro.file.CodecFactory; -import org.apache.avro.file.DataFileWriter; -import org.apache.avro.generic.GenericData.Record; -import org.apache.avro.generic.GenericDatumWriter; -import org.apache.commons.lang3.StringUtils; - -public class AvroSerializedBuffer extends BaseSerializedBuffer { - - public static final String DEFAULT_SUFFIX = ".avro"; - - private final CodecFactory codecFactory; - private final Schema schema; - protected final AvroRecordFactory avroRecordFactory; - protected DataFileWriter dataFileWriter; - - public AvroSerializedBuffer(final BufferStorage bufferStorage, final CodecFactory codecFactory, final Schema schema) throws Exception { - super(bufferStorage); - // disable compression stream as it is already handled by codecFactory - withCompression(false); - this.codecFactory = codecFactory; - this.schema = schema; - avroRecordFactory = new AvroRecordFactory(schema, AvroConstants.JSON_CONVERTER); - dataFileWriter = null; - } - - @Override - protected void initWriter(final OutputStream outputStream) throws IOException { - dataFileWriter = new DataFileWriter<>(new GenericDatumWriter()) - .setCodec(codecFactory) - .create(schema, outputStream); - } - - @Override - protected void writeRecord(final AirbyteRecordMessage record) throws IOException { - dataFileWriter.append(avroRecordFactory.getAvroRecord(UUID.randomUUID(), record)); - } - - @Override - protected void flushWriter() throws IOException { - dataFileWriter.flush(); - } - - @Override - protected void closeWriter() throws IOException { - dataFileWriter.close(); - } - - public static BufferCreateFunction createFunction(final S3AvroFormatConfig config, - final Callable createStorageFunction) { - final CodecFactory codecFactory = config.getCodecFactory(); - return (final AirbyteStreamNameNamespacePair stream, final ConfiguredAirbyteCatalog catalog) -> { - final JsonToAvroSchemaConverter schemaConverter = new JsonToAvroSchemaConverter(); - final Schema schema = schemaConverter.getAvroSchema(catalog.getStreams() - .stream() - .filter(s -> s.getStream().getName().equals(stream.getName()) && StringUtils.equals(s.getStream().getNamespace(), stream.getNamespace())) - .findFirst() - .orElseThrow(() -> new RuntimeException(String.format("No such stream %s.%s", stream.getNamespace(), stream.getName()))) - .getStream() - .getJsonSchema(), - stream.getName(), stream.getNamespace()); - return new AvroSerializedBuffer(createStorageFunction.call(), codecFactory, schema); - }; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/avro/JsonFieldNameUpdater.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/avro/JsonFieldNameUpdater.java deleted file mode 100644 index 1da76566a71c9..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/avro/JsonFieldNameUpdater.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.avro; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import java.util.Map; - -/** - * This helper class is for testing only. It tracks the original and standardized names, and revert - * them when necessary, so that the tests can correctly compare the generated json with the original - * input. - */ -public class JsonFieldNameUpdater { - - // A map from original name to standardized name. - private final Map standardizedNames; - - public JsonFieldNameUpdater(final Map standardizedNames) { - this.standardizedNames = ImmutableMap.copyOf(standardizedNames); - } - - public JsonNode getJsonWithOriginalFieldNames(final JsonNode input) { - if (standardizedNames.size() == 0) { - return input; - } - String jsonString = Jsons.serialize(input); - for (final Map.Entry entry : standardizedNames.entrySet()) { - jsonString = jsonString.replaceAll(quote(entry.getValue()), quote(entry.getKey())); - } - return Jsons.deserialize(jsonString); - } - - @Override - public String toString() { - return standardizedNames.toString(); - } - - private static String quote(final String input) { - return "\"" + input + "\""; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/avro/JsonSchemaType.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/avro/JsonSchemaType.java deleted file mode 100644 index 14dee754f0f9c..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/avro/JsonSchemaType.java +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.avro; - -import java.util.Arrays; -import java.util.List; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; -import org.apache.avro.Schema; - -/** - * Mapping of JsonSchema types to Avro types. - */ -public enum JsonSchemaType { - - STRING_V1("WellKnownTypes.json#/definitions/String", Schema.Type.STRING), - INTEGER_V1("WellKnownTypes.json#/definitions/Integer", Schema.Type.LONG), - NUMBER_V1("WellKnownTypes.json#/definitions/Number", Schema.Type.DOUBLE), - BOOLEAN_V1("WellKnownTypes.json#/definitions/Boolean", Schema.Type.BOOLEAN), - BINARY_DATA_V1("WellKnownTypes.json#/definitions/BinaryData", Schema.Type.BYTES), - DATE_V1("WellKnownTypes.json#/definitions/Date", Schema.Type.INT), - TIMESTAMP_WITH_TIMEZONE_V1("WellKnownTypes.json#/definitions/TimestampWithTimezone", Schema.Type.LONG), - TIMESTAMP_WITHOUT_TIMEZONE_V1("WellKnownTypes.json#/definitions/TimestampWithoutTimezone", Schema.Type.LONG), - TIME_WITH_TIMEZONE_V1("WellKnownTypes.json#/definitions/TimeWithTimezone", Schema.Type.STRING), - TIME_WITHOUT_TIMEZONE_V1("WellKnownTypes.json#/definitions/TimeWithoutTimezone", Schema.Type.LONG), - OBJECT("object", Schema.Type.RECORD), - ARRAY("array", Schema.Type.ARRAY), - COMBINED("combined", Schema.Type.UNION), - @Deprecated - STRING_V0("string", null, Schema.Type.STRING), - @Deprecated - NUMBER_INT_V0("number", "integer", Schema.Type.LONG), - @Deprecated - NUMBER_BIGINT_V0("string", "big_integer", Schema.Type.STRING), - @Deprecated - NUMBER_FLOAT_V0("number", "float", Schema.Type.FLOAT), - @Deprecated - NUMBER_V0("number", null, Schema.Type.DOUBLE), - @Deprecated - INTEGER_V0("integer", null, Schema.Type.LONG), - @Deprecated - BOOLEAN_V0("boolean", null, Schema.Type.BOOLEAN), - @Deprecated - NULL("null", null, Schema.Type.NULL); - - private final String jsonSchemaType; - private final Schema.Type avroType; - private String jsonSchemaAirbyteType; - - JsonSchemaType(final String jsonSchemaType, final String jsonSchemaAirbyteType, final Schema.Type avroType) { - this.jsonSchemaType = jsonSchemaType; - this.jsonSchemaAirbyteType = jsonSchemaAirbyteType; - this.avroType = avroType; - } - - JsonSchemaType(final String jsonSchemaType, final Schema.Type avroType) { - this.jsonSchemaType = jsonSchemaType; - this.avroType = avroType; - } - - public static JsonSchemaType fromJsonSchemaType(final String jsonSchemaType) { - return fromJsonSchemaType(jsonSchemaType, null); - } - - public static JsonSchemaType fromJsonSchemaType(final @Nonnull String jsonSchemaType, final @Nullable String jsonSchemaAirbyteType) { - List matchSchemaType = null; - // Match by Type + airbyteType - if (jsonSchemaAirbyteType != null) { - matchSchemaType = Arrays.stream(values()) - .filter(type -> jsonSchemaType.equals(type.jsonSchemaType)) - .filter(type -> jsonSchemaAirbyteType.equals(type.jsonSchemaAirbyteType)) - .toList(); - } - - // Match by Type are no results already - if (matchSchemaType == null || matchSchemaType.isEmpty()) { - matchSchemaType = - Arrays.stream(values()).filter(format -> jsonSchemaType.equals(format.jsonSchemaType) && format.jsonSchemaAirbyteType == null).toList(); - } - - if (matchSchemaType.isEmpty()) { - throw new IllegalArgumentException( - String.format("Unexpected jsonSchemaType - %s and jsonSchemaAirbyteType - %s", jsonSchemaType, jsonSchemaAirbyteType)); - } else if (matchSchemaType.size() > 1) { - throw new RuntimeException( - String.format("Match with more than one json type! Matched types : %s, Inputs jsonSchemaType : %s, jsonSchemaAirbyteType : %s", - matchSchemaType, jsonSchemaType, jsonSchemaAirbyteType)); - } else { - return matchSchemaType.get(0); - } - } - - public String getJsonSchemaType() { - return jsonSchemaType; - } - - public Schema.Type getAvroType() { - return avroType; - } - - @Override - public String toString() { - return jsonSchemaType; - } - - public String getJsonSchemaAirbyteType() { - return jsonSchemaAirbyteType; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/avro/JsonToAvroSchemaConverter.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/avro/JsonToAvroSchemaConverter.java deleted file mode 100644 index d29e3f8476dad..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/avro/JsonToAvroSchemaConverter.java +++ /dev/null @@ -1,496 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.avro; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ArrayNode; -import com.google.common.base.Preconditions; -import io.airbyte.cdk.integrations.base.JavaBaseConstants; -import io.airbyte.commons.util.MoreIterators; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Optional; -import java.util.function.Predicate; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import javax.annotation.Nullable; -import org.apache.avro.LogicalTypes; -import org.apache.avro.Schema; -import org.apache.avro.SchemaBuilder; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import tech.allegro.schema.json2avro.converter.AdditionalPropertyField; - -/** - * The main function of this class is to convert a JsonSchema to Avro schema. It can also - * standardize schema names, and keep track of a mapping from the original names to the standardized - * ones, which is needed for unit tests.
    - * For limitations of this converter, see the README of this connector: - * https://docs.airbyte.io/integrations/destinations/s3#avro - */ -public class JsonToAvroSchemaConverter { - - private static final String REFERENCE_TYPE = "$ref"; - private static final String TYPE = "type"; - private static final String AIRBYTE_TYPE = "airbyte_type"; - private static final Schema UUID_SCHEMA = LogicalTypes.uuid() - .addToSchema(Schema.create(Schema.Type.STRING)); - private static final Schema NULL_SCHEMA = Schema.create(Schema.Type.NULL); - private static final Schema STRING_SCHEMA = Schema.create(Schema.Type.STRING); - private static final Logger LOGGER = LoggerFactory.getLogger(JsonToAvroSchemaConverter.class); - private static final Schema TIMESTAMP_MILLIS_SCHEMA = LogicalTypes.timestampMillis() - .addToSchema(Schema.create(Schema.Type.LONG)); - - private final Map standardizedNames = new HashMap<>(); - - static List getNonNullTypes(final String fieldName, final JsonNode fieldDefinition) { - return getTypes(fieldName, fieldDefinition).stream() - .filter(type -> type != JsonSchemaType.NULL).collect(Collectors.toList()); - } - - /** - * When no type or $ref are specified, it will default to string. - */ - static List getTypes(final String fieldName, final JsonNode fieldDefinition) { - final Optional combinedRestriction = getCombinedRestriction(fieldDefinition); - if (combinedRestriction.isPresent()) { - return Collections.singletonList(JsonSchemaType.COMBINED); - } - - final JsonNode typeProperty = fieldDefinition.get(TYPE); - final JsonNode referenceType = fieldDefinition.get(REFERENCE_TYPE); - - final JsonNode airbyteTypeProperty = fieldDefinition.get(AIRBYTE_TYPE); - final String airbyteType = airbyteTypeProperty == null ? null : airbyteTypeProperty.asText(); - - if (typeProperty != null && typeProperty.isArray()) { - return MoreIterators.toList(typeProperty.elements()).stream() - .map(s -> JsonSchemaType.fromJsonSchemaType(s.asText())) - .collect(Collectors.toList()); - } - - if (hasTextValue(typeProperty)) { - return Collections.singletonList(JsonSchemaType.fromJsonSchemaType(typeProperty.asText(), airbyteType)); - } - - if (hasTextValue(referenceType)) { - return Collections.singletonList(JsonSchemaType.fromJsonSchemaType(referenceType.asText(), airbyteType)); - } - - LOGGER.warn("Field \"{}\" has unexpected type {}. It will default to string.", fieldName, referenceType); - return Collections.singletonList(JsonSchemaType.STRING_V1); - } - - private static boolean hasTextValue(JsonNode value) { - return value != null && !value.isNull() && value.isTextual(); - } - - static Optional getCombinedRestriction(final JsonNode fieldDefinition) { - if (fieldDefinition.has("anyOf")) { - return Optional.of(fieldDefinition.get("anyOf")); - } - if (fieldDefinition.has("allOf")) { - return Optional.of(fieldDefinition.get("allOf")); - } - if (fieldDefinition.has("oneOf")) { - return Optional.of(fieldDefinition.get("oneOf")); - } - return Optional.empty(); - } - - public Map getStandardizedNames() { - return standardizedNames; - } - - /** - * @return Avro schema based on the input {@code jsonSchema}. - */ - public Schema getAvroSchema(final JsonNode jsonSchema, - final String streamName, - @Nullable final String namespace) { - return getAvroSchema(jsonSchema, streamName, namespace, true, true, true, true); - } - - /** - * @param appendAirbyteFields Add default airbyte fields (e.g. _airbyte_id) to the output Avro - * schema. - * @param appendExtraProps Add default additional property field to the output Avro schema. - * @param addStringToLogicalTypes Default logical type field to string. - * @param isRootNode Whether it is the root field in the input Json schema. - * @return Avro schema based on the input {@code jsonSchema}. - */ - public Schema getAvroSchema(final JsonNode jsonSchema, - final String fieldName, - @Nullable final String fieldNamespace, - final boolean appendAirbyteFields, - final boolean appendExtraProps, - final boolean addStringToLogicalTypes, - final boolean isRootNode) { - final String stdName = AvroConstants.NAME_TRANSFORMER.getIdentifier(fieldName); - final String stdNamespace = AvroConstants.NAME_TRANSFORMER.getNamespace(fieldNamespace); - final SchemaBuilder.RecordBuilder builder = SchemaBuilder.record(stdName); - if (!stdName.equals(fieldName)) { - standardizedNames.put(fieldName, stdName); - LOGGER.warn("Schema name \"{}\" contains illegal character(s) and is standardized to \"{}\"", fieldName, - stdName); - builder.doc( - String.format("%s%s%s", - AvroConstants.DOC_KEY_ORIGINAL_NAME, - AvroConstants.DOC_KEY_VALUE_DELIMITER, - fieldName)); - } - if (stdNamespace != null) { - builder.namespace(stdNamespace); - } - - final JsonNode properties = jsonSchema.get("properties"); - // object field with no "properties" will be handled by the default additional properties - // field during object conversion; so it is fine if there is no "properties" - final List subfieldNames = properties == null - ? Collections.emptyList() - : new ArrayList<>(MoreIterators.toList(properties.fieldNames())); - - final SchemaBuilder.FieldAssembler assembler = builder.fields(); - - if (appendAirbyteFields) { - assembler.name(JavaBaseConstants.COLUMN_NAME_AB_ID).type(UUID_SCHEMA).noDefault(); - assembler.name(JavaBaseConstants.COLUMN_NAME_EMITTED_AT) - .type(TIMESTAMP_MILLIS_SCHEMA).noDefault(); - } - - for (final String subfieldName : subfieldNames) { - // ignore additional properties fields, which will be consolidated - // into one field at the end - if (AvroConstants.JSON_EXTRA_PROPS_FIELDS.contains(subfieldName)) { - continue; - } - - final String stdFieldName = AvroConstants.NAME_TRANSFORMER.getIdentifier(subfieldName); - final JsonNode subfieldDefinition = properties.get(subfieldName); - final SchemaBuilder.FieldBuilder fieldBuilder = assembler.name(stdFieldName); - if (!stdFieldName.equals(subfieldName)) { - standardizedNames.put(subfieldName, stdFieldName); - LOGGER.warn("Field name \"{}\" contains illegal character(s) and is standardized to \"{}\"", - subfieldName, stdFieldName); - fieldBuilder.doc(String.format("%s%s%s", - AvroConstants.DOC_KEY_ORIGINAL_NAME, - AvroConstants.DOC_KEY_VALUE_DELIMITER, - subfieldName)); - } - final String subfieldNamespace = isRootNode - // Omit the namespace for root level fields, because it is directly assigned in the builder above. - // This may not be the correct choice. - ? null - : (stdNamespace == null ? stdName : (stdNamespace + "." + stdName)); - fieldBuilder.type(parseJsonField(subfieldName, subfieldNamespace, subfieldDefinition, appendExtraProps, addStringToLogicalTypes)) - .withDefault(null); - } - - if (appendExtraProps) { - // support additional properties in one field - assembler.name(AvroConstants.AVRO_EXTRA_PROPS_FIELD) - .type(AdditionalPropertyField.FIELD_SCHEMA).withDefault(null); - } - - return assembler.endRecord(); - } - - /** - * Generate Avro schema for a single Json field type. For example: - * - *
    -   * "number" -> ["double"]
    -   * 
    - */ - Schema parseSingleType(final String fieldName, - @Nullable final String fieldNamespace, - final JsonSchemaType fieldType, - final JsonNode fieldDefinition, - final boolean appendExtraProps, - final boolean addStringToLogicalTypes) { - Preconditions - .checkState(fieldType != JsonSchemaType.NULL, "Null types should have been filtered out"); - - // the additional properties fields are filtered out and never passed into this method; - // but this method is able to handle them for completeness - if (AvroConstants.JSON_EXTRA_PROPS_FIELDS.contains(fieldName)) { - return AdditionalPropertyField.FIELD_SCHEMA; - } - - final Schema fieldSchema; - switch (fieldType) { - case INTEGER_V1, NUMBER_V1, BOOLEAN_V1, STRING_V1, TIME_WITH_TIMEZONE_V1, BINARY_DATA_V1 -> fieldSchema = - Schema.create(fieldType.getAvroType()); - case DATE_V1 -> fieldSchema = LogicalTypes.date().addToSchema(Schema.create(Schema.Type.INT)); - case TIMESTAMP_WITH_TIMEZONE_V1, TIMESTAMP_WITHOUT_TIMEZONE_V1 -> fieldSchema = LogicalTypes.timestampMicros() - .addToSchema(Schema.create(Schema.Type.LONG)); - case TIME_WITHOUT_TIMEZONE_V1 -> fieldSchema = LogicalTypes.timeMicros().addToSchema(Schema.create(Schema.Type.LONG)); - case INTEGER_V0, NUMBER_V0, NUMBER_INT_V0, NUMBER_BIGINT_V0, NUMBER_FLOAT_V0, BOOLEAN_V0 -> fieldSchema = - Schema.create(fieldType.getAvroType()); - case STRING_V0 -> { - if (fieldDefinition.has("format")) { - final String format = fieldDefinition.get("format").asText(); - fieldSchema = switch (format) { - case "date-time" -> LogicalTypes.timestampMicros().addToSchema(Schema.create(Schema.Type.LONG)); - case "date" -> LogicalTypes.date().addToSchema(Schema.create(Schema.Type.INT)); - case "time" -> LogicalTypes.timeMicros().addToSchema(Schema.create(Schema.Type.LONG)); - default -> Schema.create(fieldType.getAvroType()); - }; - } else { - fieldSchema = Schema.create(fieldType.getAvroType()); - } - } - case COMBINED -> { - final Optional combinedRestriction = getCombinedRestriction(fieldDefinition); - final List unionTypes = - parseJsonTypeUnion(fieldName, fieldNamespace, (ArrayNode) combinedRestriction.get(), appendExtraProps, addStringToLogicalTypes); - fieldSchema = createUnionAndCheckLongTypesDuplications(unionTypes); - } - case ARRAY -> { - final JsonNode items = fieldDefinition.get("items"); - if (items == null) { - LOGGER.warn("Array field \"{}\" does not specify the items type. It will default to an array of strings", fieldName); - fieldSchema = Schema.createArray(Schema.createUnion(NULL_SCHEMA, STRING_SCHEMA)); - } else if (items.isObject()) { - if ((items.has("type") && !items.get("type").isNull()) || - items.has("$ref") && !items.get("$ref").isNull()) { - // Objects inside Json array has no names. We name it with the ".items" suffix. - final String elementFieldName = fieldName + ".items"; - fieldSchema = Schema.createArray(parseJsonField(elementFieldName, fieldNamespace, items, appendExtraProps, addStringToLogicalTypes)); - } else { - LOGGER.warn("Array field \"{}\" does not specify the items type. it will default to an array of strings", fieldName); - fieldSchema = Schema.createArray(Schema.createUnion(NULL_SCHEMA, STRING_SCHEMA)); - } - } else if (items.isArray()) { - final List arrayElementTypes = - parseJsonTypeUnion(fieldName, fieldNamespace, (ArrayNode) items, appendExtraProps, addStringToLogicalTypes); - arrayElementTypes.add(0, NULL_SCHEMA); - fieldSchema = Schema.createArray(Schema.createUnion(arrayElementTypes)); - } else { - LOGGER.warn("Array field \"{}\" has invalid items specification: {}. It will default to an array of strings.", fieldName, items); - fieldSchema = Schema.createArray(Schema.createUnion(NULL_SCHEMA, STRING_SCHEMA)); - } - } - case OBJECT -> fieldSchema = - getAvroSchema(fieldDefinition, fieldName, fieldNamespace, false, appendExtraProps, addStringToLogicalTypes, false); - default -> { - LOGGER.warn("Field \"{}\" has invalid type definition: {}. It will default to string.", fieldName, fieldDefinition); - fieldSchema = Schema.createUnion(NULL_SCHEMA, STRING_SCHEMA); - } - } - return fieldSchema; - } - - /** - * Take in a union of Json field definitions, and generate Avro field schema unions. For example: - * - *
    -   * ["number", { ... }] -> ["double", { ... }]
    -   * 
    - */ - List parseJsonTypeUnion(final String fieldName, - @Nullable final String fieldNamespace, - final ArrayNode types, - final boolean appendExtraProps, - final boolean addStringToLogicalTypes) { - final List schemas = MoreIterators.toList(types.elements()) - .stream() - .flatMap(definition -> getNonNullTypes(fieldName, definition).stream().flatMap(type -> { - final String namespace = fieldNamespace == null - ? fieldName - : fieldNamespace + "." + fieldName; - final Schema singleFieldSchema = parseSingleType(fieldName, namespace, type, definition, appendExtraProps, addStringToLogicalTypes); - - if (singleFieldSchema.isUnion()) { - return singleFieldSchema.getTypes().stream(); - } else { - return Stream.of(singleFieldSchema); - } - })) - .distinct() - .collect(Collectors.toList()); - - return mergeRecordSchemas(fieldName, fieldNamespace, schemas, appendExtraProps); - } - - /** - * If there are multiple object fields, those fields are combined into one Avro record. This is - * because Avro does not allow specifying a tuple of types (i.e. the first element is type x, the - * second element is type y, and so on). For example, the following Json field types: - * - *
    -   * [
    -   *   {
    -   *     "type": "object",
    -   *     "properties": {
    -   *       "id": { "type": "integer" }
    -   *     }
    -   *   },
    -   *   {
    -   *     "type": "object",
    -   *     "properties": {
    -   *       "id": { "type": "string" }
    -   *       "message": { "type": "string" }
    -   *     }
    -   *   }
    -   * ]
    -   * 
    - * - * is converted to this Avro schema: - * - *
    -   * {
    -   *   "type": "record",
    -   *   "fields": [
    -   *     { "name": "id", "type": ["int", "string"] },
    -   *     { "name": "message", "type": "string" }
    -   *   ]
    -   * }
    -   * 
    - */ - List mergeRecordSchemas(final String fieldName, - @Nullable final String fieldNamespace, - final List schemas, - final boolean appendExtraProps) { - final LinkedHashMap> recordFieldSchemas = new LinkedHashMap<>(); - final Map> recordFieldDocs = new HashMap<>(); - - final List mergedSchemas = schemas.stream() - // gather record schemas to construct a single record schema later on - .peek(schema -> { - if (schema.getType() == Schema.Type.RECORD) { - for (final Schema.Field field : schema.getFields()) { - recordFieldSchemas.putIfAbsent(field.name(), new LinkedList<>()); - recordFieldSchemas.get(field.name()).add(field.schema()); - if (field.doc() != null) { - recordFieldDocs.putIfAbsent(field.name(), new LinkedList<>()); - recordFieldDocs.get(field.name()).add(field.doc()); - } - } - } - }) - // remove record schemas because they will be merged into one - .filter(schema -> schema.getType() != Schema.Type.RECORD) - .collect(Collectors.toList()); - - // create one record schema from all the record fields - if (!recordFieldSchemas.isEmpty()) { - final SchemaBuilder.RecordBuilder builder = SchemaBuilder.record(fieldName); - if (fieldNamespace != null) { - builder.namespace(fieldNamespace); - } - - final SchemaBuilder.FieldAssembler assembler = builder.fields(); - - for (final Map.Entry> entry : recordFieldSchemas.entrySet()) { - final String subfieldName = entry.getKey(); - // ignore additional properties fields, which will be consolidated - // into one field at the end - if (AvroConstants.JSON_EXTRA_PROPS_FIELDS.contains(subfieldName)) { - continue; - } - - final SchemaBuilder.FieldBuilder subfieldBuilder = assembler.name(subfieldName); - final List subfieldDocs = recordFieldDocs.getOrDefault(subfieldName, Collections.emptyList()); - if (!subfieldDocs.isEmpty()) { - subfieldBuilder.doc(String.join("; ", subfieldDocs)); - } - final List subfieldSchemas = entry.getValue().stream() - .flatMap(schema -> schema.getTypes().stream() - // filter out null and add it later on as the first element - .filter(s -> !s.equals(NULL_SCHEMA))) - .distinct() - .collect(Collectors.toList()); - final String subfieldNamespace = fieldNamespace == null ? fieldName : (fieldNamespace + "." + fieldName); - // recursively merge schemas of a subfield because they may include multiple record schemas as well - final List mergedSubfieldSchemas = mergeRecordSchemas(subfieldName, subfieldNamespace, subfieldSchemas, appendExtraProps); - mergedSubfieldSchemas.add(0, NULL_SCHEMA); - subfieldBuilder.type(Schema.createUnion(mergedSubfieldSchemas)).withDefault(null); - } - - if (appendExtraProps) { - // add back additional properties - assembler.name(AvroConstants.AVRO_EXTRA_PROPS_FIELD) - .type(AdditionalPropertyField.FIELD_SCHEMA).withDefault(null); - } - mergedSchemas.add(assembler.endRecord()); - } - - return mergedSchemas; - } - - /** - * Take in a Json field definition, and generate a nullable Avro field schema. For example: - * - *
    -   * {"type": ["number", { ... }]} -> ["null", "double", { ... }]
    -   * 
    - */ - Schema parseJsonField(final String fieldName, - @Nullable final String fieldNamespace, - final JsonNode fieldDefinition, - final boolean appendExtraProps, - final boolean addStringToLogicalTypes) { - // Filter out null types, which will be added back in the end. - final List nonNullFieldTypes = getNonNullTypes(fieldName, fieldDefinition) - .stream() - .flatMap(fieldType -> { - final Schema singleFieldSchema = - parseSingleType(fieldName, fieldNamespace, fieldType, fieldDefinition, appendExtraProps, addStringToLogicalTypes); - if (singleFieldSchema.isUnion()) { - return singleFieldSchema.getTypes().stream(); - } else { - return Stream.of(singleFieldSchema); - } - }) - .distinct() - .collect(Collectors.toList()); - - if (nonNullFieldTypes.isEmpty()) { - return Schema.create(Schema.Type.NULL); - } else { - // Mark every field as nullable to prevent missing value exceptions from Avro / Parquet. - if (!nonNullFieldTypes.contains(NULL_SCHEMA)) { - nonNullFieldTypes.add(0, NULL_SCHEMA); - } - // Logical types are converted to a union of logical type itself and string. The purpose is to - // default the logical type field to a string, if the value of the logical type field is invalid and - // cannot be properly processed. - if ((nonNullFieldTypes - .stream().anyMatch(schema -> schema.getLogicalType() != null)) && - (!nonNullFieldTypes.contains(STRING_SCHEMA)) && addStringToLogicalTypes) { - nonNullFieldTypes.add(STRING_SCHEMA); - } - return Schema.createUnion(nonNullFieldTypes); - } - } - - /** - * Method checks unionTypes list for content. If we have both "long" and "long-timestamp" types then - * it keeps the "long" only. Need to do it for Schema creation otherwise it would fail with a - * duplicated types exception. - * - * @param unionTypes - list of union types - * @return new Schema - */ - private Schema createUnionAndCheckLongTypesDuplications(List unionTypes) { - Predicate isALong = type -> type.getType() == Schema.Type.LONG; - Predicate isPlainLong = isALong.and(type -> Objects.isNull(type.getLogicalType())); - Predicate isTimestampMicrosLong = - isALong.and(type -> Objects.nonNull(type.getLogicalType()) && "timestamp-micros".equals(type.getLogicalType().getName())); - - boolean hasPlainLong = unionTypes.stream().anyMatch(isPlainLong); - boolean hasTimestampMicrosLong = unionTypes.stream().anyMatch(isTimestampMicrosLong); - Predicate removeTimestampType = type -> !(hasPlainLong && hasTimestampMicrosLong && isTimestampMicrosLong.test(type)); - return Schema.createUnion(unionTypes.stream().filter(removeTimestampType).collect(Collectors.toList())); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/avro/S3AvroFormatConfig.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/avro/S3AvroFormatConfig.java deleted file mode 100644 index abd5d81df6dbc..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/avro/S3AvroFormatConfig.java +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.avro; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.destination.s3.S3Format; -import io.airbyte.cdk.integrations.destination.s3.S3FormatConfig; -import org.apache.avro.file.CodecFactory; - -public class S3AvroFormatConfig implements S3FormatConfig { - - public static final String DEFAULT_SUFFIX = ".avro"; - - private final CodecFactory codecFactory; - - public S3AvroFormatConfig(final CodecFactory codecFactory) { - this.codecFactory = codecFactory; - } - - public S3AvroFormatConfig(final JsonNode formatConfig) { - this.codecFactory = parseCodecConfig(formatConfig.get("compression_codec")); - } - - public static CodecFactory parseCodecConfig(final JsonNode compressionCodecConfig) { - if (compressionCodecConfig == null || compressionCodecConfig.isNull()) { - return CodecFactory.nullCodec(); - } - - final JsonNode codecConfig = compressionCodecConfig.get("codec"); - if (codecConfig == null || codecConfig.isNull() || !codecConfig.isTextual()) { - return CodecFactory.nullCodec(); - } - final String codecType = codecConfig.asText(); - final CompressionCodec codec = CompressionCodec.fromConfigValue(codecConfig.asText()); - switch (codec) { - case NULL -> { - return CodecFactory.nullCodec(); - } - case DEFLATE -> { - final int compressionLevel = getCompressionLevel(compressionCodecConfig, 0, 0, 9); - return CodecFactory.deflateCodec(compressionLevel); - } - case BZIP2 -> { - return CodecFactory.bzip2Codec(); - } - case XZ -> { - final int compressionLevel = getCompressionLevel(compressionCodecConfig, 6, 0, 9); - return CodecFactory.xzCodec(compressionLevel); - } - case ZSTANDARD -> { - final int compressionLevel = getCompressionLevel(compressionCodecConfig, 3, -5, 22); - final boolean includeChecksum = getIncludeChecksum(compressionCodecConfig, false); - return CodecFactory.zstandardCodec(compressionLevel, includeChecksum); - } - case SNAPPY -> { - return CodecFactory.snappyCodec(); - } - default -> { - throw new IllegalArgumentException("Unsupported compression codec: " + codecType); - } - } - } - - public static int getCompressionLevel(final JsonNode compressionCodecConfig, final int defaultLevel, final int minLevel, final int maxLevel) { - final JsonNode levelConfig = compressionCodecConfig.get("compression_level"); - if (levelConfig == null || levelConfig.isNull() || !levelConfig.isIntegralNumber()) { - return defaultLevel; - } - final int level = levelConfig.asInt(); - if (level < minLevel || level > maxLevel) { - throw new IllegalArgumentException( - String.format("Invalid compression level: %d, expected an integer in range [%d, %d]", level, minLevel, maxLevel)); - } - return level; - } - - public static boolean getIncludeChecksum(final JsonNode compressionCodecConfig, final boolean defaultValue) { - final JsonNode checksumConfig = compressionCodecConfig.get("include_checksum"); - if (checksumConfig == null || checksumConfig.isNumber() || !checksumConfig.isBoolean()) { - return defaultValue; - } - return checksumConfig.asBoolean(); - } - - public CodecFactory getCodecFactory() { - return codecFactory; - } - - @Override - public String getFileExtension() { - return DEFAULT_SUFFIX; - } - - @Override - public S3Format getFormat() { - return S3Format.AVRO; - } - - public enum CompressionCodec { - - NULL("no compression"), - DEFLATE("deflate"), - BZIP2("bzip2"), - XZ("xz"), - ZSTANDARD("zstandard"), - SNAPPY("snappy"); - - private final String configValue; - - CompressionCodec(final String configValue) { - this.configValue = configValue; - } - - public static CompressionCodec fromConfigValue(final String configValue) { - for (final CompressionCodec codec : values()) { - if (configValue.equalsIgnoreCase(codec.configValue)) { - return codec; - } - } - throw new IllegalArgumentException("Unknown codec config value: " + configValue); - } - - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/avro/S3AvroWriter.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/avro/S3AvroWriter.java deleted file mode 100644 index 4d4512e67b066..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/avro/S3AvroWriter.java +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.avro; - -import alex.mojaki.s3upload.MultiPartOutputStream; -import alex.mojaki.s3upload.StreamTransferManager; -import com.amazonaws.services.s3.AmazonS3; -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfig; -import io.airbyte.cdk.integrations.destination.s3.S3Format; -import io.airbyte.cdk.integrations.destination.s3.template.S3FilenameTemplateParameterObject; -import io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory; -import io.airbyte.cdk.integrations.destination.s3.writer.BaseS3Writer; -import io.airbyte.cdk.integrations.destination.s3.writer.DestinationFileWriter; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import java.io.IOException; -import java.sql.Timestamp; -import java.util.UUID; -import org.apache.avro.Schema; -import org.apache.avro.file.DataFileWriter; -import org.apache.avro.generic.GenericData.Record; -import org.apache.avro.generic.GenericDatumWriter; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import tech.allegro.schema.json2avro.converter.JsonAvroConverter; - -public class S3AvroWriter extends BaseS3Writer implements DestinationFileWriter { - - protected static final Logger LOGGER = LoggerFactory.getLogger(S3AvroWriter.class); - - private final AvroRecordFactory avroRecordFactory; - private final StreamTransferManager uploadManager; - private final MultiPartOutputStream outputStream; - private final DataFileWriter dataFileWriter; - private final String objectKey; - private final String gcsFileLocation; - - public S3AvroWriter(final S3DestinationConfig config, - final AmazonS3 s3Client, - final ConfiguredAirbyteStream configuredStream, - final Timestamp uploadTimestamp, - final Schema schema, - final JsonAvroConverter converter) - throws IOException { - super(config, s3Client, configuredStream); - - final String outputFilename = determineOutputFilename(S3FilenameTemplateParameterObject - .builder() - .timestamp(uploadTimestamp) - .s3Format(S3Format.AVRO) - .fileExtension(S3Format.AVRO.getFileExtension()) - .fileNamePattern(config.getFileNamePattern()) - .build()); - - objectKey = String.join("/", outputPrefix, outputFilename); - - LOGGER.info("Full S3 path for stream '{}': s3://{}/{}", stream.getName(), config.getBucketName(), objectKey); - gcsFileLocation = String.format("gs://%s/%s", config.getBucketName(), objectKey); - - this.avroRecordFactory = new AvroRecordFactory(schema, converter); - this.uploadManager = StreamTransferManagerFactory - .create(config.getBucketName(), objectKey, s3Client) - .get(); - // We only need one output stream as we only have one input stream. This is reasonably performant. - this.outputStream = uploadManager.getMultiPartOutputStreams().get(0); - - final S3AvroFormatConfig formatConfig = (S3AvroFormatConfig) config.getFormatConfig(); - // The DataFileWriter always uses binary encoding. - // If json encoding is needed in the future, use the GenericDatumWriter directly. - this.dataFileWriter = new DataFileWriter<>(new GenericDatumWriter()) - .setCodec(formatConfig.getCodecFactory()) - .create(schema, outputStream); - } - - @Override - public void write(final UUID id, final AirbyteRecordMessage recordMessage) throws IOException { - dataFileWriter.append(avroRecordFactory.getAvroRecord(id, recordMessage)); - } - - @Override - protected void closeWhenSucceed() throws IOException { - dataFileWriter.close(); - outputStream.close(); - uploadManager.complete(); - } - - @Override - protected void closeWhenFail() throws IOException { - dataFileWriter.close(); - outputStream.close(); - uploadManager.abort(); - } - - @Override - public String getOutputPath() { - return objectKey; - } - - @Override - public String getFileLocation() { - return gcsFileLocation; - } - - @Override - public S3Format getFileFormat() { - return S3Format.AVRO; - } - - @Override - public void write(final JsonNode formattedData) throws IOException { - final Record record = avroRecordFactory.getAvroRecord(formattedData); - dataFileWriter.append(record); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/constant/S3Constants.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/constant/S3Constants.java deleted file mode 100644 index 8bb737e24f321..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/constant/S3Constants.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.constant; - -public final class S3Constants { - - public static final String S_3_BUCKET_PATH = "s3_bucket_path"; - public static final String FILE_NAME_PATTERN = "file_name_pattern"; - public static final String S_3_PATH_FORMAT = "s3_path_format"; - public static final String S_3_ENDPOINT = "s3_endpoint"; - public static final String ACCESS_KEY_ID = "access_key_id"; - public static final String S_3_ACCESS_KEY_ID = "s3_access_key_id"; - public static final String S_3_SECRET_ACCESS_KEY = "s3_secret_access_key"; - public static final String SECRET_ACCESS_KEY = "secret_access_key"; - public static final String S_3_BUCKET_NAME = "s3_bucket_name"; - public static final String S_3_BUCKET_REGION = "s3_bucket_region"; - - // r2 requires account_id - public static final String ACCOUNT_ID = "account_id"; - - private S3Constants() {} - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/credential/BlobStorageCredentialConfig.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/credential/BlobStorageCredentialConfig.java deleted file mode 100644 index 564e85c3dc62c..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/credential/BlobStorageCredentialConfig.java +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.credential; - -public interface BlobStorageCredentialConfig { - - CredentialType getCredentialType(); - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/credential/S3AWSDefaultProfileCredentialConfig.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/credential/S3AWSDefaultProfileCredentialConfig.java deleted file mode 100644 index 141a12d4cec53..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/credential/S3AWSDefaultProfileCredentialConfig.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.credential; - -import com.amazonaws.auth.AWSCredentialsProvider; -import com.amazonaws.auth.DefaultAWSCredentialsProviderChain; - -public class S3AWSDefaultProfileCredentialConfig implements S3CredentialConfig { - - @Override - public S3CredentialType getCredentialType() { - return S3CredentialType.DEFAULT_PROFILE; - } - - @Override - public AWSCredentialsProvider getS3CredentialsProvider() { - return new DefaultAWSCredentialsProviderChain(); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/credential/S3AccessKeyCredentialConfig.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/credential/S3AccessKeyCredentialConfig.java deleted file mode 100644 index 0f775162ca081..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/credential/S3AccessKeyCredentialConfig.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.credential; - -import com.amazonaws.auth.AWSCredentials; -import com.amazonaws.auth.AWSCredentialsProvider; -import com.amazonaws.auth.AWSStaticCredentialsProvider; -import com.amazonaws.auth.BasicAWSCredentials; - -public class S3AccessKeyCredentialConfig implements S3CredentialConfig { - - private final String accessKeyId; - private final String secretAccessKey; - - public S3AccessKeyCredentialConfig(final String accessKeyId, final String secretAccessKey) { - this.accessKeyId = accessKeyId; - this.secretAccessKey = secretAccessKey; - } - - @Override - public S3CredentialType getCredentialType() { - return S3CredentialType.ACCESS_KEY; - } - - @Override - public AWSCredentialsProvider getS3CredentialsProvider() { - final AWSCredentials awsCreds = new BasicAWSCredentials(accessKeyId, secretAccessKey); - return new AWSStaticCredentialsProvider(awsCreds); - } - - public String getAccessKeyId() { - return accessKeyId; - } - - public String getSecretAccessKey() { - return secretAccessKey; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/credential/S3CredentialConfig.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/credential/S3CredentialConfig.java deleted file mode 100644 index d85f5fa07faf7..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/credential/S3CredentialConfig.java +++ /dev/null @@ -1,13 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.credential; - -import com.amazonaws.auth.AWSCredentialsProvider; - -public interface S3CredentialConfig extends BlobStorageCredentialConfig { - - AWSCredentialsProvider getS3CredentialsProvider(); - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/credential/S3CredentialType.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/credential/S3CredentialType.java deleted file mode 100644 index 2f65c1b98d64f..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/credential/S3CredentialType.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.credential; - -public enum S3CredentialType { - - ACCESS_KEY, - DEFAULT_PROFILE - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/credential/S3InstanceProfileCredentialConfig.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/credential/S3InstanceProfileCredentialConfig.java deleted file mode 100644 index 13af785fc6b5d..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/credential/S3InstanceProfileCredentialConfig.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.credential; - -import com.amazonaws.auth.AWSCredentialsProvider; -import com.amazonaws.auth.InstanceProfileCredentialsProvider; - -public class S3InstanceProfileCredentialConfig implements S3CredentialConfig { - - @Override - public S3CredentialType getCredentialType() { - return S3CredentialType.DEFAULT_PROFILE; - } - - @Override - public AWSCredentialsProvider getS3CredentialsProvider() { - return new InstanceProfileCredentialsProvider(false); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/csv/BaseSheetGenerator.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/csv/BaseSheetGenerator.java deleted file mode 100644 index 89962a61088a2..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/csv/BaseSheetGenerator.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.csv; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import java.util.LinkedList; -import java.util.List; -import java.util.UUID; - -/** - * CSV data row = ID column + timestamp column + record columns. This class takes care of the first - * two columns, which is shared by downstream implementations. - */ -public abstract class BaseSheetGenerator implements CsvSheetGenerator { - - public List getDataRow(final UUID id, final AirbyteRecordMessage recordMessage) { - final List data = new LinkedList<>(); - data.add(id); - data.add(recordMessage.getEmittedAt()); - data.addAll(getRecordColumns(recordMessage.getData())); - return data; - } - - @Override - public List getDataRow(final JsonNode formattedData) { - return new LinkedList<>(getRecordColumns(formattedData)); - } - - public List getDataRow(final UUID id, final String formattedString, final long emittedAt) { - throw new UnsupportedOperationException("Not implemented in BaseSheetGenerator"); - } - - abstract List getRecordColumns(JsonNode json); - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/csv/CsvSerializedBuffer.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/csv/CsvSerializedBuffer.java deleted file mode 100644 index 8555dc0d58e46..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/csv/CsvSerializedBuffer.java +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.csv; - -import io.airbyte.cdk.integrations.destination.record_buffer.BaseSerializedBuffer; -import io.airbyte.cdk.integrations.destination.record_buffer.BufferCreateFunction; -import io.airbyte.cdk.integrations.destination.record_buffer.BufferStorage; -import io.airbyte.cdk.integrations.destination.s3.util.CompressionType; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.io.IOException; -import java.io.OutputStream; -import java.io.PrintWriter; -import java.nio.charset.StandardCharsets; -import java.util.UUID; -import java.util.concurrent.Callable; -import org.apache.commons.csv.CSVFormat; -import org.apache.commons.csv.CSVPrinter; -import org.apache.commons.csv.QuoteMode; -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class CsvSerializedBuffer extends BaseSerializedBuffer { - - private static final Logger LOGGER = LoggerFactory.getLogger(CsvSerializedBuffer.class); - - public static final String CSV_GZ_SUFFIX = ".csv.gz"; - - private final CsvSheetGenerator csvSheetGenerator; - private CSVPrinter csvPrinter; - private CSVFormat csvFormat; - - public CsvSerializedBuffer(final BufferStorage bufferStorage, - final CsvSheetGenerator csvSheetGenerator, - final boolean compression) - throws Exception { - super(bufferStorage); - this.csvSheetGenerator = csvSheetGenerator; - csvPrinter = null; - csvFormat = CSVFormat.DEFAULT; - // we always want to compress csv files - withCompression(compression); - } - - public CsvSerializedBuffer withCsvFormat(final CSVFormat csvFormat) { - if (csvPrinter == null) { - this.csvFormat = csvFormat; - return this; - } - throw new RuntimeException("Options should be configured before starting to write"); - } - - @Override - protected void initWriter(final OutputStream outputStream) throws IOException { - csvPrinter = new CSVPrinter(new PrintWriter(outputStream, true, StandardCharsets.UTF_8), csvFormat); - } - - /** - * TODO: (ryankfu) remove this call within {@link SerializedBufferingStrategy} and move to use - * recordString - * - * @param record AirbyteRecordMessage to be written - * @throws IOException - */ - @Override - protected void writeRecord(final AirbyteRecordMessage record) throws IOException { - csvPrinter.printRecord(csvSheetGenerator.getDataRow(UUID.randomUUID(), record)); - } - - @Override - protected void writeRecord(final String recordString, final long emittedAt) throws IOException { - csvPrinter.printRecord(csvSheetGenerator.getDataRow(UUID.randomUUID(), recordString, emittedAt)); - } - - @Override - protected void flushWriter() throws IOException { - // in an async world, it is possible that flush writer gets called even if no records were accepted. - if (csvPrinter != null) { - csvPrinter.flush(); - } else { - LOGGER.warn("Trying to flush but no printer is initialized."); - } - } - - @Override - protected void closeWriter() throws IOException { - // in an async world, it is possible that flush writer gets called even if no records were accepted. - if (csvPrinter != null) { - csvPrinter.close(); - } else { - LOGGER.warn("Trying to close but no printer is initialized."); - } - } - - public static BufferCreateFunction createFunction(final S3CsvFormatConfig config, - final Callable createStorageFunction) { - return (final AirbyteStreamNameNamespacePair stream, final ConfiguredAirbyteCatalog catalog) -> { - if (config == null) { - return new CsvSerializedBuffer(createStorageFunction.call(), new StagingDatabaseCsvSheetGenerator(), true); - } - - final CsvSheetGenerator csvSheetGenerator = CsvSheetGenerator.Factory.create(catalog.getStreams() - .stream() - .filter(s -> s.getStream().getName().equals(stream.getName()) && StringUtils.equals(s.getStream().getNamespace(), stream.getNamespace())) - .findFirst() - .orElseThrow(() -> new RuntimeException(String.format("No such stream %s.%s", stream.getNamespace(), stream.getName()))) - .getStream() - .getJsonSchema(), - config); - final CSVFormat csvSettings = CSVFormat.DEFAULT - .withQuoteMode(QuoteMode.NON_NUMERIC) - .withHeader(csvSheetGenerator.getHeaderRow().toArray(new String[0])); - final boolean compression = config.getCompressionType() != CompressionType.NO_COMPRESSION; - return new CsvSerializedBuffer(createStorageFunction.call(), csvSheetGenerator, compression).withCsvFormat(csvSettings); - }; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/csv/CsvSheetGenerator.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/csv/CsvSheetGenerator.java deleted file mode 100644 index 3da15a08780c5..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/csv/CsvSheetGenerator.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.csv; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.destination.s3.util.Flattening; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import java.util.List; -import java.util.UUID; - -/** - * This class takes case of the generation of the CSV data sheet, including the header row and the - * data row. - */ -public interface CsvSheetGenerator { - - List getHeaderRow(); - - // TODO: (ryankfu) remove this and switch over all destinations to pass in serialized recordStrings, - // both for performance and lowers memory footprint - List getDataRow(UUID id, AirbyteRecordMessage recordMessage); - - List getDataRow(JsonNode formattedData); - - List getDataRow(UUID id, String formattedString, long emittedAt); - - final class Factory { - - public static CsvSheetGenerator create(final JsonNode jsonSchema, final S3CsvFormatConfig formatConfig) { - if (formatConfig.getFlattening() == Flattening.NO) { - return new NoFlatteningSheetGenerator(); - } else if (formatConfig.getFlattening() == Flattening.ROOT_LEVEL) { - return new RootLevelFlatteningSheetGenerator(jsonSchema); - } else { - throw new IllegalArgumentException( - "Unexpected flattening config: " + formatConfig.getFlattening()); - } - } - - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/csv/CsvSheetGenerators.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/csv/CsvSheetGenerators.java deleted file mode 100644 index 3f65f3875196c..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/csv/CsvSheetGenerators.java +++ /dev/null @@ -1,9 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.csv; - -public class CsvSheetGenerators { - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/csv/NoFlatteningSheetGenerator.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/csv/NoFlatteningSheetGenerator.java deleted file mode 100644 index e37c26020abad..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/csv/NoFlatteningSheetGenerator.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.csv; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.Lists; -import io.airbyte.cdk.integrations.base.JavaBaseConstants; -import io.airbyte.commons.json.Jsons; -import java.util.Collections; -import java.util.List; - -public class NoFlatteningSheetGenerator extends BaseSheetGenerator implements CsvSheetGenerator { - - @Override - public List getHeaderRow() { - return Lists.newArrayList( - JavaBaseConstants.COLUMN_NAME_AB_ID, - JavaBaseConstants.COLUMN_NAME_EMITTED_AT, - JavaBaseConstants.COLUMN_NAME_DATA); - } - - /** - * When no flattening is needed, the record column is just one json blob. - */ - @Override - List getRecordColumns(final JsonNode json) { - return Collections.singletonList(Jsons.serialize(json)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/csv/RootLevelFlatteningSheetGenerator.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/csv/RootLevelFlatteningSheetGenerator.java deleted file mode 100644 index 9be064aad4aaf..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/csv/RootLevelFlatteningSheetGenerator.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.csv; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.Lists; -import io.airbyte.cdk.integrations.base.JavaBaseConstants; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.util.MoreIterators; -import java.util.LinkedList; -import java.util.List; -import java.util.stream.Collectors; - -public class RootLevelFlatteningSheetGenerator extends BaseSheetGenerator implements CsvSheetGenerator { - - /** - * Keep a header list to iterate the input json object with a defined order. - */ - private final List recordHeaders; - - public RootLevelFlatteningSheetGenerator(final JsonNode jsonSchema) { - this.recordHeaders = MoreIterators.toList(jsonSchema.get("properties").fieldNames()) - .stream().sorted().collect(Collectors.toList()); - } - - @Override - public List getHeaderRow() { - final List headers = Lists.newArrayList(JavaBaseConstants.COLUMN_NAME_AB_ID, - JavaBaseConstants.COLUMN_NAME_EMITTED_AT); - headers.addAll(recordHeaders); - return headers; - } - - /** - * With root level flattening, the record columns are the first level fields of the json. - */ - @Override - List getRecordColumns(final JsonNode json) { - final List values = new LinkedList<>(); - for (final String field : recordHeaders) { - final JsonNode value = json.get(field); - if (value == null) { - values.add(""); - } else if (value.isValueNode()) { - // Call asText method on value nodes so that proper string - // representation of json values can be returned by Jackson. - // Otherwise, CSV printer will just call the toString method, - // which can be problematic (e.g. text node will have extra - // double quotation marks around its text value). - values.add(value.asText()); - } else { - values.add(Jsons.serialize(value)); - } - } - - return values; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/csv/S3CsvFormatConfig.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/csv/S3CsvFormatConfig.java deleted file mode 100644 index 663e39192acee..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/csv/S3CsvFormatConfig.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.csv; - -import static io.airbyte.cdk.integrations.destination.s3.S3DestinationConstants.COMPRESSION_ARG_NAME; -import static io.airbyte.cdk.integrations.destination.s3.S3DestinationConstants.DEFAULT_COMPRESSION_TYPE; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.destination.s3.S3Format; -import io.airbyte.cdk.integrations.destination.s3.S3FormatConfig; -import io.airbyte.cdk.integrations.destination.s3.util.CompressionType; -import io.airbyte.cdk.integrations.destination.s3.util.CompressionTypeHelper; -import io.airbyte.cdk.integrations.destination.s3.util.Flattening; -import java.util.Objects; - -public class S3CsvFormatConfig implements S3FormatConfig { - - public static final String CSV_SUFFIX = ".csv"; - - private final Flattening flattening; - private final CompressionType compressionType; - - public S3CsvFormatConfig(final JsonNode formatConfig) { - this( - Flattening.fromValue(formatConfig.has("flattening") ? formatConfig.get("flattening").asText() : Flattening.NO.getValue()), - formatConfig.has(COMPRESSION_ARG_NAME) - ? CompressionTypeHelper.parseCompressionType(formatConfig.get(COMPRESSION_ARG_NAME)) - : DEFAULT_COMPRESSION_TYPE); - } - - public S3CsvFormatConfig(final Flattening flattening, final CompressionType compressionType) { - this.flattening = flattening; - this.compressionType = compressionType; - } - - @Override - public S3Format getFormat() { - return S3Format.CSV; - } - - public Flattening getFlattening() { - return flattening; - } - - @Override - public String getFileExtension() { - return CSV_SUFFIX + compressionType.getFileExtension(); - } - - public CompressionType getCompressionType() { - return compressionType; - } - - @Override - public String toString() { - return "S3CsvFormatConfig{" + - "flattening=" + flattening + - ", compression=" + compressionType.name() + - '}'; - } - - @Override - public boolean equals(final Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - final S3CsvFormatConfig that = (S3CsvFormatConfig) o; - return flattening == that.flattening - && Objects.equals(compressionType, that.compressionType); - } - - @Override - public int hashCode() { - return Objects.hash(flattening, compressionType); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/csv/S3CsvWriter.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/csv/S3CsvWriter.java deleted file mode 100644 index 294d01bbd5c9a..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/csv/S3CsvWriter.java +++ /dev/null @@ -1,187 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.csv; - -import alex.mojaki.s3upload.MultiPartOutputStream; -import alex.mojaki.s3upload.StreamTransferManager; -import com.amazonaws.services.s3.AmazonS3; -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfig; -import io.airbyte.cdk.integrations.destination.s3.S3Format; -import io.airbyte.cdk.integrations.destination.s3.template.S3FilenameTemplateParameterObject; -import io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory; -import io.airbyte.cdk.integrations.destination.s3.writer.BaseS3Writer; -import io.airbyte.cdk.integrations.destination.s3.writer.DestinationFileWriter; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import java.io.IOException; -import java.io.PrintWriter; -import java.nio.charset.StandardCharsets; -import java.sql.Timestamp; -import java.util.UUID; -import org.apache.commons.csv.CSVFormat; -import org.apache.commons.csv.CSVPrinter; -import org.apache.commons.csv.QuoteMode; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class S3CsvWriter extends BaseS3Writer implements DestinationFileWriter { - - private static final Logger LOGGER = LoggerFactory.getLogger(S3CsvWriter.class); - - private final CsvSheetGenerator csvSheetGenerator; - private final StreamTransferManager uploadManager; - private final MultiPartOutputStream outputStream; - private final CSVPrinter csvPrinter; - private final String objectKey; - private final String gcsFileLocation; - - private S3CsvWriter(final S3DestinationConfig config, - final AmazonS3 s3Client, - final ConfiguredAirbyteStream configuredStream, - final Timestamp uploadTimestamp, - final int uploadThreads, - final int queueCapacity, - final boolean writeHeader, - CSVFormat csvSettings, - final CsvSheetGenerator csvSheetGenerator) - throws IOException { - super(config, s3Client, configuredStream); - - this.csvSheetGenerator = csvSheetGenerator; - - final String fileSuffix = "_" + UUID.randomUUID(); - final String outputFilename = determineOutputFilename(S3FilenameTemplateParameterObject - .builder() - .customSuffix(fileSuffix) - .s3Format(S3Format.CSV) - .fileExtension(S3Format.CSV.getFileExtension()) - .fileNamePattern(config.getFileNamePattern()) - .timestamp(uploadTimestamp) - .build()); - this.objectKey = String.join("/", outputPrefix, outputFilename); - - LOGGER.info("Full S3 path for stream '{}': s3://{}/{}", stream.getName(), config.getBucketName(), - objectKey); - gcsFileLocation = String.format("gs://%s/%s", config.getBucketName(), objectKey); - - this.uploadManager = StreamTransferManagerFactory - .create(config.getBucketName(), objectKey, s3Client) - .get() - .numUploadThreads(uploadThreads) - .queueCapacity(queueCapacity); - // We only need one output stream as we only have one input stream. This is reasonably performant. - this.outputStream = uploadManager.getMultiPartOutputStreams().get(0); - if (writeHeader) { - csvSettings = csvSettings.withHeader(csvSheetGenerator.getHeaderRow().toArray(new String[0])); - } - this.csvPrinter = new CSVPrinter(new PrintWriter(outputStream, true, StandardCharsets.UTF_8), csvSettings); - } - - public static class Builder { - - private final S3DestinationConfig config; - private final AmazonS3 s3Client; - private final ConfiguredAirbyteStream configuredStream; - private final Timestamp uploadTimestamp; - private int uploadThreads = StreamTransferManagerFactory.DEFAULT_UPLOAD_THREADS; - private int queueCapacity = StreamTransferManagerFactory.DEFAULT_QUEUE_CAPACITY; - private boolean withHeader = true; - private CSVFormat csvSettings = CSVFormat.DEFAULT.withQuoteMode(QuoteMode.ALL); - private CsvSheetGenerator csvSheetGenerator; - - public Builder(final S3DestinationConfig config, - final AmazonS3 s3Client, - final ConfiguredAirbyteStream configuredStream, - final Timestamp uploadTimestamp) { - this.config = config; - this.s3Client = s3Client; - this.configuredStream = configuredStream; - this.uploadTimestamp = uploadTimestamp; - } - - public Builder uploadThreads(final int uploadThreads) { - this.uploadThreads = uploadThreads; - return this; - } - - public Builder queueCapacity(final int queueCapacity) { - this.queueCapacity = queueCapacity; - return this; - } - - public Builder withHeader(final boolean withHeader) { - this.withHeader = withHeader; - return this; - } - - public Builder csvSettings(final CSVFormat csvSettings) { - this.csvSettings = csvSettings; - return this; - } - - public Builder csvSheetGenerator(final CsvSheetGenerator csvSheetGenerator) { - this.csvSheetGenerator = csvSheetGenerator; - return this; - } - - public S3CsvWriter build() throws IOException { - if (csvSheetGenerator == null) { - final S3CsvFormatConfig formatConfig = (S3CsvFormatConfig) config.getFormatConfig(); - csvSheetGenerator = CsvSheetGenerator.Factory.create(configuredStream.getStream().getJsonSchema(), formatConfig); - } - return new S3CsvWriter(config, - s3Client, - configuredStream, - uploadTimestamp, - uploadThreads, - queueCapacity, - withHeader, - csvSettings, - csvSheetGenerator); - } - - } - - @Override - public void write(final UUID id, final AirbyteRecordMessage recordMessage) throws IOException { - csvPrinter.printRecord(csvSheetGenerator.getDataRow(id, recordMessage)); - } - - @Override - protected void closeWhenSucceed() throws IOException { - csvPrinter.close(); - outputStream.close(); - uploadManager.complete(); - } - - @Override - protected void closeWhenFail() throws IOException { - csvPrinter.close(); - outputStream.close(); - uploadManager.abort(); - } - - @Override - public String getOutputPath() { - return objectKey; - } - - @Override - public String getFileLocation() { - return gcsFileLocation; - } - - @Override - public S3Format getFileFormat() { - return S3Format.CSV; - } - - @Override - public void write(final JsonNode formattedData) throws IOException { - csvPrinter.printRecord(csvSheetGenerator.getDataRow(formattedData)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/csv/StagingDatabaseCsvSheetGenerator.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/csv/StagingDatabaseCsvSheetGenerator.java deleted file mode 100644 index 9ff3ecb9dab64..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/csv/StagingDatabaseCsvSheetGenerator.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.csv; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.base.JavaBaseConstants; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import java.time.Instant; -import java.util.Collections; -import java.util.LinkedList; -import java.util.List; -import java.util.UUID; - -/** - * A CsvSheetGenerator that produces data in the format expected by JdbcSqlOperations. See - * JdbcSqlOperations#createTableQuery. - *

    - * This intentionally does not extend {@link BaseSheetGenerator}, because it needs the columns in a - * different order (ABID, JSON, timestamp) vs (ABID, timestamp, JSON) - *

    - * In 1s1t mode, the column ordering is also different (raw_id, extracted_at, loaded_at, data). Note - * that the loaded_at column is rendered as an empty string; callers are expected to configure their - * destination to parse this as NULL. For example, Snowflake's COPY into command accepts a NULL_IF - * parameter, and Redshift accepts an EMPTYASNULL option. - */ -public class StagingDatabaseCsvSheetGenerator implements CsvSheetGenerator { - - private final boolean useDestinationsV2Columns; - private final List header; - - public StagingDatabaseCsvSheetGenerator() { - this(false); - } - - public StagingDatabaseCsvSheetGenerator(final boolean useDestinationsV2Columns) { - this.useDestinationsV2Columns = useDestinationsV2Columns; - this.header = this.useDestinationsV2Columns ? JavaBaseConstants.V2_RAW_TABLE_COLUMN_NAMES : JavaBaseConstants.LEGACY_RAW_TABLE_COLUMNS; - } - - // TODO is this even used anywhere? - @Override - public List getHeaderRow() { - return header; - } - - @Override - public List getDataRow(final UUID id, final AirbyteRecordMessage recordMessage) { - return getDataRow(id, Jsons.serialize(recordMessage.getData()), recordMessage.getEmittedAt()); - } - - @Override - public List getDataRow(final JsonNode formattedData) { - return new LinkedList<>(Collections.singletonList(Jsons.serialize(formattedData))); - } - - @Override - public List getDataRow(final UUID id, final String formattedString, final long emittedAt) { - if (useDestinationsV2Columns) { - return List.of( - id, - Instant.ofEpochMilli(emittedAt), - "", - formattedString); - } else { - return List.of( - id, - formattedString, - Instant.ofEpochMilli(emittedAt)); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/jsonl/JsonLSerializedBuffer.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/jsonl/JsonLSerializedBuffer.java deleted file mode 100644 index 6e901ce7a19f2..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/jsonl/JsonLSerializedBuffer.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.jsonl; - -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.ObjectNode; -import io.airbyte.cdk.integrations.base.JavaBaseConstants; -import io.airbyte.cdk.integrations.destination.record_buffer.BaseSerializedBuffer; -import io.airbyte.cdk.integrations.destination.record_buffer.BufferCreateFunction; -import io.airbyte.cdk.integrations.destination.record_buffer.BufferStorage; -import io.airbyte.cdk.integrations.destination.s3.S3DestinationConstants; -import io.airbyte.cdk.integrations.destination.s3.util.CompressionType; -import io.airbyte.cdk.integrations.destination.s3.util.Flattening; -import io.airbyte.commons.jackson.MoreMappers; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.io.OutputStream; -import java.io.PrintWriter; -import java.nio.charset.StandardCharsets; -import java.util.Map; -import java.util.UUID; -import java.util.concurrent.Callable; - -public class JsonLSerializedBuffer extends BaseSerializedBuffer { - - private static final ObjectMapper MAPPER = MoreMappers.initMapper(); - - private PrintWriter printWriter; - - private final boolean flattenData; - - protected JsonLSerializedBuffer(final BufferStorage bufferStorage, final boolean gzipCompression, final boolean flattenData) throws Exception { - super(bufferStorage); - // we always want to compress jsonl files - withCompression(gzipCompression); - this.flattenData = flattenData; - } - - @Override - protected void initWriter(final OutputStream outputStream) { - printWriter = new PrintWriter(outputStream, true, StandardCharsets.UTF_8); - } - - @Override - protected void writeRecord(final AirbyteRecordMessage record) { - final ObjectNode json = MAPPER.createObjectNode(); - json.put(JavaBaseConstants.COLUMN_NAME_AB_ID, UUID.randomUUID().toString()); - json.put(JavaBaseConstants.COLUMN_NAME_EMITTED_AT, record.getEmittedAt()); - if (flattenData) { - final Map data = MAPPER.convertValue(record.getData(), new TypeReference<>() {}); - json.setAll(data); - } else { - json.set(JavaBaseConstants.COLUMN_NAME_DATA, record.getData()); - } - printWriter.println(Jsons.serialize(json)); - } - - @Override - protected void flushWriter() { - printWriter.flush(); - } - - @Override - protected void closeWriter() { - printWriter.close(); - } - - public static BufferCreateFunction createBufferFunction(final S3JsonlFormatConfig config, - final Callable createStorageFunction) { - return (final AirbyteStreamNameNamespacePair stream, final ConfiguredAirbyteCatalog catalog) -> { - final CompressionType compressionType = config == null - ? S3DestinationConstants.DEFAULT_COMPRESSION_TYPE - : config.getCompressionType(); - - final Flattening flattening = config == null - ? Flattening.NO - : config.getFlatteningType(); - return new JsonLSerializedBuffer(createStorageFunction.call(), compressionType != CompressionType.NO_COMPRESSION, - flattening != Flattening.NO); - }; - - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/jsonl/S3JsonlFormatConfig.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/jsonl/S3JsonlFormatConfig.java deleted file mode 100644 index bc9cf84aa29ff..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/jsonl/S3JsonlFormatConfig.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.jsonl; - -import static io.airbyte.cdk.integrations.destination.s3.S3DestinationConstants.COMPRESSION_ARG_NAME; -import static io.airbyte.cdk.integrations.destination.s3.S3DestinationConstants.DEFAULT_COMPRESSION_TYPE; -import static io.airbyte.cdk.integrations.destination.s3.S3DestinationConstants.FLATTENING_ARG_NAME; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.destination.s3.S3Format; -import io.airbyte.cdk.integrations.destination.s3.S3FormatConfig; -import io.airbyte.cdk.integrations.destination.s3.util.CompressionType; -import io.airbyte.cdk.integrations.destination.s3.util.CompressionTypeHelper; -import io.airbyte.cdk.integrations.destination.s3.util.Flattening; -import java.util.Objects; -import lombok.ToString; - -@ToString -public class S3JsonlFormatConfig implements S3FormatConfig { - - public static final String JSONL_SUFFIX = ".jsonl"; - - private final Flattening flattening; - - private final CompressionType compressionType; - - public S3JsonlFormatConfig(final JsonNode formatConfig) { - this( - formatConfig.has(FLATTENING_ARG_NAME) - ? Flattening.fromValue(formatConfig.get(FLATTENING_ARG_NAME).asText()) - : Flattening.NO, - formatConfig.has(COMPRESSION_ARG_NAME) - ? CompressionTypeHelper.parseCompressionType(formatConfig.get(COMPRESSION_ARG_NAME)) - : DEFAULT_COMPRESSION_TYPE); - } - - public S3JsonlFormatConfig(final Flattening flattening, final CompressionType compressionType) { - this.flattening = flattening; - this.compressionType = compressionType; - } - - @Override - public S3Format getFormat() { - return S3Format.JSONL; - } - - @Override - public String getFileExtension() { - return JSONL_SUFFIX + compressionType.getFileExtension(); - } - - public CompressionType getCompressionType() { - return compressionType; - } - - public Flattening getFlatteningType() { - return flattening; - } - - @Override - public boolean equals(final Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - final S3JsonlFormatConfig that = (S3JsonlFormatConfig) o; - return flattening == that.flattening - && Objects.equals(compressionType, that.compressionType); - } - - @Override - public int hashCode() { - return Objects.hash(flattening, compressionType); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/jsonl/S3JsonlWriter.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/jsonl/S3JsonlWriter.java deleted file mode 100644 index c4e96533486af..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/jsonl/S3JsonlWriter.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.jsonl; - -import alex.mojaki.s3upload.MultiPartOutputStream; -import alex.mojaki.s3upload.StreamTransferManager; -import com.amazonaws.services.s3.AmazonS3; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.ObjectNode; -import io.airbyte.cdk.integrations.base.JavaBaseConstants; -import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfig; -import io.airbyte.cdk.integrations.destination.s3.S3Format; -import io.airbyte.cdk.integrations.destination.s3.template.S3FilenameTemplateParameterObject; -import io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory; -import io.airbyte.cdk.integrations.destination.s3.writer.BaseS3Writer; -import io.airbyte.cdk.integrations.destination.s3.writer.DestinationFileWriter; -import io.airbyte.commons.jackson.MoreMappers; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import java.io.IOException; -import java.io.PrintWriter; -import java.nio.charset.StandardCharsets; -import java.sql.Timestamp; -import java.util.UUID; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class S3JsonlWriter extends BaseS3Writer implements DestinationFileWriter { - - protected static final Logger LOGGER = LoggerFactory.getLogger(S3JsonlWriter.class); - - private static final ObjectMapper MAPPER = MoreMappers.initMapper(); - - private final StreamTransferManager uploadManager; - private final MultiPartOutputStream outputStream; - private final PrintWriter printWriter; - private final String objectKey; - private final String gcsFileLocation; - - public S3JsonlWriter(final S3DestinationConfig config, - final AmazonS3 s3Client, - final ConfiguredAirbyteStream configuredStream, - final Timestamp uploadTimestamp) - throws IOException { - super(config, s3Client, configuredStream); - - final String outputFilename = determineOutputFilename(S3FilenameTemplateParameterObject - .builder() - .timestamp(uploadTimestamp) - .s3Format(S3Format.JSONL) - .fileExtension(S3Format.JSONL.getFileExtension()) - .fileNamePattern(config.getFileNamePattern()) - .build()); - objectKey = String.join("/", outputPrefix, outputFilename); - - LOGGER.info("Full S3 path for stream '{}': s3://{}/{}", stream.getName(), config.getBucketName(), objectKey); - gcsFileLocation = String.format("gs://%s/%s", config.getBucketName(), objectKey); - - this.uploadManager = StreamTransferManagerFactory - .create(config.getBucketName(), objectKey, s3Client) - .get(); - // We only need one output stream as we only have one input stream. This is reasonably performant. - this.outputStream = uploadManager.getMultiPartOutputStreams().get(0); - this.printWriter = new PrintWriter(outputStream, true, StandardCharsets.UTF_8); - } - - @Override - public void write(final UUID id, final AirbyteRecordMessage recordMessage) { - final ObjectNode json = MAPPER.createObjectNode(); - json.put(JavaBaseConstants.COLUMN_NAME_AB_ID, id.toString()); - json.put(JavaBaseConstants.COLUMN_NAME_EMITTED_AT, recordMessage.getEmittedAt()); - json.set(JavaBaseConstants.COLUMN_NAME_DATA, recordMessage.getData()); - printWriter.println(Jsons.serialize(json)); - } - - @Override - protected void closeWhenSucceed() { - printWriter.close(); - outputStream.close(); - uploadManager.complete(); - } - - @Override - protected void closeWhenFail() { - printWriter.close(); - outputStream.close(); - uploadManager.abort(); - } - - @Override - public String getOutputPath() { - return objectKey; - } - - @Override - public String getFileLocation() { - return gcsFileLocation; - } - - @Override - public S3Format getFileFormat() { - return S3Format.JSONL; - } - - @Override - public void write(final JsonNode formattedData) throws IOException { - printWriter.println(Jsons.serialize(formattedData)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/parquet/ParquetSerializedBuffer.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/parquet/ParquetSerializedBuffer.java deleted file mode 100644 index f33778d751b75..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/parquet/ParquetSerializedBuffer.java +++ /dev/null @@ -1,174 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.parquet; - -import static org.apache.parquet.avro.AvroWriteSupport.WRITE_OLD_LIST_STRUCTURE; - -import io.airbyte.cdk.integrations.destination.record_buffer.BufferCreateFunction; -import io.airbyte.cdk.integrations.destination.record_buffer.FileBuffer; -import io.airbyte.cdk.integrations.destination.record_buffer.SerializableBuffer; -import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfig; -import io.airbyte.cdk.integrations.destination.s3.avro.AvroConstants; -import io.airbyte.cdk.integrations.destination.s3.avro.AvroRecordFactory; -import io.airbyte.cdk.integrations.destination.s3.avro.JsonToAvroSchemaConverter; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.UUID; -import org.apache.avro.Schema; -import org.apache.avro.generic.GenericData.Record; -import org.apache.commons.io.FileUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.hadoop.conf.Configuration; -import org.apache.parquet.avro.AvroParquetWriter; -import org.apache.parquet.hadoop.ParquetWriter; -import org.apache.parquet.hadoop.util.HadoopOutputFile; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * The {@link io.airbyte.cdk.integrations.destination.record_buffer.BaseSerializedBuffer} class - * abstracts the {@link io.airbyte.cdk.integrations.destination.record_buffer.BufferStorage} from - * the details of the format the data is going to be stored in. - *

    - * Unfortunately, the Parquet library doesn't allow us to manipulate the output stream and forces us - * to go through {@link HadoopOutputFile} instead. So we can't benefit from the abstraction - * described above. Therefore, we re-implement the necessary methods to be used as - * {@link SerializableBuffer}, while data will be buffered in such a hadoop file. - */ -public class ParquetSerializedBuffer implements SerializableBuffer { - - private static final Logger LOGGER = LoggerFactory.getLogger(ParquetSerializedBuffer.class); - - private final AvroRecordFactory avroRecordFactory; - private final ParquetWriter parquetWriter; - private final Path bufferFile; - private InputStream inputStream; - private Long lastByteCount; - private boolean isClosed; - - public ParquetSerializedBuffer(final S3DestinationConfig config, - final AirbyteStreamNameNamespacePair stream, - final ConfiguredAirbyteCatalog catalog) - throws IOException { - final JsonToAvroSchemaConverter schemaConverter = new JsonToAvroSchemaConverter(); - final Schema schema = schemaConverter.getAvroSchema(catalog.getStreams() - .stream() - .filter(s -> s.getStream().getName().equals(stream.getName()) && StringUtils.equals(s.getStream().getNamespace(), stream.getNamespace())) - .findFirst() - .orElseThrow(() -> new RuntimeException(String.format("No such stream %s.%s", stream.getNamespace(), stream.getName()))) - .getStream() - .getJsonSchema(), - stream.getName(), stream.getNamespace()); - bufferFile = Files.createTempFile(UUID.randomUUID().toString(), ".parquet"); - Files.deleteIfExists(bufferFile); - avroRecordFactory = new AvroRecordFactory(schema, AvroConstants.JSON_CONVERTER); - final S3ParquetFormatConfig formatConfig = (S3ParquetFormatConfig) config.getFormatConfig(); - final Configuration avroConfig = new Configuration(); - avroConfig.setBoolean(WRITE_OLD_LIST_STRUCTURE, false); - parquetWriter = AvroParquetWriter.builder(HadoopOutputFile - .fromPath(new org.apache.hadoop.fs.Path(bufferFile.toUri()), avroConfig)) - .withConf(avroConfig) // yes, this should be here despite the fact we pass this config above in path - .withSchema(schema) - .withCompressionCodec(formatConfig.getCompressionCodec()) - .withRowGroupSize(formatConfig.getBlockSize()) - .withMaxPaddingSize(formatConfig.getMaxPaddingSize()) - .withPageSize(formatConfig.getPageSize()) - .withDictionaryPageSize(formatConfig.getDictionaryPageSize()) - .withDictionaryEncoding(formatConfig.isDictionaryEncoding()) - .build(); - inputStream = null; - isClosed = false; - lastByteCount = 0L; - } - - @Override - public long accept(final AirbyteRecordMessage record) throws Exception { - if (inputStream == null && !isClosed) { - final long startCount = getByteCount(); - parquetWriter.write(avroRecordFactory.getAvroRecord(UUID.randomUUID(), record)); - return getByteCount() - startCount; - } else { - throw new IllegalCallerException("Buffer is already closed, it cannot accept more messages"); - } - } - - @Override - public long accept(final String recordString, final long emittedAt) throws Exception { - throw new UnsupportedOperationException("This method is not supported for ParquetSerializedBuffer"); - } - - @Override - public void flush() throws Exception { - if (inputStream == null && !isClosed) { - getByteCount(); - parquetWriter.close(); - inputStream = new FileInputStream(bufferFile.toFile()); - LOGGER.info("Finished writing data to {} ({})", getFilename(), FileUtils.byteCountToDisplaySize(getByteCount())); - } - } - - @Override - public long getByteCount() { - if (inputStream != null) { - // once the parquetWriter is closed, we can't query how many bytes are in it, so we cache the last - // count - return lastByteCount; - } - lastByteCount = parquetWriter.getDataSize(); - return lastByteCount; - } - - @Override - public String getFilename() throws IOException { - return bufferFile.getFileName().toString(); - } - - @Override - public File getFile() throws IOException { - return bufferFile.toFile(); - } - - @Override - public InputStream getInputStream() { - return inputStream; - } - - @Override - public long getMaxTotalBufferSizeInBytes() { - return FileBuffer.MAX_TOTAL_BUFFER_SIZE_BYTES; - } - - @Override - public long getMaxPerStreamBufferSizeInBytes() { - return FileBuffer.MAX_PER_STREAM_BUFFER_SIZE_BYTES; - } - - @Override - public int getMaxConcurrentStreamsInBuffer() { - return FileBuffer.DEFAULT_MAX_CONCURRENT_STREAM_IN_BUFFER; - } - - @Override - public void close() throws Exception { - if (!isClosed) { - inputStream.close(); - Files.deleteIfExists(bufferFile); - isClosed = true; - } - } - - public static BufferCreateFunction createFunction(final S3DestinationConfig s3DestinationConfig) { - return (final AirbyteStreamNameNamespacePair stream, final ConfiguredAirbyteCatalog catalog) -> new ParquetSerializedBuffer(s3DestinationConfig, - stream, catalog); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/parquet/S3ParquetConstants.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/parquet/S3ParquetConstants.java deleted file mode 100644 index 6a6ccb9486036..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/parquet/S3ParquetConstants.java +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.parquet; - -import org.apache.parquet.hadoop.metadata.CompressionCodecName; - -public class S3ParquetConstants { - - // Parquet writer - public static final CompressionCodecName DEFAULT_COMPRESSION_CODEC = CompressionCodecName.UNCOMPRESSED; - public static final int DEFAULT_BLOCK_SIZE_MB = 128; - public static final int DEFAULT_MAX_PADDING_SIZE_MB = 8; - public static final int DEFAULT_PAGE_SIZE_KB = 1024; - public static final int DEFAULT_DICTIONARY_PAGE_SIZE_KB = 1024; - public static final boolean DEFAULT_DICTIONARY_ENCODING = true; - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/parquet/S3ParquetFormatConfig.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/parquet/S3ParquetFormatConfig.java deleted file mode 100644 index e7e14a3634349..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/parquet/S3ParquetFormatConfig.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.parquet; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.destination.s3.S3Format; -import io.airbyte.cdk.integrations.destination.s3.S3FormatConfig; -import org.apache.parquet.hadoop.metadata.CompressionCodecName; - -public class S3ParquetFormatConfig implements S3FormatConfig { - - public static final String PARQUET_SUFFIX = ".parquet"; - - private final CompressionCodecName compressionCodec; - private final int blockSize; - private final int maxPaddingSize; - private final int pageSize; - private final int dictionaryPageSize; - private final boolean dictionaryEncoding; - - public S3ParquetFormatConfig(final JsonNode formatConfig) { - final int blockSizeMb = S3FormatConfig.withDefault(formatConfig, "block_size_mb", S3ParquetConstants.DEFAULT_BLOCK_SIZE_MB); - final int maxPaddingSizeMb = S3FormatConfig.withDefault(formatConfig, "max_padding_size_mb", S3ParquetConstants.DEFAULT_MAX_PADDING_SIZE_MB); - final int pageSizeKb = S3FormatConfig.withDefault(formatConfig, "page_size_kb", S3ParquetConstants.DEFAULT_PAGE_SIZE_KB); - final int dictionaryPageSizeKb = - S3FormatConfig.withDefault(formatConfig, "dictionary_page_size_kb", S3ParquetConstants.DEFAULT_DICTIONARY_PAGE_SIZE_KB); - - this.compressionCodec = CompressionCodecName - .valueOf(S3FormatConfig.withDefault(formatConfig, "compression_codec", S3ParquetConstants.DEFAULT_COMPRESSION_CODEC.name()).toUpperCase()); - this.blockSize = blockSizeMb * 1024 * 1024; - this.maxPaddingSize = maxPaddingSizeMb * 1024 * 1024; - this.pageSize = pageSizeKb * 1024; - this.dictionaryPageSize = dictionaryPageSizeKb * 1024; - this.dictionaryEncoding = S3FormatConfig.withDefault(formatConfig, "dictionary_encoding", S3ParquetConstants.DEFAULT_DICTIONARY_ENCODING); - } - - @Override - public S3Format getFormat() { - return S3Format.PARQUET; - } - - @Override - public String getFileExtension() { - return PARQUET_SUFFIX; - } - - public CompressionCodecName getCompressionCodec() { - return compressionCodec; - } - - public int getBlockSize() { - return blockSize; - } - - public int getMaxPaddingSize() { - return maxPaddingSize; - } - - public int getPageSize() { - return pageSize; - } - - public int getDictionaryPageSize() { - return dictionaryPageSize; - } - - public boolean isDictionaryEncoding() { - return dictionaryEncoding; - } - - @Override - public String toString() { - return "S3ParquetFormatConfig{" + - "compressionCodec=" + compressionCodec + ", " + - "blockSize=" + blockSize + ", " + - "maxPaddingSize=" + maxPaddingSize + ", " + - "pageSize=" + pageSize + ", " + - "dictionaryPageSize=" + dictionaryPageSize + ", " + - "dictionaryEncoding=" + dictionaryEncoding + ", " + - '}'; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/parquet/S3ParquetWriter.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/parquet/S3ParquetWriter.java deleted file mode 100644 index 91d940f08db82..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/parquet/S3ParquetWriter.java +++ /dev/null @@ -1,155 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.parquet; - -import static org.apache.parquet.avro.AvroWriteSupport.WRITE_OLD_LIST_STRUCTURE; - -import com.amazonaws.services.s3.AmazonS3; -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfig; -import io.airbyte.cdk.integrations.destination.s3.S3Format; -import io.airbyte.cdk.integrations.destination.s3.avro.AvroRecordFactory; -import io.airbyte.cdk.integrations.destination.s3.credential.S3AccessKeyCredentialConfig; -import io.airbyte.cdk.integrations.destination.s3.template.S3FilenameTemplateParameterObject; -import io.airbyte.cdk.integrations.destination.s3.writer.BaseS3Writer; -import io.airbyte.cdk.integrations.destination.s3.writer.DestinationFileWriter; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import java.io.IOException; -import java.net.URI; -import java.net.URISyntaxException; -import java.sql.Timestamp; -import java.util.UUID; -import org.apache.avro.Schema; -import org.apache.avro.generic.GenericData.Record; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.fs.s3a.Constants; -import org.apache.parquet.avro.AvroParquetWriter; -import org.apache.parquet.hadoop.ParquetWriter; -import org.apache.parquet.hadoop.util.HadoopOutputFile; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import tech.allegro.schema.json2avro.converter.JsonAvroConverter; - -public class S3ParquetWriter extends BaseS3Writer implements DestinationFileWriter { - - private static final Logger LOGGER = LoggerFactory.getLogger(S3ParquetWriter.class); - - private final ParquetWriter parquetWriter; - private final AvroRecordFactory avroRecordFactory; - private final Schema schema; - private final String outputFilename; - // object key = / - private final String objectKey; - // full file path = s3://// - private final String fullFilePath; - - public S3ParquetWriter(final S3DestinationConfig config, - final AmazonS3 s3Client, - final ConfiguredAirbyteStream configuredStream, - final Timestamp uploadTimestamp, - final Schema schema, - final JsonAvroConverter converter) - throws URISyntaxException, IOException { - super(config, s3Client, configuredStream); - - outputFilename = determineOutputFilename(S3FilenameTemplateParameterObject - .builder() - .s3Format(S3Format.PARQUET) - .timestamp(uploadTimestamp) - .fileExtension(S3Format.PARQUET.getFileExtension()) - .fileNamePattern(config.getFileNamePattern()) - .build()); - - objectKey = String.join("/", outputPrefix, outputFilename); - fullFilePath = String.format("s3a://%s/%s", config.getBucketName(), objectKey); - LOGGER.info("Full S3 path for stream '{}': {}", stream.getName(), fullFilePath); - - final Path path = new Path(new URI(fullFilePath)); - final S3ParquetFormatConfig formatConfig = (S3ParquetFormatConfig) config.getFormatConfig(); - final Configuration hadoopConfig = getHadoopConfig(config); - hadoopConfig.setBoolean(WRITE_OLD_LIST_STRUCTURE, false); - this.parquetWriter = AvroParquetWriter.builder(HadoopOutputFile.fromPath(path, hadoopConfig)) - .withConf(hadoopConfig) // yes, this should be here despite the fact we pass this config above in path - .withSchema(schema) - .withCompressionCodec(formatConfig.getCompressionCodec()) - .withRowGroupSize(formatConfig.getBlockSize()) - .withMaxPaddingSize(formatConfig.getMaxPaddingSize()) - .withPageSize(formatConfig.getPageSize()) - .withDictionaryPageSize(formatConfig.getDictionaryPageSize()) - .withDictionaryEncoding(formatConfig.isDictionaryEncoding()) - .build(); - this.avroRecordFactory = new AvroRecordFactory(schema, converter); - this.schema = schema; - } - - public static Configuration getHadoopConfig(final S3DestinationConfig config) { - final Configuration hadoopConfig = new Configuration(); - final S3AccessKeyCredentialConfig credentialConfig = (S3AccessKeyCredentialConfig) config.getS3CredentialConfig(); - hadoopConfig.set(Constants.ACCESS_KEY, credentialConfig.getAccessKeyId()); - hadoopConfig.set(Constants.SECRET_KEY, credentialConfig.getSecretAccessKey()); - if (config.getEndpoint().isEmpty()) { - hadoopConfig.set(Constants.ENDPOINT, String.format("s3.%s.amazonaws.com", config.getBucketRegion())); - } else { - hadoopConfig.set(Constants.ENDPOINT, config.getEndpoint()); - hadoopConfig.set(Constants.PATH_STYLE_ACCESS, "true"); - } - hadoopConfig.set(Constants.AWS_CREDENTIALS_PROVIDER, - "org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider"); - return hadoopConfig; - } - - public Schema getSchema() { - return schema; - } - - /** - * The file path includes prefix and filename, but does not include the bucket name. - */ - public String getOutputFilePath() { - return outputPrefix + "/" + outputFilename; - } - - public String getOutputFilename() { - return outputFilename; - } - - @Override - public void write(final UUID id, final AirbyteRecordMessage recordMessage) throws IOException { - parquetWriter.write(avroRecordFactory.getAvroRecord(id, recordMessage)); - } - - @Override - protected void closeWhenSucceed() throws IOException { - parquetWriter.close(); - } - - @Override - protected void closeWhenFail() throws IOException { - parquetWriter.close(); - } - - @Override - public String getOutputPath() { - return objectKey; - } - - @Override - public String getFileLocation() { - return fullFilePath; - } - - @Override - public S3Format getFileFormat() { - return S3Format.PARQUET; - } - - @Override - public void write(final JsonNode formattedData) throws IOException { - parquetWriter.write(avroRecordFactory.getAvroRecord(formattedData)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/template/S3FilenameTemplateManager.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/template/S3FilenameTemplateManager.java deleted file mode 100644 index 73058c75cf55a..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/template/S3FilenameTemplateManager.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.template; - -import static java.util.Optional.ofNullable; -import static org.apache.commons.lang3.StringUtils.EMPTY; - -import io.airbyte.cdk.integrations.destination.s3.S3DestinationConstants; -import java.io.IOException; -import java.text.DateFormat; -import java.text.SimpleDateFormat; -import java.time.Instant; -import java.util.HashMap; -import java.util.Locale; -import java.util.Map; -import java.util.TimeZone; -import java.util.regex.Matcher; -import java.util.regex.Pattern; -import org.apache.commons.text.StringSubstitutor; -import org.apache.commons.text.lookup.StringLookupFactory; - -/** - * This class is responsible for building the filename template based on user input, see - * file_name_pattern in the specification of connector currently supported only S3 staging. - */ -public class S3FilenameTemplateManager { - - private static final String UTC = "UTC"; - private final StringSubstitutor stringSubstitutor; - - public S3FilenameTemplateManager() { - stringSubstitutor = new StringSubstitutor(); - } - - public String applyPatternToFilename(final S3FilenameTemplateParameterObject parameterObject) - throws IOException { - // sanitize fileFormat - final String sanitizedFileFormat = parameterObject - .getFileNamePattern() - .trim() - .replaceAll(" ", "_"); - - stringSubstitutor.setVariableResolver( - StringLookupFactory.INSTANCE.mapStringLookup(fillTheMapWithDefaultPlaceHolders(sanitizedFileFormat, parameterObject))); - stringSubstitutor.setVariablePrefix("{"); - stringSubstitutor.setVariableSuffix("}"); - return ofNullable(parameterObject.getObjectPath()).orElse(EMPTY) + stringSubstitutor.replace(sanitizedFileFormat); - } - - private Map fillTheMapWithDefaultPlaceHolders(final String stringToReplaceWithPlaceholder, - final S3FilenameTemplateParameterObject parameterObject) { - - final long currentTimeMillis = Instant.now().toEpochMilli(); - - final Map valuesMap = processExtendedPlaceholder(currentTimeMillis, stringToReplaceWithPlaceholder); - - final DateFormat defaultDateFormat = new SimpleDateFormat(S3DestinationConstants.YYYY_MM_DD_FORMAT_STRING); - defaultDateFormat.setTimeZone(TimeZone.getTimeZone(UTC)); - - // here we set default values for supported placeholders. - valuesMap.put("date", ofNullable(defaultDateFormat.format(currentTimeMillis)).orElse(EMPTY)); - valuesMap.put("timestamp", ofNullable(String.valueOf(currentTimeMillis)).orElse(EMPTY)); - valuesMap.put("sync_id", ofNullable(System.getenv("WORKER_JOB_ID")).orElse(EMPTY)); - valuesMap.put("format_extension", ofNullable(parameterObject.getFileExtension()).orElse(EMPTY)); - valuesMap.put("part_number", ofNullable(parameterObject.getPartId()).orElse(EMPTY)); - - return valuesMap; - } - - /** - * By extended placeholders we assume next types: {date:yyyy_MM}, {timestamp:millis}, - * {timestamp:micro}, etc Limited combinations are supported by the method see the method body. - * - * @param stringToReplaceWithPlaceholder - string where the method will search for extended - * placeholders - * @return map with prepared placeholders. - */ - private Map processExtendedPlaceholder(final long currentTimeMillis, final String stringToReplaceWithPlaceholder) { - final Map valuesMap = new HashMap<>(); - - final Pattern pattern = Pattern.compile("\\{(date:.+?|timestamp:.+?)\\}"); - final Matcher matcher = pattern.matcher(stringToReplaceWithPlaceholder); - - while (matcher.find()) { - final String[] splitByColon = matcher.group(1).split(":"); - switch (splitByColon[0].toLowerCase(Locale.ROOT)) { - case "date" -> { - final DateFormat dateFormat = new SimpleDateFormat(splitByColon[1]); - dateFormat.setTimeZone(TimeZone.getTimeZone("UTC")); - valuesMap.put(matcher.group(1), dateFormat.format(currentTimeMillis)); - } - case "timestamp" -> { - switch (splitByColon[1]) { - case "millis" -> { - valuesMap.put(matcher.group(1), String.valueOf(currentTimeMillis)); - } - case "micro" -> { - valuesMap.put(matcher.group(1), String.valueOf(convertToMicrosecondsRepresentation(currentTimeMillis))); - } - } - } - } - } - return valuesMap; - } - - private long convertToMicrosecondsRepresentation(final long milliSeconds) { - // The time representation in microseconds is equal to the milliseconds multiplied by 1,000. - return milliSeconds * 1000; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/template/S3FilenameTemplateParameterObject.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/template/S3FilenameTemplateParameterObject.java deleted file mode 100644 index ca270a9f91b4f..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/template/S3FilenameTemplateParameterObject.java +++ /dev/null @@ -1,172 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.template; - -import io.airbyte.cdk.integrations.destination.record_buffer.SerializableBuffer; -import io.airbyte.cdk.integrations.destination.s3.S3Format; -import java.sql.Timestamp; -import java.util.Objects; - -/** - * This class is used as argument holder S3FilenameTemplateManager.class - * - * @see S3FilenameTemplateManager#applyPatternToFilename(S3FilenameTemplateParameterObject) - */ -public class S3FilenameTemplateParameterObject { - - private final String objectPath; - private final SerializableBuffer recordsData; - private final String fileNamePattern; - private final String fileExtension; - private final String partId; - private final S3Format s3Format; - private final Timestamp timestamp; - private final String customSuffix; - - S3FilenameTemplateParameterObject(String objectPath, - SerializableBuffer recordsData, - String fileNamePattern, - String fileExtension, - String partId, - S3Format s3Format, - Timestamp timestamp, - String customSuffix) { - this.objectPath = objectPath; - this.recordsData = recordsData; - this.fileNamePattern = fileNamePattern; - this.fileExtension = fileExtension; - this.partId = partId; - this.s3Format = s3Format; - this.timestamp = timestamp; - this.customSuffix = customSuffix; - } - - public String getObjectPath() { - return objectPath; - } - - public SerializableBuffer getRecordsData() { - return recordsData; - } - - public String getFileNamePattern() { - return fileNamePattern; - } - - public String getFileExtension() { - return fileExtension; - } - - public String getPartId() { - return partId; - } - - public S3Format getS3Format() { - return s3Format; - } - - public Timestamp getTimestamp() { - return timestamp; - } - - public String getCustomSuffix() { - return customSuffix; - } - - public static S3FilenameTemplateParameterObjectBuilder builder() { - return new S3FilenameTemplateParameterObjectBuilder(); - } - - public static class S3FilenameTemplateParameterObjectBuilder { - - private String objectPath; - private SerializableBuffer recordsData; - private String fileNamePattern; - private String fileExtension; - private String partId; - private S3Format s3Format; - private Timestamp timestamp; - private String customSuffix; - - S3FilenameTemplateParameterObjectBuilder() {} - - public S3FilenameTemplateParameterObjectBuilder objectPath(String objectPath) { - this.objectPath = objectPath; - return this; - } - - public S3FilenameTemplateParameterObjectBuilder recordsData(SerializableBuffer recordsData) { - this.recordsData = recordsData; - return this; - } - - public S3FilenameTemplateParameterObjectBuilder fileNamePattern(String fileNamePattern) { - this.fileNamePattern = fileNamePattern; - return this; - } - - public S3FilenameTemplateParameterObjectBuilder fileExtension(String fileExtension) { - this.fileExtension = fileExtension; - return this; - } - - public S3FilenameTemplateParameterObjectBuilder partId(String partId) { - this.partId = partId; - return this; - } - - public S3FilenameTemplateParameterObjectBuilder s3Format(S3Format s3Format) { - this.s3Format = s3Format; - return this; - } - - public S3FilenameTemplateParameterObjectBuilder timestamp(Timestamp timestamp) { - this.timestamp = timestamp; - return this; - } - - public S3FilenameTemplateParameterObjectBuilder customSuffix(String customSuffix) { - this.customSuffix = customSuffix; - return this; - } - - public S3FilenameTemplateParameterObject build() { - return new S3FilenameTemplateParameterObject(objectPath, recordsData, fileNamePattern, fileExtension, partId, s3Format, timestamp, - customSuffix); - } - - public String toString() { - return "S3FilenameTemplateParameterObject.S3FilenameTemplateParameterObjectBuilder(objectPath=" + this.objectPath + ", recordsData=" - + this.recordsData + ", fileNamePattern=" - + this.fileNamePattern + ", fileExtension=" + this.fileExtension + ", partId=" + this.partId + ", s3Format=" + this.s3Format - + ", timestamp=" + this.timestamp + ", customSuffix=" - + this.customSuffix + ")"; - } - - } - - @Override - public boolean equals(final Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - final S3FilenameTemplateParameterObject that = (S3FilenameTemplateParameterObject) o; - return Objects.equals(objectPath, that.objectPath) && Objects.equals(recordsData, that.recordsData) - && Objects.equals(fileNamePattern, that.fileNamePattern) - && Objects.equals(fileExtension, that.fileExtension) && Objects.equals(partId, that.partId) && s3Format == that.s3Format - && Objects.equals(timestamp, - that.timestamp) - && Objects.equals(customSuffix, that.customSuffix); - } - - @Override - public int hashCode() { - return Objects.hash(objectPath, recordsData, fileNamePattern, fileExtension, partId, s3Format, timestamp, customSuffix); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/util/AvroRecordHelper.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/util/AvroRecordHelper.java deleted file mode 100644 index 5b24b92ace719..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/util/AvroRecordHelper.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.util; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import io.airbyte.cdk.integrations.base.JavaBaseConstants; -import io.airbyte.cdk.integrations.destination.s3.avro.JsonFieldNameUpdater; -import io.airbyte.cdk.integrations.destination.s3.avro.JsonToAvroSchemaConverter; -import io.airbyte.commons.util.MoreIterators; - -/** - * Helper methods for unit tests. This is needed by multiple modules, so it is in the src directory. - */ -public class AvroRecordHelper { - - public static JsonFieldNameUpdater getFieldNameUpdater(final String streamName, final String namespace, final JsonNode streamSchema) { - final JsonToAvroSchemaConverter schemaConverter = new JsonToAvroSchemaConverter(); - schemaConverter.getAvroSchema(streamSchema, streamName, namespace); - return new JsonFieldNameUpdater(schemaConverter.getStandardizedNames()); - } - - /** - * Convert an Airbyte JsonNode from Avro / Parquet Record to a plain one. - *

      - *
    • Remove the airbyte id and emission timestamp fields.
    • - *
    • Remove null fields that must exist in Parquet but does not in original Json. This function - * mutates the input Json.
    • - *
    - */ - public static JsonNode pruneAirbyteJson(final JsonNode input) { - final ObjectNode output = (ObjectNode) input; - - // Remove Airbyte columns. - output.remove(JavaBaseConstants.COLUMN_NAME_AB_ID); - output.remove(JavaBaseConstants.COLUMN_NAME_EMITTED_AT); - - // Fields with null values does not exist in the original Json but only in Parquet. - for (final String field : MoreIterators.toList(output.fieldNames())) { - if (output.get(field) == null || output.get(field).isNull()) { - output.remove(field); - } - } - - return output; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/util/CompressionType.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/util/CompressionType.java deleted file mode 100644 index 8fc79df5a74ef..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/util/CompressionType.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.util; - -public enum CompressionType { - - NO_COMPRESSION(""), - GZIP(".gz"); - - private final String fileExtension; - - CompressionType(final String fileExtension) { - this.fileExtension = fileExtension; - } - - public String getFileExtension() { - return fileExtension; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/util/CompressionTypeHelper.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/util/CompressionTypeHelper.java deleted file mode 100644 index 0963a1ff63a38..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/util/CompressionTypeHelper.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.util; - -import static io.airbyte.cdk.integrations.destination.s3.S3DestinationConstants.COMPRESSION_TYPE_ARG_NAME; -import static io.airbyte.cdk.integrations.destination.s3.S3DestinationConstants.DEFAULT_COMPRESSION_TYPE; - -import com.fasterxml.jackson.databind.JsonNode; - -public class CompressionTypeHelper { - - private CompressionTypeHelper() {} - - /** - * Sample expected input: { "compression_type": "No Compression" } - */ - public static CompressionType parseCompressionType(final JsonNode compressionConfig) { - if (compressionConfig == null || compressionConfig.isNull()) { - return DEFAULT_COMPRESSION_TYPE; - } - final String compressionType = compressionConfig.get(COMPRESSION_TYPE_ARG_NAME).asText(); - if (compressionType.toUpperCase().equals(CompressionType.GZIP.name())) { - return CompressionType.GZIP; - } else { - return CompressionType.NO_COMPRESSION; - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/util/Flattening.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/util/Flattening.java deleted file mode 100644 index 57248ef4f1da3..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/util/Flattening.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.util; - -import com.fasterxml.jackson.annotation.JsonCreator; - -public enum Flattening { - - NO("No flattening"), - ROOT_LEVEL("Root level flattening"); - - private final String value; - - Flattening(final String value) { - this.value = value; - } - - public String getValue() { - return value; - } - - @JsonCreator - public static Flattening fromValue(final String value) { - for (final Flattening f : Flattening.values()) { - if (f.getValue().equalsIgnoreCase(value)) { - return f; - } - } - throw new IllegalArgumentException("Unexpected value: " + value); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/util/JavaProcessRunner.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/util/JavaProcessRunner.java deleted file mode 100644 index 4c6e92fd647e6..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/util/JavaProcessRunner.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.util; - -import io.airbyte.commons.io.LineGobbler; -import java.io.File; -import java.io.IOException; -import java.util.Arrays; -import java.util.concurrent.TimeUnit; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class JavaProcessRunner { - - private static final Logger LOGGER = LoggerFactory.getLogger(JavaProcessRunner.class); - - public static void runProcess(final String path, final Runtime run, final String... commands) throws IOException, InterruptedException { - LOGGER.info("Running process: " + Arrays.asList(commands)); - final Process pr = path.equals(System.getProperty("user.dir")) ? run.exec(commands) : run.exec(commands, null, new File(path)); - LineGobbler.gobble(pr.getErrorStream(), LOGGER::error); - LineGobbler.gobble(pr.getInputStream(), LOGGER::info); - if (!pr.waitFor(10, TimeUnit.MINUTES)) { - pr.destroy(); - throw new RuntimeException("Timeout while executing: " + Arrays.toString(commands)); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/util/S3NameTransformer.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/util/S3NameTransformer.java deleted file mode 100644 index 96d5377a2f45e..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/util/S3NameTransformer.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.util; - -import io.airbyte.cdk.integrations.destination.StandardNameTransformer; -import java.text.Normalizer; -import java.util.regex.Pattern; - -public class S3NameTransformer extends StandardNameTransformer { - - // see https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-keys.html - private static final String S3_SAFE_CHARACTERS = "\\p{Alnum}/!_.*')("; - private static final String S3_SPECIAL_CHARACTERS = "&$@=;:+,?-"; - private static final String S3_CHARACTER_PATTERN = "[^" + S3_SAFE_CHARACTERS + Pattern.quote(S3_SPECIAL_CHARACTERS) + "]"; - - @Override - public String convertStreamName(final String input) { - return Normalizer.normalize(input, Normalizer.Form.NFKD) - .replaceAll("\\p{M}", "") // P{M} matches a code point that is not a combining mark (unicode) - .replaceAll(S3_CHARACTER_PATTERN, "_"); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/util/S3OutputPathHelper.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/util/S3OutputPathHelper.java deleted file mode 100644 index 147bcd577b092..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/util/S3OutputPathHelper.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.util; - -import static io.airbyte.cdk.integrations.destination.s3.S3DestinationConstants.NAME_TRANSFORMER; - -import io.airbyte.protocol.models.v0.AirbyteStream; -import java.util.LinkedList; -import java.util.List; - -public class S3OutputPathHelper { - - public static String getOutputPrefix(final String bucketPath, final AirbyteStream stream) { - return getOutputPrefix(bucketPath, stream.getNamespace(), stream.getName()); - } - - /** - * Prefix: <bucket-path>/<source-namespace-if-present>/<stream-name> - */ - // Prefix: // - public static String getOutputPrefix(final String bucketPath, final String namespace, final String streamName) { - final List paths = new LinkedList<>(); - - if (bucketPath != null) { - paths.add(bucketPath); - } - if (namespace != null) { - paths.add(NAME_TRANSFORMER.convertStreamName(namespace)); - } - paths.add(NAME_TRANSFORMER.convertStreamName(streamName)); - - return String.join("/", paths).replaceAll("/+", "/"); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/util/StreamTransferManagerFactory.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/util/StreamTransferManagerFactory.java deleted file mode 100644 index 3738b0a416aaa..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/util/StreamTransferManagerFactory.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.util; - -import alex.mojaki.s3upload.StreamTransferManager; -import com.amazonaws.services.s3.AmazonS3; -import java.util.Collections; -import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class StreamTransferManagerFactory { - - protected static final Logger LOGGER = LoggerFactory.getLogger(StreamTransferManagerFactory.class); - - // See this doc about how they affect memory usage: - // https://alexmojaki.github.io/s3-stream-upload/javadoc/apidocs/alex/mojaki/s3upload/StreamTransferManager.html - // Total memory = (numUploadThreads + queueCapacity) * partSize + numStreams * (partSize + 6MB) - // = 31 MB at current configurations - public static final int DEFAULT_UPLOAD_THREADS = 2; - public static final int DEFAULT_QUEUE_CAPACITY = 2; - public static final int DEFAULT_PART_SIZE_MB = 5; - // MAX object size for AWS and GCS is 5TB (max allowed 10,000 parts*525mb) - // (https://aws.amazon.com/s3/faqs/, https://cloud.google.com/storage/quotas) - public static final int MAX_ALLOWED_PART_SIZE_MB = 525; - public static final int DEFAULT_NUM_STREAMS = 1; - - public static Builder create(final String bucketName, - final String objectKey, - final AmazonS3 s3Client) { - return new Builder(bucketName, objectKey, s3Client); - } - - public static class Builder { - - private final String bucketName; - private final String objectKey; - private final AmazonS3 s3Client; - private Map userMetadata; - private long partSize = DEFAULT_PART_SIZE_MB; - - private Builder(final String bucketName, - final String objectKey, - final AmazonS3 s3Client) { - this.bucketName = bucketName; - this.objectKey = objectKey; - this.s3Client = s3Client; - } - - public Builder setPartSize(final Long partSize) { - if (partSize == null) { - this.partSize = DEFAULT_PART_SIZE_MB; - } else if (partSize < DEFAULT_PART_SIZE_MB) { - LOGGER.warn("Part size {} is smaller than the minimum allowed, default to {}", partSize, DEFAULT_PART_SIZE_MB); - this.partSize = DEFAULT_PART_SIZE_MB; - } else if (partSize > MAX_ALLOWED_PART_SIZE_MB) { - LOGGER.warn("Part size {} is larger than the maximum allowed, default to {}", partSize, MAX_ALLOWED_PART_SIZE_MB); - this.partSize = MAX_ALLOWED_PART_SIZE_MB; - } else { - this.partSize = partSize; - } - return this; - } - - public Builder setUserMetadata(final Map userMetadata) { - this.userMetadata = userMetadata; - return this; - } - - public StreamTransferManager get() { - if (userMetadata == null) { - userMetadata = Collections.emptyMap(); - } - return new StreamTransferManagerWithMetadata(bucketName, objectKey, s3Client, userMetadata) - .numStreams(DEFAULT_NUM_STREAMS) - .queueCapacity(DEFAULT_QUEUE_CAPACITY) - .numUploadThreads(DEFAULT_UPLOAD_THREADS) - .partSize(partSize); - } - - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/util/StreamTransferManagerWithMetadata.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/util/StreamTransferManagerWithMetadata.java deleted file mode 100644 index 968a35a3a9e9c..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/util/StreamTransferManagerWithMetadata.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.util; - -import alex.mojaki.s3upload.StreamTransferManager; -import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.model.InitiateMultipartUploadRequest; -import com.amazonaws.services.s3.model.ObjectMetadata; -import java.util.Map; - -/** - * A custom stream transfer manager which overwrites the metadata on the - * InitiateMultipartUploadRequest. - *

    - * This is, apparently, the correct way to implement this functionality. - * https://github.com/alexmojaki/s3-stream-upload/issues/3 - */ -public class StreamTransferManagerWithMetadata extends StreamTransferManager { - - private final Map userMetadata; - - public StreamTransferManagerWithMetadata(final String bucketName, - final String putKey, - final AmazonS3 s3Client, - final Map userMetadata) { - super(bucketName, putKey, s3Client); - this.userMetadata = userMetadata; - } - - @Override - public void customiseInitiateRequest(final InitiateMultipartUploadRequest request) { - if (userMetadata != null) { - ObjectMetadata objectMetadata = request.getObjectMetadata(); - if (objectMetadata == null) { - objectMetadata = new ObjectMetadata(); - } - objectMetadata.setUserMetadata(userMetadata); - request.setObjectMetadata(objectMetadata); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/writer/BaseS3Writer.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/writer/BaseS3Writer.java deleted file mode 100644 index 97426c9043f5f..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/writer/BaseS3Writer.java +++ /dev/null @@ -1,163 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.writer; - -import static org.apache.commons.lang3.StringUtils.isNotBlank; - -import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.model.DeleteObjectsRequest; -import com.amazonaws.services.s3.model.DeleteObjectsRequest.KeyVersion; -import com.amazonaws.services.s3.model.DeleteObjectsResult; -import com.amazonaws.services.s3.model.S3ObjectSummary; -import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfig; -import io.airbyte.cdk.integrations.destination.s3.S3DestinationConstants; -import io.airbyte.cdk.integrations.destination.s3.template.S3FilenameTemplateManager; -import io.airbyte.cdk.integrations.destination.s3.template.S3FilenameTemplateParameterObject; -import io.airbyte.cdk.integrations.destination.s3.util.S3OutputPathHelper; -import io.airbyte.protocol.models.v0.AirbyteStream; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.v0.DestinationSyncMode; -import java.io.IOException; -import java.text.DateFormat; -import java.text.SimpleDateFormat; -import java.util.LinkedList; -import java.util.List; -import java.util.TimeZone; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * The base implementation takes care of the following: - *

      - *
    • Create shared instance variables.
    • - *
    • Create the bucket and prepare the bucket path.
    • - *
    • Log and close the write.
    • - *
    - */ -public abstract class BaseS3Writer implements DestinationFileWriter { - - private static final Logger LOGGER = LoggerFactory.getLogger(BaseS3Writer.class); - - private static final S3FilenameTemplateManager s3FilenameTemplateManager = new S3FilenameTemplateManager(); - private static final String DEFAULT_SUFFIX = "_0"; - - protected final S3DestinationConfig config; - protected final AmazonS3 s3Client; - protected final AirbyteStream stream; - protected final DestinationSyncMode syncMode; - protected final String outputPrefix; - - protected BaseS3Writer(final S3DestinationConfig config, - final AmazonS3 s3Client, - final ConfiguredAirbyteStream configuredStream) { - this.config = config; - this.s3Client = s3Client; - this.stream = configuredStream.getStream(); - this.syncMode = configuredStream.getDestinationSyncMode(); - this.outputPrefix = S3OutputPathHelper.getOutputPrefix(config.getBucketPath(), stream); - } - - public String getOutputPrefix() { - return outputPrefix; - } - - /** - *
      - *
    • 1. Create bucket if necessary.
    • - *
    • 2. Under OVERWRITE mode, delete all objects with the output prefix.
    • - *
    - */ - @Override - public void initialize() throws IOException { - try { - final String bucket = config.getBucketName(); - if (!s3Client.doesBucketExistV2(bucket)) { - LOGGER.info("Bucket {} does not exist; creating...", bucket); - s3Client.createBucket(bucket); - LOGGER.info("Bucket {} has been created.", bucket); - } - - if (syncMode == DestinationSyncMode.OVERWRITE) { - LOGGER.info("Overwrite mode"); - final List keysToDelete = new LinkedList<>(); - final List objects = s3Client.listObjects(bucket, outputPrefix) - .getObjectSummaries(); - for (final S3ObjectSummary object : objects) { - keysToDelete.add(new KeyVersion(object.getKey())); - } - - if (keysToDelete.size() > 0) { - LOGGER.info("Purging non-empty output path for stream '{}' under OVERWRITE mode...", - stream.getName()); - final DeleteObjectsResult result = s3Client - .deleteObjects(new DeleteObjectsRequest(bucket).withKeys(keysToDelete)); - LOGGER.info("Deleted {} file(s) for stream '{}'.", result.getDeletedObjects().size(), - stream.getName()); - } - } - } catch (Exception e) { - LOGGER.error("Failed to initialize: ", e); - closeWhenFail(); - throw e; - } - } - - /** - * Log and close the write. - */ - @Override - public void close(final boolean hasFailed) throws IOException { - if (hasFailed) { - LOGGER.warn("Failure detected. Aborting upload of stream '{}'...", stream.getName()); - closeWhenFail(); - LOGGER.warn("Upload of stream '{}' aborted.", stream.getName()); - } else { - LOGGER.info("Uploading remaining data for stream '{}'.", stream.getName()); - closeWhenSucceed(); - LOGGER.info("Upload completed for stream '{}'.", stream.getName()); - } - } - - /** - * Operations that will run when the write succeeds. - */ - protected void closeWhenSucceed() throws IOException { - // Do nothing by default - } - - /** - * Operations that will run when the write fails. - */ - protected void closeWhenFail() throws IOException { - // Do nothing by default - } - - public static String determineOutputFilename(final S3FilenameTemplateParameterObject parameterObject) - throws IOException { - return isNotBlank(parameterObject.getFileNamePattern()) ? getOutputFilename(parameterObject) : getDefaultOutputFilename(parameterObject); - } - - /** - * @param parameterObject - an object which holds all necessary parameters required for default - * filename creation. - * @return A string in the format "{upload-date}_{upload-millis}_{suffix}.{format-extension}". For - * example, "2021_12_09_1639077474000_customSuffix.csv" - */ - private static String getDefaultOutputFilename(final S3FilenameTemplateParameterObject parameterObject) { - final DateFormat formatter = new SimpleDateFormat(S3DestinationConstants.YYYY_MM_DD_FORMAT_STRING); - formatter.setTimeZone(TimeZone.getTimeZone("UTC")); - return String.format( - "%s_%d%s.%s", - formatter.format(parameterObject.getTimestamp()), - parameterObject.getTimestamp().getTime(), - null == parameterObject.getCustomSuffix() ? DEFAULT_SUFFIX : parameterObject.getCustomSuffix(), - parameterObject.getS3Format().getFileExtension()); - } - - private static String getOutputFilename(final S3FilenameTemplateParameterObject parameterObject) throws IOException { - return s3FilenameTemplateManager.applyPatternToFilename(parameterObject); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/writer/DestinationFileWriter.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/writer/DestinationFileWriter.java deleted file mode 100644 index 8006127a21bb0..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/writer/DestinationFileWriter.java +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.writer; - -import io.airbyte.cdk.integrations.destination.s3.S3Format; - -public interface DestinationFileWriter extends DestinationWriter { - - String getFileLocation(); - - S3Format getFileFormat(); - - String getOutputPath(); - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/writer/DestinationWriter.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/writer/DestinationWriter.java deleted file mode 100644 index 20fa6b926c73a..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/writer/DestinationWriter.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.writer; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.protocol.models.Jsons; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import java.io.IOException; -import java.util.UUID; - -/** - * {@link DestinationWriter} is responsible for writing Airbyte stream data to an S3 location in a - * specific format. - */ -public interface DestinationWriter { - - /** - * Prepare an S3 writer for the stream. - */ - void initialize() throws IOException; - - /** - * Write an Airbyte record message to an S3 object. - */ - void write(UUID id, AirbyteRecordMessage recordMessage) throws IOException; - - void write(JsonNode formattedData) throws IOException; - - default void write(String formattedData) throws IOException { - write(Jsons.deserialize(formattedData)); - } - - /** - * Close the S3 writer for the stream. - */ - void close(boolean hasFailed) throws IOException; - - default void closeAfterPush() throws IOException { - close(false); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/writer/ProductionWriterFactory.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/writer/ProductionWriterFactory.java deleted file mode 100644 index afe16c1379784..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/writer/ProductionWriterFactory.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.writer; - -import com.amazonaws.services.s3.AmazonS3; -import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfig; -import io.airbyte.cdk.integrations.destination.s3.S3Format; -import io.airbyte.cdk.integrations.destination.s3.avro.AvroConstants; -import io.airbyte.cdk.integrations.destination.s3.avro.JsonToAvroSchemaConverter; -import io.airbyte.cdk.integrations.destination.s3.avro.S3AvroWriter; -import io.airbyte.cdk.integrations.destination.s3.csv.S3CsvWriter; -import io.airbyte.cdk.integrations.destination.s3.jsonl.S3JsonlWriter; -import io.airbyte.cdk.integrations.destination.s3.parquet.S3ParquetWriter; -import io.airbyte.protocol.models.v0.AirbyteStream; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import java.sql.Timestamp; -import org.apache.avro.Schema; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class ProductionWriterFactory implements S3WriterFactory { - - protected static final Logger LOGGER = LoggerFactory.getLogger(ProductionWriterFactory.class); - - @Override - public DestinationFileWriter create(final S3DestinationConfig config, - final AmazonS3 s3Client, - final ConfiguredAirbyteStream configuredStream, - final Timestamp uploadTimestamp) - throws Exception { - final S3Format format = config.getFormatConfig().getFormat(); - - if (format == S3Format.AVRO || format == S3Format.PARQUET) { - final AirbyteStream stream = configuredStream.getStream(); - LOGGER.info("Json schema for stream {}: {}", stream.getName(), stream.getJsonSchema()); - - final JsonToAvroSchemaConverter schemaConverter = new JsonToAvroSchemaConverter(); - final Schema avroSchema = schemaConverter.getAvroSchema(stream.getJsonSchema(), stream.getName(), stream.getNamespace()); - - LOGGER.info("Avro schema for stream {}: {}", stream.getName(), avroSchema.toString(false)); - - if (format == S3Format.AVRO) { - return new S3AvroWriter(config, s3Client, configuredStream, uploadTimestamp, avroSchema, AvroConstants.JSON_CONVERTER); - } else { - return new S3ParquetWriter(config, s3Client, configuredStream, uploadTimestamp, avroSchema, AvroConstants.JSON_CONVERTER); - } - } - - if (format == S3Format.CSV) { - return new S3CsvWriter.Builder(config, s3Client, configuredStream, uploadTimestamp).build(); - } - - if (format == S3Format.JSONL) { - return new S3JsonlWriter(config, s3Client, configuredStream, uploadTimestamp); - } - - throw new RuntimeException("Unexpected S3 destination format: " + format); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/writer/S3WriterFactory.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/writer/S3WriterFactory.java deleted file mode 100644 index 1855f1dd0e245..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/writer/S3WriterFactory.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.writer; - -import com.amazonaws.services.s3.AmazonS3; -import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfig; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import java.sql.Timestamp; - -/** - * Create different {@link DestinationFileWriter} based on {@link S3DestinationConfig}. - */ -public interface S3WriterFactory { - - DestinationFileWriter create(S3DestinationConfig config, - AmazonS3 s3Client, - ConfiguredAirbyteStream configuredStream, - Timestamp uploadTimestamp) - throws Exception; - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/staging/AsyncFlush.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/staging/AsyncFlush.java deleted file mode 100644 index 0ad0363678371..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/staging/AsyncFlush.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.staging; - -import io.airbyte.cdk.db.jdbc.JdbcDatabase; -import io.airbyte.cdk.integrations.destination.jdbc.WriteConfig; -import io.airbyte.cdk.integrations.destination.record_buffer.FileBuffer; -import io.airbyte.cdk.integrations.destination.s3.csv.CsvSerializedBuffer; -import io.airbyte.cdk.integrations.destination.s3.csv.StagingDatabaseCsvSheetGenerator; -import io.airbyte.cdk.integrations.destination_async.DestinationFlushFunction; -import io.airbyte.cdk.integrations.destination_async.partial_messages.PartialAirbyteMessage; -import io.airbyte.commons.json.Jsons; -import io.airbyte.integrations.base.destination.typing_deduping.TypeAndDedupeOperationValve; -import io.airbyte.integrations.base.destination.typing_deduping.TyperDeduper; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.util.List; -import java.util.Map; -import java.util.stream.Stream; -import lombok.extern.slf4j.Slf4j; -import org.apache.commons.io.FileUtils; - -/** - * Async flushing logic. Flushing async prevents backpressure and is the superior flushing strategy. - */ -@Slf4j -class AsyncFlush implements DestinationFlushFunction { - - private final Map streamDescToWriteConfig; - private final StagingOperations stagingOperations; - private final JdbcDatabase database; - private final ConfiguredAirbyteCatalog catalog; - private final TypeAndDedupeOperationValve typerDeduperValve; - private final TyperDeduper typerDeduper; - private final long optimalBatchSizeBytes; - private final boolean useDestinationsV2Columns; - - public AsyncFlush(final Map streamDescToWriteConfig, - final StagingOperations stagingOperations, - final JdbcDatabase database, - final ConfiguredAirbyteCatalog catalog, - final TypeAndDedupeOperationValve typerDeduperValve, - final TyperDeduper typerDeduper, - // In general, this size is chosen to improve the performance of lower memory connectors. With 1 Gi - // of - // resource the connector will usually at most fill up around 150 MB in a single queue. By lowering - // the batch size, the AsyncFlusher will flush in smaller batches which allows for memory to be - // freed earlier similar to a sliding window effect - final long optimalBatchSizeBytes, - final boolean useDestinationsV2Columns) { - this.streamDescToWriteConfig = streamDescToWriteConfig; - this.stagingOperations = stagingOperations; - this.database = database; - this.catalog = catalog; - this.typerDeduperValve = typerDeduperValve; - this.typerDeduper = typerDeduper; - this.optimalBatchSizeBytes = optimalBatchSizeBytes; - this.useDestinationsV2Columns = useDestinationsV2Columns; - } - - @Override - public void flush(final StreamDescriptor decs, final Stream stream) throws Exception { - final CsvSerializedBuffer writer; - try { - writer = new CsvSerializedBuffer( - new FileBuffer(CsvSerializedBuffer.CSV_GZ_SUFFIX), - new StagingDatabaseCsvSheetGenerator(useDestinationsV2Columns), - true); - - // reassign as lambdas require references to be final. - stream.forEach(record -> { - try { - // todo (cgardens) - most writers just go ahead and re-serialize the contents of the record message. - // we should either just pass the raw string or at least have a way to do that and create a default - // impl that maintains backwards compatible behavior. - writer.accept(record.getSerialized(), record.getRecord().getEmittedAt()); - } catch (final Exception e) { - throw new RuntimeException(e); - } - }); - } catch (final Exception e) { - throw new RuntimeException(e); - } - - writer.flush(); - log.info("Flushing CSV buffer for stream {} ({}) to staging", decs.getName(), FileUtils.byteCountToDisplaySize(writer.getByteCount())); - if (!streamDescToWriteConfig.containsKey(decs)) { - throw new IllegalArgumentException( - String.format("Message contained record from a stream that was not in the catalog. \ncatalog: %s", Jsons.serialize(catalog))); - } - - final WriteConfig writeConfig = streamDescToWriteConfig.get(decs); - final String schemaName = writeConfig.getOutputSchemaName(); - final String stageName = stagingOperations.getStageName(schemaName, writeConfig.getOutputTableName()); - final String stagingPath = - stagingOperations.getStagingPath( - GeneralStagingFunctions.RANDOM_CONNECTION_ID, - schemaName, - writeConfig.getStreamName(), - writeConfig.getOutputTableName(), - writeConfig.getWriteDatetime()); - try { - final String stagedFile = stagingOperations.uploadRecordsToStage(database, writer, schemaName, stageName, stagingPath); - GeneralStagingFunctions.copyIntoTableFromStage( - database, - stageName, - stagingPath, - List.of(stagedFile), - writeConfig.getOutputTableName(), - schemaName, - stagingOperations, - writeConfig.getNamespace(), - writeConfig.getStreamName(), - typerDeduperValve, - typerDeduper); - } catch (final Exception e) { - log.error("Failed to flush and commit buffer data into destination's raw table", e); - throw new RuntimeException("Failed to upload buffer to stage and commit to destination", e); - } - - writer.close(); - } - - @Override - public long getOptimalBatchSizeBytes() { - return optimalBatchSizeBytes; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/staging/StagingConsumerFactory.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/staging/StagingConsumerFactory.java deleted file mode 100644 index d064af037f93f..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/staging/StagingConsumerFactory.java +++ /dev/null @@ -1,307 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.staging; - -import static io.airbyte.cdk.integrations.destination_async.buffers.BufferManager.MEMORY_LIMIT_RATIO; -import static java.util.stream.Collectors.joining; -import static java.util.stream.Collectors.toList; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.base.Preconditions; -import io.airbyte.cdk.db.jdbc.JdbcDatabase; -import io.airbyte.cdk.integrations.base.SerializedAirbyteMessageConsumer; -import io.airbyte.cdk.integrations.destination.NamingConventionTransformer; -import io.airbyte.cdk.integrations.destination.jdbc.WriteConfig; -import io.airbyte.cdk.integrations.destination_async.AsyncStreamConsumer; -import io.airbyte.cdk.integrations.destination_async.buffers.BufferManager; -import io.airbyte.commons.exceptions.ConfigErrorException; -import io.airbyte.integrations.base.destination.typing_deduping.ParsedCatalog; -import io.airbyte.integrations.base.destination.typing_deduping.StreamId; -import io.airbyte.integrations.base.destination.typing_deduping.TypeAndDedupeOperationValve; -import io.airbyte.integrations.base.destination.typing_deduping.TyperDeduper; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteStream; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.v0.DestinationSyncMode; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.time.Instant; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.function.Consumer; -import java.util.function.Function; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Uses both Factory and Consumer design pattern to create a single point of creation for consuming - * {@link AirbyteMessage} for processing - */ -public class StagingConsumerFactory extends SerialStagingConsumerFactory { - - private static final Logger LOGGER = LoggerFactory.getLogger(StagingConsumerFactory.class); - - private static final Instant SYNC_DATETIME = Instant.now(); - - private final Consumer outputRecordCollector; - private final JdbcDatabase database; - private final StagingOperations stagingOperations; - private final NamingConventionTransformer namingResolver; - private final JsonNode config; - private final ConfiguredAirbyteCatalog catalog; - private final boolean purgeStagingData; - private final TypeAndDedupeOperationValve typerDeduperValve; - private final TyperDeduper typerDeduper; - private final ParsedCatalog parsedCatalog; - private final String defaultNamespace; - private final boolean useDestinationsV2Columns; - - // Optional fields - private final Optional bufferMemoryLimit; - private final long optimalBatchSizeBytes; - - private StagingConsumerFactory( - final Consumer outputRecordCollector, - final JdbcDatabase database, - final StagingOperations stagingOperations, - final NamingConventionTransformer namingResolver, - final JsonNode config, - final ConfiguredAirbyteCatalog catalog, - final boolean purgeStagingData, - final TypeAndDedupeOperationValve typerDeduperValve, - final TyperDeduper typerDeduper, - final ParsedCatalog parsedCatalog, - final String defaultNamespace, - final boolean useDestinationsV2Columns, - final Optional bufferMemoryLimit, - final long optimalBatchSizeBytes) { - this.outputRecordCollector = outputRecordCollector; - this.database = database; - this.stagingOperations = stagingOperations; - this.namingResolver = namingResolver; - this.config = config; - this.catalog = catalog; - this.purgeStagingData = purgeStagingData; - this.typerDeduperValve = typerDeduperValve; - this.typerDeduper = typerDeduper; - this.parsedCatalog = parsedCatalog; - this.defaultNamespace = defaultNamespace; - this.useDestinationsV2Columns = useDestinationsV2Columns; - this.bufferMemoryLimit = bufferMemoryLimit; - this.optimalBatchSizeBytes = optimalBatchSizeBytes; - } - - public static class Builder { - - // Required (?) fields - // (TODO which of these are _actually_ required, and which have we just coincidentally always - // provided?) - private Consumer outputRecordCollector; - private JdbcDatabase database; - private StagingOperations stagingOperations; - private NamingConventionTransformer namingResolver; - private JsonNode config; - private ConfiguredAirbyteCatalog catalog; - private boolean purgeStagingData; - private TypeAndDedupeOperationValve typerDeduperValve; - private TyperDeduper typerDeduper; - private ParsedCatalog parsedCatalog; - private String defaultNamespace; - private boolean useDestinationsV2Columns; - - // Optional fields - private Optional bufferMemoryLimit = Optional.empty(); - private long optimalBatchSizeBytes = 50 * 1024 * 1024; - - private Builder() {} - - public Builder setBufferMemoryLimit(final Optional bufferMemoryLimit) { - this.bufferMemoryLimit = bufferMemoryLimit; - return this; - } - - public Builder setOptimalBatchSizeBytes(final long optimalBatchSizeBytes) { - this.optimalBatchSizeBytes = optimalBatchSizeBytes; - return this; - } - - public StagingConsumerFactory build() { - return new StagingConsumerFactory( - outputRecordCollector, - database, - stagingOperations, - namingResolver, - config, - catalog, - purgeStagingData, - typerDeduperValve, - typerDeduper, - parsedCatalog, - defaultNamespace, - useDestinationsV2Columns, - bufferMemoryLimit, - optimalBatchSizeBytes); - } - - } - - public static Builder builder( - final Consumer outputRecordCollector, - final JdbcDatabase database, - final StagingOperations stagingOperations, - final NamingConventionTransformer namingResolver, - final JsonNode config, - final ConfiguredAirbyteCatalog catalog, - final boolean purgeStagingData, - final TypeAndDedupeOperationValve typerDeduperValve, - final TyperDeduper typerDeduper, - final ParsedCatalog parsedCatalog, - final String defaultNamespace, - final boolean useDestinationsV2Columns) { - final Builder builder = new Builder(); - builder.outputRecordCollector = outputRecordCollector; - builder.database = database; - builder.stagingOperations = stagingOperations; - builder.namingResolver = namingResolver; - builder.config = config; - builder.catalog = catalog; - builder.purgeStagingData = purgeStagingData; - builder.typerDeduperValve = typerDeduperValve; - builder.typerDeduper = typerDeduper; - builder.parsedCatalog = parsedCatalog; - builder.defaultNamespace = defaultNamespace; - builder.useDestinationsV2Columns = useDestinationsV2Columns; - return builder; - } - - public SerializedAirbyteMessageConsumer createAsync() { - final List writeConfigs = createWriteConfigs(namingResolver, config, catalog, parsedCatalog, useDestinationsV2Columns); - final var streamDescToWriteConfig = streamDescToWriteConfig(writeConfigs); - final var flusher = new AsyncFlush( - streamDescToWriteConfig, - stagingOperations, - database, - catalog, - typerDeduperValve, - typerDeduper, - optimalBatchSizeBytes, - useDestinationsV2Columns); - return new AsyncStreamConsumer( - outputRecordCollector, - GeneralStagingFunctions.onStartFunction(database, stagingOperations, writeConfigs, typerDeduper), - // todo (cgardens) - wrapping the old close function to avoid more code churn. - (hasFailed, streamSyncSummaries) -> { - try { - GeneralStagingFunctions.onCloseFunction( - database, - stagingOperations, - writeConfigs, - purgeStagingData, - typerDeduper).accept(false, streamSyncSummaries); - } catch (final Exception e) { - throw new RuntimeException(e); - } - }, - flusher, - catalog, - new BufferManager(getMemoryLimit(bufferMemoryLimit)), - defaultNamespace); - } - - private static long getMemoryLimit(final Optional bufferMemoryLimit) { - return bufferMemoryLimit.orElse((long) (Runtime.getRuntime().maxMemory() * MEMORY_LIMIT_RATIO)); - } - - private static Map streamDescToWriteConfig(final List writeConfigs) { - final Set conflictingStreams = new HashSet<>(); - final Map streamDescToWriteConfig = new HashMap<>(); - for (final WriteConfig config : writeConfigs) { - final StreamDescriptor streamIdentifier = toStreamDescriptor(config); - if (streamDescToWriteConfig.containsKey(streamIdentifier)) { - conflictingStreams.add(config); - final WriteConfig existingConfig = streamDescToWriteConfig.get(streamIdentifier); - // The first conflicting stream won't have any problems, so we need to explicitly add it here. - conflictingStreams.add(existingConfig); - } else { - streamDescToWriteConfig.put(streamIdentifier, config); - } - } - if (!conflictingStreams.isEmpty()) { - final String message = String.format( - "You are trying to write multiple streams to the same table. Consider switching to a custom namespace format using ${SOURCE_NAMESPACE}, or moving one of them into a separate connection with a different stream prefix. Affected streams: %s", - conflictingStreams.stream().map(config -> config.getNamespace() + "." + config.getStreamName()).collect(joining(", "))); - throw new ConfigErrorException(message); - } - return streamDescToWriteConfig; - } - - private static StreamDescriptor toStreamDescriptor(final WriteConfig config) { - return new StreamDescriptor().withName(config.getStreamName()).withNamespace(config.getNamespace()); - } - - /** - * Creates a list of all {@link WriteConfig} for each stream within a - * {@link ConfiguredAirbyteCatalog}. Each write config represents the configuration settings for - * writing to a destination connector - * - * @param namingResolver {@link NamingConventionTransformer} used to transform names that are - * acceptable by each destination connector - * @param config destination connector configuration parameters - * @param catalog {@link ConfiguredAirbyteCatalog} collection of configured - * {@link ConfiguredAirbyteStream} - * @return list of all write configs for each stream in a {@link ConfiguredAirbyteCatalog} - */ - private static List createWriteConfigs(final NamingConventionTransformer namingResolver, - final JsonNode config, - final ConfiguredAirbyteCatalog catalog, - final ParsedCatalog parsedCatalog, - final boolean useDestinationsV2Columns) { - - return catalog.getStreams().stream().map(toWriteConfig(namingResolver, config, parsedCatalog, useDestinationsV2Columns)).collect(toList()); - } - - private static Function toWriteConfig(final NamingConventionTransformer namingResolver, - final JsonNode config, - final ParsedCatalog parsedCatalog, - final boolean useDestinationsV2Columns) { - return stream -> { - Preconditions.checkNotNull(stream.getDestinationSyncMode(), "Undefined destination sync mode"); - final AirbyteStream abStream = stream.getStream(); - final String streamName = abStream.getName(); - - final String outputSchema; - final String tableName; - if (useDestinationsV2Columns) { - final StreamId streamId = parsedCatalog.getStream(abStream.getNamespace(), streamName).id(); - outputSchema = streamId.rawNamespace(); - tableName = streamId.rawName(); - } else { - outputSchema = getOutputSchema(abStream, config.get("schema").asText(), namingResolver); - tableName = namingResolver.getRawTableName(streamName); - } - final String tmpTableName = namingResolver.getTmpTableName(streamName); - final DestinationSyncMode syncMode = stream.getDestinationSyncMode(); - - final WriteConfig writeConfig = - new WriteConfig(streamName, abStream.getNamespace(), outputSchema, tmpTableName, tableName, syncMode, SYNC_DATETIME); - LOGGER.info("Write config: {}", writeConfig); - - return writeConfig; - }; - } - - private static String getOutputSchema(final AirbyteStream stream, - final String defaultDestSchema, - final NamingConventionTransformer namingResolver) { - return stream.getNamespace() != null - ? namingResolver.getNamespace(stream.getNamespace()) - : namingResolver.getNamespace(defaultDestSchema); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/jdbc/copy/s3/S3CopyConfig.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/jdbc/copy/s3/S3CopyConfig.kt new file mode 100644 index 0000000000000..54127bc3e2b56 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/jdbc/copy/s3/S3CopyConfig.kt @@ -0,0 +1,33 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.jdbc.copy.s3 + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfig + +/** + * S3 copy destinations need an S3DestinationConfig to configure the basic upload behavior. We also + * want additional flags to configure behavior that only applies to the copy-to-S3 + + * load-into-warehouse portion. Currently this is just purgeStagingData, but this may expand. + */ +class S3CopyConfig(val purgeStagingData: Boolean, val s3Config: S3DestinationConfig) { + + companion object { + @JvmStatic + fun shouldPurgeStagingData(config: JsonNode): Boolean { + return if (config["purge_staging_data"] == null) { + true + } else { + config["purge_staging_data"].asBoolean() + } + } + + fun getS3CopyConfig(config: JsonNode): S3CopyConfig { + return S3CopyConfig( + shouldPurgeStagingData(config), + S3DestinationConfig.Companion.getS3DestinationConfig(config) + ) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/AesCbcEnvelopeEncryption.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/AesCbcEnvelopeEncryption.kt new file mode 100644 index 0000000000000..c615d645df0b7 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/AesCbcEnvelopeEncryption.kt @@ -0,0 +1,80 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3 + +import com.fasterxml.jackson.databind.JsonNode +import java.security.NoSuchAlgorithmException +import javax.annotation.Nonnull +import javax.crypto.KeyGenerator +import org.apache.commons.lang3.StringUtils + +/** + * @param key The key to use for encryption. + * @param keyType Where the key came from. + */ +@JvmRecord +data class AesCbcEnvelopeEncryption( + @field:Nonnull @param:Nonnull val key: ByteArray, + @field:Nonnull @param:Nonnull val keyType: KeyType +) : EncryptionConfig { + enum class KeyType { + EPHEMERAL, + USER_PROVIDED + } + + override fun equals(o: Any?): Boolean { + if (this === o) { + return true + } + if (o == null || javaClass != o.javaClass) { + return false + } + + val that = o as AesCbcEnvelopeEncryption + + if (!key.contentEquals(that.key)) { + return false + } + return keyType == that.keyType + } + + override fun hashCode(): Int { + var result = key.contentHashCode() + result = 31 * result + keyType.hashCode() + return result + } + + companion object { + fun fromJson(encryptionNode: JsonNode): AesCbcEnvelopeEncryption { + if (!encryptionNode.has("key_encrypting_key")) { + return encryptionWithRandomKey() + } + val kek = encryptionNode["key_encrypting_key"].asText() + return if (StringUtils.isEmpty(kek)) { + encryptionWithRandomKey() + } else { + AesCbcEnvelopeEncryption( + EncryptionConfig.Companion.BASE64_DECODER.decode(kek), + KeyType.USER_PROVIDED + ) + } + } + + private fun encryptionWithRandomKey(): AesCbcEnvelopeEncryption { + try { + val kekGenerator = + KeyGenerator.getInstance( + AesCbcEnvelopeEncryptionBlobDecorator.KEY_ENCRYPTING_ALGO + ) + kekGenerator.init(AesCbcEnvelopeEncryptionBlobDecorator.AES_KEY_SIZE_BITS) + return AesCbcEnvelopeEncryption( + kekGenerator.generateKey().encoded, + KeyType.EPHEMERAL + ) + } catch (e: NoSuchAlgorithmException) { + throw RuntimeException(e) + } + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/AesCbcEnvelopeEncryptionBlobDecorator.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/AesCbcEnvelopeEncryptionBlobDecorator.kt new file mode 100644 index 0000000000000..fd48bf2e7e45a --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/AesCbcEnvelopeEncryptionBlobDecorator.kt @@ -0,0 +1,187 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.s3 + +import com.google.common.annotations.VisibleForTesting +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings +import java.io.OutputStream +import java.security.InvalidAlgorithmParameterException +import java.security.InvalidKeyException +import java.security.NoSuchAlgorithmException +import java.security.SecureRandom +import java.util.Base64 +import javax.crypto.BadPaddingException +import javax.crypto.Cipher +import javax.crypto.CipherOutputStream +import javax.crypto.IllegalBlockSizeException +import javax.crypto.KeyGenerator +import javax.crypto.NoSuchPaddingException +import javax.crypto.SecretKey +import javax.crypto.spec.IvParameterSpec +import javax.crypto.spec.SecretKeySpec + +/** + * This class implements the envelope encryption that Redshift and Snowflake use when loading + * encrypted files from S3 (or other blob stores): + * + * * A content-encrypting-key (CEK) is used to encrypt the actual data (i.e. the CSV file) + * * A key-encrypting-key (KEK) is used to encrypt the CEK + * * The encrypted CEK is stored in the S3 object metadata, along with the plaintext initialization + * vector + * * The COPY command includes the KEK (in plaintext). Redshift/Snowflake will use it to decrypt the + * CEK, which it then uses to decrypt the CSV file. + * + * A new CEK is generated for each S3 object, but each sync uses a single KEK. The KEK may be either + * user-provided (if the user wants to keep the data for further use), or generated per-sync (if + * they simply want to add additional security around their COPY operation). + * + * Redshift does not support loading directly from GCS or Azure Blob Storage. + * + * Snowflake only supports client-side encryption in S3 and Azure Storage; it does not support this + * feature in GCS (https://docs.snowflake.com/en/sql-reference/sql/copy-into-table.html). Azure + * Storage uses a similar envelope encryption technique to S3 + * (https://docs.microsoft.com/en-us/azure/storage/common/storage-client-side-encryption?tabs=dotnet#encryption-via-the-envelope-technique). + */ +class AesCbcEnvelopeEncryptionBlobDecorator +@VisibleForTesting +internal constructor( // The real "secret key". Should be handled with great care. + private val keyEncryptingKey: + SecretKey?, // A random key generated for each file. Also should be handled with care. + private val contentEncryptingKey: + SecretKey, // Arbitrary bytes required by the CBC algorithm. Not a sensitive value. + // The only requirement is that we never reuse an (IV, CEK) pair. + private val initializationVector: ByteArray +) : BlobDecorator { + constructor( + keyEncryptingKey: SecretKey? + ) : this( + keyEncryptingKey, + randomContentEncryptingKey(), + randomInitializationVector(), + ) + + constructor( + keyEncryptingKey: ByteArray? + ) : this( + SecretKeySpec( + keyEncryptingKey, + KEY_ENCRYPTING_ALGO, + ), + ) + + @SuppressFBWarnings( + value = ["PADORA", "CIPINT"], + justification = "We're using this cipher for compatibility with Redshift/Snowflake.", + ) + override fun wrap(stream: OutputStream): OutputStream { + try { + val dataCipher = Cipher.getInstance(CONTENT_ENCRYPTING_CIPHER_ALGO) + dataCipher.init( + Cipher.ENCRYPT_MODE, + contentEncryptingKey, + IvParameterSpec( + initializationVector, + ), + ) + return CipherOutputStream(stream, dataCipher) + } catch (e: InvalidAlgorithmParameterException) { + throw RuntimeException(e) + } catch (e: NoSuchPaddingException) { + throw RuntimeException(e) + } catch (e: NoSuchAlgorithmException) { + throw RuntimeException(e) + } catch (e: InvalidKeyException) { + throw RuntimeException(e) + } + } + + @SuppressFBWarnings( + value = ["CIPINT", "SECECB"], + justification = "We're using this cipher for compatibility with Redshift/Snowflake.", + ) + override fun updateMetadata( + metadata: MutableMap, + metadataKeyMapping: Map + ) { + try { + val keyCipher = Cipher.getInstance(KEY_ENCRYPTING_ALGO) + keyCipher.init(Cipher.ENCRYPT_MODE, keyEncryptingKey) + val encryptedCekBytes = keyCipher.doFinal(contentEncryptingKey.encoded) + + BlobDecorator.insertMetadata( + metadata, + metadataKeyMapping, + ENCRYPTED_CONTENT_ENCRYPTING_KEY, + BASE64_ENCODER.encodeToString(encryptedCekBytes), + ) + BlobDecorator.insertMetadata( + metadata, + metadataKeyMapping, + INITIALIZATION_VECTOR, + BASE64_ENCODER.encodeToString( + initializationVector, + ), + ) + } catch (e: NoSuchPaddingException) { + throw RuntimeException(e) + } catch (e: NoSuchAlgorithmException) { + throw RuntimeException(e) + } catch (e: InvalidKeyException) { + throw RuntimeException(e) + } catch (e: IllegalBlockSizeException) { + throw RuntimeException(e) + } catch (e: BadPaddingException) { + throw RuntimeException(e) + } + } + + companion object { + const val ENCRYPTED_CONTENT_ENCRYPTING_KEY: String = + "aes_cbc_envelope_encryption-content-encrypting-key" + const val INITIALIZATION_VECTOR: String = + "aes_cbc_envelope_encryption-initialization-vector" + + const val AES_KEY_SIZE_BITS: Int = 256 + private const val AES_CBC_INITIALIZATION_VECTOR_SIZE_BYTES = 16 + private val BASE64_ENCODER: Base64.Encoder = Base64.getEncoder() + private val SECURE_RANDOM = SecureRandom() + + const val KEY_ENCRYPTING_ALGO: String = "AES" + + // There's no specific KeyGenerator for AES/CBC/PKCS5Padding, so we just use a normal AES + // KeyGenerator + private const val CONTENT_ENCRYPTING_KEY_ALGO = "AES" + + // Redshift's UNLOAD command uses this cipher mode, so we'll use it here as well. + // TODO If we eventually want to expose client-side encryption in destination-s3, we should + // probably + // also implement + // AES-GCM, since it's mostly superior to CBC mode. (if we do that: make sure that the + // output is + // compatible with + // aws-java-sdk's AmazonS3EncryptionV2Client, which requires a slightly different set of + // metadata) + private const val CONTENT_ENCRYPTING_CIPHER_ALGO = "AES/CBC/PKCS5Padding" + + private fun randomContentEncryptingKey(): SecretKey { + try { + val cekGenerator = + KeyGenerator.getInstance( + CONTENT_ENCRYPTING_KEY_ALGO, + ) + cekGenerator.init(AES_KEY_SIZE_BITS) + return cekGenerator.generateKey() + } catch (e: NoSuchAlgorithmException) { + throw RuntimeException(e) + } + } + + private fun randomInitializationVector(): ByteArray { + val initializationVector = ByteArray(AES_CBC_INITIALIZATION_VECTOR_SIZE_BYTES) + SECURE_RANDOM.nextBytes(initializationVector) + return initializationVector + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/BaseS3Destination.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/BaseS3Destination.kt new file mode 100644 index 0000000000000..259839bdadc73 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/BaseS3Destination.kt @@ -0,0 +1,85 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3 + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.BaseConnector +import io.airbyte.cdk.integrations.base.AirbyteMessageConsumer +import io.airbyte.cdk.integrations.base.Destination +import io.airbyte.cdk.integrations.destination.NamingConventionTransformer +import io.airbyte.cdk.integrations.destination.record_buffer.BufferStorage +import io.airbyte.cdk.integrations.destination.record_buffer.FileBuffer +import io.airbyte.cdk.integrations.destination.s3.SerializedBufferFactory.Companion.getCreateFunction +import io.airbyte.cdk.integrations.destination.s3.util.S3NameTransformer +import io.airbyte.protocol.models.v0.AirbyteConnectionStatus +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import java.util.function.Consumer +import java.util.function.Function +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +abstract class BaseS3Destination +protected constructor( + protected val configFactory: S3DestinationConfigFactory = S3DestinationConfigFactory() +) : BaseConnector(), Destination { + private val nameTransformer: NamingConventionTransformer = S3NameTransformer() + + override fun check(config: JsonNode): AirbyteConnectionStatus? { + try { + val destinationConfig = configFactory.getS3DestinationConfig(config, storageProvider()) + val s3Client = destinationConfig.getS3Client() + + S3BaseChecks.testIAMUserHasListObjectPermission(s3Client, destinationConfig.bucketName) + S3BaseChecks.testSingleUpload( + s3Client, + destinationConfig.bucketName, + destinationConfig.bucketPath!! + ) + S3BaseChecks.testMultipartUpload( + s3Client, + destinationConfig.bucketName, + destinationConfig.bucketPath + ) + + return AirbyteConnectionStatus().withStatus(AirbyteConnectionStatus.Status.SUCCEEDED) + } catch (e: Exception) { + LOGGER.error("Exception attempting to access the S3 bucket: ", e) + return AirbyteConnectionStatus() + .withStatus(AirbyteConnectionStatus.Status.FAILED) + .withMessage( + "Could not connect to the S3 bucket with the provided configuration. \n" + + e.message + ) + } + } + + override fun getConsumer( + config: JsonNode, + catalog: ConfiguredAirbyteCatalog, + outputRecordCollector: Consumer + ): AirbyteMessageConsumer? { + val s3Config = configFactory.getS3DestinationConfig(config, storageProvider()) + return S3ConsumerFactory() + .create( + outputRecordCollector, + S3StorageOperations(nameTransformer, s3Config.getS3Client(), s3Config), + nameTransformer, + getCreateFunction( + s3Config, + Function { fileExtension: String -> + FileBuffer(fileExtension) + } + ), + s3Config, + catalog + ) + } + + abstract fun storageProvider(): StorageProvider + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(BaseS3Destination::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/BlobDecorator.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/BlobDecorator.kt new file mode 100644 index 0000000000000..205ad7339adb3 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/BlobDecorator.kt @@ -0,0 +1,51 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3 + +import com.google.common.annotations.VisibleForTesting +import java.io.OutputStream + +/** + * Represents the ability to modify how a blob is stored, by modifying the data being written and/or + * the blob's metadata. + */ +interface BlobDecorator { + fun wrap(stream: OutputStream): OutputStream + + /** + * Modifies the blob's metadata. + * + * In the most common case, BlobDecorator implementations will insert new entries into the + * metadata map. These entries may be vendor-specific. The metadataKeyMapping parameter defines + * a mapping from the "canonical" keys to the vendor-specific keys. See + * [S3StorageOperations.getMetadataMapping] for an example. + * + * If a key is not defined in metadataKeyMapping, it will not be inserted into the metadata. + * + * @param metadata The blob's metadata + * @param metadataKeyMapping The mapping from canonical to vendor-specific key names + */ + fun updateMetadata( + metadata: MutableMap, + metadataKeyMapping: Map + ) + + companion object { + /** + * A convenience method for subclasses. Handles inserting new metadata entries according to + * the metadataKeyMapping. + */ + @VisibleForTesting + fun insertMetadata( + metadata: MutableMap, + metadataKeyMapping: Map, + key: String, + value: String + ) { + if (metadataKeyMapping.containsKey(key)) { + metadata[metadataKeyMapping.getValue(key)] = value + } + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/BlobStorageOperations.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/BlobStorageOperations.kt new file mode 100644 index 0000000000000..9bea164215371 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/BlobStorageOperations.kt @@ -0,0 +1,64 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.s3 + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.destination.record_buffer.SerializableBuffer +import org.joda.time.DateTime + +abstract class BlobStorageOperations protected constructor() { + protected val blobDecorators: MutableList = ArrayList() + + abstract fun getBucketObjectPath( + namespace: String?, + streamName: String, + writeDatetime: DateTime, + customFormat: String + ): String? + + /** Ensure that the bucket specified in the config exists */ + @Throws(Exception::class) abstract fun createBucketIfNotExists() + + /** + * Upload the data files into the storage area. + * + * @return the name of the file that was uploaded. + */ + @Throws(Exception::class) + abstract fun uploadRecordsToBucket( + recordsData: SerializableBuffer, + namespace: String?, + objectPath: String + ): String? + + /** Remove files that were just stored in the bucket */ + @Throws(Exception::class) + abstract fun cleanUpBucketObject(objectPath: String, stagedFiles: List) + + /** + * Deletes all the bucket objects for the specified bucket path + * + * @param namespace Optional source-defined namespace name + * @param streamName Name of the stream + * @param objectPath file path to where staging files are stored + * @param pathFormat formatted string for the path + */ + abstract fun cleanUpBucketObject( + namespace: String?, + streamName: String, + objectPath: String, + pathFormat: String + ) + + abstract fun dropBucketObject(objectPath: String) + + abstract fun isValidData(jsonNode: JsonNode): Boolean + + abstract fun getMetadataMapping(): Map + + fun addBlobDecorator(blobDecorator: BlobDecorator) { + blobDecorators.add(blobDecorator) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/EncryptionConfig.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/EncryptionConfig.kt new file mode 100644 index 0000000000000..3cfcf71d96d3d --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/EncryptionConfig.kt @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3 + +import com.fasterxml.jackson.databind.JsonNode +import java.util.* + +interface EncryptionConfig { + companion object { + @JvmStatic + fun fromJson(encryptionNode: JsonNode?): EncryptionConfig { + // For backwards-compatibility. Preexisting configs which don't contain the "encryption" + // key will + // pass a null JsonNode into this method. + if (encryptionNode == null) { + return NoEncryption() + } + + return when (val encryptionType = encryptionNode["encryption_type"].asText()) { + "none" -> NoEncryption() + "aes_cbc_envelope" -> AesCbcEnvelopeEncryption.Companion.fromJson(encryptionNode) + else -> throw IllegalArgumentException("Invalid encryption type: $encryptionType") + } + } + + val BASE64_DECODER: Base64.Decoder = Base64.getDecoder() + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/NoEncryption.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/NoEncryption.kt new file mode 100644 index 0000000000000..af47e9fcad348 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/NoEncryption.kt @@ -0,0 +1,6 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3 + +class NoEncryption : EncryptionConfig diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/S3BaseChecks.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/S3BaseChecks.kt new file mode 100644 index 0000000000000..d81a147784d52 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/S3BaseChecks.kt @@ -0,0 +1,156 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3 + +import com.amazonaws.services.s3.AmazonS3 +import com.amazonaws.services.s3.model.ListObjectsRequest +import com.google.common.annotations.VisibleForTesting +import com.google.common.base.Strings +import io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory.create +import java.io.IOException +import java.io.PrintWriter +import java.nio.charset.StandardCharsets +import java.util.* +import org.apache.commons.csv.CSVFormat +import org.apache.commons.csv.CSVPrinter +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +object S3BaseChecks { + private val LOGGER: Logger = LoggerFactory.getLogger(S3BaseChecks::class.java) + + /** + * Note that this method completely ignores s3Config.getBucketPath(), in favor of the bucketPath + * parameter. + */ + @JvmStatic + fun attemptS3WriteAndDelete( + storageOperations: S3StorageOperations, + s3Config: S3DestinationConfig, + bucketPath: String? + ) { + attemptS3WriteAndDelete(storageOperations, s3Config, bucketPath, s3Config.getS3Client()) + } + + @JvmStatic + fun testSingleUpload(s3Client: AmazonS3, bucketName: String?, bucketPath: String) { + LOGGER.info( + "Started testing if all required credentials assigned to user for single file uploading" + ) + val prefix = if (bucketPath.endsWith("/")) bucketPath else "$bucketPath/" + val testFile = prefix + "test_" + System.currentTimeMillis() + try { + s3Client.putObject(bucketName, testFile, "this is a test file") + } finally { + s3Client.deleteObject(bucketName, testFile) + } + LOGGER.info("Finished checking for normal upload mode") + } + + @JvmStatic + @Throws(IOException::class) + fun testMultipartUpload(s3Client: AmazonS3, bucketName: String?, bucketPath: String) { + LOGGER.info( + "Started testing if all required credentials assigned to user for multipart upload" + ) + val prefix = if (bucketPath.endsWith("/")) bucketPath else "$bucketPath/" + val testFile = prefix + "test_" + System.currentTimeMillis() + val manager = create(bucketName, testFile, s3Client).get() + var success = false + try { + manager.multiPartOutputStreams[0].use { outputStream -> + CSVPrinter( + PrintWriter(outputStream, true, StandardCharsets.UTF_8), + CSVFormat.DEFAULT + ) + .use { csvPrinter -> + val oneMegaByteString = "a".repeat(500000) + // write a file larger than the 5 MB, which is the default part size, to + // make sure it is a multipart + // upload + for (i in 0..6) { + csvPrinter.printRecord(System.currentTimeMillis(), oneMegaByteString) + } + success = true + } + } + } finally { + if (success) { + manager.complete() + } else { + manager.abort() + } + s3Client!!.deleteObject(bucketName, testFile) + } + LOGGER.info("Finished verification for multipart upload mode") + } + + /** + * Checks that S3 custom endpoint uses a variant that only uses HTTPS + * + * @param endpoint URL string representing an accessible S3 bucket + */ + @JvmStatic + fun testCustomEndpointSecured(endpoint: String?): Boolean { + // if user does not use a custom endpoint, do not fail + return if (endpoint == null || endpoint.length == 0) { + true + } else { + endpoint.startsWith("https://") + } + } + + @VisibleForTesting + fun attemptS3WriteAndDelete( + storageOperations: S3StorageOperations, + s3Config: S3DestinationConfig, + bucketPath: String?, + s3: AmazonS3 + ) { + val prefix = + if (bucketPath.isNullOrEmpty()) { + "" + } else if (bucketPath.endsWith("/")) { + bucketPath + } else { + "$bucketPath/" + } + + val outputTableName = + prefix + + "_airbyte_connection_test_" + + UUID.randomUUID().toString().replace("-".toRegex(), "") + attemptWriteAndDeleteS3Object(storageOperations, s3Config, outputTableName, s3) + } + + /** + * Runs some permissions checks: 1. Check whether the bucket exists; create it if not 2. Check + * whether s3://bucketName/bucketPath/ exists; create it (with empty contents) if not. (if + * bucketPath is null/empty-string, then skip this step) 3. Attempt to create and delete + * s3://bucketName/outputTableName 4. Attempt to list all objects in the bucket + */ + private fun attemptWriteAndDeleteS3Object( + storageOperations: S3StorageOperations, + s3Config: S3DestinationConfig, + outputTableName: String, + s3: AmazonS3? + ) { + val s3Bucket = s3Config.bucketName + val bucketPath = s3Config.bucketPath + + if (!Strings.isNullOrEmpty(bucketPath)) { + storageOperations.createBucketIfNotExists() + } + s3!!.putObject(s3Bucket, outputTableName, "check-content") + testIAMUserHasListObjectPermission(s3, s3Bucket) + s3.deleteObject(s3Bucket, outputTableName) + } + + fun testIAMUserHasListObjectPermission(s3: AmazonS3, bucketName: String?) { + LOGGER.info("Started testing if IAM user can call listObjects on the destination bucket") + val request = ListObjectsRequest().withBucketName(bucketName).withMaxKeys(1) + s3.listObjects(request) + LOGGER.info("Finished checking for listObjects permission") + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/S3ConsumerFactory.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/S3ConsumerFactory.kt new file mode 100644 index 0000000000000..0052c69c26cda --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/S3ConsumerFactory.kt @@ -0,0 +1,216 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3 + +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.base.Preconditions +import io.airbyte.cdk.integrations.base.AirbyteMessageConsumer +import io.airbyte.cdk.integrations.destination.NamingConventionTransformer +import io.airbyte.cdk.integrations.destination.StreamSyncSummary +import io.airbyte.cdk.integrations.destination.buffered_stream_consumer.BufferedStreamConsumer +import io.airbyte.cdk.integrations.destination.buffered_stream_consumer.OnCloseFunction +import io.airbyte.cdk.integrations.destination.buffered_stream_consumer.OnStartFunction +import io.airbyte.cdk.integrations.destination.record_buffer.BufferCreateFunction +import io.airbyte.cdk.integrations.destination.record_buffer.FlushBufferFunction +import io.airbyte.cdk.integrations.destination.record_buffer.SerializableBuffer +import io.airbyte.cdk.integrations.destination.record_buffer.SerializedBufferingStrategy +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.v0.* +import java.util.function.Consumer +import java.util.function.Function +import java.util.stream.Collectors +import org.apache.commons.io.FileUtils +import org.joda.time.DateTime +import org.joda.time.DateTimeZone +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +class S3ConsumerFactory { + fun create( + outputRecordCollector: Consumer, + storageOperations: BlobStorageOperations, + namingResolver: NamingConventionTransformer, + onCreateBuffer: BufferCreateFunction, + s3Config: S3DestinationConfig, + catalog: ConfiguredAirbyteCatalog + ): AirbyteMessageConsumer { + val writeConfigs = createWriteConfigs(storageOperations, namingResolver, s3Config, catalog) + return BufferedStreamConsumer( + outputRecordCollector, + onStartFunction(storageOperations, writeConfigs), + SerializedBufferingStrategy( + onCreateBuffer, + catalog, + flushBufferFunction(storageOperations, writeConfigs, catalog) + ), + onCloseFunction(storageOperations, writeConfigs), + catalog + ) { jsonNode: JsonNode? -> storageOperations.isValidData(jsonNode!!) } + } + + private fun onStartFunction( + storageOperations: BlobStorageOperations, + writeConfigs: List + ): OnStartFunction { + return OnStartFunction { + LOGGER.info("Preparing bucket in destination started for {} streams", writeConfigs.size) + for (writeConfig in writeConfigs) { + if (writeConfig.syncMode == DestinationSyncMode.OVERWRITE) { + val namespace = writeConfig.namespace + val stream = writeConfig.streamName + val outputBucketPath = writeConfig.outputBucketPath + val pathFormat = writeConfig.pathFormat + LOGGER.info( + "Clearing storage area in destination started for namespace {} stream {} bucketObject {} pathFormat {}", + namespace, + stream, + outputBucketPath, + pathFormat + ) + storageOperations.cleanUpBucketObject( + namespace, + stream, + outputBucketPath, + pathFormat + ) + LOGGER.info( + "Clearing storage area in destination completed for namespace {} stream {} bucketObject {}", + namespace, + stream, + outputBucketPath + ) + } + } + LOGGER.info("Preparing storage area in destination completed.") + } + } + + private fun flushBufferFunction( + storageOperations: BlobStorageOperations, + writeConfigs: List, + catalog: ConfiguredAirbyteCatalog? + ): FlushBufferFunction { + val pairToWriteConfig = + writeConfigs + .stream() + .collect( + Collectors.toUnmodifiableMap( + Function { config: WriteConfig -> toNameNamespacePair(config) }, + Function.identity() + ) + ) + + return FlushBufferFunction { + pair: AirbyteStreamNameNamespacePair, + writer: SerializableBuffer -> + LOGGER.info( + "Flushing buffer for stream {} ({}) to storage", + pair.name, + FileUtils.byteCountToDisplaySize(writer.byteCount) + ) + require(pairToWriteConfig.containsKey(pair)) { + String.format( + "Message contained record from a stream %s that was not in the catalog. \ncatalog: %s", + pair, + Jsons.serialize(catalog) + ) + } + + val writeConfig = pairToWriteConfig[pair] + try { + writer.use { + writer.flush() + writeConfig!!.addStoredFile( + storageOperations.uploadRecordsToBucket( + writer, + writeConfig.namespace, + writeConfig.fullOutputPath + )!! + ) + } + } catch (e: Exception) { + LOGGER.error("Failed to flush and upload buffer to storage:", e) + throw RuntimeException("Failed to upload buffer to storage", e) + } + } + } + + private fun onCloseFunction( + storageOperations: BlobStorageOperations, + writeConfigs: List + ): OnCloseFunction { + return OnCloseFunction { hasFailed: Boolean, _: Map -> + if (hasFailed) { + LOGGER.info("Cleaning up destination started for {} streams", writeConfigs.size) + for (writeConfig in writeConfigs) { + storageOperations.cleanUpBucketObject( + writeConfig.fullOutputPath, + writeConfig.storedFiles + ) + writeConfig.clearStoredFiles() + } + LOGGER.info("Cleaning up destination completed.") + } + } + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(S3ConsumerFactory::class.java) + private val SYNC_DATETIME: DateTime = DateTime.now(DateTimeZone.UTC) + + private fun createWriteConfigs( + storageOperations: BlobStorageOperations, + namingResolver: NamingConventionTransformer, + config: S3DestinationConfig, + catalog: ConfiguredAirbyteCatalog? + ): List { + return catalog!! + .streams + .stream() + .map(toWriteConfig(storageOperations, namingResolver, config)) + .collect(Collectors.toList()) + } + + private fun toWriteConfig( + storageOperations: BlobStorageOperations, + namingResolver: NamingConventionTransformer, + s3Config: S3DestinationConfig + ): Function { + return Function { stream: ConfiguredAirbyteStream -> + Preconditions.checkNotNull( + stream.destinationSyncMode, + "Undefined destination sync mode" + ) + val abStream = stream.stream + val namespace: String? = abStream.namespace + val streamName = abStream.name + val bucketPath = s3Config.bucketPath + val customOutputFormat = java.lang.String.join("/", bucketPath, s3Config.pathFormat) + val fullOutputPath = + storageOperations.getBucketObjectPath( + namespace, + streamName, + SYNC_DATETIME, + customOutputFormat + ) + val syncMode = stream.destinationSyncMode + val writeConfig = + WriteConfig( + namespace, + streamName, + bucketPath!!, + customOutputFormat, + fullOutputPath!!, + syncMode + ) + LOGGER.info("Write config: {}", writeConfig) + writeConfig + } + } + + private fun toNameNamespacePair(config: WriteConfig): AirbyteStreamNameNamespacePair { + return AirbyteStreamNameNamespacePair(config.streamName, config.namespace) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/S3DestinationConfig.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/S3DestinationConfig.kt new file mode 100644 index 0000000000000..6b1a0a16501f2 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/S3DestinationConfig.kt @@ -0,0 +1,366 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3 + +import com.amazonaws.ClientConfiguration +import com.amazonaws.Protocol +import com.amazonaws.client.builder.AwsClientBuilder +import com.amazonaws.retry.RetryMode +import com.amazonaws.services.s3.AmazonS3 +import com.amazonaws.services.s3.AmazonS3ClientBuilder +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.destination.s3.constant.S3Constants +import io.airbyte.cdk.integrations.destination.s3.credential.S3AWSDefaultProfileCredentialConfig +import io.airbyte.cdk.integrations.destination.s3.credential.S3AccessKeyCredentialConfig +import io.airbyte.cdk.integrations.destination.s3.credential.S3CredentialConfig +import io.airbyte.cdk.integrations.destination.s3.credential.S3CredentialType +import java.util.* +import javax.annotation.Nonnull +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * An S3 configuration. Typical usage sets at most one of `bucketPath` (necessary for more delicate + * data syncing to S3) + */ +open class S3DestinationConfig { + val endpoint: String? + val bucketName: String? + val bucketPath: String? + val bucketRegion: String? + val pathFormat: String? + val s3CredentialConfig: S3CredentialConfig? + val formatConfig: S3FormatConfig? + var fileNamePattern: String? = null + private set + + private val lock = Any() + private var s3Client: AmazonS3? + fun getS3Client(): AmazonS3 { + synchronized(lock) { + s3Client?.let { + return it + } + return resetS3Client() + } + } + + var isCheckIntegrity: Boolean = true + private set + + var uploadThreadsCount: Int = S3StorageOperations.DEFAULT_UPLOAD_THREADS + private set + + constructor( + endpoint: String?, + bucketName: String, + bucketPath: String?, + bucketRegion: String?, + pathFormat: String?, + credentialConfig: S3CredentialConfig?, + formatConfig: S3FormatConfig?, + s3Client: AmazonS3 + ) { + this.endpoint = endpoint + this.bucketName = bucketName + this.bucketPath = bucketPath + this.bucketRegion = bucketRegion + this.pathFormat = pathFormat + this.s3CredentialConfig = credentialConfig + this.formatConfig = formatConfig + this.s3Client = s3Client + } + + constructor( + endpoint: String?, + bucketName: String?, + bucketPath: String?, + bucketRegion: String?, + pathFormat: String?, + credentialConfig: S3CredentialConfig?, + formatConfig: S3FormatConfig?, + s3Client: AmazonS3?, + fileNamePattern: String?, + checkIntegrity: Boolean, + uploadThreadsCount: Int + ) { + this.endpoint = endpoint + this.bucketName = bucketName + this.bucketPath = bucketPath + this.bucketRegion = bucketRegion + this.pathFormat = pathFormat + this.s3CredentialConfig = credentialConfig + this.formatConfig = formatConfig + this.s3Client = s3Client + this.fileNamePattern = fileNamePattern + this.isCheckIntegrity = checkIntegrity + this.uploadThreadsCount = uploadThreadsCount + } + + fun resetS3Client(): AmazonS3 { + synchronized(lock) { + s3Client?.shutdown() + val s3Client = createS3Client() + this.s3Client = s3Client + return s3Client + } + } + + protected open fun createS3Client(): AmazonS3 { + LOGGER.info("Creating S3 client...") + + val credentialsProvider = s3CredentialConfig!!.s3CredentialsProvider + val credentialType = s3CredentialConfig!!.credentialType + + if (S3CredentialType.DEFAULT_PROFILE == credentialType) { + return AmazonS3ClientBuilder.standard() + .withRegion(bucketRegion) + .withCredentials(credentialsProvider) // the SDK defaults to RetryMode.LEGACY + // (https://docs.aws.amazon.com/sdkref/latest/guide/feature-retry-behavior.html) + // this _can_ be configured via environment variable, but it seems more reliable to + // configure it + // programmatically + .withClientConfiguration(ClientConfiguration().withRetryMode(RetryMode.STANDARD)) + .build() + } + + if (null == endpoint || endpoint.isEmpty()) { + return AmazonS3ClientBuilder.standard() + .withCredentials(credentialsProvider) + .withRegion(bucketRegion) + .build() + } + + val clientConfiguration = ClientConfiguration().withProtocol(Protocol.HTTPS) + clientConfiguration.signerOverride = "AWSS3V4SignerType" + + return AmazonS3ClientBuilder.standard() + .withEndpointConfiguration( + AwsClientBuilder.EndpointConfiguration(endpoint, bucketRegion) + ) + .withPathStyleAccessEnabled(true) + .withClientConfiguration(clientConfiguration) + .withCredentials(credentialsProvider) + .build() + } + + override fun equals(o: Any?): Boolean { + if (this === o) { + return true + } + if (o == null || javaClass != o.javaClass) { + return false + } + val that = o as S3DestinationConfig + return endpoint == that.endpoint && + bucketName == that.bucketName && + bucketPath == that.bucketPath && + bucketRegion == that.bucketRegion && + s3CredentialConfig == that.s3CredentialConfig && + formatConfig == that.formatConfig + } + + override fun hashCode(): Int { + return Objects.hash( + endpoint, + bucketName, + bucketPath, + bucketRegion, + s3CredentialConfig, + formatConfig + ) + } + + class Builder( + private var bucketName: String?, + private var bucketPath: String, + private var bucketRegion: String? + ) { + private var endpoint: String? = "" + private var pathFormat = S3DestinationConstants.DEFAULT_PATH_FORMAT + + private lateinit var credentialConfig: S3CredentialConfig + private var formatConfig: S3FormatConfig? = null + private var s3Client: AmazonS3? = null + private var fileNamePattern: String? = null + + private var checkIntegrity = true + + private var uploadThreadsCount = S3StorageOperations.DEFAULT_UPLOAD_THREADS + + fun withBucketName(bucketName: String): Builder { + this.bucketName = bucketName + return this + } + + fun withFileNamePattern(fileNamePattern: String?): Builder { + this.fileNamePattern = fileNamePattern + return this + } + + fun withBucketPath(bucketPath: String): Builder { + this.bucketPath = bucketPath + return this + } + + fun withBucketRegion(bucketRegion: String?): Builder { + this.bucketRegion = bucketRegion + return this + } + + fun withPathFormat(pathFormat: String): Builder { + this.pathFormat = pathFormat + return this + } + + fun withEndpoint(endpoint: String?): Builder { + this.endpoint = endpoint + return this + } + + fun withFormatConfig(formatConfig: S3FormatConfig?): Builder { + this.formatConfig = formatConfig + return this + } + + fun withAccessKeyCredential(accessKeyId: String?, secretAccessKey: String?): Builder { + this.credentialConfig = S3AccessKeyCredentialConfig(accessKeyId, secretAccessKey) + return this + } + + fun withCredentialConfig(credentialConfig: S3CredentialConfig): Builder { + this.credentialConfig = credentialConfig + return this + } + + fun withS3Client(s3Client: AmazonS3): Builder { + this.s3Client = s3Client + return this + } + + fun withCheckIntegrity(checkIntegrity: Boolean): Builder { + this.checkIntegrity = checkIntegrity + return this + } + + fun withUploadThreadsCount(uploadThreadsCount: Int): Builder { + this.uploadThreadsCount = uploadThreadsCount + return this + } + + fun get(): S3DestinationConfig { + return S3DestinationConfig( + endpoint, + bucketName, + bucketPath, + bucketRegion, + pathFormat, + credentialConfig, + formatConfig, + s3Client, + fileNamePattern, + checkIntegrity, + uploadThreadsCount + ) + } + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(S3DestinationConfig::class.java) + private const val R2_INSTANCE_URL = "https://%s.r2.cloudflarestorage.com" + + @JvmStatic + fun create(bucketName: String?, bucketPath: String, bucketRegion: String?): Builder { + return Builder(bucketName, bucketPath, bucketRegion) + } + + @JvmStatic + fun create(config: S3DestinationConfig): Builder { + return Builder(config.bucketName, config.bucketPath!!, config.bucketRegion) + .withEndpoint(config.endpoint) + .withCredentialConfig(config.s3CredentialConfig!!) + .withFormatConfig(config.formatConfig) + } + + @JvmStatic + fun getS3DestinationConfig(@Nonnull config: JsonNode): S3DestinationConfig { + return getS3DestinationConfig(config, StorageProvider.AWS_S3) + } + + @JvmStatic + fun getS3DestinationConfig( + @Nonnull config: JsonNode, + @Nonnull storageProvider: StorageProvider + ): S3DestinationConfig { + var builder = + create( + getProperty(config, S3Constants.S_3_BUCKET_NAME), + "", + getProperty(config, S3Constants.S_3_BUCKET_REGION) + ) + + if (config!!.has(S3Constants.S_3_BUCKET_PATH)) { + builder = builder.withBucketPath(config[S3Constants.S_3_BUCKET_PATH].asText()) + } + + if (config.has(S3Constants.FILE_NAME_PATTERN)) { + builder = + builder.withFileNamePattern(config[S3Constants.FILE_NAME_PATTERN].asText()) + } + + if (config.has(S3Constants.S_3_PATH_FORMAT)) { + builder = builder.withPathFormat(config[S3Constants.S_3_PATH_FORMAT].asText()) + } + + when (storageProvider) { + StorageProvider.CF_R2 -> { + if (config.has(S3Constants.ACCOUNT_ID)) { + val endpoint = + String.format( + R2_INSTANCE_URL, + getProperty(config, S3Constants.ACCOUNT_ID) + ) + builder = builder.withEndpoint(endpoint) + } + builder = + builder + .withCheckIntegrity( + false + ) // https://developers.cloudflare.com/r2/platform/s3-compatibility/api/#implemented-object-level-operations + // 3 or less + .withUploadThreadsCount(S3StorageOperations.R2_UPLOAD_THREADS) + } + else -> { + if (config.has(S3Constants.S_3_ENDPOINT)) { + builder = builder.withEndpoint(config[S3Constants.S_3_ENDPOINT].asText()) + } + } + } + val credentialConfig = + if (config.has(S3Constants.ACCESS_KEY_ID)) { + S3AccessKeyCredentialConfig( + getProperty(config, S3Constants.ACCESS_KEY_ID), + getProperty(config, S3Constants.SECRET_ACCESS_KEY) + ) + } else { + S3AWSDefaultProfileCredentialConfig() + } + builder = builder.withCredentialConfig(credentialConfig) + + // In the "normal" S3 destination, this is never null. However, the Redshift and + // Snowflake copy + // destinations don't set a Format config. + if (config.has("format")) { + builder = builder.withFormatConfig(S3FormatConfigs.getS3FormatConfig(config)) + } + + return builder.get() + } + + private fun getProperty(config: JsonNode, @Nonnull key: String): String? { + val node: JsonNode? = config.get(key) + return node?.asText() + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/S3DestinationConfigFactory.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/S3DestinationConfigFactory.kt new file mode 100644 index 0000000000000..b063990dd2863 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/S3DestinationConfigFactory.kt @@ -0,0 +1,16 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3 + +import com.fasterxml.jackson.databind.JsonNode +import javax.annotation.Nonnull + +open class S3DestinationConfigFactory { + open fun getS3DestinationConfig( + config: JsonNode, + @Nonnull storageProvider: StorageProvider + ): S3DestinationConfig { + return S3DestinationConfig.Companion.getS3DestinationConfig(config, storageProvider) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/S3DestinationConstants.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/S3DestinationConstants.kt new file mode 100644 index 0000000000000..7bd86e09f9fb1 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/S3DestinationConstants.kt @@ -0,0 +1,26 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.s3 + +import io.airbyte.cdk.integrations.destination.s3.util.CompressionType +import io.airbyte.cdk.integrations.destination.s3.util.S3NameTransformer + +class S3DestinationConstants { + + companion object { + const val YYYY_MM_DD_FORMAT_STRING: String = "yyyy_MM_dd" + @JvmField val NAME_TRANSFORMER: S3NameTransformer = S3NameTransformer() + const val DEFAULT_PATH_FORMAT: String = + "\${NAMESPACE}/\${STREAM_NAME}/\${YEAR}_\${MONTH}_\${DAY}_\${EPOCH}_" + + // gzip compression for CSV and JSONL + const val COMPRESSION_ARG_NAME: String = "compression" + const val COMPRESSION_TYPE_ARG_NAME: String = "compression_type" + @JvmField val DEFAULT_COMPRESSION_TYPE: CompressionType = CompressionType.GZIP + + // Flattening for CSV and JSONL + const val FLATTENING_ARG_NAME: String = "flattening" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/S3Format.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/S3Format.kt new file mode 100644 index 0000000000000..c3ba6df42f707 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/S3Format.kt @@ -0,0 +1,12 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.s3 + +enum class S3Format(val fileExtension: String) { + AVRO("avro"), + CSV("csv"), + JSONL("jsonl"), + PARQUET("parquet"), +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/S3FormatConfig.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/S3FormatConfig.kt new file mode 100644 index 0000000000000..c2c3d2b513a21 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/S3FormatConfig.kt @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3 + +import com.fasterxml.jackson.databind.JsonNode + +interface S3FormatConfig { + val format: S3Format + + val fileExtension: String + + companion object { + fun withDefault(config: JsonNode, property: String?, defaultValue: String): String { + val value = config[property] + if (value == null || value.isNull) { + return defaultValue + } + return value.asText() + } + + fun withDefault(config: JsonNode, property: String?, defaultValue: Int): Int { + val value = config[property] + if (value == null || value.isNull) { + return defaultValue + } + return value.asInt() + } + + fun withDefault(config: JsonNode, property: String?, defaultValue: Boolean): Boolean { + val value = config[property] + if (value == null || value.isNull) { + return defaultValue + } + return value.asBoolean() + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/S3FormatConfigs.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/S3FormatConfigs.kt new file mode 100644 index 0000000000000..7c918f97f2453 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/S3FormatConfigs.kt @@ -0,0 +1,44 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3 + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.destination.s3.avro.S3AvroFormatConfig +import io.airbyte.cdk.integrations.destination.s3.csv.S3CsvFormatConfig +import io.airbyte.cdk.integrations.destination.s3.jsonl.S3JsonlFormatConfig +import io.airbyte.cdk.integrations.destination.s3.parquet.S3ParquetFormatConfig +import io.airbyte.commons.json.Jsons +import java.util.* +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +object S3FormatConfigs { + internal val LOGGER: Logger = LoggerFactory.getLogger(S3FormatConfigs::class.java) + + @JvmStatic + fun getS3FormatConfig(config: JsonNode): S3FormatConfig { + val formatConfig = config["format"] + LOGGER.info("S3 format config: {}", formatConfig.toString()) + val formatType = + S3Format.valueOf(formatConfig["format_type"].asText().uppercase(Locale.getDefault())) + + return when (formatType) { + S3Format.AVRO -> { + S3AvroFormatConfig(formatConfig) + } + S3Format.CSV -> { + S3CsvFormatConfig(formatConfig) + } + S3Format.JSONL -> { + S3JsonlFormatConfig(formatConfig) + } + S3Format.PARQUET -> { + S3ParquetFormatConfig(formatConfig) + } + else -> { + throw RuntimeException("Unexpected output format: " + Jsons.serialize(config)) + } + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/S3StorageOperations.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/S3StorageOperations.kt new file mode 100644 index 0000000000000..56bfde2291f2b --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/S3StorageOperations.kt @@ -0,0 +1,463 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.s3 + +import alex.mojaki.s3upload.StreamTransferManager +import com.amazonaws.services.s3.AmazonS3 +import com.amazonaws.services.s3.model.AmazonS3Exception +import com.amazonaws.services.s3.model.DeleteObjectsRequest +import com.amazonaws.services.s3.model.ListObjectsRequest +import com.amazonaws.services.s3.model.ObjectListing +import com.amazonaws.services.s3.model.S3ObjectSummary +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.annotations.VisibleForTesting +import com.google.common.collect.ImmutableMap +import io.airbyte.cdk.integrations.destination.NamingConventionTransformer +import io.airbyte.cdk.integrations.destination.record_buffer.SerializableBuffer +import io.airbyte.cdk.integrations.destination.s3.template.S3FilenameTemplateManager +import io.airbyte.cdk.integrations.destination.s3.template.S3FilenameTemplateParameterObject +import io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory +import io.airbyte.cdk.integrations.util.ConnectorExceptionUtil +import io.airbyte.commons.exceptions.ConfigErrorException +import io.github.oshai.kotlinlogging.KotlinLogging +import java.io.IOException +import java.io.OutputStream +import java.util.* +import java.util.concurrent.ConcurrentHashMap +import java.util.concurrent.ConcurrentMap +import java.util.concurrent.atomic.AtomicInteger +import java.util.regex.Pattern +import org.apache.commons.io.FilenameUtils +import org.joda.time.DateTime + +private val logger = KotlinLogging.logger {} + +open class S3StorageOperations( + private val nameTransformer: NamingConventionTransformer, + var s3Client: AmazonS3, + private val s3Config: S3DestinationConfig +) : BlobStorageOperations() { + private val s3FilenameTemplateManager: S3FilenameTemplateManager = S3FilenameTemplateManager() + + private val partCounts: ConcurrentMap = ConcurrentHashMap() + + override fun getBucketObjectPath( + namespace: String?, + streamName: String, + writeDatetime: DateTime, + customFormat: String + ): String { + val namespaceStr: String = + nameTransformer.getNamespace(if (!namespace.isNullOrBlank()) namespace else "") + val streamNameStr: String = nameTransformer.getIdentifier(streamName) + return nameTransformer.applyDefaultCase( + customFormat + .replace(Pattern.quote(FORMAT_VARIABLE_NAMESPACE).toRegex(), namespaceStr) + .replace(Pattern.quote(FORMAT_VARIABLE_STREAM_NAME).toRegex(), streamNameStr) + .replace( + Pattern.quote(FORMAT_VARIABLE_YEAR).toRegex(), + String.format("%s", writeDatetime.year().get()), + ) + .replace( + Pattern.quote(FORMAT_VARIABLE_MONTH).toRegex(), + String.format("%02d", writeDatetime.monthOfYear().get()), + ) + .replace( + Pattern.quote(FORMAT_VARIABLE_DAY).toRegex(), + String.format("%02d", writeDatetime.dayOfMonth().get()), + ) + .replace( + Pattern.quote(FORMAT_VARIABLE_HOUR).toRegex(), + String.format("%02d", writeDatetime.hourOfDay().get()), + ) + .replace( + Pattern.quote(FORMAT_VARIABLE_MINUTE).toRegex(), + String.format("%02d", writeDatetime.minuteOfHour().get()), + ) + .replace( + Pattern.quote(FORMAT_VARIABLE_SECOND).toRegex(), + String.format("%02d", writeDatetime.secondOfMinute().get()), + ) + .replace( + Pattern.quote(FORMAT_VARIABLE_MILLISECOND).toRegex(), + String.format("%04d", writeDatetime.millisOfSecond().get()), + ) + .replace( + Pattern.quote(FORMAT_VARIABLE_EPOCH).toRegex(), + String.format("%d", writeDatetime.millis), + ) + .replace( + Pattern.quote(FORMAT_VARIABLE_UUID).toRegex(), + String.format("%s", UUID.randomUUID()), + ) + .replace("/+".toRegex(), "/"), + ) + } + + /** Create a directory object at the specified location. Creates the bucket if necessary. */ + override fun createBucketIfNotExists() { + val bucket: String? = s3Config.bucketName + if (!doesBucketExist(bucket)) { + logger.info { "Bucket $bucket does not exist; creating..." } + s3Client.createBucket(bucket) + logger.info { "Bucket $bucket has been created." } + } + } + + protected open fun doesBucketExist(bucket: String?): Boolean { + return s3Client.doesBucketExistV2(bucket) + } + + override fun uploadRecordsToBucket( + recordsData: SerializableBuffer, + namespace: String?, + objectPath: String + ): String { + val exceptionsThrown: MutableList = ArrayList() + while (exceptionsThrown.size < UPLOAD_RETRY_LIMIT) { + if (exceptionsThrown.isNotEmpty()) { + logger.info { + "Retrying to upload records into storage $objectPath (${exceptionsThrown.size}/$UPLOAD_RETRY_LIMIT)" + } + // Force a reconnection before retrying in case error was due to network issues... + s3Client = s3Config.resetS3Client() + } + + try { + val fileName: String = loadDataIntoBucket(objectPath, recordsData) + logger.info { + "Successfully loaded records to stage $objectPath with ${exceptionsThrown.size} re-attempt(s)" + } + return fileName + } catch (e: Exception) { + logger.error(e) { "Failed to upload records into storage $objectPath" } + exceptionsThrown.add(e) + } + } + // Verifying that ALL exceptions are authentication related before assuming this is a + // configuration + // issue reduces risk of misidentifying errors or reporting a transient error. + val areAllExceptionsAuthExceptions: Boolean = + exceptionsThrown + .stream() + .filter { e: Exception -> e is AmazonS3Exception } + .map { s3e: Exception -> (s3e as AmazonS3Exception).statusCode } + .filter { o: Int? -> + ConnectorExceptionUtil.HTTP_AUTHENTICATION_ERROR_CODES.contains( + o, + ) + } + .count() == exceptionsThrown.size.toLong() + if (areAllExceptionsAuthExceptions) { + throw ConfigErrorException(exceptionsThrown[0].message!!, exceptionsThrown[0]) + } else { + throw RuntimeException( + "Exceptions thrown while uploading records into storage: ${exceptionsThrown.joinToString(separator = "\n")}", + ) + } + } + + /** + * Upload the file from `recordsData` to S3 and simplify the filename as .. + * + * @return the uploaded filename, which is different from the serialized buffer filename + * + */ + @Throws(IOException::class) + private fun loadDataIntoBucket(objectPath: String, recordsData: SerializableBuffer): String { + val partSize: Long = DEFAULT_PART_SIZE.toLong() + val bucket: String? = s3Config.bucketName + val partId: String = getPartId(objectPath) + val fileExtension: String = getExtension(recordsData.filename) + val fullObjectKey: String = + if (!s3Config.fileNamePattern.isNullOrBlank()) { + s3FilenameTemplateManager.applyPatternToFilename( + S3FilenameTemplateParameterObject.builder() + .partId(partId) + .recordsData(recordsData) + .objectPath(objectPath) + .fileExtension(fileExtension) + .fileNamePattern(s3Config.fileNamePattern) + .build(), + ) + } else { + objectPath + partId + fileExtension + } + val metadata: MutableMap = HashMap() + for (blobDecorator: BlobDecorator in blobDecorators) { + blobDecorator.updateMetadata(metadata, getMetadataMapping()) + } + val uploadManager: StreamTransferManager = + StreamTransferManagerFactory.create( + bucket, + fullObjectKey, + s3Client, + ) + .setPartSize(partSize) + .setUserMetadata(metadata) + .get() + .checkIntegrity(s3Config.isCheckIntegrity) + .numUploadThreads(s3Config.uploadThreadsCount) + .queueCapacity(DEFAULT_QUEUE_CAPACITY) + var succeeded: Boolean = false + + // Wrap output stream in decorators + var rawOutputStream: OutputStream = uploadManager.multiPartOutputStreams.first() + for (blobDecorator: BlobDecorator in blobDecorators) { + rawOutputStream = blobDecorator.wrap(rawOutputStream) + } + + try { + rawOutputStream.use { outputStream -> + recordsData.inputStream!!.use { dataStream -> + dataStream.transferTo(outputStream) + succeeded = true + } + } + } catch (e: Exception) { + logger.error(e) { "Failed to load data into storage $objectPath" } + throw RuntimeException(e) + } finally { + if (!succeeded) { + uploadManager.abort() + } else { + uploadManager.complete() + } + } + if (!s3Client.doesObjectExist(bucket, fullObjectKey)) { + logger.error { "Failed to upload data into storage, object $fullObjectKey not found" } + throw RuntimeException("Upload failed") + } + val newFilename: String = getFilename(fullObjectKey) + logger.info { + "Uploaded buffer file to storage: ${recordsData.filename} -> $fullObjectKey (filename: $newFilename)" + } + return newFilename + } + + /** + * Users want deterministic file names (e.g. the first file part is really foo-0.csv). Using + * UUIDs (previous approach) doesn't allow that. However, using pure integers could lead to a + * collision with an upload from another thread. We also want to be able to continue the same + * offset between attempts. So, we'll count up the existing files in the directory and use that + * as a lazy-offset, assuming airbyte manages the dir and has similar naming conventions. + * `getPartId` will be 0-indexed. + */ + @VisibleForTesting + @Synchronized + fun getPartId(objectPath: String): String { + val partCount: AtomicInteger = + partCounts.computeIfAbsent( + objectPath, + ) { + AtomicInteger(0) + } + + if (partCount.get() == 0) { + var objects: ObjectListing? + var objectCount = 0 + + val bucket: String? = s3Config.bucketName + objects = s3Client.listObjects(bucket, objectPath) + + if (objects != null) { + objectCount += objects.objectSummaries.size + while (objects != null && objects.nextMarker != null) { + objects = + s3Client.listObjects( + ListObjectsRequest() + .withBucketName(bucket) + .withPrefix(objectPath) + .withMarker(objects.nextMarker), + ) + if (objects != null) { + objectCount += objects.objectSummaries.size + } + } + } + + partCount.set(objectCount) + } + + return partCount.getAndIncrement().toString() + } + + override fun dropBucketObject(objectPath: String) { + cleanUpBucketObject(objectPath, listOf()) + } + + override fun cleanUpBucketObject( + namespace: String?, + streamName: String, + objectPath: String, + pathFormat: String + ) { + val bucket: String? = s3Config.bucketName + var objects: ObjectListing = + s3Client.listObjects( + ListObjectsRequest() + .withBucketName(bucket) + .withPrefix( + objectPath, + ) // pathFormat may use subdirectories under the objectPath to organize files + // so we need to recursively list them and filter files matching the pathFormat + .withDelimiter(""), + ) + val regexFormat: Pattern = + Pattern.compile(getRegexFormat(namespace, streamName, pathFormat)) + while (objects.objectSummaries.size > 0) { + val keysToDelete: List = + objects.objectSummaries + .stream() + .filter { obj: S3ObjectSummary -> + regexFormat + .matcher( + obj.key, + ) + .matches() + } + .map { obj: S3ObjectSummary -> + DeleteObjectsRequest.KeyVersion( + obj.key, + ) + } + .toList() + cleanUpObjects(bucket, keysToDelete) + logger.info { + "Storage bucket $objectPath has been cleaned-up (${keysToDelete.size} objects matching $regexFormat were deleted)..." + } + if (objects.isTruncated) { + objects = s3Client.listNextBatchOfObjects(objects) + } else { + break + } + } + } + + fun getRegexFormat(namespace: String?, streamName: String, pathFormat: String): String { + val namespaceStr: String = nameTransformer.getNamespace(namespace ?: "") + val streamNameStr: String = nameTransformer.getIdentifier(streamName) + return nameTransformer.applyDefaultCase( + (pathFormat + .replace(Pattern.quote(FORMAT_VARIABLE_NAMESPACE).toRegex(), namespaceStr) + .replace(Pattern.quote(FORMAT_VARIABLE_STREAM_NAME).toRegex(), streamNameStr) + .replace(Pattern.quote(FORMAT_VARIABLE_YEAR).toRegex(), "[0-9]{4}") + .replace(Pattern.quote(FORMAT_VARIABLE_MONTH).toRegex(), "[0-9]{2}") + .replace(Pattern.quote(FORMAT_VARIABLE_DAY).toRegex(), "[0-9]{2}") + .replace(Pattern.quote(FORMAT_VARIABLE_HOUR).toRegex(), "[0-9]{2}") + .replace(Pattern.quote(FORMAT_VARIABLE_MINUTE).toRegex(), "[0-9]{2}") + .replace(Pattern.quote(FORMAT_VARIABLE_SECOND).toRegex(), "[0-9]{2}") + .replace(Pattern.quote(FORMAT_VARIABLE_MILLISECOND).toRegex(), "[0-9]{4}") + .replace(Pattern.quote(FORMAT_VARIABLE_EPOCH).toRegex(), "[0-9]+") + .replace(Pattern.quote(FORMAT_VARIABLE_UUID).toRegex(), ".*") + .replace("/+".toRegex(), "/") // match part_id and extension at the end + + ".*"), + ) + } + + override fun cleanUpBucketObject(objectPath: String, stagedFiles: List) { + val bucket: String? = s3Config.bucketName + var objects: ObjectListing = s3Client.listObjects(bucket, objectPath) + while (objects.objectSummaries.size > 0) { + val keysToDelete: List = + objects.objectSummaries + .stream() + .filter { obj: S3ObjectSummary -> + stagedFiles.isEmpty() || + stagedFiles.contains( + obj.key, + ) + } + .map { obj: S3ObjectSummary -> + DeleteObjectsRequest.KeyVersion( + obj.key, + ) + } + .toList() + cleanUpObjects(bucket, keysToDelete) + logger.info { + "Storage bucket $objectPath has been cleaned-up (${keysToDelete.size} objects were deleted)..." + } + if (objects.isTruncated) { + objects = s3Client.listNextBatchOfObjects(objects) + } else { + break + } + } + } + + protected open fun cleanUpObjects( + bucket: String?, + keysToDelete: List + ) { + if (keysToDelete.isNotEmpty()) { + logger.info { + "Deleting objects ${keysToDelete.stream().map { obj: DeleteObjectsRequest.KeyVersion -> obj.key } + .toList().joinToString(separator = ", ")}" + } + s3Client.deleteObjects(DeleteObjectsRequest(bucket).withKeys(keysToDelete)) + } + } + + override fun isValidData(jsonNode: JsonNode): Boolean { + return true + } + + override fun getMetadataMapping(): Map { + return ImmutableMap.of( + AesCbcEnvelopeEncryptionBlobDecorator.ENCRYPTED_CONTENT_ENCRYPTING_KEY, + "x-amz-key", + AesCbcEnvelopeEncryptionBlobDecorator.INITIALIZATION_VECTOR, + "x-amz-iv", + ) + } + + fun uploadManifest(bucketName: String, manifestFilePath: String, manifestContents: String) { + s3Client.putObject(s3Config.bucketName, manifestFilePath, manifestContents) + } + + companion object { + const val DEFAULT_UPLOAD_THREADS: Int = 10 // The S3 cli uses 10 threads by default. + const val R2_UPLOAD_THREADS: Int = 3 + + private const val DEFAULT_QUEUE_CAPACITY: Int = DEFAULT_UPLOAD_THREADS + private const val DEFAULT_PART_SIZE: Int = 10 + private const val UPLOAD_RETRY_LIMIT: Int = 3 + private const val FORMAT_VARIABLE_NAMESPACE: String = "\${NAMESPACE}" + private const val FORMAT_VARIABLE_STREAM_NAME: String = "\${STREAM_NAME}" + private const val FORMAT_VARIABLE_YEAR: String = "\${YEAR}" + private const val FORMAT_VARIABLE_MONTH: String = "\${MONTH}" + private const val FORMAT_VARIABLE_DAY: String = "\${DAY}" + private const val FORMAT_VARIABLE_HOUR: String = "\${HOUR}" + private const val FORMAT_VARIABLE_MINUTE: String = "\${MINUTE}" + private const val FORMAT_VARIABLE_SECOND: String = "\${SECOND}" + private const val FORMAT_VARIABLE_MILLISECOND: String = "\${MILLISECOND}" + private const val FORMAT_VARIABLE_EPOCH: String = "\${EPOCH}" + private const val FORMAT_VARIABLE_UUID: String = "\${UUID}" + private const val GZ_FILE_EXTENSION: String = "gz" + @VisibleForTesting + @JvmStatic + fun getFilename(fullPath: String): String { + return fullPath.substring(fullPath.lastIndexOf("/") + 1) + } + + @VisibleForTesting + @JvmStatic + fun getExtension(filename: String): String { + val result: String = FilenameUtils.getExtension(filename) + if (result.isBlank()) { + return result + } else if ((GZ_FILE_EXTENSION == result)) { + return getExtension( + filename.substring( + 0, + filename.length - 3, + ), + ) + "." + GZ_FILE_EXTENSION + } + return ".$result" + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/SerializedBufferFactory.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/SerializedBufferFactory.kt new file mode 100644 index 0000000000000..6b97596ced3ae --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/SerializedBufferFactory.kt @@ -0,0 +1,98 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.s3 + +import io.airbyte.cdk.integrations.destination.record_buffer.BufferCreateFunction +import io.airbyte.cdk.integrations.destination.record_buffer.BufferStorage +import io.airbyte.cdk.integrations.destination.record_buffer.SerializableBuffer +import io.airbyte.cdk.integrations.destination.s3.avro.AvroSerializedBuffer +import io.airbyte.cdk.integrations.destination.s3.avro.S3AvroFormatConfig +import io.airbyte.cdk.integrations.destination.s3.csv.CsvSerializedBuffer +import io.airbyte.cdk.integrations.destination.s3.csv.S3CsvFormatConfig +import io.airbyte.cdk.integrations.destination.s3.jsonl.JsonLSerializedBuffer +import io.airbyte.cdk.integrations.destination.s3.jsonl.S3JsonlFormatConfig +import io.airbyte.cdk.integrations.destination.s3.parquet.ParquetSerializedBuffer +import io.airbyte.commons.json.Jsons +import io.github.oshai.kotlinlogging.KotlinLogging +import java.util.concurrent.Callable +import java.util.function.Function + +private val logger = KotlinLogging.logger {} + +class SerializedBufferFactory { + + companion object { + /** + * When running a + * [io.airbyte.cdk.integrations.destination.record_buffer.SerializedBufferingStrategy], it + * would usually need to instantiate new buffers when flushing data or when it receives data + * for a brand-new stream. This factory fills this need and @return the function to be + * called on such events. + * + * The factory is responsible for choosing the correct constructor function for a new + * [SerializableBuffer] that handles the correct serialized format of the data. It is + * configured by composition with another function to create a new [BufferStorage] where to + * store it. + * + * This factory determines which [S3FormatConfig] to use depending on the user provided + * @param config, The @param createStorageFunctionWithoutExtension is the constructor + * function to call when creating a new buffer where to store data. Note that we typically + * associate which format is being stored in the storage object thanks to its file + * extension. + */ + @JvmStatic + fun getCreateFunction( + config: S3DestinationConfig, + createStorageFunctionWithoutExtension: Function + ): BufferCreateFunction { + val formatConfig = config.formatConfig!! + logger.info { "S3 format config: $formatConfig" } + when (formatConfig.format) { + S3Format.AVRO -> { + val createStorageFunctionWithExtension = Callable { + createStorageFunctionWithoutExtension.apply( + formatConfig.fileExtension, + ) + } + return AvroSerializedBuffer.createFunction( + formatConfig as S3AvroFormatConfig, + createStorageFunctionWithExtension, + ) + } + S3Format.CSV -> { + val createStorageFunctionWithExtension = Callable { + createStorageFunctionWithoutExtension.apply( + formatConfig.fileExtension, + ) + } + return CsvSerializedBuffer.createFunction( + formatConfig as S3CsvFormatConfig, + createStorageFunctionWithExtension, + ) + } + S3Format.JSONL -> { + val createStorageFunctionWithExtension = Callable { + createStorageFunctionWithoutExtension.apply( + formatConfig.fileExtension, + ) + } + return JsonLSerializedBuffer.createBufferFunction( + formatConfig as S3JsonlFormatConfig, + createStorageFunctionWithExtension, + ) + } + S3Format.PARQUET -> { + // we can't choose the type of buffer storage with parquet because of how the + // underlying hadoop + // library is imposing file usage. + return ParquetSerializedBuffer.createFunction(config) + } + else -> { + throw RuntimeException("Unexpected output format: ${Jsons.serialize(config)}") + } + } + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/StorageProvider.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/StorageProvider.kt new file mode 100644 index 0000000000000..43ec503424666 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/StorageProvider.kt @@ -0,0 +1,11 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.s3 + +/** Represents storage provider type */ +enum class StorageProvider { + AWS_S3, + CF_R2 +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/WriteConfig.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/WriteConfig.kt new file mode 100644 index 0000000000000..c0170e24a8de3 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/WriteConfig.kt @@ -0,0 +1,39 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.s3 + +import io.airbyte.protocol.models.v0.DestinationSyncMode + +class WriteConfig +@JvmOverloads +constructor( + val namespace: String?, + val streamName: String, + val outputBucketPath: String, + val pathFormat: String, + val fullOutputPath: String, + val syncMode: DestinationSyncMode, + val storedFiles: MutableList = arrayListOf(), +) { + + fun addStoredFile(file: String) { + storedFiles.add(file) + } + + fun clearStoredFiles() { + storedFiles.clear() + } + + override fun toString(): String { + return "WriteConfig{" + + "streamName=$streamName" + + ", namespace=$namespace" + + ", outputBucketPath=$outputBucketPath" + + ", pathFormat=$pathFormat" + + ", fullOutputPath=$fullOutputPath" + + ", syncMode=$syncMode" + + '}' + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/AvroConstants.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/AvroConstants.kt new file mode 100644 index 0000000000000..6a7fd125a0000 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/AvroConstants.kt @@ -0,0 +1,35 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.s3.avro + +import tech.allegro.schema.json2avro.converter.JsonAvroConverter + +class AvroConstants { + + companion object { + // Field name with special character + const val DOC_KEY_VALUE_DELIMITER: String = ":" + const val DOC_KEY_ORIGINAL_NAME: String = "_airbyte_original_name" + const val AVRO_EXTRA_PROPS_FIELD: String = "_airbyte_additional_properties" + + // This set must include _ab_additional_col in source_s3/source_files_abstract/stream.py + val JSON_EXTRA_PROPS_FIELDS: Set = + setOf("_ab_additional_properties", AVRO_EXTRA_PROPS_FIELD) + + @JvmField val NAME_TRANSFORMER: AvroNameTransformer = AvroNameTransformer() + + @JvmField + val JSON_CONVERTER: JsonAvroConverter = + JsonAvroConverter.builder() + .setNameTransformer { name: String -> + NAME_TRANSFORMER.getIdentifier( + name, + ) + } + .setJsonAdditionalPropsFieldNames(JSON_EXTRA_PROPS_FIELDS) + .setAvroAdditionalPropsFieldName(AVRO_EXTRA_PROPS_FIELD) + .build() + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/AvroNameTransformer.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/AvroNameTransformer.kt new file mode 100644 index 0000000000000..028b1af85058f --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/AvroNameTransformer.kt @@ -0,0 +1,42 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.s3.avro + +import io.airbyte.cdk.integrations.destination.StandardNameTransformer +import java.util.Arrays +import java.util.Locale + +/** + * + * * An Avro name starts with [A-Za-z_], followed by [A-Za-z0-9_]. + * * An Avro namespace is a dot-separated sequence of such names. + * * Reference: https://avro.apache.org/docs/current/spec.html#names + */ +class AvroNameTransformer : StandardNameTransformer() { + override fun applyDefaultCase(input: String): String { + return super.convertStreamName(input).lowercase(Locale.getDefault()) + } + + override fun convertStreamName(input: String): String { + val normalizedName = super.convertStreamName(input) + return if (normalizedName.substring(0, 1).matches("[A-Za-z_]".toRegex())) { + normalizedName + } else { + "_$normalizedName" + } + } + + override fun getNamespace(input: String): String { + val tokens = input.split("\\.".toRegex()).dropLastWhile { it.isEmpty() }.toTypedArray() + return Arrays.stream(tokens) + .map { name: String -> + this.getIdentifier( + name, + ) + } + .toList() + .joinToString(separator = ".") + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/AvroRecordFactory.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/AvroRecordFactory.kt new file mode 100644 index 0000000000000..4b8eed3174727 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/AvroRecordFactory.kt @@ -0,0 +1,42 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.s3.avro + +import com.fasterxml.jackson.core.JsonProcessingException +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.ObjectMapper +import com.fasterxml.jackson.databind.ObjectWriter +import com.fasterxml.jackson.databind.node.ObjectNode +import io.airbyte.cdk.integrations.base.JavaBaseConstants +import io.airbyte.commons.jackson.MoreMappers +import io.airbyte.protocol.models.v0.AirbyteRecordMessage +import java.util.UUID +import org.apache.avro.Schema +import org.apache.avro.generic.GenericData +import tech.allegro.schema.json2avro.converter.JsonAvroConverter + +class AvroRecordFactory(private val schema: Schema?, private val converter: JsonAvroConverter?) { + + companion object { + private val MAPPER: ObjectMapper = MoreMappers.initMapper() + private val WRITER: ObjectWriter = MAPPER.writer() + } + + @Throws(JsonProcessingException::class) + fun getAvroRecord(id: UUID, recordMessage: AirbyteRecordMessage): GenericData.Record { + val jsonRecord = MAPPER.createObjectNode() + jsonRecord.put(JavaBaseConstants.COLUMN_NAME_AB_ID, id.toString()) + jsonRecord.put(JavaBaseConstants.COLUMN_NAME_EMITTED_AT, recordMessage.emittedAt) + jsonRecord.setAll(recordMessage.data as ObjectNode) + + return converter!!.convertToGenericDataRecord(WRITER.writeValueAsBytes(jsonRecord), schema) + } + + @Throws(JsonProcessingException::class) + fun getAvroRecord(formattedData: JsonNode?): GenericData.Record { + val bytes = WRITER.writeValueAsBytes(formattedData) + return converter!!.convertToGenericDataRecord(bytes, schema) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/AvroSerializedBuffer.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/AvroSerializedBuffer.kt new file mode 100644 index 0000000000000..205c8fc64cc72 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/AvroSerializedBuffer.kt @@ -0,0 +1,120 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.s3.avro + +import io.airbyte.cdk.integrations.destination.record_buffer.BaseSerializedBuffer +import io.airbyte.cdk.integrations.destination.record_buffer.BufferCreateFunction +import io.airbyte.cdk.integrations.destination.record_buffer.BufferStorage +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.v0.AirbyteRecordMessage +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import java.io.IOException +import java.io.OutputStream +import java.util.UUID +import java.util.concurrent.Callable +import org.apache.avro.Schema +import org.apache.avro.file.CodecFactory +import org.apache.avro.file.DataFileWriter +import org.apache.avro.generic.GenericData +import org.apache.avro.generic.GenericDatumWriter +import org.apache.commons.lang3.StringUtils + +class AvroSerializedBuffer( + bufferStorage: BufferStorage, + codecFactory: CodecFactory, + schema: Schema +) : BaseSerializedBuffer(bufferStorage) { + private val codecFactory: CodecFactory + private val schema: Schema + private val avroRecordFactory: AvroRecordFactory + private var dataFileWriter: DataFileWriter? + + init { + // disable compression stream as it is already handled by codecFactory + withCompression(false) + this.codecFactory = codecFactory + this.schema = schema + avroRecordFactory = AvroRecordFactory(schema, AvroConstants.JSON_CONVERTER) + dataFileWriter = null + } + + @Throws(IOException::class) + override fun initWriter(outputStream: OutputStream) { + dataFileWriter = + DataFileWriter(GenericDatumWriter()) + .setCodec(codecFactory) + .create(schema, outputStream) + } + + @Deprecated("Deprecated in Java") + @Throws(IOException::class) + override fun writeRecord(record: AirbyteRecordMessage) { + dataFileWriter!!.append(avroRecordFactory.getAvroRecord(UUID.randomUUID(), record)) + } + + @Throws(IOException::class) + @Suppress("DEPRECATION") + override fun writeRecord(recordString: String, airbyteMetaString: String, emittedAt: Long) { + // TODO Remove this double deserialization when S3 Destinations moves to Async. + writeRecord( + Jsons.deserialize( + recordString, + AirbyteRecordMessage::class.java, + ) + .withEmittedAt(emittedAt), + ) + } + + @Throws(IOException::class) + override fun flushWriter() { + dataFileWriter!!.flush() + } + + @Throws(IOException::class) + override fun closeWriter() { + dataFileWriter!!.close() + } + + companion object { + const val DEFAULT_SUFFIX: String = ".avro" + + fun createFunction( + config: S3AvroFormatConfig, + createStorageFunction: Callable + ): BufferCreateFunction { + val codecFactory = config.codecFactory + return BufferCreateFunction { + stream: AirbyteStreamNameNamespacePair, + catalog: ConfiguredAirbyteCatalog -> + val schemaConverter = JsonToAvroSchemaConverter() + val schema = + schemaConverter.getAvroSchema( + catalog.streams + .stream() + .filter { s: ConfiguredAirbyteStream -> + s.stream.name == stream.name && + StringUtils.equals( + s.stream.namespace, + stream.namespace, + ) + } + .findFirst() + .orElseThrow { + RuntimeException( + "No such stream ${stream.namespace}.${stream.name}" + ) + } + .stream + .jsonSchema, + stream.name, + stream.namespace, + ) + AvroSerializedBuffer(createStorageFunction.call(), codecFactory, schema) + } + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/JsonFieldNameUpdater.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/JsonFieldNameUpdater.kt new file mode 100644 index 0000000000000..d10bc42a9d3dd --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/JsonFieldNameUpdater.kt @@ -0,0 +1,39 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.avro + +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.collect.ImmutableMap +import io.airbyte.commons.json.Jsons + +/** + * This helper class is for testing only. It tracks the original and standardized names, and revert + * them when necessary, so that the tests can correctly compare the generated json with the original + * input. + */ +class JsonFieldNameUpdater(standardizedNames: Map) { + // A map from original name to standardized name. + private val standardizedNames: Map = ImmutableMap.copyOf(standardizedNames) + + fun getJsonWithOriginalFieldNames(input: JsonNode): JsonNode { + if (standardizedNames.size == 0) { + return input + } + var jsonString = Jsons.serialize(input) + for ((key, value) in standardizedNames) { + jsonString = jsonString.replace(quote(value).toRegex(), quote(key)) + } + return Jsons.deserialize(jsonString) + } + + override fun toString(): String { + return standardizedNames.toString() + } + + companion object { + private fun quote(input: String): String { + return "\"" + input + "\"" + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/JsonSchemaType.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/JsonSchemaType.kt new file mode 100644 index 0000000000000..70ebb36ff07fc --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/JsonSchemaType.kt @@ -0,0 +1,114 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.avro + +import java.util.* +import javax.annotation.Nonnull +import org.apache.avro.Schema + +/** Mapping of JsonSchema types to Avro types. */ +enum class JsonSchemaType { + STRING_V1("WellKnownTypes.json#/definitions/String", Schema.Type.STRING), + INTEGER_V1("WellKnownTypes.json#/definitions/Integer", Schema.Type.LONG), + NUMBER_V1("WellKnownTypes.json#/definitions/Number", Schema.Type.DOUBLE), + BOOLEAN_V1("WellKnownTypes.json#/definitions/Boolean", Schema.Type.BOOLEAN), + BINARY_DATA_V1("WellKnownTypes.json#/definitions/BinaryData", Schema.Type.BYTES), + DATE_V1("WellKnownTypes.json#/definitions/Date", Schema.Type.INT), + TIMESTAMP_WITH_TIMEZONE_V1( + "WellKnownTypes.json#/definitions/TimestampWithTimezone", + Schema.Type.LONG + ), + TIMESTAMP_WITHOUT_TIMEZONE_V1( + "WellKnownTypes.json#/definitions/TimestampWithoutTimezone", + Schema.Type.LONG + ), + TIME_WITH_TIMEZONE_V1("WellKnownTypes.json#/definitions/TimeWithTimezone", Schema.Type.STRING), + TIME_WITHOUT_TIMEZONE_V1( + "WellKnownTypes.json#/definitions/TimeWithoutTimezone", + Schema.Type.LONG + ), + OBJECT("object", Schema.Type.RECORD), + ARRAY("array", Schema.Type.ARRAY), + COMBINED("combined", Schema.Type.UNION), + @Deprecated("") STRING_V0("string", null, Schema.Type.STRING), + @Deprecated("") NUMBER_INT_V0("number", "integer", Schema.Type.LONG), + @Deprecated("") NUMBER_BIGINT_V0("string", "big_integer", Schema.Type.STRING), + @Deprecated("") NUMBER_FLOAT_V0("number", "float", Schema.Type.FLOAT), + @Deprecated("") NUMBER_V0("number", null, Schema.Type.DOUBLE), + @Deprecated("") INTEGER_V0("integer", null, Schema.Type.LONG), + @Deprecated("") BOOLEAN_V0("boolean", null, Schema.Type.BOOLEAN), + @Deprecated("") NULL("null", null, Schema.Type.NULL); + + @JvmField val jsonSchemaType: String + val avroType: Schema.Type + var jsonSchemaAirbyteType: String? = null + private set + + constructor(jsonSchemaType: String, jsonSchemaAirbyteType: String?, avroType: Schema.Type) { + this.jsonSchemaType = jsonSchemaType + this.jsonSchemaAirbyteType = jsonSchemaAirbyteType + this.avroType = avroType + } + + constructor(jsonSchemaType: String, avroType: Schema.Type) { + this.jsonSchemaType = jsonSchemaType + this.avroType = avroType + } + + override fun toString(): String { + return jsonSchemaType + } + + companion object { + @JvmStatic + @JvmOverloads + fun fromJsonSchemaType( + @Nonnull jsonSchemaType: String, + jsonSchemaAirbyteType: String? = null + ): JsonSchemaType { + var matchSchemaType: List? = null + // Match by Type + airbyteType + if (jsonSchemaAirbyteType != null) { + matchSchemaType = + Arrays.stream(entries.toTypedArray()) + .filter { type: JsonSchemaType -> jsonSchemaType == type.jsonSchemaType } + .filter { type: JsonSchemaType -> + jsonSchemaAirbyteType == type.jsonSchemaAirbyteType + } + .toList() + } + + // Match by Type are no results already + if (matchSchemaType == null || matchSchemaType.isEmpty()) { + matchSchemaType = + Arrays.stream(entries.toTypedArray()) + .filter { format: JsonSchemaType -> + jsonSchemaType == format.jsonSchemaType && + format.jsonSchemaAirbyteType == null + } + .toList() + } + + require(!matchSchemaType!!.isEmpty()) { + String.format( + "Unexpected jsonSchemaType - %s and jsonSchemaAirbyteType - %s", + jsonSchemaType, + jsonSchemaAirbyteType + ) + } + if (matchSchemaType.size > 1) { + throw RuntimeException( + String.format( + "Match with more than one json type! Matched types : %s, Inputs jsonSchemaType : %s, jsonSchemaAirbyteType : %s", + matchSchemaType, + jsonSchemaType, + jsonSchemaAirbyteType + ) + ) + } else { + return matchSchemaType[0] + } + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/JsonToAvroSchemaConverter.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/JsonToAvroSchemaConverter.kt new file mode 100644 index 0000000000000..1312e327c7c9d --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/JsonToAvroSchemaConverter.kt @@ -0,0 +1,714 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.s3.avro + +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.node.ArrayNode +import com.google.common.base.Preconditions +import io.airbyte.cdk.integrations.base.JavaBaseConstants +import io.airbyte.commons.util.MoreIterators +import io.github.oshai.kotlinlogging.KotlinLogging +import java.util.LinkedList +import java.util.Objects +import java.util.Optional +import java.util.function.Predicate +import java.util.stream.Collectors +import java.util.stream.Stream +import org.apache.avro.LogicalTypes +import org.apache.avro.Schema +import org.apache.avro.SchemaBuilder +import tech.allegro.schema.json2avro.converter.AdditionalPropertyField + +private val logger = KotlinLogging.logger {} + +/** + * The main function of this class is to convert a JsonSchema to Avro schema. It can also + * standardize schema names, and keep track of a mapping from the original names to the standardized + * ones, which is needed for unit tests.

    For limitations of this converter, see the README + * of this connector: https://docs.airbyte.io/integrations/destinations/s3#avro + */ +class JsonToAvroSchemaConverter() { + private val standardizedNames: MutableMap = HashMap() + + fun getStandardizedNames(): Map { + return standardizedNames + } + + /** @return Avro schema based on the input `jsonSchema`. */ + fun getAvroSchema(jsonSchema: JsonNode, streamName: String, namespace: String?): Schema { + return getAvroSchema( + jsonSchema, + streamName, + namespace, + appendAirbyteFields = true, + appendExtraProps = true, + addStringToLogicalTypes = true, + isRootNode = true + ) + } + + /** + * @param appendAirbyteFields Add default airbyte fields (e.g. _airbyte_id) to the output Avro + * schema. + * @param appendExtraProps Add default additional property field to the output Avro schema. + * @param addStringToLogicalTypes Default logical type field to string. + * @param isRootNode Whether it is the root field in the input Json schema. + * @return Avro schema based on the input `jsonSchema`. + */ + fun getAvroSchema( + jsonSchema: JsonNode, + fieldName: String, + fieldNamespace: String?, + appendAirbyteFields: Boolean, + appendExtraProps: Boolean, + addStringToLogicalTypes: Boolean, + isRootNode: Boolean + ): Schema { + val stdName: String = AvroConstants.NAME_TRANSFORMER.getIdentifier(fieldName) + val stdNamespace: String? = + if (fieldNamespace != null) AvroConstants.NAME_TRANSFORMER.getNamespace(fieldNamespace) + else null + val builder: SchemaBuilder.RecordBuilder = SchemaBuilder.record(stdName) + if (stdName != fieldName) { + standardizedNames[fieldName] = stdName + logger.warn { + "Schema name \"$fieldName\" contains illegal character(s) and is standardized to \"$stdName\"" + } + builder.doc( + "${AvroConstants.DOC_KEY_ORIGINAL_NAME}${AvroConstants.DOC_KEY_VALUE_DELIMITER}$fieldName" + ) + } + if (stdNamespace != null) { + builder.namespace(stdNamespace) + } + + val properties: JsonNode? = jsonSchema.get("properties") + // object field with no "properties" will be handled by the default additional properties + // field during object conversion; so it is fine if there is no "properties" + val subfieldNames: List = + if (properties == null) emptyList() + else ArrayList(MoreIterators.toList(properties.fieldNames())) + + val assembler: SchemaBuilder.FieldAssembler = builder.fields() + + if (appendAirbyteFields) { + assembler + .name(JavaBaseConstants.COLUMN_NAME_AB_ID) + .type( + UUID_SCHEMA, + ) + .noDefault() + assembler + .name(JavaBaseConstants.COLUMN_NAME_EMITTED_AT) + .type(TIMESTAMP_MILLIS_SCHEMA) + .noDefault() + } + + for (subfieldName: String in subfieldNames) { + // ignore additional properties fields, which will be consolidated + // into one field at the end + if (AvroConstants.JSON_EXTRA_PROPS_FIELDS.contains(subfieldName)) { + continue + } + + val stdFieldName: String = AvroConstants.NAME_TRANSFORMER.getIdentifier(subfieldName) + val subfieldDefinition: JsonNode = properties!!.get(subfieldName) + val fieldBuilder: SchemaBuilder.FieldBuilder = assembler.name(stdFieldName) + if (stdFieldName != subfieldName) { + standardizedNames[subfieldName] = stdFieldName + logger.warn { + "Field name \"$subfieldName\" contains illegal character(s) and is standardized to \"$stdFieldName\"" + } + fieldBuilder.doc( + "${AvroConstants.DOC_KEY_ORIGINAL_NAME}${AvroConstants.DOC_KEY_VALUE_DELIMITER}$subfieldName" + ) + } + val subfieldNamespace: String? = + if ( + isRootNode // Omit the namespace for root level fields, because it is directly + // assigned in the builder above. + // This may not be the correct choice. + ) null + else (if (stdNamespace == null) stdName else ("$stdNamespace.$stdName")) + fieldBuilder + .type( + parseJsonField( + subfieldName, + subfieldNamespace, + subfieldDefinition, + appendExtraProps, + addStringToLogicalTypes, + ), + ) + .withDefault(null) + } + + if (appendExtraProps) { + // support additional properties in one field + assembler + .name(AvroConstants.AVRO_EXTRA_PROPS_FIELD) + .type(AdditionalPropertyField.FIELD_SCHEMA) + .withDefault(null) + } + + return assembler.endRecord() + } + + /** + * Generate Avro schema for a single Json field type. For example: + * + *
     "number" -> ["double"] 
    * + */ + @Suppress("DEPRECATION") + private fun parseSingleType( + fieldName: String, + fieldNamespace: String?, + fieldType: JsonSchemaType, + fieldDefinition: JsonNode, + appendExtraProps: Boolean, + addStringToLogicalTypes: Boolean + ): Schema { + Preconditions.checkState( + fieldType != JsonSchemaType.NULL, + "Null types should have been filtered out", + ) + + // the additional properties fields are filtered out and never passed into this method; + // but this method is able to handle them for completeness + if (AvroConstants.JSON_EXTRA_PROPS_FIELDS.contains(fieldName)) { + return AdditionalPropertyField.FIELD_SCHEMA + } + + val fieldSchema: Schema + when (fieldType) { + JsonSchemaType.INTEGER_V1, + JsonSchemaType.NUMBER_V1, + JsonSchemaType.BOOLEAN_V1, + JsonSchemaType.STRING_V1, + JsonSchemaType.TIME_WITH_TIMEZONE_V1, + JsonSchemaType.BINARY_DATA_V1 -> fieldSchema = Schema.create(fieldType.avroType) + JsonSchemaType.DATE_V1 -> + fieldSchema = + LogicalTypes.date() + .addToSchema( + Schema.create( + Schema.Type.INT, + ), + ) + JsonSchemaType.TIMESTAMP_WITH_TIMEZONE_V1, + JsonSchemaType.TIMESTAMP_WITHOUT_TIMEZONE_V1 -> + fieldSchema = + LogicalTypes.timestampMicros().addToSchema(Schema.create(Schema.Type.LONG)) + JsonSchemaType.TIME_WITHOUT_TIMEZONE_V1 -> + fieldSchema = + LogicalTypes.timeMicros() + .addToSchema( + Schema.create(Schema.Type.LONG), + ) + JsonSchemaType.INTEGER_V0, + JsonSchemaType.NUMBER_V0, + JsonSchemaType.NUMBER_INT_V0, + JsonSchemaType.NUMBER_BIGINT_V0, + JsonSchemaType.NUMBER_FLOAT_V0, + JsonSchemaType.BOOLEAN_V0 -> fieldSchema = Schema.create(fieldType.avroType) + JsonSchemaType.STRING_V0 -> { + if (fieldDefinition.has("format")) { + val format: String = fieldDefinition.get("format").asText() + fieldSchema = + when (format) { + "date-time" -> + LogicalTypes.timestampMicros() + .addToSchema( + Schema.create(Schema.Type.LONG), + ) + "date" -> + LogicalTypes.date().addToSchema(Schema.create(Schema.Type.INT)) + "time" -> + LogicalTypes.timeMicros() + .addToSchema(Schema.create(Schema.Type.LONG)) + else -> Schema.create(fieldType.avroType) + } + } else { + fieldSchema = Schema.create(fieldType.avroType) + } + } + JsonSchemaType.COMBINED -> { + val combinedRestriction: Optional = + getCombinedRestriction(fieldDefinition) + val unionTypes: List = + parseJsonTypeUnion( + fieldName, + fieldNamespace, + combinedRestriction.get() as ArrayNode, + appendExtraProps, + addStringToLogicalTypes, + ) + fieldSchema = createUnionAndCheckLongTypesDuplications(unionTypes) + } + JsonSchemaType.ARRAY -> { + val items: JsonNode? = fieldDefinition.get("items") + if (items == null) { + logger.warn { + "Array field \"$fieldName\" does not specify the items type. It will default to an array of strings" + } + fieldSchema = + Schema.createArray( + Schema.createUnion( + NULL_SCHEMA, + STRING_SCHEMA, + ), + ) + } else if (items.isObject) { + if ( + (items.has("type") && !items.get("type").isNull) || + items.has("\$ref") && !items.get("\$ref").isNull + ) { + // Objects inside Json array has no names. We name it with the ".items" + // suffix. + val elementFieldName: String = "$fieldName.items" + fieldSchema = + Schema.createArray( + parseJsonField( + elementFieldName, + fieldNamespace, + items, + appendExtraProps, + addStringToLogicalTypes, + ), + ) + } else { + logger.warn { + "Array field \"$fieldName\" does not specify the items type. it will default to an array of strings" + } + fieldSchema = + Schema.createArray( + Schema.createUnion( + NULL_SCHEMA, + STRING_SCHEMA, + ), + ) + } + } else if (items.isArray) { + val arrayElementTypes: MutableList = + parseJsonTypeUnion( + fieldName, + fieldNamespace, + items as ArrayNode, + appendExtraProps, + addStringToLogicalTypes, + ) + arrayElementTypes.add(0, NULL_SCHEMA) + fieldSchema = Schema.createArray(Schema.createUnion(arrayElementTypes)) + } else { + logger.warn { + "Array field \"$fieldName\" has invalid items specification: $items. It will default to an array of strings." + } + fieldSchema = + Schema.createArray( + Schema.createUnion( + NULL_SCHEMA, + STRING_SCHEMA, + ), + ) + } + } + JsonSchemaType.OBJECT -> + fieldSchema = + getAvroSchema( + fieldDefinition, + fieldName, + fieldNamespace, + false, + appendExtraProps, + addStringToLogicalTypes, + false, + ) + else -> { + logger.warn { + "Field \"$fieldName\" has invalid type definition: $fieldDefinition. It will default to string." + } + fieldSchema = Schema.createUnion(NULL_SCHEMA, STRING_SCHEMA) + } + } + return fieldSchema + } + + /** + * Take in a union of Json field definitions, and generate Avro field schema unions. For + * example: + * + *
     ["number", { ... }] -> ["double", { ... }] 
    * + */ + private fun parseJsonTypeUnion( + fieldName: String, + fieldNamespace: String?, + types: ArrayNode, + appendExtraProps: Boolean, + addStringToLogicalTypes: Boolean + ): MutableList { + val schemas: List = + MoreIterators.toList(types.elements()) + .stream() + .flatMap { definition: JsonNode -> + getSchemas( + fieldName = fieldName, + fieldNamespace = fieldNamespace, + definition = definition, + appendExtraProps = appendExtraProps, + addStringToLogicalTypes = addStringToLogicalTypes + ) + } + .distinct() + .collect(Collectors.toList()) + + return mergeRecordSchemas(fieldName, fieldNamespace, schemas, appendExtraProps) + } + + private fun getSchemas( + fieldName: String, + fieldNamespace: String?, + definition: JsonNode, + appendExtraProps: Boolean, + addStringToLogicalTypes: Boolean + ): Stream? { + return getNonNullTypes(fieldName, definition).stream().flatMap { type: JsonSchemaType -> + getSchema( + fieldName = fieldName, + fieldNamespace = fieldNamespace, + type = type, + definition = definition, + appendExtraProps = appendExtraProps, + addStringToLogicalTypes = addStringToLogicalTypes + ) + } + } + + private fun getSchema( + fieldName: String, + fieldNamespace: String?, + type: JsonSchemaType, + definition: JsonNode, + appendExtraProps: Boolean, + addStringToLogicalTypes: Boolean + ): Stream? { + val namespace: String = + if (fieldNamespace == null) fieldName else "$fieldNamespace.$fieldName" + val singleFieldSchema: Schema = + parseSingleType( + fieldName, + namespace, + type, + definition, + appendExtraProps, + addStringToLogicalTypes, + ) + if (singleFieldSchema.isUnion) { + return singleFieldSchema.types.stream() + } else { + return Stream.of( + singleFieldSchema, + ) + } + } + + /** + * If there are multiple object fields, those fields are combined into one Avro record. This is + * because Avro does not allow specifying a tuple of types (i.e. the first element is type x, + * the second element is type y, and so on). For example, the following Json field types: + * + *
     [ { "type": "object", "properties": { "id": { "type": "integer" } } }, { "type":
    +     * "object", "properties": { "id": { "type": "string" } "message": { "type": "string" } } } ]
    +     * 
    * + * + * is converted to this Avro schema: + * + *
     { "type": "record", "fields": [ { "name": "id", "type": ["int", "string"] }, { "name":
    +     * "message", "type": "string" } ] } 
    * + */ + private fun mergeRecordSchemas( + fieldName: String, + fieldNamespace: String?, + schemas: List, + appendExtraProps: Boolean + ): MutableList { + val recordFieldSchemas: LinkedHashMap> = LinkedHashMap() + val recordFieldDocs: MutableMap> = HashMap() + + val mergedSchemas: MutableList = + schemas + .stream() // gather record schemas to construct a single record schema later on + .peek { schema: Schema -> + if (schema.type == Schema.Type.RECORD) { + for (field: Schema.Field in schema.fields) { + recordFieldSchemas.putIfAbsent( + field.name(), + LinkedList(), + ) + recordFieldSchemas[field.name()]!!.add(field.schema()) + if (field.doc() != null) { + recordFieldDocs.putIfAbsent( + field.name(), + LinkedList(), + ) + recordFieldDocs[field.name()]!!.add(field.doc()) + } + } + } + } // remove record schemas because they will be merged into one + .filter { schema: Schema -> schema.type != Schema.Type.RECORD } + .collect(Collectors.toList()) + + // create one record schema from all the record fields + if (recordFieldSchemas.isNotEmpty()) { + val builder: SchemaBuilder.RecordBuilder = SchemaBuilder.record(fieldName) + if (fieldNamespace != null) { + builder.namespace(fieldNamespace) + } + + val assembler: SchemaBuilder.FieldAssembler = builder.fields() + + for (entry: Map.Entry> in recordFieldSchemas.entries) { + val subfieldName: String = entry.key + // ignore additional properties fields, which will be consolidated + // into one field at the end + if (AvroConstants.JSON_EXTRA_PROPS_FIELDS.contains(subfieldName)) { + continue + } + + val subfieldBuilder: SchemaBuilder.FieldBuilder = + assembler.name(subfieldName) + val subfieldDocs: List = + recordFieldDocs.getOrDefault(subfieldName, emptyList()) + if (subfieldDocs.isNotEmpty()) { + subfieldBuilder.doc(subfieldDocs.joinToString(separator = "; ")) + } + val subfieldSchemas: List = + entry.value + .stream() + .flatMap { schema: Schema -> + schema.types + .stream() // filter out null and add it later on as the first + // element + .filter { s: Schema -> s != NULL_SCHEMA } + } + .distinct() + .collect(Collectors.toList()) + val subfieldNamespace: String = + if (fieldNamespace == null) fieldName else ("$fieldNamespace.$fieldName") + // recursively merge schemas of a subfield because they may include multiple record + // schemas as well + val mergedSubfieldSchemas: MutableList = + mergeRecordSchemas( + subfieldName, + subfieldNamespace, + subfieldSchemas, + appendExtraProps, + ) + mergedSubfieldSchemas.add(0, NULL_SCHEMA) + subfieldBuilder.type(Schema.createUnion(mergedSubfieldSchemas)).withDefault(null) + } + + if (appendExtraProps) { + // add back additional properties + assembler + .name(AvroConstants.AVRO_EXTRA_PROPS_FIELD) + .type(AdditionalPropertyField.FIELD_SCHEMA) + .withDefault(null) + } + mergedSchemas.add(assembler.endRecord()) + } + + return mergedSchemas + } + + /** + * Take in a Json field definition, and generate a nullable Avro field schema. For example: + * + *
     {"type": ["number", { ... }]} -> ["null", "double", { ... }] 
    * + */ + fun parseJsonField( + fieldName: String, + fieldNamespace: String?, + fieldDefinition: JsonNode, + appendExtraProps: Boolean, + addStringToLogicalTypes: Boolean + ): Schema { + // Filter out null types, which will be added back in the end. + val nonNullFieldTypes: MutableList = + getNonNullTypes(fieldName, fieldDefinition) + .stream() + .flatMap { fieldType: JsonSchemaType -> + val singleFieldSchema: Schema = + parseSingleType( + fieldName, + fieldNamespace, + fieldType, + fieldDefinition, + appendExtraProps, + addStringToLogicalTypes, + ) + if (singleFieldSchema.isUnion) { + return@flatMap singleFieldSchema.types.stream() + } else { + return@flatMap Stream.of( + singleFieldSchema, + ) + } + } + .distinct() + .collect(Collectors.toList()) + + if (nonNullFieldTypes.isEmpty()) { + return Schema.create(Schema.Type.NULL) + } else { + // Mark every field as nullable to prevent missing value exceptions from Avro / Parquet. + if (!nonNullFieldTypes.contains(NULL_SCHEMA)) { + nonNullFieldTypes.add(0, NULL_SCHEMA) + } + // Logical types are converted to a union of logical type itself and string. The purpose + // is to + // default the logical type field to a string, if the value of the logical type field is + // invalid and + // cannot be properly processed. + if ( + ((nonNullFieldTypes.stream().anyMatch { schema: Schema -> + schema.logicalType != null + }) && (!nonNullFieldTypes.contains(STRING_SCHEMA)) && addStringToLogicalTypes) + ) { + nonNullFieldTypes.add(STRING_SCHEMA) + } + return Schema.createUnion(nonNullFieldTypes) + } + } + + /** + * Method checks unionTypes list for content. If we have both "long" and "long-timestamp" types + * then it keeps the "long" only. Need to do it for Schema creation otherwise it would fail with + * a duplicated types exception. + * + * @param unionTypes + * - list of union types + * @return new Schema + */ + private fun createUnionAndCheckLongTypesDuplications(unionTypes: List): Schema { + val isALong: Predicate = Predicate { type: Schema -> type.type == Schema.Type.LONG } + val isPlainLong: Predicate = + isALong.and { type: Schema -> + Objects.isNull( + type.logicalType, + ) + } + val isTimestampMicrosLong: Predicate = + isALong.and { type: Schema -> + Objects.nonNull( + type.logicalType, + ) && ("timestamp-micros" == type.logicalType.name) + } + + val hasPlainLong: Boolean = unionTypes.stream().anyMatch(isPlainLong) + val hasTimestampMicrosLong: Boolean = unionTypes.stream().anyMatch(isTimestampMicrosLong) + val removeTimestampType: Predicate = Predicate { type: Schema -> + !(hasPlainLong && + hasTimestampMicrosLong && + isTimestampMicrosLong.test( + type, + )) + } + return Schema.createUnion( + unionTypes + .stream() + .filter(removeTimestampType) + .collect( + Collectors.toList(), + ), + ) + } + + companion object { + private const val REFERENCE_TYPE: String = "\$ref" + private const val TYPE: String = "type" + private const val AIRBYTE_TYPE: String = "airbyte_type" + private val UUID_SCHEMA: Schema = + LogicalTypes.uuid().addToSchema(Schema.create(Schema.Type.STRING)) + private val NULL_SCHEMA: Schema = Schema.create(Schema.Type.NULL) + private val STRING_SCHEMA: Schema = Schema.create(Schema.Type.STRING) + + private val TIMESTAMP_MILLIS_SCHEMA: Schema = + LogicalTypes.timestampMillis().addToSchema(Schema.create(Schema.Type.LONG)) + + @Suppress("DEPRECATION") + fun getNonNullTypes(fieldName: String?, fieldDefinition: JsonNode): List { + return getTypes(fieldName, fieldDefinition) + .stream() + .filter { type: JsonSchemaType -> type != JsonSchemaType.NULL } + .collect( + Collectors.toList(), + ) + } + + /** When no type or $ref are specified, it will default to string. */ + fun getTypes(fieldName: String?, fieldDefinition: JsonNode): List { + val combinedRestriction: Optional = getCombinedRestriction(fieldDefinition) + if (combinedRestriction.isPresent) { + return listOf(JsonSchemaType.COMBINED) + } + + val typeProperty: JsonNode? = fieldDefinition.get(TYPE) + val referenceType: JsonNode? = fieldDefinition.get(REFERENCE_TYPE) + val airbyteType: String? = fieldDefinition.get(AIRBYTE_TYPE)?.asText() + + if (typeProperty != null && typeProperty.isArray) { + return MoreIterators.toList(typeProperty.elements()) + .stream() + .map { s: JsonNode -> + JsonSchemaType.fromJsonSchemaType( + s.asText(), + ) + } + .collect(Collectors.toList()) + } + + if (hasTextValue(typeProperty)) { + return listOf( + JsonSchemaType.fromJsonSchemaType( + typeProperty!!.asText(), + airbyteType, + ), + ) + } + + if (hasTextValue(referenceType)) { + return listOf( + JsonSchemaType.fromJsonSchemaType( + referenceType!!.asText(), + airbyteType, + ), + ) + } + + logger.warn { + "Field \"$fieldName\" has unexpected type $referenceType. It will default to string." + } + return listOf(JsonSchemaType.STRING_V1) + } + + private fun hasTextValue(value: JsonNode?): Boolean { + return (value != null) && !value.isNull && value.isTextual + } + + fun getCombinedRestriction(fieldDefinition: JsonNode): Optional { + if (fieldDefinition.has("anyOf")) { + return Optional.of(fieldDefinition.get("anyOf")) + } + if (fieldDefinition.has("allOf")) { + return Optional.of(fieldDefinition.get("allOf")) + } + if (fieldDefinition.has("oneOf")) { + return Optional.of(fieldDefinition.get("oneOf")) + } + return Optional.empty() + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/S3AvroFormatConfig.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/S3AvroFormatConfig.kt new file mode 100644 index 0000000000000..5a96b66a27d9e --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/S3AvroFormatConfig.kt @@ -0,0 +1,121 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.avro + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.destination.s3.S3Format +import io.airbyte.cdk.integrations.destination.s3.S3FormatConfig +import org.apache.avro.file.CodecFactory + +class S3AvroFormatConfig : S3FormatConfig { + val codecFactory: CodecFactory + + override val fileExtension: String = DEFAULT_SUFFIX + + constructor(codecFactory: CodecFactory) { + this.codecFactory = codecFactory + } + + constructor(formatConfig: JsonNode) { + this.codecFactory = parseCodecConfig(formatConfig["compression_codec"]) + } + + override val format: S3Format + get() = S3Format.AVRO + + enum class CompressionCodec(private val configValue: String) { + NULL("no compression"), + DEFLATE("deflate"), + BZIP2("bzip2"), + XZ("xz"), + ZSTANDARD("zstandard"), + SNAPPY("snappy"); + + companion object { + fun fromConfigValue(configValue: String): CompressionCodec { + for (codec in entries) { + if (configValue.equals(codec.configValue, ignoreCase = true)) { + return codec + } + } + throw IllegalArgumentException("Unknown codec config value: $configValue") + } + } + } + + companion object { + @JvmStatic val DEFAULT_SUFFIX: String = ".avro" + + @JvmStatic + fun parseCodecConfig(compressionCodecConfig: JsonNode?): CodecFactory { + if (compressionCodecConfig == null || compressionCodecConfig.isNull) { + return CodecFactory.nullCodec() + } + + val codecConfig = compressionCodecConfig["codec"] + if (codecConfig == null || codecConfig.isNull || !codecConfig.isTextual) { + return CodecFactory.nullCodec() + } + val codecType = codecConfig.asText() + val codec = CompressionCodec.fromConfigValue(codecConfig.asText()) + when (codec) { + CompressionCodec.NULL -> { + return CodecFactory.nullCodec() + } + CompressionCodec.DEFLATE -> { + val compressionLevel = getCompressionLevel(compressionCodecConfig, 0, 0, 9) + return CodecFactory.deflateCodec(compressionLevel) + } + CompressionCodec.BZIP2 -> { + return CodecFactory.bzip2Codec() + } + CompressionCodec.XZ -> { + val compressionLevel = getCompressionLevel(compressionCodecConfig, 6, 0, 9) + return CodecFactory.xzCodec(compressionLevel) + } + CompressionCodec.ZSTANDARD -> { + val compressionLevel = getCompressionLevel(compressionCodecConfig, 3, -5, 22) + val includeChecksum = getIncludeChecksum(compressionCodecConfig, false) + return CodecFactory.zstandardCodec(compressionLevel, includeChecksum) + } + CompressionCodec.SNAPPY -> { + return CodecFactory.snappyCodec() + } + else -> { + throw IllegalArgumentException("Unsupported compression codec: $codecType") + } + } + } + + fun getCompressionLevel( + compressionCodecConfig: JsonNode, + defaultLevel: Int, + minLevel: Int, + maxLevel: Int + ): Int { + val levelConfig = compressionCodecConfig["compression_level"] + if (levelConfig == null || levelConfig.isNull || !levelConfig.isIntegralNumber) { + return defaultLevel + } + val level = levelConfig.asInt() + require(!(level < minLevel || level > maxLevel)) { + String.format( + "Invalid compression level: %d, expected an integer in range [%d, %d]", + level, + minLevel, + maxLevel + ) + } + return level + } + + fun getIncludeChecksum(compressionCodecConfig: JsonNode, defaultValue: Boolean): Boolean { + val checksumConfig = compressionCodecConfig["include_checksum"] + if (checksumConfig == null || checksumConfig.isNumber || !checksumConfig.isBoolean) { + return defaultValue + } + return checksumConfig.asBoolean() + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/S3AvroWriter.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/S3AvroWriter.kt new file mode 100644 index 0000000000000..42960a0030c82 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/S3AvroWriter.kt @@ -0,0 +1,111 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.avro + +import alex.mojaki.s3upload.MultiPartOutputStream +import alex.mojaki.s3upload.StreamTransferManager +import com.amazonaws.services.s3.AmazonS3 +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfig +import io.airbyte.cdk.integrations.destination.s3.S3Format +import io.airbyte.cdk.integrations.destination.s3.template.S3FilenameTemplateParameterObject.Companion.builder +import io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory.create +import io.airbyte.cdk.integrations.destination.s3.writer.BaseS3Writer +import io.airbyte.cdk.integrations.destination.s3.writer.DestinationFileWriter +import io.airbyte.protocol.models.v0.AirbyteRecordMessage +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import java.io.IOException +import java.sql.Timestamp +import java.util.* +import org.apache.avro.Schema +import org.apache.avro.file.DataFileWriter +import org.apache.avro.generic.GenericData +import org.apache.avro.generic.GenericDatumWriter +import org.slf4j.Logger +import org.slf4j.LoggerFactory +import tech.allegro.schema.json2avro.converter.JsonAvroConverter + +class S3AvroWriter( + config: S3DestinationConfig, + s3Client: AmazonS3, + configuredStream: ConfiguredAirbyteStream, + uploadTimestamp: Timestamp?, + schema: Schema?, + converter: JsonAvroConverter? +) : BaseS3Writer(config, s3Client, configuredStream), DestinationFileWriter { + private val avroRecordFactory: AvroRecordFactory + private val uploadManager: StreamTransferManager + private val outputStream: MultiPartOutputStream + private val dataFileWriter: DataFileWriter + override val outputPath: String + override val fileLocation: String + + init { + val outputFilename: String = + BaseS3Writer.Companion.determineOutputFilename( + builder() + .timestamp(uploadTimestamp) + .s3Format(S3Format.AVRO) + .fileExtension(S3Format.AVRO.fileExtension) + .fileNamePattern(config.fileNamePattern) + .build() + ) + + outputPath = java.lang.String.join("/", outputPrefix, outputFilename) + + LOGGER.info( + "Full S3 path for stream '{}': s3://{}/{}", + stream.name, + config.bucketName, + outputPath + ) + fileLocation = String.format("gs://%s/%s", config.bucketName, outputPath) + + this.avroRecordFactory = AvroRecordFactory(schema, converter) + this.uploadManager = create(config.bucketName, outputPath, s3Client).get() + // We only need one output stream as we only have one input stream. This is reasonably + // performant. + this.outputStream = uploadManager.multiPartOutputStreams[0] + + val formatConfig = config.formatConfig as S3AvroFormatConfig + // The DataFileWriter always uses binary encoding. + // If json encoding is needed in the future, use the GenericDatumWriter directly. + this.dataFileWriter = + DataFileWriter(GenericDatumWriter()) + .setCodec(formatConfig.codecFactory) + .create(schema, outputStream) + } + + @Throws(IOException::class) + override fun write(id: UUID, recordMessage: AirbyteRecordMessage) { + dataFileWriter.append(avroRecordFactory.getAvroRecord(id, recordMessage)) + } + + @Throws(IOException::class) + override fun closeWhenSucceed() { + dataFileWriter.close() + outputStream.close() + uploadManager.complete() + } + + @Throws(IOException::class) + override fun closeWhenFail() { + dataFileWriter.close() + outputStream.close() + uploadManager.abort() + } + + override val fileFormat: S3Format? + get() = S3Format.AVRO + + @Throws(IOException::class) + override fun write(formattedData: JsonNode) { + val record = avroRecordFactory.getAvroRecord(formattedData) + dataFileWriter.append(record) + } + + companion object { + protected val LOGGER: Logger = LoggerFactory.getLogger(S3AvroWriter::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/constant/S3Constants.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/constant/S3Constants.kt new file mode 100644 index 0000000000000..d0490e024a230 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/constant/S3Constants.kt @@ -0,0 +1,23 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.s3.constant + +class S3Constants { + companion object { + const val S_3_BUCKET_PATH: String = "s3_bucket_path" + const val FILE_NAME_PATTERN: String = "file_name_pattern" + const val S_3_PATH_FORMAT: String = "s3_path_format" + const val S_3_ENDPOINT: String = "s3_endpoint" + const val ACCESS_KEY_ID: String = "access_key_id" + const val S_3_ACCESS_KEY_ID: String = "s3_access_key_id" + const val S_3_SECRET_ACCESS_KEY: String = "s3_secret_access_key" + const val SECRET_ACCESS_KEY: String = "secret_access_key" + const val S_3_BUCKET_NAME: String = "s3_bucket_name" + const val S_3_BUCKET_REGION: String = "s3_bucket_region" + + // r2 requires account_id + const val ACCOUNT_ID: String = "account_id" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/credential/BlobStorageCredentialConfig.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/credential/BlobStorageCredentialConfig.kt new file mode 100644 index 0000000000000..6f9aa06c5fbcf --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/credential/BlobStorageCredentialConfig.kt @@ -0,0 +1,8 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.credential + +interface BlobStorageCredentialConfig { + val credentialType: CredentialType +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/credential/S3AWSDefaultProfileCredentialConfig.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/credential/S3AWSDefaultProfileCredentialConfig.kt new file mode 100644 index 0000000000000..fdd006fdeb346 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/credential/S3AWSDefaultProfileCredentialConfig.kt @@ -0,0 +1,15 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.credential + +import com.amazonaws.auth.AWSCredentialsProvider +import com.amazonaws.auth.DefaultAWSCredentialsProviderChain + +class S3AWSDefaultProfileCredentialConfig : S3CredentialConfig { + override val credentialType: S3CredentialType + get() = S3CredentialType.DEFAULT_PROFILE + + override val s3CredentialsProvider: AWSCredentialsProvider + get() = DefaultAWSCredentialsProviderChain() +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/credential/S3AccessKeyCredentialConfig.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/credential/S3AccessKeyCredentialConfig.kt new file mode 100644 index 0000000000000..f683027a637f1 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/credential/S3AccessKeyCredentialConfig.kt @@ -0,0 +1,21 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.credential + +import com.amazonaws.auth.AWSCredentials +import com.amazonaws.auth.AWSCredentialsProvider +import com.amazonaws.auth.AWSStaticCredentialsProvider +import com.amazonaws.auth.BasicAWSCredentials + +class S3AccessKeyCredentialConfig(val accessKeyId: String?, val secretAccessKey: String?) : + S3CredentialConfig { + override val credentialType: S3CredentialType + get() = S3CredentialType.ACCESS_KEY + + override val s3CredentialsProvider: AWSCredentialsProvider + get() { + val awsCreds: AWSCredentials = BasicAWSCredentials(accessKeyId, secretAccessKey) + return AWSStaticCredentialsProvider(awsCreds) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/credential/S3CredentialConfig.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/credential/S3CredentialConfig.kt new file mode 100644 index 0000000000000..a72c38f553401 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/credential/S3CredentialConfig.kt @@ -0,0 +1,10 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.credential + +import com.amazonaws.auth.AWSCredentialsProvider + +interface S3CredentialConfig : BlobStorageCredentialConfig { + val s3CredentialsProvider: AWSCredentialsProvider +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/credential/S3CredentialType.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/credential/S3CredentialType.kt new file mode 100644 index 0000000000000..fcbe5eead5831 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/credential/S3CredentialType.kt @@ -0,0 +1,9 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.credential + +enum class S3CredentialType { + ACCESS_KEY, + DEFAULT_PROFILE +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/credential/S3InstanceProfileCredentialConfig.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/credential/S3InstanceProfileCredentialConfig.kt new file mode 100644 index 0000000000000..f2897d22f257a --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/credential/S3InstanceProfileCredentialConfig.kt @@ -0,0 +1,15 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.credential + +import com.amazonaws.auth.AWSCredentialsProvider +import com.amazonaws.auth.InstanceProfileCredentialsProvider + +class S3InstanceProfileCredentialConfig : S3CredentialConfig { + override val credentialType: S3CredentialType + get() = S3CredentialType.DEFAULT_PROFILE + + override val s3CredentialsProvider: AWSCredentialsProvider + get() = InstanceProfileCredentialsProvider(false) +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/BaseSheetGenerator.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/BaseSheetGenerator.kt new file mode 100644 index 0000000000000..d77cae03ec1f2 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/BaseSheetGenerator.kt @@ -0,0 +1,39 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.csv + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.protocol.models.v0.AirbyteRecordMessage +import java.util.* + +/** + * CSV data row = ID column + timestamp column + record columns. This class takes care of the first + * two columns, which is shared by downstream implementations. + */ +abstract class BaseSheetGenerator : CsvSheetGenerator { + override fun getDataRow(id: UUID, recordMessage: AirbyteRecordMessage): List { + val data: MutableList = LinkedList() + data.add(id) + data.add(recordMessage.emittedAt) + data.addAll(getRecordColumns(recordMessage.data)!!) + return data + } + + override fun getDataRow(formattedData: JsonNode): List { + return LinkedList(getRecordColumns(formattedData)) + } + + override fun getDataRow( + id: UUID, + formattedString: String, + emittedAt: Long, + airbyteMetaString: String + ): List { + // TODO: Make this abstract or default if No-op is intended in NoFlatteningSheetGenerator or + // RootLevelFlatteningSheetGenerator + throw UnsupportedOperationException("Not implemented in BaseSheetGenerator") + } + + abstract fun getRecordColumns(json: JsonNode): List? +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/CsvSerializedBuffer.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/CsvSerializedBuffer.kt new file mode 100644 index 0000000000000..77c850337738f --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/CsvSerializedBuffer.kt @@ -0,0 +1,162 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.s3.csv + +import io.airbyte.cdk.integrations.destination.record_buffer.BaseSerializedBuffer +import io.airbyte.cdk.integrations.destination.record_buffer.BufferCreateFunction +import io.airbyte.cdk.integrations.destination.record_buffer.BufferStorage +import io.airbyte.cdk.integrations.destination.s3.util.CompressionType +import io.airbyte.protocol.models.v0.AirbyteRecordMessage +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import io.github.oshai.kotlinlogging.KotlinLogging +import java.io.IOException +import java.io.OutputStream +import java.io.PrintWriter +import java.nio.charset.StandardCharsets +import java.util.UUID +import java.util.concurrent.Callable +import org.apache.commons.csv.CSVFormat +import org.apache.commons.csv.CSVPrinter +import org.apache.commons.csv.QuoteMode +import org.apache.commons.lang3.StringUtils + +private val logger = KotlinLogging.logger {} + +class CsvSerializedBuffer( + bufferStorage: BufferStorage, + private val csvSheetGenerator: CsvSheetGenerator, + compression: Boolean +) : BaseSerializedBuffer(bufferStorage) { + private var csvPrinter: CSVPrinter? = null + private var csvFormat: CSVFormat + + init { + csvFormat = CSVFormat.DEFAULT + // we always want to compress csv files + withCompression(compression) + } + + fun withCsvFormat(csvFormat: CSVFormat): CsvSerializedBuffer { + if (csvPrinter == null) { + this.csvFormat = csvFormat + return this + } + throw RuntimeException("Options should be configured before starting to write") + } + + @Throws(IOException::class) + override fun initWriter(outputStream: OutputStream) { + csvPrinter = CSVPrinter(PrintWriter(outputStream, true, StandardCharsets.UTF_8), csvFormat) + } + + /** + * TODO: (ryankfu) remove this call within + * [io.airbyte.cdk.integrations.destination.record_buffer.SerializedBufferingStrategy] and move + * to use recordString + * + * @param record AirbyteRecordMessage to be written + * @throws IOException + */ + @Deprecated("Deprecated in Java") + @Throws(IOException::class) + override fun writeRecord(record: AirbyteRecordMessage) { + csvPrinter!!.printRecord(csvSheetGenerator.getDataRow(UUID.randomUUID(), record)) + } + + @Throws(IOException::class) + override fun writeRecord(recordString: String, airbyteMetaString: String, emittedAt: Long) { + csvPrinter!!.printRecord( + csvSheetGenerator.getDataRow( + UUID.randomUUID(), + recordString, + emittedAt, + airbyteMetaString, + ), + ) + } + + @Throws(IOException::class) + override fun flushWriter() { + // in an async world, it is possible that flush writer gets called even if no records were + // accepted. + if (csvPrinter != null) { + csvPrinter!!.flush() + } else { + logger.warn { "Trying to flush but no printer is initialized." } + } + } + + @Throws(IOException::class) + override fun closeWriter() { + // in an async world, it is possible that flush writer gets called even if no records were + // accepted. + if (csvPrinter != null) { + csvPrinter!!.close() + } else { + logger.warn { "Trying to close but no printer is initialized." } + } + } + + companion object { + + const val CSV_GZ_SUFFIX: String = ".csv.gz" + + @JvmStatic + @Suppress("DEPRECATION") + fun createFunction( + config: S3CsvFormatConfig?, + createStorageFunction: Callable + ): BufferCreateFunction { + return BufferCreateFunction { + stream: AirbyteStreamNameNamespacePair, + catalog: ConfiguredAirbyteCatalog -> + if (config == null) { + return@BufferCreateFunction CsvSerializedBuffer( + createStorageFunction.call(), + StagingDatabaseCsvSheetGenerator(), + true, + ) + } + val csvSheetGenerator = + CsvSheetGenerator.Factory.create( + catalog.streams + .stream() + .filter { s: ConfiguredAirbyteStream -> + s.stream.name == stream.name && + StringUtils.equals( + s.stream.namespace, + stream.namespace, + ) + } + .findFirst() + .orElseThrow { + RuntimeException( + String.format( + "No such stream %s.%s", + stream.namespace, + stream.name, + ), + ) + } + .stream + .jsonSchema, + config, + ) + val csvSettings = + CSVFormat.DEFAULT.withQuoteMode(QuoteMode.NON_NUMERIC) + .withHeader(*csvSheetGenerator.getHeaderRow().toTypedArray()) + val compression = config.compressionType != CompressionType.NO_COMPRESSION + CsvSerializedBuffer( + createStorageFunction.call(), + csvSheetGenerator, + compression, + ) + .withCsvFormat(csvSettings) + } + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/CsvSheetGenerator.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/CsvSheetGenerator.kt new file mode 100644 index 0000000000000..55ce59daa2c79 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/CsvSheetGenerator.kt @@ -0,0 +1,46 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.csv + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.destination.s3.util.Flattening +import io.airbyte.protocol.models.v0.AirbyteRecordMessage +import java.util.* + +/** + * This class takes case of the generation of the CSV data sheet, including the header row and the + * data row. + */ +interface CsvSheetGenerator { + fun getHeaderRow(): List + + // TODO: (ryankfu) remove this and switch over all destinations to pass in serialized + // recordStrings, + // both for performance and lowers memory footprint + fun getDataRow(id: UUID, recordMessage: AirbyteRecordMessage): List + + fun getDataRow(formattedData: JsonNode): List + + fun getDataRow( + id: UUID, + formattedString: String, + emittedAt: Long, + formattedAirbyteMetaString: String + ): List + + object Factory { + @JvmStatic + fun create(jsonSchema: JsonNode?, formatConfig: S3CsvFormatConfig): CsvSheetGenerator { + return if (formatConfig.flattening == Flattening.NO) { + NoFlatteningSheetGenerator() + } else if (formatConfig.flattening == Flattening.ROOT_LEVEL) { + RootLevelFlatteningSheetGenerator(jsonSchema!!) + } else { + throw IllegalArgumentException( + "Unexpected flattening config: " + formatConfig.flattening + ) + } + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/CsvSheetGenerators.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/CsvSheetGenerators.kt new file mode 100644 index 0000000000000..9d4d90bd8bbf5 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/CsvSheetGenerators.kt @@ -0,0 +1,6 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.csv + +class CsvSheetGenerators diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/NoFlatteningSheetGenerator.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/NoFlatteningSheetGenerator.kt new file mode 100644 index 0000000000000..c46be7c0327fc --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/NoFlatteningSheetGenerator.kt @@ -0,0 +1,24 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.s3.csv + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.base.JavaBaseConstants +import io.airbyte.commons.json.Jsons + +class NoFlatteningSheetGenerator : BaseSheetGenerator(), CsvSheetGenerator { + override fun getHeaderRow(): List { + return listOf( + JavaBaseConstants.COLUMN_NAME_AB_ID, + JavaBaseConstants.COLUMN_NAME_EMITTED_AT, + JavaBaseConstants.COLUMN_NAME_DATA, + ) + } + + /** When no flattening is needed, the record column is just one json blob. */ + override fun getRecordColumns(json: JsonNode): List { + return listOf(Jsons.serialize(json)) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/RootLevelFlatteningSheetGenerator.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/RootLevelFlatteningSheetGenerator.kt new file mode 100644 index 0000000000000..70b9c7fd8374e --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/RootLevelFlatteningSheetGenerator.kt @@ -0,0 +1,57 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.s3.csv + +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.collect.Lists +import io.airbyte.cdk.integrations.base.JavaBaseConstants +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.util.MoreIterators +import java.util.LinkedList +import java.util.stream.Collectors + +class RootLevelFlatteningSheetGenerator(jsonSchema: JsonNode) : + BaseSheetGenerator(), CsvSheetGenerator { + /** Keep a header list to iterate the input json object with a defined order. */ + private val recordHeaders: List = + MoreIterators.toList( + jsonSchema["properties"].fieldNames(), + ) + .stream() + .sorted() + .collect(Collectors.toList()) + + override fun getHeaderRow(): List { + val headers: MutableList = + Lists.newArrayList( + JavaBaseConstants.COLUMN_NAME_AB_ID, + JavaBaseConstants.COLUMN_NAME_EMITTED_AT, + ) + headers.addAll(recordHeaders) + return headers + } + + /** With root level flattening, the record columns are the first level fields of the json. */ + public override fun getRecordColumns(json: JsonNode): List { + val values: MutableList = LinkedList() + for (field in recordHeaders) { + val value = json[field] + if (value == null) { + values.add("") + } else if (value.isValueNode) { + // Call asText method on value nodes so that proper string + // representation of json values can be returned by Jackson. + // Otherwise, CSV printer will just call the toString method, + // which can be problematic (e.g. text node will have extra + // double quotation marks around its text value). + values.add(value.asText()) + } else { + values.add(Jsons.serialize(value)) + } + } + + return values + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/S3CsvFormatConfig.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/S3CsvFormatConfig.kt new file mode 100644 index 0000000000000..5a0f7f0a4001c --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/S3CsvFormatConfig.kt @@ -0,0 +1,63 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.csv + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.destination.s3.S3DestinationConstants +import io.airbyte.cdk.integrations.destination.s3.S3Format +import io.airbyte.cdk.integrations.destination.s3.S3FormatConfig +import io.airbyte.cdk.integrations.destination.s3.util.CompressionType +import io.airbyte.cdk.integrations.destination.s3.util.CompressionTypeHelper +import io.airbyte.cdk.integrations.destination.s3.util.Flattening +import io.airbyte.cdk.integrations.destination.s3.util.Flattening.Companion.fromValue +import java.util.* + +class S3CsvFormatConfig(val flattening: Flattening, val compressionType: CompressionType) : + S3FormatConfig { + constructor( + formatConfig: JsonNode + ) : this( + fromValue( + if (formatConfig.has("flattening")) formatConfig["flattening"].asText() + else Flattening.NO.value + ), + if (formatConfig.has(S3DestinationConstants.COMPRESSION_ARG_NAME)) + CompressionTypeHelper.parseCompressionType( + formatConfig[S3DestinationConstants.COMPRESSION_ARG_NAME] + ) + else S3DestinationConstants.DEFAULT_COMPRESSION_TYPE + ) + + override val format: S3Format = S3Format.CSV + + override val fileExtension: String = CSV_SUFFIX + compressionType.fileExtension + + override fun toString(): String { + return "S3CsvFormatConfig{" + + "flattening=" + + flattening + + ", compression=" + + compressionType!!.name + + '}' + } + + override fun equals(o: Any?): Boolean { + if (this === o) { + return true + } + if (o == null || javaClass != o.javaClass) { + return false + } + val that = o as S3CsvFormatConfig + return flattening == that.flattening && compressionType == that.compressionType + } + + override fun hashCode(): Int { + return Objects.hash(flattening, compressionType) + } + + companion object { + const val CSV_SUFFIX: String = ".csv" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/S3CsvWriter.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/S3CsvWriter.kt new file mode 100644 index 0000000000000..568cc93ba45a1 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/S3CsvWriter.kt @@ -0,0 +1,181 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.csv + +import alex.mojaki.s3upload.MultiPartOutputStream +import alex.mojaki.s3upload.StreamTransferManager +import com.amazonaws.services.s3.AmazonS3 +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfig +import io.airbyte.cdk.integrations.destination.s3.S3Format +import io.airbyte.cdk.integrations.destination.s3.template.S3FilenameTemplateParameterObject.Companion.builder +import io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory +import io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory.create +import io.airbyte.cdk.integrations.destination.s3.writer.BaseS3Writer +import io.airbyte.cdk.integrations.destination.s3.writer.DestinationFileWriter +import io.airbyte.protocol.models.v0.AirbyteRecordMessage +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import java.io.IOException +import java.io.PrintWriter +import java.nio.charset.StandardCharsets +import java.sql.Timestamp +import java.util.* +import org.apache.commons.csv.CSVFormat +import org.apache.commons.csv.CSVPrinter +import org.apache.commons.csv.QuoteMode +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +class S3CsvWriter +private constructor( + config: S3DestinationConfig, + s3Client: AmazonS3, + configuredStream: ConfiguredAirbyteStream, + uploadTimestamp: Timestamp, + uploadThreads: Int, + queueCapacity: Int, + writeHeader: Boolean, + csvSettings: CSVFormat, + csvSheetGenerator: CsvSheetGenerator +) : BaseS3Writer(config, s3Client, configuredStream), DestinationFileWriter { + private val csvSheetGenerator: CsvSheetGenerator + private val uploadManager: StreamTransferManager + private val outputStream: MultiPartOutputStream + private val csvPrinter: CSVPrinter + override val outputPath: String + override val fileLocation: String + + init { + var csvSettings = csvSettings + this.csvSheetGenerator = csvSheetGenerator + + val fileSuffix = "_" + UUID.randomUUID() + val outputFilename: String = + BaseS3Writer.Companion.determineOutputFilename( + builder() + .customSuffix(fileSuffix) + .s3Format(S3Format.CSV) + .fileExtension(S3Format.CSV.fileExtension) + .fileNamePattern(config.fileNamePattern) + .timestamp(uploadTimestamp) + .build() + ) + this.outputPath = java.lang.String.join("/", outputPrefix, outputFilename) + + LOGGER.info( + "Full S3 path for stream '{}': s3://{}/{}", + stream.name, + config.bucketName, + outputPath + ) + fileLocation = String.format("gs://%s/%s", config.bucketName, outputPath) + + this.uploadManager = + create(config.bucketName, outputPath, s3Client) + .get() + .numUploadThreads(uploadThreads) + .queueCapacity(queueCapacity) + // We only need one output stream as we only have one input stream. This is reasonably + // performant. + this.outputStream = uploadManager.multiPartOutputStreams[0] + if (writeHeader) { + csvSettings = + csvSettings.withHeader(*csvSheetGenerator.getHeaderRow().toTypedArray()) + } + this.csvPrinter = + CSVPrinter(PrintWriter(outputStream, true, StandardCharsets.UTF_8), csvSettings) + } + + class Builder( + private val config: S3DestinationConfig, + private val s3Client: AmazonS3, + private val configuredStream: ConfiguredAirbyteStream, + private val uploadTimestamp: Timestamp + ) { + private var uploadThreads = StreamTransferManagerFactory.DEFAULT_UPLOAD_THREADS + private var queueCapacity = StreamTransferManagerFactory.DEFAULT_QUEUE_CAPACITY + private var withHeader = true + private var csvSettings: CSVFormat = CSVFormat.DEFAULT.withQuoteMode(QuoteMode.ALL) + private lateinit var _csvSheetGenerator: CsvSheetGenerator + + fun uploadThreads(uploadThreads: Int): Builder { + this.uploadThreads = uploadThreads + return this + } + + fun queueCapacity(queueCapacity: Int): Builder { + this.queueCapacity = queueCapacity + return this + } + + fun withHeader(withHeader: Boolean): Builder { + this.withHeader = withHeader + return this + } + + fun csvSettings(csvSettings: CSVFormat): Builder { + this.csvSettings = csvSettings + return this + } + + fun csvSheetGenerator(csvSheetGenerator: CsvSheetGenerator): Builder { + this._csvSheetGenerator = csvSheetGenerator + return this + } + + @Throws(IOException::class) + fun build(): S3CsvWriter { + if (!::_csvSheetGenerator.isInitialized) { + val formatConfig = config.formatConfig as S3CsvFormatConfig + _csvSheetGenerator = + CsvSheetGenerator.Factory.create( + configuredStream.stream.jsonSchema, + formatConfig + ) + } + return S3CsvWriter( + config, + s3Client, + configuredStream, + uploadTimestamp, + uploadThreads, + queueCapacity, + withHeader, + csvSettings, + _csvSheetGenerator + ) + } + } + + @Throws(IOException::class) + override fun write(id: UUID, recordMessage: AirbyteRecordMessage) { + csvPrinter.printRecord(csvSheetGenerator!!.getDataRow(id, recordMessage)) + } + + @Throws(IOException::class) + override fun closeWhenSucceed() { + csvPrinter.close() + outputStream.close() + uploadManager.complete() + } + + @Throws(IOException::class) + override fun closeWhenFail() { + csvPrinter.close() + outputStream.close() + uploadManager.abort() + } + + override val fileFormat: S3Format? + get() = S3Format.CSV + + @Throws(IOException::class) + override fun write(formattedData: JsonNode) { + csvPrinter.printRecord(csvSheetGenerator!!.getDataRow(formattedData)) + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(S3CsvWriter::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/StagingDatabaseCsvSheetGenerator.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/StagingDatabaseCsvSheetGenerator.kt new file mode 100644 index 0000000000000..7386dbcfee769 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/StagingDatabaseCsvSheetGenerator.kt @@ -0,0 +1,68 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.csv + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.base.JavaBaseConstants +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.v0.AirbyteRecordMessage +import java.time.Instant +import java.util.* + +/** + * A CsvSheetGenerator that produces data in the format expected by JdbcSqlOperations. See + * JdbcSqlOperations#createTableQuery. + * + * This intentionally does not extend [BaseSheetGenerator], because it needs the columns in a + * different order (ABID, JSON, timestamp) vs (ABID, timestamp, JSON) + * + * In 1s1t mode, the column ordering is also different (raw_id, extracted_at, loaded_at, data). Note + * that the loaded_at column is rendered as an empty string; callers are expected to configure their + * destination to parse this as NULL. For example, Snowflake's COPY into command accepts a NULL_IF + * parameter, and Redshift accepts an EMPTYASNULL option. + */ +class StagingDatabaseCsvSheetGenerator +@JvmOverloads +constructor(private val useDestinationsV2Columns: Boolean = false) : CsvSheetGenerator { + // TODO is this even used anywhere? + private var header: List = + if (this.useDestinationsV2Columns) JavaBaseConstants.V2_RAW_TABLE_COLUMN_NAMES + else JavaBaseConstants.LEGACY_RAW_TABLE_COLUMNS + + override fun getHeaderRow(): List { + return header + } + + override fun getDataRow(id: UUID, recordMessage: AirbyteRecordMessage): List { + return getDataRow( + id, + Jsons.serialize(recordMessage.data), + recordMessage.emittedAt, + Jsons.serialize(recordMessage.meta) + ) + } + + override fun getDataRow(formattedData: JsonNode): List { + return LinkedList(listOf(Jsons.serialize(formattedData))) + } + + override fun getDataRow( + id: UUID, + formattedString: String, + emittedAt: Long, + formattedAirbyteMetaString: String + ): List { + return if (useDestinationsV2Columns) { + java.util.List.of( + id, + Instant.ofEpochMilli(emittedAt), + "", + formattedString, + formattedAirbyteMetaString + ) + } else { + java.util.List.of(id, formattedString, Instant.ofEpochMilli(emittedAt)) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/jsonl/JsonLSerializedBuffer.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/jsonl/JsonLSerializedBuffer.kt new file mode 100644 index 0000000000000..2d0b2b6bb7f88 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/jsonl/JsonLSerializedBuffer.kt @@ -0,0 +1,105 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.s3.jsonl + +import com.fasterxml.jackson.core.type.TypeReference +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.ObjectMapper +import io.airbyte.cdk.integrations.base.JavaBaseConstants +import io.airbyte.cdk.integrations.destination.record_buffer.BaseSerializedBuffer +import io.airbyte.cdk.integrations.destination.record_buffer.BufferCreateFunction +import io.airbyte.cdk.integrations.destination.record_buffer.BufferStorage +import io.airbyte.cdk.integrations.destination.s3.S3DestinationConstants +import io.airbyte.cdk.integrations.destination.s3.util.CompressionType +import io.airbyte.cdk.integrations.destination.s3.util.Flattening +import io.airbyte.commons.jackson.MoreMappers +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.v0.AirbyteRecordMessage +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import java.io.OutputStream +import java.io.PrintWriter +import java.nio.charset.StandardCharsets +import java.util.UUID +import java.util.concurrent.Callable + +class JsonLSerializedBuffer( + bufferStorage: BufferStorage, + gzipCompression: Boolean, + private val flattenData: Boolean = false +) : BaseSerializedBuffer(bufferStorage) { + + private lateinit var printWriter: PrintWriter + + init { + withCompression(gzipCompression) + } + + override fun initWriter(outputStream: OutputStream) { + printWriter = PrintWriter(outputStream, true, StandardCharsets.UTF_8) + } + + @Deprecated("Deprecated in Java") + override fun writeRecord(record: AirbyteRecordMessage) { + val json = MAPPER.createObjectNode() + json.put(JavaBaseConstants.COLUMN_NAME_AB_ID, UUID.randomUUID().toString()) + json.put(JavaBaseConstants.COLUMN_NAME_EMITTED_AT, record.emittedAt) + if (flattenData) { + val data: Map = + MAPPER.convertValue( + record.data, + object : TypeReference>() {}, + ) + json.setAll(data) + } else { + json.set(JavaBaseConstants.COLUMN_NAME_DATA, record.data) + } + printWriter.println(Jsons.serialize(json)) + } + + @Suppress("DEPRECATION") + override fun writeRecord(recordString: String, airbyteMetaString: String, emittedAt: Long) { + // TODO Remove this double deserialization when S3 Destinations moves to Async. + writeRecord( + Jsons.deserialize( + recordString, + AirbyteRecordMessage::class.java, + ) + .withEmittedAt(emittedAt), + ) + } + + override fun flushWriter() { + printWriter.flush() + } + + override fun closeWriter() { + printWriter.close() + } + + companion object { + private val MAPPER: ObjectMapper = MoreMappers.initMapper() + + @JvmStatic + fun createBufferFunction( + config: S3JsonlFormatConfig?, + createStorageFunction: Callable + ): BufferCreateFunction { + return BufferCreateFunction { + _: AirbyteStreamNameNamespacePair?, + _: ConfiguredAirbyteCatalog? -> + val compressionType = + if (config == null) S3DestinationConstants.DEFAULT_COMPRESSION_TYPE + else config.compressionType + val flattening = if (config == null) Flattening.NO else config.flatteningType + JsonLSerializedBuffer( + createStorageFunction.call(), + compressionType != CompressionType.NO_COMPRESSION, + flattening != Flattening.NO, + ) + } + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/jsonl/S3JsonlFormatConfig.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/jsonl/S3JsonlFormatConfig.kt new file mode 100644 index 0000000000000..ed2ca921150a7 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/jsonl/S3JsonlFormatConfig.kt @@ -0,0 +1,55 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.jsonl + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.destination.s3.S3DestinationConstants +import io.airbyte.cdk.integrations.destination.s3.S3Format +import io.airbyte.cdk.integrations.destination.s3.S3FormatConfig +import io.airbyte.cdk.integrations.destination.s3.util.CompressionType +import io.airbyte.cdk.integrations.destination.s3.util.CompressionTypeHelper +import io.airbyte.cdk.integrations.destination.s3.util.Flattening +import io.airbyte.cdk.integrations.destination.s3.util.Flattening.Companion.fromValue +import java.util.* +import lombok.ToString + +@ToString +class S3JsonlFormatConfig(val flatteningType: Flattening, val compressionType: CompressionType) : + S3FormatConfig { + constructor( + formatConfig: JsonNode + ) : this( + if (formatConfig.has(S3DestinationConstants.FLATTENING_ARG_NAME)) + fromValue(formatConfig[S3DestinationConstants.FLATTENING_ARG_NAME].asText()) + else Flattening.NO, + if (formatConfig.has(S3DestinationConstants.COMPRESSION_ARG_NAME)) + CompressionTypeHelper.parseCompressionType( + formatConfig[S3DestinationConstants.COMPRESSION_ARG_NAME] + ) + else S3DestinationConstants.DEFAULT_COMPRESSION_TYPE + ) + + override val format: S3Format = S3Format.JSONL + + override val fileExtension: String = JSONL_SUFFIX + compressionType.fileExtension + + override fun equals(o: Any?): Boolean { + if (this === o) { + return true + } + if (o == null || javaClass != o.javaClass) { + return false + } + val that = o as S3JsonlFormatConfig + return flatteningType == that.flatteningType && compressionType == that.compressionType + } + + override fun hashCode(): Int { + return Objects.hash(flatteningType, compressionType) + } + + companion object { + const val JSONL_SUFFIX: String = ".jsonl" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/jsonl/S3JsonlWriter.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/jsonl/S3JsonlWriter.kt new file mode 100644 index 0000000000000..942dbd6eb36dd --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/jsonl/S3JsonlWriter.kt @@ -0,0 +1,102 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.jsonl + +import alex.mojaki.s3upload.MultiPartOutputStream +import alex.mojaki.s3upload.StreamTransferManager +import com.amazonaws.services.s3.AmazonS3 +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.ObjectMapper +import io.airbyte.cdk.integrations.base.JavaBaseConstants +import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfig +import io.airbyte.cdk.integrations.destination.s3.S3Format +import io.airbyte.cdk.integrations.destination.s3.template.S3FilenameTemplateParameterObject.Companion.builder +import io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory.create +import io.airbyte.cdk.integrations.destination.s3.writer.BaseS3Writer +import io.airbyte.cdk.integrations.destination.s3.writer.DestinationFileWriter +import io.airbyte.commons.jackson.MoreMappers +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.v0.AirbyteRecordMessage +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import java.io.IOException +import java.io.PrintWriter +import java.nio.charset.StandardCharsets +import java.sql.Timestamp +import java.util.* +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +class S3JsonlWriter( + config: S3DestinationConfig, + s3Client: AmazonS3, + configuredStream: ConfiguredAirbyteStream, + uploadTimestamp: Timestamp? +) : BaseS3Writer(config, s3Client, configuredStream), DestinationFileWriter { + private val uploadManager: StreamTransferManager + private val outputStream: MultiPartOutputStream + private val printWriter: PrintWriter + override val outputPath: String + override val fileLocation: String + + init { + val outputFilename: String = + BaseS3Writer.Companion.determineOutputFilename( + builder() + .timestamp(uploadTimestamp) + .s3Format(S3Format.JSONL) + .fileExtension(S3Format.JSONL.fileExtension) + .fileNamePattern(config.fileNamePattern) + .build() + ) + outputPath = java.lang.String.join("/", outputPrefix, outputFilename) + + LOGGER.info( + "Full S3 path for stream '{}': s3://{}/{}", + stream.name, + config.bucketName, + outputPath + ) + fileLocation = String.format("gs://%s/%s", config.bucketName, outputPath) + + this.uploadManager = create(config.bucketName, outputPath, s3Client).get() + // We only need one output stream as we only have one input stream. This is reasonably + // performant. + this.outputStream = uploadManager.multiPartOutputStreams[0] + this.printWriter = PrintWriter(outputStream, true, StandardCharsets.UTF_8) + } + + override fun write(id: UUID, recordMessage: AirbyteRecordMessage) { + val json = MAPPER.createObjectNode() + json.put(JavaBaseConstants.COLUMN_NAME_AB_ID, id.toString()) + json.put(JavaBaseConstants.COLUMN_NAME_EMITTED_AT, recordMessage.emittedAt) + json.set(JavaBaseConstants.COLUMN_NAME_DATA, recordMessage.data) + printWriter.println(Jsons.serialize(json)) + } + + override fun closeWhenSucceed() { + printWriter.close() + outputStream.close() + uploadManager.complete() + } + + override fun closeWhenFail() { + printWriter.close() + outputStream.close() + uploadManager.abort() + } + + override val fileFormat: S3Format? + get() = S3Format.JSONL + + @Throws(IOException::class) + override fun write(formattedData: JsonNode) { + printWriter.println(Jsons.serialize(formattedData)) + } + + companion object { + protected val LOGGER: Logger = LoggerFactory.getLogger(S3JsonlWriter::class.java) + + private val MAPPER: ObjectMapper = MoreMappers.initMapper() + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/parquet/ParquetSerializedBuffer.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/parquet/ParquetSerializedBuffer.kt new file mode 100644 index 0000000000000..a02757975530a --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/parquet/ParquetSerializedBuffer.kt @@ -0,0 +1,195 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.s3.parquet + +import io.airbyte.cdk.integrations.destination.record_buffer.BufferCreateFunction +import io.airbyte.cdk.integrations.destination.record_buffer.FileBuffer +import io.airbyte.cdk.integrations.destination.record_buffer.SerializableBuffer +import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfig +import io.airbyte.cdk.integrations.destination.s3.avro.AvroConstants +import io.airbyte.cdk.integrations.destination.s3.avro.AvroRecordFactory +import io.airbyte.cdk.integrations.destination.s3.avro.JsonToAvroSchemaConverter +import io.airbyte.protocol.models.v0.AirbyteRecordMessage +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import io.github.oshai.kotlinlogging.KotlinLogging +import java.io.File +import java.io.FileInputStream +import java.io.IOException +import java.io.InputStream +import java.nio.file.Files +import java.nio.file.Path +import java.util.UUID +import org.apache.avro.Schema +import org.apache.avro.generic.GenericData +import org.apache.commons.io.FileUtils +import org.apache.commons.lang3.StringUtils +import org.apache.hadoop.conf.Configuration +import org.apache.parquet.avro.AvroParquetWriter +import org.apache.parquet.avro.AvroWriteSupport +import org.apache.parquet.hadoop.ParquetWriter +import org.apache.parquet.hadoop.util.HadoopOutputFile + +private val logger = KotlinLogging.logger {} + +/** + * The [io.airbyte.cdk.integrations.destination.record_buffer.BaseSerializedBuffer] class abstracts + * the [io.airbyte.cdk.integrations.destination.record_buffer.BufferStorage] from the details of the + * format the data is going to be stored in. + * + * Unfortunately, the Parquet library doesn't allow us to manipulate the output stream and forces us + * to go through [HadoopOutputFile] instead. So we can't benefit from the abstraction described + * above. Therefore, we re-implement the necessary methods to be used as [SerializableBuffer], while + * data will be buffered in such a hadoop file. + */ +class ParquetSerializedBuffer( + config: S3DestinationConfig, + stream: AirbyteStreamNameNamespacePair, + catalog: ConfiguredAirbyteCatalog +) : SerializableBuffer { + private val avroRecordFactory: AvroRecordFactory + private val parquetWriter: ParquetWriter + private val bufferFile: Path + override var inputStream: InputStream? = null + private var lastByteCount: Long + private var isClosed: Boolean + + init { + val schemaConverter = JsonToAvroSchemaConverter() + val schema: Schema = + schemaConverter.getAvroSchema( + catalog.streams + .stream() + .filter { s: ConfiguredAirbyteStream -> + (s.stream.name == stream.name) && + StringUtils.equals( + s.stream.namespace, + stream.namespace, + ) + } + .findFirst() + .orElseThrow { + RuntimeException("No such stream ${stream.namespace}.${stream.name}") + } + .stream + .jsonSchema, + stream.name, + stream.namespace, + ) + bufferFile = Files.createTempFile(UUID.randomUUID().toString(), ".parquet") + Files.deleteIfExists(bufferFile) + avroRecordFactory = AvroRecordFactory(schema, AvroConstants.JSON_CONVERTER) + val formatConfig: S3ParquetFormatConfig = config.formatConfig as S3ParquetFormatConfig + val avroConfig = Configuration() + avroConfig.setBoolean(AvroWriteSupport.WRITE_OLD_LIST_STRUCTURE, false) + parquetWriter = + AvroParquetWriter.builder( + HadoopOutputFile.fromPath( + org.apache.hadoop.fs.Path(bufferFile.toUri()), + avroConfig + ), + ) + .withConf( + avroConfig + ) // yes, this should be here despite the fact we pass this config above in path + .withSchema(schema) + .withCompressionCodec(formatConfig.compressionCodec) + .withRowGroupSize(formatConfig.blockSize.toLong()) + .withMaxPaddingSize(formatConfig.maxPaddingSize) + .withPageSize(formatConfig.pageSize) + .withDictionaryPageSize(formatConfig.dictionaryPageSize) + .withDictionaryEncoding(formatConfig.isDictionaryEncoding) + .build() + isClosed = false + lastByteCount = 0L + } + + @Deprecated("Deprecated in Java") + @Throws(Exception::class) + override fun accept(record: AirbyteRecordMessage): Long { + if (inputStream == null && !isClosed) { + val startCount: Long = byteCount + parquetWriter.write(avroRecordFactory.getAvroRecord(UUID.randomUUID(), record)) + return byteCount - startCount + } else { + throw IllegalCallerException("Buffer is already closed, it cannot accept more messages") + } + } + + @Throws(Exception::class) + override fun accept(recordString: String, airbyteMetaString: String, emittedAt: Long): Long { + throw UnsupportedOperationException( + "This method is not supported for ParquetSerializedBuffer" + ) + } + + @Throws(Exception::class) + override fun flush() { + if (inputStream == null && !isClosed) { + byteCount + parquetWriter.close() + inputStream = FileInputStream(bufferFile.toFile()) + logger.info { + "Finished writing data to ${filename} (${FileUtils.byteCountToDisplaySize(byteCount)})" + } + } + } + + override val byteCount: Long + get() { + if (inputStream != null) { + // once the parquetWriter is closed, we can't query how many bytes are in it, so we + // cache the last + // count + return lastByteCount + } + lastByteCount = parquetWriter.dataSize + return lastByteCount + } + + override val filename: String + @Throws(IOException::class) + get() { + return bufferFile.fileName.toString() + } + + override val file: File + @Throws(IOException::class) + get() { + return bufferFile.toFile() + } + + override val maxTotalBufferSizeInBytes: Long = FileBuffer.MAX_TOTAL_BUFFER_SIZE_BYTES + + override val maxPerStreamBufferSizeInBytes: Long = FileBuffer.MAX_PER_STREAM_BUFFER_SIZE_BYTES + + override val maxConcurrentStreamsInBuffer: Int = + FileBuffer.DEFAULT_MAX_CONCURRENT_STREAM_IN_BUFFER + + @Throws(Exception::class) + override fun close() { + if (!isClosed) { + inputStream?.close() + Files.deleteIfExists(bufferFile) + isClosed = true + } + } + + companion object { + @JvmStatic + fun createFunction(s3DestinationConfig: S3DestinationConfig): BufferCreateFunction { + return BufferCreateFunction { + stream: AirbyteStreamNameNamespacePair, + catalog: ConfiguredAirbyteCatalog -> + ParquetSerializedBuffer( + s3DestinationConfig, + stream, + catalog, + ) + } + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/parquet/S3ParquetConstants.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/parquet/S3ParquetConstants.kt new file mode 100644 index 0000000000000..22067bde592a6 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/parquet/S3ParquetConstants.kt @@ -0,0 +1,20 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.s3.parquet + +import org.apache.parquet.hadoop.metadata.CompressionCodecName + +class S3ParquetConstants { + + companion object { + @JvmField + val DEFAULT_COMPRESSION_CODEC: CompressionCodecName = CompressionCodecName.UNCOMPRESSED + const val DEFAULT_BLOCK_SIZE_MB: Int = 128 + const val DEFAULT_MAX_PADDING_SIZE_MB: Int = 8 + const val DEFAULT_PAGE_SIZE_KB: Int = 1024 + const val DEFAULT_DICTIONARY_PAGE_SIZE_KB: Int = 1024 + const val DEFAULT_DICTIONARY_ENCODING: Boolean = true + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/parquet/S3ParquetFormatConfig.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/parquet/S3ParquetFormatConfig.kt new file mode 100644 index 0000000000000..f232e6f38cf21 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/parquet/S3ParquetFormatConfig.kt @@ -0,0 +1,97 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.parquet + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.destination.s3.S3Format +import io.airbyte.cdk.integrations.destination.s3.S3FormatConfig +import java.util.* +import org.apache.parquet.hadoop.metadata.CompressionCodecName + +class S3ParquetFormatConfig(formatConfig: JsonNode) : S3FormatConfig { + @JvmField val compressionCodec: CompressionCodecName + @JvmField val blockSize: Int + @JvmField val maxPaddingSize: Int + @JvmField val pageSize: Int + @JvmField val dictionaryPageSize: Int + val isDictionaryEncoding: Boolean + override val fileExtension: String = PARQUET_SUFFIX + + init { + val blockSizeMb: Int = + S3FormatConfig.Companion.withDefault( + formatConfig, + "block_size_mb", + S3ParquetConstants.DEFAULT_BLOCK_SIZE_MB + ) + val maxPaddingSizeMb: Int = + S3FormatConfig.Companion.withDefault( + formatConfig, + "max_padding_size_mb", + S3ParquetConstants.DEFAULT_MAX_PADDING_SIZE_MB + ) + val pageSizeKb: Int = + S3FormatConfig.Companion.withDefault( + formatConfig, + "page_size_kb", + S3ParquetConstants.DEFAULT_PAGE_SIZE_KB + ) + val dictionaryPageSizeKb: Int = + S3FormatConfig.Companion.withDefault( + formatConfig, + "dictionary_page_size_kb", + S3ParquetConstants.DEFAULT_DICTIONARY_PAGE_SIZE_KB + ) + + this.compressionCodec = + CompressionCodecName.valueOf( + S3FormatConfig.Companion.withDefault( + formatConfig, + "compression_codec", + S3ParquetConstants.DEFAULT_COMPRESSION_CODEC.name + ) + .uppercase(Locale.getDefault()) + ) + this.blockSize = blockSizeMb * 1024 * 1024 + this.maxPaddingSize = maxPaddingSizeMb * 1024 * 1024 + this.pageSize = pageSizeKb * 1024 + this.dictionaryPageSize = dictionaryPageSizeKb * 1024 + this.isDictionaryEncoding = + S3FormatConfig.Companion.withDefault( + formatConfig, + "dictionary_encoding", + S3ParquetConstants.DEFAULT_DICTIONARY_ENCODING + ) + } + + override val format: S3Format + get() = S3Format.PARQUET + + override fun toString(): String { + return "S3ParquetFormatConfig{" + + "compressionCodec=" + + compressionCodec + + ", " + + "blockSize=" + + blockSize + + ", " + + "maxPaddingSize=" + + maxPaddingSize + + ", " + + "pageSize=" + + pageSize + + ", " + + "dictionaryPageSize=" + + dictionaryPageSize + + ", " + + "dictionaryEncoding=" + + isDictionaryEncoding + + ", " + + '}' + } + + companion object { + @JvmField val PARQUET_SUFFIX: String = ".parquet" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/parquet/S3ParquetWriter.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/parquet/S3ParquetWriter.kt new file mode 100644 index 0000000000000..208b9a6417b07 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/parquet/S3ParquetWriter.kt @@ -0,0 +1,135 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.parquet + +import com.amazonaws.services.s3.AmazonS3 +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfig +import io.airbyte.cdk.integrations.destination.s3.S3Format +import io.airbyte.cdk.integrations.destination.s3.avro.AvroRecordFactory +import io.airbyte.cdk.integrations.destination.s3.credential.S3AccessKeyCredentialConfig +import io.airbyte.cdk.integrations.destination.s3.template.S3FilenameTemplateParameterObject.Companion.builder +import io.airbyte.cdk.integrations.destination.s3.writer.BaseS3Writer +import io.airbyte.cdk.integrations.destination.s3.writer.DestinationFileWriter +import io.airbyte.protocol.models.v0.AirbyteRecordMessage +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import java.io.IOException +import java.net.URI +import java.sql.Timestamp +import java.util.* +import org.apache.avro.Schema +import org.apache.avro.generic.GenericData +import org.apache.hadoop.conf.Configuration +import org.apache.hadoop.fs.Path +import org.apache.hadoop.fs.s3a.Constants +import org.apache.parquet.avro.AvroParquetWriter +import org.apache.parquet.avro.AvroWriteSupport +import org.apache.parquet.hadoop.ParquetWriter +import org.apache.parquet.hadoop.util.HadoopOutputFile +import org.slf4j.Logger +import org.slf4j.LoggerFactory +import tech.allegro.schema.json2avro.converter.JsonAvroConverter + +class S3ParquetWriter( + config: S3DestinationConfig, + s3Client: AmazonS3, + configuredStream: ConfiguredAirbyteStream, + uploadTimestamp: Timestamp?, + schema: Schema?, + converter: JsonAvroConverter? +) : BaseS3Writer(config, s3Client, configuredStream), DestinationFileWriter { + private val parquetWriter: ParquetWriter + private val avroRecordFactory: AvroRecordFactory + val schema: Schema? + val outputFilename: String = + BaseS3Writer.Companion.determineOutputFilename( + builder() + .s3Format(S3Format.PARQUET) + .timestamp(uploadTimestamp) + .fileExtension(S3Format.PARQUET.fileExtension) + .fileNamePattern(config.fileNamePattern) + .build() + ) + + // object key = / + override val outputPath: String = java.lang.String.join("/", outputPrefix, outputFilename) + + // full file path = s3://// + override val fileLocation: String = String.format("s3a://%s/%s", config.bucketName, outputPath) + + init { + LOGGER.info("Full S3 path for stream '{}': {}", stream.name, fileLocation) + + val path = Path(URI(fileLocation)) + val formatConfig = config.formatConfig as S3ParquetFormatConfig + val hadoopConfig = getHadoopConfig(config) + hadoopConfig.setBoolean(AvroWriteSupport.WRITE_OLD_LIST_STRUCTURE, false) + this.parquetWriter = + AvroParquetWriter.builder( + HadoopOutputFile.fromPath(path, hadoopConfig) + ) + .withConf( + hadoopConfig + ) // yes, this should be here despite the fact we pass this config above in path + .withSchema(schema) + .withCompressionCodec(formatConfig.compressionCodec) + .withRowGroupSize(formatConfig.blockSize) + .withMaxPaddingSize(formatConfig.maxPaddingSize) + .withPageSize(formatConfig.pageSize) + .withDictionaryPageSize(formatConfig.dictionaryPageSize) + .withDictionaryEncoding(formatConfig.isDictionaryEncoding) + .build() + this.avroRecordFactory = AvroRecordFactory(schema, converter) + this.schema = schema + } + + val outputFilePath: String + /** The file path includes prefix and filename, but does not include the bucket name. */ + get() = "$outputPrefix/$outputFilename" + + @Throws(IOException::class) + override fun write(id: UUID, recordMessage: AirbyteRecordMessage) { + parquetWriter.write(avroRecordFactory.getAvroRecord(id, recordMessage)) + } + + @Throws(IOException::class) + override fun closeWhenSucceed() { + parquetWriter.close() + } + + @Throws(IOException::class) + override fun closeWhenFail() { + parquetWriter.close() + } + + override val fileFormat: S3Format? + get() = S3Format.PARQUET + + @Throws(IOException::class) + override fun write(formattedData: JsonNode) { + parquetWriter.write(avroRecordFactory.getAvroRecord(formattedData)) + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(S3ParquetWriter::class.java) + + @JvmStatic + fun getHadoopConfig(config: S3DestinationConfig): Configuration { + val hadoopConfig = Configuration() + val credentialConfig = config.s3CredentialConfig as S3AccessKeyCredentialConfig + hadoopConfig[Constants.ACCESS_KEY] = credentialConfig.accessKeyId + hadoopConfig[Constants.SECRET_KEY] = credentialConfig.secretAccessKey + if (config.endpoint.isNullOrEmpty()) { + hadoopConfig[Constants.ENDPOINT] = + String.format("s3.%s.amazonaws.com", config.bucketRegion) + } else { + hadoopConfig[Constants.ENDPOINT] = config.endpoint + hadoopConfig[Constants.PATH_STYLE_ACCESS] = "true" + } + hadoopConfig[Constants.AWS_CREDENTIALS_PROVIDER] = + "org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider" + return hadoopConfig + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/template/S3FilenameTemplateManager.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/template/S3FilenameTemplateManager.kt new file mode 100644 index 0000000000000..947163659c524 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/template/S3FilenameTemplateManager.kt @@ -0,0 +1,127 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.s3.template + +import io.airbyte.cdk.integrations.destination.s3.S3DestinationConstants +import java.io.IOException +import java.text.DateFormat +import java.text.SimpleDateFormat +import java.time.Instant +import java.util.Optional +import java.util.TimeZone +import java.util.regex.Pattern +import org.apache.commons.lang3.StringUtils +import org.apache.commons.text.StringSubstitutor +import org.apache.commons.text.lookup.StringLookupFactory + +/** + * This class is responsible for building the filename template based on user input, see + * file_name_pattern in the specification of connector currently supported only S3 staging. + */ +class S3FilenameTemplateManager { + private val stringSubstitutor = StringSubstitutor() + + @Throws(IOException::class) + fun applyPatternToFilename(parameterObject: S3FilenameTemplateParameterObject): String { + // sanitize fileFormat + val sanitizedFileFormat = + parameterObject.fileNamePattern?.trim { it <= ' ' }?.replace(" ".toRegex(), "_") + + stringSubstitutor.setVariableResolver( + StringLookupFactory.INSTANCE.mapStringLookup( + fillTheMapWithDefaultPlaceHolders( + sanitizedFileFormat, + parameterObject, + ), + ), + ) + stringSubstitutor.setVariablePrefix("{") + stringSubstitutor.setVariableSuffix("}") + return Optional.ofNullable(parameterObject.objectPath).orElse(StringUtils.EMPTY) + + stringSubstitutor.replace(sanitizedFileFormat) + } + + private fun fillTheMapWithDefaultPlaceHolders( + stringToReplaceWithPlaceholder: String?, + parameterObject: S3FilenameTemplateParameterObject + ): Map { + val currentTimeMillis = Instant.now().toEpochMilli() + + val valuesMap = + processExtendedPlaceholder(currentTimeMillis, stringToReplaceWithPlaceholder) + + val defaultDateFormat: DateFormat = + SimpleDateFormat(S3DestinationConstants.YYYY_MM_DD_FORMAT_STRING) + defaultDateFormat.timeZone = TimeZone.getTimeZone(UTC) + + // here we set default values for supported placeholders. + valuesMap["date"] = + Optional.ofNullable(defaultDateFormat.format(currentTimeMillis)) + .orElse( + StringUtils.EMPTY, + ) + valuesMap["timestamp"] = + Optional.ofNullable(currentTimeMillis.toString()).orElse(StringUtils.EMPTY) + valuesMap["sync_id"] = + Optional.ofNullable(System.getenv("WORKER_JOB_ID")).orElse(StringUtils.EMPTY) + valuesMap["format_extension"] = + Optional.ofNullable(parameterObject.fileExtension).orElse(StringUtils.EMPTY) + valuesMap["part_number"] = + Optional.ofNullable(parameterObject.partId).orElse(StringUtils.EMPTY) + + return valuesMap + } + + /** + * By extended placeholders we assume next types: {date:yyyy_MM}, {timestamp:millis}, + * {timestamp:micro}, etc Limited combinations are supported by the method see the method body. + * + * @param stringToReplaceWithPlaceholder + * - string where the method will search for extended placeholders + * @return map with prepared placeholders. + */ + private fun processExtendedPlaceholder( + currentTimeMillis: Long, + stringToReplaceWithPlaceholder: String? + ): MutableMap { + val valuesMap: MutableMap = HashMap() + + val pattern = Pattern.compile("\\{(date:.+?|timestamp:.+?)}") + val matcher = stringToReplaceWithPlaceholder?.let { pattern.matcher(it) } + + while (matcher?.find() == true) { + val splitByColon = + matcher.group(1).split(":".toRegex()).dropLastWhile { it.isEmpty() }.toTypedArray() + when (splitByColon[0].lowercase()) { + "date" -> { + val dateFormat: DateFormat = SimpleDateFormat(splitByColon[1]) + dateFormat.timeZone = TimeZone.getTimeZone("UTC") + valuesMap[matcher.group(1)] = dateFormat.format(currentTimeMillis) + } + "timestamp" -> { + when (splitByColon[1]) { + "millis" -> { + valuesMap[matcher.group(1)] = currentTimeMillis.toString() + } + "micro" -> { + valuesMap[matcher.group(1)] = + convertToMicrosecondsRepresentation(currentTimeMillis).toString() + } + } + } + } + } + return valuesMap + } + + private fun convertToMicrosecondsRepresentation(milliSeconds: Long): Long { + // The time representation in microseconds is equal to the milliseconds multiplied by 1,000. + return milliSeconds * 1000 + } + + companion object { + private const val UTC = "UTC" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/template/S3FilenameTemplateParameterObject.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/template/S3FilenameTemplateParameterObject.kt new file mode 100644 index 0000000000000..ff859e9061d20 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/template/S3FilenameTemplateParameterObject.kt @@ -0,0 +1,151 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.s3.template + +import io.airbyte.cdk.integrations.destination.record_buffer.SerializableBuffer +import io.airbyte.cdk.integrations.destination.s3.S3Format +import java.sql.Timestamp +import java.util.Objects + +/** + * This class is used as argument holder S3FilenameTemplateManager.class + * + * @see S3FilenameTemplateManager.applyPatternToFilename + */ +class S3FilenameTemplateParameterObject +internal constructor( + val objectPath: String?, + private val recordsData: SerializableBuffer?, + val fileNamePattern: String?, + val fileExtension: String?, + val partId: String?, + val s3Format: S3Format?, + val timestamp: Timestamp?, + val customSuffix: String? +) { + class S3FilenameTemplateParameterObjectBuilder internal constructor() { + private var objectPath: String? = null + private var recordsData: SerializableBuffer? = null + private var fileNamePattern: String? = null + private var fileExtension: String? = null + private var partId: String? = null + private var s3Format: S3Format? = null + private var timestamp: Timestamp? = null + private var customSuffix: String? = null + + fun objectPath(objectPath: String?): S3FilenameTemplateParameterObjectBuilder { + this.objectPath = objectPath + return this + } + + fun recordsData( + recordsData: SerializableBuffer? + ): S3FilenameTemplateParameterObjectBuilder { + this.recordsData = recordsData + return this + } + + fun fileNamePattern(fileNamePattern: String?): S3FilenameTemplateParameterObjectBuilder { + this.fileNamePattern = fileNamePattern + return this + } + + fun fileExtension(fileExtension: String?): S3FilenameTemplateParameterObjectBuilder { + this.fileExtension = fileExtension + return this + } + + fun partId(partId: String?): S3FilenameTemplateParameterObjectBuilder { + this.partId = partId + return this + } + + fun s3Format(s3Format: S3Format?): S3FilenameTemplateParameterObjectBuilder { + this.s3Format = s3Format + return this + } + + fun timestamp(timestamp: Timestamp?): S3FilenameTemplateParameterObjectBuilder { + this.timestamp = timestamp + return this + } + + fun customSuffix(customSuffix: String?): S3FilenameTemplateParameterObjectBuilder { + this.customSuffix = customSuffix + return this + } + + fun build(): S3FilenameTemplateParameterObject { + return S3FilenameTemplateParameterObject( + objectPath, + recordsData, + fileNamePattern, + fileExtension, + partId, + s3Format, + timestamp, + customSuffix, + ) + } + + override fun toString(): String { + return ("S3FilenameTemplateParameterObject.S3FilenameTemplateParameterObjectBuilder(objectPath=" + + this.objectPath + + ", recordsData=" + + this.recordsData + + ", fileNamePattern=" + + this.fileNamePattern + + ", fileExtension=" + + this.fileExtension + + ", partId=" + + this.partId + + ", s3Format=" + + this.s3Format + + ", timestamp=" + + this.timestamp + + ", customSuffix=" + + this.customSuffix + + ")") + } + } + + override fun equals(other: Any?): Boolean { + if (this === other) { + return true + } + if (other == null || javaClass != other.javaClass) { + return false + } + val that = other as S3FilenameTemplateParameterObject + return objectPath == that.objectPath && + recordsData == that.recordsData && + fileNamePattern == that.fileNamePattern && + fileExtension == that.fileExtension && + partId == that.partId && + s3Format == that.s3Format && + timestamp == that.timestamp && + customSuffix == that.customSuffix + } + + override fun hashCode(): Int { + return Objects.hash( + objectPath, + recordsData, + fileNamePattern, + fileExtension, + partId, + s3Format, + timestamp, + customSuffix, + ) + } + + companion object { + @JvmStatic + fun builder(): S3FilenameTemplateParameterObjectBuilder { + return S3FilenameTemplateParameterObjectBuilder() + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/util/AvroRecordHelper.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/util/AvroRecordHelper.kt new file mode 100644 index 0000000000000..c57dc4b425d29 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/util/AvroRecordHelper.kt @@ -0,0 +1,52 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.util + +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.node.ObjectNode +import io.airbyte.cdk.integrations.base.JavaBaseConstants +import io.airbyte.cdk.integrations.destination.s3.avro.JsonFieldNameUpdater +import io.airbyte.cdk.integrations.destination.s3.avro.JsonToAvroSchemaConverter +import io.airbyte.commons.util.MoreIterators + +/** + * Helper methods for unit tests. This is needed by multiple modules, so it is in the src directory. + */ +object AvroRecordHelper { + @JvmStatic + fun getFieldNameUpdater( + streamName: String, + namespace: String?, + streamSchema: JsonNode + ): JsonFieldNameUpdater { + val schemaConverter = JsonToAvroSchemaConverter() + schemaConverter.getAvroSchema(streamSchema, streamName, namespace) + return JsonFieldNameUpdater(schemaConverter.getStandardizedNames()) + } + + /** + * Convert an Airbyte JsonNode from Avro / Parquet Record to a plain one. + * + * * Remove the airbyte id and emission timestamp fields. + * * Remove null fields that must exist in Parquet but does not in original Json. This function + * mutates the input Json. + */ + @JvmStatic + fun pruneAirbyteJson(input: JsonNode): JsonNode { + val output = input as ObjectNode + + // Remove Airbyte columns. + output.remove(JavaBaseConstants.COLUMN_NAME_AB_ID) + output.remove(JavaBaseConstants.COLUMN_NAME_EMITTED_AT) + + // Fields with null values does not exist in the original Json but only in Parquet. + for (field in MoreIterators.toList(output.fieldNames())) { + if (output[field] == null || output[field].isNull) { + output.remove(field) + } + } + + return output + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/util/CompressionType.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/util/CompressionType.kt new file mode 100644 index 0000000000000..5522e1a8f8c35 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/util/CompressionType.kt @@ -0,0 +1,10 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.s3.util + +enum class CompressionType(val fileExtension: String) { + NO_COMPRESSION(""), + GZIP(".gz") +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/util/CompressionTypeHelper.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/util/CompressionTypeHelper.kt new file mode 100644 index 0000000000000..57596a55dcf96 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/util/CompressionTypeHelper.kt @@ -0,0 +1,25 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.util + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.destination.s3.S3DestinationConstants +import java.util.* + +object CompressionTypeHelper { + /** Sample expected input: { "compression_type": "No Compression" } */ + @JvmStatic + fun parseCompressionType(compressionConfig: JsonNode?): CompressionType { + if (compressionConfig == null || compressionConfig.isNull) { + return S3DestinationConstants.DEFAULT_COMPRESSION_TYPE + } + val compressionType = + compressionConfig[S3DestinationConstants.COMPRESSION_TYPE_ARG_NAME].asText() + return if (compressionType.uppercase(Locale.getDefault()) == CompressionType.GZIP.name) { + CompressionType.GZIP + } else { + CompressionType.NO_COMPRESSION + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/util/Flattening.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/util/Flattening.kt new file mode 100644 index 0000000000000..f8d7f2b658659 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/util/Flattening.kt @@ -0,0 +1,27 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.s3.util + +import com.fasterxml.jackson.annotation.JsonCreator +import kotlin.jvm.Throws + +enum class Flattening(val value: String) { + NO("No flattening"), + ROOT_LEVEL("Root level flattening"); + + companion object { + @JvmStatic + @JsonCreator + @Throws(IllegalArgumentException::class) + fun fromValue(value: String): Flattening { + for (f in entries) { + if (f.value.equals(value, ignoreCase = true)) { + return f + } + } + throw IllegalArgumentException("Unexpected value: $value") + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/util/JavaProcessRunner.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/util/JavaProcessRunner.kt new file mode 100644 index 0000000000000..7e90344ef3958 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/util/JavaProcessRunner.kt @@ -0,0 +1,30 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.util + +import io.airbyte.commons.io.LineGobbler +import java.io.File +import java.io.IOException +import java.util.* +import java.util.concurrent.TimeUnit +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +object JavaProcessRunner { + private val LOGGER: Logger = LoggerFactory.getLogger(JavaProcessRunner::class.java) + + @Throws(IOException::class, InterruptedException::class) + fun runProcess(path: String, run: Runtime, vararg commands: String?) { + LOGGER.info("Running process: " + Arrays.asList(*commands)) + val pr = + if (path == System.getProperty("user.dir")) run.exec(commands) + else run.exec(commands, null, File(path)) + LineGobbler.gobble(`is` = pr.errorStream, { LOGGER.warn(it) }) + LineGobbler.gobble(`is` = pr.inputStream, { LOGGER.info(it) }) + if (!pr.waitFor(10, TimeUnit.MINUTES)) { + pr.destroy() + throw RuntimeException("Timeout while executing: " + commands.contentToString()) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/util/S3NameTransformer.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/util/S3NameTransformer.kt new file mode 100644 index 0000000000000..234fbb3e31f00 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/util/S3NameTransformer.kt @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.s3.util + +import io.airbyte.cdk.integrations.destination.StandardNameTransformer +import java.text.Normalizer +import java.util.regex.Pattern + +open class S3NameTransformer : StandardNameTransformer() { + + companion object { + // see https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-keys.html + private const val S3_SAFE_CHARACTERS = "\\p{Alnum}/!_.*')(" + private const val S3_SPECIAL_CHARACTERS = "&$@=;:+,?-" + private val S3_CHARACTER_PATTERN = + "[^${S3_SAFE_CHARACTERS}${Pattern.quote(S3_SPECIAL_CHARACTERS)}]" + } + + override fun convertStreamName(input: String): String { + return Normalizer.normalize(input, Normalizer.Form.NFKD) + .replace( + "\\p{M}".toRegex(), + "", + ) // P{M} matches a code point that is not a combining mark (unicode) + .replace(S3_CHARACTER_PATTERN.toRegex(), "_") + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/util/S3OutputPathHelper.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/util/S3OutputPathHelper.kt new file mode 100644 index 0000000000000..67330b0ab2553 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/util/S3OutputPathHelper.kt @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.util + +import io.airbyte.cdk.integrations.destination.s3.S3DestinationConstants +import io.airbyte.protocol.models.v0.AirbyteStream +import java.util.* + +object S3OutputPathHelper { + @JvmStatic + fun getOutputPrefix(bucketPath: String?, stream: AirbyteStream): String { + return getOutputPrefix(bucketPath, stream.namespace, stream.name) + } + + /** Prefix: <bucket-path>/<source-namespace-if-present>/<stream-name> */ + // Prefix: // + fun getOutputPrefix(bucketPath: String?, namespace: String?, streamName: String): String { + val paths: MutableList = LinkedList() + + if (bucketPath != null) { + paths.add(bucketPath) + } + if (namespace != null) { + paths.add(S3DestinationConstants.NAME_TRANSFORMER.convertStreamName(namespace)) + } + paths.add(S3DestinationConstants.NAME_TRANSFORMER.convertStreamName(streamName)) + + return java.lang.String.join("/", paths).replace("/+".toRegex(), "/") + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/util/StreamTransferManagerFactory.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/util/StreamTransferManagerFactory.kt new file mode 100644 index 0000000000000..2b0af7db50118 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/util/StreamTransferManagerFactory.kt @@ -0,0 +1,80 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.s3.util + +import alex.mojaki.s3upload.StreamTransferManager +import com.amazonaws.services.s3.AmazonS3 +import io.github.oshai.kotlinlogging.KotlinLogging + +private val logger = KotlinLogging.logger {} + +object StreamTransferManagerFactory { + + // See this doc about how they affect memory usage: + // https://alexmojaki.github.io/s3-stream-upload/javadoc/apidocs/alex/mojaki/s3upload/StreamTransferManager.html + // Total memory = (numUploadThreads + queueCapacity) * partSize + numStreams * (partSize + 6MB) + // = 31 MB at current configurations + const val DEFAULT_UPLOAD_THREADS: Int = 2 + const val DEFAULT_QUEUE_CAPACITY: Int = 2 + const val DEFAULT_PART_SIZE_MB: Int = 5 + + // MAX object size for AWS and GCS is 5TB (max allowed 10,000 parts*525mb) + // (https://aws.amazon.com/s3/faqs/, https://cloud.google.com/storage/quotas) + const val MAX_ALLOWED_PART_SIZE_MB: Int = 525 + const val DEFAULT_NUM_STREAMS: Int = 1 + + @JvmStatic + fun create(bucketName: String?, objectKey: String, s3Client: AmazonS3?): Builder { + return Builder(bucketName, objectKey, s3Client) + } + + class Builder + internal constructor( + private val bucketName: String?, + private val objectKey: String, + private val s3Client: AmazonS3? + ) { + private var userMetadata: Map? = null + private var partSize = DEFAULT_PART_SIZE_MB.toLong() + + fun setPartSize(partSize: Long): Builder { + if (partSize < DEFAULT_PART_SIZE_MB) { + logger.warn { + "Part size $partSize is smaller than the minimum allowed, default to $DEFAULT_PART_SIZE_MB" + } + this.partSize = DEFAULT_PART_SIZE_MB.toLong() + } else if (partSize > MAX_ALLOWED_PART_SIZE_MB) { + logger.warn { + "Part size $partSize is larger than the maximum allowed, default to $MAX_ALLOWED_PART_SIZE_MB" + } + this.partSize = MAX_ALLOWED_PART_SIZE_MB.toLong() + } else { + this.partSize = partSize + } + return this + } + + fun setUserMetadata(userMetadata: Map?): Builder { + this.userMetadata = userMetadata + return this + } + + fun get(): StreamTransferManager { + if (userMetadata == null) { + userMetadata = emptyMap() + } + return StreamTransferManagerWithMetadata( + bucketName, + objectKey, + s3Client, + userMetadata, + ) + .numStreams(DEFAULT_NUM_STREAMS) + .queueCapacity(DEFAULT_QUEUE_CAPACITY) + .numUploadThreads(DEFAULT_UPLOAD_THREADS) + .partSize(partSize) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/util/StreamTransferManagerWithMetadata.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/util/StreamTransferManagerWithMetadata.kt new file mode 100644 index 0000000000000..89156db689085 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/util/StreamTransferManagerWithMetadata.kt @@ -0,0 +1,34 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.util + +import alex.mojaki.s3upload.StreamTransferManager +import com.amazonaws.services.s3.AmazonS3 +import com.amazonaws.services.s3.model.InitiateMultipartUploadRequest +import com.amazonaws.services.s3.model.ObjectMetadata + +/** + * A custom stream transfer manager which overwrites the metadata on the + * InitiateMultipartUploadRequest. + * + * This is, apparently, the correct way to implement this functionality. + * https://github.com/alexmojaki/s3-stream-upload/issues/3 + */ +class StreamTransferManagerWithMetadata( + bucketName: String?, + putKey: String?, + s3Client: AmazonS3?, + private val userMetadata: Map? +) : StreamTransferManager(bucketName, putKey, s3Client) { + override fun customiseInitiateRequest(request: InitiateMultipartUploadRequest) { + if (userMetadata != null) { + var objectMetadata = request.getObjectMetadata() + if (objectMetadata == null) { + objectMetadata = ObjectMetadata() + } + objectMetadata.userMetadata = userMetadata + request.setObjectMetadata(objectMetadata) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/writer/BaseS3Writer.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/writer/BaseS3Writer.kt new file mode 100644 index 0000000000000..57df1c39890d9 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/writer/BaseS3Writer.kt @@ -0,0 +1,152 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.writer + +import com.amazonaws.services.s3.AmazonS3 +import com.amazonaws.services.s3.model.DeleteObjectsRequest +import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfig +import io.airbyte.cdk.integrations.destination.s3.S3DestinationConstants +import io.airbyte.cdk.integrations.destination.s3.template.S3FilenameTemplateManager +import io.airbyte.cdk.integrations.destination.s3.template.S3FilenameTemplateParameterObject +import io.airbyte.cdk.integrations.destination.s3.util.S3OutputPathHelper +import io.airbyte.protocol.models.v0.AirbyteStream +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import io.airbyte.protocol.models.v0.DestinationSyncMode +import java.io.IOException +import java.text.DateFormat +import java.text.SimpleDateFormat +import java.util.* +import org.apache.commons.lang3.StringUtils +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * The base implementation takes care of the following: + * + * * Create shared instance variables. + * * Create the bucket and prepare the bucket path. + * * Log and close the write. + */ +abstract class BaseS3Writer +protected constructor( + protected val config: S3DestinationConfig, + protected val s3Client: AmazonS3, + configuredStream: ConfiguredAirbyteStream +) : DestinationFileWriter { + protected val stream: AirbyteStream = configuredStream.stream + protected val syncMode: DestinationSyncMode = configuredStream.destinationSyncMode + val outputPrefix: String? = S3OutputPathHelper.getOutputPrefix(config.bucketPath, stream) + + /** + * + * * 1. Create bucket if necessary. + * * 2. Under OVERWRITE mode, delete all objects with the output prefix. + */ + @Throws(IOException::class) + override fun initialize() { + try { + val bucket = config.bucketName + if (!s3Client.doesBucketExistV2(bucket)) { + LOGGER.info("Bucket {} does not exist; creating...", bucket) + s3Client.createBucket(bucket) + LOGGER.info("Bucket {} has been created.", bucket) + } + + if (syncMode == DestinationSyncMode.OVERWRITE) { + LOGGER.info("Overwrite mode") + val keysToDelete: MutableList = LinkedList() + val objects = s3Client.listObjects(bucket, outputPrefix).objectSummaries + for (`object` in objects) { + keysToDelete.add(DeleteObjectsRequest.KeyVersion(`object`.key)) + } + + if (keysToDelete.size > 0) { + LOGGER.info( + "Purging non-empty output path for stream '{}' under OVERWRITE mode...", + stream.name + ) + val result = + s3Client.deleteObjects(DeleteObjectsRequest(bucket).withKeys(keysToDelete)) + LOGGER.info( + "Deleted {} file(s) for stream '{}'.", + result.deletedObjects.size, + stream.name + ) + } + } + } catch (e: Exception) { + LOGGER.error("Failed to initialize: ", e) + closeWhenFail() + throw e + } + } + + /** Log and close the write. */ + @Throws(IOException::class) + override fun close(hasFailed: Boolean) { + if (hasFailed) { + LOGGER.warn("Failure detected. Aborting upload of stream '{}'...", stream.name) + closeWhenFail() + LOGGER.warn("Upload of stream '{}' aborted.", stream.name) + } else { + LOGGER.info("Uploading remaining data for stream '{}'.", stream.name) + closeWhenSucceed() + LOGGER.info("Upload completed for stream '{}'.", stream.name) + } + } + + /** Operations that will run when the write succeeds. */ + @Throws(IOException::class) + protected open fun closeWhenSucceed() { + // Do nothing by default + } + + /** Operations that will run when the write fails. */ + @Throws(IOException::class) + protected open fun closeWhenFail() { + // Do nothing by default + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(BaseS3Writer::class.java) + + private val s3FilenameTemplateManager = S3FilenameTemplateManager() + private const val DEFAULT_SUFFIX = "_0" + + @JvmStatic + @Throws(IOException::class) + fun determineOutputFilename(parameterObject: S3FilenameTemplateParameterObject): String { + return if (StringUtils.isNotBlank(parameterObject.fileNamePattern)) + getOutputFilename(parameterObject) + else getDefaultOutputFilename(parameterObject) + } + + /** + * @param parameterObject + * - an object which holds all necessary parameters required for default filename creation. + * @return A string in the format + * "{upload-date}_{upload-millis}_{suffix}.{format-extension}". For example, + * "2021_12_09_1639077474000_customSuffix.csv" + */ + private fun getDefaultOutputFilename( + parameterObject: S3FilenameTemplateParameterObject + ): String { + val formatter: DateFormat = + SimpleDateFormat(S3DestinationConstants.YYYY_MM_DD_FORMAT_STRING) + formatter.timeZone = TimeZone.getTimeZone("UTC") + return String.format( + "%s_%d%s.%s", + formatter.format(parameterObject.timestamp), + parameterObject.timestamp!!.time, + parameterObject.customSuffix ?: DEFAULT_SUFFIX, + parameterObject.s3Format!!.fileExtension + ) + } + + @Throws(IOException::class) + private fun getOutputFilename(parameterObject: S3FilenameTemplateParameterObject): String { + return s3FilenameTemplateManager.applyPatternToFilename(parameterObject) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/writer/DestinationFileWriter.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/writer/DestinationFileWriter.kt new file mode 100644 index 0000000000000..0b92215614765 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/writer/DestinationFileWriter.kt @@ -0,0 +1,14 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.writer + +import io.airbyte.cdk.integrations.destination.s3.S3Format + +interface DestinationFileWriter : DestinationWriter { + val fileLocation: String + + val fileFormat: S3Format? + + val outputPath: String +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/writer/DestinationWriter.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/writer/DestinationWriter.kt new file mode 100644 index 0000000000000..bf34a9fd1da86 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/writer/DestinationWriter.kt @@ -0,0 +1,37 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.writer + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.protocol.models.Jsons +import io.airbyte.protocol.models.v0.AirbyteRecordMessage +import java.io.IOException +import java.util.* + +/** + * [DestinationWriter] is responsible for writing Airbyte stream data to an S3 location in a + * specific format. + */ +interface DestinationWriter { + /** Prepare an S3 writer for the stream. */ + @Throws(IOException::class) fun initialize() + + /** Write an Airbyte record message to an S3 object. */ + @Throws(IOException::class) fun write(id: UUID, recordMessage: AirbyteRecordMessage) + + @Throws(IOException::class) fun write(formattedData: JsonNode) + + @Throws(IOException::class) + fun write(formattedData: String?) { + write(Jsons.deserialize(formattedData)) + } + + /** Close the S3 writer for the stream. */ + @Throws(IOException::class) fun close(hasFailed: Boolean) + + @Throws(IOException::class) + fun closeAfterPush() { + close(false) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/writer/ProductionWriterFactory.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/writer/ProductionWriterFactory.kt new file mode 100644 index 0000000000000..f3b4342a82187 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/writer/ProductionWriterFactory.kt @@ -0,0 +1,75 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.writer + +import com.amazonaws.services.s3.AmazonS3 +import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfig +import io.airbyte.cdk.integrations.destination.s3.S3Format +import io.airbyte.cdk.integrations.destination.s3.avro.AvroConstants +import io.airbyte.cdk.integrations.destination.s3.avro.JsonToAvroSchemaConverter +import io.airbyte.cdk.integrations.destination.s3.avro.S3AvroWriter +import io.airbyte.cdk.integrations.destination.s3.csv.S3CsvWriter +import io.airbyte.cdk.integrations.destination.s3.jsonl.S3JsonlWriter +import io.airbyte.cdk.integrations.destination.s3.parquet.S3ParquetWriter +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import java.sql.Timestamp +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +class ProductionWriterFactory : S3WriterFactory { + @Throws(Exception::class) + override fun create( + config: S3DestinationConfig, + s3Client: AmazonS3, + configuredStream: ConfiguredAirbyteStream, + uploadTimestamp: Timestamp + ): DestinationFileWriter? { + val format = config.formatConfig!!.format + + if (format == S3Format.AVRO || format == S3Format.PARQUET) { + val stream = configuredStream.stream + LOGGER.info("Json schema for stream {}: {}", stream.name, stream.jsonSchema) + + val schemaConverter = JsonToAvroSchemaConverter() + val avroSchema = + schemaConverter.getAvroSchema(stream.jsonSchema, stream.name, stream.namespace) + + LOGGER.info("Avro schema for stream {}: {}", stream.name, avroSchema.toString(false)) + + return if (format == S3Format.AVRO) { + S3AvroWriter( + config, + s3Client, + configuredStream, + uploadTimestamp, + avroSchema, + AvroConstants.JSON_CONVERTER + ) + } else { + S3ParquetWriter( + config, + s3Client, + configuredStream, + uploadTimestamp, + avroSchema, + AvroConstants.JSON_CONVERTER + ) + } + } + + if (format == S3Format.CSV) { + return S3CsvWriter.Builder(config, s3Client, configuredStream, uploadTimestamp).build() + } + + if (format == S3Format.JSONL) { + return S3JsonlWriter(config, s3Client, configuredStream, uploadTimestamp) + } + + throw RuntimeException("Unexpected S3 destination format: $format") + } + + companion object { + protected val LOGGER: Logger = LoggerFactory.getLogger(ProductionWriterFactory::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/writer/S3WriterFactory.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/writer/S3WriterFactory.kt new file mode 100644 index 0000000000000..d23d4f4f407b5 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/writer/S3WriterFactory.kt @@ -0,0 +1,20 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.writer + +import com.amazonaws.services.s3.AmazonS3 +import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfig +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import java.sql.Timestamp + +/** Create different [DestinationFileWriter] based on [S3DestinationConfig]. */ +interface S3WriterFactory { + @Throws(Exception::class) + fun create( + config: S3DestinationConfig, + s3Client: AmazonS3, + configuredStream: ConfiguredAirbyteStream, + uploadTimestamp: Timestamp + ): DestinationFileWriter? +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/staging/AsyncFlush.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/staging/AsyncFlush.kt new file mode 100644 index 0000000000000..8faa11cdbff70 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/staging/AsyncFlush.kt @@ -0,0 +1,130 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.staging + +import io.airbyte.cdk.db.jdbc.JdbcDatabase +import io.airbyte.cdk.integrations.destination.async.function.DestinationFlushFunction +import io.airbyte.cdk.integrations.destination.async.model.PartialAirbyteMessage +import io.airbyte.cdk.integrations.destination.jdbc.WriteConfig +import io.airbyte.cdk.integrations.destination.record_buffer.FileBuffer +import io.airbyte.cdk.integrations.destination.s3.csv.CsvSerializedBuffer +import io.airbyte.cdk.integrations.destination.s3.csv.StagingDatabaseCsvSheetGenerator +import io.airbyte.commons.json.Jsons +import io.airbyte.integrations.base.destination.typing_deduping.TypeAndDedupeOperationValve +import io.airbyte.integrations.base.destination.typing_deduping.TyperDeduper +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import io.airbyte.protocol.models.v0.StreamDescriptor +import io.github.oshai.kotlinlogging.KotlinLogging +import java.util.stream.Stream +import org.apache.commons.io.FileUtils + +/** + * Async flushing logic. Flushing async prevents backpressure and is the superior flushing strategy. + */ +private val logger = KotlinLogging.logger {} + +internal class AsyncFlush( + private val streamDescToWriteConfig: Map, + private val stagingOperations: StagingOperations?, + private val database: JdbcDatabase?, + private val catalog: ConfiguredAirbyteCatalog?, + private val typerDeduperValve: TypeAndDedupeOperationValve, + private val typerDeduper: TyperDeduper, + // In general, this size is chosen to improve the performance of lower memory + // connectors. With 1 Gi + // of + // resource the connector will usually at most fill up around 150 MB in a single queue. By + // lowering + // the batch size, the AsyncFlusher will flush in smaller batches which allows for memory to be + // freed earlier similar to a sliding window effect + override val optimalBatchSizeBytes: Long, + private val useDestinationsV2Columns: Boolean +) : DestinationFlushFunction { + + @Throws(Exception::class) + override fun flush(decs: StreamDescriptor, stream: Stream) { + val writer: CsvSerializedBuffer + try { + writer = + CsvSerializedBuffer( + FileBuffer(CsvSerializedBuffer.CSV_GZ_SUFFIX), + StagingDatabaseCsvSheetGenerator(useDestinationsV2Columns), + true + ) + + // reassign as lambdas require references to be final. + stream.forEach { record: PartialAirbyteMessage? -> + try { + // todo (cgardens) - most writers just go ahead and re-serialize the contents of + // the record message. + // we should either just pass the raw string or at least have a way to do that + // and create a default + // impl that maintains backwards compatible behavior. + writer.accept( + record!!.serialized!!, + Jsons.serialize(record.record!!.meta), + record.record!!.emittedAt + ) + } catch (e: Exception) { + throw RuntimeException(e) + } + } + } catch (e: Exception) { + throw RuntimeException(e) + } + + writer.flush() + logger.info { + "Flushing CSV buffer for stream ${decs.name} (${FileUtils.byteCountToDisplaySize(writer.byteCount)}) to staging" + } + require(streamDescToWriteConfig.containsKey(decs)) { + String.format( + "Message contained record from a stream that was not in the catalog. \ncatalog: %s", + Jsons.serialize(catalog) + ) + } + + val writeConfig: WriteConfig = streamDescToWriteConfig.getValue(decs) + val schemaName: String = writeConfig.outputSchemaName + val stageName = stagingOperations!!.getStageName(schemaName, writeConfig.outputTableName) + val stagingPath = + stagingOperations.getStagingPath( + GeneralStagingFunctions.RANDOM_CONNECTION_ID, + schemaName, + writeConfig.streamName, + writeConfig.outputTableName, + writeConfig.writeDatetime + ) + try { + val stagedFile = + stagingOperations.uploadRecordsToStage( + database, + writer, + schemaName, + stageName, + stagingPath + ) + GeneralStagingFunctions.copyIntoTableFromStage( + database, + stageName, + stagingPath, + listOf(stagedFile), + writeConfig.outputTableName, + schemaName, + stagingOperations, + writeConfig.namespace, + writeConfig.streamName, + typerDeduperValve, + typerDeduper + ) + } catch (e: Exception) { + logger.error(e) { + "Failed to flush and commit buffer data into destination's raw table" + } + throw RuntimeException("Failed to upload buffer to stage and commit to destination", e) + } + + writer.close() + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/staging/StagingConsumerFactory.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/staging/StagingConsumerFactory.kt new file mode 100644 index 0000000000000..85c5b0c8c4407 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/staging/StagingConsumerFactory.kt @@ -0,0 +1,328 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.staging + +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.base.Preconditions +import io.airbyte.cdk.db.jdbc.JdbcDatabase +import io.airbyte.cdk.integrations.base.SerializedAirbyteMessageConsumer +import io.airbyte.cdk.integrations.destination.NamingConventionTransformer +import io.airbyte.cdk.integrations.destination.async.AsyncStreamConsumer +import io.airbyte.cdk.integrations.destination.async.buffers.BufferManager +import io.airbyte.cdk.integrations.destination.async.deser.AirbyteMessageDeserializer +import io.airbyte.cdk.integrations.destination.async.deser.IdentityDataTransformer +import io.airbyte.cdk.integrations.destination.async.deser.StreamAwareDataTransformer +import io.airbyte.cdk.integrations.destination.async.state.FlushFailure +import io.airbyte.cdk.integrations.destination.jdbc.WriteConfig +import io.airbyte.commons.exceptions.ConfigErrorException +import io.airbyte.integrations.base.destination.typing_deduping.ParsedCatalog +import io.airbyte.integrations.base.destination.typing_deduping.TypeAndDedupeOperationValve +import io.airbyte.integrations.base.destination.typing_deduping.TyperDeduper +import io.airbyte.protocol.models.v0.* +import java.time.Instant +import java.util.* +import java.util.concurrent.Executors +import java.util.function.Consumer +import java.util.function.Function +import java.util.stream.Collectors +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * Uses both Factory and Consumer design pattern to create a single point of creation for consuming + * [AirbyteMessage] for processing + */ +class StagingConsumerFactory +private constructor( + private val outputRecordCollector: Consumer?, + private val database: JdbcDatabase?, + private val stagingOperations: StagingOperations?, + private val namingResolver: NamingConventionTransformer?, + private val config: JsonNode?, + private val catalog: ConfiguredAirbyteCatalog?, + private val purgeStagingData: Boolean, + private val typerDeduperValve: TypeAndDedupeOperationValve?, + private val typerDeduper: TyperDeduper?, + private val parsedCatalog: ParsedCatalog?, + private val defaultNamespace: String?, + private val useDestinationsV2Columns: Boolean, + // Optional fields + private val bufferMemoryLimit: Optional, + private val optimalBatchSizeBytes: Long, + private val dataTransformer: StreamAwareDataTransformer +) : SerialStagingConsumerFactory() { + class Builder { + // Required (?) fields + // (TODO which of these are _actually_ required, and which have we just coincidentally + // always + // provided?) + var outputRecordCollector: Consumer? = null + var database: JdbcDatabase? = null + var stagingOperations: StagingOperations? = null + var namingResolver: NamingConventionTransformer? = null + var config: JsonNode? = null + var catalog: ConfiguredAirbyteCatalog? = null + var purgeStagingData: Boolean = false + var typerDeduperValve: TypeAndDedupeOperationValve? = null + var typerDeduper: TyperDeduper? = null + var parsedCatalog: ParsedCatalog? = null + var defaultNamespace: String? = null + var useDestinationsV2Columns: Boolean = false + + // Optional fields + private var bufferMemoryLimit = Optional.empty() + private var optimalBatchSizeBytes = (50 * 1024 * 1024).toLong() + + private var dataTransformer: StreamAwareDataTransformer? = null + + fun setBufferMemoryLimit(bufferMemoryLimit: Optional): Builder { + this.bufferMemoryLimit = bufferMemoryLimit + return this + } + + fun setOptimalBatchSizeBytes(optimalBatchSizeBytes: Long): Builder { + this.optimalBatchSizeBytes = optimalBatchSizeBytes + return this + } + + fun setDataTransformer(dataTransformer: StreamAwareDataTransformer?): Builder { + this.dataTransformer = dataTransformer + return this + } + + fun build(): StagingConsumerFactory { + return StagingConsumerFactory( + outputRecordCollector, + database, + stagingOperations, + namingResolver, + config, + catalog, + purgeStagingData, + typerDeduperValve, + typerDeduper, + parsedCatalog, + defaultNamespace, + useDestinationsV2Columns, + bufferMemoryLimit, + optimalBatchSizeBytes, + (if (dataTransformer != null) dataTransformer else IdentityDataTransformer())!! + ) + } + } + + fun createAsync(): SerializedAirbyteMessageConsumer { + val typerDeduper = this.typerDeduper!! + val typerDeduperValve = this.typerDeduperValve!! + val stagingOperations = this.stagingOperations!! + + val writeConfigs: List = + createWriteConfigs( + namingResolver, + config, + catalog, + parsedCatalog, + useDestinationsV2Columns + ) + val streamDescToWriteConfig: Map = + streamDescToWriteConfig(writeConfigs) + val flusher = + AsyncFlush( + streamDescToWriteConfig, + stagingOperations, + database, + catalog, + typerDeduperValve, + typerDeduper, + optimalBatchSizeBytes, + useDestinationsV2Columns + ) + return AsyncStreamConsumer( + outputRecordCollector!!, + GeneralStagingFunctions.onStartFunction( + database!!, + stagingOperations, + writeConfigs, + typerDeduper + ), + GeneralStagingFunctions.onCloseFunction( + database, + stagingOperations, + writeConfigs, + purgeStagingData, + typerDeduper + ), + flusher, + catalog!!, + BufferManager(getMemoryLimit(bufferMemoryLimit)), + Optional.ofNullable(defaultNamespace), + FlushFailure(), + Executors.newFixedThreadPool(5), + AirbyteMessageDeserializer(dataTransformer), + ) + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(StagingConsumerFactory::class.java) + + private val SYNC_DATETIME: Instant = Instant.now() + + @JvmStatic + fun builder( + outputRecordCollector: Consumer, + database: JdbcDatabase?, + stagingOperations: StagingOperations, + namingResolver: NamingConventionTransformer?, + config: JsonNode?, + catalog: ConfiguredAirbyteCatalog, + purgeStagingData: Boolean, + typerDeduperValve: TypeAndDedupeOperationValve, + typerDeduper: TyperDeduper, + parsedCatalog: ParsedCatalog?, + defaultNamespace: String?, + useDestinationsV2Columns: Boolean + ): Builder { + val builder = Builder() + builder.outputRecordCollector = outputRecordCollector + builder.database = database + builder.stagingOperations = stagingOperations + builder.namingResolver = namingResolver + builder.config = config + builder.catalog = catalog + builder.purgeStagingData = purgeStagingData + builder.typerDeduperValve = typerDeduperValve + builder.typerDeduper = typerDeduper + builder.parsedCatalog = parsedCatalog + builder.defaultNamespace = defaultNamespace + builder.useDestinationsV2Columns = useDestinationsV2Columns + return builder + } + + private fun getMemoryLimit(bufferMemoryLimit: Optional): Long { + return bufferMemoryLimit.orElse( + (Runtime.getRuntime().maxMemory() * BufferManager.MEMORY_LIMIT_RATIO).toLong() + ) + } + + private fun streamDescToWriteConfig( + writeConfigs: List + ): Map { + val conflictingStreams: MutableSet = HashSet() + val streamDescToWriteConfig: MutableMap = + HashMap() + for (config in writeConfigs) { + val streamIdentifier = toStreamDescriptor(config) + if (streamDescToWriteConfig.containsKey(streamIdentifier)) { + conflictingStreams.add(config) + val existingConfig: WriteConfig = + streamDescToWriteConfig.getValue(streamIdentifier) + // The first conflicting stream won't have any problems, so we need to + // explicitly add it here. + conflictingStreams.add(existingConfig) + } else { + streamDescToWriteConfig[streamIdentifier] = config + } + } + if (!conflictingStreams.isEmpty()) { + val message = + String.format( + "You are trying to write multiple streams to the same table. Consider switching to a custom namespace format using \${SOURCE_NAMESPACE}, or moving one of them into a separate connection with a different stream prefix. Affected streams: %s", + conflictingStreams + .stream() + .map( + Function { config: WriteConfig -> + config.namespace + "." + config.streamName + } + ) + .collect(Collectors.joining(", ")) + ) + throw ConfigErrorException(message) + } + return streamDescToWriteConfig + } + + private fun toStreamDescriptor(config: WriteConfig): StreamDescriptor { + return StreamDescriptor().withName(config.streamName).withNamespace(config.namespace) + } + + /** + * Creates a list of all [WriteConfig] for each stream within a [ConfiguredAirbyteCatalog]. + * Each write config represents the configuration settings for writing to a destination + * connector + * + * @param namingResolver [NamingConventionTransformer] used to transform names that are + * acceptable by each destination connector + * @param config destination connector configuration parameters + * @param catalog [ConfiguredAirbyteCatalog] collection of configured + * [ConfiguredAirbyteStream] + * @return list of all write configs for each stream in a [ConfiguredAirbyteCatalog] + */ + private fun createWriteConfigs( + namingResolver: NamingConventionTransformer?, + config: JsonNode?, + catalog: ConfiguredAirbyteCatalog?, + parsedCatalog: ParsedCatalog?, + useDestinationsV2Columns: Boolean + ): List { + return catalog!! + .streams + .stream() + .map(toWriteConfig(namingResolver, config, parsedCatalog, useDestinationsV2Columns)) + .toList() + } + + private fun toWriteConfig( + namingResolver: NamingConventionTransformer?, + config: JsonNode?, + parsedCatalog: ParsedCatalog?, + useDestinationsV2Columns: Boolean + ): Function { + return Function { stream: ConfiguredAirbyteStream + -> + Preconditions.checkNotNull( + stream.destinationSyncMode, + "Undefined destination sync mode" + ) + val abStream = stream.stream + val streamName = abStream.name + + val outputSchema: String + val tableName: String + if (useDestinationsV2Columns) { + val streamId = parsedCatalog!!.getStream(abStream.namespace, streamName).id + outputSchema = streamId.rawNamespace!! + tableName = streamId.rawName!! + } else { + outputSchema = + getOutputSchema(abStream, config!!["schema"].asText(), namingResolver) + tableName = namingResolver!!.getRawTableName(streamName) + } + val tmpTableName = namingResolver!!.getTmpTableName(streamName) + val syncMode = stream.destinationSyncMode + + val writeConfig: WriteConfig = + WriteConfig( + streamName, + abStream.namespace, + outputSchema, + tmpTableName, + tableName, + syncMode, + SYNC_DATETIME + ) + LOGGER.info("Write config: {}", writeConfig) + writeConfig + } + } + + private fun getOutputSchema( + stream: AirbyteStream, + defaultDestSchema: String, + namingResolver: NamingConventionTransformer? + ): String { + return if (stream.namespace != null) namingResolver!!.getNamespace(stream.namespace) + else namingResolver!!.getNamespace(defaultDestSchema) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/AesCbcEnvelopeEncryptionBlobDecoratorTest.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/AesCbcEnvelopeEncryptionBlobDecoratorTest.java deleted file mode 100644 index 15e30ec92d6e5..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/AesCbcEnvelopeEncryptionBlobDecoratorTest.java +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3; - -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.OutputStream; -import java.nio.charset.StandardCharsets; -import java.util.Base64; -import java.util.Base64.Decoder; -import java.util.HashMap; -import java.util.Map; -import javax.crypto.spec.SecretKeySpec; -import org.apache.commons.io.IOUtils; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -public class AesCbcEnvelopeEncryptionBlobDecoratorTest { - - private static final Decoder BASE64_DECODER = Base64.getDecoder(); - // A random base64-encoded 256-bit AES key - public static final String KEY_ENCRYPTING_KEY = "oFf0LY0Zae9ksNZsPSJG8ZLGRRBUUhitaPKWRPPKTvM="; - // Another base64-encoded random 256-bit AES key - public static final String CONTENT_ENCRYPTING_KEY = "9ZAVuZE8L4hJCFQS49OMNeFRGTCBUHAFOgkW3iZkOq8="; - // A random base64-encoded 16-byte array - public static final String INITIALIZATION_VECTOR = "04YDvMCXpvTb2ilggLbDJQ=="; - // A small CSV file, which looks similar to what destination-s3 might upload - public static final String PLAINTEXT = """ - adc66b6e-6051-42db-b683-d978a51c3c02,"{""campaign.resource_name"":""cus""}",2022-04-04 22:32:50.046 - 0e253b28-bec6-4a90-8622-629d3e542982,"{""campaign.resource_name"":""cus""}",2022-04-04 22:32:50.047 - """; - // The encryption of the plaintext, using the CEK and IV defined above (base64-encoded). Equivalent - // to: - // base64Encode(encrypt("AES-CBC", PLAINTEXT, CONTENT_ENCRYPTING_KEY, INITIALIZATION_VECTOR) - public static final String CIPHERTEXT = - "IRfz0FN05Y9yyne+0V+G14xYjA4B0+ter7qniDheIu9UM3Fdmu/mqjyFvYFIRTroP5kNJ1SH3FaArE5aHkrWMPwSkczkhArajfYX+UEfGH68YyWOSnpdxuviTTgK3Ee3OVTz3ZlziOB8jCMjupJ9pqkLnxg7Ghe3BQ1puOHGFDMmIgiP4Zfz0fkdlUyZOvsJ7xpncD24G6IIJNwOyo4CedULgueHdybmxr4oddhAja8QxJxZzlfZl4suJ+KWvt78MSdkRlp+Ip99U8n0O7BLJA=="; - // The encryption of the CEK, using the KEK defined above (base64-encoded). Equivalent to: - // base64Encode(encrypt("AES-ECB", CONTENT_ENCRYPTING_KEY, KEY_ENCRYPTING_KEY) - public static final String ENCRYPTED_CEK = "Ck5u5cKqcY+bcFBrpsPHHUNw5Qx8nYDJ2Vqt6XG6kwxjVAJQKKljPv9NDsG6Ncoc"; - - private AesCbcEnvelopeEncryptionBlobDecorator decorator; - - @BeforeEach - public void setup() { - decorator = new AesCbcEnvelopeEncryptionBlobDecorator( - new SecretKeySpec(BASE64_DECODER.decode(KEY_ENCRYPTING_KEY), "AES"), - new SecretKeySpec(BASE64_DECODER.decode(CONTENT_ENCRYPTING_KEY), "AES"), - BASE64_DECODER.decode(INITIALIZATION_VECTOR)); - } - - @Test - public void testEncryption() throws IOException { - final ByteArrayOutputStream stream = new ByteArrayOutputStream(); - - try (final OutputStream wrapped = decorator.wrap(stream)) { - IOUtils.write( - PLAINTEXT, - wrapped, - StandardCharsets.UTF_8); - } - - Assertions.assertArrayEquals( - BASE64_DECODER.decode(CIPHERTEXT), - stream.toByteArray()); - } - - @Test - public void testMetadataInsertion() { - final Map metadata = new HashMap<>(); - - decorator.updateMetadata( - metadata, - Map.of( - AesCbcEnvelopeEncryptionBlobDecorator.ENCRYPTED_CONTENT_ENCRYPTING_KEY, "the_cek", - AesCbcEnvelopeEncryptionBlobDecorator.INITIALIZATION_VECTOR, "the_iv")); - - Assertions.assertEquals( - Map.of( - "the_cek", ENCRYPTED_CEK, - "the_iv", INITIALIZATION_VECTOR), - metadata); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/BlobDecoratorTest.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/BlobDecoratorTest.java deleted file mode 100644 index 54c74060569c1..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/BlobDecoratorTest.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Test; - -public class BlobDecoratorTest { - - @Test - public void testOverwriteMetadata() { - final Map metadata = new HashMap<>(); - metadata.put("amz-foo", "oldValue"); - - BlobDecorator.insertMetadata( - metadata, - Map.of("foo", "amz-foo"), - "foo", "newValue"); - - assertEquals(Map.of("amz-foo", "newValue"), metadata); - } - - @Test - public void testNewMetadata() { - final Map metadata = new HashMap<>(); - metadata.put("amz-foo", "oldValue"); - - BlobDecorator.insertMetadata( - metadata, - Map.of("bar", "amz-bar"), - "bar", "newValue"); - - assertEquals( - Map.of( - "amz-foo", "oldValue", - "amz-bar", "newValue"), - metadata); - } - - @Test - public void testSkipMetadata() { - final Map metadata = new HashMap<>(); - metadata.put("amz-foo", "oldValue"); - - BlobDecorator.insertMetadata( - metadata, - Map.of("foo", "amz-foo"), - "bar", "newValue"); - - assertEquals(Map.of("amz-foo", "oldValue"), metadata); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/S3BaseChecksTest.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/S3BaseChecksTest.java deleted file mode 100644 index dee0146ed431f..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/S3BaseChecksTest.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3; - -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.ArgumentMatchers.startsWith; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.model.ListObjectsRequest; -import io.airbyte.cdk.integrations.destination.s3.util.S3NameTransformer; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.mockito.ArgumentMatchers; - -public class S3BaseChecksTest { - - private AmazonS3 s3Client; - - @BeforeEach - public void setup() { - s3Client = mock(AmazonS3.class); - when(s3Client.doesObjectExist(anyString(), eq(""))).thenThrow(new IllegalArgumentException("Object path must not be empty")); - when(s3Client.putObject(anyString(), eq(""), anyString())).thenThrow(new IllegalArgumentException("Object path must not be empty")); - } - - @Test - public void attemptWriteAndDeleteS3Object_should_createSpecificFiles() { - S3DestinationConfig config = new S3DestinationConfig( - null, - "test_bucket", - "test/bucket/path", - null, - null, - null, - null, - s3Client); - S3StorageOperations operations = new S3StorageOperations(new S3NameTransformer(), s3Client, config); - when(s3Client.doesObjectExist("test_bucket", "test/bucket/path/")).thenReturn(false); - - S3BaseChecks.attemptS3WriteAndDelete(operations, config, "test/bucket/path"); - - verify(s3Client).putObject(eq("test_bucket"), startsWith("test/bucket/path/_airbyte_connection_test_"), anyString()); - verify(s3Client).listObjects(ArgumentMatchers.argThat(request -> "test_bucket".equals(request.getBucketName()))); - verify(s3Client).deleteObject(eq("test_bucket"), startsWith("test/bucket/path/_airbyte_connection_test_")); - } - - @Test - public void attemptWriteAndDeleteS3Object_should_skipDirectoryCreateIfRootPath() { - S3DestinationConfig config = new S3DestinationConfig( - null, - "test_bucket", - "", - null, - null, - null, - null, - s3Client); - S3StorageOperations operations = new S3StorageOperations(new S3NameTransformer(), s3Client, config); - - S3BaseChecks.attemptS3WriteAndDelete(operations, config, ""); - - verify(s3Client, never()).putObject("test_bucket", "", ""); - verify(s3Client).putObject(eq("test_bucket"), startsWith("_airbyte_connection_test_"), anyString()); - verify(s3Client).listObjects(ArgumentMatchers.argThat(request -> "test_bucket".equals(request.getBucketName()))); - verify(s3Client).deleteObject(eq("test_bucket"), startsWith("_airbyte_connection_test_")); - } - - @Test - public void attemptWriteAndDeleteS3Object_should_skipDirectoryCreateIfNullPath() { - S3DestinationConfig config = new S3DestinationConfig( - null, - "test_bucket", - null, - null, - null, - null, - null, - s3Client); - S3StorageOperations operations = new S3StorageOperations(new S3NameTransformer(), s3Client, config); - - S3BaseChecks.attemptS3WriteAndDelete(operations, config, null); - - verify(s3Client, never()).putObject("test_bucket", "", ""); - verify(s3Client).putObject(eq("test_bucket"), startsWith("_airbyte_connection_test_"), anyString()); - verify(s3Client).listObjects(ArgumentMatchers.argThat(request -> "test_bucket".equals(request.getBucketName()))); - verify(s3Client).deleteObject(eq("test_bucket"), startsWith("_airbyte_connection_test_")); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/S3DestinationConfigTest.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/S3DestinationConfigTest.java deleted file mode 100644 index b6166cfbc6876..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/S3DestinationConfigTest.java +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3; - -import static org.assertj.core.api.AssertionsForClassTypes.assertThat; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotEquals; - -import com.amazonaws.auth.AWSCredentials; -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.destination.s3.credential.S3AccessKeyCredentialConfig; -import io.airbyte.commons.json.Jsons; -import org.junit.jupiter.api.Test; - -class S3DestinationConfigTest { - - private static final S3DestinationConfig CONFIG = S3DestinationConfig.create("test-bucket", "test-path", "test-region") - .withEndpoint("test-endpoint") - .withPathFormat("${STREAM_NAME}/${NAMESPACE}") - .withAccessKeyCredential("test-key", "test-secret") - .get(); - - @Test - public void testCreateFromExistingConfig() { - assertEquals(CONFIG, S3DestinationConfig.create(CONFIG).get()); - } - - @Test - public void testCreateAndModify() { - final String newBucketName = "new-bucket"; - final String newBucketPath = "new-path"; - final String newBucketRegion = "new-region"; - final String newEndpoint = "new-endpoint"; - final String newKey = "new-key"; - final String newSecret = "new-secret"; - - final S3DestinationConfig modifiedConfig = S3DestinationConfig.create(CONFIG) - .withBucketName(newBucketName) - .withBucketPath(newBucketPath) - .withBucketRegion(newBucketRegion) - .withEndpoint(newEndpoint) - .withAccessKeyCredential(newKey, newSecret) - .get(); - - assertNotEquals(CONFIG, modifiedConfig); - assertEquals(newBucketName, modifiedConfig.getBucketName()); - assertEquals(newBucketPath, modifiedConfig.getBucketPath()); - assertEquals(newBucketRegion, modifiedConfig.getBucketRegion()); - - final S3AccessKeyCredentialConfig credentialConfig = (S3AccessKeyCredentialConfig) modifiedConfig.getS3CredentialConfig(); - assertEquals(newKey, credentialConfig.getAccessKeyId()); - assertEquals(newSecret, credentialConfig.getSecretAccessKey()); - } - - @Test - public void testGetS3DestinationConfigAWS_S3Provider() { - final JsonNode s3config = Jsons.deserialize("{\n" - + " \"s3_bucket_name\": \"paste-bucket-name-here\",\n" - + " \"s3_bucket_path\": \"integration-test\",\n" - + " \"s3_bucket_region\": \"paste-bucket-region-here\",\n" - + " \"access_key_id\": \"paste-access-key-id-here\",\n" - + " \"secret_access_key\": \"paste-secret-access-key-here\"\n" - + "}"); - - final S3DestinationConfig result = S3DestinationConfig.getS3DestinationConfig(s3config, StorageProvider.AWS_S3); - - assertThat(result.getEndpoint()).isEmpty(); - assertThat(result.getBucketName()).isEqualTo("paste-bucket-name-here"); - assertThat(result.getBucketPath()).isEqualTo("integration-test"); - assertThat(result.getBucketRegion()).isEqualTo("paste-bucket-region-here"); - assertThat(result.getPathFormat()).isEqualTo("${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_"); - final AWSCredentials awsCredentials = result.getS3CredentialConfig().getS3CredentialsProvider().getCredentials(); - assertThat(awsCredentials.getAWSAccessKeyId()).isEqualTo("paste-access-key-id-here"); - assertThat(awsCredentials.getAWSSecretKey()).isEqualTo("paste-secret-access-key-here"); - assertThat(result.isCheckIntegrity()).isEqualTo(true); - } - - @Test - public void testGetS3DestinationConfigCF_R2Provider() { - final JsonNode s3config = Jsons.deserialize("{\n" - + " \"s3_bucket_name\": \"paste-bucket-name-here\",\n" - + " \"s3_bucket_path\": \"integration-test\",\n" - + " \"account_id\": \"paster-account-id-here\",\n" - + " \"access_key_id\": \"paste-access-key-id-here\",\n" - + " \"secret_access_key\": \"paste-secret-access-key-here\"\n" - + "}\n"); - - final S3DestinationConfig result = S3DestinationConfig.getS3DestinationConfig(s3config, StorageProvider.CF_R2); - - assertThat(result.getEndpoint()).isEqualTo("https://paster-account-id-here.r2.cloudflarestorage.com"); - assertThat(result.getBucketName()).isEqualTo("paste-bucket-name-here"); - assertThat(result.getBucketPath()).isEqualTo("integration-test"); - assertThat(result.getBucketRegion()).isNull(); - assertThat(result.getPathFormat()).isEqualTo("${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_"); - final AWSCredentials awsCredentials = result.getS3CredentialConfig().getS3CredentialsProvider().getCredentials(); - assertThat(awsCredentials.getAWSAccessKeyId()).isEqualTo("paste-access-key-id-here"); - assertThat(awsCredentials.getAWSSecretKey()).isEqualTo("paste-secret-access-key-here"); - assertThat(result.isCheckIntegrity()).isEqualTo(false); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/S3FormatConfigsTest.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/S3FormatConfigsTest.java deleted file mode 100644 index 2a3d93e2a1ef3..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/S3FormatConfigsTest.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.destination.s3.csv.S3CsvFormatConfig; -import io.airbyte.cdk.integrations.destination.s3.util.CompressionType; -import io.airbyte.cdk.integrations.destination.s3.util.Flattening; -import io.airbyte.commons.json.Jsons; -import java.util.Map; -import org.junit.jupiter.api.Test; - -// S3FormatConfigs -public class S3FormatConfigsTest { - - @Test - // When CSV format is specified, it returns CSV format config - public void testGetCsvS3FormatConfig() { - final JsonNode configJson = Jsons.jsonNode(Map.of( - "format", Jsons.jsonNode(Map.of( - "format_type", S3Format.CSV.toString(), - "flattening", Flattening.ROOT_LEVEL.getValue(), - "compression", Jsons.jsonNode(Map.of( - "compression_type", "No Compression")))))); - - final S3FormatConfig formatConfig = S3FormatConfigs.getS3FormatConfig(configJson); - assertEquals(formatConfig.getFormat(), S3Format.CSV); - assertTrue(formatConfig instanceof S3CsvFormatConfig); - final S3CsvFormatConfig csvFormatConfig = (S3CsvFormatConfig) formatConfig; - assertEquals(Flattening.ROOT_LEVEL, csvFormatConfig.getFlattening()); - assertEquals(CompressionType.NO_COMPRESSION, csvFormatConfig.getCompressionType()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/S3StorageOperationsTest.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/S3StorageOperationsTest.java deleted file mode 100644 index 138e9d393ce56..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/S3StorageOperationsTest.java +++ /dev/null @@ -1,167 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.model.DeleteObjectsRequest; -import com.amazonaws.services.s3.model.ListObjectsRequest; -import com.amazonaws.services.s3.model.ObjectListing; -import com.amazonaws.services.s3.model.S3ObjectSummary; -import io.airbyte.cdk.integrations.destination.NamingConventionTransformer; -import io.airbyte.cdk.integrations.destination.s3.util.S3NameTransformer; -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.TimeUnit; -import java.util.regex.Pattern; -import org.joda.time.DateTime; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; - -public class S3StorageOperationsTest { - - private static final String BUCKET_NAME = "fake-bucket"; - private static final String FAKE_BUCKET_PATH = "fake-bucketPath"; - private static final String NAMESPACE = "namespace"; - private static final String STREAM_NAME = "stream_name1"; - private static final String OBJECT_TO_DELETE = NAMESPACE + "/" + STREAM_NAME + "/2022_04_04_123456789_0.csv.gz"; - private AmazonS3 s3Client; - private S3StorageOperations s3StorageOperations; - - @BeforeEach - public void setup() { - final NamingConventionTransformer nameTransformer = new S3NameTransformer(); - s3Client = mock(AmazonS3.class); - - final S3ObjectSummary objectSummary1 = mock(S3ObjectSummary.class); - final S3ObjectSummary objectSummary2 = mock(S3ObjectSummary.class); - final S3ObjectSummary objectSummary3 = mock(S3ObjectSummary.class); - when(objectSummary1.getKey()).thenReturn(OBJECT_TO_DELETE); - when(objectSummary2.getKey()).thenReturn(NAMESPACE + "/stream_name2/2022_04_04_123456789_0.csv.gz"); - when(objectSummary3.getKey()).thenReturn("other_files.txt"); - - final ObjectListing results = mock(ObjectListing.class); - when(results.isTruncated()).thenReturn(false); - when(results.getObjectSummaries()).thenReturn(List.of(objectSummary1, objectSummary2, objectSummary3)); - when(s3Client.listObjects(any(ListObjectsRequest.class))).thenReturn(results); - - final S3DestinationConfig s3Config = S3DestinationConfig.create(BUCKET_NAME, FAKE_BUCKET_PATH, "fake-region") - .withEndpoint("fake-endpoint") - .withAccessKeyCredential("fake-accessKeyId", "fake-secretAccessKey") - .withS3Client(s3Client) - .get(); - s3StorageOperations = new S3StorageOperations(nameTransformer, s3Client, s3Config); - } - - @Test - void testRegexMatch() { - final Pattern regexFormat = - Pattern.compile(s3StorageOperations.getRegexFormat(NAMESPACE, STREAM_NAME, S3DestinationConstants.DEFAULT_PATH_FORMAT)); - assertTrue(regexFormat.matcher(OBJECT_TO_DELETE).matches()); - assertTrue(regexFormat - .matcher(s3StorageOperations.getBucketObjectPath(NAMESPACE, STREAM_NAME, DateTime.now(), S3DestinationConstants.DEFAULT_PATH_FORMAT)) - .matches()); - assertFalse(regexFormat.matcher(NAMESPACE + "/" + STREAM_NAME + "/some_random_file_0.doc").matches()); - assertFalse(regexFormat.matcher(NAMESPACE + "/stream_name2/2022_04_04_123456789_0.csv.gz").matches()); - } - - @Test - void testCustomRegexMatch() { - final String customFormat = "${NAMESPACE}_${STREAM_NAME}_${YEAR}-${MONTH}-${DAY}-${HOUR}-${MINUTE}-${SECOND}-${MILLISECOND}-${EPOCH}-${UUID}"; - assertTrue(Pattern - .compile(s3StorageOperations.getRegexFormat(NAMESPACE, STREAM_NAME, customFormat)) - .matcher(s3StorageOperations.getBucketObjectPath(NAMESPACE, STREAM_NAME, DateTime.now(), customFormat)).matches()); - } - - @Test - void testGetExtension() { - assertEquals(".csv.gz", S3StorageOperations.getExtension("test.csv.gz")); - assertEquals(".gz", S3StorageOperations.getExtension("test.gz")); - assertEquals(".avro", S3StorageOperations.getExtension("test.avro")); - assertEquals("", S3StorageOperations.getExtension("test-file")); - } - - @Test - void testCleanUpBucketObject() { - final String pathFormat = S3DestinationConstants.DEFAULT_PATH_FORMAT; - s3StorageOperations.cleanUpBucketObject(NAMESPACE, STREAM_NAME, FAKE_BUCKET_PATH, pathFormat); - final ArgumentCaptor deleteRequest = ArgumentCaptor.forClass(DeleteObjectsRequest.class); - verify(s3Client).deleteObjects(deleteRequest.capture()); - assertEquals(1, deleteRequest.getValue().getKeys().size()); - assertEquals(OBJECT_TO_DELETE, deleteRequest.getValue().getKeys().get(0).getKey()); - } - - @Test - void testGetFilename() { - assertEquals("filename", S3StorageOperations.getFilename("filename")); - assertEquals("filename", S3StorageOperations.getFilename("/filename")); - assertEquals("filename", S3StorageOperations.getFilename("/p1/p2/filename")); - assertEquals("filename.csv", S3StorageOperations.getFilename("/p1/p2/filename.csv")); - } - - @Test - void getPartId() throws InterruptedException { - - // Multithreaded utility class - class PartIdGetter implements Runnable { - - final List responses = new ArrayList<>(); - final S3StorageOperations s3StorageOperations; - - PartIdGetter(S3StorageOperations instance) { - s3StorageOperations = instance; - } - - public void run() { - responses.add(s3StorageOperations.getPartId(FAKE_BUCKET_PATH)); - } - - List getResponses() { - return responses; - } - - } - - PartIdGetter partIdGetter = new PartIdGetter(s3StorageOperations); - - // single threaded - partIdGetter.run(); // 0 - partIdGetter.run(); // 1 - partIdGetter.run(); // 2 - - // multithreaded - ExecutorService executor = Executors.newFixedThreadPool(3); - for (int i = 0; i < 7; i++) { - executor.execute(partIdGetter); - } - executor.shutdown(); - executor.awaitTermination(5, TimeUnit.SECONDS); - - List responses = partIdGetter.getResponses(); - assertEquals(10, responses.size()); - for (int i = 0; i <= 9; i++) { - assertTrue(responses.contains(Integer.toString(i))); - } - } - - @Test - void getPartIdMultiplePaths() { - assertEquals("0", s3StorageOperations.getPartId(FAKE_BUCKET_PATH)); - assertEquals("1", s3StorageOperations.getPartId(FAKE_BUCKET_PATH)); - - assertEquals("0", s3StorageOperations.getPartId("other_path")); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/avro/AvroNameTransformerTest.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/avro/AvroNameTransformerTest.java deleted file mode 100644 index fa5d28c36c1d7..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/avro/AvroNameTransformerTest.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.avro; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNull; - -import java.util.Map; -import org.junit.jupiter.api.Test; - -class AvroNameTransformerTest { - - private static final AvroNameTransformer INSTANCE = new AvroNameTransformer(); - private static final Map RAW_TO_NORMALIZED_IDENTIFIERS = Map.of( - "name-space", "name_space", - "spécial_character", "special_character", - "99namespace", "_99namespace"); - - private static final Map RAW_TO_NORMALIZED_NAMESPACES = Map.of( - "", "", - "name-space1.name-space2.namespace3", "name_space1.name_space2.namespace3", - "namespace1.spécial_character", "namespace1.special_character", - "99namespace.namespace2", "_99namespace.namespace2"); - - @Test - public void testGetIdentifier() { - assertNull(INSTANCE.getIdentifier(null)); - assertNull(INSTANCE.convertStreamName(null)); - RAW_TO_NORMALIZED_IDENTIFIERS.forEach((raw, normalized) -> { - assertEquals(normalized, INSTANCE.getIdentifier(raw)); - assertEquals(normalized, INSTANCE.convertStreamName(raw)); - }); - } - - @Test - public void testGetNamespace() { - assertNull(INSTANCE.getNamespace(null)); - RAW_TO_NORMALIZED_NAMESPACES.forEach((raw, normalized) -> { - assertEquals(normalized, INSTANCE.getNamespace(raw)); - }); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/avro/AvroSerializedBufferTest.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/avro/AvroSerializedBufferTest.java deleted file mode 100644 index 33faf55f83694..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/avro/AvroSerializedBufferTest.java +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.avro; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.base.DestinationConfig; -import io.airbyte.cdk.integrations.destination.record_buffer.BufferStorage; -import io.airbyte.cdk.integrations.destination.record_buffer.FileBuffer; -import io.airbyte.cdk.integrations.destination.record_buffer.InMemoryBuffer; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.Field; -import io.airbyte.protocol.models.JsonSchemaType; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.CatalogHelpers; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.io.File; -import java.io.InputStream; -import java.util.List; -import java.util.Map; -import org.apache.avro.file.DataFileReader; -import org.apache.avro.file.SeekableByteArrayInput; -import org.apache.avro.generic.GenericData.Record; -import org.apache.avro.generic.GenericDatumReader; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Disabled; -import org.junit.jupiter.api.Test; - -public class AvroSerializedBufferTest { - - private static final JsonNode MESSAGE_DATA = Jsons.jsonNode(Map.of( - "field1", 10000, - "column2", "string value", - "another field", true, - "nested_column", Map.of("array_column", List.of(1, 2, 3)))); - private static final String STREAM = "stream1"; - private static final AirbyteStreamNameNamespacePair streamPair = new AirbyteStreamNameNamespacePair(STREAM, null); - private static final AirbyteRecordMessage message = new AirbyteRecordMessage() - .withStream(STREAM) - .withData(MESSAGE_DATA) - .withEmittedAt(System.currentTimeMillis()); - protected static final List FIELDS = List.of( - Field.of("field1", JsonSchemaType.NUMBER), - Field.of("column2", JsonSchemaType.STRING), - Field.of("another field", JsonSchemaType.BOOLEAN), - Field.of("nested_column", JsonSchemaType.OBJECT)); - private static final ConfiguredAirbyteCatalog catalog = CatalogHelpers.createConfiguredAirbyteCatalog(STREAM, null, FIELDS); - - @BeforeAll - public static void setup() { - DestinationConfig.initialize(Jsons.deserialize("{}")); - } - - @Test - @Disabled("Flaky on CI, See run https://github.com/airbytehq/airbyte/actions/runs/7126781640/job/19405426141?pr=33201 " + - "org.opentest4j.AssertionFailedError: Expected size between 964 and 985, but actual size was 991 ==> expected: but was: ") - public void testSnappyAvroWriter() throws Exception { - final S3AvroFormatConfig config = new S3AvroFormatConfig(Jsons.jsonNode(Map.of("compression_codec", Map.of( - "codec", "snappy")))); - runTest(new InMemoryBuffer(AvroSerializedBuffer.DEFAULT_SUFFIX), 964L, 985L, config, getExpectedString()); - } - - @Test - public void testGzipAvroFileWriter() throws Exception { - final S3AvroFormatConfig config = new S3AvroFormatConfig(Jsons.jsonNode(Map.of("compression_codec", Map.of( - "codec", "zstandard", - "compression_level", 20, - "include_checksum", true)))); - runTest(new FileBuffer(AvroSerializedBuffer.DEFAULT_SUFFIX), 965L, 985L, config, getExpectedString()); - } - - @Test - public void testUncompressedAvroWriter() throws Exception { - final S3AvroFormatConfig config = new S3AvroFormatConfig(Jsons.jsonNode(Map.of("compression_codec", Map.of( - "codec", "no compression")))); - runTest(new InMemoryBuffer(AvroSerializedBuffer.DEFAULT_SUFFIX), 1010L, 1020L, config, getExpectedString()); - } - - private static String getExpectedString() { - return "{\"_airbyte_ab_id\": \"\", \"_airbyte_emitted_at\": \"\", " - + "\"field1\": 10000.0, \"another_field\": true, " - + "\"nested_column\": {\"_airbyte_additional_properties\": {\"array_column\": \"[1,2,3]\"}}, " - + "\"column2\": \"string value\", " - + "\"_airbyte_additional_properties\": null}"; - } - - private static void runTest(final BufferStorage buffer, - final Long minExpectedByte, - final Long maxExpectedByte, - final S3AvroFormatConfig config, - final String expectedData) - throws Exception { - final File outputFile = buffer.getFile(); - try (final AvroSerializedBuffer writer = (AvroSerializedBuffer) AvroSerializedBuffer - .createFunction(config, () -> buffer) - .apply(streamPair, catalog)) { - writer.accept(message); - writer.accept(message); - writer.flush(); - // some data are randomized (uuid, timestamp, compression?) so the expected byte count is not always - // deterministic - assertTrue(minExpectedByte <= writer.getByteCount() && writer.getByteCount() <= maxExpectedByte, - String.format("Expected size between %d and %d, but actual size was %d", - minExpectedByte, maxExpectedByte, writer.getByteCount())); - final InputStream in = writer.getInputStream(); - try (final DataFileReader dataFileReader = - new DataFileReader<>(new SeekableByteArrayInput(in.readAllBytes()), new GenericDatumReader<>())) { - while (dataFileReader.hasNext()) { - final Record record = dataFileReader.next(); - record.put("_airbyte_ab_id", ""); - record.put("_airbyte_emitted_at", ""); - final String actualData = record.toString(); - assertEquals(expectedData, actualData); - } - } - } - assertFalse(outputFile.exists()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/avro/JsonFieldNameUpdaterTest.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/avro/JsonFieldNameUpdaterTest.java deleted file mode 100644 index 236ab209f5c8c..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/avro/JsonFieldNameUpdaterTest.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.avro; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.commons.util.MoreIterators; -import java.io.IOException; -import java.util.Map.Entry; -import java.util.stream.Collectors; -import org.junit.jupiter.api.Test; - -class JsonFieldNameUpdaterTest { - - @Test - public void testFieldNameUpdate() throws IOException { - final JsonNode testCases = Jsons.deserialize(MoreResources.readResource("parquet/json_field_name_updater/test_case.json")); - for (final JsonNode testCase : testCases) { - final JsonNode nameMap = testCase.get("nameMap"); - final JsonFieldNameUpdater nameUpdater = new JsonFieldNameUpdater( - MoreIterators.toList(nameMap.fields()).stream() - .collect(Collectors.toMap(Entry::getKey, e -> e.getValue().asText()))); - - final JsonNode original = testCase.get("original"); - final JsonNode updated = testCase.get("updated"); - - assertEquals(original, nameUpdater.getJsonWithOriginalFieldNames(updated)); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/avro/JsonSchemaTypeTest.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/avro/JsonSchemaTypeTest.java deleted file mode 100644 index b447613a937ff..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/avro/JsonSchemaTypeTest.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.avro; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import java.util.stream.Stream; -import org.junit.jupiter.api.extension.ExtensionContext; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.ArgumentsProvider; -import org.junit.jupiter.params.provider.ArgumentsSource; - -public class JsonSchemaTypeTest { - - @ParameterizedTest - @ArgumentsSource(JsonSchemaTypeProvider.class) - public void testFromJsonSchemaType(String type, String airbyteType, JsonSchemaType expectedJsonSchemaType) { - assertEquals( - expectedJsonSchemaType, - JsonSchemaType.fromJsonSchemaType(type, airbyteType)); - } - - public static class JsonSchemaTypeProvider implements ArgumentsProvider { - - @Override - public Stream provideArguments(ExtensionContext context) { - return Stream.of( - Arguments.of("WellKnownTypes.json#/definitions/Number", null, JsonSchemaType.NUMBER_V1), - Arguments.of("WellKnownTypes.json#/definitions/String", null, JsonSchemaType.STRING_V1), - Arguments.of("WellKnownTypes.json#/definitions/Integer", null, JsonSchemaType.INTEGER_V1), - Arguments.of("WellKnownTypes.json#/definitions/Boolean", null, JsonSchemaType.BOOLEAN_V1), - Arguments.of("WellKnownTypes.json#/definitions/BinaryData", null, JsonSchemaType.BINARY_DATA_V1), - Arguments.of("WellKnownTypes.json#/definitions/Date", null, JsonSchemaType.DATE_V1), - Arguments.of("WellKnownTypes.json#/definitions/TimestampWithTimezone", null, JsonSchemaType.TIMESTAMP_WITH_TIMEZONE_V1), - Arguments.of("WellKnownTypes.json#/definitions/TimestampWithoutTimezone", null, JsonSchemaType.TIMESTAMP_WITHOUT_TIMEZONE_V1), - Arguments.of("WellKnownTypes.json#/definitions/TimeWithTimezone", null, JsonSchemaType.TIME_WITH_TIMEZONE_V1), - Arguments.of("WellKnownTypes.json#/definitions/TimeWithoutTimezone", null, JsonSchemaType.TIME_WITHOUT_TIMEZONE_V1), - Arguments.of("number", "integer", JsonSchemaType.NUMBER_INT_V0), - Arguments.of("string", "big_integer", JsonSchemaType.NUMBER_BIGINT_V0), - Arguments.of("number", "float", JsonSchemaType.NUMBER_FLOAT_V0), - Arguments.of("number", null, JsonSchemaType.NUMBER_V0), - Arguments.of("string", null, JsonSchemaType.STRING_V0), - Arguments.of("integer", null, JsonSchemaType.INTEGER_V0), - Arguments.of("boolean", null, JsonSchemaType.BOOLEAN_V0), - Arguments.of("null", null, JsonSchemaType.NULL), - Arguments.of("object", null, JsonSchemaType.OBJECT), - Arguments.of("array", null, JsonSchemaType.ARRAY), - Arguments.of("combined", null, JsonSchemaType.COMBINED)); - } - - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/avro/JsonToAvroConverterTest.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/avro/JsonToAvroConverterTest.java deleted file mode 100644 index 534112d5a391f..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/avro/JsonToAvroConverterTest.java +++ /dev/null @@ -1,194 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.avro; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectWriter; -import io.airbyte.commons.jackson.MoreMappers; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.commons.util.MoreIterators; -import java.util.Collections; -import java.util.stream.Stream; -import org.apache.avro.Schema; -import org.apache.avro.generic.GenericData; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtensionContext; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.ArgumentsProvider; -import org.junit.jupiter.params.provider.ArgumentsSource; - -class JsonToAvroConverterTest { - - private static final ObjectWriter WRITER = MoreMappers.initMapper().writer(); - private static final JsonToAvroSchemaConverter SCHEMA_CONVERTER = new JsonToAvroSchemaConverter(); - - @Test - public void testGetSingleTypes() { - final JsonNode input1 = Jsons.deserialize(""" - {"$ref": "WellKnownTypes.json#/definitions/Number"}" - """); - - assertEquals( - Collections.singletonList(JsonSchemaType.NUMBER_V1), - JsonToAvroSchemaConverter.getTypes("field", input1)); - } - - @Test - public void testNoCombinedRestriction() { - final JsonNode input1 = Jsons.deserialize(""" - {"$ref": "WellKnownTypes.json#/definitions/String"}" - """); - assertTrue(JsonToAvroSchemaConverter.getCombinedRestriction(input1).isEmpty()); - } - - @Test - public void testWithCombinedRestriction() { - final JsonNode input2 = Jsons.deserialize("{ \"anyOf\": [{ \"type\": \"string\" }, { \"type\": \"integer\" }] }"); - assertTrue(JsonToAvroSchemaConverter.getCombinedRestriction(input2).isPresent()); - } - - @Deprecated - public static class GetFieldTypeTestCaseProviderV0 implements ArgumentsProvider { - - @Override - public Stream provideArguments(final ExtensionContext context) throws Exception { - final JsonNode testCases = - Jsons.deserialize(MoreResources.readResource("parquet/json_schema_converter/type_conversion_test_cases_v0.json")); - return MoreIterators.toList(testCases.elements()).stream().map(testCase -> Arguments.of( - testCase.get("fieldName").asText(), - testCase.get("jsonFieldSchema"), - testCase.get("avroFieldType"))); - } - - } - - public static class GetFieldTypeTestCaseProviderV1 implements ArgumentsProvider { - - @Override - public Stream provideArguments(final ExtensionContext context) throws Exception { - final JsonNode testCases = - Jsons.deserialize(MoreResources.readResource("parquet/json_schema_converter/type_conversion_test_cases_v1.json")); - return MoreIterators.toList(testCases.elements()).stream().map(testCase -> Arguments.of( - testCase.get("fieldName").asText(), - testCase.get("jsonFieldSchema"), - testCase.get("avroFieldType"))); - } - - } - - @ParameterizedTest - @ArgumentsSource(GetFieldTypeTestCaseProviderV0.class) - public void testFieldTypeConversionV0(final String fieldName, final JsonNode jsonFieldSchema, final JsonNode avroFieldType) { - assertEquals( - avroFieldType, - Jsons.deserialize(SCHEMA_CONVERTER.parseJsonField(fieldName, null, jsonFieldSchema, true, true).toString()), - String.format("Test for %s failed", fieldName)); - } - - @ParameterizedTest - @ArgumentsSource(GetFieldTypeTestCaseProviderV1.class) - public void testFieldTypeConversionV1(final String fieldName, final JsonNode jsonFieldSchema, final JsonNode avroFieldType) { - assertEquals( - avroFieldType, - Jsons.deserialize(SCHEMA_CONVERTER.parseJsonField(fieldName, null, jsonFieldSchema, true, true).toString()), - String.format("Test for %s failed", fieldName)); - } - - @Deprecated - public static class GetAvroSchemaTestCaseProviderV0 implements ArgumentsProvider { - - @Override - public Stream provideArguments(final ExtensionContext context) throws Exception { - final JsonNode testCases = Jsons.deserialize(MoreResources.readResource("parquet/json_schema_converter/json_conversion_test_cases_v0.json")); - return MoreIterators.toList(testCases.elements()).stream().map(testCase -> Arguments.of( - testCase.get("schemaName").asText(), - testCase.get("namespace").asText(), - testCase.get("appendAirbyteFields").asBoolean(), - testCase.get("jsonSchema"), - testCase.get("jsonObject"), - testCase.get("avroSchema"), - testCase.get("avroObject"))); - } - - } - - public static class GetAvroSchemaTestCaseProviderV1 implements ArgumentsProvider { - - @Override - public Stream provideArguments(final ExtensionContext context) throws Exception { - final JsonNode testCases = Jsons.deserialize(MoreResources.readResource("parquet/json_schema_converter/json_conversion_test_cases_v1.json")); - return MoreIterators.toList(testCases.elements()).stream().map(testCase -> Arguments.of( - testCase.get("schemaName").asText(), - testCase.get("namespace").asText(), - testCase.get("appendAirbyteFields").asBoolean(), - testCase.get("jsonSchema"), - testCase.get("jsonObject"), - testCase.get("avroSchema"), - testCase.get("avroObject"))); - } - - } - - /** - * This test verifies both the schema and object conversion. - */ - @ParameterizedTest - @ArgumentsSource(GetAvroSchemaTestCaseProviderV0.class) - public void testJsonAvroConversionV0(final String schemaName, - final String namespace, - final boolean appendAirbyteFields, - final JsonNode jsonSchema, - final JsonNode jsonObject, - final JsonNode avroSchema, - final JsonNode avroObject) - throws Exception { - final Schema actualAvroSchema = SCHEMA_CONVERTER.getAvroSchema(jsonSchema, schemaName, namespace, appendAirbyteFields, true, true, true); - assertEquals( - avroSchema, - Jsons.deserialize(actualAvroSchema.toString()), - String.format("Schema conversion for %s failed", schemaName)); - - final Schema.Parser schemaParser = new Schema.Parser(); - final GenericData.Record actualAvroObject = AvroConstants.JSON_CONVERTER.convertToGenericDataRecord( - WRITER.writeValueAsBytes(jsonObject), - schemaParser.parse(Jsons.serialize(avroSchema))); - assertEquals( - avroObject, - Jsons.deserialize(actualAvroObject.toString()), - String.format("Object conversion for %s failed", schemaName)); - } - - @ParameterizedTest - @ArgumentsSource(GetAvroSchemaTestCaseProviderV1.class) - public void testJsonAvroConversionV1(final String schemaName, - final String namespace, - final boolean appendAirbyteFields, - final JsonNode jsonSchema, - final JsonNode jsonObject, - final JsonNode avroSchema, - final JsonNode avroObject) - throws Exception { - final Schema actualAvroSchema = SCHEMA_CONVERTER.getAvroSchema(jsonSchema, schemaName, namespace, appendAirbyteFields, true, true, true); - assertEquals( - avroSchema, - Jsons.deserialize(actualAvroSchema.toString()), - String.format("Schema conversion for %s failed", schemaName)); - - final Schema.Parser schemaParser = new Schema.Parser(); - final GenericData.Record actualAvroObject = AvroConstants.JSON_CONVERTER.convertToGenericDataRecord( - WRITER.writeValueAsBytes(jsonObject), - schemaParser.parse(Jsons.serialize(avroSchema))); - assertEquals( - avroObject, - Jsons.deserialize(actualAvroObject.toString()), - String.format("Object conversion for %s failed", schemaName)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/avro/S3AvroFormatConfigTest.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/avro/S3AvroFormatConfigTest.java deleted file mode 100644 index 42266df26ef1d..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/avro/S3AvroFormatConfigTest.java +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.avro; - -import static com.amazonaws.services.s3.internal.Constants.MB; -import static io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory.DEFAULT_PART_SIZE_MB; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.fail; - -import alex.mojaki.s3upload.StreamTransferManager; -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.Lists; -import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfig; -import io.airbyte.cdk.integrations.destination.s3.S3FormatConfig; -import io.airbyte.cdk.integrations.destination.s3.StorageProvider; -import io.airbyte.cdk.integrations.destination.s3.util.ConfigTestUtils; -import io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory; -import io.airbyte.commons.json.Jsons; -import java.util.List; -import org.apache.avro.file.CodecFactory; -import org.apache.avro.file.DataFileConstants; -import org.apache.commons.lang3.reflect.FieldUtils; -import org.junit.jupiter.api.Test; - -class S3AvroFormatConfigTest { - - @Test - public void testParseCodecConfigNull() { - final List nullConfigs = Lists.newArrayList("{}", "{ \"codec\": \"no compression\" }"); - for (final String nullConfig : nullConfigs) { - assertEquals( - DataFileConstants.NULL_CODEC, - S3AvroFormatConfig.parseCodecConfig(Jsons.deserialize(nullConfig)).toString()); - } - } - - @Test - public void testParseCodecConfigDeflate() { - // default compression level 0 - final CodecFactory codecFactory1 = S3AvroFormatConfig.parseCodecConfig( - Jsons.deserialize("{ \"codec\": \"deflate\" }")); - assertEquals("deflate-0", codecFactory1.toString()); - - // compression level 5 - final CodecFactory codecFactory2 = S3AvroFormatConfig.parseCodecConfig( - Jsons.deserialize("{ \"codec\": \"deflate\", \"compression_level\": 5 }")); - assertEquals("deflate-5", codecFactory2.toString()); - } - - @Test - public void testParseCodecConfigBzip2() { - final JsonNode bzip2Config = Jsons.deserialize("{ \"codec\": \"bzip2\" }"); - final CodecFactory codecFactory = S3AvroFormatConfig.parseCodecConfig(bzip2Config); - assertEquals(DataFileConstants.BZIP2_CODEC, codecFactory.toString()); - } - - @Test - public void testParseCodecConfigXz() { - // default compression level 6 - final CodecFactory codecFactory1 = S3AvroFormatConfig.parseCodecConfig( - Jsons.deserialize("{ \"codec\": \"xz\" }")); - assertEquals("xz-6", codecFactory1.toString()); - - // compression level 7 - final CodecFactory codecFactory2 = S3AvroFormatConfig.parseCodecConfig( - Jsons.deserialize("{ \"codec\": \"xz\", \"compression_level\": 7 }")); - assertEquals("xz-7", codecFactory2.toString()); - } - - @Test - public void testParseCodecConfigZstandard() { - // default compression level 3 - final CodecFactory codecFactory1 = S3AvroFormatConfig.parseCodecConfig( - Jsons.deserialize("{ \"codec\": \"zstandard\" }")); - // There is no way to verify the checksum; all relevant methods are private or protected... - assertEquals("zstandard[3]", codecFactory1.toString()); - - // compression level 20 - final CodecFactory codecFactory2 = S3AvroFormatConfig.parseCodecConfig( - Jsons.deserialize( - "{ \"codec\": \"zstandard\", \"compression_level\": 20, \"include_checksum\": true }")); - // There is no way to verify the checksum; all relevant methods are private or protected... - assertEquals("zstandard[20]", codecFactory2.toString()); - } - - @Test - public void testParseCodecConfigSnappy() { - final JsonNode snappyConfig = Jsons.deserialize("{ \"codec\": \"snappy\" }"); - final CodecFactory codecFactory = S3AvroFormatConfig.parseCodecConfig(snappyConfig); - assertEquals(DataFileConstants.SNAPPY_CODEC, codecFactory.toString()); - } - - @Test - public void testParseCodecConfigInvalid() { - try { - final JsonNode invalidConfig = Jsons.deserialize("{ \"codec\": \"bi-directional-bfs\" }"); - S3AvroFormatConfig.parseCodecConfig(invalidConfig); - fail(); - } catch (final IllegalArgumentException e) { - // expected - } - } - - @Test - public void testHandlePartSizeConfig() throws IllegalAccessException { - - final JsonNode config = ConfigTestUtils.getBaseConfig(Jsons.deserialize("{\n" - + " \"format_type\": \"AVRO\"\n" - + "}")); - - final S3DestinationConfig s3DestinationConfig = S3DestinationConfig - .getS3DestinationConfig(config, StorageProvider.AWS_S3); - ConfigTestUtils.assertBaseConfig(s3DestinationConfig); - - final S3FormatConfig formatConfig = s3DestinationConfig.getFormatConfig(); - assertEquals("AVRO", formatConfig.getFormat().name()); - // Assert that is set properly in config - final StreamTransferManager streamTransferManager = StreamTransferManagerFactory - .create(s3DestinationConfig.getBucketName(), "objectKey", null) - .get(); - - final Integer partSizeBytes = (Integer) FieldUtils.readField(streamTransferManager, "partSize", true); - assertEquals(MB * DEFAULT_PART_SIZE_MB, partSizeBytes); - } - - @Test - public void testHandleAbsenceOfPartSizeConfig() throws IllegalAccessException { - - final JsonNode config = ConfigTestUtils.getBaseConfig(Jsons.deserialize("{\n" - + " \"format_type\": \"AVRO\"\n" - + "}")); - - final S3DestinationConfig s3DestinationConfig = S3DestinationConfig - .getS3DestinationConfig(config, StorageProvider.AWS_S3); - ConfigTestUtils.assertBaseConfig(s3DestinationConfig); - - final StreamTransferManager streamTransferManager = StreamTransferManagerFactory - .create(s3DestinationConfig.getBucketName(), "objectKey", null) - .get(); - - final Integer partSizeBytes = (Integer) FieldUtils.readField(streamTransferManager, "partSize", true); - assertEquals(MB * DEFAULT_PART_SIZE_MB, partSizeBytes); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/csv/CsvSerializedBufferTest.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/csv/CsvSerializedBufferTest.java deleted file mode 100644 index db18a75df87de..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/csv/CsvSerializedBufferTest.java +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.csv; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.base.DestinationConfig; -import io.airbyte.cdk.integrations.destination.record_buffer.BufferStorage; -import io.airbyte.cdk.integrations.destination.record_buffer.FileBuffer; -import io.airbyte.cdk.integrations.destination.record_buffer.InMemoryBuffer; -import io.airbyte.cdk.integrations.destination.s3.S3Format; -import io.airbyte.cdk.integrations.destination.s3.util.Flattening; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.Field; -import io.airbyte.protocol.models.JsonSchemaType; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.CatalogHelpers; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.io.BufferedReader; -import java.io.File; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.nio.charset.StandardCharsets; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import java.util.zip.GZIPInputStream; -import org.apache.commons.csv.CSVFormat; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -public class CsvSerializedBufferTest { - - private static final JsonNode MESSAGE_DATA = Jsons.jsonNode(Map.of( - "field1", 10000, - "column2", "string value", - "another field", true, - "nested_column", Map.of("array_column", List.of(1, 2, 3)))); - private static final String STREAM = "stream1"; - private static final AirbyteStreamNameNamespacePair streamPair = new AirbyteStreamNameNamespacePair(STREAM, null); - private static final AirbyteRecordMessage message = new AirbyteRecordMessage() - .withStream(STREAM) - .withData(MESSAGE_DATA) - .withEmittedAt(System.currentTimeMillis()); - protected static final List FIELDS = List.of( - Field.of("field1", JsonSchemaType.NUMBER), - Field.of("column2", JsonSchemaType.STRING), - Field.of("another field", JsonSchemaType.BOOLEAN), - Field.of("nested_column", JsonSchemaType.OBJECT)); - private static final ConfiguredAirbyteCatalog catalog = CatalogHelpers.createConfiguredAirbyteCatalog(STREAM, null, FIELDS); - private static final String CSV_FILE_EXTENSION = ".csv"; - private static final CSVFormat csvFormat = CSVFormat.newFormat(','); - - @BeforeEach - public void setup() { - DestinationConfig.initialize(Jsons.emptyObject()); - } - - @Test - public void testUncompressedDefaultCsvFormatWriter() throws Exception { - runTest(new InMemoryBuffer(CSV_FILE_EXTENSION), CSVFormat.DEFAULT, false, 350L, 365L, null, - getExpectedString(CSVFormat.DEFAULT)); - } - - @Test - public void testUncompressedCsvWriter() throws Exception { - runTest(new InMemoryBuffer(CSV_FILE_EXTENSION), csvFormat, false, 320L, 335L, null, - getExpectedString(csvFormat)); - } - - @Test - public void testCompressedCsvWriter() throws Exception { - runTest(new InMemoryBuffer(CSV_FILE_EXTENSION), csvFormat, true, 170L, 190L, null, - getExpectedString(csvFormat)); - } - - @Test - public void testCompressedCsvFileWriter() throws Exception { - runTest(new FileBuffer(CSV_FILE_EXTENSION), csvFormat, true, 170L, 190L, null, - getExpectedString(csvFormat)); - } - - private static String getExpectedString(final CSVFormat csvFormat) { - String expectedData = Jsons.serialize(MESSAGE_DATA); - if (csvFormat.equals(CSVFormat.DEFAULT)) { - expectedData = "\"" + expectedData.replace("\"", "\"\"") + "\""; - } - return expectedData; - } - - @Test - public void testFlattenCompressedCsvFileWriter() throws Exception { - final String expectedData = "true,string value,10000,{\"array_column\":[1,2,3]}"; - runTest(new FileBuffer(CSV_FILE_EXTENSION), CSVFormat.newFormat(',').withRecordSeparator('\n'), true, 135L, 150L, - new S3CsvFormatConfig(Jsons.jsonNode(Map.of( - "format_type", S3Format.CSV, - "flattening", Flattening.ROOT_LEVEL.getValue()))), - expectedData + expectedData); - } - - private static void runTest(final BufferStorage buffer, - final CSVFormat csvFormat, - final boolean withCompression, - final Long minExpectedByte, - final Long maxExpectedByte, - final S3CsvFormatConfig config, - final String expectedData) - throws Exception { - final File outputFile = buffer.getFile(); - try (final CsvSerializedBuffer writer = (CsvSerializedBuffer) CsvSerializedBuffer - .createFunction(config, () -> buffer) - .apply(streamPair, catalog)) { - writer.withCsvFormat(csvFormat); - writer.withCompression(withCompression); - writer.accept(message); - writer.accept(message); - writer.flush(); - // some data are randomized (uuid, timestamp, compression?) so the expected byte count is not always - // deterministic - assertTrue(minExpectedByte <= writer.getByteCount() && writer.getByteCount() <= maxExpectedByte, - String.format("Expected size between %d and %d, but actual size was %d", - minExpectedByte, maxExpectedByte, writer.getByteCount())); - final InputStream inputStream; - if (withCompression) { - inputStream = new GZIPInputStream(writer.getInputStream()); - } else { - inputStream = writer.getInputStream(); - } - final String actualData; - if (config == null) { - actualData = new String(inputStream.readAllBytes(), StandardCharsets.UTF_8) - // remove the UUID string at the beginning - .substring(UUID.randomUUID().toString().length() + 1) - // remove the last part of the string with random timestamp - .substring(0, expectedData.length()); - } else { - final BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8)); - final StringBuilder tmpData = new StringBuilder(); - String line; - while (reader.ready()) { - line = reader.readLine(); - tmpData.append(line - // remove uuid - .substring(UUID.randomUUID().toString().length() + 1) - // remove timestamp - .replaceAll("\\A[0-9]+,", "")); - } - actualData = tmpData.toString(); - } - assertEquals(expectedData, actualData); - } - assertFalse(outputFile.exists()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/csv/NoFlatteningSheetGeneratorTest.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/csv/NoFlatteningSheetGeneratorTest.java deleted file mode 100644 index e4163f55c8eac..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/csv/NoFlatteningSheetGeneratorTest.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.csv; - -import static org.junit.jupiter.api.Assertions.assertLinesMatch; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.google.common.collect.Lists; -import io.airbyte.cdk.integrations.base.JavaBaseConstants; -import io.airbyte.commons.jackson.MoreMappers; -import java.util.Collections; -import org.junit.jupiter.api.Test; - -class NoFlatteningSheetGeneratorTest { - - private final ObjectMapper mapper = MoreMappers.initMapper(); - private final NoFlatteningSheetGenerator sheetGenerator = new NoFlatteningSheetGenerator(); - - @Test - public void testGetHeaderRow() { - assertLinesMatch( - Lists.newArrayList( - JavaBaseConstants.COLUMN_NAME_AB_ID, - JavaBaseConstants.COLUMN_NAME_EMITTED_AT, - JavaBaseConstants.COLUMN_NAME_DATA), - sheetGenerator.getHeaderRow()); - } - - @Test - public void testGetRecordColumns() { - final ObjectNode json = mapper.createObjectNode(); - json.set("Field 4", mapper.createObjectNode().put("Field 41", 15)); - json.put("Field 1", "A"); - json.put("Field 3", 71); - json.put("Field 2", true); - - assertLinesMatch( - Collections.singletonList("{\"Field 4\":{\"Field 41\":15},\"Field 1\":\"A\",\"Field 3\":71,\"Field 2\":true}"), - sheetGenerator.getRecordColumns(json)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/csv/RootLevelFlatteningSheetGeneratorTest.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/csv/RootLevelFlatteningSheetGeneratorTest.java deleted file mode 100644 index f9953ce87de60..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/csv/RootLevelFlatteningSheetGeneratorTest.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.csv; - -import static org.junit.jupiter.api.Assertions.assertLinesMatch; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.google.common.collect.Lists; -import io.airbyte.cdk.integrations.base.JavaBaseConstants; -import io.airbyte.commons.jackson.MoreMappers; -import java.util.Collections; -import java.util.List; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class RootLevelFlatteningSheetGeneratorTest { - - private final static ObjectMapper MAPPER = MoreMappers.initMapper(); - private final static ObjectNode SCHEMA = MAPPER.createObjectNode(); - static { - final List fields = Lists.newArrayList("C", "B", "A", "c", "b", "a"); - Collections.shuffle(fields); - - final ObjectNode schemaProperties = MAPPER.createObjectNode(); - for (final String field : fields) { - schemaProperties.set(field, MAPPER.createObjectNode()); - } - - SCHEMA.set("properties", schemaProperties); - } - - private RootLevelFlatteningSheetGenerator sheetGenerator; - - @BeforeEach - public void createGenerator() { - this.sheetGenerator = new RootLevelFlatteningSheetGenerator(SCHEMA); - } - - @Test - public void testGetHeaderRow() { - assertLinesMatch( - Lists.newArrayList( - JavaBaseConstants.COLUMN_NAME_AB_ID, - JavaBaseConstants.COLUMN_NAME_EMITTED_AT, - "A", "B", "C", "a", "b", "c"), - sheetGenerator.getHeaderRow()); - } - - @Test - public void testGetRecordColumns() { - final ObjectNode json = MAPPER.createObjectNode(); - // Field c is missing - json.put("C", 3); - json.put("B", "value B"); - json.set("A", MAPPER.createObjectNode().put("Field 41", 15)); - json.put("b", "value b"); - json.put("a", 1); - - assertLinesMatch( - // A, B, C, a, b, c - Lists.newArrayList("{\"Field 41\":15}", "value B", "3", "1", "value b", ""), - sheetGenerator.getRecordColumns(json)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/csv/S3CsvFormatConfigTest.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/csv/S3CsvFormatConfigTest.java deleted file mode 100644 index 4b1a2e2494c3b..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/csv/S3CsvFormatConfigTest.java +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.csv; - -import static com.amazonaws.services.s3.internal.Constants.MB; -import static io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory.DEFAULT_PART_SIZE_MB; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import alex.mojaki.s3upload.StreamTransferManager; -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfig; -import io.airbyte.cdk.integrations.destination.s3.S3DestinationConstants; -import io.airbyte.cdk.integrations.destination.s3.S3FormatConfig; -import io.airbyte.cdk.integrations.destination.s3.util.CompressionType; -import io.airbyte.cdk.integrations.destination.s3.util.ConfigTestUtils; -import io.airbyte.cdk.integrations.destination.s3.util.Flattening; -import io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory; -import io.airbyte.commons.json.Jsons; -import org.apache.commons.lang3.reflect.FieldUtils; -import org.junit.jupiter.api.Test; - -// S3CsvFormatConfig -public class S3CsvFormatConfigTest { - - @Test - // Flattening enums can be created from value string - public void testFlatteningCreationFromString() { - assertEquals(Flattening.NO, Flattening.fromValue("no flattening")); - assertEquals(Flattening.ROOT_LEVEL, Flattening.fromValue("root level flattening")); - try { - Flattening.fromValue("invalid flattening value"); - } catch (final Exception e) { - assertTrue(e instanceof IllegalArgumentException); - } - } - - @Test - public void testHandlePartSizeConfig() throws IllegalAccessException { - - final JsonNode config = ConfigTestUtils.getBaseConfig(Jsons.deserialize("{\n" - + " \"format_type\": \"CSV\",\n" - + " \"flattening\": \"Root level flattening\"\n" - + "}")); - - final S3DestinationConfig s3DestinationConfig = S3DestinationConfig - .getS3DestinationConfig(config); - ConfigTestUtils.assertBaseConfig(s3DestinationConfig); - - final S3FormatConfig formatConfig = s3DestinationConfig.getFormatConfig(); - assertEquals("CSV", formatConfig.getFormat().name()); - // Assert that is set properly in config - final StreamTransferManager streamTransferManager = StreamTransferManagerFactory - .create(s3DestinationConfig.getBucketName(), "objectKey", null) - .get(); - - final Integer partSizeBytes = (Integer) FieldUtils.readField(streamTransferManager, "partSize", true); - assertEquals(MB * DEFAULT_PART_SIZE_MB, partSizeBytes); - } - - @Test - public void testHandleAbsenceOfPartSizeConfig() throws IllegalAccessException { - - final JsonNode config = ConfigTestUtils.getBaseConfig(Jsons.deserialize("{\n" - + " \"format_type\": \"CSV\",\n" - + " \"flattening\": \"Root level flattening\"\n" - + "}")); - - final S3DestinationConfig s3DestinationConfig = S3DestinationConfig - .getS3DestinationConfig(config); - ConfigTestUtils.assertBaseConfig(s3DestinationConfig); - - final StreamTransferManager streamTransferManager = StreamTransferManagerFactory - .create(s3DestinationConfig.getBucketName(), "objectKey", null) - .get(); - - final Integer partSizeBytes = (Integer) FieldUtils.readField(streamTransferManager, "partSize", true); - assertEquals(MB * DEFAULT_PART_SIZE_MB, partSizeBytes); - } - - @Test - public void testGzipCompressionConfig() { - // without gzip compression config - final JsonNode configWithoutGzipCompression = ConfigTestUtils.getBaseConfig(Jsons.deserialize("{\n" - + " \"format_type\": \"CSV\"\n" - + "}")); - final S3DestinationConfig s3ConfigWithoutGzipCompression = S3DestinationConfig.getS3DestinationConfig(configWithoutGzipCompression); - assertEquals( - S3DestinationConstants.DEFAULT_COMPRESSION_TYPE, - ((S3CsvFormatConfig) s3ConfigWithoutGzipCompression.getFormatConfig()).getCompressionType()); - - // with gzip compression config - final JsonNode configWithGzipCompression = ConfigTestUtils.getBaseConfig(Jsons.deserialize("{\n" - + " \"format_type\": \"CSV\",\n" - + " \"gzip_compression\": false\n" - + "}")); - final S3DestinationConfig gcsConfigWithGzipCompression = S3DestinationConfig.getS3DestinationConfig(configWithGzipCompression); - assertEquals( - CompressionType.GZIP, - ((S3CsvFormatConfig) gcsConfigWithGzipCompression.getFormatConfig()).getCompressionType()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/csv/S3CsvWriterTest.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/csv/S3CsvWriterTest.java deleted file mode 100644 index 1ff6715d1723a..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/csv/S3CsvWriterTest.java +++ /dev/null @@ -1,327 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.csv; - -import static java.util.Collections.singletonList; -import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.ArgumentMatchers.anyLong; -import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.mockConstruction; -import static org.mockito.Mockito.verify; - -import alex.mojaki.s3upload.MultiPartOutputStream; -import alex.mojaki.s3upload.StreamTransferManager; -import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.AmazonS3Client; -import com.fasterxml.jackson.databind.ObjectMapper; -import io.airbyte.cdk.integrations.base.DestinationConfig; -import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfig; -import io.airbyte.cdk.integrations.destination.s3.csv.S3CsvWriter.Builder; -import io.airbyte.cdk.integrations.destination.s3.util.CompressionType; -import io.airbyte.cdk.integrations.destination.s3.util.Flattening; -import io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerWithMetadata; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.AirbyteStream; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.v0.DestinationSyncMode; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.sql.Timestamp; -import java.time.Instant; -import java.util.ArrayList; -import java.util.List; -import java.util.UUID; -import java.util.concurrent.TimeUnit; -import org.apache.commons.csv.CSVFormat; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.Timeout; -import org.mockito.MockedConstruction; - -@Timeout(value = 90, - unit = TimeUnit.SECONDS) -class S3CsvWriterTest { - - public static final ConfiguredAirbyteStream CONFIGURED_STREAM = new ConfiguredAirbyteStream() - .withDestinationSyncMode(DestinationSyncMode.APPEND) - .withStream(new AirbyteStream() - .withName("fake-stream") - .withNamespace("fake-namespace")); - private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); - - private static final S3CsvFormatConfig CSV_FORMAT_CONFIG = new S3CsvFormatConfig(Flattening.NO, CompressionType.NO_COMPRESSION); - - private static final S3DestinationConfig CONFIG = S3DestinationConfig.create( - "fake-bucket", - "fake-bucketPath", - "fake-region") - .withEndpoint("fake-endpoint") - .withAccessKeyCredential("fake-access-key-id", "fake-secret-access-key") - .withFormatConfig(CSV_FORMAT_CONFIG) - .get(); - - // equivalent to Thu, 09 Dec 2021 19:17:54 GMT - private static final Timestamp UPLOAD_TIME = Timestamp.from(Instant.ofEpochMilli(1639077474000L)); - private static final int UPLOAD_THREADS = 8; - private static final int QUEUE_CAPACITY = 9; - - // The full path would be something like - // "fake-bucketPath/fake-namespace/fake-stream/2021_12_09_1639077474000_e549e712-b89c-4272-9496-9690ba7f973e.csv" - // 2021_12_09_1639077474000 is generated from the timestamp. It's followed by a random UUID, in case - // we need to create multiple files. - private static final String EXPECTED_OBJECT_BEGINNING = "fake-bucketPath/fake-namespace/fake-stream/2021_12_09_1639077474000_"; - private static final String EXPECTED_OBJECT_ENDING = ".csv"; - - private AmazonS3 s3Client; - - private MockedConstruction streamTransferManagerMockedConstruction; - private List streamTransferManagerConstructorArguments; - private List outputStreams; - - private record StreamTransferManagerArguments(String bucket, String object) { - - } - - @BeforeEach - public void setup() { - streamTransferManagerConstructorArguments = new ArrayList<>(); - outputStreams = new ArrayList<>(); - // This is basically RETURNS_SELF, except with getMultiPartOutputStreams configured correctly. - // Other non-void methods (e.g. toString()) will return null. - streamTransferManagerMockedConstruction = mockConstruction( - StreamTransferManagerWithMetadata.class, - (mock, context) -> { - // Mockito doesn't seem to provide an easy way to actually retrieve these arguments later on, so - // manually store them on construction. - // _PowerMockito_ does, but I didn't want to set up that additional dependency. - final List arguments = context.arguments(); - streamTransferManagerConstructorArguments.add(new StreamTransferManagerArguments((String) arguments.get(0), (String) arguments.get(1))); - - doReturn(mock).when(mock).numUploadThreads(anyInt()); - doReturn(mock).when(mock).numStreams(anyInt()); - doReturn(mock).when(mock).queueCapacity(anyInt()); - doReturn(mock).when(mock).partSize(anyLong()); - - // We can't write a fake MultiPartOutputStream, because it doesn't have a public constructor. - // So instead, we'll build a mock that captures its data into a ByteArrayOutputStream. - final MultiPartOutputStream stream = mock(MultiPartOutputStream.class); - doReturn(singletonList(stream)).when(mock).getMultiPartOutputStreams(); - final ByteArrayOutputStream capturer = new ByteArrayOutputStream(); - outputStreams.add(capturer); - doAnswer(invocation -> { - capturer.write((int) invocation.getArgument(0)); - return null; - }).when(stream).write(anyInt()); - doAnswer(invocation -> { - capturer.write(invocation.getArgument(0)); - return null; - }).when(stream).write(any(byte[].class)); - doAnswer(invocation -> { - capturer.write(invocation.getArgument(0), invocation.getArgument(1), invocation.getArgument(2)); - return null; - }).when(stream).write(any(byte[].class), anyInt(), anyInt()); - }); - - s3Client = mock(AmazonS3Client.class); - } - - private Builder writer() { - return new Builder( - CONFIG, - s3Client, - CONFIGURED_STREAM, - UPLOAD_TIME).uploadThreads(UPLOAD_THREADS) - .queueCapacity(QUEUE_CAPACITY); - } - - @AfterEach - public void teardown() { - streamTransferManagerMockedConstruction.close(); - } - - @Test - public void generatesCorrectObjectKey_when_created() throws IOException { - final S3CsvWriter writer = writer().build(); - - final String objectKey = writer.getOutputPath(); - - checkObjectName(objectKey); - } - - @Test - public void createsExactlyOneUpload() throws IOException { - writer().build(); - - assertEquals(1, streamTransferManagerMockedConstruction.constructed().size()); - - final StreamTransferManager manager = streamTransferManagerMockedConstruction.constructed().get(0); - final StreamTransferManagerArguments args = streamTransferManagerConstructorArguments.get(0); - verify(manager).numUploadThreads(UPLOAD_THREADS); - verify(manager).queueCapacity(QUEUE_CAPACITY); - assertEquals("fake-bucket", args.bucket); - checkObjectName(args.object); - } - - @Test - public void closesS3Upload_when_stagingUploaderClosedSuccessfully() throws Exception { - final S3CsvWriter writer = writer().build(); - - writer.close(false); - - final List managers = streamTransferManagerMockedConstruction.constructed(); - final StreamTransferManager manager = managers.get(0); - verify(manager).complete(); - } - - @Test - public void closesS3Upload_when_stagingUploaderClosedFailingly() throws Exception { - final S3CsvWriter writer = writer().build(); - - writer.close(true); - - final List managers = streamTransferManagerMockedConstruction.constructed(); - final StreamTransferManager manager = managers.get(0); - verify(manager).abort(); - } - - @Test - public void writesContentsCorrectly_when_headerEnabled() throws IOException { - final S3CsvWriter writer = writer().build(); - - writer.write( - UUID.fromString("f6767f7d-ce1e-45cc-92db-2ad3dfdd088e"), - new AirbyteRecordMessage() - .withData(OBJECT_MAPPER.readTree("{\"foo\": 73}")) - .withEmittedAt(1234L)); - writer.write( - UUID.fromString("2b95a13f-d54f-4370-a712-1c7bf2716190"), - new AirbyteRecordMessage() - .withData(OBJECT_MAPPER.readTree("{\"bar\": 84}")) - .withEmittedAt(2345L)); - writer.close(false); - - // carriage returns are required b/c RFC4180 requires it :( - assertEquals( - """ - "_airbyte_ab_id","_airbyte_emitted_at","_airbyte_data"\r - "f6767f7d-ce1e-45cc-92db-2ad3dfdd088e","1234","{""foo"":73}"\r - "2b95a13f-d54f-4370-a712-1c7bf2716190","2345","{""bar"":84}"\r - """, - outputStreams.get(0).toString(StandardCharsets.UTF_8)); - } - - @Test - public void writesContentsCorrectly_when_headerDisabled() throws IOException { - final S3CsvWriter writer = writer().withHeader(false).build(); - - writer.write( - UUID.fromString("f6767f7d-ce1e-45cc-92db-2ad3dfdd088e"), - new AirbyteRecordMessage() - .withData(OBJECT_MAPPER.readTree("{\"foo\": 73}")) - .withEmittedAt(1234L)); - writer.write( - UUID.fromString("2b95a13f-d54f-4370-a712-1c7bf2716190"), - new AirbyteRecordMessage() - .withData(OBJECT_MAPPER.readTree("{\"bar\": 84}")) - .withEmittedAt(2345L)); - writer.close(false); - - // carriage returns are required b/c RFC4180 requires it :( - assertEquals( - """ - "f6767f7d-ce1e-45cc-92db-2ad3dfdd088e","1234","{""foo"":73}"\r - "2b95a13f-d54f-4370-a712-1c7bf2716190","2345","{""bar"":84}"\r - """, - outputStreams.get(0).toString(StandardCharsets.UTF_8)); - } - - /** - * This test verifies that the S3StreamCopier usecase works. Specifically, the withHeader, - * csvSettings, and csvSheetGenerator options were all added solely to support S3StreamCopier; we - * want to verify that it outputs the exact same data as the previous implementation. - */ - @Test - public void writesContentsCorrectly_when_stagingDatabaseConfig() throws IOException { - DestinationConfig.initialize(Jsons.emptyObject()); - final S3DestinationConfig s3Config = S3DestinationConfig.create( - "fake-bucket", - "fake-bucketPath", - "fake-region") - .withEndpoint("fake-endpoint") - .withAccessKeyCredential("fake-access-key-id", "fake-secret-access-key") - .withFormatConfig(CSV_FORMAT_CONFIG) - .get(); - final S3CsvWriter writer = new Builder( - s3Config, - s3Client, - CONFIGURED_STREAM, - UPLOAD_TIME).uploadThreads(UPLOAD_THREADS) - .queueCapacity(QUEUE_CAPACITY) - .withHeader(false) - .csvSettings(CSVFormat.DEFAULT) - .csvSheetGenerator(new StagingDatabaseCsvSheetGenerator()) - .build(); - - writer.write( - UUID.fromString("f6767f7d-ce1e-45cc-92db-2ad3dfdd088e"), - new AirbyteRecordMessage() - .withData(OBJECT_MAPPER.readTree("{\"foo\": 73}")) - .withEmittedAt(1234L)); - writer.write( - UUID.fromString("2b95a13f-d54f-4370-a712-1c7bf2716190"), - new AirbyteRecordMessage() - .withData(OBJECT_MAPPER.readTree("{\"bar\": 84}")) - .withEmittedAt(2345L)); - writer.close(false); - - // carriage returns are required b/c RFC4180 requires it :( - // Dynamically generate the timestamp because we generate in local time. - assertEquals( - """ - f6767f7d-ce1e-45cc-92db-2ad3dfdd088e,"{""foo"":73}",1970-01-01T00:00:01.234Z\r - 2b95a13f-d54f-4370-a712-1c7bf2716190,"{""bar"":84}",1970-01-01T00:00:02.345Z\r - """, - outputStreams.get(0).toString(StandardCharsets.UTF_8)); - } - - /** - * This test really just wants to validate that: - *
      - *
    • we're dumping into the correct directory (fake-bucketPath/fake_namespace/fake_stream) and - * that the filename contains the upload time
    • - *
    • each S3CsvWriter generates a unique filename suffix (the UUID) so that they don't overwrite - * each other
    • - *
    • we generate a .csv extension
    • - *
    - * So the UUID check isn't strictly necessary. - *

    - * Eventually the output path generator should probably be injected into the S3CsvWriter (and we - * would test the generator directly + test that the writer calls the generator) - */ - private static void checkObjectName(final String objectName) { - final String errorMessage = "Object was actually " + objectName; - - assertTrue(objectName.startsWith(EXPECTED_OBJECT_BEGINNING), errorMessage); - assertTrue(objectName.endsWith(EXPECTED_OBJECT_ENDING), errorMessage); - - // Remove the beginning and ending, which _should_ leave us with just a UUID - final String uuidMaybe = objectName - // "^" == start of string - .replaceFirst("^" + EXPECTED_OBJECT_BEGINNING, "") - // "$" == end of string - .replaceFirst(EXPECTED_OBJECT_ENDING + "$", ""); - assertDoesNotThrow(() -> UUID.fromString(uuidMaybe), errorMessage); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/jsonl/JsonLSerializedBufferTest.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/jsonl/JsonLSerializedBufferTest.java deleted file mode 100644 index 9fabee2c61895..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/jsonl/JsonLSerializedBufferTest.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.jsonl; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.destination.record_buffer.BufferStorage; -import io.airbyte.cdk.integrations.destination.record_buffer.FileBuffer; -import io.airbyte.cdk.integrations.destination.record_buffer.InMemoryBuffer; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.Field; -import io.airbyte.protocol.models.JsonSchemaType; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.CatalogHelpers; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.io.File; -import java.io.InputStream; -import java.nio.charset.StandardCharsets; -import java.util.List; -import java.util.Map; -import java.util.zip.GZIPInputStream; -import org.junit.jupiter.api.Test; - -public class JsonLSerializedBufferTest { - - private static final JsonNode MESSAGE_DATA = Jsons.jsonNode(Map.of( - "field1", 10000, - "column2", "string value", - "another field", true, - "nested_column", Map.of("array_column", List.of(1, 2, 3)))); - private static final String STREAM = "stream1"; - private static final AirbyteStreamNameNamespacePair streamPair = new AirbyteStreamNameNamespacePair(STREAM, null); - private static final AirbyteRecordMessage message = new AirbyteRecordMessage() - .withStream(STREAM) - .withData(MESSAGE_DATA) - .withEmittedAt(System.currentTimeMillis()); - protected static final List FIELDS = List.of( - Field.of("field1", JsonSchemaType.NUMBER), - Field.of("column2", JsonSchemaType.STRING), - Field.of("another field", JsonSchemaType.BOOLEAN), - Field.of("nested_column", JsonSchemaType.OBJECT)); - private static final ConfiguredAirbyteCatalog catalog = CatalogHelpers.createConfiguredAirbyteCatalog(STREAM, null, FIELDS); - private static final String JSON_FILE_EXTENSION = ".jsonl"; - - @Test - public void testUncompressedJsonLFormatWriter() throws Exception { - runTest(new InMemoryBuffer(JSON_FILE_EXTENSION), false, 425L, 435L, getExpectedString()); - } - - @Test - public void testCompressedJsonLWriter() throws Exception { - runTest(new FileBuffer(JSON_FILE_EXTENSION), true, 205L, 215L, getExpectedString()); - } - - private static String getExpectedString() { - return Jsons.serialize(MESSAGE_DATA); - } - - private static void runTest(final BufferStorage buffer, - final boolean withCompression, - final Long minExpectedByte, - final Long maxExpectedByte, - final String expectedData) - throws Exception { - final File outputFile = buffer.getFile(); - try (final JsonLSerializedBuffer writer = (JsonLSerializedBuffer) JsonLSerializedBuffer - .createBufferFunction(null, () -> buffer) - .apply(streamPair, catalog)) { - writer.withCompression(withCompression); - writer.accept(message); - writer.accept(message); - writer.flush(); - // some data are randomized (uuid, timestamp, compression?) so the expected byte count is not always - // deterministic - assertTrue(minExpectedByte <= writer.getByteCount() && writer.getByteCount() <= maxExpectedByte, - String.format("Expected size between %d and %d, but actual size was %d", - minExpectedByte, maxExpectedByte, writer.getByteCount())); - final InputStream inputStream; - if (withCompression) { - inputStream = new GZIPInputStream(writer.getInputStream()); - } else { - inputStream = writer.getInputStream(); - } - final JsonNode actualData = Jsons.deserialize(new String(inputStream.readAllBytes(), StandardCharsets.UTF_8)); - assertEquals(expectedData, Jsons.serialize(actualData.get("_airbyte_data"))); - } - assertFalse(outputFile.exists()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/jsonl/S3JsonlFormatConfigTest.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/jsonl/S3JsonlFormatConfigTest.java deleted file mode 100644 index 1f40560876849..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/jsonl/S3JsonlFormatConfigTest.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.jsonl; - -import static com.amazonaws.services.s3.internal.Constants.MB; -import static io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory.DEFAULT_PART_SIZE_MB; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import alex.mojaki.s3upload.StreamTransferManager; -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfig; -import io.airbyte.cdk.integrations.destination.s3.S3FormatConfig; -import io.airbyte.cdk.integrations.destination.s3.util.ConfigTestUtils; -import io.airbyte.cdk.integrations.destination.s3.util.Flattening; -import io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory; -import io.airbyte.commons.json.Jsons; -import org.apache.commons.lang3.reflect.FieldUtils; -import org.junit.jupiter.api.Test; - -// S3JsonlFormatConfig -public class S3JsonlFormatConfigTest { - - @Test - // Flattening enums can be created from value string - public void testFlatteningCreationFromString() { - assertEquals(Flattening.NO, Flattening.fromValue("no flattening")); - assertEquals(Flattening.ROOT_LEVEL, Flattening.fromValue("root level flattening")); - try { - Flattening.fromValue("invalid flattening value"); - } catch (final Exception e) { - assertTrue(e instanceof IllegalArgumentException); - } - } - - @Test - public void testHandlePartSizeConfig() throws IllegalAccessException { - - final JsonNode config = ConfigTestUtils.getBaseConfig(Jsons.deserialize("{\n" - + " \"format_type\": \"JSONL\"\n" - + "}")); - - final S3DestinationConfig s3DestinationConfig = S3DestinationConfig - .getS3DestinationConfig(config); - ConfigTestUtils.assertBaseConfig(s3DestinationConfig); - - final S3FormatConfig formatConfig = s3DestinationConfig.getFormatConfig(); - assertEquals("JSONL", formatConfig.getFormat().name()); - - // Assert that is set properly in config - final StreamTransferManager streamTransferManager = StreamTransferManagerFactory - .create(s3DestinationConfig.getBucketName(), "objectKey", null) - .get(); - - final Integer partSizeBytes = (Integer) FieldUtils.readField(streamTransferManager, "partSize", true); - assertEquals(MB * DEFAULT_PART_SIZE_MB, partSizeBytes); - } - - @Test - public void testHandleAbsenceOfPartSizeConfig() throws IllegalAccessException { - - final JsonNode config = ConfigTestUtils.getBaseConfig(Jsons.deserialize("{\n" - + " \"format_type\": \"JSONL\"\n" - + "}")); - - final S3DestinationConfig s3DestinationConfig = S3DestinationConfig - .getS3DestinationConfig(config); - ConfigTestUtils.assertBaseConfig(s3DestinationConfig); - - final StreamTransferManager streamTransferManager = StreamTransferManagerFactory - .create(s3DestinationConfig.getBucketName(), "objectKey", null) - .get(); - - final Integer partSizeBytes = (Integer) FieldUtils.readField(streamTransferManager, "partSize", true); - assertEquals(MB * DEFAULT_PART_SIZE_MB, partSizeBytes); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/parquet/ParquetSerializedBufferTest.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/parquet/ParquetSerializedBufferTest.java deleted file mode 100644 index c163fab60354c..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/parquet/ParquetSerializedBufferTest.java +++ /dev/null @@ -1,183 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.parquet; - -import static io.airbyte.cdk.integrations.destination.s3.util.JavaProcessRunner.runProcess; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import com.amazonaws.util.IOUtils; -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.base.DestinationConfig; -import io.airbyte.cdk.integrations.destination.record_buffer.SerializableBuffer; -import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfig; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.Field; -import io.airbyte.protocol.models.JsonSchemaPrimitiveUtil.JsonSchemaPrimitive; -import io.airbyte.protocol.models.JsonSchemaType; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.CatalogHelpers; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.io.File; -import java.io.FileOutputStream; -import java.io.InputStream; -import java.nio.file.Files; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import java.util.stream.Stream; -import org.apache.avro.generic.GenericData.Record; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.Path; -import org.apache.parquet.avro.AvroReadSupport; -import org.apache.parquet.hadoop.ParquetReader; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; - -public class ParquetSerializedBufferTest { - - private static final JsonNode MESSAGE_DATA = Jsons.jsonNode(Map.of( - "field1", 10000, - "column2", "string value", - "another field", true, - "nested_column", Map.of("array_column", List.of(1, 2, 3)), - "string_array_column", Stream.of("test_string", null).toList(), - "datetime_with_timezone", "2022-05-12T15:35:44.192950Z")); - private static final String STREAM = "stream1"; - private static final AirbyteStreamNameNamespacePair streamPair = new AirbyteStreamNameNamespacePair(STREAM, null); - private static final AirbyteRecordMessage message = new AirbyteRecordMessage() - .withStream(STREAM) - .withData(MESSAGE_DATA) - .withEmittedAt(System.currentTimeMillis()); - protected static final List FIELDS = List.of( - Field.of("field1", JsonSchemaType.NUMBER), - Field.of("column2", JsonSchemaType.STRING), - Field.of("another field", JsonSchemaType.BOOLEAN), - Field.of("nested_column", JsonSchemaType.OBJECT), - Field.of("string_array_column", JsonSchemaType.builder(JsonSchemaPrimitive.ARRAY) - .withItems(JsonSchemaType.STRING).build()), - Field.of("datetime_with_timezone", JsonSchemaType.STRING_TIMESTAMP_WITH_TIMEZONE)); - private static final ConfiguredAirbyteCatalog catalog = CatalogHelpers.createConfiguredAirbyteCatalog(STREAM, null, FIELDS); - - @BeforeAll - public static void setup() { - DestinationConfig.initialize(Jsons.deserialize("{}")); - } - - @Test - public void testUncompressedParquetWriter() throws Exception { - final S3DestinationConfig config = S3DestinationConfig.getS3DestinationConfig(Jsons.jsonNode(Map.of( - "format", Map.of( - "format_type", "parquet"), - "s3_bucket_name", "test", - "s3_bucket_region", "us-east-2"))); - runTest(225L, 245L, config, getExpectedString()); - } - - @Test - public void testCompressedParquetWriter() throws Exception { - final S3DestinationConfig config = S3DestinationConfig.getS3DestinationConfig(Jsons.jsonNode(Map.of( - "format", Map.of( - "format_type", "parquet", - "compression_codec", "GZIP"), - "s3_bucket_name", "test", - "s3_bucket_region", "us-east-2"))); - // TODO: Compressed parquet is the same size as uncompressed?? - runTest(225L, 245L, config, getExpectedString()); - } - - private static String resolveArchitecture() { - return System.getProperty("os.name").replace(' ', '_') + "-" + System.getProperty("os.arch") + "-" + System.getProperty("sun.arch.data.model"); - } - - @Test - public void testLzoCompressedParquet() throws Exception { - final String currentDir = System.getProperty("user.dir"); - Runtime runtime = Runtime.getRuntime(); - final String architecture = resolveArchitecture(); - if (architecture.equals("Linux-amd64-64") || architecture.equals("Linux-x86_64-64")) { - runProcess(currentDir, runtime, "/bin/sh", "-c", "apt-get update"); - runProcess(currentDir, runtime, "/bin/sh", "-c", "apt-get install lzop liblzo2-2 liblzo2-dev -y"); - runLzoParquetTest(); - } else if (architecture.equals("Linux-aarch64-64") || architecture.equals("Linux-arm64-64")) { - runProcess(currentDir, runtime, "/bin/sh", "-c", "apt-get update"); - runProcess(currentDir, runtime, "/bin/sh", "-c", "apt-get install lzop liblzo2-2 liblzo2-dev " + - "wget curl unzip zip build-essential maven git -y"); - runProcess(currentDir, runtime, "/bin/sh", "-c", "wget https://www.oberhumer.com/opensource/lzo/download/lzo-2.10.tar.gz -P /usr/local/tmp"); - runProcess("/usr/local/tmp/", runtime, "/bin/sh", "-c", "tar xvfz lzo-2.10.tar.gz"); - runProcess("/usr/local/tmp/lzo-2.10/", runtime, "/bin/sh", "-c", "./configure --enable-shared --prefix /usr/local/lzo-2.10"); - runProcess("/usr/local/tmp/lzo-2.10/", runtime, "/bin/sh", "-c", "make && make install"); - runProcess(currentDir, runtime, "/bin/sh", "-c", "git clone https://github.com/twitter/hadoop-lzo.git /usr/lib/hadoop/lib/hadoop-lzo/"); - runProcess(currentDir, runtime, "/bin/sh", "-c", "curl -s https://get.sdkman.io | bash"); - runProcess(currentDir, runtime, "/bin/bash", "-c", "source /root/.sdkman/bin/sdkman-init.sh;" + - " sdk install java 8.0.342-librca;" + - " sdk use java 8.0.342-librca;" + - " cd /usr/lib/hadoop/lib/hadoop-lzo/ " + - "&& C_INCLUDE_PATH=/usr/local/lzo-2.10/include " + - "LIBRARY_PATH=/usr/local/lzo-2.10/lib mvn clean package"); - runProcess(currentDir, runtime, "/bin/sh", "-c", - "find /usr/lib/hadoop/lib/hadoop-lzo/ -name '*libgplcompression*' -exec cp {} /usr/lib/ \\;"); - runLzoParquetTest(); - } - } - - private void runLzoParquetTest() throws Exception { - final S3DestinationConfig config = S3DestinationConfig.getS3DestinationConfig(Jsons.jsonNode(Map.of( - "format", Map.of( - "format_type", "parquet", - "compression_codec", "LZO"), - "s3_bucket_name", "test", - "s3_bucket_region", "us-east-2"))); - runTest(225L, 245L, config, getExpectedString()); - } - - private static String getExpectedString() { - return "{\"_airbyte_ab_id\": \"\", \"_airbyte_emitted_at\": \"\", " - + "\"field1\": 10000.0, \"another_field\": true, " - + "\"nested_column\": {\"_airbyte_additional_properties\": {\"array_column\": \"[1,2,3]\"}}, " - + "\"column2\": \"string value\", " - + "\"string_array_column\": [\"test_string\", null], " - + "\"datetime_with_timezone\": 1652369744192000, " - + "\"_airbyte_additional_properties\": null}"; - } - - private static void runTest(final Long minExpectedByte, - final Long maxExpectedByte, - final S3DestinationConfig config, - final String expectedData) - throws Exception { - final File tempFile = Files.createTempFile(UUID.randomUUID().toString(), ".parquet").toFile(); - try (final SerializableBuffer writer = ParquetSerializedBuffer.createFunction(config).apply(streamPair, catalog)) { - writer.accept(message); - writer.accept(message); - writer.flush(); - // some data are randomized (uuid, timestamp, compression?) so the expected byte count is not always - // deterministic - assertTrue(minExpectedByte <= writer.getByteCount() && writer.getByteCount() <= maxExpectedByte, - String.format("Expected size between %d and %d, but actual size was %d", - minExpectedByte, maxExpectedByte, writer.getByteCount())); - final InputStream in = writer.getInputStream(); - try (final FileOutputStream outFile = new FileOutputStream(tempFile)) { - IOUtils.copy(in, outFile); - } - try (final ParquetReader parquetReader = - ParquetReader.builder(new AvroReadSupport<>(), new Path(tempFile.getAbsolutePath())) - .withConf(new Configuration()) - .build()) { - Record record; - while ((record = parquetReader.read()) != null) { - record.put("_airbyte_ab_id", ""); - record.put("_airbyte_emitted_at", ""); - final String actualData = record.toString(); - assertEquals(expectedData, actualData); - } - } - } finally { - Files.deleteIfExists(tempFile.toPath()); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/parquet/S3ParquetFormatConfigTest.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/parquet/S3ParquetFormatConfigTest.java deleted file mode 100644 index 2414355b30e03..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/parquet/S3ParquetFormatConfigTest.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.parquet; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.json.Jsons; -import org.apache.parquet.hadoop.metadata.CompressionCodecName; -import org.junit.jupiter.api.Test; - -class S3ParquetFormatConfigTest { - - @Test - public void testConfigConstruction() { - final JsonNode formatConfig = Jsons.deserialize("{\n" - + "\t\"compression_codec\": \"GZIP\",\n" - + "\t\"block_size_mb\": 1,\n" - + "\t\"max_padding_size_mb\": 1,\n" - + "\t\"page_size_kb\": 1,\n" - + "\t\"dictionary_page_size_kb\": 1,\n" - + "\t\"dictionary_encoding\": false\n" - + "}"); - - final S3ParquetFormatConfig config = new S3ParquetFormatConfig(formatConfig); - - // The constructor should automatically convert MB or KB to bytes. - assertEquals(1024 * 1024, config.getBlockSize()); - assertEquals(1024 * 1024, config.getMaxPaddingSize()); - assertEquals(1024, config.getPageSize()); - assertEquals(1024, config.getDictionaryPageSize()); - - assertEquals(CompressionCodecName.GZIP, config.getCompressionCodec()); - assertFalse(config.isDictionaryEncoding()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/tamplate/S3FilenameTemplateManagerTest.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/tamplate/S3FilenameTemplateManagerTest.java deleted file mode 100644 index e9aaacaf54090..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/tamplate/S3FilenameTemplateManagerTest.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.tamplate; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.Mockito.mockStatic; - -import io.airbyte.cdk.integrations.destination.s3.S3DestinationConstants; -import io.airbyte.cdk.integrations.destination.s3.template.S3FilenameTemplateManager; -import io.airbyte.cdk.integrations.destination.s3.template.S3FilenameTemplateParameterObject; -import java.io.IOException; -import java.text.DateFormat; -import java.text.SimpleDateFormat; -import java.time.Clock; -import java.time.Instant; -import java.time.ZoneId; -import java.util.TimeZone; -import org.junit.jupiter.api.Test; -import org.mockito.MockedStatic; - -class S3FilenameTemplateManagerTest { - - private final S3FilenameTemplateManager s3FilenameTemplateManager = new S3FilenameTemplateManager(); - - @Test - // Should replace the date placeholder with the current date in the format YYYY-MM-DD - void testDatePlaceholder() - throws IOException { - final String fileNamePattern = "test-{date}"; - final String fileExtension = "csv"; - final String partId = "1"; - - final String actual = s3FilenameTemplateManager - .applyPatternToFilename(S3FilenameTemplateParameterObject - .builder() - .objectPath("") - .fileNamePattern(fileNamePattern) - .fileExtension(fileExtension) - .partId(partId).build()); - - final DateFormat defaultDateFormat = new SimpleDateFormat(S3DestinationConstants.YYYY_MM_DD_FORMAT_STRING); - defaultDateFormat.setTimeZone(TimeZone.getTimeZone("UTC")); - - long currentTimeInMillis = Instant.now().toEpochMilli(); - - final String expected = "test-" + defaultDateFormat.format(currentTimeInMillis); - assertEquals(expected, actual); - } - - @Test - // Should replace the timestamp placeholder with the current timestamp in milliseconds - void testTimestampPlaceholder() - throws IOException { - final String fileNamePattern = "test-{timestamp}.csv"; - - final Clock clock = Clock.fixed(Instant.ofEpochMilli(1657110148000L), ZoneId.of("UTC")); - final Instant instant = Instant.now(clock); - - try (final MockedStatic mocked = mockStatic(Instant.class)) { - mocked.when(Instant::now).thenReturn(instant); - final String actual = s3FilenameTemplateManager - .applyPatternToFilename(S3FilenameTemplateParameterObject.builder() - .objectPath("") - .fileNamePattern(fileNamePattern) - .fileExtension("csv") - .partId("1") - .build()); - - assertEquals("test-1657110148000.csv", actual); - } - } - - @Test - // Should sanitize the string and adapt it to applicable S3 format - void testIfFilenameTemplateStringWasSanitized() throws IOException { - final String fileNamePattern = " te st.csv "; - final String actual = s3FilenameTemplateManager - .applyPatternToFilename(S3FilenameTemplateParameterObject.builder() - .objectPath("") - .fileNamePattern(fileNamePattern) - .fileExtension("csv") - .partId("1") - .build()); - - assertEquals("te__st.csv", actual); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/util/CompressionTypeHelperTest.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/util/CompressionTypeHelperTest.java deleted file mode 100644 index 09b2c056e388d..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/util/CompressionTypeHelperTest.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.util; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import io.airbyte.cdk.integrations.destination.s3.S3DestinationConstants; -import io.airbyte.commons.json.Jsons; -import java.util.Map; -import org.junit.jupiter.api.Test; - -class CompressionTypeHelperTest { - - @Test - public void testGetCompressionType() { - assertEquals( - S3DestinationConstants.DEFAULT_COMPRESSION_TYPE, - CompressionTypeHelper.parseCompressionType(null)); - - assertEquals( - CompressionType.NO_COMPRESSION, - CompressionTypeHelper.parseCompressionType(Jsons.jsonNode(Map.of("compression_type", "No Compression")))); - - assertEquals( - CompressionType.GZIP, - CompressionTypeHelper.parseCompressionType(Jsons.jsonNode(Map.of("compression_type", "GZIP")))); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/util/ConfigTestUtils.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/util/ConfigTestUtils.java deleted file mode 100644 index 95d01a2e8ecfc..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/util/ConfigTestUtils.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.util; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfig; -import io.airbyte.cdk.integrations.destination.s3.credential.S3AccessKeyCredentialConfig; -import io.airbyte.commons.json.Jsons; - -public class ConfigTestUtils { - - public static JsonNode getBaseConfig(final JsonNode formatConfig) { - return Jsons.deserialize("{\n" - + " \"s3_endpoint\": \"some_test-endpoint\",\n" - + " \"s3_bucket_name\": \"test-bucket-name\",\n" - + " \"s3_bucket_path\": \"test_path\",\n" - + " \"s3_bucket_region\": \"us-east-2\",\n" - + " \"access_key_id\": \"some-test-key-id\",\n" - + " \"secret_access_key\": \"some-test-access-key\",\n" - + " \"format\": " + formatConfig - + "}"); - } - - public static void assertBaseConfig(final S3DestinationConfig s3DestinationConfig) { - assertEquals("some_test-endpoint", s3DestinationConfig.getEndpoint()); - assertEquals("test-bucket-name", s3DestinationConfig.getBucketName()); - assertEquals("test_path", s3DestinationConfig.getBucketPath()); - assertEquals("us-east-2", s3DestinationConfig.getBucketRegion()); - final S3AccessKeyCredentialConfig credentialConfig = (S3AccessKeyCredentialConfig) s3DestinationConfig.getS3CredentialConfig(); - assertEquals("some-test-key-id", credentialConfig.getAccessKeyId()); - assertEquals("some-test-access-key", credentialConfig.getSecretAccessKey()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/util/S3OutputPathHelperTest.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/util/S3OutputPathHelperTest.java deleted file mode 100644 index cd86d0c2a9472..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/util/S3OutputPathHelperTest.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.util; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import com.google.common.collect.Lists; -import io.airbyte.protocol.models.v0.AirbyteStream; -import io.airbyte.protocol.models.v0.SyncMode; -import org.junit.jupiter.api.Test; - -class S3OutputPathHelperTest { - - @Test - // getOutputPrefix - public void testGetOutputPrefix() { - // No namespace - assertEquals("bucket_path/stream_name", S3OutputPathHelper - .getOutputPrefix("bucket_path", - new AirbyteStream().withName("stream_name").withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH)))); - - // With namespace - assertEquals("bucket_path/namespace/stream_name", S3OutputPathHelper - .getOutputPrefix("bucket_path", - new AirbyteStream().withNamespace("namespace").withName("stream_name") - .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH)))); - - // With empty namespace - assertEquals("bucket_path/stream_name", S3OutputPathHelper - .getOutputPrefix("bucket_path", - new AirbyteStream().withNamespace("").withName("stream_name").withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH)))); - - // With namespace with slash chart in the end - assertEquals("bucket_path/namespace/stream_name", S3OutputPathHelper - .getOutputPrefix("bucket_path", - new AirbyteStream().withNamespace("namespace/").withName("stream_name") - .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH)))); - - // With namespace with slash chart in the name - assertEquals("bucket_path/namespace/subfolder/stream_name", S3OutputPathHelper - .getOutputPrefix("bucket_path", - new AirbyteStream().withNamespace("namespace/subfolder/").withName("stream_name") - .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH)))); - - // With an AWS Glue crawler - assertEquals("bucket_path/namespace/date=2022-03-15", S3OutputPathHelper - .getOutputPrefix("bucket_path", - new AirbyteStream().withNamespace("namespace").withName("date=2022-03-15") - .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH)))); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/writer/BaseS3WriterTest.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/writer/BaseS3WriterTest.java deleted file mode 100644 index 15a1b5e141ae2..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/writer/BaseS3WriterTest.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3.writer; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import io.airbyte.cdk.integrations.destination.s3.S3Format; -import io.airbyte.cdk.integrations.destination.s3.template.S3FilenameTemplateParameterObject; -import java.io.IOException; -import java.sql.Timestamp; -import org.junit.jupiter.api.Test; - -class BaseS3WriterTest { - - @Test - public void testGetOutputFilename() throws IOException { - final Timestamp timestamp = new Timestamp(1471461319000L); - assertEquals( - "2016_08_17_1471461319000_0.csv", - BaseS3Writer.determineOutputFilename(S3FilenameTemplateParameterObject - .builder() - .s3Format(S3Format.CSV) - .timestamp(timestamp) - .build())); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/jdbc/copy/s3/S3CopyConfigTest.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/jdbc/copy/s3/S3CopyConfigTest.java similarity index 100% rename from airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/jdbc/copy/s3/S3CopyConfigTest.java rename to airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/jdbc/copy/s3/S3CopyConfigTest.java diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/AesCbcEnvelopeEncryptionBlobDecoratorTest.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/AesCbcEnvelopeEncryptionBlobDecoratorTest.kt new file mode 100644 index 0000000000000..36e82f208bab8 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/AesCbcEnvelopeEncryptionBlobDecoratorTest.kt @@ -0,0 +1,103 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.s3 + +import java.io.ByteArrayOutputStream +import java.io.IOException +import java.nio.charset.StandardCharsets +import java.util.Base64 +import javax.crypto.spec.SecretKeySpec +import org.apache.commons.io.IOUtils +import org.junit.jupiter.api.Assertions.assertArrayEquals +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test + +class AesCbcEnvelopeEncryptionBlobDecoratorTest { + + companion object { + private val BASE64_DECODER: Base64.Decoder = Base64.getDecoder() + + // A random base64-encoded 256-bit AES key + const val KEY_ENCRYPTING_KEY: String = "oFf0LY0Zae9ksNZsPSJG8ZLGRRBUUhitaPKWRPPKTvM=" + + // Another base64-encoded random 256-bit AES key + const val CONTENT_ENCRYPTING_KEY: String = "9ZAVuZE8L4hJCFQS49OMNeFRGTCBUHAFOgkW3iZkOq8=" + + // A random base64-encoded 16-byte array + const val INITIALIZATION_VECTOR: String = "04YDvMCXpvTb2ilggLbDJQ==" + + // A small CSV file, which looks similar to what destination-s3 might upload + val PLAINTEXT: String = + """ + adc66b6e-6051-42db-b683-d978a51c3c02,"{""campaign.resource_name"":""cus""}",2022-04-04 22:32:50.046 + 0e253b28-bec6-4a90-8622-629d3e542982,"{""campaign.resource_name"":""cus""}",2022-04-04 22:32:50.047 + + """.trimIndent() + + // The encryption of the plaintext, using the CEK and IV defined above (base64-encoded). + // Equivalent + // to: + // base64Encode(encrypt("AES-CBC", PLAINTEXT, CONTENT_ENCRYPTING_KEY, INITIALIZATION_VECTOR) + const val CIPHERTEXT: String = + "IRfz0FN05Y9yyne+0V+G14xYjA4B0+ter7qniDheIu9UM3Fdmu/mqjyFvYFIRTroP5kNJ1SH3FaArE5aHkrWMPwSkczkhArajfYX+UEfGH68YyWOSnpdxuviTTgK3Ee3OVTz3ZlziOB8jCMjupJ9pqkLnxg7Ghe3BQ1puOHGFDMmIgiP4Zfz0fkdlUyZOvsJ7xpncD24G6IIJNwOyo4CedULgueHdybmxr4oddhAja8QxJxZzlfZl4suJ+KWvt78MSdkRlp+Ip99U8n0O7BLJA==" + + // The encryption of the CEK, using the KEK defined above (base64-encoded). Equivalent to: + // base64Encode(encrypt("AES-ECB", CONTENT_ENCRYPTING_KEY, KEY_ENCRYPTING_KEY) + const val ENCRYPTED_CEK: String = + "Ck5u5cKqcY+bcFBrpsPHHUNw5Qx8nYDJ2Vqt6XG6kwxjVAJQKKljPv9NDsG6Ncoc" + } + + private lateinit var decorator: AesCbcEnvelopeEncryptionBlobDecorator + + @BeforeEach + internal fun setup() { + decorator = + AesCbcEnvelopeEncryptionBlobDecorator( + SecretKeySpec(BASE64_DECODER.decode(KEY_ENCRYPTING_KEY), "AES"), + SecretKeySpec(BASE64_DECODER.decode(CONTENT_ENCRYPTING_KEY), "AES"), + BASE64_DECODER.decode(INITIALIZATION_VECTOR), + ) + } + + @Test + @Throws(IOException::class) + internal fun testEncryption() { + val stream = ByteArrayOutputStream() + + decorator.wrap(stream).use { wrapped -> + IOUtils.write( + PLAINTEXT, + wrapped, + StandardCharsets.UTF_8, + ) + } + assertArrayEquals( + BASE64_DECODER.decode(CIPHERTEXT), + stream.toByteArray(), + ) + } + + @Test + internal fun testMetadataInsertion() { + val metadata: MutableMap = HashMap() + + decorator.updateMetadata( + metadata, + mapOf( + AesCbcEnvelopeEncryptionBlobDecorator.ENCRYPTED_CONTENT_ENCRYPTING_KEY to "the_cek", + AesCbcEnvelopeEncryptionBlobDecorator.INITIALIZATION_VECTOR to "the_iv", + ), + ) + + assertEquals( + mapOf( + "the_cek" to ENCRYPTED_CEK, + "the_iv" to INITIALIZATION_VECTOR, + ), + metadata, + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/BlobDecoratorTest.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/BlobDecoratorTest.kt new file mode 100644 index 0000000000000..caed6ba62c8c8 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/BlobDecoratorTest.kt @@ -0,0 +1,40 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3 + +import java.util.Map +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +class BlobDecoratorTest { + @Test + fun testOverwriteMetadata() { + val metadata: MutableMap = HashMap() + metadata["amz-foo"] = "oldValue" + + BlobDecorator.insertMetadata(metadata, Map.of("foo", "amz-foo"), "foo", "newValue") + + Assertions.assertEquals(Map.of("amz-foo", "newValue"), metadata) + } + + @Test + fun testNewMetadata() { + val metadata: MutableMap = HashMap() + metadata["amz-foo"] = "oldValue" + + BlobDecorator.insertMetadata(metadata, Map.of("bar", "amz-bar"), "bar", "newValue") + + Assertions.assertEquals(Map.of("amz-foo", "oldValue", "amz-bar", "newValue"), metadata) + } + + @Test + fun testSkipMetadata() { + val metadata: MutableMap = HashMap() + metadata["amz-foo"] = "oldValue" + + BlobDecorator.insertMetadata(metadata, Map.of("foo", "amz-foo"), "bar", "newValue") + + Assertions.assertEquals(Map.of("amz-foo", "oldValue"), metadata) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/S3BaseChecksTest.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/S3BaseChecksTest.kt new file mode 100644 index 0000000000000..78dc75919a5c7 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/S3BaseChecksTest.kt @@ -0,0 +1,128 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3 + +import com.amazonaws.services.s3.AmazonS3 +import com.amazonaws.services.s3.model.ListObjectsRequest +import io.airbyte.cdk.integrations.destination.s3.S3BaseChecks.attemptS3WriteAndDelete +import io.airbyte.cdk.integrations.destination.s3.util.S3NameTransformer +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.mockito.ArgumentMatchers +import org.mockito.Mockito + +class S3BaseChecksTest { + private lateinit var s3Client: AmazonS3 + + @BeforeEach + fun setup() { + s3Client = Mockito.mock(AmazonS3::class.java) + Mockito.`when`( + s3Client.doesObjectExist(ArgumentMatchers.anyString(), ArgumentMatchers.eq("")) + ) + .thenThrow(IllegalArgumentException("Object path must not be empty")) + Mockito.`when`( + s3Client.putObject( + ArgumentMatchers.anyString(), + ArgumentMatchers.eq(""), + ArgumentMatchers.anyString() + ) + ) + .thenThrow(IllegalArgumentException("Object path must not be empty")) + } + + @Test + fun attemptWriteAndDeleteS3Object_should_createSpecificFiles() { + val config = + S3DestinationConfig( + null, + "test_bucket", + "test/bucket/path", + null, + null, + null, + null, + s3Client!! + ) + val operations = S3StorageOperations(S3NameTransformer(), s3Client!!, config) + Mockito.`when`(s3Client!!.doesObjectExist("test_bucket", "test/bucket/path/")) + .thenReturn(false) + + attemptS3WriteAndDelete(operations, config, "test/bucket/path") + + Mockito.verify(s3Client) + .putObject( + ArgumentMatchers.eq("test_bucket"), + ArgumentMatchers.startsWith("test/bucket/path/_airbyte_connection_test_"), + ArgumentMatchers.anyString() + ) + Mockito.verify(s3Client) + .listObjects( + ArgumentMatchers.argThat { request: ListObjectsRequest -> + "test_bucket" == request.bucketName + } + ) + Mockito.verify(s3Client) + .deleteObject( + ArgumentMatchers.eq("test_bucket"), + ArgumentMatchers.startsWith("test/bucket/path/_airbyte_connection_test_") + ) + } + + @Test + fun attemptWriteAndDeleteS3Object_should_skipDirectoryCreateIfRootPath() { + val config = + S3DestinationConfig(null, "test_bucket", "", null, null, null, null, s3Client!!) + val operations = S3StorageOperations(S3NameTransformer(), s3Client!!, config) + + attemptS3WriteAndDelete(operations, config, "") + + Mockito.verify(s3Client, Mockito.never()).putObject("test_bucket", "", "") + Mockito.verify(s3Client) + .putObject( + ArgumentMatchers.eq("test_bucket"), + ArgumentMatchers.startsWith("_airbyte_connection_test_"), + ArgumentMatchers.anyString() + ) + Mockito.verify(s3Client) + .listObjects( + ArgumentMatchers.argThat { request: ListObjectsRequest -> + "test_bucket" == request.bucketName + } + ) + Mockito.verify(s3Client) + .deleteObject( + ArgumentMatchers.eq("test_bucket"), + ArgumentMatchers.startsWith("_airbyte_connection_test_") + ) + } + + @Test + fun attemptWriteAndDeleteS3Object_should_skipDirectoryCreateIfNullPath() { + val config = + S3DestinationConfig(null, "test_bucket", null, null, null, null, null, s3Client!!) + val operations = S3StorageOperations(S3NameTransformer(), s3Client!!, config) + + attemptS3WriteAndDelete(operations, config, null) + + Mockito.verify(s3Client, Mockito.never()).putObject("test_bucket", "", "") + Mockito.verify(s3Client) + .putObject( + ArgumentMatchers.eq("test_bucket"), + ArgumentMatchers.startsWith("_airbyte_connection_test_"), + ArgumentMatchers.anyString() + ) + Mockito.verify(s3Client) + .listObjects( + ArgumentMatchers.argThat { request: ListObjectsRequest -> + "test_bucket" == request.bucketName + } + ) + Mockito.verify(s3Client) + .deleteObject( + ArgumentMatchers.eq("test_bucket"), + ArgumentMatchers.startsWith("_airbyte_connection_test_") + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/S3DestinationConfigTest.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/S3DestinationConfigTest.kt new file mode 100644 index 0000000000000..ce20614e1911a --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/S3DestinationConfigTest.kt @@ -0,0 +1,117 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3 + +import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfig.Companion.create +import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfig.Companion.getS3DestinationConfig +import io.airbyte.cdk.integrations.destination.s3.credential.S3AccessKeyCredentialConfig +import io.airbyte.commons.json.Jsons.deserialize +import org.assertj.core.api.AssertionsForClassTypes +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class S3DestinationConfigTest { + @Test + fun testCreateFromExistingConfig() { + Assertions.assertEquals(CONFIG, create(CONFIG).get()) + } + + @Test + fun testCreateAndModify() { + val newBucketName = "new-bucket" + val newBucketPath = "new-path" + val newBucketRegion = "new-region" + val newEndpoint = "new-endpoint" + val newKey = "new-key" + val newSecret = "new-secret" + + val modifiedConfig = + create(CONFIG) + .withBucketName(newBucketName) + .withBucketPath(newBucketPath) + .withBucketRegion(newBucketRegion) + .withEndpoint(newEndpoint) + .withAccessKeyCredential(newKey, newSecret) + .get() + + Assertions.assertNotEquals(CONFIG, modifiedConfig) + Assertions.assertEquals(newBucketName, modifiedConfig.bucketName) + Assertions.assertEquals(newBucketPath, modifiedConfig.bucketPath) + Assertions.assertEquals(newBucketRegion, modifiedConfig.bucketRegion) + + val credentialConfig = modifiedConfig.s3CredentialConfig as S3AccessKeyCredentialConfig + Assertions.assertEquals(newKey, credentialConfig.accessKeyId) + Assertions.assertEquals(newSecret, credentialConfig.secretAccessKey) + } + + @Test + fun testGetS3DestinationConfigAWS_S3Provider() { + val s3config = + deserialize( + """{ + "s3_bucket_name": "paste-bucket-name-here", + "s3_bucket_path": "integration-test", + "s3_bucket_region": "paste-bucket-region-here", + "access_key_id": "paste-access-key-id-here", + "secret_access_key": "paste-secret-access-key-here" +}""" + ) + + val result = getS3DestinationConfig(s3config, StorageProvider.AWS_S3) + + AssertionsForClassTypes.assertThat(result.endpoint).isEmpty() + AssertionsForClassTypes.assertThat(result.bucketName).isEqualTo("paste-bucket-name-here") + AssertionsForClassTypes.assertThat(result.bucketPath).isEqualTo("integration-test") + AssertionsForClassTypes.assertThat(result.bucketRegion) + .isEqualTo("paste-bucket-region-here") + AssertionsForClassTypes.assertThat(result.pathFormat) + .isEqualTo("\${NAMESPACE}/\${STREAM_NAME}/\${YEAR}_\${MONTH}_\${DAY}_\${EPOCH}_") + val awsCredentials = result.s3CredentialConfig!!.s3CredentialsProvider.credentials + AssertionsForClassTypes.assertThat(awsCredentials.awsAccessKeyId) + .isEqualTo("paste-access-key-id-here") + AssertionsForClassTypes.assertThat(awsCredentials.awsSecretKey) + .isEqualTo("paste-secret-access-key-here") + AssertionsForClassTypes.assertThat(result.isCheckIntegrity).isEqualTo(true) + } + + @Test + fun testGetS3DestinationConfigCF_R2Provider() { + val s3config = + deserialize( + """{ + "s3_bucket_name": "paste-bucket-name-here", + "s3_bucket_path": "integration-test", + "account_id": "paster-account-id-here", + "access_key_id": "paste-access-key-id-here", + "secret_access_key": "paste-secret-access-key-here" +} +""" + ) + + val result = getS3DestinationConfig(s3config, StorageProvider.CF_R2) + + AssertionsForClassTypes.assertThat(result.endpoint) + .isEqualTo("https://paster-account-id-here.r2.cloudflarestorage.com") + AssertionsForClassTypes.assertThat(result.bucketName).isEqualTo("paste-bucket-name-here") + AssertionsForClassTypes.assertThat(result.bucketPath).isEqualTo("integration-test") + AssertionsForClassTypes.assertThat(result.bucketRegion).isNull() + AssertionsForClassTypes.assertThat(result.pathFormat) + .isEqualTo("\${NAMESPACE}/\${STREAM_NAME}/\${YEAR}_\${MONTH}_\${DAY}_\${EPOCH}_") + val awsCredentials = result.s3CredentialConfig!!.s3CredentialsProvider.credentials + AssertionsForClassTypes.assertThat(awsCredentials.awsAccessKeyId) + .isEqualTo("paste-access-key-id-here") + AssertionsForClassTypes.assertThat(awsCredentials.awsSecretKey) + .isEqualTo("paste-secret-access-key-here") + AssertionsForClassTypes.assertThat(result.isCheckIntegrity).isEqualTo(false) + } + + companion object { + private val CONFIG = + create("test-bucket", "test-path", "test-region") + .withEndpoint("test-endpoint") + .withPathFormat("\${STREAM_NAME}/\${NAMESPACE}") + .withAccessKeyCredential("test-key", "test-secret") + .get() + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/S3FormatConfigsTest.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/S3FormatConfigsTest.kt new file mode 100644 index 0000000000000..366ca70aef610 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/S3FormatConfigsTest.kt @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3 + +import io.airbyte.cdk.integrations.destination.s3.S3FormatConfigs.getS3FormatConfig +import io.airbyte.cdk.integrations.destination.s3.csv.S3CsvFormatConfig +import io.airbyte.cdk.integrations.destination.s3.util.CompressionType +import io.airbyte.cdk.integrations.destination.s3.util.Flattening +import io.airbyte.commons.json.Jsons.jsonNode +import java.util.Map +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +// S3FormatConfigs +class S3FormatConfigsTest { + @Test // When CSV format is specified, it returns CSV format config + fun testGetCsvS3FormatConfig() { + val configJson = + jsonNode( + Map.of( + "format", + jsonNode( + Map.of( + "format_type", + S3Format.CSV.toString(), + "flattening", + Flattening.ROOT_LEVEL.value, + "compression", + jsonNode(Map.of("compression_type", "No Compression")) + ) + ) + ) + ) + + val formatConfig = getS3FormatConfig(configJson) + Assertions.assertEquals(formatConfig.format, S3Format.CSV) + Assertions.assertTrue(formatConfig is S3CsvFormatConfig) + val csvFormatConfig = formatConfig as S3CsvFormatConfig + Assertions.assertEquals(Flattening.ROOT_LEVEL, csvFormatConfig.flattening) + Assertions.assertEquals(CompressionType.NO_COMPRESSION, csvFormatConfig.compressionType) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/S3StorageOperationsTest.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/S3StorageOperationsTest.kt new file mode 100644 index 0000000000000..750a312f13801 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/S3StorageOperationsTest.kt @@ -0,0 +1,213 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.s3 + +import com.amazonaws.services.s3.AmazonS3 +import com.amazonaws.services.s3.model.DeleteObjectsRequest +import com.amazonaws.services.s3.model.ListObjectsRequest +import com.amazonaws.services.s3.model.ObjectListing +import com.amazonaws.services.s3.model.S3ObjectSummary +import io.airbyte.cdk.integrations.destination.NamingConventionTransformer +import io.airbyte.cdk.integrations.destination.s3.util.S3NameTransformer +import java.util.concurrent.Executors +import java.util.concurrent.TimeUnit +import java.util.regex.Pattern +import org.joda.time.DateTime +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertFalse +import org.junit.jupiter.api.Assertions.assertTrue +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.mockito.ArgumentCaptor +import org.mockito.ArgumentMatchers +import org.mockito.Mockito + +class S3StorageOperationsTest { + + companion object { + private const val BUCKET_NAME = "fake-bucket" + private const val FAKE_BUCKET_PATH = "fake-bucketPath" + private const val NAMESPACE = "namespace" + private const val STREAM_NAME = "stream_name1" + private const val OBJECT_TO_DELETE = "$NAMESPACE/$STREAM_NAME/2022_04_04_123456789_0.csv.gz" + } + + private lateinit var s3Client: AmazonS3 + private lateinit var s3StorageOperations: S3StorageOperations + + @BeforeEach + internal fun setup() { + val nameTransformer: NamingConventionTransformer = S3NameTransformer() + s3Client = Mockito.mock(AmazonS3::class.java) + + val objectSummary1 = + Mockito.mock( + S3ObjectSummary::class.java, + ) + val objectSummary2 = + Mockito.mock( + S3ObjectSummary::class.java, + ) + val objectSummary3 = + Mockito.mock( + S3ObjectSummary::class.java, + ) + Mockito.`when`(objectSummary1.key).thenReturn(OBJECT_TO_DELETE) + Mockito.`when`(objectSummary2.key) + .thenReturn("$NAMESPACE/stream_name2/2022_04_04_123456789_0.csv.gz") + Mockito.`when`(objectSummary3.key).thenReturn("other_files.txt") + + val results = + Mockito.mock( + ObjectListing::class.java, + ) + Mockito.`when`(results.isTruncated).thenReturn(false) + Mockito.`when`(results.objectSummaries) + .thenReturn(listOf(objectSummary1, objectSummary2, objectSummary3)) + Mockito.`when`( + s3Client.listObjects( + ArgumentMatchers.any( + ListObjectsRequest::class.java, + ), + ), + ) + .thenReturn(results) + + val s3Config = + S3DestinationConfig.create(BUCKET_NAME, FAKE_BUCKET_PATH, "fake-region") + .withEndpoint("fake-endpoint") + .withAccessKeyCredential("fake-accessKeyId", "fake-secretAccessKey") + .withS3Client(s3Client) + .get() + s3StorageOperations = S3StorageOperations(nameTransformer, s3Client, s3Config) + } + + @Test + internal fun testRegexMatch() { + val regexFormat = + Pattern.compile( + s3StorageOperations.getRegexFormat( + NAMESPACE, + STREAM_NAME, + S3DestinationConstants.DEFAULT_PATH_FORMAT, + ), + ) + assertTrue(regexFormat.matcher(OBJECT_TO_DELETE).matches()) + assertTrue( + regexFormat + .matcher( + s3StorageOperations.getBucketObjectPath( + NAMESPACE, + STREAM_NAME, + DateTime.now(), + S3DestinationConstants.DEFAULT_PATH_FORMAT, + ), + ) + .matches(), + ) + assertFalse( + regexFormat.matcher("$NAMESPACE/$STREAM_NAME/some_random_file_0.doc").matches(), + ) + assertFalse( + regexFormat.matcher("$NAMESPACE/stream_name2/2022_04_04_123456789_0.csv.gz").matches(), + ) + } + + @Test + internal fun testCustomRegexMatch() { + val customFormat = + "\${NAMESPACE}_\${STREAM_NAME}_\${YEAR}-\${MONTH}-\${DAY}-\${HOUR}-\${MINUTE}-\${SECOND}-\${MILLISECOND}-\${EPOCH}-\${UUID}" + assertTrue( + Pattern.compile( + s3StorageOperations.getRegexFormat(NAMESPACE, STREAM_NAME, customFormat) + ) + .matcher( + s3StorageOperations.getBucketObjectPath( + NAMESPACE, + STREAM_NAME, + DateTime.now(), + customFormat, + ), + ) + .matches(), + ) + } + + @Test + internal fun testGetExtension() { + assertEquals(".csv.gz", S3StorageOperations.getExtension("test.csv.gz")) + assertEquals(".gz", S3StorageOperations.getExtension("test.gz")) + assertEquals(".avro", S3StorageOperations.getExtension("test.avro")) + assertEquals("", S3StorageOperations.getExtension("test-file")) + } + + @Test + internal fun testCleanUpBucketObject() { + val pathFormat = S3DestinationConstants.DEFAULT_PATH_FORMAT + s3StorageOperations.cleanUpBucketObject( + NAMESPACE, + STREAM_NAME, + FAKE_BUCKET_PATH, + pathFormat, + ) + val deleteRequest = + ArgumentCaptor.forClass( + DeleteObjectsRequest::class.java, + ) + Mockito.verify(s3Client).deleteObjects(deleteRequest.capture()) + assertEquals(1, deleteRequest.value.keys.size) + assertEquals(OBJECT_TO_DELETE, deleteRequest.value.keys[0].key) + } + + @Test + internal fun testGetFilename() { + assertEquals("filename", S3StorageOperations.getFilename("filename")) + assertEquals("filename", S3StorageOperations.getFilename("/filename")) + assertEquals("filename", S3StorageOperations.getFilename("/p1/p2/filename")) + assertEquals("filename.csv", S3StorageOperations.getFilename("/p1/p2/filename.csv")) + } + + @Test + @Throws(InterruptedException::class) + internal fun getPartId() { + // Multithreaded utility class + + class PartIdGetter(val s3StorageOperations: S3StorageOperations) : Runnable { + val responses: MutableList = mutableListOf() + + override fun run() { + responses.add(s3StorageOperations.getPartId(FAKE_BUCKET_PATH)) + } + } + + val partIdGetter = PartIdGetter(s3StorageOperations) + + // single threaded + partIdGetter.run() // 0 + partIdGetter.run() // 1 + partIdGetter.run() // 2 + + // multithreaded + val executor = Executors.newFixedThreadPool(3) + for (i in 0..6) { + executor.execute(partIdGetter) + } + executor.shutdown() + executor.awaitTermination(5, TimeUnit.SECONDS) + + val responses = partIdGetter.responses + assertEquals(10, responses.size) + for (i in 0..9) { + assertTrue(responses.contains(i.toString())) + } + } + + @Test + internal fun getPartIdMultiplePaths() { + assertEquals("0", s3StorageOperations.getPartId(FAKE_BUCKET_PATH)) + assertEquals("1", s3StorageOperations.getPartId(FAKE_BUCKET_PATH)) + assertEquals("0", s3StorageOperations.getPartId("other_path")) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/AvroNameTransformerTest.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/AvroNameTransformerTest.kt new file mode 100644 index 0000000000000..1591e0b4c6076 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/AvroNameTransformerTest.kt @@ -0,0 +1,47 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.s3.avro + +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +class AvroNameTransformerTest { + + companion object { + private val INSTANCE = AvroNameTransformer() + private val RAW_TO_NORMALIZED_IDENTIFIERS: Map = + mapOf( + "name-space" to "name_space", + "spécial_character" to "special_character", + "99namespace" to "_99namespace", + ) + + private val RAW_TO_NORMALIZED_NAMESPACES: Map = + mapOf( + "" to "", + "name-space1.name-space2.namespace3" to "name_space1.name_space2.namespace3", + "namespace1.spécial_character" to "namespace1.special_character", + "99namespace.namespace2" to "_99namespace.namespace2", + ) + } + + @Test + internal fun testGetIdentifier() { + RAW_TO_NORMALIZED_IDENTIFIERS.forEach { (raw: String?, normalized: String?) -> + Assertions.assertEquals(normalized, INSTANCE.getIdentifier(raw)) + Assertions.assertEquals( + normalized, + INSTANCE.convertStreamName(raw), + ) + } + } + + @Test + internal fun testGetNamespace() { + RAW_TO_NORMALIZED_NAMESPACES.forEach { (raw: String?, normalized: String?) -> + Assertions.assertEquals(normalized, INSTANCE.getNamespace(raw)) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/AvroSerializedBufferTest.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/AvroSerializedBufferTest.kt new file mode 100644 index 0000000000000..2953d50e9ad34 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/AvroSerializedBufferTest.kt @@ -0,0 +1,205 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.s3.avro + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.base.DestinationConfig +import io.airbyte.cdk.integrations.destination.record_buffer.BufferStorage +import io.airbyte.cdk.integrations.destination.record_buffer.FileBuffer +import io.airbyte.cdk.integrations.destination.record_buffer.InMemoryBuffer +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.Field +import io.airbyte.protocol.models.JsonSchemaType +import io.airbyte.protocol.models.v0.AirbyteRecordMessage +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair +import io.airbyte.protocol.models.v0.CatalogHelpers +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import java.io.File +import java.io.InputStream +import org.apache.avro.file.DataFileReader +import org.apache.avro.file.SeekableByteArrayInput +import org.apache.avro.generic.GenericData +import org.apache.avro.generic.GenericDatumReader +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertFalse +import org.junit.jupiter.api.Assertions.assertTrue +import org.junit.jupiter.api.BeforeAll +import org.junit.jupiter.api.Disabled +import org.junit.jupiter.api.Test + +class AvroSerializedBufferTest() { + @Test + @Disabled( + "Flaky on CI, See run https://github.com/airbytehq/airbyte/actions/runs/7126781640/job/19405426141?pr=33201 " + + "org.opentest4j.AssertionFailedError: Expected size between 964 and 985, but actual size was 991 ==> expected: but was: ", + ) + @Throws( + Exception::class, + ) + internal fun testSnappyAvroWriter() { + val config = + S3AvroFormatConfig( + Jsons.jsonNode( + mapOf( + "compression_codec" to mapOf("codec" to "snappy"), + ), + ), + ) + runTest( + InMemoryBuffer( + AvroSerializedBuffer.DEFAULT_SUFFIX, + ), + 964L, + 985L, + config, + expectedString, + ) + } + + @Test + @Throws(Exception::class) + internal fun testGzipAvroFileWriter() { + val config = + S3AvroFormatConfig( + Jsons.jsonNode( + mapOf( + "compression_codec" to + mapOf( + "codec" to "zstandard", + "compression_level" to 20, + "include_checksum" to true, + ), + ), + ), + ) + runTest( + FileBuffer( + AvroSerializedBuffer.DEFAULT_SUFFIX, + ), + 965L, + 985L, + config, + expectedString, + ) + } + + @Test + @Throws(Exception::class) + internal fun testUncompressedAvroWriter() { + val config = + S3AvroFormatConfig( + Jsons.jsonNode( + mapOf( + "compression_codec" to + mapOf( + "codec" to "no compression", + ), + ), + ), + ) + runTest( + InMemoryBuffer( + AvroSerializedBuffer.DEFAULT_SUFFIX, + ), + 1010L, + 1020L, + config, + expectedString, + ) + } + + companion object { + private val MESSAGE_DATA: JsonNode = + Jsons.jsonNode( + mapOf( + "field1" to 10000, + "column2" to "string value", + "another field" to true, + "nested_column" to mapOf("array_column" to listOf(1, 2, 3)), + ), + ) + private const val STREAM: String = "stream1" + private val streamPair: AirbyteStreamNameNamespacePair = + AirbyteStreamNameNamespacePair( + STREAM, + null, + ) + private val message: AirbyteRecordMessage = + AirbyteRecordMessage() + .withStream(STREAM) + .withData(MESSAGE_DATA) + .withEmittedAt(System.currentTimeMillis()) + private val FIELDS: List = + listOf( + Field.of("field1", JsonSchemaType.NUMBER), + Field.of("column2", JsonSchemaType.STRING), + Field.of("another field", JsonSchemaType.BOOLEAN), + Field.of("nested_column", JsonSchemaType.OBJECT), + ) + private val catalog: ConfiguredAirbyteCatalog = + CatalogHelpers.createConfiguredAirbyteCatalog( + STREAM, + null, + FIELDS, + ) + + @BeforeAll + @JvmStatic + internal fun setup() { + DestinationConfig.initialize(Jsons.deserialize("{}")) + } + + private val expectedString: String + get() = + ("{\"_airbyte_ab_id\": \"\", \"_airbyte_emitted_at\": \"\", " + + "\"field1\": 10000.0, \"another_field\": true, " + + "\"nested_column\": {\"_airbyte_additional_properties\": {\"array_column\": \"[1,2,3]\"}}, " + + "\"column2\": \"string value\", " + + "\"_airbyte_additional_properties\": null}") + + @Throws(Exception::class) + private fun runTest( + buffer: BufferStorage, + minExpectedByte: Long, + maxExpectedByte: Long, + config: S3AvroFormatConfig, + expectedData: String + ) { + val outputFile: File = buffer.file + (AvroSerializedBuffer.createFunction(config) { buffer } + .apply( + streamPair, + catalog, + ) as AvroSerializedBuffer) + .use { writer -> + writer.accept(message) + writer.accept(message) + writer.flush() + // some data are randomized (uuid, timestamp, compression?) so the expected byte + // count is not always + // deterministic + assertTrue( + writer.byteCount in minExpectedByte..maxExpectedByte, + "Expected size between $minExpectedByte and $maxExpectedByte, but actual size was ${writer.byteCount}", + ) + val `in`: InputStream = writer.inputStream!! + DataFileReader( + SeekableByteArrayInput(`in`.readAllBytes()), + GenericDatumReader(), + ) + .use { dataFileReader -> + while (dataFileReader.hasNext()) { + val record: GenericData.Record = dataFileReader.next() + record.put("_airbyte_ab_id", "") + record.put("_airbyte_emitted_at", "") + val actualData: String = record.toString() + assertEquals(expectedData, actualData) + } + } + } + assertFalse(outputFile.exists()) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/JsonFieldNameUpdaterTest.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/JsonFieldNameUpdaterTest.kt new file mode 100644 index 0000000000000..771463d034d06 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/JsonFieldNameUpdaterTest.kt @@ -0,0 +1,41 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.avro + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.commons.json.Jsons.deserialize +import io.airbyte.commons.resources.MoreResources.readResource +import io.airbyte.commons.util.MoreIterators.toList +import java.io.IOException +import java.util.function.Function +import java.util.stream.Collectors +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class JsonFieldNameUpdaterTest { + @Test + @Throws(IOException::class) + fun testFieldNameUpdate() { + val testCases = deserialize(readResource("parquet/json_field_name_updater/test_case.json")) + for (testCase in testCases) { + val nameMap = testCase["nameMap"] + val nameUpdater = + JsonFieldNameUpdater( + toList(nameMap.fields()) + .stream() + .collect( + Collectors.toMap( + Function { obj: Map.Entry -> obj.key }, + Function { e: Map.Entry -> e.value.asText() } + ) + ) + ) + + val original = testCase["original"] + val updated = testCase["updated"] + + Assertions.assertEquals(original, nameUpdater.getJsonWithOriginalFieldNames(updated)) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/JsonSchemaTypeTest.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/JsonSchemaTypeTest.kt new file mode 100644 index 0000000000000..9f3ef0cf9af69 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/JsonSchemaTypeTest.kt @@ -0,0 +1,89 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.avro + +import io.airbyte.cdk.integrations.destination.s3.avro.JsonSchemaType.Companion.fromJsonSchemaType +import java.util.stream.Stream +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.extension.ExtensionContext +import org.junit.jupiter.params.ParameterizedTest +import org.junit.jupiter.params.provider.Arguments +import org.junit.jupiter.params.provider.ArgumentsProvider +import org.junit.jupiter.params.provider.ArgumentsSource + +class JsonSchemaTypeTest { + @ParameterizedTest + @ArgumentsSource(JsonSchemaTypeProvider::class) + fun testFromJsonSchemaType( + type: String, + airbyteType: String?, + expectedJsonSchemaType: JsonSchemaType? + ) { + Assertions.assertEquals(expectedJsonSchemaType, fromJsonSchemaType(type, airbyteType)) + } + + class JsonSchemaTypeProvider : ArgumentsProvider { + override fun provideArguments(context: ExtensionContext): Stream { + return Stream.of( + Arguments.of( + "WellKnownTypes.json#/definitions/Number", + null, + JsonSchemaType.NUMBER_V1 + ), + Arguments.of( + "WellKnownTypes.json#/definitions/String", + null, + JsonSchemaType.STRING_V1 + ), + Arguments.of( + "WellKnownTypes.json#/definitions/Integer", + null, + JsonSchemaType.INTEGER_V1 + ), + Arguments.of( + "WellKnownTypes.json#/definitions/Boolean", + null, + JsonSchemaType.BOOLEAN_V1 + ), + Arguments.of( + "WellKnownTypes.json#/definitions/BinaryData", + null, + JsonSchemaType.BINARY_DATA_V1 + ), + Arguments.of("WellKnownTypes.json#/definitions/Date", null, JsonSchemaType.DATE_V1), + Arguments.of( + "WellKnownTypes.json#/definitions/TimestampWithTimezone", + null, + JsonSchemaType.TIMESTAMP_WITH_TIMEZONE_V1 + ), + Arguments.of( + "WellKnownTypes.json#/definitions/TimestampWithoutTimezone", + null, + JsonSchemaType.TIMESTAMP_WITHOUT_TIMEZONE_V1 + ), + Arguments.of( + "WellKnownTypes.json#/definitions/TimeWithTimezone", + null, + JsonSchemaType.TIME_WITH_TIMEZONE_V1 + ), + Arguments.of( + "WellKnownTypes.json#/definitions/TimeWithoutTimezone", + null, + JsonSchemaType.TIME_WITHOUT_TIMEZONE_V1 + ), + Arguments.of("number", "integer", JsonSchemaType.NUMBER_INT_V0), + Arguments.of("string", "big_integer", JsonSchemaType.NUMBER_BIGINT_V0), + Arguments.of("number", "float", JsonSchemaType.NUMBER_FLOAT_V0), + Arguments.of("number", null, JsonSchemaType.NUMBER_V0), + Arguments.of("string", null, JsonSchemaType.STRING_V0), + Arguments.of("integer", null, JsonSchemaType.INTEGER_V0), + Arguments.of("boolean", null, JsonSchemaType.BOOLEAN_V0), + Arguments.of("null", null, JsonSchemaType.NULL), + Arguments.of("object", null, JsonSchemaType.OBJECT), + Arguments.of("array", null, JsonSchemaType.ARRAY), + Arguments.of("combined", null, JsonSchemaType.COMBINED) + ) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/JsonToAvroConverterTest.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/JsonToAvroConverterTest.kt new file mode 100644 index 0000000000000..85f82ef078fdc --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/JsonToAvroConverterTest.kt @@ -0,0 +1,292 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.s3.avro + +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.ObjectWriter +import io.airbyte.commons.jackson.MoreMappers +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.resources.MoreResources +import io.airbyte.commons.util.MoreIterators +import java.util.stream.Stream +import org.apache.avro.Schema +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertTrue +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.extension.ExtensionContext +import org.junit.jupiter.params.ParameterizedTest +import org.junit.jupiter.params.provider.Arguments +import org.junit.jupiter.params.provider.ArgumentsProvider +import org.junit.jupiter.params.provider.ArgumentsSource + +class JsonToAvroConverterTest { + + companion object { + private val WRITER: ObjectWriter = MoreMappers.initMapper().writer() + private val SCHEMA_CONVERTER = JsonToAvroSchemaConverter() + } + + @Test + internal fun testGetSingleTypes() { + val input1 = + Jsons.deserialize( + """ + {"${'$'}ref": "WellKnownTypes.json#/definitions/Number"}" + + """.trimIndent(), + ) + + assertEquals( + listOf(JsonSchemaType.NUMBER_V1), + JsonToAvroSchemaConverter.getTypes("field", input1), + ) + } + + @Test + internal fun testNoCombinedRestriction() { + val input1 = + Jsons.deserialize( + """ + {"${'$'}ref": "WellKnownTypes.json#/definitions/String"}" + + """.trimIndent(), + ) + assertTrue(JsonToAvroSchemaConverter.getCombinedRestriction(input1).isEmpty) + } + + @Test + internal fun testWithCombinedRestriction() { + val input2 = + Jsons.deserialize( + "{ \"anyOf\": [{ \"type\": \"string\" }, { \"type\": \"integer\" }] }" + ) + assertTrue(JsonToAvroSchemaConverter.getCombinedRestriction(input2).isPresent) + } + + @Deprecated("") + internal class GetFieldTypeTestCaseProviderV0 : ArgumentsProvider { + @Throws(Exception::class) + override fun provideArguments(context: ExtensionContext): Stream { + val testCases = + Jsons.deserialize( + MoreResources.readResource( + "parquet/json_schema_converter/type_conversion_test_cases_v0.json" + ) + ) + return MoreIterators.toList(testCases.elements()).stream().map { testCase: JsonNode -> + Arguments.of( + testCase["fieldName"].asText(), + testCase["jsonFieldSchema"], + testCase["avroFieldType"], + ) + } + } + } + + internal class GetFieldTypeTestCaseProviderV1 : ArgumentsProvider { + @Throws(Exception::class) + override fun provideArguments(context: ExtensionContext): Stream { + val testCases = + Jsons.deserialize( + MoreResources.readResource( + "parquet/json_schema_converter/type_conversion_test_cases_v1.json" + ) + ) + return MoreIterators.toList(testCases.elements()).stream().map { testCase: JsonNode -> + Arguments.of( + testCase["fieldName"].asText(), + testCase["jsonFieldSchema"], + testCase["avroFieldType"], + ) + } + } + } + + @Suppress("DEPRECATION") + @ParameterizedTest + @ArgumentsSource( + GetFieldTypeTestCaseProviderV0::class, + ) + internal fun testFieldTypeConversionV0( + fieldName: String, + jsonFieldSchema: JsonNode, + avroFieldType: JsonNode + ) { + assertEquals( + avroFieldType, + Jsons.deserialize( + SCHEMA_CONVERTER.parseJsonField( + fieldName, + fieldNamespace = null, + jsonFieldSchema, + appendExtraProps = true, + addStringToLogicalTypes = true, + ) + .toString(), + ), + "Test for $fieldName failed", + ) + } + + @ParameterizedTest + @ArgumentsSource( + GetFieldTypeTestCaseProviderV1::class, + ) + internal fun testFieldTypeConversionV1( + fieldName: String, + jsonFieldSchema: JsonNode, + avroFieldType: JsonNode? + ) { + assertEquals( + avroFieldType, + Jsons.deserialize( + SCHEMA_CONVERTER.parseJsonField( + fieldName = fieldName, + fieldNamespace = null, + fieldDefinition = jsonFieldSchema, + appendExtraProps = true, + addStringToLogicalTypes = true, + ) + .toString(), + ), + "Test for $fieldName failed", + ) + } + + @Deprecated("") + internal class GetAvroSchemaTestCaseProviderV0 : ArgumentsProvider { + @Throws(Exception::class) + override fun provideArguments(context: ExtensionContext): Stream { + val testCases = + Jsons.deserialize( + MoreResources.readResource( + "parquet/json_schema_converter/json_conversion_test_cases_v0.json" + ) + ) + return MoreIterators.toList(testCases.elements()).stream().map { testCase: JsonNode -> + Arguments.of( + testCase["schemaName"].asText(), + testCase["namespace"].asText(), + testCase["appendAirbyteFields"].asBoolean(), + testCase["jsonSchema"], + testCase["jsonObject"], + testCase["avroSchema"], + testCase["avroObject"], + ) + } + } + } + + internal class GetAvroSchemaTestCaseProviderV1 : ArgumentsProvider { + @Throws(Exception::class) + override fun provideArguments(context: ExtensionContext): Stream { + val testCases = + Jsons.deserialize( + MoreResources.readResource( + "parquet/json_schema_converter/json_conversion_test_cases_v1.json" + ) + ) + return MoreIterators.toList(testCases.elements()).stream().map { testCase: JsonNode -> + Arguments.of( + testCase["schemaName"].asText(), + testCase["namespace"].asText(), + testCase["appendAirbyteFields"].asBoolean(), + testCase["jsonSchema"], + testCase["jsonObject"], + testCase["avroSchema"], + testCase["avroObject"], + ) + } + } + } + + /** This test verifies both the schema and object conversion. */ + @Suppress("DEPRECATION") + @ParameterizedTest + @ArgumentsSource( + GetAvroSchemaTestCaseProviderV0::class, + ) + @Throws(Exception::class) + internal fun testJsonAvroConversionV0( + schemaName: String, + namespace: String?, + appendAirbyteFields: Boolean, + jsonSchema: JsonNode, + jsonObject: JsonNode?, + avroSchema: JsonNode, + avroObject: JsonNode? + ) { + val actualAvroSchema = + SCHEMA_CONVERTER.getAvroSchema( + jsonSchema, + schemaName, + namespace, + appendAirbyteFields, + appendExtraProps = true, + addStringToLogicalTypes = true, + isRootNode = true, + ) + assertEquals( + avroSchema, + Jsons.deserialize(actualAvroSchema.toString()), + "Schema conversion for $schemaName failed", + ) + + val schemaParser = Schema.Parser() + val actualAvroObject = + AvroConstants.JSON_CONVERTER.convertToGenericDataRecord( + WRITER.writeValueAsBytes(jsonObject), + schemaParser.parse(Jsons.serialize(avroSchema)), + ) + assertEquals( + avroObject, + Jsons.deserialize(actualAvroObject.toString()), + "Object conversion for $schemaName failed", + ) + } + + @ParameterizedTest + @ArgumentsSource( + GetAvroSchemaTestCaseProviderV1::class, + ) + @Throws(Exception::class) + internal fun testJsonAvroConversionV1( + schemaName: String, + namespace: String?, + appendAirbyteFields: Boolean, + jsonSchema: JsonNode, + jsonObject: JsonNode?, + avroSchema: JsonNode, + avroObject: JsonNode? + ) { + val actualAvroSchema = + SCHEMA_CONVERTER.getAvroSchema( + jsonSchema, + schemaName, + namespace, + appendAirbyteFields, + appendExtraProps = true, + addStringToLogicalTypes = true, + isRootNode = true, + ) + assertEquals( + avroSchema, + Jsons.deserialize(actualAvroSchema.toString()), + "Schema conversion for $schemaName failed", + ) + + val schemaParser = Schema.Parser() + val actualAvroObject = + AvroConstants.JSON_CONVERTER.convertToGenericDataRecord( + WRITER.writeValueAsBytes(jsonObject), + schemaParser.parse(Jsons.serialize(avroSchema)), + ) + assertEquals( + avroObject, + Jsons.deserialize(actualAvroObject.toString()), + "Object conversion for $schemaName failed", + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/S3AvroFormatConfigTest.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/S3AvroFormatConfigTest.kt new file mode 100644 index 0000000000000..35235b458f34c --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/avro/S3AvroFormatConfigTest.kt @@ -0,0 +1,143 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.avro + +import com.amazonaws.services.s3.internal.Constants +import com.google.common.collect.Lists +import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfig.Companion.getS3DestinationConfig +import io.airbyte.cdk.integrations.destination.s3.StorageProvider +import io.airbyte.cdk.integrations.destination.s3.avro.S3AvroFormatConfig.Companion.parseCodecConfig +import io.airbyte.cdk.integrations.destination.s3.util.ConfigTestUtils +import io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory +import io.airbyte.commons.json.Jsons.deserialize +import org.apache.avro.file.DataFileConstants +import org.apache.commons.lang3.reflect.FieldUtils +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class S3AvroFormatConfigTest { + @Test + fun testParseCodecConfigNull() { + val nullConfigs: List = + Lists.newArrayList("{}", "{ \"codec\": \"no compression\" }") + for (nullConfig in nullConfigs) { + Assertions.assertEquals( + DataFileConstants.NULL_CODEC, + parseCodecConfig(deserialize(nullConfig)).toString() + ) + } + } + + @Test + fun testParseCodecConfigDeflate() { + // default compression level 0 + val codecFactory1 = parseCodecConfig(deserialize("{ \"codec\": \"deflate\" }")) + Assertions.assertEquals("deflate-0", codecFactory1.toString()) + + // compression level 5 + val codecFactory2 = + parseCodecConfig(deserialize("{ \"codec\": \"deflate\", \"compression_level\": 5 }")) + Assertions.assertEquals("deflate-5", codecFactory2.toString()) + } + + @Test + fun testParseCodecConfigBzip2() { + val bzip2Config = deserialize("{ \"codec\": \"bzip2\" }") + val codecFactory = parseCodecConfig(bzip2Config) + Assertions.assertEquals(DataFileConstants.BZIP2_CODEC, codecFactory.toString()) + } + + @Test + fun testParseCodecConfigXz() { + // default compression level 6 + val codecFactory1 = parseCodecConfig(deserialize("{ \"codec\": \"xz\" }")) + Assertions.assertEquals("xz-6", codecFactory1.toString()) + + // compression level 7 + val codecFactory2 = + parseCodecConfig(deserialize("{ \"codec\": \"xz\", \"compression_level\": 7 }")) + Assertions.assertEquals("xz-7", codecFactory2.toString()) + } + + @Test + fun testParseCodecConfigZstandard() { + // default compression level 3 + val codecFactory1 = parseCodecConfig(deserialize("{ \"codec\": \"zstandard\" }")) + // There is no way to verify the checksum; all relevant methods are private or protected... + Assertions.assertEquals("zstandard[3]", codecFactory1.toString()) + + // compression level 20 + val codecFactory2 = + parseCodecConfig( + deserialize( + "{ \"codec\": \"zstandard\", \"compression_level\": 20, \"include_checksum\": true }" + ) + ) + // There is no way to verify the checksum; all relevant methods are private or protected... + Assertions.assertEquals("zstandard[20]", codecFactory2.toString()) + } + + @Test + fun testParseCodecConfigSnappy() { + val snappyConfig = deserialize("{ \"codec\": \"snappy\" }") + val codecFactory = parseCodecConfig(snappyConfig) + Assertions.assertEquals(DataFileConstants.SNAPPY_CODEC, codecFactory.toString()) + } + + @Test + fun testParseCodecConfigInvalid() { + try { + val invalidConfig = deserialize("{ \"codec\": \"bi-directional-bfs\" }") + parseCodecConfig(invalidConfig) + Assertions.fail() + } catch (e: IllegalArgumentException) { + // expected + } + } + + @Test + @Throws(IllegalAccessException::class) + fun testHandlePartSizeConfig() { + val config = ConfigTestUtils.getBaseConfig(deserialize("""{ + "format_type": "AVRO" +}""")) + + val s3DestinationConfig = getS3DestinationConfig(config!!, StorageProvider.AWS_S3) + ConfigTestUtils.assertBaseConfig(s3DestinationConfig) + + val formatConfig = s3DestinationConfig.formatConfig + Assertions.assertEquals("AVRO", formatConfig!!.format.name) + // Assert that is set properly in config + val streamTransferManager = + StreamTransferManagerFactory.create(s3DestinationConfig.bucketName, "objectKey", null) + .get() + + val partSizeBytes = FieldUtils.readField(streamTransferManager, "partSize", true) as Int + Assertions.assertEquals( + Constants.MB * StreamTransferManagerFactory.DEFAULT_PART_SIZE_MB, + partSizeBytes + ) + } + + @Test + @Throws(IllegalAccessException::class) + fun testHandleAbsenceOfPartSizeConfig() { + val config = ConfigTestUtils.getBaseConfig(deserialize("""{ + "format_type": "AVRO" +}""")) + + val s3DestinationConfig = getS3DestinationConfig(config!!, StorageProvider.AWS_S3) + ConfigTestUtils.assertBaseConfig(s3DestinationConfig) + + val streamTransferManager = + StreamTransferManagerFactory.create(s3DestinationConfig.bucketName, "objectKey", null) + .get() + + val partSizeBytes = FieldUtils.readField(streamTransferManager, "partSize", true) as Int + Assertions.assertEquals( + Constants.MB * StreamTransferManagerFactory.DEFAULT_PART_SIZE_MB, + partSizeBytes + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/CsvSerializedBufferTest.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/CsvSerializedBufferTest.kt new file mode 100644 index 0000000000000..faaaee10290f4 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/CsvSerializedBufferTest.kt @@ -0,0 +1,225 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.s3.csv + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.base.DestinationConfig +import io.airbyte.cdk.integrations.destination.record_buffer.BufferStorage +import io.airbyte.cdk.integrations.destination.record_buffer.FileBuffer +import io.airbyte.cdk.integrations.destination.record_buffer.InMemoryBuffer +import io.airbyte.cdk.integrations.destination.s3.S3Format +import io.airbyte.cdk.integrations.destination.s3.util.Flattening +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.Field +import io.airbyte.protocol.models.JsonSchemaType +import io.airbyte.protocol.models.v0.AirbyteRecordMessage +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair +import io.airbyte.protocol.models.v0.CatalogHelpers +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import java.io.BufferedReader +import java.io.InputStream +import java.io.InputStreamReader +import java.nio.charset.StandardCharsets +import java.util.UUID +import java.util.zip.GZIPInputStream +import org.apache.commons.csv.CSVFormat +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertFalse +import org.junit.jupiter.api.Assertions.assertTrue +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test + +class CsvSerializedBufferTest { + + companion object { + private val MESSAGE_DATA: JsonNode = + Jsons.jsonNode( + mapOf( + "field1" to 10000, + "column2" to "string value", + "another field" to true, + "nested_column" to mapOf("array_column" to listOf(1, 2, 3)), + ), + ) + private const val STREAM = "stream1" + private val streamPair = AirbyteStreamNameNamespacePair(STREAM, null) + private val message: AirbyteRecordMessage = + AirbyteRecordMessage() + .withStream(STREAM) + .withData(MESSAGE_DATA) + .withEmittedAt(System.currentTimeMillis()) + private val FIELDS: List = + listOf( + Field.of("field1", JsonSchemaType.NUMBER), + Field.of("column2", JsonSchemaType.STRING), + Field.of("another field", JsonSchemaType.BOOLEAN), + Field.of("nested_column", JsonSchemaType.OBJECT), + ) + private val catalog: ConfiguredAirbyteCatalog = + CatalogHelpers.createConfiguredAirbyteCatalog(STREAM, null, FIELDS) + private const val CSV_FILE_EXTENSION = ".csv" + private val csvFormat: CSVFormat = CSVFormat.newFormat(',') + } + + @BeforeEach + internal fun setup() { + DestinationConfig.initialize(Jsons.emptyObject()) + } + + @Test + @Throws(Exception::class) + internal fun testUncompressedDefaultCsvFormatWriter() { + runTest( + InMemoryBuffer(CSV_FILE_EXTENSION), + CSVFormat.DEFAULT, + false, + 350L, + 365L, + null, + getExpectedString(CSVFormat.DEFAULT), + ) + } + + @Test + @Throws(Exception::class) + internal fun testUncompressedCsvWriter() { + runTest( + InMemoryBuffer(CSV_FILE_EXTENSION), + csvFormat, + false, + 320L, + 335L, + null, + getExpectedString(csvFormat), + ) + } + + @Test + @Throws(Exception::class) + internal fun testCompressedCsvWriter() { + runTest( + InMemoryBuffer(CSV_FILE_EXTENSION), + csvFormat, + true, + 170L, + 190L, + null, + getExpectedString(csvFormat), + ) + } + + @Test + @Throws(Exception::class) + internal fun testCompressedCsvFileWriter() { + runTest( + FileBuffer(CSV_FILE_EXTENSION), + csvFormat, + true, + 170L, + 190L, + null, + getExpectedString(csvFormat), + ) + } + + private fun getExpectedString(csvFormat: CSVFormat): String { + var expectedData = Jsons.serialize(MESSAGE_DATA) + if (csvFormat == CSVFormat.DEFAULT) { + expectedData = "\"" + expectedData.replace("\"", "\"\"") + "\"" + } + return expectedData + } + + @Test + @Throws(Exception::class) + @Suppress("DEPRECATION") + internal fun testFlattenCompressedCsvFileWriter() { + val expectedData = "true,string value,10000,{\"array_column\":[1,2,3]}" + runTest( + FileBuffer(CSV_FILE_EXTENSION), + CSVFormat.newFormat(',').withRecordSeparator('\n'), + true, + 135L, + 150L, + S3CsvFormatConfig( + Jsons.jsonNode( + mapOf( + "format_type" to S3Format.CSV, + "flattening" to Flattening.ROOT_LEVEL.value, + ), + ), + ), + expectedData + expectedData, + ) + } + + @Throws(Exception::class) + private fun runTest( + buffer: BufferStorage, + csvFormat: CSVFormat, + withCompression: Boolean, + minExpectedByte: Long, + maxExpectedByte: Long, + config: S3CsvFormatConfig?, + expectedData: String + ) { + val outputFile = buffer.file + (CsvSerializedBuffer.createFunction(config) { buffer } + .apply( + streamPair, + catalog, + ) as CsvSerializedBuffer) + .use { writer -> + writer.withCsvFormat(csvFormat) + writer.withCompression(withCompression) + writer.accept(message) + writer.accept(message) + writer.flush() + // some data are randomized (uuid, timestamp, compression?) so the expected byte + // count is not always + // deterministic + assertTrue( + writer.byteCount in minExpectedByte..maxExpectedByte, + "Expected size between $minExpectedByte and $maxExpectedByte, but actual size was ${writer.byteCount}", + ) + val inputStream: InputStream = + if (withCompression) { + GZIPInputStream(writer.inputStream) + } else { + writer.inputStream!! + } + val actualData: String + if (config == null) { + actualData = + String( + inputStream.readAllBytes(), + StandardCharsets.UTF_8, + ) + .substring( + UUID.randomUUID().toString().length + 1, + ) // remove the last part of the string with random timestamp + .substring(0, expectedData.length) + } else { + val reader = + BufferedReader(InputStreamReader(inputStream, StandardCharsets.UTF_8)) + val tmpData = StringBuilder() + var line: String + while (reader.ready()) { + line = reader.readLine() + tmpData.append( + line // remove uuid + .substring( + UUID.randomUUID().toString().length + 1 + ) // remove timestamp + .replace("\\A[0-9]+,".toRegex(), ""), + ) + } + actualData = tmpData.toString() + } + assertEquals(expectedData, actualData) + } + assertFalse(outputFile.exists()) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/NoFlatteningSheetGeneratorTest.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/NoFlatteningSheetGeneratorTest.kt new file mode 100644 index 0000000000000..209f77e772f73 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/NoFlatteningSheetGeneratorTest.kt @@ -0,0 +1,46 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.s3.csv + +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.ObjectMapper +import io.airbyte.cdk.integrations.base.JavaBaseConstants +import io.airbyte.commons.jackson.MoreMappers +import org.junit.jupiter.api.Assertions.assertLinesMatch +import org.junit.jupiter.api.Test + +class NoFlatteningSheetGeneratorTest { + + private val mapper: ObjectMapper = MoreMappers.initMapper() + private val sheetGenerator = NoFlatteningSheetGenerator() + + @Test + internal fun testGetHeaderRow() { + assertLinesMatch( + listOf( + JavaBaseConstants.COLUMN_NAME_AB_ID, + JavaBaseConstants.COLUMN_NAME_EMITTED_AT, + JavaBaseConstants.COLUMN_NAME_DATA, + ), + sheetGenerator.getHeaderRow(), + ) + } + + @Test + internal fun testGetRecordColumns() { + val json = mapper.createObjectNode() + json.set("Field 4", mapper.createObjectNode().put("Field 41", 15)) + json.put("Field 1", "A") + json.put("Field 3", 71) + json.put("Field 2", true) + + assertLinesMatch( + listOf( + "{\"Field 4\":{\"Field 41\":15},\"Field 1\":\"A\",\"Field 3\":71,\"Field 2\":true}" + ), + sheetGenerator.getRecordColumns(json), + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/RootLevelFlatteningSheetGeneratorTest.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/RootLevelFlatteningSheetGeneratorTest.kt new file mode 100644 index 0000000000000..9006be50a4fef --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/RootLevelFlatteningSheetGeneratorTest.kt @@ -0,0 +1,74 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.s3.csv + +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.ObjectMapper +import com.fasterxml.jackson.databind.node.ObjectNode +import io.airbyte.cdk.integrations.base.JavaBaseConstants +import io.airbyte.commons.jackson.MoreMappers +import org.junit.jupiter.api.Assertions.assertLinesMatch +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test + +class RootLevelFlatteningSheetGeneratorTest { + + private lateinit var sheetGenerator: RootLevelFlatteningSheetGenerator + + @BeforeEach + internal fun createGenerator() { + this.sheetGenerator = RootLevelFlatteningSheetGenerator(SCHEMA) + } + + @Test + internal fun testGetHeaderRow() { + assertLinesMatch( + listOf( + JavaBaseConstants.COLUMN_NAME_AB_ID, + JavaBaseConstants.COLUMN_NAME_EMITTED_AT, + "A", + "B", + "C", + "a", + "b", + "c", + ), + sheetGenerator.getHeaderRow(), + ) + } + + @Test + internal fun testGetRecordColumns() { + val json = MAPPER.createObjectNode() + // Field c is missing + json.put("C", 3) + json.put("B", "value B") + json.set("A", MAPPER.createObjectNode().put("Field 41", 15)) + json.put("b", "value b") + json.put("a", 1) + + assertLinesMatch( + // A, B, C, a, b, c + listOf("{\"Field 41\":15}", "value B", "3", "1", "value b", ""), + sheetGenerator.getRecordColumns(json), + ) + } + + companion object { + private val MAPPER: ObjectMapper = MoreMappers.initMapper() + private val SCHEMA: ObjectNode = MAPPER.createObjectNode() + + init { + val fields: List = listOf("C", "B", "A", "c", "b", "a").shuffled() + + val schemaProperties = MAPPER.createObjectNode() + for (field in fields) { + schemaProperties.set(field, MAPPER.createObjectNode()) + } + + SCHEMA.set("properties", schemaProperties) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/S3CsvFormatConfigTest.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/S3CsvFormatConfigTest.kt new file mode 100644 index 0000000000000..d640e080f07e1 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/S3CsvFormatConfigTest.kt @@ -0,0 +1,116 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.csv + +import com.amazonaws.services.s3.internal.Constants +import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfig.Companion.getS3DestinationConfig +import io.airbyte.cdk.integrations.destination.s3.S3DestinationConstants +import io.airbyte.cdk.integrations.destination.s3.util.CompressionType +import io.airbyte.cdk.integrations.destination.s3.util.ConfigTestUtils +import io.airbyte.cdk.integrations.destination.s3.util.Flattening +import io.airbyte.cdk.integrations.destination.s3.util.Flattening.Companion.fromValue +import io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory +import io.airbyte.commons.json.Jsons.deserialize +import org.apache.commons.lang3.reflect.FieldUtils +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +// S3CsvFormatConfig +class S3CsvFormatConfigTest { + @Test // Flattening enums can be created from value string + fun testFlatteningCreationFromString() { + Assertions.assertEquals(Flattening.NO, fromValue("no flattening")) + Assertions.assertEquals(Flattening.ROOT_LEVEL, fromValue("root level flattening")) + try { + fromValue("invalid flattening value") + } catch (e: Exception) { + Assertions.assertTrue(e is IllegalArgumentException) + } + } + + @Test + @Throws(IllegalAccessException::class) + fun testHandlePartSizeConfig() { + val config = + ConfigTestUtils.getBaseConfig( + deserialize( + """{ + "format_type": "CSV", + "flattening": "Root level flattening" +}""" + ) + ) + + val s3DestinationConfig = getS3DestinationConfig(config!!) + ConfigTestUtils.assertBaseConfig(s3DestinationConfig) + + val formatConfig = s3DestinationConfig.formatConfig + Assertions.assertEquals("CSV", formatConfig!!.format.name) + // Assert that is set properly in config + val streamTransferManager = + StreamTransferManagerFactory.create(s3DestinationConfig.bucketName, "objectKey", null) + .get() + + val partSizeBytes = FieldUtils.readField(streamTransferManager, "partSize", true) as Int + Assertions.assertEquals( + Constants.MB * StreamTransferManagerFactory.DEFAULT_PART_SIZE_MB, + partSizeBytes + ) + } + + @Test + @Throws(IllegalAccessException::class) + fun testHandleAbsenceOfPartSizeConfig() { + val config = + ConfigTestUtils.getBaseConfig( + deserialize( + """{ + "format_type": "CSV", + "flattening": "Root level flattening" +}""" + ) + ) + + val s3DestinationConfig = getS3DestinationConfig(config!!) + ConfigTestUtils.assertBaseConfig(s3DestinationConfig) + + val streamTransferManager = + StreamTransferManagerFactory.create(s3DestinationConfig.bucketName, "objectKey", null) + .get() + + val partSizeBytes = FieldUtils.readField(streamTransferManager, "partSize", true) as Int + Assertions.assertEquals( + Constants.MB * StreamTransferManagerFactory.DEFAULT_PART_SIZE_MB, + partSizeBytes + ) + } + + @Test + fun testGzipCompressionConfig() { + // without gzip compression config + val configWithoutGzipCompression = + ConfigTestUtils.getBaseConfig(deserialize("""{ + "format_type": "CSV" +}""")) + val s3ConfigWithoutGzipCompression = getS3DestinationConfig(configWithoutGzipCompression!!) + Assertions.assertEquals( + S3DestinationConstants.DEFAULT_COMPRESSION_TYPE, + (s3ConfigWithoutGzipCompression.formatConfig as S3CsvFormatConfig?)!!.compressionType + ) + + // with gzip compression config + val configWithGzipCompression = + ConfigTestUtils.getBaseConfig( + deserialize("""{ + "format_type": "CSV", + "gzip_compression": false +}""") + ) + val gcsConfigWithGzipCompression = getS3DestinationConfig(configWithGzipCompression!!) + Assertions.assertEquals( + CompressionType.GZIP, + (gcsConfigWithGzipCompression.formatConfig as S3CsvFormatConfig?)!!.compressionType + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/S3CsvWriterTest.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/S3CsvWriterTest.kt new file mode 100644 index 0000000000000..0016b1ec8ee32 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/csv/S3CsvWriterTest.kt @@ -0,0 +1,352 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.csv + +import alex.mojaki.s3upload.MultiPartOutputStream +import alex.mojaki.s3upload.StreamTransferManager +import com.amazonaws.services.s3.AmazonS3 +import com.amazonaws.services.s3.AmazonS3Client +import com.fasterxml.jackson.databind.ObjectMapper +import io.airbyte.cdk.integrations.base.DestinationConfig.Companion.initialize +import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfig.Companion.create +import io.airbyte.cdk.integrations.destination.s3.util.CompressionType +import io.airbyte.cdk.integrations.destination.s3.util.Flattening +import io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerWithMetadata +import io.airbyte.commons.json.Jsons.emptyObject +import io.airbyte.protocol.models.v0.AirbyteRecordMessage +import io.airbyte.protocol.models.v0.AirbyteStream +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import io.airbyte.protocol.models.v0.DestinationSyncMode +import java.io.ByteArrayOutputStream +import java.io.IOException +import java.nio.charset.StandardCharsets +import java.sql.Timestamp +import java.time.Instant +import java.util.* +import java.util.concurrent.TimeUnit +import org.apache.commons.csv.CSVFormat +import org.junit.jupiter.api.* +import org.mockito.ArgumentMatchers +import org.mockito.MockedConstruction +import org.mockito.Mockito +import org.mockito.invocation.InvocationOnMock + +@Timeout(value = 90, unit = TimeUnit.SECONDS) +internal class S3CsvWriterTest { + private var s3Client: AmazonS3? = null + + private lateinit var streamTransferManagerMockedConstruction: + MockedConstruction + private lateinit var streamTransferManagerConstructorArguments: + MutableList + private lateinit var outputStreams: MutableList + + @JvmRecord + private data class StreamTransferManagerArguments(val bucket: String, val `object`: String) + + @BeforeEach + fun setup() { + streamTransferManagerConstructorArguments = ArrayList() + outputStreams = ArrayList() + // This is basically RETURNS_SELF, except with getMultiPartOutputStreams configured + // correctly. + // Other non-void methods (e.g. toString()) will return null. + streamTransferManagerMockedConstruction = + Mockito.mockConstruction(StreamTransferManagerWithMetadata::class.java) { + mock: StreamTransferManagerWithMetadata, + context: MockedConstruction.Context -> + // Mockito doesn't seem to provide an easy way to actually retrieve these arguments + // later on, so + // manually store them on construction. + // _PowerMockito_ does, but I didn't want to set up that additional dependency. + val arguments = context.arguments() + streamTransferManagerConstructorArguments.add( + StreamTransferManagerArguments(arguments[0] as String, arguments[1] as String) + ) + + Mockito.doReturn(mock).`when`(mock).numUploadThreads(ArgumentMatchers.anyInt()) + Mockito.doReturn(mock).`when`(mock).numStreams(ArgumentMatchers.anyInt()) + Mockito.doReturn(mock).`when`(mock).queueCapacity(ArgumentMatchers.anyInt()) + Mockito.doReturn(mock).`when`(mock).partSize(ArgumentMatchers.anyLong()) + + // We can't write a fake MultiPartOutputStream, because it doesn't have a public + // constructor. + // So instead, we'll build a mock that captures its data into a + // ByteArrayOutputStream. + val stream = Mockito.mock(MultiPartOutputStream::class.java) + Mockito.doReturn(listOf(stream)).`when`(mock).multiPartOutputStreams + val capturer = ByteArrayOutputStream() + outputStreams.add(capturer) + Mockito.doAnswer { invocation: InvocationOnMock -> + capturer.write(invocation.getArgument(0) as Int) + null + } + .`when`(stream) + .write(ArgumentMatchers.anyInt()) + Mockito.doAnswer { invocation: InvocationOnMock -> + capturer.write(invocation.getArgument(0)) + null + } + .`when`(stream) + .write(ArgumentMatchers.any(ByteArray::class.java)) + Mockito.doAnswer { invocation: InvocationOnMock -> + capturer.write( + invocation.getArgument(0), + invocation.getArgument(1), + invocation.getArgument(2) + ) + null + } + .`when`(stream) + .write( + ArgumentMatchers.any(ByteArray::class.java), + ArgumentMatchers.anyInt(), + ArgumentMatchers.anyInt() + ) + } + + s3Client = Mockito.mock(AmazonS3Client::class.java) + } + + private fun writer(): S3CsvWriter.Builder { + return S3CsvWriter.Builder(CONFIG, s3Client!!, CONFIGURED_STREAM, UPLOAD_TIME) + .uploadThreads(UPLOAD_THREADS) + .queueCapacity(QUEUE_CAPACITY) + } + + @AfterEach + fun teardown() { + streamTransferManagerMockedConstruction!!.close() + } + + @Test + @Throws(IOException::class) + fun generatesCorrectObjectKey_when_created() { + val writer = writer().build() + + val objectKey = writer.outputPath + + checkObjectName(objectKey) + } + + @Test + @Throws(IOException::class) + fun createsExactlyOneUpload() { + writer().build() + + Assertions.assertEquals(1, streamTransferManagerMockedConstruction!!.constructed().size) + + val manager: StreamTransferManager = + streamTransferManagerMockedConstruction!!.constructed()[0] + val args = streamTransferManagerConstructorArguments!![0] + Mockito.verify(manager).numUploadThreads(UPLOAD_THREADS) + Mockito.verify(manager).queueCapacity(QUEUE_CAPACITY) + Assertions.assertEquals("fake-bucket", args.bucket) + checkObjectName(args.`object`) + } + + @Test + @Throws(Exception::class) + fun closesS3Upload_when_stagingUploaderClosedSuccessfully() { + val writer = writer().build() + + writer.close(false) + + val managers = streamTransferManagerMockedConstruction!!.constructed() + val manager: StreamTransferManager = managers[0] + Mockito.verify(manager).complete() + } + + @Test + @Throws(Exception::class) + fun closesS3Upload_when_stagingUploaderClosedFailingly() { + val writer = writer().build() + + writer.close(true) + + val managers = streamTransferManagerMockedConstruction!!.constructed() + val manager: StreamTransferManager = managers[0] + Mockito.verify(manager).abort() + } + + @Test + @Throws(IOException::class) + fun writesContentsCorrectly_when_headerEnabled() { + val writer = writer().build() + + writer.write( + UUID.fromString("f6767f7d-ce1e-45cc-92db-2ad3dfdd088e"), + AirbyteRecordMessage() + .withData(OBJECT_MAPPER.readTree("{\"foo\": 73}")) + .withEmittedAt(1234L) + ) + writer.write( + UUID.fromString("2b95a13f-d54f-4370-a712-1c7bf2716190"), + AirbyteRecordMessage() + .withData(OBJECT_MAPPER.readTree("{\"bar\": 84}")) + .withEmittedAt(2345L) + ) + writer.close(false) + + // carriage returns are required b/c RFC4180 requires it :( + Assertions.assertEquals( + """ + "_airbyte_ab_id","_airbyte_emitted_at","_airbyte_data" + "f6767f7d-ce1e-45cc-92db-2ad3dfdd088e","1234","{""foo"":73}" + "2b95a13f-d54f-4370-a712-1c7bf2716190","2345","{""bar"":84}" + + """ + .trimIndent() + .replace("\n", "\r\n"), + outputStreams!![0].toString(StandardCharsets.UTF_8) + ) + } + + @Test + @Throws(IOException::class) + fun writesContentsCorrectly_when_headerDisabled() { + val writer = writer().withHeader(false).build() + + writer.write( + UUID.fromString("f6767f7d-ce1e-45cc-92db-2ad3dfdd088e"), + AirbyteRecordMessage() + .withData(OBJECT_MAPPER.readTree("{\"foo\": 73}")) + .withEmittedAt(1234L) + ) + writer.write( + UUID.fromString("2b95a13f-d54f-4370-a712-1c7bf2716190"), + AirbyteRecordMessage() + .withData(OBJECT_MAPPER.readTree("{\"bar\": 84}")) + .withEmittedAt(2345L) + ) + writer.close(false) + + // carriage returns are required b/c RFC4180 requires it :( + Assertions.assertEquals( + """ + "f6767f7d-ce1e-45cc-92db-2ad3dfdd088e","1234","{""foo"":73}" + "2b95a13f-d54f-4370-a712-1c7bf2716190","2345","{""bar"":84}" + + """ + .trimIndent() + .replace("\n", "\r\n"), + outputStreams!![0].toString(StandardCharsets.UTF_8) + ) + } + + /** + * This test verifies that the S3StreamCopier usecase works. Specifically, the withHeader, + * csvSettings, and csvSheetGenerator options were all added solely to support S3StreamCopier; + * we want to verify that it outputs the exact same data as the previous implementation. + */ + @Test + @Throws(IOException::class) + fun writesContentsCorrectly_when_stagingDatabaseConfig() { + initialize(emptyObject()) + val s3Config = + create("fake-bucket", "fake-bucketPath", "fake-region") + .withEndpoint("fake-endpoint") + .withAccessKeyCredential("fake-access-key-id", "fake-secret-access-key") + .withFormatConfig(CSV_FORMAT_CONFIG) + .get() + val writer = + S3CsvWriter.Builder(s3Config, s3Client!!, CONFIGURED_STREAM, UPLOAD_TIME) + .uploadThreads(UPLOAD_THREADS) + .queueCapacity(QUEUE_CAPACITY) + .withHeader(false) + .csvSettings(CSVFormat.DEFAULT) + .csvSheetGenerator(StagingDatabaseCsvSheetGenerator()) + .build() + + writer.write( + UUID.fromString("f6767f7d-ce1e-45cc-92db-2ad3dfdd088e"), + AirbyteRecordMessage() + .withData(OBJECT_MAPPER.readTree("{\"foo\": 73}")) + .withEmittedAt(1234L) + ) + writer.write( + UUID.fromString("2b95a13f-d54f-4370-a712-1c7bf2716190"), + AirbyteRecordMessage() + .withData(OBJECT_MAPPER.readTree("{\"bar\": 84}")) + .withEmittedAt(2345L) + ) + writer.close(false) + + // carriage returns are required b/c RFC4180 requires it :( + // Dynamically generate the timestamp because we generate in local time. + Assertions.assertEquals( + """ + f6767f7d-ce1e-45cc-92db-2ad3dfdd088e,"{""foo"":73}",1970-01-01T00:00:01.234Z + 2b95a13f-d54f-4370-a712-1c7bf2716190,"{""bar"":84}",1970-01-01T00:00:02.345Z + + """ + .trimIndent() + .replace("\n", "\r\n"), + outputStreams!![0].toString(StandardCharsets.UTF_8) + ) + } + + companion object { + val CONFIGURED_STREAM: ConfiguredAirbyteStream = + ConfiguredAirbyteStream() + .withDestinationSyncMode(DestinationSyncMode.APPEND) + .withStream(AirbyteStream().withName("fake-stream").withNamespace("fake-namespace")) + private val OBJECT_MAPPER = ObjectMapper() + + private val CSV_FORMAT_CONFIG = + S3CsvFormatConfig(Flattening.NO, CompressionType.NO_COMPRESSION) + + private val CONFIG = + create("fake-bucket", "fake-bucketPath", "fake-region") + .withEndpoint("fake-endpoint") + .withAccessKeyCredential("fake-access-key-id", "fake-secret-access-key") + .withFormatConfig(CSV_FORMAT_CONFIG) + .get() + + // equivalent to Thu, 09 Dec 2021 19:17:54 GMT + private val UPLOAD_TIME: Timestamp = Timestamp.from(Instant.ofEpochMilli(1639077474000L)) + private const val UPLOAD_THREADS = 8 + private const val QUEUE_CAPACITY = 9 + + // The full path would be something like + // "fake-bucketPath/fake-namespace/fake-stream/2021_12_09_1639077474000_e549e712-b89c-4272-9496-9690ba7f973e.csv" + // 2021_12_09_1639077474000 is generated from the timestamp. It's followed by a random UUID, + // in case + // we need to create multiple files. + private const val EXPECTED_OBJECT_BEGINNING = + "fake-bucketPath/fake-namespace/fake-stream/2021_12_09_1639077474000_" + private const val EXPECTED_OBJECT_ENDING = ".csv" + + /** + * This test really just wants to validate that: + * + * * we're dumping into the correct directory (fake-bucketPath/fake_namespace/fake_stream) + * and that the filename contains the upload time + * * each S3CsvWriter generates a unique filename suffix (the UUID) so that they don't + * overwrite each other + * * we generate a .csv extension + * + * So the UUID check isn't strictly necessary. + * + * Eventually the output path generator should probably be injected into the S3CsvWriter + * (and we would test the generator directly + test that the writer calls the generator) + */ + private fun checkObjectName(objectName: String) { + val errorMessage = "Object was actually $objectName" + + Assertions.assertTrue(objectName.startsWith(EXPECTED_OBJECT_BEGINNING), errorMessage) + Assertions.assertTrue(objectName.endsWith(EXPECTED_OBJECT_ENDING), errorMessage) + + // Remove the beginning and ending, which _should_ leave us with just a UUID + val uuidMaybe = + objectName // "^" == start of string + .replaceFirst( + ("^" + EXPECTED_OBJECT_BEGINNING).toRegex(), + "" + ) // "$" == end of string + .replaceFirst((EXPECTED_OBJECT_ENDING + "$").toRegex(), "") + Assertions.assertDoesNotThrow({ UUID.fromString(uuidMaybe) }, errorMessage) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/jsonl/JsonLSerializedBufferTest.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/jsonl/JsonLSerializedBufferTest.kt new file mode 100644 index 0000000000000..6af4254ed0f70 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/jsonl/JsonLSerializedBufferTest.kt @@ -0,0 +1,116 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.s3.jsonl + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.destination.record_buffer.BufferStorage +import io.airbyte.cdk.integrations.destination.record_buffer.FileBuffer +import io.airbyte.cdk.integrations.destination.record_buffer.InMemoryBuffer +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.Field +import io.airbyte.protocol.models.JsonSchemaType +import io.airbyte.protocol.models.v0.AirbyteRecordMessage +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair +import io.airbyte.protocol.models.v0.CatalogHelpers +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import java.io.InputStream +import java.nio.charset.StandardCharsets +import java.util.zip.GZIPInputStream +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertFalse +import org.junit.jupiter.api.Assertions.assertTrue +import org.junit.jupiter.api.Test + +class JsonLSerializedBufferTest { + + companion object { + private val MESSAGE_DATA: JsonNode = + Jsons.jsonNode( + mapOf( + "field1" to 10000, + "column2" to "string value", + "another field" to true, + "nested_column" to mapOf("array_column" to listOf(1, 2, 3)), + ), + ) + private const val STREAM = "stream1" + private val streamPair = AirbyteStreamNameNamespacePair(STREAM, null) + private val message: AirbyteRecordMessage = + AirbyteRecordMessage() + .withStream(STREAM) + .withData(MESSAGE_DATA) + .withEmittedAt(System.currentTimeMillis()) + private val FIELDS: List = + listOf( + Field.of("field1", JsonSchemaType.NUMBER), + Field.of("column2", JsonSchemaType.STRING), + Field.of("another field", JsonSchemaType.BOOLEAN), + Field.of("nested_column", JsonSchemaType.OBJECT), + ) + private val catalog: ConfiguredAirbyteCatalog = + CatalogHelpers.createConfiguredAirbyteCatalog(STREAM, null, FIELDS) + private const val JSON_FILE_EXTENSION = ".jsonl" + } + + @Test + @Throws(Exception::class) + internal fun testUncompressedJsonLFormatWriter() { + runTest(InMemoryBuffer(JSON_FILE_EXTENSION), false, 425L, 435L, getExpectedString()) + } + + @Test + @Throws(Exception::class) + internal fun testCompressedJsonLWriter() { + runTest(FileBuffer(JSON_FILE_EXTENSION), true, 205L, 215L, getExpectedString()) + } + + private fun getExpectedString(): String { + return Jsons.serialize(MESSAGE_DATA) + } + + @Throws(Exception::class) + private fun runTest( + buffer: BufferStorage, + withCompression: Boolean, + minExpectedByte: Long, + maxExpectedByte: Long, + expectedData: String + ) { + val outputFile = buffer.file + (JsonLSerializedBuffer.createBufferFunction(null) { buffer } + .apply( + streamPair, + catalog, + ) as JsonLSerializedBuffer) + .use { writer -> + writer.withCompression(withCompression) + writer.accept(message) + writer.accept(message) + writer.flush() + // some data are randomized (uuid, timestamp, compression?) so the expected byte + // count is not always + // deterministic + assertTrue( + writer.byteCount in minExpectedByte..maxExpectedByte, + "Expected size between $minExpectedByte and $maxExpectedByte, but actual size was ${writer.byteCount}" + ) + val inputStream: InputStream = + if (withCompression) { + GZIPInputStream(writer.inputStream) + } else { + writer.inputStream!! + } + val actualData = + Jsons.deserialize(String(inputStream.readAllBytes(), StandardCharsets.UTF_8)) + assertEquals( + expectedData, + Jsons.serialize( + actualData["_airbyte_data"], + ), + ) + } + assertFalse(outputFile.exists()) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/jsonl/S3JsonlFormatConfigTest.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/jsonl/S3JsonlFormatConfigTest.kt new file mode 100644 index 0000000000000..f8828f8a6521e --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/jsonl/S3JsonlFormatConfigTest.kt @@ -0,0 +1,75 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.jsonl + +import com.amazonaws.services.s3.internal.Constants +import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfig.Companion.getS3DestinationConfig +import io.airbyte.cdk.integrations.destination.s3.util.ConfigTestUtils +import io.airbyte.cdk.integrations.destination.s3.util.Flattening +import io.airbyte.cdk.integrations.destination.s3.util.Flattening.Companion.fromValue +import io.airbyte.cdk.integrations.destination.s3.util.StreamTransferManagerFactory +import io.airbyte.commons.json.Jsons.deserialize +import org.apache.commons.lang3.reflect.FieldUtils +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +// S3JsonlFormatConfig +class S3JsonlFormatConfigTest { + @Test // Flattening enums can be created from value string + fun testFlatteningCreationFromString() { + Assertions.assertEquals(Flattening.NO, fromValue("no flattening")) + Assertions.assertEquals(Flattening.ROOT_LEVEL, fromValue("root level flattening")) + try { + fromValue("invalid flattening value") + } catch (e: Exception) { + Assertions.assertTrue(e is IllegalArgumentException) + } + } + + @Test + @Throws(IllegalAccessException::class) + fun testHandlePartSizeConfig() { + val config = ConfigTestUtils.getBaseConfig(deserialize("""{ + "format_type": "JSONL" +}""")) + + val s3DestinationConfig = getS3DestinationConfig(config!!) + ConfigTestUtils.assertBaseConfig(s3DestinationConfig) + + val formatConfig = s3DestinationConfig.formatConfig + Assertions.assertEquals("JSONL", formatConfig!!.format.name) + + // Assert that is set properly in config + val streamTransferManager = + StreamTransferManagerFactory.create(s3DestinationConfig.bucketName, "objectKey", null) + .get() + + val partSizeBytes = FieldUtils.readField(streamTransferManager, "partSize", true) as Int + Assertions.assertEquals( + Constants.MB * StreamTransferManagerFactory.DEFAULT_PART_SIZE_MB, + partSizeBytes + ) + } + + @Test + @Throws(IllegalAccessException::class) + fun testHandleAbsenceOfPartSizeConfig() { + val config = ConfigTestUtils.getBaseConfig(deserialize("""{ + "format_type": "JSONL" +}""")) + + val s3DestinationConfig = getS3DestinationConfig(config!!) + ConfigTestUtils.assertBaseConfig(s3DestinationConfig) + + val streamTransferManager = + StreamTransferManagerFactory.create(s3DestinationConfig.bucketName, "objectKey", null) + .get() + + val partSizeBytes = FieldUtils.readField(streamTransferManager, "partSize", true) as Int + Assertions.assertEquals( + Constants.MB * StreamTransferManagerFactory.DEFAULT_PART_SIZE_MB, + partSizeBytes + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/parquet/ParquetSerializedBufferTest.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/parquet/ParquetSerializedBufferTest.kt new file mode 100644 index 0000000000000..c13f44a96a879 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/parquet/ParquetSerializedBufferTest.kt @@ -0,0 +1,296 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.s3.parquet + +import com.amazonaws.util.IOUtils +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.base.DestinationConfig +import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfig +import io.airbyte.cdk.integrations.destination.s3.util.JavaProcessRunner +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.Field +import io.airbyte.protocol.models.JsonSchemaPrimitiveUtil +import io.airbyte.protocol.models.JsonSchemaType +import io.airbyte.protocol.models.v0.AirbyteRecordMessage +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair +import io.airbyte.protocol.models.v0.CatalogHelpers +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import java.io.FileOutputStream +import java.io.InputStream +import java.nio.file.Files +import java.util.UUID +import java.util.stream.Stream +import org.apache.avro.generic.GenericData +import org.apache.hadoop.conf.Configuration +import org.apache.hadoop.fs.Path +import org.apache.parquet.avro.AvroReadSupport +import org.apache.parquet.hadoop.ParquetReader +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertTrue +import org.junit.jupiter.api.BeforeAll +import org.junit.jupiter.api.Test + +class ParquetSerializedBufferTest { + + companion object { + private val MESSAGE_DATA: JsonNode = + Jsons.jsonNode( + mapOf( + "field1" to 10000, + "column2" to "string value", + "another field" to true, + "nested_column" to mapOf("array_column" to listOf(1, 2, 3)), + "string_array_column" to Stream.of("test_string", null).toList(), + "datetime_with_timezone" to "2022-05-12T15:35:44.192950Z", + ), + ) + private const val STREAM = "stream1" + private val streamPair = AirbyteStreamNameNamespacePair(STREAM, null) + private val message: AirbyteRecordMessage = + AirbyteRecordMessage() + .withStream(STREAM) + .withData(MESSAGE_DATA) + .withEmittedAt(System.currentTimeMillis()) + private val FIELDS: List = + listOf( + Field.of("field1", JsonSchemaType.NUMBER), + Field.of("column2", JsonSchemaType.STRING), + Field.of("another field", JsonSchemaType.BOOLEAN), + Field.of("nested_column", JsonSchemaType.OBJECT), + Field.of( + "string_array_column", + JsonSchemaType.builder(JsonSchemaPrimitiveUtil.JsonSchemaPrimitive.ARRAY) + .withItems(JsonSchemaType.STRING) + .build(), + ), + Field.of("datetime_with_timezone", JsonSchemaType.STRING_TIMESTAMP_WITH_TIMEZONE), + ) + private val catalog: ConfiguredAirbyteCatalog = + CatalogHelpers.createConfiguredAirbyteCatalog(STREAM, null, FIELDS) + + @JvmStatic + @BeforeAll + internal fun setup() { + DestinationConfig.initialize(Jsons.deserialize("{}")) + } + } + + @Test + @Throws(Exception::class) + internal fun testUncompressedParquetWriter() { + val config = + S3DestinationConfig.getS3DestinationConfig( + Jsons.jsonNode( + mapOf( + "format" to + mapOf( + "format_type" to "parquet", + ), + "s3_bucket_name" to "test", + "s3_bucket_region" to "us-east-2", + ), + ), + ) + runTest(225L, 245L, config, getExpectedString()) + } + + @Test + @Throws(Exception::class) + internal fun testCompressedParquetWriter() { + val config = + S3DestinationConfig.getS3DestinationConfig( + Jsons.jsonNode( + mapOf( + "format" to + mapOf( + "format_type" to "parquet", + "compression_codec" to "GZIP", + ), + "s3_bucket_name" to "test", + "s3_bucket_region" to "us-east-2", + ), + ), + ) + // TODO: Compressed parquet is the same size as uncompressed?? + runTest(225L, 245L, config, getExpectedString()) + } + + private fun resolveArchitecture(): String { + return System.getProperty("os.name") + .replace( + ' ', + '_', + ) + + "-" + + System.getProperty("os.arch") + + "-" + + System.getProperty("sun.arch.data.model") + } + + @Test + @Throws(Exception::class) + internal fun testLzoCompressedParquet() { + val currentDir = System.getProperty("user.dir") + val runtime = Runtime.getRuntime() + val architecture = resolveArchitecture() + if ((architecture == "Linux-amd64-64") || architecture == "Linux-x86_64-64") { + JavaProcessRunner.runProcess(currentDir, runtime, "/bin/sh", "-c", "apt-get update") + JavaProcessRunner.runProcess( + currentDir, + runtime, + "/bin/sh", + "-c", + "apt-get install lzop liblzo2-2 liblzo2-dev -y", + ) + runLzoParquetTest() + } else if ((architecture == "Linux-aarch64-64") || architecture == "Linux-arm64-64") { + JavaProcessRunner.runProcess(currentDir, runtime, "/bin/sh", "-c", "apt-get update") + JavaProcessRunner.runProcess( + currentDir, + runtime, + "/bin/sh", + "-c", + "apt-get install lzop liblzo2-2 liblzo2-dev " + + "wget curl unzip zip build-essential maven git -y", + ) + JavaProcessRunner.runProcess( + currentDir, + runtime, + "/bin/sh", + "-c", + "wget https://www.oberhumer.com/opensource/lzo/download/lzo-2.10.tar.gz -P /usr/local/tmp", + ) + JavaProcessRunner.runProcess( + "/usr/local/tmp/", + runtime, + "/bin/sh", + "-c", + "tar xvfz lzo-2.10.tar.gz", + ) + JavaProcessRunner.runProcess( + "/usr/local/tmp/lzo-2.10/", + runtime, + "/bin/sh", + "-c", + "./configure --enable-shared --prefix /usr/local/lzo-2.10", + ) + JavaProcessRunner.runProcess( + "/usr/local/tmp/lzo-2.10/", + runtime, + "/bin/sh", + "-c", + "make && make install", + ) + JavaProcessRunner.runProcess( + currentDir, + runtime, + "/bin/sh", + "-c", + "git clone https://github.com/twitter/hadoop-lzo.git /usr/lib/hadoop/lib/hadoop-lzo/", + ) + JavaProcessRunner.runProcess( + currentDir, + runtime, + "/bin/sh", + "-c", + "curl -s https://get.sdkman.io | bash", + ) + JavaProcessRunner.runProcess( + currentDir, + runtime, + "/bin/bash", + "-c", + "source /root/.sdkman/bin/sdkman-init.sh;" + + " sdk install java 8.0.342-librca;" + + " sdk use java 8.0.342-librca;" + + " cd /usr/lib/hadoop/lib/hadoop-lzo/ " + + "&& C_INCLUDE_PATH=/usr/local/lzo-2.10/include " + + "LIBRARY_PATH=/usr/local/lzo-2.10/lib mvn clean package", + ) + JavaProcessRunner.runProcess( + currentDir, + runtime, + "/bin/sh", + "-c", + "find /usr/lib/hadoop/lib/hadoop-lzo/ -name '*libgplcompression*' -exec cp {} /usr/lib/ \\;", + ) + runLzoParquetTest() + } + } + + @Throws(Exception::class) + private fun runLzoParquetTest() { + val config = + S3DestinationConfig.getS3DestinationConfig( + Jsons.jsonNode( + mapOf( + "format" to + mapOf( + "format_type" to "parquet", + "compression_codec" to "LZO", + ), + "s3_bucket_name" to "test", + "s3_bucket_region" to "us-east-2", + ), + ), + ) + runTest(225L, 245L, config, getExpectedString()) + } + + private fun getExpectedString(): String { + return ("{\"_airbyte_ab_id\": \"\", \"_airbyte_emitted_at\": \"\", " + + "\"field1\": 10000.0, \"another_field\": true, " + + "\"nested_column\": {\"_airbyte_additional_properties\": {\"array_column\": \"[1,2,3]\"}}, " + + "\"column2\": \"string value\", " + + "\"string_array_column\": [\"test_string\", null], " + + "\"datetime_with_timezone\": 1652369744192000, " + + "\"_airbyte_additional_properties\": null}") + } + + @Throws(Exception::class) + @Suppress("DEPRECATION") + private fun runTest( + minExpectedByte: Long, + maxExpectedByte: Long, + config: S3DestinationConfig, + expectedData: String + ) { + val tempFile = Files.createTempFile(UUID.randomUUID().toString(), ".parquet").toFile() + try { + ParquetSerializedBuffer.createFunction(config).apply(streamPair, catalog).use { writer + -> + writer!!.accept(message) + writer.accept(message) + writer.flush() + // some data are randomized (uuid, timestamp, compression?) so the expected byte + // count is not always + // deterministic + assertTrue( + writer.byteCount in minExpectedByte..maxExpectedByte, + "Expected size between $minExpectedByte and $maxExpectedByte, but actual size was ${writer.byteCount}", + ) + val `in`: InputStream = writer.inputStream!! + FileOutputStream(tempFile).use { outFile -> IOUtils.copy(`in`, outFile) } + ParquetReader.builder( + AvroReadSupport(), + Path(tempFile.absolutePath), + ) + .withConf(Configuration()) + .build() + .use { parquetReader -> + var record: GenericData.Record? = null + while ((parquetReader.read()?.also { record = it }) != null) { + record?.put("_airbyte_ab_id", "") + record?.put("_airbyte_emitted_at", "") + val actualData: String = record.toString() + assertEquals(expectedData, actualData) + } + } + } + } finally { + Files.deleteIfExists(tempFile.toPath()) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/parquet/S3ParquetFormatConfigTest.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/parquet/S3ParquetFormatConfigTest.kt new file mode 100644 index 0000000000000..b6a8f80229f53 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/parquet/S3ParquetFormatConfigTest.kt @@ -0,0 +1,37 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.parquet + +import io.airbyte.commons.json.Jsons.deserialize +import org.apache.parquet.hadoop.metadata.CompressionCodecName +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class S3ParquetFormatConfigTest { + @Test + fun testConfigConstruction() { + val formatConfig = + deserialize( + "{\n" + + "\t\"compression_codec\": \"GZIP\",\n" + + "\t\"block_size_mb\": 1,\n" + + "\t\"max_padding_size_mb\": 1,\n" + + "\t\"page_size_kb\": 1,\n" + + "\t\"dictionary_page_size_kb\": 1,\n" + + "\t\"dictionary_encoding\": false\n" + + "}" + ) + + val config = S3ParquetFormatConfig(formatConfig) + + // The constructor should automatically convert MB or KB to bytes. + Assertions.assertEquals(1024 * 1024, config.blockSize) + Assertions.assertEquals(1024 * 1024, config.maxPaddingSize) + Assertions.assertEquals(1024, config.pageSize) + Assertions.assertEquals(1024, config.dictionaryPageSize) + + Assertions.assertEquals(CompressionCodecName.GZIP, config.compressionCodec) + Assertions.assertFalse(config.isDictionaryEncoding) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/template/S3FilenameTemplateManagerTest.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/template/S3FilenameTemplateManagerTest.kt new file mode 100644 index 0000000000000..bcb4f7197239f --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/template/S3FilenameTemplateManagerTest.kt @@ -0,0 +1,63 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.destination.s3.template + +import io.airbyte.cdk.integrations.destination.s3.S3DestinationConstants +import java.io.IOException +import java.text.DateFormat +import java.text.SimpleDateFormat +import java.time.Instant +import java.util.TimeZone +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Test + +class S3FilenameTemplateManagerTest { + + private val s3FilenameTemplateManager = S3FilenameTemplateManager() + + @Test + @Throws(IOException::class) + internal fun testDatePlaceholder() { + val fileNamePattern = "test-{date}" + val fileExtension = "csv" + val partId = "1" + + val actual: String = + s3FilenameTemplateManager.applyPatternToFilename( + S3FilenameTemplateParameterObject.builder() + .objectPath("") + .fileNamePattern(fileNamePattern) + .fileExtension(fileExtension) + .partId(partId) + .build(), + ) + + val defaultDateFormat: DateFormat = + SimpleDateFormat(S3DestinationConstants.YYYY_MM_DD_FORMAT_STRING) + defaultDateFormat.timeZone = TimeZone.getTimeZone("UTC") + + val currentTimeInMillis = Instant.now().toEpochMilli() + + val expected = "test-${defaultDateFormat.format(currentTimeInMillis)}" + assertEquals(expected, actual) + } + + @Test + @Throws(IOException::class) + internal fun testIfFilenameTemplateStringWasSanitized() { + val fileNamePattern = " te st.csv " + val actual = + s3FilenameTemplateManager.applyPatternToFilename( + S3FilenameTemplateParameterObject.builder() + .objectPath("") + .fileNamePattern(fileNamePattern) + .fileExtension("csv") + .partId("1") + .build(), + ) + + assertEquals("te__st.csv", actual) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/util/CompressionTypeHelperTest.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/util/CompressionTypeHelperTest.kt new file mode 100644 index 0000000000000..505aeeb484688 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/util/CompressionTypeHelperTest.kt @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.util + +import io.airbyte.cdk.integrations.destination.s3.S3DestinationConstants +import io.airbyte.cdk.integrations.destination.s3.util.CompressionTypeHelper.parseCompressionType +import io.airbyte.commons.json.Jsons.jsonNode +import java.util.Map +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class CompressionTypeHelperTest { + @Test + fun testGetCompressionType() { + Assertions.assertEquals( + S3DestinationConstants.DEFAULT_COMPRESSION_TYPE, + parseCompressionType(null) + ) + + Assertions.assertEquals( + CompressionType.NO_COMPRESSION, + parseCompressionType(jsonNode(Map.of("compression_type", "No Compression"))) + ) + + Assertions.assertEquals( + CompressionType.GZIP, + parseCompressionType(jsonNode(Map.of("compression_type", "GZIP"))) + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/util/ConfigTestUtils.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/util/ConfigTestUtils.kt new file mode 100644 index 0000000000000..ecc2c30627131 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/util/ConfigTestUtils.kt @@ -0,0 +1,35 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.util + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfig +import io.airbyte.cdk.integrations.destination.s3.credential.S3AccessKeyCredentialConfig +import io.airbyte.commons.json.Jsons.deserialize +import org.junit.jupiter.api.Assertions + +object ConfigTestUtils { + fun getBaseConfig(formatConfig: JsonNode): JsonNode { + return deserialize( + """{ + "s3_endpoint": "some_test-endpoint", + "s3_bucket_name": "test-bucket-name", + "s3_bucket_path": "test_path", + "s3_bucket_region": "us-east-2", + "access_key_id": "some-test-key-id", + "secret_access_key": "some-test-access-key", + "format": $formatConfig}""" + ) + } + + fun assertBaseConfig(s3DestinationConfig: S3DestinationConfig) { + Assertions.assertEquals("some_test-endpoint", s3DestinationConfig.endpoint) + Assertions.assertEquals("test-bucket-name", s3DestinationConfig.bucketName) + Assertions.assertEquals("test_path", s3DestinationConfig.bucketPath) + Assertions.assertEquals("us-east-2", s3DestinationConfig.bucketRegion) + val credentialConfig = s3DestinationConfig.s3CredentialConfig as S3AccessKeyCredentialConfig + Assertions.assertEquals("some-test-key-id", credentialConfig.accessKeyId) + Assertions.assertEquals("some-test-access-key", credentialConfig.secretAccessKey) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/util/S3OutputPathHelperTest.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/util/S3OutputPathHelperTest.kt new file mode 100644 index 0000000000000..9137feab1d1ed --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/util/S3OutputPathHelperTest.kt @@ -0,0 +1,87 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.util + +import com.google.common.collect.Lists +import io.airbyte.cdk.integrations.destination.s3.util.S3OutputPathHelper.getOutputPrefix +import io.airbyte.protocol.models.v0.AirbyteStream +import io.airbyte.protocol.models.v0.SyncMode +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class S3OutputPathHelperTest { + @Test // getOutputPrefix + fun testGetOutputPrefix() { + // No namespace + Assertions.assertEquals( + "bucket_path/stream_name", + getOutputPrefix( + "bucket_path", + AirbyteStream() + .withName("stream_name") + .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH)) + ) + ) + + // With namespace + Assertions.assertEquals( + "bucket_path/namespace/stream_name", + getOutputPrefix( + "bucket_path", + AirbyteStream() + .withNamespace("namespace") + .withName("stream_name") + .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH)) + ) + ) + + // With empty namespace + Assertions.assertEquals( + "bucket_path/stream_name", + getOutputPrefix( + "bucket_path", + AirbyteStream() + .withNamespace("") + .withName("stream_name") + .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH)) + ) + ) + + // With namespace with slash chart in the end + Assertions.assertEquals( + "bucket_path/namespace/stream_name", + getOutputPrefix( + "bucket_path", + AirbyteStream() + .withNamespace("namespace/") + .withName("stream_name") + .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH)) + ) + ) + + // With namespace with slash chart in the name + Assertions.assertEquals( + "bucket_path/namespace/subfolder/stream_name", + getOutputPrefix( + "bucket_path", + AirbyteStream() + .withNamespace("namespace/subfolder/") + .withName("stream_name") + .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH)) + ) + ) + + // With an AWS Glue crawler + Assertions.assertEquals( + "bucket_path/namespace/date=2022-03-15", + getOutputPrefix( + "bucket_path", + AirbyteStream() + .withNamespace("namespace") + .withName("date=2022-03-15") + .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH)) + ) + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/writer/BaseS3WriterTest.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/writer/BaseS3WriterTest.kt new file mode 100644 index 0000000000000..a8af88fc51020 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/writer/BaseS3WriterTest.kt @@ -0,0 +1,24 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3.writer + +import io.airbyte.cdk.integrations.destination.s3.S3Format +import io.airbyte.cdk.integrations.destination.s3.template.S3FilenameTemplateParameterObject.Companion.builder +import io.airbyte.cdk.integrations.destination.s3.writer.BaseS3Writer.Companion.determineOutputFilename +import java.io.IOException +import java.sql.Timestamp +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +internal class BaseS3WriterTest { + @Test + @Throws(IOException::class) + fun testGetOutputFilename() { + val timestamp = Timestamp(1471461319000L) + Assertions.assertEquals( + "2016_08_17_1471461319000_0.csv", + determineOutputFilename(builder().s3Format(S3Format.CSV).timestamp(timestamp).build()) + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/s3/S3AvroParquetDestinationAcceptanceTest.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/s3/S3AvroParquetDestinationAcceptanceTest.java deleted file mode 100644 index 42e209811c4bb..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/s3/S3AvroParquetDestinationAcceptanceTest.java +++ /dev/null @@ -1,145 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.destination.s3.avro.JsonSchemaType; -import io.airbyte.cdk.integrations.standardtest.destination.argproviders.NumberDataTypeTestArgumentProvider; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.protocol.models.v0.AirbyteCatalog; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteStream; -import io.airbyte.protocol.models.v0.CatalogHelpers; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.io.IOException; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; -import java.util.function.Function; -import java.util.stream.Collectors; -import java.util.stream.StreamSupport; -import org.apache.avro.Schema; -import org.apache.avro.Schema.Field; -import org.apache.avro.Schema.Type; -import org.apache.avro.generic.GenericData.Record; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.ArgumentsSource; - -public abstract class S3AvroParquetDestinationAcceptanceTest extends S3DestinationAcceptanceTest { - - protected S3AvroParquetDestinationAcceptanceTest(S3Format s3Format) { - super(s3Format); - } - - @ParameterizedTest - @ArgumentsSource(NumberDataTypeTestArgumentProvider.class) - public void testNumberDataType(String catalogFileName, String messagesFileName) throws Exception { - final AirbyteCatalog catalog = readCatalogFromFile(catalogFileName); - final List messages = readMessagesFromFile(messagesFileName); - - final JsonNode config = getConfig(); - final String defaultSchema = getDefaultSchema(config); - final ConfiguredAirbyteCatalog configuredCatalog = CatalogHelpers.toDefaultConfiguredCatalog(catalog); - runSyncAndVerifyStateOutput(config, messages, configuredCatalog, false); - - for (final AirbyteStream stream : catalog.getStreams()) { - final String streamName = stream.getName(); - final String schema = stream.getNamespace() != null ? stream.getNamespace() : defaultSchema; - - Map> actualSchemaTypes = retrieveDataTypesFromPersistedFiles(streamName, schema); - Map> expectedSchemaTypes = retrieveExpectedDataTypes(stream); - - assertEquals(expectedSchemaTypes, actualSchemaTypes); - } - } - - private Map> retrieveExpectedDataTypes(AirbyteStream stream) { - Iterable iterableNames = () -> stream.getJsonSchema().get("properties").fieldNames(); - Map nameToNode = StreamSupport.stream(iterableNames.spliterator(), false) - .collect(Collectors.toMap( - Function.identity(), - name -> getJsonNode(stream, name))); - - return nameToNode - .entrySet() - .stream() - .collect(Collectors.toMap( - Entry::getKey, - entry -> getExpectedSchemaType(entry.getValue()))); - } - - private JsonNode getJsonNode(AirbyteStream stream, String name) { - JsonNode properties = stream.getJsonSchema().get("properties"); - if (properties.size() == 1) { - return properties.get("data"); - } - return properties.get(name).get("items"); - } - - private Set getExpectedSchemaType(JsonNode fieldDefinition) { - final JsonNode typeProperty = fieldDefinition.get("type") == null ? fieldDefinition.get("$ref") : fieldDefinition.get("type"); - final JsonNode airbyteTypeProperty = fieldDefinition.get("airbyte_type"); - final String airbyteTypePropertyText = airbyteTypeProperty == null ? null : airbyteTypeProperty.asText(); - return Arrays.stream(JsonSchemaType.values()) - .filter( - value -> value.getJsonSchemaType().equals(typeProperty.asText()) && compareAirbyteTypes(airbyteTypePropertyText, value)) - .map(JsonSchemaType::getAvroType) - .collect(Collectors.toSet()); - } - - private boolean compareAirbyteTypes(String airbyteTypePropertyText, JsonSchemaType value) { - if (airbyteTypePropertyText == null) { - return value.getJsonSchemaAirbyteType() == null; - } - return airbyteTypePropertyText.equals(value.getJsonSchemaAirbyteType()); - } - - private AirbyteCatalog readCatalogFromFile(final String catalogFilename) throws IOException { - return Jsons.deserialize(MoreResources.readResource(catalogFilename), AirbyteCatalog.class); - } - - private List readMessagesFromFile(final String messagesFilename) throws IOException { - return MoreResources.readResource(messagesFilename).lines() - .map(record -> Jsons.deserialize(record, AirbyteMessage.class)).collect(Collectors.toList()); - } - - protected abstract Map> retrieveDataTypesFromPersistedFiles(final String streamName, final String namespace) throws Exception; - - protected Map> getTypes(Record record) { - - List fieldList = record - .getSchema() - .getFields() - .stream() - .filter(field -> !field.name().startsWith("_airbyte")) - .toList(); - - if (fieldList.size() == 1) { - return fieldList - .stream() - .collect( - Collectors.toMap( - Field::name, - field -> field.schema().getTypes().stream().map(Schema::getType).filter(type -> !type.equals(Type.NULL)) - .collect(Collectors.toSet()))); - } else { - return fieldList - .stream() - .collect( - Collectors.toMap( - Field::name, - field -> field.schema().getTypes() - .stream().filter(type -> !type.getType().equals(Type.NULL)) - .flatMap(type -> type.getElementType().getTypes().stream()).map(Schema::getType).filter(type -> !type.equals(Type.NULL)) - .collect(Collectors.toSet()))); - } - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/s3/S3AvroParquetTestDataComparator.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/s3/S3AvroParquetTestDataComparator.java deleted file mode 100644 index 3cbbc4bc433fa..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/s3/S3AvroParquetTestDataComparator.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3; - -import io.airbyte.cdk.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; -import java.time.Instant; -import java.time.LocalDate; -import java.time.LocalDateTime; -import java.time.ZoneOffset; -import java.time.ZonedDateTime; -import java.time.format.DateTimeFormatter; - -public class S3AvroParquetTestDataComparator extends AdvancedTestDataComparator { - - @Override - protected boolean compareDateValues(String airbyteMessageValue, String destinationValue) { - var destinationDate = LocalDate.ofEpochDay(Long.parseLong(destinationValue)); - var expectedDate = LocalDate.parse(airbyteMessageValue, DateTimeFormatter.ofPattern(AdvancedTestDataComparator.AIRBYTE_DATE_FORMAT)); - return expectedDate.equals(destinationDate); - } - - private Instant getInstantFromEpoch(String epochValue) { - return Instant.ofEpochMilli(Long.parseLong(epochValue) / 1000); - } - - @Override - protected ZonedDateTime parseDestinationDateWithTz(String destinationValue) { - return ZonedDateTime.ofInstant(getInstantFromEpoch(destinationValue), ZoneOffset.UTC); - } - - @Override - protected boolean compareDateTimeValues(String airbyteMessageValue, String destinationValue) { - var format = DateTimeFormatter.ofPattern(AdvancedTestDataComparator.AIRBYTE_DATETIME_FORMAT); - LocalDateTime dateTime = LocalDateTime.ofInstant(getInstantFromEpoch(destinationValue), ZoneOffset.UTC); - return super.compareDateTimeValues(airbyteMessageValue, format.format(dateTime)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/s3/S3BaseAvroDestinationAcceptanceTest.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/s3/S3BaseAvroDestinationAcceptanceTest.java deleted file mode 100644 index 0ba7ff5af30b4..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/s3/S3BaseAvroDestinationAcceptanceTest.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3; - -import com.amazonaws.services.s3.model.S3Object; -import com.amazonaws.services.s3.model.S3ObjectSummary; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectReader; -import io.airbyte.cdk.integrations.destination.s3.avro.AvroConstants; -import io.airbyte.cdk.integrations.destination.s3.avro.JsonFieldNameUpdater; -import io.airbyte.cdk.integrations.destination.s3.util.AvroRecordHelper; -import io.airbyte.cdk.integrations.standardtest.destination.comparator.TestDataComparator; -import io.airbyte.commons.json.Jsons; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Set; -import org.apache.avro.Schema.Type; -import org.apache.avro.file.DataFileReader; -import org.apache.avro.file.SeekableByteArrayInput; -import org.apache.avro.generic.GenericData; -import org.apache.avro.generic.GenericData.Record; -import org.apache.avro.generic.GenericDatumReader; - -public abstract class S3BaseAvroDestinationAcceptanceTest extends S3AvroParquetDestinationAcceptanceTest { - - protected S3BaseAvroDestinationAcceptanceTest() { - super(S3Format.AVRO); - } - - @Override - protected JsonNode getFormatConfig() { - return Jsons.jsonNode(Map.of( - "format_type", "Avro", - "compression_codec", Map.of( - "codec", "zstandard", - "compression_level", 5, - "include_checksum", true))); - } - - @Override - protected List retrieveRecords(final TestDestinationEnv testEnv, - final String streamName, - final String namespace, - final JsonNode streamSchema) - throws Exception { - final JsonFieldNameUpdater nameUpdater = AvroRecordHelper.getFieldNameUpdater(streamName, namespace, streamSchema); - - final List objectSummaries = getAllSyncedObjects(streamName, namespace); - final List jsonRecords = new LinkedList<>(); - - for (final S3ObjectSummary objectSummary : objectSummaries) { - final S3Object object = s3Client.getObject(objectSummary.getBucketName(), objectSummary.getKey()); - try (final DataFileReader dataFileReader = new DataFileReader<>( - new SeekableByteArrayInput(object.getObjectContent().readAllBytes()), - new GenericDatumReader<>())) { - final ObjectReader jsonReader = MAPPER.reader(); - while (dataFileReader.hasNext()) { - final GenericData.Record record = dataFileReader.next(); - final byte[] jsonBytes = AvroConstants.JSON_CONVERTER.convertToJson(record); - JsonNode jsonRecord = jsonReader.readTree(jsonBytes); - jsonRecord = nameUpdater.getJsonWithOriginalFieldNames(jsonRecord); - jsonRecords.add(AvroRecordHelper.pruneAirbyteJson(jsonRecord)); - } - } - } - - return jsonRecords; - } - - @Override - protected TestDataComparator getTestDataComparator() { - return new S3AvroParquetTestDataComparator(); - } - - @Override - protected Map> retrieveDataTypesFromPersistedFiles(final String streamName, final String namespace) throws Exception { - - final List objectSummaries = getAllSyncedObjects(streamName, namespace); - Map> resultDataTypes = new HashMap<>(); - - for (final S3ObjectSummary objectSummary : objectSummaries) { - final S3Object object = s3Client.getObject(objectSummary.getBucketName(), objectSummary.getKey()); - try (final DataFileReader dataFileReader = new DataFileReader<>( - new SeekableByteArrayInput(object.getObjectContent().readAllBytes()), - new GenericDatumReader<>())) { - while (dataFileReader.hasNext()) { - final GenericData.Record record = dataFileReader.next(); - Map> actualDataTypes = getTypes(record); - resultDataTypes.putAll(actualDataTypes); - } - } - } - return resultDataTypes; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/s3/S3BaseCsvDestinationAcceptanceTest.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/s3/S3BaseCsvDestinationAcceptanceTest.java deleted file mode 100644 index 55a01e4af9821..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/s3/S3BaseCsvDestinationAcceptanceTest.java +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3; - -import com.amazonaws.services.s3.model.S3Object; -import com.amazonaws.services.s3.model.S3ObjectSummary; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import io.airbyte.cdk.integrations.base.JavaBaseConstants; -import io.airbyte.cdk.integrations.destination.s3.util.Flattening; -import io.airbyte.commons.json.Jsons; -import java.io.IOException; -import java.io.InputStreamReader; -import java.io.Reader; -import java.nio.charset.StandardCharsets; -import java.util.HashMap; -import java.util.Iterator; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.stream.StreamSupport; -import org.apache.commons.csv.CSVFormat; -import org.apache.commons.csv.CSVRecord; -import org.apache.commons.csv.QuoteMode; - -public abstract class S3BaseCsvDestinationAcceptanceTest extends S3DestinationAcceptanceTest { - - public S3BaseCsvDestinationAcceptanceTest() { - super(S3Format.CSV); - } - - @Override - protected JsonNode getFormatConfig() { - return Jsons.jsonNode(Map.of( - "format_type", outputFormat, - "flattening", Flattening.ROOT_LEVEL.getValue(), - "compression", Jsons.jsonNode(Map.of("compression_type", "No Compression")))); - } - - /** - * Convert json_schema to a map from field name to field types. - */ - private static Map getFieldTypes(final JsonNode streamSchema) { - final Map fieldTypes = new HashMap<>(); - final JsonNode fieldDefinitions = streamSchema.get("properties"); - final Iterator> iterator = fieldDefinitions.fields(); - while (iterator.hasNext()) { - final Entry entry = iterator.next(); - JsonNode fieldValue = entry.getValue(); - JsonNode typeValue = fieldValue.get("type") == null ? fieldValue.get("$ref") : fieldValue.get("type"); - fieldTypes.put(entry.getKey(), typeValue.asText()); - } - return fieldTypes; - } - - private static JsonNode getJsonNode(final Map input, final Map fieldTypes) { - final ObjectNode json = MAPPER.createObjectNode(); - - if (input.containsKey(JavaBaseConstants.COLUMN_NAME_DATA)) { - return Jsons.deserialize(input.get(JavaBaseConstants.COLUMN_NAME_DATA)); - } - - for (final Entry entry : input.entrySet()) { - final String key = entry.getKey(); - if (key.equals(JavaBaseConstants.COLUMN_NAME_AB_ID) || key - .equals(JavaBaseConstants.COLUMN_NAME_EMITTED_AT)) { - continue; - } - final String value = entry.getValue(); - if (value == null || value.equals("")) { - continue; - } - final String type = fieldTypes.get(key); - switch (type) { - case "WellKnownTypes.json#/definitions/Boolean" -> json.put(key, Boolean.valueOf(value)); - case "WellKnownTypes.json#/definitions/Integer" -> json.put(key, Integer.valueOf(value)); - case "WellKnownTypes.json#/definitions/Number" -> json.put(key, Double.valueOf(value)); - case "boolean" -> json.put(key, Boolean.valueOf(value)); - case "integer" -> json.put(key, Integer.valueOf(value)); - case "number" -> json.put(key, Double.valueOf(value)); - case "" -> addNoTypeValue(json, key, value); - default -> json.put(key, value); - } - } - return json; - } - - private static void addNoTypeValue(final ObjectNode json, final String key, final String value) { - if (value != null && (value.matches("^\\[.*\\]$")) || value.matches("^\\{.*\\}$")) { - final var newNode = Jsons.deserialize(value); - json.set(key, newNode); - } else { - json.put(key, value); - } - } - - @Override - protected List retrieveRecords(final TestDestinationEnv testEnv, - final String streamName, - final String namespace, - final JsonNode streamSchema) - throws IOException { - final List objectSummaries = getAllSyncedObjects(streamName, namespace); - - final Map fieldTypes = getFieldTypes(streamSchema); - final List jsonRecords = new LinkedList<>(); - - for (final S3ObjectSummary objectSummary : objectSummaries) { - try (final S3Object object = s3Client.getObject(objectSummary.getBucketName(), objectSummary.getKey()); - final Reader in = getReader(object)) { - final Iterable records = CSVFormat.DEFAULT - .withQuoteMode(QuoteMode.NON_NUMERIC) - .withFirstRecordAsHeader() - .parse(in); - StreamSupport.stream(records.spliterator(), false) - .forEach(r -> jsonRecords.add(getJsonNode(r.toMap(), fieldTypes))); - } - } - - return jsonRecords; - } - - protected Reader getReader(final S3Object s3Object) throws IOException { - return new InputStreamReader(s3Object.getObjectContent(), StandardCharsets.UTF_8); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/s3/S3BaseCsvGzipDestinationAcceptanceTest.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/s3/S3BaseCsvGzipDestinationAcceptanceTest.java deleted file mode 100644 index 05117564a1f36..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/s3/S3BaseCsvGzipDestinationAcceptanceTest.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3; - -import com.amazonaws.services.s3.model.S3Object; -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.destination.s3.util.Flattening; -import io.airbyte.commons.json.Jsons; -import java.io.IOException; -import java.io.InputStreamReader; -import java.io.Reader; -import java.nio.charset.StandardCharsets; -import java.util.Map; -import java.util.zip.GZIPInputStream; - -public abstract class S3BaseCsvGzipDestinationAcceptanceTest extends S3BaseCsvDestinationAcceptanceTest { - - @Override - protected JsonNode getFormatConfig() { - // config without compression defaults to GZIP - return Jsons.jsonNode(Map.of( - "format_type", outputFormat, - "flattening", Flattening.ROOT_LEVEL.getValue())); - } - - protected Reader getReader(final S3Object s3Object) throws IOException { - return new InputStreamReader(new GZIPInputStream(s3Object.getObjectContent()), StandardCharsets.UTF_8); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/s3/S3BaseJsonlDestinationAcceptanceTest.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/s3/S3BaseJsonlDestinationAcceptanceTest.java deleted file mode 100644 index faa374bfedb27..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/s3/S3BaseJsonlDestinationAcceptanceTest.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3; - -import com.amazonaws.services.s3.model.S3Object; -import com.amazonaws.services.s3.model.S3ObjectSummary; -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.base.JavaBaseConstants; -import io.airbyte.cdk.integrations.destination.s3.util.Flattening; -import io.airbyte.commons.json.Jsons; -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStreamReader; -import java.nio.charset.StandardCharsets; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; - -public abstract class S3BaseJsonlDestinationAcceptanceTest extends S3DestinationAcceptanceTest { - - protected S3BaseJsonlDestinationAcceptanceTest() { - super(S3Format.JSONL); - } - - @Override - protected JsonNode getFormatConfig() { - return Jsons.jsonNode(Map.of( - "format_type", outputFormat, - "flattening", Flattening.NO.getValue(), - "compression", Jsons.jsonNode(Map.of("compression_type", "No Compression")))); - } - - @Override - protected List retrieveRecords(final TestDestinationEnv testEnv, - final String streamName, - final String namespace, - final JsonNode streamSchema) - throws IOException { - final List objectSummaries = getAllSyncedObjects(streamName, namespace); - final List jsonRecords = new LinkedList<>(); - - for (final S3ObjectSummary objectSummary : objectSummaries) { - final S3Object object = s3Client.getObject(objectSummary.getBucketName(), objectSummary.getKey()); - try (final BufferedReader reader = getReader(object)) { - String line; - while ((line = reader.readLine()) != null) { - jsonRecords.add(Jsons.deserialize(line).get(JavaBaseConstants.COLUMN_NAME_DATA)); - } - } - } - - return jsonRecords; - } - - protected BufferedReader getReader(final S3Object s3Object) throws IOException { - return new BufferedReader(new InputStreamReader(s3Object.getObjectContent(), StandardCharsets.UTF_8)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/s3/S3BaseJsonlGzipDestinationAcceptanceTest.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/s3/S3BaseJsonlGzipDestinationAcceptanceTest.java deleted file mode 100644 index 5a7689bdb69be..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/s3/S3BaseJsonlGzipDestinationAcceptanceTest.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3; - -import com.amazonaws.services.s3.model.S3Object; -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.destination.s3.util.Flattening; -import io.airbyte.commons.json.Jsons; -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStreamReader; -import java.nio.charset.StandardCharsets; -import java.util.Map; -import java.util.zip.GZIPInputStream; - -public abstract class S3BaseJsonlGzipDestinationAcceptanceTest extends S3BaseJsonlDestinationAcceptanceTest { - - @Override - protected JsonNode getFormatConfig() { - // config without compression defaults to GZIP - return Jsons.jsonNode(Map.of( - "format_type", outputFormat, - "flattening", Flattening.NO.getValue())); - } - - protected BufferedReader getReader(final S3Object s3Object) throws IOException { - return new BufferedReader(new InputStreamReader(new GZIPInputStream(s3Object.getObjectContent()), StandardCharsets.UTF_8)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/s3/S3BaseParquetDestinationAcceptanceTest.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/s3/S3BaseParquetDestinationAcceptanceTest.java deleted file mode 100644 index d8a88f9591c68..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/s3/S3BaseParquetDestinationAcceptanceTest.java +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3; - -import com.amazonaws.services.s3.model.S3Object; -import com.amazonaws.services.s3.model.S3ObjectSummary; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectReader; -import io.airbyte.cdk.integrations.destination.s3.avro.AvroConstants; -import io.airbyte.cdk.integrations.destination.s3.avro.JsonFieldNameUpdater; -import io.airbyte.cdk.integrations.destination.s3.parquet.S3ParquetWriter; -import io.airbyte.cdk.integrations.destination.s3.util.AvroRecordHelper; -import io.airbyte.cdk.integrations.standardtest.destination.comparator.TestDataComparator; -import io.airbyte.commons.json.Jsons; -import java.io.IOException; -import java.net.URI; -import java.net.URISyntaxException; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Set; -import org.apache.avro.Schema.Type; -import org.apache.avro.generic.GenericData; -import org.apache.avro.generic.GenericData.Record; -import org.apache.hadoop.conf.Configuration; -import org.apache.parquet.avro.AvroReadSupport; -import org.apache.parquet.hadoop.ParquetReader; - -public abstract class S3BaseParquetDestinationAcceptanceTest extends S3AvroParquetDestinationAcceptanceTest { - - protected S3BaseParquetDestinationAcceptanceTest() { - super(S3Format.PARQUET); - } - - @Override - protected JsonNode getFormatConfig() { - return Jsons.jsonNode(Map.of( - "format_type", "Parquet", - "compression_codec", "GZIP")); - } - - @Override - protected List retrieveRecords(final TestDestinationEnv testEnv, - final String streamName, - final String namespace, - final JsonNode streamSchema) - throws IOException, URISyntaxException { - final JsonFieldNameUpdater nameUpdater = AvroRecordHelper.getFieldNameUpdater(streamName, namespace, streamSchema); - - final List objectSummaries = getAllSyncedObjects(streamName, namespace); - final List jsonRecords = new LinkedList<>(); - - for (final S3ObjectSummary objectSummary : objectSummaries) { - final S3Object object = s3Client.getObject(objectSummary.getBucketName(), objectSummary.getKey()); - final URI uri = new URI(String.format("s3a://%s/%s", object.getBucketName(), object.getKey())); - final var path = new org.apache.hadoop.fs.Path(uri); - final Configuration hadoopConfig = S3ParquetWriter.getHadoopConfig(config); - - try (final ParquetReader parquetReader = ParquetReader.builder(new AvroReadSupport<>(), path) - .withConf(hadoopConfig) - .build()) { - final ObjectReader jsonReader = MAPPER.reader(); - GenericData.Record record; - while ((record = parquetReader.read()) != null) { - final byte[] jsonBytes = AvroConstants.JSON_CONVERTER.convertToJson(record); - JsonNode jsonRecord = jsonReader.readTree(jsonBytes); - jsonRecord = nameUpdater.getJsonWithOriginalFieldNames(jsonRecord); - jsonRecords.add(AvroRecordHelper.pruneAirbyteJson(jsonRecord)); - } - } - } - - return jsonRecords; - } - - @Override - protected TestDataComparator getTestDataComparator() { - return new S3AvroParquetTestDataComparator(); - } - - @Override - protected Map> retrieveDataTypesFromPersistedFiles(final String streamName, final String namespace) throws Exception { - - final List objectSummaries = getAllSyncedObjects(streamName, namespace); - final Map> resultDataTypes = new HashMap<>(); - - for (final S3ObjectSummary objectSummary : objectSummaries) { - final S3Object object = s3Client.getObject(objectSummary.getBucketName(), objectSummary.getKey()); - final URI uri = new URI(String.format("s3a://%s/%s", object.getBucketName(), object.getKey())); - final var path = new org.apache.hadoop.fs.Path(uri); - final Configuration hadoopConfig = S3ParquetWriter.getHadoopConfig(config); - - try (final ParquetReader parquetReader = ParquetReader.builder(new AvroReadSupport<>(), path) - .withConf(hadoopConfig) - .build()) { - GenericData.Record record; - while ((record = parquetReader.read()) != null) { - Map> actualDataTypes = getTypes(record); - resultDataTypes.putAll(actualDataTypes); - } - } - } - - return resultDataTypes; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/s3/S3DestinationAcceptanceTest.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/s3/S3DestinationAcceptanceTest.java deleted file mode 100644 index 7fd53f2aa5139..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/destination/s3/S3DestinationAcceptanceTest.java +++ /dev/null @@ -1,190 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.destination.s3; - -import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.model.DeleteObjectsRequest; -import com.amazonaws.services.s3.model.DeleteObjectsRequest.KeyVersion; -import com.amazonaws.services.s3.model.DeleteObjectsResult; -import com.amazonaws.services.s3.model.S3ObjectSummary; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.ObjectNode; -import io.airbyte.cdk.integrations.destination.NamingConventionTransformer; -import io.airbyte.cdk.integrations.destination.s3.util.S3NameTransformer; -import io.airbyte.cdk.integrations.standardtest.destination.DestinationAcceptanceTest; -import io.airbyte.cdk.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; -import io.airbyte.cdk.integrations.standardtest.destination.comparator.TestDataComparator; -import io.airbyte.commons.io.IOs; -import io.airbyte.commons.jackson.MoreMappers; -import io.airbyte.commons.json.Jsons; -import java.nio.file.Path; -import java.util.Comparator; -import java.util.HashSet; -import java.util.LinkedList; -import java.util.List; -import java.util.Locale; -import java.util.stream.Collectors; -import org.apache.commons.lang3.RandomStringUtils; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * When adding a new S3 destination acceptance test, extend this class and do the following: - *

  • Implement {@link #getFormatConfig} that returns a {@link S3FormatConfig}
  • - *
  • Implement {@link #retrieveRecords} that returns the Json records for the test
  • - * - * Under the hood, a {@link S3DestinationConfig} is constructed as follows: - *
  • Retrieve the secrets from "secrets/config.json"
  • - *
  • Get the S3 bucket path from the constructor
  • - *
  • Get the format config from {@link #getFormatConfig}
  • - */ -public abstract class S3DestinationAcceptanceTest extends DestinationAcceptanceTest { - - protected static final Logger LOGGER = LoggerFactory.getLogger(S3DestinationAcceptanceTest.class); - protected static final ObjectMapper MAPPER = MoreMappers.initMapper(); - - protected final String secretFilePath = "secrets/config.json"; - protected final S3Format outputFormat; - protected JsonNode configJson; - protected S3DestinationConfig config; - protected AmazonS3 s3Client; - protected NamingConventionTransformer nameTransformer; - protected S3StorageOperations s3StorageOperations; - - protected S3DestinationAcceptanceTest(final S3Format outputFormat) { - this.outputFormat = outputFormat; - } - - protected JsonNode getBaseConfigJson() { - return Jsons.deserialize(IOs.readFile(Path.of(secretFilePath))); - } - - @Override - protected JsonNode getConfig() { - return configJson; - } - - @Override - protected String getDefaultSchema(final JsonNode config) { - if (config.has("s3_bucket_path")) { - return config.get("s3_bucket_path").asText(); - } - return null; - } - - @Override - protected JsonNode getFailCheckConfig() { - final JsonNode baseJson = getBaseConfigJson(); - final JsonNode failCheckJson = Jsons.clone(baseJson); - // invalid credential - ((ObjectNode) failCheckJson).put("access_key_id", "fake-key"); - ((ObjectNode) failCheckJson).put("secret_access_key", "fake-secret"); - return failCheckJson; - } - - /** - * Helper method to retrieve all synced objects inside the configured bucket path. - */ - protected List getAllSyncedObjects(final String streamName, final String namespace) { - final String namespaceStr = nameTransformer.getNamespace(namespace); - final String streamNameStr = nameTransformer.getIdentifier(streamName); - final String outputPrefix = s3StorageOperations.getBucketObjectPath( - namespaceStr, - streamNameStr, - DateTime.now(DateTimeZone.UTC), - config.getPathFormat()); - // the child folder contains a non-deterministic epoch timestamp, so use the parent folder - final String parentFolder = outputPrefix.substring(0, outputPrefix.lastIndexOf("/") + 1); - final List objectSummaries = s3Client - .listObjects(config.getBucketName(), parentFolder) - .getObjectSummaries() - .stream() - .filter(o -> o.getKey().contains(streamNameStr + "/")) - .sorted(Comparator.comparingLong(o -> o.getLastModified().getTime())) - .collect(Collectors.toList()); - LOGGER.info( - "All objects: {}", - objectSummaries.stream().map(o -> String.format("%s/%s", o.getBucketName(), o.getKey())).collect(Collectors.toList())); - return objectSummaries; - } - - protected abstract JsonNode getFormatConfig(); - - /** - * This method does the following: - *
  • Construct the S3 destination config.
  • - *
  • Construct the S3 client.
  • - */ - @Override - protected void setup(final TestDestinationEnv testEnv, final HashSet TEST_SCHEMAS) { - final JsonNode baseConfigJson = getBaseConfigJson(); - // Set a random s3 bucket path for each integration test - final JsonNode configJson = Jsons.clone(baseConfigJson); - final String testBucketPath = String.format( - "%s_test_%s", - outputFormat.name().toLowerCase(Locale.ROOT), - RandomStringUtils.randomAlphanumeric(5)); - ((ObjectNode) configJson) - .put("s3_bucket_path", testBucketPath) - .set("format", getFormatConfig()); - this.configJson = configJson; - this.config = S3DestinationConfig.getS3DestinationConfig(configJson, storageProvider()); - LOGGER.info("Test full path: {}/{}", config.getBucketName(), config.getBucketPath()); - - this.s3Client = config.getS3Client(); - this.nameTransformer = new S3NameTransformer(); - this.s3StorageOperations = new S3StorageOperations(nameTransformer, s3Client, config); - } - - /** - * Remove all the S3 output from the tests. - */ - @Override - protected void tearDown(final TestDestinationEnv testEnv) { - final List keysToDelete = new LinkedList<>(); - final List objects = s3Client - .listObjects(config.getBucketName(), config.getBucketPath()) - .getObjectSummaries(); - for (final S3ObjectSummary object : objects) { - keysToDelete.add(new KeyVersion(object.getKey())); - } - - if (keysToDelete.size() > 0) { - LOGGER.info("Tearing down test bucket path: {}/{}", config.getBucketName(), - config.getBucketPath()); - final DeleteObjectsResult result = s3Client - .deleteObjects(new DeleteObjectsRequest(config.getBucketName()).withKeys(keysToDelete)); - LOGGER.info("Deleted {} file(s).", result.getDeletedObjects().size()); - } - } - - @Override - protected TestDataComparator getTestDataComparator() { - return new AdvancedTestDataComparator(); - } - - @Override - protected boolean supportBasicDataTypeTest() { - return true; - } - - @Override - protected boolean supportArrayDataTypeTest() { - return true; - } - - @Override - protected boolean supportObjectDataTypeTest() { - return true; - } - - public StorageProvider storageProvider() { - return StorageProvider.AWS_S3; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/s3/S3AvroParquetDestinationAcceptanceTest.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/s3/S3AvroParquetDestinationAcceptanceTest.kt new file mode 100644 index 0000000000000..80b77a392c80a --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/s3/S3AvroParquetDestinationAcceptanceTest.kt @@ -0,0 +1,168 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3 + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.destination.s3.avro.JsonSchemaType +import io.airbyte.cdk.integrations.standardtest.destination.argproviders.NumberDataTypeTestArgumentProvider +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.resources.MoreResources +import io.airbyte.protocol.models.v0.AirbyteCatalog +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.AirbyteStream +import io.airbyte.protocol.models.v0.CatalogHelpers +import java.io.IOException +import java.util.* +import java.util.function.Function +import java.util.stream.Collectors +import org.apache.avro.Schema +import org.apache.avro.generic.GenericData +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.params.ParameterizedTest +import org.junit.jupiter.params.provider.ArgumentsSource + +abstract class S3AvroParquetDestinationAcceptanceTest protected constructor(s3Format: S3Format) : + S3DestinationAcceptanceTest(s3Format) { + @ParameterizedTest + @ArgumentsSource(NumberDataTypeTestArgumentProvider::class) + @Throws(Exception::class) + fun testNumberDataType(catalogFileName: String, messagesFileName: String) { + val catalog = readCatalogFromFile(catalogFileName) + val messages = readMessagesFromFile(messagesFileName) + + val config = this.getConfig() + val defaultSchema = getDefaultSchema(config) + val configuredCatalog = CatalogHelpers.toDefaultConfiguredCatalog(catalog) + runSyncAndVerifyStateOutput(config, messages, configuredCatalog, false) + + for (stream in catalog.streams) { + val streamName = stream.name + val schema = if (stream.namespace != null) stream.namespace else defaultSchema!! + + val actualSchemaTypes = retrieveDataTypesFromPersistedFiles(streamName, schema) + val expectedSchemaTypes = retrieveExpectedDataTypes(stream) + + Assertions.assertEquals(expectedSchemaTypes, actualSchemaTypes) + } + } + + private fun retrieveExpectedDataTypes(stream: AirbyteStream): Map> { + val iterableNames = Iterable { stream.jsonSchema["properties"].fieldNames() } + val nameToNode: Map = + iterableNames.associateWith { name: String -> getJsonNode(stream, name) } + + return nameToNode.entries + .stream() + .collect( + Collectors.toMap( + Function { obj: Map.Entry -> obj.key }, + Function { entry: Map.Entry -> + getExpectedSchemaType(entry.value) + } + ) + ) + } + + private fun getJsonNode(stream: AirbyteStream, name: String): JsonNode { + val properties = stream.jsonSchema["properties"] + if (properties.size() == 1) { + return properties["data"] + } + return properties[name]["items"] + } + + private fun getExpectedSchemaType(fieldDefinition: JsonNode): Set { + val typeProperty = + if (fieldDefinition["type"] == null) fieldDefinition["\$ref"] + else fieldDefinition["type"] + val airbyteTypeProperty = fieldDefinition["airbyte_type"] + val airbyteTypePropertyText = airbyteTypeProperty?.asText() + return Arrays.stream(JsonSchemaType.entries.toTypedArray()) + .filter { value: JsonSchemaType -> + value.jsonSchemaType == typeProperty.asText() && + compareAirbyteTypes(airbyteTypePropertyText, value) + } + .map { obj: JsonSchemaType -> obj.avroType } + .collect(Collectors.toSet()) + } + + private fun compareAirbyteTypes( + airbyteTypePropertyText: String?, + value: JsonSchemaType + ): Boolean { + if (airbyteTypePropertyText == null) { + return value.jsonSchemaAirbyteType == null + } + return airbyteTypePropertyText == value.jsonSchemaAirbyteType + } + + @Throws(IOException::class) + private fun readCatalogFromFile(catalogFilename: String): AirbyteCatalog { + return Jsons.deserialize( + MoreResources.readResource(catalogFilename), + AirbyteCatalog::class.java + ) + } + + @Throws(IOException::class) + private fun readMessagesFromFile(messagesFilename: String): List { + return MoreResources.readResource(messagesFilename) + .trim() + .lines() + .map { record -> Jsons.deserialize(record, AirbyteMessage::class.java) } + .toList() + } + + @Throws(Exception::class) + protected abstract fun retrieveDataTypesFromPersistedFiles( + streamName: String, + namespace: String + ): Map> + + protected fun getTypes(record: GenericData.Record): Map> { + val fieldList = + record.schema.fields + .stream() + .filter { field: Schema.Field -> !field.name().startsWith("_airbyte") } + .toList() + + return if (fieldList.size == 1) { + fieldList + .stream() + .collect( + Collectors.toMap( + Function { obj: Schema.Field -> obj.name() }, + Function { field: Schema.Field -> + field + .schema() + .types + .stream() + .map { obj: Schema -> obj.type } + .filter { type: Schema.Type -> type != Schema.Type.NULL } + .collect(Collectors.toSet()) + } + ) + ) + } else { + fieldList + .stream() + .collect( + Collectors.toMap( + Function { obj: Schema.Field -> obj.name() }, + Function { field: Schema.Field -> + field + .schema() + .types + .stream() + .filter { type: Schema -> type.type != Schema.Type.NULL } + .flatMap { type: Schema -> type.elementType.types.stream() } + .map { obj: Schema -> obj.type } + .filter { type: Schema.Type -> type != Schema.Type.NULL } + .collect(Collectors.toSet()) + } + ) + ) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/s3/S3AvroParquetTestDataComparator.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/s3/S3AvroParquetTestDataComparator.kt new file mode 100644 index 0000000000000..676e9f98cf17c --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/s3/S3AvroParquetTestDataComparator.kt @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3 + +import io.airbyte.cdk.integrations.standardtest.destination.comparator.AdvancedTestDataComparator +import java.time.* +import java.time.format.DateTimeFormatter + +class S3AvroParquetTestDataComparator : AdvancedTestDataComparator() { + override fun compareDateValues(airbyteMessageValue: String, destinationValue: String): Boolean { + val destinationDate = LocalDate.ofEpochDay(destinationValue.toLong()) + val expectedDate = + LocalDate.parse( + airbyteMessageValue, + DateTimeFormatter.ofPattern(AdvancedTestDataComparator.AIRBYTE_DATE_FORMAT) + ) + return expectedDate == destinationDate + } + + private fun getInstantFromEpoch(epochValue: String): Instant { + return Instant.ofEpochMilli(epochValue.toLong() / 1000) + } + + override fun parseDestinationDateWithTz(destinationValue: String): ZonedDateTime { + return ZonedDateTime.ofInstant(getInstantFromEpoch(destinationValue), ZoneOffset.UTC) + } + + override fun compareDateTimeValues( + airbyteMessageValue: String, + destinationValue: String + ): Boolean { + val format = DateTimeFormatter.ofPattern(AdvancedTestDataComparator.AIRBYTE_DATETIME_FORMAT) + val dateTime = + LocalDateTime.ofInstant(getInstantFromEpoch(destinationValue), ZoneOffset.UTC) + return super.compareDateTimeValues(airbyteMessageValue, format.format(dateTime)) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/s3/S3BaseAvroDestinationAcceptanceTest.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/s3/S3BaseAvroDestinationAcceptanceTest.kt new file mode 100644 index 0000000000000..2ac2e1dc3f608 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/s3/S3BaseAvroDestinationAcceptanceTest.kt @@ -0,0 +1,99 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3 + +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.ObjectReader +import io.airbyte.cdk.integrations.destination.s3.avro.AvroConstants +import io.airbyte.cdk.integrations.destination.s3.util.AvroRecordHelper +import io.airbyte.cdk.integrations.standardtest.destination.comparator.TestDataComparator +import io.airbyte.commons.json.Jsons +import java.util.* +import org.apache.avro.Schema +import org.apache.avro.file.DataFileReader +import org.apache.avro.file.SeekableByteArrayInput +import org.apache.avro.generic.GenericData +import org.apache.avro.generic.GenericDatumReader + +abstract class S3BaseAvroDestinationAcceptanceTest protected constructor() : + S3AvroParquetDestinationAcceptanceTest(S3Format.AVRO) { + override val formatConfig: JsonNode? + get() = + Jsons.jsonNode( + java.util.Map.of( + "format_type", + "Avro", + "compression_codec", + java.util.Map.of( + "codec", + "zstandard", + "compression_level", + 5, + "include_checksum", + true + ) + ) + ) + + @Throws(Exception::class) + override fun retrieveRecords( + testEnv: TestDestinationEnv?, + streamName: String, + namespace: String, + streamSchema: JsonNode + ): List { + val nameUpdater = AvroRecordHelper.getFieldNameUpdater(streamName, namespace, streamSchema) + + val objectSummaries = getAllSyncedObjects(streamName, namespace) + val jsonRecords: MutableList = LinkedList() + + for (objectSummary in objectSummaries) { + val `object` = s3Client!!.getObject(objectSummary.bucketName, objectSummary.key) + DataFileReader( + SeekableByteArrayInput(`object`.objectContent.readAllBytes()), + GenericDatumReader() + ) + .use { dataFileReader -> + val jsonReader: ObjectReader = + S3DestinationAcceptanceTest.Companion.MAPPER.reader() + while (dataFileReader.hasNext()) { + val record = dataFileReader.next() + val jsonBytes = AvroConstants.JSON_CONVERTER.convertToJson(record) + var jsonRecord = jsonReader.readTree(jsonBytes) + jsonRecord = nameUpdater.getJsonWithOriginalFieldNames(jsonRecord) + jsonRecords.add(AvroRecordHelper.pruneAirbyteJson(jsonRecord)) + } + } + } + + return jsonRecords + } + + override fun getTestDataComparator(): TestDataComparator = S3AvroParquetTestDataComparator() + + @Throws(Exception::class) + override fun retrieveDataTypesFromPersistedFiles( + streamName: String, + namespace: String + ): Map> { + val objectSummaries = getAllSyncedObjects(streamName, namespace) + val resultDataTypes: MutableMap> = HashMap() + + for (objectSummary in objectSummaries) { + val `object` = s3Client!!.getObject(objectSummary.bucketName, objectSummary.key) + DataFileReader( + SeekableByteArrayInput(`object`.objectContent.readAllBytes()), + GenericDatumReader() + ) + .use { dataFileReader -> + while (dataFileReader.hasNext()) { + val record = dataFileReader.next() + val actualDataTypes = getTypes(record) + resultDataTypes.putAll(actualDataTypes) + } + } + } + return resultDataTypes + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/s3/S3BaseCsvDestinationAcceptanceTest.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/s3/S3BaseCsvDestinationAcceptanceTest.kt new file mode 100644 index 0000000000000..0081b1f2f7905 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/s3/S3BaseCsvDestinationAcceptanceTest.kt @@ -0,0 +1,136 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3 + +import com.amazonaws.services.s3.model.S3Object +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.node.ObjectNode +import io.airbyte.cdk.integrations.base.JavaBaseConstants +import io.airbyte.cdk.integrations.destination.s3.util.Flattening +import io.airbyte.commons.json.Jsons +import java.io.IOException +import java.io.InputStreamReader +import java.io.Reader +import java.nio.charset.StandardCharsets +import java.util.* +import java.util.stream.StreamSupport +import org.apache.commons.csv.CSVFormat +import org.apache.commons.csv.CSVRecord +import org.apache.commons.csv.QuoteMode + +abstract class S3BaseCsvDestinationAcceptanceTest : S3DestinationAcceptanceTest(S3Format.CSV) { + override val formatConfig: JsonNode? + get() = + Jsons.jsonNode( + java.util.Map.of( + "format_type", + outputFormat, + "flattening", + Flattening.ROOT_LEVEL.value, + "compression", + Jsons.jsonNode(java.util.Map.of("compression_type", "No Compression")) + ) + ) + + @Throws(IOException::class) + override fun retrieveRecords( + testEnv: TestDestinationEnv?, + streamName: String, + namespace: String, + streamSchema: JsonNode + ): List { + val objectSummaries = getAllSyncedObjects(streamName, namespace) + + val fieldTypes = getFieldTypes(streamSchema) + val jsonRecords: MutableList = LinkedList() + + for (objectSummary in objectSummaries) { + s3Client!!.getObject(objectSummary.bucketName, objectSummary.key).use { `object` -> + getReader(`object`).use { `in` -> + val records: Iterable = + CSVFormat.Builder.create() + .setHeader() + .setSkipHeaderRecord(true) + .setQuoteMode(QuoteMode.NON_NUMERIC) + .build() + .parse(`in`) + StreamSupport.stream(records.spliterator(), false).forEach { r: CSVRecord -> + jsonRecords.add(getJsonNode(r.toMap(), fieldTypes)) + } + } + } + } + + return jsonRecords + } + + @Throws(IOException::class) + protected open fun getReader(s3Object: S3Object): Reader { + return InputStreamReader(s3Object.objectContent, StandardCharsets.UTF_8) + } + + companion object { + /** Convert json_schema to a map from field name to field types. */ + private fun getFieldTypes(streamSchema: JsonNode): Map { + val fieldTypes: MutableMap = HashMap() + val fieldDefinitions = streamSchema["properties"] + val iterator = fieldDefinitions.fields() + while (iterator.hasNext()) { + val entry = iterator.next() + val fieldValue = entry.value + val typeValue = + if (fieldValue["type"] == null) fieldValue["\$ref"] else fieldValue["type"] + fieldTypes[entry.key] = typeValue.asText() + } + return fieldTypes + } + + private fun getJsonNode( + input: Map, + fieldTypes: Map + ): JsonNode { + val json: ObjectNode = MAPPER.createObjectNode() + + if (input.containsKey(JavaBaseConstants.COLUMN_NAME_DATA)) { + return Jsons.deserialize(input[JavaBaseConstants.COLUMN_NAME_DATA]) + } + + for ((key, value) in input) { + if ( + key == JavaBaseConstants.COLUMN_NAME_AB_ID || + (key == JavaBaseConstants.COLUMN_NAME_EMITTED_AT) + ) { + continue + } + if (value == "") { + continue + } + val type = fieldTypes[key] + when (type) { + "WellKnownTypes.json#/definitions/Boolean" -> json.put(key, value.toBoolean()) + "WellKnownTypes.json#/definitions/Integer" -> json.put(key, value.toInt()) + "WellKnownTypes.json#/definitions/Number" -> json.put(key, value.toDouble()) + "boolean" -> json.put(key, value.toBoolean()) + "integer" -> json.put(key, value.toInt()) + "number" -> json.put(key, value.toDouble()) + "" -> addNoTypeValue(json, key, value) + else -> json.put(key, value) + } + } + return json + } + + private fun addNoTypeValue(json: ObjectNode, key: String, value: String?) { + if ( + value != null && (value.matches("^\\[.*\\]$".toRegex())) || + value!!.matches("^\\{.*\\}$".toRegex()) + ) { + val newNode = Jsons.deserialize(value) + json.set(key, newNode) + } else { + json.put(key, value) + } + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/s3/S3BaseCsvGzipDestinationAcceptanceTest.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/s3/S3BaseCsvGzipDestinationAcceptanceTest.kt new file mode 100644 index 0000000000000..f616e30fbb778 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/s3/S3BaseCsvGzipDestinationAcceptanceTest.kt @@ -0,0 +1,27 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3 + +import com.amazonaws.services.s3.model.S3Object +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.destination.s3.util.Flattening +import io.airbyte.commons.json.Jsons +import java.io.IOException +import java.io.InputStreamReader +import java.io.Reader +import java.nio.charset.StandardCharsets +import java.util.zip.GZIPInputStream + +abstract class S3BaseCsvGzipDestinationAcceptanceTest : S3BaseCsvDestinationAcceptanceTest() { + override val formatConfig: JsonNode? + get() = // config without compression defaults to GZIP + Jsons.jsonNode( + mapOf("format_type" to outputFormat, "flattening" to Flattening.ROOT_LEVEL.value) + ) + + @Throws(IOException::class) + override fun getReader(s3Object: S3Object): Reader { + return InputStreamReader(GZIPInputStream(s3Object.objectContent), StandardCharsets.UTF_8) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/s3/S3BaseJsonlDestinationAcceptanceTest.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/s3/S3BaseJsonlDestinationAcceptanceTest.kt new file mode 100644 index 0000000000000..82de354be8a2b --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/s3/S3BaseJsonlDestinationAcceptanceTest.kt @@ -0,0 +1,58 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3 + +import com.amazonaws.services.s3.model.S3Object +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.base.JavaBaseConstants +import io.airbyte.cdk.integrations.destination.s3.util.Flattening +import io.airbyte.commons.json.Jsons +import java.io.BufferedReader +import java.io.IOException +import java.io.InputStreamReader +import java.nio.charset.StandardCharsets +import java.util.* +import kotlin.collections.List +import kotlin.collections.MutableList + +abstract class S3BaseJsonlDestinationAcceptanceTest protected constructor() : + S3DestinationAcceptanceTest(S3Format.JSONL) { + override val formatConfig: JsonNode? + get() = + Jsons.jsonNode( + mapOf( + "format_type" to outputFormat, + "flattening" to Flattening.NO.value, + "compression" to Jsons.jsonNode(mapOf("compression_type" to "No Compression")) + ) + ) + + @Throws(IOException::class) + override fun retrieveRecords( + testEnv: TestDestinationEnv?, + streamName: String, + namespace: String, + streamSchema: JsonNode + ): List { + val objectSummaries = getAllSyncedObjects(streamName, namespace) + val jsonRecords: MutableList = LinkedList() + + for (objectSummary in objectSummaries) { + val `object` = s3Client!!.getObject(objectSummary.bucketName, objectSummary.key) + getReader(`object`).use { reader -> + var line: String? + while ((reader.readLine().also { line = it }) != null) { + jsonRecords.add(Jsons.deserialize(line)[JavaBaseConstants.COLUMN_NAME_DATA]) + } + } + } + + return jsonRecords + } + + @Throws(IOException::class) + protected open fun getReader(s3Object: S3Object): BufferedReader { + return BufferedReader(InputStreamReader(s3Object.objectContent, StandardCharsets.UTF_8)) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/s3/S3BaseJsonlGzipDestinationAcceptanceTest.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/s3/S3BaseJsonlGzipDestinationAcceptanceTest.kt new file mode 100644 index 0000000000000..3a48cc532a81d --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/s3/S3BaseJsonlGzipDestinationAcceptanceTest.kt @@ -0,0 +1,27 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3 + +import com.amazonaws.services.s3.model.S3Object +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.integrations.destination.s3.util.Flattening +import io.airbyte.commons.json.Jsons +import java.io.BufferedReader +import java.io.IOException +import java.io.InputStreamReader +import java.nio.charset.StandardCharsets +import java.util.zip.GZIPInputStream + +abstract class S3BaseJsonlGzipDestinationAcceptanceTest : S3BaseJsonlDestinationAcceptanceTest() { + override val formatConfig: JsonNode? + get() = // config without compression defaults to GZIP + Jsons.jsonNode(mapOf("format_type" to outputFormat, "flattening" to Flattening.NO.value)) + + @Throws(IOException::class) + override fun getReader(s3Object: S3Object): BufferedReader { + return BufferedReader( + InputStreamReader(GZIPInputStream(s3Object.objectContent), StandardCharsets.UTF_8) + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/s3/S3BaseParquetDestinationAcceptanceTest.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/s3/S3BaseParquetDestinationAcceptanceTest.kt new file mode 100644 index 0000000000000..c63d80f13234c --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/s3/S3BaseParquetDestinationAcceptanceTest.kt @@ -0,0 +1,96 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3 + +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.ObjectReader +import io.airbyte.cdk.integrations.destination.s3.avro.AvroConstants +import io.airbyte.cdk.integrations.destination.s3.parquet.S3ParquetWriter +import io.airbyte.cdk.integrations.destination.s3.util.AvroRecordHelper +import io.airbyte.cdk.integrations.standardtest.destination.comparator.TestDataComparator +import io.airbyte.commons.json.Jsons +import java.io.IOException +import java.net.URI +import java.net.URISyntaxException +import java.util.* +import org.apache.avro.Schema +import org.apache.avro.generic.GenericData +import org.apache.hadoop.fs.Path +import org.apache.parquet.avro.AvroReadSupport +import org.apache.parquet.hadoop.ParquetReader + +abstract class S3BaseParquetDestinationAcceptanceTest protected constructor() : + S3AvroParquetDestinationAcceptanceTest(S3Format.PARQUET) { + override val formatConfig: JsonNode? + get() = + Jsons.jsonNode(java.util.Map.of("format_type", "Parquet", "compression_codec", "GZIP")) + + @Throws(IOException::class, URISyntaxException::class) + override fun retrieveRecords( + testEnv: TestDestinationEnv?, + streamName: String, + namespace: String, + streamSchema: JsonNode + ): List { + val nameUpdater = AvroRecordHelper.getFieldNameUpdater(streamName, namespace, streamSchema) + + val objectSummaries = getAllSyncedObjects(streamName, namespace) + val jsonRecords: MutableList = LinkedList() + + for (objectSummary in objectSummaries) { + val `object` = s3Client!!.getObject(objectSummary.bucketName, objectSummary.key) + val uri = URI(String.format("s3a://%s/%s", `object`.bucketName, `object`.key)) + val path = Path(uri) + val hadoopConfig = S3ParquetWriter.getHadoopConfig(s3DestinationConfig) + + ParquetReader.builder(AvroReadSupport(), path) + .withConf(hadoopConfig) + .build() + .use { parquetReader -> + val jsonReader: ObjectReader = + S3DestinationAcceptanceTest.Companion.MAPPER.reader() + var record: GenericData.Record? + while ((parquetReader.read().also { record = it }) != null) { + val jsonBytes = AvroConstants.JSON_CONVERTER.convertToJson(record) + var jsonRecord = jsonReader.readTree(jsonBytes) + jsonRecord = nameUpdater.getJsonWithOriginalFieldNames(jsonRecord) + jsonRecords.add(AvroRecordHelper.pruneAirbyteJson(jsonRecord)) + } + } + } + + return jsonRecords + } + + override fun getTestDataComparator(): TestDataComparator = S3AvroParquetTestDataComparator() + + @Throws(Exception::class) + override fun retrieveDataTypesFromPersistedFiles( + streamName: String, + namespace: String + ): Map> { + val objectSummaries = getAllSyncedObjects(streamName, namespace) + val resultDataTypes: MutableMap> = HashMap() + + for (objectSummary in objectSummaries) { + val `object` = s3Client!!.getObject(objectSummary.bucketName, objectSummary.key) + val uri = URI(String.format("s3a://%s/%s", `object`.bucketName, `object`.key)) + val path = Path(uri) + val hadoopConfig = S3ParquetWriter.getHadoopConfig(s3DestinationConfig) + + ParquetReader.builder(AvroReadSupport(), path) + .withConf(hadoopConfig) + .build() + .use { parquetReader -> + var record: GenericData.Record? + while ((parquetReader.read().also { record = it }) != null) { + val actualDataTypes = getTypes(record!!) + resultDataTypes.putAll(actualDataTypes) + } + } + } + + return resultDataTypes + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/s3/S3DestinationAcceptanceTest.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/s3/S3DestinationAcceptanceTest.kt new file mode 100644 index 0000000000000..a53671d6cc90a --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/s3/S3DestinationAcceptanceTest.kt @@ -0,0 +1,191 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.cdk.integrations.destination.s3 + +import com.amazonaws.services.s3.AmazonS3 +import com.amazonaws.services.s3.model.DeleteObjectsRequest +import com.amazonaws.services.s3.model.S3ObjectSummary +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.ObjectMapper +import com.fasterxml.jackson.databind.node.ObjectNode +import io.airbyte.cdk.integrations.destination.NamingConventionTransformer +import io.airbyte.cdk.integrations.destination.s3.util.S3NameTransformer +import io.airbyte.cdk.integrations.standardtest.destination.DestinationAcceptanceTest +import io.airbyte.cdk.integrations.standardtest.destination.comparator.AdvancedTestDataComparator +import io.airbyte.cdk.integrations.standardtest.destination.comparator.TestDataComparator +import io.airbyte.commons.io.IOs +import io.airbyte.commons.jackson.MoreMappers +import io.airbyte.commons.json.Jsons +import java.nio.file.Path +import java.util.* +import java.util.stream.Collectors +import org.apache.commons.lang3.RandomStringUtils +import org.joda.time.DateTime +import org.joda.time.DateTimeZone +import org.mockito.Mockito.mock +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * When adding a new S3 destination acceptance test, extend this class and do the following: + * * Implement [.getFormatConfig] that returns a [S3FormatConfig] + * * Implement [.retrieveRecords] that returns the Json records for the test + * + * Under the hood, a [S3DestinationConfig] is constructed as follows: + * * Retrieve the secrets from "secrets/config.json" + * * Get the S3 bucket path from the constructor + * * Get the format config from [.getFormatConfig] + */ +abstract class S3DestinationAcceptanceTest +protected constructor(protected val outputFormat: S3Format) : DestinationAcceptanceTest() { + protected val secretFilePath: String = "secrets/config.json" + protected var configJson: JsonNode? = null + protected var s3DestinationConfig: S3DestinationConfig = mock() + protected var s3Client: AmazonS3? = null + protected var s3nameTransformer: NamingConventionTransformer = mock() + protected var s3StorageOperations: S3StorageOperations? = null + + protected open val baseConfigJson: JsonNode + get() = Jsons.deserialize(IOs.readFile(Path.of(secretFilePath))) + + override val imageName: String + get() = "airbyte/destination-s3:dev" + + override fun getDefaultSchema(config: JsonNode): String? { + if (config.has("s3_bucket_path")) { + return config["s3_bucket_path"].asText() + } + return null + } + + override fun getConfig(): JsonNode = configJson!! + + override fun getFailCheckConfig(): JsonNode { + val baseJson = baseConfigJson + val failCheckJson = Jsons.clone(baseJson) + // invalid credential + (failCheckJson as ObjectNode).put("access_key_id", "fake-key") + failCheckJson.put("secret_access_key", "fake-secret") + return failCheckJson + } + + /** Helper method to retrieve all synced objects inside the configured bucket path. */ + protected fun getAllSyncedObjects( + streamName: String, + namespace: String + ): List { + val namespaceStr = s3nameTransformer.getNamespace(namespace) + val streamNameStr = s3nameTransformer.getIdentifier(streamName) + val outputPrefix = + s3StorageOperations!!.getBucketObjectPath( + namespaceStr, + streamNameStr, + DateTime.now(DateTimeZone.UTC), + s3DestinationConfig.pathFormat!!, + ) + // the child folder contains a non-deterministic epoch timestamp, so use the parent folder + val parentFolder = outputPrefix.substring(0, outputPrefix.lastIndexOf("/") + 1) + val objectSummaries = + s3Client!! + .listObjects(s3DestinationConfig.bucketName, parentFolder) + .objectSummaries + .stream() + .filter { o: S3ObjectSummary -> o.key.contains("$streamNameStr/") } + .sorted(Comparator.comparingLong { o: S3ObjectSummary -> o.lastModified.time }) + .collect(Collectors.toList()) + LOGGER.info( + "All objects: {}", + objectSummaries + .stream() + .map { o: S3ObjectSummary -> String.format("%s/%s", o.bucketName, o.key) } + .collect(Collectors.toList()), + ) + return objectSummaries + } + + protected abstract val formatConfig: JsonNode? + get + + /** + * This method does the following: + * * Construct the S3 destination config. + * * Construct the S3 client. + */ + override fun setup(testEnv: TestDestinationEnv, TEST_SCHEMAS: HashSet) { + val baseConfigJson = baseConfigJson + // Set a random s3 bucket path for each integration test + val configJson = Jsons.clone(baseConfigJson) + val testBucketPath = + String.format( + "%s_test_%s", + outputFormat.name.lowercase(), + RandomStringUtils.randomAlphanumeric(5), + ) + (configJson as ObjectNode) + .put("s3_bucket_path", testBucketPath) + .set("format", formatConfig) + this.configJson = configJson + this.s3DestinationConfig = + S3DestinationConfig.getS3DestinationConfig(configJson, storageProvider()) + LOGGER.info( + "Test full path: {}/{}", + s3DestinationConfig.bucketName, + s3DestinationConfig.bucketPath, + ) + + this.s3Client = s3DestinationConfig.getS3Client() + this.s3nameTransformer = S3NameTransformer() + this.s3StorageOperations = + S3StorageOperations(s3nameTransformer, s3Client!!, s3DestinationConfig) + } + + /** Remove all the S3 output from the tests. */ + override fun tearDown(testEnv: TestDestinationEnv) { + val keysToDelete: MutableList = LinkedList() + val objects = + s3Client!! + .listObjects(s3DestinationConfig.bucketName, s3DestinationConfig.bucketPath) + .objectSummaries + for (`object` in objects) { + keysToDelete.add(DeleteObjectsRequest.KeyVersion(`object`.key)) + } + + if (keysToDelete.size > 0) { + LOGGER.info( + "Tearing down test bucket path: {}/{}", + s3DestinationConfig.bucketName, + s3DestinationConfig.bucketPath, + ) + val result = + s3Client!!.deleteObjects( + DeleteObjectsRequest(s3DestinationConfig.bucketName).withKeys(keysToDelete), + ) + LOGGER.info("Deleted {} file(s).", result.deletedObjects.size) + } + } + + override fun getTestDataComparator(): TestDataComparator = AdvancedTestDataComparator() + + override fun supportBasicDataTypeTest(): Boolean { + return true + } + + override fun supportArrayDataTypeTest(): Boolean { + return true + } + + override fun supportObjectDataTypeTest(): Boolean { + return true + } + + fun storageProvider(): StorageProvider { + return StorageProvider.AWS_S3 + } + + companion object { + protected val LOGGER: Logger = + LoggerFactory.getLogger(S3DestinationAcceptanceTest::class.java) + @JvmStatic protected val MAPPER: ObjectMapper = MoreMappers.initMapper() + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/build.gradle b/airbyte-cdk/java/airbyte-cdk/typing-deduping/build.gradle index 9ec539430fa0f..d0882ed8a8fa3 100644 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/build.gradle +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/build.gradle @@ -8,14 +8,34 @@ java { } } +compileKotlin { + compilerOptions { + allWarningsAsErrors = false + } +} + +compileTestKotlin { + compilerOptions { + allWarningsAsErrors = false + } +} + +compileTestFixturesKotlin { + compilerOptions { + allWarningsAsErrors = false + } +} + dependencies { - implementation project(':airbyte-cdk:java:airbyte-cdk:dependencies') - implementation project(':airbyte-cdk:java:airbyte-cdk:core') + implementation project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-dependencies') + implementation project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-core') implementation 'commons-codec:commons-codec:1.16.0' - testFixturesImplementation project(':airbyte-cdk:java:airbyte-cdk:dependencies') - testFixturesImplementation testFixtures(project(':airbyte-cdk:java:airbyte-cdk:dependencies')) - testFixturesImplementation project(':airbyte-cdk:java:airbyte-cdk:core') - testFixturesImplementation testFixtures(project(':airbyte-cdk:java:airbyte-cdk:core')) + testFixturesImplementation project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-dependencies') + testFixturesImplementation testFixtures(project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-dependencies')) + testFixturesImplementation project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-core') + testFixturesApi testFixtures(project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-core')) + testFixturesImplementation 'org.mockito.kotlin:mockito-kotlin:5.2.1' + testImplementation 'org.mockito.kotlin:mockito-kotlin:5.2.1' } diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/AirbyteProtocolType.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/AirbyteProtocolType.java deleted file mode 100644 index 6d0320ca7b748..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/AirbyteProtocolType.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -import com.fasterxml.jackson.databind.JsonNode; - -/** - * Protocol types are ordered by precedence in the case of a Union that contains multiple types. - * Priority is given to wider scope types over narrower ones. (Note that because of dedup logic in - * {@link AirbyteType#fromJsonSchema(JsonNode)}, at most one string or date/time type can exist in a - * Union.) - */ -public enum AirbyteProtocolType implements AirbyteType { - - STRING, - DATE, - TIME_WITHOUT_TIMEZONE, - TIME_WITH_TIMEZONE, - TIMESTAMP_WITHOUT_TIMEZONE, - TIMESTAMP_WITH_TIMEZONE, - NUMBER, - INTEGER, - BOOLEAN, - UNKNOWN; - - private static AirbyteProtocolType matches(final String type) { - try { - return AirbyteProtocolType.valueOf(type.toUpperCase()); - } catch (final IllegalArgumentException e) { - LOGGER.error(String.format("Could not find matching AirbyteProtocolType for \"%s\": %s", type, e)); - return UNKNOWN; - } - } - - // Extracts the appropriate protocol type from the representative JSON - protected static AirbyteProtocolType fromJson(final JsonNode node) { - // JSON could be a string (ex: "number") - if (node.isTextual()) { - return matches(node.asText()); - } - - // or, JSON could be a node with fields - final JsonNode propertyType = node.get("type"); - final JsonNode airbyteType = node.get("airbyte_type"); - final JsonNode format = node.get("format"); - - if (AirbyteType.nodeMatches(propertyType, "boolean")) { - return BOOLEAN; - } else if (AirbyteType.nodeMatches(propertyType, "integer")) { - return INTEGER; - } else if (AirbyteType.nodeMatches(propertyType, "number")) { - return AirbyteType.nodeMatches(airbyteType, "integer") ? INTEGER : NUMBER; - } else if (AirbyteType.nodeMatches(propertyType, "string")) { - if (AirbyteType.nodeMatches(format, "date")) { - return DATE; - } else if (AirbyteType.nodeMatches(format, "time")) { - if (AirbyteType.nodeMatches(airbyteType, "time_without_timezone")) { - return TIME_WITHOUT_TIMEZONE; - } else if (AirbyteType.nodeMatches(airbyteType, "time_with_timezone")) { - return TIME_WITH_TIMEZONE; - } - } else if (AirbyteType.nodeMatches(format, "date-time")) { - if (AirbyteType.nodeMatches(airbyteType, "timestamp_without_timezone")) { - return TIMESTAMP_WITHOUT_TIMEZONE; - } else if (airbyteType == null || AirbyteType.nodeMatches(airbyteType, "timestamp_with_timezone")) { - return TIMESTAMP_WITH_TIMEZONE; - } - } else { - return STRING; - } - } - - return UNKNOWN; - } - - @Override - public String getTypeName() { - return this.name(); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/AirbyteType.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/AirbyteType.java deleted file mode 100644 index 33ede887c42b0..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/AirbyteType.java +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.fasterxml.jackson.databind.node.TextNode; -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.List; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public sealed interface AirbyteType permits AirbyteProtocolType,Struct,Array,UnsupportedOneOf,Union { - - Logger LOGGER = LoggerFactory.getLogger(AirbyteType.class); - - /** - * The most common call pattern is probably to use this method on the stream schema, verify that - * it's an {@link Struct} schema, and then call {@link Struct#properties()} to get the columns. - *

    - * If the top-level schema is not an object, then we can't really do anything with it, and should - * probably fail the sync. (but see also {@link Union#asColumns()}). - */ - static AirbyteType fromJsonSchema(final JsonNode schema) { - try { - final JsonNode topLevelType = schema.get("type"); - if (topLevelType != null) { - if (topLevelType.isTextual()) { - if (nodeMatches(topLevelType, "object")) { - return getStruct(schema); - } else if (nodeMatches(topLevelType, "array")) { - return getArray(schema); - } - } else if (topLevelType.isArray()) { - return fromArrayJsonSchema(schema, topLevelType); - } - } else if (schema.hasNonNull("oneOf")) { - final List options = new ArrayList<>(); - schema.get("oneOf").elements().forEachRemaining(element -> options.add(fromJsonSchema(element))); - return new UnsupportedOneOf(options); - } else if (schema.hasNonNull("properties")) { - // The schema has neither type nor oneof, but it does have properties. Assume we're looking at a - // struct. - // This is for backwards-compatibility with legacy normalization. - return getStruct(schema); - } - return AirbyteProtocolType.fromJson(schema); - } catch (final Exception e) { - LOGGER.error("Exception parsing JSON schema {}: {}; returning UNKNOWN.", schema, e); - return AirbyteProtocolType.UNKNOWN; - } - } - - static boolean nodeMatches(final JsonNode node, final String value) { - if (node == null || !node.isTextual()) { - return false; - } - return node.equals(TextNode.valueOf(value)); - } - - private static Struct getStruct(final JsonNode schema) { - final LinkedHashMap propertiesMap = new LinkedHashMap<>(); - final JsonNode properties = schema.get("properties"); - if (properties != null) { - properties.fields().forEachRemaining(property -> { - final String key = property.getKey(); - final JsonNode value = property.getValue(); - propertiesMap.put(key, fromJsonSchema(value)); - }); - } - return new Struct(propertiesMap); - } - - private static Array getArray(final JsonNode schema) { - final JsonNode items = schema.get("items"); - if (items == null) { - return new Array(AirbyteProtocolType.UNKNOWN); - } else { - return new Array(fromJsonSchema(items)); - } - } - - private static AirbyteType fromArrayJsonSchema(final JsonNode schema, final JsonNode array) { - final List typeOptions = new ArrayList<>(); - array.elements().forEachRemaining(element -> { - // ignore "null" type and remove duplicates - final String type = element.asText(""); - if (!"null".equals(type) && !typeOptions.contains(type)) { - typeOptions.add(element.asText()); - } - }); - - // we encounter an array of types that actually represents a single type rather than a Union - if (typeOptions.size() == 1) { - if (typeOptions.get(0).equals("object")) { - return getStruct(schema); - } else if (typeOptions.get(0).equals("array")) { - return getArray(schema); - } else { - return AirbyteProtocolType.fromJson(getTrimmedJsonSchema(schema, typeOptions.get(0))); - } - } - - // Recurse into a schema that forces a specific one of each option - final List options = typeOptions.stream().map(typeOption -> fromJsonSchema(getTrimmedJsonSchema(schema, typeOption))).toList(); - return new Union(options); - } - - // Duplicates the JSON schema but keeps only one type - private static JsonNode getTrimmedJsonSchema(final JsonNode schema, final String type) { - final JsonNode schemaClone = schema.deepCopy(); - // schema is guaranteed to be an object here, because we know it has a `type` key - ((ObjectNode) schemaClone).put("type", type); - return schemaClone; - } - - String getTypeName(); - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/AlterTableReport.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/AlterTableReport.java deleted file mode 100644 index 64ce0d9fdd780..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/AlterTableReport.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -import java.util.Set; -import java.util.stream.Stream; - -public record AlterTableReport(Set columnsToAdd, - Set columnsToRemove, - Set columnsToChangeType, - boolean isDestinationV2Format) { - - /** - * A no-op for an AlterTableReport is when the existing table matches the expected schema - * - * @return whether the schema matches - */ - public boolean isNoOp() { - return isDestinationV2Format && Stream.of(this.columnsToAdd, this.columnsToRemove, this.columnsToChangeType) - .allMatch(Set::isEmpty); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/Array.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/Array.java deleted file mode 100644 index 11f6b5287982a..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/Array.java +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -public record Array(AirbyteType items) implements AirbyteType { - - public static final String TYPE = "ARRAY"; - - @Override - public String getTypeName() { - return TYPE; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/BaseDestinationV1V2Migrator.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/BaseDestinationV1V2Migrator.java deleted file mode 100644 index 1f33b90749521..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/BaseDestinationV1V2Migrator.java +++ /dev/null @@ -1,169 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -import static io.airbyte.cdk.integrations.base.JavaBaseConstants.LEGACY_RAW_TABLE_COLUMNS; -import static io.airbyte.cdk.integrations.base.JavaBaseConstants.V2_RAW_TABLE_COLUMN_NAMES; - -import io.airbyte.protocol.models.v0.DestinationSyncMode; -import java.util.Collection; -import java.util.Optional; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public abstract class BaseDestinationV1V2Migrator implements DestinationV1V2Migrator { - - protected static final Logger LOGGER = LoggerFactory.getLogger(BaseDestinationV1V2Migrator.class); - - @Override - public void migrateIfNecessary( - final SqlGenerator sqlGenerator, - final DestinationHandler destinationHandler, - final StreamConfig streamConfig) - throws Exception { - LOGGER.info("Assessing whether migration is necessary for stream {}", streamConfig.id().finalName()); - if (shouldMigrate(streamConfig)) { - LOGGER.info("Starting v2 Migration for stream {}", streamConfig.id().finalName()); - migrate(sqlGenerator, destinationHandler, streamConfig); - LOGGER.info("V2 Migration completed successfully for stream {}", streamConfig.id().finalName()); - } else { - LOGGER.info("No Migration Required for stream: {}", streamConfig.id().finalName()); - } - - } - - /** - * Determine whether a given stream needs to be migrated from v1 to v2 - * - * @param streamConfig the stream in question - * @return whether to migrate the stream - */ - protected boolean shouldMigrate(final StreamConfig streamConfig) throws Exception { - final var v1RawTable = convertToV1RawName(streamConfig); - LOGGER.info("Checking whether v1 raw table {} in dataset {} exists", v1RawTable.tableName(), v1RawTable.namespace()); - final var syncModeNeedsMigration = isMigrationRequiredForSyncMode(streamConfig.destinationSyncMode()); - final var noValidV2RawTableExists = !doesValidV2RawTableAlreadyExist(streamConfig); - final var aValidV1RawTableExists = doesValidV1RawTableExist(v1RawTable.namespace(), v1RawTable.tableName()); - LOGGER.info("Migration Info: Required for Sync mode: {}, No existing v2 raw tables: {}, A v1 raw table exists: {}", - syncModeNeedsMigration, noValidV2RawTableExists, aValidV1RawTableExists); - return syncModeNeedsMigration && noValidV2RawTableExists && aValidV1RawTableExists; - } - - /** - * Execute sql statements that converts a v1 raw table to a v2 raw table. Leaves the v1 raw table - * intact - * - * @param sqlGenerator the class which generates dialect specific sql statements - * @param destinationHandler the class which executes the sql statements - * @param streamConfig the stream to migrate the raw table of - */ - public void migrate(final SqlGenerator sqlGenerator, - final DestinationHandler destinationHandler, - final StreamConfig streamConfig) - throws TableNotMigratedException { - final var namespacedTableName = convertToV1RawName(streamConfig); - try { - destinationHandler.execute(sqlGenerator.migrateFromV1toV2(streamConfig.id(), namespacedTableName.namespace(), namespacedTableName.tableName())); - } catch (final Exception e) { - final var message = "Attempted and failed to migrate stream %s".formatted(streamConfig.id().finalName()); - throw new TableNotMigratedException(message, e); - } - } - - /** - * Checks the schema of the v1 raw table to ensure it matches the expected format - * - * @param existingV2AirbyteRawTable the v1 raw table - * @return whether the schema is as expected - */ - private boolean doesV1RawTableMatchExpectedSchema(final DialectTableDefinition existingV2AirbyteRawTable) { - - return schemaMatchesExpectation(existingV2AirbyteRawTable, LEGACY_RAW_TABLE_COLUMNS); - } - - /** - * Checks the schema of the v2 raw table to ensure it matches the expected format - * - * @param existingV2AirbyteRawTable the v2 raw table - */ - private void validateAirbyteInternalNamespaceRawTableMatchExpectedV2Schema(final DialectTableDefinition existingV2AirbyteRawTable) { - if (!schemaMatchesExpectation(existingV2AirbyteRawTable, V2_RAW_TABLE_COLUMN_NAMES)) { - throw new UnexpectedSchemaException("Destination V2 Raw Table does not match expected Schema"); - } - } - - /** - * If the sync mode is a full refresh and we overwrite the table then there is no need to migrate - * - * @param destinationSyncMode destination sync mode - * @return whether this is full refresh overwrite - */ - private boolean isMigrationRequiredForSyncMode(final DestinationSyncMode destinationSyncMode) { - return !DestinationSyncMode.OVERWRITE.equals(destinationSyncMode); - } - - /** - * Checks if a valid destinations v2 raw table already exists - * - * @param streamConfig the raw table to check - * @return whether it exists and is in the correct format - */ - private boolean doesValidV2RawTableAlreadyExist(final StreamConfig streamConfig) throws Exception { - if (doesAirbyteInternalNamespaceExist(streamConfig)) { - final var existingV2Table = getTableIfExists(streamConfig.id().rawNamespace(), streamConfig.id().rawName()); - existingV2Table.ifPresent(this::validateAirbyteInternalNamespaceRawTableMatchExpectedV2Schema); - return existingV2Table.isPresent(); - } - return false; - } - - /** - * Checks if a valid v1 raw table already exists - * - * @param namespace - * @param tableName - * @return whether it exists and is in the correct format - */ - protected boolean doesValidV1RawTableExist(final String namespace, final String tableName) throws Exception { - final var existingV1RawTable = getTableIfExists(namespace, tableName); - return existingV1RawTable.isPresent() && doesV1RawTableMatchExpectedSchema(existingV1RawTable.get()); - } - - /** - * Checks to see if Airbyte's internal schema for destinations v2 exists - * - * @param streamConfig the stream to check - * @return whether the schema exists - */ - abstract protected boolean doesAirbyteInternalNamespaceExist(StreamConfig streamConfig) throws Exception; - - /** - * Checks a Table's schema and compares it to an expected schema to make sure it matches - * - * @param existingTable the table to check - * @param columns the expected schema - * @return whether the existing table schema matches the expectation - */ - abstract protected boolean schemaMatchesExpectation(DialectTableDefinition existingTable, Collection columns); - - /** - * Get a reference ta a table if it exists - * - * @param namespace - * @param tableName - * @return an optional potentially containing a reference to the table - */ - abstract protected Optional getTableIfExists(String namespace, String tableName) throws Exception; - - /** - * We use different naming conventions for raw table names in destinations v2, we need a way to map - * that back to v1 names - * - * @param streamConfig the stream in question - * @return the valid v1 name and namespace for the same stream - */ - abstract protected NamespacedTableName convertToV1RawName(StreamConfig streamConfig); - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/CatalogParser.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/CatalogParser.java deleted file mode 100644 index 372f2999be643..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/CatalogParser.java +++ /dev/null @@ -1,172 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -import static io.airbyte.cdk.integrations.base.JavaBaseConstants.DEFAULT_AIRBYTE_INTERNAL_NAMESPACE; - -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.cdk.integrations.base.AirbyteExceptionHandler; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map.Entry; -import java.util.Optional; -import org.apache.commons.codec.digest.DigestUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class CatalogParser { - - private static final Logger LOGGER = LoggerFactory.getLogger(CatalogParser.class); - - private final SqlGenerator sqlGenerator; - private final String rawNamespace; - - public CatalogParser(final SqlGenerator sqlGenerator) { - this(sqlGenerator, DEFAULT_AIRBYTE_INTERNAL_NAMESPACE); - } - - public CatalogParser(final SqlGenerator sqlGenerator, final String rawNamespace) { - this.sqlGenerator = sqlGenerator; - this.rawNamespace = rawNamespace; - } - - public ParsedCatalog parseCatalog(final ConfiguredAirbyteCatalog catalog) { - // this code is bad and I feel bad - // it's mostly a port of the old normalization logic to prevent tablename collisions. - // tbh I have no idea if it works correctly. - final List streamConfigs = new ArrayList<>(); - for (final ConfiguredAirbyteStream stream : catalog.getStreams()) { - final StreamConfig originalStreamConfig = toStreamConfig(stream); - final StreamConfig actualStreamConfig; - // Use empty string quote because we don't really care - if (streamConfigs.stream().anyMatch(s -> s.id().finalTableId("").equals(originalStreamConfig.id().finalTableId(""))) - || streamConfigs.stream().anyMatch(s -> s.id().rawTableId("").equals(originalStreamConfig.id().rawTableId("")))) { - final String originalNamespace = stream.getStream().getNamespace(); - final String originalName = stream.getStream().getName(); - - LOGGER.info("Detected table name collision for {}.{}", originalNamespace, originalName); - - // ... this logic is ported from legacy normalization, and maybe should change? - // We're taking a hash of the quoted namespace and the unquoted stream name - final String hash = DigestUtils.sha1Hex(originalStreamConfig.id().finalNamespace() + "&airbyte&" + originalName).substring(0, 3); - final String newName = originalName + "_" + hash; - actualStreamConfig = new StreamConfig( - sqlGenerator.buildStreamId(originalNamespace, newName, rawNamespace), - originalStreamConfig.syncMode(), - originalStreamConfig.destinationSyncMode(), - originalStreamConfig.primaryKey(), - originalStreamConfig.cursor(), - originalStreamConfig.columns()); - } else { - actualStreamConfig = originalStreamConfig; - } - streamConfigs.add(actualStreamConfig); - - // Populate some interesting strings into the exception handler string deinterpolator - AirbyteExceptionHandler.addStringForDeinterpolation(actualStreamConfig.id().rawNamespace()); - AirbyteExceptionHandler.addStringForDeinterpolation(actualStreamConfig.id().rawName()); - AirbyteExceptionHandler.addStringForDeinterpolation(actualStreamConfig.id().finalNamespace()); - AirbyteExceptionHandler.addStringForDeinterpolation(actualStreamConfig.id().finalName()); - AirbyteExceptionHandler.addStringForDeinterpolation(actualStreamConfig.id().originalNamespace()); - AirbyteExceptionHandler.addStringForDeinterpolation(actualStreamConfig.id().originalName()); - actualStreamConfig.columns().keySet().forEach(columnId -> { - AirbyteExceptionHandler.addStringForDeinterpolation(columnId.name()); - AirbyteExceptionHandler.addStringForDeinterpolation(columnId.originalName()); - }); - // It's (unfortunately) possible for a cursor/PK to be declared that don't actually exist in the - // schema. - // Add their strings explicitly. - actualStreamConfig.cursor().ifPresent(cursor -> { - AirbyteExceptionHandler.addStringForDeinterpolation(cursor.name()); - AirbyteExceptionHandler.addStringForDeinterpolation(cursor.originalName()); - }); - actualStreamConfig.primaryKey().forEach(pk -> { - AirbyteExceptionHandler.addStringForDeinterpolation(pk.name()); - AirbyteExceptionHandler.addStringForDeinterpolation(pk.originalName()); - }); - } - return new ParsedCatalog(streamConfigs); - } - - // TODO maybe we should extract the column collision stuff to a separate method, since that's the - // interesting bit - @VisibleForTesting - public StreamConfig toStreamConfig(final ConfiguredAirbyteStream stream) { - final AirbyteType schema = AirbyteType.fromJsonSchema(stream.getStream().getJsonSchema()); - final LinkedHashMap airbyteColumns; - if (schema instanceof final Struct o) { - airbyteColumns = o.properties(); - } else if (schema instanceof final Union u) { - airbyteColumns = u.asColumns(); - } else { - throw new IllegalArgumentException("Top-level schema must be an object"); - } - - if (stream.getPrimaryKey().stream().anyMatch(key -> key.size() > 1)) { - throw new IllegalArgumentException("Only top-level primary keys are supported"); - } - final List primaryKey = stream.getPrimaryKey().stream().map(key -> sqlGenerator.buildColumnId(key.get(0))).toList(); - - if (stream.getCursorField().size() > 1) { - throw new IllegalArgumentException("Only top-level cursors are supported"); - } - final Optional cursor; - if (stream.getCursorField().size() > 0) { - cursor = Optional.of(sqlGenerator.buildColumnId(stream.getCursorField().get(0))); - } else { - cursor = Optional.empty(); - } - - // this code is really bad and I'm not convinced we need to preserve this behavior. - // as with the tablename collisions thing above - we're trying to preserve legacy normalization's - // naming conventions here. - final LinkedHashMap columns = new LinkedHashMap<>(); - for (final Entry entry : airbyteColumns.entrySet()) { - final ColumnId originalColumnId = sqlGenerator.buildColumnId(entry.getKey()); - ColumnId columnId; - if (columns.keySet().stream().noneMatch(c -> c.canonicalName().equals(originalColumnId.canonicalName()))) { - // None of the existing columns have the same name. We can add this new column as-is. - columnId = originalColumnId; - } else { - LOGGER.info( - "Detected column name collision for {}.{}.{}", - stream.getStream().getNamespace(), - stream.getStream().getName(), - entry.getKey()); - // One of the existing columns has the same name. We need to handle this collision. - // Append _1, _2, _3, ... to the column name until we find one that doesn't collide. - int i = 1; - while (true) { - columnId = sqlGenerator.buildColumnId(entry.getKey(), "_" + i); - final String canonicalName = columnId.canonicalName(); - if (columns.keySet().stream().noneMatch(c -> c.canonicalName().equals(canonicalName))) { - break; - } else { - i++; - } - } - // But we need to keep the original name so that we can still fetch it out of the JSON records. - columnId = new ColumnId( - columnId.name(), - originalColumnId.originalName(), - columnId.canonicalName()); - } - - columns.put(columnId, entry.getValue()); - } - - return new StreamConfig( - sqlGenerator.buildStreamId(stream.getStream().getNamespace(), stream.getStream().getName(), rawNamespace), - stream.getSyncMode(), - stream.getDestinationSyncMode(), - primaryKey, - cursor, - columns); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/CollectionUtils.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/CollectionUtils.java deleted file mode 100644 index 22e0e3b58dd81..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/CollectionUtils.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -import java.util.Collection; -import java.util.Optional; - -/** - * TODO these are not in the right place, they probably belongs in a base library but to avoid - * having to publish a bunch of connectors I'm putting it here temporarily - * - */ -public class CollectionUtils { - - /** - * Pass in a collection and search term to determine whether any of the values match ignoring case - * - * @param collection the collection of values - * @param search the value to look for - * @return whether the value matches anything in the collection - */ - public static boolean containsIgnoreCase(final Collection collection, final String search) { - return matchingKey(collection, search).isPresent(); - } - - /** - * Convenience method for when you need to check an entire collection for membership in another - * collection. - * - * @param searchCollection the collection you want to check membership in - * @param searchTerms the keys you're looking for - * @return whether all searchTerms are in the searchCollection - */ - public static boolean containsAllIgnoreCase(final Collection searchCollection, final Collection searchTerms) { - if (searchTerms.isEmpty()) { - // There isn't a good behavior for an empty collection. Without this check, an empty collection - // would always return - // true, but it feels misleading to say that the searchCollection does "contain all" when - // searchTerms is empty - throw new IllegalArgumentException("Search Terms collection may not be empty"); - } - return searchTerms.stream().allMatch(term -> containsIgnoreCase(searchCollection, term)); - } - - /** - * From a collection of strings, return an entry which matches the search term ignoring case - * - * @param collection the collection to search - * @param search the key you're looking for - * @return an Optional value which might contain the key that matches the search - */ - public static Optional matchingKey(final Collection collection, final String search) { - if (collection.contains(search)) { - return Optional.of(search); - } - return collection.stream().filter(s -> s.equalsIgnoreCase(search)).findFirst(); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/ColumnId.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/ColumnId.java deleted file mode 100644 index 5ad836b48fda5..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/ColumnId.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -/** - * In general, callers should not directly instantiate this class. Use - * {@link SqlGenerator#buildColumnId(String)} instead. - * - * @param name the name of the column in the final table. Callers should prefer - * {@link #name(String)} when using the column in a query. - * @param originalName the name of the field in the raw JSON blob - * @param canonicalName the name of the field according to the destination. Used for deduping. - * Useful if a destination warehouse handles columns ignoring case, but preserves case in the - * table schema. - */ -public record ColumnId(String name, String originalName, String canonicalName) { - - public String name(final String quote) { - return quote + name + quote; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DefaultTyperDeduper.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DefaultTyperDeduper.java deleted file mode 100644 index 9becbb9aa2b93..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DefaultTyperDeduper.java +++ /dev/null @@ -1,387 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -import static io.airbyte.cdk.integrations.base.IntegrationRunner.TYPE_AND_DEDUPE_THREAD_NAME; -import static io.airbyte.cdk.integrations.util.ConnectorExceptionUtil.getResultsOrLogAndThrowFirst; -import static io.airbyte.integrations.base.destination.typing_deduping.FutureUtils.getCountOfTypeAndDedupeThreads; -import static io.airbyte.integrations.base.destination.typing_deduping.FutureUtils.reduceExceptions; -import static java.util.Collections.singleton; -import static java.util.stream.Collectors.toMap; - -import io.airbyte.cdk.integrations.destination.StreamSyncSummary; -import io.airbyte.commons.concurrency.CompletableFutures; -import io.airbyte.commons.functional.Either; -import io.airbyte.integrations.base.destination.typing_deduping.migrators.Migration; -import io.airbyte.integrations.base.destination.typing_deduping.migrators.MinimumDestinationState; -import io.airbyte.protocol.models.v0.DestinationSyncMode; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.CompletionStage; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.locks.Lock; -import java.util.concurrent.locks.ReadWriteLock; -import java.util.concurrent.locks.ReentrantLock; -import java.util.concurrent.locks.ReentrantReadWriteLock; -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.lang3.concurrent.BasicThreadFactory; -import org.apache.commons.lang3.tuple.Pair; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * An abstraction over SqlGenerator and DestinationHandler. Destinations will still need to call - * {@code new CatalogParser(new FooSqlGenerator()).parseCatalog()}, but should otherwise avoid - * interacting directly with these classes. - *

    - * In a typical sync, destinations should call the methods: - *

      - *
    1. {@link #prepareFinalTables()} once at the start of the sync
    2. - *
    3. {@link #typeAndDedupe(String, String, boolean)} as needed throughout the sync
    4. - *
    5. {@link #commitFinalTables()} once at the end of the sync
    6. - *
    - * Note that #prepareTables() initializes some internal state. The other methods will throw an - * exception if that method was not called. - */ -public class DefaultTyperDeduper implements TyperDeduper { - - private static final Logger LOGGER = LoggerFactory.getLogger(TyperDeduper.class); - - private static final String NO_SUFFIX = ""; - private static final String TMP_OVERWRITE_TABLE_SUFFIX = "_airbyte_tmp"; - - private final SqlGenerator sqlGenerator; - private final DestinationHandler destinationHandler; - - private final DestinationV1V2Migrator v1V2Migrator; - private final V2TableMigrator v2TableMigrator; - private final List> migrations; - private final ParsedCatalog parsedCatalog; - private Set overwriteStreamsWithTmpTable; - private final Set> streamsWithSuccessfulSetup; - private final Map initialRawTableStateByStream; - // We only want to run a single instance of T+D per stream at a time. These objects are used for - // synchronization per stream. - // Use a read-write lock because we need the same semantics: - // * any number of threads can insert to the raw tables at the same time, as long as T+D isn't - // running (i.e. "read lock") - // * T+D must run in complete isolation (i.e. "write lock") - private final Map tdLocks; - // These locks are used to prevent multiple simultaneous attempts to T+D the same stream. - // We use tryLock with these so that we don't queue up multiple T+D runs for the same stream. - private final Map internalTdLocks; - - private final ExecutorService executorService; - private List> destinationInitialStatuses; - - public DefaultTyperDeduper(final SqlGenerator sqlGenerator, - final DestinationHandler destinationHandler, - final ParsedCatalog parsedCatalog, - final DestinationV1V2Migrator v1V2Migrator, - final V2TableMigrator v2TableMigrator, - final List> migrations) { - this.sqlGenerator = sqlGenerator; - this.destinationHandler = destinationHandler; - this.parsedCatalog = parsedCatalog; - this.v1V2Migrator = v1V2Migrator; - this.v2TableMigrator = v2TableMigrator; - this.migrations = migrations; - this.initialRawTableStateByStream = new ConcurrentHashMap<>(); - this.streamsWithSuccessfulSetup = ConcurrentHashMap.newKeySet(parsedCatalog.streams().size()); - this.tdLocks = new ConcurrentHashMap<>(); - this.internalTdLocks = new ConcurrentHashMap<>(); - this.executorService = Executors.newFixedThreadPool(getCountOfTypeAndDedupeThreads(), - new BasicThreadFactory.Builder().namingPattern(TYPE_AND_DEDUPE_THREAD_NAME).build()); - } - - public DefaultTyperDeduper( - final SqlGenerator sqlGenerator, - final DestinationHandler destinationHandler, - final ParsedCatalog parsedCatalog, - final DestinationV1V2Migrator v1V2Migrator, - final List> migrations) { - this(sqlGenerator, destinationHandler, parsedCatalog, v1V2Migrator, new NoopV2TableMigrator(), migrations); - } - - @Override - public void prepareSchemasAndRunMigrations() throws Exception { - // Technically kind of weird to call this here, but it's the best place we have. - // Ideally, we'd create just airbyte_internal here, and defer creating the final table schemas - // until prepareFinalTables... but it doesn't really matter. - TyperDeduperUtil.prepareSchemas(sqlGenerator, destinationHandler, parsedCatalog); - - TyperDeduperUtil.executeWeirdMigrations( - executorService, - sqlGenerator, - destinationHandler, - v1V2Migrator, - v2TableMigrator, - parsedCatalog); - - destinationInitialStatuses = TyperDeduperUtil.executeRawTableMigrations( - executorService, - destinationHandler, - migrations, - destinationHandler.gatherInitialState(parsedCatalog.streams())); - - // Commit our destination states immediately. - // Technically, migrations aren't done until we execute the soft reset. - // However, our state contains a needsSoftReset flag, so we can commit that we already executed the - // migration - // and even if we fail to run the soft reset in this sync, future syncs will see the soft reset flag - // and finish it for us. - destinationHandler.commitDestinationStates(destinationInitialStatuses.stream().collect(toMap( - state -> state.streamConfig().id(), - DestinationInitialStatus::destinationState))); - } - - @Override - public void prepareFinalTables() throws Exception { - if (overwriteStreamsWithTmpTable != null) { - throw new IllegalStateException("Tables were already prepared."); - } - overwriteStreamsWithTmpTable = ConcurrentHashMap.newKeySet(); - LOGGER.info("Preparing tables"); - - final List> prepareTablesFutureResult = CompletableFutures.allOf( - destinationInitialStatuses.stream().map(this::prepareTablesFuture).toList()).toCompletableFuture().join(); - getResultsOrLogAndThrowFirst("The following exceptions were thrown attempting to prepare tables:\n", prepareTablesFutureResult); - - destinationHandler.commitDestinationStates(destinationInitialStatuses.stream().collect(toMap( - state -> state.streamConfig().id(), - // If we get here, then we've executed all soft resets. Force the soft reset flag to false. - state -> state.destinationState().withSoftReset(false)))); - } - - private CompletionStage prepareTablesFuture(final DestinationInitialStatus initialState) { - // For each stream, make sure that its corresponding final table exists. - // Also, for OVERWRITE streams, decide if we're writing directly to the final table, or into an - // _airbyte_tmp table. - return CompletableFuture.supplyAsync(() -> { - final var stream = initialState.streamConfig(); - try { - if (initialState.isFinalTablePresent()) { - LOGGER.info("Final Table exists for stream {}", stream.id().finalName()); - // The table already exists. Decide whether we're writing to it directly, or using a tmp table. - if (stream.destinationSyncMode() == DestinationSyncMode.OVERWRITE) { - if (!initialState.isFinalTableEmpty() || initialState.isSchemaMismatch()) { - // We want to overwrite an existing table. Write into a tmp table. We'll overwrite the table at the - // end of the sync. - overwriteStreamsWithTmpTable.add(stream.id()); - // overwrite an existing tmp table if needed. - destinationHandler.execute(sqlGenerator.createTable(stream, TMP_OVERWRITE_TABLE_SUFFIX, true)); - LOGGER.info("Using temp final table for stream {}, will overwrite existing table at end of sync", stream.id().finalName()); - } else { - LOGGER.info("Final Table for stream {} is empty and matches the expected v2 format, writing to table directly", - stream.id().finalName()); - } - } else if (initialState.isSchemaMismatch() || initialState.destinationState().needsSoftReset()) { - // We're loading data directly into the existing table. - // Make sure it has the right schema. - // Also, if a raw table migration wants us to do a soft reset, do that here. - TypeAndDedupeTransaction.executeSoftReset(sqlGenerator, destinationHandler, stream); - } - } else { - LOGGER.info("Final Table does not exist for stream {}, creating.", stream.id().finalName()); - // The table doesn't exist. Create it. Don't force. - destinationHandler.execute(sqlGenerator.createTable(stream, NO_SUFFIX, false)); - } - - initialRawTableStateByStream.put(stream.id(), initialState.initialRawTableStatus()); - - streamsWithSuccessfulSetup.add(Pair.of(stream.id().originalNamespace(), stream.id().originalName())); - - // Use fair locking. This slows down lock operations, but that performance hit is by far dwarfed - // by our IO costs. This lock needs to be fair because the raw table writers are running almost - // constantly, - // and we don't want them to starve T+D. - tdLocks.put(stream.id(), new ReentrantReadWriteLock(true)); - // This lock doesn't need to be fair; any T+D instance is equivalent and we'll skip T+D if we can't - // immediately acquire the lock. - internalTdLocks.put(stream.id(), new ReentrantLock()); - - return null; - } catch (final Exception e) { - LOGGER.error("Exception occurred while preparing tables for stream " + stream.id().originalName(), e); - throw new RuntimeException(e); - } - }, this.executorService); - } - - public void typeAndDedupe(final String originalNamespace, final String originalName, final boolean mustRun) throws Exception { - final var streamConfig = parsedCatalog.getStream(originalNamespace, originalName); - final CompletableFuture> task = typeAndDedupeTask(streamConfig, mustRun); - reduceExceptions( - singleton(task), - String.format( - "The Following Exceptions were thrown while typing and deduping %s.%s:\n", - originalNamespace, - originalName)); - } - - @Override - public Lock getRawTableInsertLock(final String originalNamespace, final String originalName) { - final var streamConfig = parsedCatalog.getStream(originalNamespace, originalName); - return tdLocks.get(streamConfig.id()).readLock(); - } - - private boolean streamSetupSucceeded(final StreamConfig streamConfig) { - final var originalNamespace = streamConfig.id().originalNamespace(); - final var originalName = streamConfig.id().originalName(); - if (!streamsWithSuccessfulSetup.contains(Pair.of(originalNamespace, originalName))) { - // For example, if T+D setup fails, but the consumer tries to run T+D on all streams during close, - // we should skip it. - LOGGER.warn("Skipping typing and deduping for {}.{} because we could not set up the tables for this stream.", originalNamespace, - originalName); - return false; - } - return true; - } - - public CompletableFuture> typeAndDedupeTask(final StreamConfig streamConfig, final boolean mustRun) { - return CompletableFuture.supplyAsync(() -> { - final var originalNamespace = streamConfig.id().originalNamespace(); - final var originalName = streamConfig.id().originalName(); - try { - if (!streamSetupSucceeded(streamConfig)) { - return Optional.empty(); - } - - final boolean run; - final Lock internalLock = internalTdLocks.get(streamConfig.id()); - if (mustRun) { - // If we must run T+D, then wait until we acquire the lock. - internalLock.lock(); - run = true; - } else { - // Otherwise, try and get the lock. If another thread already has it, then we should noop here. - run = internalLock.tryLock(); - } - - if (run) { - LOGGER.info("Waiting for raw table writes to pause for {}.{}", originalNamespace, originalName); - final Lock externalLock = tdLocks.get(streamConfig.id()).writeLock(); - externalLock.lock(); - try { - final InitialRawTableStatus initialRawTableStatus = initialRawTableStateByStream.get(streamConfig.id()); - TypeAndDedupeTransaction.executeTypeAndDedupe( - sqlGenerator, - destinationHandler, - streamConfig, - initialRawTableStatus.maxProcessedTimestamp(), - getFinalTableSuffix(streamConfig.id())); - } finally { - LOGGER.info("Allowing other threads to proceed for {}.{}", originalNamespace, originalName); - externalLock.unlock(); - internalLock.unlock(); - } - } else { - LOGGER.info("Another thread is already trying to run typing and deduping for {}.{}. Skipping it here.", originalNamespace, - originalName); - } - return Optional.empty(); - } catch (final Exception e) { - LOGGER.error("Exception occurred while typing and deduping stream " + originalName, e); - return Optional.of(e); - } - }, this.executorService); - } - - @Override - public void typeAndDedupe(final Map streamSyncSummaries) throws Exception { - LOGGER.info("Typing and deduping all tables"); - final Set>> typeAndDedupeTasks = new HashSet<>(); - parsedCatalog.streams().stream() - .filter(streamConfig -> { - // Skip if stream setup failed. - if (!streamSetupSucceeded(streamConfig)) { - return false; - } - // Skip if we don't have any records for this stream. - final StreamSyncSummary streamSyncSummary = streamSyncSummaries.getOrDefault( - streamConfig.id().asStreamDescriptor(), - StreamSyncSummary.DEFAULT); - final boolean nonzeroRecords = streamSyncSummary.recordsWritten() - .map(r -> r > 0) - // If we didn't track record counts during the sync, assume we had nonzero records for this stream - .orElse(true); - final boolean unprocessedRecordsPreexist = initialRawTableStateByStream.get(streamConfig.id()).hasUnprocessedRecords(); - // If this sync emitted records, or the previous sync left behind some unprocessed records, - // then the raw table has some unprocessed records right now. - // Run T+D if either of those conditions are true. - final boolean shouldRunTypingDeduping = nonzeroRecords || unprocessedRecordsPreexist; - if (!shouldRunTypingDeduping) { - LOGGER.info( - "Skipping typing and deduping for stream {}.{} because it had no records during this sync and no unprocessed records from a previous sync.", - streamConfig.id().originalNamespace(), - streamConfig.id().originalName()); - } - return shouldRunTypingDeduping; - }).forEach(streamConfig -> typeAndDedupeTasks.add(typeAndDedupeTask(streamConfig, true))); - CompletableFuture.allOf(typeAndDedupeTasks.toArray(CompletableFuture[]::new)).join(); - reduceExceptions(typeAndDedupeTasks, "The Following Exceptions were thrown while typing and deduping tables:\n"); - } - - /** - * Does any "end of sync" work. For most streams, this is a noop. - *

    - * For OVERWRITE streams where we're writing to a temp table, this is where we swap the temp table - * into the final table. - */ - @Override - public void commitFinalTables() throws Exception { - LOGGER.info("Committing final tables"); - final Set>> tableCommitTasks = new HashSet<>(); - for (final StreamConfig streamConfig : parsedCatalog.streams()) { - if (!streamsWithSuccessfulSetup.contains(Pair.of(streamConfig.id().originalNamespace(), - streamConfig.id().originalName()))) { - LOGGER.warn("Skipping committing final table for for {}.{} because we could not set up the tables for this stream.", - streamConfig.id().originalNamespace(), streamConfig.id().originalName()); - continue; - } - if (DestinationSyncMode.OVERWRITE.equals(streamConfig.destinationSyncMode())) { - tableCommitTasks.add(commitFinalTableTask(streamConfig)); - } - } - CompletableFuture.allOf(tableCommitTasks.toArray(CompletableFuture[]::new)).join(); - reduceExceptions(tableCommitTasks, "The Following Exceptions were thrown while committing final tables:\n"); - } - - private CompletableFuture> commitFinalTableTask(final StreamConfig streamConfig) { - return CompletableFuture.supplyAsync(() -> { - final StreamId streamId = streamConfig.id(); - final String finalSuffix = getFinalTableSuffix(streamId); - if (!StringUtils.isEmpty(finalSuffix)) { - final Sql overwriteFinalTable = sqlGenerator.overwriteFinalTable(streamId, finalSuffix); - LOGGER.info("Overwriting final table with tmp table for stream {}.{}", streamId.originalNamespace(), streamId.originalName()); - try { - destinationHandler.execute(overwriteFinalTable); - } catch (final Exception e) { - LOGGER.error("Exception Occurred while committing final table for stream " + streamId.originalName(), e); - return Optional.of(e); - } - } - return Optional.empty(); - }, this.executorService); - } - - private String getFinalTableSuffix(final StreamId streamId) { - return overwriteStreamsWithTmpTable.contains(streamId) ? TMP_OVERWRITE_TABLE_SUFFIX : NO_SUFFIX; - } - - @Override - public void cleanup() { - LOGGER.info("Cleaning Up type-and-dedupe thread pool"); - this.executorService.shutdown(); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationHandler.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationHandler.java deleted file mode 100644 index 9deec7bca0b17..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationHandler.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -import java.util.List; -import java.util.Map; - -public interface DestinationHandler { - - void execute(final Sql sql) throws Exception; - - /** - * Fetch the current state of the destination for the given streams. This method MUST create the - * airbyte_internal.state table if it does not exist. This method MAY assume the airbyte_internal - * schema already exists. (substitute the appropriate raw table schema if the user is overriding - * it). - */ - List> gatherInitialState(List streamConfigs) throws Exception; - - void commitDestinationStates(final Map destinationStates) throws Exception; - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationV1V2Migrator.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationV1V2Migrator.java deleted file mode 100644 index 1a31d04b9a70c..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationV1V2Migrator.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -public interface DestinationV1V2Migrator { - - /** - * This is the primary entrypoint to this interface - *

    - * Determine whether a migration is necessary for a given stream and if so, migrate the raw table - * and rebuild the final table with a soft reset - * - * @param sqlGenerator the class to use to generate sql - * @param destinationHandler the handler to execute the sql statements - * @param streamConfig the stream to assess migration needs - */ - void migrateIfNecessary( - final SqlGenerator sqlGenerator, - final DestinationHandler destinationHandler, - final StreamConfig streamConfig) - throws TableNotMigratedException, UnexpectedSchemaException, Exception; - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/FutureUtils.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/FutureUtils.java deleted file mode 100644 index 3319af8297a06..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/FutureUtils.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -import io.airbyte.cdk.integrations.util.ConnectorExceptionUtil; -import java.util.Collection; -import java.util.List; -import java.util.Optional; -import java.util.concurrent.CompletableFuture; - -public class FutureUtils { - - private static final int DEFAULT_TD_THREAD_COUNT = 8; - - /** - * Allow for configuring the number of typing and deduping threads via an environment variable in - * the destination container. - * - * @return the number of threads to use in the typing and deduping pool - */ - public static int getCountOfTypeAndDedupeThreads() { - return Optional.ofNullable(System.getenv("TD_THREADS")) - .map(Integer::valueOf) - .orElse(DEFAULT_TD_THREAD_COUNT); - } - - /** - * Log all exceptions from a list of futures, and rethrow the first exception if there is one. This - * mimics the behavior of running the futures in serial, where the first failure - */ - public static void reduceExceptions(final Collection>> potentialExceptions, final String initialMessage) - throws Exception { - final List exceptions = potentialExceptions.stream() - .map(CompletableFuture::join) - .filter(Optional::isPresent) - .map(Optional::get) - .toList(); - ConnectorExceptionUtil.logAllAndThrowFirst(initialMessage, exceptions); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/InitialRawTableStatus.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/InitialRawTableStatus.java deleted file mode 100644 index b39abf1cba29f..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/InitialRawTableStatus.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -import java.time.Instant; -import java.util.Optional; - -public record InitialRawTableStatus(boolean rawTableExists, boolean hasUnprocessedRecords, Optional maxProcessedTimestamp) { - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/NamespacedTableName.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/NamespacedTableName.java deleted file mode 100644 index 89f5a4ba4695c..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/NamespacedTableName.java +++ /dev/null @@ -1,10 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -// yet another namespace, name combo class -public record NamespacedTableName(String namespace, String tableName) { - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/NoOpDestinationV1V2Migrator.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/NoOpDestinationV1V2Migrator.java deleted file mode 100644 index a32f214cec49f..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/NoOpDestinationV1V2Migrator.java +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -public class NoOpDestinationV1V2Migrator implements DestinationV1V2Migrator { - - @Override - public void migrateIfNecessary(final SqlGenerator sqlGenerator, - final DestinationHandler destinationHandler, - final StreamConfig streamConfig) - throws TableNotMigratedException, UnexpectedSchemaException { - // Do nothing - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/NoOpTyperDeduperWithV1V2Migrations.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/NoOpTyperDeduperWithV1V2Migrations.java deleted file mode 100644 index 3b7d35a3fbd8d..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/NoOpTyperDeduperWithV1V2Migrations.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -import static io.airbyte.cdk.integrations.base.IntegrationRunner.TYPE_AND_DEDUPE_THREAD_NAME; -import static io.airbyte.integrations.base.destination.typing_deduping.FutureUtils.getCountOfTypeAndDedupeThreads; -import static java.util.stream.Collectors.toMap; - -import io.airbyte.cdk.integrations.destination.StreamSyncSummary; -import io.airbyte.integrations.base.destination.typing_deduping.migrators.Migration; -import io.airbyte.integrations.base.destination.typing_deduping.migrators.MinimumDestinationState; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.locks.Lock; -import lombok.extern.slf4j.Slf4j; -import org.apache.commons.lang3.concurrent.BasicThreadFactory; - -/** - * This is a NoOp implementation which skips and Typing and Deduping operations and does not emit - * the final tables. However, this implementation still performs V1->V2 migrations and V2 - * json->string migrations in the raw tables. - */ -@Slf4j -public class NoOpTyperDeduperWithV1V2Migrations implements TyperDeduper { - - private final DestinationV1V2Migrator v1V2Migrator; - private final V2TableMigrator v2TableMigrator; - private final List> migrations; - private final ExecutorService executorService; - private final ParsedCatalog parsedCatalog; - private final SqlGenerator sqlGenerator; - private final DestinationHandler destinationHandler; - - public NoOpTyperDeduperWithV1V2Migrations(final SqlGenerator sqlGenerator, - final DestinationHandler destinationHandler, - final ParsedCatalog parsedCatalog, - final DestinationV1V2Migrator v1V2Migrator, - final V2TableMigrator v2TableMigrator, - final List> migrations) { - this.sqlGenerator = sqlGenerator; - this.destinationHandler = destinationHandler; - this.parsedCatalog = parsedCatalog; - this.v1V2Migrator = v1V2Migrator; - this.v2TableMigrator = v2TableMigrator; - this.migrations = migrations; - this.executorService = Executors.newFixedThreadPool(getCountOfTypeAndDedupeThreads(), - new BasicThreadFactory.Builder().namingPattern(TYPE_AND_DEDUPE_THREAD_NAME).build()); - } - - @Override - public void prepareSchemasAndRunMigrations() throws Exception { - TyperDeduperUtil.prepareSchemas(sqlGenerator, destinationHandler, parsedCatalog); - - TyperDeduperUtil.executeWeirdMigrations( - executorService, - sqlGenerator, - destinationHandler, - v1V2Migrator, - v2TableMigrator, - parsedCatalog); - - List> destinationInitialStatuses = TyperDeduperUtil.executeRawTableMigrations( - executorService, - destinationHandler, - migrations, - destinationHandler.gatherInitialState(parsedCatalog.streams())); - - // Commit the updated destination states. - // We don't need to trigger any soft resets, because we don't have any final tables. - destinationHandler.commitDestinationStates(destinationInitialStatuses.stream().collect(toMap( - state -> state.streamConfig().id(), - DestinationInitialStatus::destinationState))); - } - - @Override - public void prepareFinalTables() { - log.info("Skipping prepareFinalTables"); - } - - @Override - public void typeAndDedupe(final String originalNamespace, final String originalName, final boolean mustRun) { - log.info("Skipping TypeAndDedupe"); - } - - @Override - public Lock getRawTableInsertLock(final String originalNamespace, final String originalName) { - return new NoOpRawTableTDLock(); - } - - @Override - public void typeAndDedupe(final Map streamSyncSummaries) { - log.info("Skipping TypeAndDedupe final"); - } - - @Override - public void commitFinalTables() { - log.info("Skipping commitFinalTables final"); - } - - @Override - public void cleanup() { - log.info("Cleaning Up type-and-dedupe thread pool"); - this.executorService.shutdown(); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/NoopTyperDeduper.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/NoopTyperDeduper.java deleted file mode 100644 index 6a312a72b5152..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/NoopTyperDeduper.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -import io.airbyte.cdk.integrations.destination.StreamSyncSummary; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.util.Map; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.locks.Condition; -import java.util.concurrent.locks.Lock; - -/** - * This class should be used while upgrading a destination from V1 to V2. V2 destinations should use - * {@link NoOpTyperDeduperWithV1V2Migrations} for disabling T+D, because it correctly handles - * various migration operations. - */ -public class NoopTyperDeduper implements TyperDeduper { - - @Override - public void prepareSchemasAndRunMigrations() throws Exception { - - } - - @Override - public void prepareFinalTables() { - - } - - @Override - public void typeAndDedupe(final String originalNamespace, final String originalName, final boolean mustRun) { - - } - - @Override - public Lock getRawTableInsertLock(final String originalNamespace, final String originalName) { - // Return a fake lock that does nothing. - return new Lock() { - - @Override - public void lock() { - - } - - @Override - public void lockInterruptibly() { - - } - - @Override - public boolean tryLock() { - // To mimic NoOp behavior always return true that lock is acquired - return true; - } - - @Override - public boolean tryLock(final long time, final TimeUnit unit) { - // To mimic NoOp behavior always return true that lock is acquired - return true; - } - - @Override - public void unlock() { - - } - - @Override - public Condition newCondition() { - return null; - } - - }; - } - - @Override - public void commitFinalTables() { - - } - - @Override - public void typeAndDedupe(final Map streamSyncSummaries) { - - } - - @Override - public void cleanup() { - - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/NoopV2TableMigrator.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/NoopV2TableMigrator.java deleted file mode 100644 index f2f2a9c414972..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/NoopV2TableMigrator.java +++ /dev/null @@ -1,14 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -public class NoopV2TableMigrator implements V2TableMigrator { - - @Override - public void migrateIfNecessary(final StreamConfig streamConfig) { - // do nothing - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/ParsedCatalog.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/ParsedCatalog.java deleted file mode 100644 index fb8d2245232ce..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/ParsedCatalog.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import java.util.List; - -public record ParsedCatalog(List streams) { - - public StreamConfig getStream(AirbyteStreamNameNamespacePair streamId) { - return getStream(streamId.getNamespace(), streamId.getName()); - } - - public StreamConfig getStream(StreamId streamId) { - return getStream(streamId.originalNamespace(), streamId.originalName()); - } - - public StreamConfig getStream(String originalNamespace, String originalName) { - return streams.stream() - .filter(s -> s.id().originalNamespace().equals(originalNamespace) && s.id().originalName().equals(originalName)) - .findFirst() - .orElseThrow(() -> new IllegalArgumentException(String.format( - "Could not find stream %s.%s out of streams %s", - originalNamespace, - originalName, - streams.stream().map(stream -> stream.id().originalNamespace() + "." + stream.id().originalName()).toList()))); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/Sql.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/Sql.java deleted file mode 100644 index bc0c1940566b0..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/Sql.java +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -import java.util.Collections; -import java.util.List; -import java.util.stream.Stream; - -/** - * Represents a list of SQL transactions, where each transaction consists of one or more SQL - * statements. Each transaction MUST NOT contain the BEGIN/COMMIT statements. Each inner list is a - * single transaction, and each String is a single statement within that transaction. - *

    - * Most callers likely only need a single transaction, but e.g. BigQuery disallows running DDL - * inside transactions, and so needs to run sequential "CREATE SCHEMA", "CREATE TABLE" as separate - * transactions. - *

    - * Callers are encouraged to use the static factory methods instead of the public constructor. - */ -public record Sql(List> transactions) { - - public Sql { - transactions.forEach(transaction -> { - if (transaction.isEmpty()) { - throw new IllegalArgumentException("Transaction must not be empty"); - } - if (transaction.stream().anyMatch(s -> s == null || s.isEmpty())) { - throw new IllegalArgumentException("Transaction must not contain empty statements"); - } - }); - } - - /** - * @param begin The SQL statement to start a transaction, typically "BEGIN" - * @param commit The SQL statement to commit a transaction, typically "COMMIT" - * @return A list of SQL strings, each of which represents a transaction. - */ - public List asSqlStrings(final String begin, final String commit) { - return transactions().stream() - .map(transaction -> { - // If there's only one statement, we don't need to wrap it in a transaction. - if (transaction.size() == 1) { - return transaction.get(0); - } - final StringBuilder builder = new StringBuilder(); - builder.append(begin); - builder.append(";\n"); - transaction.forEach(statement -> { - builder.append(statement); - // No semicolon - statements already end with a semicolon - builder.append("\n"); - }); - builder.append(commit); - builder.append(";\n"); - return builder.toString(); - }).toList(); - } - - /** - * Execute a list of SQL statements in a single transaction. - */ - public static Sql transactionally(final List statements) { - return create(List.of(statements)); - } - - public static Sql transactionally(final String... statements) { - return transactionally(Stream.of(statements).toList()); - } - - /** - * Execute each statement as its own transaction. - */ - public static Sql separately(final List statements) { - return create(statements.stream().map(Collections::singletonList).toList()); - } - - public static Sql separately(final String... statements) { - return separately(Stream.of(statements).toList()); - } - - /** - * Convenience method for indicating intent. Equivalent to calling - * {@link #transactionally(String...)} or {@link #separately(String...)} with the same string. - */ - public static Sql of(final String statement) { - return transactionally(statement); - } - - public static Sql concat(final Sql... sqls) { - return create(Stream.of(sqls).flatMap(sql -> sql.transactions.stream()).toList()); - } - - public static Sql concat(final List sqls) { - return create(sqls.stream().flatMap(sql -> sql.transactions.stream()).toList()); - } - - /** - * Utility method to create a Sql object without empty statements/transactions, and appending - * semicolons when needed. - */ - public static Sql create(final List> transactions) { - return new Sql(transactions.stream() - .map(transaction -> transaction.stream() - .filter(statement -> statement != null && !statement.isEmpty()) - .map(statement -> { - if (!statement.trim().endsWith(";")) { - return statement + ";"; - } - return statement; - }) - .toList()) - .filter(transaction -> !transaction.isEmpty()) - .toList()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/SqlGenerator.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/SqlGenerator.java deleted file mode 100644 index bb12237ebbf94..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/SqlGenerator.java +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -import static io.airbyte.integrations.base.destination.typing_deduping.TypeAndDedupeTransaction.SOFT_RESET_SUFFIX; - -import java.time.Instant; -import java.util.Optional; - -public interface SqlGenerator { - - StreamId buildStreamId(String namespace, String name, String rawNamespaceOverride); - - default ColumnId buildColumnId(final String name) { - return buildColumnId(name, ""); - } - - ColumnId buildColumnId(String name, String suffix); - - /** - * Generate a SQL statement to create a fresh table to match the given stream. - *

    - * The generated SQL should throw an exception if the table already exists and {@code force} is - * false. - * - * @param suffix A suffix to add to the stream name. Useful for full refresh overwrite syncs, where - * we write the entire sync to a temp table. - * @param force If true, will overwrite an existing table. If false, will throw an exception if the - * table already exists. If you're passing a non-empty prefix, you likely want to set this to - * true. - */ - Sql createTable(final StreamConfig stream, final String suffix, boolean force); - - /** - * Used to create either the airbyte_internal or final schemas if they don't exist - * - * @param schema the schema to create - * @return SQL to create the schema if it does not exist - */ - Sql createSchema(final String schema); - - /** - * Generate a SQL statement to copy new data from the raw table into the final table. - *

    - * Responsible for: - *

      - *
    • Pulling new raw records from a table (i.e. records with null _airbyte_loaded_at)
    • - *
    • Extracting the JSON fields and casting to the appropriate types
    • - *
    • Handling errors in those casts
    • - *
    • Merging those typed records into an existing table
    • - *
    • Updating the raw records with SET _airbyte_loaded_at = now()
    • - *
    - *

    - * Implementing classes are recommended to break this into smaller methods, which can be tested in - * isolation. However, this interface only requires a single mega-method. - * - * @param finalSuffix the suffix of the final table to write to. If empty string, writes to the - * final table directly. Useful for full refresh overwrite syncs, where we write the entire - * sync to a temp table and then swap it into the final table at the end. - * - * @param minRawTimestamp The latest _airbyte_extracted_at for which all raw records with that - * timestamp have already been typed+deduped. Implementations MAY use this value in a - * {@code _airbyte_extracted_at > minRawTimestamp} filter on the raw table to improve query - * performance. - * @param useExpensiveSaferCasting often the data coming from the source can be faithfully - * represented in the destination without issue, and using a "CAST" expression works fine, - * however sometimes we get badly typed data. In these cases we can use a more expensive - * query which handles casting exceptions. - */ - Sql updateTable(final StreamConfig stream, String finalSuffix, Optional minRawTimestamp, final boolean useExpensiveSaferCasting); - - /** - * Drop the previous final table, and rename the new final table to match the old final table. - *

    - * This method may assume that the stream is an OVERWRITE stream, and that the final suffix is - * non-empty. Callers are responsible for verifying those are true. - */ - Sql overwriteFinalTable(StreamId stream, String finalSuffix); - - /** - * Creates a sql query which will create a v2 raw table from the v1 raw table, then performs a soft - * reset. - * - * @param streamId the stream to migrate - * @param namespace the namespace of the v1 raw table - * @param tableName name of the v2 raw table - * @return a string containing the necessary sql to migrate - */ - Sql migrateFromV1toV2(StreamId streamId, String namespace, String tableName); - - /** - * Typically we need to create a soft reset temporary table and clear loaded at values - * - * @return - */ - default Sql prepareTablesForSoftReset(final StreamConfig stream) { - final Sql createTempTable = createTable(stream, SOFT_RESET_SUFFIX, true); - final Sql clearLoadedAt = clearLoadedAt(stream.id()); - return Sql.concat(createTempTable, clearLoadedAt); - } - - Sql clearLoadedAt(final StreamId streamId); - - /** - * Implementation specific if there is no option to retry again with safe casted SQL or the specific - * cause of the exception can be retried or not. - * - * @return true if the exception should be retried with a safer query - */ - default boolean shouldRetry(final Exception e) { - return true; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/StreamConfig.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/StreamConfig.java deleted file mode 100644 index 67952f916cb36..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/StreamConfig.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -import io.airbyte.protocol.models.v0.DestinationSyncMode; -import io.airbyte.protocol.models.v0.SyncMode; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Optional; - -public record StreamConfig(StreamId id, - SyncMode syncMode, - DestinationSyncMode destinationSyncMode, - List primaryKey, - Optional cursor, - LinkedHashMap columns) { - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/StreamId.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/StreamId.java deleted file mode 100644 index e65cfa72259c5..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/StreamId.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.StreamDescriptor; - -/** - * In general, callers should not directly instantiate this class. Use - * {@link SqlGenerator#buildStreamId(String, String, String)} instead. - *

    - * All names/namespaces are intended to be quoted, but do not explicitly contain quotes. For - * example, finalName might be "foo bar"; the caller is required to wrap that in quotes before using - * it in a query. - * - * @param finalNamespace the namespace where the final table will be created - * @param finalName the name of the final table - * @param rawNamespace the namespace where the raw table will be created (typically "airbyte") - * @param rawName the name of the raw table (typically namespace_name, but may be different if there - * are collisions). There is no rawNamespace because we assume that we're writing raw tables - * to the airbyte namespace. - */ -public record StreamId(String finalNamespace, - String finalName, - String rawNamespace, - String rawName, - String originalNamespace, - String originalName) { - - /** - * Most databases/warehouses use a `schema.name` syntax to identify tables. This is a convenience - * method to generate that syntax. - */ - public String finalTableId(String quote) { - return quote + finalNamespace + quote + "." + quote + finalName + quote; - } - - public String finalTableId(String quote, String suffix) { - return quote + finalNamespace + quote + "." + quote + finalName + suffix + quote; - } - - public String rawTableId(String quote) { - return quote + rawNamespace + quote + "." + quote + rawName + quote; - } - - public String finalName(final String quote) { - return quote + finalName + quote; - } - - public String finalNamespace(final String quote) { - return quote + finalNamespace + quote; - } - - public AirbyteStreamNameNamespacePair asPair() { - return new AirbyteStreamNameNamespacePair(originalName, originalNamespace); - } - - public StreamDescriptor asStreamDescriptor() { - return new StreamDescriptor().withNamespace(originalNamespace).withName(originalName); - } - - /** - * Build the raw table name as namespace + (delimiter) + name. For example, given a stream with - * namespace "public__ab" and name "abab_users", we will end up with raw table name - * "public__ab_ab___ab_abab_users". - *

    - * This logic is intended to solve two problems: - *

      - *
    • The raw table name should be unambiguously parsable into the namespace/name.
    • - *
    • It must be impossible for two different streams to generate the same raw table name.
    • - *
    - * The generated delimiter is guaranteed to not be present in the namespace or name, so it - * accomplishes both of these goals. - */ - public static String concatenateRawTableName(String namespace, String name) { - String plainConcat = namespace + name; - // Pretend we always have at least one underscore, so that we never generate `_raw_stream_` - int longestUnderscoreRun = 1; - for (int i = 0; i < plainConcat.length(); i++) { - // If we've found an underscore, count the number of consecutive underscores - int underscoreRun = 0; - while (i < plainConcat.length() && plainConcat.charAt(i) == '_') { - underscoreRun++; - i++; - } - longestUnderscoreRun = Math.max(longestUnderscoreRun, underscoreRun); - } - - return namespace + "_raw" + "_".repeat(longestUnderscoreRun + 1) + "stream_" + name; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/Struct.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/Struct.java deleted file mode 100644 index c28bfe8761669..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/Struct.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -import java.util.LinkedHashMap; - -/** - * @param properties Use LinkedHashMap to preserve insertion order. - */ -public record Struct(LinkedHashMap properties) implements AirbyteType { - - public static final String TYPE = "STRUCT"; - - @Override - public String getTypeName() { - return TYPE; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/TableNotMigratedException.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/TableNotMigratedException.java deleted file mode 100644 index ee0fa6c10a22b..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/TableNotMigratedException.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -/** - * Exception thrown when a destination's v2 sync is attempting to write to a table which does not - * have the expected columns used by airbyte. - */ -public class TableNotMigratedException extends RuntimeException { - - public TableNotMigratedException(String message) { - super(message); - } - - public TableNotMigratedException(String message, Throwable cause) { - super(message, cause); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/TypeAndDedupeOperationValve.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/TypeAndDedupeOperationValve.java deleted file mode 100644 index d638125371cd2..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/TypeAndDedupeOperationValve.java +++ /dev/null @@ -1,135 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -import io.airbyte.cdk.integrations.base.DestinationConfig; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import java.util.List; -import java.util.concurrent.ConcurrentHashMap; -import java.util.function.Supplier; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * A slightly more complicated way to keep track of when to perform type and dedupe operations per - * stream - */ -public class TypeAndDedupeOperationValve extends ConcurrentHashMap { - - private static final Logger LOGGER = LoggerFactory.getLogger(TypeAndDedupeOperationValve.class); - - private static final long NEGATIVE_MILLIS = -1; - private static final long SIX_HOURS_MILLIS = 1000 * 60 * 60 * 6; - - // New users of airbyte likely want to see data flowing into their tables as soon as possible, and - // we want to catch new errors which might appear early within an incremental sync. - // However, as their destination tables grow in size, typing and de-duping data becomes an expensive - // operation. - // To strike a balance between showing data quickly and not slowing down the entire sync, we use an - // increasing interval based approach, from 0 up to 4 hours. - // This is not fancy, just hard coded intervals. - public static final List typeAndDedupeIncreasingIntervals = List.of(NEGATIVE_MILLIS, SIX_HOURS_MILLIS); - - private static final Supplier SYSTEM_NOW = () -> System.currentTimeMillis(); - - private final ConcurrentHashMap incrementalIndex; - - private final Supplier nowness; - - public TypeAndDedupeOperationValve() { - this(SYSTEM_NOW); - } - - /** - * This constructor is here because mocking System.currentTimeMillis() is a pain :( - * - * @param nownessSupplier Supplier which will return a long value representing now - */ - public TypeAndDedupeOperationValve(final Supplier nownessSupplier) { - super(); - incrementalIndex = new ConcurrentHashMap<>(); - this.nowness = nownessSupplier; - } - - @Override - public Long put(final AirbyteStreamNameNamespacePair key, final Long value) { - if (!incrementalIndex.containsKey(key)) { - incrementalIndex.put(key, 0); - } - return super.put(key, value); - - } - - /** - * Adds a stream specific timestamp to track type and dedupe operations - * - * @param key the AirbyteStreamNameNamespacePair to track - */ - public void addStream(final AirbyteStreamNameNamespacePair key) { - put(key, nowness.get()); - } - - public void addStreamIfAbsent(final AirbyteStreamNameNamespacePair key) { - putIfAbsent(key, nowness.get()); - incrementalIndex.putIfAbsent(key, 0); - } - - /** - * Whether we should type and dedupe at this point in time for this particular stream. - * - * @param key the stream in question - * @return a boolean indicating whether we have crossed the interval threshold for typing and - * deduping. - */ - public boolean readyToTypeAndDedupe(final AirbyteStreamNameNamespacePair key) { - if (!DestinationConfig.getInstance().getBooleanValue("enable_incremental_final_table_updates")) { - LOGGER.info("Skipping Incremental Typing and Deduping"); - return false; - } - if (!containsKey(key)) { - return false; - } - - return nowness.get() - get(key) > typeAndDedupeIncreasingIntervals.get(incrementalIndex.get(key)); - } - - /** - * Increment the interval at which typing and deduping should occur for the stream, max out at last - * index of {@link TypeAndDedupeOperationValve#typeAndDedupeIncreasingIntervals} - * - * @param key the stream to increment the interval of - * @return the index of the typing and deduping interval associated with this stream - */ - public int incrementInterval(final AirbyteStreamNameNamespacePair key) { - if (incrementalIndex.get(key) < typeAndDedupeIncreasingIntervals.size() - 1) { - incrementalIndex.put(key, incrementalIndex.get(key) + 1); - } - return incrementalIndex.get(key); - } - - /** - * Meant to be called after - * {@link TypeAndDedupeOperationValve#readyToTypeAndDedupe(AirbyteStreamNameNamespacePair)} will set - * a streams last operation to the current time and increase its index reference in - * {@link TypeAndDedupeOperationValve#typeAndDedupeIncreasingIntervals} - * - * @param key the stream to update - */ - public void updateTimeAndIncreaseInterval(final AirbyteStreamNameNamespacePair key) { - put(key, nowness.get()); - incrementInterval(key); - } - - /** - * Get the current interval for the stream - * - * @param key the stream in question - * @return a long value representing the length of the interval milliseconds - */ - public Long getIncrementInterval(final AirbyteStreamNameNamespacePair key) { - return typeAndDedupeIncreasingIntervals.get(incrementalIndex.get(key)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/TypeAndDedupeTransaction.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/TypeAndDedupeTransaction.java deleted file mode 100644 index 63bb3b6470c58..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/TypeAndDedupeTransaction.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -import java.time.Instant; -import java.util.Optional; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class TypeAndDedupeTransaction { - - public static final String SOFT_RESET_SUFFIX = "_ab_soft_reset"; - private static final Logger LOGGER = LoggerFactory.getLogger(TypeAndDedupeTransaction.class); - - /** - * It can be expensive to build the errors array in the airbyte_meta column, so we first attempt an - * 'unsafe' transaction which assumes everything is typed correctly. If that fails, we will run a - * more expensive query which handles casting errors - * - * @param sqlGenerator for generating sql for the destination - * @param destinationHandler for executing sql created - * @param streamConfig which stream to operate on - * @param minExtractedAt to reduce the amount of data in the query - * @param suffix table suffix for temporary tables - * @throws Exception if the safe query fails - */ - public static void executeTypeAndDedupe(final SqlGenerator sqlGenerator, - final DestinationHandler destinationHandler, - final StreamConfig streamConfig, - final Optional minExtractedAt, - final String suffix) - throws Exception { - try { - LOGGER.info("Attempting typing and deduping for {}.{} with suffix {}", streamConfig.id().originalNamespace(), streamConfig.id().originalName(), - suffix); - final Sql unsafeSql = sqlGenerator.updateTable(streamConfig, suffix, minExtractedAt, false); - destinationHandler.execute(unsafeSql); - } catch (final Exception e) { - if (sqlGenerator.shouldRetry(e)) { - // TODO Destination specific non-retryable exceptions should be added. - LOGGER.error("Encountered Exception on unsafe SQL for stream {} {} with suffix {}, attempting with error handling", - streamConfig.id().originalNamespace(), streamConfig.id().originalName(), suffix, e); - final Sql saferSql = sqlGenerator.updateTable(streamConfig, suffix, minExtractedAt, true); - destinationHandler.execute(saferSql); - } else { - LOGGER.error("Encountered Exception on unsafe SQL for stream {} {} with suffix {}, Retry is skipped", - streamConfig.id().originalNamespace(), streamConfig.id().originalName(), suffix, e); - throw e; - } - } - } - - /** - * Everything in - * {@link TypeAndDedupeTransaction#executeTypeAndDedupe(SqlGenerator, DestinationHandler, StreamConfig, Optional, String)} - * but with a little extra prep work for the soft reset temp tables - * - * @param sqlGenerator for generating sql for the destination - * @param destinationHandler for executing sql created - * @param streamConfig which stream to operate on - * @throws Exception if the safe query fails - */ - public static void executeSoftReset(final SqlGenerator sqlGenerator, - final DestinationHandler destinationHandler, - final StreamConfig streamConfig) - throws Exception { - LOGGER.info("Attempting soft reset for stream {} {}", streamConfig.id().originalNamespace(), streamConfig.id().originalName()); - destinationHandler.execute(sqlGenerator.prepareTablesForSoftReset(streamConfig)); - executeTypeAndDedupe(sqlGenerator, destinationHandler, streamConfig, Optional.empty(), SOFT_RESET_SUFFIX); - destinationHandler.execute(sqlGenerator.overwriteFinalTable(streamConfig.id(), SOFT_RESET_SUFFIX)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/TyperDeduper.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/TyperDeduper.java deleted file mode 100644 index 37d34643b7201..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/TyperDeduper.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -import io.airbyte.cdk.integrations.destination.StreamSyncSummary; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.util.Map; -import java.util.concurrent.locks.Lock; - -/* - * This class wants to do three separate things, but not all of them actually happen here right now: - * * A migration runner, which handles any changes in raw tables (#prepareSchemasAndRawTables) * A - * raw table creator, which creates any missing raw tables (currently handled in e.g. - * GeneralStagingFunctions.onStartFunction, BigQueryStagingConsumerFactory.onStartFunction, etc.) * - * A T+D runner, which manages the final tables (#prepareFinalTables, #typeAndDedupe, etc.) - * - * These would be injectable to the relevant locations, so that we can have: * DV2 destinations with - * T+D enabled (i.e. all three objects instantiated for real) * DV2 destinations with T+D disabled - * (i.e. noop T+D runner but the other two objects for real) * DV1 destinations (i.e. all three - * objects as noop) - * - * Even more ideally, we'd create an instance per stream, instead of having one instance for the - * entire sync. This would massively simplify all the state contained in our implementations - see - * DefaultTyperDeduper's pile of Sets and Maps. - * - * Unfortunately, it's just a pain to inject these objects to everywhere they need to be, and we'd - * need to refactor part of the async framework on top of that. There's an obvious overlap with the - * async framework's onStart function... which we should deal with eventually. - */ -public interface TyperDeduper { - - /** - * Does two things: Set up the schemas for the sync (both airbyte_internal and final table schemas), - * and execute any raw table migrations. These migrations might include: Upgrading v1 raw tables to - * v2, adding a column to the raw tables, etc. In general, this method shouldn't actually create the - * raw tables; the only exception is in the V1 -> V2 migration. - *

    - * This method should be called BEFORE creating raw tables, because the V1V2 migration might create - * the raw tables. - *

    - * This method may affect the behavior of {@link #prepareFinalTables()}. For example, modifying a - * raw table may require us to run a soft reset. However, we should defer that soft reset until - * {@link #prepareFinalTables()}. - */ - void prepareSchemasAndRunMigrations() throws Exception; - - /** - * Create the tables that T+D will write to during the sync. In OVERWRITE mode, these might not be - * the true final tables. Specifically, other than an initial sync (i.e. table does not exist, or is - * empty) we write to a temporary final table, and swap it into the true final table at the end of - * the sync. This is to prevent user downtime during a sync. - *

    - * This method should be called AFTER creating the raw tables, because it may run a soft reset - * (which requires the raw tables to exist). - */ - void prepareFinalTables() throws Exception; - - /** - * Suggest that we execute typing and deduping for a single stream (i.e. fetch new raw records into - * the final table, etc.). - *

    - * This method is thread-safe; multiple threads can call it concurrently. If T+D is already running - * for the given stream, this method may choose to do nothing. If a caller wishes to force T+D to - * run (for example, at the end of a sync), they may set {@code mustRun} to true. - *

    - * This method relies on callers to prevent concurrent modification to the underlying raw tables. - * This is most easily accomplished using {@link #getRawTableInsertLock(String, String)}, if the - * caller guards all raw table writes using {@code getRawTableInsertLock().lock()} and - * {@code getRawTableInsertLock().unlock()}. While {@code typeAndDedupe} is executing, that lock - * will be unavailable. However, callers are free to enforce this in other ways (for example, - * single- threaded callers do not need to use the lock). - * - * @param originalNamespace The stream's namespace, as declared in the configured catalog - * @param originalName The stream's name, as declared in the configured catalog - */ - void typeAndDedupe(String originalNamespace, String originalName, boolean mustRun) throws Exception; - - /** - * Get the lock that should be used to synchronize inserts to the raw table for a given stream. This - * lock permits any number of threads to hold the lock, but - * {@link #typeAndDedupe(String, String, boolean)} will not proceed while this lock is held. - *

    - * This lock provides fairness guarantees, i.e. typeAndDedupe will not starve while waiting for the - * lock (and similarly, raw table writers will not starve if many typeAndDedupe calls are queued). - */ - Lock getRawTableInsertLock(final String originalNamespace, final String originalName); - - /** - * Does any "end of sync" work. For most streams, this is a noop. - *

    - * For OVERWRITE streams where we're writing to a temp table, this is where we swap the temp table - * into the final table. - * - * @param streamSyncSummaries Information about what happened during the sync. Implementations - * SHOULD use this information to skip T+D when possible (this is not a requirement for - * correctness, but does allow us to save time/money). This parameter MUST NOT be null. - * Streams MAY be omitted, which will be treated as though they were mapped to - * {@link StreamSyncSummary#DEFAULT}. - */ - void typeAndDedupe(Map streamSyncSummaries) throws Exception; - - void commitFinalTables() throws Exception; - - void cleanup(); - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/UnexpectedSchemaException.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/UnexpectedSchemaException.java deleted file mode 100644 index 05f0fe6041cdd..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/UnexpectedSchemaException.java +++ /dev/null @@ -1,13 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -public class UnexpectedSchemaException extends RuntimeException { - - public UnexpectedSchemaException(String message) { - super(message); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/Union.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/Union.java deleted file mode 100644 index c50e223576986..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/Union.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -import com.fasterxml.jackson.databind.JsonNode; -import java.util.Comparator; -import java.util.LinkedHashMap; -import java.util.List; - -/** - * Represents a {type: [a, b, ...]} schema. This is theoretically equivalent to {oneOf: [{type: a}, - * {type: b}, ...]} but legacy normalization only handles the {type: [...]} schemas. - *

    - * Eventually we should: - *

      - *
    1. Announce a breaking change to handle both oneOf styles the same
    2. - *
    3. Test against some number of API sources to verify that they won't break badly
    4. - *
    5. Update {@link AirbyteType#fromJsonSchema(JsonNode)} to parse both styles into - * SupportedOneOf
    6. - *
    7. Delete UnsupportedOneOf
    8. - *
    - */ -public record Union(List options) implements AirbyteType { - - public static final String TYPE = "UNION"; - - /** - * This is a hack to handle weird schemas like {type: [object, string]}. If a stream's top-level - * schema looks like this, we still want to be able to extract the object properties (i.e. treat it - * as though the string option didn't exist). - * - * @throws IllegalArgumentException if we cannot extract columns from this schema - */ - public LinkedHashMap asColumns() { - final long numObjectOptions = options.stream().filter(o -> o instanceof Struct).count(); - if (numObjectOptions > 1) { - LOGGER.error("Can't extract columns from a schema with multiple object options"); - return new LinkedHashMap<>(); - } - - return (options.stream().filter(o -> o instanceof Struct).findFirst()) - .map(o -> ((Struct) o).properties()) - .orElseGet(() -> { - LOGGER.error("Can't extract columns from a schema with no object options"); - return new LinkedHashMap<>(); - }); - } - - // Picks which type in a Union takes precedence - public AirbyteType chooseType() { - final Comparator comparator = Comparator.comparing(t -> { - if (t instanceof Array) { - return -2; - } else if (t instanceof Struct) { - return -1; - } else if (t instanceof final AirbyteProtocolType p) { - return List.of(AirbyteProtocolType.values()).indexOf(p); - } - return Integer.MAX_VALUE; - }); - - return options.stream().min(comparator).orElse(AirbyteProtocolType.UNKNOWN); - } - - @Override - public String getTypeName() { - return TYPE; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/UnsupportedOneOf.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/UnsupportedOneOf.java deleted file mode 100644 index 417e7f5cc5c99..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/UnsupportedOneOf.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -import java.util.List; - -/** - * Represents a {oneOf: [...]} schema. - *

    - * This is purely a legacy type that we should eventually delete. See also {@link Union}. - */ -public record UnsupportedOneOf(List options) implements AirbyteType { - - public static final String TYPE = "UNSUPPORTED_ONE_OF"; - - @Override - public String getTypeName() { - return TYPE; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/V2TableMigrator.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/V2TableMigrator.java deleted file mode 100644 index ecc2d4ddd74c1..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/V2TableMigrator.java +++ /dev/null @@ -1,15 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -/** - * Prefer {@link io.airbyte.integrations.base.destination.typing_deduping.migrators.Migration} - * instead. - */ -public interface V2TableMigrator { - - void migrateIfNecessary(final StreamConfig streamConfig) throws Exception; - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/AirbyteProtocolType.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/AirbyteProtocolType.kt new file mode 100644 index 0000000000000..ce79d8cfcb99a --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/AirbyteProtocolType.kt @@ -0,0 +1,97 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +import com.fasterxml.jackson.databind.JsonNode +import java.util.* + +/** + * Protocol types are ordered by precedence in the case of a Union that contains multiple types. + * Priority is given to wider scope types over narrower ones. (Note that because of dedup logic in + * [AirbyteType.fromJsonSchema], at most one string or date/time type can exist in a Union.) + */ +enum class AirbyteProtocolType : AirbyteType { + STRING, + DATE, + TIME_WITHOUT_TIMEZONE, + TIME_WITH_TIMEZONE, + TIMESTAMP_WITHOUT_TIMEZONE, + TIMESTAMP_WITH_TIMEZONE, + NUMBER, + INTEGER, + BOOLEAN, + UNKNOWN; + + override val typeName: String + get() = this.name + + companion object { + private fun matches(type: String): AirbyteProtocolType { + try { + return valueOf(type.uppercase(Locale.getDefault())) + } catch (e: IllegalArgumentException) { + AirbyteType.Companion.LOGGER.error( + String.format( + "Could not find matching AirbyteProtocolType for \"%s\": %s", + type, + e + ) + ) + return UNKNOWN + } + } + + // Extracts the appropriate protocol type from the representative JSON + fun fromJson(node: JsonNode): AirbyteProtocolType { + // JSON could be a string (ex: "number") + if (node.isTextual) { + return matches(node.asText()) + } + + // or, JSON could be a node with fields + val propertyType = node["type"] + val airbyteType = node["airbyte_type"] + val format = node["format"] + + if (AirbyteType.Companion.nodeMatches(propertyType, "boolean")) { + return BOOLEAN + } else if (AirbyteType.Companion.nodeMatches(propertyType, "integer")) { + return INTEGER + } else if (AirbyteType.Companion.nodeMatches(propertyType, "number")) { + return if (AirbyteType.Companion.nodeMatches(airbyteType, "integer")) INTEGER + else NUMBER + } else if (AirbyteType.Companion.nodeMatches(propertyType, "string")) { + if (AirbyteType.Companion.nodeMatches(format, "date")) { + return DATE + } else if (AirbyteType.Companion.nodeMatches(format, "time")) { + if (AirbyteType.Companion.nodeMatches(airbyteType, "time_without_timezone")) { + return TIME_WITHOUT_TIMEZONE + } else if ( + AirbyteType.Companion.nodeMatches(airbyteType, "time_with_timezone") + ) { + return TIME_WITH_TIMEZONE + } + } else if (AirbyteType.Companion.nodeMatches(format, "date-time")) { + if ( + AirbyteType.Companion.nodeMatches(airbyteType, "timestamp_without_timezone") + ) { + return TIMESTAMP_WITHOUT_TIMEZONE + } else if ( + airbyteType == null || + AirbyteType.Companion.nodeMatches( + airbyteType, + "timestamp_with_timezone" + ) + ) { + return TIMESTAMP_WITH_TIMEZONE + } + } else { + return STRING + } + } + + return UNKNOWN + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/AirbyteType.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/AirbyteType.kt new file mode 100644 index 0000000000000..a89a77f11031f --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/AirbyteType.kt @@ -0,0 +1,129 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.node.ObjectNode +import com.fasterxml.jackson.databind.node.TextNode +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +interface AirbyteType { + val typeName: String + + companion object { + /** + * The most common call pattern is probably to use this method on the stream schema, verify + * that it's an [Struct] schema, and then call [Struct.properties] to get the columns. + * + * If the top-level schema is not an object, then we can't really do anything with it, and + * should probably fail the sync. (but see also [Union.asColumns]). + */ + @JvmStatic + fun fromJsonSchema(schema: JsonNode): AirbyteType { + try { + val topLevelType = schema["type"] + if (topLevelType != null) { + if (topLevelType.isTextual) { + if (nodeMatches(topLevelType, "object")) { + return getStruct(schema) + } else if (nodeMatches(topLevelType, "array")) { + return getArray(schema) + } + } else if (topLevelType.isArray) { + return fromArrayJsonSchema(schema, topLevelType) + } + } else if (schema.hasNonNull("oneOf")) { + val options: MutableList = ArrayList() + schema["oneOf"].elements().forEachRemaining { element: JsonNode -> + options.add(fromJsonSchema(element)) + } + return UnsupportedOneOf(options) + } else if (schema.hasNonNull("properties")) { + // The schema has neither type nor oneof, but it does have properties. Assume + // we're looking at a + // struct. + // This is for backwards-compatibility with legacy normalization. + return getStruct(schema) + } + return AirbyteProtocolType.Companion.fromJson(schema) + } catch (e: Exception) { + LOGGER.error("Exception parsing JSON schema {}: {}; returning UNKNOWN.", schema, e) + return AirbyteProtocolType.UNKNOWN + } + } + + fun nodeMatches(node: JsonNode?, value: String?): Boolean { + if (node == null || !node.isTextual) { + return false + } + return node == TextNode.valueOf(value) + } + + private fun getStruct(schema: JsonNode): Struct { + val propertiesMap = LinkedHashMap() + val properties = schema["properties"] + properties?.fields()?.forEachRemaining { property: Map.Entry -> + val key = property.key + val value = property.value + propertiesMap[key] = fromJsonSchema(value) + } + return Struct(propertiesMap) + } + + private fun getArray(schema: JsonNode): Array { + val items = schema["items"] + return if (items == null) { + Array(AirbyteProtocolType.UNKNOWN) + } else { + Array(fromJsonSchema(items)) + } + } + + private fun fromArrayJsonSchema(schema: JsonNode, array: JsonNode): AirbyteType { + val typeOptions: MutableList = ArrayList() + array.elements().forEachRemaining { element: JsonNode -> + // ignore "null" type and remove duplicates + val type = element.asText("") + if ("null" != type && !typeOptions.contains(type)) { + typeOptions.add(element.asText()) + } + } + + // we encounter an array of types that actually represents a single type rather than a + // Union + if (typeOptions.size == 1) { + return if (typeOptions[0] == "object") { + getStruct(schema) + } else if (typeOptions[0] == "array") { + getArray(schema) + } else { + AirbyteProtocolType.Companion.fromJson( + getTrimmedJsonSchema(schema, typeOptions[0]) + ) + } + } + + // Recurse into a schema that forces a specific one of each option + val options = + typeOptions + .stream() + .map { typeOption: String -> + fromJsonSchema(getTrimmedJsonSchema(schema, typeOption)) + } + .toList() + return Union(options) + } + + // Duplicates the JSON schema but keeps only one type + private fun getTrimmedJsonSchema(schema: JsonNode, type: String): JsonNode { + val schemaClone = schema.deepCopy() + // schema is guaranteed to be an object here, because we know it has a `type` key + (schemaClone as ObjectNode).put("type", type) + return schemaClone + } + + val LOGGER: Logger = LoggerFactory.getLogger(AirbyteType::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/AlterTableReport.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/AlterTableReport.kt new file mode 100644 index 0000000000000..5e6df09a0b44f --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/AlterTableReport.kt @@ -0,0 +1,36 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +import java.util.stream.Stream + +class AlterTableReport( + columnsToAdd: Set, + columnsToRemove: Set, + columnsToChangeType: Set, + isDestinationV2Format: Boolean +) { + val isNoOp: Boolean + /** + * A no-op for an AlterTableReport is when the existing table matches the expected schema + * + * @return whether the schema matches + */ + get() = + isDestinationV2Format && + Stream.of(this.columnsToAdd, this.columnsToRemove, this.columnsToChangeType) + .allMatch { obj: Set -> obj.isEmpty() } + + val columnsToAdd: Set + val columnsToRemove: Set + val columnsToChangeType: Set + val isDestinationV2Format: Boolean + + init { + this.columnsToAdd = columnsToAdd + this.columnsToRemove = columnsToRemove + this.columnsToChangeType = columnsToChangeType + this.isDestinationV2Format = isDestinationV2Format + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/Array.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/Array.kt new file mode 100644 index 0000000000000..a7f8f2d9f77f5 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/Array.kt @@ -0,0 +1,12 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +data class Array(val items: AirbyteType) : AirbyteType { + override val typeName: String = TYPE + + companion object { + const val TYPE: String = "ARRAY" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/BaseDestinationV1V2Migrator.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/BaseDestinationV1V2Migrator.kt new file mode 100644 index 0000000000000..ae080b8162ed3 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/BaseDestinationV1V2Migrator.kt @@ -0,0 +1,228 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +import io.airbyte.cdk.integrations.base.JavaBaseConstants +import io.airbyte.protocol.models.v0.DestinationSyncMode +import java.util.* +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +abstract class BaseDestinationV1V2Migrator : DestinationV1V2Migrator { + @Throws(Exception::class) + override fun migrateIfNecessary( + sqlGenerator: SqlGenerator, + destinationHandler: DestinationHandler<*>, + streamConfig: StreamConfig + ) { + LOGGER.info( + "Assessing whether migration is necessary for stream {}", + streamConfig.id!!.finalName + ) + if (shouldMigrate(streamConfig)) { + LOGGER.info("Starting v2 Migration for stream {}", streamConfig.id!!.finalName) + migrate(sqlGenerator, destinationHandler, streamConfig) + LOGGER.info( + "V2 Migration completed successfully for stream {}", + streamConfig.id!!.finalName + ) + } else { + LOGGER.info("No Migration Required for stream: {}", streamConfig.id!!.finalName) + } + } + + /** + * Determine whether a given stream needs to be migrated from v1 to v2 + * + * @param streamConfig the stream in question + * @return whether to migrate the stream + */ + @Throws(Exception::class) + fun shouldMigrate(streamConfig: StreamConfig): Boolean { + val v1RawTable = convertToV1RawName(streamConfig) + LOGGER.info( + "Checking whether v1 raw table {} in dataset {} exists", + v1RawTable.tableName, + v1RawTable.namespace + ) + val syncModeNeedsMigration = + isMigrationRequiredForSyncMode(streamConfig.destinationSyncMode) + val noValidV2RawTableExists = !doesValidV2RawTableAlreadyExist(streamConfig) + val aValidV1RawTableExists = + doesValidV1RawTableExist(v1RawTable.namespace, v1RawTable.tableName) + LOGGER.info( + "Migration Info: Required for Sync mode: {}, No existing v2 raw tables: {}, A v1 raw table exists: {}", + syncModeNeedsMigration, + noValidV2RawTableExists, + aValidV1RawTableExists + ) + return syncModeNeedsMigration && noValidV2RawTableExists && aValidV1RawTableExists + } + + /** + * Execute sql statements that converts a v1 raw table to a v2 raw table. Leaves the v1 raw + * table intact + * + * @param sqlGenerator the class which generates dialect specific sql statements + * @param destinationHandler the class which executes the sql statements + * @param streamConfig the stream to migrate the raw table of + */ + @Throws(TableNotMigratedException::class) + fun migrate( + sqlGenerator: SqlGenerator, + destinationHandler: DestinationHandler<*>, + streamConfig: StreamConfig + ) { + val namespacedTableName = convertToV1RawName(streamConfig) + try { + destinationHandler.execute( + sqlGenerator.migrateFromV1toV2( + streamConfig.id, + namespacedTableName.namespace, + namespacedTableName.tableName + ) + ) + } catch (e: Exception) { + val message = + "Attempted and failed to migrate stream %s".formatted(streamConfig.id!!.finalName) + throw TableNotMigratedException(message, e) + } + } + + /** + * Checks the schema of the v1 raw table to ensure it matches the expected format + * + * @param existingV2AirbyteRawTable the v1 raw table + * @return whether the schema is as expected + */ + private fun doesV1RawTableMatchExpectedSchema( + existingV2AirbyteRawTable: DialectTableDefinition + ): Boolean { + return schemaMatchesExpectation( + existingV2AirbyteRawTable, + JavaBaseConstants.LEGACY_RAW_TABLE_COLUMNS + ) + } + + /** + * Checks the schema of the v2 raw table to ensure it matches the expected format + * + * @param existingV2AirbyteRawTable the v2 raw table + */ + private fun validateAirbyteInternalNamespaceRawTableMatchExpectedV2Schema( + existingV2AirbyteRawTable: DialectTableDefinition + ) { + // Account for the fact that the meta column was added later, so skip the rebuilding of the + // raw + // table. + if ( + !(schemaMatchesExpectation( + existingV2AirbyteRawTable, + JavaBaseConstants.V2_RAW_TABLE_COLUMN_NAMES_WITHOUT_META + ) || + schemaMatchesExpectation( + existingV2AirbyteRawTable, + JavaBaseConstants.V2_RAW_TABLE_COLUMN_NAMES + )) + ) { + throw UnexpectedSchemaException( + "Destination V2 Raw Table does not match expected Schema" + ) + } + } + + /** + * If the sync mode is a full refresh and we overwrite the table then there is no need to + * migrate + * + * @param destinationSyncMode destination sync mode + * @return whether this is full refresh overwrite + */ + private fun isMigrationRequiredForSyncMode(destinationSyncMode: DestinationSyncMode?): Boolean { + return DestinationSyncMode.OVERWRITE != destinationSyncMode + } + + /** + * Checks if a valid destinations v2 raw table already exists + * + * @param streamConfig the raw table to check + * @return whether it exists and is in the correct format + */ + @Throws(Exception::class) + private fun doesValidV2RawTableAlreadyExist(streamConfig: StreamConfig): Boolean { + if (doesAirbyteInternalNamespaceExist(streamConfig)) { + val existingV2Table = + getTableIfExists(streamConfig.id!!.rawNamespace, streamConfig.id!!.rawName) + existingV2Table.ifPresent { existingV2AirbyteRawTable: DialectTableDefinition -> + this.validateAirbyteInternalNamespaceRawTableMatchExpectedV2Schema( + existingV2AirbyteRawTable + ) + } + return existingV2Table.isPresent + } + return false + } + + /** + * Checks if a valid v1 raw table already exists + * + * @param namespace + * @param tableName + * @return whether it exists and is in the correct format + */ + @Throws(Exception::class) + protected fun doesValidV1RawTableExist(namespace: String?, tableName: String?): Boolean { + val existingV1RawTable = getTableIfExists(namespace, tableName) + return existingV1RawTable.isPresent && + doesV1RawTableMatchExpectedSchema(existingV1RawTable.get()) + } + + /** + * Checks to see if Airbyte's internal schema for destinations v2 exists + * + * @param streamConfig the stream to check + * @return whether the schema exists + */ + @Throws(Exception::class) + abstract fun doesAirbyteInternalNamespaceExist(streamConfig: StreamConfig?): Boolean + + /** + * Checks a Table's schema and compares it to an expected schema to make sure it matches + * + * @param existingTable the table to check + * @param columns the expected schema + * @return whether the existing table schema matches the expectation + */ + abstract fun schemaMatchesExpectation( + existingTable: DialectTableDefinition, + columns: Collection + ): Boolean + + /** + * Get a reference ta a table if it exists + * + * @param namespace + * @param tableName + * @return an optional potentially containing a reference to the table + */ + @Throws(Exception::class) + abstract fun getTableIfExists( + namespace: String?, + tableName: String? + ): Optional + + /** + * We use different naming conventions for raw table names in destinations v2, we need a way to + * map that back to v1 names + * + * @param streamConfig the stream in question + * @return the valid v1 name and namespace for the same stream + */ + abstract fun convertToV1RawName(streamConfig: StreamConfig): NamespacedTableName + + companion object { + protected val LOGGER: Logger = + LoggerFactory.getLogger(BaseDestinationV1V2Migrator::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/CatalogParser.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/CatalogParser.kt new file mode 100644 index 0000000000000..16ee374fe1c0a --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/CatalogParser.kt @@ -0,0 +1,271 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +import com.google.common.annotations.VisibleForTesting +import io.airbyte.cdk.integrations.base.AirbyteExceptionHandler.Companion.addStringForDeinterpolation +import io.airbyte.cdk.integrations.base.JavaBaseConstants +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import java.util.Optional +import java.util.function.Consumer +import org.apache.commons.codec.digest.DigestUtils +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +class CatalogParser +@JvmOverloads +constructor( + private val sqlGenerator: SqlGenerator, + private val rawNamespace: String = JavaBaseConstants.DEFAULT_AIRBYTE_INTERNAL_NAMESPACE +) { + fun parseCatalog(catalog: ConfiguredAirbyteCatalog): ParsedCatalog { + // this code is bad and I feel bad + // it's mostly a port of the old normalization logic to prevent tablename collisions. + // tbh I have no idea if it works correctly. + val streamConfigs: MutableList = ArrayList() + for (stream in catalog.streams) { + val originalStreamConfig = toStreamConfig(stream) + val actualStreamConfig: StreamConfig + // Use empty string quote because we don't really care + if ( + streamConfigs.stream().anyMatch { s: StreamConfig -> + s.id.finalTableId("") == originalStreamConfig.id.finalTableId("") + } || + streamConfigs.stream().anyMatch { s: StreamConfig -> + s.id.rawTableId("") == originalStreamConfig.id.rawTableId("") + } + ) { + val originalNamespace = stream.stream.namespace + val originalName = stream.stream.name + + LOGGER.info( + "Detected table name collision for {}.{}", + originalNamespace, + originalName + ) + + // ... this logic is ported from legacy normalization, and maybe should change? + // We're taking a hash of the quoted namespace and the unquoted stream name + val hash = + DigestUtils.sha1Hex( + "${originalStreamConfig.id.finalNamespace}&airbyte&$originalName" + ) + .substring(0, 3) + val newName = "${originalName}_$hash" + actualStreamConfig = + StreamConfig( + sqlGenerator.buildStreamId(originalNamespace, newName, rawNamespace), + originalStreamConfig.syncMode, + originalStreamConfig.destinationSyncMode, + originalStreamConfig.primaryKey, + originalStreamConfig.cursor, + originalStreamConfig.columns, + ) + } else { + actualStreamConfig = originalStreamConfig + } + streamConfigs.add(actualStreamConfig) + + // Populate some interesting strings into the exception handler string deinterpolator + addStringForDeinterpolation(actualStreamConfig.id.rawNamespace) + addStringForDeinterpolation(actualStreamConfig.id.rawName) + addStringForDeinterpolation(actualStreamConfig.id.finalNamespace) + addStringForDeinterpolation(actualStreamConfig.id.finalName) + addStringForDeinterpolation(actualStreamConfig.id.originalNamespace) + addStringForDeinterpolation(actualStreamConfig.id.originalName) + actualStreamConfig.columns!! + .keys + .forEach( + Consumer { columnId: ColumnId? -> + addStringForDeinterpolation(columnId!!.name) + addStringForDeinterpolation(columnId.originalName) + } + ) + // It's (unfortunately) possible for a cursor/PK to be declared that don't actually + // exist in the + // schema. + // Add their strings explicitly. + actualStreamConfig.cursor!!.ifPresent { cursor: ColumnId -> + addStringForDeinterpolation(cursor.name) + addStringForDeinterpolation(cursor.originalName) + } + actualStreamConfig.primaryKey!!.forEach( + Consumer { pk: ColumnId -> + addStringForDeinterpolation(pk.name) + addStringForDeinterpolation(pk.originalName) + } + ) + } + return ParsedCatalog(streamConfigs) + } + + @VisibleForTesting + fun toStreamConfig(stream: ConfiguredAirbyteStream): StreamConfig { + val schema: AirbyteType = AirbyteType.Companion.fromJsonSchema(stream.stream.jsonSchema) + val airbyteColumns = + when (schema) { + is Struct -> schema.properties + is Union -> schema.asColumns() + else -> throw IllegalArgumentException("Top-level schema must be an object") + } + + require(!stream.primaryKey.stream().anyMatch { key: List -> key.size > 1 }) { + "Only top-level primary keys are supported" + } + val primaryKey = + stream.primaryKey + .stream() + .map { key: List -> sqlGenerator.buildColumnId(key[0]) } + .toList() + + require(stream.cursorField.size <= 1) { "Only top-level cursors are supported" } + val cursor: Optional = + if (stream.cursorField.isNotEmpty()) { + Optional.of(sqlGenerator.buildColumnId(stream.cursorField[0])) + } else { + Optional.empty() + } + + val columns = resolveColumnCollisions(airbyteColumns, stream) + + return StreamConfig( + sqlGenerator.buildStreamId(stream.stream.namespace, stream.stream.name, rawNamespace), + stream.syncMode, + stream.destinationSyncMode, + primaryKey, + cursor, + columns + ) + } + + /** + * This code is really bad and I'm not convinced we need to preserve this behavior. As with the + * tablename collisions thing above - we're trying to preserve legacy normalization's naming + * conventions here. + */ + private fun resolveColumnCollisions( + airbyteColumns: LinkedHashMap, + stream: ConfiguredAirbyteStream + ): LinkedHashMap { + val columns = LinkedHashMap() + for ((key, value) in airbyteColumns) { + val originalColumnId = sqlGenerator.buildColumnId(key) + var columnId: ColumnId + if ( + columns.keys.stream().noneMatch { c: ColumnId -> + c.canonicalName == originalColumnId.canonicalName + } + ) { + // None of the existing columns have the same name. We can add this new column + // as-is. + columnId = originalColumnId + } else { + LOGGER.info( + "Detected column name collision for {}.{}.{}", + stream.stream.namespace, + stream.stream.name, + key, + ) + // One of the existing columns has the same name. We need to handle this collision. + // Append _1, _2, _3, ... to the column name until we find one that doesn't collide. + var i = 1 + while (true) { + columnId = sqlGenerator.buildColumnId(key, "_$i") + + // Verify that we're making progress, e.g. we haven't immediately truncated away + // the suffix. + if (columnId.canonicalName == originalColumnId.canonicalName) { + // If we're not making progress, do a more powerful mutation instead of + // appending numbers. + // Assume that we're being truncated, and that the column ID's name is the + // maximum length. + columnId = + superResolveColumnCollisions( + originalColumnId, + columns, + originalColumnId.name.length + ) + break + } + + val canonicalName = columnId.canonicalName + if ( + columns.keys.stream().noneMatch { c: ColumnId -> + c.canonicalName == canonicalName + } + ) { + break + } else { + i++ + } + } + // But we need to keep the original name so that we can still fetch it out of the + // JSON records. + columnId = + ColumnId( + columnId.name, + originalColumnId.originalName, + columnId.canonicalName, + ) + } + + columns[columnId] = value + } + return columns + } + + /** + * Generate a name of the format ``. E.g. for affixLength=3: + * "veryLongName" -> "ver6ame" This is based on the "i18n"-ish naming convention. + * + * @param columnId The column that we're trying to add + * @param columns The columns that we've already added + */ + private fun superResolveColumnCollisions( + columnId: ColumnId, + columns: LinkedHashMap, + maximumColumnNameLength: Int + ): ColumnId { + val originalColumnName = columnId.originalName + + var newColumnId = columnId + // Assume that the portion can be expressed in at most 5 characters. + // If someone is giving us a column name that's longer than 99999 characters, + // that's just being silly. + val affixLength = (maximumColumnNameLength - 5) / 2 + // If, after reserving 5 characters for the length, we can't fit the affixes, + // just give up. That means the destination is trying to restrict us to a + // 6-character column name, which is just silly. + if (affixLength <= 0) { + throw IllegalArgumentException( + "Cannot solve column name collision: ${newColumnId.originalName}. We recommend removing this column to continue syncing." + ) + } + val prefix = originalColumnName.substring(0, affixLength) + val suffix = + originalColumnName.substring( + originalColumnName.length - affixLength, + originalColumnName.length + ) + val length = originalColumnName.length - 2 * affixLength + newColumnId = sqlGenerator.buildColumnId("$prefix$length$suffix") + // if there's _still_ a collision after this, just give up. + // we could try to be more clever, but this is already a pretty rare case. + if ( + columns.keys.stream().anyMatch { c: ColumnId -> + c.canonicalName == newColumnId.canonicalName + } + ) { + throw IllegalArgumentException( + "Cannot solve column name collision: ${newColumnId.originalName}. We recommend removing this column to continue syncing." + ) + } + return newColumnId + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(CatalogParser::class.java) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/CollectionUtils.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/CollectionUtils.kt new file mode 100644 index 0000000000000..9a1a0d3f01d71 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/CollectionUtils.kt @@ -0,0 +1,67 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +import java.util.* + +/** + * TODO these are not in the right place, they probably belongs in a base library but to avoid + * having to publish a bunch of connectors I'm putting it here temporarily + */ +object CollectionUtils { + /** + * Pass in a collection and search term to determine whether any of the values match ignoring + * case + * + * @param collection the collection of values + * @param search the value to look for + * @return whether the value matches anything in the collection + */ + fun containsIgnoreCase(collection: Collection, search: String): Boolean { + return matchingKey(collection, search).isPresent + } + + /** + * Convenience method for when you need to check an entire collection for membership in another + * collection. + * + * @param searchCollection the collection you want to check membership in + * @param searchTerms the keys you're looking for + * @return whether all searchTerms are in the searchCollection + */ + fun containsAllIgnoreCase( + searchCollection: Collection, + searchTerms: Collection + ): Boolean { + require(!searchTerms.isEmpty()) { + // There isn't a good behavior for an empty collection. Without this check, an empty + // collection + // would always return + // true, but it feels misleading to say that the searchCollection does "contain all" + // when + // searchTerms is empty + "Search Terms collection may not be empty" + } + return searchTerms.stream().allMatch { term: String -> + containsIgnoreCase(searchCollection, term) + } + } + + /** + * From a collection of strings, return an entry which matches the search term ignoring case + * + * @param collection the collection to search + * @param search the key you're looking for + * @return an Optional value which might contain the key that matches the search + */ + fun matchingKey(collection: Collection, search: String): Optional { + if (collection.contains(search)) { + return Optional.of(search) + } + return collection + .stream() + .filter { s: String -> s.equals(search, ignoreCase = true) } + .findFirst() + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/ColumnId.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/ColumnId.kt new file mode 100644 index 0000000000000..5629b4085e6bd --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/ColumnId.kt @@ -0,0 +1,21 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +/** + * In general, callers should not directly instantiate this class. Use [SqlGenerator.buildColumnId] + * instead. + * + * @param name the name of the column in the final table. Callers should prefer [.name] when using + * the column in a query. + * @param originalName the name of the field in the raw JSON blob + * @param canonicalName the name of the field according to the destination. Used for deduping. + * Useful if a destination warehouse handles columns ignoring case, but preserves case in the table + * schema. + */ +data class ColumnId(val name: String, val originalName: String, val canonicalName: String) { + fun name(quote: String): String { + return quote + name + quote + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/DefaultTyperDeduper.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/DefaultTyperDeduper.kt new file mode 100644 index 0000000000000..07499144868fd --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/DefaultTyperDeduper.kt @@ -0,0 +1,511 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +import io.airbyte.cdk.integrations.base.IntegrationRunner +import io.airbyte.cdk.integrations.destination.StreamSyncSummary +import io.airbyte.cdk.integrations.util.ConnectorExceptionUtil.getResultsOrLogAndThrowFirst +import io.airbyte.commons.concurrency.CompletableFutures +import io.airbyte.integrations.base.destination.typing_deduping.TyperDeduperUtil.Companion.executeRawTableMigrations +import io.airbyte.integrations.base.destination.typing_deduping.TyperDeduperUtil.Companion.executeWeirdMigrations +import io.airbyte.integrations.base.destination.typing_deduping.TyperDeduperUtil.Companion.prepareSchemas +import io.airbyte.integrations.base.destination.typing_deduping.migrators.Migration +import io.airbyte.integrations.base.destination.typing_deduping.migrators.MinimumDestinationState +import io.airbyte.protocol.models.v0.DestinationSyncMode +import io.airbyte.protocol.models.v0.StreamDescriptor +import java.util.* +import java.util.concurrent.* +import java.util.concurrent.locks.Lock +import java.util.concurrent.locks.ReadWriteLock +import java.util.concurrent.locks.ReentrantLock +import java.util.concurrent.locks.ReentrantReadWriteLock +import java.util.function.Supplier +import org.apache.commons.lang3.StringUtils +import org.apache.commons.lang3.concurrent.BasicThreadFactory +import org.apache.commons.lang3.tuple.Pair +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * An abstraction over SqlGenerator and DestinationHandler. Destinations will still need to call + * `new CatalogParser(new FooSqlGenerator()).parseCatalog()`, but should otherwise avoid interacting + * directly with these classes. + * + * In a typical sync, destinations should call the methods: + * + * 1. [.prepareFinalTables] once at the start of the sync + * 1. [.typeAndDedupe] as needed throughout the sync + * 1. [.commitFinalTables] once at the end of the sync + * + * Note that #prepareTables() initializes some internal state. The other methods will throw an + * exception if that method was not called. + */ +class DefaultTyperDeduper( + private val sqlGenerator: SqlGenerator, + destinationHandler: DestinationHandler, + parsedCatalog: ParsedCatalog, + v1V2Migrator: DestinationV1V2Migrator, + v2TableMigrator: V2TableMigrator, + migrations: List> +) : TyperDeduper { + private val destinationHandler: DestinationHandler + + private val v1V2Migrator: DestinationV1V2Migrator + private val v2TableMigrator: V2TableMigrator + private val migrations: List> + private val parsedCatalog: ParsedCatalog + private var overwriteStreamsWithTmpTable: MutableSet? = null + private val streamsWithSuccessfulSetup: MutableSet> + private val initialRawTableStateByStream: MutableMap + + // We only want to run a single instance of T+D per stream at a time. These objects are used for + // synchronization per stream. + // Use a read-write lock because we need the same semantics: + // * any number of threads can insert to the raw tables at the same time, as long as T+D isn't + // running (i.e. "read lock") + // * T+D must run in complete isolation (i.e. "write lock") + private val tdLocks: MutableMap + + // These locks are used to prevent multiple simultaneous attempts to T+D the same stream. + // We use tryLock with these so that we don't queue up multiple T+D runs for the same stream. + private val internalTdLocks: MutableMap + + private val executorService: ExecutorService + private lateinit var destinationInitialStatuses: + List> + + init { + this.destinationHandler = destinationHandler + this.parsedCatalog = parsedCatalog + this.v1V2Migrator = v1V2Migrator + this.v2TableMigrator = v2TableMigrator + this.migrations = migrations + this.initialRawTableStateByStream = ConcurrentHashMap() + this.streamsWithSuccessfulSetup = ConcurrentHashMap.newKeySet(parsedCatalog.streams.size) + this.tdLocks = ConcurrentHashMap() + this.internalTdLocks = ConcurrentHashMap() + this.executorService = + Executors.newFixedThreadPool( + FutureUtils.countOfTypeAndDedupeThreads, + BasicThreadFactory.Builder() + .namingPattern(IntegrationRunner.TYPE_AND_DEDUPE_THREAD_NAME) + .build() + ) + } + + constructor( + sqlGenerator: SqlGenerator, + destinationHandler: DestinationHandler, + parsedCatalog: ParsedCatalog, + v1V2Migrator: DestinationV1V2Migrator, + migrations: List> + ) : this( + sqlGenerator, + destinationHandler, + parsedCatalog, + v1V2Migrator, + NoopV2TableMigrator(), + migrations + ) + + @Throws(Exception::class) + override fun prepareSchemasAndRunMigrations() { + // Technically kind of weird to call this here, but it's the best place we have. + // Ideally, we'd create just airbyte_internal here, and defer creating the final table + // schemas + // until prepareFinalTables... but it doesn't really matter. + prepareSchemas(sqlGenerator, destinationHandler, parsedCatalog) + + executeWeirdMigrations( + executorService, + sqlGenerator, + destinationHandler, + v1V2Migrator, + v2TableMigrator, + parsedCatalog + ) + + destinationInitialStatuses = + executeRawTableMigrations( + executorService, + destinationHandler, + migrations, + destinationHandler.gatherInitialState(parsedCatalog.streams) + ) + + // Commit our destination states immediately. + // Technically, migrations aren't done until we execute the soft reset. + // However, our state contains a needsSoftReset flag, so we can commit that we already + // executed the + // migration + // and even if we fail to run the soft reset in this sync, future syncs will see the soft + // reset flag + // and finish it for us. + destinationHandler.commitDestinationStates( + destinationInitialStatuses.associate { it.streamConfig.id to it.destinationState } + ) + } + + @Throws(Exception::class) + override fun prepareFinalTables() { + check(overwriteStreamsWithTmpTable == null) { "Tables were already prepared." } + overwriteStreamsWithTmpTable = ConcurrentHashMap.newKeySet() + LOGGER.info("Preparing tables") + + val prepareTablesFutureResult = + CompletableFutures.allOf( + destinationInitialStatuses.map { this.prepareTablesFuture(it) }.toList() + ) + .toCompletableFuture() + .join() + getResultsOrLogAndThrowFirst( + "The following exceptions were thrown attempting to prepare tables:\n", + prepareTablesFutureResult + ) + + // If we get here, then we've executed all soft resets. Force the soft reset flag to false. + destinationHandler.commitDestinationStates( + destinationInitialStatuses.associate { + it.streamConfig.id to it.destinationState.withSoftReset(false) + } + ) + } + + private fun prepareTablesFuture( + initialState: DestinationInitialStatus + ): CompletionStage { + // For each stream, make sure that its corresponding final table exists. + // Also, for OVERWRITE streams, decide if we're writing directly to the final table, or into + // an + // _airbyte_tmp table. + return CompletableFuture.supplyAsync( + { + val stream = initialState.streamConfig + try { + if (initialState.isFinalTablePresent) { + LOGGER.info("Final Table exists for stream {}", stream.id!!.finalName) + // The table already exists. Decide whether we're writing to it directly, or + // using a tmp table. + if (stream.destinationSyncMode == DestinationSyncMode.OVERWRITE) { + if (!initialState.isFinalTableEmpty || initialState.isSchemaMismatch) { + // We want to overwrite an existing table. Write into a tmp table. + // We'll overwrite the table at the + // end of the sync. + overwriteStreamsWithTmpTable!!.add(stream.id) + // overwrite an existing tmp table if needed. + destinationHandler.execute( + sqlGenerator.createTable( + stream, + TMP_OVERWRITE_TABLE_SUFFIX, + true + ) + ) + LOGGER.info( + "Using temp final table for stream {}, will overwrite existing table at end of sync", + stream.id!!.finalName + ) + } else { + LOGGER.info( + "Final Table for stream {} is empty and matches the expected v2 format, writing to table directly", + stream.id!!.finalName + ) + } + } else if ( + initialState.isSchemaMismatch || + initialState.destinationState!!.needsSoftReset() + ) { + // We're loading data directly into the existing table. + // Make sure it has the right schema. + // Also, if a raw table migration wants us to do a soft reset, do that + // here. + TypeAndDedupeTransaction.executeSoftReset( + sqlGenerator, + destinationHandler, + stream + ) + } + } else { + LOGGER.info( + "Final Table does not exist for stream {}, creating.", + stream.id!!.finalName + ) + // The table doesn't exist. Create it. Don't force. + destinationHandler.execute( + sqlGenerator.createTable(stream, NO_SUFFIX, false) + ) + } + + initialRawTableStateByStream[stream.id] = initialState.initialRawTableStatus + + streamsWithSuccessfulSetup.add( + Pair.of(stream.id!!.originalNamespace, stream.id!!.originalName) + ) + + // Use fair locking. This slows down lock operations, but that performance hit + // is by far dwarfed + // by our IO costs. This lock needs to be fair because the raw table writers are + // running almost + // constantly, + // and we don't want them to starve T+D. + tdLocks[stream.id] = ReentrantReadWriteLock(true) + // This lock doesn't need to be fair; any T+D instance is equivalent and we'll + // skip T+D if we can't + // immediately acquire the lock. + internalTdLocks[stream.id] = ReentrantLock() + + return@supplyAsync Unit + } catch (e: Exception) { + LOGGER.error( + "Exception occurred while preparing tables for stream " + + stream.id!!.originalName, + e + ) + throw RuntimeException(e) + } + }, + this.executorService + ) + } + + @Throws(Exception::class) + override fun typeAndDedupe(originalNamespace: String, originalName: String, mustRun: Boolean) { + val streamConfig = parsedCatalog.getStream(originalNamespace, originalName) + val task = typeAndDedupeTask(streamConfig, mustRun) + FutureUtils.reduceExceptions( + setOf(task), + String.format( + "The Following Exceptions were thrown while typing and deduping %s.%s:\n", + originalNamespace, + originalName + ) + ) + } + + override fun getRawTableInsertLock(originalNamespace: String, originalName: String): Lock { + val streamConfig = parsedCatalog.getStream(originalNamespace, originalName) + return tdLocks[streamConfig!!.id]!!.readLock() + } + + private fun streamSetupSucceeded(streamConfig: StreamConfig?): Boolean { + val originalNamespace = streamConfig!!.id!!.originalNamespace + val originalName = streamConfig.id!!.originalName + if (!streamsWithSuccessfulSetup.contains(Pair.of(originalNamespace, originalName))) { + // For example, if T+D setup fails, but the consumer tries to run T+D on all streams + // during close, + // we should skip it. + LOGGER.warn( + "Skipping typing and deduping for {}.{} because we could not set up the tables for this stream.", + originalNamespace, + originalName + ) + return false + } + return true + } + + fun typeAndDedupeTask( + streamConfig: StreamConfig?, + mustRun: Boolean + ): CompletableFuture> { + return CompletableFuture.supplyAsync( + { + val originalNamespace = streamConfig!!.id!!.originalNamespace + val originalName = streamConfig.id!!.originalName + try { + if (!streamSetupSucceeded(streamConfig)) { + return@supplyAsync Optional.empty() + } + + val run: Boolean + val internalLock = internalTdLocks[streamConfig.id] + if (mustRun) { + // If we must run T+D, then wait until we acquire the lock. + internalLock!!.lock() + run = true + } else { + // Otherwise, try and get the lock. If another thread already has it, then + // we should noop here. + run = internalLock!!.tryLock() + } + + if (run) { + LOGGER.info( + "Waiting for raw table writes to pause for {}.{}", + originalNamespace, + originalName + ) + val externalLock = tdLocks[streamConfig.id]!!.writeLock() + externalLock.lock() + try { + val initialRawTableStatus = + initialRawTableStateByStream.getValue(streamConfig.id) + TypeAndDedupeTransaction.executeTypeAndDedupe( + sqlGenerator, + destinationHandler, + streamConfig, + initialRawTableStatus.maxProcessedTimestamp, + getFinalTableSuffix(streamConfig.id) + ) + } finally { + LOGGER.info( + "Allowing other threads to proceed for {}.{}", + originalNamespace, + originalName + ) + externalLock.unlock() + internalLock.unlock() + } + } else { + LOGGER.info( + "Another thread is already trying to run typing and deduping for {}.{}. Skipping it here.", + originalNamespace, + originalName + ) + } + return@supplyAsync Optional.empty() + } catch (e: Exception) { + LOGGER.error( + "Exception occurred while typing and deduping stream $originalName", + e + ) + return@supplyAsync Optional.of(e) + } + }, + this.executorService + ) + } + + @Throws(Exception::class) + override fun typeAndDedupe(streamSyncSummaries: Map) { + LOGGER.info("Typing and deduping all tables") + val typeAndDedupeTasks: MutableSet>> = HashSet() + parsedCatalog.streams + .stream() + .filter { streamConfig: StreamConfig -> + // Skip if stream setup failed. + if (!streamSetupSucceeded(streamConfig)) { + return@filter false + } + // Skip if we don't have any records for this stream. + val streamSyncSummary = + streamSyncSummaries.getOrDefault( + streamConfig!!.id!!.asStreamDescriptor(), + StreamSyncSummary.DEFAULT + ) + val nonzeroRecords = + streamSyncSummary.recordsWritten + .map { r: Long -> + r > 0 + } // If we didn't track record counts during the sync, assume we had nonzero + // records for this stream + .orElse(true) + val unprocessedRecordsPreexist = + initialRawTableStateByStream[streamConfig.id]!!.hasUnprocessedRecords + // If this sync emitted records, or the previous sync left behind some unprocessed + // records, + // then the raw table has some unprocessed records right now. + // Run T+D if either of those conditions are true. + val shouldRunTypingDeduping = nonzeroRecords || unprocessedRecordsPreexist + if (!shouldRunTypingDeduping) { + LOGGER.info( + "Skipping typing and deduping for stream {}.{} because it had no records during this sync and no unprocessed records from a previous sync.", + streamConfig.id!!.originalNamespace, + streamConfig.id!!.originalName + ) + } + shouldRunTypingDeduping + } + .forEach { streamConfig: StreamConfig? -> + typeAndDedupeTasks.add(typeAndDedupeTask(streamConfig, true)) + } + CompletableFuture.allOf(*typeAndDedupeTasks.toTypedArray()).join() + FutureUtils.reduceExceptions( + typeAndDedupeTasks, + "The Following Exceptions were thrown while typing and deduping tables:\n" + ) + } + + /** + * Does any "end of sync" work. For most streams, this is a noop. + * + * For OVERWRITE streams where we're writing to a temp table, this is where we swap the temp + * table into the final table. + */ + @Throws(Exception::class) + override fun commitFinalTables() { + LOGGER.info("Committing final tables") + val tableCommitTasks: MutableSet>> = HashSet() + for (streamConfig in parsedCatalog.streams) { + if ( + !streamsWithSuccessfulSetup.contains( + Pair.of(streamConfig!!.id!!.originalNamespace, streamConfig.id!!.originalName) + ) + ) { + LOGGER.warn( + "Skipping committing final table for for {}.{} because we could not set up the tables for this stream.", + streamConfig.id!!.originalNamespace, + streamConfig.id!!.originalName + ) + continue + } + if (DestinationSyncMode.OVERWRITE == streamConfig.destinationSyncMode) { + tableCommitTasks.add(commitFinalTableTask(streamConfig)) + } + } + CompletableFuture.allOf(*tableCommitTasks.toTypedArray()).join() + FutureUtils.reduceExceptions( + tableCommitTasks, + "The Following Exceptions were thrown while committing final tables:\n" + ) + } + + private fun commitFinalTableTask( + streamConfig: StreamConfig? + ): CompletableFuture> { + return CompletableFuture.supplyAsync>( + Supplier> supplyAsync@{ + val streamId = streamConfig!!.id + val finalSuffix = getFinalTableSuffix(streamId) + if (!StringUtils.isEmpty(finalSuffix)) { + val overwriteFinalTable = + sqlGenerator.overwriteFinalTable(streamId, finalSuffix) + LOGGER.info( + "Overwriting final table with tmp table for stream {}.{}", + streamId!!.originalNamespace, + streamId.originalName + ) + try { + destinationHandler.execute(overwriteFinalTable) + } catch (e: Exception) { + LOGGER.error( + "Exception Occurred while committing final table for stream " + + streamId.originalName, + e + ) + return@supplyAsync Optional.of(e) + } + } + return@supplyAsync Optional.empty() + }, + this.executorService + ) + } + + private fun getFinalTableSuffix(streamId: StreamId?): String { + return if (overwriteStreamsWithTmpTable!!.contains(streamId)) TMP_OVERWRITE_TABLE_SUFFIX + else NO_SUFFIX + } + + override fun cleanup() { + LOGGER.info("Cleaning Up type-and-dedupe thread pool") + executorService.shutdown() + } + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(TyperDeduper::class.java) + + private const val NO_SUFFIX = "" + private const val TMP_OVERWRITE_TABLE_SUFFIX = "_airbyte_tmp" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/DestinationHandler.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/DestinationHandler.kt new file mode 100644 index 0000000000000..69802466706c6 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/DestinationHandler.kt @@ -0,0 +1,22 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +interface DestinationHandler { + @Throws(Exception::class) fun execute(sql: Sql) + + /** + * Fetch the current state of the destination for the given streams. This method MUST create the + * airbyte_internal.state table if it does not exist. This method MAY assume the + * airbyte_internal schema already exists. (substitute the appropriate raw table schema if the + * user is overriding it). + */ + @Throws(Exception::class) + fun gatherInitialState( + streamConfigs: List + ): List> + + @Throws(Exception::class) + fun commitDestinationStates(destinationStates: Map) +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationInitialStatus.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/DestinationInitialStatus.kt similarity index 90% rename from airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationInitialStatus.kt rename to airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/DestinationInitialStatus.kt index ebddcc2805d5f..2003a8b7d0e0d 100644 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationInitialStatus.kt +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/DestinationInitialStatus.kt @@ -10,5 +10,5 @@ data class DestinationInitialStatus( val initialRawTableStatus: InitialRawTableStatus, val isSchemaMismatch: Boolean, val isFinalTableEmpty: Boolean, - val destinationState: DestinationState + val destinationState: DestinationState, ) diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/DestinationV1V2Migrator.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/DestinationV1V2Migrator.kt new file mode 100644 index 0000000000000..d18cf8567af1b --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/DestinationV1V2Migrator.kt @@ -0,0 +1,23 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +interface DestinationV1V2Migrator { + /** + * This is the primary entrypoint to this interface + * + * Determine whether a migration is necessary for a given stream and if so, migrate the raw + * table and rebuild the final table with a soft reset + * + * @param sqlGenerator the class to use to generate sql + * @param destinationHandler the handler to execute the sql statements + * @param streamConfig the stream to assess migration needs + */ + @Throws(TableNotMigratedException::class, UnexpectedSchemaException::class, Exception::class) + fun migrateIfNecessary( + sqlGenerator: SqlGenerator, + destinationHandler: DestinationHandler<*>, + streamConfig: StreamConfig + ) +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/FutureUtils.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/FutureUtils.kt new file mode 100644 index 0000000000000..75fa0e41552ff --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/FutureUtils.kt @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +import io.airbyte.cdk.integrations.util.ConnectorExceptionUtil.logAllAndThrowFirst +import java.util.* +import java.util.concurrent.CompletableFuture + +object FutureUtils { + private const val DEFAULT_TD_THREAD_COUNT = 8 + + val countOfTypeAndDedupeThreads: Int + /** + * Allow for configuring the number of typing and deduping threads via an environment + * variable in the destination container. + * + * @return the number of threads to use in the typing and deduping pool + */ + get() = + Optional.ofNullable(System.getenv("TD_THREADS")) + .map { s -> s.toInt() } + .orElse(DEFAULT_TD_THREAD_COUNT) + + /** + * Log all exceptions from a list of futures, and rethrow the first exception if there is one. + * This mimics the behavior of running the futures in serial, where the first failure + */ + @Throws(Exception::class) + fun reduceExceptions( + potentialExceptions: Collection>>, + initialMessage: String + ) { + val exceptions = + potentialExceptions + .stream() + .map { obj: CompletableFuture> -> obj.join() } + .filter { obj: Optional -> obj.isPresent } + .map { obj: Optional -> obj.get() } + .toList() + logAllAndThrowFirst(initialMessage, exceptions) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/InitialRawTableStatus.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/InitialRawTableStatus.kt new file mode 100644 index 0000000000000..49df7a54bc499 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/InitialRawTableStatus.kt @@ -0,0 +1,13 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +import java.time.Instant +import java.util.* + +data class InitialRawTableStatus( + val rawTableExists: Boolean, + val hasUnprocessedRecords: Boolean, + val maxProcessedTimestamp: Optional +) diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/NamespacedTableName.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/NamespacedTableName.kt new file mode 100644 index 0000000000000..c4615c2bd68d8 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/NamespacedTableName.kt @@ -0,0 +1,15 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +// yet another namespace, name combo class +class NamespacedTableName(namespace: String, tableName: String) { + val namespace: String + val tableName: String + + init { + this.namespace = namespace + this.tableName = tableName + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/NoOpDestinationV1V2Migrator.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/NoOpDestinationV1V2Migrator.kt new file mode 100644 index 0000000000000..2150d5e9947af --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/NoOpDestinationV1V2Migrator.kt @@ -0,0 +1,15 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +class NoOpDestinationV1V2Migrator : DestinationV1V2Migrator { + @Throws(TableNotMigratedException::class, UnexpectedSchemaException::class) + override fun migrateIfNecessary( + sqlGenerator: SqlGenerator, + destinationHandler: DestinationHandler<*>, + streamConfig: StreamConfig + ) { + // Do nothing + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/NoOpRawTableTDLock.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/NoOpRawTableTDLock.kt similarity index 100% rename from airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/NoOpRawTableTDLock.kt rename to airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/NoOpRawTableTDLock.kt diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/NoOpTyperDeduperWithV1V2Migrations.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/NoOpTyperDeduperWithV1V2Migrations.kt new file mode 100644 index 0000000000000..60cfbca938092 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/NoOpTyperDeduperWithV1V2Migrations.kt @@ -0,0 +1,109 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +import io.airbyte.cdk.integrations.base.IntegrationRunner +import io.airbyte.cdk.integrations.destination.StreamSyncSummary +import io.airbyte.integrations.base.destination.typing_deduping.TyperDeduperUtil.Companion.executeRawTableMigrations +import io.airbyte.integrations.base.destination.typing_deduping.TyperDeduperUtil.Companion.executeWeirdMigrations +import io.airbyte.integrations.base.destination.typing_deduping.TyperDeduperUtil.Companion.prepareSchemas +import io.airbyte.integrations.base.destination.typing_deduping.migrators.Migration +import io.airbyte.integrations.base.destination.typing_deduping.migrators.MinimumDestinationState +import io.airbyte.protocol.models.v0.StreamDescriptor +import io.github.oshai.kotlinlogging.KotlinLogging +import java.util.concurrent.ExecutorService +import java.util.concurrent.Executors +import java.util.concurrent.locks.Lock +import org.apache.commons.lang3.concurrent.BasicThreadFactory + +/** + * This is a NoOp implementation which skips and Typing and Deduping operations and does not emit + * the final tables. However, this implementation still performs V1->V2 migrations and V2 + * json->string migrations in the raw tables. + */ +private val log = KotlinLogging.logger {} + +class NoOpTyperDeduperWithV1V2Migrations( + private val sqlGenerator: SqlGenerator, + destinationHandler: DestinationHandler, + parsedCatalog: ParsedCatalog, + v1V2Migrator: DestinationV1V2Migrator, + v2TableMigrator: V2TableMigrator, + migrations: List> +) : TyperDeduper { + private val v1V2Migrator: DestinationV1V2Migrator + private val v2TableMigrator: V2TableMigrator + private val migrations: List> + private val executorService: ExecutorService + private val parsedCatalog: ParsedCatalog + private val destinationHandler: DestinationHandler + + init { + this.destinationHandler = destinationHandler + this.parsedCatalog = parsedCatalog + this.v1V2Migrator = v1V2Migrator + this.v2TableMigrator = v2TableMigrator + this.migrations = migrations + this.executorService = + Executors.newFixedThreadPool( + FutureUtils.countOfTypeAndDedupeThreads, + BasicThreadFactory.Builder() + .namingPattern(IntegrationRunner.TYPE_AND_DEDUPE_THREAD_NAME) + .build() + ) + } + + @Throws(Exception::class) + override fun prepareSchemasAndRunMigrations() { + prepareSchemas(sqlGenerator, destinationHandler, parsedCatalog) + + executeWeirdMigrations( + executorService, + sqlGenerator, + destinationHandler, + v1V2Migrator, + v2TableMigrator, + parsedCatalog + ) + + val destinationInitialStatuses = + executeRawTableMigrations( + executorService, + destinationHandler, + migrations, + destinationHandler.gatherInitialState(parsedCatalog.streams) + ) + + // Commit the updated destination states. + // We don't need to trigger any soft resets, because we don't have any final tables. + destinationHandler.commitDestinationStates( + destinationInitialStatuses.associate { it.streamConfig.id to it.destinationState } + ) + } + + override fun prepareFinalTables() { + log.info("Skipping prepareFinalTables") + } + + override fun typeAndDedupe(originalNamespace: String, originalName: String, mustRun: Boolean) { + log.info("Skipping TypeAndDedupe") + } + + override fun getRawTableInsertLock(originalNamespace: String, originalName: String): Lock { + return NoOpRawTableTDLock() + } + + override fun typeAndDedupe(streamSyncSummaries: Map) { + log.info("Skipping TypeAndDedupe final") + } + + override fun commitFinalTables() { + log.info("Skipping commitFinalTables final") + } + + override fun cleanup() { + log.info("Cleaning Up type-and-dedupe thread pool") + executorService.shutdown() + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/NoopTyperDeduper.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/NoopTyperDeduper.kt new file mode 100644 index 0000000000000..26df693cf76ca --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/NoopTyperDeduper.kt @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +import io.airbyte.cdk.integrations.destination.StreamSyncSummary +import io.airbyte.protocol.models.v0.StreamDescriptor +import java.util.concurrent.TimeUnit +import java.util.concurrent.locks.Condition +import java.util.concurrent.locks.Lock + +/** + * This class should be used while upgrading a destination from V1 to V2. V2 destinations should use + * [NoOpTyperDeduperWithV1V2Migrations] for disabling T+D, because it correctly handles various + * migration operations. + */ +class NoopTyperDeduper : TyperDeduper { + @Throws(Exception::class) override fun prepareSchemasAndRunMigrations() {} + + override fun prepareFinalTables() {} + + override fun typeAndDedupe(originalNamespace: String, originalName: String, mustRun: Boolean) {} + + override fun getRawTableInsertLock(originalNamespace: String, originalName: String): Lock { + // Return a fake lock that does nothing. + return object : Lock { + override fun lock() {} + + override fun lockInterruptibly() {} + + override fun tryLock(): Boolean { + // To mimic NoOp behavior always return true that lock is acquired + return true + } + + override fun tryLock(time: Long, unit: TimeUnit): Boolean { + // To mimic NoOp behavior always return true that lock is acquired + return true + } + + override fun unlock() {} + + override fun newCondition(): Condition? { + return null + } + } + } + + override fun commitFinalTables() {} + + override fun typeAndDedupe(streamSyncSummaries: Map) {} + + override fun cleanup() {} +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/NoopV2TableMigrator.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/NoopV2TableMigrator.kt new file mode 100644 index 0000000000000..b0a12c081a801 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/NoopV2TableMigrator.kt @@ -0,0 +1,10 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +class NoopV2TableMigrator : V2TableMigrator { + override fun migrateIfNecessary(streamConfig: StreamConfig?) { + // do nothing + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/ParsedCatalog.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/ParsedCatalog.kt new file mode 100644 index 0000000000000..5a2742124df10 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/ParsedCatalog.kt @@ -0,0 +1,46 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair + +class ParsedCatalog(streams: List) { + fun getStream(streamId: AirbyteStreamNameNamespacePair): StreamConfig? { + return getStream(streamId.namespace, streamId.name) + } + + fun getStream(streamId: StreamId): StreamConfig? { + return getStream(streamId.originalNamespace, streamId.originalName) + } + + fun getStream(originalNamespace: String?, originalName: String?): StreamConfig { + return streams + .stream() + .filter { s: StreamConfig -> + s.id.originalNamespace == originalNamespace && s.id.originalName == originalName + } + .findFirst() + .orElseThrow { + IllegalArgumentException( + String.format( + "Could not find stream %s.%s out of streams %s", + originalNamespace, + originalName, + streams + .stream() + .map { stream: StreamConfig -> + stream.id.originalNamespace + "." + stream.id.originalName + } + .toList() + ) + ) + } + } + + val streams: List + + init { + this.streams = streams + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/Sql.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/Sql.kt new file mode 100644 index 0000000000000..3f1971c066cd4 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/Sql.kt @@ -0,0 +1,141 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +import java.util.function.Consumer +import java.util.function.Function +import java.util.stream.Stream + +/** + * Represents a list of SQL transactions, where each transaction consists of one or more SQL + * statements. Each transaction MUST NOT contain the BEGIN/COMMIT statements. Each inner list is a + * single transaction, and each String is a single statement within that transaction. + * + * Most callers likely only need a single transaction, but e.g. BigQuery disallows running DDL + * inside transactions, and so needs to run sequential "CREATE SCHEMA", "CREATE TABLE" as separate + * transactions. + * + * Callers are encouraged to use the static factory methods instead of the public constructor. + */ +@JvmRecord +data class Sql(val transactions: List>) { + /** + * @param begin The SQL statement to start a transaction, typically "BEGIN" + * @param commit The SQL statement to commit a transaction, typically "COMMIT" + * @return A list of SQL strings, each of which represents a transaction. + */ + fun asSqlStrings(begin: String?, commit: String?): List { + return transactions + .stream() + .map { transaction: List -> + // If there's only one statement, we don't need to wrap it in a transaction. + if (transaction.size == 1) { + return@map transaction[0] + } + val builder = StringBuilder() + builder.append(begin) + builder.append(";\n") + transaction.forEach( + Consumer { statement: String? -> + builder.append(statement) + // No semicolon - statements already end with a semicolon + builder.append("\n") + } + ) + builder.append(commit) + builder.append(";\n") + builder.toString() + } + .toList() + } + + init { + transactions.forEach( + Consumer { transaction: List -> + require(!transaction.isEmpty()) { "Transaction must not be empty" } + require(!transaction.stream().anyMatch { s: String? -> s == null || s.isEmpty() }) { + "Transaction must not contain empty statements" + } + } + ) + } + + companion object { + /** Execute a list of SQL statements in a single transaction. */ + @JvmStatic + fun transactionally(statements: List): Sql { + return create(java.util.List.of(statements)) + } + + @JvmStatic + fun transactionally(vararg statements: String): Sql { + return transactionally(listOf(*statements)) + } + + /** Execute each statement as its own transaction. */ + @JvmStatic + fun separately(statements: List): Sql { + return create( + statements + .stream() + .map(Function> { o: String -> listOf(o) }) + .toList() + ) + } + + @JvmStatic + fun separately(vararg statements: String): Sql { + return separately(Stream.of(*statements).toList()) + } + + /** + * Convenience method for indicating intent. Equivalent to calling [.transactionally] or + * [.separately] with the same string. + */ + @JvmStatic + fun of(statement: String): Sql { + return transactionally(statement) + } + + @JvmStatic + fun concat(vararg sqls: Sql): Sql { + return create( + Stream.of(*sqls).flatMap { sql: Sql -> sql.transactions.stream() }.toList() + ) + } + + @JvmStatic + fun concat(sqls: List): Sql { + return create(sqls.stream().flatMap { sql: Sql -> sql.transactions.stream() }.toList()) + } + + /** + * Utility method to create a Sql object without empty statements/transactions, and + * appending semicolons when needed. + */ + @JvmStatic + fun create(transactions: List>): Sql { + return Sql( + transactions + .stream() + .map { transaction: List -> + transaction + .stream() + .filter { statement: String? -> + statement != null && !statement.isEmpty() + } + .map { statement: String -> + if (!statement.trim { it <= ' ' }.endsWith(";")) { + return@map "$statement;" + } + statement + } + .toList() + } + .filter { transaction: List -> !transaction.isEmpty() } + .toList() + ) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/SqlGenerator.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/SqlGenerator.kt new file mode 100644 index 0000000000000..2e82ac554efad --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/SqlGenerator.kt @@ -0,0 +1,114 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +import java.time.Instant +import java.util.* + +interface SqlGenerator { + fun buildStreamId(namespace: String, name: String, rawNamespaceOverride: String): StreamId + + fun buildColumnId(name: String): ColumnId { + return buildColumnId(name, "") + } + + fun buildColumnId(name: String, suffix: String?): ColumnId + + /** + * Generate a SQL statement to create a fresh table to match the given stream. + * + * The generated SQL should throw an exception if the table already exists and `force` is false. + * + * @param suffix A suffix to add to the stream name. Useful for full refresh overwrite syncs, + * where we write the entire sync to a temp table. + * @param force If true, will overwrite an existing table. If false, will throw an exception if + * the table already exists. If you're passing a non-empty prefix, you likely want to set this + * to true. + */ + fun createTable(stream: StreamConfig, suffix: String, force: Boolean): Sql + + /** + * Used to create either the airbyte_internal or final schemas if they don't exist + * + * @param schema the schema to create + * @return SQL to create the schema if it does not exist + */ + fun createSchema(schema: String?): Sql + + /** + * Generate a SQL statement to copy new data from the raw table into the final table. + * + * Responsible for: + * + * * Pulling new raw records from a table (i.e. records with null _airbyte_loaded_at) + * * Extracting the JSON fields and casting to the appropriate types + * * Handling errors in those casts + * * Merging those typed records into an existing table + * * Updating the raw records with SET _airbyte_loaded_at = now() + * + * Implementing classes are recommended to break this into smaller methods, which can be tested + * in isolation. However, this interface only requires a single mega-method. + * + * @param finalSuffix the suffix of the final table to write to. If empty string, writes to the + * final table directly. Useful for full refresh overwrite syncs, where we write the entire sync + * to a temp table and then swap it into the final table at the end. + * + * @param minRawTimestamp The latest _airbyte_extracted_at for which all raw records with that + * timestamp have already been typed+deduped. Implementations MAY use this value in a + * `_airbyte_extracted_at > minRawTimestamp` filter on the raw table to improve query + * performance. + * @param useExpensiveSaferCasting often the data coming from the source can be faithfully + * represented in the destination without issue, and using a "CAST" expression works fine, + * however sometimes we get badly typed data. In these cases we can use a more expensive query + * which handles casting exceptions. + */ + fun updateTable( + stream: StreamConfig, + finalSuffix: String, + minRawTimestamp: Optional, + useExpensiveSaferCasting: Boolean + ): Sql + + /** + * Drop the previous final table, and rename the new final table to match the old final table. + * + * This method may assume that the stream is an OVERWRITE stream, and that the final suffix is + * non-empty. Callers are responsible for verifying those are true. + */ + fun overwriteFinalTable(stream: StreamId, finalSuffix: String): Sql + + /** + * Creates a sql query which will create a v2 raw table from the v1 raw table, then performs a + * soft reset. + * + * @param streamId the stream to migrate + * @param namespace the namespace of the v1 raw table + * @param tableName name of the v2 raw table + * @return a string containing the necessary sql to migrate + */ + fun migrateFromV1toV2(streamId: StreamId, namespace: String?, tableName: String?): Sql + + /** + * Typically we need to create a soft reset temporary table and clear loaded at values + * + * @return + */ + fun prepareTablesForSoftReset(stream: StreamConfig): Sql { + val createTempTable = createTable(stream, TypeAndDedupeTransaction.SOFT_RESET_SUFFIX, true) + val clearLoadedAt = clearLoadedAt(stream.id) + return Sql.Companion.concat(createTempTable, clearLoadedAt) + } + + fun clearLoadedAt(streamId: StreamId): Sql + + /** + * Implementation specific if there is no option to retry again with safe casted SQL or the + * specific cause of the exception can be retried or not. + * + * @return true if the exception should be retried with a safer query + */ + fun shouldRetry(e: Exception?): Boolean { + return true + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/StreamConfig.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/StreamConfig.kt new file mode 100644 index 0000000000000..b37e7e5c2b919 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/StreamConfig.kt @@ -0,0 +1,18 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +import io.airbyte.protocol.models.v0.DestinationSyncMode +import io.airbyte.protocol.models.v0.SyncMode +import java.util.* +import kotlin.collections.LinkedHashMap + +data class StreamConfig( + val id: StreamId, + val syncMode: SyncMode?, + val destinationSyncMode: DestinationSyncMode?, + val primaryKey: List?, + val cursor: Optional?, + val columns: LinkedHashMap? +) {} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/StreamId.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/StreamId.kt new file mode 100644 index 0000000000000..1afb6199b436a --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/StreamId.kt @@ -0,0 +1,101 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair +import io.airbyte.protocol.models.v0.StreamDescriptor +import kotlin.math.max + +/** + * In general, callers should not directly instantiate this class. Use [SqlGenerator.buildStreamId] + * instead. + * + * All names/namespaces are intended to be quoted, but do not explicitly contain quotes. For + * example, finalName might be "foo bar"; the caller is required to wrap that in quotes before using + * it in a query. + * + * @param finalNamespace the namespace where the final table will be created + * @param finalName the name of the final table + * @param rawNamespace the namespace where the raw table will be created (typically "airbyte") + * @param rawName the name of the raw table (typically namespace_name, but may be different if there + * are collisions). There is no rawNamespace because we assume that we're writing raw tables to the + * airbyte namespace. + */ +data class StreamId( + val finalNamespace: String?, + val finalName: String?, + val rawNamespace: String?, + val rawName: String?, + val originalNamespace: String?, + val originalName: String? +) { + /** + * Most databases/warehouses use a `schema.name` syntax to identify tables. This is a + * convenience method to generate that syntax. + */ + fun finalTableId(quote: String): String { + return "$quote$finalNamespace$quote.$quote$finalName$quote" + } + + fun finalTableId(quote: String, suffix: String): String { + return "$quote$finalNamespace$quote.$quote$finalName$suffix$quote" + } + + fun rawTableId(quote: String): String { + return "$quote$rawNamespace$quote.$quote$rawName$quote" + } + + fun finalName(quote: String): String { + return quote + finalName + quote + } + + fun finalNamespace(quote: String): String { + return quote + finalNamespace + quote + } + + fun asPair(): AirbyteStreamNameNamespacePair { + return AirbyteStreamNameNamespacePair(originalName, originalNamespace) + } + + fun asStreamDescriptor(): StreamDescriptor { + return StreamDescriptor().withNamespace(originalNamespace).withName(originalName) + } + + companion object { + /** + * Build the raw table name as namespace + (delimiter) + name. For example, given a stream + * with namespace "public__ab" and name "abab_users", we will end up with raw table name + * "public__ab_ab___ab_abab_users". + * + * This logic is intended to solve two problems: + * + * * The raw table name should be unambiguously parsable into the namespace/name. + * * It must be impossible for two different streams to generate the same raw table name. + * + * The generated delimiter is guaranteed to not be present in the namespace or name, so it + * accomplishes both of these goals. + */ + @JvmStatic + fun concatenateRawTableName(namespace: String, name: String): String { + val plainConcat = namespace + name + // Pretend we always have at least one underscore, so that we never generate + // `_raw_stream_` + var longestUnderscoreRun = 1 + var i = 0 + while (i < plainConcat.length) { + // If we've found an underscore, count the number of consecutive underscores + var underscoreRun = 0 + while (i < plainConcat.length && plainConcat[i] == '_') { + underscoreRun++ + i++ + } + longestUnderscoreRun = + max(longestUnderscoreRun.toDouble(), underscoreRun.toDouble()).toInt() + i++ + } + + return namespace + "_raw" + "_".repeat(longestUnderscoreRun + 1) + "stream_" + name + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/Struct.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/Struct.kt new file mode 100644 index 0000000000000..7817653b1e6c5 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/Struct.kt @@ -0,0 +1,13 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +/** @param properties Use LinkedHashMap to preserve insertion order. */ +data class Struct(val properties: LinkedHashMap) : AirbyteType { + override val typeName: String = TYPE + + companion object { + const val TYPE: String = "STRUCT" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/TableNotMigratedException.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/TableNotMigratedException.kt new file mode 100644 index 0000000000000..00fb6767a1641 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/TableNotMigratedException.kt @@ -0,0 +1,14 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +/** + * Exception thrown when a destination's v2 sync is attempting to write to a table which does not + * have the expected columns used by airbyte. + */ +class TableNotMigratedException : RuntimeException { + constructor(message: String?) : super(message) + + constructor(message: String?, cause: Throwable?) : super(message, cause) +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/TypeAndDedupeOperationValve.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/TypeAndDedupeOperationValve.kt new file mode 100644 index 0000000000000..04d2fb44954c8 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/TypeAndDedupeOperationValve.kt @@ -0,0 +1,124 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings +import io.airbyte.cdk.integrations.base.DestinationConfig.Companion.instance +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair +import java.util.concurrent.ConcurrentHashMap +import java.util.function.Supplier +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * A slightly more complicated way to keep track of when to perform type and dedupe operations per + * stream + */ +class TypeAndDedupeOperationValve +@JvmOverloads +constructor(private val nowness: Supplier = SYSTEM_NOW) : + ConcurrentHashMap() { + private val incrementalIndex = ConcurrentHashMap() + + @SuppressFBWarnings("NP_PARAMETER_MUST_BE_NONNULL_BUT_MARKED_AS_NULLABLE") + override fun put(key: AirbyteStreamNameNamespacePair, value: Long?): Long? { + if (!incrementalIndex.containsKey(key)) { + incrementalIndex[key] = 0 + } + return super.put(key, value) + } + + /** + * Adds a stream specific timestamp to track type and dedupe operations + * + * @param key the AirbyteStreamNameNamespacePair to track + */ + fun addStream(key: AirbyteStreamNameNamespacePair) { + put(key, nowness.get()) + } + + fun addStreamIfAbsent(key: AirbyteStreamNameNamespacePair) { + putIfAbsent(key, nowness.get()) + incrementalIndex.putIfAbsent(key, 0) + } + + /** + * Whether we should type and dedupe at this point in time for this particular stream. + * + * @param key the stream in question + * @return a boolean indicating whether we have crossed the interval threshold for typing and + * deduping. + */ + fun readyToTypeAndDedupe(key: AirbyteStreamNameNamespacePair): Boolean { + if (!instance!!.getBooleanValue("enable_incremental_final_table_updates")) { + LOGGER.info("Skipping Incremental Typing and Deduping") + return false + } + if (!containsKey(key)) { + return false + } + + return nowness.get() - get(key)!! > + typeAndDedupeIncreasingIntervals[incrementalIndex[key]!!] + } + + /** + * Increment the interval at which typing and deduping should occur for the stream, max out at + * last index of [TypeAndDedupeOperationValve.typeAndDedupeIncreasingIntervals] + * + * @param key the stream to increment the interval of + * @return the index of the typing and deduping interval associated with this stream + */ + fun incrementInterval(key: AirbyteStreamNameNamespacePair): Int { + if (incrementalIndex[key]!! < typeAndDedupeIncreasingIntervals.size - 1) { + incrementalIndex[key] = incrementalIndex[key]!! + 1 + } + return incrementalIndex[key]!! + } + + /** + * Meant to be called after [TypeAndDedupeOperationValve.readyToTypeAndDedupe] will set a + * streams last operation to the current time and increase its index reference in + * [TypeAndDedupeOperationValve.typeAndDedupeIncreasingIntervals] + * + * @param key the stream to update + */ + fun updateTimeAndIncreaseInterval(key: AirbyteStreamNameNamespacePair) { + put(key, nowness.get()) + incrementInterval(key) + } + + /** + * Get the current interval for the stream + * + * @param key the stream in question + * @return a long value representing the length of the interval milliseconds + */ + fun getIncrementInterval(key: AirbyteStreamNameNamespacePair): Long { + return typeAndDedupeIncreasingIntervals[incrementalIndex[key]!!] + } + + companion object { + private val LOGGER: Logger = + LoggerFactory.getLogger(TypeAndDedupeOperationValve::class.java) + + private const val NEGATIVE_MILLIS: Long = -1 + private const val SIX_HOURS_MILLIS = (1000 * 60 * 60 * 6).toLong() + + // New users of airbyte likely want to see data flowing into their tables as soon as + // possible, and + // we want to catch new errors which might appear early within an incremental sync. + // However, as their destination tables grow in size, typing and de-duping data becomes an + // expensive + // operation. + // To strike a balance between showing data quickly and not slowing down the entire sync, we + // use an + // increasing interval based approach, from 0 up to 4 hours. + // This is not fancy, just hard coded intervals. + val typeAndDedupeIncreasingIntervals: List = + java.util.List.of(NEGATIVE_MILLIS, SIX_HOURS_MILLIS) + + private val SYSTEM_NOW = Supplier { System.currentTimeMillis() } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/TypeAndDedupeTransaction.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/TypeAndDedupeTransaction.kt new file mode 100644 index 0000000000000..66a05eb199f12 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/TypeAndDedupeTransaction.kt @@ -0,0 +1,103 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +import java.time.Instant +import java.util.* +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +object TypeAndDedupeTransaction { + const val SOFT_RESET_SUFFIX: String = "_ab_soft_reset" + private val LOGGER: Logger = LoggerFactory.getLogger(TypeAndDedupeTransaction::class.java) + + /** + * It can be expensive to build the errors array in the airbyte_meta column, so we first attempt + * an 'unsafe' transaction which assumes everything is typed correctly. If that fails, we will + * run a more expensive query which handles casting errors + * + * @param sqlGenerator for generating sql for the destination + * @param destinationHandler for executing sql created + * @param streamConfig which stream to operate on + * @param minExtractedAt to reduce the amount of data in the query + * @param suffix table suffix for temporary tables + * @throws Exception if the safe query fails + */ + @JvmStatic + @Throws(Exception::class) + fun executeTypeAndDedupe( + sqlGenerator: SqlGenerator, + destinationHandler: DestinationHandler<*>, + streamConfig: StreamConfig?, + minExtractedAt: Optional, + suffix: String + ) { + try { + LOGGER.info( + "Attempting typing and deduping for {}.{} with suffix {}", + streamConfig!!.id!!.originalNamespace, + streamConfig.id!!.originalName, + suffix + ) + val unsafeSql = sqlGenerator.updateTable(streamConfig, suffix, minExtractedAt, false) + destinationHandler.execute(unsafeSql) + } catch (e: Exception) { + if (sqlGenerator.shouldRetry(e)) { + // TODO Destination specific non-retryable exceptions should be added. + LOGGER.error( + "Encountered Exception on unsafe SQL for stream {} {} with suffix {}, attempting with error handling", + streamConfig!!.id!!.originalNamespace, + streamConfig.id!!.originalName, + suffix, + e + ) + val saferSql = sqlGenerator.updateTable(streamConfig, suffix, minExtractedAt, true) + destinationHandler.execute(saferSql) + } else { + LOGGER.error( + "Encountered Exception on unsafe SQL for stream {} {} with suffix {}, Retry is skipped", + streamConfig!!.id!!.originalNamespace, + streamConfig.id!!.originalName, + suffix, + e + ) + throw e + } + } + } + + /** + * Everything in [TypeAndDedupeTransaction.executeTypeAndDedupe] but with a little extra prep + * work for the soft reset temp tables + * + * @param sqlGenerator for generating sql for the destination + * @param destinationHandler for executing sql created + * @param streamConfig which stream to operate on + * @throws Exception if the safe query fails + */ + @JvmStatic + @Throws(Exception::class) + fun executeSoftReset( + sqlGenerator: SqlGenerator, + destinationHandler: DestinationHandler<*>, + streamConfig: StreamConfig + ) { + LOGGER.info( + "Attempting soft reset for stream {} {}", + streamConfig.id!!.originalNamespace, + streamConfig.id!!.originalName + ) + destinationHandler.execute(sqlGenerator.prepareTablesForSoftReset(streamConfig)) + executeTypeAndDedupe( + sqlGenerator, + destinationHandler, + streamConfig, + Optional.empty(), + SOFT_RESET_SUFFIX + ) + destinationHandler.execute( + sqlGenerator.overwriteFinalTable(streamConfig.id, SOFT_RESET_SUFFIX) + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/TyperDeduper.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/TyperDeduper.kt new file mode 100644 index 0000000000000..60a8fb24fe752 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/TyperDeduper.kt @@ -0,0 +1,107 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +import io.airbyte.cdk.integrations.destination.StreamSyncSummary +import io.airbyte.protocol.models.v0.StreamDescriptor +import java.util.concurrent.locks.Lock + +/* + * This class wants to do three separate things, but not all of them actually happen here right now: + * * A migration runner, which handles any changes in raw tables (#prepareSchemasAndRawTables) * A + * raw table creator, which creates any missing raw tables (currently handled in e.g. + * GeneralStagingFunctions.onStartFunction, BigQueryStagingConsumerFactory.onStartFunction, etc.) * + * A T+D runner, which manages the final tables (#prepareFinalTables, #typeAndDedupe, etc.) + * + * These would be injectable to the relevant locations, so that we can have: * DV2 destinations with + * T+D enabled (i.e. all three objects instantiated for real) * DV2 destinations with T+D disabled + * (i.e. noop T+D runner but the other two objects for real) * DV1 destinations (i.e. all three + * objects as noop) + * + * Even more ideally, we'd create an instance per stream, instead of having one instance for the + * entire sync. This would massively simplify all the state contained in our implementations - see + * DefaultTyperDeduper's pile of Sets and Maps. + * + * Unfortunately, it's just a pain to inject these objects to everywhere they need to be, and we'd + * need to refactor part of the async framework on top of that. There's an obvious overlap with the + * async framework's onStart function... which we should deal with eventually. + */ +interface TyperDeduper { + /** + * Does two things: Set up the schemas for the sync (both airbyte_internal and final table + * schemas), and execute any raw table migrations. These migrations might include: Upgrading v1 + * raw tables to v2, adding a column to the raw tables, etc. In general, this method shouldn't + * actually create the raw tables; the only exception is in the V1 -> V2 migration. + * + * This method should be called BEFORE creating raw tables, because the V1V2 migration might + * create the raw tables. + * + * This method may affect the behavior of [.prepareFinalTables]. For example, modifying a raw + * table may require us to run a soft reset. However, we should defer that soft reset until + * [.prepareFinalTables]. + */ + @Throws(Exception::class) fun prepareSchemasAndRunMigrations() + + /** + * Create the tables that T+D will write to during the sync. In OVERWRITE mode, these might not + * be the true final tables. Specifically, other than an initial sync (i.e. table does not + * exist, or is empty) we write to a temporary final table, and swap it into the true final + * table at the end of the sync. This is to prevent user downtime during a sync. + * + * This method should be called AFTER creating the raw tables, because it may run a soft reset + * (which requires the raw tables to exist). + */ + @Throws(Exception::class) fun prepareFinalTables() + + /** + * Suggest that we execute typing and deduping for a single stream (i.e. fetch new raw records + * into the final table, etc.). + * + * This method is thread-safe; multiple threads can call it concurrently. If T+D is already + * running for the given stream, this method may choose to do nothing. If a caller wishes to + * force T+D to run (for example, at the end of a sync), they may set `mustRun` to true. + * + * This method relies on callers to prevent concurrent modification to the underlying raw + * tables. This is most easily accomplished using [.getRawTableInsertLock], if the caller guards + * all raw table writes using `getRawTableInsertLock().lock()` and + * `getRawTableInsertLock().unlock()`. While `typeAndDedupe` is executing, that lock will be + * unavailable. However, callers are free to enforce this in other ways (for example, single- + * threaded callers do not need to use the lock). + * + * @param originalNamespace The stream's namespace, as declared in the configured catalog + * @param originalName The stream's name, as declared in the configured catalog + */ + @Throws(Exception::class) + fun typeAndDedupe(originalNamespace: String, originalName: String, mustRun: Boolean) + + /** + * Get the lock that should be used to synchronize inserts to the raw table for a given stream. + * This lock permits any number of threads to hold the lock, but [.typeAndDedupe] will not + * proceed while this lock is held. + * + * This lock provides fairness guarantees, i.e. typeAndDedupe will not starve while waiting for + * the lock (and similarly, raw table writers will not starve if many typeAndDedupe calls are + * queued). + */ + fun getRawTableInsertLock(originalNamespace: String, originalName: String): Lock + + /** + * Does any "end of sync" work. For most streams, this is a noop. + * + * For OVERWRITE streams where we're writing to a temp table, this is where we swap the temp + * table into the final table. + * + * @param streamSyncSummaries Information about what happened during the sync. Implementations + * SHOULD use this information to skip T+D when possible (this is not a requirement for + * correctness, but does allow us to save time/money). This parameter MUST NOT be null. Streams + * MAY be omitted, which will be treated as though they were mapped to + * [StreamSyncSummary.DEFAULT]. + */ + @Throws(Exception::class) + fun typeAndDedupe(streamSyncSummaries: Map) + + @Throws(Exception::class) fun commitFinalTables() + + fun cleanup() +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/TyperDeduperUtil.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/TyperDeduperUtil.kt similarity index 95% rename from airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/TyperDeduperUtil.kt rename to airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/TyperDeduperUtil.kt index 1b55216675fa5..961e1ff5b9e66 100644 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/TyperDeduperUtil.kt +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/TyperDeduperUtil.kt @@ -4,7 +4,6 @@ package io.airbyte.integrations.base.destination.typing_deduping -import com.google.common.collect.Streams import io.airbyte.cdk.integrations.util.ConnectorExceptionUtil.getResultsOrLogAndThrowFirst import io.airbyte.commons.concurrency.CompletableFutures import io.airbyte.integrations.base.destination.typing_deduping.migrators.Migration @@ -161,14 +160,10 @@ class TyperDeduperUtil { destinationHandler: DestinationHandler, parsedCatalog: ParsedCatalog ) { - val rawSchema = parsedCatalog.streams.stream().map { it.id.rawNamespace } - val finalSchema = parsedCatalog.streams.stream().map { it.id.finalNamespace } + val rawSchema = parsedCatalog.streams.map { it.id.rawNamespace } + val finalSchema = parsedCatalog.streams.map { it.id.finalNamespace } val createAllSchemasSql = - Streams.concat(rawSchema, finalSchema) - .filter(Objects::nonNull) - .distinct() - .map(sqlGenerator::createSchema) - .toList() + (rawSchema + finalSchema).distinct().map { sqlGenerator.createSchema(it) } destinationHandler.execute(Sql.concat(createAllSchemasSql)) } diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/UnexpectedSchemaException.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/UnexpectedSchemaException.kt new file mode 100644 index 0000000000000..ade980b4db409 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/UnexpectedSchemaException.kt @@ -0,0 +1,6 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +class UnexpectedSchemaException(message: String?) : RuntimeException(message) diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/Union.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/Union.kt new file mode 100644 index 0000000000000..c919bfe488120 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/Union.kt @@ -0,0 +1,67 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +/** + * Represents a {type: [a, b, ...]} schema. This is theoretically equivalent to {oneOf: [{type: a}, + * {type: b}, ...]} but legacy normalization only handles the {type: [...]} schemas. + * + * Eventually we should: + * + * 1. Announce a breaking change to handle both oneOf styles the same + * 1. Test against some number of API sources to verify that they won't break badly + * 1. Update [AirbyteType.fromJsonSchema] to parse both styles into SupportedOneOf + * 1. Delete UnsupportedOneOf + */ +data class Union(val options: List) : AirbyteType { + override val typeName: String = TYPE + + /** + * This is a hack to handle weird schemas like {type: [object, string]}. If a stream's top-level + * schema looks like this, we still want to be able to extract the object properties (i.e. treat + * it as though the string option didn't exist). + * + * @throws IllegalArgumentException if we cannot extract columns from this schema + */ + fun asColumns(): LinkedHashMap { + AirbyteType.LOGGER.warn("asColumns options=$options") + val numObjectOptions = options.filterIsInstance().count() + if (numObjectOptions > 1) { + AirbyteType.LOGGER.error( + "Can't extract columns from a schema with multiple object options" + ) + return LinkedHashMap() + } + + var retVal: LinkedHashMap + try { + retVal = options.filterIsInstance().first().properties + } catch (_: NoSuchElementException) { + AirbyteType.LOGGER.error("Can't extract columns from a schema with no object options") + retVal = LinkedHashMap() + } + AirbyteType.LOGGER.warn("asColumns retVal=$retVal") + return retVal + } + + // Picks which type in a Union takes precedence + fun chooseType(): AirbyteType { + if (options.isEmpty()) { + return AirbyteProtocolType.UNKNOWN + } + + return options.minBy { + when (it) { + is Array -> -2 + is Struct -> -1 + is AirbyteProtocolType -> it.ordinal + else -> Int.MAX_VALUE + } + } + } + + companion object { + const val TYPE: String = "UNION" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/UnsupportedOneOf.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/UnsupportedOneOf.kt new file mode 100644 index 0000000000000..ad400fcc5edab --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/UnsupportedOneOf.kt @@ -0,0 +1,17 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +/** + * Represents a {oneOf: [...]} schema. + * + * This is purely a legacy type that we should eventually delete. See also [Union]. + */ +data class UnsupportedOneOf(val options: List) : AirbyteType { + override val typeName: String = TYPE + + companion object { + const val TYPE: String = "UNSUPPORTED_ONE_OF" + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/V2TableMigrator.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/V2TableMigrator.kt new file mode 100644 index 0000000000000..211fbf9c610c8 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/V2TableMigrator.kt @@ -0,0 +1,11 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +/** + * Prefer [io.airbyte.integrations.base.destination.typing_deduping.migrators.Migration] instead. + */ +interface V2TableMigrator { + @Throws(Exception::class) fun migrateIfNecessary(streamConfig: StreamConfig?) +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/migrators/Migration.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/migrators/Migration.kt similarity index 100% rename from airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/migrators/Migration.kt rename to airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/migrators/Migration.kt diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/migrators/MinimumDestinationState.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/migrators/MinimumDestinationState.kt similarity index 95% rename from airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/migrators/MinimumDestinationState.kt rename to airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/migrators/MinimumDestinationState.kt index 2fceb4ad74fc8..ff9b8c8ac15eb 100644 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/migrators/MinimumDestinationState.kt +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/kotlin/io/airbyte/integrations/base/destination/typing_deduping/migrators/MinimumDestinationState.kt @@ -40,8 +40,9 @@ interface MinimumDestinationState { return needsSoftReset } + @Suppress("UNCHECKED_CAST") override fun withSoftReset(needsSoftReset: Boolean): T { - return copy(needsSoftReset = true) as T + return Impl(needsSoftReset = true) as T } } } diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/java/io/airbyte/integrations/base/destination/typing_deduping/AirbyteTypeTest.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/java/io/airbyte/integrations/base/destination/typing_deduping/AirbyteTypeTest.java deleted file mode 100644 index da80eeee31c56..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/java/io/airbyte/integrations/base/destination/typing_deduping/AirbyteTypeTest.java +++ /dev/null @@ -1,502 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -import static io.airbyte.integrations.base.destination.typing_deduping.AirbyteProtocolType.*; -import static io.airbyte.integrations.base.destination.typing_deduping.AirbyteType.fromJsonSchema; -import static org.junit.jupiter.api.Assertions.assertAll; -import static org.junit.jupiter.api.Assertions.assertEquals; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableList; -import io.airbyte.commons.json.Jsons; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import org.junit.jupiter.api.Test; - -public class AirbyteTypeTest { - - @Test - public void testStruct() { - final List structSchema = new ArrayList<>(); - structSchema.add(""" - { - "type": "object", - "properties": { - "key1": { - "type": "boolean" - }, - "key2": { - "type": "integer" - }, - "key3": { - "type": "number", - "airbyte_type": "integer" - }, - "key4": { - "type": "number" - }, - "key5": { - "type": "string", - "format": "date" - }, - "key6": { - "type": "string", - "format": "time", - "airbyte_type": "time_without_timezone" - }, - "key7": { - "type": "string", - "format": "time", - "airbyte_type": "time_with_timezone" - }, - "key8": { - "type": "string", - "format": "date-time", - "airbyte_type": "timestamp_without_timezone" - }, - "key9": { - "type": "string", - "format": "date-time", - "airbyte_type": "timestamp_with_timezone" - }, - "key10": { - "type": "string", - "format": "date-time" - }, - "key11": { - "type": "string" - } - } - } - """); - structSchema.add(""" - { - "type": ["object"], - "properties": { - "key1": { - "type": ["boolean"] - }, - "key2": { - "type": ["integer"] - }, - "key3": { - "type": ["number"], - "airbyte_type": "integer" - }, - "key4": { - "type": ["number"] - }, - "key5": { - "type": ["string"], - "format": "date" - }, - "key6": { - "type": ["string"], - "format": "time", - "airbyte_type": "time_without_timezone" - }, - "key7": { - "type": ["string"], - "format": "time", - "airbyte_type": "time_with_timezone" - }, - "key8": { - "type": ["string"], - "format": "date-time", - "airbyte_type": "timestamp_without_timezone" - }, - "key9": { - "type": ["string"], - "format": "date-time", - "airbyte_type": "timestamp_with_timezone" - }, - "key10": { - "type": ["string"], - "format": "date-time" - }, - "key11": { - "type": ["string"] - } - } - } - """); - structSchema.add(""" - { - "type": ["null", "object"], - "properties": { - "key1": { - "type": ["null", "boolean"] - }, - "key2": { - "type": ["null", "integer"] - }, - "key3": { - "type": ["null", "number"], - "airbyte_type": "integer" - }, - "key4": { - "type": ["null", "number"] - }, - "key5": { - "type": ["null", "string"], - "format": "date" - }, - "key6": { - "type": ["null", "string"], - "format": "time", - "airbyte_type": "time_without_timezone" - }, - "key7": { - "type": ["null", "string"], - "format": "time", - "airbyte_type": "time_with_timezone" - }, - "key8": { - "type": ["null", "string"], - "format": "date-time", - "airbyte_type": "timestamp_without_timezone" - }, - "key9": { - "type": ["null", "string"], - "format": "date-time", - "airbyte_type": "timestamp_with_timezone" - }, - "key10": { - "type": ["null", "string"], - "format": "date-time" - }, - "key11": { - "type": ["null", "string"] - } - } - } - """); - - final LinkedHashMap propertiesMap = new LinkedHashMap<>(); - propertiesMap.put("key1", BOOLEAN); - propertiesMap.put("key2", INTEGER); - propertiesMap.put("key3", INTEGER); - propertiesMap.put("key4", NUMBER); - propertiesMap.put("key5", DATE); - propertiesMap.put("key6", TIME_WITHOUT_TIMEZONE); - propertiesMap.put("key7", TIME_WITH_TIMEZONE); - propertiesMap.put("key8", TIMESTAMP_WITHOUT_TIMEZONE); - propertiesMap.put("key9", TIMESTAMP_WITH_TIMEZONE); - propertiesMap.put("key10", TIMESTAMP_WITH_TIMEZONE); - propertiesMap.put("key11", STRING); - - final AirbyteType struct = new Struct(propertiesMap); - for (final String schema : structSchema) { - assertEquals(struct, fromJsonSchema(Jsons.deserialize(schema))); - } - } - - @Test - public void testEmptyStruct() { - final List structSchema = new ArrayList<>(); - structSchema.add(""" - { - "type": "object" - } - """); - structSchema.add(""" - { - "type": ["object"] - } - """); - structSchema.add(""" - { - "type": ["null", "object"] - } - """); - - final AirbyteType struct = new Struct(new LinkedHashMap<>()); - for (final String schema : structSchema) { - assertEquals(struct, fromJsonSchema(Jsons.deserialize(schema))); - } - } - - @Test - public void testImplicitStruct() { - final String structSchema = """ - { - "properties": { - "key1": { - "type": "boolean" - } - } - } - """; - - final LinkedHashMap propertiesMap = new LinkedHashMap<>(); - propertiesMap.put("key1", BOOLEAN); - - final AirbyteType struct = new Struct(propertiesMap); - assertEquals(struct, fromJsonSchema(Jsons.deserialize(structSchema))); - } - - @Test - public void testArray() { - final List arraySchema = new ArrayList<>(); - arraySchema.add(""" - { - "type": "array", - "items": { - "type": "string", - "format": "date-time", - "airbyte_type": "timestamp_with_timezone" - } - } - """); - arraySchema.add(""" - { - "type": ["array"], - "items": { - "type": ["string"], - "format": "date-time", - "airbyte_type": "timestamp_with_timezone" - } - } - """); - arraySchema.add(""" - { - "type": ["null", "array"], - "items": { - "type": ["null", "string"], - "format": "date-time", - "airbyte_type": "timestamp_with_timezone" - } - } - """); - - final AirbyteType array = new Array(TIMESTAMP_WITH_TIMEZONE); - for (final String schema : arraySchema) { - assertEquals(array, fromJsonSchema(Jsons.deserialize(schema))); - } - } - - @Test - public void testEmptyArray() { - final List arraySchema = new ArrayList<>(); - arraySchema.add(""" - { - "type": "array" - } - """); - arraySchema.add(""" - { - "type": ["array"] - } - """); - - arraySchema.add(""" - { - "type": ["null", "array"] - } - """); - - final AirbyteType array = new Array(UNKNOWN); - for (final String schema : arraySchema) { - assertEquals(array, fromJsonSchema(Jsons.deserialize(schema))); - } - } - - @Test - public void testUnsupportedOneOf() { - final String unsupportedOneOfSchema = """ - { - "oneOf": ["number", "string"] - } - """; - - final List options = new ArrayList<>(); - options.add(NUMBER); - options.add(STRING); - - final UnsupportedOneOf unsupportedOneOf = new UnsupportedOneOf(options); - assertEquals(unsupportedOneOf, fromJsonSchema(Jsons.deserialize(unsupportedOneOfSchema))); - } - - @Test - public void testUnion() { - - final String unionSchema = """ - { - "type": ["string", "number"] - } - """; - - final List options = new ArrayList<>(); - options.add(STRING); - options.add(NUMBER); - - final Union union = new Union(options); - assertEquals(union, fromJsonSchema(Jsons.deserialize(unionSchema))); - } - - @Test - public void testUnionComplex() { - final JsonNode schema = Jsons.deserialize(""" - { - "type": ["string", "object", "array", "null", "string", "object", "array", "null"], - "properties": { - "foo": {"type": "string"} - }, - "items": {"type": "string"} - } - """); - - final AirbyteType parsed = fromJsonSchema(schema); - - final AirbyteType expected = new Union(List.of( - STRING, - new Struct(new LinkedHashMap<>() { - - { - put("foo", STRING); - } - - }), - new Array(STRING))); - assertEquals(expected, parsed); - } - - @Test - public void testUnionUnderspecifiedNonPrimitives() { - final JsonNode schema = Jsons.deserialize(""" - { - "type": ["string", "object", "array", "null", "string", "object", "array", "null"] - } - """); - - final AirbyteType parsed = fromJsonSchema(schema); - - final AirbyteType expected = new Union(List.of( - STRING, - new Struct(new LinkedHashMap<>()), - new Array(UNKNOWN))); - assertEquals(expected, parsed); - } - - @Test - public void testInvalidTextualType() { - final String invalidTypeSchema = """ - { - "type": "foo" - } - """; - assertEquals(UNKNOWN, fromJsonSchema(Jsons.deserialize(invalidTypeSchema))); - } - - @Test - public void testInvalidBooleanType() { - final String invalidTypeSchema = """ - { - "type": true - } - """; - assertEquals(UNKNOWN, fromJsonSchema(Jsons.deserialize(invalidTypeSchema))); - } - - @Test - public void testInvalid() { - final List invalidSchema = new ArrayList<>(); - invalidSchema.add(""); - invalidSchema.add("null"); - invalidSchema.add("true"); - invalidSchema.add("false"); - invalidSchema.add("1"); - invalidSchema.add("\"\""); - invalidSchema.add("[]"); - invalidSchema.add("{}"); - - for (final String schema : invalidSchema) { - assertEquals(UNKNOWN, fromJsonSchema(Jsons.deserialize(schema))); - } - } - - @Test - public void testChooseUnion() { - final Map unionToType = new HashMap<>(); - - final Array a = new Array(BOOLEAN); - - final LinkedHashMap properties = new LinkedHashMap<>(); - properties.put("key1", UNKNOWN); - properties.put("key2", INTEGER); - final Struct s = new Struct(properties); - - unionToType.put(new Union(ImmutableList.of(s, a)), a); - unionToType.put(new Union(ImmutableList.of(NUMBER, a)), a); - unionToType.put(new Union(ImmutableList.of(INTEGER, s)), s); - unionToType.put(new Union(ImmutableList.of(NUMBER, DATE, BOOLEAN)), DATE); - unionToType.put(new Union(ImmutableList.of(INTEGER, BOOLEAN, NUMBER)), NUMBER); - unionToType.put(new Union(ImmutableList.of(BOOLEAN, INTEGER)), INTEGER); - - assertAll( - unionToType.entrySet().stream().map(e -> () -> assertEquals(e.getValue(), e.getKey().chooseType()))); - } - - @Test - public void testAsColumns() { - final Union u = new Union(List.of( - STRING, - new Struct(new LinkedHashMap<>() { - - { - put("foo", STRING); - } - - }), - new Array(STRING), - // This is bad behavior, but it matches current behavior so we'll test it. - // Ideally, we would recognize that the sub-unions are also objects. - new Union(List.of(new Struct(new LinkedHashMap<>()))), - new UnsupportedOneOf(List.of(new Struct(new LinkedHashMap<>()))))); - - final LinkedHashMap columns = u.asColumns(); - - assertEquals( - new LinkedHashMap<>() { - - { - put("foo", STRING); - } - - }, - columns); - } - - @Test - public void testAsColumnsMultipleObjects() { - final Union u = new Union(List.of( - new Struct(new LinkedHashMap<>()), - new Struct(new LinkedHashMap<>()))); - - // This prooobably should throw an exception, but for the sake of smooth rollout it just logs a - // warning for now. - assertEquals(new LinkedHashMap<>(), u.asColumns()); - } - - @Test - public void testAsColumnsNoObjects() { - final Union u = new Union(List.of( - STRING, - new Array(STRING), - new UnsupportedOneOf(new ArrayList<>()), - // Similar to testAsColumns(), this is bad behavior. - new Union(List.of(new Struct(new LinkedHashMap<>()))), - new UnsupportedOneOf(List.of(new Struct(new LinkedHashMap<>()))))); - - // This prooobably should throw an exception, but for the sake of smooth rollout it just logs a - // warning for now. - assertEquals(new LinkedHashMap<>(), u.asColumns()); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/java/io/airbyte/integrations/base/destination/typing_deduping/CatalogParserTest.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/java/io/airbyte/integrations/base/destination/typing_deduping/CatalogParserTest.java deleted file mode 100644 index b0237657058b7..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/java/io/airbyte/integrations/base/destination/typing_deduping/CatalogParserTest.java +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotEquals; -import static org.mockito.Mockito.*; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteStream; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import java.util.List; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class CatalogParserTest { - - private SqlGenerator sqlGenerator; - private CatalogParser parser; - - @BeforeEach - public void setup() { - sqlGenerator = mock(SqlGenerator.class); - // noop quoting logic - when(sqlGenerator.buildColumnId(any())).thenAnswer(invocation -> { - final String fieldName = invocation.getArgument(0); - return new ColumnId(fieldName, fieldName, fieldName); - }); - when(sqlGenerator.buildStreamId(any(), any(), any())).thenAnswer(invocation -> { - final String namespace = invocation.getArgument(0); - final String name = invocation.getArgument(1); - final String rawNamespace = invocation.getArgument(1); - return new StreamId(namespace, name, rawNamespace, namespace + "_abab_" + name, namespace, name); - }); - - parser = new CatalogParser(sqlGenerator); - } - - /** - * Both these streams will write to the same final table name ("foofoo"). Verify that they don't - * actually use the same tablename. - */ - @Test - public void finalNameCollision() { - when(sqlGenerator.buildStreamId(any(), any(), any())).thenAnswer(invocation -> { - final String originalNamespace = invocation.getArgument(0); - final String originalName = (invocation.getArgument(1)); - final String originalRawNamespace = (invocation.getArgument(1)); - - // emulate quoting logic that causes a name collision - final String quotedName = originalName.replaceAll("bar", ""); - return new StreamId(originalNamespace, quotedName, originalRawNamespace, originalNamespace + "_abab_" + quotedName, originalNamespace, - originalName); - }); - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withStreams(List.of( - stream("a", "foobarfoo"), - stream("a", "foofoo"))); - - final ParsedCatalog parsedCatalog = parser.parseCatalog(catalog); - - assertNotEquals( - parsedCatalog.streams().get(0).id().finalName(), - parsedCatalog.streams().get(1).id().finalName()); - } - - /** - * The schema contains two fields, which will both end up named "foofoo" after quoting. Verify that - * they don't actually use the same column name. - */ - @Test - public void columnNameCollision() { - when(sqlGenerator.buildColumnId(any(), any())).thenAnswer(invocation -> { - final String originalName = invocation.getArgument(0); - - // emulate quoting logic that causes a name collision - final String quotedName = originalName.replaceAll("bar", ""); - return new ColumnId(quotedName, originalName, quotedName); - }); - final JsonNode schema = Jsons.deserialize(""" - { - "type": "object", - "properties": { - "foobarfoo": {"type": "string"}, - "foofoo": {"type": "string"} - } - } - """); - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withStreams(List.of(stream("a", "a", schema))); - - final ParsedCatalog parsedCatalog = parser.parseCatalog(catalog); - - assertEquals(2, parsedCatalog.streams().get(0).columns().size()); - } - - private static ConfiguredAirbyteStream stream(final String namespace, final String name) { - return stream( - namespace, - name, - Jsons.deserialize(""" - { - "type": "object", - "properties": { - "name": {"type": "string"} - } - } - """)); - } - - private static ConfiguredAirbyteStream stream(final String namespace, final String name, final JsonNode schema) { - return new ConfiguredAirbyteStream().withStream( - new AirbyteStream() - .withNamespace(namespace) - .withName(name) - .withJsonSchema(schema)); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/java/io/airbyte/integrations/base/destination/typing_deduping/CollectionUtilsTest.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/java/io/airbyte/integrations/base/destination/typing_deduping/CollectionUtilsTest.java deleted file mode 100644 index 84718062c16ce..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/java/io/airbyte/integrations/base/destination/typing_deduping/CollectionUtilsTest.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -import java.util.Optional; -import java.util.Set; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.CsvSource; - -public class CollectionUtilsTest { - - static Set TEST_COLLECTION = Set.of("foo", "BAR", "fizz", "zip_ZOP"); - - @ParameterizedTest - @CsvSource({"foo,foo", "bar,BAR", "fIzZ,fizz", "ZIP_zop,zip_ZOP", "nope,"}) - public void testMatchingKey(final String input, final String output) { - final var expected = Optional.ofNullable(output); - Assertions.assertEquals(CollectionUtils.matchingKey(TEST_COLLECTION, input), expected); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/java/io/airbyte/integrations/base/destination/typing_deduping/DefaultTyperDeduperTest.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/java/io/airbyte/integrations/base/destination/typing_deduping/DefaultTyperDeduperTest.java deleted file mode 100644 index 7eec05efc2d78..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/java/io/airbyte/integrations/base/destination/typing_deduping/DefaultTyperDeduperTest.java +++ /dev/null @@ -1,637 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -import static io.airbyte.integrations.base.destination.typing_deduping.Sql.separately; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyList; -import static org.mockito.Mockito.clearInvocations; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.ignoreStubs; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoInteractions; -import static org.mockito.Mockito.verifyNoMoreInteractions; -import static org.mockito.Mockito.when; - -import io.airbyte.cdk.integrations.destination.StreamSyncSummary; -import io.airbyte.integrations.base.destination.typing_deduping.migrators.Migration; -import io.airbyte.integrations.base.destination.typing_deduping.migrators.MinimumDestinationState; -import io.airbyte.protocol.models.v0.DestinationSyncMode; -import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.time.Instant; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import lombok.SneakyThrows; -import org.jetbrains.annotations.NotNull; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -public class DefaultTyperDeduperTest { - - private ParsedCatalog parsedCatalog; - private static final StreamConfig OVERWRITE_STREAM_CONFIG = new StreamConfig( - new StreamId("overwrite_ns", "overwrite_stream", "airbyte_internal", "overwrite_stream", "overwrite_ns", "overwrite_stream"), - null, - DestinationSyncMode.OVERWRITE, - null, - null, - null); - private static final StreamConfig APPEND_STREAM_CONFIG = new StreamConfig( - new StreamId("append_ns", "append_stream", "airbyte_internal", "append_stream", "append_ns", "append_stream"), - null, - DestinationSyncMode.APPEND, - null, - null, - null); - private static final StreamConfig DEDUPE_STREAM_CONFIG = new StreamConfig( - new StreamId("dedup_ns", "dedup_stream", "airbyte_internal", "dedup_stream", "dedup_ns", "dedup_stream"), - null, - DestinationSyncMode.APPEND_DEDUP, - null, - null, - null); - - private record MockState( - boolean needsSoftReset, - boolean softResetMigrationCompleted, - boolean nonSoftResetMigrationCompleted) - implements MinimumDestinationState { - - @Override - public MockState withSoftReset(boolean needsSoftReset) { - return new MockState(needsSoftReset, this.softResetMigrationCompleted, this.nonSoftResetMigrationCompleted); - } - - } - - private MockSqlGenerator sqlGenerator; - private DestinationHandler destinationHandler; - - private List> initialStates; - private Map updatedStates; - - private DestinationV1V2Migrator migrator; - private TyperDeduper typerDeduper; - - private final Migration MIGRATION_REQUIRING_SOFT_RESET = new Migration<>() { - - @SneakyThrows - @NotNull - @Override - public MigrationResult migrateIfNecessary(DestinationHandler destinationHandler, - @NotNull StreamConfig stream, - DestinationInitialStatus state) { - destinationHandler.execute(Sql.of("MIGRATE " + stream.id().rawTableId(""))); - return new MigrationResult<>(new MockState(true, true, state.destinationState().nonSoftResetMigrationCompleted), false); - } - - }; - - private final Migration MIGRATION_NOT_REQUIRING_SOFT_RESET = new Migration<>() { - - @NotNull - @Override - public MigrationResult migrateIfNecessary(@NotNull DestinationHandler destinationHandler, - @NotNull StreamConfig stream, - DestinationInitialStatus status) { - return new MigrationResult<>( - new MockState( - status.destinationState().needsSoftReset, - status.destinationState().softResetMigrationCompleted, - true), - false); - } - - }; - - private final Migration MIGRATION_NOOP = new Migration<>() { - - @NotNull - @Override - public MigrationResult migrateIfNecessary(@NotNull DestinationHandler destinationHandler, - @NotNull StreamConfig stream, - DestinationInitialStatus status) { - return new MigrationResult<>( - new MockState( - status.destinationState().needsSoftReset, - status.destinationState().softResetMigrationCompleted, - true), - false); - } - - }; - - @BeforeEach - void setup() throws Exception { - sqlGenerator = spy(new MockSqlGenerator()); - destinationHandler = mock(DestinationHandler.class); - - DestinationInitialStatus overwriteNsState = mock(DestinationInitialStatus.class); - when(overwriteNsState.destinationState()).thenReturn(new MockState(false, false, true)); - when(overwriteNsState.streamConfig()).thenReturn(OVERWRITE_STREAM_CONFIG); - - DestinationInitialStatus appendNsState = mock(DestinationInitialStatus.class); - when(appendNsState.destinationState()).thenReturn(new MockState(false, false, true)); - when(appendNsState.streamConfig()).thenReturn(APPEND_STREAM_CONFIG); - - DestinationInitialStatus dedupeNsState = mock(DestinationInitialStatus.class); - when(dedupeNsState.destinationState()).thenReturn(new MockState(false, false, true)); - when(dedupeNsState.streamConfig()).thenReturn(DEDUPE_STREAM_CONFIG); - - initialStates = List.of(overwriteNsState, appendNsState, dedupeNsState); - when(destinationHandler.gatherInitialState(anyList())) - .thenReturn(initialStates); - initialStates - .forEach(initialState -> when(initialState.initialRawTableStatus()).thenReturn(new InitialRawTableStatus(true, true, Optional.empty()))); - - updatedStates = new HashMap<>(); - updatedStates.put(OVERWRITE_STREAM_CONFIG.id(), new MockState(false, false, true)); - updatedStates.put(APPEND_STREAM_CONFIG.id(), new MockState(false, false, true)); - updatedStates.put(DEDUPE_STREAM_CONFIG.id(), new MockState(false, false, true)); - - migrator = new NoOpDestinationV1V2Migrator(); - - parsedCatalog = new ParsedCatalog(List.of( - OVERWRITE_STREAM_CONFIG, - APPEND_STREAM_CONFIG, - DEDUPE_STREAM_CONFIG)); - - typerDeduper = new DefaultTyperDeduper<>(sqlGenerator, destinationHandler, parsedCatalog, migrator, Collections.emptyList()); - } - - /** - * When there are no existing tables, we should create them and write to them directly. - */ - @Test - void emptyDestination() throws Exception { - initialStates.forEach(initialState -> when(initialState.isFinalTablePresent()).thenReturn(false)); - - typerDeduper.prepareSchemasAndRunMigrations(); - verify(destinationHandler) - .execute(separately("CREATE SCHEMA airbyte_internal", "CREATE SCHEMA overwrite_ns", "CREATE SCHEMA append_ns", "CREATE SCHEMA dedup_ns")); - verify(destinationHandler).commitDestinationStates(updatedStates); - clearInvocations(destinationHandler); - - typerDeduper.prepareFinalTables(); - verify(destinationHandler).execute(Sql.of("CREATE TABLE overwrite_ns.overwrite_stream")); - verify(destinationHandler).execute(Sql.of("CREATE TABLE append_ns.append_stream")); - verify(destinationHandler).execute(Sql.of("CREATE TABLE dedup_ns.dedup_stream")); - verify(destinationHandler).commitDestinationStates(updatedStates); - verifyNoMoreInteractions(ignoreStubs(destinationHandler)); - clearInvocations(destinationHandler); - - typerDeduper.typeAndDedupe("overwrite_ns", "overwrite_stream", false); - verify(destinationHandler).execute(Sql.of("UPDATE TABLE overwrite_ns.overwrite_stream WITHOUT SAFER CASTING")); - typerDeduper.typeAndDedupe("append_ns", "append_stream", false); - verify(destinationHandler).execute(Sql.of("UPDATE TABLE append_ns.append_stream WITHOUT SAFER CASTING")); - typerDeduper.typeAndDedupe("dedup_ns", "dedup_stream", false); - verify(destinationHandler).execute(Sql.of("UPDATE TABLE dedup_ns.dedup_stream WITHOUT SAFER CASTING")); - verifyNoMoreInteractions(ignoreStubs(destinationHandler)); - clearInvocations(destinationHandler); - - typerDeduper.commitFinalTables(); - verify(destinationHandler, never()).execute(any()); - } - - /** - * When there's an existing table but it's empty, we should ensure it has the right schema and write - * to it directly. - */ - @Test - void existingEmptyTable() throws Exception { - initialStates.forEach(initialState -> { - when(initialState.isFinalTablePresent()).thenReturn(true); - when(initialState.isFinalTableEmpty()).thenReturn(true); - when(initialState.isSchemaMismatch()).thenReturn(true); - }); - - typerDeduper.prepareSchemasAndRunMigrations(); - verify(destinationHandler) - .execute(separately("CREATE SCHEMA airbyte_internal", "CREATE SCHEMA overwrite_ns", "CREATE SCHEMA append_ns", "CREATE SCHEMA dedup_ns")); - verify(destinationHandler).commitDestinationStates(updatedStates); - clearInvocations(destinationHandler); - - typerDeduper.prepareFinalTables(); - verify(destinationHandler).execute(Sql.of("CREATE TABLE overwrite_ns.overwrite_stream_airbyte_tmp")); - verify(destinationHandler).execute(Sql.of("PREPARE append_ns.append_stream FOR SOFT RESET")); - verify(destinationHandler).execute(Sql.of("UPDATE TABLE append_ns.append_stream_ab_soft_reset WITHOUT SAFER CASTING")); - verify(destinationHandler).execute(Sql.of("OVERWRITE TABLE append_ns.append_stream FROM append_ns.append_stream_ab_soft_reset")); - verify(destinationHandler).execute(Sql.of("PREPARE dedup_ns.dedup_stream FOR SOFT RESET")); - verify(destinationHandler).execute(Sql.of("UPDATE TABLE dedup_ns.dedup_stream_ab_soft_reset WITHOUT SAFER CASTING")); - verify(destinationHandler).execute(Sql.of("OVERWRITE TABLE dedup_ns.dedup_stream FROM dedup_ns.dedup_stream_ab_soft_reset")); - verify(destinationHandler).commitDestinationStates(updatedStates); - verifyNoMoreInteractions(ignoreStubs(destinationHandler)); - clearInvocations(destinationHandler); - - typerDeduper.typeAndDedupe("overwrite_ns", "overwrite_stream", false); - verify(destinationHandler).execute(Sql.of("UPDATE TABLE overwrite_ns.overwrite_stream_airbyte_tmp WITHOUT SAFER CASTING")); - typerDeduper.typeAndDedupe("append_ns", "append_stream", false); - verify(destinationHandler).execute(Sql.of("UPDATE TABLE append_ns.append_stream WITHOUT SAFER CASTING")); - typerDeduper.typeAndDedupe("dedup_ns", "dedup_stream", false); - verify(destinationHandler).execute(Sql.of("UPDATE TABLE dedup_ns.dedup_stream WITHOUT SAFER CASTING")); - verifyNoMoreInteractions(ignoreStubs(destinationHandler)); - clearInvocations(destinationHandler); - - typerDeduper.commitFinalTables(); - verify(destinationHandler).execute(Sql.of("OVERWRITE TABLE overwrite_ns.overwrite_stream FROM overwrite_ns.overwrite_stream_airbyte_tmp")); - verifyNoMoreInteractions(ignoreStubs(destinationHandler)); - } - - /** - * When there's an existing empty table with the right schema, we don't need to do anything during - * setup. - */ - @Test - void existingEmptyTableMatchingSchema() throws Exception { - initialStates.forEach(initialState -> { - when(initialState.isFinalTablePresent()).thenReturn(true); - when(initialState.isFinalTableEmpty()).thenReturn(true); - when(initialState.isSchemaMismatch()).thenReturn(false); - }); - - typerDeduper.prepareSchemasAndRunMigrations(); - verify(destinationHandler) - .execute(separately("CREATE SCHEMA airbyte_internal", "CREATE SCHEMA overwrite_ns", "CREATE SCHEMA append_ns", "CREATE SCHEMA dedup_ns")); - clearInvocations(destinationHandler); - - typerDeduper.prepareFinalTables(); - verify(destinationHandler, never()).execute(any()); - } - - /** - * When there's an existing nonempty table, we should alter it. For the OVERWRITE stream, we also - * need to write to a tmp table, and overwrite the real table at the end of the sync. - */ - @Test - void existingNonemptyTable() throws Exception { - initialStates.forEach(initialState -> { - when(initialState.isFinalTablePresent()).thenReturn(true); - when(initialState.isFinalTableEmpty()).thenReturn(false); - when(initialState.isSchemaMismatch()).thenReturn(true); - when(initialState.initialRawTableStatus()) - .thenReturn(new InitialRawTableStatus(true, true, Optional.of(Instant.parse("2023-01-01T12:34:56Z")))); - }); - - typerDeduper.prepareSchemasAndRunMigrations(); - verify(destinationHandler) - .execute(separately("CREATE SCHEMA airbyte_internal", "CREATE SCHEMA overwrite_ns", "CREATE SCHEMA append_ns", "CREATE SCHEMA dedup_ns")); - verify(destinationHandler).commitDestinationStates(updatedStates); - clearInvocations(destinationHandler); - - typerDeduper.prepareFinalTables(); - // NB: We only create a tmp table for the overwrite stream, and do _not_ soft reset the existing - // overwrite stream's table. - verify(destinationHandler).execute(Sql.of("CREATE TABLE overwrite_ns.overwrite_stream_airbyte_tmp")); - verify(destinationHandler).execute(Sql.of("PREPARE append_ns.append_stream FOR SOFT RESET")); - verify(destinationHandler).execute(Sql.of("UPDATE TABLE append_ns.append_stream_ab_soft_reset WITHOUT SAFER CASTING")); - verify(destinationHandler).execute(Sql.of("OVERWRITE TABLE append_ns.append_stream FROM append_ns.append_stream_ab_soft_reset")); - verify(destinationHandler).execute(Sql.of("PREPARE dedup_ns.dedup_stream FOR SOFT RESET")); - verify(destinationHandler).execute(Sql.of("UPDATE TABLE dedup_ns.dedup_stream_ab_soft_reset WITHOUT SAFER CASTING")); - verify(destinationHandler).execute(Sql.of("OVERWRITE TABLE dedup_ns.dedup_stream FROM dedup_ns.dedup_stream_ab_soft_reset")); - verify(destinationHandler).commitDestinationStates(updatedStates); - verifyNoMoreInteractions(ignoreStubs(destinationHandler)); - clearInvocations(destinationHandler); - - typerDeduper.typeAndDedupe("overwrite_ns", "overwrite_stream", false); - // NB: no airbyte_tmp suffix on the non-overwrite streams - verify(destinationHandler) - .execute(Sql.of("UPDATE TABLE overwrite_ns.overwrite_stream_airbyte_tmp WITHOUT SAFER CASTING WHERE extracted_at > 2023-01-01T12:34:56Z")); - typerDeduper.typeAndDedupe("append_ns", "append_stream", false); - verify(destinationHandler) - .execute(Sql.of("UPDATE TABLE append_ns.append_stream WITHOUT SAFER CASTING WHERE extracted_at > 2023-01-01T12:34:56Z")); - typerDeduper.typeAndDedupe("dedup_ns", "dedup_stream", false); - verify(destinationHandler).execute(Sql.of("UPDATE TABLE dedup_ns.dedup_stream WITHOUT SAFER CASTING WHERE extracted_at > 2023-01-01T12:34:56Z")); - verifyNoMoreInteractions(ignoreStubs(destinationHandler)); - clearInvocations(destinationHandler); - - typerDeduper.commitFinalTables(); - verify(destinationHandler).execute(Sql.of("OVERWRITE TABLE overwrite_ns.overwrite_stream FROM overwrite_ns.overwrite_stream_airbyte_tmp")); - verifyNoMoreInteractions(ignoreStubs(destinationHandler)); - } - - /** - * When there's an existing nonempty table with the right schema, we don't need to modify it, but - * OVERWRITE streams still need to create a tmp table. - */ - @Test - void existingNonemptyTableMatchingSchema() throws Exception { - initialStates.forEach(initialState -> { - when(initialState.isFinalTablePresent()).thenReturn(true); - when(initialState.isFinalTableEmpty()).thenReturn(false); - when(initialState.isSchemaMismatch()).thenReturn(false); - when(initialState.initialRawTableStatus()).thenReturn(new InitialRawTableStatus(true, true, Optional.of(Instant.now()))); - }); - - typerDeduper.prepareSchemasAndRunMigrations(); - verify(destinationHandler) - .execute(separately("CREATE SCHEMA airbyte_internal", "CREATE SCHEMA overwrite_ns", "CREATE SCHEMA append_ns", "CREATE SCHEMA dedup_ns")); - verify(destinationHandler).commitDestinationStates(updatedStates); - clearInvocations(destinationHandler); - - typerDeduper.prepareFinalTables(); - // NB: We only create one tmp table here. - // Also, we need to alter the existing _real_ table, not the tmp table! - verify(destinationHandler).execute(Sql.of("CREATE TABLE overwrite_ns.overwrite_stream_airbyte_tmp")); - verify(destinationHandler).commitDestinationStates(updatedStates); - verifyNoMoreInteractions(ignoreStubs(destinationHandler)); - } - - @Test - void nonexistentStream() { - assertThrows(IllegalArgumentException.class, - () -> typerDeduper.typeAndDedupe("nonexistent_ns", "nonexistent_stream", false)); - verifyNoInteractions(ignoreStubs(destinationHandler)); - } - - @Test - void failedSetup() throws Exception { - doThrow(new RuntimeException("foo")).when(destinationHandler).execute(any()); - - assertThrows(Exception.class, () -> typerDeduper.prepareFinalTables()); - clearInvocations(destinationHandler); - - typerDeduper.typeAndDedupe("dedup_ns", "dedup_stream", false); - typerDeduper.commitFinalTables(); - - verifyNoInteractions(ignoreStubs(destinationHandler)); - } - - /** - * Test a typical sync, where the previous sync left no unprocessed raw records. If this sync writes - * some records for a stream, we should run T+D for that stream. - */ - @Test - void noUnprocessedRecords() throws Exception { - initialStates - .forEach(initialState -> when(initialState.initialRawTableStatus()).thenReturn(new InitialRawTableStatus(true, false, Optional.empty()))); - - typerDeduper.prepareSchemasAndRunMigrations(); - - typerDeduper.prepareFinalTables(); - clearInvocations(destinationHandler); - - typerDeduper.typeAndDedupe(Map.of( - new StreamDescriptor().withName("overwrite_stream").withNamespace("overwrite_ns"), new StreamSyncSummary(Optional.of(0L)), - new StreamDescriptor().withName("append_stream").withNamespace("append_ns"), new StreamSyncSummary(Optional.of(1L)))); - - // append_stream and dedup_stream should be T+D-ed. overwrite_stream has explicitly 0 records, but - // dedup_stream - // is missing from the map, so implicitly has nonzero records. - verify(destinationHandler).execute(Sql.of("UPDATE TABLE append_ns.append_stream WITHOUT SAFER CASTING")); - verify(destinationHandler).execute(Sql.of("UPDATE TABLE dedup_ns.dedup_stream WITHOUT SAFER CASTING")); - verifyNoMoreInteractions(destinationHandler); - } - - /** - * Test a sync where the previous sync failed to run T+D for some stream. Even if this sync writes - * zero records, it should still run T+D. - */ - @Test - void unprocessedRecords() throws Exception { - initialStates.forEach(initialState -> when(initialState.initialRawTableStatus()) - .thenReturn(new InitialRawTableStatus(true, true, Optional.of(Instant.parse("2023-01-23T12:34:56Z"))))); - - typerDeduper.prepareSchemasAndRunMigrations(); - - typerDeduper.prepareFinalTables(); - clearInvocations(destinationHandler); - - typerDeduper.typeAndDedupe(Map.of( - new StreamDescriptor().withName("overwrite_stream").withNamespace("overwrite_ns"), new StreamSyncSummary(Optional.of(0L)), - new StreamDescriptor().withName("append_stream").withNamespace("append_ns"), new StreamSyncSummary(Optional.of(1L)))); - - verify(destinationHandler) - .execute(Sql.of("UPDATE TABLE overwrite_ns.overwrite_stream WITHOUT SAFER CASTING WHERE extracted_at > 2023-01-23T12:34:56Z")); - verify(destinationHandler) - .execute(Sql.of("UPDATE TABLE append_ns.append_stream WITHOUT SAFER CASTING WHERE extracted_at > 2023-01-23T12:34:56Z")); - verify(destinationHandler).execute(Sql.of("UPDATE TABLE dedup_ns.dedup_stream WITHOUT SAFER CASTING WHERE extracted_at > 2023-01-23T12:34:56Z")); - } - - /** - * A test that tries to trigger multiple soft resets on all three streams. The migration should run, - * and we also detect a schema mismatch. However, only one soft reset should be triggered once per - * stream. Additionally, the overwrite stream should not trigger a soft reset. - */ - @Test - void multipleSoftResets() throws Exception { - typerDeduper = new DefaultTyperDeduper<>( - sqlGenerator, - destinationHandler, - parsedCatalog, - migrator, - List.of(MIGRATION_REQUIRING_SOFT_RESET)); - - // Notably: isSchemaMismatch = true, - // and the MockStates have needsSoftReset = false and isMigrated = false. - when(destinationHandler.gatherInitialState(anyList())) - .thenReturn(List.of( - new DestinationInitialStatus<>( - OVERWRITE_STREAM_CONFIG, - true, - new InitialRawTableStatus(true, true, Optional.of(Instant.ofEpochMilli(42))), - true, - false, - new MockState(false, false, true)), - new DestinationInitialStatus<>( - APPEND_STREAM_CONFIG, - true, - new InitialRawTableStatus(true, true, Optional.of(Instant.ofEpochMilli(42))), - true, - false, - new MockState(false, false, true)), - new DestinationInitialStatus<>( - DEDUPE_STREAM_CONFIG, - true, - new InitialRawTableStatus(true, true, Optional.of(Instant.ofEpochMilli(42))), - true, - false, - new MockState(false, false, true)))); - - typerDeduper.prepareSchemasAndRunMigrations(); - verify(destinationHandler).execute(Sql.of("MIGRATE airbyte_internal.overwrite_stream")); - verify(destinationHandler).execute(Sql.of("MIGRATE airbyte_internal.append_stream")); - verify(destinationHandler).execute(Sql.of("MIGRATE airbyte_internal.dedup_stream")); - verify(destinationHandler).commitDestinationStates(Map.of( - OVERWRITE_STREAM_CONFIG.id(), new MockState(true, true, true), - APPEND_STREAM_CONFIG.id(), new MockState(true, true, true), - DEDUPE_STREAM_CONFIG.id(), new MockState(true, true, true))); - verify(destinationHandler).gatherInitialState(any()); - verify(destinationHandler) - .execute(separately("CREATE SCHEMA airbyte_internal", "CREATE SCHEMA overwrite_ns", "CREATE SCHEMA append_ns", "CREATE SCHEMA dedup_ns")); - verifyNoMoreInteractions(destinationHandler); - clearInvocations(destinationHandler); - - typerDeduper.prepareFinalTables(); - - // We should trigger a soft reset on the append + dedup streams. - verify(destinationHandler).execute(Sql.of("PREPARE append_ns.append_stream FOR SOFT RESET")); - verify(destinationHandler).execute(Sql.of("UPDATE TABLE append_ns.append_stream_ab_soft_reset WITHOUT SAFER CASTING")); - verify(destinationHandler).execute(Sql.of("OVERWRITE TABLE append_ns.append_stream FROM append_ns.append_stream_ab_soft_reset")); - - verify(destinationHandler).execute(Sql.of("PREPARE dedup_ns.dedup_stream FOR SOFT RESET")); - verify(destinationHandler).execute(Sql.of("UPDATE TABLE dedup_ns.dedup_stream_ab_soft_reset WITHOUT SAFER CASTING")); - verify(destinationHandler).execute(Sql.of("OVERWRITE TABLE dedup_ns.dedup_stream FROM dedup_ns.dedup_stream_ab_soft_reset")); - - // The overwrite stream just gets a new table entirely, instead of a soft reset. - verify(destinationHandler).execute(Sql.of("CREATE TABLE overwrite_ns.overwrite_stream_airbyte_tmp")); - - // And we should commit the states. Note that we now set needsSoftReset=false. - verify(destinationHandler).commitDestinationStates(Map.of( - OVERWRITE_STREAM_CONFIG.id(), new MockState(false, true, true), - APPEND_STREAM_CONFIG.id(), new MockState(false, true, true), - DEDUPE_STREAM_CONFIG.id(), new MockState(false, true, true))); - - verifyNoMoreInteractions(destinationHandler); - } - - /** - * A test where we have multiple migrations. The first migration triggers a soft reset; the second - * migration does nothing. We should correctly trigger the soft reset. - */ - @Test - void migrationsMixedResults() throws Exception { - typerDeduper = new DefaultTyperDeduper<>( - sqlGenerator, - destinationHandler, - parsedCatalog, - migrator, - List.of(MIGRATION_REQUIRING_SOFT_RESET, MIGRATION_NOT_REQUIRING_SOFT_RESET)); - - when(destinationHandler.gatherInitialState(anyList())) - .thenReturn(List.of( - new DestinationInitialStatus<>( - OVERWRITE_STREAM_CONFIG, - true, - new InitialRawTableStatus(true, true, Optional.of(Instant.ofEpochMilli(42))), - false, - false, - new MockState(false, false, false)), - new DestinationInitialStatus<>( - APPEND_STREAM_CONFIG, - true, - new InitialRawTableStatus(true, true, Optional.of(Instant.ofEpochMilli(42))), - false, - false, - new MockState(false, false, false)), - new DestinationInitialStatus<>( - DEDUPE_STREAM_CONFIG, - true, - new InitialRawTableStatus(true, true, Optional.of(Instant.ofEpochMilli(42))), - false, - false, - new MockState(false, false, false)))); - - typerDeduper.prepareSchemasAndRunMigrations(); - verify(destinationHandler).execute(Sql.of("MIGRATE airbyte_internal.overwrite_stream")); - verify(destinationHandler).execute(Sql.of("MIGRATE airbyte_internal.append_stream")); - verify(destinationHandler).execute(Sql.of("MIGRATE airbyte_internal.dedup_stream")); - verify(destinationHandler).commitDestinationStates(Map.of( - OVERWRITE_STREAM_CONFIG.id(), new MockState(true, true, true), - APPEND_STREAM_CONFIG.id(), new MockState(true, true, true), - DEDUPE_STREAM_CONFIG.id(), new MockState(true, true, true))); - verify(destinationHandler).gatherInitialState(any()); - verify(destinationHandler) - .execute(separately("CREATE SCHEMA airbyte_internal", "CREATE SCHEMA overwrite_ns", "CREATE SCHEMA append_ns", "CREATE SCHEMA dedup_ns")); - verifyNoMoreInteractions(destinationHandler); - clearInvocations(destinationHandler); - - typerDeduper.prepareFinalTables(); - - // We should trigger a soft reset on the append + dedup streams. - verify(destinationHandler).execute(Sql.of("PREPARE append_ns.append_stream FOR SOFT RESET")); - verify(destinationHandler).execute(Sql.of("UPDATE TABLE append_ns.append_stream_ab_soft_reset WITHOUT SAFER CASTING")); - verify(destinationHandler).execute(Sql.of("OVERWRITE TABLE append_ns.append_stream FROM append_ns.append_stream_ab_soft_reset")); - - verify(destinationHandler).execute(Sql.of("PREPARE dedup_ns.dedup_stream FOR SOFT RESET")); - verify(destinationHandler).execute(Sql.of("UPDATE TABLE dedup_ns.dedup_stream_ab_soft_reset WITHOUT SAFER CASTING")); - verify(destinationHandler).execute(Sql.of("OVERWRITE TABLE dedup_ns.dedup_stream FROM dedup_ns.dedup_stream_ab_soft_reset")); - - // The overwrite stream just gets a new table - verify(destinationHandler).execute(Sql.of("CREATE TABLE overwrite_ns.overwrite_stream_airbyte_tmp")); - - // And we should commit the states. - verify(destinationHandler).commitDestinationStates(Map.of( - OVERWRITE_STREAM_CONFIG.id(), new MockState(false, true, true), - APPEND_STREAM_CONFIG.id(), new MockState(false, true, true), - DEDUPE_STREAM_CONFIG.id(), new MockState(false, true, true))); - - verifyNoMoreInteractions(destinationHandler); - } - - /** - * A test where a previous sync committed a destination state with needsSoftReset=true. We should - * trigger a soft reset, even though the current sync doesn't need it. - */ - @Test - void previousSyncSoftReset() throws Exception { - // Notably: isSchemaMismatch = false, but the MockStates have needsSoftReset = true. - when(destinationHandler.gatherInitialState(anyList())) - .thenReturn(List.of( - new DestinationInitialStatus<>( - OVERWRITE_STREAM_CONFIG, - true, - new InitialRawTableStatus(true, true, Optional.of(Instant.ofEpochMilli(42))), - false, - false, - new MockState(true, false, false)), - new DestinationInitialStatus<>( - APPEND_STREAM_CONFIG, - true, - new InitialRawTableStatus(true, true, Optional.of(Instant.ofEpochMilli(42))), - false, - false, - new MockState(true, false, false)), - new DestinationInitialStatus<>( - DEDUPE_STREAM_CONFIG, - true, - new InitialRawTableStatus(true, true, Optional.of(Instant.ofEpochMilli(42))), - false, - false, - new MockState(true, false, false)))); - - typerDeduper.prepareSchemasAndRunMigrations(); - // Even though we didn't do anything, we still commit the destination states. - // This is technically unnecessary, but it's a single extra call and it's simpler to just do it. - verify(destinationHandler).commitDestinationStates(Map.of( - OVERWRITE_STREAM_CONFIG.id(), new MockState(true, false, false), - APPEND_STREAM_CONFIG.id(), new MockState(true, false, false), - DEDUPE_STREAM_CONFIG.id(), new MockState(true, false, false))); - verify(destinationHandler).gatherInitialState(any()); - verify(destinationHandler) - .execute(separately("CREATE SCHEMA airbyte_internal", "CREATE SCHEMA overwrite_ns", "CREATE SCHEMA append_ns", "CREATE SCHEMA dedup_ns")); - verifyNoMoreInteractions(destinationHandler); - clearInvocations(destinationHandler); - - typerDeduper.prepareFinalTables(); - - // We should trigger a soft reset on the append + dedup streams. - verify(destinationHandler).execute(Sql.of("PREPARE append_ns.append_stream FOR SOFT RESET")); - verify(destinationHandler).execute(Sql.of("UPDATE TABLE append_ns.append_stream_ab_soft_reset WITHOUT SAFER CASTING")); - verify(destinationHandler).execute(Sql.of("OVERWRITE TABLE append_ns.append_stream FROM append_ns.append_stream_ab_soft_reset")); - - verify(destinationHandler).execute(Sql.of("PREPARE dedup_ns.dedup_stream FOR SOFT RESET")); - verify(destinationHandler).execute(Sql.of("UPDATE TABLE dedup_ns.dedup_stream_ab_soft_reset WITHOUT SAFER CASTING")); - verify(destinationHandler).execute(Sql.of("OVERWRITE TABLE dedup_ns.dedup_stream FROM dedup_ns.dedup_stream_ab_soft_reset")); - - // The overwrite stream just gets a new table entirely, instead of a soft reset. - verify(destinationHandler).execute(Sql.of("CREATE TABLE overwrite_ns.overwrite_stream_airbyte_tmp")); - - // And we should commit the states. Note that we now set needsSoftReset=false. - verify(destinationHandler).commitDestinationStates(Map.of( - OVERWRITE_STREAM_CONFIG.id(), new MockState(false, false, false), - APPEND_STREAM_CONFIG.id(), new MockState(false, false, false), - DEDUPE_STREAM_CONFIG.id(), new MockState(false, false, false))); - - verifyNoMoreInteractions(destinationHandler); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationV1V2MigratorTest.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationV1V2MigratorTest.java deleted file mode 100644 index 2f582274438b9..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationV1V2MigratorTest.java +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -import static io.airbyte.cdk.integrations.base.JavaBaseConstants.LEGACY_RAW_TABLE_COLUMNS; -import static io.airbyte.cdk.integrations.base.JavaBaseConstants.V2_RAW_TABLE_COLUMN_NAMES; -import static org.mockito.ArgumentMatchers.any; - -import io.airbyte.protocol.models.v0.DestinationSyncMode; -import java.util.Optional; -import java.util.stream.Stream; -import lombok.SneakyThrows; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtensionContext; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.ArgumentsProvider; -import org.junit.jupiter.params.provider.ArgumentsSource; -import org.mockito.Mockito; - -public class DestinationV1V2MigratorTest { - - private static final StreamId STREAM_ID = new StreamId("final", "final_table", "raw", "raw_table", null, null); - - public static class ShouldMigrateTestArgumentProvider implements ArgumentsProvider { - - @Override - public Stream provideArguments(final ExtensionContext context) throws Exception { - - // Don't throw an exception - final boolean v2SchemaMatches = true; - - return Stream.of( - // Doesn't Migrate because of sync mode - Arguments.of(DestinationSyncMode.OVERWRITE, makeMockMigrator(true, false, v2SchemaMatches, true, true), false), - // Doesn't migrate because v2 table already exists - Arguments.of(DestinationSyncMode.APPEND, makeMockMigrator(true, true, v2SchemaMatches, true, true), false), - Arguments.of(DestinationSyncMode.APPEND_DEDUP, makeMockMigrator(true, true, v2SchemaMatches, true, true), false), - // Doesn't migrate because no valid v1 raw table exists - Arguments.of(DestinationSyncMode.APPEND, makeMockMigrator(true, false, v2SchemaMatches, false, true), false), - Arguments.of(DestinationSyncMode.APPEND_DEDUP, makeMockMigrator(true, false, v2SchemaMatches, false, true), false), - Arguments.of(DestinationSyncMode.APPEND, makeMockMigrator(true, false, v2SchemaMatches, true, false), false), - Arguments.of(DestinationSyncMode.APPEND_DEDUP, makeMockMigrator(true, false, v2SchemaMatches, true, false), false), - // Migrates - Arguments.of(DestinationSyncMode.APPEND, noIssuesMigrator(), true), - Arguments.of(DestinationSyncMode.APPEND_DEDUP, noIssuesMigrator(), true)); - } - - } - - @ParameterizedTest - @ArgumentsSource(ShouldMigrateTestArgumentProvider.class) - public void testShouldMigrate(final DestinationSyncMode destinationSyncMode, final BaseDestinationV1V2Migrator migrator, final boolean expected) - throws Exception { - final StreamConfig config = new StreamConfig(STREAM_ID, null, destinationSyncMode, null, null, null); - final var actual = migrator.shouldMigrate(config); - Assertions.assertEquals(expected, actual); - } - - @Test - public void testMismatchedSchemaThrowsException() throws Exception { - final StreamConfig config = new StreamConfig(STREAM_ID, null, DestinationSyncMode.APPEND_DEDUP, null, null, null); - final var migrator = makeMockMigrator(true, true, false, false, false); - final UnexpectedSchemaException exception = Assertions.assertThrows(UnexpectedSchemaException.class, - () -> migrator.shouldMigrate(config)); - Assertions.assertEquals("Destination V2 Raw Table does not match expected Schema", exception.getMessage()); - } - - @SneakyThrows - @Test - public void testMigrate() throws Exception { - final var sqlGenerator = new MockSqlGenerator(); - final StreamConfig stream = new StreamConfig(STREAM_ID, null, DestinationSyncMode.APPEND_DEDUP, null, null, null); - final DestinationHandler handler = Mockito.mock(DestinationHandler.class); - final var sql = sqlGenerator.migrateFromV1toV2(STREAM_ID, "v1_raw_namespace", "v1_raw_table"); - // All is well - final var migrator = noIssuesMigrator(); - migrator.migrate(sqlGenerator, handler, stream); - Mockito.verify(handler).execute(sql); - // Exception thrown when executing sql, TableNotMigratedException thrown - Mockito.doThrow(Exception.class).when(handler).execute(any()); - final TableNotMigratedException exception = Assertions.assertThrows(TableNotMigratedException.class, - () -> migrator.migrate(sqlGenerator, handler, stream)); - Assertions.assertEquals("Attempted and failed to migrate stream final_table", exception.getMessage()); - } - - public static BaseDestinationV1V2Migrator makeMockMigrator(final boolean v2NamespaceExists, - final boolean v2TableExists, - final boolean v2RawSchemaMatches, - final boolean v1RawTableExists, - final boolean v1RawTableSchemaMatches) - throws Exception { - final BaseDestinationV1V2Migrator migrator = Mockito.spy(BaseDestinationV1V2Migrator.class); - Mockito.when(migrator.doesAirbyteInternalNamespaceExist(any())).thenReturn(v2NamespaceExists); - final var existingTable = v2TableExists ? Optional.of("v2_raw") : Optional.empty(); - Mockito.when(migrator.getTableIfExists("raw", "raw_table")).thenReturn(existingTable); - Mockito.when(migrator.schemaMatchesExpectation("v2_raw", V2_RAW_TABLE_COLUMN_NAMES)).thenReturn(v2RawSchemaMatches); - - Mockito.when(migrator.convertToV1RawName(any())).thenReturn(new NamespacedTableName("v1_raw_namespace", "v1_raw_table")); - final var existingV1RawTable = v1RawTableExists ? Optional.of("v1_raw") : Optional.empty(); - Mockito.when(migrator.getTableIfExists("v1_raw_namespace", "v1_raw_table")).thenReturn(existingV1RawTable); - Mockito.when(migrator.schemaMatchesExpectation("v1_raw", LEGACY_RAW_TABLE_COLUMNS)).thenReturn(v1RawTableSchemaMatches); - return migrator; - } - - public static BaseDestinationV1V2Migrator noIssuesMigrator() throws Exception { - return makeMockMigrator(true, false, true, true, true); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/java/io/airbyte/integrations/base/destination/typing_deduping/MockSqlGenerator.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/java/io/airbyte/integrations/base/destination/typing_deduping/MockSqlGenerator.java deleted file mode 100644 index 3ef59aa91e21a..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/java/io/airbyte/integrations/base/destination/typing_deduping/MockSqlGenerator.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -import java.time.Instant; -import java.util.Optional; - -/** - * Basic SqlGenerator mock. See {@link DefaultTyperDeduperTest} for example usage. - */ -class MockSqlGenerator implements SqlGenerator { - - @Override - public StreamId buildStreamId(final String namespace, final String name, final String rawNamespaceOverride) { - return null; - } - - @Override - public ColumnId buildColumnId(final String name, final String suffix) { - return null; - } - - @Override - public Sql createSchema(final String schema) { - return Sql.of("CREATE SCHEMA " + schema); - } - - @Override - public Sql createTable(final StreamConfig stream, final String suffix, final boolean force) { - return Sql.of("CREATE TABLE " + stream.id().finalTableId("", suffix)); - } - - @Override - public Sql updateTable(final StreamConfig stream, - final String finalSuffix, - final Optional minRawTimestamp, - final boolean useExpensiveSaferCasting) { - final String timestampFilter = minRawTimestamp - .map(timestamp -> " WHERE extracted_at > " + timestamp) - .orElse(""); - final String casting = useExpensiveSaferCasting ? " WITH" : " WITHOUT" + " SAFER CASTING"; - return Sql.of("UPDATE TABLE " + stream.id().finalTableId("", finalSuffix) + casting + timestampFilter); - } - - @Override - public Sql overwriteFinalTable(final StreamId stream, final String finalSuffix) { - return Sql.of("OVERWRITE TABLE " + stream.finalTableId("") + " FROM " + stream.finalTableId("", finalSuffix)); - } - - @Override - public Sql migrateFromV1toV2(final StreamId streamId, final String namespace, final String tableName) { - return Sql.of("MIGRATE TABLE " + String.join(".", namespace, tableName) + " TO " + streamId.rawTableId("")); - } - - @Override - public Sql prepareTablesForSoftReset(final StreamConfig stream) { - return Sql.of("PREPARE " + String.join(".", stream.id().originalNamespace(), stream.id().originalName()) + " FOR SOFT RESET"); - } - - @Override - public Sql clearLoadedAt(final StreamId streamId) { - return null; - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/java/io/airbyte/integrations/base/destination/typing_deduping/StreamIdTest.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/java/io/airbyte/integrations/base/destination/typing_deduping/StreamIdTest.java deleted file mode 100644 index d9ef0d6f4c855..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/java/io/airbyte/integrations/base/destination/typing_deduping/StreamIdTest.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -import static org.junit.jupiter.api.Assertions.assertAll; -import static org.junit.jupiter.api.Assertions.assertEquals; - -import org.junit.jupiter.api.Test; - -class StreamIdTest { - - /** - * Both these streams naively want the same raw table name ("aaa_abab_bbb_abab_ccc"). Verify that - * they don't actually use the same raw table. - */ - @Test - public void rawNameCollision() { - String stream1 = StreamId.concatenateRawTableName("aaa_abab_bbb", "ccc"); - String stream2 = StreamId.concatenateRawTableName("aaa", "bbb_abab_ccc"); - - assertAll( - () -> assertEquals("aaa_abab_bbb_raw__stream_ccc", stream1), - () -> assertEquals("aaa_raw__stream_bbb_abab_ccc", stream2)); - } - - @Test - public void noUnderscores() { - String stream = StreamId.concatenateRawTableName("a", "b"); - - assertEquals("a_raw__stream_b", stream); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/java/io/airbyte/integrations/base/destination/typing_deduping/TypeAndDedupeOperationValveTest.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/java/io/airbyte/integrations/base/destination/typing_deduping/TypeAndDedupeOperationValveTest.java deleted file mode 100644 index 3ada28f544d4e..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/java/io/airbyte/integrations/base/destination/typing_deduping/TypeAndDedupeOperationValveTest.java +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.ObjectNode; -import io.airbyte.cdk.integrations.base.DestinationConfig; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import java.util.concurrent.atomic.AtomicLong; -import java.util.function.Supplier; -import java.util.stream.IntStream; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.ValueSource; - -public class TypeAndDedupeOperationValveTest { - - private static final AirbyteStreamNameNamespacePair STREAM_A = new AirbyteStreamNameNamespacePair("a", "a"); - private static final AirbyteStreamNameNamespacePair STREAM_B = new AirbyteStreamNameNamespacePair("b", "b"); - private static final Supplier ALWAYS_ZERO = () -> 0l; - - private Supplier minuteUpdates; - - @BeforeEach - public void setup() { - AtomicLong start = new AtomicLong(0); - minuteUpdates = () -> start.getAndUpdate(l -> l + (60 * 1000)); - } - - @AfterEach - public void clearDestinationConfig() { - DestinationConfig.clearInstance(); - } - - private void initializeDestinationConfigOption(final boolean enableIncrementalTypingAndDeduping) { - ObjectMapper mapper = new ObjectMapper(); - ObjectNode objectNode = mapper.createObjectNode(); - objectNode.put("enable_incremental_final_table_updates", enableIncrementalTypingAndDeduping); - DestinationConfig.initialize(objectNode); - } - - private void elapseTime(Supplier timing, int iterations) { - IntStream.range(0, iterations).forEach(__ -> { - timing.get(); - }); - } - - @ParameterizedTest - @ValueSource(booleans = {true, false}) - public void testAddStream(final boolean enableIncrementalTypingAndDeduping) { - initializeDestinationConfigOption(enableIncrementalTypingAndDeduping); - final var valve = new TypeAndDedupeOperationValve(ALWAYS_ZERO); - valve.addStream(STREAM_A); - Assertions.assertEquals(-1, valve.getIncrementInterval(STREAM_A)); - Assertions.assertEquals(valve.readyToTypeAndDedupe(STREAM_A), enableIncrementalTypingAndDeduping); - Assertions.assertEquals(valve.get(STREAM_A), 0l); - } - - @ParameterizedTest - @ValueSource(booleans = {true, false}) - public void testReadyToTypeAndDedupe(final boolean enableIncrementalTypingAndDeduping) { - initializeDestinationConfigOption(enableIncrementalTypingAndDeduping); - final var valve = new TypeAndDedupeOperationValve(minuteUpdates); - // method call increments time - valve.addStream(STREAM_A); - elapseTime(minuteUpdates, 1); - // method call increments time - valve.addStream(STREAM_B); - // method call increments time - Assertions.assertEquals(valve.readyToTypeAndDedupe(STREAM_A), enableIncrementalTypingAndDeduping); - elapseTime(minuteUpdates, 1); - Assertions.assertEquals(valve.readyToTypeAndDedupe(STREAM_B), enableIncrementalTypingAndDeduping); - valve.updateTimeAndIncreaseInterval(STREAM_A); - Assertions.assertEquals(1000 * 60 * 60 * 6, - valve.getIncrementInterval(STREAM_A)); - // method call increments time - Assertions.assertFalse(valve.readyToTypeAndDedupe(STREAM_A)); - // More than enough time has passed now - elapseTime(minuteUpdates, 60 * 6); - Assertions.assertEquals(valve.readyToTypeAndDedupe(STREAM_A), enableIncrementalTypingAndDeduping); - } - - @ParameterizedTest - @ValueSource(booleans = {true, false}) - public void testUpdateTimeAndIncreaseInterval(final boolean enableIncrementalTypingAndDeduping) { - initializeDestinationConfigOption(enableIncrementalTypingAndDeduping); - final var valve = new TypeAndDedupeOperationValve(minuteUpdates); - valve.addStream(STREAM_A); - IntStream.range(0, 1).forEach(__ -> Assertions.assertEquals(valve.readyToTypeAndDedupe(STREAM_A), enableIncrementalTypingAndDeduping)); // start - // ready - // to T&D - Assertions.assertEquals(valve.readyToTypeAndDedupe(STREAM_A), enableIncrementalTypingAndDeduping); - valve.updateTimeAndIncreaseInterval(STREAM_A); - IntStream.range(0, 360).forEach(__ -> Assertions.assertFalse(valve.readyToTypeAndDedupe(STREAM_A))); - Assertions.assertEquals(valve.readyToTypeAndDedupe(STREAM_A), enableIncrementalTypingAndDeduping); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/kotlin/io/airbyte/integrations/base/destination/typing_deduping/AirbyteTypeTest.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/kotlin/io/airbyte/integrations/base/destination/typing_deduping/AirbyteTypeTest.kt new file mode 100644 index 0000000000000..6639ef67399b2 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/kotlin/io/airbyte/integrations/base/destination/typing_deduping/AirbyteTypeTest.kt @@ -0,0 +1,582 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +import io.airbyte.commons.json.Jsons +import io.airbyte.integrations.base.destination.typing_deduping.AirbyteType.Companion.fromJsonSchema +import java.util.List +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.function.Executable + +class AirbyteTypeTest { + @Test + fun testStruct() { + val structSchema: MutableList = ArrayList() + structSchema.add( + """ + { + "type": "object", + "properties": { + "key1": { + "type": "boolean" + }, + "key2": { + "type": "integer" + }, + "key3": { + "type": "number", + "airbyte_type": "integer" + }, + "key4": { + "type": "number" + }, + "key5": { + "type": "string", + "format": "date" + }, + "key6": { + "type": "string", + "format": "time", + "airbyte_type": "time_without_timezone" + }, + "key7": { + "type": "string", + "format": "time", + "airbyte_type": "time_with_timezone" + }, + "key8": { + "type": "string", + "format": "date-time", + "airbyte_type": "timestamp_without_timezone" + }, + "key9": { + "type": "string", + "format": "date-time", + "airbyte_type": "timestamp_with_timezone" + }, + "key10": { + "type": "string", + "format": "date-time" + }, + "key11": { + "type": "string" + } + } + } + + """.trimIndent() + ) + structSchema.add( + """ + { + "type": ["object"], + "properties": { + "key1": { + "type": ["boolean"] + }, + "key2": { + "type": ["integer"] + }, + "key3": { + "type": ["number"], + "airbyte_type": "integer" + }, + "key4": { + "type": ["number"] + }, + "key5": { + "type": ["string"], + "format": "date" + }, + "key6": { + "type": ["string"], + "format": "time", + "airbyte_type": "time_without_timezone" + }, + "key7": { + "type": ["string"], + "format": "time", + "airbyte_type": "time_with_timezone" + }, + "key8": { + "type": ["string"], + "format": "date-time", + "airbyte_type": "timestamp_without_timezone" + }, + "key9": { + "type": ["string"], + "format": "date-time", + "airbyte_type": "timestamp_with_timezone" + }, + "key10": { + "type": ["string"], + "format": "date-time" + }, + "key11": { + "type": ["string"] + } + } + } + + """.trimIndent() + ) + structSchema.add( + """ + { + "type": ["null", "object"], + "properties": { + "key1": { + "type": ["null", "boolean"] + }, + "key2": { + "type": ["null", "integer"] + }, + "key3": { + "type": ["null", "number"], + "airbyte_type": "integer" + }, + "key4": { + "type": ["null", "number"] + }, + "key5": { + "type": ["null", "string"], + "format": "date" + }, + "key6": { + "type": ["null", "string"], + "format": "time", + "airbyte_type": "time_without_timezone" + }, + "key7": { + "type": ["null", "string"], + "format": "time", + "airbyte_type": "time_with_timezone" + }, + "key8": { + "type": ["null", "string"], + "format": "date-time", + "airbyte_type": "timestamp_without_timezone" + }, + "key9": { + "type": ["null", "string"], + "format": "date-time", + "airbyte_type": "timestamp_with_timezone" + }, + "key10": { + "type": ["null", "string"], + "format": "date-time" + }, + "key11": { + "type": ["null", "string"] + } + } + } + + """.trimIndent() + ) + + val propertiesMap = LinkedHashMap() + propertiesMap["key1"] = AirbyteProtocolType.BOOLEAN + propertiesMap["key2"] = AirbyteProtocolType.INTEGER + propertiesMap["key3"] = AirbyteProtocolType.INTEGER + propertiesMap["key4"] = AirbyteProtocolType.NUMBER + propertiesMap["key5"] = AirbyteProtocolType.DATE + propertiesMap["key6"] = AirbyteProtocolType.TIME_WITHOUT_TIMEZONE + propertiesMap["key7"] = AirbyteProtocolType.TIME_WITH_TIMEZONE + propertiesMap["key8"] = AirbyteProtocolType.TIMESTAMP_WITHOUT_TIMEZONE + propertiesMap["key9"] = AirbyteProtocolType.TIMESTAMP_WITH_TIMEZONE + propertiesMap["key10"] = AirbyteProtocolType.TIMESTAMP_WITH_TIMEZONE + propertiesMap["key11"] = AirbyteProtocolType.STRING + + val struct: AirbyteType = Struct(propertiesMap) + for (schema in structSchema) { + Assertions.assertEquals(struct, fromJsonSchema(Jsons.deserialize(schema))) + } + } + + @Test + fun testEmptyStruct() { + val structSchema: MutableList = ArrayList() + structSchema.add( + """ + { + "type": "object" + } + + """.trimIndent() + ) + structSchema.add( + """ + { + "type": ["object"] + } + + """.trimIndent() + ) + structSchema.add( + """ + { + "type": ["null", "object"] + } + + """.trimIndent() + ) + + val struct: AirbyteType = Struct(LinkedHashMap()) + for (schema in structSchema) { + Assertions.assertEquals(struct, fromJsonSchema(Jsons.deserialize(schema))) + } + } + + @Test + fun testImplicitStruct() { + val structSchema = + """ + { + "properties": { + "key1": { + "type": "boolean" + } + } + } + + """.trimIndent() + + val propertiesMap = LinkedHashMap() + propertiesMap["key1"] = AirbyteProtocolType.BOOLEAN + + val struct: AirbyteType = Struct(propertiesMap) + Assertions.assertEquals(struct, fromJsonSchema(Jsons.deserialize(structSchema))) + } + + @Test + fun testArray() { + val arraySchema: MutableList = ArrayList() + arraySchema.add( + """ + { + "type": "array", + "items": { + "type": "string", + "format": "date-time", + "airbyte_type": "timestamp_with_timezone" + } + } + + """.trimIndent() + ) + arraySchema.add( + """ + { + "type": ["array"], + "items": { + "type": ["string"], + "format": "date-time", + "airbyte_type": "timestamp_with_timezone" + } + } + + """.trimIndent() + ) + arraySchema.add( + """ + { + "type": ["null", "array"], + "items": { + "type": ["null", "string"], + "format": "date-time", + "airbyte_type": "timestamp_with_timezone" + } + } + + """.trimIndent() + ) + + val array: AirbyteType = Array(AirbyteProtocolType.TIMESTAMP_WITH_TIMEZONE) + for (schema in arraySchema) { + Assertions.assertEquals(array, fromJsonSchema(Jsons.deserialize(schema))) + } + } + + @Test + fun testEmptyArray() { + val arraySchema: MutableList = ArrayList() + arraySchema.add( + """ + { + "type": "array" + } + + """.trimIndent() + ) + arraySchema.add( + """ + { + "type": ["array"] + } + + """.trimIndent() + ) + + arraySchema.add( + """ + { + "type": ["null", "array"] + } + + """.trimIndent() + ) + + val array: AirbyteType = Array(AirbyteProtocolType.UNKNOWN) + for (schema in arraySchema) { + Assertions.assertEquals(array, fromJsonSchema(Jsons.deserialize(schema))) + } + } + + @Test + fun testUnsupportedOneOf() { + val unsupportedOneOfSchema = + """ + { + "oneOf": ["number", "string"] + } + + """.trimIndent() + + val options: MutableList = ArrayList() + options.add(AirbyteProtocolType.NUMBER) + options.add(AirbyteProtocolType.STRING) + + val unsupportedOneOf = UnsupportedOneOf(options) + Assertions.assertEquals( + unsupportedOneOf, + fromJsonSchema(Jsons.deserialize(unsupportedOneOfSchema)) + ) + } + + @Test + fun testUnion() { + val unionSchema = + """ + { + "type": ["string", "number"] + } + + """.trimIndent() + + val options: MutableList = ArrayList() + options.add(AirbyteProtocolType.STRING) + options.add(AirbyteProtocolType.NUMBER) + + val union = Union(options) + Assertions.assertEquals(union, fromJsonSchema(Jsons.deserialize(unionSchema))) + } + + @Test + fun testUnionComplex() { + val schema = + Jsons.deserialize( + """ + { + "type": ["string", "object", "array", "null", "string", "object", "array", "null"], + "properties": { + "foo": {"type": "string"} + }, + "items": {"type": "string"} + } + + """.trimIndent() + ) + + val parsed = fromJsonSchema(schema) + + val expected: AirbyteType = + Union( + List.of( + AirbyteProtocolType.STRING, + Struct(linkedMapOf("foo" to AirbyteProtocolType.STRING)), + Array(AirbyteProtocolType.STRING) + ) + ) + Assertions.assertEquals(expected, parsed) + } + + @Test + fun testUnionUnderspecifiedNonPrimitives() { + val schema = + Jsons.deserialize( + """ + { + "type": ["string", "object", "array", "null", "string", "object", "array", "null"] + } + + """.trimIndent() + ) + + val parsed = fromJsonSchema(schema) + + val expected: AirbyteType = + Union( + List.of( + AirbyteProtocolType.STRING, + Struct(LinkedHashMap()), + Array(AirbyteProtocolType.UNKNOWN) + ) + ) + expected.toString() + Assertions.assertEquals(expected, parsed) + } + + @Test + fun testInvalidTextualType() { + val invalidTypeSchema = + """ + { + "type": "foo" + } + + """.trimIndent() + Assertions.assertEquals( + AirbyteProtocolType.UNKNOWN, + fromJsonSchema(Jsons.deserialize(invalidTypeSchema)) + ) + } + + @Test + fun testInvalidBooleanType() { + val invalidTypeSchema = + """ + { + "type": true + } + + """.trimIndent() + Assertions.assertEquals( + AirbyteProtocolType.UNKNOWN, + fromJsonSchema(Jsons.deserialize(invalidTypeSchema)) + ) + } + + @Test + fun testInvalid() { + val invalidSchema: MutableList = ArrayList() + invalidSchema.add("") + invalidSchema.add("null") + invalidSchema.add("true") + invalidSchema.add("false") + invalidSchema.add("1") + invalidSchema.add("\"\"") + invalidSchema.add("[]") + invalidSchema.add("{}") + + for (schema in invalidSchema) { + Assertions.assertEquals( + AirbyteProtocolType.UNKNOWN, + fromJsonSchema(Jsons.deserialize(schema)) + ) + } + } + + @Test + fun testChooseUnion() { + val unionToType: MutableMap = HashMap() + + val a = Array(AirbyteProtocolType.BOOLEAN) + + val properties = LinkedHashMap() + properties["key1"] = AirbyteProtocolType.UNKNOWN + properties["key2"] = AirbyteProtocolType.INTEGER + val s = Struct(properties) + + unionToType[Union(listOf(s, a))] = a + unionToType[Union(listOf(AirbyteProtocolType.NUMBER, a))] = a + unionToType[Union(listOf(AirbyteProtocolType.INTEGER, s))] = s + unionToType[ + Union( + listOf( + AirbyteProtocolType.NUMBER, + AirbyteProtocolType.DATE, + AirbyteProtocolType.BOOLEAN + ) + ) + ] = AirbyteProtocolType.DATE + unionToType[ + Union( + listOf( + AirbyteProtocolType.INTEGER, + AirbyteProtocolType.BOOLEAN, + AirbyteProtocolType.NUMBER + ) + ) + ] = AirbyteProtocolType.NUMBER + unionToType[Union(listOf(AirbyteProtocolType.BOOLEAN, AirbyteProtocolType.INTEGER))] = + AirbyteProtocolType.INTEGER + + Assertions.assertAll( + unionToType.entries.map { e -> + Executable { Assertions.assertEquals(e.value, e.key.chooseType()) } + } + ) + } + + @Test + fun testAsColumns() { + val u = + Union( + List.of( + AirbyteProtocolType.STRING, + Struct(linkedMapOf("foo" to AirbyteProtocolType.STRING)), + Array( + AirbyteProtocolType.STRING + ), // This is bad behavior, but it matches current behavior so we'll test it. + // Ideally, we would recognize that the sub-unions are also objects. + Union(List.of(Struct(LinkedHashMap()))), + UnsupportedOneOf(List.of(Struct(LinkedHashMap()))) + ) + ) + + val columns = u.asColumns() + + Assertions.assertEquals( + object : LinkedHashMap() { + init { + put("foo", AirbyteProtocolType.STRING) + } + }, + columns + ) + } + + @Test + fun testAsColumnsMultipleObjects() { + val u = Union(List.of(Struct(LinkedHashMap()), Struct(LinkedHashMap()))) + + // This prooobably should throw an exception, but for the sake of smooth rollout it just + // logs a + // warning for now. + Assertions.assertEquals(LinkedHashMap(), u.asColumns()) + } + + @Test + fun testAsColumnsNoObjects() { + val u = + Union( + List.of( + AirbyteProtocolType.STRING, + Array(AirbyteProtocolType.STRING), + UnsupportedOneOf( + ArrayList() + ), // Similar to testAsColumns(), this is bad behavior. + Union(List.of(Struct(LinkedHashMap()))), + UnsupportedOneOf(List.of(Struct(LinkedHashMap()))) + ) + ) + + // This prooobably should throw an exception, but for the sake of smooth rollout it just + // logs a + // warning for now. + Assertions.assertEquals(LinkedHashMap(), u.asColumns()) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/kotlin/io/airbyte/integrations/base/destination/typing_deduping/CatalogParserTest.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/kotlin/io/airbyte/integrations/base/destination/typing_deduping/CatalogParserTest.kt new file mode 100644 index 0000000000000..563daa146ecb6 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/kotlin/io/airbyte/integrations/base/destination/typing_deduping/CatalogParserTest.kt @@ -0,0 +1,191 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.v0.AirbyteStream +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import java.util.List +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Assertions.assertAll +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.mockito.Mockito +import org.mockito.invocation.InvocationOnMock +import org.mockito.kotlin.any +import org.mockito.kotlin.whenever + +internal class CatalogParserTest { + private lateinit var sqlGenerator: SqlGenerator + private lateinit var parser: CatalogParser + + @BeforeEach + fun setup() { + sqlGenerator = Mockito.mock(SqlGenerator::class.java) + // noop quoting logic + Mockito.`when`(sqlGenerator.buildColumnId(any(), any())).thenAnswer { + invocation: InvocationOnMock -> + val fieldName = invocation.getArgument(0) + val suffix = invocation.getArgument(1) + ColumnId(fieldName + suffix, fieldName + suffix, fieldName + suffix) + } + Mockito.`when`(sqlGenerator.buildColumnId(any())).thenAnswer { invocation: InvocationOnMock + -> + sqlGenerator.buildColumnId(invocation.getArgument(0), "") + } + Mockito.`when`(sqlGenerator.buildStreamId(any(), any(), any())).thenAnswer { + invocation: InvocationOnMock -> + val namespace = invocation.getArgument(0) + val name = invocation.getArgument(1) + val rawNamespace = invocation.getArgument(1) + StreamId(namespace, name, rawNamespace, namespace + "_abab_" + name, namespace, name) + } + + parser = CatalogParser(sqlGenerator) + } + + /** + * Both these streams will write to the same final table name ("foofoo"). Verify that they don't + * actually use the same tablename. + */ + @Test + fun finalNameCollision() { + Mockito.`when`(sqlGenerator.buildStreamId(any(), any(), any())).thenAnswer { + invocation: InvocationOnMock -> + val originalNamespace = invocation.getArgument(0) + val originalName = (invocation.getArgument(1)) + val originalRawNamespace = (invocation.getArgument(1)) + + // emulate quoting logic that causes a name collision + val quotedName = originalName.replace("bar".toRegex(), "") + StreamId( + originalNamespace, + quotedName, + originalRawNamespace, + originalNamespace + "_abab_" + quotedName, + originalNamespace, + originalName + ) + } + val catalog = + ConfiguredAirbyteCatalog() + .withStreams(List.of(stream("a", "foobarfoo"), stream("a", "foofoo"))) + + val parsedCatalog = parser.parseCatalog(catalog) + + assertAll( + { Assertions.assertEquals("a_abab_foofoo", parsedCatalog.streams.get(0).id.rawName) }, + { Assertions.assertEquals("foofoo", parsedCatalog.streams.get(0).id.finalName) }, + { + Assertions.assertEquals( + "a_abab_foofoo_3fd", + parsedCatalog.streams.get(1).id.rawName + ) + }, + { Assertions.assertEquals("foofoo_3fd", parsedCatalog.streams.get(1).id.finalName) } + ) + } + + /** + * The schema contains two fields, which will both end up named "foofoo" after quoting. Verify + * that they don't actually use the same column name. + */ + @Test + fun columnNameCollision() { + Mockito.`when`(sqlGenerator.buildColumnId(any(), any())).thenAnswer { + invocation: InvocationOnMock -> + val originalName = invocation.getArgument(0) + invocation.getArgument(1) + // emulate quoting logic that causes a name collision + val quotedName = originalName.replace("bar".toRegex(), "") + ColumnId(quotedName, originalName, quotedName) + } + val schema = + Jsons.deserialize( + """ + { + "type": "object", + "properties": { + "foobarfoo": {"type": "string"}, + "foofoo": {"type": "string"} + } + } + + """.trimIndent() + ) + val catalog = ConfiguredAirbyteCatalog().withStreams(List.of(stream("a", "a", schema))) + + val parsedCatalog = parser.parseCatalog(catalog) + val columnsList = parsedCatalog.streams[0].columns!!.keys.toList() + + assertAll( + { Assertions.assertEquals(2, parsedCatalog.streams[0].columns!!.size) }, + { Assertions.assertEquals("foofoo", columnsList[0].name) }, + { Assertions.assertEquals("foofoo_1", columnsList[1].name) } + ) + } + + /** + * Test behavior when the sqlgenerator truncates column names. We should end generate new names + * that still avoid collision. + */ + @Test + fun truncatingColumnNameCollision() { + whenever(sqlGenerator.buildColumnId(any(), any())).thenAnswer { invocation: InvocationOnMock + -> + val originalName = invocation.getArgument(0) + invocation.getArgument(1) + // truncate to 10 characters + val truncatedName = originalName.substring(0, 10.coerceAtMost(originalName.length)) + ColumnId(truncatedName, originalName, truncatedName) + } + val schema = + Jsons.deserialize( + """ + { + "type": "object", + "properties": { + "aVeryLongColumnName": {"type": "string"}, + "aVeryLongColumnNameWithMoreTextAfterward": {"type": "string"} + } + } + + """.trimIndent() + ) + val catalog = ConfiguredAirbyteCatalog().withStreams(listOf(stream("a", "a", schema))) + + val parsedCatalog = parser.parseCatalog(catalog) + val columnsList = parsedCatalog.streams[0].columns!!.keys.toList() + + assertAll( + { Assertions.assertEquals(2, parsedCatalog.streams[0].columns!!.size) }, + { Assertions.assertEquals("aVeryLongC", columnsList[0].name) }, + { Assertions.assertEquals("aV36rd", columnsList[1].name) } + ) + } + + companion object { + private fun stream( + namespace: String, + name: String, + schema: JsonNode = + Jsons.deserialize( + """ + { + "type": "object", + "properties": { + "name": {"type": "string"} + } + } + + """.trimIndent() + ) + ): ConfiguredAirbyteStream { + return ConfiguredAirbyteStream() + .withStream( + AirbyteStream().withNamespace(namespace).withName(name).withJsonSchema(schema) + ) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/kotlin/io/airbyte/integrations/base/destination/typing_deduping/CollectionUtilsTest.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/kotlin/io/airbyte/integrations/base/destination/typing_deduping/CollectionUtilsTest.kt new file mode 100644 index 0000000000000..a7f3cddd4d860 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/kotlin/io/airbyte/integrations/base/destination/typing_deduping/CollectionUtilsTest.kt @@ -0,0 +1,23 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +import io.airbyte.integrations.base.destination.typing_deduping.CollectionUtils.matchingKey +import java.util.* +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.params.ParameterizedTest +import org.junit.jupiter.params.provider.CsvSource + +class CollectionUtilsTest { + @ParameterizedTest + @CsvSource("foo,foo", "bar,BAR", "fIzZ,fizz", "ZIP_zop,zip_ZOP", "nope,") + fun testMatchingKey(input: String, output: String?) { + val expected = Optional.ofNullable(output) + Assertions.assertEquals(matchingKey(TEST_COLLECTION, input), expected) + } + + companion object { + var TEST_COLLECTION: Set = setOf("foo", "BAR", "fizz", "zip_ZOP") + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/kotlin/io/airbyte/integrations/base/destination/typing_deduping/DefaultTyperDeduperTest.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/kotlin/io/airbyte/integrations/base/destination/typing_deduping/DefaultTyperDeduperTest.kt new file mode 100644 index 0000000000000..cb458b65bcc9a --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/kotlin/io/airbyte/integrations/base/destination/typing_deduping/DefaultTyperDeduperTest.kt @@ -0,0 +1,976 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +import io.airbyte.cdk.integrations.destination.StreamSyncSummary +import io.airbyte.integrations.base.destination.typing_deduping.Sql.Companion.of +import io.airbyte.integrations.base.destination.typing_deduping.Sql.Companion.separately +import io.airbyte.integrations.base.destination.typing_deduping.migrators.Migration +import io.airbyte.integrations.base.destination.typing_deduping.migrators.MinimumDestinationState +import io.airbyte.protocol.models.v0.DestinationSyncMode +import io.airbyte.protocol.models.v0.StreamDescriptor +import java.time.Instant +import java.util.* +import java.util.Map +import java.util.function.Consumer +import kotlin.collections.HashMap +import kotlin.collections.List +import kotlin.collections.MutableMap +import kotlin.collections.emptyList +import kotlin.collections.set +import lombok.SneakyThrows +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.mockito.ArgumentMatchers +import org.mockito.Mockito +import org.mockito.Mockito.mock +import org.mockito.kotlin.any + +class DefaultTyperDeduperTest { + private var parsedCatalog: ParsedCatalog? = null + + private data class MockState( + val needsSoftReset: Boolean, + val softResetMigrationCompleted: Boolean, + val nonSoftResetMigrationCompleted: Boolean + ) : MinimumDestinationState { + override fun needsSoftReset(): Boolean = needsSoftReset + + override fun withSoftReset(needsSoftReset: Boolean): T { + return MockState( + needsSoftReset, + this.softResetMigrationCompleted, + this.nonSoftResetMigrationCompleted + ) + as T + } + } + + private lateinit var sqlGenerator: MockSqlGenerator + private lateinit var destinationHandler: DestinationHandler + + private lateinit var initialStates: List> + private lateinit var updatedStates: MutableMap + + private lateinit var migrator: DestinationV1V2Migrator + private lateinit var typerDeduper: TyperDeduper + + private val MIGRATION_REQUIRING_SOFT_RESET: Migration = + object : Migration { + @SneakyThrows + override fun migrateIfNecessary( + destinationHandler: DestinationHandler, + stream: StreamConfig, + state: DestinationInitialStatus + ): Migration.MigrationResult { + destinationHandler.execute(of("MIGRATE " + stream.id.rawTableId(""))) + return Migration.MigrationResult( + MockState(true, true, state.destinationState.nonSoftResetMigrationCompleted), + false + ) + } + } + + private val MIGRATION_NOT_REQUIRING_SOFT_RESET: Migration = + object : Migration { + override fun migrateIfNecessary( + destinationHandler: DestinationHandler, + stream: StreamConfig, + status: DestinationInitialStatus + ): Migration.MigrationResult { + return Migration.MigrationResult( + MockState( + status.destinationState.needsSoftReset, + status.destinationState.softResetMigrationCompleted, + true + ), + false + ) + } + } + + private val MIGRATION_NOOP: Migration = + object : Migration { + override fun migrateIfNecessary( + destinationHandler: DestinationHandler, + stream: StreamConfig, + status: DestinationInitialStatus + ): Migration.MigrationResult { + return Migration.MigrationResult( + MockState( + status.destinationState.needsSoftReset, + status.destinationState.softResetMigrationCompleted, + true + ), + false + ) + } + } + + @BeforeEach + @Throws(Exception::class) + fun setup() { + sqlGenerator = Mockito.spy(MockSqlGenerator()) + destinationHandler = mock() + + val overwriteNsState: DestinationInitialStatus = mock() + Mockito.`when`(overwriteNsState.destinationState).thenReturn(MockState(false, false, true)) + Mockito.`when`(overwriteNsState.streamConfig).thenReturn(OVERWRITE_STREAM_CONFIG) + + val appendNsState: DestinationInitialStatus = mock() + Mockito.`when`(appendNsState.destinationState).thenReturn(MockState(false, false, true)) + Mockito.`when`(appendNsState.streamConfig).thenReturn(APPEND_STREAM_CONFIG) + + val dedupeNsState: DestinationInitialStatus = mock() + Mockito.`when`(dedupeNsState.destinationState).thenReturn(MockState(false, false, true)) + Mockito.`when`(dedupeNsState.streamConfig).thenReturn(DEDUPE_STREAM_CONFIG) + + initialStates = java.util.List.of(overwriteNsState, appendNsState, dedupeNsState) + Mockito.`when`(destinationHandler.gatherInitialState(ArgumentMatchers.anyList())) + .thenReturn(initialStates) + initialStates.forEach( + Consumer { initialState: DestinationInitialStatus? -> + Mockito.`when`(initialState!!.initialRawTableStatus) + .thenReturn(InitialRawTableStatus(true, true, Optional.empty())) + } + ) + + val updatedStates: MutableMap = HashMap() + updatedStates[OVERWRITE_STREAM_CONFIG.id] = MockState(false, false, true) + updatedStates[APPEND_STREAM_CONFIG.id] = MockState(false, false, true) + updatedStates[DEDUPE_STREAM_CONFIG.id] = MockState(false, false, true) + this.updatedStates = updatedStates + + migrator = NoOpDestinationV1V2Migrator() + + parsedCatalog = + ParsedCatalog( + java.util.List.of( + OVERWRITE_STREAM_CONFIG, + APPEND_STREAM_CONFIG, + DEDUPE_STREAM_CONFIG + ) + ) + + typerDeduper = + DefaultTyperDeduper( + sqlGenerator, + destinationHandler, + parsedCatalog!!, + migrator, + emptyList() + ) + } + + /** When there are no existing tables, we should create them and write to them directly. */ + @Test + @Throws(Exception::class) + fun emptyDestination() { + initialStates!!.forEach( + Consumer { initialState: DestinationInitialStatus? -> + Mockito.`when`(initialState!!.isFinalTablePresent).thenReturn(false) + } + ) + + typerDeduper!!.prepareSchemasAndRunMigrations() + Mockito.verify(destinationHandler) + .execute( + separately( + "CREATE SCHEMA airbyte_internal", + "CREATE SCHEMA overwrite_ns", + "CREATE SCHEMA append_ns", + "CREATE SCHEMA dedup_ns" + ) + ) + Mockito.verify(destinationHandler).commitDestinationStates(updatedStates) + Mockito.clearInvocations(destinationHandler) + + typerDeduper!!.prepareFinalTables() + Mockito.verify(destinationHandler).execute(of("CREATE TABLE overwrite_ns.overwrite_stream")) + Mockito.verify(destinationHandler).execute(of("CREATE TABLE append_ns.append_stream")) + Mockito.verify(destinationHandler).execute(of("CREATE TABLE dedup_ns.dedup_stream")) + Mockito.verify(destinationHandler).commitDestinationStates(updatedStates) + Mockito.verifyNoMoreInteractions(*Mockito.ignoreStubs(destinationHandler)) + Mockito.clearInvocations(destinationHandler) + + typerDeduper!!.typeAndDedupe("overwrite_ns", "overwrite_stream", false) + Mockito.verify(destinationHandler) + .execute(of("UPDATE TABLE overwrite_ns.overwrite_stream WITHOUT SAFER CASTING")) + typerDeduper!!.typeAndDedupe("append_ns", "append_stream", false) + Mockito.verify(destinationHandler) + .execute(of("UPDATE TABLE append_ns.append_stream WITHOUT SAFER CASTING")) + typerDeduper!!.typeAndDedupe("dedup_ns", "dedup_stream", false) + Mockito.verify(destinationHandler) + .execute(of("UPDATE TABLE dedup_ns.dedup_stream WITHOUT SAFER CASTING")) + Mockito.verifyNoMoreInteractions(*Mockito.ignoreStubs(destinationHandler)) + Mockito.clearInvocations(destinationHandler) + + typerDeduper!!.commitFinalTables() + Mockito.verify(destinationHandler, Mockito.never()).execute(any()) + } + + /** + * When there's an existing table but it's empty, we should ensure it has the right schema and + * write to it directly. + */ + @Test + @Throws(Exception::class) + fun existingEmptyTable() { + initialStates!!.forEach( + Consumer { initialState: DestinationInitialStatus? -> + Mockito.`when`(initialState!!.isFinalTablePresent).thenReturn(true) + Mockito.`when`(initialState.isFinalTableEmpty).thenReturn(true) + Mockito.`when`(initialState.isSchemaMismatch).thenReturn(true) + } + ) + + typerDeduper!!.prepareSchemasAndRunMigrations() + Mockito.verify(destinationHandler) + .execute( + separately( + "CREATE SCHEMA airbyte_internal", + "CREATE SCHEMA overwrite_ns", + "CREATE SCHEMA append_ns", + "CREATE SCHEMA dedup_ns" + ) + ) + Mockito.verify(destinationHandler).commitDestinationStates(updatedStates) + Mockito.clearInvocations(destinationHandler) + + typerDeduper!!.prepareFinalTables() + Mockito.verify(destinationHandler) + .execute(of("CREATE TABLE overwrite_ns.overwrite_stream_airbyte_tmp")) + Mockito.verify(destinationHandler) + .execute(of("PREPARE append_ns.append_stream FOR SOFT RESET")) + Mockito.verify(destinationHandler) + .execute(of("UPDATE TABLE append_ns.append_stream_ab_soft_reset WITHOUT SAFER CASTING")) + Mockito.verify(destinationHandler) + .execute( + of( + "OVERWRITE TABLE append_ns.append_stream FROM append_ns.append_stream_ab_soft_reset" + ) + ) + Mockito.verify(destinationHandler) + .execute(of("PREPARE dedup_ns.dedup_stream FOR SOFT RESET")) + Mockito.verify(destinationHandler) + .execute(of("UPDATE TABLE dedup_ns.dedup_stream_ab_soft_reset WITHOUT SAFER CASTING")) + Mockito.verify(destinationHandler) + .execute( + of("OVERWRITE TABLE dedup_ns.dedup_stream FROM dedup_ns.dedup_stream_ab_soft_reset") + ) + Mockito.verify(destinationHandler).commitDestinationStates(updatedStates) + Mockito.verifyNoMoreInteractions(*Mockito.ignoreStubs(destinationHandler)) + Mockito.clearInvocations(destinationHandler) + + typerDeduper!!.typeAndDedupe("overwrite_ns", "overwrite_stream", false) + Mockito.verify(destinationHandler) + .execute( + of("UPDATE TABLE overwrite_ns.overwrite_stream_airbyte_tmp WITHOUT SAFER CASTING") + ) + typerDeduper!!.typeAndDedupe("append_ns", "append_stream", false) + Mockito.verify(destinationHandler) + .execute(of("UPDATE TABLE append_ns.append_stream WITHOUT SAFER CASTING")) + typerDeduper!!.typeAndDedupe("dedup_ns", "dedup_stream", false) + Mockito.verify(destinationHandler) + .execute(of("UPDATE TABLE dedup_ns.dedup_stream WITHOUT SAFER CASTING")) + Mockito.verifyNoMoreInteractions(*Mockito.ignoreStubs(destinationHandler)) + Mockito.clearInvocations(destinationHandler) + + typerDeduper!!.commitFinalTables() + Mockito.verify(destinationHandler) + .execute( + of( + "OVERWRITE TABLE overwrite_ns.overwrite_stream FROM overwrite_ns.overwrite_stream_airbyte_tmp" + ) + ) + Mockito.verifyNoMoreInteractions(*Mockito.ignoreStubs(destinationHandler)) + } + + /** + * When there's an existing empty table with the right schema, we don't need to do anything + * during setup. + */ + @Test + @Throws(Exception::class) + fun existingEmptyTableMatchingSchema() { + initialStates!!.forEach( + Consumer { initialState: DestinationInitialStatus? -> + Mockito.`when`(initialState!!.isFinalTablePresent).thenReturn(true) + Mockito.`when`(initialState.isFinalTableEmpty).thenReturn(true) + Mockito.`when`(initialState.isSchemaMismatch).thenReturn(false) + } + ) + + typerDeduper!!.prepareSchemasAndRunMigrations() + Mockito.verify(destinationHandler) + .execute( + separately( + "CREATE SCHEMA airbyte_internal", + "CREATE SCHEMA overwrite_ns", + "CREATE SCHEMA append_ns", + "CREATE SCHEMA dedup_ns" + ) + ) + Mockito.clearInvocations(destinationHandler) + + typerDeduper!!.prepareFinalTables() + Mockito.verify(destinationHandler, Mockito.never()).execute(any()) + } + + /** + * When there's an existing nonempty table, we should alter it. For the OVERWRITE stream, we + * also need to write to a tmp table, and overwrite the real table at the end of the sync. + */ + @Test + @Throws(Exception::class) + fun existingNonemptyTable() { + initialStates!!.forEach( + Consumer { initialState: DestinationInitialStatus? -> + Mockito.`when`(initialState!!.isFinalTablePresent).thenReturn(true) + Mockito.`when`(initialState.isFinalTableEmpty).thenReturn(false) + Mockito.`when`(initialState.isSchemaMismatch).thenReturn(true) + Mockito.`when`(initialState.initialRawTableStatus) + .thenReturn( + InitialRawTableStatus( + true, + true, + Optional.of(Instant.parse("2023-01-01T12:34:56Z")) + ) + ) + } + ) + + typerDeduper!!.prepareSchemasAndRunMigrations() + Mockito.verify(destinationHandler) + .execute( + separately( + "CREATE SCHEMA airbyte_internal", + "CREATE SCHEMA overwrite_ns", + "CREATE SCHEMA append_ns", + "CREATE SCHEMA dedup_ns" + ) + ) + Mockito.verify(destinationHandler).commitDestinationStates(updatedStates) + Mockito.clearInvocations(destinationHandler) + + typerDeduper!!.prepareFinalTables() + // NB: We only create a tmp table for the overwrite stream, and do _not_ soft reset the + // existing + // overwrite stream's table. + Mockito.verify(destinationHandler) + .execute(of("CREATE TABLE overwrite_ns.overwrite_stream_airbyte_tmp")) + Mockito.verify(destinationHandler) + .execute(of("PREPARE append_ns.append_stream FOR SOFT RESET")) + Mockito.verify(destinationHandler) + .execute(of("UPDATE TABLE append_ns.append_stream_ab_soft_reset WITHOUT SAFER CASTING")) + Mockito.verify(destinationHandler) + .execute( + of( + "OVERWRITE TABLE append_ns.append_stream FROM append_ns.append_stream_ab_soft_reset" + ) + ) + Mockito.verify(destinationHandler) + .execute(of("PREPARE dedup_ns.dedup_stream FOR SOFT RESET")) + Mockito.verify(destinationHandler) + .execute(of("UPDATE TABLE dedup_ns.dedup_stream_ab_soft_reset WITHOUT SAFER CASTING")) + Mockito.verify(destinationHandler) + .execute( + of("OVERWRITE TABLE dedup_ns.dedup_stream FROM dedup_ns.dedup_stream_ab_soft_reset") + ) + Mockito.verify(destinationHandler).commitDestinationStates(updatedStates) + Mockito.verifyNoMoreInteractions(*Mockito.ignoreStubs(destinationHandler)) + Mockito.clearInvocations(destinationHandler) + + typerDeduper!!.typeAndDedupe("overwrite_ns", "overwrite_stream", false) + // NB: no airbyte_tmp suffix on the non-overwrite streams + Mockito.verify(destinationHandler) + .execute( + of( + "UPDATE TABLE overwrite_ns.overwrite_stream_airbyte_tmp WITHOUT SAFER CASTING WHERE extracted_at > 2023-01-01T12:34:56Z" + ) + ) + typerDeduper!!.typeAndDedupe("append_ns", "append_stream", false) + Mockito.verify(destinationHandler) + .execute( + of( + "UPDATE TABLE append_ns.append_stream WITHOUT SAFER CASTING WHERE extracted_at > 2023-01-01T12:34:56Z" + ) + ) + typerDeduper!!.typeAndDedupe("dedup_ns", "dedup_stream", false) + Mockito.verify(destinationHandler) + .execute( + of( + "UPDATE TABLE dedup_ns.dedup_stream WITHOUT SAFER CASTING WHERE extracted_at > 2023-01-01T12:34:56Z" + ) + ) + Mockito.verifyNoMoreInteractions(*Mockito.ignoreStubs(destinationHandler)) + Mockito.clearInvocations(destinationHandler) + + typerDeduper!!.commitFinalTables() + Mockito.verify(destinationHandler) + .execute( + of( + "OVERWRITE TABLE overwrite_ns.overwrite_stream FROM overwrite_ns.overwrite_stream_airbyte_tmp" + ) + ) + Mockito.verifyNoMoreInteractions(*Mockito.ignoreStubs(destinationHandler)) + } + + /** + * When there's an existing nonempty table with the right schema, we don't need to modify it, + * but OVERWRITE streams still need to create a tmp table. + */ + @Test + @Throws(Exception::class) + fun existingNonemptyTableMatchingSchema() { + initialStates!!.forEach( + Consumer { initialState: DestinationInitialStatus? -> + Mockito.`when`(initialState!!.isFinalTablePresent).thenReturn(true) + Mockito.`when`(initialState.isFinalTableEmpty).thenReturn(false) + Mockito.`when`(initialState.isSchemaMismatch).thenReturn(false) + Mockito.`when`(initialState.initialRawTableStatus) + .thenReturn(InitialRawTableStatus(true, true, Optional.of(Instant.now()))) + } + ) + + typerDeduper!!.prepareSchemasAndRunMigrations() + Mockito.verify(destinationHandler) + .execute( + separately( + "CREATE SCHEMA airbyte_internal", + "CREATE SCHEMA overwrite_ns", + "CREATE SCHEMA append_ns", + "CREATE SCHEMA dedup_ns" + ) + ) + Mockito.verify(destinationHandler).commitDestinationStates(updatedStates) + Mockito.clearInvocations(destinationHandler) + + typerDeduper!!.prepareFinalTables() + // NB: We only create one tmp table here. + // Also, we need to alter the existing _real_ table, not the tmp table! + Mockito.verify(destinationHandler) + .execute(of("CREATE TABLE overwrite_ns.overwrite_stream_airbyte_tmp")) + Mockito.verify(destinationHandler).commitDestinationStates(updatedStates) + Mockito.verifyNoMoreInteractions(*Mockito.ignoreStubs(destinationHandler)) + } + + @Test + fun nonexistentStream() { + Assertions.assertThrows(IllegalArgumentException::class.java) { + typerDeduper!!.typeAndDedupe("nonexistent_ns", "nonexistent_stream", false) + } + Mockito.verifyNoInteractions(*Mockito.ignoreStubs(destinationHandler)) + } + + @Test + @Throws(Exception::class) + fun failedSetup() { + Mockito.doThrow(RuntimeException("foo")).`when`(destinationHandler).execute(any()) + + Assertions.assertThrows(Exception::class.java) { typerDeduper!!.prepareFinalTables() } + Mockito.clearInvocations(destinationHandler) + + typerDeduper!!.typeAndDedupe("dedup_ns", "dedup_stream", false) + typerDeduper!!.commitFinalTables() + + Mockito.verifyNoInteractions(*Mockito.ignoreStubs(destinationHandler)) + } + + /** + * Test a typical sync, where the previous sync left no unprocessed raw records. If this sync + * writes some records for a stream, we should run T+D for that stream. + */ + @Test + @Throws(Exception::class) + fun noUnprocessedRecords() { + initialStates.forEach( + Consumer { initialState: DestinationInitialStatus? -> + Mockito.`when`(initialState!!.initialRawTableStatus) + .thenReturn(InitialRawTableStatus(true, false, Optional.empty())) + } + ) + + typerDeduper!!.prepareSchemasAndRunMigrations() + + typerDeduper!!.prepareFinalTables() + Mockito.clearInvocations(destinationHandler) + + typerDeduper!!.typeAndDedupe( + Map.of( + StreamDescriptor().withName("overwrite_stream").withNamespace("overwrite_ns"), + StreamSyncSummary(Optional.of(0L)), + StreamDescriptor().withName("append_stream").withNamespace("append_ns"), + StreamSyncSummary(Optional.of(1L)) + ) + ) + + // append_stream and dedup_stream should be T+D-ed. overwrite_stream has explicitly 0 + // records, but + // dedup_stream + // is missing from the map, so implicitly has nonzero records. + Mockito.verify(destinationHandler) + .execute(of("UPDATE TABLE append_ns.append_stream WITHOUT SAFER CASTING")) + Mockito.verify(destinationHandler) + .execute(of("UPDATE TABLE dedup_ns.dedup_stream WITHOUT SAFER CASTING")) + Mockito.verifyNoMoreInteractions(destinationHandler) + } + + /** + * Test a sync where the previous sync failed to run T+D for some stream. Even if this sync + * writes zero records, it should still run T+D. + */ + @Test + @Throws(Exception::class) + fun unprocessedRecords() { + initialStates!!.forEach( + Consumer { initialState: DestinationInitialStatus? -> + Mockito.`when`(initialState!!.initialRawTableStatus) + .thenReturn( + InitialRawTableStatus( + true, + true, + Optional.of(Instant.parse("2023-01-23T12:34:56Z")) + ) + ) + } + ) + + typerDeduper!!.prepareSchemasAndRunMigrations() + + typerDeduper!!.prepareFinalTables() + Mockito.clearInvocations(destinationHandler) + + typerDeduper!!.typeAndDedupe( + Map.of( + StreamDescriptor().withName("overwrite_stream").withNamespace("overwrite_ns"), + StreamSyncSummary(Optional.of(0L)), + StreamDescriptor().withName("append_stream").withNamespace("append_ns"), + StreamSyncSummary(Optional.of(1L)) + ) + ) + + Mockito.verify(destinationHandler) + .execute( + of( + "UPDATE TABLE overwrite_ns.overwrite_stream WITHOUT SAFER CASTING WHERE extracted_at > 2023-01-23T12:34:56Z" + ) + ) + Mockito.verify(destinationHandler) + .execute( + of( + "UPDATE TABLE append_ns.append_stream WITHOUT SAFER CASTING WHERE extracted_at > 2023-01-23T12:34:56Z" + ) + ) + Mockito.verify(destinationHandler) + .execute( + of( + "UPDATE TABLE dedup_ns.dedup_stream WITHOUT SAFER CASTING WHERE extracted_at > 2023-01-23T12:34:56Z" + ) + ) + } + + /** + * A test that tries to trigger multiple soft resets on all three streams. The migration should + * run, and we also detect a schema mismatch. However, only one soft reset should be triggered + * once per stream. Additionally, the overwrite stream should not trigger a soft reset. + */ + @Test + @Throws(Exception::class) + fun multipleSoftResets() { + val typerDeduper = + DefaultTyperDeduper( + sqlGenerator!!, + destinationHandler, + parsedCatalog!!, + migrator!!, + java.util.List.of(MIGRATION_REQUIRING_SOFT_RESET) + ) + + this.typerDeduper = typerDeduper + // Notably: isSchemaMismatch = true, + // and the MockStates have needsSoftReset = false and isMigrated = false. + Mockito.`when`(destinationHandler!!.gatherInitialState(ArgumentMatchers.anyList())) + .thenReturn( + java.util.List.of( + DestinationInitialStatus( + OVERWRITE_STREAM_CONFIG, + true, + InitialRawTableStatus(true, true, Optional.of(Instant.ofEpochMilli(42))), + true, + false, + MockState(false, false, true) + ), + DestinationInitialStatus( + APPEND_STREAM_CONFIG, + true, + InitialRawTableStatus(true, true, Optional.of(Instant.ofEpochMilli(42))), + true, + false, + MockState(false, false, true) + ), + DestinationInitialStatus( + DEDUPE_STREAM_CONFIG, + true, + InitialRawTableStatus(true, true, Optional.of(Instant.ofEpochMilli(42))), + true, + false, + MockState(false, false, true) + ) + ) + ) + + typerDeduper.prepareSchemasAndRunMigrations() + Mockito.verify(destinationHandler).execute(of("MIGRATE airbyte_internal.overwrite_stream")) + Mockito.verify(destinationHandler).execute(of("MIGRATE airbyte_internal.append_stream")) + Mockito.verify(destinationHandler).execute(of("MIGRATE airbyte_internal.dedup_stream")) + Mockito.verify(destinationHandler) + .commitDestinationStates( + Map.of( + OVERWRITE_STREAM_CONFIG.id, + MockState(true, true, true), + APPEND_STREAM_CONFIG.id, + MockState(true, true, true), + DEDUPE_STREAM_CONFIG.id, + MockState(true, true, true) + ) + ) + Mockito.verify(destinationHandler).gatherInitialState(any()) + Mockito.verify(destinationHandler) + .execute( + separately( + "CREATE SCHEMA airbyte_internal", + "CREATE SCHEMA overwrite_ns", + "CREATE SCHEMA append_ns", + "CREATE SCHEMA dedup_ns" + ) + ) + Mockito.verifyNoMoreInteractions(destinationHandler) + Mockito.clearInvocations(destinationHandler) + + typerDeduper.prepareFinalTables() + + // We should trigger a soft reset on the append + dedup streams. + Mockito.verify(destinationHandler) + .execute(of("PREPARE append_ns.append_stream FOR SOFT RESET")) + Mockito.verify(destinationHandler) + .execute(of("UPDATE TABLE append_ns.append_stream_ab_soft_reset WITHOUT SAFER CASTING")) + Mockito.verify(destinationHandler) + .execute( + of( + "OVERWRITE TABLE append_ns.append_stream FROM append_ns.append_stream_ab_soft_reset" + ) + ) + + Mockito.verify(destinationHandler) + .execute(of("PREPARE dedup_ns.dedup_stream FOR SOFT RESET")) + Mockito.verify(destinationHandler) + .execute(of("UPDATE TABLE dedup_ns.dedup_stream_ab_soft_reset WITHOUT SAFER CASTING")) + Mockito.verify(destinationHandler) + .execute( + of("OVERWRITE TABLE dedup_ns.dedup_stream FROM dedup_ns.dedup_stream_ab_soft_reset") + ) + + // The overwrite stream just gets a new table entirely, instead of a soft reset. + Mockito.verify(destinationHandler) + .execute(of("CREATE TABLE overwrite_ns.overwrite_stream_airbyte_tmp")) + + // And we should commit the states. Note that we now set needsSoftReset=false. + Mockito.verify(destinationHandler) + .commitDestinationStates( + Map.of( + OVERWRITE_STREAM_CONFIG.id, + MockState(false, true, true), + APPEND_STREAM_CONFIG.id, + MockState(false, true, true), + DEDUPE_STREAM_CONFIG.id, + MockState(false, true, true) + ) + ) + + Mockito.verifyNoMoreInteractions(destinationHandler) + } + + /** + * A test where we have multiple migrations. The first migration triggers a soft reset; the + * second migration does nothing. We should correctly trigger the soft reset. + */ + @Test + @Throws(Exception::class) + fun migrationsMixedResults() { + val typerDeduper = + DefaultTyperDeduper( + sqlGenerator!!, + destinationHandler, + parsedCatalog!!, + migrator!!, + java.util.List.of( + MIGRATION_REQUIRING_SOFT_RESET, + MIGRATION_NOT_REQUIRING_SOFT_RESET + ) + ) + this.typerDeduper = typerDeduper + + Mockito.`when`(destinationHandler!!.gatherInitialState(ArgumentMatchers.anyList())) + .thenReturn( + java.util.List.of( + DestinationInitialStatus( + OVERWRITE_STREAM_CONFIG, + true, + InitialRawTableStatus(true, true, Optional.of(Instant.ofEpochMilli(42))), + false, + false, + MockState(false, false, false) + ), + DestinationInitialStatus( + APPEND_STREAM_CONFIG, + true, + InitialRawTableStatus(true, true, Optional.of(Instant.ofEpochMilli(42))), + false, + false, + MockState(false, false, false) + ), + DestinationInitialStatus( + DEDUPE_STREAM_CONFIG, + true, + InitialRawTableStatus(true, true, Optional.of(Instant.ofEpochMilli(42))), + false, + false, + MockState(false, false, false) + ) + ) + ) + + typerDeduper.prepareSchemasAndRunMigrations() + Mockito.verify(destinationHandler).execute(of("MIGRATE airbyte_internal.overwrite_stream")) + Mockito.verify(destinationHandler).execute(of("MIGRATE airbyte_internal.append_stream")) + Mockito.verify(destinationHandler).execute(of("MIGRATE airbyte_internal.dedup_stream")) + Mockito.verify(destinationHandler) + .commitDestinationStates( + Map.of( + OVERWRITE_STREAM_CONFIG.id, + MockState(true, true, true), + APPEND_STREAM_CONFIG.id, + MockState(true, true, true), + DEDUPE_STREAM_CONFIG.id, + MockState(true, true, true) + ) + ) + Mockito.verify(destinationHandler).gatherInitialState(any()) + Mockito.verify(destinationHandler) + .execute( + separately( + "CREATE SCHEMA airbyte_internal", + "CREATE SCHEMA overwrite_ns", + "CREATE SCHEMA append_ns", + "CREATE SCHEMA dedup_ns" + ) + ) + Mockito.verifyNoMoreInteractions(destinationHandler) + Mockito.clearInvocations(destinationHandler) + + typerDeduper.prepareFinalTables() + + // We should trigger a soft reset on the append + dedup streams. + Mockito.verify(destinationHandler) + .execute(of("PREPARE append_ns.append_stream FOR SOFT RESET")) + Mockito.verify(destinationHandler) + .execute(of("UPDATE TABLE append_ns.append_stream_ab_soft_reset WITHOUT SAFER CASTING")) + Mockito.verify(destinationHandler) + .execute( + of( + "OVERWRITE TABLE append_ns.append_stream FROM append_ns.append_stream_ab_soft_reset" + ) + ) + + Mockito.verify(destinationHandler) + .execute(of("PREPARE dedup_ns.dedup_stream FOR SOFT RESET")) + Mockito.verify(destinationHandler) + .execute(of("UPDATE TABLE dedup_ns.dedup_stream_ab_soft_reset WITHOUT SAFER CASTING")) + Mockito.verify(destinationHandler) + .execute( + of("OVERWRITE TABLE dedup_ns.dedup_stream FROM dedup_ns.dedup_stream_ab_soft_reset") + ) + + // The overwrite stream just gets a new table + Mockito.verify(destinationHandler) + .execute(of("CREATE TABLE overwrite_ns.overwrite_stream_airbyte_tmp")) + + // And we should commit the states. + Mockito.verify(destinationHandler) + .commitDestinationStates( + Map.of( + OVERWRITE_STREAM_CONFIG.id, + MockState(false, true, true), + APPEND_STREAM_CONFIG.id, + MockState(false, true, true), + DEDUPE_STREAM_CONFIG.id, + MockState(false, true, true) + ) + ) + + Mockito.verifyNoMoreInteractions(destinationHandler) + } + + /** + * A test where a previous sync committed a destination state with needsSoftReset=true. We + * should trigger a soft reset, even though the current sync doesn't need it. + */ + @Test + @Throws(Exception::class) + fun previousSyncSoftReset() { + // Notably: isSchemaMismatch = false, but the MockStates have needsSoftReset = true. + Mockito.`when`(destinationHandler!!.gatherInitialState(ArgumentMatchers.anyList())) + .thenReturn( + java.util.List.of( + DestinationInitialStatus( + OVERWRITE_STREAM_CONFIG, + true, + InitialRawTableStatus(true, true, Optional.of(Instant.ofEpochMilli(42))), + false, + false, + MockState(true, false, false) + ), + DestinationInitialStatus( + APPEND_STREAM_CONFIG, + true, + InitialRawTableStatus(true, true, Optional.of(Instant.ofEpochMilli(42))), + false, + false, + MockState(true, false, false) + ), + DestinationInitialStatus( + DEDUPE_STREAM_CONFIG, + true, + InitialRawTableStatus(true, true, Optional.of(Instant.ofEpochMilli(42))), + false, + false, + MockState(true, false, false) + ) + ) + ) + + typerDeduper!!.prepareSchemasAndRunMigrations() + // Even though we didn't do anything, we still commit the destination states. + // This is technically unnecessary, but it's a single extra call and it's simpler to just do + // it. + Mockito.verify(destinationHandler) + .commitDestinationStates( + Map.of( + OVERWRITE_STREAM_CONFIG.id, + MockState(true, false, false), + APPEND_STREAM_CONFIG.id, + MockState(true, false, false), + DEDUPE_STREAM_CONFIG.id, + MockState(true, false, false) + ) + ) + Mockito.verify(destinationHandler).gatherInitialState(any()) + Mockito.verify(destinationHandler) + .execute( + separately( + "CREATE SCHEMA airbyte_internal", + "CREATE SCHEMA overwrite_ns", + "CREATE SCHEMA append_ns", + "CREATE SCHEMA dedup_ns" + ) + ) + Mockito.verifyNoMoreInteractions(destinationHandler) + Mockito.clearInvocations(destinationHandler) + + typerDeduper!!.prepareFinalTables() + + // We should trigger a soft reset on the append + dedup streams. + Mockito.verify(destinationHandler) + .execute(of("PREPARE append_ns.append_stream FOR SOFT RESET")) + Mockito.verify(destinationHandler) + .execute(of("UPDATE TABLE append_ns.append_stream_ab_soft_reset WITHOUT SAFER CASTING")) + Mockito.verify(destinationHandler) + .execute( + of( + "OVERWRITE TABLE append_ns.append_stream FROM append_ns.append_stream_ab_soft_reset" + ) + ) + + Mockito.verify(destinationHandler) + .execute(of("PREPARE dedup_ns.dedup_stream FOR SOFT RESET")) + Mockito.verify(destinationHandler) + .execute(of("UPDATE TABLE dedup_ns.dedup_stream_ab_soft_reset WITHOUT SAFER CASTING")) + Mockito.verify(destinationHandler) + .execute( + of("OVERWRITE TABLE dedup_ns.dedup_stream FROM dedup_ns.dedup_stream_ab_soft_reset") + ) + + // The overwrite stream just gets a new table entirely, instead of a soft reset. + Mockito.verify(destinationHandler) + .execute(of("CREATE TABLE overwrite_ns.overwrite_stream_airbyte_tmp")) + + // And we should commit the states. Note that we now set needsSoftReset=false. + Mockito.verify(destinationHandler) + .commitDestinationStates( + Map.of( + OVERWRITE_STREAM_CONFIG.id, + MockState(false, false, false), + APPEND_STREAM_CONFIG.id, + MockState(false, false, false), + DEDUPE_STREAM_CONFIG.id, + MockState(false, false, false) + ) + ) + + Mockito.verifyNoMoreInteractions(destinationHandler) + } + + companion object { + private val OVERWRITE_STREAM_CONFIG = + StreamConfig( + StreamId( + "overwrite_ns", + "overwrite_stream", + "airbyte_internal", + "overwrite_stream", + "overwrite_ns", + "overwrite_stream" + ), + null, + DestinationSyncMode.OVERWRITE, + null, + null, + null + ) + private val APPEND_STREAM_CONFIG = + StreamConfig( + StreamId( + "append_ns", + "append_stream", + "airbyte_internal", + "append_stream", + "append_ns", + "append_stream" + ), + null, + DestinationSyncMode.APPEND, + null, + null, + null + ) + private val DEDUPE_STREAM_CONFIG = + StreamConfig( + StreamId( + "dedup_ns", + "dedup_stream", + "airbyte_internal", + "dedup_stream", + "dedup_ns", + "dedup_stream" + ), + null, + DestinationSyncMode.APPEND_DEDUP, + null, + null, + null + ) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/kotlin/io/airbyte/integrations/base/destination/typing_deduping/DestinationV1V2MigratorTest.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/kotlin/io/airbyte/integrations/base/destination/typing_deduping/DestinationV1V2MigratorTest.kt new file mode 100644 index 0000000000000..1a27bf462e985 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/kotlin/io/airbyte/integrations/base/destination/typing_deduping/DestinationV1V2MigratorTest.kt @@ -0,0 +1,186 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +import io.airbyte.cdk.integrations.base.JavaBaseConstants +import io.airbyte.protocol.models.v0.DestinationSyncMode +import java.util.* +import java.util.stream.Stream +import lombok.SneakyThrows +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.extension.ExtensionContext +import org.junit.jupiter.params.ParameterizedTest +import org.junit.jupiter.params.provider.Arguments +import org.junit.jupiter.params.provider.ArgumentsProvider +import org.junit.jupiter.params.provider.ArgumentsSource +import org.mockito.Mockito +import org.mockito.Mockito.mock +import org.mockito.kotlin.any +import org.mockito.kotlin.spy + +class DestinationV1V2MigratorTest { + class ShouldMigrateTestArgumentProvider : ArgumentsProvider { + @Throws(Exception::class) + override fun provideArguments(context: ExtensionContext): Stream { + // Don't throw an exception + + val v2SchemaMatches = true + + return Stream.of( // Doesn't Migrate because of sync mode + Arguments.of( + DestinationSyncMode.OVERWRITE, + makeMockMigrator(true, false, v2SchemaMatches, true, true), + false + ), // Doesn't migrate because v2 table already exists + Arguments.of( + DestinationSyncMode.APPEND, + makeMockMigrator(true, true, v2SchemaMatches, true, true), + false + ), + Arguments.of( + DestinationSyncMode.APPEND_DEDUP, + makeMockMigrator(true, true, v2SchemaMatches, true, true), + false + ), // Doesn't migrate because no valid v1 raw table exists + Arguments.of( + DestinationSyncMode.APPEND, + makeMockMigrator(true, false, v2SchemaMatches, false, true), + false + ), + Arguments.of( + DestinationSyncMode.APPEND_DEDUP, + makeMockMigrator(true, false, v2SchemaMatches, false, true), + false + ), + Arguments.of( + DestinationSyncMode.APPEND, + makeMockMigrator(true, false, v2SchemaMatches, true, false), + false + ), + Arguments.of( + DestinationSyncMode.APPEND_DEDUP, + makeMockMigrator(true, false, v2SchemaMatches, true, false), + false + ), // Migrates + Arguments.of(DestinationSyncMode.APPEND, noIssuesMigrator(), true), + Arguments.of(DestinationSyncMode.APPEND_DEDUP, noIssuesMigrator(), true) + ) + } + } + + @ParameterizedTest + @ArgumentsSource(ShouldMigrateTestArgumentProvider::class) + @Throws(Exception::class) + fun testShouldMigrate( + destinationSyncMode: DestinationSyncMode, + migrator: BaseDestinationV1V2Migrator<*>, + expected: Boolean + ) { + val config = StreamConfig(STREAM_ID, null, destinationSyncMode, null, null, null) + val actual = migrator.shouldMigrate(config) + Assertions.assertEquals(expected, actual) + } + + @Test + @Throws(Exception::class) + fun testMismatchedSchemaThrowsException() { + val config = + StreamConfig(STREAM_ID, null, DestinationSyncMode.APPEND_DEDUP, null, null, null) + val migrator = makeMockMigrator(true, true, false, false, false) + val exception = + Assertions.assertThrows(UnexpectedSchemaException::class.java) { + migrator.shouldMigrate(config) + } + Assertions.assertEquals( + "Destination V2 Raw Table does not match expected Schema", + exception.message + ) + } + + @SneakyThrows + @Test + @Throws(Exception::class) + fun testMigrate() { + val sqlGenerator = MockSqlGenerator() + val stream = + StreamConfig(STREAM_ID, null, DestinationSyncMode.APPEND_DEDUP, null, null, null) + val handler = Mockito.mock(DestinationHandler::class.java) + val sql = sqlGenerator.migrateFromV1toV2(STREAM_ID, "v1_raw_namespace", "v1_raw_table") + // All is well + val migrator = noIssuesMigrator() + migrator.migrate(sqlGenerator, handler, stream) + Mockito.verify(handler).execute(sql) + // Exception thrown when executing sql, TableNotMigratedException thrown + Mockito.doThrow(Exception::class.java).`when`(handler).execute(any()) + val exception = + Assertions.assertThrows(TableNotMigratedException::class.java) { + migrator.migrate(sqlGenerator, handler, stream) + } + Assertions.assertEquals( + "Attempted and failed to migrate stream final_table", + exception.message + ) + } + + companion object { + private val STREAM_ID = StreamId("final", "final_table", "raw", "raw_table", null, null) + + @Throws(Exception::class) + fun makeMockMigrator( + v2NamespaceExists: Boolean, + v2TableExists: Boolean, + v2RawSchemaMatches: Boolean, + v1RawTableExists: Boolean, + v1RawTableSchemaMatches: Boolean + ): BaseDestinationV1V2Migrator<*> { + val migrator: BaseDestinationV1V2Migrator = spy() + Mockito.`when`(migrator.doesAirbyteInternalNamespaceExist(any())) + .thenReturn(v2NamespaceExists) + val existingTable = + if (v2TableExists) Optional.of("v2_raw") else Optional.empty() + Mockito.`when`(migrator.getTableIfExists("raw", "raw_table")).thenReturn(existingTable) + Mockito.`when`( + migrator.schemaMatchesExpectation( + "v2_raw", + JavaBaseConstants.V2_RAW_TABLE_COLUMN_NAMES + ) + ) + .thenReturn(false) + Mockito.`when`( + migrator.schemaMatchesExpectation( + "v2_raw", + JavaBaseConstants.V2_RAW_TABLE_COLUMN_NAMES_WITHOUT_META + ) + ) + .thenReturn(v2RawSchemaMatches) + + Mockito.`when`(migrator.convertToV1RawName(any())) + .thenReturn(NamespacedTableName("v1_raw_namespace", "v1_raw_table")) + val existingV1RawTable = + if (v1RawTableExists) Optional.of("v1_raw") else Optional.empty() + Mockito.`when`(migrator.getTableIfExists("v1_raw_namespace", "v1_raw_table")) + .thenReturn(existingV1RawTable) + Mockito.`when`( + migrator.schemaMatchesExpectation( + "v1_raw", + JavaBaseConstants.LEGACY_RAW_TABLE_COLUMNS + ) + ) + .thenReturn(v1RawTableSchemaMatches) + return migrator + } + + @Throws(Exception::class) + fun noIssuesMigrator(): BaseDestinationV1V2Migrator<*> { + return makeMockMigrator( + v2NamespaceExists = true, + v2TableExists = false, + v2RawSchemaMatches = true, + v1RawTableExists = true, + v1RawTableSchemaMatches = true + ) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/kotlin/io/airbyte/integrations/base/destination/typing_deduping/MockSqlGenerator.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/kotlin/io/airbyte/integrations/base/destination/typing_deduping/MockSqlGenerator.kt new file mode 100644 index 0000000000000..ac25371b61a84 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/kotlin/io/airbyte/integrations/base/destination/typing_deduping/MockSqlGenerator.kt @@ -0,0 +1,84 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +import io.airbyte.integrations.base.destination.typing_deduping.Sql.Companion.of +import java.time.Instant +import java.util.* +import java.util.function.Function + +/** Basic SqlGenerator mock. See [DefaultTyperDeduperTest] for example usage. */ +internal class MockSqlGenerator : SqlGenerator { + override fun buildStreamId( + namespace: String, + name: String, + rawNamespaceOverride: String + ): StreamId { + throw RuntimeException() + } + + override fun buildColumnId(name: String, suffix: String?): ColumnId { + throw RuntimeException() + } + + override fun createSchema(schema: String?): Sql { + return of("CREATE SCHEMA $schema") + } + + override fun createTable(stream: StreamConfig, suffix: String, force: Boolean): Sql { + return of("CREATE TABLE " + stream!!.id.finalTableId("", suffix!!)) + } + + override fun updateTable( + stream: StreamConfig, + finalSuffix: String, + minRawTimestamp: Optional, + useExpensiveSaferCasting: Boolean + ): Sql { + val timestampFilter = + minRawTimestamp + .map(Function { timestamp: Instant? -> " WHERE extracted_at > $timestamp" }) + .orElse("") + val casting = if (useExpensiveSaferCasting) " WITH" else " WITHOUT" + " SAFER CASTING" + return of( + ("UPDATE TABLE " + stream.id.finalTableId("", finalSuffix)).toString() + + casting + + timestampFilter + ) + } + + override fun overwriteFinalTable(stream: StreamId, finalSuffix: String): Sql { + return of( + "OVERWRITE TABLE " + + stream.finalTableId("") + + " FROM " + + stream.finalTableId("", finalSuffix) + ) + } + + override fun migrateFromV1toV2( + streamId: StreamId, + namespace: String?, + tableName: String? + ): Sql { + return of( + "MIGRATE TABLE " + + java.lang.String.join(".", namespace, tableName) + + " TO " + + streamId!!.rawTableId("") + ) + } + + override fun prepareTablesForSoftReset(stream: StreamConfig): Sql { + return of( + "PREPARE " + + java.lang.String.join(".", stream.id.originalNamespace, stream.id.originalName) + + " FOR SOFT RESET" + ) + } + + override fun clearLoadedAt(streamId: StreamId): Sql { + throw RuntimeException() + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/kotlin/io/airbyte/integrations/base/destination/typing_deduping/StreamIdTest.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/kotlin/io/airbyte/integrations/base/destination/typing_deduping/StreamIdTest.kt new file mode 100644 index 0000000000000..e1a8c2b04427d --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/kotlin/io/airbyte/integrations/base/destination/typing_deduping/StreamIdTest.kt @@ -0,0 +1,33 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +import io.airbyte.integrations.base.destination.typing_deduping.StreamId.Companion.concatenateRawTableName +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.function.Executable + +internal class StreamIdTest { + /** + * Both these streams naively want the same raw table name ("aaa_abab_bbb_abab_ccc"). Verify + * that they don't actually use the same raw table. + */ + @Test + fun rawNameCollision() { + val stream1 = concatenateRawTableName("aaa_abab_bbb", "ccc") + val stream2 = concatenateRawTableName("aaa", "bbb_abab_ccc") + + Assertions.assertAll( + Executable { Assertions.assertEquals("aaa_abab_bbb_raw__stream_ccc", stream1) }, + Executable { Assertions.assertEquals("aaa_raw__stream_bbb_abab_ccc", stream2) } + ) + } + + @Test + fun noUnderscores() { + val stream = concatenateRawTableName("a", "b") + + Assertions.assertEquals("a_raw__stream_b", stream) + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/kotlin/io/airbyte/integrations/base/destination/typing_deduping/TypeAndDedupeOperationValveTest.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/kotlin/io/airbyte/integrations/base/destination/typing_deduping/TypeAndDedupeOperationValveTest.kt new file mode 100644 index 0000000000000..a7c4265855ddd --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/kotlin/io/airbyte/integrations/base/destination/typing_deduping/TypeAndDedupeOperationValveTest.kt @@ -0,0 +1,123 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +import com.fasterxml.jackson.databind.ObjectMapper +import io.airbyte.cdk.integrations.base.DestinationConfig.Companion.clearInstance +import io.airbyte.cdk.integrations.base.DestinationConfig.Companion.initialize +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair +import java.util.concurrent.atomic.AtomicLong +import java.util.function.Supplier +import java.util.stream.IntStream +import org.junit.jupiter.api.AfterEach +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.params.ParameterizedTest +import org.junit.jupiter.params.provider.ValueSource + +class TypeAndDedupeOperationValveTest { + private var minuteUpdates: Supplier? = null + + @BeforeEach + fun setup() { + val start = AtomicLong(0) + minuteUpdates = Supplier { start.getAndUpdate { l: Long -> l + (60 * 1000) } } + } + + @AfterEach + fun clearDestinationConfig() { + clearInstance() + } + + private fun initializeDestinationConfigOption(enableIncrementalTypingAndDeduping: Boolean) { + val mapper = ObjectMapper() + val objectNode = mapper.createObjectNode() + objectNode.put("enable_incremental_final_table_updates", enableIncrementalTypingAndDeduping) + initialize(objectNode) + } + + private fun elapseTime(timing: Supplier?, iterations: Int) { + IntStream.range(0, iterations).forEach { `__`: Int -> timing!!.get() } + } + + @ParameterizedTest + @ValueSource(booleans = [true, false]) + fun testAddStream(enableIncrementalTypingAndDeduping: Boolean) { + initializeDestinationConfigOption(enableIncrementalTypingAndDeduping) + val valve = TypeAndDedupeOperationValve(ALWAYS_ZERO) + valve.addStream(STREAM_A) + Assertions.assertEquals(-1, valve.getIncrementInterval(STREAM_A)) + Assertions.assertEquals( + valve.readyToTypeAndDedupe(STREAM_A), + enableIncrementalTypingAndDeduping + ) + Assertions.assertEquals(valve[STREAM_A], 0L) + } + + @ParameterizedTest + @ValueSource(booleans = [true, false]) + fun testReadyToTypeAndDedupe(enableIncrementalTypingAndDeduping: Boolean) { + initializeDestinationConfigOption(enableIncrementalTypingAndDeduping) + val valve = TypeAndDedupeOperationValve(minuteUpdates!!) + // method call increments time + valve.addStream(STREAM_A) + elapseTime(minuteUpdates, 1) + // method call increments time + valve.addStream(STREAM_B) + // method call increments time + Assertions.assertEquals( + valve.readyToTypeAndDedupe(STREAM_A), + enableIncrementalTypingAndDeduping + ) + elapseTime(minuteUpdates, 1) + Assertions.assertEquals( + valve.readyToTypeAndDedupe(STREAM_B), + enableIncrementalTypingAndDeduping + ) + valve.updateTimeAndIncreaseInterval(STREAM_A) + Assertions.assertEquals((1000 * 60 * 60 * 6).toLong(), valve.getIncrementInterval(STREAM_A)) + // method call increments time + Assertions.assertFalse(valve.readyToTypeAndDedupe(STREAM_A)) + // More than enough time has passed now + elapseTime(minuteUpdates, 60 * 6) + Assertions.assertEquals( + valve.readyToTypeAndDedupe(STREAM_A), + enableIncrementalTypingAndDeduping + ) + } + + @ParameterizedTest + @ValueSource(booleans = [true, false]) + fun testUpdateTimeAndIncreaseInterval(enableIncrementalTypingAndDeduping: Boolean) { + initializeDestinationConfigOption(enableIncrementalTypingAndDeduping) + val valve = TypeAndDedupeOperationValve(minuteUpdates!!) + valve.addStream(STREAM_A) + IntStream.range(0, 1).forEach { `__`: Int -> + Assertions.assertEquals( + valve.readyToTypeAndDedupe(STREAM_A), + enableIncrementalTypingAndDeduping + ) + } // start + // ready + // to T&D + Assertions.assertEquals( + valve.readyToTypeAndDedupe(STREAM_A), + enableIncrementalTypingAndDeduping + ) + valve.updateTimeAndIncreaseInterval(STREAM_A) + IntStream.range(0, 360).forEach { `__`: Int -> + Assertions.assertFalse(valve.readyToTypeAndDedupe(STREAM_A)) + } + Assertions.assertEquals( + valve.readyToTypeAndDedupe(STREAM_A), + enableIncrementalTypingAndDeduping + ) + } + + companion object { + private val STREAM_A = AirbyteStreamNameNamespacePair("a", "a") + private val STREAM_B = AirbyteStreamNameNamespacePair("b", "b") + private val ALWAYS_ZERO = Supplier { 0L } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/testFixtures/java/io/airbyte/integrations/base/destination/typing_deduping/BaseSqlGeneratorIntegrationTest.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/testFixtures/java/io/airbyte/integrations/base/destination/typing_deduping/BaseSqlGeneratorIntegrationTest.java deleted file mode 100644 index c2be6502365c2..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/testFixtures/java/io/airbyte/integrations/base/destination/typing_deduping/BaseSqlGeneratorIntegrationTest.java +++ /dev/null @@ -1,1322 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -import static java.util.Collections.emptyList; -import static java.util.Collections.singletonList; -import static org.junit.jupiter.api.Assertions.assertAll; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.Streams; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.string.Strings; -import io.airbyte.integrations.base.destination.typing_deduping.migrators.MinimumDestinationState; -import io.airbyte.protocol.models.v0.DestinationSyncMode; -import io.airbyte.protocol.models.v0.SyncMode; -import java.time.Instant; -import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.function.Function; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import org.apache.commons.lang3.tuple.Pair; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.parallel.Execution; -import org.junit.jupiter.api.parallel.ExecutionMode; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.ValueSource; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * This class exercises {@link SqlGenerator} implementations. All destinations should extend this - * class for their respective implementation. Subclasses are encouraged to add additional tests with - * destination-specific behavior (for example, verifying that datasets are created in the correct - * BigQuery region). - *

    - * Subclasses should implement a {@link org.junit.jupiter.api.BeforeAll} method to load any secrets - * and connect to the destination. This test expects to be able to run - * {@link #getDestinationHandler()} in a {@link org.junit.jupiter.api.BeforeEach} method. - */ -@Execution(ExecutionMode.CONCURRENT) -public abstract class BaseSqlGeneratorIntegrationTest { - - private static final Logger LOGGER = LoggerFactory.getLogger(BaseSqlGeneratorIntegrationTest.class); - /** - * This, along with {@link #FINAL_TABLE_COLUMN_NAMES_CDC}, is the list of columns that should be in - * the final table. They're useful for generating SQL queries to insert records into the final - * table. - */ - protected static final List FINAL_TABLE_COLUMN_NAMES = List.of( - "_airbyte_raw_id", - "_airbyte_extracted_at", - "_airbyte_meta", - "id1", - "id2", - "updated_at", - "struct", - "array", - "string", - "number", - "integer", - "boolean", - "timestamp_with_timezone", - "timestamp_without_timezone", - "time_with_timezone", - "time_without_timezone", - "date", - "unknown"); - protected static final List FINAL_TABLE_COLUMN_NAMES_CDC; - - static { - FINAL_TABLE_COLUMN_NAMES_CDC = Streams.concat( - FINAL_TABLE_COLUMN_NAMES.stream(), - Stream.of("_ab_cdc_deleted_at")).toList(); - } - - protected RecordDiffer DIFFER; - - /** - * Subclasses may use these four StreamConfigs in their tests. - */ - protected StreamConfig incrementalDedupStream; - /** - * We intentionally don't have full refresh overwrite/append streams. Those actually behave - * identically in the sqlgenerator. Overwrite mode is actually handled in - * {@link DefaultTyperDeduper}. - */ - protected StreamConfig incrementalAppendStream; - protected StreamConfig cdcIncrementalDedupStream; - /** - * This isn't particularly realistic, but it's technically possible. - */ - protected StreamConfig cdcIncrementalAppendStream; - - protected SqlGenerator generator; - protected DestinationHandler destinationHandler; - protected String namespace; - - protected StreamId streamId; - private List primaryKey; - private ColumnId cursor; - private LinkedHashMap COLUMNS; - - protected abstract SqlGenerator getSqlGenerator(); - - protected abstract DestinationHandler getDestinationHandler(); - - /** - * Subclasses should override this method if they need to make changes to the stream ID. For - * example, you could upcase the final table name here. - */ - protected StreamId buildStreamId(final String namespace, final String finalTableName, final String rawTableName) { - return new StreamId(namespace, finalTableName, namespace, rawTableName, namespace, finalTableName); - } - - /** - * Do any setup work to create a namespace for this test run. For example, this might create a - * BigQuery dataset, or a Snowflake schema. - */ - protected abstract void createNamespace(String namespace) throws Exception; - - /** - * Create a raw table using the StreamId's rawTableId. - */ - protected abstract void createRawTable(StreamId streamId) throws Exception; - - /** - * Creates a raw table in the v1 format - */ - protected abstract void createV1RawTable(StreamId v1RawTable) throws Exception; - - protected abstract void insertRawTableRecords(StreamId streamId, List records) throws Exception; - - protected abstract void insertV1RawTableRecords(StreamId streamId, List records) throws Exception; - - protected abstract void insertFinalTableRecords(boolean includeCdcDeletedAt, StreamId streamId, String suffix, List records) - throws Exception; - - /** - * The two dump methods are defined identically as in {@link BaseTypingDedupingTest}, but with - * slightly different method signature. This test expects subclasses to respect the raw/finalTableId - * on the StreamId object, rather than hardcoding e.g. the airbyte_internal dataset. - *

    - * The {@code _airbyte_data} field must be deserialized into an ObjectNode, even if it's stored in - * the destination as a string. - */ - protected abstract List dumpRawTableRecords(StreamId streamId) throws Exception; - - protected abstract List dumpFinalTableRecords(StreamId streamId, String suffix) throws Exception; - - /** - * Clean up all resources in the namespace. For example, this might delete the BigQuery dataset - * created in {@link #createNamespace(String)}. - */ - protected abstract void teardownNamespace(String namespace) throws Exception; - - /** - * Identical to {@link BaseTypingDedupingTest#getRawMetadataColumnNames()}. - */ - protected Map getRawMetadataColumnNames() { - return new HashMap<>(); - } - - /** - * Identical to {@link BaseTypingDedupingTest#getFinalMetadataColumnNames()}. - */ - protected Map getFinalMetadataColumnNames() { - return new HashMap<>(); - } - - /** - * This test implementation is extremely destination-specific, but all destinations must implement - * it. This test should verify that creating a table using {@link #incrementalDedupStream} works as - * expected, including column types, indexing, partitioning, etc. - *

    - * Note that subclasses must also annotate their implementation with @Test. - */ - @Test - public abstract void testCreateTableIncremental() throws Exception; - - @BeforeEach - public void setup() throws Exception { - generator = getSqlGenerator(); - - final ColumnId id1 = generator.buildColumnId("id1"); - final ColumnId id2 = generator.buildColumnId("id2"); - primaryKey = List.of(id1, id2); - cursor = generator.buildColumnId("updated_at"); - - COLUMNS = new LinkedHashMap<>(); - COLUMNS.put(id1, AirbyteProtocolType.INTEGER); - COLUMNS.put(id2, AirbyteProtocolType.INTEGER); - COLUMNS.put(cursor, AirbyteProtocolType.TIMESTAMP_WITH_TIMEZONE); - COLUMNS.put(generator.buildColumnId("struct"), new Struct(new LinkedHashMap<>())); - COLUMNS.put(generator.buildColumnId("array"), new Array(AirbyteProtocolType.UNKNOWN)); - COLUMNS.put(generator.buildColumnId("string"), AirbyteProtocolType.STRING); - COLUMNS.put(generator.buildColumnId("number"), AirbyteProtocolType.NUMBER); - COLUMNS.put(generator.buildColumnId("integer"), AirbyteProtocolType.INTEGER); - COLUMNS.put(generator.buildColumnId("boolean"), AirbyteProtocolType.BOOLEAN); - COLUMNS.put(generator.buildColumnId("timestamp_with_timezone"), AirbyteProtocolType.TIMESTAMP_WITH_TIMEZONE); - COLUMNS.put(generator.buildColumnId("timestamp_without_timezone"), AirbyteProtocolType.TIMESTAMP_WITHOUT_TIMEZONE); - COLUMNS.put(generator.buildColumnId("time_with_timezone"), AirbyteProtocolType.TIME_WITH_TIMEZONE); - COLUMNS.put(generator.buildColumnId("time_without_timezone"), AirbyteProtocolType.TIME_WITHOUT_TIMEZONE); - COLUMNS.put(generator.buildColumnId("date"), AirbyteProtocolType.DATE); - COLUMNS.put(generator.buildColumnId("unknown"), AirbyteProtocolType.UNKNOWN); - - final LinkedHashMap cdcColumns = new LinkedHashMap<>(COLUMNS); - cdcColumns.put(generator.buildColumnId("_ab_cdc_deleted_at"), AirbyteProtocolType.TIMESTAMP_WITH_TIMEZONE); - - DIFFER = new RecordDiffer( - getRawMetadataColumnNames(), - getFinalMetadataColumnNames(), - Pair.of(id1, AirbyteProtocolType.INTEGER), - Pair.of(id2, AirbyteProtocolType.INTEGER), - Pair.of(cursor, AirbyteProtocolType.TIMESTAMP_WITH_TIMEZONE)); - - namespace = Strings.addRandomSuffix("sql_generator_test", "_", 10); - // This is not a typical stream ID would look like, but SqlGenerator isn't allowed to make any - // assumptions about StreamId structure. - // In practice, the final table would be testDataset.users, and the raw table would be - // airbyte_internal.testDataset_raw__stream_users. - streamId = buildStreamId(namespace, "users_final", "users_raw"); - - incrementalDedupStream = new StreamConfig( - streamId, - SyncMode.INCREMENTAL, - DestinationSyncMode.APPEND_DEDUP, - primaryKey, - Optional.of(cursor), - COLUMNS); - incrementalAppendStream = new StreamConfig( - streamId, - SyncMode.INCREMENTAL, - DestinationSyncMode.APPEND, - primaryKey, - Optional.of(cursor), - COLUMNS); - - cdcIncrementalDedupStream = new StreamConfig( - streamId, - SyncMode.INCREMENTAL, - DestinationSyncMode.APPEND_DEDUP, - primaryKey, - Optional.of(cursor), - cdcColumns); - cdcIncrementalAppendStream = new StreamConfig( - streamId, - SyncMode.INCREMENTAL, - DestinationSyncMode.APPEND, - primaryKey, - Optional.of(cursor), - cdcColumns); - - destinationHandler = getDestinationHandler(); - - LOGGER.info("Running with namespace {}", namespace); - createNamespace(namespace); - } - - @AfterEach - public void teardown() throws Exception { - teardownNamespace(namespace); - } - - private DestinationInitialStatus getDestinationInitialState(StreamConfig streamConfig) throws Exception { - final List> initialState = - destinationHandler.gatherInitialState(List.of(streamConfig)); - assertEquals(1, initialState.size(), "gatherInitialState returned the wrong number of futures"); - assertTrue(initialState.getFirst().isFinalTablePresent(), "Destination handler could not find existing table"); - return initialState.getFirst(); - } - - /** - * Create a table and verify that we correctly recognize it as identical to itself. - */ - @Test - public void detectNoSchemaChange() throws Exception { - final Sql createTable = generator.createTable(incrementalDedupStream, "", false); - destinationHandler.execute(createTable); - final DestinationInitialStatus destinationInitialStatus = getDestinationInitialState(incrementalDedupStream); - assertFalse( - destinationInitialStatus.isSchemaMismatch(), - "Unchanged schema was incorrectly detected as a schema change."); - } - - /** - * Verify that adding a new column is detected as a schema change. - */ - @Test - public void detectColumnAdded() throws Exception { - final Sql createTable = generator.createTable(incrementalDedupStream, "", false); - destinationHandler.execute(createTable); - incrementalDedupStream.columns().put( - generator.buildColumnId("new_column"), - AirbyteProtocolType.STRING); - final DestinationInitialStatus destinationInitialStatus = getDestinationInitialState(incrementalDedupStream); - assertTrue( - destinationInitialStatus.isSchemaMismatch(), - "Adding a new column was not detected as a schema change."); - } - - /** - * Verify that removing a column is detected as a schema change. - */ - @Test - public void detectColumnRemoved() throws Exception { - final Sql createTable = generator.createTable(incrementalDedupStream, "", false); - destinationHandler.execute(createTable); - incrementalDedupStream.columns().remove(generator.buildColumnId("string")); - final DestinationInitialStatus destinationInitialStatus = getDestinationInitialState(incrementalDedupStream); - assertTrue( - destinationInitialStatus.isSchemaMismatch(), - "Removing a column was not detected as a schema change."); - } - - /** - * Verify that changing a column's type is detected as a schema change. - */ - @Test - public void detectColumnChanged() throws Exception { - final Sql createTable = generator.createTable(incrementalDedupStream, "", false); - destinationHandler.execute(createTable); - incrementalDedupStream.columns().put( - generator.buildColumnId("string"), - AirbyteProtocolType.INTEGER); - final DestinationInitialStatus destinationInitialStatus = getDestinationInitialState(incrementalDedupStream); - assertTrue( - destinationInitialStatus.isSchemaMismatch(), - "Altering a column was not detected as a schema change."); - } - - /** - * Test that T+D supports streams whose name and namespace are the same. - */ - @Test - public void incrementalDedupSameNameNamespace() throws Exception { - final StreamId streamId = buildStreamId(namespace, namespace, namespace + "_raw"); - final StreamConfig stream = new StreamConfig( - streamId, - SyncMode.INCREMENTAL, - DestinationSyncMode.APPEND_DEDUP, - incrementalDedupStream.primaryKey(), - incrementalDedupStream.cursor(), - incrementalDedupStream.columns()); - - createRawTable(streamId); - createFinalTable(stream, ""); - insertRawTableRecords( - streamId, - List.of(Jsons.deserialize( - """ - { - "_airbyte_raw_id": "5ce60e70-98aa-4fe3-8159-67207352c4f0", - "_airbyte_extracted_at": "2023-01-01T00:00:00Z", - "_airbyte_data": {"id1": 1, "id2": 100} - } - """))); - - TypeAndDedupeTransaction.executeTypeAndDedupe(generator, destinationHandler, stream, Optional.empty(), ""); - - final List rawRecords = dumpRawTableRecords(streamId); - final List finalRecords = dumpFinalTableRecords(streamId, ""); - verifyRecordCounts(1, rawRecords, 1, finalRecords); - } - - private DestinationInitialStatus getOnly(final List> initialStates) { - assertEquals(1, initialStates.size()); - return initialStates.getFirst(); - } - - /** - * Run a full T+D update for an incremental-dedup stream, writing to a final table with "_foo" - * suffix, with values for all data types. Verifies all behaviors for all types: - *

      - *
    • A valid, nonnull value
    • - *
    • No value (i.e. the column is missing from the record)
    • - *
    • A JSON null value
    • - *
    • An invalid value
    • - *
    - *

    - * In practice, incremental streams never write to a suffixed table, but SqlGenerator isn't allowed - * to make that assumption (and we might as well exercise that code path). - */ - @Test - public void allTypes() throws Exception { - // Add case-sensitive columnName to test json path querying - incrementalDedupStream.columns().put( - generator.buildColumnId("IamACaseSensitiveColumnName"), - AirbyteProtocolType.STRING); - createRawTable(streamId); - createFinalTable(incrementalDedupStream, ""); - insertRawTableRecords( - streamId, - BaseTypingDedupingTest.readRecords("sqlgenerator/alltypes_inputrecords.jsonl")); - - DestinationInitialStatus initialState = getOnly(destinationHandler.gatherInitialState(List.of(incrementalDedupStream))); - assertTrue(initialState.isFinalTableEmpty(), "Final table should be empty before T+D"); - - TypeAndDedupeTransaction.executeTypeAndDedupe(generator, destinationHandler, incrementalDedupStream, Optional.empty(), ""); - - verifyRecords( - "sqlgenerator/alltypes_expectedrecords_raw.jsonl", - dumpRawTableRecords(streamId), - "sqlgenerator/alltypes_expectedrecords_final.jsonl", - dumpFinalTableRecords(streamId, "")); - initialState = getOnly(destinationHandler.gatherInitialState(List.of(incrementalDedupStream))); - assertFalse(initialState.isFinalTableEmpty(), "Final table should not be empty after T+D"); - } - - /** - * Run a basic test to verify that we don't throw an exception on basic data values. - */ - @Test - public void allTypesUnsafe() throws Exception { - createRawTable(streamId); - createFinalTable(incrementalDedupStream, ""); - insertRawTableRecords( - streamId, - BaseTypingDedupingTest.readRecords("sqlgenerator/alltypes_unsafe_inputrecords.jsonl")); - - DestinationInitialStatus initialState = getOnly(destinationHandler.gatherInitialState(List.of(incrementalDedupStream))); - assertTrue(initialState.isFinalTableEmpty(), "Final table should be empty before T+D"); - - // Instead of using the full T+D transaction, explicitly run with useSafeCasting=false. - final Sql unsafeSql = generator.updateTable(incrementalDedupStream, "", Optional.empty(), false); - destinationHandler.execute(unsafeSql); - - initialState = getOnly(destinationHandler.gatherInitialState(List.of(incrementalDedupStream))); - assertFalse(initialState.isFinalTableEmpty(), "Final table should not be empty after T+D"); - } - - private InitialRawTableStatus getInitialRawTableState(StreamConfig streamConfig) throws Exception { - List> initialStates = - destinationHandler.gatherInitialState(List.of(streamConfig)); - assertEquals(1, initialStates.size()); - return initialStates.getFirst().initialRawTableStatus(); - } - - /** - * Run through some plausible T+D scenarios to verify that we correctly identify the min raw - * timestamp. - */ - @Test - public void minTimestampBehavesCorrectly() throws Exception { - // When the raw table doesn't exist, there are no unprocessed records and no timestamp - assertEquals(new InitialRawTableStatus(false, false, Optional.empty()), getInitialRawTableState(incrementalAppendStream)); - - // When the raw table is empty, there are still no unprocessed records and no timestamp - createRawTable(streamId); - assertEquals(new InitialRawTableStatus(true, false, Optional.empty()), getInitialRawTableState(incrementalAppendStream)); - - // If we insert some raw records with null loaded_at, we should get the min extracted_at - insertRawTableRecords( - streamId, - List.of( - Jsons.deserialize( - """ - { - "_airbyte_raw_id": "899d3bc3-7921-44f0-8517-c748a28fe338", - "_airbyte_extracted_at": "2023-01-01T00:00:00Z", - "_airbyte_data": {} - } - """), - Jsons.deserialize( - """ - { - "_airbyte_raw_id": "47f46eb6-fcae-469c-a7fc-31d4b9ce7474", - "_airbyte_extracted_at": "2023-01-02T00:00:00Z", - "_airbyte_data": {} - } - """))); - InitialRawTableStatus tableState = getInitialRawTableState(incrementalAppendStream); - assertTrue(tableState.hasUnprocessedRecords(), - "When all raw records have null loaded_at, we should recognize that there are unprocessed records"); - assertTrue( - tableState.maxProcessedTimestamp().get().isBefore(Instant.parse("2023-01-01T00:00:00Z")), - "When all raw records have null loaded_at, the min timestamp should be earlier than all of their extracted_at values (2023-01-01). Was actually " - + tableState.maxProcessedTimestamp().get()); - - // Execute T+D to set loaded_at on the records - createFinalTable(incrementalAppendStream, ""); - TypeAndDedupeTransaction.executeTypeAndDedupe(generator, destinationHandler, incrementalAppendStream, Optional.empty(), ""); - - assertEquals( - getInitialRawTableState(incrementalAppendStream), - new InitialRawTableStatus(true, false, Optional.of(Instant.parse("2023-01-02T00:00:00Z"))), - "When all raw records have non-null loaded_at, we should recognize that there are no unprocessed records, and the min timestamp should be equal to the latest extracted_at"); - - // If we insert another raw record with older extracted_at than the typed records, we should fetch a - // timestamp earlier than this new record. - // This emulates a sync inserting some records out of order, running T+D on newer records, inserting - // an older record, and then crashing before it can execute T+D. The next sync should recognize - // that older record as still needing to be processed. - insertRawTableRecords( - streamId, - List.of(Jsons.deserialize( - """ - { - "_airbyte_raw_id": "899d3bc3-7921-44f0-8517-c748a28fe338", - "_airbyte_extracted_at": "2023-01-01T12:00:00Z", - "_airbyte_data": {} - } - """))); - tableState = getInitialRawTableState(incrementalAppendStream); - // this is a pretty confusing pair of assertions. To explain them in more detail: There are three - // records in the raw table: - // * loaded_at not null, extracted_at = 2023-01-01 00:00Z - // * loaded_at is null, extracted_at = 2023-01-01 12:00Z - // * loaded_at not null, extracted_at = 2023-01-02 00:00Z - // We should have a timestamp which is older than the second record, but newer than or equal to - // (i.e. not before) the first record. This allows us to query the raw table using - // `_airbyte_extracted_at > ?`, which will include the second record and exclude the first record. - assertTrue(tableState.hasUnprocessedRecords(), - "When some raw records have null loaded_at, we should recognize that there are unprocessed records"); - assertTrue( - tableState.maxProcessedTimestamp().get().isBefore(Instant.parse("2023-01-01T12:00:00Z")), - "When some raw records have null loaded_at, the min timestamp should be earlier than the oldest unloaded record (2023-01-01 12:00Z). Was actually " - + tableState); - assertFalse( - tableState.maxProcessedTimestamp().get().isBefore(Instant.parse("2023-01-01T00:00:00Z")), - "When some raw records have null loaded_at, the min timestamp should be later than the newest loaded record older than the oldest unloaded record (2023-01-01 00:00Z). Was actually " - + tableState); - } - - /** - * Identical to {@link #allTypes()}, but queries for the min raw timestamp first. This verifies that - * if a previous sync doesn't fully type-and-dedupe a table, we still get those records on the next - * sync. - */ - @Test - public void handlePreexistingRecords() throws Exception { - // Add case-sensitive columnName to test json path querying - incrementalDedupStream.columns().put( - generator.buildColumnId("IamACaseSensitiveColumnName"), - AirbyteProtocolType.STRING); - createRawTable(streamId); - createFinalTable(incrementalDedupStream, ""); - insertRawTableRecords( - streamId, - BaseTypingDedupingTest.readRecords("sqlgenerator/alltypes_inputrecords.jsonl")); - - final InitialRawTableStatus tableState = getInitialRawTableState(incrementalDedupStream); - assertAll( - () -> assertTrue(tableState.hasUnprocessedRecords(), - "After writing some raw records, we should recognize that there are unprocessed records"), - () -> assertTrue(tableState.maxProcessedTimestamp().isPresent(), "After writing some raw records, the min timestamp should be present.")); - - TypeAndDedupeTransaction.executeTypeAndDedupe(generator, destinationHandler, incrementalDedupStream, tableState.maxProcessedTimestamp(), ""); - - verifyRecords( - "sqlgenerator/alltypes_expectedrecords_raw.jsonl", - dumpRawTableRecords(streamId), - "sqlgenerator/alltypes_expectedrecords_final.jsonl", - dumpFinalTableRecords(streamId, "")); - } - - /** - * Identical to {@link #handlePreexistingRecords()}, but queries for the min timestamp before - * inserting any raw records. This emulates a sync starting with an empty table. - */ - @Test - public void handleNoPreexistingRecords() throws Exception { - // Add case-sensitive columnName to test json path querying - incrementalDedupStream.columns().put( - generator.buildColumnId("IamACaseSensitiveColumnName"), - AirbyteProtocolType.STRING); - createRawTable(streamId); - final InitialRawTableStatus tableState = getInitialRawTableState(incrementalDedupStream); - assertAll( - () -> assertFalse(tableState.hasUnprocessedRecords(), "With an empty raw table, we should recognize that there are no unprocessed records"), - () -> assertEquals(Optional.empty(), tableState.maxProcessedTimestamp(), "With an empty raw table, the min timestamp should be empty")); - - createFinalTable(incrementalDedupStream, ""); - insertRawTableRecords( - streamId, - BaseTypingDedupingTest.readRecords("sqlgenerator/alltypes_inputrecords.jsonl")); - - TypeAndDedupeTransaction.executeTypeAndDedupe(generator, destinationHandler, incrementalDedupStream, tableState.maxProcessedTimestamp(), ""); - - verifyRecords( - "sqlgenerator/alltypes_expectedrecords_raw.jsonl", - dumpRawTableRecords(streamId), - "sqlgenerator/alltypes_expectedrecords_final.jsonl", - dumpFinalTableRecords(streamId, "")); - } - - /** - * Verify that we correctly only process raw records with recent extracted_at. In practice, - * destinations should not do this - but their SQL should work correctly. - *

    - * Create two raw records, one with an old extracted_at. Verify that updatedTable only T+Ds the new - * record, and doesn't set loaded_at on the old record. - */ - @Test - public void ignoreOldRawRecords() throws Exception { - createRawTable(streamId); - createFinalTable(incrementalAppendStream, ""); - insertRawTableRecords( - streamId, - List.of( - Jsons.deserialize( - """ - { - "_airbyte_raw_id": "c5bcae50-962e-4b92-b2eb-1659eae31693", - "_airbyte_extracted_at": "2022-01-01T00:00:00Z", - "_airbyte_data": { - "string": "foo" - } - } - """), - Jsons.deserialize( - """ - { - "_airbyte_raw_id": "93f1bdd8-1916-4e6c-94dc-29a5d9701179", - "_airbyte_extracted_at": "2023-01-01T01:00:00Z", - "_airbyte_data": { - "string": "bar" - } - } - """))); - - TypeAndDedupeTransaction.executeTypeAndDedupe(generator, destinationHandler, incrementalAppendStream, - Optional.of(Instant.parse("2023-01-01T00:00:00Z")), ""); - - final List rawRecords = dumpRawTableRecords(streamId); - final List finalRecords = dumpFinalTableRecords(streamId, ""); - assertAll( - () -> assertEquals( - 1, - rawRecords.stream().filter(record -> record.get("_airbyte_loaded_at") == null).count(), - "Raw table should only have non-null loaded_at on the newer record"), - () -> assertEquals(1, finalRecords.size(), "T+D should only execute on the newer record")); - } - - /** - * Test JSON Types encounted for a String Type field. - * - * @throws Exception - */ - @Test - public void jsonStringifyTypes() throws Exception { - createRawTable(streamId); - createFinalTable(incrementalDedupStream, "_foo"); - insertRawTableRecords( - streamId, - BaseTypingDedupingTest.readRecords("sqlgenerator/json_types_in_string_inputrecords.jsonl")); - TypeAndDedupeTransaction.executeTypeAndDedupe(generator, destinationHandler, incrementalDedupStream, Optional.empty(), "_foo"); - verifyRecords( - "sqlgenerator/json_types_in_string_expectedrecords_raw.jsonl", - dumpRawTableRecords(streamId), - "sqlgenerator/json_types_in_string_expectedrecords_final.jsonl", - dumpFinalTableRecords(streamId, "_foo")); - } - - @Test - public void timestampFormats() throws Exception { - createRawTable(streamId); - createFinalTable(incrementalAppendStream, ""); - insertRawTableRecords( - streamId, - BaseTypingDedupingTest.readRecords("sqlgenerator/timestampformats_inputrecords.jsonl")); - - TypeAndDedupeTransaction.executeTypeAndDedupe(generator, destinationHandler, incrementalAppendStream, Optional.empty(), ""); - - DIFFER.diffFinalTableRecords( - BaseTypingDedupingTest.readRecords("sqlgenerator/timestampformats_expectedrecords_final.jsonl"), - dumpFinalTableRecords(streamId, "")); - } - - @Test - public void incrementalDedup() throws Exception { - createRawTable(streamId); - createFinalTable(incrementalDedupStream, ""); - insertRawTableRecords( - streamId, - BaseTypingDedupingTest.readRecords("sqlgenerator/incrementaldedup_inputrecords.jsonl")); - - TypeAndDedupeTransaction.executeTypeAndDedupe(generator, destinationHandler, incrementalDedupStream, Optional.empty(), ""); - - verifyRecords( - "sqlgenerator/incrementaldedup_expectedrecords_raw.jsonl", - dumpRawTableRecords(streamId), - "sqlgenerator/incrementaldedup_expectedrecords_final.jsonl", - dumpFinalTableRecords(streamId, "")); - } - - /** - * We shouldn't crash on a sync with null cursor. Insert two records and verify that we keep the - * record with higher extracted_at. - */ - @Test - public void incrementalDedupNoCursor() throws Exception { - final StreamConfig streamConfig = new StreamConfig( - streamId, - SyncMode.INCREMENTAL, - DestinationSyncMode.APPEND_DEDUP, - primaryKey, - Optional.empty(), - COLUMNS); - createRawTable(streamId); - createFinalTable(streamConfig, ""); - insertRawTableRecords( - streamId, - List.of( - Jsons.deserialize( - """ - { - "_airbyte_raw_id": "c5bcae50-962e-4b92-b2eb-1659eae31693", - "_airbyte_extracted_at": "2023-01-01T00:00:00Z", - "_airbyte_data": { - "id1": 1, - "id2": 100, - "string": "foo" - } - } - """), - Jsons.deserialize( - """ - { - "_airbyte_raw_id": "93f1bdd8-1916-4e6c-94dc-29a5d9701179", - "_airbyte_extracted_at": "2023-01-01T01:00:00Z", - "_airbyte_data": { - "id1": 1, - "id2": 100, - "string": "bar" - } - } - """))); - - TypeAndDedupeTransaction.executeTypeAndDedupe(generator, destinationHandler, streamConfig, Optional.empty(), ""); - - final List actualRawRecords = dumpRawTableRecords(streamId); - final List actualFinalRecords = dumpFinalTableRecords(streamId, ""); - verifyRecordCounts( - 2, - actualRawRecords, - 1, - actualFinalRecords); - assertEquals("bar", actualFinalRecords.get(0).get(generator.buildColumnId("string").name()).asText()); - } - - @Test - public void incrementalAppend() throws Exception { - createRawTable(streamId); - createFinalTable(incrementalAppendStream, ""); - insertRawTableRecords( - streamId, - BaseTypingDedupingTest.readRecords("sqlgenerator/incrementaldedup_inputrecords.jsonl")); - - TypeAndDedupeTransaction.executeTypeAndDedupe(generator, destinationHandler, incrementalAppendStream, Optional.empty(), ""); - - verifyRecordCounts( - 3, - dumpRawTableRecords(streamId), - 3, - dumpFinalTableRecords(streamId, "")); - } - - /** - * Create a nonempty users_final_tmp table. Overwrite users_final from users_final_tmp. Verify that - * users_final now exists and contains nonzero records. - */ - @Test - public void overwriteFinalTable() throws Exception { - createFinalTable(incrementalAppendStream, "_tmp"); - final List records = singletonList(Jsons.deserialize( - """ - { - "_airbyte_raw_id": "4fa4efe2-3097-4464-bd22-11211cc3e15b", - "_airbyte_extracted_at": "2023-01-01T00:00:00Z", - "_airbyte_meta": {} - } - """)); - insertFinalTableRecords( - false, - streamId, - "_tmp", - records); - - final Sql sql = generator.overwriteFinalTable(streamId, "_tmp"); - destinationHandler.execute(sql); - - assertEquals(1, dumpFinalTableRecords(streamId, "").size()); - } - - @Test - public void cdcImmediateDeletion() throws Exception { - createRawTable(streamId); - createFinalTable(cdcIncrementalDedupStream, ""); - insertRawTableRecords( - streamId, - singletonList(Jsons.deserialize( - """ - { - "_airbyte_raw_id": "4fa4efe2-3097-4464-bd22-11211cc3e15b", - "_airbyte_extracted_at": "2023-01-01T00:00:00Z", - "_airbyte_data": { - "id1": 1, - "id2": 100, - "updated_at": "2023-01-01T00:00:00Z", - "_ab_cdc_deleted_at": "2023-01-01T00:01:00Z" - } - } - """))); - - TypeAndDedupeTransaction.executeTypeAndDedupe(generator, destinationHandler, cdcIncrementalDedupStream, Optional.empty(), ""); - - verifyRecordCounts( - 1, - dumpRawTableRecords(streamId), - 0, - dumpFinalTableRecords(streamId, "")); - } - - /** - * Verify that running T+D twice is idempotent. Previously there was a bug where non-dedup syncs - * with an _ab_cdc_deleted_at column would duplicate "deleted" records on each run. - */ - @Test - public void cdcIdempotent() throws Exception { - createRawTable(streamId); - createFinalTable(cdcIncrementalAppendStream, ""); - insertRawTableRecords( - streamId, - singletonList(Jsons.deserialize( - """ - { - "_airbyte_raw_id": "4fa4efe2-3097-4464-bd22-11211cc3e15b", - "_airbyte_extracted_at": "2023-01-01T00:00:00Z", - "_airbyte_data": { - "id1": 1, - "id2": 100, - "updated_at": "2023-01-01T00:00:00Z", - "_ab_cdc_deleted_at": "2023-01-01T00:01:00Z" - } - } - """))); - - // Execute T+D twice - TypeAndDedupeTransaction.executeTypeAndDedupe(generator, destinationHandler, cdcIncrementalAppendStream, Optional.empty(), ""); - TypeAndDedupeTransaction.executeTypeAndDedupe(generator, destinationHandler, cdcIncrementalAppendStream, Optional.empty(), ""); - - verifyRecordCounts( - 1, - dumpRawTableRecords(streamId), - 1, - dumpFinalTableRecords(streamId, "")); - } - - @Test - public void cdcComplexUpdate() throws Exception { - createRawTable(streamId); - createFinalTable(cdcIncrementalDedupStream, ""); - insertRawTableRecords( - streamId, - BaseTypingDedupingTest.readRecords("sqlgenerator/cdcupdate_inputrecords_raw.jsonl")); - insertFinalTableRecords( - true, - streamId, - "", - BaseTypingDedupingTest.readRecords("sqlgenerator/cdcupdate_inputrecords_final.jsonl")); - - TypeAndDedupeTransaction.executeTypeAndDedupe(generator, destinationHandler, cdcIncrementalDedupStream, Optional.empty(), ""); - - verifyRecordCounts( - 11, - dumpRawTableRecords(streamId), - 6, - dumpFinalTableRecords(streamId, "")); - } - - /** - * source operations: - *

      - *
    1. insert id=1 (lsn 10000)
    2. - *
    3. delete id=1 (lsn 10001)
    4. - *
    - *

    - * But the destination writes lsn 10001 before 10000. We should still end up with no records in the - * final table. - *

    - * All records have the same emitted_at timestamp. This means that we live or die purely based on - * our ability to use _ab_cdc_lsn. - */ - @Test - public void testCdcOrdering_updateAfterDelete() throws Exception { - createRawTable(streamId); - createFinalTable(cdcIncrementalDedupStream, ""); - insertRawTableRecords( - streamId, - BaseTypingDedupingTest.readRecords("sqlgenerator/cdcordering_updateafterdelete_inputrecords.jsonl")); - - final InitialRawTableStatus tableState = getInitialRawTableState(cdcIncrementalDedupStream); - TypeAndDedupeTransaction.executeTypeAndDedupe(generator, destinationHandler, cdcIncrementalDedupStream, tableState.maxProcessedTimestamp(), ""); - - verifyRecordCounts( - 2, - dumpRawTableRecords(streamId), - 0, - dumpFinalTableRecords(streamId, "")); - } - - /** - * source operations: - *

      - *
    1. arbitrary history...
    2. - *
    3. delete id=1 (lsn 10001)
    4. - *
    5. reinsert id=1 (lsn 10002)
    6. - *
    - *

    - * But the destination receives LSNs 10002 before 10001. In this case, we should keep the reinserted - * record in the final table. - *

    - * All records have the same emitted_at timestamp. This means that we live or die purely based on - * our ability to use _ab_cdc_lsn. - */ - @Test - public void testCdcOrdering_insertAfterDelete() throws Exception { - createRawTable(streamId); - createFinalTable(cdcIncrementalDedupStream, ""); - insertRawTableRecords( - streamId, - BaseTypingDedupingTest.readRecords("sqlgenerator/cdcordering_insertafterdelete_inputrecords_raw.jsonl")); - insertFinalTableRecords( - true, - streamId, - "", - BaseTypingDedupingTest.readRecords("sqlgenerator/cdcordering_insertafterdelete_inputrecords_final.jsonl")); - - final InitialRawTableStatus tableState = getInitialRawTableState(cdcIncrementalAppendStream); - TypeAndDedupeTransaction.executeTypeAndDedupe(generator, destinationHandler, cdcIncrementalDedupStream, tableState.maxProcessedTimestamp(), ""); - verifyRecordCounts( - 2, - dumpRawTableRecords(streamId), - 1, - dumpFinalTableRecords(streamId, "")); - } - - /** - * Create a table which includes the _ab_cdc_deleted_at column, then soft reset it using the non-cdc - * stream config. Verify that the deleted_at column gets dropped. - */ - @Test - public void softReset() throws Exception { - createRawTable(streamId); - createFinalTable(cdcIncrementalAppendStream, ""); - insertRawTableRecords( - streamId, - singletonList(Jsons.deserialize( - """ - { - "_airbyte_raw_id": "arst", - "_airbyte_extracted_at": "2023-01-01T00:00:00Z", - "_airbyte_loaded_at": "2023-01-01T00:00:00Z", - "_airbyte_data": { - "id1": 1, - "id2": 100, - "_ab_cdc_deleted_at": "2023-01-01T00:01:00Z" - } - } - """))); - insertFinalTableRecords( - true, - streamId, - "", - singletonList(Jsons.deserialize( - """ - { - "_airbyte_raw_id": "arst", - "_airbyte_extracted_at": "2023-01-01T00:00:00Z", - "_airbyte_meta": {}, - "id1": 1, - "id2": 100, - "_ab_cdc_deleted_at": "2023-01-01T00:01:00Z" - } - """))); - - TypeAndDedupeTransaction.executeSoftReset(generator, destinationHandler, incrementalAppendStream); - - final List actualRawRecords = dumpRawTableRecords(streamId); - final List actualFinalRecords = dumpFinalTableRecords(streamId, ""); - assertAll( - () -> assertEquals(1, actualRawRecords.size()), - () -> assertEquals(1, actualFinalRecords.size()), - () -> assertTrue( - actualFinalRecords.stream().noneMatch(record -> record.has("_ab_cdc_deleted_at")), - "_ab_cdc_deleted_at column was expected to be dropped. Actual final table had: " + actualFinalRecords)); - } - - @Test - public void weirdColumnNames() throws Exception { - createRawTable(streamId); - insertRawTableRecords( - streamId, - BaseTypingDedupingTest.readRecords("sqlgenerator/weirdcolumnnames_inputrecords_raw.jsonl")); - final StreamConfig stream = new StreamConfig( - streamId, - SyncMode.INCREMENTAL, - DestinationSyncMode.APPEND_DEDUP, - primaryKey, - Optional.of(cursor), - new LinkedHashMap<>() { - - { - put(generator.buildColumnId("id1"), AirbyteProtocolType.INTEGER); - put(generator.buildColumnId("id2"), AirbyteProtocolType.INTEGER); - put(generator.buildColumnId("updated_at"), AirbyteProtocolType.TIMESTAMP_WITH_TIMEZONE); - put(generator.buildColumnId("$starts_with_dollar_sign"), AirbyteProtocolType.STRING); - put(generator.buildColumnId("includes\"doublequote"), AirbyteProtocolType.STRING); - put(generator.buildColumnId("includes'singlequote"), AirbyteProtocolType.STRING); - put(generator.buildColumnId("includes`backtick"), AirbyteProtocolType.STRING); - put(generator.buildColumnId("includes.period"), AirbyteProtocolType.STRING); - put(generator.buildColumnId("includes$$doubledollar"), AirbyteProtocolType.STRING); - put(generator.buildColumnId("endswithbackslash\\"), AirbyteProtocolType.STRING); - } - - }); - - final Sql createTable = generator.createTable(stream, "", false); - destinationHandler.execute(createTable); - TypeAndDedupeTransaction.executeTypeAndDedupe(generator, destinationHandler, stream, Optional.empty(), ""); - - verifyRecords( - "sqlgenerator/weirdcolumnnames_expectedrecords_raw.jsonl", - dumpRawTableRecords(streamId), - "sqlgenerator/weirdcolumnnames_expectedrecords_final.jsonl", - dumpFinalTableRecords(streamId, "")); - } - - /** - * Verify that we don't crash when there are special characters in the stream namespace, name, - * primary key, or cursor. - */ - @ParameterizedTest - @ValueSource(strings = {"$", "${", "${${", "${foo}", "\"", "'", "`", ".", "$$", "\\", "{", "}"}) - public void noCrashOnSpecialCharacters(final String specialChars) throws Exception { - final String str = specialChars + "_" + namespace + "_" + specialChars; - final StreamId originalStreamId = generator.buildStreamId(str, str, "unused"); - final StreamId modifiedStreamId = buildStreamId( - originalStreamId.finalNamespace(), - originalStreamId.finalName(), - "raw_table"); - final ColumnId columnId = generator.buildColumnId(str); - try { - createNamespace(modifiedStreamId.finalNamespace()); - createRawTable(modifiedStreamId); - insertRawTableRecords( - modifiedStreamId, - List.of(Jsons.jsonNode(Map.of( - "_airbyte_raw_id", "758989f2-b148-4dd3-8754-30d9c17d05fb", - "_airbyte_extracted_at", "2023-01-01T00:00:00Z", - "_airbyte_data", Map.of(str, "bar"))))); - final StreamConfig stream = new StreamConfig( - modifiedStreamId, - SyncMode.INCREMENTAL, - DestinationSyncMode.APPEND_DEDUP, - List.of(columnId), - Optional.of(columnId), - new LinkedHashMap<>() { - - { - put(columnId, AirbyteProtocolType.STRING); - } - - }); - - final Sql createTable = generator.createTable(stream, "", false); - destinationHandler.execute(createTable); - // Not verifying anything about the data; let's just make sure we don't crash. - TypeAndDedupeTransaction.executeTypeAndDedupe(generator, destinationHandler, stream, Optional.empty(), ""); - } finally { - teardownNamespace(modifiedStreamId.finalNamespace()); - } - } - - /** - * Verify column names that are reserved keywords are handled successfully. Each destination should - * always have at least 1 column in the record data that is a reserved keyword. - */ - @Test - public void testReservedKeywords() throws Exception { - createRawTable(streamId); - insertRawTableRecords( - streamId, - BaseTypingDedupingTest.readRecords("sqlgenerator/reservedkeywords_inputrecords_raw.jsonl")); - final StreamConfig stream = new StreamConfig( - streamId, - SyncMode.INCREMENTAL, - DestinationSyncMode.APPEND, - null, - Optional.empty(), - new LinkedHashMap<>() { - - { - put(generator.buildColumnId("current_date"), AirbyteProtocolType.STRING); - put(generator.buildColumnId("join"), AirbyteProtocolType.STRING); - } - - }); - - final Sql createTable = generator.createTable(stream, "", false); - destinationHandler.execute(createTable); - TypeAndDedupeTransaction.executeTypeAndDedupe(generator, destinationHandler, stream, Optional.empty(), ""); - - DIFFER.diffFinalTableRecords( - BaseTypingDedupingTest.readRecords("sqlgenerator/reservedkeywords_expectedrecords_final.jsonl"), - dumpFinalTableRecords(streamId, "")); - } - - /** - * A stream with no columns is weird, but we shouldn't treat it specially in any way. It should - * create a final table as usual, and populate it with the relevant metadata columns. - */ - @Test - public void noColumns() throws Exception { - createRawTable(streamId); - insertRawTableRecords( - streamId, - List.of(Jsons.deserialize( - """ - { - "_airbyte_raw_id": "14ba7c7f-e398-4e69-ac22-28d578400dbc", - "_airbyte_extracted_at": "2023-01-01T00:00:00Z", - "_airbyte_data": {} - } - """))); - final StreamConfig stream = new StreamConfig( - streamId, - SyncMode.INCREMENTAL, - DestinationSyncMode.APPEND, - emptyList(), - Optional.empty(), - new LinkedHashMap<>()); - - final Sql createTable = generator.createTable(stream, "", false); - destinationHandler.execute(createTable); - TypeAndDedupeTransaction.executeTypeAndDedupe(generator, destinationHandler, stream, Optional.empty(), ""); - - verifyRecords( - "sqlgenerator/nocolumns_expectedrecords_raw.jsonl", - dumpRawTableRecords(streamId), - "sqlgenerator/nocolumns_expectedrecords_final.jsonl", - dumpFinalTableRecords(streamId, "")); - } - - @Test - public void testV1V2migration() throws Exception { - // This is maybe a little hacky, but it avoids having to refactor this entire class and subclasses - // for something that is going away - // Add case-sensitive columnName to test json path querying - incrementalDedupStream.columns().put( - generator.buildColumnId("IamACaseSensitiveColumnName"), - AirbyteProtocolType.STRING); - final StreamId v1RawTableStreamId = new StreamId(null, null, streamId.finalNamespace(), "v1_" + streamId.rawName(), null, null); - createV1RawTable(v1RawTableStreamId); - insertV1RawTableRecords(v1RawTableStreamId, BaseTypingDedupingTest.readRecords( - "sqlgenerator/all_types_v1_inputrecords.jsonl")); - final Sql migration = generator.migrateFromV1toV2(streamId, v1RawTableStreamId.rawNamespace(), v1RawTableStreamId.rawName()); - destinationHandler.execute(migration); - final List v1RawRecords = dumpV1RawTableRecords(v1RawTableStreamId); - final List v2RawRecords = dumpRawTableRecords(streamId); - migrationAssertions(v1RawRecords, v2RawRecords); - - // And then run T+D on the migrated raw data - final Sql createTable = generator.createTable(incrementalDedupStream, "", false); - destinationHandler.execute(createTable); - final Sql updateTable = generator.updateTable(incrementalDedupStream, "", Optional.empty(), true); - destinationHandler.execute(updateTable); - verifyRecords( - "sqlgenerator/alltypes_expectedrecords_raw.jsonl", - dumpRawTableRecords(streamId), - "sqlgenerator/alltypes_expectedrecords_final.jsonl", - dumpFinalTableRecords(streamId, "")); - } - - /** - * Sometimes, a sync doesn't delete its soft reset temp table. (it's not entirely clear why this - * happens.) In these cases, the next sync should not crash. - */ - @Test - public void softResetIgnoresPreexistingTempTable() throws Exception { - createRawTable(incrementalDedupStream.id()); - - // Create a soft reset table. Use incremental append mode, in case the destination connector uses - // different - // indexing/partitioning/etc. - final Sql createOldTempTable = generator.createTable(incrementalAppendStream, TypeAndDedupeTransaction.SOFT_RESET_SUFFIX, false); - destinationHandler.execute(createOldTempTable); - - // Execute a soft reset. This should not crash. - TypeAndDedupeTransaction.executeSoftReset(generator, destinationHandler, incrementalAppendStream); - } - - protected void migrationAssertions(final List v1RawRecords, final List v2RawRecords) { - final var v2RecordMap = v2RawRecords.stream().collect(Collectors.toMap( - record -> record.get("_airbyte_raw_id").asText(), - Function.identity())); - assertAll( - () -> assertEquals(6, v1RawRecords.size()), - () -> assertEquals(6, v2RawRecords.size())); - v1RawRecords.forEach(v1Record -> { - final var v1id = v1Record.get("_airbyte_ab_id").asText(); - assertAll( - () -> assertEquals(v1id, v2RecordMap.get(v1id).get("_airbyte_raw_id").asText()), - () -> assertEquals(v1Record.get("_airbyte_emitted_at").asText(), v2RecordMap.get(v1id).get("_airbyte_extracted_at").asText()), - () -> assertNull(v2RecordMap.get(v1id).get("_airbyte_loaded_at"))); - JsonNode originalData = v1Record.get("_airbyte_data"); - if (originalData.isTextual()) { - originalData = Jsons.deserializeExact(originalData.asText()); - } - JsonNode migratedData = v2RecordMap.get(v1id).get("_airbyte_data"); - if (migratedData.isTextual()) { - migratedData = Jsons.deserializeExact(migratedData.asText()); - } - // hacky thing because we only care about the data contents. - // diffRawTableRecords makes some assumptions about the structure of the blob. - DIFFER.diffFinalTableRecords(List.of(originalData), List.of(migratedData)); - }); - } - - protected List dumpV1RawTableRecords(final StreamId streamId) throws Exception { - return dumpRawTableRecords(streamId); - } - - @Test - public void testCreateTableForce() throws Exception { - final Sql createTableNoForce = generator.createTable(incrementalDedupStream, "", false); - final Sql createTableForce = generator.createTable(incrementalDedupStream, "", true); - - destinationHandler.execute(createTableNoForce); - assertThrows(Exception.class, () -> destinationHandler.execute(createTableNoForce)); - // This should not throw an exception - destinationHandler.execute(createTableForce); - // This method call ensures assertion than finalTable exists - getDestinationInitialState(incrementalDedupStream); - } - - @Test - public void testStateHandling() throws Exception { - // Fetch state from an empty destination. This should not throw an error. - final DestinationInitialStatus initialState = - destinationHandler.gatherInitialState(List.of((incrementalDedupStream))).getFirst(); - // The initial state should not need a soft reset. - assertFalse(initialState.destinationState().needsSoftReset(), "Empty state table should have needsSoftReset = false"); - - // Commit a state that now requires a soft reset. - destinationHandler.commitDestinationStates(Map.of( - incrementalDedupStream.id(), - initialState.destinationState().withSoftReset(true))); - final DestinationInitialStatus updatedState = - destinationHandler.gatherInitialState(List.of((incrementalDedupStream))).getFirst(); - // When we re-fetch the state, it should now need a soft reset. - assertTrue(updatedState.destinationState().needsSoftReset(), "After committing an explicit state, expected needsSoftReset = true"); - - // Commit a state belonging to a different stream - destinationHandler.commitDestinationStates(Map.of( - new StreamId(null, null, null, null, null, "some_other_stream"), - initialState.destinationState().withSoftReset(true))); - - // Verify that we can still retrieve the state for the original stream - final DestinationInitialStatus refetchedState = - destinationHandler.gatherInitialState(List.of((incrementalDedupStream))).getFirst(); - // When we re-fetch the state, it should now need a soft reset. - assertTrue(refetchedState.destinationState().needsSoftReset(), "After committing an unrelated state, expected needsSoftReset = true"); - } - - protected void createFinalTable(final StreamConfig stream, final String suffix) throws Exception { - final Sql createTable = generator.createTable(stream, suffix, false); - destinationHandler.execute(createTable); - } - - private void verifyRecords(final String expectedRawRecordsFile, - final List actualRawRecords, - final String expectedFinalRecordsFile, - final List actualFinalRecords) { - assertAll( - () -> DIFFER.diffRawTableRecords( - BaseTypingDedupingTest.readRecords(expectedRawRecordsFile), - actualRawRecords), - () -> assertEquals( - 0, - actualRawRecords.stream() - .filter(record -> !record.hasNonNull("_airbyte_loaded_at")) - .count()), - () -> DIFFER.diffFinalTableRecords( - BaseTypingDedupingTest.readRecords(expectedFinalRecordsFile), - actualFinalRecords)); - } - - private void verifyRecordCounts(final int expectedRawRecords, - final List actualRawRecords, - final int expectedFinalRecords, - final List actualFinalRecords) { - assertAll( - () -> assertEquals( - expectedRawRecords, - actualRawRecords.size(), - "Raw record count was incorrect"), - () -> assertEquals( - 0, - actualRawRecords.stream() - .filter(record -> !record.hasNonNull("_airbyte_loaded_at")) - .count()), - () -> assertEquals( - expectedFinalRecords, - actualFinalRecords.size(), - "Final record count was incorrect")); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/testFixtures/java/io/airbyte/integrations/base/destination/typing_deduping/BaseTypingDedupingTest.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/testFixtures/java/io/airbyte/integrations/base/destination/typing_deduping/BaseTypingDedupingTest.java deleted file mode 100644 index e52c669dc7981..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/testFixtures/java/io/airbyte/integrations/base/destination/typing_deduping/BaseTypingDedupingTest.java +++ /dev/null @@ -1,911 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -import static java.util.stream.Collectors.toList; -import static org.junit.jupiter.api.Assertions.assertAll; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.features.EnvVariableFeatureFlags; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.lang.Exceptions; -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.configoss.WorkerDestinationConfig; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteStream; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.v0.DestinationSyncMode; -import io.airbyte.protocol.models.v0.SyncMode; -import io.airbyte.workers.internal.AirbyteDestination; -import io.airbyte.workers.internal.DefaultAirbyteDestination; -import io.airbyte.workers.process.AirbyteIntegrationLauncher; -import io.airbyte.workers.process.DockerProcessFactory; -import io.airbyte.workers.process.ProcessFactory; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import java.util.concurrent.Callable; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.Executors; -import java.util.function.Function; -import java.util.stream.Stream; -import org.apache.commons.lang3.RandomStringUtils; -import org.apache.commons.lang3.tuple.Pair; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Disabled; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.parallel.Execution; -import org.junit.jupiter.api.parallel.ExecutionMode; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * This is loosely based on standard-destination-tests's DestinationAcceptanceTest class. The - * sync-running code is copy-pasted from there. - *

    - * All tests use a single stream, whose schema is defined in {@code resources/schema.json}. Each - * test case constructs a ConfiguredAirbyteCatalog dynamically. - *

    - * For sync modes which use a primary key, the stream provides a composite key of (id1, id2). For - * sync modes which use a cursor, the stream provides an updated_at field. The stream also has an - * _ab_cdc_deleted_at field. - */ -// If you're running from inside intellij, you must run your specific subclass to get concurrent -// execution. -@Execution(ExecutionMode.CONCURRENT) -public abstract class BaseTypingDedupingTest { - - private static final Logger LOGGER = LoggerFactory.getLogger(BaseTypingDedupingTest.class); - protected static final JsonNode SCHEMA; - static { - try { - SCHEMA = Jsons.deserialize(MoreResources.readResource("dat/schema.json")); - } catch (final IOException e) { - throw new RuntimeException(e); - } - } - protected RecordDiffer DIFFER; - - private String randomSuffix; - private JsonNode config; - protected String streamNamespace; - protected String streamName; - private List streamsToTearDown; - - /** - * @return the docker image to run, e.g. {@code "airbyte/destination-bigquery:dev"}. - */ - protected abstract String getImageName(); - - /** - * Get the destination connector config. Subclasses may use this method for other setup work, e.g. - * opening a connection to the destination. - *

    - * Subclasses should _not_ start testcontainers in this method; that belongs in a BeforeAll method. - * The tests in this class are intended to be run concurrently on a shared database and will not - * interfere with each other. - *

    - * Sublcasses which need access to the config may use {@link #getConfig()}. - */ - protected abstract JsonNode generateConfig() throws Exception; - - /** - * For a given stream, return the records that exist in the destination's raw table. Each record - * must be in the format {"_airbyte_raw_id": "...", "_airbyte_extracted_at": "...", - * "_airbyte_loaded_at": "...", "_airbyte_data": {fields...}}. - *

    - * The {@code _airbyte_data} column must be an - * {@link com.fasterxml.jackson.databind.node.ObjectNode} (i.e. it cannot be a string value). - *

    - * streamNamespace may be null, in which case you should query from the default namespace. - */ - protected abstract List dumpRawTableRecords(String streamNamespace, String streamName) throws Exception; - - /** - * Utility method for tests to check if table exists - * - * @param streamNamespace - * @param streamName - * @return - * @throws Exception - */ - protected boolean checkTableExists(final String streamNamespace, final String streamName) { - // Implementation is specific to destination's tests. - return true; - } - - /** - * For a given stream, return the records that exist in the destination's final table. Each record - * must be in the format {"_airbyte_raw_id": "...", "_airbyte_extracted_at": "...", "_airbyte_meta": - * {...}, "field1": ..., "field2": ..., ...}. If the destination renames (e.g. upcases) the airbyte - * fields, this method must revert that naming to use the exact strings "_airbyte_raw_id", etc. - *

    - * For JSON-valued columns, there is some nuance: a SQL null should be represented as a missing - * entry, whereas a JSON null should be represented as a - * {@link com.fasterxml.jackson.databind.node.NullNode}. For example, in the JSON blob {"name": - * null}, the `name` field is a JSON null, and the `address` field is a SQL null. - *

    - * The corresponding SQL looks like - * {@code INSERT INTO ... (name, address) VALUES ('null' :: jsonb, NULL)}. - *

    - * streamNamespace may be null, in which case you should query from the default namespace. - */ - protected abstract List dumpFinalTableRecords(String streamNamespace, String streamName) throws Exception; - - /** - * Delete any resources in the destination associated with this stream AND its namespace. We need - * this because we write raw tables to a shared {@code airbyte} namespace, which we can't drop - * wholesale. Must handle the case where the table/namespace doesn't exist (e.g. if the connector - * crashed without writing any data). - *

    - * In general, this should resemble - * {@code DROP TABLE IF EXISTS airbyte._; DROP SCHEMA IF EXISTS }. - */ - protected abstract void teardownStreamAndNamespace(String streamNamespace, String streamName) throws Exception; - - protected abstract SqlGenerator getSqlGenerator(); - - /** - * Destinations which need to clean up resources after an entire test finishes should override this - * method. For example, if you want to gracefully close a database connection, you should do that - * here. - */ - protected void globalTeardown() throws Exception {} - - /** - * Conceptually identical to {@link #getFinalMetadataColumnNames()}, but for the raw table. - */ - protected Map getRawMetadataColumnNames() { - return new HashMap<>(); - } - - /** - * If the destination connector uses a nonstandard schema for the final table, override this method. - * For example, destination-snowflake upcases all column names in the final tables. - *

    - * You only need to add mappings for the airbyte metadata column names (_airbyte_raw_id, - * _airbyte_extracted_at, etc.). The test framework automatically populates mappings for the primary - * key and cursor using the SqlGenerator. - */ - protected Map getFinalMetadataColumnNames() { - return new HashMap<>(); - } - - /** - * @return A suffix which is different for each concurrent test, but stable within a single test. - */ - protected synchronized String getUniqueSuffix() { - if (randomSuffix == null) { - randomSuffix = "_" + RandomStringUtils.randomAlphabetic(10).toLowerCase(); - } - return randomSuffix; - } - - protected JsonNode getConfig() { - return config; - } - - /** - * Override this method only when skipping T&D and only compare raw tables and skip final table - * comparison. For every other case it should always return false. - * - * @return - */ - protected boolean disableFinalTableComparison() { - return false; - } - - @BeforeEach - public void setup() throws Exception { - config = generateConfig(); - streamNamespace = "typing_deduping_test" + getUniqueSuffix(); - streamName = "test_stream" + getUniqueSuffix(); - streamsToTearDown = new ArrayList<>(); - - final SqlGenerator generator = getSqlGenerator(); - DIFFER = new RecordDiffer( - getRawMetadataColumnNames(), - getFinalMetadataColumnNames(), - Pair.of(generator.buildColumnId("id1"), AirbyteProtocolType.INTEGER), - Pair.of(generator.buildColumnId("id2"), AirbyteProtocolType.INTEGER), - Pair.of(generator.buildColumnId("updated_at"), AirbyteProtocolType.TIMESTAMP_WITH_TIMEZONE), - Pair.of(generator.buildColumnId("old_cursor"), AirbyteProtocolType.INTEGER)); - - LOGGER.info("Using stream namespace {} and name {}", streamNamespace, streamName); - } - - @AfterEach - public void teardown() throws Exception { - for (final AirbyteStreamNameNamespacePair streamId : streamsToTearDown) { - teardownStreamAndNamespace(streamId.getNamespace(), streamId.getName()); - } - globalTeardown(); - } - - /** - * Starting with an empty destination, execute a full refresh overwrite sync. Verify that the - * records are written to the destination table. Then run a second sync, and verify that the records - * are overwritten. - */ - @Test - public void fullRefreshOverwrite() throws Exception { - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withStreams(List.of( - new ConfiguredAirbyteStream() - .withSyncMode(SyncMode.FULL_REFRESH) - .withDestinationSyncMode(DestinationSyncMode.OVERWRITE) - .withStream(new AirbyteStream() - .withNamespace(streamNamespace) - .withName(streamName) - .withJsonSchema(SCHEMA)))); - - // First sync - final List messages1 = readMessages("dat/sync1_messages.jsonl"); - - runSync(catalog, messages1); - - final List expectedRawRecords1 = readRecords("dat/sync1_expectedrecords_raw.jsonl"); - final List expectedFinalRecords1 = readRecords("dat/sync1_expectedrecords_nondedup_final.jsonl"); - verifySyncResult(expectedRawRecords1, expectedFinalRecords1, disableFinalTableComparison()); - - // Second sync - final List messages2 = readMessages("dat/sync2_messages.jsonl"); - - runSync(catalog, messages2); - - final List expectedRawRecords2 = readRecords("dat/sync2_expectedrecords_fullrefresh_overwrite_raw.jsonl"); - final List expectedFinalRecords2 = readRecords("dat/sync2_expectedrecords_fullrefresh_overwrite_final.jsonl"); - verifySyncResult(expectedRawRecords2, expectedFinalRecords2, disableFinalTableComparison()); - } - - /** - * Starting with an empty destination, execute a full refresh append sync. Verify that the records - * are written to the destination table. Then run a second sync, and verify that the old and new - * records are all present. - */ - @Test - public void fullRefreshAppend() throws Exception { - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withStreams(List.of( - new ConfiguredAirbyteStream() - .withSyncMode(SyncMode.FULL_REFRESH) - .withDestinationSyncMode(DestinationSyncMode.APPEND) - .withStream(new AirbyteStream() - .withNamespace(streamNamespace) - .withName(streamName) - .withJsonSchema(SCHEMA)))); - - // First sync - final List messages1 = readMessages("dat/sync1_messages.jsonl"); - - runSync(catalog, messages1); - - final List expectedRawRecords1 = readRecords("dat/sync1_expectedrecords_raw.jsonl"); - final List expectedFinalRecords1 = readRecords("dat/sync1_expectedrecords_nondedup_final.jsonl"); - verifySyncResult(expectedRawRecords1, expectedFinalRecords1, disableFinalTableComparison()); - - // Second sync - final List messages2 = readMessages("dat/sync2_messages.jsonl"); - - runSync(catalog, messages2); - - final List expectedRawRecords2 = readRecords("dat/sync2_expectedrecords_raw.jsonl"); - final List expectedFinalRecords2 = readRecords("dat/sync2_expectedrecords_fullrefresh_append_final.jsonl"); - verifySyncResult(expectedRawRecords2, expectedFinalRecords2, disableFinalTableComparison()); - } - - /** - * Starting with an empty destination, execute an incremental append sync. - *

    - * This is (not so secretly) identical to {@link #fullRefreshAppend()}, and uses the same set of - * expected records. Incremental as a concept only exists in the source. From the destination's - * perspective, we only care about the destination sync mode. - */ - @Test - public void incrementalAppend() throws Exception { - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withStreams(List.of( - new ConfiguredAirbyteStream() - // These two lines are literally the only difference between this test and fullRefreshAppend - .withSyncMode(SyncMode.INCREMENTAL) - .withCursorField(List.of("updated_at")) - .withDestinationSyncMode(DestinationSyncMode.APPEND) - .withStream(new AirbyteStream() - .withNamespace(streamNamespace) - .withName(streamName) - .withJsonSchema(SCHEMA)))); - - // First sync - final List messages1 = readMessages("dat/sync1_messages.jsonl"); - - runSync(catalog, messages1); - - final List expectedRawRecords1 = readRecords("dat/sync1_expectedrecords_raw.jsonl"); - final List expectedFinalRecords1 = readRecords("dat/sync1_expectedrecords_nondedup_final.jsonl"); - verifySyncResult(expectedRawRecords1, expectedFinalRecords1, disableFinalTableComparison()); - - // Second sync - final List messages2 = readMessages("dat/sync2_messages.jsonl"); - - runSync(catalog, messages2); - - final List expectedRawRecords2 = readRecords("dat/sync2_expectedrecords_raw.jsonl"); - final List expectedFinalRecords2 = readRecords("dat/sync2_expectedrecords_fullrefresh_append_final.jsonl"); - verifySyncResult(expectedRawRecords2, expectedFinalRecords2, disableFinalTableComparison()); - } - - /** - * Starting with an empty destination, execute an incremental dedup sync. Verify that the records - * are written to the destination table. Then run a second sync, and verify that the raw/final - * tables contain the correct records. - */ - @Test - public void incrementalDedup() throws Exception { - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withStreams(List.of( - new ConfiguredAirbyteStream() - .withSyncMode(SyncMode.INCREMENTAL) - .withCursorField(List.of("updated_at")) - .withDestinationSyncMode(DestinationSyncMode.APPEND_DEDUP) - .withPrimaryKey(List.of(List.of("id1"), List.of("id2"))) - .withStream(new AirbyteStream() - .withNamespace(streamNamespace) - .withName(streamName) - .withJsonSchema(SCHEMA)))); - - // First sync - final List messages1 = readMessages("dat/sync1_messages.jsonl"); - - runSync(catalog, messages1); - - final List expectedRawRecords1 = readRecords("dat/sync1_expectedrecords_raw.jsonl"); - final List expectedFinalRecords1 = readRecords("dat/sync1_expectedrecords_dedup_final.jsonl"); - verifySyncResult(expectedRawRecords1, expectedFinalRecords1, disableFinalTableComparison()); - - // Second sync - final List messages2 = readMessages("dat/sync2_messages.jsonl"); - - runSync(catalog, messages2); - - final List expectedRawRecords2 = readRecords("dat/sync2_expectedrecords_raw.jsonl"); - final List expectedFinalRecords2 = readRecords("dat/sync2_expectedrecords_incremental_dedup_final.jsonl"); - verifySyncResult(expectedRawRecords2, expectedFinalRecords2, disableFinalTableComparison()); - } - - /** - * Run the first sync from {@link #incrementalDedup()}, but repeat the messages many times. Some - * destinations behave differently with small vs large record count, so this test case tries to - * exercise that behavior. - */ - @Test - public void largeDedupSync() throws Exception { - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withStreams(List.of( - new ConfiguredAirbyteStream() - .withSyncMode(SyncMode.INCREMENTAL) - .withCursorField(List.of("updated_at")) - .withDestinationSyncMode(DestinationSyncMode.APPEND_DEDUP) - .withPrimaryKey(List.of(List.of("id1"), List.of("id2"))) - .withStream(new AirbyteStream() - .withNamespace(streamNamespace) - .withName(streamName) - .withJsonSchema(SCHEMA)))); - - // Run a sync with 25K copies of the input messages - final List messages1 = repeatList(25_000, readMessages("dat/sync1_messages.jsonl")); - - runSync(catalog, messages1); - - // The raw table will contain 25K copies of each record - final List expectedRawRecords1 = repeatList(25_000, readRecords("dat/sync1_expectedrecords_raw.jsonl")); - // But the final table should be fully deduped - final List expectedFinalRecords1 = readRecords("dat/sync1_expectedrecords_dedup_final.jsonl"); - verifySyncResult(expectedRawRecords1, expectedFinalRecords1, disableFinalTableComparison()); - } - - /** - * Identical to {@link #incrementalDedup()}, except that the stream has no namespace. - */ - @Test - public void incrementalDedupDefaultNamespace() throws Exception { - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withStreams(List.of( - new ConfiguredAirbyteStream() - .withSyncMode(SyncMode.INCREMENTAL) - .withCursorField(List.of("updated_at")) - .withDestinationSyncMode(DestinationSyncMode.APPEND_DEDUP) - .withPrimaryKey(List.of(List.of("id1"), List.of("id2"))) - .withStream(new AirbyteStream() - // NB: we don't call `withNamespace` here - .withName(streamName) - .withJsonSchema(SCHEMA)))); - - // First sync - final List messages1 = readMessages("dat/sync1_messages.jsonl", null, streamName); - - runSync(catalog, messages1); - - final List expectedRawRecords1 = readRecords("dat/sync1_expectedrecords_raw.jsonl"); - final List expectedFinalRecords1 = readRecords("dat/sync1_expectedrecords_dedup_final.jsonl"); - verifySyncResult(expectedRawRecords1, expectedFinalRecords1, null, streamName, disableFinalTableComparison()); - - // Second sync - final List messages2 = readMessages("dat/sync2_messages.jsonl", null, streamName); - - runSync(catalog, messages2); - - final List expectedRawRecords2 = readRecords("dat/sync2_expectedrecords_raw.jsonl"); - final List expectedFinalRecords2 = readRecords("dat/sync2_expectedrecords_incremental_dedup_final.jsonl"); - verifySyncResult(expectedRawRecords2, expectedFinalRecords2, null, streamName, disableFinalTableComparison()); - } - - @Test - @Disabled("Not yet implemented") - public void testLineBreakCharacters() throws Exception { - // TODO verify that we can handle strings with interesting characters - // build an airbyterecordmessage using something like this, and add it to the input messages: - Jsons.jsonNode(ImmutableMap.builder() - .put("id", 1) - .put("currency", "USD\u2028") - .put("date", "2020-03-\n31T00:00:00Z\r") - // TODO(sherifnada) hack: write decimals with sigfigs because Snowflake stores 10.1 as "10" which - // fails destination tests - .put("HKD", 10.1) - .put("NZD", 700.1) - .build()); - } - - /** - * Run a sync, then remove the {@code name} column from the schema and run a second sync. Verify - * that the final table doesn't contain the `name` column after the second sync. - */ - @Test - public void testIncrementalSyncDropOneColumn() throws Exception { - final AirbyteStream stream = new AirbyteStream() - .withNamespace(streamNamespace) - .withName(streamName) - .withJsonSchema(SCHEMA); - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withStreams(List.of( - new ConfiguredAirbyteStream() - .withSyncMode(SyncMode.INCREMENTAL) - .withCursorField(List.of("updated_at")) - .withDestinationSyncMode(DestinationSyncMode.APPEND) - .withStream(stream))); - - // First sync - final List messages1 = readMessages("dat/sync1_messages.jsonl"); - - runSync(catalog, messages1); - - final List expectedRawRecords1 = readRecords("dat/sync1_expectedrecords_raw.jsonl"); - final List expectedFinalRecords1 = readRecords("dat/sync1_expectedrecords_nondedup_final.jsonl"); - verifySyncResult(expectedRawRecords1, expectedFinalRecords1, disableFinalTableComparison()); - - // Second sync - final List messages2 = readMessages("dat/sync2_messages.jsonl"); - final JsonNode trimmedSchema = SCHEMA.deepCopy(); - ((ObjectNode) trimmedSchema.get("properties")).remove("name"); - stream.setJsonSchema(trimmedSchema); - - runSync(catalog, messages2); - - // The raw data is unaffected by the schema, but the final table should not have a `name` column. - final List expectedRawRecords2 = readRecords("dat/sync2_expectedrecords_raw.jsonl"); - final List expectedFinalRecords2 = readRecords("dat/sync2_expectedrecords_fullrefresh_append_final.jsonl").stream() - .peek(record -> ((ObjectNode) record).remove(getSqlGenerator().buildColumnId("name").name())) - .toList(); - verifySyncResult(expectedRawRecords2, expectedFinalRecords2, disableFinalTableComparison()); - } - - @Test - @Disabled("Not yet implemented") - public void testSyncUsesAirbyteStreamNamespaceIfNotNull() throws Exception { - // TODO duplicate this test for each sync mode. Run 1st+2nd syncs using a stream with null - // namespace: - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withStreams(List.of( - new ConfiguredAirbyteStream() - .withSyncMode(SyncMode.FULL_REFRESH) - .withCursorField(List.of("updated_at")) - .withDestinationSyncMode(DestinationSyncMode.OVERWRITE) - .withPrimaryKey(List.of(List.of("id1"), List.of("id2"))) - .withStream(new AirbyteStream() - .withNamespace(null) - .withName(streamName) - .withJsonSchema(SCHEMA)))); - } - - // TODO duplicate this test for each sync mode. Run 1st+2nd syncs using two streams with the same - // name but different namespace - // TODO maybe we don't even need the single-stream versions... - /** - * Identical to {@link #incrementalDedup()}, except there are two streams with the same name and - * different namespace. - */ - @Test - public void incrementalDedupIdenticalName() throws Exception { - final String namespace1 = streamNamespace + "_1"; - final String namespace2 = streamNamespace + "_2"; - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withStreams(List.of( - new ConfiguredAirbyteStream() - .withSyncMode(SyncMode.INCREMENTAL) - .withCursorField(List.of("updated_at")) - .withDestinationSyncMode(DestinationSyncMode.APPEND_DEDUP) - .withPrimaryKey(List.of(List.of("id1"), List.of("id2"))) - .withStream(new AirbyteStream() - .withNamespace(namespace1) - .withName(streamName) - .withJsonSchema(SCHEMA)), - new ConfiguredAirbyteStream() - .withSyncMode(SyncMode.INCREMENTAL) - .withCursorField(List.of("updated_at")) - .withDestinationSyncMode(DestinationSyncMode.APPEND_DEDUP) - .withPrimaryKey(List.of(List.of("id1"), List.of("id2"))) - .withStream(new AirbyteStream() - .withNamespace(namespace2) - .withName(streamName) - .withJsonSchema(SCHEMA)))); - - // First sync - final List messages1 = Stream.concat( - readMessages("dat/sync1_messages.jsonl", namespace1, streamName).stream(), - readMessages("dat/sync1_messages2.jsonl", namespace2, streamName).stream()).toList(); - - runSync(catalog, messages1); - - verifySyncResult( - readRecords("dat/sync1_expectedrecords_raw.jsonl"), - readRecords("dat/sync1_expectedrecords_dedup_final.jsonl"), - namespace1, - streamName, disableFinalTableComparison()); - verifySyncResult( - readRecords("dat/sync1_expectedrecords_raw2.jsonl"), - readRecords("dat/sync1_expectedrecords_dedup_final2.jsonl"), - namespace2, - streamName, disableFinalTableComparison()); - - // Second sync - final List messages2 = Stream.concat( - readMessages("dat/sync2_messages.jsonl", namespace1, streamName).stream(), - readMessages("dat/sync2_messages2.jsonl", namespace2, streamName).stream()).toList(); - - runSync(catalog, messages2); - - verifySyncResult( - readRecords("dat/sync2_expectedrecords_raw.jsonl"), - readRecords("dat/sync2_expectedrecords_incremental_dedup_final.jsonl"), - namespace1, - streamName, disableFinalTableComparison()); - verifySyncResult( - readRecords("dat/sync2_expectedrecords_raw2.jsonl"), - readRecords("dat/sync2_expectedrecords_incremental_dedup_final2.jsonl"), - namespace2, - streamName, disableFinalTableComparison()); - } - - /** - * Run two syncs at the same time. They each have one stream, which has the same name for both syncs - * but different namespace. This should work fine. This test is similar to - * {@link #incrementalDedupIdenticalName()}, but uses two separate syncs instead of one sync with - * two streams. - *

    - * Note that destination stdout is a bit misleading: The two syncs' stdout _should_ be interleaved, - * but we're just dumping the entire sync1 stdout, and then the entire sync2 stdout. - */ - @Test - public void identicalNameSimultaneousSync() throws Exception { - final String namespace1 = streamNamespace + "_1"; - final ConfiguredAirbyteCatalog catalog1 = new ConfiguredAirbyteCatalog().withStreams(List.of( - new ConfiguredAirbyteStream() - .withSyncMode(SyncMode.INCREMENTAL) - .withCursorField(List.of("updated_at")) - .withDestinationSyncMode(DestinationSyncMode.APPEND_DEDUP) - .withPrimaryKey(List.of(List.of("id1"), List.of("id2"))) - .withStream(new AirbyteStream() - .withNamespace(namespace1) - .withName(streamName) - .withJsonSchema(SCHEMA)))); - - final String namespace2 = streamNamespace + "_2"; - final ConfiguredAirbyteCatalog catalog2 = new ConfiguredAirbyteCatalog().withStreams(List.of( - new ConfiguredAirbyteStream() - .withSyncMode(SyncMode.INCREMENTAL) - .withCursorField(List.of("updated_at")) - .withDestinationSyncMode(DestinationSyncMode.APPEND_DEDUP) - .withPrimaryKey(List.of(List.of("id1"), List.of("id2"))) - .withStream(new AirbyteStream() - .withNamespace(namespace2) - .withName(streamName) - .withJsonSchema(SCHEMA)))); - - final List messages1 = readMessages("dat/sync1_messages.jsonl", namespace1, streamName); - final List messages2 = readMessages("dat/sync1_messages2.jsonl", namespace2, streamName); - - // Start two concurrent syncs - final AirbyteDestination sync1 = startSync(catalog1); - final AirbyteDestination sync2 = startSync(catalog2); - CompletableFuture> outFuture1 = destinationOutputFuture(sync1); - CompletableFuture> outFuture2 = destinationOutputFuture(sync2); - - // Write some messages to both syncs. Write a lot of data to sync 2 to try and force a flush. - pushMessages(messages1, sync1); - for (int i = 0; i < 100_000; i++) { - pushMessages(messages2, sync2); - } - endSync(sync1, outFuture1); - // Write some more messages to the second sync. It should not be affected by the first sync's - // shutdown. - for (int i = 0; i < 100_000; i++) { - pushMessages(messages2, sync2); - } - endSync(sync2, outFuture2); - - // For simplicity, don't verify the raw table. Assume that if the final table is correct, then - // the raw data is correct. This is generally a safe assumption. - assertAll( - () -> DIFFER.diffFinalTableRecords( - readRecords("dat/sync1_expectedrecords_dedup_final.jsonl"), - dumpFinalTableRecords(namespace1, streamName)), - () -> DIFFER.diffFinalTableRecords( - readRecords("dat/sync1_expectedrecords_dedup_final2.jsonl"), - dumpFinalTableRecords(namespace2, streamName))); - } - - @Test - @Disabled("Not yet implemented") - public void testSyncNotFailsWithNewFields() throws Exception { - // TODO duplicate this test for each sync mode. Run a sync, then add a new field to the schema, then - // run another sync - // We might want to write a test that verifies more general schema evolution (e.g. all valid - // evolutions) - } - - /** - * Change the cursor column in the second sync to a column that doesn't exist in the first sync. - * Verify that we overwrite everything correctly. - *

    - * This essentially verifies that the destination connector correctly recognizes NULL cursors as - * older than non-NULL cursors. - */ - @Test - public void incrementalDedupChangeCursor() throws Exception { - final JsonNode mangledSchema = SCHEMA.deepCopy(); - ((ObjectNode) mangledSchema.get("properties")).remove("updated_at"); - ((ObjectNode) mangledSchema.get("properties")).set( - "old_cursor", - Jsons.deserialize( - """ - {"type": "integer"} - """)); - final ConfiguredAirbyteStream configuredStream = new ConfiguredAirbyteStream() - .withSyncMode(SyncMode.INCREMENTAL) - .withCursorField(List.of("old_cursor")) - .withDestinationSyncMode(DestinationSyncMode.APPEND_DEDUP) - .withPrimaryKey(List.of(List.of("id1"), List.of("id2"))) - .withStream(new AirbyteStream() - .withNamespace(streamNamespace) - .withName(streamName) - .withJsonSchema(mangledSchema)); - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withStreams(List.of(configuredStream)); - - // First sync - final List messages1 = readMessages("dat/sync1_cursorchange_messages.jsonl"); - - runSync(catalog, messages1); - - final List expectedRawRecords1 = readRecords("dat/sync1_cursorchange_expectedrecords_dedup_raw.jsonl"); - final List expectedFinalRecords1 = readRecords("dat/sync1_cursorchange_expectedrecords_dedup_final.jsonl"); - verifySyncResult(expectedRawRecords1, expectedFinalRecords1, disableFinalTableComparison()); - - // Second sync - final List messages2 = readMessages("dat/sync2_messages.jsonl"); - configuredStream.getStream().setJsonSchema(SCHEMA); - configuredStream.setCursorField(List.of("updated_at")); - - runSync(catalog, messages2); - - final List expectedRawRecords2 = readRecords("dat/sync2_cursorchange_expectedrecords_incremental_dedup_raw.jsonl"); - final List expectedFinalRecords2 = readRecords("dat/sync2_cursorchange_expectedrecords_incremental_dedup_final.jsonl"); - verifySyncResult(expectedRawRecords2, expectedFinalRecords2, disableFinalTableComparison()); - } - - @Test - @Disabled("Not yet implemented") - public void testSyncWithLargeRecordBatch() throws Exception { - // TODO duplicate this test for each sync mode. Run a single sync with many records - /* - * copied from DATs: This serves to test MSSQL 2100 limit parameters in a single query. this means - * that for Airbyte insert data need to limit to ~ 700 records (3 columns for the raw tables) = 2100 - * params - * - * this maybe needs configuration per destination to specify that limit? - */ - } - - @Test - @Disabled("Not yet implemented") - public void testDataTypes() throws Exception { - // TODO duplicate this test for each sync mode. See DataTypeTestArgumentProvider for what this test - // does in DAT-land - // we probably don't want to do the exact same thing, but the general spirit of testing a wide range - // of values for every data type is approximately correct - // this test probably needs some configuration per destination to specify what values are supported? - } - - private List repeatList(final int n, final List list) { - return Collections - .nCopies(n, list) - .stream() - .flatMap(List::stream) - .collect(toList()); - } - - protected void verifySyncResult(final List expectedRawRecords, - final List expectedFinalRecords, - final boolean disableFinalTableComparison) - throws Exception { - verifySyncResult(expectedRawRecords, expectedFinalRecords, streamNamespace, streamName, disableFinalTableComparison); - } - - private void verifySyncResult(final List expectedRawRecords, - final List expectedFinalRecords, - final String streamNamespace, - final String streamName, - final boolean disableFinalTableComparison) - throws Exception { - final List actualRawRecords = dumpRawTableRecords(streamNamespace, streamName); - if (disableFinalTableComparison) { - DIFFER.diffRawTableRecords(expectedRawRecords, actualRawRecords); - } else { - final List actualFinalRecords = dumpFinalTableRecords(streamNamespace, streamName); - DIFFER.verifySyncResult(expectedRawRecords, actualRawRecords, expectedFinalRecords, actualFinalRecords); - } - } - - public static List readRecords(final String filename) throws IOException { - return MoreResources.readResource(filename).lines() - .map(String::trim) - .filter(line -> !line.isEmpty()) - .filter(line -> !line.startsWith("//")) - .map(Jsons::deserializeExact) - .toList(); - } - - protected List readMessages(final String filename) throws IOException { - return readMessages(filename, streamNamespace, streamName); - } - - protected static List readMessages(final String filename, final String streamNamespace, final String streamName) - throws IOException { - return readRecords(filename).stream() - .map(record -> Jsons.convertValue(record, AirbyteMessage.class)) - .peek(message -> { - message.getRecord().setNamespace(streamNamespace); - message.getRecord().setStream(streamName); - }).toList(); - } - - /* - * !!!!!! WARNING !!!!!! The code below was mostly copypasted from DestinationAcceptanceTest. If you - * make edits here, you probably want to also edit there. - */ - - protected void runSync(final ConfiguredAirbyteCatalog catalog, final List messages) throws Exception { - runSync(catalog, messages, getImageName()); - } - - protected void runSync(final ConfiguredAirbyteCatalog catalog, final List messages, final String imageName) throws Exception { - runSync(catalog, messages, imageName, Function.identity()); - } - - protected void runSync(final ConfiguredAirbyteCatalog catalog, - final List messages, - final String imageName, - final Function configTransformer) - throws Exception { - final AirbyteDestination destination = startSync(catalog, imageName, configTransformer); - final CompletableFuture> outputFuture = destinationOutputFuture(destination); - pushMessages(messages, destination); - endSync(destination, outputFuture); - } - - // In the background, read messages from the destination until it terminates. We need to clear - // stdout in real time, to prevent the buffer from filling up and blocking the destination. - private CompletableFuture> destinationOutputFuture(final AirbyteDestination destination) { - final CompletableFuture> outputFuture = new CompletableFuture<>(); - Executors.newSingleThreadExecutor().submit((Callable) () -> { - final List destinationMessages = new ArrayList<>(); - while (!destination.isFinished()) { - // attemptRead isn't threadsafe, we read stdout fully here. - // i.e. we shouldn't call attemptRead anywhere else. - destination.attemptRead().ifPresent(destinationMessages::add); - } - outputFuture.complete(destinationMessages); - return null; - }); - return outputFuture; - } - - protected AirbyteDestination startSync(final ConfiguredAirbyteCatalog catalog) throws Exception { - return startSync(catalog, getImageName()); - } - - protected AirbyteDestination startSync(final ConfiguredAirbyteCatalog catalog, final String imageName) throws Exception { - return startSync(catalog, imageName, Function.identity()); - } - - /** - * - * @param catalog - * @param imageName - * @param configTransformer - test specific config overrides or additions can be performed with this - * function - * @return - * @throws Exception - */ - protected AirbyteDestination startSync(final ConfiguredAirbyteCatalog catalog, - final String imageName, - final Function configTransformer) - throws Exception { - synchronized (this) { - catalog.getStreams().forEach(s -> streamsToTearDown.add(AirbyteStreamNameNamespacePair.fromAirbyteStream(s.getStream()))); - } - - final Path testDir = Path.of("/tmp/airbyte_tests/"); - Files.createDirectories(testDir); - final Path workspaceRoot = Files.createTempDirectory(testDir, "test"); - final Path jobRoot = Files.createDirectories(Path.of(workspaceRoot.toString(), "job")); - final Path localRoot = Files.createTempDirectory(testDir, "output"); - final ProcessFactory processFactory = new DockerProcessFactory( - workspaceRoot, - workspaceRoot.toString(), - localRoot.toString(), - "host", - Collections.emptyMap()); - final JsonNode transformedConfig = configTransformer.apply(config); - final WorkerDestinationConfig destinationConfig = new WorkerDestinationConfig() - .withConnectionId(UUID.randomUUID()) - .withCatalog(convertProtocolObject(catalog, io.airbyte.protocol.models.ConfiguredAirbyteCatalog.class)) - .withDestinationConnectionConfiguration(transformedConfig); - - final AirbyteDestination destination = new DefaultAirbyteDestination(new AirbyteIntegrationLauncher( - "0", - 0, - imageName, - processFactory, - null, - null, - false, - new EnvVariableFeatureFlags())); - - destination.start(destinationConfig, jobRoot, Collections.emptyMap()); - - return destination; - } - - protected static void pushMessages(final List messages, final AirbyteDestination destination) { - messages.forEach( - message -> Exceptions.toRuntime(() -> destination.accept(convertProtocolObject(message, io.airbyte.protocol.models.AirbyteMessage.class)))); - } - - protected void endSync(final AirbyteDestination destination, - final CompletableFuture> destinationOutputFuture) - throws Exception { - destination.notifyEndOfInput(); - // TODO Eventually we'll want to somehow extract the state messages while a sync is running, to - // verify checkpointing. - destinationOutputFuture.join(); - destination.close(); - } - - private static V0 convertProtocolObject(final V1 v1, final Class klass) { - return Jsons.object(Jsons.jsonNode(v1), klass); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/testFixtures/java/io/airbyte/integrations/base/destination/typing_deduping/RecordDiffer.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/testFixtures/java/io/airbyte/integrations/base/destination/typing_deduping/RecordDiffer.java deleted file mode 100644 index e4cea52498a81..0000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/testFixtures/java/io/airbyte/integrations/base/destination/typing_deduping/RecordDiffer.java +++ /dev/null @@ -1,459 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -import static java.util.stream.Collectors.toList; -import static org.junit.jupiter.api.Assertions.assertAll; -import static org.junit.jupiter.api.Assertions.fail; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ArrayNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.google.common.collect.Streams; -import io.airbyte.commons.json.Jsons; -import java.math.BigDecimal; -import java.time.Instant; -import java.time.LocalDate; -import java.time.LocalDateTime; -import java.time.LocalTime; -import java.time.OffsetTime; -import java.time.ZoneOffset; -import java.util.Arrays; -import java.util.Comparator; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.function.Function; -import java.util.stream.Collectors; -import java.util.stream.IntStream; -import java.util.stream.Stream; -import org.apache.commons.lang3.tuple.Pair; - -/** - * Utility class to generate human-readable diffs between expected and actual records. Assumes 1s1t - * output format. - */ -public class RecordDiffer { - - private final Comparator rawRecordIdentityComparator; - private final Comparator rawRecordSortComparator; - private final Function rawRecordIdentityExtractor; - private final Map rawRecordColumnNames; - - private final Comparator finalRecordIdentityComparator; - private final Comparator finalRecordSortComparator; - private final Function finalRecordIdentityExtractor; - private final Map finalRecordColumnNames; - - /** - * @param rawRecordColumnNames - * @param finalRecordColumnNames - * @param identifyingColumns Which fields constitute a unique record (typically PK+cursor). Do _not_ - * include extracted_at; it is handled automatically. - */ - @SafeVarargs - public RecordDiffer(final Map rawRecordColumnNames, - final Map finalRecordColumnNames, - final Pair... identifyingColumns) { - this.rawRecordColumnNames = rawRecordColumnNames; - this.finalRecordColumnNames = finalRecordColumnNames; - final Pair[] rawTableIdentifyingColumns = Arrays.stream(identifyingColumns) - .map(p -> Pair.of( - // Raw tables always retain the original column names - p.getLeft().originalName(), - p.getRight())) - .toArray(Pair[]::new); - this.rawRecordIdentityComparator = buildIdentityComparator(rawTableIdentifyingColumns, rawRecordColumnNames); - this.rawRecordSortComparator = rawRecordIdentityComparator - .thenComparing(record -> asString(record.get(getMetadataColumnName(rawRecordColumnNames, "_airbyte_raw_id")))); - this.rawRecordIdentityExtractor = buildIdentityExtractor(rawTableIdentifyingColumns, rawRecordColumnNames); - - final Pair[] finalTableIdentifyingColumns = Arrays.stream(identifyingColumns) - .map(p -> Pair.of( - // Final tables may have modified the column names, so use the final name here. - p.getLeft().name(), - p.getRight())) - .toArray(Pair[]::new); - this.finalRecordIdentityComparator = buildIdentityComparator(finalTableIdentifyingColumns, finalRecordColumnNames); - this.finalRecordSortComparator = finalRecordIdentityComparator - .thenComparing(record -> asString(record.get(getMetadataColumnName(finalRecordColumnNames, "_airbyte_raw_id")))); - this.finalRecordIdentityExtractor = buildIdentityExtractor(finalTableIdentifyingColumns, finalRecordColumnNames); - } - - /** - * In the expected records, a SQL null is represented as a JsonNode without that field at all, and a - * JSON null is represented as a NullNode. For example, in the JSON blob {"name": null}, the `name` - * field is a JSON null, and the `address` field is a SQL null. - */ - public void verifySyncResult(final List expectedRawRecords, - final List actualRawRecords, - final List expectedFinalRecords, - final List actualFinalRecords) { - assertAll( - () -> diffRawTableRecords(expectedRawRecords, actualRawRecords), - () -> diffFinalTableRecords(expectedFinalRecords, actualFinalRecords)); - } - - public void diffRawTableRecords(final List expectedRecords, final List actualRecords) { - final String diff = diffRecords( - expectedRecords.stream().map(this::copyWithLiftedData).collect(toList()), - actualRecords.stream().map(this::copyWithLiftedData).collect(toList()), - rawRecordIdentityComparator, - rawRecordSortComparator, - rawRecordIdentityExtractor, - rawRecordColumnNames); - - if (!diff.isEmpty()) { - fail("Raw table was incorrect.\n" + diff); - } - } - - public void diffFinalTableRecords(final List expectedRecords, final List actualRecords) { - final String diff = diffRecords( - expectedRecords, - actualRecords, - finalRecordIdentityComparator, - finalRecordSortComparator, - finalRecordIdentityExtractor, - finalRecordColumnNames); - - if (!diff.isEmpty()) { - fail("Final table was incorrect.\n" + diff); - } - } - - /** - * Lift _airbyte_data fields to the root level. If _airbyte_data is a string, deserialize it first. - * - * @return A copy of the record, but with all fields in _airbyte_data lifted to the top level. - */ - private JsonNode copyWithLiftedData(final JsonNode record) { - final ObjectNode copy = record.deepCopy(); - copy.remove(getMetadataColumnName(rawRecordColumnNames, "_airbyte_data")); - JsonNode airbyteData = record.get(getMetadataColumnName(rawRecordColumnNames, "_airbyte_data")); - if (airbyteData.isTextual()) { - airbyteData = Jsons.deserializeExact(airbyteData.asText()); - } - Streams.stream(airbyteData.fields()).forEach(field -> { - if (!copy.has(field.getKey())) { - copy.set(field.getKey(), field.getValue()); - } else { - // This would only happen if the record has one of the metadata columns (e.g. _airbyte_raw_id) - // We don't support that in production, so we don't support it here either. - throw new RuntimeException("Cannot lift field " + field.getKey() + " because it already exists in the record."); - } - }); - return copy; - } - - /** - * Build a Comparator to detect equality between two records. It first compares all the identifying - * columns in order, and breaks ties using extracted_at. - */ - private Comparator buildIdentityComparator(final Pair[] identifyingColumns, final Map columnNames) { - // Start with a noop comparator for convenience - Comparator comp = Comparator.comparing(record -> 0); - for (final Pair column : identifyingColumns) { - comp = comp.thenComparing(record -> extract(record, column.getKey(), column.getValue())); - } - comp = comp.thenComparing(record -> asTimestampWithTimezone(record.get(getMetadataColumnName(columnNames, "_airbyte_extracted_at")))); - return comp; - } - - /** - * See {@link #buildIdentityComparator(Pair[], Map)} for an explanation of - * dataExtractor. - */ - private Function buildIdentityExtractor(final Pair[] identifyingColumns, - final Map columnNames) { - return record -> Arrays.stream(identifyingColumns) - .map(column -> getPrintableFieldIfPresent(record, column.getKey())) - .collect(Collectors.joining(", ")) - + getPrintableFieldIfPresent(record, getMetadataColumnName(columnNames, "_airbyte_extracted_at")); - } - - private static String getPrintableFieldIfPresent(final JsonNode record, final String field) { - if (record.has(field)) { - return field + "=" + record.get(field); - } else { - return ""; - } - } - - /** - * Generate a human-readable diff between the two lists. Assumes (in general) that two records with - * the same PK, cursor, and extracted_at are the same record. - *

    - * Verifies that all values specified in the expected records are correct (_including_ raw_id), and - * that no other fields are present (except for loaded_at and raw_id). We assume that it's - * impossible to verify loaded_at, since it's generated dynamically; however, we do provide the - * ability to assert on the exact raw_id if desired; we simply assume that raw_id is always expected - * to be present. - * - * @param identityComparator Returns 0 iff two records are the "same" record (i.e. have the same - * PK+cursor+extracted_at) - * @param sortComparator Behaves identically to identityComparator, but if two records are the same, - * breaks that tie using _airbyte_raw_id - * @param recordIdExtractor Dump the record's PK+cursor+extracted_at into a human-readable string - * @return The diff, or empty string if there were no differences - */ - private String diffRecords(final List originalExpectedRecords, - final List originalActualRecords, - final Comparator identityComparator, - final Comparator sortComparator, - final Function recordIdExtractor, - final Map columnNames) { - final List expectedRecords = originalExpectedRecords.stream().sorted(sortComparator).toList(); - final List actualRecords = originalActualRecords.stream().sorted(sortComparator).toList(); - - // Iterate through both lists in parallel and compare each record. - // Build up an error message listing any incorrect, missing, or unexpected records. - String message = ""; - int expectedRecordIndex = 0; - int actualRecordIndex = 0; - while (expectedRecordIndex < expectedRecords.size() && actualRecordIndex < actualRecords.size()) { - final JsonNode expectedRecord = expectedRecords.get(expectedRecordIndex); - final JsonNode actualRecord = actualRecords.get(actualRecordIndex); - final int compare = identityComparator.compare(expectedRecord, actualRecord); - if (compare == 0) { - // These records should be the same. Find the specific fields that are different and move on - // to the next records in both lists. - message += diffSingleRecord(recordIdExtractor, expectedRecord, actualRecord, columnNames); - expectedRecordIndex++; - actualRecordIndex++; - } else if (compare < 0) { - // The expected record is missing from the actual records. Print it and move on to the next expected - // record. - message += "Row was expected but missing: " + expectedRecord + "\n"; - expectedRecordIndex++; - } else { - // There's an actual record which isn't present in the expected records. Print it and move on to the - // next actual record. - message += "Row was not expected but present: " + actualRecord + "\n"; - actualRecordIndex++; - } - } - // Tail loops in case we reached the end of one list before the other. - while (expectedRecordIndex < expectedRecords.size()) { - message += "Row was expected but missing: " + expectedRecords.get(expectedRecordIndex) + "\n"; - expectedRecordIndex++; - } - while (actualRecordIndex < actualRecords.size()) { - message += "Row was not expected but present: " + actualRecords.get(actualRecordIndex) + "\n"; - actualRecordIndex++; - } - - return message; - } - - private String diffSingleRecord(final Function recordIdExtractor, - final JsonNode expectedRecord, - final JsonNode actualRecord, - final Map columnNames) { - boolean foundMismatch = false; - String mismatchedRecordMessage = "Row had incorrect data: " + recordIdExtractor.apply(expectedRecord) + "\n"; - // Iterate through each column in the expected record and compare it to the actual record's value. - for (final String column : Streams.stream(expectedRecord.fieldNames()).sorted().toList()) { - // For all other columns, we can just compare their values directly. - final JsonNode expectedValue = expectedRecord.get(column); - final JsonNode actualValue = actualRecord.get(column); - if (!areJsonNodesEquivalent(expectedValue, actualValue)) { - mismatchedRecordMessage += generateFieldError("column " + column, expectedValue, actualValue); - foundMismatch = true; - } - } - // Then check the entire actual record for any columns that we weren't expecting. - final LinkedHashMap extraColumns = checkForExtraOrNonNullFields(expectedRecord, actualRecord, columnNames); - if (extraColumns.size() > 0) { - for (final Map.Entry extraColumn : extraColumns.entrySet()) { - mismatchedRecordMessage += generateFieldError("column " + extraColumn.getKey(), null, extraColumn.getValue()); - foundMismatch = true; - } - } - if (foundMismatch) { - return mismatchedRecordMessage; - } else { - return ""; - } - } - - private static boolean areJsonNodesEquivalent(final JsonNode expectedValue, final JsonNode actualValue) { - if (expectedValue == null || actualValue == null) { - // If one of the values is null, then we expect both of them to be null. - return expectedValue == null && actualValue == null; - } else if (expectedValue instanceof final ArrayNode expectedArrayNode && actualValue instanceof final ArrayNode actualArrayNode) { - // If both values are arrays, compare each of their elements. Order should be preserved - return IntStream.range(0, expectedArrayNode.size()) - .allMatch(i -> areJsonNodesEquivalent(expectedArrayNode.get(i), actualArrayNode.get(i))); - } else if (expectedValue instanceof final ObjectNode expectedObjectNode && actualValue instanceof final ObjectNode actualObjectNode) { - // If both values are objects compare their fields and values - return expectedObjectNode.size() == actualObjectNode.size() && Stream.generate(expectedObjectNode.fieldNames()::next) - .limit(expectedObjectNode.size()) - .allMatch(field -> areJsonNodesEquivalent(expectedObjectNode.get(field), actualObjectNode.get(field))); - } else { - // Otherwise, we need to compare the actual values. - // This is kind of sketchy, but seems to work fine for the data we have in our test cases. - return expectedValue.equals(actualValue) - // equals() expects the two values to be the same class. - // We need to handle comparisons between e.g. LongNode and IntNode. - || (expectedValue.isIntegralNumber() && actualValue.isIntegralNumber() - && expectedValue.bigIntegerValue().equals(actualValue.bigIntegerValue())) - || (expectedValue.isNumber() && actualValue.isNumber() && expectedValue.decimalValue().equals(actualValue.decimalValue())); - } - } - - /** - * Verify that all fields in the actual record are present in the expected record. This is primarily - * relevant for detecting fields that we expected to be null, but actually were not. See - * {@link BaseTypingDedupingTest#dumpFinalTableRecords(String, String)} for an explanation of how - * SQL/JSON nulls are represented in the expected record. - *

    - * This has the side benefit of detecting completely unexpected columns, which would be a very weird - * bug but is probably still useful to catch. - */ - private LinkedHashMap checkForExtraOrNonNullFields(final JsonNode expectedRecord, - final JsonNode actualRecord, - final Map columnNames) { - final LinkedHashMap extraFields = new LinkedHashMap<>(); - for (final String column : Streams.stream(actualRecord.fieldNames()).sorted().toList()) { - // loaded_at and raw_id are generated dynamically, so we just ignore them. - final boolean isLoadedAt = getMetadataColumnName(columnNames, "_airbyte_loaded_at").equals(column); - final boolean isRawId = getMetadataColumnName(columnNames, "_airbyte_raw_id").equals(column); - final boolean isExpected = expectedRecord.has(column); - if (!(isLoadedAt || isRawId || isExpected)) { - extraFields.put(column, actualRecord.get(column)); - } - } - return extraFields; - } - - /** - * Produce a pretty-printed error message, e.g. " For column foo, expected 1 but got 2". The leading - * spaces are intentional, to make the message easier to read when it's embedded in a larger - * stacktrace. - */ - private static String generateFieldError(final String fieldname, final JsonNode expectedValue, final JsonNode actualValue) { - final String expectedString = expectedValue == null ? "SQL NULL (i.e. no value)" : expectedValue.toString(); - final String actualString = actualValue == null ? "SQL NULL (i.e. no value)" : actualValue.toString(); - return " For " + fieldname + ", expected " + expectedString + " but got " + actualString + "\n"; - } - - // These asFoo methods are used for sorting records, so their defaults are intended to make broken - // records stand out. - private static String asString(final JsonNode node) { - if (node == null || node.isNull()) { - return ""; - } else if (node.isTextual()) { - return node.asText(); - } else { - return Jsons.serialize(node); - } - } - - private static BigDecimal asNumber(final JsonNode node) { - if (node == null || !node.isNumber()) { - return new BigDecimal(Double.MIN_VALUE); - } else { - return node.decimalValue(); - } - } - - private static long asInt(final JsonNode node) { - if (node == null || !node.isIntegralNumber()) { - return Long.MIN_VALUE; - } else { - return node.longValue(); - } - } - - private static boolean asBoolean(final JsonNode node) { - if (node == null || !node.isBoolean()) { - return false; - } else { - return node.asBoolean(); - } - } - - private static Instant asTimestampWithTimezone(final JsonNode node) { - if (node == null || !node.isTextual()) { - return Instant.ofEpochMilli(Long.MIN_VALUE); - } else { - try { - return Instant.parse(node.asText()); - } catch (final Exception e) { - return Instant.ofEpochMilli(Long.MIN_VALUE); - } - } - } - - private static LocalDateTime asTimestampWithoutTimezone(final JsonNode node) { - if (node == null || !node.isTextual()) { - return LocalDateTime.ofInstant(Instant.ofEpochMilli(Long.MIN_VALUE), ZoneOffset.UTC); - } else { - try { - return LocalDateTime.parse(node.asText()); - } catch (final Exception e) { - return LocalDateTime.ofInstant(Instant.ofEpochMilli(Long.MIN_VALUE), ZoneOffset.UTC); - } - } - } - - private static OffsetTime asTimeWithTimezone(final JsonNode node) { - if (node == null || !node.isTextual()) { - return OffsetTime.of(0, 0, 0, 0, ZoneOffset.UTC); - } else { - return OffsetTime.parse(node.asText()); - } - } - - private static LocalTime asTimeWithoutTimezone(final JsonNode node) { - if (node == null || !node.isTextual()) { - return LocalTime.of(0, 0, 0); - } else { - try { - return LocalTime.parse(node.asText()); - } catch (final Exception e) { - return LocalTime.of(0, 0, 0); - } - } - } - - private static LocalDate asDate(final JsonNode node) { - if (node == null || !node.isTextual()) { - return LocalDate.ofInstant(Instant.ofEpochMilli(Long.MIN_VALUE), ZoneOffset.UTC); - } else { - try { - return LocalDate.parse(node.asText()); - } catch (final Exception e) { - return LocalDate.ofInstant(Instant.ofEpochMilli(Long.MIN_VALUE), ZoneOffset.UTC); - } - } - } - - // Generics? Never heard of 'em. (I'm sorry) - private static Comparable extract(final JsonNode node, final String field, final AirbyteType type) { - if (type instanceof final AirbyteProtocolType t) { - return switch (t) { - case STRING -> asString(node.get(field)); - case NUMBER -> asNumber(node.get(field)); - case INTEGER -> asInt(node.get(field)); - case BOOLEAN -> asBoolean(node.get(field)); - case TIMESTAMP_WITH_TIMEZONE -> asTimestampWithTimezone(node.get(field)); - case TIMESTAMP_WITHOUT_TIMEZONE -> asTimestampWithoutTimezone(node.get(field)); - case TIME_WITH_TIMEZONE -> asTimeWithTimezone(node.get(field)); - case TIME_WITHOUT_TIMEZONE -> asTimeWithoutTimezone(node.get(field)); - case DATE -> asDate(node.get(field)); - case UNKNOWN -> node.toString(); - }; - } else { - return node.toString(); - } - } - - private String getMetadataColumnName(final Map columnNames, final String columnName) { - return columnNames.getOrDefault(columnName, columnName); - } - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/testFixtures/kotlin/io/airbyte/integrations/base/destination/typing_deduping/BaseSqlGeneratorIntegrationTest.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/testFixtures/kotlin/io/airbyte/integrations/base/destination/typing_deduping/BaseSqlGeneratorIntegrationTest.kt new file mode 100644 index 0000000000000..5f42e223d1c99 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/testFixtures/kotlin/io/airbyte/integrations/base/destination/typing_deduping/BaseSqlGeneratorIntegrationTest.kt @@ -0,0 +1,1844 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +import com.fasterxml.jackson.databind.JsonNode +import com.google.common.collect.Streams +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.string.Strings +import io.airbyte.integrations.base.destination.typing_deduping.TypeAndDedupeTransaction.executeSoftReset +import io.airbyte.integrations.base.destination.typing_deduping.TypeAndDedupeTransaction.executeTypeAndDedupe +import io.airbyte.integrations.base.destination.typing_deduping.migrators.MinimumDestinationState +import io.airbyte.protocol.models.v0.AirbyteStream +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import io.airbyte.protocol.models.v0.DestinationSyncMode +import io.airbyte.protocol.models.v0.SyncMode +import java.time.Instant +import java.util.* +import java.util.function.Consumer +import java.util.function.Function +import java.util.stream.Collectors +import java.util.stream.Stream +import org.junit.jupiter.api.AfterEach +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.assertAll +import org.junit.jupiter.api.function.Executable +import org.junit.jupiter.api.parallel.Execution +import org.junit.jupiter.api.parallel.ExecutionMode +import org.junit.jupiter.params.ParameterizedTest +import org.junit.jupiter.params.provider.ValueSource +import org.mockito.kotlin.mock +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * This class exercises [SqlGenerator] implementations. All destinations should extend this class + * for their respective implementation. Subclasses are encouraged to add additional tests with + * destination-specific behavior (for example, verifying that datasets are created in the correct + * BigQuery region). + * + * Subclasses should implement a [org.junit.jupiter.api.BeforeAll] method to load any secrets and + * connect to the destination. This test expects to be able to run [.getDestinationHandler] in a + * [org.junit.jupiter.api.BeforeEach] method. + */ +@Execution(ExecutionMode.CONCURRENT) +abstract class BaseSqlGeneratorIntegrationTest { + protected var DIFFER: RecordDiffer? = null + + /** Subclasses may use these four StreamConfigs in their tests. */ + protected var incrementalDedupStream: StreamConfig = mock() + + /** + * We intentionally don't have full refresh overwrite/append streams. Those actually behave + * identically in the sqlgenerator. Overwrite mode is actually handled in [DefaultTyperDeduper]. + */ + protected var incrementalAppendStream: StreamConfig = mock() + protected var cdcIncrementalDedupStream: StreamConfig = mock() + + /** This isn't particularly realistic, but it's technically possible. */ + protected var cdcIncrementalAppendStream: StreamConfig = mock() + + protected var generator: SqlGenerator = mock() + protected abstract val destinationHandler: DestinationHandler + // Need a placeholder otherwise Spotbugs will complain with + // a possibility of returning null value in getNamespace. + protected var namespace: String = "dummy_holder" + + protected var streamId: StreamId = mock() + private lateinit var primaryKey: List + private lateinit var cursor: ColumnId + private var COLUMNS: LinkedHashMap = mock() + + protected abstract val sqlGenerator: SqlGenerator + get + + /** + * Subclasses should override this method if they need to make changes to the stream ID. For + * example, you could upcase the final table name here. + */ + protected fun buildStreamId( + namespace: String, + finalTableName: String, + rawTableName: String + ): StreamId { + return StreamId( + namespace, + finalTableName, + namespace, + rawTableName, + namespace, + finalTableName + ) + } + + /** + * Do any setup work to create a namespace for this test run. For example, this might create a + * BigQuery dataset, or a Snowflake schema. + */ + @Throws(Exception::class) protected abstract fun createNamespace(namespace: String?) + + /** Create a raw table using the StreamId's rawTableId. */ + @Throws(Exception::class) protected abstract fun createRawTable(streamId: StreamId) + + /** Creates a raw table in the v1 format */ + @Throws(Exception::class) protected abstract fun createV1RawTable(v1RawTable: StreamId) + + @Throws(Exception::class) + protected abstract fun insertRawTableRecords(streamId: StreamId, records: List) + + @Throws(Exception::class) + protected abstract fun insertV1RawTableRecords(streamId: StreamId, records: List) + + @Throws(Exception::class) + protected abstract fun insertFinalTableRecords( + includeCdcDeletedAt: Boolean, + streamId: StreamId, + suffix: String?, + records: List + ) + + /** + * The two dump methods are defined identically as in [BaseTypingDedupingTest], but with + * slightly different method signature. This test expects subclasses to respect the + * raw/finalTableId on the StreamId object, rather than hardcoding e.g. the airbyte_internal + * dataset. + * + * The `_airbyte_data` field must be deserialized into an ObjectNode, even if it's stored in the + * destination as a string. + */ + @Throws(Exception::class) + protected abstract fun dumpRawTableRecords(streamId: StreamId): List + + @Throws(Exception::class) + protected abstract fun dumpFinalTableRecords( + streamId: StreamId, + suffix: String? + ): List + + /** + * Clean up all resources in the namespace. For example, this might delete the BigQuery dataset + * created in [.createNamespace]. + */ + @Throws(Exception::class) protected abstract fun teardownNamespace(namespace: String?) + + protected val rawMetadataColumnNames: Map + /** Identical to [BaseTypingDedupingTest.getRawMetadataColumnNames]. */ + get() = HashMap() + + protected val finalMetadataColumnNames: Map + /** Identical to [BaseTypingDedupingTest.getFinalMetadataColumnNames]. */ + get() = HashMap() + + /** + * This test implementation is extremely destination-specific, but all destinations must + * implement it. This test should verify that creating a table using [.incrementalDedupStream] + * works as expected, including column types, indexing, partitioning, etc. + * + * Note that subclasses must also annotate their implementation with @Test. + */ + @Test @Throws(Exception::class) abstract fun testCreateTableIncremental() + + @BeforeEach + @Throws(Exception::class) + fun setup() { + generator = sqlGenerator + + val id1 = generator.buildColumnId("id1") + val id2 = generator.buildColumnId("id2") + primaryKey = listOf(id1, id2) + val cursor = generator.buildColumnId("updated_at") + this.cursor = cursor + + COLUMNS = LinkedHashMap() + COLUMNS[id1] = AirbyteProtocolType.INTEGER + COLUMNS[id2] = AirbyteProtocolType.INTEGER + COLUMNS[cursor] = AirbyteProtocolType.TIMESTAMP_WITH_TIMEZONE + COLUMNS[generator.buildColumnId("struct")] = Struct(LinkedHashMap()) + COLUMNS[generator.buildColumnId("array")] = Array(AirbyteProtocolType.UNKNOWN) + COLUMNS[generator.buildColumnId("string")] = AirbyteProtocolType.STRING + COLUMNS[generator.buildColumnId("number")] = AirbyteProtocolType.NUMBER + COLUMNS[generator.buildColumnId("integer")] = AirbyteProtocolType.INTEGER + COLUMNS[generator.buildColumnId("boolean")] = AirbyteProtocolType.BOOLEAN + COLUMNS[generator.buildColumnId("timestamp_with_timezone")] = + AirbyteProtocolType.TIMESTAMP_WITH_TIMEZONE + COLUMNS[generator.buildColumnId("timestamp_without_timezone")] = + AirbyteProtocolType.TIMESTAMP_WITHOUT_TIMEZONE + COLUMNS[generator.buildColumnId("time_with_timezone")] = + AirbyteProtocolType.TIME_WITH_TIMEZONE + COLUMNS[generator.buildColumnId("time_without_timezone")] = + AirbyteProtocolType.TIME_WITHOUT_TIMEZONE + COLUMNS[generator.buildColumnId("date")] = AirbyteProtocolType.DATE + COLUMNS[generator.buildColumnId("unknown")] = AirbyteProtocolType.UNKNOWN + + val cdcColumns = LinkedHashMap(COLUMNS) + cdcColumns[generator.buildColumnId("_ab_cdc_deleted_at")] = + AirbyteProtocolType.TIMESTAMP_WITH_TIMEZONE + + DIFFER = + RecordDiffer( + rawMetadataColumnNames, + finalMetadataColumnNames, + id1 to AirbyteProtocolType.INTEGER, + id2 to AirbyteProtocolType.INTEGER, + cursor to AirbyteProtocolType.TIMESTAMP_WITH_TIMEZONE + ) + + namespace = Strings.addRandomSuffix("sql_generator_test", "_", 10) + // This is not a typical stream ID would look like, but SqlGenerator isn't allowed to make + // any + // assumptions about StreamId structure. + // In practice, the final table would be testDataset.users, and the raw table would be + // airbyte_internal.testDataset_raw__stream_users. + streamId = buildStreamId(namespace, "users_final", "users_raw") + + incrementalDedupStream = + StreamConfig( + streamId, + SyncMode.INCREMENTAL, + DestinationSyncMode.APPEND_DEDUP, + primaryKey, + Optional.of(cursor), + COLUMNS + ) + incrementalAppendStream = + StreamConfig( + streamId, + SyncMode.INCREMENTAL, + DestinationSyncMode.APPEND, + primaryKey, + Optional.of(cursor), + COLUMNS + ) + + cdcIncrementalDedupStream = + StreamConfig( + streamId, + SyncMode.INCREMENTAL, + DestinationSyncMode.APPEND_DEDUP, + primaryKey, + Optional.of(cursor), + cdcColumns + ) + cdcIncrementalAppendStream = + StreamConfig( + streamId, + SyncMode.INCREMENTAL, + DestinationSyncMode.APPEND, + primaryKey, + Optional.of(cursor), + cdcColumns + ) + + LOGGER.info("Running with namespace {}", namespace) + createNamespace(namespace) + } + + @AfterEach + @Throws(Exception::class) + fun teardown() { + teardownNamespace(namespace) + } + + @Throws(Exception::class) + private fun getDestinationInitialState( + streamConfig: StreamConfig + ): DestinationInitialStatus { + val initialState = destinationHandler.gatherInitialState(java.util.List.of(streamConfig)) + Assertions.assertEquals( + 1, + initialState.size, + "gatherInitialState returned the wrong number of futures" + ) + Assertions.assertTrue( + initialState.first().isFinalTablePresent, + "Destination handler could not find existing table" + ) + return initialState.first() + } + + /** Create a table and verify that we correctly recognize it as identical to itself. */ + @Test + @Throws(Exception::class) + fun detectNoSchemaChange() { + val createTable = generator.createTable(incrementalDedupStream, "", false) + destinationHandler.execute(createTable) + val destinationInitialStatus = getDestinationInitialState(incrementalDedupStream) + Assertions.assertFalse( + destinationInitialStatus.isSchemaMismatch, + "Unchanged schema was incorrectly detected as a schema change." + ) + } + + /** Verify that adding a new column is detected as a schema change. */ + @Test + @Throws(Exception::class) + fun detectColumnAdded() { + val createTable = generator.createTable(incrementalDedupStream, "", false) + destinationHandler.execute(createTable) + incrementalDedupStream.columns!!.put( + generator.buildColumnId("new_column"), + AirbyteProtocolType.STRING + ) + val destinationInitialStatus = getDestinationInitialState(incrementalDedupStream) + Assertions.assertTrue( + destinationInitialStatus.isSchemaMismatch, + "Adding a new column was not detected as a schema change." + ) + } + + /** Verify that removing a column is detected as a schema change. */ + @Test + @Throws(Exception::class) + fun detectColumnRemoved() { + val createTable = generator.createTable(incrementalDedupStream, "", false) + destinationHandler.execute(createTable) + incrementalDedupStream.columns!!.remove(generator.buildColumnId("string")) + val destinationInitialStatus = getDestinationInitialState(incrementalDedupStream) + Assertions.assertTrue( + destinationInitialStatus.isSchemaMismatch, + "Removing a column was not detected as a schema change." + ) + } + + /** Verify that changing a column's type is detected as a schema change. */ + @Test + @Throws(Exception::class) + fun detectColumnChanged() { + val createTable = generator.createTable(incrementalDedupStream, "", false) + destinationHandler.execute(createTable) + incrementalDedupStream.columns!!.put( + generator.buildColumnId("string"), + AirbyteProtocolType.INTEGER + ) + val destinationInitialStatus = getDestinationInitialState(incrementalDedupStream) + Assertions.assertTrue( + destinationInitialStatus.isSchemaMismatch, + "Altering a column was not detected as a schema change." + ) + } + + /** Test that T+D supports streams whose name and namespace are the same. */ + @Test + @Throws(Exception::class) + fun incrementalDedupSameNameNamespace() { + val streamId = buildStreamId(namespace, namespace, namespace + "_raw") + val stream = + StreamConfig( + streamId, + SyncMode.INCREMENTAL, + DestinationSyncMode.APPEND_DEDUP, + incrementalDedupStream.primaryKey, + incrementalDedupStream.cursor, + incrementalDedupStream.columns + ) + + createRawTable(streamId) + createFinalTable(stream, "") + insertRawTableRecords( + streamId, + java.util.List.of( + Jsons.deserialize( + """ + { + "_airbyte_raw_id": "5ce60e70-98aa-4fe3-8159-67207352c4f0", + "_airbyte_extracted_at": "2023-01-01T00:00:00Z", + "_airbyte_data": {"id1": 1, "id2": 100} + } + + """.trimIndent() + ) + ) + ) + + executeTypeAndDedupe(generator, destinationHandler, stream, Optional.empty(), "") + + val rawRecords = dumpRawTableRecords(streamId) + val finalRecords = dumpFinalTableRecords(streamId, "") + verifyRecordCounts(1, rawRecords, 1, finalRecords) + } + + private fun getOnly( + initialStates: List> + ): DestinationInitialStatus { + Assertions.assertEquals(1, initialStates.size) + return initialStates.first() + } + + /** + * Run a full T+D update for an incremental-dedup stream, writing to a final table with "_foo" + * suffix, with values for all data types. Verifies all behaviors for all types: + * + * * A valid, nonnull value + * * No value (i.e. the column is missing from the record) + * * A JSON null value + * * An invalid value + * + * In practice, incremental streams never write to a suffixed table, but SqlGenerator isn't + * allowed to make that assumption (and we might as well exercise that code path). + */ + @Test + @Throws(Exception::class) + fun allTypes() { + // Add case-sensitive columnName to test json path querying + incrementalDedupStream.columns!!.put( + generator.buildColumnId("IamACaseSensitiveColumnName"), + AirbyteProtocolType.STRING + ) + createRawTable(streamId) + createFinalTable(incrementalDedupStream, "") + insertRawTableRecords( + streamId, + BaseTypingDedupingTest.Companion.readRecords("sqlgenerator/alltypes_inputrecords.jsonl") + ) + + var initialState = + getOnly( + destinationHandler.gatherInitialState(java.util.List.of(incrementalDedupStream)) + ) + Assertions.assertTrue( + initialState.isFinalTableEmpty, + "Final table should be empty before T+D" + ) + + executeTypeAndDedupe( + generator, + destinationHandler, + incrementalDedupStream, + Optional.empty(), + "" + ) + + verifyRecords( + "sqlgenerator/alltypes_expectedrecords_raw.jsonl", + dumpRawTableRecords(streamId), + "sqlgenerator/alltypes_expectedrecords_final.jsonl", + dumpFinalTableRecords(streamId, "") + ) + initialState = + getOnly( + destinationHandler.gatherInitialState(java.util.List.of(incrementalDedupStream)) + ) + Assertions.assertFalse( + initialState.isFinalTableEmpty, + "Final table should not be empty after T+D" + ) + } + + /** Run a basic test to verify that we don't throw an exception on basic data values. */ + @Test + @Throws(Exception::class) + fun allTypesUnsafe() { + createRawTable(streamId) + createFinalTable(incrementalDedupStream, "") + insertRawTableRecords( + streamId, + BaseTypingDedupingTest.Companion.readRecords( + "sqlgenerator/alltypes_unsafe_inputrecords.jsonl" + ) + ) + + var initialState = + getOnly( + destinationHandler.gatherInitialState(java.util.List.of(incrementalDedupStream)) + ) + Assertions.assertTrue( + initialState.isFinalTableEmpty, + "Final table should be empty before T+D" + ) + + // Instead of using the full T+D transaction, explicitly run with useSafeCasting=false. + val unsafeSql = generator.updateTable(incrementalDedupStream, "", Optional.empty(), false) + destinationHandler.execute(unsafeSql) + + initialState = + getOnly( + destinationHandler.gatherInitialState(java.util.List.of(incrementalDedupStream)) + ) + Assertions.assertFalse( + initialState.isFinalTableEmpty, + "Final table should not be empty after T+D" + ) + } + + @Throws(Exception::class) + private fun getInitialRawTableState(streamConfig: StreamConfig?): InitialRawTableStatus { + val initialStates = destinationHandler.gatherInitialState(java.util.List.of(streamConfig!!)) + Assertions.assertEquals(1, initialStates.size) + return initialStates.first().initialRawTableStatus + } + + /** + * Run through some plausible T+D scenarios to verify that we correctly identify the min raw + * timestamp. + */ + @Test + @Throws(Exception::class) + fun minTimestampBehavesCorrectly() { + // When the raw table doesn't exist, there are no unprocessed records and no timestamp + Assertions.assertEquals( + InitialRawTableStatus(false, false, Optional.empty()), + getInitialRawTableState(incrementalAppendStream) + ) + + // When the raw table is empty, there are still no unprocessed records and no timestamp + createRawTable(streamId) + Assertions.assertEquals( + InitialRawTableStatus(true, false, Optional.empty()), + getInitialRawTableState(incrementalAppendStream) + ) + + // If we insert some raw records with null loaded_at, we should get the min extracted_at + insertRawTableRecords( + streamId, + java.util.List.of( + Jsons.deserialize( + """ + { + "_airbyte_raw_id": "899d3bc3-7921-44f0-8517-c748a28fe338", + "_airbyte_extracted_at": "2023-01-01T00:00:00Z", + "_airbyte_data": {} + } + + """.trimIndent() + ), + Jsons.deserialize( + """ + { + "_airbyte_raw_id": "47f46eb6-fcae-469c-a7fc-31d4b9ce7474", + "_airbyte_extracted_at": "2023-01-02T00:00:00Z", + "_airbyte_data": {} + } + + """.trimIndent() + ) + ) + ) + var tableState = getInitialRawTableState(incrementalAppendStream) + Assertions.assertTrue( + tableState.hasUnprocessedRecords, + "When all raw records have null loaded_at, we should recognize that there are unprocessed records" + ) + Assertions.assertTrue( + tableState.maxProcessedTimestamp.get().isBefore(Instant.parse("2023-01-01T00:00:00Z")), + "When all raw records have null loaded_at, the min timestamp should be earlier than all of their extracted_at values (2023-01-01). Was actually " + + tableState.maxProcessedTimestamp.get() + ) + + // Execute T+D to set loaded_at on the records + createFinalTable(incrementalAppendStream, "") + executeTypeAndDedupe( + generator, + destinationHandler, + incrementalAppendStream, + Optional.empty(), + "" + ) + + Assertions.assertEquals( + getInitialRawTableState(incrementalAppendStream), + InitialRawTableStatus(true, false, Optional.of(Instant.parse("2023-01-02T00:00:00Z"))), + "When all raw records have non-null loaded_at, we should recognize that there are no unprocessed records, and the min timestamp should be equal to the latest extracted_at" + ) + + // If we insert another raw record with older extracted_at than the typed records, we should + // fetch a + // timestamp earlier than this new record. + // This emulates a sync inserting some records out of order, running T+D on newer records, + // inserting + // an older record, and then crashing before it can execute T+D. The next sync should + // recognize + // that older record as still needing to be processed. + insertRawTableRecords( + streamId, + java.util.List.of( + Jsons.deserialize( + """ + { + "_airbyte_raw_id": "899d3bc3-7921-44f0-8517-c748a28fe338", + "_airbyte_extracted_at": "2023-01-01T12:00:00Z", + "_airbyte_data": {} + } + + """.trimIndent() + ) + ) + ) + tableState = getInitialRawTableState(incrementalAppendStream) + // this is a pretty confusing pair of assertions. To explain them in more detail: There are + // three + // records in the raw table: + // * loaded_at not null, extracted_at = 2023-01-01 00:00Z + // * loaded_at is null, extracted_at = 2023-01-01 12:00Z + // * loaded_at not null, extracted_at = 2023-01-02 00:00Z + // We should have a timestamp which is older than the second record, but newer than or equal + // to + // (i.e. not before) the first record. This allows us to query the raw table using + // `_airbyte_extracted_at > ?`, which will include the second record and exclude the first + // record. + Assertions.assertTrue( + tableState.hasUnprocessedRecords, + "When some raw records have null loaded_at, we should recognize that there are unprocessed records" + ) + Assertions.assertTrue( + tableState.maxProcessedTimestamp.get().isBefore(Instant.parse("2023-01-01T12:00:00Z")), + "When some raw records have null loaded_at, the min timestamp should be earlier than the oldest unloaded record (2023-01-01 12:00Z). Was actually " + + tableState + ) + Assertions.assertFalse( + tableState.maxProcessedTimestamp.get().isBefore(Instant.parse("2023-01-01T00:00:00Z")), + "When some raw records have null loaded_at, the min timestamp should be later than the newest loaded record older than the oldest unloaded record (2023-01-01 00:00Z). Was actually " + + tableState + ) + } + + /** + * Identical to [.allTypes], but queries for the min raw timestamp first. This verifies that if + * a previous sync doesn't fully type-and-dedupe a table, we still get those records on the next + * sync. + */ + @Test + @Throws(Exception::class) + fun handlePreexistingRecords() { + // Add case-sensitive columnName to test json path querying + incrementalDedupStream.columns!!.put( + generator.buildColumnId("IamACaseSensitiveColumnName"), + AirbyteProtocolType.STRING + ) + createRawTable(streamId) + createFinalTable(incrementalDedupStream, "") + insertRawTableRecords( + streamId, + BaseTypingDedupingTest.Companion.readRecords("sqlgenerator/alltypes_inputrecords.jsonl") + ) + + val tableState = getInitialRawTableState(incrementalDedupStream) + Assertions.assertAll( + Executable { + Assertions.assertTrue( + tableState.hasUnprocessedRecords, + "After writing some raw records, we should recognize that there are unprocessed records" + ) + }, + Executable { + Assertions.assertTrue( + tableState.maxProcessedTimestamp.isPresent(), + "After writing some raw records, the min timestamp should be present." + ) + } + ) + + executeTypeAndDedupe( + generator, + destinationHandler, + incrementalDedupStream, + tableState.maxProcessedTimestamp, + "" + ) + + verifyRecords( + "sqlgenerator/alltypes_expectedrecords_raw.jsonl", + dumpRawTableRecords(streamId), + "sqlgenerator/alltypes_expectedrecords_final.jsonl", + dumpFinalTableRecords(streamId, "") + ) + } + + /** + * Identical to [.handlePreexistingRecords], but queries for the min timestamp before inserting + * any raw records. This emulates a sync starting with an empty table. + */ + @Test + @Throws(Exception::class) + fun handleNoPreexistingRecords() { + // Add case-sensitive columnName to test json path querying + incrementalDedupStream.columns!!.put( + generator.buildColumnId("IamACaseSensitiveColumnName"), + AirbyteProtocolType.STRING + ) + createRawTable(streamId) + val tableState = getInitialRawTableState(incrementalDedupStream) + Assertions.assertAll( + Executable { + Assertions.assertFalse( + tableState.hasUnprocessedRecords, + "With an empty raw table, we should recognize that there are no unprocessed records" + ) + }, + Executable { + Assertions.assertEquals( + Optional.empty(), + tableState.maxProcessedTimestamp, + "With an empty raw table, the min timestamp should be empty" + ) + } + ) + + createFinalTable(incrementalDedupStream, "") + insertRawTableRecords( + streamId, + BaseTypingDedupingTest.Companion.readRecords("sqlgenerator/alltypes_inputrecords.jsonl") + ) + + executeTypeAndDedupe( + generator, + destinationHandler, + incrementalDedupStream, + tableState.maxProcessedTimestamp, + "" + ) + + verifyRecords( + "sqlgenerator/alltypes_expectedrecords_raw.jsonl", + dumpRawTableRecords(streamId), + "sqlgenerator/alltypes_expectedrecords_final.jsonl", + dumpFinalTableRecords(streamId, "") + ) + } + + /** + * Verify that we correctly only process raw records with recent extracted_at. In practice, + * destinations should not do this - but their SQL should work correctly. + * + * Create two raw records, one with an old extracted_at. Verify that updatedTable only T+Ds the + * new record, and doesn't set loaded_at on the old record. + */ + @Test + @Throws(Exception::class) + fun ignoreOldRawRecords() { + createRawTable(streamId) + createFinalTable(incrementalAppendStream, "") + insertRawTableRecords( + streamId, + java.util.List.of( + Jsons.deserialize( + """ + { + "_airbyte_raw_id": "c5bcae50-962e-4b92-b2eb-1659eae31693", + "_airbyte_extracted_at": "2022-01-01T00:00:00Z", + "_airbyte_data": { + "string": "foo" + } + } + + """.trimIndent() + ), + Jsons.deserialize( + """ + { + "_airbyte_raw_id": "93f1bdd8-1916-4e6c-94dc-29a5d9701179", + "_airbyte_extracted_at": "2023-01-01T01:00:00Z", + "_airbyte_data": { + "string": "bar" + } + } + + """.trimIndent() + ) + ) + ) + + executeTypeAndDedupe( + generator, + destinationHandler, + incrementalAppendStream, + Optional.of(Instant.parse("2023-01-01T00:00:00Z")), + "" + ) + + val rawRecords = dumpRawTableRecords(streamId) + val finalRecords = dumpFinalTableRecords(streamId, "") + Assertions.assertAll( + Executable { + Assertions.assertEquals( + 1, + rawRecords + .stream() + .filter { record: JsonNode -> record["_airbyte_loaded_at"] == null } + .count(), + "Raw table should only have non-null loaded_at on the newer record" + ) + }, + Executable { + Assertions.assertEquals( + 1, + finalRecords.size, + "T+D should only execute on the newer record" + ) + } + ) + } + + /** + * Test JSON Types encounted for a String Type field. + * + * @throws Exception + */ + @Test + @Throws(Exception::class) + fun jsonStringifyTypes() { + createRawTable(streamId) + createFinalTable(incrementalDedupStream, "_foo") + insertRawTableRecords( + streamId, + BaseTypingDedupingTest.Companion.readRecords( + "sqlgenerator/json_types_in_string_inputrecords.jsonl" + ) + ) + executeTypeAndDedupe( + generator, + destinationHandler, + incrementalDedupStream, + Optional.empty(), + "_foo" + ) + verifyRecords( + "sqlgenerator/json_types_in_string_expectedrecords_raw.jsonl", + dumpRawTableRecords(streamId), + "sqlgenerator/json_types_in_string_expectedrecords_final.jsonl", + dumpFinalTableRecords(streamId, "_foo") + ) + } + + @Test + @Throws(Exception::class) + fun timestampFormats() { + createRawTable(streamId) + createFinalTable(incrementalAppendStream, "") + insertRawTableRecords( + streamId, + BaseTypingDedupingTest.Companion.readRecords( + "sqlgenerator/timestampformats_inputrecords.jsonl" + ) + ) + + executeTypeAndDedupe( + generator, + destinationHandler, + incrementalAppendStream, + Optional.empty(), + "" + ) + + DIFFER!!.diffFinalTableRecords( + BaseTypingDedupingTest.Companion.readRecords( + "sqlgenerator/timestampformats_expectedrecords_final.jsonl" + ), + dumpFinalTableRecords(streamId, "") + ) + } + + @Test + @Throws(Exception::class) + fun incrementalDedup() { + createRawTable(streamId) + createFinalTable(incrementalDedupStream, "") + insertRawTableRecords( + streamId, + BaseTypingDedupingTest.Companion.readRecords( + "sqlgenerator/incrementaldedup_inputrecords.jsonl" + ) + ) + + executeTypeAndDedupe( + generator, + destinationHandler, + incrementalDedupStream, + Optional.empty(), + "" + ) + + verifyRecords( + "sqlgenerator/incrementaldedup_expectedrecords_raw.jsonl", + dumpRawTableRecords(streamId), + "sqlgenerator/incrementaldedup_expectedrecords_final.jsonl", + dumpFinalTableRecords(streamId, "") + ) + } + + /** + * We shouldn't crash on a sync with null cursor. Insert two records and verify that we keep the + * record with higher extracted_at. + */ + @Test + @Throws(Exception::class) + fun incrementalDedupNoCursor() { + val streamConfig = + StreamConfig( + streamId, + SyncMode.INCREMENTAL, + DestinationSyncMode.APPEND_DEDUP, + primaryKey, + Optional.empty(), + COLUMNS + ) + createRawTable(streamId) + createFinalTable(streamConfig, "") + insertRawTableRecords( + streamId, + java.util.List.of( + Jsons.deserialize( + """ + { + "_airbyte_raw_id": "c5bcae50-962e-4b92-b2eb-1659eae31693", + "_airbyte_extracted_at": "2023-01-01T00:00:00Z", + "_airbyte_data": { + "id1": 1, + "id2": 100, + "string": "foo" + } + } + + """.trimIndent() + ), + Jsons.deserialize( + """ + { + "_airbyte_raw_id": "93f1bdd8-1916-4e6c-94dc-29a5d9701179", + "_airbyte_extracted_at": "2023-01-01T01:00:00Z", + "_airbyte_data": { + "id1": 1, + "id2": 100, + "string": "bar" + } + } + + """.trimIndent() + ) + ) + ) + + executeTypeAndDedupe(generator, destinationHandler, streamConfig, Optional.empty(), "") + + val actualRawRecords = dumpRawTableRecords(streamId) + val actualFinalRecords = dumpFinalTableRecords(streamId, "") + verifyRecordCounts(2, actualRawRecords, 1, actualFinalRecords) + Assertions.assertEquals( + "bar", + actualFinalRecords[0][generator.buildColumnId("string").name].asText() + ) + } + + @Test + @Throws(Exception::class) + fun incrementalAppend() { + createRawTable(streamId) + createFinalTable(incrementalAppendStream, "") + insertRawTableRecords( + streamId, + BaseTypingDedupingTest.Companion.readRecords( + "sqlgenerator/incrementaldedup_inputrecords.jsonl" + ) + ) + + executeTypeAndDedupe( + generator, + destinationHandler, + incrementalAppendStream, + Optional.empty(), + "" + ) + + verifyRecordCounts(3, dumpRawTableRecords(streamId), 3, dumpFinalTableRecords(streamId, "")) + } + + /** + * Create a nonempty users_final_tmp table. Overwrite users_final from users_final_tmp. Verify + * that users_final now exists and contains nonzero records. + */ + @Test + @Throws(Exception::class) + fun overwriteFinalTable() { + createFinalTable(incrementalAppendStream, "_tmp") + val records = + listOf( + Jsons.deserialize( + """ + { + "_airbyte_raw_id": "4fa4efe2-3097-4464-bd22-11211cc3e15b", + "_airbyte_extracted_at": "2023-01-01T00:00:00Z", + "_airbyte_meta": {} + } + + """.trimIndent() + ) + ) + insertFinalTableRecords(false, streamId, "_tmp", records) + + val sql = generator.overwriteFinalTable(streamId, "_tmp") + destinationHandler.execute(sql) + + Assertions.assertEquals(1, dumpFinalTableRecords(streamId, "").size) + } + + @Test + @Throws(Exception::class) + fun cdcImmediateDeletion() { + createRawTable(streamId) + createFinalTable(cdcIncrementalDedupStream, "") + insertRawTableRecords( + streamId, + listOf( + Jsons.deserialize( + """ + { + "_airbyte_raw_id": "4fa4efe2-3097-4464-bd22-11211cc3e15b", + "_airbyte_extracted_at": "2023-01-01T00:00:00Z", + "_airbyte_data": { + "id1": 1, + "id2": 100, + "updated_at": "2023-01-01T00:00:00Z", + "_ab_cdc_deleted_at": "2023-01-01T00:01:00Z" + } + } + + """.trimIndent() + ) + ) + ) + + executeTypeAndDedupe( + generator, + destinationHandler, + cdcIncrementalDedupStream, + Optional.empty(), + "" + ) + + verifyRecordCounts(1, dumpRawTableRecords(streamId), 0, dumpFinalTableRecords(streamId, "")) + } + + /** + * Verify that running T+D twice is idempotent. Previously there was a bug where non-dedup syncs + * with an _ab_cdc_deleted_at column would duplicate "deleted" records on each run. + */ + @Test + @Throws(Exception::class) + fun cdcIdempotent() { + createRawTable(streamId) + createFinalTable(cdcIncrementalAppendStream, "") + insertRawTableRecords( + streamId, + listOf( + Jsons.deserialize( + """ + { + "_airbyte_raw_id": "4fa4efe2-3097-4464-bd22-11211cc3e15b", + "_airbyte_extracted_at": "2023-01-01T00:00:00Z", + "_airbyte_data": { + "id1": 1, + "id2": 100, + "updated_at": "2023-01-01T00:00:00Z", + "_ab_cdc_deleted_at": "2023-01-01T00:01:00Z" + } + } + + """.trimIndent() + ) + ) + ) + + // Execute T+D twice + executeTypeAndDedupe( + generator, + destinationHandler, + cdcIncrementalAppendStream, + Optional.empty(), + "" + ) + executeTypeAndDedupe( + generator, + destinationHandler, + cdcIncrementalAppendStream, + Optional.empty(), + "" + ) + + verifyRecordCounts(1, dumpRawTableRecords(streamId), 1, dumpFinalTableRecords(streamId, "")) + } + + @Test + @Throws(Exception::class) + fun cdcComplexUpdate() { + createRawTable(streamId) + createFinalTable(cdcIncrementalDedupStream, "") + insertRawTableRecords( + streamId, + BaseTypingDedupingTest.Companion.readRecords( + "sqlgenerator/cdcupdate_inputrecords_raw.jsonl" + ) + ) + insertFinalTableRecords( + true, + streamId, + "", + BaseTypingDedupingTest.Companion.readRecords( + "sqlgenerator/cdcupdate_inputrecords_final.jsonl" + ) + ) + + executeTypeAndDedupe( + generator, + destinationHandler, + cdcIncrementalDedupStream, + Optional.empty(), + "" + ) + + verifyRecordCounts( + 11, + dumpRawTableRecords(streamId), + 6, + dumpFinalTableRecords(streamId, "") + ) + } + + /** + * source operations: + * + * 1. insert id=1 (lsn 10000) + * 1. delete id=1 (lsn 10001) + * + * But the destination writes lsn 10001 before 10000. We should still end up with no records in + * the final table. + * + * All records have the same emitted_at timestamp. This means that we live or die purely based + * on our ability to use _ab_cdc_lsn. + */ + @Test + @Throws(Exception::class) + fun testCdcOrdering_updateAfterDelete() { + createRawTable(streamId) + createFinalTable(cdcIncrementalDedupStream, "") + insertRawTableRecords( + streamId, + BaseTypingDedupingTest.Companion.readRecords( + "sqlgenerator/cdcordering_updateafterdelete_inputrecords.jsonl" + ) + ) + + val tableState = getInitialRawTableState(cdcIncrementalDedupStream) + executeTypeAndDedupe( + generator, + destinationHandler, + cdcIncrementalDedupStream, + tableState.maxProcessedTimestamp, + "" + ) + + verifyRecordCounts(2, dumpRawTableRecords(streamId), 0, dumpFinalTableRecords(streamId, "")) + } + + /** + * source operations: + * + * 1. arbitrary history... + * 1. delete id=1 (lsn 10001) + * 1. reinsert id=1 (lsn 10002) + * + * But the destination receives LSNs 10002 before 10001. In this case, we should keep the + * reinserted record in the final table. + * + * All records have the same emitted_at timestamp. This means that we live or die purely based + * on our ability to use _ab_cdc_lsn. + */ + @Test + @Throws(Exception::class) + fun testCdcOrdering_insertAfterDelete() { + createRawTable(streamId) + createFinalTable(cdcIncrementalDedupStream, "") + insertRawTableRecords( + streamId, + BaseTypingDedupingTest.Companion.readRecords( + "sqlgenerator/cdcordering_insertafterdelete_inputrecords_raw.jsonl" + ) + ) + insertFinalTableRecords( + true, + streamId, + "", + BaseTypingDedupingTest.Companion.readRecords( + "sqlgenerator/cdcordering_insertafterdelete_inputrecords_final.jsonl" + ) + ) + + val tableState = getInitialRawTableState(cdcIncrementalAppendStream) + executeTypeAndDedupe( + generator, + destinationHandler, + cdcIncrementalDedupStream, + tableState.maxProcessedTimestamp, + "" + ) + verifyRecordCounts(2, dumpRawTableRecords(streamId), 1, dumpFinalTableRecords(streamId, "")) + } + + /** + * Create a table which includes the _ab_cdc_deleted_at column, then soft reset it using the + * non-cdc stream config. Verify that the deleted_at column gets dropped. + */ + @Test + @Throws(Exception::class) + fun softReset() { + createRawTable(streamId) + createFinalTable(cdcIncrementalAppendStream, "") + insertRawTableRecords( + streamId, + listOf( + Jsons.deserialize( + """ + { + "_airbyte_raw_id": "arst", + "_airbyte_extracted_at": "2023-01-01T00:00:00Z", + "_airbyte_loaded_at": "2023-01-01T00:00:00Z", + "_airbyte_data": { + "id1": 1, + "id2": 100, + "_ab_cdc_deleted_at": "2023-01-01T00:01:00Z" + } + } + + """.trimIndent() + ) + ) + ) + insertFinalTableRecords( + true, + streamId, + "", + listOf( + Jsons.deserialize( + """ + { + "_airbyte_raw_id": "arst", + "_airbyte_extracted_at": "2023-01-01T00:00:00Z", + "_airbyte_meta": {}, + "id1": 1, + "id2": 100, + "_ab_cdc_deleted_at": "2023-01-01T00:01:00Z" + } + + """.trimIndent() + ) + ) + ) + + executeSoftReset(generator, destinationHandler, incrementalAppendStream) + + val actualRawRecords = dumpRawTableRecords(streamId) + val actualFinalRecords = dumpFinalTableRecords(streamId, "") + Assertions.assertAll( + Executable { Assertions.assertEquals(1, actualRawRecords.size) }, + Executable { Assertions.assertEquals(1, actualFinalRecords.size) }, + Executable { + Assertions.assertTrue( + actualFinalRecords.stream().noneMatch { record: JsonNode -> + record.has("_ab_cdc_deleted_at") + }, + "_ab_cdc_deleted_at column was expected to be dropped. Actual final table had: $actualFinalRecords" + ) + } + ) + } + + @Test + @Throws(Exception::class) + fun weirdColumnNames() { + createRawTable(streamId) + insertRawTableRecords( + streamId, + BaseTypingDedupingTest.Companion.readRecords( + "sqlgenerator/weirdcolumnnames_inputrecords_raw.jsonl" + ) + ) + val stream = + StreamConfig( + streamId, + SyncMode.INCREMENTAL, + DestinationSyncMode.APPEND_DEDUP, + primaryKey, + Optional.of(cursor), + linkedMapOf( + generator.buildColumnId("id1") to AirbyteProtocolType.INTEGER, + generator.buildColumnId("id2") to AirbyteProtocolType.INTEGER, + generator.buildColumnId("updated_at") to + AirbyteProtocolType.TIMESTAMP_WITH_TIMEZONE, + generator.buildColumnId("\$starts_with_dollar_sign") to + AirbyteProtocolType.STRING, + generator.buildColumnId("includes\"doublequote") to AirbyteProtocolType.STRING, + generator.buildColumnId("includes'singlequote") to AirbyteProtocolType.STRING, + generator.buildColumnId("includes`backtick") to AirbyteProtocolType.STRING, + generator.buildColumnId("includes.period") to AirbyteProtocolType.STRING, + generator.buildColumnId("includes$\$doubledollar") to + AirbyteProtocolType.STRING, + generator.buildColumnId("endswithbackslash\\") to AirbyteProtocolType.STRING + ) + ) + + val createTable = generator.createTable(stream, "", false) + destinationHandler.execute(createTable) + executeTypeAndDedupe(generator, destinationHandler, stream, Optional.empty(), "") + + verifyRecords( + "sqlgenerator/weirdcolumnnames_expectedrecords_raw.jsonl", + dumpRawTableRecords(streamId), + "sqlgenerator/weirdcolumnnames_expectedrecords_final.jsonl", + dumpFinalTableRecords(streamId, "") + ) + } + + /** + * Verify that we don't crash when there are special characters in the stream namespace, name, + * primary key, or cursor. + */ + @ParameterizedTest + @ValueSource( + strings = ["$", "\${", "\${\${", "\${foo}", "\"", "'", "`", ".", "$$", "\\", "{", "}"] + ) + @Throws(Exception::class) + fun noCrashOnSpecialCharacters(specialChars: String) { + val str = specialChars + "_" + namespace + "_" + specialChars + val originalStreamId = generator.buildStreamId(str, str, "unused") + val modifiedStreamId = + buildStreamId( + originalStreamId.finalNamespace!!, + originalStreamId.finalName!!, + "raw_table" + ) + val columnId = generator.buildColumnId(str) + try { + createNamespace(modifiedStreamId.finalNamespace) + createRawTable(modifiedStreamId) + insertRawTableRecords( + modifiedStreamId, + java.util.List.of( + Jsons.jsonNode( + java.util.Map.of( + "_airbyte_raw_id", + "758989f2-b148-4dd3-8754-30d9c17d05fb", + "_airbyte_extracted_at", + "2023-01-01T00:00:00Z", + "_airbyte_data", + java.util.Map.of(str, "bar") + ) + ) + ) + ) + val stream = + StreamConfig( + modifiedStreamId, + SyncMode.INCREMENTAL, + DestinationSyncMode.APPEND_DEDUP, + java.util.List.of(columnId), + Optional.of(columnId), + linkedMapOf(columnId to AirbyteProtocolType.STRING) + ) + + val createTable = generator.createTable(stream, "", false) + destinationHandler.execute(createTable) + // Not verifying anything about the data; let's just make sure we don't crash. + executeTypeAndDedupe(generator, destinationHandler, stream, Optional.empty(), "") + } finally { + teardownNamespace(modifiedStreamId.finalNamespace) + } + } + + /** + * Verify column names that are reserved keywords are handled successfully. Each destination + * should always have at least 1 column in the record data that is a reserved keyword. + */ + @Test + @Throws(Exception::class) + fun testReservedKeywords() { + createRawTable(streamId) + insertRawTableRecords( + streamId, + BaseTypingDedupingTest.Companion.readRecords( + "sqlgenerator/reservedkeywords_inputrecords_raw.jsonl" + ) + ) + val stream = + StreamConfig( + streamId, + SyncMode.INCREMENTAL, + DestinationSyncMode.APPEND, + emptyList(), + Optional.empty(), + linkedMapOf( + generator.buildColumnId("current_date") to AirbyteProtocolType.STRING, + generator.buildColumnId("join") to AirbyteProtocolType.STRING + ) + ) + + val createTable = generator.createTable(stream, "", false) + destinationHandler.execute(createTable) + executeTypeAndDedupe(generator, destinationHandler, stream, Optional.empty(), "") + + DIFFER!!.diffFinalTableRecords( + BaseTypingDedupingTest.Companion.readRecords( + "sqlgenerator/reservedkeywords_expectedrecords_final.jsonl" + ), + dumpFinalTableRecords(streamId, "") + ) + } + + /** + * A stream with no columns is weird, but we shouldn't treat it specially in any way. It should + * create a final table as usual, and populate it with the relevant metadata columns. + */ + @Test + @Throws(Exception::class) + fun noColumns() { + createRawTable(streamId) + insertRawTableRecords( + streamId, + java.util.List.of( + Jsons.deserialize( + """ + { + "_airbyte_raw_id": "14ba7c7f-e398-4e69-ac22-28d578400dbc", + "_airbyte_extracted_at": "2023-01-01T00:00:00Z", + "_airbyte_data": {} + } + + """.trimIndent() + ) + ) + ) + val stream = + StreamConfig( + streamId, + SyncMode.INCREMENTAL, + DestinationSyncMode.APPEND, + emptyList(), + Optional.empty(), + LinkedHashMap() + ) + + val createTable = generator.createTable(stream, "", false) + destinationHandler.execute(createTable) + executeTypeAndDedupe(generator, destinationHandler, stream, Optional.empty(), "") + + verifyRecords( + "sqlgenerator/nocolumns_expectedrecords_raw.jsonl", + dumpRawTableRecords(streamId), + "sqlgenerator/nocolumns_expectedrecords_final.jsonl", + dumpFinalTableRecords(streamId, "") + ) + } + + @Test + @Throws(Exception::class) + fun testV1V2migration() { + // This is maybe a little hacky, but it avoids having to refactor this entire class and + // subclasses + // for something that is going away + // Add case-sensitive columnName to test json path querying + incrementalDedupStream.columns!!.put( + generator.buildColumnId("IamACaseSensitiveColumnName"), + AirbyteProtocolType.STRING + ) + val v1RawTableStreamId = + StreamId(null, null, streamId.finalNamespace, "v1_" + streamId.rawName, null, null) + createV1RawTable(v1RawTableStreamId) + insertV1RawTableRecords( + v1RawTableStreamId, + BaseTypingDedupingTest.Companion.readRecords( + "sqlgenerator/all_types_v1_inputrecords.jsonl" + ) + ) + val migration = + generator.migrateFromV1toV2( + streamId, + v1RawTableStreamId.rawNamespace, + v1RawTableStreamId.rawName + ) + destinationHandler.execute(migration) + val v1RawRecords = dumpV1RawTableRecords(v1RawTableStreamId) + val v2RawRecords = dumpRawTableRecords(streamId) + migrationAssertions(v1RawRecords, v2RawRecords) + + // And then run T+D on the migrated raw data + val createTable = generator.createTable(incrementalDedupStream, "", false) + destinationHandler.execute(createTable) + val updateTable = generator.updateTable(incrementalDedupStream, "", Optional.empty(), true) + destinationHandler.execute(updateTable) + verifyRecords( + "sqlgenerator/alltypes_v1v2_expectedrecords_raw.jsonl", + dumpRawTableRecords(streamId), + "sqlgenerator/alltypes_v1v2_expectedrecords_final.jsonl", + dumpFinalTableRecords(streamId, "") + ) + } + + /** + * Sometimes, a sync doesn't delete its soft reset temp table. (it's not entirely clear why this + * happens.) In these cases, the next sync should not crash. + */ + @Test + @Throws(Exception::class) + fun softResetIgnoresPreexistingTempTable() { + createRawTable(incrementalDedupStream.id) + + // Create a soft reset table. Use incremental append mode, in case the destination connector + // uses + // different + // indexing/partitioning/etc. + val createOldTempTable = + generator.createTable( + incrementalDedupStream, + TypeAndDedupeTransaction.SOFT_RESET_SUFFIX, + false + ) + destinationHandler.execute(createOldTempTable) + + // Execute a soft reset. This should not crash. + executeSoftReset(generator, destinationHandler, incrementalAppendStream) + } + + protected fun migrationAssertions(v1RawRecords: List, v2RawRecords: List) { + val v2RecordMap = + v2RawRecords + .stream() + .collect( + Collectors.toMap( + { record: JsonNode -> record["_airbyte_raw_id"].asText() }, + Function.identity() + ) + ) + Assertions.assertAll( + Executable { Assertions.assertEquals(6, v1RawRecords.size) }, + Executable { Assertions.assertEquals(6, v2RawRecords.size) } + ) + v1RawRecords.forEach( + Consumer { v1Record: JsonNode -> + val v1id = v1Record["_airbyte_ab_id"].asText() + Assertions.assertAll( + Executable { + Assertions.assertEquals( + v1id, + v2RecordMap[v1id]!!["_airbyte_raw_id"].asText() + ) + }, + Executable { + Assertions.assertEquals( + v1Record["_airbyte_emitted_at"].asText(), + v2RecordMap[v1id]!!["_airbyte_extracted_at"].asText() + ) + }, + Executable { Assertions.assertNull(v2RecordMap[v1id]!!["_airbyte_loaded_at"]) } + ) + var originalData = v1Record["_airbyte_data"] + if (originalData.isTextual) { + originalData = Jsons.deserializeExact(originalData.asText()) + } + var migratedData = v2RecordMap[v1id]!!["_airbyte_data"] + if (migratedData.isTextual) { + migratedData = Jsons.deserializeExact(migratedData.asText()) + } + // hacky thing because we only care about the data contents. + // diffRawTableRecords makes some assumptions about the structure of the blob. + DIFFER!!.diffFinalTableRecords( + java.util.List.of(originalData), + java.util.List.of(migratedData) + ) + } + ) + } + + @Throws(Exception::class) + protected fun dumpV1RawTableRecords(streamId: StreamId): List { + return dumpRawTableRecords(streamId) + } + + @Test + @Throws(Exception::class) + fun testCreateTableForce() { + val createTableNoForce = generator.createTable(incrementalDedupStream, "", false) + val createTableForce = generator.createTable(incrementalDedupStream, "", true) + + destinationHandler.execute(createTableNoForce) + Assertions.assertThrows(Exception::class.java) { + destinationHandler.execute(createTableNoForce) + } + // This should not throw an exception + destinationHandler.execute(createTableForce) + // This method call ensures assertion than finalTable exists + getDestinationInitialState(incrementalDedupStream) + } + + @Test + @Throws(Exception::class) + fun testStateHandling() { + // Fetch state from an empty destination. This should not throw an error. + val initialState = + destinationHandler + .gatherInitialState(java.util.List.of((incrementalDedupStream))) + .first() + // The initial state should not need a soft reset. + Assertions.assertFalse( + initialState.destinationState.needsSoftReset(), + "Empty state table should have needsSoftReset = false" + ) + + // Commit a state that now requires a soft reset. + destinationHandler.commitDestinationStates( + java.util.Map.of( + incrementalDedupStream.id, + initialState.destinationState.withSoftReset(true) + ) + ) + val updatedState = + destinationHandler + .gatherInitialState(java.util.List.of((incrementalDedupStream))) + .first() + // When we re-fetch the state, it should now need a soft reset. + Assertions.assertTrue( + updatedState.destinationState.needsSoftReset(), + "After committing an explicit state, expected needsSoftReset = true" + ) + + // Commit a state belonging to a different stream + destinationHandler.commitDestinationStates( + java.util.Map.of( + StreamId(null, null, null, null, null, "some_other_stream"), + initialState.destinationState.withSoftReset(true) + ) + ) + + // Verify that we can still retrieve the state for the original stream + val refetchedState = + destinationHandler + .gatherInitialState(java.util.List.of((incrementalDedupStream))) + .first() + // When we re-fetch the state, it should now need a soft reset. + Assertions.assertTrue( + refetchedState.destinationState.needsSoftReset(), + "After committing an unrelated state, expected needsSoftReset = true" + ) + } + + @Test + fun testLongIdentifierHandling() { + val randomSuffix = Strings.addRandomSuffix("", "_", 5) + val rawNamespace = "a".repeat(512) + randomSuffix + val finalNamespace = "b".repeat(512) + randomSuffix + val streamName = "c".repeat(512) + randomSuffix + // Limiting to total 127 column length for redshift. Postgres is 63. + // Move it down if BigQuery / Snowflake complains. + val baseColumnName = "d".repeat(120) + randomSuffix + val columnName1 = baseColumnName + "1" + val columnName2 = baseColumnName + "2" + + val catalogParser = CatalogParser(generator, rawNamespace) + val stream = + catalogParser + .parseCatalog( + ConfiguredAirbyteCatalog() + .withStreams( + listOf( + ConfiguredAirbyteStream() + .withStream( + AirbyteStream() + .withName(streamName) + .withNamespace(finalNamespace) + .withJsonSchema( + Jsons.jsonNode( + mapOf( + "type" to "object", + "properties" to + mapOf( + columnName1 to + mapOf("type" to "string"), + columnName2 to + mapOf("type" to "string") + ) + ) + ) + ) + ) + .withSyncMode(SyncMode.INCREMENTAL) + .withDestinationSyncMode(DestinationSyncMode.APPEND) + ) + ) + ) + .streams[0] + + val streamId = stream.id + val columnId1: ColumnId = + stream.columns?.filter { columnName1 == it.key.originalName }?.keys?.first()!! + val columnId2: ColumnId = + stream.columns?.filter { columnName2 == it.key.originalName }?.keys?.first()!! + LOGGER.info("Trying to use column names {} and {}", columnId1.name, columnId2.name) + + try { + createNamespace(rawNamespace) + createNamespace(finalNamespace) + createRawTable(streamId) + insertRawTableRecords( + streamId, + listOf( + Jsons.jsonNode( + mapOf( + "_airbyte_raw_id" to "ad3e8c84-e02e-4df4-b146-3d5a007b21b4", + "_airbyte_extracted_at" to "2023-01-01T00:00:00Z", + "_airbyte_data" to mapOf(columnName1 to "foo", columnName2 to "bar") + ) + ) + ) + ) + + val createTable = generator.createTable(stream, "", false) + destinationHandler.execute(createTable) + executeTypeAndDedupe(generator, destinationHandler, stream, Optional.empty(), "") + + val rawRecords = dumpRawTableRecords(streamId) + val finalRecords = dumpFinalTableRecords(streamId, "") + LOGGER.info("Dumped raw records: {}", rawRecords) + LOGGER.info("Dumped final records: {}", finalRecords) + assertAll( + { Assertions.assertEquals(1, rawRecords.size) }, + { Assertions.assertEquals(1, finalRecords.size) }, + // Assume that if we can find the values in the final table, that everything looks + // right :shrug: + { Assertions.assertEquals("foo", finalRecords[0].get(columnId1.name).asText()) }, + { Assertions.assertEquals("bar", finalRecords[0].get(columnId2.name).asText()) } + ) + } finally { + // do this manually b/c we're using a weird namespace that won't get handled by the + // @AfterEach method + teardownNamespace(rawNamespace) + teardownNamespace(finalNamespace) + } + } + + @Throws(Exception::class) + protected fun createFinalTable(stream: StreamConfig, suffix: String) { + val createTable = generator.createTable(stream, suffix, false) + destinationHandler.execute(createTable) + } + + private fun verifyRecords( + expectedRawRecordsFile: String, + actualRawRecords: List, + expectedFinalRecordsFile: String, + actualFinalRecords: List + ) { + Assertions.assertAll( + Executable { + DIFFER!!.diffRawTableRecords( + BaseTypingDedupingTest.Companion.readRecords(expectedRawRecordsFile), + actualRawRecords + ) + }, + Executable { + Assertions.assertEquals( + 0, + actualRawRecords + .stream() + .filter { record: JsonNode -> !record.hasNonNull("_airbyte_loaded_at") } + .count() + ) + }, + Executable { + DIFFER!!.diffFinalTableRecords( + BaseTypingDedupingTest.Companion.readRecords(expectedFinalRecordsFile), + actualFinalRecords + ) + } + ) + } + + private fun verifyRecordCounts( + expectedRawRecords: Int, + actualRawRecords: List, + expectedFinalRecords: Int, + actualFinalRecords: List + ) { + Assertions.assertAll( + Executable { + Assertions.assertEquals( + expectedRawRecords, + actualRawRecords.size, + "Raw record count was incorrect" + ) + }, + Executable { + Assertions.assertEquals( + 0, + actualRawRecords + .stream() + .filter { record: JsonNode -> !record.hasNonNull("_airbyte_loaded_at") } + .count() + ) + }, + Executable { + Assertions.assertEquals( + expectedFinalRecords, + actualFinalRecords.size, + "Final record count was incorrect" + ) + } + ) + } + + companion object { + private val LOGGER: Logger = + LoggerFactory.getLogger(BaseSqlGeneratorIntegrationTest::class.java) + + /** + * This, along with [.FINAL_TABLE_COLUMN_NAMES_CDC], is the list of columns that should be + * in the final table. They're useful for generating SQL queries to insert records into the + * final table. + */ + @JvmField + val FINAL_TABLE_COLUMN_NAMES: List = + listOf( + "_airbyte_raw_id", + "_airbyte_extracted_at", + "_airbyte_meta", + "id1", + "id2", + "updated_at", + "struct", + "array", + "string", + "number", + "integer", + "boolean", + "timestamp_with_timezone", + "timestamp_without_timezone", + "time_with_timezone", + "time_without_timezone", + "date", + "unknown" + ) + @JvmField + val FINAL_TABLE_COLUMN_NAMES_CDC: List = + Streams.concat(FINAL_TABLE_COLUMN_NAMES.stream(), Stream.of("_ab_cdc_deleted_at")) + .toList() + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/testFixtures/kotlin/io/airbyte/integrations/base/destination/typing_deduping/BaseTypingDedupingTest.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/testFixtures/kotlin/io/airbyte/integrations/base/destination/typing_deduping/BaseTypingDedupingTest.kt new file mode 100644 index 0000000000000..c07461aa95676 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/testFixtures/kotlin/io/airbyte/integrations/base/destination/typing_deduping/BaseTypingDedupingTest.kt @@ -0,0 +1,1137 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.node.ObjectNode +import com.google.common.collect.ImmutableMap +import io.airbyte.commons.features.EnvVariableFeatureFlags +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.lang.Exceptions +import io.airbyte.commons.resources.MoreResources +import io.airbyte.configoss.WorkerDestinationConfig +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog +import io.airbyte.protocol.models.v0.* +import io.airbyte.workers.internal.AirbyteDestination +import io.airbyte.workers.internal.DefaultAirbyteDestination +import io.airbyte.workers.process.AirbyteIntegrationLauncher +import io.airbyte.workers.process.DockerProcessFactory +import io.airbyte.workers.process.ProcessFactory +import java.io.IOException +import java.nio.file.Files +import java.nio.file.Path +import java.util.* +import java.util.concurrent.Callable +import java.util.concurrent.CompletableFuture +import java.util.concurrent.Executors +import java.util.function.Consumer +import java.util.function.Function +import java.util.stream.Collectors +import java.util.stream.Stream +import org.apache.commons.lang3.RandomStringUtils +import org.junit.jupiter.api.* +import org.junit.jupiter.api.function.Executable +import org.junit.jupiter.api.parallel.Execution +import org.junit.jupiter.api.parallel.ExecutionMode +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * This is loosely based on standard-destination-tests's DestinationAcceptanceTest class. The + * sync-running code is copy-pasted from there. + * + * All tests use a single stream, whose schema is defined in `resources/schema.json`. Each test case + * constructs a ConfiguredAirbyteCatalog dynamically. + * + * For sync modes which use a primary key, the stream provides a composite key of (id1, id2). For + * sync modes which use a cursor, the stream provides an updated_at field. The stream also has an + * _ab_cdc_deleted_at field. + */ +// If you're running from inside intellij, you must run your specific subclass to get concurrent +// execution. +@Execution(ExecutionMode.CONCURRENT) +abstract class BaseTypingDedupingTest { + protected var DIFFER: RecordDiffer? = null + + private var randomSuffix: String? = null + protected var config: JsonNode? = null + private set + protected var streamNamespace: String? = null + protected var streamName: String = "dummy" + private var streamsToTearDown: MutableList? = null + + protected abstract val imageName: String + /** @return the docker image to run, e.g. `"airbyte/destination-bigquery:dev"`. */ + get + + /** + * Get the destination connector config. Subclasses may use this method for other setup work, + * e.g. opening a connection to the destination. + * + * Subclasses should _not_ start testcontainers in this method; that belongs in a BeforeAll + * method. The tests in this class are intended to be run concurrently on a shared database and + * will not interfere with each other. + * + * Sublcasses which need access to the config may use [.getConfig]. + */ + @Throws(Exception::class) protected abstract fun generateConfig(): JsonNode? + + /** + * For a given stream, return the records that exist in the destination's raw table. Each record + * must be in the format {"_airbyte_raw_id": "...", "_airbyte_extracted_at": "...", + * "_airbyte_loaded_at": "...", "_airbyte_data": {fields...}}. + * + * The `_airbyte_data` column must be an [com.fasterxml.jackson.databind.node.ObjectNode] (i.e. + * it cannot be a string value). + * + * streamNamespace may be null, in which case you should query from the default namespace. + */ + @Throws(Exception::class) + protected abstract fun dumpRawTableRecords( + streamNamespace: String?, + streamName: String + ): List + + /** + * Utility method for tests to check if table exists + * + * @param streamNamespace + * @param streamName + * @return + * @throws Exception + */ + protected fun checkTableExists(streamNamespace: String?, streamName: String?): Boolean { + // Implementation is specific to destination's tests. + return true + } + + /** + * For a given stream, return the records that exist in the destination's final table. Each + * record must be in the format {"_airbyte_raw_id": "...", "_airbyte_extracted_at": "...", + * "_airbyte_meta": {...}, "field1": ..., "field2": ..., ...}. If the destination renames (e.g. + * upcases) the airbyte fields, this method must revert that naming to use the exact strings + * "_airbyte_raw_id", etc. + * + * For JSON-valued columns, there is some nuance: a SQL null should be represented as a missing + * entry, whereas a JSON null should be represented as a + * [com.fasterxml.jackson.databind.node.NullNode]. For example, in the JSON blob {"name": null}, + * the `name` field is a JSON null, and the `address` field is a SQL null. + * + * The corresponding SQL looks like `INSERT INTO ... (name, address) VALUES ('null' :: jsonb, + * NULL)`. + * + * streamNamespace may be null, in which case you should query from the default namespace. + */ + @Throws(Exception::class) + abstract fun dumpFinalTableRecords(streamNamespace: String?, streamName: String): List + + /** + * Delete any resources in the destination associated with this stream AND its namespace. We + * need this because we write raw tables to a shared `airbyte` namespace, which we can't drop + * wholesale. Must handle the case where the table/namespace doesn't exist (e.g. if the + * connector crashed without writing any data). + * + * In general, this should resemble `DROP TABLE IF EXISTS + * airbyte._; DROP SCHEMA IF EXISTS `. + */ + @Throws(Exception::class) + protected abstract fun teardownStreamAndNamespace(streamNamespace: String?, streamName: String) + + protected abstract val sqlGenerator: SqlGenerator + get + + /** + * Destinations which need to clean up resources after an entire test finishes should override + * this method. For example, if you want to gracefully close a database connection, you should + * do that here. + */ + @Throws(Exception::class) protected open fun globalTeardown() {} + + val rawMetadataColumnNames: Map + /** Conceptually identical to [.getFinalMetadataColumnNames], but for the raw table. */ + get() = HashMap() + + val finalMetadataColumnNames: Map + /** + * If the destination connector uses a nonstandard schema for the final table, override this + * method. For example, destination-snowflake upcases all column names in the final tables. + * + * You only need to add mappings for the airbyte metadata column names (_airbyte_raw_id, + * _airbyte_extracted_at, etc.). The test framework automatically populates mappings for the + * primary key and cursor using the SqlGenerator. + */ + get() = HashMap() + + @get:Synchronized + protected val uniqueSuffix: String + /** + * @return A suffix which is different for each concurrent test, but stable within a single + * test. + */ + get() { + if (randomSuffix == null) { + randomSuffix = + "_" + RandomStringUtils.randomAlphabetic(10).lowercase(Locale.getDefault()) + } + return randomSuffix!! + } + + /** + * Override this method only when skipping T&D and only compare raw tables and skip final table + * comparison. For every other case it should always return false. + * + * @return + */ + protected open fun disableFinalTableComparison(): Boolean { + return false + } + + @BeforeEach + @Throws(Exception::class) + fun setup() { + config = generateConfig() + streamNamespace = "typing_deduping_test" + uniqueSuffix + streamName = "test_stream" + uniqueSuffix + streamsToTearDown = ArrayList() + + val generator = sqlGenerator + DIFFER = + RecordDiffer( + rawMetadataColumnNames, + finalMetadataColumnNames, + generator.buildColumnId("id1") to AirbyteProtocolType.INTEGER, + generator.buildColumnId("id2") to AirbyteProtocolType.INTEGER, + generator.buildColumnId("updated_at") to + AirbyteProtocolType.TIMESTAMP_WITH_TIMEZONE, + generator.buildColumnId("old_cursor") to AirbyteProtocolType.INTEGER + ) + + LOGGER.info("Using stream namespace {} and name {}", streamNamespace, streamName) + } + + @AfterEach + @Throws(Exception::class) + fun teardown() { + for (streamId in streamsToTearDown!!) { + teardownStreamAndNamespace(streamId.namespace, streamId.name) + } + globalTeardown() + } + + /** + * Starting with an empty destination, execute a full refresh overwrite sync. Verify that the + * records are written to the destination table. Then run a second sync, and verify that the + * records are overwritten. + */ + @Test + @Throws(Exception::class) + fun fullRefreshOverwrite() { + val catalog = + io.airbyte.protocol.models.v0 + .ConfiguredAirbyteCatalog() + .withStreams( + java.util.List.of( + ConfiguredAirbyteStream() + .withSyncMode(SyncMode.FULL_REFRESH) + .withDestinationSyncMode(DestinationSyncMode.OVERWRITE) + .withStream( + AirbyteStream() + .withNamespace(streamNamespace) + .withName(streamName) + .withJsonSchema(SCHEMA) + ) + ) + ) + + // First sync + val messages1 = readMessages("dat/sync1_messages.jsonl") + + runSync(catalog, messages1) + + val expectedRawRecords1 = readRecords("dat/sync1_expectedrecords_raw.jsonl") + val expectedFinalRecords1 = readRecords("dat/sync1_expectedrecords_nondedup_final.jsonl") + verifySyncResult(expectedRawRecords1, expectedFinalRecords1, disableFinalTableComparison()) + + // Second sync + val messages2 = readMessages("dat/sync2_messages.jsonl") + + runSync(catalog, messages2) + + val expectedRawRecords2 = + readRecords("dat/sync2_expectedrecords_fullrefresh_overwrite_raw.jsonl") + val expectedFinalRecords2 = + readRecords("dat/sync2_expectedrecords_fullrefresh_overwrite_final.jsonl") + verifySyncResult(expectedRawRecords2, expectedFinalRecords2, disableFinalTableComparison()) + } + + /** + * Starting with an empty destination, execute a full refresh append sync. Verify that the + * records are written to the destination table. Then run a second sync, and verify that the old + * and new records are all present. + */ + @Test + @Throws(Exception::class) + fun fullRefreshAppend() { + val catalog = + io.airbyte.protocol.models.v0 + .ConfiguredAirbyteCatalog() + .withStreams( + java.util.List.of( + ConfiguredAirbyteStream() + .withSyncMode(SyncMode.FULL_REFRESH) + .withDestinationSyncMode(DestinationSyncMode.APPEND) + .withStream( + AirbyteStream() + .withNamespace(streamNamespace) + .withName(streamName) + .withJsonSchema(SCHEMA) + ) + ) + ) + + // First sync + val messages1 = readMessages("dat/sync1_messages.jsonl") + + runSync(catalog, messages1) + + val expectedRawRecords1 = readRecords("dat/sync1_expectedrecords_raw.jsonl") + val expectedFinalRecords1 = readRecords("dat/sync1_expectedrecords_nondedup_final.jsonl") + verifySyncResult(expectedRawRecords1, expectedFinalRecords1, disableFinalTableComparison()) + + // Second sync + val messages2 = readMessages("dat/sync2_messages.jsonl") + + runSync(catalog, messages2) + + val expectedRawRecords2 = readRecords("dat/sync2_expectedrecords_raw.jsonl") + val expectedFinalRecords2 = + readRecords("dat/sync2_expectedrecords_fullrefresh_append_final.jsonl") + verifySyncResult(expectedRawRecords2, expectedFinalRecords2, disableFinalTableComparison()) + } + + /** + * Starting with an empty destination, execute an incremental append sync. + * + * This is (not so secretly) identical to [.fullRefreshAppend], and uses the same set of + * expected records. Incremental as a concept only exists in the source. From the destination's + * perspective, we only care about the destination sync mode. + */ + @Test + @Throws(Exception::class) + fun incrementalAppend() { + val catalog = + io.airbyte.protocol.models.v0 + .ConfiguredAirbyteCatalog() + .withStreams( + java.util.List.of( + ConfiguredAirbyteStream() // These two lines are literally the only + // difference between this test and + // fullRefreshAppend + .withSyncMode(SyncMode.INCREMENTAL) + .withCursorField(listOf("updated_at")) + .withDestinationSyncMode(DestinationSyncMode.APPEND) + .withStream( + AirbyteStream() + .withNamespace(streamNamespace) + .withName(streamName) + .withJsonSchema(SCHEMA) + ) + ) + ) + + // First sync + val messages1 = readMessages("dat/sync1_messages.jsonl") + + runSync(catalog, messages1) + + val expectedRawRecords1 = readRecords("dat/sync1_expectedrecords_raw.jsonl") + val expectedFinalRecords1 = readRecords("dat/sync1_expectedrecords_nondedup_final.jsonl") + verifySyncResult(expectedRawRecords1, expectedFinalRecords1, disableFinalTableComparison()) + + // Second sync + val messages2 = readMessages("dat/sync2_messages.jsonl") + + runSync(catalog, messages2) + + val expectedRawRecords2 = readRecords("dat/sync2_expectedrecords_raw.jsonl") + val expectedFinalRecords2 = + readRecords("dat/sync2_expectedrecords_fullrefresh_append_final.jsonl") + verifySyncResult(expectedRawRecords2, expectedFinalRecords2, disableFinalTableComparison()) + } + + /** + * Starting with an empty destination, execute an incremental dedup sync. Verify that the + * records are written to the destination table. Then run a second sync, and verify that the + * raw/final tables contain the correct records. + */ + @Test + @Throws(Exception::class) + fun incrementalDedup() { + val catalog = + io.airbyte.protocol.models.v0 + .ConfiguredAirbyteCatalog() + .withStreams( + java.util.List.of( + ConfiguredAirbyteStream() + .withSyncMode(SyncMode.INCREMENTAL) + .withCursorField(listOf("updated_at")) + .withDestinationSyncMode(DestinationSyncMode.APPEND_DEDUP) + .withPrimaryKey(java.util.List.of(listOf("id1"), listOf("id2"))) + .withStream( + AirbyteStream() + .withNamespace(streamNamespace) + .withName(streamName) + .withJsonSchema(SCHEMA) + ) + ) + ) + + // First sync + val messages1 = readMessages("dat/sync1_messages.jsonl") + + runSync(catalog, messages1) + + val expectedRawRecords1 = readRecords("dat/sync1_expectedrecords_raw.jsonl") + val expectedFinalRecords1 = readRecords("dat/sync1_expectedrecords_dedup_final.jsonl") + verifySyncResult(expectedRawRecords1, expectedFinalRecords1, disableFinalTableComparison()) + + // Second sync + val messages2 = readMessages("dat/sync2_messages.jsonl") + + runSync(catalog, messages2) + + val expectedRawRecords2 = readRecords("dat/sync2_expectedrecords_raw.jsonl") + val expectedFinalRecords2 = + readRecords("dat/sync2_expectedrecords_incremental_dedup_final.jsonl") + verifySyncResult(expectedRawRecords2, expectedFinalRecords2, disableFinalTableComparison()) + } + + /** + * Run the first sync from [.incrementalDedup], but repeat the messages many times. Some + * destinations behave differently with small vs large record count, so this test case tries to + * exercise that behavior. + */ + @Test + @Throws(Exception::class) + fun largeDedupSync() { + val catalog = + io.airbyte.protocol.models.v0 + .ConfiguredAirbyteCatalog() + .withStreams( + java.util.List.of( + ConfiguredAirbyteStream() + .withSyncMode(SyncMode.INCREMENTAL) + .withCursorField(listOf("updated_at")) + .withDestinationSyncMode(DestinationSyncMode.APPEND_DEDUP) + .withPrimaryKey(java.util.List.of(listOf("id1"), listOf("id2"))) + .withStream( + AirbyteStream() + .withNamespace(streamNamespace) + .withName(streamName) + .withJsonSchema(SCHEMA) + ) + ) + ) + + // Run a sync with 25K copies of the input messages + val messages1 = repeatList(25000, readMessages("dat/sync1_messages.jsonl")) + + runSync(catalog, messages1) + + // The raw table will contain 25K copies of each record + val expectedRawRecords1 = + repeatList(25000, readRecords("dat/sync1_expectedrecords_raw.jsonl")) + // But the final table should be fully deduped + val expectedFinalRecords1 = readRecords("dat/sync1_expectedrecords_dedup_final.jsonl") + verifySyncResult(expectedRawRecords1, expectedFinalRecords1, disableFinalTableComparison()) + } + + /** Identical to [.incrementalDedup], except that the stream has no namespace. */ + @Test + @Throws(Exception::class) + fun incrementalDedupDefaultNamespace() { + val catalog = + io.airbyte.protocol.models.v0 + .ConfiguredAirbyteCatalog() + .withStreams( + java.util.List.of( + ConfiguredAirbyteStream() + .withSyncMode(SyncMode.INCREMENTAL) + .withCursorField(listOf("updated_at")) + .withDestinationSyncMode(DestinationSyncMode.APPEND_DEDUP) + .withPrimaryKey(java.util.List.of(listOf("id1"), listOf("id2"))) + .withStream( + AirbyteStream() // NB: we don't call `withNamespace` here + .withName(streamName) + .withJsonSchema(SCHEMA) + ) + ) + ) + + // First sync + val messages1 = readMessages("dat/sync1_messages.jsonl", null, streamName) + + runSync(catalog, messages1) + + val expectedRawRecords1 = readRecords("dat/sync1_expectedrecords_raw.jsonl") + val expectedFinalRecords1 = readRecords("dat/sync1_expectedrecords_dedup_final.jsonl") + verifySyncResult( + expectedRawRecords1, + expectedFinalRecords1, + null, + streamName, + disableFinalTableComparison() + ) + + // Second sync + val messages2 = readMessages("dat/sync2_messages.jsonl", null, streamName) + + runSync(catalog, messages2) + + val expectedRawRecords2 = readRecords("dat/sync2_expectedrecords_raw.jsonl") + val expectedFinalRecords2 = + readRecords("dat/sync2_expectedrecords_incremental_dedup_final.jsonl") + verifySyncResult( + expectedRawRecords2, + expectedFinalRecords2, + null, + streamName, + disableFinalTableComparison() + ) + } + + @Test + @Disabled("Not yet implemented") + @Throws(Exception::class) + fun testLineBreakCharacters() { + // TODO verify that we can handle strings with interesting characters + // build an airbyterecordmessage using something like this, and add it to the input + // messages: + Jsons.jsonNode( + ImmutableMap.builder() + .put("id", 1) + .put("currency", "USD\u2028") + .put( + "date", + "2020-03-\n31T00:00:00Z\r" + ) // TODO(sherifnada) hack: write decimals with sigfigs because Snowflake stores + // 10.1 as "10" which + // fails destination tests + .put("HKD", 10.1) + .put("NZD", 700.1) + .build() + ) + } + + /** + * Run a sync, then remove the `name` column from the schema and run a second sync. Verify that + * the final table doesn't contain the `name` column after the second sync. + */ + @Test + @Throws(Exception::class) + fun testIncrementalSyncDropOneColumn() { + val stream = + AirbyteStream() + .withNamespace(streamNamespace) + .withName(streamName) + .withJsonSchema(SCHEMA) + val catalog = + io.airbyte.protocol.models.v0 + .ConfiguredAirbyteCatalog() + .withStreams( + java.util.List.of( + ConfiguredAirbyteStream() + .withSyncMode(SyncMode.INCREMENTAL) + .withCursorField(listOf("updated_at")) + .withDestinationSyncMode(DestinationSyncMode.APPEND) + .withStream(stream) + ) + ) + + // First sync + val messages1 = readMessages("dat/sync1_messages.jsonl") + + runSync(catalog, messages1) + + val expectedRawRecords1 = readRecords("dat/sync1_expectedrecords_raw.jsonl") + val expectedFinalRecords1 = readRecords("dat/sync1_expectedrecords_nondedup_final.jsonl") + verifySyncResult(expectedRawRecords1, expectedFinalRecords1, disableFinalTableComparison()) + + // Second sync + val messages2 = readMessages("dat/sync2_messages.jsonl") + val trimmedSchema = SCHEMA.deepCopy() + (trimmedSchema["properties"] as ObjectNode).remove("name") + stream.jsonSchema = trimmedSchema + + runSync(catalog, messages2) + + // The raw data is unaffected by the schema, but the final table should not have a `name` + // column. + val expectedRawRecords2 = readRecords("dat/sync2_expectedrecords_raw.jsonl") + val expectedFinalRecords2 = + readRecords("dat/sync2_expectedrecords_fullrefresh_append_final.jsonl") + .stream() + .peek { record: JsonNode -> + (record as ObjectNode).remove(sqlGenerator.buildColumnId("name").name) + } + .toList() + verifySyncResult(expectedRawRecords2, expectedFinalRecords2, disableFinalTableComparison()) + } + + @Test + @Disabled("Not yet implemented") + @Throws(Exception::class) + fun testSyncUsesAirbyteStreamNamespaceIfNotNull() { + // TODO duplicate this test for each sync mode. Run 1st+2nd syncs using a stream with null + // namespace: + val catalog = + io.airbyte.protocol.models.v0 + .ConfiguredAirbyteCatalog() + .withStreams( + java.util.List.of( + ConfiguredAirbyteStream() + .withSyncMode(SyncMode.FULL_REFRESH) + .withCursorField(listOf("updated_at")) + .withDestinationSyncMode(DestinationSyncMode.OVERWRITE) + .withPrimaryKey(java.util.List.of(listOf("id1"), listOf("id2"))) + .withStream( + AirbyteStream() + .withNamespace(null) + .withName(streamName) + .withJsonSchema(SCHEMA) + ) + ) + ) + } + + // TODO duplicate this test for each sync mode. Run 1st+2nd syncs using two streams with the + // same + // name but different namespace + // TODO maybe we don't even need the single-stream versions... + /** + * Identical to [.incrementalDedup], except there are two streams with the same name and + * different namespace. + */ + @Test + @Throws(Exception::class) + fun incrementalDedupIdenticalName() { + val namespace1 = streamNamespace + "_1" + val namespace2 = streamNamespace + "_2" + val catalog = + io.airbyte.protocol.models.v0 + .ConfiguredAirbyteCatalog() + .withStreams( + java.util.List.of( + ConfiguredAirbyteStream() + .withSyncMode(SyncMode.INCREMENTAL) + .withCursorField(listOf("updated_at")) + .withDestinationSyncMode(DestinationSyncMode.APPEND_DEDUP) + .withPrimaryKey(java.util.List.of(listOf("id1"), listOf("id2"))) + .withStream( + AirbyteStream() + .withNamespace(namespace1) + .withName(streamName) + .withJsonSchema(SCHEMA) + ), + ConfiguredAirbyteStream() + .withSyncMode(SyncMode.INCREMENTAL) + .withCursorField(listOf("updated_at")) + .withDestinationSyncMode(DestinationSyncMode.APPEND_DEDUP) + .withPrimaryKey(java.util.List.of(listOf("id1"), listOf("id2"))) + .withStream( + AirbyteStream() + .withNamespace(namespace2) + .withName(streamName) + .withJsonSchema(SCHEMA) + ) + ) + ) + + // First sync + val messages1 = + Stream.concat( + readMessages("dat/sync1_messages.jsonl", namespace1, streamName).stream(), + readMessages("dat/sync1_messages2.jsonl", namespace2, streamName).stream() + ) + .toList() + + runSync(catalog, messages1) + + verifySyncResult( + readRecords("dat/sync1_expectedrecords_raw.jsonl"), + readRecords("dat/sync1_expectedrecords_dedup_final.jsonl"), + namespace1, + streamName, + disableFinalTableComparison() + ) + verifySyncResult( + readRecords("dat/sync1_expectedrecords_raw2.jsonl"), + readRecords("dat/sync1_expectedrecords_dedup_final2.jsonl"), + namespace2, + streamName, + disableFinalTableComparison() + ) + + // Second sync + val messages2 = + Stream.concat( + readMessages("dat/sync2_messages.jsonl", namespace1, streamName).stream(), + readMessages("dat/sync2_messages2.jsonl", namespace2, streamName).stream() + ) + .toList() + + runSync(catalog, messages2) + + verifySyncResult( + readRecords("dat/sync2_expectedrecords_raw.jsonl"), + readRecords("dat/sync2_expectedrecords_incremental_dedup_final.jsonl"), + namespace1, + streamName, + disableFinalTableComparison() + ) + verifySyncResult( + readRecords("dat/sync2_expectedrecords_raw2.jsonl"), + readRecords("dat/sync2_expectedrecords_incremental_dedup_final2.jsonl"), + namespace2, + streamName, + disableFinalTableComparison() + ) + } + + /** + * Run two syncs at the same time. They each have one stream, which has the same name for both + * syncs but different namespace. This should work fine. This test is similar to + * [.incrementalDedupIdenticalName], but uses two separate syncs instead of one sync with two + * streams. + * + * Note that destination stdout is a bit misleading: The two syncs' stdout _should_ be + * interleaved, but we're just dumping the entire sync1 stdout, and then the entire sync2 + * stdout. + */ + @Test + @Throws(Exception::class) + open fun identicalNameSimultaneousSync() { + val namespace1 = streamNamespace + "_1" + val catalog1 = + io.airbyte.protocol.models.v0 + .ConfiguredAirbyteCatalog() + .withStreams( + java.util.List.of( + ConfiguredAirbyteStream() + .withSyncMode(SyncMode.INCREMENTAL) + .withCursorField(listOf("updated_at")) + .withDestinationSyncMode(DestinationSyncMode.APPEND_DEDUP) + .withPrimaryKey(java.util.List.of(listOf("id1"), listOf("id2"))) + .withStream( + AirbyteStream() + .withNamespace(namespace1) + .withName(streamName) + .withJsonSchema(SCHEMA) + ) + ) + ) + + val namespace2 = streamNamespace + "_2" + val catalog2 = + io.airbyte.protocol.models.v0 + .ConfiguredAirbyteCatalog() + .withStreams( + java.util.List.of( + ConfiguredAirbyteStream() + .withSyncMode(SyncMode.INCREMENTAL) + .withCursorField(listOf("updated_at")) + .withDestinationSyncMode(DestinationSyncMode.APPEND_DEDUP) + .withPrimaryKey(java.util.List.of(listOf("id1"), listOf("id2"))) + .withStream( + AirbyteStream() + .withNamespace(namespace2) + .withName(streamName) + .withJsonSchema(SCHEMA) + ) + ) + ) + + val messages1 = readMessages("dat/sync1_messages.jsonl", namespace1, streamName) + val messages2 = readMessages("dat/sync1_messages2.jsonl", namespace2, streamName) + + // Start two concurrent syncs + val sync1 = startSync(catalog1) + val sync2 = startSync(catalog2) + val outFuture1 = destinationOutputFuture(sync1) + val outFuture2 = destinationOutputFuture(sync2) + + // Write some messages to both syncs. Write a lot of data to sync 2 to try and force a + // flush. + pushMessages(messages1, sync1) + for (i in 0..100000 - 1) { + pushMessages(messages2, sync2) + } + endSync(sync1, outFuture1) + // Write some more messages to the second sync. It should not be affected by the first + // sync's + // shutdown. + for (i in 0..100000 - 1) { + pushMessages(messages2, sync2) + } + endSync(sync2, outFuture2) + + // For simplicity, don't verify the raw table. Assume that if the final table is correct, + // then + // the raw data is correct. This is generally a safe assumption. + Assertions.assertAll( + Executable { + DIFFER!!.diffFinalTableRecords( + readRecords("dat/sync1_expectedrecords_dedup_final.jsonl"), + dumpFinalTableRecords(namespace1, streamName) + ) + }, + Executable { + DIFFER!!.diffFinalTableRecords( + readRecords("dat/sync1_expectedrecords_dedup_final2.jsonl"), + dumpFinalTableRecords(namespace2, streamName) + ) + } + ) + } + + @Test + @Disabled("Not yet implemented") + @Throws(Exception::class) + fun testSyncNotFailsWithNewFields() { + // TODO duplicate this test for each sync mode. Run a sync, then add a new field to the + // schema, then + // run another sync + // We might want to write a test that verifies more general schema evolution (e.g. all valid + // evolutions) + } + + /** + * Change the cursor column in the second sync to a column that doesn't exist in the first sync. + * Verify that we overwrite everything correctly. + * + * This essentially verifies that the destination connector correctly recognizes NULL cursors as + * older than non-NULL cursors. + */ + @Test + @Throws(Exception::class) + fun incrementalDedupChangeCursor() { + val mangledSchema = SCHEMA.deepCopy() + (mangledSchema["properties"] as ObjectNode).remove("updated_at") + (mangledSchema["properties"] as ObjectNode).set( + "old_cursor", + Jsons.deserialize( + """ + {"type": "integer"} + + """.trimIndent() + ) + ) + val configuredStream = + ConfiguredAirbyteStream() + .withSyncMode(SyncMode.INCREMENTAL) + .withCursorField(listOf("old_cursor")) + .withDestinationSyncMode(DestinationSyncMode.APPEND_DEDUP) + .withPrimaryKey(java.util.List.of(listOf("id1"), listOf("id2"))) + .withStream( + AirbyteStream() + .withNamespace(streamNamespace) + .withName(streamName) + .withJsonSchema(mangledSchema) + ) + val catalog = + io.airbyte.protocol.models.v0 + .ConfiguredAirbyteCatalog() + .withStreams(java.util.List.of(configuredStream)) + + // First sync + val messages1 = readMessages("dat/sync1_cursorchange_messages.jsonl") + + runSync(catalog, messages1) + + val expectedRawRecords1 = + readRecords("dat/sync1_cursorchange_expectedrecords_dedup_raw.jsonl") + val expectedFinalRecords1 = + readRecords("dat/sync1_cursorchange_expectedrecords_dedup_final.jsonl") + verifySyncResult(expectedRawRecords1, expectedFinalRecords1, disableFinalTableComparison()) + + // Second sync + val messages2 = readMessages("dat/sync2_messages.jsonl") + configuredStream.stream.jsonSchema = SCHEMA + configuredStream.cursorField = listOf("updated_at") + + runSync(catalog, messages2) + + val expectedRawRecords2 = + readRecords("dat/sync2_cursorchange_expectedrecords_incremental_dedup_raw.jsonl") + val expectedFinalRecords2 = + readRecords("dat/sync2_cursorchange_expectedrecords_incremental_dedup_final.jsonl") + verifySyncResult(expectedRawRecords2, expectedFinalRecords2, disableFinalTableComparison()) + } + + @Test + @Disabled("Not yet implemented") + @Throws(Exception::class) + fun testSyncWithLargeRecordBatch() { + // TODO duplicate this test for each sync mode. Run a single sync with many records + /* + * copied from DATs: This serves to test MSSQL 2100 limit parameters in a single query. this means + * that for Airbyte insert data need to limit to ~ 700 records (3 columns for the raw tables) = 2100 + * params + * + * this maybe needs configuration per destination to specify that limit? + */ + } + + @Test + @Disabled("Not yet implemented") + @Throws(Exception::class) + fun testDataTypes() { + // TODO duplicate this test for each sync mode. See DataTypeTestArgumentProvider for what + // this test + // does in DAT-land + // we probably don't want to do the exact same thing, but the general spirit of testing a + // wide range + // of values for every data type is approximately correct + // this test probably needs some configuration per destination to specify what values are + // supported? + } + + private fun repeatList(n: Int, list: List): List { + return Collections.nCopies(n, list) + .stream() + .flatMap { obj: List -> obj.stream() } + .collect(Collectors.toList()) + } + + @Throws(Exception::class) + protected fun verifySyncResult( + expectedRawRecords: List, + expectedFinalRecords: List, + disableFinalTableComparison: Boolean + ) { + verifySyncResult( + expectedRawRecords, + expectedFinalRecords, + streamNamespace, + streamName, + disableFinalTableComparison + ) + } + + @Throws(Exception::class) + private fun verifySyncResult( + expectedRawRecords: List, + expectedFinalRecords: List, + streamNamespace: String?, + streamName: String, + disableFinalTableComparison: Boolean + ) { + val actualRawRecords = dumpRawTableRecords(streamNamespace, streamName) + if (disableFinalTableComparison) { + DIFFER!!.diffRawTableRecords(expectedRawRecords, actualRawRecords) + } else { + val actualFinalRecords = dumpFinalTableRecords(streamNamespace, streamName) + DIFFER!!.verifySyncResult( + expectedRawRecords, + actualRawRecords, + expectedFinalRecords, + actualFinalRecords + ) + } + } + + /* + * !!!!!! WARNING !!!!!! The code below was mostly copypasted from DestinationAcceptanceTest. If you + * make edits here, you probably want to also edit there. + */ + @JvmOverloads + @Throws(Exception::class) + protected fun runSync( + catalog: io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog, + messages: List, + imageName: String = this.imageName, + configTransformer: Function = Function.identity() + ) { + val destination = startSync(catalog, imageName, configTransformer) + val outputFuture = destinationOutputFuture(destination) + pushMessages(messages, destination) + endSync(destination, outputFuture) + } + + // In the background, read messages from the destination until it terminates. We need to clear + // stdout in real time, to prevent the buffer from filling up and blocking the destination. + private fun destinationOutputFuture( + destination: AirbyteDestination + ): CompletableFuture> { + val outputFuture = CompletableFuture>() + Executors.newSingleThreadExecutor() + .submit( + Callable { + val destinationMessages: + MutableList = + ArrayList() + while (!destination.isFinished()) { + // attemptRead isn't threadsafe, we read stdout fully here. + // i.e. we shouldn't call attemptRead anywhere else. + destination.attemptRead().ifPresent { + e: io.airbyte.protocol.models.AirbyteMessage -> + destinationMessages.add(e) + } + } + outputFuture.complete(destinationMessages) + null + } + ) + return outputFuture + } + + /** + * + * @param catalog + * @param imageName + * @param configTransformer + * - test specific config overrides or additions can be performed with this function + * @return + * @throws Exception + */ + @Throws(Exception::class) + protected fun startSync( + catalog: io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog, + imageName: String = this.imageName, + configTransformer: Function = Function.identity() + ): AirbyteDestination { + synchronized(this) { + catalog.streams.forEach( + Consumer { s: ConfiguredAirbyteStream -> + streamsToTearDown!!.add( + AirbyteStreamNameNamespacePair.fromAirbyteStream(s.stream) + ) + } + ) + } + + val testDir = Path.of("/tmp/airbyte_tests/") + Files.createDirectories(testDir) + val workspaceRoot = Files.createTempDirectory(testDir, "test") + val jobRoot = Files.createDirectories(Path.of(workspaceRoot.toString(), "job")) + val localRoot = Files.createTempDirectory(testDir, "output") + val processFactory: ProcessFactory = + DockerProcessFactory( + workspaceRoot, + workspaceRoot.toString(), + localRoot.toString(), + "host", + emptyMap() + ) + val transformedConfig = configTransformer.apply(config) + val destinationConfig = + WorkerDestinationConfig() + .withConnectionId(UUID.randomUUID()) + .withCatalog(convertProtocolObject(catalog, ConfiguredAirbyteCatalog::class.java)) + .withDestinationConnectionConfiguration(transformedConfig) + + val destination: AirbyteDestination = + DefaultAirbyteDestination( + AirbyteIntegrationLauncher( + "0", + 0, + imageName, + processFactory, + null, + null, + false, + EnvVariableFeatureFlags() + ) + ) + + destination.start(destinationConfig, jobRoot, emptyMap()) + + return destination + } + + @Throws(Exception::class) + protected fun endSync( + destination: AirbyteDestination, + destinationOutputFuture: CompletableFuture> + ) { + destination.notifyEndOfInput() + // TODO Eventually we'll want to somehow extract the state messages while a sync is running, + // to + // verify checkpointing. + destinationOutputFuture.join() + destination.close() + } + + protected fun readMessages(filename: String): List { + return Companion.readMessages(filename, streamNamespace, streamName) + } + + protected fun readRecords(filename: String): List { + return Companion.readRecords(filename) + } + + protected val schema: JsonNode = SCHEMA + + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(BaseTypingDedupingTest::class.java) + protected val SCHEMA: JsonNode + + init { + try { + SCHEMA = Jsons.deserialize(MoreResources.readResource("dat/schema.json")) + } catch (e: IOException) { + throw RuntimeException(e) + } + } + + @Throws(IOException::class) + fun readRecords(filename: String): List { + return MoreResources.readResource(filename) + .lines() + .map { obj: String -> obj.trim { it <= ' ' } } + .filter { line: String -> !line.isEmpty() } + .filter { line: String -> !line.startsWith("//") } + .map { jsonString: String? -> Jsons.deserializeExact(jsonString) } + .toList() + } + + @Throws(IOException::class) + protected fun readMessages( + filename: String, + streamNamespace: String?, + streamName: String? + ): List { + return readRecords(filename) + .stream() + .map { record: JsonNode? -> Jsons.convertValue(record, AirbyteMessage::class.java) } + .peek { message: AirbyteMessage -> + message.record.namespace = streamNamespace + message.record.stream = streamName + } + .toList() + } + + protected fun pushMessages( + messages: List, + destination: AirbyteDestination + ) { + messages.forEach( + Consumer { message: AirbyteMessage -> + Exceptions.toRuntime { + destination.accept( + convertProtocolObject( + message, + io.airbyte.protocol.models.AirbyteMessage::class.java + ) + ) + } + } + ) + } + + private fun convertProtocolObject(v1: V1, klass: Class): V0 { + return Jsons.`object`(Jsons.jsonNode(v1), klass) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/testFixtures/kotlin/io/airbyte/integrations/base/destination/typing_deduping/RecordDiffer.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/testFixtures/kotlin/io/airbyte/integrations/base/destination/typing_deduping/RecordDiffer.kt new file mode 100644 index 0000000000000..9094e5ccc884c --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/testFixtures/kotlin/io/airbyte/integrations/base/destination/typing_deduping/RecordDiffer.kt @@ -0,0 +1,530 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.integrations.base.destination.typing_deduping + +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.node.ArrayNode +import com.fasterxml.jackson.databind.node.ObjectNode +import com.google.common.collect.Streams +import io.airbyte.commons.json.Jsons +import java.math.BigDecimal +import java.time.* +import java.util.* +import java.util.function.Function +import java.util.stream.Collectors +import java.util.stream.IntStream +import java.util.stream.Stream +import kotlin.Array +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.function.Executable + +/** + * Utility class to generate human-readable diffs between expected and actual records. Assumes 1s1t + * output format. + */ +class RecordDiffer +@SafeVarargs +constructor( + private val rawRecordColumnNames: Map, + private val finalRecordColumnNames: Map, + vararg identifyingColumns: Pair +) { + private val rawRecordIdentityComparator: Comparator + private val rawRecordSortComparator: Comparator + private val rawRecordIdentityExtractor: Function + + private val finalRecordIdentityComparator: Comparator + private val finalRecordSortComparator: Comparator + private val finalRecordIdentityExtractor: Function + + /** + * @param rawRecordColumnNames + * @param finalRecordColumnNames + * @param identifyingColumns Which fields constitute a unique record (typically PK+cursor). Do + * _not_ include extracted_at; it is handled automatically. + */ + init { + val rawTableIdentifyingColumns: Array> = + identifyingColumns.map { it.first.originalName to it.second }.toTypedArray() + + this.rawRecordIdentityComparator = + buildIdentityComparator(rawTableIdentifyingColumns, rawRecordColumnNames) + this.rawRecordSortComparator = + rawRecordIdentityComparator.thenComparing { record: JsonNode -> + asString(record[getMetadataColumnName(rawRecordColumnNames, "_airbyte_raw_id")]) + } + this.rawRecordIdentityExtractor = + buildIdentityExtractor(rawTableIdentifyingColumns, rawRecordColumnNames) + + val finalTableIdentifyingColumns: Array> = + identifyingColumns.map { it.first.name to it.second }.toTypedArray() + this.finalRecordIdentityComparator = + buildIdentityComparator(finalTableIdentifyingColumns, finalRecordColumnNames) + this.finalRecordSortComparator = + finalRecordIdentityComparator.thenComparing { record: JsonNode -> + asString(record[getMetadataColumnName(finalRecordColumnNames, "_airbyte_raw_id")]) + } + this.finalRecordIdentityExtractor = + buildIdentityExtractor(finalTableIdentifyingColumns, finalRecordColumnNames) + } + + /** + * In the expected records, a SQL null is represented as a JsonNode without that field at all, + * and a JSON null is represented as a NullNode. For example, in the JSON blob {"name": null}, + * the `name` field is a JSON null, and the `address` field is a SQL null. + */ + fun verifySyncResult( + expectedRawRecords: List, + actualRawRecords: List, + expectedFinalRecords: List, + actualFinalRecords: List + ) { + Assertions.assertAll( + Executable { diffRawTableRecords(expectedRawRecords, actualRawRecords) }, + Executable { diffFinalTableRecords(expectedFinalRecords, actualFinalRecords) } + ) + } + + fun diffRawTableRecords(expectedRecords: List, actualRecords: List) { + val diff = + diffRecords( + expectedRecords + .stream() + .map { record: JsonNode -> this.copyWithLiftedData(record) } + .collect(Collectors.toList()), + actualRecords + .stream() + .map { record: JsonNode -> this.copyWithLiftedData(record) } + .collect(Collectors.toList()), + rawRecordIdentityComparator, + rawRecordSortComparator, + rawRecordIdentityExtractor, + rawRecordColumnNames + ) + + if (!diff.isEmpty()) { + Assertions.fail("Raw table was incorrect.\n$diff") + } + } + + fun diffFinalTableRecords(expectedRecords: List, actualRecords: List) { + val diff = + diffRecords( + expectedRecords, + actualRecords, + finalRecordIdentityComparator, + finalRecordSortComparator, + finalRecordIdentityExtractor, + finalRecordColumnNames + ) + + if (!diff.isEmpty()) { + Assertions.fail("Final table was incorrect.\n$diff") + } + } + + /** + * Lift _airbyte_data fields to the root level. If _airbyte_data is a string, deserialize it + * first. + * + * @return A copy of the record, but with all fields in _airbyte_data lifted to the top level. + */ + private fun copyWithLiftedData(record: JsonNode): JsonNode { + val copy = record.deepCopy() + copy.remove(getMetadataColumnName(rawRecordColumnNames, "_airbyte_data")) + var airbyteData = record[getMetadataColumnName(rawRecordColumnNames, "_airbyte_data")] + if (airbyteData.isTextual) { + airbyteData = Jsons.deserializeExact(airbyteData.asText()) + } + Streams.stream(airbyteData.fields()).forEach { field: Map.Entry -> + if (!copy.has(field.key)) { + copy.set(field.key, field.value) + } else { + // This would only happen if the record has one of the metadata columns (e.g. + // _airbyte_raw_id) + // We don't support that in production, so we don't support it here either. + throw RuntimeException( + "Cannot lift field " + field.key + " because it already exists in the record." + ) + } + } + return copy + } + + /** + * Build a Comparator to detect equality between two records. It first compares all the + * identifying columns in order, and breaks ties using extracted_at. + */ + private fun buildIdentityComparator( + identifyingColumns: Array>, + columnNames: Map + ): Comparator { + // Start with a noop comparator for convenience + var comp: Comparator = Comparator.comparing { record -> 0 } + for (column in identifyingColumns) { + comp = comp.thenComparing { record -> extract(record!!, column.first, column.second) } + } + comp = + comp.thenComparing { record -> + asTimestampWithTimezone( + record!![getMetadataColumnName(columnNames, "_airbyte_extracted_at")] + ) + } + return comp + } + + /** See [<][.buildIdentityComparator] for an explanation of dataExtractor. */ + private fun buildIdentityExtractor( + identifyingColumns: Array>, + columnNames: Map + ): Function { + return Function { record: JsonNode -> + (Arrays.stream(identifyingColumns) + .map { column: Pair -> + getPrintableFieldIfPresent(record, column.first) + } + .collect(Collectors.joining(", ")) + + getPrintableFieldIfPresent( + record, + getMetadataColumnName(columnNames, "_airbyte_extracted_at") + )) + } + } + + /** + * Generate a human-readable diff between the two lists. Assumes (in general) that two records + * with the same PK, cursor, and extracted_at are the same record. + * + * Verifies that all values specified in the expected records are correct (_including_ raw_id), + * and that no other fields are present (except for loaded_at and raw_id). We assume that it's + * impossible to verify loaded_at, since it's generated dynamically; however, we do provide the + * ability to assert on the exact raw_id if desired; we simply assume that raw_id is always + * expected to be present. + * + * @param identityComparator Returns 0 iff two records are the "same" record (i.e. have the same + * PK+cursor+extracted_at) + * @param sortComparator Behaves identically to identityComparator, but if two records are the + * same, breaks that tie using _airbyte_raw_id + * @param recordIdExtractor Dump the record's PK+cursor+extracted_at into a human-readable + * string + * @return The diff, or empty string if there were no differences + */ + private fun diffRecords( + originalExpectedRecords: List, + originalActualRecords: List, + identityComparator: Comparator, + sortComparator: Comparator, + recordIdExtractor: Function, + columnNames: Map + ): String { + val expectedRecords = originalExpectedRecords.stream().sorted(sortComparator).toList() + val actualRecords = originalActualRecords.stream().sorted(sortComparator).toList() + + // Iterate through both lists in parallel and compare each record. + // Build up an error message listing any incorrect, missing, or unexpected records. + var message = "" + var expectedRecordIndex = 0 + var actualRecordIndex = 0 + while ( + expectedRecordIndex < expectedRecords.size && actualRecordIndex < actualRecords.size + ) { + val expectedRecord = expectedRecords[expectedRecordIndex] + val actualRecord = actualRecords[actualRecordIndex] + val compare = identityComparator.compare(expectedRecord, actualRecord) + if (compare == 0) { + // These records should be the same. Find the specific fields that are different and + // move on + // to the next records in both lists. + message += + diffSingleRecord(recordIdExtractor, expectedRecord, actualRecord, columnNames) + expectedRecordIndex++ + actualRecordIndex++ + } else if (compare < 0) { + // The expected record is missing from the actual records. Print it and move on to + // the next expected + // record. + message += "Row was expected but missing: $expectedRecord\n" + expectedRecordIndex++ + } else { + // There's an actual record which isn't present in the expected records. Print it + // and move on to the + // next actual record. + message += "Row was not expected but present: $actualRecord\n" + actualRecordIndex++ + } + } + // Tail loops in case we reached the end of one list before the other. + while (expectedRecordIndex < expectedRecords.size) { + message += + "Row was expected but missing: " + expectedRecords[expectedRecordIndex] + "\n" + expectedRecordIndex++ + } + while (actualRecordIndex < actualRecords.size) { + message += + "Row was not expected but present: " + actualRecords[actualRecordIndex] + "\n" + actualRecordIndex++ + } + + return message + } + + private fun diffSingleRecord( + recordIdExtractor: Function, + expectedRecord: JsonNode, + actualRecord: JsonNode, + columnNames: Map + ): String { + var foundMismatch = false + var mismatchedRecordMessage = + "Row had incorrect data: " + recordIdExtractor.apply(expectedRecord) + "\n" + // Iterate through each column in the expected record and compare it to the actual record's + // value. + for (column in Streams.stream(expectedRecord.fieldNames()).sorted().toList()) { + // For all other columns, we can just compare their values directly. + val expectedValue = expectedRecord[column] + val actualValue = actualRecord[column] + if (!areJsonNodesEquivalent(expectedValue, actualValue)) { + mismatchedRecordMessage += + generateFieldError("column $column", expectedValue, actualValue) + foundMismatch = true + } + } + // Then check the entire actual record for any columns that we weren't expecting. + val extraColumns = checkForExtraOrNonNullFields(expectedRecord, actualRecord, columnNames) + if (extraColumns.size > 0) { + for ((key, value) in extraColumns) { + mismatchedRecordMessage += generateFieldError("column $key", null, value) + foundMismatch = true + } + } + return if (foundMismatch) { + mismatchedRecordMessage + } else { + "" + } + } + + /** + * Verify that all fields in the actual record are present in the expected record. This is + * primarily relevant for detecting fields that we expected to be null, but actually were not. + * See [BaseTypingDedupingTest.dumpFinalTableRecords] for an explanation of how SQL/JSON nulls + * are represented in the expected record. + * + * This has the side benefit of detecting completely unexpected columns, which would be a very + * weird bug but is probably still useful to catch. + */ + private fun checkForExtraOrNonNullFields( + expectedRecord: JsonNode, + actualRecord: JsonNode, + columnNames: Map + ): LinkedHashMap { + val extraFields = LinkedHashMap() + for (column in Streams.stream(actualRecord.fieldNames()).sorted().toList()) { + // loaded_at and raw_id are generated dynamically, so we just ignore them. + val isLoadedAt = getMetadataColumnName(columnNames, "_airbyte_loaded_at") == column + val isRawId = getMetadataColumnName(columnNames, "_airbyte_raw_id") == column + val isExpected = expectedRecord.has(column) + if (!(isLoadedAt || isRawId || isExpected)) { + extraFields[column] = actualRecord[column] + } + } + return extraFields + } + + private fun getMetadataColumnName( + columnNames: Map, + columnName: String + ): String { + return columnNames.getOrDefault(columnName, columnName) + } + + companion object { + private fun getPrintableFieldIfPresent(record: JsonNode, field: String): String { + return if (record.has(field)) { + field + "=" + record[field] + } else { + "" + } + } + + private fun areJsonNodesEquivalent( + expectedValue: JsonNode?, + actualValue: JsonNode? + ): Boolean { + return if (expectedValue == null || actualValue == null) { + // If one of the values is null, then we expect both of them to be null. + expectedValue == null && actualValue == null + } else if (expectedValue is ArrayNode && actualValue is ArrayNode) { + // If both values are arrays, compare each of their elements. Order should be + // preserved + IntStream.range(0, expectedValue.size()).allMatch { i: Int -> + areJsonNodesEquivalent(expectedValue[i], actualValue[i]) + } + } else if (expectedValue is ObjectNode && actualValue is ObjectNode) { + // If both values are objects compare their fields and values + expectedValue.size() == actualValue.size() && + Stream.generate { expectedValue.fieldNames().next() } + .limit(expectedValue.size().toLong()) + .allMatch { field: String? -> + areJsonNodesEquivalent(expectedValue[field], actualValue[field]) + } + } else { + // Otherwise, we need to compare the actual values. + // This is kind of sketchy, but seems to work fine for the data we have in our test + // cases. + (expectedValue == actualValue || + (expectedValue.isIntegralNumber && + actualValue.isIntegralNumber && + expectedValue.bigIntegerValue() == actualValue.bigIntegerValue()) || + (expectedValue.isNumber && + actualValue.isNumber && + expectedValue.decimalValue() == actualValue.decimalValue())) + } + } + + /** + * Produce a pretty-printed error message, e.g. " For column foo, expected 1 but got 2". The + * leading spaces are intentional, to make the message easier to read when it's embedded in + * a larger stacktrace. + */ + private fun generateFieldError( + fieldname: String, + expectedValue: JsonNode?, + actualValue: JsonNode? + ): String { + val expectedString = expectedValue?.toString() ?: "SQL NULL (i.e. no value)" + val actualString = actualValue?.toString() ?: "SQL NULL (i.e. no value)" + return " For $fieldname, expected $expectedString but got $actualString\n" + } + + // These asFoo methods are used for sorting records, so their defaults are intended to make + // broken + // records stand out. + private fun asString(node: JsonNode?): String { + return if (node == null || node.isNull) { + "" + } else if (node.isTextual) { + node.asText() + } else { + Jsons.serialize(node) + } + } + + private fun asNumber(node: JsonNode?): BigDecimal { + return if (node == null || !node.isNumber) { + BigDecimal(Double.MIN_VALUE) + } else { + node.decimalValue() + } + } + + private fun asInt(node: JsonNode?): Long { + return if (node == null || !node.isIntegralNumber) { + Long.MIN_VALUE + } else { + node.longValue() + } + } + + private fun asBoolean(node: JsonNode?): Boolean { + return if (node == null || !node.isBoolean) { + false + } else { + node.asBoolean() + } + } + + private fun asTimestampWithTimezone(node: JsonNode?): Instant { + return if (node == null || !node.isTextual) { + Instant.ofEpochMilli(Long.MIN_VALUE) + } else { + try { + Instant.parse(node.asText()) + } catch (e: Exception) { + Instant.ofEpochMilli(Long.MIN_VALUE) + } + } + } + + private fun asTimestampWithoutTimezone(node: JsonNode?): LocalDateTime { + return if (node == null || !node.isTextual) { + LocalDateTime.ofInstant(Instant.ofEpochMilli(Long.MIN_VALUE), ZoneOffset.UTC) + } else { + try { + LocalDateTime.parse(node.asText()) + } catch (e: Exception) { + LocalDateTime.ofInstant(Instant.ofEpochMilli(Long.MIN_VALUE), ZoneOffset.UTC) + } + } + } + + private fun asTimeWithTimezone(node: JsonNode?): OffsetTime { + return if (node == null || !node.isTextual) { + OffsetTime.of(0, 0, 0, 0, ZoneOffset.UTC) + } else { + OffsetTime.parse(node.asText()) + } + } + + private fun asTimeWithoutTimezone(node: JsonNode?): LocalTime { + return if (node == null || !node.isTextual) { + LocalTime.of(0, 0, 0) + } else { + try { + LocalTime.parse(node.asText()) + } catch (e: Exception) { + LocalTime.of(0, 0, 0) + } + } + } + + private fun asDate(node: JsonNode?): LocalDate { + return if (node == null || !node.isTextual) { + LocalDate.ofInstant(Instant.ofEpochMilli(Long.MIN_VALUE), ZoneOffset.UTC) + } else { + try { + LocalDate.parse(node.asText()) + } catch (e: Exception) { + LocalDate.ofInstant(Instant.ofEpochMilli(Long.MIN_VALUE), ZoneOffset.UTC) + } + } + } + + private class Field(f: Comparable<*>) : Comparable { + private val stringValue = f.toString() + private val realType: Class<*> = f.javaClass + override fun compareTo(o: Field): Int { + if (realType.canonicalName == o.realType.canonicalName) { + return stringValue.compareTo(o.stringValue) + } + return realType.canonicalName.compareTo(o.realType.canonicalName) + } + } + + // Generics? Never heard of 'em. (I'm sorry) + private fun extract(node: JsonNode, field: String, type: AirbyteType): Field { + return Field( + if (type is AirbyteProtocolType) { + when (type) { + AirbyteProtocolType.STRING -> asString(node[field]) + AirbyteProtocolType.NUMBER -> asNumber(node[field]) + AirbyteProtocolType.INTEGER -> asInt(node[field]) + AirbyteProtocolType.BOOLEAN -> asBoolean(node[field]) + AirbyteProtocolType.TIMESTAMP_WITH_TIMEZONE -> + asTimestampWithTimezone(node[field]) + AirbyteProtocolType.TIMESTAMP_WITHOUT_TIMEZONE -> + asTimestampWithoutTimezone(node[field]) + AirbyteProtocolType.TIME_WITH_TIMEZONE -> asTimeWithTimezone(node[field]) + AirbyteProtocolType.TIME_WITHOUT_TIMEZONE -> + asTimeWithoutTimezone(node[field]) + AirbyteProtocolType.DATE -> asDate(node[field]) + AirbyteProtocolType.UNKNOWN -> node.toString() + } + } else { + node.toString() + } + ) + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/testFixtures/resources/dat/sync1_messages.jsonl b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/testFixtures/resources/dat/sync1_messages.jsonl index 653e49e39e207..a37e8a603749e 100644 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/testFixtures/resources/dat/sync1_messages.jsonl +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/testFixtures/resources/dat/sync1_messages.jsonl @@ -8,7 +8,7 @@ {"type": "RECORD", "record": {"emitted_at": 1000, "data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}}} // Emit a record with no _ab_cdc_deleted_at field. CDC sources typically emit an explicit null, but we should handle both cases. {"type": "RECORD", "record": {"emitted_at": 1000, "data": {"id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}}}} -// Emit a record with an invalid age. -{"type": "RECORD", "record": {"emitted_at": 1000, "data": {"id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00Z", "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}}} +// Emit a record with an invalid age & address nulled at source. +{"type": "RECORD", "record": {"emitted_at": 1000, "data": {"id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00Z", "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}, "meta": {"changes": [{"field": "address", "change": "NULLED", "reason": "SOURCE_RETRIEVAL_ERROR"}]}}} // Emit a record with interesting characters in one of the values. {"type": "RECORD", "record": {"emitted_at": 1000, "data": {"id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"}}} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/testFixtures/resources/sqlgenerator/alltypes_inputrecords.jsonl b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/testFixtures/resources/sqlgenerator/alltypes_inputrecords.jsonl index c21fc0bbb6abe..8f8ced8a26a1c 100644 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/testFixtures/resources/sqlgenerator/alltypes_inputrecords.jsonl +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/testFixtures/resources/sqlgenerator/alltypes_inputrecords.jsonl @@ -2,6 +2,6 @@ {"_airbyte_raw_id": "53ce75a5-5bcc-47a3-b45c-96c2015cfe35", "_airbyte_extracted_at": "2023-01-01T00:00:00Z", "_airbyte_data": {"id1": 2, "id2": 100, "updated_at": "2023-01-01T01:00:00Z", "array": null, "struct": null, "string": null, "number": null, "integer": null, "boolean": null, "timestamp_with_timezone": null, "timestamp_without_timezone": null, "time_with_timezone": null, "time_without_timezone": null, "date": null, "unknown": null}} {"_airbyte_raw_id": "7e1fac0c-017e-4ad6-bc78-334a34d64fbe", "_airbyte_extracted_at": "2023-01-01T00:00:00Z", "_airbyte_data": {"id1": 3, "id2": 100, "updated_at": "2023-01-01T01:00:00Z"}} // Note that array and struct have invalid values ({} and [] respectively). -{"_airbyte_raw_id": "84242b60-3a34-4531-ad75-a26702960a9a", "_airbyte_extracted_at": "2023-01-01T00:00:00Z", "_airbyte_data": {"id1": 4, "id2": 100, "updated_at": "2023-01-01T01:00:00Z", "array": {}, "struct": [], "string": null, "number": "foo", "integer": "bar", "boolean": "fizz", "timestamp_with_timezone": {}, "timestamp_without_timezone": {}, "time_with_timezone": {}, "time_without_timezone": {}, "date": "airbyte", "unknown": null}} +{"_airbyte_raw_id": "84242b60-3a34-4531-ad75-a26702960a9a", "_airbyte_extracted_at": "2023-01-01T00:00:00Z", "_airbyte_data": {"id1": 4, "id2": 100, "updated_at": "2023-01-01T01:00:00Z", "array": {}, "struct": [], "string": null, "number": "foo", "integer": "bar", "boolean": "fizz", "timestamp_with_timezone": {}, "timestamp_without_timezone": {}, "time_with_timezone": {}, "time_without_timezone": {}, "date": "airbyte", "unknown": null}, "_airbyte_meta": {"changes": [{"field": "string", "change": "NULLED", "reason": "SOURCE_SERIALIZATION_ERROR"}]}} {"_airbyte_raw_id": "a4a783b5-7729-4d0b-b659-48ceb08713f1", "_airbyte_extracted_at": "2023-01-01T00:00:00Z", "_airbyte_data": {"id1": 5, "id2": 100, "updated_at": "2023-01-01T01:00:00Z", "number": 67.174118, "struct": {"nested_number": 67.174118}, "array": [67.174118], "unknown": 67.174118}} {"_airbyte_raw_id": "7e1fac0c-017e-4ad6-bc78-334a34d64fce", "_airbyte_extracted_at": "2023-01-01T00:00:00Z", "_airbyte_data": {"id1": 6, "id2": 100, "updated_at": "2023-01-01T01:00:00Z", "IamACaseSensitiveColumnName": "Case senstive value"}} \ No newline at end of file diff --git a/airbyte-cdk/python/.bumpversion.cfg b/airbyte-cdk/python/.bumpversion.cfg deleted file mode 100644 index e23d05b9a01fd..0000000000000 --- a/airbyte-cdk/python/.bumpversion.cfg +++ /dev/null @@ -1,7 +0,0 @@ -[bumpversion] -current_version = 0.68.4 -commit = False - -[bumpversion:file:setup.py] - -[bumpversion:file:Dockerfile] diff --git a/airbyte-cdk/python/CHANGELOG.md b/airbyte-cdk/python/CHANGELOG.md index 3e0ad06f11c59..76ea2de3a7c98 100644 --- a/airbyte-cdk/python/CHANGELOG.md +++ b/airbyte-cdk/python/CHANGELOG.md @@ -1,5 +1,116 @@ # Changelog +## 0.81.6 +Upgrade to recent version of langchain + +## 0.81.5 +Updated langchain version and add langchain_core as a dependency + +## 0.81.4 +Adding stream_descriptor as part of AirbyteTracedException.__init__ + +## 0.81.3 +Republish print buffer after previous pypi attempt timed out + +## 0.81.2 +Fix concurrent CDK printing by flushing the print buffer for every message + +## 0.81.1 +Concurrent CDK: add logging on exception + +## 0.81.0 +Unpin airbyte-protocol-models library + +## 0.80.0 +Concurrent CDK: support partitioned states + +## 0.79.2 +Concurrent CDK: Print error messages properly so that they can be categorized + +## 0.79.1 +Dummy patch to test new publishing flow fixes + +## 0.79.0 +Update release process of airbyte-cdk and source-declarative manifest + +## 0.78.9 +Fix CDK version mismatch introduced in 0.78.8 + +## 0.78.8 +Update error messaging/type for missing streams. Note: version mismatch, please use 0.78.9 instead + +## 0.78.6 +low-code: add backward compatibility for old close slice behavior + +## 0.78.5 +low-code: fix stop_condition instantiation in the cursor pagination + +## 0.78.4 +low-code: Add last_record and last_page_size interpolation variables to pagination + +## 0.78.3 +Fix dependencies for file-based extras + +## 0.78.2 +low-code: fix retrieving partition key for legacy state migration + +## 0.78.1 +connector-builder: return full url-encoded URL instead of separating parameters + +## 0.78.0 +low-code: Allow state migration with CustomPartitionRouter + +## 0.77.2 +Emit state recordCount as float instead of integer + +## 0.77.1 +Fix empty , , extras packages + +## 0.77.0 +low-code: Add string interpolation filter + +## 0.76.0 +Migrate Python CDK to Poetry + +## 0.75.0 +low-code: Add StateMigration component + +## 0.74.0 +Request option params are allowed to be an array + +## 0.73.0 +set minimum python version to 3.9 + +## 0.72.2 +Connector Builder: have schema fields be nullable by default except from PK and cursor field + +## 0.72.1 +low code: add refresh_token_error handler to DeclarativeOauth2Authenticator + +## 0.72.0 +low-code: Allow defining custom schema loaders + +## 0.71.0 +Declarative datetime-based cursors now only derive state values from records that were read + +## 0.70.2 +low-code: remove superfluous sleep + +## 0.70.1 +File-based CDK: Fix tab delimiter configuration in CSV file type + +## 0.70.0 +testing + +## 0.69.2 +low-code: improve error message when a custom component cannot be found + +## 0.69.1 +Update mock server test entrypoint wrapper to use per-stream state + +## 0.69.0 +Include recordCount in stream state messages and final state message for full refresh syncs + ## 0.68.4 low-code: update cartesian stream slice to emit typed StreamSlice diff --git a/airbyte-cdk/python/Dockerfile b/airbyte-cdk/python/Dockerfile deleted file mode 100644 index fdf8120039048..0000000000000 --- a/airbyte-cdk/python/Dockerfile +++ /dev/null @@ -1,36 +0,0 @@ -FROM python:3.9.18-alpine3.18 as base - -# build and load all requirements -FROM base as builder -WORKDIR /airbyte/integration_code - -# upgrade pip to the latest version -RUN apk --no-cache upgrade \ - && pip install --upgrade pip \ - && apk --no-cache add tzdata build-base - -# install airbyte-cdk -RUN pip install --prefix=/install airbyte-cdk==0.68.4 - -# build a clean environment -FROM base -WORKDIR /airbyte/integration_code - -# copy all loaded and built libraries to a pure basic image -COPY --from=builder /install /usr/local -# add default timezone settings -COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime -RUN echo "Etc/UTC" > /etc/timezone - -# bash is installed for more convenient debugging. -RUN apk --no-cache add bash - -# copy payload code only -COPY source_declarative_manifest/main.py ./ - -ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] - -# needs to be the same as CDK -LABEL io.airbyte.version=0.68.4 -LABEL io.airbyte.name=airbyte/source-declarative-manifest diff --git a/airbyte-cdk/python/README.md b/airbyte-cdk/python/README.md index 5b0bb5840a50d..808a57e3806d7 100644 --- a/airbyte-cdk/python/README.md +++ b/airbyte-cdk/python/README.md @@ -1,114 +1,141 @@ -# Connector Development Kit \(Python\) +# Airbyte Python CDK and Low-Code CDK -The Airbyte Python CDK is a framework for rapidly developing production-grade Airbyte connectors.The CDK currently offers helpers specific for creating Airbyte source connectors for: +Airbyte Python CDK is a framework for building Airbyte API Source Connectors. It provides a set of +classes and helpers that make it easy to build a connector against an HTTP API (REST, GraphQL, etc), +or a generic Python source connector. -- HTTP APIs \(REST APIs, GraphQL, etc..\) -- Generic Python sources \(anything not covered by the above\) +## Usage -The CDK provides an improved developer experience by providing basic implementation structure and abstracting away low-level glue boilerplate. +If you're looking to build a connector, we highly recommend that you +[start with the Connector Builder](https://docs.airbyte.com/connector-development/connector-builder-ui/overview). +It should be enough for 90% connectors out there. For more flexible and complex connectors, use the +[low-code CDK and `SourceDeclarativeManifest`](https://docs.airbyte.com/connector-development/config-based/low-code-cdk-overview). -This document is a general introduction to the CDK. Readers should have basic familiarity with the [Airbyte Specification](https://docs.airbyte.com/understanding-airbyte/airbyte-protocol/) before proceeding. +If that doesn't work, then consider building on top of the +[lower-level Python CDK itself](https://docs.airbyte.com/connector-development/cdk-python/). -## Getting Started +### Quick Start -Generate an empty connector using the code generator. First clone the Airbyte repository then from the repository root run +To get started on a Python CDK based connector or a low-code connector, you can generate a connector +project from a template: ```bash +# from the repo root cd airbyte-integrations/connector-templates/generator ./generate.sh ``` -then follow the interactive prompt. Next, find all `TODO`s in the generated project directory -- they're accompanied by lots of comments explaining what you'll need to do in order to implement your connector. Upon completing all TODOs properly, you should have a functioning connector. - -Additionally, you can follow [this tutorial](https://docs.airbyte.com/connector-development/cdk-python/) for a complete walkthrough of creating an HTTP connector using the Airbyte CDK. - -### Concepts & Documentation - -See the [concepts docs](docs/concepts/) for a tour through what the API offers. - ### Example Connectors **HTTP Connectors**: -- [Stripe](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-stripe/source_stripe/source.py) -- [Slack](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-slack/source_slack/source.py) +- [Stripe](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-stripe/) +- [Salesforce](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-salesforce/) -**Simple Python connectors using the barebones `Source` abstraction**: +**Simple Python connectors using the bare-bones `Source` abstraction**: - [Google Sheets](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-google-sheets/google_sheets_source/google_sheets_source.py) -- [Mailchimp](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-mailchimp/source_mailchimp/source.py) + +This will generate a project with a type and a name of your choice and put it in +`airbyte-integrations/connectors`. Open the directory with your connector in an editor and follow +the `TODO` items. + +## Python CDK Overview + +Airbyte CDK code is within `airbyte_cdk` directory. Here's a high level overview of what's inside: + +- `connector_builder`. Internal wrapper that helps the Connector Builder platform run a declarative + manifest (low-code connector). You should not use this code directly. If you need to run a + `SourceDeclarativeManifest`, take a look at + [`source-declarative-manifest`](https://github.com/airbytehq/airbyte/tree/master/airbyte-integrations/connectors/source-declarative-manifest) + connector implementation instead. +- `destinations`. Basic Destination connector support! If you're building a Destination connector in + Python, try that. Some of our vector DB destinations like `destination-pinecone` are using that + code. +- `models` expose `airbyte_protocol.models` as a part of `airbyte_cdk` package. +- `sources/concurrent_source` is the Concurrent CDK implementation. It supports reading data from + streams concurrently per slice / partition, useful for connectors with high throughput and high + number of records. +- `sources/declarative` is the low-code CDK. It works on top of Airbyte Python CDK, but provides a + declarative manifest language to define streams, operations, etc. This makes it easier to build + connectors without writing Python code. +- `sources/file_based` is the CDK for file-based sources. Examples include S3, Azure, GCS, etc. +- `sources/singer` is a singer tap source adapter. Deprecated. ## Contributing -### First time setup +Thank you for being interested in contributing to Airbyte Python CDK! Here are some guidelines to +get you started: + +- We adhere to the [code of conduct](/CODE_OF_CONDUCT.md). +- You can contribute by reporting bugs, posting github discussions, opening issues, improving [documentation](/docs/), and + submitting pull requests with bugfixes and new features alike. +- If you're changing the code, please add unit tests for your change. +- When submitting issues or PRs, please add a small reproduction project. Using the changes in your + connector and providing that connector code as an example (or a satellite PR) helps! -We assume `python` points to Python 3.9 or higher. +### First time setup -Setup a virtual env: +Install the project dependencies and development tools: ```bash -python -m venv .venv -source .venv/bin/activate -pip install -e ".[dev]" # [dev] installs development-only dependencies +poetry install --all-extras ``` -#### Iteration +Installing all extras is required to run the full suite of unit tests. + +#### Running tests locally -- Iterate on the code locally -- Run tests via `python -m pytest -s unit_tests` -- Perform static type checks using `mypy airbyte_cdk`. `MyPy` configuration is in `mypy.ini`. -- Run `mypy ` to only check specific files. This is useful as the CDK still contains code that is not compliant. -- The `type_check_and_test.sh` script bundles both type checking and testing in one convenient command. Feel free to use it! +- Iterate on the CDK code locally +- Run tests via `poetry run poe unit-test-with-cov`, or `python -m pytest -s unit_tests` if you want + to pass pytest options. +- Run `poetry run poe check-local` to lint all code, type-check modified code, and run unit tests + with coverage in one command. + +To see all available scripts, run `poetry run poe`. ##### Autogenerated files -If the iteration you are working on includes changes to the models, you might want to regenerate them. In order to do that, you can run: +Low-code CDK models are generated from `sources/declarative/declarative_component_schema.yaml`. If +the iteration you are working on includes changes to the models or the connector generator, you +might want to regenerate them. In order to do that, you can run: ```bash -cd airbyte-cdk/python -./gradlew build +poetry run poe build ``` -This will generate the files based on the schemas, add the license information and format the code. If you want to only do the former and rely on -pre-commit to the others, you can run the appropriate generation command i.e. `./gradlew generateComponentManifestClassFiles`. +This will generate the code generator docker image and the component manifest files based on the +schemas and templates. #### Testing -All tests are located in the `unit_tests` directory. Run `python -m pytest --cov=airbyte_cdk unit_tests/` to run them. This also presents a test coverage report. +All tests are located in the `unit_tests` directory. Run `poetry run poe unit-test-with-cov` to run +them. This also presents a test coverage report. For faster iteration with no coverage report and +more options, `python -m pytest -s unit_tests` is a good place to start. #### Building and testing a connector with your local CDK -When developing a new feature in the CDK, you may find it helpful to run a connector that uses that new feature. You can test this in one of two ways: +When developing a new feature in the CDK, you may find it helpful to run a connector that uses that +new feature. You can test this in one of two ways: - Running a connector locally - Building and running a source via Docker ##### Installing your local CDK into a local Python connector -In order to get a local Python connector running your local CDK, do the following. - -First, make sure you have your connector's virtual environment active: - -```bash -# from the `airbyte/airbyte-integrations/connectors/` directory -source .venv/bin/activate - -# if you haven't installed dependencies for your connector already -pip install -e . -``` - -Then, navigate to the CDK and install it in editable mode: +Open the connector's `pyproject.toml` file and replace the line with `airbyte_cdk` with the +following: -```bash -cd ../../../airbyte-cdk/python -pip install -e . +```toml +airbyte_cdk = { path = "../../../airbyte-cdk/python/airbyte_cdk", develop = true } ``` -You should see that `pip` has uninstalled the version of `airbyte-cdk` defined by your connector's `setup.py` and installed your local CDK. Any changes you make will be immediately reflected in your editor, so long as your editor's interpreter is set to your connector's virtual environment. +Then, running `poetry update` should reinstall `airbyte_cdk` from your local working directory. ##### Building a Python connector in Docker with your local CDK installed -_Pre-requisite: Install the [`airbyte-ci` CLI](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md)_ +_Pre-requisite: Install the +[`airbyte-ci` CLI](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md)_ You can build your connector image with the local CDK using @@ -117,13 +144,16 @@ You can build your connector image with the local CDK using airbyte-ci connectors --use-local-cdk --name= build ``` -Note that the local CDK is injected at build time, so if you make changes, you will have to run the build command again to see them reflected. +Note that the local CDK is injected at build time, so if you make changes, you will have to run the +build command again to see them reflected. ##### Running Connector Acceptance Tests for a single connector in Docker with your local CDK installed -_Pre-requisite: Install the [`airbyte-ci` CLI](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md)_ +_Pre-requisite: Install the +[`airbyte-ci` CLI](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md)_ -To run acceptance tests for a single connectors using the local CDK, from the connector directory, run +To run acceptance tests for a single connectors using the local CDK, from the connector directory, +run ```bash airbyte-ci connectors --use-local-cdk --name= test @@ -131,9 +161,13 @@ airbyte-ci connectors --use-local-cdk --name= test #### When you don't have access to the API -There can be some time where you do not have access to the API (either because you don't have the credentials, network access, etc...) You will probably still want to do end-to-end testing at least once. In order to do so, you can emulate the server you would be reaching using a server stubbing tool. +There may be a time when you do not have access to the API (either because you don't have the +credentials, network access, etc...) You will probably still want to do end-to-end testing at least +once. In order to do so, you can emulate the server you would be reaching using a server stubbing +tool. -For example, using [mockserver](https://www.mock-server.com/), you can set up an expectation file like this: +For example, using [mockserver](https://www.mock-server.com/), you can set up an expectation file +like this: ```json { @@ -147,15 +181,39 @@ For example, using [mockserver](https://www.mock-server.com/), you can set up an } ``` -Assuming this file has been created at `secrets/mock_server_config/expectations.json`, running the following command will allow to match any requests on path `/data` to return the response defined in the expectation file: +Assuming this file has been created at `secrets/mock_server_config/expectations.json`, running the +following command will allow to match any requests on path `/data` to return the response defined in +the expectation file: ```bash docker run -d --rm -v $(pwd)/secrets/mock_server_config:/config -p 8113:8113 --env MOCKSERVER_LOG_LEVEL=TRACE --env MOCKSERVER_SERVER_PORT=8113 --env MOCKSERVER_WATCH_INITIALIZATION_JSON=true --env MOCKSERVER_PERSISTED_EXPECTATIONS_PATH=/config/expectations.json --env MOCKSERVER_INITIALIZATION_JSON_PATH=/config/expectations.json mockserver/mockserver:5.15.0 ``` -HTTP requests to `localhost:8113/data` should now return the body defined in the expectations file. To test this, the implementer either has to change the code which defines the base URL for Python source or update the `url_base` from low-code. With the Connector Builder running in docker, you will have to use domain `host.docker.internal` instead of `localhost` as the requests are executed within docker. +HTTP requests to `localhost:8113/data` should now return the body defined in the expectations file. +To test this, the implementer either has to change the code which defines the base URL for Python +source or update the `url_base` from low-code. With the Connector Builder running in docker, you +will have to use domain `host.docker.internal` instead of `localhost` as the requests are executed +within docker. #### Publishing a new version to PyPi -1. Open a PR -2. Once it is approved and **merged**, an Airbyte member must run the `Publish CDK Manually` workflow from master using `release-type=major|manor|patch` and setting the changelog message. +Python CDK has a +[GitHub workflow](https://github.com/airbytehq/airbyte/actions/workflows/publish-cdk-command-manually.yml) +that manages the CDK changelog, making a new release for `airbyte_cdk`, publishing it to PyPI, and then making a commit to update (and subsequently auto-release) +[`source-declarative-m anifest`](https://github.com/airbytehq/airbyte/tree/master/airbyte-integrations/connectors/source-declarative-manifest) +and Connector Builder (in the platform repository). + +> [!Note]: The workflow will handle the `CHANGELOG.md` entry for you. You should +> not add changelog lines in your PRs to the CDK itself. + +> [!Warning]: The workflow bumps version on it's own, please don't change the +> CDK version in `pyproject.toml` manually. + +1. You only trigger the release workflow once all the PRs that you want to be included are already merged into the `master` branch. +2. The [`Publish CDK Manually`](https://github.com/airbytehq/airbyte/actions/workflows/publish-cdk-command-manually.yml) workflow from master using `release-type=major|manor|patch` and setting the changelog message. +3. When the workflow runs, it will commit a new version directly to master + branch. +4. The workflow will bump the version of `source-declarative-manifest` according to the `release-type` of the CDK, then commit these changes + back to master. The commit to master will kick off a publish of the new version of `source-declarative-manifest`. +5. The workflow will also add a pull request to `airbyte-platform-internal` + repo to bump the dependency in Connector Builder. diff --git a/airbyte-cdk/python/airbyte_cdk/connector_builder/README.md b/airbyte-cdk/python/airbyte_cdk/connector_builder/README.md index 6788a6f226b16..cc2cf7064e29b 100644 --- a/airbyte-cdk/python/airbyte_cdk/connector_builder/README.md +++ b/airbyte-cdk/python/airbyte_cdk/connector_builder/README.md @@ -1,44 +1,53 @@ # Connector Builder Backend -This is the backend for requests from the [Connector Builder](https://docs.airbyte.com/connector-development/connector-builder-ui/overview/). +This is the backend for requests from the +[Connector Builder](https://docs.airbyte.com/connector-development/connector-builder-ui/overview/). ## Local development ### Locally running the Connector Builder backend -``` +```bash python main.py read --config path/to/config --catalog path/to/catalog ``` Note: -- Requires the keys `__injected_declarative_manifest` and `__command` in its config, where `__injected_declarative_manifest` is a JSON manifest and `__command` is one of the commands handled by the ConnectorBuilderHandler (`stream_read` or `resolve_manifest`), i.e. -``` + +- Requires the keys `__injected_declarative_manifest` and `__command` in its config, where + `__injected_declarative_manifest` is a JSON manifest and `__command` is one of the commands + handled by the ConnectorBuilderHandler (`stream_read` or `resolve_manifest`), i.e. + +```json { "config": , "__injected_declarative_manifest": {...}, "__command": <"resolve_manifest" | "test_read"> } ``` -*See [ConnectionSpecification](https://docs.airbyte.com/understanding-airbyte/airbyte-protocol/#actor-specification) for details on the `"config"` key if needed. + +\*See +[ConnectionSpecification](https://docs.airbyte.com/understanding-airbyte/airbyte-protocol/#actor-specification) +for details on the `"config"` key if needed. - When the `__command` is `resolve_manifest`, the argument to `catalog` should be an empty string. -- The config can optionally contain an object under the `__test_read_config` key which can define custom test read limits with `max_records`, `max_slices`, and `max_pages_per_slice` properties. All custom limits are optional; a default will be used for any limit that is not provided. +- The config can optionally contain an object under the `__test_read_config` key which can define + custom test read limits with `max_records`, `max_slices`, and `max_pages_per_slice` properties. + All custom limits are optional; a default will be used for any limit that is not provided. ### Locally running the docker image #### Build First, make sure you build the latest Docker image: -``` -./gradlew airbyte-cdk:python:airbyteDocker -``` -The docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in the Dockerfile. +```bash +docker build -t airbyte/source-declarative-manifest:dev . +``` #### Run Then run any of the connector commands as follows: -``` +```bash docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-declarative-manifest:dev read --config /secrets/config.json ``` diff --git a/airbyte-cdk/python/airbyte_cdk/connector_builder/message_grouper.py b/airbyte-cdk/python/airbyte_cdk/connector_builder/message_grouper.py index b30a3a3744f48..6abde6724dd02 100644 --- a/airbyte-cdk/python/airbyte_cdk/connector_builder/message_grouper.py +++ b/airbyte-cdk/python/airbyte_cdk/connector_builder/message_grouper.py @@ -7,7 +7,6 @@ from copy import deepcopy from json import JSONDecodeError from typing import Any, Dict, Iterable, Iterator, List, Mapping, Optional, Union -from urllib.parse import parse_qs, urlparse from airbyte_cdk.connector_builder.models import ( AuxiliaryRequest, @@ -24,7 +23,7 @@ from airbyte_cdk.sources.utils.types import JsonType from airbyte_cdk.utils import AirbyteTracedException from airbyte_cdk.utils.datetime_format_inferrer import DatetimeFormatInferrer -from airbyte_cdk.utils.schema_inferrer import SchemaInferrer +from airbyte_cdk.utils.schema_inferrer import SchemaInferrer, SchemaValidationException from airbyte_protocol.models.airbyte_protocol import ( AirbyteControlMessage, AirbyteLogMessage, @@ -45,6 +44,32 @@ def __init__(self, max_pages_per_slice: int, max_slices: int, max_record_limit: self._max_slices = max_slices self._max_record_limit = max_record_limit + def _pk_to_nested_and_composite_field(self, field: Optional[Union[str, List[str], List[List[str]]]]) -> List[List[str]]: + if not field: + return [[]] + + if isinstance(field, str): + return [[field]] + + is_composite_key = isinstance(field[0], str) + if is_composite_key: + return [[i] for i in field] # type: ignore # the type of field is expected to be List[str] here + + return field # type: ignore # the type of field is expected to be List[List[str]] here + + def _cursor_field_to_nested_and_composite_field(self, field: Union[str, List[str]]) -> List[List[str]]: + if not field: + return [[]] + + if isinstance(field, str): + return [[field]] + + is_nested_key = isinstance(field[0], str) + if is_nested_key: + return [field] # type: ignore # the type of field is expected to be List[str] here + + raise ValueError(f"Unknown type for cursor field `{field}") + def get_message_groups( self, source: DeclarativeSource, @@ -54,7 +79,11 @@ def get_message_groups( ) -> StreamRead: if record_limit is not None and not (1 <= record_limit <= self._max_record_limit): raise ValueError(f"Record limit must be between 1 and {self._max_record_limit}. Got {record_limit}") - schema_inferrer = SchemaInferrer() + stream = source.streams(config)[0] # The connector builder currently only supports reading from a single stream at a time + schema_inferrer = SchemaInferrer( + self._pk_to_nested_and_composite_field(stream.primary_key), + self._cursor_field_to_nested_and_composite_field(stream.cursor_field), + ) datetime_format_inferrer = DatetimeFormatInferrer() if record_limit is None: @@ -88,14 +117,20 @@ def get_message_groups( else: raise ValueError(f"Unknown message group type: {type(message_group)}") + try: + configured_stream = configured_catalog.streams[0] # The connector builder currently only supports reading from a single stream at a time + schema = schema_inferrer.get_stream_schema(configured_stream.stream.name) + except SchemaValidationException as exception: + for validation_error in exception.validation_errors: + log_messages.append(LogMessage(validation_error, "ERROR")) + schema = exception.schema + return StreamRead( logs=log_messages, slices=slices, test_read_limit_reached=self._has_reached_limit(slices), auxiliary_requests=auxiliary_requests, - inferred_schema=schema_inferrer.get_stream_schema( - configured_catalog.streams[0].stream.name - ), # The connector builder currently only supports reading from a single stream at a time + inferred_schema=schema, latest_config_update=self._clean_config(latest_config_update.connectorConfig.config) if latest_config_update else None, inferred_datetime_formats=datetime_format_inferrer.get_inferred_datetime_formats(), ) @@ -264,15 +299,12 @@ def _parse_json(log_message: AirbyteLogMessage) -> JsonType: @staticmethod def _create_request_from_log_message(json_http_message: Dict[str, Any]) -> HttpRequest: - url = urlparse(json_http_message.get("url", {}).get("full", "")) - full_path = f"{url.scheme}://{url.hostname}{url.path}" if url else "" + url = json_http_message.get("url", {}).get("full", "") request = json_http_message.get("http", {}).get("request", {}) - parameters = parse_qs(url.query) or None return HttpRequest( - url=full_path, + url=url, http_method=request.get("method", ""), headers=request.get("headers"), - parameters=parameters, body=request.get("body", {}).get("content", ""), ) diff --git a/airbyte-cdk/python/airbyte_cdk/connector_builder/models.py b/airbyte-cdk/python/airbyte_cdk/connector_builder/models.py index 06cbe215e4472..8afab45cd6fda 100644 --- a/airbyte-cdk/python/airbyte_cdk/connector_builder/models.py +++ b/airbyte-cdk/python/airbyte_cdk/connector_builder/models.py @@ -16,7 +16,6 @@ class HttpResponse: @dataclass class HttpRequest: url: str - parameters: Optional[Dict[str, Any]] headers: Optional[Dict[str, Any]] http_method: str body: Optional[str] = None diff --git a/airbyte-cdk/python/airbyte_cdk/destinations/vector_db_based/document_processor.py b/airbyte-cdk/python/airbyte_cdk/destinations/vector_db_based/document_processor.py index 06d3c892dd5d9..e9a9c007e25e2 100644 --- a/airbyte-cdk/python/airbyte_cdk/destinations/vector_db_based/document_processor.py +++ b/airbyte-cdk/python/airbyte_cdk/destinations/vector_db_based/document_processor.py @@ -12,9 +12,9 @@ from airbyte_cdk.destinations.vector_db_based.utils import create_stream_identifier from airbyte_cdk.models import AirbyteRecordMessage, ConfiguredAirbyteCatalog, ConfiguredAirbyteStream, DestinationSyncMode from airbyte_cdk.utils.traced_exception import AirbyteTracedException, FailureType -from langchain.document_loaders.base import Document from langchain.text_splitter import Language, RecursiveCharacterTextSplitter from langchain.utils import stringify_dict +from langchain_core.documents.base import Document METADATA_STREAM_FIELD = "_ab_stream" METADATA_RECORD_ID_FIELD = "_ab_record_id" diff --git a/airbyte-cdk/python/airbyte_cdk/entrypoint.py b/airbyte-cdk/python/airbyte_cdk/entrypoint.py index 3852cb7e9890f..557d3369f94a9 100644 --- a/airbyte-cdk/python/airbyte_cdk/entrypoint.py +++ b/airbyte-cdk/python/airbyte_cdk/entrypoint.py @@ -10,23 +10,24 @@ import socket import sys import tempfile +from collections import defaultdict from functools import wraps -from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Union +from typing import Any, DefaultDict, Iterable, List, Mapping, MutableMapping, Optional, Union from urllib.parse import urlparse import requests from airbyte_cdk.connector import TConfig from airbyte_cdk.exception_handler import init_uncaught_exception_handler from airbyte_cdk.logger import init_logger -from airbyte_cdk.models import AirbyteMessage, Status, Type -from airbyte_cdk.models.airbyte_protocol import ConnectorSpecification # type: ignore [attr-defined] +from airbyte_cdk.models import AirbyteMessage, FailureType, Status, Type +from airbyte_cdk.models.airbyte_protocol import AirbyteStateStats, ConnectorSpecification # type: ignore [attr-defined] from airbyte_cdk.sources import Source +from airbyte_cdk.sources.connector_state_manager import HashableStreamDescriptor from airbyte_cdk.sources.utils.schema_helpers import check_config_against_spec_or_exit, split_config -from airbyte_cdk.utils import is_cloud_environment +from airbyte_cdk.utils import is_cloud_environment, message_utils from airbyte_cdk.utils.airbyte_secrets_utils import get_secrets, update_secrets from airbyte_cdk.utils.constants import ENV_REQUEST_CACHE_PATH from airbyte_cdk.utils.traced_exception import AirbyteTracedException -from airbyte_protocol.models import FailureType from requests import PreparedRequest, Response, Session logger = init_logger("airbyte") @@ -39,7 +40,7 @@ class AirbyteEntrypoint(object): def __init__(self, source: Source): init_uncaught_exception_handler(logger) - # deployment mode is read when instantiating the entrypoint because it is the common path shared by syncs and connector builder test requests + # Deployment mode is read when instantiating the entrypoint because it is the common path shared by syncs and connector builder test requests if is_cloud_environment(): _init_internal_request_filter() @@ -160,8 +161,28 @@ def read( if self.source.check_config_against_spec: self.validate_connection(source_spec, config) - yield from self.source.read(self.logger, config, catalog, state) - yield from self._emit_queued_messages(self.source) + # The Airbyte protocol dictates that counts be expressed as float/double to better protect against integer overflows + stream_message_counter: DefaultDict[HashableStreamDescriptor, float] = defaultdict(float) + for message in self.source.read(self.logger, config, catalog, state): + yield self.handle_record_counts(message, stream_message_counter) + for message in self._emit_queued_messages(self.source): + yield self.handle_record_counts(message, stream_message_counter) + + @staticmethod + def handle_record_counts(message: AirbyteMessage, stream_message_count: DefaultDict[HashableStreamDescriptor, float]) -> AirbyteMessage: + if message.type == Type.RECORD: + stream_message_count[message_utils.get_stream_descriptor(message)] += 1.0 + + elif message.type == Type.STATE: + stream_descriptor = message_utils.get_stream_descriptor(message) + + # Set record count from the counter onto the state message + message.state.sourceStats = message.state.sourceStats or AirbyteStateStats() + message.state.sourceStats.recordCount = stream_message_count.get(stream_descriptor, 0.0) + + # Reset the counter + stream_message_count[stream_descriptor] = 0.0 + return message @staticmethod def validate_connection(source_spec: ConnectorSpecification, config: TConfig) -> None: @@ -214,7 +235,7 @@ def launch(source: Source, args: List[str]) -> None: for message in source_entrypoint.run(parsed_args): # simply printing is creating issues for concurrent CDK as Python uses different two instructions to print: one for the message and # the other for the break line. Adding `\n` to the message ensure that both are printed at the same time - print(f"{message}\n", end="") + print(f"{message}\n", end="", flush=True) def _init_internal_request_filter() -> None: diff --git a/airbyte-cdk/python/airbyte_cdk/sources/abstract_source.py b/airbyte-cdk/python/airbyte_cdk/sources/abstract_source.py index 208aee6b8a4ae..a1f1ee1495b94 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/abstract_source.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/abstract_source.py @@ -113,9 +113,16 @@ def read( if not stream_instance: if not self.raise_exception_on_missing_stream: continue - raise KeyError( - f"The stream {configured_stream.stream.name} no longer exists in the configuration. " - f"Refresh the schema in replication settings and remove this stream from future sync attempts." + + error_message = ( + f"The stream '{configured_stream.stream.name}' in your connection configuration was not found in the source. " + f"Refresh the schema in your replication settings and remove this stream from future sync attempts." + ) + + raise AirbyteTracedException( + message="A stream listed in your configuration was not found in the source. Please check the logs for more details.", + internal_message=error_message, + failure_type=FailureType.config_error, ) try: @@ -152,13 +159,14 @@ def read( logger.info(f"Marking stream {configured_stream.stream.name} as STOPPED") yield stream_status_as_airbyte_message(configured_stream.stream, AirbyteStreamStatus.INCOMPLETE) display_message = stream_instance.get_error_display_message(e) + stream_descriptor = StreamDescriptor(name=configured_stream.stream.name) if display_message: - traced_exception = AirbyteTracedException.from_exception(e, message=display_message) + traced_exception = AirbyteTracedException.from_exception( + e, message=display_message, stream_descriptor=stream_descriptor + ) else: - traced_exception = AirbyteTracedException.from_exception(e) - yield traced_exception.as_sanitized_airbyte_message( - stream_descriptor=StreamDescriptor(name=configured_stream.stream.name) - ) + traced_exception = AirbyteTracedException.from_exception(e, stream_descriptor=stream_descriptor) + yield traced_exception.as_sanitized_airbyte_message() stream_name_to_exception[stream_instance.name] = traced_exception if self.stop_sync_on_stream_failure: logger.info(f"{self.name} does not support continuing syncs on error from stream {configured_stream.stream.name}") diff --git a/airbyte-cdk/python/airbyte_cdk/sources/concurrent_source/concurrent_read_processor.py b/airbyte-cdk/python/airbyte_cdk/sources/concurrent_source/concurrent_read_processor.py index 24ac315c526e5..7e92be5380649 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/concurrent_source/concurrent_read_processor.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/concurrent_source/concurrent_read_processor.py @@ -7,6 +7,7 @@ from airbyte_cdk.models import AirbyteMessage, AirbyteStreamStatus from airbyte_cdk.models import Type as MessageType from airbyte_cdk.sources.concurrent_source.partition_generation_completed_sentinel import PartitionGenerationCompletedSentinel +from airbyte_cdk.sources.concurrent_source.stream_thread_exception import StreamThreadException from airbyte_cdk.sources.concurrent_source.thread_pool_manager import ThreadPoolManager from airbyte_cdk.sources.message import MessageRepository from airbyte_cdk.sources.streams.concurrent.abstract_stream import AbstractStream @@ -17,7 +18,9 @@ from airbyte_cdk.sources.streams.concurrent.partitions.types import PartitionCompleteSentinel from airbyte_cdk.sources.utils.record_helper import stream_data_to_airbyte_message from airbyte_cdk.sources.utils.slice_logger import SliceLogger +from airbyte_cdk.utils import AirbyteTracedException from airbyte_cdk.utils.stream_status_utils import as_airbyte_message as stream_status_as_airbyte_message +from airbyte_protocol.models import StreamDescriptor class ConcurrentReadProcessor: @@ -56,6 +59,7 @@ def __init__( self._message_repository = message_repository self._partition_reader = partition_reader self._streams_done: Set[str] = set() + self._exceptions_per_stream_name: dict[str, List[Exception]] = {} def on_partition_generation_completed(self, sentinel: PartitionGenerationCompletedSentinel) -> Iterable[AirbyteMessage]: """ @@ -94,14 +98,22 @@ def on_partition_complete_sentinel(self, sentinel: PartitionCompleteSentinel) -> 3. Emit messages that were added to the message repository """ partition = sentinel.partition - partition.close() - partitions_running = self._streams_to_running_partitions[partition.stream_name()] - if partition in partitions_running: - partitions_running.remove(partition) - # If all partitions were generated and this was the last one, the stream is done - if partition.stream_name() not in self._streams_currently_generating_partitions and len(partitions_running) == 0: - yield from self._on_stream_is_done(partition.stream_name()) - yield from self._message_repository.consume_queue() + + try: + partition.close() + except Exception as exception: + self._flag_exception(partition.stream_name(), exception) + yield AirbyteTracedException.from_exception( + exception, stream_descriptor=StreamDescriptor(name=partition.stream_name()) + ).as_sanitized_airbyte_message() + finally: + partitions_running = self._streams_to_running_partitions[partition.stream_name()] + if partition in partitions_running: + partitions_running.remove(partition) + # If all partitions were generated and this was the last one, the stream is done + if partition.stream_name() not in self._streams_currently_generating_partitions and len(partitions_running) == 0: + yield from self._on_stream_is_done(partition.stream_name()) + yield from self._message_repository.consume_queue() def on_record(self, record: Record) -> Iterable[AirbyteMessage]: """ @@ -126,14 +138,20 @@ def on_record(self, record: Record) -> Iterable[AirbyteMessage]: yield message yield from self._message_repository.consume_queue() - def on_exception(self, exception: Exception) -> Iterable[AirbyteMessage]: + def on_exception(self, exception: StreamThreadException) -> Iterable[AirbyteMessage]: """ This method is called when an exception is raised. 1. Stop all running streams 2. Raise the exception """ - yield from self._stop_streams() - raise exception + self._flag_exception(exception.stream_name, exception.exception) + self._logger.exception(f"Exception while syncing stream {exception.stream_name}", exc_info=exception.exception) + yield AirbyteTracedException.from_exception( + exception, stream_descriptor=StreamDescriptor(name=exception.stream_name) + ).as_airbyte_message() + + def _flag_exception(self, stream_name: str, exception: Exception) -> None: + self._exceptions_per_stream_name.setdefault(stream_name, []).append(exception) def start_next_partition_generator(self) -> Optional[AirbyteMessage]: """ @@ -177,13 +195,7 @@ def _on_stream_is_done(self, stream_name: str) -> Iterable[AirbyteMessage]: yield from self._message_repository.consume_queue() self._logger.info(f"Finished syncing {stream.name}") self._streams_done.add(stream_name) - yield stream_status_as_airbyte_message(stream.as_airbyte_stream(), AirbyteStreamStatus.COMPLETE) - - def _stop_streams(self) -> Iterable[AirbyteMessage]: - self._thread_pool_manager.shutdown() - for stream_name in self._streams_to_running_partitions.keys(): - stream = self._stream_name_to_instance[stream_name] - if not self._is_stream_done(stream_name): - self._logger.info(f"Marking stream {stream.name} as STOPPED") - self._logger.info(f"Finished syncing {stream.name}") - yield stream_status_as_airbyte_message(stream.as_airbyte_stream(), AirbyteStreamStatus.INCOMPLETE) + stream_status = ( + AirbyteStreamStatus.INCOMPLETE if self._exceptions_per_stream_name.get(stream_name, []) else AirbyteStreamStatus.COMPLETE + ) + yield stream_status_as_airbyte_message(stream.as_airbyte_stream(), stream_status) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/concurrent_source/concurrent_source.py b/airbyte-cdk/python/airbyte_cdk/sources/concurrent_source/concurrent_source.py index f7d65d31aca70..714a6104d00a3 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/concurrent_source/concurrent_source.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/concurrent_source/concurrent_source.py @@ -9,6 +9,7 @@ from airbyte_cdk.models import AirbyteMessage from airbyte_cdk.sources.concurrent_source.concurrent_read_processor import ConcurrentReadProcessor from airbyte_cdk.sources.concurrent_source.partition_generation_completed_sentinel import PartitionGenerationCompletedSentinel +from airbyte_cdk.sources.concurrent_source.stream_thread_exception import StreamThreadException from airbyte_cdk.sources.concurrent_source.thread_pool_manager import ThreadPoolManager from airbyte_cdk.sources.message import InMemoryMessageRepository, MessageRepository from airbyte_cdk.sources.streams.concurrent.abstract_stream import AbstractStream @@ -123,11 +124,6 @@ def _consume_from_queue( concurrent_stream_processor: ConcurrentReadProcessor, ) -> Iterable[AirbyteMessage]: while airbyte_message_or_record_or_exception := queue.get(): - try: - self._threadpool.shutdown_if_exception() - except Exception as exception: - concurrent_stream_processor.on_exception(exception) - yield from self._handle_item( airbyte_message_or_record_or_exception, concurrent_stream_processor, @@ -142,7 +138,7 @@ def _handle_item( concurrent_stream_processor: ConcurrentReadProcessor, ) -> Iterable[AirbyteMessage]: # handle queue item and call the appropriate handler depending on the type of the queue item - if isinstance(queue_item, Exception): + if isinstance(queue_item, StreamThreadException): yield from concurrent_stream_processor.on_exception(queue_item) elif isinstance(queue_item, PartitionGenerationCompletedSentinel): yield from concurrent_stream_processor.on_partition_generation_completed(queue_item) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/concurrent_source/concurrent_source_adapter.py b/airbyte-cdk/python/airbyte_cdk/sources/concurrent_source/concurrent_source_adapter.py index 6c3b8aa70efbc..260cf3c032b06 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/concurrent_source/concurrent_source_adapter.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/concurrent_source/concurrent_source_adapter.py @@ -5,12 +5,13 @@ from abc import ABC from typing import Any, Iterator, List, Mapping, MutableMapping, Optional, Union -from airbyte_cdk.models import AirbyteMessage, AirbyteStateMessage, ConfiguredAirbyteCatalog +from airbyte_cdk.models import AirbyteMessage, AirbyteStateMessage, ConfiguredAirbyteCatalog, FailureType from airbyte_cdk.sources import AbstractSource from airbyte_cdk.sources.concurrent_source.concurrent_source import ConcurrentSource from airbyte_cdk.sources.streams import Stream from airbyte_cdk.sources.streams.concurrent.abstract_stream import AbstractStream from airbyte_cdk.sources.streams.concurrent.abstract_stream_facade import AbstractStreamFacade +from airbyte_cdk.utils.traced_exception import AirbyteTracedException class ConcurrentSourceAdapter(AbstractSource, ABC): @@ -54,10 +55,18 @@ def _select_abstract_streams(self, config: Mapping[str, Any], configured_catalog if not stream_instance: if not self.raise_exception_on_missing_stream: continue - raise KeyError( - f"The stream {configured_stream.stream.name} no longer exists in the configuration. " - f"Refresh the schema in replication settings and remove this stream from future sync attempts." + + error_message = ( + f"The stream '{configured_stream.stream.name}' in your connection configuration was not found in the source. " + f"Refresh the schema in your replication settings and remove this stream from future sync attempts." + ) + + raise AirbyteTracedException( + message="A stream listed in your configuration was not found in the source. Please check the logs for more details.", + internal_message=error_message, + failure_type=FailureType.config_error, ) + if isinstance(stream_instance, AbstractStreamFacade): abstract_streams.append(stream_instance.get_underlying_stream()) return abstract_streams diff --git a/airbyte-cdk/python/airbyte_cdk/sources/concurrent_source/stream_thread_exception.py b/airbyte-cdk/python/airbyte_cdk/sources/concurrent_source/stream_thread_exception.py new file mode 100644 index 0000000000000..c865bef597326 --- /dev/null +++ b/airbyte-cdk/python/airbyte_cdk/sources/concurrent_source/stream_thread_exception.py @@ -0,0 +1,25 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + +from typing import Any + + +class StreamThreadException(Exception): + def __init__(self, exception: Exception, stream_name: str): + self._exception = exception + self._stream_name = stream_name + + @property + def stream_name(self) -> str: + return self._stream_name + + @property + def exception(self) -> Exception: + return self._exception + + def __str__(self) -> str: + return f"Exception while syncing stream {self._stream_name}: {self._exception}" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, StreamThreadException): + return self._exception == other._exception and self._stream_name == other._stream_name + return False diff --git a/airbyte-cdk/python/airbyte_cdk/sources/concurrent_source/thread_pool_manager.py b/airbyte-cdk/python/airbyte_cdk/sources/concurrent_source/thread_pool_manager.py index 560989af0a6cd..b6933e6bc3d2a 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/concurrent_source/thread_pool_manager.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/concurrent_source/thread_pool_manager.py @@ -71,26 +71,26 @@ def _prune_futures(self, futures: List[Future[Any]]) -> None: ) futures.pop(index) - def shutdown(self) -> None: + def _shutdown(self) -> None: + # Without a way to stop the threads that have already started, this will not stop the Python application. We are fine today with + # this imperfect approach because we only do this in case of `self._most_recently_seen_exception` which we don't expect to happen self._threadpool.shutdown(wait=False, cancel_futures=True) def is_done(self) -> bool: return all([f.done() for f in self._futures]) - def shutdown_if_exception(self) -> None: - """ - This method will raise if there is an exception so that the caller can use it. - """ - if self._most_recently_seen_exception: - self._stop_and_raise_exception(self._most_recently_seen_exception) - def check_for_errors_and_shutdown(self) -> None: """ Check if any of the futures have an exception, and raise it if so. If all futures are done, shutdown the threadpool. If the futures are not done, raise an exception. :return: """ - self.shutdown_if_exception() + if self._most_recently_seen_exception: + self._logger.exception( + "An unknown exception has occurred while reading concurrently", + exc_info=self._most_recently_seen_exception, + ) + self._stop_and_raise_exception(self._most_recently_seen_exception) exceptions_from_futures = [f for f in [future.exception() for future in self._futures] if f is not None] if exceptions_from_futures: @@ -102,8 +102,8 @@ def check_for_errors_and_shutdown(self) -> None: exception = RuntimeError(f"Failed reading with futures not done: {futures_not_done}") self._stop_and_raise_exception(exception) else: - self.shutdown() + self._shutdown() def _stop_and_raise_exception(self, exception: BaseException) -> None: - self.shutdown() + self._shutdown() raise exception diff --git a/airbyte-cdk/python/airbyte_cdk/sources/connector_state_manager.py b/airbyte-cdk/python/airbyte_cdk/sources/connector_state_manager.py index 9a85529d29d39..b53372aee4c9b 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/connector_state_manager.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/connector_state_manager.py @@ -82,7 +82,6 @@ def create_state_message(self, stream_name: str, namespace: Optional[str]) -> Ai Generates an AirbyteMessage using the current per-stream state of a specified stream in either the per-stream or legacy format :param stream_name: The name of the stream for the message that is being created :param namespace: The namespace of the stream for the message that is being created - :param send_per_stream_state: Decides which state format the message should be generated as :return: The Airbyte state message to be emitted by the connector during a sync """ hashable_descriptor = HashableStreamDescriptor(name=stream_name, namespace=namespace) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/create_partial.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/create_partial.py deleted file mode 100644 index bfbdb9078480a..0000000000000 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/create_partial.py +++ /dev/null @@ -1,92 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import inspect -from typing import Any, Mapping - -PARAMETERS_STR = "$parameters" - - -def create(func, /, *args, **keywords): - """ - Create a partial on steroids. - Returns a partial object which when called will behave like func called with the arguments supplied. - Parameters will be interpolated before the creation of the object - The interpolation will take in kwargs, and config as parameters that can be accessed through interpolating. - If any of the parameters are also create functions, they will also be created. - kwargs are propagated to the recursive method calls - - :param func: Function - :param args: - :param keywords: - :return: partially created object - """ - - def newfunc(*fargs, **fkeywords): - - all_keywords = {**keywords} - all_keywords.update(fkeywords) - - # config is a special keyword used for interpolation - config = all_keywords.pop("config", None) - - # $parameters is a special keyword used for interpolation and propagation - if PARAMETERS_STR in all_keywords: - parameters = all_keywords.get(PARAMETERS_STR) - else: - parameters = dict() - - # if config is not none, add it back to the keywords mapping - if config is not None: - all_keywords["config"] = config - - kwargs_to_pass_down = _get_kwargs_to_pass_to_func(func, parameters, all_keywords) - all_keywords_to_pass_down = _get_kwargs_to_pass_to_func(func, all_keywords, all_keywords) - - # parameters is required as part of creation of all declarative components - dynamic_args = {**all_keywords_to_pass_down, **kwargs_to_pass_down} - if "parameters" not in dynamic_args: - dynamic_args["parameters"] = {} - else: - # Handles the case where kwarg parameters and keyword $parameters both exist. We should merge both sets of parameters - # before creating the component - dynamic_args["parameters"] = {**all_keywords_to_pass_down["parameters"], **kwargs_to_pass_down["parameters"]} - try: - ret = func(*args, *fargs, **dynamic_args) - except TypeError as e: - raise Exception(f"failed to create object of type {func} because {e}") - return ret - - newfunc.func = func - newfunc.args = args - newfunc.kwargs = keywords - - return newfunc - - -def _get_kwargs_to_pass_to_func(func, parameters, existing_keyword_parameters): - argspec = inspect.getfullargspec(func) - kwargs_to_pass_down = set(argspec.kwonlyargs) - args_to_pass_down = set(argspec.args) - all_args = args_to_pass_down.union(kwargs_to_pass_down) - kwargs_to_pass_down = { - k: v for k, v in parameters.items() if k in all_args and _key_is_unset_or_identical(k, v, existing_keyword_parameters) - } - if "parameters" in all_args: - kwargs_to_pass_down["parameters"] = parameters - return kwargs_to_pass_down - - -def _key_is_unset_or_identical(key: str, value: Any, mapping: Mapping[str, Any]): - return key not in mapping or mapping[key] == value - - -def _create_inner_objects(keywords, kwargs): - fully_created = dict() - for k, v in keywords.items(): - if type(v) == type(create): - fully_created[k] = v(kwargs=kwargs) - else: - fully_created[k] = v - return fully_created diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/declarative_component_schema.yaml b/airbyte-cdk/python/airbyte_cdk/sources/declarative/declarative_component_schema.yaml index 55eee7d2fae13..f8f3b24005971 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/declarative_component_schema.yaml +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/declarative_component_schema.yaml @@ -353,11 +353,12 @@ definitions: interpolation_context: - config - headers - - last_records + - last_page_size + - last_record - response examples: - "{{ headers.link.next.cursor }}" - - "{{ last_records[-1]['key'] }}" + - "{{ last_record['key'] }}" - "{{ response['nextPage'] }}" page_size: title: Page Size @@ -372,7 +373,7 @@ definitions: interpolation_context: - config - headers - - last_records + - last_record - response examples: - "{{ response.data.has_more is false }}" @@ -602,6 +603,48 @@ definitions: $parameters: type: object additionalProperties: true + CustomSchemaLoader: + title: Custom Schema Loader + description: Schema Loader component whose behavior is derived from a custom code implementation of the connector. + type: object + additionalProperties: true + required: + - type + - class_name + properties: + type: + type: string + enum: [CustomSchemaLoader] + class_name: + title: Class Name + description: Fully-qualified name of the class that will be implementing the custom schema loader. The format is `source_..`. + type: string + examples: + - "source_railz.components.MyCustomSchemaLoader" + $parameters: + type: object + additionalProperties: true + CustomStateMigration: + title: Custom State Migration + description: Apply a custom transformation on the input state. + type: object + additionalProperties: true + required: + - type + - class_name + properties: + type: + type: string + enum: [CustomStateMigration] + class_name: + title: Class Name + description: Fully-qualified name of the class that will be implementing the custom state migration. The format is `source_..`. + type: string + examples: + - "source_railz.components.MyCustomStateMigration" + $parameters: + type: object + additionalProperties: true CustomTransformation: title: Custom Transformation description: Transformation component whose behavior is derived from a custom code implementation of the connector. @@ -623,6 +666,29 @@ definitions: $parameters: type: object additionalProperties: true + LegacyToPerPartitionStateMigration: + title: Legacy To Per-partition-state Migration + description: + 'Transforms the input state for per-partitioned streams from the legacy format to the low-code format. + The cursor field and partition ID fields are automatically extracted from the stream''s DatetimebasedCursor and SubstreamPartitionRouter. + + Example input state: + { + "13506132": { + "last_changed": "2022-12-27T08:34:39+00:00" + } + Example output state: + { + "partition": {"id": "13506132"}, + "cursor": {"last_changed": "2022-12-27T08:34:39+00:00"} + } + ' + type: object + additionalProperties: true + properties: + type: + type: string + enum: [LegacyToPerPartitionStateMigration] DatetimeBasedCursor: title: Datetime Based Cursor description: Cursor to provide incremental capabilities over datetime. @@ -903,6 +969,31 @@ definitions: default: ["credentials", "token_expiry_date"] examples: - ["credentials", "token_expiry_date"] + refresh_token_error_status_codes: + title: Refresh Token Error Status Codes + description: Status Codes to Identify refresh token error in response (Refresh Token Error Key and Refresh Token Error Values should be also specified). Responses with one of the error status code and containing an error value will be flagged as a config error + type: array + items: + type: integer + default: [] + examples: + - [400, 500] + refresh_token_error_key: + title: Refresh Token Error Key + description: Key to Identify refresh token error in response (Refresh Token Error Status Codes and Refresh Token Error Values should be also specified). + type: string + default: "" + examples: + - "error" + refresh_token_error_values: + title: Refresh Token Error Values + description: 'List of values to check for exception during token refresh process. Used to check if the error found in the response matches the key from the Refresh Token Error Key field (e.g. response={"error": "invalid_grant"}). Only responses with one of the error status code and containing an error value will be flagged as a config error' + type: array + items: + type: string + default: [] + examples: + - ["invalid_grant", "invalid_permissions"] $parameters: type: object additionalProperties: true @@ -948,6 +1039,7 @@ definitions: anyOf: - "$ref": "#/definitions/InlineSchemaLoader" - "$ref": "#/definitions/JsonFileSchemaLoader" + - "$ref": "#/definitions/CustomSchemaLoader" # TODO we have move the transformation to the RecordSelector level in the code but kept this here for # compatibility reason. We should eventually move this to align with the code. transformations: @@ -959,6 +1051,15 @@ definitions: - "$ref": "#/definitions/AddFields" - "$ref": "#/definitions/CustomTransformation" - "$ref": "#/definitions/RemoveFields" + state_migrations: + title: State Migrations + description: Array of state migrations to be applied on the input state + type: array + items: + anyOf: + - "$ref": "#/definitions/LegacyToPerPartitionStateMigration" + - "$ref": "#/definitions/CustomStateMigration" + default: [] $parameters: type: object additional_properties: true @@ -2206,20 +2307,20 @@ interpolation: x-ratelimit-limit: "600" x-ratelimit-remaining: "598" x-ratelimit-reset: "39" - - title: last_records - description: List of records extracted from the last response received from the API. - type: list + - title: last_record + description: Last record extracted from the response received from the API. + type: object + examples: + - name: "Test List: 19" + id: 0236d6d2 + contact_count: 20 + _metadata: + self: https://api.sendgrid.com/v3/marketing/lists/0236d6d2 + - title: last_page_size + description: Number of records extracted from the last response received from the API. + type: object examples: - - - name: "Test List: 19" - id: 0236d6d2 - contact_count: 20 - _metadata: - self: https://api.sendgrid.com/v3/marketing/lists/0236d6d2 - - name: List for CI tests, number 30 - id: 041ee031 - contact_count: 0 - _metadata: - self: https://api.sendgrid.com/v3/marketing/lists/041ee031 + - 2 - title: next_page_token description: Object describing the token to fetch the next page of records. The object has a single key "next_page_token". type: object @@ -2351,3 +2452,10 @@ interpolation: return_type: str examples: - "{{ 'ZmFrZSByZWZyZXNoX3Rva2VuIHZhbHVl' | base64decode }} -> 'fake refresh_token value'" + - title: String + description: Converts the specified value to a string. + arguments: {} + return_type: str + examples: + - '{{ 1 | string }} -> "1"' + - '{{ ["hello", "world" | string }} -> "["hello", "world"]"' diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/declarative_stream.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/declarative_stream.py index aaca24dc610d3..eda99652fc861 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/declarative_stream.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/declarative_stream.py @@ -7,6 +7,7 @@ from airbyte_cdk.models import SyncMode from airbyte_cdk.sources.declarative.interpolation import InterpolatedString +from airbyte_cdk.sources.declarative.migrations.state_migration import StateMigration from airbyte_cdk.sources.declarative.retrievers.retriever import Retriever from airbyte_cdk.sources.declarative.schema import DefaultSchemaLoader from airbyte_cdk.sources.declarative.schema.schema_loader import SchemaLoader @@ -34,6 +35,7 @@ class DeclarativeStream(Stream): parameters: InitVar[Mapping[str, Any]] name: str primary_key: Optional[Union[str, List[str], List[List[str]]]] + state_migrations: List[StateMigration] = field(repr=True, default_factory=list) schema_loader: Optional[SchemaLoader] = None _name: str = field(init=False, repr=False, default="") _primary_key: str = field(init=False, repr=False, default="") @@ -75,7 +77,12 @@ def state(self) -> MutableMapping[str, Any]: @state.setter def state(self, value: MutableMapping[str, Any]) -> None: """State setter, accept state serialized by state getter.""" - self.retriever.state = value + state: Mapping[str, Any] = value + if self.state_migrations: + for migration in self.state_migrations: + if migration.should_migrate(state): + state = migration.migrate(state) + self.retriever.state = state def get_updated_state( self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any] diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/cursor.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/cursor.py index 9e2c6d4eb0658..ca67ed4703261 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/cursor.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/cursor.py @@ -3,7 +3,7 @@ # from abc import ABC, abstractmethod -from typing import Optional +from typing import Any from airbyte_cdk.sources.declarative.stream_slicers.stream_slicer import StreamSlicer from airbyte_cdk.sources.declarative.types import Record, StreamSlice, StreamState @@ -24,18 +24,24 @@ def set_initial_state(self, stream_state: StreamState) -> None: :param stream_state: The state of the stream as returned by get_stream_state """ + def observe(self, stream_slice: StreamSlice, record: Record) -> None: + """ + Register a record with the cursor; the cursor instance can then use it to manage the state of the in-progress stream read. + + :param stream_slice: The current slice, which may or may not contain the most recently observed record + :param record: the most recently-read record, which the cursor can use to update the stream state. Outwardly-visible changes to the + stream state may need to be deferred depending on whether the source reliably orders records by the cursor field. + """ + pass + @abstractmethod - def close_slice(self, stream_slice: StreamSlice, most_recent_record: Optional[Record]) -> None: + def close_slice(self, stream_slice: StreamSlice, *args: Any) -> None: """ - Update state based on the stream slice and the latest record. Note that `stream_slice.cursor_slice` and - `last_record.associated_slice` are expected to be the same but we make it explicit here that `stream_slice` should be leveraged to - update the state. + Update state based on the stream slice. Note that `stream_slice.cursor_slice` and `most_recent_record.associated_slice` are expected + to be the same but we make it explicit here that `stream_slice` should be leveraged to update the state. We do not pass in the + latest record, since cursor instances should maintain the relevant internal state on their own. :param stream_slice: slice to close - :param last_record: the latest record we have received for the slice. This is important to consider because even if the cursor emits - a slice, some APIs are not able to enforce the upper boundary. The outcome is that the last_record might have a higher cursor - value than the slice upper boundary and if we want to reduce the duplication as much as possible, we need to consider the highest - value between the internal cursor, the stream slice upper boundary and the record cursor value. """ @abstractmethod diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/datetime_based_cursor.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/datetime_based_cursor.py index 0124b93e75539..fd99c62656330 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/datetime_based_cursor.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/datetime_based_cursor.py @@ -52,7 +52,12 @@ class DatetimeBasedCursor(Cursor): datetime_format: str config: Config parameters: InitVar[Mapping[str, Any]] - _cursor: Optional[str] = field(repr=False, default=None) # tracks current datetime + _highest_observed_cursor_field_value: Optional[str] = field( + repr=False, default=None + ) # tracks the latest observed datetime, which may not be safe to emit in the case of out-of-order records + _cursor: Optional[str] = field( + repr=False, default=None + ) # tracks the latest observed datetime that is appropriate to emit as stream state end_datetime: Optional[Union[MinMaxDatetime, str]] = None step: Optional[Union[InterpolatedString, str]] = None cursor_granularity: Optional[str] = None @@ -109,20 +114,39 @@ def set_initial_state(self, stream_state: StreamState) -> None: """ self._cursor = stream_state.get(self._cursor_field.eval(self.config)) if stream_state else None - def close_slice(self, stream_slice: StreamSlice, most_recent_record: Optional[Record]) -> None: + def observe(self, stream_slice: StreamSlice, record: Record) -> None: + """ + Register a record with the cursor; the cursor instance can then use it to manage the state of the in-progress stream read. + + :param stream_slice: The current slice, which may or may not contain the most recently observed record + :param record: the most recently-read record, which the cursor can use to update the stream state. Outwardly-visible changes to the + stream state may need to be deferred depending on whether the source reliably orders records by the cursor field. + """ + record_cursor_value = record.get(self._cursor_field.eval(self.config)) + # if the current record has no cursor value, we cannot meaningfully update the state based on it, so there is nothing more to do + if not record_cursor_value: + return + + start_field = self._partition_field_start.eval(self.config) + end_field = self._partition_field_end.eval(self.config) + is_highest_observed_cursor_value = not self._highest_observed_cursor_field_value or self.parse_date( + record_cursor_value + ) > self.parse_date(self._highest_observed_cursor_field_value) + if ( + self._is_within_daterange_boundaries(record, stream_slice.get(start_field), stream_slice.get(end_field)) # type: ignore # we know that stream_slices for these cursors will use a string representing an unparsed date + and is_highest_observed_cursor_value + ): + self._highest_observed_cursor_field_value = record_cursor_value + + def close_slice(self, stream_slice: StreamSlice, *args: Any) -> None: if stream_slice.partition: raise ValueError(f"Stream slice {stream_slice} should not have a partition. Got {stream_slice.partition}.") - last_record_cursor_value = most_recent_record.get(self._cursor_field.eval(self.config)) if most_recent_record else None - stream_slice_value_end = stream_slice.get(self._partition_field_end.eval(self.config)) - potential_cursor_values = [ - cursor_value for cursor_value in [self._cursor, last_record_cursor_value, stream_slice_value_end] if cursor_value - ] cursor_value_str_by_cursor_value_datetime = dict( map( # we need to ensure the cursor value is preserved as is in the state else the CATs might complain of something like # 2023-01-04T17:30:19.000Z' <= '2023-01-04T17:30:19.000000Z' - lambda datetime_str: (self.parse_date(datetime_str), datetime_str), - potential_cursor_values, + lambda datetime_str: (self.parse_date(datetime_str), datetime_str), # type: ignore # because of the filter on the next line, this will only be called with a str + filter(lambda item: item, [self._cursor, self._highest_observed_cursor_field_value]), ) ) self._cursor = ( @@ -279,10 +303,26 @@ def should_be_synced(self, record: Record) -> bool: f"Could not find cursor field `{cursor_field}` in record. The incremental sync will assume it needs to be synced", ) return True - latest_possible_cursor_value = self._select_best_end_datetime() earliest_possible_cursor_value = self._calculate_earliest_possible_value(latest_possible_cursor_value) - return earliest_possible_cursor_value <= self.parse_date(record_cursor_value) <= latest_possible_cursor_value + return self._is_within_daterange_boundaries(record, earliest_possible_cursor_value, latest_possible_cursor_value) + + def _is_within_daterange_boundaries( + self, record: Record, start_datetime_boundary: Union[datetime.datetime, str], end_datetime_boundary: Union[datetime.datetime, str] + ) -> bool: + cursor_field = self._cursor_field.eval(self.config) + record_cursor_value = record.get(cursor_field) + if not record_cursor_value: + self._send_log( + Level.WARN, + f"Could not find cursor field `{cursor_field}` in record. The record will not be considered when emitting sync state", + ) + return False + if isinstance(start_datetime_boundary, str): + start_datetime_boundary = self.parse_date(start_datetime_boundary) + if isinstance(end_datetime_boundary, str): + end_datetime_boundary = self.parse_date(end_datetime_boundary) + return start_datetime_boundary <= self.parse_date(record_cursor_value) <= end_datetime_boundary def _send_log(self, level: Level, message: str) -> None: if self.message_repository: diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/per_partition_cursor.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/per_partition_cursor.py index 39dfa8f1fe1fe..829c93d8833b8 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/per_partition_cursor.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/per_partition_cursor.py @@ -86,20 +86,20 @@ def set_initial_state(self, stream_state: StreamState) -> None: for state in stream_state["states"]: self._cursor_per_partition[self._to_partition_key(state["partition"])] = self._create_cursor(state["cursor"]) - def close_slice(self, stream_slice: StreamSlice, most_recent_record: Optional[Record]) -> None: + def observe(self, stream_slice: StreamSlice, record: Record) -> None: + self._cursor_per_partition[self._to_partition_key(stream_slice.partition)].observe( + StreamSlice(partition={}, cursor_slice=stream_slice.cursor_slice), record + ) + + def close_slice(self, stream_slice: StreamSlice, *args: Any) -> None: try: - cursor_most_recent_record = ( - Record(most_recent_record.data, StreamSlice(partition={}, cursor_slice=stream_slice.cursor_slice)) - if most_recent_record - else most_recent_record - ) self._cursor_per_partition[self._to_partition_key(stream_slice.partition)].close_slice( - StreamSlice(partition={}, cursor_slice=stream_slice.cursor_slice), cursor_most_recent_record + StreamSlice(partition={}, cursor_slice=stream_slice.cursor_slice), *args ) except KeyError as exception: raise ValueError( f"Partition {str(exception)} could not be found in current state based on the record. This is unexpected because " - f"we should only update state for partition that where emitted during `stream_slices`" + f"we should only update state for partitions that were emitted during `stream_slices`" ) def get_stream_state(self) -> StreamState: diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/interpolation/filters.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/interpolation/filters.py index eac515b03301e..f3d70a0b13c47 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/interpolation/filters.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/interpolation/filters.py @@ -3,6 +3,7 @@ # import base64 import hashlib +import json from typing import Any, Optional @@ -90,5 +91,19 @@ def base64decode(value: str) -> str: return base64.b64decode(value.encode("utf-8")).decode() -_filters_list = [hash, base64encode, base64decode] +def string(value: Any) -> str: + """ + Converts the input value to a string. + If the value is already a string, it is returned as is. + Otherwise, the value is interpreted as a json object and wrapped in triple-quotes so it's evalued as a string by the JinjaInterpolation + :param value: the value to convert to a string + :return: string representation of the input value + """ + if isinstance(value, str): + return value + ret = f'"""{json.dumps(value)}"""' + return ret + + +_filters_list = [hash, base64encode, base64decode, string] filters = {f.__name__: f for f in _filters_list} diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/interpolation/interpolated_mapping.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/interpolation/interpolated_mapping.py index 72043e0174975..bdd95ecde776f 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/interpolation/interpolated_mapping.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/interpolation/interpolated_mapping.py @@ -4,7 +4,7 @@ from dataclasses import InitVar, dataclass -from typing import Any, Mapping, Optional +from typing import Any, Dict, Mapping, Optional from airbyte_cdk.sources.declarative.interpolation.jinja import JinjaInterpolation from airbyte_cdk.sources.declarative.types import Config @@ -22,17 +22,17 @@ class InterpolatedMapping: mapping: Mapping[str, str] parameters: InitVar[Mapping[str, Any]] - def __post_init__(self, parameters: Optional[Mapping[str, Any]]): + def __post_init__(self, parameters: Optional[Mapping[str, Any]]) -> None: self._interpolation = JinjaInterpolation() self._parameters = parameters - def eval(self, config: Config, **additional_parameters): + def eval(self, config: Config, **additional_parameters: Any) -> Dict[str, Any]: """ Wrapper around a Mapping[str, str] that allows for both keys and values to be interpolated. :param config: The user-provided configuration as specified by the source's spec :param additional_parameters: Optional parameters used for interpolation - :return: The interpolated string + :return: The interpolated mapping """ valid_key_types = additional_parameters.pop("valid_key_types", (str,)) valid_value_types = additional_parameters.pop("valid_value_types", None) @@ -43,10 +43,10 @@ def eval(self, config: Config, **additional_parameters): for name, value in self.mapping.items() } - def _eval(self, value, config, **kwargs): + def _eval(self, value: str, config: Config, **kwargs: Any) -> Any: # The values in self._mapping can be of Any type # We only want to interpolate them if they are strings - if type(value) == str: + if isinstance(value, str): return self._interpolation.eval(value, config, parameters=self._parameters, **kwargs) else: return value diff --git a/airbyte-cdk/python/source_declarative_manifest/__init__.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/migrations/__init__.py similarity index 100% rename from airbyte-cdk/python/source_declarative_manifest/__init__.py rename to airbyte-cdk/python/airbyte_cdk/sources/declarative/migrations/__init__.py diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/migrations/legacy_to_per_partition_state_migration.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/migrations/legacy_to_per_partition_state_migration.py new file mode 100644 index 0000000000000..11d33d0f4cf39 --- /dev/null +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/migrations/legacy_to_per_partition_state_migration.py @@ -0,0 +1,89 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + +from typing import Any, Mapping + +from airbyte_cdk.sources.declarative.interpolation.interpolated_string import InterpolatedString +from airbyte_cdk.sources.declarative.migrations.state_migration import StateMigration +from airbyte_cdk.sources.declarative.models import DatetimeBasedCursor, SubstreamPartitionRouter +from airbyte_cdk.sources.declarative.models.declarative_component_schema import ParentStreamConfig + + +def _is_already_migrated(stream_state: Mapping[str, Any]) -> bool: + return "states" in stream_state + + +class LegacyToPerPartitionStateMigration(StateMigration): + """ + Transforms the input state for per-partitioned streams from the legacy format to the low-code format. + The cursor field and partition ID fields are automatically extracted from the stream's DatetimebasedCursor and SubstreamPartitionRouter. + + Example input state: + { + "13506132": { + "last_changed": "2022-12-27T08:34:39+00:00" + } + Example output state: + { + "partition": {"id": "13506132"}, + "cursor": {"last_changed": "2022-12-27T08:34:39+00:00"} + } + """ + + def __init__( + self, + partition_router: SubstreamPartitionRouter, + cursor: DatetimeBasedCursor, + config: Mapping[str, Any], + parameters: Mapping[str, Any], + ): + self._partition_router = partition_router + self._cursor = cursor + self._config = config + self._parameters = parameters + self._partition_key_field = InterpolatedString.create( + self._get_partition_field(self._partition_router), parameters=self._parameters + ).eval(self._config) + self._cursor_field = InterpolatedString.create(self._cursor.cursor_field, parameters=self._parameters).eval(self._config) + + def _get_partition_field(self, partition_router: SubstreamPartitionRouter) -> str: + parent_stream_config = partition_router.parent_stream_configs[0] + + # Retrieve the partition field with a condition, as properties are returned as a dictionary for custom components. + partition_field = ( + parent_stream_config.partition_field + if isinstance(parent_stream_config, ParentStreamConfig) + else parent_stream_config.get("partition_field") + ) + + return partition_field + + def should_migrate(self, stream_state: Mapping[str, Any]) -> bool: + if _is_already_migrated(stream_state): + return False + + # There is exactly one parent stream + number_of_parent_streams = len(self._partition_router.parent_stream_configs) + if number_of_parent_streams != 1: + # There should be exactly one parent stream + return False + """ + The expected state format is + "" : { + "" : "" + } + """ + if stream_state: + for key, value in stream_state.items(): + if isinstance(value, dict): + keys = list(value.keys()) + if len(keys) != 1: + # The input partitioned state should only have one key + return False + if keys[0] != self._cursor_field: + # Unexpected key. Found {keys[0]}. Expected {self._cursor.cursor_field} + return False + return True + + def migrate(self, stream_state: Mapping[str, Any]) -> Mapping[str, Any]: + states = [{"partition": {self._partition_key_field: key}, "cursor": value} for key, value in stream_state.items()] + return {"states": states} diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/migrations/state_migration.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/migrations/state_migration.py new file mode 100644 index 0000000000000..9cf7f3cfe08cf --- /dev/null +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/migrations/state_migration.py @@ -0,0 +1,24 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + +from abc import abstractmethod +from typing import Any, Mapping + + +class StateMigration: + @abstractmethod + def should_migrate(self, stream_state: Mapping[str, Any]) -> bool: + """ + Check if the stream_state should be migrated + + :param stream_state: The stream_state to potentially migrate + :return: true if the state is of the expected format and should be migrated. False otherwise. + """ + + @abstractmethod + def migrate(self, stream_state: Mapping[str, Any]) -> Mapping[str, Any]: + """ + Migrate the stream_state. Assumes should_migrate(stream_state) returned True. + + :param stream_state: The stream_state to migrate + :return: The migrated stream_state + """ diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/models/declarative_component_schema.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/models/declarative_component_schema.py index 630a0cdd797a9..5926052dea3b7 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/models/declarative_component_schema.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/models/declarative_component_schema.py @@ -208,6 +208,34 @@ class Config: parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters') +class CustomSchemaLoader(BaseModel): + class Config: + extra = Extra.allow + + type: Literal['CustomSchemaLoader'] + class_name: str = Field( + ..., + description='Fully-qualified name of the class that will be implementing the custom schema loader. The format is `source_..`.', + examples=['source_railz.components.MyCustomSchemaLoader'], + title='Class Name', + ) + parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters') + + +class CustomStateMigration(BaseModel): + class Config: + extra = Extra.allow + + type: Literal['CustomStateMigration'] + class_name: str = Field( + ..., + description='Fully-qualified name of the class that will be implementing the custom state migration. The format is `source_..`.', + examples=['source_railz.components.MyCustomStateMigration'], + title='Class Name', + ) + parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters') + + class CustomTransformation(BaseModel): class Config: extra = Extra.allow @@ -222,6 +250,13 @@ class Config: parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters') +class LegacyToPerPartitionStateMigration(BaseModel): + class Config: + extra = Extra.allow + + type: Optional[Literal['LegacyToPerPartitionStateMigration']] = None + + class RefreshTokenUpdater(BaseModel): refresh_token_name: Optional[str] = Field( 'refresh_token', @@ -247,6 +282,24 @@ class RefreshTokenUpdater(BaseModel): examples=[['credentials', 'token_expiry_date']], title='Config Path To Expiry Date', ) + refresh_token_error_status_codes: Optional[List[int]] = Field( + [], + description='Status Codes to Identify refresh token error in response (Refresh Token Error Key and Refresh Token Error Values should be also specified). Responses with one of the error status code and containing an error value will be flagged as a config error', + examples=[[400, 500]], + title='Refresh Token Error Status Codes', + ) + refresh_token_error_key: Optional[str] = Field( + '', + description='Key to Identify refresh token error in response (Refresh Token Error Status Codes and Refresh Token Error Values should be also specified).', + examples=['error'], + title='Refresh Token Error Key', + ) + refresh_token_error_values: Optional[List[str]] = Field( + [], + description='List of values to check for exception during token refresh process. Used to check if the error found in the response matches the key from the Refresh Token Error Key field (e.g. response={"error": "invalid_grant"}). Only responses with one of the error status code and containing an error value will be flagged as a config error', + examples=[['invalid_grant', 'invalid_permissions']], + title='Refresh Token Error Values', + ) class OAuthAuthenticator(BaseModel): @@ -827,7 +880,7 @@ class CursorPagination(BaseModel): description='Value of the cursor defining the next page to fetch.', examples=[ '{{ headers.link.next.cursor }}', - "{{ last_records[-1]['key'] }}", + "{{ last_record['key'] }}", "{{ response['nextPage'] }}", ], title='Cursor Value', @@ -1161,7 +1214,9 @@ class Config: primary_key: Optional[PrimaryKey] = Field( '', description='The primary key of the stream.', title='Primary Key' ) - schema_loader: Optional[Union[InlineSchemaLoader, JsonFileSchemaLoader]] = Field( + schema_loader: Optional[ + Union[InlineSchemaLoader, JsonFileSchemaLoader, CustomSchemaLoader] + ] = Field( None, description='Component used to retrieve the schema for the current stream.', title='Schema Loader', @@ -1173,6 +1228,13 @@ class Config: description='A list of transformations to be applied to each output record.', title='Transformations', ) + state_migrations: Optional[ + List[Union[LegacyToPerPartitionStateMigration, CustomStateMigration]] + ] = Field( + [], + description='Array of state migrations to be applied on the input state', + title='State Migrations', + ) parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters') diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py index 8f60500d012b7..31e5264062112 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py @@ -30,6 +30,8 @@ from airbyte_cdk.sources.declarative.incremental import Cursor, CursorFactory, DatetimeBasedCursor, PerPartitionCursor from airbyte_cdk.sources.declarative.interpolation import InterpolatedString from airbyte_cdk.sources.declarative.interpolation.interpolated_mapping import InterpolatedMapping +from airbyte_cdk.sources.declarative.migrations.legacy_to_per_partition_state_migration import LegacyToPerPartitionStateMigration +from airbyte_cdk.sources.declarative.models import CustomStateMigration from airbyte_cdk.sources.declarative.models.declarative_component_schema import AddedFieldDefinition as AddedFieldDefinitionModel from airbyte_cdk.sources.declarative.models.declarative_component_schema import AddFields as AddFieldsModel from airbyte_cdk.sources.declarative.models.declarative_component_schema import ApiKeyAuthenticator as ApiKeyAuthenticatorModel @@ -49,6 +51,7 @@ from airbyte_cdk.sources.declarative.models.declarative_component_schema import CustomRecordFilter as CustomRecordFilterModel from airbyte_cdk.sources.declarative.models.declarative_component_schema import CustomRequester as CustomRequesterModel from airbyte_cdk.sources.declarative.models.declarative_component_schema import CustomRetriever as CustomRetrieverModel +from airbyte_cdk.sources.declarative.models.declarative_component_schema import CustomSchemaLoader as CustomSchemaLoader from airbyte_cdk.sources.declarative.models.declarative_component_schema import CustomTransformation as CustomTransformationModel from airbyte_cdk.sources.declarative.models.declarative_component_schema import DatetimeBasedCursor as DatetimeBasedCursorModel from airbyte_cdk.sources.declarative.models.declarative_component_schema import DeclarativeStream as DeclarativeStreamModel @@ -66,6 +69,9 @@ from airbyte_cdk.sources.declarative.models.declarative_component_schema import ( LegacySessionTokenAuthenticator as LegacySessionTokenAuthenticatorModel, ) +from airbyte_cdk.sources.declarative.models.declarative_component_schema import ( + LegacyToPerPartitionStateMigration as LegacyToPerPartitionStateMigrationModel, +) from airbyte_cdk.sources.declarative.models.declarative_component_schema import ListPartitionRouter as ListPartitionRouterModel from airbyte_cdk.sources.declarative.models.declarative_component_schema import MinMaxDatetime as MinMaxDatetimeModel from airbyte_cdk.sources.declarative.models.declarative_component_schema import NoAuth as NoAuthModel @@ -165,6 +171,8 @@ def _init_mappings(self) -> None: CustomRecordFilterModel: self.create_custom_component, CustomRequesterModel: self.create_custom_component, CustomRetrieverModel: self.create_custom_component, + CustomSchemaLoader: self.create_custom_component, + CustomStateMigration: self.create_custom_component, CustomPaginationStrategyModel: self.create_custom_component, CustomPartitionRouterModel: self.create_custom_component, CustomTransformationModel: self.create_custom_component, @@ -180,6 +188,7 @@ def _init_mappings(self) -> None: InlineSchemaLoaderModel: self.create_inline_schema_loader, JsonDecoderModel: self.create_json_decoder, JsonFileSchemaLoaderModel: self.create_json_file_schema_loader, + LegacyToPerPartitionStateMigrationModel: self.create_legacy_to_per_partition_state_migration, ListPartitionRouterModel: self.create_list_partition_router, MinMaxDatetimeModel: self.create_min_max_datetime, NoAuthModel: self.create_no_auth, @@ -306,6 +315,28 @@ def create_api_key_authenticator( parameters=model.parameters or {}, ) + def create_legacy_to_per_partition_state_migration( + self, + model: LegacyToPerPartitionStateMigrationModel, + config: Mapping[str, Any], + declarative_stream: DeclarativeStreamModel, + ) -> LegacyToPerPartitionStateMigration: + retriever = declarative_stream.retriever + partition_router = retriever.partition_router + + if not isinstance(retriever, SimpleRetrieverModel): + raise ValueError( + f"LegacyToPerPartitionStateMigrations can only be applied on a DeclarativeStream with a SimpleRetriever. Got {type(retriever)}" + ) + if not isinstance(partition_router, (SubstreamPartitionRouterModel, CustomPartitionRouterModel)): + raise ValueError( + f"LegacyToPerPartitionStateMigrations can only be applied on a SimpleRetriever with a Substream partition router. Got {type(partition_router)}" + ) + if not hasattr(partition_router, "parent_stream_configs"): + raise ValueError("LegacyToPerPartitionStateMigrations can only be applied with a parent stream configuration.") + + return LegacyToPerPartitionStateMigration(declarative_stream.retriever.partition_router, declarative_stream.incremental_sync, config, declarative_stream.parameters) # type: ignore # The retriever type was already checked + def create_session_token_authenticator( self, model: SessionTokenAuthenticatorModel, config: Config, name: str, **kwargs: Any ) -> Union[ApiKeyAuthenticator, BearerAuthenticator]: @@ -319,7 +350,7 @@ def create_session_token_authenticator( ) if model.request_authentication.type == "Bearer": return ModelToComponentFactory.create_bearer_authenticator( - BearerAuthenticatorModel(type="BearerAuthenticator", api_token=""), + BearerAuthenticatorModel(type="BearerAuthenticator", api_token=""), # type: ignore # $parameters has a default value config, token_provider=token_provider, # type: ignore # $parameters defaults to None ) @@ -431,11 +462,14 @@ def create_custom_component(self, model: Any, config: Config, **kwargs: Any) -> return custom_component_class(**kwargs) @staticmethod - def _get_class_from_fully_qualified_class_name(class_name: str) -> Any: - split = class_name.split(".") + def _get_class_from_fully_qualified_class_name(full_qualified_class_name: str) -> Any: + split = full_qualified_class_name.split(".") module = ".".join(split[:-1]) class_name = split[-1] - return getattr(importlib.import_module(module), class_name) + try: + return getattr(importlib.import_module(module), class_name) + except AttributeError: + raise ValueError(f"Could not load class {full_qualified_class_name}.") @staticmethod def _derive_component_type_from_type_hints(field_type: Any) -> Optional[str]: @@ -581,6 +615,14 @@ def create_declarative_stream(self, model: DeclarativeStreamModel, config: Confi ) cursor_field = model.incremental_sync.cursor_field if model.incremental_sync else None + if model.state_migrations: + state_transformations = [ + self._create_component_from_model(state_migration, config, declarative_stream=model) + for state_migration in model.state_migrations + ] + else: + state_transformations = [] + if model.schema_loader: schema_loader = self._create_component_from_model(model=model.schema_loader, config=config) else: @@ -595,6 +637,7 @@ def create_declarative_stream(self, model: DeclarativeStreamModel, config: Confi retriever=retriever, schema_loader=schema_loader, stream_cursor_field=cursor_field or "", + state_migrations=state_transformations, config=config, parameters=model.parameters or {}, ) @@ -803,7 +846,7 @@ def create_no_pagination(model: NoPaginationModel, config: Config, **kwargs: Any def create_oauth_authenticator(self, model: OAuthAuthenticatorModel, config: Config, **kwargs: Any) -> DeclarativeOauth2Authenticator: if model.refresh_token_updater: - # ignore type error beause fixing it would have a lot of dependencies, revisit later + # ignore type error because fixing it would have a lot of dependencies, revisit later return DeclarativeSingleUseRefreshTokenOauth2Authenticator( # type: ignore config, InterpolatedString.create(model.token_refresh_endpoint, parameters=model.parameters or {}).eval(config), @@ -824,6 +867,9 @@ def create_oauth_authenticator(self, model: OAuthAuthenticatorModel, config: Con scopes=model.scopes, token_expiry_date_format=model.token_expiry_date_format, message_repository=self._message_repository, + refresh_token_error_status_codes=model.refresh_token_updater.refresh_token_error_status_codes, + refresh_token_error_key=model.refresh_token_updater.refresh_token_error_key, + refresh_token_error_values=model.refresh_token_updater.refresh_token_error_values, ) # ignore type error because fixing it would have a lot of dependencies, revisit later return DeclarativeOauth2Authenticator( # type: ignore diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/http_requester.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/http_requester.py index 98e12eef908a8..969dbd99b7349 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/http_requester.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/http_requester.py @@ -336,8 +336,8 @@ def _request_params( raise ValueError("Request params cannot be a string") for k, v in options.items(): - if isinstance(v, (list, dict)): - raise ValueError(f"Invalid value for `{k}` parameter. The values of request params cannot be an array or object.") + if isinstance(v, (dict,)): + raise ValueError(f"Invalid value for `{k}` parameter. The values of request params cannot be an object.") return options @@ -456,9 +456,6 @@ def send_request( json=self._request_body_json(stream_state, stream_slice, next_page_token, request_body_json), data=self._request_body_data(stream_state, stream_slice, next_page_token, request_body_data), ) - import time - - time.sleep(1) response = self._send_with_retry(request, log_formatter=log_formatter) return self._validate_response(response) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/paginators/strategies/cursor_pagination_strategy.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/paginators/strategies/cursor_pagination_strategy.py index bea36fc8e3230..52a93ec221e1b 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/paginators/strategies/cursor_pagination_strategy.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/paginators/strategies/cursor_pagination_strategy.py @@ -3,7 +3,7 @@ # from dataclasses import InitVar, dataclass -from typing import Any, List, Mapping, Optional, Union +from typing import Any, Dict, List, Mapping, Optional, Union import requests from airbyte_cdk.sources.declarative.decoders.decoder import Decoder @@ -11,7 +11,7 @@ from airbyte_cdk.sources.declarative.interpolation.interpolated_boolean import InterpolatedBoolean from airbyte_cdk.sources.declarative.interpolation.interpolated_string import InterpolatedString from airbyte_cdk.sources.declarative.requesters.paginators.strategies.pagination_strategy import PaginationStrategy -from airbyte_cdk.sources.declarative.types import Config +from airbyte_cdk.sources.declarative.types import Config, Record @dataclass @@ -34,32 +34,52 @@ class CursorPaginationStrategy(PaginationStrategy): stop_condition: Optional[Union[InterpolatedBoolean, str]] = None decoder: Decoder = JsonDecoder(parameters={}) - def __post_init__(self, parameters: Mapping[str, Any]): + def __post_init__(self, parameters: Mapping[str, Any]) -> None: if isinstance(self.cursor_value, str): - self.cursor_value = InterpolatedString.create(self.cursor_value, parameters=parameters) + self._cursor_value = InterpolatedString.create(self.cursor_value, parameters=parameters) + else: + self._cursor_value = self.cursor_value if isinstance(self.stop_condition, str): - self.stop_condition = InterpolatedBoolean(condition=self.stop_condition, parameters=parameters) + self._stop_condition = InterpolatedBoolean(condition=self.stop_condition, parameters=parameters) + else: + self._stop_condition = self.stop_condition # type: ignore # the type has been checked @property def initial_token(self) -> Optional[Any]: return None - def next_page_token(self, response: requests.Response, last_records: List[Mapping[str, Any]]) -> Optional[Any]: + def next_page_token(self, response: requests.Response, last_records: List[Record]) -> Optional[Any]: decoded_response = self.decoder.decode(response) # The default way that link is presented in requests.Response is a string of various links (last, next, etc). This # is not indexable or useful for parsing the cursor, so we replace it with the link dictionary from response.links - headers = response.headers + headers: Dict[str, Any] = dict(response.headers) headers["link"] = response.links - if self.stop_condition: - should_stop = self.stop_condition.eval(self.config, response=decoded_response, headers=headers, last_records=last_records) + last_record = last_records[-1] if last_records else None + + if self._stop_condition: + should_stop = self._stop_condition.eval( + self.config, + response=decoded_response, + headers=headers, + last_records=last_records, + last_record=last_record, + last_page_size=len(last_records), + ) if should_stop: return None - token = self.cursor_value.eval(config=self.config, last_records=last_records, response=decoded_response, headers=headers) + token = self._cursor_value.eval( + config=self.config, + last_records=last_records, + response=decoded_response, + headers=headers, + last_record=last_record, + last_page_size=len(last_records), + ) return token if token else None - def reset(self): + def reset(self) -> None: # No state to reset pass diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/retrievers/simple_retriever.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/retrievers/simple_retriever.py index 7028850bcaaa6..d6f4567cddb2d 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/retrievers/simple_retriever.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/retrievers/simple_retriever.py @@ -322,7 +322,14 @@ def read_records( records_schema=records_schema, ) for stream_data in self._read_pages(record_generator, self.state, _slice): - most_recent_record_from_slice = self._get_most_recent_record(most_recent_record_from_slice, stream_data, _slice) + current_record = self._extract_record(stream_data, _slice) + if self.cursor and current_record: + self.cursor.observe(_slice, current_record) + + # Latest record read, not necessarily within slice boundaries. + # TODO Remove once all custom components implement `observe` method. + # https://github.com/airbytehq/airbyte-internal-issues/issues/6955 + most_recent_record_from_slice = self._get_most_recent_record(most_recent_record_from_slice, current_record, _slice) yield stream_data if self.cursor: @@ -330,13 +337,13 @@ def read_records( return def _get_most_recent_record( - self, current_most_recent: Optional[Record], stream_data: StreamData, stream_slice: StreamSlice + self, current_most_recent: Optional[Record], current_record: Optional[Record], stream_slice: StreamSlice ) -> Optional[Record]: - if self.cursor and (record := self._extract_record(stream_data, stream_slice)): + if self.cursor and current_record: if not current_most_recent: - return record + return current_record else: - return current_most_recent if self.cursor.is_greater_than_or_equal(current_most_recent, record) else record + return current_most_recent if self.cursor.is_greater_than_or_equal(current_most_recent, current_record) else current_record else: return None diff --git a/airbyte-cdk/python/airbyte_cdk/sources/file_based/config/csv_format.py b/airbyte-cdk/python/airbyte_cdk/sources/file_based/config/csv_format.py index bf8a57e73e90b..ee9f9c32b24d2 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/file_based/config/csv_format.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/file_based/config/csv_format.py @@ -147,11 +147,16 @@ class Config(OneOfOptionConfig): description="How to infer the types of the columns. If none, inference default to strings.", airbyte_hidden=True, ) + ignore_errors_on_fields_mismatch: bool = Field( + title="Ignore errors on field mismatch", + default=False, + description="Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema.", + ) @validator("delimiter") def validate_delimiter(cls, v: str) -> str: if v == r"\t": - return v + v = "\t" if len(v) != 1: raise ValueError("delimiter should only be one character") if v in {"\r", "\n"}: diff --git a/airbyte-cdk/python/airbyte_cdk/sources/file_based/file_based_source.py b/airbyte-cdk/python/airbyte_cdk/sources/file_based/file_based_source.py index 8bd5cfe9565ef..c3ef77cea94fe 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/file_based/file_based_source.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/file_based/file_based_source.py @@ -36,7 +36,7 @@ from airbyte_cdk.sources.file_based.stream.concurrent.cursor import ( AbstractConcurrentFileBasedCursor, FileBasedConcurrentCursor, - FileBasedNoopCursor, + FileBasedFinalStateCursor, ) from airbyte_cdk.sources.file_based.stream.cursor import AbstractFileBasedCursor from airbyte_cdk.sources.message.repository import InMemoryMessageRepository, MessageRepository @@ -170,7 +170,9 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: sync_mode = self._get_sync_mode_from_catalog(stream_config.name) if sync_mode == SyncMode.full_refresh and hasattr(self, "_concurrency_level") and self._concurrency_level is not None: - cursor = FileBasedNoopCursor(stream_config) + cursor = FileBasedFinalStateCursor( + stream_config=stream_config, stream_namespace=None, message_repository=self.message_repository + ) stream = FileBasedStreamFacade.create_from_stream( self._make_default_stream(stream_config, cursor), self, self.logger, stream_state, cursor ) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/file_based/file_types/csv_parser.py b/airbyte-cdk/python/airbyte_cdk/sources/file_based/file_types/csv_parser.py index 627c3573b6692..435162edeafb5 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/file_based/file_types/csv_parser.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/file_based/file_types/csv_parser.py @@ -70,13 +70,21 @@ def read_data( # The row was not properly parsed if any of the values are None. This will most likely occur if there are more columns # than headers or more headers dans columns if None in row: - raise RecordParseError( - FileBasedSourceError.ERROR_PARSING_RECORD_MISMATCHED_COLUMNS, - filename=file.uri, - lineno=lineno, - ) + if config_format.ignore_errors_on_fields_mismatch: + logger.error(f"Skipping record in line {lineno} of file {file.uri}; invalid CSV row with missing column.") + else: + raise RecordParseError( + FileBasedSourceError.ERROR_PARSING_RECORD_MISMATCHED_COLUMNS, + filename=file.uri, + lineno=lineno, + ) if None in row.values(): - raise RecordParseError(FileBasedSourceError.ERROR_PARSING_RECORD_MISMATCHED_ROWS, filename=file.uri, lineno=lineno) + if config_format.ignore_errors_on_fields_mismatch: + logger.error(f"Skipping record in line {lineno} of file {file.uri}; invalid CSV row with extra column.") + else: + raise RecordParseError( + FileBasedSourceError.ERROR_PARSING_RECORD_MISMATCHED_ROWS, filename=file.uri, lineno=lineno + ) yield row finally: # due to RecordParseError or GeneratorExit diff --git a/airbyte-cdk/python/airbyte_cdk/sources/file_based/stream/concurrent/adapters.py b/airbyte-cdk/python/airbyte_cdk/sources/file_based/stream/concurrent/adapters.py index 4fc1a365b4245..e92b78df9c463 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/file_based/stream/concurrent/adapters.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/file_based/stream/concurrent/adapters.py @@ -18,7 +18,7 @@ from airbyte_cdk.sources.file_based.file_types.file_type_parser import FileTypeParser from airbyte_cdk.sources.file_based.remote_file import RemoteFile from airbyte_cdk.sources.file_based.stream import AbstractFileBasedStream -from airbyte_cdk.sources.file_based.stream.concurrent.cursor import FileBasedNoopCursor +from airbyte_cdk.sources.file_based.stream.concurrent.cursor import FileBasedFinalStateCursor from airbyte_cdk.sources.file_based.stream.cursor import AbstractFileBasedCursor from airbyte_cdk.sources.file_based.types import StreamSlice from airbyte_cdk.sources.message import MessageRepository @@ -71,7 +71,7 @@ def create_from_stream( partition_generator=FileBasedStreamPartitionGenerator( stream, message_repository, - SyncMode.full_refresh if isinstance(cursor, FileBasedNoopCursor) else SyncMode.incremental, + SyncMode.full_refresh if isinstance(cursor, FileBasedFinalStateCursor) else SyncMode.incremental, [cursor_field] if cursor_field is not None else None, state, cursor, diff --git a/airbyte-cdk/python/airbyte_cdk/sources/file_based/stream/concurrent/cursor/__init__.py b/airbyte-cdk/python/airbyte_cdk/sources/file_based/stream/concurrent/cursor/__init__.py index 6ab66bb398882..590f37bb6d63e 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/file_based/stream/concurrent/cursor/__init__.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/file_based/stream/concurrent/cursor/__init__.py @@ -1,5 +1,5 @@ from .abstract_concurrent_file_based_cursor import AbstractConcurrentFileBasedCursor from .file_based_concurrent_cursor import FileBasedConcurrentCursor -from .file_based_noop_cursor import FileBasedNoopCursor +from .file_based_final_state_cursor import FileBasedFinalStateCursor -__all__ = ["AbstractConcurrentFileBasedCursor", "FileBasedConcurrentCursor", "FileBasedNoopCursor"] +__all__ = ["AbstractConcurrentFileBasedCursor", "FileBasedConcurrentCursor", "FileBasedFinalStateCursor"] diff --git a/airbyte-cdk/python/airbyte_cdk/sources/file_based/stream/concurrent/cursor/file_based_final_state_cursor.py b/airbyte-cdk/python/airbyte_cdk/sources/file_based/stream/concurrent/cursor/file_based_final_state_cursor.py new file mode 100644 index 0000000000000..ca6f43aec2e4e --- /dev/null +++ b/airbyte-cdk/python/airbyte_cdk/sources/file_based/stream/concurrent/cursor/file_based_final_state_cursor.py @@ -0,0 +1,71 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +import logging +from datetime import datetime +from typing import TYPE_CHECKING, Any, Iterable, List, MutableMapping, Optional + +from airbyte_cdk.sources.connector_state_manager import ConnectorStateManager +from airbyte_cdk.sources.file_based.config.file_based_stream_config import FileBasedStreamConfig +from airbyte_cdk.sources.file_based.remote_file import RemoteFile +from airbyte_cdk.sources.file_based.stream.concurrent.cursor.abstract_concurrent_file_based_cursor import AbstractConcurrentFileBasedCursor +from airbyte_cdk.sources.file_based.types import StreamState +from airbyte_cdk.sources.message import MessageRepository +from airbyte_cdk.sources.streams import FULL_REFRESH_SENTINEL_STATE_KEY +from airbyte_cdk.sources.streams.concurrent.partitions.partition import Partition +from airbyte_cdk.sources.streams.concurrent.partitions.record import Record + +if TYPE_CHECKING: + from airbyte_cdk.sources.file_based.stream.concurrent.adapters import FileBasedStreamPartition + + +class FileBasedFinalStateCursor(AbstractConcurrentFileBasedCursor): + """Cursor that is used to guarantee at least one state message is emitted for a concurrent file-based stream.""" + + def __init__( + self, stream_config: FileBasedStreamConfig, message_repository: MessageRepository, stream_namespace: Optional[str], **kwargs: Any + ): + self._stream_name = stream_config.name + self._stream_namespace = stream_namespace + self._message_repository = message_repository + # Normally the connector state manager operates at the source-level. However, we only need it to write the sentinel + # state message rather than manage overall source state. This is also only temporary as we move to the resumable + # full refresh world where every stream uses a FileBasedConcurrentCursor with incremental state. + self._connector_state_manager = ConnectorStateManager(stream_instance_map={}) + + @property + def state(self) -> MutableMapping[str, Any]: + return {FULL_REFRESH_SENTINEL_STATE_KEY: True} + + def observe(self, record: Record) -> None: + pass + + def close_partition(self, partition: Partition) -> None: + pass + + def set_pending_partitions(self, partitions: List["FileBasedStreamPartition"]) -> None: + pass + + def add_file(self, file: RemoteFile) -> None: + pass + + def get_files_to_sync(self, all_files: Iterable[RemoteFile], logger: logging.Logger) -> Iterable[RemoteFile]: + return all_files + + def get_state(self) -> MutableMapping[str, Any]: + return {} + + def set_initial_state(self, value: StreamState) -> None: + return None + + def get_start_time(self) -> datetime: + return datetime.min + + def emit_state_message(self) -> None: + pass + + def ensure_at_least_one_state_emitted(self) -> None: + self._connector_state_manager.update_state_for_stream(self._stream_name, self._stream_namespace, self.state) + state_message = self._connector_state_manager.create_state_message(self._stream_name, self._stream_namespace) + self._message_repository.emit_message(state_message) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/file_based/stream/concurrent/cursor/file_based_noop_cursor.py b/airbyte-cdk/python/airbyte_cdk/sources/file_based/stream/concurrent/cursor/file_based_noop_cursor.py deleted file mode 100644 index 2aa5a204d5035..0000000000000 --- a/airbyte-cdk/python/airbyte_cdk/sources/file_based/stream/concurrent/cursor/file_based_noop_cursor.py +++ /dev/null @@ -1,56 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import logging -from datetime import datetime -from typing import TYPE_CHECKING, Any, Iterable, List, MutableMapping - -from airbyte_cdk.sources.file_based.config.file_based_stream_config import FileBasedStreamConfig -from airbyte_cdk.sources.file_based.remote_file import RemoteFile -from airbyte_cdk.sources.file_based.stream.concurrent.cursor.abstract_concurrent_file_based_cursor import AbstractConcurrentFileBasedCursor -from airbyte_cdk.sources.file_based.types import StreamState -from airbyte_cdk.sources.streams.concurrent.partitions.partition import Partition -from airbyte_cdk.sources.streams.concurrent.partitions.record import Record - -if TYPE_CHECKING: - from airbyte_cdk.sources.file_based.stream.concurrent.adapters import FileBasedStreamPartition - - -class FileBasedNoopCursor(AbstractConcurrentFileBasedCursor): - def __init__(self, stream_config: FileBasedStreamConfig, **kwargs: Any): - pass - - @property - def state(self) -> MutableMapping[str, Any]: - return {} - - def observe(self, record: Record) -> None: - pass - - def close_partition(self, partition: Partition) -> None: - pass - - def set_pending_partitions(self, partitions: List["FileBasedStreamPartition"]) -> None: - pass - - def add_file(self, file: RemoteFile) -> None: - pass - - def get_files_to_sync(self, all_files: Iterable[RemoteFile], logger: logging.Logger) -> Iterable[RemoteFile]: - return all_files - - def get_state(self) -> MutableMapping[str, Any]: - return {} - - def set_initial_state(self, value: StreamState) -> None: - return None - - def get_start_time(self) -> datetime: - return datetime.min - - def emit_state_message(self) -> None: - pass - - def ensure_at_least_one_state_emitted(self) -> None: - pass diff --git a/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/adapters.py b/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/adapters.py index 8b762e63a7b15..a6556caf577e5 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/adapters.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/adapters.py @@ -21,7 +21,7 @@ StreamAvailable, StreamUnavailable, ) -from airbyte_cdk.sources.streams.concurrent.cursor import Cursor, NoopCursor +from airbyte_cdk.sources.streams.concurrent.cursor import Cursor, FinalStateCursor from airbyte_cdk.sources.streams.concurrent.default_stream import DefaultStream from airbyte_cdk.sources.streams.concurrent.exceptions import ExceptionWithDisplayMessage from airbyte_cdk.sources.streams.concurrent.helpers import get_cursor_field_from_stream, get_primary_key_from_stream @@ -77,7 +77,7 @@ def create_from_stream( partition_generator=StreamPartitionGenerator( stream, message_repository, - SyncMode.full_refresh if isinstance(cursor, NoopCursor) else SyncMode.incremental, + SyncMode.full_refresh if isinstance(cursor, FinalStateCursor) else SyncMode.incremental, [cursor_field] if cursor_field is not None else None, state, cursor, diff --git a/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/cursor.py b/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/cursor.py index d581e66a33d8d..08dee0716c529 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/cursor.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/cursor.py @@ -3,11 +3,11 @@ # import functools from abc import ABC, abstractmethod -from datetime import datetime -from typing import Any, List, Mapping, MutableMapping, Optional, Protocol, Tuple +from typing import Any, Callable, Iterable, List, Mapping, MutableMapping, Optional, Protocol, Tuple from airbyte_cdk.sources.connector_state_manager import ConnectorStateManager from airbyte_cdk.sources.message import MessageRepository +from airbyte_cdk.sources.streams import FULL_REFRESH_SENTINEL_STATE_KEY from airbyte_cdk.sources.streams.concurrent.partitions.partition import Partition from airbyte_cdk.sources.streams.concurrent.partitions.record import Record from airbyte_cdk.sources.streams.concurrent.state_converters.abstract_stream_state_converter import AbstractStreamStateConverter @@ -17,11 +17,33 @@ def _extract_value(mapping: Mapping[str, Any], path: List[str]) -> Any: return functools.reduce(lambda a, b: a[b], path, mapping) -class Comparable(Protocol): +class GapType(Protocol): + """ + This is the representation of gaps between two cursor values. Examples: + * if cursor values are datetimes, GapType is timedelta + * if cursor values are integer, GapType will also be integer + """ + + pass + + +class CursorValueType(Protocol): """Protocol for annotating comparable types.""" @abstractmethod - def __lt__(self: "Comparable", other: "Comparable") -> bool: + def __lt__(self: "CursorValueType", other: "CursorValueType") -> bool: + pass + + @abstractmethod + def __ge__(self: "CursorValueType", other: "CursorValueType") -> bool: + pass + + @abstractmethod + def __add__(self: "CursorValueType", other: GapType) -> "CursorValueType": + pass + + @abstractmethod + def __sub__(self: "CursorValueType", other: GapType) -> "CursorValueType": pass @@ -29,7 +51,7 @@ class CursorField: def __init__(self, cursor_field_key: str) -> None: self.cursor_field_key = cursor_field_key - def extract_value(self, record: Record) -> Comparable: + def extract_value(self, record: Record) -> CursorValueType: cursor_value = record.data.get(self.cursor_field_key) if cursor_value is None: raise ValueError(f"Could not find cursor field {self.cursor_field_key} in record") @@ -65,10 +87,27 @@ def ensure_at_least_one_state_emitted(self) -> None: raise NotImplementedError() -class NoopCursor(Cursor): +class FinalStateCursor(Cursor): + """Cursor that is used to guarantee at least one state message is emitted for a concurrent stream.""" + + def __init__( + self, + stream_name: str, + stream_namespace: Optional[str], + message_repository: MessageRepository, + ) -> None: + self._stream_name = stream_name + self._stream_namespace = stream_namespace + self._message_repository = message_repository + # Normally the connector state manager operates at the source-level. However, we only need it to write the sentinel + # state message rather than manage overall source state. This is also only temporary as we move to the resumable + # full refresh world where every stream uses a FileBasedConcurrentCursor with incremental state. + self._connector_state_manager = ConnectorStateManager(stream_instance_map={}) + self._has_closed_at_least_one_slice = False + @property def state(self) -> MutableMapping[str, Any]: - return {} + return {FULL_REFRESH_SENTINEL_STATE_KEY: True} def observe(self, record: Record) -> None: pass @@ -77,7 +116,13 @@ def close_partition(self, partition: Partition) -> None: pass def ensure_at_least_one_state_emitted(self) -> None: - pass + """ + Used primarily for full refresh syncs that do not have a valid cursor value to emit at the end of a sync + """ + + self._connector_state_manager.update_state_for_stream(self._stream_name, self._stream_namespace, self.state) + state_message = self._connector_state_manager.create_state_message(self._stream_name, self._stream_namespace) + self._message_repository.emit_message(state_message) class ConcurrentCursor(Cursor): @@ -94,7 +139,10 @@ def __init__( connector_state_converter: AbstractStreamStateConverter, cursor_field: CursorField, slice_boundary_fields: Optional[Tuple[str, str]], - start: Optional[Any], + start: Optional[CursorValueType], + end_provider: Callable[[], CursorValueType], + lookback_window: Optional[GapType] = None, + slice_range: Optional[GapType] = None, ) -> None: self._stream_name = stream_name self._stream_namespace = stream_namespace @@ -105,15 +153,18 @@ def __init__( # To see some example where the slice boundaries might not be defined, check https://github.com/airbytehq/airbyte/blob/1ce84d6396e446e1ac2377362446e3fb94509461/airbyte-integrations/connectors/source-stripe/source_stripe/streams.py#L363-L379 self._slice_boundary_fields = slice_boundary_fields if slice_boundary_fields else tuple() self._start = start + self._end_provider = end_provider self._most_recent_record: Optional[Record] = None self._has_closed_at_least_one_slice = False self.start, self._concurrent_state = self._get_concurrent_state(stream_state) + self._lookback_window = lookback_window + self._slice_range = slice_range @property def state(self) -> MutableMapping[str, Any]: return self._concurrent_state - def _get_concurrent_state(self, state: MutableMapping[str, Any]) -> Tuple[datetime, MutableMapping[str, Any]]: + def _get_concurrent_state(self, state: MutableMapping[str, Any]) -> Tuple[CursorValueType, MutableMapping[str, Any]]: if self._connector_state_converter.is_state_message_compatible(state): return self._start or self._connector_state_converter.zero_value, self._connector_state_converter.deserialize(state) return self._connector_state_converter.convert_from_sequential_state(self._cursor_field, state, self._start) @@ -179,23 +230,20 @@ def _emit_state_message(self) -> None: self._connector_state_manager.update_state_for_stream( self._stream_name, self._stream_namespace, - self._connector_state_converter.convert_to_sequential_state(self._cursor_field, self.state), + self._connector_state_converter.convert_to_state_message(self._cursor_field, self.state), ) - # TODO: if we migrate stored state to the concurrent state format - # (aka stop calling self._connector_state_converter.convert_to_sequential_state`), we'll need to cast datetimes to string or - # int before emitting state state_message = self._connector_state_manager.create_state_message(self._stream_name, self._stream_namespace) self._message_repository.emit_message(state_message) def _merge_partitions(self) -> None: self.state["slices"] = self._connector_state_converter.merge_intervals(self.state["slices"]) - def _extract_from_slice(self, partition: Partition, key: str) -> Comparable: + def _extract_from_slice(self, partition: Partition, key: str) -> CursorValueType: try: _slice = partition.to_slice() if not _slice: raise KeyError(f"Could not find key `{key}` in empty slice") - return self._connector_state_converter.parse_value(_slice[key]) # type: ignore # we expect the devs to specify a key that would return a Comparable + return self._connector_state_converter.parse_value(_slice[key]) # type: ignore # we expect the devs to specify a key that would return a CursorValueType except KeyError as exception: raise KeyError(f"Partition is expected to have key `{key}` but could not be found") from exception @@ -205,3 +253,66 @@ def ensure_at_least_one_state_emitted(self) -> None: called. """ self._emit_state_message() + + def generate_slices(self) -> Iterable[Tuple[CursorValueType, CursorValueType]]: + """ + Generating slices based on a few parameters: + * lookback_window: Buffer to remove from END_KEY of the highest slice + * slice_range: Max difference between two slices. If the difference between two slices is greater, multiple slices will be created + * start: `_split_per_slice_range` will clip any value to `self._start which means that: + * if upper is less than self._start, no slices will be generated + * if lower is less than self._start, self._start will be used as the lower boundary (lookback_window will not be considered in that case) + + Note that the slices will overlap at their boundaries. We therefore expect to have at least the lower or the upper boundary to be + inclusive in the API that is queried. + """ + self._merge_partitions() + + if self._start is not None and self._is_start_before_first_slice(): + yield from self._split_per_slice_range(self._start, self.state["slices"][0][self._connector_state_converter.START_KEY]) + + if len(self.state["slices"]) == 1: + yield from self._split_per_slice_range( + self._calculate_lower_boundary_of_last_slice(self.state["slices"][0][self._connector_state_converter.END_KEY]), + self._end_provider(), + ) + elif len(self.state["slices"]) > 1: + for i in range(len(self.state["slices"]) - 1): + yield from self._split_per_slice_range( + self.state["slices"][i][self._connector_state_converter.END_KEY], + self.state["slices"][i + 1][self._connector_state_converter.START_KEY], + ) + yield from self._split_per_slice_range( + self._calculate_lower_boundary_of_last_slice(self.state["slices"][-1][self._connector_state_converter.END_KEY]), + self._end_provider(), + ) + else: + raise ValueError("Expected at least one slice") + + def _is_start_before_first_slice(self) -> bool: + return self._start is not None and self._start < self.state["slices"][0][self._connector_state_converter.START_KEY] + + def _calculate_lower_boundary_of_last_slice(self, lower_boundary: CursorValueType) -> CursorValueType: + if self._lookback_window: + return lower_boundary - self._lookback_window + return lower_boundary + + def _split_per_slice_range(self, lower: CursorValueType, upper: CursorValueType) -> Iterable[Tuple[CursorValueType, CursorValueType]]: + if lower >= upper: + return + + if self._start and upper < self._start: + return + + lower = max(lower, self._start) if self._start else lower + if not self._slice_range or lower + self._slice_range >= upper: + yield lower, upper + else: + stop_processing = False + current_lower_boundary = lower + while not stop_processing: + current_upper_boundary = min(current_lower_boundary + self._slice_range, upper) + yield current_lower_boundary, current_upper_boundary + current_lower_boundary = current_upper_boundary + if current_upper_boundary >= upper: + stop_processing = True diff --git a/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/default_stream.py b/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/default_stream.py index 3e839cb3959ef..6cf4a694118e4 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/default_stream.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/default_stream.py @@ -9,7 +9,7 @@ from airbyte_cdk.models import AirbyteStream, SyncMode from airbyte_cdk.sources.streams.concurrent.abstract_stream import AbstractStream from airbyte_cdk.sources.streams.concurrent.availability_strategy import AbstractAvailabilityStrategy, StreamAvailability -from airbyte_cdk.sources.streams.concurrent.cursor import Cursor, NoopCursor +from airbyte_cdk.sources.streams.concurrent.cursor import Cursor from airbyte_cdk.sources.streams.concurrent.partitions.partition import Partition from airbyte_cdk.sources.streams.concurrent.partitions.partition_generator import PartitionGenerator @@ -24,7 +24,7 @@ def __init__( primary_key: List[str], cursor_field: Optional[str], logger: Logger, - cursor: Optional[Cursor], + cursor: Cursor, namespace: Optional[str] = None, ) -> None: self._stream_partition_generator = partition_generator @@ -34,7 +34,7 @@ def __init__( self._primary_key = primary_key self._cursor_field = cursor_field self._logger = logger - self._cursor = cursor or NoopCursor() + self._cursor = cursor self._namespace = namespace def generate_partitions(self) -> Iterable[Partition]: @@ -44,6 +44,10 @@ def generate_partitions(self) -> Iterable[Partition]: def name(self) -> str: return self._name + @property + def namespace(self) -> Optional[str]: + return self._namespace + def check_availability(self) -> StreamAvailability: return self._availability_strategy.check_availability(self._logger) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/exceptions.py b/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/exceptions.py index c67c2c58311d1..a0cf699a46d0d 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/exceptions.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/exceptions.py @@ -13,3 +13,6 @@ class ExceptionWithDisplayMessage(Exception): def __init__(self, display_message: str, **kwargs: Any): super().__init__(**kwargs) self.display_message = display_message + + def __str__(self) -> str: + return f'ExceptionWithDisplayMessage: "{self.display_message}"' diff --git a/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/partition_enqueuer.py b/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/partition_enqueuer.py index 3869c6cf9e732..8e63c16a4b2c2 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/partition_enqueuer.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/partition_enqueuer.py @@ -5,6 +5,7 @@ from queue import Queue from airbyte_cdk.sources.concurrent_source.partition_generation_completed_sentinel import PartitionGenerationCompletedSentinel +from airbyte_cdk.sources.concurrent_source.stream_thread_exception import StreamThreadException from airbyte_cdk.sources.concurrent_source.thread_pool_manager import ThreadPoolManager from airbyte_cdk.sources.streams.concurrent.abstract_stream import AbstractStream from airbyte_cdk.sources.streams.concurrent.partitions.types import QueueItem @@ -52,4 +53,5 @@ def generate_partitions(self, stream: AbstractStream) -> None: self._queue.put(partition) self._queue.put(PartitionGenerationCompletedSentinel(stream)) except Exception as e: - self._queue.put(e) + self._queue.put(StreamThreadException(e, stream.name)) + self._queue.put(PartitionGenerationCompletedSentinel(stream)) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/partition_reader.py b/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/partition_reader.py index 3df19ca29f926..c0cbf778b6576 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/partition_reader.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/partition_reader.py @@ -3,6 +3,7 @@ # from queue import Queue +from airbyte_cdk.sources.concurrent_source.stream_thread_exception import StreamThreadException from airbyte_cdk.sources.streams.concurrent.partitions.partition import Partition from airbyte_cdk.sources.streams.concurrent.partitions.types import PartitionCompleteSentinel, QueueItem @@ -35,4 +36,5 @@ def process_partition(self, partition: Partition) -> None: self._queue.put(record) self._queue.put(PartitionCompleteSentinel(partition)) except Exception as e: - self._queue.put(e) + self._queue.put(StreamThreadException(e, partition.stream_name())) + self._queue.put(PartitionCompleteSentinel(partition)) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/partitions/types.py b/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/partitions/types.py index fe16b2b0f9ab1..1ffdf6a903ef0 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/partitions/types.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/partitions/types.py @@ -2,7 +2,7 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # -from typing import Union +from typing import Any, Union from airbyte_cdk.sources.concurrent_source.partition_generation_completed_sentinel import PartitionGenerationCompletedSentinel from airbyte_cdk.sources.streams.concurrent.partitions.partition import Partition @@ -21,6 +21,11 @@ def __init__(self, partition: Partition): """ self.partition = partition + def __eq__(self, other: Any) -> bool: + if isinstance(other, PartitionCompleteSentinel): + return self.partition == other.partition + return False + """ Typedef representing the items that can be added to the ThreadBasedConcurrentStream diff --git a/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/state_converters/abstract_stream_state_converter.py b/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/state_converters/abstract_stream_state_converter.py index 843f477ddb160..e442dc6d97e96 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/state_converters/abstract_stream_state_converter.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/state_converters/abstract_stream_state_converter.py @@ -4,7 +4,7 @@ from abc import ABC, abstractmethod from enum import Enum -from typing import TYPE_CHECKING, Any, List, MutableMapping, Tuple +from typing import TYPE_CHECKING, Any, List, MutableMapping, Optional, Tuple if TYPE_CHECKING: from airbyte_cdk.sources.streams.concurrent.cursor import CursorField @@ -19,11 +19,65 @@ class AbstractStreamStateConverter(ABC): END_KEY = "end" @abstractmethod + def _from_state_message(self, value: Any) -> Any: + pass + + @abstractmethod + def _to_state_message(self, value: Any) -> Any: + pass + + def __init__(self, is_sequential_state: bool = True): + self._is_sequential_state = is_sequential_state + + def convert_to_state_message(self, cursor_field: "CursorField", stream_state: MutableMapping[str, Any]) -> MutableMapping[str, Any]: + """ + Convert the state message from the concurrency-compatible format to the stream's original format. + + e.g. + { "created": "2021-01-18T21:18:20.000Z" } + """ + if self.is_state_message_compatible(stream_state) and self._is_sequential_state: + legacy_state = stream_state.get("legacy", {}) + latest_complete_time = self._get_latest_complete_time(stream_state.get("slices", [])) + if latest_complete_time is not None: + legacy_state.update({cursor_field.cursor_field_key: self._to_state_message(latest_complete_time)}) + return legacy_state or {} + else: + return self.serialize(stream_state, ConcurrencyCompatibleStateType.date_range) + + def _get_latest_complete_time(self, slices: List[MutableMapping[str, Any]]) -> Any: + """ + Get the latest time before which all records have been processed. + """ + if not slices: + raise RuntimeError("Expected at least one slice but there were none. This is unexpected; please contact Support.") + + merged_intervals = self.merge_intervals(slices) + first_interval = merged_intervals[0] + return first_interval[self.END_KEY] + def deserialize(self, state: MutableMapping[str, Any]) -> MutableMapping[str, Any]: """ Perform any transformations needed for compatibility with the converter. """ - ... + for stream_slice in state.get("slices", []): + stream_slice[self.START_KEY] = self._from_state_message(stream_slice[self.START_KEY]) + stream_slice[self.END_KEY] = self._from_state_message(stream_slice[self.END_KEY]) + return state + + def serialize(self, state: MutableMapping[str, Any], state_type: ConcurrencyCompatibleStateType) -> MutableMapping[str, Any]: + """ + Perform any transformations needed for compatibility with the converter. + """ + serialized_slices = [] + for stream_slice in state.get("slices", []): + serialized_slices.append( + { + self.START_KEY: self._to_state_message(stream_slice[self.START_KEY]), + self.END_KEY: self._to_state_message(stream_slice[self.END_KEY]), + } + ) + return {"slices": serialized_slices, "state_type": state_type.value} @staticmethod def is_state_message_compatible(state: MutableMapping[str, Any]) -> bool: @@ -32,9 +86,9 @@ def is_state_message_compatible(state: MutableMapping[str, Any]) -> bool: @abstractmethod def convert_from_sequential_state( self, - cursor_field: "CursorField", + cursor_field: "CursorField", # to deprecate as it is only needed for sequential state stream_state: MutableMapping[str, Any], - start: Any, + start: Optional[Any], ) -> Tuple[Any, MutableMapping[str, Any]]: """ Convert the state message to the format required by the ConcurrentCursor. @@ -50,23 +104,12 @@ def convert_from_sequential_state( ... @abstractmethod - def convert_to_sequential_state(self, cursor_field: "CursorField", stream_state: MutableMapping[str, Any]) -> MutableMapping[str, Any]: - """ - Convert the state message from the concurrency-compatible format to the stream's original format. - - e.g. - { "created": 1617030403 } - """ - ... - - @abstractmethod - def increment(self, timestamp: Any) -> Any: + def increment(self, value: Any) -> Any: """ Increment a timestamp by a single unit. """ ... - @abstractmethod def merge_intervals(self, intervals: List[MutableMapping[str, Any]]) -> List[MutableMapping[str, Any]]: """ Compute and return a list of merged intervals. @@ -74,7 +117,22 @@ def merge_intervals(self, intervals: List[MutableMapping[str, Any]]) -> List[Mut Intervals may be merged if the start time of the second interval is 1 unit or less (as defined by the `increment` method) than the end time of the first interval. """ - ... + if not intervals: + return [] + + sorted_intervals = sorted(intervals, key=lambda x: (x[self.START_KEY], x[self.END_KEY])) + merged_intervals = [sorted_intervals[0]] + + for interval in sorted_intervals[1:]: + last_end_time = merged_intervals[-1][self.END_KEY] + current_start_time = interval[self.START_KEY] + if bool(self.increment(last_end_time) >= current_start_time): + merged_end_time = max(last_end_time, interval[self.END_KEY]) + merged_intervals[-1][self.END_KEY] = merged_end_time + else: + merged_intervals.append(interval) + + return merged_intervals @abstractmethod def parse_value(self, value: Any) -> Any: diff --git a/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/state_converters/datetime_stream_state_converter.py b/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/state_converters/datetime_stream_state_converter.py index 83f8a44b23db2..226ee79c04040 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/state_converters/datetime_stream_state_converter.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/state_converters/datetime_stream_state_converter.py @@ -3,8 +3,8 @@ # from abc import abstractmethod -from datetime import datetime, timedelta -from typing import Any, List, MutableMapping, Optional, Tuple +from datetime import datetime, timedelta, timezone +from typing import Any, Callable, MutableMapping, Optional, Tuple import pendulum from airbyte_cdk.sources.streams.concurrent.cursor import CursorField @@ -16,6 +16,12 @@ class DateTimeStreamStateConverter(AbstractStreamStateConverter): + def _from_state_message(self, value: Any) -> Any: + return self.parse_timestamp(value) + + def _to_state_message(self, value: Any) -> Any: + return self.output_format(value) + @property @abstractmethod def _zero_value(self) -> Any: @@ -25,6 +31,10 @@ def _zero_value(self) -> Any: def zero_value(self) -> datetime: return self.parse_timestamp(self._zero_value) + @classmethod + def get_end_provider(cls) -> Callable[[], datetime]: + return lambda: datetime.now(timezone.utc) + @abstractmethod def increment(self, timestamp: datetime) -> datetime: ... @@ -37,41 +47,17 @@ def parse_timestamp(self, timestamp: Any) -> datetime: def output_format(self, timestamp: datetime) -> Any: ... - def deserialize(self, state: MutableMapping[str, Any]) -> MutableMapping[str, Any]: - for stream_slice in state.get("slices", []): - stream_slice[self.START_KEY] = self.parse_timestamp(stream_slice[self.START_KEY]) - stream_slice[self.END_KEY] = self.parse_timestamp(stream_slice[self.END_KEY]) - return state - def parse_value(self, value: Any) -> Any: """ Parse the value of the cursor field into a comparable value. """ return self.parse_timestamp(value) - def merge_intervals(self, intervals: List[MutableMapping[str, datetime]]) -> List[MutableMapping[str, datetime]]: - if not intervals: - return [] - - sorted_intervals = sorted(intervals, key=lambda x: (x[self.START_KEY], x[self.END_KEY])) - merged_intervals = [sorted_intervals[0]] - - for interval in sorted_intervals[1:]: - last_end_time = merged_intervals[-1][self.END_KEY] - current_start_time = interval[self.START_KEY] - if self._compare_intervals(last_end_time, current_start_time): - merged_end_time = max(last_end_time, interval[self.END_KEY]) - merged_intervals[-1][self.END_KEY] = merged_end_time - else: - merged_intervals.append(interval) - - return merged_intervals - def _compare_intervals(self, end_time: Any, start_time: Any) -> bool: return bool(self.increment(end_time) >= start_time) def convert_from_sequential_state( - self, cursor_field: CursorField, stream_state: MutableMapping[str, Any], start: datetime + self, cursor_field: CursorField, stream_state: MutableMapping[str, Any], start: Optional[datetime] ) -> Tuple[datetime, MutableMapping[str, Any]]: """ Convert the state message to the format required by the ConcurrentCursor. @@ -92,7 +78,7 @@ def convert_from_sequential_state( # Create a slice to represent the records synced during prior syncs. # The start and end are the same to avoid confusion as to whether the records for this slice # were actually synced - slices = [{self.START_KEY: sync_start, self.END_KEY: sync_start}] + slices = [{self.START_KEY: start if start is not None else sync_start, self.END_KEY: sync_start}] return sync_start, { "state_type": ConcurrencyCompatibleStateType.date_range.value, @@ -100,8 +86,8 @@ def convert_from_sequential_state( "legacy": stream_state, } - def _get_sync_start(self, cursor_field: CursorField, stream_state: MutableMapping[str, Any], start: Optional[Any]) -> datetime: - sync_start = self.parse_timestamp(start) if start is not None else self.zero_value + def _get_sync_start(self, cursor_field: CursorField, stream_state: MutableMapping[str, Any], start: Optional[datetime]) -> datetime: + sync_start = start if start is not None else self.zero_value prev_sync_low_water_mark = ( self.parse_timestamp(stream_state[cursor_field.cursor_field_key]) if cursor_field.cursor_field_key in stream_state else None ) @@ -110,33 +96,6 @@ def _get_sync_start(self, cursor_field: CursorField, stream_state: MutableMappin else: return sync_start - def convert_to_sequential_state(self, cursor_field: CursorField, stream_state: MutableMapping[str, Any]) -> MutableMapping[str, Any]: - """ - Convert the state message from the concurrency-compatible format to the stream's original format. - - e.g. - { "created": "2021-01-18T21:18:20.000Z" } - """ - if self.is_state_message_compatible(stream_state): - legacy_state = stream_state.get("legacy", {}) - latest_complete_time = self._get_latest_complete_time(stream_state.get("slices", [])) - if latest_complete_time is not None: - legacy_state.update({cursor_field.cursor_field_key: self.output_format(latest_complete_time)}) - return legacy_state or {} - else: - return stream_state - - def _get_latest_complete_time(self, slices: List[MutableMapping[str, Any]]) -> Optional[datetime]: - """ - Get the latest time before which all records have been processed. - """ - if not slices: - raise RuntimeError("Expected at least one slice but there were none. This is unexpected; please contact Support.") - - merged_intervals = self.merge_intervals(slices) - first_interval = merged_intervals[0] - return first_interval[self.END_KEY] - class EpochValueConcurrentStreamStateConverter(DateTimeStreamStateConverter): """ diff --git a/airbyte-cdk/python/airbyte_cdk/sources/streams/core.py b/airbyte-cdk/python/airbyte_cdk/sources/streams/core.py index b944072ee4a41..8502bdf2339e1 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/streams/core.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/streams/core.py @@ -148,7 +148,10 @@ def read( # type: ignore # ignoring typing for ConnectorStateManager because o hasattr(record_data_or_message, "type") and record_data_or_message.type == MessageType.RECORD ): record_data = record_data_or_message if isinstance(record_data_or_message, Mapping) else record_data_or_message.record - stream_state = self.get_updated_state(stream_state, record_data) + if self.cursor_field: + # Some connectors have streams that implement get_updated_state(), but do not define a cursor_field. This + # should be fixed on the stream implementation, but we should also protect against this in the CDK as well + stream_state = self.get_updated_state(stream_state, record_data) record_counter += 1 if sync_mode == SyncMode.incremental: diff --git a/airbyte-cdk/python/airbyte_cdk/test/catalog_builder.py b/airbyte-cdk/python/airbyte_cdk/test/catalog_builder.py index 522e3dd68ab28..6b88bb898c7dd 100644 --- a/airbyte-cdk/python/airbyte_cdk/test/catalog_builder.py +++ b/airbyte-cdk/python/airbyte_cdk/test/catalog_builder.py @@ -1,29 +1,62 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. -from typing import Any, Dict, List +from typing import List, Union, overload -from airbyte_protocol.models import ConfiguredAirbyteCatalog, SyncMode +from airbyte_protocol.models import ConfiguredAirbyteCatalog, ConfiguredAirbyteStream, SyncMode + + +class ConfiguredAirbyteStreamBuilder: + def __init__(self) -> None: + self._stream = { + "stream": { + "name": "any name", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_primary_key": [["id"]], + }, + "primary_key": [["id"]], + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite", + } + + def with_name(self, name: str) -> "ConfiguredAirbyteStreamBuilder": + self._stream["stream"]["name"] = name # type: ignore # we assume that self._stream["stream"] is a Dict[str, Any] + return self + + def with_sync_mode(self, sync_mode: SyncMode) -> "ConfiguredAirbyteStreamBuilder": + self._stream["sync_mode"] = sync_mode.name + return self + + def with_primary_key(self, pk: List[List[str]]) -> "ConfiguredAirbyteStreamBuilder": + self._stream["primary_key"] = pk + self._stream["stream"]["source_defined_primary_key"] = pk # type: ignore # we assume that self._stream["stream"] is a Dict[str, Any] + return self + + def build(self) -> ConfiguredAirbyteStream: + return ConfiguredAirbyteStream.parse_obj(self._stream) class CatalogBuilder: def __init__(self) -> None: - self._streams: List[Dict[str, Any]] = [] + self._streams: List[ConfiguredAirbyteStreamBuilder] = [] + @overload + def with_stream(self, name: ConfiguredAirbyteStreamBuilder) -> "CatalogBuilder": + ... + + @overload def with_stream(self, name: str, sync_mode: SyncMode) -> "CatalogBuilder": - self._streams.append( - { - "stream": { - "name": name, - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_primary_key": [["id"]], - }, - "primary_key": [["id"]], - "sync_mode": sync_mode.name, - "destination_sync_mode": "overwrite", - } - ) + ... + + def with_stream(self, name: Union[str, ConfiguredAirbyteStreamBuilder], sync_mode: Union[SyncMode, None] = None) -> "CatalogBuilder": + # As we are introducing a fully fledge ConfiguredAirbyteStreamBuilder, we would like to deprecate the previous interface + # with_stream(str, SyncMode) + + # to avoid a breaking change, `name` needs to stay in the API but this can be either a name or a builder + name_or_builder = name + builder = name_or_builder if isinstance(name_or_builder, ConfiguredAirbyteStreamBuilder) else ConfiguredAirbyteStreamBuilder().with_name(name_or_builder).with_sync_mode(sync_mode) + self._streams.append(builder) return self def build(self) -> ConfiguredAirbyteCatalog: - return ConfiguredAirbyteCatalog.parse_obj({"streams": self._streams}) + return ConfiguredAirbyteCatalog(streams=list(map(lambda builder: builder.build(), self._streams))) diff --git a/airbyte-cdk/python/airbyte_cdk/test/entrypoint_wrapper.py b/airbyte-cdk/python/airbyte_cdk/test/entrypoint_wrapper.py index 612b2742ea1e8..767a13a75ed29 100644 --- a/airbyte-cdk/python/airbyte_cdk/test/entrypoint_wrapper.py +++ b/airbyte-cdk/python/airbyte_cdk/test/entrypoint_wrapper.py @@ -74,7 +74,7 @@ def most_recent_state(self) -> Any: state_messages = self._get_message_by_types([Type.STATE]) if not state_messages: raise ValueError("Can't provide most recent state as there are no state messages") - return state_messages[-1].state.data + return state_messages[-1].state.stream @property def logs(self) -> List[AirbyteMessage]: diff --git a/airbyte-cdk/python/airbyte_cdk/utils/message_utils.py b/airbyte-cdk/python/airbyte_cdk/utils/message_utils.py new file mode 100644 index 0000000000000..410ce809ec8d2 --- /dev/null +++ b/airbyte-cdk/python/airbyte_cdk/utils/message_utils.py @@ -0,0 +1,17 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + +from airbyte_cdk.sources.connector_state_manager import HashableStreamDescriptor +from airbyte_protocol.models import AirbyteMessage, Type + + +def get_stream_descriptor(message: AirbyteMessage) -> HashableStreamDescriptor: + if message.type == Type.RECORD: + return HashableStreamDescriptor(name=message.record.stream, namespace=message.record.namespace) + elif message.type == Type.STATE: + if not message.state.stream or not message.state.stream.stream_descriptor: + raise ValueError("State message was not in per-stream state format, which is required for record counts.") + return HashableStreamDescriptor( + name=message.state.stream.stream_descriptor.name, namespace=message.state.stream.stream_descriptor.namespace + ) + else: + raise NotImplementedError(f"get_stream_descriptor is not implemented for message type '{message.type}'.") diff --git a/airbyte-cdk/python/airbyte_cdk/utils/schema_inferrer.py b/airbyte-cdk/python/airbyte_cdk/utils/schema_inferrer.py index 41f8e179469e0..134068e212bfa 100644 --- a/airbyte-cdk/python/airbyte_cdk/utils/schema_inferrer.py +++ b/airbyte-cdk/python/airbyte_cdk/utils/schema_inferrer.py @@ -3,18 +3,23 @@ # from collections import defaultdict -from typing import Any, Dict, Mapping, Optional +from typing import Any, Dict, List, Mapping, Optional from airbyte_cdk.models import AirbyteRecordMessage from genson import SchemaBuilder, SchemaNode from genson.schema.strategies.object import Object from genson.schema.strategies.scalar import Number +_NULL_TYPE = "null" + class NoRequiredObj(Object): """ This class has Object behaviour, but it does not generate "required[]" fields - every time it parses object. So we dont add unnecessary extra field. + every time it parses object. So we don't add unnecessary extra field. + + The logic is that even reading all the data from a source, it does not mean that there can be another record added with those fields as + optional. Hence, we make everything nullable. """ def to_schema(self) -> Mapping[str, Any]: @@ -41,6 +46,25 @@ class NoRequiredSchemaBuilder(SchemaBuilder): InferredSchema = Dict[str, Any] +class SchemaValidationException(Exception): + @classmethod + def merge_exceptions(cls, exceptions: List["SchemaValidationException"]) -> "SchemaValidationException": + # We assume the schema is the same for all SchemaValidationException + return SchemaValidationException(exceptions[0].schema, [x for exception in exceptions for x in exception._validation_errors]) + + def __init__(self, schema: InferredSchema, validation_errors: List[Exception]): + self._schema = schema + self._validation_errors = validation_errors + + @property + def schema(self) -> InferredSchema: + return self._schema + + @property + def validation_errors(self) -> List[str]: + return list(map(lambda error: str(error), self._validation_errors)) + + class SchemaInferrer: """ This class is used to infer a JSON schema which fits all the records passed into it @@ -53,23 +77,15 @@ class SchemaInferrer: stream_to_builder: Dict[str, SchemaBuilder] - def __init__(self) -> None: + def __init__(self, pk: Optional[List[List[str]]] = None, cursor_field: Optional[List[List[str]]] = None) -> None: self.stream_to_builder = defaultdict(NoRequiredSchemaBuilder) + self._pk = [] if pk is None else pk + self._cursor_field = [] if cursor_field is None else cursor_field def accumulate(self, record: AirbyteRecordMessage) -> None: """Uses the input record to add to the inferred schemas maintained by this object""" self.stream_to_builder[record.stream].add_object(record.data) - def get_inferred_schemas(self) -> Dict[str, InferredSchema]: - """ - Returns the JSON schemas for all encountered streams inferred by inspecting all records - passed via the accumulate method - """ - schemas = {} - for stream_name, builder in self.stream_to_builder.items(): - schemas[stream_name] = self._clean(builder.to_schema()) - return schemas - def _clean(self, node: InferredSchema) -> InferredSchema: """ Recursively cleans up a produced schema: @@ -78,23 +94,119 @@ def _clean(self, node: InferredSchema) -> InferredSchema: """ if isinstance(node, dict): if "anyOf" in node: - if len(node["anyOf"]) == 2 and {"type": "null"} in node["anyOf"]: - real_type = node["anyOf"][1] if node["anyOf"][0]["type"] == "null" else node["anyOf"][0] + if len(node["anyOf"]) == 2 and {"type": _NULL_TYPE} in node["anyOf"]: + real_type = node["anyOf"][1] if node["anyOf"][0]["type"] == _NULL_TYPE else node["anyOf"][0] node.update(real_type) - node["type"] = [node["type"], "null"] + node["type"] = [node["type"], _NULL_TYPE] node.pop("anyOf") if "properties" in node and isinstance(node["properties"], dict): for key, value in list(node["properties"].items()): - if isinstance(value, dict) and value.get("type", None) == "null": + if isinstance(value, dict) and value.get("type", None) == _NULL_TYPE: node["properties"].pop(key) else: self._clean(value) if "items" in node: self._clean(node["items"]) + + # this check needs to follow the "anyOf" cleaning as it might populate `type` + if isinstance(node["type"], list): + if _NULL_TYPE in node["type"]: + # we want to make sure null is always at the end as it makes schemas more readable + node["type"].remove(_NULL_TYPE) + node["type"].append(_NULL_TYPE) + else: + node["type"] = [node["type"], _NULL_TYPE] + return node + + def _add_required_properties(self, node: InferredSchema) -> InferredSchema: + """ + This method takes properties that should be marked as required (self._pk and self._cursor_field) and travel the schema to mark every + node as required. + """ + # Removing nullable for the root as when we call `_clean`, we make everything nullable + node["type"] = "object" + + exceptions = [] + for field in [x for x in [self._pk, self._cursor_field] if x]: + try: + self._add_fields_as_required(node, field) + except SchemaValidationException as exception: + exceptions.append(exception) + + if exceptions: + raise SchemaValidationException.merge_exceptions(exceptions) + return node + def _add_fields_as_required(self, node: InferredSchema, composite_key: List[List[str]]) -> None: + """ + Take a list of nested keys (this list represents a composite key) and travel the schema to mark every node as required. + """ + errors: List[Exception] = [] + + for path in composite_key: + try: + self._add_field_as_required(node, path) + except ValueError as exception: + errors.append(exception) + + if errors: + raise SchemaValidationException(node, errors) + + def _add_field_as_required(self, node: InferredSchema, path: List[str], traveled_path: Optional[List[str]] = None) -> None: + """ + Take a nested key and travel the schema to mark every node as required. + """ + self._remove_null_from_type(node) + if self._is_leaf(path): + return + + if not traveled_path: + traveled_path = [] + + if "properties" not in node: + # This validation is only relevant when `traveled_path` is empty + raise ValueError( + f"Path {traveled_path} does not refer to an object but is `{node}` and hence {path} can't be marked as required." + ) + + next_node = path[0] + if next_node not in node["properties"]: + raise ValueError(f"Path {traveled_path} does not have field `{next_node}` in the schema and hence can't be marked as required.") + + if "type" not in node: + # We do not expect this case to happen but we added a specific error message just in case + raise ValueError( + f"Unknown schema error: {traveled_path} is expected to have a type but did not. Schema inferrence is probably broken" + ) + + if node["type"] not in ["object", ["null", "object"], ["object", "null"]]: + raise ValueError(f"Path {traveled_path} is expected to be an object but was of type `{node['properties'][next_node]['type']}`") + + if "required" not in node or not node["required"]: + node["required"] = [next_node] + elif next_node not in node["required"]: + node["required"].append(next_node) + + traveled_path.append(next_node) + self._add_field_as_required(node["properties"][next_node], path[1:], traveled_path) + + def _is_leaf(self, path: List[str]) -> bool: + return len(path) == 0 + + def _remove_null_from_type(self, node: InferredSchema) -> None: + if isinstance(node["type"], list): + if "null" in node["type"]: + node["type"].remove("null") + if len(node["type"]) == 1: + node["type"] = node["type"][0] + def get_stream_schema(self, stream_name: str) -> Optional[InferredSchema]: """ Returns the inferred JSON schema for the specified stream. Might be `None` if there were no records for the given stream name. """ - return self._clean(self.stream_to_builder[stream_name].to_schema()) if stream_name in self.stream_to_builder else None + return ( + self._add_required_properties(self._clean(self.stream_to_builder[stream_name].to_schema())) + if stream_name in self.stream_to_builder + else None + ) diff --git a/airbyte-cdk/python/airbyte_cdk/utils/traced_exception.py b/airbyte-cdk/python/airbyte_cdk/utils/traced_exception.py index 753296a5dd74d..04658980ed0f6 100644 --- a/airbyte-cdk/python/airbyte_cdk/utils/traced_exception.py +++ b/airbyte-cdk/python/airbyte_cdk/utils/traced_exception.py @@ -31,22 +31,28 @@ def __init__( message: Optional[str] = None, failure_type: FailureType = FailureType.system_error, exception: Optional[BaseException] = None, + stream_descriptor: Optional[StreamDescriptor] = None, ): """ :param internal_message: the internal error that caused the failure :param message: a user-friendly message that indicates the cause of the error :param failure_type: the type of error :param exception: the exception that caused the error, from which the stack trace should be retrieved + :param stream_descriptor: describe the stream from which the exception comes from """ self.internal_message = internal_message self.message = message self.failure_type = failure_type self._exception = exception + self._stream_descriptor = stream_descriptor super().__init__(internal_message) - def as_airbyte_message(self, stream_descriptor: StreamDescriptor = None) -> AirbyteMessage: + def as_airbyte_message(self, stream_descriptor: Optional[StreamDescriptor] = None) -> AirbyteMessage: """ Builds an AirbyteTraceMessage from the exception + + :param stream_descriptor is deprecated, please use the stream_description in `__init__ or `from_exception`. If many + stream_descriptors are defined, the one from `as_airbyte_message` will be discarded. """ now_millis = datetime.now().timestamp() * 1000.0 @@ -61,18 +67,18 @@ def as_airbyte_message(self, stream_descriptor: StreamDescriptor = None) -> Airb internal_message=self.internal_message, failure_type=self.failure_type, stack_trace=stack_trace_str, - stream_descriptor=stream_descriptor, + stream_descriptor=self._stream_descriptor if self._stream_descriptor is not None else stream_descriptor, ), ) return AirbyteMessage(type=MessageType.TRACE, trace=trace_message) - def as_connection_status_message(self) -> AirbyteMessage: + def as_connection_status_message(self) -> Optional[AirbyteMessage]: if self.failure_type == FailureType.config_error: - output_message = AirbyteMessage( + return AirbyteMessage( type=MessageType.CONNECTION_STATUS, connectionStatus=AirbyteConnectionStatus(status=Status.FAILED, message=self.message) ) - return output_message + return None def emit_message(self) -> None: """ @@ -84,16 +90,20 @@ def emit_message(self) -> None: print(filtered_message) @classmethod - def from_exception(cls, exc: BaseException, *args, **kwargs) -> "AirbyteTracedException": # type: ignore # ignoring because of args and kwargs + def from_exception(cls, exc: BaseException, stream_descriptor: Optional[StreamDescriptor] = None, *args, **kwargs) -> "AirbyteTracedException": # type: ignore # ignoring because of args and kwargs """ Helper to create an AirbyteTracedException from an existing exception :param exc: the exception that caused the error + :param stream_descriptor: describe the stream from which the exception comes from """ - return cls(internal_message=str(exc), exception=exc, *args, **kwargs) # type: ignore # ignoring because of args and kwargs + return cls(internal_message=str(exc), exception=exc, stream_descriptor=stream_descriptor, *args, **kwargs) # type: ignore # ignoring because of args and kwargs - def as_sanitized_airbyte_message(self, stream_descriptor: StreamDescriptor = None) -> AirbyteMessage: + def as_sanitized_airbyte_message(self, stream_descriptor: Optional[StreamDescriptor] = None) -> AirbyteMessage: """ Builds an AirbyteTraceMessage from the exception and sanitizes any secrets from the message body + + :param stream_descriptor is deprecated, please use the stream_description in `__init__ or `from_exception`. If many + stream_descriptors are defined, the one from `as_sanitized_airbyte_message` will be discarded. """ error_message = self.as_airbyte_message(stream_descriptor=stream_descriptor) if error_message.trace.error.message: diff --git a/airbyte-cdk/python/bin/build_code_generator_image.sh b/airbyte-cdk/python/bin/build_code_generator_image.sh deleted file mode 100755 index f73c318317c5a..0000000000000 --- a/airbyte-cdk/python/bin/build_code_generator_image.sh +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/env bash - -set -e - -DOCKER_BUILD_ARCH="${DOCKER_BUILD_ARCH:-amd64}" -# https://docs.docker.com/develop/develop-images/build_enhancements/ -export DOCKER_BUILDKIT=1 - -CODE_GENERATOR_DOCKERFILE="$(dirname $0)/../code-generator/Dockerfile" -test -f $CODE_GENERATOR_DOCKERFILE -docker build --build-arg DOCKER_BUILD_ARCH="$DOCKER_BUILD_ARCH" -t "airbyte/code-generator:dev" - < $CODE_GENERATOR_DOCKERFILE diff --git a/airbyte-cdk/python/bin/generate-component-manifest-files.sh b/airbyte-cdk/python/bin/generate-component-manifest-files.sh index d366d3ca9cdee..77bb636f8b520 100755 --- a/airbyte-cdk/python/bin/generate-component-manifest-files.sh +++ b/airbyte-cdk/python/bin/generate-component-manifest-files.sh @@ -2,7 +2,7 @@ set -e -[ -z "$ROOT_DIR" ] && exit 1 +ROOT_DIR=$(cd ../../ && pwd) YAML_DIR=airbyte-cdk/python/airbyte_cdk/sources/declarative OUTPUT_DIR=airbyte-cdk/python/airbyte_cdk/sources/declarative/models @@ -15,9 +15,9 @@ function main() { filename_wo_ext=$(basename "$f" | cut -d . -f 1) echo "from .$filename_wo_ext import *" >> "$ROOT_DIR/$OUTPUT_DIR"/__init__.py - docker run --user "$(id -u):$(id -g)" -v "$ROOT_DIR":/airbyte airbyte/code-generator:dev \ - --input "/airbyte/$YAML_DIR/$filename_wo_ext.yaml" \ - --output "/airbyte/$OUTPUT_DIR/$filename_wo_ext.py" \ + datamodel-codegen \ + --input "$ROOT_DIR/$YAML_DIR/$filename_wo_ext.yaml" \ + --output "$ROOT_DIR/$OUTPUT_DIR/$filename_wo_ext.py" \ --disable-timestamp \ --enum-field-as-literal one \ --set-default-enum-member @@ -33,7 +33,9 @@ function main() { # We can revisit this if there is movement on a fix. temp_file=$(mktemp) sed 's/ _parameters:/ parameters:/g' "$ROOT_DIR/$OUTPUT_DIR/$filename_wo_ext.py" > "${temp_file}" - mv "${temp_file}" "$ROOT_DIR/$OUTPUT_DIR/$filename_wo_ext.py" + output_file="$ROOT_DIR/$OUTPUT_DIR/$filename_wo_ext.py" + mv "${temp_file}" "${output_file}" + echo "Generated component manifest files into '${output_file}'." done } diff --git a/airbyte-cdk/python/bin/low-code-unit-tests.sh b/airbyte-cdk/python/bin/low-code-unit-tests.sh deleted file mode 100755 index d660e4373b82b..0000000000000 --- a/airbyte-cdk/python/bin/low-code-unit-tests.sh +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/env bash - -# Ideally we'd like to have set -e, but when set -e is used, the `python -m pytest unit_tests` command can return a -# non-zero exit code and end the whole script prematurely -# set -e - -[ -z "$ROOT_DIR" ] && exit 1 - -CONNECTORS_DIR=$ROOT_DIR/airbyte-integrations/connectors -CDK_DIR=$ROOT_DIR/airbyte-cdk/python/ - -for directory in $CONNECTORS_DIR/source-* ; do - MANIFEST_DIRECTORY=$(basename $directory | tr - _) - SOURCE_NAME=${MANIFEST_DIRECTORY#source_} - if test -f "$directory/$MANIFEST_DIRECTORY/manifest.yaml"; then - cd $directory - - # Unit tests are optional for most connectors unless they implement custom components - if [ -d "unit_tests" ]; then - rm -rf .venv - python -m venv .venv - source .venv/bin/activate - pip install -r requirements.txt > /dev/null 2>&1 - pip install -e ".[tests]" > /dev/null 2>&1 - pip install -e $CDK_DIR > /dev/null 2>&1 - - test_output=$(python -m pytest unit_tests) - ret=$? - if [[ "$test_output" == *"no tests ran"* ]]; then - # When there are no tests defined, code 5 gets emitted so we should also check test output for no tests run - echo "Source $SOURCE_NAME did not have any tests" - elif [ $ret -ne 0 ]; then - echo "----Tests failed for source $SOURCE_NAME" - else - echo "Source $SOURCE_NAME passed tests" - fi - - deactivate - cd .. - fi - fi -done diff --git a/airbyte-cdk/python/bin/migrate_field_point_to_field_path.sh b/airbyte-cdk/python/bin/migrate_field_point_to_field_path.sh deleted file mode 100755 index edcc1510f9b04..0000000000000 --- a/airbyte-cdk/python/bin/migrate_field_point_to_field_path.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/usr/bin/env bash - -echo "Starting migration" - -[ -z "$ROOT_DIR" ] && exit 1 - -CONNECTORS_DIR=$ROOT_DIR/airbyte-integrations/connectors -CDK_DIR=$ROOT_DIR/airbyte-cdk/python/ - -for directory in $CONNECTORS_DIR/source-* ; do - MANIFEST_DIRECTORY=$(basename $directory | tr - _) - FILEPATH=$directory/$MANIFEST_DIRECTORY/manifest.yaml - - if test -f $FILEPATH; then - echo "Migrating manifest located at $FILEPATH" - - sed -i '' -E 's/\field_pointer:/\field_path:/' $FILEPATH - fi -done diff --git a/airbyte-cdk/python/bin/migrate_options_to_parameters.sh b/airbyte-cdk/python/bin/migrate_options_to_parameters.sh deleted file mode 100755 index ac0d5a7921aed..0000000000000 --- a/airbyte-cdk/python/bin/migrate_options_to_parameters.sh +++ /dev/null @@ -1,23 +0,0 @@ -#!/usr/bin/env bash - -echo "Starting migration" - -[ -z "$ROOT_DIR" ] && exit 1 - -CONNECTORS_DIR=$ROOT_DIR/airbyte-integrations/connectors -CDK_DIR=$ROOT_DIR/airbyte-cdk/python/ - -for directory in $CONNECTORS_DIR/source-* ; do - MANIFEST_DIRECTORY=$(basename $directory | tr - _) - FILEPATH=$directory/$MANIFEST_DIRECTORY/manifest.yaml - - if test -f $FILEPATH; then - echo "Migrating manifest located at $FILEPATH" - - # In place replacement of $options to $parameters - sed -i '' -E 's/\$options/\$parameters/' $FILEPATH - - # In place replacement of options used in interpolated curly braces {{ }} - sed -i '' -E 's/{{ options/{{ parameters/' $FILEPATH - fi -done \ No newline at end of file diff --git a/airbyte-cdk/python/bin/migrate_stream_slicer.sh b/airbyte-cdk/python/bin/migrate_stream_slicer.sh deleted file mode 100755 index a274fbdd24e53..0000000000000 --- a/airbyte-cdk/python/bin/migrate_stream_slicer.sh +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env bash - -echo "Starting migration" - -[ -z "$ROOT_DIR" ] && exit 1 - -CONNECTORS_DIR=$ROOT_DIR/airbyte-integrations/connectors -CDK_DIR=$ROOT_DIR/airbyte-cdk/python/ - -for directory in $CONNECTORS_DIR/source-* ; do - MANIFEST_DIRECTORY=$(basename $directory | tr - _) - FILEPATH=$directory/$MANIFEST_DIRECTORY/manifest.yaml - - if test -f $FILEPATH; then - echo "Migrating manifest located at $FILEPATH" - - sed -i '' -E 's/\stream_slicer/\partition_router/' $FILEPATH - sed -i '' -E 's/\iterable/\partition_router/' $FILEPATH - fi -done diff --git a/airbyte-cdk/python/bin/run-mypy-on-modified-files.sh b/airbyte-cdk/python/bin/run-mypy-on-modified-files.sh index 0b42bc8a7a8f4..96f757be1b7d6 100755 --- a/airbyte-cdk/python/bin/run-mypy-on-modified-files.sh +++ b/airbyte-cdk/python/bin/run-mypy-on-modified-files.sh @@ -7,7 +7,7 @@ cd "$(dirname "${0}")/.." || exit 1 # TODO change this to include unit_tests as well once it's in a good state { - git diff --name-only --relative ':(exclude)unit_tests' - git diff --name-only --staged --relative ':(exclude)unit_tests' - git diff --name-only master... --relative ':(exclude)unit_tests' -} | grep -E '\.py$' | sort | uniq | xargs .venv/bin/python -m mypy --config-file mypy.ini --install-types --non-interactive + git diff --name-only --diff-filter=d --relative ':(exclude)unit_tests' + git diff --name-only --diff-filter=d --staged --relative ':(exclude)unit_tests' + git diff --name-only --diff-filter=d master... --relative ':(exclude)unit_tests' +} | grep -E '\.py$' | sort | uniq | xargs mypy --config-file mypy.ini --install-types --non-interactive diff --git a/airbyte-cdk/python/bin/update_ref_format.sh b/airbyte-cdk/python/bin/update_ref_format.sh deleted file mode 100755 index a63cb9be46e03..0000000000000 --- a/airbyte-cdk/python/bin/update_ref_format.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env bash - -[ -z "$ROOT_DIR" ] && exit 1 - -CONNECTORS_DIR=$ROOT_DIR/airbyte-integrations/connectors -CDK_DIR=$ROOT_DIR/airbyte-cdk/python/ - -for directory in $CONNECTORS_DIR/source-* ; do - MANIFEST_DIRECTORY=$(basename $directory | tr - _) - FILEPATH=$directory/$MANIFEST_DIRECTORY/manifest.yaml - - if test -f $FILEPATH; then - echo "Migrating manifest located at $FILEPATH" - - gsed -i -E 's/\*ref\((.*)\)/#\/\1/' $FILEPATH - gsed -i -E '/#\// y/./\//' $FILEPATH - fi -done diff --git a/airbyte-cdk/python/bin/validate-yaml-schema.sh b/airbyte-cdk/python/bin/validate-yaml-schema.sh deleted file mode 100755 index bc3bbf448115c..0000000000000 --- a/airbyte-cdk/python/bin/validate-yaml-schema.sh +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env bash - -[ -z "$ROOT_DIR" ] && exit 1 - -CONNECTORS_DIR=$ROOT_DIR/airbyte-integrations/connectors -CDK_DIR=$ROOT_DIR/airbyte-cdk/python/ - -for directory in $CONNECTORS_DIR/source-* ; do - MANIFEST_DIRECTORY=$(basename $directory | tr - _) - SOURCE_NAME=${MANIFEST_DIRECTORY#source_} - if test -f "$directory/$MANIFEST_DIRECTORY/manifest.yaml"; then - cd $directory - - rm -rf .venv - python -m venv .venv - source .venv/bin/activate - pip install -r requirements.txt > /dev/null 2>&1 - pip install -e ".[tests]" > /dev/null 2>&1 - pip install -e $CDK_DIR > /dev/null 2>&1 - - python main.py spec > /dev/null 2>&1 - ret=$? - if [ $ret -ne 0 ]; then - echo "----Error for source $SOURCE_NAME" - else - echo "Source $SOURCE_NAME is fine" - fi - - deactivate - cd .. - fi -done diff --git a/airbyte-cdk/python/build.gradle b/airbyte-cdk/python/build.gradle deleted file mode 100644 index d1531c04d4458..0000000000000 --- a/airbyte-cdk/python/build.gradle +++ /dev/null @@ -1,134 +0,0 @@ -import ru.vyarus.gradle.plugin.python.task.PythonTask - -plugins { - id 'base' - id 'ru.vyarus.use-python' version '2.3.0' -} - -def generateCodeGeneratorImage = tasks.register('generateCodeGeneratorImage', Exec) { - commandLine 'bin/build_code_generator_image.sh' -} -def generateComponentManifestClassFiles = tasks.register('generateComponentManifestClassFiles', Exec) { - environment 'ROOT_DIR', rootDir.parentFile.parentFile.absolutePath - commandLine 'bin/generate-component-manifest-files.sh' -} -generateComponentManifestClassFiles.configure { - dependsOn generateCodeGeneratorImage -} -tasks.named('assemble').configure { - dependsOn generateComponentManifestClassFiles -} - -tasks.register('validateSourceYamlManifest', Exec) { - environment 'ROOT_DIR', rootDir.parentFile.parentFile.absolutePath - commandLine 'bin/validate-yaml-schema.sh' -} - -tasks.register('runLowCodeConnectorUnitTests', Exec) { - environment 'ROOT_DIR', rootDir.parentFile.parentFile.absolutePath - commandLine 'bin/low-code-unit-tests.sh' -} - -def venvDirectoryName = '.venv' - -// Add a task that allows cleaning up venvs to every python project -def cleanPythonVenv = tasks.register('cleanPythonVenv', Exec) { - commandLine 'rm' - args '-rf', "${projectDir.absolutePath}/${venvDirectoryName}" -} - -tasks.named('clean').configure { - dependsOn cleanPythonVenv -} - -// Configure gradle python plugin. -python { - envPath = venvDirectoryName - minPythonVersion '3.10' - - // Amazon Linux support. - // The airbyte-ci tool runs gradle tasks in AL2023-based containers. - // In AL2023, `python3` is necessarily v3.9, and later pythons need to be installed and named explicitly. - // See https://github.com/amazonlinux/amazon-linux-2023/issues/459 for details. - try { - if ("python3.11 --version".execute().waitFor() == 0) { - // python3.11 definitely exists at this point, use it instead of 'python3'. - pythonBinary "python3.11" - } - } catch (IOException _) { - // Swallow exception if python3.11 is not installed. - } - // Pyenv support. - try { - def pyenvRoot = "pyenv root".execute() - def pyenvLatest = "pyenv latest ${minPythonVersion}".execute() - // Pyenv definitely exists at this point: use 'python' instead of 'python3' in all cases. - pythonBinary "python" - if (pyenvRoot.waitFor() == 0 && pyenvLatest.waitFor() == 0) { - pythonPath "${pyenvRoot.text.trim()}/versions/${pyenvLatest.text.trim()}/bin" - } - } catch (IOException _) { - // Swallow exception if pyenv is not installed. - } - - scope 'VIRTUALENV' - installVirtualenv = true - pip 'pip:23.2.1' - pip 'mccabe:0.6.1' - // https://github.com/csachs/pyproject-flake8/issues/13 - pip 'flake8:4.0.1' - // flake8 doesn't support pyproject.toml files - // and thus there is the wrapper "pyproject-flake8" for this - pip 'pyproject-flake8:0.0.1a2' - pip 'pytest:6.2.5' - pip 'coverage[toml]:6.3.1' -} - -def installLocalReqs = tasks.register('installLocalReqs', PythonTask) { - module = "pip" - command = "install .[dev,tests]" - inputs.file('setup.py') - outputs.file('build/installedlocalreqs.txt') -} - -def flakeCheck = tasks.register('flakeCheck', PythonTask) { - module = "pflake8" - command = "--config pyproject.toml ./" -} - -def installReqs = tasks.register('installReqs', PythonTask) { - module = "pip" - command = "install .[main]" - inputs.file('setup.py') - outputs.file('build/installedreqs.txt') -} -installReqs.configure { - dependsOn installLocalReqs -} - -tasks.named('check').configure { - dependsOn installReqs - dependsOn flakeCheck -} - -def installTestReqs = tasks.register('installTestReqs', PythonTask) { - module = "pip" - command = "install .[tests]" - inputs.file('setup.py') - outputs.file('build/installedtestreqs.txt') -} -installTestReqs.configure { - dependsOn installReqs -} - -def testTask = tasks.register('testPython', PythonTask) { - module = "coverage" - command = "run --data-file=unit_tests/.coverage.testPython --rcfile=pyproject.toml -m pytest -s unit_tests -c pytest.ini" -} -testTask.configure { - dependsOn installTestReqs -} - -tasks.named('check').configure { - dependsOn testTask -} diff --git a/airbyte-cdk/python/code-generator/Dockerfile b/airbyte-cdk/python/code-generator/Dockerfile deleted file mode 100644 index d7b6b61999093..0000000000000 --- a/airbyte-cdk/python/code-generator/Dockerfile +++ /dev/null @@ -1,21 +0,0 @@ -FROM python:3.11.0b5-alpine3.15 as base -FROM base as builder - - -RUN apk --no-cache upgrade \ - && pip install --upgrade pip \ - && apk add g++ make - -# the new version (>= 2.1.0) of package markupsafe removed the funcion `soft_unicode`. And it broke other dependences -# https://github.com/pallets/markupsafe/blob/main/CHANGES.rst -# thus this version is pinned -# RUN pip install --prefix=/install markupsafe==2.0.1 -RUN pip install --prefix=/install black==22.1.0 datamodel_code_generator==0.11.19 - -FROM base -COPY --from=builder /install /usr/local - -ENTRYPOINT ["datamodel-codegen"] - -LABEL io.airbyte.version=dev -LABEL io.airbyte.name=airbyte/code-generator diff --git a/airbyte-cdk/python/gradle.properties b/airbyte-cdk/python/gradle.properties deleted file mode 100644 index a458cfe27eb92..0000000000000 --- a/airbyte-cdk/python/gradle.properties +++ /dev/null @@ -1,11 +0,0 @@ -# NOTE: some of these values are overwritten in CI! -# NOTE: if you want to override this for your local machine, set overrides in ~/.gradle/gradle.properties - -org.gradle.parallel=true -org.gradle.caching=true - -# Note, this might have issues on the normal Github runner. -org.gradle.vfs.watch=true - -# Tune # of cores Gradle uses. -# org.gradle.workers.max=3 diff --git a/airbyte-cdk/python/gradle/wrapper/gradle-wrapper.jar b/airbyte-cdk/python/gradle/wrapper/gradle-wrapper.jar deleted file mode 100644 index 7f93135c49b76..0000000000000 Binary files a/airbyte-cdk/python/gradle/wrapper/gradle-wrapper.jar and /dev/null differ diff --git a/airbyte-cdk/python/gradle/wrapper/gradle-wrapper.properties b/airbyte-cdk/python/gradle/wrapper/gradle-wrapper.properties deleted file mode 100644 index a80b22ce5cffe..0000000000000 --- a/airbyte-cdk/python/gradle/wrapper/gradle-wrapper.properties +++ /dev/null @@ -1,7 +0,0 @@ -distributionBase=GRADLE_USER_HOME -distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-8.6-bin.zip -networkTimeout=10000 -validateDistributionUrl=true -zipStoreBase=GRADLE_USER_HOME -zipStorePath=wrapper/dists diff --git a/airbyte-cdk/python/gradlew b/airbyte-cdk/python/gradlew deleted file mode 100755 index 1aa94a4269074..0000000000000 --- a/airbyte-cdk/python/gradlew +++ /dev/null @@ -1,249 +0,0 @@ -#!/bin/sh - -# -# Copyright © 2015-2021 the original authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -############################################################################## -# -# Gradle start up script for POSIX generated by Gradle. -# -# Important for running: -# -# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is -# noncompliant, but you have some other compliant shell such as ksh or -# bash, then to run this script, type that shell name before the whole -# command line, like: -# -# ksh Gradle -# -# Busybox and similar reduced shells will NOT work, because this script -# requires all of these POSIX shell features: -# * functions; -# * expansions «$var», «${var}», «${var:-default}», «${var+SET}», -# «${var#prefix}», «${var%suffix}», and «$( cmd )»; -# * compound commands having a testable exit status, especially «case»; -# * various built-in commands including «command», «set», and «ulimit». -# -# Important for patching: -# -# (2) This script targets any POSIX shell, so it avoids extensions provided -# by Bash, Ksh, etc; in particular arrays are avoided. -# -# The "traditional" practice of packing multiple parameters into a -# space-separated string is a well documented source of bugs and security -# problems, so this is (mostly) avoided, by progressively accumulating -# options in "$@", and eventually passing that to Java. -# -# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, -# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; -# see the in-line comments for details. -# -# There are tweaks for specific operating systems such as AIX, CygWin, -# Darwin, MinGW, and NonStop. -# -# (3) This script is generated from the Groovy template -# https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt -# within the Gradle project. -# -# You can find Gradle at https://github.com/gradle/gradle/. -# -############################################################################## - -# Attempt to set APP_HOME - -# Resolve links: $0 may be a link -app_path=$0 - -# Need this for daisy-chained symlinks. -while - APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path - [ -h "$app_path" ] -do - ls=$( ls -ld "$app_path" ) - link=${ls#*' -> '} - case $link in #( - /*) app_path=$link ;; #( - *) app_path=$APP_HOME$link ;; - esac -done - -# This is normally unused -# shellcheck disable=SC2034 -APP_BASE_NAME=${0##*/} -# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036) -APP_HOME=$( cd "${APP_HOME:-./}" > /dev/null && pwd -P ) || exit - -# Use the maximum available, or set MAX_FD != -1 to use that value. -MAX_FD=maximum - -warn () { - echo "$*" -} >&2 - -die () { - echo - echo "$*" - echo - exit 1 -} >&2 - -# OS specific support (must be 'true' or 'false'). -cygwin=false -msys=false -darwin=false -nonstop=false -case "$( uname )" in #( - CYGWIN* ) cygwin=true ;; #( - Darwin* ) darwin=true ;; #( - MSYS* | MINGW* ) msys=true ;; #( - NONSTOP* ) nonstop=true ;; -esac - -CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar - - -# Determine the Java command to use to start the JVM. -if [ -n "$JAVA_HOME" ] ; then - if [ -x "$JAVA_HOME/jre/sh/java" ] ; then - # IBM's JDK on AIX uses strange locations for the executables - JAVACMD=$JAVA_HOME/jre/sh/java - else - JAVACMD=$JAVA_HOME/bin/java - fi - if [ ! -x "$JAVACMD" ] ; then - die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME - -Please set the JAVA_HOME variable in your environment to match the -location of your Java installation." - fi -else - JAVACMD=java - if ! command -v java >/dev/null 2>&1 - then - die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. - -Please set the JAVA_HOME variable in your environment to match the -location of your Java installation." - fi -fi - -# Increase the maximum file descriptors if we can. -if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then - case $MAX_FD in #( - max*) - # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked. - # shellcheck disable=SC2039,SC3045 - MAX_FD=$( ulimit -H -n ) || - warn "Could not query maximum file descriptor limit" - esac - case $MAX_FD in #( - '' | soft) :;; #( - *) - # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked. - # shellcheck disable=SC2039,SC3045 - ulimit -n "$MAX_FD" || - warn "Could not set maximum file descriptor limit to $MAX_FD" - esac -fi - -# Collect all arguments for the java command, stacking in reverse order: -# * args from the command line -# * the main class name -# * -classpath -# * -D...appname settings -# * --module-path (only if needed) -# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. - -# For Cygwin or MSYS, switch paths to Windows format before running java -if "$cygwin" || "$msys" ; then - APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) - CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) - - JAVACMD=$( cygpath --unix "$JAVACMD" ) - - # Now convert the arguments - kludge to limit ourselves to /bin/sh - for arg do - if - case $arg in #( - -*) false ;; # don't mess with options #( - /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath - [ -e "$t" ] ;; #( - *) false ;; - esac - then - arg=$( cygpath --path --ignore --mixed "$arg" ) - fi - # Roll the args list around exactly as many times as the number of - # args, so each arg winds up back in the position where it started, but - # possibly modified. - # - # NB: a `for` loop captures its iteration list before it begins, so - # changing the positional parameters here affects neither the number of - # iterations, nor the values presented in `arg`. - shift # remove old arg - set -- "$@" "$arg" # push replacement arg - done -fi - - -# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' - -# Collect all arguments for the java command: -# * DEFAULT_JVM_OPTS, JAVA_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments, -# and any embedded shellness will be escaped. -# * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be -# treated as '${Hostname}' itself on the command line. - -set -- \ - "-Dorg.gradle.appname=$APP_BASE_NAME" \ - -classpath "$CLASSPATH" \ - org.gradle.wrapper.GradleWrapperMain \ - "$@" - -# Stop when "xargs" is not available. -if ! command -v xargs >/dev/null 2>&1 -then - die "xargs is not available" -fi - -# Use "xargs" to parse quoted args. -# -# With -n1 it outputs one arg per line, with the quotes and backslashes removed. -# -# In Bash we could simply go: -# -# readarray ARGS < <( xargs -n1 <<<"$var" ) && -# set -- "${ARGS[@]}" "$@" -# -# but POSIX shell has neither arrays nor command substitution, so instead we -# post-process each arg (as a line of input to sed) to backslash-escape any -# character that might be a shell metacharacter, then use eval to reverse -# that process (while maintaining the separation between arguments), and wrap -# the whole thing up as a single "set" statement. -# -# This will of course break if any of these variables contains a newline or -# an unmatched quote. -# - -eval "set -- $( - printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | - xargs -n1 | - sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | - tr '\n' ' ' - )" '"$@"' - -exec "$JAVACMD" "$@" diff --git a/airbyte-cdk/python/gradlew.bat b/airbyte-cdk/python/gradlew.bat deleted file mode 100644 index 6689b85beecde..0000000000000 --- a/airbyte-cdk/python/gradlew.bat +++ /dev/null @@ -1,92 +0,0 @@ -@rem -@rem Copyright 2015 the original author or authors. -@rem -@rem Licensed under the Apache License, Version 2.0 (the "License"); -@rem you may not use this file except in compliance with the License. -@rem You may obtain a copy of the License at -@rem -@rem https://www.apache.org/licenses/LICENSE-2.0 -@rem -@rem Unless required by applicable law or agreed to in writing, software -@rem distributed under the License is distributed on an "AS IS" BASIS, -@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -@rem See the License for the specific language governing permissions and -@rem limitations under the License. -@rem - -@if "%DEBUG%"=="" @echo off -@rem ########################################################################## -@rem -@rem Gradle startup script for Windows -@rem -@rem ########################################################################## - -@rem Set local scope for the variables with windows NT shell -if "%OS%"=="Windows_NT" setlocal - -set DIRNAME=%~dp0 -if "%DIRNAME%"=="" set DIRNAME=. -@rem This is normally unused -set APP_BASE_NAME=%~n0 -set APP_HOME=%DIRNAME% - -@rem Resolve any "." and ".." in APP_HOME to make it shorter. -for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi - -@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" - -@rem Find java.exe -if defined JAVA_HOME goto findJavaFromJavaHome - -set JAVA_EXE=java.exe -%JAVA_EXE% -version >NUL 2>&1 -if %ERRORLEVEL% equ 0 goto execute - -echo. -echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. -echo. -echo Please set the JAVA_HOME variable in your environment to match the -echo location of your Java installation. - -goto fail - -:findJavaFromJavaHome -set JAVA_HOME=%JAVA_HOME:"=% -set JAVA_EXE=%JAVA_HOME%/bin/java.exe - -if exist "%JAVA_EXE%" goto execute - -echo. -echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% -echo. -echo Please set the JAVA_HOME variable in your environment to match the -echo location of your Java installation. - -goto fail - -:execute -@rem Setup the command line - -set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar - - -@rem Execute Gradle -"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* - -:end -@rem End local scope for the variables with windows NT shell -if %ERRORLEVEL% equ 0 goto mainEnd - -:fail -rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of -rem the _cmd.exe /c_ return code! -set EXIT_CODE=%ERRORLEVEL% -if %EXIT_CODE% equ 0 set EXIT_CODE=1 -if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% -exit /b %EXIT_CODE% - -:mainEnd -if "%OS%"=="Windows_NT" endlocal - -:omega diff --git a/airbyte-cdk/python/poetry.lock b/airbyte-cdk/python/poetry.lock new file mode 100644 index 0000000000000..e7ad547f2356f --- /dev/null +++ b/airbyte-cdk/python/poetry.lock @@ -0,0 +1,4575 @@ +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. + +[[package]] +name = "aiohttp" +version = "3.9.4" +description = "Async http client/server framework (asyncio)" +optional = true +python-versions = ">=3.8" +files = [ + {file = "aiohttp-3.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:76d32588ef7e4a3f3adff1956a0ba96faabbdee58f2407c122dd45aa6e34f372"}, + {file = "aiohttp-3.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:56181093c10dbc6ceb8a29dfeea1e815e1dfdc020169203d87fd8d37616f73f9"}, + {file = "aiohttp-3.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7a5b676d3c65e88b3aca41816bf72831898fcd73f0cbb2680e9d88e819d1e4d"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1df528a85fb404899d4207a8d9934cfd6be626e30e5d3a5544a83dbae6d8a7e"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f595db1bceabd71c82e92df212dd9525a8a2c6947d39e3c994c4f27d2fe15b11"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c0b09d76e5a4caac3d27752027fbd43dc987b95f3748fad2b924a03fe8632ad"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:689eb4356649ec9535b3686200b231876fb4cab4aca54e3bece71d37f50c1d13"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3666cf4182efdb44d73602379a66f5fdfd5da0db5e4520f0ac0dcca644a3497"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b65b0f8747b013570eea2f75726046fa54fa8e0c5db60f3b98dd5d161052004a"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1885d2470955f70dfdd33a02e1749613c5a9c5ab855f6db38e0b9389453dce7"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0593822dcdb9483d41f12041ff7c90d4d1033ec0e880bcfaf102919b715f47f1"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:47f6eb74e1ecb5e19a78f4a4228aa24df7fbab3b62d4a625d3f41194a08bd54f"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c8b04a3dbd54de6ccb7604242fe3ad67f2f3ca558f2d33fe19d4b08d90701a89"}, + {file = "aiohttp-3.9.4-cp310-cp310-win32.whl", hash = "sha256:8a78dfb198a328bfb38e4308ca8167028920fb747ddcf086ce706fbdd23b2926"}, + {file = "aiohttp-3.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:e78da6b55275987cbc89141a1d8e75f5070e577c482dd48bd9123a76a96f0bbb"}, + {file = "aiohttp-3.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c111b3c69060d2bafc446917534150fd049e7aedd6cbf21ba526a5a97b4402a5"}, + {file = "aiohttp-3.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:efbdd51872cf170093998c87ccdf3cb5993add3559341a8e5708bcb311934c94"}, + {file = "aiohttp-3.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7bfdb41dc6e85d8535b00d73947548a748e9534e8e4fddd2638109ff3fb081df"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bd9d334412961125e9f68d5b73c1d0ab9ea3f74a58a475e6b119f5293eee7ba"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:35d78076736f4a668d57ade00c65d30a8ce28719d8a42471b2a06ccd1a2e3063"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:824dff4f9f4d0f59d0fa3577932ee9a20e09edec8a2f813e1d6b9f89ced8293f"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52b8b4e06fc15519019e128abedaeb56412b106ab88b3c452188ca47a25c4093"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eae569fb1e7559d4f3919965617bb39f9e753967fae55ce13454bec2d1c54f09"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:69b97aa5792428f321f72aeb2f118e56893371f27e0b7d05750bcad06fc42ca1"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4d79aad0ad4b980663316f26d9a492e8fab2af77c69c0f33780a56843ad2f89e"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:d6577140cd7db19e430661e4b2653680194ea8c22c994bc65b7a19d8ec834403"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:9860d455847cd98eb67897f5957b7cd69fbcb436dd3f06099230f16a66e66f79"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:69ff36d3f8f5652994e08bd22f093e11cfd0444cea310f92e01b45a4e46b624e"}, + {file = "aiohttp-3.9.4-cp311-cp311-win32.whl", hash = "sha256:e27d3b5ed2c2013bce66ad67ee57cbf614288bda8cdf426c8d8fe548316f1b5f"}, + {file = "aiohttp-3.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d6a67e26daa686a6fbdb600a9af8619c80a332556245fa8e86c747d226ab1a1e"}, + {file = "aiohttp-3.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c5ff8ff44825736a4065d8544b43b43ee4c6dd1530f3a08e6c0578a813b0aa35"}, + {file = "aiohttp-3.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d12a244627eba4e9dc52cbf924edef905ddd6cafc6513849b4876076a6f38b0e"}, + {file = "aiohttp-3.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dcad56c8d8348e7e468899d2fb3b309b9bc59d94e6db08710555f7436156097f"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f7e69a7fd4b5ce419238388e55abd220336bd32212c673ceabc57ccf3d05b55"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4870cb049f10d7680c239b55428916d84158798eb8f353e74fa2c98980dcc0b"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2feaf1b7031ede1bc0880cec4b0776fd347259a723d625357bb4b82f62687b"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:939393e8c3f0a5bcd33ef7ace67680c318dc2ae406f15e381c0054dd658397de"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d2334e387b2adcc944680bebcf412743f2caf4eeebd550f67249c1c3696be04"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e0198ea897680e480845ec0ffc5a14e8b694e25b3f104f63676d55bf76a82f1a"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e40d2cd22914d67c84824045861a5bb0fb46586b15dfe4f046c7495bf08306b2"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:aba80e77c227f4234aa34a5ff2b6ff30c5d6a827a91d22ff6b999de9175d71bd"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:fb68dc73bc8ac322d2e392a59a9e396c4f35cb6fdbdd749e139d1d6c985f2527"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f3460a92638dce7e47062cf088d6e7663adb135e936cb117be88d5e6c48c9d53"}, + {file = "aiohttp-3.9.4-cp312-cp312-win32.whl", hash = "sha256:32dc814ddbb254f6170bca198fe307920f6c1308a5492f049f7f63554b88ef36"}, + {file = "aiohttp-3.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:63f41a909d182d2b78fe3abef557fcc14da50c7852f70ae3be60e83ff64edba5"}, + {file = "aiohttp-3.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c3770365675f6be220032f6609a8fbad994d6dcf3ef7dbcf295c7ee70884c9af"}, + {file = "aiohttp-3.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:305edae1dea368ce09bcb858cf5a63a064f3bff4767dec6fa60a0cc0e805a1d3"}, + {file = "aiohttp-3.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6f121900131d116e4a93b55ab0d12ad72573f967b100e49086e496a9b24523ea"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b71e614c1ae35c3d62a293b19eface83d5e4d194e3eb2fabb10059d33e6e8cbf"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:419f009fa4cfde4d16a7fc070d64f36d70a8d35a90d71aa27670bba2be4fd039"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b39476ee69cfe64061fd77a73bf692c40021f8547cda617a3466530ef63f947"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b33f34c9c7decdb2ab99c74be6443942b730b56d9c5ee48fb7df2c86492f293c"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c78700130ce2dcebb1a8103202ae795be2fa8c9351d0dd22338fe3dac74847d9"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:268ba22d917655d1259af2d5659072b7dc11b4e1dc2cb9662fdd867d75afc6a4"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:17e7c051f53a0d2ebf33013a9cbf020bb4e098c4bc5bce6f7b0c962108d97eab"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:7be99f4abb008cb38e144f85f515598f4c2c8932bf11b65add0ff59c9c876d99"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:d58a54d6ff08d2547656356eea8572b224e6f9bbc0cf55fa9966bcaac4ddfb10"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7673a76772bda15d0d10d1aa881b7911d0580c980dbd16e59d7ba1422b2d83cd"}, + {file = "aiohttp-3.9.4-cp38-cp38-win32.whl", hash = "sha256:e4370dda04dc8951012f30e1ce7956a0a226ac0714a7b6c389fb2f43f22a250e"}, + {file = "aiohttp-3.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:eb30c4510a691bb87081192a394fb661860e75ca3896c01c6d186febe7c88530"}, + {file = "aiohttp-3.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:84e90494db7df3be5e056f91412f9fa9e611fbe8ce4aaef70647297f5943b276"}, + {file = "aiohttp-3.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7d4845f8501ab28ebfdbeab980a50a273b415cf69e96e4e674d43d86a464df9d"}, + {file = "aiohttp-3.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:69046cd9a2a17245c4ce3c1f1a4ff8c70c7701ef222fce3d1d8435f09042bba1"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b73a06bafc8dcc508420db43b4dd5850e41e69de99009d0351c4f3007960019"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:418bb0038dfafeac923823c2e63226179976c76f981a2aaad0ad5d51f2229bca"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:71a8f241456b6c2668374d5d28398f8e8cdae4cce568aaea54e0f39359cd928d"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:935c369bf8acc2dc26f6eeb5222768aa7c62917c3554f7215f2ead7386b33748"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74e4e48c8752d14ecfb36d2ebb3d76d614320570e14de0a3aa7a726ff150a03c"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:916b0417aeddf2c8c61291238ce25286f391a6acb6f28005dd9ce282bd6311b6"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9b6787b6d0b3518b2ee4cbeadd24a507756ee703adbac1ab6dc7c4434b8c572a"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:221204dbda5ef350e8db6287937621cf75e85778b296c9c52260b522231940ed"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:10afd99b8251022ddf81eaed1d90f5a988e349ee7d779eb429fb07b670751e8c"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2506d9f7a9b91033201be9ffe7d89c6a54150b0578803cce5cb84a943d075bc3"}, + {file = "aiohttp-3.9.4-cp39-cp39-win32.whl", hash = "sha256:e571fdd9efd65e86c6af2f332e0e95dad259bfe6beb5d15b3c3eca3a6eb5d87b"}, + {file = "aiohttp-3.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:7d29dd5319d20aa3b7749719ac9685fbd926f71ac8c77b2477272725f882072d"}, + {file = "aiohttp-3.9.4.tar.gz", hash = "sha256:6ff71ede6d9a5a58cfb7b6fffc83ab5d4a63138276c771ac91ceaaddf5459644"}, +] + +[package.dependencies] +aiosignal = ">=1.1.2" +async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} +attrs = ">=17.3.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +yarl = ">=1.0,<2.0" + +[package.extras] +speedups = ["Brotli", "aiodns", "brotlicffi"] + +[[package]] +name = "aiosignal" +version = "1.3.1" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = true +python-versions = ">=3.7" +files = [ + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, + {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" + +[[package]] +name = "airbyte-protocol-models" +version = "0.9.0" +description = "Declares the Airbyte Protocol." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte_protocol_models-0.9.0-py3-none-any.whl", hash = "sha256:e972e140b5efd1edad5a338bcae8fdee9fc12545caf2c321e0f61b151c163a9b"}, + {file = "airbyte_protocol_models-0.9.0.tar.gz", hash = "sha256:40b69c33df23fe82d7078e84beb123bd604480e4d73cb277a890fcc92aedc8d2"}, +] + +[package.dependencies] +pydantic = ">=1.9.2,<2.0.0" + +[[package]] +name = "alabaster" +version = "0.7.16" +description = "A light, configurable Sphinx theme" +optional = true +python-versions = ">=3.9" +files = [ + {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"}, + {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, +] + +[[package]] +name = "argcomplete" +version = "2.1.2" +description = "Bash tab completion for argparse" +optional = false +python-versions = ">=3.6" +files = [ + {file = "argcomplete-2.1.2-py3-none-any.whl", hash = "sha256:4ba9cdaa28c361d251edce884cd50b4b1215d65cdc881bd204426cdde9f52731"}, + {file = "argcomplete-2.1.2.tar.gz", hash = "sha256:fc82ef070c607b1559b5c720529d63b54d9dcf2dcfc2632b10e6372314a34457"}, +] + +[package.extras] +lint = ["flake8", "mypy"] +test = ["coverage", "flake8", "mypy", "pexpect", "wheel"] + +[[package]] +name = "async-timeout" +version = "4.0.3" +description = "Timeout context manager for asyncio programs" +optional = true +python-versions = ">=3.7" +files = [ + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, +] + +[[package]] +name = "atomicwrites" +version = "1.4.1" +description = "Atomic file writes." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, +] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "avro" +version = "1.11.3" +description = "Avro is a serialization and RPC framework." +optional = true +python-versions = ">=3.6" +files = [ + {file = "avro-1.11.3.tar.gz", hash = "sha256:3393bb5139f9cf0791d205756ce1e39a5b58586af5b153d6a3b5a199610e9d17"}, +] + +[package.extras] +snappy = ["python-snappy"] +zstandard = ["zstandard"] + +[[package]] +name = "babel" +version = "2.14.0" +description = "Internationalization utilities" +optional = true +python-versions = ">=3.7" +files = [ + {file = "Babel-2.14.0-py3-none-any.whl", hash = "sha256:efb1a25b7118e67ce3a259bed20545c29cb68be8ad2c784c83689981b7a57287"}, + {file = "Babel-2.14.0.tar.gz", hash = "sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363"}, +] + +[package.extras] +dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "beautifulsoup4" +version = "4.12.3" +description = "Screen-scraping library" +optional = true +python-versions = ">=3.6.0" +files = [ + {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, + {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, +] + +[package.dependencies] +soupsieve = ">1.2" + +[package.extras] +cchardet = ["cchardet"] +chardet = ["chardet"] +charset-normalizer = ["charset-normalizer"] +html5lib = ["html5lib"] +lxml = ["lxml"] + +[[package]] +name = "black" +version = "24.4.0" +description = "The uncompromising code formatter." +optional = false +python-versions = ">=3.8" +files = [ + {file = "black-24.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6ad001a9ddd9b8dfd1b434d566be39b1cd502802c8d38bbb1ba612afda2ef436"}, + {file = "black-24.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3a3a092b8b756c643fe45f4624dbd5a389f770a4ac294cf4d0fce6af86addaf"}, + {file = "black-24.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dae79397f367ac8d7adb6c779813328f6d690943f64b32983e896bcccd18cbad"}, + {file = "black-24.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:71d998b73c957444fb7c52096c3843875f4b6b47a54972598741fe9a7f737fcb"}, + {file = "black-24.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8e5537f456a22cf5cfcb2707803431d2feeb82ab3748ade280d6ccd0b40ed2e8"}, + {file = "black-24.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:64e60a7edd71fd542a10a9643bf369bfd2644de95ec71e86790b063aa02ff745"}, + {file = "black-24.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cd5b4f76056cecce3e69b0d4c228326d2595f506797f40b9233424e2524c070"}, + {file = "black-24.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:64578cf99b6b46a6301bc28bdb89f9d6f9b592b1c5837818a177c98525dbe397"}, + {file = "black-24.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f95cece33329dc4aa3b0e1a771c41075812e46cf3d6e3f1dfe3d91ff09826ed2"}, + {file = "black-24.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4396ca365a4310beef84d446ca5016f671b10f07abdba3e4e4304218d2c71d33"}, + {file = "black-24.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44d99dfdf37a2a00a6f7a8dcbd19edf361d056ee51093b2445de7ca09adac965"}, + {file = "black-24.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:21f9407063ec71c5580b8ad975653c66508d6a9f57bd008bb8691d273705adcd"}, + {file = "black-24.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:652e55bb722ca026299eb74e53880ee2315b181dfdd44dca98e43448620ddec1"}, + {file = "black-24.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7f2966b9b2b3b7104fca9d75b2ee856fe3fdd7ed9e47c753a4bb1a675f2caab8"}, + {file = "black-24.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bb9ca06e556a09f7f7177bc7cb604e5ed2d2df1e9119e4f7d2f1f7071c32e5d"}, + {file = "black-24.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:d4e71cdebdc8efeb6deaf5f2deb28325f8614d48426bed118ecc2dcaefb9ebf3"}, + {file = "black-24.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6644f97a7ef6f401a150cca551a1ff97e03c25d8519ee0bbc9b0058772882665"}, + {file = "black-24.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:75a2d0b4f5eb81f7eebc31f788f9830a6ce10a68c91fbe0fade34fff7a2836e6"}, + {file = "black-24.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb949f56a63c5e134dfdca12091e98ffb5fd446293ebae123d10fc1abad00b9e"}, + {file = "black-24.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:7852b05d02b5b9a8c893ab95863ef8986e4dda29af80bbbda94d7aee1abf8702"}, + {file = "black-24.4.0-py3-none-any.whl", hash = "sha256:74eb9b5420e26b42c00a3ff470dc0cd144b80a766128b1771d07643165e08d0e"}, + {file = "black-24.4.0.tar.gz", hash = "sha256:f07b69fda20578367eaebbd670ff8fc653ab181e1ff95d84497f9fa20e7d0641"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "bracex" +version = "2.4" +description = "Bash style brace expander." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + +[[package]] +name = "cachetools" +version = "5.3.3" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "cffi" +version = "1.16.0" +description = "Foreign Function Interface for Python calling C code." +optional = true +python-versions = ">=3.8" +files = [ + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, + {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, + {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, + {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, + {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "chardet" +version = "5.2.0" +description = "Universal encoding detector for Python 3" +optional = false +python-versions = ">=3.7" +files = [ + {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"}, + {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "cohere" +version = "4.21" +description = "" +optional = true +python-versions = ">=3.7,<4.0" +files = [ + {file = "cohere-4.21-py3-none-any.whl", hash = "sha256:5eb81db62e78b3156e734421cc3e657054f9d9f1d68b9f38cf48fe3a8ae40dbc"}, + {file = "cohere-4.21.tar.gz", hash = "sha256:f611438f409dfc5d5a0a153a585349f5a80b169c7102b5994d9999ecf8440866"}, +] + +[package.dependencies] +aiohttp = ">=3.0,<4.0" +backoff = ">=2.0,<3.0" +fastavro = {version = "1.8.2", markers = "python_version >= \"3.8\""} +importlib_metadata = ">=6.0,<7.0" +requests = ">=2.25.0,<3.0.0" +urllib3 = ">=1.26,<3" + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "contourpy" +version = "1.2.1" +description = "Python library for calculating contours of 2D quadrilateral grids" +optional = true +python-versions = ">=3.9" +files = [ + {file = "contourpy-1.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bd7c23df857d488f418439686d3b10ae2fbf9bc256cd045b37a8c16575ea1040"}, + {file = "contourpy-1.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5b9eb0ca724a241683c9685a484da9d35c872fd42756574a7cfbf58af26677fd"}, + {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c75507d0a55378240f781599c30e7776674dbaf883a46d1c90f37e563453480"}, + {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11959f0ce4a6f7b76ec578576a0b61a28bdc0696194b6347ba3f1c53827178b9"}, + {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eb3315a8a236ee19b6df481fc5f997436e8ade24a9f03dfdc6bd490fea20c6da"}, + {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39f3ecaf76cd98e802f094e0d4fbc6dc9c45a8d0c4d185f0f6c2234e14e5f75b"}, + {file = "contourpy-1.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:94b34f32646ca0414237168d68a9157cb3889f06b096612afdd296003fdd32fd"}, + {file = "contourpy-1.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:457499c79fa84593f22454bbd27670227874cd2ff5d6c84e60575c8b50a69619"}, + {file = "contourpy-1.2.1-cp310-cp310-win32.whl", hash = "sha256:ac58bdee53cbeba2ecad824fa8159493f0bf3b8ea4e93feb06c9a465d6c87da8"}, + {file = "contourpy-1.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:9cffe0f850e89d7c0012a1fb8730f75edd4320a0a731ed0c183904fe6ecfc3a9"}, + {file = "contourpy-1.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6022cecf8f44e36af10bd9118ca71f371078b4c168b6e0fab43d4a889985dbb5"}, + {file = "contourpy-1.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ef5adb9a3b1d0c645ff694f9bca7702ec2c70f4d734f9922ea34de02294fdf72"}, + {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6150ffa5c767bc6332df27157d95442c379b7dce3a38dff89c0f39b63275696f"}, + {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c863140fafc615c14a4bf4efd0f4425c02230eb8ef02784c9a156461e62c965"}, + {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:00e5388f71c1a0610e6fe56b5c44ab7ba14165cdd6d695429c5cd94021e390b2"}, + {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4492d82b3bc7fbb7e3610747b159869468079fe149ec5c4d771fa1f614a14df"}, + {file = "contourpy-1.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:49e70d111fee47284d9dd867c9bb9a7058a3c617274900780c43e38d90fe1205"}, + {file = "contourpy-1.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b59c0ffceff8d4d3996a45f2bb6f4c207f94684a96bf3d9728dbb77428dd8cb8"}, + {file = "contourpy-1.2.1-cp311-cp311-win32.whl", hash = "sha256:7b4182299f251060996af5249c286bae9361fa8c6a9cda5efc29fe8bfd6062ec"}, + {file = "contourpy-1.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2855c8b0b55958265e8b5888d6a615ba02883b225f2227461aa9127c578a4922"}, + {file = "contourpy-1.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:62828cada4a2b850dbef89c81f5a33741898b305db244904de418cc957ff05dc"}, + {file = "contourpy-1.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:309be79c0a354afff9ff7da4aaed7c3257e77edf6c1b448a779329431ee79d7e"}, + {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e785e0f2ef0d567099b9ff92cbfb958d71c2d5b9259981cd9bee81bd194c9a4"}, + {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1cac0a8f71a041aa587410424ad46dfa6a11f6149ceb219ce7dd48f6b02b87a7"}, + {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af3f4485884750dddd9c25cb7e3915d83c2db92488b38ccb77dd594eac84c4a0"}, + {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ce6889abac9a42afd07a562c2d6d4b2b7134f83f18571d859b25624a331c90b"}, + {file = "contourpy-1.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a1eea9aecf761c661d096d39ed9026574de8adb2ae1c5bd7b33558af884fb2ce"}, + {file = "contourpy-1.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:187fa1d4c6acc06adb0fae5544c59898ad781409e61a926ac7e84b8f276dcef4"}, + {file = "contourpy-1.2.1-cp312-cp312-win32.whl", hash = "sha256:c2528d60e398c7c4c799d56f907664673a807635b857df18f7ae64d3e6ce2d9f"}, + {file = "contourpy-1.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:1a07fc092a4088ee952ddae19a2b2a85757b923217b7eed584fdf25f53a6e7ce"}, + {file = "contourpy-1.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bb6834cbd983b19f06908b45bfc2dad6ac9479ae04abe923a275b5f48f1a186b"}, + {file = "contourpy-1.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1d59e739ab0e3520e62a26c60707cc3ab0365d2f8fecea74bfe4de72dc56388f"}, + {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd3db01f59fdcbce5b22afad19e390260d6d0222f35a1023d9adc5690a889364"}, + {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a12a813949e5066148712a0626895c26b2578874e4cc63160bb007e6df3436fe"}, + {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe0ccca550bb8e5abc22f530ec0466136379c01321fd94f30a22231e8a48d985"}, + {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1d59258c3c67c865435d8fbeb35f8c59b8bef3d6f46c1f29f6123556af28445"}, + {file = "contourpy-1.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f32c38afb74bd98ce26de7cc74a67b40afb7b05aae7b42924ea990d51e4dac02"}, + {file = "contourpy-1.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d31a63bc6e6d87f77d71e1abbd7387ab817a66733734883d1fc0021ed9bfa083"}, + {file = "contourpy-1.2.1-cp39-cp39-win32.whl", hash = "sha256:ddcb8581510311e13421b1f544403c16e901c4e8f09083c881fab2be80ee31ba"}, + {file = "contourpy-1.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:10a37ae557aabf2509c79715cd20b62e4c7c28b8cd62dd7d99e5ed3ce28c3fd9"}, + {file = "contourpy-1.2.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a31f94983fecbac95e58388210427d68cd30fe8a36927980fab9c20062645609"}, + {file = "contourpy-1.2.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef2b055471c0eb466033760a521efb9d8a32b99ab907fc8358481a1dd29e3bd3"}, + {file = "contourpy-1.2.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b33d2bc4f69caedcd0a275329eb2198f560b325605810895627be5d4b876bf7f"}, + {file = "contourpy-1.2.1.tar.gz", hash = "sha256:4d8908b3bee1c889e547867ca4cdc54e5ab6be6d3e078556814a22457f49423c"}, +] + +[package.dependencies] +numpy = ">=1.20" + +[package.extras] +bokeh = ["bokeh", "selenium"] +docs = ["furo", "sphinx (>=7.2)", "sphinx-copybutton"] +mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.8.0)", "types-Pillow"] +test = ["Pillow", "contourpy[test-no-images]", "matplotlib"] +test-no-images = ["pytest", "pytest-cov", "pytest-xdist", "wurlitzer"] + +[[package]] +name = "coverage" +version = "7.4.4" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0be5efd5127542ef31f165de269f77560d6cdef525fffa446de6f7e9186cfb2"}, + {file = "coverage-7.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ccd341521be3d1b3daeb41960ae94a5e87abe2f46f17224ba5d6f2b8398016cf"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fa497a8ab37784fbb20ab699c246053ac294d13fc7eb40ec007a5043ec91f8"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1a93009cb80730c9bca5d6d4665494b725b6e8e157c1cb7f2db5b4b122ea562"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:690db6517f09336559dc0b5f55342df62370a48f5469fabf502db2c6d1cffcd2"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:09c3255458533cb76ef55da8cc49ffab9e33f083739c8bd4f58e79fecfe288f7"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ce1415194b4a6bd0cdcc3a1dfbf58b63f910dcb7330fe15bdff542c56949f87"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b91cbc4b195444e7e258ba27ac33769c41b94967919f10037e6355e998af255c"}, + {file = "coverage-7.4.4-cp310-cp310-win32.whl", hash = "sha256:598825b51b81c808cb6f078dcb972f96af96b078faa47af7dfcdf282835baa8d"}, + {file = "coverage-7.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:09ef9199ed6653989ebbcaacc9b62b514bb63ea2f90256e71fea3ed74bd8ff6f"}, + {file = "coverage-7.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f9f50e7ef2a71e2fae92774c99170eb8304e3fdf9c8c3c7ae9bab3e7229c5cf"}, + {file = "coverage-7.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:623512f8ba53c422fcfb2ce68362c97945095b864cda94a92edbaf5994201083"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0513b9508b93da4e1716744ef6ebc507aff016ba115ffe8ecff744d1322a7b63"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40209e141059b9370a2657c9b15607815359ab3ef9918f0196b6fccce8d3230f"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a2b2b78c78293782fd3767d53e6474582f62443d0504b1554370bde86cc8227"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:73bfb9c09951125d06ee473bed216e2c3742f530fc5acc1383883125de76d9cd"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f384c3cc76aeedce208643697fb3e8437604b512255de6d18dae3f27655a384"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:54eb8d1bf7cacfbf2a3186019bcf01d11c666bd495ed18717162f7eb1e9dd00b"}, + {file = "coverage-7.4.4-cp311-cp311-win32.whl", hash = "sha256:cac99918c7bba15302a2d81f0312c08054a3359eaa1929c7e4b26ebe41e9b286"}, + {file = "coverage-7.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:b14706df8b2de49869ae03a5ccbc211f4041750cd4a66f698df89d44f4bd30ec"}, + {file = "coverage-7.4.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:201bef2eea65e0e9c56343115ba3814e896afe6d36ffd37bab783261db430f76"}, + {file = "coverage-7.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:41c9c5f3de16b903b610d09650e5e27adbfa7f500302718c9ffd1c12cf9d6818"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d898fe162d26929b5960e4e138651f7427048e72c853607f2b200909794ed978"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ea79bb50e805cd6ac058dfa3b5c8f6c040cb87fe83de10845857f5535d1db70"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce4b94265ca988c3f8e479e741693d143026632672e3ff924f25fab50518dd51"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:00838a35b882694afda09f85e469c96367daa3f3f2b097d846a7216993d37f4c"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fdfafb32984684eb03c2d83e1e51f64f0906b11e64482df3c5db936ce3839d48"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:69eb372f7e2ece89f14751fbcbe470295d73ed41ecd37ca36ed2eb47512a6ab9"}, + {file = "coverage-7.4.4-cp312-cp312-win32.whl", hash = "sha256:137eb07173141545e07403cca94ab625cc1cc6bc4c1e97b6e3846270e7e1fea0"}, + {file = "coverage-7.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:d71eec7d83298f1af3326ce0ff1d0ea83c7cb98f72b577097f9083b20bdaf05e"}, + {file = "coverage-7.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d5ae728ff3b5401cc320d792866987e7e7e880e6ebd24433b70a33b643bb0384"}, + {file = "coverage-7.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc4f1358cb0c78edef3ed237ef2c86056206bb8d9140e73b6b89fbcfcbdd40e1"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8130a2aa2acb8788e0b56938786c33c7c98562697bf9f4c7d6e8e5e3a0501e4a"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf271892d13e43bc2b51e6908ec9a6a5094a4df1d8af0bfc360088ee6c684409"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4cdc86d54b5da0df6d3d3a2f0b710949286094c3a6700c21e9015932b81447e"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ae71e7ddb7a413dd60052e90528f2f65270aad4b509563af6d03d53e979feafd"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:38dd60d7bf242c4ed5b38e094baf6401faa114fc09e9e6632374388a404f98e7"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa5b1c1bfc28384f1f53b69a023d789f72b2e0ab1b3787aae16992a7ca21056c"}, + {file = "coverage-7.4.4-cp38-cp38-win32.whl", hash = "sha256:dfa8fe35a0bb90382837b238fff375de15f0dcdb9ae68ff85f7a63649c98527e"}, + {file = "coverage-7.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:b2991665420a803495e0b90a79233c1433d6ed77ef282e8e152a324bbbc5e0c8"}, + {file = "coverage-7.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b799445b9f7ee8bf299cfaed6f5b226c0037b74886a4e11515e569b36fe310d"}, + {file = "coverage-7.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b4d33f418f46362995f1e9d4f3a35a1b6322cb959c31d88ae56b0298e1c22357"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aadacf9a2f407a4688d700e4ebab33a7e2e408f2ca04dbf4aef17585389eff3e"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c95949560050d04d46b919301826525597f07b33beba6187d04fa64d47ac82e"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff7687ca3d7028d8a5f0ebae95a6e4827c5616b31a4ee1192bdfde697db110d4"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5fc1de20b2d4a061b3df27ab9b7c7111e9a710f10dc2b84d33a4ab25065994ec"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c74880fc64d4958159fbd537a091d2a585448a8f8508bf248d72112723974cbd"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:742a76a12aa45b44d236815d282b03cfb1de3b4323f3e4ec933acfae08e54ade"}, + {file = "coverage-7.4.4-cp39-cp39-win32.whl", hash = "sha256:d89d7b2974cae412400e88f35d86af72208e1ede1a541954af5d944a8ba46c57"}, + {file = "coverage-7.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:9ca28a302acb19b6af89e90f33ee3e1906961f94b54ea37de6737b7ca9d8827c"}, + {file = "coverage-7.4.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:b2c5edc4ac10a7ef6605a966c58929ec6c1bd0917fb8c15cb3363f65aa40e677"}, + {file = "coverage-7.4.4.tar.gz", hash = "sha256:c901df83d097649e257e803be22592aedfd5182f07b3cc87d640bbb9afd50f49"}, +] + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "cryptography" +version = "42.0.5" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +optional = true +python-versions = ">=3.7" +files = [ + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, + {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, + {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, + {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, + {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, + {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = "sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, + {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, +] + +[package.dependencies] +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] +nox = ["nox"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "cycler" +version = "0.12.1" +description = "Composable style cycles" +optional = true +python-versions = ">=3.8" +files = [ + {file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"}, + {file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"}, +] + +[package.extras] +docs = ["ipython", "matplotlib", "numpydoc", "sphinx"] +tests = ["pytest", "pytest-cov", "pytest-xdist"] + +[[package]] +name = "dataclasses-json" +version = "0.6.4" +description = "Easily serialize dataclasses to and from JSON." +optional = true +python-versions = ">=3.7,<4.0" +files = [ + {file = "dataclasses_json-0.6.4-py3-none-any.whl", hash = "sha256:f90578b8a3177f7552f4e1a6e535e84293cd5da421fcce0642d49c0d7bdf8df2"}, + {file = "dataclasses_json-0.6.4.tar.gz", hash = "sha256:73696ebf24936560cca79a2430cbc4f3dd23ac7bf46ed17f38e5e5e7657a6377"}, +] + +[package.dependencies] +marshmallow = ">=3.18.0,<4.0.0" +typing-inspect = ">=0.4.0,<1" + +[[package]] +name = "datamodel-code-generator" +version = "0.11.19" +description = "Datamodel Code Generator" +optional = false +python-versions = ">=3.6.1,<4.0.0" +files = [ + {file = "datamodel-code-generator-0.11.19.tar.gz", hash = "sha256:39874c017bbedc5fc9b93c332f3f213d299c9af2995e3870aaa2db8a661098e2"}, + {file = "datamodel_code_generator-0.11.19-py3-none-any.whl", hash = "sha256:26a62a1f99c7c8148b808e3e67e82c762cc9f7bbe036fb4c2798352460d68e38"}, +] + +[package.dependencies] +argcomplete = ">=1.10,<3.0" +black = ">=19.10b0" +genson = ">=1.2.1,<2.0" +inflect = ">=4.1.0,<6.0" +isort = ">=4.3.21,<6.0" +jinja2 = ">=2.10.1,<4.0" +openapi-spec-validator = ">=0.2.8,<0.4" +prance = ">=0.18.2,<1.0" +pydantic = [ + {version = ">=1.5.1,<2.0", extras = ["email"], markers = "python_version < \"3.10\""}, + {version = ">=1.9.0,<2.0", extras = ["email"], markers = "python_version >= \"3.10\""}, +] +PySnooper = ">=0.4.1,<2.0.0" +toml = ">=0.10.0,<1.0.0" +typed-ast = [ + {version = ">=1.4.2", markers = "python_full_version < \"3.9.8\""}, + {version = ">=1.5.0", markers = "python_full_version >= \"3.9.8\""}, +] + +[package.extras] +http = ["httpx"] + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "dnspython" +version = "2.6.1" +description = "DNS toolkit" +optional = false +python-versions = ">=3.8" +files = [ + {file = "dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"}, + {file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"}, +] + +[package.extras] +dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "sphinx (>=7.2.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] +dnssec = ["cryptography (>=41)"] +doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] +doq = ["aioquic (>=0.9.25)"] +idna = ["idna (>=3.6)"] +trio = ["trio (>=0.23)"] +wmi = ["wmi (>=1.5.1)"] + +[[package]] +name = "docutils" +version = "0.17.1" +description = "Docutils -- Python Documentation Utilities" +optional = true +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "docutils-0.17.1-py2.py3-none-any.whl", hash = "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61"}, + {file = "docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"}, +] + +[[package]] +name = "dpath" +version = "2.0.8" +description = "Filesystem-like pathing and searching for dictionaries" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, + {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, +] + +[[package]] +name = "email-validator" +version = "2.1.1" +description = "A robust email address syntax and deliverability validation library." +optional = false +python-versions = ">=3.8" +files = [ + {file = "email_validator-2.1.1-py3-none-any.whl", hash = "sha256:97d882d174e2a65732fb43bfce81a3a834cbc1bde8bf419e30ef5ea976370a05"}, + {file = "email_validator-2.1.1.tar.gz", hash = "sha256:200a70680ba08904be6d1eef729205cc0d687634399a5924d842533efb824b84"}, +] + +[package.dependencies] +dnspython = ">=2.0.0" +idna = ">=2.0.0" + +[[package]] +name = "emoji" +version = "2.11.0" +description = "Emoji for Python" +optional = true +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +files = [ + {file = "emoji-2.11.0-py2.py3-none-any.whl", hash = "sha256:63fc9107f06c6c2e48e5078ce9575cef98518f5ac09474f6148a43e989989582"}, + {file = "emoji-2.11.0.tar.gz", hash = "sha256:772eaa30f4e0b1ce95148a092df4c7dc97644532c03225326b0fd05e8a9f72a3"}, +] + +[package.extras] +dev = ["coverage", "coveralls", "pytest"] + +[[package]] +name = "et-xmlfile" +version = "1.1.0" +description = "An implementation of lxml.xmlfile for the standard library" +optional = true +python-versions = ">=3.6" +files = [ + {file = "et_xmlfile-1.1.0-py3-none-any.whl", hash = "sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"}, + {file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "fastavro" +version = "1.8.2" +description = "Fast read/write of AVRO files" +optional = true +python-versions = ">=3.8" +files = [ + {file = "fastavro-1.8.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:0e08964b2e9a455d831f2557402a683d4c4d45206f2ab9ade7c69d3dc14e0e58"}, + {file = "fastavro-1.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:401a70b1e5c7161420c6019e0c8afa88f7c8a373468591f5ec37639a903c2509"}, + {file = "fastavro-1.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef1ed3eaa4240c05698d02d8d0c010b9a03780eda37b492da6cd4c9d37e04ec"}, + {file = "fastavro-1.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:543185a672ff6306beb329b57a7b8a3a2dd1eb21a5ccc530150623d58d48bb98"}, + {file = "fastavro-1.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ffbf8bae1edb50fe7beeffc3afa8e684686550c2e5d31bf01c25cfa213f581e1"}, + {file = "fastavro-1.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:bb545eb9d876bc7b785e27e98e7720ada7eee7d7a1729798d2ed51517f13500a"}, + {file = "fastavro-1.8.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2b837d3038c651046252bc92c1b9899bf21c7927a148a1ff89599c36c2a331ca"}, + {file = "fastavro-1.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3510e96c0a47e4e914bd1a29c954eb662bfa24849ad92e597cb97cc79f21af7"}, + {file = "fastavro-1.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccc0e74f2c2ab357f39bb73d67fcdb6dc10e23fdbbd399326139f72ec0fb99a3"}, + {file = "fastavro-1.8.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:add51c70d0ab1175601c75cd687bbe9d16ae312cd8899b907aafe0d79ee2bc1d"}, + {file = "fastavro-1.8.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d9e2662f57e6453e9a2c9fb4f54b2a9e62e3e46f5a412ac00558112336d23883"}, + {file = "fastavro-1.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:fea75cf53a93c56dd56e68abce8d314ef877b27451c870cd7ede7582d34c08a7"}, + {file = "fastavro-1.8.2-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:f489020bb8664c2737c03457ad5dbd490579ddab6f0a7b5c17fecfe982715a89"}, + {file = "fastavro-1.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a547625c138efd5e61300119241041906ee8cb426fc7aa789900f87af7ed330d"}, + {file = "fastavro-1.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53beb458f30c9ad4aa7bff4a42243ff990ffb713b6ce0cd9b360cbc3d648fe52"}, + {file = "fastavro-1.8.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7b1b2cbd2dd851452306beed0ab9bdaeeab1cc8ad46f84b47cd81eeaff6dd6b8"}, + {file = "fastavro-1.8.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d29e9baee0b2f37ecd09bde3b487cf900431fd548c85be3e4fe1b9a0b2a917f1"}, + {file = "fastavro-1.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:66e132c710663230292bc63e2cb79cf95b16ccb94a5fc99bb63694b24e312fc5"}, + {file = "fastavro-1.8.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:38aca63ce604039bcdf2edd14912d00287bdbf8b76f9aa42b28e6ca0bf950092"}, + {file = "fastavro-1.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9787835f6449ee94713e7993a700432fce3763024791ffa8a58dc91ef9d1f950"}, + {file = "fastavro-1.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:536cb448bc83811056be02749fd9df37a69621678f02597d272970a769e9b40c"}, + {file = "fastavro-1.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e9d5027cf7d9968f8f819958b41bfedb933323ea6d6a0485eefacaa1afd91f54"}, + {file = "fastavro-1.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:792adfc0c80c7f1109e0ab4b0decef20691fdf0a45091d397a0563872eb56d42"}, + {file = "fastavro-1.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:650b22766259f7dd7519dfa4e4658f0e233c319efa130b9cf0c36a500e09cc57"}, + {file = "fastavro-1.8.2.tar.gz", hash = "sha256:ab9d9226d4b66b6b3d0661a57cd45259b0868fed1c0cd4fac95249b9e0973320"}, +] + +[package.extras] +codecs = ["lz4", "python-snappy", "zstandard"] +lz4 = ["lz4"] +snappy = ["python-snappy"] +zstandard = ["zstandard"] + +[[package]] +name = "filetype" +version = "1.2.0" +description = "Infer file type and MIME type of any file/buffer. No external dependencies." +optional = true +python-versions = "*" +files = [ + {file = "filetype-1.2.0-py2.py3-none-any.whl", hash = "sha256:7ce71b6880181241cf7ac8697a2f1eb6a8bd9b429f7ad6d27b8db9ba5f1c2d25"}, + {file = "filetype-1.2.0.tar.gz", hash = "sha256:66b56cd6474bf41d8c54660347d37afcc3f7d1970648de365c102ef77548aadb"}, +] + +[[package]] +name = "flake8" +version = "6.1.0" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.8.1" +files = [ + {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"}, + {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.11.0,<2.12.0" +pyflakes = ">=3.1.0,<3.2.0" + +[[package]] +name = "fonttools" +version = "4.51.0" +description = "Tools to manipulate font files" +optional = true +python-versions = ">=3.8" +files = [ + {file = "fonttools-4.51.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:84d7751f4468dd8cdd03ddada18b8b0857a5beec80bce9f435742abc9a851a74"}, + {file = "fonttools-4.51.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8b4850fa2ef2cfbc1d1f689bc159ef0f45d8d83298c1425838095bf53ef46308"}, + {file = "fonttools-4.51.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5b48a1121117047d82695d276c2af2ee3a24ffe0f502ed581acc2673ecf1037"}, + {file = "fonttools-4.51.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:180194c7fe60c989bb627d7ed5011f2bef1c4d36ecf3ec64daec8302f1ae0716"}, + {file = "fonttools-4.51.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:96a48e137c36be55e68845fc4284533bda2980f8d6f835e26bca79d7e2006438"}, + {file = "fonttools-4.51.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:806e7912c32a657fa39d2d6eb1d3012d35f841387c8fc6cf349ed70b7c340039"}, + {file = "fonttools-4.51.0-cp310-cp310-win32.whl", hash = "sha256:32b17504696f605e9e960647c5f64b35704782a502cc26a37b800b4d69ff3c77"}, + {file = "fonttools-4.51.0-cp310-cp310-win_amd64.whl", hash = "sha256:c7e91abdfae1b5c9e3a543f48ce96013f9a08c6c9668f1e6be0beabf0a569c1b"}, + {file = "fonttools-4.51.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a8feca65bab31479d795b0d16c9a9852902e3a3c0630678efb0b2b7941ea9c74"}, + {file = "fonttools-4.51.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ac27f436e8af7779f0bb4d5425aa3535270494d3bc5459ed27de3f03151e4c2"}, + {file = "fonttools-4.51.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e19bd9e9964a09cd2433a4b100ca7f34e34731e0758e13ba9a1ed6e5468cc0f"}, + {file = "fonttools-4.51.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2b92381f37b39ba2fc98c3a45a9d6383bfc9916a87d66ccb6553f7bdd129097"}, + {file = "fonttools-4.51.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5f6bc991d1610f5c3bbe997b0233cbc234b8e82fa99fc0b2932dc1ca5e5afec0"}, + {file = "fonttools-4.51.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9696fe9f3f0c32e9a321d5268208a7cc9205a52f99b89479d1b035ed54c923f1"}, + {file = "fonttools-4.51.0-cp311-cp311-win32.whl", hash = "sha256:3bee3f3bd9fa1d5ee616ccfd13b27ca605c2b4270e45715bd2883e9504735034"}, + {file = "fonttools-4.51.0-cp311-cp311-win_amd64.whl", hash = "sha256:0f08c901d3866a8905363619e3741c33f0a83a680d92a9f0e575985c2634fcc1"}, + {file = "fonttools-4.51.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4060acc2bfa2d8e98117828a238889f13b6f69d59f4f2d5857eece5277b829ba"}, + {file = "fonttools-4.51.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1250e818b5f8a679ad79660855528120a8f0288f8f30ec88b83db51515411fcc"}, + {file = "fonttools-4.51.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76f1777d8b3386479ffb4a282e74318e730014d86ce60f016908d9801af9ca2a"}, + {file = "fonttools-4.51.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b5ad456813d93b9c4b7ee55302208db2b45324315129d85275c01f5cb7e61a2"}, + {file = "fonttools-4.51.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:68b3fb7775a923be73e739f92f7e8a72725fd333eab24834041365d2278c3671"}, + {file = "fonttools-4.51.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8e2f1a4499e3b5ee82c19b5ee57f0294673125c65b0a1ff3764ea1f9db2f9ef5"}, + {file = "fonttools-4.51.0-cp312-cp312-win32.whl", hash = "sha256:278e50f6b003c6aed19bae2242b364e575bcb16304b53f2b64f6551b9c000e15"}, + {file = "fonttools-4.51.0-cp312-cp312-win_amd64.whl", hash = "sha256:b3c61423f22165541b9403ee39874dcae84cd57a9078b82e1dce8cb06b07fa2e"}, + {file = "fonttools-4.51.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:1621ee57da887c17312acc4b0e7ac30d3a4fb0fec6174b2e3754a74c26bbed1e"}, + {file = "fonttools-4.51.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d9298be7a05bb4801f558522adbe2feea1b0b103d5294ebf24a92dd49b78e5"}, + {file = "fonttools-4.51.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee1af4be1c5afe4c96ca23badd368d8dc75f611887fb0c0dac9f71ee5d6f110e"}, + {file = "fonttools-4.51.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c18b49adc721a7d0b8dfe7c3130c89b8704baf599fb396396d07d4aa69b824a1"}, + {file = "fonttools-4.51.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de7c29bdbdd35811f14493ffd2534b88f0ce1b9065316433b22d63ca1cd21f14"}, + {file = "fonttools-4.51.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cadf4e12a608ef1d13e039864f484c8a968840afa0258b0b843a0556497ea9ed"}, + {file = "fonttools-4.51.0-cp38-cp38-win32.whl", hash = "sha256:aefa011207ed36cd280babfaa8510b8176f1a77261833e895a9d96e57e44802f"}, + {file = "fonttools-4.51.0-cp38-cp38-win_amd64.whl", hash = "sha256:865a58b6e60b0938874af0968cd0553bcd88e0b2cb6e588727117bd099eef836"}, + {file = "fonttools-4.51.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:60a3409c9112aec02d5fb546f557bca6efa773dcb32ac147c6baf5f742e6258b"}, + {file = "fonttools-4.51.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f7e89853d8bea103c8e3514b9f9dc86b5b4120afb4583b57eb10dfa5afbe0936"}, + {file = "fonttools-4.51.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56fc244f2585d6c00b9bcc59e6593e646cf095a96fe68d62cd4da53dd1287b55"}, + {file = "fonttools-4.51.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d145976194a5242fdd22df18a1b451481a88071feadf251221af110ca8f00ce"}, + {file = "fonttools-4.51.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5b8cab0c137ca229433570151b5c1fc6af212680b58b15abd797dcdd9dd5051"}, + {file = "fonttools-4.51.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:54dcf21a2f2d06ded676e3c3f9f74b2bafded3a8ff12f0983160b13e9f2fb4a7"}, + {file = "fonttools-4.51.0-cp39-cp39-win32.whl", hash = "sha256:0118ef998a0699a96c7b28457f15546815015a2710a1b23a7bf6c1be60c01636"}, + {file = "fonttools-4.51.0-cp39-cp39-win_amd64.whl", hash = "sha256:599bdb75e220241cedc6faebfafedd7670335d2e29620d207dd0378a4e9ccc5a"}, + {file = "fonttools-4.51.0-py3-none-any.whl", hash = "sha256:15c94eeef6b095831067f72c825eb0e2d48bb4cea0647c1b05c981ecba2bf39f"}, + {file = "fonttools-4.51.0.tar.gz", hash = "sha256:dc0673361331566d7a663d7ce0f6fdcbfbdc1f59c6e3ed1165ad7202ca183c68"}, +] + +[package.extras] +all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "pycairo", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0)", "xattr", "zopfli (>=0.1.4)"] +graphite = ["lz4 (>=1.7.4.2)"] +interpolatable = ["munkres", "pycairo", "scipy"] +lxml = ["lxml (>=4.0)"] +pathops = ["skia-pathops (>=0.5.0)"] +plot = ["matplotlib"] +repacker = ["uharfbuzz (>=0.23.0)"] +symfont = ["sympy"] +type1 = ["xattr"] +ufo = ["fs (>=2.2.0,<3)"] +unicode = ["unicodedata2 (>=15.1.0)"] +woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] + +[[package]] +name = "freezegun" +version = "1.4.0" +description = "Let your Python tests travel through time" +optional = false +python-versions = ">=3.7" +files = [ + {file = "freezegun-1.4.0-py3-none-any.whl", hash = "sha256:55e0fc3c84ebf0a96a5aa23ff8b53d70246479e9a68863f1fcac5a3e52f19dd6"}, + {file = "freezegun-1.4.0.tar.gz", hash = "sha256:10939b0ba0ff5adaecf3b06a5c2f73071d9678e507c5eaedb23c761d56ac774b"}, +] + +[package.dependencies] +python-dateutil = ">=2.7" + +[[package]] +name = "frozenlist" +version = "1.4.1" +description = "A list-like structure which implements collections.abc.MutableSequence" +optional = true +python-versions = ">=3.8" +files = [ + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, + {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, + {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, + {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, + {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, + {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, + {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, + {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, + {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, + {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, + {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, + {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, + {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, +] + +[[package]] +name = "genson" +version = "1.2.2" +description = "GenSON is a powerful, user-friendly JSON Schema generator." +optional = false +python-versions = "*" +files = [ + {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, +] + +[[package]] +name = "greenlet" +version = "3.0.3" +description = "Lightweight in-process concurrent programming" +optional = true +python-versions = ">=3.7" +files = [ + {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, + {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, + {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, + {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, + {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, + {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, + {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, + {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, + {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, + {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, + {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, + {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, + {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, + {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, + {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, + {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil"] + +[[package]] +name = "idna" +version = "3.7" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, +] + +[[package]] +name = "imagesize" +version = "1.4.1" +description = "Getting image size from png/jpeg/jpeg2000/gif file" +optional = true +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, + {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, +] + +[[package]] +name = "importlib-metadata" +version = "6.11.0" +description = "Read metadata from Python packages" +optional = true +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-6.11.0-py3-none-any.whl", hash = "sha256:f0afba6205ad8f8947c7d338b5342d5db2afbfd82f9cbef7879a9539cc12eb9b"}, + {file = "importlib_metadata-6.11.0.tar.gz", hash = "sha256:1231cf92d825c9e03cfc4da076a16de6422c863558229ea0b22b675657463443"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] + +[[package]] +name = "importlib-resources" +version = "6.4.0" +description = "Read resources from Python packages" +optional = true +python-versions = ">=3.8" +files = [ + {file = "importlib_resources-6.4.0-py3-none-any.whl", hash = "sha256:50d10f043df931902d4194ea07ec57960f66a80449ff867bfe782b4c486ba78c"}, + {file = "importlib_resources-6.4.0.tar.gz", hash = "sha256:cdb2b453b8046ca4e3798eb1d84f3cce1446a0e8e7b5ef4efb600f19fc398145"}, +] + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["jaraco.test (>=5.4)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] + +[[package]] +name = "inflect" +version = "5.6.2" +description = "Correctly generate plurals, singular nouns, ordinals, indefinite articles; convert numbers to words" +optional = false +python-versions = ">=3.7" +files = [ + {file = "inflect-5.6.2-py3-none-any.whl", hash = "sha256:b45d91a4a28a4e617ff1821117439b06eaa86e2a4573154af0149e9be6687238"}, + {file = "inflect-5.6.2.tar.gz", hash = "sha256:aadc7ed73928f5e014129794bbac03058cca35d0a973a5fc4eb45c7fa26005f9"}, +] + +[package.extras] +docs = ["jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx"] +testing = ["pygments", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "joblib" +version = "1.4.0" +description = "Lightweight pipelining with Python functions" +optional = true +python-versions = ">=3.8" +files = [ + {file = "joblib-1.4.0-py3-none-any.whl", hash = "sha256:42942470d4062537be4d54c83511186da1fc14ba354961a2114da91efa9a4ed7"}, + {file = "joblib-1.4.0.tar.gz", hash = "sha256:1eb0dc091919cd384490de890cb5dfd538410a6d4b3b54eef09fb8c50b409b1c"}, +] + +[[package]] +name = "jsonpatch" +version = "1.33" +description = "Apply JSON-Patches (RFC 6902)" +optional = true +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, + {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, +] + +[package.dependencies] +jsonpointer = ">=1.9" + +[[package]] +name = "jsonpointer" +version = "2.4" +description = "Identify specific nodes in a JSON document (RFC 6901)" +optional = true +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, + {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, +] + +[[package]] +name = "jsonref" +version = "0.2" +description = "An implementation of JSON Reference for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, +] + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0" +setuptools = "*" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] + +[[package]] +name = "kiwisolver" +version = "1.4.5" +description = "A fast implementation of the Cassowary constraint solver" +optional = true +python-versions = ">=3.7" +files = [ + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:05703cf211d585109fcd72207a31bb170a0f22144d68298dc5e61b3c946518af"}, + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:146d14bebb7f1dc4d5fbf74f8a6cb15ac42baadee8912eb84ac0b3b2a3dc6ac3"}, + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ef7afcd2d281494c0a9101d5c571970708ad911d028137cd558f02b851c08b4"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9eaa8b117dc8337728e834b9c6e2611f10c79e38f65157c4c38e9400286f5cb1"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ec20916e7b4cbfb1f12380e46486ec4bcbaa91a9c448b97023fde0d5bbf9e4ff"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39b42c68602539407884cf70d6a480a469b93b81b7701378ba5e2328660c847a"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa12042de0171fad672b6c59df69106d20d5596e4f87b5e8f76df757a7c399aa"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a40773c71d7ccdd3798f6489aaac9eee213d566850a9533f8d26332d626b82c"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:19df6e621f6d8b4b9c4d45f40a66839294ff2bb235e64d2178f7522d9170ac5b"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:83d78376d0d4fd884e2c114d0621624b73d2aba4e2788182d286309ebdeed770"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e391b1f0a8a5a10ab3b9bb6afcfd74f2175f24f8975fb87ecae700d1503cdee0"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:852542f9481f4a62dbb5dd99e8ab7aedfeb8fb6342349a181d4036877410f525"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59edc41b24031bc25108e210c0def6f6c2191210492a972d585a06ff246bb79b"}, + {file = "kiwisolver-1.4.5-cp310-cp310-win32.whl", hash = "sha256:a6aa6315319a052b4ee378aa171959c898a6183f15c1e541821c5c59beaa0238"}, + {file = "kiwisolver-1.4.5-cp310-cp310-win_amd64.whl", hash = "sha256:d0ef46024e6a3d79c01ff13801cb19d0cad7fd859b15037aec74315540acc276"}, + {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:11863aa14a51fd6ec28688d76f1735f8f69ab1fabf388851a595d0721af042f5"}, + {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ab3919a9997ab7ef2fbbed0cc99bb28d3c13e6d4b1ad36e97e482558a91be90"}, + {file = "kiwisolver-1.4.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fcc700eadbbccbf6bc1bcb9dbe0786b4b1cb91ca0dcda336eef5c2beed37b797"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dfdd7c0b105af050eb3d64997809dc21da247cf44e63dc73ff0fd20b96be55a9"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76c6a5964640638cdeaa0c359382e5703e9293030fe730018ca06bc2010c4437"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbea0db94288e29afcc4c28afbf3a7ccaf2d7e027489c449cf7e8f83c6346eb9"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ceec1a6bc6cab1d6ff5d06592a91a692f90ec7505d6463a88a52cc0eb58545da"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:040c1aebeda72197ef477a906782b5ab0d387642e93bda547336b8957c61022e"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f91de7223d4c7b793867797bacd1ee53bfe7359bd70d27b7b58a04efbb9436c8"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:faae4860798c31530dd184046a900e652c95513796ef51a12bc086710c2eec4d"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0157420efcb803e71d1b28e2c287518b8808b7cf1ab8af36718fd0a2c453eb0"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:06f54715b7737c2fecdbf140d1afb11a33d59508a47bf11bb38ecf21dc9ab79f"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fdb7adb641a0d13bdcd4ef48e062363d8a9ad4a182ac7647ec88f695e719ae9f"}, + {file = "kiwisolver-1.4.5-cp311-cp311-win32.whl", hash = "sha256:bb86433b1cfe686da83ce32a9d3a8dd308e85c76b60896d58f082136f10bffac"}, + {file = "kiwisolver-1.4.5-cp311-cp311-win_amd64.whl", hash = "sha256:6c08e1312a9cf1074d17b17728d3dfce2a5125b2d791527f33ffbe805200a355"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:32d5cf40c4f7c7b3ca500f8985eb3fb3a7dfc023215e876f207956b5ea26632a"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f846c260f483d1fd217fe5ed7c173fb109efa6b1fc8381c8b7552c5781756192"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5ff5cf3571589b6d13bfbfd6bcd7a3f659e42f96b5fd1c4830c4cf21d4f5ef45"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7269d9e5f1084a653d575c7ec012ff57f0c042258bf5db0954bf551c158466e7"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da802a19d6e15dffe4b0c24b38b3af68e6c1a68e6e1d8f30148c83864f3881db"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3aba7311af82e335dd1e36ffff68aaca609ca6290c2cb6d821a39aa075d8e3ff"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:763773d53f07244148ccac5b084da5adb90bfaee39c197554f01b286cf869228"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2270953c0d8cdab5d422bee7d2007f043473f9d2999631c86a223c9db56cbd16"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d099e745a512f7e3bbe7249ca835f4d357c586d78d79ae8f1dcd4d8adeb9bda9"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:74db36e14a7d1ce0986fa104f7d5637aea5c82ca6326ed0ec5694280942d1162"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e5bab140c309cb3a6ce373a9e71eb7e4873c70c2dda01df6820474f9889d6d4"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0f114aa76dc1b8f636d077979c0ac22e7cd8f3493abbab152f20eb8d3cda71f3"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:88a2df29d4724b9237fc0c6eaf2a1adae0cdc0b3e9f4d8e7dc54b16812d2d81a"}, + {file = "kiwisolver-1.4.5-cp312-cp312-win32.whl", hash = "sha256:72d40b33e834371fd330fb1472ca19d9b8327acb79a5821d4008391db8e29f20"}, + {file = "kiwisolver-1.4.5-cp312-cp312-win_amd64.whl", hash = "sha256:2c5674c4e74d939b9d91dda0fae10597ac7521768fec9e399c70a1f27e2ea2d9"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3a2b053a0ab7a3960c98725cfb0bf5b48ba82f64ec95fe06f1d06c99b552e130"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cd32d6c13807e5c66a7cbb79f90b553642f296ae4518a60d8d76243b0ad2898"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59ec7b7c7e1a61061850d53aaf8e93db63dce0c936db1fda2658b70e4a1be709"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da4cfb373035def307905d05041c1d06d8936452fe89d464743ae7fb8371078b"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2400873bccc260b6ae184b2b8a4fec0e4082d30648eadb7c3d9a13405d861e89"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1b04139c4236a0f3aff534479b58f6f849a8b351e1314826c2d230849ed48985"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:4e66e81a5779b65ac21764c295087de82235597a2293d18d943f8e9e32746265"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7931d8f1f67c4be9ba1dd9c451fb0eeca1a25b89e4d3f89e828fe12a519b782a"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b3f7e75f3015df442238cca659f8baa5f42ce2a8582727981cbfa15fee0ee205"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:bbf1d63eef84b2e8c89011b7f2235b1e0bf7dacc11cac9431fc6468e99ac77fb"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4c380469bd3f970ef677bf2bcba2b6b0b4d5c75e7a020fb863ef75084efad66f"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-win32.whl", hash = "sha256:9408acf3270c4b6baad483865191e3e582b638b1654a007c62e3efe96f09a9a3"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-win_amd64.whl", hash = "sha256:5b94529f9b2591b7af5f3e0e730a4e0a41ea174af35a4fd067775f9bdfeee01a"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:11c7de8f692fc99816e8ac50d1d1aef4f75126eefc33ac79aac02c099fd3db71"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:53abb58632235cd154176ced1ae8f0d29a6657aa1aa9decf50b899b755bc2b93"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:88b9f257ca61b838b6f8094a62418421f87ac2a1069f7e896c36a7d86b5d4c29"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3195782b26fc03aa9c6913d5bad5aeb864bdc372924c093b0f1cebad603dd712"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc579bf0f502e54926519451b920e875f433aceb4624a3646b3252b5caa9e0b6"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a580c91d686376f0f7c295357595c5a026e6cbc3d77b7c36e290201e7c11ecb"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cfe6ab8da05c01ba6fbea630377b5da2cd9bcbc6338510116b01c1bc939a2c18"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d2e5a98f0ec99beb3c10e13b387f8db39106d53993f498b295f0c914328b1333"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a51a263952b1429e429ff236d2f5a21c5125437861baeed77f5e1cc2d2c7c6da"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3edd2fa14e68c9be82c5b16689e8d63d89fe927e56debd6e1dbce7a26a17f81b"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:74d1b44c6cfc897df648cc9fdaa09bc3e7679926e6f96df05775d4fb3946571c"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:76d9289ed3f7501012e05abb8358bbb129149dbd173f1f57a1bf1c22d19ab7cc"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:92dea1ffe3714fa8eb6a314d2b3c773208d865a0e0d35e713ec54eea08a66250"}, + {file = "kiwisolver-1.4.5-cp38-cp38-win32.whl", hash = "sha256:5c90ae8c8d32e472be041e76f9d2f2dbff4d0b0be8bd4041770eddb18cf49a4e"}, + {file = "kiwisolver-1.4.5-cp38-cp38-win_amd64.whl", hash = "sha256:c7940c1dc63eb37a67721b10d703247552416f719c4188c54e04334321351ced"}, + {file = "kiwisolver-1.4.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9407b6a5f0d675e8a827ad8742e1d6b49d9c1a1da5d952a67d50ef5f4170b18d"}, + {file = "kiwisolver-1.4.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15568384086b6df3c65353820a4473575dbad192e35010f622c6ce3eebd57af9"}, + {file = "kiwisolver-1.4.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0dc9db8e79f0036e8173c466d21ef18e1befc02de8bf8aa8dc0813a6dc8a7046"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cdc8a402aaee9a798b50d8b827d7ecf75edc5fb35ea0f91f213ff927c15f4ff0"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6c3bd3cde54cafb87d74d8db50b909705c62b17c2099b8f2e25b461882e544ff"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:955e8513d07a283056b1396e9a57ceddbd272d9252c14f154d450d227606eb54"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:346f5343b9e3f00b8db8ba359350eb124b98c99efd0b408728ac6ebf38173958"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9098e0049e88c6a24ff64545cdfc50807818ba6c1b739cae221bbbcbc58aad3"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:00bd361b903dc4bbf4eb165f24d1acbee754fce22ded24c3d56eec268658a5cf"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7b8b454bac16428b22560d0a1cf0a09875339cab69df61d7805bf48919415901"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f1d072c2eb0ad60d4c183f3fb44ac6f73fb7a8f16a2694a91f988275cbf352f9"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:31a82d498054cac9f6d0b53d02bb85811185bcb477d4b60144f915f3b3126342"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6512cb89e334e4700febbffaaa52761b65b4f5a3cf33f960213d5656cea36a77"}, + {file = "kiwisolver-1.4.5-cp39-cp39-win32.whl", hash = "sha256:9db8ea4c388fdb0f780fe91346fd438657ea602d58348753d9fb265ce1bca67f"}, + {file = "kiwisolver-1.4.5-cp39-cp39-win_amd64.whl", hash = "sha256:59415f46a37f7f2efeec758353dd2eae1b07640d8ca0f0c42548ec4125492635"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5c7b3b3a728dc6faf3fc372ef24f21d1e3cee2ac3e9596691d746e5a536de920"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:620ced262a86244e2be10a676b646f29c34537d0d9cc8eb26c08f53d98013390"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:378a214a1e3bbf5ac4a8708304318b4f890da88c9e6a07699c4ae7174c09a68d"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf7be1207676ac608a50cd08f102f6742dbfc70e8d60c4db1c6897f62f71523"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ba55dce0a9b8ff59495ddd050a0225d58bd0983d09f87cfe2b6aec4f2c1234e4"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fd32ea360bcbb92d28933fc05ed09bffcb1704ba3fc7942e81db0fd4f81a7892"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5e7139af55d1688f8b960ee9ad5adafc4ac17c1c473fe07133ac092310d76544"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dced8146011d2bc2e883f9bd68618b8247387f4bbec46d7392b3c3b032640126"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9bf3325c47b11b2e51bca0824ea217c7cd84491d8ac4eefd1e409705ef092bd"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5794cf59533bc3f1b1c821f7206a3617999db9fbefc345360aafe2e067514929"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e368f200bbc2e4f905b8e71eb38b3c04333bddaa6a2464a6355487b02bb7fb09"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5d706eba36b4c4d5bc6c6377bb6568098765e990cfc21ee16d13963fab7b3e7"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85267bd1aa8880a9c88a8cb71e18d3d64d2751a790e6ca6c27b8ccc724bcd5ad"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:210ef2c3a1f03272649aff1ef992df2e724748918c4bc2d5a90352849eb40bea"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:11d011a7574eb3b82bcc9c1a1d35c1d7075677fdd15de527d91b46bd35e935ee"}, + {file = "kiwisolver-1.4.5.tar.gz", hash = "sha256:e57e563a57fb22a142da34f38acc2fc1a5c864bc29ca1517a88abc963e60d6ec"}, +] + +[[package]] +name = "langchain" +version = "0.1.16" +description = "Building applications with LLMs through composability" +optional = true +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langchain-0.1.16-py3-none-any.whl", hash = "sha256:bc074cc5e51fad79b9ead1572fc3161918d0f614a6c8f0460543d505ad249ac7"}, + {file = "langchain-0.1.16.tar.gz", hash = "sha256:b6bce78f8c071baa898884accfff15c3d81da2f0dd86c20e2f4c80b41463f49f"}, +] + +[package.dependencies] +aiohttp = ">=3.8.3,<4.0.0" +async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""} +dataclasses-json = ">=0.5.7,<0.7" +jsonpatch = ">=1.33,<2.0" +langchain-community = ">=0.0.32,<0.1" +langchain-core = ">=0.1.42,<0.2.0" +langchain-text-splitters = ">=0.0.1,<0.1" +langsmith = ">=0.1.17,<0.2.0" +numpy = ">=1,<2" +pydantic = ">=1,<3" +PyYAML = ">=5.3" +requests = ">=2,<3" +SQLAlchemy = ">=1.4,<3" +tenacity = ">=8.1.0,<9.0.0" + +[package.extras] +azure = ["azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-textanalytics (>=5.3.0,<6.0.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-core (>=1.26.4,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "azure-search-documents (==11.4.0b8)", "openai (<2)"] +clarifai = ["clarifai (>=9.1.0)"] +cli = ["typer (>=0.9.0,<0.10.0)"] +cohere = ["cohere (>=4,<6)"] +docarray = ["docarray[hnswlib] (>=0.32.0,<0.33.0)"] +embeddings = ["sentence-transformers (>=2,<3)"] +extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<6)", "couchbase (>=4.1.9,<5.0.0)", "dashvector (>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "langchain-openai (>=0.0.2,<0.1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] +javascript = ["esprima (>=4.0.1,<5.0.0)"] +llms = ["clarifai (>=9.1.0)", "cohere (>=4,<6)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (<2)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)"] +openai = ["openai (<2)", "tiktoken (>=0.3.2,<0.6.0)"] +qdrant = ["qdrant-client (>=1.3.1,<2.0.0)"] +text-helpers = ["chardet (>=5.1.0,<6.0.0)"] + +[[package]] +name = "langchain-community" +version = "0.0.32" +description = "Community contributed LangChain integrations." +optional = true +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langchain_community-0.0.32-py3-none-any.whl", hash = "sha256:406977009999952d0705de3806de2b4867e9bb8eda8ca154a59c7a8ed58da38d"}, + {file = "langchain_community-0.0.32.tar.gz", hash = "sha256:1510217d646c8380f54e9850351f6d2a0b0dd73c501b666c6f4b40baa8160b29"}, +] + +[package.dependencies] +aiohttp = ">=3.8.3,<4.0.0" +dataclasses-json = ">=0.5.7,<0.7" +langchain-core = ">=0.1.41,<0.2.0" +langsmith = ">=0.1.0,<0.2.0" +numpy = ">=1,<2" +PyYAML = ">=5.3" +requests = ">=2,<3" +SQLAlchemy = ">=1.4,<3" +tenacity = ">=8.1.0,<9.0.0" + +[package.extras] +cli = ["typer (>=0.9.0,<0.10.0)"] +extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cloudpickle (>=2.0.0)", "cohere (>=4,<5)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "friendli-client (>=1.2.4,<2.0.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "httpx (>=0.24.1,<0.25.0)", "httpx-sse (>=0.4.0,<0.5.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "nvidia-riva-client (>=2.14.0,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "premai (>=0.3.25,<0.4.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pyjwt (>=2.8.0,<3.0.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "tidb-vector (>=0.0.3,<1.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "tree-sitter (>=0.20.2,<0.21.0)", "tree-sitter-languages (>=1.8.0,<2.0.0)", "upstash-redis (>=0.15.0,<0.16.0)", "vdms (>=0.0.20,<0.0.21)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] + +[[package]] +name = "langchain-core" +version = "0.1.42" +description = "Building applications with LLMs through composability" +optional = true +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langchain_core-0.1.42-py3-none-any.whl", hash = "sha256:c5653ffa08a44f740295c157a24c0def4a753333f6a2c41f76bf431cd00be8b5"}, + {file = "langchain_core-0.1.42.tar.gz", hash = "sha256:40751bf60ea5d8e2b2efe65290db434717ee3834870c002e40e2811f09d814e6"}, +] + +[package.dependencies] +jsonpatch = ">=1.33,<2.0" +langsmith = ">=0.1.0,<0.2.0" +packaging = ">=23.2,<24.0" +pydantic = ">=1,<3" +PyYAML = ">=5.3" +tenacity = ">=8.1.0,<9.0.0" + +[package.extras] +extended-testing = ["jinja2 (>=3,<4)"] + +[[package]] +name = "langchain-text-splitters" +version = "0.0.1" +description = "LangChain text splitting utilities" +optional = true +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "langchain_text_splitters-0.0.1-py3-none-any.whl", hash = "sha256:f5b802f873f5ff6a8b9259ff34d53ed989666ef4e1582e6d1adb3b5520e3839a"}, + {file = "langchain_text_splitters-0.0.1.tar.gz", hash = "sha256:ac459fa98799f5117ad5425a9330b21961321e30bc19a2a2f9f761ddadd62aa1"}, +] + +[package.dependencies] +langchain-core = ">=0.1.28,<0.2.0" + +[package.extras] +extended-testing = ["lxml (>=5.1.0,<6.0.0)"] + +[[package]] +name = "langdetect" +version = "1.0.9" +description = "Language detection library ported from Google's language-detection." +optional = true +python-versions = "*" +files = [ + {file = "langdetect-1.0.9-py2-none-any.whl", hash = "sha256:7cbc0746252f19e76f77c0b1690aadf01963be835ef0cd4b56dddf2a8f1dfc2a"}, + {file = "langdetect-1.0.9.tar.gz", hash = "sha256:cbc1fef89f8d062739774bd51eda3da3274006b3661d199c2655f6b3f6d605a0"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "langsmith" +version = "0.1.47" +description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." +optional = true +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langsmith-0.1.47-py3-none-any.whl", hash = "sha256:17b0a908b8d39b6da3ecff658c8c00304b0b62f59945a5e16c2da5a254ea21a6"}, + {file = "langsmith-0.1.47.tar.gz", hash = "sha256:f5ddd17628baa03a775525c5547a543a559313e425cdb2bf23579ffcf6056a76"}, +] + +[package.dependencies] +orjson = ">=3.9.14,<4.0.0" +pydantic = ">=1,<3" +requests = ">=2,<3" + +[[package]] +name = "lxml" +version = "5.2.1" +description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." +optional = true +python-versions = ">=3.6" +files = [ + {file = "lxml-5.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1f7785f4f789fdb522729ae465adcaa099e2a3441519df750ebdccc481d961a1"}, + {file = "lxml-5.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6cc6ee342fb7fa2471bd9b6d6fdfc78925a697bf5c2bcd0a302e98b0d35bfad3"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:794f04eec78f1d0e35d9e0c36cbbb22e42d370dda1609fb03bcd7aeb458c6377"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817d420c60a5183953c783b0547d9eb43b7b344a2c46f69513d5952a78cddf3"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2213afee476546a7f37c7a9b4ad4d74b1e112a6fafffc9185d6d21f043128c81"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b070bbe8d3f0f6147689bed981d19bbb33070225373338df755a46893528104a"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e02c5175f63effbd7c5e590399c118d5db6183bbfe8e0d118bdb5c2d1b48d937"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:3dc773b2861b37b41a6136e0b72a1a44689a9c4c101e0cddb6b854016acc0aa8"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:d7520db34088c96cc0e0a3ad51a4fd5b401f279ee112aa2b7f8f976d8582606d"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:bcbf4af004f98793a95355980764b3d80d47117678118a44a80b721c9913436a"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a2b44bec7adf3e9305ce6cbfa47a4395667e744097faed97abb4728748ba7d47"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:1c5bb205e9212d0ebddf946bc07e73fa245c864a5f90f341d11ce7b0b854475d"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2c9d147f754b1b0e723e6afb7ba1566ecb162fe4ea657f53d2139bbf894d050a"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:3545039fa4779be2df51d6395e91a810f57122290864918b172d5dc7ca5bb433"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a91481dbcddf1736c98a80b122afa0f7296eeb80b72344d7f45dc9f781551f56"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2ddfe41ddc81f29a4c44c8ce239eda5ade4e7fc305fb7311759dd6229a080052"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:a7baf9ffc238e4bf401299f50e971a45bfcc10a785522541a6e3179c83eabf0a"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:31e9a882013c2f6bd2f2c974241bf4ba68c85eba943648ce88936d23209a2e01"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0a15438253b34e6362b2dc41475e7f80de76320f335e70c5528b7148cac253a1"}, + {file = "lxml-5.2.1-cp310-cp310-win32.whl", hash = "sha256:6992030d43b916407c9aa52e9673612ff39a575523c5f4cf72cdef75365709a5"}, + {file = "lxml-5.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:da052e7962ea2d5e5ef5bc0355d55007407087392cf465b7ad84ce5f3e25fe0f"}, + {file = "lxml-5.2.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:70ac664a48aa64e5e635ae5566f5227f2ab7f66a3990d67566d9907edcbbf867"}, + {file = "lxml-5.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1ae67b4e737cddc96c99461d2f75d218bdf7a0c3d3ad5604d1f5e7464a2f9ffe"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f18a5a84e16886898e51ab4b1d43acb3083c39b14c8caeb3589aabff0ee0b270"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6f2c8372b98208ce609c9e1d707f6918cc118fea4e2c754c9f0812c04ca116d"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:394ed3924d7a01b5bd9a0d9d946136e1c2f7b3dc337196d99e61740ed4bc6fe1"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d077bc40a1fe984e1a9931e801e42959a1e6598edc8a3223b061d30fbd26bbc"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:764b521b75701f60683500d8621841bec41a65eb739b8466000c6fdbc256c240"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:3a6b45da02336895da82b9d472cd274b22dc27a5cea1d4b793874eead23dd14f"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:5ea7b6766ac2dfe4bcac8b8595107665a18ef01f8c8343f00710b85096d1b53a"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:e196a4ff48310ba62e53a8e0f97ca2bca83cdd2fe2934d8b5cb0df0a841b193a"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:200e63525948e325d6a13a76ba2911f927ad399ef64f57898cf7c74e69b71095"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:dae0ed02f6b075426accbf6b2863c3d0a7eacc1b41fb40f2251d931e50188dad"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:ab31a88a651039a07a3ae327d68ebdd8bc589b16938c09ef3f32a4b809dc96ef"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:df2e6f546c4df14bc81f9498bbc007fbb87669f1bb707c6138878c46b06f6510"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5dd1537e7cc06efd81371f5d1a992bd5ab156b2b4f88834ca852de4a8ea523fa"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9b9ec9c9978b708d488bec36b9e4c94d88fd12ccac3e62134a9d17ddba910ea9"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:8e77c69d5892cb5ba71703c4057091e31ccf534bd7f129307a4d084d90d014b8"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:a8d5c70e04aac1eda5c829a26d1f75c6e5286c74743133d9f742cda8e53b9c2f"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c94e75445b00319c1fad60f3c98b09cd63fe1134a8a953dcd48989ef42318534"}, + {file = "lxml-5.2.1-cp311-cp311-win32.whl", hash = "sha256:4951e4f7a5680a2db62f7f4ab2f84617674d36d2d76a729b9a8be4b59b3659be"}, + {file = "lxml-5.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:5c670c0406bdc845b474b680b9a5456c561c65cf366f8db5a60154088c92d102"}, + {file = "lxml-5.2.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:abc25c3cab9ec7fcd299b9bcb3b8d4a1231877e425c650fa1c7576c5107ab851"}, + {file = "lxml-5.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6935bbf153f9a965f1e07c2649c0849d29832487c52bb4a5c5066031d8b44fd5"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d793bebb202a6000390a5390078e945bbb49855c29c7e4d56a85901326c3b5d9"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afd5562927cdef7c4f5550374acbc117fd4ecc05b5007bdfa57cc5355864e0a4"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0e7259016bc4345a31af861fdce942b77c99049d6c2107ca07dc2bba2435c1d9"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:530e7c04f72002d2f334d5257c8a51bf409db0316feee7c87e4385043be136af"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59689a75ba8d7ffca577aefd017d08d659d86ad4585ccc73e43edbfc7476781a"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f9737bf36262046213a28e789cc82d82c6ef19c85a0cf05e75c670a33342ac2c"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:3a74c4f27167cb95c1d4af1c0b59e88b7f3e0182138db2501c353555f7ec57f4"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:68a2610dbe138fa8c5826b3f6d98a7cfc29707b850ddcc3e21910a6fe51f6ca0"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:f0a1bc63a465b6d72569a9bba9f2ef0334c4e03958e043da1920299100bc7c08"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c2d35a1d047efd68027817b32ab1586c1169e60ca02c65d428ae815b593e65d4"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:79bd05260359170f78b181b59ce871673ed01ba048deef4bf49a36ab3e72e80b"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:865bad62df277c04beed9478fe665b9ef63eb28fe026d5dedcb89b537d2e2ea6"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:44f6c7caff88d988db017b9b0e4ab04934f11e3e72d478031efc7edcac6c622f"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:71e97313406ccf55d32cc98a533ee05c61e15d11b99215b237346171c179c0b0"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:057cdc6b86ab732cf361f8b4d8af87cf195a1f6dc5b0ff3de2dced242c2015e0"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:f3bbbc998d42f8e561f347e798b85513ba4da324c2b3f9b7969e9c45b10f6169"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:491755202eb21a5e350dae00c6d9a17247769c64dcf62d8c788b5c135e179dc4"}, + {file = "lxml-5.2.1-cp312-cp312-win32.whl", hash = "sha256:8de8f9d6caa7f25b204fc861718815d41cbcf27ee8f028c89c882a0cf4ae4134"}, + {file = "lxml-5.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:f2a9efc53d5b714b8df2b4b3e992accf8ce5bbdfe544d74d5c6766c9e1146a3a"}, + {file = "lxml-5.2.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:70a9768e1b9d79edca17890175ba915654ee1725975d69ab64813dd785a2bd5c"}, + {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c38d7b9a690b090de999835f0443d8aa93ce5f2064035dfc48f27f02b4afc3d0"}, + {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5670fb70a828663cc37552a2a85bf2ac38475572b0e9b91283dc09efb52c41d1"}, + {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:958244ad566c3ffc385f47dddde4145088a0ab893504b54b52c041987a8c1863"}, + {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b6241d4eee5f89453307c2f2bfa03b50362052ca0af1efecf9fef9a41a22bb4f"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:2a66bf12fbd4666dd023b6f51223aed3d9f3b40fef06ce404cb75bafd3d89536"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:9123716666e25b7b71c4e1789ec829ed18663152008b58544d95b008ed9e21e9"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:0c3f67e2aeda739d1cc0b1102c9a9129f7dc83901226cc24dd72ba275ced4218"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:5d5792e9b3fb8d16a19f46aa8208987cfeafe082363ee2745ea8b643d9cc5b45"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:88e22fc0a6684337d25c994381ed8a1580a6f5ebebd5ad41f89f663ff4ec2885"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_ppc64le.whl", hash = "sha256:21c2e6b09565ba5b45ae161b438e033a86ad1736b8c838c766146eff8ceffff9"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_s390x.whl", hash = "sha256:afbbdb120d1e78d2ba8064a68058001b871154cc57787031b645c9142b937a62"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:627402ad8dea044dde2eccde4370560a2b750ef894c9578e1d4f8ffd54000461"}, + {file = "lxml-5.2.1-cp36-cp36m-win32.whl", hash = "sha256:e89580a581bf478d8dcb97d9cd011d567768e8bc4095f8557b21c4d4c5fea7d0"}, + {file = "lxml-5.2.1-cp36-cp36m-win_amd64.whl", hash = "sha256:59565f10607c244bc4c05c0c5fa0c190c990996e0c719d05deec7030c2aa8289"}, + {file = "lxml-5.2.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:857500f88b17a6479202ff5fe5f580fc3404922cd02ab3716197adf1ef628029"}, + {file = "lxml-5.2.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56c22432809085b3f3ae04e6e7bdd36883d7258fcd90e53ba7b2e463efc7a6af"}, + {file = "lxml-5.2.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a55ee573116ba208932e2d1a037cc4b10d2c1cb264ced2184d00b18ce585b2c0"}, + {file = "lxml-5.2.1-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:6cf58416653c5901e12624e4013708b6e11142956e7f35e7a83f1ab02f3fe456"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:64c2baa7774bc22dd4474248ba16fe1a7f611c13ac6123408694d4cc93d66dbd"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:74b28c6334cca4dd704e8004cba1955af0b778cf449142e581e404bd211fb619"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:7221d49259aa1e5a8f00d3d28b1e0b76031655ca74bb287123ef56c3db92f213"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3dbe858ee582cbb2c6294dc85f55b5f19c918c2597855e950f34b660f1a5ede6"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:04ab5415bf6c86e0518d57240a96c4d1fcfc3cb370bb2ac2a732b67f579e5a04"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:6ab833e4735a7e5533711a6ea2df26459b96f9eec36d23f74cafe03631647c41"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f443cdef978430887ed55112b491f670bba6462cea7a7742ff8f14b7abb98d75"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:9e2addd2d1866fe112bc6f80117bcc6bc25191c5ed1bfbcf9f1386a884252ae8"}, + {file = "lxml-5.2.1-cp37-cp37m-win32.whl", hash = "sha256:f51969bac61441fd31f028d7b3b45962f3ecebf691a510495e5d2cd8c8092dbd"}, + {file = "lxml-5.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:b0b58fbfa1bf7367dde8a557994e3b1637294be6cf2169810375caf8571a085c"}, + {file = "lxml-5.2.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3e183c6e3298a2ed5af9d7a356ea823bccaab4ec2349dc9ed83999fd289d14d5"}, + {file = "lxml-5.2.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:804f74efe22b6a227306dd890eecc4f8c59ff25ca35f1f14e7482bbce96ef10b"}, + {file = "lxml-5.2.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:08802f0c56ed150cc6885ae0788a321b73505d2263ee56dad84d200cab11c07a"}, + {file = "lxml-5.2.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f8c09ed18ecb4ebf23e02b8e7a22a05d6411911e6fabef3a36e4f371f4f2585"}, + {file = "lxml-5.2.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3d30321949861404323c50aebeb1943461a67cd51d4200ab02babc58bd06a86"}, + {file = "lxml-5.2.1-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:b560e3aa4b1d49e0e6c847d72665384db35b2f5d45f8e6a5c0072e0283430533"}, + {file = "lxml-5.2.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:058a1308914f20784c9f4674036527e7c04f7be6fb60f5d61353545aa7fcb739"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:adfb84ca6b87e06bc6b146dc7da7623395db1e31621c4785ad0658c5028b37d7"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:417d14450f06d51f363e41cace6488519038f940676ce9664b34ebf5653433a5"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a2dfe7e2473f9b59496247aad6e23b405ddf2e12ef0765677b0081c02d6c2c0b"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bf2e2458345d9bffb0d9ec16557d8858c9c88d2d11fed53998512504cd9df49b"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:58278b29cb89f3e43ff3e0c756abbd1518f3ee6adad9e35b51fb101c1c1daaec"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:64641a6068a16201366476731301441ce93457eb8452056f570133a6ceb15fca"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:78bfa756eab503673991bdcf464917ef7845a964903d3302c5f68417ecdc948c"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:11a04306fcba10cd9637e669fd73aa274c1c09ca64af79c041aa820ea992b637"}, + {file = "lxml-5.2.1-cp38-cp38-win32.whl", hash = "sha256:66bc5eb8a323ed9894f8fa0ee6cb3e3fb2403d99aee635078fd19a8bc7a5a5da"}, + {file = "lxml-5.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:9676bfc686fa6a3fa10cd4ae6b76cae8be26eb5ec6811d2a325636c460da1806"}, + {file = "lxml-5.2.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cf22b41fdae514ee2f1691b6c3cdeae666d8b7fa9434de445f12bbeee0cf48dd"}, + {file = "lxml-5.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ec42088248c596dbd61d4ae8a5b004f97a4d91a9fd286f632e42e60b706718d7"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd53553ddad4a9c2f1f022756ae64abe16da1feb497edf4d9f87f99ec7cf86bd"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feaa45c0eae424d3e90d78823f3828e7dc42a42f21ed420db98da2c4ecf0a2cb"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddc678fb4c7e30cf830a2b5a8d869538bc55b28d6c68544d09c7d0d8f17694dc"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:853e074d4931dbcba7480d4dcab23d5c56bd9607f92825ab80ee2bd916edea53"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc4691d60512798304acb9207987e7b2b7c44627ea88b9d77489bbe3e6cc3bd4"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:beb72935a941965c52990f3a32d7f07ce869fe21c6af8b34bf6a277b33a345d3"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:6588c459c5627fefa30139be4d2e28a2c2a1d0d1c265aad2ba1935a7863a4913"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:588008b8497667f1ddca7c99f2f85ce8511f8f7871b4a06ceede68ab62dff64b"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b6787b643356111dfd4032b5bffe26d2f8331556ecb79e15dacb9275da02866e"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7c17b64b0a6ef4e5affae6a3724010a7a66bda48a62cfe0674dabd46642e8b54"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:27aa20d45c2e0b8cd05da6d4759649170e8dfc4f4e5ef33a34d06f2d79075d57"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:d4f2cc7060dc3646632d7f15fe68e2fa98f58e35dd5666cd525f3b35d3fed7f8"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff46d772d5f6f73564979cd77a4fffe55c916a05f3cb70e7c9c0590059fb29ef"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:96323338e6c14e958d775700ec8a88346014a85e5de73ac7967db0367582049b"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:52421b41ac99e9d91934e4d0d0fe7da9f02bfa7536bb4431b4c05c906c8c6919"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:7a7efd5b6d3e30d81ec68ab8a88252d7c7c6f13aaa875009fe3097eb4e30b84c"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0ed777c1e8c99b63037b91f9d73a6aad20fd035d77ac84afcc205225f8f41188"}, + {file = "lxml-5.2.1-cp39-cp39-win32.whl", hash = "sha256:644df54d729ef810dcd0f7732e50e5ad1bd0a135278ed8d6bcb06f33b6b6f708"}, + {file = "lxml-5.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:9ca66b8e90daca431b7ca1408cae085d025326570e57749695d6a01454790e95"}, + {file = "lxml-5.2.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9b0ff53900566bc6325ecde9181d89afadc59c5ffa39bddf084aaedfe3b06a11"}, + {file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd6037392f2d57793ab98d9e26798f44b8b4da2f2464388588f48ac52c489ea1"}, + {file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b9c07e7a45bb64e21df4b6aa623cb8ba214dfb47d2027d90eac197329bb5e94"}, + {file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3249cc2989d9090eeac5467e50e9ec2d40704fea9ab72f36b034ea34ee65ca98"}, + {file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f42038016852ae51b4088b2862126535cc4fc85802bfe30dea3500fdfaf1864e"}, + {file = "lxml-5.2.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:533658f8fbf056b70e434dff7e7aa611bcacb33e01f75de7f821810e48d1bb66"}, + {file = "lxml-5.2.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:622020d4521e22fb371e15f580d153134bfb68d6a429d1342a25f051ec72df1c"}, + {file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efa7b51824aa0ee957ccd5a741c73e6851de55f40d807f08069eb4c5a26b2baa"}, + {file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c6ad0fbf105f6bcc9300c00010a2ffa44ea6f555df1a2ad95c88f5656104817"}, + {file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e233db59c8f76630c512ab4a4daf5a5986da5c3d5b44b8e9fc742f2a24dbd460"}, + {file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6a014510830df1475176466b6087fc0c08b47a36714823e58d8b8d7709132a96"}, + {file = "lxml-5.2.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:d38c8f50ecf57f0463399569aa388b232cf1a2ffb8f0a9a5412d0db57e054860"}, + {file = "lxml-5.2.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5aea8212fb823e006b995c4dda533edcf98a893d941f173f6c9506126188860d"}, + {file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff097ae562e637409b429a7ac958a20aab237a0378c42dabaa1e3abf2f896e5f"}, + {file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f5d65c39f16717a47c36c756af0fb36144069c4718824b7533f803ecdf91138"}, + {file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3d0c3dd24bb4605439bf91068598d00c6370684f8de4a67c2992683f6c309d6b"}, + {file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e32be23d538753a8adb6c85bd539f5fd3b15cb987404327c569dfc5fd8366e85"}, + {file = "lxml-5.2.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:cc518cea79fd1e2f6c90baafa28906d4309d24f3a63e801d855e7424c5b34144"}, + {file = "lxml-5.2.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a0af35bd8ebf84888373630f73f24e86bf016642fb8576fba49d3d6b560b7cbc"}, + {file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8aca2e3a72f37bfc7b14ba96d4056244001ddcc18382bd0daa087fd2e68a354"}, + {file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ca1e8188b26a819387b29c3895c47a5e618708fe6f787f3b1a471de2c4a94d9"}, + {file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c8ba129e6d3b0136a0f50345b2cb3db53f6bda5dd8c7f5d83fbccba97fb5dcb5"}, + {file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e998e304036198b4f6914e6a1e2b6f925208a20e2042563d9734881150c6c246"}, + {file = "lxml-5.2.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d3be9b2076112e51b323bdf6d5a7f8a798de55fb8d95fcb64bd179460cdc0704"}, + {file = "lxml-5.2.1.tar.gz", hash = "sha256:3f7765e69bbce0906a7c74d5fe46d2c7a7596147318dbc08e4a2431f3060e306"}, +] + +[package.extras] +cssselect = ["cssselect (>=0.7)"] +html-clean = ["lxml-html-clean"] +html5 = ["html5lib"] +htmlsoup = ["BeautifulSoup4"] +source = ["Cython (>=3.0.10)"] + +[[package]] +name = "markdown" +version = "3.6" +description = "Python implementation of John Gruber's Markdown." +optional = true +python-versions = ">=3.8" +files = [ + {file = "Markdown-3.6-py3-none-any.whl", hash = "sha256:48f276f4d8cfb8ce6527c8f79e2ee29708508bf4d40aa410fbc3b4ee832c850f"}, + {file = "Markdown-3.6.tar.gz", hash = "sha256:ed4f41f6daecbeeb96e576ce414c41d2d876daa9a16cb35fa8ed8c2ddfad0224"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] +testing = ["coverage", "pyyaml"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "marshmallow" +version = "3.21.1" +description = "A lightweight library for converting complex datatypes to and from native Python datatypes." +optional = true +python-versions = ">=3.8" +files = [ + {file = "marshmallow-3.21.1-py3-none-any.whl", hash = "sha256:f085493f79efb0644f270a9bf2892843142d80d7174bbbd2f3713f2a589dc633"}, + {file = "marshmallow-3.21.1.tar.gz", hash = "sha256:4e65e9e0d80fc9e609574b9983cf32579f305c718afb30d7233ab818571768c3"}, +] + +[package.dependencies] +packaging = ">=17.0" + +[package.extras] +dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] +docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.2.6)", "sphinx-issues (==4.0.0)", "sphinx-version-warning (==1.1.2)"] +tests = ["pytest", "pytz", "simplejson"] + +[[package]] +name = "matplotlib" +version = "3.8.4" +description = "Python plotting package" +optional = true +python-versions = ">=3.9" +files = [ + {file = "matplotlib-3.8.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:abc9d838f93583650c35eca41cfcec65b2e7cb50fd486da6f0c49b5e1ed23014"}, + {file = "matplotlib-3.8.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f65c9f002d281a6e904976007b2d46a1ee2bcea3a68a8c12dda24709ddc9106"}, + {file = "matplotlib-3.8.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce1edd9f5383b504dbc26eeea404ed0a00656c526638129028b758fd43fc5f10"}, + {file = "matplotlib-3.8.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ecd79298550cba13a43c340581a3ec9c707bd895a6a061a78fa2524660482fc0"}, + {file = "matplotlib-3.8.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:90df07db7b599fe7035d2f74ab7e438b656528c68ba6bb59b7dc46af39ee48ef"}, + {file = "matplotlib-3.8.4-cp310-cp310-win_amd64.whl", hash = "sha256:ac24233e8f2939ac4fd2919eed1e9c0871eac8057666070e94cbf0b33dd9c338"}, + {file = "matplotlib-3.8.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:72f9322712e4562e792b2961971891b9fbbb0e525011e09ea0d1f416c4645661"}, + {file = "matplotlib-3.8.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:232ce322bfd020a434caaffbd9a95333f7c2491e59cfc014041d95e38ab90d1c"}, + {file = "matplotlib-3.8.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6addbd5b488aedb7f9bc19f91cd87ea476206f45d7116fcfe3d31416702a82fa"}, + {file = "matplotlib-3.8.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc4ccdc64e3039fc303defd119658148f2349239871db72cd74e2eeaa9b80b71"}, + {file = "matplotlib-3.8.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b7a2a253d3b36d90c8993b4620183b55665a429da8357a4f621e78cd48b2b30b"}, + {file = "matplotlib-3.8.4-cp311-cp311-win_amd64.whl", hash = "sha256:8080d5081a86e690d7688ffa542532e87f224c38a6ed71f8fbed34dd1d9fedae"}, + {file = "matplotlib-3.8.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6485ac1f2e84676cff22e693eaa4fbed50ef5dc37173ce1f023daef4687df616"}, + {file = "matplotlib-3.8.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c89ee9314ef48c72fe92ce55c4e95f2f39d70208f9f1d9db4e64079420d8d732"}, + {file = "matplotlib-3.8.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50bac6e4d77e4262c4340d7a985c30912054745ec99756ce213bfbc3cb3808eb"}, + {file = "matplotlib-3.8.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f51c4c869d4b60d769f7b4406eec39596648d9d70246428745a681c327a8ad30"}, + {file = "matplotlib-3.8.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b12ba985837e4899b762b81f5b2845bd1a28f4fdd1a126d9ace64e9c4eb2fb25"}, + {file = "matplotlib-3.8.4-cp312-cp312-win_amd64.whl", hash = "sha256:7a6769f58ce51791b4cb8b4d7642489df347697cd3e23d88266aaaee93b41d9a"}, + {file = "matplotlib-3.8.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:843cbde2f0946dadd8c5c11c6d91847abd18ec76859dc319362a0964493f0ba6"}, + {file = "matplotlib-3.8.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1c13f041a7178f9780fb61cc3a2b10423d5e125480e4be51beaf62b172413b67"}, + {file = "matplotlib-3.8.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb44f53af0a62dc80bba4443d9b27f2fde6acfdac281d95bc872dc148a6509cc"}, + {file = "matplotlib-3.8.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:606e3b90897554c989b1e38a258c626d46c873523de432b1462f295db13de6f9"}, + {file = "matplotlib-3.8.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9bb0189011785ea794ee827b68777db3ca3f93f3e339ea4d920315a0e5a78d54"}, + {file = "matplotlib-3.8.4-cp39-cp39-win_amd64.whl", hash = "sha256:6209e5c9aaccc056e63b547a8152661324404dd92340a6e479b3a7f24b42a5d0"}, + {file = "matplotlib-3.8.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c7064120a59ce6f64103c9cefba8ffe6fba87f2c61d67c401186423c9a20fd35"}, + {file = "matplotlib-3.8.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0e47eda4eb2614300fc7bb4657fced3e83d6334d03da2173b09e447418d499f"}, + {file = "matplotlib-3.8.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:493e9f6aa5819156b58fce42b296ea31969f2aab71c5b680b4ea7a3cb5c07d94"}, + {file = "matplotlib-3.8.4.tar.gz", hash = "sha256:8aac397d5e9ec158960e31c381c5ffc52ddd52bd9a47717e2a694038167dffea"}, +] + +[package.dependencies] +contourpy = ">=1.0.1" +cycler = ">=0.10" +fonttools = ">=4.22.0" +importlib-resources = {version = ">=3.2.0", markers = "python_version < \"3.10\""} +kiwisolver = ">=1.3.1" +numpy = ">=1.21" +packaging = ">=20.0" +pillow = ">=8" +pyparsing = ">=2.3.1" +python-dateutil = ">=2.7" + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "multidict" +version = "6.0.5" +description = "multidict implementation" +optional = true +python-versions = ">=3.7" +files = [ + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, + {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, + {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, + {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, + {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, + {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, + {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, + {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, + {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, + {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, + {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, + {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, + {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, + {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, + {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, + {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, +] + +[[package]] +name = "mypy" +version = "1.9.0" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"}, + {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"}, + {file = "mypy-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150"}, + {file = "mypy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374"}, + {file = "mypy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc"}, + {file = "mypy-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129"}, + {file = "mypy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612"}, + {file = "mypy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6"}, + {file = "mypy-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185"}, + {file = "mypy-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913"}, + {file = "mypy-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"}, + {file = "mypy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b"}, + {file = "mypy-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2"}, + {file = "mypy-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e"}, + {file = "mypy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04"}, + {file = "mypy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89"}, + {file = "mypy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02"}, + {file = "mypy-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4"}, + {file = "mypy-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d"}, + {file = "mypy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf"}, + {file = "mypy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9"}, + {file = "mypy-1.9.0-py3-none-any.whl", hash = "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e"}, + {file = "mypy-1.9.0.tar.gz", hash = "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.1.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "nltk" +version = "3.8.1" +description = "Natural Language Toolkit" +optional = true +python-versions = ">=3.7" +files = [ + {file = "nltk-3.8.1-py3-none-any.whl", hash = "sha256:fd5c9109f976fa86bcadba8f91e47f5e9293bd034474752e92a520f81c93dda5"}, + {file = "nltk-3.8.1.zip", hash = "sha256:1834da3d0682cba4f2cede2f9aad6b0fafb6461ba451db0efb6f9c39798d64d3"}, +] + +[package.dependencies] +click = "*" +joblib = "*" +regex = ">=2021.8.3" +tqdm = "*" + +[package.extras] +all = ["matplotlib", "numpy", "pyparsing", "python-crfsuite", "requests", "scikit-learn", "scipy", "twython"] +corenlp = ["requests"] +machine-learning = ["numpy", "python-crfsuite", "scikit-learn", "scipy"] +plot = ["matplotlib"] +tgrep = ["pyparsing"] +twitter = ["twython"] + +[[package]] +name = "numpy" +version = "1.26.4" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, +] + +[[package]] +name = "openai" +version = "0.27.9" +description = "Python client library for the OpenAI API" +optional = true +python-versions = ">=3.7.1" +files = [ + {file = "openai-0.27.9-py3-none-any.whl", hash = "sha256:6a3cf8e276d1a6262b50562fbc0cba7967cfebb78ed827d375986b48fdad6475"}, + {file = "openai-0.27.9.tar.gz", hash = "sha256:b687761c82f5ebb6f61efc791b2083d2d068277b94802d4d1369efe39851813d"}, +] + +[package.dependencies] +aiohttp = "*" +matplotlib = {version = "*", optional = true, markers = "extra == \"embeddings\""} +numpy = {version = "*", optional = true, markers = "extra == \"embeddings\""} +openpyxl = {version = ">=3.0.7", optional = true, markers = "extra == \"embeddings\""} +pandas = {version = ">=1.2.3", optional = true, markers = "extra == \"embeddings\""} +pandas-stubs = {version = ">=1.1.0.11", optional = true, markers = "extra == \"embeddings\""} +plotly = {version = "*", optional = true, markers = "extra == \"embeddings\""} +requests = ">=2.20" +scikit-learn = {version = ">=1.0.2", optional = true, markers = "extra == \"embeddings\""} +scipy = {version = "*", optional = true, markers = "extra == \"embeddings\""} +tenacity = {version = ">=8.0.1", optional = true, markers = "extra == \"embeddings\""} +tqdm = "*" + +[package.extras] +datalib = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] +dev = ["black (>=21.6b0,<22.0)", "pytest (==6.*)", "pytest-asyncio", "pytest-mock"] +embeddings = ["matplotlib", "numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "plotly", "scikit-learn (>=1.0.2)", "scipy", "tenacity (>=8.0.1)"] +wandb = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "wandb"] + +[[package]] +name = "openapi-schema-validator" +version = "0.2.3" +description = "OpenAPI schema validation for Python" +optional = false +python-versions = ">=3.7.0,<4.0.0" +files = [ + {file = "openapi-schema-validator-0.2.3.tar.gz", hash = "sha256:2c64907728c3ef78e23711c8840a423f0b241588c9ed929855e4b2d1bb0cf5f2"}, + {file = "openapi_schema_validator-0.2.3-py3-none-any.whl", hash = "sha256:9bae709212a19222892cabcc60cafd903cbf4b220223f48583afa3c0e3cc6fc4"}, +] + +[package.dependencies] +jsonschema = ">=3.0.0,<5.0.0" + +[package.extras] +isodate = ["isodate"] +rfc3339-validator = ["rfc3339-validator"] +strict-rfc3339 = ["strict-rfc3339"] + +[[package]] +name = "openapi-spec-validator" +version = "0.3.1" +description = "OpenAPI 2.0 (aka Swagger) and OpenAPI 3.0 spec validator" +optional = false +python-versions = ">= 2.7, != 3.0.*, != 3.1.*, != 3.2.*, != 3.3.*, != 3.4.*" +files = [ + {file = "openapi-spec-validator-0.3.1.tar.gz", hash = "sha256:3d70e6592754799f7e77a45b98c6a91706bdd309a425169d17d8e92173e198a2"}, + {file = "openapi_spec_validator-0.3.1-py2-none-any.whl", hash = "sha256:0a7da925bad4576f4518f77302c0b1990adb2fbcbe7d63fb4ed0de894cad8bdd"}, + {file = "openapi_spec_validator-0.3.1-py3-none-any.whl", hash = "sha256:ba28b06e63274f2bc6de995a07fb572c657e534425b5baf68d9f7911efe6929f"}, +] + +[package.dependencies] +jsonschema = "*" +openapi-schema-validator = "*" +PyYAML = ">=5.1" +six = "*" + +[package.extras] +dev = ["pre-commit"] +requests = ["requests"] + +[[package]] +name = "openpyxl" +version = "3.1.2" +description = "A Python library to read/write Excel 2010 xlsx/xlsm files" +optional = true +python-versions = ">=3.6" +files = [ + {file = "openpyxl-3.1.2-py2.py3-none-any.whl", hash = "sha256:f91456ead12ab3c6c2e9491cf33ba6d08357d802192379bb482f1033ade496f5"}, + {file = "openpyxl-3.1.2.tar.gz", hash = "sha256:a6f5977418eff3b2d5500d54d9db50c8277a368436f4e4f8ddb1be3422870184"}, +] + +[package.dependencies] +et-xmlfile = "*" + +[[package]] +name = "orjson" +version = "3.10.0" +description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +optional = true +python-versions = ">=3.8" +files = [ + {file = "orjson-3.10.0-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:47af5d4b850a2d1328660661f0881b67fdbe712aea905dadd413bdea6f792c33"}, + {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c90681333619d78360d13840c7235fdaf01b2b129cb3a4f1647783b1971542b6"}, + {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:400c5b7c4222cb27b5059adf1fb12302eebcabf1978f33d0824aa5277ca899bd"}, + {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5dcb32e949eae80fb335e63b90e5808b4b0f64e31476b3777707416b41682db5"}, + {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7d507c7493252c0a0264b5cc7e20fa2f8622b8a83b04d819b5ce32c97cf57b"}, + {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e286a51def6626f1e0cc134ba2067dcf14f7f4b9550f6dd4535fd9d79000040b"}, + {file = "orjson-3.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8acd4b82a5f3a3ec8b1dc83452941d22b4711964c34727eb1e65449eead353ca"}, + {file = "orjson-3.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:30707e646080dd3c791f22ce7e4a2fc2438765408547c10510f1f690bd336217"}, + {file = "orjson-3.10.0-cp310-none-win32.whl", hash = "sha256:115498c4ad34188dcb73464e8dc80e490a3e5e88a925907b6fedcf20e545001a"}, + {file = "orjson-3.10.0-cp310-none-win_amd64.whl", hash = "sha256:6735dd4a5a7b6df00a87d1d7a02b84b54d215fb7adac50dd24da5997ffb4798d"}, + {file = "orjson-3.10.0-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9587053e0cefc284e4d1cd113c34468b7d3f17666d22b185ea654f0775316a26"}, + {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bef1050b1bdc9ea6c0d08468e3e61c9386723633b397e50b82fda37b3563d72"}, + {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d16c6963ddf3b28c0d461641517cd312ad6b3cf303d8b87d5ef3fa59d6844337"}, + {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4251964db47ef090c462a2d909f16c7c7d5fe68e341dabce6702879ec26d1134"}, + {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:73bbbdc43d520204d9ef0817ac03fa49c103c7f9ea94f410d2950755be2c349c"}, + {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:414e5293b82373606acf0d66313aecb52d9c8c2404b1900683eb32c3d042dbd7"}, + {file = "orjson-3.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:feaed5bb09877dc27ed0d37f037ddef6cb76d19aa34b108db270d27d3d2ef747"}, + {file = "orjson-3.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5127478260db640323cea131ee88541cb1a9fbce051f0b22fa2f0892f44da302"}, + {file = "orjson-3.10.0-cp311-none-win32.whl", hash = "sha256:b98345529bafe3c06c09996b303fc0a21961820d634409b8639bc16bd4f21b63"}, + {file = "orjson-3.10.0-cp311-none-win_amd64.whl", hash = "sha256:658ca5cee3379dd3d37dbacd43d42c1b4feee99a29d847ef27a1cb18abdfb23f"}, + {file = "orjson-3.10.0-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:4329c1d24fd130ee377e32a72dc54a3c251e6706fccd9a2ecb91b3606fddd998"}, + {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef0f19fdfb6553342b1882f438afd53c7cb7aea57894c4490c43e4431739c700"}, + {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c4f60db24161534764277f798ef53b9d3063092f6d23f8f962b4a97edfa997a0"}, + {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1de3fd5c7b208d836f8ecb4526995f0d5877153a4f6f12f3e9bf11e49357de98"}, + {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f93e33f67729d460a177ba285002035d3f11425ed3cebac5f6ded4ef36b28344"}, + {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:237ba922aef472761acd697eef77fef4831ab769a42e83c04ac91e9f9e08fa0e"}, + {file = "orjson-3.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:98c1bfc6a9bec52bc8f0ab9b86cc0874b0299fccef3562b793c1576cf3abb570"}, + {file = "orjson-3.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:30d795a24be16c03dca0c35ca8f9c8eaaa51e3342f2c162d327bd0225118794a"}, + {file = "orjson-3.10.0-cp312-none-win32.whl", hash = "sha256:6a3f53dc650bc860eb26ec293dfb489b2f6ae1cbfc409a127b01229980e372f7"}, + {file = "orjson-3.10.0-cp312-none-win_amd64.whl", hash = "sha256:983db1f87c371dc6ffc52931eb75f9fe17dc621273e43ce67bee407d3e5476e9"}, + {file = "orjson-3.10.0-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9a667769a96a72ca67237224a36faf57db0c82ab07d09c3aafc6f956196cfa1b"}, + {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ade1e21dfde1d37feee8cf6464c20a2f41fa46c8bcd5251e761903e46102dc6b"}, + {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:23c12bb4ced1c3308eff7ba5c63ef8f0edb3e4c43c026440247dd6c1c61cea4b"}, + {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2d014cf8d4dc9f03fc9f870de191a49a03b1bcda51f2a957943fb9fafe55aac"}, + {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eadecaa16d9783affca33597781328e4981b048615c2ddc31c47a51b833d6319"}, + {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd583341218826f48bd7c6ebf3310b4126216920853cbc471e8dbeaf07b0b80e"}, + {file = "orjson-3.10.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:90bfc137c75c31d32308fd61951d424424426ddc39a40e367704661a9ee97095"}, + {file = "orjson-3.10.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:13b5d3c795b09a466ec9fcf0bd3ad7b85467d91a60113885df7b8d639a9d374b"}, + {file = "orjson-3.10.0-cp38-none-win32.whl", hash = "sha256:5d42768db6f2ce0162544845facb7c081e9364a5eb6d2ef06cd17f6050b048d8"}, + {file = "orjson-3.10.0-cp38-none-win_amd64.whl", hash = "sha256:33e6655a2542195d6fd9f850b428926559dee382f7a862dae92ca97fea03a5ad"}, + {file = "orjson-3.10.0-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:4050920e831a49d8782a1720d3ca2f1c49b150953667eed6e5d63a62e80f46a2"}, + {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1897aa25a944cec774ce4a0e1c8e98fb50523e97366c637b7d0cddabc42e6643"}, + {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9bf565a69e0082ea348c5657401acec3cbbb31564d89afebaee884614fba36b4"}, + {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b6ebc17cfbbf741f5c1a888d1854354536f63d84bee537c9a7c0335791bb9009"}, + {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d2817877d0b69f78f146ab305c5975d0618df41acf8811249ee64231f5953fee"}, + {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57d017863ec8aa4589be30a328dacd13c2dc49de1c170bc8d8c8a98ece0f2925"}, + {file = "orjson-3.10.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:22c2f7e377ac757bd3476ecb7480c8ed79d98ef89648f0176deb1da5cd014eb7"}, + {file = "orjson-3.10.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e62ba42bfe64c60c1bc84799944f80704e996592c6b9e14789c8e2a303279912"}, + {file = "orjson-3.10.0-cp39-none-win32.whl", hash = "sha256:60c0b1bdbccd959ebd1575bd0147bd5e10fc76f26216188be4a36b691c937077"}, + {file = "orjson-3.10.0-cp39-none-win_amd64.whl", hash = "sha256:175a41500ebb2fdf320bf78e8b9a75a1279525b62ba400b2b2444e274c2c8bee"}, + {file = "orjson-3.10.0.tar.gz", hash = "sha256:ba4d8cac5f2e2cff36bea6b6481cdb92b38c202bcec603d6f5ff91960595a1ed"}, +] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pandas" +version = "2.0.3" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pandas-2.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e4c7c9f27a4185304c7caf96dc7d91bc60bc162221152de697c98eb0b2648dd8"}, + {file = "pandas-2.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f167beed68918d62bffb6ec64f2e1d8a7d297a038f86d4aed056b9493fca407f"}, + {file = "pandas-2.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce0c6f76a0f1ba361551f3e6dceaff06bde7514a374aa43e33b588ec10420183"}, + {file = "pandas-2.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba619e410a21d8c387a1ea6e8a0e49bb42216474436245718d7f2e88a2f8d7c0"}, + {file = "pandas-2.0.3-cp310-cp310-win32.whl", hash = "sha256:3ef285093b4fe5058eefd756100a367f27029913760773c8bf1d2d8bebe5d210"}, + {file = "pandas-2.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:9ee1a69328d5c36c98d8e74db06f4ad518a1840e8ccb94a4ba86920986bb617e"}, + {file = "pandas-2.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b084b91d8d66ab19f5bb3256cbd5ea661848338301940e17f4492b2ce0801fe8"}, + {file = "pandas-2.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:37673e3bdf1551b95bf5d4ce372b37770f9529743d2498032439371fc7b7eb26"}, + {file = "pandas-2.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9cb1e14fdb546396b7e1b923ffaeeac24e4cedd14266c3497216dd4448e4f2d"}, + {file = "pandas-2.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9cd88488cceb7635aebb84809d087468eb33551097d600c6dad13602029c2df"}, + {file = "pandas-2.0.3-cp311-cp311-win32.whl", hash = "sha256:694888a81198786f0e164ee3a581df7d505024fbb1f15202fc7db88a71d84ebd"}, + {file = "pandas-2.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:6a21ab5c89dcbd57f78d0ae16630b090eec626360085a4148693def5452d8a6b"}, + {file = "pandas-2.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9e4da0d45e7f34c069fe4d522359df7d23badf83abc1d1cef398895822d11061"}, + {file = "pandas-2.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:32fca2ee1b0d93dd71d979726b12b61faa06aeb93cf77468776287f41ff8fdc5"}, + {file = "pandas-2.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:258d3624b3ae734490e4d63c430256e716f488c4fcb7c8e9bde2d3aa46c29089"}, + {file = "pandas-2.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eae3dc34fa1aa7772dd3fc60270d13ced7346fcbcfee017d3132ec625e23bb0"}, + {file = "pandas-2.0.3-cp38-cp38-win32.whl", hash = "sha256:f3421a7afb1a43f7e38e82e844e2bca9a6d793d66c1a7f9f0ff39a795bbc5e02"}, + {file = "pandas-2.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:69d7f3884c95da3a31ef82b7618af5710dba95bb885ffab339aad925c3e8ce78"}, + {file = "pandas-2.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5247fb1ba347c1261cbbf0fcfba4a3121fbb4029d95d9ef4dc45406620b25c8b"}, + {file = "pandas-2.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:81af086f4543c9d8bb128328b5d32e9986e0c84d3ee673a2ac6fb57fd14f755e"}, + {file = "pandas-2.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1994c789bf12a7c5098277fb43836ce090f1073858c10f9220998ac74f37c69b"}, + {file = "pandas-2.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ec591c48e29226bcbb316e0c1e9423622bc7a4eaf1ef7c3c9fa1a3981f89641"}, + {file = "pandas-2.0.3-cp39-cp39-win32.whl", hash = "sha256:04dbdbaf2e4d46ca8da896e1805bc04eb85caa9a82e259e8eed00254d5e0c682"}, + {file = "pandas-2.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:1168574b036cd8b93abc746171c9b4f1b83467438a5e45909fed645cf8692dbc"}, + {file = "pandas-2.0.3.tar.gz", hash = "sha256:c02f372a88e0d17f36d3093a644c73cfc1788e876a7c4bcb4020a77512e2043c"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.20.3", markers = "python_version < \"3.10\""}, + {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, + {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.1" + +[package.extras] +all = ["PyQt5 (>=5.15.1)", "SQLAlchemy (>=1.4.16)", "beautifulsoup4 (>=4.9.3)", "bottleneck (>=1.3.2)", "brotlipy (>=0.7.0)", "fastparquet (>=0.6.3)", "fsspec (>=2021.07.0)", "gcsfs (>=2021.07.0)", "html5lib (>=1.1)", "hypothesis (>=6.34.2)", "jinja2 (>=3.0.0)", "lxml (>=4.6.3)", "matplotlib (>=3.6.1)", "numba (>=0.53.1)", "numexpr (>=2.7.3)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pandas-gbq (>=0.15.0)", "psycopg2 (>=2.8.6)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)", "python-snappy (>=0.6.0)", "pyxlsb (>=1.0.8)", "qtpy (>=2.2.0)", "s3fs (>=2021.08.0)", "scipy (>=1.7.1)", "tables (>=3.6.1)", "tabulate (>=0.8.9)", "xarray (>=0.21.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)", "zstandard (>=0.15.2)"] +aws = ["s3fs (>=2021.08.0)"] +clipboard = ["PyQt5 (>=5.15.1)", "qtpy (>=2.2.0)"] +compression = ["brotlipy (>=0.7.0)", "python-snappy (>=0.6.0)", "zstandard (>=0.15.2)"] +computation = ["scipy (>=1.7.1)", "xarray (>=0.21.0)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pyxlsb (>=1.0.8)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)"] +feather = ["pyarrow (>=7.0.0)"] +fss = ["fsspec (>=2021.07.0)"] +gcp = ["gcsfs (>=2021.07.0)", "pandas-gbq (>=0.15.0)"] +hdf5 = ["tables (>=3.6.1)"] +html = ["beautifulsoup4 (>=4.9.3)", "html5lib (>=1.1)", "lxml (>=4.6.3)"] +mysql = ["SQLAlchemy (>=1.4.16)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.0.0)", "tabulate (>=0.8.9)"] +parquet = ["pyarrow (>=7.0.0)"] +performance = ["bottleneck (>=1.3.2)", "numba (>=0.53.1)", "numexpr (>=2.7.1)"] +plot = ["matplotlib (>=3.6.1)"] +postgresql = ["SQLAlchemy (>=1.4.16)", "psycopg2 (>=2.8.6)"] +spss = ["pyreadstat (>=1.1.2)"] +sql-other = ["SQLAlchemy (>=1.4.16)"] +test = ["hypothesis (>=6.34.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.6.3)"] + +[[package]] +name = "pandas-stubs" +version = "2.2.1.240316" +description = "Type annotations for pandas" +optional = true +python-versions = ">=3.9" +files = [ + {file = "pandas_stubs-2.2.1.240316-py3-none-any.whl", hash = "sha256:0126a26451a37cb893ea62357ca87ba3d181bd999ec8ba2ca5602e20207d6682"}, + {file = "pandas_stubs-2.2.1.240316.tar.gz", hash = "sha256:236a4f812fb6b1922e9607ff09e427f6d8540c421c9e5a40e3e4ddf7adac7f05"}, +] + +[package.dependencies] +numpy = {version = ">=1.26.0", markers = "python_version < \"3.13\""} +types-pytz = ">=2022.1.1" + +[[package]] +name = "pastel" +version = "0.2.1" +description = "Bring colors to your terminal." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pastel-0.2.1-py2.py3-none-any.whl", hash = "sha256:4349225fcdf6c2bb34d483e523475de5bb04a5c10ef711263452cb37d7dd4364"}, + {file = "pastel-0.2.1.tar.gz", hash = "sha256:e6581ac04e973cac858828c6202c1e1e81fee1dc7de7683f3e1ffe0bfd8a573d"}, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "pdf2image" +version = "1.16.3" +description = "A wrapper around the pdftoppm and pdftocairo command line tools to convert PDF to a PIL Image list." +optional = true +python-versions = "*" +files = [ + {file = "pdf2image-1.16.3-py3-none-any.whl", hash = "sha256:b6154164af3677211c22cbb38b2bd778b43aca02758e962fe1e231f6d3b0e380"}, + {file = "pdf2image-1.16.3.tar.gz", hash = "sha256:74208810c2cef4d9e347769b8e62a52303982ddb4f2dfd744c7ab4b940ae287e"}, +] + +[package.dependencies] +pillow = "*" + +[[package]] +name = "pdfminer-six" +version = "20221105" +description = "PDF parser and analyzer" +optional = true +python-versions = ">=3.6" +files = [ + {file = "pdfminer.six-20221105-py3-none-any.whl", hash = "sha256:1eaddd712d5b2732f8ac8486824533514f8ba12a0787b3d5fe1e686cd826532d"}, + {file = "pdfminer.six-20221105.tar.gz", hash = "sha256:8448ab7b939d18b64820478ecac5394f482d7a79f5f7eaa7703c6c959c175e1d"}, +] + +[package.dependencies] +charset-normalizer = ">=2.0.0" +cryptography = ">=36.0.0" + +[package.extras] +dev = ["black", "mypy (==0.931)", "nox", "pytest"] +docs = ["sphinx", "sphinx-argparse"] +image = ["Pillow"] + +[[package]] +name = "pendulum" +version = "2.1.2" +description = "Python datetimes made easy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, +] + +[package.dependencies] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" + +[[package]] +name = "pillow" +version = "10.3.0" +description = "Python Imaging Library (Fork)" +optional = true +python-versions = ">=3.8" +files = [ + {file = "pillow-10.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:90b9e29824800e90c84e4022dd5cc16eb2d9605ee13f05d47641eb183cd73d45"}, + {file = "pillow-10.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a2c405445c79c3f5a124573a051062300936b0281fee57637e706453e452746c"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78618cdbccaa74d3f88d0ad6cb8ac3007f1a6fa5c6f19af64b55ca170bfa1edf"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261ddb7ca91fcf71757979534fb4c128448b5b4c55cb6152d280312062f69599"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ce49c67f4ea0609933d01c0731b34b8695a7a748d6c8d186f95e7d085d2fe475"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b14f16f94cbc61215115b9b1236f9c18403c15dd3c52cf629072afa9d54c1cbf"}, + {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d33891be6df59d93df4d846640f0e46f1a807339f09e79a8040bc887bdcd7ed3"}, + {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b50811d664d392f02f7761621303eba9d1b056fb1868c8cdf4231279645c25f5"}, + {file = "pillow-10.3.0-cp310-cp310-win32.whl", hash = "sha256:ca2870d5d10d8726a27396d3ca4cf7976cec0f3cb706debe88e3a5bd4610f7d2"}, + {file = "pillow-10.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:f0d0591a0aeaefdaf9a5e545e7485f89910c977087e7de2b6c388aec32011e9f"}, + {file = "pillow-10.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:ccce24b7ad89adb5a1e34a6ba96ac2530046763912806ad4c247356a8f33a67b"}, + {file = "pillow-10.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:5f77cf66e96ae734717d341c145c5949c63180842a545c47a0ce7ae52ca83795"}, + {file = "pillow-10.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4b878386c4bf293578b48fc570b84ecfe477d3b77ba39a6e87150af77f40c57"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdcbb4068117dfd9ce0138d068ac512843c52295ed996ae6dd1faf537b6dbc27"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9797a6c8fe16f25749b371c02e2ade0efb51155e767a971c61734b1bf6293994"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:9e91179a242bbc99be65e139e30690e081fe6cb91a8e77faf4c409653de39451"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:1b87bd9d81d179bd8ab871603bd80d8645729939f90b71e62914e816a76fc6bd"}, + {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:81d09caa7b27ef4e61cb7d8fbf1714f5aec1c6b6c5270ee53504981e6e9121ad"}, + {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:048ad577748b9fa4a99a0548c64f2cb8d672d5bf2e643a739ac8faff1164238c"}, + {file = "pillow-10.3.0-cp311-cp311-win32.whl", hash = "sha256:7161ec49ef0800947dc5570f86568a7bb36fa97dd09e9827dc02b718c5643f09"}, + {file = "pillow-10.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:8eb0908e954d093b02a543dc963984d6e99ad2b5e36503d8a0aaf040505f747d"}, + {file = "pillow-10.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:4e6f7d1c414191c1199f8996d3f2282b9ebea0945693fb67392c75a3a320941f"}, + {file = "pillow-10.3.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:e46f38133e5a060d46bd630faa4d9fa0202377495df1f068a8299fd78c84de84"}, + {file = "pillow-10.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:50b8eae8f7334ec826d6eeffaeeb00e36b5e24aa0b9df322c247539714c6df19"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d3bea1c75f8c53ee4d505c3e67d8c158ad4df0d83170605b50b64025917f338"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19aeb96d43902f0a783946a0a87dbdad5c84c936025b8419da0a0cd7724356b1"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:74d28c17412d9caa1066f7a31df8403ec23d5268ba46cd0ad2c50fb82ae40462"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:ff61bfd9253c3915e6d41c651d5f962da23eda633cf02262990094a18a55371a"}, + {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d886f5d353333b4771d21267c7ecc75b710f1a73d72d03ca06df49b09015a9ef"}, + {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b5ec25d8b17217d635f8935dbc1b9aa5907962fae29dff220f2659487891cd3"}, + {file = "pillow-10.3.0-cp312-cp312-win32.whl", hash = "sha256:51243f1ed5161b9945011a7360e997729776f6e5d7005ba0c6879267d4c5139d"}, + {file = "pillow-10.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:412444afb8c4c7a6cc11a47dade32982439925537e483be7c0ae0cf96c4f6a0b"}, + {file = "pillow-10.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:798232c92e7665fe82ac085f9d8e8ca98826f8e27859d9a96b41d519ecd2e49a"}, + {file = "pillow-10.3.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:4eaa22f0d22b1a7e93ff0a596d57fdede2e550aecffb5a1ef1106aaece48e96b"}, + {file = "pillow-10.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cd5e14fbf22a87321b24c88669aad3a51ec052eb145315b3da3b7e3cc105b9a2"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1530e8f3a4b965eb6a7785cf17a426c779333eb62c9a7d1bbcf3ffd5bf77a4aa"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d512aafa1d32efa014fa041d38868fda85028e3f930a96f85d49c7d8ddc0383"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:339894035d0ede518b16073bdc2feef4c991ee991a29774b33e515f1d308e08d"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:aa7e402ce11f0885305bfb6afb3434b3cd8f53b563ac065452d9d5654c7b86fd"}, + {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0ea2a783a2bdf2a561808fe4a7a12e9aa3799b701ba305de596bc48b8bdfce9d"}, + {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c78e1b00a87ce43bb37642c0812315b411e856a905d58d597750eb79802aaaa3"}, + {file = "pillow-10.3.0-cp38-cp38-win32.whl", hash = "sha256:72d622d262e463dfb7595202d229f5f3ab4b852289a1cd09650362db23b9eb0b"}, + {file = "pillow-10.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:2034f6759a722da3a3dbd91a81148cf884e91d1b747992ca288ab88c1de15999"}, + {file = "pillow-10.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2ed854e716a89b1afcedea551cd85f2eb2a807613752ab997b9974aaa0d56936"}, + {file = "pillow-10.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dc1a390a82755a8c26c9964d457d4c9cbec5405896cba94cf51f36ea0d855002"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4203efca580f0dd6f882ca211f923168548f7ba334c189e9eab1178ab840bf60"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3102045a10945173d38336f6e71a8dc71bcaeed55c3123ad4af82c52807b9375"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6fb1b30043271ec92dc65f6d9f0b7a830c210b8a96423074b15c7bc999975f57"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:1dfc94946bc60ea375cc39cff0b8da6c7e5f8fcdc1d946beb8da5c216156ddd8"}, + {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b09b86b27a064c9624d0a6c54da01c1beaf5b6cadfa609cf63789b1d08a797b9"}, + {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d3b2348a78bc939b4fed6552abfd2e7988e0f81443ef3911a4b8498ca084f6eb"}, + {file = "pillow-10.3.0-cp39-cp39-win32.whl", hash = "sha256:45ebc7b45406febf07fef35d856f0293a92e7417ae7933207e90bf9090b70572"}, + {file = "pillow-10.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:0ba26351b137ca4e0db0342d5d00d2e355eb29372c05afd544ebf47c0956ffeb"}, + {file = "pillow-10.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:50fd3f6b26e3441ae07b7c979309638b72abc1a25da31a81a7fbd9495713ef4f"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6b02471b72526ab8a18c39cb7967b72d194ec53c1fd0a70b050565a0f366d355"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8ab74c06ffdab957d7670c2a5a6e1a70181cd10b727cd788c4dd9005b6a8acd9"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:048eeade4c33fdf7e08da40ef402e748df113fd0b4584e32c4af74fe78baaeb2"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e2ec1e921fd07c7cda7962bad283acc2f2a9ccc1b971ee4b216b75fad6f0463"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c8e73e99da7db1b4cad7f8d682cf6abad7844da39834c288fbfa394a47bbced"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:16563993329b79513f59142a6b02055e10514c1a8e86dca8b48a893e33cf91e3"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:dd78700f5788ae180b5ee8902c6aea5a5726bac7c364b202b4b3e3ba2d293170"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:aff76a55a8aa8364d25400a210a65ff59d0168e0b4285ba6bf2bd83cf675ba32"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b7bc2176354defba3edc2b9a777744462da2f8e921fbaf61e52acb95bafa9828"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:793b4e24db2e8742ca6423d3fde8396db336698c55cd34b660663ee9e45ed37f"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d93480005693d247f8346bc8ee28c72a2191bdf1f6b5db469c096c0c867ac015"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c83341b89884e2b2e55886e8fbbf37c3fa5efd6c8907124aeb72f285ae5696e5"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1a1d1915db1a4fdb2754b9de292642a39a7fb28f1736699527bb649484fb966a"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a0eaa93d054751ee9964afa21c06247779b90440ca41d184aeb5d410f20ff591"}, + {file = "pillow-10.3.0.tar.gz", hash = "sha256:9d2455fbf44c914840c793e89aa82d0e1763a14253a000743719ae5946814b2d"}, +] + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] +fpx = ["olefile"] +mic = ["olefile"] +tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] +typing = ["typing-extensions"] +xmp = ["defusedxml"] + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "plotly" +version = "5.20.0" +description = "An open-source, interactive data visualization library for Python" +optional = true +python-versions = ">=3.8" +files = [ + {file = "plotly-5.20.0-py3-none-any.whl", hash = "sha256:837a9c8aa90f2c0a2f0d747b82544d014dc2a2bdde967b5bb1da25b53932d1a9"}, + {file = "plotly-5.20.0.tar.gz", hash = "sha256:bf901c805d22032cfa534b2ff7c5aa6b0659e037f19ec1e0cca7f585918b5c89"}, +] + +[package.dependencies] +packaging = "*" +tenacity = ">=6.2.0" + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "poethepoet" +version = "0.24.4" +description = "A task runner that works well with poetry." +optional = false +python-versions = ">=3.8" +files = [ + {file = "poethepoet-0.24.4-py3-none-any.whl", hash = "sha256:fb4ea35d7f40fe2081ea917d2e4102e2310fda2cde78974050ca83896e229075"}, + {file = "poethepoet-0.24.4.tar.gz", hash = "sha256:ff4220843a87c888cbcb5312c8905214701d0af60ac7271795baa8369b428fef"}, +] + +[package.dependencies] +pastel = ">=0.2.1,<0.3.0" +tomli = ">=1.2.2" + +[package.extras] +poetry-plugin = ["poetry (>=1.0,<2.0)"] + +[[package]] +name = "prance" +version = "0.22.2.22.0" +description = "Resolving Swagger/OpenAPI 2.0 and 3.0.0 Parser" +optional = false +python-versions = ">=3.8" +files = [ + {file = "prance-0.22.2.22.0-py3-none-any.whl", hash = "sha256:57deeb67b7e93ef27c1c17845bf3ccb4af288ccfb5748c7e01779c01a8507f27"}, + {file = "prance-0.22.2.22.0.tar.gz", hash = "sha256:9a83f8a4f5fe0f2d896d238d4bec6b5788b10b94155414b3d88c21c1579b85bf"}, +] + +[package.dependencies] +chardet = ">=3.0" +packaging = ">=21.3" +requests = ">=2.25" +"ruamel.yaml" = ">=0.17.10" +six = ">=1.15,<2.0" + +[package.extras] +cli = ["click (>=7.0)"] +dev = ["bumpversion (>=0.6)", "pytest (>=6.1)", "pytest-cov (>=2.11)", "sphinx (>=3.4)", "towncrier (>=19.2)", "tox (>=3.4)"] +flex = ["flex (>=6.13,<7.0)"] +icu = ["PyICU (>=2.4,<3.0)"] +osv = ["openapi-spec-validator (>=0.5.1,<0.6.0)"] +ssv = ["swagger-spec-validator (>=2.4,<3.0)"] + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] + +[[package]] +name = "pyarrow" +version = "15.0.2" +description = "Python library for Apache Arrow" +optional = true +python-versions = ">=3.8" +files = [ + {file = "pyarrow-15.0.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:88b340f0a1d05b5ccc3d2d986279045655b1fe8e41aba6ca44ea28da0d1455d8"}, + {file = "pyarrow-15.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eaa8f96cecf32da508e6c7f69bb8401f03745c050c1dd42ec2596f2e98deecac"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23c6753ed4f6adb8461e7c383e418391b8d8453c5d67e17f416c3a5d5709afbd"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f639c059035011db8c0497e541a8a45d98a58dbe34dc8fadd0ef128f2cee46e5"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:290e36a59a0993e9a5224ed2fb3e53375770f07379a0ea03ee2fce2e6d30b423"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:06c2bb2a98bc792f040bef31ad3e9be6a63d0cb39189227c08a7d955db96816e"}, + {file = "pyarrow-15.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:f7a197f3670606a960ddc12adbe8075cea5f707ad7bf0dffa09637fdbb89f76c"}, + {file = "pyarrow-15.0.2-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:5f8bc839ea36b1f99984c78e06e7a06054693dc2af8920f6fb416b5bca9944e4"}, + {file = "pyarrow-15.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f5e81dfb4e519baa6b4c80410421528c214427e77ca0ea9461eb4097c328fa33"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a4f240852b302a7af4646c8bfe9950c4691a419847001178662a98915fd7ee7"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e7d9cfb5a1e648e172428c7a42b744610956f3b70f524aa3a6c02a448ba853e"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2d4f905209de70c0eb5b2de6763104d5a9a37430f137678edfb9a675bac9cd98"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:90adb99e8ce5f36fbecbbc422e7dcbcbed07d985eed6062e459e23f9e71fd197"}, + {file = "pyarrow-15.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:b116e7fd7889294cbd24eb90cd9bdd3850be3738d61297855a71ac3b8124ee38"}, + {file = "pyarrow-15.0.2-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:25335e6f1f07fdaa026a61c758ee7d19ce824a866b27bba744348fa73bb5a440"}, + {file = "pyarrow-15.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:90f19e976d9c3d8e73c80be84ddbe2f830b6304e4c576349d9360e335cd627fc"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a22366249bf5fd40ddacc4f03cd3160f2d7c247692945afb1899bab8a140ddfb"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2a335198f886b07e4b5ea16d08ee06557e07db54a8400cc0d03c7f6a22f785f"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:3e6d459c0c22f0b9c810a3917a1de3ee704b021a5fb8b3bacf968eece6df098f"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:033b7cad32198754d93465dcfb71d0ba7cb7cd5c9afd7052cab7214676eec38b"}, + {file = "pyarrow-15.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:29850d050379d6e8b5a693098f4de7fd6a2bea4365bfd073d7c57c57b95041ee"}, + {file = "pyarrow-15.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:7167107d7fb6dcadb375b4b691b7e316f4368f39f6f45405a05535d7ad5e5058"}, + {file = "pyarrow-15.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e85241b44cc3d365ef950432a1b3bd44ac54626f37b2e3a0cc89c20e45dfd8bf"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:248723e4ed3255fcd73edcecc209744d58a9ca852e4cf3d2577811b6d4b59818"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ff3bdfe6f1b81ca5b73b70a8d482d37a766433823e0c21e22d1d7dde76ca33f"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:f3d77463dee7e9f284ef42d341689b459a63ff2e75cee2b9302058d0d98fe142"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:8c1faf2482fb89766e79745670cbca04e7018497d85be9242d5350cba21357e1"}, + {file = "pyarrow-15.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:28f3016958a8e45a1069303a4a4f6a7d4910643fc08adb1e2e4a7ff056272ad3"}, + {file = "pyarrow-15.0.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:89722cb64286ab3d4daf168386f6968c126057b8c7ec3ef96302e81d8cdb8ae4"}, + {file = "pyarrow-15.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cd0ba387705044b3ac77b1b317165c0498299b08261d8122c96051024f953cd5"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad2459bf1f22b6a5cdcc27ebfd99307d5526b62d217b984b9f5c974651398832"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58922e4bfece8b02abf7159f1f53a8f4d9f8e08f2d988109126c17c3bb261f22"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:adccc81d3dc0478ea0b498807b39a8d41628fa9210729b2f718b78cb997c7c91"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:8bd2baa5fe531571847983f36a30ddbf65261ef23e496862ece83bdceb70420d"}, + {file = "pyarrow-15.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6669799a1d4ca9da9c7e06ef48368320f5856f36f9a4dd31a11839dda3f6cc8c"}, + {file = "pyarrow-15.0.2.tar.gz", hash = "sha256:9c9bc803cb3b7bfacc1e96ffbfd923601065d9d3f911179d81e72d99fd74a3d9"}, +] + +[package.dependencies] +numpy = ">=1.16.6,<2" + +[[package]] +name = "pycodestyle" +version = "2.11.1" +description = "Python style guide checker" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, + {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, +] + +[[package]] +name = "pycparser" +version = "2.22" +description = "C parser in Python" +optional = true +python-versions = ">=3.8" +files = [ + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, +] + +[[package]] +name = "pydantic" +version = "1.10.15" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50f1666a9940d3d68683c9d96e39640f709d7a72ff8702987dab1761036206bb"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82790d4753ee5d00739d6cb5cf56bceb186d9d6ce134aca3ba7befb1eedbc2c8"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d207d5b87f6cbefbdb1198154292faee8017d7495a54ae58db06762004500d00"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e49db944fad339b2ccb80128ffd3f8af076f9f287197a480bf1e4ca053a866f0"}, + {file = "pydantic-1.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:d3b5c4cbd0c9cb61bbbb19ce335e1f8ab87a811f6d589ed52b0254cf585d709c"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3d5731a120752248844676bf92f25a12f6e45425e63ce22e0849297a093b5b0"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c365ad9c394f9eeffcb30a82f4246c0006417f03a7c0f8315d6211f25f7cb654"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3287e1614393119c67bd4404f46e33ae3be3ed4cd10360b48d0a4459f420c6a3"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be51dd2c8596b25fe43c0a4a59c2bee4f18d88efb8031188f9e7ddc6b469cf44"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6a51a1dd4aa7b3f1317f65493a182d3cff708385327c1c82c81e4a9d6d65b2e4"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4e316e54b5775d1eb59187f9290aeb38acf620e10f7fd2f776d97bb788199e53"}, + {file = "pydantic-1.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:0d142fa1b8f2f0ae11ddd5e3e317dcac060b951d605fda26ca9b234b92214986"}, + {file = "pydantic-1.10.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7ea210336b891f5ea334f8fc9f8f862b87acd5d4a0cbc9e3e208e7aa1775dabf"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3453685ccd7140715e05f2193d64030101eaad26076fad4e246c1cc97e1bb30d"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bea1f03b8d4e8e86702c918ccfd5d947ac268f0f0cc6ed71782e4b09353b26f"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:af9850d98fc21e5bc24ea9e35dd80a29faf6462c608728a110c0a30b595e58b7"}, + {file = "pydantic-1.10.15-cp37-cp37m-win_amd64.whl", hash = "sha256:d31ee5b14a82c9afe2bd26aaa405293d4237d0591527d9129ce36e58f19f95c1"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5e09c19df304b8123938dc3c53d3d3be6ec74b9d7d0d80f4f4b5432ae16c2022"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7ac9237cd62947db00a0d16acf2f3e00d1ae9d3bd602b9c415f93e7a9fc10528"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:584f2d4c98ffec420e02305cf675857bae03c9d617fcfdc34946b1160213a948"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbc6989fad0c030bd70a0b6f626f98a862224bc2b1e36bfc531ea2facc0a340c"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d573082c6ef99336f2cb5b667b781d2f776d4af311574fb53d908517ba523c22"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6bd7030c9abc80134087d8b6e7aa957e43d35714daa116aced57269a445b8f7b"}, + {file = "pydantic-1.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:3350f527bb04138f8aff932dc828f154847fbdc7a1a44c240fbfff1b57f49a12"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51d405b42f1b86703555797270e4970a9f9bd7953f3990142e69d1037f9d9e51"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a980a77c52723b0dc56640ced396b73a024d4b74f02bcb2d21dbbac1debbe9d0"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f1a1fb467d3f49e1708a3f632b11c69fccb4e748a325d5a491ddc7b5d22383"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:676ed48f2c5bbad835f1a8ed8a6d44c1cd5a21121116d2ac40bd1cd3619746ed"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92229f73400b80c13afcd050687f4d7e88de9234d74b27e6728aa689abcf58cc"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2746189100c646682eff0bce95efa7d2e203420d8e1c613dc0c6b4c1d9c1fde4"}, + {file = "pydantic-1.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:394f08750bd8eaad714718812e7fab615f873b3cdd0b9d84e76e51ef3b50b6b7"}, + {file = "pydantic-1.10.15-py3-none-any.whl", hash = "sha256:28e552a060ba2740d0d2aabe35162652c1459a0b9069fe0db7f4ee0e18e74d58"}, + {file = "pydantic-1.10.15.tar.gz", hash = "sha256:ca832e124eda231a60a041da4f013e3ff24949d94a01154b137fc2f2a43c3ffb"}, +] + +[package.dependencies] +email-validator = {version = ">=1.0.3", optional = true, markers = "extra == \"email\""} +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pyflakes" +version = "3.1.0" +description = "passive checker of Python programs" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"}, + {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, +] + +[[package]] +name = "pygments" +version = "2.17.2" +description = "Pygments is a syntax highlighting package written in Python." +optional = true +python-versions = ">=3.7" +files = [ + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, +] + +[package.extras] +plugins = ["importlib-metadata"] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pyparsing" +version = "3.1.2" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +optional = true +python-versions = ">=3.6.8" +files = [ + {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, + {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, +] + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + +[[package]] +name = "pyproject-flake8" +version = "6.1.0" +description = "pyproject-flake8 (`pflake8`), a monkey patching wrapper to connect flake8 with pyproject.toml configuration" +optional = false +python-versions = ">=3.8.1" +files = [ + {file = "pyproject_flake8-6.1.0-py3-none-any.whl", hash = "sha256:86ea5559263c098e1aa4f866776aa2cf45362fd91a576b9fd8fbbbb55db12c4e"}, + {file = "pyproject_flake8-6.1.0.tar.gz", hash = "sha256:6da8e5a264395e0148bc11844c6fb50546f1fac83ac9210f7328664135f9e70f"}, +] + +[package.dependencies] +flake8 = "6.1.0" +tomli = {version = "*", markers = "python_version < \"3.11\""} + +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + +[[package]] +name = "pyrsistent" +version = "0.20.0" +description = "Persistent/Functional/Immutable data structures" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, +] + +[[package]] +name = "pysnooper" +version = "1.2.0" +description = "A poor man's debugger for Python." +optional = false +python-versions = "*" +files = [ + {file = "PySnooper-1.2.0-py2.py3-none-any.whl", hash = "sha256:aa859aa9a746cffc1f35e4ee469d49c3cc5185b5fc0c571feb3af3c94d2eb625"}, + {file = "PySnooper-1.2.0.tar.gz", hash = "sha256:810669e162a250a066d8662e573adbc5af770e937c5b5578f28bb7355d1c859b"}, +] + +[package.extras] +tests = ["pytest"] + +[[package]] +name = "pytesseract" +version = "0.3.10" +description = "Python-tesseract is a python wrapper for Google's Tesseract-OCR" +optional = true +python-versions = ">=3.7" +files = [ + {file = "pytesseract-0.3.10-py3-none-any.whl", hash = "sha256:8f22cc98f765bf13517ead0c70effedb46c153540d25783e04014f28b55a5fc6"}, + {file = "pytesseract-0.3.10.tar.gz", hash = "sha256:f1c3a8b0f07fd01a1085d451f5b8315be6eec1d5577a6796d46dc7a62bd4120f"}, +] + +[package.dependencies] +packaging = ">=21.3" +Pillow = ">=8.0.0" + +[[package]] +name = "pytest" +version = "6.2.5" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, + {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, +] + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +toml = "*" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] + +[[package]] +name = "pytest-cov" +version = "5.0.0" +description = "Pytest plugin for measuring coverage." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"}, + {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] + +[[package]] +name = "pytest-httpserver" +version = "1.0.10" +description = "pytest-httpserver is a httpserver for pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest_httpserver-1.0.10-py3-none-any.whl", hash = "sha256:d40e0cc3d61ed6e4d80f52a796926d557a7db62b17e43b3e258a78a3c34becb9"}, + {file = "pytest_httpserver-1.0.10.tar.gz", hash = "sha256:77b9fbc2eb0a129cfbbacc8fe57e8cafe071d506489f31fe31e62f1b332d9905"}, +] + +[package.dependencies] +Werkzeug = ">=2.0.0" + +[[package]] +name = "pytest-mock" +version = "3.14.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, +] + +[package.dependencies] +pytest = ">=6.2.5" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-docx" +version = "1.1.0" +description = "Create, read, and update Microsoft Word .docx files." +optional = true +python-versions = ">=3.7" +files = [ + {file = "python-docx-1.1.0.tar.gz", hash = "sha256:5829b722141cf1ab79aedf0c34d9fe9924b29764584c0f2164eb2b02dcdf17c9"}, + {file = "python_docx-1.1.0-py3-none-any.whl", hash = "sha256:bac9773278098a1ddc43a52d84e22f5909c4a3080a624530b3ecb3771b07c6cd"}, +] + +[package.dependencies] +lxml = ">=3.1.0" +typing-extensions = "*" + +[[package]] +name = "python-iso639" +version = "2024.2.7" +description = "Look-up utilities for ISO 639 language codes and names" +optional = true +python-versions = ">=3.8" +files = [ + {file = "python-iso639-2024.2.7.tar.gz", hash = "sha256:c323233348c34d57c601e3e6d824088e492896bcb97a61a87f7d93401a305377"}, + {file = "python_iso639-2024.2.7-py3-none-any.whl", hash = "sha256:7b149623ff74230f4ee3061fb01d18e57a8d07c5fee2aa72907f39b7f6d16cbc"}, +] + +[package.extras] +dev = ["black (==24.1.1)", "build (==1.0.3)", "flake8 (==7.0.0)", "pytest (==8.0.0)", "twine (==4.0.2)"] + +[[package]] +name = "python-magic" +version = "0.4.27" +description = "File type identification using libmagic" +optional = true +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "python-magic-0.4.27.tar.gz", hash = "sha256:c1ba14b08e4a5f5c31a302b7721239695b2f0f058d125bd5ce1ee36b9d9d3c3b"}, + {file = "python_magic-0.4.27-py2.py3-none-any.whl", hash = "sha256:c212960ad306f700aa0d01e5d7a325d20548ff97eb9920dcd29513174f0294d3"}, +] + +[[package]] +name = "python-pptx" +version = "0.6.21" +description = "Generate and manipulate Open XML PowerPoint (.pptx) files" +optional = true +python-versions = "*" +files = [ + {file = "python-pptx-0.6.21.tar.gz", hash = "sha256:7798a2aaf89563565b3c7120c0acfe9aff775db0db3580544e3bf4840c2e378f"}, +] + +[package.dependencies] +lxml = ">=3.1.0" +Pillow = ">=3.3.2" +XlsxWriter = ">=0.5.7" + +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "rapidfuzz" +version = "3.8.1" +description = "rapid fuzzy string matching" +optional = true +python-versions = ">=3.8" +files = [ + {file = "rapidfuzz-3.8.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1b176f01490b48337183da5b4223005bc0c2354a4faee5118917d2fba0bedc1c"}, + {file = "rapidfuzz-3.8.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0798e32304b8009d215026bf7e1c448f1831da0a03987b7de30059a41bee92f3"}, + {file = "rapidfuzz-3.8.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ad4dbd06c1f579eb043b2dcfc635bc6c9fb858240a70f0abd3bed84d8ac79994"}, + {file = "rapidfuzz-3.8.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6ec696a268e8d730b42711537e500f7397afc06125c0e8fa9c8211386d315a5"}, + {file = "rapidfuzz-3.8.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a8a007fdc5cf646e48e361a39eabe725b93af7673c5ab90294e551cae72ff58"}, + {file = "rapidfuzz-3.8.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:68b185a0397aebe78bcc5d0e1efd96509d4e2f3c4a05996e5c843732f547e9ef"}, + {file = "rapidfuzz-3.8.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:267ff42370e031195e3020fff075420c136b69dc918ecb5542ec75c1e36af81f"}, + {file = "rapidfuzz-3.8.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:987cd277d27d14301019fdf61c17524f6127f5d364be5482228726049d8e0d10"}, + {file = "rapidfuzz-3.8.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bc5a1ec3bd05b55d3070d557c0cdd4412272d51b4966c79aa3e9da207bd33d65"}, + {file = "rapidfuzz-3.8.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa223c73c59cc45c12eaa9c439318084003beced0447ff92b578a890288e19eb"}, + {file = "rapidfuzz-3.8.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:d4276c7ee061db0bac54846933b40339f60085523675f917f37de24a4b3ce0ee"}, + {file = "rapidfuzz-3.8.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:2ba0e43e9a94d256a704a674c7010e6f8ef9225edf7287cf3e7f66c9894b06cd"}, + {file = "rapidfuzz-3.8.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c22b32a57ab47afb207e8fe4bd7bb58c90f9291a63723cafd4e704742166e368"}, + {file = "rapidfuzz-3.8.1-cp310-cp310-win32.whl", hash = "sha256:50db3867864422bf6a6435ea65b9ac9de71ef52ed1e05d62f498cd430189eece"}, + {file = "rapidfuzz-3.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:bca5acf77508d1822023a85118c2dd8d3c16abdd56d2762359a46deb14daa5e0"}, + {file = "rapidfuzz-3.8.1-cp310-cp310-win_arm64.whl", hash = "sha256:c763d99cf087e7b2c5be0cf34ae9a0e1b031f5057d2341a0a0ed782458645b7e"}, + {file = "rapidfuzz-3.8.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:30c282612b7ebf2d7646ebebfd98dd308c582246a94d576734e4b0162f57baf4"}, + {file = "rapidfuzz-3.8.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c6a43446f0cd8ff347b1fbb918dc0d657bebf484ddfa960ee069e422a477428"}, + {file = "rapidfuzz-3.8.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4969fe0eb179aedacee53ca8f8f1be3c655964a6d62db30f247fee444b9c52b4"}, + {file = "rapidfuzz-3.8.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:799f5f221d639d1c2ed8a2348d1edf5e22aa489b58b2cc99f5bf0c1917e2d0f2"}, + {file = "rapidfuzz-3.8.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e62bde7d5df3312acc528786ee801c472cae5078b1f1e42761c853ba7fe1072a"}, + {file = "rapidfuzz-3.8.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ea3d2e41d8fac71cb63ee72f75bee0ed1e9c50709d4c58587f15437761c1858"}, + {file = "rapidfuzz-3.8.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f34a541895627c2bc9ef7757f16f02428a08d960d33208adfb96b33338d0945"}, + {file = "rapidfuzz-3.8.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0643a25937fafe8d117f2907606e9940cd1cc905c66f16ece9ab93128299994"}, + {file = "rapidfuzz-3.8.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:63044a7b6791a2e945dce9d812a6886e93159deb0464984eb403617ded257f08"}, + {file = "rapidfuzz-3.8.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bbc15985c5658691f637a6b97651771147744edfad2a4be56b8a06755e3932fa"}, + {file = "rapidfuzz-3.8.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:48b6e5a337a814aec7c6dda5d6460f947c9330860615301f35b519e16dde3c77"}, + {file = "rapidfuzz-3.8.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:8c40da44ca20235cda05751d6e828b6b348e7a7c5de2922fa0f9c63f564fd675"}, + {file = "rapidfuzz-3.8.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c21d5c7cfa6078c79897e5e482a7e84ff927143d2f3fb020dd6edd27f5469574"}, + {file = "rapidfuzz-3.8.1-cp311-cp311-win32.whl", hash = "sha256:209bb712c448cdec4def6260b9f059bd4681ec61a01568f5e70e37bfe9efe830"}, + {file = "rapidfuzz-3.8.1-cp311-cp311-win_amd64.whl", hash = "sha256:6f7641992de44ec2ca54102422be44a8e3fb75b9690ccd74fff72b9ac7fc00ee"}, + {file = "rapidfuzz-3.8.1-cp311-cp311-win_arm64.whl", hash = "sha256:c458085e067c766112f089f78ce39eab2b69ba027d7bbb11d067a0b085774367"}, + {file = "rapidfuzz-3.8.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1905d9319a97bed29f21584ca641190dbc9218a556202b77876f1e37618d2e03"}, + {file = "rapidfuzz-3.8.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f176867f438ff2a43e6a837930153ca78fddb3ca94e378603a1e7b860d7869bf"}, + {file = "rapidfuzz-3.8.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25498650e30122f4a5ad6b27c7614b4af8628c1d32b19d406410d33f77a86c80"}, + {file = "rapidfuzz-3.8.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16153a97efacadbd693ccc612a3285df2f072fd07c121f30c2c135a709537075"}, + {file = "rapidfuzz-3.8.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c0264d03dcee1bb975975b77c2fe041820fb4d4a25a99e3cb74ddd083d671ca"}, + {file = "rapidfuzz-3.8.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:17d79398849c1244f646425cf31d856eab9ebd67b7d6571273e53df724ca817e"}, + {file = "rapidfuzz-3.8.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e08b01dc9369941a24d7e512b0d81bf514e7d6add1b93d8aeec3c8fa08a824e"}, + {file = "rapidfuzz-3.8.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97c13f156f14f10667e1cfc4257069b775440ce005e896c09ce3aff21c9ae665"}, + {file = "rapidfuzz-3.8.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8b76abfec195bf1ee6f9ec56c33ba5e9615ff2d0a9530a54001ed87e5a6ced3b"}, + {file = "rapidfuzz-3.8.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:b0ba20be465566264fa5580d874ccf5eabba6975dba45857e2c76e2df3359c6d"}, + {file = "rapidfuzz-3.8.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:4d5cd86aca3f12e73bfc70015db7e8fc44122da03aa3761138b95112e83f66e4"}, + {file = "rapidfuzz-3.8.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:9a16ef3702cecf16056c5fd66398b7ea8622ff4e3afeb00a8db3e74427e850af"}, + {file = "rapidfuzz-3.8.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:392582aa784737d95255ca122ebe7dca3c774da900d100c07b53d32cd221a60e"}, + {file = "rapidfuzz-3.8.1-cp312-cp312-win32.whl", hash = "sha256:ceb10039e7346927cec47eaa490b34abb602b537e738ee9914bb41b8de029fbc"}, + {file = "rapidfuzz-3.8.1-cp312-cp312-win_amd64.whl", hash = "sha256:cc4af7090a626c902c48db9b5d786c1faa0d8e141571e8a63a5350419ea575bd"}, + {file = "rapidfuzz-3.8.1-cp312-cp312-win_arm64.whl", hash = "sha256:3aff3b829b0b04bdf78bd780ec9faf5f26eac3591df98c35a0ae216c925ae436"}, + {file = "rapidfuzz-3.8.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:78a0d2a11bb3936463609777c6d6d4984a27ebb2360b58339c699899d85db036"}, + {file = "rapidfuzz-3.8.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f8af980695b866255447703bf634551e67e1a4e1c2d2d26501858d9233d886d7"}, + {file = "rapidfuzz-3.8.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d1a15fef1938b43468002f2d81012dbc9e7b50eb8533af202b0559c2dc7865d9"}, + {file = "rapidfuzz-3.8.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4dbb1ebc9a811f38da33f32ed2bb5f58b149289b89eb11e384519e9ba7ca881"}, + {file = "rapidfuzz-3.8.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:41219536634bd6f85419f38450ef080cfb519638125d805cf8626443e677dc61"}, + {file = "rapidfuzz-3.8.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e3f882110f2f4894942e314451773c47e8b1b4920b5ea2b6dd2e2d4079dd3135"}, + {file = "rapidfuzz-3.8.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c754ce1fab41b731259f100d5d46529a38aa2c9b683c92aeb7e96ef5b2898cd8"}, + {file = "rapidfuzz-3.8.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:718ea99f84b16c4bdbf6a93e53552cdccefa18e12ff9a02c5041e621460e2e61"}, + {file = "rapidfuzz-3.8.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9441aca94b21f7349cdb231cd0ce9ca251b2355836e8a02bf6ccbea5b442d7a9"}, + {file = "rapidfuzz-3.8.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:90167a48de3ed7f062058826608a80242b8561d0fb0cce2c610d741624811a61"}, + {file = "rapidfuzz-3.8.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:8e02425bfc7ebed617323a674974b70eaecd8f07b64a7d16e0bf3e766b93e3c9"}, + {file = "rapidfuzz-3.8.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:d48657a404fab82b2754faa813a10c5ad6aa594cb1829dca168a49438b61b4ec"}, + {file = "rapidfuzz-3.8.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f8b62fdccc429e6643cefffd5df9c7bca65588d06e8925b78014ad9ad983bf5"}, + {file = "rapidfuzz-3.8.1-cp38-cp38-win32.whl", hash = "sha256:63db612bb6da1bb9f6aa7412739f0e714b1910ec07bc675943044fe683ef192c"}, + {file = "rapidfuzz-3.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:bb571dbd4cc93342be0ba632f0b8d7de4cbd9d959d76371d33716d2216090d41"}, + {file = "rapidfuzz-3.8.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b27cea618601ca5032ea98ee116ca6e0fe67be7b286bcb0b9f956d64db697472"}, + {file = "rapidfuzz-3.8.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d5592b08e3cadc9e06ef3af6a9d66b6ef1bf871ed5acd7f9b1e162d78806a65"}, + {file = "rapidfuzz-3.8.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:58999b21d01dd353f49511a61937eac20c7a5b22eab87612063947081855d85f"}, + {file = "rapidfuzz-3.8.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2ee3909f611cc5860cc8d9f92d039fd84241ce7360b49ea88e657181d2b45f6"}, + {file = "rapidfuzz-3.8.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00b5ee47b387fa3805f4038362a085ec58149135dc5bc640ca315a9893a16f9e"}, + {file = "rapidfuzz-3.8.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4c647795c5b901091a68e210c76b769af70a33a8624ac496ac3e34d33366c0d"}, + {file = "rapidfuzz-3.8.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:77ea62879932b32aba77ab23a9296390a67d024bf2f048dee99143be80a4ce26"}, + {file = "rapidfuzz-3.8.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3fee62ae76e3b8b9fff8aa2ca4061575ee358927ffbdb2919a8c84a98da59f78"}, + {file = "rapidfuzz-3.8.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:231dc1cb63b1c8dd78c0597aa3ad3749a86a2b7e76af295dd81609522699a558"}, + {file = "rapidfuzz-3.8.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:827ddf2d5d157ac3d1001b52e84c9e20366237a742946599ffc435af7fdd26d0"}, + {file = "rapidfuzz-3.8.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c04ef83c9ca3162d200df36e933b3ea0327a2626cee2e01bbe55acbc004ce261"}, + {file = "rapidfuzz-3.8.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:747265f39978bbaad356f5c6b6c808f0e8f5e8994875af0119b82b4700c55387"}, + {file = "rapidfuzz-3.8.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:14791324f0c753f5a0918df1249b91515f5ddc16281fbaa5ec48bff8fa659229"}, + {file = "rapidfuzz-3.8.1-cp39-cp39-win32.whl", hash = "sha256:b7b9cbc60e3eb08da6d18636c62c6eb6206cd9d0c7ad73996f7a1df3fc415b27"}, + {file = "rapidfuzz-3.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:2084193fd8fd346db496a2220363437eb9370a06d1d5a7a9dba00a64390c6a28"}, + {file = "rapidfuzz-3.8.1-cp39-cp39-win_arm64.whl", hash = "sha256:c9597a05d08e8103ad59ebdf29e3fbffb0d0dbf3b641f102cfbeadc3a77bde51"}, + {file = "rapidfuzz-3.8.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5f4174079dfe8ed1f13ece9bde7660f19f98ab17e0c0d002d90cc845c3a7e238"}, + {file = "rapidfuzz-3.8.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07d7d4a3c49a15146d65f06e44d7545628ca0437c929684e32ef122852f44d95"}, + {file = "rapidfuzz-3.8.1-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1ef119fc127c982053fb9ec638dcc3277f83b034b5972eb05941984b9ec4a290"}, + {file = "rapidfuzz-3.8.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8e57f9c2367706a320b78e91f8bf9a3b03bf9069464eb7b54455fa340d03e4c"}, + {file = "rapidfuzz-3.8.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:6d4f1956fe1fc618e34ac79a6ed84fff5a6f23e41a8a476dd3e8570f0b12f02b"}, + {file = "rapidfuzz-3.8.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:313bdcd16e9cd5e5568b4a31d18a631f0b04cc10a3fd916e4ef75b713e6f177e"}, + {file = "rapidfuzz-3.8.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a02def2eb526cc934d2125533cf2f15aa71c72ed4397afca38427ab047901e88"}, + {file = "rapidfuzz-3.8.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9d5d924970b07128c61c08eebee718686f4bd9838ef712a50468169520c953f"}, + {file = "rapidfuzz-3.8.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1edafc0a2737df277d3ddf401f3a73f76e246b7502762c94a3916453ae67e9b1"}, + {file = "rapidfuzz-3.8.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:81fd28389bedab28251f0535b3c034b0e63a618efc3ff1d338c81a3da723adb3"}, + {file = "rapidfuzz-3.8.1.tar.gz", hash = "sha256:a357aae6791118011ad3ab4f2a4aa7bd7a487e5f9981b390e9f3c2c5137ecadf"}, +] + +[package.extras] +full = ["numpy"] + +[[package]] +name = "regex" +version = "2023.12.25" +description = "Alternative regular expression module, to replace re." +optional = true +python-versions = ">=3.7" +files = [ + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"}, + {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"}, + {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"}, + {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"}, + {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"}, + {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"}, + {file = "regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"}, + {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"}, + {file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = "sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"}, + {file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"}, + {file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"}, + {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"}, + {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"}, + {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"}, + {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-cache" +version = "1.2.0" +description = "A persistent cache for python requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, +] + +[package.dependencies] +attrs = ">=21.2" +cattrs = ">=22.2" +platformdirs = ">=2.5" +requests = ">=2.22" +url-normalize = ">=1.4" +urllib3 = ">=1.25.5" + +[package.extras] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] +bson = ["bson (>=0.5)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] +dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] +json = ["ujson (>=5.4)"] +mongodb = ["pymongo (>=3)"] +redis = ["redis (>=3)"] +security = ["itsdangerous (>=2.0)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "requests-mock" +version = "1.12.1" +description = "Mock out responses from the requests package" +optional = false +python-versions = ">=3.5" +files = [ + {file = "requests-mock-1.12.1.tar.gz", hash = "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401"}, + {file = "requests_mock-1.12.1-py2.py3-none-any.whl", hash = "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563"}, +] + +[package.dependencies] +requests = ">=2.22,<3" + +[package.extras] +fixture = ["fixtures"] + +[[package]] +name = "ruamel-yaml" +version = "0.18.6" +description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruamel.yaml-0.18.6-py3-none-any.whl", hash = "sha256:57b53ba33def16c4f3d807c0ccbc00f8a6081827e81ba2491691b76882d0c636"}, + {file = "ruamel.yaml-0.18.6.tar.gz", hash = "sha256:8b27e6a217e786c6fbe5634d8f3f11bc63e0f80f6a5890f28863d9c45aac311b"}, +] + +[package.dependencies] +"ruamel.yaml.clib" = {version = ">=0.2.7", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.13\""} + +[package.extras] +docs = ["mercurial (>5.7)", "ryd"] +jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] + +[[package]] +name = "ruamel-yaml-clib" +version = "0.2.8" +description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" +optional = false +python-versions = ">=3.6" +files = [ + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:aa2267c6a303eb483de8d02db2871afb5c5fc15618d894300b88958f729ad74f"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:840f0c7f194986a63d2c2465ca63af8ccbbc90ab1c6001b1978f05119b5e7334"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:024cfe1fc7c7f4e1aff4a81e718109e13409767e4f871443cbff3dba3578203d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_24_aarch64.whl", hash = "sha256:1707814f0d9791df063f8c19bb51b0d1278b8e9a2353abbb676c2f685dee6afe"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:46d378daaac94f454b3a0e3d8d78cafd78a026b1d71443f4966c696b48a6d899"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:09b055c05697b38ecacb7ac50bdab2240bfca1a0c4872b0fd309bb07dc9aa3a9"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_24_aarch64.whl", hash = "sha256:1dc67314e7e1086c9fdf2680b7b6c2be1c0d8e3a8279f2e993ca2a7545fecf62"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3213ece08ea033eb159ac52ae052a4899b56ecc124bb80020d9bbceeb50258e9"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aab7fd643f71d7946f2ee58cc88c9b7bfc97debd71dcc93e03e2d174628e7e2d"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win32.whl", hash = "sha256:5c365d91c88390c8d0a8545df0b5857172824b1c604e867161e6b3d59a827eaa"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win_amd64.whl", hash = "sha256:1758ce7d8e1a29d23de54a16ae867abd370f01b5a69e1a3ba75223eaa3ca1a1b"}, + {file = "ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:77159f5d5b5c14f7c34073862a6b7d34944075d9f93e681638f6d753606c6ce6"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4ecbf9c3e19f9562c7fdd462e8d18dd902a47ca046a2e64dba80699f0b6c09b7"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:87ea5ff66d8064301a154b3933ae406b0863402a799b16e4a1d24d9fbbcbe0d3"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win32.whl", hash = "sha256:75e1ed13e1f9de23c5607fe6bd1aeaae21e523b32d83bb33918245361e9cc51b"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:305889baa4043a09e5b76f8e2a51d4ffba44259f6b4c72dec8ca56207d9c6fe1"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e2b4c44b60eadec492926a7270abb100ef9f72798e18743939bdbf037aab8c28"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e79e5db08739731b0ce4850bed599235d601701d5694c36570a99a0c5ca41a9d"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win32.whl", hash = "sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:a1a45e0bb052edf6a1d3a93baef85319733a888363938e1fc9924cb00c8df24c"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:184565012b60405d93838167f425713180b949e9d8dd0bbc7b49f074407c5a8b"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a75879bacf2c987c003368cf14bed0ffe99e8e85acfa6c0bfffc21a090f16880"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win32.whl", hash = "sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15"}, + {file = "ruamel.yaml.clib-0.2.8.tar.gz", hash = "sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512"}, +] + +[[package]] +name = "scikit-learn" +version = "1.4.2" +description = "A set of python modules for machine learning and data mining" +optional = true +python-versions = ">=3.9" +files = [ + {file = "scikit-learn-1.4.2.tar.gz", hash = "sha256:daa1c471d95bad080c6e44b4946c9390a4842adc3082572c20e4f8884e39e959"}, + {file = "scikit_learn-1.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8539a41b3d6d1af82eb629f9c57f37428ff1481c1e34dddb3b9d7af8ede67ac5"}, + {file = "scikit_learn-1.4.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:68b8404841f944a4a1459b07198fa2edd41a82f189b44f3e1d55c104dbc2e40c"}, + {file = "scikit_learn-1.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81bf5d8bbe87643103334032dd82f7419bc8c8d02a763643a6b9a5c7288c5054"}, + {file = "scikit_learn-1.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36f0ea5d0f693cb247a073d21a4123bdf4172e470e6d163c12b74cbb1536cf38"}, + {file = "scikit_learn-1.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:87440e2e188c87db80ea4023440923dccbd56fbc2d557b18ced00fef79da0727"}, + {file = "scikit_learn-1.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:45dee87ac5309bb82e3ea633955030df9bbcb8d2cdb30383c6cd483691c546cc"}, + {file = "scikit_learn-1.4.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:1d0b25d9c651fd050555aadd57431b53d4cf664e749069da77f3d52c5ad14b3b"}, + {file = "scikit_learn-1.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0203c368058ab92efc6168a1507d388d41469c873e96ec220ca8e74079bf62e"}, + {file = "scikit_learn-1.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44c62f2b124848a28fd695db5bc4da019287abf390bfce602ddc8aa1ec186aae"}, + {file = "scikit_learn-1.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:5cd7b524115499b18b63f0c96f4224eb885564937a0b3477531b2b63ce331904"}, + {file = "scikit_learn-1.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:90378e1747949f90c8f385898fff35d73193dfcaec3dd75d6b542f90c4e89755"}, + {file = "scikit_learn-1.4.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:ff4effe5a1d4e8fed260a83a163f7dbf4f6087b54528d8880bab1d1377bd78be"}, + {file = "scikit_learn-1.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:671e2f0c3f2c15409dae4f282a3a619601fa824d2c820e5b608d9d775f91780c"}, + {file = "scikit_learn-1.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d36d0bc983336bbc1be22f9b686b50c964f593c8a9a913a792442af9bf4f5e68"}, + {file = "scikit_learn-1.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:d762070980c17ba3e9a4a1e043ba0518ce4c55152032f1af0ca6f39b376b5928"}, + {file = "scikit_learn-1.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d9993d5e78a8148b1d0fdf5b15ed92452af5581734129998c26f481c46586d68"}, + {file = "scikit_learn-1.4.2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:426d258fddac674fdf33f3cb2d54d26f49406e2599dbf9a32b4d1696091d4256"}, + {file = "scikit_learn-1.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5460a1a5b043ae5ae4596b3126a4ec33ccba1b51e7ca2c5d36dac2169f62ab1d"}, + {file = "scikit_learn-1.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49d64ef6cb8c093d883e5a36c4766548d974898d378e395ba41a806d0e824db8"}, + {file = "scikit_learn-1.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:c97a50b05c194be9146d61fe87dbf8eac62b203d9e87a3ccc6ae9aed2dfaf361"}, +] + +[package.dependencies] +joblib = ">=1.2.0" +numpy = ">=1.19.5" +scipy = ">=1.6.0" +threadpoolctl = ">=2.0.0" + +[package.extras] +benchmark = ["matplotlib (>=3.3.4)", "memory-profiler (>=0.57.0)", "pandas (>=1.1.5)"] +docs = ["Pillow (>=7.1.2)", "matplotlib (>=3.3.4)", "memory-profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)", "sphinx (>=6.0.0)", "sphinx-copybutton (>=0.5.2)", "sphinx-gallery (>=0.15.0)", "sphinx-prompt (>=1.3.0)", "sphinxext-opengraph (>=0.4.2)"] +examples = ["matplotlib (>=3.3.4)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)"] +tests = ["black (>=23.3.0)", "matplotlib (>=3.3.4)", "mypy (>=1.3)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "polars (>=0.19.12)", "pooch (>=1.6.0)", "pyamg (>=4.0.0)", "pyarrow (>=12.0.0)", "pytest (>=7.1.2)", "pytest-cov (>=2.9.0)", "ruff (>=0.0.272)", "scikit-image (>=0.17.2)"] + +[[package]] +name = "scipy" +version = "1.13.0" +description = "Fundamental algorithms for scientific computing in Python" +optional = true +python-versions = ">=3.9" +files = [ + {file = "scipy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ba419578ab343a4e0a77c0ef82f088238a93eef141b2b8017e46149776dfad4d"}, + {file = "scipy-1.13.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:22789b56a999265431c417d462e5b7f2b487e831ca7bef5edeb56efe4c93f86e"}, + {file = "scipy-1.13.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05f1432ba070e90d42d7fd836462c50bf98bd08bed0aa616c359eed8a04e3922"}, + {file = "scipy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8434f6f3fa49f631fae84afee424e2483289dfc30a47755b4b4e6b07b2633a4"}, + {file = "scipy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:dcbb9ea49b0167de4167c40eeee6e167caeef11effb0670b554d10b1e693a8b9"}, + {file = "scipy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:1d2f7bb14c178f8b13ebae93f67e42b0a6b0fc50eba1cd8021c9b6e08e8fb1cd"}, + {file = "scipy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fbcf8abaf5aa2dc8d6400566c1a727aed338b5fe880cde64907596a89d576fa"}, + {file = "scipy-1.13.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:5e4a756355522eb60fcd61f8372ac2549073c8788f6114449b37e9e8104f15a5"}, + {file = "scipy-1.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5acd8e1dbd8dbe38d0004b1497019b2dbbc3d70691e65d69615f8a7292865d7"}, + {file = "scipy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ff7dad5d24a8045d836671e082a490848e8639cabb3dbdacb29f943a678683d"}, + {file = "scipy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4dca18c3ffee287ddd3bc8f1dabaf45f5305c5afc9f8ab9cbfab855e70b2df5c"}, + {file = "scipy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:a2f471de4d01200718b2b8927f7d76b5d9bde18047ea0fa8bd15c5ba3f26a1d6"}, + {file = "scipy-1.13.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d0de696f589681c2802f9090fff730c218f7c51ff49bf252b6a97ec4a5d19e8b"}, + {file = "scipy-1.13.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:b2a3ff461ec4756b7e8e42e1c681077349a038f0686132d623fa404c0bee2551"}, + {file = "scipy-1.13.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf9fe63e7a4bf01d3645b13ff2aa6dea023d38993f42aaac81a18b1bda7a82a"}, + {file = "scipy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e7626dfd91cdea5714f343ce1176b6c4745155d234f1033584154f60ef1ff42"}, + {file = "scipy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:109d391d720fcebf2fbe008621952b08e52907cf4c8c7efc7376822151820820"}, + {file = "scipy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:8930ae3ea371d6b91c203b1032b9600d69c568e537b7988a3073dfe4d4774f21"}, + {file = "scipy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5407708195cb38d70fd2d6bb04b1b9dd5c92297d86e9f9daae1576bd9e06f602"}, + {file = "scipy-1.13.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:ac38c4c92951ac0f729c4c48c9e13eb3675d9986cc0c83943784d7390d540c78"}, + {file = "scipy-1.13.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09c74543c4fbeb67af6ce457f6a6a28e5d3739a87f62412e4a16e46f164f0ae5"}, + {file = "scipy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28e286bf9ac422d6beb559bc61312c348ca9b0f0dae0d7c5afde7f722d6ea13d"}, + {file = "scipy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:33fde20efc380bd23a78a4d26d59fc8704e9b5fd9b08841693eb46716ba13d86"}, + {file = "scipy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:45c08bec71d3546d606989ba6e7daa6f0992918171e2a6f7fbedfa7361c2de1e"}, + {file = "scipy-1.13.0.tar.gz", hash = "sha256:58569af537ea29d3f78e5abd18398459f195546bb3be23d16677fb26616cc11e"}, +] + +[package.dependencies] +numpy = ">=1.22.4,<2.3" + +[package.extras] +dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy", "pycodestyle", "pydevtool", "rich-click", "ruff", "types-psutil", "typing_extensions"] +doc = ["jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.12.0)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0)", "sphinx-design (>=0.4.0)"] +test = ["array-api-strict", "asv", "gmpy2", "hypothesis (>=6.30)", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] + +[[package]] +name = "setuptools" +version = "69.5.1" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"}, + {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." +optional = true +python-versions = "*" +files = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] + +[[package]] +name = "soupsieve" +version = "2.5" +description = "A modern CSS selector implementation for Beautiful Soup." +optional = true +python-versions = ">=3.8" +files = [ + {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, + {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, +] + +[[package]] +name = "sphinx" +version = "4.2.0" +description = "Python documentation generator" +optional = true +python-versions = ">=3.6" +files = [ + {file = "Sphinx-4.2.0-py3-none-any.whl", hash = "sha256:98a535c62a4fcfcc362528592f69b26f7caec587d32cd55688db580be0287ae0"}, + {file = "Sphinx-4.2.0.tar.gz", hash = "sha256:94078db9184491e15bce0a56d9186e0aec95f16ac20b12d00e06d4e36f1058a6"}, +] + +[package.dependencies] +alabaster = ">=0.7,<0.8" +babel = ">=1.3" +colorama = {version = ">=0.3.5", markers = "sys_platform == \"win32\""} +docutils = ">=0.14,<0.18" +imagesize = "*" +Jinja2 = ">=2.3" +packaging = "*" +Pygments = ">=2.0" +requests = ">=2.5.0" +setuptools = "*" +snowballstemmer = ">=1.1" +sphinxcontrib-applehelp = "*" +sphinxcontrib-devhelp = "*" +sphinxcontrib-htmlhelp = ">=2.0.0" +sphinxcontrib-jsmath = "*" +sphinxcontrib-qthelp = "*" +sphinxcontrib-serializinghtml = ">=1.1.5" + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["docutils-stubs", "flake8 (>=3.5.0)", "isort", "mypy (>=0.900)", "types-pkg-resources", "types-requests", "types-typed-ast"] +test = ["cython", "html5lib", "pytest", "pytest-cov", "typed-ast"] + +[[package]] +name = "sphinx-rtd-theme" +version = "1.0.0" +description = "Read the Docs theme for Sphinx" +optional = true +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" +files = [ + {file = "sphinx_rtd_theme-1.0.0-py2.py3-none-any.whl", hash = "sha256:4d35a56f4508cfee4c4fb604373ede6feae2a306731d533f409ef5c3496fdbd8"}, + {file = "sphinx_rtd_theme-1.0.0.tar.gz", hash = "sha256:eec6d497e4c2195fa0e8b2016b337532b8a699a68bcb22a512870e16925c6a5c"}, +] + +[package.dependencies] +docutils = "<0.18" +sphinx = ">=1.6" + +[package.extras] +dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client"] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "1.0.8" +description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" +optional = true +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_applehelp-1.0.8-py3-none-any.whl", hash = "sha256:cb61eb0ec1b61f349e5cc36b2028e9e7ca765be05e49641c97241274753067b4"}, + {file = "sphinxcontrib_applehelp-1.0.8.tar.gz", hash = "sha256:c40a4f96f3776c4393d933412053962fac2b84f4c99a7982ba42e09576a70619"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "1.0.6" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" +optional = true +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_devhelp-1.0.6-py3-none-any.whl", hash = "sha256:6485d09629944511c893fa11355bda18b742b83a2b181f9a009f7e500595c90f"}, + {file = "sphinxcontrib_devhelp-1.0.6.tar.gz", hash = "sha256:9893fd3f90506bc4b97bdb977ceb8fbd823989f4316b28c3841ec128544372d3"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.0.5" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +optional = true +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_htmlhelp-2.0.5-py3-none-any.whl", hash = "sha256:393f04f112b4d2f53d93448d4bce35842f62b307ccdc549ec1585e950bc35e04"}, + {file = "sphinxcontrib_htmlhelp-2.0.5.tar.gz", hash = "sha256:0dc87637d5de53dd5eec3a6a01753b1ccf99494bd756aafecd74b4fa9e729015"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] +test = ["html5lib", "pytest"] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +description = "A sphinx extension which renders display math in HTML via JavaScript" +optional = true +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] + +[package.extras] +test = ["flake8", "mypy", "pytest"] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "1.0.7" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" +optional = true +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_qthelp-1.0.7-py3-none-any.whl", hash = "sha256:e2ae3b5c492d58fcbd73281fbd27e34b8393ec34a073c792642cd8e529288182"}, + {file = "sphinxcontrib_qthelp-1.0.7.tar.gz", hash = "sha256:053dedc38823a80a7209a80860b16b722e9e0209e32fea98c90e4e6624588ed6"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "1.1.10" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" +optional = true +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_serializinghtml-1.1.10-py3-none-any.whl", hash = "sha256:326369b8df80a7d2d8d7f99aa5ac577f51ea51556ed974e7716cfd4fca3f6cb7"}, + {file = "sphinxcontrib_serializinghtml-1.1.10.tar.gz", hash = "sha256:93f3f5dc458b91b192fe10c397e324f262cf163d79f3282c158e8436a2c4511f"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] +test = ["pytest"] + +[[package]] +name = "sqlalchemy" +version = "2.0.29" +description = "Database Abstraction Library" +optional = true +python-versions = ">=3.7" +files = [ + {file = "SQLAlchemy-2.0.29-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4c142852ae192e9fe5aad5c350ea6befe9db14370b34047e1f0f7cf99e63c63b"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:99a1e69d4e26f71e750e9ad6fdc8614fbddb67cfe2173a3628a2566034e223c7"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ef3fbccb4058355053c51b82fd3501a6e13dd808c8d8cd2561e610c5456013c"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d6753305936eddc8ed190e006b7bb33a8f50b9854823485eed3a886857ab8d1"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0f3ca96af060a5250a8ad5a63699180bc780c2edf8abf96c58af175921df847a"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c4520047006b1d3f0d89e0532978c0688219857eb2fee7c48052560ae76aca1e"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-win32.whl", hash = "sha256:b2a0e3cf0caac2085ff172c3faacd1e00c376e6884b5bc4dd5b6b84623e29e4f"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-win_amd64.whl", hash = "sha256:01d10638a37460616708062a40c7b55f73e4d35eaa146781c683e0fa7f6c43fb"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:308ef9cb41d099099fffc9d35781638986870b29f744382904bf9c7dadd08513"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:296195df68326a48385e7a96e877bc19aa210e485fa381c5246bc0234c36c78e"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a13b917b4ffe5a0a31b83d051d60477819ddf18276852ea68037a144a506efb9"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f6d971255d9ddbd3189e2e79d743ff4845c07f0633adfd1de3f63d930dbe673"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:61405ea2d563407d316c63a7b5271ae5d274a2a9fbcd01b0aa5503635699fa1e"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:de7202ffe4d4a8c1e3cde1c03e01c1a3772c92858837e8f3879b497158e4cb44"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-win32.whl", hash = "sha256:b5d7ed79df55a731749ce65ec20d666d82b185fa4898430b17cb90c892741520"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-win_amd64.whl", hash = "sha256:205f5a2b39d7c380cbc3b5dcc8f2762fb5bcb716838e2d26ccbc54330775b003"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d96710d834a6fb31e21381c6d7b76ec729bd08c75a25a5184b1089141356171f"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:52de4736404e53c5c6a91ef2698c01e52333988ebdc218f14c833237a0804f1b"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c7b02525ede2a164c5fa5014915ba3591730f2cc831f5be9ff3b7fd3e30958e"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dfefdb3e54cd15f5d56fd5ae32f1da2d95d78319c1f6dfb9bcd0eb15d603d5d"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a88913000da9205b13f6f195f0813b6ffd8a0c0c2bd58d499e00a30eb508870c"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fecd5089c4be1bcc37c35e9aa678938d2888845a134dd016de457b942cf5a758"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-win32.whl", hash = "sha256:8197d6f7a3d2b468861ebb4c9f998b9df9e358d6e1cf9c2a01061cb9b6cf4e41"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-win_amd64.whl", hash = "sha256:9b19836ccca0d321e237560e475fd99c3d8655d03da80c845c4da20dda31b6e1"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:87a1d53a5382cdbbf4b7619f107cc862c1b0a4feb29000922db72e5a66a5ffc0"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a0732dffe32333211801b28339d2a0babc1971bc90a983e3035e7b0d6f06b93"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90453597a753322d6aa770c5935887ab1fc49cc4c4fdd436901308383d698b4b"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ea311d4ee9a8fa67f139c088ae9f905fcf0277d6cd75c310a21a88bf85e130f5"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5f20cb0a63a3e0ec4e169aa8890e32b949c8145983afa13a708bc4b0a1f30e03"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-win32.whl", hash = "sha256:e5bbe55e8552019c6463709b39634a5fc55e080d0827e2a3a11e18eb73f5cdbd"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-win_amd64.whl", hash = "sha256:c2f9c762a2735600654c654bf48dad388b888f8ce387b095806480e6e4ff6907"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7e614d7a25a43a9f54fcce4675c12761b248547f3d41b195e8010ca7297c369c"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:471fcb39c6adf37f820350c28aac4a7df9d3940c6548b624a642852e727ea586"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:988569c8732f54ad3234cf9c561364221a9e943b78dc7a4aaf35ccc2265f1930"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dddaae9b81c88083e6437de95c41e86823d150f4ee94bf24e158a4526cbead01"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:334184d1ab8f4c87f9652b048af3f7abea1c809dfe526fb0435348a6fef3d380"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:38b624e5cf02a69b113c8047cf7f66b5dfe4a2ca07ff8b8716da4f1b3ae81567"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-win32.whl", hash = "sha256:bab41acf151cd68bc2b466deae5deeb9e8ae9c50ad113444151ad965d5bf685b"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-win_amd64.whl", hash = "sha256:52c8011088305476691b8750c60e03b87910a123cfd9ad48576d6414b6ec2a1d"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3071ad498896907a5ef756206b9dc750f8e57352113c19272bdfdc429c7bd7de"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dba622396a3170974f81bad49aacebd243455ec3cc70615aeaef9e9613b5bca5"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b184e3de58009cc0bf32e20f137f1ec75a32470f5fede06c58f6c355ed42a72"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c37f1050feb91f3d6c32f864d8e114ff5545a4a7afe56778d76a9aec62638ba"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bda7ce59b06d0f09afe22c56714c65c957b1068dee3d5e74d743edec7daba552"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:25664e18bef6dc45015b08f99c63952a53a0a61f61f2e48a9e70cec27e55f699"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-win32.whl", hash = "sha256:77d29cb6c34b14af8a484e831ab530c0f7188f8efed1c6a833a2c674bf3c26ec"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-win_amd64.whl", hash = "sha256:04c487305ab035a9548f573763915189fc0fe0824d9ba28433196f8436f1449c"}, + {file = "SQLAlchemy-2.0.29-py3-none-any.whl", hash = "sha256:dc4ee2d4ee43251905f88637d5281a8d52e916a021384ec10758826f5cbae305"}, + {file = "SQLAlchemy-2.0.29.tar.gz", hash = "sha256:bd9566b8e58cabd700bc367b60e90d9349cd16f0984973f98a9a09f9c64e86f0"}, +] + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} +typing-extensions = ">=4.6.0" + +[package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=8)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3_binary"] + +[[package]] +name = "tabulate" +version = "0.9.0" +description = "Pretty-print tabular data" +optional = true +python-versions = ">=3.7" +files = [ + {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, + {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, +] + +[package.extras] +widechars = ["wcwidth"] + +[[package]] +name = "tenacity" +version = "8.2.3" +description = "Retry code until it succeeds" +optional = true +python-versions = ">=3.7" +files = [ + {file = "tenacity-8.2.3-py3-none-any.whl", hash = "sha256:ce510e327a630c9e1beaf17d42e6ffacc88185044ad85cf74c0a8887c6a0f88c"}, + {file = "tenacity-8.2.3.tar.gz", hash = "sha256:5398ef0d78e63f40007c1fb4c0bff96e1911394d2fa8d194f77619c05ff6cc8a"}, +] + +[package.extras] +doc = ["reno", "sphinx", "tornado (>=4.5)"] + +[[package]] +name = "threadpoolctl" +version = "3.4.0" +description = "threadpoolctl" +optional = true +python-versions = ">=3.8" +files = [ + {file = "threadpoolctl-3.4.0-py3-none-any.whl", hash = "sha256:8f4c689a65b23e5ed825c8436a92b818aac005e0f3715f6a1664d7c7ee29d262"}, + {file = "threadpoolctl-3.4.0.tar.gz", hash = "sha256:f11b491a03661d6dd7ef692dd422ab34185d982466c49c8f98c8f716b5c93196"}, +] + +[[package]] +name = "tiktoken" +version = "0.4.0" +description = "tiktoken is a fast BPE tokeniser for use with OpenAI's models" +optional = true +python-versions = ">=3.8" +files = [ + {file = "tiktoken-0.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:176cad7f053d2cc82ce7e2a7c883ccc6971840a4b5276740d0b732a2b2011f8a"}, + {file = "tiktoken-0.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:450d504892b3ac80207700266ee87c932df8efea54e05cefe8613edc963c1285"}, + {file = "tiktoken-0.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00d662de1e7986d129139faf15e6a6ee7665ee103440769b8dedf3e7ba6ac37f"}, + {file = "tiktoken-0.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5727d852ead18b7927b8adf558a6f913a15c7766725b23dbe21d22e243041b28"}, + {file = "tiktoken-0.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c06cd92b09eb0404cedce3702fa866bf0d00e399439dad3f10288ddc31045422"}, + {file = "tiktoken-0.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9ec161e40ed44e4210d3b31e2ff426b4a55e8254f1023e5d2595cb60044f8ea6"}, + {file = "tiktoken-0.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:1e8fa13cf9889d2c928b9e258e9dbbbf88ab02016e4236aae76e3b4f82dd8288"}, + {file = "tiktoken-0.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bb2341836b725c60d0ab3c84970b9b5f68d4b733a7bcb80fb25967e5addb9920"}, + {file = "tiktoken-0.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ca30367ad750ee7d42fe80079d3092bd35bb266be7882b79c3bd159b39a17b0"}, + {file = "tiktoken-0.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3dc3df19ddec79435bb2a94ee46f4b9560d0299c23520803d851008445671197"}, + {file = "tiktoken-0.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d980fa066e962ef0f4dad0222e63a484c0c993c7a47c7dafda844ca5aded1f3"}, + {file = "tiktoken-0.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:329f548a821a2f339adc9fbcfd9fc12602e4b3f8598df5593cfc09839e9ae5e4"}, + {file = "tiktoken-0.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b1a038cee487931a5caaef0a2e8520e645508cde21717eacc9af3fbda097d8bb"}, + {file = "tiktoken-0.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:08efa59468dbe23ed038c28893e2a7158d8c211c3dd07f2bbc9a30e012512f1d"}, + {file = "tiktoken-0.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f3020350685e009053829c1168703c346fb32c70c57d828ca3742558e94827a9"}, + {file = "tiktoken-0.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba16698c42aad8190e746cd82f6a06769ac7edd415d62ba027ea1d99d958ed93"}, + {file = "tiktoken-0.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c15d9955cc18d0d7ffcc9c03dc51167aedae98542238b54a2e659bd25fe77ed"}, + {file = "tiktoken-0.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64e1091c7103100d5e2c6ea706f0ec9cd6dc313e6fe7775ef777f40d8c20811e"}, + {file = "tiktoken-0.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e87751b54eb7bca580126353a9cf17a8a8eaadd44edaac0e01123e1513a33281"}, + {file = "tiktoken-0.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e063b988b8ba8b66d6cc2026d937557437e79258095f52eaecfafb18a0a10c03"}, + {file = "tiktoken-0.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9c6dd439e878172dc163fced3bc7b19b9ab549c271b257599f55afc3a6a5edef"}, + {file = "tiktoken-0.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8d1d97f83697ff44466c6bef5d35b6bcdb51e0125829a9c0ed1e6e39fb9a08fb"}, + {file = "tiktoken-0.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1b6bce7c68aa765f666474c7c11a7aebda3816b58ecafb209afa59c799b0dd2d"}, + {file = "tiktoken-0.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a73286c35899ca51d8d764bc0b4d60838627ce193acb60cc88aea60bddec4fd"}, + {file = "tiktoken-0.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0394967d2236a60fd0aacef26646b53636423cc9c70c32f7c5124ebe86f3093"}, + {file = "tiktoken-0.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:dae2af6f03ecba5f679449fa66ed96585b2fa6accb7fd57d9649e9e398a94f44"}, + {file = "tiktoken-0.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:55e251b1da3c293432179cf7c452cfa35562da286786be5a8b1ee3405c2b0dd2"}, + {file = "tiktoken-0.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:c835d0ee1f84a5aa04921717754eadbc0f0a56cf613f78dfc1cf9ad35f6c3fea"}, + {file = "tiktoken-0.4.0.tar.gz", hash = "sha256:59b20a819969735b48161ced9b92f05dc4519c17be4015cfb73b65270a243620"}, +] + +[package.dependencies] +regex = ">=2022.1.18" +requests = ">=2.26.0" + +[package.extras] +blobfile = ["blobfile (>=2)"] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "tqdm" +version = "4.66.2" +description = "Fast, Extensible Progress Meter" +optional = true +python-versions = ">=3.7" +files = [ + {file = "tqdm-4.66.2-py3-none-any.whl", hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"}, + {file = "tqdm-4.66.2.tar.gz", hash = "sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + +[[package]] +name = "typed-ast" +version = "1.5.5" +description = "a fork of Python 2 and 3 ast modules with type comment support" +optional = false +python-versions = ">=3.6" +files = [ + {file = "typed_ast-1.5.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4bc1efe0ce3ffb74784e06460f01a223ac1f6ab31c6bc0376a21184bf5aabe3b"}, + {file = "typed_ast-1.5.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5f7a8c46a8b333f71abd61d7ab9255440d4a588f34a21f126bbfc95f6049e686"}, + {file = "typed_ast-1.5.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:597fc66b4162f959ee6a96b978c0435bd63791e31e4f410622d19f1686d5e769"}, + {file = "typed_ast-1.5.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d41b7a686ce653e06c2609075d397ebd5b969d821b9797d029fccd71fdec8e04"}, + {file = "typed_ast-1.5.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5fe83a9a44c4ce67c796a1b466c270c1272e176603d5e06f6afbc101a572859d"}, + {file = "typed_ast-1.5.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d5c0c112a74c0e5db2c75882a0adf3133adedcdbfd8cf7c9d6ed77365ab90a1d"}, + {file = "typed_ast-1.5.5-cp310-cp310-win_amd64.whl", hash = "sha256:e1a976ed4cc2d71bb073e1b2a250892a6e968ff02aa14c1f40eba4f365ffec02"}, + {file = "typed_ast-1.5.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c631da9710271cb67b08bd3f3813b7af7f4c69c319b75475436fcab8c3d21bee"}, + {file = "typed_ast-1.5.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b445c2abfecab89a932b20bd8261488d574591173d07827c1eda32c457358b18"}, + {file = "typed_ast-1.5.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc95ffaaab2be3b25eb938779e43f513e0e538a84dd14a5d844b8f2932593d88"}, + {file = "typed_ast-1.5.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61443214d9b4c660dcf4b5307f15c12cb30bdfe9588ce6158f4a005baeb167b2"}, + {file = "typed_ast-1.5.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6eb936d107e4d474940469e8ec5b380c9b329b5f08b78282d46baeebd3692dc9"}, + {file = "typed_ast-1.5.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e48bf27022897577d8479eaed64701ecaf0467182448bd95759883300ca818c8"}, + {file = "typed_ast-1.5.5-cp311-cp311-win_amd64.whl", hash = "sha256:83509f9324011c9a39faaef0922c6f720f9623afe3fe220b6d0b15638247206b"}, + {file = "typed_ast-1.5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:44f214394fc1af23ca6d4e9e744804d890045d1643dd7e8229951e0ef39429b5"}, + {file = "typed_ast-1.5.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:118c1ce46ce58fda78503eae14b7664163aa735b620b64b5b725453696f2a35c"}, + {file = "typed_ast-1.5.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be4919b808efa61101456e87f2d4c75b228f4e52618621c77f1ddcaae15904fa"}, + {file = "typed_ast-1.5.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:fc2b8c4e1bc5cd96c1a823a885e6b158f8451cf6f5530e1829390b4d27d0807f"}, + {file = "typed_ast-1.5.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:16f7313e0a08c7de57f2998c85e2a69a642e97cb32f87eb65fbfe88381a5e44d"}, + {file = "typed_ast-1.5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:2b946ef8c04f77230489f75b4b5a4a6f24c078be4aed241cfabe9cbf4156e7e5"}, + {file = "typed_ast-1.5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2188bc33d85951ea4ddad55d2b35598b2709d122c11c75cffd529fbc9965508e"}, + {file = "typed_ast-1.5.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0635900d16ae133cab3b26c607586131269f88266954eb04ec31535c9a12ef1e"}, + {file = "typed_ast-1.5.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57bfc3cf35a0f2fdf0a88a3044aafaec1d2f24d8ae8cd87c4f58d615fb5b6311"}, + {file = "typed_ast-1.5.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:fe58ef6a764de7b4b36edfc8592641f56e69b7163bba9f9c8089838ee596bfb2"}, + {file = "typed_ast-1.5.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d09d930c2d1d621f717bb217bf1fe2584616febb5138d9b3e8cdd26506c3f6d4"}, + {file = "typed_ast-1.5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:d40c10326893ecab8a80a53039164a224984339b2c32a6baf55ecbd5b1df6431"}, + {file = "typed_ast-1.5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fd946abf3c31fb50eee07451a6aedbfff912fcd13cf357363f5b4e834cc5e71a"}, + {file = "typed_ast-1.5.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ed4a1a42df8a3dfb6b40c3d2de109e935949f2f66b19703eafade03173f8f437"}, + {file = "typed_ast-1.5.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:045f9930a1550d9352464e5149710d56a2aed23a2ffe78946478f7b5416f1ede"}, + {file = "typed_ast-1.5.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:381eed9c95484ceef5ced626355fdc0765ab51d8553fec08661dce654a935db4"}, + {file = "typed_ast-1.5.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bfd39a41c0ef6f31684daff53befddae608f9daf6957140228a08e51f312d7e6"}, + {file = "typed_ast-1.5.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8c524eb3024edcc04e288db9541fe1f438f82d281e591c548903d5b77ad1ddd4"}, + {file = "typed_ast-1.5.5-cp38-cp38-win_amd64.whl", hash = "sha256:7f58fabdde8dcbe764cef5e1a7fcb440f2463c1bbbec1cf2a86ca7bc1f95184b"}, + {file = "typed_ast-1.5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:042eb665ff6bf020dd2243307d11ed626306b82812aba21836096d229fdc6a10"}, + {file = "typed_ast-1.5.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:622e4a006472b05cf6ef7f9f2636edc51bda670b7bbffa18d26b255269d3d814"}, + {file = "typed_ast-1.5.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1efebbbf4604ad1283e963e8915daa240cb4bf5067053cf2f0baadc4d4fb51b8"}, + {file = "typed_ast-1.5.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0aefdd66f1784c58f65b502b6cf8b121544680456d1cebbd300c2c813899274"}, + {file = "typed_ast-1.5.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:48074261a842acf825af1968cd912f6f21357316080ebaca5f19abbb11690c8a"}, + {file = "typed_ast-1.5.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:429ae404f69dc94b9361bb62291885894b7c6fb4640d561179548c849f8492ba"}, + {file = "typed_ast-1.5.5-cp39-cp39-win_amd64.whl", hash = "sha256:335f22ccb244da2b5c296e6f96b06ee9bed46526db0de38d2f0e5a6597b81155"}, + {file = "typed_ast-1.5.5.tar.gz", hash = "sha256:94282f7a354f36ef5dbce0ef3467ebf6a258e370ab33d5b40c249fa996e590dd"}, +] + +[[package]] +name = "types-pytz" +version = "2024.1.0.20240203" +description = "Typing stubs for pytz" +optional = true +python-versions = ">=3.8" +files = [ + {file = "types-pytz-2024.1.0.20240203.tar.gz", hash = "sha256:c93751ee20dfc6e054a0148f8f5227b9a00b79c90a4d3c9f464711a73179c89e"}, + {file = "types_pytz-2024.1.0.20240203-py3-none-any.whl", hash = "sha256:9679eef0365db3af91ef7722c199dbb75ee5c1b67e3c4dd7bfbeb1b8a71c21a3"}, +] + +[[package]] +name = "typing-extensions" +version = "4.11.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, + {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, +] + +[[package]] +name = "typing-inspect" +version = "0.9.0" +description = "Runtime inspection utilities for typing module." +optional = true +python-versions = "*" +files = [ + {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, + {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, +] + +[package.dependencies] +mypy-extensions = ">=0.3.0" +typing-extensions = ">=3.7.4" + +[[package]] +name = "tzdata" +version = "2024.1" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, +] + +[[package]] +name = "unstructured" +version = "0.10.27" +description = "A library that prepares raw documents for downstream ML tasks." +optional = true +python-versions = ">=3.7.0" +files = [ + {file = "unstructured-0.10.27-py3-none-any.whl", hash = "sha256:3a8a8e44302388ddc39c184059e8b4458f1cdc58032540b9af7d85f6c3eca3be"}, + {file = "unstructured-0.10.27.tar.gz", hash = "sha256:f567b5c4385993a9ab48db5563dd7b413aac4f2002bb22e6250496ea8f440f5e"}, +] + +[package.dependencies] +backoff = "*" +beautifulsoup4 = "*" +chardet = "*" +dataclasses-json = "*" +emoji = "*" +filetype = "*" +langdetect = "*" +lxml = "*" +nltk = "*" +numpy = "*" +python-docx = {version = ">=1.0.1", optional = true, markers = "extra == \"docx\""} +python-iso639 = "*" +python-magic = "*" +python-pptx = {version = "<=0.6.21", optional = true, markers = "extra == \"pptx\""} +rapidfuzz = "*" +requests = "*" +tabulate = "*" +typing-extensions = "*" + +[package.extras] +airtable = ["pyairtable"] +all-docs = ["markdown", "msg-parser", "networkx", "onnx", "openpyxl", "pandas", "pdf2image", "pdfminer.six", "pypandoc", "python-docx (>=1.0.1)", "python-pptx (<=0.6.21)", "unstructured-inference (==0.7.10)", "unstructured.pytesseract (>=0.3.12)", "xlrd"] +azure = ["adlfs", "fsspec (==2023.9.1)"] +azure-cognitive-search = ["azure-search-documents"] +bedrock = ["boto3", "langchain"] +biomed = ["bs4"] +box = ["boxfs", "fsspec (==2023.9.1)"] +confluence = ["atlassian-python-api"] +csv = ["pandas"] +delta-table = ["deltalake", "fsspec (==2023.9.1)"] +discord = ["discord-py"] +doc = ["python-docx (>=1.0.1)"] +docx = ["python-docx (>=1.0.1)"] +dropbox = ["dropboxdrivefs", "fsspec (==2023.9.1)"] +elasticsearch = ["elasticsearch", "jq"] +embed-huggingface = ["huggingface", "langchain", "sentence-transformers"] +epub = ["pypandoc"] +gcs = ["bs4", "fsspec (==2023.9.1)", "gcsfs"] +github = ["pygithub (>1.58.0)"] +gitlab = ["python-gitlab"] +google-drive = ["google-api-python-client"] +huggingface = ["langdetect", "sacremoses", "sentencepiece", "torch", "transformers"] +image = ["onnx", "pdf2image", "pdfminer.six", "unstructured-inference (==0.7.10)", "unstructured.pytesseract (>=0.3.12)"] +jira = ["atlassian-python-api"] +local-inference = ["markdown", "msg-parser", "networkx", "onnx", "openpyxl", "pandas", "pdf2image", "pdfminer.six", "pypandoc", "python-docx (>=1.0.1)", "python-pptx (<=0.6.21)", "unstructured-inference (==0.7.10)", "unstructured.pytesseract (>=0.3.12)", "xlrd"] +md = ["markdown"] +msg = ["msg-parser"] +notion = ["htmlBuilder", "notion-client"] +odt = ["pypandoc", "python-docx (>=1.0.1)"] +onedrive = ["Office365-REST-Python-Client (<2.4.3)", "bs4", "msal"] +openai = ["langchain", "openai", "tiktoken"] +org = ["pypandoc"] +outlook = ["Office365-REST-Python-Client (<2.4.3)", "msal"] +paddleocr = ["unstructured.paddleocr (==2.6.1.3)"] +pdf = ["onnx", "pdf2image", "pdfminer.six", "unstructured-inference (==0.7.10)", "unstructured.pytesseract (>=0.3.12)"] +ppt = ["python-pptx (<=0.6.21)"] +pptx = ["python-pptx (<=0.6.21)"] +reddit = ["praw"] +rst = ["pypandoc"] +rtf = ["pypandoc"] +s3 = ["fsspec (==2023.9.1)", "s3fs"] +salesforce = ["simple-salesforce"] +sharepoint = ["Office365-REST-Python-Client (<2.4.3)", "msal"] +slack = ["slack-sdk"] +tsv = ["pandas"] +wikipedia = ["wikipedia"] +xlsx = ["networkx", "openpyxl", "pandas", "xlrd"] + +[[package]] +name = "unstructured-pytesseract" +version = "0.3.12" +description = "Python-tesseract is a python wrapper for Google's Tesseract-OCR" +optional = true +python-versions = ">=3.8" +files = [ + {file = "unstructured.pytesseract-0.3.12-py3-none-any.whl", hash = "sha256:6ed42530fc697bb08d1ae4884cc517ee808620c1c1414efe8d5d90334da068d3"}, + {file = "unstructured.pytesseract-0.3.12.tar.gz", hash = "sha256:751a21d67b1f109036bf4daf796d3e04631697a355efd650f3373412b249de2e"}, +] + +[package.dependencies] +packaging = ">=21.3" +Pillow = ">=8.0.0" + +[[package]] +name = "url-normalize" +version = "1.4.3" +description = "URL normalization for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, + {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wcmatch" +version = "8.4" +description = "Wildcard/glob file name matcher." +optional = false +python-versions = ">=3.7" +files = [ + {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, + {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, +] + +[package.dependencies] +bracex = ">=2.1.1" + +[[package]] +name = "werkzeug" +version = "3.0.2" +description = "The comprehensive WSGI web application library." +optional = false +python-versions = ">=3.8" +files = [ + {file = "werkzeug-3.0.2-py3-none-any.whl", hash = "sha256:3aac3f5da756f93030740bc235d3e09449efcf65f2f55e3602e1d851b8f48795"}, + {file = "werkzeug-3.0.2.tar.gz", hash = "sha256:e39b645a6ac92822588e7b39a692e7828724ceae0b0d702ef96701f90e70128d"}, +] + +[package.dependencies] +MarkupSafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog (>=2.3)"] + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[[package]] +name = "xlsxwriter" +version = "3.2.0" +description = "A Python module for creating Excel XLSX files." +optional = true +python-versions = ">=3.6" +files = [ + {file = "XlsxWriter-3.2.0-py3-none-any.whl", hash = "sha256:ecfd5405b3e0e228219bcaf24c2ca0915e012ca9464a14048021d21a995d490e"}, + {file = "XlsxWriter-3.2.0.tar.gz", hash = "sha256:9977d0c661a72866a61f9f7a809e25ebbb0fb7036baa3b9fe74afcfca6b3cb8c"}, +] + +[[package]] +name = "yarl" +version = "1.9.4" +description = "Yet another URL library" +optional = true +python-versions = ">=3.7" +files = [ + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, + {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, + {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, + {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, + {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, + {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, + {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, + {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, + {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, + {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, + {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, + {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, + {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, + {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, + {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, + {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" + +[[package]] +name = "zipp" +version = "3.18.1" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = true +python-versions = ">=3.8" +files = [ + {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, + {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + +[extras] +file-based = ["avro", "fastavro", "markdown", "pdf2image", "pdfminer.six", "pyarrow", "pytesseract", "unstructured", "unstructured.pytesseract"] +sphinx-docs = ["Sphinx", "sphinx-rtd-theme"] +vector-db-based = ["cohere", "langchain", "openai", "tiktoken"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9" +content-hash = "1633d60fbb46ff59f4314f61c11dfc326a45563421a48b06406c2bab352774f3" diff --git a/airbyte-cdk/python/pyproject.toml b/airbyte-cdk/python/pyproject.toml index f03d6cbcbe012..d56f504d6bfac 100644 --- a/airbyte-cdk/python/pyproject.toml +++ b/airbyte-cdk/python/pyproject.toml @@ -1,44 +1,96 @@ -# Defines Python build system settings. [build-system] -requires = [ - "setuptools>=42", - "wheel" +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +name = "airbyte-cdk" +version = "0.81.6" +description = "A framework for writing Airbyte Connectors." +authors = ["Airbyte "] +license = "MIT" +readme = "README.md" +homepage = "https://github.com/airbytehq/airbyte" +repository = "https://github.com/airbytehq/airbyte" +documentation = "https://docs.airbyte.io/" +classifiers = [ + "Development Status :: 3 - Alpha", + "Intended Audience :: Developers", + "Topic :: Scientific/Engineering", + "Topic :: Software Development :: Libraries :: Python Modules", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 3.8", ] +keywords = ["airbyte", "connector-development-kit", "cdk"] -build-backend = "setuptools.build_meta" +[tool.poetry.dependencies] +python = "^3.9" +airbyte-protocol-models = "*" +backoff = "*" +cachetools = "*" +Deprecated = "~1.2" +dpath = "~2.0.1" +genson = "1.2.2" +isodate = "~0.6.1" +Jinja2 = "~3.1.2" +jsonref = "~0.2" +jsonschema = "~3.2.0" +pendulum = "<3.0.0" +pydantic = "^1.10.8" +pyrate-limiter = "~3.1.0" +python-dateutil = "*" +PyYAML = "^6.0.1" +requests = "*" +requests_cache = "*" +wcmatch = "8.4" +# Extras depedencies +avro = { version = "~1.11.2", optional = true } +cohere = { version = "4.21", optional = true } +fastavro = { version = "~1.8.0", optional = true } +langchain = { version = "0.1.16", optional = true } +langchain_core = { version = "0.1.42", optional = true } +markdown = { version = "*", optional = true } +openai = { version = "0.27.9", extras = ["embeddings"], optional = true } +pdf2image = { version = "1.16.3", optional = true } +"pdfminer.six" = { version = "20221105", optional = true } +pyarrow = { version = "~15.0.0", optional = true } +pytesseract = { version = "0.3.10", optional = true } +Sphinx = { version = "~4.2", optional = true } +sphinx-rtd-theme = { version = "~1.0", optional = true } +tiktoken = { version = "0.4.0", optional = true } +unstructured = { version = "0.10.27", extras = ["docx", "pptx"], optional = true } +"unstructured.pytesseract" = { version = ">=0.3.12", optional = true } -[tool.coverage.report] -fail_under = 0 -skip_empty = true -sort = "-cover" -omit = [ - ".venv/*", - "main.py", - "setup.py", - "unit_tests/*", - "integration_tests/*", - "**/generated/*", -] +[tool.poetry.group.dev.dependencies] +datamodel_code_generator = "0.11.19" +freezegun = "*" +mypy = "*" +pandas = "2.0.3" +poethepoet = "^0.24.2" +pyproject-flake8 = "^6.1.0" +pytest = "6.2.5" +pytest-cov = "*" +pytest-httpserver = "*" +pytest-mock = "*" +requests-mock = "*" -[tool.flake8] -extend-exclude = [ - "*/lib/*/site-packages", - ".venv", - "build", - "models", - ".eggs", - "airbyte-cdk/python/airbyte_cdk/models/__init__.py", - "airbyte-cdk/python/airbyte_cdk/sources/declarative/models/__init__.py", - ".tox", - "airbyte_api_client", - "**/generated/*", -] -max-complexity = 20 -max-line-length = 140 +[tool.poetry.extras] +file-based = ["avro", "fastavro", "pyarrow", "unstructured", "pdf2image", "pdfminer.six", "unstructured.pytesseract", "pytesseract", "markdown"] +sphinx-docs = ["Sphinx", "sphinx-rtd-theme"] +vector-db-based = ["langchain", "openai", "cohere", "tiktoken"] + +[tool.poe.tasks] +# Build tasks +assemble = {cmd = "bin/generate-component-manifest-files.sh", help = "Generate component manifest files."} +build-package = {cmd = "poetry build", help = "Build the python package: source and wheels archives."} +build = {sequence = ["assemble", "build-package"], help = "Run all tasks to build the package."} + +# Check tasks +lint = {cmd = "pflake8 --config ../../pyproject.toml ./", help = "Lint with flake8."} +type-check = {cmd = "bin/run-mypy-on-modified-files.sh", help = "Type check modified files with mypy."} +unit-test-with-cov = {cmd = "pytest -s unit_tests -c pytest.ini --cov=airbyte_cdk --cov-report=term --cov-config ../../pyproject.toml", help = "Run unit tests and create a coverage report."} +# TODO: find a version of the modified mypy check that works both locally and in CI. +check-local = {sequence = ["lint", "type-check", "unit-test-with-cov"], help = "Lint all code, type-check modified files, and run unit tests."} +check-ci = {sequence = ["lint", "unit-test-with-cov"], help = "Lint and run unit tests. Does not include type-checking."} -extend-ignore = [ - "E203", # whitespace before ':' (conflicts with Black) - "E231", # Bad trailing comma (conflicts with Black) - "E501", # line too long (conflicts with Black) - "W503", # line break before binary operator (conflicts with Black) -] \ No newline at end of file +# Build and check +pre-push = {sequence = ["build", "check-local"], help = "Run all build and check tasks."} diff --git a/airbyte-cdk/python/settings.gradle b/airbyte-cdk/python/settings.gradle deleted file mode 100644 index 02e3dd9a67242..0000000000000 --- a/airbyte-cdk/python/settings.gradle +++ /dev/null @@ -1,29 +0,0 @@ -import com.gradle.scan.plugin.PublishedBuildScan - -pluginManagement { - repositories { - // # Gradle looks for dependency artifacts in repositories listed in 'repositories' blocks in descending order. - gradlePluginPortal() - } -} - -// Configure the gradle enterprise plugin to enable build scans. Enabling the plugin at the top of the settings file allows the build scan to record -// as much information as possible. -plugins { - id "com.gradle.enterprise" version "3.15.1" -} - -ext.isCiServer = System.getenv().containsKey("CI") - -gradleEnterprise { - buildScan { - termsOfServiceUrl = "https://gradle.com/terms-of-service" - termsOfServiceAgree = "yes" - uploadInBackground = !isCiServer // Disable in CI or scan URLs may not work. - buildScanPublished { PublishedBuildScan scan -> - file("scan-journal.log") << "${new Date()} - ${scan.buildScanId} - ${scan.buildScanUri}\n" - } - } -} - -rootProject.name = 'airbyte-cdk-python' diff --git a/airbyte-cdk/python/setup.py b/airbyte-cdk/python/setup.py deleted file mode 100644 index af776b923b920..0000000000000 --- a/airbyte-cdk/python/setup.py +++ /dev/null @@ -1,119 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -import pathlib - -from setuptools import find_packages, setup - -# The directory containing this file -HERE = pathlib.Path(__file__).parent - -# The text of the README file -README = (HERE / "README.md").read_text() - -avro_dependency = "avro~=1.11.2" -fastavro_dependency = "fastavro~=1.8.0" -pyarrow_dependency = "pyarrow~=15.0.0" - -langchain_dependency = "langchain==0.0.271" -openai_dependency = "openai[embeddings]==0.27.9" -cohere_dependency = "cohere==4.21" -tiktoken_dependency = "tiktoken==0.4.0" - -unstructured_dependencies = [ - "unstructured==0.10.27", # can't be bumped higher due to transitive dependencies we can't provide - "unstructured[docx,pptx]==0.10.27", - "pdf2image==1.16.3", - "pdfminer.six==20221105", - "unstructured.pytesseract>=0.3.12", - "pytesseract==0.3.10", - "markdown", -] - -setup( - name="airbyte-cdk", - # The version of the airbyte-cdk package is used at runtime to validate manifests. That validation must be - # updated if our semver format changes such as using release candidate versions. - version="0.68.4", - description="A framework for writing Airbyte Connectors.", - long_description=README, - long_description_content_type="text/markdown", - author="Airbyte", - author_email="contact@airbyte.io", - license="MIT", - url="https://github.com/airbytehq/airbyte", - classifiers=[ - # This information is used when browsing on PyPi. - # Dev Status - "Development Status :: 3 - Alpha", - # Project Audience - "Intended Audience :: Developers", - "Topic :: Scientific/Engineering", - "Topic :: Software Development :: Libraries :: Python Modules", - "License :: OSI Approved :: MIT License", - # Python Version Support - "Programming Language :: Python :: 3.8", - ], - keywords="airbyte connector-development-kit cdk", - project_urls={ - "Documentation": "https://docs.airbyte.io/", - "Source": "https://github.com/airbytehq/airbyte", - "Tracker": "https://github.com/airbytehq/airbyte/issues", - }, - packages=find_packages(exclude=("unit_tests",)), - package_data={"airbyte_cdk": ["py.typed", "sources/declarative/declarative_component_schema.yaml"]}, - install_requires=[ - "airbyte-protocol-models==0.5.1", - "backoff", - "dpath~=2.0.1", - "isodate~=0.6.1", - "jsonschema~=3.2.0", - "jsonref~=0.2", - "pendulum<3.0.0", - "genson==1.2.2", - "pydantic>=1.10.8,<2.0.0", - "pyrate-limiter~=3.1.0", - "python-dateutil", - "PyYAML>=6.0.1", - "requests", - "requests_cache", - "Deprecated~=1.2", - "Jinja2~=3.1.2", - "cachetools", - "wcmatch==8.4", - ], - python_requires=">=3.8", - extras_require={ - "dev": [ - avro_dependency, - fastavro_dependency, - "freezegun", - "mypy", - "pytest", - "pytest-cov", - "pytest-mock", - "requests-mock", - "pytest-httpserver", - "pandas==2.0.3", - pyarrow_dependency, - langchain_dependency, - openai_dependency, - cohere_dependency, - tiktoken_dependency, - *unstructured_dependencies, - ], - "sphinx-docs": [ - "Sphinx~=4.2", - "sphinx-rtd-theme~=1.0", - ], - "file-based": [ - avro_dependency, - fastavro_dependency, - pyarrow_dependency, - *unstructured_dependencies, - ], - "vector-db-based": [langchain_dependency, openai_dependency, cohere_dependency, tiktoken_dependency], - }, -) diff --git a/airbyte-cdk/python/source_declarative_manifest/README.md b/airbyte-cdk/python/source_declarative_manifest/README.md deleted file mode 100644 index 7a723a4b6d334..0000000000000 --- a/airbyte-cdk/python/source_declarative_manifest/README.md +++ /dev/null @@ -1,49 +0,0 @@ -# Declarative manifest source - -This is a generic source that takes the declarative manifest via a key `__injected_declarative_manifest` of its config. - -## Execution -This entrypoint is used for connectors created by the connector builder. These connector's spec is defined in their manifest, which is defined in the config's "__injected_declarative_manifest" field. This allows this entrypoint to be used with any connector manifest. - -The spec operation is not supported because the config is not known when running a spec. - -## Local development - -#### Building - -You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. - -To build using Gradle, from the Airbyte repository root, run: - -``` -./gradlew airbyte-cdk:python:build -``` - -### Locally running the connector - -``` -python main.py check --config secrets/config.json -python main.py discover --config secrets/config.json -python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json -``` - -### Locally running the connector docker image - -#### Build - -First, make sure you build the latest Docker image: -``` -./gradlew airbyte-cdk:python:airbyteDocker -``` - -The docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in the Dockerfile. - -#### Run - -Then run any of the connector commands as follows: - -``` -docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-declarative-manifest:dev check --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-declarative-manifest:dev discover --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-declarative-manifest:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json -``` diff --git a/airbyte-cdk/python/source_declarative_manifest/main.py b/airbyte-cdk/python/source_declarative_manifest/main.py deleted file mode 100644 index 2c1bdcb2b782c..0000000000000 --- a/airbyte-cdk/python/source_declarative_manifest/main.py +++ /dev/null @@ -1,29 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -import sys -from typing import List - -from airbyte_cdk.connector import BaseConnector -from airbyte_cdk.entrypoint import AirbyteEntrypoint, launch -from airbyte_cdk.sources.declarative.manifest_declarative_source import ManifestDeclarativeSource - - -def create_manifest(args: List[str]): - parsed_args = AirbyteEntrypoint.parse_args(args) - if parsed_args.command == "spec": - raise ValueError("spec command is not supported for injected declarative manifest") - - config = BaseConnector.read_config(parsed_args.config) - if "__injected_declarative_manifest" not in config: - raise ValueError( - f"Invalid config: `__injected_declarative_manifest` should be provided at the root of the config but config only has keys {list(config.keys())}" - ) - return ManifestDeclarativeSource(config.get("__injected_declarative_manifest")) - - -if __name__ == "__main__": - source = create_manifest(sys.argv[1:]) - launch(source, sys.argv[1:]) diff --git a/airbyte-cdk/python/type_check_and_test.sh b/airbyte-cdk/python/type_check_and_test.sh deleted file mode 100755 index 37220d8f8e516..0000000000000 --- a/airbyte-cdk/python/type_check_and_test.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash - -# TODO(davin): Migrate to Gradle? -# TODO(davin): This should not assume the user has already set up the venv folder. - -# Static Type Checking -echo "Running MyPy to static check and test files." -mypy airbyte_cdk/ unit_tests/ --config mypy.ini - -printf "\n" - -# Test with Coverage Report -echo "Running tests.." -# The -s flag instructs PyTest to capture stdout logging; simplifying debugging. -pytest -s -vv --cov=airbyte_cdk unit_tests/ diff --git a/airbyte-cdk/python/unit_tests/__init__.py b/airbyte-cdk/python/unit_tests/__init__.py index e69de29bb2d1d..b6b74b56a60d7 100644 --- a/airbyte-cdk/python/unit_tests/__init__.py +++ b/airbyte-cdk/python/unit_tests/__init__.py @@ -0,0 +1,6 @@ +# THIS STOPS SOME MODELS TESTS FROM FALLING OVER. IT'S A HACK, WE SHOULD PIN DOWN WHAT'S ACTUALLY GOING ON HERE + +# Import the thing that needs to be imported to stop the tests from falling over +from airbyte_cdk.sources.declarative.manifest_declarative_source import ManifestDeclarativeSource +# "Use" the thing so that the linter doesn't complain +placeholder = ManifestDeclarativeSource diff --git a/airbyte-cdk/python/unit_tests/connector_builder/test_connector_builder_handler.py b/airbyte-cdk/python/unit_tests/connector_builder/test_connector_builder_handler.py index 190f8d4bcb56b..fa1cf13a09214 100644 --- a/airbyte-cdk/python/unit_tests/connector_builder/test_connector_builder_handler.py +++ b/airbyte-cdk/python/unit_tests/connector_builder/test_connector_builder_handler.py @@ -499,7 +499,19 @@ def test_config_update(): @patch("traceback.TracebackException.from_exception") def test_read_returns_error_response(mock_from_exception): + class MockDeclarativeStream: + @property + def primary_key(self): + return [[]] + + @property + def cursor_field(self): + return [] + class MockManifestDeclarativeSource: + def streams(self, config): + return [MockDeclarativeStream()] + def read(self, logger, config, catalog, state): raise ValueError("error_message") diff --git a/airbyte-cdk/python/unit_tests/connector_builder/test_message_grouper.py b/airbyte-cdk/python/unit_tests/connector_builder/test_message_grouper.py index 437a775dd8dee..e371a4fd3c626 100644 --- a/airbyte-cdk/python/unit_tests/connector_builder/test_message_grouper.py +++ b/airbyte-cdk/python/unit_tests/connector_builder/test_message_grouper.py @@ -21,6 +21,9 @@ from airbyte_cdk.models import Type as MessageType from unit_tests.connector_builder.utils import create_configured_catalog +_NO_PK = [[]] +_NO_CURSOR_FIELD = [] + MAX_PAGES_PER_SLICE = 4 MAX_SLICES = 3 @@ -96,15 +99,14 @@ def test_get_grouped_messages(mock_entrypoint_read: Mock) -> None: response = {"status_code": 200, "headers": {"field": "value"}, "body": {"content": '{"name": "field"}'}} expected_schema = { "$schema": "http://json-schema.org/schema#", - "properties": {"name": {"type": "string"}, "date": {"type": "string"}}, + "properties": {"name": {"type": ["string", "null"]}, "date": {"type": ["string", "null"]}}, "type": "object", } expected_datetime_fields = {"date": "%Y-%m-%d"} expected_pages = [ StreamReadPages( request=HttpRequest( - url="https://demonslayers.com/api/v1/hashiras", - parameters={"era": ["taisho"]}, + url="https://demonslayers.com/api/v1/hashiras?era=taisho", headers={"Content-Type": "application/json"}, body='{"custom": "field"}', http_method="GET", @@ -114,8 +116,7 @@ def test_get_grouped_messages(mock_entrypoint_read: Mock) -> None: ), StreamReadPages( request=HttpRequest( - url="https://demonslayers.com/api/v1/hashiras", - parameters={"era": ["taisho"]}, + url="https://demonslayers.com/api/v1/hashiras?era=taisho", headers={"Content-Type": "application/json"}, body='{"custom": "field"}', http_method="GET", @@ -163,8 +164,7 @@ def test_get_grouped_messages_with_logs(mock_entrypoint_read: Mock) -> None: expected_pages = [ StreamReadPages( request=HttpRequest( - url="https://demonslayers.com/api/v1/hashiras", - parameters={"era": ["taisho"]}, + url="https://demonslayers.com/api/v1/hashiras?era=taisho", headers={"Content-Type": "application/json"}, body='{"custom": "field"}', http_method="GET", @@ -174,8 +174,7 @@ def test_get_grouped_messages_with_logs(mock_entrypoint_read: Mock) -> None: ), StreamReadPages( request=HttpRequest( - url="https://demonslayers.com/api/v1/hashiras", - parameters={"era": ["taisho"]}, + url="https://demonslayers.com/api/v1/hashiras?era=taisho", headers={"Content-Type": "application/json"}, body='{"custom": "field"}', http_method="GET", @@ -348,8 +347,7 @@ def test_get_grouped_messages_no_records(mock_entrypoint_read: Mock) -> None: expected_pages = [ StreamReadPages( request=HttpRequest( - url="https://demonslayers.com/api/v1/hashiras", - parameters={"era": ["taisho"]}, + url="https://demonslayers.com/api/v1/hashiras?era=taisho", headers={"Content-Type": "application/json"}, body='{"custom": "field"}', http_method="GET", @@ -359,8 +357,7 @@ def test_get_grouped_messages_no_records(mock_entrypoint_read: Mock) -> None: ), StreamReadPages( request=HttpRequest( - url="https://demonslayers.com/api/v1/hashiras", - parameters={"era": ["taisho"]}, + url="https://demonslayers.com/api/v1/hashiras?era=taisho", headers={"Content-Type": "application/json"}, body='{"custom": "field"}', http_method="GET", @@ -537,6 +534,7 @@ def test_get_grouped_messages_given_maximum_number_of_pages_then_test_read_limit def test_read_stream_returns_error_if_stream_does_not_exist() -> None: mock_source = MagicMock() mock_source.read.side_effect = ValueError("error") + mock_source.streams.return_value = [make_mock_stream()] full_config: Mapping[str, Any] = {**CONFIG, **{"__injected_declarative_manifest": MANIFEST}} @@ -545,7 +543,7 @@ def test_read_stream_returns_error_if_stream_does_not_exist() -> None: source=mock_source, config=full_config, configured_catalog=create_configured_catalog("not_in_manifest") ) - assert 1 == len(actual_response.logs) + assert len(actual_response.logs) == 1 assert "Traceback" in actual_response.logs[0].message assert "ERROR" in actual_response.logs[0].level @@ -636,12 +634,58 @@ def test_given_no_slices_then_return_empty_slices(mock_entrypoint_read: Mock) -> assert len(stream_read.slices) == 0 +@patch("airbyte_cdk.connector_builder.message_grouper.AirbyteEntrypoint.read") +def test_given_pk_then_ensure_pk_is_pass_to_schema_inferrence(mock_entrypoint_read: Mock) -> None: + mock_source = make_mock_source(mock_entrypoint_read, iter([ + request_response_log_message({"request": 1}, {"response": 2}, "http://any_url.com"), + record_message("hashiras", {"id": "Shinobu Kocho", "date": "2023-03-03"}), + record_message("hashiras", {"id": "Muichiro Tokito", "date": "2023-03-04"}), + ])) + mock_source.streams.return_value = [Mock()] + mock_source.streams.return_value[0].primary_key = [["id"]] + mock_source.streams.return_value[0].cursor_field = _NO_CURSOR_FIELD + connector_builder_handler = MessageGrouper(MAX_PAGES_PER_SLICE, MAX_SLICES) + + stream_read: StreamRead = connector_builder_handler.get_message_groups( + source=mock_source, config=CONFIG, configured_catalog=create_configured_catalog("hashiras") + ) + + assert stream_read.inferred_schema["required"] == ["id"] + + +@patch("airbyte_cdk.connector_builder.message_grouper.AirbyteEntrypoint.read") +def test_given_cursor_field_then_ensure_cursor_field_is_pass_to_schema_inferrence(mock_entrypoint_read: Mock) -> None: + mock_source = make_mock_source(mock_entrypoint_read, iter([ + request_response_log_message({"request": 1}, {"response": 2}, "http://any_url.com"), + record_message("hashiras", {"id": "Shinobu Kocho", "date": "2023-03-03"}), + record_message("hashiras", {"id": "Muichiro Tokito", "date": "2023-03-04"}), + ])) + mock_source.streams.return_value = [Mock()] + mock_source.streams.return_value[0].primary_key = _NO_PK + mock_source.streams.return_value[0].cursor_field = ["date"] + connector_builder_handler = MessageGrouper(MAX_PAGES_PER_SLICE, MAX_SLICES) + + stream_read: StreamRead = connector_builder_handler.get_message_groups( + source=mock_source, config=CONFIG, configured_catalog=create_configured_catalog("hashiras") + ) + + assert stream_read.inferred_schema["required"] == ["date"] + + def make_mock_source(mock_entrypoint_read: Mock, return_value: Iterator[AirbyteMessage]) -> MagicMock: mock_source = MagicMock() mock_entrypoint_read.return_value = return_value + mock_source.streams.return_value = [make_mock_stream()] return mock_source +def make_mock_stream(): + mock_stream = MagicMock() + mock_stream.primary_key = [] + mock_stream.cursor_field = [] + return mock_stream + + def request_log_message(request: Mapping[str, Any]) -> AirbyteMessage: return AirbyteMessage(type=MessageType.LOG, log=AirbyteLogMessage(level=Level.INFO, message=f"request:{json.dumps(request)}")) diff --git a/airbyte-cdk/python/unit_tests/destinations/test_destination.py b/airbyte-cdk/python/unit_tests/destinations/test_destination.py index 74e6d995d6bb8..00c4cc478825d 100644 --- a/airbyte-cdk/python/unit_tests/destinations/test_destination.py +++ b/airbyte-cdk/python/unit_tests/destinations/test_destination.py @@ -157,7 +157,7 @@ def test_run_spec(self, mocker, destination: Destination): destination.spec.assert_called_once() # type: ignore # verify the output of spec was returned - assert _wrapped(expected_spec) == spec_message + assert spec_message == _wrapped(expected_spec) def test_run_check(self, mocker, destination: Destination, tmp_path): file_path = tmp_path / "config.json" @@ -183,7 +183,7 @@ def test_run_check(self, mocker, destination: Destination, tmp_path): validate_mock.assert_called_with(dummy_config, spec_msg) # verify output was correct - assert _wrapped(expected_check_result) == returned_check_result + assert returned_check_result == _wrapped(expected_check_result) def test_run_write(self, mocker, destination: Destination, tmp_path, monkeypatch): config_path, dummy_config = tmp_path / "config.json", {"user": "sherif"} @@ -235,7 +235,7 @@ def test_run_write(self, mocker, destination: Destination, tmp_path, monkeypatch validate_mock.assert_called_with(dummy_config, spec_msg) # verify output was correct - assert expected_write_result == returned_write_result + assert returned_write_result == expected_write_result @pytest.mark.parametrize("args", [{}, {"command": "fake"}]) def test_run_cmd_with_incorrect_args_fails(self, args, destination: Destination): diff --git a/airbyte-cdk/python/unit_tests/sources/concurrent_source/test_concurrent_source_adapter.py b/airbyte-cdk/python/unit_tests/sources/concurrent_source/test_concurrent_source_adapter.py index 80cc7c4a9d9e6..141355ee90574 100644 --- a/airbyte-cdk/python/unit_tests/sources/concurrent_source/test_concurrent_source_adapter.py +++ b/airbyte-cdk/python/unit_tests/sources/concurrent_source/test_concurrent_source_adapter.py @@ -20,7 +20,7 @@ from airbyte_cdk.sources.message import InMemoryMessageRepository from airbyte_cdk.sources.streams import Stream from airbyte_cdk.sources.streams.concurrent.adapters import StreamFacade -from airbyte_cdk.sources.streams.concurrent.cursor import NoopCursor +from airbyte_cdk.sources.streams.concurrent.cursor import FinalStateCursor class _MockSource(ConcurrentSourceAdapter): @@ -36,7 +36,7 @@ def check_connection(self, logger: logging.Logger, config: Mapping[str, Any]) -> def streams(self, config: Mapping[str, Any]) -> List[Stream]: return [ - StreamFacade.create_from_stream(s, self, self._logger, None, NoopCursor()) if is_concurrent else s + StreamFacade.create_from_stream(s, self, self._logger, None, FinalStateCursor(stream_name=s.name, stream_namespace=s.namespace, message_repository=InMemoryMessageRepository())) if is_concurrent else s for s, is_concurrent in self._streams_to_is_concurrent.items() ] diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/datetime/test_datetime_parser.py b/airbyte-cdk/python/unit_tests/sources/declarative/datetime/test_datetime_parser.py index 0e81f99878590..604c339de64f1 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/datetime/test_datetime_parser.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/datetime/test_datetime_parser.py @@ -41,7 +41,7 @@ def test_parse_date(test_name, input_date, date_format, expected_output_date): parser = DatetimeParser() output_date = parser.parse(input_date, date_format) - assert expected_output_date == output_date + assert output_date == expected_output_date @pytest.mark.parametrize( @@ -56,4 +56,4 @@ def test_parse_date(test_name, input_date, date_format, expected_output_date): def test_format_datetime(test_name, input_dt, datetimeformat, expected_output): parser = DatetimeParser() output_date = parser.format(input_dt, datetimeformat) - assert expected_output == output_date + assert output_date == expected_output diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_datetime_based_cursor.py b/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_datetime_based_cursor.py index 6d93dd50c7648..2f1db84600f19 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_datetime_based_cursor.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_datetime_based_cursor.py @@ -338,55 +338,95 @@ def test_stream_slices( @pytest.mark.parametrize( - "test_name, previous_cursor, stream_slice, latest_record_data, expected_state", + "test_name, previous_cursor, stream_slice, observed_records, expected_state", [ ( "test_close_slice_previous_cursor_is_highest", "2023-01-01", - StreamSlice(partition={}, cursor_slice={"end_time": "2022-01-01"}), - {cursor_field: "2021-01-01"}, + StreamSlice(partition={}, cursor_slice={"start_time": "2021-01-01", "end_time": "2022-01-01"}), + [{cursor_field: "2021-01-01"}], {cursor_field: "2023-01-01"}, ), ( "test_close_slice_stream_slice_partition_end_is_highest", - "2021-01-01", - StreamSlice(partition={}, cursor_slice={"end_time": "2023-01-01"}), + "2020-01-01", + StreamSlice(partition={}, cursor_slice={"start_time": "2021-01-01", "end_time": "2023-01-01"}), + [{cursor_field: "2021-01-01"}], {cursor_field: "2021-01-01"}, - {cursor_field: "2023-01-01"}, ), ( - "test_close_slice_latest_record_cursor_value_is_highest", + "test_close_slice_latest_record_cursor_value_is_higher_than_slice_end", "2021-01-01", - StreamSlice(partition={}, cursor_slice={"end_time": "2022-01-01"}), - {cursor_field: "2023-01-01"}, - {cursor_field: "2023-01-01"}, + StreamSlice(partition={}, cursor_slice={"start_time": "2021-01-01", "end_time": "2022-01-01"}), + [{cursor_field: "2023-01-01"}], + {cursor_field: "2021-01-01"}, ), ( - "test_close_slice_without_latest_record", + "test_close_slice_with_no_records_observed", "2021-01-01", - StreamSlice(partition={}, cursor_slice={"end_time": "2022-01-01"}), + StreamSlice(partition={}, cursor_slice={"start_time": "2021-01-01", "end_time": "2022-01-01"}), + [], + {cursor_field: "2021-01-01"}, + ), + ( + "test_close_slice_with_no_records_observed_and_no_previous_state", None, + StreamSlice(partition={}, cursor_slice={"start_time": "2021-01-01", "end_time": "2022-01-01"}), + [], + {}, + ), + ( + "test_close_slice_without_previous_cursor", + None, + StreamSlice(partition={}, cursor_slice={"start_time": "2021-01-01", "end_time": "2023-01-01"}), + [{cursor_field: "2022-01-01"}], {cursor_field: "2022-01-01"}, ), ( - "test_close_slice_without_cursor", + "test_close_slice_with_out_of_order_records", + "2021-01-01", + StreamSlice(partition={}, cursor_slice={"start_time": "2021-01-01", "end_time": "2022-01-01"}), + [{cursor_field: "2021-04-01"}, {cursor_field: "2021-02-01"}, {cursor_field: "2021-03-01"}], + {cursor_field: "2021-04-01"}, + ), + ( + "test_close_slice_with_some_records_out_of_slice_boundaries", + "2021-01-01", + StreamSlice(partition={}, cursor_slice={"start_time": "2021-01-01", "end_time": "2022-01-01"}), + [{cursor_field: "2021-02-01"}, {cursor_field: "2021-03-01"}, {cursor_field: "2023-01-01"}], + {cursor_field: "2021-03-01"}, + ), + ( + "test_close_slice_with_all_records_out_of_slice_boundaries", + "2021-01-01", + StreamSlice(partition={}, cursor_slice={"start_time": "2021-01-01", "end_time": "2022-01-01"}), + [{cursor_field: "2023-01-01"}], + {cursor_field: "2021-01-01"}, + ), + ( + "test_close_slice_with_all_records_out_of_slice_and_no_previous_cursor", None, - StreamSlice(partition={}, cursor_slice={"end_time": "2022-01-01"}), - {cursor_field: "2023-01-01"}, - {cursor_field: "2023-01-01"}, + StreamSlice(partition={}, cursor_slice={"start_time": "2021-01-01", "end_time": "2022-01-01"}), + [{cursor_field: "2023-01-01"}], + {}, ), ], ) -def test_close_slice(test_name, previous_cursor, stream_slice, latest_record_data, expected_state): +def test_close_slice(test_name, previous_cursor, stream_slice, observed_records, expected_state): cursor = DatetimeBasedCursor( start_datetime=MinMaxDatetime(datetime="2021-01-01T00:00:00.000000+0000", parameters={}), cursor_field=InterpolatedString(string=cursor_field, parameters={}), datetime_format="%Y-%m-%d", config=config, parameters={}, + partition_field_start="start_time", + partition_field_end="end_time", ) - cursor._cursor = previous_cursor - cursor.close_slice(stream_slice, Record(latest_record_data, stream_slice) if latest_record_data else None) + cursor.set_initial_state({cursor_field: previous_cursor}) + for record_data in observed_records: + record = Record(record_data, stream_slice) + cursor.observe(stream_slice, record) + cursor.close_slice(stream_slice) updated_state = cursor.get_stream_state() assert updated_state == expected_state @@ -401,40 +441,45 @@ def test_close_slice_fails_if_slice_has_a_partition(): ) stream_slice = StreamSlice(partition={"key": "value"}, cursor_slice={"end_time": "2022-01-01"}) with pytest.raises(ValueError): - cursor.close_slice(stream_slice, Record({"id": 1}, stream_slice)) + cursor.close_slice(stream_slice) -def test_given_different_format_and_slice_is_highest_when_close_slice_then_slice_datetime_format(): +def test_compares_cursor_values_by_chronological_order(): cursor = DatetimeBasedCursor( start_datetime=MinMaxDatetime(datetime="2021-01-01T00:00:00.000000+0000", parameters={}), cursor_field=cursor_field, - datetime_format="%Y-%m-%dT%H:%M:%S.%fZ", - cursor_datetime_formats=["%Y-%m-%d"], + datetime_format="%d-%m-%Y", config=config, parameters={}, ) - _slice = StreamSlice(partition={}, cursor_slice={"end_time": "2023-01-04T17:30:19.000Z"}) - record_cursor_value = "2023-01-03" - cursor.close_slice(_slice, Record({cursor_field: record_cursor_value}, _slice)) + _slice = StreamSlice(partition={}, cursor_slice={"start_time": "01-01-2023", "end_time": "01-04-2023"}) + first_record = Record({cursor_field: "21-02-2023"}, _slice) + cursor.observe(_slice, first_record) + second_record = Record({cursor_field: "01-03-2023"}, _slice) + cursor.observe(_slice, second_record) + cursor.close_slice(_slice) - assert cursor.get_stream_state()[cursor_field] == "2023-01-04T17:30:19.000Z" + assert cursor.get_stream_state()[cursor_field] == "01-03-2023" -def test_given_partition_end_is_specified_and_greater_than_record_when_close_slice_then_use_partition_end(): - partition_field_end = "partition_field_end" +def test_given_different_format_and_slice_is_highest_when_close_slice_then_state_uses_record_format(): cursor = DatetimeBasedCursor( start_datetime=MinMaxDatetime(datetime="2021-01-01T00:00:00.000000+0000", parameters={}), - cursor_field=InterpolatedString(string=cursor_field, parameters={}), - datetime_format="%Y-%m-%d", - partition_field_end=partition_field_end, + cursor_field=cursor_field, + datetime_format="%Y-%m-%dT%H:%M:%S.%fZ", + cursor_datetime_formats=["%Y-%m-%d"], config=config, parameters={}, ) - stream_slice = StreamSlice(partition={}, cursor_slice={partition_field_end: "2025-01-01"}) - cursor.close_slice(stream_slice, Record({cursor_field: "2020-01-01"}, stream_slice)) - updated_state = cursor.get_stream_state() - assert {cursor_field: "2025-01-01"} == updated_state + + _slice = StreamSlice(partition={}, cursor_slice={"start_time": "2023-01-01T17:30:19.000Z", "end_time": "2023-01-04T17:30:19.000Z"}) + record_cursor_value = "2023-01-03" + record = Record({cursor_field: record_cursor_value}, _slice) + cursor.observe(_slice, record) + cursor.close_slice(_slice) + + assert cursor.get_stream_state()[cursor_field] == "2023-01-03" @pytest.mark.parametrize( @@ -496,10 +541,10 @@ def test_request_option(test_name, inject_into, field_name, expected_req_params, parameters={}, ) stream_slice = {"start_time": "2021-01-01T00:00:00.000000+0000", "end_time": "2021-01-04T00:00:00.000000+0000"} - assert expected_req_params == slicer.get_request_params(stream_slice=stream_slice) - assert expected_headers == slicer.get_request_headers(stream_slice=stream_slice) - assert expected_body_json == slicer.get_request_body_json(stream_slice=stream_slice) - assert expected_body_data == slicer.get_request_body_data(stream_slice=stream_slice) + assert slicer.get_request_params(stream_slice=stream_slice) == expected_req_params + assert slicer.get_request_headers(stream_slice=stream_slice) == expected_headers + assert slicer.get_request_body_json(stream_slice=stream_slice) == expected_body_json + assert slicer.get_request_body_data(stream_slice=stream_slice) == expected_body_data @pytest.mark.parametrize( @@ -562,7 +607,7 @@ def test_parse_date_legacy_merge_datetime_format_in_cursor_datetime_format( parameters={}, ) output_date = slicer.parse_date(input_date) - assert expected_output_date == output_date + assert output_date == expected_output_date @pytest.mark.parametrize( @@ -629,7 +674,7 @@ def test_format_datetime(test_name, input_dt, datetimeformat, datetimeformat_gra ) output_date = slicer._format_datetime(input_dt) - assert expected_output == output_date + assert output_date == expected_output def test_step_but_no_cursor_granularity(): diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_per_partition_cursor.py b/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_per_partition_cursor.py index 769f3e073fcc6..683bee4030f59 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_per_partition_cursor.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_per_partition_cursor.py @@ -212,12 +212,11 @@ def test_close_slice(mocked_cursor_factory, mocked_partition_router): stream_slice = StreamSlice(partition={"partition key": "first partition"}, cursor_slice={}) mocked_partition_router.stream_slices.return_value = [stream_slice] cursor = PerPartitionCursor(mocked_cursor_factory, mocked_partition_router) - last_record = Mock() list(cursor.stream_slices()) # generate internal state - cursor.close_slice(stream_slice, last_record) + cursor.close_slice(stream_slice) - underlying_cursor.close_slice.assert_called_once_with(stream_slice.cursor_slice, Record(last_record.data, stream_slice.cursor_slice)) + underlying_cursor.close_slice.assert_called_once_with(stream_slice.cursor_slice) def test_given_no_last_record_when_close_slice_then_do_not_raise_error(mocked_cursor_factory, mocked_partition_router): @@ -228,9 +227,9 @@ def test_given_no_last_record_when_close_slice_then_do_not_raise_error(mocked_cu cursor = PerPartitionCursor(mocked_cursor_factory, mocked_partition_router) list(cursor.stream_slices()) # generate internal state - cursor.close_slice(stream_slice, None) + cursor.close_slice(stream_slice) - underlying_cursor.close_slice.assert_called_once_with(stream_slice.cursor_slice, None) + underlying_cursor.close_slice.assert_called_once_with(stream_slice.cursor_slice) def test_given_unknown_partition_when_close_slice_then_raise_error(): @@ -239,7 +238,7 @@ def test_given_unknown_partition_when_close_slice_then_raise_error(): cursor = PerPartitionCursor(any_cursor_factory, any_partition_router) stream_slice = StreamSlice(partition={"unknown_partition": "unknown"}, cursor_slice={}) with pytest.raises(ValueError): - cursor.close_slice(stream_slice, Record({}, stream_slice)) + cursor.close_slice(stream_slice) def test_given_unknown_partition_when_should_be_synced_then_raise_error(): diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_per_partition_cursor_integration.py b/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_per_partition_cursor_integration.py index e5080f1286a28..ca18e239ca20e 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_per_partition_cursor_integration.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_per_partition_cursor_integration.py @@ -200,14 +200,14 @@ def test_given_record_for_partition_when_read_then_update_state(): "states": [ { "partition": {"partition_field": "1"}, - "cursor": {CURSOR_FIELD: "2022-01-31"}, + "cursor": {CURSOR_FIELD: "2022-01-15"}, } ] } def test_substream_without_input_state(): - source = ManifestDeclarativeSource( + test_source = ManifestDeclarativeSource( source_config=ManifestBuilder() .with_substream_partition_router("AnotherStream") .with_incremental_sync( @@ -231,14 +231,14 @@ def test_substream_without_input_state(): .build() ) - stream_instance = source.streams({})[1] + stream_instance = test_source.streams({})[1] stream_slice = StreamSlice(partition={"parent_id": "1"}, cursor_slice={"start_time": "2022-01-01", "end_time": "2022-01-31"}) with patch.object( SimpleRetriever, "_read_pages", side_effect=[[Record({"id": "1", CURSOR_FIELD: "2022-01-15"}, stream_slice)], - Record({"id": "2", CURSOR_FIELD: "2022-01-15"}, stream_slice)] + [Record({"id": "2", CURSOR_FIELD: "2022-01-15"}, stream_slice)]] ): slices = list(stream_instance.stream_slices(sync_mode=SYNC_MODE)) assert list(slices) == [ @@ -246,6 +246,10 @@ def test_substream_without_input_state(): cursor_slice={"start_time": "2022-01-01", "end_time": "2022-01-31"}), StreamSlice(partition={"parent_id": "1", "parent_slice": {}, }, cursor_slice={"start_time": "2022-02-01", "end_time": "2022-02-28"}), + StreamSlice(partition={"parent_id": "2", "parent_slice": {}, }, + cursor_slice={"start_time": "2022-01-01", "end_time": "2022-01-31"}), + StreamSlice(partition={"parent_id": "2", "parent_slice": {}, }, + cursor_slice={"start_time": "2022-02-01", "end_time": "2022-02-28"}), ] @@ -307,7 +311,7 @@ def test_substream_with_legacy_input_state(): with patch.object( SimpleRetriever, "_read_pages", side_effect=[ [Record({"id": "1", CURSOR_FIELD: "2022-01-15"}, stream_slice)], - [Record({"parent_id": "1"}, stream_slice)], + [Record({"parent_id": "1", CURSOR_FIELD: "2022-01-15"}, stream_slice)], [Record({"id": "2", CURSOR_FIELD: "2022-01-15"}, stream_slice)], [Record({"parent_id": "2", CURSOR_FIELD: "2022-01-15"}, stream_slice)] ] @@ -319,7 +323,7 @@ def test_substream_with_legacy_input_state(): expected_state = {"states": [ { "cursor": { - "cursor_field": "2022-01-31" + CURSOR_FIELD: "2022-01-15" }, "partition": {"parent_id": "1", "parent_slice": {}} } diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/interpolation/test_jinja.py b/airbyte-cdk/python/unit_tests/sources/declarative/interpolation/test_jinja.py index 097afbb3487f2..31cfd569d6e86 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/interpolation/test_jinja.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/interpolation/test_jinja.py @@ -131,6 +131,27 @@ def test_negative_day_delta(): assert val <= (datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(days=25)).strftime("%Y-%m-%dT%H:%M:%S.%f%z") +@pytest.mark.parametrize( + "test_name, input_value, expected_output", + [ + ("test_string_to_string", "hello world", "hello world"), + ("test_int_to_string", 1, "1"), + ("test_number_to_string", 1.52, "1.52"), + ("test_true_to_string", True, "true"), + ("test_false_to_string", False, "false"), + ("test_array_to_string", ["hello", "world"], '["hello", "world"]'), + ("test_object_to_array", {"hello": "world"}, '{"hello": "world"}'), + ] +) +def test_to_string(test_name, input_value, expected_output): + interpolation = JinjaInterpolation() + config = {"key": input_value} + template = "{{ config['key'] | string }}" + actual_output = interpolation.eval(template, config, {}) + assert isinstance(actual_output, str) + assert actual_output == expected_output + + @pytest.mark.parametrize( "s, expected_value", [ @@ -231,6 +252,8 @@ def test_undeclared_variables(template_string, expected_error, expected_value): "2021-08-31T00:00:00Z", id="test_now_utc_with_duration_and_format", ), + pytest.param("{{ 1 | string }}", "1", id="test_int_to_string"), + pytest.param("{{ [\"hello\", \"world\"] | string }}", "[\"hello\", \"world\"]", id="test_array_to_string"), ], ) def test_macros_examples(template_string, expected_value): diff --git a/airbyte-integrations/connectors/source-google-workspace-admin-reports/integration_tests/__init__.py b/airbyte-cdk/python/unit_tests/sources/declarative/migrations/__init__.py old mode 100755 new mode 100644 similarity index 100% rename from airbyte-integrations/connectors/source-google-workspace-admin-reports/integration_tests/__init__.py rename to airbyte-cdk/python/unit_tests/sources/declarative/migrations/__init__.py diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/migrations/test_legacy_to_per_partition_migration.py b/airbyte-cdk/python/unit_tests/sources/declarative/migrations/test_legacy_to_per_partition_migration.py new file mode 100644 index 0000000000000..7fce15031ee19 --- /dev/null +++ b/airbyte-cdk/python/unit_tests/sources/declarative/migrations/test_legacy_to_per_partition_migration.py @@ -0,0 +1,295 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from unittest.mock import MagicMock + +import pytest +from airbyte_cdk.sources.declarative.migrations.legacy_to_per_partition_state_migration import LegacyToPerPartitionStateMigration +from airbyte_cdk.sources.declarative.models import CustomPartitionRouter, CustomRetriever, DatetimeBasedCursor, DeclarativeStream +from airbyte_cdk.sources.declarative.models import LegacyToPerPartitionStateMigration as LegacyToPerPartitionStateMigrationModel +from airbyte_cdk.sources.declarative.models import ParentStreamConfig, SimpleRetriever, SubstreamPartitionRouter +from airbyte_cdk.sources.declarative.parsers.manifest_component_transformer import ManifestComponentTransformer +from airbyte_cdk.sources.declarative.parsers.manifest_reference_resolver import ManifestReferenceResolver +from airbyte_cdk.sources.declarative.parsers.model_to_component_factory import ModelToComponentFactory +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +factory = ModelToComponentFactory() + +resolver = ManifestReferenceResolver() + +transformer = ManifestComponentTransformer() + + +def test_migrate_a_valid_legacy_state_to_per_partition(): + input_state = { + "13506132": { + "last_changed": "2022-12-27T08:34:39+00:00" + }, + "14351124": { + "last_changed": "2022-12-27T08:35:39+00:00" + }, + } + + migrator = _migrator() + + assert migrator.should_migrate(input_state) + + expected_state = { + "states": [ + { + "partition": {"parent_id": "13506132"}, + "cursor": {"last_changed": "2022-12-27T08:34:39+00:00"} + }, + { + "partition": {"parent_id": "14351124"}, + "cursor": {"last_changed": "2022-12-27T08:35:39+00:00"} + }, + ] + } + + assert migrator.migrate(input_state) == expected_state + + +@pytest.mark.parametrize( + "input_state", [ + pytest.param({ + "states": [ + { + "partition": {"id": "13506132"}, + "cursor": {"last_changed": "2022-12-27T08:34:39+00:00"} + }, + { + "partition": {"id": "14351124"}, + "cursor": {"last_changed": "2022-12-27T08:35:39+00:00"} + }, + ] + }, id="test_should_not_migrate_a_per_partition_state"), + pytest.param({ + "states": [ + { + "partition": {"id": "13506132"}, + "cursor": {"last_changed": "2022-12-27T08:34:39+00:00"} + }, + { + "partition": {"id": "14351124"}, + }, + ] + }, id="test_should_not_migrate_state_without_a_cursor_component"), + pytest.param({ + "states": [ + { + "partition": {"id": "13506132"}, + "cursor": {"updated_at": "2022-12-27T08:34:39+00:00"} + }, + { + "partition": {"id": "14351124"}, + "cursor": {"updated_at": "2022-12-27T08:35:39+00:00"} + }, + ] + }, id="test_should_not_migrate_a_per_partition_state_with_wrong_cursor_field"), + pytest.param({ + "states": [ + { + "partition": {"id": "13506132"}, + "cursor": {"last_changed": "2022-12-27T08:34:39+00:00"} + }, + { + "partition": {"id": "14351124"}, + "cursor": {"last_changed": "2022-12-27T08:35:39+00:00", "updated_at": "2021-01-01"} + }, + ] + }, id="test_should_not_migrate_a_per_partition_state_with_multiple_cursor_fields"), + pytest.param( + { + "states": [ + { + "partition": {"id": "13506132"}, + "cursor": {"last_changed": "2022-12-27T08:34:39+00:00"} + }, + { + "cursor": {"last_changed": "2022-12-27T08:34:39+00:00"} + }, + ] + }, id="test_should_not_migrate_state_without_a_partition_component" + ), + pytest.param( + { + "states": [ + { + "partition": {"id": "13506132", "another_id": "A"}, + "cursor": {"last_changed": "2022-12-27T08:34:39+00:00"} + }, + { + "partition": {"id": "13506134"}, + "cursor": {"last_changed": "2022-12-27T08:34:39+00:00"} + }, + ] + }, id="test_should_not_migrate_state_if_multiple_partition_keys" + ), + pytest.param( + { + "states": [ + { + "partition": {"identifier": "13506132"}, + "cursor": {"last_changed": "2022-12-27T08:34:39+00:00"} + }, + { + "partition": {"id": "13506134"}, + "cursor": {"last_changed": "2022-12-27T08:34:39+00:00"} + }, + ] + }, id="test_should_not_migrate_state_if_invalid_partition_key" + ), + pytest.param( + { + "13506132": { + "last_changed": "2022-12-27T08:34:39+00:00" + }, + "14351124": { + "last_changed": "2022-12-27T08:35:39+00:00", + "another_key": "2022-12-27T08:35:39+00:00" + }, + }, id="test_should_not_migrate_if_the_partitioned_state_has_more_than_one_key" + ), + pytest.param({ + "13506132": { + "last_changed": "2022-12-27T08:34:39+00:00" + }, + "14351124": { + "another_key": "2022-12-27T08:35:39+00:00" + }, + }, id="test_should_not_migrate_if_the_partitioned_state_key_is_not_the_cursor_field"), + ] +) +def test_should_not_migrate(input_state): + migrator = _migrator() + assert not migrator.should_migrate(input_state) + + +def test_should_not_migrate_stream_with_multiple_parent_streams(): + input_state = { + "13506132": { + "last_changed": "2022-12-27T08:34:39+00:00" + }, + "14351124": { + "last_changed": "2022-12-27T08:35:39+00:00" + }, + } + + migrator = _migrator_with_multiple_parent_streams() + + assert not migrator.should_migrate(input_state) + + +def _migrator(): + partition_router = SubstreamPartitionRouter( + type="SubstreamPartitionRouter", + parent_stream_configs=[ + ParentStreamConfig( + type="ParentStreamConfig", + parent_key="{{ parameters['parent_key_id'] }}", + partition_field="parent_id", + stream=DeclarativeStream( + type="DeclarativeStream", + retriever=CustomRetriever( + type="CustomRetriever", + class_name="a_class_name" + ) + ) + ) + ] + ) + cursor = DatetimeBasedCursor( + type="DatetimeBasedCursor", + cursor_field="{{ parameters['cursor_field'] }}", + datetime_format="%Y-%m-%dT%H:%M:%S.%fZ", + start_datetime="1970-01-01T00:00:00.0Z", + ) + config = {} + parameters = {"cursor_field": "last_changed", "parent_key_id": "id"} + return LegacyToPerPartitionStateMigration(partition_router, cursor, config, parameters) + + +def _migrator_with_multiple_parent_streams(): + partition_router = SubstreamPartitionRouter( + type="SubstreamPartitionRouter", + parent_stream_configs=[ + ParentStreamConfig( + type="ParentStreamConfig", + parent_key="id", + partition_field="parent_id", + stream=DeclarativeStream( + type="DeclarativeStream", + retriever=CustomRetriever( + type="CustomRetriever", + class_name="a_class_name" + ) + ) + ), + ParentStreamConfig( + type="ParentStreamConfig", + parent_key="id", + partition_field="parent_id", + stream=DeclarativeStream( + type="DeclarativeStream", + retriever=CustomRetriever( + type="CustomRetriever", + class_name="a_class_name" + ) + ) + ), + ] + ) + cursor = DatetimeBasedCursor( + type="DatetimeBasedCursor", + cursor_field="{{ parameters['cursor_field'] }}", + datetime_format="%Y-%m-%dT%H:%M:%S.%fZ", + start_datetime="1970-01-01T00:00:00.0Z", + ) + config = {} + parameters = {} + return LegacyToPerPartitionStateMigration(partition_router, cursor, config, parameters) + + +@pytest.mark.parametrize( + "retriever_type, partition_router_class, is_parent_stream_config, expected_exception, expected_error_message", + [ + (SimpleRetriever, CustomPartitionRouter, True, None, None), + (None, CustomPartitionRouter, True, ValueError, "LegacyToPerPartitionStateMigrations can only be applied on a DeclarativeStream with a SimpleRetriever. Got "), + (SimpleRetriever, None, False, ValueError, "LegacyToPerPartitionStateMigrations can only be applied on a SimpleRetriever with a Substream partition router. Got "), + (SimpleRetriever, CustomPartitionRouter, False, ValueError, "LegacyToPerPartitionStateMigrations can only be applied with a parent stream configuration."), + ] +) +def test_create_legacy_to_per_partition_state_migration( + retriever_type, + partition_router_class, + is_parent_stream_config, + expected_exception, + expected_error_message, +): + partition_router = partition_router_class(type="CustomPartitionRouter", class_name="a_class_namer") if partition_router_class else None + + stream = MagicMock() + stream.retriever = MagicMock(spec=retriever_type) + stream.retriever.partition_router = partition_router + + content = """ + state_migrations: + - type: LegacyToPerPartitionStateMigration + """ + + resolved_manifest = resolver.preprocess_manifest(YamlDeclarativeSource._parse(content)) + state_migrations_manifest = transformer.propagate_types_and_parameters("", resolved_manifest["state_migrations"][0], {}) + + if is_parent_stream_config: + parent_stream_config = ParentStreamConfig(type="ParentStreamConfig", parent_key="id", partition_field="parent_id", stream=DeclarativeStream(type="DeclarativeStream", retriever=CustomRetriever(type="CustomRetriever", class_name="a_class_name"))) + partition_router.parent_stream_configs = [parent_stream_config] + + if expected_exception: + with pytest.raises(expected_exception) as excinfo: + factory.create_component(model_type=LegacyToPerPartitionStateMigrationModel, component_definition=state_migrations_manifest, config={}, declarative_stream=stream) + assert str(excinfo.value) == expected_error_message + else: + migration_instance = factory.create_component(model_type=LegacyToPerPartitionStateMigrationModel, component_definition=state_migrations_manifest, config={}, declarative_stream=stream) + assert migration_instance is not None diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/parsers/test_model_to_component_factory.py b/airbyte-cdk/python/unit_tests/sources/declarative/parsers/test_model_to_component_factory.py index 0a5a796566c90..4f6c8db9b197d 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/parsers/test_model_to_component_factory.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/parsers/test_model_to_component_factory.py @@ -5,6 +5,7 @@ # mypy: ignore-errors import datetime +from typing import Any, Mapping import pytest from airbyte_cdk.models import Level @@ -27,6 +28,7 @@ from airbyte_cdk.sources.declarative.models import CompositeErrorHandler as CompositeErrorHandlerModel from airbyte_cdk.sources.declarative.models import CustomErrorHandler as CustomErrorHandlerModel from airbyte_cdk.sources.declarative.models import CustomPartitionRouter as CustomPartitionRouterModel +from airbyte_cdk.sources.declarative.models import CustomSchemaLoader as CustomSchemaLoaderModel from airbyte_cdk.sources.declarative.models import DatetimeBasedCursor as DatetimeBasedCursorModel from airbyte_cdk.sources.declarative.models import DeclarativeStream as DeclarativeStreamModel from airbyte_cdk.sources.declarative.models import DefaultPaginator as DefaultPaginatorModel @@ -66,6 +68,7 @@ from airbyte_cdk.sources.declarative.requesters.requester import HttpMethod from airbyte_cdk.sources.declarative.retrievers import SimpleRetriever, SimpleRetrieverTestReadDecorator from airbyte_cdk.sources.declarative.schema import JsonFileSchemaLoader +from airbyte_cdk.sources.declarative.schema.schema_loader import SchemaLoader from airbyte_cdk.sources.declarative.spec import Spec from airbyte_cdk.sources.declarative.stream_slicers import CartesianProductStreamSlicer from airbyte_cdk.sources.declarative.transformations import AddFields, RemoveFields @@ -240,8 +243,8 @@ def test_full_config_stream(): assert isinstance(stream.retriever.paginator.pagination_strategy, CursorPaginationStrategy) assert isinstance(stream.retriever.paginator.pagination_strategy.decoder, JsonDecoder) - assert stream.retriever.paginator.pagination_strategy.cursor_value.string == "{{ response._metadata.next }}" - assert stream.retriever.paginator.pagination_strategy.cursor_value.default == "{{ response._metadata.next }}" + assert stream.retriever.paginator.pagination_strategy._cursor_value.string == "{{ response._metadata.next }}" + assert stream.retriever.paginator.pagination_strategy._cursor_value.default == "{{ response._metadata.next }}" assert stream.retriever.paginator.pagination_strategy.page_size == 10 assert isinstance(stream.retriever.requester, HttpRequester) @@ -356,6 +359,9 @@ def test_single_use_oauth_branch(): interpolated_body_field: "{{ config['apikey'] }}" refresh_token_updater: refresh_token_name: "the_refresh_token" + refresh_token_error_status_codes: [400] + refresh_token_error_key: "error" + refresh_token_error_values: ["invalid_grant"] refresh_token_config_path: - apikey """ @@ -378,6 +384,9 @@ def test_single_use_oauth_branch(): # default values assert authenticator._access_token_config_path == ["credentials", "access_token"] assert authenticator._token_expiry_date_config_path == ["credentials", "token_expiry_date"] + assert authenticator._refresh_token_error_status_codes == [400] + assert authenticator._refresh_token_error_key == "error" + assert authenticator._refresh_token_error_values == ["invalid_grant"] def test_list_based_stream_slicer_with_values_refd(): @@ -1119,7 +1128,7 @@ def test_create_default_paginator(): assert isinstance(paginator.pagination_strategy, CursorPaginationStrategy) assert paginator.pagination_strategy.page_size == 50 - assert paginator.pagination_strategy.cursor_value.string == "{{ response._metadata.next }}" + assert paginator.pagination_strategy._cursor_value.string == "{{ response._metadata.next }}" assert isinstance(paginator.page_size_option, RequestOption) assert paginator.page_size_option.inject_into == RequestOptionType.request_parameter @@ -1239,6 +1248,20 @@ def test_create_default_paginator(): ValueError, id="test_create_custom_component_missing_required_field_emits_error", ), + pytest.param( + { + "type": "CustomErrorHandler", + "class_name": "unit_tests.sources.declarative.parsers.testing_components.NonExistingClass", + "paginator": { + "type": "DefaultPaginator", + "pagination_strategy": {"type": "OffsetIncrement", "page_size": 10}, + }, + }, + "paginator", + None, + ValueError, + id="test_create_custom_component_non_existing_class_raises_value_error", + ), ], ) def test_create_custom_components(manifest, field_name, expected_value, expected_error): @@ -1702,10 +1725,10 @@ def test_merge_incremental_and_partition_router(incremental, partition_router, e if incremental and partition_router: assert isinstance(stream.retriever.stream_slicer, PerPartitionCursor) - if type(partition_router) == list and len(partition_router) > 1: - assert type(stream.retriever.stream_slicer._partition_router) == CartesianProductStreamSlicer + if isinstance(partition_router, list) and len(partition_router) > 1: + assert isinstance(stream.retriever.stream_slicer._partition_router, CartesianProductStreamSlicer) assert len(stream.retriever.stream_slicer._partition_router.stream_slicers) == len(partition_router) - elif partition_router and type(partition_router) == list and len(partition_router) > 1: + elif partition_router and isinstance(partition_router, list) and len(partition_router) > 1: assert isinstance(stream.retriever.stream_slicer, PerPartitionCursor) assert len(stream.retriever.stream_slicer.stream_slicerS) == len(partition_router) @@ -1806,3 +1829,19 @@ def test_create_offset_increment(): assert strategy.page_size == expected_strategy.page_size assert strategy.inject_on_first_request == expected_strategy.inject_on_first_request assert strategy.config == input_config + + +class MyCustomSchemaLoader(SchemaLoader): + def get_json_schema(self) -> Mapping[str, Any]: + """Returns a mapping describing the stream's schema""" + return {} + + +def test_create_custom_schema_loader(): + + definition = { + "type": "CustomSchemaLoader", + "class_name": "unit_tests.sources.declarative.parsers.test_model_to_component_factory.MyCustomSchemaLoader" + } + component = factory.create_component(CustomSchemaLoaderModel, definition, {}) + assert isinstance(component, MyCustomSchemaLoader) diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_list_partition_router.py b/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_list_partition_router.py index b98f8f82d0b7b..67eb064c0e2c8 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_list_partition_router.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_list_partition_router.py @@ -95,10 +95,10 @@ def test_request_option(request_option, expected_req_params, expected_headers, e ) stream_slice = {cursor_field: "customer"} - assert expected_req_params == partition_router.get_request_params(stream_slice=stream_slice) - assert expected_headers == partition_router.get_request_headers(stream_slice=stream_slice) - assert expected_body_json == partition_router.get_request_body_json(stream_slice=stream_slice) - assert expected_body_data == partition_router.get_request_body_data(stream_slice=stream_slice) + assert partition_router.get_request_params(stream_slice=stream_slice) == expected_req_params + assert partition_router.get_request_headers(stream_slice=stream_slice) == expected_headers + assert partition_router.get_request_body_json(stream_slice=stream_slice) == expected_body_json + assert partition_router.get_request_body_data(stream_slice=stream_slice) == expected_body_data @pytest.mark.parametrize( @@ -139,7 +139,7 @@ def test_request_options_interpolation(field_name_interpolation: str, expected_r ) stream_slice = {cursor_field: "customer"} - assert expected_request_params == partition_router.get_request_params(stream_slice=stream_slice) + assert partition_router.get_request_params(stream_slice=stream_slice) == expected_request_params def test_request_option_before_updating_cursor(): diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_substream_partition_router.py b/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_substream_partition_router.py index 664dcaa734213..5daaeb4036722 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_substream_partition_router.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_substream_partition_router.py @@ -295,10 +295,10 @@ def test_request_option( ) stream_slice = {"first_stream_id": "1234", "second_stream_id": "4567"} - assert expected_req_params == partition_router.get_request_params(stream_slice=stream_slice) - assert expected_headers == partition_router.get_request_headers(stream_slice=stream_slice) - assert expected_body_json == partition_router.get_request_body_json(stream_slice=stream_slice) - assert expected_body_data == partition_router.get_request_body_data(stream_slice=stream_slice) + assert partition_router.get_request_params(stream_slice=stream_slice) == expected_req_params + assert partition_router.get_request_headers(stream_slice=stream_slice) == expected_headers + assert partition_router.get_request_body_json(stream_slice=stream_slice) == expected_body_json + assert partition_router.get_request_body_data(stream_slice=stream_slice) == expected_body_data @pytest.mark.parametrize( @@ -353,7 +353,7 @@ def test_request_params_interpolation_for_parent_stream( ) stream_slice = {"first_stream_id": "1234", "second_stream_id": "4567"} - assert expected_request_params == partition_router.get_request_params(stream_slice=stream_slice) + assert partition_router.get_request_params(stream_slice=stream_slice) == expected_request_params def test_given_record_is_airbyte_message_when_stream_slices_then_use_record_data(): diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_cursor_pagination_strategy.py b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_cursor_pagination_strategy.py index ce15b586125b5..ea379822b004d 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_cursor_pagination_strategy.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_cursor_pagination_strategy.py @@ -23,6 +23,7 @@ ("test_token_not_found", "{{ response.invalid_key }}", None, None, None), ("test_static_token_with_stop_condition_false", "token", InterpolatedBoolean("{{False}}", parameters={}), "token", None), ("test_static_token_with_stop_condition_true", "token", InterpolatedBoolean("{{True}}", parameters={}), None, None), + ("test_static_token_with_string_stop_condition", "token", "{{True}}", None, None), ( "test_token_from_header", "{{ headers.next }}", @@ -60,5 +61,33 @@ def test_cursor_pagination_strategy(test_name, template_string, stop_condition, last_records = [{"id": 0, "more_records": True}, {"id": 1, "more_records": True}] token = strategy.next_page_token(response, last_records) - assert expected_token == token + assert token == expected_token assert page_size == strategy.get_page_size() + + +def test_last_record_points_to_the_last_item_in_last_records_array(): + last_records = [{"id": 0, "more_records": True}, {"id": 1, "more_records": True}] + strategy = CursorPaginationStrategy( + page_size=1, + cursor_value="{{ last_record.id }}", + config={}, + parameters={}, + ) + + response = requests.Response() + next_page_token = strategy.next_page_token(response, last_records) + assert next_page_token == 1 + + +def test_last_record_is_node_if_no_records(): + last_records = [] + strategy = CursorPaginationStrategy( + page_size=1, + cursor_value="{{ last_record.id }}", + config={}, + parameters={}, + ) + + response = requests.Response() + next_page_token = strategy.next_page_token(response, last_records) + assert next_page_token is None diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_offset_increment.py b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_offset_increment.py index 37f26a2af420c..88e6cda7f15bd 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_offset_increment.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_offset_increment.py @@ -31,8 +31,8 @@ def test_offset_increment_paginator_strategy(page_size, parameters, last_records response._content = json.dumps(response_body).encode("utf-8") next_page_token = paginator_strategy.next_page_token(response, last_records) - assert expected_next_page_token == next_page_token - assert expected_offset == paginator_strategy._offset + assert next_page_token == expected_next_page_token + assert paginator_strategy._offset == expected_offset paginator_strategy.reset() assert 0 == paginator_strategy._offset diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_page_increment.py b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_page_increment.py index 42d7995388e71..f108dcfe8301f 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_page_increment.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_page_increment.py @@ -33,8 +33,8 @@ def test_page_increment_paginator_strategy(page_size, start_from, last_records, response._content = json.dumps(response_body).encode("utf-8") next_page_token = paginator_strategy.next_page_token(response, last_records) - assert expected_next_page_token == next_page_token - assert expected_offset == paginator_strategy._page + assert next_page_token == expected_next_page_token + assert paginator_strategy._page == expected_offset paginator_strategy.reset() assert start_from == paginator_strategy._page diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/test_http_requester.py b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/test_http_requester.py index a861e76f2a0cb..8600a0ea9571e 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/test_http_requester.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/test_http_requester.py @@ -386,67 +386,67 @@ def test_send_request_params(provider_params, param_params, authenticator_params "k=%7B%27updatedDateFrom%27%3A+%272023-08-20T00%3A00%3A00Z%27%2C+%27updatedDateTo%27%3A+%272023-08-20T23%3A59%3A59Z%27%7D", id="test-request-parameter-from-config-object", ), - ], -) -def test_request_param_interpolation(request_parameters, config, expected_query_params): - options_provider = InterpolatedRequestOptionsProvider( - config=config, - request_parameters=request_parameters, - request_body_data={}, - request_headers={}, - parameters={}, - ) - requester = create_requester() - requester._request_options_provider = options_provider - requester.send_request() - sent_request: PreparedRequest = requester._session.send.call_args_list[0][0][0] - assert sent_request.url.split("?", 1)[-1] == expected_query_params - - -@pytest.mark.parametrize( - "request_parameters, config, invalid_value_for_key", - [ pytest.param( {"k": "[1,2]"}, {}, - "k", + "k=1&k=2", id="test-request-parameter-list-of-numbers", ), - pytest.param( - {"k": {"updatedDateFrom": "2023-08-20T00:00:00Z", "updatedDateTo": "2023-08-20T23:59:59Z"}}, - {}, - "k", - id="test-request-parameter-object-of-the-updated-info", - ), pytest.param( {"k": '["a", "b"]'}, {}, - "k", + "k=a&k=b", id="test-request-parameter-list-of-strings", ), pytest.param( {"k": '{{ config["k"] }}'}, {"k": [1, 2]}, - "k", + "k=1&k=2", id="test-request-parameter-from-config-list-of-numbers", ), pytest.param( {"k": '{{ config["k"] }}'}, {"k": ["a", "b"]}, - "k", + "k=a&k=b", id="test-request-parameter-from-config-list-of-strings", ), pytest.param( {"k": '{{ config["k"] }}'}, {"k": ["a,b"]}, - "k", + "k=a%2Cb", id="test-request-parameter-from-config-comma-separated-strings", ), pytest.param( {'["a", "b"]': '{{ config["k"] }}'}, {"k": [1, 2]}, - '["a", "b"]', - id="test-key-with-list-is-not-interpolated", + "%5B%22a%22%2C+%22b%22%5D=1&%5B%22a%22%2C+%22b%22%5D=2", + id="test-key-with-list-to-be-interpolated", + ) + ], +) +def test_request_param_interpolation(request_parameters, config, expected_query_params): + options_provider = InterpolatedRequestOptionsProvider( + config=config, + request_parameters=request_parameters, + request_body_data={}, + request_headers={}, + parameters={}, + ) + requester = create_requester() + requester._request_options_provider = options_provider + requester.send_request() + sent_request: PreparedRequest = requester._session.send.call_args_list[0][0][0] + assert sent_request.url.split("?", 1)[-1] == expected_query_params + + +@pytest.mark.parametrize( + "request_parameters, config, invalid_value_for_key", + [ + pytest.param( + {"k": {"updatedDateFrom": "2023-08-20T00:00:00Z", "updatedDateTo": "2023-08-20T23:59:59Z"}}, + {}, + "k", + id="test-request-parameter-object-of-the-updated-info", ), pytest.param( {"a": '{{ config["k"] }}', "b": {"end_timestamp": 1699109113}}, @@ -471,7 +471,7 @@ def test_request_param_interpolation_with_incorrect_values(request_parameters, c assert ( error.value.args[0] - == f"Invalid value for `{invalid_value_for_key}` parameter. The values of request params cannot be an array or object." + == f"Invalid value for `{invalid_value_for_key}` parameter. The values of request params cannot be an object." ) diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/retrievers/test_simple_retriever.py b/airbyte-cdk/python/unit_tests/sources/declarative/retrievers/test_simple_retriever.py index 438a1497df84e..0a7480194d2be 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/retrievers/test_simple_retriever.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/retrievers/test_simple_retriever.py @@ -219,7 +219,7 @@ def test_get_request_options_from_pagination(test_name, paginator_mapping, strea for _, method in request_option_type_to_method.items(): if expected_mapping is not None: actual_mapping = method(None, None, None) - assert expected_mapping == actual_mapping + assert actual_mapping == expected_mapping else: try: method(None, None, None) @@ -264,7 +264,7 @@ def test_get_request_headers(test_name, paginator_mapping, expected_mapping): for _, method in request_option_type_to_method.items(): if expected_mapping: actual_mapping = method(None, None, None) - assert expected_mapping == actual_mapping + assert actual_mapping == expected_mapping else: try: method(None, None, None) @@ -310,7 +310,7 @@ def test_ignore_stream_slicer_parameters_on_paginated_requests(test_name, pagina for _, method in request_option_type_to_method.items(): actual_mapping = method(None, None, next_page_token={"next_page_token": "1000"}) - assert expected_mapping == actual_mapping + assert actual_mapping == expected_mapping @pytest.mark.parametrize( @@ -345,7 +345,7 @@ def test_request_body_data(test_name, slicer_body_data, paginator_body_data, exp if expected_body_data: actual_body_data = retriever._request_body_data(None, None, None) - assert expected_body_data == actual_body_data + assert actual_body_data == expected_body_data else: try: retriever._request_body_data(None, None, None) @@ -380,7 +380,7 @@ def test_path(test_name, requester_path, paginator_path, expected_path): ) actual_path = retriever._paginator_path() - assert expected_path == actual_path + assert actual_path == expected_path def test_limit_stream_slices(): @@ -477,6 +477,7 @@ def retriever_read_pages(_, __, ___): side_effect=retriever_read_pages, ): list(retriever.read_records(stream_slice=stream_slice, records_schema={})) + cursor.observe.assert_not_called() cursor.close_slice.assert_called_once_with(stream_slice, None) diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/stream_slicers/test_cartesian_product_stream_slicer.py b/airbyte-cdk/python/unit_tests/sources/declarative/stream_slicers/test_cartesian_product_stream_slicer.py index 74b1cc6ec3135..6f07f20297392 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/stream_slicers/test_cartesian_product_stream_slicer.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/stream_slicers/test_cartesian_product_stream_slicer.py @@ -191,10 +191,10 @@ def test_request_option( ) stream_slice = {"owner_resource": "customer", "repository": "airbyte"} - assert expected_req_params == slicer.get_request_params(stream_slice=stream_slice) - assert expected_headers == slicer.get_request_headers(stream_slice=stream_slice) - assert expected_body_json == slicer.get_request_body_json(stream_slice=stream_slice) - assert expected_body_data == slicer.get_request_body_data(stream_slice=stream_slice) + assert slicer.get_request_params(stream_slice=stream_slice) == expected_req_params + assert slicer.get_request_headers(stream_slice=stream_slice) == expected_headers + assert slicer.get_request_body_json(stream_slice=stream_slice) == expected_body_json + assert slicer.get_request_body_data(stream_slice=stream_slice) == expected_body_data def test_request_option_before_updating_cursor(): diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/test_create_partial.py b/airbyte-cdk/python/unit_tests/sources/declarative/test_create_partial.py deleted file mode 100644 index 7d7860c5ae570..0000000000000 --- a/airbyte-cdk/python/unit_tests/sources/declarative/test_create_partial.py +++ /dev/null @@ -1,83 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import pytest -from airbyte_cdk.sources.declarative.create_partial import _key_is_unset_or_identical, create -from airbyte_cdk.sources.declarative.interpolation.interpolated_string import InterpolatedString - - -class AClass: - def __init__(self, parameter, another_param, parameters): - self.parameter = parameter - self.another_param = another_param - self.parameters = parameters - - -class OuterClass: - def __init__(self, name, some_field, inner_param): - self.name = name - self.some_field = some_field - self.inner_param = inner_param - - -class OuterOuterClass: - def __init__(self, name, param, inner_class): - self.name = name - self.param = param - self.inner_class = inner_class - - -def test_pass_parameter_to_create_function(): - object = create(AClass, parameter="A")(another_param="B") - assert object.parameter == "A" - assert object.another_param == "B" - - -def test_parameter_not_overwritten_by_parameters(): - object = create(AClass, parameter="A", another_param="B", **{"$parameters": {"parameter": "C"}})() - assert object.parameter == "A" - assert object.another_param == "B" - - -def test_overwrite_param(): - object = create(AClass, parameter="A", another_param="B")(parameter="C") - assert object.parameter == "C" - assert object.another_param == "B" - - -def test_string_interpolation(): - s = "{{ next_page_token['next_page_url'] }}" - partial = create(InterpolatedString, string=s) - interpolated_string = partial() - assert interpolated_string.string == s - - -def test_string_interpolation_through_parameters(): - s = "{{ parameters['name'] }}" - parameters = {"name": "airbyte"} - partial = create(InterpolatedString, string=s, **parameters) - interpolated_string = partial() - assert interpolated_string.eval({}) == "airbyte" - - -def test_string_interpolation_through_parameters_keyword(): - s = "{{ parameters['name'] }}" - parameters = {"$parameters": {"name": "airbyte"}} - partial = create(InterpolatedString, string=s, **parameters) - interpolated_string = partial() - assert interpolated_string.eval({}) == "airbyte" - - -@pytest.mark.parametrize( - "test_name, key, value, expected_result", - [ - ("test", "key", "value", True), - ("test", "key", "a_different_value", False), - ("test", "a_different_key", "value", True), - ], -) -def test_key_is_unset_or_identical(test_name, key, value, expected_result): - mapping = {"key": "value"} - result = _key_is_unset_or_identical(key, value, mapping) - assert expected_result == result diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/test_declarative_stream.py b/airbyte-cdk/python/unit_tests/sources/declarative/test_declarative_stream.py index e5e16e66044a7..fef085073960b 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/test_declarative_stream.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/test_declarative_stream.py @@ -97,6 +97,60 @@ def test_state_checkpoint_interval(): assert stream.state_checkpoint_interval is None +def test_state_migrations(): + intermediate_state = {"another_key", "another_value"} + final_state = {"yet_another_key", "yet_another_value"} + first_state_migration = MagicMock() + first_state_migration.should_migrate.return_value = True + first_state_migration.migrate.return_value = intermediate_state + second_state_migration = MagicMock() + second_state_migration.should_migrate.return_value = True + second_state_migration.migrate.return_value = final_state + + stream = DeclarativeStream( + name="any name", + primary_key="any primary key", + stream_cursor_field="{{ parameters['cursor_field'] }}", + schema_loader=MagicMock(), + retriever=MagicMock(), + state_migrations=[first_state_migration, second_state_migration], + config={}, + parameters={}, + ) + + input_state = {"a_key": "a_value"} + + stream.state = input_state + assert stream.state == final_state + first_state_migration.should_migrate.assert_called_once_with(input_state) + first_state_migration.migrate.assert_called_once_with(input_state) + second_state_migration.should_migrate.assert_called_once_with(intermediate_state) + second_state_migration.migrate.assert_called_once_with(intermediate_state) + + +def test_no_state_migration_is_applied_if_the_state_should_not_be_migrated(): + state_migration = MagicMock() + state_migration.should_migrate.return_value = False + + stream = DeclarativeStream( + name="any name", + primary_key="any primary key", + stream_cursor_field="{{ parameters['cursor_field'] }}", + schema_loader=MagicMock(), + retriever=MagicMock(), + state_migrations=[state_migration], + config={}, + parameters={}, + ) + + input_state = {"a_key": "a_value"} + + stream.state = input_state + assert stream.state == input_state + state_migration.should_migrate.assert_called_once_with(input_state) + assert not state_migration.migrate.called + + def _schema_loader(): schema_loader = MagicMock() schema_loader.get_json_schema.return_value = _json_schema diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/test_manifest_declarative_source.py b/airbyte-cdk/python/unit_tests/sources/declarative/test_manifest_declarative_source.py index 8252089b5d4c3..8a1026776fd3c 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/test_manifest_declarative_source.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/test_manifest_declarative_source.py @@ -1123,7 +1123,7 @@ def test_read_manifest_declarative_source(test_name, manifest, pages, expected_r _stream_name = "Rates" with patch.object(SimpleRetriever, "_fetch_next_page", side_effect=pages) as mock_retriever: output_data = [message.record.data for message in _run_read(manifest, _stream_name) if message.record] - assert expected_records == output_data + assert output_data == expected_records mock_retriever.assert_has_calls(expected_calls) diff --git a/airbyte-cdk/python/unit_tests/sources/file_based/config/test_csv_format.py b/airbyte-cdk/python/unit_tests/sources/file_based/config/test_csv_format.py index 1b2d23b810cc2..0821f42af3206 100644 --- a/airbyte-cdk/python/unit_tests/sources/file_based/config/test_csv_format.py +++ b/airbyte-cdk/python/unit_tests/sources/file_based/config/test_csv_format.py @@ -30,4 +30,5 @@ def test_given_from_csv_then_csv_has_header_row(self) -> None: class CsvDelimiterTest(unittest.TestCase): def test_tab_delimter(self): - assert CsvFormat(delimiter=r"\t").delimiter == '\\t' + assert CsvFormat(delimiter=r"\t").delimiter == '\t' + assert len(CsvFormat(delimiter=r"\t").delimiter) == 1 diff --git a/airbyte-cdk/python/unit_tests/sources/file_based/file_types/test_csv_parser.py b/airbyte-cdk/python/unit_tests/sources/file_based/file_types/test_csv_parser.py index 3dfea9bb17dfd..64045bed8bd79 100644 --- a/airbyte-cdk/python/unit_tests/sources/file_based/file_types/test_csv_parser.py +++ b/airbyte-cdk/python/unit_tests/sources/file_based/file_types/test_csv_parser.py @@ -512,6 +512,75 @@ def _read_data(self) -> Generator[Dict[str, str], None, None]: return data_generator +_TOO_MANY_VALUES = [ + "header", + "too many values,value,value,value", +] + +_TOO_FEW_VALUES = [ + "header1,header2,header3", + "a value", + "value1,value2,value3", +] + + +@pytest.mark.parametrize( + "ignore_errors_on_fields_mismatch, data, error_message", + [ + ( + True, + _TOO_MANY_VALUES, + "Skipping record in line 2 of file a uri; invalid CSV row with missing column.", + ), + ( + False, + _TOO_MANY_VALUES, + None, + ), + ( + True, + _TOO_FEW_VALUES, + "Skipping record in line 2 of file a uri; invalid CSV row with extra column.", + ), + ( + False, + _TOO_FEW_VALUES, + None, + ), + ], +) +def test_mismatch_between_values_and_header(ignore_errors_on_fields_mismatch, data, error_message) -> None: + config_format = CsvFormat() + config = Mock() + config.name = "config_name" + config.format = config_format + + file = RemoteFile(uri="a uri", last_modified=datetime.now()) + stream_reader = Mock(spec=AbstractFileBasedStreamReader) + logger = Mock(spec=logging.Logger) + csv_reader = _CsvReader() + + config_format.ignore_errors_on_fields_mismatch = ignore_errors_on_fields_mismatch + stream_reader.open_file.return_value = CsvFileBuilder().with_data(data).build() + + data_generator = csv_reader.read_data( + config, + file, + stream_reader, + logger, + FileReadMode.READ, + ) + + # Check if exception is raised only when skip_wrong_number_of_fields_error is False + if not ignore_errors_on_fields_mismatch: + with pytest.raises(RecordParseError): + print(list(data_generator)) + else: + # Expect no exception when skip_wrong_number_of_fields_error is True + list(data_generator) + logger.error.assert_called_with(error_message) + + def test_encoding_is_passed_to_stream_reader() -> None: parser = CsvParser() encoding = "ascii" diff --git a/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/csv_scenarios.py b/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/csv_scenarios.py index bba3977db2fda..3fe7ee42583d3 100644 --- a/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/csv_scenarios.py +++ b/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/csv_scenarios.py @@ -265,6 +265,12 @@ "airbyte_hidden": True, "enum": ["None", "Primitive Types Only"], }, + "ignore_errors_on_fields_mismatch": { + "type": "boolean", + "title": "Ignore errors on field mismatch", + "default": False, + "description": "Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema.", + }, }, "required": ["filetype"], }, @@ -467,30 +473,24 @@ ) ).build() -multi_format_analytics_scenario: TestScenario[InMemoryFilesSource] = ( +csv_analytics_scenario: TestScenario[InMemoryFilesSource] = ( TestScenarioBuilder[InMemoryFilesSource]() - .set_name("multi_format_analytics") + .set_name("csv_analytics") .set_config( { "streams": [ { "name": "stream1", "format": {"filetype": "csv"}, - "globs": ["file1.csv"], + "globs": ["a.csv"], "validation_policy": "Emit Record", }, { "name": "stream2", "format": {"filetype": "csv"}, - "globs": ["file2.csv"], - "validation_policy": "Emit Record", - }, - { - "name": "stream3", - "format": {"filetype": "jsonl"}, - "globs": ["file3.jsonl"], + "globs": ["b.csv"], "validation_policy": "Emit Record", - }, + } ] } ) @@ -498,17 +498,21 @@ FileBasedSourceBuilder() .set_files( { - "file1.csv": { - "contents": [], + "a.csv": { + "contents": [ + ("col1", "col2"), + ("val11a", "val12a"), + ("val21a", "val22a"), + ], "last_modified": "2023-06-05T03:54:07.000Z", }, - "file2.csv": { - "contents": [], - "last_modified": "2023-06-06T03:54:07.000Z", - }, - "file3.jsonl": { - "contents": [], - "last_modified": "2023-06-07T03:54:07.000Z", + "b.csv": { + "contents": [ + ("col1", "col2", "col3"), + ("val11b", "val12b", "val13b"), + ("val21b", "val22b", "val23b"), + ], + "last_modified": "2023-06-05T03:54:07.000Z", }, } ) @@ -521,7 +525,12 @@ "default_cursor_field": ["_ab_source_file_last_modified"], "json_schema": { "type": "object", - "properties": {}, + "properties": { + "col1": {"type": ["null", "string"]}, + "col2": {"type": ["null", "string"]}, + "_ab_source_file_last_modified": {"type": "string"}, + "_ab_source_file_url": {"type": "string"}, + }, }, "name": "stream1", "source_defined_cursor": True, @@ -531,30 +540,64 @@ "default_cursor_field": ["_ab_source_file_last_modified"], "json_schema": { "type": "object", - "properties": {}, + "properties": { + "col1": {"type": ["null", "string"]}, + "col2": {"type": ["null", "string"]}, + "col3": {"type": ["null", "string"]}, + "_ab_source_file_last_modified": {"type": "string"}, + "_ab_source_file_url": {"type": "string"}, + }, }, "name": "stream2", "source_defined_cursor": True, "supported_sync_modes": ["full_refresh", "incremental"], - }, - { - "default_cursor_field": ["_ab_source_file_last_modified"], - "json_schema": { - "type": "object", - "properties": {}, - }, - "name": "stream3", - "source_defined_cursor": True, - "supported_sync_modes": ["full_refresh", "incremental"], - }, + } ] } ) - .set_expected_records([]) + .set_expected_records([ + { + "data": { + "col1": "val11a", + "col2": "val12a", + "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z", + "_ab_source_file_url": "a.csv", + }, + "stream": "stream1", + }, + { + "data": { + "col1": "val21a", + "col2": "val22a", + "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z", + "_ab_source_file_url": "a.csv", + }, + "stream": "stream1", + }, + { + "data": { + "col1": "val11b", + "col2": "val12b", + "col3": "val13b", + "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z", + "_ab_source_file_url": "b.csv", + }, + "stream": "stream2", + }, + { + "data": { + "col1": "val21b", + "col2": "val22b", + "col3": "val23b", + "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z", + "_ab_source_file_url": "b.csv", + }, + "stream": "stream2", + }, + ]) .set_expected_analytics( [ AirbyteAnalyticsTraceMessage(type="file-cdk-csv-stream-count", value="2"), - AirbyteAnalyticsTraceMessage(type="file-cdk-jsonl-stream-count", value="1"), ] ) ).build() @@ -1450,7 +1493,6 @@ } ) .set_expected_discover_error(AirbyteTracedException, FileBasedSourceError.SCHEMA_INFERENCE_ERROR.value) - .set_expected_records([]) ).build() schemaless_csv_scenario: TestScenario[InMemoryFilesSource] = ( @@ -3009,6 +3051,61 @@ ] } ) - .set_expected_records([]) .set_expected_discover_error(AirbyteTracedException, FileBasedSourceError.SCHEMA_INFERENCE_ERROR.value) ).build() + +csv_no_records_scenario: TestScenario[InMemoryFilesSource] = ( + TestScenarioBuilder[InMemoryFilesSource]() + .set_name("csv_empty_no_records") + .set_config( + { + "streams": [ + { + "name": "stream1", + "globs": ["*"], + "validation_policy": "Emit Record", + "input_schema": '{"col1": "boolean", "col2": "string"}', + "format": { + "filetype": "csv", + "null_values": ["null"], + }, + } + ], + "start_date": "2023-06-04T03:54:07.000000Z", + } + ) + .set_source_builder( + FileBasedSourceBuilder() + .set_files( + { + "a.csv": { + "contents": [("col1", "col2")], # column headers, but no data rows + "last_modified": "2023-06-05T03:54:07.000Z", + } + } + ) + .set_file_type("csv") + ) + .set_expected_catalog( + { + "streams": [ + { + "default_cursor_field": ["_ab_source_file_last_modified"], + "json_schema": { + "type": "object", + "properties": { + "col1": {"type": "boolean"}, + "col2": {"type": "string"}, + "_ab_source_file_last_modified": {"type": "string"}, + "_ab_source_file_url": {"type": "string"}, + }, + }, + "name": "stream1", + "source_defined_cursor": True, + "supported_sync_modes": ["full_refresh", "incremental"], + } + ] + } + ) + .set_expected_records([]) +).build() diff --git a/airbyte-cdk/python/unit_tests/sources/file_based/stream/concurrent/test_adapters.py b/airbyte-cdk/python/unit_tests/sources/file_based/stream/concurrent/test_adapters.py index 01ac6bcb78d3d..95d59fc82ceb1 100644 --- a/airbyte-cdk/python/unit_tests/sources/file_based/stream/concurrent/test_adapters.py +++ b/airbyte-cdk/python/unit_tests/sources/file_based/stream/concurrent/test_adapters.py @@ -23,7 +23,7 @@ FileBasedStreamPartition, FileBasedStreamPartitionGenerator, ) -from airbyte_cdk.sources.file_based.stream.concurrent.cursor import FileBasedNoopCursor +from airbyte_cdk.sources.file_based.stream.concurrent.cursor import FileBasedFinalStateCursor from airbyte_cdk.sources.message import InMemoryMessageRepository from airbyte_cdk.sources.streams.concurrent.cursor import Cursor from airbyte_cdk.sources.streams.concurrent.exceptions import ExceptionWithDisplayMessage @@ -36,7 +36,7 @@ _ANY_STATE = {"state_key": "state_value"} _ANY_CURSOR_FIELD = ["a", "cursor", "key"] _STREAM_NAME = "stream" -_ANY_CURSOR = Mock(spec=FileBasedNoopCursor) +_ANY_CURSOR = Mock(spec=FileBasedFinalStateCursor) @pytest.mark.parametrize( @@ -165,7 +165,7 @@ def setUp(self): supported_sync_modes=[SyncMode.full_refresh], ) self._legacy_stream = DefaultFileBasedStream( - cursor=FileBasedNoopCursor(MagicMock()), + cursor=FileBasedFinalStateCursor(stream_config=MagicMock(), stream_namespace=None, message_repository=Mock()), config=FileBasedStreamConfig(name="stream", format=CsvFormat()), catalog_schema={}, stream_reader=MagicMock(), @@ -329,7 +329,7 @@ def test_get_error_display_message_no_display_message(self): display_message = facade.get_error_display_message(e) - assert expected_display_message == display_message + assert display_message == expected_display_message def test_get_error_display_message_with_display_message(self): self._stream.get_error_display_message.return_value = "display_message" @@ -341,7 +341,7 @@ def test_get_error_display_message_with_display_message(self): display_message = facade.get_error_display_message(e) - assert expected_display_message == display_message + assert display_message == expected_display_message @pytest.mark.parametrize( diff --git a/airbyte-cdk/python/unit_tests/sources/file_based/stream/test_default_file_based_cursor.py b/airbyte-cdk/python/unit_tests/sources/file_based/stream/test_default_file_based_cursor.py index 2088097f7ef48..957ed912aa4bd 100644 --- a/airbyte-cdk/python/unit_tests/sources/file_based/stream/test_default_file_based_cursor.py +++ b/airbyte-cdk/python/unit_tests/sources/file_based/stream/test_default_file_based_cursor.py @@ -111,8 +111,8 @@ def test_add_file(files_to_add: List[RemoteFile], expected_start_time: List[date for index, f in enumerate(files_to_add): cursor.add_file(f) - assert expected_start_time[index] == cursor._compute_start_time() - assert expected_state_dict == cursor.get_state() + assert cursor._compute_start_time() == expected_start_time[index] + assert cursor.get_state() == expected_state_dict @pytest.mark.parametrize( diff --git a/airbyte-cdk/python/unit_tests/sources/file_based/test_file_based_scenarios.py b/airbyte-cdk/python/unit_tests/sources/file_based/test_file_based_scenarios.py index 5a7a7b72ff9be..6969dfd0f39b8 100644 --- a/airbyte-cdk/python/unit_tests/sources/file_based/test_file_based_scenarios.py +++ b/airbyte-cdk/python/unit_tests/sources/file_based/test_file_based_scenarios.py @@ -50,6 +50,7 @@ single_csv_no_input_state_scenario_concurrent, ) from unit_tests.sources.file_based.scenarios.csv_scenarios import ( + csv_analytics_scenario, csv_autogenerate_column_names_scenario, csv_custom_bool_values_scenario, csv_custom_delimiter_in_double_quotes_scenario, @@ -61,6 +62,7 @@ csv_multi_stream_scenario, csv_newline_in_values_not_quoted_scenario, csv_newline_in_values_quoted_value_scenario, + csv_no_records_scenario, csv_single_stream_scenario, csv_skip_after_header_scenario, csv_skip_before_and_after_header_scenario, @@ -75,7 +77,6 @@ invalid_csv_scenario, multi_csv_scenario, multi_csv_stream_n_file_exceeds_limit_for_inference, - multi_format_analytics_scenario, multi_stream_custom_format, schemaless_csv_multi_stream_scenario, schemaless_csv_scenario, @@ -152,7 +153,13 @@ ) from unit_tests.sources.file_based.test_scenarios import verify_check, verify_discover, verify_read, verify_spec -discover_scenarios = [ +discover_failure_scenarios = [ + earlier_csv_scenario, + empty_schema_inference_scenario, +] + +discover_success_scenarios = [ + csv_no_records_scenario, csv_multi_stream_scenario, csv_single_stream_scenario, invalid_csv_scenario, @@ -176,9 +183,7 @@ single_csv_file_is_skipped_if_same_modified_at_as_in_history, single_csv_file_is_synced_if_modified_at_is_more_recent_than_in_history, csv_custom_format_scenario, - earlier_csv_scenario, multi_stream_custom_format, - empty_schema_inference_scenario, single_parquet_scenario, multi_parquet_scenario, parquet_various_types_scenario, @@ -260,12 +265,14 @@ single_csv_no_input_state_scenario_concurrent, ] -read_scenarios = discover_scenarios + [ +discover_scenarios = discover_failure_scenarios + discover_success_scenarios + +read_scenarios = discover_success_scenarios + [ emit_record_scenario_multi_stream, emit_record_scenario_single_stream, skip_record_scenario_multi_stream, skip_record_scenario_single_stream, - multi_format_analytics_scenario, + csv_analytics_scenario, wait_for_rediscovery_scenario_multi_stream, wait_for_rediscovery_scenario_single_stream, ] diff --git a/airbyte-cdk/python/unit_tests/sources/file_based/test_scenarios.py b/airbyte-cdk/python/unit_tests/sources/file_based/test_scenarios.py index 5785f13a65ef8..7d66ec79e5ddf 100644 --- a/airbyte-cdk/python/unit_tests/sources/file_based/test_scenarios.py +++ b/airbyte-cdk/python/unit_tests/sources/file_based/test_scenarios.py @@ -16,6 +16,7 @@ from airbyte_cdk.sources.file_based.stream.concurrent.cursor import AbstractConcurrentFileBasedCursor from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput from airbyte_cdk.test.entrypoint_wrapper import read as entrypoint_read +from airbyte_cdk.utils import message_utils from airbyte_cdk.utils.traced_exception import AirbyteTracedException from airbyte_protocol.models import AirbyteLogMessage, AirbyteMessage, ConfiguredAirbyteCatalog from unit_tests.sources.file_based.scenarios.scenario_builder import TestScenario @@ -71,7 +72,7 @@ def assert_exception(expected_exception: type[BaseException], output: Entrypoint def _verify_read_output(output: EntrypointOutput, scenario: TestScenario[AbstractSource]) -> None: - records, log_messages = output.records_and_state_messages, output.logs + records_and_state_messages, log_messages = output.records_and_state_messages, output.logs logs = [message.log for message in log_messages if message.log.level.value in scenario.log_levels] if scenario.expected_records is None: return @@ -85,7 +86,7 @@ def _verify_read_output(output: EntrypointOutput, scenario: TestScenario[Abstrac ), ) sorted_records = sorted( - filter(lambda r: r.record, records), + filter(lambda r: r.record, records_and_state_messages), key=lambda record: ",".join( f"{k}={v}" for k, v in sorted(record.record.data.items(), key=lambda items: (items[0], items[1])) if k != "emitted_at" ), @@ -104,7 +105,9 @@ def _verify_read_output(output: EntrypointOutput, scenario: TestScenario[Abstrac assert actual.record.stream == expected["stream"] expected_states = list(filter(lambda e: "data" not in e, expected_records)) - states = list(filter(lambda r: r.state, records)) + states = list(filter(lambda r: r.state, records_and_state_messages)) + assert len(states) > 0, "No state messages emitted. Successful syncs should emit at least one stream state." + _verify_state_record_counts(sorted_records, states) if hasattr(scenario.source, "cursor_cls") and issubclass(scenario.source.cursor_cls, AbstractConcurrentFileBasedCursor): # Only check the last state emitted because we don't know the order the others will be in. @@ -125,8 +128,34 @@ def _verify_read_output(output: EntrypointOutput, scenario: TestScenario[Abstrac _verify_analytics(analytics, scenario.expected_analytics) +def _verify_state_record_counts(records: List[AirbyteMessage], states: List[AirbyteMessage]) -> None: + actual_record_counts = {} + for record in records: + stream_descriptor = message_utils.get_stream_descriptor(record) + actual_record_counts[stream_descriptor] = actual_record_counts.get(stream_descriptor, 0) + 1 + + state_record_count_sums = {} + for state_message in states: + stream_descriptor = message_utils.get_stream_descriptor(state_message) + state_record_count_sums[stream_descriptor] = ( + state_record_count_sums.get(stream_descriptor, 0) + + state_message.state.sourceStats.recordCount + ) + + for stream, actual_count in actual_record_counts.items(): + assert actual_count == state_record_count_sums.get(stream) + + # We can have extra keys in state_record_count_sums if we processed a stream and reported 0 records + extra_keys = state_record_count_sums.keys() - actual_record_counts.keys() + for stream in extra_keys: + assert state_record_count_sums[stream] == 0 + + def _verify_analytics(analytics: List[AirbyteMessage], expected_analytics: Optional[List[AirbyteAnalyticsTraceMessage]]) -> None: if expected_analytics: + assert len(analytics) == len( + expected_analytics), \ + f"Number of actual analytics messages ({len(analytics)}) did not match expected ({len(expected_analytics)})" for actual, expected in zip(analytics, expected_analytics): actual_type, actual_value = actual.trace.analytics.type, actual.trace.analytics.value expected_type = expected.type diff --git a/airbyte-cdk/python/unit_tests/sources/mock_server_tests/test_mock_server_abstract_source.py b/airbyte-cdk/python/unit_tests/sources/mock_server_tests/test_mock_server_abstract_source.py index 4698f7ba8dadf..b113479970811 100644 --- a/airbyte-cdk/python/unit_tests/sources/mock_server_tests/test_mock_server_abstract_source.py +++ b/airbyte-cdk/python/unit_tests/sources/mock_server_tests/test_mock_server_abstract_source.py @@ -205,6 +205,7 @@ def test_full_refresh_sync(self, http_mocker): validate_message_order([Type.RECORD, Type.RECORD, Type.STATE], actual_messages.records_and_state_messages) assert actual_messages.state_messages[0].state.stream.stream_descriptor.name == "users" assert actual_messages.state_messages[0].state.stream.stream_state == {"__ab_full_refresh_state_message": True} + assert actual_messages.state_messages[0].state.sourceStats.recordCount == 2.0 @HttpMocker() def test_full_refresh_with_slices(self, http_mocker): @@ -232,6 +233,7 @@ def test_full_refresh_with_slices(self, http_mocker): validate_message_order([Type.RECORD, Type.RECORD, Type.RECORD, Type.RECORD, Type.STATE], actual_messages.records_and_state_messages) assert actual_messages.state_messages[0].state.stream.stream_descriptor.name == "dividers" assert actual_messages.state_messages[0].state.stream.stream_state == {"__ab_full_refresh_state_message": True} + assert actual_messages.state_messages[0].state.sourceStats.recordCount == 4.0 @freezegun.freeze_time(_NOW) @@ -264,8 +266,10 @@ def test_incremental_sync(self, http_mocker): validate_message_order([Type.RECORD, Type.RECORD, Type.RECORD, Type.STATE, Type.RECORD, Type.RECORD, Type.STATE], actual_messages.records_and_state_messages) assert actual_messages.state_messages[0].state.stream.stream_descriptor.name == "planets" assert actual_messages.state_messages[0].state.stream.stream_state == {"created_at": last_record_date_0} + assert actual_messages.state_messages[0].state.sourceStats.recordCount == 3.0 assert actual_messages.state_messages[1].state.stream.stream_descriptor.name == "planets" assert actual_messages.state_messages[1].state.stream.stream_state == {"created_at": last_record_date_1} + assert actual_messages.state_messages[1].state.sourceStats.recordCount == 2.0 @HttpMocker() def test_incremental_running_as_full_refresh(self, http_mocker): @@ -295,6 +299,7 @@ def test_incremental_running_as_full_refresh(self, http_mocker): validate_message_order([Type.RECORD, Type.RECORD, Type.RECORD, Type.RECORD, Type.RECORD, Type.STATE], actual_messages.records_and_state_messages) assert actual_messages.state_messages[0].state.stream.stream_descriptor.name == "planets" assert actual_messages.state_messages[0].state.stream.stream_state == {"created_at": last_record_date_1} + assert actual_messages.state_messages[0].state.sourceStats.recordCount == 5.0 @HttpMocker() def test_legacy_incremental_sync(self, http_mocker): @@ -324,8 +329,10 @@ def test_legacy_incremental_sync(self, http_mocker): validate_message_order([Type.RECORD, Type.RECORD, Type.RECORD, Type.STATE, Type.RECORD, Type.RECORD, Type.STATE], actual_messages.records_and_state_messages) assert actual_messages.state_messages[0].state.stream.stream_descriptor.name == "legacies" assert actual_messages.state_messages[0].state.stream.stream_state == {"created_at": last_record_date_0} + assert actual_messages.state_messages[0].state.sourceStats.recordCount == 3.0 assert actual_messages.state_messages[1].state.stream.stream_descriptor.name == "legacies" assert actual_messages.state_messages[1].state.stream.stream_state == {"created_at": last_record_date_1} + assert actual_messages.state_messages[1].state.sourceStats.recordCount == 2.0 @freezegun.freeze_time(_NOW) @@ -395,12 +402,16 @@ def test_incremental_and_full_refresh_streams(self, http_mocker): ], actual_messages.records_and_state_messages) assert actual_messages.state_messages[0].state.stream.stream_descriptor.name == "users" assert actual_messages.state_messages[0].state.stream.stream_state == {"__ab_full_refresh_state_message": True} + assert actual_messages.state_messages[0].state.sourceStats.recordCount == 2.0 assert actual_messages.state_messages[1].state.stream.stream_descriptor.name == "planets" assert actual_messages.state_messages[1].state.stream.stream_state == {"created_at": last_record_date_0} + assert actual_messages.state_messages[1].state.sourceStats.recordCount == 3.0 assert actual_messages.state_messages[2].state.stream.stream_descriptor.name == "planets" assert actual_messages.state_messages[2].state.stream.stream_state == {"created_at": last_record_date_1} + assert actual_messages.state_messages[2].state.sourceStats.recordCount == 2.0 assert actual_messages.state_messages[3].state.stream.stream_descriptor.name == "dividers" assert actual_messages.state_messages[3].state.stream.stream_state == {"__ab_full_refresh_state_message": True} + assert actual_messages.state_messages[3].state.sourceStats.recordCount == 4.0 def emits_successful_sync_status_messages(status_messages: List[AirbyteStreamStatus]) -> bool: diff --git a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/stream_facade_builder.py b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/stream_facade_builder.py index 10c93aebb334a..b233e8039bc83 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/stream_facade_builder.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/stream_facade_builder.py @@ -21,7 +21,7 @@ from airbyte_cdk.sources.source import TState from airbyte_cdk.sources.streams import Stream from airbyte_cdk.sources.streams.concurrent.adapters import StreamFacade -from airbyte_cdk.sources.streams.concurrent.cursor import ConcurrentCursor, CursorField, NoopCursor +from airbyte_cdk.sources.streams.concurrent.cursor import ConcurrentCursor, CursorField, FinalStateCursor from airbyte_cdk.sources.streams.concurrent.state_converters.datetime_stream_state_converter import EpochValueConcurrentStreamStateConverter from airbyte_protocol.models import ConfiguredAirbyteStream from unit_tests.sources.file_based.scenarios.scenario_builder import SourceBuilder @@ -81,9 +81,10 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: self._cursor_field, self._cursor_boundaries, None, + EpochValueConcurrentStreamStateConverter.get_end_provider() ) if self._cursor_field - else NoopCursor(), + else FinalStateCursor(stream_name=stream.name, stream_namespace=stream.namespace, message_repository=self.message_repository), ) for stream, state in zip(self._streams, stream_states) ] diff --git a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/stream_facade_scenarios.py b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/stream_facade_scenarios.py index 2090a4dd1c14a..de2ca049edf1c 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/stream_facade_scenarios.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/stream_facade_scenarios.py @@ -1,6 +1,7 @@ # # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # +from airbyte_cdk.sources.concurrent_source.stream_thread_exception import StreamThreadException from airbyte_cdk.sources.streams.concurrent.cursor import CursorField from unit_tests.sources.file_based.scenarios.scenario_builder import IncrementalScenarioConfig, TestScenarioBuilder from unit_tests.sources.streams.concurrent.scenarios.stream_facade_builder import StreamFacadeSourceBuilder @@ -157,7 +158,7 @@ ] } ) - .set_expected_read_error(ValueError, "test exception") + .set_expected_read_error(StreamThreadException, "Exception while syncing stream stream1: test exception") .build() ) diff --git a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/thread_based_concurrent_stream_scenarios.py b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/thread_based_concurrent_stream_scenarios.py index e1eb81445d4a2..4a0094c3bc463 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/thread_based_concurrent_stream_scenarios.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/thread_based_concurrent_stream_scenarios.py @@ -3,8 +3,9 @@ # import logging +from airbyte_cdk.sources.concurrent_source.stream_thread_exception import StreamThreadException from airbyte_cdk.sources.message import InMemoryMessageRepository -from airbyte_cdk.sources.streams.concurrent.cursor import NoopCursor +from airbyte_cdk.sources.streams.concurrent.cursor import FinalStateCursor from airbyte_cdk.sources.streams.concurrent.default_stream import DefaultStream from airbyte_cdk.sources.streams.concurrent.partitions.record import Record from unit_tests.sources.file_based.scenarios.scenario_builder import TestScenarioBuilder @@ -15,6 +16,8 @@ InMemoryPartitionGenerator, ) +_message_repository = InMemoryMessageRepository() + _id_only_stream = DefaultStream( partition_generator=InMemoryPartitionGenerator( [InMemoryPartition("partition1", "stream1", None, [Record({"id": "1"}, "stream1"), Record({"id": "2"}, "stream1")])] @@ -30,7 +33,7 @@ primary_key=[], cursor_field=None, logger=logging.getLogger("test_logger"), - cursor=NoopCursor(), + cursor=FinalStateCursor(stream_name="stream1", stream_namespace=None, message_repository=_message_repository), ) _id_only_stream_with_slice_logger = DefaultStream( @@ -48,7 +51,7 @@ primary_key=[], cursor_field=None, logger=logging.getLogger("test_logger"), - cursor=NoopCursor(), + cursor=FinalStateCursor(stream_name="stream1", stream_namespace=None, message_repository=_message_repository), ) _id_only_stream_with_primary_key = DefaultStream( @@ -66,7 +69,7 @@ primary_key=["id"], cursor_field=None, logger=logging.getLogger("test_logger"), - cursor=NoopCursor(), + cursor=FinalStateCursor(stream_name="stream1", stream_namespace=None, message_repository=_message_repository), ) _id_only_stream_multiple_partitions = DefaultStream( @@ -87,7 +90,7 @@ primary_key=[], cursor_field=None, logger=logging.getLogger("test_logger"), - cursor=NoopCursor(), + cursor=FinalStateCursor(stream_name="stream1", stream_namespace=None, message_repository=_message_repository), ) _id_only_stream_multiple_partitions_concurrency_level_two = DefaultStream( @@ -108,7 +111,7 @@ primary_key=[], cursor_field=None, logger=logging.getLogger("test_logger"), - cursor=NoopCursor(), + cursor=FinalStateCursor(stream_name="stream1", stream_namespace=None, message_repository=_message_repository), ) _stream_raising_exception = DefaultStream( @@ -126,7 +129,7 @@ primary_key=[], cursor_field=None, logger=logging.getLogger("test_logger"), - cursor=NoopCursor(), + cursor=FinalStateCursor(stream_name="stream1", stream_namespace=None, message_repository=_message_repository), ) test_concurrent_cdk_single_stream = ( @@ -140,7 +143,7 @@ _id_only_stream, ] ) - .set_message_repository(InMemoryMessageRepository()) + .set_message_repository(_message_repository) ) .set_expected_records( [ @@ -193,7 +196,7 @@ _id_only_stream_with_primary_key, ] ) - .set_message_repository(InMemoryMessageRepository()) + .set_message_repository(_message_repository) ) .set_expected_records( [ @@ -253,11 +256,11 @@ primary_key=[], cursor_field=None, logger=logging.getLogger("test_logger"), - cursor=NoopCursor(), + cursor=FinalStateCursor(stream_name="stream2", stream_namespace=None, message_repository=_message_repository), ), ] ) - .set_message_repository(InMemoryMessageRepository()) + .set_message_repository(_message_repository) ) .set_expected_records( [ @@ -308,14 +311,14 @@ _stream_raising_exception, ] ) - .set_message_repository(InMemoryMessageRepository()) + .set_message_repository(_message_repository) ) .set_expected_records( [ {"data": {"id": "1"}, "stream": "stream1"}, ] ) - .set_expected_read_error(ValueError, "test exception") + .set_expected_read_error(StreamThreadException, "Exception while syncing stream stream1: test exception") .set_expected_catalog( { "streams": [ @@ -346,7 +349,7 @@ _id_only_stream_multiple_partitions, ] ) - .set_message_repository(InMemoryMessageRepository()) + .set_message_repository(_message_repository) ) .set_expected_records( [ @@ -386,7 +389,7 @@ _id_only_stream_multiple_partitions_concurrency_level_two, ] ) - .set_message_repository(InMemoryMessageRepository()) + .set_message_repository(_message_repository) ) .set_expected_records( [ diff --git a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/thread_based_concurrent_stream_source_builder.py b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/thread_based_concurrent_stream_source_builder.py index 87a65ea6efd81..43c198916a67a 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/thread_based_concurrent_stream_source_builder.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/thread_based_concurrent_stream_source_builder.py @@ -8,11 +8,11 @@ from airbyte_cdk.models import ConfiguredAirbyteCatalog, ConnectorSpecification, DestinationSyncMode, SyncMode from airbyte_cdk.sources.concurrent_source.concurrent_source import ConcurrentSource from airbyte_cdk.sources.concurrent_source.concurrent_source_adapter import ConcurrentSourceAdapter -from airbyte_cdk.sources.message import MessageRepository +from airbyte_cdk.sources.message import InMemoryMessageRepository, MessageRepository from airbyte_cdk.sources.streams import Stream from airbyte_cdk.sources.streams.concurrent.adapters import StreamFacade from airbyte_cdk.sources.streams.concurrent.availability_strategy import AbstractAvailabilityStrategy, StreamAvailability, StreamAvailable -from airbyte_cdk.sources.streams.concurrent.cursor import NoopCursor +from airbyte_cdk.sources.streams.concurrent.cursor import FinalStateCursor from airbyte_cdk.sources.streams.concurrent.default_stream import DefaultStream from airbyte_cdk.sources.streams.concurrent.partitions.partition import Partition from airbyte_cdk.sources.streams.concurrent.partitions.partition_generator import PartitionGenerator @@ -42,13 +42,14 @@ def __init__(self, streams: List[DefaultStream], message_repository: Optional[Me concurrent_source = ConcurrentSource.create(1, 1, streams[0]._logger, NeverLogSliceLogger(), message_repository) super().__init__(concurrent_source) self._streams = streams + self._message_repository = message_repository def check_connection(self, logger: logging.Logger, config: Mapping[str, Any]) -> Tuple[bool, Optional[Any]]: # Check is not verified because it is up to the source to implement this method return True, None def streams(self, config: Mapping[str, Any]) -> List[Stream]: - return [StreamFacade(s, LegacyStream(), NoopCursor(), NeverLogSliceLogger(), s._logger) for s in self._streams] + return [StreamFacade(s, LegacyStream(), FinalStateCursor(stream_name=s.name, stream_namespace=s.namespace, message_repository=self.message_repository), NeverLogSliceLogger(), s._logger) for s in self._streams] def spec(self, *args: Any, **kwargs: Any) -> ConnectorSpecification: return ConnectorSpecification(connectionSpecification={}) @@ -57,7 +58,7 @@ def read_catalog(self, catalog_path: str) -> ConfiguredAirbyteCatalog: return ConfiguredAirbyteCatalog( streams=[ ConfiguredAirbyteStream( - stream=StreamFacade(s, LegacyStream(), NoopCursor(), NeverLogSliceLogger(), s._logger).as_airbyte_stream(), + stream=StreamFacade(s, LegacyStream(), FinalStateCursor(stream_name=s.name, stream_namespace=s.namespace, message_repository=InMemoryMessageRepository()), NeverLogSliceLogger(), s._logger).as_airbyte_stream(), sync_mode=SyncMode.full_refresh, destination_sync_mode=DestinationSyncMode.overwrite, ) diff --git a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_adapters.py b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_adapters.py index 41553bd4622d4..a3990b9c057ba 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_adapters.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_adapters.py @@ -347,7 +347,7 @@ def test_get_error_display_message_no_display_message(self): display_message = facade.get_error_display_message(e) - assert expected_display_message == display_message + assert display_message == expected_display_message def test_get_error_display_message_with_display_message(self): self._stream.get_error_display_message.return_value = "display_message" @@ -359,7 +359,7 @@ def test_get_error_display_message_with_display_message(self): display_message = facade.get_error_display_message(e) - assert expected_display_message == display_message + assert display_message == expected_display_message @pytest.mark.parametrize( diff --git a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_concurrent_read_processor.py b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_concurrent_read_processor.py index 3e0e00b62d32f..91e5e97ebfad3 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_concurrent_read_processor.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_concurrent_read_processor.py @@ -20,6 +20,7 @@ from airbyte_cdk.models import Type as MessageType from airbyte_cdk.sources.concurrent_source.concurrent_read_processor import ConcurrentReadProcessor from airbyte_cdk.sources.concurrent_source.partition_generation_completed_sentinel import PartitionGenerationCompletedSentinel +from airbyte_cdk.sources.concurrent_source.stream_thread_exception import StreamThreadException from airbyte_cdk.sources.concurrent_source.thread_pool_manager import ThreadPoolManager from airbyte_cdk.sources.message import LogMessage, MessageRepository from airbyte_cdk.sources.streams.concurrent.abstract_stream import AbstractStream @@ -107,7 +108,7 @@ def test_handle_partition_done_no_other_streams_to_generate_partitions_for(self) messages = list(handler.on_partition_generation_completed(sentinel)) expected_messages = [] - assert expected_messages == messages + assert messages == expected_messages @freezegun.freeze_time("2020-01-01T00:00:00") def test_handle_last_stream_partition_done(self): @@ -145,7 +146,7 @@ def test_handle_last_stream_partition_done(self): ), ), ] - assert expected_messages == messages + assert messages == expected_messages assert in_order_validation_mock.mock_calls.index( call._another_stream.cursor.ensure_at_least_one_state_emitted ) < in_order_validation_mock.mock_calls.index(call._message_repository.consume_queue) @@ -222,7 +223,7 @@ def test_handle_on_partition_complete_sentinel_with_messages_from_repository(sel expected_messages = [ AirbyteMessage(type=MessageType.LOG, log=AirbyteLogMessage(level=LogLevel.INFO, message="message emitted from the repository")) ] - assert expected_messages == messages + assert messages == expected_messages partition.close.assert_called_once() @@ -266,9 +267,44 @@ def test_handle_on_partition_complete_sentinel_yields_status_message_if_the_stre ), ) ] - assert expected_messages == messages + assert messages == expected_messages self._a_closed_partition.close.assert_called_once() + @freezegun.freeze_time("2020-01-01T00:00:00") + def test_given_exception_on_partition_complete_sentinel_then_yield_error_trace_message_and_stream_is_incomplete(self) -> None: + self._a_closed_partition.stream_name.return_value = self._stream.name + self._a_closed_partition.close.side_effect = ValueError + + handler = ConcurrentReadProcessor( + [self._stream], + self._partition_enqueuer, + self._thread_pool_manager, + self._logger, + self._slice_logger, + self._message_repository, + self._partition_reader, + ) + handler.start_next_partition_generator() + handler.on_partition(self._a_closed_partition) + list(handler.on_partition_generation_completed(PartitionGenerationCompletedSentinel(self._stream))) + messages = list(handler.on_partition_complete_sentinel(PartitionCompleteSentinel(self._a_closed_partition))) + + expected_status_message = AirbyteMessage( + type=MessageType.TRACE, + trace=AirbyteTraceMessage( + type=TraceType.STREAM_STATUS, + stream_status=AirbyteStreamStatusTraceMessage( + stream_descriptor=StreamDescriptor( + name=self._stream.name, + ), + status=AirbyteStreamStatus.INCOMPLETE, + ), + emitted_at=1577836800000.0, + ), + ) + assert list(map(lambda message: message.trace.type, messages)) == [TraceType.ERROR, TraceType.STREAM_STATUS] + assert messages[1] == expected_status_message + @freezegun.freeze_time("2020-01-01T00:00:00") def test_handle_on_partition_complete_sentinel_yields_no_status_message_if_the_stream_is_not_done(self): stream_instances_to_read_from = [self._stream] @@ -293,7 +329,7 @@ def test_handle_on_partition_complete_sentinel_yields_no_status_message_if_the_s messages = list(handler.on_partition_complete_sentinel(sentinel)) expected_messages = [] - assert expected_messages == messages + assert messages == expected_messages partition.close.assert_called_once() @freezegun.freeze_time("2020-01-01T00:00:00") @@ -330,7 +366,7 @@ def test_on_record_no_status_message_no_repository_messge(self): ), ) ] - assert expected_messages == messages + assert messages == expected_messages @freezegun.freeze_time("2020-01-01T00:00:00") def test_on_record_with_repository_messge(self): @@ -380,7 +416,7 @@ def test_on_record_with_repository_messge(self): ), AirbyteMessage(type=MessageType.LOG, log=AirbyteLogMessage(level=LogLevel.INFO, message="message emitted from the repository")), ] - assert expected_messages == messages + assert messages == expected_messages assert handler._record_counter[_STREAM_NAME] == 2 @freezegun.freeze_time("2020-01-01T00:00:00") @@ -422,7 +458,7 @@ def test_on_record_emits_status_message_on_first_record_no_repository_message(se ), ), ] - assert expected_messages == messages + assert messages == expected_messages @freezegun.freeze_time("2020-01-01T00:00:00") def test_on_record_emits_status_message_on_first_record_with_repository_message(self): @@ -476,69 +512,10 @@ def test_on_record_emits_status_message_on_first_record_with_repository_message( ), AirbyteMessage(type=MessageType.LOG, log=AirbyteLogMessage(level=LogLevel.INFO, message="message emitted from the repository")), ] - assert expected_messages == messages + assert messages == expected_messages @freezegun.freeze_time("2020-01-01T00:00:00") - def test_on_exception_stops_streams_and_raises_an_exception(self): - stream_instances_to_read_from = [self._stream, self._another_stream] - - handler = ConcurrentReadProcessor( - stream_instances_to_read_from, - self._partition_enqueuer, - self._thread_pool_manager, - self._logger, - self._slice_logger, - self._message_repository, - self._partition_reader, - ) - - handler.start_next_partition_generator() - - another_stream = Mock(spec=AbstractStream) - another_stream.name = _STREAM_NAME - another_stream.as_airbyte_stream.return_value = AirbyteStream( - name=_ANOTHER_STREAM_NAME, - json_schema={}, - supported_sync_modes=[SyncMode.full_refresh], - ) - - exception = RuntimeError("Something went wrong") - - messages = [] - - with self.assertRaises(RuntimeError): - for m in handler.on_exception(exception): - messages.append(m) - - expected_message = [ - AirbyteMessage( - type=MessageType.TRACE, - trace=AirbyteTraceMessage( - type=TraceType.STREAM_STATUS, - emitted_at=1577836800000.0, - stream_status=AirbyteStreamStatusTraceMessage( - stream_descriptor=StreamDescriptor(name=_STREAM_NAME), status=AirbyteStreamStatus(AirbyteStreamStatus.INCOMPLETE) - ), - ), - ), - AirbyteMessage( - type=MessageType.TRACE, - trace=AirbyteTraceMessage( - type=TraceType.STREAM_STATUS, - emitted_at=1577836800000.0, - stream_status=AirbyteStreamStatusTraceMessage( - stream_descriptor=StreamDescriptor(name=_ANOTHER_STREAM_NAME), - status=AirbyteStreamStatus(AirbyteStreamStatus.INCOMPLETE), - ), - ), - ), - ] - - assert messages == expected_message - self._thread_pool_manager.shutdown.assert_called_once() - - @freezegun.freeze_time("2020-01-01T00:00:00") - def test_on_exception_does_not_stop_streams_that_are_already_done(self): + def test_on_exception_return_trace_message_and_on_stream_complete_return_stream_status(self): stream_instances_to_read_from = [self._stream, self._another_stream] handler = ConcurrentReadProcessor( @@ -564,15 +541,13 @@ def test_on_exception_does_not_stop_streams_that_are_already_done(self): supported_sync_modes=[SyncMode.full_refresh], ) - exception = RuntimeError("Something went wrong") + exception = StreamThreadException(RuntimeError("Something went wrong"), _STREAM_NAME) - messages = [] + exception_messages = list(handler.on_exception(exception)) + assert len(exception_messages) == 1 + assert exception_messages[0].type == MessageType.TRACE - with self.assertRaises(RuntimeError): - for m in handler.on_exception(exception): - messages.append(m) - - expected_message = [ + assert list(handler.on_partition_complete_sentinel(PartitionCompleteSentinel(self._an_open_partition))) == [ AirbyteMessage( type=MessageType.TRACE, trace=AirbyteTraceMessage( @@ -585,9 +560,6 @@ def test_on_exception_does_not_stop_streams_that_are_already_done(self): ) ] - assert messages == expected_message - self._thread_pool_manager.shutdown.assert_called_once() - def test_is_done_is_false_if_there_are_any_instances_to_read_from(self): stream_instances_to_read_from = [self._stream] diff --git a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_cursor.py b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_cursor.py index 94ed5211eabb9..b8fa8b2f79e0c 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_cursor.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_cursor.py @@ -1,21 +1,25 @@ # # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # +from datetime import datetime, timedelta, timezone from typing import Any, Mapping, Optional from unittest import TestCase from unittest.mock import Mock +import freezegun import pytest from airbyte_cdk.sources.connector_state_manager import ConnectorStateManager from airbyte_cdk.sources.message import MessageRepository -from airbyte_cdk.sources.streams.concurrent.cursor import Comparable, ConcurrentCursor, CursorField +from airbyte_cdk.sources.streams.concurrent.cursor import ConcurrentCursor, CursorField, CursorValueType from airbyte_cdk.sources.streams.concurrent.partitions.partition import Partition from airbyte_cdk.sources.streams.concurrent.partitions.record import Record +from airbyte_cdk.sources.streams.concurrent.state_converters.abstract_stream_state_converter import ConcurrencyCompatibleStateType from airbyte_cdk.sources.streams.concurrent.state_converters.datetime_stream_state_converter import EpochValueConcurrentStreamStateConverter _A_STREAM_NAME = "a stream name" _A_STREAM_NAMESPACE = "a stream namespace" _A_CURSOR_FIELD_KEY = "a_cursor_field_key" +_NO_STATE = {} _NO_PARTITION_IDENTIFIER = None _NO_SLICE = None _NO_SLICE_BOUNDARIES = None @@ -23,6 +27,7 @@ _UPPER_SLICE_BOUNDARY_FIELD = "upper_boundary" _SLICE_BOUNDARY_FIELDS = (_LOWER_SLICE_BOUNDARY_FIELD, _UPPER_SLICE_BOUNDARY_FIELD) _A_VERY_HIGH_CURSOR_VALUE = 1000000000 +_NO_LOOKBACK_WINDOW = timedelta(seconds=0) def _partition(_slice: Optional[Mapping[str, Any]]) -> Partition: @@ -31,27 +36,28 @@ def _partition(_slice: Optional[Mapping[str, Any]]) -> Partition: return partition -def _record(cursor_value: Comparable) -> Record: +def _record(cursor_value: CursorValueType) -> Record: return Record(data={_A_CURSOR_FIELD_KEY: cursor_value}, stream_name=_A_STREAM_NAME) -class ConcurrentCursorTest(TestCase): +class ConcurrentCursorStateTest(TestCase): def setUp(self) -> None: self._message_repository = Mock(spec=MessageRepository) self._state_manager = Mock(spec=ConnectorStateManager) - self._state_converter = EpochValueConcurrentStreamStateConverter() - def _cursor_with_slice_boundary_fields(self) -> ConcurrentCursor: + def _cursor_with_slice_boundary_fields(self, is_sequential_state=True) -> ConcurrentCursor: return ConcurrentCursor( _A_STREAM_NAME, _A_STREAM_NAMESPACE, {}, self._message_repository, self._state_manager, - self._state_converter, + EpochValueConcurrentStreamStateConverter(is_sequential_state), CursorField(_A_CURSOR_FIELD_KEY), _SLICE_BOUNDARY_FIELDS, None, + EpochValueConcurrentStreamStateConverter.get_end_provider(), + _NO_LOOKBACK_WINDOW, ) def _cursor_without_slice_boundary_fields(self) -> ConcurrentCursor: @@ -61,10 +67,12 @@ def _cursor_without_slice_boundary_fields(self) -> ConcurrentCursor: {}, self._message_repository, self._state_manager, - self._state_converter, + EpochValueConcurrentStreamStateConverter(is_sequential_state=True), CursorField(_A_CURSOR_FIELD_KEY), None, None, + EpochValueConcurrentStreamStateConverter.get_end_provider(), + _NO_LOOKBACK_WINDOW, ) def test_given_boundary_fields_when_close_partition_then_emit_state(self) -> None: @@ -82,6 +90,24 @@ def test_given_boundary_fields_when_close_partition_then_emit_state(self) -> Non {_A_CURSOR_FIELD_KEY: 0}, # State message is updated to the legacy format before being emitted ) + def test_given_state_not_sequential_when_close_partition_then_emit_state(self) -> None: + cursor = self._cursor_with_slice_boundary_fields(is_sequential_state=False) + cursor.close_partition( + _partition( + {_LOWER_SLICE_BOUNDARY_FIELD: 12, _UPPER_SLICE_BOUNDARY_FIELD: 30}, + ) + ) + + self._message_repository.emit_message.assert_called_once_with(self._state_manager.create_state_message.return_value) + self._state_manager.update_state_for_stream.assert_called_once_with( + _A_STREAM_NAME, + _A_STREAM_NAMESPACE, + { + "slices": [{"end": 0, "start": 0}, {"end": 30, "start": 12}], + "state_type": "date-range" + }, + ) + def test_given_boundary_fields_when_close_partition_then_emit_updated_state(self) -> None: self._cursor_with_slice_boundary_fields().close_partition( _partition( @@ -137,3 +163,265 @@ def test_given_slice_boundaries_not_matching_slice_when_close_partition_then_rai cursor = self._cursor_with_slice_boundary_fields() with pytest.raises(KeyError): cursor.close_partition(_partition({"not_matching_key": "value"})) + + @freezegun.freeze_time(time_to_freeze=datetime.fromtimestamp(50, timezone.utc)) + def test_given_no_state_when_generate_slices_then_create_slice_from_start_to_end(self): + start = datetime.fromtimestamp(10, timezone.utc) + cursor = ConcurrentCursor( + _A_STREAM_NAME, + _A_STREAM_NAMESPACE, + _NO_STATE, + self._message_repository, + self._state_manager, + EpochValueConcurrentStreamStateConverter(is_sequential_state=False), + CursorField(_A_CURSOR_FIELD_KEY), + _SLICE_BOUNDARY_FIELDS, + start, + EpochValueConcurrentStreamStateConverter.get_end_provider(), + _NO_LOOKBACK_WINDOW, + ) + + slices = list(cursor.generate_slices()) + + assert slices == [ + (datetime.fromtimestamp(10, timezone.utc), datetime.fromtimestamp(50, timezone.utc)), + ] + + @freezegun.freeze_time(time_to_freeze=datetime.fromtimestamp(50, timezone.utc)) + def test_given_one_slice_when_generate_slices_then_create_slice_from_slice_upper_boundary_to_end(self): + start = datetime.fromtimestamp(0, timezone.utc) + cursor = ConcurrentCursor( + _A_STREAM_NAME, + _A_STREAM_NAMESPACE, + { + "state_type": ConcurrencyCompatibleStateType.date_range.value, + "slices": [ + {EpochValueConcurrentStreamStateConverter.START_KEY: 0, EpochValueConcurrentStreamStateConverter.END_KEY: 20}, + ] + }, + self._message_repository, + self._state_manager, + EpochValueConcurrentStreamStateConverter(is_sequential_state=False), + CursorField(_A_CURSOR_FIELD_KEY), + _SLICE_BOUNDARY_FIELDS, + start, + EpochValueConcurrentStreamStateConverter.get_end_provider(), + _NO_LOOKBACK_WINDOW, + ) + + slices = list(cursor.generate_slices()) + + assert slices == [ + (datetime.fromtimestamp(20, timezone.utc), datetime.fromtimestamp(50, timezone.utc)), + ] + + @freezegun.freeze_time(time_to_freeze=datetime.fromtimestamp(50, timezone.utc)) + def test_given_start_after_slices_when_generate_slices_then_generate_from_start(self): + start = datetime.fromtimestamp(30, timezone.utc) + cursor = ConcurrentCursor( + _A_STREAM_NAME, + _A_STREAM_NAMESPACE, + { + "state_type": ConcurrencyCompatibleStateType.date_range.value, + "slices": [ + {EpochValueConcurrentStreamStateConverter.START_KEY: 0, EpochValueConcurrentStreamStateConverter.END_KEY: 20}, + ] + }, + self._message_repository, + self._state_manager, + EpochValueConcurrentStreamStateConverter(is_sequential_state=False), + CursorField(_A_CURSOR_FIELD_KEY), + _SLICE_BOUNDARY_FIELDS, + start, + EpochValueConcurrentStreamStateConverter.get_end_provider(), + _NO_LOOKBACK_WINDOW, + ) + + slices = list(cursor.generate_slices()) + + assert slices == [ + (datetime.fromtimestamp(30, timezone.utc), datetime.fromtimestamp(50, timezone.utc)), + ] + + @freezegun.freeze_time(time_to_freeze=datetime.fromtimestamp(50, timezone.utc)) + def test_given_state_with_gap_and_start_after_slices_when_generate_slices_then_generate_from_start(self): + start = datetime.fromtimestamp(30, timezone.utc) + cursor = ConcurrentCursor( + _A_STREAM_NAME, + _A_STREAM_NAMESPACE, + { + "state_type": ConcurrencyCompatibleStateType.date_range.value, + "slices": [ + {EpochValueConcurrentStreamStateConverter.START_KEY: 0, EpochValueConcurrentStreamStateConverter.END_KEY: 10}, + {EpochValueConcurrentStreamStateConverter.START_KEY: 15, EpochValueConcurrentStreamStateConverter.END_KEY: 20}, + ] + }, + self._message_repository, + self._state_manager, + EpochValueConcurrentStreamStateConverter(is_sequential_state=False), + CursorField(_A_CURSOR_FIELD_KEY), + _SLICE_BOUNDARY_FIELDS, + start, + EpochValueConcurrentStreamStateConverter.get_end_provider(), + _NO_LOOKBACK_WINDOW, + ) + + slices = list(cursor.generate_slices()) + + assert slices == [ + (datetime.fromtimestamp(30, timezone.utc), datetime.fromtimestamp(50, timezone.utc)), + ] + + @freezegun.freeze_time(time_to_freeze=datetime.fromtimestamp(50, timezone.utc)) + def test_given_small_slice_range_when_generate_slices_then_create_many_slices(self): + start = datetime.fromtimestamp(0, timezone.utc) + small_slice_range = timedelta(seconds=10) + cursor = ConcurrentCursor( + _A_STREAM_NAME, + _A_STREAM_NAMESPACE, + { + "state_type": ConcurrencyCompatibleStateType.date_range.value, + "slices": [ + {EpochValueConcurrentStreamStateConverter.START_KEY: 0, EpochValueConcurrentStreamStateConverter.END_KEY: 20}, + ] + }, + self._message_repository, + self._state_manager, + EpochValueConcurrentStreamStateConverter(is_sequential_state=False), + CursorField(_A_CURSOR_FIELD_KEY), + _SLICE_BOUNDARY_FIELDS, + start, + EpochValueConcurrentStreamStateConverter.get_end_provider(), + _NO_LOOKBACK_WINDOW, + small_slice_range, + ) + + slices = list(cursor.generate_slices()) + + assert slices == [ + (datetime.fromtimestamp(20, timezone.utc), datetime.fromtimestamp(30, timezone.utc)), + (datetime.fromtimestamp(30, timezone.utc), datetime.fromtimestamp(40, timezone.utc)), + (datetime.fromtimestamp(40, timezone.utc), datetime.fromtimestamp(50, timezone.utc)), + ] + + @freezegun.freeze_time(time_to_freeze=datetime.fromtimestamp(50, timezone.utc)) + def test_given_difference_between_slices_match_slice_range_when_generate_slices_then_create_one_slice(self): + start = datetime.fromtimestamp(0, timezone.utc) + small_slice_range = timedelta(seconds=10) + cursor = ConcurrentCursor( + _A_STREAM_NAME, + _A_STREAM_NAMESPACE, + { + "state_type": ConcurrencyCompatibleStateType.date_range.value, + "slices": [ + {EpochValueConcurrentStreamStateConverter.START_KEY: 0, EpochValueConcurrentStreamStateConverter.END_KEY: 30}, + {EpochValueConcurrentStreamStateConverter.START_KEY: 40, EpochValueConcurrentStreamStateConverter.END_KEY: 50}, + ] + }, + self._message_repository, + self._state_manager, + EpochValueConcurrentStreamStateConverter(is_sequential_state=False), + CursorField(_A_CURSOR_FIELD_KEY), + _SLICE_BOUNDARY_FIELDS, + start, + EpochValueConcurrentStreamStateConverter.get_end_provider(), + _NO_LOOKBACK_WINDOW, + small_slice_range, + ) + + slices = list(cursor.generate_slices()) + + assert slices == [ + (datetime.fromtimestamp(30, timezone.utc), datetime.fromtimestamp(40, timezone.utc)), + ] + + @freezegun.freeze_time(time_to_freeze=datetime.fromtimestamp(50, timezone.utc)) + def test_given_non_continuous_state_when_generate_slices_then_create_slices_between_gaps_and_after(self): + cursor = ConcurrentCursor( + _A_STREAM_NAME, + _A_STREAM_NAMESPACE, + { + "state_type": ConcurrencyCompatibleStateType.date_range.value, + "slices": [ + {EpochValueConcurrentStreamStateConverter.START_KEY: 0, EpochValueConcurrentStreamStateConverter.END_KEY: 10}, + {EpochValueConcurrentStreamStateConverter.START_KEY: 20, EpochValueConcurrentStreamStateConverter.END_KEY: 25}, + {EpochValueConcurrentStreamStateConverter.START_KEY: 30, EpochValueConcurrentStreamStateConverter.END_KEY: 40}, + ] + }, + self._message_repository, + self._state_manager, + EpochValueConcurrentStreamStateConverter(is_sequential_state=False), + CursorField(_A_CURSOR_FIELD_KEY), + _SLICE_BOUNDARY_FIELDS, + None, + EpochValueConcurrentStreamStateConverter.get_end_provider(), + _NO_LOOKBACK_WINDOW, + ) + + slices = list(cursor.generate_slices()) + + assert slices == [ + (datetime.fromtimestamp(10, timezone.utc), datetime.fromtimestamp(20, timezone.utc)), + (datetime.fromtimestamp(25, timezone.utc), datetime.fromtimestamp(30, timezone.utc)), + (datetime.fromtimestamp(40, timezone.utc), datetime.fromtimestamp(50, timezone.utc)), + ] + + @freezegun.freeze_time(time_to_freeze=datetime.fromtimestamp(50, timezone.utc)) + def test_given_lookback_window_when_generate_slices_then_apply_lookback_on_most_recent_slice(self): + start = datetime.fromtimestamp(0, timezone.utc) + lookback_window = timedelta(seconds=10) + cursor = ConcurrentCursor( + _A_STREAM_NAME, + _A_STREAM_NAMESPACE, + { + "state_type": ConcurrencyCompatibleStateType.date_range.value, + "slices": [ + {EpochValueConcurrentStreamStateConverter.START_KEY: 0, EpochValueConcurrentStreamStateConverter.END_KEY: 20}, + {EpochValueConcurrentStreamStateConverter.START_KEY: 30, EpochValueConcurrentStreamStateConverter.END_KEY: 40}, + ] + }, + self._message_repository, + self._state_manager, + EpochValueConcurrentStreamStateConverter(is_sequential_state=False), + CursorField(_A_CURSOR_FIELD_KEY), + _SLICE_BOUNDARY_FIELDS, + start, + EpochValueConcurrentStreamStateConverter.get_end_provider(), + lookback_window, + ) + + slices = list(cursor.generate_slices()) + + assert slices == [ + (datetime.fromtimestamp(20, timezone.utc), datetime.fromtimestamp(30, timezone.utc)), + (datetime.fromtimestamp(30, timezone.utc), datetime.fromtimestamp(50, timezone.utc)), + ] + + @freezegun.freeze_time(time_to_freeze=datetime.fromtimestamp(50, timezone.utc)) + def test_given_start_is_before_first_slice_lower_boundary_when_generate_slices_then_generate_slice_before(self): + start = datetime.fromtimestamp(0, timezone.utc) + cursor = ConcurrentCursor( + _A_STREAM_NAME, + _A_STREAM_NAMESPACE, + { + "state_type": ConcurrencyCompatibleStateType.date_range.value, + "slices": [ + {EpochValueConcurrentStreamStateConverter.START_KEY: 10, EpochValueConcurrentStreamStateConverter.END_KEY: 20}, + ] + }, + self._message_repository, + self._state_manager, + EpochValueConcurrentStreamStateConverter(is_sequential_state=False), + CursorField(_A_CURSOR_FIELD_KEY), + _SLICE_BOUNDARY_FIELDS, + start, + EpochValueConcurrentStreamStateConverter.get_end_provider(), + _NO_LOOKBACK_WINDOW, + ) + + slices = list(cursor.generate_slices()) + + assert slices == [ + (datetime.fromtimestamp(0, timezone.utc), datetime.fromtimestamp(10, timezone.utc)), + (datetime.fromtimestamp(20, timezone.utc), datetime.fromtimestamp(50, timezone.utc)), + ] diff --git a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_datetime_state_converter.py b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_datetime_state_converter.py index 534dbd580787c..aeaf5ae5a50ba 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_datetime_state_converter.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_datetime_state_converter.py @@ -98,7 +98,7 @@ def test_concurrent_stream_state_converter_is_state_message_compatible(converter ), pytest.param( EpochValueConcurrentStreamStateConverter(), - 1617030403, + datetime.fromtimestamp(1617030403, timezone.utc), {}, datetime(2021, 3, 29, 15, 6, 43, tzinfo=timezone.utc), id="epoch-converter-no-state-with-start-start-is-start", @@ -112,14 +112,14 @@ def test_concurrent_stream_state_converter_is_state_message_compatible(converter ), pytest.param( EpochValueConcurrentStreamStateConverter(), - 1617030404, + datetime.fromtimestamp(1617030404, timezone.utc), {"created_at": 1617030403}, datetime(2021, 3, 29, 15, 6, 44, tzinfo=timezone.utc), id="epoch-converter-state-before-start-start-is-start", ), pytest.param( EpochValueConcurrentStreamStateConverter(), - 1617030403, + datetime.fromtimestamp(1617030403, timezone.utc), {"created_at": 1617030404}, datetime(2021, 3, 29, 15, 6, 44, tzinfo=timezone.utc), id="epoch-converter-state-after-start-start-is-from-state", @@ -133,7 +133,7 @@ def test_concurrent_stream_state_converter_is_state_message_compatible(converter ), pytest.param( IsoMillisConcurrentStreamStateConverter(), - "2021-08-22T05:03:27.000Z", + datetime(2021, 8, 22, 5, 3, 27, tzinfo=timezone.utc), {}, datetime(2021, 8, 22, 5, 3, 27, tzinfo=timezone.utc), id="isomillis-converter-no-state-with-start-start-is-start", @@ -147,14 +147,14 @@ def test_concurrent_stream_state_converter_is_state_message_compatible(converter ), pytest.param( IsoMillisConcurrentStreamStateConverter(), - "2022-08-22T05:03:27.000Z", + datetime(2022, 8, 22, 5, 3, 27, tzinfo=timezone.utc), {"created_at": "2021-08-22T05:03:27.000Z"}, datetime(2022, 8, 22, 5, 3, 27, tzinfo=timezone.utc), id="isomillis-converter-state-before-start-start-is-start", ), pytest.param( IsoMillisConcurrentStreamStateConverter(), - "2022-08-22T05:03:27.000Z", + datetime(2022, 8, 22, 5, 3, 27, tzinfo=timezone.utc), {"created_at": "2023-08-22T05:03:27.000Z"}, datetime(2023, 8, 22, 5, 3, 27, tzinfo=timezone.utc), id="isomillis-converter-state-after-start-start-is-from-state", @@ -170,7 +170,7 @@ def test_get_sync_start(converter, start, state, expected_start): [ pytest.param( EpochValueConcurrentStreamStateConverter(), - 0, + datetime.fromtimestamp(0, timezone.utc), {}, { "legacy": {}, @@ -186,13 +186,13 @@ def test_get_sync_start(converter, start, state, expected_start): ), pytest.param( EpochValueConcurrentStreamStateConverter(), - 1617030403, + datetime.fromtimestamp(1577836800, timezone.utc), {"created": 1617030403}, { "state_type": "date-range", "slices": [ { - "start": datetime(2021, 3, 29, 15, 6, 43, tzinfo=timezone.utc), + "start": datetime(2020, 1, 1, tzinfo=timezone.utc), "end": datetime(2021, 3, 29, 15, 6, 43, tzinfo=timezone.utc), } ], @@ -202,13 +202,13 @@ def test_get_sync_start(converter, start, state, expected_start): ), pytest.param( IsoMillisConcurrentStreamStateConverter(), - "2020-01-01T00:00:00.000Z", + datetime(2020, 1, 1, tzinfo=timezone.utc), {"created": "2021-08-22T05:03:27.000Z"}, { "state_type": "date-range", "slices": [ { - "start": datetime(2021, 8, 22, 5, 3, 27, tzinfo=timezone.utc), + "start": datetime(2020, 1, 1, tzinfo=timezone.utc), "end": datetime(2021, 8, 22, 5, 3, 27, tzinfo=timezone.utc), } ], @@ -338,7 +338,7 @@ def test_convert_from_sequential_state(converter, start, sequential_state, expec ], ) def test_convert_to_sequential_state(converter, concurrent_state, expected_output_state): - assert converter.convert_to_sequential_state(CursorField("created"), concurrent_state) == expected_output_state + assert converter.convert_to_state_message(CursorField("created"), concurrent_state) == expected_output_state @pytest.mark.parametrize( @@ -366,4 +366,4 @@ def test_convert_to_sequential_state(converter, concurrent_state, expected_outpu ) def test_convert_to_sequential_state_no_slices_returns_legacy_state(converter, concurrent_state, expected_output_state): with pytest.raises(RuntimeError): - converter.convert_to_sequential_state(CursorField("created"), concurrent_state) + converter.convert_to_state_message(CursorField("created"), concurrent_state) diff --git a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_default_stream.py b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_default_stream.py index fb40368d98b37..eef8c9fd6bb2a 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_default_stream.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_default_stream.py @@ -5,8 +5,9 @@ from unittest.mock import Mock from airbyte_cdk.models import AirbyteStream, SyncMode +from airbyte_cdk.sources.message import InMemoryMessageRepository from airbyte_cdk.sources.streams.concurrent.availability_strategy import STREAM_AVAILABLE -from airbyte_cdk.sources.streams.concurrent.cursor import Cursor, NoopCursor +from airbyte_cdk.sources.streams.concurrent.cursor import Cursor, FinalStateCursor from airbyte_cdk.sources.streams.concurrent.default_stream import DefaultStream @@ -20,6 +21,7 @@ def setUp(self): self._cursor_field = None self._logger = Mock() self._cursor = Mock(spec=Cursor) + self._message_repository = InMemoryMessageRepository() self._stream = DefaultStream( self._partition_generator, self._name, @@ -28,7 +30,7 @@ def setUp(self): self._primary_key, self._cursor_field, self._logger, - NoopCursor(), + FinalStateCursor(stream_name=self._name, stream_namespace=None, message_repository=self._message_repository), ) def test_get_json_schema(self): @@ -71,7 +73,7 @@ def test_as_airbyte_stream(self): ) actual_airbyte_stream = self._stream.as_airbyte_stream() - assert expected_airbyte_stream == actual_airbyte_stream + assert actual_airbyte_stream == expected_airbyte_stream def test_as_airbyte_stream_with_primary_key(self): json_schema = { @@ -89,7 +91,7 @@ def test_as_airbyte_stream_with_primary_key(self): ["id"], self._cursor_field, self._logger, - NoopCursor(), + FinalStateCursor(stream_name=self._name, stream_namespace=None, message_repository=self._message_repository), ) expected_airbyte_stream = AirbyteStream( @@ -103,7 +105,7 @@ def test_as_airbyte_stream_with_primary_key(self): ) airbyte_stream = stream.as_airbyte_stream() - assert expected_airbyte_stream == airbyte_stream + assert airbyte_stream == expected_airbyte_stream def test_as_airbyte_stream_with_composite_primary_key(self): json_schema = { @@ -121,7 +123,7 @@ def test_as_airbyte_stream_with_composite_primary_key(self): ["id_a", "id_b"], self._cursor_field, self._logger, - NoopCursor(), + FinalStateCursor(stream_name=self._name, stream_namespace=None, message_repository=self._message_repository), ) expected_airbyte_stream = AirbyteStream( @@ -135,7 +137,7 @@ def test_as_airbyte_stream_with_composite_primary_key(self): ) airbyte_stream = stream.as_airbyte_stream() - assert expected_airbyte_stream == airbyte_stream + assert airbyte_stream == expected_airbyte_stream def test_as_airbyte_stream_with_a_cursor(self): json_schema = { @@ -153,7 +155,7 @@ def test_as_airbyte_stream_with_a_cursor(self): self._primary_key, "date", self._logger, - NoopCursor(), + FinalStateCursor(stream_name=self._name, stream_namespace=None, message_repository=self._message_repository), ) expected_airbyte_stream = AirbyteStream( @@ -167,7 +169,7 @@ def test_as_airbyte_stream_with_a_cursor(self): ) airbyte_stream = stream.as_airbyte_stream() - assert expected_airbyte_stream == airbyte_stream + assert airbyte_stream == expected_airbyte_stream def test_as_airbyte_stream_with_namespace(self): stream = DefaultStream( @@ -178,7 +180,7 @@ def test_as_airbyte_stream_with_namespace(self): self._primary_key, self._cursor_field, self._logger, - NoopCursor(), + FinalStateCursor(stream_name=self._name, stream_namespace=None, message_repository=self._message_repository), namespace="test", ) expected_airbyte_stream = AirbyteStream( @@ -192,4 +194,4 @@ def test_as_airbyte_stream_with_namespace(self): ) actual_airbyte_stream = stream.as_airbyte_stream() - assert expected_airbyte_stream == actual_airbyte_stream + assert actual_airbyte_stream == expected_airbyte_stream diff --git a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_partition_enqueuer.py b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_partition_enqueuer.py index bdcd9ad43318c..d11154e712978 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_partition_enqueuer.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_partition_enqueuer.py @@ -7,6 +7,7 @@ from unittest.mock import Mock, patch from airbyte_cdk.sources.concurrent_source.partition_generation_completed_sentinel import PartitionGenerationCompletedSentinel +from airbyte_cdk.sources.concurrent_source.stream_thread_exception import StreamThreadException from airbyte_cdk.sources.concurrent_source.thread_pool_manager import ThreadPoolManager from airbyte_cdk.sources.streams.concurrent.abstract_stream import AbstractStream from airbyte_cdk.sources.streams.concurrent.partition_enqueuer import PartitionEnqueuer @@ -14,6 +15,7 @@ from airbyte_cdk.sources.streams.concurrent.partitions.types import QueueItem _SOME_PARTITIONS: List[Partition] = [Mock(spec=Partition), Mock(spec=Partition)] +_A_STREAM_NAME = "a_stream_name" class PartitionEnqueuerTest(unittest.TestCase): @@ -57,14 +59,16 @@ def test_given_partition_but_limit_reached_when_generate_partitions_then_wait_un assert mocked_sleep.call_count == 2 - def test_given_exception_when_generate_partitions_then_raise(self): + def test_given_exception_when_generate_partitions_then_return_exception_and_sentinel(self): stream = Mock(spec=AbstractStream) + stream.name = _A_STREAM_NAME exception = ValueError() stream.generate_partitions.side_effect = self._partitions_before_raising(_SOME_PARTITIONS, exception) self._partition_generator.generate_partitions(stream) - assert self._consume_queue() == _SOME_PARTITIONS + [exception] + queue_content = self._consume_queue() + assert queue_content == _SOME_PARTITIONS + [StreamThreadException(exception, _A_STREAM_NAME), PartitionGenerationCompletedSentinel(stream)] def _partitions_before_raising(self, partitions: List[Partition], exception: Exception) -> Callable[[], Iterable[Partition]]: def inner_function() -> Iterable[Partition]: @@ -83,7 +87,7 @@ def _a_stream(partitions: List[Partition]) -> AbstractStream: def _consume_queue(self) -> List[QueueItem]: queue_content: List[QueueItem] = [] while queue_item := self._queue.get(): - if isinstance(queue_item, (PartitionGenerationCompletedSentinel, Exception)): + if isinstance(queue_item, PartitionGenerationCompletedSentinel): queue_content.append(queue_item) break queue_content.append(queue_item) diff --git a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_partition_reader.py b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_partition_reader.py index 9e9fb89739496..226652be82a1c 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_partition_reader.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_partition_reader.py @@ -7,6 +7,7 @@ from unittest.mock import Mock import pytest +from airbyte_cdk.sources.concurrent_source.stream_thread_exception import StreamThreadException from airbyte_cdk.sources.streams.concurrent.partition_reader import PartitionReader from airbyte_cdk.sources.streams.concurrent.partitions.partition import Partition from airbyte_cdk.sources.streams.concurrent.partitions.record import Record @@ -32,26 +33,22 @@ def test_given_no_records_when_process_partition_then_only_emit_sentinel(self): break def test_given_read_partition_successful_when_process_partition_then_queue_records_and_sentinel(self): - self._partition_reader.process_partition(self._a_partition(_RECORDS)) + partition = self._a_partition(_RECORDS) + self._partition_reader.process_partition(partition) - actual_records = [] - while queue_item := self._queue.get(): - if isinstance(queue_item, PartitionCompleteSentinel): - break - actual_records.append(queue_item) + queue_content = self._consume_queue() - assert _RECORDS == actual_records + assert queue_content == _RECORDS + [PartitionCompleteSentinel(partition)] - def test_given_exception_when_process_partition_then_queue_records_and_raise_exception(self): + def test_given_exception_when_process_partition_then_queue_records_and_exception_and_sentinel(self): partition = Mock() exception = ValueError() partition.read.side_effect = self._read_with_exception(_RECORDS, exception) - self._partition_reader.process_partition(partition) - for i in range(len(_RECORDS)): - assert self._queue.get() == _RECORDS[i] - assert self._queue.get() == exception + queue_content = self._consume_queue() + + assert queue_content == _RECORDS + [StreamThreadException(exception, partition.stream_name()), PartitionCompleteSentinel(partition)] def _a_partition(self, records: List[Record]) -> Partition: partition = Mock(spec=Partition) @@ -65,3 +62,11 @@ def mocked_function() -> Iterable[Record]: raise exception return mocked_function + + def _consume_queue(self): + queue_content = [] + while queue_item := self._queue.get(): + queue_content.append(queue_item) + if isinstance(queue_item, PartitionCompleteSentinel): + break + return queue_content diff --git a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_thread_pool_manager.py b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_thread_pool_manager.py index 102cf7cdd4482..197f9b3431e85 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_thread_pool_manager.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_thread_pool_manager.py @@ -21,27 +21,6 @@ def test_submit_calls_underlying_thread_pool(self): assert len(self._thread_pool_manager._futures) == 1 - def test_given_no_exceptions_when_shutdown_if_exception_then_do_not_raise(self): - future = Mock(spec=Future) - future.exception.return_value = None - future.done.side_effect = [True, True] - - self._thread_pool_manager._futures = [future] - self._thread_pool_manager.prune_to_validate_has_reached_futures_limit() - - self._thread_pool_manager.shutdown_if_exception() # do not raise - - def test_given_exception_when_shutdown_if_exception_then_raise(self): - future = Mock(spec=Future) - future.exception.return_value = RuntimeError - future.done.side_effect = [True, True] - - self._thread_pool_manager._futures = [future] - self._thread_pool_manager.prune_to_validate_has_reached_futures_limit() - - with self.assertRaises(RuntimeError): - self._thread_pool_manager.shutdown_if_exception() - def test_given_exception_during_pruning_when_check_for_errors_and_shutdown_then_shutdown_and_raise(self): future = Mock(spec=Future) future.exception.return_value = RuntimeError @@ -54,10 +33,6 @@ def test_given_exception_during_pruning_when_check_for_errors_and_shutdown_then_ self._thread_pool_manager.check_for_errors_and_shutdown() self._threadpool.shutdown.assert_called_with(wait=False, cancel_futures=True) - def test_shutdown(self): - self._thread_pool_manager.shutdown() - self._threadpool.shutdown.assert_called_with(wait=False, cancel_futures=True) - def test_is_done_is_false_if_not_all_futures_are_done(self): future = Mock(spec=Future) future.done.return_value = False diff --git a/airbyte-cdk/python/unit_tests/sources/streams/http/auth/test_auth.py b/airbyte-cdk/python/unit_tests/sources/streams/http/auth/test_auth.py index 444031526a906..a1ada8b0ff63e 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/http/auth/test_auth.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/http/auth/test_auth.py @@ -170,4 +170,4 @@ def test_refresh_access_authenticator(self): refresh_access_token_authenticator=TestOauth2Authenticator.refresh_access_token_authenticator, ) expected_headers = {"Authorization": "Basic Y2xpZW50X2lkOmNsaWVudF9zZWNyZXQ="} - assert expected_headers == oauth.get_refresh_access_token_headers() + assert oauth.get_refresh_access_token_headers() == expected_headers diff --git a/airbyte-cdk/python/unit_tests/sources/streams/http/test_availability_strategy.py b/airbyte-cdk/python/unit_tests/sources/streams/http/test_availability_strategy.py index b63af79738546..c3a2d8d7f1426 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/http/test_availability_strategy.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/http/test_availability_strategy.py @@ -113,7 +113,7 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: else: actual_is_available, reason = http_stream.check_availability(logger) - assert expected_is_available == actual_is_available + assert actual_is_available == expected_is_available if expected_is_available: assert reason is None else: diff --git a/airbyte-cdk/python/unit_tests/sources/streams/http/test_http.py b/airbyte-cdk/python/unit_tests/sources/streams/http/test_http.py index e826e74a47ee8..c9836e4c28517 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/http/test_http.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/http/test_http.py @@ -123,7 +123,7 @@ def test_next_page_token_is_input_to_other_methods(mocker): expected = [{"data": 1}, {"data": 2}, {"data": 3}, {"data": 4}, {"data": 5}, {"data": 6}] - assert expected == records + assert records == expected class StubBadUrlHttpStream(StubBasicReadHttpStream): diff --git a/airbyte-cdk/python/unit_tests/sources/streams/test_stream_read.py b/airbyte-cdk/python/unit_tests/sources/streams/test_stream_read.py index 6f12585ca2b68..b64b403ebe621 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/test_stream_read.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/test_stream_read.py @@ -26,7 +26,7 @@ from airbyte_cdk.sources.message import InMemoryMessageRepository, MessageRepository from airbyte_cdk.sources.streams import Stream from airbyte_cdk.sources.streams.concurrent.adapters import StreamFacade -from airbyte_cdk.sources.streams.concurrent.cursor import Cursor, NoopCursor +from airbyte_cdk.sources.streams.concurrent.cursor import Cursor, FinalStateCursor from airbyte_cdk.sources.streams.concurrent.partitions.partition import Partition from airbyte_cdk.sources.streams.concurrent.partitions.record import Record from airbyte_cdk.sources.streams.core import StreamData @@ -105,8 +105,9 @@ def _stream(slice_to_partition_mapping, slice_logger, logger, message_repository return _MockStream(slice_to_partition_mapping) -def _concurrent_stream(slice_to_partition_mapping, slice_logger, logger, message_repository, cursor: Cursor = NoopCursor()): +def _concurrent_stream(slice_to_partition_mapping, slice_logger, logger, message_repository, cursor: Optional[Cursor] = None): stream = _stream(slice_to_partition_mapping, slice_logger, logger, message_repository) + cursor = cursor or FinalStateCursor(stream_name=stream.name, stream_namespace=stream.namespace, message_repository=message_repository) source = Mock() source._slice_logger = slice_logger source.message_repository = message_repository @@ -126,6 +127,15 @@ def _incremental_concurrent_stream(slice_to_partition_mapping, slice_logger, log return stream +def _stream_with_no_cursor_field(slice_to_partition_mapping, slice_logger, logger, message_repository): + def get_updated_state(current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> MutableMapping[str, Any]: + raise Exception("I shouldn't be invoked by a full_refresh stream") + + mock_stream = _MockStream(slice_to_partition_mapping) + mock_stream.get_updated_state = get_updated_state + return mock_stream + + @pytest.mark.parametrize( "constructor", [ @@ -177,7 +187,7 @@ def test_full_refresh_read_a_single_slice_with_debug(constructor): actual_records = _read(stream, configured_stream, logger, slice_logger, message_repository, state_manager, internal_config) - assert expected_records == actual_records + assert actual_records == expected_records @pytest.mark.parametrize( @@ -223,7 +233,7 @@ def test_full_refresh_read_a_single_slice(constructor): actual_records = _read(stream, configured_stream, logger, slice_logger, message_repository, state_manager, internal_config) - assert expected_records == actual_records + assert actual_records == expected_records @pytest.mark.parametrize( @@ -231,9 +241,10 @@ def test_full_refresh_read_a_single_slice(constructor): [ pytest.param(_stream, id="synchronous_reader"), pytest.param(_concurrent_stream, id="concurrent_reader"), + pytest.param(_stream_with_no_cursor_field, id="no_cursor_field"), ], ) -def test_full_refresh_read_a_two_slices(constructor): +def test_full_refresh_read_two_slices(constructor): # This test verifies that a concurrent stream adapted from a Stream behaves the same as the Stream object # It is done by running the same test cases on both streams configured_stream = ConfiguredAirbyteStream(stream=AirbyteStream(name="mock_stream", supported_sync_modes=[SyncMode.full_refresh], json_schema={}), sync_mode=SyncMode.full_refresh,destination_sync_mode=DestinationSyncMode.overwrite) @@ -260,7 +271,7 @@ def test_full_refresh_read_a_two_slices(constructor): ] # Temporary check to only validate the final state message for synchronous sources since it has not been implemented for concurrent yet - if constructor == _stream: + if constructor == _stream or constructor == _stream_with_no_cursor_field: expected_records.append( AirbyteMessage( type=MessageType.STATE, @@ -278,7 +289,7 @@ def test_full_refresh_read_a_two_slices(constructor): for record in expected_records: assert record in actual_records - assert len(expected_records) == len(actual_records) + assert len(actual_records) == len(expected_records) def test_incremental_read_two_slices(): @@ -313,7 +324,7 @@ def test_incremental_read_two_slices(): for record in expected_records: assert record in actual_records - assert len(expected_records) == len(actual_records) + assert len(actual_records) == len(expected_records) def test_concurrent_incremental_read_two_slices(): @@ -350,7 +361,7 @@ def test_concurrent_incremental_read_two_slices(): for record in expected_records: assert record in actual_records - assert len(expected_records) == len(actual_records) + assert len(actual_records) == len(expected_records) # We don't have a real source that reads from the message_repository for state, so we read from the queue directly to verify # the cursor observed records correctly and updated partition states diff --git a/airbyte-cdk/python/unit_tests/sources/test_abstract_source.py b/airbyte-cdk/python/unit_tests/sources/test_abstract_source.py index 7bd56e9b2b497..4ebfa6a0e771c 100644 --- a/airbyte-cdk/python/unit_tests/sources/test_abstract_source.py +++ b/airbyte-cdk/python/unit_tests/sources/test_abstract_source.py @@ -144,13 +144,13 @@ def message_repository(): def test_successful_check(): """Tests that if a source returns TRUE for the connection check the appropriate connectionStatus success message is returned""" expected = AirbyteConnectionStatus(status=Status.SUCCEEDED) - assert expected == MockSource(check_lambda=lambda: (True, None)).check(logger, {}) + assert MockSource(check_lambda=lambda: (True, None)).check(logger, {}) == expected def test_failed_check(): """Tests that if a source returns FALSE for the connection check the appropriate connectionStatus failure message is returned""" expected = AirbyteConnectionStatus(status=Status.FAILED, message="'womp womp'") - assert expected == MockSource(check_lambda=lambda: (False, "womp womp")).check(logger, {}) + assert MockSource(check_lambda=lambda: (False, "womp womp")).check(logger, {}) == expected def test_raising_check(mocker): @@ -187,6 +187,10 @@ def read_records(self, **kwargs) -> Iterable[Mapping[str, Any]]: # type: ignore def primary_key(self) -> Optional[Union[str, List[str], List[List[str]]]]: return "pk" + @property + def cursor_field(self) -> Union[str, List[str]]: + return ["updated_at"] + class MockStreamWithState(MockStream): cursor_field = "cursor" @@ -249,7 +253,7 @@ def test_discover(mocker): expected = AirbyteCatalog(streams=[airbyte_stream1, airbyte_stream2]) src = MockSource(check_lambda=lambda: (True, None), streams=[stream1, stream2]) - assert expected == src.discover(logger, {}) + assert src.discover(logger, {}) == expected def test_read_nonexistent_stream_raises_exception(mocker): @@ -261,9 +265,12 @@ def test_read_nonexistent_stream_raises_exception(mocker): src = MockSource(streams=[s1]) catalog = ConfiguredAirbyteCatalog(streams=[_configured_stream(s2, SyncMode.full_refresh)]) - with pytest.raises(KeyError): + with pytest.raises(AirbyteTracedException) as exc_info: list(src.read(logger, {}, catalog)) + assert exc_info.value.failure_type == FailureType.config_error + assert "not found in the source" in exc_info.value.internal_message + def test_read_nonexistent_stream_without_raises_exception(mocker): """Tests that attempting to sync a stream which the source does not return from the `streams` method raises an exception""" @@ -434,7 +441,7 @@ def test_valid_full_refresh_read_no_slices(mocker): ) messages = _fix_emitted_at(list(src.read(logger, {}, catalog))) - assert expected == messages + assert messages == expected def test_valid_full_refresh_read_with_slices(mocker): @@ -478,7 +485,7 @@ def test_valid_full_refresh_read_with_slices(mocker): messages = _fix_emitted_at(list(src.read(logger, {}, catalog))) - assert expected == messages + assert messages == expected def test_full_refresh_does_not_use_incoming_state(mocker): @@ -748,7 +755,7 @@ def test_with_checkpoint_interval(self, mocker, use_legacy): ) messages = _fix_emitted_at(list(src.read(logger, {}, catalog, state=input_state))) - assert expected == messages + assert messages == expected @pytest.mark.parametrize( "use_legacy", @@ -805,7 +812,7 @@ def test_with_no_interval(self, mocker, use_legacy): messages = _fix_emitted_at(list(src.read(logger, {}, catalog, state=input_state))) - assert expected == messages + assert messages == expected @pytest.mark.parametrize( "use_legacy", @@ -890,7 +897,7 @@ def test_with_slices(self, mocker, use_legacy): messages = _fix_emitted_at(list(src.read(logger, {}, catalog, state=input_state))) - assert expected == messages + assert messages == expected @pytest.mark.parametrize( "use_legacy", @@ -974,7 +981,7 @@ def test_no_slices(self, mocker, use_legacy, slices): messages = _fix_emitted_at(list(src.read(logger, {}, catalog, state=input_state))) - assert expected == messages + assert messages == expected @pytest.mark.parametrize( "use_legacy", @@ -1281,7 +1288,7 @@ def test_continue_sync_with_failed_streams(mocker, exception_to_raise, expected_ messages = [_remove_stack_trace(message) for message in src.read(logger, {}, catalog)] messages = _fix_emitted_at(messages) - assert expected == messages + assert messages == expected assert "lamentations" in exc.value.message assert exc.value.failure_type == FailureType.config_error @@ -1331,7 +1338,7 @@ def test_continue_sync_source_override_false(mocker): messages = [_remove_stack_trace(message) for message in src.read(logger, {}, catalog)] messages = _fix_emitted_at(messages) - assert expected == messages + assert messages == expected assert "lamentations" in exc.value.message assert exc.value.failure_type == FailureType.config_error @@ -1382,7 +1389,7 @@ def test_sync_error_trace_messages_obfuscate_secrets(mocker): messages = [_remove_stack_trace(message) for message in src.read(logger, {}, catalog)] messages = _fix_emitted_at(messages) - assert expected == messages + assert messages == expected assert "lamentations" in exc.value.message assert exc.value.failure_type == FailureType.config_error @@ -1427,7 +1434,7 @@ def test_continue_sync_with_failed_streams_with_override_false(mocker): messages = [_remove_stack_trace(message) for message in src.read(logger, {}, catalog)] messages = _fix_emitted_at(messages) - assert expected == messages + assert messages == expected assert "lamentations" in exc.value.message assert exc.value.failure_type == FailureType.config_error diff --git a/airbyte-cdk/python/unit_tests/sources/test_concurrent_source.py b/airbyte-cdk/python/unit_tests/sources/test_concurrent_source.py deleted file mode 100644 index ebd082a2b1523..0000000000000 --- a/airbyte-cdk/python/unit_tests/sources/test_concurrent_source.py +++ /dev/null @@ -1,110 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# -import concurrent -import logging -from typing import Any, Callable, Dict, Iterable, Mapping, Optional, Tuple -from unittest.mock import Mock - -from airbyte_cdk.models import SyncMode -from airbyte_cdk.sources.concurrent_source.concurrent_source import ConcurrentSource -from airbyte_cdk.sources.concurrent_source.thread_pool_manager import ThreadPoolManager -from airbyte_cdk.sources.message import InMemoryMessageRepository, MessageRepository -from airbyte_cdk.sources.streams.concurrent.abstract_stream import AbstractStream -from airbyte_cdk.sources.streams.concurrent.availability_strategy import StreamAvailability, StreamAvailable, StreamUnavailable -from airbyte_cdk.sources.streams.concurrent.cursor import Cursor, NoopCursor -from airbyte_cdk.sources.streams.concurrent.partitions.partition import Partition -from airbyte_cdk.sources.streams.concurrent.partitions.record import Record -from airbyte_protocol.models import AirbyteStream - -logger = logging.getLogger("airbyte") - - -class _MockSource(ConcurrentSource): - def __init__( - self, - check_lambda: Callable[[], Tuple[bool, Optional[Any]]] = None, - per_stream: bool = True, - message_repository: MessageRepository = InMemoryMessageRepository(), - threadpool: ThreadPoolManager = ThreadPoolManager( - concurrent.futures.ThreadPoolExecutor(max_workers=1, thread_name_prefix="workerpool"), logger - ), - exception_on_missing_stream: bool = True, - ): - super().__init__(threadpool, Mock(), Mock(), message_repository) - self.check_lambda = check_lambda - self.per_stream = per_stream - self.exception_on_missing_stream = exception_on_missing_stream - self._message_repository = message_repository - - -MESSAGE_FROM_REPOSITORY = Mock() - - -class _MockStream(AbstractStream): - def __init__(self, name: str, available: bool = True, json_schema: Dict[str, Any] = {}): - self._name = name - self._available = available - self._json_schema = json_schema - - def generate_partitions(self) -> Iterable[Partition]: - yield _MockPartition(self._name) - - @property - def name(self) -> str: - return self._name - - @property - def cursor_field(self) -> Optional[str]: - raise NotImplementedError - - def check_availability(self) -> StreamAvailability: - if self._available: - return StreamAvailable() - else: - return StreamUnavailable("stream is unavailable") - - def get_json_schema(self) -> Mapping[str, Any]: - return self._json_schema - - def as_airbyte_stream(self) -> AirbyteStream: - return AirbyteStream(name=self.name, json_schema=self.get_json_schema(), supported_sync_modes=[SyncMode.full_refresh]) - - def log_stream_sync_configuration(self) -> None: - raise NotImplementedError - - @property - def cursor(self) -> Cursor: - return NoopCursor() - - -class _MockPartition(Partition): - def __init__(self, name: str): - self._name = name - self._closed = False - - def read(self) -> Iterable[Record]: - yield from [Record({"key": "value"}, self._name)] - - def to_slice(self) -> Optional[Mapping[str, Any]]: - return {} - - def stream_name(self) -> str: - return self._name - - def close(self) -> None: - self._closed = True - - def is_closed(self) -> bool: - return self._closed - - def __hash__(self) -> int: - return hash(self._name) - - -def test_concurrent_source_reading_from_no_streams(): - stream = _MockStream("my_stream", False, {}) - source = _MockSource() - messages = [] - for m in source.read([stream]): - messages.append(m) diff --git a/airbyte-cdk/python/unit_tests/sources/test_source_read.py b/airbyte-cdk/python/unit_tests/sources/test_source_read.py index dd08c4d18dacf..61b4f0229534e 100644 --- a/airbyte-cdk/python/unit_tests/sources/test_source_read.py +++ b/airbyte-cdk/python/unit_tests/sources/test_source_read.py @@ -27,7 +27,7 @@ from airbyte_cdk.sources.message import InMemoryMessageRepository from airbyte_cdk.sources.streams import Stream from airbyte_cdk.sources.streams.concurrent.adapters import StreamFacade -from airbyte_cdk.sources.streams.concurrent.cursor import NoopCursor +from airbyte_cdk.sources.streams.concurrent.cursor import FinalStateCursor from airbyte_cdk.sources.streams.core import StreamData from airbyte_cdk.utils import AirbyteTracedException from unit_tests.sources.streams.concurrent.scenarios.thread_based_concurrent_stream_source_builder import NeverLogSliceLogger @@ -126,7 +126,8 @@ def test_concurrent_source_yields_the_same_messages_as_abstract_source_when_no_e config = {} catalog = _create_configured_catalog(source._streams) - messages_from_abstract_source = _read_from_source(source, logger, config, catalog, state, None) + # FIXME this is currently unused in this test + # messages_from_abstract_source = _read_from_source(source, logger, config, catalog, state, None) messages_from_concurrent_source = _read_from_source(concurrent_source, logger, config, catalog, state, None) expected_messages = [ @@ -267,7 +268,7 @@ def test_concurrent_source_yields_the_same_messages_as_abstract_source_when_no_e ), ), ] - _verify_messages(expected_messages, messages_from_abstract_source, messages_from_concurrent_source) + _verify_messages(expected_messages, messages_from_concurrent_source) @freezegun.freeze_time("2020-01-01T00:00:00") @@ -283,53 +284,9 @@ def test_concurrent_source_yields_the_same_messages_as_abstract_source_when_a_tr messages_from_abstract_source = _read_from_source(source, logger, config, catalog, state, AirbyteTracedException) messages_from_concurrent_source = _read_from_source(concurrent_source, logger, config, catalog, state, AirbyteTracedException) - expected_messages = [ - AirbyteMessage( - type=MessageType.TRACE, - trace=AirbyteTraceMessage( - type=TraceType.STREAM_STATUS, - emitted_at=1577836800000.0, - error=None, - estimate=None, - stream_status=AirbyteStreamStatusTraceMessage( - stream_descriptor=StreamDescriptor(name="stream0"), status=AirbyteStreamStatus(AirbyteStreamStatus.STARTED) - ), - ), - ), - AirbyteMessage( - type=MessageType.TRACE, - trace=AirbyteTraceMessage( - type=TraceType.STREAM_STATUS, - emitted_at=1577836800000.0, - error=None, - estimate=None, - stream_status=AirbyteStreamStatusTraceMessage( - stream_descriptor=StreamDescriptor(name="stream0"), status=AirbyteStreamStatus(AirbyteStreamStatus.RUNNING) - ), - ), - ), - AirbyteMessage( - type=MessageType.RECORD, - record=AirbyteRecordMessage( - stream="stream0", - data=records[0], - emitted_at=1577836800000, - ), - ), - AirbyteMessage( - type=MessageType.TRACE, - trace=AirbyteTraceMessage( - type=TraceType.STREAM_STATUS, - emitted_at=1577836800000.0, - error=None, - estimate=None, - stream_status=AirbyteStreamStatusTraceMessage( - stream_descriptor=StreamDescriptor(name="stream0"), status=AirbyteStreamStatus(AirbyteStreamStatus.INCOMPLETE) - ), - ), - ), - ] - _verify_messages(expected_messages, messages_from_abstract_source, messages_from_concurrent_source) + _assert_status_messages(messages_from_abstract_source, messages_from_concurrent_source) + _assert_record_messages(messages_from_abstract_source, messages_from_concurrent_source) + _assert_errors(messages_from_abstract_source, messages_from_concurrent_source) @freezegun.freeze_time("2020-01-01T00:00:00") @@ -346,53 +303,38 @@ def test_concurrent_source_yields_the_same_messages_as_abstract_source_when_an_e messages_from_abstract_source = _read_from_source(source, logger, config, catalog, state, AirbyteTracedException) messages_from_concurrent_source = _read_from_source(concurrent_source, logger, config, catalog, state, RuntimeError) - expected_messages = [ - AirbyteMessage( - type=MessageType.TRACE, - trace=AirbyteTraceMessage( - type=TraceType.STREAM_STATUS, - emitted_at=1577836800000.0, - error=None, - estimate=None, - stream_status=AirbyteStreamStatusTraceMessage( - stream_descriptor=StreamDescriptor(name="stream0"), status=AirbyteStreamStatus(AirbyteStreamStatus.STARTED) - ), - ), - ), - AirbyteMessage( - type=MessageType.TRACE, - trace=AirbyteTraceMessage( - type=TraceType.STREAM_STATUS, - emitted_at=1577836800000.0, - error=None, - estimate=None, - stream_status=AirbyteStreamStatusTraceMessage( - stream_descriptor=StreamDescriptor(name="stream0"), status=AirbyteStreamStatus(AirbyteStreamStatus.RUNNING) - ), - ), - ), - AirbyteMessage( - type=MessageType.RECORD, - record=AirbyteRecordMessage( - stream="stream0", - data=records[0], - emitted_at=1577836800000, - ), - ), - AirbyteMessage( - type=MessageType.TRACE, - trace=AirbyteTraceMessage( - type=TraceType.STREAM_STATUS, - emitted_at=1577836800000.0, - error=None, - estimate=None, - stream_status=AirbyteStreamStatusTraceMessage( - stream_descriptor=StreamDescriptor(name="stream0"), status=AirbyteStreamStatus(AirbyteStreamStatus.INCOMPLETE) - ), - ), - ), - ] - _verify_messages(expected_messages, messages_from_abstract_source, messages_from_concurrent_source) + _assert_status_messages(messages_from_abstract_source, messages_from_concurrent_source) + _assert_record_messages(messages_from_abstract_source, messages_from_concurrent_source) + _assert_errors(messages_from_abstract_source, messages_from_concurrent_source) + + +def _assert_status_messages(messages_from_abstract_source, messages_from_concurrent_source): + status_from_concurrent_source = [message for message in messages_from_concurrent_source if message.type == MessageType.TRACE and message.trace.type == TraceType.STREAM_STATUS] + + assert status_from_concurrent_source + _verify_messages( + [message for message in messages_from_abstract_source if message.type == MessageType.TRACE and message.trace.type == TraceType.STREAM_STATUS], + status_from_concurrent_source, + ) + + +def _assert_record_messages(messages_from_abstract_source, messages_from_concurrent_source): + records_from_concurrent_source = [message for message in messages_from_concurrent_source if message.type == MessageType.RECORD] + + assert records_from_concurrent_source + _verify_messages( + [message for message in messages_from_abstract_source if message.type == MessageType.RECORD], + records_from_concurrent_source, + ) + + +def _assert_errors(messages_from_abstract_source, messages_from_concurrent_source): + errors_from_concurrent_source = [message for message in messages_from_concurrent_source if message.type == MessageType.TRACE and message.trace.type == TraceType.ERROR] + errors_from_abstract_source = [message for message in messages_from_abstract_source if message.type == MessageType.TRACE and message.trace.type == TraceType.ERROR] + + assert errors_from_concurrent_source + # exceptions might differ from both framework hence we only assert the count + assert len(errors_from_concurrent_source) == len(errors_from_abstract_source) def _init_logger(): @@ -409,9 +351,8 @@ def _init_sources(stream_slice_to_partitions, state, logger): def _init_source(stream_slice_to_partitions, state, logger, source): - cursor = NoopCursor() streams = [ - StreamFacade.create_from_stream(_MockStream(stream_slices, f"stream{i}"), source, logger, state, cursor) + StreamFacade.create_from_stream(_MockStream(stream_slices, f"stream{i}"), source, logger, state, FinalStateCursor(stream_name=f"stream{i}", stream_namespace=None, message_repository=InMemoryMessageRepository())) for i, stream_slices in enumerate(stream_slice_to_partitions) ] source.set_streams(streams) @@ -443,7 +384,7 @@ def _read_from_source(source, logger, config, catalog, state, expected_exception return messages -def _verify_messages(expected_messages, messages_from_abstract_source, messages_from_concurrent_source): +def _verify_messages(expected_messages, messages_from_concurrent_source): assert _compare(expected_messages, messages_from_concurrent_source) diff --git a/airbyte-cdk/python/unit_tests/sources/utils/test_catalog_helpers.py b/airbyte-cdk/python/unit_tests/sources/utils/test_catalog_helpers.py index d8ae2bdd802a5..8f4862332ea81 100644 --- a/airbyte-cdk/python/unit_tests/sources/utils/test_catalog_helpers.py +++ b/airbyte-cdk/python/unit_tests/sources/utils/test_catalog_helpers.py @@ -27,4 +27,4 @@ def test_coerce_catalog_as_full_refresh(): ] ) - assert expected == CatalogHelper.coerce_catalog_as_full_refresh(input) + assert CatalogHelper.coerce_catalog_as_full_refresh(input) == expected diff --git a/airbyte-cdk/python/unit_tests/sources/utils/test_record_helper.py b/airbyte-cdk/python/unit_tests/sources/utils/test_record_helper.py index 7f3d535ce05d1..b5476180309bc 100644 --- a/airbyte-cdk/python/unit_tests/sources/utils/test_record_helper.py +++ b/airbyte-cdk/python/unit_tests/sources/utils/test_record_helper.py @@ -45,7 +45,7 @@ def test_data_or_record_to_airbyte_record(test_name, data, expected_message): transformer.transform.assert_called_with(data, schema) else: assert not transformer.transform.called - assert expected_message == message + assert message == expected_message @pytest.mark.parametrize( @@ -69,7 +69,7 @@ def test_log_or_trace_to_message(test_name, data, expected_message): message = stream_data_to_airbyte_message(STREAM_NAME, data, transformer, schema) assert not transformer.transform.called - assert expected_message == message + assert message == expected_message @pytest.mark.parametrize( diff --git a/airbyte-cdk/python/unit_tests/test/test_entrypoint_wrapper.py b/airbyte-cdk/python/unit_tests/test/test_entrypoint_wrapper.py index d0564cdf93f62..35a8b300fb124 100644 --- a/airbyte-cdk/python/unit_tests/test/test_entrypoint_wrapper.py +++ b/airbyte-cdk/python/unit_tests/test/test_entrypoint_wrapper.py @@ -3,7 +3,7 @@ import json import logging import os -from typing import Any, Iterator, List +from typing import Any, Iterator, List, Mapping from unittest import TestCase from unittest.mock import Mock, patch @@ -16,7 +16,9 @@ AirbyteLogMessage, AirbyteMessage, AirbyteRecordMessage, + AirbyteStateBlob, AirbyteStateMessage, + AirbyteStreamState, AirbyteStreamStatus, AirbyteStreamStatusTraceMessage, AirbyteTraceMessage, @@ -28,8 +30,8 @@ ) -def _a_state_message(state: Any) -> AirbyteMessage: - return AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(data=state)) +def _a_state_message(stream_name: str, stream_state: Mapping[str, Any]) -> AirbyteMessage: + return AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(stream=AirbyteStreamState(stream_descriptor=StreamDescriptor(name=stream_name), stream_state=AirbyteStateBlob(**stream_state)))) def _a_status_message(stream_name: str, status: AirbyteStreamStatus) -> AirbyteMessage: @@ -49,7 +51,7 @@ def _a_status_message(stream_name: str, status: AirbyteStreamStatus) -> AirbyteM _A_RECORD = AirbyteMessage( type=Type.RECORD, record=AirbyteRecordMessage(stream="stream", data={"record key": "record value"}, emitted_at=0) ) -_A_STATE_MESSAGE = _a_state_message({"state key": "state value for _A_STATE_MESSAGE"}) +_A_STATE_MESSAGE = _a_state_message("stream_name", {"state key": "state value for _A_STATE_MESSAGE"}) _A_LOG = AirbyteMessage(type=Type.LOG, log=AirbyteLogMessage(level=Level.INFO, message="This is an Airbyte log message")) _AN_ERROR_MESSAGE = AirbyteMessage( type=Type.TRACE, @@ -176,8 +178,9 @@ def test_given_state_message_and_records_when_read_then_output_has_records_and_s @patch("airbyte_cdk.test.entrypoint_wrapper.AirbyteEntrypoint") def test_given_many_state_messages_and_records_when_read_then_output_has_records_and_state_message(self, entrypoint): - last_emitted_state = {"last state key": "last state value"} - entrypoint.return_value.run.return_value = _to_entrypoint_output([_A_STATE_MESSAGE, _a_state_message(last_emitted_state)]) + state_value = {"state_key": "last state value"} + last_emitted_state = AirbyteStreamState(stream_descriptor=StreamDescriptor(name="stream_name"), stream_state=AirbyteStateBlob(**state_value)) + entrypoint.return_value.run.return_value = _to_entrypoint_output([_A_STATE_MESSAGE, _a_state_message("stream_name", state_value)]) output = read(self._a_source, _A_CONFIG, _A_CATALOG, _A_STATE) diff --git a/airbyte-cdk/python/unit_tests/test_connector.py b/airbyte-cdk/python/unit_tests/test_connector.py index c7116c6b25c19..5ebcfb9b86066 100644 --- a/airbyte-cdk/python/unit_tests/test_connector.py +++ b/airbyte-cdk/python/unit_tests/test_connector.py @@ -40,7 +40,7 @@ def test_from_file(self): f.write(json.dumps(self.VALID_SPEC)) f.flush() actual = AirbyteSpec.from_file(f.name) - assert expected == json.loads(actual.spec_string) + assert json.loads(actual.spec_string) == expected def test_from_file_nonexistent(self): with pytest.raises(OSError): @@ -80,7 +80,7 @@ def integration(): def test_read_config(nonempty_file, integration: Connector, mock_config): actual = integration.read_config(nonempty_file.name) - assert mock_config == actual + assert actual == mock_config def test_read_non_json_config(nonjson_file, integration: Connector): @@ -92,7 +92,7 @@ def test_write_config(integration, mock_config): config_path = Path(tempfile.gettempdir()) / "config.json" integration.write_config(mock_config, str(config_path)) with open(config_path, "r") as actual: - assert mock_config == json.loads(actual.read()) + assert json.loads(actual.read()) == mock_config class TestConnectorSpec: diff --git a/airbyte-cdk/python/unit_tests/test_entrypoint.py b/airbyte-cdk/python/unit_tests/test_entrypoint.py index 7451a320d4042..1c5f8427bbb03 100644 --- a/airbyte-cdk/python/unit_tests/test_entrypoint.py +++ b/airbyte-cdk/python/unit_tests/test_entrypoint.py @@ -4,6 +4,7 @@ import os from argparse import Namespace +from collections import defaultdict from copy import deepcopy from typing import Any, List, Mapping, MutableMapping, Union from unittest import mock @@ -20,14 +21,25 @@ AirbyteControlMessage, AirbyteMessage, AirbyteRecordMessage, + AirbyteStateBlob, + AirbyteStateMessage, + AirbyteStateType, AirbyteStream, + AirbyteStreamState, + AirbyteStreamStatus, + AirbyteStreamStatusTraceMessage, + AirbyteTraceMessage, ConnectorSpecification, OrchestratorType, Status, + StreamDescriptor, SyncMode, + TraceType, Type, ) +from airbyte_cdk.models.airbyte_protocol import AirbyteStateStats from airbyte_cdk.sources import Source +from airbyte_cdk.sources.connector_state_manager import HashableStreamDescriptor from airbyte_cdk.utils import AirbyteTracedException @@ -94,14 +106,14 @@ def test_airbyte_entrypoint_init(mocker): ("check", {"config": "config_path"}, {"command": "check", "config": "config_path", "debug": False}), ("discover", {"config": "config_path", "debug": ""}, {"command": "discover", "config": "config_path", "debug": True}), ( - "read", - {"config": "config_path", "catalog": "catalog_path", "state": "None"}, - {"command": "read", "config": "config_path", "catalog": "catalog_path", "state": "None", "debug": False}, + "read", + {"config": "config_path", "catalog": "catalog_path", "state": "None"}, + {"command": "read", "config": "config_path", "catalog": "catalog_path", "state": "None", "debug": False}, ), ( - "read", - {"config": "config_path", "catalog": "catalog_path", "state": "state_path", "debug": ""}, - {"command": "read", "config": "config_path", "catalog": "catalog_path", "state": "state_path", "debug": True}, + "read", + {"config": "config_path", "catalog": "catalog_path", "state": "state_path", "debug": ""}, + {"command": "read", "config": "config_path", "catalog": "catalog_path", "state": "state_path", "debug": True}, ), ], ) @@ -169,9 +181,9 @@ def config_mock(mocker, request): ({"username": "fake"}, {"type": "object", "properties": {"user": {"type": "string"}}}, True), ({"username": "fake"}, {"type": "object", "properties": {"user": {"type": "string", "airbyte_secret": True}}}, True), ( - {"username": "fake", "_limit": 22}, - {"type": "object", "properties": {"username": {"type": "string"}}, "additionalProperties": False}, - True, + {"username": "fake", "_limit": 22}, + {"type": "object", "properties": {"username": {"type": "string"}}, "additionalProperties": False}, + True, ), ], indirect=["config_mock"], @@ -248,7 +260,7 @@ def test_run_read(entrypoint: AirbyteEntrypoint, mocker, spec_mock, config_mock) def test_given_message_emitted_during_config_when_read_then_emit_message_before_next_steps( - entrypoint: AirbyteEntrypoint, mocker, spec_mock, config_mock + entrypoint: AirbyteEntrypoint, mocker, spec_mock, config_mock ): parsed_args = Namespace(command="read", config="config_path", state="statepath", catalog="catalogpath") mocker.patch.object(MockSource, "read_catalog", side_effect=ValueError) @@ -309,3 +321,108 @@ def test_filter_internal_requests(deployment_mode, url, expected_error): else: actual_response = session.send(request=prepared_request) assert isinstance(actual_response, requests.Response) + + +@pytest.mark.parametrize( + "incoming_message, stream_message_count, expected_message, expected_records_by_stream", + [ + pytest.param( + AirbyteMessage(type=Type.RECORD, record=AirbyteRecordMessage(stream="customers", data={"id": "12345"}, emitted_at=1)), + {HashableStreamDescriptor(name="customers"): 100.0}, + AirbyteMessage(type=Type.RECORD, record=AirbyteRecordMessage(stream="customers", data={"id": "12345"}, emitted_at=1)), + {HashableStreamDescriptor(name="customers"): 101.0}, + id="test_handle_record_message", + ), + pytest.param( + AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(type=AirbyteStateType.STREAM, stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name="customers"), stream_state=AirbyteStateBlob(updated_at="2024-02-02")))), + {HashableStreamDescriptor(name="customers"): 100.0}, + AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(type=AirbyteStateType.STREAM, stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name="customers"), stream_state=AirbyteStateBlob(updated_at="2024-02-02")), + sourceStats=AirbyteStateStats(recordCount=100.0))), + {HashableStreamDescriptor(name="customers"): 0.0}, + id="test_handle_state_message", + ), + pytest.param( + AirbyteMessage(type=Type.RECORD, record=AirbyteRecordMessage(stream="customers", data={"id": "12345"}, emitted_at=1)), + defaultdict(float), + AirbyteMessage(type=Type.RECORD, record=AirbyteRecordMessage(stream="customers", data={"id": "12345"}, emitted_at=1)), + {HashableStreamDescriptor(name="customers"): 1.0}, + id="test_handle_first_record_message", + ), + pytest.param( + AirbyteMessage(type=Type.TRACE, trace=AirbyteTraceMessage(type=TraceType.STREAM_STATUS, + stream_status=AirbyteStreamStatusTraceMessage( + stream_descriptor=StreamDescriptor(name="customers"), + status=AirbyteStreamStatus.COMPLETE), emitted_at=1)), + {HashableStreamDescriptor(name="customers"): 5.0}, + AirbyteMessage(type=Type.TRACE, trace=AirbyteTraceMessage(type=TraceType.STREAM_STATUS, + stream_status=AirbyteStreamStatusTraceMessage( + stream_descriptor=StreamDescriptor(name="customers"), + status=AirbyteStreamStatus.COMPLETE), emitted_at=1)), + {HashableStreamDescriptor(name="customers"): 5.0}, + id="test_handle_other_message_type", + ), + pytest.param( + AirbyteMessage(type=Type.RECORD, record=AirbyteRecordMessage(stream="others", data={"id": "12345"}, emitted_at=1)), + {HashableStreamDescriptor(name="customers"): 100.0, HashableStreamDescriptor(name="others"): 27.0}, + AirbyteMessage(type=Type.RECORD, record=AirbyteRecordMessage(stream="others", data={"id": "12345"}, emitted_at=1)), + {HashableStreamDescriptor(name="customers"): 100.0, HashableStreamDescriptor(name="others"): 28.0}, + id="test_handle_record_message_for_other_stream", + ), + pytest.param( + AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(type=AirbyteStateType.STREAM, stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name="others"), stream_state=AirbyteStateBlob(updated_at="2024-02-02")))), + {HashableStreamDescriptor(name="customers"): 100.0, HashableStreamDescriptor(name="others"): 27.0}, + AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(type=AirbyteStateType.STREAM, stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name="others"), stream_state=AirbyteStateBlob(updated_at="2024-02-02")), + sourceStats=AirbyteStateStats(recordCount=27.0))), + {HashableStreamDescriptor(name="customers"): 100.0, HashableStreamDescriptor(name="others"): 0.0}, + id="test_handle_state_message_for_other_stream", + ), + pytest.param( + AirbyteMessage(type=Type.RECORD, + record=AirbyteRecordMessage(stream="customers", namespace="public", data={"id": "12345"}, emitted_at=1)), + {HashableStreamDescriptor(name="customers", namespace="public"): 100.0}, + AirbyteMessage(type=Type.RECORD, + record=AirbyteRecordMessage(stream="customers", namespace="public", data={"id": "12345"}, emitted_at=1)), + {HashableStreamDescriptor(name="customers", namespace="public"): 101.0}, + id="test_handle_record_message_with_descriptor", + ), + pytest.param( + AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(type=AirbyteStateType.STREAM, stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name="customers", namespace="public"), + stream_state=AirbyteStateBlob(updated_at="2024-02-02")))), + {HashableStreamDescriptor(name="customers", namespace="public"): 100.0}, + AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(type=AirbyteStateType.STREAM, stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name="customers", namespace="public"), + stream_state=AirbyteStateBlob(updated_at="2024-02-02")), sourceStats=AirbyteStateStats(recordCount=100.0))), + {HashableStreamDescriptor(name="customers", namespace="public"): 0.0}, + id="test_handle_state_message_with_descriptor", + ), + pytest.param( + AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(type=AirbyteStateType.STREAM, stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name="others", namespace="public"), + stream_state=AirbyteStateBlob(updated_at="2024-02-02")))), + {HashableStreamDescriptor(name="customers", namespace="public"): 100.0}, + AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(type=AirbyteStateType.STREAM, stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name="others", namespace="public"), + stream_state=AirbyteStateBlob(updated_at="2024-02-02")), sourceStats=AirbyteStateStats(recordCount=0.0))), + {HashableStreamDescriptor(name="customers", namespace="public"): 100.0, + HashableStreamDescriptor(name="others", namespace="public"): 0.0}, + id="test_handle_state_message_no_records", + ), + ] +) +def test_handle_record_counts(incoming_message, stream_message_count, expected_message, expected_records_by_stream): + entrypoint = AirbyteEntrypoint(source=MockSource()) + actual_message = entrypoint.handle_record_counts(message=incoming_message, stream_message_count=stream_message_count) + assert actual_message == expected_message + + for stream_descriptor, message_count in stream_message_count.items(): + assert isinstance(message_count, float) + # Python assertions against different number types won't fail if the value is equivalent + assert message_count == expected_records_by_stream[stream_descriptor] + + if actual_message.type == Type.STATE: + assert isinstance(actual_message.state.sourceStats.recordCount, float), "recordCount value should be expressed as a float" diff --git a/airbyte-cdk/python/unit_tests/test_exception_handler.py b/airbyte-cdk/python/unit_tests/test_exception_handler.py index 3c6466dd46c74..42819942ade19 100644 --- a/airbyte-cdk/python/unit_tests/test_exception_handler.py +++ b/airbyte-cdk/python/unit_tests/test_exception_handler.py @@ -10,6 +10,7 @@ import pytest from airbyte_cdk.exception_handler import assemble_uncaught_exception from airbyte_cdk.models import AirbyteErrorTraceMessage, AirbyteLogMessage, AirbyteMessage, AirbyteTraceMessage +from airbyte_cdk.sources.streams.concurrent.exceptions import ExceptionWithDisplayMessage from airbyte_cdk.utils.traced_exception import AirbyteTracedException @@ -25,6 +26,13 @@ def test_given_exception_not_traced_exception_when_assemble_uncaught_exception_t assert isinstance(assembled_exception, AirbyteTracedException) +def test_given_exception_with_display_message_when_assemble_uncaught_exception_then_internal_message_contains_display_message(): + display_message = "some display message" + exception = ExceptionWithDisplayMessage(display_message) + assembled_exception = assemble_uncaught_exception(type(exception), exception) + assert display_message in assembled_exception.internal_message + + def test_uncaught_exception_handler(): cmd = "from airbyte_cdk.logger import init_logger; from airbyte_cdk.exception_handler import init_uncaught_exception_handler; logger = init_logger('airbyte'); init_uncaught_exception_handler(logger); raise 1" exception_message = "exceptions must derive from BaseException" diff --git a/airbyte-cdk/python/unit_tests/utils/test_message_utils.py b/airbyte-cdk/python/unit_tests/utils/test_message_utils.py new file mode 100644 index 0000000000000..496360ea46f37 --- /dev/null +++ b/airbyte-cdk/python/unit_tests/utils/test_message_utils.py @@ -0,0 +1,91 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + +import pytest +from airbyte_cdk.sources.connector_state_manager import HashableStreamDescriptor +from airbyte_cdk.utils.message_utils import get_stream_descriptor +from airbyte_protocol.models import ( + AirbyteControlConnectorConfigMessage, + AirbyteControlMessage, + AirbyteMessage, + AirbyteRecordMessage, + AirbyteStateBlob, + AirbyteStateMessage, + AirbyteStateStats, + AirbyteStateType, + AirbyteStreamState, + OrchestratorType, + StreamDescriptor, + Type, +) + + +def test_get_record_message_stream_descriptor(): + message = AirbyteMessage( + type=Type.RECORD, + record=AirbyteRecordMessage( + stream="test_stream", + namespace="test_namespace", + data={"id": "12345"}, + emitted_at=1, + ), + ) + expected_descriptor = HashableStreamDescriptor(name="test_stream", namespace="test_namespace") + assert get_stream_descriptor(message) == expected_descriptor + + +def test_get_record_message_stream_descriptor_no_namespace(): + message = AirbyteMessage( + type=Type.RECORD, + record=AirbyteRecordMessage( + stream="test_stream", data={"id": "12345"}, emitted_at=1 + ), + ) + expected_descriptor = HashableStreamDescriptor(name="test_stream", namespace=None) + assert get_stream_descriptor(message) == expected_descriptor + + +def test_get_state_message_stream_descriptor(): + message = AirbyteMessage( + type=Type.STATE, + state=AirbyteStateMessage( + type=AirbyteStateType.STREAM, + stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor( + name="test_stream", namespace="test_namespace" + ), + stream_state=AirbyteStateBlob(updated_at="2024-02-02"), + ), + sourceStats=AirbyteStateStats(recordCount=27.0), + ), + ) + expected_descriptor = HashableStreamDescriptor(name="test_stream", namespace="test_namespace") + assert get_stream_descriptor(message) == expected_descriptor + + +def test_get_state_message_stream_descriptor_no_namespace(): + message = AirbyteMessage( + type=Type.STATE, + state=AirbyteStateMessage( + type=AirbyteStateType.STREAM, + stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name="test_stream"), + stream_state=AirbyteStateBlob(updated_at="2024-02-02"), + ), + sourceStats=AirbyteStateStats(recordCount=27.0), + ), + ) + expected_descriptor = HashableStreamDescriptor(name="test_stream", namespace=None) + assert get_stream_descriptor(message) == expected_descriptor + + +def test_get_other_message_stream_descriptor_fails(): + message = AirbyteMessage( + type=Type.CONTROL, + control=AirbyteControlMessage( + type=OrchestratorType.CONNECTOR_CONFIG, + emitted_at=10, + connectorConfig=AirbyteControlConnectorConfigMessage(config={"any config": "a config value"}), + ), + ) + with pytest.raises(NotImplementedError): + get_stream_descriptor(message) diff --git a/airbyte-cdk/python/unit_tests/utils/test_schema_inferrer.py b/airbyte-cdk/python/unit_tests/utils/test_schema_inferrer.py index 3666d89c9f66a..6a1943f0279b9 100644 --- a/airbyte-cdk/python/unit_tests/utils/test_schema_inferrer.py +++ b/airbyte-cdk/python/unit_tests/utils/test_schema_inferrer.py @@ -6,7 +6,7 @@ import pytest from airbyte_cdk.models.airbyte_protocol import AirbyteRecordMessage -from airbyte_cdk.utils.schema_inferrer import SchemaInferrer +from airbyte_cdk.utils.schema_inferrer import SchemaInferrer, SchemaValidationException NOW = 1234567 @@ -19,7 +19,7 @@ {"stream": "my_stream", "data": {"field_A": "abc"}}, {"stream": "my_stream", "data": {"field_A": "def"}}, ], - {"my_stream": {"field_A": {"type": "string"}}}, + {"my_stream": {"field_A": {"type": ["string", "null"]}}}, id="test_basic", ), pytest.param( @@ -27,7 +27,7 @@ {"stream": "my_stream", "data": {"field_A": 1.0}}, {"stream": "my_stream", "data": {"field_A": "abc"}}, ], - {"my_stream": {"field_A": {"type": ["number", "string"]}}}, + {"my_stream": {"field_A": {"type": ["number", "string", "null"]}}}, id="test_deriving_schema_refine", ), pytest.param( @@ -38,10 +38,10 @@ { "my_stream": { "obj": { - "type": "object", + "type": ["object", "null"], "properties": { - "data": {"type": "array", "items": {"type": "number"}}, - "other_key": {"type": "string"}, + "data": {"type": ["array", "null"], "items": {"type": ["number", "null"]}}, + "other_key": {"type": ["string", "null"]}, }, } } @@ -53,7 +53,7 @@ {"stream": "my_stream", "data": {"field_A": 1}}, {"stream": "my_stream", "data": {"field_A": 2}}, ], - {"my_stream": {"field_A": {"type": "number"}}}, + {"my_stream": {"field_A": {"type": ["number", "null"]}}}, id="test_integer_number", ), pytest.param( @@ -68,7 +68,7 @@ {"stream": "my_stream", "data": {"field_A": None}}, {"stream": "my_stream", "data": {"field_A": "abc"}}, ], - {"my_stream": {"field_A": {"type": ["null", "string"]}}}, + {"my_stream": {"field_A": {"type": ["string", "null"]}}}, id="test_null_optional", ), pytest.param( @@ -76,7 +76,7 @@ {"stream": "my_stream", "data": {"field_A": None}}, {"stream": "my_stream", "data": {"field_A": {"nested": "abc"}}}, ], - {"my_stream": {"field_A": {"type": ["object", "null"], "properties": {"nested": {"type": "string"}}}}}, + {"my_stream": {"field_A": {"type": ["object", "null"], "properties": {"nested": {"type": ["string", "null"]}}}}}, id="test_any_of", ), pytest.param( @@ -84,7 +84,7 @@ {"stream": "my_stream", "data": {"field_A": None}}, {"stream": "my_stream", "data": {"field_A": {"nested": "abc", "nully": None}}}, ], - {"my_stream": {"field_A": {"type": ["object", "null"], "properties": {"nested": {"type": "string"}}}}}, + {"my_stream": {"field_A": {"type": ["object", "null"], "properties": {"nested": {"type": ["string", "null"]}}}}}, id="test_any_of_with_null", ), pytest.param( @@ -97,7 +97,7 @@ "my_stream": { "field_A": { "type": ["object", "null"], - "properties": {"nested": {"type": "string"}, "nully": {"type": ["null", "string"]}}, + "properties": {"nested": {"type": ["string", "null"]}, "nully": {"type": ["string", "null"]}}, } } }, @@ -113,7 +113,7 @@ "my_stream": { "field_A": { "type": ["object", "null"], - "properties": {"nested": {"type": "string"}, "nully": {"type": ["null", "string"]}}, + "properties": {"nested": {"type": ["string", "null"]}, "nully": {"type": ["string", "null"]}}, } } }, @@ -123,7 +123,7 @@ [ {"stream": "my_stream", "data": {"field_A": "abc", "nested": {"field_B": None}}}, ], - {"my_stream": {"field_A": {"type": "string"}, "nested": {"type": "object", "properties": {}}}}, + {"my_stream": {"field_A": {"type": ["string", "null"]}, "nested": {"type": ["object", "null"], "properties": {}}}}, id="test_nested_null", ), pytest.param( @@ -132,8 +132,8 @@ ], { "my_stream": { - "field_A": {"type": "string"}, - "nested": {"type": "array", "items": {"type": "object", "properties": {"field_C": {"type": "string"}}}}, + "field_A": {"type": ["string", "null"]}, + "nested": {"type": ["array", "null"], "items": {"type": ["object", "null"], "properties": {"field_C": {"type": ["string", "null"]}}}}, } }, id="test_array_nested_null", @@ -145,8 +145,8 @@ ], { "my_stream": { - "field_A": {"type": "string"}, - "nested": {"type": ["array", "null"], "items": {"type": "object", "properties": {"field_C": {"type": "string"}}}}, + "field_A": {"type": ["string", "null"]}, + "nested": {"type": ["array", "null"], "items": {"type": ["object", "null"], "properties": {"field_C": {"type": ["string", "null"]}}}}, } }, id="test_array_top_level_null", @@ -156,7 +156,7 @@ {"stream": "my_stream", "data": {"field_A": None}}, {"stream": "my_stream", "data": {"field_A": "abc"}}, ], - {"my_stream": {"field_A": {"type": ["null", "string"]}}}, + {"my_stream": {"field_A": {"type": ["string", "null"]}}}, id="test_null_string", ), ], @@ -167,36 +167,127 @@ def test_schema_derivation(input_records: List, expected_schemas: Mapping): inferrer.accumulate(AirbyteRecordMessage(stream=record["stream"], data=record["data"], emitted_at=NOW)) for stream_name, expected_schema in expected_schemas.items(): - assert inferrer.get_inferred_schemas()[stream_name] == { + assert inferrer.get_stream_schema(stream_name) == { "$schema": "http://json-schema.org/schema#", "type": "object", "properties": expected_schema, } -def test_deriving_schema_multiple_streams(): - inferrer = SchemaInferrer() - inferrer.accumulate(AirbyteRecordMessage(stream="my_stream", data={"field_A": 1.0}, emitted_at=NOW)) - inferrer.accumulate(AirbyteRecordMessage(stream="my_stream2", data={"field_A": "abc"}, emitted_at=NOW)) - inferred_schemas = inferrer.get_inferred_schemas() - assert inferred_schemas["my_stream"] == { - "$schema": "http://json-schema.org/schema#", - "type": "object", - "properties": {"field_A": {"type": "number"}}, - } - assert inferred_schemas["my_stream2"] == { - "$schema": "http://json-schema.org/schema#", - "type": "object", - "properties": {"field_A": {"type": "string"}}, - } - - -def test_get_individual_schema(): - inferrer = SchemaInferrer() - inferrer.accumulate(AirbyteRecordMessage(stream="my_stream", data={"field_A": 1.0}, emitted_at=NOW)) - assert inferrer.get_stream_schema("my_stream") == { - "$schema": "http://json-schema.org/schema#", - "type": "object", - "properties": {"field_A": {"type": "number"}}, - } - assert inferrer.get_stream_schema("another_stream") is None +_STREAM_NAME = "a stream name" +_ANY_VALUE = "any value" +_IS_PK = True +_IS_CURSOR_FIELD = True + + +def _create_inferrer_with_required_field(is_pk: bool, field: List[List[str]]) -> SchemaInferrer: + if is_pk: + return SchemaInferrer(field) + return SchemaInferrer([[]], field) + + +@pytest.mark.parametrize( + "is_pk", + [ + pytest.param(_IS_PK, id="required_field_is_pk"), + pytest.param(_IS_CURSOR_FIELD, id="required_field_is_cursor_field"), + ] +) +def test_field_is_on_root(is_pk: bool): + inferrer = _create_inferrer_with_required_field(is_pk, [["property"]]) + + inferrer.accumulate(AirbyteRecordMessage(stream=_STREAM_NAME, data={"property": _ANY_VALUE}, emitted_at=NOW)) + + assert inferrer.get_stream_schema(_STREAM_NAME)["required"] == ["property"] + assert inferrer.get_stream_schema(_STREAM_NAME)["properties"]["property"]["type"] == "string" + + +@pytest.mark.parametrize( + "is_pk", + [ + pytest.param(_IS_PK, id="required_field_is_pk"), + pytest.param(_IS_CURSOR_FIELD, id="required_field_is_cursor_field"), + ] +) +def test_field_is_nested(is_pk: bool): + inferrer = _create_inferrer_with_required_field(is_pk, [["property", "nested_property"]]) + + inferrer.accumulate(AirbyteRecordMessage(stream=_STREAM_NAME, data={"property": {"nested_property": _ANY_VALUE}}, emitted_at=NOW)) + + assert inferrer.get_stream_schema(_STREAM_NAME)["required"] == ["property"] + assert inferrer.get_stream_schema(_STREAM_NAME)["properties"]["property"]["type"] == "object" + assert inferrer.get_stream_schema(_STREAM_NAME)["properties"]["property"]["required"] == ["nested_property"] + + +@pytest.mark.parametrize( + "is_pk", + [ + pytest.param(_IS_PK, id="required_field_is_pk"), + pytest.param(_IS_CURSOR_FIELD, id="required_field_is_cursor_field"), + ] +) +def test_field_is_composite(is_pk: bool): + inferrer = _create_inferrer_with_required_field(is_pk, [["property 1"], ["property 2"]]) + inferrer.accumulate(AirbyteRecordMessage(stream=_STREAM_NAME, data={"property 1": _ANY_VALUE, "property 2": _ANY_VALUE}, emitted_at=NOW)) + assert inferrer.get_stream_schema(_STREAM_NAME)["required"] == ["property 1", "property 2"] + + +@pytest.mark.parametrize( + "is_pk", + [ + pytest.param(_IS_PK, id="required_field_is_pk"), + pytest.param(_IS_CURSOR_FIELD, id="required_field_is_cursor_field"), + ] +) +def test_field_is_composite_and_nested(is_pk: bool): + inferrer = _create_inferrer_with_required_field(is_pk, [["property 1", "nested"], ["property 2"]]) + + inferrer.accumulate(AirbyteRecordMessage(stream=_STREAM_NAME, data={"property 1": {"nested": _ANY_VALUE}, "property 2": _ANY_VALUE}, emitted_at=NOW)) + + assert inferrer.get_stream_schema(_STREAM_NAME)["required"] == ["property 1", "property 2"] + assert inferrer.get_stream_schema(_STREAM_NAME)["properties"]["property 1"]["type"] == "object" + assert inferrer.get_stream_schema(_STREAM_NAME)["properties"]["property 2"]["type"] == "string" + assert inferrer.get_stream_schema(_STREAM_NAME)["properties"]["property 1"]["required"] == ["nested"] + assert inferrer.get_stream_schema(_STREAM_NAME)["properties"]["property 1"]["properties"]["nested"]["type"] == "string" + + +def test_given_pk_does_not_exist_when_get_inferred_schemas_then_raise_error(): + inferrer = SchemaInferrer([["pk does not exist"]]) + inferrer.accumulate(AirbyteRecordMessage(stream=_STREAM_NAME, data={"id": _ANY_VALUE}, emitted_at=NOW)) + + with pytest.raises(SchemaValidationException) as exception: + inferrer.get_stream_schema(_STREAM_NAME) + + assert len(exception.value.validation_errors) == 1 + + +def test_given_pk_path_is_partially_valid_when_get_inferred_schemas_then_validation_error_mentions_where_the_issue_is(): + inferrer = SchemaInferrer([["id", "nested pk that does not exist"]]) + inferrer.accumulate(AirbyteRecordMessage(stream=_STREAM_NAME, data={"id": _ANY_VALUE}, emitted_at=NOW)) + + with pytest.raises(SchemaValidationException) as exception: + inferrer.get_stream_schema(_STREAM_NAME) + + assert len(exception.value.validation_errors) == 1 + assert "Path ['id']" in exception.value.validation_errors[0] + + +def test_given_composite_pk_but_only_one_path_valid_when_get_inferred_schemas_then_valid_path_is_required(): + inferrer = SchemaInferrer([["id 1"], ["id 2"]]) + inferrer.accumulate(AirbyteRecordMessage(stream=_STREAM_NAME, data={"id 1": _ANY_VALUE}, emitted_at=NOW)) + + with pytest.raises(SchemaValidationException) as exception: + inferrer.get_stream_schema(_STREAM_NAME) + + assert exception.value.schema["required"] == ["id 1"] + + +def test_given_composite_pk_but_only_one_path_valid_when_get_inferred_schemas_then_validation_error_mentions_where_the_issue_is(): + inferrer = SchemaInferrer([["id 1"], ["id 2"]]) + inferrer.accumulate(AirbyteRecordMessage(stream=_STREAM_NAME, data={"id 1": _ANY_VALUE}, emitted_at=NOW)) + + with pytest.raises(SchemaValidationException) as exception: + inferrer.get_stream_schema(_STREAM_NAME) + + assert len(exception.value.validation_errors) == 1 + assert "id 2" in exception.value.validation_errors[0] diff --git a/airbyte-cdk/python/unit_tests/utils/test_stream_status_utils.py b/airbyte-cdk/python/unit_tests/utils/test_stream_status_utils.py index 5d41ab7a57007..4862a1e0118e3 100644 --- a/airbyte-cdk/python/unit_tests/utils/test_stream_status_utils.py +++ b/airbyte-cdk/python/unit_tests/utils/test_stream_status_utils.py @@ -13,7 +13,7 @@ def test_started_as_message(): stream_status = AirbyteStreamStatus.STARTED airbyte_message = stream_status_as_airbyte_message(stream, stream_status) - assert type(airbyte_message) == AirbyteMessage + assert isinstance(airbyte_message, AirbyteMessage) assert airbyte_message.type == MessageType.TRACE assert airbyte_message.trace.type == TraceType.STREAM_STATUS assert airbyte_message.trace.emitted_at > 0 @@ -26,7 +26,7 @@ def test_running_as_message(): stream_status = AirbyteStreamStatus.RUNNING airbyte_message = stream_status_as_airbyte_message(stream, stream_status) - assert type(airbyte_message) == AirbyteMessage + assert isinstance(airbyte_message, AirbyteMessage) assert airbyte_message.type == MessageType.TRACE assert airbyte_message.trace.type == TraceType.STREAM_STATUS assert airbyte_message.trace.emitted_at > 0 @@ -39,7 +39,7 @@ def test_complete_as_message(): stream_status = AirbyteStreamStatus.COMPLETE airbyte_message = stream_status_as_airbyte_message(stream, stream_status) - assert type(airbyte_message) == AirbyteMessage + assert isinstance(airbyte_message, AirbyteMessage) assert airbyte_message.type == MessageType.TRACE assert airbyte_message.trace.type == TraceType.STREAM_STATUS assert airbyte_message.trace.emitted_at > 0 @@ -52,7 +52,7 @@ def test_incomplete_failed_as_message(): stream_status = AirbyteStreamStatus.INCOMPLETE airbyte_message = stream_status_as_airbyte_message(stream, stream_status) - assert type(airbyte_message) == AirbyteMessage + assert isinstance(airbyte_message, AirbyteMessage) assert airbyte_message.type == MessageType.TRACE assert airbyte_message.trace.type == TraceType.STREAM_STATUS assert airbyte_message.trace.emitted_at > 0 diff --git a/airbyte-cdk/python/unit_tests/utils/test_traced_exception.py b/airbyte-cdk/python/unit_tests/utils/test_traced_exception.py index bfe55952f993f..e0d3b9a50353a 100644 --- a/airbyte-cdk/python/unit_tests/utils/test_traced_exception.py +++ b/airbyte-cdk/python/unit_tests/utils/test_traced_exception.py @@ -15,6 +15,11 @@ ) from airbyte_cdk.models.airbyte_protocol import Type as MessageType from airbyte_cdk.utils.traced_exception import AirbyteTracedException +from airbyte_protocol.models import StreamDescriptor + +_AN_EXCEPTION = ValueError("An exception") +_A_STREAM_DESCRIPTOR = StreamDescriptor(name="a_stream") +_ANOTHER_STREAM_DESCRIPTOR = StreamDescriptor(name="another_stream") @pytest.fixture @@ -37,7 +42,7 @@ def test_exception_as_airbyte_message(): traced_exc = AirbyteTracedException("an internal message") airbyte_message = traced_exc.as_airbyte_message() - assert type(airbyte_message) == AirbyteMessage + assert isinstance(airbyte_message, AirbyteMessage) assert airbyte_message.type == MessageType.TRACE assert airbyte_message.trace.type == TraceType.ERROR assert airbyte_message.trace.emitted_at > 0 @@ -51,7 +56,7 @@ def test_existing_exception_as_airbyte_message(raised_exception): traced_exc = AirbyteTracedException.from_exception(raised_exception) airbyte_message = traced_exc.as_airbyte_message() - assert type(airbyte_message) == AirbyteMessage + assert isinstance(airbyte_message, AirbyteMessage) assert airbyte_message.type == MessageType.TRACE assert airbyte_message.trace.type == TraceType.ERROR assert airbyte_message.trace.error.message == "Something went wrong in the connector. See the logs for more details." @@ -66,7 +71,7 @@ def test_config_error_as_connection_status_message(): traced_exc = AirbyteTracedException("an internal message", message="Config validation error", failure_type=FailureType.config_error) airbyte_message = traced_exc.as_connection_status_message() - assert type(airbyte_message) == AirbyteMessage + assert isinstance(airbyte_message, AirbyteMessage) assert airbyte_message.type == MessageType.CONNECTION_STATUS assert airbyte_message.connectionStatus.status == Status.FAILED assert airbyte_message.connectionStatus.message == "Config validation error" @@ -105,3 +110,27 @@ def test_emit_message(capsys): printed_message.trace.emitted_at = 0.0 assert printed_message == expected_message + + +def test_given_both_init_and_as_message_with_stream_descriptor_when_as_airbyte_message_use_init_stream_descriptor() -> None: + traced_exc = AirbyteTracedException(stream_descriptor=_A_STREAM_DESCRIPTOR) + message = traced_exc.as_airbyte_message(stream_descriptor=_ANOTHER_STREAM_DESCRIPTOR) + assert message.trace.error.stream_descriptor == _A_STREAM_DESCRIPTOR + + +def test_given_both_init_and_as_sanitized_airbyte_message_with_stream_descriptor_when_as_airbyte_message_use_init_stream_descriptor() -> None: + traced_exc = AirbyteTracedException(stream_descriptor=_A_STREAM_DESCRIPTOR) + message = traced_exc.as_sanitized_airbyte_message(stream_descriptor=_ANOTHER_STREAM_DESCRIPTOR) + assert message.trace.error.stream_descriptor == _A_STREAM_DESCRIPTOR + + +def test_given_both_from_exception_and_as_message_with_stream_descriptor_when_as_airbyte_message_use_init_stream_descriptor() -> None: + traced_exc = AirbyteTracedException.from_exception(_AN_EXCEPTION, stream_descriptor=_A_STREAM_DESCRIPTOR) + message = traced_exc.as_airbyte_message(stream_descriptor=_ANOTHER_STREAM_DESCRIPTOR) + assert message.trace.error.stream_descriptor == _A_STREAM_DESCRIPTOR + + +def test_given_both_from_exception_and_as_sanitized_airbyte_message_with_stream_descriptor_when_as_airbyte_message_use_init_stream_descriptor() -> None: + traced_exc = AirbyteTracedException.from_exception(_AN_EXCEPTION, stream_descriptor=_A_STREAM_DESCRIPTOR) + message = traced_exc.as_sanitized_airbyte_message(stream_descriptor=_ANOTHER_STREAM_DESCRIPTOR) + assert message.trace.error.stream_descriptor == _A_STREAM_DESCRIPTOR diff --git a/airbyte-ci/README.md b/airbyte-ci/README.md index b7985d1c95851..5bf7bc76cb7f0 100644 --- a/airbyte-ci/README.md +++ b/airbyte-ci/README.md @@ -2,4 +2,16 @@ This folder is a collection of systems, tools and scripts that are used to run Airbyte's CI/CD -The installation instructions for the `airbyte-ci` CLI tool cal be found here [airbyte-ci/connectors/pipelines](connectors/pipelines/README.md) \ No newline at end of file +The installation instructions for the `airbyte-ci` CLI tool cal be found here +[airbyte-ci/connectors/pipelines](connectors/pipelines/README.md) + +## Tools + +| Directory | Description | +| -------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------- | +| [`base_images`](connectors/base_images) | A set of tools to build and publish Airbyte base connector images. | +| [`ci_credentials`](connectors/ci_credentials) | A CLI tool to fetch connector secrets from GCP Secrets Manager. | +| [`connector_ops`](connectors/connector_ops) | A python package with utils reused in internal packages. | +| [`connectors_qa`](connectors/connectors_qa/) | A tool to verify connectors have sounds assets and metadata. | +| [`metadata_service`](connectors/metadata_service/) | Tools to generate connector metadata and registry. | +| [`pipelines`](connectors/pipelines/) | Airbyte CI pipelines, including formatting, linting, building, testing connectors, etc. Connector acceptance tests live here. | diff --git a/airbyte-ci/connectors/README.md b/airbyte-ci/connectors/README.md index 2b48853b0a44e..0af1e828f1b82 100644 --- a/airbyte-ci/connectors/README.md +++ b/airbyte-ci/connectors/README.md @@ -1,3 +1,6 @@ # Airbyte Connectors CI -This folder is a collection of systems, tools and scripts that are used to run CI/CD systems specific to our connectors. \ No newline at end of file +This folder is a collection of systems, tools and scripts that are used to run CI/CD systems +specific to our connectors. + +For the list of tools and subfolders, please see [README in `airbyte-ci`](../README.md). diff --git a/airbyte-ci/connectors/base_images/poetry.lock b/airbyte-ci/connectors/base_images/poetry.lock index 44a8b475dca2a..3fc32672b1b90 100644 --- a/airbyte-ci/connectors/base_images/poetry.lock +++ b/airbyte-ci/connectors/base_images/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "ansicon" @@ -13,13 +13,13 @@ files = [ [[package]] name = "anyio" -version = "4.2.0" +version = "4.3.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.8" files = [ - {file = "anyio-4.2.0-py3-none-any.whl", hash = "sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee"}, - {file = "anyio-4.2.0.tar.gz", hash = "sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f"}, + {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, + {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, ] [package.dependencies] @@ -33,16 +33,6 @@ doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphin test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] trio = ["trio (>=0.23)"] -[[package]] -name = "atomicwrites" -version = "1.4.1" -description = "Atomic file writes." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, -] - [[package]] name = "attrs" version = "23.2.0" @@ -75,20 +65,20 @@ files = [ [[package]] name = "beartype" -version = "0.16.4" +version = "0.17.1" description = "Unbearably fast runtime type checking in pure Python." optional = false python-versions = ">=3.8.0" files = [ - {file = "beartype-0.16.4-py3-none-any.whl", hash = "sha256:64865952f9dff1e17f22684b3c7286fc79754553b47eaefeb1286224ae8c1bd9"}, - {file = "beartype-0.16.4.tar.gz", hash = "sha256:1ada89cf2d6eb30eb6e156eed2eb5493357782937910d74380918e53c2eae0bf"}, + {file = "beartype-0.17.1-py3-none-any.whl", hash = "sha256:583deb076e312f5acc2e2928706af2facab1f4282be775ee619e6f42c290f423"}, + {file = "beartype-0.17.1.tar.gz", hash = "sha256:001df1ce51c76f0a21c2183215b26254b667fd8b688a6cbe8f013907cdaaf9b3"}, ] [package.extras] all = ["typing-extensions (>=3.10.0.0)"] -dev = ["autoapi (>=0.9.0)", "coverage (>=5.5)", "mypy (>=0.800)", "numpy", "pandera", "pydata-sphinx-theme (<=0.7.2)", "pytest (>=4.0.0)", "sphinx", "sphinx (>=4.2.0,<6.0.0)", "sphinxext-opengraph (>=0.7.5)", "tox (>=3.20.1)", "typing-extensions (>=3.10.0.0)"] +dev = ["autoapi (>=0.9.0)", "coverage (>=5.5)", "equinox", "mypy (>=0.800)", "numpy", "pandera", "pydata-sphinx-theme (<=0.7.2)", "pytest (>=4.0.0)", "sphinx", "sphinx (>=4.2.0,<6.0.0)", "sphinxext-opengraph (>=0.7.5)", "tox (>=3.20.1)", "typing-extensions (>=3.10.0.0)"] doc-rtd = ["autoapi (>=0.9.0)", "pydata-sphinx-theme (<=0.7.2)", "sphinx (>=4.2.0,<6.0.0)", "sphinxext-opengraph (>=0.7.5)"] -test-tox = ["mypy (>=0.800)", "numpy", "pandera", "pytest (>=4.0.0)", "sphinx", "typing-extensions (>=3.10.0.0)"] +test-tox = ["equinox", "mypy (>=0.800)", "numpy", "pandera", "pytest (>=4.0.0)", "sphinx", "typing-extensions (>=3.10.0.0)"] test-tox-coverage = ["coverage (>=5.5)"] [[package]] @@ -109,13 +99,13 @@ wcwidth = ">=0.1.4" [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -145,13 +135,13 @@ ujson = ["ujson (>=5.7.0)"] [[package]] name = "certifi" -version = "2023.11.17" +version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, - {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] [[package]] @@ -329,8 +319,9 @@ develop = false [package.dependencies] click = "^8.1.3" common_utils = {path = "../common_utils", develop = true} +cryptography = ">=42.0" pyyaml = "^6.0" -requests = "^2.28.2" +requests = "^2.31" [package.source] type = "directory" @@ -371,9 +362,8 @@ files = [] develop = true [package.dependencies] -cryptography = "^3.4.7" -pyjwt = "^2.1.0" -requests = "^2.28.2" +pyjwt = "^2.8.0" +requests = "^2.31.0" [package.source] type = "directory" @@ -381,7 +371,7 @@ url = "../common_utils" [[package]] name = "connector-ops" -version = "0.3.3" +version = "0.3.4" description = "Packaged maintained by the connector operations team to perform CI for connectors" optional = false python-versions = "^3.10" @@ -395,10 +385,10 @@ GitPython = "^3.1.29" google-cloud-storage = "^2.8.0" pandas = "^2.0.3" pydantic = "^1.9" -pydash = "^7.0.4" +pydash = "^6.0.2" PyGithub = "^1.58.0" PyYAML = "^6.0" -requests = "^2.28.2" +requests = "^2.31" rich = "^13.0.0" simpleeval = "^0.9.13" @@ -408,63 +398,63 @@ url = "../connector_ops" [[package]] name = "coverage" -version = "7.4.0" +version = "7.4.4" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a"}, - {file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43"}, - {file = "coverage-7.4.0-cp310-cp310-win32.whl", hash = "sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451"}, - {file = "coverage-7.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137"}, - {file = "coverage-7.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca"}, - {file = "coverage-7.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26"}, - {file = "coverage-7.4.0-cp311-cp311-win32.whl", hash = "sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614"}, - {file = "coverage-7.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590"}, - {file = "coverage-7.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143"}, - {file = "coverage-7.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa"}, - {file = "coverage-7.4.0-cp312-cp312-win32.whl", hash = "sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450"}, - {file = "coverage-7.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0"}, - {file = "coverage-7.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e"}, - {file = "coverage-7.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105"}, - {file = "coverage-7.4.0-cp38-cp38-win32.whl", hash = "sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2"}, - {file = "coverage-7.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555"}, - {file = "coverage-7.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42"}, - {file = "coverage-7.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f"}, - {file = "coverage-7.4.0-cp39-cp39-win32.whl", hash = "sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932"}, - {file = "coverage-7.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e"}, - {file = "coverage-7.4.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6"}, - {file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"}, + {file = "coverage-7.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0be5efd5127542ef31f165de269f77560d6cdef525fffa446de6f7e9186cfb2"}, + {file = "coverage-7.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ccd341521be3d1b3daeb41960ae94a5e87abe2f46f17224ba5d6f2b8398016cf"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fa497a8ab37784fbb20ab699c246053ac294d13fc7eb40ec007a5043ec91f8"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1a93009cb80730c9bca5d6d4665494b725b6e8e157c1cb7f2db5b4b122ea562"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:690db6517f09336559dc0b5f55342df62370a48f5469fabf502db2c6d1cffcd2"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:09c3255458533cb76ef55da8cc49ffab9e33f083739c8bd4f58e79fecfe288f7"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ce1415194b4a6bd0cdcc3a1dfbf58b63f910dcb7330fe15bdff542c56949f87"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b91cbc4b195444e7e258ba27ac33769c41b94967919f10037e6355e998af255c"}, + {file = "coverage-7.4.4-cp310-cp310-win32.whl", hash = "sha256:598825b51b81c808cb6f078dcb972f96af96b078faa47af7dfcdf282835baa8d"}, + {file = "coverage-7.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:09ef9199ed6653989ebbcaacc9b62b514bb63ea2f90256e71fea3ed74bd8ff6f"}, + {file = "coverage-7.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f9f50e7ef2a71e2fae92774c99170eb8304e3fdf9c8c3c7ae9bab3e7229c5cf"}, + {file = "coverage-7.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:623512f8ba53c422fcfb2ce68362c97945095b864cda94a92edbaf5994201083"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0513b9508b93da4e1716744ef6ebc507aff016ba115ffe8ecff744d1322a7b63"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40209e141059b9370a2657c9b15607815359ab3ef9918f0196b6fccce8d3230f"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a2b2b78c78293782fd3767d53e6474582f62443d0504b1554370bde86cc8227"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:73bfb9c09951125d06ee473bed216e2c3742f530fc5acc1383883125de76d9cd"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f384c3cc76aeedce208643697fb3e8437604b512255de6d18dae3f27655a384"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:54eb8d1bf7cacfbf2a3186019bcf01d11c666bd495ed18717162f7eb1e9dd00b"}, + {file = "coverage-7.4.4-cp311-cp311-win32.whl", hash = "sha256:cac99918c7bba15302a2d81f0312c08054a3359eaa1929c7e4b26ebe41e9b286"}, + {file = "coverage-7.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:b14706df8b2de49869ae03a5ccbc211f4041750cd4a66f698df89d44f4bd30ec"}, + {file = "coverage-7.4.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:201bef2eea65e0e9c56343115ba3814e896afe6d36ffd37bab783261db430f76"}, + {file = "coverage-7.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:41c9c5f3de16b903b610d09650e5e27adbfa7f500302718c9ffd1c12cf9d6818"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d898fe162d26929b5960e4e138651f7427048e72c853607f2b200909794ed978"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ea79bb50e805cd6ac058dfa3b5c8f6c040cb87fe83de10845857f5535d1db70"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce4b94265ca988c3f8e479e741693d143026632672e3ff924f25fab50518dd51"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:00838a35b882694afda09f85e469c96367daa3f3f2b097d846a7216993d37f4c"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fdfafb32984684eb03c2d83e1e51f64f0906b11e64482df3c5db936ce3839d48"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:69eb372f7e2ece89f14751fbcbe470295d73ed41ecd37ca36ed2eb47512a6ab9"}, + {file = "coverage-7.4.4-cp312-cp312-win32.whl", hash = "sha256:137eb07173141545e07403cca94ab625cc1cc6bc4c1e97b6e3846270e7e1fea0"}, + {file = "coverage-7.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:d71eec7d83298f1af3326ce0ff1d0ea83c7cb98f72b577097f9083b20bdaf05e"}, + {file = "coverage-7.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d5ae728ff3b5401cc320d792866987e7e7e880e6ebd24433b70a33b643bb0384"}, + {file = "coverage-7.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc4f1358cb0c78edef3ed237ef2c86056206bb8d9140e73b6b89fbcfcbdd40e1"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8130a2aa2acb8788e0b56938786c33c7c98562697bf9f4c7d6e8e5e3a0501e4a"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf271892d13e43bc2b51e6908ec9a6a5094a4df1d8af0bfc360088ee6c684409"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4cdc86d54b5da0df6d3d3a2f0b710949286094c3a6700c21e9015932b81447e"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ae71e7ddb7a413dd60052e90528f2f65270aad4b509563af6d03d53e979feafd"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:38dd60d7bf242c4ed5b38e094baf6401faa114fc09e9e6632374388a404f98e7"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa5b1c1bfc28384f1f53b69a023d789f72b2e0ab1b3787aae16992a7ca21056c"}, + {file = "coverage-7.4.4-cp38-cp38-win32.whl", hash = "sha256:dfa8fe35a0bb90382837b238fff375de15f0dcdb9ae68ff85f7a63649c98527e"}, + {file = "coverage-7.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:b2991665420a803495e0b90a79233c1433d6ed77ef282e8e152a324bbbc5e0c8"}, + {file = "coverage-7.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b799445b9f7ee8bf299cfaed6f5b226c0037b74886a4e11515e569b36fe310d"}, + {file = "coverage-7.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b4d33f418f46362995f1e9d4f3a35a1b6322cb959c31d88ae56b0298e1c22357"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aadacf9a2f407a4688d700e4ebab33a7e2e408f2ca04dbf4aef17585389eff3e"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c95949560050d04d46b919301826525597f07b33beba6187d04fa64d47ac82e"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff7687ca3d7028d8a5f0ebae95a6e4827c5616b31a4ee1192bdfde697db110d4"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5fc1de20b2d4a061b3df27ab9b7c7111e9a710f10dc2b84d33a4ab25065994ec"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c74880fc64d4958159fbd537a091d2a585448a8f8508bf248d72112723974cbd"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:742a76a12aa45b44d236815d282b03cfb1de3b4323f3e4ec933acfae08e54ade"}, + {file = "coverage-7.4.4-cp39-cp39-win32.whl", hash = "sha256:d89d7b2974cae412400e88f35d86af72208e1ede1a541954af5d944a8ba46c57"}, + {file = "coverage-7.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:9ca28a302acb19b6af89e90f33ee3e1906961f94b54ea37de6737b7ca9d8827c"}, + {file = "coverage-7.4.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:b2c5edc4ac10a7ef6605a966c58929ec6c1bd0917fb8c15cb3363f65aa40e677"}, + {file = "coverage-7.4.4.tar.gz", hash = "sha256:c901df83d097649e257e803be22592aedfd5182f07b3cc87d640bbb9afd50f49"}, ] [package.dependencies] @@ -475,42 +465,57 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "3.4.8" +version = "42.0.5" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "cryptography-3.4.8-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:a00cf305f07b26c351d8d4e1af84ad7501eca8a342dedf24a7acb0e7b7406e14"}, - {file = "cryptography-3.4.8-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:f44d141b8c4ea5eb4dbc9b3ad992d45580c1d22bf5e24363f2fbf50c2d7ae8a7"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0a7dcbcd3f1913f664aca35d47c1331fce738d44ec34b7be8b9d332151b0b01e"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34dae04a0dce5730d8eb7894eab617d8a70d0c97da76b905de9efb7128ad7085"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1eb7bb0df6f6f583dd8e054689def236255161ebbcf62b226454ab9ec663746b"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:9965c46c674ba8cc572bc09a03f4c649292ee73e1b683adb1ce81e82e9a6a0fb"}, - {file = "cryptography-3.4.8-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:3c4129fc3fdc0fa8e40861b5ac0c673315b3c902bbdc05fc176764815b43dd1d"}, - {file = "cryptography-3.4.8-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:695104a9223a7239d155d7627ad912953b540929ef97ae0c34c7b8bf30857e89"}, - {file = "cryptography-3.4.8-cp36-abi3-win32.whl", hash = "sha256:21ca464b3a4b8d8e86ba0ee5045e103a1fcfac3b39319727bc0fc58c09c6aff7"}, - {file = "cryptography-3.4.8-cp36-abi3-win_amd64.whl", hash = "sha256:3520667fda779eb788ea00080124875be18f2d8f0848ec00733c0ec3bb8219fc"}, - {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d2a6e5ef66503da51d2110edf6c403dc6b494cc0082f85db12f54e9c5d4c3ec5"}, - {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a305600e7a6b7b855cd798e00278161b681ad6e9b7eca94c721d5f588ab212af"}, - {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:3fa3a7ccf96e826affdf1a0a9432be74dc73423125c8f96a909e3835a5ef194a"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:d9ec0e67a14f9d1d48dd87a2531009a9b251c02ea42851c060b25c782516ff06"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5b0fbfae7ff7febdb74b574055c7466da334a5371f253732d7e2e7525d570498"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94fff993ee9bc1b2440d3b7243d488c6a3d9724cc2b09cdb297f6a886d040ef7"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:8695456444f277af73a4877db9fc979849cd3ee74c198d04fc0776ebc3db52b9"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:cd65b60cfe004790c795cc35f272e41a3df4631e2fb6b35aa7ac6ef2859d554e"}, - {file = "cryptography-3.4.8.tar.gz", hash = "sha256:94cc5ed4ceaefcbe5bf38c8fba6a21fc1d365bb8fb826ea1688e3370b2e24a1c"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, + {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, + {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, + {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, + {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, + {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = "sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, + {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, ] [package.dependencies] -cffi = ">=1.12" +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] -docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] -docstest = ["doc8", "pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] -pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] -sdist = ["setuptools-rust (>=0.11.4)"] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] +nox = ["nox"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] +sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pytz"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] [[package]] name = "dagger-io" @@ -553,13 +558,13 @@ dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] [[package]] name = "editor" -version = "1.6.5" +version = "1.6.6" description = "🖋 Open the default text editor 🖋" optional = false python-versions = ">=3.8" files = [ - {file = "editor-1.6.5-py3-none-any.whl", hash = "sha256:53c26dd78333b50b8cdcf67748956afa75fabcb5bb25e96a00515504f58e49a8"}, - {file = "editor-1.6.5.tar.gz", hash = "sha256:5a8ad611d2a05de34994df3781605e26e63492f82f04c2e93abdd330eed6fa8d"}, + {file = "editor-1.6.6-py3-none-any.whl", hash = "sha256:e818e6913f26c2a81eadef503a2741d7cca7f235d20e217274a009ecd5a74abf"}, + {file = "editor-1.6.6.tar.gz", hash = "sha256:bb6989e872638cd119db9a4fce284cd8e13c553886a1c044c6b8d8a160c871f8"}, ] [package.dependencies] @@ -596,35 +601,36 @@ smmap = ">=3.0.1,<6" [[package]] name = "gitpython" -version = "3.1.41" +version = "3.1.42" description = "GitPython is a Python library used to interact with Git repositories" optional = false python-versions = ">=3.7" files = [ - {file = "GitPython-3.1.41-py3-none-any.whl", hash = "sha256:c36b6634d069b3f719610175020a9aed919421c87552185b085e04fbbdb10b7c"}, - {file = "GitPython-3.1.41.tar.gz", hash = "sha256:ed66e624884f76df22c8e16066d567aaa5a37d5b5fa19db2c6df6f7156db9048"}, + {file = "GitPython-3.1.42-py3-none-any.whl", hash = "sha256:1bf9cd7c9e7255f77778ea54359e54ac22a72a5b51288c457c881057b7bb9ecd"}, + {file = "GitPython-3.1.42.tar.gz", hash = "sha256:2d99869e0fef71a73cbd242528105af1d6c1b108c60dfabd994bf292f76c3ceb"}, ] [package.dependencies] gitdb = ">=4.0.1,<5" [package.extras] -test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "sumtypes"] +test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar"] [[package]] name = "google-api-core" -version = "2.15.0" +version = "2.18.0" description = "Google API client core library" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-core-2.15.0.tar.gz", hash = "sha256:abc978a72658f14a2df1e5e12532effe40f94f868f6e23d95133bd6abcca35ca"}, - {file = "google_api_core-2.15.0-py3-none-any.whl", hash = "sha256:2aa56d2be495551e66bbff7f729b790546f87d5c90e74781aa77233bcb395a8a"}, + {file = "google-api-core-2.18.0.tar.gz", hash = "sha256:62d97417bfc674d6cef251e5c4d639a9655e00c45528c4364fbfebb478ce72a9"}, + {file = "google_api_core-2.18.0-py3-none-any.whl", hash = "sha256:5a63aa102e0049abe85b5b88cb9409234c1f70afcda21ce1e40b285b9629c1d6"}, ] [package.dependencies] google-auth = ">=2.14.1,<3.0.dev0" googleapis-common-protos = ">=1.56.2,<2.0.dev0" +proto-plus = ">=1.22.3,<2.0.0dev" protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" requests = ">=2.18.0,<3.0.0.dev0" @@ -635,13 +641,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-auth" -version = "2.26.2" +version = "2.29.0" description = "Google Authentication Library" optional = false python-versions = ">=3.7" files = [ - {file = "google-auth-2.26.2.tar.gz", hash = "sha256:97327dbbf58cccb58fc5a1712bba403ae76668e64814eb30f7316f7e27126b81"}, - {file = "google_auth-2.26.2-py2.py3-none-any.whl", hash = "sha256:3f445c8ce9b61ed6459aad86d8ccdba4a9afed841b2d1451a11ef4db08957424"}, + {file = "google-auth-2.29.0.tar.gz", hash = "sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360"}, + {file = "google_auth-2.29.0-py2.py3-none-any.whl", hash = "sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415"}, ] [package.dependencies] @@ -676,18 +682,18 @@ grpc = ["grpcio (>=1.38.0,<2.0dev)", "grpcio-status (>=1.38.0,<2.0.dev0)"] [[package]] name = "google-cloud-storage" -version = "2.14.0" +version = "2.16.0" description = "Google Cloud Storage API client library" optional = false python-versions = ">=3.7" files = [ - {file = "google-cloud-storage-2.14.0.tar.gz", hash = "sha256:2d23fcf59b55e7b45336729c148bb1c464468c69d5efbaee30f7201dd90eb97e"}, - {file = "google_cloud_storage-2.14.0-py2.py3-none-any.whl", hash = "sha256:8641243bbf2a2042c16a6399551fbb13f062cbc9a2de38d6c0bb5426962e9dbd"}, + {file = "google-cloud-storage-2.16.0.tar.gz", hash = "sha256:dda485fa503710a828d01246bd16ce9db0823dc51bbca742ce96a6817d58669f"}, + {file = "google_cloud_storage-2.16.0-py2.py3-none-any.whl", hash = "sha256:91a06b96fb79cf9cdfb4e759f178ce11ea885c79938f89590344d079305f5852"}, ] [package.dependencies] -google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0dev" -google-auth = ">=2.23.3,<3.0dev" +google-api-core = ">=2.15.0,<3.0.0dev" +google-auth = ">=2.26.1,<3.0dev" google-cloud-core = ">=2.3.0,<3.0dev" google-crc32c = ">=1.0,<2.0dev" google-resumable-media = ">=2.6.0" @@ -796,13 +802,13 @@ requests = ["requests (>=2.18.0,<3.0.0dev)"] [[package]] name = "googleapis-common-protos" -version = "1.62.0" +version = "1.63.0" description = "Common protobufs used in Google APIs" optional = false python-versions = ">=3.7" files = [ - {file = "googleapis-common-protos-1.62.0.tar.gz", hash = "sha256:83f0ece9f94e5672cced82f592d2a5edf527a96ed1794f0bab36d5735c996277"}, - {file = "googleapis_common_protos-1.62.0-py2.py3-none-any.whl", hash = "sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07"}, + {file = "googleapis-common-protos-1.63.0.tar.gz", hash = "sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e"}, + {file = "googleapis_common_protos-1.63.0-py2.py3-none-any.whl", hash = "sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632"}, ] [package.dependencies] @@ -863,13 +869,13 @@ files = [ [[package]] name = "httpcore" -version = "1.0.2" +version = "1.0.5" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.8" files = [ - {file = "httpcore-1.0.2-py3-none-any.whl", hash = "sha256:096cc05bca73b8e459a1fc3dcf585148f63e534eae4339559c9b8a8d6399acc7"}, - {file = "httpcore-1.0.2.tar.gz", hash = "sha256:9fc092e4799b26174648e54b74ed5f683132a464e95643b226e00c2ed2fa6535"}, + {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, + {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, ] [package.dependencies] @@ -880,17 +886,17 @@ h11 = ">=0.13,<0.15" asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.23.0)"] +trio = ["trio (>=0.22.0,<0.26.0)"] [[package]] name = "httpx" -version = "0.26.0" +version = "0.27.0" description = "The next generation HTTP client." optional = false python-versions = ">=3.8" files = [ - {file = "httpx-0.26.0-py3-none-any.whl", hash = "sha256:8915f5a3627c4d47b73e8202457cb28f1266982d1159bd5779d86a80c0eab1cd"}, - {file = "httpx-0.26.0.tar.gz", hash = "sha256:451b55c30d5185ea6b23c2c793abf9bb237d2a7dfb901ced6ff69ad37ec1dfaf"}, + {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, + {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, ] [package.dependencies] @@ -930,13 +936,13 @@ files = [ [[package]] name = "inquirer" -version = "3.2.1" +version = "3.2.4" description = "Collection of common interactive command line user interfaces, based on Inquirer.js" optional = false python-versions = ">=3.8.1" files = [ - {file = "inquirer-3.2.1-py3-none-any.whl", hash = "sha256:e1a0a001b499633ca69d2ea64da712b449939e8fad8fa47caebc92b0ee212df4"}, - {file = "inquirer-3.2.1.tar.gz", hash = "sha256:d5ff9bb8cd07bd3f076eabad8ae338280886e93998ff10461975b768e3854fbc"}, + {file = "inquirer-3.2.4-py3-none-any.whl", hash = "sha256:273a4e4a4345ac1afdb17408d40fc8dccf3485db68203357919468561035a763"}, + {file = "inquirer-3.2.4.tar.gz", hash = "sha256:33b09efc1b742b9d687b540296a8b6a3f773399673321fcc2ab0eb4c109bf9b5"}, ] [package.dependencies] @@ -1001,71 +1007,71 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] name = "markupsafe" -version = "2.1.3" +version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, - {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] [[package]] @@ -1081,121 +1087,137 @@ files = [ [[package]] name = "multidict" -version = "6.0.4" +version = "6.0.5" description = "multidict implementation" optional = false python-versions = ">=3.7" files = [ - {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, - {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, - {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, - {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, - {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, - {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, - {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, - {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, - {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, - {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, - {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, - {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, - {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, - {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, - {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, + {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, + {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, + {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, + {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, + {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, + {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, + {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, + {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, + {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, + {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, + {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, + {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, + {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, + {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, + {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, ] [[package]] name = "mypy" -version = "1.8.0" +version = "1.9.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"}, - {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"}, - {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"}, - {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"}, - {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"}, - {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"}, - {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"}, - {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"}, - {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"}, - {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"}, - {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"}, - {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"}, - {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"}, - {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"}, - {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"}, - {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"}, - {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"}, - {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"}, - {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"}, - {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"}, - {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"}, - {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"}, - {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"}, - {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"}, - {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"}, - {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"}, - {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"}, + {file = "mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"}, + {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"}, + {file = "mypy-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150"}, + {file = "mypy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374"}, + {file = "mypy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc"}, + {file = "mypy-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129"}, + {file = "mypy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612"}, + {file = "mypy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6"}, + {file = "mypy-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185"}, + {file = "mypy-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913"}, + {file = "mypy-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"}, + {file = "mypy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b"}, + {file = "mypy-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2"}, + {file = "mypy-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e"}, + {file = "mypy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04"}, + {file = "mypy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89"}, + {file = "mypy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02"}, + {file = "mypy-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4"}, + {file = "mypy-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d"}, + {file = "mypy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf"}, + {file = "mypy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9"}, + {file = "mypy-1.9.0-py3-none-any.whl", hash = "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e"}, + {file = "mypy-1.9.0.tar.gz", hash = "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974"}, ] [package.dependencies] @@ -1222,92 +1244,96 @@ files = [ [[package]] name = "numpy" -version = "1.26.3" +version = "1.26.4" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.9" files = [ - {file = "numpy-1.26.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:806dd64230dbbfaca8a27faa64e2f414bf1c6622ab78cc4264f7f5f028fee3bf"}, - {file = "numpy-1.26.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02f98011ba4ab17f46f80f7f8f1c291ee7d855fcef0a5a98db80767a468c85cd"}, - {file = "numpy-1.26.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d45b3ec2faed4baca41c76617fcdcfa4f684ff7a151ce6fc78ad3b6e85af0a6"}, - {file = "numpy-1.26.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdd2b45bf079d9ad90377048e2747a0c82351989a2165821f0c96831b4a2a54b"}, - {file = "numpy-1.26.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:211ddd1e94817ed2d175b60b6374120244a4dd2287f4ece45d49228b4d529178"}, - {file = "numpy-1.26.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1240f767f69d7c4c8a29adde2310b871153df9b26b5cb2b54a561ac85146485"}, - {file = "numpy-1.26.3-cp310-cp310-win32.whl", hash = "sha256:21a9484e75ad018974a2fdaa216524d64ed4212e418e0a551a2d83403b0531d3"}, - {file = "numpy-1.26.3-cp310-cp310-win_amd64.whl", hash = "sha256:9e1591f6ae98bcfac2a4bbf9221c0b92ab49762228f38287f6eeb5f3f55905ce"}, - {file = "numpy-1.26.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b831295e5472954104ecb46cd98c08b98b49c69fdb7040483aff799a755a7374"}, - {file = "numpy-1.26.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9e87562b91f68dd8b1c39149d0323b42e0082db7ddb8e934ab4c292094d575d6"}, - {file = "numpy-1.26.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c66d6fec467e8c0f975818c1796d25c53521124b7cfb760114be0abad53a0a2"}, - {file = "numpy-1.26.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f25e2811a9c932e43943a2615e65fc487a0b6b49218899e62e426e7f0a57eeda"}, - {file = "numpy-1.26.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:af36e0aa45e25c9f57bf684b1175e59ea05d9a7d3e8e87b7ae1a1da246f2767e"}, - {file = "numpy-1.26.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:51c7f1b344f302067b02e0f5b5d2daa9ed4a721cf49f070280ac202738ea7f00"}, - {file = "numpy-1.26.3-cp311-cp311-win32.whl", hash = "sha256:7ca4f24341df071877849eb2034948459ce3a07915c2734f1abb4018d9c49d7b"}, - {file = "numpy-1.26.3-cp311-cp311-win_amd64.whl", hash = "sha256:39763aee6dfdd4878032361b30b2b12593fb445ddb66bbac802e2113eb8a6ac4"}, - {file = "numpy-1.26.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a7081fd19a6d573e1a05e600c82a1c421011db7935ed0d5c483e9dd96b99cf13"}, - {file = "numpy-1.26.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12c70ac274b32bc00c7f61b515126c9205323703abb99cd41836e8125ea0043e"}, - {file = "numpy-1.26.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f784e13e598e9594750b2ef6729bcd5a47f6cfe4a12cca13def35e06d8163e3"}, - {file = "numpy-1.26.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f24750ef94d56ce6e33e4019a8a4d68cfdb1ef661a52cdaee628a56d2437419"}, - {file = "numpy-1.26.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:77810ef29e0fb1d289d225cabb9ee6cf4d11978a00bb99f7f8ec2132a84e0166"}, - {file = "numpy-1.26.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8ed07a90f5450d99dad60d3799f9c03c6566709bd53b497eb9ccad9a55867f36"}, - {file = "numpy-1.26.3-cp312-cp312-win32.whl", hash = "sha256:f73497e8c38295aaa4741bdfa4fda1a5aedda5473074369eca10626835445511"}, - {file = "numpy-1.26.3-cp312-cp312-win_amd64.whl", hash = "sha256:da4b0c6c699a0ad73c810736303f7fbae483bcb012e38d7eb06a5e3b432c981b"}, - {file = "numpy-1.26.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1666f634cb3c80ccbd77ec97bc17337718f56d6658acf5d3b906ca03e90ce87f"}, - {file = "numpy-1.26.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18c3319a7d39b2c6a9e3bb75aab2304ab79a811ac0168a671a62e6346c29b03f"}, - {file = "numpy-1.26.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b7e807d6888da0db6e7e75838444d62495e2b588b99e90dd80c3459594e857b"}, - {file = "numpy-1.26.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4d362e17bcb0011738c2d83e0a65ea8ce627057b2fdda37678f4374a382a137"}, - {file = "numpy-1.26.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b8c275f0ae90069496068c714387b4a0eba5d531aace269559ff2b43655edd58"}, - {file = "numpy-1.26.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cc0743f0302b94f397a4a65a660d4cd24267439eb16493fb3caad2e4389bccbb"}, - {file = "numpy-1.26.3-cp39-cp39-win32.whl", hash = "sha256:9bc6d1a7f8cedd519c4b7b1156d98e051b726bf160715b769106661d567b3f03"}, - {file = "numpy-1.26.3-cp39-cp39-win_amd64.whl", hash = "sha256:867e3644e208c8922a3be26fc6bbf112a035f50f0a86497f98f228c50c607bb2"}, - {file = "numpy-1.26.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3c67423b3703f8fbd90f5adaa37f85b5794d3366948efe9a5190a5f3a83fc34e"}, - {file = "numpy-1.26.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46f47ee566d98849323f01b349d58f2557f02167ee301e5e28809a8c0e27a2d0"}, - {file = "numpy-1.26.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a8474703bffc65ca15853d5fd4d06b18138ae90c17c8d12169968e998e448bb5"}, - {file = "numpy-1.26.3.tar.gz", hash = "sha256:697df43e2b6310ecc9d95f05d5ef20eacc09c7c4ecc9da3f235d39e71b7da1e4"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, ] [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] name = "pandas" -version = "2.1.4" +version = "2.2.1" description = "Powerful data structures for data analysis, time series, and statistics" optional = false python-versions = ">=3.9" files = [ - {file = "pandas-2.1.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bdec823dc6ec53f7a6339a0e34c68b144a7a1fd28d80c260534c39c62c5bf8c9"}, - {file = "pandas-2.1.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:294d96cfaf28d688f30c918a765ea2ae2e0e71d3536754f4b6de0ea4a496d034"}, - {file = "pandas-2.1.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b728fb8deba8905b319f96447a27033969f3ea1fea09d07d296c9030ab2ed1d"}, - {file = "pandas-2.1.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00028e6737c594feac3c2df15636d73ace46b8314d236100b57ed7e4b9ebe8d9"}, - {file = "pandas-2.1.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:426dc0f1b187523c4db06f96fb5c8d1a845e259c99bda74f7de97bd8a3bb3139"}, - {file = "pandas-2.1.4-cp310-cp310-win_amd64.whl", hash = "sha256:f237e6ca6421265643608813ce9793610ad09b40154a3344a088159590469e46"}, - {file = "pandas-2.1.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b7d852d16c270e4331f6f59b3e9aa23f935f5c4b0ed2d0bc77637a8890a5d092"}, - {file = "pandas-2.1.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd7d5f2f54f78164b3d7a40f33bf79a74cdee72c31affec86bfcabe7e0789821"}, - {file = "pandas-2.1.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0aa6e92e639da0d6e2017d9ccff563222f4eb31e4b2c3cf32a2a392fc3103c0d"}, - {file = "pandas-2.1.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d797591b6846b9db79e65dc2d0d48e61f7db8d10b2a9480b4e3faaddc421a171"}, - {file = "pandas-2.1.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d2d3e7b00f703aea3945995ee63375c61b2e6aa5aa7871c5d622870e5e137623"}, - {file = "pandas-2.1.4-cp311-cp311-win_amd64.whl", hash = "sha256:dc9bf7ade01143cddc0074aa6995edd05323974e6e40d9dbde081021ded8510e"}, - {file = "pandas-2.1.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:482d5076e1791777e1571f2e2d789e940dedd927325cc3cb6d0800c6304082f6"}, - {file = "pandas-2.1.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8a706cfe7955c4ca59af8c7a0517370eafbd98593155b48f10f9811da440248b"}, - {file = "pandas-2.1.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0513a132a15977b4a5b89aabd304647919bc2169eac4c8536afb29c07c23540"}, - {file = "pandas-2.1.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9f17f2b6fc076b2a0078862547595d66244db0f41bf79fc5f64a5c4d635bead"}, - {file = "pandas-2.1.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:45d63d2a9b1b37fa6c84a68ba2422dc9ed018bdaa668c7f47566a01188ceeec1"}, - {file = "pandas-2.1.4-cp312-cp312-win_amd64.whl", hash = "sha256:f69b0c9bb174a2342818d3e2778584e18c740d56857fc5cdb944ec8bbe4082cf"}, - {file = "pandas-2.1.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3f06bda01a143020bad20f7a85dd5f4a1600112145f126bc9e3e42077c24ef34"}, - {file = "pandas-2.1.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab5796839eb1fd62a39eec2916d3e979ec3130509930fea17fe6f81e18108f6a"}, - {file = "pandas-2.1.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edbaf9e8d3a63a9276d707b4d25930a262341bca9874fcb22eff5e3da5394732"}, - {file = "pandas-2.1.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ebfd771110b50055712b3b711b51bee5d50135429364d0498e1213a7adc2be8"}, - {file = "pandas-2.1.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8ea107e0be2aba1da619cc6ba3f999b2bfc9669a83554b1904ce3dd9507f0860"}, - {file = "pandas-2.1.4-cp39-cp39-win_amd64.whl", hash = "sha256:d65148b14788b3758daf57bf42725caa536575da2b64df9964c563b015230984"}, - {file = "pandas-2.1.4.tar.gz", hash = "sha256:fcb68203c833cc735321512e13861358079a96c174a61f5116a1de89c58c0ef7"}, + {file = "pandas-2.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8df8612be9cd1c7797c93e1c5df861b2ddda0b48b08f2c3eaa0702cf88fb5f88"}, + {file = "pandas-2.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0f573ab277252ed9aaf38240f3b54cfc90fff8e5cab70411ee1d03f5d51f3944"}, + {file = "pandas-2.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f02a3a6c83df4026e55b63c1f06476c9aa3ed6af3d89b4f04ea656ccdaaaa359"}, + {file = "pandas-2.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c38ce92cb22a4bea4e3929429aa1067a454dcc9c335799af93ba9be21b6beb51"}, + {file = "pandas-2.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c2ce852e1cf2509a69e98358e8458775f89599566ac3775e70419b98615f4b06"}, + {file = "pandas-2.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:53680dc9b2519cbf609c62db3ed7c0b499077c7fefda564e330286e619ff0dd9"}, + {file = "pandas-2.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:94e714a1cca63e4f5939cdce5f29ba8d415d85166be3441165edd427dc9f6bc0"}, + {file = "pandas-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f821213d48f4ab353d20ebc24e4faf94ba40d76680642fb7ce2ea31a3ad94f9b"}, + {file = "pandas-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c70e00c2d894cb230e5c15e4b1e1e6b2b478e09cf27cc593a11ef955b9ecc81a"}, + {file = "pandas-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e97fbb5387c69209f134893abc788a6486dbf2f9e511070ca05eed4b930b1b02"}, + {file = "pandas-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101d0eb9c5361aa0146f500773395a03839a5e6ecde4d4b6ced88b7e5a1a6403"}, + {file = "pandas-2.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7d2ed41c319c9fb4fd454fe25372028dfa417aacb9790f68171b2e3f06eae8cd"}, + {file = "pandas-2.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:af5d3c00557d657c8773ef9ee702c61dd13b9d7426794c9dfeb1dc4a0bf0ebc7"}, + {file = "pandas-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:06cf591dbaefb6da9de8472535b185cba556d0ce2e6ed28e21d919704fef1a9e"}, + {file = "pandas-2.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:88ecb5c01bb9ca927ebc4098136038519aa5d66b44671861ffab754cae75102c"}, + {file = "pandas-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:04f6ec3baec203c13e3f8b139fb0f9f86cd8c0b94603ae3ae8ce9a422e9f5bee"}, + {file = "pandas-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a935a90a76c44fe170d01e90a3594beef9e9a6220021acfb26053d01426f7dc2"}, + {file = "pandas-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c391f594aae2fd9f679d419e9a4d5ba4bce5bb13f6a989195656e7dc4b95c8f0"}, + {file = "pandas-2.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9d1265545f579edf3f8f0cb6f89f234f5e44ba725a34d86535b1a1d38decbccc"}, + {file = "pandas-2.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:11940e9e3056576ac3244baef2fedade891977bcc1cb7e5cc8f8cc7d603edc89"}, + {file = "pandas-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:4acf681325ee1c7f950d058b05a820441075b0dd9a2adf5c4835b9bc056bf4fb"}, + {file = "pandas-2.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9bd8a40f47080825af4317d0340c656744f2bfdb6819f818e6ba3cd24c0e1397"}, + {file = "pandas-2.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:df0c37ebd19e11d089ceba66eba59a168242fc6b7155cba4ffffa6eccdfb8f16"}, + {file = "pandas-2.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:739cc70eaf17d57608639e74d63387b0d8594ce02f69e7a0b046f117974b3019"}, + {file = "pandas-2.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9d3558d263073ed95e46f4650becff0c5e1ffe0fc3a015de3c79283dfbdb3df"}, + {file = "pandas-2.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4aa1d8707812a658debf03824016bf5ea0d516afdea29b7dc14cf687bc4d4ec6"}, + {file = "pandas-2.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:76f27a809cda87e07f192f001d11adc2b930e93a2b0c4a236fde5429527423be"}, + {file = "pandas-2.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:1ba21b1d5c0e43416218db63037dbe1a01fc101dc6e6024bcad08123e48004ab"}, + {file = "pandas-2.2.1.tar.gz", hash = "sha256:0ab90f87093c13f3e8fa45b48ba9f39181046e8f3317d3aadb2fffbb1b978572"}, ] [package.dependencies] @@ -1318,56 +1344,57 @@ numpy = [ ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" -tzdata = ">=2022.1" +tzdata = ">=2022.7" [package.extras] -all = ["PyQt5 (>=5.15.6)", "SQLAlchemy (>=1.4.36)", "beautifulsoup4 (>=4.11.1)", "bottleneck (>=1.3.4)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=0.8.1)", "fsspec (>=2022.05.0)", "gcsfs (>=2022.05.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.8.0)", "matplotlib (>=3.6.1)", "numba (>=0.55.2)", "numexpr (>=2.8.0)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pandas-gbq (>=0.17.5)", "psycopg2 (>=2.9.3)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.5)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "pyxlsb (>=1.0.9)", "qtpy (>=2.2.0)", "s3fs (>=2022.05.0)", "scipy (>=1.8.1)", "tables (>=3.7.0)", "tabulate (>=0.8.10)", "xarray (>=2022.03.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)", "zstandard (>=0.17.0)"] -aws = ["s3fs (>=2022.05.0)"] -clipboard = ["PyQt5 (>=5.15.6)", "qtpy (>=2.2.0)"] -compression = ["zstandard (>=0.17.0)"] -computation = ["scipy (>=1.8.1)", "xarray (>=2022.03.0)"] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] consortium-standard = ["dataframe-api-compat (>=0.1.7)"] -excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pyxlsb (>=1.0.9)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)"] -feather = ["pyarrow (>=7.0.0)"] -fss = ["fsspec (>=2022.05.0)"] -gcp = ["gcsfs (>=2022.05.0)", "pandas-gbq (>=0.17.5)"] -hdf5 = ["tables (>=3.7.0)"] -html = ["beautifulsoup4 (>=4.11.1)", "html5lib (>=1.1)", "lxml (>=4.8.0)"] -mysql = ["SQLAlchemy (>=1.4.36)", "pymysql (>=1.0.2)"] -output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.8.10)"] -parquet = ["pyarrow (>=7.0.0)"] -performance = ["bottleneck (>=1.3.4)", "numba (>=0.55.2)", "numexpr (>=2.8.0)"] -plot = ["matplotlib (>=3.6.1)"] -postgresql = ["SQLAlchemy (>=1.4.36)", "psycopg2 (>=2.9.3)"] -spss = ["pyreadstat (>=1.1.5)"] -sql-other = ["SQLAlchemy (>=1.4.36)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] -xml = ["lxml (>=4.8.0)"] +xml = ["lxml (>=4.9.2)"] [[package]] name = "platformdirs" -version = "4.1.0" +version = "4.2.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, - {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, ] [package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] [[package]] name = "pluggy" -version = "1.3.0" +version = "1.4.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, - {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, ] [package.extras] @@ -1375,60 +1402,66 @@ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] [[package]] -name = "protobuf" -version = "4.25.2" -description = "" +name = "proto-plus" +version = "1.23.0" +description = "Beautiful, Pythonic protocol buffers." optional = false -python-versions = ">=3.8" +python-versions = ">=3.6" files = [ - {file = "protobuf-4.25.2-cp310-abi3-win32.whl", hash = "sha256:b50c949608682b12efb0b2717f53256f03636af5f60ac0c1d900df6213910fd6"}, - {file = "protobuf-4.25.2-cp310-abi3-win_amd64.whl", hash = "sha256:8f62574857ee1de9f770baf04dde4165e30b15ad97ba03ceac65f760ff018ac9"}, - {file = "protobuf-4.25.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:2db9f8fa64fbdcdc93767d3cf81e0f2aef176284071507e3ede160811502fd3d"}, - {file = "protobuf-4.25.2-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:10894a2885b7175d3984f2be8d9850712c57d5e7587a2410720af8be56cdaf62"}, - {file = "protobuf-4.25.2-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:fc381d1dd0516343f1440019cedf08a7405f791cd49eef4ae1ea06520bc1c020"}, - {file = "protobuf-4.25.2-cp38-cp38-win32.whl", hash = "sha256:33a1aeef4b1927431d1be780e87b641e322b88d654203a9e9d93f218ee359e61"}, - {file = "protobuf-4.25.2-cp38-cp38-win_amd64.whl", hash = "sha256:47f3de503fe7c1245f6f03bea7e8d3ec11c6c4a2ea9ef910e3221c8a15516d62"}, - {file = "protobuf-4.25.2-cp39-cp39-win32.whl", hash = "sha256:5e5c933b4c30a988b52e0b7c02641760a5ba046edc5e43d3b94a74c9fc57c1b3"}, - {file = "protobuf-4.25.2-cp39-cp39-win_amd64.whl", hash = "sha256:d66a769b8d687df9024f2985d5137a337f957a0916cf5464d1513eee96a63ff0"}, - {file = "protobuf-4.25.2-py3-none-any.whl", hash = "sha256:a8b7a98d4ce823303145bf3c1a8bdb0f2f4642a414b196f04ad9853ed0c8f830"}, - {file = "protobuf-4.25.2.tar.gz", hash = "sha256:fe599e175cb347efc8ee524bcd4b902d11f7262c0e569ececcb89995c15f0a5e"}, + {file = "proto-plus-1.23.0.tar.gz", hash = "sha256:89075171ef11988b3fa157f5dbd8b9cf09d65fffee97e29ce403cd8defba19d2"}, + {file = "proto_plus-1.23.0-py3-none-any.whl", hash = "sha256:a829c79e619e1cf632de091013a4173deed13a55f326ef84f05af6f50ff4c82c"}, ] +[package.dependencies] +protobuf = ">=3.19.0,<5.0.0dev" + +[package.extras] +testing = ["google-api-core[grpc] (>=1.31.5)"] + [[package]] -name = "py" -version = "1.11.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" +name = "protobuf" +version = "4.25.3" +description = "" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.8" files = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, + {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"}, + {file = "protobuf-4.25.3-cp310-abi3-win_amd64.whl", hash = "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8"}, + {file = "protobuf-4.25.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d"}, + {file = "protobuf-4.25.3-cp38-cp38-win32.whl", hash = "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"}, + {file = "protobuf-4.25.3-cp38-cp38-win_amd64.whl", hash = "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4"}, + {file = "protobuf-4.25.3-cp39-cp39-win32.whl", hash = "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4"}, + {file = "protobuf-4.25.3-cp39-cp39-win_amd64.whl", hash = "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c"}, + {file = "protobuf-4.25.3-py3-none-any.whl", hash = "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9"}, + {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"}, ] [[package]] name = "pyasn1" -version = "0.5.1" +version = "0.6.0" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +python-versions = ">=3.8" files = [ - {file = "pyasn1-0.5.1-py2.py3-none-any.whl", hash = "sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58"}, - {file = "pyasn1-0.5.1.tar.gz", hash = "sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c"}, + {file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"}, + {file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"}, ] [[package]] name = "pyasn1-modules" -version = "0.3.0" +version = "0.4.0" description = "A collection of ASN.1-based protocols modules" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +python-versions = ">=3.8" files = [ - {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"}, - {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"}, + {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"}, + {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"}, ] [package.dependencies] -pyasn1 = ">=0.4.6,<0.6.0" +pyasn1 = ">=0.4.6,<0.7.0" [[package]] name = "pycparser" @@ -1443,47 +1476,47 @@ files = [ [[package]] name = "pydantic" -version = "1.10.13" +version = "1.10.14" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737"}, - {file = "pydantic-1.10.13-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01"}, - {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1740068fd8e2ef6eb27a20e5651df000978edce6da6803c2bef0bc74540f9548"}, - {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84bafe2e60b5e78bc64a2941b4c071a4b7404c5c907f5f5a99b0139781e69ed8"}, - {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bc0898c12f8e9c97f6cd44c0ed70d55749eaf783716896960b4ecce2edfd2d69"}, - {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:654db58ae399fe6434e55325a2c3e959836bd17a6f6a0b6ca8107ea0571d2e17"}, - {file = "pydantic-1.10.13-cp310-cp310-win_amd64.whl", hash = "sha256:75ac15385a3534d887a99c713aa3da88a30fbd6204a5cd0dc4dab3d770b9bd2f"}, - {file = "pydantic-1.10.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653"}, - {file = "pydantic-1.10.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe"}, - {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9"}, - {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80"}, - {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580"}, - {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0"}, - {file = "pydantic-1.10.13-cp311-cp311-win_amd64.whl", hash = "sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0"}, - {file = "pydantic-1.10.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4b03e42ec20286f052490423682016fd80fda830d8e4119f8ab13ec7464c0132"}, - {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f59ef915cac80275245824e9d771ee939133be38215555e9dc90c6cb148aaeb5"}, - {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a1f9f747851338933942db7af7b6ee8268568ef2ed86c4185c6ef4402e80ba8"}, - {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:97cce3ae7341f7620a0ba5ef6cf043975cd9d2b81f3aa5f4ea37928269bc1b87"}, - {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:854223752ba81e3abf663d685f105c64150873cc6f5d0c01d3e3220bcff7d36f"}, - {file = "pydantic-1.10.13-cp37-cp37m-win_amd64.whl", hash = "sha256:b97c1fac8c49be29486df85968682b0afa77e1b809aff74b83081cc115e52f33"}, - {file = "pydantic-1.10.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c958d053453a1c4b1c2062b05cd42d9d5c8eb67537b8d5a7e3c3032943ecd261"}, - {file = "pydantic-1.10.13-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c5370a7edaac06daee3af1c8b1192e305bc102abcbf2a92374b5bc793818599"}, - {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d6f6e7305244bddb4414ba7094ce910560c907bdfa3501e9db1a7fd7eaea127"}, - {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3a3c792a58e1622667a2837512099eac62490cdfd63bd407993aaf200a4cf1f"}, - {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c636925f38b8db208e09d344c7aa4f29a86bb9947495dd6b6d376ad10334fb78"}, - {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:678bcf5591b63cc917100dc50ab6caebe597ac67e8c9ccb75e698f66038ea953"}, - {file = "pydantic-1.10.13-cp38-cp38-win_amd64.whl", hash = "sha256:6cf25c1a65c27923a17b3da28a0bdb99f62ee04230c931d83e888012851f4e7f"}, - {file = "pydantic-1.10.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8ef467901d7a41fa0ca6db9ae3ec0021e3f657ce2c208e98cd511f3161c762c6"}, - {file = "pydantic-1.10.13-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:968ac42970f57b8344ee08837b62f6ee6f53c33f603547a55571c954a4225691"}, - {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9849f031cf8a2f0a928fe885e5a04b08006d6d41876b8bbd2fc68a18f9f2e3fd"}, - {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56e3ff861c3b9c6857579de282ce8baabf443f42ffba355bf070770ed63e11e1"}, - {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f00790179497767aae6bcdc36355792c79e7bbb20b145ff449700eb076c5f96"}, - {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:75b297827b59bc229cac1a23a2f7a4ac0031068e5be0ce385be1462e7e17a35d"}, - {file = "pydantic-1.10.13-cp39-cp39-win_amd64.whl", hash = "sha256:e70ca129d2053fb8b728ee7d1af8e553a928d7e301a311094b8a0501adc8763d"}, - {file = "pydantic-1.10.13-py3-none-any.whl", hash = "sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687"}, - {file = "pydantic-1.10.13.tar.gz", hash = "sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340"}, + {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, + {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, + {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, + {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, + {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, + {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, + {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, + {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, + {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, + {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, ] [package.dependencies] @@ -1495,20 +1528,17 @@ email = ["email-validator (>=1.0.3)"] [[package]] name = "pydash" -version = "7.0.6" +version = "6.0.2" description = "The kitchen sink of Python utility libraries for doing \"stuff\" in a functional way. Based on the Lo-Dash Javascript library." optional = false python-versions = ">=3.7" files = [ - {file = "pydash-7.0.6-py3-none-any.whl", hash = "sha256:10e506935953fde4b0d6fe21a88e17783cd1479256ae96f285b5f89063b4efd6"}, - {file = "pydash-7.0.6.tar.gz", hash = "sha256:7d9df7e9f36f2bbb08316b609480e7c6468185473a21bdd8e65dda7915565a26"}, + {file = "pydash-6.0.2-py3-none-any.whl", hash = "sha256:6d3ce5cbbc8ca3533c12782ac201c2ec756d1e1703ec3efc88f2b95d1ed2bb31"}, + {file = "pydash-6.0.2.tar.gz", hash = "sha256:35caa588e01d293713655e0870544d25128cd414c5e19477a0d63adc2b2ca03e"}, ] -[package.dependencies] -typing-extensions = ">=3.10,<4.6.0 || >4.6.0" - [package.extras] -dev = ["Sphinx", "black", "build", "coverage", "docformatter", "flake8", "flake8-black", "flake8-bugbear", "flake8-isort", "furo", "importlib-metadata (<5)", "invoke", "isort", "mypy", "pylint", "pytest", "pytest-cov", "pytest-mypy-testing", "sphinx-autodoc-typehints", "tox", "twine", "wheel"] +dev = ["Sphinx", "black", "build", "coverage", "docformatter", "flake8", "flake8-black", "flake8-bugbear", "flake8-isort", "importlib-metadata (<5)", "invoke", "isort", "pylint", "pytest", "pytest-cov", "sphinx-rtd-theme", "tox", "twine", "wheel"] [[package]] name = "pygithub" @@ -1590,27 +1620,25 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] [[package]] name = "pytest" -version = "6.2.5" +version = "8.1.1" description = "pytest: simple powerful testing with Python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, - {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, + {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"}, + {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"}, ] [package.dependencies] -atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} -attrs = ">=19.2.0" colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" -pluggy = ">=0.12,<2.0" -py = ">=1.8.2" -toml = "*" +pluggy = ">=1.4,<2.0" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] +testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-cov" @@ -1632,30 +1660,30 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale [[package]] name = "pytest-mock" -version = "3.12.0" +version = "3.14.0" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, - {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, ] [package.dependencies] -pytest = ">=5.0" +pytest = ">=6.2.5" [package.extras] dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -1663,13 +1691,13 @@ six = ">=1.5" [[package]] name = "pytz" -version = "2023.3.post1" +version = "2024.1" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, - {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, ] [[package]] @@ -1684,7 +1712,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -1692,15 +1719,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -1717,7 +1737,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -1725,7 +1744,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -1733,13 +1751,13 @@ files = [ [[package]] name = "readchar" -version = "4.0.5" +version = "4.0.6" description = "Library to easily read single chars and key strokes" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "readchar-4.0.5-py3-none-any.whl", hash = "sha256:76ec784a5dd2afac3b7da8003329834cdd9824294c260027f8c8d2e4d0a78f43"}, - {file = "readchar-4.0.5.tar.gz", hash = "sha256:08a456c2d7c1888cde3f4688b542621b676eb38cd6cfed7eb6cb2e2905ddc826"}, + {file = "readchar-4.0.6-py3-none-any.whl", hash = "sha256:b4b31dd35de4897be738f27e8f9f62426b5fedb54b648364987e30ae534b71bc"}, + {file = "readchar-4.0.6.tar.gz", hash = "sha256:e0dae942d3a746f8d5423f83dbad67efe704004baafe31b626477929faaee472"}, ] [package.dependencies] @@ -1768,13 +1786,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rich" -version = "13.7.0" +version = "13.7.1" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.7.0" files = [ - {file = "rich-13.7.0-py3-none-any.whl", hash = "sha256:6da14c108c4866ee9520bbffa71f6fe3962e193b7da68720583850cd4548e235"}, - {file = "rich-13.7.0.tar.gz", hash = "sha256:5cb5123b5cf9ee70584244246816e9114227e0b98ad9176eede6ad54bf5403fa"}, + {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, + {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"}, ] [package.dependencies] @@ -1800,13 +1818,13 @@ pyasn1 = ">=0.1.3" [[package]] name = "runs" -version = "1.2.0" +version = "1.2.2" description = "🏃 Run a block of text as a subprocess 🏃" optional = false python-versions = ">=3.8" files = [ - {file = "runs-1.2.0-py3-none-any.whl", hash = "sha256:ec6fe3b24dfa20c5c4e5c4806d3b35bb880aad0e787a8610913c665c5a7cc07c"}, - {file = "runs-1.2.0.tar.gz", hash = "sha256:8804271011b7a2eeb0d77c3e3f556e5ce5f602fa0dd2a31ed0c1222893be69b7"}, + {file = "runs-1.2.2-py3-none-any.whl", hash = "sha256:0980dcbc25aba1505f307ac4f0e9e92cbd0be2a15a1e983ee86c24c87b839dfd"}, + {file = "runs-1.2.2.tar.gz", hash = "sha256:9dc1815e2895cfb3a48317b173b9f1eac9ba5549b36a847b5cc60c3bf82ecef1"}, ] [package.dependencies] @@ -1825,19 +1843,19 @@ files = [ [[package]] name = "setuptools" -version = "69.0.3" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, - {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "simpleeval" @@ -1874,24 +1892,13 @@ files = [ [[package]] name = "sniffio" -version = "1.3.0" +version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" files = [ - {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, - {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, -] - -[[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, - {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] [[package]] @@ -1907,55 +1914,56 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] name = "tzdata" -version = "2023.4" +version = "2024.1" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" files = [ - {file = "tzdata-2023.4-py2.py3-none-any.whl", hash = "sha256:aa3ace4329eeacda5b7beb7ea08ece826c28d761cda36e747cfbf97996d39bf3"}, - {file = "tzdata-2023.4.tar.gz", hash = "sha256:dd54c94f294765522c77399649b4fefd95522479a664a0cec87f41bebc6148c9"}, + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, ] [[package]] name = "urllib3" -version = "2.1.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, - {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] [[package]] name = "vulture" -version = "2.10" +version = "2.11" description = "Find dead code" optional = false python-versions = ">=3.8" files = [ - {file = "vulture-2.10-py2.py3-none-any.whl", hash = "sha256:568a4176db7468d0157817ae3bb1847a19f1ddc629849af487f9d3b279bff77d"}, - {file = "vulture-2.10.tar.gz", hash = "sha256:2a5c3160bffba77595b6e6dfcc412016bd2a09cd4b66cdf7fbba913684899f6f"}, + {file = "vulture-2.11-py2.py3-none-any.whl", hash = "sha256:12d745f7710ffbf6aeb8279ba9068a24d4e52e8ed333b8b044035c9d6b823aba"}, + {file = "vulture-2.11.tar.gz", hash = "sha256:f0fbb60bce6511aad87ee0736c502456737490a82d919a44e6d92262cb35f1c2"}, ] [package.dependencies] -toml = "*" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} [[package]] name = "wcwidth" @@ -2164,4 +2172,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "e6f67b753371bdbe515e2326b68d32e46a492722b13a8b32a2636fe1e0c39028" +content-hash = "42969107e6745a0ef4167870d8342206028abc6d62bd0eac3a41948fb3348271" diff --git a/airbyte-ci/connectors/base_images/pyproject.toml b/airbyte-ci/connectors/base_images/pyproject.toml index d853b591b2775..9ea6a60739bd2 100644 --- a/airbyte-ci/connectors/base_images/pyproject.toml +++ b/airbyte-ci/connectors/base_images/pyproject.toml @@ -9,6 +9,7 @@ include = ["generated"] [tool.poetry.dependencies] python = "^3.10" dagger-io = "==0.9.6" +beartype = "<0.17.2" # dagger 0.9.6 doesn't pin this but doesn't play well with it. We should probably upgrade dagger gitpython = "^3.1.35" rich = "^13.5.2" semver = "^3.0.1" @@ -17,7 +18,7 @@ inquirer = "^3.1.3" jinja2 = "^3.1.2" [tool.poetry.group.dev.dependencies] -pytest = "^6.2.5" +pytest = "^8" pytest-mock = "^3.10.0" pytest-cov = "^4.1.0" mypy = "^1.5.1" @@ -36,6 +37,6 @@ publish = "base_images.commands:publish_existing_version" test = "pytest tests" [tool.airbyte_ci] -extra_poetry_groups = ["dev"] +optional_poetry_groups = ["dev"] poe_tasks = ["test"] mount_docker_socket = true diff --git a/airbyte-ci/connectors/ci_credentials/README.md b/airbyte-ci/connectors/ci_credentials/README.md index 511d8cb005a68..1e82c4061fbb6 100644 --- a/airbyte-ci/connectors/ci_credentials/README.md +++ b/airbyte-ci/connectors/ci_credentials/README.md @@ -1,17 +1,18 @@ # CI Credentials + CLI tooling to read and manage GSM secrets: -- `write-to-storage` download a connector's secrets locally in the connector's `secret` folder +- `write-to-storage` download a connector's secrets locally in the connector's `secrets` folder - `update-secrets` uploads new connector secret version that were locally updated. ## Requirements -This project requires Python 3.10 and pipx. +This project requires Python 3.10 and `pipx`. ## Installation The recommended way to install `ci_credentials` is using pipx. This ensures the tool and its dependencies are isolated from your other Python projects. -If you havent installed pyenv, you can do it with brew: +First, install `pyenv`. If you don't have it yet, you can install it using Homebrew: ```bash brew update @@ -27,7 +28,7 @@ python -m pip install --user pipx python -m pipx ensurepath ``` -Once pyenv and pipx is installed then run the following: +Once pyenv and pipx is installed then run the following (assuming you're in Airbyte repo root): ```bash pipx install --editable --force --python=python3.10 airbyte-ci/connectors/ci_credentials/ @@ -35,15 +36,13 @@ pipx install --editable --force --python=python3.10 airbyte-ci/connectors/ci_cre This command installs `ci_credentials` and makes it globally available in your terminal. -_Note: `--force` is required to ensure updates are applied on subsequent installs._ -_Note: `--python=python3.10` is required to ensure the correct python version is used._ -_Note: `--editable` is required to ensure the correct python version is used._ - -If you face any installation problem feel free to reach out the Airbyte Connectors Operations team. - +> [!Note] +> - `--force` is required to ensure updates are applied on subsequent installs. +> - `--python=python3.10` is required to ensure the correct python version is used. ## Get GSM access Download a Service account json key that has access to Google Secrets Manager. +`ci_credentials` expects `GCP_GSM_CREDENTIALS` to be set in environment to be able to access secrets. ### Create Service Account * Go to https://console.cloud.google.com/iam-admin/serviceaccounts/create?project=dataline-integration-testing @@ -60,6 +59,7 @@ Download a Service account json key that has access to Google Secrets Manager. * In your .zshrc, add: `export GCP_GSM_CREDENTIALS=$(cat )` ## Development + During development, you can use the `--editable` option to make changes to the `ci_credentials` package and have them immediately take effect without needing to reinstall the package: ```bash @@ -68,31 +68,28 @@ pipx install --editable airbyte-ci/connectors/ci_credentials/ This is useful when you are making changes to the package and want to test them in real-time. -Note: - -- The package name is `pipelines`, not `airbyte-ci`. You will need this when uninstalling or reinstalling. -- Even with the above `--editable` method, live changes to the code in the sibling project `/airbyte-ci/connectors/connector_ops/` are not automatically captured. To ensure you are using the latest code, use the command `pipx reinstall pipelines`. +> [!Note] +> - The package name is `ci_credentials`, not `airbyte-ci`. You will need this when uninstalling or reinstalling. ## Usage -After installation, you can use the ci_credentials command in your terminal. +After installation, you can use the `ci_credentials` command in your terminal. ## Run it The `VERSION=dev` will make it so it knows to use your local current working directory and not the Github Action one. -### Help -```bash -VERSION=dev ci_credentials --help -``` - ### Write credentials for a specific connector to local storage + To download GSM secrets to `airbyte-integrations/connectors/source-bings-ads/secrets`: + ```bash VERSION=dev ci_credentials source-bing-ads write-to-storage ``` ### Write credentials for all connectors to local storage + To download GSM secrets to for all available connectors into their respective `secrets` directories: + ```bash VERSION=dev ci_credentials all write-to-storage ``` @@ -105,5 +102,13 @@ VERSION=dev ci_credentials source-bing-ads update-secrets ``` ## FAQ + +### Help + +```bash +VERSION=dev ci_credentials --help +``` + ### What is `VERSION=dev`? + This is a way to tell the tool to write secrets using your local current working directory and not the Github Action runner one. diff --git a/airbyte-ci/connectors/ci_credentials/poetry.lock b/airbyte-ci/connectors/ci_credentials/poetry.lock index ebc60b07b062b..9d9f45b1d5765 100644 --- a/airbyte-ci/connectors/ci_credentials/poetry.lock +++ b/airbyte-ci/connectors/ci_credentials/poetry.lock @@ -1,87 +1,75 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "certifi" -version = "2023.5.7" +version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.5.7-py3-none-any.whl", hash = "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"}, - {file = "certifi-2023.5.7.tar.gz", hash = "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7"}, + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] [[package]] name = "cffi" -version = "1.15.1" +version = "1.16.0" description = "Foreign Function Interface for Python calling C code." optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, - {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, - {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, - {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, - {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, - {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, - {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, - {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, - {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, - {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, - {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, - {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, - {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, - {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, - {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, - {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, - {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, - {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, - {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, + {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, + {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, + {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, + {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, ] [package.dependencies] @@ -89,118 +77,112 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "3.1.0" +version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, - {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] -[[package]] -name = "common_utils" -version = "0.0.0" -description = "" -optional = false -python-versions = ">=3.9" -files = [] -develop = true - -[package.dependencies] -cryptography = "*" -pyjwt = ">=2.6.0,<2.7.0" -requests = "*" - -[package.extras] -tests = ["requests-mock"] - -[package.source] -type = "directory" -url = "../common_utils" - [[package]] name = "click" -version = "8.1.3" +version = "8.1.7" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" files = [ - {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, - {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, ] [package.dependencies] @@ -217,56 +199,86 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "common-utils" +version = "0.0.0" +description = "Suite of all often used classes and common functions" +optional = false +python-versions = "^3.10" +files = [] +develop = true + +[package.dependencies] +pyjwt = "^2.8.0" +requests = "^2.31.0" + +[package.source] +type = "directory" +url = "../common_utils" + [[package]] name = "cryptography" -version = "41.0.1" +version = "42.0.5" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.1-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:f73bff05db2a3e5974a6fd248af2566134d8981fd7ab012e5dd4ddb1d9a70699"}, - {file = "cryptography-41.0.1-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:1a5472d40c8f8e91ff7a3d8ac6dfa363d8e3138b961529c996f3e2df0c7a411a"}, - {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fa01527046ca5facdf973eef2535a27fec4cb651e4daec4d043ef63f6ecd4ca"}, - {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b46e37db3cc267b4dea1f56da7346c9727e1209aa98487179ee8ebed09d21e43"}, - {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d198820aba55660b4d74f7b5fd1f17db3aa5eb3e6893b0a41b75e84e4f9e0e4b"}, - {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:948224d76c4b6457349d47c0c98657557f429b4e93057cf5a2f71d603e2fc3a3"}, - {file = "cryptography-41.0.1-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:059e348f9a3c1950937e1b5d7ba1f8e968508ab181e75fc32b879452f08356db"}, - {file = "cryptography-41.0.1-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b4ceb5324b998ce2003bc17d519080b4ec8d5b7b70794cbd2836101406a9be31"}, - {file = "cryptography-41.0.1-cp37-abi3-win32.whl", hash = "sha256:8f4ab7021127a9b4323537300a2acfb450124b2def3756f64dc3a3d2160ee4b5"}, - {file = "cryptography-41.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:1fee5aacc7367487b4e22484d3c7e547992ed726d14864ee33c0176ae43b0d7c"}, - {file = "cryptography-41.0.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9a6c7a3c87d595608a39980ebaa04d5a37f94024c9f24eb7d10262b92f739ddb"}, - {file = "cryptography-41.0.1-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5d092fdfedaec4cbbffbf98cddc915ba145313a6fdaab83c6e67f4e6c218e6f3"}, - {file = "cryptography-41.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1a8e6c2de6fbbcc5e14fd27fb24414507cb3333198ea9ab1258d916f00bc3039"}, - {file = "cryptography-41.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:cb33ccf15e89f7ed89b235cff9d49e2e62c6c981a6061c9c8bb47ed7951190bc"}, - {file = "cryptography-41.0.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f0ff6e18d13a3de56f609dd1fd11470918f770c6bd5d00d632076c727d35485"}, - {file = "cryptography-41.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7bfc55a5eae8b86a287747053140ba221afc65eb06207bedf6e019b8934b477c"}, - {file = "cryptography-41.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:eb8163f5e549a22888c18b0d53d6bb62a20510060a22fd5a995ec8a05268df8a"}, - {file = "cryptography-41.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8dde71c4169ec5ccc1087bb7521d54251c016f126f922ab2dfe6649170a3b8c5"}, - {file = "cryptography-41.0.1.tar.gz", hash = "sha256:d34579085401d3f49762d2f7d6634d6b6c2ae1242202e860f4d26b046e3a1006"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, + {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, + {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, + {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, + {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, + {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = "sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, + {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, ] [package.dependencies] -cffi = ">=1.12" +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] nox = ["nox"] -pep8test = ["black", "check-sdist", "mypy", "ruff"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] name = "exceptiongroup" -version = "1.1.2" +version = "1.2.0" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.1.2-py3-none-any.whl", hash = "sha256:e346e69d186172ca7cf029c8c1d16235aa0e04035e5750b4b95039e65204328f"}, - {file = "exceptiongroup-1.1.2.tar.gz", hash = "sha256:12c3e887d6485d16943a309616de20ae5582633e0a2eda17f4e10fd61c1e8af5"}, + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, ] [package.extras] @@ -274,13 +286,13 @@ test = ["pytest (>=6)"] [[package]] name = "idna" -version = "3.4" +version = "3.6" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.5" files = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, ] [[package]] @@ -296,24 +308,24 @@ files = [ [[package]] name = "packaging" -version = "23.1" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, - {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] name = "pluggy" -version = "1.2.0" +version = "1.4.0" description = "plugin and hook calling mechanisms for python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, - {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, ] [package.extras] @@ -333,13 +345,13 @@ files = [ [[package]] name = "pyjwt" -version = "2.6.0" +version = "2.8.0" description = "JSON Web Token implementation in Python" optional = false python-versions = ">=3.7" files = [ - {file = "PyJWT-2.6.0-py3-none-any.whl", hash = "sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14"}, - {file = "PyJWT-2.6.0.tar.gz", hash = "sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd"}, + {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, + {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, ] [package.extras] @@ -350,13 +362,13 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] name = "pytest" -version = "7.3.1" +version = "8.1.1" description = "pytest: simple powerful testing with Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pytest-7.3.1-py3-none-any.whl", hash = "sha256:3799fa815351fea3a5e96ac7e503a96fa51cc9942c3753cda7651b93c1cfa362"}, - {file = "pytest-7.3.1.tar.gz", hash = "sha256:434afafd78b1d78ed0addf160ad2b77a30d35d4bdf8af234fe621919d9ed15e3"}, + {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"}, + {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"}, ] [package.dependencies] @@ -364,77 +376,88 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" -pluggy = ">=0.12,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} +pluggy = ">=1.4,<2.0" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] +testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pyyaml" -version = "6.0" +version = "6.0.1" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.6" files = [ - {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, - {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, - {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, - {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"}, - {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"}, - {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"}, - {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, - {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, - {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, - {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, - {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, - {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, - {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, - {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, - {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, - {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, - {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, - {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, ] [[package]] name = "requests" -version = "2.28.2" +version = "2.31.0" description = "Python HTTP for Humans." optional = false -python-versions = ">=3.7, <4" +python-versions = ">=3.7" files = [ - {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"}, - {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"}, + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, ] [package.dependencies] certifi = ">=2017.4.17" charset-normalizer = ">=2,<4" idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<1.27" +urllib3 = ">=1.21.1,<3" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] @@ -442,33 +465,20 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-mock" -version = "1.10.0" +version = "1.12.0" description = "Mock out responses from the requests package" optional = false python-versions = "*" files = [ - {file = "requests-mock-1.10.0.tar.gz", hash = "sha256:59c9c32419a9fb1ae83ec242d98e889c45bd7d7a65d48375cc243ec08441658b"}, - {file = "requests_mock-1.10.0-py2.py3-none-any.whl", hash = "sha256:2fdbb637ad17ee15c06f33d31169e71bf9fe2bdb7bc9da26185be0dd8d842699"}, + {file = "requests-mock-1.12.0.tar.gz", hash = "sha256:4e34f2a2752f0b78397fb414526605d95fcdeab021ac1f26d18960e7eb41f6a8"}, + {file = "requests_mock-1.12.0-py2.py3-none-any.whl", hash = "sha256:4f6fdf956de568e0bac99eee4ad96b391c602e614cc0ad33e7f5c72edd699e70"}, ] [package.dependencies] -requests = ">=2.3,<3" -six = "*" +requests = ">=2.22,<3" [package.extras] fixture = ["fixtures"] -test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testrepository (>=0.0.18)", "testtools"] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] [[package]] name = "tomli" @@ -483,21 +493,22 @@ files = [ [[package]] name = "urllib3" -version = "1.26.16" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = ">=3.8" files = [ - {file = "urllib3-1.26.16-py2.py3-none-any.whl", hash = "sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f"}, - {file = "urllib3-1.26.16.tar.gz", hash = "sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] [metadata] lock-version = "2.0" -python-versions = "^3.9" -content-hash = "5c858b1988aed2273d7268987a724d848bc9eef618b5592cf0ef78d6b2c91ff8" +python-versions = "^3.10" +content-hash = "fceb05aba33d5b2e174a5aebe89e61311b47b560f4629851d245b4080a6e7f0f" diff --git a/airbyte-ci/connectors/ci_credentials/pyproject.toml b/airbyte-ci/connectors/ci_credentials/pyproject.toml index a85705db57626..3eed7cace5930 100644 --- a/airbyte-ci/connectors/ci_credentials/pyproject.toml +++ b/airbyte-ci/connectors/ci_credentials/pyproject.toml @@ -12,14 +12,15 @@ packages = [{ include = "ci_credentials" }] [tool.poetry.dependencies] python = "^3.10" -requests = "^2.28.2" +requests = "^2.31" +cryptography = ">=42.0" click = "^8.1.3" pyyaml = "^6.0" common_utils = { path = "../common_utils", develop = true } [tool.poetry.group.dev.dependencies] requests-mock = "^1.10.0" -pytest = "^7.3.1" +pytest = "^8" [build-system] requires = ["poetry-core"] @@ -32,5 +33,5 @@ ci_credentials = "ci_credentials.main:ci_credentials" test = "pytest tests" [tool.airbyte_ci] -extra_poetry_groups = ["dev"] +optional_poetry_groups = ["dev"] poe_tasks = ["test"] diff --git a/airbyte-ci/connectors/common_utils/README.md b/airbyte-ci/connectors/common_utils/README.md new file mode 100644 index 0000000000000..9565733d10694 --- /dev/null +++ b/airbyte-ci/connectors/common_utils/README.md @@ -0,0 +1,7 @@ +# Airbyte CI Common Utils + +`common_utils` is a Python package that provides common utilities that are used in other `airbyte-ci` tools, such as `ci_credentials` and `base_images`. + +Currently: +- Logger +- GCS API client diff --git a/airbyte-ci/connectors/common_utils/poetry.lock b/airbyte-ci/connectors/common_utils/poetry.lock index 750a922b5cca9..f380aa659f68a 100644 --- a/airbyte-ci/connectors/common_utils/poetry.lock +++ b/airbyte-ci/connectors/common_utils/poetry.lock @@ -1,101 +1,113 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "certifi" -version = "2023.5.7" +version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.5.7-py3-none-any.whl", hash = "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"}, - {file = "certifi-2023.5.7.tar.gz", hash = "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7"}, + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] [[package]] -name = "cffi" -version = "1.15.1" -description = "Foreign Function Interface for Python calling C code." +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false -python-versions = "*" +python-versions = ">=3.7.0" files = [ - {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, - {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, - {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, - {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, - {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, - {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, - {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, - {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, - {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, - {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, - {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, - {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, - {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, - {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, - {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, - {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, - {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, - {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, - {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, -] - -[package.dependencies] -pycparser = "*" - -[[package]] -name = "chardet" -version = "4.0.0" -description = "Universal encoding detector for Python 2 and 3" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "chardet-4.0.0-py2.py3-none-any.whl", hash = "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"}, - {file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"}, + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] [[package]] @@ -109,49 +121,15 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] -[[package]] -name = "cryptography" -version = "3.4.7" -description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." -optional = false -python-versions = ">=3.6" -files = [ - {file = "cryptography-3.4.7-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:3d8427734c781ea5f1b41d6589c293089704d4759e34597dce91014ac125aad1"}, - {file = "cryptography-3.4.7-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:8e56e16617872b0957d1c9742a3f94b43533447fd78321514abbe7db216aa250"}, - {file = "cryptography-3.4.7-cp36-abi3-manylinux2010_x86_64.whl", hash = "sha256:37340614f8a5d2fb9aeea67fd159bfe4f5f4ed535b1090ce8ec428b2f15a11f2"}, - {file = "cryptography-3.4.7-cp36-abi3-manylinux2014_aarch64.whl", hash = "sha256:240f5c21aef0b73f40bb9f78d2caff73186700bf1bc6b94285699aff98cc16c6"}, - {file = "cryptography-3.4.7-cp36-abi3-manylinux2014_x86_64.whl", hash = "sha256:1e056c28420c072c5e3cb36e2b23ee55e260cb04eee08f702e0edfec3fb51959"}, - {file = "cryptography-3.4.7-cp36-abi3-win32.whl", hash = "sha256:0f1212a66329c80d68aeeb39b8a16d54ef57071bf22ff4e521657b27372e327d"}, - {file = "cryptography-3.4.7-cp36-abi3-win_amd64.whl", hash = "sha256:de4e5f7f68220d92b7637fc99847475b59154b7a1b3868fb7385337af54ac9ca"}, - {file = "cryptography-3.4.7-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:26965837447f9c82f1855e0bc8bc4fb910240b6e0d16a664bb722df3b5b06873"}, - {file = "cryptography-3.4.7-pp36-pypy36_pp73-manylinux2014_x86_64.whl", hash = "sha256:eb8cc2afe8b05acbd84a43905832ec78e7b3873fb124ca190f574dca7389a87d"}, - {file = "cryptography-3.4.7-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b01fd6f2737816cb1e08ed4807ae194404790eac7ad030b34f2ce72b332f5586"}, - {file = "cryptography-3.4.7-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:7ec5d3b029f5fa2b179325908b9cd93db28ab7b85bb6c1db56b10e0b54235177"}, - {file = "cryptography-3.4.7-pp37-pypy37_pp73-manylinux2014_x86_64.whl", hash = "sha256:ee77aa129f481be46f8d92a1a7db57269a2f23052d5f2433b4621bb457081cc9"}, - {file = "cryptography-3.4.7-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:bf40af59ca2465b24e54f671b2de2c59257ddc4f7e5706dbd6930e26823668d3"}, - {file = "cryptography-3.4.7.tar.gz", hash = "sha256:3d10de8116d25649631977cb37da6cbdd2d6fa0e0281d014a5b7d337255ca713"}, -] - -[package.dependencies] -cffi = ">=1.12" - -[package.extras] -docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] -docstest = ["doc8", "pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] -pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] -sdist = ["setuptools-rust (>=0.11.4)"] -ssh = ["bcrypt (>=3.1.5)"] -test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pytz"] - [[package]] name = "exceptiongroup" -version = "1.1.2" +version = "1.2.0" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.1.2-py3-none-any.whl", hash = "sha256:e346e69d186172ca7cf029c8c1d16235aa0e04035e5750b4b95039e65204328f"}, - {file = "exceptiongroup-1.1.2.tar.gz", hash = "sha256:12c3e887d6485d16943a309616de20ae5582633e0a2eda17f4e10fd61c1e8af5"}, + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, ] [package.extras] @@ -159,13 +137,13 @@ test = ["pytest (>=6)"] [[package]] name = "idna" -version = "2.10" +version = "3.6" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.5" files = [ - {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"}, - {file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"}, + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, ] [[package]] @@ -181,67 +159,56 @@ files = [ [[package]] name = "packaging" -version = "23.1" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, - {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] name = "pluggy" -version = "1.2.0" +version = "1.4.0" description = "plugin and hook calling mechanisms for python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, - {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, ] [package.extras] dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] -[[package]] -name = "pycparser" -version = "2.21" -description = "C parser in Python" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, - {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, -] - [[package]] name = "pyjwt" -version = "2.1.0" +version = "2.8.0" description = "JSON Web Token implementation in Python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "PyJWT-2.1.0-py3-none-any.whl", hash = "sha256:934d73fbba91b0483d3857d1aff50e96b2a892384ee2c17417ed3203f173fca1"}, - {file = "PyJWT-2.1.0.tar.gz", hash = "sha256:fba44e7898bbca160a2b2b501f492824fc8382485d3a6f11ba5d0c1937ce6130"}, + {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, + {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, ] [package.extras] -crypto = ["cryptography (>=3.3.1,<4.0.0)"] -dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.3.1,<4.0.0)", "mypy", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] -docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] +crypto = ["cryptography (>=3.4.0)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] name = "pytest" -version = "7.4.0" +version = "8.1.1" description = "pytest: simple powerful testing with Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, - {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, + {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"}, + {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"}, ] [package.dependencies] @@ -249,62 +216,49 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" -pluggy = ">=0.12,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} +pluggy = ">=1.4,<2.0" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "requests" -version = "2.25.1" +version = "2.31.0" description = "Python HTTP for Humans." optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.7" files = [ - {file = "requests-2.25.1-py2.py3-none-any.whl", hash = "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"}, - {file = "requests-2.25.1.tar.gz", hash = "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"}, + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, ] [package.dependencies] certifi = ">=2017.4.17" -chardet = ">=3.0.2,<5" -idna = ">=2.5,<3" -urllib3 = ">=1.21.1,<1.27" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" [package.extras] -security = ["cryptography (>=1.3.4)", "pyOpenSSL (>=0.14)"] -socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-mock" -version = "1.9.3" +version = "1.12.0" description = "Mock out responses from the requests package" optional = false python-versions = "*" files = [ - {file = "requests-mock-1.9.3.tar.gz", hash = "sha256:8d72abe54546c1fc9696fa1516672f1031d72a55a1d66c85184f972a24ba0eba"}, - {file = "requests_mock-1.9.3-py2.py3-none-any.whl", hash = "sha256:0a2d38a117c08bb78939ec163522976ad59a6b7fdd82b709e23bb98004a44970"}, + {file = "requests-mock-1.12.0.tar.gz", hash = "sha256:4e34f2a2752f0b78397fb414526605d95fcdeab021ac1f26d18960e7eb41f6a8"}, + {file = "requests_mock-1.12.0-py2.py3-none-any.whl", hash = "sha256:4f6fdf956de568e0bac99eee4ad96b391c602e614cc0ad33e7f5c72edd699e70"}, ] [package.dependencies] -requests = ">=2.3,<3" -six = "*" +requests = ">=2.22,<3" [package.extras] fixture = ["fixtures"] -test = ["fixtures", "mock", "purl", "pytest", "sphinx", "testrepository (>=0.0.18)", "testtools"] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] [[package]] name = "tomli" @@ -319,21 +273,22 @@ files = [ [[package]] name = "urllib3" -version = "1.26.16" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = ">=3.8" files = [ - {file = "urllib3-1.26.16-py2.py3-none-any.whl", hash = "sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f"}, - {file = "urllib3-1.26.16.tar.gz", hash = "sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] [metadata] lock-version = "2.0" -python-versions = "^3.9" -content-hash = "13161cded140a3c9476808685981630c07a214edc26a537a9b30031f669903d4" +python-versions = "^3.10" +content-hash = "20348f72d1357218cadf24354d019896c8a68ce1f8d45391d02fa45f0f8adf94" diff --git a/airbyte-ci/connectors/common_utils/pyproject.toml b/airbyte-ci/connectors/common_utils/pyproject.toml index ab21cfab18449..e5f41ef6f1590 100644 --- a/airbyte-ci/connectors/common_utils/pyproject.toml +++ b/airbyte-ci/connectors/common_utils/pyproject.toml @@ -10,14 +10,13 @@ authors = ["Airbyte "] [tool.poetry.dependencies] python = "^3.10" -cryptography = "^3.4.7" -requests = "^2.28.2" -pyjwt = "^2.1.0" +requests = "^2.31.0" +pyjwt = "^2.8.0" [tool.poetry.group.dev.dependencies] requests-mock = "^1.9.3" -pytest = "^7.2.2" +pytest = "^8" [build-system] requires = ["poetry-core>=1.0.0"] @@ -27,6 +26,6 @@ build-backend = "poetry.core.masonry.api" test = "pytest tests" [tool.airbyte_ci] -extra_poetry_groups = ["dev"] +optional_poetry_groups = ["dev"] # Disable poe tasks as tests are not passing ATM poe_tasks = [] diff --git a/airbyte-ci/connectors/common_utils/tests/test_logger.py b/airbyte-ci/connectors/common_utils/tests/test_logger.py index 1d12ad29325c9..23be93de42279 100644 --- a/airbyte-ci/connectors/common_utils/tests/test_logger.py +++ b/airbyte-ci/connectors/common_utils/tests/test_logger.py @@ -19,8 +19,8 @@ def check_output(msg: str, expected_line_number: int, expected_log_level: str): date_time, log_level, line_number, msg = m.groups() assert int(line_number) == expected_line_number - assert expected_log_level == log_level - assert expected_log_level == log_level + assert log_level == expected_log_level + assert log_level == expected_log_level dt = datetime.strptime(date_time, "%d/%m/%Y %H:%M:%S.%f") now = datetime.now() delta = timedelta(seconds=1) diff --git a/airbyte-ci/connectors/connector_ops/README.md b/airbyte-ci/connectors/connector_ops/README.md index 344d985bc717d..6de6139b7256f 100644 --- a/airbyte-ci/connectors/connector_ops/README.md +++ b/airbyte-ci/connectors/connector_ops/README.md @@ -4,43 +4,38 @@ A collection of utilities for working with Airbyte connectors. # Setup -## Prerequisites - -#### Poetry +## Installation -Before you can start working on this project, you will need to have Poetry installed on your system. Please follow the instructions below to install Poetry: +`connector_ops` tools use [Poetry](https://github.com/python-poetry/poetry) to manage dependencies, +and targets Python 3.10 and higher. -1. Open your terminal or command prompt. -2. Install Poetry using the recommended installation method: +Assuming you're in Airbyte repo root: ```bash -curl -sSL https://install.python-poetry.org | POETRY_VERSION=1.5.1 python3 - +cd airbyte-ci/connectors/connector_ops +poetry install ``` -Alternatively, you can use `pip` to install Poetry: - -```bash -pip install --user poetry -``` +## Usage -3. After the installation is complete, close and reopen your terminal to ensure the newly installed `poetry` command is available in your system's PATH. +`connector_ops` provides a set of tools that verify connector characteristics. They're intended to +be used in CI. They will detect the list of connectors that are modified compared to `master` branch +of the repository, and only run checks on them. You can run them locally, too, with +`poetry run TOOL_NAME`. -For more detailed instructions and alternative installation methods, please refer to the official Poetry documentation: https://python-poetry.org/docs/#installation +- `write-review-requirements-file` writes required reviewers github action file. +- `print-mandatory-reviewers` prints out the GitHub comment with required reviewers. -### Using Poetry in the Project +## Contributing to `connector_ops` -Once Poetry is installed, you can use it to manage the project's dependencies and virtual environment. To get started, navigate to the project's root directory in your terminal and follow these steps: +### Running tests +To run tests locally: -## Installation ```bash -poetry install +poetry run pytest ``` +## Changelog -## Testing Locally - -Simply run -```bash -poetry run pytest -``` \ No newline at end of file +- 0.4.0: Removed acceptance test configuration and allowed hosts checks as they're not used. diff --git a/airbyte-ci/connectors/connector_ops/connector_ops/acceptance_test_config_checks.py b/airbyte-ci/connectors/connector_ops/connector_ops/acceptance_test_config_checks.py deleted file mode 100644 index 9bc70e3b17a6a..0000000000000 --- a/airbyte-ci/connectors/connector_ops/connector_ops/acceptance_test_config_checks.py +++ /dev/null @@ -1,42 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import logging -import sys -from typing import List - -from connector_ops import utils - - -def find_connectors_with_bad_strictness_level() -> List[utils.Connector]: - """Check if changed connectors have the expected connector acceptance test strictness level according to their release stage. - 1. Identify changed connectors - 2. Retrieve their release stage from the catalog - 3. Parse their acceptance test config file - 4. Check if the test strictness level matches the strictness level expected for their release stage. - - Returns: - List[utils.Connector]: List of changed connector that are not matching test strictness level expectations. - """ - connectors_with_bad_strictness_level = [] - changed_connector = utils.get_changed_connectors(destination=False, third_party=False) - for connector in changed_connector: - check_for_high_strictness = connector.acceptance_test_config is not None and connector.requires_high_test_strictness_level - if check_for_high_strictness: - try: - assert connector.acceptance_test_config.get("test_strictness_level") == "high" - except AssertionError: - connectors_with_bad_strictness_level.append(connector) - return connectors_with_bad_strictness_level - - -def check_test_strictness_level(): - connectors_with_bad_strictness_level = find_connectors_with_bad_strictness_level() - if connectors_with_bad_strictness_level: - logging.error( - f"The following connectors must enable high test strictness level: {connectors_with_bad_strictness_level}. Please check this documentation for details: https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference/#strictness-level" - ) - sys.exit(1) - else: - sys.exit(0) diff --git a/airbyte-ci/connectors/connector_ops/connector_ops/allowed_hosts_checks.py b/airbyte-ci/connectors/connector_ops/connector_ops/allowed_hosts_checks.py deleted file mode 100644 index 9a74371716ea2..0000000000000 --- a/airbyte-ci/connectors/connector_ops/connector_ops/allowed_hosts_checks.py +++ /dev/null @@ -1,35 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import logging -import sys -from typing import List - -from connector_ops import utils - - -def get_connectors_missing_allowed_hosts() -> List[utils.Connector]: - connectors_missing_allowed_hosts: List[utils.Connector] = [] - changed_connectors = utils.get_changed_connectors(destination=False, third_party=False) - - for connector in changed_connectors: - if connector.requires_allowed_hosts_check: - missing = not connector_has_allowed_hosts(connector) - if missing: - connectors_missing_allowed_hosts.append(connector) - - return connectors_missing_allowed_hosts - - -def connector_has_allowed_hosts(connector: utils.Connector) -> bool: - return connector.allowed_hosts is not None - - -def check_allowed_hosts(): - connectors_missing_allowed_hosts = get_connectors_missing_allowed_hosts() - if connectors_missing_allowed_hosts: - logging.error(f"The following connectors must include allowedHosts: {connectors_missing_allowed_hosts}") - sys.exit(1) - else: - sys.exit(0) diff --git a/airbyte-ci/connectors/connector_ops/connector_ops/required_reviewer_checks.py b/airbyte-ci/connectors/connector_ops/connector_ops/required_reviewer_checks.py index d2575314e6337..07e05c30c5244 100644 --- a/airbyte-ci/connectors/connector_ops/connector_ops/required_reviewer_checks.py +++ b/airbyte-ci/connectors/connector_ops/connector_ops/required_reviewer_checks.py @@ -10,7 +10,7 @@ BACKWARD_COMPATIBILITY_REVIEWERS = {"connector-extensibility"} TEST_STRICTNESS_LEVEL_REVIEWERS = {"connector-extensibility"} BYPASS_REASON_REVIEWERS = {"connector-extensibility"} -STRATEGIC_PYTHON_CONNECTOR_REVIEWERS = {"gl-python"} +STRATEGIC_PYTHON_CONNECTOR_REVIEWERS = {"gl-python", "connector-extensibility"} BREAKING_CHANGE_REVIEWERS = {"breaking-change-reviewers"} REVIEW_REQUIREMENTS_FILE_PATH = ".github/connector_org_review_requirements.yaml" diff --git a/airbyte-ci/connectors/connector_ops/connector_ops/utils.py b/airbyte-ci/connectors/connector_ops/connector_ops/utils.py index 2af725f8a4f2d..87dc326943d9c 100644 --- a/airbyte-ci/connectors/connector_ops/connector_ops/utils.py +++ b/airbyte-ci/connectors/connector_ops/connector_ops/utils.py @@ -51,6 +51,7 @@ def download_catalog(catalog_url): response = requests.get(catalog_url) + response.raise_for_status() return response.json() @@ -555,6 +556,25 @@ def normalization_tag(self) -> Optional[str]: def is_using_poetry(self) -> bool: return Path(self.code_directory / "pyproject.toml").exists() + @property + def is_released(self) -> bool: + """Pull the the OSS registry and check if it the current definition ID and docker image tag are in the registry. + If there is a match it means the connector is released. + We use the OSS registry as the source of truth for released connectors as the cloud registry can be a subset of the OSS registry. + + Returns: + bool: True if the connector is released, False otherwise. + """ + metadata = self.metadata + registry = download_catalog(OSS_CATALOG_URL) + for connector in registry[f"{self.connector_type}s"]: + if ( + connector[f"{self.connector_type}DefinitionId"] == metadata["definitionId"] + and connector["dockerImageTag"] == metadata["dockerImageTag"] + ): + return True + return False + def get_secret_manager(self, gsm_credentials: str): return SecretsManager(connector_name=self.technical_name, gsm_credentials=gsm_credentials) diff --git a/airbyte-ci/connectors/connector_ops/poetry.lock b/airbyte-ci/connectors/connector_ops/poetry.lock index 3c39ea2bfe7d7..2a969f7cce677 100644 --- a/airbyte-ci/connectors/connector_ops/poetry.lock +++ b/airbyte-ci/connectors/connector_ops/poetry.lock @@ -1,14 +1,14 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -197,8 +197,9 @@ develop = false [package.dependencies] click = "^8.1.3" common_utils = {path = "../common_utils", develop = true} +cryptography = ">=42.0" pyyaml = "^6.0" -requests = "^2.28.2" +requests = "^2.31" [package.source] type = "directory" @@ -239,9 +240,8 @@ files = [] develop = true [package.dependencies] -cryptography = "^3.4.7" -pyjwt = "^2.1.0" -requests = "^2.28.2" +pyjwt = "^2.8.0" +requests = "^2.31.0" [package.source] type = "directory" @@ -249,42 +249,57 @@ url = "../common_utils" [[package]] name = "cryptography" -version = "3.4.8" +version = "42.0.5" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "cryptography-3.4.8-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:a00cf305f07b26c351d8d4e1af84ad7501eca8a342dedf24a7acb0e7b7406e14"}, - {file = "cryptography-3.4.8-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:f44d141b8c4ea5eb4dbc9b3ad992d45580c1d22bf5e24363f2fbf50c2d7ae8a7"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0a7dcbcd3f1913f664aca35d47c1331fce738d44ec34b7be8b9d332151b0b01e"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34dae04a0dce5730d8eb7894eab617d8a70d0c97da76b905de9efb7128ad7085"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1eb7bb0df6f6f583dd8e054689def236255161ebbcf62b226454ab9ec663746b"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:9965c46c674ba8cc572bc09a03f4c649292ee73e1b683adb1ce81e82e9a6a0fb"}, - {file = "cryptography-3.4.8-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:3c4129fc3fdc0fa8e40861b5ac0c673315b3c902bbdc05fc176764815b43dd1d"}, - {file = "cryptography-3.4.8-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:695104a9223a7239d155d7627ad912953b540929ef97ae0c34c7b8bf30857e89"}, - {file = "cryptography-3.4.8-cp36-abi3-win32.whl", hash = "sha256:21ca464b3a4b8d8e86ba0ee5045e103a1fcfac3b39319727bc0fc58c09c6aff7"}, - {file = "cryptography-3.4.8-cp36-abi3-win_amd64.whl", hash = "sha256:3520667fda779eb788ea00080124875be18f2d8f0848ec00733c0ec3bb8219fc"}, - {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d2a6e5ef66503da51d2110edf6c403dc6b494cc0082f85db12f54e9c5d4c3ec5"}, - {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a305600e7a6b7b855cd798e00278161b681ad6e9b7eca94c721d5f588ab212af"}, - {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:3fa3a7ccf96e826affdf1a0a9432be74dc73423125c8f96a909e3835a5ef194a"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:d9ec0e67a14f9d1d48dd87a2531009a9b251c02ea42851c060b25c782516ff06"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5b0fbfae7ff7febdb74b574055c7466da334a5371f253732d7e2e7525d570498"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94fff993ee9bc1b2440d3b7243d488c6a3d9724cc2b09cdb297f6a886d040ef7"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:8695456444f277af73a4877db9fc979849cd3ee74c198d04fc0776ebc3db52b9"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:cd65b60cfe004790c795cc35f272e41a3df4631e2fb6b35aa7ac6ef2859d554e"}, - {file = "cryptography-3.4.8.tar.gz", hash = "sha256:94cc5ed4ceaefcbe5bf38c8fba6a21fc1d365bb8fb826ea1688e3370b2e24a1c"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, + {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, + {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, + {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, + {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, + {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = "sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, + {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, ] [package.dependencies] -cffi = ">=1.12" +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] -docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] -docstest = ["doc8", "pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] -pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] -sdist = ["setuptools-rust (>=0.11.4)"] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] +nox = ["nox"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] +sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pytz"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] [[package]] name = "deprecated" @@ -364,18 +379,19 @@ test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre [[package]] name = "google-api-core" -version = "2.17.1" +version = "2.18.0" description = "Google API client core library" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-core-2.17.1.tar.gz", hash = "sha256:9df18a1f87ee0df0bc4eea2770ebc4228392d8cc4066655b320e2cfccb15db95"}, - {file = "google_api_core-2.17.1-py3-none-any.whl", hash = "sha256:610c5b90092c360736baccf17bd3efbcb30dd380e7a6dc28a71059edb8bd0d8e"}, + {file = "google-api-core-2.18.0.tar.gz", hash = "sha256:62d97417bfc674d6cef251e5c4d639a9655e00c45528c4364fbfebb478ce72a9"}, + {file = "google_api_core-2.18.0-py3-none-any.whl", hash = "sha256:5a63aa102e0049abe85b5b88cb9409234c1f70afcda21ce1e40b285b9629c1d6"}, ] [package.dependencies] google-auth = ">=2.14.1,<3.0.dev0" googleapis-common-protos = ">=1.56.2,<2.0.dev0" +proto-plus = ">=1.22.3,<2.0.0dev" protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" requests = ">=2.18.0,<3.0.0.dev0" @@ -386,13 +402,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-auth" -version = "2.28.0" +version = "2.29.0" description = "Google Authentication Library" optional = false python-versions = ">=3.7" files = [ - {file = "google-auth-2.28.0.tar.gz", hash = "sha256:3cfc1b6e4e64797584fb53fc9bd0b7afa9b7c0dba2004fa7dcc9349e58cc3195"}, - {file = "google_auth-2.28.0-py2.py3-none-any.whl", hash = "sha256:7634d29dcd1e101f5226a23cbc4a0c6cda6394253bf80e281d9c5c6797869c53"}, + {file = "google-auth-2.29.0.tar.gz", hash = "sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360"}, + {file = "google_auth-2.29.0-py2.py3-none-any.whl", hash = "sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415"}, ] [package.dependencies] @@ -427,18 +443,18 @@ grpc = ["grpcio (>=1.38.0,<2.0dev)", "grpcio-status (>=1.38.0,<2.0.dev0)"] [[package]] name = "google-cloud-storage" -version = "2.14.0" +version = "2.16.0" description = "Google Cloud Storage API client library" optional = false python-versions = ">=3.7" files = [ - {file = "google-cloud-storage-2.14.0.tar.gz", hash = "sha256:2d23fcf59b55e7b45336729c148bb1c464468c69d5efbaee30f7201dd90eb97e"}, - {file = "google_cloud_storage-2.14.0-py2.py3-none-any.whl", hash = "sha256:8641243bbf2a2042c16a6399551fbb13f062cbc9a2de38d6c0bb5426962e9dbd"}, + {file = "google-cloud-storage-2.16.0.tar.gz", hash = "sha256:dda485fa503710a828d01246bd16ce9db0823dc51bbca742ce96a6817d58669f"}, + {file = "google_cloud_storage-2.16.0-py2.py3-none-any.whl", hash = "sha256:91a06b96fb79cf9cdfb4e759f178ce11ea885c79938f89590344d079305f5852"}, ] [package.dependencies] -google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0dev" -google-auth = ">=2.23.3,<3.0dev" +google-api-core = ">=2.15.0,<3.0.0dev" +google-auth = ">=2.26.1,<3.0dev" google-cloud-core = ">=2.3.0,<3.0dev" google-crc32c = ">=1.0,<2.0dev" google-resumable-media = ">=2.6.0" @@ -547,13 +563,13 @@ requests = ["requests (>=2.18.0,<3.0.0dev)"] [[package]] name = "googleapis-common-protos" -version = "1.62.0" +version = "1.63.0" description = "Common protobufs used in Google APIs" optional = false python-versions = ">=3.7" files = [ - {file = "googleapis-common-protos-1.62.0.tar.gz", hash = "sha256:83f0ece9f94e5672cced82f592d2a5edf527a96ed1794f0bab36d5735c996277"}, - {file = "googleapis_common_protos-1.62.0-py2.py3-none-any.whl", hash = "sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07"}, + {file = "googleapis-common-protos-1.63.0.tar.gz", hash = "sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e"}, + {file = "googleapis_common_protos-1.63.0-py2.py3-none-any.whl", hash = "sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632"}, ] [package.dependencies] @@ -666,51 +682,51 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] name = "pandas" -version = "2.2.0" +version = "2.2.1" description = "Powerful data structures for data analysis, time series, and statistics" optional = false python-versions = ">=3.9" files = [ - {file = "pandas-2.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8108ee1712bb4fa2c16981fba7e68b3f6ea330277f5ca34fa8d557e986a11670"}, - {file = "pandas-2.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:736da9ad4033aeab51d067fc3bd69a0ba36f5a60f66a527b3d72e2030e63280a"}, - {file = "pandas-2.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38e0b4fc3ddceb56ec8a287313bc22abe17ab0eb184069f08fc6a9352a769b18"}, - {file = "pandas-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20404d2adefe92aed3b38da41d0847a143a09be982a31b85bc7dd565bdba0f4e"}, - {file = "pandas-2.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7ea3ee3f125032bfcade3a4cf85131ed064b4f8dd23e5ce6fa16473e48ebcaf5"}, - {file = "pandas-2.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9670b3ac00a387620489dfc1bca66db47a787f4e55911f1293063a78b108df1"}, - {file = "pandas-2.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:5a946f210383c7e6d16312d30b238fd508d80d927014f3b33fb5b15c2f895430"}, - {file = "pandas-2.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a1b438fa26b208005c997e78672f1aa8138f67002e833312e6230f3e57fa87d5"}, - {file = "pandas-2.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8ce2fbc8d9bf303ce54a476116165220a1fedf15985b09656b4b4275300e920b"}, - {file = "pandas-2.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2707514a7bec41a4ab81f2ccce8b382961a29fbe9492eab1305bb075b2b1ff4f"}, - {file = "pandas-2.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85793cbdc2d5bc32620dc8ffa715423f0c680dacacf55056ba13454a5be5de88"}, - {file = "pandas-2.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cfd6c2491dc821b10c716ad6776e7ab311f7df5d16038d0b7458bc0b67dc10f3"}, - {file = "pandas-2.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a146b9dcacc3123aa2b399df1a284de5f46287a4ab4fbfc237eac98a92ebcb71"}, - {file = "pandas-2.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:fbc1b53c0e1fdf16388c33c3cca160f798d38aea2978004dd3f4d3dec56454c9"}, - {file = "pandas-2.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a41d06f308a024981dcaa6c41f2f2be46a6b186b902c94c2674e8cb5c42985bc"}, - {file = "pandas-2.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:159205c99d7a5ce89ecfc37cb08ed179de7783737cea403b295b5eda8e9c56d1"}, - {file = "pandas-2.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb1e1f3861ea9132b32f2133788f3b14911b68102d562715d71bd0013bc45440"}, - {file = "pandas-2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:761cb99b42a69005dec2b08854fb1d4888fdf7b05db23a8c5a099e4b886a2106"}, - {file = "pandas-2.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a20628faaf444da122b2a64b1e5360cde100ee6283ae8effa0d8745153809a2e"}, - {file = "pandas-2.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f5be5d03ea2073627e7111f61b9f1f0d9625dc3c4d8dda72cc827b0c58a1d042"}, - {file = "pandas-2.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:a626795722d893ed6aacb64d2401d017ddc8a2341b49e0384ab9bf7112bdec30"}, - {file = "pandas-2.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9f66419d4a41132eb7e9a73dcec9486cf5019f52d90dd35547af11bc58f8637d"}, - {file = "pandas-2.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:57abcaeda83fb80d447f28ab0cc7b32b13978f6f733875ebd1ed14f8fbc0f4ab"}, - {file = "pandas-2.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e60f1f7dba3c2d5ca159e18c46a34e7ca7247a73b5dd1a22b6d59707ed6b899a"}, - {file = "pandas-2.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb61dc8567b798b969bcc1fc964788f5a68214d333cade8319c7ab33e2b5d88a"}, - {file = "pandas-2.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:52826b5f4ed658fa2b729264d63f6732b8b29949c7fd234510d57c61dbeadfcd"}, - {file = "pandas-2.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bde2bc699dbd80d7bc7f9cab1e23a95c4375de615860ca089f34e7c64f4a8de7"}, - {file = "pandas-2.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:3de918a754bbf2da2381e8a3dcc45eede8cd7775b047b923f9006d5f876802ae"}, - {file = "pandas-2.2.0.tar.gz", hash = "sha256:30b83f7c3eb217fb4d1b494a57a2fda5444f17834f5df2de6b2ffff68dc3c8e2"}, + {file = "pandas-2.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8df8612be9cd1c7797c93e1c5df861b2ddda0b48b08f2c3eaa0702cf88fb5f88"}, + {file = "pandas-2.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0f573ab277252ed9aaf38240f3b54cfc90fff8e5cab70411ee1d03f5d51f3944"}, + {file = "pandas-2.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f02a3a6c83df4026e55b63c1f06476c9aa3ed6af3d89b4f04ea656ccdaaaa359"}, + {file = "pandas-2.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c38ce92cb22a4bea4e3929429aa1067a454dcc9c335799af93ba9be21b6beb51"}, + {file = "pandas-2.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c2ce852e1cf2509a69e98358e8458775f89599566ac3775e70419b98615f4b06"}, + {file = "pandas-2.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:53680dc9b2519cbf609c62db3ed7c0b499077c7fefda564e330286e619ff0dd9"}, + {file = "pandas-2.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:94e714a1cca63e4f5939cdce5f29ba8d415d85166be3441165edd427dc9f6bc0"}, + {file = "pandas-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f821213d48f4ab353d20ebc24e4faf94ba40d76680642fb7ce2ea31a3ad94f9b"}, + {file = "pandas-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c70e00c2d894cb230e5c15e4b1e1e6b2b478e09cf27cc593a11ef955b9ecc81a"}, + {file = "pandas-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e97fbb5387c69209f134893abc788a6486dbf2f9e511070ca05eed4b930b1b02"}, + {file = "pandas-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101d0eb9c5361aa0146f500773395a03839a5e6ecde4d4b6ced88b7e5a1a6403"}, + {file = "pandas-2.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7d2ed41c319c9fb4fd454fe25372028dfa417aacb9790f68171b2e3f06eae8cd"}, + {file = "pandas-2.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:af5d3c00557d657c8773ef9ee702c61dd13b9d7426794c9dfeb1dc4a0bf0ebc7"}, + {file = "pandas-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:06cf591dbaefb6da9de8472535b185cba556d0ce2e6ed28e21d919704fef1a9e"}, + {file = "pandas-2.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:88ecb5c01bb9ca927ebc4098136038519aa5d66b44671861ffab754cae75102c"}, + {file = "pandas-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:04f6ec3baec203c13e3f8b139fb0f9f86cd8c0b94603ae3ae8ce9a422e9f5bee"}, + {file = "pandas-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a935a90a76c44fe170d01e90a3594beef9e9a6220021acfb26053d01426f7dc2"}, + {file = "pandas-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c391f594aae2fd9f679d419e9a4d5ba4bce5bb13f6a989195656e7dc4b95c8f0"}, + {file = "pandas-2.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9d1265545f579edf3f8f0cb6f89f234f5e44ba725a34d86535b1a1d38decbccc"}, + {file = "pandas-2.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:11940e9e3056576ac3244baef2fedade891977bcc1cb7e5cc8f8cc7d603edc89"}, + {file = "pandas-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:4acf681325ee1c7f950d058b05a820441075b0dd9a2adf5c4835b9bc056bf4fb"}, + {file = "pandas-2.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9bd8a40f47080825af4317d0340c656744f2bfdb6819f818e6ba3cd24c0e1397"}, + {file = "pandas-2.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:df0c37ebd19e11d089ceba66eba59a168242fc6b7155cba4ffffa6eccdfb8f16"}, + {file = "pandas-2.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:739cc70eaf17d57608639e74d63387b0d8594ce02f69e7a0b046f117974b3019"}, + {file = "pandas-2.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9d3558d263073ed95e46f4650becff0c5e1ffe0fc3a015de3c79283dfbdb3df"}, + {file = "pandas-2.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4aa1d8707812a658debf03824016bf5ea0d516afdea29b7dc14cf687bc4d4ec6"}, + {file = "pandas-2.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:76f27a809cda87e07f192f001d11adc2b930e93a2b0c4a236fde5429527423be"}, + {file = "pandas-2.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:1ba21b1d5c0e43416218db63037dbe1a01fc101dc6e6024bcad08123e48004ab"}, + {file = "pandas-2.2.1.tar.gz", hash = "sha256:0ab90f87093c13f3e8fa45b48ba9f39181046e8f3317d3aadb2fffbb1b978572"}, ] [package.dependencies] @@ -742,6 +758,7 @@ parquet = ["pyarrow (>=10.0.1)"] performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] plot = ["matplotlib (>=3.6.3)"] postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] spss = ["pyreadstat (>=1.2.0)"] sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] @@ -762,6 +779,23 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "proto-plus" +version = "1.23.0" +description = "Beautiful, Pythonic protocol buffers." +optional = false +python-versions = ">=3.6" +files = [ + {file = "proto-plus-1.23.0.tar.gz", hash = "sha256:89075171ef11988b3fa157f5dbd8b9cf09d65fffee97e29ce403cd8defba19d2"}, + {file = "proto_plus-1.23.0-py3-none-any.whl", hash = "sha256:a829c79e619e1cf632de091013a4173deed13a55f326ef84f05af6f50ff4c82c"}, +] + +[package.dependencies] +protobuf = ">=3.19.0,<5.0.0dev" + +[package.extras] +testing = ["google-api-core[grpc] (>=1.31.5)"] + [[package]] name = "protobuf" version = "4.25.3" @@ -784,28 +818,28 @@ files = [ [[package]] name = "pyasn1" -version = "0.5.1" +version = "0.6.0" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +python-versions = ">=3.8" files = [ - {file = "pyasn1-0.5.1-py2.py3-none-any.whl", hash = "sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58"}, - {file = "pyasn1-0.5.1.tar.gz", hash = "sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c"}, + {file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"}, + {file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"}, ] [[package]] name = "pyasn1-modules" -version = "0.3.0" +version = "0.4.0" description = "A collection of ASN.1-based protocols modules" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +python-versions = ">=3.8" files = [ - {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"}, - {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"}, + {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"}, + {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"}, ] [package.dependencies] -pyasn1 = ">=0.4.6,<0.6.0" +pyasn1 = ">=0.4.6,<0.7.0" [[package]] name = "pycparser" @@ -964,13 +998,13 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] [[package]] name = "pytest" -version = "7.4.4" +version = "8.1.1" description = "pytest: simple powerful testing with Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, - {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, + {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"}, + {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"}, ] [package.dependencies] @@ -978,38 +1012,38 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" -pluggy = ">=0.12,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} +pluggy = ">=1.4,<2.0" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-mock" -version = "3.12.0" +version = "3.14.0" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, - {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, ] [package.dependencies] -pytest = ">=5.0" +pytest = ">=6.2.5" [package.extras] dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -1051,6 +1085,7 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -1108,13 +1143,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rich" -version = "13.7.0" +version = "13.7.1" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.7.0" files = [ - {file = "rich-13.7.0-py3-none-any.whl", hash = "sha256:6da14c108c4866ee9520bbffa71f6fe3962e193b7da68720583850cd4548e235"}, - {file = "rich-13.7.0.tar.gz", hash = "sha256:5cb5123b5cf9ee70584244246816e9114227e0b98ad9176eede6ad54bf5403fa"}, + {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, + {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"}, ] [package.dependencies] @@ -1184,13 +1219,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -1206,13 +1241,13 @@ files = [ [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1303,4 +1338,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "d747e06c3998e49136a4bf3aa4f02e51e99b190b173e8f5d747579d262c2cf01" +content-hash = "d9d20ad9588f537adf28d9139dcd915491778dba6375258bcf51bd833a538503" diff --git a/airbyte-ci/connectors/connector_ops/pyproject.toml b/airbyte-ci/connectors/connector_ops/pyproject.toml index 8aae2d66bbb3e..0ec46a0b32dc8 100644 --- a/airbyte-ci/connectors/connector_ops/pyproject.toml +++ b/airbyte-ci/connectors/connector_ops/pyproject.toml @@ -4,14 +4,14 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "connector_ops" -version = "0.3.4" +version = "0.4.0" description = "Packaged maintained by the connector operations team to perform CI for connectors" authors = ["Airbyte "] [tool.poetry.dependencies] python = "^3.10" click = "^8.1.3" -requests = "^2.28.2" +requests = "^2.31" PyYAML = "^6.0" GitPython = "^3.1.29" pydantic = "^1.9" @@ -24,20 +24,17 @@ pandas = "^2.0.3" simpleeval = "^0.9.13" [tool.poetry.group.dev.dependencies] -pytest = "^7.4.0" +pytest = "^8" pytest-mock = "^3.10.0" freezegun = "^1.1.0" [tool.poetry.scripts] -check-test-strictness-level = "connector_ops.acceptance_test_config_checks:check_test_strictness_level" write-review-requirements-file = "connector_ops.required_reviewer_checks:write_review_requirements_file" print-mandatory-reviewers = "connector_ops.required_reviewer_checks:print_mandatory_reviewers" -allowed-hosts-checks = "connector_ops.allowed_hosts_checks:check_allowed_hosts" -run-qa-checks = "connector_ops.qa_checks:run_qa_checks" [tool.poe.tasks] test = "pytest tests" [tool.airbyte_ci] -extra_poetry_groups = ["dev"] +optional_poetry_groups = ["dev"] poe_tasks = ["test"] diff --git a/airbyte-ci/connectors/connectors_qa/README.md b/airbyte-ci/connectors/connectors_qa/README.md index 3dc2ffa29830d..90328f446e07e 100644 --- a/airbyte-ci/connectors/connectors_qa/README.md +++ b/airbyte-ci/connectors/connectors_qa/README.md @@ -1,25 +1,25 @@ # Connectors QA This package has two main purposes: -* Running QA checks on connectors. -* Generating the QA checks documentation that are run on connectors. - +- Running assets and metadata verification checks on connectors. +- Generating the QA checks documentation that are run on connectors. ## Usage ### Install +Connectors QA is an internal Airbyte package that is not published to PyPI. To install it, run the +following command from this directory: + ```bash pipx install . ``` This will make `connectors-qa` available in your `PATH`. - Feel free to run `connectors-qa --help` to see the available commands and options. - ### Examples #### Running QA checks on one or more connectors: @@ -28,6 +28,7 @@ Feel free to run `connectors-qa --help` to see the available commands and option # This command must run from the root of the Airbyte repo connectors-qa run --name=source-faker --name=source-google-sheets ``` + #### Running QA checks on all connectors: ```bash @@ -63,19 +64,25 @@ connectors-qa generate-documentation qa_checks.md ## Development ```bash -poetry install +poetry install --with dev ``` ### Dependencies + This package uses two local dependencies: -* [`connector_ops`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/connector_ops): To interact with the `Connector` object. -* [`metadata_service/lib`]((https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/metadata_service/lib)): To validate the metadata of the connectors. + +- [`connector_ops`](../connector_ops): To interact with the `Connector` object. +- [`metadata_service/lib`](../metadata_service/lib): To validate the metadata of the connectors. ### Adding a new QA check -To add a new QA check, you have to create add new class in one of the `checks` module. This class must inherit from `models.Check` and implement the `_run` method. Then, you need to add an instance of this class to the `ENABLED_CHECKS` list of the module. +To add a new QA check, you have to create add new class in one of the `checks` module. This class +must inherit from `models.Check` and implement the `_run` method. Then, you need to add an instance +of this class to the `ENABLED_CHECKS` list of the module. + +**Please run the `generate-documentation` command to update the documentation with the new check and +commit it in your PR.**: -**Please run the `generate-documentation` command to update the documentation with the new check and commit it in your PR.**: ```bash # From airbyte repo root connectors-qa generate-documentation docs/contributing-to-airbyte/resources/qa-checks.md @@ -98,18 +105,32 @@ poe type_check ```bash poe lint ``` - ## Changelog +### 1.2.0 + +Added `ValidateBreakingChangesDeadlines` check that verifies the minimal compliance of breaking change rollout deadline. + +### 1.1.0 +Introduced the `Check.run_on_released_connectors` flag. + +### 1.0.4 + +Adds `htmlcov` to list of ignored directories for `CheckConnectorUsesHTTPSOnly` check. + ### 1.0.3 -Disable `CheckDocumentationStructure` for now. + +Disable `CheckDocumentationStructure` for now. ### 1.0.2 + Fix access to connector types: it should be accessed from the `Connector.connector_type` attribute. ### 1.0.1 -* Add `applies_to_connector_types` attribute to `Check` class to specify the connector types that the check applies to. -* Make `CheckPublishToPyPiIsEnabled` run on source connectors only. + +- Add `applies_to_connector_types` attribute to `Check` class to specify the connector types that + the check applies to. +- Make `CheckPublishToPyPiIsEnabled` run on source connectors only. ### 1.0.0 -Initial release of `connectors-qa` package. \ No newline at end of file +Initial release of `connectors-qa` package. diff --git a/airbyte-ci/connectors/connectors_qa/poetry.lock b/airbyte-ci/connectors/connectors_qa/poetry.lock index 1d13efbb0bae4..63ccab13f98f5 100644 --- a/airbyte-ci/connectors/connectors_qa/poetry.lock +++ b/airbyte-ci/connectors/connectors_qa/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "airbyte-connectors-base-images" @@ -10,6 +10,7 @@ files = [] develop = false [package.dependencies] +beartype = "<0.17.2" connector-ops = {path = "../connector_ops", develop = true} dagger-io = "==0.9.6" gitpython = "^3.1.35" @@ -57,13 +58,13 @@ trio = ["trio (>=0.23)"] [[package]] name = "asyncclick" -version = "8.1.7.1" +version = "8.1.7.2" description = "Composable command line interface toolkit, async version" optional = false python-versions = ">=3.7" files = [ - {file = "asyncclick-8.1.7.1-py3-none-any.whl", hash = "sha256:e0fea5f0223ac45cfc26153cc80a58cc65fc077ac8de79be49248c918e8c3422"}, - {file = "asyncclick-8.1.7.1.tar.gz", hash = "sha256:a47b61258a689212cf9463fbf3b4cc52d05bfd03185f6ead2315fc03fd17ef75"}, + {file = "asyncclick-8.1.7.2-py3-none-any.whl", hash = "sha256:1ab940b04b22cb89b5b400725132b069d01b0c3472a9702c7a2c9d5d007ded02"}, + {file = "asyncclick-8.1.7.2.tar.gz", hash = "sha256:219ea0f29ccdc1bb4ff43bcab7ce0769ac6d48a04f997b43ec6bee99a222daa0"}, ] [package.dependencies] @@ -116,13 +117,13 @@ files = [ [[package]] name = "beartype" -version = "0.17.2" +version = "0.17.1" description = "Unbearably fast runtime type checking in pure Python." optional = false python-versions = ">=3.8.0" files = [ - {file = "beartype-0.17.2-py3-none-any.whl", hash = "sha256:c22b21e1f785cfcf5c4d3d13070f532b6243a3ad67e68d2298ff08d539847dce"}, - {file = "beartype-0.17.2.tar.gz", hash = "sha256:e911e1ae7de4bccd15745f7643609d8732f64de5c2fb844e89cbbed1c5a8d495"}, + {file = "beartype-0.17.1-py3-none-any.whl", hash = "sha256:583deb076e312f5acc2e2928706af2facab1f4282be775ee619e6f42c290f423"}, + {file = "beartype-0.17.1.tar.gz", hash = "sha256:001df1ce51c76f0a21c2183215b26254b667fd8b688a6cbe8f013907cdaaf9b3"}, ] [package.extras] @@ -171,13 +172,13 @@ wcwidth = ">=0.1.4" [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -391,8 +392,9 @@ develop = false [package.dependencies] click = "^8.1.3" common_utils = {path = "../common_utils", develop = true} +cryptography = ">=42.0" pyyaml = "^6.0" -requests = "^2.28.2" +requests = "^2.31" [package.source] type = "directory" @@ -433,9 +435,8 @@ files = [] develop = true [package.dependencies] -cryptography = "^3.4.7" -pyjwt = "^2.1.0" -requests = "^2.28.2" +pyjwt = "^2.8.0" +requests = "^2.31.0" [package.source] type = "directory" @@ -443,7 +444,7 @@ url = "../common_utils" [[package]] name = "connector-ops" -version = "0.3.3" +version = "0.3.4" description = "Packaged maintained by the connector operations team to perform CI for connectors" optional = false python-versions = "^3.10" @@ -460,7 +461,7 @@ pydantic = "^1.9" pydash = "^6.0.2" PyGithub = "^1.58.0" PyYAML = "^6.0" -requests = "^2.28.2" +requests = "^2.31" rich = "^13.0.0" simpleeval = "^0.9.13" @@ -470,42 +471,57 @@ url = "../connector_ops" [[package]] name = "cryptography" -version = "3.4.8" +version = "42.0.5" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "cryptography-3.4.8-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:a00cf305f07b26c351d8d4e1af84ad7501eca8a342dedf24a7acb0e7b7406e14"}, - {file = "cryptography-3.4.8-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:f44d141b8c4ea5eb4dbc9b3ad992d45580c1d22bf5e24363f2fbf50c2d7ae8a7"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0a7dcbcd3f1913f664aca35d47c1331fce738d44ec34b7be8b9d332151b0b01e"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34dae04a0dce5730d8eb7894eab617d8a70d0c97da76b905de9efb7128ad7085"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1eb7bb0df6f6f583dd8e054689def236255161ebbcf62b226454ab9ec663746b"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:9965c46c674ba8cc572bc09a03f4c649292ee73e1b683adb1ce81e82e9a6a0fb"}, - {file = "cryptography-3.4.8-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:3c4129fc3fdc0fa8e40861b5ac0c673315b3c902bbdc05fc176764815b43dd1d"}, - {file = "cryptography-3.4.8-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:695104a9223a7239d155d7627ad912953b540929ef97ae0c34c7b8bf30857e89"}, - {file = "cryptography-3.4.8-cp36-abi3-win32.whl", hash = "sha256:21ca464b3a4b8d8e86ba0ee5045e103a1fcfac3b39319727bc0fc58c09c6aff7"}, - {file = "cryptography-3.4.8-cp36-abi3-win_amd64.whl", hash = "sha256:3520667fda779eb788ea00080124875be18f2d8f0848ec00733c0ec3bb8219fc"}, - {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d2a6e5ef66503da51d2110edf6c403dc6b494cc0082f85db12f54e9c5d4c3ec5"}, - {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a305600e7a6b7b855cd798e00278161b681ad6e9b7eca94c721d5f588ab212af"}, - {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:3fa3a7ccf96e826affdf1a0a9432be74dc73423125c8f96a909e3835a5ef194a"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:d9ec0e67a14f9d1d48dd87a2531009a9b251c02ea42851c060b25c782516ff06"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5b0fbfae7ff7febdb74b574055c7466da334a5371f253732d7e2e7525d570498"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94fff993ee9bc1b2440d3b7243d488c6a3d9724cc2b09cdb297f6a886d040ef7"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:8695456444f277af73a4877db9fc979849cd3ee74c198d04fc0776ebc3db52b9"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:cd65b60cfe004790c795cc35f272e41a3df4631e2fb6b35aa7ac6ef2859d554e"}, - {file = "cryptography-3.4.8.tar.gz", hash = "sha256:94cc5ed4ceaefcbe5bf38c8fba6a21fc1d365bb8fb826ea1688e3370b2e24a1c"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, + {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, + {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, + {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, + {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, + {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = "sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, + {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, ] [package.dependencies] -cffi = ">=1.12" +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] -docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] -docstest = ["doc8", "pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] -pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] -sdist = ["setuptools-rust (>=0.11.4)"] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] +nox = ["nox"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] +sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pytz"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] [[package]] name = "dagger-io" @@ -577,12 +593,13 @@ test = ["pytest (>=6)"] [[package]] name = "future" -version = "0.18.3" +version = "1.0.0" description = "Clean single-source support for Python 3 and 2" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ - {file = "future-0.18.3.tar.gz", hash = "sha256:34a17436ed1e96697a86f9de3d15a3b0be01d8bc8de9c1dffd59fb8234ed5307"}, + {file = "future-1.0.0-py3-none-any.whl", hash = "sha256:929292d34f5872e70396626ef385ec22355a1fae8ad29e1a734c3e43f9fbc216"}, + {file = "future-1.0.0.tar.gz", hash = "sha256:bd2968309307861edae1458a4f8a4f3598c03be43b97521076aebf5d94c07b05"}, ] [[package]] @@ -690,35 +707,35 @@ beautifulsoup4 = "*" [[package]] name = "google-api-core" -version = "2.17.1" +version = "2.8.0" description = "Google API client core library" optional = false -python-versions = ">=3.7" +python-versions = ">=3.6" files = [ - {file = "google-api-core-2.17.1.tar.gz", hash = "sha256:9df18a1f87ee0df0bc4eea2770ebc4228392d8cc4066655b320e2cfccb15db95"}, - {file = "google_api_core-2.17.1-py3-none-any.whl", hash = "sha256:610c5b90092c360736baccf17bd3efbcb30dd380e7a6dc28a71059edb8bd0d8e"}, + {file = "google-api-core-2.8.0.tar.gz", hash = "sha256:065bb8e11c605fd232707ae50963dc1c8af5b3c95b4568887515985e6c1156b3"}, + {file = "google_api_core-2.8.0-py3-none-any.whl", hash = "sha256:1b9f59236ce1bae9a687c1d4f22957e79a2669e53d032893f6bf0fca54f6931d"}, ] [package.dependencies] -google-auth = ">=2.14.1,<3.0.dev0" -googleapis-common-protos = ">=1.56.2,<2.0.dev0" -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" -requests = ">=2.18.0,<3.0.0.dev0" +google-auth = ">=1.25.0,<3.0dev" +googleapis-common-protos = ">=1.52.0,<2.0dev" +protobuf = ">=3.12.0" +requests = ">=2.18.0,<3.0.0dev" [package.extras] -grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"] -grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] -grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] +grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio-status (>=1.33.2,<2.0dev)"] +grpcgcp = ["grpcio-gcp (>=0.2.2)"] +grpcio-gcp = ["grpcio-gcp (>=0.2.2)"] [[package]] name = "google-auth" -version = "2.28.0" +version = "2.29.0" description = "Google Authentication Library" optional = false python-versions = ">=3.7" files = [ - {file = "google-auth-2.28.0.tar.gz", hash = "sha256:3cfc1b6e4e64797584fb53fc9bd0b7afa9b7c0dba2004fa7dcc9349e58cc3195"}, - {file = "google_auth-2.28.0-py2.py3-none-any.whl", hash = "sha256:7634d29dcd1e101f5226a23cbc4a0c6cda6394253bf80e281d9c5c6797869c53"}, + {file = "google-auth-2.29.0.tar.gz", hash = "sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360"}, + {file = "google_auth-2.29.0-py2.py3-none-any.whl", hash = "sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415"}, ] [package.dependencies] @@ -890,20 +907,20 @@ requests = ["requests (>=2.18.0,<3.0.0dev)"] [[package]] name = "googleapis-common-protos" -version = "1.62.0" +version = "1.56.1" description = "Common protobufs used in Google APIs" optional = false -python-versions = ">=3.7" +python-versions = ">=3.6" files = [ - {file = "googleapis-common-protos-1.62.0.tar.gz", hash = "sha256:83f0ece9f94e5672cced82f592d2a5edf527a96ed1794f0bab36d5735c996277"}, - {file = "googleapis_common_protos-1.62.0-py2.py3-none-any.whl", hash = "sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07"}, + {file = "googleapis-common-protos-1.56.1.tar.gz", hash = "sha256:6b5ee59dc646eb61a8eb65ee1db186d3df6687c8804830024f32573298bca19b"}, + {file = "googleapis_common_protos-1.56.1-py2.py3-none-any.whl", hash = "sha256:ddcd955b5bb6589368f659fa475373faa1ed7d09cde5ba25e88513d87007e174"}, ] [package.dependencies] -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" +protobuf = ">=3.15.0" [package.extras] -grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] +grpc = ["grpcio (>=1.0.0)"] [[package]] name = "gql" @@ -976,69 +993,69 @@ oauth2client = ">=1.4.11" [[package]] name = "grpcio" -version = "1.60.1" +version = "1.62.1" description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.7" files = [ - {file = "grpcio-1.60.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:14e8f2c84c0832773fb3958240c69def72357bc11392571f87b2d7b91e0bb092"}, - {file = "grpcio-1.60.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:33aed0a431f5befeffd9d346b0fa44b2c01aa4aeae5ea5b2c03d3e25e0071216"}, - {file = "grpcio-1.60.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:fead980fbc68512dfd4e0c7b1f5754c2a8e5015a04dea454b9cada54a8423525"}, - {file = "grpcio-1.60.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:082081e6a36b6eb5cf0fd9a897fe777dbb3802176ffd08e3ec6567edd85bc104"}, - {file = "grpcio-1.60.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55ccb7db5a665079d68b5c7c86359ebd5ebf31a19bc1a91c982fd622f1e31ff2"}, - {file = "grpcio-1.60.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9b54577032d4f235452f77a83169b6527bf4b77d73aeada97d45b2aaf1bf5ce0"}, - {file = "grpcio-1.60.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7d142bcd604166417929b071cd396aa13c565749a4c840d6c702727a59d835eb"}, - {file = "grpcio-1.60.1-cp310-cp310-win32.whl", hash = "sha256:2a6087f234cb570008a6041c8ffd1b7d657b397fdd6d26e83d72283dae3527b1"}, - {file = "grpcio-1.60.1-cp310-cp310-win_amd64.whl", hash = "sha256:f2212796593ad1d0235068c79836861f2201fc7137a99aa2fea7beeb3b101177"}, - {file = "grpcio-1.60.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:79ae0dc785504cb1e1788758c588c711f4e4a0195d70dff53db203c95a0bd303"}, - {file = "grpcio-1.60.1-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:4eec8b8c1c2c9b7125508ff7c89d5701bf933c99d3910e446ed531cd16ad5d87"}, - {file = "grpcio-1.60.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:8c9554ca8e26241dabe7951aa1fa03a1ba0856688ecd7e7bdbdd286ebc272e4c"}, - {file = "grpcio-1.60.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91422ba785a8e7a18725b1dc40fbd88f08a5bb4c7f1b3e8739cab24b04fa8a03"}, - {file = "grpcio-1.60.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cba6209c96828711cb7c8fcb45ecef8c8859238baf15119daa1bef0f6c84bfe7"}, - {file = "grpcio-1.60.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c71be3f86d67d8d1311c6076a4ba3b75ba5703c0b856b4e691c9097f9b1e8bd2"}, - {file = "grpcio-1.60.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:af5ef6cfaf0d023c00002ba25d0751e5995fa0e4c9eec6cd263c30352662cbce"}, - {file = "grpcio-1.60.1-cp311-cp311-win32.whl", hash = "sha256:a09506eb48fa5493c58f946c46754ef22f3ec0df64f2b5149373ff31fb67f3dd"}, - {file = "grpcio-1.60.1-cp311-cp311-win_amd64.whl", hash = "sha256:49c9b6a510e3ed8df5f6f4f3c34d7fbf2d2cae048ee90a45cd7415abab72912c"}, - {file = "grpcio-1.60.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:b58b855d0071575ea9c7bc0d84a06d2edfbfccec52e9657864386381a7ce1ae9"}, - {file = "grpcio-1.60.1-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:a731ac5cffc34dac62053e0da90f0c0b8560396a19f69d9703e88240c8f05858"}, - {file = "grpcio-1.60.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:cf77f8cf2a651fbd869fbdcb4a1931464189cd210abc4cfad357f1cacc8642a6"}, - {file = "grpcio-1.60.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c557e94e91a983e5b1e9c60076a8fd79fea1e7e06848eb2e48d0ccfb30f6e073"}, - {file = "grpcio-1.60.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:069fe2aeee02dfd2135d562d0663fe70fbb69d5eed6eb3389042a7e963b54de8"}, - {file = "grpcio-1.60.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb0af13433dbbd1c806e671d81ec75bd324af6ef75171fd7815ca3074fe32bfe"}, - {file = "grpcio-1.60.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2f44c32aef186bbba254129cea1df08a20be414144ac3bdf0e84b24e3f3b2e05"}, - {file = "grpcio-1.60.1-cp312-cp312-win32.whl", hash = "sha256:a212e5dea1a4182e40cd3e4067ee46be9d10418092ce3627475e995cca95de21"}, - {file = "grpcio-1.60.1-cp312-cp312-win_amd64.whl", hash = "sha256:6e490fa5f7f5326222cb9f0b78f207a2b218a14edf39602e083d5f617354306f"}, - {file = "grpcio-1.60.1-cp37-cp37m-linux_armv7l.whl", hash = "sha256:4216e67ad9a4769117433814956031cb300f85edc855252a645a9a724b3b6594"}, - {file = "grpcio-1.60.1-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:73e14acd3d4247169955fae8fb103a2b900cfad21d0c35f0dcd0fdd54cd60367"}, - {file = "grpcio-1.60.1-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:6ecf21d20d02d1733e9c820fb5c114c749d888704a7ec824b545c12e78734d1c"}, - {file = "grpcio-1.60.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33bdea30dcfd4f87b045d404388469eb48a48c33a6195a043d116ed1b9a0196c"}, - {file = "grpcio-1.60.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53b69e79d00f78c81eecfb38f4516080dc7f36a198b6b37b928f1c13b3c063e9"}, - {file = "grpcio-1.60.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:39aa848794b887120b1d35b1b994e445cc028ff602ef267f87c38122c1add50d"}, - {file = "grpcio-1.60.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:72153a0d2e425f45b884540a61c6639436ddafa1829a42056aa5764b84108b8e"}, - {file = "grpcio-1.60.1-cp37-cp37m-win_amd64.whl", hash = "sha256:50d56280b482875d1f9128ce596e59031a226a8b84bec88cb2bf76c289f5d0de"}, - {file = "grpcio-1.60.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:6d140bdeb26cad8b93c1455fa00573c05592793c32053d6e0016ce05ba267549"}, - {file = "grpcio-1.60.1-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:bc808924470643b82b14fe121923c30ec211d8c693e747eba8a7414bc4351a23"}, - {file = "grpcio-1.60.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:70c83bb530572917be20c21f3b6be92cd86b9aecb44b0c18b1d3b2cc3ae47df0"}, - {file = "grpcio-1.60.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b106bc52e7f28170e624ba61cc7dc6829566e535a6ec68528f8e1afbed1c41f"}, - {file = "grpcio-1.60.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30e980cd6db1088c144b92fe376747328d5554bc7960ce583ec7b7d81cd47287"}, - {file = "grpcio-1.60.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0c5807e9152eff15f1d48f6b9ad3749196f79a4a050469d99eecb679be592acc"}, - {file = "grpcio-1.60.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f1c3dc536b3ee124e8b24feb7533e5c70b9f2ef833e3b2e5513b2897fd46763a"}, - {file = "grpcio-1.60.1-cp38-cp38-win32.whl", hash = "sha256:d7404cebcdb11bb5bd40bf94131faf7e9a7c10a6c60358580fe83913f360f929"}, - {file = "grpcio-1.60.1-cp38-cp38-win_amd64.whl", hash = "sha256:c8754c75f55781515a3005063d9a05878b2cfb3cb7e41d5401ad0cf19de14872"}, - {file = "grpcio-1.60.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:0250a7a70b14000fa311de04b169cc7480be6c1a769b190769d347939d3232a8"}, - {file = "grpcio-1.60.1-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:660fc6b9c2a9ea3bb2a7e64ba878c98339abaf1811edca904ac85e9e662f1d73"}, - {file = "grpcio-1.60.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:76eaaba891083fcbe167aa0f03363311a9f12da975b025d30e94b93ac7a765fc"}, - {file = "grpcio-1.60.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5d97c65ea7e097056f3d1ead77040ebc236feaf7f71489383d20f3b4c28412a"}, - {file = "grpcio-1.60.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb2a2911b028f01c8c64d126f6b632fcd8a9ac975aa1b3855766c94e4107180"}, - {file = "grpcio-1.60.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:5a1ebbae7e2214f51b1f23b57bf98eeed2cf1ba84e4d523c48c36d5b2f8829ff"}, - {file = "grpcio-1.60.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a66f4d2a005bc78e61d805ed95dedfcb35efa84b7bba0403c6d60d13a3de2d6"}, - {file = "grpcio-1.60.1-cp39-cp39-win32.whl", hash = "sha256:8d488fbdbf04283f0d20742b64968d44825617aa6717b07c006168ed16488804"}, - {file = "grpcio-1.60.1-cp39-cp39-win_amd64.whl", hash = "sha256:61b7199cd2a55e62e45bfb629a35b71fc2c0cb88f686a047f25b1112d3810904"}, - {file = "grpcio-1.60.1.tar.gz", hash = "sha256:dd1d3a8d1d2e50ad9b59e10aa7f07c7d1be2b367f3f2d33c5fade96ed5460962"}, + {file = "grpcio-1.62.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:179bee6f5ed7b5f618844f760b6acf7e910988de77a4f75b95bbfaa8106f3c1e"}, + {file = "grpcio-1.62.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:48611e4fa010e823ba2de8fd3f77c1322dd60cb0d180dc6630a7e157b205f7ea"}, + {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:b2a0e71b0a2158aa4bce48be9f8f9eb45cbd17c78c7443616d00abbe2a509f6d"}, + {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fbe80577c7880911d3ad65e5ecc997416c98f354efeba2f8d0f9112a67ed65a5"}, + {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58f6c693d446964e3292425e1d16e21a97a48ba9172f2d0df9d7b640acb99243"}, + {file = "grpcio-1.62.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:77c339403db5a20ef4fed02e4d1a9a3d9866bf9c0afc77a42234677313ea22f3"}, + {file = "grpcio-1.62.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b5a4ea906db7dec694098435d84bf2854fe158eb3cd51e1107e571246d4d1d70"}, + {file = "grpcio-1.62.1-cp310-cp310-win32.whl", hash = "sha256:4187201a53f8561c015bc745b81a1b2d278967b8de35f3399b84b0695e281d5f"}, + {file = "grpcio-1.62.1-cp310-cp310-win_amd64.whl", hash = "sha256:844d1f3fb11bd1ed362d3fdc495d0770cfab75761836193af166fee113421d66"}, + {file = "grpcio-1.62.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:833379943d1728a005e44103f17ecd73d058d37d95783eb8f0b28ddc1f54d7b2"}, + {file = "grpcio-1.62.1-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:c7fcc6a32e7b7b58f5a7d27530669337a5d587d4066060bcb9dee7a8c833dfb7"}, + {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:fa7d28eb4d50b7cbe75bb8b45ed0da9a1dc5b219a0af59449676a29c2eed9698"}, + {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48f7135c3de2f298b833be8b4ae20cafe37091634e91f61f5a7eb3d61ec6f660"}, + {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71f11fd63365ade276c9d4a7b7df5c136f9030e3457107e1791b3737a9b9ed6a"}, + {file = "grpcio-1.62.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4b49fd8fe9f9ac23b78437da94c54aa7e9996fbb220bac024a67469ce5d0825f"}, + {file = "grpcio-1.62.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:482ae2ae78679ba9ed5752099b32e5fe580443b4f798e1b71df412abf43375db"}, + {file = "grpcio-1.62.1-cp311-cp311-win32.whl", hash = "sha256:1faa02530b6c7426404372515fe5ddf66e199c2ee613f88f025c6f3bd816450c"}, + {file = "grpcio-1.62.1-cp311-cp311-win_amd64.whl", hash = "sha256:5bd90b8c395f39bc82a5fb32a0173e220e3f401ff697840f4003e15b96d1befc"}, + {file = "grpcio-1.62.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:b134d5d71b4e0837fff574c00e49176051a1c532d26c052a1e43231f252d813b"}, + {file = "grpcio-1.62.1-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:d1f6c96573dc09d50dbcbd91dbf71d5cf97640c9427c32584010fbbd4c0e0037"}, + {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:359f821d4578f80f41909b9ee9b76fb249a21035a061a327f91c953493782c31"}, + {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a485f0c2010c696be269184bdb5ae72781344cb4e60db976c59d84dd6354fac9"}, + {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b50b09b4dc01767163d67e1532f948264167cd27f49e9377e3556c3cba1268e1"}, + {file = "grpcio-1.62.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3227c667dccbe38f2c4d943238b887bac588d97c104815aecc62d2fd976e014b"}, + {file = "grpcio-1.62.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3952b581eb121324853ce2b191dae08badb75cd493cb4e0243368aa9e61cfd41"}, + {file = "grpcio-1.62.1-cp312-cp312-win32.whl", hash = "sha256:83a17b303425104d6329c10eb34bba186ffa67161e63fa6cdae7776ff76df73f"}, + {file = "grpcio-1.62.1-cp312-cp312-win_amd64.whl", hash = "sha256:6696ffe440333a19d8d128e88d440f91fb92c75a80ce4b44d55800e656a3ef1d"}, + {file = "grpcio-1.62.1-cp37-cp37m-linux_armv7l.whl", hash = "sha256:e3393b0823f938253370ebef033c9fd23d27f3eae8eb9a8f6264900c7ea3fb5a"}, + {file = "grpcio-1.62.1-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:83e7ccb85a74beaeae2634f10eb858a0ed1a63081172649ff4261f929bacfd22"}, + {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:882020c87999d54667a284c7ddf065b359bd00251fcd70279ac486776dbf84ec"}, + {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a10383035e864f386fe096fed5c47d27a2bf7173c56a6e26cffaaa5a361addb1"}, + {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:960edebedc6b9ada1ef58e1c71156f28689978188cd8cff3b646b57288a927d9"}, + {file = "grpcio-1.62.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:23e2e04b83f347d0aadde0c9b616f4726c3d76db04b438fd3904b289a725267f"}, + {file = "grpcio-1.62.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:978121758711916d34fe57c1f75b79cdfc73952f1481bb9583399331682d36f7"}, + {file = "grpcio-1.62.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9084086190cc6d628f282e5615f987288b95457292e969b9205e45b442276407"}, + {file = "grpcio-1.62.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:22bccdd7b23c420a27fd28540fb5dcbc97dc6be105f7698cb0e7d7a420d0e362"}, + {file = "grpcio-1.62.1-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:8999bf1b57172dbc7c3e4bb3c732658e918f5c333b2942243f10d0d653953ba9"}, + {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:d9e52558b8b8c2f4ac05ac86344a7417ccdd2b460a59616de49eb6933b07a0bd"}, + {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1714e7bc935780bc3de1b3fcbc7674209adf5208ff825799d579ffd6cd0bd505"}, + {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8842ccbd8c0e253c1f189088228f9b433f7a93b7196b9e5b6f87dba393f5d5d"}, + {file = "grpcio-1.62.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1f1e7b36bdff50103af95a80923bf1853f6823dd62f2d2a2524b66ed74103e49"}, + {file = "grpcio-1.62.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bba97b8e8883a8038606480d6b6772289f4c907f6ba780fa1f7b7da7dfd76f06"}, + {file = "grpcio-1.62.1-cp38-cp38-win32.whl", hash = "sha256:a7f615270fe534548112a74e790cd9d4f5509d744dd718cd442bf016626c22e4"}, + {file = "grpcio-1.62.1-cp38-cp38-win_amd64.whl", hash = "sha256:e6c8c8693df718c5ecbc7babb12c69a4e3677fd11de8886f05ab22d4e6b1c43b"}, + {file = "grpcio-1.62.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:73db2dc1b201d20ab7083e7041946910bb991e7e9761a0394bbc3c2632326483"}, + {file = "grpcio-1.62.1-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:407b26b7f7bbd4f4751dbc9767a1f0716f9fe72d3d7e96bb3ccfc4aace07c8de"}, + {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:f8de7c8cef9261a2d0a62edf2ccea3d741a523c6b8a6477a340a1f2e417658de"}, + {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd5c8a1af40ec305d001c60236308a67e25419003e9bb3ebfab5695a8d0b369"}, + {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be0477cb31da67846a33b1a75c611f88bfbcd427fe17701b6317aefceee1b96f"}, + {file = "grpcio-1.62.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:60dcd824df166ba266ee0cfaf35a31406cd16ef602b49f5d4dfb21f014b0dedd"}, + {file = "grpcio-1.62.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:973c49086cabab773525f6077f95e5a993bfc03ba8fc32e32f2c279497780585"}, + {file = "grpcio-1.62.1-cp39-cp39-win32.whl", hash = "sha256:12859468e8918d3bd243d213cd6fd6ab07208195dc140763c00dfe901ce1e1b4"}, + {file = "grpcio-1.62.1-cp39-cp39-win_amd64.whl", hash = "sha256:b7209117bbeebdfa5d898205cc55153a51285757902dd73c47de498ad4d11332"}, + {file = "grpcio-1.62.1.tar.gz", hash = "sha256:6c455e008fa86d9e9a9d85bb76da4277c0d7d9668a3bfa70dbe86e9f3c759947"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.60.1)"] +protobuf = ["grpcio-tools (>=1.62.1)"] [[package]] name = "h11" @@ -1053,13 +1070,13 @@ files = [ [[package]] name = "httpcore" -version = "1.0.3" +version = "1.0.5" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.8" files = [ - {file = "httpcore-1.0.3-py3-none-any.whl", hash = "sha256:9a6a501c3099307d9fd76ac244e08503427679b1e81ceb1d922485e2f2462ad2"}, - {file = "httpcore-1.0.3.tar.gz", hash = "sha256:5c0f9546ad17dac4d0772b0808856eb616eb8b48ce94f49ed819fd6982a8a544"}, + {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, + {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, ] [package.dependencies] @@ -1070,7 +1087,7 @@ h11 = ">=0.13,<0.15" asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.24.0)"] +trio = ["trio (>=0.22.0,<0.26.0)"] [[package]] name = "httplib2" @@ -1088,13 +1105,13 @@ pyparsing = {version = ">=2.4.2,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.0.2 || >3.0 [[package]] name = "httpx" -version = "0.26.0" +version = "0.27.0" description = "The next generation HTTP client." optional = false python-versions = ">=3.8" files = [ - {file = "httpx-0.26.0-py3-none-any.whl", hash = "sha256:8915f5a3627c4d47b73e8202457cb28f1266982d1159bd5779d86a80c0eab1cd"}, - {file = "httpx-0.26.0.tar.gz", hash = "sha256:451b55c30d5185ea6b23c2c793abf9bb237d2a7dfb901ced6ff69ad37ec1dfaf"}, + {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, + {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, ] [package.dependencies] @@ -1134,13 +1151,13 @@ files = [ [[package]] name = "inquirer" -version = "3.2.3" +version = "3.2.4" description = "Collection of common interactive command line user interfaces, based on Inquirer.js" optional = false python-versions = ">=3.8.1" files = [ - {file = "inquirer-3.2.3-py3-none-any.whl", hash = "sha256:68fa2cfaa652212f035f73794aa1db2e6c0a9c8cef81ab6825b45120fa8ea345"}, - {file = "inquirer-3.2.3.tar.gz", hash = "sha256:0cba57d901b206dd597d8809b58c378c47fbc804a1fc9b33e2780ca2f9b43ac7"}, + {file = "inquirer-3.2.4-py3-none-any.whl", hash = "sha256:273a4e4a4345ac1afdb17408d40fc8dccf3485db68203357919468561035a763"}, + {file = "inquirer-3.2.4.tar.gz", hash = "sha256:33b09efc1b742b9d687b540296a8b6a3f773399673321fcc2ab0eb4c109bf9b5"}, ] [package.dependencies] @@ -1285,7 +1302,7 @@ files = [ [[package]] name = "metadata-service" -version = "0.3.3" +version = "0.3.4" description = "" optional = false python-versions = "^3.9" @@ -1407,38 +1424,38 @@ files = [ [[package]] name = "mypy" -version = "1.8.0" +version = "1.9.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"}, - {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"}, - {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"}, - {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"}, - {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"}, - {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"}, - {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"}, - {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"}, - {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"}, - {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"}, - {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"}, - {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"}, - {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"}, - {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"}, - {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"}, - {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"}, - {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"}, - {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"}, - {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"}, - {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"}, - {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"}, - {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"}, - {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"}, - {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"}, - {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"}, - {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"}, - {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"}, + {file = "mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"}, + {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"}, + {file = "mypy-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150"}, + {file = "mypy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374"}, + {file = "mypy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc"}, + {file = "mypy-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129"}, + {file = "mypy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612"}, + {file = "mypy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6"}, + {file = "mypy-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185"}, + {file = "mypy-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913"}, + {file = "mypy-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"}, + {file = "mypy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b"}, + {file = "mypy-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2"}, + {file = "mypy-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e"}, + {file = "mypy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04"}, + {file = "mypy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89"}, + {file = "mypy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02"}, + {file = "mypy-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4"}, + {file = "mypy-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d"}, + {file = "mypy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf"}, + {file = "mypy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9"}, + {file = "mypy-1.9.0-py3-none-any.whl", hash = "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e"}, + {file = "mypy-1.9.0.tar.gz", hash = "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974"}, ] [package.dependencies] @@ -1528,51 +1545,51 @@ six = ">=1.6.1" [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] name = "pandas" -version = "2.2.0" +version = "2.2.1" description = "Powerful data structures for data analysis, time series, and statistics" optional = false python-versions = ">=3.9" files = [ - {file = "pandas-2.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8108ee1712bb4fa2c16981fba7e68b3f6ea330277f5ca34fa8d557e986a11670"}, - {file = "pandas-2.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:736da9ad4033aeab51d067fc3bd69a0ba36f5a60f66a527b3d72e2030e63280a"}, - {file = "pandas-2.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38e0b4fc3ddceb56ec8a287313bc22abe17ab0eb184069f08fc6a9352a769b18"}, - {file = "pandas-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20404d2adefe92aed3b38da41d0847a143a09be982a31b85bc7dd565bdba0f4e"}, - {file = "pandas-2.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7ea3ee3f125032bfcade3a4cf85131ed064b4f8dd23e5ce6fa16473e48ebcaf5"}, - {file = "pandas-2.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9670b3ac00a387620489dfc1bca66db47a787f4e55911f1293063a78b108df1"}, - {file = "pandas-2.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:5a946f210383c7e6d16312d30b238fd508d80d927014f3b33fb5b15c2f895430"}, - {file = "pandas-2.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a1b438fa26b208005c997e78672f1aa8138f67002e833312e6230f3e57fa87d5"}, - {file = "pandas-2.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8ce2fbc8d9bf303ce54a476116165220a1fedf15985b09656b4b4275300e920b"}, - {file = "pandas-2.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2707514a7bec41a4ab81f2ccce8b382961a29fbe9492eab1305bb075b2b1ff4f"}, - {file = "pandas-2.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85793cbdc2d5bc32620dc8ffa715423f0c680dacacf55056ba13454a5be5de88"}, - {file = "pandas-2.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cfd6c2491dc821b10c716ad6776e7ab311f7df5d16038d0b7458bc0b67dc10f3"}, - {file = "pandas-2.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a146b9dcacc3123aa2b399df1a284de5f46287a4ab4fbfc237eac98a92ebcb71"}, - {file = "pandas-2.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:fbc1b53c0e1fdf16388c33c3cca160f798d38aea2978004dd3f4d3dec56454c9"}, - {file = "pandas-2.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a41d06f308a024981dcaa6c41f2f2be46a6b186b902c94c2674e8cb5c42985bc"}, - {file = "pandas-2.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:159205c99d7a5ce89ecfc37cb08ed179de7783737cea403b295b5eda8e9c56d1"}, - {file = "pandas-2.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb1e1f3861ea9132b32f2133788f3b14911b68102d562715d71bd0013bc45440"}, - {file = "pandas-2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:761cb99b42a69005dec2b08854fb1d4888fdf7b05db23a8c5a099e4b886a2106"}, - {file = "pandas-2.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a20628faaf444da122b2a64b1e5360cde100ee6283ae8effa0d8745153809a2e"}, - {file = "pandas-2.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f5be5d03ea2073627e7111f61b9f1f0d9625dc3c4d8dda72cc827b0c58a1d042"}, - {file = "pandas-2.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:a626795722d893ed6aacb64d2401d017ddc8a2341b49e0384ab9bf7112bdec30"}, - {file = "pandas-2.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9f66419d4a41132eb7e9a73dcec9486cf5019f52d90dd35547af11bc58f8637d"}, - {file = "pandas-2.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:57abcaeda83fb80d447f28ab0cc7b32b13978f6f733875ebd1ed14f8fbc0f4ab"}, - {file = "pandas-2.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e60f1f7dba3c2d5ca159e18c46a34e7ca7247a73b5dd1a22b6d59707ed6b899a"}, - {file = "pandas-2.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb61dc8567b798b969bcc1fc964788f5a68214d333cade8319c7ab33e2b5d88a"}, - {file = "pandas-2.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:52826b5f4ed658fa2b729264d63f6732b8b29949c7fd234510d57c61dbeadfcd"}, - {file = "pandas-2.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bde2bc699dbd80d7bc7f9cab1e23a95c4375de615860ca089f34e7c64f4a8de7"}, - {file = "pandas-2.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:3de918a754bbf2da2381e8a3dcc45eede8cd7775b047b923f9006d5f876802ae"}, - {file = "pandas-2.2.0.tar.gz", hash = "sha256:30b83f7c3eb217fb4d1b494a57a2fda5444f17834f5df2de6b2ffff68dc3c8e2"}, + {file = "pandas-2.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8df8612be9cd1c7797c93e1c5df861b2ddda0b48b08f2c3eaa0702cf88fb5f88"}, + {file = "pandas-2.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0f573ab277252ed9aaf38240f3b54cfc90fff8e5cab70411ee1d03f5d51f3944"}, + {file = "pandas-2.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f02a3a6c83df4026e55b63c1f06476c9aa3ed6af3d89b4f04ea656ccdaaaa359"}, + {file = "pandas-2.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c38ce92cb22a4bea4e3929429aa1067a454dcc9c335799af93ba9be21b6beb51"}, + {file = "pandas-2.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c2ce852e1cf2509a69e98358e8458775f89599566ac3775e70419b98615f4b06"}, + {file = "pandas-2.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:53680dc9b2519cbf609c62db3ed7c0b499077c7fefda564e330286e619ff0dd9"}, + {file = "pandas-2.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:94e714a1cca63e4f5939cdce5f29ba8d415d85166be3441165edd427dc9f6bc0"}, + {file = "pandas-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f821213d48f4ab353d20ebc24e4faf94ba40d76680642fb7ce2ea31a3ad94f9b"}, + {file = "pandas-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c70e00c2d894cb230e5c15e4b1e1e6b2b478e09cf27cc593a11ef955b9ecc81a"}, + {file = "pandas-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e97fbb5387c69209f134893abc788a6486dbf2f9e511070ca05eed4b930b1b02"}, + {file = "pandas-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101d0eb9c5361aa0146f500773395a03839a5e6ecde4d4b6ced88b7e5a1a6403"}, + {file = "pandas-2.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7d2ed41c319c9fb4fd454fe25372028dfa417aacb9790f68171b2e3f06eae8cd"}, + {file = "pandas-2.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:af5d3c00557d657c8773ef9ee702c61dd13b9d7426794c9dfeb1dc4a0bf0ebc7"}, + {file = "pandas-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:06cf591dbaefb6da9de8472535b185cba556d0ce2e6ed28e21d919704fef1a9e"}, + {file = "pandas-2.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:88ecb5c01bb9ca927ebc4098136038519aa5d66b44671861ffab754cae75102c"}, + {file = "pandas-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:04f6ec3baec203c13e3f8b139fb0f9f86cd8c0b94603ae3ae8ce9a422e9f5bee"}, + {file = "pandas-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a935a90a76c44fe170d01e90a3594beef9e9a6220021acfb26053d01426f7dc2"}, + {file = "pandas-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c391f594aae2fd9f679d419e9a4d5ba4bce5bb13f6a989195656e7dc4b95c8f0"}, + {file = "pandas-2.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9d1265545f579edf3f8f0cb6f89f234f5e44ba725a34d86535b1a1d38decbccc"}, + {file = "pandas-2.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:11940e9e3056576ac3244baef2fedade891977bcc1cb7e5cc8f8cc7d603edc89"}, + {file = "pandas-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:4acf681325ee1c7f950d058b05a820441075b0dd9a2adf5c4835b9bc056bf4fb"}, + {file = "pandas-2.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9bd8a40f47080825af4317d0340c656744f2bfdb6819f818e6ba3cd24c0e1397"}, + {file = "pandas-2.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:df0c37ebd19e11d089ceba66eba59a168242fc6b7155cba4ffffa6eccdfb8f16"}, + {file = "pandas-2.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:739cc70eaf17d57608639e74d63387b0d8594ce02f69e7a0b046f117974b3019"}, + {file = "pandas-2.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9d3558d263073ed95e46f4650becff0c5e1ffe0fc3a015de3c79283dfbdb3df"}, + {file = "pandas-2.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4aa1d8707812a658debf03824016bf5ea0d516afdea29b7dc14cf687bc4d4ec6"}, + {file = "pandas-2.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:76f27a809cda87e07f192f001d11adc2b930e93a2b0c4a236fde5429527423be"}, + {file = "pandas-2.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:1ba21b1d5c0e43416218db63037dbe1a01fc101dc6e6024bcad08123e48004ab"}, + {file = "pandas-2.2.1.tar.gz", hash = "sha256:0ab90f87093c13f3e8fa45b48ba9f39181046e8f3317d3aadb2fffbb1b978572"}, ] [package.dependencies] @@ -1604,6 +1621,7 @@ parquet = ["pyarrow (>=10.0.1)"] performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] plot = ["matplotlib (>=3.6.3)"] postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] spss = ["pyreadstat (>=1.2.0)"] sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] @@ -1651,48 +1669,48 @@ files = [ [[package]] name = "protobuf" -version = "4.25.3" +version = "5.26.1" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"}, - {file = "protobuf-4.25.3-cp310-abi3-win_amd64.whl", hash = "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8"}, - {file = "protobuf-4.25.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c"}, - {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019"}, - {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d"}, - {file = "protobuf-4.25.3-cp38-cp38-win32.whl", hash = "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"}, - {file = "protobuf-4.25.3-cp38-cp38-win_amd64.whl", hash = "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4"}, - {file = "protobuf-4.25.3-cp39-cp39-win32.whl", hash = "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4"}, - {file = "protobuf-4.25.3-cp39-cp39-win_amd64.whl", hash = "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c"}, - {file = "protobuf-4.25.3-py3-none-any.whl", hash = "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9"}, - {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"}, + {file = "protobuf-5.26.1-cp310-abi3-win32.whl", hash = "sha256:3c388ea6ddfe735f8cf69e3f7dc7611e73107b60bdfcf5d0f024c3ccd3794e23"}, + {file = "protobuf-5.26.1-cp310-abi3-win_amd64.whl", hash = "sha256:e6039957449cb918f331d32ffafa8eb9255769c96aa0560d9a5bf0b4e00a2a33"}, + {file = "protobuf-5.26.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:38aa5f535721d5bb99861166c445c4105c4e285c765fbb2ac10f116e32dcd46d"}, + {file = "protobuf-5.26.1-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:fbfe61e7ee8c1860855696e3ac6cfd1b01af5498facc6834fcc345c9684fb2ca"}, + {file = "protobuf-5.26.1-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:f7417703f841167e5a27d48be13389d52ad705ec09eade63dfc3180a959215d7"}, + {file = "protobuf-5.26.1-cp38-cp38-win32.whl", hash = "sha256:d693d2504ca96750d92d9de8a103102dd648fda04540495535f0fec7577ed8fc"}, + {file = "protobuf-5.26.1-cp38-cp38-win_amd64.whl", hash = "sha256:9b557c317ebe6836835ec4ef74ec3e994ad0894ea424314ad3552bc6e8835b4e"}, + {file = "protobuf-5.26.1-cp39-cp39-win32.whl", hash = "sha256:b9ba3ca83c2e31219ffbeb9d76b63aad35a3eb1544170c55336993d7a18ae72c"}, + {file = "protobuf-5.26.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ee014c2c87582e101d6b54260af03b6596728505c79f17c8586e7523aaa8f8c"}, + {file = "protobuf-5.26.1-py3-none-any.whl", hash = "sha256:da612f2720c0183417194eeaa2523215c4fcc1a1949772dc65f05047e08d5932"}, + {file = "protobuf-5.26.1.tar.gz", hash = "sha256:8ca2a1d97c290ec7b16e4e5dff2e5ae150cc1582f55b5ab300d45cb0dfa90e51"}, ] [[package]] name = "pyasn1" -version = "0.5.1" +version = "0.6.0" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +python-versions = ">=3.8" files = [ - {file = "pyasn1-0.5.1-py2.py3-none-any.whl", hash = "sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58"}, - {file = "pyasn1-0.5.1.tar.gz", hash = "sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c"}, + {file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"}, + {file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"}, ] [[package]] name = "pyasn1-modules" -version = "0.3.0" +version = "0.4.0" description = "A collection of ASN.1-based protocols modules" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +python-versions = ">=3.8" files = [ - {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"}, - {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"}, + {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"}, + {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"}, ] [package.dependencies] -pyasn1 = ">=0.4.6,<0.6.0" +pyasn1 = ">=0.4.6,<0.7.0" [[package]] name = "pycparser" @@ -1851,13 +1869,13 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] [[package]] name = "pyparsing" -version = "3.1.1" +version = "3.1.2" description = "pyparsing module - Classes and methods to define and execute parsing grammars" optional = false python-versions = ">=3.6.8" files = [ - {file = "pyparsing-3.1.1-py3-none-any.whl", hash = "sha256:32c7c0b711493c72ff18a981d24f28aaf9c1fb7ed5e9667c9e84e3db623bdbfb"}, - {file = "pyparsing-3.1.1.tar.gz", hash = "sha256:ede28a1a32462f5a9705e07aea48001a08f7cf81a021585011deba701581a0db"}, + {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, + {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, ] [package.extras] @@ -1865,13 +1883,13 @@ diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pytest" -version = "8.0.1" +version = "8.1.1" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.0.1-py3-none-any.whl", hash = "sha256:3e4f16fe1c0a9dc9d9389161c127c3edc5d810c38d6793042fb81d9f48a59fca"}, - {file = "pytest-8.0.1.tar.gz", hash = "sha256:267f6563751877d772019b13aacbe4e860d73fe8f651f28112e9ac37de7513ae"}, + {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"}, + {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"}, ] [package.dependencies] @@ -1879,21 +1897,21 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" -pluggy = ">=1.3.0,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} +pluggy = ">=1.4,<2.0" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-asyncio" -version = "0.23.5" +version = "0.23.6" description = "Pytest support for asyncio" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-asyncio-0.23.5.tar.gz", hash = "sha256:3a048872a9c4ba14c3e90cc1aa20cbc2def7d01c7c8db3777ec281ba9c057675"}, - {file = "pytest_asyncio-0.23.5-py3-none-any.whl", hash = "sha256:4e7093259ba018d58ede7d5315131d21923a60f8a6e9ee266ce1589685c89eac"}, + {file = "pytest-asyncio-0.23.6.tar.gz", hash = "sha256:ffe523a89c1c222598c76856e76852b787504ddb72dd5d9b6617ffa8aa2cde5f"}, + {file = "pytest_asyncio-0.23.6-py3-none-any.whl", hash = "sha256:68516fdd1018ac57b846c9846b954f0393b26f094764a28c955eabb0536a4e8a"}, ] [package.dependencies] @@ -1905,30 +1923,30 @@ testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] [[package]] name = "pytest-mock" -version = "3.12.0" +version = "3.14.0" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, - {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, ] [package.dependencies] -pytest = ">=5.0" +pytest = ">=6.2.5" [package.extras] dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -1957,7 +1975,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -1965,15 +1982,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -1990,7 +2000,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -1998,7 +2007,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -2006,13 +2014,13 @@ files = [ [[package]] name = "readchar" -version = "4.0.5" +version = "4.0.6" description = "Library to easily read single chars and key strokes" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "readchar-4.0.5-py3-none-any.whl", hash = "sha256:76ec784a5dd2afac3b7da8003329834cdd9824294c260027f8c8d2e4d0a78f43"}, - {file = "readchar-4.0.5.tar.gz", hash = "sha256:08a456c2d7c1888cde3f4688b542621b676eb38cd6cfed7eb6cb2e2905ddc826"}, + {file = "readchar-4.0.6-py3-none-any.whl", hash = "sha256:b4b31dd35de4897be738f27e8f9f62426b5fedb54b648364987e30ae534b71bc"}, + {file = "readchar-4.0.6.tar.gz", hash = "sha256:e0dae942d3a746f8d5423f83dbad67efe704004baafe31b626477929faaee472"}, ] [package.dependencies] @@ -2041,13 +2049,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rich" -version = "13.7.0" +version = "13.7.1" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.7.0" files = [ - {file = "rich-13.7.0-py3-none-any.whl", hash = "sha256:6da14c108c4866ee9520bbffa71f6fe3962e193b7da68720583850cd4548e235"}, - {file = "rich-13.7.0.tar.gz", hash = "sha256:5cb5123b5cf9ee70584244246816e9114227e0b98ad9176eede6ad54bf5403fa"}, + {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, + {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"}, ] [package.dependencies] @@ -2124,19 +2132,19 @@ files = [ [[package]] name = "setuptools" -version = "69.1.0" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "simpleeval" @@ -2173,13 +2181,13 @@ files = [ [[package]] name = "sniffio" -version = "1.3.0" +version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" files = [ - {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, - {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] [[package]] @@ -2217,24 +2225,24 @@ files = [ [[package]] name = "types-toml" -version = "0.10.8.7" +version = "0.10.8.20240310" description = "Typing stubs for toml" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "types-toml-0.10.8.7.tar.gz", hash = "sha256:58b0781c681e671ff0b5c0319309910689f4ab40e8a2431e205d70c94bb6efb1"}, - {file = "types_toml-0.10.8.7-py3-none-any.whl", hash = "sha256:61951da6ad410794c97bec035d59376ce1cbf4453dc9b6f90477e81e4442d631"}, + {file = "types-toml-0.10.8.20240310.tar.gz", hash = "sha256:3d41501302972436a6b8b239c850b26689657e25281b48ff0ec06345b8830331"}, + {file = "types_toml-0.10.8.20240310-py3-none-any.whl", hash = "sha256:627b47775d25fa29977d9c70dc0cbab3f314f32c8d8d0c012f2ef5de7aaec05d"}, ] [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -2472,4 +2480,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "cc999d31f1c5111833dc2d09a6c66a25e50b3179193d011ff3141e992f4846d8" +content-hash = "271217ba1dec9fbd68098b86489275f011c13d8a5beae495e08220ca0adc3b0f" diff --git a/airbyte-ci/connectors/connectors_qa/pyproject.toml b/airbyte-ci/connectors/connectors_qa/pyproject.toml index b0a761f239510..7041550e756e2 100644 --- a/airbyte-ci/connectors/connectors_qa/pyproject.toml +++ b/airbyte-ci/connectors/connectors_qa/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "connectors-qa" -version = "1.0.3" +version = "1.2.0" description = "A package to run QA checks on Airbyte connectors, generate reports and documentation." authors = ["Airbyte "] readme = "README.md" @@ -9,9 +9,9 @@ packages = [ ] [tool.poetry.dependencies] python = "^3.10" -airbyte-connectors-base-images = {path = "../base_images", develop = false} -connector-ops = {path = "../connector_ops", develop = false} -metadata-service = {path = "../metadata_service/lib", develop = false} +airbyte-connectors-base-images = { path = "../base_images", develop = false } +connector-ops = { path = "../connector_ops", develop = false } +metadata-service = { path = "../metadata_service/lib", develop = false } pydash = "^6.0.2" jinja2 = "^3.1.3" toml = "^0.10.2" @@ -23,7 +23,7 @@ connectors-qa = "connectors_qa.cli:connectors_qa" [tool.poetry.group.dev.dependencies] ruff = "^0.2.1" -pytest = "^8.0.0" +pytest = "^8" pytest-mock = "^3.12.0" mypy = "^1.8.0" types-toml = "^0.10.8.7" @@ -40,6 +40,6 @@ type_check = "mypy src --disallow-untyped-defs" lint = "ruff check src" [tool.airbyte_ci] -extra_poetry_groups = ["dev"] +optional_poetry_groups = ["dev"] poe_tasks = ["type_check", "lint", "test"] -required_environment_variables = ["DOCKER_HUB_USERNAME", "DOCKER_HUB_PASSWORD",] +required_environment_variables = ["DOCKER_HUB_USERNAME", "DOCKER_HUB_PASSWORD"] diff --git a/airbyte-ci/connectors/connectors_qa/src/connectors_qa/checks/metadata.py b/airbyte-ci/connectors/connectors_qa/src/connectors_qa/checks/metadata.py index 72ca27c6263ff..399de6829882e 100644 --- a/airbyte-ci/connectors/connectors_qa/src/connectors_qa/checks/metadata.py +++ b/airbyte-ci/connectors/connectors_qa/src/connectors_qa/checks/metadata.py @@ -2,6 +2,7 @@ import os +from datetime import datetime, timedelta import toml from connector_ops.utils import Connector, ConnectorLanguage # type: ignore @@ -149,8 +150,68 @@ def _run(self, connector: Connector) -> CheckResult: ) +class ValidateBreakingChangesDeadlines(MetadataCheck): + """ + Verify that _if_ the the most recent connector version has a breaking change, + it's deadline is at least a week in the future. + """ + + name = "Breaking change deadline should be a week in the future" + description = "If the connector version has a breaking change, the deadline field must be set to at least a week in the future." + runs_on_released_connectors = False + minimum_days_until_deadline = 7 + + def _run(self, connector: Connector) -> CheckResult: + + # fetch the current branch version of the connector first. + # we'll try and see if there are any breaking changes associated + # with it next. + current_version = connector.version + if current_version is None: + return self.fail( + connector=connector, + message="Can't verify breaking changes deadline: connector version is not defined.", + ) + + breaking_changes = connector.metadata.get("releases", {}).get("breakingChanges") + + if not breaking_changes: + return self.pass_( + connector=connector, + message="No breaking changes found on this connector.", + ) + + current_version_breaking_changes = breaking_changes.get(current_version) + + if not current_version_breaking_changes: + return self.pass_( + connector=connector, + message="No breaking changes found for the current version.", + ) + + upgrade_deadline = current_version_breaking_changes.get("upgradeDeadline") + + if not upgrade_deadline: + return self.fail( + connector=connector, + message=f"No upgrade deadline found for the breaking changes in {current_version}.", + ) + + upgrade_deadline_datetime = datetime.strptime(upgrade_deadline, "%Y-%m-%d") + one_week_from_now = datetime.utcnow() + timedelta(days=self.minimum_days_until_deadline) + + if upgrade_deadline_datetime <= one_week_from_now: + return self.fail( + connector=connector, + message=f"The upgrade deadline for the breaking changes in {current_version} is less than {self.minimum_days_until_deadline} days from today. Please extend the deadline", + ) + + return self.pass_(connector=connector, message="The upgrade deadline is set to at least a week in the future") + + ENABLED_CHECKS = [ ValidateMetadata(), CheckConnectorLanguageTag(), CheckConnectorCDKTag(), + ValidateBreakingChangesDeadlines(), ] diff --git a/airbyte-ci/connectors/connectors_qa/src/connectors_qa/checks/packaging.py b/airbyte-ci/connectors/connectors_qa/src/connectors_qa/checks/packaging.py index 539e56b9cb56b..c3a3f327d96c8 100644 --- a/airbyte-ci/connectors/connectors_qa/src/connectors_qa/checks/packaging.py +++ b/airbyte-ci/connectors/connectors_qa/src/connectors_qa/checks/packaging.py @@ -17,6 +17,7 @@ class CheckConnectorUsesPoetry(PackagingCheck): name = "Connectors must use Poetry for dependency management" description = "Connectors must use [Poetry](https://python-poetry.org/) for dependency management. This is to ensure that all connectors use a dependency management tool which locks dependencies and ensures reproducible installs." requires_metadata = False + runs_on_released_connectors = False applies_to_connector_languages = [ ConnectorLanguage.PYTHON, ConnectorLanguage.LOW_CODE, @@ -44,7 +45,7 @@ def _run(self, connector: Connector) -> CheckResult: class CheckPublishToPyPiIsEnabled(PackagingCheck): name = "Python connectors must have PyPi publishing enabled" - description = f"Python connectors must have [PyPi](https://pypi.org/) publishing enabled in their `{consts.METADATA_FILE_NAME}` file. This is declared by setting `remoteRegistries.pypi.enabled` to `true` in {consts.METADATA_FILE_NAME}. This is to ensure that all connectors can be published to PyPi and can be used in `airbyte-lib`." + description = f"Python connectors must have [PyPi](https://pypi.org/) publishing enabled in their `{consts.METADATA_FILE_NAME}` file. This is declared by setting `remoteRegistries.pypi.enabled` to `true` in {consts.METADATA_FILE_NAME}. This is to ensure that all connectors can be published to PyPi and can be used in `PyAirbyte`." applies_to_connector_languages = [ ConnectorLanguage.PYTHON, ConnectorLanguage.LOW_CODE, diff --git a/airbyte-ci/connectors/connectors_qa/src/connectors_qa/checks/security.py b/airbyte-ci/connectors/connectors_qa/src/connectors_qa/checks/security.py index d5e5f396cedd3..8f448e00187ee 100644 --- a/airbyte-ci/connectors/connectors_qa/src/connectors_qa/checks/security.py +++ b/airbyte-ci/connectors/connectors_qa/src/connectors_qa/checks/security.py @@ -17,6 +17,7 @@ class CheckConnectorUsesHTTPSOnly(SecurityCheck): name = "Connectors must use HTTPS only" description = "Connectors must use HTTPS only when making requests to external services." requires_metadata = False + runs_on_released_connectors = False ignore_comment = "# ignore-https-check" # Define the ignore comment pattern @@ -31,6 +32,7 @@ class CheckConnectorUsesHTTPSOnly(SecurityCheck): "acceptance_tests_logs", ".hypothesis", ".ruff_cache", + "htmlcov", } ignored_file_name_pattern_for_https_checks = { diff --git a/airbyte-ci/connectors/connectors_qa/src/connectors_qa/models.py b/airbyte-ci/connectors/connectors_qa/src/connectors_qa/models.py index db55b1db4a397..6103fc8a7d6dd 100644 --- a/airbyte-ci/connectors/connectors_qa/src/connectors_qa/models.py +++ b/airbyte-ci/connectors/connectors_qa/src/connectors_qa/models.py @@ -63,6 +63,7 @@ def __repr__(self) -> str: class Check(ABC): requires_metadata: bool = True + runs_on_released_connectors: bool = True @property @abstractmethod @@ -135,6 +136,11 @@ def category(self) -> CheckCategory: raise NotImplementedError("Subclasses must implement category property/attribute") def run(self, connector: Connector) -> CheckResult: + if not self.runs_on_released_connectors and connector.is_released: + return self.skip( + connector, + "Check does not apply to released connectors", + ) if not connector.metadata and self.requires_metadata: return self.fail( connector, diff --git a/airbyte-ci/connectors/connectors_qa/tests/unit_tests/test_models.py b/airbyte-ci/connectors/connectors_qa/tests/unit_tests/test_models.py index 4105193331853..51ce582a13198 100644 --- a/airbyte-ci/connectors/connectors_qa/tests/unit_tests/test_models.py +++ b/airbyte-ci/connectors/connectors_qa/tests/unit_tests/test_models.py @@ -9,7 +9,7 @@ class TestCheck: def test_fail_when_requires_metadata_and_metata_is_missing(self, mocker): # Arrange - connector = mocker.MagicMock(metadata={}) + connector = mocker.MagicMock(metadata={}, is_released=False) # Act results = [] @@ -27,7 +27,7 @@ def test_fail_when_requires_metadata_and_metata_is_missing(self, mocker): def test_fail_when_language_is_missing(self, mocker): # Arrange - connector = mocker.MagicMock(language=None) + connector = mocker.MagicMock(language=None, is_released=False) # Act results = [] @@ -63,3 +63,17 @@ def test_skip_when_type_does_not_apply(self, mocker): # Assert assert all(result.status == CheckStatus.SKIPPED for result in results) + + def test_skip_when_check_does_not_apply_to_released_connectors(self, mocker): + # Arrange + connector = mocker.MagicMock(is_released=True) + + # Act + results = [] + for check in ENABLED_CHECKS: + if not check.runs_on_released_connectors: + results.append(check.run(connector)) + + # Assert + assert all(result.status == CheckStatus.SKIPPED for result in results) + assert all(result.message == "Check does not apply to released connectors" for result in results) diff --git a/airbyte-ci/connectors/live-tests/.gitignore b/airbyte-ci/connectors/live-tests/.gitignore new file mode 100644 index 0000000000000..692dc1a1022df --- /dev/null +++ b/airbyte-ci/connectors/live-tests/.gitignore @@ -0,0 +1,2 @@ +regression_tests_artifacts +live_tests_debug_reports diff --git a/airbyte-ci/connectors/live-tests/README.md b/airbyte-ci/connectors/live-tests/README.md index 0723dd868f393..675060a90b10a 100644 --- a/airbyte-ci/connectors/live-tests/README.md +++ b/airbyte-ci/connectors/live-tests/README.md @@ -10,7 +10,7 @@ This project contains utilities for running connector tests against live data. ## Install ```bash -# From airbyte-ci/connectors/live-tests +# From tools/connectors/live-tests pipx install . # To install in editable mode for development pipx install . --force --editable @@ -21,22 +21,19 @@ pipx install . --force --editable ### `debug` ``` -Usage: live-tests debug [OPTIONS] COMMAND +Usage: live-tests debug [OPTIONS] {check|discover|read|read-with-state|spec} Run a specific command on one or multiple connectors and persists the outputs to local storage. Options: + --connection-id TEXT + --config-path FILE + --catalog-path FILE + --state-path FILE -c, --connector-image TEXT Docker image name of the connector to debug - (e.g. `source-faker:latest`, `source- - faker:dev`) [required] - -o, --output-directory DIRECTORY - Directory in which connector output and test - results should be stored. - Defaults to the current directory. - --config-path FILE Path to the connector config. - --catalog-path FILE Path to the connector catalog. - --state-path FILE Path to the connector state. + (e.g. `airbyte/source-faker:latest`, + `airbyte/source-faker:dev`) [required] -hc, --http-cache Use the HTTP cache for the connector. --help Show this message and exit. ``` @@ -53,6 +50,8 @@ It will write artifacts to an output directory: * `stdout.log`: The collected standard output following the command execution * `stderr.log`: The collected standard error following the c * `http_dump.txt`: An `mitmproxy` http stream log. Can be consumed with `mitmweb` (version `9.0.1`) for debugging. +* `airbyte_messages.db`: A DuckDB database containing the messages produced by the connector. +* `airbyte_messages`: A directory containing `.jsonl` files for each message type (logs, records, traces, controls, states etc.) produced by the connector. #### Example Let's run `debug` to check the output of `read` on two different versions of the same connector: @@ -74,23 +73,32 @@ live_tests_debug_reports └── read ├── dev │   ├── airbyte_messages + | │ ├── duck.db # DuckDB database │   │   ├── logs.jsonl - │   │   ├── pokemon_records.jsonl + │   │   ├── records.jsonl │   │   └── traces.jsonl - │   ├── http_dump.mitm │   ├── stderr.log │   └── stdout.log └── latest ├── airbyte_messages + │ ├── duck.db # DuckDB database │   ├── logs.jsonl - │   ├── pokemon_records.jsonl + │   ├── records.jsonl │   └── traces.jsonl - ├── http_dump.mitm ├── stderr.log └── stdout.log ``` +You can also run the `debug` command on a live connection by passing the `--connection-id` option: + +```bash +live-tests debug read \ +--connector-image=airbyte/source-pokeapi:dev \ +--connector-image=airbyte/source-pokeapi:latest \ +--connection-id= +``` + ##### Consuming `http_dump.mitm` You can install [`mitmproxy`](https://mitmproxy.org/): ```bash @@ -103,17 +111,198 @@ mitmweb --rfile=http_dump.mitm ``` ## Regression tests -We created a regression test suite to run tests to compare outputs of connector commands on different versions of the same connector. +We created a regression test suite to run tests to compare the outputs of connector commands on different versions of the same connector. + You can run the existing test suites with the following command: +#### With local connection objects (`config.json`, `catalog.json`, `state.json`) +```bash +poetry run pytest src/live_tests/regression_tests \ +--connector-image=airbyte/source-faker \ + --config-path= \ + --catalog-path= \ + --target-version=dev \ + --control-version=latest + --pr-url= # The URL of the PR you are testing +``` + +#### Using a live connection +The live connection objects will be fetched. + ```bash -cd src/live_tests/regression_tests -poetry run pytest --connector-image=airbyte/source-pokeapi --config-path= --catalog-path= + poetry run pytest src/live_tests/regression_tests \ + --connector-image=airbyte/source-faker \ + --connection-id= \ + --target-version=dev \ + --control-version=latest + --pr-url= # The URL of the PR you are testing + ``` + +You can also pass local connection objects path to override the live connection objects with `--config-path`, `--state-path` or `--catalog-path`. + +#### Test artifacts +The test suite run will produce test artifacts in the `/tmp/regression_tests_artifacts/` folder. +**They will get cleared after each test run on prompt exit. Please do not copy them elsewhere in your filesystem as they contain sensitive data that are not meant to be stored outside of your debugging session!** + +##### Artifacts types +* `report.html`: A report of the test run. +* `stdout.log`: The collected standard output following the command execution +* `stderr.log`: The collected standard error following the command execution +* `http_dump.mitm`: An `mitmproxy` http stream log. Can be consumed with `mitmweb` (version `>=10`) for debugging. +* `http_dump.har`: An `mitmproxy` http stream log in HAR format (a JSON encoded version of the mitm dump). +* `airbyte_messages`: A directory containing `.jsonl` files for each message type (logs, records, traces, controls, states etc.) produced by the connector. +* `duck.db`: A DuckDB database containing the messages produced by the connector. +* `dagger.log`: The log of the Dagger session, useful for debugging errors unrelated to the tests. + +**Tests can also write specific artifacts like diffs under a directory named after the test function.** + + ``` +/tmp/regression_tests_artifacts +└── session_1710754231 + ├── duck.db + |── report.html + ├── command_execution_artifacts + │   └── source-orb + │   ├── check + │   │   ├── dev + │   │   │   ├── airbyte_messages + │   │   │   │   ├── connection_status.jsonl + │   │   │   │   └── logs.jsonl + │   │   │   ├── http_dump.har + │   │   │   ├── http_dump.mitm + │   │   │   ├── stderr.log + │   │   │   └── stdout.log + │   │   └── latest + │   │   ├── airbyte_messages + │   │   │   ├── connection_status.jsonl + │   │   │   └── logs.jsonl + │   │   ├── http_dump.har + │   │   ├── http_dump.mitm + │   │   ├── stderr.log + │   │   └── stdout.log + │   ├── discover + │   │   ├── dev + │   │   │   ├── airbyte_messages + │   │   │   │   └── catalog.jsonl + │   │   │   ├── http_dump.har + │   │   │   ├── http_dump.mitm + │   │   │   ├── stderr.log + │   │   │   └── stdout.log + │   │   └── latest + │   │   ├── airbyte_messages + │   │   │   └── catalog.jsonl + │   │   ├── http_dump.har + │   │   ├── http_dump.mitm + │   │   ├── stderr.log + │   │   └── stdout.log + │   ├── read-with-state + │   │   ├── dev + │   │   │   ├── airbyte_messages + │   │   │   │   ├── logs.jsonl + │   │   │   │   ├── records.jsonl + │   │   │   │   ├── states.jsonl + │   │   │   │   └── traces.jsonl + │   │   │   ├── http_dump.har + │   │   │   ├── http_dump.mitm + │   │   │   ├── stderr.log + │   │   │   └── stdout.log + │   │   └── latest + │   │   ├── airbyte_messages + │   │   │   ├── logs.jsonl + │   │   │   ├── records.jsonl + │   │   │   ├── states.jsonl + │   │   │   └── traces.jsonl + │   │   ├── http_dump.har + │   │   ├── http_dump.mitm + │   │   ├── stderr.log + │   │   └── stdout.log + │   └── spec + │   ├── dev + │   │   ├── airbyte_messages + │   │   │   └── spec.jsonl + │   │   ├── stderr.log + │   │   └── stdout.log + │   └── latest + │   ├── airbyte_messages + │   │   └── spec.jsonl + │   ├── stderr.log + │   └── stdout.log + └── dagger.log + ``` +#### HTTP Proxy and caching +We use a containerized `mitmproxy` to capture the HTTP traffic between the connector and the source. Connector command runs produce `http_dump.mitm` (can be consumed with `mitmproxy` (version `>=10`) for debugging) and `http_dump.har` (a JSON encoded version of the mitm dump) artifacts. +The traffic recorded on the control connector is passed to the target connector proxy to cache the responses for requests with the same URL. This is useful to avoid hitting the source API multiple times when running the same command on different versions of the connector. ## Changelog +### 0.14.2 +Fix KeyError when target & control streams differ. + +### 0.14.1 +Improve performance when reading records per stream. + +### 0.14.0 +Track usage via Segment. + +### 0.13.0 +Show test docstring in the test report. + +### 0.12.0 +Implement a test to compare schema inferred on both control and target version. + +### 0.11.0 +Create a global duckdb instance to store messages produced by the connector in target and control version. + +### 0.10.0 +Show record count per stream in report and list untested streams. + +### 0.9.0 +Make the regressions tests suite better at handling large connector outputs. + +### 0.8.1 +Improve diff output. + +### 0.8.0 +Regression tests: add an HTML report. + +### 0.7.0 +Improve the proxy workflow and caching logic + generate HAR files. + +### 0.6.6 +Exit pytest if connection can't be retrieved. + +### 0.6.6 +Cleanup debug files when prompt is closed. + +### 0.6.5 +Improve ConnectorRunner logging. + +### 0.6.4 +Add more data integrity checks to the regression tests suite. + +### 0.6.3 +Make catalog diffs more readable. + +### 0.6.2 +Clean up regression test artifacts on any exception. + +### 0.6.1 +Modify diff output for `discover` and `read` tests. + +### 0.5.1 +Handle connector command execution errors. + +### 0.5.0 +Add new tests and confirmation prompts. + +### 0.4.0 +Introduce DuckDB to store the messages produced by the connector. + +### 0.3.0 +Pass connection id to the regression tests suite. + ### 0.2.0 Declare the regression tests suite. diff --git a/airbyte-ci/connectors/live-tests/poetry.lock b/airbyte-ci/connectors/live-tests/poetry.lock index 9200049d8e318..15431883af4e3 100644 --- a/airbyte-ci/connectors/live-tests/poetry.lock +++ b/airbyte-ci/connectors/live-tests/poetry.lock @@ -1,14 +1,166 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. + +[[package]] +name = "aiohttp" +version = "3.9.4" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "aiohttp-3.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:76d32588ef7e4a3f3adff1956a0ba96faabbdee58f2407c122dd45aa6e34f372"}, + {file = "aiohttp-3.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:56181093c10dbc6ceb8a29dfeea1e815e1dfdc020169203d87fd8d37616f73f9"}, + {file = "aiohttp-3.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7a5b676d3c65e88b3aca41816bf72831898fcd73f0cbb2680e9d88e819d1e4d"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1df528a85fb404899d4207a8d9934cfd6be626e30e5d3a5544a83dbae6d8a7e"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f595db1bceabd71c82e92df212dd9525a8a2c6947d39e3c994c4f27d2fe15b11"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c0b09d76e5a4caac3d27752027fbd43dc987b95f3748fad2b924a03fe8632ad"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:689eb4356649ec9535b3686200b231876fb4cab4aca54e3bece71d37f50c1d13"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3666cf4182efdb44d73602379a66f5fdfd5da0db5e4520f0ac0dcca644a3497"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b65b0f8747b013570eea2f75726046fa54fa8e0c5db60f3b98dd5d161052004a"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1885d2470955f70dfdd33a02e1749613c5a9c5ab855f6db38e0b9389453dce7"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0593822dcdb9483d41f12041ff7c90d4d1033ec0e880bcfaf102919b715f47f1"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:47f6eb74e1ecb5e19a78f4a4228aa24df7fbab3b62d4a625d3f41194a08bd54f"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c8b04a3dbd54de6ccb7604242fe3ad67f2f3ca558f2d33fe19d4b08d90701a89"}, + {file = "aiohttp-3.9.4-cp310-cp310-win32.whl", hash = "sha256:8a78dfb198a328bfb38e4308ca8167028920fb747ddcf086ce706fbdd23b2926"}, + {file = "aiohttp-3.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:e78da6b55275987cbc89141a1d8e75f5070e577c482dd48bd9123a76a96f0bbb"}, + {file = "aiohttp-3.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c111b3c69060d2bafc446917534150fd049e7aedd6cbf21ba526a5a97b4402a5"}, + {file = "aiohttp-3.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:efbdd51872cf170093998c87ccdf3cb5993add3559341a8e5708bcb311934c94"}, + {file = "aiohttp-3.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7bfdb41dc6e85d8535b00d73947548a748e9534e8e4fddd2638109ff3fb081df"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bd9d334412961125e9f68d5b73c1d0ab9ea3f74a58a475e6b119f5293eee7ba"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:35d78076736f4a668d57ade00c65d30a8ce28719d8a42471b2a06ccd1a2e3063"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:824dff4f9f4d0f59d0fa3577932ee9a20e09edec8a2f813e1d6b9f89ced8293f"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52b8b4e06fc15519019e128abedaeb56412b106ab88b3c452188ca47a25c4093"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eae569fb1e7559d4f3919965617bb39f9e753967fae55ce13454bec2d1c54f09"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:69b97aa5792428f321f72aeb2f118e56893371f27e0b7d05750bcad06fc42ca1"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4d79aad0ad4b980663316f26d9a492e8fab2af77c69c0f33780a56843ad2f89e"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:d6577140cd7db19e430661e4b2653680194ea8c22c994bc65b7a19d8ec834403"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:9860d455847cd98eb67897f5957b7cd69fbcb436dd3f06099230f16a66e66f79"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:69ff36d3f8f5652994e08bd22f093e11cfd0444cea310f92e01b45a4e46b624e"}, + {file = "aiohttp-3.9.4-cp311-cp311-win32.whl", hash = "sha256:e27d3b5ed2c2013bce66ad67ee57cbf614288bda8cdf426c8d8fe548316f1b5f"}, + {file = "aiohttp-3.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d6a67e26daa686a6fbdb600a9af8619c80a332556245fa8e86c747d226ab1a1e"}, + {file = "aiohttp-3.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c5ff8ff44825736a4065d8544b43b43ee4c6dd1530f3a08e6c0578a813b0aa35"}, + {file = "aiohttp-3.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d12a244627eba4e9dc52cbf924edef905ddd6cafc6513849b4876076a6f38b0e"}, + {file = "aiohttp-3.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dcad56c8d8348e7e468899d2fb3b309b9bc59d94e6db08710555f7436156097f"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f7e69a7fd4b5ce419238388e55abd220336bd32212c673ceabc57ccf3d05b55"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4870cb049f10d7680c239b55428916d84158798eb8f353e74fa2c98980dcc0b"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2feaf1b7031ede1bc0880cec4b0776fd347259a723d625357bb4b82f62687b"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:939393e8c3f0a5bcd33ef7ace67680c318dc2ae406f15e381c0054dd658397de"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d2334e387b2adcc944680bebcf412743f2caf4eeebd550f67249c1c3696be04"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e0198ea897680e480845ec0ffc5a14e8b694e25b3f104f63676d55bf76a82f1a"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e40d2cd22914d67c84824045861a5bb0fb46586b15dfe4f046c7495bf08306b2"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:aba80e77c227f4234aa34a5ff2b6ff30c5d6a827a91d22ff6b999de9175d71bd"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:fb68dc73bc8ac322d2e392a59a9e396c4f35cb6fdbdd749e139d1d6c985f2527"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f3460a92638dce7e47062cf088d6e7663adb135e936cb117be88d5e6c48c9d53"}, + {file = "aiohttp-3.9.4-cp312-cp312-win32.whl", hash = "sha256:32dc814ddbb254f6170bca198fe307920f6c1308a5492f049f7f63554b88ef36"}, + {file = "aiohttp-3.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:63f41a909d182d2b78fe3abef557fcc14da50c7852f70ae3be60e83ff64edba5"}, + {file = "aiohttp-3.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c3770365675f6be220032f6609a8fbad994d6dcf3ef7dbcf295c7ee70884c9af"}, + {file = "aiohttp-3.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:305edae1dea368ce09bcb858cf5a63a064f3bff4767dec6fa60a0cc0e805a1d3"}, + {file = "aiohttp-3.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6f121900131d116e4a93b55ab0d12ad72573f967b100e49086e496a9b24523ea"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b71e614c1ae35c3d62a293b19eface83d5e4d194e3eb2fabb10059d33e6e8cbf"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:419f009fa4cfde4d16a7fc070d64f36d70a8d35a90d71aa27670bba2be4fd039"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b39476ee69cfe64061fd77a73bf692c40021f8547cda617a3466530ef63f947"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b33f34c9c7decdb2ab99c74be6443942b730b56d9c5ee48fb7df2c86492f293c"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c78700130ce2dcebb1a8103202ae795be2fa8c9351d0dd22338fe3dac74847d9"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:268ba22d917655d1259af2d5659072b7dc11b4e1dc2cb9662fdd867d75afc6a4"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:17e7c051f53a0d2ebf33013a9cbf020bb4e098c4bc5bce6f7b0c962108d97eab"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:7be99f4abb008cb38e144f85f515598f4c2c8932bf11b65add0ff59c9c876d99"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:d58a54d6ff08d2547656356eea8572b224e6f9bbc0cf55fa9966bcaac4ddfb10"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7673a76772bda15d0d10d1aa881b7911d0580c980dbd16e59d7ba1422b2d83cd"}, + {file = "aiohttp-3.9.4-cp38-cp38-win32.whl", hash = "sha256:e4370dda04dc8951012f30e1ce7956a0a226ac0714a7b6c389fb2f43f22a250e"}, + {file = "aiohttp-3.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:eb30c4510a691bb87081192a394fb661860e75ca3896c01c6d186febe7c88530"}, + {file = "aiohttp-3.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:84e90494db7df3be5e056f91412f9fa9e611fbe8ce4aaef70647297f5943b276"}, + {file = "aiohttp-3.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7d4845f8501ab28ebfdbeab980a50a273b415cf69e96e4e674d43d86a464df9d"}, + {file = "aiohttp-3.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:69046cd9a2a17245c4ce3c1f1a4ff8c70c7701ef222fce3d1d8435f09042bba1"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b73a06bafc8dcc508420db43b4dd5850e41e69de99009d0351c4f3007960019"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:418bb0038dfafeac923823c2e63226179976c76f981a2aaad0ad5d51f2229bca"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:71a8f241456b6c2668374d5d28398f8e8cdae4cce568aaea54e0f39359cd928d"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:935c369bf8acc2dc26f6eeb5222768aa7c62917c3554f7215f2ead7386b33748"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74e4e48c8752d14ecfb36d2ebb3d76d614320570e14de0a3aa7a726ff150a03c"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:916b0417aeddf2c8c61291238ce25286f391a6acb6f28005dd9ce282bd6311b6"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9b6787b6d0b3518b2ee4cbeadd24a507756ee703adbac1ab6dc7c4434b8c572a"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:221204dbda5ef350e8db6287937621cf75e85778b296c9c52260b522231940ed"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:10afd99b8251022ddf81eaed1d90f5a988e349ee7d779eb429fb07b670751e8c"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2506d9f7a9b91033201be9ffe7d89c6a54150b0578803cce5cb84a943d075bc3"}, + {file = "aiohttp-3.9.4-cp39-cp39-win32.whl", hash = "sha256:e571fdd9efd65e86c6af2f332e0e95dad259bfe6beb5d15b3c3eca3a6eb5d87b"}, + {file = "aiohttp-3.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:7d29dd5319d20aa3b7749719ac9685fbd926f71ac8c77b2477272725f882072d"}, + {file = "aiohttp-3.9.4.tar.gz", hash = "sha256:6ff71ede6d9a5a58cfb7b6fffc83ab5d4a63138276c771ac91ceaaddf5459644"}, +] + +[package.dependencies] +aiosignal = ">=1.1.2" +async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} +attrs = ">=17.3.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +yarl = ">=1.0,<2.0" + +[package.extras] +speedups = ["Brotli", "aiodns", "brotlicffi"] + +[[package]] +name = "aioquic" +version = "0.9.25" +description = "An implementation of QUIC and HTTP/3" +optional = false +python-versions = ">=3.8" +files = [ + {file = "aioquic-0.9.25-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:4032a718dea1cc670379dcac15da6ee49440ffaffca565d4505c74f6ac56bb34"}, + {file = "aioquic-0.9.25-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:9a416579f78177ea3590fdb16933f6168f425f9109fcad00e09b3ac3f991d0bb"}, + {file = "aioquic-0.9.25-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a7a69f4396540e38caf2cf3f69f42844a9130e3dac2590fd8713d5dc77b3a1f"}, + {file = "aioquic-0.9.25-cp38-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7fd3b0e42e3dab1ca7396fbb6810deb3a0d9324bfc730fb4a7697de08f1b4dc3"}, + {file = "aioquic-0.9.25-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e4f592f0ad0d57753c7d3851f75041052528b76a7255011294b208c6a9e360b"}, + {file = "aioquic-0.9.25-cp38-abi3-win32.whl", hash = "sha256:18658be4dc06eb1cba9a7bbc80b716b25d3dcbfb89360575de9e2b66c0bee6a7"}, + {file = "aioquic-0.9.25-cp38-abi3-win_amd64.whl", hash = "sha256:da07befc3fa186621a6ff34695d9bf51c803e49f6c02fec53f50c86b74cdd55f"}, + {file = "aioquic-0.9.25-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:cbd60cec8cc8e134dc1e2ebb79047827298b84d3b5ff011c36ee101110da63b8"}, + {file = "aioquic-0.9.25-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f73db85db29e35260f85961840d5089c3da3e404c6b7dfdaadbd9842a53c10a1"}, + {file = "aioquic-0.9.25-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bac804af55b230acaebefc33eb04356df1844cc77da5f4a7f860cbe41052553d"}, + {file = "aioquic-0.9.25-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ab61fe290e3eed71e2f0ee1dd6916040adc087fc2d4f9dc0dfd037c09a6defc"}, + {file = "aioquic-0.9.25-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9852358f7bbb52c56e1151fa054505a3880f1d2cffef8a83a1bbb653a2faaab0"}, + {file = "aioquic-0.9.25-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4d8b00e2fbf6fee5c9bb5e6d481f1d414f9a3318ae500f673470f6571f2455dd"}, + {file = "aioquic-0.9.25-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd1cda94f7c5e1a4bb75a2f195c0f20839b54b014e3d81eeab47d6a625c7a761"}, + {file = "aioquic-0.9.25-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3fad05736e0152e698a3fd18d421bab1a77f379ff085b953e306e53df00d0b9e"}, + {file = "aioquic-0.9.25-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:827652aa7b52ac069fc1fc9b1d8308f6c19adcfb86cd7f563c0ce5be8b416ce9"}, + {file = "aioquic-0.9.25-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7fb11167019d24ec9b5c62e40cef365a4911cd74f5fb23a1283d772e92c8ef7d"}, + {file = "aioquic-0.9.25-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:45241ac5b9b6d4cd976109220dfecddc377d610d4675fffb69869bedcdfa841c"}, + {file = "aioquic-0.9.25-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8637030a95f68454cdaa58c0a7d0cbee5eca1e694a5cb8d6c179846f7d4d86c"}, + {file = "aioquic-0.9.25-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4d4641eee9cdd05b9c11088077b376423f8ed148f198d491d72d8189596f1aaf"}, + {file = "aioquic-0.9.25-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb187080955b026da4d3c9ea5fa1be32c4413e27bd8e458f66d94bf9a2b42e72"}, + {file = "aioquic-0.9.25-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0066c0867c7c78aad05cd1f7ebcc1a61b61f3dbc57e65823df26edc0098b6c75"}, + {file = "aioquic-0.9.25.tar.gz", hash = "sha256:70795c78905326d855c2ae524072234aae586c789b81292e272d021e9b0430a3"}, +] + +[package.dependencies] +certifi = "*" +cryptography = "*" +pylsqpack = ">=0.3.3,<0.4.0" +pyopenssl = ">=22" +service-identity = ">=23.1.0" + +[package.extras] +dev = ["coverage[toml] (>=7.2.2)"] + +[[package]] +name = "aiosignal" +version = "1.3.1" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = false +python-versions = ">=3.7" +files = [ + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, + {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" [[package]] name = "airbyte-protocol-models" -version = "0.7.0" +version = "0.9.0" description = "Declares the Airbyte Protocol." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte_protocol_models-0.7.0-py3-none-any.whl", hash = "sha256:0b038134f12eff2c5f8265751a6915f5d247fb15d62c878bdeb1a6fefe1eb59a"}, - {file = "airbyte_protocol_models-0.7.0.tar.gz", hash = "sha256:e084970365ff5c245d3dbfa58d0d2134e8f97455835e5a08dfd9be77b4be016c"}, + {file = "airbyte_protocol_models-0.9.0-py3-none-any.whl", hash = "sha256:e972e140b5efd1edad5a338bcae8fdee9fc12545caf2c321e0f61b151c163a9b"}, + {file = "airbyte_protocol_models-0.9.0.tar.gz", hash = "sha256:40b69c33df23fe82d7078e84beb123bd604480e4d73cb277a890fcc92aedc8d2"}, ] [package.dependencies] @@ -36,21 +188,74 @@ doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphin test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] trio = ["trio (>=0.23)"] +[[package]] +name = "asgiref" +version = "3.7.2" +description = "ASGI specs, helper code, and adapters" +optional = false +python-versions = ">=3.7" +files = [ + {file = "asgiref-3.7.2-py3-none-any.whl", hash = "sha256:89b2ef2247e3b562a16eef663bc0e2e703ec6468e2fa8a5cd61cd449786d4f6e"}, + {file = "asgiref-3.7.2.tar.gz", hash = "sha256:9e0ce3aa93a819ba5b45120216b23878cf6e8525eb3848653452b4192b92afed"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""} + +[package.extras] +tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] + +[[package]] +name = "asn1crypto" +version = "1.5.1" +description = "Fast ASN.1 parser and serializer with definitions for private keys, public keys, certificates, CRL, OCSP, CMS, PKCS#3, PKCS#7, PKCS#8, PKCS#12, PKCS#5, X.509 and TSP" +optional = false +python-versions = "*" +files = [ + {file = "asn1crypto-1.5.1-py2.py3-none-any.whl", hash = "sha256:db4e40728b728508912cbb3d44f19ce188f218e9eba635821bb4b68564f8fd67"}, + {file = "asn1crypto-1.5.1.tar.gz", hash = "sha256:13ae38502be632115abf8a24cbe5f4da52e3b5231990aff31123c805306ccb9c"}, +] + +[[package]] +name = "async-timeout" +version = "4.0.3" +description = "Timeout context manager for asyncio programs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, +] + [[package]] name = "asyncclick" -version = "8.1.7.1" +version = "8.1.7.2" description = "Composable command line interface toolkit, async version" optional = false python-versions = ">=3.7" files = [ - {file = "asyncclick-8.1.7.1-py3-none-any.whl", hash = "sha256:e0fea5f0223ac45cfc26153cc80a58cc65fc077ac8de79be49248c918e8c3422"}, - {file = "asyncclick-8.1.7.1.tar.gz", hash = "sha256:a47b61258a689212cf9463fbf3b4cc52d05bfd03185f6ead2315fc03fd17ef75"}, + {file = "asyncclick-8.1.7.2-py3-none-any.whl", hash = "sha256:1ab940b04b22cb89b5b400725132b069d01b0c3472a9702c7a2c9d5d007ded02"}, + {file = "asyncclick-8.1.7.2.tar.gz", hash = "sha256:219ea0f29ccdc1bb4ff43bcab7ce0769ac6d48a04f997b43ec6bee99a222daa0"}, ] [package.dependencies] anyio = "*" colorama = {version = "*", markers = "platform_system == \"Windows\""} +[[package]] +name = "asyncer" +version = "0.0.5" +description = "Asyncer, async and await, focused on developer experience." +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "asyncer-0.0.5-py3-none-any.whl", hash = "sha256:ba06d6de3c750763868dffacf89b18d40b667605b0241d31c2ee43f188e2ab74"}, + {file = "asyncer-0.0.5.tar.gz", hash = "sha256:2979f3e04cbedfe5cfeb79027dcf7d004fcc4430a0ca0066ae20490f218ec06e"}, +] + +[package.dependencies] +anyio = ">=3.4.0,<5.0" + [[package]] name = "attrs" version = "23.2.0" @@ -83,13 +288,13 @@ files = [ [[package]] name = "beartype" -version = "0.17.2" +version = "0.18.2" description = "Unbearably fast runtime type checking in pure Python." optional = false python-versions = ">=3.8.0" files = [ - {file = "beartype-0.17.2-py3-none-any.whl", hash = "sha256:c22b21e1f785cfcf5c4d3d13070f532b6243a3ad67e68d2298ff08d539847dce"}, - {file = "beartype-0.17.2.tar.gz", hash = "sha256:e911e1ae7de4bccd15745f7643609d8732f64de5c2fb844e89cbbed1c5a8d495"}, + {file = "beartype-0.18.2-py3-none-any.whl", hash = "sha256:561aa7858e92289b952a6fc5faf15ea32f9519c07cdc0f4df7a01b59fc4bbeaf"}, + {file = "beartype-0.18.2.tar.gz", hash = "sha256:a6fbc0be9269889312388bfec6a9ddf41bf8fe31b68bcf9c8239db35cd38f411"}, ] [package.extras] @@ -99,6 +304,109 @@ doc-rtd = ["autoapi (>=0.9.0)", "pydata-sphinx-theme (<=0.7.2)", "sphinx (>=4.2. test-tox = ["equinox", "mypy (>=0.800)", "numpy", "pandera", "pytest (>=4.0.0)", "sphinx", "typing-extensions (>=3.10.0.0)"] test-tox-coverage = ["coverage (>=5.5)"] +[[package]] +name = "blinker" +version = "1.7.0" +description = "Fast, simple object-to-object and broadcast signaling" +optional = false +python-versions = ">=3.8" +files = [ + {file = "blinker-1.7.0-py3-none-any.whl", hash = "sha256:c3f865d4d54db7abc53758a01601cf343fe55b84c1de4e3fa910e420b438d5b9"}, + {file = "blinker-1.7.0.tar.gz", hash = "sha256:e6820ff6fa4e4d1d8e2747c2283749c3f547e4fee112b98555cdcdae32996182"}, +] + +[[package]] +name = "brotli" +version = "1.1.0" +description = "Python bindings for the Brotli compression library" +optional = false +python-versions = "*" +files = [ + {file = "Brotli-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e1140c64812cb9b06c922e77f1c26a75ec5e3f0fb2bf92cc8c58720dec276752"}, + {file = "Brotli-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c8fd5270e906eef71d4a8d19b7c6a43760c6abcfcc10c9101d14eb2357418de9"}, + {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ae56aca0402a0f9a3431cddda62ad71666ca9d4dc3a10a142b9dce2e3c0cda3"}, + {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:43ce1b9935bfa1ede40028054d7f48b5469cd02733a365eec8a329ffd342915d"}, + {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:7c4855522edb2e6ae7fdb58e07c3ba9111e7621a8956f481c68d5d979c93032e"}, + {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:38025d9f30cf4634f8309c6874ef871b841eb3c347e90b0851f63d1ded5212da"}, + {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e6a904cb26bfefc2f0a6f240bdf5233be78cd2488900a2f846f3c3ac8489ab80"}, + {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a37b8f0391212d29b3a91a799c8e4a2855e0576911cdfb2515487e30e322253d"}, + {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e84799f09591700a4154154cab9787452925578841a94321d5ee8fb9a9a328f0"}, + {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f66b5337fa213f1da0d9000bc8dc0cb5b896b726eefd9c6046f699b169c41b9e"}, + {file = "Brotli-1.1.0-cp310-cp310-win32.whl", hash = "sha256:be36e3d172dc816333f33520154d708a2657ea63762ec16b62ece02ab5e4daf2"}, + {file = "Brotli-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:0c6244521dda65ea562d5a69b9a26120769b7a9fb3db2fe9545935ed6735b128"}, + {file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a3daabb76a78f829cafc365531c972016e4aa8d5b4bf60660ad8ecee19df7ccc"}, + {file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c8146669223164fc87a7e3de9f81e9423c67a79d6b3447994dfb9c95da16e2d6"}, + {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30924eb4c57903d5a7526b08ef4a584acc22ab1ffa085faceb521521d2de32dd"}, + {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ceb64bbc6eac5a140ca649003756940f8d6a7c444a68af170b3187623b43bebf"}, + {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a469274ad18dc0e4d316eefa616d1d0c2ff9da369af19fa6f3daa4f09671fd61"}, + {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:524f35912131cc2cabb00edfd8d573b07f2d9f21fa824bd3fb19725a9cf06327"}, + {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5b3cc074004d968722f51e550b41a27be656ec48f8afaeeb45ebf65b561481dd"}, + {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:19c116e796420b0cee3da1ccec3b764ed2952ccfcc298b55a10e5610ad7885f9"}, + {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:510b5b1bfbe20e1a7b3baf5fed9e9451873559a976c1a78eebaa3b86c57b4265"}, + {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1fd8a29719ccce974d523580987b7f8229aeace506952fa9ce1d53a033873c8"}, + {file = "Brotli-1.1.0-cp311-cp311-win32.whl", hash = "sha256:39da8adedf6942d76dc3e46653e52df937a3c4d6d18fdc94a7c29d263b1f5b50"}, + {file = "Brotli-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:aac0411d20e345dc0920bdec5548e438e999ff68d77564d5e9463a7ca9d3e7b1"}, + {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:316cc9b17edf613ac76b1f1f305d2a748f1b976b033b049a6ecdfd5612c70409"}, + {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:caf9ee9a5775f3111642d33b86237b05808dafcd6268faa492250e9b78046eb2"}, + {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70051525001750221daa10907c77830bc889cb6d865cc0b813d9db7fefc21451"}, + {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7f4bf76817c14aa98cc6697ac02f3972cb8c3da93e9ef16b9c66573a68014f91"}, + {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0c5516f0aed654134a2fc936325cc2e642f8a0e096d075209672eb321cff408"}, + {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c3020404e0b5eefd7c9485ccf8393cfb75ec38ce75586e046573c9dc29967a0"}, + {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4ed11165dd45ce798d99a136808a794a748d5dc38511303239d4e2363c0695dc"}, + {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4093c631e96fdd49e0377a9c167bfd75b6d0bad2ace734c6eb20b348bc3ea180"}, + {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e4c4629ddad63006efa0ef968c8e4751c5868ff0b1c5c40f76524e894c50248"}, + {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:861bf317735688269936f755fa136a99d1ed526883859f86e41a5d43c61d8966"}, + {file = "Brotli-1.1.0-cp312-cp312-win32.whl", hash = "sha256:5f4d5ea15c9382135076d2fb28dde923352fe02951e66935a9efaac8f10e81b0"}, + {file = "Brotli-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:906bc3a79de8c4ae5b86d3d75a8b77e44404b0f4261714306e3ad248d8ab0951"}, + {file = "Brotli-1.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a090ca607cbb6a34b0391776f0cb48062081f5f60ddcce5d11838e67a01928d1"}, + {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de9d02f5bda03d27ede52e8cfe7b865b066fa49258cbab568720aa5be80a47d"}, + {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2333e30a5e00fe0fe55903c8832e08ee9c3b1382aacf4db26664a16528d51b4b"}, + {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4d4a848d1837973bf0f4b5e54e3bec977d99be36a7895c61abb659301b02c112"}, + {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:fdc3ff3bfccdc6b9cc7c342c03aa2400683f0cb891d46e94b64a197910dc4064"}, + {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:5eeb539606f18a0b232d4ba45adccde4125592f3f636a6182b4a8a436548b914"}, + {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:fd5f17ff8f14003595ab414e45fce13d073e0762394f957182e69035c9f3d7c2"}, + {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:069a121ac97412d1fe506da790b3e69f52254b9df4eb665cd42460c837193354"}, + {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e93dfc1a1165e385cc8239fab7c036fb2cd8093728cbd85097b284d7b99249a2"}, + {file = "Brotli-1.1.0-cp36-cp36m-win32.whl", hash = "sha256:a599669fd7c47233438a56936988a2478685e74854088ef5293802123b5b2460"}, + {file = "Brotli-1.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d143fd47fad1db3d7c27a1b1d66162e855b5d50a89666af46e1679c496e8e579"}, + {file = "Brotli-1.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:11d00ed0a83fa22d29bc6b64ef636c4552ebafcef57154b4ddd132f5638fbd1c"}, + {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f733d788519c7e3e71f0855c96618720f5d3d60c3cb829d8bbb722dddce37985"}, + {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:929811df5462e182b13920da56c6e0284af407d1de637d8e536c5cd00a7daf60"}, + {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0b63b949ff929fbc2d6d3ce0e924c9b93c9785d877a21a1b678877ffbbc4423a"}, + {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d192f0f30804e55db0d0e0a35d83a9fead0e9a359a9ed0285dbacea60cc10a84"}, + {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f296c40e23065d0d6650c4aefe7470d2a25fffda489bcc3eb66083f3ac9f6643"}, + {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:919e32f147ae93a09fe064d77d5ebf4e35502a8df75c29fb05788528e330fe74"}, + {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:23032ae55523cc7bccb4f6a0bf368cd25ad9bcdcc1990b64a647e7bbcce9cb5b"}, + {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:224e57f6eac61cc449f498cc5f0e1725ba2071a3d4f48d5d9dffba42db196438"}, + {file = "Brotli-1.1.0-cp37-cp37m-win32.whl", hash = "sha256:587ca6d3cef6e4e868102672d3bd9dc9698c309ba56d41c2b9c85bbb903cdb95"}, + {file = "Brotli-1.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2954c1c23f81c2eaf0b0717d9380bd348578a94161a65b3a2afc62c86467dd68"}, + {file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:efa8b278894b14d6da122a72fefcebc28445f2d3f880ac59d46c90f4c13be9a3"}, + {file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:03d20af184290887bdea3f0f78c4f737d126c74dc2f3ccadf07e54ceca3bf208"}, + {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6172447e1b368dcbc458925e5ddaf9113477b0ed542df258d84fa28fc45ceea7"}, + {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a743e5a28af5f70f9c080380a5f908d4d21d40e8f0e0c8901604d15cfa9ba751"}, + {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0541e747cce78e24ea12d69176f6a7ddb690e62c425e01d31cc065e69ce55b48"}, + {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cdbc1fc1bc0bff1cef838eafe581b55bfbffaed4ed0318b724d0b71d4d377619"}, + {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:890b5a14ce214389b2cc36ce82f3093f96f4cc730c1cffdbefff77a7c71f2a97"}, + {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ab4fbee0b2d9098c74f3057b2bc055a8bd92ccf02f65944a241b4349229185a"}, + {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:141bd4d93984070e097521ed07e2575b46f817d08f9fa42b16b9b5f27b5ac088"}, + {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fce1473f3ccc4187f75b4690cfc922628aed4d3dd013d047f95a9b3919a86596"}, + {file = "Brotli-1.1.0-cp38-cp38-win32.whl", hash = "sha256:db85ecf4e609a48f4b29055f1e144231b90edc90af7481aa731ba2d059226b1b"}, + {file = "Brotli-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3d7954194c36e304e1523f55d7042c59dc53ec20dd4e9ea9d151f1b62b4415c0"}, + {file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5fb2ce4b8045c78ebbc7b8f3c15062e435d47e7393cc57c25115cfd49883747a"}, + {file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7905193081db9bfa73b1219140b3d315831cbff0d8941f22da695832f0dd188f"}, + {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a77def80806c421b4b0af06f45d65a136e7ac0bdca3c09d9e2ea4e515367c7e9"}, + {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8dadd1314583ec0bf2d1379f7008ad627cd6336625d6679cf2f8e67081b83acf"}, + {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:901032ff242d479a0efa956d853d16875d42157f98951c0230f69e69f9c09bac"}, + {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:22fc2a8549ffe699bfba2256ab2ed0421a7b8fadff114a3d201794e45a9ff578"}, + {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ae15b066e5ad21366600ebec29a7ccbc86812ed267e4b28e860b8ca16a2bc474"}, + {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:949f3b7c29912693cee0afcf09acd6ebc04c57af949d9bf77d6101ebb61e388c"}, + {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:89f4988c7203739d48c6f806f1e87a1d96e0806d44f0fba61dba81392c9e474d"}, + {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:de6551e370ef19f8de1807d0a9aa2cdfdce2e85ce88b122fe9f6b2b076837e59"}, + {file = "Brotli-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f0d8a7a6b5983c2496e364b969f0e526647a06b075d034f3297dc66f3b360c64"}, + {file = "Brotli-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cdad5b9014d83ca68c25d2e9444e28e967ef16e80f6b436918c700c117a85467"}, + {file = "Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724"}, +] + [[package]] name = "cachetools" version = "5.3.3" @@ -146,6 +454,70 @@ files = [ {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] +[[package]] +name = "cffi" +version = "1.16.0" +description = "Foreign Function Interface for Python calling C code." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, + {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, + {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, + {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, + {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, +] + +[package.dependencies] +pycparser = "*" + [[package]] name = "charset-normalizer" version = "3.3.2" @@ -245,6 +617,44 @@ files = [ {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "cloud-sql-python-connector" +version = "1.8.0" +description = "The Cloud SQL Python Connector is a library that can be used alongside a database driver to allow users with sufficient permissions to connect to a Cloud SQL database without having to manually allowlist IPs or manage SSL certificates." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cloud-sql-python-connector-1.8.0.tar.gz", hash = "sha256:8ac77878700fba79699f4b9786d932f97c8580f8ace6e750f74427acd7e59a3f"}, + {file = "cloud_sql_python_connector-1.8.0-py2.py3-none-any.whl", hash = "sha256:f0f48e1975ebc3b73e5b0a9507e411ef40feebaac42185149904b2028004b35a"}, +] + +[package.dependencies] +aiohttp = "*" +cryptography = ">=42.0.0" +google-auth = "*" +pg8000 = {version = ">=1.30.5", optional = true, markers = "extra == \"pg8000\""} +Requests = "*" + +[package.extras] +asyncpg = ["asyncpg (>=0.29.0)"] +pg8000 = ["pg8000 (>=1.30.5)"] +pymysql = ["PyMySQL (>=1.1.0)"] +pytds = ["python-tds (>=1.15.0)"] + [[package]] name = "colorama" version = "0.4.6" @@ -256,6 +666,87 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "connection-retriever" +version = "0.4.0" +description = "A tool to retrieve connection information from our Airbyte Cloud config api database" +optional = false +python-versions = "^3.10" +files = [] +develop = false + +[package.dependencies] +click = "^8.1.7" +cloud-sql-python-connector = {version = "^1.7.0", extras = ["pg8000"]} +dpath = "^2.1.6" +google-cloud-iam = "^2.14.3" +google-cloud-logging = "^3.9.0" +google-cloud-secret-manager = "^2.18.3" +python-dotenv = "^1.0.1" +requests = "^2.31.0" +sqlalchemy = "^2.0.28" + +[package.source] +type = "git" +url = "git@github.com:airbytehq/airbyte-platform-internal" +reference = "HEAD" +resolved_reference = "c42ab098ecd05de671dce528a2da58599674f49c" +subdirectory = "tools/connection-retriever" + +[[package]] +name = "cryptography" +version = "42.0.5" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, + {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, + {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, + {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, + {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, + {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = "sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, + {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, +] + +[package.dependencies] +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] +nox = ["nox"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + [[package]] name = "dagger-io" version = "0.9.6" @@ -278,6 +769,24 @@ platformdirs = ">=2.6.2" rich = ">=10.11.0" typing-extensions = ">=4.8.0" +[[package]] +name = "deepdiff" +version = "6.7.1" +description = "Deep Difference and Search of any Python object/data. Recreate objects by adding adding deltas to each other." +optional = false +python-versions = ">=3.7" +files = [ + {file = "deepdiff-6.7.1-py3-none-any.whl", hash = "sha256:58396bb7a863cbb4ed5193f548c56f18218060362311aa1dc36397b2f25108bd"}, + {file = "deepdiff-6.7.1.tar.gz", hash = "sha256:b367e6fa6caac1c9f500adc79ada1b5b1242c50d5f716a1a4362030197847d30"}, +] + +[package.dependencies] +ordered-set = ">=4.0.2,<4.2.0" + +[package.extras] +cli = ["click (==8.1.3)", "pyyaml (==6.0.1)"] +optimize = ["orjson"] + [[package]] name = "docker" version = "6.1.3" @@ -299,6 +808,72 @@ websocket-client = ">=0.32.0" [package.extras] ssh = ["paramiko (>=2.4.3)"] +[[package]] +name = "dpath" +version = "2.1.6" +description = "Filesystem-like pathing and searching for dictionaries" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dpath-2.1.6-py3-none-any.whl", hash = "sha256:31407395b177ab63ef72e2f6ae268c15e938f2990a8ecf6510f5686c02b6db73"}, + {file = "dpath-2.1.6.tar.gz", hash = "sha256:f1e07c72e8605c6a9e80b64bc8f42714de08a789c7de417e49c3f87a19692e47"}, +] + +[[package]] +name = "duckdb" +version = "0.10.1" +description = "DuckDB in-process database" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "duckdb-0.10.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0ac172788e3d8e410e009e3699016a4d7f17b4c7cde20f98856fca1fea79d247"}, + {file = "duckdb-0.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f754c20d3b963574da58b0d22029681b79c63f2e32060f10b687f41b7bba54d7"}, + {file = "duckdb-0.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6c68b1ef88b8cce185381ec69f437d20059c30623375bab41ac07a1104acdb57"}, + {file = "duckdb-0.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f566f615278844ea240c9a3497c0ef201331628f78e0f9f4d64f72f82210e750"}, + {file = "duckdb-0.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67d2996c3372a0f7d8f41f1c49e00ecdb26f83cdd9132b76730224ad68b1f1e3"}, + {file = "duckdb-0.10.1-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5c3b3a18a58eebabb426beafc2f7da01d59805d660fc909e5e143b6db04d881a"}, + {file = "duckdb-0.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:343795d13ec3d8cd06c250225a05fd3c348c3ed49cccdde01addd46cb50f3559"}, + {file = "duckdb-0.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:33f99c2e9e4060464673912312b4ec91060d66638756592c9484c62824ff4e85"}, + {file = "duckdb-0.10.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fdbe4173729043b2fd949be83135b035820bb2faf64648500563b16f3f6f02ee"}, + {file = "duckdb-0.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f90738310a76bd1618acbc7345175582d36b6907cb0ed07841a3d800dea189d6"}, + {file = "duckdb-0.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0d14d00560832592cbac2817847b649bd1d573f125d064518afb6eec5b02e15a"}, + {file = "duckdb-0.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11c0bf253c96079c6139e8a0880300d80f4dc9f21a8c5c239d2ebc060b227d46"}, + {file = "duckdb-0.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcc60833bb1a1fb2c33b052cf793fef48f681c565d982acff6ac7a86369794da"}, + {file = "duckdb-0.10.1-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:88cdc0c2501dd7a65b1df2a76d7624b93d9b6d27febd2ee80b7e5643a0b40bcb"}, + {file = "duckdb-0.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:698a8d1d48b150d344d8aa6dbc30a22ea30fb14ff2b15c90004fc9fcb0b3a3e9"}, + {file = "duckdb-0.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:b450aa2b3e0eb1fc0f7ad276bd1e4a5a03b1a4def6c45366af17557de2cafbdf"}, + {file = "duckdb-0.10.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:40dd55ea9c31abc69e5a8299f16c877e0b1950fd9a311c117efb4dd3c0dc8458"}, + {file = "duckdb-0.10.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d7c1b3538bb9c2b49f48b26f092444525b22186efa4e77ba070603ed4a348a66"}, + {file = "duckdb-0.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bce024b69bae426b0739c470803f7b44261bdc0c0700ea7c41dff5f2d70ca4f3"}, + {file = "duckdb-0.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52af2a078340b2e1b57958477ebc1be07786d3ad5796777e87d4f453e0477b4c"}, + {file = "duckdb-0.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3c52b08c773e52484542300339ebf295e3c9b12d5d7d49b2567e252c16205a7"}, + {file = "duckdb-0.10.1-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:097aa9b6d5c9f5d3ed8c35b16020a67731d04befc35f6b89ccb5db9d5f1489c4"}, + {file = "duckdb-0.10.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b5a14a80ad09d65c270d16761b04ea6b074811cdfde6b5e4db1a8b0184125d1b"}, + {file = "duckdb-0.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fb98dbbdbf8048b07223dc6e7401333bb4e83681dde4cded2d239051ea102b5"}, + {file = "duckdb-0.10.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:28857b0d595c229827cc3631ae9b74ff52d11614435aa715e09d8629d2e1b609"}, + {file = "duckdb-0.10.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d85645136fc25026978b5db81869e8a120cfb60e1645a29a0f6dd155be9e59e"}, + {file = "duckdb-0.10.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e2e10582db74b99051e718279c1be204c98a63a5b6aa4e09226b7249e414146"}, + {file = "duckdb-0.10.1-cp37-cp37m-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d6a88358d86a8ce689fdd4136514aebedf958e910361156a0bb0e53dc3c55f7d"}, + {file = "duckdb-0.10.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b025afa30fcdcede094386e7c519e6964d26de5ad95f4e04a2a0a713676d4465"}, + {file = "duckdb-0.10.1-cp37-cp37m-win_amd64.whl", hash = "sha256:910be5005de7427c5231a7200027e0adb951e048c612b895340effcd3e660d5a"}, + {file = "duckdb-0.10.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:13d81752763f14203a53981f32bd09731900eb6fda4048fbc532eae5e7bf30e5"}, + {file = "duckdb-0.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:21858225b8a5c5dead128f62e4e88facdcbfdce098e18cbcd86a6cd8f48fb2b3"}, + {file = "duckdb-0.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8bf46d55685906729998eca70ee751934e0425d86863148e658277526c54282e"}, + {file = "duckdb-0.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f786b4402b9c31461ea0520d919e2166df4f9e6e21fd3c7bb0035fa985b5dfe"}, + {file = "duckdb-0.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32e52c6e939a4bada220803e6bde6fc0ce870da5662a33cabdd3be14824183a6"}, + {file = "duckdb-0.10.1-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5c563b565ea68cfebe9c4078646503b3d38930218f9c3c278277d58952873771"}, + {file = "duckdb-0.10.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:af8382280f24273a535e08b80e9383ad739c66e22855ce68716dfbaeaf8910b9"}, + {file = "duckdb-0.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:2e6e01e2499e07873b09316bf4d6808f712c57034fa24c255565c4f92386e8e3"}, + {file = "duckdb-0.10.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7791a0aa2cea972a612d31d4a289c81c5d00181328ed4f7642907f68f8b1fb9f"}, + {file = "duckdb-0.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1ace20383fb0ba06229e060a6bb0bcfd48a4582a02e43f05991720504508eb59"}, + {file = "duckdb-0.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5aad3e085c33253c689205b5ea3c5d9d54117c1249276c90d495cb85d9adce76"}, + {file = "duckdb-0.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa08173f68e678793dfe6aab6490ac753204ca7935beb8dbde778dbe593552d8"}, + {file = "duckdb-0.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:525efad4e6caff80d0f6a51d466470839146e3880da36d4544fee7ff842e7e20"}, + {file = "duckdb-0.10.1-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:48d84577216010ee407913bad9dc47af4cbc65e479c91e130f7bd909a32caefe"}, + {file = "duckdb-0.10.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6e65f00294c3b8576ae651e91e732ea1cefc4aada89c307fb02f49231fd11e1f"}, + {file = "duckdb-0.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:30aa9dbbfc1f9607249fc148af9e6d6fd253fdc2f4c9924d4957d6a535558b4f"}, +] + [[package]] name = "exceptiongroup" version = "1.2.0" @@ -314,182 +889,954 @@ files = [ test = ["pytest (>=6)"] [[package]] -name = "gql" -version = "3.5.0" -description = "GraphQL client for Python" +name = "flask" +version = "3.0.3" +description = "A simple framework for building complex web applications." optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "gql-3.5.0-py2.py3-none-any.whl", hash = "sha256:70dda5694a5b194a8441f077aa5fb70cc94e4ec08016117523f013680901ecb7"}, - {file = "gql-3.5.0.tar.gz", hash = "sha256:ccb9c5db543682b28f577069950488218ed65d4ac70bb03b6929aaadaf636de9"}, + {file = "flask-3.0.3-py3-none-any.whl", hash = "sha256:34e815dfaa43340d1d15a5c3a02b8476004037eb4840b34910c6e21679d288f3"}, + {file = "flask-3.0.3.tar.gz", hash = "sha256:ceb27b0af3823ea2737928a4d99d125a06175b8512c445cbd9a9ce200ef76842"}, ] [package.dependencies] -anyio = ">=3.0,<5" -backoff = ">=1.11.1,<3.0" -graphql-core = ">=3.2,<3.3" -yarl = ">=1.6,<2.0" +blinker = ">=1.6.2" +click = ">=8.1.3" +itsdangerous = ">=2.1.2" +Jinja2 = ">=3.1.2" +Werkzeug = ">=3.0.0" [package.extras] -aiohttp = ["aiohttp (>=3.8.0,<4)", "aiohttp (>=3.9.0b0,<4)"] -all = ["aiohttp (>=3.8.0,<4)", "aiohttp (>=3.9.0b0,<4)", "botocore (>=1.21,<2)", "httpx (>=0.23.1,<1)", "requests (>=2.26,<3)", "requests-toolbelt (>=1.0.0,<2)", "websockets (>=10,<12)"] -botocore = ["botocore (>=1.21,<2)"] -dev = ["aiofiles", "aiohttp (>=3.8.0,<4)", "aiohttp (>=3.9.0b0,<4)", "black (==22.3.0)", "botocore (>=1.21,<2)", "check-manifest (>=0.42,<1)", "flake8 (==3.8.1)", "httpx (>=0.23.1,<1)", "isort (==4.3.21)", "mock (==4.0.2)", "mypy (==0.910)", "parse (==1.15.0)", "pytest (==7.4.2)", "pytest-asyncio (==0.21.1)", "pytest-console-scripts (==1.3.1)", "pytest-cov (==3.0.0)", "requests (>=2.26,<3)", "requests-toolbelt (>=1.0.0,<2)", "sphinx (>=5.3.0,<6)", "sphinx-argparse (==0.2.5)", "sphinx-rtd-theme (>=0.4,<1)", "types-aiofiles", "types-mock", "types-requests", "vcrpy (==4.4.0)", "websockets (>=10,<12)"] -httpx = ["httpx (>=0.23.1,<1)"] -requests = ["requests (>=2.26,<3)", "requests-toolbelt (>=1.0.0,<2)"] -test = ["aiofiles", "aiohttp (>=3.8.0,<4)", "aiohttp (>=3.9.0b0,<4)", "botocore (>=1.21,<2)", "httpx (>=0.23.1,<1)", "mock (==4.0.2)", "parse (==1.15.0)", "pytest (==7.4.2)", "pytest-asyncio (==0.21.1)", "pytest-console-scripts (==1.3.1)", "pytest-cov (==3.0.0)", "requests (>=2.26,<3)", "requests-toolbelt (>=1.0.0,<2)", "vcrpy (==4.4.0)", "websockets (>=10,<12)"] -test-no-transport = ["aiofiles", "mock (==4.0.2)", "parse (==1.15.0)", "pytest (==7.4.2)", "pytest-asyncio (==0.21.1)", "pytest-console-scripts (==1.3.1)", "pytest-cov (==3.0.0)", "vcrpy (==4.4.0)"] -websockets = ["websockets (>=10,<12)"] +async = ["asgiref (>=3.2)"] +dotenv = ["python-dotenv"] [[package]] -name = "graphql-core" -version = "3.2.3" -description = "GraphQL implementation for Python, a port of GraphQL.js, the JavaScript reference implementation for GraphQL." +name = "frozenlist" +version = "1.4.1" +description = "A list-like structure which implements collections.abc.MutableSequence" optional = false -python-versions = ">=3.6,<4" +python-versions = ">=3.8" files = [ - {file = "graphql-core-3.2.3.tar.gz", hash = "sha256:06d2aad0ac723e35b1cb47885d3e5c45e956a53bc1b209a9fc5369007fe46676"}, - {file = "graphql_core-3.2.3-py3-none-any.whl", hash = "sha256:5766780452bd5ec8ba133f8bf287dc92713e3868ddd83aee4faab9fc3e303dc3"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, + {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, + {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, + {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, + {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, + {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, + {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, + {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, + {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, + {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, + {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, + {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, + {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, ] [[package]] -name = "h11" -version = "0.14.0" -description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +name = "genson" +version = "1.2.2" +description = "GenSON is a powerful, user-friendly JSON Schema generator." optional = false -python-versions = ">=3.7" +python-versions = "*" files = [ - {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, - {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, + {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, ] [[package]] -name = "httpcore" -version = "1.0.4" -description = "A minimal low-level HTTP client." +name = "google-api-core" +version = "2.18.0" +description = "Google API client core library" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "httpcore-1.0.4-py3-none-any.whl", hash = "sha256:ac418c1db41bade2ad53ae2f3834a3a0f5ae76b56cf5aa497d2d033384fc7d73"}, - {file = "httpcore-1.0.4.tar.gz", hash = "sha256:cb2839ccfcba0d2d3c1131d3c3e26dfc327326fbe7a5dc0dbfe9f6c9151bb022"}, + {file = "google-api-core-2.18.0.tar.gz", hash = "sha256:62d97417bfc674d6cef251e5c4d639a9655e00c45528c4364fbfebb478ce72a9"}, + {file = "google_api_core-2.18.0-py3-none-any.whl", hash = "sha256:5a63aa102e0049abe85b5b88cb9409234c1f70afcda21ce1e40b285b9629c1d6"}, ] [package.dependencies] -certifi = "*" -h11 = ">=0.13,<0.15" +google-auth = ">=2.14.1,<3.0.dev0" +googleapis-common-protos = ">=1.56.2,<2.0.dev0" +grpcio = [ + {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, + {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, +] +grpcio-status = [ + {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, + {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, +] +proto-plus = ">=1.22.3,<2.0.0dev" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" +requests = ">=2.18.0,<3.0.0.dev0" [package.extras] -asyncio = ["anyio (>=4.0,<5.0)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.25.0)"] +grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"] +grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] +grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] -name = "httpx" -version = "0.27.0" -description = "The next generation HTTP client." +name = "google-auth" +version = "2.29.0" +description = "Google Authentication Library" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, - {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, + {file = "google-auth-2.29.0.tar.gz", hash = "sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360"}, + {file = "google_auth-2.29.0-py2.py3-none-any.whl", hash = "sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415"}, ] [package.dependencies] -anyio = "*" -certifi = "*" -httpcore = "==1.*" -idna = "*" -sniffio = "*" +cachetools = ">=2.0.0,<6.0" +pyasn1-modules = ">=0.2.1" +rsa = ">=3.1.4,<5" [package.extras] -brotli = ["brotli", "brotlicffi"] -cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] +enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] +pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] +reauth = ["pyu2f (>=0.1.5)"] +requests = ["requests (>=2.20.0,<3.0.0.dev0)"] [[package]] -name = "idna" -version = "3.6" -description = "Internationalized Domain Names in Applications (IDNA)" +name = "google-cloud-appengine-logging" +version = "1.4.3" +description = "Google Cloud Appengine Logging API client library" optional = false -python-versions = ">=3.5" +python-versions = ">=3.7" files = [ - {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, - {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, + {file = "google-cloud-appengine-logging-1.4.3.tar.gz", hash = "sha256:fb504e6199fe8de85baa9d31cecf6776877851fe58867de603317ec7cc739987"}, + {file = "google_cloud_appengine_logging-1.4.3-py2.py3-none-any.whl", hash = "sha256:8e30af51d853f219caf29e8b8b342b9ce8214b29f334dafae38d39aaaff7d372"}, ] +[package.dependencies] +google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} +google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0dev" +proto-plus = ">=1.22.3,<2.0.0dev" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" + [[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" +name = "google-cloud-audit-log" +version = "0.2.5" +description = "Google Cloud Audit Protos" optional = false python-versions = ">=3.7" files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, + {file = "google-cloud-audit-log-0.2.5.tar.gz", hash = "sha256:86e2faba3383adc8fd04a5bd7fd4f960b3e4aedaa7ed950f2f891ce16902eb6b"}, + {file = "google_cloud_audit_log-0.2.5-py2.py3-none-any.whl", hash = "sha256:18b94d4579002a450b7902cd2e8b8fdcb1ea2dd4df3b41f8f82be6d9f7fcd746"}, ] +[package.dependencies] +googleapis-common-protos = ">=1.56.2,<2.0dev" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" + [[package]] -name = "markdown-it-py" -version = "3.0.0" -description = "Python port of markdown-it. Markdown parsing, done right!" +name = "google-cloud-core" +version = "2.4.1" +description = "Google Cloud API client core library" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, - {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, + {file = "google-cloud-core-2.4.1.tar.gz", hash = "sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073"}, + {file = "google_cloud_core-2.4.1-py2.py3-none-any.whl", hash = "sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61"}, ] [package.dependencies] -mdurl = ">=0.1,<1.0" +google-api-core = ">=1.31.6,<2.0.dev0 || >2.3.0,<3.0.0dev" +google-auth = ">=1.25.0,<3.0dev" [package.extras] -benchmarking = ["psutil", "pytest", "pytest-benchmark"] -code-style = ["pre-commit (>=3.0,<4.0)"] -compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] -linkify = ["linkify-it-py (>=1,<3)"] -plugins = ["mdit-py-plugins"] -profiling = ["gprof2dot"] -rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] -testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] +grpc = ["grpcio (>=1.38.0,<2.0dev)", "grpcio-status (>=1.38.0,<2.0.dev0)"] [[package]] -name = "mdurl" -version = "0.1.2" -description = "Markdown URL utilities" +name = "google-cloud-iam" +version = "2.14.3" +description = "Google Cloud Iam API client library" optional = false python-versions = ">=3.7" files = [ - {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, - {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, + {file = "google-cloud-iam-2.14.3.tar.gz", hash = "sha256:c82e993f8a9219c5ba1fce139c34aed6f019dd5f9b45ce956d5430583d2af26e"}, + {file = "google_cloud_iam-2.14.3-py2.py3-none-any.whl", hash = "sha256:61b8555fd14240b050611d7fe9833f276202a306e4003e01fc7fb7d70d23e6c4"}, ] +[package.dependencies] +google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} +google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0dev" +proto-plus = ">=1.22.3,<2.0.0dev" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" + [[package]] -name = "multidict" -version = "6.0.5" -description = "multidict implementation" +name = "google-cloud-logging" +version = "3.10.0" +description = "Stackdriver Logging API client library" optional = false python-versions = ">=3.7" files = [ - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, - {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, - {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, + {file = "google-cloud-logging-3.10.0.tar.gz", hash = "sha256:d93d347351240ddb14cfe201987a2d32cf9d7f478b8b2fabed3015b425b3274f"}, + {file = "google_cloud_logging-3.10.0-py2.py3-none-any.whl", hash = "sha256:132192beb45731130a2ffbcd4b2b5cbd87370e7dcfa7397ae4002154f542bd20"}, +] + +[package.dependencies] +google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} +google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0dev" +google-cloud-appengine-logging = ">=0.1.0,<2.0.0dev" +google-cloud-audit-log = ">=0.1.0,<1.0.0dev" +google-cloud-core = ">=2.0.0,<3.0.0dev" +grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev" +proto-plus = [ + {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""}, + {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""}, +] +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" + +[[package]] +name = "google-cloud-secret-manager" +version = "2.19.0" +description = "Google Cloud Secret Manager API client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-cloud-secret-manager-2.19.0.tar.gz", hash = "sha256:bb918435835a14eb94785f4d4d9087bdcf1b6de306432d7edaa7d62e7f780c30"}, + {file = "google_cloud_secret_manager-2.19.0-py2.py3-none-any.whl", hash = "sha256:7dd9ad9ab3e70f9a7fbac432938b702ba23bce1207e9bda86463b6d6b1f5cdbb"}, +] + +[package.dependencies] +google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} +google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0dev" +grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev" +proto-plus = ">=1.22.3,<2.0.0dev" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" + +[[package]] +name = "googleapis-common-protos" +version = "1.63.0" +description = "Common protobufs used in Google APIs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "googleapis-common-protos-1.63.0.tar.gz", hash = "sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e"}, + {file = "googleapis_common_protos-1.63.0-py2.py3-none-any.whl", hash = "sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632"}, +] + +[package.dependencies] +grpcio = {version = ">=1.44.0,<2.0.0.dev0", optional = true, markers = "extra == \"grpc\""} +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" + +[package.extras] +grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] + +[[package]] +name = "gql" +version = "3.5.0" +description = "GraphQL client for Python" +optional = false +python-versions = "*" +files = [ + {file = "gql-3.5.0-py2.py3-none-any.whl", hash = "sha256:70dda5694a5b194a8441f077aa5fb70cc94e4ec08016117523f013680901ecb7"}, + {file = "gql-3.5.0.tar.gz", hash = "sha256:ccb9c5db543682b28f577069950488218ed65d4ac70bb03b6929aaadaf636de9"}, +] + +[package.dependencies] +anyio = ">=3.0,<5" +backoff = ">=1.11.1,<3.0" +graphql-core = ">=3.2,<3.3" +yarl = ">=1.6,<2.0" + +[package.extras] +aiohttp = ["aiohttp (>=3.8.0,<4)", "aiohttp (>=3.9.0b0,<4)"] +all = ["aiohttp (>=3.8.0,<4)", "aiohttp (>=3.9.0b0,<4)", "botocore (>=1.21,<2)", "httpx (>=0.23.1,<1)", "requests (>=2.26,<3)", "requests-toolbelt (>=1.0.0,<2)", "websockets (>=10,<12)"] +botocore = ["botocore (>=1.21,<2)"] +dev = ["aiofiles", "aiohttp (>=3.8.0,<4)", "aiohttp (>=3.9.0b0,<4)", "black (==22.3.0)", "botocore (>=1.21,<2)", "check-manifest (>=0.42,<1)", "flake8 (==3.8.1)", "httpx (>=0.23.1,<1)", "isort (==4.3.21)", "mock (==4.0.2)", "mypy (==0.910)", "parse (==1.15.0)", "pytest (==7.4.2)", "pytest-asyncio (==0.21.1)", "pytest-console-scripts (==1.3.1)", "pytest-cov (==3.0.0)", "requests (>=2.26,<3)", "requests-toolbelt (>=1.0.0,<2)", "sphinx (>=5.3.0,<6)", "sphinx-argparse (==0.2.5)", "sphinx-rtd-theme (>=0.4,<1)", "types-aiofiles", "types-mock", "types-requests", "vcrpy (==4.4.0)", "websockets (>=10,<12)"] +httpx = ["httpx (>=0.23.1,<1)"] +requests = ["requests (>=2.26,<3)", "requests-toolbelt (>=1.0.0,<2)"] +test = ["aiofiles", "aiohttp (>=3.8.0,<4)", "aiohttp (>=3.9.0b0,<4)", "botocore (>=1.21,<2)", "httpx (>=0.23.1,<1)", "mock (==4.0.2)", "parse (==1.15.0)", "pytest (==7.4.2)", "pytest-asyncio (==0.21.1)", "pytest-console-scripts (==1.3.1)", "pytest-cov (==3.0.0)", "requests (>=2.26,<3)", "requests-toolbelt (>=1.0.0,<2)", "vcrpy (==4.4.0)", "websockets (>=10,<12)"] +test-no-transport = ["aiofiles", "mock (==4.0.2)", "parse (==1.15.0)", "pytest (==7.4.2)", "pytest-asyncio (==0.21.1)", "pytest-console-scripts (==1.3.1)", "pytest-cov (==3.0.0)", "vcrpy (==4.4.0)"] +websockets = ["websockets (>=10,<12)"] + +[[package]] +name = "graphql-core" +version = "3.2.3" +description = "GraphQL implementation for Python, a port of GraphQL.js, the JavaScript reference implementation for GraphQL." +optional = false +python-versions = ">=3.6,<4" +files = [ + {file = "graphql-core-3.2.3.tar.gz", hash = "sha256:06d2aad0ac723e35b1cb47885d3e5c45e956a53bc1b209a9fc5369007fe46676"}, + {file = "graphql_core-3.2.3-py3-none-any.whl", hash = "sha256:5766780452bd5ec8ba133f8bf287dc92713e3868ddd83aee4faab9fc3e303dc3"}, +] + +[[package]] +name = "greenlet" +version = "3.0.3" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.7" +files = [ + {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, + {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, + {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, + {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, + {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, + {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, + {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, + {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, + {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, + {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, + {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, + {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, + {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, + {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, + {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, + {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil"] + +[[package]] +name = "grpc-google-iam-v1" +version = "0.13.0" +description = "IAM API client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "grpc-google-iam-v1-0.13.0.tar.gz", hash = "sha256:fad318608b9e093258fbf12529180f400d1c44453698a33509cc6ecf005b294e"}, + {file = "grpc_google_iam_v1-0.13.0-py2.py3-none-any.whl", hash = "sha256:53902e2af7de8df8c1bd91373d9be55b0743ec267a7428ea638db3775becae89"}, +] + +[package.dependencies] +googleapis-common-protos = {version = ">=1.56.0,<2.0.0dev", extras = ["grpc"]} +grpcio = ">=1.44.0,<2.0.0dev" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" + +[[package]] +name = "grpcio" +version = "1.62.1" +description = "HTTP/2-based RPC framework" +optional = false +python-versions = ">=3.7" +files = [ + {file = "grpcio-1.62.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:179bee6f5ed7b5f618844f760b6acf7e910988de77a4f75b95bbfaa8106f3c1e"}, + {file = "grpcio-1.62.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:48611e4fa010e823ba2de8fd3f77c1322dd60cb0d180dc6630a7e157b205f7ea"}, + {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:b2a0e71b0a2158aa4bce48be9f8f9eb45cbd17c78c7443616d00abbe2a509f6d"}, + {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fbe80577c7880911d3ad65e5ecc997416c98f354efeba2f8d0f9112a67ed65a5"}, + {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58f6c693d446964e3292425e1d16e21a97a48ba9172f2d0df9d7b640acb99243"}, + {file = "grpcio-1.62.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:77c339403db5a20ef4fed02e4d1a9a3d9866bf9c0afc77a42234677313ea22f3"}, + {file = "grpcio-1.62.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b5a4ea906db7dec694098435d84bf2854fe158eb3cd51e1107e571246d4d1d70"}, + {file = "grpcio-1.62.1-cp310-cp310-win32.whl", hash = "sha256:4187201a53f8561c015bc745b81a1b2d278967b8de35f3399b84b0695e281d5f"}, + {file = "grpcio-1.62.1-cp310-cp310-win_amd64.whl", hash = "sha256:844d1f3fb11bd1ed362d3fdc495d0770cfab75761836193af166fee113421d66"}, + {file = "grpcio-1.62.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:833379943d1728a005e44103f17ecd73d058d37d95783eb8f0b28ddc1f54d7b2"}, + {file = "grpcio-1.62.1-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:c7fcc6a32e7b7b58f5a7d27530669337a5d587d4066060bcb9dee7a8c833dfb7"}, + {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:fa7d28eb4d50b7cbe75bb8b45ed0da9a1dc5b219a0af59449676a29c2eed9698"}, + {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48f7135c3de2f298b833be8b4ae20cafe37091634e91f61f5a7eb3d61ec6f660"}, + {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71f11fd63365ade276c9d4a7b7df5c136f9030e3457107e1791b3737a9b9ed6a"}, + {file = "grpcio-1.62.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4b49fd8fe9f9ac23b78437da94c54aa7e9996fbb220bac024a67469ce5d0825f"}, + {file = "grpcio-1.62.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:482ae2ae78679ba9ed5752099b32e5fe580443b4f798e1b71df412abf43375db"}, + {file = "grpcio-1.62.1-cp311-cp311-win32.whl", hash = "sha256:1faa02530b6c7426404372515fe5ddf66e199c2ee613f88f025c6f3bd816450c"}, + {file = "grpcio-1.62.1-cp311-cp311-win_amd64.whl", hash = "sha256:5bd90b8c395f39bc82a5fb32a0173e220e3f401ff697840f4003e15b96d1befc"}, + {file = "grpcio-1.62.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:b134d5d71b4e0837fff574c00e49176051a1c532d26c052a1e43231f252d813b"}, + {file = "grpcio-1.62.1-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:d1f6c96573dc09d50dbcbd91dbf71d5cf97640c9427c32584010fbbd4c0e0037"}, + {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:359f821d4578f80f41909b9ee9b76fb249a21035a061a327f91c953493782c31"}, + {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a485f0c2010c696be269184bdb5ae72781344cb4e60db976c59d84dd6354fac9"}, + {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b50b09b4dc01767163d67e1532f948264167cd27f49e9377e3556c3cba1268e1"}, + {file = "grpcio-1.62.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3227c667dccbe38f2c4d943238b887bac588d97c104815aecc62d2fd976e014b"}, + {file = "grpcio-1.62.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3952b581eb121324853ce2b191dae08badb75cd493cb4e0243368aa9e61cfd41"}, + {file = "grpcio-1.62.1-cp312-cp312-win32.whl", hash = "sha256:83a17b303425104d6329c10eb34bba186ffa67161e63fa6cdae7776ff76df73f"}, + {file = "grpcio-1.62.1-cp312-cp312-win_amd64.whl", hash = "sha256:6696ffe440333a19d8d128e88d440f91fb92c75a80ce4b44d55800e656a3ef1d"}, + {file = "grpcio-1.62.1-cp37-cp37m-linux_armv7l.whl", hash = "sha256:e3393b0823f938253370ebef033c9fd23d27f3eae8eb9a8f6264900c7ea3fb5a"}, + {file = "grpcio-1.62.1-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:83e7ccb85a74beaeae2634f10eb858a0ed1a63081172649ff4261f929bacfd22"}, + {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:882020c87999d54667a284c7ddf065b359bd00251fcd70279ac486776dbf84ec"}, + {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a10383035e864f386fe096fed5c47d27a2bf7173c56a6e26cffaaa5a361addb1"}, + {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:960edebedc6b9ada1ef58e1c71156f28689978188cd8cff3b646b57288a927d9"}, + {file = "grpcio-1.62.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:23e2e04b83f347d0aadde0c9b616f4726c3d76db04b438fd3904b289a725267f"}, + {file = "grpcio-1.62.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:978121758711916d34fe57c1f75b79cdfc73952f1481bb9583399331682d36f7"}, + {file = "grpcio-1.62.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9084086190cc6d628f282e5615f987288b95457292e969b9205e45b442276407"}, + {file = "grpcio-1.62.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:22bccdd7b23c420a27fd28540fb5dcbc97dc6be105f7698cb0e7d7a420d0e362"}, + {file = "grpcio-1.62.1-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:8999bf1b57172dbc7c3e4bb3c732658e918f5c333b2942243f10d0d653953ba9"}, + {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:d9e52558b8b8c2f4ac05ac86344a7417ccdd2b460a59616de49eb6933b07a0bd"}, + {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1714e7bc935780bc3de1b3fcbc7674209adf5208ff825799d579ffd6cd0bd505"}, + {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8842ccbd8c0e253c1f189088228f9b433f7a93b7196b9e5b6f87dba393f5d5d"}, + {file = "grpcio-1.62.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1f1e7b36bdff50103af95a80923bf1853f6823dd62f2d2a2524b66ed74103e49"}, + {file = "grpcio-1.62.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bba97b8e8883a8038606480d6b6772289f4c907f6ba780fa1f7b7da7dfd76f06"}, + {file = "grpcio-1.62.1-cp38-cp38-win32.whl", hash = "sha256:a7f615270fe534548112a74e790cd9d4f5509d744dd718cd442bf016626c22e4"}, + {file = "grpcio-1.62.1-cp38-cp38-win_amd64.whl", hash = "sha256:e6c8c8693df718c5ecbc7babb12c69a4e3677fd11de8886f05ab22d4e6b1c43b"}, + {file = "grpcio-1.62.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:73db2dc1b201d20ab7083e7041946910bb991e7e9761a0394bbc3c2632326483"}, + {file = "grpcio-1.62.1-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:407b26b7f7bbd4f4751dbc9767a1f0716f9fe72d3d7e96bb3ccfc4aace07c8de"}, + {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:f8de7c8cef9261a2d0a62edf2ccea3d741a523c6b8a6477a340a1f2e417658de"}, + {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd5c8a1af40ec305d001c60236308a67e25419003e9bb3ebfab5695a8d0b369"}, + {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be0477cb31da67846a33b1a75c611f88bfbcd427fe17701b6317aefceee1b96f"}, + {file = "grpcio-1.62.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:60dcd824df166ba266ee0cfaf35a31406cd16ef602b49f5d4dfb21f014b0dedd"}, + {file = "grpcio-1.62.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:973c49086cabab773525f6077f95e5a993bfc03ba8fc32e32f2c279497780585"}, + {file = "grpcio-1.62.1-cp39-cp39-win32.whl", hash = "sha256:12859468e8918d3bd243d213cd6fd6ab07208195dc140763c00dfe901ce1e1b4"}, + {file = "grpcio-1.62.1-cp39-cp39-win_amd64.whl", hash = "sha256:b7209117bbeebdfa5d898205cc55153a51285757902dd73c47de498ad4d11332"}, + {file = "grpcio-1.62.1.tar.gz", hash = "sha256:6c455e008fa86d9e9a9d85bb76da4277c0d7d9668a3bfa70dbe86e9f3c759947"}, +] + +[package.extras] +protobuf = ["grpcio-tools (>=1.62.1)"] + +[[package]] +name = "grpcio-status" +version = "1.62.1" +description = "Status proto mapping for gRPC" +optional = false +python-versions = ">=3.6" +files = [ + {file = "grpcio-status-1.62.1.tar.gz", hash = "sha256:3431c8abbab0054912c41df5c72f03ddf3b7a67be8a287bb3c18a3456f96ff77"}, + {file = "grpcio_status-1.62.1-py3-none-any.whl", hash = "sha256:af0c3ab85da31669f21749e8d53d669c061ebc6ce5637be49a46edcb7aa8ab17"}, +] + +[package.dependencies] +googleapis-common-protos = ">=1.5.5" +grpcio = ">=1.62.1" +protobuf = ">=4.21.6" + +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "h2" +version = "4.1.0" +description = "HTTP/2 State-Machine based protocol implementation" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "h2-4.1.0-py3-none-any.whl", hash = "sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d"}, + {file = "h2-4.1.0.tar.gz", hash = "sha256:a83aca08fbe7aacb79fec788c9c0bac936343560ed9ec18b82a13a12c28d2abb"}, +] + +[package.dependencies] +hpack = ">=4.0,<5" +hyperframe = ">=6.0,<7" + +[[package]] +name = "hpack" +version = "4.0.0" +description = "Pure-Python HPACK header compression" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "hpack-4.0.0-py3-none-any.whl", hash = "sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c"}, + {file = "hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"}, +] + +[[package]] +name = "httpcore" +version = "1.0.5" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, + {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<0.26.0)"] + +[[package]] +name = "httpx" +version = "0.27.0" +description = "The next generation HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, + {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] + +[[package]] +name = "hyperframe" +version = "6.0.1" +description = "HTTP/2 framing layer for Python" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15"}, + {file = "hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914"}, +] + +[[package]] +name = "idna" +version = "3.7" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "itsdangerous" +version = "2.1.2" +description = "Safely pass data to untrusted environments and back." +optional = false +python-versions = ">=3.7" +files = [ + {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"}, + {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"}, +] + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "kaitaistruct" +version = "0.10" +description = "Kaitai Struct declarative parser generator for binary data: runtime library for Python" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +files = [ + {file = "kaitaistruct-0.10-py2.py3-none-any.whl", hash = "sha256:a97350919adbf37fda881f75e9365e2fb88d04832b7a4e57106ec70119efb235"}, + {file = "kaitaistruct-0.10.tar.gz", hash = "sha256:a044dee29173d6afbacf27bcac39daf89b654dd418cfa009ab82d9178a9ae52a"}, +] + +[[package]] +name = "ldap3" +version = "2.9.1" +description = "A strictly RFC 4510 conforming LDAP V3 pure Python client library" +optional = false +python-versions = "*" +files = [ + {file = "ldap3-2.9.1-py2.py3-none-any.whl", hash = "sha256:5869596fc4948797020d3f03b7939da938778a0f9e2009f7a072ccf92b8e8d70"}, + {file = "ldap3-2.9.1.tar.gz", hash = "sha256:f3e7fc4718e3f09dda568b57100095e0ce58633bcabbed8667ce3f8fbaa4229f"}, +] + +[package.dependencies] +pyasn1 = ">=0.4.6" + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" +optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "mitmproxy" +version = "10.2.4" +description = "An interactive, SSL/TLS-capable intercepting proxy for HTTP/1, HTTP/2, and WebSockets." +optional = false +python-versions = ">=3.10" +files = [ + {file = "mitmproxy-10.2.4-py3-none-any.whl", hash = "sha256:2b3910a9cdce10a9456a8e28fd2d5c4f58272bce977e5a9fe37e4ec54b995c15"}, +] + +[package.dependencies] +aioquic = ">=0.9.24,<0.10" +asgiref = ">=3.2.10,<3.8" +Brotli = ">=1.0,<1.2" +certifi = ">=2019.9.11" +cryptography = ">=42.0,<42.1" +flask = ">=1.1.1,<3.1" +h11 = ">=0.11,<0.15" +h2 = ">=4.1,<5" +hyperframe = ">=6.0,<7" +kaitaistruct = ">=0.10,<0.11" +ldap3 = ">=2.8,<2.10" +mitmproxy-rs = ">=0.5.1,<0.6" +msgpack = ">=1.0.0,<1.1.0" +passlib = ">=1.6.5,<1.8" +protobuf = ">=3.14,<5" +publicsuffix2 = ">=2.20190812,<3" +pydivert = {version = ">=2.0.3,<2.2", markers = "sys_platform == \"win32\""} +pyOpenSSL = ">=22.1,<24.1" +pyparsing = ">=2.4.2,<3.2" +pyperclip = ">=1.6.0,<1.9" +"ruamel.yaml" = ">=0.16,<0.19" +sortedcontainers = ">=2.3,<2.5" +tornado = ">=6.2,<7" +typing-extensions = {version = ">=4.3,<5", markers = "python_version < \"3.11\""} +urwid-mitmproxy = ">=2.1.1,<2.2" +wsproto = ">=1.0,<1.3" +zstandard = ">=0.11,<0.23" + +[package.extras] +dev = ["build (>=0.10.0)", "click (>=7.0,<8.2)", "hypothesis (>=5.8,<7)", "pdoc (>=4.0.0)", "pyinstaller (==6.4.0)", "pytest (>=6.1.0,<9)", "pytest-asyncio (>=0.23,<0.24)", "pytest-cov (>=2.7.1,<4.2)", "pytest-timeout (>=1.3.3,<2.3)", "pytest-xdist (>=2.1.0,<3.6)", "requests (>=2.9.1,<3)", "tox (>=3.5,<5)", "wheel (>=0.36.2,<0.43)"] + +[[package]] +name = "mitmproxy-macos" +version = "0.5.1" +description = "" +optional = false +python-versions = ">=3.10" +files = [ + {file = "mitmproxy_macos-0.5.1-py3-none-any.whl", hash = "sha256:3fb4fc9930b33101298675aeba6645dee71be17620c8cb07c810ba8bed6c2a42"}, +] + +[[package]] +name = "mitmproxy-rs" +version = "0.5.1" +description = "" +optional = false +python-versions = ">=3.10" +files = [ + {file = "mitmproxy_rs-0.5.1-cp310-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:5bfc3cf4a1f1dd09ee97ca8d9f2220ffeea29d5e9a0aa5a591deacf5612763c5"}, + {file = "mitmproxy_rs-0.5.1-cp310-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee18c0398dc439e9fe9d7dca66f1c2f868a6e0c2c444781c0b8964c794d1054f"}, + {file = "mitmproxy_rs-0.5.1-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2350fa71d0db814423eac65569be70d1788e8f4b8816cd56fc99be12a3498096"}, + {file = "mitmproxy_rs-0.5.1-cp310-abi3-win_amd64.whl", hash = "sha256:9e814163b5174c7ce65ef0c975f6ebf031ef1f3d4a0d8969644ec314108f91ab"}, + {file = "mitmproxy_rs-0.5.1.tar.gz", hash = "sha256:d8fc5dfde7bee019ebd0b29b28f178236949f3b4f229b9219929f15e2386d671"}, +] + +[package.dependencies] +mitmproxy_macos = {version = "0.5.1", markers = "sys_platform == \"darwin\""} +mitmproxy_windows = {version = "0.5.1", markers = "os_name == \"nt\""} + +[[package]] +name = "mitmproxy-windows" +version = "0.5.1" +description = "" +optional = false +python-versions = ">=3.10" +files = [ + {file = "mitmproxy_windows-0.5.1-py3-none-any.whl", hash = "sha256:08c2e71f9b7ff6aa094943627646f9afe048ec20ad892b701d1aba7de145e15a"}, +] + +[[package]] +name = "msgpack" +version = "1.0.8" +description = "MessagePack serializer" +optional = false +python-versions = ">=3.8" +files = [ + {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:505fe3d03856ac7d215dbe005414bc28505d26f0c128906037e66d98c4e95868"}, + {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6b7842518a63a9f17107eb176320960ec095a8ee3b4420b5f688e24bf50c53c"}, + {file = "msgpack-1.0.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:376081f471a2ef24828b83a641a02c575d6103a3ad7fd7dade5486cad10ea659"}, + {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e390971d082dba073c05dbd56322427d3280b7cc8b53484c9377adfbae67dc2"}, + {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e073efcba9ea99db5acef3959efa45b52bc67b61b00823d2a1a6944bf45982"}, + {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82d92c773fbc6942a7a8b520d22c11cfc8fd83bba86116bfcf962c2f5c2ecdaa"}, + {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9ee32dcb8e531adae1f1ca568822e9b3a738369b3b686d1477cbc643c4a9c128"}, + {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e3aa7e51d738e0ec0afbed661261513b38b3014754c9459508399baf14ae0c9d"}, + {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:69284049d07fce531c17404fcba2bb1df472bc2dcdac642ae71a2d079d950653"}, + {file = "msgpack-1.0.8-cp310-cp310-win32.whl", hash = "sha256:13577ec9e247f8741c84d06b9ece5f654920d8365a4b636ce0e44f15e07ec693"}, + {file = "msgpack-1.0.8-cp310-cp310-win_amd64.whl", hash = "sha256:e532dbd6ddfe13946de050d7474e3f5fb6ec774fbb1a188aaf469b08cf04189a"}, + {file = "msgpack-1.0.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9517004e21664f2b5a5fd6333b0731b9cf0817403a941b393d89a2f1dc2bd836"}, + {file = "msgpack-1.0.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d16a786905034e7e34098634b184a7d81f91d4c3d246edc6bd7aefb2fd8ea6ad"}, + {file = "msgpack-1.0.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2872993e209f7ed04d963e4b4fbae72d034844ec66bc4ca403329db2074377b"}, + {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c330eace3dd100bdb54b5653b966de7f51c26ec4a7d4e87132d9b4f738220ba"}, + {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83b5c044f3eff2a6534768ccfd50425939e7a8b5cf9a7261c385de1e20dcfc85"}, + {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1876b0b653a808fcd50123b953af170c535027bf1d053b59790eebb0aeb38950"}, + {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:dfe1f0f0ed5785c187144c46a292b8c34c1295c01da12e10ccddfc16def4448a"}, + {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3528807cbbb7f315bb81959d5961855e7ba52aa60a3097151cb21956fbc7502b"}, + {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e2f879ab92ce502a1e65fce390eab619774dda6a6ff719718069ac94084098ce"}, + {file = "msgpack-1.0.8-cp311-cp311-win32.whl", hash = "sha256:26ee97a8261e6e35885c2ecd2fd4a6d38252246f94a2aec23665a4e66d066305"}, + {file = "msgpack-1.0.8-cp311-cp311-win_amd64.whl", hash = "sha256:eadb9f826c138e6cf3c49d6f8de88225a3c0ab181a9b4ba792e006e5292d150e"}, + {file = "msgpack-1.0.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:114be227f5213ef8b215c22dde19532f5da9652e56e8ce969bf0a26d7c419fee"}, + {file = "msgpack-1.0.8-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d661dc4785affa9d0edfdd1e59ec056a58b3dbb9f196fa43587f3ddac654ac7b"}, + {file = "msgpack-1.0.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d56fd9f1f1cdc8227d7b7918f55091349741904d9520c65f0139a9755952c9e8"}, + {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0726c282d188e204281ebd8de31724b7d749adebc086873a59efb8cf7ae27df3"}, + {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8db8e423192303ed77cff4dce3a4b88dbfaf43979d280181558af5e2c3c71afc"}, + {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99881222f4a8c2f641f25703963a5cefb076adffd959e0558dc9f803a52d6a58"}, + {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b5505774ea2a73a86ea176e8a9a4a7c8bf5d521050f0f6f8426afe798689243f"}, + {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ef254a06bcea461e65ff0373d8a0dd1ed3aa004af48839f002a0c994a6f72d04"}, + {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e1dd7839443592d00e96db831eddb4111a2a81a46b028f0facd60a09ebbdd543"}, + {file = "msgpack-1.0.8-cp312-cp312-win32.whl", hash = "sha256:64d0fcd436c5683fdd7c907eeae5e2cbb5eb872fafbc03a43609d7941840995c"}, + {file = "msgpack-1.0.8-cp312-cp312-win_amd64.whl", hash = "sha256:74398a4cf19de42e1498368c36eed45d9528f5fd0155241e82c4082b7e16cffd"}, + {file = "msgpack-1.0.8-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0ceea77719d45c839fd73abcb190b8390412a890df2f83fb8cf49b2a4b5c2f40"}, + {file = "msgpack-1.0.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1ab0bbcd4d1f7b6991ee7c753655b481c50084294218de69365f8f1970d4c151"}, + {file = "msgpack-1.0.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1cce488457370ffd1f953846f82323cb6b2ad2190987cd4d70b2713e17268d24"}, + {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3923a1778f7e5ef31865893fdca12a8d7dc03a44b33e2a5f3295416314c09f5d"}, + {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a22e47578b30a3e199ab067a4d43d790249b3c0587d9a771921f86250c8435db"}, + {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd739c9251d01e0279ce729e37b39d49a08c0420d3fee7f2a4968c0576678f77"}, + {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d3420522057ebab1728b21ad473aa950026d07cb09da41103f8e597dfbfaeb13"}, + {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5845fdf5e5d5b78a49b826fcdc0eb2e2aa7191980e3d2cfd2a30303a74f212e2"}, + {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a0e76621f6e1f908ae52860bdcb58e1ca85231a9b0545e64509c931dd34275a"}, + {file = "msgpack-1.0.8-cp38-cp38-win32.whl", hash = "sha256:374a8e88ddab84b9ada695d255679fb99c53513c0a51778796fcf0944d6c789c"}, + {file = "msgpack-1.0.8-cp38-cp38-win_amd64.whl", hash = "sha256:f3709997b228685fe53e8c433e2df9f0cdb5f4542bd5114ed17ac3c0129b0480"}, + {file = "msgpack-1.0.8-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f51bab98d52739c50c56658cc303f190785f9a2cd97b823357e7aeae54c8f68a"}, + {file = "msgpack-1.0.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:73ee792784d48aa338bba28063e19a27e8d989344f34aad14ea6e1b9bd83f596"}, + {file = "msgpack-1.0.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f9904e24646570539a8950400602d66d2b2c492b9010ea7e965025cb71d0c86d"}, + {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e75753aeda0ddc4c28dce4c32ba2f6ec30b1b02f6c0b14e547841ba5b24f753f"}, + {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5dbf059fb4b7c240c873c1245ee112505be27497e90f7c6591261c7d3c3a8228"}, + {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4916727e31c28be8beaf11cf117d6f6f188dcc36daae4e851fee88646f5b6b18"}, + {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7938111ed1358f536daf311be244f34df7bf3cdedb3ed883787aca97778b28d8"}, + {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:493c5c5e44b06d6c9268ce21b302c9ca055c1fd3484c25ba41d34476c76ee746"}, + {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fbb160554e319f7b22ecf530a80a3ff496d38e8e07ae763b9e82fadfe96f273"}, + {file = "msgpack-1.0.8-cp39-cp39-win32.whl", hash = "sha256:f9af38a89b6a5c04b7d18c492c8ccf2aee7048aff1ce8437c4683bb5a1df893d"}, + {file = "msgpack-1.0.8-cp39-cp39-win_amd64.whl", hash = "sha256:ed59dd52075f8fc91da6053b12e8c89e37aa043f8986efd89e61fae69dc1b011"}, + {file = "msgpack-1.0.8.tar.gz", hash = "sha256:95c02b0e27e706e48d0e5426d1710ca78e0f0628d6e89d5b5a5b91a5f12274f3"}, +] + +[[package]] +name = "multidict" +version = "6.0.5" +description = "multidict implementation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, + {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, + {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, @@ -566,38 +1913,38 @@ files = [ [[package]] name = "mypy" -version = "1.8.0" +version = "1.9.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"}, - {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"}, - {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"}, - {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"}, - {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"}, - {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"}, - {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"}, - {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"}, - {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"}, - {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"}, - {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"}, - {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"}, - {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"}, - {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"}, - {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"}, - {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"}, - {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"}, - {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"}, - {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"}, - {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"}, - {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"}, - {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"}, - {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"}, - {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"}, - {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"}, - {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"}, - {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"}, + {file = "mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"}, + {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"}, + {file = "mypy-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150"}, + {file = "mypy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374"}, + {file = "mypy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc"}, + {file = "mypy-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129"}, + {file = "mypy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612"}, + {file = "mypy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6"}, + {file = "mypy-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185"}, + {file = "mypy-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913"}, + {file = "mypy-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"}, + {file = "mypy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b"}, + {file = "mypy-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2"}, + {file = "mypy-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e"}, + {file = "mypy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04"}, + {file = "mypy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89"}, + {file = "mypy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02"}, + {file = "mypy-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4"}, + {file = "mypy-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d"}, + {file = "mypy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf"}, + {file = "mypy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9"}, + {file = "mypy-1.9.0-py3-none-any.whl", hash = "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e"}, + {file = "mypy-1.9.0.tar.gz", hash = "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974"}, ] [package.dependencies] @@ -616,96 +1963,356 @@ name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." optional = false -python-versions = ">=3.5" +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "numpy" +version = "1.26.4" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, +] + +[[package]] +name = "ordered-set" +version = "4.1.0" +description = "An OrderedSet is a custom MutableSet that remembers its order, so that every" +optional = false +python-versions = ">=3.7" +files = [ + {file = "ordered-set-4.1.0.tar.gz", hash = "sha256:694a8e44c87657c59292ede72891eb91d34131f6531463aab3009191c77364a8"}, + {file = "ordered_set-4.1.0-py3-none-any.whl", hash = "sha256:046e1132c71fcf3330438a539928932caf51ddbc582496833e23de611de14562"}, +] + +[package.extras] +dev = ["black", "mypy", "pytest"] + +[[package]] +name = "packaging" +version = "24.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, +] + +[[package]] +name = "pandas" +version = "2.2.2" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, + {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, + {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, + {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, + {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"}, + {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"}, + {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.23.2", markers = "python_version == \"3.11\""}, + {version = ">=1.22.4", markers = "python_version < \"3.11\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.7" + +[package.extras] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] + +[[package]] +name = "pandas-stubs" +version = "2.2.1.240316" +description = "Type annotations for pandas" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pandas_stubs-2.2.1.240316-py3-none-any.whl", hash = "sha256:0126a26451a37cb893ea62357ca87ba3d181bd999ec8ba2ca5602e20207d6682"}, + {file = "pandas_stubs-2.2.1.240316.tar.gz", hash = "sha256:236a4f812fb6b1922e9607ff09e427f6d8540c421c9e5a40e3e4ddf7adac7f05"}, +] + +[package.dependencies] +numpy = {version = ">=1.26.0", markers = "python_version < \"3.13\""} +types-pytz = ">=2022.1.1" + +[[package]] +name = "passlib" +version = "1.7.4" +description = "comprehensive password hashing framework supporting over 30 schemes" +optional = false +python-versions = "*" +files = [ + {file = "passlib-1.7.4-py2.py3-none-any.whl", hash = "sha256:aa6bca462b8d8bda89c70b382f0c298a20b5560af6cbfa2dce410c0a2fb669f1"}, + {file = "passlib-1.7.4.tar.gz", hash = "sha256:defd50f72b65c5402ab2c573830a6978e5f202ad0d984793c8dde2c4152ebe04"}, +] + +[package.extras] +argon2 = ["argon2-cffi (>=18.2.0)"] +bcrypt = ["bcrypt (>=3.1.0)"] +build-docs = ["cloud-sptheme (>=1.10.1)", "sphinx (>=1.6)", "sphinxcontrib-fulltoc (>=1.2.0)"] +totp = ["cryptography"] + +[[package]] +name = "pg8000" +version = "1.31.1" +description = "PostgreSQL interface library" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pg8000-1.31.1-py3-none-any.whl", hash = "sha256:69aac9dba4114c9c8d0408232d54eaf7d06d271df7765caeed39960e057800e4"}, + {file = "pg8000-1.31.1.tar.gz", hash = "sha256:b11130d4c615dd3062ea8fed8143064a7978b7fe6d44f14b72261d43c8e27087"}, +] + +[package.dependencies] +python-dateutil = ">=2.8.2" +scramp = ">=1.4.4" + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "proto-plus" +version = "1.23.0" +description = "Beautiful, Pythonic protocol buffers." +optional = false +python-versions = ">=3.6" +files = [ + {file = "proto-plus-1.23.0.tar.gz", hash = "sha256:89075171ef11988b3fa157f5dbd8b9cf09d65fffee97e29ce403cd8defba19d2"}, + {file = "proto_plus-1.23.0-py3-none-any.whl", hash = "sha256:a829c79e619e1cf632de091013a4173deed13a55f326ef84f05af6f50ff4c82c"}, +] + +[package.dependencies] +protobuf = ">=3.19.0,<5.0.0dev" + +[package.extras] +testing = ["google-api-core[grpc] (>=1.31.5)"] + +[[package]] +name = "protobuf" +version = "4.25.3" +description = "" +optional = false +python-versions = ">=3.8" +files = [ + {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"}, + {file = "protobuf-4.25.3-cp310-abi3-win_amd64.whl", hash = "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8"}, + {file = "protobuf-4.25.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d"}, + {file = "protobuf-4.25.3-cp38-cp38-win32.whl", hash = "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"}, + {file = "protobuf-4.25.3-cp38-cp38-win_amd64.whl", hash = "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4"}, + {file = "protobuf-4.25.3-cp39-cp39-win32.whl", hash = "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4"}, + {file = "protobuf-4.25.3-cp39-cp39-win_amd64.whl", hash = "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c"}, + {file = "protobuf-4.25.3-py3-none-any.whl", hash = "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9"}, + {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"}, +] + +[[package]] +name = "publicsuffix2" +version = "2.20191221" +description = "Get a public suffix for a domain name using the Public Suffix List. Forked from and using the same API as the publicsuffix package." +optional = false +python-versions = "*" files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, + {file = "publicsuffix2-2.20191221-py2.py3-none-any.whl", hash = "sha256:786b5e36205b88758bd3518725ec8cfe7a8173f5269354641f581c6b80a99893"}, + {file = "publicsuffix2-2.20191221.tar.gz", hash = "sha256:00f8cc31aa8d0d5592a5ced19cccba7de428ebca985db26ac852d920ddd6fe7b"}, ] [[package]] -name = "packaging" -version = "23.2" -description = "Core utilities for Python packages" +name = "pyasn1" +version = "0.6.0" +description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"}, + {file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"}, ] [[package]] -name = "platformdirs" -version = "4.2.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +name = "pyasn1-modules" +version = "0.4.0" +description = "A collection of ASN.1-based protocols modules" optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, - {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, + {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"}, + {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"}, ] -[package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +[package.dependencies] +pyasn1 = ">=0.4.6,<0.7.0" [[package]] -name = "pluggy" -version = "1.4.0" -description = "plugin and hook calling mechanisms for python" +name = "pycparser" +version = "2.22" +description = "C parser in Python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, - {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - [[package]] name = "pydantic" -version = "1.10.14" +version = "1.10.15" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, - {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, - {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, - {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, - {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, - {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, - {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, - {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, - {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, - {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, - {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, - {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, - {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, - {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, - {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, - {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, - {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, - {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, - {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, - {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, - {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, - {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, - {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, - {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, - {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, - {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, - {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, - {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, - {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, - {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, - {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, - {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, - {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, - {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, - {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, - {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50f1666a9940d3d68683c9d96e39640f709d7a72ff8702987dab1761036206bb"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82790d4753ee5d00739d6cb5cf56bceb186d9d6ce134aca3ba7befb1eedbc2c8"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d207d5b87f6cbefbdb1198154292faee8017d7495a54ae58db06762004500d00"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e49db944fad339b2ccb80128ffd3f8af076f9f287197a480bf1e4ca053a866f0"}, + {file = "pydantic-1.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:d3b5c4cbd0c9cb61bbbb19ce335e1f8ab87a811f6d589ed52b0254cf585d709c"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3d5731a120752248844676bf92f25a12f6e45425e63ce22e0849297a093b5b0"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c365ad9c394f9eeffcb30a82f4246c0006417f03a7c0f8315d6211f25f7cb654"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3287e1614393119c67bd4404f46e33ae3be3ed4cd10360b48d0a4459f420c6a3"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be51dd2c8596b25fe43c0a4a59c2bee4f18d88efb8031188f9e7ddc6b469cf44"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6a51a1dd4aa7b3f1317f65493a182d3cff708385327c1c82c81e4a9d6d65b2e4"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4e316e54b5775d1eb59187f9290aeb38acf620e10f7fd2f776d97bb788199e53"}, + {file = "pydantic-1.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:0d142fa1b8f2f0ae11ddd5e3e317dcac060b951d605fda26ca9b234b92214986"}, + {file = "pydantic-1.10.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7ea210336b891f5ea334f8fc9f8f862b87acd5d4a0cbc9e3e208e7aa1775dabf"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3453685ccd7140715e05f2193d64030101eaad26076fad4e246c1cc97e1bb30d"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bea1f03b8d4e8e86702c918ccfd5d947ac268f0f0cc6ed71782e4b09353b26f"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:af9850d98fc21e5bc24ea9e35dd80a29faf6462c608728a110c0a30b595e58b7"}, + {file = "pydantic-1.10.15-cp37-cp37m-win_amd64.whl", hash = "sha256:d31ee5b14a82c9afe2bd26aaa405293d4237d0591527d9129ce36e58f19f95c1"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5e09c19df304b8123938dc3c53d3d3be6ec74b9d7d0d80f4f4b5432ae16c2022"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7ac9237cd62947db00a0d16acf2f3e00d1ae9d3bd602b9c415f93e7a9fc10528"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:584f2d4c98ffec420e02305cf675857bae03c9d617fcfdc34946b1160213a948"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbc6989fad0c030bd70a0b6f626f98a862224bc2b1e36bfc531ea2facc0a340c"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d573082c6ef99336f2cb5b667b781d2f776d4af311574fb53d908517ba523c22"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6bd7030c9abc80134087d8b6e7aa957e43d35714daa116aced57269a445b8f7b"}, + {file = "pydantic-1.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:3350f527bb04138f8aff932dc828f154847fbdc7a1a44c240fbfff1b57f49a12"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51d405b42f1b86703555797270e4970a9f9bd7953f3990142e69d1037f9d9e51"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a980a77c52723b0dc56640ced396b73a024d4b74f02bcb2d21dbbac1debbe9d0"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f1a1fb467d3f49e1708a3f632b11c69fccb4e748a325d5a491ddc7b5d22383"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:676ed48f2c5bbad835f1a8ed8a6d44c1cd5a21121116d2ac40bd1cd3619746ed"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92229f73400b80c13afcd050687f4d7e88de9234d74b27e6728aa689abcf58cc"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2746189100c646682eff0bce95efa7d2e203420d8e1c613dc0c6b4c1d9c1fde4"}, + {file = "pydantic-1.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:394f08750bd8eaad714718812e7fab615f873b3cdd0b9d84e76e51ef3b50b6b7"}, + {file = "pydantic-1.10.15-py3-none-any.whl", hash = "sha256:28e552a060ba2740d0d2aabe35162652c1459a0b9069fe0db7f4ee0e18e74d58"}, + {file = "pydantic-1.10.15.tar.gz", hash = "sha256:ca832e124eda231a60a041da4f013e3ff24949d94a01154b137fc2f2a43c3ffb"}, ] [package.dependencies] @@ -732,6 +2339,21 @@ typing-extensions = ">=3.10,<4.6.0 || >4.6.0" [package.extras] dev = ["black", "build", "coverage", "docformatter", "flake8", "flake8-black", "flake8-bugbear", "flake8-isort", "furo", "invoke", "isort", "mypy", "pylint", "pytest", "pytest-cov", "pytest-mypy-testing", "sphinx", "sphinx-autodoc-typehints", "tox", "twine", "wheel"] +[[package]] +name = "pydivert" +version = "2.1.0" +description = "Python binding to windivert driver" +optional = false +python-versions = "*" +files = [ + {file = "pydivert-2.1.0-py2.py3-none-any.whl", hash = "sha256:382db488e3c37c03ec9ec94e061a0b24334d78dbaeebb7d4e4d32ce4355d9da1"}, + {file = "pydivert-2.1.0.tar.gz", hash = "sha256:f0e150f4ff591b78e35f514e319561dadff7f24a82186a171dd4d465483de5b4"}, +] + +[package.extras] +docs = ["sphinx (>=1.4.8)"] +test = ["codecov (>=2.0.5)", "hypothesis (>=3.5.3)", "mock (>=1.0.1)", "pytest (>=3.0.3)", "pytest-cov (>=2.2.1)", "pytest-faulthandler (>=1.3.0,<2)", "pytest-timeout (>=1.0.0,<2)", "wheel (>=0.29)"] + [[package]] name = "pygments" version = "2.17.2" @@ -747,15 +2369,101 @@ files = [ plugins = ["importlib-metadata"] windows-terminal = ["colorama (>=0.4.6)"] +[[package]] +name = "pyjwt" +version = "2.8.0" +description = "JSON Web Token implementation in Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, + {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, +] + +[package.extras] +crypto = ["cryptography (>=3.4.0)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] + +[[package]] +name = "pylsqpack" +version = "0.3.18" +description = "Python wrapper for the ls-qpack QPACK library" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pylsqpack-0.3.18-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:1f415d2e03c779261ac7ed421a009a4c752eef6f1ef7b5a34c4a463a5e17fbad"}, + {file = "pylsqpack-0.3.18-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:c84e6d4dcb708d766a50bfd16579d8a0bff4eb4e5f5dff9f3df4018454d4013b"}, + {file = "pylsqpack-0.3.18-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bac5f2dc255ae70e5a14033e769769b38bd4c980b365dacd88665610f245e36f"}, + {file = "pylsqpack-0.3.18-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75042b442a0a7a283b5adc21045e6583f3c817d40ccec769837bf2f90b79c494"}, + {file = "pylsqpack-0.3.18-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8b5fd04bb27180286811f8e1659974e6e5e854a882de3f2aba8caefc1bb9ab81"}, + {file = "pylsqpack-0.3.18-cp38-abi3-win32.whl", hash = "sha256:a2798e1c08bd36875f77a1ebec0f130fdf9e27eebdb0499a764201d55ef78770"}, + {file = "pylsqpack-0.3.18-cp38-abi3-win_amd64.whl", hash = "sha256:40465d025b946bca195bdaed74b3b79fe3f7f419ab1d4bc4109dca34ba9881d7"}, + {file = "pylsqpack-0.3.18-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ae628cd359ecb466dd85f151ea1ad53de3114e5a9ae0f0ac1408fb43a4318032"}, + {file = "pylsqpack-0.3.18-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a248be29d9ca1fa2ebd7ef4b8ac166d17df0d8d4631b4499c8c566e221d4e5b"}, + {file = "pylsqpack-0.3.18-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:005ddce84bdcbf5c3cf99f764504208e1aa0a91a8331bf47108f2708f2a315e6"}, + {file = "pylsqpack-0.3.18-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3dd664354422d4cd51c189febb5f5d22bf3d8c453cc25517c04ce01a57478060"}, + {file = "pylsqpack-0.3.18-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:c003eb882f41e4dbd093243c67b97c8634209b4d5ba7edd16163b1ff37306254"}, + {file = "pylsqpack-0.3.18-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8ea75152e8cb8b8c7cfef11c3aa5ebe5b226bd850889f56ff70a688e9680acbf"}, + {file = "pylsqpack-0.3.18-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c4cccfd91afd589994f844fd1dbae0acdb58a8ab929d8edeadb25339deb6590"}, + {file = "pylsqpack-0.3.18-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06e1bbe47514b83cd03158e5558ef8cc44f578169c1820098be9f3cc4137f16a"}, + {file = "pylsqpack-0.3.18-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1054b0b44f6141a99e84a9aa6a27c9df028e9223747b893e8e37cdc95b602f1"}, + {file = "pylsqpack-0.3.18-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:711f3aa645f72a928e22606c1f026cde905de23efc07028fe1bc7429f73ec8ee"}, + {file = "pylsqpack-0.3.18.tar.gz", hash = "sha256:45ae55e721877505f4d5ccd49591d69353f2a548a8673dfafb251d385b3c097f"}, +] + +[[package]] +name = "pyopenssl" +version = "24.0.0" +description = "Python wrapper module around the OpenSSL library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyOpenSSL-24.0.0-py3-none-any.whl", hash = "sha256:ba07553fb6fd6a7a2259adb9b84e12302a9a8a75c44046e8bb5d3e5ee887e3c3"}, + {file = "pyOpenSSL-24.0.0.tar.gz", hash = "sha256:6aa33039a93fffa4563e655b61d11364d01264be8ccb49906101e02a334530bf"}, +] + +[package.dependencies] +cryptography = ">=41.0.5,<43" + +[package.extras] +docs = ["sphinx (!=5.2.0,!=5.2.0.post0,!=7.2.5)", "sphinx-rtd-theme"] +test = ["flaky", "pretend", "pytest (>=3.0.1)"] + +[[package]] +name = "pyparsing" +version = "3.1.2" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +optional = false +python-versions = ">=3.6.8" +files = [ + {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, + {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, +] + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + +[[package]] +name = "pyperclip" +version = "1.8.2" +description = "A cross-platform clipboard module for Python. (Only handles plain text for now.)" +optional = false +python-versions = "*" +files = [ + {file = "pyperclip-1.8.2.tar.gz", hash = "sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57"}, +] + [[package]] name = "pytest" -version = "8.0.2" +version = "8.1.1" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.0.2-py3-none-any.whl", hash = "sha256:edfaaef32ce5172d5466b5127b42e0d6d35ebbe4453f0e3505d96afd93f6b096"}, - {file = "pytest-8.0.2.tar.gz", hash = "sha256:d4051d623a2e0b7e51960ba963193b09ce6daeb9759a451844a21e4ddedfc1bd"}, + {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"}, + {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"}, ] [package.dependencies] @@ -763,21 +2471,21 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" -pluggy = ">=1.3.0,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} +pluggy = ">=1.4,<2.0" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-asyncio" -version = "0.23.5" +version = "0.23.6" description = "Pytest support for asyncio" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-asyncio-0.23.5.tar.gz", hash = "sha256:3a048872a9c4ba14c3e90cc1aa20cbc2def7d01c7c8db3777ec281ba9c057675"}, - {file = "pytest_asyncio-0.23.5-py3-none-any.whl", hash = "sha256:4e7093259ba018d58ede7d5315131d21923a60f8a6e9ee266ce1589685c89eac"}, + {file = "pytest-asyncio-0.23.6.tar.gz", hash = "sha256:ffe523a89c1c222598c76856e76852b787504ddb72dd5d9b6617ffa8aa2cde5f"}, + {file = "pytest_asyncio-0.23.6-py3-none-any.whl", hash = "sha256:68516fdd1018ac57b846c9846b954f0393b26f094764a28c955eabb0536a4e8a"}, ] [package.dependencies] @@ -787,6 +2495,64 @@ pytest = ">=7.0.0,<9" docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] +[[package]] +name = "pytest-sugar" +version = "1.0.0" +description = "pytest-sugar is a plugin for pytest that changes the default look and feel of pytest (e.g. progressbar, show tests that fail instantly)." +optional = false +python-versions = "*" +files = [ + {file = "pytest-sugar-1.0.0.tar.gz", hash = "sha256:6422e83258f5b0c04ce7c632176c7732cab5fdb909cb39cca5c9139f81276c0a"}, + {file = "pytest_sugar-1.0.0-py3-none-any.whl", hash = "sha256:70ebcd8fc5795dc457ff8b69d266a4e2e8a74ae0c3edc749381c64b5246c8dfd"}, +] + +[package.dependencies] +packaging = ">=21.3" +pytest = ">=6.2.0" +termcolor = ">=2.1.0" + +[package.extras] +dev = ["black", "flake8", "pre-commit"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-dotenv" +version = "1.0.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + [[package]] name = "pywin32" version = "306" @@ -810,6 +2576,66 @@ files = [ {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, ] +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + [[package]] name = "requests" version = "2.31.0" @@ -849,30 +2675,190 @@ pygments = ">=2.13.0,<3.0.0" [package.extras] jupyter = ["ipywidgets (>=7.5.1,<9)"] +[[package]] +name = "rsa" +version = "4.9" +description = "Pure-Python RSA implementation" +optional = false +python-versions = ">=3.6,<4" +files = [ + {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, + {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, +] + +[package.dependencies] +pyasn1 = ">=0.1.3" + +[[package]] +name = "ruamel-yaml" +version = "0.18.6" +description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruamel.yaml-0.18.6-py3-none-any.whl", hash = "sha256:57b53ba33def16c4f3d807c0ccbc00f8a6081827e81ba2491691b76882d0c636"}, + {file = "ruamel.yaml-0.18.6.tar.gz", hash = "sha256:8b27e6a217e786c6fbe5634d8f3f11bc63e0f80f6a5890f28863d9c45aac311b"}, +] + +[package.dependencies] +"ruamel.yaml.clib" = {version = ">=0.2.7", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.13\""} + +[package.extras] +docs = ["mercurial (>5.7)", "ryd"] +jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] + +[[package]] +name = "ruamel-yaml-clib" +version = "0.2.8" +description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" +optional = false +python-versions = ">=3.6" +files = [ + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:aa2267c6a303eb483de8d02db2871afb5c5fc15618d894300b88958f729ad74f"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:840f0c7f194986a63d2c2465ca63af8ccbbc90ab1c6001b1978f05119b5e7334"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:024cfe1fc7c7f4e1aff4a81e718109e13409767e4f871443cbff3dba3578203d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_24_aarch64.whl", hash = "sha256:1707814f0d9791df063f8c19bb51b0d1278b8e9a2353abbb676c2f685dee6afe"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:46d378daaac94f454b3a0e3d8d78cafd78a026b1d71443f4966c696b48a6d899"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:09b055c05697b38ecacb7ac50bdab2240bfca1a0c4872b0fd309bb07dc9aa3a9"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_24_aarch64.whl", hash = "sha256:1dc67314e7e1086c9fdf2680b7b6c2be1c0d8e3a8279f2e993ca2a7545fecf62"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3213ece08ea033eb159ac52ae052a4899b56ecc124bb80020d9bbceeb50258e9"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aab7fd643f71d7946f2ee58cc88c9b7bfc97debd71dcc93e03e2d174628e7e2d"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win32.whl", hash = "sha256:5c365d91c88390c8d0a8545df0b5857172824b1c604e867161e6b3d59a827eaa"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win_amd64.whl", hash = "sha256:1758ce7d8e1a29d23de54a16ae867abd370f01b5a69e1a3ba75223eaa3ca1a1b"}, + {file = "ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:77159f5d5b5c14f7c34073862a6b7d34944075d9f93e681638f6d753606c6ce6"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4ecbf9c3e19f9562c7fdd462e8d18dd902a47ca046a2e64dba80699f0b6c09b7"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:87ea5ff66d8064301a154b3933ae406b0863402a799b16e4a1d24d9fbbcbe0d3"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win32.whl", hash = "sha256:75e1ed13e1f9de23c5607fe6bd1aeaae21e523b32d83bb33918245361e9cc51b"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:305889baa4043a09e5b76f8e2a51d4ffba44259f6b4c72dec8ca56207d9c6fe1"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e2b4c44b60eadec492926a7270abb100ef9f72798e18743939bdbf037aab8c28"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e79e5db08739731b0ce4850bed599235d601701d5694c36570a99a0c5ca41a9d"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win32.whl", hash = "sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:a1a45e0bb052edf6a1d3a93baef85319733a888363938e1fc9924cb00c8df24c"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:184565012b60405d93838167f425713180b949e9d8dd0bbc7b49f074407c5a8b"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a75879bacf2c987c003368cf14bed0ffe99e8e85acfa6c0bfffc21a090f16880"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win32.whl", hash = "sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15"}, + {file = "ruamel.yaml.clib-0.2.8.tar.gz", hash = "sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512"}, +] + [[package]] name = "ruff" -version = "0.3.1" +version = "0.3.7" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.3.1-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:6b82e3937d0d76554cd5796bc3342a7d40de44494d29ff490022d7a52c501744"}, - {file = "ruff-0.3.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ae7954c8f692b70e6a206087ae3988acc9295d84c550f8d90b66c62424c16771"}, - {file = "ruff-0.3.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b730f56ccf91225da0f06cfe421e83b8cc27b2a79393db9c3df02ed7e2bbc01"}, - {file = "ruff-0.3.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c78bfa85637668f47bd82aa2ae17de2b34221ac23fea30926f6409f9e37fc927"}, - {file = "ruff-0.3.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6abaad602d6e6daaec444cbf4d9364df0a783e49604c21499f75bb92237d4af"}, - {file = "ruff-0.3.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:5f0c21b6914c3c9a25a59497cbb1e5b6c2d8d9beecc9b8e03ee986e24eee072e"}, - {file = "ruff-0.3.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:434c3fc72e6311c85cd143c4c448b0e60e025a9ac1781e63ba222579a8c29200"}, - {file = "ruff-0.3.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78a7025e6312cbba496341da5062e7cdd47d95f45c1b903e635cdeb1ba5ec2b9"}, - {file = "ruff-0.3.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52b02bb46f1a79b0c1fa93f6495bc7e77e4ef76e6c28995b4974a20ed09c0833"}, - {file = "ruff-0.3.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:11b5699c42f7d0b771c633d620f2cb22e727fb226273aba775a91784a9ed856c"}, - {file = "ruff-0.3.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:54e5dca3e411772b51194b3102b5f23b36961e8ede463776b289b78180df71a0"}, - {file = "ruff-0.3.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:951efb610c5844e668bbec4f71cf704f8645cf3106e13f283413969527ebfded"}, - {file = "ruff-0.3.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:09c7333b25e983aabcf6e38445252cff0b4745420fc3bda45b8fce791cc7e9ce"}, - {file = "ruff-0.3.1-py3-none-win32.whl", hash = "sha256:d937f9b99ebf346e0606c3faf43c1e297a62ad221d87ef682b5bdebe199e01f6"}, - {file = "ruff-0.3.1-py3-none-win_amd64.whl", hash = "sha256:c0318a512edc9f4e010bbaab588b5294e78c5cdc9b02c3d8ab2d77c7ae1903e3"}, - {file = "ruff-0.3.1-py3-none-win_arm64.whl", hash = "sha256:d3b60e44240f7e903e6dbae3139a65032ea4c6f2ad99b6265534ff1b83c20afa"}, - {file = "ruff-0.3.1.tar.gz", hash = "sha256:d30db97141fc2134299e6e983a6727922c9e03c031ae4883a6d69461de722ae7"}, + {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0e8377cccb2f07abd25e84fc5b2cbe48eeb0fea9f1719cad7caedb061d70e5ce"}, + {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:15a4d1cc1e64e556fa0d67bfd388fed416b7f3b26d5d1c3e7d192c897e39ba4b"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d28bdf3d7dc71dd46929fafeec98ba89b7c3550c3f0978e36389b5631b793663"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:379b67d4f49774ba679593b232dcd90d9e10f04d96e3c8ce4a28037ae473f7bb"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c060aea8ad5ef21cdfbbe05475ab5104ce7827b639a78dd55383a6e9895b7c51"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:ebf8f615dde968272d70502c083ebf963b6781aacd3079081e03b32adfe4d58a"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d48098bd8f5c38897b03604f5428901b65e3c97d40b3952e38637b5404b739a2"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da8a4fda219bf9024692b1bc68c9cff4b80507879ada8769dc7e985755d662ea"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c44e0149f1d8b48c4d5c33d88c677a4aa22fd09b1683d6a7ff55b816b5d074f"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3050ec0af72b709a62ecc2aca941b9cd479a7bf2b36cc4562f0033d688e44fa1"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a29cc38e4c1ab00da18a3f6777f8b50099d73326981bb7d182e54a9a21bb4ff7"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5b15cc59c19edca917f51b1956637db47e200b0fc5e6e1878233d3a938384b0b"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e491045781b1e38b72c91247cf4634f040f8d0cb3e6d3d64d38dcf43616650b4"}, + {file = "ruff-0.3.7-py3-none-win32.whl", hash = "sha256:bc931de87593d64fad3a22e201e55ad76271f1d5bfc44e1a1887edd0903c7d9f"}, + {file = "ruff-0.3.7-py3-none-win_amd64.whl", hash = "sha256:5ef0e501e1e39f35e03c2acb1d1238c595b8bb36cf7a170e7c1df1b73da00e74"}, + {file = "ruff-0.3.7-py3-none-win_arm64.whl", hash = "sha256:789e144f6dc7019d1f92a812891c645274ed08af6037d11fc65fcbc183b7d59f"}, + {file = "ruff-0.3.7.tar.gz", hash = "sha256:d5c1aebee5162c2226784800ae031f660c350e7a3402c4d1f8ea4e97e232e3ba"}, +] + +[[package]] +name = "scramp" +version = "1.4.5" +description = "An implementation of the SCRAM protocol." +optional = false +python-versions = ">=3.8" +files = [ + {file = "scramp-1.4.5-py3-none-any.whl", hash = "sha256:50e37c464fc67f37994e35bee4151e3d8f9320e9c204fca83a5d313c121bbbe7"}, + {file = "scramp-1.4.5.tar.gz", hash = "sha256:be3fbe774ca577a7a658117dca014e5d254d158cecae3dd60332dfe33ce6d78e"}, +] + +[package.dependencies] +asn1crypto = ">=1.5.1" + +[[package]] +name = "segment-analytics-python" +version = "2.3.2" +description = "The hassle-free way to integrate analytics into any python application." +optional = false +python-versions = ">=3.6.0" +files = [ + {file = "segment-analytics-python-2.3.2.tar.gz", hash = "sha256:9321b1e03b0129fa69edba0b38c63c2de229db91abe7f849e3df015b8fbc1c36"}, + {file = "segment_analytics_python-2.3.2-py2.py3-none-any.whl", hash = "sha256:0ba881e019c396f17b4e0a66117691a189a555bc13da47de69cb8db8e3adecad"}, +] + +[package.dependencies] +backoff = ">=2.1,<3.0" +PyJWT = ">=2.8.0,<2.9.0" +python-dateutil = ">=2.2,<3.0" +requests = ">=2.7,<3.0" + +[package.extras] +test = ["flake8 (==3.7.9)", "mock (==2.0.0)", "pylint (==2.8.0)"] + +[[package]] +name = "service-identity" +version = "24.1.0" +description = "Service identity verification for pyOpenSSL & cryptography." +optional = false +python-versions = ">=3.8" +files = [ + {file = "service_identity-24.1.0-py3-none-any.whl", hash = "sha256:a28caf8130c8a5c1c7a6f5293faaf239bbfb7751e4862436920ee6f2616f568a"}, + {file = "service_identity-24.1.0.tar.gz", hash = "sha256:6829c9d62fb832c2e1c435629b0a8c476e1929881f28bee4d20bc24161009221"}, +] + +[package.dependencies] +attrs = ">=19.1.0" +cryptography = "*" +pyasn1 = "*" +pyasn1-modules = "*" + +[package.extras] +dev = ["pyopenssl", "service-identity[idna,mypy,tests]"] +docs = ["furo", "myst-parser", "pyopenssl", "sphinx", "sphinx-notfound-page"] +idna = ["idna"] +mypy = ["idna", "mypy", "types-pyopenssl"] +tests = ["coverage[toml] (>=5.0.2)", "pytest"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] [[package]] @@ -886,6 +2872,118 @@ files = [ {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] +[[package]] +name = "sortedcontainers" +version = "2.4.0" +description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" +optional = false +python-versions = "*" +files = [ + {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, + {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, +] + +[[package]] +name = "sqlalchemy" +version = "2.0.29" +description = "Database Abstraction Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "SQLAlchemy-2.0.29-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4c142852ae192e9fe5aad5c350ea6befe9db14370b34047e1f0f7cf99e63c63b"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:99a1e69d4e26f71e750e9ad6fdc8614fbddb67cfe2173a3628a2566034e223c7"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ef3fbccb4058355053c51b82fd3501a6e13dd808c8d8cd2561e610c5456013c"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d6753305936eddc8ed190e006b7bb33a8f50b9854823485eed3a886857ab8d1"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0f3ca96af060a5250a8ad5a63699180bc780c2edf8abf96c58af175921df847a"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c4520047006b1d3f0d89e0532978c0688219857eb2fee7c48052560ae76aca1e"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-win32.whl", hash = "sha256:b2a0e3cf0caac2085ff172c3faacd1e00c376e6884b5bc4dd5b6b84623e29e4f"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-win_amd64.whl", hash = "sha256:01d10638a37460616708062a40c7b55f73e4d35eaa146781c683e0fa7f6c43fb"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:308ef9cb41d099099fffc9d35781638986870b29f744382904bf9c7dadd08513"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:296195df68326a48385e7a96e877bc19aa210e485fa381c5246bc0234c36c78e"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a13b917b4ffe5a0a31b83d051d60477819ddf18276852ea68037a144a506efb9"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f6d971255d9ddbd3189e2e79d743ff4845c07f0633adfd1de3f63d930dbe673"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:61405ea2d563407d316c63a7b5271ae5d274a2a9fbcd01b0aa5503635699fa1e"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:de7202ffe4d4a8c1e3cde1c03e01c1a3772c92858837e8f3879b497158e4cb44"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-win32.whl", hash = "sha256:b5d7ed79df55a731749ce65ec20d666d82b185fa4898430b17cb90c892741520"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-win_amd64.whl", hash = "sha256:205f5a2b39d7c380cbc3b5dcc8f2762fb5bcb716838e2d26ccbc54330775b003"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d96710d834a6fb31e21381c6d7b76ec729bd08c75a25a5184b1089141356171f"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:52de4736404e53c5c6a91ef2698c01e52333988ebdc218f14c833237a0804f1b"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c7b02525ede2a164c5fa5014915ba3591730f2cc831f5be9ff3b7fd3e30958e"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dfefdb3e54cd15f5d56fd5ae32f1da2d95d78319c1f6dfb9bcd0eb15d603d5d"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a88913000da9205b13f6f195f0813b6ffd8a0c0c2bd58d499e00a30eb508870c"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fecd5089c4be1bcc37c35e9aa678938d2888845a134dd016de457b942cf5a758"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-win32.whl", hash = "sha256:8197d6f7a3d2b468861ebb4c9f998b9df9e358d6e1cf9c2a01061cb9b6cf4e41"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-win_amd64.whl", hash = "sha256:9b19836ccca0d321e237560e475fd99c3d8655d03da80c845c4da20dda31b6e1"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:87a1d53a5382cdbbf4b7619f107cc862c1b0a4feb29000922db72e5a66a5ffc0"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a0732dffe32333211801b28339d2a0babc1971bc90a983e3035e7b0d6f06b93"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90453597a753322d6aa770c5935887ab1fc49cc4c4fdd436901308383d698b4b"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ea311d4ee9a8fa67f139c088ae9f905fcf0277d6cd75c310a21a88bf85e130f5"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5f20cb0a63a3e0ec4e169aa8890e32b949c8145983afa13a708bc4b0a1f30e03"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-win32.whl", hash = "sha256:e5bbe55e8552019c6463709b39634a5fc55e080d0827e2a3a11e18eb73f5cdbd"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-win_amd64.whl", hash = "sha256:c2f9c762a2735600654c654bf48dad388b888f8ce387b095806480e6e4ff6907"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7e614d7a25a43a9f54fcce4675c12761b248547f3d41b195e8010ca7297c369c"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:471fcb39c6adf37f820350c28aac4a7df9d3940c6548b624a642852e727ea586"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:988569c8732f54ad3234cf9c561364221a9e943b78dc7a4aaf35ccc2265f1930"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dddaae9b81c88083e6437de95c41e86823d150f4ee94bf24e158a4526cbead01"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:334184d1ab8f4c87f9652b048af3f7abea1c809dfe526fb0435348a6fef3d380"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:38b624e5cf02a69b113c8047cf7f66b5dfe4a2ca07ff8b8716da4f1b3ae81567"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-win32.whl", hash = "sha256:bab41acf151cd68bc2b466deae5deeb9e8ae9c50ad113444151ad965d5bf685b"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-win_amd64.whl", hash = "sha256:52c8011088305476691b8750c60e03b87910a123cfd9ad48576d6414b6ec2a1d"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3071ad498896907a5ef756206b9dc750f8e57352113c19272bdfdc429c7bd7de"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dba622396a3170974f81bad49aacebd243455ec3cc70615aeaef9e9613b5bca5"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b184e3de58009cc0bf32e20f137f1ec75a32470f5fede06c58f6c355ed42a72"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c37f1050feb91f3d6c32f864d8e114ff5545a4a7afe56778d76a9aec62638ba"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bda7ce59b06d0f09afe22c56714c65c957b1068dee3d5e74d743edec7daba552"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:25664e18bef6dc45015b08f99c63952a53a0a61f61f2e48a9e70cec27e55f699"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-win32.whl", hash = "sha256:77d29cb6c34b14af8a484e831ab530c0f7188f8efed1c6a833a2c674bf3c26ec"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-win_amd64.whl", hash = "sha256:04c487305ab035a9548f573763915189fc0fe0824d9ba28433196f8436f1449c"}, + {file = "SQLAlchemy-2.0.29-py3-none-any.whl", hash = "sha256:dc4ee2d4ee43251905f88637d5281a8d52e916a021384ec10758826f5cbae305"}, + {file = "SQLAlchemy-2.0.29.tar.gz", hash = "sha256:bd9566b8e58cabd700bc367b60e90d9349cd16f0984973f98a9a09f9c64e86f0"}, +] + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} +typing-extensions = ">=4.6.0" + +[package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=8)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3_binary"] + +[[package]] +name = "termcolor" +version = "2.4.0" +description = "ANSI color formatting for output in terminal" +optional = false +python-versions = ">=3.8" +files = [ + {file = "termcolor-2.4.0-py3-none-any.whl", hash = "sha256:9297c0df9c99445c2412e832e882a7884038a25617c60cea2ad69488d4040d63"}, + {file = "termcolor-2.4.0.tar.gz", hash = "sha256:aab9e56047c8ac41ed798fa36d892a37aca6b3e9159f3e0c24bc64a9b3ac7b7a"}, +] + +[package.extras] +tests = ["pytest", "pytest-cov"] + [[package]] name = "tomli" version = "2.0.1" @@ -897,6 +2995,26 @@ files = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] +[[package]] +name = "tornado" +version = "6.4" +description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." +optional = false +python-versions = ">= 3.8" +files = [ + {file = "tornado-6.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:02ccefc7d8211e5a7f9e8bc3f9e5b0ad6262ba2fbb683a6443ecc804e5224ce0"}, + {file = "tornado-6.4-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:27787de946a9cffd63ce5814c33f734c627a87072ec7eed71f7fc4417bb16263"}, + {file = "tornado-6.4-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7894c581ecdcf91666a0912f18ce5e757213999e183ebfc2c3fdbf4d5bd764e"}, + {file = "tornado-6.4-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e43bc2e5370a6a8e413e1e1cd0c91bedc5bd62a74a532371042a18ef19e10579"}, + {file = "tornado-6.4-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0251554cdd50b4b44362f73ad5ba7126fc5b2c2895cc62b14a1c2d7ea32f212"}, + {file = "tornado-6.4-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fd03192e287fbd0899dd8f81c6fb9cbbc69194d2074b38f384cb6fa72b80e9c2"}, + {file = "tornado-6.4-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:88b84956273fbd73420e6d4b8d5ccbe913c65d31351b4c004ae362eba06e1f78"}, + {file = "tornado-6.4-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:71ddfc23a0e03ef2df1c1397d859868d158c8276a0603b96cf86892bff58149f"}, + {file = "tornado-6.4-cp38-abi3-win32.whl", hash = "sha256:6f8a6c77900f5ae93d8b4ae1196472d0ccc2775cc1dfdc9e7727889145c45052"}, + {file = "tornado-6.4-cp38-abi3-win_amd64.whl", hash = "sha256:10aeaa8006333433da48dec9fe417877f8bcc21f48dda8d661ae79da357b2a63"}, + {file = "tornado-6.4.tar.gz", hash = "sha256:72291fa6e6bc84e626589f1c29d90a5a6d593ef5ae68052ee2ef000dfd273dee"}, +] + [[package]] name = "types-cachetools" version = "5.3.0.7" @@ -908,15 +3026,62 @@ files = [ {file = "types_cachetools-5.3.0.7-py3-none-any.whl", hash = "sha256:98c069dc7fc087b1b061703369c80751b0a0fc561f6fb072b554e5eee23773a0"}, ] +[[package]] +name = "types-pytz" +version = "2024.1.0.20240203" +description = "Typing stubs for pytz" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-pytz-2024.1.0.20240203.tar.gz", hash = "sha256:c93751ee20dfc6e054a0148f8f5227b9a00b79c90a4d3c9f464711a73179c89e"}, + {file = "types_pytz-2024.1.0.20240203-py3-none-any.whl", hash = "sha256:9679eef0365db3af91ef7722c199dbb75ee5c1b67e3c4dd7bfbeb1b8a71c21a3"}, +] + +[[package]] +name = "types-pyyaml" +version = "6.0.12.20240311" +description = "Typing stubs for PyYAML" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-PyYAML-6.0.12.20240311.tar.gz", hash = "sha256:a9e0f0f88dc835739b0c1ca51ee90d04ca2a897a71af79de9aec5f38cb0a5342"}, + {file = "types_PyYAML-6.0.12.20240311-py3-none-any.whl", hash = "sha256:b845b06a1c7e54b8e5b4c683043de0d9caf205e7434b3edc678ff2411979b8f6"}, +] + +[[package]] +name = "types-requests" +version = "2.31.0.20240406" +description = "Typing stubs for requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-requests-2.31.0.20240406.tar.gz", hash = "sha256:4428df33c5503945c74b3f42e82b181e86ec7b724620419a2966e2de604ce1a1"}, + {file = "types_requests-2.31.0.20240406-py3-none-any.whl", hash = "sha256:6216cdac377c6b9a040ac1c0404f7284bd13199c0e1bb235f4324627e8898cf5"}, +] + +[package.dependencies] +urllib3 = ">=2" + [[package]] name = "typing-extensions" -version = "4.10.0" +version = "4.11.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, - {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, + {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, + {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, +] + +[[package]] +name = "tzdata" +version = "2024.1" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, ] [[package]] @@ -936,6 +3101,22 @@ h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] +[[package]] +name = "urwid-mitmproxy" +version = "2.1.2.1" +description = "A full-featured console (xterm et al.) user interface library" +optional = false +python-versions = "*" +files = [ + {file = "urwid-mitmproxy-2.1.2.1.tar.gz", hash = "sha256:be6238e587acb92bdd43b241af0a10dc23798e8cf3eddef834164eb637686cda"}, + {file = "urwid_mitmproxy-2.1.2.1-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:29c62a593235d2b69ba4557648588c54420ef030794b9d28e65f50bffdde85c3"}, + {file = "urwid_mitmproxy-2.1.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:d93bdc87cbb329cd262f8ada586e954a95ca4cc7249eca5b348b87f47ef1adb5"}, + {file = "urwid_mitmproxy-2.1.2.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8cb7eb42fcc426ea02c321159631d396ec0cd6ebebabb310f3a4493579ff2e09"}, + {file = "urwid_mitmproxy-2.1.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:66c40dcead7fedbb312516e18574d216b0e7c728bf5cd0e240eee53737234b45"}, + {file = "urwid_mitmproxy-2.1.2.1-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:7a8a95460a519e0388d91a198acb31836dce40d14e599a0b9c24ba70fa4ec64b"}, + {file = "urwid_mitmproxy-2.1.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:d2d536ad412022365b5e1974cde9029b86cfc30f3960ae073f959630f0c27c21"}, +] + [[package]] name = "websocket-client" version = "1.7.0" @@ -952,6 +3133,37 @@ docs = ["Sphinx (>=6.0)", "sphinx-rtd-theme (>=1.1.0)"] optional = ["python-socks", "wsaccel"] test = ["websockets"] +[[package]] +name = "werkzeug" +version = "3.0.2" +description = "The comprehensive WSGI web application library." +optional = false +python-versions = ">=3.8" +files = [ + {file = "werkzeug-3.0.2-py3-none-any.whl", hash = "sha256:3aac3f5da756f93030740bc235d3e09449efcf65f2f55e3602e1d851b8f48795"}, + {file = "werkzeug-3.0.2.tar.gz", hash = "sha256:e39b645a6ac92822588e7b39a692e7828724ceae0b0d702ef96701f90e70128d"}, +] + +[package.dependencies] +MarkupSafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog (>=2.3)"] + +[[package]] +name = "wsproto" +version = "1.2.0" +description = "WebSockets state-machine based protocol implementation" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736"}, + {file = "wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065"}, +] + +[package.dependencies] +h11 = ">=0.9.0,<1" + [[package]] name = "yarl" version = "1.9.4" @@ -1055,7 +3267,68 @@ files = [ idna = ">=2.0" multidict = ">=4.0" +[[package]] +name = "zstandard" +version = "0.22.0" +description = "Zstandard bindings for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zstandard-0.22.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:275df437ab03f8c033b8a2c181e51716c32d831082d93ce48002a5227ec93019"}, + {file = "zstandard-0.22.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ac9957bc6d2403c4772c890916bf181b2653640da98f32e04b96e4d6fb3252a"}, + {file = "zstandard-0.22.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe3390c538f12437b859d815040763abc728955a52ca6ff9c5d4ac707c4ad98e"}, + {file = "zstandard-0.22.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1958100b8a1cc3f27fa21071a55cb2ed32e9e5df4c3c6e661c193437f171cba2"}, + {file = "zstandard-0.22.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93e1856c8313bc688d5df069e106a4bc962eef3d13372020cc6e3ebf5e045202"}, + {file = "zstandard-0.22.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:1a90ba9a4c9c884bb876a14be2b1d216609385efb180393df40e5172e7ecf356"}, + {file = "zstandard-0.22.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3db41c5e49ef73641d5111554e1d1d3af106410a6c1fb52cf68912ba7a343a0d"}, + {file = "zstandard-0.22.0-cp310-cp310-win32.whl", hash = "sha256:d8593f8464fb64d58e8cb0b905b272d40184eac9a18d83cf8c10749c3eafcd7e"}, + {file = "zstandard-0.22.0-cp310-cp310-win_amd64.whl", hash = "sha256:f1a4b358947a65b94e2501ce3e078bbc929b039ede4679ddb0460829b12f7375"}, + {file = "zstandard-0.22.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:589402548251056878d2e7c8859286eb91bd841af117dbe4ab000e6450987e08"}, + {file = "zstandard-0.22.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a97079b955b00b732c6f280d5023e0eefe359045e8b83b08cf0333af9ec78f26"}, + {file = "zstandard-0.22.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:445b47bc32de69d990ad0f34da0e20f535914623d1e506e74d6bc5c9dc40bb09"}, + {file = "zstandard-0.22.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33591d59f4956c9812f8063eff2e2c0065bc02050837f152574069f5f9f17775"}, + {file = "zstandard-0.22.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:888196c9c8893a1e8ff5e89b8f894e7f4f0e64a5af4d8f3c410f0319128bb2f8"}, + {file = "zstandard-0.22.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:53866a9d8ab363271c9e80c7c2e9441814961d47f88c9bc3b248142c32141d94"}, + {file = "zstandard-0.22.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4ac59d5d6910b220141c1737b79d4a5aa9e57466e7469a012ed42ce2d3995e88"}, + {file = "zstandard-0.22.0-cp311-cp311-win32.whl", hash = "sha256:2b11ea433db22e720758cba584c9d661077121fcf60ab43351950ded20283440"}, + {file = "zstandard-0.22.0-cp311-cp311-win_amd64.whl", hash = "sha256:11f0d1aab9516a497137b41e3d3ed4bbf7b2ee2abc79e5c8b010ad286d7464bd"}, + {file = "zstandard-0.22.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6c25b8eb733d4e741246151d895dd0308137532737f337411160ff69ca24f93a"}, + {file = "zstandard-0.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f9b2cde1cd1b2a10246dbc143ba49d942d14fb3d2b4bccf4618d475c65464912"}, + {file = "zstandard-0.22.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a88b7df61a292603e7cd662d92565d915796b094ffb3d206579aaebac6b85d5f"}, + {file = "zstandard-0.22.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:466e6ad8caefb589ed281c076deb6f0cd330e8bc13c5035854ffb9c2014b118c"}, + {file = "zstandard-0.22.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1d67d0d53d2a138f9e29d8acdabe11310c185e36f0a848efa104d4e40b808e4"}, + {file = "zstandard-0.22.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:39b2853efc9403927f9065cc48c9980649462acbdf81cd4f0cb773af2fd734bc"}, + {file = "zstandard-0.22.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8a1b2effa96a5f019e72874969394edd393e2fbd6414a8208fea363a22803b45"}, + {file = "zstandard-0.22.0-cp312-cp312-win32.whl", hash = "sha256:88c5b4b47a8a138338a07fc94e2ba3b1535f69247670abfe422de4e0b344aae2"}, + {file = "zstandard-0.22.0-cp312-cp312-win_amd64.whl", hash = "sha256:de20a212ef3d00d609d0b22eb7cc798d5a69035e81839f549b538eff4105d01c"}, + {file = "zstandard-0.22.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d75f693bb4e92c335e0645e8845e553cd09dc91616412d1d4650da835b5449df"}, + {file = "zstandard-0.22.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:36a47636c3de227cd765e25a21dc5dace00539b82ddd99ee36abae38178eff9e"}, + {file = "zstandard-0.22.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68953dc84b244b053c0d5f137a21ae8287ecf51b20872eccf8eaac0302d3e3b0"}, + {file = "zstandard-0.22.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2612e9bb4977381184bb2463150336d0f7e014d6bb5d4a370f9a372d21916f69"}, + {file = "zstandard-0.22.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23d2b3c2b8e7e5a6cb7922f7c27d73a9a615f0a5ab5d0e03dd533c477de23004"}, + {file = "zstandard-0.22.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d43501f5f31e22baf822720d82b5547f8a08f5386a883b32584a185675c8fbf"}, + {file = "zstandard-0.22.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a493d470183ee620a3df1e6e55b3e4de8143c0ba1b16f3ded83208ea8ddfd91d"}, + {file = "zstandard-0.22.0-cp38-cp38-win32.whl", hash = "sha256:7034d381789f45576ec3f1fa0e15d741828146439228dc3f7c59856c5bcd3292"}, + {file = "zstandard-0.22.0-cp38-cp38-win_amd64.whl", hash = "sha256:d8fff0f0c1d8bc5d866762ae95bd99d53282337af1be9dc0d88506b340e74b73"}, + {file = "zstandard-0.22.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2fdd53b806786bd6112d97c1f1e7841e5e4daa06810ab4b284026a1a0e484c0b"}, + {file = "zstandard-0.22.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:73a1d6bd01961e9fd447162e137ed949c01bdb830dfca487c4a14e9742dccc93"}, + {file = "zstandard-0.22.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9501f36fac6b875c124243a379267d879262480bf85b1dbda61f5ad4d01b75a3"}, + {file = "zstandard-0.22.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48f260e4c7294ef275744210a4010f116048e0c95857befb7462e033f09442fe"}, + {file = "zstandard-0.22.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:959665072bd60f45c5b6b5d711f15bdefc9849dd5da9fb6c873e35f5d34d8cfb"}, + {file = "zstandard-0.22.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d22fdef58976457c65e2796e6730a3ea4a254f3ba83777ecfc8592ff8d77d303"}, + {file = "zstandard-0.22.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a7ccf5825fd71d4542c8ab28d4d482aace885f5ebe4b40faaa290eed8e095a4c"}, + {file = "zstandard-0.22.0-cp39-cp39-win32.whl", hash = "sha256:f058a77ef0ece4e210bb0450e68408d4223f728b109764676e1a13537d056bb0"}, + {file = "zstandard-0.22.0-cp39-cp39-win_amd64.whl", hash = "sha256:e9e9d4e2e336c529d4c435baad846a181e39a982f823f7e4495ec0b0ec8538d2"}, + {file = "zstandard-0.22.0.tar.gz", hash = "sha256:8226a33c542bcb54cd6bd0a366067b610b41713b64c9abec1bc4533d69f51e70"}, +] + +[package.dependencies] +cffi = {version = ">=1.11", markers = "platform_python_implementation == \"PyPy\""} + +[package.extras] +cffi = ["cffi (>=1.11)"] + [metadata] lock-version = "2.0" -python-versions = "^3.10" -content-hash = "98437771d3bb81792186c952105888fb44215dc87f2a0004db79db8f58dd2814" +python-versions = "^3.10,<3.12" +content-hash = "c23821b61ee592ec43f11c22e1e3b5eb597eefe17265fc546e4fabffed697a05" diff --git a/airbyte-ci/connectors/live-tests/pyproject.toml b/airbyte-ci/connectors/live-tests/pyproject.toml index d3e0f2276a85f..ff13dd895abbe 100644 --- a/airbyte-ci/connectors/live-tests/pyproject.toml +++ b/airbyte-ci/connectors/live-tests/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "live-tests" -version = "0.2.0" +version = "0.14.2" description = "Contains utilities for testing connectors against live data." authors = ["Airbyte "] license = "MIT" @@ -15,15 +15,30 @@ packages = [ ] [tool.poetry.dependencies] -python = "^3.10" +python = "^3.10,<3.12" airbyte-protocol-models = "<1.0.0" cachetools = "~=5.3.3" dagger-io = "==0.9.6" +deepdiff = "6.7.1" pydantic = "*" pytest-asyncio = "~=0.23.5" +pytest = "^8.1.1" pydash = "~=7.0.7" docker = ">=6,<7" asyncclick = "^8.1.7.1" +# TODO: when this is open-sourced, don't require connection-retriever +connection-retriever = {git = "git@github.com:airbytehq/airbyte-platform-internal", subdirectory = "tools/connection-retriever"} +duckdb = "^0.10.0" +pandas = "^2.2.1" +pytest-sugar = "^1.0.0" +asyncer = "^0.0.5" +rich = "^13.7.1" +mitmproxy = "^10.2.4" +requests = "^2.31.0" +pyyaml = "^6.0.1" +dpath = "^2.1.6" +genson = "^1.2.2" +segment-analytics-python = "^2.3.2" [tool.poetry.scripts] live-tests = "live_tests.cli:live_tests" @@ -32,13 +47,19 @@ live-tests = "live_tests.cli:live_tests" ruff = "^0.3.0" mypy = "^1.8.0" types-cachetools = "^5.3.0.7" +pandas-stubs = "^2.2.0.240218" +types-requests = "^2.31.0.20240311" +types-pyyaml = "^6.0.12.20240311" + +[tool.ruff.lint] +select = ["I", "F"] + +[tool.ruff.lint.isort] +known-first-party = ["connection-retriever"] [tool.poe.tasks] +format = "ruff format src" test = "pytest tests" lint = "ruff check src" -format = "ruff format src" type_check = "mypy src --disallow-untyped-defs" pre-push = ["format", "lint", "test", "type_check"] - -[tool.airbyte_ci] -poe_tasks = ["test", "lint", "type_check"] diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/__init__.py b/airbyte-ci/connectors/live-tests/src/live_tests/__init__.py index f70ecfc3a89e7..51502a263eae0 100644 --- a/airbyte-ci/connectors/live-tests/src/live_tests/__init__.py +++ b/airbyte-ci/connectors/live-tests/src/live_tests/__init__.py @@ -1 +1,2 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. + diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/__init__.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/__init__.py index 9a1b7d627ed31..177ff35cf6879 100644 --- a/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/__init__.py +++ b/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/__init__.py @@ -1,6 +1,7 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. from .base_backend import BaseBackend +from .duckdb_backend import DuckDbBackend from .file_backend import FileBackend -__all__ = ["BaseBackend", "FileBackend"] +__all__ = ["BaseBackend", "FileBackend", "DuckDbBackend"] diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/base_backend.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/base_backend.py index f6005120c2168..f009b82722756 100644 --- a/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/base_backend.py +++ b/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/base_backend.py @@ -12,5 +12,5 @@ class BaseBackend(ABC): """ @abstractmethod - async def write(self, airbyte_messages: Iterable[AirbyteMessage]) -> None: + def write(self, airbyte_messages: Iterable[AirbyteMessage]) -> None: ... diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/duckdb_backend.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/duckdb_backend.py new file mode 100644 index 0000000000000..41f7518d3ae5a --- /dev/null +++ b/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/duckdb_backend.py @@ -0,0 +1,78 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + + +import logging +import re +from pathlib import Path +from typing import Iterable, Optional + +import duckdb +from airbyte_protocol.models import AirbyteMessage # type: ignore +from live_tests.commons.backends.file_backend import FileBackend + + +class DuckDbBackend(FileBackend): + SAMPLE_SIZE = -1 + + def __init__( + self, + output_directory: Path, + duckdb_path: Path, + schema: Optional[Iterable[str]] = None, + ): + super().__init__(output_directory) + self.duckdb_path = duckdb_path + self.schema = schema + + @property + def jsonl_files_to_insert(self) -> Iterable[Path]: + return [ + self.jsonl_catalogs_path, + self.jsonl_connection_status_path, + self.jsonl_specs_path, + self.jsonl_states_path, + self.jsonl_traces_path, + self.jsonl_logs_path, + self.jsonl_controls_path, + self.jsonl_records_path, + ] + + @staticmethod + def sanitize_table_name(table_name: str) -> str: + sanitized = table_name.replace(" ", "_") + sanitized = re.sub(r"[^\w\s]", "", sanitized) + if sanitized and sanitized[0].isdigit(): + sanitized = "_" + sanitized + return sanitized + + def write(self, airbyte_messages: Iterable[AirbyteMessage]) -> None: + # Use the FileBackend to write the messages to disk as jsonl files + super().write(airbyte_messages) + duck_db_conn = duckdb.connect(str(self.duckdb_path)) + + if self.schema: + sanitized_schema_name = "_".join([self.sanitize_table_name(s) for s in self.schema]) + duck_db_conn.sql(f"CREATE SCHEMA IF NOT EXISTS {sanitized_schema_name}") + duck_db_conn.sql(f"USE {sanitized_schema_name}") + logging.info(f"Using schema {sanitized_schema_name}") + + for json_file in self.jsonl_files_to_insert: + if json_file.exists(): + table_name = self.sanitize_table_name(json_file.stem) + logging.info(f"Creating table {table_name} from {json_file} in schema {sanitized_schema_name}") + duck_db_conn.sql( + f"CREATE TABLE {table_name} AS SELECT * FROM read_json_auto('{json_file}', sample_size = {self.SAMPLE_SIZE}, format = 'newline_delimited')" + ) + logging.info(f"Table {table_name} created in schema {sanitized_schema_name}") + + for json_file in self.record_per_stream_paths_data_only.values(): + if json_file.exists(): + table_name = self.sanitize_table_name(f"records_{json_file.stem}") + logging.info( + f"Creating table {table_name} from {json_file} in schema {sanitized_schema_name} to store stream records with the data field only" + ) + duck_db_conn.sql( + f"CREATE TABLE {self.sanitize_table_name(table_name)} AS SELECT * FROM read_json_auto('{json_file}', sample_size = {self.SAMPLE_SIZE}, format = 'newline_delimited')" + ) + logging.info(f"Table {table_name} created in schema {sanitized_schema_name}") + duck_db_conn.close() diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/file_backend.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/file_backend.py index a7bea3cb184d0..72620d3de502f 100644 --- a/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/file_backend.py +++ b/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/file_backend.py @@ -1,14 +1,15 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. import json +import logging from pathlib import Path -from typing import Any, Iterable, TextIO, Tuple +from typing import Any, Dict, Iterable, TextIO, Tuple -import pydash from airbyte_protocol.models import AirbyteMessage # type: ignore from airbyte_protocol.models import Type as AirbyteMessageType from cachetools import LRUCache, cached from live_tests.commons.backends.base_backend import BaseBackend +from live_tests.commons.utils import sanitize_stream_name class FileDescriptorLRUCache(LRUCache): @@ -27,22 +28,64 @@ class FileBackend(BaseBackend): RELATIVE_TRACES_PATH = "traces.jsonl" RELATIVE_LOGS_PATH = "logs.jsonl" RELATIVE_CONTROLS_PATH = "controls.jsonl" - RECORD_PATHS_TO_POP = ["emitted_at"] CACHE = FileDescriptorLRUCache(maxsize=250) def __init__(self, output_directory: Path): self._output_directory = output_directory - - async def write(self, airbyte_messages: Iterable[AirbyteMessage]) -> None: + self.record_per_stream_directory = self._output_directory / "records_per_stream" + self.record_per_stream_directory.mkdir(exist_ok=True, parents=True) + self.record_per_stream_paths: Dict[str, Path] = {} + self.record_per_stream_paths_data_only: Dict[str, Path] = {} + + @property + def jsonl_specs_path(self) -> Path: + return (self._output_directory / self.RELATIVE_SPECS_PATH).resolve() + + @property + def jsonl_catalogs_path(self) -> Path: + return (self._output_directory / self.RELATIVE_CATALOGS_PATH).resolve() + + @property + def jsonl_connection_status_path(self) -> Path: + return (self._output_directory / self.RELATIVE_CONNECTION_STATUS_PATH).resolve() + + @property + def jsonl_records_path(self) -> Path: + return (self._output_directory / self.RELATIVE_RECORDS_PATH).resolve() + + @property + def jsonl_states_path(self) -> Path: + return (self._output_directory / self.RELATIVE_STATES_PATH).resolve() + + @property + def jsonl_traces_path(self) -> Path: + return (self._output_directory / self.RELATIVE_TRACES_PATH).resolve() + + @property + def jsonl_logs_path(self) -> Path: + return (self._output_directory / self.RELATIVE_LOGS_PATH).resolve() + + @property + def jsonl_controls_path(self) -> Path: + return (self._output_directory / self.RELATIVE_CONTROLS_PATH).resolve() + + @property + def jsonl_files(self) -> Iterable[Path]: + return [ + self.jsonl_catalogs_path, + self.jsonl_connection_status_path, + self.jsonl_records_path, + self.jsonl_specs_path, + self.jsonl_states_path, + self.jsonl_traces_path, + self.jsonl_logs_path, + self.jsonl_controls_path, + ] + + def write(self, airbyte_messages: Iterable[AirbyteMessage]) -> None: """ Write AirbyteMessages to the appropriate file. - Catalogs, connection status messages, specs, trace messages, logs, and control messages are all written to their - own file (e.g. "catalog.jsonl", "spec.jsonl"). - - Records and state messages are further subdivided, with one file per stream (e.g. "my_stream_records.jsonl", - "my_stream_states.jsonl"). Streams with global state are stored in a "_global_states.jsonl" file. - We use an LRU cache here to manage open file objects, in order to limit the number of concurrently open file descriptors. This mitigates the risk of hitting limits on the number of open file descriptors, particularly for connections with a high number of streams. The cache is designed to automatically close files upon eviction. @@ -53,52 +96,50 @@ def _open_file(path: Path) -> TextIO: return open(path, "a") try: + logging.info("Writing airbyte messages to disk") for _message in airbyte_messages: if not isinstance(_message, AirbyteMessage): continue - filepath, message = self._get_filepath_and_message(_message) - _open_file(self._output_directory / filepath).write(f"{message}\n") + filepaths, messages = self._get_filepaths_and_messages(_message) + for filepath, message in zip(filepaths, messages): + _open_file(self._output_directory / filepath).write(f"{message}\n") + logging.info("Finished writing airbyte messages to disk") finally: for f in self.CACHE.values(): f.close() - def _get_filepath_and_message(self, message: AirbyteMessage) -> Tuple[str, str]: + def _get_filepaths_and_messages(self, message: AirbyteMessage) -> Tuple[Tuple[str, ...], Tuple[str, ...]]: if message.type == AirbyteMessageType.CATALOG: - return self.RELATIVE_CATALOGS_PATH, message.catalog.json() + return (self.RELATIVE_CATALOGS_PATH,), (message.catalog.json(),) elif message.type == AirbyteMessageType.CONNECTION_STATUS: - return self.RELATIVE_CONNECTION_STATUS_PATH, message.connectionStatus.json() + return (self.RELATIVE_CONNECTION_STATUS_PATH,), (message.connectionStatus.json(),) elif message.type == AirbyteMessageType.RECORD: - record = json.loads(message.record.json()) - # TODO: once we have a comparator and/or database backend implemented we can remove this - for key_path in self.RECORD_PATHS_TO_POP: - pydash.objects.unset(record, key_path) - return f"{message.record.stream}_{self.RELATIVE_RECORDS_PATH}", json.dumps(record) + stream_name = message.record.stream + stream_file_path = self.record_per_stream_directory / f"{sanitize_stream_name(stream_name)}.jsonl" + stream_file_path_data_only = self.record_per_stream_directory / f"{sanitize_stream_name(stream_name)}_data_only.jsonl" + self.record_per_stream_paths[stream_name] = stream_file_path + self.record_per_stream_paths_data_only[stream_name] = stream_file_path_data_only + return (self.RELATIVE_RECORDS_PATH, str(stream_file_path), str(stream_file_path_data_only),), ( + message.json(sort_keys=True), + message.json(sort_keys=True), + json.dumps(message.record.data, sort_keys=True), + ) elif message.type == AirbyteMessageType.SPEC: - return self.RELATIVE_SPECS_PATH, message.spec.json() + return (self.RELATIVE_SPECS_PATH,), (message.spec.json(),) elif message.type == AirbyteMessageType.STATE: - if message.state.stream and message.state.stream.stream_descriptor: - stream_name = message.state.stream.stream_descriptor.name - stream_namespace = message.state.stream.stream_descriptor.namespace - filepath = ( - f"{stream_name}_{stream_namespace}_{self.RELATIVE_STATES_PATH}" - if stream_namespace - else f"{stream_name}_{self.RELATIVE_STATES_PATH}" - ) - else: - filepath = f"_global_{self.RELATIVE_STATES_PATH}" - return filepath, message.state.json() + return (self.RELATIVE_STATES_PATH,), (message.state.json(),) elif message.type == AirbyteMessageType.TRACE: - return self.RELATIVE_TRACES_PATH, message.trace.json() + return (self.RELATIVE_TRACES_PATH,), (message.trace.json(),) elif message.type == AirbyteMessageType.LOG: - return self.RELATIVE_LOGS_PATH, message.log.json() + return (self.RELATIVE_LOGS_PATH,), (message.log.json(),) elif message.type == AirbyteMessageType.CONTROL: - return self.RELATIVE_CONTROLS_PATH, message.control.json() + return (self.RELATIVE_CONTROLS_PATH,), (message.control.json(),) raise NotImplementedError(f"No handling for AirbyteMessage type {message.type} has been implemented. This is unexpected.") diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/connection_objects_retrieval.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/connection_objects_retrieval.py new file mode 100644 index 0000000000000..ccb2bec2f1626 --- /dev/null +++ b/airbyte-ci/connectors/live-tests/src/live_tests/commons/connection_objects_retrieval.py @@ -0,0 +1,160 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + +import json +import logging +from pathlib import Path +from typing import Dict, Optional, Set + +from connection_retriever import ConnectionObject, retrieve_objects # type: ignore +from connection_retriever.errors import NotPermittedError # type: ignore + +from .models import AirbyteCatalog, Command, ConfiguredAirbyteCatalog, ConnectionObjects, SecretDict + +LOGGER = logging.getLogger(__name__) + + +def parse_config(config: Dict | str | None) -> Optional[SecretDict]: + if not config: + return None + if isinstance(config, str): + return SecretDict(json.loads(config)) + else: + return SecretDict(config) + + +def parse_catalog(catalog: Dict | str | None) -> Optional[AirbyteCatalog]: + if not catalog: + return None + if isinstance(catalog, str): + return AirbyteCatalog.parse_obj(json.loads(catalog)) + else: + return AirbyteCatalog.parse_obj(catalog) + + +def parse_configured_catalog( + configured_catalog: Dict | str | None, +) -> Optional[ConfiguredAirbyteCatalog]: + if not configured_catalog: + return None + if isinstance(configured_catalog, str): + return ConfiguredAirbyteCatalog.parse_obj(json.loads(configured_catalog)) + else: + return ConfiguredAirbyteCatalog.parse_obj(configured_catalog) + + +def parse_state(state: Dict | str | None) -> Optional[Dict]: + if not state: + return None + if isinstance(state, str): + return json.loads(state) + else: + return state + + +def get_connector_config_from_path(config_path: Path) -> Optional[SecretDict]: + return parse_config(config_path.read_text()) + + +def get_state_from_path(state_path: Path) -> Optional[Dict]: + return parse_state(state_path.read_text()) + + +def get_configured_catalog_from_path(path: Path) -> Optional[ConfiguredAirbyteCatalog]: + return parse_configured_catalog(path.read_text()) + + +COMMAND_TO_REQUIRED_OBJECT_TYPES = { + Command.SPEC: set(), + Command.CHECK: {ConnectionObject.SOURCE_CONFIG}, + Command.DISCOVER: {ConnectionObject.SOURCE_CONFIG}, + Command.READ: {ConnectionObject.SOURCE_CONFIG, ConnectionObject.CONFIGURED_CATALOG}, + Command.READ_WITH_STATE: { + ConnectionObject.SOURCE_CONFIG, + ConnectionObject.CONFIGURED_CATALOG, + ConnectionObject.STATE, + }, +} + + +def get_connection_objects( + requested_objects: Set[ConnectionObject], + connection_id: Optional[str], + custom_config_path: Optional[Path], + custom_configured_catalog_path: Optional[Path], + custom_state_path: Optional[Path], + retrieval_reason: Optional[str], + fail_if_missing_objects: bool = True, + connector_image: Optional[str] = None, +) -> ConnectionObjects: + """This function retrieves the connection objects values. + It checks that the required objects are available and raises a UsageError if they are not. + If a connection_id is provided, it retrieves the connection objects from the connection. + If custom objects are provided, it overrides the retrieved objects with them. + + Args: + requested_objects (Set[ConnectionObject]): The set of requested connection objects. + connection_id (Optional[str]): The connection id to retrieve the connection objects for. + custom_config_path (Optional[Path]): The local path to the custom config to use. + custom_configured_catalog_path (Optional[Path]): The local path to the custom catalog to use. + custom_state_path (Optional[Path]): The local path to the custom state to use. + retrieval_reason (Optional[str]): The reason to access the connection objects. + fail_if_missing_objects (bool, optional): Whether to raise a ValueError if a required object is missing. Defaults to True. + connector_image (Optional[str]): The image name for the connector under test. + Raises: + click.UsageError: If a required object is missing for the command. + click.UsageError: If a retrieval reason is missing when passing a connection id. + Returns: + ConnectionObjects: The connection objects values. + """ + + custom_config = get_connector_config_from_path(custom_config_path) if custom_config_path else None + custom_configured_catalog = get_configured_catalog_from_path(custom_configured_catalog_path) if custom_configured_catalog_path else None + custom_state = get_state_from_path(custom_state_path) if custom_state_path else None + + if not connection_id: + connection_object = ConnectionObjects( + source_config=custom_config, + destination_config=custom_config, + catalog=None, + configured_catalog=custom_configured_catalog, + state=custom_state, + workspace_id=None, + source_id=None, + destination_id=None, + ) + else: + if not retrieval_reason: + raise ValueError("A retrieval reason is required to access the connection objects when passing a connection id.") + retrieved_objects = retrieve_objects(connection_id, requested_objects, retrieval_reason=retrieval_reason) + retrieved_source_config = parse_config(retrieved_objects.get(ConnectionObject.SOURCE_CONFIG)) + rerieved_destination_config = parse_config(retrieved_objects.get(ConnectionObject.DESTINATION_CONFIG)) + retrieved_catalog = parse_catalog(retrieved_objects.get(ConnectionObject.CATALOG)) + retrieved_configured_catalog = parse_configured_catalog(retrieved_objects.get(ConnectionObject.CONFIGURED_CATALOG)) + retrieved_state = parse_state(retrieved_objects.get(ConnectionObject.STATE)) + + retrieved_source_docker_image = retrieved_objects.get(ConnectionObject.SOURCE_DOCKER_IMAGE) + if retrieved_source_docker_image is None: + raise ValueError(f"A docker image was not found for connection ID {connection_id}.") + elif retrieved_source_docker_image.split(":")[0] != connector_image: + raise NotPermittedError( + f"The provided docker image ({connector_image}) does not match the image for connection ID {connection_id}." + ) + + connection_object = ConnectionObjects( + source_config=custom_config if custom_config else retrieved_source_config, + destination_config=custom_config if custom_config else rerieved_destination_config, + catalog=retrieved_catalog, + configured_catalog=custom_configured_catalog if custom_configured_catalog else retrieved_configured_catalog, + state=custom_state if custom_state else retrieved_state, + workspace_id=retrieved_objects.get(ConnectionObject.WORKSPACE_ID), + source_id=retrieved_objects.get(ConnectionObject.SOURCE_ID), + destination_id=retrieved_objects.get(ConnectionObject.DESTINATION_ID), + ) + if fail_if_missing_objects: + if not connection_object.source_config and ConnectionObject.SOURCE_CONFIG in requested_objects: + raise ValueError("A source config is required to run the command.") + if not connection_object.catalog and ConnectionObject.CONFIGURED_CATALOG in requested_objects: + raise ValueError("A catalog is required to run the command.") + if not connection_object.state and ConnectionObject.STATE in requested_objects: + raise ValueError("A state is required to run the command.") + return connection_object diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/connector_runner.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/connector_runner.py index ff1bacaf23dd4..b7ba8ef8fd089 100644 --- a/airbyte-ci/connectors/live-tests/src/live_tests/commons/connector_runner.py +++ b/airbyte-ci/connectors/live-tests/src/live_tests/commons/connector_runner.py @@ -2,153 +2,61 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # -import hashlib +import datetime import json import logging -import os import uuid from pathlib import Path -from typing import Dict, List, Optional +from typing import List, Optional +import anyio +import asyncer import dagger -import docker # type: ignore -import pytest -from airbyte_protocol.models import ConfiguredAirbyteCatalog # type: ignore -from live_tests.commons.models import Command, ConnectorUnderTest, ExecutionResult, SecretDict - - -async def get_container_from_id(dagger_client: dagger.Client, container_id: str) -> dagger.Container: - """Get a dagger container from its id. - Please remind that container id are not persistent and can change between Dagger sessions. - - Args: - dagger_client (dagger.Client): The dagger client to use to import the connector image - """ - try: - return await dagger_client.container(id=dagger.ContainerID(container_id)) - except dagger.DaggerError as e: - pytest.exit(f"Failed to load connector container: {e}") - - -async def get_container_from_tarball_path(dagger_client: dagger.Client, tarball_path: Path) -> dagger.Container: - if not tarball_path.exists(): - pytest.exit(f"Connector image tarball {tarball_path} does not exist") - container_under_test_tar_file = ( - dagger_client.host().directory(str(tarball_path.parent), include=tarball_path.name).file(tarball_path.name) - ) - try: - return await dagger_client.container().import_(container_under_test_tar_file) - except dagger.DaggerError as e: - pytest.exit(f"Failed to import connector image from tarball: {e}") - - -async def get_container_from_local_image(dagger_client: dagger.Client, local_image_name: str) -> Optional[dagger.Container]: - """Get a dagger container from a local image. - It will use Docker python client to export the image to a tarball and then import it into dagger. - - Args: - dagger_client (dagger.Client): The dagger client to use to import the connector image - local_image_name (str): The name of the local image to import - - Returns: - Optional[dagger.Container]: The dagger container for the local image or None if the image does not exist - """ - docker_client = docker.from_env() - - try: - image = docker_client.images.get(local_image_name) - except docker.errors.ImageNotFound: - return None - - image_digest = image.id.replace("sha256:", "") - tarball_path = Path(f"/tmp/{image_digest}.tar") - if not tarball_path.exists(): - logging.info(f"Exporting local connector image {local_image_name} to tarball {tarball_path}") - with open(tarball_path, "wb") as f: - for chunk in image.save(named=True): - f.write(chunk) - return await get_container_from_tarball_path(dagger_client, tarball_path) - - -async def get_container_from_dockerhub_image(dagger_client: dagger.Client, dockerhub_image_name: str) -> dagger.Container: - """Get a dagger container from a dockerhub image. - - Args: - dagger_client (dagger.Client): The dagger client to use to import the connector image - dockerhub_image_name (str): The name of the dockerhub image to import - - Returns: - dagger.Container: The dagger container for the dockerhub image - """ - try: - return await dagger_client.container().from_(dockerhub_image_name) - except dagger.DaggerError as e: - pytest.exit(f"Failed to import connector image from DockerHub: {e}") - - -async def get_connector_container(dagger_client: dagger.Client, image_name_with_tag: str) -> dagger.Container: - """Get a dagger container for the connector image to test. - - Args: - dagger_client (dagger.Client): The dagger client to use to import the connector image - image_name_with_tag (str): The docker image name and tag of the connector image to test - - Returns: - dagger.Container: The dagger container for the connector image to test - """ - # If a container_id.txt file is available, we'll use it to load the connector container - # We use a txt file as container ids can be too long to be passed as env vars - # It's used for dagger-in-dagger use case with airbyte-ci, when the connector container is built via an upstream dagger operation - connector_container_id_path = Path("/tmp/container_id.txt") - if connector_container_id_path.exists(): - # If the CONNECTOR_CONTAINER_ID env var is set, we'll use it to load the connector container - return await get_container_from_id(dagger_client, connector_container_id_path.read_text()) - - # If the CONNECTOR_UNDER_TEST_IMAGE_TAR_PATH env var is set, we'll use it to import the connector image from the tarball - if connector_image_tarball_path := os.environ.get("CONNECTOR_UNDER_TEST_IMAGE_TAR_PATH"): - tarball_path = Path(connector_image_tarball_path) - return await get_container_from_tarball_path(dagger_client, tarball_path) - - # Let's try to load the connector container from a local image - if connector_container := await get_container_from_local_image(dagger_client, image_name_with_tag): - return connector_container - - # If we get here, we'll try to pull the connector image from DockerHub - return await get_container_from_dockerhub_image(dagger_client, image_name_with_tag) +from live_tests.commons import errors +from live_tests.commons.models import Command, ExecutionInputs, ExecutionResult +from live_tests.commons.proxy import Proxy class ConnectorRunner: IN_CONTAINER_CONFIG_PATH = "/data/config.json" - IN_CONTAINER_CATALOG_PATH = "/data/catalog.json" + IN_CONTAINER_CONFIGURED_CATALOG_PATH = "/data/catalog.json" IN_CONTAINER_STATE_PATH = "/data/state.json" - MITMPROXY_IMAGE = "mitmproxy/mitmproxy:9.0.1" - HTTP_DUMP_FILE_NAME = "http_dump.mitm" + IN_CONTAINER_OUTPUT_PATH = "/output.txt" def __init__( self, dagger_client: dagger.Client, - connector_under_test: ConnectorUnderTest, - command: Command, - config: Optional[SecretDict] = None, - catalog: Optional[ConfiguredAirbyteCatalog] = None, - state: Optional[Dict] = None, - environment_variables: Optional[Dict] = None, - enable_http_cache: bool = True, + execution_inputs: ExecutionInputs, + http_proxy: Optional[Proxy] = None, ): - self.dagger_client = dagger_client - self.connector_under_test = connector_under_test - self.command = command - self.config = config - self.catalog = catalog - self.state = state - self.environment_variables = environment_variables if environment_variables else {} - self.enable_http_cache = enable_http_cache - self.full_command: List[str] = self._get_full_command(command) + self.connector_under_test = execution_inputs.connector_under_test + self.command = execution_inputs.command + self.output_dir = execution_inputs.output_dir + self.config = execution_inputs.config + self.configured_catalog = execution_inputs.configured_catalog + self.state = execution_inputs.state + self.duckdb_path = execution_inputs.duckdb_path + self.actor_id = execution_inputs.actor_id + self.environment_variables = execution_inputs.environment_variables if execution_inputs.environment_variables else {} + + self.full_command: List[str] = self._get_full_command(execution_inputs.command) + self.completion_event = anyio.Event() + self.http_proxy = http_proxy + self.logger = logging.getLogger(f"{self.connector_under_test.name}-{self.connector_under_test.version}") + self.dagger_client = dagger_client.pipeline(f"{self.connector_under_test.name}-{self.connector_under_test.version}") @property def _connector_under_test_container(self) -> dagger.Container: return self.connector_under_test.container + @property + def stdout_file_path(self) -> Path: + return (self.output_dir / "stdout.log").resolve() + + @property + def stderr_file_path(self) -> Path: + return (self.output_dir / "stderr.log").resolve() + def _get_full_command(self, command: Command) -> List[str]: if command is Command.SPEC: return ["spec"] @@ -162,7 +70,7 @@ def _get_full_command(self, command: Command) -> List[str]: "--config", self.IN_CONTAINER_CONFIG_PATH, "--catalog", - self.IN_CONTAINER_CATALOG_PATH, + self.IN_CONTAINER_CONFIGURED_CATALOG_PATH, ] elif command is Command.READ_WITH_STATE: return [ @@ -170,7 +78,7 @@ def _get_full_command(self, command: Command) -> List[str]: "--config", self.IN_CONTAINER_CONFIG_PATH, "--catalog", - self.IN_CONTAINER_CATALOG_PATH, + self.IN_CONTAINER_CONFIGURED_CATALOG_PATH, "--state", self.IN_CONTAINER_STATE_PATH, ] @@ -188,9 +96,14 @@ async def get_container_entrypoint(self) -> str: assert entrypoint, "The connector container has no entrypoint" return " ".join(entrypoint) - async def run( + async def run(self) -> ExecutionResult: + async with asyncer.create_task_group() as task_group: + soon_result = task_group.soonify(self._run)() + task_group.soonify(self._log_progress)() + return soon_result.value + + async def _run( self, - raise_on_container_error: bool = True, ) -> ExecutionResult: container = self._connector_under_test_container # Do not cache downstream dagger layers @@ -201,88 +114,77 @@ async def run( container = container.with_new_file(self.IN_CONTAINER_CONFIG_PATH, contents=json.dumps(dict(self.config))) if self.state: container = container.with_new_file(self.IN_CONTAINER_STATE_PATH, contents=json.dumps(self.state)) - if self.catalog: - container = container.with_new_file(self.IN_CONTAINER_CATALOG_PATH, contents=self.catalog.json()) - if self.enable_http_cache: - container = await self._bind_connector_container_to_proxy(container) - executed_container = await container.with_exec(self.full_command).sync() - - return ExecutionResult( - stdout=await executed_container.stdout(), - stderr=await executed_container.stderr(), - executed_container=executed_container, - http_dump=await self._retrieve_http_dump() if self.enable_http_cache else None, - ) - - def _get_http_dumps_cache_volume(self) -> dagger.CacheVolume: - config_data = self.config.data if self.config else None - proxy_cache_key = hashlib.md5((self.connector_under_test.name + str(config_data)).encode("utf-8")).hexdigest() - return self.dagger_client.cache_volume(f"{self.MITMPROXY_IMAGE}{proxy_cache_key}") - - def _get_mitmproxy_dir_cache(self) -> dagger.CacheVolume: - return self.dagger_client.cache_volume(self.MITMPROXY_IMAGE) - - async def _get_proxy_container( - self, - ) -> dagger.Container: - proxy_container = ( - self.dagger_client.container() - .from_(self.MITMPROXY_IMAGE) - .with_exec(["mkdir", "-p", "/home/mitmproxy/.mitmproxy"], skip_entrypoint=True) - .with_mounted_cache("/dumps", self._get_http_dumps_cache_volume()) - .with_mounted_cache("/home/mitmproxy/.mitmproxy", self._get_mitmproxy_dir_cache()) - ) - previous_dump_files = ( - await proxy_container.with_env_variable("CACHEBUSTER", str(uuid.uuid4())) - .with_exec(["ls", "/dumps"], skip_entrypoint=True) - .stdout() - ).splitlines() - if self.HTTP_DUMP_FILE_NAME in previous_dump_files: - command = [ - "mitmweb", - "--server-replay", - f"/dumps/{self.HTTP_DUMP_FILE_NAME}", - ] - else: - command = [ - "mitmweb", - "--save-stream-file", - f"/dumps/{self.HTTP_DUMP_FILE_NAME}", - ] - - return proxy_container.with_exec(command) - - async def _bind_connector_container_to_proxy(self, container: dagger.Container) -> dagger.Container: - proxy_srv = await self._get_proxy_container() - proxy_host, proxy_port = "proxy_server", 8080 - cert_path_in_volume = "/mitmproxy_dir/mitmproxy-ca.pem" - requests_cert_path = "/usr/local/lib/python3.9/site-packages/certifi/cacert.pem" - ca_certificate_path = "/usr/local/share/ca-certificates/mitmproxy.crt" + if self.configured_catalog: + container = container.with_new_file( + self.IN_CONTAINER_CONFIGURED_CATALOG_PATH, + contents=self.configured_catalog.json(), + ) + if self.http_proxy: + container = await self.http_proxy.bind_container(container) - return ( - container.with_service_binding(proxy_host, proxy_srv.with_exposed_port(proxy_port).as_service()) - .with_mounted_cache("/mitmproxy_dir", self._get_mitmproxy_dir_cache()) - .with_exec(["cp", cert_path_in_volume, requests_cert_path], skip_entrypoint=True) - .with_exec(["cp", cert_path_in_volume, ca_certificate_path], skip_entrypoint=True) - .with_env_variable("REQUESTS_CA_BUNDLE", requests_cert_path) - .with_exec(["update-ca-certificates"], skip_entrypoint=True) - .with_env_variable("http_proxy", f"{proxy_host}:{proxy_port}") - .with_env_variable("https_proxy", f"{proxy_host}:{proxy_port}") - ) + self.logger.info(f"⏳ Start running {self.command.value} command") - async def _retrieve_http_dump(self) -> dagger.File: - return await ( - self.dagger_client.container() - .from_("alpine:latest") - .with_mounted_cache("/dumps", self._get_http_dumps_cache_volume()) - .with_exec(["mkdir", "/to_export"]) - .with_exec( + try: + entrypoint = await container.entrypoint() + assert entrypoint, "The connector container has no entrypoint" + airbyte_command = entrypoint + self.full_command + # We are piping the output to a file to avoidQueryError: file size exceeds limit 134217728 + container = container.with_exec( [ - "cp", - "-r", - f"/dumps/{self.HTTP_DUMP_FILE_NAME}", - f"/to_export/{self.HTTP_DUMP_FILE_NAME}", - ] + "sh", + "-c", + " ".join(airbyte_command) + f" > {self.IN_CONTAINER_OUTPUT_PATH} 2>&1 | tee -a {self.IN_CONTAINER_OUTPUT_PATH}", + ], + skip_entrypoint=True, ) - .file(f"/to_export/{self.HTTP_DUMP_FILE_NAME}") + executed_container = await container.sync() + # We exporting to disk as we can't read .stdout() or await file.contents() as it might blow up the memory + stdout_exported = await executed_container.file(self.IN_CONTAINER_OUTPUT_PATH).export(str(self.stdout_file_path)) + if not stdout_exported: + raise errors.ExportError(f"Failed to export {self.IN_CONTAINER_OUTPUT_PATH}") + + stderr = await executed_container.stderr() + self.stderr_file_path.write_text(stderr) + success = True + except dagger.ExecError as e: + self.stderr_file_path.write_text(e.stderr) + self.stdout_file_path.write_text(e.stdout) + executed_container = None + success = False + + self.completion_event.set() + if not success: + self.logger.error(f"❌ Failed to run {self.command.value} command") + else: + self.logger.info(f"⌛ Finished running {self.command.value} command") + execution_result = await ExecutionResult.load( + command=self.command, + connector_under_test=self.connector_under_test, + actor_id=self.actor_id, + stdout_file_path=self.stdout_file_path, + stderr_file_path=self.stderr_file_path, + success=success, + http_dump=await self.http_proxy.retrieve_http_dump() if self.http_proxy else None, + executed_container=executed_container, ) + await execution_result.save_artifacts(self.output_dir, self.duckdb_path) + return execution_result + + async def _log_progress(self) -> None: + start_time = datetime.datetime.utcnow() + message = f"⏳ Still running {self.command.value} command" + while not self.completion_event.is_set(): + duration = datetime.datetime.utcnow() - start_time + elapsed_seconds = duration.total_seconds() + if elapsed_seconds > 10 and round(elapsed_seconds) % 10 == 0: + self.logger.info(f"{message} (duration: {self.format_duration(duration)})") + await anyio.sleep(1) + + @staticmethod + def format_duration(time_delta: datetime.timedelta) -> str: + total_seconds = time_delta.total_seconds() + if total_seconds < 60: + return "{:.2f}s".format(total_seconds) + minutes = int(total_seconds // 60) + seconds = int(total_seconds % 60) + return "{:02d}mn{:02d}s".format(minutes, seconds) diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/errors.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/errors.py new file mode 100644 index 0000000000000..402429cfb2d53 --- /dev/null +++ b/airbyte-ci/connectors/live-tests/src/live_tests/commons/errors.py @@ -0,0 +1,6 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + + +class ExportError(Exception): + def __init__(self, message: str): + super().__init__(message) diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/mitm_addons.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/mitm_addons.py new file mode 100644 index 0000000000000..d650c843f217e --- /dev/null +++ b/airbyte-ci/connectors/live-tests/src/live_tests/commons/mitm_addons.py @@ -0,0 +1,27 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + +from urllib.parse import parse_qs, urlencode, urlparse + +from mitmproxy import http + + +class SortQueryParams: + """This addon sorts query parameters in the request URL. + It is useful for testing purposes, as it makes it easier to compare requests and get cache hits. + """ + + def request(self, flow: http.HTTPFlow) -> None: + if url := flow.request.url: + parsed_url = urlparse(url) + # Get query parameters as dictionary + query_params = parse_qs(parsed_url.query) + # Sort query parameters alphabetically + sorted_params = {key: query_params[key] for key in sorted(query_params.keys())} + # Reconstruct the URL with sorted query parameters + sorted_url = parsed_url._replace(query=urlencode(sorted_params, doseq=True)).geturl() + + # Update the request URL + flow.request.url = sorted_url + + +addons = [SortQueryParams()] diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/models.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/models.py index 5425fca704f42..2123cc9f5fa75 100644 --- a/airbyte-ci/connectors/live-tests/src/live_tests/commons/models.py +++ b/airbyte-ci/connectors/live-tests/src/live_tests/commons/models.py @@ -1,16 +1,31 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. +from __future__ import annotations -import time +import json +import logging +import tempfile +from collections import defaultdict from dataclasses import dataclass, field from enum import Enum from pathlib import Path -from typing import Any, Dict, Iterable, Iterator, List, MutableMapping, Optional, Tuple +from typing import Any, Dict, Iterable, Iterator, List, MutableMapping, Optional, Type import _collections_abc import dagger -from airbyte_protocol.models import AirbyteMessage # type: ignore -from airbyte_protocol.models import ConfiguredAirbyteCatalog # type: ignore -from live_tests.commons.backends import FileBackend +import requests +from airbyte_protocol.models import AirbyteCatalog, AirbyteMessage, ConfiguredAirbyteCatalog # type: ignore +from airbyte_protocol.models import Type as AirbyteMessageType +from genson import SchemaBuilder # type: ignore +from live_tests.commons.backends import DuckDbBackend, FileBackend +from live_tests.commons.secret_access import get_airbyte_api_key +from live_tests.commons.utils import ( + get_connector_container, + get_http_flows_from_mitm_dump, + mitm_http_stream_to_har, + sanitize_stream_name, + sort_dict_keys, +) +from mitmproxy import http from pydantic import ValidationError @@ -50,35 +65,35 @@ def __contains__(self, key: Any) -> bool: def __repr__(self) -> str: return repr(self.data) - def __or__(self, other: "UserDict" | dict) -> "UserDict": + def __or__(self, other: UserDict | dict) -> UserDict: if isinstance(other, UserDict): return self.__class__(self.data | other.data) # type: ignore if isinstance(other, dict): return self.__class__(self.data | other) # type: ignore return NotImplemented - def __ror__(self, other: "UserDict" | dict) -> "UserDict": + def __ror__(self, other: UserDict | dict) -> UserDict: if isinstance(other, UserDict): return self.__class__(other.data | self.data) # type: ignore if isinstance(other, dict): return self.__class__(other | self.data) # type: ignore return NotImplemented - def __ior__(self, other: "UserDict" | dict) -> "UserDict": + def __ior__(self, other: UserDict | dict) -> UserDict: if isinstance(other, UserDict): self.data |= other.data # type: ignore else: self.data |= other # type: ignore return self - def __copy__(self) -> "UserDict": + def __copy__(self) -> UserDict: inst = self.__class__.__new__(self.__class__) inst.__dict__.update(self.__dict__) # Create a copy and avoid triggering descriptors inst.__dict__["data"] = self.__dict__["data"].copy() return inst - def copy(self) -> "UserDict": + def copy(self) -> UserDict: if self.__class__ is UserDict: return UserDict(self.data.copy()) # type: ignore import copy @@ -93,7 +108,7 @@ def copy(self) -> "UserDict": return c @classmethod - def fromkeys(cls, iterable: Iterable, value: Optional[Any] = None) -> "UserDict": + def fromkeys(cls, iterable: Iterable, value: Optional[Any] = None) -> UserDict: d = cls() for key in iterable: d[key] = value @@ -116,40 +131,67 @@ class Command(Enum): SPEC = "spec" +class TargetOrControl(Enum): + TARGET = "target" + CONTROL = "control" + + +class ActorType(Enum): + SOURCE = "source" + DESTINATION = "destination" + + @dataclass class ConnectorUnderTest: image_name: str container: dagger.Container + target_or_control: TargetOrControl @property def name(self) -> str: return self.image_name.replace("airbyte/", "").split(":")[0] + @property + def name_without_type_prefix(self) -> str: + return self.name.replace(f"{self.actor_type.value}-", "") + @property def version(self) -> str: return self.image_name.replace("airbyte/", "").split(":")[1] + @property + def actor_type(self) -> ActorType: + if "airbyte/destination-" in self.image_name: + return ActorType.DESTINATION + elif "airbyte/source-" in self.image_name: + return ActorType.SOURCE + else: + raise ValueError( + f"Can't infer the actor type. Connector image name {self.image_name} does not contain 'airbyte/source' or 'airbyte/destination'" + ) + + @classmethod + async def from_image_name( + cls: Type[ConnectorUnderTest], + dagger_client: dagger.Client, + image_name: str, + target_or_control: TargetOrControl, + ) -> ConnectorUnderTest: + container = await get_connector_container(dagger_client, image_name) + return cls(image_name, container, target_or_control) + @dataclass class ExecutionInputs: connector_under_test: ConnectorUnderTest + actor_id: str + global_output_dir: Path command: Command config: Optional[SecretDict] = None - catalog: Optional[ConfiguredAirbyteCatalog] = None + configured_catalog: Optional[ConfiguredAirbyteCatalog] = None state: Optional[Dict] = None environment_variables: Optional[Dict] = None - enable_http_cache: bool = True - - def to_dict(self) -> dict: - return { - "connector_under_test": self.connector_under_test, - "command": self.command, - "config": self.config, - "catalog": self.catalog, - "state": self.state, - "environment_variables": self.environment_variables, - "enable_http_cache": self.enable_http_cache, - } + duckdb_path: Optional[Path] = None def raise_if_missing_attr_for_command(self, attribute: str) -> None: if getattr(self, attribute) is None: @@ -162,76 +204,228 @@ def __post_init__(self) -> None: self.raise_if_missing_attr_for_command("config") if self.command is Command.READ: self.raise_if_missing_attr_for_command("config") - self.raise_if_missing_attr_for_command("catalog") + self.raise_if_missing_attr_for_command("configured_catalog") if self.command is Command.READ_WITH_STATE: self.raise_if_missing_attr_for_command("config") - self.raise_if_missing_attr_for_command("catalog") + self.raise_if_missing_attr_for_command("configured_catalog") self.raise_if_missing_attr_for_command("state") - -@dataclass -class ExecutionResult: - stdout: str - stderr: str - executed_container: dagger.Container - http_dump: Optional[dagger.File] - airbyte_messages: List[AirbyteMessage] = field(default_factory=list) - airbyte_messages_parsing_errors: List[Tuple[Exception, str]] = field(default_factory=list) - - def __post_init__(self) -> None: - self.airbyte_messages, self.airbyte_messages_parsing_errors = self.parse_airbyte_messages_from_command_output(self.stdout) - - @staticmethod - def parse_airbyte_messages_from_command_output( - command_output: str, - ) -> Tuple[List[AirbyteMessage], List[Tuple[Exception, str]]]: - airbyte_messages: List[AirbyteMessage] = [] - parsing_errors: List[Tuple[Exception, str]] = [] - for line in command_output.splitlines(): - try: - airbyte_messages.append(AirbyteMessage.parse_raw(line)) - except ValidationError as e: - parsing_errors.append((e, line)) - return airbyte_messages, parsing_errors + @property + def output_dir(self) -> Path: + output_dir = ( + self.global_output_dir + / f"command_execution_artifacts/{self.connector_under_test.name}/{self.command.value}/{self.connector_under_test.version}/" + ) + output_dir.mkdir(parents=True, exist_ok=True) + return output_dir @dataclass -class ExecutionReport: - execution_inputs: ExecutionInputs - execution_result: ExecutionResult - created_at: int = field(default_factory=lambda: int(time.time())) - saved_path: Optional[Path] = None +class ExecutionResult: + actor_id: str + connector_under_test: ConnectorUnderTest + command: Command + stdout_file_path: Path + stderr_file_path: Path + success: bool + executed_container: Optional[dagger.Container] + http_dump: Optional[dagger.File] = None + http_flows: List[http.HTTPFlow] = field(default_factory=list) + stream_schemas: Optional[Dict[str, Any]] = None + backend: Optional[FileBackend] = None + + HTTP_DUMP_FILE_NAME = "http_dump.mitm" + HAR_FILE_NAME = "http_dump.har" @property - def report_dir(self) -> str: - return f"{self.execution_inputs.connector_under_test.name}/{self.execution_inputs.command.value}/{self.execution_inputs.connector_under_test.version}/" + def logger(self) -> logging.Logger: + return logging.getLogger(f"{self.connector_under_test.target_or_control.value}-{self.command.value}") @property - def stdout_filename(self) -> str: - return "stdout.log" + def airbyte_messages(self) -> Iterable[AirbyteMessage]: + return self.parse_airbyte_messages_from_command_output(self.stdout_file_path) @property - def stderr_filename(self) -> str: - return "stderr.log" + def duckdb_schema(self) -> Iterable[str]: + return (self.connector_under_test.target_or_control.value, self.command.value) - @property - def http_dump_filename(self) -> str: - return "http_dump.mitm" - - async def save_to_disk(self, output_dir: Path) -> None: - final_dir = output_dir / self.report_dir - final_dir.mkdir(parents=True, exist_ok=True) - stdout_file_path = final_dir / self.stdout_filename - stdout_file_path.write_text(self.execution_result.stdout) - - stderr_file_path = final_dir / self.stderr_filename - stderr_file_path.write_text(self.execution_result.stderr) - if self.execution_result.http_dump: - http_dump_file_path = final_dir / self.http_dump_filename - await self.execution_result.http_dump.export(str(http_dump_file_path.resolve())) - # TODO merge ExecutionReport.save_to_disk and Backend.write? - # Make backends use customizable - airbyte_messages_dir = final_dir / "airbyte_messages" + @classmethod + async def load( + cls: Type[ExecutionResult], + connector_under_test: ConnectorUnderTest, + actor_id: str, + command: Command, + stdout_file_path: Path, + stderr_file_path: Path, + success: bool, + executed_container: Optional[dagger.Container], + http_dump: Optional[dagger.File] = None, + ) -> ExecutionResult: + execution_result = cls( + actor_id, + connector_under_test, + command, + stdout_file_path, + stderr_file_path, + success, + executed_container, + http_dump, + ) + await execution_result.load_http_flows() + return execution_result + + async def load_http_flows(self) -> None: + if not self.http_dump: + return + with tempfile.NamedTemporaryFile() as temp_file: + await self.http_dump.export(temp_file.name) + self.http_flows = get_http_flows_from_mitm_dump(Path(temp_file.name)) + + def parse_airbyte_messages_from_command_output( + self, command_output_path: Path, log_validation_errors: bool = False + ) -> Iterable[AirbyteMessage]: + with open(command_output_path, "r") as command_output: + for line in command_output: + try: + yield AirbyteMessage.parse_raw(line) + except ValidationError as e: + if log_validation_errors: + self.logger.warn(f"Error parsing AirbyteMessage: {e}") + + def get_records(self) -> Iterable[AirbyteMessage]: + self.logger.info( + f"Reading records all records for command {self.command.value} on {self.connector_under_test.target_or_control.value} version." + ) + for message in self.airbyte_messages: + if message.type is AirbyteMessageType.RECORD: + yield message + + def generate_stream_schemas(self) -> Dict[str, Any]: + self.logger.info("Generating stream schemas") + stream_builders: Dict[str, SchemaBuilder] = {} + for record in self.get_records(): + stream = record.record.stream + if stream not in stream_builders: + stream_schema_builder = SchemaBuilder() + stream_schema_builder.add_schema({"type": "object", "properties": {}}) + stream_builders[stream] = stream_schema_builder + stream_builders[stream].add_object(record.record.data) + self.logger.info("Stream schemas generated") + return {stream: sort_dict_keys(stream_builders[stream].to_schema()) for stream in stream_builders} + + def get_records_per_stream(self, stream: str) -> Iterator[AirbyteMessage]: + assert self.backend is not None, "Backend must be set to get records per stream" + self.logger.info(f"Reading records for stream {stream}") + if stream not in self.backend.record_per_stream_paths: + self.logger.warning(f"No records found for stream {stream}") + yield from [] + else: + for message in self.parse_airbyte_messages_from_command_output( + self.backend.record_per_stream_paths[stream], log_validation_errors=True + ): + if message.type is AirbyteMessageType.RECORD: + yield message + + def get_message_count_per_type(self) -> Dict[AirbyteMessageType, int]: + message_count: Dict[AirbyteMessageType, int] = defaultdict(int) + for message in self.airbyte_messages: + message_count[message.type] += 1 + return message_count + + async def save_http_dump(self, output_dir: Path) -> None: + if self.http_dump: + self.logger.info("An http dump was captured during the execution of the command, saving it.") + http_dump_file_path = (output_dir / self.HTTP_DUMP_FILE_NAME).resolve() + await self.http_dump.export(str(http_dump_file_path)) + self.logger.info(f"Http dump saved to {http_dump_file_path}") + + # Define where the har file will be saved + har_file_path = (output_dir / self.HAR_FILE_NAME).resolve() + # Convert the mitmproxy dump file to a har file + mitm_http_stream_to_har(http_dump_file_path, har_file_path) + self.logger.info(f"Har file saved to {har_file_path}") + else: + self.logger.warning("No http dump to save") + + def save_airbyte_messages(self, output_dir: Path, duckdb_path: Optional[Path] = None) -> None: + self.logger.info("Saving Airbyte messages to disk") + airbyte_messages_dir = output_dir / "airbyte_messages" airbyte_messages_dir.mkdir(parents=True, exist_ok=True) - await FileBackend(airbyte_messages_dir).write(self.execution_result.airbyte_messages) - self.saved_path = final_dir + if duckdb_path: + self.backend = DuckDbBackend(airbyte_messages_dir, duckdb_path, self.duckdb_schema) + else: + self.backend = FileBackend(airbyte_messages_dir) + self.backend.write(self.airbyte_messages) + self.logger.info("Airbyte messages saved") + + def save_stream_schemas(self, output_dir: Path) -> None: + self.stream_schemas = self.generate_stream_schemas() + stream_schemas_dir = output_dir / "stream_schemas" + stream_schemas_dir.mkdir(parents=True, exist_ok=True) + for stream_name, stream_schema in self.stream_schemas.items(): + (stream_schemas_dir / f"{sanitize_stream_name(stream_name)}.json").write_text(json.dumps(stream_schema, sort_keys=True)) + self.logger.info("Stream schemas saved to disk") + + async def save_artifacts(self, output_dir: Path, duckdb_path: Optional[Path] = None) -> None: + self.logger.info("Saving artifacts to disk") + self.save_airbyte_messages(output_dir, duckdb_path) + self.update_configuration() + await self.save_http_dump(output_dir) + self.save_stream_schemas(output_dir) + self.logger.info("All artifacts saved to disk") + + def get_updated_configuration(self, control_message_path: Path) -> Optional[Dict[str, Any]]: + """Iterate through the control messages to find CONNECTOR_CONFIG message and return the last updated configuration.""" + if not control_message_path.exists(): + return None + updated_config = None + for line in control_message_path.read_text().splitlines(): + if line.strip(): + connector_config = json.loads(line.strip()).get("connectorConfig", {}) + if connector_config: + updated_config = connector_config + return updated_config + + def update_configuration(self) -> None: + """This function checks if a configuration has to be updated by reading the control messages file. + If a configuration has to be updated, it updates the configuration on the actor using the Airbyte API. + """ + assert self.backend is not None, "Backend must be set to update configuration in order to find the control messages path" + updated_configuration = self.get_updated_configuration(self.backend.jsonl_controls_path) + if updated_configuration is None: + return + + self.logger.warning(f"Updating configuration for {self.connector_under_test.name}, actor {self.actor_id}") + url = f"https://api.airbyte.com/v1/{self.connector_under_test.actor_type.value}s/{self.actor_id}" + + payload = { + "configuration": { + **updated_configuration, + **{f"{self.connector_under_test.actor_type.value}Type": self.connector_under_test.name_without_type_prefix}, + } + } + headers = { + "accept": "application/json", + "content-type": "application/json", + "authorization": f"Bearer {get_airbyte_api_key()}", + } + + response = requests.patch(url, json=payload, headers=headers) + try: + response.raise_for_status() + except requests.HTTPError as e: + self.logger.error(f"Failed to update {self.connector_under_test.name} configuration on actor {self.actor_id}: {e}") + self.logger.error(f"Response: {response.text}") + self.logger.info(f"Updated configuration for {self.connector_under_test.name}, actor {self.actor_id}") + + +@dataclass(kw_only=True) +class ConnectionObjects: + source_config: Optional[SecretDict] + destination_config: Optional[SecretDict] + configured_catalog: Optional[ConfiguredAirbyteCatalog] + catalog: Optional[AirbyteCatalog] + state: Optional[Dict] + workspace_id: Optional[str] + source_id: Optional[str] + destination_id: Optional[str] diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/proxy.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/proxy.py new file mode 100644 index 0000000000000..a50f7f4c37804 --- /dev/null +++ b/airbyte-ci/connectors/live-tests/src/live_tests/commons/proxy.py @@ -0,0 +1,180 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + +import logging +import uuid +from typing import Optional + +import dagger + +from . import mitm_addons + + +class Proxy: + """ + This class is a wrapper around a mitmproxy container. It allows to declare a mitmproxy container, bind it as a service to a different container and retrieve the mitmproxy stream file. + """ + + MITMPROXY_IMAGE = "mitmproxy/mitmproxy:10.2.4" + MITM_STREAM_FILE = "stream.mitm" + PROXY_PORT = 8080 + MITM_ADDONS_PATH = mitm_addons.__file__ + + def __init__( + self, + dagger_client: dagger.Client, + hostname: str, + session_id: str, + stream_for_server_replay: Optional[dagger.File] = None, + ) -> None: + self.dagger_client = dagger_client + self.hostname = hostname + self.session_id = session_id + self.stream_for_server_replay = stream_for_server_replay + + @property + def dump_cache_volume(self) -> dagger.CacheVolume: + # We namespace the cache by: + # - Mitmproxy image name to make sure we're not re-using a cached artifact on a different and potentially incompatible mitmproxy version + # - Hostname to avoid sharing the same https dump between different tests + # - Session id to avoid sharing the same https dump between different runs of the same tests + # The session id is set to the Airbyte Connection ID to ensure that no cache is shared between connections + return self.dagger_client.cache_volume(f"{self.MITMPROXY_IMAGE}{self.hostname}{self.session_id}") + + @property + def mitmproxy_dir_cache(self) -> dagger.CacheVolume: + return self.dagger_client.cache_volume(self.MITMPROXY_IMAGE) + + async def get_container( + self, + ) -> dagger.Container: + """Get a container for the mitmproxy service. + If a stream for server replay is provided, it will be used to replay requests to the same URL. + + Returns: + dagger.Container: The container for the mitmproxy service. + """ + container_addons_path = "/addons.py" + proxy_container = ( + self.dagger_client.container() + .from_(self.MITMPROXY_IMAGE) + .with_exec(["mkdir", "-p", "/home/mitmproxy/.mitmproxy"], skip_entrypoint=True) + # This is caching the mitmproxy stream files, which can contain sensitive information + # We want to nuke this cache after test suite execution. + .with_mounted_cache("/dumps", self.dump_cache_volume) + # This is caching the mitmproxy self-signed certificate, no sensitive information is stored in it + .with_mounted_cache("/home/mitmproxy/.mitmproxy", self.mitmproxy_dir_cache) + .with_file( + container_addons_path, + self.dagger_client.host().file(self.MITM_ADDONS_PATH), + ) + ) + + # If the proxy was instantiated with a stream for server replay from a previous run, we want to use it. + # Requests to the same URL will be replayed from the stream instead of being sent to the server. + # This is useful to avoid rate limiting issues and limits responses drifts due to time based logics. + if self.stream_for_server_replay is not None and await self.stream_for_server_replay.size() > 0: + proxy_container = proxy_container.with_file("/cache.mitm", self.stream_for_server_replay) + command = [ + "mitmdump", + "-s", + container_addons_path, + "--flow-detail", + "2", + "--server-replay", + "/cache.mitm", + "--save-stream-file", + f"/dumps/{self.MITM_STREAM_FILE}", + ] + else: + command = [ + "mitmdump", + "-s", + container_addons_path, + "--flow-detail", + "2", + "--save-stream-file", + f"/dumps/{self.MITM_STREAM_FILE}", + ] + + return proxy_container.with_exec(command) + + async def get_service(self) -> dagger.Service: + return (await self.get_container()).with_exposed_port(self.PROXY_PORT).as_service() + + async def bind_container(self, container: dagger.Container) -> dagger.Container: + """Bind a container to the proxy service and set environment variables to use the proxy for HTTP(S) traffic. + + Args: + container (dagger.Container): The container to bind to the proxy service. + + Returns: + dagger.Container: The container with the proxy service bound and environment variables set. + """ + cert_path_in_volume = "/mitmproxy_dir/mitmproxy-ca.pem" + requests_cert_path = "/usr/local/lib/python3.9/site-packages/certifi/cacert.pem" + ca_certificate_path = "/usr/local/share/ca-certificates/mitmproxy.crt" + try: + return await ( + container.with_service_binding(self.hostname, await self.get_service()) + .with_mounted_cache("/mitmproxy_dir", self.mitmproxy_dir_cache) + .with_exec( + ["cp", cert_path_in_volume, requests_cert_path], + skip_entrypoint=True, + ) + .with_exec( + ["cp", cert_path_in_volume, ca_certificate_path], + skip_entrypoint=True, + ) + # The following command make the container use the proxy for all outgoing HTTP requests + .with_env_variable("REQUESTS_CA_BUNDLE", requests_cert_path) + .with_exec(["update-ca-certificates"], skip_entrypoint=True) + .with_env_variable("http_proxy", f"{self.hostname}:{self.PROXY_PORT}") + .with_env_variable("https_proxy", f"{self.hostname}:{self.PROXY_PORT}") + ) + except dagger.DaggerError as e: + # This is likely hapenning on Java connector images whose certificates location is different + # TODO handle this case + logging.warn(f"Failed to bind container to proxy: {e}") + return container + + async def retrieve_http_dump(self) -> Optional[dagger.File]: + """We mount the cache volume, where the mitmproxy container saves the stream file, to a fresh container. + We then copy the stream file to a new directory and return it as a dagger.File. + The copy operation to /to_export is required as Dagger does not support direct access to files in cache volumes. + + + Returns: + Optional[dagger.File]: The mitmproxy stream file if it exists, None otherwise. + """ + container = ( + self.dagger_client.container() + .from_("alpine:latest") + .with_env_variable("CACHEBUSTER", str(uuid.uuid4())) + .with_mounted_cache("/dumps", self.dump_cache_volume) + ) + dump_files = (await container.with_exec(["ls", "/dumps"], skip_entrypoint=True).stdout()).splitlines() + if self.MITM_STREAM_FILE not in dump_files: + return None + return await ( + container.with_exec(["mkdir", "/to_export"]) + .with_exec( + [ + "cp", + "-r", + f"/dumps/{self.MITM_STREAM_FILE}", + f"/to_export/{self.MITM_STREAM_FILE}", + ] + ) + .file(f"/to_export/{self.MITM_STREAM_FILE}") + ) + + async def clear_cache_volume(self) -> None: + """Delete all files in the cache volume. This is useful to avoid caching sensitive information between tests.""" + await ( + self.dagger_client.container() + .from_("alpine:latest") + .with_mounted_cache("/to_clear", self.dump_cache_volume) + .with_exec(["rm", "-rf", "/to_clear/*"]) + .sync() + ) + logging.info(f"Cache volume {self.dump_cache_volume} cleared") diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/secret_access.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/secret_access.py new file mode 100644 index 0000000000000..1545cc8ce25b3 --- /dev/null +++ b/airbyte-ci/connectors/live-tests/src/live_tests/commons/secret_access.py @@ -0,0 +1,38 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + +import logging + +from google.api_core.exceptions import PermissionDenied +from google.cloud import secretmanager + +LIVE_TESTS_AIRBYTE_API_KEY_SECRET_ID = "projects/587336813068/secrets/live_tests_airbyte_api_key" + + +def get_secret_value(secret_manager_client: secretmanager.SecretManagerServiceClient, secret_id: str) -> str: + """Get the value of the enabled version of a secret + + Args: + secret_manager_client (secretmanager.SecretManagerServiceClient): The secret manager client + secret_id (str): The id of the secret + + Returns: + str: The value of the enabled version of the secret + """ + try: + response = secret_manager_client.list_secret_versions(request={"parent": secret_id, "filter": "state:ENABLED"}) + if len(response.versions) == 0: + raise ValueError(f"No enabled version of secret {secret_id} found") + enabled_version = response.versions[0] + response = secret_manager_client.access_secret_version(name=enabled_version.name) + return response.payload.data.decode("UTF-8") + except PermissionDenied as e: + logging.error( + f"Permission denied while trying to access secret {secret_id}. Please write to #dev-extensibility in Airbyte Slack for help.", + exc_info=e, + ) + raise e + + +def get_airbyte_api_key() -> str: + secret_manager_client = secretmanager.SecretManagerServiceClient() + return get_secret_value(secret_manager_client, LIVE_TESTS_AIRBYTE_API_KEY_SECRET_ID) diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/segment_tracking.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/segment_tracking.py new file mode 100644 index 0000000000000..7427cf73eca95 --- /dev/null +++ b/airbyte-ci/connectors/live-tests/src/live_tests/commons/segment_tracking.py @@ -0,0 +1,40 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + +import logging +import os +from importlib.metadata import version +from typing import Any, Dict + +import segment.analytics as analytics # type: ignore + +ENABLE_TRACKING = os.getenv("REGRESSION_TEST_DISABLE_TRACKING") is None +DEBUG_SEGMENT = os.getenv("DEBUG_SEGMENT") is not None +EVENT_NAME = "regression_test_start" +CURRENT_VERSION = version(__name__.split(".")[0]) + + +def on_error(error: Exception, items: Any) -> None: + logging.warning("An error occurred in Segment Tracking", exc_info=error) + + +# This is not a secret key, it is a public key that is used to identify the Segment project +analytics.write_key = "hnWfMdEtXNKBjvmJ258F72wShsLmcsZ8" +analytics.send = ENABLE_TRACKING +analytics.debug = DEBUG_SEGMENT +analytics.on_error = on_error + + +def track_usage( + user_id: str, + pytest_options: Dict[str, Any], +) -> None: + analytics.identify(user_id) + # It contains default pytest option and the custom one passed by the user + analytics.track( + user_id, + EVENT_NAME, + { + "pytest_options": pytest_options, + "package_version": CURRENT_VERSION, + }, + ) diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/utils.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/utils.py index 4b62defb821aa..9e4244c4b20a8 100644 --- a/airbyte-ci/connectors/live-tests/src/live_tests/commons/utils.py +++ b/airbyte-ci/connectors/live-tests/src/live_tests/commons/utils.py @@ -1,37 +1,191 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. -import json +import logging +import os +import re +import shutil from pathlib import Path -from typing import Dict, List, Optional +from typing import List, Optional import dagger -from live_tests.commons.connector_runner import SecretDict, get_connector_container -from live_tests.commons.models import ConnectorUnderTest +import docker # type: ignore +import pytest +from mitmproxy import http, io # type: ignore +from mitmproxy.addons.savehar import SaveHar # type: ignore -def get_connector_config(path: Optional[str | Path]) -> Optional[SecretDict]: - if path is None: - return None - return SecretDict(_read_json(path)) +async def get_container_from_id(dagger_client: dagger.Client, container_id: str) -> dagger.Container: + """Get a dagger container from its id. + Please remind that container id are not persistent and can change between Dagger sessions. + + Args: + dagger_client (dagger.Client): The dagger client to use to import the connector image + """ + try: + return await dagger_client.container(id=dagger.ContainerID(container_id)) + except dagger.DaggerError as e: + pytest.exit(f"Failed to load connector container: {e}") + + +async def get_container_from_tarball_path(dagger_client: dagger.Client, tarball_path: Path) -> dagger.Container: + if not tarball_path.exists(): + pytest.exit(f"Connector image tarball {tarball_path} does not exist") + container_under_test_tar_file = ( + dagger_client.host().directory(str(tarball_path.parent), include=tarball_path.name).file(tarball_path.name) + ) + try: + return await dagger_client.container().import_(container_under_test_tar_file) + except dagger.DaggerError as e: + pytest.exit(f"Failed to import connector image from tarball: {e}") + +async def get_container_from_local_image(dagger_client: dagger.Client, local_image_name: str) -> Optional[dagger.Container]: + """Get a dagger container from a local image. + It will use Docker python client to export the image to a tarball and then import it into dagger. -def get_state(path: Optional[str | Path]) -> Optional[Dict]: - if path is None: + Args: + dagger_client (dagger.Client): The dagger client to use to import the connector image + local_image_name (str): The name of the local image to import + + Returns: + Optional[dagger.Container]: The dagger container for the local image or None if the image does not exist + """ + docker_client = docker.from_env() + + try: + image = docker_client.images.get(local_image_name) + except docker.errors.ImageNotFound: return None - return _read_json(path) + image_digest = image.id.replace("sha256:", "") + tarball_path = Path(f"/tmp/{image_digest}.tar") + if not tarball_path.exists(): + logging.info(f"Exporting local connector image {local_image_name} to tarball {tarball_path}") + with open(tarball_path, "wb") as f: + for chunk in image.save(named=True): + f.write(chunk) + return await get_container_from_tarball_path(dagger_client, tarball_path) + + +async def get_container_from_dockerhub_image(dagger_client: dagger.Client, dockerhub_image_name: str) -> dagger.Container: + """Get a dagger container from a dockerhub image. -def _read_json(path: Path | str) -> Dict: - with open(str(path), "r") as file: - contents = file.read() - return json.loads(contents) + Args: + dagger_client (dagger.Client): The dagger client to use to import the connector image + dockerhub_image_name (str): The name of the dockerhub image to import + Returns: + dagger.Container: The dagger container for the dockerhub image + """ + try: + return await dagger_client.container().from_(dockerhub_image_name) + except dagger.DaggerError as e: + pytest.exit(f"Failed to import connector image from DockerHub: {e}") -async def get_connector_under_test(dagger_client: dagger.Client, connector_image_name: str) -> ConnectorUnderTest: - dagger_container = await get_connector_container(dagger_client, connector_image_name) - return ConnectorUnderTest(connector_image_name, dagger_container) + +async def get_connector_container(dagger_client: dagger.Client, image_name_with_tag: str) -> dagger.Container: + """Get a dagger container for the connector image to test. + + Args: + dagger_client (dagger.Client): The dagger client to use to import the connector image + image_name_with_tag (str): The docker image name and tag of the connector image to test + + Returns: + dagger.Container: The dagger container for the connector image to test + """ + # If a container_id.txt file is available, we'll use it to load the connector container + # We use a txt file as container ids can be too long to be passed as env vars + # It's used for dagger-in-dagger use case with airbyte-ci, when the connector container is built via an upstream dagger operation + connector_container_id_path = Path("/tmp/container_id.txt") + if connector_container_id_path.exists(): + # If the CONNECTOR_CONTAINER_ID env var is set, we'll use it to load the connector container + return await get_container_from_id(dagger_client, connector_container_id_path.read_text()) + + # If the CONNECTOR_UNDER_TEST_IMAGE_TAR_PATH env var is set, we'll use it to import the connector image from the tarball + if connector_image_tarball_path := os.environ.get("CONNECTOR_UNDER_TEST_IMAGE_TAR_PATH"): + tarball_path = Path(connector_image_tarball_path) + return await get_container_from_tarball_path(dagger_client, tarball_path) + + # Let's try to load the connector container from a local image + if connector_container := await get_container_from_local_image(dagger_client, image_name_with_tag): + return connector_container + + # If we get here, we'll try to pull the connector image from DockerHub + return await get_container_from_dockerhub_image(dagger_client, image_name_with_tag) def sh_dash_c(lines: List[str]) -> List[str]: """Wrap sequence of commands in shell for safe usage of dagger Container's with_exec method.""" return ["sh", "-c", " && ".join(["set -o xtrace"] + lines)] + + +def clean_up_artifacts(directory: Path, logger: logging.Logger) -> None: + if directory.exists(): + shutil.rmtree(directory) + logger.info(f"🧹 Test artifacts cleaned up from {directory}") + + +def get_http_flows_from_mitm_dump(mitm_dump_path: Path) -> List[http.HTTPFlow]: + """Get http flows from a mitmproxy dump file. + + Args: + mitm_dump_path (Path): Path to the mitmproxy dump file. + + Returns: + List[http.HTTPFlow]: List of http flows. + """ + with open(mitm_dump_path, "rb") as dump_file: + return [f for f in io.FlowReader(dump_file).stream() if isinstance(f, http.HTTPFlow)] + + +def mitm_http_stream_to_har(mitm_http_stream_path: Path, har_file_path: Path) -> Path: + """Converts a mitmproxy http stream file to a har file. + + Args: + mitm_http_stream_path (Path): Path to the mitmproxy http stream file. + har_file_path (Path): Path where the har file will be saved. + + Returns: + Path: Path to the har file. + """ + flows = get_http_flows_from_mitm_dump(mitm_http_stream_path) + SaveHar().export_har(flows, str(har_file_path)) + return har_file_path + + +def extract_connection_id_from_url(url: str) -> str: + pattern = r"/connections/([a-f0-9\-]+)" + match = re.search(pattern, url) + if match: + return match.group(1) + else: + raise ValueError(f"Could not extract connection id from url {url}") + + +def extract_workspace_id_from_url(url: str) -> str: + pattern = r"/workspaces/([a-f0-9\-]+)" + match = re.search(pattern, url) + if match: + return match.group(1) + else: + raise ValueError(f"Could not extract workspace id from url {url}") + + +def build_connection_url(workspace_id: str | None, connection_id: str | None) -> str: + if not workspace_id or not connection_id: + raise ValueError("Both workspace_id and connection_id must be provided") + return f"https://cloud.airbyte.com/workspaces/{workspace_id}/connections/{connection_id}" + + +def sort_dict_keys(d: dict) -> dict: + if isinstance(d, dict): + sorted_dict = {} + for key in sorted(d.keys()): + sorted_dict[key] = sort_dict_keys(d[key]) + return sorted_dict + else: + return d + + +def sanitize_stream_name(stream_name: str) -> str: + return stream_name.replace("/", "_").replace(" ", "_").lower() diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/debug/__init__.py b/airbyte-ci/connectors/live-tests/src/live_tests/debug/__init__.py index 85a4c5094f0ef..3ffc8dc5255f7 100644 --- a/airbyte-ci/connectors/live-tests/src/live_tests/debug/__init__.py +++ b/airbyte-ci/connectors/live-tests/src/live_tests/debug/__init__.py @@ -1,9 +1,10 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. -import dagger import os import sys +import dagger + DAGGER_EXEC_TIMEOUT = dagger.Timeout( int(os.environ.get("DAGGER_EXEC_TIMEOUT", "3600")) ) # One hour by default diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/debug/cli.py b/airbyte-ci/connectors/live-tests/src/live_tests/debug/cli.py index f42633a87e95d..37556ec7bf540 100644 --- a/airbyte-ci/connectors/live-tests/src/live_tests/debug/cli.py +++ b/airbyte-ci/connectors/live-tests/src/live_tests/debug/cli.py @@ -1,16 +1,22 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. -import time +import logging +import textwrap from pathlib import Path from typing import List, Optional import asyncclick as click import dagger -from airbyte_protocol.models import ConfiguredAirbyteCatalog # type: ignore +from live_tests.commons.connection_objects_retrieval import COMMAND_TO_REQUIRED_OBJECT_TYPES, get_connection_objects from live_tests.commons.connector_runner import ConnectorRunner -from live_tests.commons.models import Command, ExecutionInputs, ExecutionReport -from live_tests.commons.utils import get_connector_config, get_connector_under_test, get_state +from live_tests.commons.models import ActorType, Command, ConnectionObjects, ConnectorUnderTest, ExecutionInputs, TargetOrControl +from live_tests.commons.utils import clean_up_artifacts from live_tests.debug import DAGGER_CONFIG +from rich.prompt import Prompt + +from .consts import MAIN_OUTPUT_DIRECTORY + +LOGGER = logging.getLogger("debug_command") @click.command( @@ -22,74 +28,109 @@ type=click.Choice([c.value for c in Command]), callback=lambda _, __, value: Command(value), ) -@click.option( - "-c", - "--connector-image", - "connector_images", - help="Docker image name of the connector to debug (e.g. `airbyte/source-faker:latest`, `airbyte/source-faker:dev`)", - multiple=True, - type=str, - required=True, -) -@click.option( - "-o", - "--output-directory", - help="Directory in which connector output and test results should be stored. Defaults to the current directory.", - default=Path("live_tests_debug_reports"), - type=click.Path(file_okay=False, dir_okay=True, resolve_path=True, path_type=Path), -) +@click.option("--connection-id", type=str, required=False, default=None) @click.option( "--config-path", - help="Path to the connector config.", - type=click.Path(exists=True, file_okay=True, dir_okay=False, resolve_path=True, path_type=Path), + type=click.Path(file_okay=True, readable=True, dir_okay=False, resolve_path=True, path_type=Path), + required=False, + default=None, ) @click.option( "--catalog-path", - help="Path to the connector catalog.", - type=click.Path(exists=True, file_okay=True, dir_okay=False, resolve_path=True, path_type=Path), + type=click.Path(file_okay=True, readable=True, dir_okay=False, resolve_path=True, path_type=Path), + required=False, + default=None, ) @click.option( "--state-path", - help="Path to the connector state.", - type=click.Path(exists=True, file_okay=True, dir_okay=False, resolve_path=True, path_type=Path), + type=click.Path(file_okay=True, readable=True, dir_okay=False, resolve_path=True, path_type=Path), + required=False, + default=None, ) @click.option( - "-hc", - "--http-cache", - "enable_http_cache", - help="Use the HTTP cache for the connector.", - default=True, - is_flag=True, - type=bool, + "-c", + "--connector-image", + "connector_images", + help="Docker image name of the connector to debug (e.g. `airbyte/source-faker:latest`, `airbyte/source-faker:dev`)", + multiple=True, + type=str, + required=True, ) # TODO add an env var options to pass to the connector @click.pass_context async def debug_cmd( ctx: click.Context, command: Command, + connection_id: Optional[str], + config_path: Optional[Path], + catalog_path: Optional[Path], + state_path: Optional[Path], connector_images: List[str], - output_directory: Path, - config_path: Optional[str], - catalog_path: Optional[str], - state_path: Optional[str], - enable_http_cache: bool, ) -> None: - output_directory.mkdir(parents=True, exist_ok=True) - debug_session_start_time = int(time.time()) + if connection_id: + retrieval_reason = click.prompt("👮‍♂️ Please provide a reason for accessing the connection objects. This will be logged") + else: + retrieval_reason = None + + try: + connection_objects = get_connection_objects( + COMMAND_TO_REQUIRED_OBJECT_TYPES[command], + connection_id, + config_path, + catalog_path, + state_path, + retrieval_reason, + ) + except ValueError as e: + raise click.UsageError(str(e)) async with dagger.Connection(config=DAGGER_CONFIG) as dagger_client: - for connector_image in connector_images: - try: - execution_inputs = ExecutionInputs( - connector_under_test=await get_connector_under_test(dagger_client, connector_image), - command=command, - config=get_connector_config(config_path), - catalog=ConfiguredAirbyteCatalog.parse_file(catalog_path) if catalog_path else None, - state=get_state(state_path) if state_path else None, - environment_variables=None, - enable_http_cache=enable_http_cache, + MAIN_OUTPUT_DIRECTORY.mkdir(parents=True, exist_ok=True) + try: + for connector_image in connector_images: + await _execute_command_and_save_artifacts( + dagger_client, + connector_image, + command, + connection_objects, ) - except ValueError as e: - raise click.UsageError(str(e)) - execution_result = await ConnectorRunner(dagger_client, **execution_inputs.to_dict()).run() - execution_report = ExecutionReport(execution_inputs, execution_result, created_at=debug_session_start_time) - await execution_report.save_to_disk(output_directory) + + Prompt.ask( + textwrap.dedent( + """ + Debug artifacts will be destroyed after this prompt. + Press enter when you're done reading them. + 🚨 Do not copy them elsewhere on your disk!!! 🚨 + """ + ) + ) + finally: + clean_up_artifacts(MAIN_OUTPUT_DIRECTORY, LOGGER) + + +async def _execute_command_and_save_artifacts( + dagger_client: dagger.Client, + connector_image: str, + command: Command, + connection_objects: ConnectionObjects, +) -> None: + try: + connector_under_test = await ConnectorUnderTest.from_image_name(dagger_client, connector_image, TargetOrControl.CONTROL) + if connector_under_test.actor_type is ActorType.SOURCE: + actor_id = connection_objects.source_id + else: + actor_id = connection_objects.destination_id + assert actor_id is not None + execution_inputs = ExecutionInputs( + global_output_dir=MAIN_OUTPUT_DIRECTORY, + connector_under_test=connector_under_test, + command=command, + config=connection_objects.source_config, + configured_catalog=connection_objects.configured_catalog, + state=connection_objects.state, + environment_variables=None, + actor_id=actor_id, + ) + except ValueError as e: + raise click.UsageError(str(e)) + execution_result = await ConnectorRunner(dagger_client, execution_inputs).run() + await execution_result.save_artifacts(MAIN_OUTPUT_DIRECTORY) diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/debug/consts.py b/airbyte-ci/connectors/live-tests/src/live_tests/debug/consts.py new file mode 100644 index 0000000000000..855a6c8c5d107 --- /dev/null +++ b/airbyte-ci/connectors/live-tests/src/live_tests/debug/consts.py @@ -0,0 +1,5 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + +from pathlib import Path + +MAIN_OUTPUT_DIRECTORY = Path("/tmp/debug_artifacts") diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/.gitignore b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/.gitignore deleted file mode 100644 index 452eecef73dd4..0000000000000 --- a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/.gitignore +++ /dev/null @@ -1 +0,0 @@ -regression_tests_artifacts diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/conftest.py b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/conftest.py index ba60ce4a21b4e..56a02d6278b15 100644 --- a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/conftest.py +++ b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/conftest.py @@ -3,16 +3,36 @@ import logging import os +import textwrap import time +import webbrowser from pathlib import Path -from typing import TYPE_CHECKING, AsyncIterable, Callable, Dict, List, Optional +from typing import TYPE_CHECKING, AsyncGenerator, AsyncIterable, Callable, Dict, Generator, Iterable, List, Optional import dagger import pytest from airbyte_protocol.models import ConfiguredAirbyteCatalog # type: ignore -from live_tests.commons.connector_runner import ConnectorRunner -from live_tests.commons.models import Command, ConnectorUnderTest, ExecutionInputs, ExecutionReport, ExecutionResult, SecretDict -from live_tests.commons.utils import get_connector_config, get_connector_under_test, get_state +from connection_retriever.audit_logging import get_user_email # type: ignore +from connection_retriever.retrieval import ConnectionNotFoundError, NotPermittedError # type: ignore +from live_tests.commons.connection_objects_retrieval import ConnectionObject, get_connection_objects +from live_tests.commons.connector_runner import ConnectorRunner, Proxy +from live_tests.commons.models import ( + ActorType, + Command, + ConnectionObjects, + ConnectorUnderTest, + ExecutionInputs, + ExecutionResult, + SecretDict, + TargetOrControl, +) +from live_tests.commons.secret_access import get_airbyte_api_key +from live_tests.commons.segment_tracking import track_usage +from live_tests.commons.utils import build_connection_url, clean_up_artifacts +from live_tests.regression_tests import stash_keys +from rich.prompt import Confirm, Prompt + +from .report import Report, ReportState if TYPE_CHECKING: from _pytest.config import Config @@ -20,23 +40,16 @@ from _pytest.fixtures import SubRequest from pytest_sugar import SugarTerminalReporter # type: ignore +## CONSTS LOGGER = logging.getLogger("regression_tests") +MAIN_OUTPUT_DIRECTORY = Path("/tmp/regression_tests_artifacts") + # It's used by Dagger and its very verbose logging.getLogger("httpx").setLevel(logging.ERROR) -## STASH KEYS -SESSION_START_TIMESTAMP = pytest.StashKey[int]() -TEST_ARTIFACT_DIRECTORY = pytest.StashKey[Path]() -DAGGER_LOG_PATH = pytest.StashKey[Path]() - ## PYTEST HOOKS def pytest_addoption(parser: Parser) -> None: - parser.addoption( - "--output-directory", - default="./regression_tests_artifacts", - help="Path to a directory where the test execution reports will be stored", - ) parser.addoption( "--connector-image", help="The connector image name on which the regressions tests will run: e.g. airbyte/source-faker", @@ -54,73 +67,184 @@ def pytest_addoption(parser: Parser) -> None: parser.addoption("--config-path") parser.addoption("--catalog-path") parser.addoption("--state-path") + parser.addoption("--connection-id") + parser.addoption("--pr-url", help="The URL of the PR you are testing") def pytest_configure(config: Config) -> None: + user_email = get_user_email() + prompt_for_confirmation(user_email) + track_usage(user_email, vars(config.option)) + + config.stash[stash_keys.AIRBYTE_API_KEY] = get_airbyte_api_key() + config.stash[stash_keys.USER] = user_email start_timestamp = int(time.time()) - main_output_directory = Path(config.option.output_directory) - test_artifacts_directory = main_output_directory / f"session_{start_timestamp}" + test_artifacts_directory = MAIN_OUTPUT_DIRECTORY / f"session_{start_timestamp}" + duckdb_path = test_artifacts_directory / "duckdb.db" + config.stash[stash_keys.DUCKDB_PATH] = duckdb_path test_artifacts_directory.mkdir(parents=True, exist_ok=True) dagger_log_path = test_artifacts_directory / "dagger.log" - config.stash[SESSION_START_TIMESTAMP] = start_timestamp - config.stash[TEST_ARTIFACT_DIRECTORY] = test_artifacts_directory + config.stash[stash_keys.IS_PERMITTED_BOOL] = False + report_path = test_artifacts_directory / "report.html" + config.stash[stash_keys.SESSION_START_TIMESTAMP] = start_timestamp + config.stash[stash_keys.TEST_ARTIFACT_DIRECTORY] = test_artifacts_directory dagger_log_path.touch() - config.stash[DAGGER_LOG_PATH] = dagger_log_path + config.stash[stash_keys.DAGGER_LOG_PATH] = dagger_log_path + config.stash[stash_keys.PR_URL] = get_option_or_fail(config, "--pr-url") + config.stash[stash_keys.CONNECTION_ID] = get_option_or_fail(config, "--connection-id") + + config.stash[stash_keys.CONNECTOR_IMAGE] = get_option_or_fail(config, "--connector-image") + config.stash[stash_keys.CONTROL_VERSION] = get_option_or_fail(config, "--control-version") + config.stash[stash_keys.TARGET_VERSION] = get_option_or_fail(config, "--target-version") + if config.stash[stash_keys.CONTROL_VERSION] == config.stash[stash_keys.TARGET_VERSION]: + pytest.exit(f"Control and target versions are the same: {control_version}. Please provide different versions.") + custom_source_config_path = config.getoption("--config-path") + custom_configured_catalog_path = config.getoption("--catalog-path") + custom_state_path = config.getoption("--state-path") + config.stash[stash_keys.SHOULD_READ_WITH_STATE] = prompt_for_read_with_or_without_state() + retrieval_reason = f"Running regression tests on connection {config.stash[stash_keys.CONNECTION_ID]} for connector {config.stash[stash_keys.CONNECTOR_IMAGE]} on the control ({config.stash[stash_keys.CONTROL_VERSION]}) and target versions ({config.stash[stash_keys.TARGET_VERSION]})." + try: + config.stash[stash_keys.CONNECTION_OBJECTS] = get_connection_objects( + { + ConnectionObject.SOURCE_CONFIG, + ConnectionObject.CATALOG, + ConnectionObject.CONFIGURED_CATALOG, + ConnectionObject.STATE, + ConnectionObject.WORKSPACE_ID, + ConnectionObject.SOURCE_DOCKER_IMAGE, + ConnectionObject.SOURCE_ID, + ConnectionObject.DESTINATION_ID, + }, + config.stash[stash_keys.CONNECTION_ID], + Path(custom_source_config_path) if custom_source_config_path else None, + Path(custom_configured_catalog_path) if custom_configured_catalog_path else None, + Path(custom_state_path) if custom_state_path else None, + retrieval_reason, + fail_if_missing_objects=False, + connector_image=config.stash[stash_keys.CONNECTOR_IMAGE], + ) + config.stash[stash_keys.IS_PERMITTED_BOOL] = True + except (ConnectionNotFoundError, NotPermittedError) as exc: + clean_up_artifacts(MAIN_OUTPUT_DIRECTORY, LOGGER) + pytest.exit(str(exc)) + if config.stash[stash_keys.CONNECTION_OBJECTS].workspace_id and config.stash[stash_keys.CONNECTION_ID]: + config.stash[stash_keys.CONNECTION_URL] = build_connection_url( + config.stash[stash_keys.CONNECTION_OBJECTS].workspace_id, + config.stash[stash_keys.CONNECTION_ID], + ) + else: + config.stash[stash_keys.CONNECTION_URL] = None + config.stash[stash_keys.REPORT] = Report( + report_path, + config, + ) + webbrowser.open_new_tab(config.stash[stash_keys.REPORT].path.resolve().as_uri()) + + +def pytest_collection_modifyitems(config: pytest.Config, items: List[pytest.Item]) -> None: + for item in items: + if config.stash[stash_keys.SHOULD_READ_WITH_STATE] and "without_state" in item.keywords: + item.add_marker(pytest.mark.skip(reason="Test is marked with without_state marker")) + if not config.stash[stash_keys.SHOULD_READ_WITH_STATE] and "with_state" in item.keywords: + item.add_marker(pytest.mark.skip(reason="Test is marked with with_state marker")) def pytest_terminal_summary(terminalreporter: SugarTerminalReporter, exitstatus: int, config: Config) -> None: + config.stash[stash_keys.REPORT].update(ReportState.FINISHED) + if not config.stash.get(stash_keys.IS_PERMITTED_BOOL, False): + # Don't display the prompt if the tests were not run due to inability to fetch config + clean_up_artifacts(MAIN_OUTPUT_DIRECTORY, LOGGER) + pytest.exit(str(NotPermittedError)) + terminalreporter.ensure_newline() terminalreporter.section("Test artifacts", sep="=", bold=True, blue=True) - terminalreporter.line(f"All tests artifacts for this sessions should be available in {config.stash[TEST_ARTIFACT_DIRECTORY].resolve()}") - terminalreporter.section("Dagger logs", sep=".") - terminalreporter.line(f"Dagger logs are stored in {config.stash[DAGGER_LOG_PATH]}") - artifact_subsection: Dict[str, List[str]] = {} - for report in terminalreporter.reports: - properties_dict = { - record_property_key: record_property_value for record_property_key, record_property_value in report.user_properties - } - if "control_execution_report" in properties_dict or "target_execution_report" in properties_dict: - artifact_subsection[report.head_line] = [] - if "control_execution_report" in properties_dict: - artifact_subsection[report.head_line].append( - f"Control execution artifacts stored in {properties_dict['control_execution_report'].saved_path}" - ) - if "target_execution_report" in properties_dict: - artifact_subsection[report.head_line].append( - f"Target execution artifacts stored in {properties_dict['target_execution_report'].saved_path}" - ) - - if artifact_subsection: - terminalreporter.ensure_newline() - for section, artifact_lines in artifact_subsection.items(): - terminalreporter.ensure_newline() - terminalreporter.section(section, sep=".") - terminalreporter.line(os.linesep.join(artifact_lines)) + terminalreporter.line( + f"All tests artifacts for this sessions should be available in {config.stash[stash_keys.TEST_ARTIFACT_DIRECTORY].resolve()}" + ) + + try: + Prompt.ask( + textwrap.dedent( + """ + Test artifacts will be destroyed after this prompt. + Press enter when you're done reading them. + 🚨 Do not copy them elsewhere on your disk!!! 🚨 + """ + ) + ) + finally: + clean_up_artifacts(MAIN_OUTPUT_DIRECTORY, LOGGER) + + +def pytest_keyboard_interrupt(excinfo: Exception) -> None: + LOGGER.error("Test execution was interrupted by the user. Cleaning up test artifacts.") + clean_up_artifacts(MAIN_OUTPUT_DIRECTORY, LOGGER) + + +@pytest.hookimpl(tryfirst=True, hookwrapper=True) +def pytest_runtest_makereport(item: pytest.Item, call: pytest.CallInfo) -> Generator: + outcome = yield + report = outcome.get_result() + # This is to add skipped or failed tests due to upstream fixture failures on setup + if report.outcome in ["failed", "skipped"]: + item.config.stash[stash_keys.REPORT].add_test_result( + report, + item.function.__doc__, # type: ignore + ) + + elif report.when == "call": + item.config.stash[stash_keys.REPORT].add_test_result( + report, + item.function.__doc__, # type: ignore + ) ## HELPERS -async def persist_report( - request: SubRequest, - output_directory: Path, - execution_inputs: ExecutionInputs, - execution_result: ExecutionResult, - session_start_timestamp: int, -) -> ExecutionReport: - test_name = request.node.name - test_output_directory = Path(output_directory / test_name) - test_output_directory.mkdir(parents=True, exist_ok=True) - report = ExecutionReport(execution_inputs, execution_result, created_at=session_start_timestamp) - await report.save_to_disk(test_output_directory) - LOGGER.info(f"Execution report saved to {test_output_directory}") - return report - - -def get_option_or_fail(request: SubRequest, option: str) -> str: - if option_value := request.config.getoption(option): + + +def get_option_or_fail(config: pytest.Config, option: str) -> str: + if option_value := config.getoption(option): return option_value pytest.fail(f"Missing required option: {option}") +def prompt_for_confirmation(user_email: str) -> None: + message = textwrap.dedent( + f""" + 👮 This program is running on live Airbyte Cloud connection. + It means that it might induce costs or rate limits on the source. + This program is storing tests artifacts in {MAIN_OUTPUT_DIRECTORY.resolve()} that you can use for debugging. They will get destroyed after the program execution. + + By approving this prompt, you ({user_email}) confirm that: + 1. You understand the implications of running this test suite. + 2. You have selected the correct target and control versions. + 3. You have selected the right tests according to your testing needs. + 4. You will not copy the test artifacts content. + 5. You want to run the program on the passed connection ID. + + Usage of this tool is tracked and logged. + + Do you want to continue? + """ + ) + if not os.environ.get("CI") and not Confirm.ask(message): + pytest.exit("Test execution was interrupted by the user.") + + +def prompt_for_read_with_or_without_state() -> bool: + message = textwrap.dedent( + """ + 📖 Do you want to run the read command with or without state? + 1. Run the read command with state + 2. Run the read command without state + + We recommend reading with state to properly test incremental sync. + But if the target version introduces a breaking change in the state, you might want to run without state. + """ + ) + return Prompt.ask(message) == "1" + + ## FIXTURES @@ -131,51 +255,89 @@ def anyio_backend() -> str: @pytest.fixture(scope="session") def session_start_timestamp(request: SubRequest) -> int: - return request.config.stash[SESSION_START_TIMESTAMP] + return request.config.stash[stash_keys.SESSION_START_TIMESTAMP] @pytest.fixture(scope="session") def test_artifacts_directory(request: SubRequest) -> Path: - return request.config.stash[TEST_ARTIFACT_DIRECTORY] + return request.config.stash[stash_keys.TEST_ARTIFACT_DIRECTORY] @pytest.fixture(scope="session") def connector_image(request: SubRequest) -> str: - return get_option_or_fail(request, "--connector-image") + return request.config.stash[stash_keys.CONNECTOR_IMAGE] @pytest.fixture(scope="session") def control_version(request: SubRequest) -> str: - return get_option_or_fail(request, "--control-version") + return request.config.stash[stash_keys.CONTROL_VERSION] @pytest.fixture(scope="session") def target_version(request: SubRequest) -> str: - return get_option_or_fail(request, "--target-version") + return request.config.stash[stash_keys.TARGET_VERSION] @pytest.fixture(scope="session") -def catalog(request: SubRequest) -> Optional[ConfiguredAirbyteCatalog]: - catalog_path = get_option_or_fail(request, "--catalog-path") - return ConfiguredAirbyteCatalog.parse_file(catalog_path) if catalog_path else None +def connection_id(request: SubRequest) -> Optional[str]: + return request.config.stash[stash_keys.CONNECTION_ID] @pytest.fixture(scope="session") -def connector_config(request: SubRequest) -> Optional[SecretDict]: - return get_connector_config(get_option_or_fail(request, "--config-path")) +def connection_objects(request: SubRequest) -> ConnectionObjects: + return request.config.stash[stash_keys.CONNECTION_OBJECTS] @pytest.fixture(scope="session") -def state(request: SubRequest) -> Optional[dict]: - return get_state(get_option_or_fail(request, "--state-path")) +def connector_config(connection_objects: ConnectionObjects) -> Optional[SecretDict]: + return connection_objects.source_config @pytest.fixture(scope="session") -def dagger_connection(request: SubRequest) -> dagger.Connection: - return dagger.Connection(dagger.Config(log_output=request.config.stash[DAGGER_LOG_PATH].open("w"))) +def actor_id(connection_objects: ConnectionObjects, control_connector: ConnectorUnderTest) -> str | None: + if control_connector.actor_type is ActorType.SOURCE: + return connection_objects.source_id + elif control_connector.actor_type is ActorType.DESTINATION: + return connection_objects.destination_id + else: + raise ValueError(f"Actor type {control_connector.actor_type} is not supported") + + +@pytest.fixture(scope="session") +def configured_catalog( + connection_objects: ConnectionObjects, +) -> ConfiguredAirbyteCatalog: + if not connection_objects.configured_catalog: + pytest.skip("Catalog is not provided. The catalog fixture can't be used.") + assert connection_objects.configured_catalog is not None + return connection_objects.configured_catalog + + +@pytest.fixture(scope="session", autouse=True) +def primary_keys_per_stream( + configured_catalog: ConfiguredAirbyteCatalog, +) -> Dict[str, Optional[List[str]]]: + return {stream.stream.name: stream.primary_key[0] if getattr(stream, "primary_key") else None for stream in configured_catalog.streams} @pytest.fixture(scope="session") +def configured_streams( + configured_catalog: ConfiguredAirbyteCatalog, +) -> Iterable[str]: + return {stream.stream.name for stream in configured_catalog.streams} + + +@pytest.fixture(scope="session") +def state(connection_objects: ConnectionObjects) -> Optional[Dict]: + return connection_objects.state + + +@pytest.fixture(scope="session") +def dagger_connection(request: SubRequest) -> dagger.Connection: + return dagger.Connection(dagger.Config(log_output=request.config.stash[stash_keys.DAGGER_LOG_PATH].open("w"))) + + +@pytest.fixture(scope="session", autouse=True) async def dagger_client( dagger_connection: dagger.Connection, ) -> AsyncIterable[dagger.Client]: @@ -185,400 +347,521 @@ async def dagger_client( @pytest.fixture(scope="session") async def control_connector(dagger_client: dagger.Client, connector_image: str, control_version: str) -> ConnectorUnderTest: - return await get_connector_under_test(dagger_client, f"{connector_image}:{control_version}") + return await ConnectorUnderTest.from_image_name(dagger_client, f"{connector_image}:{control_version}", TargetOrControl.CONTROL) @pytest.fixture(scope="session") async def target_connector(dagger_client: dagger.Client, connector_image: str, target_version: str) -> ConnectorUnderTest: - return await get_connector_under_test(dagger_client, f"{connector_image}:{target_version}") + return await ConnectorUnderTest.from_image_name(dagger_client, f"{connector_image}:{target_version}", TargetOrControl.TARGET) + +@pytest.fixture(scope="session") +def duckdb_path(request: SubRequest) -> Path: + return request.config.stash[stash_keys.DUCKDB_PATH] -@pytest.fixture + +@pytest.fixture(scope="session") def spec_control_execution_inputs( control_connector: ConnectorUnderTest, + actor_id: str, + test_artifacts_directory: Path, + duckdb_path: Path, ) -> ExecutionInputs: - return ExecutionInputs(connector_under_test=control_connector, command=Command.SPEC) + return ExecutionInputs( + connector_under_test=control_connector, + actor_id=actor_id, + command=Command.SPEC, + global_output_dir=test_artifacts_directory, + duckdb_path=duckdb_path, + ) -@pytest.fixture -def spec_control_connector_runner(dagger_client: dagger.Client, spec_control_execution_inputs: ExecutionInputs) -> ConnectorRunner: - return ConnectorRunner(dagger_client, **spec_control_execution_inputs.to_dict()) +@pytest.fixture(scope="session") +def spec_control_connector_runner( + dagger_client: dagger.Client, + spec_control_execution_inputs: ExecutionInputs, +) -> ConnectorRunner: + runner = ConnectorRunner( + dagger_client, + spec_control_execution_inputs, + ) + return runner -@pytest.fixture +@pytest.fixture(scope="session") async def spec_control_execution_result( - record_property: Callable, request: SubRequest, - test_artifacts_directory: Path, spec_control_execution_inputs: ExecutionInputs, spec_control_connector_runner: ConnectorRunner, - session_start_timestamp: int, ) -> ExecutionResult: logging.info(f"Running spec for control connector {spec_control_execution_inputs.connector_under_test.name}") execution_result = await spec_control_connector_runner.run() - execution_report = await persist_report( - request, - test_artifacts_directory, - spec_control_execution_inputs, - execution_result, - session_start_timestamp, - ) - record_property("control_execution_report", execution_report) + request.config.stash[stash_keys.REPORT].add_control_execution_result(execution_result) return execution_result -@pytest.fixture +@pytest.fixture(scope="session") def spec_target_execution_inputs( target_connector: ConnectorUnderTest, + actor_id: str, + test_artifacts_directory: Path, + duckdb_path: Path, ) -> ExecutionInputs: - return ExecutionInputs(connector_under_test=target_connector, command=Command.SPEC) + return ExecutionInputs( + connector_under_test=target_connector, + actor_id=actor_id, + global_output_dir=test_artifacts_directory, + command=Command.SPEC, + duckdb_path=duckdb_path, + ) -@pytest.fixture -def spec_target_connector_runner(dagger_client: dagger.Client, spec_target_execution_inputs: ExecutionInputs) -> ConnectorRunner: - return ConnectorRunner(dagger_client, **spec_target_execution_inputs.to_dict()) +@pytest.fixture(scope="session") +def spec_target_connector_runner( + dagger_client: dagger.Client, + spec_target_execution_inputs: ExecutionInputs, +) -> ConnectorRunner: + runner = ConnectorRunner( + dagger_client, + spec_target_execution_inputs, + ) + return runner -@pytest.fixture +@pytest.fixture(scope="session") async def spec_target_execution_result( - record_property: Callable, request: SubRequest, - test_artifacts_directory: Path, - spec_control_execution_result: ExecutionResult, spec_target_execution_inputs: ExecutionInputs, spec_target_connector_runner: ConnectorRunner, - session_start_timestamp: int, ) -> ExecutionResult: logging.info(f"Running spec for target connector {spec_target_execution_inputs.connector_under_test.name}") execution_result = await spec_target_connector_runner.run() - execution_report = await persist_report( - request, - test_artifacts_directory, - spec_target_execution_inputs, - execution_result, - session_start_timestamp, - ) - record_property("target_execution_report", execution_report) + + request.config.stash[stash_keys.REPORT].add_target_execution_result(execution_result) + return execution_result -@pytest.fixture -def check_control_execution_inputs(control_connector: ConnectorUnderTest, connector_config: SecretDict) -> ExecutionInputs: +@pytest.fixture(scope="session") +def check_control_execution_inputs( + control_connector: ConnectorUnderTest, + actor_id: str, + connector_config: SecretDict, + test_artifacts_directory: Path, + duckdb_path: Path, +) -> ExecutionInputs: return ExecutionInputs( connector_under_test=control_connector, + actor_id=actor_id, + global_output_dir=test_artifacts_directory, command=Command.CHECK, config=connector_config, + duckdb_path=duckdb_path, ) -@pytest.fixture -def check_control_connector_runner(dagger_client: dagger.Client, check_control_execution_inputs: ExecutionInputs) -> ConnectorRunner: - return ConnectorRunner(dagger_client, **check_control_execution_inputs.to_dict()) +@pytest.fixture(scope="session") +async def check_control_connector_runner( + dagger_client: dagger.Client, + check_control_execution_inputs: ExecutionInputs, + connection_id: str, +) -> AsyncGenerator: + proxy = Proxy(dagger_client, "proxy_server_check_control", connection_id) + + runner = ConnectorRunner( + dagger_client, + check_control_execution_inputs, + http_proxy=proxy, + ) + yield runner + await proxy.clear_cache_volume() -@pytest.fixture +@pytest.fixture(scope="session") async def check_control_execution_result( - record_property: Callable, request: SubRequest, - test_artifacts_directory: Path, check_control_execution_inputs: ExecutionInputs, check_control_connector_runner: ConnectorRunner, - session_start_timestamp: int, ) -> ExecutionResult: logging.info(f"Running check for control connector {check_control_execution_inputs.connector_under_test.name}") execution_result = await check_control_connector_runner.run() - execution_report = await persist_report( - request, - test_artifacts_directory, - check_control_execution_inputs, - execution_result, - session_start_timestamp, - ) - record_property("control_execution_report", execution_report) + + request.config.stash[stash_keys.REPORT].add_control_execution_result(execution_result) + return execution_result -@pytest.fixture -def check_target_execution_inputs(target_connector: ConnectorUnderTest, connector_config: SecretDict) -> ExecutionInputs: +@pytest.fixture(scope="session") +def check_target_execution_inputs( + target_connector: ConnectorUnderTest, + actor_id: str, + connector_config: SecretDict, + test_artifacts_directory: Path, + duckdb_path: Path, +) -> ExecutionInputs: return ExecutionInputs( connector_under_test=target_connector, + actor_id=actor_id, + global_output_dir=test_artifacts_directory, command=Command.CHECK, config=connector_config, + duckdb_path=duckdb_path, ) -@pytest.fixture -def check_target_connector_runner(dagger_client: dagger.Client, check_target_execution_inputs: ExecutionInputs) -> ConnectorRunner: - return ConnectorRunner(dagger_client, **check_target_execution_inputs.to_dict()) +@pytest.fixture(scope="session") +async def check_target_connector_runner( + check_control_execution_result: ExecutionResult, + dagger_client: dagger.Client, + check_target_execution_inputs: ExecutionInputs, + connection_id: str, +) -> AsyncGenerator: + proxy = Proxy( + dagger_client, + "proxy_server_check_target", + connection_id, + stream_for_server_replay=check_control_execution_result.http_dump, + ) + runner = ConnectorRunner( + dagger_client, + check_target_execution_inputs, + http_proxy=proxy, + ) + yield runner + await proxy.clear_cache_volume() -@pytest.fixture +@pytest.fixture(scope="session") async def check_target_execution_result( - record_property: Callable, request: SubRequest, test_artifacts_directory: Path, - check_control_execution_result: ExecutionResult, check_target_execution_inputs: ExecutionInputs, check_target_connector_runner: ConnectorRunner, - session_start_timestamp: int, ) -> ExecutionResult: logging.info(f"Running check for target connector {check_target_execution_inputs.connector_under_test.name}") execution_result = await check_target_connector_runner.run() - execution_report = await persist_report( - request, - test_artifacts_directory, - check_target_execution_inputs, - execution_result, - session_start_timestamp, - ) - record_property("target_execution_report", execution_report) + request.config.stash[stash_keys.REPORT].add_target_execution_result(execution_result) + return execution_result -@pytest.fixture -def discover_control_execution_inputs(control_connector: ConnectorUnderTest, connector_config: SecretDict) -> ExecutionInputs: +@pytest.fixture(scope="session") +def discover_control_execution_inputs( + control_connector: ConnectorUnderTest, + actor_id: str, + connector_config: SecretDict, + test_artifacts_directory: Path, + duckdb_path: Path, +) -> ExecutionInputs: return ExecutionInputs( connector_under_test=control_connector, + actor_id=actor_id, + global_output_dir=test_artifacts_directory, command=Command.DISCOVER, config=connector_config, + duckdb_path=duckdb_path, ) -@pytest.fixture +@pytest.fixture(scope="session") async def discover_control_execution_result( - record_property: Callable, request: SubRequest, - test_artifacts_directory: Path, discover_control_execution_inputs: ExecutionInputs, discover_control_connector_runner: ConnectorRunner, - session_start_timestamp: int, ) -> ExecutionResult: logging.info(f"Running discover for control connector {discover_control_execution_inputs.connector_under_test.name}") execution_result = await discover_control_connector_runner.run() - execution_report = await persist_report( - request, - test_artifacts_directory, - discover_control_execution_inputs, - execution_result, - session_start_timestamp, - ) - record_property("control_execution_report", execution_report) + request.config.stash[stash_keys.REPORT].add_control_execution_result(execution_result) + return execution_result -@pytest.fixture -def discover_target_execution_inputs(target_connector: ConnectorUnderTest, connector_config: SecretDict) -> ExecutionInputs: +@pytest.fixture(scope="session") +def discover_target_execution_inputs( + target_connector: ConnectorUnderTest, + actor_id: str, + connector_config: SecretDict, + test_artifacts_directory: Path, + duckdb_path: Path, +) -> ExecutionInputs: return ExecutionInputs( connector_under_test=target_connector, + actor_id=actor_id, + global_output_dir=test_artifacts_directory, command=Command.DISCOVER, config=connector_config, + duckdb_path=duckdb_path, ) -@pytest.fixture -def discover_control_connector_runner(dagger_client: dagger.Client, discover_control_execution_inputs: ExecutionInputs) -> ConnectorRunner: - return ConnectorRunner(dagger_client, **discover_control_execution_inputs.to_dict()) +@pytest.fixture(scope="session") +async def discover_control_connector_runner( + dagger_client: dagger.Client, + discover_control_execution_inputs: ExecutionInputs, + connection_id: str, +) -> AsyncGenerator: + proxy = Proxy(dagger_client, "proxy_server_discover_control", connection_id) + yield ConnectorRunner( + dagger_client, + discover_control_execution_inputs, + http_proxy=proxy, + ) + await proxy.clear_cache_volume() + + +@pytest.fixture(scope="session") +async def discover_target_connector_runner( + dagger_client: dagger.Client, + discover_control_execution_result: ExecutionResult, + discover_target_execution_inputs: ExecutionInputs, + connection_id: str, +) -> AsyncGenerator: + proxy = Proxy( + dagger_client, + "proxy_server_discover_target", + connection_id, + stream_for_server_replay=discover_control_execution_result.http_dump, + ) -@pytest.fixture -def discover_target_connector_runner(dagger_client: dagger.Client, discover_target_execution_inputs: ExecutionInputs) -> ConnectorRunner: - return ConnectorRunner(dagger_client, **discover_target_execution_inputs.to_dict()) + yield ConnectorRunner( + dagger_client, + discover_target_execution_inputs, + http_proxy=proxy, + ) + await proxy.clear_cache_volume() -@pytest.fixture +@pytest.fixture(scope="session") async def discover_target_execution_result( - record_property: Callable, request: SubRequest, - test_artifacts_directory: Path, - discover_control_execution_result: ExecutionResult, discover_target_execution_inputs: ExecutionInputs, discover_target_connector_runner: ConnectorRunner, - session_start_timestamp: int, ) -> ExecutionResult: logging.info(f"Running discover for target connector {discover_target_execution_inputs.connector_under_test.name}") execution_result = await discover_target_connector_runner.run() - execution_report = await persist_report( - request, - test_artifacts_directory, - discover_target_execution_inputs, - execution_result, - session_start_timestamp, - ) - record_property("target_execution_report", execution_report) + request.config.stash[stash_keys.REPORT].add_target_execution_result(execution_result) + return execution_result -@pytest.fixture +@pytest.fixture(scope="session") def read_control_execution_inputs( control_connector: ConnectorUnderTest, + actor_id: str, connector_config: SecretDict, - catalog: ConfiguredAirbyteCatalog, + configured_catalog: ConfiguredAirbyteCatalog, + test_artifacts_directory: Path, + duckdb_path: Path, ) -> ExecutionInputs: return ExecutionInputs( connector_under_test=control_connector, + actor_id=actor_id, + global_output_dir=test_artifacts_directory, command=Command.READ, - catalog=catalog, + configured_catalog=configured_catalog, config=connector_config, + duckdb_path=duckdb_path, ) -@pytest.fixture +@pytest.fixture(scope="session") def read_target_execution_inputs( target_connector: ConnectorUnderTest, + actor_id: str, connector_config: SecretDict, - catalog: ConfiguredAirbyteCatalog, + configured_catalog: ConfiguredAirbyteCatalog, + test_artifacts_directory: Path, + duckdb_path: Path, ) -> ExecutionInputs: return ExecutionInputs( connector_under_test=target_connector, + actor_id=actor_id, + global_output_dir=test_artifacts_directory, command=Command.READ, - catalog=catalog, + configured_catalog=configured_catalog, config=connector_config, + duckdb_path=duckdb_path, ) -@pytest.fixture -def read_control_connector_runner(dagger_client: dagger.Client, read_control_execution_inputs: ExecutionInputs) -> ConnectorRunner: - return ConnectorRunner(dagger_client, **read_control_execution_inputs.to_dict()) +@pytest.fixture(scope="session") +async def read_control_connector_runner( + dagger_client: dagger.Client, + read_control_execution_inputs: ExecutionInputs, + connection_id: str, +) -> AsyncGenerator: + proxy = Proxy(dagger_client, "proxy_server_read_control", connection_id) + + yield ConnectorRunner( + dagger_client, + read_control_execution_inputs, + http_proxy=proxy, + ) + await proxy.clear_cache_volume() -@pytest.fixture +@pytest.fixture(scope="session") async def read_control_execution_result( - record_property: Callable, request: SubRequest, - test_artifacts_directory: Path, read_control_execution_inputs: ExecutionInputs, read_control_connector_runner: ConnectorRunner, - session_start_timestamp: int, ) -> ExecutionResult: logging.info(f"Running read for control connector {read_control_execution_inputs.connector_under_test.name}") execution_result = await read_control_connector_runner.run() - execution_report = await persist_report( - request, - test_artifacts_directory, - read_control_execution_inputs, - execution_result, - session_start_timestamp, - ) - record_property("control_execution_report", execution_report) + + request.config.stash[stash_keys.REPORT].add_control_execution_result(execution_result) + return execution_result -@pytest.fixture -def read_target_connector_runner(dagger_client: dagger.Client, read_target_execution_inputs: ExecutionInputs) -> ConnectorRunner: - return ConnectorRunner(dagger_client, **read_target_execution_inputs.to_dict()) +@pytest.fixture(scope="session") +async def read_target_connector_runner( + dagger_client: dagger.Client, + read_target_execution_inputs: ExecutionInputs, + read_control_execution_result: ExecutionResult, + connection_id: str, +) -> AsyncGenerator: + proxy = Proxy( + dagger_client, + "proxy_server_read_target", + connection_id, + stream_for_server_replay=read_control_execution_result.http_dump, + ) + + yield ConnectorRunner( + dagger_client, + read_target_execution_inputs, + http_proxy=proxy, + ) + await proxy.clear_cache_volume() -@pytest.fixture +@pytest.fixture(scope="session") async def read_target_execution_result( - record_property: Callable, request: SubRequest, - test_artifacts_directory: Path, - read_control_execution_result: ExecutionResult, + record_testsuite_property: Callable, read_target_execution_inputs: ExecutionInputs, read_target_connector_runner: ConnectorRunner, - session_start_timestamp: int, ) -> ExecutionResult: logging.info(f"Running read for target connector {read_target_execution_inputs.connector_under_test.name}") execution_result = await read_target_connector_runner.run() - execution_report = await persist_report( - request, - test_artifacts_directory, - read_target_execution_inputs, - execution_result, - session_start_timestamp, - ) - record_property("target_execution_report", execution_report) + + request.config.stash[stash_keys.REPORT].add_target_execution_result(execution_result) return execution_result -@pytest.fixture +@pytest.fixture(scope="session") def read_with_state_control_execution_inputs( control_connector: ConnectorUnderTest, + actor_id: str, connector_config: SecretDict, - catalog: ConfiguredAirbyteCatalog, + configured_catalog: ConfiguredAirbyteCatalog, state: dict, + test_artifacts_directory: Path, + duckdb_path: Path, ) -> ExecutionInputs: + if not state: + pytest.skip("The state is not provided. Skipping the test as it's not possible to run a read with state.") return ExecutionInputs( connector_under_test=control_connector, + actor_id=actor_id, + global_output_dir=test_artifacts_directory, command=Command.READ_WITH_STATE, - catalog=catalog, + configured_catalog=configured_catalog, config=connector_config, state=state, + duckdb_path=duckdb_path, ) -@pytest.fixture +@pytest.fixture(scope="session") def read_with_state_target_execution_inputs( target_connector: ConnectorUnderTest, + actor_id: str, connector_config: SecretDict, - catalog: ConfiguredAirbyteCatalog, + configured_catalog: ConfiguredAirbyteCatalog, state: dict, + test_artifacts_directory: Path, + duckdb_path: Path, ) -> ExecutionInputs: + if not state: + pytest.skip("The state is not provided. Skipping the test as it's not possible to run a read with state.") return ExecutionInputs( connector_under_test=target_connector, + actor_id=actor_id, + global_output_dir=test_artifacts_directory, command=Command.READ_WITH_STATE, - catalog=catalog, + configured_catalog=configured_catalog, config=connector_config, state=state, + duckdb_path=duckdb_path, ) -@pytest.fixture -def read_with_state_control_connector_runner( +@pytest.fixture(scope="session") +async def read_with_state_control_connector_runner( dagger_client: dagger.Client, read_with_state_control_execution_inputs: ExecutionInputs, -) -> ConnectorRunner: - return ConnectorRunner(dagger_client, **read_with_state_control_execution_inputs.to_dict()) + connection_id: str, +) -> AsyncGenerator: + proxy = Proxy(dagger_client, "proxy_server_read_with_state_control", connection_id) + + yield ConnectorRunner( + dagger_client, + read_with_state_control_execution_inputs, + http_proxy=proxy, + ) + await proxy.clear_cache_volume() -@pytest.fixture +@pytest.fixture(scope="session") async def read_with_state_control_execution_result( - record_property: Callable, request: SubRequest, - test_artifacts_directory: Path, read_with_state_control_execution_inputs: ExecutionInputs, read_with_state_control_connector_runner: ConnectorRunner, - session_start_timestamp: int, ) -> ExecutionResult: + if read_with_state_control_execution_inputs.state is None: + pytest.skip("The control state is not provided. Skipping the test as it's not possible to run a read with state.") + logging.info(f"Running read with state for control connector {read_with_state_control_execution_inputs.connector_under_test.name}") execution_result = await read_with_state_control_connector_runner.run() - execution_report = await persist_report( - request, - test_artifacts_directory, - read_with_state_control_execution_inputs, - execution_result, - session_start_timestamp, - ) - record_property("control_execution_report", execution_report) + request.config.stash[stash_keys.REPORT].add_control_execution_result(execution_result) + return execution_result -@pytest.fixture -def read_with_state_target_connector_runner( +@pytest.fixture(scope="session") +async def read_with_state_target_connector_runner( dagger_client: dagger.Client, read_with_state_target_execution_inputs: ExecutionInputs, -) -> ConnectorRunner: - return ConnectorRunner(dagger_client, **read_with_state_target_execution_inputs.to_dict()) + read_with_state_control_execution_result: ExecutionResult, + connection_id: str, +) -> AsyncGenerator: + proxy = Proxy( + dagger_client, + "proxy_server_read_with_state_target", + connection_id, + stream_for_server_replay=read_with_state_control_execution_result.http_dump, + ) + yield ConnectorRunner( + dagger_client, + read_with_state_target_execution_inputs, + http_proxy=proxy, + ) + await proxy.clear_cache_volume() -@pytest.fixture +@pytest.fixture(scope="session") async def read_with_state_target_execution_result( - record_property: Callable, request: SubRequest, - test_artifacts_directory: Path, - read_with_state_control_execution_result: ExecutionResult, read_with_state_target_execution_inputs: ExecutionInputs, read_with_state_target_connector_runner: ConnectorRunner, - session_start_timestamp: int, ) -> ExecutionResult: + if read_with_state_target_execution_inputs.state is None: + pytest.skip("The target state is not provided. Skipping the test as it's not possible to run a read with state.") logging.info(f"Running read with state for target connector {read_with_state_target_execution_inputs.connector_under_test.name}") execution_result = await read_with_state_target_connector_runner.run() - execution_report = await persist_report( - request, - test_artifacts_directory, - read_with_state_target_execution_inputs, - execution_result, - session_start_timestamp, - ) - record_property("target_execution_report", execution_report) + request.config.stash[stash_keys.REPORT].add_target_execution_result(execution_result) + return execution_result diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/consts.py b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/consts.py new file mode 100644 index 0000000000000..3ece4d135814a --- /dev/null +++ b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/consts.py @@ -0,0 +1,3 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + +MAX_LINES_IN_REPORT = 1000 diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/pytest.ini b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/pytest.ini index 060aaa5a285fa..19c3b0784fe8e 100644 --- a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/pytest.ini +++ b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/pytest.ini @@ -1,4 +1,8 @@ [pytest] +addopts = --capture=no console_output_style = progress log_cli = True log_cli_level= INFO +markers = + with_state: mark test as running a read command with state. + without_state: mark test as running a read command without state. \ No newline at end of file diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/report.py b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/report.py new file mode 100644 index 0000000000000..65281933e30d5 --- /dev/null +++ b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/report.py @@ -0,0 +1,317 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + +from __future__ import annotations + +import datetime +import json +from collections import defaultdict +from copy import deepcopy +from enum import Enum +from pathlib import Path +from typing import TYPE_CHECKING, Any, Dict, Iterable, List, MutableMapping, Optional, Set, Tuple + +import requests +import yaml +from jinja2 import Environment, PackageLoader, select_autoescape +from live_tests.regression_tests import stash_keys + +from .consts import MAX_LINES_IN_REPORT + +if TYPE_CHECKING: + import pytest + from _pytest.config import Config + from airbyte_protocol.models import SyncMode, Type # type: ignore + from live_tests.commons.models import Command, ExecutionResult + + +class ReportState(Enum): + INITIALIZING = "initializing" + RUNNING = "running" + FINISHED = "finished" + + +class Report: + TEMPLATE_NAME = "report.html.j2" + + SPEC_SECRET_MASK_URL = "https://connectors.airbyte.com/files/registries/v0/specs_secrets_mask.yaml" + + def __init__(self, path: Path, pytest_config: Config) -> None: + self.path = path + self.pytest_config = pytest_config + self.connection_objects = pytest_config.stash[stash_keys.CONNECTION_OBJECTS] + self.secret_properties = self.get_secret_properties() + self.created_at = datetime.datetime.utcnow() + self.updated_at = self.created_at + self.control_execution_results_per_command: Dict[Command, ExecutionResult] = {} + self.target_execution_results_per_command: Dict[Command, ExecutionResult] = {} + self.test_results: List[Dict[str, Any]] = [] + self.update(ReportState.INITIALIZING) + + def get_secret_properties(self) -> List: + response = requests.get(self.SPEC_SECRET_MASK_URL) + response.raise_for_status() + return yaml.safe_load(response.text)["properties"] + + def update(self, state: ReportState = ReportState.RUNNING) -> None: + self._state = state + self.updated_at = datetime.datetime.utcnow() + self.render() + + def add_control_execution_result(self, control_execution_result: ExecutionResult) -> None: + self.control_execution_results_per_command[control_execution_result.command] = control_execution_result + self.update() + + def add_target_execution_result(self, target_execution_result: ExecutionResult) -> None: + self.target_execution_results_per_command[target_execution_result.command] = target_execution_result + self.update() + + def add_test_result(self, test_report: pytest.TestReport, test_documentation: Optional[str] = None) -> None: + cut_properties: List[Tuple[str, str]] = [] + for property_name, property_value in test_report.user_properties: + if len(str(property_value).splitlines()) > MAX_LINES_IN_REPORT: + cut_property_name = f"{property_name} (truncated)" + cut_property_value = "\n".join(str(property_value).splitlines()[:MAX_LINES_IN_REPORT]) + cut_property_value += f"\n... and {len(str(property_value).splitlines()) - MAX_LINES_IN_REPORT} more lines.\nPlease check the artifacts files for the full output." + cut_properties.append((cut_property_name, cut_property_value)) + else: + cut_properties.append((property_name, str(property_value))) + self.test_results.append( + { + "name": test_report.head_line, + "result": test_report.outcome, + "output": test_report.longreprtext if test_report.longrepr else "", + "properties": cut_properties, + "documentation": test_documentation, + } + ) + self.update() + + def render(self) -> None: + jinja_env = Environment( + loader=PackageLoader(__package__, "templates"), + autoescape=select_autoescape(), + trim_blocks=False, + lstrip_blocks=True, + ) + template = jinja_env.get_template(self.TEMPLATE_NAME) + rendered = template.render( + fully_generated=self._state is ReportState.FINISHED, + user=self.pytest_config.stash[stash_keys.USER], + test_date=self.updated_at, + connection_url=self.pytest_config.stash[stash_keys.CONNECTION_URL], + workspace_id=self.pytest_config.stash[stash_keys.CONNECTION_OBJECTS].workspace_id, + connection_id=self.pytest_config.stash[stash_keys.CONNECTION_ID], + connector_image=self.pytest_config.stash[stash_keys.CONNECTOR_IMAGE], + control_version=self.pytest_config.stash[stash_keys.CONTROL_VERSION], + target_version=self.pytest_config.stash[stash_keys.TARGET_VERSION], + source_config=json.dumps( + self.scrub_secrets_from_config( + deepcopy(self.connection_objects.source_config.data) if self.connection_objects.source_config else {} + ), + indent=2, + ), + state=json.dumps( + self.connection_objects.state if self.connection_objects.state else {}, + indent=2, + ), + configured_catalog=self.connection_objects.configured_catalog.json(indent=2) + if self.connection_objects.configured_catalog + else {}, + catalog=self.connection_objects.catalog.json(indent=2) if self.connection_objects.catalog else {}, + message_count_per_type=self.get_message_count_per_type(), + stream_coverage_metrics=self.get_stream_coverage_metrics(), + untested_streams=self.get_untested_streams(), + selected_streams=self.get_selected_streams(), + sync_mode_coverage=self.get_sync_mode_coverage(), + requested_urls_per_command=self.get_requested_urls_per_command(), + http_metrics_per_command=self.get_http_metrics_per_command(), + record_count_per_command_and_stream=self.get_record_count_per_stream(), + test_results=self.test_results, + max_lines=MAX_LINES_IN_REPORT, + ) + self.path.write_text(rendered) + + def scrub_secrets_from_config(self, to_scrub: MutableMapping) -> MutableMapping: + if isinstance(to_scrub, dict): + for key, value in to_scrub.items(): + if key in self.secret_properties: + to_scrub[key] = "********" + elif isinstance(value, dict): + to_scrub[key] = self.scrub_secrets_from_config(value) + return to_scrub + + ### REPORT CONTENT HELPERS ### + def get_stream_coverage_metrics(self) -> Dict[str, str]: + configured_catalog_stream_count = ( + len(self.connection_objects.configured_catalog.streams) if self.connection_objects.configured_catalog else 0 + ) + catalog_stream_count = len(self.connection_objects.catalog.streams) if self.connection_objects.catalog else 0 + return { + "Available in catalog": str(catalog_stream_count), + "In use (in configured catalog)": str(configured_catalog_stream_count), + "Coverage": f"{(configured_catalog_stream_count / catalog_stream_count) * 100:.2f}%", + } + + def get_record_count_per_stream( + self, + ) -> Dict[Command, Dict[str, Dict[str, int] | int]]: + record_count_per_command_and_stream: Dict[Command, Dict[str, Dict[str, int] | int]] = {} + + for control_result, target_result in zip( + self.control_execution_results_per_command.values(), + self.target_execution_results_per_command.values(), + ): + per_stream_count = defaultdict(lambda: {"control": 0, "target": 0}) # type: ignore + for result, source in [ + (control_result, "control"), + (target_result, "target"), + ]: + stream_schemas: Iterable = result.stream_schemas or [] + + for stream in stream_schemas: + per_stream_count[stream][source] = sum(1 for _ in result.get_records_per_stream(stream)) # type: ignore + for stream in per_stream_count: + per_stream_count[stream]["difference"] = per_stream_count[stream]["target"] - per_stream_count[stream]["control"] + record_count_per_command_and_stream[control_result.command] = per_stream_count # type: ignore + + return record_count_per_command_and_stream + + def get_untested_streams(self) -> List[str]: + streams_with_data: Set[str] = set() + for stream_count in self.get_record_count_per_stream().values(): + streams_with_data.update(stream_count.keys()) + + catalog_streams = self.connection_objects.catalog.streams if self.connection_objects.catalog else [] + + return [stream.name for stream in catalog_streams if stream.name not in streams_with_data] + + def get_selected_streams(self) -> Dict[str, Dict[str, SyncMode | bool]]: + untested_streams = self.get_untested_streams() + return ( + { + configured_stream.stream.name: { + "sync_mode": configured_stream.sync_mode, + "has_data": configured_stream.stream.name not in untested_streams, + } + for configured_stream in sorted( + self.connection_objects.configured_catalog.streams, + key=lambda x: x.stream.name, + ) + } + if self.connection_objects.configured_catalog + else {} + ) + + def get_sync_mode_coverage(self) -> Dict[SyncMode, int]: + count_per_sync_mode: Dict[SyncMode, int] = defaultdict(int) + for s in self.get_selected_streams().values(): + count_per_sync_mode[s["sync_mode"]] += 1 + return count_per_sync_mode + + def get_message_count_per_type( + self, + ) -> Tuple[List[Command], Dict[Type, Dict[Command, Dict[str, int]]]]: + message_count_per_type_and_command: Dict[Type, Dict[Command, Dict[str, int]]] = {} + all_message_types = set() + all_commands = set() + # Gather all message types from both control and target execution reports + for execution_results_per_command in [ + self.control_execution_results_per_command, + self.target_execution_results_per_command, + ]: + for command, execution_result in execution_results_per_command.items(): + all_commands.add(command) + for message_type in execution_result.get_message_count_per_type().keys(): + all_message_types.add(message_type) + + all_commands_sorted = sorted(all_commands, key=lambda command: command.value) + all_message_types_sorted = sorted(all_message_types, key=lambda message_type: message_type.value) + + # Iterate over all message types and commands to count messages + for message_type in all_message_types_sorted: + message_count_per_type_and_command[message_type] = {} + for command in all_commands_sorted: + message_count_per_type_and_command[message_type][command] = { + "control": 0, + "target": 0, + } + if command in self.control_execution_results_per_command: + message_count_per_type_and_command[message_type][command]["control"] = ( + self.control_execution_results_per_command[command].get_message_count_per_type().get(message_type, 0) + ) + if command in self.target_execution_results_per_command: + message_count_per_type_and_command[message_type][command]["target"] = ( + self.target_execution_results_per_command[command].get_message_count_per_type().get(message_type, 0) + ) + message_count_per_type_and_command[message_type][command]["difference"] = ( + message_count_per_type_and_command[message_type][command]["target"] + - message_count_per_type_and_command[message_type][command]["control"] + ) + return all_commands_sorted, message_count_per_type_and_command + + def get_http_metrics_per_command( + self, + ) -> Dict[Command, Dict[str, Dict[str, int | str] | int]]: + metrics_per_command: Dict[Command, Dict[str, Dict[str, int | str] | int]] = {} + + for control_result, target_result in zip( + self.control_execution_results_per_command.values(), + self.target_execution_results_per_command.values(), + ): + control_flow_count = len(control_result.http_flows) + control_all_urls = [f.request.url for f in control_result.http_flows] + control_duplicate_flow_count = len(control_all_urls) - len(set(control_all_urls)) + control_cache_hits_count = sum(1 for f in control_result.http_flows if f.is_replay) + control_cache_hit_ratio = f"{(control_cache_hits_count / control_flow_count) * 100:.2f}%" if control_flow_count != 0 else "N/A" + + target_flow_count = len(target_result.http_flows) + target_all_urls = [f.request.url for f in target_result.http_flows] + target_duplicate_flow_count = len(target_all_urls) - len(set(target_all_urls)) + target_cache_hits_count = sum(1 for f in target_result.http_flows if f.is_replay) + target_cache_hit_ratio = f"{(target_cache_hits_count / target_flow_count) * 100:.2f}%" if target_flow_count != 0 else "N/A" + + flow_count_difference = target_flow_count - control_flow_count + + metrics_per_command[control_result.command] = { + "control": { + "flow_count": control_flow_count, + "duplicate_flow_count": control_duplicate_flow_count, + "cache_hits_count": control_cache_hits_count, + "cache_hit_ratio": control_cache_hit_ratio, + }, + "target": { + "flow_count": target_flow_count, + "duplicate_flow_count": target_duplicate_flow_count, + "cache_hits_count": target_cache_hits_count, + "cache_hit_ratio": target_cache_hit_ratio, + }, + "difference": flow_count_difference, + } + + return metrics_per_command + + def get_requested_urls_per_command( + self, + ) -> Dict[Command, List[Tuple[int, str, str]]]: + requested_urls_per_command = {} + all_commands = sorted( + list(set(self.control_execution_results_per_command.keys()).union(set(self.target_execution_results_per_command.keys()))), + key=lambda command: command.value, + ) + for command in all_commands: + if command in self.control_execution_results_per_command: + control_flows = self.control_execution_results_per_command[command].http_flows + else: + control_flows = [] + if command in self.target_execution_results_per_command: + target_flows = self.target_execution_results_per_command[command].http_flows + else: + target_flows = [] + all_flows = [] + max_flows = max(len(control_flows), len(target_flows)) + for i in range(max_flows): + control_url = control_flows[i].request.url if i < len(control_flows) else "" + target_url = target_flows[i].request.url if i < len(target_flows) else "" + all_flows.append((i, control_url, target_url)) + requested_urls_per_command[command] = all_flows + return requested_urls_per_command diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/stash_keys.py b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/stash_keys.py new file mode 100644 index 0000000000000..a8f608fdf2d1a --- /dev/null +++ b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/stash_keys.py @@ -0,0 +1,28 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + +from pathlib import Path +from typing import List + +import pytest +from live_tests.commons.models import ConnectionObjects +from live_tests.regression_tests.report import Report + +AIRBYTE_API_KEY = pytest.StashKey[str]() +CONNECTION_ID = pytest.StashKey[str]() +CONNECTION_OBJECTS = pytest.StashKey[ConnectionObjects]() +CONNECTION_URL = pytest.StashKey[str | None]() +CONNECTOR_IMAGE = pytest.StashKey[str]() +CONTROL_VERSION = pytest.StashKey[str]() +DAGGER_LOG_PATH = pytest.StashKey[Path]() +DUCKDB_PATH = pytest.StashKey[Path]() +HTTP_DUMP_CACHE_VOLUMES = pytest.StashKey[List]() +IS_PERMITTED_BOOL = pytest.StashKey[bool]() +PR_URL = pytest.StashKey[str]() +REPORT = pytest.StashKey[Report]() +RETRIEVAL_REASONS = pytest.StashKey[str]() +SESSION_START_TIMESTAMP = pytest.StashKey[int]() +SHOULD_READ_WITH_STATE = pytest.StashKey[bool]() +TARGET_VERSION = pytest.StashKey[str]() +TEST_ARTIFACT_DIRECTORY = pytest.StashKey[Path]() +USER = pytest.StashKey[str]() +WORKSPACE_ID = pytest.StashKey[str]() diff --git a/airbyte-lib/airbyte_lib/_factories/__init__.py b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/templates/__init__.py similarity index 100% rename from airbyte-lib/airbyte_lib/_factories/__init__.py rename to airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/templates/__init__.py diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/templates/report.html.j2 b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/templates/report.html.j2 new file mode 100644 index 0000000000000..ff2f902c28cc8 --- /dev/null +++ b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/templates/report.html.j2 @@ -0,0 +1,493 @@ + + + + + + Test Report + + + + + + + +

    +

    Regression test report

    +
    +

    Context

    +
    +
      +
    • Connection
    • +
    • Tester: {{ user }}
    • +
    • Test date: {{ test_date }}
    • +
    • Workspace ID: {{ workspace_id }}
    • +
    • Connection ID: {{ connection_id }}
    • +
    • Connector image: {{ connector_image }}
    • +
    • Control version: {{ control_version }}
    • +
    • Target version: {{ target_version }}
    • +
    +
    +
    +
    +

    Connection coverage metadata

    +
    +

    Stream coverage

    + + + + {% for metric_name, metric_value in stream_coverage_metrics.items() %} + + + {% endfor %} + + +
    {{ metric_name}}{{ metric_value }}
    +

    Sync mode coverage

    + + + + {% for sync_mode, count in sync_mode_coverage.items() %} + + + {% endfor %} + + +
    {{ sync_mode.value }}{{ count }}
    + +

    Selected stream

    + + + + + + + + + {% for stream_name in selected_streams %} + + + + {% if selected_streams[stream_name]['has_data'] %} + + {% else %} + + {% endif %} + + {% endfor %} + +
    StreamSync modeHas data
    {{ stream_name }}{{ selected_streams[stream_name]['sync_mode'].value }}{{ selected_streams[stream_name]['has_data'] }}{{ selected_streams[stream_name]['has_data'] }}
    + {% if untested_streams %} +

    Untested streams (not in configured catalog or without data)

    +
      + {% for stream_name in untested_streams %} +
    • {{ stream_name }}
    • + {% endfor %} +
    + {% endif %} +
    +
    +
    +

    Connection objects

    +
    +

    Source configuration

    +
    {{ source_config }}
    + {% if state %} +

    State

    +
    {{ state }}
    +                
    + {% endif %} +

    Configured catalog

    +
    {{ configured_catalog }}
    +

    Catalog

    +
    {{ catalog }}
    +
    +
    +
    + {% if not fully_generated %} +

    Command execution metrics

    + {% else%} +

    Command execution metrics

    + {% endif %} +
    + {% if message_count_per_type[0] %} +

    Message types

    + + + + + {% for command in message_count_per_type[0] %} + + {% endfor %} + + + + {% for command in message_count_per_type[0] %} + + + + {% endfor %} + + {% for message_type in message_count_per_type[1] %} + + + {% for command in message_count_per_type[1][message_type] %} + + + {% if message_count_per_type[1][message_type][command]["difference"] != 0 %} + + {% else %} + + {% endif %} + {% endfor %} + + {% endfor %} +
    {{ command.value.upper() }}
    controltargetΔ
    {{ message_type.value }}{{ message_count_per_type[1][message_type][command]["control"] }}{{ message_count_per_type[1][message_type][command]["target"] }}{{ message_count_per_type[1][message_type][command]["difference"] }}{{ message_count_per_type[1][message_type][command]["difference"] }}
    + {% endif %} + {% if record_count_per_command_and_stream %} +

    Record count per stream

    + {% for command, record_count_per_stream in record_count_per_command_and_stream.items() %} +

    {{ command.value.upper() }}

    + + + + + + + + + + + {% for stream, record_count in record_count_per_stream.items() %} + + + + + {% if record_count.get("difference", 0) != 0 %} + + {% else %} + + {% endif %} + + {% endfor %} + +
    streamcontrol record counttarget record countΔ
    {{ stream }}{{ record_count.get("control", 0) }}{{ record_count.get("target", 0) }}{{ record_count.get("difference", 0) }}{{ record_count.get("difference", 0) }}
    + {% endfor %} + {% endif %} + {% if http_metrics_per_command %} +

    HTTP traffic

    + + + + + + + + + + + + + + + + + + + {% for command in http_metrics_per_command %} + + + + + + + + {% if http_metrics_per_command[command].get("difference", 0) != 0 %} + + {% else %} + + {% endif %} + + {% endfor %} + +
    controltargetΔ
    commandrequest countduplicate request countrequest countduplicate request countcache hit ratiorequest count
    {{ command.value.upper() }}{{ http_metrics_per_command[command].get("control", {}).get("flow_count", "0")}}{{ http_metrics_per_command[command].get("control", {}).get("duplicate_flow_count", "0")}}{{ http_metrics_per_command[command].get("target", {}).get("flow_count", "0")}}{{ http_metrics_per_command[command].get("target", {}).get("duplicate_flow_count", "0")}}{{ http_metrics_per_command[command].get("target", {}).get("cache_hit_ratio", "0%")}}{{ http_metrics_per_command[command].get("difference", 0)}}{{ http_metrics_per_command[command].get("difference", 0)}}
    + {% endif %} +
    +
    +
    + {% if not fully_generated %} +

    Requested URLs

    + {% else%} +

    Requested URLs

    + {% endif %} +
    + {% for command, flows in requested_urls_per_command.items() %} +

    {{ command.value.upper() }}

    + {% if flows %} +
    + + + + + + + + + + {% for index, control_url, target_url in flows %} + + + + + + {% endfor %} + +
    Control URLTarget URL
    {{ index }}{{ control_url }}{{ target_url }}
    +
    + {% else %} +

    No URLs requested

    + {% endif %} + {% endfor%} +
    +
    +
    + {% if not fully_generated %} +

    Test results

    + {% else%} +

    Test results

    + {% endif %} +
    + {% for test in test_results %} +
    + {% if test["result"] == "passed" %} +

    {{ test["name"] }} [{{ test["result"] }}]

    + {% elif test["result"] == "failed" %} +

    {{ test["name"] }} [{{ test["result"] }}]

    + {% else %} +

    {{ test["name"] }} [{{ test["result"] }}]

    + {% endif %} + {% if test["documentation"] %} +

    {{ test["documentation"] }}

    + {% endif %} + {% if test["output"] %} +
    +                    {{ test["output"] }}
    +                    
    + {% endif %} + {% if test["properties"]%} + {% for property_name, property_value in test["properties"] %} + {% if property_value %} +

    {{ property_name }}

    +
    +                        {{ property_value }}
    +                        
    + {% else%} + + {% endif %} + {% endfor%} + {% endif %} +
    + {% endfor%} +
    +
    +
    + + diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/test_check.py b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/test_check.py new file mode 100644 index 0000000000000..7b963f8a24995 --- /dev/null +++ b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/test_check.py @@ -0,0 +1,57 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + + +from typing import Callable + +import pytest +from airbyte_protocol.models import Status, Type # type: ignore +from live_tests.commons.models import ExecutionResult +from live_tests.regression_tests.consts import MAX_LINES_IN_REPORT + +from .utils import fail_test_on_failing_execution_results, tail_file + +pytestmark = [ + pytest.mark.anyio, +] + + +async def test_check_passes_on_both_versions( + record_property: Callable, + check_control_execution_result: ExecutionResult, + check_target_execution_result: ExecutionResult, +) -> None: + """This test runs the check command on both the control and target connectors. + It makes sure that the check command succeeds on both connectors. + Success is determined by the presence of a connection status message with a status of SUCCEEDED. + """ + fail_test_on_failing_execution_results( + record_property, + [ + check_control_execution_result, + check_target_execution_result, + ], + ) + + def is_successful_check(execution_result: ExecutionResult) -> bool: + for message in execution_result.airbyte_messages: + if message.type is Type.CONNECTION_STATUS and message.connectionStatus.status is Status.SUCCEEDED: + return True + return False + + successful_control_check: bool = is_successful_check(check_control_execution_result) + successful_target_check: bool = is_successful_check(check_target_execution_result) + error_messages = [] + if not successful_control_check: + record_property( + f"Control CHECK standard output [Last {MAX_LINES_IN_REPORT} lines]", + tail_file(check_control_execution_result.stdout_file_path, n=MAX_LINES_IN_REPORT), + ) + error_messages.append("The control check did not succeed, we cannot compare the results.") + if not successful_target_check: + record_property( + f"Target CHECK standard output [Last {MAX_LINES_IN_REPORT} lines]", + tail_file(check_target_execution_result.stdout_file_path, n=MAX_LINES_IN_REPORT), + ) + error_messages.append("The target check did not succeed. Check the test artifacts for more information.") + if error_messages: + pytest.fail("\n".join(error_messages)) diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/test_discover.py b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/test_discover.py new file mode 100644 index 0000000000000..1c9ea0035a90b --- /dev/null +++ b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/test_discover.py @@ -0,0 +1,109 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +import json +from typing import Callable, Dict, Iterable, List + +import pytest +from _pytest.fixtures import SubRequest +from airbyte_protocol.models import AirbyteCatalog, AirbyteStream, Type # type: ignore +from live_tests.commons.models import ExecutionResult + +from .utils import fail_test_on_failing_execution_results, get_and_write_diff + +pytestmark = [ + pytest.mark.anyio, +] + + +async def test_catalog_are_the_same( + record_property: Callable, + request: SubRequest, + discover_control_execution_result: ExecutionResult, + discover_target_execution_result: ExecutionResult, +) -> None: + """This test runs the discover command on both the control and target connectors. + It makes sure that the discover command returns the same catalog for both connectors. + A catalog diff is generated and stored in the test artifacts if the catalogs are not the same. + """ + fail_test_on_failing_execution_results( + record_property, + [ + discover_control_execution_result, + discover_target_execution_result, + ], + ) + + def get_catalog(execution_result: ExecutionResult) -> AirbyteCatalog: + for message in execution_result.airbyte_messages: + if message.type is Type.CATALOG and message.catalog: + return message.catalog + return None + + control_catalog = get_catalog(discover_control_execution_result) + target_catalog = get_catalog(discover_target_execution_result) + + if control_catalog is None: + pytest.skip("The control discover did not return a catalog, we cannot compare the results.") + + if target_catalog is None: + pytest.fail("The target discover did not return a catalog. Check the test artifacts for more information.") + + control_streams = {c.name: c for c in control_catalog.streams} + target_streams = {t.name: t for t in target_catalog.streams} + + catalog_diff_path_prefix = "catalog_diff" + catalog_diff = get_and_write_diff( + request, + _get_filtered_sorted_streams(control_streams, target_streams.keys(), True), + _get_filtered_sorted_streams(target_streams, control_streams.keys(), True), + catalog_diff_path_prefix, + True, + None, + ) + + control_streams_diff_path_prefix = "control_streams_diff" + control_streams_diff = get_and_write_diff( + request, + _get_filtered_sorted_streams(control_streams, target_streams.keys(), False), + [], + control_streams_diff_path_prefix, + True, + None, + ) + + target_streams_diff_path_prefix = "target_streams_diff" + target_streams_diff = get_and_write_diff( + request, + [], + _get_filtered_sorted_streams(target_streams, control_streams.keys(), False), + target_streams_diff_path_prefix, + True, + None, + ) + + has_diff = catalog_diff or control_streams_diff or target_streams_diff + + if has_diff: + record_property("Catalog diff", catalog_diff) + record_property("Control streams diff", control_streams_diff) + record_property("Target streams diff", target_streams_diff) + + if control_streams.keys() != target_streams.keys(): + pytest.fail( + f"The set of streams in the control and target catalogs do not match. control_streams={', '.join(control_streams.keys())} target_streams={', '.join(target_streams.keys())}. Detailed diff is stored in Diff is stored at {catalog_diff}, {control_streams_diff}, and {target_streams_diff}." + ) + + else: + pytest.fail( + f"The control and target output are not the same. Diff is stored at {catalog_diff}, {control_streams_diff}, and {target_streams_diff}." + ) + + +def _get_filtered_sorted_streams(streams: Dict[str, AirbyteStream], stream_set: Iterable[str], include_target: bool) -> List[Dict]: + return sorted( + filter( + lambda x: (x["name"] in stream_set if include_target else x["name"] not in stream_set), + [json.loads(s.json(sort_keys=True)) for s in streams.values()], + ), + key=lambda x: x["name"], + ) diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/test_expected_records.py b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/test_expected_records.py deleted file mode 100644 index 74a8c26db977a..0000000000000 --- a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/test_expected_records.py +++ /dev/null @@ -1,20 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -import pytest -from live_tests.commons.models import ExecutionResult - -from .utils import filter_records, make_comparable_records - -pytestmark = [ - pytest.mark.anyio, -] - - -# This test is very basic and just used as a demonstration before porting the "real" expected records tests from VA -async def test_all_records_are_produced_in_target_version( - read_control_execution_result: ExecutionResult, - read_target_execution_result: ExecutionResult, -) -> None: - control_records = list(make_comparable_records(filter_records(read_control_execution_result.airbyte_messages))) - target_records = list(make_comparable_records(filter_records(read_target_execution_result.airbyte_messages))) - assert target_records == control_records diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/test_read.py b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/test_read.py new file mode 100644 index 0000000000000..515b79caa26b8 --- /dev/null +++ b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/test_read.py @@ -0,0 +1,573 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +from __future__ import annotations + +import json +from typing import TYPE_CHECKING, Any, Callable, Dict, Generator, Iterable, List, Optional + +import pytest +from airbyte_protocol.models import AirbyteMessage # type: ignore +from deepdiff import DeepDiff # type: ignore +from live_tests.commons.models import ExecutionResult + +from .utils import fail_test_on_failing_execution_results, get_and_write_diff, get_test_logger, write_string_to_test_artifact + +if TYPE_CHECKING: + from _pytest.fixtures import SubRequest + +pytestmark = [ + pytest.mark.anyio, +] + + +EXCLUDE_PATHS = ["emitted_at"] + + +class TestDataIntegrity: + """This class contains tests that check if the data integrity is preserved between the control and target versions. + The tests have some overlap but they are meant to be gradually stricter in terms of integrity checks. + + 1. test_record_count: On each stream, check if the target version produces at least the same number of records as the control version. + 2. test_all_pks_are_produced_in_target_version: On each stream, check if all primary key values produced by the control version are present in the target version. + 3. test_all_records_are_the_same: On each stream, check if all records produced by the control version are the same as in the target version. This will write a diff of the records to the test artifacts. + + All these test have a full refresh and incremental variant. + """ + + async def _check_all_pks_are_produced_in_target_version( + self, + request: SubRequest, + record_property: Callable, + configured_streams: Iterable[str], + primary_keys_per_stream: Dict[str, Optional[List[str]]], + read_with_state_control_execution_result: ExecutionResult, + read_with_state_target_execution_result: ExecutionResult, + ) -> None: + """This test gathers all primary key values from the control version and checks if they are present in the target version for each stream. + If there are missing primary keys, the test fails and the missing records are stored in the test artifacts. + Args: + request (SubRequest): The test request. + record_property (Callable): A callable for stashing information on the report. + streams: (Iterable[str]): The list of streams configured for the connection. + primary_keys_per_stream (Dict[str, Optional[List[str]]]): The primary keys for each stream. + read_with_state_control_execution_result (ExecutionResult): The control version execution result. + read_with_state_target_execution_result (ExecutionResult): The target version execution result. + """ + if not primary_keys_per_stream: + pytest.skip("No primary keys provided on any stream. Skipping the test.") + + logger = get_test_logger(request) + streams_with_missing_records = set() + for stream_name in configured_streams: + _primary_key = primary_keys_per_stream[stream_name] + if not _primary_key: + # TODO: report skipped PK test per individual stream + logger.warning(f"No primary keys provided on stream {stream_name}.") + continue + + primary_key = _primary_key[0] if isinstance(_primary_key, list) else _primary_key + + control_pks = set() + target_pks = set() + logger.info(f"Retrieving primary keys for stream {stream_name} on control version.") + for control_record in read_with_state_control_execution_result.get_records_per_stream(stream_name): + control_pks.add(control_record.record.data[primary_key]) + + logger.info(f"Retrieving primary keys for stream {stream_name} on target version.") + for target_record in read_with_state_target_execution_result.get_records_per_stream(stream_name): + target_pks.add(target_record.record.data[primary_key]) + + if missing_pks := control_pks - target_pks: + logger.warning(f"Found {len(missing_pks)} primary keys for stream {stream_name}. Retrieving missing records.") + streams_with_missing_records.add(stream_name) + missing_records = [ + r + for r in read_with_state_control_execution_result.get_records_per_stream(stream_name) + if r.record.data[primary_key] in missing_pks + ] + record_property( + f"Missing records on stream {stream_name}", + json.dumps(missing_records), + ) + artifact_path = write_string_to_test_artifact( + request, + json.dumps(missing_records), + f"missing_records_{stream_name}.json", + subdir=request.node.name, + ) + logger.info(f"Missing records for stream {stream_name} are stored in {artifact_path}.") + if streams_with_missing_records: + pytest.fail(f"Missing records for streams: {', '.join(streams_with_missing_records)}.") + + async def _check_record_counts( + self, + record_property: Callable, + configured_streams: Iterable[str], + read_control_execution_result: ExecutionResult, + read_target_execution_result: ExecutionResult, + ) -> None: + record_count_difference_per_stream: Dict[str, Dict[str, int]] = {} + for stream_name in configured_streams: + control_records_count = sum(1 for _ in read_control_execution_result.get_records_per_stream(stream_name)) + target_records_count = sum(1 for _ in read_target_execution_result.get_records_per_stream(stream_name)) + + difference = { + "delta": target_records_count - control_records_count, + "control": control_records_count, + "target": target_records_count, + } + + if difference["delta"] != 0: + record_count_difference_per_stream[stream_name] = difference + error_messages = [] + for stream, difference in record_count_difference_per_stream.items(): + if difference["delta"] > 0: + error_messages.append( + f"Stream {stream} has {difference['delta']} more records in the target version ({difference['target']} vs. {difference['control']})." + ) + if difference["delta"] < 0: + error_messages.append( + f"Stream {stream} has {-difference['delta']} fewer records in the target version({difference['target']} vs. {difference['control']})." + ) + if error_messages: + record_property("Record count differences", "\n".join(error_messages)) + pytest.fail("Record counts are different.") + + async def _check_all_records_are_the_same( + self, + request: SubRequest, + record_property: Callable, + configured_streams: Iterable[str], + primary_keys_per_stream: Dict[str, Optional[List[str]]], + read_control_execution_result: ExecutionResult, + read_target_execution_result: ExecutionResult, + ) -> None: + """This test checks if all records in the control version are present in the target version for each stream. + If there are mismatches, the test fails and the missing records are stored in the test artifacts. + It will catch differences in record schemas, missing records, and extra records. + + Args: + request (SubRequest): The test request. + read_control_execution_result (ExecutionResult): The control version execution result. + read_target_execution_result (ExecutionResult): The target version execution result. + """ + streams_with_diff = set() + for stream in configured_streams: + control_records = list(read_control_execution_result.get_records_per_stream(stream)) + target_records = list(read_target_execution_result.get_records_per_stream(stream)) + + if control_records and not target_records: + pytest.fail(f"Stream {stream} is missing in the target version.") + + if primary_key := primary_keys_per_stream.get(stream): + diffs = self._get_diff_on_stream_with_pk( + request, + record_property, + stream, + control_records, + target_records, + primary_key, + ) + else: + diffs = self._get_diff_on_stream_without_pk( + request, + record_property, + stream, + control_records, + target_records, + ) + + if diffs: + streams_with_diff.add(stream) + + if streams_with_diff: + messages = [ + f"Records for stream {stream} are different. Please check the diff in the test artifacts for debugging." + for stream in sorted(streams_with_diff) + ] + pytest.fail("/n".join(messages)) + + def _check_record_schema_match( + self, + request: SubRequest, + record_property: Callable, + control_execution_result: ExecutionResult, + target_execution_result: ExecutionResult, + ) -> None: + """This test checks if the schema of the records in the control and target versions match. + It compares the meta schema inferred for each streams on the control and target versions. + It also fetches an example record for each stream from the DuckDB instance and compares the schema of the records. + + Args: + record_property (Callable): The record property to store the mismatching fields. + control_execution_result (ExecutionResult): The control version execution result. + target_execution_result (ExecutionResult): The target version execution result. + """ + logger = get_test_logger(request) + + assert control_execution_result.stream_schemas is not None, "Control schemas were not inferred." + assert target_execution_result.stream_schemas is not None, "Target schemas were not inferred." + + mismatches_count = 0 + for stream in control_execution_result.stream_schemas: + control_schema = control_execution_result.stream_schemas.get(stream, {}) + if not control_schema: + logger.warning(f"Stream {stream} was not found in the control results.") + + target_schema = target_execution_result.stream_schemas.get(stream, {}) + if control_schema and not target_schema: + logger.warning(f"Stream {stream} was present in the control results but not in the target results.") + + diff = DeepDiff(control_schema, target_schema, ignore_order=True) + if diff: + record_property(f"{stream} diff between control and target version", diff.pretty()) + try: + control_record = next(control_execution_result.get_records_per_stream(stream)) + control_example = json.dumps(control_record.record.data, indent=2) + record_property(f"{stream} example record for control version", control_example) + except StopIteration: + logger.warning(f"Stream {stream} has no record in the control version.") + try: + target_record = next(target_execution_result.get_records_per_stream(stream)) + target_example = json.dumps(target_record.record.data, indent=2) + record_property(f"{stream} example record for target version", target_example) + except StopIteration: + logger.warning(f"Stream {stream} has no record in the target version.") + mismatches_count += 1 + + if mismatches_count > 0: + pytest.fail(f"{mismatches_count} streams have mismatching schemas between control and target versions.") + + @pytest.mark.with_state + async def test_record_count_with_state( + self, + record_property: Callable, + configured_streams: Iterable[str], + read_with_state_control_execution_result: ExecutionResult, + read_with_state_target_execution_result: ExecutionResult, + ) -> None: + """This test compares the record counts between the control and target versions on each stream. + Records are pulled from the output of the read command to which the connection state is passed. + It fails if there are any differences in the record counts. + It is not bulletproof, if the upstream source supports insertion or deletion it may lead to false positives. + The HTTP cache used between the control and target versions command execution might limit this problem. + Extra records in the target version might mean that a bug was fixed, but it could also mean that the target version produces duplicates. + We should add a new test for duplicates and not fail this one if extra records are found. + More advanced checks are done in the other tests. + """ + fail_test_on_failing_execution_results( + record_property, + [ + read_with_state_control_execution_result, + read_with_state_target_execution_result, + ], + ) + await self._check_record_counts( + record_property, + configured_streams, + read_with_state_control_execution_result, + read_with_state_target_execution_result, + ) + + @pytest.mark.without_state + async def test_record_count_without_state( + self, + record_property: Callable, + configured_streams: Iterable[str], + read_control_execution_result: ExecutionResult, + read_target_execution_result: ExecutionResult, + ) -> None: + """This test compares the record counts between the control and target versions on each stream. + Records are pulled from the output of the read command to which no connection state is passed (leading to a full-refresh like sync). + It fails if there are any differences in the record counts. + It is not bulletproof, if the upstream source supports insertion or deletion it may lead to false positives. + The HTTP cache used between the control and target versions command execution might limit this problem. + Extra records in the target version might mean that a bug was fixed, but it could also mean that the target version produces duplicates. + We should add a new test for duplicates and not fail this one if extra records are found. + More advanced checks are done in the other tests. + """ + fail_test_on_failing_execution_results( + record_property, + [ + read_control_execution_result, + read_target_execution_result, + ], + ) + await self._check_record_counts( + record_property, + configured_streams, + read_control_execution_result, + read_target_execution_result, + ) + + @pytest.mark.with_state + async def test_all_pks_are_produced_in_target_version_with_state( + self, + request: SubRequest, + record_property: Callable, + configured_streams: Iterable[str], + primary_keys_per_stream: Dict[str, Optional[List[str]]], + read_with_state_control_execution_result: ExecutionResult, + read_with_state_target_execution_result: ExecutionResult, + ) -> None: + """This test checks if all primary key values produced by the control version are present in the target version for each stream. + It is reading the records from the output of the read command to which the connection state is passed. + A failing test means that the target version is missing some records. + """ + fail_test_on_failing_execution_results( + record_property, + [ + read_with_state_control_execution_result, + read_with_state_target_execution_result, + ], + ) + await self._check_all_pks_are_produced_in_target_version( + request, + record_property, + configured_streams, + primary_keys_per_stream, + read_with_state_control_execution_result, + read_with_state_target_execution_result, + ) + + @pytest.mark.without_state + async def test_all_pks_are_produced_in_target_version_without_state( + self, + request: SubRequest, + record_property: Callable, + configured_streams: Iterable[str], + primary_keys_per_stream: Dict[str, Optional[List[str]]], + read_control_execution_result: ExecutionResult, + read_target_execution_result: ExecutionResult, + ) -> None: + """This test checks if all primary key values produced by the control version are present in the target version for each stream. + Records are pulled from the output of the read command to which no connection state is passed (leading to a full-refresh like sync). + A failing test means that the target version is missing some records. + """ + fail_test_on_failing_execution_results( + record_property, + [ + read_control_execution_result, + read_target_execution_result, + ], + ) + await self._check_all_pks_are_produced_in_target_version( + request, + record_property, + configured_streams, + primary_keys_per_stream, + read_control_execution_result, + read_target_execution_result, + ) + + @pytest.mark.with_state + async def test_record_schema_match_with_state( + self, + request: SubRequest, + record_property: Callable, + read_with_state_control_execution_result: ExecutionResult, + read_with_state_target_execution_result: ExecutionResult, + ) -> None: + """This test checks if the schema of the streams in the control and target versions match. + It produces a meta schema for each stream on control and target version and compares them. + It is not using the catalog schema, but inferring schemas from the actual records produced by the read command. + Records are pulled from the output of the read command to which the connection state is passed. + """ + self._check_record_schema_match( + request, + record_property, + read_with_state_control_execution_result, + read_with_state_target_execution_result, + ) + + @pytest.mark.without_state + async def test_record_schema_match_without_state( + self, + request: SubRequest, + record_property: Callable, + read_control_execution_result: ExecutionResult, + read_target_execution_result: ExecutionResult, + ) -> None: + """This test checks if the schema of the streams in the control and target versions match. + It produces a meta schema for each stream on control and target version and compares them. + It is not using the catalog schema, but inferring schemas from the actual records produced by the read command. + Records are pulled from the output of the read command to which the connection state is passed. + """ + self._check_record_schema_match( + request, + record_property, + read_control_execution_result, + read_target_execution_result, + ) + + @pytest.mark.with_state + async def test_all_records_are_the_same_with_state( + self, + request: SubRequest, + record_property: Callable, + configured_streams: Iterable[str], + primary_keys_per_stream: Dict[str, Optional[List[str]]], + read_with_state_control_execution_result: ExecutionResult, + read_with_state_target_execution_result: ExecutionResult, + ) -> None: + """This test compares all records between the control and target versions on each stream. + It is very sensitive to record schema and order changes. + It fails if there are any differences in the records. + It is reading the records from the output of the read command to which the connection state is passed. + """ + fail_test_on_failing_execution_results( + record_property, + [ + read_with_state_control_execution_result, + read_with_state_target_execution_result, + ], + ) + await self._check_all_records_are_the_same( + request, + record_property, + configured_streams, + primary_keys_per_stream, + read_with_state_control_execution_result, + read_with_state_target_execution_result, + ) + + @pytest.mark.without_state + async def test_all_records_are_the_same_without_state( + self, + request: SubRequest, + record_property: Callable, + configured_streams: Iterable[str], + primary_keys_per_stream: Dict[str, Optional[List[str]]], + read_control_execution_result: ExecutionResult, + read_target_execution_result: ExecutionResult, + ) -> None: + """This test compares all records between the control and target versions on each stream. + It is very sensitive to record schema and order changes. + It fails if there are any differences in the records. + It is reading the records from the output of the read command to which no connection state is passed (leading to a full-refresh like sync). + """ + fail_test_on_failing_execution_results( + record_property, + [ + read_control_execution_result, + read_target_execution_result, + ], + ) + await self._check_all_records_are_the_same( + request, + record_property, + configured_streams, + primary_keys_per_stream, + read_control_execution_result, + read_target_execution_result, + ) + + def _get_diff_on_stream_with_pk( + self, + request: SubRequest, + record_property: Callable, + stream: str, + control_records: List[AirbyteMessage], + target_records: List[AirbyteMessage], + primary_key: List[str], + ) -> Optional[Iterable[str]]: + control_pks = {r.record.data[primary_key[0]] for r in control_records} + target_pks = {r.record.data[primary_key[0]] for r in target_records} + + # Compare the diff for all records whose primary key is in + record_diff_path_prefix = f"{stream}_record_diff" + record_diff = get_and_write_diff( + request, + _get_filtered_sorted_records(control_records, target_pks, True, primary_key), + _get_filtered_sorted_records(target_records, control_pks, True, primary_key), + record_diff_path_prefix, + ignore_order=False, + exclude_paths=EXCLUDE_PATHS, + ) + + control_records_diff_path_prefix = f"{stream}_control_records_diff" + control_records_diff = get_and_write_diff( + request, + _get_filtered_sorted_records(control_records, target_pks, False, primary_key), + [], + control_records_diff_path_prefix, + ignore_order=False, + exclude_paths=EXCLUDE_PATHS, + ) + + target_records_diff_path_prefix = f"{stream}_target_records_diff" + target_records_diff = get_and_write_diff( + request, + [], + _get_filtered_sorted_records(target_records, control_pks, False, primary_key), + target_records_diff_path_prefix, + ignore_order=False, + exclude_paths=EXCLUDE_PATHS, + ) + + has_diff = record_diff or control_records_diff or target_records_diff + + if has_diff: + record_property( + f"{stream} stream: records with primary key in target & control whose values differ", + record_diff, + ) + record_property( + f"{stream} stream: records in control but not target", + control_records_diff, + ) + record_property( + f"{stream} stream: records in target but not control", + target_records_diff, + ) + + return (record_diff, control_records_diff, target_records_diff) + return None + + def _get_diff_on_stream_without_pk( + self, + request: SubRequest, + record_property: Callable, + stream: str, + control_records: List[AirbyteMessage], + target_records: List[AirbyteMessage], + ) -> Optional[Iterable[str]]: + diff = get_and_write_diff( + request, + [json.loads(r.record.json(sort_keys=True)) for r in control_records], + [json.loads(r.record.json(sort_keys=True)) for r in target_records], + f"{stream}_diff", + ignore_order=True, + exclude_paths=EXCLUDE_PATHS, + ) + if diff: + record_property(f"Diff for stream {stream}", diff) + return (diff,) + return None + + +def _get_filtered_sorted_records( + records: List[AirbyteMessage], + primary_key_set: set[Generator[Any, Any, None]], + include_target: bool, + primary_key: List[str], +) -> List[Dict]: + """ + Get a list of records sorted by primary key, and filtered as specified. + + For example, if `include_target` is true, we filter the records such that + only those whose primary key is in `primary_key_set` are returned. + If `include_target` is false, we only return records whose primary key + is not in `primary_key_set`. + """ + if include_target: + _filter = lambda x: x["data"].get(primary_key[0]) in primary_key_set + else: + _filter = lambda x: x["data"].get(primary_key[0]) not in primary_key_set + + return sorted( + filter( + _filter, + [json.loads(s.record.json(sort_keys=True)) for s in records], + ), + key=lambda x: x["data"][primary_key[0]], + ) diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/test_spec.py b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/test_spec.py new file mode 100644 index 0000000000000..a59247e69b14c --- /dev/null +++ b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/test_spec.py @@ -0,0 +1,41 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from typing import Callable + +import pytest +from airbyte_protocol.models import Type # type: ignore +from live_tests.commons.models import ExecutionResult + +from .utils import fail_test_on_failing_execution_results + +pytestmark = [ + pytest.mark.anyio, +] + + +async def test_spec_passes_on_both_versions( + record_property: Callable, + spec_control_execution_result: ExecutionResult, + spec_target_execution_result: ExecutionResult, +) -> None: + """This test runs the spec command on both the control and target connectors. + It makes sure that the spec command succeeds on both connectors by checking the presence of a SPEC message. + """ + fail_test_on_failing_execution_results( + record_property, + [ + spec_control_execution_result, + spec_target_execution_result, + ], + ) + + def has_spec(execution_result: ExecutionResult) -> bool: + for message in execution_result.airbyte_messages: + if message.type is Type.SPEC and message.spec: + return True + return False + + if not has_spec(spec_control_execution_result): + pytest.skip("The control spec did not succeed, we cannot compare the results.") + if not has_spec(spec_target_execution_result): + pytest.fail("The target spec did not succeed. Check the test artifacts for more information.") diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/utils.py b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/utils.py index e8b26038b0d95..331a18a05b10a 100644 --- a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/utils.py +++ b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/utils.py @@ -1,8 +1,27 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. +from __future__ import annotations -from typing import Iterable +import json +import logging +from pathlib import Path +from typing import TYPE_CHECKING, Callable, Dict, Iterable, List, Optional, Union +import pytest from airbyte_protocol.models import AirbyteMessage, Type # type: ignore +from deepdiff import DeepDiff # type: ignore +from live_tests.commons.models import ExecutionResult + +from . import stash_keys +from .consts import MAX_LINES_IN_REPORT + +if TYPE_CHECKING: + from _pytest.fixtures import SubRequest + +MAX_DIFF_SIZE_FOR_LOGGING = 500 + + +def get_test_logger(request: SubRequest) -> logging.Logger: + return logging.getLogger(request.node.name) def filter_records(messages: Iterable[AirbyteMessage]) -> Iterable[AirbyteMessage]: @@ -11,9 +30,94 @@ def filter_records(messages: Iterable[AirbyteMessage]) -> Iterable[AirbyteMessag yield message -def make_comparable_records( - record_messages: Iterable[AirbyteMessage], -) -> Iterable[AirbyteMessage]: - for message in record_messages: - message.record.emitted_at = 0 - yield message +def write_string_to_test_artifact(request: SubRequest, content: str, filename: str, subdir: Optional[Path] = None) -> Path: + test_artifact_directory = request.config.stash[stash_keys.TEST_ARTIFACT_DIRECTORY] + if subdir: + test_artifact_directory = test_artifact_directory / subdir + test_artifact_directory.mkdir(parents=True, exist_ok=True) + artifact_path = test_artifact_directory / filename + artifact_path.write_text(content) + return artifact_path + + +def get_and_write_diff( + request: SubRequest, + control_data: Union[List, Dict], + target_data: Union[List, Dict], + filepath: str, + ignore_order: bool, + exclude_paths: Optional[List[str]], +) -> str: + logger = get_test_logger(request) + diff = DeepDiff( + control_data, + target_data, + ignore_order=ignore_order, + report_repetition=True, + exclude_regex_paths=exclude_paths, + ) + if diff: + diff_json = diff.to_json() + parsed_diff = json.loads(diff_json) + formatted_diff_json = json.dumps(parsed_diff, indent=2) + + diff_path_tree = write_string_to_test_artifact(request, str(diff.tree), f"{filepath}_tree.txt", subdir=request.node.name) + diff_path_text = write_string_to_test_artifact( + request, + formatted_diff_json, + f"{filepath}_text.txt", + subdir=request.node.name, + ) + diff_path_pretty = write_string_to_test_artifact( + request, + str(diff.pretty()), + f"{filepath}_pretty.txt", + subdir=request.node.name, + ) + + logger.info(f"Diff file are stored in {diff_path_tree}, {diff_path_text}, and {diff_path_pretty}.") + if len(diff_json.encode("utf-8")) < MAX_DIFF_SIZE_FOR_LOGGING: + logger.error(formatted_diff_json) + + return formatted_diff_json + return "" + + +def fail_test_on_failing_execution_results(record_property: Callable, execution_results: List[ExecutionResult]) -> None: + error_messages = [] + for execution_result in execution_results: + if not execution_result.success: + property_suffix = f"of failing execution {execution_result.command.value} on {execution_result.connector_under_test.name}:{execution_result.connector_under_test.version} [{MAX_LINES_IN_REPORT} last lines]" + record_property( + f"Stdout {property_suffix}", + tail_file(execution_result.stdout_file_path, n=MAX_LINES_IN_REPORT), + ) + record_property( + f"Stderr of {property_suffix}", + tail_file(execution_result.stderr_file_path, n=MAX_LINES_IN_REPORT), + ) + error_messages.append( + f"Failed executing command {execution_result.command} on {execution_result.connector_under_test.name}:{execution_result.connector_under_test.version}" + ) + if error_messages: + pytest.fail("\n".join(error_messages)) + + +def tail_file(file_path: Path, n: int = MAX_LINES_IN_REPORT) -> List[str]: + with open(file_path, "r") as f: + # Move the cursor to the end of the file + f.seek(0, 2) + file_size = f.tell() + lines: List[str] = [] + read_size = min(4096, file_size) + cursor = file_size - read_size + + # Read chunks of the file until we've found n lines + while len(lines) < n and cursor >= 0: + f.seek(cursor) + chunk = f.read(read_size) + lines.extend(chunk.splitlines(True)[-n:]) + cursor -= read_size + + # Return the last n lines + return lines[-n:] diff --git a/airbyte-ci/connectors/live-tests/tests/backends/test_file_backend.py b/airbyte-ci/connectors/live-tests/tests/backends/test_file_backend.py index 0de07435efb45..d7dc61b831b09 100644 --- a/airbyte-ci/connectors/live-tests/tests/backends/test_file_backend.py +++ b/airbyte-ci/connectors/live-tests/tests/backends/test_file_backend.py @@ -16,7 +16,6 @@ from live_tests.commons.backends import FileBackend -@pytest.mark.asyncio @pytest.mark.parametrize( "messages, expected_writes", [ @@ -47,7 +46,7 @@ '{"status": "SUCCEEDED", "message": null}\n', ), ( - "test_stream_records.jsonl", + "records.jsonl", '{"namespace": null, "stream": "test_stream", "data": {}, "meta": null}\n', ), ( @@ -55,16 +54,16 @@ '{"documentationUrl": null, "changelogUrl": null, "connectionSpecification": {}, "supportsIncremental": null, "supportsNormalization": false, "supportsDBT": false, "supported_destination_sync_modes": null, "advanced_auth": null, "protocol_version": null}\n', ), ( - "_global_states.jsonl", + "states.jsonl", '{"type": null, "stream": null, "global_": null, "data": {"test": "value"}, "sourceStats": null, "destinationStats": null}\n', ), ], ), ], ) -async def test_write(tmp_path, messages, expected_writes): +def test_write(tmp_path, messages, expected_writes): backend = FileBackend(tmp_path) - await backend.write(messages) + backend.write(messages) for expected_file, expected_content in expected_writes: expected_path = Path(tmp_path / expected_file) assert expected_path.exists() diff --git a/airbyte-ci/connectors/metadata_service/README.md b/airbyte-ci/connectors/metadata_service/README.md index 35438f6dbf9c4..1bcf9192a3f8c 100644 --- a/airbyte-ci/connectors/metadata_service/README.md +++ b/airbyte-ci/connectors/metadata_service/README.md @@ -1,7 +1,9 @@ # Metadata Service -This is the begining of metadata service for airbyte. + +Airbyte Metadata Service manages the Airbyte Connector Registry. This system is responsible for the following: + - Validating Connector metadata - Storing Connector metadata in GCS - Serving Connector metadata to various consumers @@ -9,4 +11,7 @@ This system is responsible for the following: - Triggering actions based on changes to Connector metadata ## Subsystems -- [Metadata Orchestrator](./orchestrator/README.md) \ No newline at end of file + +- [Metadata Lib](./lib) responsible for preparing and validating connector metadata. +- [Metadata Orchestrator](./orchestrator) responsible for gathering metadata into the registry, + using Dagster. diff --git a/airbyte-ci/connectors/metadata_service/lib/poetry.lock b/airbyte-ci/connectors/metadata_service/lib/poetry.lock index bdaafe3ae471b..5ee4040fe397c 100644 --- a/airbyte-ci/connectors/metadata_service/lib/poetry.lock +++ b/airbyte-ci/connectors/metadata_service/lib/poetry.lock @@ -1,14 +1,14 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "argcomplete" -version = "3.1.1" +version = "3.2.3" description = "Bash tab completion for argparse" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "argcomplete-3.1.1-py3-none-any.whl", hash = "sha256:35fa893a88deea85ea7b20d241100e64516d6af6d7b0ae2bed1d263d26f70948"}, - {file = "argcomplete-3.1.1.tar.gz", hash = "sha256:6c4c563f14f01440aaffa3eae13441c5db2357b5eec639abe7c0b15334627dff"}, + {file = "argcomplete-3.2.3-py3-none-any.whl", hash = "sha256:c12355e0494c76a2a7b73e3a59b09024ca0ba1e279fb9ed6c1b82d5b74b6a70c"}, + {file = "argcomplete-3.2.3.tar.gz", hash = "sha256:bf7900329262e481be5a15f56f19736b376df6f82ed27576fa893652c5de6c23"}, ] [package.extras] @@ -16,69 +16,73 @@ test = ["coverage", "mypy", "pexpect", "ruff", "wheel"] [[package]] name = "attrs" -version = "23.1.0" +version = "23.2.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" files = [ - {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, - {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, ] [package.extras] cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]", "pre-commit"] +dev = ["attrs[tests]", "pre-commit"] docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] [[package]] name = "beautifulsoup4" -version = "4.12.2" +version = "4.12.3" description = "Screen-scraping library" optional = false python-versions = ">=3.6.0" files = [ - {file = "beautifulsoup4-4.12.2-py3-none-any.whl", hash = "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a"}, - {file = "beautifulsoup4-4.12.2.tar.gz", hash = "sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da"}, + {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, + {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, ] [package.dependencies] soupsieve = ">1.2" [package.extras] +cchardet = ["cchardet"] +chardet = ["chardet"] +charset-normalizer = ["charset-normalizer"] html5lib = ["html5lib"] lxml = ["lxml"] [[package]] name = "black" -version = "23.7.0" +version = "24.3.0" description = "The uncompromising code formatter." optional = false python-versions = ">=3.8" files = [ - {file = "black-23.7.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:5c4bc552ab52f6c1c506ccae05681fab58c3f72d59ae6e6639e8885e94fe2587"}, - {file = "black-23.7.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:552513d5cd5694590d7ef6f46e1767a4df9af168d449ff767b13b084c020e63f"}, - {file = "black-23.7.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:86cee259349b4448adb4ef9b204bb4467aae74a386bce85d56ba4f5dc0da27be"}, - {file = "black-23.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:501387a9edcb75d7ae8a4412bb8749900386eaef258f1aefab18adddea1936bc"}, - {file = "black-23.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb074d8b213749fa1d077d630db0d5f8cc3b2ae63587ad4116e8a436e9bbe995"}, - {file = "black-23.7.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b5b0ee6d96b345a8b420100b7d71ebfdd19fab5e8301aff48ec270042cd40ac2"}, - {file = "black-23.7.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:893695a76b140881531062d48476ebe4a48f5d1e9388177e175d76234ca247cd"}, - {file = "black-23.7.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:c333286dc3ddca6fdff74670b911cccedacb4ef0a60b34e491b8a67c833b343a"}, - {file = "black-23.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:831d8f54c3a8c8cf55f64d0422ee875eecac26f5f649fb6c1df65316b67c8926"}, - {file = "black-23.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:7f3bf2dec7d541b4619b8ce526bda74a6b0bffc480a163fed32eb8b3c9aed8ad"}, - {file = "black-23.7.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:f9062af71c59c004cd519e2fb8f5d25d39e46d3af011b41ab43b9c74e27e236f"}, - {file = "black-23.7.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:01ede61aac8c154b55f35301fac3e730baf0c9cf8120f65a9cd61a81cfb4a0c3"}, - {file = "black-23.7.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:327a8c2550ddc573b51e2c352adb88143464bb9d92c10416feb86b0f5aee5ff6"}, - {file = "black-23.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1c6022b86f83b632d06f2b02774134def5d4d4f1dac8bef16d90cda18ba28a"}, - {file = "black-23.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:27eb7a0c71604d5de083757fbdb245b1a4fae60e9596514c6ec497eb63f95320"}, - {file = "black-23.7.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:8417dbd2f57b5701492cd46edcecc4f9208dc75529bcf76c514864e48da867d9"}, - {file = "black-23.7.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:47e56d83aad53ca140da0af87678fb38e44fd6bc0af71eebab2d1f59b1acf1d3"}, - {file = "black-23.7.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:25cc308838fe71f7065df53aedd20327969d05671bac95b38fdf37ebe70ac087"}, - {file = "black-23.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:642496b675095d423f9b8448243336f8ec71c9d4d57ec17bf795b67f08132a91"}, - {file = "black-23.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:ad0014efc7acf0bd745792bd0d8857413652979200ab924fbf239062adc12491"}, - {file = "black-23.7.0-py3-none-any.whl", hash = "sha256:9fd59d418c60c0348505f2ddf9609c1e1de8e7493eab96198fc89d9f865e7a96"}, - {file = "black-23.7.0.tar.gz", hash = "sha256:022a582720b0d9480ed82576c920a8c1dde97cc38ff11d8d8859b3bd6ca9eedb"}, + {file = "black-24.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7d5e026f8da0322b5662fa7a8e752b3fa2dac1c1cbc213c3d7ff9bdd0ab12395"}, + {file = "black-24.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9f50ea1132e2189d8dff0115ab75b65590a3e97de1e143795adb4ce317934995"}, + {file = "black-24.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2af80566f43c85f5797365077fb64a393861a3730bd110971ab7a0c94e873e7"}, + {file = "black-24.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:4be5bb28e090456adfc1255e03967fb67ca846a03be7aadf6249096100ee32d0"}, + {file = "black-24.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4f1373a7808a8f135b774039f61d59e4be7eb56b2513d3d2f02a8b9365b8a8a9"}, + {file = "black-24.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:aadf7a02d947936ee418777e0247ea114f78aff0d0959461057cae8a04f20597"}, + {file = "black-24.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c02e4ea2ae09d16314d30912a58ada9a5c4fdfedf9512d23326128ac08ac3d"}, + {file = "black-24.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:bf21b7b230718a5f08bd32d5e4f1db7fc8788345c8aea1d155fc17852b3410f5"}, + {file = "black-24.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:2818cf72dfd5d289e48f37ccfa08b460bf469e67fb7c4abb07edc2e9f16fb63f"}, + {file = "black-24.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4acf672def7eb1725f41f38bf6bf425c8237248bb0804faa3965c036f7672d11"}, + {file = "black-24.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7ed6668cbbfcd231fa0dc1b137d3e40c04c7f786e626b405c62bcd5db5857e4"}, + {file = "black-24.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:56f52cfbd3dabe2798d76dbdd299faa046a901041faf2cf33288bc4e6dae57b5"}, + {file = "black-24.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:79dcf34b33e38ed1b17434693763301d7ccbd1c5860674a8f871bd15139e7837"}, + {file = "black-24.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e19cb1c6365fd6dc38a6eae2dcb691d7d83935c10215aef8e6c38edee3f77abd"}, + {file = "black-24.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b76c275e4c1c5ce6e9870911384bff5ca31ab63d19c76811cb1fb162678213"}, + {file = "black-24.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:b5991d523eee14756f3c8d5df5231550ae8993e2286b8014e2fdea7156ed0959"}, + {file = "black-24.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c45f8dff244b3c431b36e3224b6be4a127c6aca780853574c00faf99258041eb"}, + {file = "black-24.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6905238a754ceb7788a73f02b45637d820b2f5478b20fec82ea865e4f5d4d9f7"}, + {file = "black-24.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7de8d330763c66663661a1ffd432274a2f92f07feeddd89ffd085b5744f85e7"}, + {file = "black-24.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:7bb041dca0d784697af4646d3b62ba4a6b028276ae878e53f6b4f74ddd6db99f"}, + {file = "black-24.3.0-py3-none-any.whl", hash = "sha256:41622020d7120e01d377f74249e677039d20e6344ff5851de8a10f11f513bf93"}, + {file = "black-24.3.0.tar.gz", hash = "sha256:a0c9c4a0771afc6919578cec71ce82a3e31e054904e7197deacbc9382671c41f"}, ] [package.dependencies] @@ -88,34 +92,34 @@ packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "cachetools" -version = "5.3.1" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.1-py3-none-any.whl", hash = "sha256:95ef631eeaea14ba2e36f06437f36463aac3a096799e876ee55e5cdccb102590"}, - {file = "cachetools-5.3.1.tar.gz", hash = "sha256:dce83f2d9b4e1f732a8cd44af8e8fab2dbe46201467fc98b3ef8f269092bf62b"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] name = "certifi" -version = "2023.7.22" +version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, - {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] [[package]] @@ -131,97 +135,112 @@ files = [ [[package]] name = "charset-normalizer" -version = "3.2.0" +version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, - {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] [[package]] name = "click" -version = "8.1.6" +version = "8.1.7" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" files = [ - {file = "click-8.1.6-py3-none-any.whl", hash = "sha256:fa244bb30b3b5ee2cae3da8f55c9e5e0c0e86093306301fb418eb9dc40fbded5"}, - {file = "click-8.1.6.tar.gz", hash = "sha256:48ee849951919527a045bfe3bf7baa8a959c423134e1a5b98c05c20ba75a1cbd"}, + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, ] [package.dependencies] @@ -276,32 +295,33 @@ http = ["httpx"] [[package]] name = "dnspython" -version = "2.4.1" +version = "2.6.1" description = "DNS toolkit" optional = false -python-versions = ">=3.8,<4.0" +python-versions = ">=3.8" files = [ - {file = "dnspython-2.4.1-py3-none-any.whl", hash = "sha256:5b7488477388b8c0b70a8ce93b227c5603bc7b77f1565afe8e729c36c51447d7"}, - {file = "dnspython-2.4.1.tar.gz", hash = "sha256:c33971c79af5be968bb897e95c2448e11a645ee84d93b265ce0b7aabe5dfdca8"}, + {file = "dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"}, + {file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"}, ] [package.extras] -dnssec = ["cryptography (>=2.6,<42.0)"] -doh = ["h2 (>=4.1.0)", "httpcore (>=0.17.3)", "httpx (>=0.24.1)"] -doq = ["aioquic (>=0.9.20)"] -idna = ["idna (>=2.1,<4.0)"] -trio = ["trio (>=0.14,<0.23)"] -wmi = ["wmi (>=1.5.1,<2.0.0)"] +dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "sphinx (>=7.2.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] +dnssec = ["cryptography (>=41)"] +doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] +doq = ["aioquic (>=0.9.25)"] +idna = ["idna (>=3.6)"] +trio = ["trio (>=0.23)"] +wmi = ["wmi (>=1.5.1)"] [[package]] name = "email-validator" -version = "2.0.0.post2" +version = "2.1.1" description = "A robust email address syntax and deliverability validation library." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "email_validator-2.0.0.post2-py3-none-any.whl", hash = "sha256:2466ba57cda361fb7309fd3d5a225723c788ca4bbad32a0ebd5373b99730285c"}, - {file = "email_validator-2.0.0.post2.tar.gz", hash = "sha256:1ff6e86044200c56ae23595695c54e9614f4a9551e0e393614f764860b3d7900"}, + {file = "email_validator-2.1.1-py3-none-any.whl", hash = "sha256:97d882d174e2a65732fb43bfce81a3a834cbc1bde8bf419e30ef5ea976370a05"}, + {file = "email_validator-2.1.1.tar.gz", hash = "sha256:200a70680ba08904be6d1eef729205cc0d687634399a5924d842533efb824b84"}, ] [package.dependencies] @@ -310,13 +330,13 @@ idna = ">=2.0.0" [[package]] name = "exceptiongroup" -version = "1.1.2" +version = "1.2.0" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.1.2-py3-none-any.whl", hash = "sha256:e346e69d186172ca7cf029c8c1d16235aa0e04035e5750b4b95039e65204328f"}, - {file = "exceptiongroup-1.1.2.tar.gz", hash = "sha256:12c3e887d6485d16943a309616de20ae5582633e0a2eda17f4e10fd61c1e8af5"}, + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, ] [package.extras] @@ -324,12 +344,13 @@ test = ["pytest (>=6)"] [[package]] name = "future" -version = "0.18.3" +version = "1.0.0" description = "Clean single-source support for Python 3 and 2" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ - {file = "future-0.18.3.tar.gz", hash = "sha256:34a17436ed1e96697a86f9de3d15a3b0be01d8bc8de9c1dffd59fb8234ed5307"}, + {file = "future-1.0.0-py3-none-any.whl", hash = "sha256:929292d34f5872e70396626ef385ec22355a1fae8ad29e1a734c3e43f9fbc216"}, + {file = "future-1.0.0.tar.gz", hash = "sha256:bd2968309307861edae1458a4f8a4f3598c03be43b97521076aebf5d94c07b05"}, ] [[package]] @@ -416,43 +437,41 @@ beautifulsoup4 = "*" [[package]] name = "google-api-core" -version = "2.11.1" +version = "2.8.0" description = "Google API client core library" optional = false -python-versions = ">=3.7" +python-versions = ">=3.6" files = [ - {file = "google-api-core-2.11.1.tar.gz", hash = "sha256:25d29e05a0058ed5f19c61c0a78b1b53adea4d9364b464d014fbda941f6d1c9a"}, - {file = "google_api_core-2.11.1-py3-none-any.whl", hash = "sha256:d92a5a92dc36dd4f4b9ee4e55528a90e432b059f93aee6ad857f9de8cc7ae94a"}, + {file = "google-api-core-2.8.0.tar.gz", hash = "sha256:065bb8e11c605fd232707ae50963dc1c8af5b3c95b4568887515985e6c1156b3"}, + {file = "google_api_core-2.8.0-py3-none-any.whl", hash = "sha256:1b9f59236ce1bae9a687c1d4f22957e79a2669e53d032893f6bf0fca54f6931d"}, ] [package.dependencies] -google-auth = ">=2.14.1,<3.0.dev0" -googleapis-common-protos = ">=1.56.2,<2.0.dev0" -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" -requests = ">=2.18.0,<3.0.0.dev0" +google-auth = ">=1.25.0,<3.0dev" +googleapis-common-protos = ">=1.52.0,<2.0dev" +protobuf = ">=3.12.0" +requests = ">=2.18.0,<3.0.0dev" [package.extras] -grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"] -grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] -grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] +grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio-status (>=1.33.2,<2.0dev)"] +grpcgcp = ["grpcio-gcp (>=0.2.2)"] +grpcio-gcp = ["grpcio-gcp (>=0.2.2)"] [[package]] name = "google-auth" -version = "2.22.0" +version = "2.29.0" description = "Google Authentication Library" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "google-auth-2.22.0.tar.gz", hash = "sha256:164cba9af4e6e4e40c3a4f90a1a6c12ee56f14c0b4868d1ca91b32826ab334ce"}, - {file = "google_auth-2.22.0-py2.py3-none-any.whl", hash = "sha256:d61d1b40897407b574da67da1a833bdc10d5a11642566e506565d1b1a46ba873"}, + {file = "google-auth-2.29.0.tar.gz", hash = "sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360"}, + {file = "google_auth-2.29.0-py2.py3-none-any.whl", hash = "sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415"}, ] [package.dependencies] cachetools = ">=2.0.0,<6.0" pyasn1-modules = ">=0.2.1" rsa = ">=3.1.4,<5" -six = ">=1.9.0" -urllib3 = "<2.0" [package.extras] aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] @@ -463,13 +482,13 @@ requests = ["requests (>=2.20.0,<3.0.0.dev0)"] [[package]] name = "google-cloud-core" -version = "2.3.3" +version = "2.4.1" description = "Google Cloud API client core library" optional = false python-versions = ">=3.7" files = [ - {file = "google-cloud-core-2.3.3.tar.gz", hash = "sha256:37b80273c8d7eee1ae816b3a20ae43585ea50506cb0e60f3cf5be5f87f1373cb"}, - {file = "google_cloud_core-2.3.3-py2.py3-none-any.whl", hash = "sha256:fbd11cad3e98a7e5b0343dc07cb1039a5ffd7a5bb96e1f1e27cee4bda4a90863"}, + {file = "google-cloud-core-2.4.1.tar.gz", hash = "sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073"}, + {file = "google_cloud_core-2.4.1-py2.py3-none-any.whl", hash = "sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61"}, ] [package.dependencies] @@ -477,24 +496,25 @@ google-api-core = ">=1.31.6,<2.0.dev0 || >2.3.0,<3.0.0dev" google-auth = ">=1.25.0,<3.0dev" [package.extras] -grpc = ["grpcio (>=1.38.0,<2.0dev)"] +grpc = ["grpcio (>=1.38.0,<2.0dev)", "grpcio-status (>=1.38.0,<2.0.dev0)"] [[package]] name = "google-cloud-storage" -version = "2.10.0" +version = "2.14.0" description = "Google Cloud Storage API client library" optional = false python-versions = ">=3.7" files = [ - {file = "google-cloud-storage-2.10.0.tar.gz", hash = "sha256:934b31ead5f3994e5360f9ff5750982c5b6b11604dc072bc452c25965e076dc7"}, - {file = "google_cloud_storage-2.10.0-py2.py3-none-any.whl", hash = "sha256:9433cf28801671de1c80434238fb1e7e4a1ba3087470e90f70c928ea77c2b9d7"}, + {file = "google-cloud-storage-2.14.0.tar.gz", hash = "sha256:2d23fcf59b55e7b45336729c148bb1c464468c69d5efbaee30f7201dd90eb97e"}, + {file = "google_cloud_storage-2.14.0-py2.py3-none-any.whl", hash = "sha256:8641243bbf2a2042c16a6399551fbb13f062cbc9a2de38d6c0bb5426962e9dbd"}, ] [package.dependencies] google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0dev" -google-auth = ">=1.25.0,<3.0dev" +google-auth = ">=2.23.3,<3.0dev" google-cloud-core = ">=2.3.0,<3.0dev" -google-resumable-media = ">=2.3.2" +google-crc32c = ">=1.0,<2.0dev" +google-resumable-media = ">=2.6.0" requests = ">=2.18.0,<3.0.0dev" [package.extras] @@ -599,38 +619,38 @@ protobuf = ">=3.0.0b3" [[package]] name = "google-resumable-media" -version = "2.5.0" +version = "2.7.0" description = "Utilities for Google Media Downloads and Resumable Uploads" optional = false python-versions = ">= 3.7" files = [ - {file = "google-resumable-media-2.5.0.tar.gz", hash = "sha256:218931e8e2b2a73a58eb354a288e03a0fd5fb1c4583261ac6e4c078666468c93"}, - {file = "google_resumable_media-2.5.0-py2.py3-none-any.whl", hash = "sha256:da1bd943e2e114a56d85d6848497ebf9be6a14d3db23e9fc57581e7c3e8170ec"}, + {file = "google-resumable-media-2.7.0.tar.gz", hash = "sha256:5f18f5fa9836f4b083162064a1c2c98c17239bfda9ca50ad970ccf905f3e625b"}, + {file = "google_resumable_media-2.7.0-py2.py3-none-any.whl", hash = "sha256:79543cfe433b63fd81c0844b7803aba1bb8950b47bedf7d980c38fa123937e08"}, ] [package.dependencies] google-crc32c = ">=1.0,<2.0dev" [package.extras] -aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)"] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)", "google-auth (>=1.22.0,<2.0dev)"] requests = ["requests (>=2.18.0,<3.0.0dev)"] [[package]] name = "googleapis-common-protos" -version = "1.60.0" +version = "1.56.1" description = "Common protobufs used in Google APIs" optional = false -python-versions = ">=3.7" +python-versions = ">=3.6" files = [ - {file = "googleapis-common-protos-1.60.0.tar.gz", hash = "sha256:e73ebb404098db405ba95d1e1ae0aa91c3e15a71da031a2eeb6b2e23e7bc3708"}, - {file = "googleapis_common_protos-1.60.0-py2.py3-none-any.whl", hash = "sha256:69f9bbcc6acde92cab2db95ce30a70bd2b81d20b12eff3f1aabaffcbe8a93918"}, + {file = "googleapis-common-protos-1.56.1.tar.gz", hash = "sha256:6b5ee59dc646eb61a8eb65ee1db186d3df6687c8804830024f32573298bca19b"}, + {file = "googleapis_common_protos-1.56.1-py2.py3-none-any.whl", hash = "sha256:ddcd955b5bb6589368f659fa475373faa1ed7d09cde5ba25e88513d87007e174"}, ] [package.dependencies] -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" +protobuf = ">=3.15.0" [package.extras] -grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] +grpc = ["grpcio (>=1.0.0)"] [[package]] name = "grpc-google-logging-v2" @@ -664,60 +684,69 @@ oauth2client = ">=1.4.11" [[package]] name = "grpcio" -version = "1.56.2" +version = "1.62.1" description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.7" files = [ - {file = "grpcio-1.56.2-cp310-cp310-linux_armv7l.whl", hash = "sha256:bf0b9959e673505ee5869950642428046edb91f99942607c2ecf635f8a4b31c9"}, - {file = "grpcio-1.56.2-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:5144feb20fe76e73e60c7d73ec3bf54f320247d1ebe737d10672480371878b48"}, - {file = "grpcio-1.56.2-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:a72797549935c9e0b9bc1def1768c8b5a709538fa6ab0678e671aec47ebfd55e"}, - {file = "grpcio-1.56.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3f3237a57e42f79f1e560726576aedb3a7ef931f4e3accb84ebf6acc485d316"}, - {file = "grpcio-1.56.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:900bc0096c2ca2d53f2e5cebf98293a7c32f532c4aeb926345e9747452233950"}, - {file = "grpcio-1.56.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:97e0efaebbfd222bcaac2f1735c010c1d3b167112d9d237daebbeedaaccf3d1d"}, - {file = "grpcio-1.56.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c0c85c5cbe8b30a32fa6d802588d55ffabf720e985abe9590c7c886919d875d4"}, - {file = "grpcio-1.56.2-cp310-cp310-win32.whl", hash = "sha256:06e84ad9ae7668a109e970c7411e7992751a116494cba7c4fb877656527f9a57"}, - {file = "grpcio-1.56.2-cp310-cp310-win_amd64.whl", hash = "sha256:10954662f77dc36c9a1fb5cc4a537f746580d6b5734803be1e587252682cda8d"}, - {file = "grpcio-1.56.2-cp311-cp311-linux_armv7l.whl", hash = "sha256:c435f5ce1705de48e08fcbcfaf8aee660d199c90536e3e06f2016af7d6a938dd"}, - {file = "grpcio-1.56.2-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:6108e5933eb8c22cd3646e72d5b54772c29f57482fd4c41a0640aab99eb5071d"}, - {file = "grpcio-1.56.2-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:8391cea5ce72f4a12368afd17799474015d5d3dc00c936a907eb7c7eaaea98a5"}, - {file = "grpcio-1.56.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:750de923b456ca8c0f1354d6befca45d1f3b3a789e76efc16741bd4132752d95"}, - {file = "grpcio-1.56.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fda2783c12f553cdca11c08e5af6eecbd717280dc8fbe28a110897af1c15a88c"}, - {file = "grpcio-1.56.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9e04d4e4cfafa7c5264e535b5d28e786f0571bea609c3f0aaab13e891e933e9c"}, - {file = "grpcio-1.56.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:89a49cc5ad08a38b6141af17e00d1dd482dc927c7605bc77af457b5a0fca807c"}, - {file = "grpcio-1.56.2-cp311-cp311-win32.whl", hash = "sha256:6a007a541dff984264981fbafeb052bfe361db63578948d857907df9488d8774"}, - {file = "grpcio-1.56.2-cp311-cp311-win_amd64.whl", hash = "sha256:af4063ef2b11b96d949dccbc5a987272f38d55c23c4c01841ea65a517906397f"}, - {file = "grpcio-1.56.2-cp37-cp37m-linux_armv7l.whl", hash = "sha256:a6ff459dac39541e6a2763a4439c4ca6bc9ecb4acc05a99b79246751f9894756"}, - {file = "grpcio-1.56.2-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:f20fd21f7538f8107451156dd1fe203300b79a9ddceba1ee0ac8132521a008ed"}, - {file = "grpcio-1.56.2-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:d1fbad1f9077372b6587ec589c1fc120b417b6c8ad72d3e3cc86bbbd0a3cee93"}, - {file = "grpcio-1.56.2-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ee26e9dfb3996aff7c870f09dc7ad44a5f6732b8bdb5a5f9905737ac6fd4ef1"}, - {file = "grpcio-1.56.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4c60abd950d6de3e4f1ddbc318075654d275c29c846ab6a043d6ed2c52e4c8c"}, - {file = "grpcio-1.56.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1c31e52a04e62c8577a7bf772b3e7bed4df9c9e0dd90f92b6ffa07c16cab63c9"}, - {file = "grpcio-1.56.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:345356b307cce5d14355e8e055b4ca5f99bc857c33a3dc1ddbc544fca9cd0475"}, - {file = "grpcio-1.56.2-cp37-cp37m-win_amd64.whl", hash = "sha256:42e63904ee37ae46aa23de50dac8b145b3596f43598fa33fe1098ab2cbda6ff5"}, - {file = "grpcio-1.56.2-cp38-cp38-linux_armv7l.whl", hash = "sha256:7c5ede2e2558f088c49a1ddda19080e4c23fb5d171de80a726b61b567e3766ed"}, - {file = "grpcio-1.56.2-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:33971197c47965cc1d97d78d842163c283e998223b151bab0499b951fd2c0b12"}, - {file = "grpcio-1.56.2-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:d39f5d4af48c138cb146763eda14eb7d8b3ccbbec9fe86fb724cd16e0e914c64"}, - {file = "grpcio-1.56.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ded637176addc1d3eef35331c39acc598bac550d213f0a1bedabfceaa2244c87"}, - {file = "grpcio-1.56.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c90da4b124647547a68cf2f197174ada30c7bb9523cb976665dfd26a9963d328"}, - {file = "grpcio-1.56.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3ccb621749a81dc7755243665a70ce45536ec413ef5818e013fe8dfbf5aa497b"}, - {file = "grpcio-1.56.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4eb37dd8dd1aa40d601212afa27ca5be255ba792e2e0b24d67b8af5e012cdb7d"}, - {file = "grpcio-1.56.2-cp38-cp38-win32.whl", hash = "sha256:ddb4a6061933bd9332b74eac0da25f17f32afa7145a33a0f9711ad74f924b1b8"}, - {file = "grpcio-1.56.2-cp38-cp38-win_amd64.whl", hash = "sha256:8940d6de7068af018dfa9a959a3510e9b7b543f4c405e88463a1cbaa3b2b379a"}, - {file = "grpcio-1.56.2-cp39-cp39-linux_armv7l.whl", hash = "sha256:51173e8fa6d9a2d85c14426bdee5f5c4a0654fd5fddcc21fe9d09ab0f6eb8b35"}, - {file = "grpcio-1.56.2-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:373b48f210f43327a41e397391715cd11cfce9ded2fe76a5068f9bacf91cc226"}, - {file = "grpcio-1.56.2-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:42a3bbb2bc07aef72a7d97e71aabecaf3e4eb616d39e5211e2cfe3689de860ca"}, - {file = "grpcio-1.56.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5344be476ac37eb9c9ad09c22f4ea193c1316bf074f1daf85bddb1b31fda5116"}, - {file = "grpcio-1.56.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3fa3ab0fb200a2c66493828ed06ccd1a94b12eddbfb985e7fd3e5723ff156c6"}, - {file = "grpcio-1.56.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b975b85d1d5efc36cf8b237c5f3849b64d1ba33d6282f5e991f28751317504a1"}, - {file = "grpcio-1.56.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cbdf2c498e077282cd427cfd88bdce4668019791deef0be8155385ab2ba7837f"}, - {file = "grpcio-1.56.2-cp39-cp39-win32.whl", hash = "sha256:139f66656a762572ae718fa0d1f2dce47c05e9fbf7a16acd704c354405b97df9"}, - {file = "grpcio-1.56.2-cp39-cp39-win_amd64.whl", hash = "sha256:830215173ad45d670140ff99aac3b461f9be9a6b11bee1a17265aaaa746a641a"}, - {file = "grpcio-1.56.2.tar.gz", hash = "sha256:0ff789ae7d8ddd76d2ac02e7d13bfef6fc4928ac01e1dcaa182be51b6bcc0aaa"}, + {file = "grpcio-1.62.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:179bee6f5ed7b5f618844f760b6acf7e910988de77a4f75b95bbfaa8106f3c1e"}, + {file = "grpcio-1.62.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:48611e4fa010e823ba2de8fd3f77c1322dd60cb0d180dc6630a7e157b205f7ea"}, + {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:b2a0e71b0a2158aa4bce48be9f8f9eb45cbd17c78c7443616d00abbe2a509f6d"}, + {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fbe80577c7880911d3ad65e5ecc997416c98f354efeba2f8d0f9112a67ed65a5"}, + {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58f6c693d446964e3292425e1d16e21a97a48ba9172f2d0df9d7b640acb99243"}, + {file = "grpcio-1.62.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:77c339403db5a20ef4fed02e4d1a9a3d9866bf9c0afc77a42234677313ea22f3"}, + {file = "grpcio-1.62.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b5a4ea906db7dec694098435d84bf2854fe158eb3cd51e1107e571246d4d1d70"}, + {file = "grpcio-1.62.1-cp310-cp310-win32.whl", hash = "sha256:4187201a53f8561c015bc745b81a1b2d278967b8de35f3399b84b0695e281d5f"}, + {file = "grpcio-1.62.1-cp310-cp310-win_amd64.whl", hash = "sha256:844d1f3fb11bd1ed362d3fdc495d0770cfab75761836193af166fee113421d66"}, + {file = "grpcio-1.62.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:833379943d1728a005e44103f17ecd73d058d37d95783eb8f0b28ddc1f54d7b2"}, + {file = "grpcio-1.62.1-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:c7fcc6a32e7b7b58f5a7d27530669337a5d587d4066060bcb9dee7a8c833dfb7"}, + {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:fa7d28eb4d50b7cbe75bb8b45ed0da9a1dc5b219a0af59449676a29c2eed9698"}, + {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48f7135c3de2f298b833be8b4ae20cafe37091634e91f61f5a7eb3d61ec6f660"}, + {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71f11fd63365ade276c9d4a7b7df5c136f9030e3457107e1791b3737a9b9ed6a"}, + {file = "grpcio-1.62.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4b49fd8fe9f9ac23b78437da94c54aa7e9996fbb220bac024a67469ce5d0825f"}, + {file = "grpcio-1.62.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:482ae2ae78679ba9ed5752099b32e5fe580443b4f798e1b71df412abf43375db"}, + {file = "grpcio-1.62.1-cp311-cp311-win32.whl", hash = "sha256:1faa02530b6c7426404372515fe5ddf66e199c2ee613f88f025c6f3bd816450c"}, + {file = "grpcio-1.62.1-cp311-cp311-win_amd64.whl", hash = "sha256:5bd90b8c395f39bc82a5fb32a0173e220e3f401ff697840f4003e15b96d1befc"}, + {file = "grpcio-1.62.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:b134d5d71b4e0837fff574c00e49176051a1c532d26c052a1e43231f252d813b"}, + {file = "grpcio-1.62.1-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:d1f6c96573dc09d50dbcbd91dbf71d5cf97640c9427c32584010fbbd4c0e0037"}, + {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:359f821d4578f80f41909b9ee9b76fb249a21035a061a327f91c953493782c31"}, + {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a485f0c2010c696be269184bdb5ae72781344cb4e60db976c59d84dd6354fac9"}, + {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b50b09b4dc01767163d67e1532f948264167cd27f49e9377e3556c3cba1268e1"}, + {file = "grpcio-1.62.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3227c667dccbe38f2c4d943238b887bac588d97c104815aecc62d2fd976e014b"}, + {file = "grpcio-1.62.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3952b581eb121324853ce2b191dae08badb75cd493cb4e0243368aa9e61cfd41"}, + {file = "grpcio-1.62.1-cp312-cp312-win32.whl", hash = "sha256:83a17b303425104d6329c10eb34bba186ffa67161e63fa6cdae7776ff76df73f"}, + {file = "grpcio-1.62.1-cp312-cp312-win_amd64.whl", hash = "sha256:6696ffe440333a19d8d128e88d440f91fb92c75a80ce4b44d55800e656a3ef1d"}, + {file = "grpcio-1.62.1-cp37-cp37m-linux_armv7l.whl", hash = "sha256:e3393b0823f938253370ebef033c9fd23d27f3eae8eb9a8f6264900c7ea3fb5a"}, + {file = "grpcio-1.62.1-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:83e7ccb85a74beaeae2634f10eb858a0ed1a63081172649ff4261f929bacfd22"}, + {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:882020c87999d54667a284c7ddf065b359bd00251fcd70279ac486776dbf84ec"}, + {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a10383035e864f386fe096fed5c47d27a2bf7173c56a6e26cffaaa5a361addb1"}, + {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:960edebedc6b9ada1ef58e1c71156f28689978188cd8cff3b646b57288a927d9"}, + {file = "grpcio-1.62.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:23e2e04b83f347d0aadde0c9b616f4726c3d76db04b438fd3904b289a725267f"}, + {file = "grpcio-1.62.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:978121758711916d34fe57c1f75b79cdfc73952f1481bb9583399331682d36f7"}, + {file = "grpcio-1.62.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9084086190cc6d628f282e5615f987288b95457292e969b9205e45b442276407"}, + {file = "grpcio-1.62.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:22bccdd7b23c420a27fd28540fb5dcbc97dc6be105f7698cb0e7d7a420d0e362"}, + {file = "grpcio-1.62.1-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:8999bf1b57172dbc7c3e4bb3c732658e918f5c333b2942243f10d0d653953ba9"}, + {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:d9e52558b8b8c2f4ac05ac86344a7417ccdd2b460a59616de49eb6933b07a0bd"}, + {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1714e7bc935780bc3de1b3fcbc7674209adf5208ff825799d579ffd6cd0bd505"}, + {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8842ccbd8c0e253c1f189088228f9b433f7a93b7196b9e5b6f87dba393f5d5d"}, + {file = "grpcio-1.62.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1f1e7b36bdff50103af95a80923bf1853f6823dd62f2d2a2524b66ed74103e49"}, + {file = "grpcio-1.62.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bba97b8e8883a8038606480d6b6772289f4c907f6ba780fa1f7b7da7dfd76f06"}, + {file = "grpcio-1.62.1-cp38-cp38-win32.whl", hash = "sha256:a7f615270fe534548112a74e790cd9d4f5509d744dd718cd442bf016626c22e4"}, + {file = "grpcio-1.62.1-cp38-cp38-win_amd64.whl", hash = "sha256:e6c8c8693df718c5ecbc7babb12c69a4e3677fd11de8886f05ab22d4e6b1c43b"}, + {file = "grpcio-1.62.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:73db2dc1b201d20ab7083e7041946910bb991e7e9761a0394bbc3c2632326483"}, + {file = "grpcio-1.62.1-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:407b26b7f7bbd4f4751dbc9767a1f0716f9fe72d3d7e96bb3ccfc4aace07c8de"}, + {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:f8de7c8cef9261a2d0a62edf2ccea3d741a523c6b8a6477a340a1f2e417658de"}, + {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd5c8a1af40ec305d001c60236308a67e25419003e9bb3ebfab5695a8d0b369"}, + {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be0477cb31da67846a33b1a75c611f88bfbcd427fe17701b6317aefceee1b96f"}, + {file = "grpcio-1.62.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:60dcd824df166ba266ee0cfaf35a31406cd16ef602b49f5d4dfb21f014b0dedd"}, + {file = "grpcio-1.62.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:973c49086cabab773525f6077f95e5a993bfc03ba8fc32e32f2c279497780585"}, + {file = "grpcio-1.62.1-cp39-cp39-win32.whl", hash = "sha256:12859468e8918d3bd243d213cd6fd6ab07208195dc140763c00dfe901ce1e1b4"}, + {file = "grpcio-1.62.1-cp39-cp39-win_amd64.whl", hash = "sha256:b7209117bbeebdfa5d898205cc55153a51285757902dd73c47de498ad4d11332"}, + {file = "grpcio-1.62.1.tar.gz", hash = "sha256:6c455e008fa86d9e9a9d85bb76da4277c0d7d9668a3bfa70dbe86e9f3c759947"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.56.2)"] +protobuf = ["grpcio-tools (>=1.62.1)"] [[package]] name = "httplib2" @@ -735,13 +764,13 @@ pyparsing = {version = ">=2.4.2,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.0.2 || >3.0 [[package]] name = "idna" -version = "3.4" +version = "3.6" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.5" files = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, ] [[package]] @@ -790,30 +819,27 @@ files = [ [[package]] name = "isort" -version = "5.12.0" +version = "5.13.2" description = "A Python utility / library to sort Python imports." optional = false python-versions = ">=3.8.0" files = [ - {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, - {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, ] [package.extras] -colors = ["colorama (>=0.4.3)"] -pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] -plugins = ["setuptools"] -requirements-deprecated-finder = ["pip-api", "pipreqs"] +colors = ["colorama (>=0.4.6)"] [[package]] name = "jinja2" -version = "3.1.2" +version = "3.1.3" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" files = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, ] [package.dependencies] @@ -824,18 +850,20 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jsonschema" -version = "4.17.3" +version = "4.21.1" description = "An implementation of JSON Schema validation for Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "jsonschema-4.17.3-py3-none-any.whl", hash = "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6"}, - {file = "jsonschema-4.17.3.tar.gz", hash = "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d"}, + {file = "jsonschema-4.21.1-py3-none-any.whl", hash = "sha256:7996507afae316306f9e2290407761157c6f78002dcf7419acb99822143d1c6f"}, + {file = "jsonschema-4.21.1.tar.gz", hash = "sha256:85727c00279f5fa6bedbe6238d2aa6403bedd8b4864ab11207d07df3cc1b2ee5"}, ] [package.dependencies] -attrs = ">=17.4.0" -pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" +attrs = ">=22.2.0" +jsonschema-specifications = ">=2023.03.6" +referencing = ">=0.28.4" +rpds-py = ">=0.7.1" [package.extras] format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] @@ -843,123 +871,148 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- [[package]] name = "jsonschema-spec" -version = "0.1.6" +version = "0.1.3" description = "JSONSchema Spec with object-oriented paths" optional = false python-versions = ">=3.7.0,<4.0.0" files = [ - {file = "jsonschema_spec-0.1.6-py3-none-any.whl", hash = "sha256:f2206d18c89d1824c1f775ba14ed039743b41a9167bd2c5bdb774b66b3ca0bbf"}, - {file = "jsonschema_spec-0.1.6.tar.gz", hash = "sha256:90215863b56e212086641956b20127ccbf6d8a3a38343dad01d6a74d19482f76"}, + {file = "jsonschema_spec-0.1.3-py3-none-any.whl", hash = "sha256:b3cde007ad65c2e631e2f8653cf187124a2c714d02d9fafbab68ad64bf5745d6"}, + {file = "jsonschema_spec-0.1.3.tar.gz", hash = "sha256:8d8db7c255e524fab1016a952a9143e5b6e3c074f4ed25d1878f8e97806caec0"}, ] [package.dependencies] -jsonschema = ">=4.0.0,<4.18.0" +jsonschema = ">=4.0.0,<5.0.0" pathable = ">=0.4.1,<0.5.0" PyYAML = ">=5.1" -requests = ">=2.31.0,<3.0.0" +typing-extensions = ">=4.3.0,<5.0.0" + +[[package]] +name = "jsonschema-specifications" +version = "2023.12.1" +description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, + {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, +] + +[package.dependencies] +referencing = ">=0.31.0" [[package]] name = "lazy-object-proxy" -version = "1.9.0" +version = "1.10.0" description = "A fast and thorough lazy object proxy." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "lazy-object-proxy-1.9.0.tar.gz", hash = "sha256:659fb5809fa4629b8a1ac5106f669cfc7bef26fbb389dda53b3e010d1ac4ebae"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b40387277b0ed2d0602b8293b94d7257e17d1479e257b4de114ea11a8cb7f2d7"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8c6cfb338b133fbdbc5cfaa10fe3c6aeea827db80c978dbd13bc9dd8526b7d4"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:721532711daa7db0d8b779b0bb0318fa87af1c10d7fe5e52ef30f8eff254d0cd"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:66a3de4a3ec06cd8af3f61b8e1ec67614fbb7c995d02fa224813cb7afefee701"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1aa3de4088c89a1b69f8ec0dcc169aa725b0ff017899ac568fe44ddc1396df46"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-win32.whl", hash = "sha256:f0705c376533ed2a9e5e97aacdbfe04cecd71e0aa84c7c0595d02ef93b6e4455"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea806fd4c37bf7e7ad82537b0757999264d5f70c45468447bb2b91afdbe73a6e"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:946d27deaff6cf8452ed0dba83ba38839a87f4f7a9732e8f9fd4107b21e6ff07"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79a31b086e7e68b24b99b23d57723ef7e2c6d81ed21007b6281ebcd1688acb0a"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f699ac1c768270c9e384e4cbd268d6e67aebcfae6cd623b4d7c3bfde5a35db59"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bfb38f9ffb53b942f2b5954e0f610f1e721ccebe9cce9025a38c8ccf4a5183a4"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:189bbd5d41ae7a498397287c408617fe5c48633e7755287b21d741f7db2706a9"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-win32.whl", hash = "sha256:81fc4d08b062b535d95c9ea70dbe8a335c45c04029878e62d744bdced5141586"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:f2457189d8257dd41ae9b434ba33298aec198e30adf2dcdaaa3a28b9994f6adb"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d9e25ef10a39e8afe59a5c348a4dbf29b4868ab76269f81ce1674494e2565a6e"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbf9b082426036e19c6924a9ce90c740a9861e2bdc27a4834fd0a910742ac1e8"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f5fa4a61ce2438267163891961cfd5e32ec97a2c444e5b842d574251ade27d2"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8fa02eaab317b1e9e03f69aab1f91e120e7899b392c4fc19807a8278a07a97e8"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e7c21c95cae3c05c14aafffe2865bbd5e377cfc1348c4f7751d9dc9a48ca4bda"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win32.whl", hash = "sha256:f12ad7126ae0c98d601a7ee504c1122bcef553d1d5e0c3bfa77b16b3968d2734"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:edd20c5a55acb67c7ed471fa2b5fb66cb17f61430b7a6b9c3b4a1e40293b1671"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0daa332786cf3bb49e10dc6a17a52f6a8f9601b4cf5c295a4f85854d61de63"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cd077f3d04a58e83d04b20e334f678c2b0ff9879b9375ed107d5d07ff160171"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660c94ea760b3ce47d1855a30984c78327500493d396eac4dfd8bd82041b22be"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:212774e4dfa851e74d393a2370871e174d7ff0ebc980907723bb67d25c8a7c30"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f0117049dd1d5635bbff65444496c90e0baa48ea405125c088e93d9cf4525b11"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-win32.whl", hash = "sha256:0a891e4e41b54fd5b8313b96399f8b0e173bbbfc03c7631f01efbe29bb0bcf82"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:9990d8e71b9f6488e91ad25f322898c136b008d87bf852ff65391b004da5e17b"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9e7551208b2aded9c1447453ee366f1c4070602b3d932ace044715d89666899b"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f83ac4d83ef0ab017683d715ed356e30dd48a93746309c8f3517e1287523ef4"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7322c3d6f1766d4ef1e51a465f47955f1e8123caee67dd641e67d539a534d006"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:18b78ec83edbbeb69efdc0e9c1cb41a3b1b1ed11ddd8ded602464c3fc6020494"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:09763491ce220c0299688940f8dc2c5d05fd1f45af1e42e636b2e8b2303e4382"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-win32.whl", hash = "sha256:9090d8e53235aa280fc9239a86ae3ea8ac58eff66a705fa6aa2ec4968b95c821"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:db1c1722726f47e10e0b5fdbf15ac3b8adb58c091d12b3ab713965795036985f"}, + {file = "lazy-object-proxy-1.10.0.tar.gz", hash = "sha256:78247b6d45f43a52ef35c25b5581459e85117225408a4128a3daf8bf9648ac69"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:855e068b0358ab916454464a884779c7ffa312b8925c6f7401e952dcf3b89977"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab7004cf2e59f7c2e4345604a3e6ea0d92ac44e1c2375527d56492014e690c3"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc0d2fc424e54c70c4bc06787e4072c4f3b1aa2f897dfdc34ce1013cf3ceef05"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e2adb09778797da09d2b5ebdbceebf7dd32e2c96f79da9052b2e87b6ea495895"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1f711e2c6dcd4edd372cf5dec5c5a30d23bba06ee012093267b3376c079ec83"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-win32.whl", hash = "sha256:76a095cfe6045c7d0ca77db9934e8f7b71b14645f0094ffcd842349ada5c5fb9"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:b4f87d4ed9064b2628da63830986c3d2dca7501e6018347798313fcf028e2fd4"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fec03caabbc6b59ea4a638bee5fce7117be8e99a4103d9d5ad77f15d6f81020c"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02c83f957782cbbe8136bee26416686a6ae998c7b6191711a04da776dc9e47d4"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009e6bb1f1935a62889ddc8541514b6a9e1fcf302667dcb049a0be5c8f613e56"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75fc59fc450050b1b3c203c35020bc41bd2695ed692a392924c6ce180c6f1dc9"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:782e2c9b2aab1708ffb07d4bf377d12901d7a1d99e5e410d648d892f8967ab1f"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-win32.whl", hash = "sha256:edb45bb8278574710e68a6b021599a10ce730d156e5b254941754a9cc0b17d03"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:e271058822765ad5e3bca7f05f2ace0de58a3f4e62045a8c90a0dfd2f8ad8cc6"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e98c8af98d5707dcdecc9ab0863c0ea6e88545d42ca7c3feffb6b4d1e370c7ba"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:952c81d415b9b80ea261d2372d2a4a2332a3890c2b83e0535f263ddfe43f0d43"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80b39d3a151309efc8cc48675918891b865bdf742a8616a337cb0090791a0de9"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e221060b701e2aa2ea991542900dd13907a5c90fa80e199dbf5a03359019e7a3"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:92f09ff65ecff3108e56526f9e2481b8116c0b9e1425325e13245abfd79bdb1b"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-win32.whl", hash = "sha256:3ad54b9ddbe20ae9f7c1b29e52f123120772b06dbb18ec6be9101369d63a4074"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:127a789c75151db6af398b8972178afe6bda7d6f68730c057fbbc2e96b08d282"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9e4ed0518a14dd26092614412936920ad081a424bdcb54cc13349a8e2c6d106a"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ad9e6ed739285919aa9661a5bbed0aaf410aa60231373c5579c6b4801bd883c"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fc0a92c02fa1ca1e84fc60fa258458e5bf89d90a1ddaeb8ed9cc3147f417255"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0aefc7591920bbd360d57ea03c995cebc204b424524a5bd78406f6e1b8b2a5d8"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5faf03a7d8942bb4476e3b62fd0f4cf94eaf4618e304a19865abf89a35c0bbee"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-win32.whl", hash = "sha256:e333e2324307a7b5d86adfa835bb500ee70bfcd1447384a822e96495796b0ca4"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:cb73507defd385b7705c599a94474b1d5222a508e502553ef94114a143ec6696"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:366c32fe5355ef5fc8a232c5436f4cc66e9d3e8967c01fb2e6302fd6627e3d94"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2297f08f08a2bb0d32a4265e98a006643cd7233fb7983032bd61ac7a02956b3b"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18dd842b49456aaa9a7cf535b04ca4571a302ff72ed8740d06b5adcd41fe0757"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:217138197c170a2a74ca0e05bddcd5f1796c735c37d0eee33e43259b192aa424"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a3a87cf1e133e5b1994144c12ca4aa3d9698517fe1e2ca82977781b16955658"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-win32.whl", hash = "sha256:30b339b2a743c5288405aa79a69e706a06e02958eab31859f7f3c04980853b70"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:a899b10e17743683b293a729d3a11f2f399e8a90c73b089e29f5d0fe3509f0dd"}, + {file = "lazy_object_proxy-1.10.0-pp310.pp311.pp312.pp38.pp39-none-any.whl", hash = "sha256:80fa48bd89c8f2f456fc0765c11c23bf5af827febacd2f523ca5bc1893fcc09d"}, ] [[package]] name = "markupsafe" -version = "2.1.3" +version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, - {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] [[package]] @@ -1035,13 +1088,13 @@ requests = ["requests"] [[package]] name = "packaging" -version = "23.1" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, - {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -1068,39 +1121,39 @@ files = [ [[package]] name = "pathspec" -version = "0.11.2" +version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, - {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] [[package]] name = "platformdirs" -version = "3.10.0" +version = "4.2.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "platformdirs-3.10.0-py3-none-any.whl", hash = "sha256:d7c24979f292f916dc9cbf8648319032f551ea8c49a4c9bf2fb556a02070ec1d"}, - {file = "platformdirs-3.10.0.tar.gz", hash = "sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d"}, + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, ] [package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] [[package]] name = "pluggy" -version = "1.2.0" +version = "1.4.0" description = "plugin and hook calling mechanisms for python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, - {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, ] [package.extras] @@ -1163,94 +1216,92 @@ ssv = ["swagger-spec-validator (>=2.4,<3.0)"] [[package]] name = "protobuf" -version = "4.23.4" +version = "5.26.1" description = "" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "protobuf-4.23.4-cp310-abi3-win32.whl", hash = "sha256:5fea3c64d41ea5ecf5697b83e41d09b9589e6f20b677ab3c48e5f242d9b7897b"}, - {file = "protobuf-4.23.4-cp310-abi3-win_amd64.whl", hash = "sha256:7b19b6266d92ca6a2a87effa88ecc4af73ebc5cfde194dc737cf8ef23a9a3b12"}, - {file = "protobuf-4.23.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8547bf44fe8cec3c69e3042f5c4fb3e36eb2a7a013bb0a44c018fc1e427aafbd"}, - {file = "protobuf-4.23.4-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:fee88269a090ada09ca63551bf2f573eb2424035bcf2cb1b121895b01a46594a"}, - {file = "protobuf-4.23.4-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:effeac51ab79332d44fba74660d40ae79985901ac21bca408f8dc335a81aa597"}, - {file = "protobuf-4.23.4-cp37-cp37m-win32.whl", hash = "sha256:c3e0939433c40796ca4cfc0fac08af50b00eb66a40bbbc5dee711998fb0bbc1e"}, - {file = "protobuf-4.23.4-cp37-cp37m-win_amd64.whl", hash = "sha256:9053df6df8e5a76c84339ee4a9f5a2661ceee4a0dab019e8663c50ba324208b0"}, - {file = "protobuf-4.23.4-cp38-cp38-win32.whl", hash = "sha256:e1c915778d8ced71e26fcf43c0866d7499891bca14c4368448a82edc61fdbc70"}, - {file = "protobuf-4.23.4-cp38-cp38-win_amd64.whl", hash = "sha256:351cc90f7d10839c480aeb9b870a211e322bf05f6ab3f55fcb2f51331f80a7d2"}, - {file = "protobuf-4.23.4-cp39-cp39-win32.whl", hash = "sha256:6dd9b9940e3f17077e820b75851126615ee38643c2c5332aa7a359988820c720"}, - {file = "protobuf-4.23.4-cp39-cp39-win_amd64.whl", hash = "sha256:0a5759f5696895de8cc913f084e27fd4125e8fb0914bb729a17816a33819f474"}, - {file = "protobuf-4.23.4-py3-none-any.whl", hash = "sha256:e9d0be5bf34b275b9f87ba7407796556abeeba635455d036c7351f7c183ef8ff"}, - {file = "protobuf-4.23.4.tar.gz", hash = "sha256:ccd9430c0719dce806b93f89c91de7977304729e55377f872a92465d548329a9"}, + {file = "protobuf-5.26.1-cp310-abi3-win32.whl", hash = "sha256:3c388ea6ddfe735f8cf69e3f7dc7611e73107b60bdfcf5d0f024c3ccd3794e23"}, + {file = "protobuf-5.26.1-cp310-abi3-win_amd64.whl", hash = "sha256:e6039957449cb918f331d32ffafa8eb9255769c96aa0560d9a5bf0b4e00a2a33"}, + {file = "protobuf-5.26.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:38aa5f535721d5bb99861166c445c4105c4e285c765fbb2ac10f116e32dcd46d"}, + {file = "protobuf-5.26.1-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:fbfe61e7ee8c1860855696e3ac6cfd1b01af5498facc6834fcc345c9684fb2ca"}, + {file = "protobuf-5.26.1-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:f7417703f841167e5a27d48be13389d52ad705ec09eade63dfc3180a959215d7"}, + {file = "protobuf-5.26.1-cp38-cp38-win32.whl", hash = "sha256:d693d2504ca96750d92d9de8a103102dd648fda04540495535f0fec7577ed8fc"}, + {file = "protobuf-5.26.1-cp38-cp38-win_amd64.whl", hash = "sha256:9b557c317ebe6836835ec4ef74ec3e994ad0894ea424314ad3552bc6e8835b4e"}, + {file = "protobuf-5.26.1-cp39-cp39-win32.whl", hash = "sha256:b9ba3ca83c2e31219ffbeb9d76b63aad35a3eb1544170c55336993d7a18ae72c"}, + {file = "protobuf-5.26.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ee014c2c87582e101d6b54260af03b6596728505c79f17c8586e7523aaa8f8c"}, + {file = "protobuf-5.26.1-py3-none-any.whl", hash = "sha256:da612f2720c0183417194eeaa2523215c4fcc1a1949772dc65f05047e08d5932"}, + {file = "protobuf-5.26.1.tar.gz", hash = "sha256:8ca2a1d97c290ec7b16e4e5dff2e5ae150cc1582f55b5ab300d45cb0dfa90e51"}, ] [[package]] name = "pyasn1" -version = "0.5.0" +version = "0.6.0" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +python-versions = ">=3.8" files = [ - {file = "pyasn1-0.5.0-py2.py3-none-any.whl", hash = "sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57"}, - {file = "pyasn1-0.5.0.tar.gz", hash = "sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde"}, + {file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"}, + {file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"}, ] [[package]] name = "pyasn1-modules" -version = "0.3.0" +version = "0.4.0" description = "A collection of ASN.1-based protocols modules" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +python-versions = ">=3.8" files = [ - {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"}, - {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"}, + {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"}, + {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"}, ] [package.dependencies] -pyasn1 = ">=0.4.6,<0.6.0" +pyasn1 = ">=0.4.6,<0.7.0" [[package]] name = "pydantic" -version = "1.10.12" +version = "1.10.14" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a1fcb59f2f355ec350073af41d927bf83a63b50e640f4dbaa01053a28b7a7718"}, - {file = "pydantic-1.10.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b7ccf02d7eb340b216ec33e53a3a629856afe1c6e0ef91d84a4e6f2fb2ca70fe"}, - {file = "pydantic-1.10.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fb2aa3ab3728d950bcc885a2e9eff6c8fc40bc0b7bb434e555c215491bcf48b"}, - {file = "pydantic-1.10.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:771735dc43cf8383959dc9b90aa281f0b6092321ca98677c5fb6125a6f56d58d"}, - {file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ca48477862372ac3770969b9d75f1bf66131d386dba79506c46d75e6b48c1e09"}, - {file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a5e7add47a5b5a40c49b3036d464e3c7802f8ae0d1e66035ea16aa5b7a3923ed"}, - {file = "pydantic-1.10.12-cp310-cp310-win_amd64.whl", hash = "sha256:e4129b528c6baa99a429f97ce733fff478ec955513630e61b49804b6cf9b224a"}, - {file = "pydantic-1.10.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b0d191db0f92dfcb1dec210ca244fdae5cbe918c6050b342d619c09d31eea0cc"}, - {file = "pydantic-1.10.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:795e34e6cc065f8f498c89b894a3c6da294a936ee71e644e4bd44de048af1405"}, - {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69328e15cfda2c392da4e713443c7dbffa1505bc9d566e71e55abe14c97ddc62"}, - {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2031de0967c279df0d8a1c72b4ffc411ecd06bac607a212892757db7462fc494"}, - {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ba5b2e6fe6ca2b7e013398bc7d7b170e21cce322d266ffcd57cca313e54fb246"}, - {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2a7bac939fa326db1ab741c9d7f44c565a1d1e80908b3797f7f81a4f86bc8d33"}, - {file = "pydantic-1.10.12-cp311-cp311-win_amd64.whl", hash = "sha256:87afda5539d5140cb8ba9e8b8c8865cb5b1463924d38490d73d3ccfd80896b3f"}, - {file = "pydantic-1.10.12-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:549a8e3d81df0a85226963611950b12d2d334f214436a19537b2efed61b7639a"}, - {file = "pydantic-1.10.12-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:598da88dfa127b666852bef6d0d796573a8cf5009ffd62104094a4fe39599565"}, - {file = "pydantic-1.10.12-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba5c4a8552bff16c61882db58544116d021d0b31ee7c66958d14cf386a5b5350"}, - {file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c79e6a11a07da7374f46970410b41d5e266f7f38f6a17a9c4823db80dadf4303"}, - {file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab26038b8375581dc832a63c948f261ae0aa21f1d34c1293469f135fa92972a5"}, - {file = "pydantic-1.10.12-cp37-cp37m-win_amd64.whl", hash = "sha256:e0a16d274b588767602b7646fa05af2782576a6cf1022f4ba74cbb4db66f6ca8"}, - {file = "pydantic-1.10.12-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6a9dfa722316f4acf4460afdf5d41d5246a80e249c7ff475c43a3a1e9d75cf62"}, - {file = "pydantic-1.10.12-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a73f489aebd0c2121ed974054cb2759af8a9f747de120acd2c3394cf84176ccb"}, - {file = "pydantic-1.10.12-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b30bcb8cbfccfcf02acb8f1a261143fab622831d9c0989707e0e659f77a18e0"}, - {file = "pydantic-1.10.12-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fcfb5296d7877af406ba1547dfde9943b1256d8928732267e2653c26938cd9c"}, - {file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2f9a6fab5f82ada41d56b0602606a5506aab165ca54e52bc4545028382ef1c5d"}, - {file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dea7adcc33d5d105896401a1f37d56b47d443a2b2605ff8a969a0ed5543f7e33"}, - {file = "pydantic-1.10.12-cp38-cp38-win_amd64.whl", hash = "sha256:1eb2085c13bce1612da8537b2d90f549c8cbb05c67e8f22854e201bde5d98a47"}, - {file = "pydantic-1.10.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ef6c96b2baa2100ec91a4b428f80d8f28a3c9e53568219b6c298c1125572ebc6"}, - {file = "pydantic-1.10.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6c076be61cd0177a8433c0adcb03475baf4ee91edf5a4e550161ad57fc90f523"}, - {file = "pydantic-1.10.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d5a58feb9a39f481eda4d5ca220aa8b9d4f21a41274760b9bc66bfd72595b86"}, - {file = "pydantic-1.10.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5f805d2d5d0a41633651a73fa4ecdd0b3d7a49de4ec3fadf062fe16501ddbf1"}, - {file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1289c180abd4bd4555bb927c42ee42abc3aee02b0fb2d1223fb7c6e5bef87dbe"}, - {file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5d1197e462e0364906cbc19681605cb7c036f2475c899b6f296104ad42b9f5fb"}, - {file = "pydantic-1.10.12-cp39-cp39-win_amd64.whl", hash = "sha256:fdbdd1d630195689f325c9ef1a12900524dceb503b00a987663ff4f58669b93d"}, - {file = "pydantic-1.10.12-py3-none-any.whl", hash = "sha256:b749a43aa51e32839c9d71dc67eb1e4221bb04af1033a32e3923d46f9effa942"}, - {file = "pydantic-1.10.12.tar.gz", hash = "sha256:0fe8a415cea8f340e7a9af9c54fc71a649b43e8ca3cc732986116b3cb135d303"}, + {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, + {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, + {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, + {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, + {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, + {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, + {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, + {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, + {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, + {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, ] [package.dependencies] @@ -1277,54 +1328,18 @@ dev = ["Sphinx", "black", "build", "coverage", "docformatter", "flake8", "flake8 [[package]] name = "pyparsing" -version = "3.1.1" +version = "3.1.2" description = "pyparsing module - Classes and methods to define and execute parsing grammars" optional = false python-versions = ">=3.6.8" files = [ - {file = "pyparsing-3.1.1-py3-none-any.whl", hash = "sha256:32c7c0b711493c72ff18a981d24f28aaf9c1fb7ed5e9667c9e84e3db623bdbfb"}, - {file = "pyparsing-3.1.1.tar.gz", hash = "sha256:ede28a1a32462f5a9705e07aea48001a08f7cf81a021585011deba701581a0db"}, + {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, + {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, ] [package.extras] diagrams = ["jinja2", "railroad-diagrams"] -[[package]] -name = "pyrsistent" -version = "0.19.3" -description = "Persistent/Functional/Immutable data structures" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pyrsistent-0.19.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:20460ac0ea439a3e79caa1dbd560344b64ed75e85d8703943e0b66c2a6150e4a"}, - {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c18264cb84b5e68e7085a43723f9e4c1fd1d935ab240ce02c0324a8e01ccb64"}, - {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b774f9288dda8d425adb6544e5903f1fb6c273ab3128a355c6b972b7df39dcf"}, - {file = "pyrsistent-0.19.3-cp310-cp310-win32.whl", hash = "sha256:5a474fb80f5e0d6c9394d8db0fc19e90fa540b82ee52dba7d246a7791712f74a"}, - {file = "pyrsistent-0.19.3-cp310-cp310-win_amd64.whl", hash = "sha256:49c32f216c17148695ca0e02a5c521e28a4ee6c5089f97e34fe24163113722da"}, - {file = "pyrsistent-0.19.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f0774bf48631f3a20471dd7c5989657b639fd2d285b861237ea9e82c36a415a9"}, - {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ab2204234c0ecd8b9368dbd6a53e83c3d4f3cab10ecaf6d0e772f456c442393"}, - {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e42296a09e83028b3476f7073fcb69ffebac0e66dbbfd1bd847d61f74db30f19"}, - {file = "pyrsistent-0.19.3-cp311-cp311-win32.whl", hash = "sha256:64220c429e42a7150f4bfd280f6f4bb2850f95956bde93c6fda1b70507af6ef3"}, - {file = "pyrsistent-0.19.3-cp311-cp311-win_amd64.whl", hash = "sha256:016ad1afadf318eb7911baa24b049909f7f3bb2c5b1ed7b6a8f21db21ea3faa8"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4db1bd596fefd66b296a3d5d943c94f4fac5bcd13e99bffe2ba6a759d959a28"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aeda827381f5e5d65cced3024126529ddc4289d944f75e090572c77ceb19adbf"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:42ac0b2f44607eb92ae88609eda931a4f0dfa03038c44c772e07f43e738bcac9"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-win32.whl", hash = "sha256:e8f2b814a3dc6225964fa03d8582c6e0b6650d68a232df41e3cc1b66a5d2f8d1"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-win_amd64.whl", hash = "sha256:c9bb60a40a0ab9aba40a59f68214eed5a29c6274c83b2cc206a359c4a89fa41b"}, - {file = "pyrsistent-0.19.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a2471f3f8693101975b1ff85ffd19bb7ca7dd7c38f8a81701f67d6b4f97b87d8"}, - {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc5d149f31706762c1f8bda2e8c4f8fead6e80312e3692619a75301d3dbb819a"}, - {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3311cb4237a341aa52ab8448c27e3a9931e2ee09561ad150ba94e4cfd3fc888c"}, - {file = "pyrsistent-0.19.3-cp38-cp38-win32.whl", hash = "sha256:f0e7c4b2f77593871e918be000b96c8107da48444d57005b6a6bc61fb4331b2c"}, - {file = "pyrsistent-0.19.3-cp38-cp38-win_amd64.whl", hash = "sha256:c147257a92374fde8498491f53ffa8f4822cd70c0d85037e09028e478cababb7"}, - {file = "pyrsistent-0.19.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b735e538f74ec31378f5a1e3886a26d2ca6351106b4dfde376a26fc32a044edc"}, - {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99abb85579e2165bd8522f0c0138864da97847875ecbd45f3e7e2af569bfc6f2"}, - {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a8cb235fa6d3fd7aae6a4f1429bbb1fec1577d978098da1252f0489937786f3"}, - {file = "pyrsistent-0.19.3-cp39-cp39-win32.whl", hash = "sha256:c74bed51f9b41c48366a286395c67f4e894374306b197e62810e0fdaf2364da2"}, - {file = "pyrsistent-0.19.3-cp39-cp39-win_amd64.whl", hash = "sha256:878433581fc23e906d947a6814336eee031a00e6defba224234169ae3d3d6a98"}, - {file = "pyrsistent-0.19.3-py3-none-any.whl", hash = "sha256:ccf0d6bd208f8111179f0c26fdf84ed7c3891982f2edaeae7422575f47e66b64"}, - {file = "pyrsistent-0.19.3.tar.gz", hash = "sha256:1a2994773706bbb4995c31a97bc94f1418314923bd1048c6d964837040376440"}, -] - [[package]] name = "pysnooper" version = "1.2.0" @@ -1341,13 +1356,13 @@ tests = ["pytest"] [[package]] name = "pytest" -version = "7.4.0" +version = "8.1.1" description = "pytest: simple powerful testing with Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, - {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, + {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"}, + {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"}, ] [package.dependencies] @@ -1355,25 +1370,25 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" -pluggy = ">=0.12,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} +pluggy = ">=1.4,<2.0" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-mock" -version = "3.11.1" +version = "3.14.0" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pytest-mock-3.11.1.tar.gz", hash = "sha256:7f6b125602ac6d743e523ae0bfa71e1a697a2f5534064528c6ff84c2f7c2fc7f"}, - {file = "pytest_mock-3.11.1-py3-none-any.whl", hash = "sha256:21c279fff83d70763b05f8874cc9cfb3fcacd6d354247a976f9529d19f9acf39"}, + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, ] [package.dependencies] -pytest = ">=5.0" +pytest = ">=6.2.5" [package.extras] dev = ["pre-commit", "pytest-asyncio", "tox"] @@ -1390,6 +1405,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -1397,8 +1413,16 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -1415,6 +1439,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -1422,11 +1447,27 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, ] +[[package]] +name = "referencing" +version = "0.34.0" +description = "JSON Referencing + Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "referencing-0.34.0-py3-none-any.whl", hash = "sha256:d53ae300ceddd3169f1ffa9caf2cb7b769e92657e4fafb23d34b93679116dfd4"}, + {file = "referencing-0.34.0.tar.gz", hash = "sha256:5773bd84ef41799a5a8ca72dc34590c041eb01bf9aa02632b4a973fb0181a844"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +rpds-py = ">=0.7.0" + [[package]] name = "requests" version = "2.31.0" @@ -1448,6 +1489,114 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "rpds-py" +version = "0.18.0" +description = "Python bindings to Rust's persistent data structures (rpds)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "rpds_py-0.18.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:5b4e7d8d6c9b2e8ee2d55c90b59c707ca59bc30058269b3db7b1f8df5763557e"}, + {file = "rpds_py-0.18.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c463ed05f9dfb9baebef68048aed8dcdc94411e4bf3d33a39ba97e271624f8f7"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01e36a39af54a30f28b73096dd39b6802eddd04c90dbe161c1b8dbe22353189f"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d62dec4976954a23d7f91f2f4530852b0c7608116c257833922a896101336c51"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd18772815d5f008fa03d2b9a681ae38d5ae9f0e599f7dda233c439fcaa00d40"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:923d39efa3cfb7279a0327e337a7958bff00cc447fd07a25cddb0a1cc9a6d2da"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39514da80f971362f9267c600b6d459bfbbc549cffc2cef8e47474fddc9b45b1"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a34d557a42aa28bd5c48a023c570219ba2593bcbbb8dc1b98d8cf5d529ab1434"}, + {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:93df1de2f7f7239dc9cc5a4a12408ee1598725036bd2dedadc14d94525192fc3"}, + {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:34b18ba135c687f4dac449aa5157d36e2cbb7c03cbea4ddbd88604e076aa836e"}, + {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c0b5dcf9193625afd8ecc92312d6ed78781c46ecbf39af9ad4681fc9f464af88"}, + {file = "rpds_py-0.18.0-cp310-none-win32.whl", hash = "sha256:c4325ff0442a12113a6379af66978c3fe562f846763287ef66bdc1d57925d337"}, + {file = "rpds_py-0.18.0-cp310-none-win_amd64.whl", hash = "sha256:7223a2a5fe0d217e60a60cdae28d6949140dde9c3bcc714063c5b463065e3d66"}, + {file = "rpds_py-0.18.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3a96e0c6a41dcdba3a0a581bbf6c44bb863f27c541547fb4b9711fd8cf0ffad4"}, + {file = "rpds_py-0.18.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30f43887bbae0d49113cbaab729a112251a940e9b274536613097ab8b4899cf6"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fcb25daa9219b4cf3a0ab24b0eb9a5cc8949ed4dc72acb8fa16b7e1681aa3c58"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d68c93e381010662ab873fea609bf6c0f428b6d0bb00f2c6939782e0818d37bf"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b34b7aa8b261c1dbf7720b5d6f01f38243e9b9daf7e6b8bc1fd4657000062f2c"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e6d75ab12b0bbab7215e5d40f1e5b738aa539598db27ef83b2ec46747df90e1"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b8612cd233543a3781bc659c731b9d607de65890085098986dfd573fc2befe5"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aec493917dd45e3c69d00a8874e7cbed844efd935595ef78a0f25f14312e33c6"}, + {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:661d25cbffaf8cc42e971dd570d87cb29a665f49f4abe1f9e76be9a5182c4688"}, + {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1df3659d26f539ac74fb3b0c481cdf9d725386e3552c6fa2974f4d33d78e544b"}, + {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a1ce3ba137ed54f83e56fb983a5859a27d43a40188ba798993812fed73c70836"}, + {file = "rpds_py-0.18.0-cp311-none-win32.whl", hash = "sha256:69e64831e22a6b377772e7fb337533c365085b31619005802a79242fee620bc1"}, + {file = "rpds_py-0.18.0-cp311-none-win_amd64.whl", hash = "sha256:998e33ad22dc7ec7e030b3df701c43630b5bc0d8fbc2267653577e3fec279afa"}, + {file = "rpds_py-0.18.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7f2facbd386dd60cbbf1a794181e6aa0bd429bd78bfdf775436020172e2a23f0"}, + {file = "rpds_py-0.18.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1d9a5be316c15ffb2b3c405c4ff14448c36b4435be062a7f578ccd8b01f0c4d8"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd5bf1af8efe569654bbef5a3e0a56eca45f87cfcffab31dd8dde70da5982475"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5417558f6887e9b6b65b4527232553c139b57ec42c64570569b155262ac0754f"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:56a737287efecafc16f6d067c2ea0117abadcd078d58721f967952db329a3e5c"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8f03bccbd8586e9dd37219bce4d4e0d3ab492e6b3b533e973fa08a112cb2ffc9"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4457a94da0d5c53dc4b3e4de1158bdab077db23c53232f37a3cb7afdb053a4e3"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0ab39c1ba9023914297dd88ec3b3b3c3f33671baeb6acf82ad7ce883f6e8e157"}, + {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9d54553c1136b50fd12cc17e5b11ad07374c316df307e4cfd6441bea5fb68496"}, + {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0af039631b6de0397ab2ba16eaf2872e9f8fca391b44d3d8cac317860a700a3f"}, + {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:84ffab12db93b5f6bad84c712c92060a2d321b35c3c9960b43d08d0f639d60d7"}, + {file = "rpds_py-0.18.0-cp312-none-win32.whl", hash = "sha256:685537e07897f173abcf67258bee3c05c374fa6fff89d4c7e42fb391b0605e98"}, + {file = "rpds_py-0.18.0-cp312-none-win_amd64.whl", hash = "sha256:e003b002ec72c8d5a3e3da2989c7d6065b47d9eaa70cd8808b5384fbb970f4ec"}, + {file = "rpds_py-0.18.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:08f9ad53c3f31dfb4baa00da22f1e862900f45908383c062c27628754af2e88e"}, + {file = "rpds_py-0.18.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0013fe6b46aa496a6749c77e00a3eb07952832ad6166bd481c74bda0dcb6d58"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e32a92116d4f2a80b629778280103d2a510a5b3f6314ceccd6e38006b5e92dcb"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e541ec6f2ec456934fd279a3120f856cd0aedd209fc3852eca563f81738f6861"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bed88b9a458e354014d662d47e7a5baafd7ff81c780fd91584a10d6ec842cb73"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2644e47de560eb7bd55c20fc59f6daa04682655c58d08185a9b95c1970fa1e07"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e8916ae4c720529e18afa0b879473049e95949bf97042e938530e072fde061d"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:465a3eb5659338cf2a9243e50ad9b2296fa15061736d6e26240e713522b6235c"}, + {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ea7d4a99f3b38c37eac212dbd6ec42b7a5ec51e2c74b5d3223e43c811609e65f"}, + {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:67071a6171e92b6da534b8ae326505f7c18022c6f19072a81dcf40db2638767c"}, + {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:41ef53e7c58aa4ef281da975f62c258950f54b76ec8e45941e93a3d1d8580594"}, + {file = "rpds_py-0.18.0-cp38-none-win32.whl", hash = "sha256:fdea4952db2793c4ad0bdccd27c1d8fdd1423a92f04598bc39425bcc2b8ee46e"}, + {file = "rpds_py-0.18.0-cp38-none-win_amd64.whl", hash = "sha256:7cd863afe7336c62ec78d7d1349a2f34c007a3cc6c2369d667c65aeec412a5b1"}, + {file = "rpds_py-0.18.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:5307def11a35f5ae4581a0b658b0af8178c65c530e94893345bebf41cc139d33"}, + {file = "rpds_py-0.18.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:77f195baa60a54ef9d2de16fbbfd3ff8b04edc0c0140a761b56c267ac11aa467"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39f5441553f1c2aed4de4377178ad8ff8f9d733723d6c66d983d75341de265ab"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a00312dea9310d4cb7dbd7787e722d2e86a95c2db92fbd7d0155f97127bcb40"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f2fc11e8fe034ee3c34d316d0ad8808f45bc3b9ce5857ff29d513f3ff2923a1"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:586f8204935b9ec884500498ccc91aa869fc652c40c093bd9e1471fbcc25c022"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddc2f4dfd396c7bfa18e6ce371cba60e4cf9d2e5cdb71376aa2da264605b60b9"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ddcba87675b6d509139d1b521e0c8250e967e63b5909a7e8f8944d0f90ff36f"}, + {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7bd339195d84439cbe5771546fe8a4e8a7a045417d8f9de9a368c434e42a721e"}, + {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:d7c36232a90d4755b720fbd76739d8891732b18cf240a9c645d75f00639a9024"}, + {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6b0817e34942b2ca527b0e9298373e7cc75f429e8da2055607f4931fded23e20"}, + {file = "rpds_py-0.18.0-cp39-none-win32.whl", hash = "sha256:99f70b740dc04d09e6b2699b675874367885217a2e9f782bdf5395632ac663b7"}, + {file = "rpds_py-0.18.0-cp39-none-win_amd64.whl", hash = "sha256:6ef687afab047554a2d366e112dd187b62d261d49eb79b77e386f94644363294"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ad36cfb355e24f1bd37cac88c112cd7730873f20fb0bdaf8ba59eedf8216079f"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:36b3ee798c58ace201289024b52788161e1ea133e4ac93fba7d49da5fec0ef9e"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8a2f084546cc59ea99fda8e070be2fd140c3092dc11524a71aa8f0f3d5a55ca"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e4461d0f003a0aa9be2bdd1b798a041f177189c1a0f7619fe8c95ad08d9a45d7"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8db715ebe3bb7d86d77ac1826f7d67ec11a70dbd2376b7cc214199360517b641"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:793968759cd0d96cac1e367afd70c235867831983f876a53389ad869b043c948"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66e6a3af5a75363d2c9a48b07cb27c4ea542938b1a2e93b15a503cdfa8490795"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ef0befbb5d79cf32d0266f5cff01545602344eda89480e1dd88aca964260b18"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1d4acf42190d449d5e89654d5c1ed3a4f17925eec71f05e2a41414689cda02d1"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:a5f446dd5055667aabaee78487f2b5ab72e244f9bc0b2ffebfeec79051679984"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:9dbbeb27f4e70bfd9eec1be5477517365afe05a9b2c441a0b21929ee61048124"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:22806714311a69fd0af9b35b7be97c18a0fc2826e6827dbb3a8c94eac6cf7eeb"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:b34ae4636dfc4e76a438ab826a0d1eed2589ca7d9a1b2d5bb546978ac6485461"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c8370641f1a7f0e0669ddccca22f1da893cef7628396431eb445d46d893e5cd"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c8362467a0fdeccd47935f22c256bec5e6abe543bf0d66e3d3d57a8fb5731863"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11a8c85ef4a07a7638180bf04fe189d12757c696eb41f310d2426895356dcf05"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b316144e85316da2723f9d8dc75bada12fa58489a527091fa1d5a612643d1a0e"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf1ea2e34868f6fbf070e1af291c8180480310173de0b0c43fc38a02929fc0e3"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e546e768d08ad55b20b11dbb78a745151acbd938f8f00d0cfbabe8b0199b9880"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4901165d170a5fde6f589acb90a6b33629ad1ec976d4529e769c6f3d885e3e80"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:618a3d6cae6ef8ec88bb76dd80b83cfe415ad4f1d942ca2a903bf6b6ff97a2da"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ed4eb745efbff0a8e9587d22a84be94a5eb7d2d99c02dacf7bd0911713ed14dd"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6c81e5f372cd0dc5dc4809553d34f832f60a46034a5f187756d9b90586c2c307"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:43fbac5f22e25bee1d482c97474f930a353542855f05c1161fd804c9dc74a09d"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d7faa6f14017c0b1e69f5e2c357b998731ea75a442ab3841c0dbbbfe902d2c4"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:08231ac30a842bd04daabc4d71fddd7e6d26189406d5a69535638e4dcb88fe76"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:044a3e61a7c2dafacae99d1e722cc2d4c05280790ec5a05031b3876809d89a5c"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f26b5bd1079acdb0c7a5645e350fe54d16b17bfc5e71f371c449383d3342e17"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:482103aed1dfe2f3b71a58eff35ba105289b8d862551ea576bd15479aba01f66"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1374f4129f9bcca53a1bba0bb86bf78325a0374577cf7e9e4cd046b1e6f20e24"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:635dc434ff724b178cb192c70016cc0ad25a275228f749ee0daf0eddbc8183b1"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:bc362ee4e314870a70f4ae88772d72d877246537d9f8cb8f7eacf10884862432"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:4832d7d380477521a8c1644bbab6588dfedea5e30a7d967b5fb75977c45fd77f"}, + {file = "rpds_py-0.18.0.tar.gz", hash = "sha256:42821446ee7a76f5d9f71f9e33a4fb2ffd724bb3e7f93386150b61a43115788d"}, +] + [[package]] name = "rsa" version = "4.9" @@ -1464,76 +1613,90 @@ pyasn1 = ">=0.1.3" [[package]] name = "ruamel-yaml" -version = "0.17.32" +version = "0.18.6" description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" optional = false -python-versions = ">=3" +python-versions = ">=3.7" files = [ - {file = "ruamel.yaml-0.17.32-py3-none-any.whl", hash = "sha256:23cd2ed620231677564646b0c6a89d138b6822a0d78656df7abda5879ec4f447"}, - {file = "ruamel.yaml-0.17.32.tar.gz", hash = "sha256:ec939063761914e14542972a5cba6d33c23b0859ab6342f61cf070cfc600efc2"}, + {file = "ruamel.yaml-0.18.6-py3-none-any.whl", hash = "sha256:57b53ba33def16c4f3d807c0ccbc00f8a6081827e81ba2491691b76882d0c636"}, + {file = "ruamel.yaml-0.18.6.tar.gz", hash = "sha256:8b27e6a217e786c6fbe5634d8f3f11bc63e0f80f6a5890f28863d9c45aac311b"}, ] [package.dependencies] -"ruamel.yaml.clib" = {version = ">=0.2.7", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.12\""} +"ruamel.yaml.clib" = {version = ">=0.2.7", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.13\""} [package.extras] -docs = ["ryd"] +docs = ["mercurial (>5.7)", "ryd"] jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] [[package]] name = "ruamel-yaml-clib" -version = "0.2.7" +version = "0.2.8" description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d5859983f26d8cd7bb5c287ef452e8aacc86501487634573d260968f753e1d71"}, - {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:debc87a9516b237d0466a711b18b6ebeb17ba9f391eb7f91c649c5c4ec5006c7"}, - {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:df5828871e6648db72d1c19b4bd24819b80a755c4541d3409f0f7acd0f335c80"}, - {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:efa08d63ef03d079dcae1dfe334f6c8847ba8b645d08df286358b1f5293d24ab"}, - {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-win32.whl", hash = "sha256:763d65baa3b952479c4e972669f679fe490eee058d5aa85da483ebae2009d231"}, - {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:d000f258cf42fec2b1bbf2863c61d7b8918d31ffee905da62dede869254d3b8a"}, - {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e"}, - {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_12_6_arm64.whl", hash = "sha256:721bc4ba4525f53f6a611ec0967bdcee61b31df5a56801281027a3a6d1c2daf5"}, - {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:41d0f1fa4c6830176eef5b276af04c89320ea616655d01327d5ce65e50575c94"}, - {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-win32.whl", hash = "sha256:f6d3d39611ac2e4f62c3128a9eed45f19a6608670c5a2f4f07f24e8de3441d38"}, - {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:da538167284de58a52109a9b89b8f6a53ff8437dd6dc26d33b57bf6699153122"}, - {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:4b3a93bb9bc662fc1f99c5c3ea8e623d8b23ad22f861eb6fce9377ac07ad6072"}, - {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-macosx_12_0_arm64.whl", hash = "sha256:a234a20ae07e8469da311e182e70ef6b199d0fbeb6c6cc2901204dd87fb867e8"}, - {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:15910ef4f3e537eea7fe45f8a5d19997479940d9196f357152a09031c5be59f3"}, - {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:370445fd795706fd291ab00c9df38a0caed0f17a6fb46b0f607668ecb16ce763"}, - {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-win32.whl", hash = "sha256:ecdf1a604009bd35c674b9225a8fa609e0282d9b896c03dd441a91e5f53b534e"}, - {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-win_amd64.whl", hash = "sha256:f34019dced51047d6f70cb9383b2ae2853b7fc4dce65129a5acd49f4f9256646"}, - {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2aa261c29a5545adfef9296b7e33941f46aa5bbd21164228e833412af4c9c75f"}, - {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f01da5790e95815eb5a8a138508c01c758e5f5bc0ce4286c4f7028b8dd7ac3d0"}, - {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:40d030e2329ce5286d6b231b8726959ebbe0404c92f0a578c0e2482182e38282"}, - {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c3ca1fbba4ae962521e5eb66d72998b51f0f4d0f608d3c0347a48e1af262efa7"}, - {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-win32.whl", hash = "sha256:7bdb4c06b063f6fd55e472e201317a3bb6cdeeee5d5a38512ea5c01e1acbdd93"}, - {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:be2a7ad8fd8f7442b24323d24ba0b56c51219513cfa45b9ada3b87b76c374d4b"}, - {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:91a789b4aa0097b78c93e3dc4b40040ba55bef518f84a40d4442f713b4094acb"}, - {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:99e77daab5d13a48a4054803d052ff40780278240a902b880dd37a51ba01a307"}, - {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:3243f48ecd450eddadc2d11b5feb08aca941b5cd98c9b1db14b2fd128be8c697"}, - {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:8831a2cedcd0f0927f788c5bdf6567d9dc9cc235646a434986a852af1cb54b4b"}, - {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-win32.whl", hash = "sha256:3110a99e0f94a4a3470ff67fc20d3f96c25b13d24c6980ff841e82bafe827cac"}, - {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:92460ce908546ab69770b2e576e4f99fbb4ce6ab4b245345a3869a0a0410488f"}, - {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5bc0667c1eb8f83a3752b71b9c4ba55ef7c7058ae57022dd9b29065186a113d9"}, - {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:4a4d8d417868d68b979076a9be6a38c676eca060785abaa6709c7b31593c35d1"}, - {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:bf9a6bc4a0221538b1a7de3ed7bca4c93c02346853f44e1cd764be0023cd3640"}, - {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a7b301ff08055d73223058b5c46c55638917f04d21577c95e00e0c4d79201a6b"}, - {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-win32.whl", hash = "sha256:d5e51e2901ec2366b79f16c2299a03e74ba4531ddcfacc1416639c557aef0ad8"}, - {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:184faeaec61dbaa3cace407cffc5819f7b977e75360e8d5ca19461cd851a5fc5"}, - {file = "ruamel.yaml.clib-0.2.7.tar.gz", hash = "sha256:1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:aa2267c6a303eb483de8d02db2871afb5c5fc15618d894300b88958f729ad74f"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:840f0c7f194986a63d2c2465ca63af8ccbbc90ab1c6001b1978f05119b5e7334"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:024cfe1fc7c7f4e1aff4a81e718109e13409767e4f871443cbff3dba3578203d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_24_aarch64.whl", hash = "sha256:1707814f0d9791df063f8c19bb51b0d1278b8e9a2353abbb676c2f685dee6afe"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:46d378daaac94f454b3a0e3d8d78cafd78a026b1d71443f4966c696b48a6d899"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:09b055c05697b38ecacb7ac50bdab2240bfca1a0c4872b0fd309bb07dc9aa3a9"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_24_aarch64.whl", hash = "sha256:1dc67314e7e1086c9fdf2680b7b6c2be1c0d8e3a8279f2e993ca2a7545fecf62"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3213ece08ea033eb159ac52ae052a4899b56ecc124bb80020d9bbceeb50258e9"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aab7fd643f71d7946f2ee58cc88c9b7bfc97debd71dcc93e03e2d174628e7e2d"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win32.whl", hash = "sha256:5c365d91c88390c8d0a8545df0b5857172824b1c604e867161e6b3d59a827eaa"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win_amd64.whl", hash = "sha256:1758ce7d8e1a29d23de54a16ae867abd370f01b5a69e1a3ba75223eaa3ca1a1b"}, + {file = "ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:77159f5d5b5c14f7c34073862a6b7d34944075d9f93e681638f6d753606c6ce6"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4ecbf9c3e19f9562c7fdd462e8d18dd902a47ca046a2e64dba80699f0b6c09b7"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:87ea5ff66d8064301a154b3933ae406b0863402a799b16e4a1d24d9fbbcbe0d3"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win32.whl", hash = "sha256:75e1ed13e1f9de23c5607fe6bd1aeaae21e523b32d83bb33918245361e9cc51b"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:305889baa4043a09e5b76f8e2a51d4ffba44259f6b4c72dec8ca56207d9c6fe1"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e2b4c44b60eadec492926a7270abb100ef9f72798e18743939bdbf037aab8c28"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e79e5db08739731b0ce4850bed599235d601701d5694c36570a99a0c5ca41a9d"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win32.whl", hash = "sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:a1a45e0bb052edf6a1d3a93baef85319733a888363938e1fc9924cb00c8df24c"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:184565012b60405d93838167f425713180b949e9d8dd0bbc7b49f074407c5a8b"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a75879bacf2c987c003368cf14bed0ffe99e8e85acfa6c0bfffc21a090f16880"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win32.whl", hash = "sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15"}, + {file = "ruamel.yaml.clib-0.2.8.tar.gz", hash = "sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512"}, ] [[package]] name = "semver" -version = "3.0.1" +version = "3.0.2" description = "Python helper for Semantic Versioning (https://semver.org)" optional = false python-versions = ">=3.7" files = [ - {file = "semver-3.0.1-py3-none-any.whl", hash = "sha256:2a23844ba1647362c7490fe3995a86e097bb590d16f0f32dfc383008f19e4cdf"}, - {file = "semver-3.0.1.tar.gz", hash = "sha256:9ec78c5447883c67b97f98c3b6212796708191d22e4ad30f4570f840171cbce1"}, + {file = "semver-3.0.2-py3-none-any.whl", hash = "sha256:b1ea4686fe70b981f85359eda33199d60c53964284e0cfb4977d243e37cf4bf4"}, + {file = "semver-3.0.2.tar.gz", hash = "sha256:6253adb39c70f6e51afed2fa7152bcd414c411286088fb4b9effb133885ab4cc"}, ] [[package]] @@ -1549,13 +1712,13 @@ files = [ [[package]] name = "soupsieve" -version = "2.4.1" +version = "2.5" description = "A modern CSS selector implementation for Beautiful Soup." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "soupsieve-2.4.1-py3-none-any.whl", hash = "sha256:1c1bfee6819544a3447586c889157365a27e10d88cde3ad3da0cf0ddf646feb8"}, - {file = "soupsieve-2.4.1.tar.gz", hash = "sha256:89d12b2d5dfcd2c9e8c22326da9d9aa9cb3dfab0a83a024f05704076ee8d35ea"}, + {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, + {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, ] [[package]] @@ -1632,47 +1795,48 @@ files = [ [[package]] name = "typing-extensions" -version = "4.7.1" -description = "Backported and Experimental Type Hints for Python 3.7+" +version = "4.10.0" +description = "Backported and Experimental Type Hints for Python 3.8+" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, - {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] name = "urllib3" -version = "1.26.16" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = ">=3.8" files = [ - {file = "urllib3-1.26.16-py2.py3-none-any.whl", hash = "sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f"}, - {file = "urllib3-1.26.16.tar.gz", hash = "sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] [[package]] name = "zipp" -version = "3.16.2" +version = "3.18.1" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.16.2-py3-none-any.whl", hash = "sha256:679e51dd4403591b2d6838a48de3d283f3d188412a9782faadf845f298736ba0"}, - {file = "zipp-3.16.2.tar.gz", hash = "sha256:ebc15946aa78bd63458992fc81ec3b6f7b1e92d51c35e6de1c3804e73b799147"}, + {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, + {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "85d95e7cb087a5b9ae292c7c6f077f7ebc2b5b07b6fb5022ac11bb6dff83fda9" +content-hash = "29dd9175e5c8c3efabd26717628e1ce535f7ccd4743e9a936a93ee4a3c900e14" diff --git a/airbyte-ci/connectors/metadata_service/lib/pyproject.toml b/airbyte-ci/connectors/metadata_service/lib/pyproject.toml index b4f0389e433b7..9d0ac9874cb4f 100644 --- a/airbyte-ci/connectors/metadata_service/lib/pyproject.toml +++ b/airbyte-ci/connectors/metadata_service/lib/pyproject.toml @@ -20,7 +20,7 @@ semver = "^3.0.1" [tool.poetry.group.dev.dependencies] -pytest = "^7.2.2" +pytest = "^8" datamodel-code-generator = "^0.17.1" pytest-mock = "^3.10.0" poethepoet = "^0.20.0" @@ -36,7 +36,7 @@ promote-connector-to-latest = "gsutil -m rsync -r -d gs://$TARGET_BUCKET/metada test = "pytest tests" [tool.airbyte_ci] -extra_poetry_groups = ["dev"] +optional_poetry_groups = ["dev"] poe_tasks = ["test"] [build-system] diff --git a/airbyte-ci/connectors/metadata_service/orchestrator/README.md b/airbyte-ci/connectors/metadata_service/orchestrator/README.md index ed1c3b44ade90..104264a4044af 100644 --- a/airbyte-ci/connectors/metadata_service/orchestrator/README.md +++ b/airbyte-ci/connectors/metadata_service/orchestrator/README.md @@ -4,40 +4,11 @@ This is the Orchestrator for Airbyte metadata built on Dagster. # Setup -## Prerequisites - -#### Poetry - -Before you can start working on this project, you will need to have Poetry installed on your system. -Please follow the instructions below to install Poetry: - -1. Open your terminal or command prompt. -2. Install Poetry using the recommended installation method: - -```bash -curl -sSL https://install.python-poetry.org | POETRY_VERSION=1.5.1 python3 - -``` - -Alternatively, you can use `pip` to install Poetry: - -```bash -pip install --user poetry -``` - -3. After the installation is complete, close and reopen your terminal to ensure the newly installed - `poetry` command is available in your system's PATH. - -For more detailed instructions and alternative installation methods, please refer to the official -Poetry documentation: https://python-poetry.org/docs/#installation - -### Using Poetry in the Project - -Once Poetry is installed, you can use it to manage the project's dependencies and virtual -environment. To get started, navigate to the project's root directory in your terminal and follow -these steps: - ## Installation +Metadata Orchestrator uses Poetry as dependency manager and build system. The guide below +assumes you have Poetry installed. + ```bash poetry install cp .env.template .env diff --git a/airbyte-ci/connectors/metadata_service/orchestrator/poetry.lock b/airbyte-ci/connectors/metadata_service/orchestrator/poetry.lock index 8b4130bb6cc1a..9c219ad009131 100644 --- a/airbyte-ci/connectors/metadata_service/orchestrator/poetry.lock +++ b/airbyte-ci/connectors/metadata_service/orchestrator/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "alembic" @@ -35,13 +35,13 @@ dev = ["black", "coverage", "isort", "pre-commit", "pyenchant", "pylint"] [[package]] name = "anyio" -version = "4.2.0" +version = "4.3.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.8" files = [ - {file = "anyio-4.2.0-py3-none-any.whl", hash = "sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee"}, - {file = "anyio-4.2.0.tar.gz", hash = "sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f"}, + {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, + {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, ] [package.dependencies] @@ -98,87 +98,91 @@ files = [ [[package]] name = "beautifulsoup4" -version = "4.12.2" +version = "4.12.3" description = "Screen-scraping library" optional = false python-versions = ">=3.6.0" files = [ - {file = "beautifulsoup4-4.12.2-py3-none-any.whl", hash = "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a"}, - {file = "beautifulsoup4-4.12.2.tar.gz", hash = "sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da"}, + {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, + {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, ] [package.dependencies] soupsieve = ">1.2" [package.extras] +cchardet = ["cchardet"] +chardet = ["chardet"] +charset-normalizer = ["charset-normalizer"] html5lib = ["html5lib"] lxml = ["lxml"] [[package]] name = "build" -version = "1.0.3" +version = "1.2.1" description = "A simple, correct Python build frontend" optional = false -python-versions = ">= 3.7" +python-versions = ">=3.8" files = [ - {file = "build-1.0.3-py3-none-any.whl", hash = "sha256:589bf99a67df7c9cf07ec0ac0e5e2ea5d4b37ac63301c4986d1acb126aa83f8f"}, - {file = "build-1.0.3.tar.gz", hash = "sha256:538aab1b64f9828977f84bc63ae570b060a8ed1be419e7870b8b4fc5e6ea553b"}, + {file = "build-1.2.1-py3-none-any.whl", hash = "sha256:75e10f767a433d9a86e50d83f418e83efc18ede923ee5ff7df93b6cb0306c5d4"}, + {file = "build-1.2.1.tar.gz", hash = "sha256:526263f4870c26f26c433545579475377b2b7588b6f1eac76a001e873ae3e19d"}, ] [package.dependencies] colorama = {version = "*", markers = "os_name == \"nt\""} -importlib-metadata = {version = ">=4.6", markers = "python_version < \"3.10\""} -packaging = ">=19.0" +importlib-metadata = {version = ">=4.6", markers = "python_full_version < \"3.10.2\""} +packaging = ">=19.1" pyproject_hooks = "*" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} [package.extras] docs = ["furo (>=2023.08.17)", "sphinx (>=7.0,<8.0)", "sphinx-argparse-cli (>=1.5)", "sphinx-autodoc-typehints (>=1.10)", "sphinx-issues (>=3.0.0)"] -test = ["filelock (>=3)", "pytest (>=6.2.4)", "pytest-cov (>=2.12)", "pytest-mock (>=2)", "pytest-rerunfailures (>=9.1)", "pytest-xdist (>=1.34)", "setuptools (>=42.0.0)", "setuptools (>=56.0.0)", "setuptools (>=56.0.0)", "setuptools (>=67.8.0)", "wheel (>=0.36.0)"] -typing = ["importlib-metadata (>=5.1)", "mypy (>=1.5.0,<1.6.0)", "tomli", "typing-extensions (>=3.7.4.3)"] +test = ["build[uv,virtualenv]", "filelock (>=3)", "pytest (>=6.2.4)", "pytest-cov (>=2.12)", "pytest-mock (>=2)", "pytest-rerunfailures (>=9.1)", "pytest-xdist (>=1.34)", "setuptools (>=42.0.0)", "setuptools (>=56.0.0)", "setuptools (>=56.0.0)", "setuptools (>=67.8.0)", "wheel (>=0.36.0)"] +typing = ["build[uv]", "importlib-metadata (>=5.1)", "mypy (>=1.9.0,<1.10.0)", "tomli", "typing-extensions (>=3.7.4.3)"] +uv = ["uv (>=0.1.18)"] virtualenv = ["virtualenv (>=20.0.35)"] [[package]] name = "cachecontrol" -version = "0.13.1" +version = "0.14.0" description = "httplib2 caching for requests" optional = false python-versions = ">=3.7" files = [ - {file = "cachecontrol-0.13.1-py3-none-any.whl", hash = "sha256:95dedbec849f46dda3137866dc28b9d133fc9af55f5b805ab1291833e4457aa4"}, - {file = "cachecontrol-0.13.1.tar.gz", hash = "sha256:f012366b79d2243a6118309ce73151bf52a38d4a5dac8ea57f09bd29087e506b"}, + {file = "cachecontrol-0.14.0-py3-none-any.whl", hash = "sha256:f5bf3f0620c38db2e5122c0726bdebb0d16869de966ea6a2befe92470b740ea0"}, + {file = "cachecontrol-0.14.0.tar.gz", hash = "sha256:7db1195b41c81f8274a7bbd97c956f44e8348265a1bc7641c37dfebc39f0c938"}, ] [package.dependencies] filelock = {version = ">=3.8.0", optional = true, markers = "extra == \"filecache\""} -msgpack = ">=0.5.2" +msgpack = ">=0.5.2,<2.0.0" requests = ">=2.16.0" [package.extras] -dev = ["CacheControl[filecache,redis]", "black", "build", "cherrypy", "mypy", "pytest", "pytest-cov", "sphinx", "tox", "types-redis", "types-requests"] +dev = ["CacheControl[filecache,redis]", "black", "build", "cherrypy", "furo", "mypy", "pytest", "pytest-cov", "sphinx", "sphinx-copybutton", "tox", "types-redis", "types-requests"] filecache = ["filelock (>=3.8.0)"] redis = ["redis (>=2.10.5)"] [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] name = "certifi" -version = "2023.11.17" +version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, - {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] [[package]] @@ -414,13 +418,13 @@ files = [ [[package]] name = "croniter" -version = "2.0.1" +version = "2.0.3" description = "croniter provides iteration for datetime object with cron like format" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ - {file = "croniter-2.0.1-py2.py3-none-any.whl", hash = "sha256:4cb064ce2d8f695b3b078be36ff50115cf8ac306c10a7e8653ee2a5b534673d7"}, - {file = "croniter-2.0.1.tar.gz", hash = "sha256:d199b2ec3ea5e82988d1f72022433c5f9302b3b3ea9e6bfd6a1518f6ea5e700a"}, + {file = "croniter-2.0.3-py2.py3-none-any.whl", hash = "sha256:84dc95b2eb6760144cc01eca65a6b9cc1619c93b2dc37d8a27f4319b3eb740de"}, + {file = "croniter-2.0.3.tar.gz", hash = "sha256:28763ad39c404e159140874f08010cfd8a18f4c2a7cea1ce73e9506a4380cfc1"}, ] [package.dependencies] @@ -429,47 +433,56 @@ pytz = ">2021.1" [[package]] name = "cryptography" -version = "41.0.7" +version = "42.0.5" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf"}, - {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1"}, - {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157"}, - {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406"}, - {file = "cryptography-41.0.7-cp37-abi3-win32.whl", hash = "sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d"}, - {file = "cryptography-41.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309"}, - {file = "cryptography-41.0.7.tar.gz", hash = "sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, + {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, + {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, + {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, + {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, + {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = "sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, + {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, ] [package.dependencies] -cffi = ">=1.12" +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] nox = ["nox"] -pep8test = ["black", "check-sdist", "mypy", "ruff"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] @@ -758,13 +771,13 @@ files = [ [[package]] name = "docstring-parser" -version = "0.15" +version = "0.16" description = "Parse Python docstrings in reST, Google and Numpydoc format" optional = false python-versions = ">=3.6,<4.0" files = [ - {file = "docstring_parser-0.15-py3-none-any.whl", hash = "sha256:d1679b86250d269d06a99670924d6bce45adc00b08069dae8c47d98e89b667a9"}, - {file = "docstring_parser-0.15.tar.gz", hash = "sha256:48ddc093e8b1865899956fcc03b03e66bb7240c310fac5af81814580c55bf682"}, + {file = "docstring_parser-0.16-py3-none-any.whl", hash = "sha256:bf0a1387354d3691d102edef7ec124f219ef639982d096e26e3b60aeffa90637"}, + {file = "docstring_parser-0.16.tar.gz", hash = "sha256:538beabd0af1e2db0146b6bd3caa526c35a34d61af9fd2887f3a8a27a739aa6e"}, ] [[package]] @@ -910,29 +923,29 @@ devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benc [[package]] name = "filelock" -version = "3.13.1" +version = "3.13.3" description = "A platform independent file lock." optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, - {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, + {file = "filelock-3.13.3-py3-none-any.whl", hash = "sha256:5ffa845303983e7a0b7ae17636509bc97997d58afeafa72fb141a17b152284cb"}, + {file = "filelock-3.13.3.tar.gz", hash = "sha256:a79895a25bbefdf55d1a2a0a80968f7dbb28edcd6d4234a0afb3f37ecde4b546"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] typing = ["typing-extensions (>=4.8)"] [[package]] name = "fsspec" -version = "2023.12.2" +version = "2024.3.1" description = "File-system specification" optional = false python-versions = ">=3.8" files = [ - {file = "fsspec-2023.12.2-py3-none-any.whl", hash = "sha256:d800d87f72189a745fa3d6b033b9dc4a34ad069f60ca60b943a63599f5501960"}, - {file = "fsspec-2023.12.2.tar.gz", hash = "sha256:8548d39e8810b59c38014934f6b31e57f40c1b20f911f4cc2b85389c7e9bf0cb"}, + {file = "fsspec-2024.3.1-py3-none-any.whl", hash = "sha256:918d18d41bf73f0e2b261824baeb1b124bcf771767e3a26425cd7dec3332f512"}, + {file = "fsspec-2024.3.1.tar.gz", hash = "sha256:f39780e282d7d117ffb42bb96992f8a90795e4d0fb0f661a70ca39fe9c43ded9"}, ] [package.extras] @@ -950,7 +963,7 @@ github = ["requests"] gs = ["gcsfs"] gui = ["panel"] hdfs = ["pyarrow (>=1)"] -http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "requests"] +http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)"] libarchive = ["libarchive-c"] oci = ["ocifs"] s3 = ["s3fs"] @@ -961,12 +974,13 @@ tqdm = ["tqdm"] [[package]] name = "future" -version = "0.18.3" +version = "1.0.0" description = "Clean single-source support for Python 3 and 2" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ - {file = "future-0.18.3.tar.gz", hash = "sha256:34a17436ed1e96697a86f9de3d15a3b0be01d8bc8de9c1dffd59fb8234ed5307"}, + {file = "future-1.0.0-py3-none-any.whl", hash = "sha256:929292d34f5872e70396626ef385ec22355a1fae8ad29e1a734c3e43f9fbc216"}, + {file = "future-1.0.0.tar.gz", hash = "sha256:bd2968309307861edae1458a4f8a4f3598c03be43b97521076aebf5d94c07b05"}, ] [[package]] @@ -1064,18 +1078,27 @@ beautifulsoup4 = "*" [[package]] name = "google-api-core" -version = "2.15.0" +version = "2.18.0" description = "Google API client core library" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-core-2.15.0.tar.gz", hash = "sha256:abc978a72658f14a2df1e5e12532effe40f94f868f6e23d95133bd6abcca35ca"}, - {file = "google_api_core-2.15.0-py3-none-any.whl", hash = "sha256:2aa56d2be495551e66bbff7f729b790546f87d5c90e74781aa77233bcb395a8a"}, + {file = "google-api-core-2.18.0.tar.gz", hash = "sha256:62d97417bfc674d6cef251e5c4d639a9655e00c45528c4364fbfebb478ce72a9"}, + {file = "google_api_core-2.18.0-py3-none-any.whl", hash = "sha256:5a63aa102e0049abe85b5b88cb9409234c1f70afcda21ce1e40b285b9629c1d6"}, ] [package.dependencies] google-auth = ">=2.14.1,<3.0.dev0" googleapis-common-protos = ">=1.56.2,<2.0.dev0" +grpcio = [ + {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, + {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, +] +grpcio-status = [ + {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, + {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, +] +proto-plus = ">=1.22.3,<2.0.0dev" protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" requests = ">=2.18.0,<3.0.0.dev0" @@ -1086,31 +1109,31 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-api-python-client" -version = "2.113.0" +version = "2.124.0" description = "Google API Client Library for Python" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-python-client-2.113.0.tar.gz", hash = "sha256:bcffbc8ffbad631f699cf85aa91993f3dc03060b234ca9e6e2f9135028bd9b52"}, - {file = "google_api_python_client-2.113.0-py2.py3-none-any.whl", hash = "sha256:25659d488df6c8a69615b2a510af0e63b4c47ab2cb87d71c1e13b28715906e27"}, + {file = "google-api-python-client-2.124.0.tar.gz", hash = "sha256:f6d3258420f7c76b0f5266b5e402e6f804e30351b018a10083f4a46c3ec33773"}, + {file = "google_api_python_client-2.124.0-py2.py3-none-any.whl", hash = "sha256:07dc674449ed353704b1169fdee792f74438d024261dad71b6ce7bb9c683d51f"}, ] [package.dependencies] google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0.dev0" -google-auth = ">=1.19.0,<3.0.0.dev0" -google-auth-httplib2 = ">=0.1.0" -httplib2 = ">=0.15.0,<1.dev0" +google-auth = ">=1.32.0,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0.dev0" +google-auth-httplib2 = ">=0.2.0,<1.0.0" +httplib2 = ">=0.19.0,<1.dev0" uritemplate = ">=3.0.1,<5" [[package]] name = "google-auth" -version = "2.26.1" +version = "2.29.0" description = "Google Authentication Library" optional = false python-versions = ">=3.7" files = [ - {file = "google-auth-2.26.1.tar.gz", hash = "sha256:54385acca5c0fbdda510cd8585ba6f3fcb06eeecf8a6ecca39d3ee148b092590"}, - {file = "google_auth-2.26.1-py2.py3-none-any.whl", hash = "sha256:2c8b55e3e564f298122a02ab7b97458ccfcc5617840beb5d0ac757ada92c9780"}, + {file = "google-auth-2.29.0.tar.gz", hash = "sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360"}, + {file = "google_auth-2.29.0-py2.py3-none-any.whl", hash = "sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415"}, ] [package.dependencies] @@ -1142,17 +1165,18 @@ httplib2 = ">=0.19.0" [[package]] name = "google-cloud-bigquery" -version = "3.14.1" +version = "3.19.0" description = "Google BigQuery API client library" optional = false python-versions = ">=3.7" files = [ - {file = "google-cloud-bigquery-3.14.1.tar.gz", hash = "sha256:aa15bd86f79ea76824c7d710f5ae532323c4b3ba01ef4abff42d4ee7a2e9b142"}, - {file = "google_cloud_bigquery-3.14.1-py2.py3-none-any.whl", hash = "sha256:a8ded18455da71508db222b7c06197bc12b6dbc6ed5b0b64e7007b76d7016957"}, + {file = "google-cloud-bigquery-3.19.0.tar.gz", hash = "sha256:8e311dae49768e1501fcdc5e916bff4b7e169471e5707919f4a6f78a02b3b5a6"}, + {file = "google_cloud_bigquery-3.19.0-py2.py3-none-any.whl", hash = "sha256:c6b8850247a4b132066e49f6e45f850c22824482838688d744a4398eea1120ed"}, ] [package.dependencies] -google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0dev" +google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} +google-auth = ">=2.14.1,<3.0.0dev" google-cloud-core = ">=1.6.0,<3.0.0dev" google-resumable-media = ">=0.6.0,<3.0dev" packaging = ">=20.0.0" @@ -1190,18 +1214,18 @@ grpc = ["grpcio (>=1.38.0,<2.0dev)", "grpcio-status (>=1.38.0,<2.0.dev0)"] [[package]] name = "google-cloud-storage" -version = "2.14.0" +version = "2.16.0" description = "Google Cloud Storage API client library" optional = false python-versions = ">=3.7" files = [ - {file = "google-cloud-storage-2.14.0.tar.gz", hash = "sha256:2d23fcf59b55e7b45336729c148bb1c464468c69d5efbaee30f7201dd90eb97e"}, - {file = "google_cloud_storage-2.14.0-py2.py3-none-any.whl", hash = "sha256:8641243bbf2a2042c16a6399551fbb13f062cbc9a2de38d6c0bb5426962e9dbd"}, + {file = "google-cloud-storage-2.16.0.tar.gz", hash = "sha256:dda485fa503710a828d01246bd16ce9db0823dc51bbca742ce96a6817d58669f"}, + {file = "google_cloud_storage-2.16.0-py2.py3-none-any.whl", hash = "sha256:91a06b96fb79cf9cdfb4e759f178ce11ea885c79938f89590344d079305f5852"}, ] [package.dependencies] -google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0dev" -google-auth = ">=2.23.3,<3.0dev" +google-api-core = ">=2.15.0,<3.0.0dev" +google-auth = ">=2.26.1,<3.0dev" google-cloud-core = ">=2.3.0,<3.0dev" google-crc32c = ">=1.0,<2.0dev" google-resumable-media = ">=2.6.0" @@ -1327,13 +1351,13 @@ requests = ["requests (>=2.18.0,<3.0.0dev)"] [[package]] name = "googleapis-common-protos" -version = "1.62.0" +version = "1.63.0" description = "Common protobufs used in Google APIs" optional = false python-versions = ">=3.7" files = [ - {file = "googleapis-common-protos-1.62.0.tar.gz", hash = "sha256:83f0ece9f94e5672cced82f592d2a5edf527a96ed1794f0bab36d5735c996277"}, - {file = "googleapis_common_protos-1.62.0-py2.py3-none-any.whl", hash = "sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07"}, + {file = "googleapis-common-protos-1.63.0.tar.gz", hash = "sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e"}, + {file = "googleapis_common_protos-1.63.0-py2.py3-none-any.whl", hash = "sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632"}, ] [package.dependencies] @@ -1520,83 +1544,99 @@ oauth2client = ">=1.4.11" [[package]] name = "grpcio" -version = "1.60.0" +version = "1.62.1" description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.7" files = [ - {file = "grpcio-1.60.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:d020cfa595d1f8f5c6b343530cd3ca16ae5aefdd1e832b777f9f0eb105f5b139"}, - {file = "grpcio-1.60.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:b98f43fcdb16172dec5f4b49f2fece4b16a99fd284d81c6bbac1b3b69fcbe0ff"}, - {file = "grpcio-1.60.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:20e7a4f7ded59097c84059d28230907cd97130fa74f4a8bfd1d8e5ba18c81491"}, - {file = "grpcio-1.60.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:452ca5b4afed30e7274445dd9b441a35ece656ec1600b77fff8c216fdf07df43"}, - {file = "grpcio-1.60.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43e636dc2ce9ece583b3e2ca41df5c983f4302eabc6d5f9cd04f0562ee8ec1ae"}, - {file = "grpcio-1.60.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e306b97966369b889985a562ede9d99180def39ad42c8014628dd3cc343f508"}, - {file = "grpcio-1.60.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f897c3b127532e6befdcf961c415c97f320d45614daf84deba0a54e64ea2457b"}, - {file = "grpcio-1.60.0-cp310-cp310-win32.whl", hash = "sha256:b87efe4a380887425bb15f220079aa8336276398dc33fce38c64d278164f963d"}, - {file = "grpcio-1.60.0-cp310-cp310-win_amd64.whl", hash = "sha256:a9c7b71211f066908e518a2ef7a5e211670761651039f0d6a80d8d40054047df"}, - {file = "grpcio-1.60.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:fb464479934778d7cc5baf463d959d361954d6533ad34c3a4f1d267e86ee25fd"}, - {file = "grpcio-1.60.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:4b44d7e39964e808b071714666a812049765b26b3ea48c4434a3b317bac82f14"}, - {file = "grpcio-1.60.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:90bdd76b3f04bdb21de5398b8a7c629676c81dfac290f5f19883857e9371d28c"}, - {file = "grpcio-1.60.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91229d7203f1ef0ab420c9b53fe2ca5c1fbeb34f69b3bc1b5089466237a4a134"}, - {file = "grpcio-1.60.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b36a2c6d4920ba88fa98075fdd58ff94ebeb8acc1215ae07d01a418af4c0253"}, - {file = "grpcio-1.60.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:297eef542156d6b15174a1231c2493ea9ea54af8d016b8ca7d5d9cc65cfcc444"}, - {file = "grpcio-1.60.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:87c9224acba0ad8bacddf427a1c2772e17ce50b3042a789547af27099c5f751d"}, - {file = "grpcio-1.60.0-cp311-cp311-win32.whl", hash = "sha256:95ae3e8e2c1b9bf671817f86f155c5da7d49a2289c5cf27a319458c3e025c320"}, - {file = "grpcio-1.60.0-cp311-cp311-win_amd64.whl", hash = "sha256:467a7d31554892eed2aa6c2d47ded1079fc40ea0b9601d9f79204afa8902274b"}, - {file = "grpcio-1.60.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:a7152fa6e597c20cb97923407cf0934e14224af42c2b8d915f48bc3ad2d9ac18"}, - {file = "grpcio-1.60.0-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:7db16dd4ea1b05ada504f08d0dca1cd9b926bed3770f50e715d087c6f00ad748"}, - {file = "grpcio-1.60.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:b0571a5aef36ba9177e262dc88a9240c866d903a62799e44fd4aae3f9a2ec17e"}, - {file = "grpcio-1.60.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fd9584bf1bccdfff1512719316efa77be235469e1e3295dce64538c4773840b"}, - {file = "grpcio-1.60.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6a478581b1a1a8fdf3318ecb5f4d0cda41cacdffe2b527c23707c9c1b8fdb55"}, - {file = "grpcio-1.60.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:77c8a317f0fd5a0a2be8ed5cbe5341537d5c00bb79b3bb27ba7c5378ba77dbca"}, - {file = "grpcio-1.60.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1c30bb23a41df95109db130a6cc1b974844300ae2e5d68dd4947aacba5985aa5"}, - {file = "grpcio-1.60.0-cp312-cp312-win32.whl", hash = "sha256:2aef56e85901c2397bd557c5ba514f84de1f0ae5dd132f5d5fed042858115951"}, - {file = "grpcio-1.60.0-cp312-cp312-win_amd64.whl", hash = "sha256:e381fe0c2aa6c03b056ad8f52f8efca7be29fb4d9ae2f8873520843b6039612a"}, - {file = "grpcio-1.60.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:92f88ca1b956eb8427a11bb8b4a0c0b2b03377235fc5102cb05e533b8693a415"}, - {file = "grpcio-1.60.0-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:e278eafb406f7e1b1b637c2cf51d3ad45883bb5bd1ca56bc05e4fc135dfdaa65"}, - {file = "grpcio-1.60.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:a48edde788b99214613e440fce495bbe2b1e142a7f214cce9e0832146c41e324"}, - {file = "grpcio-1.60.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de2ad69c9a094bf37c1102b5744c9aec6cf74d2b635558b779085d0263166454"}, - {file = "grpcio-1.60.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:073f959c6f570797272f4ee9464a9997eaf1e98c27cb680225b82b53390d61e6"}, - {file = "grpcio-1.60.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c826f93050c73e7769806f92e601e0efdb83ec8d7c76ddf45d514fee54e8e619"}, - {file = "grpcio-1.60.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9e30be89a75ee66aec7f9e60086fadb37ff8c0ba49a022887c28c134341f7179"}, - {file = "grpcio-1.60.0-cp37-cp37m-win_amd64.whl", hash = "sha256:b0fb2d4801546598ac5cd18e3ec79c1a9af8b8f2a86283c55a5337c5aeca4b1b"}, - {file = "grpcio-1.60.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:9073513ec380434eb8d21970e1ab3161041de121f4018bbed3146839451a6d8e"}, - {file = "grpcio-1.60.0-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:74d7d9fa97809c5b892449b28a65ec2bfa458a4735ddad46074f9f7d9550ad13"}, - {file = "grpcio-1.60.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:1434ca77d6fed4ea312901122dc8da6c4389738bf5788f43efb19a838ac03ead"}, - {file = "grpcio-1.60.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e61e76020e0c332a98290323ecfec721c9544f5b739fab925b6e8cbe1944cf19"}, - {file = "grpcio-1.60.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675997222f2e2f22928fbba640824aebd43791116034f62006e19730715166c0"}, - {file = "grpcio-1.60.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5208a57eae445ae84a219dfd8b56e04313445d146873117b5fa75f3245bc1390"}, - {file = "grpcio-1.60.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:428d699c8553c27e98f4d29fdc0f0edc50e9a8a7590bfd294d2edb0da7be3629"}, - {file = "grpcio-1.60.0-cp38-cp38-win32.whl", hash = "sha256:83f2292ae292ed5a47cdcb9821039ca8e88902923198f2193f13959360c01860"}, - {file = "grpcio-1.60.0-cp38-cp38-win_amd64.whl", hash = "sha256:705a68a973c4c76db5d369ed573fec3367d7d196673fa86614b33d8c8e9ebb08"}, - {file = "grpcio-1.60.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:c193109ca4070cdcaa6eff00fdb5a56233dc7610216d58fb81638f89f02e4968"}, - {file = "grpcio-1.60.0-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:676e4a44e740deaba0f4d95ba1d8c5c89a2fcc43d02c39f69450b1fa19d39590"}, - {file = "grpcio-1.60.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:5ff21e000ff2f658430bde5288cb1ac440ff15c0d7d18b5fb222f941b46cb0d2"}, - {file = "grpcio-1.60.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c86343cf9ff7b2514dd229bdd88ebba760bd8973dac192ae687ff75e39ebfab"}, - {file = "grpcio-1.60.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fd3b3968ffe7643144580f260f04d39d869fcc2cddb745deef078b09fd2b328"}, - {file = "grpcio-1.60.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:30943b9530fe3620e3b195c03130396cd0ee3a0d10a66c1bee715d1819001eaf"}, - {file = "grpcio-1.60.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b10241250cb77657ab315270b064a6c7f1add58af94befa20687e7c8d8603ae6"}, - {file = "grpcio-1.60.0-cp39-cp39-win32.whl", hash = "sha256:79a050889eb8d57a93ed21d9585bb63fca881666fc709f5d9f7f9372f5e7fd03"}, - {file = "grpcio-1.60.0-cp39-cp39-win_amd64.whl", hash = "sha256:8a97a681e82bc11a42d4372fe57898d270a2707f36c45c6676e49ce0d5c41353"}, - {file = "grpcio-1.60.0.tar.gz", hash = "sha256:2199165a1affb666aa24adf0c97436686d0a61bc5fc113c037701fb7c7fceb96"}, + {file = "grpcio-1.62.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:179bee6f5ed7b5f618844f760b6acf7e910988de77a4f75b95bbfaa8106f3c1e"}, + {file = "grpcio-1.62.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:48611e4fa010e823ba2de8fd3f77c1322dd60cb0d180dc6630a7e157b205f7ea"}, + {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:b2a0e71b0a2158aa4bce48be9f8f9eb45cbd17c78c7443616d00abbe2a509f6d"}, + {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fbe80577c7880911d3ad65e5ecc997416c98f354efeba2f8d0f9112a67ed65a5"}, + {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58f6c693d446964e3292425e1d16e21a97a48ba9172f2d0df9d7b640acb99243"}, + {file = "grpcio-1.62.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:77c339403db5a20ef4fed02e4d1a9a3d9866bf9c0afc77a42234677313ea22f3"}, + {file = "grpcio-1.62.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b5a4ea906db7dec694098435d84bf2854fe158eb3cd51e1107e571246d4d1d70"}, + {file = "grpcio-1.62.1-cp310-cp310-win32.whl", hash = "sha256:4187201a53f8561c015bc745b81a1b2d278967b8de35f3399b84b0695e281d5f"}, + {file = "grpcio-1.62.1-cp310-cp310-win_amd64.whl", hash = "sha256:844d1f3fb11bd1ed362d3fdc495d0770cfab75761836193af166fee113421d66"}, + {file = "grpcio-1.62.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:833379943d1728a005e44103f17ecd73d058d37d95783eb8f0b28ddc1f54d7b2"}, + {file = "grpcio-1.62.1-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:c7fcc6a32e7b7b58f5a7d27530669337a5d587d4066060bcb9dee7a8c833dfb7"}, + {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:fa7d28eb4d50b7cbe75bb8b45ed0da9a1dc5b219a0af59449676a29c2eed9698"}, + {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48f7135c3de2f298b833be8b4ae20cafe37091634e91f61f5a7eb3d61ec6f660"}, + {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71f11fd63365ade276c9d4a7b7df5c136f9030e3457107e1791b3737a9b9ed6a"}, + {file = "grpcio-1.62.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4b49fd8fe9f9ac23b78437da94c54aa7e9996fbb220bac024a67469ce5d0825f"}, + {file = "grpcio-1.62.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:482ae2ae78679ba9ed5752099b32e5fe580443b4f798e1b71df412abf43375db"}, + {file = "grpcio-1.62.1-cp311-cp311-win32.whl", hash = "sha256:1faa02530b6c7426404372515fe5ddf66e199c2ee613f88f025c6f3bd816450c"}, + {file = "grpcio-1.62.1-cp311-cp311-win_amd64.whl", hash = "sha256:5bd90b8c395f39bc82a5fb32a0173e220e3f401ff697840f4003e15b96d1befc"}, + {file = "grpcio-1.62.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:b134d5d71b4e0837fff574c00e49176051a1c532d26c052a1e43231f252d813b"}, + {file = "grpcio-1.62.1-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:d1f6c96573dc09d50dbcbd91dbf71d5cf97640c9427c32584010fbbd4c0e0037"}, + {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:359f821d4578f80f41909b9ee9b76fb249a21035a061a327f91c953493782c31"}, + {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a485f0c2010c696be269184bdb5ae72781344cb4e60db976c59d84dd6354fac9"}, + {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b50b09b4dc01767163d67e1532f948264167cd27f49e9377e3556c3cba1268e1"}, + {file = "grpcio-1.62.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3227c667dccbe38f2c4d943238b887bac588d97c104815aecc62d2fd976e014b"}, + {file = "grpcio-1.62.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3952b581eb121324853ce2b191dae08badb75cd493cb4e0243368aa9e61cfd41"}, + {file = "grpcio-1.62.1-cp312-cp312-win32.whl", hash = "sha256:83a17b303425104d6329c10eb34bba186ffa67161e63fa6cdae7776ff76df73f"}, + {file = "grpcio-1.62.1-cp312-cp312-win_amd64.whl", hash = "sha256:6696ffe440333a19d8d128e88d440f91fb92c75a80ce4b44d55800e656a3ef1d"}, + {file = "grpcio-1.62.1-cp37-cp37m-linux_armv7l.whl", hash = "sha256:e3393b0823f938253370ebef033c9fd23d27f3eae8eb9a8f6264900c7ea3fb5a"}, + {file = "grpcio-1.62.1-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:83e7ccb85a74beaeae2634f10eb858a0ed1a63081172649ff4261f929bacfd22"}, + {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:882020c87999d54667a284c7ddf065b359bd00251fcd70279ac486776dbf84ec"}, + {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a10383035e864f386fe096fed5c47d27a2bf7173c56a6e26cffaaa5a361addb1"}, + {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:960edebedc6b9ada1ef58e1c71156f28689978188cd8cff3b646b57288a927d9"}, + {file = "grpcio-1.62.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:23e2e04b83f347d0aadde0c9b616f4726c3d76db04b438fd3904b289a725267f"}, + {file = "grpcio-1.62.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:978121758711916d34fe57c1f75b79cdfc73952f1481bb9583399331682d36f7"}, + {file = "grpcio-1.62.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9084086190cc6d628f282e5615f987288b95457292e969b9205e45b442276407"}, + {file = "grpcio-1.62.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:22bccdd7b23c420a27fd28540fb5dcbc97dc6be105f7698cb0e7d7a420d0e362"}, + {file = "grpcio-1.62.1-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:8999bf1b57172dbc7c3e4bb3c732658e918f5c333b2942243f10d0d653953ba9"}, + {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:d9e52558b8b8c2f4ac05ac86344a7417ccdd2b460a59616de49eb6933b07a0bd"}, + {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1714e7bc935780bc3de1b3fcbc7674209adf5208ff825799d579ffd6cd0bd505"}, + {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8842ccbd8c0e253c1f189088228f9b433f7a93b7196b9e5b6f87dba393f5d5d"}, + {file = "grpcio-1.62.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1f1e7b36bdff50103af95a80923bf1853f6823dd62f2d2a2524b66ed74103e49"}, + {file = "grpcio-1.62.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bba97b8e8883a8038606480d6b6772289f4c907f6ba780fa1f7b7da7dfd76f06"}, + {file = "grpcio-1.62.1-cp38-cp38-win32.whl", hash = "sha256:a7f615270fe534548112a74e790cd9d4f5509d744dd718cd442bf016626c22e4"}, + {file = "grpcio-1.62.1-cp38-cp38-win_amd64.whl", hash = "sha256:e6c8c8693df718c5ecbc7babb12c69a4e3677fd11de8886f05ab22d4e6b1c43b"}, + {file = "grpcio-1.62.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:73db2dc1b201d20ab7083e7041946910bb991e7e9761a0394bbc3c2632326483"}, + {file = "grpcio-1.62.1-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:407b26b7f7bbd4f4751dbc9767a1f0716f9fe72d3d7e96bb3ccfc4aace07c8de"}, + {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:f8de7c8cef9261a2d0a62edf2ccea3d741a523c6b8a6477a340a1f2e417658de"}, + {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd5c8a1af40ec305d001c60236308a67e25419003e9bb3ebfab5695a8d0b369"}, + {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be0477cb31da67846a33b1a75c611f88bfbcd427fe17701b6317aefceee1b96f"}, + {file = "grpcio-1.62.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:60dcd824df166ba266ee0cfaf35a31406cd16ef602b49f5d4dfb21f014b0dedd"}, + {file = "grpcio-1.62.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:973c49086cabab773525f6077f95e5a993bfc03ba8fc32e32f2c279497780585"}, + {file = "grpcio-1.62.1-cp39-cp39-win32.whl", hash = "sha256:12859468e8918d3bd243d213cd6fd6ab07208195dc140763c00dfe901ce1e1b4"}, + {file = "grpcio-1.62.1-cp39-cp39-win_amd64.whl", hash = "sha256:b7209117bbeebdfa5d898205cc55153a51285757902dd73c47de498ad4d11332"}, + {file = "grpcio-1.62.1.tar.gz", hash = "sha256:6c455e008fa86d9e9a9d85bb76da4277c0d7d9668a3bfa70dbe86e9f3c759947"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.60.0)"] +protobuf = ["grpcio-tools (>=1.62.1)"] [[package]] name = "grpcio-health-checking" -version = "1.60.0" +version = "1.62.1" description = "Standard Health Checking Service for gRPC" optional = false python-versions = ">=3.6" files = [ - {file = "grpcio-health-checking-1.60.0.tar.gz", hash = "sha256:478b5300778120fed9f6d134d72b157a59f9c06689789218cbff47fafca2f119"}, - {file = "grpcio_health_checking-1.60.0-py3-none-any.whl", hash = "sha256:13caf28bc93795bd6bdb580b21832ebdd1aa3f5b648ea47ed17362d85bed96d3"}, + {file = "grpcio-health-checking-1.62.1.tar.gz", hash = "sha256:9e56180a941b1d32a077d7491e0611d0483c396358afd5349bf00152612e4583"}, + {file = "grpcio_health_checking-1.62.1-py3-none-any.whl", hash = "sha256:9ce761c09fc383e7aa2f7e6c0b0b65d5a1157c1b98d1f5871f7c38aca47d49b9"}, ] [package.dependencies] -grpcio = ">=1.60.0" +grpcio = ">=1.62.1" +protobuf = ">=4.21.6" + +[[package]] +name = "grpcio-status" +version = "1.62.1" +description = "Status proto mapping for gRPC" +optional = false +python-versions = ">=3.6" +files = [ + {file = "grpcio-status-1.62.1.tar.gz", hash = "sha256:3431c8abbab0054912c41df5c72f03ddf3b7a67be8a287bb3c18a3456f96ff77"}, + {file = "grpcio_status-1.62.1-py3-none-any.whl", hash = "sha256:af0c3ab85da31669f21749e8d53d669c061ebc6ce5637be49a46edcb7aa8ab17"}, +] + +[package.dependencies] +googleapis-common-protos = ">=1.5.5" +grpcio = ">=1.62.1" protobuf = ">=4.21.6" [[package]] @@ -1713,22 +1753,22 @@ files = [ [[package]] name = "importlib-metadata" -version = "7.0.1" +version = "7.1.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"}, - {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"}, + {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"}, + {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] [[package]] name = "iniconfig" @@ -1754,21 +1794,21 @@ files = [ [[package]] name = "jaraco-classes" -version = "3.3.0" +version = "3.3.1" description = "Utility functions for Python class constructs" optional = false python-versions = ">=3.8" files = [ - {file = "jaraco.classes-3.3.0-py3-none-any.whl", hash = "sha256:10afa92b6743f25c0cf5f37c6bb6e18e2c5bb84a16527ccfc0040ea377e7aaeb"}, - {file = "jaraco.classes-3.3.0.tar.gz", hash = "sha256:c063dd08e89217cee02c8d5e5ec560f2c8ce6cdc2fcdc2e68f7b2e5547ed3621"}, + {file = "jaraco.classes-3.3.1-py3-none-any.whl", hash = "sha256:86b534de565381f6b3c1c830d13f931d7be1a75f0081c57dff615578676e2206"}, + {file = "jaraco.classes-3.3.1.tar.gz", hash = "sha256:cb28a5ebda8bc47d8c8015307d93163464f9f2b91ab4006e09ff0ce07e8bfb30"}, ] [package.dependencies] more-itertools = "*" [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"] [[package]] name = "jedi" @@ -1806,13 +1846,13 @@ trio = ["async_generator", "trio"] [[package]] name = "jinja2" -version = "3.1.2" +version = "3.1.3" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" files = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, ] [package.dependencies] @@ -1823,13 +1863,13 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "keyring" -version = "24.3.0" +version = "24.3.1" description = "Store and access your passwords safely." optional = false python-versions = ">=3.8" files = [ - {file = "keyring-24.3.0-py3-none-any.whl", hash = "sha256:4446d35d636e6a10b8bce7caa66913dd9eca5fd222ca03a3d42c38608ac30836"}, - {file = "keyring-24.3.0.tar.gz", hash = "sha256:e730ecffd309658a08ee82535a3b5ec4b4c8669a9be11efb66249d8e0aeb9a25"}, + {file = "keyring-24.3.1-py3-none-any.whl", hash = "sha256:df38a4d7419a6a60fea5cef1e45a948a3e8430dd12ad88b0f423c5c143906218"}, + {file = "keyring-24.3.1.tar.gz", hash = "sha256:c3327b6ffafc0e8befbdb597cacdb4928ffe5c1212f7645f186e6d9957a898db"}, ] [package.dependencies] @@ -1842,17 +1882,17 @@ SecretStorage = {version = ">=3.2", markers = "sys_platform == \"linux\""} [package.extras] completion = ["shtab (>=1.1.0)"] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"] [[package]] name = "mako" -version = "1.3.0" +version = "1.3.2" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." optional = false python-versions = ">=3.8" files = [ - {file = "Mako-1.3.0-py3-none-any.whl", hash = "sha256:57d4e997349f1a92035aa25c17ace371a4213f2ca42f99bee9a602500cfd54d9"}, - {file = "Mako-1.3.0.tar.gz", hash = "sha256:e3a9d388fd00e87043edbe8792f45880ac0114e9c4adc69f6e9bfb2c55e3b11b"}, + {file = "Mako-1.3.2-py3-none-any.whl", hash = "sha256:32a99d70754dfce237019d17ffe4a282d2d3351b9c476e90d8a60e63f133b80c"}, + {file = "Mako-1.3.2.tar.gz", hash = "sha256:2a0c8ad7f6274271b3bb7467dd37cf9cc6dab4bc19cb69a4ef10669402de698e"}, ] [package.dependencies] @@ -1889,71 +1929,71 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] name = "markupsafe" -version = "2.1.3" +version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, - {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] [[package]] @@ -2014,195 +2054,211 @@ files = [ [[package]] name = "msgpack" -version = "1.0.7" +version = "1.0.8" description = "MessagePack serializer" optional = false python-versions = ">=3.8" files = [ - {file = "msgpack-1.0.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:04ad6069c86e531682f9e1e71b71c1c3937d6014a7c3e9edd2aa81ad58842862"}, - {file = "msgpack-1.0.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cca1b62fe70d761a282496b96a5e51c44c213e410a964bdffe0928e611368329"}, - {file = "msgpack-1.0.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e50ebce52f41370707f1e21a59514e3375e3edd6e1832f5e5235237db933c98b"}, - {file = "msgpack-1.0.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a7b4f35de6a304b5533c238bee86b670b75b03d31b7797929caa7a624b5dda6"}, - {file = "msgpack-1.0.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28efb066cde83c479dfe5a48141a53bc7e5f13f785b92ddde336c716663039ee"}, - {file = "msgpack-1.0.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4cb14ce54d9b857be9591ac364cb08dc2d6a5c4318c1182cb1d02274029d590d"}, - {file = "msgpack-1.0.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b573a43ef7c368ba4ea06050a957c2a7550f729c31f11dd616d2ac4aba99888d"}, - {file = "msgpack-1.0.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ccf9a39706b604d884d2cb1e27fe973bc55f2890c52f38df742bc1d79ab9f5e1"}, - {file = "msgpack-1.0.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cb70766519500281815dfd7a87d3a178acf7ce95390544b8c90587d76b227681"}, - {file = "msgpack-1.0.7-cp310-cp310-win32.whl", hash = "sha256:b610ff0f24e9f11c9ae653c67ff8cc03c075131401b3e5ef4b82570d1728f8a9"}, - {file = "msgpack-1.0.7-cp310-cp310-win_amd64.whl", hash = "sha256:a40821a89dc373d6427e2b44b572efc36a2778d3f543299e2f24eb1a5de65415"}, - {file = "msgpack-1.0.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:576eb384292b139821c41995523654ad82d1916da6a60cff129c715a6223ea84"}, - {file = "msgpack-1.0.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:730076207cb816138cf1af7f7237b208340a2c5e749707457d70705715c93b93"}, - {file = "msgpack-1.0.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:85765fdf4b27eb5086f05ac0491090fc76f4f2b28e09d9350c31aac25a5aaff8"}, - {file = "msgpack-1.0.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3476fae43db72bd11f29a5147ae2f3cb22e2f1a91d575ef130d2bf49afd21c46"}, - {file = "msgpack-1.0.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d4c80667de2e36970ebf74f42d1088cc9ee7ef5f4e8c35eee1b40eafd33ca5b"}, - {file = "msgpack-1.0.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b0bf0effb196ed76b7ad883848143427a73c355ae8e569fa538365064188b8e"}, - {file = "msgpack-1.0.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f9a7c509542db4eceed3dcf21ee5267ab565a83555c9b88a8109dcecc4709002"}, - {file = "msgpack-1.0.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:84b0daf226913133f899ea9b30618722d45feffa67e4fe867b0b5ae83a34060c"}, - {file = "msgpack-1.0.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ec79ff6159dffcc30853b2ad612ed572af86c92b5168aa3fc01a67b0fa40665e"}, - {file = "msgpack-1.0.7-cp311-cp311-win32.whl", hash = "sha256:3e7bf4442b310ff154b7bb9d81eb2c016b7d597e364f97d72b1acc3817a0fdc1"}, - {file = "msgpack-1.0.7-cp311-cp311-win_amd64.whl", hash = "sha256:3f0c8c6dfa6605ab8ff0611995ee30d4f9fcff89966cf562733b4008a3d60d82"}, - {file = "msgpack-1.0.7-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f0936e08e0003f66bfd97e74ee530427707297b0d0361247e9b4f59ab78ddc8b"}, - {file = "msgpack-1.0.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:98bbd754a422a0b123c66a4c341de0474cad4a5c10c164ceed6ea090f3563db4"}, - {file = "msgpack-1.0.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b291f0ee7961a597cbbcc77709374087fa2a9afe7bdb6a40dbbd9b127e79afee"}, - {file = "msgpack-1.0.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebbbba226f0a108a7366bf4b59bf0f30a12fd5e75100c630267d94d7f0ad20e5"}, - {file = "msgpack-1.0.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e2d69948e4132813b8d1131f29f9101bc2c915f26089a6d632001a5c1349672"}, - {file = "msgpack-1.0.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bdf38ba2d393c7911ae989c3bbba510ebbcdf4ecbdbfec36272abe350c454075"}, - {file = "msgpack-1.0.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:993584fc821c58d5993521bfdcd31a4adf025c7d745bbd4d12ccfecf695af5ba"}, - {file = "msgpack-1.0.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:52700dc63a4676669b341ba33520f4d6e43d3ca58d422e22ba66d1736b0a6e4c"}, - {file = "msgpack-1.0.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e45ae4927759289c30ccba8d9fdce62bb414977ba158286b5ddaf8df2cddb5c5"}, - {file = "msgpack-1.0.7-cp312-cp312-win32.whl", hash = "sha256:27dcd6f46a21c18fa5e5deed92a43d4554e3df8d8ca5a47bf0615d6a5f39dbc9"}, - {file = "msgpack-1.0.7-cp312-cp312-win_amd64.whl", hash = "sha256:7687e22a31e976a0e7fc99c2f4d11ca45eff652a81eb8c8085e9609298916dcf"}, - {file = "msgpack-1.0.7-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5b6ccc0c85916998d788b295765ea0e9cb9aac7e4a8ed71d12e7d8ac31c23c95"}, - {file = "msgpack-1.0.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:235a31ec7db685f5c82233bddf9858748b89b8119bf4538d514536c485c15fe0"}, - {file = "msgpack-1.0.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cab3db8bab4b7e635c1c97270d7a4b2a90c070b33cbc00c99ef3f9be03d3e1f7"}, - {file = "msgpack-1.0.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bfdd914e55e0d2c9e1526de210f6fe8ffe9705f2b1dfcc4aecc92a4cb4b533d"}, - {file = "msgpack-1.0.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36e17c4592231a7dbd2ed09027823ab295d2791b3b1efb2aee874b10548b7524"}, - {file = "msgpack-1.0.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38949d30b11ae5f95c3c91917ee7a6b239f5ec276f271f28638dec9156f82cfc"}, - {file = "msgpack-1.0.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ff1d0899f104f3921d94579a5638847f783c9b04f2d5f229392ca77fba5b82fc"}, - {file = "msgpack-1.0.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:dc43f1ec66eb8440567186ae2f8c447d91e0372d793dfe8c222aec857b81a8cf"}, - {file = "msgpack-1.0.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dd632777ff3beaaf629f1ab4396caf7ba0bdd075d948a69460d13d44357aca4c"}, - {file = "msgpack-1.0.7-cp38-cp38-win32.whl", hash = "sha256:4e71bc4416de195d6e9b4ee93ad3f2f6b2ce11d042b4d7a7ee00bbe0358bd0c2"}, - {file = "msgpack-1.0.7-cp38-cp38-win_amd64.whl", hash = "sha256:8f5b234f567cf76ee489502ceb7165c2a5cecec081db2b37e35332b537f8157c"}, - {file = "msgpack-1.0.7-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfef2bb6ef068827bbd021017a107194956918ab43ce4d6dc945ffa13efbc25f"}, - {file = "msgpack-1.0.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:484ae3240666ad34cfa31eea7b8c6cd2f1fdaae21d73ce2974211df099a95d81"}, - {file = "msgpack-1.0.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3967e4ad1aa9da62fd53e346ed17d7b2e922cba5ab93bdd46febcac39be636fc"}, - {file = "msgpack-1.0.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dd178c4c80706546702c59529ffc005681bd6dc2ea234c450661b205445a34d"}, - {file = "msgpack-1.0.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6ffbc252eb0d229aeb2f9ad051200668fc3a9aaa8994e49f0cb2ffe2b7867e7"}, - {file = "msgpack-1.0.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:822ea70dc4018c7e6223f13affd1c5c30c0f5c12ac1f96cd8e9949acddb48a61"}, - {file = "msgpack-1.0.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:384d779f0d6f1b110eae74cb0659d9aa6ff35aaf547b3955abf2ab4c901c4819"}, - {file = "msgpack-1.0.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f64e376cd20d3f030190e8c32e1c64582eba56ac6dc7d5b0b49a9d44021b52fd"}, - {file = "msgpack-1.0.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5ed82f5a7af3697b1c4786053736f24a0efd0a1b8a130d4c7bfee4b9ded0f08f"}, - {file = "msgpack-1.0.7-cp39-cp39-win32.whl", hash = "sha256:f26a07a6e877c76a88e3cecac8531908d980d3d5067ff69213653649ec0f60ad"}, - {file = "msgpack-1.0.7-cp39-cp39-win_amd64.whl", hash = "sha256:1dc93e8e4653bdb5910aed79f11e165c85732067614f180f70534f056da97db3"}, - {file = "msgpack-1.0.7.tar.gz", hash = "sha256:572efc93db7a4d27e404501975ca6d2d9775705c2d922390d878fcf768d92c87"}, + {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:505fe3d03856ac7d215dbe005414bc28505d26f0c128906037e66d98c4e95868"}, + {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6b7842518a63a9f17107eb176320960ec095a8ee3b4420b5f688e24bf50c53c"}, + {file = "msgpack-1.0.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:376081f471a2ef24828b83a641a02c575d6103a3ad7fd7dade5486cad10ea659"}, + {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e390971d082dba073c05dbd56322427d3280b7cc8b53484c9377adfbae67dc2"}, + {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e073efcba9ea99db5acef3959efa45b52bc67b61b00823d2a1a6944bf45982"}, + {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82d92c773fbc6942a7a8b520d22c11cfc8fd83bba86116bfcf962c2f5c2ecdaa"}, + {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9ee32dcb8e531adae1f1ca568822e9b3a738369b3b686d1477cbc643c4a9c128"}, + {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e3aa7e51d738e0ec0afbed661261513b38b3014754c9459508399baf14ae0c9d"}, + {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:69284049d07fce531c17404fcba2bb1df472bc2dcdac642ae71a2d079d950653"}, + {file = "msgpack-1.0.8-cp310-cp310-win32.whl", hash = "sha256:13577ec9e247f8741c84d06b9ece5f654920d8365a4b636ce0e44f15e07ec693"}, + {file = "msgpack-1.0.8-cp310-cp310-win_amd64.whl", hash = "sha256:e532dbd6ddfe13946de050d7474e3f5fb6ec774fbb1a188aaf469b08cf04189a"}, + {file = "msgpack-1.0.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9517004e21664f2b5a5fd6333b0731b9cf0817403a941b393d89a2f1dc2bd836"}, + {file = "msgpack-1.0.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d16a786905034e7e34098634b184a7d81f91d4c3d246edc6bd7aefb2fd8ea6ad"}, + {file = "msgpack-1.0.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2872993e209f7ed04d963e4b4fbae72d034844ec66bc4ca403329db2074377b"}, + {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c330eace3dd100bdb54b5653b966de7f51c26ec4a7d4e87132d9b4f738220ba"}, + {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83b5c044f3eff2a6534768ccfd50425939e7a8b5cf9a7261c385de1e20dcfc85"}, + {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1876b0b653a808fcd50123b953af170c535027bf1d053b59790eebb0aeb38950"}, + {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:dfe1f0f0ed5785c187144c46a292b8c34c1295c01da12e10ccddfc16def4448a"}, + {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3528807cbbb7f315bb81959d5961855e7ba52aa60a3097151cb21956fbc7502b"}, + {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e2f879ab92ce502a1e65fce390eab619774dda6a6ff719718069ac94084098ce"}, + {file = "msgpack-1.0.8-cp311-cp311-win32.whl", hash = "sha256:26ee97a8261e6e35885c2ecd2fd4a6d38252246f94a2aec23665a4e66d066305"}, + {file = "msgpack-1.0.8-cp311-cp311-win_amd64.whl", hash = "sha256:eadb9f826c138e6cf3c49d6f8de88225a3c0ab181a9b4ba792e006e5292d150e"}, + {file = "msgpack-1.0.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:114be227f5213ef8b215c22dde19532f5da9652e56e8ce969bf0a26d7c419fee"}, + {file = "msgpack-1.0.8-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d661dc4785affa9d0edfdd1e59ec056a58b3dbb9f196fa43587f3ddac654ac7b"}, + {file = "msgpack-1.0.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d56fd9f1f1cdc8227d7b7918f55091349741904d9520c65f0139a9755952c9e8"}, + {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0726c282d188e204281ebd8de31724b7d749adebc086873a59efb8cf7ae27df3"}, + {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8db8e423192303ed77cff4dce3a4b88dbfaf43979d280181558af5e2c3c71afc"}, + {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99881222f4a8c2f641f25703963a5cefb076adffd959e0558dc9f803a52d6a58"}, + {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b5505774ea2a73a86ea176e8a9a4a7c8bf5d521050f0f6f8426afe798689243f"}, + {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ef254a06bcea461e65ff0373d8a0dd1ed3aa004af48839f002a0c994a6f72d04"}, + {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e1dd7839443592d00e96db831eddb4111a2a81a46b028f0facd60a09ebbdd543"}, + {file = "msgpack-1.0.8-cp312-cp312-win32.whl", hash = "sha256:64d0fcd436c5683fdd7c907eeae5e2cbb5eb872fafbc03a43609d7941840995c"}, + {file = "msgpack-1.0.8-cp312-cp312-win_amd64.whl", hash = "sha256:74398a4cf19de42e1498368c36eed45d9528f5fd0155241e82c4082b7e16cffd"}, + {file = "msgpack-1.0.8-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0ceea77719d45c839fd73abcb190b8390412a890df2f83fb8cf49b2a4b5c2f40"}, + {file = "msgpack-1.0.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1ab0bbcd4d1f7b6991ee7c753655b481c50084294218de69365f8f1970d4c151"}, + {file = "msgpack-1.0.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1cce488457370ffd1f953846f82323cb6b2ad2190987cd4d70b2713e17268d24"}, + {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3923a1778f7e5ef31865893fdca12a8d7dc03a44b33e2a5f3295416314c09f5d"}, + {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a22e47578b30a3e199ab067a4d43d790249b3c0587d9a771921f86250c8435db"}, + {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd739c9251d01e0279ce729e37b39d49a08c0420d3fee7f2a4968c0576678f77"}, + {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d3420522057ebab1728b21ad473aa950026d07cb09da41103f8e597dfbfaeb13"}, + {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5845fdf5e5d5b78a49b826fcdc0eb2e2aa7191980e3d2cfd2a30303a74f212e2"}, + {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a0e76621f6e1f908ae52860bdcb58e1ca85231a9b0545e64509c931dd34275a"}, + {file = "msgpack-1.0.8-cp38-cp38-win32.whl", hash = "sha256:374a8e88ddab84b9ada695d255679fb99c53513c0a51778796fcf0944d6c789c"}, + {file = "msgpack-1.0.8-cp38-cp38-win_amd64.whl", hash = "sha256:f3709997b228685fe53e8c433e2df9f0cdb5f4542bd5114ed17ac3c0129b0480"}, + {file = "msgpack-1.0.8-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f51bab98d52739c50c56658cc303f190785f9a2cd97b823357e7aeae54c8f68a"}, + {file = "msgpack-1.0.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:73ee792784d48aa338bba28063e19a27e8d989344f34aad14ea6e1b9bd83f596"}, + {file = "msgpack-1.0.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f9904e24646570539a8950400602d66d2b2c492b9010ea7e965025cb71d0c86d"}, + {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e75753aeda0ddc4c28dce4c32ba2f6ec30b1b02f6c0b14e547841ba5b24f753f"}, + {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5dbf059fb4b7c240c873c1245ee112505be27497e90f7c6591261c7d3c3a8228"}, + {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4916727e31c28be8beaf11cf117d6f6f188dcc36daae4e851fee88646f5b6b18"}, + {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7938111ed1358f536daf311be244f34df7bf3cdedb3ed883787aca97778b28d8"}, + {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:493c5c5e44b06d6c9268ce21b302c9ca055c1fd3484c25ba41d34476c76ee746"}, + {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fbb160554e319f7b22ecf530a80a3ff496d38e8e07ae763b9e82fadfe96f273"}, + {file = "msgpack-1.0.8-cp39-cp39-win32.whl", hash = "sha256:f9af38a89b6a5c04b7d18c492c8ccf2aee7048aff1ce8437c4683bb5a1df893d"}, + {file = "msgpack-1.0.8-cp39-cp39-win_amd64.whl", hash = "sha256:ed59dd52075f8fc91da6053b12e8c89e37aa043f8986efd89e61fae69dc1b011"}, + {file = "msgpack-1.0.8.tar.gz", hash = "sha256:95c02b0e27e706e48d0e5426d1710ca78e0f0628d6e89d5b5a5b91a5f12274f3"}, ] [[package]] name = "multidict" -version = "6.0.4" +version = "6.0.5" description = "multidict implementation" optional = false python-versions = ">=3.7" files = [ - {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, - {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, - {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, - {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, - {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, - {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, - {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, - {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, - {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, - {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, - {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, - {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, - {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, - {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, - {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, + {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, + {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, + {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, + {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, + {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, + {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, + {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, + {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, + {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, + {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, + {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, + {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, + {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, + {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, + {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, ] [[package]] name = "numpy" -version = "1.26.3" +version = "1.26.4" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.9" files = [ - {file = "numpy-1.26.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:806dd64230dbbfaca8a27faa64e2f414bf1c6622ab78cc4264f7f5f028fee3bf"}, - {file = "numpy-1.26.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02f98011ba4ab17f46f80f7f8f1c291ee7d855fcef0a5a98db80767a468c85cd"}, - {file = "numpy-1.26.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d45b3ec2faed4baca41c76617fcdcfa4f684ff7a151ce6fc78ad3b6e85af0a6"}, - {file = "numpy-1.26.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdd2b45bf079d9ad90377048e2747a0c82351989a2165821f0c96831b4a2a54b"}, - {file = "numpy-1.26.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:211ddd1e94817ed2d175b60b6374120244a4dd2287f4ece45d49228b4d529178"}, - {file = "numpy-1.26.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1240f767f69d7c4c8a29adde2310b871153df9b26b5cb2b54a561ac85146485"}, - {file = "numpy-1.26.3-cp310-cp310-win32.whl", hash = "sha256:21a9484e75ad018974a2fdaa216524d64ed4212e418e0a551a2d83403b0531d3"}, - {file = "numpy-1.26.3-cp310-cp310-win_amd64.whl", hash = "sha256:9e1591f6ae98bcfac2a4bbf9221c0b92ab49762228f38287f6eeb5f3f55905ce"}, - {file = "numpy-1.26.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b831295e5472954104ecb46cd98c08b98b49c69fdb7040483aff799a755a7374"}, - {file = "numpy-1.26.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9e87562b91f68dd8b1c39149d0323b42e0082db7ddb8e934ab4c292094d575d6"}, - {file = "numpy-1.26.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c66d6fec467e8c0f975818c1796d25c53521124b7cfb760114be0abad53a0a2"}, - {file = "numpy-1.26.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f25e2811a9c932e43943a2615e65fc487a0b6b49218899e62e426e7f0a57eeda"}, - {file = "numpy-1.26.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:af36e0aa45e25c9f57bf684b1175e59ea05d9a7d3e8e87b7ae1a1da246f2767e"}, - {file = "numpy-1.26.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:51c7f1b344f302067b02e0f5b5d2daa9ed4a721cf49f070280ac202738ea7f00"}, - {file = "numpy-1.26.3-cp311-cp311-win32.whl", hash = "sha256:7ca4f24341df071877849eb2034948459ce3a07915c2734f1abb4018d9c49d7b"}, - {file = "numpy-1.26.3-cp311-cp311-win_amd64.whl", hash = "sha256:39763aee6dfdd4878032361b30b2b12593fb445ddb66bbac802e2113eb8a6ac4"}, - {file = "numpy-1.26.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a7081fd19a6d573e1a05e600c82a1c421011db7935ed0d5c483e9dd96b99cf13"}, - {file = "numpy-1.26.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12c70ac274b32bc00c7f61b515126c9205323703abb99cd41836e8125ea0043e"}, - {file = "numpy-1.26.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f784e13e598e9594750b2ef6729bcd5a47f6cfe4a12cca13def35e06d8163e3"}, - {file = "numpy-1.26.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f24750ef94d56ce6e33e4019a8a4d68cfdb1ef661a52cdaee628a56d2437419"}, - {file = "numpy-1.26.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:77810ef29e0fb1d289d225cabb9ee6cf4d11978a00bb99f7f8ec2132a84e0166"}, - {file = "numpy-1.26.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8ed07a90f5450d99dad60d3799f9c03c6566709bd53b497eb9ccad9a55867f36"}, - {file = "numpy-1.26.3-cp312-cp312-win32.whl", hash = "sha256:f73497e8c38295aaa4741bdfa4fda1a5aedda5473074369eca10626835445511"}, - {file = "numpy-1.26.3-cp312-cp312-win_amd64.whl", hash = "sha256:da4b0c6c699a0ad73c810736303f7fbae483bcb012e38d7eb06a5e3b432c981b"}, - {file = "numpy-1.26.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1666f634cb3c80ccbd77ec97bc17337718f56d6658acf5d3b906ca03e90ce87f"}, - {file = "numpy-1.26.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18c3319a7d39b2c6a9e3bb75aab2304ab79a811ac0168a671a62e6346c29b03f"}, - {file = "numpy-1.26.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b7e807d6888da0db6e7e75838444d62495e2b588b99e90dd80c3459594e857b"}, - {file = "numpy-1.26.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4d362e17bcb0011738c2d83e0a65ea8ce627057b2fdda37678f4374a382a137"}, - {file = "numpy-1.26.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b8c275f0ae90069496068c714387b4a0eba5d531aace269559ff2b43655edd58"}, - {file = "numpy-1.26.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cc0743f0302b94f397a4a65a660d4cd24267439eb16493fb3caad2e4389bccbb"}, - {file = "numpy-1.26.3-cp39-cp39-win32.whl", hash = "sha256:9bc6d1a7f8cedd519c4b7b1156d98e051b726bf160715b769106661d567b3f03"}, - {file = "numpy-1.26.3-cp39-cp39-win_amd64.whl", hash = "sha256:867e3644e208c8922a3be26fc6bbf112a035f50f0a86497f98f228c50c607bb2"}, - {file = "numpy-1.26.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3c67423b3703f8fbd90f5adaa37f85b5794d3366948efe9a5190a5f3a83fc34e"}, - {file = "numpy-1.26.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46f47ee566d98849323f01b349d58f2557f02167ee301e5e28809a8c0e27a2d0"}, - {file = "numpy-1.26.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a8474703bffc65ca15853d5fd4d06b18138ae90c17c8d12169968e998e448bb5"}, - {file = "numpy-1.26.3.tar.gz", hash = "sha256:697df43e2b6310ecc9d95f05d5ef20eacc09c7c4ecc9da3f235d39e71b7da1e4"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, ] [[package]] @@ -2239,13 +2295,13 @@ dev = ["black", "mypy", "pytest"] [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -2287,8 +2343,8 @@ files = [ [package.dependencies] numpy = [ {version = ">=1.20.3", markers = "python_version < \"3.10\""}, - {version = ">=1.21.0", markers = "python_version >= \"3.10\""}, {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, + {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, ] python-dateutil = ">=2.8.1" pytz = ">=2020.1" @@ -2367,13 +2423,13 @@ pytzdata = ">=2020.1" [[package]] name = "pex" -version = "2.1.156" +version = "2.3.0" description = "The PEX packaging toolchain." optional = false -python-versions = ">=2.7,<3.13,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,<3.13,>=2.7" files = [ - {file = "pex-2.1.156-py2.py3-none-any.whl", hash = "sha256:e7c00fe6f12f6b2ed57ab8e55c4d422647b30e25a4a275cfbc3d3b0bc26e774a"}, - {file = "pex-2.1.156.tar.gz", hash = "sha256:542ecb457c21f5ae8fa749894098e1c54e8639628efee70ece7f89da602aa4c2"}, + {file = "pex-2.3.0-py2.py3-none-any.whl", hash = "sha256:6c0ccbaa99fe15174fb1560c01ba0416579a057eed7ac90453324b18356f9b40"}, + {file = "pex-2.3.0.tar.gz", hash = "sha256:7d0fc86236192fbc14a71b25081e9c48c543d7dbc1e7b270d62eff88afd2245c"}, ] [package.extras] @@ -2395,42 +2451,42 @@ ptyprocess = ">=0.5" [[package]] name = "pkginfo" -version = "1.9.6" +version = "1.10.0" description = "Query metadata from sdists / bdists / installed packages." optional = false python-versions = ">=3.6" files = [ - {file = "pkginfo-1.9.6-py3-none-any.whl", hash = "sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546"}, - {file = "pkginfo-1.9.6.tar.gz", hash = "sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046"}, + {file = "pkginfo-1.10.0-py3-none-any.whl", hash = "sha256:889a6da2ed7ffc58ab5b900d888ddce90bce912f2d2de1dc1c26f4cb9fe65097"}, + {file = "pkginfo-1.10.0.tar.gz", hash = "sha256:5df73835398d10db79f8eecd5cd86b1f6d29317589ea70796994d49399af6297"}, ] [package.extras] -testing = ["pytest", "pytest-cov"] +testing = ["pytest", "pytest-cov", "wheel"] [[package]] name = "platformdirs" -version = "3.11.0" +version = "4.2.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "platformdirs-3.11.0-py3-none-any.whl", hash = "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"}, - {file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"}, + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, ] [package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] [[package]] name = "pluggy" -version = "1.3.0" +version = "1.4.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, - {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, ] [package.extras] @@ -2449,18 +2505,18 @@ files = [ [[package]] name = "poetry" -version = "1.7.1" +version = "1.8.2" description = "Python dependency management and packaging made easy." optional = false python-versions = ">=3.8,<4.0" files = [ - {file = "poetry-1.7.1-py3-none-any.whl", hash = "sha256:03d3807a0fb3bc1028cc3707dfd646aae629d58e476f7e7f062437680741c561"}, - {file = "poetry-1.7.1.tar.gz", hash = "sha256:b348a70e7d67ad9c0bd3d0ea255bc6df84c24cf4b16f8d104adb30b425d6ff32"}, + {file = "poetry-1.8.2-py3-none-any.whl", hash = "sha256:b42b400d9a803af6e788a30a6f3e9998020b77860e28df20647eb10b6f414910"}, + {file = "poetry-1.8.2.tar.gz", hash = "sha256:49cceb3838104647c3e1021f3a4f13c6053704cc18d33f849a90fe687a29cb73"}, ] [package.dependencies] build = ">=1.0.3,<2.0.0" -cachecontrol = {version = ">=0.13.0,<0.14.0", extras = ["filecache"]} +cachecontrol = {version = ">=0.14.0,<0.15.0", extras = ["filecache"]} cleo = ">=2.1.0,<3.0.0" crashtest = ">=0.4.1,<0.5.0" dulwich = ">=0.21.2,<0.22.0" @@ -2468,46 +2524,46 @@ fastjsonschema = ">=2.18.0,<3.0.0" importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} installer = ">=0.7.0,<0.8.0" keyring = ">=24.0.0,<25.0.0" -packaging = ">=20.5" +packaging = ">=23.1" pexpect = ">=4.7.0,<5.0.0" pkginfo = ">=1.9.4,<2.0.0" -platformdirs = ">=3.0.0,<4.0.0" -poetry-core = "1.8.1" +platformdirs = ">=3.0.0,<5" +poetry-core = "1.9.0" poetry-plugin-export = ">=1.6.0,<2.0.0" pyproject-hooks = ">=1.0.0,<2.0.0" requests = ">=2.26,<3.0" -requests-toolbelt = ">=0.9.1,<2" +requests-toolbelt = ">=1.0.0,<2.0.0" shellingham = ">=1.5,<2.0" tomli = {version = ">=2.0.1,<3.0.0", markers = "python_version < \"3.11\""} tomlkit = ">=0.11.4,<1.0.0" trove-classifiers = ">=2022.5.19" virtualenv = ">=20.23.0,<21.0.0" -xattr = {version = ">=0.10.0,<0.11.0", markers = "sys_platform == \"darwin\""} +xattr = {version = ">=1.0.0,<2.0.0", markers = "sys_platform == \"darwin\""} [[package]] name = "poetry-core" -version = "1.8.1" +version = "1.9.0" description = "Poetry PEP 517 Build Backend" optional = false python-versions = ">=3.8,<4.0" files = [ - {file = "poetry_core-1.8.1-py3-none-any.whl", hash = "sha256:194832b24f3283e01c5402eae71a6aae850ecdfe53f50a979c76bf7aa5010ffa"}, - {file = "poetry_core-1.8.1.tar.gz", hash = "sha256:67a76c671da2a70e55047cddda83566035b701f7e463b32a2abfeac6e2a16376"}, + {file = "poetry_core-1.9.0-py3-none-any.whl", hash = "sha256:4e0c9c6ad8cf89956f03b308736d84ea6ddb44089d16f2adc94050108ec1f5a1"}, + {file = "poetry_core-1.9.0.tar.gz", hash = "sha256:fa7a4001eae8aa572ee84f35feb510b321bd652e5cf9293249d62853e1f935a2"}, ] [[package]] name = "poetry-plugin-export" -version = "1.6.0" +version = "1.7.1" description = "Poetry plugin to export the dependencies to various formats" optional = false python-versions = ">=3.8,<4.0" files = [ - {file = "poetry_plugin_export-1.6.0-py3-none-any.whl", hash = "sha256:2dce6204c9318f1f6509a11a03921fb3f461b201840b59f1c237b6ab454dabcf"}, - {file = "poetry_plugin_export-1.6.0.tar.gz", hash = "sha256:091939434984267a91abf2f916a26b00cff4eee8da63ec2a24ba4b17cf969a59"}, + {file = "poetry_plugin_export-1.7.1-py3-none-any.whl", hash = "sha256:b2258e53ae0d369a73806f957ed0e726eb95c571a0ce8b1f273da686528cc1da"}, + {file = "poetry_plugin_export-1.7.1.tar.gz", hash = "sha256:cf62cfb6218a904290ba6db3bc1a24aa076d10f81c48c6e48b2ded430131e22e"}, ] [package.dependencies] -poetry = ">=1.6.0,<2.0.0" +poetry = ">=1.8.0,<2.0.0" poetry-core = ">=1.7.0,<2.0.0" [[package]] @@ -2526,61 +2582,78 @@ poetry-core = ">=1.0.0,<2.0.0" [[package]] name = "prompt-toolkit" -version = "3.0.43" +version = "3.0.36" description = "Library for building powerful interactive command lines in Python" optional = false -python-versions = ">=3.7.0" +python-versions = ">=3.6.2" files = [ - {file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"}, - {file = "prompt_toolkit-3.0.43.tar.gz", hash = "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"}, + {file = "prompt_toolkit-3.0.36-py3-none-any.whl", hash = "sha256:aa64ad242a462c5ff0363a7b9cfe696c20d55d9fc60c11fd8e632d064804d305"}, + {file = "prompt_toolkit-3.0.36.tar.gz", hash = "sha256:3e163f254bef5a03b146397d7c1963bd3e2812f0964bb9a24e6ec761fd28db63"}, ] [package.dependencies] wcwidth = "*" +[[package]] +name = "proto-plus" +version = "1.23.0" +description = "Beautiful, Pythonic protocol buffers." +optional = false +python-versions = ">=3.6" +files = [ + {file = "proto-plus-1.23.0.tar.gz", hash = "sha256:89075171ef11988b3fa157f5dbd8b9cf09d65fffee97e29ce403cd8defba19d2"}, + {file = "proto_plus-1.23.0-py3-none-any.whl", hash = "sha256:a829c79e619e1cf632de091013a4173deed13a55f326ef84f05af6f50ff4c82c"}, +] + +[package.dependencies] +protobuf = ">=3.19.0,<5.0.0dev" + +[package.extras] +testing = ["google-api-core[grpc] (>=1.31.5)"] + [[package]] name = "protobuf" -version = "4.25.1" +version = "4.25.3" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "protobuf-4.25.1-cp310-abi3-win32.whl", hash = "sha256:193f50a6ab78a970c9b4f148e7c750cfde64f59815e86f686c22e26b4fe01ce7"}, - {file = "protobuf-4.25.1-cp310-abi3-win_amd64.whl", hash = "sha256:3497c1af9f2526962f09329fd61a36566305e6c72da2590ae0d7d1322818843b"}, - {file = "protobuf-4.25.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:0bf384e75b92c42830c0a679b0cd4d6e2b36ae0cf3dbb1e1dfdda48a244f4bcd"}, - {file = "protobuf-4.25.1-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:0f881b589ff449bf0b931a711926e9ddaad3b35089cc039ce1af50b21a4ae8cb"}, - {file = "protobuf-4.25.1-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:ca37bf6a6d0046272c152eea90d2e4ef34593aaa32e8873fc14c16440f22d4b7"}, - {file = "protobuf-4.25.1-cp38-cp38-win32.whl", hash = "sha256:abc0525ae2689a8000837729eef7883b9391cd6aa7950249dcf5a4ede230d5dd"}, - {file = "protobuf-4.25.1-cp38-cp38-win_amd64.whl", hash = "sha256:1484f9e692091450e7edf418c939e15bfc8fc68856e36ce399aed6889dae8bb0"}, - {file = "protobuf-4.25.1-cp39-cp39-win32.whl", hash = "sha256:8bdbeaddaac52d15c6dce38c71b03038ef7772b977847eb6d374fc86636fa510"}, - {file = "protobuf-4.25.1-cp39-cp39-win_amd64.whl", hash = "sha256:becc576b7e6b553d22cbdf418686ee4daa443d7217999125c045ad56322dda10"}, - {file = "protobuf-4.25.1-py3-none-any.whl", hash = "sha256:a19731d5e83ae4737bb2a089605e636077ac001d18781b3cf489b9546c7c80d6"}, - {file = "protobuf-4.25.1.tar.gz", hash = "sha256:57d65074b4f5baa4ab5da1605c02be90ac20c8b40fb137d6a8df9f416b0d0ce2"}, + {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"}, + {file = "protobuf-4.25.3-cp310-abi3-win_amd64.whl", hash = "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8"}, + {file = "protobuf-4.25.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d"}, + {file = "protobuf-4.25.3-cp38-cp38-win32.whl", hash = "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"}, + {file = "protobuf-4.25.3-cp38-cp38-win_amd64.whl", hash = "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4"}, + {file = "protobuf-4.25.3-cp39-cp39-win32.whl", hash = "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4"}, + {file = "protobuf-4.25.3-cp39-cp39-win_amd64.whl", hash = "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c"}, + {file = "protobuf-4.25.3-py3-none-any.whl", hash = "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9"}, + {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"}, ] [[package]] name = "psutil" -version = "5.9.7" +version = "5.9.8" description = "Cross-platform lib for process and system monitoring in Python." optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "psutil-5.9.7-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:0bd41bf2d1463dfa535942b2a8f0e958acf6607ac0be52265ab31f7923bcd5e6"}, - {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:5794944462509e49d4d458f4dbfb92c47539e7d8d15c796f141f474010084056"}, - {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:fe361f743cb3389b8efda21980d93eb55c1f1e3898269bc9a2a1d0bb7b1f6508"}, - {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:e469990e28f1ad738f65a42dcfc17adaed9d0f325d55047593cb9033a0ab63df"}, - {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:3c4747a3e2ead1589e647e64aad601981f01b68f9398ddf94d01e3dc0d1e57c7"}, - {file = "psutil-5.9.7-cp27-none-win32.whl", hash = "sha256:1d4bc4a0148fdd7fd8f38e0498639ae128e64538faa507df25a20f8f7fb2341c"}, - {file = "psutil-5.9.7-cp27-none-win_amd64.whl", hash = "sha256:4c03362e280d06bbbfcd52f29acd79c733e0af33d707c54255d21029b8b32ba6"}, - {file = "psutil-5.9.7-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ea36cc62e69a13ec52b2f625c27527f6e4479bca2b340b7a452af55b34fcbe2e"}, - {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1132704b876e58d277168cd729d64750633d5ff0183acf5b3c986b8466cd0284"}, - {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe8b7f07948f1304497ce4f4684881250cd859b16d06a1dc4d7941eeb6233bfe"}, - {file = "psutil-5.9.7-cp36-cp36m-win32.whl", hash = "sha256:b27f8fdb190c8c03914f908a4555159327d7481dac2f01008d483137ef3311a9"}, - {file = "psutil-5.9.7-cp36-cp36m-win_amd64.whl", hash = "sha256:44969859757f4d8f2a9bd5b76eba8c3099a2c8cf3992ff62144061e39ba8568e"}, - {file = "psutil-5.9.7-cp37-abi3-win32.whl", hash = "sha256:c727ca5a9b2dd5193b8644b9f0c883d54f1248310023b5ad3e92036c5e2ada68"}, - {file = "psutil-5.9.7-cp37-abi3-win_amd64.whl", hash = "sha256:f37f87e4d73b79e6c5e749440c3113b81d1ee7d26f21c19c47371ddea834f414"}, - {file = "psutil-5.9.7-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:032f4f2c909818c86cea4fe2cc407f1c0f0cde8e6c6d702b28b8ce0c0d143340"}, - {file = "psutil-5.9.7.tar.gz", hash = "sha256:3f02134e82cfb5d089fddf20bb2e03fd5cd52395321d1c8458a9e58500ff417c"}, + {file = "psutil-5.9.8-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8"}, + {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73"}, + {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7"}, + {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36"}, + {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d"}, + {file = "psutil-5.9.8-cp27-none-win32.whl", hash = "sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e"}, + {file = "psutil-5.9.8-cp27-none-win_amd64.whl", hash = "sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631"}, + {file = "psutil-5.9.8-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81"}, + {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421"}, + {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4"}, + {file = "psutil-5.9.8-cp36-cp36m-win32.whl", hash = "sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee"}, + {file = "psutil-5.9.8-cp36-cp36m-win_amd64.whl", hash = "sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2"}, + {file = "psutil-5.9.8-cp37-abi3-win32.whl", hash = "sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0"}, + {file = "psutil-5.9.8-cp37-abi3-win_amd64.whl", hash = "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf"}, + {file = "psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8"}, + {file = "psutil-5.9.8.tar.gz", hash = "sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c"}, ] [package.extras] @@ -2588,13 +2661,13 @@ test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] [[package]] name = "ptpython" -version = "3.0.25" +version = "3.0.26" description = "Python REPL build on top of prompt_toolkit" optional = false python-versions = ">=3.7" files = [ - {file = "ptpython-3.0.25-py2.py3-none-any.whl", hash = "sha256:16654143dea960dcefb9d6e69af5f92f01c7a783dd28ff99e78bc7449fba805c"}, - {file = "ptpython-3.0.25.tar.gz", hash = "sha256:887f0a91a576bc26585a0dcec41cd03f004ac7c46a2c88576c87fc51d6c06cd7"}, + {file = "ptpython-3.0.26-py2.py3-none-any.whl", hash = "sha256:3dc4c066d049e16d8b181e995a568d36697d04d9acc2724732f3ff6686c5da57"}, + {file = "ptpython-3.0.26.tar.gz", hash = "sha256:c8fb1406502dc349d99c57eaf06e7116f3b2deac94f02f342bae68708909f743"}, ] [package.dependencies] @@ -2620,76 +2693,76 @@ files = [ [[package]] name = "pyarrow" -version = "14.0.2" +version = "15.0.2" description = "Python library for Apache Arrow" optional = false python-versions = ">=3.8" files = [ - {file = "pyarrow-14.0.2-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:ba9fe808596c5dbd08b3aeffe901e5f81095baaa28e7d5118e01354c64f22807"}, - {file = "pyarrow-14.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:22a768987a16bb46220cef490c56c671993fbee8fd0475febac0b3e16b00a10e"}, - {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dbba05e98f247f17e64303eb876f4a80fcd32f73c7e9ad975a83834d81f3fda"}, - {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a898d134d00b1eca04998e9d286e19653f9d0fcb99587310cd10270907452a6b"}, - {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:87e879323f256cb04267bb365add7208f302df942eb943c93a9dfeb8f44840b1"}, - {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:76fc257559404ea5f1306ea9a3ff0541bf996ff3f7b9209fc517b5e83811fa8e"}, - {file = "pyarrow-14.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0c4a18e00f3a32398a7f31da47fefcd7a927545b396e1f15d0c85c2f2c778cd"}, - {file = "pyarrow-14.0.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:87482af32e5a0c0cce2d12eb3c039dd1d853bd905b04f3f953f147c7a196915b"}, - {file = "pyarrow-14.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:059bd8f12a70519e46cd64e1ba40e97eae55e0cbe1695edd95384653d7626b23"}, - {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f16111f9ab27e60b391c5f6d197510e3ad6654e73857b4e394861fc79c37200"}, - {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06ff1264fe4448e8d02073f5ce45a9f934c0f3db0a04460d0b01ff28befc3696"}, - {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:6dd4f4b472ccf4042f1eab77e6c8bce574543f54d2135c7e396f413046397d5a"}, - {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:32356bfb58b36059773f49e4e214996888eeea3a08893e7dbde44753799b2a02"}, - {file = "pyarrow-14.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:52809ee69d4dbf2241c0e4366d949ba035cbcf48409bf404f071f624ed313a2b"}, - {file = "pyarrow-14.0.2-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:c87824a5ac52be210d32906c715f4ed7053d0180c1060ae3ff9b7e560f53f944"}, - {file = "pyarrow-14.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a25eb2421a58e861f6ca91f43339d215476f4fe159eca603c55950c14f378cc5"}, - {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c1da70d668af5620b8ba0a23f229030a4cd6c5f24a616a146f30d2386fec422"}, - {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2cc61593c8e66194c7cdfae594503e91b926a228fba40b5cf25cc593563bcd07"}, - {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:78ea56f62fb7c0ae8ecb9afdd7893e3a7dbeb0b04106f5c08dbb23f9c0157591"}, - {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:37c233ddbce0c67a76c0985612fef27c0c92aef9413cf5aa56952f359fcb7379"}, - {file = "pyarrow-14.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:e4b123ad0f6add92de898214d404e488167b87b5dd86e9a434126bc2b7a5578d"}, - {file = "pyarrow-14.0.2-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:e354fba8490de258be7687f341bc04aba181fc8aa1f71e4584f9890d9cb2dec2"}, - {file = "pyarrow-14.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:20e003a23a13da963f43e2b432483fdd8c38dc8882cd145f09f21792e1cf22a1"}, - {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc0de7575e841f1595ac07e5bc631084fd06ca8b03c0f2ecece733d23cd5102a"}, - {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66e986dc859712acb0bd45601229021f3ffcdfc49044b64c6d071aaf4fa49e98"}, - {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:f7d029f20ef56673a9730766023459ece397a05001f4e4d13805111d7c2108c0"}, - {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:209bac546942b0d8edc8debda248364f7f668e4aad4741bae58e67d40e5fcf75"}, - {file = "pyarrow-14.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:1e6987c5274fb87d66bb36816afb6f65707546b3c45c44c28e3c4133c010a881"}, - {file = "pyarrow-14.0.2-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:a01d0052d2a294a5f56cc1862933014e696aa08cc7b620e8c0cce5a5d362e976"}, - {file = "pyarrow-14.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a51fee3a7db4d37f8cda3ea96f32530620d43b0489d169b285d774da48ca9785"}, - {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64df2bf1ef2ef14cee531e2dfe03dd924017650ffaa6f9513d7a1bb291e59c15"}, - {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c0fa3bfdb0305ffe09810f9d3e2e50a2787e3a07063001dcd7adae0cee3601a"}, - {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c65bf4fd06584f058420238bc47a316e80dda01ec0dfb3044594128a6c2db794"}, - {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:63ac901baec9369d6aae1cbe6cca11178fb018a8d45068aaf5bb54f94804a866"}, - {file = "pyarrow-14.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:75ee0efe7a87a687ae303d63037d08a48ef9ea0127064df18267252cfe2e9541"}, - {file = "pyarrow-14.0.2.tar.gz", hash = "sha256:36cef6ba12b499d864d1def3e990f97949e0b79400d08b7cf74504ffbd3eb025"}, + {file = "pyarrow-15.0.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:88b340f0a1d05b5ccc3d2d986279045655b1fe8e41aba6ca44ea28da0d1455d8"}, + {file = "pyarrow-15.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eaa8f96cecf32da508e6c7f69bb8401f03745c050c1dd42ec2596f2e98deecac"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23c6753ed4f6adb8461e7c383e418391b8d8453c5d67e17f416c3a5d5709afbd"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f639c059035011db8c0497e541a8a45d98a58dbe34dc8fadd0ef128f2cee46e5"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:290e36a59a0993e9a5224ed2fb3e53375770f07379a0ea03ee2fce2e6d30b423"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:06c2bb2a98bc792f040bef31ad3e9be6a63d0cb39189227c08a7d955db96816e"}, + {file = "pyarrow-15.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:f7a197f3670606a960ddc12adbe8075cea5f707ad7bf0dffa09637fdbb89f76c"}, + {file = "pyarrow-15.0.2-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:5f8bc839ea36b1f99984c78e06e7a06054693dc2af8920f6fb416b5bca9944e4"}, + {file = "pyarrow-15.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f5e81dfb4e519baa6b4c80410421528c214427e77ca0ea9461eb4097c328fa33"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a4f240852b302a7af4646c8bfe9950c4691a419847001178662a98915fd7ee7"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e7d9cfb5a1e648e172428c7a42b744610956f3b70f524aa3a6c02a448ba853e"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2d4f905209de70c0eb5b2de6763104d5a9a37430f137678edfb9a675bac9cd98"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:90adb99e8ce5f36fbecbbc422e7dcbcbed07d985eed6062e459e23f9e71fd197"}, + {file = "pyarrow-15.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:b116e7fd7889294cbd24eb90cd9bdd3850be3738d61297855a71ac3b8124ee38"}, + {file = "pyarrow-15.0.2-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:25335e6f1f07fdaa026a61c758ee7d19ce824a866b27bba744348fa73bb5a440"}, + {file = "pyarrow-15.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:90f19e976d9c3d8e73c80be84ddbe2f830b6304e4c576349d9360e335cd627fc"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a22366249bf5fd40ddacc4f03cd3160f2d7c247692945afb1899bab8a140ddfb"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2a335198f886b07e4b5ea16d08ee06557e07db54a8400cc0d03c7f6a22f785f"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:3e6d459c0c22f0b9c810a3917a1de3ee704b021a5fb8b3bacf968eece6df098f"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:033b7cad32198754d93465dcfb71d0ba7cb7cd5c9afd7052cab7214676eec38b"}, + {file = "pyarrow-15.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:29850d050379d6e8b5a693098f4de7fd6a2bea4365bfd073d7c57c57b95041ee"}, + {file = "pyarrow-15.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:7167107d7fb6dcadb375b4b691b7e316f4368f39f6f45405a05535d7ad5e5058"}, + {file = "pyarrow-15.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e85241b44cc3d365ef950432a1b3bd44ac54626f37b2e3a0cc89c20e45dfd8bf"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:248723e4ed3255fcd73edcecc209744d58a9ca852e4cf3d2577811b6d4b59818"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ff3bdfe6f1b81ca5b73b70a8d482d37a766433823e0c21e22d1d7dde76ca33f"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:f3d77463dee7e9f284ef42d341689b459a63ff2e75cee2b9302058d0d98fe142"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:8c1faf2482fb89766e79745670cbca04e7018497d85be9242d5350cba21357e1"}, + {file = "pyarrow-15.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:28f3016958a8e45a1069303a4a4f6a7d4910643fc08adb1e2e4a7ff056272ad3"}, + {file = "pyarrow-15.0.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:89722cb64286ab3d4daf168386f6968c126057b8c7ec3ef96302e81d8cdb8ae4"}, + {file = "pyarrow-15.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cd0ba387705044b3ac77b1b317165c0498299b08261d8122c96051024f953cd5"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad2459bf1f22b6a5cdcc27ebfd99307d5526b62d217b984b9f5c974651398832"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58922e4bfece8b02abf7159f1f53a8f4d9f8e08f2d988109126c17c3bb261f22"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:adccc81d3dc0478ea0b498807b39a8d41628fa9210729b2f718b78cb997c7c91"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:8bd2baa5fe531571847983f36a30ddbf65261ef23e496862ece83bdceb70420d"}, + {file = "pyarrow-15.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6669799a1d4ca9da9c7e06ef48368320f5856f36f9a4dd31a11839dda3f6cc8c"}, + {file = "pyarrow-15.0.2.tar.gz", hash = "sha256:9c9bc803cb3b7bfacc1e96ffbfd923601065d9d3f911179d81e72d99fd74a3d9"}, ] [package.dependencies] -numpy = ">=1.16.6" +numpy = ">=1.16.6,<2" [[package]] name = "pyasn1" -version = "0.5.1" +version = "0.6.0" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +python-versions = ">=3.8" files = [ - {file = "pyasn1-0.5.1-py2.py3-none-any.whl", hash = "sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58"}, - {file = "pyasn1-0.5.1.tar.gz", hash = "sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c"}, + {file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"}, + {file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"}, ] [[package]] name = "pyasn1-modules" -version = "0.3.0" +version = "0.4.0" description = "A collection of ASN.1-based protocols modules" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +python-versions = ">=3.8" files = [ - {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"}, - {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"}, + {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"}, + {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"}, ] [package.dependencies] -pyasn1 = ">=0.4.6,<0.6.0" +pyasn1 = ">=0.4.6,<0.7.0" [[package]] name = "pycparser" @@ -2704,47 +2777,47 @@ files = [ [[package]] name = "pydantic" -version = "1.10.13" +version = "1.10.14" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737"}, - {file = "pydantic-1.10.13-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01"}, - {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1740068fd8e2ef6eb27a20e5651df000978edce6da6803c2bef0bc74540f9548"}, - {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84bafe2e60b5e78bc64a2941b4c071a4b7404c5c907f5f5a99b0139781e69ed8"}, - {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bc0898c12f8e9c97f6cd44c0ed70d55749eaf783716896960b4ecce2edfd2d69"}, - {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:654db58ae399fe6434e55325a2c3e959836bd17a6f6a0b6ca8107ea0571d2e17"}, - {file = "pydantic-1.10.13-cp310-cp310-win_amd64.whl", hash = "sha256:75ac15385a3534d887a99c713aa3da88a30fbd6204a5cd0dc4dab3d770b9bd2f"}, - {file = "pydantic-1.10.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653"}, - {file = "pydantic-1.10.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe"}, - {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9"}, - {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80"}, - {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580"}, - {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0"}, - {file = "pydantic-1.10.13-cp311-cp311-win_amd64.whl", hash = "sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0"}, - {file = "pydantic-1.10.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4b03e42ec20286f052490423682016fd80fda830d8e4119f8ab13ec7464c0132"}, - {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f59ef915cac80275245824e9d771ee939133be38215555e9dc90c6cb148aaeb5"}, - {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a1f9f747851338933942db7af7b6ee8268568ef2ed86c4185c6ef4402e80ba8"}, - {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:97cce3ae7341f7620a0ba5ef6cf043975cd9d2b81f3aa5f4ea37928269bc1b87"}, - {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:854223752ba81e3abf663d685f105c64150873cc6f5d0c01d3e3220bcff7d36f"}, - {file = "pydantic-1.10.13-cp37-cp37m-win_amd64.whl", hash = "sha256:b97c1fac8c49be29486df85968682b0afa77e1b809aff74b83081cc115e52f33"}, - {file = "pydantic-1.10.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c958d053453a1c4b1c2062b05cd42d9d5c8eb67537b8d5a7e3c3032943ecd261"}, - {file = "pydantic-1.10.13-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c5370a7edaac06daee3af1c8b1192e305bc102abcbf2a92374b5bc793818599"}, - {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d6f6e7305244bddb4414ba7094ce910560c907bdfa3501e9db1a7fd7eaea127"}, - {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3a3c792a58e1622667a2837512099eac62490cdfd63bd407993aaf200a4cf1f"}, - {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c636925f38b8db208e09d344c7aa4f29a86bb9947495dd6b6d376ad10334fb78"}, - {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:678bcf5591b63cc917100dc50ab6caebe597ac67e8c9ccb75e698f66038ea953"}, - {file = "pydantic-1.10.13-cp38-cp38-win_amd64.whl", hash = "sha256:6cf25c1a65c27923a17b3da28a0bdb99f62ee04230c931d83e888012851f4e7f"}, - {file = "pydantic-1.10.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8ef467901d7a41fa0ca6db9ae3ec0021e3f657ce2c208e98cd511f3161c762c6"}, - {file = "pydantic-1.10.13-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:968ac42970f57b8344ee08837b62f6ee6f53c33f603547a55571c954a4225691"}, - {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9849f031cf8a2f0a928fe885e5a04b08006d6d41876b8bbd2fc68a18f9f2e3fd"}, - {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56e3ff861c3b9c6857579de282ce8baabf443f42ffba355bf070770ed63e11e1"}, - {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f00790179497767aae6bcdc36355792c79e7bbb20b145ff449700eb076c5f96"}, - {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:75b297827b59bc229cac1a23a2f7a4ac0031068e5be0ce385be1462e7e17a35d"}, - {file = "pydantic-1.10.13-cp39-cp39-win_amd64.whl", hash = "sha256:e70ca129d2053fb8b728ee7d1af8e553a928d7e301a311094b8a0501adc8763d"}, - {file = "pydantic-1.10.13-py3-none-any.whl", hash = "sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687"}, - {file = "pydantic-1.10.13.tar.gz", hash = "sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340"}, + {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, + {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, + {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, + {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, + {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, + {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, + {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, + {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, + {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, + {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, ] [package.dependencies] @@ -2848,13 +2921,13 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] [[package]] name = "pyparsing" -version = "3.1.1" +version = "3.1.2" description = "pyparsing module - Classes and methods to define and execute parsing grammars" optional = false python-versions = ">=3.6.8" files = [ - {file = "pyparsing-3.1.1-py3-none-any.whl", hash = "sha256:32c7c0b711493c72ff18a981d24f28aaf9c1fb7ed5e9667c9e84e3db623bdbfb"}, - {file = "pyparsing-3.1.1.tar.gz", hash = "sha256:ede28a1a32462f5a9705e07aea48001a08f7cf81a021585011deba701581a0db"}, + {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, + {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, ] [package.extras] @@ -2907,13 +2980,13 @@ files = [ [[package]] name = "pytest" -version = "7.4.4" +version = "8.1.1" description = "pytest: simple powerful testing with Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, - {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, + {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"}, + {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"}, ] [package.dependencies] @@ -2921,21 +2994,21 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" -pluggy = ">=0.12,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} +pluggy = ">=1.4,<2.0" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -2943,13 +3016,13 @@ six = ">=1.5" [[package]] name = "python-dotenv" -version = "1.0.0" +version = "1.0.1" description = "Read key-value pairs from a .env file and set them as environment variables" optional = false python-versions = ">=3.8" files = [ - {file = "python-dotenv-1.0.0.tar.gz", hash = "sha256:a8df96034aae6d2d50a4ebe8216326c61c3eb64836776504fcca410e5937a3ba"}, - {file = "python_dotenv-1.0.0-py3-none-any.whl", hash = "sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a"}, + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, ] [package.extras] @@ -2957,13 +3030,13 @@ cli = ["click (>=5.0)"] [[package]] name = "pytz" -version = "2023.3.post1" +version = "2024.1" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, - {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, ] [[package]] @@ -3073,118 +3146,115 @@ files = [ [[package]] name = "questionary" -version = "1.10.0" +version = "2.0.1" description = "Python library to build pretty command line user prompts ⭐️" optional = false -python-versions = ">=3.6,<4.0" +python-versions = ">=3.8" files = [ - {file = "questionary-1.10.0-py3-none-any.whl", hash = "sha256:fecfcc8cca110fda9d561cb83f1e97ecbb93c613ff857f655818839dac74ce90"}, - {file = "questionary-1.10.0.tar.gz", hash = "sha256:600d3aefecce26d48d97eee936fdb66e4bc27f934c3ab6dd1e292c4f43946d90"}, + {file = "questionary-2.0.1-py3-none-any.whl", hash = "sha256:8ab9a01d0b91b68444dff7f6652c1e754105533f083cbe27597c8110ecc230a2"}, + {file = "questionary-2.0.1.tar.gz", hash = "sha256:bcce898bf3dbb446ff62830c86c5c6fb9a22a54146f0f5597d3da43b10d8fc8b"}, ] [package.dependencies] -prompt_toolkit = ">=2.0,<4.0" - -[package.extras] -docs = ["Sphinx (>=3.3,<4.0)", "sphinx-autobuild (>=2020.9.1,<2021.0.0)", "sphinx-autodoc-typehints (>=1.11.1,<2.0.0)", "sphinx-copybutton (>=0.3.1,<0.4.0)", "sphinx-rtd-theme (>=0.5.0,<0.6.0)"] +prompt_toolkit = ">=2.0,<=3.0.36" [[package]] name = "rapidfuzz" -version = "3.6.1" +version = "3.7.0" description = "rapid fuzzy string matching" optional = false python-versions = ">=3.8" files = [ - {file = "rapidfuzz-3.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ac434fc71edda30d45db4a92ba5e7a42c7405e1a54cb4ec01d03cc668c6dcd40"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2a791168e119cfddf4b5a40470620c872812042f0621e6a293983a2d52372db0"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5a2f3e9df346145c2be94e4d9eeffb82fab0cbfee85bd4a06810e834fe7c03fa"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23de71e7f05518b0bbeef55d67b5dbce3bcd3e2c81e7e533051a2e9401354eb0"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d056e342989248d2bdd67f1955bb7c3b0ecfa239d8f67a8dfe6477b30872c607"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:01835d02acd5d95c1071e1da1bb27fe213c84a013b899aba96380ca9962364bc"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ed0f712e0bb5fea327e92aec8a937afd07ba8de4c529735d82e4c4124c10d5a0"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96cd19934f76a1264e8ecfed9d9f5291fde04ecb667faef5f33bdbfd95fe2d1f"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e06c4242a1354cf9d48ee01f6f4e6e19c511d50bb1e8d7d20bcadbb83a2aea90"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d73dcfe789d37c6c8b108bf1e203e027714a239e50ad55572ced3c004424ed3b"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:06e98ff000e2619e7cfe552d086815671ed09b6899408c2c1b5103658261f6f3"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:08b6fb47dd889c69fbc0b915d782aaed43e025df6979b6b7f92084ba55edd526"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a1788ebb5f5b655a15777e654ea433d198f593230277e74d51a2a1e29a986283"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-win32.whl", hash = "sha256:c65f92881753aa1098c77818e2b04a95048f30edbe9c3094dc3707d67df4598b"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:4243a9c35667a349788461aae6471efde8d8800175b7db5148a6ab929628047f"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-win_arm64.whl", hash = "sha256:f59d19078cc332dbdf3b7b210852ba1f5db8c0a2cd8cc4c0ed84cc00c76e6802"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fbc07e2e4ac696497c5f66ec35c21ddab3fc7a406640bffed64c26ab2f7ce6d6"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:40cced1a8852652813f30fb5d4b8f9b237112a0bbaeebb0f4cc3611502556764"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:82300e5f8945d601c2daaaac139d5524d7c1fdf719aa799a9439927739917460"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edf97c321fd641fea2793abce0e48fa4f91f3c202092672f8b5b4e781960b891"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7420e801b00dee4a344ae2ee10e837d603461eb180e41d063699fb7efe08faf0"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:060bd7277dc794279fa95522af355034a29c90b42adcb7aa1da358fc839cdb11"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7e3375e4f2bfec77f907680328e4cd16cc64e137c84b1886d547ab340ba6928"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a490cd645ef9d8524090551016f05f052e416c8adb2d8b85d35c9baa9d0428ab"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2e03038bfa66d2d7cffa05d81c2f18fd6acbb25e7e3c068d52bb7469e07ff382"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2b19795b26b979c845dba407fe79d66975d520947b74a8ab6cee1d22686f7967"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:064c1d66c40b3a0f488db1f319a6e75616b2e5fe5430a59f93a9a5e40a656d15"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3c772d04fb0ebeece3109d91f6122b1503023086a9591a0b63d6ee7326bd73d9"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:841eafba6913c4dfd53045835545ba01a41e9644e60920c65b89c8f7e60c00a9"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-win32.whl", hash = "sha256:266dd630f12696ea7119f31d8b8e4959ef45ee2cbedae54417d71ae6f47b9848"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:d79aec8aeee02ab55d0ddb33cea3ecd7b69813a48e423c966a26d7aab025cdfe"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-win_arm64.whl", hash = "sha256:484759b5dbc5559e76fefaa9170147d1254468f555fd9649aea3bad46162a88b"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b2ef4c0fd3256e357b70591ffb9e8ed1d439fb1f481ba03016e751a55261d7c1"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:588c4b20fa2fae79d60a4e438cf7133d6773915df3cc0a7f1351da19eb90f720"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7142ee354e9c06e29a2636b9bbcb592bb00600a88f02aa5e70e4f230347b373e"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1dfc557c0454ad22382373ec1b7df530b4bbd974335efe97a04caec936f2956a"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:03f73b381bdeccb331a12c3c60f1e41943931461cdb52987f2ecf46bfc22f50d"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b0ccc2ec1781c7e5370d96aef0573dd1f97335343e4982bdb3a44c133e27786"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da3e8c9f7e64bb17faefda085ff6862ecb3ad8b79b0f618a6cf4452028aa2222"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fde9b14302a31af7bdafbf5cfbb100201ba21519be2b9dedcf4f1048e4fbe65d"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c1a23eee225dfb21c07f25c9fcf23eb055d0056b48e740fe241cbb4b22284379"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e49b9575d16c56c696bc7b06a06bf0c3d4ef01e89137b3ddd4e2ce709af9fe06"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:0a9fc714b8c290261669f22808913aad49553b686115ad0ee999d1cb3df0cd66"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:a3ee4f8f076aa92184e80308fc1a079ac356b99c39408fa422bbd00145be9854"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f056ba42fd2f32e06b2c2ba2443594873cfccc0c90c8b6327904fc2ddf6d5799"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-win32.whl", hash = "sha256:5d82b9651e3d34b23e4e8e201ecd3477c2baa17b638979deeabbb585bcb8ba74"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:dad55a514868dae4543ca48c4e1fc0fac704ead038dafedf8f1fc0cc263746c1"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-win_arm64.whl", hash = "sha256:3c84294f4470fcabd7830795d754d808133329e0a81d62fcc2e65886164be83b"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e19d519386e9db4a5335a4b29f25b8183a1c3f78cecb4c9c3112e7f86470e37f"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:01eb03cd880a294d1bf1a583fdd00b87169b9cc9c9f52587411506658c864d73"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:be368573255f8fbb0125a78330a1a40c65e9ba3c5ad129a426ff4289099bfb41"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3e5af946f419c30f5cb98b69d40997fe8580efe78fc83c2f0f25b60d0e56efb"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f382f7ffe384ce34345e1c0b2065451267d3453cadde78946fbd99a59f0cc23c"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be156f51f3a4f369e758505ed4ae64ea88900dcb2f89d5aabb5752676d3f3d7e"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1936d134b6c513fbe934aeb668b0fee1ffd4729a3c9d8d373f3e404fbb0ce8a0"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12ff8eaf4a9399eb2bebd838f16e2d1ded0955230283b07376d68947bbc2d33d"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ae598a172e3a95df3383634589660d6b170cc1336fe7578115c584a99e0ba64d"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cd4ba4c18b149da11e7f1b3584813159f189dc20833709de5f3df8b1342a9759"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:0402f1629e91a4b2e4aee68043a30191e5e1b7cd2aa8dacf50b1a1bcf6b7d3ab"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:1e12319c6b304cd4c32d5db00b7a1e36bdc66179c44c5707f6faa5a889a317c0"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0bbfae35ce4de4c574b386c43c78a0be176eeddfdae148cb2136f4605bebab89"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-win32.whl", hash = "sha256:7fec74c234d3097612ea80f2a80c60720eec34947066d33d34dc07a3092e8105"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:a553cc1a80d97459d587529cc43a4c7c5ecf835f572b671107692fe9eddf3e24"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:757dfd7392ec6346bd004f8826afb3bf01d18a723c97cbe9958c733ab1a51791"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2963f4a3f763870a16ee076796be31a4a0958fbae133dbc43fc55c3968564cf5"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d2f0274595cc5b2b929c80d4e71b35041104b577e118cf789b3fe0a77b37a4c5"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f211e366e026de110a4246801d43a907cd1a10948082f47e8a4e6da76fef52"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a59472b43879012b90989603aa5a6937a869a72723b1bf2ff1a0d1edee2cc8e6"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a03863714fa6936f90caa7b4b50ea59ea32bb498cc91f74dc25485b3f8fccfe9"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd95b6b7bfb1584f806db89e1e0c8dbb9d25a30a4683880c195cc7f197eaf0c"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7183157edf0c982c0b8592686535c8b3e107f13904b36d85219c77be5cefd0d8"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ad9d74ef7c619b5b0577e909582a1928d93e07d271af18ba43e428dc3512c2a1"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b53137d81e770c82189e07a8f32722d9e4260f13a0aec9914029206ead38cac3"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:49b9ed2472394d306d5dc967a7de48b0aab599016aa4477127b20c2ed982dbf9"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:dec307b57ec2d5054d77d03ee4f654afcd2c18aee00c48014cb70bfed79597d6"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4381023fa1ff32fd5076f5d8321249a9aa62128eb3f21d7ee6a55373e672b261"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-win32.whl", hash = "sha256:8d7a072f10ee57c8413c8ab9593086d42aaff6ee65df4aa6663eecdb7c398dca"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:ebcfb5bfd0a733514352cfc94224faad8791e576a80ffe2fd40b2177bf0e7198"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-win_arm64.whl", hash = "sha256:1c47d592e447738744905c18dda47ed155620204714e6df20eb1941bb1ba315e"}, - {file = "rapidfuzz-3.6.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:eef8b346ab331bec12bbc83ac75641249e6167fab3d84d8f5ca37fd8e6c7a08c"}, - {file = "rapidfuzz-3.6.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53251e256017e2b87f7000aee0353ba42392c442ae0bafd0f6b948593d3f68c6"}, - {file = "rapidfuzz-3.6.1-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6dede83a6b903e3ebcd7e8137e7ff46907ce9316e9d7e7f917d7e7cdc570ee05"}, - {file = "rapidfuzz-3.6.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e4da90e4c2b444d0a171d7444ea10152e07e95972bb40b834a13bdd6de1110c"}, - {file = "rapidfuzz-3.6.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:ca3dfcf74f2b6962f411c33dd95b0adf3901266e770da6281bc96bb5a8b20de9"}, - {file = "rapidfuzz-3.6.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bcc957c0a8bde8007f1a8a413a632a1a409890f31f73fe764ef4eac55f59ca87"}, - {file = "rapidfuzz-3.6.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:692c9a50bea7a8537442834f9bc6b7d29d8729a5b6379df17c31b6ab4df948c2"}, - {file = "rapidfuzz-3.6.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c23ceaea27e790ddd35ef88b84cf9d721806ca366199a76fd47cfc0457a81b"}, - {file = "rapidfuzz-3.6.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b155e67fff215c09f130555002e42f7517d0ea72cbd58050abb83cb7c880cec"}, - {file = "rapidfuzz-3.6.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3028ee8ecc48250607fa8a0adce37b56275ec3b1acaccd84aee1f68487c8557b"}, - {file = "rapidfuzz-3.6.1.tar.gz", hash = "sha256:35660bee3ce1204872574fa041c7ad7ec5175b3053a4cb6e181463fc07013de7"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:860f438238f1807532aa5c5c25e74c284232ccc115fe84697b78e25d48f364f7"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4bb9285abeb0477cdb2f8ea0cf7fd4b5f72ed5a9a7d3f0c0bb4a5239db2fc1ed"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:08671280e0c04d2bb3f39511f13cae5914e6690036fd1eefc3d47a47f9fae634"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04bae4d9c16ce1bab6447d196fb8258d98139ed8f9b288a38b84887985e4227b"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1efa2268b51b68156fb84d18ca1720311698a58051c4a19c40d670057ce60519"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:600b4d4315f33ec0356c0dab3991a5d5761102420bcff29e0773706aa48936e8"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18bc2f13c73d5d34499ff6ada55b052c445d3aa64d22c2639e5ab45472568046"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e11c5e6593be41a555475c9c20320342c1f5585d635a064924956944c465ad4"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d7878025248b99ccca3285891899373f98548f2ca13835d83619ffc42241c626"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b4a7e37fe136022d944374fcd8a2f72b8a19f7b648d2cdfb946667e9ede97f9f"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b5881856f830351aaabd869151124f64a80bf61560546d9588a630a4e933a5de"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:c788b11565cc176fab8fab6dfcd469031e906927db94bf7e422afd8ef8f88a5a"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9e17a3092e74025d896ef1d67ac236c83494da37a78ef84c712e4e2273c115f1"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-win32.whl", hash = "sha256:e499c823206c9ffd9d89aa11f813a4babdb9219417d4efe4c8a6f8272da00e98"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:91f798cc00cd94a0def43e9befc6e867c9bd8fa8f882d1eaa40042f528b7e2c7"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-win_arm64.whl", hash = "sha256:d5a3872f35bec89f07b993fa1c5401d11b9e68bcdc1b9737494e279308a38a5f"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ef6b6ab64c4c91c57a6b58e1d690b59453bfa1f1e9757a7e52e59b4079e36631"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2f9070b42c0ba030b045bba16a35bdb498a0d6acb0bdb3ff4e325960e685e290"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:63044c63565f50818d885bfcd40ac369947da4197de56b4d6c26408989d48edf"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49b0c47860c733a3d73a4b70b97b35c8cbf24ef24f8743732f0d1c412a8c85de"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1b14489b038f007f425a06fcf28ac6313c02cb603b54e3a28d9cfae82198cc0"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be08f39e397a618aab907887465d7fabc2d1a4d15d1a67cb8b526a7fb5202a3e"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16895dc62a7b92028f9c8b6d22830f1cbc77306ee794f461afc6028e1a8d7539"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:579cce49dfa57ffd8c8227b3fb53cced54b4df70cec502e63e9799b4d1f44004"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:40998c8dc35fdd221790b8b5134a8d7499adbfab9a5dd9ec626c7e92e17a43ed"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:dc3fdb4738a6b83ae27f1d8923b00d3a9c2b5c50da75b9f8b81841839c6e3e1f"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:92b8146fbfb37ac358ef7e0f6b79619e4f793fbbe894b99ea87920f9c0a9d77d"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:1dfceaa7c2914585bb8a043265c39ec09078f13fbf53b5525722fc074306b6fa"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f332d61f51b0b9c8b55a0fb052b4764b6ad599ea8ce948ac47a4388e9083c35e"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-win32.whl", hash = "sha256:dfd1e4819f1f3c47141f86159b44b7360ecb19bf675080b3b40437bf97273ab9"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:594b9c33fc1a86784962043ee3fbaaed875fbaadff72e467c2f7a83cd6c5d69d"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-win_arm64.whl", hash = "sha256:0b13a6823a1b83ae43f8bf35955df35032bee7bec0daf9b5ab836e0286067434"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:075a419a0ec29be44b3d7f4bcfa5cb7e91e419379a85fc05eb33de68315bd96f"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:51a5b96d2081c3afbef1842a61d63e55d0a5a201473e6975a80190ff2d6f22ca"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a9460d8fddac7ea46dff9298eee9aa950dbfe79f2eb509a9f18fbaefcd10894c"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f39eb1513ee139ba6b5c01fe47ddf2d87e9560dd7fdee1068f7f6efbae70de34"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eace9fdde58a425d4c9a93021b24a0cac830df167a5b2fc73299e2acf9f41493"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0cc77237242303733de47829028a0a8b6ab9188b23ec9d9ff0a674fdcd3c8e7f"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:74e692357dd324dff691d379ef2c094c9ec526c0ce83ed43a066e4e68fe70bf6"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f2075ac9ee5c15d33d24a1efc8368d095602b5fd9634c5b5f24d83e41903528"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5a8ba64d72329a940ff6c74b721268c2004eecc48558f648a38e96915b5d1c1b"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a1f268a2a37cd22573b4a06eccd481c04504b246d3cadc2d8e8dfa64b575636d"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:42c2e8a2341363c7caf276efdbe1a673fc5267a02568c47c8e980f12e9bc8727"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:a9acca34b34fb895ee6a84c436bb919f3b9cd8f43e7003d43e9573a1d990ff74"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9bad6a0fe3bc1753dacaa6229a8ba7d9844eb7ae24d44d17c5f4c51c91a8a95e"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-win32.whl", hash = "sha256:c86bc4b1d2380739e6485396195e30021df509b4923f3f757914e171587bce7c"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:d7361608c8e73a1dc0203a87d151cddebdade0098a047c46da43c469c07df964"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-win_arm64.whl", hash = "sha256:8fdc26e7863e0f63c2185d53bb61f5173ad4451c1c8287b535b30ea25a419a5a"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9b6167468f76779a14b9af66210f68741af94d32d086f19118de4e919f00585c"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5bd394e28ff221557ea4d8152fcec3e66d9f620557feca5f2bedc4c21f8cf2f9"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8e70f876ca89a6df344f8157ac60384e8c05a0dfb442da2490c3f1c45238ccf5"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c837f89d86a5affe9ee6574dad6b195475676a6ab171a67920fc99966f2ab2c"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cda4550a98658f9a8bcdc03d0498ed1565c1563880e3564603a9eaae28d51b2a"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecd70212fd9f1f8b1d3bdd8bcb05acc143defebd41148bdab43e573b043bb241"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:187db4cc8fb54f8c49c67b7f38ef3a122ce23be273032fa2ff34112a2694c3d8"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4604dfc1098920c4eb6d0c6b5cc7bdd4bf95b48633e790c1d3f100a25870691d"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01581b688c5f4f6665b779135e32db0edab1d78028abf914bb91469928efa383"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0828b55ec8ad084febdf4ab0c942eb1f81c97c0935f1cb0be0b4ea84ce755988"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:150c98b65faff17b917b9d36bff8a4d37b6173579c6bc2e38ff2044e209d37a4"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7e4eea225d2bff1aff4c85fcc44716596d3699374d99eb5906b7a7560297460e"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7bc944d7e830cfce0f8b4813875f05904207017b66e25ab7ee757507001310a9"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-win32.whl", hash = "sha256:3e55f02105c451ab6ff0edaaba57cab1b6c0a0241cfb2b306d4e8e1503adba50"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:41851620d2900791d66d9b6092fc163441d7dd91a460c73b07957ff1c517bc30"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e8041c6b2d339766efe6298fa272f79d6dd799965df364ef4e50f488c101c899"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4e09d81008e212fc824ea23603ff5270d75886e72372fa6c7c41c1880bcb57ed"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:419c8961e861fb5fc5590056c66a279623d1ea27809baea17e00cdc313f1217a"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1522eaab91b9400b3ef16eebe445940a19e70035b5bc5d98aef23d66e9ac1df0"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:611278ce3136f4544d596af18ab8849827d64372e1d8888d9a8d071bf4a3f44d"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4efa9bfc5b955b6474ee077eee154e240441842fa304f280b06e6b6aa58a1d1e"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0cc9d3c8261457af3f8756b1f71a9fdc4892978a9e8b967976d2803e08bf972"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce728e2b582fd396bc2559160ee2e391e6a4b5d2e455624044699d96abe8a396"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3a6a36c9299e059e0bee3409218bc5235a46570c20fc980cdee5ed21ea6110ad"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9ea720db8def684c1eb71dadad1f61c9b52f4d979263eb5d443f2b22b0d5430a"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:358692f1df3f8aebcd48e69c77c948c9283b44c0efbaf1eeea01739efe3cd9a6"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:faded69ffe79adcefa8da08f414a0fd52375e2b47f57be79471691dad9656b5a"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7f9f3dc14fadbd553975f824ac48c381f42192cec9d7e5711b528357662a8d8e"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-win32.whl", hash = "sha256:7be5f460ff42d7d27729115bfe8a02e83fa0284536d8630ee900d17b75c29e65"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:dd5ad2c12dab2b98340c4b7b9592c8f349730bda9a2e49675ea592bbcbc1360b"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-win_arm64.whl", hash = "sha256:aa163257a0ac4e70f9009d25e5030bdd83a8541dfa3ba78dc86b35c9e16a80b4"}, + {file = "rapidfuzz-3.7.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4e50840a8a8e0229563eeaf22e21a203359859557db8829f4d0285c17126c5fb"}, + {file = "rapidfuzz-3.7.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:632f09e19365ace5ff2670008adc8bf23d03d668b03a30230e5b60ff9317ee93"}, + {file = "rapidfuzz-3.7.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:209dda6ae66b702f74a78cef555397cdc2a83d7f48771774a20d2fc30808b28c"}, + {file = "rapidfuzz-3.7.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bc0b78572626af6ab134895e4dbfe4f4d615d18dcc43b8d902d8e45471aabba"}, + {file = "rapidfuzz-3.7.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7ba14850cc8258b3764ea16b8a4409ac2ba16d229bde7a5f495dd479cd9ccd56"}, + {file = "rapidfuzz-3.7.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b917764fd2b267addc9d03a96d26f751f6117a95f617428c44a069057653b528"}, + {file = "rapidfuzz-3.7.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1252ca156e1b053e84e5ae1c8e9e062ee80468faf23aa5c543708212a42795fd"}, + {file = "rapidfuzz-3.7.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:86c7676a32d7524e40bc73546e511a408bc831ae5b163029d325ea3a2027d089"}, + {file = "rapidfuzz-3.7.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20e7d729af2e5abb29caa070ec048aba042f134091923d9ca2ac662b5604577e"}, + {file = "rapidfuzz-3.7.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:86eea3e6c314a9238de568254a9c591ec73c2985f125675ed5f171d869c47773"}, + {file = "rapidfuzz-3.7.0.tar.gz", hash = "sha256:620df112c39c6d27316dc1e22046dc0382d6d91fd60d7c51bd41ca0333d867e9"}, ] [package.extras] @@ -3227,13 +3297,13 @@ requests = ">=2.0.1,<3.0.0" [[package]] name = "rich" -version = "13.7.0" +version = "13.7.1" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.7.0" files = [ - {file = "rich-13.7.0-py3-none-any.whl", hash = "sha256:6da14c108c4866ee9520bbffa71f6fe3962e193b7da68720583850cd4548e235"}, - {file = "rich-13.7.0.tar.gz", hash = "sha256:5cb5123b5cf9ee70584244246816e9114227e0b98ad9176eede6ad54bf5403fa"}, + {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, + {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"}, ] [package.dependencies] @@ -3285,13 +3355,13 @@ files = [ [[package]] name = "sentry-sdk" -version = "1.39.1" +version = "1.44.0" description = "Python client for Sentry (https://sentry.io)" optional = false python-versions = "*" files = [ - {file = "sentry-sdk-1.39.1.tar.gz", hash = "sha256:320a55cdf9da9097a0bead239c35b7e61f53660ef9878861824fd6d9b2eaf3b5"}, - {file = "sentry_sdk-1.39.1-py2.py3-none-any.whl", hash = "sha256:81b5b9ffdd1a374e9eb0c053b5d2012155db9cbe76393a8585677b753bd5fdc1"}, + {file = "sentry-sdk-1.44.0.tar.gz", hash = "sha256:f7125a9235795811962d52ff796dc032cd1d0dd98b59beaced8380371cd9c13c"}, + {file = "sentry_sdk-1.44.0-py2.py3-none-any.whl", hash = "sha256:eb65289da013ca92fad2694851ad2f086aa3825e808dc285bd7dcaf63602bb18"}, ] [package.dependencies] @@ -3305,6 +3375,7 @@ asyncpg = ["asyncpg (>=0.23)"] beam = ["apache-beam (>=2.12)"] bottle = ["bottle (>=0.12.13)"] celery = ["celery (>=3)"] +celery-redbeat = ["celery-redbeat (>=2)"] chalice = ["chalice (>=1.16.0)"] clickhouse-driver = ["clickhouse-driver (>=0.2.0)"] django = ["django (>=1.8)"] @@ -3315,6 +3386,7 @@ grpcio = ["grpcio (>=1.21.1)"] httpx = ["httpx (>=0.16.0)"] huey = ["huey (>=2)"] loguru = ["loguru (>=0.5)"] +openai = ["openai (>=1.0.0)", "tiktoken (>=0.3.0)"] opentelemetry = ["opentelemetry-distro (>=0.35b0)"] opentelemetry-experimental = ["opentelemetry-distro (>=0.40b0,<1.0)", "opentelemetry-instrumentation-aiohttp-client (>=0.40b0,<1.0)", "opentelemetry-instrumentation-django (>=0.40b0,<1.0)", "opentelemetry-instrumentation-fastapi (>=0.40b0,<1.0)", "opentelemetry-instrumentation-flask (>=0.40b0,<1.0)", "opentelemetry-instrumentation-requests (>=0.40b0,<1.0)", "opentelemetry-instrumentation-sqlite3 (>=0.40b0,<1.0)", "opentelemetry-instrumentation-urllib (>=0.40b0,<1.0)"] pure-eval = ["asttokens", "executing", "pure-eval"] @@ -3330,19 +3402,19 @@ tornado = ["tornado (>=5)"] [[package]] name = "setuptools" -version = "69.0.3" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, - {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "shellingham" @@ -3368,24 +3440,27 @@ files = [ [[package]] name = "slack-sdk" -version = "3.26.2" +version = "3.27.1" description = "The Slack API Platform SDK for Python" optional = false python-versions = ">=3.6" files = [ - {file = "slack_sdk-3.26.2-py2.py3-none-any.whl", hash = "sha256:a10e8ee69ca17d274989d0c2bbecb875f19898da3052d8d57de0898a00b1ab52"}, - {file = "slack_sdk-3.26.2.tar.gz", hash = "sha256:bcdac5e688fa50e9357ecd00b803b6a8bad766aa614d35d8dc0636f40adc48bf"}, + {file = "slack_sdk-3.27.1-py2.py3-none-any.whl", hash = "sha256:c108e509160cf1324c5c8b1f47ca52fb5e287021b8caf9f4ec78ad737ab7b1d9"}, + {file = "slack_sdk-3.27.1.tar.gz", hash = "sha256:85d86b34d807c26c8bb33c1569ec0985876f06ae4a2692afba765b7a5490d28c"}, ] +[package.extras] +optional = ["SQLAlchemy (>=1.4,<3)", "aiodns (>1.0)", "aiohttp (>=3.7.3,<4)", "boto3 (<=2)", "websocket-client (>=1,<2)", "websockets (>=10,<11)", "websockets (>=9.1,<10)"] + [[package]] name = "sniffio" -version = "1.3.0" +version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" files = [ - {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, - {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] [[package]] @@ -3401,64 +3476,64 @@ files = [ [[package]] name = "sqlalchemy" -version = "2.0.25" +version = "2.0.29" description = "Database Abstraction Library" optional = false python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4344d059265cc8b1b1be351bfb88749294b87a8b2bbe21dfbe066c4199541ebd"}, - {file = "SQLAlchemy-2.0.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6f9e2e59cbcc6ba1488404aad43de005d05ca56e069477b33ff74e91b6319735"}, - {file = "SQLAlchemy-2.0.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84daa0a2055df9ca0f148a64fdde12ac635e30edbca80e87df9b3aaf419e144a"}, - {file = "SQLAlchemy-2.0.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc8b7dabe8e67c4832891a5d322cec6d44ef02f432b4588390017f5cec186a84"}, - {file = "SQLAlchemy-2.0.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f5693145220517b5f42393e07a6898acdfe820e136c98663b971906120549da5"}, - {file = "SQLAlchemy-2.0.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:db854730a25db7c956423bb9fb4bdd1216c839a689bf9cc15fada0a7fb2f4570"}, - {file = "SQLAlchemy-2.0.25-cp310-cp310-win32.whl", hash = "sha256:14a6f68e8fc96e5e8f5647ef6cda6250c780612a573d99e4d881581432ef1669"}, - {file = "SQLAlchemy-2.0.25-cp310-cp310-win_amd64.whl", hash = "sha256:87f6e732bccd7dcf1741c00f1ecf33797383128bd1c90144ac8adc02cbb98643"}, - {file = "SQLAlchemy-2.0.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:342d365988ba88ada8af320d43df4e0b13a694dbd75951f537b2d5e4cb5cd002"}, - {file = "SQLAlchemy-2.0.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f37c0caf14b9e9b9e8f6dbc81bc56db06acb4363eba5a633167781a48ef036ed"}, - {file = "SQLAlchemy-2.0.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa9373708763ef46782d10e950b49d0235bfe58facebd76917d3f5cbf5971aed"}, - {file = "SQLAlchemy-2.0.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d24f571990c05f6b36a396218f251f3e0dda916e0c687ef6fdca5072743208f5"}, - {file = "SQLAlchemy-2.0.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75432b5b14dc2fff43c50435e248b45c7cdadef73388e5610852b95280ffd0e9"}, - {file = "SQLAlchemy-2.0.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:884272dcd3ad97f47702965a0e902b540541890f468d24bd1d98bcfe41c3f018"}, - {file = "SQLAlchemy-2.0.25-cp311-cp311-win32.whl", hash = "sha256:e607cdd99cbf9bb80391f54446b86e16eea6ad309361942bf88318bcd452363c"}, - {file = "SQLAlchemy-2.0.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d505815ac340568fd03f719446a589162d55c52f08abd77ba8964fbb7eb5b5f"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0dacf67aee53b16f365c589ce72e766efaabd2b145f9de7c917777b575e3659d"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b801154027107461ee992ff4b5c09aa7cc6ec91ddfe50d02bca344918c3265c6"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59a21853f5daeb50412d459cfb13cb82c089ad4c04ec208cd14dddd99fc23b39"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29049e2c299b5ace92cbed0c1610a7a236f3baf4c6b66eb9547c01179f638ec5"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b64b183d610b424a160b0d4d880995e935208fc043d0302dd29fee32d1ee3f95"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4f7a7d7fcc675d3d85fbf3b3828ecd5990b8d61bd6de3f1b260080b3beccf215"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-win32.whl", hash = "sha256:cf18ff7fc9941b8fc23437cc3e68ed4ebeff3599eec6ef5eebf305f3d2e9a7c2"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-win_amd64.whl", hash = "sha256:91f7d9d1c4dd1f4f6e092874c128c11165eafcf7c963128f79e28f8445de82d5"}, - {file = "SQLAlchemy-2.0.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:bb209a73b8307f8fe4fe46f6ad5979649be01607f11af1eb94aa9e8a3aaf77f0"}, - {file = "SQLAlchemy-2.0.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:798f717ae7c806d67145f6ae94dc7c342d3222d3b9a311a784f371a4333212c7"}, - {file = "SQLAlchemy-2.0.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fdd402169aa00df3142149940b3bf9ce7dde075928c1886d9a1df63d4b8de62"}, - {file = "SQLAlchemy-2.0.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0d3cab3076af2e4aa5693f89622bef7fa770c6fec967143e4da7508b3dceb9b9"}, - {file = "SQLAlchemy-2.0.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:74b080c897563f81062b74e44f5a72fa44c2b373741a9ade701d5f789a10ba23"}, - {file = "SQLAlchemy-2.0.25-cp37-cp37m-win32.whl", hash = "sha256:87d91043ea0dc65ee583026cb18e1b458d8ec5fc0a93637126b5fc0bc3ea68c4"}, - {file = "SQLAlchemy-2.0.25-cp37-cp37m-win_amd64.whl", hash = "sha256:75f99202324383d613ddd1f7455ac908dca9c2dd729ec8584c9541dd41822a2c"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:420362338681eec03f53467804541a854617faed7272fe71a1bfdb07336a381e"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7c88f0c7dcc5f99bdb34b4fd9b69b93c89f893f454f40219fe923a3a2fd11625"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3be4987e3ee9d9a380b66393b77a4cd6d742480c951a1c56a23c335caca4ce3"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a159111a0f58fb034c93eeba211b4141137ec4b0a6e75789ab7a3ef3c7e7e3"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8b8cb63d3ea63b29074dcd29da4dc6a97ad1349151f2d2949495418fd6e48db9"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:736ea78cd06de6c21ecba7416499e7236a22374561493b456a1f7ffbe3f6cdb4"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-win32.whl", hash = "sha256:10331f129982a19df4284ceac6fe87353ca3ca6b4ca77ff7d697209ae0a5915e"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-win_amd64.whl", hash = "sha256:c55731c116806836a5d678a70c84cb13f2cedba920212ba7dcad53260997666d"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:605b6b059f4b57b277f75ace81cc5bc6335efcbcc4ccb9066695e515dbdb3900"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:665f0a3954635b5b777a55111ababf44b4fc12b1f3ba0a435b602b6387ffd7cf"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecf6d4cda1f9f6cb0b45803a01ea7f034e2f1aed9475e883410812d9f9e3cfcf"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c51db269513917394faec5e5c00d6f83829742ba62e2ac4fa5c98d58be91662f"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:790f533fa5c8901a62b6fef5811d48980adeb2f51f1290ade8b5e7ba990ba3de"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1b1180cda6df7af84fe72e4530f192231b1f29a7496951db4ff38dac1687202d"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-win32.whl", hash = "sha256:555651adbb503ac7f4cb35834c5e4ae0819aab2cd24857a123370764dc7d7e24"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-win_amd64.whl", hash = "sha256:dc55990143cbd853a5d038c05e79284baedf3e299661389654551bd02a6a68d7"}, - {file = "SQLAlchemy-2.0.25-py3-none-any.whl", hash = "sha256:a86b4240e67d4753dc3092d9511886795b3c2852abe599cffe108952f7af7ac3"}, - {file = "SQLAlchemy-2.0.25.tar.gz", hash = "sha256:a2c69a7664fb2d54b8682dd774c3b54f67f84fa123cf84dda2a5f40dcaa04e08"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4c142852ae192e9fe5aad5c350ea6befe9db14370b34047e1f0f7cf99e63c63b"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:99a1e69d4e26f71e750e9ad6fdc8614fbddb67cfe2173a3628a2566034e223c7"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ef3fbccb4058355053c51b82fd3501a6e13dd808c8d8cd2561e610c5456013c"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d6753305936eddc8ed190e006b7bb33a8f50b9854823485eed3a886857ab8d1"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0f3ca96af060a5250a8ad5a63699180bc780c2edf8abf96c58af175921df847a"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c4520047006b1d3f0d89e0532978c0688219857eb2fee7c48052560ae76aca1e"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-win32.whl", hash = "sha256:b2a0e3cf0caac2085ff172c3faacd1e00c376e6884b5bc4dd5b6b84623e29e4f"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-win_amd64.whl", hash = "sha256:01d10638a37460616708062a40c7b55f73e4d35eaa146781c683e0fa7f6c43fb"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:308ef9cb41d099099fffc9d35781638986870b29f744382904bf9c7dadd08513"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:296195df68326a48385e7a96e877bc19aa210e485fa381c5246bc0234c36c78e"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a13b917b4ffe5a0a31b83d051d60477819ddf18276852ea68037a144a506efb9"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f6d971255d9ddbd3189e2e79d743ff4845c07f0633adfd1de3f63d930dbe673"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:61405ea2d563407d316c63a7b5271ae5d274a2a9fbcd01b0aa5503635699fa1e"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:de7202ffe4d4a8c1e3cde1c03e01c1a3772c92858837e8f3879b497158e4cb44"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-win32.whl", hash = "sha256:b5d7ed79df55a731749ce65ec20d666d82b185fa4898430b17cb90c892741520"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-win_amd64.whl", hash = "sha256:205f5a2b39d7c380cbc3b5dcc8f2762fb5bcb716838e2d26ccbc54330775b003"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d96710d834a6fb31e21381c6d7b76ec729bd08c75a25a5184b1089141356171f"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:52de4736404e53c5c6a91ef2698c01e52333988ebdc218f14c833237a0804f1b"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c7b02525ede2a164c5fa5014915ba3591730f2cc831f5be9ff3b7fd3e30958e"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dfefdb3e54cd15f5d56fd5ae32f1da2d95d78319c1f6dfb9bcd0eb15d603d5d"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a88913000da9205b13f6f195f0813b6ffd8a0c0c2bd58d499e00a30eb508870c"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fecd5089c4be1bcc37c35e9aa678938d2888845a134dd016de457b942cf5a758"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-win32.whl", hash = "sha256:8197d6f7a3d2b468861ebb4c9f998b9df9e358d6e1cf9c2a01061cb9b6cf4e41"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-win_amd64.whl", hash = "sha256:9b19836ccca0d321e237560e475fd99c3d8655d03da80c845c4da20dda31b6e1"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:87a1d53a5382cdbbf4b7619f107cc862c1b0a4feb29000922db72e5a66a5ffc0"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a0732dffe32333211801b28339d2a0babc1971bc90a983e3035e7b0d6f06b93"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90453597a753322d6aa770c5935887ab1fc49cc4c4fdd436901308383d698b4b"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ea311d4ee9a8fa67f139c088ae9f905fcf0277d6cd75c310a21a88bf85e130f5"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5f20cb0a63a3e0ec4e169aa8890e32b949c8145983afa13a708bc4b0a1f30e03"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-win32.whl", hash = "sha256:e5bbe55e8552019c6463709b39634a5fc55e080d0827e2a3a11e18eb73f5cdbd"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-win_amd64.whl", hash = "sha256:c2f9c762a2735600654c654bf48dad388b888f8ce387b095806480e6e4ff6907"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7e614d7a25a43a9f54fcce4675c12761b248547f3d41b195e8010ca7297c369c"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:471fcb39c6adf37f820350c28aac4a7df9d3940c6548b624a642852e727ea586"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:988569c8732f54ad3234cf9c561364221a9e943b78dc7a4aaf35ccc2265f1930"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dddaae9b81c88083e6437de95c41e86823d150f4ee94bf24e158a4526cbead01"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:334184d1ab8f4c87f9652b048af3f7abea1c809dfe526fb0435348a6fef3d380"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:38b624e5cf02a69b113c8047cf7f66b5dfe4a2ca07ff8b8716da4f1b3ae81567"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-win32.whl", hash = "sha256:bab41acf151cd68bc2b466deae5deeb9e8ae9c50ad113444151ad965d5bf685b"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-win_amd64.whl", hash = "sha256:52c8011088305476691b8750c60e03b87910a123cfd9ad48576d6414b6ec2a1d"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3071ad498896907a5ef756206b9dc750f8e57352113c19272bdfdc429c7bd7de"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dba622396a3170974f81bad49aacebd243455ec3cc70615aeaef9e9613b5bca5"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b184e3de58009cc0bf32e20f137f1ec75a32470f5fede06c58f6c355ed42a72"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c37f1050feb91f3d6c32f864d8e114ff5545a4a7afe56778d76a9aec62638ba"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bda7ce59b06d0f09afe22c56714c65c957b1068dee3d5e74d743edec7daba552"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:25664e18bef6dc45015b08f99c63952a53a0a61f61f2e48a9e70cec27e55f699"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-win32.whl", hash = "sha256:77d29cb6c34b14af8a484e831ab530c0f7188f8efed1c6a833a2c674bf3c26ec"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-win_amd64.whl", hash = "sha256:04c487305ab035a9548f573763915189fc0fe0824d9ba28433196f8436f1449c"}, + {file = "SQLAlchemy-2.0.29-py3-none-any.whl", hash = "sha256:dc4ee2d4ee43251905f88637d5281a8d52e916a021384ec10758826f5cbae305"}, + {file = "SQLAlchemy-2.0.29.tar.gz", hash = "sha256:bd9566b8e58cabd700bc367b60e90d9349cd16f0984973f98a9a09f9c64e86f0"}, ] [package.dependencies] -greenlet = {version = "!=0.4.17", markers = "platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\""} +greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} typing-extensions = ">=4.6.0" [package.extras] @@ -3488,13 +3563,13 @@ sqlcipher = ["sqlcipher3_binary"] [[package]] name = "starlette" -version = "0.34.0" +version = "0.37.2" description = "The little ASGI library that shines." optional = false python-versions = ">=3.8" files = [ - {file = "starlette-0.34.0-py3-none-any.whl", hash = "sha256:2e14ee943f2df59eb8c141326240ce601643f1a97b577db44634f6d05d368c37"}, - {file = "starlette-0.34.0.tar.gz", hash = "sha256:ed050aaf3896945bfaae93bdf337e53ef3f29115a9d9c153e402985115cd9c8e"}, + {file = "starlette-0.37.2-py3-none-any.whl", hash = "sha256:6fe59f29268538e5d0d182f2791a479a0c64638e6935d1c6989e63fb2699c6ee"}, + {file = "starlette-0.37.2.tar.gz", hash = "sha256:9af890290133b79fc3db55474ade20f6220a364a0402e0b556e7cd5e1e093823"}, ] [package.dependencies] @@ -3502,7 +3577,7 @@ anyio = ">=3.4.0,<5" typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} [package.extras] -full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart", "pyyaml"] +full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"] [[package]] name = "tabulate" @@ -3531,13 +3606,13 @@ files = [ [[package]] name = "tomlkit" -version = "0.12.3" +version = "0.12.4" description = "Style preserving TOML library" optional = false python-versions = ">=3.7" files = [ - {file = "tomlkit-0.12.3-py3-none-any.whl", hash = "sha256:b0a645a9156dc7cb5d3a1f0d4bab66db287fcb8e0430bdd4664a095ea16414ba"}, - {file = "tomlkit-0.12.3.tar.gz", hash = "sha256:75baf5012d06501f07bee5bf8e801b9f343e7aac5a92581f20f80ce632e6b5a4"}, + {file = "tomlkit-0.12.4-py3-none-any.whl", hash = "sha256:5cd82d48a3dd89dee1f9d64420aa20ae65cfbd00668d6f094d7578a78efbb77b"}, + {file = "tomlkit-0.12.4.tar.gz", hash = "sha256:7ca1cfc12232806517a8515047ba66a19369e71edf2439d0f5824f91032b6cc3"}, ] [[package]] @@ -3553,13 +3628,13 @@ files = [ [[package]] name = "tqdm" -version = "4.66.1" +version = "4.66.2" description = "Fast, Extensible Progress Meter" optional = false python-versions = ">=3.7" files = [ - {file = "tqdm-4.66.1-py3-none-any.whl", hash = "sha256:d302b3c5b53d47bce91fea46679d9c3c6508cf6332229aa1e7d8653723793386"}, - {file = "tqdm-4.66.1.tar.gz", hash = "sha256:d88e651f9db8d8551a62556d3cff9e3034274ca5d66e93197cf2490e2dcb69c7"}, + {file = "tqdm-4.66.2-py3-none-any.whl", hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"}, + {file = "tqdm-4.66.2.tar.gz", hash = "sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"}, ] [package.dependencies] @@ -3573,28 +3648,28 @@ telegram = ["requests"] [[package]] name = "trove-classifiers" -version = "2024.1.8" +version = "2024.3.25" description = "Canonical source for classifiers on PyPI (pypi.org)." optional = false python-versions = "*" files = [ - {file = "trove-classifiers-2024.1.8.tar.gz", hash = "sha256:6e36caf430ff6485c4b57a4c6b364a13f6a898d16b9417c6c37467e59c14b05a"}, - {file = "trove_classifiers-2024.1.8-py3-none-any.whl", hash = "sha256:3c1ff4deb10149c7e39ede6e5bbc107def64362ef1ee7590ec98d71fb92f1b6a"}, + {file = "trove-classifiers-2024.3.25.tar.gz", hash = "sha256:6de68d06edd6fec5032162b6af22e818a4bb6f4ae2258e74699f8a41064b7cad"}, + {file = "trove_classifiers-2024.3.25-py3-none-any.whl", hash = "sha256:c400e0bdceb018913339d53b07682d09a42aada687d070e90ee3c08477bec024"}, ] [[package]] name = "typer" -version = "0.9.0" +version = "0.11.1" description = "Typer, build great CLIs. Easy to code. Based on Python type hints." optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "typer-0.9.0-py3-none-any.whl", hash = "sha256:5d96d986a21493606a358cae4461bd8cdf83cbf33a5aa950ae629ca3b51467ee"}, - {file = "typer-0.9.0.tar.gz", hash = "sha256:50922fd79aea2f4751a8e0408ff10d2662bd0c8bbfa84755a699f3bada2978b2"}, + {file = "typer-0.11.1-py3-none-any.whl", hash = "sha256:4ce7b2a60b8543816ca97d5ec016026cbe95d1a7a931083b988c1d3682548fe7"}, + {file = "typer-0.11.1.tar.gz", hash = "sha256:f5ae987b97ebbbd59182f8e84407bbc925bc636867fa007bce87a7a71ac81d5c"}, ] [package.dependencies] -click = ">=7.1.1,<9.0.0" +click = ">=8.0.0" colorama = {version = ">=0.4.3,<0.5.0", optional = true, markers = "extra == \"all\""} rich = {version = ">=10.11.0,<14.0.0", optional = true, markers = "extra == \"all\""} shellingham = {version = ">=1.3.0,<2.0.0", optional = true, markers = "extra == \"all\""} @@ -3602,38 +3677,35 @@ typing-extensions = ">=3.7.4.3" [package.extras] all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] -dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"] -doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"] -test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] name = "universal-pathlib" -version = "0.1.4" +version = "0.2.2" description = "pathlib api extended to use fsspec backends" optional = false python-versions = ">=3.8" files = [ - {file = "universal_pathlib-0.1.4-py3-none-any.whl", hash = "sha256:f99186cf950bde1262de9a590bb019613ef84f9fabd9f276e8b019722201943a"}, - {file = "universal_pathlib-0.1.4.tar.gz", hash = "sha256:82e5d86d16a27e0ea1adc7d88acbcba9d02d5a45488163174f96d9ac289db2e4"}, + {file = "universal_pathlib-0.2.2-py3-none-any.whl", hash = "sha256:9bc176112d593348bb29806a47e409eda78dff8d95391d66dd6f85e443aaa75d"}, + {file = "universal_pathlib-0.2.2.tar.gz", hash = "sha256:6bc215548792ad5db3553708b1c19bafd9e2fa1667dc925ed404c95e52ae2f13"}, ] [package.dependencies] fsspec = ">=2022.1.0" [package.extras] -dev = ["adlfs", "aiohttp", "cheroot", "gcsfs", "hadoop-test-cluster", "moto[s3,server]", "mypy (==1.3.0)", "packaging", "pyarrow", "pydantic", "pydantic-settings", "pylint (==2.17.4)", "pytest (==7.3.2)", "pytest-cov (==4.1.0)", "pytest-mock (==3.11.1)", "pytest-sugar (==0.9.6)", "requests", "s3fs", "webdav4[fsspec]", "wsgidav"] -tests = ["mypy (==1.3.0)", "packaging", "pylint (==2.17.4)", "pytest (==7.3.2)", "pytest-cov (==4.1.0)", "pytest-mock (==3.11.1)", "pytest-sugar (==0.9.6)"] +dev = ["adlfs", "aiohttp", "cheroot", "gcsfs", "moto[s3,server] (<5)", "mypy (==1.8.0)", "packaging", "pydantic", "pydantic-settings", "pylint (==2.17.4)", "pytest (==8.0.0)", "pytest-cov (==4.1.0)", "pytest-mock (==3.12.0)", "pytest-sugar (==0.9.7)", "requests", "s3fs", "webdav4[fsspec]", "wsgidav"] +tests = ["mypy (==1.8.0)", "packaging", "pylint (==2.17.4)", "pytest (==8.0.0)", "pytest-cov (==4.1.0)", "pytest-mock (==3.12.0)", "pytest-sugar (==0.9.7)"] [[package]] name = "uritemplate" @@ -3648,29 +3720,30 @@ files = [ [[package]] name = "urllib3" -version = "2.1.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, - {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] [[package]] name = "uvicorn" -version = "0.25.0" +version = "0.29.0" description = "The lightning-fast ASGI server." optional = false python-versions = ">=3.8" files = [ - {file = "uvicorn-0.25.0-py3-none-any.whl", hash = "sha256:ce107f5d9bd02b4636001a77a4e74aab5e1e2b146868ebbad565237145af444c"}, - {file = "uvicorn-0.25.0.tar.gz", hash = "sha256:6dddbad1d7ee0f5140aba5ec138ddc9612c5109399903828b4874c9937f009c2"}, + {file = "uvicorn-0.29.0-py3-none-any.whl", hash = "sha256:2c2aac7ff4f4365c206fd773a39bf4ebd1047c238f8b8268ad996829323473de"}, + {file = "uvicorn-0.29.0.tar.gz", hash = "sha256:6a69214c0b6a087462412670b3ef21224fa48cae0e452b5883e8e8bdfdd11dd0"}, ] [package.dependencies] @@ -3734,13 +3807,13 @@ test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)" [[package]] name = "virtualenv" -version = "20.25.0" +version = "20.25.1" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.25.0-py3-none-any.whl", hash = "sha256:4238949c5ffe6876362d9c0180fc6c3a824a7b12b80604eeb8085f2ed7460de3"}, - {file = "virtualenv-20.25.0.tar.gz", hash = "sha256:bf51c0d9c7dd63ea8e44086fa1e4fb1093a31e963b86959257378aef020e1f1b"}, + {file = "virtualenv-20.25.1-py3-none-any.whl", hash = "sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a"}, + {file = "virtualenv-20.25.1.tar.gz", hash = "sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197"}, ] [package.dependencies] @@ -3754,38 +3827,40 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [[package]] name = "watchdog" -version = "3.0.0" +version = "4.0.0" description = "Filesystem events monitoring" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:336adfc6f5cc4e037d52db31194f7581ff744b67382eb6021c868322e32eef41"}, - {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a70a8dcde91be523c35b2bf96196edc5730edb347e374c7de7cd20c43ed95397"}, - {file = "watchdog-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:adfdeab2da79ea2f76f87eb42a3ab1966a5313e5a69a0213a3cc06ef692b0e96"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2b57a1e730af3156d13b7fdddfc23dea6487fceca29fc75c5a868beed29177ae"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ade88d0d778b1b222adebcc0927428f883db07017618a5e684fd03b83342bd9"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7e447d172af52ad204d19982739aa2346245cc5ba6f579d16dac4bfec226d2e7"}, - {file = "watchdog-3.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9fac43a7466eb73e64a9940ac9ed6369baa39b3bf221ae23493a9ec4d0022674"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8ae9cda41fa114e28faf86cb137d751a17ffd0316d1c34ccf2235e8a84365c7f"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:25f70b4aa53bd743729c7475d7ec41093a580528b100e9a8c5b5efe8899592fc"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4f94069eb16657d2c6faada4624c39464f65c05606af50bb7902e036e3219be3"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7c5f84b5194c24dd573fa6472685b2a27cc5a17fe5f7b6fd40345378ca6812e3"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3aa7f6a12e831ddfe78cdd4f8996af9cf334fd6346531b16cec61c3b3c0d8da0"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:233b5817932685d39a7896b1090353fc8efc1ef99c9c054e46c8002561252fb8"}, - {file = "watchdog-3.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:13bbbb462ee42ec3c5723e1205be8ced776f05b100e4737518c67c8325cf6100"}, - {file = "watchdog-3.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8f3ceecd20d71067c7fd4c9e832d4e22584318983cabc013dbf3f70ea95de346"}, - {file = "watchdog-3.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c9d8c8ec7efb887333cf71e328e39cffbf771d8f8f95d308ea4125bf5f90ba64"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0e06ab8858a76e1219e68c7573dfeba9dd1c0219476c5a44d5333b01d7e1743a"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:d00e6be486affb5781468457b21a6cbe848c33ef43f9ea4a73b4882e5f188a44"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:c07253088265c363d1ddf4b3cdb808d59a0468ecd017770ed716991620b8f77a"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:5113334cf8cf0ac8cd45e1f8309a603291b614191c9add34d33075727a967709"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:51f90f73b4697bac9c9a78394c3acbbd331ccd3655c11be1a15ae6fe289a8c83"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:ba07e92756c97e3aca0912b5cbc4e5ad802f4557212788e72a72a47ff376950d"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:d429c2430c93b7903914e4db9a966c7f2b068dd2ebdd2fa9b9ce094c7d459f33"}, - {file = "watchdog-3.0.0-py3-none-win32.whl", hash = "sha256:3ed7c71a9dccfe838c2f0b6314ed0d9b22e77d268c67e015450a29036a81f60f"}, - {file = "watchdog-3.0.0-py3-none-win_amd64.whl", hash = "sha256:4c9956d27be0bb08fc5f30d9d0179a855436e655f046d288e2bcc11adfae893c"}, - {file = "watchdog-3.0.0-py3-none-win_ia64.whl", hash = "sha256:5d9f3a10e02d7371cd929b5d8f11e87d4bad890212ed3901f9b4d68767bee759"}, - {file = "watchdog-3.0.0.tar.gz", hash = "sha256:4d98a320595da7a7c5a18fc48cb633c2e73cda78f93cac2ef42d42bf609a33f9"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:39cb34b1f1afbf23e9562501673e7146777efe95da24fab5707b88f7fb11649b"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c522392acc5e962bcac3b22b9592493ffd06d1fc5d755954e6be9f4990de932b"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6c47bdd680009b11c9ac382163e05ca43baf4127954c5f6d0250e7d772d2b80c"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8350d4055505412a426b6ad8c521bc7d367d1637a762c70fdd93a3a0d595990b"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c17d98799f32e3f55f181f19dd2021d762eb38fdd381b4a748b9f5a36738e935"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4986db5e8880b0e6b7cd52ba36255d4793bf5cdc95bd6264806c233173b1ec0b"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:11e12fafb13372e18ca1bbf12d50f593e7280646687463dd47730fd4f4d5d257"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5369136a6474678e02426bd984466343924d1df8e2fd94a9b443cb7e3aa20d19"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76ad8484379695f3fe46228962017a7e1337e9acadafed67eb20aabb175df98b"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:45cc09cc4c3b43fb10b59ef4d07318d9a3ecdbff03abd2e36e77b6dd9f9a5c85"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eed82cdf79cd7f0232e2fdc1ad05b06a5e102a43e331f7d041e5f0e0a34a51c4"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba30a896166f0fee83183cec913298151b73164160d965af2e93a20bbd2ab605"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d18d7f18a47de6863cd480734613502904611730f8def45fc52a5d97503e5101"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2895bf0518361a9728773083908801a376743bcc37dfa252b801af8fd281b1ca"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87e9df830022488e235dd601478c15ad73a0389628588ba0b028cb74eb72fed8"}, + {file = "watchdog-4.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6e949a8a94186bced05b6508faa61b7adacc911115664ccb1923b9ad1f1ccf7b"}, + {file = "watchdog-4.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6a4db54edea37d1058b08947c789a2354ee02972ed5d1e0dca9b0b820f4c7f92"}, + {file = "watchdog-4.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d31481ccf4694a8416b681544c23bd271f5a123162ab603c7d7d2dd7dd901a07"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8fec441f5adcf81dd240a5fe78e3d83767999771630b5ddfc5867827a34fa3d3"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:6a9c71a0b02985b4b0b6d14b875a6c86ddea2fdbebd0c9a720a806a8bbffc69f"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:557ba04c816d23ce98a06e70af6abaa0485f6d94994ec78a42b05d1c03dcbd50"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d0f9bd1fd919134d459d8abf954f63886745f4660ef66480b9d753a7c9d40927"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f9b2fdca47dc855516b2d66eef3c39f2672cbf7e7a42e7e67ad2cbfcd6ba107d"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:73c7a935e62033bd5e8f0da33a4dcb763da2361921a69a5a95aaf6c93aa03a87"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:6a80d5cae8c265842c7419c560b9961561556c4361b297b4c431903f8c33b269"}, + {file = "watchdog-4.0.0-py3-none-win32.whl", hash = "sha256:8f9a542c979df62098ae9c58b19e03ad3df1c9d8c6895d96c0d51da17b243b1c"}, + {file = "watchdog-4.0.0-py3-none-win_amd64.whl", hash = "sha256:f970663fa4f7e80401a7b0cbeec00fa801bf0287d93d48368fc3e6fa32716245"}, + {file = "watchdog-4.0.0-py3-none-win_ia64.whl", hash = "sha256:9a03e16e55465177d416699331b0f3564138f1807ecc5f2de9d55d8f188d08c7"}, + {file = "watchdog-4.0.0.tar.gz", hash = "sha256:e3e7065cbdabe6183ab82199d7a4f6b3ba0a438c5a512a68559846ccb76a78ec"}, ] [package.extras] @@ -4068,87 +4143,76 @@ files = [ [[package]] name = "xattr" -version = "0.10.1" +version = "1.1.0" description = "Python wrapper for extended filesystem attributes" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "xattr-0.10.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:16a660a883e703b311d1bbbcafc74fa877585ec081cd96e8dd9302c028408ab1"}, - {file = "xattr-0.10.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:1e2973e72faa87ca29d61c23b58c3c89fe102d1b68e091848b0e21a104123503"}, - {file = "xattr-0.10.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:13279fe8f7982e3cdb0e088d5cb340ce9cbe5ef92504b1fd80a0d3591d662f68"}, - {file = "xattr-0.10.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:1dc9b9f580ef4b8ac5e2c04c16b4d5086a611889ac14ecb2e7e87170623a0b75"}, - {file = "xattr-0.10.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:485539262c2b1f5acd6b6ea56e0da2bc281a51f74335c351ea609c23d82c9a79"}, - {file = "xattr-0.10.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:295b3ab335fcd06ca0a9114439b34120968732e3f5e9d16f456d5ec4fa47a0a2"}, - {file = "xattr-0.10.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:a126eb38e14a2f273d584a692fe36cff760395bf7fc061ef059224efdb4eb62c"}, - {file = "xattr-0.10.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:b0e919c24f5b74428afa91507b15e7d2ef63aba98e704ad13d33bed1288dca81"}, - {file = "xattr-0.10.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:e31d062cfe1aaeab6ba3db6bd255f012d105271018e647645941d6609376af18"}, - {file = "xattr-0.10.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:209fb84c09b41c2e4cf16dd2f481bb4a6e2e81f659a47a60091b9bcb2e388840"}, - {file = "xattr-0.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c4120090dac33eddffc27e487f9c8f16b29ff3f3f8bcb2251b2c6c3f974ca1e1"}, - {file = "xattr-0.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3e739d624491267ec5bb740f4eada93491de429d38d2fcdfb97b25efe1288eca"}, - {file = "xattr-0.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2677d40b95636f3482bdaf64ed9138fb4d8376fb7933f434614744780e46e42d"}, - {file = "xattr-0.10.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40039f1532c4456fd0f4c54e9d4e01eb8201248c321c6c6856262d87e9a99593"}, - {file = "xattr-0.10.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:148466e5bb168aba98f80850cf976e931469a3c6eb11e9880d9f6f8b1e66bd06"}, - {file = "xattr-0.10.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0aedf55b116beb6427e6f7958ccd80a8cbc80e82f87a4cd975ccb61a8d27b2ee"}, - {file = "xattr-0.10.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c3024a9ff157247c8190dd0eb54db4a64277f21361b2f756319d9d3cf20e475f"}, - {file = "xattr-0.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f1be6e733e9698f645dbb98565bb8df9b75e80e15a21eb52787d7d96800e823b"}, - {file = "xattr-0.10.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7880c8a54c18bc091a4ce0adc5c6d81da1c748aec2fe7ac586d204d6ec7eca5b"}, - {file = "xattr-0.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:89c93b42c3ba8aedbc29da759f152731196c2492a2154371c0aae3ef8ba8301b"}, - {file = "xattr-0.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6b905e808df61b677eb972f915f8a751960284358b520d0601c8cbc476ba2df6"}, - {file = "xattr-0.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1ef954d0655f93a34d07d0cc7e02765ec779ff0b59dc898ee08c6326ad614d5"}, - {file = "xattr-0.10.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:199b20301b6acc9022661412346714ce764d322068ef387c4de38062474db76c"}, - {file = "xattr-0.10.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec0956a8ab0f0d3f9011ba480f1e1271b703d11542375ef73eb8695a6bd4b78b"}, - {file = "xattr-0.10.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ffcb57ca1be338d69edad93cf59aac7c6bb4dbb92fd7bf8d456c69ea42f7e6d2"}, - {file = "xattr-0.10.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f0563196ee54756fe2047627d316977dc77d11acd7a07970336e1a711e934db"}, - {file = "xattr-0.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc354f086f926a1c7f04886f97880fed1a26d20e3bc338d0d965fd161dbdb8ab"}, - {file = "xattr-0.10.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c0cd2d02ef2fb45ecf2b0da066a58472d54682c6d4f0452dfe7ae2f3a76a42ea"}, - {file = "xattr-0.10.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49626096ddd72dcc1654aadd84b103577d8424f26524a48d199847b5d55612d0"}, - {file = "xattr-0.10.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ceaa26bef8fcb17eb59d92a7481c2d15d20211e217772fb43c08c859b01afc6a"}, - {file = "xattr-0.10.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8c014c371391f28f8cd27d73ea59f42b30772cd640b5a2538ad4f440fd9190b"}, - {file = "xattr-0.10.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:46c32cd605673606b9388a313b0050ee7877a0640d7561eea243ace4fa2cc5a6"}, - {file = "xattr-0.10.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:772b22c4ff791fe5816a7c2a1c9fcba83f9ab9bea138eb44d4d70f34676232b4"}, - {file = "xattr-0.10.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:183ad611a2d70b5a3f5f7aadef0fcef604ea33dcf508228765fd4ddac2c7321d"}, - {file = "xattr-0.10.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8068df3ebdfa9411e58d5ae4a05d807ec5994645bb01af66ec9f6da718b65c5b"}, - {file = "xattr-0.10.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bc40570155beb85e963ae45300a530223d9822edfdf09991b880e69625ba38a"}, - {file = "xattr-0.10.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:436e1aaf23c07e15bed63115f1712d2097e207214fc6bcde147c1efede37e2c5"}, - {file = "xattr-0.10.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7298455ccf3a922d403339781b10299b858bb5ec76435445f2da46fb768e31a5"}, - {file = "xattr-0.10.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:986c2305c6c1a08f78611eb38ef9f1f47682774ce954efb5a4f3715e8da00d5f"}, - {file = "xattr-0.10.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:5dc6099e76e33fa3082a905fe59df766b196534c705cf7a2e3ad9bed2b8a180e"}, - {file = "xattr-0.10.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:042ad818cda6013162c0bfd3816f6b74b7700e73c908cde6768da824686885f8"}, - {file = "xattr-0.10.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9d4c306828a45b41b76ca17adc26ac3dc00a80e01a5ba85d71df2a3e948828f2"}, - {file = "xattr-0.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a606280b0c9071ef52572434ecd3648407b20df3d27af02c6592e84486b05894"}, - {file = "xattr-0.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5b49d591cf34cda2079fd7a5cb2a7a1519f54dc2e62abe3e0720036f6ed41a85"}, - {file = "xattr-0.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b8705ac6791426559c1a5c2b88bb2f0e83dc5616a09b4500899bfff6a929302"}, - {file = "xattr-0.10.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a5ea974930e876bc5c146f54ac0f85bb39b7b5de2b6fc63f90364712ae368ebe"}, - {file = "xattr-0.10.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f55a2dd73a12a1ae5113c5d9cd4b4ab6bf7950f4d76d0a1a0c0c4264d50da61d"}, - {file = "xattr-0.10.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:475c38da0d3614cc5564467c4efece1e38bd0705a4dbecf8deeb0564a86fb010"}, - {file = "xattr-0.10.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:925284a4a28e369459b2b7481ea22840eed3e0573a4a4c06b6b0614ecd27d0a7"}, - {file = "xattr-0.10.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa32f1b45fed9122bed911de0fcc654da349e1f04fa4a9c8ef9b53e1cc98b91e"}, - {file = "xattr-0.10.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c5d3d0e728bace64b74c475eb4da6148cd172b2d23021a1dcd055d92f17619ac"}, - {file = "xattr-0.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8faaacf311e2b5cc67c030c999167a78a9906073e6abf08eaa8cf05b0416515c"}, - {file = "xattr-0.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cc6b8d5ca452674e1a96e246a3d2db5f477aecbc7c945c73f890f56323e75203"}, - {file = "xattr-0.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3725746a6502f40f72ef27e0c7bfc31052a239503ff3eefa807d6b02a249be22"}, - {file = "xattr-0.10.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:789bd406d1aad6735e97b20c6d6a1701e1c0661136be9be862e6a04564da771f"}, - {file = "xattr-0.10.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9a7a807ab538210ff8532220d8fc5e2d51c212681f63dbd4e7ede32543b070f"}, - {file = "xattr-0.10.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3e5825b5fc99ecdd493b0cc09ec35391e7a451394fdf623a88b24726011c950d"}, - {file = "xattr-0.10.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:80638d1ce7189dc52f26c234cee3522f060fadab6a8bc3562fe0ddcbe11ba5a4"}, - {file = "xattr-0.10.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3ff0dbe4a6ce2ce065c6de08f415bcb270ecfd7bf1655a633ddeac695ce8b250"}, - {file = "xattr-0.10.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5267e5f9435c840d2674194150b511bef929fa7d3bc942a4a75b9eddef18d8d8"}, - {file = "xattr-0.10.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b27dfc13b193cb290d5d9e62f806bb9a99b00cd73bb6370d556116ad7bb5dc12"}, - {file = "xattr-0.10.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:636ebdde0277bce4d12d2ef2550885804834418fee0eb456b69be928e604ecc4"}, - {file = "xattr-0.10.1-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d60c27922ec80310b45574351f71e0dd3a139c5295e8f8b19d19c0010196544f"}, - {file = "xattr-0.10.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b34df5aad035d0343bd740a95ca30db99b776e2630dca9cc1ba8e682c9cc25ea"}, - {file = "xattr-0.10.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f24a7c04ff666d0fe905dfee0a84bc899d624aeb6dccd1ea86b5c347f15c20c1"}, - {file = "xattr-0.10.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3878e1aff8eca64badad8f6d896cb98c52984b1e9cd9668a3ab70294d1ef92d"}, - {file = "xattr-0.10.1-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4abef557028c551d59cf2fb3bf63f2a0c89f00d77e54c1c15282ecdd56943496"}, - {file = "xattr-0.10.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0e14bd5965d3db173d6983abdc1241c22219385c22df8b0eb8f1846c15ce1fee"}, - {file = "xattr-0.10.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f9be588a4b6043b03777d50654c6079af3da60cc37527dbb80d36ec98842b1e"}, - {file = "xattr-0.10.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bc4ae264aa679aacf964abf3ea88e147eb4a22aea6af8c6d03ebdebd64cfd6"}, - {file = "xattr-0.10.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:827b5a97673b9997067fde383a7f7dc67342403093b94ea3c24ae0f4f1fec649"}, - {file = "xattr-0.10.1.tar.gz", hash = "sha256:c12e7d81ffaa0605b3ac8c22c2994a8e18a9cf1c59287a1b7722a2289c952ec5"}, + {file = "xattr-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ef2fa0f85458736178fd3dcfeb09c3cf423f0843313e25391db2cfd1acec8888"}, + {file = "xattr-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ccab735d0632fe71f7d72e72adf886f45c18b7787430467ce0070207882cfe25"}, + {file = "xattr-1.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9013f290387f1ac90bccbb1926555ca9aef75651271098d99217284d9e010f7c"}, + {file = "xattr-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcd5dfbcee73c7be057676ecb900cabb46c691aff4397bf48c579ffb30bb963"}, + {file = "xattr-1.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6480589c1dac7785d1f851347a32c4a97305937bf7b488b857fe8b28a25de9e9"}, + {file = "xattr-1.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08f61cbed52dc6f7c181455826a9ff1e375ad86f67dd9d5eb7663574abb32451"}, + {file = "xattr-1.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:918e1f83f2e8a072da2671eac710871ee5af337e9bf8554b5ce7f20cdb113186"}, + {file = "xattr-1.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0f06e0c1e4d06b4e0e49aaa1184b6f0e81c3758c2e8365597918054890763b53"}, + {file = "xattr-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:46a641ac038a9f53d2f696716147ca4dbd6a01998dc9cd4bc628801bc0df7f4d"}, + {file = "xattr-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7e4ca0956fd11679bb2e0c0d6b9cdc0f25470cc00d8da173bb7656cc9a9cf104"}, + {file = "xattr-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6881b120f9a4b36ccd8a28d933bc0f6e1de67218b6ce6e66874e0280fc006844"}, + {file = "xattr-1.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dab29d9288aa28e68a6f355ddfc3f0a7342b40c9012798829f3e7bd765e85c2c"}, + {file = "xattr-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0c80bbf55339c93770fc294b4b6586b5bf8e85ec00a4c2d585c33dbd84b5006"}, + {file = "xattr-1.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1418705f253b6b6a7224b69773842cac83fcbcd12870354b6e11dd1cd54630f"}, + {file = "xattr-1.1.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:687e7d18611ef8d84a6ecd8f4d1ab6757500c1302f4c2046ce0aa3585e13da3f"}, + {file = "xattr-1.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b6ceb9efe0657a982ccb8b8a2efe96b690891779584c901d2f920784e5d20ae3"}, + {file = "xattr-1.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b489b7916f239100956ea0b39c504f3c3a00258ba65677e4c8ba1bd0b5513446"}, + {file = "xattr-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0a9c431b0e66516a078125e9a273251d4b8e5ba84fe644b619f2725050d688a0"}, + {file = "xattr-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1a5921ea3313cc1c57f2f53b63ea8ca9a91e48f4cc7ebec057d2447ec82c7efe"}, + {file = "xattr-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f6ad2a7bd5e6cf71d4a862413234a067cf158ca0ae94a40d4b87b98b62808498"}, + {file = "xattr-1.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0683dae7609f7280b0c89774d00b5957e6ffcb181c6019c46632b389706b77e6"}, + {file = "xattr-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54cb15cd94e5ef8a0ef02309f1bf973ba0e13c11e87686e983f371948cfee6af"}, + {file = "xattr-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff6223a854229055e803c2ad0c0ea9a6da50c6be30d92c198cf5f9f28819a921"}, + {file = "xattr-1.1.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d44e8f955218638c9ab222eed21e9bd9ab430d296caf2176fb37abe69a714e5c"}, + {file = "xattr-1.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:caab2c2986c30f92301f12e9c50415d324412e8e6a739a52a603c3e6a54b3610"}, + {file = "xattr-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:d6eb7d5f281014cd44e2d847a9107491af1bf3087f5afeded75ed3e37ec87239"}, + {file = "xattr-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:47a3bdfe034b4fdb70e5941d97037405e3904accc28e10dbef6d1c9061fb6fd7"}, + {file = "xattr-1.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:00d2b415cf9d6a24112d019e721aa2a85652f7bbc9f3b9574b2d1cd8668eb491"}, + {file = "xattr-1.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:78b377832dd0ee408f9f121a354082c6346960f7b6b1480483ed0618b1912120"}, + {file = "xattr-1.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6461a43b585e5f2e049b39bcbfcb6391bfef3c5118231f1b15d10bdb89ef17fe"}, + {file = "xattr-1.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24d97f0d28f63695e3344ffdabca9fcc30c33e5c8ccc198c7524361a98d526f2"}, + {file = "xattr-1.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ad47d89968c9097900607457a0c89160b4771601d813e769f68263755516065"}, + {file = "xattr-1.1.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc53cab265f6e8449bd683d5ee3bc5a191e6dd940736f3de1a188e6da66b0653"}, + {file = "xattr-1.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cd11e917f5b89f2a0ad639d9875943806c6c9309a3dd02da5a3e8ef92db7bed9"}, + {file = "xattr-1.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9c5a78c7558989492c4cb7242e490ffb03482437bf782967dfff114e44242343"}, + {file = "xattr-1.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cebcf8a303a44fbc439b68321408af7267507c0d8643229dbb107f6c132d389c"}, + {file = "xattr-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b0d73150f2f9655b4da01c2369eb33a294b7f9d56eccb089819eafdbeb99f896"}, + {file = "xattr-1.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:793c01deaadac50926c0e1481702133260c7cb5e62116762f6fe1543d07b826f"}, + {file = "xattr-1.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e189e440bcd04ccaad0474720abee6ee64890823ec0db361fb0a4fb5e843a1bf"}, + {file = "xattr-1.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afacebbc1fa519f41728f8746a92da891c7755e6745164bd0d5739face318e86"}, + {file = "xattr-1.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b1664edf003153ac8d1911e83a0fc60db1b1b374ee8ac943f215f93754a1102"}, + {file = "xattr-1.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dda2684228798e937a7c29b0e1c7ef3d70e2b85390a69b42a1c61b2039ba81de"}, + {file = "xattr-1.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b735ac2625a4fc2c9343b19f806793db6494336338537d2911c8ee4c390dda46"}, + {file = "xattr-1.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:fa6a7af7a4ada43f15ccc58b6f9adcdbff4c36ba040013d2681e589e07ae280a"}, + {file = "xattr-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d1059b2f726e2702c8bbf9bbf369acfc042202a4cc576c2dec6791234ad5e948"}, + {file = "xattr-1.1.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e2255f36ebf2cb2dbf772a7437ad870836b7396e60517211834cf66ce678b595"}, + {file = "xattr-1.1.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dba4f80b9855cc98513ddf22b7ad8551bc448c70d3147799ea4f6c0b758fb466"}, + {file = "xattr-1.1.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4cb70c16e7c3ae6ba0ab6c6835c8448c61d8caf43ea63b813af1f4dbe83dd156"}, + {file = "xattr-1.1.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83652910ef6a368b77b00825ad67815e5c92bfab551a848ca66e9981d14a7519"}, + {file = "xattr-1.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7a92aff66c43fa3e44cbeab7cbeee66266c91178a0f595e044bf3ce51485743b"}, + {file = "xattr-1.1.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d4f71b673339aeaae1f6ea9ef8ea6c9643c8cd0df5003b9a0eaa75403e2e06c"}, + {file = "xattr-1.1.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a20de1c47b5cd7b47da61799a3b34e11e5815d716299351f82a88627a43f9a96"}, + {file = "xattr-1.1.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23705c7079b05761ff2fa778ad17396e7599c8759401abc05b312dfb3bc99f69"}, + {file = "xattr-1.1.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:27272afeba8422f2a9d27e1080a9a7b807394e88cce73db9ed8d2dde3afcfb87"}, + {file = "xattr-1.1.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd43978966de3baf4aea367c99ffa102b289d6c2ea5f3d9ce34a203dc2f2ab73"}, + {file = "xattr-1.1.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ded771eaf27bb4eb3c64c0d09866460ee8801d81dc21097269cf495b3cac8657"}, + {file = "xattr-1.1.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96ca300c0acca4f0cddd2332bb860ef58e1465d376364f0e72a1823fdd58e90d"}, + {file = "xattr-1.1.0.tar.gz", hash = "sha256:fecbf3b05043ed3487a28190dec3e4c4d879b2fcec0e30bafd8ec5d4b6043630"}, ] [package.dependencies] -cffi = ">=1.0" +cffi = ">=1.16.0" + +[package.extras] +test = ["pytest"] [[package]] name = "yarl" @@ -4255,20 +4319,20 @@ multidict = ">=4.0" [[package]] name = "zipp" -version = "3.17.0" +version = "3.18.1" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, - {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, + {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, + {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] [metadata] lock-version = "2.0" python-versions = "^3.9, <3.13" -content-hash = "122eb321956cdeea7fd94d3d6ef5fd0d2b8153d2d5fb136a9644fc83a1f56419" +content-hash = "5c6adde8c403c0fd94abd53bf22f4e7e9164dac4f50d5357e0fef49f55e5ac69" diff --git a/airbyte-ci/connectors/metadata_service/orchestrator/pyproject.toml b/airbyte-ci/connectors/metadata_service/orchestrator/pyproject.toml index 1cd1f9abfa41d..caffe24d42d23 100644 --- a/airbyte-ci/connectors/metadata_service/orchestrator/pyproject.toml +++ b/airbyte-ci/connectors/metadata_service/orchestrator/pyproject.toml @@ -34,7 +34,7 @@ pendulum = "<3.0.0" [tool.poetry.group.dev.dependencies] -pytest = "^7.2.2" +pytest = "^8" ptpython = "^3.0.23" pdbpp = "^0.10.3" @@ -49,5 +49,5 @@ module_name = "orchestrator" test = "pytest tests" [tool.airbyte_ci] -extra_poetry_groups = ["dev"] +optional_poetry_groups = ["dev"] poe_tasks = ["test"] diff --git a/airbyte-ci/connectors/metadata_service/orchestrator/tests/test_registry.py b/airbyte-ci/connectors/metadata_service/orchestrator/tests/test_registry.py index 151e49b4ab8a2..4e88c1eb2dc99 100644 --- a/airbyte-ci/connectors/metadata_service/orchestrator/tests/test_registry.py +++ b/airbyte-ci/connectors/metadata_service/orchestrator/tests/test_registry.py @@ -220,7 +220,7 @@ def test_definition_id_conversion(registry_type, connector_type, expected_id_fie result = metadata_to_registry_entry(mock_metadata_entry, registry_type) assert "definitionId" not in result - assert result[expected_id_field] == "test-id" + assert "test-id" == result[expected_id_field] def test_tombstone_custom_public_set(): diff --git a/airbyte-ci/connectors/pipelines/README.md b/airbyte-ci/connectors/pipelines/README.md index 5efb54e231cb8..7e4a71c9d14e6 100644 --- a/airbyte-ci/connectors/pipelines/README.md +++ b/airbyte-ci/connectors/pipelines/README.md @@ -68,7 +68,8 @@ make tools.airbyte-ci.clean ## Disabling telemetry -We collect anonymous usage data to help improve the tool. If you would like to disable this, you can set the `AIRBYTE_CI_DISABLE_TELEMETRY` environment variable to `true`. +We collect anonymous usage data to help improve the tool. If you would like to disable this, you can +set the `AIRBYTE_CI_DISABLE_TELEMETRY` environment variable to `true`. ## Installation for development @@ -145,6 +146,7 @@ At this point you can run `airbyte-ci` commands. - [`connectors upgrade_cdk` command](#connectors-upgrade_cdk) - [`connectors upgrade_base_image` command](#connectors-upgrade_base_image) - [`connectors migrate_to_base_image` command](#connectors-migrate_to_base_image) +- [`connectors migrate-to-poetry` command](#connectors-migrate-to-poetry) - [`format` command subgroup](#format-subgroup) - [`format check` command](#format-check-command) - [`format fix` command](#format-fix-command) @@ -271,7 +273,7 @@ flowchart TD build[Build connector docker image] unit[Run unit tests] integration[Run integration tests] - airbyte_lib_validation[Run airbyte-lib validation tests] + pyairbyte_validation[Run PyAirbyte validation tests] cat[Run connector acceptance tests] secret[Load connector configuration] @@ -279,7 +281,7 @@ flowchart TD unit-->build secret-->integration secret-->cat - secret-->airbyte_lib_validation + secret-->pyairbyte_validation build-->integration build-->cat end @@ -512,16 +514,19 @@ Make a connector using a Dockerfile migrate to the base image by: - Updating its documentation to explain the build process - Bumping by a patch version -### Examples +#### Examples Migrate source-openweather to use the base image: `airbyte-ci connectors --name=source-openweather migrate_to_base_image` -### Arguments +### `connectors migrate-to-poetry` command -| Argument | Description | -| --------------------- | ----------------------------------------------------------- | -| `PULL_REQUEST_NUMBER` | The GitHub pull request number, used in the changelog entry | +Migrate connectors the poetry package manager. + +#### Examples + +Migrate source-openweather to use the base image: +`airbyte-ci connectors --name=source-openweather migrate-to-poetry` ### `format` command subgroup @@ -617,10 +622,13 @@ flowchart TD ### `tests` command -This command runs the poe tasks declared in the `[tool.airbyte-ci]` section of our internal poetry packages. -Feel free to checkout this [Pydantic model](https://github.com/airbytehq/airbyte/blob/main/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/test/models.py#L9) to see the list of available options in `[tool.airbyte-ci]` section. +This command runs the poe tasks declared in the `[tool.airbyte-ci]` section of our internal poetry +packages. Feel free to checkout this +[Pydantic model](https://github.com/airbytehq/airbyte/blob/main/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/test/models.py#L9) +to see the list of available options in `[tool.airbyte-ci]` section. -You can find the list of internal packages [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/test/__init__.py#L1) +You can find the list of internal packages +[here](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/test/__init__.py#L1) #### Options @@ -631,19 +639,28 @@ You can find the list of internal packages [here](https://github.com/airbytehq/a | `--ci-requirements` | False | False | Output the CI requirements as a JSON payload. It is used to determine the CI runner to use. | #### Examples -You can pass multiple `--poetry-package-path` options to run poe tasks. -E.G.: running Poe tasks on `airbyte-lib` and `airbyte-ci/connectors/pipelines`: -`airbyte-ci test --poetry-package-path=airbyte-ci/connectors/pipelines --poetry-package-path=airbyte-lib` +You can pass multiple `--poetry-package-path` options to run poe tasks. E.G.: running Poe tasks on the modified internal packages of the current branch: `airbyte-ci test --modified` - ## Changelog | Version | PR | Description | -| ------- | ---------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------- | +| ------- | ---------------------------------------------------------- |----------------------------------------------------------------------------------------------------------------------------| +| 4.7.3 | [#37101](https://github.com/airbytehq/airbyte/pull/37101) | Pin PyAirbyte version. | +| 4.7.2 | [#36962](https://github.com/airbytehq/airbyte/pull/36962) | Re-enable connector dependencies upload on publish. | +| 4.7.1 | [#36961](https://github.com/airbytehq/airbyte/pull/36961) | Temporarily disable python connectors dependencies upload until we find a schema the data team can work with. | +| 4.7.0 | [#36892](https://github.com/airbytehq/airbyte/pull/36892) | Upload Python connectors dependencies list to GCS on publish. | +| 4.6.5 | [#36722](https://github.com/airbytehq/airbyte/pull/36527) | Fix incorrect pipeline names | +| 4.6.4 | [#36480](https://github.com/airbytehq/airbyte/pull/36480) | Burst the Gradle Task cache if a new CDK version was released | +| 4.6.3 | [#36527](https://github.com/airbytehq/airbyte/pull/36527) | Handle extras as well as groups in `airbyte ci test` [poetry packages] | +| 4.6.2 | [#36220](https://github.com/airbytehq/airbyte/pull/36220) | Allow using `migrate-to-base-image` without PULL_REQUEST_NUMBER | +| 4.6.1 | [#36319](https://github.com/airbytehq/airbyte/pull/36319) | Fix `ValueError` related to PR number in migrate-to-poetry | +| 4.6.0 | [#35583](https://github.com/airbytehq/airbyte/pull/35583) | Implement the `airbyte-ci connectors migrate-to-poetry` command. | +| 4.5.4 | [#36206](https://github.com/airbytehq/airbyte/pull/36206) | Revert poetry cache removal during nightly builds | +| 4.5.3 | [#34586](https://github.com/airbytehq/airbyte/pull/34586) | Extract connector changelog modification logic into its own class | | 4.5.2 | [#35802](https://github.com/airbytehq/airbyte/pull/35802) | Fix bug with connectors bump_version command | | 4.5.1 | [#35786](https://github.com/airbytehq/airbyte/pull/35786) | Declare `live_tests` as an internal poetry package. | | 4.5.0 | [#35784](https://github.com/airbytehq/airbyte/pull/35784) | Format command supports kotlin | diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/bump_version/commands.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/bump_version/commands.py index 17bf1241fc2e1..cafa6601e1892 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/bump_version/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/bump_version/commands.py @@ -24,7 +24,7 @@ async def bump_version( connectors_contexts = [ ConnectorContext( - pipeline_name=f"Upgrade base image versions of connector {connector.technical_name}", + pipeline_name=f"Bump version of connector {connector.technical_name}", connector=connector, is_local=ctx.obj["is_local"], git_branch=ctx.obj["git_branch"], diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/bump_version/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/bump_version/pipeline.py index 329da37639b35..eaf68dad6bcc8 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/bump_version/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/bump_version/pipeline.py @@ -10,6 +10,7 @@ from pipelines.airbyte_ci.connectors.reports import ConnectorReport, Report from pipelines.airbyte_ci.metadata.pipeline import MetadataValidation from pipelines.helpers import git +from pipelines.helpers.changelog import Changelog from pipelines.helpers.connectors import metadata_change_helpers from pipelines.models.steps import Step, StepResult, StepStatus @@ -39,14 +40,16 @@ def __init__( context: ConnectorContext, repo_dir: Container, new_version: str, - changelog_entry: str, + comment: str, pull_request_number: str, + export_docs: bool = False, ) -> None: super().__init__(context) self.repo_dir = repo_dir - self.new_version = new_version - self.changelog_entry = changelog_entry - self.pull_request_number = pull_request_number + self.new_version = semver.VersionInfo.parse(new_version) + self.comment = comment + self.pull_request_number = int(pull_request_number) + self.export_docs = export_docs async def _run(self) -> StepResult: doc_path = self.context.connector.documentation_file_path @@ -58,15 +61,17 @@ async def _run(self) -> StepResult: output=self.repo_dir, ) try: - updated_doc = self.add_changelog_entry(doc_path.read_text()) + original_markdown = doc_path.read_text() + changelog = Changelog(original_markdown) + changelog.add_entry(self.new_version, datetime.date.today(), self.pull_request_number, self.comment) + updated_doc = changelog.to_markdown() except Exception as e: return StepResult( - step=self, - status=StepStatus.FAILURE, - stdout=f"Could not add changelog entry: {e}", - output=self.repo_dir, + step=self, status=StepStatus.FAILURE, stderr=f"Could not add changelog entry: {e}", output=self.repo_dir, exc_info=e ) updated_repo_dir = self.repo_dir.with_new_file(str(doc_path), contents=updated_doc) + if self.export_docs: + await updated_repo_dir.file(str(doc_path)).export(str(doc_path)) return StepResult( step=self, status=StepStatus.SUCCESS, @@ -74,35 +79,22 @@ async def _run(self) -> StepResult: output=updated_repo_dir, ) - def find_line_index_for_new_entry(self, markdown_text: str) -> int: - lines = markdown_text.splitlines() - for line_index, line in enumerate(lines): - if "version" in line.lower() and "date" in line.lower() and "pull request" in line.lower() and "subject" in line.lower(): - return line_index + 2 - raise Exception("Could not find the changelog section table in the documentation file.") - - def add_changelog_entry(self, og_doc_content: str) -> str: - today = datetime.date.today().strftime("%Y-%m-%d") - lines = og_doc_content.splitlines() - line_index_for_new_entry = self.find_line_index_for_new_entry(og_doc_content) - new_entry = f"| {self.new_version} | {today} | [{self.pull_request_number}](https://github.com/airbytehq/airbyte/pull/{self.pull_request_number}) | {self.changelog_entry} |" - lines.insert(line_index_for_new_entry, new_entry) - return "\n".join(lines) + "\n" - class BumpDockerImageTagInMetadata(Step): context: ConnectorContext - title = "Upgrade the dockerImageTag to the latest version in metadata.yaml" + title = "Upgrade the dockerImageTag to the new version in metadata.yaml" def __init__( self, context: ConnectorContext, repo_dir: Directory, new_version: str, + export_metadata: bool = False, ) -> None: super().__init__(context) self.repo_dir = repo_dir self.new_version = new_version + self.export_metadata = export_metadata @staticmethod def get_metadata_with_bumped_version(previous_version: str, new_version: str, metadata_str: str) -> str: @@ -124,12 +116,13 @@ async def _run(self) -> StepResult: repo_dir_with_updated_metadata = metadata_change_helpers.get_repo_dir_with_updated_metadata_str( self.repo_dir, metadata_path, updated_metadata_str ) - metadata_validation_results = await MetadataValidation(self.context).run() # Exit early if the metadata file is invalid. if metadata_validation_results.status is not StepStatus.SUCCESS: return metadata_validation_results + if self.export_metadata: + await repo_dir_with_updated_metadata.file(str(metadata_path)).export(str(metadata_path)) return StepResult( step=self, status=StepStatus.SUCCESS, diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/commands.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/commands.py index 6bda567160fdb..4527a02c75368 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/commands.py @@ -151,6 +151,7 @@ def should_use_remote_secrets(use_remote_secrets: Optional[bool]) -> bool: "publish": "pipelines.airbyte_ci.connectors.publish.commands.publish", "bump_version": "pipelines.airbyte_ci.connectors.bump_version.commands.bump_version", "migrate_to_base_image": "pipelines.airbyte_ci.connectors.migrate_to_base_image.commands.migrate_to_base_image", + "migrate-to-poetry": "pipelines.airbyte_ci.connectors.migrate_to_poetry.commands.migrate_to_poetry", "upgrade_base_image": "pipelines.airbyte_ci.connectors.upgrade_base_image.commands.upgrade_base_image", "upgrade_cdk": "pipelines.airbyte_ci.connectors.upgrade_cdk.commands.bump_version", }, diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/consts.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/consts.py index 34f0ce1158114..8194dfc38594c 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/consts.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/consts.py @@ -23,6 +23,13 @@ class CONNECTOR_TEST_STEP_ID(str, Enum): VERSION_INC_CHECK = "version_inc_check" TEST_ORCHESTRATOR = "test_orchestrator" DEPLOY_ORCHESTRATOR = "deploy_orchestrator" + UPDATE_README = "update_readme" + ADD_CHANGELOG_ENTRY = "add_changelog_entry" + BUMP_METADATA_VERSION = "bump_metadata_version" + REGRESSION_TEST = "regression_test" + CHECK_MIGRATION_CANDIDATE = "check_migration_candidate" + POETRY_INIT = "poetry_init" + DELETE_SETUP_PY = "delete_setup_py" def __str__(self) -> str: return self.value diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_base_image/commands.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_base_image/commands.py index 7b6196e6eee51..7edb79bd4ecb7 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_base_image/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_base_image/commands.py @@ -2,6 +2,7 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # + import asyncclick as click from pipelines.airbyte_ci.connectors.context import ConnectorContext from pipelines.airbyte_ci.connectors.migrate_to_base_image.pipeline import run_connector_migration_to_base_image_pipeline @@ -14,19 +15,22 @@ cls=DaggerPipelineCommand, short_help="Make the selected connectors use our base image: remove dockerfile, update metadata.yaml and update documentation.", ) -@click.argument("pull-request-number", type=str) +@click.option("--pull-request-number", type=str, required=False, default=None) @click.pass_context async def migrate_to_base_image( ctx: click.Context, - pull_request_number: str, + pull_request_number: str | None, ) -> bool: - """Bump a connector version: update metadata.yaml, changelog and delete legacy files.""" + """ + Bump a connector version: update metadata.yaml, changelog and delete legacy files. + If the `PULL_REQUEST_NUMBER` is not provided, no changelog entry will be added. + """ fail_if_missing_docker_hub_creds(ctx) connectors_contexts = [ ConnectorContext( - pipeline_name=f"Upgrade base image versions of connector {connector.technical_name}", + pipeline_name=f"Upgrade connector {connector.technical_name} to use our base image", connector=connector, is_local=ctx.obj["is_local"], git_branch=ctx.obj["git_branch"], diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_base_image/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_base_image/pipeline.py index cb1f6d357d3ab..b544943deac3b 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_base_image/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_base_image/pipeline.py @@ -284,12 +284,11 @@ async def run_connector_base_image_upgrade_pipeline(context: ConnectorContext, s async def run_connector_migration_to_base_image_pipeline( - context: ConnectorContext, semaphore: "Semaphore", pull_request_number: str + context: ConnectorContext, semaphore: "Semaphore", pull_request_number: str | None ) -> Report: async with semaphore: steps_results = [] async with context: - # DELETE DOCKERFILE delete_docker_file = DeleteConnectorFile( context, "Dockerfile", @@ -330,16 +329,17 @@ async def run_connector_migration_to_base_image_pipeline( bump_version_in_metadata_result = await bump_version_in_metadata.run() steps_results.append(bump_version_in_metadata_result) - # ADD CHANGELOG ENTRY - add_changelog_entry = AddChangelogEntry( - context, - bump_version_in_metadata_result.output, - new_version, - "Base image migration: remove Dockerfile and use the python-connector-base image", - pull_request_number, - ) - add_changelog_entry_result = await add_changelog_entry.run() - steps_results.append(add_changelog_entry_result) + # ADD CHANGELOG ENTRY only if the PR number is provided. + if pull_request_number is not None: + add_changelog_entry = AddChangelogEntry( + context, + bump_version_in_metadata_result.output, + new_version, + "Base image migration: remove Dockerfile and use the python-connector-base image", + pull_request_number, + ) + add_changelog_entry_result = await add_changelog_entry.run() + steps_results.append(add_changelog_entry_result) # UPDATE DOC add_build_instructions_to_doc = AddBuildInstructionsToReadme( diff --git a/airbyte-ci/connectors/qa-engine/qa_engine/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_poetry/__init__.py similarity index 100% rename from airbyte-ci/connectors/qa-engine/qa_engine/__init__.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_poetry/__init__.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_poetry/commands.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_poetry/commands.py new file mode 100644 index 0000000000000..b0b3012a6484c --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_poetry/commands.py @@ -0,0 +1,57 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +import asyncclick as click +from pipelines.airbyte_ci.connectors.context import ConnectorContext +from pipelines.airbyte_ci.connectors.migrate_to_poetry.pipeline import run_connector_migration_to_poetry_pipeline +from pipelines.airbyte_ci.connectors.pipeline import run_connectors_pipelines +from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand + + +@click.command( + cls=DaggerPipelineCommand, + short_help="Migrate the selected connectors to poetry.", +) +@click.pass_context +async def migrate_to_poetry( + ctx: click.Context, +) -> bool: + + connectors_contexts = [ + ConnectorContext( + pipeline_name=f"Migrate {connector.technical_name} to Poetry", + connector=connector, + is_local=ctx.obj["is_local"], + git_branch=ctx.obj["git_branch"], + git_revision=ctx.obj["git_revision"], + ci_report_bucket=ctx.obj["ci_report_bucket_name"], + report_output_prefix=ctx.obj["report_output_prefix"], + use_remote_secrets=ctx.obj["use_remote_secrets"], + gha_workflow_run_url=ctx.obj.get("gha_workflow_run_url"), + dagger_logs_url=ctx.obj.get("dagger_logs_url"), + pipeline_start_timestamp=ctx.obj.get("pipeline_start_timestamp"), + ci_context=ctx.obj.get("ci_context"), + ci_gcs_credentials=ctx.obj["ci_gcs_credentials"], + ci_git_user=ctx.obj["ci_git_user"], + ci_github_access_token=ctx.obj["ci_github_access_token"], + enable_report_auto_open=True, + docker_hub_username=ctx.obj.get("docker_hub_username"), + docker_hub_password=ctx.obj.get("docker_hub_password"), + s3_build_cache_access_key_id=ctx.obj.get("s3_build_cache_access_key_id"), + s3_build_cache_secret_key=ctx.obj.get("s3_build_cache_secret_key"), + ) + for connector in ctx.obj["selected_connectors_with_modified_files"] + ] + + await run_connectors_pipelines( + connectors_contexts, + run_connector_migration_to_poetry_pipeline, + "Migration to poetry pipeline", + ctx.obj["concurrency"], + ctx.obj["dagger_logs_path"], + ctx.obj["execute_timeout"], + ) + + return True diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_poetry/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_poetry/pipeline.py new file mode 100644 index 0000000000000..3680143032367 --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_poetry/pipeline.py @@ -0,0 +1,487 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# +from __future__ import annotations + +import re +from typing import TYPE_CHECKING + +import dagger +import git +import requests +import toml +from connector_ops.utils import ConnectorLanguage # type: ignore +from jinja2 import Environment, PackageLoader, select_autoescape +from pipelines.airbyte_ci.connectors.build_image.steps.python_connectors import BuildConnectorImages +from pipelines.airbyte_ci.connectors.bump_version.pipeline import AddChangelogEntry, BumpDockerImageTagInMetadata, get_bumped_version +from pipelines.airbyte_ci.connectors.consts import CONNECTOR_TEST_STEP_ID +from pipelines.airbyte_ci.connectors.context import ConnectorContext, PipelineContext +from pipelines.airbyte_ci.connectors.reports import ConnectorReport, Report +from pipelines.consts import LOCAL_BUILD_PLATFORM +from pipelines.dagger.actions.python.common import with_python_connector_installed +from pipelines.helpers.execution.run_steps import StepToRun, run_steps +from pipelines.models.steps import Step, StepResult, StepStatus + +if TYPE_CHECKING: + from typing import Iterable, List, Optional + + from anyio import Semaphore + +PACKAGE_NAME_PATTERN = r"^([a-zA-Z0-9_.\-]+)(?:\[(.*?)\])?([=~> StepResult: + connector_dir_entries = await (await self.context.get_connector_dir()).entries() + if self.context.connector.language not in [ConnectorLanguage.PYTHON, ConnectorLanguage.LOW_CODE]: + return StepResult( + step=self, + status=StepStatus.SKIPPED, + stderr="The connector is not a Python connector.", + ) + if self.context.connector.connector_type != "source": + return StepResult( + step=self, + status=StepStatus.SKIPPED, + stderr="The connector is not a source connector.", + ) + if "poetry.lock" in connector_dir_entries and "pyproject.toml" in connector_dir_entries: + return StepResult( + step=self, + status=StepStatus.SKIPPED, + stderr="The connector has already been migrated to poetry.", + ) + if "setup.py" not in connector_dir_entries: + return StepResult( + step=self, + status=StepStatus.SKIPPED, + stderr="The connector can't be migrated to poetry because it does not have a setup.py file.", + ) + if not self.context.connector.metadata.get("connectorBuildOptions", {}).get("baseImage"): + return StepResult( + step=self, + status=StepStatus.SKIPPED, + stderr="The connector can't be migrated to poetry because it does not have a base image defined in the metadata.", + ) + + return StepResult( + step=self, + status=StepStatus.SUCCESS, + ) + + +class PoetryInit(Step): + context: ConnectorContext + + title = "Generate pyproject.toml and poetry.lock" + python_version = "^3.9,<3.12" + build_system = { + "requires": ["poetry-core>=1.0.0"], + "build-backend": "poetry.core.masonry.api", + } + + def __init__(self, context: PipelineContext, new_version: str) -> None: + super().__init__(context) + self.new_version = new_version + + @property + def package_name(self) -> str: + return self.context.connector.technical_name.replace("-", "_") + + def get_package_info(self, package_info: str) -> dict: + package_info_dict = {} + for line in package_info.splitlines(): + # Ignoring locally installed packages + if ":" not in line: + continue + key, value = line.split(": ") + package_info_dict[key] = value + return { + "version": self.context.connector.version, + "name": package_info_dict["Name"], + "description": package_info_dict["Summary"], + "authors": [package_info_dict["Author"] + " <" + package_info_dict["Author-email"] + ">"], + "license": self.context.connector.metadata["license"], + "readme": "README.md", + "documentation": self.context.connector.metadata["documentationUrl"], + "homepage": "https://airbyte.com", + "repository": "https://github.com/airbytehq/airbyte", + } + + def to_poetry_dependencies(self, requirements_style_deps: Iterable[str], latest_dependencies_for_hard_pin: dict) -> dict: + dependencies = {} + for deps in requirements_style_deps: + if "," in deps: + deps = deps.split(",")[0] + match = re.match(PACKAGE_NAME_PATTERN, deps) + assert match, f"Failed to parse package name and version from {deps}" + name = match.group(1) + extras = match.group(2) + version = match.group(3) + if extras: + extras = extras.split(",") + if name in latest_dependencies_for_hard_pin: + version = f"=={latest_dependencies_for_hard_pin[name]}" + elif "~=" in deps: + # We prefer caret (^) over tilde (~) for the version range + # See https://python-poetry.org/docs/dependency-specification/ + version = version.replace("~=", "^") + elif "==" not in deps: + # The package version is not pinned and not installed in the released connector + # It's because it's a test dependency + # Poetry requires version to be declared so we should get the latest version from PyPI + version = f"^{self.get_latest_version_from_pypi(name)}" + if extras: + version = {"extras": extras, "version": version} + dependencies[name] = version + return dependencies + + def get_latest_version_from_pypi(self, package_name: str) -> str: + url = f"https://pypi.org/pypi/{package_name}/json" + + # Send GET request to the PyPI API + response = requests.get(url) + response.raise_for_status() # Raise an exception for any HTTP error status + + # Parse the JSON response + data = response.json() + + # Extract the latest version from the response + latest_version = data["info"]["version"] + + return latest_version + + async def get_dependencies(self, connector_container: dagger.Container, groups: Optional[List[str]] = None) -> set[str]: + package = "." if not groups else f'.[{",".join(groups)}]' + connector_container = await connector_container.with_exec(["pip", "install", package]) + + pip_install_dry_run_output = await connector_container.with_exec(["pip", "install", package, "--dry-run"]).stdout() + + non_transitive_deps = [] + for line in pip_install_dry_run_output.splitlines(): + if "Requirement already satisfied" in line and "->" not in line: + non_transitive_deps.append(line.replace("Requirement already satisfied: ", "").split(" ")[0].replace("_", "-")) + return set(non_transitive_deps) + + async def _run(self) -> StepResult: + base_image_name = self.context.connector.metadata["connectorBuildOptions"]["baseImage"] + base_container = self.dagger_client.container(platform=LOCAL_BUILD_PLATFORM).from_(base_image_name) + connector_container = await with_python_connector_installed( + self.context, + base_container, + str(self.context.connector.code_directory), + ) + with_egg_info = await connector_container.with_exec(["python", "setup.py", "egg_info"]) + + egg_info_dir = with_egg_info.directory(f"{self.package_name}.egg-info") + egg_info_files = {file_path: await egg_info_dir.file(file_path).contents() for file_path in await egg_info_dir.entries()} + + package_info = self.get_package_info(egg_info_files["PKG-INFO"]) + dependencies = await self.get_dependencies(connector_container) + dev_dependencies = await self.get_dependencies(connector_container, groups=["dev", "tests"]) - dependencies + latest_pip_freeze = ( + await self.context.dagger_client.container(platform=LOCAL_BUILD_PLATFORM) + .from_(f"{self.context.connector.metadata['dockerRepository']}:latest") + .with_exec(["pip", "freeze"], skip_entrypoint=True) + .stdout() + ) + latest_dependencies = { + name_version.split("==")[0]: name_version.split("==")[1] + for name_version in latest_pip_freeze.splitlines() + if "==" in name_version + } + poetry_dependencies = self.to_poetry_dependencies(dependencies, latest_dependencies) + poetry_dev_dependencies = self.to_poetry_dependencies(dev_dependencies, latest_dependencies) + scripts = {self.context.connector.technical_name: f"{self.package_name}.run:run"} + + pyproject = { + "build-system": self.build_system, + "tool": { + "poetry": { + **package_info, + "packages": [{"include": self.package_name}], + "dependencies": {"python": self.python_version, **poetry_dependencies}, + "group": {"dev": {"dependencies": poetry_dev_dependencies}}, + "scripts": scripts, + } + }, + } + toml_string = toml.dumps(pyproject) + try: + with_poetry_lock = await connector_container.with_new_file("pyproject.toml", contents=toml_string).with_exec( + ["poetry", "install"] + ) + except dagger.ExecError as e: + return StepResult( + step=self, + status=StepStatus.FAILURE, + stderr=str(e), + ) + with_new_version = await with_poetry_lock.with_exec(["poetry", "version", self.new_version]) + await with_new_version.file("pyproject.toml").export(f"{self.context.connector.code_directory}/pyproject.toml") + self.logger.info(f"Generated pyproject.toml for {self.context.connector.technical_name}") + await with_new_version.file("poetry.lock").export(f"{self.context.connector.code_directory}/poetry.lock") + self.logger.info(f"Generated poetry.lock for {self.context.connector.technical_name}") + return StepResult(step=self, status=StepStatus.SUCCESS, output=(dependencies, dev_dependencies)) + + +class DeleteSetUpPy(Step): + context: ConnectorContext + + title = "Delete setup.py" + + async def _run(self) -> StepResult: + setup_path = self.context.connector.code_directory / "setup.py" + original_setup_py = setup_path.read_text() + setup_path.unlink() + self.logger.info(f"Removed setup.py for {self.context.connector.technical_name}") + return StepResult(step=self, status=StepStatus.SUCCESS, output=original_setup_py) + + +class RestoreOriginalState(Step): + context: ConnectorContext + + title = "Restore original state" + + def __init__(self, context: ConnectorContext) -> None: + super().__init__(context) + self.setup_path = context.connector.code_directory / "setup.py" + self.metadata_path = context.connector.code_directory / "metadata.yaml" + self.pyproject_path = context.connector.code_directory / "pyproject.toml" + self.poetry_lock_path = context.connector.code_directory / "poetry.lock" + self.readme_path = context.connector.code_directory / "README.md" + self.doc_path = context.connector.documentation_file_path + self.original_setup_py = self.setup_path.read_text() if self.setup_path.exists() else None + self.original_metadata = self.metadata_path.read_text() + self.original_docs = self.doc_path.read_text() + self.original_readme = self.readme_path.read_text() + + async def _run(self) -> StepResult: + if self.original_setup_py: + self.setup_path.write_text(self.original_setup_py) + self.logger.info(f"Restored setup.py for {self.context.connector.technical_name}") + self.metadata_path.write_text(self.original_metadata) + self.logger.info(f"Restored metadata.yaml for {self.context.connector.technical_name}") + self.doc_path.write_text(self.original_docs) + self.logger.info(f"Restored documentation file for {self.context.connector.technical_name}") + self.readme_path.write_text(self.original_readme) + self.logger.info(f"Restored README.md for {self.context.connector.technical_name}") + if self.poetry_lock_path.exists(): + self.poetry_lock_path.unlink() + self.logger.info(f"Removed poetry.lock for {self.context.connector.technical_name}") + if self.pyproject_path.exists(): + self.pyproject_path.unlink() + self.logger.info(f"Removed pyproject.toml for {self.context.connector.technical_name}") + + return StepResult( + step=self, + status=StepStatus.SUCCESS, + ) + + +class RegressionTest(Step): + """Run the regression test for the connector. + We test that: + - The original dependencies are installed in the new connector image. + - The dev dependencies are not installed in the new connector image. + - The connector spec command successfully. + """ + + context: ConnectorContext + + title = "Run regression test" + + async def _run( + self, new_connector_container: dagger.Container, original_dependencies: List[str], original_dev_dependencies: List[str] + ) -> StepResult: + try: + await self.check_all_original_deps_are_installed(new_connector_container, original_dependencies, original_dev_dependencies) + except (AttributeError, AssertionError) as e: + return StepResult( + step=self, + status=StepStatus.FAILURE, + stderr=f"Failed checking if the original dependencies are installed:\n {str(e)}", + exc_info=e, + ) + + try: + await new_connector_container.with_exec(["spec"]) + await new_connector_container.with_mounted_file( + "pyproject.toml", (await self.context.get_connector_dir(include=["pyproject.toml"])).file("pyproject.toml") + ).with_exec(["poetry", "run", self.context.connector.technical_name, "spec"], skip_entrypoint=True) + except dagger.ExecError as e: + return StepResult( + step=self, + status=StepStatus.FAILURE, + stderr=str(e), + ) + return StepResult( + step=self, + status=StepStatus.SUCCESS, + ) + + async def check_all_original_deps_are_installed( + self, new_connector_container: dagger.Container, original_main_dependencies: List[str], original_dev_dependencies: List[str] + ) -> None: + previous_pip_freeze = ( + await self.dagger_client.container(platform=LOCAL_BUILD_PLATFORM) + .from_(f'{self.context.connector.metadata["dockerRepository"]}:latest') + .with_exec(["pip", "freeze"], skip_entrypoint=True) + .stdout() + ).splitlines() + current_pip_freeze = (await new_connector_container.with_exec(["pip", "freeze"], skip_entrypoint=True).stdout()).splitlines() + main_dependencies_names = [] + for dep in original_main_dependencies: + match = re.match(PACKAGE_NAME_PATTERN, dep) + if match: + main_dependencies_names.append(match.group(1)) + + dev_dependencies_names = [] + for dep in original_dev_dependencies: + match = re.match(PACKAGE_NAME_PATTERN, dep) + if match: + dev_dependencies_names.append(match.group(1)) + + previous_package_name_version_mapping: dict[str, str] = {} + for dep in previous_pip_freeze: + if "==" in dep: + match = re.match(PACKAGE_NAME_PATTERN, dep) + if match: + previous_package_name_version_mapping[match.group(1)] = dep + + current_package_name_version_mapping: dict[str, str] = {} + for dep in current_pip_freeze: + if "==" in dep: + match = re.match(PACKAGE_NAME_PATTERN, dep) + if match: + current_package_name_version_mapping[match.group(1)] = dep + + for main_dep in main_dependencies_names: + assert main_dep in current_package_name_version_mapping, f"{main_dep} not found in the latest pip freeze" + assert ( + current_package_name_version_mapping[main_dep] == previous_package_name_version_mapping[main_dep] + ), f"Poetry installed a different version of {main_dep} than the previous version. Previous: {previous_package_name_version_mapping[main_dep]}, current: {current_package_name_version_mapping[main_dep]}" + for dev_dep in dev_dependencies_names: + if dev_dep not in main_dependencies_names: + assert ( + dev_dep not in current_package_name_version_mapping + ), f"A dev dependency ({dev_dep}) was installed by poetry in the container image" + + +class UpdateReadMe(Step): + context: ConnectorContext + + title = "Update README.md" + + async def _run(self) -> StepResult: + readme_path = self.context.connector.code_directory / "README.md" + jinja_env = Environment( + loader=PackageLoader("pipelines.airbyte_ci.connectors.migrate_to_poetry"), + autoescape=select_autoescape(), + trim_blocks=False, + lstrip_blocks=True, + ) + readme_template = jinja_env.get_template("README.md.j2") + updated_readme = readme_template.render(connector=self.context.connector) + readme_path.write_text(updated_readme) + return StepResult( + step=self, + status=StepStatus.SUCCESS, + ) + + +async def run_connector_migration_to_poetry_pipeline(context: ConnectorContext, semaphore: "Semaphore") -> Report: + restore_original_state = RestoreOriginalState(context) + new_version = get_bumped_version(context.connector.version, "patch") + context.targeted_platforms = [LOCAL_BUILD_PLATFORM] + steps_to_run: list[StepToRun | list[StepToRun]] = [ + [StepToRun(id=CONNECTOR_TEST_STEP_ID.CHECK_MIGRATION_CANDIDATE, step=CheckIsMigrationCandidate(context))], + [ + StepToRun( + id=CONNECTOR_TEST_STEP_ID.POETRY_INIT, + step=PoetryInit(context, new_version), + depends_on=[CONNECTOR_TEST_STEP_ID.CHECK_MIGRATION_CANDIDATE], + ) + ], + [ + StepToRun( + id=CONNECTOR_TEST_STEP_ID.DELETE_SETUP_PY, step=DeleteSetUpPy(context), depends_on=[CONNECTOR_TEST_STEP_ID.POETRY_INIT] + ) + ], + [ + StepToRun( + id=CONNECTOR_TEST_STEP_ID.BUILD, step=BuildConnectorImages(context), depends_on=[CONNECTOR_TEST_STEP_ID.DELETE_SETUP_PY] + ) + ], + [ + StepToRun( + id=CONNECTOR_TEST_STEP_ID.REGRESSION_TEST, + step=RegressionTest(context), + depends_on=[CONNECTOR_TEST_STEP_ID.BUILD], + args=lambda results: { + "new_connector_container": results["BUILD_CONNECTOR_IMAGE"].output[LOCAL_BUILD_PLATFORM], + "original_dependencies": results["POETRY_INIT"].output[0], + "original_dev_dependencies": results["POETRY_INIT"].output[1], + }, + ) + ], + [ + StepToRun( + id=CONNECTOR_TEST_STEP_ID.BUMP_METADATA_VERSION, + step=BumpDockerImageTagInMetadata( + context, await context.get_repo_dir(include=[str(context.connector.code_directory)]), new_version, export_metadata=True + ), + depends_on=[CONNECTOR_TEST_STEP_ID.REGRESSION_TEST], + ) + ], + [ + StepToRun( + id=CONNECTOR_TEST_STEP_ID.ADD_CHANGELOG_ENTRY, + step=AddChangelogEntry( + context, + await context.get_repo_dir(include=[str(context.connector.local_connector_documentation_directory)]), + new_version, + "Manage dependencies with Poetry.", + "0", + export_docs=True, + ), + depends_on=[CONNECTOR_TEST_STEP_ID.REGRESSION_TEST], + ) + ], + [ + StepToRun( + id=CONNECTOR_TEST_STEP_ID.UPDATE_README, step=UpdateReadMe(context), depends_on=[CONNECTOR_TEST_STEP_ID.REGRESSION_TEST] + ) + ], + ] + async with semaphore: + async with context: + try: + result_dict = await run_steps( + runnables=steps_to_run, + options=context.run_step_options, + ) + except Exception as e: + await restore_original_state.run() + raise e + results = list(result_dict.values()) + if any(step_result.status is StepStatus.FAILURE for step_result in results): + await restore_original_state.run() + report = ConnectorReport(context, steps_results=results, name="TEST RESULTS") + context.report = report + + return report diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_poetry/templates/README.md.j2 b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_poetry/templates/README.md.j2 new file mode 100644 index 0000000000000..4ae7bae23b63d --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_poetry/templates/README.md.j2 @@ -0,0 +1,91 @@ +# {{ connector.name.title()}} source connector + + +This is the repository for the {{ connector.name.title() }} source connector, written in Python. +For information about how to use this connector within Airbyte, see [the documentation]({{ connector.metadata['documentationUrl']}}). + +## Local development + +### Prerequisites +* Python (~=3.9) +* Poetry (~=1.7) - installation instructions [here](https://python-poetry.org/docs/#installation) + + +### Installing the connector +From this connector directory, run: +```bash +poetry install --with dev +``` + + +### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation]({{ connector.metadata['documentationUrl']}}) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `{{ connector.technical_name.replace('-', '_')}}/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `sample_files/sample_config.json` for a sample config file. + + +### Locally running the connector +``` +poetry run {{ connector.technical_name}} spec +poetry run {{ connector.technical_name}} check --config secrets/config.json +poetry run {{ connector.technical_name}} discover --config secrets/config.json +poetry run {{ connector.technical_name}} read --config secrets/config.json --catalog sample_files/configured_catalog.json +``` + +### Running unit tests +To run unit tests locally, from the connector directory run: +``` +poetry run pytest unit_tests +``` + +### Building the docker image +1. Install [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) +2. Run the following command to build the docker image: +```bash +airbyte-ci connectors --name={{ connector.technical_name }} build +``` + +An image will be available on your host with the tag `airbyte/{{ connector.technical_name}}:dev`. + + +### Running as a docker container +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/{{ connector.technical_name }}:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/{{ connector.technical_name }}:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/{{ connector.technical_name }}:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/{{ connector.technical_name }}:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` + +### Running our CI test suite +You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): +```bash +airbyte-ci connectors --name={{ connector.technical_name }} test +``` + +### Customizing acceptance Tests +Customize `acceptance-test-config.yml` file to configure acceptance tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. + +### Dependency Management +All of your dependencies should be managed via Poetry. +To add a new dependency, run: +```bash +poetry add +``` + +Please commit the changes to `pyproject.toml` and `poetry.lock` files. + +## Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name={{ connector.technical_name }} test` +2. Bump the connector version (please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors)): + - bump the `dockerImageTag` value in in `metadata.yaml` + - bump the `version` value in `pyproject.toml` +3. Make sure the `metadata.yaml` content is up to date. +4. Make sure the connector documentation and its changelog is up to date (`{{ connector.documentation_file_path}}`). +5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). +6. Pat yourself on the back for being an awesome contributor. +7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. +8. Once your PR is merged, the new version of the connector will be automatically published to Docker Hub and our connector registry. \ No newline at end of file diff --git a/airbyte-lib/tests/docs_tests/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_poetry/templates/__init__.py similarity index 100% rename from airbyte-lib/tests/docs_tests/__init__.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_poetry/templates/__init__.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py index 88c917211946d..7cd7e4c0a45c1 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py @@ -4,22 +4,25 @@ import json import uuid -from typing import List, Tuple +from datetime import datetime +from typing import Dict, List, Tuple import anyio from airbyte_protocol.models.airbyte_protocol import ConnectorSpecification # type: ignore -from dagger import Container, ExecError, File, ImageLayerCompression, QueryError +from connector_ops.utils import ConnectorLanguage # type: ignore +from dagger import Container, ExecError, File, ImageLayerCompression, Platform, QueryError from pipelines import consts from pipelines.airbyte_ci.connectors.build_image import steps from pipelines.airbyte_ci.connectors.publish.context import PublishConnectorContext from pipelines.airbyte_ci.connectors.reports import ConnectorReport from pipelines.airbyte_ci.metadata.pipeline import MetadataUpload, MetadataValidation from pipelines.airbyte_ci.steps.python_registry import PublishToPythonRegistry, PythonRegistryPublishContext +from pipelines.consts import LOCAL_BUILD_PLATFORM from pipelines.dagger.actions.remote_storage import upload_to_gcs from pipelines.dagger.actions.system import docker from pipelines.helpers.pip import is_package_published from pipelines.models.steps import Step, StepResult, StepStatus -from pydantic import ValidationError +from pydantic import BaseModel, ValidationError class InvalidSpecOutputError(Exception): @@ -76,6 +79,58 @@ async def _run(self) -> StepResult: ) +class ConnectorDependenciesMetadata(BaseModel): + connector_technical_name: str + connector_repository: str + connector_version: str + connector_definition_id: str + dependencies: List[Dict[str, str]] + generation_time: datetime = datetime.utcnow() + + +class UploadDependenciesToMetadataService(Step): + context: PublishConnectorContext + title = "Upload connector dependencies list to GCS." + key_prefix = "connector_dependencies" + + async def _run(self, built_containers_per_platform: Dict[Platform, Container]) -> StepResult: + assert self.context.connector.language in [ + ConnectorLanguage.PYTHON, + ConnectorLanguage.LOW_CODE, + ], "This step can only run for Python connectors." + built_container = built_containers_per_platform[LOCAL_BUILD_PLATFORM] + pip_freeze_output = await built_container.with_exec(["pip", "freeze"], skip_entrypoint=True).stdout() + dependencies = [ + {"package_name": line.split("==")[0], "version": line.split("==")[1]} for line in pip_freeze_output.splitlines() if "==" in line + ] + connector_technical_name = self.context.connector.technical_name + connector_version = self.context.metadata["dockerImageTag"] + dependencies_metadata = ConnectorDependenciesMetadata( + connector_technical_name=connector_technical_name, + connector_repository=self.context.metadata["dockerRepository"], + connector_version=connector_version, + connector_definition_id=self.context.metadata["definitionId"], + dependencies=dependencies, + ).json() + file = ( + (await self.context.get_connector_dir()) + .with_new_file("dependencies.json", contents=dependencies_metadata) + .file("dependencies.json") + ) + key = f"{self.key_prefix}/{connector_technical_name}/{connector_version}/dependencies.json" + exit_code, stdout, stderr = await upload_to_gcs( + self.context.dagger_client, + file, + key, + self.context.metadata_bucket_name, + self.context.metadata_service_gcs_credentials_secret, + flags=['--cache-control="no-cache"'], + ) + if exit_code != 0: + return StepResult(step=self, status=StepStatus.FAILURE, stdout=stdout, stderr=stderr) + return StepResult(step=self, status=StepStatus.SUCCESS, stdout="Uploaded connector dependencies to metadata service bucket.") + + class PushConnectorImageToRegistry(Step): context: PublishConnectorContext title = "Push connector image to registry" @@ -282,7 +337,6 @@ def create_connector_report(results: List[StepResult]) -> ConnectorReport: check_connector_image_results = await CheckConnectorImageDoesNotExist(context).run() results.append(check_connector_image_results) - python_registry_steps, terminate_early = await _run_python_registry_publish_pipeline(context) results.extend(python_registry_steps) if terminate_early: @@ -313,6 +367,10 @@ def create_connector_report(results: List[StepResult]) -> ConnectorReport: if build_connector_results.status is not StepStatus.SUCCESS: return create_connector_report(results) + if context.connector.language in [ConnectorLanguage.PYTHON, ConnectorLanguage.LOW_CODE]: + upload_dependencies_step = await UploadDependenciesToMetadataService(context).run(build_connector_results.output) + results.append(upload_dependencies_step) + built_connector_platform_variants = list(build_connector_results.output.values()) push_connector_image_results = await PushConnectorImageToRegistry(context).run(built_connector_platform_variants) results.append(push_connector_image_results) diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/python_connectors.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/python_connectors.py index c7cc04cea7f39..538b07f0c339f 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/python_connectors.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/python_connectors.py @@ -22,6 +22,9 @@ from pipelines.helpers.execution.run_steps import STEP_TREE, StepToRun from pipelines.models.steps import STEP_PARAMS, Step, StepResult +# Pin the PyAirbyte version to avoid updates from breaking CI +PYAIRBYTE_VERSION = "0.10.2" + class PytestStep(Step, ABC): """An abstract class to run pytest tests and evaluate success or failure according to pytest logs.""" @@ -192,10 +195,10 @@ def default_params(self) -> STEP_PARAMS: return super().default_params | coverage_options -class AirbyteLibValidation(Step): - """A step to validate the connector will work with airbyte-lib, using the airbyte-lib validation helper.""" +class PyAirbyteValidation(Step): + """A step to validate the connector will work with PyAirbyte, using the PyAirbyte validation helper.""" - title = "AirbyteLib validation tests" + title = "PyAirbyte validation tests" context: ConnectorContext @@ -207,7 +210,7 @@ async def _run(self, connector_under_test: Container) -> StepResult: StepResult: Failure or success of the unit tests with stdout and stdout. """ if dpath.util.get(self.context.connector.metadata, "remoteRegistries/pypi/enabled", default=False) is False: - return self.skip("Connector is not published on pypi, skipping airbyte-lib validation.") + return self.skip("Connector is not published on pypi, skipping PyAirbyte validation.") test_environment = await self.install_testing_environment(with_poetry(self.context)) test_execution = test_environment.with_( @@ -220,7 +223,7 @@ async def install_testing_environment( self, built_connector_container: Container, ) -> Container: - """Add airbyte-lib and secrets to the test environment.""" + """Add PyAirbyte and secrets to the test environment.""" context: ConnectorContext = self.context container_with_test_deps = await pipelines.dagger.actions.python.common.with_python_package( @@ -230,7 +233,7 @@ async def install_testing_environment( [ "pip", "install", - "airbyte-lib", + f"airbyte=={PYAIRBYTE_VERSION}", ] ) @@ -266,7 +269,7 @@ def get_test_steps(context: ConnectorContext) -> STEP_TREE: ), StepToRun( id=CONNECTOR_TEST_STEP_ID.AIRBYTE_LIB_VALIDATION, - step=AirbyteLibValidation(context), + step=PyAirbyteValidation(context), args=lambda results: {"connector_under_test": results[CONNECTOR_TEST_STEP_ID.BUILD].output[LOCAL_BUILD_PLATFORM]}, depends_on=[CONNECTOR_TEST_STEP_ID.BUILD], ), diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/steps/gradle.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/steps/gradle.py index 456e6c3f1aa32..0c8f7f953b968 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/steps/gradle.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/steps/gradle.py @@ -1,11 +1,13 @@ # # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # +import xml.etree.ElementTree as ET from abc import ABC from datetime import datetime from typing import Any, ClassVar, List, Optional, Tuple, cast import pipelines.dagger.actions.system.docker +import requests from dagger import CacheSharingMode, CacheVolume, Container, ExecError from pipelines.airbyte_ci.connectors.context import ConnectorContext from pipelines.consts import AMAZONCORRETTO_IMAGE @@ -32,6 +34,9 @@ class GradleTask(Step, ABC): GRADLE_DEP_CACHE_PATH = "/root/gradle-cache" GRADLE_HOME_PATH = "/root/.gradle" STATIC_GRADLE_OPTIONS = ("--no-daemon", "--no-watch-fs", "--build-cache", "--scan", "--console=plain") + CDK_MAVEN_METADATA_URL = ( + "https://airbyte.mycloudrepo.io/public/repositories/airbyte-public-jars/io/airbyte/cdk/airbyte-cdk-core/maven-metadata.xml" + ) gradle_task_name: ClassVar[str] bind_to_docker_host: ClassVar[bool] = False mount_connector_secrets: ClassVar[bool] = False @@ -65,6 +70,14 @@ def _get_gradle_command(self, task: str, *args: Any, task_options: Optional[List task_options = task_options or [] return f"./gradlew {' '.join(self.gradle_task_options + args)} {task} {' '.join(task_options)}" + def get_last_cdk_update_time(self) -> str: + response = requests.get(self.CDK_MAVEN_METADATA_URL) + response.raise_for_status() + last_updated = ET.fromstring(response.text).find(".//lastUpdated") + if last_updated is None or last_updated.text is None: + raise ValueError(f"Could not find the lastUpdated field in the CDK maven metadata at {self.CDK_MAVEN_METADATA_URL}") + return last_updated.text + async def _run(self, *args: Any, **kwargs: Any) -> StepResult: include = [ ".root", @@ -150,6 +163,8 @@ async def _run(self, *args: Any, **kwargs: Any) -> StepResult: gradle_container_base # Mount the whole repo. .with_directory("/airbyte", self.context.get_repo_dir(".")) + # Burst the cache if a new CDK version was released. + .with_env_variable("CDK_LAST_UPDATE", self.get_last_cdk_update_time()) # Update the cache in place by executing a gradle task which will update all dependencies. .with_exec( sh_dash_c( diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/test/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/test/__init__.py index 43bcee98b3884..c2ec33f857dff 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/test/__init__.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/test/__init__.py @@ -5,17 +5,17 @@ from pathlib import Path INTERNAL_POETRY_PACKAGES = [ - "airbyte-lib", "airbyte-ci/connectors/pipelines", "airbyte-ci/connectors/base_images", "airbyte-ci/connectors/common_utils", "airbyte-ci/connectors/connector_ops", "airbyte-ci/connectors/connectors_qa", "airbyte-ci/connectors/ci_credentials", - "airbyte-ci/connectors/live-tests", + # This will move to a different repo + #"airbyte-ci/connectors/live-tests", "airbyte-ci/connectors/metadata_service/lib", "airbyte-ci/connectors/metadata_service/orchestrator", - "airbyte-integrations/bases/connector-acceptance-test" + "airbyte-integrations/bases/connector-acceptance-test", ] INTERNAL_POETRY_PACKAGES_PATH = [Path(package) for package in INTERNAL_POETRY_PACKAGES] diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/test/models.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/test/models.py index fd4ed95de5abf..d1ba68f4be035 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/test/models.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/test/models.py @@ -11,7 +11,8 @@ class AirbyteCiPackageConfiguration(BaseModel): required_environment_variables: Set[str] = Field( set(), description="List of unique required environment variables to pass to the container running the poe task" ) - extra_poetry_groups: Set[str] = Field(set(), description="List of unique extra poetry groups to install") + poetry_extras: Set[str] = Field(set(), description="List of unique poetry extras to install") + optional_poetry_groups: Set[str] = Field(set(), description="List of unique poetry groups to install") side_car_docker_engine: bool = Field( False, description="Flag indicating the use of a sidecar Docker engine during the poe task executions" ) diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/test/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/test/pipeline.py index f8ada153f44c6..ab99ecff49198 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/test/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/test/pipeline.py @@ -191,7 +191,11 @@ def prepare_container_for_poe_tasks( container = container.with_workdir(f"/airbyte/{poetry_package_path}") # Install the poetry package - container = container.with_exec(["poetry", "install"] + [f"--with={group}" for group in airbyte_ci_package_config.extra_poetry_groups]) + container = container.with_exec( + ["poetry", "install"] + + [f"--with={group}" for group in airbyte_ci_package_config.optional_poetry_groups] + + [f"--extras={extra}" for extra in airbyte_ci_package_config.poetry_extras] + ) return container diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/common.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/common.py index 821719aa554f6..fff7611c5ec53 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/common.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/common.py @@ -9,7 +9,7 @@ from dagger import Container, Directory from pipelines import hacks from pipelines.airbyte_ci.connectors.context import ConnectorContext, PipelineContext -from pipelines.dagger.containers.python import with_pip_cache, with_python_base, with_testing_dependencies +from pipelines.dagger.containers.python import with_pip_cache, with_poetry_cache, with_python_base, with_testing_dependencies from pipelines.helpers.utils import check_path_in_workdir, get_file_contents @@ -211,9 +211,7 @@ async def with_installed_python_package( has_pyproject_toml = await check_path_in_workdir(container, "pyproject.toml") if has_pyproject_toml: - # This is a temporary change in order to scope an issue. There should be following action items once we have more information. - # maxi297 has an action item on his calendar for 2024-03-21 to review this - # container = with_poetry_cache(container, context.dagger_client) + container = with_poetry_cache(container, context.dagger_client) container = _install_python_dependencies_from_poetry(container, additional_dependency_groups, install_root_package) elif has_setup_py: container = with_pip_cache(container, context.dagger_client) diff --git a/airbyte-ci/connectors/pipelines/pipelines/helpers/changelog.py b/airbyte-ci/connectors/pipelines/pipelines/helpers/changelog.py new file mode 100644 index 0000000000000..bbe724c4832c6 --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/helpers/changelog.py @@ -0,0 +1,121 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +import datetime +import re +from dataclasses import dataclass +from operator import attrgetter +from typing import Set, Tuple + +import semver +from pipelines.helpers.github import AIRBYTE_GITHUB_REPO + + +class ChangelogParsingException(Exception): + pass + + +@dataclass(frozen=True) +class ChangelogEntry: + date: datetime.date + version: semver.Version + pr_number: int + comment: str + + def to_markdown(self, github_repo: str = AIRBYTE_GITHUB_REPO) -> str: + return f'| {self.version} | {self.date.strftime("%Y-%m-%d")} | [{self.pr_number}](https://github.com/{github_repo}/pull/{self.pr_number}) | {self.comment} |' + + def __str__(self) -> str: + return f'version={self.version}, data={self.date.strftime("%Y-%m-%d")}, pr_number={self.pr_number}, comment={self.comment}' + + def __repr__(self) -> str: + return "ChangelogEntry: " + self.__str__() + + def __eq__(self, other: object) -> bool: + if not isinstance(other, ChangelogEntry): + return False + retVal = ( + self.date == other.date + and self.version == other.version + and self.pr_number == other.pr_number + and self.comment == other.comment + ) + return retVal + + def __ne__(self, other: object) -> bool: + return not (self.__eq__(other)) + + def __hash__(self) -> int: + return self.__str__().__hash__() + + +def parse_markdown(markdown_lines: list[str], github_repo: str) -> Tuple[int, Set[ChangelogEntry]]: + """This parses the markdown to find the changelog table, and then populates entries with the existing entries""" + changelog_entry_re = ( + "^\\| *(?P[0-9]+\\.[0-9+]+\\.[0-9]+?) *\\| *" + + "(?P[0-9]{4}-[0-9]{2}-[0-9]{2}) *\\| *" + + "\\[?(?P[0-9]+)\\]? ?\\(https://github.com/" + + github_repo + + "/pull/(?P[0-9]+)\\) *\\| *" + + "(?P[^ ].*[^ ]) *\\| *$" + ) + changelog_header_line_index = -1 + changelog_line_enumerator = enumerate(markdown_lines) + for line_index, line in changelog_line_enumerator: + if re.search(r"\| *Version *\| *Date *\| *Pull Request *\| *Subject *\|", line): + changelog_header_line_index = line_index + break + if changelog_header_line_index == -1: + raise ChangelogParsingException("Could not find the changelog section table in the documentation file.") + if markdown_lines[changelog_header_line_index - 1] != "": + raise ChangelogParsingException( + "Found changelog section table in the documentation file at line but there is not blank line before it." + ) + if not re.search(r"(\|[- :]*){4}\|", next(changelog_line_enumerator)[1]): + raise ChangelogParsingException("The changelog table in the documentation file is missing the header delimiter.") + changelog_entries_start_line_index = changelog_header_line_index + 2 + + # parse next line to see if it needs to be cut + entries = set() + for line_index, line in changelog_line_enumerator: + changelog_entry_regexp = re.search(changelog_entry_re, line) + if not changelog_entry_regexp or changelog_entry_regexp.group("pr_number1") != changelog_entry_regexp.group("pr_number2"): + break + entry_version = semver.VersionInfo.parse(changelog_entry_regexp.group("version")) + entry_date = datetime.datetime.strptime(changelog_entry_regexp.group("day"), "%Y-%m-%d").date() + entry_pr_number = int(changelog_entry_regexp.group("pr_number1")) + entry_comment = changelog_entry_regexp.group("comment") + changelog_entry = ChangelogEntry(entry_date, entry_version, entry_pr_number, entry_comment) + entries.add(changelog_entry) + + return changelog_entries_start_line_index, entries + + +class Changelog: + def __init__(self, markdown: str, github_repo: str = AIRBYTE_GITHUB_REPO) -> None: + self.original_markdown_lines = markdown.splitlines() + self.changelog_entries_start_line_index, self.original_entries = parse_markdown(self.original_markdown_lines, github_repo) + self.new_entries: Set[ChangelogEntry] = set() + self.github_repo = github_repo + + def add_entry(self, version: semver.Version, date: datetime.date, pull_request_number: int, comment: str) -> None: + self.new_entries.add(ChangelogEntry(date, version, pull_request_number, comment)) + + def to_markdown(self) -> str: + all_entries = set(self.original_entries.union(self.new_entries)) + sorted_entries = sorted( + sorted( + sorted(sorted(all_entries, key=attrgetter("comment"), reverse=True), key=attrgetter("pr_number"), reverse=True), + key=attrgetter("date"), + reverse=True, + ), + key=attrgetter("version"), + reverse=True, + ) + new_lines = ( + self.original_markdown_lines[: self.changelog_entries_start_line_index] + + [line.to_markdown(self.github_repo) for line in sorted_entries] + + self.original_markdown_lines[(self.changelog_entries_start_line_index + len(self.original_entries)) :] + ) + return "\n".join(new_lines) + "\n" diff --git a/airbyte-ci/connectors/pipelines/pipelines/helpers/sentry_utils.py b/airbyte-ci/connectors/pipelines/pipelines/helpers/sentry_utils.py index fb1a44138a60e..28256cc78789c 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/helpers/sentry_utils.py +++ b/airbyte-ci/connectors/pipelines/pipelines/helpers/sentry_utils.py @@ -22,7 +22,7 @@ def initialize() -> None: sentry_sdk.init( dsn=os.environ.get("SENTRY_DSN"), environment=os.environ.get("SENTRY_ENVIRONMENT") or "production", - before_send=before_send, + before_send=before_send, # type: ignore release=f"pipelines@{importlib.metadata.version('pipelines')}", ) diff --git a/airbyte-ci/connectors/pipelines/poetry.lock b/airbyte-ci/connectors/pipelines/poetry.lock index 04bcf0652f85b..e43499a41f25c 100644 --- a/airbyte-ci/connectors/pipelines/poetry.lock +++ b/airbyte-ci/connectors/pipelines/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "airbyte-connectors-base-images" @@ -10,6 +10,7 @@ files = [] develop = true [package.dependencies] +beartype = "<0.17.2" connector-ops = {path = "../connector_ops", develop = true} dagger-io = "==0.9.6" gitpython = "^3.1.35" @@ -24,13 +25,13 @@ url = "../base_images" [[package]] name = "airbyte-protocol-models" -version = "0.6.0" +version = "0.8.0" description = "Declares the Airbyte Protocol." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte_protocol_models-0.6.0-py3-none-any.whl", hash = "sha256:dda91403c9731ecbadffaf05dbe8d24f0d318a189d26fcb727627291837a085c"}, - {file = "airbyte_protocol_models-0.6.0.tar.gz", hash = "sha256:84a0bb0fbedc777f8066295960461ab4a8ab6af63985c21c39bb589569786bc2"}, + {file = "airbyte_protocol_models-0.8.0-py3-none-any.whl", hash = "sha256:45357703a92eab4bd573f446306365acef9f4d3fe15d07fc713f519078df3f10"}, + {file = "airbyte_protocol_models-0.8.0.tar.gz", hash = "sha256:b147dbf15d40b0c5e3f1bf5058e7f219a4ff2e94ee23334f468ec5802809e56f"}, ] [package.dependencies] @@ -81,13 +82,13 @@ trio = ["trio (<0.22)"] [[package]] name = "asyncclick" -version = "8.1.7.1" +version = "8.1.7.2" description = "Composable command line interface toolkit, async version" optional = false python-versions = ">=3.7" files = [ - {file = "asyncclick-8.1.7.1-py3-none-any.whl", hash = "sha256:e0fea5f0223ac45cfc26153cc80a58cc65fc077ac8de79be49248c918e8c3422"}, - {file = "asyncclick-8.1.7.1.tar.gz", hash = "sha256:a47b61258a689212cf9463fbf3b4cc52d05bfd03185f6ead2315fc03fd17ef75"}, + {file = "asyncclick-8.1.7.2-py3-none-any.whl", hash = "sha256:1ab940b04b22cb89b5b400725132b069d01b0c3472a9702c7a2c9d5d007ded02"}, + {file = "asyncclick-8.1.7.2.tar.gz", hash = "sha256:219ea0f29ccdc1bb4ff43bcab7ce0769ac6d48a04f997b43ec6bee99a222daa0"}, ] [package.dependencies] @@ -108,16 +109,6 @@ files = [ [package.dependencies] anyio = ">=3.4.0,<4.0.0" -[[package]] -name = "atomicwrites" -version = "1.4.1" -description = "Atomic file writes." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, -] - [[package]] name = "attrs" version = "23.2.0" @@ -150,20 +141,20 @@ files = [ [[package]] name = "beartype" -version = "0.17.0" +version = "0.17.1" description = "Unbearably fast runtime type checking in pure Python." optional = false python-versions = ">=3.8.0" files = [ - {file = "beartype-0.17.0-py3-none-any.whl", hash = "sha256:fa84b77a8d037f2a39c4aa2f3dc71854afc7d79312e55a66b338da68fdd48c60"}, - {file = "beartype-0.17.0.tar.gz", hash = "sha256:3226fbba8c53b4e698acdb47dcaf3c0640151c4d405618c281e6631f4112947d"}, + {file = "beartype-0.17.1-py3-none-any.whl", hash = "sha256:583deb076e312f5acc2e2928706af2facab1f4282be775ee619e6f42c290f423"}, + {file = "beartype-0.17.1.tar.gz", hash = "sha256:001df1ce51c76f0a21c2183215b26254b667fd8b688a6cbe8f013907cdaaf9b3"}, ] [package.extras] all = ["typing-extensions (>=3.10.0.0)"] -dev = ["autoapi (>=0.9.0)", "coverage (>=5.5)", "equinox", "mypy (>=0.800)", "numpy", "pandera", "pydata-sphinx-theme (<=0.7.2)", "pytest (>=4.0.0)", "sphinx", "sphinx (>=4.2.0,<6.0.0)", "sphinxext-opengraph (>=0.7.5)", "torch", "tox (>=3.20.1)", "typing-extensions (>=3.10.0.0)"] +dev = ["autoapi (>=0.9.0)", "coverage (>=5.5)", "equinox", "mypy (>=0.800)", "numpy", "pandera", "pydata-sphinx-theme (<=0.7.2)", "pytest (>=4.0.0)", "sphinx", "sphinx (>=4.2.0,<6.0.0)", "sphinxext-opengraph (>=0.7.5)", "tox (>=3.20.1)", "typing-extensions (>=3.10.0.0)"] doc-rtd = ["autoapi (>=0.9.0)", "pydata-sphinx-theme (<=0.7.2)", "sphinx (>=4.2.0,<6.0.0)", "sphinxext-opengraph (>=0.7.5)"] -test-tox = ["equinox", "mypy (>=0.800)", "numpy", "pandera", "pytest (>=4.0.0)", "sphinx", "torch", "typing-extensions (>=3.10.0.0)"] +test-tox = ["equinox", "mypy (>=0.800)", "numpy", "pandera", "pytest (>=4.0.0)", "sphinx", "typing-extensions (>=3.10.0.0)"] test-tox-coverage = ["coverage (>=5.5)"] [[package]] @@ -184,13 +175,13 @@ wcwidth = ">=0.1.4" [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -220,13 +211,13 @@ ujson = ["ujson (>=5.7.0)"] [[package]] name = "certifi" -version = "2023.11.17" +version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, - {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] [[package]] @@ -404,8 +395,9 @@ develop = false [package.dependencies] click = "^8.1.3" common_utils = {path = "../common_utils", develop = true} +cryptography = ">=42.0" pyyaml = "^6.0" -requests = "^2.28.2" +requests = "^2.31" [package.source] type = "directory" @@ -446,9 +438,8 @@ files = [] develop = true [package.dependencies] -cryptography = "^3.4.7" -pyjwt = "^2.1.0" -requests = "^2.28.2" +pyjwt = "^2.8.0" +requests = "^2.31.0" [package.source] type = "directory" @@ -456,7 +447,7 @@ url = "../common_utils" [[package]] name = "connector-ops" -version = "0.3.3" +version = "0.3.4" description = "Packaged maintained by the connector operations team to perform CI for connectors" optional = false python-versions = "^3.10" @@ -470,10 +461,10 @@ GitPython = "^3.1.29" google-cloud-storage = "^2.8.0" pandas = "^2.0.3" pydantic = "^1.9" -pydash = "^7.0.4" +pydash = "^6.0.2" PyGithub = "^1.58.0" PyYAML = "^6.0" -requests = "^2.28.2" +requests = "^2.31" rich = "^13.0.0" simpleeval = "^0.9.13" @@ -483,63 +474,63 @@ url = "../connector_ops" [[package]] name = "coverage" -version = "7.4.1" +version = "7.4.4" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7"}, - {file = "coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61"}, - {file = "coverage-7.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d17bbc946f52ca67adf72a5ee783cd7cd3477f8f8796f59b4974a9b59cacc9ee"}, - {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3277f5fa7483c927fe3a7b017b39351610265308f5267ac6d4c2b64cc1d8d25"}, - {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dceb61d40cbfcf45f51e59933c784a50846dc03211054bd76b421a713dcdf19"}, - {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6008adeca04a445ea6ef31b2cbaf1d01d02986047606f7da266629afee982630"}, - {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c61f66d93d712f6e03369b6a7769233bfda880b12f417eefdd4f16d1deb2fc4c"}, - {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9bb62fac84d5f2ff523304e59e5c439955fb3b7f44e3d7b2085184db74d733b"}, - {file = "coverage-7.4.1-cp310-cp310-win32.whl", hash = "sha256:f86f368e1c7ce897bf2457b9eb61169a44e2ef797099fb5728482b8d69f3f016"}, - {file = "coverage-7.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:869b5046d41abfea3e381dd143407b0d29b8282a904a19cb908fa24d090cc018"}, - {file = "coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295"}, - {file = "coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c"}, - {file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676"}, - {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd"}, - {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011"}, - {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74"}, - {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1"}, - {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6"}, - {file = "coverage-7.4.1-cp311-cp311-win32.whl", hash = "sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5"}, - {file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968"}, - {file = "coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581"}, - {file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc"}, - {file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74"}, - {file = "coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448"}, - {file = "coverage-7.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bdb0285a0202888d19ec6b6d23d5990410decb932b709f2b0dfe216d031d218"}, - {file = "coverage-7.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:918440dea04521f499721c039863ef95433314b1db00ff826a02580c1f503e45"}, - {file = "coverage-7.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:379d4c7abad5afbe9d88cc31ea8ca262296480a86af945b08214eb1a556a3e4d"}, - {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b094116f0b6155e36a304ff912f89bbb5067157aff5f94060ff20bbabdc8da06"}, - {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f5968608b1fe2a1d00d01ad1017ee27efd99b3437e08b83ded9b7af3f6f766"}, - {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10e88e7f41e6197ea0429ae18f21ff521d4f4490aa33048f6c6f94c6045a6a75"}, - {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a4a3907011d39dbc3e37bdc5df0a8c93853c369039b59efa33a7b6669de04c60"}, - {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d224f0c4c9c98290a6990259073f496fcec1b5cc613eecbd22786d398ded3ad"}, - {file = "coverage-7.4.1-cp38-cp38-win32.whl", hash = "sha256:23f5881362dcb0e1a92b84b3c2809bdc90db892332daab81ad8f642d8ed55042"}, - {file = "coverage-7.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:a07f61fc452c43cd5328b392e52555f7d1952400a1ad09086c4a8addccbd138d"}, - {file = "coverage-7.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e738a492b6221f8dcf281b67129510835461132b03024830ac0e554311a5c54"}, - {file = "coverage-7.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46342fed0fff72efcda77040b14728049200cbba1279e0bf1188f1f2078c1d70"}, - {file = "coverage-7.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9641e21670c68c7e57d2053ddf6c443e4f0a6e18e547e86af3fad0795414a628"}, - {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb2c2688ed93b027eb0d26aa188ada34acb22dceea256d76390eea135083950"}, - {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12c923757de24e4e2110cf8832d83a886a4cf215c6e61ed506006872b43a6d1"}, - {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0491275c3b9971cdbd28a4595c2cb5838f08036bca31765bad5e17edf900b2c7"}, - {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8dfc5e195bbef80aabd81596ef52a1277ee7143fe419efc3c4d8ba2754671756"}, - {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b656a4d12b0490ca72651fe4d9f5e07e3c6461063a9b6265ee45eb2bdd35"}, - {file = "coverage-7.4.1-cp39-cp39-win32.whl", hash = "sha256:f90515974b39f4dea2f27c0959688621b46d96d5a626cf9c53dbc653a895c05c"}, - {file = "coverage-7.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:64e723ca82a84053dd7bfcc986bdb34af8d9da83c521c19d6b472bc6880e191a"}, - {file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166"}, - {file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04"}, + {file = "coverage-7.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0be5efd5127542ef31f165de269f77560d6cdef525fffa446de6f7e9186cfb2"}, + {file = "coverage-7.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ccd341521be3d1b3daeb41960ae94a5e87abe2f46f17224ba5d6f2b8398016cf"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fa497a8ab37784fbb20ab699c246053ac294d13fc7eb40ec007a5043ec91f8"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1a93009cb80730c9bca5d6d4665494b725b6e8e157c1cb7f2db5b4b122ea562"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:690db6517f09336559dc0b5f55342df62370a48f5469fabf502db2c6d1cffcd2"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:09c3255458533cb76ef55da8cc49ffab9e33f083739c8bd4f58e79fecfe288f7"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ce1415194b4a6bd0cdcc3a1dfbf58b63f910dcb7330fe15bdff542c56949f87"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b91cbc4b195444e7e258ba27ac33769c41b94967919f10037e6355e998af255c"}, + {file = "coverage-7.4.4-cp310-cp310-win32.whl", hash = "sha256:598825b51b81c808cb6f078dcb972f96af96b078faa47af7dfcdf282835baa8d"}, + {file = "coverage-7.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:09ef9199ed6653989ebbcaacc9b62b514bb63ea2f90256e71fea3ed74bd8ff6f"}, + {file = "coverage-7.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f9f50e7ef2a71e2fae92774c99170eb8304e3fdf9c8c3c7ae9bab3e7229c5cf"}, + {file = "coverage-7.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:623512f8ba53c422fcfb2ce68362c97945095b864cda94a92edbaf5994201083"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0513b9508b93da4e1716744ef6ebc507aff016ba115ffe8ecff744d1322a7b63"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40209e141059b9370a2657c9b15607815359ab3ef9918f0196b6fccce8d3230f"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a2b2b78c78293782fd3767d53e6474582f62443d0504b1554370bde86cc8227"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:73bfb9c09951125d06ee473bed216e2c3742f530fc5acc1383883125de76d9cd"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f384c3cc76aeedce208643697fb3e8437604b512255de6d18dae3f27655a384"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:54eb8d1bf7cacfbf2a3186019bcf01d11c666bd495ed18717162f7eb1e9dd00b"}, + {file = "coverage-7.4.4-cp311-cp311-win32.whl", hash = "sha256:cac99918c7bba15302a2d81f0312c08054a3359eaa1929c7e4b26ebe41e9b286"}, + {file = "coverage-7.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:b14706df8b2de49869ae03a5ccbc211f4041750cd4a66f698df89d44f4bd30ec"}, + {file = "coverage-7.4.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:201bef2eea65e0e9c56343115ba3814e896afe6d36ffd37bab783261db430f76"}, + {file = "coverage-7.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:41c9c5f3de16b903b610d09650e5e27adbfa7f500302718c9ffd1c12cf9d6818"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d898fe162d26929b5960e4e138651f7427048e72c853607f2b200909794ed978"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ea79bb50e805cd6ac058dfa3b5c8f6c040cb87fe83de10845857f5535d1db70"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce4b94265ca988c3f8e479e741693d143026632672e3ff924f25fab50518dd51"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:00838a35b882694afda09f85e469c96367daa3f3f2b097d846a7216993d37f4c"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fdfafb32984684eb03c2d83e1e51f64f0906b11e64482df3c5db936ce3839d48"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:69eb372f7e2ece89f14751fbcbe470295d73ed41ecd37ca36ed2eb47512a6ab9"}, + {file = "coverage-7.4.4-cp312-cp312-win32.whl", hash = "sha256:137eb07173141545e07403cca94ab625cc1cc6bc4c1e97b6e3846270e7e1fea0"}, + {file = "coverage-7.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:d71eec7d83298f1af3326ce0ff1d0ea83c7cb98f72b577097f9083b20bdaf05e"}, + {file = "coverage-7.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d5ae728ff3b5401cc320d792866987e7e7e880e6ebd24433b70a33b643bb0384"}, + {file = "coverage-7.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc4f1358cb0c78edef3ed237ef2c86056206bb8d9140e73b6b89fbcfcbdd40e1"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8130a2aa2acb8788e0b56938786c33c7c98562697bf9f4c7d6e8e5e3a0501e4a"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf271892d13e43bc2b51e6908ec9a6a5094a4df1d8af0bfc360088ee6c684409"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4cdc86d54b5da0df6d3d3a2f0b710949286094c3a6700c21e9015932b81447e"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ae71e7ddb7a413dd60052e90528f2f65270aad4b509563af6d03d53e979feafd"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:38dd60d7bf242c4ed5b38e094baf6401faa114fc09e9e6632374388a404f98e7"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa5b1c1bfc28384f1f53b69a023d789f72b2e0ab1b3787aae16992a7ca21056c"}, + {file = "coverage-7.4.4-cp38-cp38-win32.whl", hash = "sha256:dfa8fe35a0bb90382837b238fff375de15f0dcdb9ae68ff85f7a63649c98527e"}, + {file = "coverage-7.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:b2991665420a803495e0b90a79233c1433d6ed77ef282e8e152a324bbbc5e0c8"}, + {file = "coverage-7.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b799445b9f7ee8bf299cfaed6f5b226c0037b74886a4e11515e569b36fe310d"}, + {file = "coverage-7.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b4d33f418f46362995f1e9d4f3a35a1b6322cb959c31d88ae56b0298e1c22357"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aadacf9a2f407a4688d700e4ebab33a7e2e408f2ca04dbf4aef17585389eff3e"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c95949560050d04d46b919301826525597f07b33beba6187d04fa64d47ac82e"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff7687ca3d7028d8a5f0ebae95a6e4827c5616b31a4ee1192bdfde697db110d4"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5fc1de20b2d4a061b3df27ab9b7c7111e9a710f10dc2b84d33a4ab25065994ec"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c74880fc64d4958159fbd537a091d2a585448a8f8508bf248d72112723974cbd"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:742a76a12aa45b44d236815d282b03cfb1de3b4323f3e4ec933acfae08e54ade"}, + {file = "coverage-7.4.4-cp39-cp39-win32.whl", hash = "sha256:d89d7b2974cae412400e88f35d86af72208e1ede1a541954af5d944a8ba46c57"}, + {file = "coverage-7.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:9ca28a302acb19b6af89e90f33ee3e1906961f94b54ea37de6737b7ca9d8827c"}, + {file = "coverage-7.4.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:b2c5edc4ac10a7ef6605a966c58929ec6c1bd0917fb8c15cb3363f65aa40e677"}, + {file = "coverage-7.4.4.tar.gz", hash = "sha256:c901df83d097649e257e803be22592aedfd5182f07b3cc87d640bbb9afd50f49"}, ] [package.dependencies] @@ -550,42 +541,57 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "3.4.8" +version = "42.0.5" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "cryptography-3.4.8-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:a00cf305f07b26c351d8d4e1af84ad7501eca8a342dedf24a7acb0e7b7406e14"}, - {file = "cryptography-3.4.8-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:f44d141b8c4ea5eb4dbc9b3ad992d45580c1d22bf5e24363f2fbf50c2d7ae8a7"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0a7dcbcd3f1913f664aca35d47c1331fce738d44ec34b7be8b9d332151b0b01e"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34dae04a0dce5730d8eb7894eab617d8a70d0c97da76b905de9efb7128ad7085"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1eb7bb0df6f6f583dd8e054689def236255161ebbcf62b226454ab9ec663746b"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:9965c46c674ba8cc572bc09a03f4c649292ee73e1b683adb1ce81e82e9a6a0fb"}, - {file = "cryptography-3.4.8-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:3c4129fc3fdc0fa8e40861b5ac0c673315b3c902bbdc05fc176764815b43dd1d"}, - {file = "cryptography-3.4.8-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:695104a9223a7239d155d7627ad912953b540929ef97ae0c34c7b8bf30857e89"}, - {file = "cryptography-3.4.8-cp36-abi3-win32.whl", hash = "sha256:21ca464b3a4b8d8e86ba0ee5045e103a1fcfac3b39319727bc0fc58c09c6aff7"}, - {file = "cryptography-3.4.8-cp36-abi3-win_amd64.whl", hash = "sha256:3520667fda779eb788ea00080124875be18f2d8f0848ec00733c0ec3bb8219fc"}, - {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d2a6e5ef66503da51d2110edf6c403dc6b494cc0082f85db12f54e9c5d4c3ec5"}, - {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a305600e7a6b7b855cd798e00278161b681ad6e9b7eca94c721d5f588ab212af"}, - {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:3fa3a7ccf96e826affdf1a0a9432be74dc73423125c8f96a909e3835a5ef194a"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:d9ec0e67a14f9d1d48dd87a2531009a9b251c02ea42851c060b25c782516ff06"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5b0fbfae7ff7febdb74b574055c7466da334a5371f253732d7e2e7525d570498"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94fff993ee9bc1b2440d3b7243d488c6a3d9724cc2b09cdb297f6a886d040ef7"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:8695456444f277af73a4877db9fc979849cd3ee74c198d04fc0776ebc3db52b9"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:cd65b60cfe004790c795cc35f272e41a3df4631e2fb6b35aa7ac6ef2859d554e"}, - {file = "cryptography-3.4.8.tar.gz", hash = "sha256:94cc5ed4ceaefcbe5bf38c8fba6a21fc1d365bb8fb826ea1688e3370b2e24a1c"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, + {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, + {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, + {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, + {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, + {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = "sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, + {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, ] [package.dependencies] -cffi = ">=1.12" +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] -docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] -docstest = ["doc8", "pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] -pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] -sdist = ["setuptools-rust (>=0.11.4)"] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] +nox = ["nox"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] +sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pytz"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] [[package]] name = "dagger-io" @@ -628,13 +634,13 @@ dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] [[package]] name = "docker" -version = "6.1.3" +version = "7.0.0" description = "A Python library for the Docker Engine API." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "docker-6.1.3-py3-none-any.whl", hash = "sha256:aecd2277b8bf8e506e484f6ab7aec39abe0038e29fa4a6d3ba86c3fe01844ed9"}, - {file = "docker-6.1.3.tar.gz", hash = "sha256:aa6d17830045ba5ef0168d5eaa34d37beeb113948c413affe1d5991fc11f9a20"}, + {file = "docker-7.0.0-py3-none-any.whl", hash = "sha256:12ba681f2777a0ad28ffbcc846a69c31b4dfd9752b47eb425a274ee269c5e14b"}, + {file = "docker-7.0.0.tar.gz", hash = "sha256:323736fb92cd9418fc5e7133bc953e11a9da04f4483f828b527db553f1e7e5a3"}, ] [package.dependencies] @@ -642,10 +648,10 @@ packaging = ">=14.0" pywin32 = {version = ">=304", markers = "sys_platform == \"win32\""} requests = ">=2.26.0" urllib3 = ">=1.26.0" -websocket-client = ">=0.32.0" [package.extras] ssh = ["paramiko (>=2.4.3)"] +websockets = ["websocket-client (>=1.3.0)"] [[package]] name = "dpath" @@ -717,35 +723,36 @@ smmap = ">=3.0.1,<6" [[package]] name = "gitpython" -version = "3.1.41" +version = "3.1.42" description = "GitPython is a Python library used to interact with Git repositories" optional = false python-versions = ">=3.7" files = [ - {file = "GitPython-3.1.41-py3-none-any.whl", hash = "sha256:c36b6634d069b3f719610175020a9aed919421c87552185b085e04fbbdb10b7c"}, - {file = "GitPython-3.1.41.tar.gz", hash = "sha256:ed66e624884f76df22c8e16066d567aaa5a37d5b5fa19db2c6df6f7156db9048"}, + {file = "GitPython-3.1.42-py3-none-any.whl", hash = "sha256:1bf9cd7c9e7255f77778ea54359e54ac22a72a5b51288c457c881057b7bb9ecd"}, + {file = "GitPython-3.1.42.tar.gz", hash = "sha256:2d99869e0fef71a73cbd242528105af1d6c1b108c60dfabd994bf292f76c3ceb"}, ] [package.dependencies] gitdb = ">=4.0.1,<5" [package.extras] -test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "sumtypes"] +test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar"] [[package]] name = "google-api-core" -version = "2.16.2" +version = "2.18.0" description = "Google API client core library" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-core-2.16.2.tar.gz", hash = "sha256:032d37b45d1d6bdaf68fb11ff621e2593263a239fa9246e2e94325f9c47876d2"}, - {file = "google_api_core-2.16.2-py3-none-any.whl", hash = "sha256:449ca0e3f14c179b4165b664256066c7861610f70b6ffe54bb01a04e9b466929"}, + {file = "google-api-core-2.18.0.tar.gz", hash = "sha256:62d97417bfc674d6cef251e5c4d639a9655e00c45528c4364fbfebb478ce72a9"}, + {file = "google_api_core-2.18.0-py3-none-any.whl", hash = "sha256:5a63aa102e0049abe85b5b88cb9409234c1f70afcda21ce1e40b285b9629c1d6"}, ] [package.dependencies] google-auth = ">=2.14.1,<3.0.dev0" googleapis-common-protos = ">=1.56.2,<2.0.dev0" +proto-plus = ">=1.22.3,<2.0.0dev" protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" requests = ">=2.18.0,<3.0.0.dev0" @@ -756,13 +763,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-auth" -version = "2.27.0" +version = "2.29.0" description = "Google Authentication Library" optional = false python-versions = ">=3.7" files = [ - {file = "google-auth-2.27.0.tar.gz", hash = "sha256:e863a56ccc2d8efa83df7a80272601e43487fa9a728a376205c86c26aaefa821"}, - {file = "google_auth-2.27.0-py2.py3-none-any.whl", hash = "sha256:8e4bad367015430ff253fe49d500fdc3396c1a434db5740828c728e45bcce245"}, + {file = "google-auth-2.29.0.tar.gz", hash = "sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360"}, + {file = "google_auth-2.29.0-py2.py3-none-any.whl", hash = "sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415"}, ] [package.dependencies] @@ -797,18 +804,18 @@ grpc = ["grpcio (>=1.38.0,<2.0dev)", "grpcio-status (>=1.38.0,<2.0.dev0)"] [[package]] name = "google-cloud-storage" -version = "2.14.0" +version = "2.16.0" description = "Google Cloud Storage API client library" optional = false python-versions = ">=3.7" files = [ - {file = "google-cloud-storage-2.14.0.tar.gz", hash = "sha256:2d23fcf59b55e7b45336729c148bb1c464468c69d5efbaee30f7201dd90eb97e"}, - {file = "google_cloud_storage-2.14.0-py2.py3-none-any.whl", hash = "sha256:8641243bbf2a2042c16a6399551fbb13f062cbc9a2de38d6c0bb5426962e9dbd"}, + {file = "google-cloud-storage-2.16.0.tar.gz", hash = "sha256:dda485fa503710a828d01246bd16ce9db0823dc51bbca742ce96a6817d58669f"}, + {file = "google_cloud_storage-2.16.0-py2.py3-none-any.whl", hash = "sha256:91a06b96fb79cf9cdfb4e759f178ce11ea885c79938f89590344d079305f5852"}, ] [package.dependencies] -google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0dev" -google-auth = ">=2.23.3,<3.0dev" +google-api-core = ">=2.15.0,<3.0.0dev" +google-auth = ">=2.26.1,<3.0dev" google-cloud-core = ">=2.3.0,<3.0dev" google-crc32c = ">=1.0,<2.0dev" google-resumable-media = ">=2.6.0" @@ -917,13 +924,13 @@ requests = ["requests (>=2.18.0,<3.0.0dev)"] [[package]] name = "googleapis-common-protos" -version = "1.62.0" +version = "1.63.0" description = "Common protobufs used in Google APIs" optional = false python-versions = ">=3.7" files = [ - {file = "googleapis-common-protos-1.62.0.tar.gz", hash = "sha256:83f0ece9f94e5672cced82f592d2a5edf527a96ed1794f0bab36d5735c996277"}, - {file = "googleapis_common_protos-1.62.0-py2.py3-none-any.whl", hash = "sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07"}, + {file = "googleapis-common-protos-1.63.0.tar.gz", hash = "sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e"}, + {file = "googleapis_common_protos-1.63.0-py2.py3-none-any.whl", hash = "sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632"}, ] [package.dependencies] @@ -984,13 +991,13 @@ files = [ [[package]] name = "httpcore" -version = "1.0.2" +version = "1.0.5" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.8" files = [ - {file = "httpcore-1.0.2-py3-none-any.whl", hash = "sha256:096cc05bca73b8e459a1fc3dcf585148f63e534eae4339559c9b8a8d6399acc7"}, - {file = "httpcore-1.0.2.tar.gz", hash = "sha256:9fc092e4799b26174648e54b74ed5f683132a464e95643b226e00c2ed2fa6535"}, + {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, + {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, ] [package.dependencies] @@ -1001,17 +1008,17 @@ h11 = ">=0.13,<0.15" asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.23.0)"] +trio = ["trio (>=0.22.0,<0.26.0)"] [[package]] name = "httpx" -version = "0.26.0" +version = "0.27.0" description = "The next generation HTTP client." optional = false python-versions = ">=3.8" files = [ - {file = "httpx-0.26.0-py3-none-any.whl", hash = "sha256:8915f5a3627c4d47b73e8202457cb28f1266982d1159bd5779d86a80c0eab1cd"}, - {file = "httpx-0.26.0.tar.gz", hash = "sha256:451b55c30d5185ea6b23c2c793abf9bb237d2a7dfb901ced6ff69ad37ec1dfaf"}, + {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, + {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, ] [package.dependencies] @@ -1051,13 +1058,13 @@ files = [ [[package]] name = "inquirer" -version = "3.2.3" +version = "3.2.4" description = "Collection of common interactive command line user interfaces, based on Inquirer.js" optional = false python-versions = ">=3.8.1" files = [ - {file = "inquirer-3.2.3-py3-none-any.whl", hash = "sha256:68fa2cfaa652212f035f73794aa1db2e6c0a9c8cef81ab6825b45120fa8ea345"}, - {file = "inquirer-3.2.3.tar.gz", hash = "sha256:0cba57d901b206dd597d8809b58c378c47fbc804a1fc9b33e2780ca2f9b43ac7"}, + {file = "inquirer-3.2.4-py3-none-any.whl", hash = "sha256:273a4e4a4345ac1afdb17408d40fc8dccf3485db68203357919468561035a763"}, + {file = "inquirer-3.2.4.tar.gz", hash = "sha256:33b09efc1b742b9d687b540296a8b6a3f773399673321fcc2ab0eb4c109bf9b5"}, ] [package.dependencies] @@ -1326,38 +1333,38 @@ files = [ [[package]] name = "mypy" -version = "1.8.0" +version = "1.9.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"}, - {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"}, - {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"}, - {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"}, - {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"}, - {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"}, - {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"}, - {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"}, - {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"}, - {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"}, - {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"}, - {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"}, - {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"}, - {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"}, - {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"}, - {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"}, - {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"}, - {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"}, - {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"}, - {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"}, - {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"}, - {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"}, - {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"}, - {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"}, - {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"}, - {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"}, - {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"}, + {file = "mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"}, + {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"}, + {file = "mypy-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150"}, + {file = "mypy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374"}, + {file = "mypy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc"}, + {file = "mypy-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129"}, + {file = "mypy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612"}, + {file = "mypy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6"}, + {file = "mypy-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185"}, + {file = "mypy-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913"}, + {file = "mypy-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"}, + {file = "mypy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b"}, + {file = "mypy-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2"}, + {file = "mypy-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e"}, + {file = "mypy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04"}, + {file = "mypy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89"}, + {file = "mypy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02"}, + {file = "mypy-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4"}, + {file = "mypy-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d"}, + {file = "mypy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf"}, + {file = "mypy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9"}, + {file = "mypy-1.9.0-py3-none-any.whl", hash = "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e"}, + {file = "mypy-1.9.0.tar.gz", hash = "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974"}, ] [package.dependencies] @@ -1429,51 +1436,51 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] name = "pandas" -version = "2.2.0" +version = "2.2.1" description = "Powerful data structures for data analysis, time series, and statistics" optional = false python-versions = ">=3.9" files = [ - {file = "pandas-2.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8108ee1712bb4fa2c16981fba7e68b3f6ea330277f5ca34fa8d557e986a11670"}, - {file = "pandas-2.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:736da9ad4033aeab51d067fc3bd69a0ba36f5a60f66a527b3d72e2030e63280a"}, - {file = "pandas-2.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38e0b4fc3ddceb56ec8a287313bc22abe17ab0eb184069f08fc6a9352a769b18"}, - {file = "pandas-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20404d2adefe92aed3b38da41d0847a143a09be982a31b85bc7dd565bdba0f4e"}, - {file = "pandas-2.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7ea3ee3f125032bfcade3a4cf85131ed064b4f8dd23e5ce6fa16473e48ebcaf5"}, - {file = "pandas-2.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9670b3ac00a387620489dfc1bca66db47a787f4e55911f1293063a78b108df1"}, - {file = "pandas-2.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:5a946f210383c7e6d16312d30b238fd508d80d927014f3b33fb5b15c2f895430"}, - {file = "pandas-2.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a1b438fa26b208005c997e78672f1aa8138f67002e833312e6230f3e57fa87d5"}, - {file = "pandas-2.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8ce2fbc8d9bf303ce54a476116165220a1fedf15985b09656b4b4275300e920b"}, - {file = "pandas-2.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2707514a7bec41a4ab81f2ccce8b382961a29fbe9492eab1305bb075b2b1ff4f"}, - {file = "pandas-2.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85793cbdc2d5bc32620dc8ffa715423f0c680dacacf55056ba13454a5be5de88"}, - {file = "pandas-2.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cfd6c2491dc821b10c716ad6776e7ab311f7df5d16038d0b7458bc0b67dc10f3"}, - {file = "pandas-2.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a146b9dcacc3123aa2b399df1a284de5f46287a4ab4fbfc237eac98a92ebcb71"}, - {file = "pandas-2.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:fbc1b53c0e1fdf16388c33c3cca160f798d38aea2978004dd3f4d3dec56454c9"}, - {file = "pandas-2.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a41d06f308a024981dcaa6c41f2f2be46a6b186b902c94c2674e8cb5c42985bc"}, - {file = "pandas-2.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:159205c99d7a5ce89ecfc37cb08ed179de7783737cea403b295b5eda8e9c56d1"}, - {file = "pandas-2.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb1e1f3861ea9132b32f2133788f3b14911b68102d562715d71bd0013bc45440"}, - {file = "pandas-2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:761cb99b42a69005dec2b08854fb1d4888fdf7b05db23a8c5a099e4b886a2106"}, - {file = "pandas-2.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a20628faaf444da122b2a64b1e5360cde100ee6283ae8effa0d8745153809a2e"}, - {file = "pandas-2.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f5be5d03ea2073627e7111f61b9f1f0d9625dc3c4d8dda72cc827b0c58a1d042"}, - {file = "pandas-2.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:a626795722d893ed6aacb64d2401d017ddc8a2341b49e0384ab9bf7112bdec30"}, - {file = "pandas-2.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9f66419d4a41132eb7e9a73dcec9486cf5019f52d90dd35547af11bc58f8637d"}, - {file = "pandas-2.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:57abcaeda83fb80d447f28ab0cc7b32b13978f6f733875ebd1ed14f8fbc0f4ab"}, - {file = "pandas-2.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e60f1f7dba3c2d5ca159e18c46a34e7ca7247a73b5dd1a22b6d59707ed6b899a"}, - {file = "pandas-2.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb61dc8567b798b969bcc1fc964788f5a68214d333cade8319c7ab33e2b5d88a"}, - {file = "pandas-2.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:52826b5f4ed658fa2b729264d63f6732b8b29949c7fd234510d57c61dbeadfcd"}, - {file = "pandas-2.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bde2bc699dbd80d7bc7f9cab1e23a95c4375de615860ca089f34e7c64f4a8de7"}, - {file = "pandas-2.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:3de918a754bbf2da2381e8a3dcc45eede8cd7775b047b923f9006d5f876802ae"}, - {file = "pandas-2.2.0.tar.gz", hash = "sha256:30b83f7c3eb217fb4d1b494a57a2fda5444f17834f5df2de6b2ffff68dc3c8e2"}, + {file = "pandas-2.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8df8612be9cd1c7797c93e1c5df861b2ddda0b48b08f2c3eaa0702cf88fb5f88"}, + {file = "pandas-2.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0f573ab277252ed9aaf38240f3b54cfc90fff8e5cab70411ee1d03f5d51f3944"}, + {file = "pandas-2.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f02a3a6c83df4026e55b63c1f06476c9aa3ed6af3d89b4f04ea656ccdaaaa359"}, + {file = "pandas-2.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c38ce92cb22a4bea4e3929429aa1067a454dcc9c335799af93ba9be21b6beb51"}, + {file = "pandas-2.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c2ce852e1cf2509a69e98358e8458775f89599566ac3775e70419b98615f4b06"}, + {file = "pandas-2.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:53680dc9b2519cbf609c62db3ed7c0b499077c7fefda564e330286e619ff0dd9"}, + {file = "pandas-2.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:94e714a1cca63e4f5939cdce5f29ba8d415d85166be3441165edd427dc9f6bc0"}, + {file = "pandas-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f821213d48f4ab353d20ebc24e4faf94ba40d76680642fb7ce2ea31a3ad94f9b"}, + {file = "pandas-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c70e00c2d894cb230e5c15e4b1e1e6b2b478e09cf27cc593a11ef955b9ecc81a"}, + {file = "pandas-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e97fbb5387c69209f134893abc788a6486dbf2f9e511070ca05eed4b930b1b02"}, + {file = "pandas-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101d0eb9c5361aa0146f500773395a03839a5e6ecde4d4b6ced88b7e5a1a6403"}, + {file = "pandas-2.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7d2ed41c319c9fb4fd454fe25372028dfa417aacb9790f68171b2e3f06eae8cd"}, + {file = "pandas-2.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:af5d3c00557d657c8773ef9ee702c61dd13b9d7426794c9dfeb1dc4a0bf0ebc7"}, + {file = "pandas-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:06cf591dbaefb6da9de8472535b185cba556d0ce2e6ed28e21d919704fef1a9e"}, + {file = "pandas-2.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:88ecb5c01bb9ca927ebc4098136038519aa5d66b44671861ffab754cae75102c"}, + {file = "pandas-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:04f6ec3baec203c13e3f8b139fb0f9f86cd8c0b94603ae3ae8ce9a422e9f5bee"}, + {file = "pandas-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a935a90a76c44fe170d01e90a3594beef9e9a6220021acfb26053d01426f7dc2"}, + {file = "pandas-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c391f594aae2fd9f679d419e9a4d5ba4bce5bb13f6a989195656e7dc4b95c8f0"}, + {file = "pandas-2.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9d1265545f579edf3f8f0cb6f89f234f5e44ba725a34d86535b1a1d38decbccc"}, + {file = "pandas-2.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:11940e9e3056576ac3244baef2fedade891977bcc1cb7e5cc8f8cc7d603edc89"}, + {file = "pandas-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:4acf681325ee1c7f950d058b05a820441075b0dd9a2adf5c4835b9bc056bf4fb"}, + {file = "pandas-2.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9bd8a40f47080825af4317d0340c656744f2bfdb6819f818e6ba3cd24c0e1397"}, + {file = "pandas-2.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:df0c37ebd19e11d089ceba66eba59a168242fc6b7155cba4ffffa6eccdfb8f16"}, + {file = "pandas-2.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:739cc70eaf17d57608639e74d63387b0d8594ce02f69e7a0b046f117974b3019"}, + {file = "pandas-2.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9d3558d263073ed95e46f4650becff0c5e1ffe0fc3a015de3c79283dfbdb3df"}, + {file = "pandas-2.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4aa1d8707812a658debf03824016bf5ea0d516afdea29b7dc14cf687bc4d4ec6"}, + {file = "pandas-2.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:76f27a809cda87e07f192f001d11adc2b930e93a2b0c4a236fde5429527423be"}, + {file = "pandas-2.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:1ba21b1d5c0e43416218db63037dbe1a01fc101dc6e6024bcad08123e48004ab"}, + {file = "pandas-2.2.1.tar.gz", hash = "sha256:0ab90f87093c13f3e8fa45b48ba9f39181046e8f3317d3aadb2fffbb1b978572"}, ] [package.dependencies] @@ -1501,6 +1508,7 @@ parquet = ["pyarrow (>=10.0.1)"] performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] plot = ["matplotlib (>=3.6.3)"] postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] spss = ["pyreadstat (>=1.2.0)"] sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] @@ -1577,60 +1585,66 @@ tomli = ">=1.2.2" poetry-plugin = ["poetry (>=1.0,<2.0)"] [[package]] -name = "protobuf" -version = "4.25.2" -description = "" +name = "proto-plus" +version = "1.23.0" +description = "Beautiful, Pythonic protocol buffers." optional = false -python-versions = ">=3.8" +python-versions = ">=3.6" files = [ - {file = "protobuf-4.25.2-cp310-abi3-win32.whl", hash = "sha256:b50c949608682b12efb0b2717f53256f03636af5f60ac0c1d900df6213910fd6"}, - {file = "protobuf-4.25.2-cp310-abi3-win_amd64.whl", hash = "sha256:8f62574857ee1de9f770baf04dde4165e30b15ad97ba03ceac65f760ff018ac9"}, - {file = "protobuf-4.25.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:2db9f8fa64fbdcdc93767d3cf81e0f2aef176284071507e3ede160811502fd3d"}, - {file = "protobuf-4.25.2-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:10894a2885b7175d3984f2be8d9850712c57d5e7587a2410720af8be56cdaf62"}, - {file = "protobuf-4.25.2-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:fc381d1dd0516343f1440019cedf08a7405f791cd49eef4ae1ea06520bc1c020"}, - {file = "protobuf-4.25.2-cp38-cp38-win32.whl", hash = "sha256:33a1aeef4b1927431d1be780e87b641e322b88d654203a9e9d93f218ee359e61"}, - {file = "protobuf-4.25.2-cp38-cp38-win_amd64.whl", hash = "sha256:47f3de503fe7c1245f6f03bea7e8d3ec11c6c4a2ea9ef910e3221c8a15516d62"}, - {file = "protobuf-4.25.2-cp39-cp39-win32.whl", hash = "sha256:5e5c933b4c30a988b52e0b7c02641760a5ba046edc5e43d3b94a74c9fc57c1b3"}, - {file = "protobuf-4.25.2-cp39-cp39-win_amd64.whl", hash = "sha256:d66a769b8d687df9024f2985d5137a337f957a0916cf5464d1513eee96a63ff0"}, - {file = "protobuf-4.25.2-py3-none-any.whl", hash = "sha256:a8b7a98d4ce823303145bf3c1a8bdb0f2f4642a414b196f04ad9853ed0c8f830"}, - {file = "protobuf-4.25.2.tar.gz", hash = "sha256:fe599e175cb347efc8ee524bcd4b902d11f7262c0e569ececcb89995c15f0a5e"}, + {file = "proto-plus-1.23.0.tar.gz", hash = "sha256:89075171ef11988b3fa157f5dbd8b9cf09d65fffee97e29ce403cd8defba19d2"}, + {file = "proto_plus-1.23.0-py3-none-any.whl", hash = "sha256:a829c79e619e1cf632de091013a4173deed13a55f326ef84f05af6f50ff4c82c"}, ] +[package.dependencies] +protobuf = ">=3.19.0,<5.0.0dev" + +[package.extras] +testing = ["google-api-core[grpc] (>=1.31.5)"] + [[package]] -name = "py" -version = "1.11.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" +name = "protobuf" +version = "4.25.3" +description = "" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.8" files = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, + {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"}, + {file = "protobuf-4.25.3-cp310-abi3-win_amd64.whl", hash = "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8"}, + {file = "protobuf-4.25.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d"}, + {file = "protobuf-4.25.3-cp38-cp38-win32.whl", hash = "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"}, + {file = "protobuf-4.25.3-cp38-cp38-win_amd64.whl", hash = "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4"}, + {file = "protobuf-4.25.3-cp39-cp39-win32.whl", hash = "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4"}, + {file = "protobuf-4.25.3-cp39-cp39-win_amd64.whl", hash = "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c"}, + {file = "protobuf-4.25.3-py3-none-any.whl", hash = "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9"}, + {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"}, ] [[package]] name = "pyasn1" -version = "0.5.1" +version = "0.6.0" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +python-versions = ">=3.8" files = [ - {file = "pyasn1-0.5.1-py2.py3-none-any.whl", hash = "sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58"}, - {file = "pyasn1-0.5.1.tar.gz", hash = "sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c"}, + {file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"}, + {file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"}, ] [[package]] name = "pyasn1-modules" -version = "0.3.0" +version = "0.4.0" description = "A collection of ASN.1-based protocols modules" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +python-versions = ">=3.8" files = [ - {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"}, - {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"}, + {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"}, + {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"}, ] [package.dependencies] -pyasn1 = ">=0.4.6,<0.6.0" +pyasn1 = ">=0.4.6,<0.7.0" [[package]] name = "pycparser" @@ -1697,53 +1711,50 @@ email = ["email-validator (>=1.0.3)"] [[package]] name = "pydash" -version = "7.0.7" +version = "6.0.2" description = "The kitchen sink of Python utility libraries for doing \"stuff\" in a functional way. Based on the Lo-Dash Javascript library." optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "pydash-7.0.7-py3-none-any.whl", hash = "sha256:c3c5b54eec0a562e0080d6f82a14ad4d5090229847b7e554235b5c1558c745e1"}, - {file = "pydash-7.0.7.tar.gz", hash = "sha256:cc935d5ac72dd41fb4515bdf982e7c864c8b5eeea16caffbab1936b849aaa49a"}, + {file = "pydash-6.0.2-py3-none-any.whl", hash = "sha256:6d3ce5cbbc8ca3533c12782ac201c2ec756d1e1703ec3efc88f2b95d1ed2bb31"}, + {file = "pydash-6.0.2.tar.gz", hash = "sha256:35caa588e01d293713655e0870544d25128cd414c5e19477a0d63adc2b2ca03e"}, ] -[package.dependencies] -typing-extensions = ">=3.10,<4.6.0 || >4.6.0" - [package.extras] -dev = ["black", "build", "coverage", "docformatter", "flake8", "flake8-black", "flake8-bugbear", "flake8-isort", "furo", "invoke", "isort", "mypy", "pylint", "pytest", "pytest-cov", "pytest-mypy-testing", "sphinx", "sphinx-autodoc-typehints", "tox", "twine", "wheel"] +dev = ["Sphinx", "black", "build", "coverage", "docformatter", "flake8", "flake8-black", "flake8-bugbear", "flake8-isort", "importlib-metadata (<5)", "invoke", "isort", "pylint", "pytest", "pytest-cov", "sphinx-rtd-theme", "tox", "twine", "wheel"] [[package]] name = "pygit2" -version = "1.14.0" +version = "1.14.1" description = "Python bindings for libgit2." optional = false python-versions = ">=3.9" files = [ - {file = "pygit2-1.14.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ab5a983cb116d617c136cdc23832e16aed17f5fdd3b7bb46d85c0aabde0162ee"}, - {file = "pygit2-1.14.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e352b77c2e6f8a1900b406bc10a9471718782775a6029d847c71e5363c3166f9"}, - {file = "pygit2-1.14.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12a5f456ab9ac2e7718c95c8ac2bfa1fd23908545deb7cb7693e035c2d0f037a"}, - {file = "pygit2-1.14.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bb10402c983d8513c3bceb6a3f6f52ec19c69b0244801cebe95aab6dbf19f679"}, - {file = "pygit2-1.14.0-cp310-cp310-win32.whl", hash = "sha256:0d7526a7ad2bb91b36ba43c87452182052f58cb068311cf8173ed5391ca7788e"}, - {file = "pygit2-1.14.0-cp310-cp310-win_amd64.whl", hash = "sha256:80d0baca5ab9a06ca6a709716737ed6993e10349db7a98f1f3966278d39098fd"}, - {file = "pygit2-1.14.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:86f5295e7996927238dfebdb3c8d81dae83332bc8ced61971806a606261d60ff"}, - {file = "pygit2-1.14.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84dd4b36e38c9736736ba57e7257b6efe604932232c98503a64c94283dada7de"}, - {file = "pygit2-1.14.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3adf7fd8af9bc3b6e11e4920abb0121cdad6f8299ed1d7643e756ab49dbb4e34"}, - {file = "pygit2-1.14.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a98c3db4f06bae8266263bdc7b7447801debc30b6223f0826e07709abe9c0929"}, - {file = "pygit2-1.14.0-cp311-cp311-win32.whl", hash = "sha256:4c74aba5b40d6dac2f04bf4f3ca529304bdbf77888de0e87c706d243c9fa0693"}, - {file = "pygit2-1.14.0-cp311-cp311-win_amd64.whl", hash = "sha256:613bc82b0a17ccd5334b8f5d3b963698b45e228910bcea27fa52f84c60f50b1a"}, - {file = "pygit2-1.14.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b0384fb21af58149d59dc37f73f9daea7e6cfec2de7d067be40cc08049b4a62b"}, - {file = "pygit2-1.14.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb53c367f66cdd8d41552ed2a01a15a0499d8373dcca37360f3abfb7bf947f71"}, - {file = "pygit2-1.14.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:807cf57e02947ad448ae91226d193ebe0999540a56f5a95183a502e28c50b7ff"}, - {file = "pygit2-1.14.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a83fe40e2cdac3abf926b633e07be434ddae353085720c1a6e3afb2a4b72f9c1"}, - {file = "pygit2-1.14.0-cp312-cp312-win32.whl", hash = "sha256:ffe8b5b7fb482c3f8625384eb60e83390e1c2c1b74e66aff2f812e74c9754c5d"}, - {file = "pygit2-1.14.0-cp312-cp312-win_amd64.whl", hash = "sha256:47d8223440096e59bd6367c341692cd0191e98601665dd4986ba2e00bc5ef769"}, - {file = "pygit2-1.14.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ed9e67e58f11f285e2fa2077c6f45852763826f8b8a2a777937f1fd2313eed5d"}, - {file = "pygit2-1.14.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ec66cb115afd5552d50ba96a29e60da4556cd060396a1b38e97aefc047bd124"}, - {file = "pygit2-1.14.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87ea6fd663ebe59e6e872a25a0f1af2d83c7d75147461a352a22bca4df70c8d0"}, - {file = "pygit2-1.14.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:65cc2e696f5d6add54d34dbf7336a420f7b1df31c525e3ed5c8a123f4f1d67de"}, - {file = "pygit2-1.14.0-cp39-cp39-win32.whl", hash = "sha256:34a05d47b05e1fe2cc44164d778035253868b179819b300a4d1c6cb75ff48847"}, - {file = "pygit2-1.14.0-cp39-cp39-win_amd64.whl", hash = "sha256:0f101c08fe2f81cc05a44f5c95ea5396310df3240e24d9f5dc2cf1871a794fcb"}, - {file = "pygit2-1.14.0.tar.gz", hash = "sha256:f529ed9660edbf9b625ccae7e51098ef73662e61496609009772d4627a826aa8"}, + {file = "pygit2-1.14.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:404d3d9bac22ff022157de3fbfd8997c108d86814ba88cbc8709c1c2daef833a"}, + {file = "pygit2-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:141a1b37fc431d98b3de2f4651eab8b1b1b038cd50de42bfd1c8de057ec2284e"}, + {file = "pygit2-1.14.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f35152b96a31ab705cdd63aef08fb199d6c1e87fc6fd45b1945f8cd040a43b7b"}, + {file = "pygit2-1.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ea505739af41496b1d36c99bc15e2bd5631880059514458977c8931e27063a8d"}, + {file = "pygit2-1.14.1-cp310-cp310-win32.whl", hash = "sha256:793f49ce66640d41d977e1337ddb5dec9b3b4ff818040d78d3ded052e1ea52e6"}, + {file = "pygit2-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:46ae2149851d5da2934e27c9ac45c375d04af1e549f8c4cbb4e9e4de5f43dc42"}, + {file = "pygit2-1.14.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f5a87744e6c36f03fe488b975c73d3eaef22eadce433152516a2b8dbc4015233"}, + {file = "pygit2-1.14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fff3d1aaf1d7372757888c4620347d6ad8b1b3a637b30a3abd156da7cf9476b"}, + {file = "pygit2-1.14.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc3326a5ce891ef26429ae6d4290acb80ea0064947b4184a4c4940b4bd6ab4a3"}, + {file = "pygit2-1.14.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:15db91695259f672f8be3080eb943889f7c8bdc5fbd8b89555e0c53ba2481f15"}, + {file = "pygit2-1.14.1-cp311-cp311-win32.whl", hash = "sha256:a03de11ba5205628996d867280e5181605009c966c801dbb94781bed55b740d7"}, + {file = "pygit2-1.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:9d96e46b94dc706e6316e6cc293c0a0692e5b0811a6f8f2738728a4a68d7a827"}, + {file = "pygit2-1.14.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8589c8c0005b5ba373b3b101f903d4451338f3dfc09f8a38c76da6584fef84d0"}, + {file = "pygit2-1.14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4f371c4b7ee86c0a751209fac7c941d1f6a3aca6af89ac09481469dbe0ea1cc"}, + {file = "pygit2-1.14.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2378f9a70cea27809a2c78b823e22659691a91db9d81b1f3a58d537067815ac"}, + {file = "pygit2-1.14.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:acb849cea89438192e78eea91a27fb9c54c7286a82aac65a3f746ea8c498fedb"}, + {file = "pygit2-1.14.1-cp312-cp312-win32.whl", hash = "sha256:11058be23a5d6c1308303fd450d690eada117c564154634d81676e66530056be"}, + {file = "pygit2-1.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:67b6e5911101dc5ecb679bf241c0b9ee2099f4d76aa0ad66b326400cb4590afa"}, + {file = "pygit2-1.14.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c22027f748d125698964ed696406075dac85f114e01d50547e67053c1bb03308"}, + {file = "pygit2-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b6d1202d6a0c21281d2697321292aff9e2e2e195d6ce553efcdf86c2de2af1a"}, + {file = "pygit2-1.14.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:230493d43945e10365070d349da206d39cc885ae8c52fdeca93942f36661dd93"}, + {file = "pygit2-1.14.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:760614370fcce4e9606ff675d6fc11165badb59aaedc2ea6cb2e7ec1855616c2"}, + {file = "pygit2-1.14.1-cp39-cp39-win32.whl", hash = "sha256:acc7be8a439274fc6227e33b63b9ec83cd51fa210ab898eaadffb7bf930c0087"}, + {file = "pygit2-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:ed16f2bc8ca9c42af8adb967c73227b1de973e9c4d717bd738fb2f177890ca2c"}, + {file = "pygit2-1.14.1.tar.gz", hash = "sha256:ec5958571b82a6351785ca645e5394c31ae45eec5384b2fa9c4e05dde3597ad6"}, ] [package.dependencies] @@ -1783,23 +1794,23 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pyinstaller" -version = "6.3.0" +version = "6.5.0" description = "PyInstaller bundles a Python application and all its dependencies into a single package." optional = false python-versions = "<3.13,>=3.8" files = [ - {file = "pyinstaller-6.3.0-py3-none-macosx_10_13_universal2.whl", hash = "sha256:75a6f2a6f835a2e6e0899d10e60c10caf5defd25aced38b1dd48fbbabc89de07"}, - {file = "pyinstaller-6.3.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:de25beb176f73a944758553caacec46cc665bf3910ad8a174706d79cf6e95340"}, - {file = "pyinstaller-6.3.0-py3-none-manylinux2014_i686.whl", hash = "sha256:e436fcc0ea87c3f132baac916d508c24c84a8f6d8a06c3154fbc753f169b76c7"}, - {file = "pyinstaller-6.3.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:b721d793a33b6d9946c7dd95d3ea7589c0424b51cf1b9fe580f03c544f1336b2"}, - {file = "pyinstaller-6.3.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:96c37a1ee5b2fd5bb25c098ef510661d6d17b6515d0b86d8fc93727dd2475ba3"}, - {file = "pyinstaller-6.3.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:abe91106a3bbccc3f3a27af4325676ecdb6f46cb842ac663625002a870fc503b"}, - {file = "pyinstaller-6.3.0-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:41c937fe8f07ae02009b3b5a96ac3eb0800a4f8a97af142d4100060fe2135bb9"}, - {file = "pyinstaller-6.3.0-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:886b3b995b674905a20ad5b720b47cc395897d7b391117831027a4c8c5d67a58"}, - {file = "pyinstaller-6.3.0-py3-none-win32.whl", hash = "sha256:0597fb04337695e5cc5250253e0655530bf14f264b7a5b7d219cc65f6889c4bd"}, - {file = "pyinstaller-6.3.0-py3-none-win_amd64.whl", hash = "sha256:156b32ba943e0090bcc68e40ae1cb68fd92b7f1ab6fe0bdf8faf3d3cfc4e12dd"}, - {file = "pyinstaller-6.3.0-py3-none-win_arm64.whl", hash = "sha256:1eadbd1fae84e2e6c678d8b4ed6a232ec5c8fe3a839aea5a3071c4c0282f98cc"}, - {file = "pyinstaller-6.3.0.tar.gz", hash = "sha256:914d4c96cc99472e37ac552fdd82fbbe09e67bb592d0717fcffaa99ea74273df"}, + {file = "pyinstaller-6.5.0-py3-none-macosx_10_13_universal2.whl", hash = "sha256:81ec15c0deb8c7a0f95bea85b49eecc2df1bdeaf5fe487a41d97de6b0ad29dff"}, + {file = "pyinstaller-6.5.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:5f432f3fdef053989e0a44134e483131c533dab7637e6afd80c3f7c26e6dbcc9"}, + {file = "pyinstaller-6.5.0-py3-none-manylinux2014_i686.whl", hash = "sha256:6ffd76a0194dac4df5e66dcfccc7b597f3eaa40ef9a3f63548f260aa2c187512"}, + {file = "pyinstaller-6.5.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:a54968df2228f0128607b1dced41bbff94149d459987fb5cd1a41893e9bb85df"}, + {file = "pyinstaller-6.5.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:0dae0edbe6d667b6b0ccd8c97a148f86474a82da7ce582296f9025f4c7242ec6"}, + {file = "pyinstaller-6.5.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:7c76bfcb624803c311fa8fb137e4780d0ec86d11b7d90a8f43f185e2554afdcc"}, + {file = "pyinstaller-6.5.0-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:6cfee8a74ea2d3a1dc8e99e732a87b314739dc14363778143caac31f8aee9039"}, + {file = "pyinstaller-6.5.0-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:9d828213aea5401bb33a36ca396f8dc76a59a25bce1d76a13c9ad94ba29fbe42"}, + {file = "pyinstaller-6.5.0-py3-none-win32.whl", hash = "sha256:61865eee5e0d8f8252722f6d001baec497b7cee79ebe62c33a6ba86ba0c7010d"}, + {file = "pyinstaller-6.5.0-py3-none-win_amd64.whl", hash = "sha256:e1266498893ce1d6cc7337e8d2acbf7905a10ed2b7c8377270117d6b7b922fc4"}, + {file = "pyinstaller-6.5.0-py3-none-win_arm64.whl", hash = "sha256:1b3b7d6d3b18d76a833fd5a4d7f4544c5e2c2a4db4a728ea191e62f69d5cc33c"}, + {file = "pyinstaller-6.5.0.tar.gz", hash = "sha256:b1e55113c5a40cb7041c908a57f212f3ebd3e444dbb245ca2f91d86a76dabec5"}, ] [package.dependencies] @@ -1807,7 +1818,7 @@ altgraph = "*" macholib = {version = ">=1.8", markers = "sys_platform == \"darwin\""} packaging = ">=22.0" pefile = {version = ">=2022.5.30", markers = "sys_platform == \"win32\""} -pyinstaller-hooks-contrib = ">=2021.4" +pyinstaller-hooks-contrib = ">=2024.3" pywin32-ctypes = {version = ">=0.2.1", markers = "sys_platform == \"win32\""} setuptools = ">=42.0.0" @@ -1817,13 +1828,13 @@ hook-testing = ["execnet (>=1.5.0)", "psutil", "pytest (>=2.7.3)"] [[package]] name = "pyinstaller-hooks-contrib" -version = "2024.0" +version = "2024.3" description = "Community maintained hooks for PyInstaller" optional = false python-versions = ">=3.7" files = [ - {file = "pyinstaller-hooks-contrib-2024.0.tar.gz", hash = "sha256:a7118c1a5c9788595e5c43ad058a7a5b7b6d59e1eceb42362f6ec1f0b61986b0"}, - {file = "pyinstaller_hooks_contrib-2024.0-py2.py3-none-any.whl", hash = "sha256:469b5690df53223e2e8abffb2e44d6ee596e7d79d4b1eed9465123b67439875a"}, + {file = "pyinstaller-hooks-contrib-2024.3.tar.gz", hash = "sha256:d18657c29267c63563a96b8fc78db6ba9ae40af6702acb2f8c871df12c75b60b"}, + {file = "pyinstaller_hooks_contrib-2024.3-py2.py3-none-any.whl", hash = "sha256:6701752d525e1f4eda1eaec2c2affc206171e15c7a4e188a152fcf3ed3308024"}, ] [package.dependencies] @@ -1878,27 +1889,25 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] [[package]] name = "pytest" -version = "6.2.5" +version = "8.1.1" description = "pytest: simple powerful testing with Python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, - {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, + {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"}, + {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"}, ] [package.dependencies] -atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} -attrs = ">=19.2.0" colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" -pluggy = ">=0.12,<2.0" -py = ">=1.8.2" -toml = "*" +pluggy = ">=1.4,<2.0" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] +testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-cov" @@ -1920,30 +1929,30 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale [[package]] name = "pytest-mock" -version = "3.12.0" +version = "3.14.0" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, - {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, ] [package.dependencies] -pytest = ">=5.0" +pytest = ">=6.2.5" [package.extras] dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -2006,7 +2015,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -2014,15 +2022,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -2039,7 +2040,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -2047,7 +2047,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -2055,13 +2054,13 @@ files = [ [[package]] name = "readchar" -version = "4.0.5" +version = "4.0.6" description = "Library to easily read single chars and key strokes" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "readchar-4.0.5-py3-none-any.whl", hash = "sha256:76ec784a5dd2afac3b7da8003329834cdd9824294c260027f8c8d2e4d0a78f43"}, - {file = "readchar-4.0.5.tar.gz", hash = "sha256:08a456c2d7c1888cde3f4688b542621b676eb38cd6cfed7eb6cb2e2905ddc826"}, + {file = "readchar-4.0.6-py3-none-any.whl", hash = "sha256:b4b31dd35de4897be738f27e8f9f62426b5fedb54b648364987e30ae534b71bc"}, + {file = "readchar-4.0.6.tar.gz", hash = "sha256:e0dae942d3a746f8d5423f83dbad67efe704004baafe31b626477929faaee472"}, ] [package.dependencies] @@ -2069,20 +2068,20 @@ setuptools = ">=41.0" [[package]] name = "requests" -version = "2.28.2" +version = "2.31.0" description = "Python HTTP for Humans." optional = false -python-versions = ">=3.7, <4" +python-versions = ">=3.7" files = [ - {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"}, - {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"}, + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, ] [package.dependencies] certifi = ">=2017.4.17" charset-normalizer = ">=2,<4" idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<1.27" +urllib3 = ">=1.21.1,<3" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] @@ -2090,13 +2089,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rich" -version = "13.7.0" +version = "13.7.1" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.7.0" files = [ - {file = "rich-13.7.0-py3-none-any.whl", hash = "sha256:6da14c108c4866ee9520bbffa71f6fe3962e193b7da68720583850cd4548e235"}, - {file = "rich-13.7.0.tar.gz", hash = "sha256:5cb5123b5cf9ee70584244246816e9114227e0b98ad9176eede6ad54bf5403fa"}, + {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, + {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"}, ] [package.dependencies] @@ -2162,12 +2161,13 @@ xmod = "*" [[package]] name = "segment-analytics-python" -version = "2.3.1" +version = "2.3.2" description = "The hassle-free way to integrate analytics into any python application." optional = false python-versions = ">=3.6.0" files = [ - {file = "segment_analytics_python-2.3.1-py2.py3-none-any.whl", hash = "sha256:b5d415247f983e8698de7e094f141cf48f9098b49cc95e108c5bf1e08127d636"}, + {file = "segment-analytics-python-2.3.2.tar.gz", hash = "sha256:9321b1e03b0129fa69edba0b38c63c2de229db91abe7f849e3df015b8fbc1c36"}, + {file = "segment_analytics_python-2.3.2-py2.py3-none-any.whl", hash = "sha256:0ba881e019c396f17b4e0a66117691a189a555bc13da47de69cb8db8e3adecad"}, ] [package.dependencies] @@ -2192,13 +2192,13 @@ files = [ [[package]] name = "sentry-sdk" -version = "1.40.2" +version = "1.44.0" description = "Python client for Sentry (https://sentry.io)" optional = false python-versions = "*" files = [ - {file = "sentry-sdk-1.40.2.tar.gz", hash = "sha256:c98c8e9bb4dc8ff1e67473caf6467acfccf915dadcc26d0efb0d6791a8652610"}, - {file = "sentry_sdk-1.40.2-py2.py3-none-any.whl", hash = "sha256:696ef61a323a207e6a20b018ddc6591adb81c671434c88d1a4f2e95ffa75556c"}, + {file = "sentry-sdk-1.44.0.tar.gz", hash = "sha256:f7125a9235795811962d52ff796dc032cd1d0dd98b59beaced8380371cd9c13c"}, + {file = "sentry_sdk-1.44.0-py2.py3-none-any.whl", hash = "sha256:eb65289da013ca92fad2694851ad2f086aa3825e808dc285bd7dcaf63602bb18"}, ] [package.dependencies] @@ -2212,6 +2212,7 @@ asyncpg = ["asyncpg (>=0.23)"] beam = ["apache-beam (>=2.12)"] bottle = ["bottle (>=0.12.13)"] celery = ["celery (>=3)"] +celery-redbeat = ["celery-redbeat (>=2)"] chalice = ["chalice (>=1.16.0)"] clickhouse-driver = ["clickhouse-driver (>=0.2.0)"] django = ["django (>=1.8)"] @@ -2222,9 +2223,10 @@ grpcio = ["grpcio (>=1.21.1)"] httpx = ["httpx (>=0.16.0)"] huey = ["huey (>=2)"] loguru = ["loguru (>=0.5)"] +openai = ["openai (>=1.0.0)", "tiktoken (>=0.3.0)"] opentelemetry = ["opentelemetry-distro (>=0.35b0)"] opentelemetry-experimental = ["opentelemetry-distro (>=0.40b0,<1.0)", "opentelemetry-instrumentation-aiohttp-client (>=0.40b0,<1.0)", "opentelemetry-instrumentation-django (>=0.40b0,<1.0)", "opentelemetry-instrumentation-fastapi (>=0.40b0,<1.0)", "opentelemetry-instrumentation-flask (>=0.40b0,<1.0)", "opentelemetry-instrumentation-requests (>=0.40b0,<1.0)", "opentelemetry-instrumentation-sqlite3 (>=0.40b0,<1.0)", "opentelemetry-instrumentation-urllib (>=0.40b0,<1.0)"] -pure-eval = ["asttokens", "executing", "pure_eval"] +pure-eval = ["asttokens", "executing", "pure-eval"] pymongo = ["pymongo (>=3.1)"] pyspark = ["pyspark (>=2.4.4)"] quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] @@ -2237,19 +2239,19 @@ tornado = ["tornado (>=5)"] [[package]] name = "setuptools" -version = "69.0.3" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, - {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "simpleeval" @@ -2286,13 +2288,13 @@ files = [ [[package]] name = "sniffio" -version = "1.3.0" +version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" files = [ - {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, - {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] [[package]] @@ -2330,38 +2332,27 @@ files = [ [[package]] name = "types-requests" -version = "2.28.2" +version = "2.31.0.20240311" description = "Typing stubs for requests" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "types-requests-2.28.2.tar.gz", hash = "sha256:398f88cd9302c796cb63d1021af2a1fb7ae507741a3d508edf8e0746d8c16a04"}, - {file = "types_requests-2.28.2-py3-none-any.whl", hash = "sha256:c164696bfdce0123901165c5f097a6cc4f6326268c65815d4b6a57eacfec5e81"}, + {file = "types-requests-2.31.0.20240311.tar.gz", hash = "sha256:b1c1b66abfb7fa79aae09097a811c4aa97130eb8831c60e47aee4ca344731ca5"}, + {file = "types_requests-2.31.0.20240311-py3-none-any.whl", hash = "sha256:47872893d65a38e282ee9f277a4ee50d1b28bd592040df7d1fdaffdf3779937d"}, ] [package.dependencies] -types-urllib3 = "<1.27" +urllib3 = ">=2" [[package]] name = "types-toml" -version = "0.10.8.7" +version = "0.10.8.20240310" description = "Typing stubs for toml" optional = false -python-versions = "*" -files = [ - {file = "types-toml-0.10.8.7.tar.gz", hash = "sha256:58b0781c681e671ff0b5c0319309910689f4ab40e8a2431e205d70c94bb6efb1"}, - {file = "types_toml-0.10.8.7-py3-none-any.whl", hash = "sha256:61951da6ad410794c97bec035d59376ce1cbf4453dc9b6f90477e81e4442d631"}, -] - -[[package]] -name = "types-urllib3" -version = "1.26.25.14" -description = "Typing stubs for urllib3" -optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "types-urllib3-1.26.25.14.tar.gz", hash = "sha256:229b7f577c951b8c1b92c1bc2b2fdb0b49847bd2af6d1cc2a2e3dd340f3bda8f"}, - {file = "types_urllib3-1.26.25.14-py3-none-any.whl", hash = "sha256:9683bbb7fb72e32bfe9d2be6e04875fbe1b3eeec3cbb4ea231435aa7fd6b4f0e"}, + {file = "types-toml-0.10.8.20240310.tar.gz", hash = "sha256:3d41501302972436a6b8b239c850b26689657e25281b48ff0ec06345b8830331"}, + {file = "types_toml-0.10.8.20240310-py3-none-any.whl", hash = "sha256:627b47775d25fa29977d9c70dc0cbab3f314f32c8d8d0c012f2ef5de7aaec05d"}, ] [[package]] @@ -2377,41 +2368,42 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] name = "tzdata" -version = "2023.4" +version = "2024.1" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" files = [ - {file = "tzdata-2023.4-py2.py3-none-any.whl", hash = "sha256:aa3ace4329eeacda5b7beb7ea08ece826c28d761cda36e747cfbf97996d39bf3"}, - {file = "tzdata-2023.4.tar.gz", hash = "sha256:dd54c94f294765522c77399649b4fefd95522479a664a0cec87f41bebc6148c9"}, + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, ] [[package]] name = "urllib3" -version = "1.26.18" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = ">=3.8" files = [ - {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"}, - {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] -brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] [[package]] name = "wcwidth" @@ -2424,22 +2416,6 @@ files = [ {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, ] -[[package]] -name = "websocket-client" -version = "1.7.0" -description = "WebSocket client for Python with low level API options" -optional = false -python-versions = ">=3.8" -files = [ - {file = "websocket-client-1.7.0.tar.gz", hash = "sha256:10e511ea3a8c744631d3bd77e61eb17ed09304c413ad42cf6ddfa4c7787e8fe6"}, - {file = "websocket_client-1.7.0-py3-none-any.whl", hash = "sha256:f4c3d22fec12a2461427a29957ff07d35098ee2d976d3ba244e688b8b4057588"}, -] - -[package.extras] -docs = ["Sphinx (>=6.0)", "sphinx-rtd-theme (>=1.1.0)"] -optional = ["python-socks", "wsaccel"] -test = ["websockets"] - [[package]] name = "wrapt" version = "1.16.0" @@ -2647,4 +2623,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "~3.10" -content-hash = "f1c22f429416235274b390c130d1c02b046ee94bcbeb206b2322009cf240fdcf" +content-hash = "c7213e905b1cc43466c064e51ee52701c9fbaa9dbce07483cb1ad53185f38330" diff --git a/airbyte-ci/connectors/pipelines/pyproject.toml b/airbyte-ci/connectors/pipelines/pyproject.toml index d79ebc3ad1807..e1b420315bcd7 100644 --- a/airbyte-ci/connectors/pipelines/pyproject.toml +++ b/airbyte-ci/connectors/pipelines/pyproject.toml @@ -4,32 +4,33 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pipelines" -version = "4.5.2" +version = "4.7.3" description = "Packaged maintained by the connector operations team to perform CI for connectors' pipelines" authors = ["Airbyte "] [tool.poetry.dependencies] python = "~3.10" dagger-io = "==0.9.6" +beartype = "<0.17.2" # dagger 0.9.6 doesn't pin this but doesn't play well with it. We should probably upgrade dagger asyncer = "^0.0.2" anyio = "^3.4.1" more-itertools = "^8.11.0" -docker = "^6.0.0" -semver = "^3.0.1" +docker = "^7" +semver = "^3" airbyte-protocol-models = "*" jinja2 = "^3.0.2" -requests = "2.28.2" # Pinned as the requests 2.29.0 version is not compatible with the docker package +requests = "^2.31" airbyte-connectors-base-images = {path = "../base_images", develop = true} connector-ops = {path = "../connector_ops", develop = true} toml = "^0.10.2" +types-requests = "^2.31" sentry-sdk = "^1.28.1" segment-analytics-python = "^2.2.3" pygit2 = "^1.13.1" asyncclick = "^8.1.3.4" -certifi = "^2023.11.17" +certifi = ">=2024" tomli = "^2.0.1" tomli-w = "^1.0.0" -types-requests = "2.28.2" dpath = "^2.1.6" xmltodict = "^0.13.0" @@ -38,12 +39,12 @@ freezegun = "^1.2.2" pytest-cov = "^4.1.0" pyinstaller = "^6.1.0" poethepoet = "^0.24.2" -pytest = "^6.2.5" +pytest = "^8" pytest-mock = "^3.10.0" -mypy = "^1.7.1" +mypy = "^1.9" ruff = "^0.1.9" types-toml = "^0.10.8" -types-requests = "2.28.2" +types-requests = "^2.31" types-xmltodict = "^0.13.0" [tool.poetry.scripts] @@ -62,6 +63,6 @@ type_check = "mypy pipelines --disallow-untyped-defs" lint = "ruff check pipelines" [tool.airbyte_ci] -extra_poetry_groups = ["dev"] +optional_poetry_groups = ["dev"] poe_tasks = ["type_check", "lint", "test"] mount_docker_socket = true diff --git a/airbyte-ci/connectors/pipelines/tests/test_changelog.py b/airbyte-ci/connectors/pipelines/tests/test_changelog.py new file mode 100644 index 0000000000000..dcb54b47eedfd --- /dev/null +++ b/airbyte-ci/connectors/pipelines/tests/test_changelog.py @@ -0,0 +1,102 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +import datetime +import difflib +from pathlib import Path + +import pytest +import semver +from pipelines.helpers.changelog import Changelog, ChangelogParsingException + +pytestmark = [ + pytest.mark.anyio, +] + +PATH_TO_INITIAL_FILES = Path("airbyte-ci/connectors/pipelines/tests/test_changelog/initial_files") +PATH_TO_RESULT_FILES = Path("airbyte-ci/connectors/pipelines/tests/test_changelog/result_files") +WRITE_TO_RESULT_FILE = False + + +def check_result(changelog: Changelog, result_filename: str): + markdown = changelog.to_markdown() + result_filepath = PATH_TO_RESULT_FILES / result_filename + if not result_filepath.exists(): + expected_text = "" + else: + expected_text = result_filepath.read_text() + diff = "".join(difflib.unified_diff(expected_text.splitlines(1), markdown.splitlines(1))) + if WRITE_TO_RESULT_FILE: + result_file = open(result_filepath, "w") + result_file.write(markdown) + result_file.close() + assert diff == "" + + +def get_changelog(filename: str) -> Changelog: + filepath = PATH_TO_INITIAL_FILES / filename + return Changelog(open(filepath).read()) + + +@pytest.mark.parametrize("filename", ["valid_changelog_at_end.md", "valid_changelog_in_middle.md"]) +def test_single_insert(dagger_client, filename): + changelog = get_changelog(filename) + changelog.add_entry(semver.VersionInfo.parse("3.4.0"), datetime.date.fromisoformat("2024-03-01"), 123456, "test") + check_result(changelog, "single_insert_" + filename) + + +@pytest.mark.parametrize("filename", ["valid_changelog_at_end.md", "valid_changelog_in_middle.md"]) +def test_insert_duplicate_versions(dagger_client, filename): + changelog = get_changelog(filename) + changelog.add_entry(semver.VersionInfo.parse("3.4.0"), datetime.date.fromisoformat("2024-03-01"), 123456, "test1") + changelog.add_entry(semver.VersionInfo.parse("3.4.0"), datetime.date.fromisoformat("2024-03-02"), 123457, "test2") + check_result(changelog, "dupicate_versions_" + filename) + + +@pytest.mark.parametrize("filename", ["valid_changelog_at_end.md", "valid_changelog_in_middle.md"]) +def test_insert_duplicate_version_date(dagger_client, filename): + changelog = get_changelog(filename) + changelog.add_entry(semver.VersionInfo.parse("3.4.0"), datetime.date.fromisoformat("2024-03-01"), 123456, "test1") + changelog.add_entry(semver.VersionInfo.parse("3.4.0"), datetime.date.fromisoformat("2024-03-01"), 123457, "test2") + check_result(changelog, "dupicate_version_date_" + filename) + + +@pytest.mark.parametrize("filename", ["valid_changelog_at_end.md", "valid_changelog_in_middle.md"]) +def test_insert_duplicate_entries(dagger_client, filename): + changelog = get_changelog(filename) + changelog.add_entry(semver.VersionInfo.parse("3.4.0"), datetime.date.fromisoformat("2024-03-01"), 123456, "test") + changelog.add_entry(semver.VersionInfo.parse("3.4.0"), datetime.date.fromisoformat("2024-03-01"), 123456, "test") + check_result(changelog, "duplicate_entry_" + filename) + + +@pytest.mark.parametrize("filename", ["valid_changelog_at_end.md", "valid_changelog_in_middle.md"]) +def test_insert_existing_entries(dagger_client, filename): + changelog = get_changelog(filename) + changelog.add_entry(semver.VersionInfo.parse("3.3.3"), datetime.date.fromisoformat("2024-01-26"), 34573, "Adopt CDK v0.16.0") + changelog.add_entry( + semver.VersionInfo.parse("3.3.2"), + datetime.date.fromisoformat("2024-01-24"), + 34465, + "Check xmin only if user selects xmin sync mode.", + ) + check_result(changelog, "existing_entries_" + filename) + + +@pytest.mark.parametrize("filename", ["no_changelog_header.md", "changelog_header_no_separator.md", "changelog_header_no_newline.md"]) +def test_failure(dagger_client, filename): + try: + get_changelog(filename) + assert False + except ChangelogParsingException as e: + result_filepath = PATH_TO_RESULT_FILES / filename + if not result_filepath.exists(): + expected_text = "" + else: + expected_text = result_filepath.read_text() + diff = "\n".join(difflib.unified_diff(expected_text.splitlines(), str(e).splitlines())) + if WRITE_TO_RESULT_FILE: + result_file = open(result_filepath, "w") + result_file.write(str(e)) + result_file.close() + assert diff == "" diff --git a/airbyte-ci/connectors/pipelines/tests/test_changelog/initial_files/changelog_header_no_newline.md b/airbyte-ci/connectors/pipelines/tests/test_changelog/initial_files/changelog_header_no_newline.md new file mode 100644 index 0000000000000..91dbd6fe3bc3e --- /dev/null +++ b/airbyte-ci/connectors/pipelines/tests/test_changelog/initial_files/changelog_header_no_newline.md @@ -0,0 +1,10 @@ +# Postgres + +Airbyte's certified Postgres connector offers the following features: +* Replicate data from tables, views and materilized views. Other data objects won't be replicated to the destination like indexes, permissions. +| Version | Date | Pull Request | Subject | +|---------|------------|----------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 3.3.3 | 2024-01-26 | [34573](https://github.com/airbytehq/airbyte/pull/34573) | Adopt CDK v0.16.0 | +| 3.3.2 | 2024-01-24 | [34465](https://github.com/airbytehq/airbyte/pull/34465) | Check xmin only if user selects xmin sync mode. | +| 3.3.1 | 2024-01-10 | [34119](https://github.com/airbytehq/airbyte/pull/34119) | Adopt java CDK version 0.11.5. | +| 3.3.0 | 2023-12-19 | [33437](https://github.com/airbytehq/airbyte/pull/33437) | Remove LEGACY state flag | | diff --git a/airbyte-ci/connectors/pipelines/tests/test_changelog/initial_files/changelog_header_no_separator.md b/airbyte-ci/connectors/pipelines/tests/test_changelog/initial_files/changelog_header_no_separator.md new file mode 100644 index 0000000000000..0bb5c1c587453 --- /dev/null +++ b/airbyte-ci/connectors/pipelines/tests/test_changelog/initial_files/changelog_header_no_separator.md @@ -0,0 +1,10 @@ +# Postgres + +Airbyte's certified Postgres connector offers the following features: +* Replicate data from tables, views and materilized views. Other data objects won't be replicated to the destination like indexes, permissions. + +| Version | Date | Pull Request | Subject | +| 3.3.3 | 2024-01-26 | [34573](https://github.com/airbytehq/airbyte/pull/34573) | Adopt CDK v0.16.0 | +| 3.3.2 | 2024-01-24 | [34465](https://github.com/airbytehq/airbyte/pull/34465) | Check xmin only if user selects xmin sync mode. | +| 3.3.1 | 2024-01-10 | [34119](https://github.com/airbytehq/airbyte/pull/34119) | Adopt java CDK version 0.11.5. | +| 3.3.0 | 2023-12-19 | [33437](https://github.com/airbytehq/airbyte/pull/33437) | Remove LEGACY state flag | | diff --git a/airbyte-ci/connectors/pipelines/tests/test_changelog/initial_files/no_changelog_header.md b/airbyte-ci/connectors/pipelines/tests/test_changelog/initial_files/no_changelog_header.md new file mode 100644 index 0000000000000..e8dc6156152ad --- /dev/null +++ b/airbyte-ci/connectors/pipelines/tests/test_changelog/initial_files/no_changelog_header.md @@ -0,0 +1,10 @@ +# Postgres + +Airbyte's certified Postgres connector offers the following features: +* Replicate data from tables, views and materilized views. Other data objects won't be replicated to the destination like indexes, permissions. + +|---------|------------|----------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 3.3.3 | 2024-01-26 | [34573](https://github.com/airbytehq/airbyte/pull/34573) | Adopt CDK v0.16.0 | +| 3.3.2 | 2024-01-24 | [34465](https://github.com/airbytehq/airbyte/pull/34465) | Check xmin only if user selects xmin sync mode. | +| 3.3.1 | 2024-01-10 | [34119](https://github.com/airbytehq/airbyte/pull/34119) | Adopt java CDK version 0.11.5. | +| 3.3.0 | 2023-12-19 | [33437](https://github.com/airbytehq/airbyte/pull/33437) | Remove LEGACY state flag | | diff --git a/airbyte-ci/connectors/pipelines/tests/test_changelog/initial_files/valid_changelog_at_end.md b/airbyte-ci/connectors/pipelines/tests/test_changelog/initial_files/valid_changelog_at_end.md new file mode 100644 index 0000000000000..954709e5679f2 --- /dev/null +++ b/airbyte-ci/connectors/pipelines/tests/test_changelog/initial_files/valid_changelog_at_end.md @@ -0,0 +1,11 @@ +# Postgres + +Airbyte's certified Postgres connector offers the following features: +* Replicate data from tables, views and materilized views. Other data objects won't be replicated to the destination like indexes, permissions. + +| Version | Date | Pull Request | Subject | +|---------|------------|----------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 3.3.3 | 2024-01-26 | [34573](https://github.com/airbytehq/airbyte/pull/34573) | Adopt CDK v0.16.0 | +| 3.3.2 | 2024-01-24 | [34465](https://github.com/airbytehq/airbyte/pull/34465) | Check xmin only if user selects xmin sync mode. | +| 3.3.1 | 2024-01-10 | [34119](https://github.com/airbytehq/airbyte/pull/34119) | Adopt java CDK version 0.11.5. | +| 3.3.0 | 2023-12-19 | [33437](https://github.com/airbytehq/airbyte/pull/33437) | Remove LEGACY state flag | | diff --git a/airbyte-ci/connectors/pipelines/tests/test_changelog/initial_files/valid_changelog_in_middle.md b/airbyte-ci/connectors/pipelines/tests/test_changelog/initial_files/valid_changelog_in_middle.md new file mode 100644 index 0000000000000..91d499c5180ab --- /dev/null +++ b/airbyte-ci/connectors/pipelines/tests/test_changelog/initial_files/valid_changelog_in_middle.md @@ -0,0 +1,12 @@ +# Postgres + +Airbyte's certified Postgres connector offers the following features: +* Replicate data from tables, views and materilized views. Other data objects won't be replicated to the destination like indexes, permissions. + +| Version | Date | Pull Request | Subject | +|---------|------------|----------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 3.3.3 | 2024-01-26 | [34573](https://github.com/airbytehq/airbyte/pull/34573) | Adopt CDK v0.16.0 | +| 3.3.2 | 2024-01-24 | [34465](https://github.com/airbytehq/airbyte/pull/34465) | Check xmin only if user selects xmin sync mode. | +| 3.3.1 | 2024-01-10 | [34119](https://github.com/airbytehq/airbyte/pull/34119) | Adopt java CDK version 0.11.5. | +| 3.3.0 | 2023-12-19 | [33437](https://github.com/airbytehq/airbyte/pull/33437) | Remove LEGACY state flag | | +Laurem Ipsum blah blah \ No newline at end of file diff --git a/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/changelog_header_no_newline.md b/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/changelog_header_no_newline.md new file mode 100644 index 0000000000000..922753a1706d3 --- /dev/null +++ b/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/changelog_header_no_newline.md @@ -0,0 +1 @@ +Found changelog section table in the documentation file at line but there is not blank line before it. \ No newline at end of file diff --git a/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/changelog_header_no_separator.md b/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/changelog_header_no_separator.md new file mode 100644 index 0000000000000..06e63e850542c --- /dev/null +++ b/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/changelog_header_no_separator.md @@ -0,0 +1 @@ +The changelog table in the documentation file is missing the header delimiter. \ No newline at end of file diff --git a/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/dupicate_version_date_valid_changelog_at_end.md b/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/dupicate_version_date_valid_changelog_at_end.md new file mode 100644 index 0000000000000..ec82a0c5ea46d --- /dev/null +++ b/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/dupicate_version_date_valid_changelog_at_end.md @@ -0,0 +1,13 @@ +# Postgres + +Airbyte's certified Postgres connector offers the following features: +* Replicate data from tables, views and materilized views. Other data objects won't be replicated to the destination like indexes, permissions. + +| Version | Date | Pull Request | Subject | +|---------|------------|----------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 3.4.0 | 2024-03-01 | [123457](https://github.com/airbytehq/airbyte/pull/123457) | test2 | +| 3.4.0 | 2024-03-01 | [123456](https://github.com/airbytehq/airbyte/pull/123456) | test1 | +| 3.3.3 | 2024-01-26 | [34573](https://github.com/airbytehq/airbyte/pull/34573) | Adopt CDK v0.16.0 | +| 3.3.2 | 2024-01-24 | [34465](https://github.com/airbytehq/airbyte/pull/34465) | Check xmin only if user selects xmin sync mode. | +| 3.3.1 | 2024-01-10 | [34119](https://github.com/airbytehq/airbyte/pull/34119) | Adopt java CDK version 0.11.5. | +| 3.3.0 | 2023-12-19 | [33437](https://github.com/airbytehq/airbyte/pull/33437) | Remove LEGACY state flag | | diff --git a/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/dupicate_version_date_valid_changelog_in_middle.md b/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/dupicate_version_date_valid_changelog_in_middle.md new file mode 100644 index 0000000000000..a2d9a31677826 --- /dev/null +++ b/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/dupicate_version_date_valid_changelog_in_middle.md @@ -0,0 +1,14 @@ +# Postgres + +Airbyte's certified Postgres connector offers the following features: +* Replicate data from tables, views and materilized views. Other data objects won't be replicated to the destination like indexes, permissions. + +| Version | Date | Pull Request | Subject | +|---------|------------|----------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 3.4.0 | 2024-03-01 | [123457](https://github.com/airbytehq/airbyte/pull/123457) | test2 | +| 3.4.0 | 2024-03-01 | [123456](https://github.com/airbytehq/airbyte/pull/123456) | test1 | +| 3.3.3 | 2024-01-26 | [34573](https://github.com/airbytehq/airbyte/pull/34573) | Adopt CDK v0.16.0 | +| 3.3.2 | 2024-01-24 | [34465](https://github.com/airbytehq/airbyte/pull/34465) | Check xmin only if user selects xmin sync mode. | +| 3.3.1 | 2024-01-10 | [34119](https://github.com/airbytehq/airbyte/pull/34119) | Adopt java CDK version 0.11.5. | +| 3.3.0 | 2023-12-19 | [33437](https://github.com/airbytehq/airbyte/pull/33437) | Remove LEGACY state flag | | +Laurem Ipsum blah blah diff --git a/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/dupicate_versions_valid_changelog_at_end.md b/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/dupicate_versions_valid_changelog_at_end.md new file mode 100644 index 0000000000000..843738afdc36a --- /dev/null +++ b/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/dupicate_versions_valid_changelog_at_end.md @@ -0,0 +1,13 @@ +# Postgres + +Airbyte's certified Postgres connector offers the following features: +* Replicate data from tables, views and materilized views. Other data objects won't be replicated to the destination like indexes, permissions. + +| Version | Date | Pull Request | Subject | +|---------|------------|----------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 3.4.0 | 2024-03-02 | [123457](https://github.com/airbytehq/airbyte/pull/123457) | test2 | +| 3.4.0 | 2024-03-01 | [123456](https://github.com/airbytehq/airbyte/pull/123456) | test1 | +| 3.3.3 | 2024-01-26 | [34573](https://github.com/airbytehq/airbyte/pull/34573) | Adopt CDK v0.16.0 | +| 3.3.2 | 2024-01-24 | [34465](https://github.com/airbytehq/airbyte/pull/34465) | Check xmin only if user selects xmin sync mode. | +| 3.3.1 | 2024-01-10 | [34119](https://github.com/airbytehq/airbyte/pull/34119) | Adopt java CDK version 0.11.5. | +| 3.3.0 | 2023-12-19 | [33437](https://github.com/airbytehq/airbyte/pull/33437) | Remove LEGACY state flag | | diff --git a/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/dupicate_versions_valid_changelog_in_middle.md b/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/dupicate_versions_valid_changelog_in_middle.md new file mode 100644 index 0000000000000..2e22f1999945d --- /dev/null +++ b/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/dupicate_versions_valid_changelog_in_middle.md @@ -0,0 +1,14 @@ +# Postgres + +Airbyte's certified Postgres connector offers the following features: +* Replicate data from tables, views and materilized views. Other data objects won't be replicated to the destination like indexes, permissions. + +| Version | Date | Pull Request | Subject | +|---------|------------|----------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 3.4.0 | 2024-03-02 | [123457](https://github.com/airbytehq/airbyte/pull/123457) | test2 | +| 3.4.0 | 2024-03-01 | [123456](https://github.com/airbytehq/airbyte/pull/123456) | test1 | +| 3.3.3 | 2024-01-26 | [34573](https://github.com/airbytehq/airbyte/pull/34573) | Adopt CDK v0.16.0 | +| 3.3.2 | 2024-01-24 | [34465](https://github.com/airbytehq/airbyte/pull/34465) | Check xmin only if user selects xmin sync mode. | +| 3.3.1 | 2024-01-10 | [34119](https://github.com/airbytehq/airbyte/pull/34119) | Adopt java CDK version 0.11.5. | +| 3.3.0 | 2023-12-19 | [33437](https://github.com/airbytehq/airbyte/pull/33437) | Remove LEGACY state flag | | +Laurem Ipsum blah blah diff --git a/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/duplicate_entry_valid_changelog_at_end.md b/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/duplicate_entry_valid_changelog_at_end.md new file mode 100644 index 0000000000000..47ffbeac1a75c --- /dev/null +++ b/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/duplicate_entry_valid_changelog_at_end.md @@ -0,0 +1,12 @@ +# Postgres + +Airbyte's certified Postgres connector offers the following features: +* Replicate data from tables, views and materilized views. Other data objects won't be replicated to the destination like indexes, permissions. + +| Version | Date | Pull Request | Subject | +|---------|------------|----------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 3.4.0 | 2024-03-01 | [123456](https://github.com/airbytehq/airbyte/pull/123456) | test | +| 3.3.3 | 2024-01-26 | [34573](https://github.com/airbytehq/airbyte/pull/34573) | Adopt CDK v0.16.0 | +| 3.3.2 | 2024-01-24 | [34465](https://github.com/airbytehq/airbyte/pull/34465) | Check xmin only if user selects xmin sync mode. | +| 3.3.1 | 2024-01-10 | [34119](https://github.com/airbytehq/airbyte/pull/34119) | Adopt java CDK version 0.11.5. | +| 3.3.0 | 2023-12-19 | [33437](https://github.com/airbytehq/airbyte/pull/33437) | Remove LEGACY state flag | | diff --git a/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/duplicate_entry_valid_changelog_in_middle.md b/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/duplicate_entry_valid_changelog_in_middle.md new file mode 100644 index 0000000000000..fe7ff8cce836a --- /dev/null +++ b/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/duplicate_entry_valid_changelog_in_middle.md @@ -0,0 +1,13 @@ +# Postgres + +Airbyte's certified Postgres connector offers the following features: +* Replicate data from tables, views and materilized views. Other data objects won't be replicated to the destination like indexes, permissions. + +| Version | Date | Pull Request | Subject | +|---------|------------|----------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 3.4.0 | 2024-03-01 | [123456](https://github.com/airbytehq/airbyte/pull/123456) | test | +| 3.3.3 | 2024-01-26 | [34573](https://github.com/airbytehq/airbyte/pull/34573) | Adopt CDK v0.16.0 | +| 3.3.2 | 2024-01-24 | [34465](https://github.com/airbytehq/airbyte/pull/34465) | Check xmin only if user selects xmin sync mode. | +| 3.3.1 | 2024-01-10 | [34119](https://github.com/airbytehq/airbyte/pull/34119) | Adopt java CDK version 0.11.5. | +| 3.3.0 | 2023-12-19 | [33437](https://github.com/airbytehq/airbyte/pull/33437) | Remove LEGACY state flag | | +Laurem Ipsum blah blah diff --git a/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/existing_entries_valid_changelog_at_end.md b/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/existing_entries_valid_changelog_at_end.md new file mode 100644 index 0000000000000..be064c1fb03b2 --- /dev/null +++ b/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/existing_entries_valid_changelog_at_end.md @@ -0,0 +1,11 @@ +# Postgres + +Airbyte's certified Postgres connector offers the following features: +* Replicate data from tables, views and materilized views. Other data objects won't be replicated to the destination like indexes, permissions. + +| Version | Date | Pull Request | Subject | +|---------|------------|----------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 3.3.3 | 2024-01-26 | [34573](https://github.com/airbytehq/airbyte/pull/34573) | Adopt CDK v0.16.0 | +| 3.3.2 | 2024-01-24 | [34465](https://github.com/airbytehq/airbyte/pull/34465) | Check xmin only if user selects xmin sync mode. | +| 3.3.1 | 2024-01-10 | [34119](https://github.com/airbytehq/airbyte/pull/34119) | Adopt java CDK version 0.11.5. | +| 3.3.0 | 2023-12-19 | [33437](https://github.com/airbytehq/airbyte/pull/33437) | Remove LEGACY state flag | | diff --git a/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/existing_entries_valid_changelog_in_middle.md b/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/existing_entries_valid_changelog_in_middle.md new file mode 100644 index 0000000000000..2873736244b0f --- /dev/null +++ b/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/existing_entries_valid_changelog_in_middle.md @@ -0,0 +1,12 @@ +# Postgres + +Airbyte's certified Postgres connector offers the following features: +* Replicate data from tables, views and materilized views. Other data objects won't be replicated to the destination like indexes, permissions. + +| Version | Date | Pull Request | Subject | +|---------|------------|----------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 3.3.3 | 2024-01-26 | [34573](https://github.com/airbytehq/airbyte/pull/34573) | Adopt CDK v0.16.0 | +| 3.3.2 | 2024-01-24 | [34465](https://github.com/airbytehq/airbyte/pull/34465) | Check xmin only if user selects xmin sync mode. | +| 3.3.1 | 2024-01-10 | [34119](https://github.com/airbytehq/airbyte/pull/34119) | Adopt java CDK version 0.11.5. | +| 3.3.0 | 2023-12-19 | [33437](https://github.com/airbytehq/airbyte/pull/33437) | Remove LEGACY state flag | | +Laurem Ipsum blah blah diff --git a/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/no_changelog_header.md b/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/no_changelog_header.md new file mode 100644 index 0000000000000..b5212669f2b01 --- /dev/null +++ b/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/no_changelog_header.md @@ -0,0 +1 @@ +Could not find the changelog section table in the documentation file. \ No newline at end of file diff --git a/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/single_insert_valid_changelog_at_end.md b/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/single_insert_valid_changelog_at_end.md new file mode 100644 index 0000000000000..47ffbeac1a75c --- /dev/null +++ b/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/single_insert_valid_changelog_at_end.md @@ -0,0 +1,12 @@ +# Postgres + +Airbyte's certified Postgres connector offers the following features: +* Replicate data from tables, views and materilized views. Other data objects won't be replicated to the destination like indexes, permissions. + +| Version | Date | Pull Request | Subject | +|---------|------------|----------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 3.4.0 | 2024-03-01 | [123456](https://github.com/airbytehq/airbyte/pull/123456) | test | +| 3.3.3 | 2024-01-26 | [34573](https://github.com/airbytehq/airbyte/pull/34573) | Adopt CDK v0.16.0 | +| 3.3.2 | 2024-01-24 | [34465](https://github.com/airbytehq/airbyte/pull/34465) | Check xmin only if user selects xmin sync mode. | +| 3.3.1 | 2024-01-10 | [34119](https://github.com/airbytehq/airbyte/pull/34119) | Adopt java CDK version 0.11.5. | +| 3.3.0 | 2023-12-19 | [33437](https://github.com/airbytehq/airbyte/pull/33437) | Remove LEGACY state flag | | diff --git a/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/single_insert_valid_changelog_in_middle.md b/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/single_insert_valid_changelog_in_middle.md new file mode 100644 index 0000000000000..fe7ff8cce836a --- /dev/null +++ b/airbyte-ci/connectors/pipelines/tests/test_changelog/result_files/single_insert_valid_changelog_in_middle.md @@ -0,0 +1,13 @@ +# Postgres + +Airbyte's certified Postgres connector offers the following features: +* Replicate data from tables, views and materilized views. Other data objects won't be replicated to the destination like indexes, permissions. + +| Version | Date | Pull Request | Subject | +|---------|------------|----------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 3.4.0 | 2024-03-01 | [123456](https://github.com/airbytehq/airbyte/pull/123456) | test | +| 3.3.3 | 2024-01-26 | [34573](https://github.com/airbytehq/airbyte/pull/34573) | Adopt CDK v0.16.0 | +| 3.3.2 | 2024-01-24 | [34465](https://github.com/airbytehq/airbyte/pull/34465) | Check xmin only if user selects xmin sync mode. | +| 3.3.1 | 2024-01-10 | [34119](https://github.com/airbytehq/airbyte/pull/34119) | Adopt java CDK version 0.11.5. | +| 3.3.0 | 2023-12-19 | [33437](https://github.com/airbytehq/airbyte/pull/33437) | Remove LEGACY state flag | | +Laurem Ipsum blah blah diff --git a/airbyte-ci/connectors/pipelines/tests/test_helpers/test_utils.py b/airbyte-ci/connectors/pipelines/tests/test_helpers/test_utils.py index aa7e305d809ae..2b9f19a83ed2b 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_helpers/test_utils.py +++ b/airbyte-ci/connectors/pipelines/tests/test_helpers/test_utils.py @@ -183,8 +183,8 @@ async def test_check_path_in_workdir(dagger_client): .with_workdir(str(connector.code_directory)) ) assert await utils.check_path_in_workdir(container, "metadata.yaml") - assert await utils.check_path_in_workdir(container, "setup.py") - assert await utils.check_path_in_workdir(container, "requirements.txt") + assert await utils.check_path_in_workdir(container, "pyproject.toml") + assert await utils.check_path_in_workdir(container, "poetry.lock") assert await utils.check_path_in_workdir(container, "not_existing_file") is False diff --git a/airbyte-ci/connectors/pipelines/tests/test_tests/test_python_connectors.py b/airbyte-ci/connectors/pipelines/tests/test_tests/test_python_connectors.py index f53b43ebe57a2..63dfd66c41695 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_tests/test_python_connectors.py +++ b/airbyte-ci/connectors/pipelines/tests/test_tests/test_python_connectors.py @@ -8,7 +8,7 @@ from connector_ops.utils import Connector, ConnectorLanguage from pipelines.airbyte_ci.connectors.build_image.steps.python_connectors import BuildConnectorImages from pipelines.airbyte_ci.connectors.context import ConnectorContext -from pipelines.airbyte_ci.connectors.test.steps.python_connectors import AirbyteLibValidation, UnitTests +from pipelines.airbyte_ci.connectors.test.steps.python_connectors import PyAirbyteValidation, UnitTests from pipelines.models.steps import StepResult, StepStatus pytestmark = [ @@ -109,7 +109,7 @@ def test_params(self, context_for_certified_connector_with_setup): ] -class TestAirbyteLibValidationTests: +class TestPyAirbyteValidationTests: @pytest.fixture def compatible_connector(self): return Connector("source-faker") @@ -121,7 +121,7 @@ def incompatible_connector(self): @pytest.fixture def context_for_valid_connector(self, compatible_connector, dagger_client, current_platform): context = ConnectorContext( - pipeline_name="test airbyte-lib validation", + pipeline_name="test pyairbyte validation", connector=compatible_connector, git_branch="test", git_revision="test", @@ -136,7 +136,7 @@ def context_for_valid_connector(self, compatible_connector, dagger_client, curre @pytest.fixture def context_for_invalid_connector(self, incompatible_connector, dagger_client, current_platform): context = ConnectorContext( - pipeline_name="test airbyte-lib validation", + pipeline_name="test pyairbyte validation", connector=incompatible_connector, git_branch="test", git_revision="test", @@ -149,7 +149,7 @@ def context_for_invalid_connector(self, incompatible_connector, dagger_client, c return context async def test__run_validation_success(self, mocker, context_for_valid_connector: ConnectorContext): - result = await AirbyteLibValidation(context_for_valid_connector)._run(mocker.MagicMock()) + result = await PyAirbyteValidation(context_for_valid_connector)._run(mocker.MagicMock()) assert isinstance(result, StepResult) assert result.status == StepStatus.SUCCESS assert "Creating source and validating spec is returned successfully..." in result.stdout @@ -159,7 +159,7 @@ async def test__run_validation_skip_unpublished_connector( mocker, context_for_invalid_connector: ConnectorContext, ): - result = await AirbyteLibValidation(context_for_invalid_connector)._run(mocker.MagicMock()) + result = await PyAirbyteValidation(context_for_invalid_connector)._run(mocker.MagicMock()) assert isinstance(result, StepResult) assert result.status == StepStatus.SKIPPED @@ -172,7 +172,7 @@ async def test__run_validation_fail( metadata["remoteRegistries"] = {"pypi": {"enabled": True, "packageName": "airbyte-source-postgres"}} metadata_mock = mocker.PropertyMock(return_value=metadata) with patch.object(Connector, "metadata", metadata_mock): - result = await AirbyteLibValidation(context_for_invalid_connector)._run(mocker.MagicMock()) + result = await PyAirbyteValidation(context_for_invalid_connector)._run(mocker.MagicMock()) assert isinstance(result, StepResult) assert result.status == StepStatus.FAILURE assert "is not installable" in result.stderr diff --git a/airbyte-ci/connectors/pipelines/tests/test_upgrade_java_cdk.py b/airbyte-ci/connectors/pipelines/tests/test_upgrade_java_cdk.py index cbe91b4df3c1e..72a45978769f4 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_upgrade_java_cdk.py +++ b/airbyte-ci/connectors/pipelines/tests/test_upgrade_java_cdk.py @@ -105,7 +105,7 @@ async def test_run_connector_cdk_upgrade_pipeline( assert files == ["build.gradle"] build_gradle = resulting_directory.file("build.gradle") actual_build_gradle_content = await build_gradle.contents() - assert expected_build_gradle_content == actual_build_gradle_content + assert actual_build_gradle_content == expected_build_gradle_content # Assert that the diff was exported to the repo assert updated_connector_dir.diff.return_value.export.call_count == 1 diff --git a/airbyte-ci/connectors/pipelines/tests/test_upgrade_python_cdk.py b/airbyte-ci/connectors/pipelines/tests/test_upgrade_python_cdk.py index 67d855a1d91e7..70c253241ac21 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_upgrade_python_cdk.py +++ b/airbyte-ci/connectors/pipelines/tests/test_upgrade_python_cdk.py @@ -96,7 +96,7 @@ async def test_run_connector_cdk_upgrade_pipeline( assert files == ["setup.py"] setup_py = resulting_directory.file("setup.py") actual_setup_py_content = await setup_py.contents() - assert expected_setup_py_content == actual_setup_py_content + assert actual_setup_py_content == expected_setup_py_content # Assert that the diff was exported to the repo assert updated_connector_dir.diff.return_value.export.call_count == 1 diff --git a/airbyte-ci/connectors/qa-engine/README.md b/airbyte-ci/connectors/qa-engine/README.md deleted file mode 100644 index 661dfc87c33a9..0000000000000 --- a/airbyte-ci/connectors/qa-engine/README.md +++ /dev/null @@ -1,12 +0,0 @@ -# Connector QA Engine -This system is responsible for -1. Ensuring Connectors adhere to a set of validation rules -2. Reporting on the results of the validation -3. Providing a mechanism for Connectors to be automatically promoted to the next stage of the release process - - -## Running the QA Engine Locally -```bash -poetry install -poetry run run-qa-engine -``` \ No newline at end of file diff --git a/airbyte-ci/connectors/qa-engine/poetry.lock b/airbyte-ci/connectors/qa-engine/poetry.lock deleted file mode 100644 index 9feaa409fd5fc..0000000000000 --- a/airbyte-ci/connectors/qa-engine/poetry.lock +++ /dev/null @@ -1,2311 +0,0 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. - -[[package]] -name = "aiohttp" -version = "3.8.5" -description = "Async http client/server framework (asyncio)" -optional = false -python-versions = ">=3.6" -files = [ - {file = "aiohttp-3.8.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a94159871304770da4dd371f4291b20cac04e8c94f11bdea1c3478e557fbe0d8"}, - {file = "aiohttp-3.8.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:13bf85afc99ce6f9ee3567b04501f18f9f8dbbb2ea11ed1a2e079670403a7c84"}, - {file = "aiohttp-3.8.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ce2ac5708501afc4847221a521f7e4b245abf5178cf5ddae9d5b3856ddb2f3a"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96943e5dcc37a6529d18766597c491798b7eb7a61d48878611298afc1fca946c"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ad5c3c4590bb3cc28b4382f031f3783f25ec223557124c68754a2231d989e2b"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0c413c633d0512df4dc7fd2373ec06cc6a815b7b6d6c2f208ada7e9e93a5061d"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df72ac063b97837a80d80dec8d54c241af059cc9bb42c4de68bd5b61ceb37caa"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c48c5c0271149cfe467c0ff8eb941279fd6e3f65c9a388c984e0e6cf57538e14"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:368a42363c4d70ab52c2c6420a57f190ed3dfaca6a1b19afda8165ee16416a82"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7607ec3ce4993464368505888af5beb446845a014bc676d349efec0e05085905"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0d21c684808288a98914e5aaf2a7c6a3179d4df11d249799c32d1808e79503b5"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:312fcfbacc7880a8da0ae8b6abc6cc7d752e9caa0051a53d217a650b25e9a691"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ad093e823df03bb3fd37e7dec9d4670c34f9e24aeace76808fc20a507cace825"}, - {file = "aiohttp-3.8.5-cp310-cp310-win32.whl", hash = "sha256:33279701c04351a2914e1100b62b2a7fdb9a25995c4a104259f9a5ead7ed4802"}, - {file = "aiohttp-3.8.5-cp310-cp310-win_amd64.whl", hash = "sha256:6e4a280e4b975a2e7745573e3fc9c9ba0d1194a3738ce1cbaa80626cc9b4f4df"}, - {file = "aiohttp-3.8.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ae871a964e1987a943d83d6709d20ec6103ca1eaf52f7e0d36ee1b5bebb8b9b9"}, - {file = "aiohttp-3.8.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:461908b2578955045efde733719d62f2b649c404189a09a632d245b445c9c975"}, - {file = "aiohttp-3.8.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:72a860c215e26192379f57cae5ab12b168b75db8271f111019509a1196dfc780"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc14be025665dba6202b6a71cfcdb53210cc498e50068bc088076624471f8bb9"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8af740fc2711ad85f1a5c034a435782fbd5b5f8314c9a3ef071424a8158d7f6b"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:841cd8233cbd2111a0ef0a522ce016357c5e3aff8a8ce92bcfa14cef890d698f"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed1c46fb119f1b59304b5ec89f834f07124cd23ae5b74288e364477641060ff"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84f8ae3e09a34f35c18fa57f015cc394bd1389bce02503fb30c394d04ee6b938"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62360cb771707cb70a6fd114b9871d20d7dd2163a0feafe43fd115cfe4fe845e"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:23fb25a9f0a1ca1f24c0a371523546366bb642397c94ab45ad3aedf2941cec6a"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0ba0d15164eae3d878260d4c4df859bbdc6466e9e6689c344a13334f988bb53"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5d20003b635fc6ae3f96d7260281dfaf1894fc3aa24d1888a9b2628e97c241e5"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0175d745d9e85c40dcc51c8f88c74bfbaef9e7afeeeb9d03c37977270303064c"}, - {file = "aiohttp-3.8.5-cp311-cp311-win32.whl", hash = "sha256:2e1b1e51b0774408f091d268648e3d57f7260c1682e7d3a63cb00d22d71bb945"}, - {file = "aiohttp-3.8.5-cp311-cp311-win_amd64.whl", hash = "sha256:043d2299f6dfdc92f0ac5e995dfc56668e1587cea7f9aa9d8a78a1b6554e5755"}, - {file = "aiohttp-3.8.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cae533195e8122584ec87531d6df000ad07737eaa3c81209e85c928854d2195c"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f21e83f355643c345177a5d1d8079f9f28b5133bcd154193b799d380331d5d3"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a7a75ef35f2df54ad55dbf4b73fe1da96f370e51b10c91f08b19603c64004acc"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e2e9839e14dd5308ee773c97115f1e0a1cb1d75cbeeee9f33824fa5144c7634"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44e65da1de4403d0576473e2344828ef9c4c6244d65cf4b75549bb46d40b8dd"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78d847e4cde6ecc19125ccbc9bfac4a7ab37c234dd88fbb3c5c524e8e14da543"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:c7a815258e5895d8900aec4454f38dca9aed71085f227537208057853f9d13f2"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:8b929b9bd7cd7c3939f8bcfffa92fae7480bd1aa425279d51a89327d600c704d"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:5db3a5b833764280ed7618393832e0853e40f3d3e9aa128ac0ba0f8278d08649"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:a0215ce6041d501f3155dc219712bc41252d0ab76474615b9700d63d4d9292af"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:fd1ed388ea7fbed22c4968dd64bab0198de60750a25fe8c0c9d4bef5abe13824"}, - {file = "aiohttp-3.8.5-cp36-cp36m-win32.whl", hash = "sha256:6e6783bcc45f397fdebc118d772103d751b54cddf5b60fbcc958382d7dd64f3e"}, - {file = "aiohttp-3.8.5-cp36-cp36m-win_amd64.whl", hash = "sha256:b5411d82cddd212644cf9360879eb5080f0d5f7d809d03262c50dad02f01421a"}, - {file = "aiohttp-3.8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:01d4c0c874aa4ddfb8098e85d10b5e875a70adc63db91f1ae65a4b04d3344cda"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5980a746d547a6ba173fd5ee85ce9077e72d118758db05d229044b469d9029a"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a482e6da906d5e6e653be079b29bc173a48e381600161c9932d89dfae5942ef"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80bd372b8d0715c66c974cf57fe363621a02f359f1ec81cba97366948c7fc873"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1161b345c0a444ebcf46bf0a740ba5dcf50612fd3d0528883fdc0eff578006a"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd56db019015b6acfaaf92e1ac40eb8434847d9bf88b4be4efe5bfd260aee692"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:153c2549f6c004d2754cc60603d4668899c9895b8a89397444a9c4efa282aaf4"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4a01951fabc4ce26ab791da5f3f24dca6d9a6f24121746eb19756416ff2d881b"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bfb9162dcf01f615462b995a516ba03e769de0789de1cadc0f916265c257e5d8"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:7dde0009408969a43b04c16cbbe252c4f5ef4574ac226bc8815cd7342d2028b6"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4149d34c32f9638f38f544b3977a4c24052042affa895352d3636fa8bffd030a"}, - {file = "aiohttp-3.8.5-cp37-cp37m-win32.whl", hash = "sha256:68c5a82c8779bdfc6367c967a4a1b2aa52cd3595388bf5961a62158ee8a59e22"}, - {file = "aiohttp-3.8.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2cf57fb50be5f52bda004b8893e63b48530ed9f0d6c96c84620dc92fe3cd9b9d"}, - {file = "aiohttp-3.8.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:eca4bf3734c541dc4f374ad6010a68ff6c6748f00451707f39857f429ca36ced"}, - {file = "aiohttp-3.8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1274477e4c71ce8cfe6c1ec2f806d57c015ebf84d83373676036e256bc55d690"}, - {file = "aiohttp-3.8.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:28c543e54710d6158fc6f439296c7865b29e0b616629767e685a7185fab4a6b9"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:910bec0c49637d213f5d9877105d26e0c4a4de2f8b1b29405ff37e9fc0ad52b8"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5443910d662db951b2e58eb70b0fbe6b6e2ae613477129a5805d0b66c54b6cb7"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e460be6978fc24e3df83193dc0cc4de46c9909ed92dd47d349a452ef49325b7"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb1558def481d84f03b45888473fc5a1f35747b5f334ef4e7a571bc0dfcb11f8"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34dd0c107799dcbbf7d48b53be761a013c0adf5571bf50c4ecad5643fe9cfcd0"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aa1990247f02a54185dc0dff92a6904521172a22664c863a03ff64c42f9b5410"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0e584a10f204a617d71d359fe383406305a4b595b333721fa50b867b4a0a1548"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:a3cf433f127efa43fee6b90ea4c6edf6c4a17109d1d037d1a52abec84d8f2e42"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:c11f5b099adafb18e65c2c997d57108b5bbeaa9eeee64a84302c0978b1ec948b"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:84de26ddf621d7ac4c975dbea4c945860e08cccde492269db4e1538a6a6f3c35"}, - {file = "aiohttp-3.8.5-cp38-cp38-win32.whl", hash = "sha256:ab88bafedc57dd0aab55fa728ea10c1911f7e4d8b43e1d838a1739f33712921c"}, - {file = "aiohttp-3.8.5-cp38-cp38-win_amd64.whl", hash = "sha256:5798a9aad1879f626589f3df0f8b79b3608a92e9beab10e5fda02c8a2c60db2e"}, - {file = "aiohttp-3.8.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a6ce61195c6a19c785df04e71a4537e29eaa2c50fe745b732aa937c0c77169f3"}, - {file = "aiohttp-3.8.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:773dd01706d4db536335fcfae6ea2440a70ceb03dd3e7378f3e815b03c97ab51"}, - {file = "aiohttp-3.8.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f83a552443a526ea38d064588613aca983d0ee0038801bc93c0c916428310c28"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f7372f7341fcc16f57b2caded43e81ddd18df53320b6f9f042acad41f8e049a"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea353162f249c8097ea63c2169dd1aa55de1e8fecbe63412a9bc50816e87b761"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5d47ae48db0b2dcf70bc8a3bc72b3de86e2a590fc299fdbbb15af320d2659de"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d827176898a2b0b09694fbd1088c7a31836d1a505c243811c87ae53a3f6273c1"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3562b06567c06439d8b447037bb655ef69786c590b1de86c7ab81efe1c9c15d8"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4e874cbf8caf8959d2adf572a78bba17cb0e9d7e51bb83d86a3697b686a0ab4d"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6809a00deaf3810e38c628e9a33271892f815b853605a936e2e9e5129762356c"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:33776e945d89b29251b33a7e7d006ce86447b2cfd66db5e5ded4e5cd0340585c"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eaeed7abfb5d64c539e2db173f63631455f1196c37d9d8d873fc316470dfbacd"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e91d635961bec2d8f19dfeb41a539eb94bd073f075ca6dae6c8dc0ee89ad6f91"}, - {file = "aiohttp-3.8.5-cp39-cp39-win32.whl", hash = "sha256:00ad4b6f185ec67f3e6562e8a1d2b69660be43070bd0ef6fcec5211154c7df67"}, - {file = "aiohttp-3.8.5-cp39-cp39-win_amd64.whl", hash = "sha256:c0a9034379a37ae42dea7ac1e048352d96286626251862e448933c0f59cbd79c"}, - {file = "aiohttp-3.8.5.tar.gz", hash = "sha256:b9552ec52cc147dbf1944ac7ac98af7602e51ea2dcd076ed194ca3c0d1c7d0bc"}, -] - -[package.dependencies] -aiosignal = ">=1.1.2" -async-timeout = ">=4.0.0a3,<5.0" -attrs = ">=17.3.0" -charset-normalizer = ">=2.0,<4.0" -frozenlist = ">=1.1.1" -multidict = ">=4.5,<7.0" -yarl = ">=1.0,<2.0" - -[package.extras] -speedups = ["Brotli", "aiodns", "cchardet"] - -[[package]] -name = "aiosignal" -version = "1.3.1" -description = "aiosignal: a list of registered asynchronous callbacks" -optional = false -python-versions = ">=3.7" -files = [ - {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, - {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, -] - -[package.dependencies] -frozenlist = ">=1.1.0" - -[[package]] -name = "async-timeout" -version = "4.0.3" -description = "Timeout context manager for asyncio programs" -optional = false -python-versions = ">=3.7" -files = [ - {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, - {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, -] - -[[package]] -name = "atomicwrites" -version = "1.4.1" -description = "Atomic file writes." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, -] - -[[package]] -name = "attrs" -version = "23.1.0" -description = "Classes Without Boilerplate" -optional = false -python-versions = ">=3.7" -files = [ - {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, - {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, -] - -[package.extras] -cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]", "pre-commit"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] -tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] - -[[package]] -name = "cachetools" -version = "5.3.1" -description = "Extensible memoizing collections and decorators" -optional = false -python-versions = ">=3.7" -files = [ - {file = "cachetools-5.3.1-py3-none-any.whl", hash = "sha256:95ef631eeaea14ba2e36f06437f36463aac3a096799e876ee55e5cdccb102590"}, - {file = "cachetools-5.3.1.tar.gz", hash = "sha256:dce83f2d9b4e1f732a8cd44af8e8fab2dbe46201467fc98b3ef8f269092bf62b"}, -] - -[[package]] -name = "certifi" -version = "2023.7.22" -description = "Python package for providing Mozilla's CA Bundle." -optional = false -python-versions = ">=3.6" -files = [ - {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, - {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, -] - -[[package]] -name = "cffi" -version = "1.15.1" -description = "Foreign Function Interface for Python calling C code." -optional = false -python-versions = "*" -files = [ - {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, - {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, - {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, - {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, - {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, - {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, - {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, - {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, - {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, - {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, - {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, - {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, - {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, - {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, - {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, - {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, - {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, - {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, - {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, -] - -[package.dependencies] -pycparser = "*" - -[[package]] -name = "chardet" -version = "5.2.0" -description = "Universal encoding detector for Python 3" -optional = false -python-versions = ">=3.7" -files = [ - {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"}, - {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"}, -] - -[[package]] -name = "charset-normalizer" -version = "3.2.0" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, - {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, -] - -[[package]] -name = "ci-credentials" -version = "1.1.0" -description = "CLI tooling to read and manage GSM secrets" -optional = false -python-versions = "^3.10" -files = [] -develop = false - -[package.dependencies] -click = "^8.1.3" -common_utils = {path = "../common_utils", develop = true} -pyyaml = "^6.0" -requests = "^2.28.2" - -[package.source] -type = "directory" -url = "../ci_credentials" - -[[package]] -name = "click" -version = "8.1.6" -description = "Composable command line interface toolkit" -optional = false -python-versions = ">=3.7" -files = [ - {file = "click-8.1.6-py3-none-any.whl", hash = "sha256:fa244bb30b3b5ee2cae3da8f55c9e5e0c0e86093306301fb418eb9dc40fbded5"}, - {file = "click-8.1.6.tar.gz", hash = "sha256:48ee849951919527a045bfe3bf7baa8a959c423134e1a5b98c05c20ba75a1cbd"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "common-utils" -version = "0.0.0" -description = "Suite of all often used classes and common functions" -optional = false -python-versions = "^3.10" -files = [] -develop = true - -[package.dependencies] -cryptography = "^3.4.7" -pyjwt = "^2.1.0" -requests = "^2.28.2" - -[package.source] -type = "directory" -url = "../common_utils" - -[[package]] -name = "commonmark" -version = "0.9.1" -description = "Python parser for the CommonMark Markdown spec" -optional = false -python-versions = "*" -files = [ - {file = "commonmark-0.9.1-py2.py3-none-any.whl", hash = "sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9"}, - {file = "commonmark-0.9.1.tar.gz", hash = "sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60"}, -] - -[package.extras] -test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"] - -[[package]] -name = "connector-ops" -version = "0.2.2" -description = "Packaged maintained by the connector operations team to perform CI for connectors" -optional = false -python-versions = "^3.10" -files = [] -develop = false - -[package.dependencies] -ci-credentials = {path = "../ci_credentials"} -click = "^8.1.3" -GitPython = "^3.1.29" -google-cloud-storage = "^2.8.0" -pydantic = "^1.9" -pydash = "^7.0.4" -PyGithub = "^1.58.0" -PyYAML = "^6.0" -requests = "^2.28.2" -rich = "^11.0.1" - -[package.source] -type = "directory" -url = "../connector_ops" - -[[package]] -name = "cryptography" -version = "3.4.8" -description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." -optional = false -python-versions = ">=3.6" -files = [ - {file = "cryptography-3.4.8-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:a00cf305f07b26c351d8d4e1af84ad7501eca8a342dedf24a7acb0e7b7406e14"}, - {file = "cryptography-3.4.8-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:f44d141b8c4ea5eb4dbc9b3ad992d45580c1d22bf5e24363f2fbf50c2d7ae8a7"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0a7dcbcd3f1913f664aca35d47c1331fce738d44ec34b7be8b9d332151b0b01e"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34dae04a0dce5730d8eb7894eab617d8a70d0c97da76b905de9efb7128ad7085"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1eb7bb0df6f6f583dd8e054689def236255161ebbcf62b226454ab9ec663746b"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:9965c46c674ba8cc572bc09a03f4c649292ee73e1b683adb1ce81e82e9a6a0fb"}, - {file = "cryptography-3.4.8-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:3c4129fc3fdc0fa8e40861b5ac0c673315b3c902bbdc05fc176764815b43dd1d"}, - {file = "cryptography-3.4.8-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:695104a9223a7239d155d7627ad912953b540929ef97ae0c34c7b8bf30857e89"}, - {file = "cryptography-3.4.8-cp36-abi3-win32.whl", hash = "sha256:21ca464b3a4b8d8e86ba0ee5045e103a1fcfac3b39319727bc0fc58c09c6aff7"}, - {file = "cryptography-3.4.8-cp36-abi3-win_amd64.whl", hash = "sha256:3520667fda779eb788ea00080124875be18f2d8f0848ec00733c0ec3bb8219fc"}, - {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d2a6e5ef66503da51d2110edf6c403dc6b494cc0082f85db12f54e9c5d4c3ec5"}, - {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a305600e7a6b7b855cd798e00278161b681ad6e9b7eca94c721d5f588ab212af"}, - {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:3fa3a7ccf96e826affdf1a0a9432be74dc73423125c8f96a909e3835a5ef194a"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:d9ec0e67a14f9d1d48dd87a2531009a9b251c02ea42851c060b25c782516ff06"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5b0fbfae7ff7febdb74b574055c7466da334a5371f253732d7e2e7525d570498"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94fff993ee9bc1b2440d3b7243d488c6a3d9724cc2b09cdb297f6a886d040ef7"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:8695456444f277af73a4877db9fc979849cd3ee74c198d04fc0776ebc3db52b9"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:cd65b60cfe004790c795cc35f272e41a3df4631e2fb6b35aa7ac6ef2859d554e"}, - {file = "cryptography-3.4.8.tar.gz", hash = "sha256:94cc5ed4ceaefcbe5bf38c8fba6a21fc1d365bb8fb826ea1688e3370b2e24a1c"}, -] - -[package.dependencies] -cffi = ">=1.12" - -[package.extras] -docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] -docstest = ["doc8", "pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] -pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] -sdist = ["setuptools-rust (>=0.11.4)"] -ssh = ["bcrypt (>=3.1.5)"] -test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pytz"] - -[[package]] -name = "dataproperty" -version = "1.0.1" -description = "Python library for extract property from data." -optional = false -python-versions = ">=3.7" -files = [ - {file = "DataProperty-1.0.1-py3-none-any.whl", hash = "sha256:0b8b07d4fb6453fcf975b53d35dea41f3cfd69c9d79b5010c3cf224ff0407a7a"}, - {file = "DataProperty-1.0.1.tar.gz", hash = "sha256:723e5729fa6e885e127a771a983ee1e0e34bb141aca4ffe1f0bfa7cde34650a4"}, -] - -[package.dependencies] -mbstrdecoder = ">=1.0.0,<2" -typepy = {version = ">=1.2.0,<2", extras = ["datetime"]} - -[package.extras] -logging = ["loguru (>=0.4.1,<1)"] -test = ["pytest (>=6.0.1)", "pytest-md-report (>=0.3)", "tcolorpy (>=0.1.2)"] - -[[package]] -name = "db-dtypes" -version = "1.1.1" -description = "Pandas Data Types for SQL systems (BigQuery, Spanner)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "db-dtypes-1.1.1.tar.gz", hash = "sha256:ab485c85fef2454f3182427def0b0a3ab179b2871542787d33ba519d62078883"}, - {file = "db_dtypes-1.1.1-py2.py3-none-any.whl", hash = "sha256:23be34ea2bc91065447ecea4d5f107e46d1de223d152e69fa73673a62d5bd27d"}, -] - -[package.dependencies] -numpy = ">=1.16.6" -packaging = ">=17.0" -pandas = ">=0.24.2" -pyarrow = ">=3.0.0" - -[[package]] -name = "decorator" -version = "5.1.1" -description = "Decorators for Humans" -optional = false -python-versions = ">=3.5" -files = [ - {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, - {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, -] - -[[package]] -name = "deprecated" -version = "1.2.14" -description = "Python @deprecated decorator to deprecate old python classes, functions or methods." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, - {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, -] - -[package.dependencies] -wrapt = ">=1.10,<2" - -[package.extras] -dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] - -[[package]] -name = "freezegun" -version = "1.2.2" -description = "Let your Python tests travel through time" -optional = false -python-versions = ">=3.6" -files = [ - {file = "freezegun-1.2.2-py3-none-any.whl", hash = "sha256:ea1b963b993cb9ea195adbd893a48d573fda951b0da64f60883d7e988b606c9f"}, - {file = "freezegun-1.2.2.tar.gz", hash = "sha256:cd22d1ba06941384410cd967d8a99d5ae2442f57dfafeff2fda5de8dc5c05446"}, -] - -[package.dependencies] -python-dateutil = ">=2.7" - -[[package]] -name = "frozenlist" -version = "1.4.0" -description = "A list-like structure which implements collections.abc.MutableSequence" -optional = false -python-versions = ">=3.8" -files = [ - {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:764226ceef3125e53ea2cb275000e309c0aa5464d43bd72abd661e27fffc26ab"}, - {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d6484756b12f40003c6128bfcc3fa9f0d49a687e171186c2d85ec82e3758c559"}, - {file = "frozenlist-1.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9ac08e601308e41eb533f232dbf6b7e4cea762f9f84f6357136eed926c15d12c"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d081f13b095d74b67d550de04df1c756831f3b83dc9881c38985834387487f1b"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71932b597f9895f011f47f17d6428252fc728ba2ae6024e13c3398a087c2cdea"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:981b9ab5a0a3178ff413bca62526bb784249421c24ad7381e39d67981be2c326"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e41f3de4df3e80de75845d3e743b3f1c4c8613c3997a912dbf0229fc61a8b963"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6918d49b1f90821e93069682c06ffde41829c346c66b721e65a5c62b4bab0300"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e5c8764c7829343d919cc2dfc587a8db01c4f70a4ebbc49abde5d4b158b007b"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8d0edd6b1c7fb94922bf569c9b092ee187a83f03fb1a63076e7774b60f9481a8"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e29cda763f752553fa14c68fb2195150bfab22b352572cb36c43c47bedba70eb"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:0c7c1b47859ee2cac3846fde1c1dc0f15da6cec5a0e5c72d101e0f83dcb67ff9"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:901289d524fdd571be1c7be054f48b1f88ce8dddcbdf1ec698b27d4b8b9e5d62"}, - {file = "frozenlist-1.4.0-cp310-cp310-win32.whl", hash = "sha256:1a0848b52815006ea6596c395f87449f693dc419061cc21e970f139d466dc0a0"}, - {file = "frozenlist-1.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:b206646d176a007466358aa21d85cd8600a415c67c9bd15403336c331a10d956"}, - {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:de343e75f40e972bae1ef6090267f8260c1446a1695e77096db6cfa25e759a95"}, - {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad2a9eb6d9839ae241701d0918f54c51365a51407fd80f6b8289e2dfca977cc3"}, - {file = "frozenlist-1.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd7bd3b3830247580de99c99ea2a01416dfc3c34471ca1298bccabf86d0ff4dc"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdf1847068c362f16b353163391210269e4f0569a3c166bc6a9f74ccbfc7e839"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38461d02d66de17455072c9ba981d35f1d2a73024bee7790ac2f9e361ef1cd0c"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5a32087d720c608f42caed0ef36d2b3ea61a9d09ee59a5142d6070da9041b8f"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd65632acaf0d47608190a71bfe46b209719bf2beb59507db08ccdbe712f969b"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261b9f5d17cac914531331ff1b1d452125bf5daa05faf73b71d935485b0c510b"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b89ac9768b82205936771f8d2eb3ce88503b1556324c9f903e7156669f521472"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:008eb8b31b3ea6896da16c38c1b136cb9fec9e249e77f6211d479db79a4eaf01"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e74b0506fa5aa5598ac6a975a12aa8928cbb58e1f5ac8360792ef15de1aa848f"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:490132667476f6781b4c9458298b0c1cddf237488abd228b0b3650e5ecba7467"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:76d4711f6f6d08551a7e9ef28c722f4a50dd0fc204c56b4bcd95c6cc05ce6fbb"}, - {file = "frozenlist-1.4.0-cp311-cp311-win32.whl", hash = "sha256:a02eb8ab2b8f200179b5f62b59757685ae9987996ae549ccf30f983f40602431"}, - {file = "frozenlist-1.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:515e1abc578dd3b275d6a5114030b1330ba044ffba03f94091842852f806f1c1"}, - {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f0ed05f5079c708fe74bf9027e95125334b6978bf07fd5ab923e9e55e5fbb9d3"}, - {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ca265542ca427bf97aed183c1676e2a9c66942e822b14dc6e5f42e038f92a503"}, - {file = "frozenlist-1.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:491e014f5c43656da08958808588cc6c016847b4360e327a62cb308c791bd2d9"}, - {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17ae5cd0f333f94f2e03aaf140bb762c64783935cc764ff9c82dff626089bebf"}, - {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e78fb68cf9c1a6aa4a9a12e960a5c9dfbdb89b3695197aa7064705662515de2"}, - {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5655a942f5f5d2c9ed93d72148226d75369b4f6952680211972a33e59b1dfdc"}, - {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c11b0746f5d946fecf750428a95f3e9ebe792c1ee3b1e96eeba145dc631a9672"}, - {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e66d2a64d44d50d2543405fb183a21f76b3b5fd16f130f5c99187c3fb4e64919"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:88f7bc0fcca81f985f78dd0fa68d2c75abf8272b1f5c323ea4a01a4d7a614efc"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5833593c25ac59ede40ed4de6d67eb42928cca97f26feea219f21d0ed0959b79"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:fec520865f42e5c7f050c2a79038897b1c7d1595e907a9e08e3353293ffc948e"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:b826d97e4276750beca7c8f0f1a4938892697a6bcd8ec8217b3312dad6982781"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ceb6ec0a10c65540421e20ebd29083c50e6d1143278746a4ef6bcf6153171eb8"}, - {file = "frozenlist-1.4.0-cp38-cp38-win32.whl", hash = "sha256:2b8bcf994563466db019fab287ff390fffbfdb4f905fc77bc1c1d604b1c689cc"}, - {file = "frozenlist-1.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:a6c8097e01886188e5be3e6b14e94ab365f384736aa1fca6a0b9e35bd4a30bc7"}, - {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6c38721585f285203e4b4132a352eb3daa19121a035f3182e08e437cface44bf"}, - {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a0c6da9aee33ff0b1a451e867da0c1f47408112b3391dd43133838339e410963"}, - {file = "frozenlist-1.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93ea75c050c5bb3d98016b4ba2497851eadf0ac154d88a67d7a6816206f6fa7f"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f61e2dc5ad442c52b4887f1fdc112f97caeff4d9e6ebe78879364ac59f1663e1"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa384489fefeb62321b238e64c07ef48398fe80f9e1e6afeff22e140e0850eef"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10ff5faaa22786315ef57097a279b833ecab1a0bfb07d604c9cbb1c4cdc2ed87"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:007df07a6e3eb3e33e9a1fe6a9db7af152bbd8a185f9aaa6ece10a3529e3e1c6"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4f399d28478d1f604c2ff9119907af9726aed73680e5ed1ca634d377abb087"}, - {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5374b80521d3d3f2ec5572e05adc94601985cc526fb276d0c8574a6d749f1b3"}, - {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ce31ae3e19f3c902de379cf1323d90c649425b86de7bbdf82871b8a2a0615f3d"}, - {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7211ef110a9194b6042449431e08c4d80c0481e5891e58d429df5899690511c2"}, - {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:556de4430ce324c836789fa4560ca62d1591d2538b8ceb0b4f68fb7b2384a27a"}, - {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7645a8e814a3ee34a89c4a372011dcd817964ce8cb273c8ed6119d706e9613e3"}, - {file = "frozenlist-1.4.0-cp39-cp39-win32.whl", hash = "sha256:19488c57c12d4e8095a922f328df3f179c820c212940a498623ed39160bc3c2f"}, - {file = "frozenlist-1.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:6221d84d463fb110bdd7619b69cb43878a11d51cbb9394ae3105d082d5199167"}, - {file = "frozenlist-1.4.0.tar.gz", hash = "sha256:09163bdf0b2907454042edb19f887c6d33806adc71fbd54afc14908bfdc22251"}, -] - -[[package]] -name = "fsspec" -version = "2023.1.0" -description = "File-system specification" -optional = false -python-versions = ">=3.7" -files = [ - {file = "fsspec-2023.1.0-py3-none-any.whl", hash = "sha256:b833e2e541e9e8cde0ab549414187871243177feb3d344f9d27b25a93f5d8139"}, - {file = "fsspec-2023.1.0.tar.gz", hash = "sha256:fbae7f20ff801eb5f7d0bedf81f25c787c0dfac5e982d98fa3884a9cde2b5411"}, -] - -[package.extras] -abfs = ["adlfs"] -adl = ["adlfs"] -arrow = ["pyarrow (>=1)"] -dask = ["dask", "distributed"] -dropbox = ["dropbox", "dropboxdrivefs", "requests"] -entrypoints = ["importlib-metadata"] -fuse = ["fusepy"] -gcs = ["gcsfs"] -git = ["pygit2"] -github = ["requests"] -gs = ["gcsfs"] -gui = ["panel"] -hdfs = ["pyarrow (>=1)"] -http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "requests"] -libarchive = ["libarchive-c"] -oci = ["ocifs"] -s3 = ["s3fs"] -sftp = ["paramiko"] -smb = ["smbprotocol"] -ssh = ["paramiko"] -tqdm = ["tqdm"] - -[[package]] -name = "gcsfs" -version = "2023.1.0" -description = "Convenient Filesystem interface over GCS" -optional = false -python-versions = ">=3.7" -files = [ - {file = "gcsfs-2023.1.0-py2.py3-none-any.whl", hash = "sha256:62c491b9e2a8e9e58b8a899eec2ce111f827718a65539019ff3cadf447e48f41"}, - {file = "gcsfs-2023.1.0.tar.gz", hash = "sha256:0a7b7ca8c1affa126a14ba35d7b7dff81c49e2aaceedda9732c7f159a4837a26"}, -] - -[package.dependencies] -aiohttp = "<4.0.0a0 || >4.0.0a0,<4.0.0a1 || >4.0.0a1" -decorator = ">4.1.2" -fsspec = "2023.1.0" -google-auth = ">=1.2" -google-auth-oauthlib = "*" -google-cloud-storage = "*" -requests = "*" - -[package.extras] -crc = ["crcmod"] -gcsfuse = ["fusepy"] - -[[package]] -name = "gitdb" -version = "4.0.10" -description = "Git Object Database" -optional = false -python-versions = ">=3.7" -files = [ - {file = "gitdb-4.0.10-py3-none-any.whl", hash = "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"}, - {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"}, -] - -[package.dependencies] -smmap = ">=3.0.1,<6" - -[[package]] -name = "gitpython" -version = "3.1.32" -description = "GitPython is a Python library used to interact with Git repositories" -optional = false -python-versions = ">=3.7" -files = [ - {file = "GitPython-3.1.32-py3-none-any.whl", hash = "sha256:e3d59b1c2c6ebb9dfa7a184daf3b6dd4914237e7488a1730a6d8f6f5d0b4187f"}, - {file = "GitPython-3.1.32.tar.gz", hash = "sha256:8d9b8cb1e80b9735e8717c9362079d3ce4c6e5ddeebedd0361b228c3a67a62f6"}, -] - -[package.dependencies] -gitdb = ">=4.0.1,<5" - -[[package]] -name = "google-api-core" -version = "2.11.1" -description = "Google API client core library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-api-core-2.11.1.tar.gz", hash = "sha256:25d29e05a0058ed5f19c61c0a78b1b53adea4d9364b464d014fbda941f6d1c9a"}, - {file = "google_api_core-2.11.1-py3-none-any.whl", hash = "sha256:d92a5a92dc36dd4f4b9ee4e55528a90e432b059f93aee6ad857f9de8cc7ae94a"}, -] - -[package.dependencies] -google-auth = ">=2.14.1,<3.0.dev0" -googleapis-common-protos = ">=1.56.2,<2.0.dev0" -grpcio = [ - {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""}, - {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, -] -grpcio-status = [ - {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "extra == \"grpc\""}, - {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, -] -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" -requests = ">=2.18.0,<3.0.0.dev0" - -[package.extras] -grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"] -grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] -grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] - -[[package]] -name = "google-auth" -version = "2.22.0" -description = "Google Authentication Library" -optional = false -python-versions = ">=3.6" -files = [ - {file = "google-auth-2.22.0.tar.gz", hash = "sha256:164cba9af4e6e4e40c3a4f90a1a6c12ee56f14c0b4868d1ca91b32826ab334ce"}, - {file = "google_auth-2.22.0-py2.py3-none-any.whl", hash = "sha256:d61d1b40897407b574da67da1a833bdc10d5a11642566e506565d1b1a46ba873"}, -] - -[package.dependencies] -cachetools = ">=2.0.0,<6.0" -pyasn1-modules = ">=0.2.1" -rsa = ">=3.1.4,<5" -six = ">=1.9.0" -urllib3 = "<2.0" - -[package.extras] -aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] -enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] -pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] -reauth = ["pyu2f (>=0.1.5)"] -requests = ["requests (>=2.20.0,<3.0.0.dev0)"] - -[[package]] -name = "google-auth-oauthlib" -version = "1.0.0" -description = "Google Authentication Library" -optional = false -python-versions = ">=3.6" -files = [ - {file = "google-auth-oauthlib-1.0.0.tar.gz", hash = "sha256:e375064964820b47221a7e1b7ee1fd77051b6323c3f9e3e19785f78ab67ecfc5"}, - {file = "google_auth_oauthlib-1.0.0-py2.py3-none-any.whl", hash = "sha256:95880ca704928c300f48194d1770cf5b1462835b6e49db61445a520f793fd5fb"}, -] - -[package.dependencies] -google-auth = ">=2.15.0" -requests-oauthlib = ">=0.7.0" - -[package.extras] -tool = ["click (>=6.0.0)"] - -[[package]] -name = "google-cloud-bigquery" -version = "3.11.4" -description = "Google BigQuery API client library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-cloud-bigquery-3.11.4.tar.gz", hash = "sha256:697df117241a2283bcbb93b21e10badc14e51c9a90800d2a7e1a3e1c7d842974"}, - {file = "google_cloud_bigquery-3.11.4-py2.py3-none-any.whl", hash = "sha256:5fa7897743a0ed949ade25a0942fc9e7557d8fce307c6f8a76d1b604cf27f1b1"}, -] - -[package.dependencies] -google-api-core = {version = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0dev", extras = ["grpc"]} -google-cloud-core = ">=1.6.0,<3.0.0dev" -google-resumable-media = ">=0.6.0,<3.0dev" -grpcio = [ - {version = ">=1.47.0,<2.0dev", markers = "python_version < \"3.11\""}, - {version = ">=1.49.1,<2.0dev", markers = "python_version >= \"3.11\""}, -] -packaging = ">=20.0.0" -proto-plus = ">=1.15.0,<2.0.0dev" -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" -python-dateutil = ">=2.7.2,<3.0dev" -requests = ">=2.21.0,<3.0.0dev" - -[package.extras] -all = ["Shapely (>=1.8.4,<2.0dev)", "db-dtypes (>=0.3.0,<2.0.0dev)", "geopandas (>=0.9.0,<1.0dev)", "google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "ipykernel (>=6.0.0)", "ipython (>=7.23.1,!=8.1.0)", "ipywidgets (>=7.7.0)", "opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)", "pandas (>=1.1.0)", "pyarrow (>=3.0.0)", "tqdm (>=4.7.4,<5.0.0dev)"] -bqstorage = ["google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "pyarrow (>=3.0.0)"] -geopandas = ["Shapely (>=1.8.4,<2.0dev)", "geopandas (>=0.9.0,<1.0dev)"] -ipython = ["ipykernel (>=6.0.0)", "ipython (>=7.23.1,!=8.1.0)"] -ipywidgets = ["ipykernel (>=6.0.0)", "ipywidgets (>=7.7.0)"] -opentelemetry = ["opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)"] -pandas = ["db-dtypes (>=0.3.0,<2.0.0dev)", "pandas (>=1.1.0)", "pyarrow (>=3.0.0)"] -tqdm = ["tqdm (>=4.7.4,<5.0.0dev)"] - -[[package]] -name = "google-cloud-bigquery-storage" -version = "2.22.0" -description = "Google Cloud Bigquery Storage API client library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-cloud-bigquery-storage-2.22.0.tar.gz", hash = "sha256:f6d8c7b3ab9b574c66977fcee9d336e334ad1a3843a722be19123640e7808ea3"}, - {file = "google_cloud_bigquery_storage-2.22.0-py2.py3-none-any.whl", hash = "sha256:7f11b2ae590a5b3874fb6ddf705a66a070340db238f971cf7b53349eee9ca317"}, -] - -[package.dependencies] -google-api-core = {version = ">=1.34.0,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} -proto-plus = [ - {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""}, - {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""}, -] -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" - -[package.extras] -fastavro = ["fastavro (>=0.21.2)"] -pandas = ["pandas (>=0.21.1)"] -pyarrow = ["pyarrow (>=0.15.0)"] - -[[package]] -name = "google-cloud-core" -version = "2.3.3" -description = "Google Cloud API client core library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-cloud-core-2.3.3.tar.gz", hash = "sha256:37b80273c8d7eee1ae816b3a20ae43585ea50506cb0e60f3cf5be5f87f1373cb"}, - {file = "google_cloud_core-2.3.3-py2.py3-none-any.whl", hash = "sha256:fbd11cad3e98a7e5b0343dc07cb1039a5ffd7a5bb96e1f1e27cee4bda4a90863"}, -] - -[package.dependencies] -google-api-core = ">=1.31.6,<2.0.dev0 || >2.3.0,<3.0.0dev" -google-auth = ">=1.25.0,<3.0dev" - -[package.extras] -grpc = ["grpcio (>=1.38.0,<2.0dev)"] - -[[package]] -name = "google-cloud-storage" -version = "2.10.0" -description = "Google Cloud Storage API client library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-cloud-storage-2.10.0.tar.gz", hash = "sha256:934b31ead5f3994e5360f9ff5750982c5b6b11604dc072bc452c25965e076dc7"}, - {file = "google_cloud_storage-2.10.0-py2.py3-none-any.whl", hash = "sha256:9433cf28801671de1c80434238fb1e7e4a1ba3087470e90f70c928ea77c2b9d7"}, -] - -[package.dependencies] -google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0dev" -google-auth = ">=1.25.0,<3.0dev" -google-cloud-core = ">=2.3.0,<3.0dev" -google-resumable-media = ">=2.3.2" -requests = ">=2.18.0,<3.0.0dev" - -[package.extras] -protobuf = ["protobuf (<5.0.0dev)"] - -[[package]] -name = "google-crc32c" -version = "1.5.0" -description = "A python wrapper of the C library 'Google CRC32C'" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-crc32c-1.5.0.tar.gz", hash = "sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7"}, - {file = "google_crc32c-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13"}, - {file = "google_crc32c-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346"}, - {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65"}, - {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b"}, - {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02"}, - {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4"}, - {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e"}, - {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c"}, - {file = "google_crc32c-1.5.0-cp310-cp310-win32.whl", hash = "sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee"}, - {file = "google_crc32c-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289"}, - {file = "google_crc32c-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273"}, - {file = "google_crc32c-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298"}, - {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57"}, - {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438"}, - {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906"}, - {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183"}, - {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd"}, - {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c"}, - {file = "google_crc32c-1.5.0-cp311-cp311-win32.whl", hash = "sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709"}, - {file = "google_crc32c-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-win32.whl", hash = "sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740"}, - {file = "google_crc32c-1.5.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8"}, - {file = "google_crc32c-1.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a"}, - {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946"}, - {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a"}, - {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d"}, - {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a"}, - {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37"}, - {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894"}, - {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a"}, - {file = "google_crc32c-1.5.0-cp38-cp38-win32.whl", hash = "sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4"}, - {file = "google_crc32c-1.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c"}, - {file = "google_crc32c-1.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7"}, - {file = "google_crc32c-1.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d"}, - {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100"}, - {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9"}, - {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57"}, - {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210"}, - {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd"}, - {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96"}, - {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61"}, - {file = "google_crc32c-1.5.0-cp39-cp39-win32.whl", hash = "sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c"}, - {file = "google_crc32c-1.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541"}, - {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325"}, - {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd"}, - {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091"}, - {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178"}, - {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2"}, - {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d"}, - {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2"}, - {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5"}, - {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462"}, - {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314"}, - {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728"}, - {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88"}, - {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb"}, - {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31"}, - {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93"}, -] - -[package.extras] -testing = ["pytest"] - -[[package]] -name = "google-resumable-media" -version = "2.5.0" -description = "Utilities for Google Media Downloads and Resumable Uploads" -optional = false -python-versions = ">= 3.7" -files = [ - {file = "google-resumable-media-2.5.0.tar.gz", hash = "sha256:218931e8e2b2a73a58eb354a288e03a0fd5fb1c4583261ac6e4c078666468c93"}, - {file = "google_resumable_media-2.5.0-py2.py3-none-any.whl", hash = "sha256:da1bd943e2e114a56d85d6848497ebf9be6a14d3db23e9fc57581e7c3e8170ec"}, -] - -[package.dependencies] -google-crc32c = ">=1.0,<2.0dev" - -[package.extras] -aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)"] -requests = ["requests (>=2.18.0,<3.0.0dev)"] - -[[package]] -name = "googleapis-common-protos" -version = "1.60.0" -description = "Common protobufs used in Google APIs" -optional = false -python-versions = ">=3.7" -files = [ - {file = "googleapis-common-protos-1.60.0.tar.gz", hash = "sha256:e73ebb404098db405ba95d1e1ae0aa91c3e15a71da031a2eeb6b2e23e7bc3708"}, - {file = "googleapis_common_protos-1.60.0-py2.py3-none-any.whl", hash = "sha256:69f9bbcc6acde92cab2db95ce30a70bd2b81d20b12eff3f1aabaffcbe8a93918"}, -] - -[package.dependencies] -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" - -[package.extras] -grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] - -[[package]] -name = "grpcio" -version = "1.57.0" -description = "HTTP/2-based RPC framework" -optional = false -python-versions = ">=3.7" -files = [ - {file = "grpcio-1.57.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:092fa155b945015754bdf988be47793c377b52b88d546e45c6a9f9579ac7f7b6"}, - {file = "grpcio-1.57.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:2f7349786da979a94690cc5c2b804cab4e8774a3cf59be40d037c4342c906649"}, - {file = "grpcio-1.57.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:82640e57fb86ea1d71ea9ab54f7e942502cf98a429a200b2e743d8672171734f"}, - {file = "grpcio-1.57.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40b72effd4c789de94ce1be2b5f88d7b9b5f7379fe9645f198854112a6567d9a"}, - {file = "grpcio-1.57.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f708a6a17868ad8bf586598bee69abded4996b18adf26fd2d91191383b79019"}, - {file = "grpcio-1.57.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:60fe15288a0a65d5c1cb5b4a62b1850d07336e3ba728257a810317be14f0c527"}, - {file = "grpcio-1.57.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6907b1cf8bb29b058081d2aad677b15757a44ef2d4d8d9130271d2ad5e33efca"}, - {file = "grpcio-1.57.0-cp310-cp310-win32.whl", hash = "sha256:57b183e8b252825c4dd29114d6c13559be95387aafc10a7be645462a0fc98bbb"}, - {file = "grpcio-1.57.0-cp310-cp310-win_amd64.whl", hash = "sha256:7b400807fa749a9eb286e2cd893e501b110b4d356a218426cb9c825a0474ca56"}, - {file = "grpcio-1.57.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:c6ebecfb7a31385393203eb04ed8b6a08f5002f53df3d59e5e795edb80999652"}, - {file = "grpcio-1.57.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:00258cbe3f5188629828363ae8ff78477ce976a6f63fb2bb5e90088396faa82e"}, - {file = "grpcio-1.57.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:23e7d8849a0e58b806253fd206ac105b328171e01b8f18c7d5922274958cc87e"}, - {file = "grpcio-1.57.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5371bcd861e679d63b8274f73ac281751d34bd54eccdbfcd6aa00e692a82cd7b"}, - {file = "grpcio-1.57.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aed90d93b731929e742967e236f842a4a2174dc5db077c8f9ad2c5996f89f63e"}, - {file = "grpcio-1.57.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:fe752639919aad9ffb0dee0d87f29a6467d1ef764f13c4644d212a9a853a078d"}, - {file = "grpcio-1.57.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fada6b07ec4f0befe05218181f4b85176f11d531911b64c715d1875c4736d73a"}, - {file = "grpcio-1.57.0-cp311-cp311-win32.whl", hash = "sha256:bb396952cfa7ad2f01061fbc7dc1ad91dd9d69243bcb8110cf4e36924785a0fe"}, - {file = "grpcio-1.57.0-cp311-cp311-win_amd64.whl", hash = "sha256:e503cb45ed12b924b5b988ba9576dc9949b2f5283b8e33b21dcb6be74a7c58d0"}, - {file = "grpcio-1.57.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:fd173b4cf02b20f60860dc2ffe30115c18972d7d6d2d69df97ac38dee03be5bf"}, - {file = "grpcio-1.57.0-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:d7f8df114d6b4cf5a916b98389aeaf1e3132035420a88beea4e3d977e5f267a5"}, - {file = "grpcio-1.57.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:76c44efa4ede1f42a9d5b2fed1fe9377e73a109bef8675fb0728eb80b0b8e8f2"}, - {file = "grpcio-1.57.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4faea2cfdf762a664ab90589b66f416274887641ae17817de510b8178356bf73"}, - {file = "grpcio-1.57.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c60b83c43faeb6d0a9831f0351d7787a0753f5087cc6fa218d78fdf38e5acef0"}, - {file = "grpcio-1.57.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b363bbb5253e5f9c23d8a0a034dfdf1b7c9e7f12e602fc788c435171e96daccc"}, - {file = "grpcio-1.57.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:f1fb0fd4a1e9b11ac21c30c169d169ef434c6e9344ee0ab27cfa6f605f6387b2"}, - {file = "grpcio-1.57.0-cp37-cp37m-win_amd64.whl", hash = "sha256:34950353539e7d93f61c6796a007c705d663f3be41166358e3d88c45760c7d98"}, - {file = "grpcio-1.57.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:871f9999e0211f9551f368612460442a5436d9444606184652117d6a688c9f51"}, - {file = "grpcio-1.57.0-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:a8a8e560e8dbbdf29288872e91efd22af71e88b0e5736b0daf7773c1fecd99f0"}, - {file = "grpcio-1.57.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:2313b124e475aa9017a9844bdc5eafb2d5abdda9d456af16fc4535408c7d6da6"}, - {file = "grpcio-1.57.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4098b6b638d9e0ca839a81656a2fd4bc26c9486ea707e8b1437d6f9d61c3941"}, - {file = "grpcio-1.57.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e5b58e32ae14658085c16986d11e99abd002ddbf51c8daae8a0671fffb3467f"}, - {file = "grpcio-1.57.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0f80bf37f09e1caba6a8063e56e2b87fa335add314cf2b78ebf7cb45aa7e3d06"}, - {file = "grpcio-1.57.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5b7a4ce8f862fe32b2a10b57752cf3169f5fe2915acfe7e6a1e155db3da99e79"}, - {file = "grpcio-1.57.0-cp38-cp38-win32.whl", hash = "sha256:9338bacf172e942e62e5889b6364e56657fbf8ac68062e8b25c48843e7b202bb"}, - {file = "grpcio-1.57.0-cp38-cp38-win_amd64.whl", hash = "sha256:e1cb52fa2d67d7f7fab310b600f22ce1ff04d562d46e9e0ac3e3403c2bb4cc16"}, - {file = "grpcio-1.57.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:fee387d2fab144e8a34e0e9c5ca0f45c9376b99de45628265cfa9886b1dbe62b"}, - {file = "grpcio-1.57.0-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:b53333627283e7241fcc217323f225c37783b5f0472316edcaa4479a213abfa6"}, - {file = "grpcio-1.57.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:f19ac6ac0a256cf77d3cc926ef0b4e64a9725cc612f97228cd5dc4bd9dbab03b"}, - {file = "grpcio-1.57.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e3fdf04e402f12e1de8074458549337febb3b45f21076cc02ef4ff786aff687e"}, - {file = "grpcio-1.57.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5613a2fecc82f95d6c51d15b9a72705553aa0d7c932fad7aed7afb51dc982ee5"}, - {file = "grpcio-1.57.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b670c2faa92124b7397b42303e4d8eb64a4cd0b7a77e35a9e865a55d61c57ef9"}, - {file = "grpcio-1.57.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7a635589201b18510ff988161b7b573f50c6a48fae9cb567657920ca82022b37"}, - {file = "grpcio-1.57.0-cp39-cp39-win32.whl", hash = "sha256:d78d8b86fcdfa1e4c21f8896614b6cc7ee01a2a758ec0c4382d662f2a62cf766"}, - {file = "grpcio-1.57.0-cp39-cp39-win_amd64.whl", hash = "sha256:20ec6fc4ad47d1b6e12deec5045ec3cd5402d9a1597f738263e98f490fe07056"}, - {file = "grpcio-1.57.0.tar.gz", hash = "sha256:4b089f7ad1eb00a104078bab8015b0ed0ebcb3b589e527ab009c53893fd4e613"}, -] - -[package.extras] -protobuf = ["grpcio-tools (>=1.57.0)"] - -[[package]] -name = "grpcio-status" -version = "1.57.0" -description = "Status proto mapping for gRPC" -optional = false -python-versions = ">=3.6" -files = [ - {file = "grpcio-status-1.57.0.tar.gz", hash = "sha256:b098da99df1eebe58337f8f78e50df990273ccacc1226fddeb47c590e3df9e02"}, - {file = "grpcio_status-1.57.0-py3-none-any.whl", hash = "sha256:15d6af055914ebbc4ed17e55ebfb8e6bb17a45a57fea32e6af19978fb7844690"}, -] - -[package.dependencies] -googleapis-common-protos = ">=1.5.5" -grpcio = ">=1.57.0" -protobuf = ">=4.21.6" - -[[package]] -name = "idna" -version = "3.4" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.5" -files = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, -] - -[[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" -optional = false -python-versions = ">=3.7" -files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] - -[[package]] -name = "mbstrdecoder" -version = "1.1.3" -description = "mbstrdecoder is a Python library for multi-byte character string decoder" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mbstrdecoder-1.1.3-py3-none-any.whl", hash = "sha256:d66c1ed3f2dc4e7c5d87cd44a75be10bc5af4250f95b38bbaedd7851308ce938"}, - {file = "mbstrdecoder-1.1.3.tar.gz", hash = "sha256:dcfd2c759322eb44fe193a9e0b1b86c5b87f3ec5ea8e1bb43b3e9ae423f1e8fe"}, -] - -[package.dependencies] -chardet = ">=3.0.4,<6" - -[package.extras] -test = ["Faker (>=1.0.2)", "pytest (>=6.0.1)", "pytest-md-report (>=0.1)"] - -[[package]] -name = "multidict" -version = "6.0.4" -description = "multidict implementation" -optional = false -python-versions = ">=3.7" -files = [ - {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, - {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, - {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, - {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, - {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, - {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, - {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, - {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, - {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, - {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, - {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, - {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, - {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, - {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, - {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, -] - -[[package]] -name = "numpy" -version = "1.25.2" -description = "Fundamental package for array computing in Python" -optional = false -python-versions = ">=3.9" -files = [ - {file = "numpy-1.25.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:db3ccc4e37a6873045580d413fe79b68e47a681af8db2e046f1dacfa11f86eb3"}, - {file = "numpy-1.25.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:90319e4f002795ccfc9050110bbbaa16c944b1c37c0baeea43c5fb881693ae1f"}, - {file = "numpy-1.25.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfe4a913e29b418d096e696ddd422d8a5d13ffba4ea91f9f60440a3b759b0187"}, - {file = "numpy-1.25.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f08f2e037bba04e707eebf4bc934f1972a315c883a9e0ebfa8a7756eabf9e357"}, - {file = "numpy-1.25.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bec1e7213c7cb00d67093247f8c4db156fd03075f49876957dca4711306d39c9"}, - {file = "numpy-1.25.2-cp310-cp310-win32.whl", hash = "sha256:7dc869c0c75988e1c693d0e2d5b26034644399dd929bc049db55395b1379e044"}, - {file = "numpy-1.25.2-cp310-cp310-win_amd64.whl", hash = "sha256:834b386f2b8210dca38c71a6e0f4fd6922f7d3fcff935dbe3a570945acb1b545"}, - {file = "numpy-1.25.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5462d19336db4560041517dbb7759c21d181a67cb01b36ca109b2ae37d32418"}, - {file = "numpy-1.25.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c5652ea24d33585ea39eb6a6a15dac87a1206a692719ff45d53c5282e66d4a8f"}, - {file = "numpy-1.25.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d60fbae8e0019865fc4784745814cff1c421df5afee233db6d88ab4f14655a2"}, - {file = "numpy-1.25.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60e7f0f7f6d0eee8364b9a6304c2845b9c491ac706048c7e8cf47b83123b8dbf"}, - {file = "numpy-1.25.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:bb33d5a1cf360304754913a350edda36d5b8c5331a8237268c48f91253c3a364"}, - {file = "numpy-1.25.2-cp311-cp311-win32.whl", hash = "sha256:5883c06bb92f2e6c8181df7b39971a5fb436288db58b5a1c3967702d4278691d"}, - {file = "numpy-1.25.2-cp311-cp311-win_amd64.whl", hash = "sha256:5c97325a0ba6f9d041feb9390924614b60b99209a71a69c876f71052521d42a4"}, - {file = "numpy-1.25.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b79e513d7aac42ae918db3ad1341a015488530d0bb2a6abcbdd10a3a829ccfd3"}, - {file = "numpy-1.25.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:eb942bfb6f84df5ce05dbf4b46673ffed0d3da59f13635ea9b926af3deb76926"}, - {file = "numpy-1.25.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e0746410e73384e70d286f93abf2520035250aad8c5714240b0492a7302fdca"}, - {file = "numpy-1.25.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7806500e4f5bdd04095e849265e55de20d8cc4b661b038957354327f6d9b295"}, - {file = "numpy-1.25.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8b77775f4b7df768967a7c8b3567e309f617dd5e99aeb886fa14dc1a0791141f"}, - {file = "numpy-1.25.2-cp39-cp39-win32.whl", hash = "sha256:2792d23d62ec51e50ce4d4b7d73de8f67a2fd3ea710dcbc8563a51a03fb07b01"}, - {file = "numpy-1.25.2-cp39-cp39-win_amd64.whl", hash = "sha256:76b4115d42a7dfc5d485d358728cdd8719be33cc5ec6ec08632a5d6fca2ed380"}, - {file = "numpy-1.25.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1a1329e26f46230bf77b02cc19e900db9b52f398d6722ca853349a782d4cff55"}, - {file = "numpy-1.25.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c3abc71e8b6edba80a01a52e66d83c5d14433cbcd26a40c329ec7ed09f37901"}, - {file = "numpy-1.25.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:1b9735c27cea5d995496f46a8b1cd7b408b3f34b6d50459d9ac8fe3a20cc17bf"}, - {file = "numpy-1.25.2.tar.gz", hash = "sha256:fd608e19c8d7c55021dffd43bfe5492fab8cc105cc8986f813f8c3c048b38760"}, -] - -[[package]] -name = "oauthlib" -version = "3.2.2" -description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" -optional = false -python-versions = ">=3.6" -files = [ - {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, - {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, -] - -[package.extras] -rsa = ["cryptography (>=3.0.0)"] -signals = ["blinker (>=1.4.0)"] -signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] - -[[package]] -name = "packaging" -version = "23.1" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.7" -files = [ - {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, - {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, -] - -[[package]] -name = "pandas" -version = "1.5.3" -description = "Powerful data structures for data analysis, time series, and statistics" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pandas-1.5.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3749077d86e3a2f0ed51367f30bf5b82e131cc0f14260c4d3e499186fccc4406"}, - {file = "pandas-1.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:972d8a45395f2a2d26733eb8d0f629b2f90bebe8e8eddbb8829b180c09639572"}, - {file = "pandas-1.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:50869a35cbb0f2e0cd5ec04b191e7b12ed688874bd05dd777c19b28cbea90996"}, - {file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3ac844a0fe00bfaeb2c9b51ab1424e5c8744f89860b138434a363b1f620f354"}, - {file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a0a56cef15fd1586726dace5616db75ebcfec9179a3a55e78f72c5639fa2a23"}, - {file = "pandas-1.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:478ff646ca42b20376e4ed3fa2e8d7341e8a63105586efe54fa2508ee087f328"}, - {file = "pandas-1.5.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6973549c01ca91ec96199e940495219c887ea815b2083722821f1d7abfa2b4dc"}, - {file = "pandas-1.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c39a8da13cede5adcd3be1182883aea1c925476f4e84b2807a46e2775306305d"}, - {file = "pandas-1.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f76d097d12c82a535fda9dfe5e8dd4127952b45fea9b0276cb30cca5ea313fbc"}, - {file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e474390e60ed609cec869b0da796ad94f420bb057d86784191eefc62b65819ae"}, - {file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f2b952406a1588ad4cad5b3f55f520e82e902388a6d5a4a91baa8d38d23c7f6"}, - {file = "pandas-1.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:bc4c368f42b551bf72fac35c5128963a171b40dce866fb066540eeaf46faa003"}, - {file = "pandas-1.5.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:14e45300521902689a81f3f41386dc86f19b8ba8dd5ac5a3c7010ef8d2932813"}, - {file = "pandas-1.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9842b6f4b8479e41968eced654487258ed81df7d1c9b7b870ceea24ed9459b31"}, - {file = "pandas-1.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:26d9c71772c7afb9d5046e6e9cf42d83dd147b5cf5bcb9d97252077118543792"}, - {file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fbcb19d6fceb9e946b3e23258757c7b225ba450990d9ed63ccceeb8cae609f7"}, - {file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:565fa34a5434d38e9d250af3c12ff931abaf88050551d9fbcdfafca50d62babf"}, - {file = "pandas-1.5.3-cp38-cp38-win32.whl", hash = "sha256:87bd9c03da1ac870a6d2c8902a0e1fd4267ca00f13bc494c9e5a9020920e1d51"}, - {file = "pandas-1.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:41179ce559943d83a9b4bbacb736b04c928b095b5f25dd2b7389eda08f46f373"}, - {file = "pandas-1.5.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c74a62747864ed568f5a82a49a23a8d7fe171d0c69038b38cedf0976831296fa"}, - {file = "pandas-1.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c4c00e0b0597c8e4f59e8d461f797e5d70b4d025880516a8261b2817c47759ee"}, - {file = "pandas-1.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a50d9a4336a9621cab7b8eb3fb11adb82de58f9b91d84c2cd526576b881a0c5a"}, - {file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd05f7783b3274aa206a1af06f0ceed3f9b412cf665b7247eacd83be41cf7bf0"}, - {file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f69c4029613de47816b1bb30ff5ac778686688751a5e9c99ad8c7031f6508e5"}, - {file = "pandas-1.5.3-cp39-cp39-win32.whl", hash = "sha256:7cec0bee9f294e5de5bbfc14d0573f65526071029d036b753ee6507d2a21480a"}, - {file = "pandas-1.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:dfd681c5dc216037e0b0a2c821f5ed99ba9f03ebcf119c7dac0e9a7b960b9ec9"}, - {file = "pandas-1.5.3.tar.gz", hash = "sha256:74a3fd7e5a7ec052f183273dc7b0acd3a863edf7520f5d3a1765c04ffdb3b0b1"}, -] - -[package.dependencies] -numpy = [ - {version = ">=1.21.0", markers = "python_version >= \"3.10\""}, - {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, -] -python-dateutil = ">=2.8.1" -pytz = ">=2020.1" - -[package.extras] -test = ["hypothesis (>=5.5.3)", "pytest (>=6.0)", "pytest-xdist (>=1.31)"] - -[[package]] -name = "pandas-gbq" -version = "0.19.2" -description = "Google BigQuery connector for pandas" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pandas-gbq-0.19.2.tar.gz", hash = "sha256:b0f7fa84a2be0fe767e33a008ca7e4ad9a9e3ac67255fd0a41fc19b503138447"}, - {file = "pandas_gbq-0.19.2-py2.py3-none-any.whl", hash = "sha256:0ef8da3e4088053a2bea069ed688992a44b52af67dadb97eee494b32a2147563"}, -] - -[package.dependencies] -db-dtypes = ">=1.0.4,<2.0.0" -google-api-core = ">=2.10.2,<3.0.0dev" -google-auth = ">=2.13.0" -google-auth-oauthlib = ">=0.7.0" -google-cloud-bigquery = ">=3.3.5,<4.0.0dev" -google-cloud-bigquery-storage = ">=2.16.2,<3.0.0dev" -numpy = ">=1.16.6" -pandas = ">=1.1.4" -pyarrow = ">=3.0.0" -pydata-google-auth = ">=1.5.0" -setuptools = "*" - -[package.extras] -tqdm = ["tqdm (>=4.23.0)"] - -[[package]] -name = "pathvalidate" -version = "2.5.2" -description = "pathvalidate is a Python library to sanitize/validate a string such as filenames/file-paths/etc." -optional = false -python-versions = ">=3.6" -files = [ - {file = "pathvalidate-2.5.2-py3-none-any.whl", hash = "sha256:e39a4dfacdba70e3a96d3e4c6ff617a39e991cf242e6e1f2017f1f67c3408d33"}, - {file = "pathvalidate-2.5.2.tar.gz", hash = "sha256:5ff57d0fabe5ecb7a4f1e4957bfeb5ad8ab5ab4c0fa71f79c6bbc24bd9b7d14d"}, -] - -[package.extras] -test = ["allpairspy", "click", "faker", "pytest (>=6.0.1)", "pytest-discord (>=0.0.6)", "pytest-md-report (>=0.0.12)"] - -[[package]] -name = "pluggy" -version = "1.2.0" -description = "plugin and hook calling mechanisms for python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, - {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, -] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "proto-plus" -version = "1.22.3" -description = "Beautiful, Pythonic protocol buffers." -optional = false -python-versions = ">=3.6" -files = [ - {file = "proto-plus-1.22.3.tar.gz", hash = "sha256:fdcd09713cbd42480740d2fe29c990f7fbd885a67efc328aa8be6ee3e9f76a6b"}, - {file = "proto_plus-1.22.3-py3-none-any.whl", hash = "sha256:a49cd903bc0b6ab41f76bf65510439d56ca76f868adf0274e738bfdd096894df"}, -] - -[package.dependencies] -protobuf = ">=3.19.0,<5.0.0dev" - -[package.extras] -testing = ["google-api-core[grpc] (>=1.31.5)"] - -[[package]] -name = "protobuf" -version = "4.24.0" -description = "" -optional = false -python-versions = ">=3.7" -files = [ - {file = "protobuf-4.24.0-cp310-abi3-win32.whl", hash = "sha256:81cb9c4621d2abfe181154354f63af1c41b00a4882fb230b4425cbaed65e8f52"}, - {file = "protobuf-4.24.0-cp310-abi3-win_amd64.whl", hash = "sha256:6c817cf4a26334625a1904b38523d1b343ff8b637d75d2c8790189a4064e51c3"}, - {file = "protobuf-4.24.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:ae97b5de10f25b7a443b40427033e545a32b0e9dda17bcd8330d70033379b3e5"}, - {file = "protobuf-4.24.0-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:567fe6b0647494845d0849e3d5b260bfdd75692bf452cdc9cb660d12457c055d"}, - {file = "protobuf-4.24.0-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:a6b1ca92ccabfd9903c0c7dde8876221dc7d8d87ad5c42e095cc11b15d3569c7"}, - {file = "protobuf-4.24.0-cp37-cp37m-win32.whl", hash = "sha256:a38400a692fd0c6944c3c58837d112f135eb1ed6cdad5ca6c5763336e74f1a04"}, - {file = "protobuf-4.24.0-cp37-cp37m-win_amd64.whl", hash = "sha256:5ab19ee50037d4b663c02218a811a5e1e7bb30940c79aac385b96e7a4f9daa61"}, - {file = "protobuf-4.24.0-cp38-cp38-win32.whl", hash = "sha256:e8834ef0b4c88666ebb7c7ec18045aa0f4325481d724daa624a4cf9f28134653"}, - {file = "protobuf-4.24.0-cp38-cp38-win_amd64.whl", hash = "sha256:8bb52a2be32db82ddc623aefcedfe1e0eb51da60e18fcc908fb8885c81d72109"}, - {file = "protobuf-4.24.0-cp39-cp39-win32.whl", hash = "sha256:ae7a1835721086013de193311df858bc12cd247abe4ef9710b715d930b95b33e"}, - {file = "protobuf-4.24.0-cp39-cp39-win_amd64.whl", hash = "sha256:44825e963008f8ea0d26c51911c30d3e82e122997c3c4568fd0385dd7bacaedf"}, - {file = "protobuf-4.24.0-py3-none-any.whl", hash = "sha256:82e6e9ebdd15b8200e8423676eab38b774624d6a1ad696a60d86a2ac93f18201"}, - {file = "protobuf-4.24.0.tar.gz", hash = "sha256:5d0ceb9de6e08311832169e601d1fc71bd8e8c779f3ee38a97a78554945ecb85"}, -] - -[[package]] -name = "py" -version = "1.11.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] - -[[package]] -name = "pyarrow" -version = "12.0.1" -description = "Python library for Apache Arrow" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pyarrow-12.0.1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:6d288029a94a9bb5407ceebdd7110ba398a00412c5b0155ee9813a40d246c5df"}, - {file = "pyarrow-12.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:345e1828efdbd9aa4d4de7d5676778aba384a2c3add896d995b23d368e60e5af"}, - {file = "pyarrow-12.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d6009fdf8986332b2169314da482baed47ac053311c8934ac6651e614deacd6"}, - {file = "pyarrow-12.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d3c4cbbf81e6dd23fe921bc91dc4619ea3b79bc58ef10bce0f49bdafb103daf"}, - {file = "pyarrow-12.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:cdacf515ec276709ac8042c7d9bd5be83b4f5f39c6c037a17a60d7ebfd92c890"}, - {file = "pyarrow-12.0.1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:749be7fd2ff260683f9cc739cb862fb11be376de965a2a8ccbf2693b098db6c7"}, - {file = "pyarrow-12.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6895b5fb74289d055c43db3af0de6e16b07586c45763cb5e558d38b86a91e3a7"}, - {file = "pyarrow-12.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1887bdae17ec3b4c046fcf19951e71b6a619f39fa674f9881216173566c8f718"}, - {file = "pyarrow-12.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2c9cb8eeabbadf5fcfc3d1ddea616c7ce893db2ce4dcef0ac13b099ad7ca082"}, - {file = "pyarrow-12.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:ce4aebdf412bd0eeb800d8e47db854f9f9f7e2f5a0220440acf219ddfddd4f63"}, - {file = "pyarrow-12.0.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:e0d8730c7f6e893f6db5d5b86eda42c0a130842d101992b581e2138e4d5663d3"}, - {file = "pyarrow-12.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:43364daec02f69fec89d2315f7fbfbeec956e0d991cbbef471681bd77875c40f"}, - {file = "pyarrow-12.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:051f9f5ccf585f12d7de836e50965b3c235542cc896959320d9776ab93f3b33d"}, - {file = "pyarrow-12.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:be2757e9275875d2a9c6e6052ac7957fbbfc7bc7370e4a036a9b893e96fedaba"}, - {file = "pyarrow-12.0.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:cf812306d66f40f69e684300f7af5111c11f6e0d89d6b733e05a3de44961529d"}, - {file = "pyarrow-12.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:459a1c0ed2d68671188b2118c63bac91eaef6fc150c77ddd8a583e3c795737bf"}, - {file = "pyarrow-12.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85e705e33eaf666bbe508a16fd5ba27ca061e177916b7a317ba5a51bee43384c"}, - {file = "pyarrow-12.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9120c3eb2b1f6f516a3b7a9714ed860882d9ef98c4b17edcdc91d95b7528db60"}, - {file = "pyarrow-12.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:c780f4dc40460015d80fcd6a6140de80b615349ed68ef9adb653fe351778c9b3"}, - {file = "pyarrow-12.0.1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:a3c63124fc26bf5f95f508f5d04e1ece8cc23a8b0af2a1e6ab2b1ec3fdc91b24"}, - {file = "pyarrow-12.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b13329f79fa4472324f8d32dc1b1216616d09bd1e77cfb13104dec5463632c36"}, - {file = "pyarrow-12.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb656150d3d12ec1396f6dde542db1675a95c0cc8366d507347b0beed96e87ca"}, - {file = "pyarrow-12.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6251e38470da97a5b2e00de5c6a049149f7b2bd62f12fa5dbb9ac674119ba71a"}, - {file = "pyarrow-12.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:3de26da901216149ce086920547dfff5cd22818c9eab67ebc41e863a5883bac7"}, - {file = "pyarrow-12.0.1.tar.gz", hash = "sha256:cce317fc96e5b71107bf1f9f184d5e54e2bd14bbf3f9a3d62819961f0af86fec"}, -] - -[package.dependencies] -numpy = ">=1.16.6" - -[[package]] -name = "pyasn1" -version = "0.5.0" -description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" -files = [ - {file = "pyasn1-0.5.0-py2.py3-none-any.whl", hash = "sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57"}, - {file = "pyasn1-0.5.0.tar.gz", hash = "sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde"}, -] - -[[package]] -name = "pyasn1-modules" -version = "0.3.0" -description = "A collection of ASN.1-based protocols modules" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" -files = [ - {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"}, - {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"}, -] - -[package.dependencies] -pyasn1 = ">=0.4.6,<0.6.0" - -[[package]] -name = "pycparser" -version = "2.21" -description = "C parser in Python" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, - {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, -] - -[[package]] -name = "pydantic" -version = "1.9.2" -description = "Data validation and settings management using python type hints" -optional = false -python-versions = ">=3.6.1" -files = [ - {file = "pydantic-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9c9e04a6cdb7a363d7cb3ccf0efea51e0abb48e180c0d31dca8d247967d85c6e"}, - {file = "pydantic-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fafe841be1103f340a24977f61dee76172e4ae5f647ab9e7fd1e1fca51524f08"}, - {file = "pydantic-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afacf6d2a41ed91fc631bade88b1d319c51ab5418870802cedb590b709c5ae3c"}, - {file = "pydantic-1.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ee0d69b2a5b341fc7927e92cae7ddcfd95e624dfc4870b32a85568bd65e6131"}, - {file = "pydantic-1.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ff68fc85355532ea77559ede81f35fff79a6a5543477e168ab3a381887caea76"}, - {file = "pydantic-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c0f5e142ef8217019e3eef6ae1b6b55f09a7a15972958d44fbd228214cede567"}, - {file = "pydantic-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:615661bfc37e82ac677543704437ff737418e4ea04bef9cf11c6d27346606044"}, - {file = "pydantic-1.9.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:328558c9f2eed77bd8fffad3cef39dbbe3edc7044517f4625a769d45d4cf7555"}, - {file = "pydantic-1.9.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bd446bdb7755c3a94e56d7bdfd3ee92396070efa8ef3a34fab9579fe6aa1d84"}, - {file = "pydantic-1.9.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0b214e57623a535936005797567231a12d0da0c29711eb3514bc2b3cd008d0f"}, - {file = "pydantic-1.9.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d8ce3fb0841763a89322ea0432f1f59a2d3feae07a63ea2c958b2315e1ae8adb"}, - {file = "pydantic-1.9.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b34ba24f3e2d0b39b43f0ca62008f7ba962cff51efa56e64ee25c4af6eed987b"}, - {file = "pydantic-1.9.2-cp36-cp36m-win_amd64.whl", hash = "sha256:84d76ecc908d917f4684b354a39fd885d69dd0491be175f3465fe4b59811c001"}, - {file = "pydantic-1.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4de71c718c9756d679420c69f216776c2e977459f77e8f679a4a961dc7304a56"}, - {file = "pydantic-1.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5803ad846cdd1ed0d97eb00292b870c29c1f03732a010e66908ff48a762f20e4"}, - {file = "pydantic-1.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a8c5360a0297a713b4123608a7909e6869e1b56d0e96eb0d792c27585d40757f"}, - {file = "pydantic-1.9.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:cdb4272678db803ddf94caa4f94f8672e9a46bae4a44f167095e4d06fec12979"}, - {file = "pydantic-1.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:19b5686387ea0d1ea52ecc4cffb71abb21702c5e5b2ac626fd4dbaa0834aa49d"}, - {file = "pydantic-1.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:32e0b4fb13ad4db4058a7c3c80e2569adbd810c25e6ca3bbd8b2a9cc2cc871d7"}, - {file = "pydantic-1.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:91089b2e281713f3893cd01d8e576771cd5bfdfbff5d0ed95969f47ef6d676c3"}, - {file = "pydantic-1.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e631c70c9280e3129f071635b81207cad85e6c08e253539467e4ead0e5b219aa"}, - {file = "pydantic-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b3946f87e5cef3ba2e7bd3a4eb5a20385fe36521d6cc1ebf3c08a6697c6cfb3"}, - {file = "pydantic-1.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5565a49effe38d51882cb7bac18bda013cdb34d80ac336428e8908f0b72499b0"}, - {file = "pydantic-1.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:bd67cb2c2d9602ad159389c29e4ca964b86fa2f35c2faef54c3eb28b4efd36c8"}, - {file = "pydantic-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4aafd4e55e8ad5bd1b19572ea2df546ccace7945853832bb99422a79c70ce9b8"}, - {file = "pydantic-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:d70916235d478404a3fa8c997b003b5f33aeac4686ac1baa767234a0f8ac2326"}, - {file = "pydantic-1.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f0ca86b525264daa5f6b192f216a0d1e860b7383e3da1c65a1908f9c02f42801"}, - {file = "pydantic-1.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1061c6ee6204f4f5a27133126854948e3b3d51fcc16ead2e5d04378c199b2f44"}, - {file = "pydantic-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e78578f0c7481c850d1c969aca9a65405887003484d24f6110458fb02cca7747"}, - {file = "pydantic-1.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5da164119602212a3fe7e3bc08911a89db4710ae51444b4224c2382fd09ad453"}, - {file = "pydantic-1.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ead3cd020d526f75b4188e0a8d71c0dbbe1b4b6b5dc0ea775a93aca16256aeb"}, - {file = "pydantic-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7d0f183b305629765910eaad707800d2f47c6ac5bcfb8c6397abdc30b69eeb15"}, - {file = "pydantic-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:f1a68f4f65a9ee64b6ccccb5bf7e17db07caebd2730109cb8a95863cfa9c4e55"}, - {file = "pydantic-1.9.2-py3-none-any.whl", hash = "sha256:78a4d6bdfd116a559aeec9a4cfe77dda62acc6233f8b56a716edad2651023e5e"}, - {file = "pydantic-1.9.2.tar.gz", hash = "sha256:8cb0bc509bfb71305d7a59d00163d5f9fc4530f0881ea32c74ff4f74c85f3d3d"}, -] - -[package.dependencies] -typing-extensions = ">=3.7.4.3" - -[package.extras] -dotenv = ["python-dotenv (>=0.10.4)"] -email = ["email-validator (>=1.0.3)"] - -[[package]] -name = "pydash" -version = "7.0.6" -description = "The kitchen sink of Python utility libraries for doing \"stuff\" in a functional way. Based on the Lo-Dash Javascript library." -optional = false -python-versions = ">=3.7" -files = [ - {file = "pydash-7.0.6-py3-none-any.whl", hash = "sha256:10e506935953fde4b0d6fe21a88e17783cd1479256ae96f285b5f89063b4efd6"}, - {file = "pydash-7.0.6.tar.gz", hash = "sha256:7d9df7e9f36f2bbb08316b609480e7c6468185473a21bdd8e65dda7915565a26"}, -] - -[package.dependencies] -typing-extensions = ">=3.10,<4.6.0 || >4.6.0" - -[package.extras] -dev = ["Sphinx", "black", "build", "coverage", "docformatter", "flake8", "flake8-black", "flake8-bugbear", "flake8-isort", "furo", "importlib-metadata (<5)", "invoke", "isort", "mypy", "pylint", "pytest", "pytest-cov", "pytest-mypy-testing", "sphinx-autodoc-typehints", "tox", "twine", "wheel"] - -[[package]] -name = "pydata-google-auth" -version = "1.8.2" -description = "PyData helpers for authenticating to Google APIs" -optional = false -python-versions = "*" -files = [ - {file = "pydata-google-auth-1.8.2.tar.gz", hash = "sha256:547b6c0fbea657dcecd50887c5db8640ebec062a59a2b88e8ff8e53a04818303"}, - {file = "pydata_google_auth-1.8.2-py2.py3-none-any.whl", hash = "sha256:a9dce59af4a170ea60c4b2ebbc83ee1f74d34255a4f97b2469ae9a4a0dc98e99"}, -] - -[package.dependencies] -google-auth = {version = ">=1.25.0,<3.0dev", markers = "python_version >= \"3.6\""} -google-auth-oauthlib = {version = ">=0.4.0", markers = "python_version >= \"3.6\""} -setuptools = "*" - -[[package]] -name = "pygithub" -version = "1.58.2" -description = "Use the full Github API v3" -optional = false -python-versions = ">=3.7" -files = [ - {file = "PyGithub-1.58.2-py3-none-any.whl", hash = "sha256:f435884af617c6debaa76cbc355372d1027445a56fbc39972a3b9ed4968badc8"}, - {file = "PyGithub-1.58.2.tar.gz", hash = "sha256:1e6b1b7afe31f75151fb81f7ab6b984a7188a852bdb123dbb9ae90023c3ce60f"}, -] - -[package.dependencies] -deprecated = "*" -pyjwt = {version = ">=2.4.0", extras = ["crypto"]} -pynacl = ">=1.4.0" -requests = ">=2.14.0" - -[[package]] -name = "pygments" -version = "2.16.1" -description = "Pygments is a syntax highlighting package written in Python." -optional = false -python-versions = ">=3.7" -files = [ - {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, - {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, -] - -[package.extras] -plugins = ["importlib-metadata"] - -[[package]] -name = "pyinstrument" -version = "4.5.1" -description = "Call stack profiler for Python. Shows you why your code is slow!" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pyinstrument-4.5.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8f334250b158010d1e2c70d9d10b880f848e03a917079b366b1e2d8890348d41"}, - {file = "pyinstrument-4.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:55537cd763aee8bce65a201d5ec1aef74677d9ff3ab3391316604ca68740d92a"}, - {file = "pyinstrument-4.5.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3d7933bd83e913e21c4031d5c1aeeb2483147e4037363f43475df9ad962c748"}, - {file = "pyinstrument-4.5.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0d8f6b6df7ce338af35b213cd89b685b2a7c15569f482476c4e0942700b3e71"}, - {file = "pyinstrument-4.5.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98101d064b7af008189dd6f0bdd01f9be39bc6a4630505dfb13ff6ef51a0c67c"}, - {file = "pyinstrument-4.5.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:46f1607e29f93da16d38be41ad2062a56731ff4efa24e561ac848719e8b8ca41"}, - {file = "pyinstrument-4.5.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e287ebc1a8b00d3a767829c03f210df0824ab2e0f6340e8f63bab6fcef1b3546"}, - {file = "pyinstrument-4.5.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d15613b8d5d509c29001f2edfadd73d418c2814262433fd1225c4f7893e4010a"}, - {file = "pyinstrument-4.5.1-cp310-cp310-win32.whl", hash = "sha256:04c67f08bac41173bc6b44396c60bf1a1879864d0684a7717b1bb8be27793bd9"}, - {file = "pyinstrument-4.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:dc07267447935d28ee914f955613b04d621e5bb44995f793508d6f0eb3ec2818"}, - {file = "pyinstrument-4.5.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8285cfb25b9ee72766bdac8db8c276755115a6e729cda4571005d1ba58c99dda"}, - {file = "pyinstrument-4.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b58239f4a0fe64f688260be0e5b4a1d19a23b890b284cf6c1c8bd0ead4616f41"}, - {file = "pyinstrument-4.5.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4039210a80015ae0ad2016a3b3311b068f5b334d5f5ce3c54d473f8624db0d35"}, - {file = "pyinstrument-4.5.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b28a4c5926036155062c83e15ca93437dbe2d41dd5feeac96f72d4d16b3431c"}, - {file = "pyinstrument-4.5.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89d2c2a9de60712abd2228033e4ac63cdee86783af5288f2d7f8efc365e33425"}, - {file = "pyinstrument-4.5.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bf0fdb17cb245c53826c77e2b95095a8fb5053e49ae8ef18aecbbd184028f9e7"}, - {file = "pyinstrument-4.5.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:65ac43f8a1b74a331b5a4f60985531654a8d71a7698e6be5ac7e8493e7a37f37"}, - {file = "pyinstrument-4.5.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:61632d287f70d850a517533b9e1bf8da41527ffc4d781d4b65106f64ee33cb98"}, - {file = "pyinstrument-4.5.1-cp311-cp311-win32.whl", hash = "sha256:22ae739152ed2366c654f80aa073579f9d5a93caffa74dcb839a62640ffe429f"}, - {file = "pyinstrument-4.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:c72a33168485172a7c2dbd6c4aa3262c8d2a6154bc0792403d8e0689c6ff5304"}, - {file = "pyinstrument-4.5.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8c3dabcb70b705d1342f52f0c3a00647c8a244d1e6ffe46459c05d4533ffabfc"}, - {file = "pyinstrument-4.5.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17d469572d48ee0b78d4ff7ed3972ff40abc70c7dab4777897c843cb03a6ab7b"}, - {file = "pyinstrument-4.5.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f66416fa4b3413bc60e6b499e60e8d009384c85cd03535f82337dce55801c43f"}, - {file = "pyinstrument-4.5.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c888fca16c3ae04a6d7b5a29ee0c12f9fa23792fab695117160c48c3113428f"}, - {file = "pyinstrument-4.5.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:861fe8c41ac7e54a57ed6ef63268c2843fbc695012427a3d19b2eb1307d9bc61"}, - {file = "pyinstrument-4.5.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:0bf91cd5d6c80ff25fd1a136545a5cf752522190b6e6f3806559c352f18d0e73"}, - {file = "pyinstrument-4.5.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b16afb5e67d4d901ef702160e85e04001183b7cdea7e38c8dfb37e491986ccff"}, - {file = "pyinstrument-4.5.1-cp37-cp37m-win32.whl", hash = "sha256:f12312341c505e7441e5503b7c77974cff4156d072f0e7f9f822a6b5fdafbc20"}, - {file = "pyinstrument-4.5.1-cp37-cp37m-win_amd64.whl", hash = "sha256:06d96b442a1ae7c267aa34450b028d80559c4f968b10e4d3ce631b0a6ccea6ef"}, - {file = "pyinstrument-4.5.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c6234094ff0ea7d51e7d4699f192019359bf12d5bbe9e1c9c5d1983562162d58"}, - {file = "pyinstrument-4.5.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f025522edc35831af34bcdbe300b272b432d2afd9811eb780e326116096cbff5"}, - {file = "pyinstrument-4.5.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0a091c575367af427e80829ec414f69a8398acdd68ddfaeb335598071329b44"}, - {file = "pyinstrument-4.5.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ec169cd288f230cbc6a1773384f20481b0a14d2d7cceecf1fb65e56835eaa9a"}, - {file = "pyinstrument-4.5.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:004745e83c79d0db7ea8787aba476f13d8bb6d00d75b00d8dbd933a9c7ee1685"}, - {file = "pyinstrument-4.5.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:54be442df5039bc7c73e3e86de0093ca82f3e446392bebab29e51a1512c796cb"}, - {file = "pyinstrument-4.5.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:35e5be8621b3381cf10b1f16bbae527cb7902e87b64e0c9706bc244f6fee51b1"}, - {file = "pyinstrument-4.5.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:50e93fac7e42dba8b3c630ed00808e7664d0d6c6b0c477462e7b061a31be23dc"}, - {file = "pyinstrument-4.5.1-cp38-cp38-win32.whl", hash = "sha256:b0a88bfe24d4efb129ef2ae7e2d50fa29908634e893bf154e29f91655c558692"}, - {file = "pyinstrument-4.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:b8a71ef9c2ad81e5f3d5f92e1d21a0c9b5f9992e94d0bfcfa9020ea88df4e69f"}, - {file = "pyinstrument-4.5.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9882827e681466d1aff931479387ed77e29674c179bc10fc67f1fa96f724dd20"}, - {file = "pyinstrument-4.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:427228a011d5be21ff009dc05fcd512cee86ea2a51687a3300b8b822bad6815b"}, - {file = "pyinstrument-4.5.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50501756570352e78aaf2aee509b5eb6c68706a2f2701dc3a84b066e570c61ca"}, - {file = "pyinstrument-4.5.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6471f47860f1a5807c182be7184839d747e2702625d44ec19a8f652380541020"}, - {file = "pyinstrument-4.5.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59727936e862677e9716b9317e209e5e31aa1da7eb03c65083d9dee8b5fbe0f8"}, - {file = "pyinstrument-4.5.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9341a07885cba57c2a134847aacb629f27b4ce06a4950a4619629d35a6d8619c"}, - {file = "pyinstrument-4.5.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:63c27f2ae8f0501dca4d52b42285be36095f4461dd9e340d32104c2b2df3a731"}, - {file = "pyinstrument-4.5.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1bda9b73dde7df63d7606e37340ba0a63ad59053e59eff318f3b67d5a7ea5579"}, - {file = "pyinstrument-4.5.1-cp39-cp39-win32.whl", hash = "sha256:300ed27714c43ae2feb7572e9b3ca39660fb89b3b298e94ad24b64609f823d3c"}, - {file = "pyinstrument-4.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:f2d8e4a9a8167c2a47874d72d6ab0a4266ed484e9ae30f35a515f8594b224b51"}, - {file = "pyinstrument-4.5.1.tar.gz", hash = "sha256:b55a93be883c65650515319455636d32ab32692b097faa1e07f8cd9d4e0eeaa9"}, -] - -[package.extras] -jupyter = ["ipython"] - -[[package]] -name = "pyjwt" -version = "2.8.0" -description = "JSON Web Token implementation in Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, - {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, -] - -[package.dependencies] -cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"crypto\""} - -[package.extras] -crypto = ["cryptography (>=3.4.0)"] -dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] - -[[package]] -name = "pynacl" -version = "1.5.0" -description = "Python binding to the Networking and Cryptography (NaCl) library" -optional = false -python-versions = ">=3.6" -files = [ - {file = "PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1"}, - {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92"}, - {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394"}, - {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d"}, - {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858"}, - {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b"}, - {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff"}, - {file = "PyNaCl-1.5.0-cp36-abi3-win32.whl", hash = "sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543"}, - {file = "PyNaCl-1.5.0-cp36-abi3-win_amd64.whl", hash = "sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93"}, - {file = "PyNaCl-1.5.0.tar.gz", hash = "sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba"}, -] - -[package.dependencies] -cffi = ">=1.4.1" - -[package.extras] -docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"] -tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] - -[[package]] -name = "pytablewriter" -version = "0.64.2" -description = "pytablewriter is a Python library to write a table in various formats: AsciiDoc / CSV / Elasticsearch / HTML / JavaScript / JSON / LaTeX / LDJSON / LTSV / Markdown / MediaWiki / NumPy / Excel / Pandas / Python / reStructuredText / SQLite / TOML / TSV / YAML." -optional = false -python-versions = ">=3.6" -files = [ - {file = "pytablewriter-0.64.2-py3-none-any.whl", hash = "sha256:c46d1ddc40ef4d084213a86f8626cee33b3aa0119535aa8555da64cb5b65e382"}, - {file = "pytablewriter-0.64.2.tar.gz", hash = "sha256:99409d401d6ef5f06d1bc40f265a8e3053afe4cbfbaf709f71124076afb40dbb"}, -] - -[package.dependencies] -DataProperty = ">=0.55.0,<2" -mbstrdecoder = ">=1.0.0,<2" -pathvalidate = ">=2.3.0,<3" -setuptools = ">=38.3.0" -tabledata = ">=1.3.0,<2" -tcolorpy = ">=0.0.5,<1" -typepy = {version = ">=1.2.0,<2", extras = ["datetime"]} - -[package.extras] -all = ["PyYAML (>=3.11,<7)", "SimpleSQLite (>=1.1.3,<2)", "XlsxWriter (>=0.9.6,<4)", "dominate (>=2.1.5,<3)", "elasticsearch (>=7.0.5,<8)", "loguru (>=0.4.1,<1)", "pytablereader (>=0.31.3,<2)", "pytablewriter-altrow-theme (>=0.0.2,<1)", "simplejson (>=3.8.1,<4)", "toml (>=0.9.3,<1)", "xlwt"] -docs = ["Sphinx (>=2.4)", "sphinx-rtd-theme"] -es = ["elasticsearch (>=7.0.5,<8)"] -es7 = ["elasticsearch (>=7.0.5,<8)"] -excel = ["XlsxWriter (>=0.9.6,<4)", "xlwt"] -from = ["pytablereader (>=0.31.3,<2)"] -html = ["dominate (>=2.1.5,<3)"] -logging = ["loguru (>=0.4.1,<1)"] -sqlite = ["SimpleSQLite (>=1.1.3,<2)"] -test = ["PyYAML (>=3.11,<7)", "SimpleSQLite (>=1.1.3,<2)", "XlsxWriter (>=0.9.6,<4)", "beautifulsoup4 (>=4.10)", "dominate (>=2.1.5,<3)", "elasticsearch (>=7.0.5,<8)", "idna (<3)", "loguru (>=0.4.1,<1)", "pytablereader (>=0.31.3,<2)", "pytablereader[excel,sqlite] (>=0.31.3)", "pytablewriter-altrow-theme (>=0.0.2,<1)", "pytest (>=6.0.1)", "pytest-md-report (>=0.1)", "simplejson (>=3.8.1,<4)", "sqliteschema (>=1.3.0)", "tablib (>=3.2.0)", "toml (>=0.9.3,<1)", "xlwt"] -theme = ["pytablewriter-altrow-theme (>=0.0.2,<1)"] -toml = ["toml (>=0.9.3,<1)"] -yaml = ["PyYAML (>=3.11,<7)"] - -[[package]] -name = "pytest" -version = "6.2.5" -description = "pytest: simple powerful testing with Python" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, - {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, -] - -[package.dependencies] -atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} -attrs = ">=19.2.0" -colorama = {version = "*", markers = "sys_platform == \"win32\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<2.0" -py = ">=1.8.2" -toml = "*" - -[package.extras] -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] - -[[package]] -name = "pytest-mock" -version = "3.10.0" -description = "Thin-wrapper around the mock package for easier use with pytest" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest-mock-3.10.0.tar.gz", hash = "sha256:fbbdb085ef7c252a326fd8cdcac0aa3b1333d8811f131bdcc701002e1be7ed4f"}, - {file = "pytest_mock-3.10.0-py3-none-any.whl", hash = "sha256:f4c973eeae0282963eb293eb173ce91b091a79c1334455acfac9ddee8a1c784b"}, -] - -[package.dependencies] -pytest = ">=5.0" - -[package.extras] -dev = ["pre-commit", "pytest-asyncio", "tox"] - -[[package]] -name = "python-dateutil" -version = "2.8.2" -description = "Extensions to the standard Python datetime module" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, -] - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "pytz" -version = "2023.3" -description = "World timezone definitions, modern and historical" -optional = false -python-versions = "*" -files = [ - {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"}, - {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"}, -] - -[[package]] -name = "pyyaml" -version = "6.0.1" -description = "YAML parser and emitter for Python" -optional = false -python-versions = ">=3.6" -files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, -] - -[[package]] -name = "requests" -version = "2.31.0" -description = "Python HTTP for Humans." -optional = false -python-versions = ">=3.7" -files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "requests-oauthlib" -version = "1.3.1" -description = "OAuthlib authentication support for Requests." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"}, - {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"}, -] - -[package.dependencies] -oauthlib = ">=3.0.0" -requests = ">=2.0.0" - -[package.extras] -rsa = ["oauthlib[signedtoken] (>=3.0.0)"] - -[[package]] -name = "rich" -version = "11.2.0" -description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -optional = false -python-versions = ">=3.6.2,<4.0.0" -files = [ - {file = "rich-11.2.0-py3-none-any.whl", hash = "sha256:d5f49ad91fb343efcae45a2b2df04a9755e863e50413623ab8c9e74f05aee52b"}, - {file = "rich-11.2.0.tar.gz", hash = "sha256:1a6266a5738115017bb64a66c59c717e7aa047b3ae49a011ede4abdeffc6536e"}, -] - -[package.dependencies] -colorama = ">=0.4.0,<0.5.0" -commonmark = ">=0.9.0,<0.10.0" -pygments = ">=2.6.0,<3.0.0" - -[package.extras] -jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] - -[[package]] -name = "rsa" -version = "4.9" -description = "Pure-Python RSA implementation" -optional = false -python-versions = ">=3.6,<4" -files = [ - {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, - {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, -] - -[package.dependencies] -pyasn1 = ">=0.1.3" - -[[package]] -name = "ruamel-yaml" -version = "0.17.32" -description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" -optional = false -python-versions = ">=3" -files = [ - {file = "ruamel.yaml-0.17.32-py3-none-any.whl", hash = "sha256:23cd2ed620231677564646b0c6a89d138b6822a0d78656df7abda5879ec4f447"}, - {file = "ruamel.yaml-0.17.32.tar.gz", hash = "sha256:ec939063761914e14542972a5cba6d33c23b0859ab6342f61cf070cfc600efc2"}, -] - -[package.dependencies] -"ruamel.yaml.clib" = {version = ">=0.2.7", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.12\""} - -[package.extras] -docs = ["ryd"] -jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] - -[[package]] -name = "ruamel-yaml-clib" -version = "0.2.7" -description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" -optional = false -python-versions = ">=3.5" -files = [ - {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d5859983f26d8cd7bb5c287ef452e8aacc86501487634573d260968f753e1d71"}, - {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:debc87a9516b237d0466a711b18b6ebeb17ba9f391eb7f91c649c5c4ec5006c7"}, - {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:df5828871e6648db72d1c19b4bd24819b80a755c4541d3409f0f7acd0f335c80"}, - {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:efa08d63ef03d079dcae1dfe334f6c8847ba8b645d08df286358b1f5293d24ab"}, - {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-win32.whl", hash = "sha256:763d65baa3b952479c4e972669f679fe490eee058d5aa85da483ebae2009d231"}, - {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:d000f258cf42fec2b1bbf2863c61d7b8918d31ffee905da62dede869254d3b8a"}, - {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e"}, - {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_12_6_arm64.whl", hash = "sha256:721bc4ba4525f53f6a611ec0967bdcee61b31df5a56801281027a3a6d1c2daf5"}, - {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:41d0f1fa4c6830176eef5b276af04c89320ea616655d01327d5ce65e50575c94"}, - {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-win32.whl", hash = "sha256:f6d3d39611ac2e4f62c3128a9eed45f19a6608670c5a2f4f07f24e8de3441d38"}, - {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:da538167284de58a52109a9b89b8f6a53ff8437dd6dc26d33b57bf6699153122"}, - {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:4b3a93bb9bc662fc1f99c5c3ea8e623d8b23ad22f861eb6fce9377ac07ad6072"}, - {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-macosx_12_0_arm64.whl", hash = "sha256:a234a20ae07e8469da311e182e70ef6b199d0fbeb6c6cc2901204dd87fb867e8"}, - {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:15910ef4f3e537eea7fe45f8a5d19997479940d9196f357152a09031c5be59f3"}, - {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:370445fd795706fd291ab00c9df38a0caed0f17a6fb46b0f607668ecb16ce763"}, - {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-win32.whl", hash = "sha256:ecdf1a604009bd35c674b9225a8fa609e0282d9b896c03dd441a91e5f53b534e"}, - {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-win_amd64.whl", hash = "sha256:f34019dced51047d6f70cb9383b2ae2853b7fc4dce65129a5acd49f4f9256646"}, - {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2aa261c29a5545adfef9296b7e33941f46aa5bbd21164228e833412af4c9c75f"}, - {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f01da5790e95815eb5a8a138508c01c758e5f5bc0ce4286c4f7028b8dd7ac3d0"}, - {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:40d030e2329ce5286d6b231b8726959ebbe0404c92f0a578c0e2482182e38282"}, - {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c3ca1fbba4ae962521e5eb66d72998b51f0f4d0f608d3c0347a48e1af262efa7"}, - {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-win32.whl", hash = "sha256:7bdb4c06b063f6fd55e472e201317a3bb6cdeeee5d5a38512ea5c01e1acbdd93"}, - {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:be2a7ad8fd8f7442b24323d24ba0b56c51219513cfa45b9ada3b87b76c374d4b"}, - {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:91a789b4aa0097b78c93e3dc4b40040ba55bef518f84a40d4442f713b4094acb"}, - {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:99e77daab5d13a48a4054803d052ff40780278240a902b880dd37a51ba01a307"}, - {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:3243f48ecd450eddadc2d11b5feb08aca941b5cd98c9b1db14b2fd128be8c697"}, - {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:8831a2cedcd0f0927f788c5bdf6567d9dc9cc235646a434986a852af1cb54b4b"}, - {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-win32.whl", hash = "sha256:3110a99e0f94a4a3470ff67fc20d3f96c25b13d24c6980ff841e82bafe827cac"}, - {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:92460ce908546ab69770b2e576e4f99fbb4ce6ab4b245345a3869a0a0410488f"}, - {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5bc0667c1eb8f83a3752b71b9c4ba55ef7c7058ae57022dd9b29065186a113d9"}, - {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:4a4d8d417868d68b979076a9be6a38c676eca060785abaa6709c7b31593c35d1"}, - {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:bf9a6bc4a0221538b1a7de3ed7bca4c93c02346853f44e1cd764be0023cd3640"}, - {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a7b301ff08055d73223058b5c46c55638917f04d21577c95e00e0c4d79201a6b"}, - {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-win32.whl", hash = "sha256:d5e51e2901ec2366b79f16c2299a03e74ba4531ddcfacc1416639c557aef0ad8"}, - {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:184faeaec61dbaa3cace407cffc5819f7b977e75360e8d5ca19461cd851a5fc5"}, - {file = "ruamel.yaml.clib-0.2.7.tar.gz", hash = "sha256:1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497"}, -] - -[[package]] -name = "setuptools" -version = "68.0.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.7" -files = [ - {file = "setuptools-68.0.0-py3-none-any.whl", hash = "sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f"}, - {file = "setuptools-68.0.0.tar.gz", hash = "sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - -[[package]] -name = "smmap" -version = "5.0.0" -description = "A pure Python implementation of a sliding window memory map manager" -optional = false -python-versions = ">=3.6" -files = [ - {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, - {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, -] - -[[package]] -name = "tabledata" -version = "1.3.1" -description = "tabledata is a Python library to represent tabular data. Used for pytablewriter/pytablereader/SimpleSQLite/etc." -optional = false -python-versions = ">=3.6" -files = [ - {file = "tabledata-1.3.1-py3-none-any.whl", hash = "sha256:73e610c378670a2b9bb80e56cece24427d18c8672a36c80fcdf2a3753b19642b"}, - {file = "tabledata-1.3.1.tar.gz", hash = "sha256:6608f86171f3285f16251ed6649dcf6e953d4fe6a8e622d39b80d1954b9e7711"}, -] - -[package.dependencies] -DataProperty = ">=0.54.2,<2" -typepy = ">=1.2.0,<2" - -[package.extras] -logging = ["loguru (>=0.4.1,<1)"] -test = ["pytablewriter (>=0.46)", "pytest"] - -[[package]] -name = "tcolorpy" -version = "0.1.3" -description = "tcolopy is a Python library to apply true color for terminal text." -optional = false -python-versions = ">=3.7" -files = [ - {file = "tcolorpy-0.1.3-py3-none-any.whl", hash = "sha256:4ba9e4d52696a36dc16a55c20317115fb46e4b8e02796e8e270132719bcefad4"}, - {file = "tcolorpy-0.1.3.tar.gz", hash = "sha256:43c1afe908f9968ff5ce59f129b62e392049b8e7cd6a8d3f416bd3d372bb5c7a"}, -] - -[package.extras] -test = ["pytest", "pytest-md-report (>=0.1)"] - -[[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, - {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, -] - -[[package]] -name = "typepy" -version = "1.3.1" -description = "typepy is a Python library for variable type checker/validator/converter at a run time." -optional = false -python-versions = ">=3.7" -files = [ - {file = "typepy-1.3.1-py3-none-any.whl", hash = "sha256:892566bff279368d63f02901aba0a3ce78cd7a319ec1f2bf6c8baab3520207a3"}, - {file = "typepy-1.3.1.tar.gz", hash = "sha256:dfc37b888d6eed8542208389efa60ec8454e06fd84b276b45b2e33897f9d7825"}, -] - -[package.dependencies] -mbstrdecoder = ">=1.0.0,<2" -packaging = {version = "*", optional = true, markers = "extra == \"datetime\""} -python-dateutil = {version = ">=2.8.0,<3.0.0", optional = true, markers = "extra == \"datetime\""} -pytz = {version = ">=2018.9", optional = true, markers = "extra == \"datetime\""} - -[package.extras] -datetime = ["packaging", "python-dateutil (>=2.8.0,<3.0.0)", "pytz (>=2018.9)"] -test = ["packaging", "pytest (>=6.0.1)", "python-dateutil (>=2.8.0,<3.0.0)", "pytz (>=2018.9)", "tcolorpy"] - -[[package]] -name = "typing-extensions" -version = "4.7.1" -description = "Backported and Experimental Type Hints for Python 3.7+" -optional = false -python-versions = ">=3.7" -files = [ - {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, - {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, -] - -[[package]] -name = "urllib3" -version = "1.26.16" -description = "HTTP library with thread-safe connection pooling, file post, and more." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" -files = [ - {file = "urllib3-1.26.16-py2.py3-none-any.whl", hash = "sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f"}, - {file = "urllib3-1.26.16.tar.gz", hash = "sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] - -[[package]] -name = "wrapt" -version = "1.15.0" -description = "Module for decorators, wrappers and monkey patching." -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" -files = [ - {file = "wrapt-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1"}, - {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29"}, - {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2"}, - {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46"}, - {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c"}, - {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09"}, - {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079"}, - {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e"}, - {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a"}, - {file = "wrapt-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923"}, - {file = "wrapt-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee"}, - {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727"}, - {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7"}, - {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0"}, - {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec"}, - {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90"}, - {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975"}, - {file = "wrapt-1.15.0-cp310-cp310-win32.whl", hash = "sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1"}, - {file = "wrapt-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e"}, - {file = "wrapt-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7"}, - {file = "wrapt-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72"}, - {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb"}, - {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e"}, - {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c"}, - {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3"}, - {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92"}, - {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98"}, - {file = "wrapt-1.15.0-cp311-cp311-win32.whl", hash = "sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416"}, - {file = "wrapt-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705"}, - {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29"}, - {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd"}, - {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb"}, - {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248"}, - {file = "wrapt-1.15.0-cp35-cp35m-win32.whl", hash = "sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559"}, - {file = "wrapt-1.15.0-cp35-cp35m-win_amd64.whl", hash = "sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639"}, - {file = "wrapt-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba"}, - {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752"}, - {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364"}, - {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475"}, - {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8"}, - {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418"}, - {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2"}, - {file = "wrapt-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1"}, - {file = "wrapt-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420"}, - {file = "wrapt-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317"}, - {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e"}, - {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e"}, - {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0"}, - {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019"}, - {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034"}, - {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653"}, - {file = "wrapt-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0"}, - {file = "wrapt-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e"}, - {file = "wrapt-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145"}, - {file = "wrapt-1.15.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f"}, - {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd"}, - {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b"}, - {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f"}, - {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6"}, - {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094"}, - {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7"}, - {file = "wrapt-1.15.0-cp38-cp38-win32.whl", hash = "sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b"}, - {file = "wrapt-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1"}, - {file = "wrapt-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86"}, - {file = "wrapt-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c"}, - {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d"}, - {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc"}, - {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29"}, - {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a"}, - {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8"}, - {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9"}, - {file = "wrapt-1.15.0-cp39-cp39-win32.whl", hash = "sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff"}, - {file = "wrapt-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6"}, - {file = "wrapt-1.15.0-py3-none-any.whl", hash = "sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640"}, - {file = "wrapt-1.15.0.tar.gz", hash = "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a"}, -] - -[[package]] -name = "yarl" -version = "1.9.2" -description = "Yet another URL library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c2ad583743d16ddbdf6bb14b5cd76bf43b0d0006e918809d5d4ddf7bde8dd82"}, - {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82aa6264b36c50acfb2424ad5ca537a2060ab6de158a5bd2a72a032cc75b9eb8"}, - {file = "yarl-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0c77533b5ed4bcc38e943178ccae29b9bcf48ffd1063f5821192f23a1bd27b9"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee4afac41415d52d53a9833ebae7e32b344be72835bbb589018c9e938045a560"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bf345c3a4f5ba7f766430f97f9cc1320786f19584acc7086491f45524a551ac"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a96c19c52ff442a808c105901d0bdfd2e28575b3d5f82e2f5fd67e20dc5f4ea"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:891c0e3ec5ec881541f6c5113d8df0315ce5440e244a716b95f2525b7b9f3608"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3a53ba34a636a256d767c086ceb111358876e1fb6b50dfc4d3f4951d40133d5"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:566185e8ebc0898b11f8026447eacd02e46226716229cea8db37496c8cdd26e0"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2b0738fb871812722a0ac2154be1f049c6223b9f6f22eec352996b69775b36d4"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:32f1d071b3f362c80f1a7d322bfd7b2d11e33d2adf395cc1dd4df36c9c243095"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:e9fdc7ac0d42bc3ea78818557fab03af6181e076a2944f43c38684b4b6bed8e3"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56ff08ab5df8429901ebdc5d15941b59f6253393cb5da07b4170beefcf1b2528"}, - {file = "yarl-1.9.2-cp310-cp310-win32.whl", hash = "sha256:8ea48e0a2f931064469bdabca50c2f578b565fc446f302a79ba6cc0ee7f384d3"}, - {file = "yarl-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:50f33040f3836e912ed16d212f6cc1efb3231a8a60526a407aeb66c1c1956dde"}, - {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:646d663eb2232d7909e6601f1a9107e66f9791f290a1b3dc7057818fe44fc2b6"}, - {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aff634b15beff8902d1f918012fc2a42e0dbae6f469fce134c8a0dc51ca423bb"}, - {file = "yarl-1.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a83503934c6273806aed765035716216cc9ab4e0364f7f066227e1aaea90b8d0"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b25322201585c69abc7b0e89e72790469f7dad90d26754717f3310bfe30331c2"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22a94666751778629f1ec4280b08eb11815783c63f52092a5953faf73be24191"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ec53a0ea2a80c5cd1ab397925f94bff59222aa3cf9c6da938ce05c9ec20428d"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:159d81f22d7a43e6eabc36d7194cb53f2f15f498dbbfa8edc8a3239350f59fe7"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:832b7e711027c114d79dffb92576acd1bd2decc467dec60e1cac96912602d0e6"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:95d2ecefbcf4e744ea952d073c6922e72ee650ffc79028eb1e320e732898d7e8"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d4e2c6d555e77b37288eaf45b8f60f0737c9efa3452c6c44626a5455aeb250b9"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:783185c75c12a017cc345015ea359cc801c3b29a2966c2655cd12b233bf5a2be"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:b8cc1863402472f16c600e3e93d542b7e7542a540f95c30afd472e8e549fc3f7"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:822b30a0f22e588b32d3120f6d41e4ed021806418b4c9f0bc3048b8c8cb3f92a"}, - {file = "yarl-1.9.2-cp311-cp311-win32.whl", hash = "sha256:a60347f234c2212a9f0361955007fcf4033a75bf600a33c88a0a8e91af77c0e8"}, - {file = "yarl-1.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:be6b3fdec5c62f2a67cb3f8c6dbf56bbf3f61c0f046f84645cd1ca73532ea051"}, - {file = "yarl-1.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38a3928ae37558bc1b559f67410df446d1fbfa87318b124bf5032c31e3447b74"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac9bb4c5ce3975aeac288cfcb5061ce60e0d14d92209e780c93954076c7c4367"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3da8a678ca8b96c8606bbb8bfacd99a12ad5dd288bc6f7979baddd62f71c63ef"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13414591ff516e04fcdee8dc051c13fd3db13b673c7a4cb1350e6b2ad9639ad3"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf74d08542c3a9ea97bb8f343d4fcbd4d8f91bba5ec9d5d7f792dbe727f88938"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7221580dc1db478464cfeef9b03b95c5852cc22894e418562997df0d074ccc"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:494053246b119b041960ddcd20fd76224149cfea8ed8777b687358727911dd33"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:52a25809fcbecfc63ac9ba0c0fb586f90837f5425edfd1ec9f3372b119585e45"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:e65610c5792870d45d7b68c677681376fcf9cc1c289f23e8e8b39c1485384185"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:1b1bba902cba32cdec51fca038fd53f8beee88b77efc373968d1ed021024cc04"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:662e6016409828ee910f5d9602a2729a8a57d74b163c89a837de3fea050c7582"}, - {file = "yarl-1.9.2-cp37-cp37m-win32.whl", hash = "sha256:f364d3480bffd3aa566e886587eaca7c8c04d74f6e8933f3f2c996b7f09bee1b"}, - {file = "yarl-1.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6a5883464143ab3ae9ba68daae8e7c5c95b969462bbe42e2464d60e7e2698368"}, - {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5610f80cf43b6202e2c33ba3ec2ee0a2884f8f423c8f4f62906731d876ef4fac"}, - {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9a4e67ad7b646cd6f0938c7ebfd60e481b7410f574c560e455e938d2da8e0f4"}, - {file = "yarl-1.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:83fcc480d7549ccebe9415d96d9263e2d4226798c37ebd18c930fce43dfb9574"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fcd436ea16fee7d4207c045b1e340020e58a2597301cfbcfdbe5abd2356c2fb"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84e0b1599334b1e1478db01b756e55937d4614f8654311eb26012091be109d59"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3458a24e4ea3fd8930e934c129b676c27452e4ebda80fbe47b56d8c6c7a63a9e"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:838162460b3a08987546e881a2bfa573960bb559dfa739e7800ceeec92e64417"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4e2d08f07a3d7d3e12549052eb5ad3eab1c349c53ac51c209a0e5991bbada78"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de119f56f3c5f0e2fb4dee508531a32b069a5f2c6e827b272d1e0ff5ac040333"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:149ddea5abf329752ea5051b61bd6c1d979e13fbf122d3a1f9f0c8be6cb6f63c"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:674ca19cbee4a82c9f54e0d1eee28116e63bc6fd1e96c43031d11cbab8b2afd5"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:9b3152f2f5677b997ae6c804b73da05a39daa6a9e85a512e0e6823d81cdad7cc"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5415d5a4b080dc9612b1b63cba008db84e908b95848369aa1da3686ae27b6d2b"}, - {file = "yarl-1.9.2-cp38-cp38-win32.whl", hash = "sha256:f7a3d8146575e08c29ed1cd287068e6d02f1c7bdff8970db96683b9591b86ee7"}, - {file = "yarl-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:63c48f6cef34e6319a74c727376e95626f84ea091f92c0250a98e53e62c77c72"}, - {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75df5ef94c3fdc393c6b19d80e6ef1ecc9ae2f4263c09cacb178d871c02a5ba9"}, - {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c027a6e96ef77d401d8d5a5c8d6bc478e8042f1e448272e8d9752cb0aff8b5c8"}, - {file = "yarl-1.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3b078dbe227f79be488ffcfc7a9edb3409d018e0952cf13f15fd6512847f3f7"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59723a029760079b7d991a401386390c4be5bfec1e7dd83e25a6a0881859e716"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b03917871bf859a81ccb180c9a2e6c1e04d2f6a51d953e6a5cdd70c93d4e5a2a"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1012fa63eb6c032f3ce5d2171c267992ae0c00b9e164efe4d73db818465fac3"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a74dcbfe780e62f4b5a062714576f16c2f3493a0394e555ab141bf0d746bb955"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c56986609b057b4839968ba901944af91b8e92f1725d1a2d77cbac6972b9ed1"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c315df3293cd521033533d242d15eab26583360b58f7ee5d9565f15fee1bef4"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b7232f8dfbd225d57340e441d8caf8652a6acd06b389ea2d3222b8bc89cbfca6"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:53338749febd28935d55b41bf0bcc79d634881195a39f6b2f767870b72514caf"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:066c163aec9d3d073dc9ffe5dd3ad05069bcb03fcaab8d221290ba99f9f69ee3"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8288d7cd28f8119b07dd49b7230d6b4562f9b61ee9a4ab02221060d21136be80"}, - {file = "yarl-1.9.2-cp39-cp39-win32.whl", hash = "sha256:b124e2a6d223b65ba8768d5706d103280914d61f5cae3afbc50fc3dfcc016623"}, - {file = "yarl-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:61016e7d582bc46a5378ffdd02cd0314fb8ba52f40f9cf4d9a5e7dbef88dee18"}, - {file = "yarl-1.9.2.tar.gz", hash = "sha256:04ab9d4b9f587c06d801c2abfe9317b77cdf996c65a90d5e84ecc45010823571"}, -] - -[package.dependencies] -idna = ">=2.0" -multidict = ">=4.0" - -[metadata] -lock-version = "2.0" -python-versions = "^3.10" -content-hash = "bc89c823b268bf41879da6dbc8559a4662ee564b407dd9da858f59e79cf7d058" diff --git a/airbyte-ci/connectors/qa-engine/pyproject.toml b/airbyte-ci/connectors/qa-engine/pyproject.toml deleted file mode 100644 index 05022c51d447e..0000000000000 --- a/airbyte-ci/connectors/qa-engine/pyproject.toml +++ /dev/null @@ -1,38 +0,0 @@ -[tool.poetry] -name = "qa-engine" -version = "0.2.2" -description = "Connector QA Engine for Airbyte" -authors = ["Airbyte "] -readme = "README.md" -packages = [{include = "qa_engine"}] - -[build-system] -requires = ["poetry-core"] -build-backend = "poetry.core.masonry.api" - -[tool.poetry.dependencies] -python = "^3.10" -click = "~8.1.3" -requests = "*" -PyYAML = "~6.0" -GitPython = "~3.1.29" -pydantic = "~1.9" -PyGithub = "~1.58.0" -rich = "*" -pandas = "~1.5.3" -pandas-gbq = "~0.19.0" -fsspec = "~2023.1.0" -gcsfs = "~2023.1.0" -pytablewriter = "~0.64.2" -pydash = "^7.0.4" -ruamel-yaml = "^0.17.30" -connector-ops = {path = "../connector_ops"} - -[tool.poetry.group.dev.dependencies] -pytest = "~6.2.5" -pytest-mock = "~3.10.0" -freezegun = "*" -pyinstrument = "*" - -[tool.poetry.scripts] -run-qa-engine = "qa_engine.main:main" diff --git a/airbyte-ci/connectors/qa-engine/qa_engine/cloud_availability_updater.py b/airbyte-ci/connectors/qa-engine/qa_engine/cloud_availability_updater.py deleted file mode 100644 index 9f22e4474ec3c..0000000000000 --- a/airbyte-ci/connectors/qa-engine/qa_engine/cloud_availability_updater.py +++ /dev/null @@ -1,215 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -import logging -import shutil -import tempfile -from datetime import datetime -from pathlib import Path -from typing import Iterable, List, Optional - -import git -import requests -from pydash.objects import get, set_ -from pytablewriter import MarkdownTableWriter -from ruamel.yaml import YAML - -from .constants import ( - AIRBYTE_GITHUB_REPO_URL, - AIRBYTE_ISSUES_ENDPOINT, - AIRBYTE_MAIN_BRANCH_NAME, - AIRBYTE_PR_ENDPOINT, - AIRBYTE_REPO_OWNER, - GIT_USER_EMAIL, - GIT_USERNAME, - GIT_USERNAME_FOR_AUTH, - GITHUB_API_COMMON_HEADERS, - GITHUB_API_TOKEN, - PR_LABELS, -) -from .models import ConnectorQAReport - -logger = logging.getLogger(__name__) - -# Initialize the ruamel.yaml object -yaml = YAML() -yaml.indent(mapping=2, sequence=4, offset=2) -yaml.preserve_quotes = True - - -def set_git_identity(repo: git.repo) -> git.repo: - repo.git.config("--global", "user.email", GIT_USER_EMAIL) - repo.git.config("--global", "user.name", GIT_USERNAME) - return repo - - -def get_authenticated_repo_url(git_username: str, github_api_token: str) -> str: - return AIRBYTE_GITHUB_REPO_URL.replace("https://", f"https://{git_username}:{github_api_token}@") - - -def clone_airbyte_repo(local_repo_path: Path) -> git.Repo: - logger.info(f"Cloning {AIRBYTE_GITHUB_REPO_URL} to {local_repo_path}") - authenticated_repo_url = get_authenticated_repo_url(GIT_USERNAME_FOR_AUTH, GITHUB_API_TOKEN) - return git.Repo.clone_from(authenticated_repo_url, local_repo_path, branch=AIRBYTE_MAIN_BRANCH_NAME) - - -def get_metadata_file_path(airbyte_repo_path: Path, connector: ConnectorQAReport) -> Path: - connector_folder_name = connector.connector_technical_name - metadata_file_path = airbyte_repo_path / f"airbyte-integrations/connectors/{connector_folder_name}/metadata.yaml" - if not metadata_file_path.exists(): - raise FileNotFoundError(f"Can't find the metadata file for {metadata_file_path}") - return metadata_file_path - - -def checkout_new_branch(airbyte_repo: git.Repo, new_branch_name: str) -> git.Head: - new_branch = airbyte_repo.create_head(new_branch_name) - new_branch.checkout() - logger.info(f"Checked out branch {new_branch_name}.") - return new_branch - - -def enable_in_cloud(connector: ConnectorQAReport, metadata_file_path: Path) -> Optional[Path]: - with open(metadata_file_path, "r") as f: - metadata = yaml.load(f) - connector_already_enabled_in_cloud = get(metadata, "data.registries.cloud.enabled", False) - - if connector_already_enabled_in_cloud: - logger.warning(f"{connector.connector_name}'s definition id is already in {metadata_file_path}.") - return None - - set_(metadata, "data.registries.cloud.enabled", True) - - with open(metadata_file_path, "w") as f: - yaml.dump(metadata, f) - - logger.info(f"Updated {metadata_file_path} to enable {connector.connector_name} in Cloud.") - return metadata_file_path - - -def commit_all_files(airbyte_repo: git.Repo, commit_message: str): - airbyte_repo.git.add("--all") - airbyte_repo.git.commit(m=commit_message) - logger.info("Committed file changes.") - - -def push_branch(airbyte_repo: git.Repo, branch: str): - airbyte_repo.git.push("--force", "--set-upstream", "origin", branch) - logger.info(f"Pushed branch {branch} to origin") - - -def pr_already_created_for_branch(head_branch: str) -> bool: - response = requests.get( - AIRBYTE_PR_ENDPOINT, - headers=GITHUB_API_COMMON_HEADERS, - params={"head": f"{AIRBYTE_REPO_OWNER}:{head_branch}", "state": "open"}, - ) - response.raise_for_status() - return len(response.json()) > 0 - - -def add_labels_to_pr(pr_number: str, labels_to_add: List) -> requests.Response: - url = AIRBYTE_ISSUES_ENDPOINT + f"/{pr_number}/labels" - response = requests.post(url, headers=GITHUB_API_COMMON_HEADERS, json={"labels": labels_to_add}) - response.raise_for_status() - logger.info(f"Labels {labels_to_add} added to PR {pr_number}") - return response - - -def create_pr(pr_title: str, pr_body: str, branch: str, labels: Optional[List]) -> Optional[requests.Response]: - data = { - "title": pr_title, - "body": pr_body, - "head": branch, - "base": AIRBYTE_MAIN_BRANCH_NAME, - } - if not pr_already_created_for_branch(branch): - response = requests.post(AIRBYTE_PR_ENDPOINT, headers=GITHUB_API_COMMON_HEADERS, json=data) - response.raise_for_status() - pr_url = response.json().get("url") - pr_number = response.json().get("number") - logger.info(f"A PR was opened: {pr_url}") - if labels: - add_labels_to_pr(pr_number, labels) - return response - else: - logger.warning(f"A PR already exists for branch {branch}") - - -def get_pr_body(eligible_connectors: List[ConnectorQAReport], excluded_connectors: List[ConnectorQAReport]) -> str: - body = ( - f"The Cloud Availability Updater decided that it's the right time to make the following {len(eligible_connectors)} connectors available on Cloud!" - + "\n\n" - ) - headers = ["connector_technical_name", "connector_version", "connector_definition_id"] - - writer = MarkdownTableWriter( - max_precision=2, - table_name="Promoted connectors", - headers=headers, - value_matrix=[[connector.dict()[h] for h in headers] for connector in eligible_connectors], - ) - body += writer.dumps() - body += "\n" - - writer = MarkdownTableWriter( - table_name="Excluded but eligible connectors", - max_precision=2, - headers=headers, - value_matrix=[[connector.dict()[h] for h in headers] for connector in excluded_connectors], - ) - - body += writer.dumps() - body += "\n ☝️ These eligible connectors are already in the definitions masks. They might have been explicitly pinned or excluded. We're not adding these for safety." - return body - - -def add_new_connector_to_cloud_catalog(airbyte_repo_path: Path, airbyte_repo: git.Repo, connector: ConnectorQAReport) -> bool: - """Updates the local definitions mask on Airbyte cloud repo. - Calls the generateCloudConnectorCatalog gradle task. - Commits these changes - - Args: - airbyte_repo (git.Repo): The Airbyte Cloud repo instance. - connector (ConnectorQAReport): The connector to add to a definitions mask. - Returns: - bool: Whether the connector was added or not. - """ - metadata_file_path = get_metadata_file_path(airbyte_repo_path, connector) - - updated_files = enable_in_cloud(connector, metadata_file_path) - if updated_files: - commit_all_files(airbyte_repo, f"🤖 Add {connector.connector_name} connector to cloud") - return True - return False - - -def batch_deploy_eligible_connectors_to_cloud_repo(eligible_connectors: Iterable): - repo_path = Path(tempfile.mkdtemp()) - airbyte_repo = clone_airbyte_repo(repo_path) - airbyte_repo = set_git_identity(airbyte_repo) - current_date = datetime.utcnow().strftime("%Y%m%d") - airbyte_repo.git.checkout(AIRBYTE_MAIN_BRANCH_NAME) - - new_branch_name = f"cloud-availability-updater/batch-deploy/{current_date}" - checkout_new_branch(airbyte_repo, new_branch_name) - - added_connectors = [] - explicitly_disabled_connectors = [] - for connector in eligible_connectors: - added = add_new_connector_to_cloud_catalog(repo_path, airbyte_repo, connector) - if added: - added_connectors.append(connector) - else: - explicitly_disabled_connectors.append(connector) - if added_connectors: - push_branch(airbyte_repo, new_branch_name) - create_pr( - f"🤖 Cloud Availability updater: new connectors to deploy [{current_date}]", - get_pr_body(added_connectors, explicitly_disabled_connectors), - new_branch_name, - PR_LABELS, - ) - - shutil.rmtree(repo_path) diff --git a/airbyte-ci/connectors/qa-engine/qa_engine/connector_adoption.sql b/airbyte-ci/connectors/qa-engine/qa_engine/connector_adoption.sql deleted file mode 100644 index 51d26bc57f850..0000000000000 --- a/airbyte-ci/connectors/qa-engine/qa_engine/connector_adoption.sql +++ /dev/null @@ -1,91 +0,0 @@ -WITH official_connector_syncs AS( - SELECT - * - FROM - airbyte_warehouse.connector_sync - WHERE - is_officially_published - AND( - job_status = "failed" - OR job_status = "succeeded" - ) -), -adoption_per_version AS( - SELECT - connector_definition_id, - docker_repository, - connector_version, - COUNT( DISTINCT( user_id )) AS number_of_users, - COUNT( DISTINCT( connection_id )) AS number_of_connections - FROM - official_connector_syncs - GROUP BY - connector_definition_id, - docker_repository, - connector_version -), -job_status_per_version AS( - SELECT - connector_definition_id, - docker_repository, - connector_version, - job_status, - COUNT( 1 ) AS sync_count - FROM - official_connector_syncs - GROUP BY - connector_definition_id, - docker_repository, - connector_version, - job_status -), -success_failure_by_connector_version AS( - SELECT - connector_definition_id, - docker_repository, - connector_version, - ifnull( - failed, - 0 - ) AS failed_syncs_count, - ifnull( - succeeded, - 0 - ) AS succeeded_syncs_count, - ifnull( - succeeded, - 0 - )+ ifnull( - failed, - 0 - ) AS total_syncs_count, - SAFE_DIVIDE( - ifnull( - succeeded, - 0 - ), - ifnull( - succeeded, - 0 - )+ ifnull( - failed, - 0 - ) - ) AS sync_success_rate - FROM - job_status_per_version PIVOT( - MAX( sync_count ) FOR job_status IN( - 'failed', - 'succeeded' - ) - ) -) SELECT - * -FROM - adoption_per_version -LEFT JOIN success_failure_by_connector_version - USING( - connector_definition_id, - docker_repository, - connector_version - ); diff --git a/airbyte-ci/connectors/qa-engine/qa_engine/constants.py b/airbyte-ci/connectors/qa-engine/qa_engine/constants.py deleted file mode 100644 index 92d98ae6172ce..0000000000000 --- a/airbyte-ci/connectors/qa-engine/qa_engine/constants.py +++ /dev/null @@ -1,54 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import os - -CONNECTOR_TEST_SUMMARY_URL = "https://connectors.airbyte.com/files/generated_reports/test_summary" -CLOUD_CATALOG_URL = "https://connectors.airbyte.com/files/registries/v0/cloud_registry.json" -OSS_CATALOG_URL = "https://connectors.airbyte.com/files/registries/v0/oss_registry.json" - -INAPPROPRIATE_FOR_CLOUD_USE_CONNECTORS = [ - "8be1cf83-fde1-477f-a4ad-318d23c9f3c6", # Local CSV - "a625d593-bba5-4a1c-a53d-2d246268a816", # Local JSON - "b76be0a6-27dc-4560-95f6-2623da0bd7b6", # Local SQL Lite - "2300fdcf-a532-419f-9f24-a014336e7966", # destination-yugabytedb, no strict-encrypt variant - "7cf88806-25f5-4e1a-b422-b2fa9e1b0090", # source-elasticsearch, no strict-encrypt variant - "0dad1a35-ccf8-4d03-b73e-6788c00b13ae", # source-tidb, no strict-encrypt variant - "d53f9084-fa6b-4a5a-976c-5b8392f4ad8a", # source-e2e-testing, a cloud variant already exists - "f3802bc4-5406-4752-9e8d-01e504ca8194", # destination-mqtt, no strict-encrypt variant - "825c5ee3-ed9a-4dd1-a2b6-79ed722f7b13", # destination-redpanda, no strict-encrypt variant - "58e6f9da-904e-11ed-a1eb-0242ac120002", # destination-teradata, no strict-encrypt variant - "bb6071d9-6f34-4766-bec2-d1d4ed81a653", # destination-exasol, no strict-encrypt variant - "7b7d7a0d-954c-45a0-bcfc-39a634b97736", # destination-weviate, no strict-encrypt variant - "06ec60c7-7468-45c0-91ac-174f6e1a788b", # destination-tidb, no strict-encrypt variant - "2af123bf-0aaf-4e0d-9784-cb497f23741a", # source-appstore, originally ignored in the source connector masks - "9fa5862c-da7c-11eb-8d19-0242ac130003", # source-cockroachdb, originally ignored in the source connector masks - "445831eb-78db-4b1f-8f1f-0d96ad8739e2", # source-drift, originally ignored in the source connector masks - "d917a47b-8537-4d0d-8c10-36a9928d4265", # source-kafka, originally ignored in the source connector masks - "9f760101-60ae-462f-9ee6-b7a9dafd454d", # destination-kafka, originally ignored in the destination connector masks - "4528e960-6f7b-4412-8555-7e0097e1da17", # destination-starburst-galaxy, no strict-encrypt variant - "aa8ba6fd-4875-d94e-fc8d-4e1e09aa2503", # source-teradata, no strict-encrypt variant - "447e0381-3780-4b46-bb62-00a4e3c8b8e2", # source-db2, no strict-encrypt variant - "0eeee7fb-518f-4045-bacc-9619e31c43ea", # destination-amazon-sqs, hide Amazon SQS Destination https://github.com/airbytehq/airbyte/issues/16316 -] - -GCS_QA_REPORT_PATH = "gs://airbyte-data-connectors-qa-engine/" -AIRBYTE_REPO_OWNER = "airbytehq" -AIRBYTE_REPO_NAME = "airbyte" -AIRBYTE_GITHUB_REPO_URL = f"https://github.com/{AIRBYTE_REPO_OWNER}/{AIRBYTE_REPO_NAME}.git" -AIRBYTE_MAIN_BRANCH_NAME = "master" -AIRBYTE_REPO_ENDPOINT = f"https://api.github.com/repos/{AIRBYTE_REPO_OWNER}/{AIRBYTE_REPO_NAME}" -AIRBYTE_PR_ENDPOINT = f"{AIRBYTE_REPO_ENDPOINT}/pulls" -AIRBYTE_ISSUES_ENDPOINT = f"{AIRBYTE_REPO_ENDPOINT}/issues" - -GITHUB_API_TOKEN = os.environ.get("GITHUB_API_TOKEN") -GITHUB_API_COMMON_HEADERS = { - "Accept": "application/vnd.github+json", - "X-GitHub-Api-Version": "2022-11-28", - "Authorization": f"Bearer {GITHUB_API_TOKEN}", -} -GIT_USERNAME_FOR_AUTH = "octavia-squidington-iii" -GIT_USER_EMAIL = f"{GIT_USERNAME_FOR_AUTH}@sers.noreply.github.com" -GIT_USERNAME = "Octavia Squidington III" -PR_LABELS = ["team/connector-ops", "cloud-availability-updater"] diff --git a/airbyte-ci/connectors/qa-engine/qa_engine/enrichments.py b/airbyte-ci/connectors/qa-engine/qa_engine/enrichments.py deleted file mode 100644 index 5bdfd48fe60e2..0000000000000 --- a/airbyte-ci/connectors/qa-engine/qa_engine/enrichments.py +++ /dev/null @@ -1,56 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -import pandas as pd - - -def get_enriched_catalog( - oss_catalog: pd.DataFrame, cloud_catalog: pd.DataFrame, adoption_metrics_per_connector_version: pd.DataFrame -) -> pd.DataFrame: - """Merge OSS and Cloud catalog in a single dataframe on their definition id. - Transformations: - - Rename columns to snake case. - - Rename name column to connector_name. - - Rename docker_image_tag to connector_version. - - Replace null value for support_level with unknown. - Enrichments: - - is_on_cloud: determined by the merge operation results. - - connector_technical_name: built from the docker repository field. airbyte/source-pokeapi -> source-pokeapi. - - Adoptions metrics: add the columns from the adoption_metrics_per_connector_version dataframe. - Args: - oss_catalog (pd.DataFrame): The open source catalog dataframe. - cloud_catalog (pd.DataFrame): The cloud catalog dataframe. - adoption_metrics_per_connector_version (pd.DataFrame): The crowd sourced adoptions metrics. - - Returns: - pd.DataFrame: The enriched catalog. - """ - enriched_catalog = pd.merge( - oss_catalog, - cloud_catalog, - how="left", - on="connector_definition_id", - indicator=True, - suffixes=("", "_cloud"), - ) - - enriched_catalog.columns = enriched_catalog.columns.str.replace( - "(?<=[a-z])(?=[A-Z])", "_", regex=True - ).str.lower() # column names to snake case - enriched_catalog = enriched_catalog[[c for c in enriched_catalog.columns if "_cloud" not in c]] - enriched_catalog["is_on_cloud"] = enriched_catalog["_merge"] == "both" - enriched_catalog = enriched_catalog.drop(columns="_merge") - enriched_catalog["connector_name"] = enriched_catalog["name"] - enriched_catalog["connector_technical_name"] = enriched_catalog["docker_repository"].str.replace("airbyte/", "") - enriched_catalog["connector_version"] = enriched_catalog["docker_image_tag"] - enriched_catalog["support_level"] = enriched_catalog["support_level"].fillna("unknown") - enriched_catalog = enriched_catalog.merge( - adoption_metrics_per_connector_version, how="left", on=["connector_definition_id", "connector_version"] - ) - enriched_catalog = enriched_catalog.drop_duplicates(subset=["connector_definition_id", "connector_version"]) - enriched_catalog[adoption_metrics_per_connector_version.columns] = enriched_catalog[ - adoption_metrics_per_connector_version.columns - ].fillna(0) - return enriched_catalog diff --git a/airbyte-ci/connectors/qa-engine/qa_engine/inputs.py b/airbyte-ci/connectors/qa-engine/qa_engine/inputs.py deleted file mode 100644 index 8f481ae8ac824..0000000000000 --- a/airbyte-ci/connectors/qa-engine/qa_engine/inputs.py +++ /dev/null @@ -1,113 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import logging -from enum import Enum -from importlib.resources import files -from typing import Optional - -import pandas as pd -import requests - -from .constants import CONNECTOR_TEST_SUMMARY_URL - -LOGGER = logging.getLogger(__name__) - - -class BUILD_STATUSES(str, Enum): - SUCCESS = "success" - FAILURE = "failure" - NOT_FOUND = None - - @classmethod - def from_string(cls, string_value: Optional[str]) -> "BUILD_STATUSES": - if string_value is None: - return BUILD_STATUSES.NOT_FOUND - - return BUILD_STATUSES[string_value.upper()] - - -def get_connector_build_output_url(connector_technical_name: str) -> str: - """ - Get the connector build output url. - """ - # remove connectors/ prefix from connector_technical_name - connector_technical_name = connector_technical_name.replace("connectors/", "") - return f"{CONNECTOR_TEST_SUMMARY_URL}/{connector_technical_name}/index.json" - - -def fetch_latest_build_status_for_connector(connector_technical_name: str) -> BUILD_STATUSES: - """Fetch the latest build status for a given connector version.""" - connector_build_output_url = get_connector_build_output_url(connector_technical_name) - connector_build_output_response = requests.get(connector_build_output_url) - - # if the connector returned successfully, return the outcome - if connector_build_output_response.status_code == 200: - connector_build_output = connector_build_output_response.json() - - # we want to get the latest build status - # sort by date and get the first element - latest_connector_run = sorted(connector_build_output, key=lambda x: x["date"], reverse=True)[0] - - outcome = latest_connector_run.get("success") - if outcome is None: - LOGGER.error(f"Error: No outcome value for connector {connector_technical_name}") - return BUILD_STATUSES.NOT_FOUND - - if outcome == True: - return BUILD_STATUSES.SUCCESS - - if outcome == False: - return BUILD_STATUSES.FAILURE - - try: - return BUILD_STATUSES.from_string(outcome) - except KeyError: - LOGGER.error(f"Error: Unexpected build status value: {outcome} for connector {connector_technical_name}") - return BUILD_STATUSES.NOT_FOUND - - else: - return BUILD_STATUSES.NOT_FOUND - - -def fetch_remote_catalog(catalog_url: str) -> pd.DataFrame: - """Fetch a combined remote catalog and return a single DataFrame - with sources and destinations defined by the connector_type column. - - Args: - catalog_url (str): The remote catalog url. - - Returns: - pd.DataFrame: Sources and destinations combined under a denormalized DataFrame. - """ - raw_catalog = requests.get(catalog_url).json() - sources = pd.DataFrame(raw_catalog["sources"]) - destinations = pd.DataFrame(raw_catalog["destinations"]) - sources["connector_type"] = "source" - sources["connector_definition_id"] = sources.sourceDefinitionId - destinations["connector_type"] = "destination" - destinations["connector_definition_id"] = destinations.destinationDefinitionId - return pd.concat([sources, destinations]) - - -def fetch_adoption_metrics_per_connector_version() -> pd.DataFrame: - """Retrieve adoptions metrics for each connector version from our data warehouse. - - Returns: - pd.DataFrame: A dataframe with adoption metrics per connector version. - """ - connector_adoption_sql = files("qa_engine").joinpath("connector_adoption.sql").read_text() - adoption_metrics = pd.read_gbq(connector_adoption_sql, project_id="airbyte-data-prod") - return adoption_metrics[ - [ - "connector_definition_id", - "connector_version", - "number_of_connections", - "number_of_users", - "succeeded_syncs_count", - "failed_syncs_count", - "total_syncs_count", - "sync_success_rate", - ] - ] diff --git a/airbyte-ci/connectors/qa-engine/qa_engine/main.py b/airbyte-ci/connectors/qa-engine/qa_engine/main.py deleted file mode 100644 index bde5cb0c2f44e..0000000000000 --- a/airbyte-ci/connectors/qa-engine/qa_engine/main.py +++ /dev/null @@ -1,36 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import logging - -import click - -from . import cloud_availability_updater, enrichments, inputs, outputs, validations -from .constants import CLOUD_CATALOG_URL, GCS_QA_REPORT_PATH, OSS_CATALOG_URL - -logging.basicConfig(level=logging.INFO) - -logger = logging.getLogger(__name__) - - -@click.command() -@click.option("--create-prs", is_flag=True) -def main(create_prs): - logger.info("Fetch the OSS connectors catalog.") - oss_catalog = inputs.fetch_remote_catalog(OSS_CATALOG_URL) - logger.info("Fetch the Cloud connectors catalog.") - cloud_catalog = inputs.fetch_remote_catalog(CLOUD_CATALOG_URL) - logger.info("Fetch adoption metrics.") - adoption_metrics_per_connector_version = inputs.fetch_adoption_metrics_per_connector_version() - logger.info("Start the enriched catalog generation.") - enriched_catalog = enrichments.get_enriched_catalog(oss_catalog, cloud_catalog, adoption_metrics_per_connector_version) - logger.info("Start the QA report generation.") - qa_report = validations.get_qa_report(enriched_catalog, len(oss_catalog)) - logger.info("Persist QA report to GCS") - outputs.persist_qa_report(qa_report, GCS_QA_REPORT_PATH, public_fields_only=False) - - if create_prs: - logger.info("Start eligible connectors deployment to Cloud.") - eligible_connectors = validations.get_connectors_eligible_for_cloud(qa_report) - cloud_availability_updater.batch_deploy_eligible_connectors_to_cloud_repo(eligible_connectors) diff --git a/airbyte-ci/connectors/qa-engine/qa_engine/models.py b/airbyte-ci/connectors/qa-engine/qa_engine/models.py deleted file mode 100644 index c5c009a7b0836..0000000000000 --- a/airbyte-ci/connectors/qa-engine/qa_engine/models.py +++ /dev/null @@ -1,38 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from datetime import datetime -from typing import List - -from connector_ops.utils import ConnectorTypeEnum, SupportLevelEnum -from pydantic import BaseModel, Field - -PUBLIC_FIELD = Field(..., is_public=True) -PRIVATE_FIELD = Field(..., is_public=False) - - -class ConnectorQAReport(BaseModel): - connector_type: ConnectorTypeEnum = PUBLIC_FIELD - connector_name: str = PUBLIC_FIELD - connector_technical_name: str = PUBLIC_FIELD - connector_definition_id: str = PUBLIC_FIELD - connector_version: str = PUBLIC_FIELD - support_level: SupportLevelEnum = PUBLIC_FIELD - is_on_cloud: bool = PUBLIC_FIELD - is_appropriate_for_cloud_use: bool = PUBLIC_FIELD - latest_build_is_successful: bool = PUBLIC_FIELD - documentation_is_available: bool = PUBLIC_FIELD - number_of_connections: int = PRIVATE_FIELD - number_of_users: int = PRIVATE_FIELD - sync_success_rate: float = PRIVATE_FIELD - total_syncs_count: int = PRIVATE_FIELD - failed_syncs_count: int = PRIVATE_FIELD - succeeded_syncs_count: int = PRIVATE_FIELD - is_eligible_for_promotion_to_cloud: bool = PUBLIC_FIELD - report_generation_datetime: datetime = PUBLIC_FIELD - - -class QAReport(BaseModel): - connectors_qa_report: List[ConnectorQAReport] diff --git a/airbyte-ci/connectors/qa-engine/qa_engine/outputs.py b/airbyte-ci/connectors/qa-engine/qa_engine/outputs.py deleted file mode 100644 index 7b91b28bbdb72..0000000000000 --- a/airbyte-ci/connectors/qa-engine/qa_engine/outputs.py +++ /dev/null @@ -1,20 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from datetime import datetime - -import pandas as pd - -from .models import ConnectorQAReport - - -def persist_qa_report(qa_report: pd.DataFrame, path: str, public_fields_only: bool = True) -> str: - report_generation_date = datetime.strftime(qa_report["report_generation_datetime"].max(), "%Y%m%d") - path = path + f"{report_generation_date}_qa_report.jsonl" - final_fields = [ - field.name for field in ConnectorQAReport.__fields__.values() if field.field_info.extra["is_public"] or not public_fields_only - ] - qa_report[final_fields].to_json(path, orient="records", lines=True) - return path diff --git a/airbyte-ci/connectors/qa-engine/qa_engine/validations.py b/airbyte-ci/connectors/qa-engine/qa_engine/validations.py deleted file mode 100644 index 058e1af5ee22f..0000000000000 --- a/airbyte-ci/connectors/qa-engine/qa_engine/validations.py +++ /dev/null @@ -1,93 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import logging -from datetime import datetime -from typing import List - -import pandas as pd -import requests - -from .constants import INAPPROPRIATE_FOR_CLOUD_USE_CONNECTORS -from .inputs import BUILD_STATUSES, fetch_latest_build_status_for_connector -from .models import ConnectorQAReport, QAReport - -logger = logging.getLogger(__name__) - - -TRUTHY_COLUMNS_TO_BE_ELIGIBLE = ["documentation_is_available", "is_appropriate_for_cloud_use", "latest_build_is_successful"] - - -class QAReportGenerationError(Exception): - pass - - -def url_is_reachable(url: str) -> bool: - response = requests.get(url) - return response.status_code == 200 - - -def is_appropriate_for_cloud_use(definition_id: str) -> bool: - return definition_id not in INAPPROPRIATE_FOR_CLOUD_USE_CONNECTORS - - -def is_eligible_for_promotion_to_cloud(connector_qa_data: pd.Series) -> bool: - if connector_qa_data["is_on_cloud"]: - return False - return all([connector_qa_data[col] for col in TRUTHY_COLUMNS_TO_BE_ELIGIBLE]) - - -def latest_build_is_successful(connector_qa_data: pd.Series) -> bool: - connector_technical_name = connector_qa_data["connector_technical_name"] - latest_build_status = fetch_latest_build_status_for_connector(connector_technical_name) - return latest_build_status == BUILD_STATUSES.SUCCESS - - -def get_qa_report(enriched_catalog: pd.DataFrame, oss_catalog_length: int) -> pd.DataFrame: - """Perform validation steps on top of the enriched catalog. - Adds the following columns: - - documentation_is_available: - GET the documentation URL and expect a 200 status code. - - is_appropriate_for_cloud_use: - Determined from an hardcoded list of definition ids inappropriate for cloud use. - - latest_build_is_successful: - Check if the latest build for the current connector version is successful. - - number_of_connections: - Get the number of connections using this connector version from our datawarehouse. - - number_of_users: - Get the number of users using this connector version from our datawarehouse. - - sync_success_rate: - Get the sync success rate of the connections with this connector version from our datawarehouse. - Args: - enriched_catalog (pd.DataFrame): The enriched catalog. - oss_catalog_length (pd.DataFrame): The length of the OSS catalog, for sanity check. - - Returns: - pd.DataFrame: The final QA report. - """ - qa_report = enriched_catalog.copy(deep=True) - qa_report["documentation_is_available"] = qa_report.documentation_url.apply(url_is_reachable) - qa_report["is_appropriate_for_cloud_use"] = qa_report.connector_definition_id.apply(is_appropriate_for_cloud_use) - - qa_report["latest_build_is_successful"] = qa_report.apply(latest_build_is_successful, axis="columns") - - qa_report["is_eligible_for_promotion_to_cloud"] = qa_report.apply(is_eligible_for_promotion_to_cloud, axis="columns") - qa_report["report_generation_datetime"] = datetime.utcnow() - - # Only select dataframe columns defined in the ConnectorQAReport model. - qa_report = qa_report[[field.name for field in ConnectorQAReport.__fields__.values()]] - - # Validate the report structure with pydantic QAReport model. - QAReport(connectors_qa_report=qa_report.to_dict(orient="records")) - if len(qa_report) != oss_catalog_length: - raise QAReportGenerationError( - f"The QA report ({len(qa_report)}) does not contain all the connectors defined in the OSS catalog ({oss_catalog_length})." - ) - return qa_report - - -def get_connectors_eligible_for_cloud(qa_report: pd.DataFrame) -> List[ConnectorQAReport]: - eligible_connectors = [ConnectorQAReport(**row) for _, row in qa_report[qa_report["is_eligible_for_promotion_to_cloud"]].iterrows()] - logger.info(f"{len(eligible_connectors)} connectors are eligible for Cloud.") - return eligible_connectors diff --git a/airbyte-ci/connectors/qa-engine/tests/conftest.py b/airbyte-ci/connectors/qa-engine/tests/conftest.py deleted file mode 100644 index e68c067c61fe4..0000000000000 --- a/airbyte-ci/connectors/qa-engine/tests/conftest.py +++ /dev/null @@ -1,67 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from datetime import datetime - -import pandas as pd -import pytest -from qa_engine.constants import CLOUD_CATALOG_URL, OSS_CATALOG_URL -from qa_engine.inputs import fetch_remote_catalog - - -@pytest.fixture(scope="module") -def oss_catalog(): - return fetch_remote_catalog(OSS_CATALOG_URL) - - -@pytest.fixture(scope="module") -def cloud_catalog(): - return fetch_remote_catalog(CLOUD_CATALOG_URL) - - -@pytest.fixture(scope="module") -def adoption_metrics_per_connector_version(): - return pd.DataFrame( - [ - { - "connector_definition_id": "dfd88b22-b603-4c3d-aad7-3701784586b1", - "connector_version": "2.0.0", - "number_of_connections": 0, - "number_of_users": 0, - "succeeded_syncs_count": 0, - "failed_syncs_count": 0, - "total_syncs_count": 0, - "sync_success_rate": 0.0, - } - ] - ) - - -@pytest.fixture -def dummy_qa_report() -> pd.DataFrame: - return pd.DataFrame( - [ - { - "connector_type": "source", - "connector_name": "test", - "connector_technical_name": "source-test", - "connector_definition_id": "foobar", - "connector_version": "0.0.0", - "support_level": "community", - "is_on_cloud": False, - "is_appropriate_for_cloud_use": True, - "latest_build_is_successful": True, - "documentation_is_available": False, - "number_of_connections": 0, - "number_of_users": 0, - "sync_success_rate": 0.99, - "total_syncs_count": 0, - "failed_syncs_count": 0, - "succeeded_syncs_count": 0, - "is_eligible_for_promotion_to_cloud": True, - "report_generation_datetime": datetime.utcnow(), - } - ] - ) diff --git a/airbyte-ci/connectors/qa-engine/tests/test_cloud_availability_updater.py b/airbyte-ci/connectors/qa-engine/tests/test_cloud_availability_updater.py deleted file mode 100644 index c143b49916b9c..0000000000000 --- a/airbyte-ci/connectors/qa-engine/tests/test_cloud_availability_updater.py +++ /dev/null @@ -1,331 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -import datetime -from pathlib import Path - -import freezegun -import git -import pytest -import yaml -from qa_engine import cloud_availability_updater, models - - -@pytest.fixture(scope="module") -def dummy_repo_path(tmp_path_factory) -> Path: - repo_path = tmp_path_factory.mktemp("cloud_availability_updater_tests") / "airbyte" - repo_path.mkdir() - return repo_path - - -@pytest.fixture(scope="module") -def eligible_connectors(): - return [ - models.ConnectorQAReport( - connector_type="source", - connector_name="PokeAPI", - support_level="community", - is_on_cloud=False, - is_appropriate_for_cloud_use=True, - latest_build_is_successful=True, - documentation_is_available=True, - number_of_users=1, - total_syncs_count=1, - failed_syncs_count=0, - succeeded_syncs_count=1, - is_eligible_for_promotion_to_cloud=True, - report_generation_datetime=datetime.datetime.utcnow(), - connector_technical_name="source-pokeapi", - connector_version="0.0.0", - connector_definition_id="pokeapi-definition-id", - sync_success_rate=0.989, - number_of_connections=12, - ) - ] - - -@pytest.fixture(scope="module") -def excluded_connectors(): - return [ - models.ConnectorQAReport( - connector_type="source", - connector_name="excluded", - support_level="community", - is_on_cloud=False, - is_appropriate_for_cloud_use=True, - latest_build_is_successful=True, - documentation_is_available=True, - number_of_users=1, - total_syncs_count=1, - failed_syncs_count=0, - succeeded_syncs_count=1, - is_eligible_for_promotion_to_cloud=True, - report_generation_datetime=datetime.datetime.utcnow(), - connector_technical_name="source-excluded", - connector_version="0.0.0", - connector_definition_id="excluded-definition-id", - sync_success_rate=0.979, - number_of_connections=12, - ) - ] - - -@pytest.fixture(scope="module") -def dummy_repo(dummy_repo_path, eligible_connectors, excluded_connectors) -> git.Repo: - all_connectors = eligible_connectors + excluded_connectors - connectors_dir = dummy_repo_path / "airbyte-integrations/connectors" - connectors_dir.mkdir(parents=True) - repo = git.Repo.init(dummy_repo_path) - - # set master branch instead of main - repo.git.checkout(b="master") - - for connector in all_connectors: - connector_dir = connectors_dir / connector.connector_technical_name - connector_dir.mkdir() - metadata_path = connector_dir / "metadata.yaml" - metadata_path.touch() - - repo.git.add("--all") - repo.git.commit(m="🤖 Initialized the repo") - return repo - - -@pytest.fixture -def checkout_master(dummy_repo): - """ - Ensure we're always on dummy repo master before and after each test using this fixture - """ - yield dummy_repo.heads.master.checkout() - dummy_repo.heads.master.checkout() - - -def test_get_metadata_file_path(checkout_master, eligible_connectors, dummy_repo_path: Path): - for connector in eligible_connectors: - path = cloud_availability_updater.get_metadata_file_path(dummy_repo_path, connector) - assert path.exists() and path.name == "metadata.yaml" - - -def test_checkout_new_branch(mocker, checkout_master, dummy_repo): - new_branch = cloud_availability_updater.checkout_new_branch(dummy_repo, "test-branch") - assert new_branch.name == dummy_repo.active_branch.name == "test-branch" - - -@pytest.mark.parametrize("expect_update", [True, False]) -def test_enable_in_cloud(mocker, dummy_repo_path, expect_update, eligible_connectors): - connector = eligible_connectors[0] - connector_metadata_path = dummy_repo_path / f"airbyte-integrations/connectors/{connector.connector_technical_name}" / "metadata.yaml" - with open(connector_metadata_path, "w") as definitions_mask: - mask_yaml = yaml.safe_dump({"data": {"registries": {"cloud": {"enabled": not expect_update}}}}) - definitions_mask.write(mask_yaml) - updated_path = cloud_availability_updater.enable_in_cloud(connector, connector_metadata_path) - if not expect_update: - assert updated_path is None - else: - with open(updated_path, "r") as definitions_mask: - raw_content = definitions_mask.read() - metadata_content = yaml.safe_load(raw_content) - assert isinstance(metadata_content, dict) - assert metadata_content["data"]["registries"]["cloud"]["enabled"] is True - - -def test_commit_files(checkout_master, dummy_repo, dummy_repo_path): - cloud_availability_updater.checkout_new_branch(dummy_repo, "test-commit-files") - commit_message = "🤖 Add new connector to cloud" - with open(dummy_repo_path / "test_file.txt", "w") as f: - f.write(".") - - cloud_availability_updater.commit_all_files(dummy_repo, commit_message) - - assert dummy_repo.head.reference.commit.message == commit_message + "\n" - edited_files = dummy_repo.git.diff("--name-only", checkout_master.name).split("\n") - assert "test_file.txt" in edited_files - - -def test_push_branch(mocker): - mock_repo = mocker.Mock() - cloud_availability_updater.push_branch(mock_repo, "new_branch") - mock_repo.git.push.assert_called_once_with("--force", "--set-upstream", "origin", "new_branch") - - -@pytest.mark.parametrize("updated_files", [True, False]) -def test_add_new_connector_to_cloud_catalog(mocker, updated_files, dummy_repo_path): - mocker.patch.object(cloud_availability_updater, "get_metadata_file_path") - mocker.patch.object(cloud_availability_updater, "enable_in_cloud", mocker.Mock(return_value=updated_files)) - mocker.patch.object(cloud_availability_updater, "commit_all_files") - - connector = mocker.Mock() - repo = mocker.Mock() - - updated_connector = cloud_availability_updater.add_new_connector_to_cloud_catalog(dummy_repo_path, repo, connector) - assert updated_connector == updated_files - cloud_availability_updater.get_metadata_file_path.assert_called_with(dummy_repo_path, connector) - cloud_availability_updater.enable_in_cloud.assert_called_once_with( - connector, cloud_availability_updater.get_metadata_file_path.return_value - ) - if updated_files: - cloud_availability_updater.commit_all_files.assert_called_with(repo, f"🤖 Add {connector.connector_name} connector to cloud") - - -@pytest.mark.parametrize("pr_already_created", [True, False, True]) -def test_create_pr(mocker, pr_already_created): - mocker.patch.object(cloud_availability_updater, "requests") - pr_post_response = mocker.Mock(json=mocker.Mock(return_value={"url": "pr_url", "number": "pr_number"})) - cloud_availability_updater.requests.post.side_effect = [pr_post_response, mocker.Mock()] - mocker.patch.object(cloud_availability_updater, "pr_already_created_for_branch", mocker.Mock(return_value=pr_already_created)) - mocker.patch.object(cloud_availability_updater, "GITHUB_API_COMMON_HEADERS", {"common": "headers"}) - expected_pr_url = "https://api.github.com/repos/airbytehq/airbyte/pulls" - expected_pr_data = { - "title": "my pr title", - "body": "my pr body", - "head": "my_awesome_branch", - "base": "master", - } - expected_issue_url = "https://api.github.com/repos/airbytehq/airbyte/issues/pr_number/labels" - expected_issue_data = {"labels": cloud_availability_updater.PR_LABELS} - - response = cloud_availability_updater.create_pr("my pr title", "my pr body", "my_awesome_branch", cloud_availability_updater.PR_LABELS) - - if not pr_already_created: - expected_post_calls = [ - mocker.call(expected_pr_url, headers=cloud_availability_updater.GITHUB_API_COMMON_HEADERS, json=expected_pr_data), - mocker.call(expected_issue_url, headers=cloud_availability_updater.GITHUB_API_COMMON_HEADERS, json=expected_issue_data), - ] - cloud_availability_updater.requests.post.assert_has_calls(expected_post_calls, any_order=False) - assert response == pr_post_response - else: - assert response is None - - -@pytest.mark.parametrize("json_response, expected_result", [([], False), (["foobar"], True)]) -def test_pr_already_created_for_connector(mocker, json_response, expected_result): - mocker.patch.object(cloud_availability_updater.requests, "get") - cloud_availability_updater.requests.get.return_value.json.return_value = json_response - mocker.patch.object(cloud_availability_updater, "GITHUB_API_COMMON_HEADERS", {"common": "headers"}) - - is_already_created = cloud_availability_updater.pr_already_created_for_branch("my-awesome-branch") - expected_url = "https://api.github.com/repos/airbytehq/airbyte/pulls" - expected_headers = {"common": "headers"} - expected_params = {"head": "airbytehq:my-awesome-branch", "state": "open"} - cloud_availability_updater.requests.get.assert_called_with(expected_url, headers=expected_headers, params=expected_params) - assert is_already_created == expected_result - - -def test_set_git_identity(mocker): - mock_repo = mocker.Mock() - repo = cloud_availability_updater.set_git_identity(mock_repo) - repo.git.config.assert_has_calls( - [ - mocker.call("--global", "user.email", cloud_availability_updater.GIT_USER_EMAIL), - mocker.call("--global", "user.name", cloud_availability_updater.GIT_USERNAME), - ] - ) - assert repo == mock_repo - - -def test_get_authenticated_repo_url(mocker): - mocker.patch.object(cloud_availability_updater, "AIRBYTE_GITHUB_REPO_URL", "https://foobar.com") - repo_url = cloud_availability_updater.get_authenticated_repo_url("username", "token") - assert repo_url == "https://username:token@foobar.com" - - -@pytest.mark.parametrize("response, expected_output", [([], False), (["foo"], True)]) -def test_pr_already_created_for_branch(mocker, response, expected_output): - mocker.patch.object(cloud_availability_updater, "requests") - - cloud_availability_updater.requests.get.return_value = mocker.Mock(json=mocker.Mock(return_value=response)) - output = cloud_availability_updater.pr_already_created_for_branch("foo") - assert output == expected_output - cloud_availability_updater.requests.get.return_value.raise_for_status.assert_called_once() - cloud_availability_updater.requests.get.assert_called_with( - cloud_availability_updater.AIRBYTE_PR_ENDPOINT, - headers=cloud_availability_updater.GITHUB_API_COMMON_HEADERS, - params={"head": f"{cloud_availability_updater.AIRBYTE_REPO_OWNER}:foo", "state": "open"}, - ) - - -def test_add_labels_to_pr(mocker): - mocker.patch.object(cloud_availability_updater, "requests") - labels_to_add = ["foo", "bar"] - response = cloud_availability_updater.add_labels_to_pr("1", labels_to_add) - cloud_availability_updater.requests.post.assert_called_with( - f"{cloud_availability_updater.AIRBYTE_ISSUES_ENDPOINT}/1/labels", - headers=cloud_availability_updater.GITHUB_API_COMMON_HEADERS, - json={"labels": labels_to_add}, - ) - cloud_availability_updater.requests.post.return_value.raise_for_status.assert_called_once() - assert response == cloud_availability_updater.requests.post.return_value - - -def test_get_pr_body(mocker, eligible_connectors, excluded_connectors): - pr_body = cloud_availability_updater.get_pr_body(eligible_connectors, excluded_connectors) - assert "1 connectors available on Cloud!" in pr_body.split("/n")[0] - assert "# Promoted connectors\n" in pr_body - assert "# Excluded but eligible connectors\n" in pr_body - assert "connector_technical_name" in pr_body - assert "connector_version" in pr_body - assert "connector_definition_id" in pr_body - assert "source-pokeapi" in pr_body - assert "pokeapi-definition-id" in pr_body - assert "0.0.0" in pr_body - assert "source-excluded" in pr_body - assert "excluded-definition-id" in pr_body - - -@freezegun.freeze_time("2023-02-14") -@pytest.mark.parametrize("added_connectors", [True, False]) -def test_batch_deploy_eligible_connectors_to_cloud_repo( - mocker, dummy_repo_path, added_connectors, eligible_connectors, excluded_connectors -): - all_connectors = eligible_connectors + excluded_connectors - mocker.patch.object(cloud_availability_updater.tempfile, "mkdtemp", mocker.Mock(return_value=str(dummy_repo_path))) - mocker.patch.object(cloud_availability_updater, "clone_airbyte_repo") - mocker.patch.object(cloud_availability_updater, "set_git_identity") - mocker.patch.object(cloud_availability_updater, "checkout_new_branch") - mocker.patch.object(cloud_availability_updater, "add_new_connector_to_cloud_catalog") - mocker.patch.object(cloud_availability_updater, "enable_in_cloud", side_effect=False) - mocker.patch.object(cloud_availability_updater, "push_branch") - mocker.patch.object(cloud_availability_updater, "get_pr_body") - mocker.patch.object(cloud_availability_updater, "create_pr") - mocker.patch.object(cloud_availability_updater, "shutil") - - if added_connectors: - cloud_availability_updater.add_new_connector_to_cloud_catalog.side_effect = lambda _path, _repo, connector: ( - connector not in expected_excluded_connectors - ) - expected_added_connectors = eligible_connectors - else: - cloud_availability_updater.add_new_connector_to_cloud_catalog.return_value = False - - expected_excluded_connectors = excluded_connectors - - mock_repo = cloud_availability_updater.set_git_identity.return_value - expected_new_branch_name = "cloud-availability-updater/batch-deploy/20230214" - expected_pr_title = "🤖 Cloud Availability updater: new connectors to deploy [20230214]" - - cloud_availability_updater.batch_deploy_eligible_connectors_to_cloud_repo(all_connectors) - cloud_availability_updater.clone_airbyte_repo.assert_called_once_with(dummy_repo_path) - cloud_availability_updater.set_git_identity.assert_called_once_with(cloud_availability_updater.clone_airbyte_repo.return_value) - mock_repo.git.checkout.assert_called_with(cloud_availability_updater.AIRBYTE_MAIN_BRANCH_NAME) - - cloud_availability_updater.checkout_new_branch.assert_called_once_with(mock_repo, expected_new_branch_name) - cloud_availability_updater.add_new_connector_to_cloud_catalog.assert_has_calls( - [ - mocker.call(dummy_repo_path, cloud_availability_updater.set_git_identity.return_value, eligible_connectors[0]), - ] - ) - if added_connectors: - cloud_availability_updater.push_branch.assert_called_once_with(mock_repo, expected_new_branch_name) - cloud_availability_updater.create_pr.assert_called_once_with( - expected_pr_title, - cloud_availability_updater.get_pr_body.return_value, - expected_new_branch_name, - cloud_availability_updater.PR_LABELS, - ) - cloud_availability_updater.get_pr_body.assert_called_with(expected_added_connectors, expected_excluded_connectors) - else: - cloud_availability_updater.push_branch.assert_not_called() - cloud_availability_updater.create_pr.assert_not_called() - cloud_availability_updater.shutil.rmtree.assert_called_with(dummy_repo_path) diff --git a/airbyte-ci/connectors/qa-engine/tests/test_enrichments.py b/airbyte-ci/connectors/qa-engine/tests/test_enrichments.py deleted file mode 100644 index 73ae7fde8c416..0000000000000 --- a/airbyte-ci/connectors/qa-engine/tests/test_enrichments.py +++ /dev/null @@ -1,49 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -import re - -import pandas as pd -import pytest -from qa_engine import enrichments - - -@pytest.fixture -def enriched_catalog(oss_catalog, cloud_catalog, adoption_metrics_per_connector_version) -> pd.DataFrame: - return enrichments.get_enriched_catalog(oss_catalog, cloud_catalog, adoption_metrics_per_connector_version) - - -@pytest.fixture -def enriched_catalog_columns(enriched_catalog: pd.DataFrame) -> set: - return set(enriched_catalog.columns) - - -def test_merge_performed_correctly(enriched_catalog, oss_catalog): - assert len(enriched_catalog) == len(oss_catalog) - - -def test_new_columns_are_added(enriched_catalog_columns): - expected_new_columns = { - "is_on_cloud", - "connector_name", - "connector_technical_name", - "connector_version", - "number_of_connections", - "number_of_users", - "succeeded_syncs_count", - "failed_syncs_count", - "total_syncs_count", - "sync_success_rate", - } - assert expected_new_columns.issubset(enriched_catalog_columns) - - -def test_no_column_are_removed_and_lowercased(enriched_catalog_columns, oss_catalog): - for column in oss_catalog: - assert re.sub(r"(? pd.DataFrame: - return enrichments.get_enriched_catalog(oss_catalog, cloud_catalog, adoption_metrics_per_connector_version) - - -@pytest.fixture -def qa_report(enriched_catalog, mocker) -> pd.DataFrame: - mocker.patch.object(validations, "url_is_reachable", mocker.Mock(return_value=True)) - return validations.get_qa_report(enriched_catalog, len(enriched_catalog)) - - -@pytest.fixture -def qa_report_columns(qa_report: pd.DataFrame) -> set: - return set(qa_report.columns) - - -def test_all_columns_are_declared(qa_report_columns: set): - expected_columns = set([field.name for field in models.ConnectorQAReport.__fields__.values()]) - assert qa_report_columns == expected_columns - - -def test_not_null_values_after_validation(qa_report: pd.DataFrame): - assert len(qa_report.dropna()) == len(qa_report) - - -def test_report_generation_error(enriched_catalog, mocker): - mocker.patch.object(validations, "url_is_reachable", mocker.Mock(return_value=True)) - with pytest.raises(validations.QAReportGenerationError): - return validations.get_qa_report(enriched_catalog.sample(1), 2) - - -@pytest.mark.parametrize( - "connector_qa_data, expected_to_be_eligible", - [ - ( - pd.Series( - { - "is_on_cloud": False, - "documentation_is_available": True, - "is_appropriate_for_cloud_use": True, - "latest_build_is_successful": True, - } - ), - True, - ), - ( - pd.Series( - { - "is_on_cloud": True, - "documentation_is_available": True, - "is_appropriate_for_cloud_use": True, - "latest_build_is_successful": True, - } - ), - False, - ), - ( - pd.Series( - { - "is_on_cloud": True, - "documentation_is_available": False, - "is_appropriate_for_cloud_use": False, - "latest_build_is_successful": False, - } - ), - False, - ), - ( - pd.Series( - { - "is_on_cloud": False, - "documentation_is_available": False, - "is_appropriate_for_cloud_use": True, - "latest_build_is_successful": True, - } - ), - False, - ), - ( - pd.Series( - { - "is_on_cloud": False, - "documentation_is_available": True, - "is_appropriate_for_cloud_use": False, - "latest_build_is_successful": True, - } - ), - False, - ), - ( - pd.Series( - { - "is_on_cloud": False, - "documentation_is_available": True, - "is_appropriate_for_cloud_use": True, - "latest_build_is_successful": False, - } - ), - False, - ), - ], -) -def test_is_eligible_for_promotion_to_cloud(connector_qa_data: pd.Series, expected_to_be_eligible: bool): - assert validations.is_eligible_for_promotion_to_cloud(connector_qa_data) == expected_to_be_eligible - - -def test_get_connectors_eligible_for_cloud(qa_report: pd.DataFrame): - qa_report["is_eligible_for_promotion_to_cloud"] = True - connectors_eligible_for_cloud = list(validations.get_connectors_eligible_for_cloud(qa_report)) - assert len(qa_report) == len(connectors_eligible_for_cloud) - assert all([c.is_eligible_for_promotion_to_cloud for c in connectors_eligible_for_cloud]) - - qa_report["is_eligible_for_promotion_to_cloud"] = False - connectors_eligible_for_cloud = list(validations.get_connectors_eligible_for_cloud(qa_report)) - assert len(connectors_eligible_for_cloud) == 0 - - -@pytest.mark.parametrize( - "connector_qa_data, build_file_payload, build_file_status, expected_is_successful", - [ - ( - pd.Series( - { - "connector_version": "0.1.0", - "connector_technical_name": "connectors/source-pokeapi", - } - ), - [ - { - "link": "https://github.com/airbytehq/airbyte/actions/runs/4029659593", - "success": True, - "docker_version": "0.1.5", - "date": "2021-10-01T00:00:00Z", - "connector": "connectors/source-pokeapi", - } - ], - 200, - True, - ), - ( - pd.Series( - { - "connector_version": "0.1.0", - "connector_technical_name": "connectors/source-pokeapi", - } - ), - [ - { - "link": "https://github.com/airbytehq/airbyte/actions/runs/4029659593", - "success": False, - "docker_version": "0.1.5", - "date": "2021-10-01T00:00:00Z", - "connector": "connectors/source-pokeapi", - } - ], - 200, - False, - ), - ( - pd.Series( - { - "connector_version": "0.1.0", - "connector_technical_name": "connectors/source-pokeapi", - } - ), - None, - 404, - False, - ), - ], -) -def test_latest_build_is_successful( - mocker, connector_qa_data: pd.Series, build_file_payload: object, build_file_status: int, expected_is_successful: bool -): - # Mock the api call to get the latest build status for a connector version - mock_response = MagicMock() - mock_response.json.return_value = build_file_payload - mock_response.status_code = build_file_status - mocker.patch.object(requests, "get", return_value=mock_response) - - assert validations.latest_build_is_successful(connector_qa_data) == expected_is_successful diff --git a/airbyte-integrations/bases/base-normalization/README.md b/airbyte-integrations/bases/base-normalization/README.md deleted file mode 100644 index ec05895eda6f2..0000000000000 --- a/airbyte-integrations/bases/base-normalization/README.md +++ /dev/null @@ -1,461 +0,0 @@ -# Normalization - -* [Normalization](#normalization) - * [Under the hood](#under-the-hood) - * [Incremental updates with dedup-history sync mode](#incremental-updates-with-dedup-history-sync-mode) - * [Developer workflow](#developer-workflow) - * [Setting up your environment](#setting-up-your-environment) - * [Running dbt](#running-dbt) - * [Testing normalization](#testing-normalization) - * [Build & Activate Virtual Environment and install dependencies](#build--activate-virtual-environment-and-install-dependencies) - * [Unit Tests](#unit-tests) - * [test_transform_config.py:](#test_transform_configpy) - * [test_stream_processor.py and test_table_name_registry.py:](#test_stream_processorpy-and-test_table_name_registrypy) - * [test_destination_name_transformer.py:](#test_destination_name_transformerpy) - * [Integration Tests](#integration-tests) - * [Integration Tests Definitions for test_ephemeral.py:](#integration-tests-definitions-for-test_ephemeralpy) - * [Integration Tests Definitions for test_normalization.py:](#integration-tests-definitions-for-test_normalizationpy) - * [README.md:](#readmemd) - * [Integration Test Data Input:](#integration-test-data-input) - * [data_input/catalog.json:](#data_inputcatalogjson) - * [data_input/messages.txt:](#data_inputmessagestxt) - * [data_input/replace_identifiers.json:](#data_inputreplace_identifiersjson) - * [Integration Test Execution Flow:](#integration-test-execution-flow) - * [Integration Test Checks:](#integration-test-checks) - * [dbt schema tests:](#dbt-schema-tests) - * [dbt data tests:](#dbt-data-tests) - * [Notes using dbt seeds:](#notes-using-dbt-seeds) - * [Debug dbt operations with local database](#debug-dbt-operations-with-local-database) - * [Standard Destination Tests](#standard-destination-tests) - * [Acceptance Tests](#acceptance-tests) - -Related documentation on normalization is available here: - -* [architecture / Basic Normalization](../../../docs/understanding-airbyte/basic-normalization.md) -* [tutorials / Custom dbt normalization](../../../docs/operator-guides/transformation-and-normalization/transformations-with-dbt.md) - -## Under the hood - -Normalization has two Python modules: -* `transform_config` parses the destination connector config and generates a profile.yml file, - which configures how dbt will connect to the destination database. -* `transform_catalog` parses the connection's catalog and generates a dbt_project.yml file, - which configures the models that dbt will run and how they should be materialized. - -`entrypoint.sh` (the entrypoint to normalization's Docker image) invokes these two modules, then calls `dbt run` on their output. - -### Incremental updates with dedup-history sync mode - -When generating the final table, we need to pull data from the SCD model. -A naive implementation would require reading the entire SCD table and completely regenerating the final table on each run. -This is obviously inefficient, so we instead use dbt's [incremental materialization mode](https://docs.getdbt.com/docs/building-a-dbt-project/building-models/configuring-incremental-models). -At each stage of the dbt pipeline, normalization will query the target table for the newest `_airbyte_emitted_at` value. -Then we only need to find records from the source table with `_airbyte_emitted_at` greater than or equal to that value -(equal to is necessary in case a previous normalization run was interrupted). - -This handles the two error scenarios quite cleanly: -* If a sync fails but succeeds after a retry, such that the first attempt commits some records and the retry commits a superset - of those records, then normalization will see that the SCD table has none of those records. The SCD model has a deduping stage, - which removes the records which were synced multiple times. -* If normalization fails partway through, such that (for example) the SCD model is updated but the final table is not, and then the sync - is retried, then the source will not re-emit any old records (because the destination will have emitted a state message ack-ing - all of the records). If the retry emits some new records, then normalization will append them to the SCD table as usual - (because, from the SCD's point of view, this is just a normal sync). Then the final table's latest `__airbyte_emitted_at` - will be older than the original attempt, so it will pull both the new records _and_ the first attempt's records from the SCD table. - -## Developer workflow - -At a high level, this is the recommended workflow for updating base-normalization: -1. Manually edit the models in `integration_tests/normalization_test_output/postgres/test_simple_streams/models/generated`. - Run `dbt compile` and manually execute the SQL queries. This requires manual setup and validation, but allows you to quickly experiment - with different inputs. - 1. You can substitute your preferred database/warehouse. This document will use Postgres because it's easy to set up. -1. Run `dbt run` and verify that it generates the data correctly. -1. Once `dbt run` succeeds, edit `stream_processor.py` until it generates the models you hand-wrote in step 1. -1. Run the `test_normalization[DestinationType.POSTGRES-test_simple_streams]` integration test case. -1. Run the full [integration test suite](#integration-tests). -1. Commit the changes in `integration_tests/normalization_test_output`. - -### Setting up your environment - -If you have a fully-featured Python dev environment, you can just set a breakpoint -at [this line]([integration_tests/test_normalization.py#L105](https://github.com/airbytehq/airbyte/blob/17ee3ad44ff71164765b97ff439c7ffd51bf9bfe/airbyte-integrations/bases/base-normalization/integration_tests/test_normalization.py#L108)) -and run the `test_normalization[DestinationType.POSTGRES-test_simple_streams]` test case. You can terminate the run after it hits the -breakpoint. This will start Postgres in a Docker container with some prepopulated data and configure profiles.yml to match the container. - -To achieve this, follow the steps to [set up your python environment](#build--activate-virtual-environment-and-install-dependencies). Next, you can -add a line with just `breakpoint()` after the contents of the `run_test_normalization` method -in [test_normalization.py]([integration_tests/test_normalization.py#L105](https://github.com/airbytehq/airbyte/blob/17ee3ad44ff71164765b97ff439c7ffd51bf9bfe/airbyte-integrations/bases/base-normalization/integration_tests/test_normalization.py#L108)) -then, from the `integration_tests` directory run the following: - -```shell - pytest -k 'test_normalization[DestinationType.POSTGRES-test_simple_streams]' --pdb -``` - -Otherwise, you can run this command: - -```shell -docker run \ - --rm \ - --name "normalization_dev_postgres" \ - -e "integration-tests" \ - -e "integration-tests" \ - -p "9001:5432" \ - -d \ - marcosmarxm/postgres-ssl:dev \ - -c ssl=on \ - -c ssl_cert_file=/var/lib/postgresql/server.crt \ - -c ssl_key_file=/var/lib/postgresql/server.key \ -``` - -Then you'll need to edit `integration_tests/normalization_test_output/postgres/test_simple_streams/profiles.yml` and set the port to 9001. - -If you manually start an external Postgres instance (or whatever destination you're working on), you can set the [`NORMALIZATION_TEST_POSTGRES_DB_PORT`](https://github.com/airbytehq/airbyte/blob/8ed3fb5379bf5a93d011a78a3be435cf9de8ab74/airbyte-integrations/bases/base-normalization/integration_tests/dbt_integration_test.py#L26) -variable to run tests against that instance. - -### Running dbt - -Once you have a database available, you can run dbt commands. We recommend running dbt from inside the `airbyte/normalization:dev` image. -This saves you the effort of installing dbt and reconfiguring dbt_project.yml. You should build the image locally with `./gradlew :airbyte-integrations:bases:base-normalization:airbyteDocker`. - -First, `cd integration_tests/normalization_test_output/postgres/test_simple_streams`. Then install dbt's dependencies: -```shell -docker run \ - --rm \ - --init \ - -v $(pwd):/workspace \ - -v $(pwd)/build:/build \ - -v $(pwd)/logs:/logs \ - -v $(pwd)/build/dbt_packages/:/dbt \ - --entrypoint /usr/local/bin/dbt \ - --network host \ - -i airbyte/normalization:dev \ - deps \ - --profiles-dir=/workspace \ - --project-dir=/workspace -``` - -You should be able to run `dbt compile` now: -```shell -docker run \ - --rm \ - --init \ - -v $(pwd):/workspace \ - -v $(pwd)/build:/build \ - -v $(pwd)/logs:/logs \ - -v $(pwd)/build/dbt_packages/:/dbt \ - --entrypoint /usr/local/bin/dbt \ - --network host \ - -i airbyte/normalization:dev \ - compile \ - --profiles-dir=/workspace \ - --project-dir=/workspace -``` - -This will modify the files in `build/compiled/airbyte_utils/models/generated`. -For example, if you edit `models/generated/airbyte_incremental/scd/test_normalization/dedup_cdc_excluded_scd.sql`, then after compiling, -you can see the results in `build/compiled/airbyte_utils/models/generated/airbyte_incremental/scd/test_normalization/dedup_cdc_excluded_scd.sql`. - -You can also use `dbt run` to have dbt actually execute your models: -```shell -docker run \ - --rm \ - --init \ - -v $(pwd):/workspace \ - -v $(pwd)/build:/build \ - -v $(pwd)/logs:/logs \ - -v $(pwd)/build/dbt_packages/:/dbt \ - --entrypoint /usr/local/bin/dbt \ - --network host \ - -i airbyte/normalization:dev \ - run \ - --profiles-dir=/workspace \ - --project-dir=/workspace -``` -Like `dbt compile`, this will modify the files in `build/compiled/airbyte_utils/models/generated`. It will also modify the files in -`build/run/airbyte_utils/models/generated`. - -## Testing normalization - -Below are short descriptions of the kind of tests that may be affected by changes to the normalization code. - -### Build & Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python3 -m venv .venv -``` - -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt -``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. - -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. - -### Unit Tests - -Unit tests are automatically included when building the normalization project. -But you could invoke them explicitly by running the following commands for example: - -with Gradle: - - ./gradlew :airbyte-integrations:bases:base-normalization:unitTest - -or directly with pytest: - - pytest airbyte-integrations/bases/base-normalization/unit_tests - -Unit tests are targeted at the main code generation functionality of normalization. -They should verify different logic rules on how to convert an input catalog.json (JSON Schema) file into -dbt files. - -#### test_transform_config.py: - -This class is testing the transform config functionality that converts a destination_config.json into the adequate profiles.yml file for dbt to use -see [related dbt docs on profiles.yml](https://docs.getdbt.com/reference/profiles.yml) for more context on what it actually is. - -#### test_stream_processor.py and test_table_name_registry.py: - -These unit tests functions check how each stream is converted to dbt models files. -For example, one big focus area is around how table names are chosen. -(especially since some destination like postgres have a very low limit to identifiers length of 64 characters) -In case of nested objects/arrays in a stream, names can be dragged on to even longer names... - -So you can find rules of how to truncate and concatenate part of the table names together in here. -Depending on the catalog context and what identifiers have been already used in the past, some naming -may also be affected and requires to choose new identifications to avoid collisions. - -Additional helper functions dealing with cursor fields, primary keys and other code generation parts are also being tested here. - -#### test_destination_name_transformer.py: - -These Unit tests checks implementation of specific rules of SQL identifier naming conventions for each destination. -The specifications rules of each destinations are detailed in the corresponding docs, especially on the -allowed characters, if quotes are needed or not, and the length limitations: - -* [bigquery](../../../docs/integrations/destinations/bigquery.md) -* [postgres](../../../docs/integrations/destinations/postgres.md) -* [redshift](../../../docs/integrations/destinations/redshift.md) -* [snowflake](../../../docs/integrations/destinations/snowflake.md) -* [mysql](../../../docs/integrations/destinations/mysql.md) -* [oracle](../../../docs/integrations/destinations/oracle.md) -* [mssql](../../../docs/integrations/destinations/mssql.md) - -Rules about truncations, for example for both of these strings which are too long for the postgres 64 limit: -* `Aaaa_Bbbb_Cccc_Dddd_Eeee_Ffff_Gggg_Hhhh_Iiii` -* `Aaaa_Bbbb_Cccc_Dddd_a_very_long_name_Ffff_Gggg_Hhhh_Iiii` - -Deciding on how to truncate (in the middle) are being verified in these tests. -In this instance, both strings ends up as: - -* `Aaaa_Bbbb_Cccc_Dddd___e_Ffff_Gggg_Hhhh_Iiii` - -The truncate operation gets rid of characters in the middle of the string to preserve the start -and end characters as it may contain more useful information in table naming. However the final -truncated name can still potentially cause collisions in table names... - -Note that dealing with such collisions is not part of `destination_name_transformer` but of the -`stream_processor` since one is focused on destination conventions and the other on putting together -identifier names from streams and catalogs. - -### Integration Tests - -Look at the `./setup/*.md` (e.g. `./setup/snowflake.md`) for how to set up integration environments. - -With Gradle: - - ./gradlew :airbyte-integrations:bases:base-normalization:integrationTest - -or directly with pytest: - - pytest airbyte-integrations/bases/base-normalization/integration_tests - -or can also be invoked on github, thanks to the slash commands posted as comments: - - /test connector=bases/base-normalization - -You can restrict the tests to a subset of destinations by specifying a comma separated list of destinations. -For example, let's say you are working on a change to normalization for Postgres, with Gradle: - - NORMALIZATION_TEST_TARGET=postgres ./gradlew :airbyte-integrations:bases:base-normalization:integrationTest - -or directly with pytest: - - NORMALIZATION_TEST_TARGET=postgres pytest airbyte-integrations/bases/base-normalization/integration_tests - -Note that these tests are connecting and processing data on top of real data warehouse destinations. -Therefore, valid credentials files are expected to be injected in the `secrets/` folder in order to run -(not included in git repository). - -This is usually automatically done by the CI thanks to the `tools/bin/ci_credentials.sh` script or you can -re-use the `destination_config.json` passed to destination connectors. - -As normalization supports more and more destinations, tests are relying on an increasing number of destinations. -As a result, it is possible that the docker garbage collector is triggered to wipe "unused" docker images while the -integration tests for normalization are running. Thus, if you encounter errors about a connector's docker image not being -present locally (even though it was built beforehand), make sure to increase the docker image storage size of your docker engine ("defaultKeepStorage" for mac for example). - -#### Integration Tests Definitions for test_ephemeral.py: -The test here focus on benchmarking the "ephemeral" materialization mode of dbt. Depending on the number of -columns in a catalog, this may throw exceptions and fail. This test ensures that we support reasonable number of columns in destination tables. - -For example, known limitations that are now supported were: -* Ephemeral materialization with some generated models break with more than 490 columns with "maximum recursion depth exceeded", we now automatically switch to a little more scalable mode when generating dbt models by using views materialization. -* The tests are currently checking that at least a reasonably large number (1500) of columns can complete successfully. - -However, limits on the destination still exists and can break for higher number of columns... - -#### Integration Tests Definitions for test_normalization.py: - -Some test suites can be selected to be versioned control in Airbyte git repository (or not). -This is useful to see direct impacts of code changes on downstream files generated or compiled -by normalization and dbt (directly in PR too). (_Simply refer to your test suite name in the -`git_versioned_tests` variable in the `base-normalization/integration_tests/test_normalization.py` file_) - -We would typically choose small and meaningful test suites to include in git while others more complex tests -can be left out. They would still be run in a temporary directory and thrown away at the end of the tests. - -They are defined, each one of them, in a separate directory in the resource folder. -For example, below, we would have 2 different tests "suites" with this hierarchy: - - base-normalization/integration_tests/resources/ - ├── test_suite1/ - │ ├── data_input/ - │ │ ├── catalog.json - │ │ ├── messages.txt - │ │ └── replace_identifiers.json - │ ├── dbt_data_tests/ - │ │ ├── file1.sql - │ │ └── file2.sql - │ ├── dbt_schema_tests/ - │ │ ├── file1.yml - │ │ └── file2.yml - │ └── README.md - └── test_suite2/ - ├── data_input/ - │ ├── catalog.json - │ └── messages.txt - ├── dbt_data_tests/ - ├── dbt_schema_tests/ - └── README.md - -##### README.md: - -Each test suite should have an optional `README.md` to include further details and descriptions of what the test is trying to verify and -how it is specifically built. - -#### Integration Test Data Input: - -##### data_input/catalog.json: - -The `catalog.json` is the main input for normalization from which the dbt models files are being -generated from as it describes in JSON Schema format what the data structure is. - -##### data_input/messages.txt: - -The `messages.txt` are serialized Airbyte JSON records that should be sent to the destination as if they were -transmitted by a source. In this integration test, the files is read and "cat" through to the docker image of -each destination connectors to populate `_airbyte_raw_tables`. These tables are finally used as input -data for dbt to run from. - -Note that `test_simple_streams` has additional message files, each representing a separate sync -(`messages_incremental.txt` and `messages_schema_change.txt`). - -##### data_input/replace_identifiers.json: -The `replace_identifiers.json` contains maps of string patterns and values to replace in the `dbt_schema_tests` -and `dbt_data_tests` files to handle cross database compatibility. - -Note that an additional step is added before replacing identifiers to change capitalization of identifiers in those -tests files. (to uppercase on snowflake and lowercase on redshift). - -#### Integration Test Execution Flow: - -These integration tests are run against all destinations that dbt can be executed on. -So, for each target destination, the steps run by the tests are: - -1. Prepare the test execution workspace folder (copy skeleton from `dbt-project-template/`) -2. Generate a dbt `profiles.yml` file to connect to the target destination -3. Populate raw tables by running the target destination connectors, reading and uploading the - `messages.txt` file as data input. -4. Run Normalization step to generate dbt models files from `catalog.json` input file. -5. Execute dbt cli command: `dbt run` from the test workspace folder to compile generated models files - * from `models/generated/` folder - * into `../build/(compiled|run)/airbyte_utils/models/generated/` folder - * The final "run" SQL files are also copied (for archiving) to `final/` folder by the test script. -6. Deploy the `schema_tests` and `data_tests` files into the test workspace folder. -7. Execute dbt cli command: `dbt tests` from the test workspace folder to run verifications and checks with dbt. -8. Optional checks (nothing for the moment) - -Note that the tests are using the normalization code from the python files directly, so it is not necessary to rebuild the docker images -in between when iterating on the code base. However, dbt cli and destination connectors are invoked via the dev docker images. -This means that if your `airbyte/normalization:dev` image doesn't have a working dbt installation, tests _will_ fail. -Similarly, if your `destination-xyz:dev` image doesn't work, then the base-normalization integration tests will fail. - -#### Integration Test Checks: - -##### dbt schema tests: - -dbt allows out of the box to configure some tests as properties for an existing model (or source, seed, or snapshot). -This can be done in yaml format as described in the following documentation pages: - -* [dbt schema-tests](https://docs.getdbt.com/docs/building-a-dbt-project/tests#schema-tests) -* [custom schema test](https://docs.getdbt.com/docs/guides/writing-custom-schema-tests) -* [dbt expectations](https://github.com/calogica/dbt-expectations) - -We are leveraging these capabilities in these integration tests to verify some relationships in our -generated tables on the destinations. - -##### dbt data tests: - -Additionally, dbt also supports "data tests" which are specified as SQL queries. -A data test is a select statement that returns 0 records when the test is successful. - -* [dbt data-tests](https://docs.getdbt.com/docs/building-a-dbt-project/tests#data-tests) - -##### Notes using dbt seeds: - -Because some functionalities are not stable enough on dbt side, it is difficult to properly use -`dbt seed` commands to populate a set of expected data tables at the moment. Hopefully, this can be -more easily be done in the future... - -Related issues to watch on dbt progress to improve this aspects: -* -* - -A nice improvement would be to add csv/json seed files as expected output data from tables. -The integration tests would verify that the content of such tables in the destination would match -these seed files or fail. - -#### Debug dbt operations with local database -This only works for testing databases launched in local containers (e.g. postgres and mysql). - -* In `dbt_integration_test.py`, comment out the `tear_down_db` method so that the relevant database container is not deleted. -* Find the name of the database container in the logs (e.g. by searching `Executing`). -* Connect to the container by running `docker exec -it bash` in the commandline. -* Connect to the database inside the container (e.g. `mysql -u root` for mysql). -* Test the generated dbt operations directly in the database. - -### Standard Destination Tests - -Generally, to invoke standard destination tests, you run with gradle using: - - ./gradlew :airbyte-integrations:connectors:destination-:integrationTest - -For more details and options, you can also refer to the [testing connectors docs](../../../docs/connector-development/testing-connectors/README.md). - -### Acceptance Tests - -Please refer to the [developing docs](../../../docs/contributing-to-airbyte/developing-locally.md) on how to run Acceptance Tests. - -## Publishing normalization -The normalization publish pipeline still relies on the `manage.sh` [script](https://github.com/airbytehq/airbyte/blob/master/tools/integrations/manage.sh). It is not published on merge to master, but rather on demand, from the PR. To publish normalization, run the following slash command on the PR: - -```text -/legacy-publish connector=bases/base-normalization -``` \ No newline at end of file diff --git a/airbyte-integrations/bases/base-normalization/build.gradle b/airbyte-integrations/bases/base-normalization/build.gradle index 13f2dd53c9f94..4cc45316ef92d 100644 --- a/airbyte-integrations/bases/base-normalization/build.gradle +++ b/airbyte-integrations/bases/base-normalization/build.gradle @@ -4,17 +4,17 @@ plugins { } dependencies { - testFixtures(project(':airbyte-cdk:java:airbyte-cdk:dependencies')) + testFixtures(project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-dependencies')) } // we need to access the sshtunneling script from airbyte-workers for ssh support def copySshScript = tasks.register('copySshScript', Copy) { - from "${project(':airbyte-cdk:java:airbyte-cdk:dependencies').buildDir}/resources/testFixtures" + from "${project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-dependencies').buildDir}/resources/testFixtures" into "${buildDir}" include "sshtunneling.sh" } copySshScript.configure { - dependsOn project(':airbyte-cdk:java:airbyte-cdk:dependencies').tasks.named('processTestFixturesResources') + dependsOn project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-dependencies').tasks.named('processTestFixturesResources') } // make sure the copy task above worked (if it fails, it fails silently annoyingly) diff --git a/airbyte-integrations/bases/base-normalization/unit_tests/test_transform_config.py b/airbyte-integrations/bases/base-normalization/unit_tests/test_transform_config.py index 8668d791a719b..2c3fc60f7ea3e 100644 --- a/airbyte-integrations/bases/base-normalization/unit_tests/test_transform_config.py +++ b/airbyte-integrations/bases/base-normalization/unit_tests/test_transform_config.py @@ -154,8 +154,8 @@ def test_transform_bigquery(self): actual_keyfile = actual_output["keyfile_json"] expected_keyfile = {"type": "service_account-json"} - assert expected_output == actual_output - assert expected_keyfile == actual_keyfile + assert actual_output == expected_output + assert actual_keyfile == expected_keyfile assert extract_schema(actual_output) == "my_dataset_id" def test_transform_bigquery_no_credentials(self): @@ -172,7 +172,7 @@ def test_transform_bigquery_no_credentials(self): "threads": 8, } - assert expected_output == actual_output + assert actual_output == expected_output assert extract_schema(actual_output) == "my_dataset_id" def test_transform_bigquery_with_embedded_project_id(self): @@ -189,7 +189,7 @@ def test_transform_bigquery_with_embedded_project_id(self): "threads": 8, } - assert expected_output == actual_output + assert actual_output == expected_output assert extract_schema(actual_output) == "my_dataset_id" def test_transform_bigquery_with_embedded_mismatched_project_id(self): @@ -232,7 +232,7 @@ def test_transform_postgres(self): "user": "a user", } - assert expected == actual + assert actual == expected assert extract_schema(actual) == "public" def test_transform_postgres_ssh(self): @@ -265,7 +265,7 @@ def test_transform_postgres_ssh(self): "user": "a user", } - assert expected == actual + assert actual == expected assert extract_schema(actual) == "public" def test_transform_snowflake(self): @@ -298,7 +298,7 @@ def test_transform_snowflake(self): "warehouse": "AIRBYTE_WAREHOUSE", } - assert expected == actual + assert actual == expected assert extract_schema(actual) == "AIRBYTE_SCHEMA" def test_transform_snowflake_oauth(self): @@ -341,7 +341,7 @@ def test_transform_snowflake_oauth(self): "token": "AIRBYTE_REFRESH_TOKEN", } - assert expected == actual + assert actual == expected assert extract_schema(actual) == "AIRBYTE_SCHEMA" def test_transform_snowflake_key_pair(self): @@ -379,7 +379,7 @@ def test_transform_snowflake_key_pair(self): "private_key_passphrase": "AIRBYTE_PRIVATE_KEY_PASSWORD", } - assert expected == actual + assert actual == expected assert extract_schema(actual) == "AIRBYTE_SCHEMA" def test_transform_mysql(self): @@ -404,7 +404,7 @@ def test_transform_mysql(self): "password": "password1234", } - assert expected == actual + assert actual == expected # DBT schema is equivalent to MySQL database assert extract_schema(actual) == "my_db" @@ -430,7 +430,7 @@ def test_transform_mssql(self): "password": "password1234", } - assert expected == actual + assert actual == expected # DBT schema is equivalent to MySQL database assert extract_schema(actual) == "my_db" @@ -450,7 +450,7 @@ def test_transform_clickhouse(self): "secure": True, } - assert expected == actual + assert actual == expected assert extract_schema(actual) == "default" # test that the full config is produced. this overlaps slightly with the transform_postgres test. @@ -477,7 +477,7 @@ def test_transform(self): } actual = TransformConfig().transform(DestinationType.POSTGRES, input) - assert expected == actual + assert actual == expected assert extract_schema(actual["normalize"]["outputs"]["prod"]) == "public" def test_transform_tidb(self): @@ -502,7 +502,7 @@ def test_transform_tidb(self): "password": "password1234", } - assert expected == actual + assert actual == expected assert extract_schema(actual) == "ti_db" def test_transform_duckdb_schema(self): @@ -519,7 +519,7 @@ def test_transform_duckdb_schema(self): "schema": "quackqauck", } - assert expected == actual + assert actual == expected assert extract_path(actual) == "/local/testing.duckdb" def test_transform_duckdb_no_schema(self): @@ -535,7 +535,7 @@ def test_transform_duckdb_no_schema(self): "schema": "main", } - assert expected == actual + assert actual == expected assert extract_path(actual) == "/local/testing.duckdb" def get_base_config(self): diff --git a/airbyte-integrations/bases/connector-acceptance-test/CHANGELOG.md b/airbyte-integrations/bases/connector-acceptance-test/CHANGELOG.md index e7d36c2769c21..a245577563d8f 100644 --- a/airbyte-integrations/bases/connector-acceptance-test/CHANGELOG.md +++ b/airbyte-integrations/bases/connector-acceptance-test/CHANGELOG.md @@ -1,5 +1,8 @@ # Changelog +## 3.7.0 +Add `validate_state_messages` to TestBasicRead.test_read:: Validate that all states contain neither legacy state emissions nor missing source stats in the state message. + ## 3.6.0 Relaxing CATs validation when a stream has a primary key defined. diff --git a/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/config.py b/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/config.py index 39e60f4ce531a..1527ad778a36e 100644 --- a/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/config.py +++ b/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/config.py @@ -167,6 +167,7 @@ class BasicReadTestConfig(BaseConfig): expect_records: Optional[ExpectedRecordsConfig] = Field(description="Expected records from the read") validate_schema: bool = Field(True, description="Ensure that records match the schema of the corresponding stream") validate_stream_statuses: bool = Field(None, description="Ensure that all streams emit status messages") + validate_state_messages: bool = Field(True, description="Ensure that state messages emitted as expected") fail_on_extra_columns: bool = Field(True, description="Fail if extra top-level properties (i.e. columns) are detected in records.") # TODO: remove this field after https://github.com/airbytehq/airbyte/issues/8312 is done validate_data_points: bool = Field( diff --git a/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/conftest.py b/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/conftest.py index 94e9f815f24a5..3dfd1b5c053ba 100644 --- a/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/conftest.py +++ b/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/conftest.py @@ -299,6 +299,8 @@ async def discovered_catalog_fixture( output = await docker_runner.call_discover(config=connector_config) catalogs = [message.catalog for message in output if message.type == Type.CATALOG] + if len(catalogs) == 0: + raise ValueError("No catalog message was emitted") return {stream.name: stream for stream in catalogs[-1].streams} @@ -322,6 +324,8 @@ async def previous_discovered_catalog_fixture( ) return None catalogs = [message.catalog for message in output if message.type == Type.CATALOG] + if len(catalogs) == 0: + raise ValueError("No catalog message was emitted") return {stream.name: stream for stream in catalogs[-1].streams} diff --git a/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/tests/test_core.py b/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/tests/test_core.py index d512380712724..50e495c322855 100644 --- a/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/tests/test_core.py +++ b/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/tests/test_core.py @@ -23,6 +23,8 @@ from airbyte_protocol.models import ( AirbyteMessage, AirbyteRecordMessage, + AirbyteStateStats, + AirbyteStateType, AirbyteStream, AirbyteStreamStatus, AirbyteStreamStatusTraceMessage, @@ -866,7 +868,7 @@ def _validate_records_structure(records: List[AirbyteRecordMessage], configured_ therefore any arbitrary object would pass schema validation. This method is here to catch those cases by extracting all the paths from the object and compare it to paths expected from jsonschema. If - there no common pathes then raise an alert. + there no common paths then raise an alert. :param records: List of airbyte record messages gathered from connector instances. :param configured_catalog: Testcase parameters parsed from yaml file @@ -876,15 +878,15 @@ def _validate_records_structure(records: List[AirbyteRecordMessage], configured_ schemas[stream.stream.name] = set(get_expected_schema_structure(stream.stream.json_schema)) for record in records: - schema_pathes = schemas.get(record.stream) - if not schema_pathes: + schema_paths = schemas.get(record.stream) + if not schema_paths: continue record_fields = set(get_object_structure(record.data)) - common_fields = set.intersection(record_fields, schema_pathes) + common_fields = set.intersection(record_fields, schema_paths) assert ( common_fields - ), f" Record {record} from {record.stream} stream with fields {record_fields} should have some fields mentioned by json schema: {schema_pathes}" + ), f" Record {record} from {record.stream} stream with fields {record_fields} should have some fields mentioned by json schema: {schema_paths}" @staticmethod def _validate_schema(records: List[AirbyteRecordMessage], configured_catalog: ConfiguredAirbyteCatalog): @@ -997,6 +999,10 @@ def should_validate_stream_statuses_fixture(self, inputs: BasicReadTestConfig, i pytest.fail("High strictness level error: validate_stream_statuses must be set to true in the basic read test configuration.") return inputs.validate_stream_statuses + @pytest.fixture(name="should_validate_state_messages") + def should_validate_state_messages_fixture(self, inputs: BasicReadTestConfig): + return inputs.validate_state_messages + @pytest.fixture(name="should_fail_on_extra_columns") def should_fail_on_extra_columns_fixture(self, inputs: BasicReadTestConfig): # TODO (Ella): enforce this param once all connectors are passing @@ -1049,6 +1055,7 @@ async def test_read( should_validate_schema: Boolean, should_validate_data_points: Boolean, should_validate_stream_statuses: Boolean, + should_validate_state_messages: Boolean, should_fail_on_extra_columns: Boolean, empty_streams: Set[EmptyStreamConfiguration], ignored_fields: Optional[Mapping[str, List[IgnoredFieldsConfiguration]]], @@ -1060,6 +1067,7 @@ async def test_read( output = await docker_runner.call_read(connector_config, configured_catalog) records = [message.record for message in filter_output(output, Type.RECORD)] + state_messages = [message for message in filter_output(output, Type.STATE)] if certified_file_based_connector: self._file_types.update(self._get_actual_file_types(records)) @@ -1098,6 +1106,9 @@ async def test_read( ] self._validate_stream_statuses(configured_catalog=configured_catalog, statuses=all_statuses) + if should_validate_state_messages: + self._validate_state_messages(state_messages=state_messages, configured_catalog=configured_catalog) + async def test_airbyte_trace_message_on_failure(self, connector_config, inputs: BasicReadTestConfig, docker_runner: ConnectorRunner): if not inputs.expect_trace_message_on_failure: pytest.skip("Skipping `test_airbyte_trace_message_on_failure` because `inputs.expect_trace_message_on_failure=False`") @@ -1254,6 +1265,27 @@ def _validate_stream_statuses(configured_catalog: ConfiguredAirbyteCatalog, stat assert status_list[-1] == AirbyteStreamStatus.COMPLETE assert all(x == AirbyteStreamStatus.RUNNING for x in status_list[1:-1]) + @staticmethod + def _validate_state_messages(state_messages: List[AirbyteMessage], configured_catalog: ConfiguredAirbyteCatalog): + # Ensure that at least one state message is emitted for each stream + assert len(state_messages) >= len( + configured_catalog.streams + ), "At least one state message should be emitted for each configured stream." + + for state_message in state_messages: + state = state_message.state + stream_name = state.stream.stream_descriptor.name + state_type = state.type + + # Ensure legacy state type is not emitted anymore + assert state_type != AirbyteStateType.LEGACY, ( + f"Ensure that statuses from the {stream_name} stream are emitted using either " + "`STREAM` or `GLOBAL` state types, as the `LEGACY` state type is now deprecated." + ) + + # Check if stats are of the correct type and present in state message + assert isinstance(state.sourceStats, AirbyteStateStats), "Source stats should be in state message." + @pytest.mark.default_timeout(TEN_MINUTES) class TestConnectorAttributes(BaseTest): diff --git a/airbyte-integrations/bases/connector-acceptance-test/poetry.lock b/airbyte-integrations/bases/connector-acceptance-test/poetry.lock index 508501085564a..63950b04e8f78 100644 --- a/airbyte-integrations/bases/connector-acceptance-test/poetry.lock +++ b/airbyte-integrations/bases/connector-acceptance-test/poetry.lock @@ -1,14 +1,14 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "airbyte-protocol-models" -version = "0.5.3" +version = "0.8.0" description = "Declares the Airbyte Protocol." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte_protocol_models-0.5.3-py3-none-any.whl", hash = "sha256:a913f1e86d5b2ae17d19e0135339e55fc25bb93bfc3f7ab38592677f29b56c57"}, - {file = "airbyte_protocol_models-0.5.3.tar.gz", hash = "sha256:a71bc0e98e0722d5cbd3122c40a59a7f9cbc91b6c934db7e768a57c40546f54b"}, + {file = "airbyte_protocol_models-0.8.0-py3-none-any.whl", hash = "sha256:45357703a92eab4bd573f446306365acef9f4d3fe15d07fc713f519078df3f10"}, + {file = "airbyte_protocol_models-0.8.0.tar.gz", hash = "sha256:b147dbf15d40b0c5e3f1bf5058e7f219a4ff2e94ee23334f468ec5802809e56f"}, ] [package.dependencies] @@ -77,20 +77,20 @@ files = [ [[package]] name = "beartype" -version = "0.16.4" +version = "0.17.1" description = "Unbearably fast runtime type checking in pure Python." optional = false python-versions = ">=3.8.0" files = [ - {file = "beartype-0.16.4-py3-none-any.whl", hash = "sha256:64865952f9dff1e17f22684b3c7286fc79754553b47eaefeb1286224ae8c1bd9"}, - {file = "beartype-0.16.4.tar.gz", hash = "sha256:1ada89cf2d6eb30eb6e156eed2eb5493357782937910d74380918e53c2eae0bf"}, + {file = "beartype-0.17.1-py3-none-any.whl", hash = "sha256:583deb076e312f5acc2e2928706af2facab1f4282be775ee619e6f42c290f423"}, + {file = "beartype-0.17.1.tar.gz", hash = "sha256:001df1ce51c76f0a21c2183215b26254b667fd8b688a6cbe8f013907cdaaf9b3"}, ] [package.extras] all = ["typing-extensions (>=3.10.0.0)"] -dev = ["autoapi (>=0.9.0)", "coverage (>=5.5)", "mypy (>=0.800)", "numpy", "pandera", "pydata-sphinx-theme (<=0.7.2)", "pytest (>=4.0.0)", "sphinx", "sphinx (>=4.2.0,<6.0.0)", "sphinxext-opengraph (>=0.7.5)", "tox (>=3.20.1)", "typing-extensions (>=3.10.0.0)"] +dev = ["autoapi (>=0.9.0)", "coverage (>=5.5)", "equinox", "mypy (>=0.800)", "numpy", "pandera", "pydata-sphinx-theme (<=0.7.2)", "pytest (>=4.0.0)", "sphinx", "sphinx (>=4.2.0,<6.0.0)", "sphinxext-opengraph (>=0.7.5)", "tox (>=3.20.1)", "typing-extensions (>=3.10.0.0)"] doc-rtd = ["autoapi (>=0.9.0)", "pydata-sphinx-theme (<=0.7.2)", "sphinx (>=4.2.0,<6.0.0)", "sphinxext-opengraph (>=0.7.5)"] -test-tox = ["mypy (>=0.800)", "numpy", "pandera", "pytest (>=4.0.0)", "sphinx", "typing-extensions (>=3.10.0.0)"] +test-tox = ["equinox", "mypy (>=0.800)", "numpy", "pandera", "pytest (>=4.0.0)", "sphinx", "typing-extensions (>=3.10.0.0)"] test-tox-coverage = ["coverage (>=5.5)"] [[package]] @@ -120,13 +120,13 @@ ujson = ["ujson (>=5.7.0)"] [[package]] name = "certifi" -version = "2023.11.17" +version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, - {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] [[package]] @@ -241,63 +241,63 @@ files = [ [[package]] name = "coverage" -version = "7.4.0" +version = "7.4.4" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a"}, - {file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43"}, - {file = "coverage-7.4.0-cp310-cp310-win32.whl", hash = "sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451"}, - {file = "coverage-7.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137"}, - {file = "coverage-7.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca"}, - {file = "coverage-7.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26"}, - {file = "coverage-7.4.0-cp311-cp311-win32.whl", hash = "sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614"}, - {file = "coverage-7.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590"}, - {file = "coverage-7.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143"}, - {file = "coverage-7.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa"}, - {file = "coverage-7.4.0-cp312-cp312-win32.whl", hash = "sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450"}, - {file = "coverage-7.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0"}, - {file = "coverage-7.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e"}, - {file = "coverage-7.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105"}, - {file = "coverage-7.4.0-cp38-cp38-win32.whl", hash = "sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2"}, - {file = "coverage-7.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555"}, - {file = "coverage-7.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42"}, - {file = "coverage-7.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f"}, - {file = "coverage-7.4.0-cp39-cp39-win32.whl", hash = "sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932"}, - {file = "coverage-7.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e"}, - {file = "coverage-7.4.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6"}, - {file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"}, + {file = "coverage-7.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0be5efd5127542ef31f165de269f77560d6cdef525fffa446de6f7e9186cfb2"}, + {file = "coverage-7.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ccd341521be3d1b3daeb41960ae94a5e87abe2f46f17224ba5d6f2b8398016cf"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fa497a8ab37784fbb20ab699c246053ac294d13fc7eb40ec007a5043ec91f8"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1a93009cb80730c9bca5d6d4665494b725b6e8e157c1cb7f2db5b4b122ea562"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:690db6517f09336559dc0b5f55342df62370a48f5469fabf502db2c6d1cffcd2"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:09c3255458533cb76ef55da8cc49ffab9e33f083739c8bd4f58e79fecfe288f7"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ce1415194b4a6bd0cdcc3a1dfbf58b63f910dcb7330fe15bdff542c56949f87"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b91cbc4b195444e7e258ba27ac33769c41b94967919f10037e6355e998af255c"}, + {file = "coverage-7.4.4-cp310-cp310-win32.whl", hash = "sha256:598825b51b81c808cb6f078dcb972f96af96b078faa47af7dfcdf282835baa8d"}, + {file = "coverage-7.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:09ef9199ed6653989ebbcaacc9b62b514bb63ea2f90256e71fea3ed74bd8ff6f"}, + {file = "coverage-7.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f9f50e7ef2a71e2fae92774c99170eb8304e3fdf9c8c3c7ae9bab3e7229c5cf"}, + {file = "coverage-7.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:623512f8ba53c422fcfb2ce68362c97945095b864cda94a92edbaf5994201083"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0513b9508b93da4e1716744ef6ebc507aff016ba115ffe8ecff744d1322a7b63"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40209e141059b9370a2657c9b15607815359ab3ef9918f0196b6fccce8d3230f"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a2b2b78c78293782fd3767d53e6474582f62443d0504b1554370bde86cc8227"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:73bfb9c09951125d06ee473bed216e2c3742f530fc5acc1383883125de76d9cd"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f384c3cc76aeedce208643697fb3e8437604b512255de6d18dae3f27655a384"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:54eb8d1bf7cacfbf2a3186019bcf01d11c666bd495ed18717162f7eb1e9dd00b"}, + {file = "coverage-7.4.4-cp311-cp311-win32.whl", hash = "sha256:cac99918c7bba15302a2d81f0312c08054a3359eaa1929c7e4b26ebe41e9b286"}, + {file = "coverage-7.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:b14706df8b2de49869ae03a5ccbc211f4041750cd4a66f698df89d44f4bd30ec"}, + {file = "coverage-7.4.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:201bef2eea65e0e9c56343115ba3814e896afe6d36ffd37bab783261db430f76"}, + {file = "coverage-7.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:41c9c5f3de16b903b610d09650e5e27adbfa7f500302718c9ffd1c12cf9d6818"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d898fe162d26929b5960e4e138651f7427048e72c853607f2b200909794ed978"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ea79bb50e805cd6ac058dfa3b5c8f6c040cb87fe83de10845857f5535d1db70"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce4b94265ca988c3f8e479e741693d143026632672e3ff924f25fab50518dd51"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:00838a35b882694afda09f85e469c96367daa3f3f2b097d846a7216993d37f4c"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fdfafb32984684eb03c2d83e1e51f64f0906b11e64482df3c5db936ce3839d48"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:69eb372f7e2ece89f14751fbcbe470295d73ed41ecd37ca36ed2eb47512a6ab9"}, + {file = "coverage-7.4.4-cp312-cp312-win32.whl", hash = "sha256:137eb07173141545e07403cca94ab625cc1cc6bc4c1e97b6e3846270e7e1fea0"}, + {file = "coverage-7.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:d71eec7d83298f1af3326ce0ff1d0ea83c7cb98f72b577097f9083b20bdaf05e"}, + {file = "coverage-7.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d5ae728ff3b5401cc320d792866987e7e7e880e6ebd24433b70a33b643bb0384"}, + {file = "coverage-7.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc4f1358cb0c78edef3ed237ef2c86056206bb8d9140e73b6b89fbcfcbdd40e1"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8130a2aa2acb8788e0b56938786c33c7c98562697bf9f4c7d6e8e5e3a0501e4a"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf271892d13e43bc2b51e6908ec9a6a5094a4df1d8af0bfc360088ee6c684409"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4cdc86d54b5da0df6d3d3a2f0b710949286094c3a6700c21e9015932b81447e"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ae71e7ddb7a413dd60052e90528f2f65270aad4b509563af6d03d53e979feafd"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:38dd60d7bf242c4ed5b38e094baf6401faa114fc09e9e6632374388a404f98e7"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa5b1c1bfc28384f1f53b69a023d789f72b2e0ab1b3787aae16992a7ca21056c"}, + {file = "coverage-7.4.4-cp38-cp38-win32.whl", hash = "sha256:dfa8fe35a0bb90382837b238fff375de15f0dcdb9ae68ff85f7a63649c98527e"}, + {file = "coverage-7.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:b2991665420a803495e0b90a79233c1433d6ed77ef282e8e152a324bbbc5e0c8"}, + {file = "coverage-7.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b799445b9f7ee8bf299cfaed6f5b226c0037b74886a4e11515e569b36fe310d"}, + {file = "coverage-7.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b4d33f418f46362995f1e9d4f3a35a1b6322cb959c31d88ae56b0298e1c22357"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aadacf9a2f407a4688d700e4ebab33a7e2e408f2ca04dbf4aef17585389eff3e"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c95949560050d04d46b919301826525597f07b33beba6187d04fa64d47ac82e"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff7687ca3d7028d8a5f0ebae95a6e4827c5616b31a4ee1192bdfde697db110d4"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5fc1de20b2d4a061b3df27ab9b7c7111e9a710f10dc2b84d33a4ab25065994ec"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c74880fc64d4958159fbd537a091d2a585448a8f8508bf248d72112723974cbd"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:742a76a12aa45b44d236815d282b03cfb1de3b4323f3e4ec933acfae08e54ade"}, + {file = "coverage-7.4.4-cp39-cp39-win32.whl", hash = "sha256:d89d7b2974cae412400e88f35d86af72208e1ede1a541954af5d944a8ba46c57"}, + {file = "coverage-7.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:9ca28a302acb19b6af89e90f33ee3e1906961f94b54ea37de6737b7ca9d8827c"}, + {file = "coverage-7.4.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:b2c5edc4ac10a7ef6605a966c58929ec6c1bd0917fb8c15cb3363f65aa40e677"}, + {file = "coverage-7.4.4.tar.gz", hash = "sha256:c901df83d097649e257e803be22592aedfd5182f07b3cc87d640bbb9afd50f49"}, ] [package.dependencies] @@ -472,13 +472,13 @@ files = [ [[package]] name = "httpcore" -version = "1.0.2" +version = "1.0.5" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.8" files = [ - {file = "httpcore-1.0.2-py3-none-any.whl", hash = "sha256:096cc05bca73b8e459a1fc3dcf585148f63e534eae4339559c9b8a8d6399acc7"}, - {file = "httpcore-1.0.2.tar.gz", hash = "sha256:9fc092e4799b26174648e54b74ed5f683132a464e95643b226e00c2ed2fa6535"}, + {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, + {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, ] [package.dependencies] @@ -489,17 +489,17 @@ h11 = ">=0.13,<0.15" asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.23.0)"] +trio = ["trio (>=0.22.0,<0.26.0)"] [[package]] name = "httpx" -version = "0.26.0" +version = "0.27.0" description = "The next generation HTTP client." optional = false python-versions = ">=3.8" files = [ - {file = "httpx-0.26.0-py3-none-any.whl", hash = "sha256:8915f5a3627c4d47b73e8202457cb28f1266982d1159bd5779d86a80c0eab1cd"}, - {file = "httpx-0.26.0.tar.gz", hash = "sha256:451b55c30d5185ea6b23c2c793abf9bb237d2a7dfb901ced6ff69ad37ec1dfaf"}, + {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, + {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, ] [package.dependencies] @@ -517,13 +517,13 @@ socks = ["socksio (==1.*)"] [[package]] name = "hypothesis" -version = "6.96.0" +version = "6.99.13" description = "A library for property-based testing" optional = false python-versions = ">=3.8" files = [ - {file = "hypothesis-6.96.0-py3-none-any.whl", hash = "sha256:ec8e0348844e1a9368aeaf85dbea1d247f93f5f865fdf65801bc578b4608cc08"}, - {file = "hypothesis-6.96.0.tar.gz", hash = "sha256:fec50dcbc54ec5884a4199d723543ba9408bbab940cc3ab849a92fe1fab97625"}, + {file = "hypothesis-6.99.13-py3-none-any.whl", hash = "sha256:b538df1d22365df84f94c38fb2d9c41a222373594c2a910cc8f4ddc68240a62f"}, + {file = "hypothesis-6.99.13.tar.gz", hash = "sha256:e425e8a3f1912e44f62ff3e2768dca19c79f46d43ec70fa56e96e2d7194ccd2d"}, ] [package.dependencies] @@ -532,9 +532,10 @@ exceptiongroup = {version = ">=1.0.0", markers = "python_version < \"3.11\""} sortedcontainers = ">=2.1.0,<3.0.0" [package.extras] -all = ["backports.zoneinfo (>=0.2.1)", "black (>=19.10b0)", "click (>=7.0)", "django (>=3.2)", "dpcontracts (>=0.4)", "lark (>=0.10.1)", "libcst (>=0.3.16)", "numpy (>=1.17.3)", "pandas (>=1.1)", "pytest (>=4.6)", "python-dateutil (>=1.4)", "pytz (>=2014.1)", "redis (>=3.0.0)", "rich (>=9.0.0)", "tzdata (>=2023.4)"] +all = ["backports.zoneinfo (>=0.2.1)", "black (>=19.10b0)", "click (>=7.0)", "crosshair-tool (>=0.0.53)", "django (>=3.2)", "dpcontracts (>=0.4)", "hypothesis-crosshair (>=0.0.2)", "lark (>=0.10.1)", "libcst (>=0.3.16)", "numpy (>=1.17.3)", "pandas (>=1.1)", "pytest (>=4.6)", "python-dateutil (>=1.4)", "pytz (>=2014.1)", "redis (>=3.0.0)", "rich (>=9.0.0)", "tzdata (>=2024.1)"] cli = ["black (>=19.10b0)", "click (>=7.0)", "rich (>=9.0.0)"] codemods = ["libcst (>=0.3.16)"] +crosshair = ["crosshair-tool (>=0.0.53)", "hypothesis-crosshair (>=0.0.2)"] dateutil = ["python-dateutil (>=1.4)"] django = ["django (>=3.2)"] dpcontracts = ["dpcontracts (>=0.4)"] @@ -545,17 +546,17 @@ pandas = ["pandas (>=1.1)"] pytest = ["pytest (>=4.6)"] pytz = ["pytz (>=2014.1)"] redis = ["redis (>=3.0.0)"] -zoneinfo = ["backports.zoneinfo (>=0.2.1)", "tzdata (>=2023.4)"] +zoneinfo = ["backports.zoneinfo (>=0.2.1)", "tzdata (>=2024.1)"] [[package]] name = "hypothesis-jsonschema" -version = "0.23.0" +version = "0.23.1" description = "Generate test data from JSON schemata with Hypothesis" optional = false python-versions = ">=3.8" files = [ - {file = "hypothesis-jsonschema-0.23.0.tar.gz", hash = "sha256:c3cc5ecddd78efcb5c10cc3fbcf06aa4d32d8300d0babb8c6f89485f7a503aef"}, - {file = "hypothesis_jsonschema-0.23.0-py3-none-any.whl", hash = "sha256:bbf13b49970216b69adfeab666e483bd83691573d9fee55f3c69adeefa978a09"}, + {file = "hypothesis-jsonschema-0.23.1.tar.gz", hash = "sha256:f4ac032024342a4149a10253984f5a5736b82b3fe2afb0888f3834a31153f215"}, + {file = "hypothesis_jsonschema-0.23.1-py3-none-any.whl", hash = "sha256:a4d74d9516dd2784fbbae82e009f62486c9104ac6f4e3397091d98a1d5ee94a2"}, ] [package.dependencies] @@ -618,13 +619,13 @@ files = [ [[package]] name = "jsonschema" -version = "4.21.0" +version = "4.21.1" description = "An implementation of JSON Schema validation for Python" optional = false python-versions = ">=3.8" files = [ - {file = "jsonschema-4.21.0-py3-none-any.whl", hash = "sha256:70a09719d375c0a2874571b363c8a24be7df8071b80c9aa76bc4551e7297c63c"}, - {file = "jsonschema-4.21.0.tar.gz", hash = "sha256:3ba18e27f7491ea4a1b22edce00fb820eec968d397feb3f9cb61d5894bb38167"}, + {file = "jsonschema-4.21.1-py3-none-any.whl", hash = "sha256:7996507afae316306f9e2290407761157c6f78002dcf7419acb99822143d1c6f"}, + {file = "jsonschema-4.21.1.tar.gz", hash = "sha256:85727c00279f5fa6bedbe6238d2aa6403bedd8b4864ab11207d07df3cc1b2ee5"}, ] [package.dependencies] @@ -688,85 +689,101 @@ files = [ [[package]] name = "multidict" -version = "6.0.4" +version = "6.0.5" description = "multidict implementation" optional = false python-versions = ">=3.7" files = [ - {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, - {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, - {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, - {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, - {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, - {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, - {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, - {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, - {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, - {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, - {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, - {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, - {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, - {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, - {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, + {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, + {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, + {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, + {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, + {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, + {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, + {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, + {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, + {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, + {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, + {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, + {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, + {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, + {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, + {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, ] [[package]] @@ -785,13 +802,13 @@ dev = ["black", "mypy", "pytest"] [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -915,28 +932,28 @@ test = ["time-machine (>=2.6.0)"] [[package]] name = "platformdirs" -version = "4.1.0" +version = "4.2.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, - {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, ] [package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] [[package]] name = "pluggy" -version = "1.3.0" +version = "1.4.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, - {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, ] [package.extras] @@ -967,47 +984,47 @@ files = [ [[package]] name = "pydantic" -version = "1.10.13" +version = "1.10.14" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737"}, - {file = "pydantic-1.10.13-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01"}, - {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1740068fd8e2ef6eb27a20e5651df000978edce6da6803c2bef0bc74540f9548"}, - {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84bafe2e60b5e78bc64a2941b4c071a4b7404c5c907f5f5a99b0139781e69ed8"}, - {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bc0898c12f8e9c97f6cd44c0ed70d55749eaf783716896960b4ecce2edfd2d69"}, - {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:654db58ae399fe6434e55325a2c3e959836bd17a6f6a0b6ca8107ea0571d2e17"}, - {file = "pydantic-1.10.13-cp310-cp310-win_amd64.whl", hash = "sha256:75ac15385a3534d887a99c713aa3da88a30fbd6204a5cd0dc4dab3d770b9bd2f"}, - {file = "pydantic-1.10.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653"}, - {file = "pydantic-1.10.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe"}, - {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9"}, - {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80"}, - {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580"}, - {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0"}, - {file = "pydantic-1.10.13-cp311-cp311-win_amd64.whl", hash = "sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0"}, - {file = "pydantic-1.10.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4b03e42ec20286f052490423682016fd80fda830d8e4119f8ab13ec7464c0132"}, - {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f59ef915cac80275245824e9d771ee939133be38215555e9dc90c6cb148aaeb5"}, - {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a1f9f747851338933942db7af7b6ee8268568ef2ed86c4185c6ef4402e80ba8"}, - {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:97cce3ae7341f7620a0ba5ef6cf043975cd9d2b81f3aa5f4ea37928269bc1b87"}, - {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:854223752ba81e3abf663d685f105c64150873cc6f5d0c01d3e3220bcff7d36f"}, - {file = "pydantic-1.10.13-cp37-cp37m-win_amd64.whl", hash = "sha256:b97c1fac8c49be29486df85968682b0afa77e1b809aff74b83081cc115e52f33"}, - {file = "pydantic-1.10.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c958d053453a1c4b1c2062b05cd42d9d5c8eb67537b8d5a7e3c3032943ecd261"}, - {file = "pydantic-1.10.13-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c5370a7edaac06daee3af1c8b1192e305bc102abcbf2a92374b5bc793818599"}, - {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d6f6e7305244bddb4414ba7094ce910560c907bdfa3501e9db1a7fd7eaea127"}, - {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3a3c792a58e1622667a2837512099eac62490cdfd63bd407993aaf200a4cf1f"}, - {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c636925f38b8db208e09d344c7aa4f29a86bb9947495dd6b6d376ad10334fb78"}, - {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:678bcf5591b63cc917100dc50ab6caebe597ac67e8c9ccb75e698f66038ea953"}, - {file = "pydantic-1.10.13-cp38-cp38-win_amd64.whl", hash = "sha256:6cf25c1a65c27923a17b3da28a0bdb99f62ee04230c931d83e888012851f4e7f"}, - {file = "pydantic-1.10.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8ef467901d7a41fa0ca6db9ae3ec0021e3f657ce2c208e98cd511f3161c762c6"}, - {file = "pydantic-1.10.13-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:968ac42970f57b8344ee08837b62f6ee6f53c33f603547a55571c954a4225691"}, - {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9849f031cf8a2f0a928fe885e5a04b08006d6d41876b8bbd2fc68a18f9f2e3fd"}, - {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56e3ff861c3b9c6857579de282ce8baabf443f42ffba355bf070770ed63e11e1"}, - {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f00790179497767aae6bcdc36355792c79e7bbb20b145ff449700eb076c5f96"}, - {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:75b297827b59bc229cac1a23a2f7a4ac0031068e5be0ce385be1462e7e17a35d"}, - {file = "pydantic-1.10.13-cp39-cp39-win_amd64.whl", hash = "sha256:e70ca129d2053fb8b728ee7d1af8e553a928d7e301a311094b8a0501adc8763d"}, - {file = "pydantic-1.10.13-py3-none-any.whl", hash = "sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687"}, - {file = "pydantic-1.10.13.tar.gz", hash = "sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340"}, + {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, + {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, + {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, + {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, + {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, + {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, + {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, + {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, + {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, + {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, ] [package.dependencies] @@ -1166,13 +1183,13 @@ testing = ["filelock"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -1213,7 +1230,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -1221,16 +1237,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -1247,7 +1255,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -1255,7 +1262,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -1263,13 +1269,13 @@ files = [ [[package]] name = "referencing" -version = "0.32.1" +version = "0.34.0" description = "JSON Referencing + Python" optional = false python-versions = ">=3.8" files = [ - {file = "referencing-0.32.1-py3-none-any.whl", hash = "sha256:7e4dc12271d8e15612bfe35792f5ea1c40970dadf8624602e33db2758f7ee554"}, - {file = "referencing-0.32.1.tar.gz", hash = "sha256:3c57da0513e9563eb7e203ebe9bb3a1b509b042016433bd1e45a2853466c3dd3"}, + {file = "referencing-0.34.0-py3-none-any.whl", hash = "sha256:d53ae300ceddd3169f1ffa9caf2cb7b769e92657e4fafb23d34b93679116dfd4"}, + {file = "referencing-0.34.0.tar.gz", hash = "sha256:5773bd84ef41799a5a8ca72dc34590c041eb01bf9aa02632b4a973fb0181a844"}, ] [package.dependencies] @@ -1278,20 +1284,20 @@ rpds-py = ">=0.7.0" [[package]] name = "requests" -version = "2.28.2" +version = "2.31.0" description = "Python HTTP for Humans." optional = false -python-versions = ">=3.7, <4" +python-versions = ">=3.7" files = [ - {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"}, - {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"}, + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, ] [package.dependencies] certifi = ">=2017.4.17" charset-normalizer = ">=2,<4" idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<1.27" +urllib3 = ">=1.21.1,<3" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] @@ -1318,13 +1324,13 @@ test = ["fixtures", "mock", "purl", "pytest", "sphinx", "testrepository (>=0.0.1 [[package]] name = "rich" -version = "13.7.0" +version = "13.7.1" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.7.0" files = [ - {file = "rich-13.7.0-py3-none-any.whl", hash = "sha256:6da14c108c4866ee9520bbffa71f6fe3962e193b7da68720583850cd4548e235"}, - {file = "rich-13.7.0.tar.gz", hash = "sha256:5cb5123b5cf9ee70584244246816e9114227e0b98ad9176eede6ad54bf5403fa"}, + {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, + {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"}, ] [package.dependencies] @@ -1336,110 +1342,110 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "rpds-py" -version = "0.17.1" +version = "0.18.0" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.8" files = [ - {file = "rpds_py-0.17.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:4128980a14ed805e1b91a7ed551250282a8ddf8201a4e9f8f5b7e6225f54170d"}, - {file = "rpds_py-0.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ff1dcb8e8bc2261a088821b2595ef031c91d499a0c1b031c152d43fe0a6ecec8"}, - {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d65e6b4f1443048eb7e833c2accb4fa7ee67cc7d54f31b4f0555b474758bee55"}, - {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a71169d505af63bb4d20d23a8fbd4c6ce272e7bce6cc31f617152aa784436f29"}, - {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:436474f17733c7dca0fbf096d36ae65277e8645039df12a0fa52445ca494729d"}, - {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10162fe3f5f47c37ebf6d8ff5a2368508fe22007e3077bf25b9c7d803454d921"}, - {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:720215373a280f78a1814becb1312d4e4d1077b1202a56d2b0815e95ccb99ce9"}, - {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:70fcc6c2906cfa5c6a552ba7ae2ce64b6c32f437d8f3f8eea49925b278a61453"}, - {file = "rpds_py-0.17.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:91e5a8200e65aaac342a791272c564dffcf1281abd635d304d6c4e6b495f29dc"}, - {file = "rpds_py-0.17.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:99f567dae93e10be2daaa896e07513dd4bf9c2ecf0576e0533ac36ba3b1d5394"}, - {file = "rpds_py-0.17.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:24e4900a6643f87058a27320f81336d527ccfe503984528edde4bb660c8c8d59"}, - {file = "rpds_py-0.17.1-cp310-none-win32.whl", hash = "sha256:0bfb09bf41fe7c51413f563373e5f537eaa653d7adc4830399d4e9bdc199959d"}, - {file = "rpds_py-0.17.1-cp310-none-win_amd64.whl", hash = "sha256:20de7b7179e2031a04042e85dc463a93a82bc177eeba5ddd13ff746325558aa6"}, - {file = "rpds_py-0.17.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:65dcf105c1943cba45d19207ef51b8bc46d232a381e94dd38719d52d3980015b"}, - {file = "rpds_py-0.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:01f58a7306b64e0a4fe042047dd2b7d411ee82e54240284bab63e325762c1147"}, - {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:071bc28c589b86bc6351a339114fb7a029f5cddbaca34103aa573eba7b482382"}, - {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ae35e8e6801c5ab071b992cb2da958eee76340e6926ec693b5ff7d6381441745"}, - {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149c5cd24f729e3567b56e1795f74577aa3126c14c11e457bec1b1c90d212e38"}, - {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e796051f2070f47230c745d0a77a91088fbee2cc0502e9b796b9c6471983718c"}, - {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60e820ee1004327609b28db8307acc27f5f2e9a0b185b2064c5f23e815f248f8"}, - {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1957a2ab607f9added64478a6982742eb29f109d89d065fa44e01691a20fc20a"}, - {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8587fd64c2a91c33cdc39d0cebdaf30e79491cc029a37fcd458ba863f8815383"}, - {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4dc889a9d8a34758d0fcc9ac86adb97bab3fb7f0c4d29794357eb147536483fd"}, - {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2953937f83820376b5979318840f3ee47477d94c17b940fe31d9458d79ae7eea"}, - {file = "rpds_py-0.17.1-cp311-none-win32.whl", hash = "sha256:1bfcad3109c1e5ba3cbe2f421614e70439f72897515a96c462ea657261b96518"}, - {file = "rpds_py-0.17.1-cp311-none-win_amd64.whl", hash = "sha256:99da0a4686ada4ed0f778120a0ea8d066de1a0a92ab0d13ae68492a437db78bf"}, - {file = "rpds_py-0.17.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1dc29db3900cb1bb40353772417800f29c3d078dbc8024fd64655a04ee3c4bdf"}, - {file = "rpds_py-0.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:82ada4a8ed9e82e443fcef87e22a3eed3654dd3adf6e3b3a0deb70f03e86142a"}, - {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d36b2b59e8cc6e576f8f7b671e32f2ff43153f0ad6d0201250a7c07f25d570e"}, - {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3677fcca7fb728c86a78660c7fb1b07b69b281964673f486ae72860e13f512ad"}, - {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:516fb8c77805159e97a689e2f1c80655c7658f5af601c34ffdb916605598cda2"}, - {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df3b6f45ba4515632c5064e35ca7f31d51d13d1479673185ba8f9fefbbed58b9"}, - {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a967dd6afda7715d911c25a6ba1517975acd8d1092b2f326718725461a3d33f9"}, - {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dbbb95e6fc91ea3102505d111b327004d1c4ce98d56a4a02e82cd451f9f57140"}, - {file = "rpds_py-0.17.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:02866e060219514940342a1f84303a1ef7a1dad0ac311792fbbe19b521b489d2"}, - {file = "rpds_py-0.17.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2528ff96d09f12e638695f3a2e0c609c7b84c6df7c5ae9bfeb9252b6fa686253"}, - {file = "rpds_py-0.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bd345a13ce06e94c753dab52f8e71e5252aec1e4f8022d24d56decd31e1b9b23"}, - {file = "rpds_py-0.17.1-cp312-none-win32.whl", hash = "sha256:2a792b2e1d3038daa83fa474d559acfd6dc1e3650ee93b2662ddc17dbff20ad1"}, - {file = "rpds_py-0.17.1-cp312-none-win_amd64.whl", hash = "sha256:292f7344a3301802e7c25c53792fae7d1593cb0e50964e7bcdcc5cf533d634e3"}, - {file = "rpds_py-0.17.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:8ffe53e1d8ef2520ebcf0c9fec15bb721da59e8ef283b6ff3079613b1e30513d"}, - {file = "rpds_py-0.17.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4341bd7579611cf50e7b20bb8c2e23512a3dc79de987a1f411cb458ab670eb90"}, - {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4eb548daf4836e3b2c662033bfbfc551db58d30fd8fe660314f86bf8510b93"}, - {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b686f25377f9c006acbac63f61614416a6317133ab7fafe5de5f7dc8a06d42eb"}, - {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4e21b76075c01d65d0f0f34302b5a7457d95721d5e0667aea65e5bb3ab415c25"}, - {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b86b21b348f7e5485fae740d845c65a880f5d1eda1e063bc59bef92d1f7d0c55"}, - {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f175e95a197f6a4059b50757a3dca33b32b61691bdbd22c29e8a8d21d3914cae"}, - {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1701fc54460ae2e5efc1dd6350eafd7a760f516df8dbe51d4a1c79d69472fbd4"}, - {file = "rpds_py-0.17.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:9051e3d2af8f55b42061603e29e744724cb5f65b128a491446cc029b3e2ea896"}, - {file = "rpds_py-0.17.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:7450dbd659fed6dd41d1a7d47ed767e893ba402af8ae664c157c255ec6067fde"}, - {file = "rpds_py-0.17.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5a024fa96d541fd7edaa0e9d904601c6445e95a729a2900c5aec6555fe921ed6"}, - {file = "rpds_py-0.17.1-cp38-none-win32.whl", hash = "sha256:da1ead63368c04a9bded7904757dfcae01eba0e0f9bc41d3d7f57ebf1c04015a"}, - {file = "rpds_py-0.17.1-cp38-none-win_amd64.whl", hash = "sha256:841320e1841bb53fada91c9725e766bb25009cfd4144e92298db296fb6c894fb"}, - {file = "rpds_py-0.17.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:f6c43b6f97209e370124baf2bf40bb1e8edc25311a158867eb1c3a5d449ebc7a"}, - {file = "rpds_py-0.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e7d63ec01fe7c76c2dbb7e972fece45acbb8836e72682bde138e7e039906e2c"}, - {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81038ff87a4e04c22e1d81f947c6ac46f122e0c80460b9006e6517c4d842a6ec"}, - {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:810685321f4a304b2b55577c915bece4c4a06dfe38f6e62d9cc1d6ca8ee86b99"}, - {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:25f071737dae674ca8937a73d0f43f5a52e92c2d178330b4c0bb6ab05586ffa6"}, - {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa5bfb13f1e89151ade0eb812f7b0d7a4d643406caaad65ce1cbabe0a66d695f"}, - {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfe07308b311a8293a0d5ef4e61411c5c20f682db6b5e73de6c7c8824272c256"}, - {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a000133a90eea274a6f28adc3084643263b1e7c1a5a66eb0a0a7a36aa757ed74"}, - {file = "rpds_py-0.17.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d0e8a6434a3fbf77d11448c9c25b2f25244226cfbec1a5159947cac5b8c5fa4"}, - {file = "rpds_py-0.17.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:efa767c220d94aa4ac3a6dd3aeb986e9f229eaf5bce92d8b1b3018d06bed3772"}, - {file = "rpds_py-0.17.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:dbc56680ecf585a384fbd93cd42bc82668b77cb525343170a2d86dafaed2a84b"}, - {file = "rpds_py-0.17.1-cp39-none-win32.whl", hash = "sha256:270987bc22e7e5a962b1094953ae901395e8c1e1e83ad016c5cfcfff75a15a3f"}, - {file = "rpds_py-0.17.1-cp39-none-win_amd64.whl", hash = "sha256:2a7b2f2f56a16a6d62e55354dd329d929560442bd92e87397b7a9586a32e3e76"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a3264e3e858de4fc601741498215835ff324ff2482fd4e4af61b46512dd7fc83"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f2f3b28b40fddcb6c1f1f6c88c6f3769cd933fa493ceb79da45968a21dccc920"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9584f8f52010295a4a417221861df9bea4c72d9632562b6e59b3c7b87a1522b7"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c64602e8be701c6cfe42064b71c84ce62ce66ddc6422c15463fd8127db3d8066"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:060f412230d5f19fc8c8b75f315931b408d8ebf56aec33ef4168d1b9e54200b1"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9412abdf0ba70faa6e2ee6c0cc62a8defb772e78860cef419865917d86c7342"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9737bdaa0ad33d34c0efc718741abaafce62fadae72c8b251df9b0c823c63b22"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9f0e4dc0f17dcea4ab9d13ac5c666b6b5337042b4d8f27e01b70fae41dd65c57"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1db228102ab9d1ff4c64148c96320d0be7044fa28bd865a9ce628ce98da5973d"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:d8bbd8e56f3ba25a7d0cf980fc42b34028848a53a0e36c9918550e0280b9d0b6"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:be22ae34d68544df293152b7e50895ba70d2a833ad9566932d750d3625918b82"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bf046179d011e6114daf12a534d874958b039342b347348a78b7cdf0dd9d6041"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:1a746a6d49665058a5896000e8d9d2f1a6acba8a03b389c1e4c06e11e0b7f40d"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0b8bf5b8db49d8fd40f54772a1dcf262e8be0ad2ab0206b5a2ec109c176c0a4"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f7f4cb1f173385e8a39c29510dd11a78bf44e360fb75610594973f5ea141028b"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7fbd70cb8b54fe745301921b0816c08b6d917593429dfc437fd024b5ba713c58"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bdf1303df671179eaf2cb41e8515a07fc78d9d00f111eadbe3e14262f59c3d0"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fad059a4bd14c45776600d223ec194e77db6c20255578bb5bcdd7c18fd169361"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3664d126d3388a887db44c2e293f87d500c4184ec43d5d14d2d2babdb4c64cad"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:698ea95a60c8b16b58be9d854c9f993c639f5c214cf9ba782eca53a8789d6b19"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:c3d2010656999b63e628a3c694f23020322b4178c450dc478558a2b6ef3cb9bb"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:938eab7323a736533f015e6069a7d53ef2dcc841e4e533b782c2bfb9fb12d84b"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1e626b365293a2142a62b9a614e1f8e331b28f3ca57b9f05ebbf4cf2a0f0bdc5"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:380e0df2e9d5d5d339803cfc6d183a5442ad7ab3c63c2a0982e8c824566c5ccc"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b760a56e080a826c2e5af09002c1a037382ed21d03134eb6294812dda268c811"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5576ee2f3a309d2bb403ec292d5958ce03953b0e57a11d224c1f134feaf8c40f"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f3c3461ebb4c4f1bbc70b15d20b565759f97a5aaf13af811fcefc892e9197ba"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:637b802f3f069a64436d432117a7e58fab414b4e27a7e81049817ae94de45d8d"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffee088ea9b593cc6160518ba9bd319b5475e5f3e578e4552d63818773c6f56a"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3ac732390d529d8469b831949c78085b034bff67f584559340008d0f6041a049"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:93432e747fb07fa567ad9cc7aaadd6e29710e515aabf939dfbed8046041346c6"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:7b7d9ca34542099b4e185b3c2a2b2eda2e318a7dbde0b0d83357a6d4421b5296"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:0387ce69ba06e43df54e43968090f3626e231e4bc9150e4c3246947567695f68"}, - {file = "rpds_py-0.17.1.tar.gz", hash = "sha256:0210b2668f24c078307260bf88bdac9d6f1093635df5123789bfee4d8d7fc8e7"}, + {file = "rpds_py-0.18.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:5b4e7d8d6c9b2e8ee2d55c90b59c707ca59bc30058269b3db7b1f8df5763557e"}, + {file = "rpds_py-0.18.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c463ed05f9dfb9baebef68048aed8dcdc94411e4bf3d33a39ba97e271624f8f7"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01e36a39af54a30f28b73096dd39b6802eddd04c90dbe161c1b8dbe22353189f"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d62dec4976954a23d7f91f2f4530852b0c7608116c257833922a896101336c51"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd18772815d5f008fa03d2b9a681ae38d5ae9f0e599f7dda233c439fcaa00d40"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:923d39efa3cfb7279a0327e337a7958bff00cc447fd07a25cddb0a1cc9a6d2da"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39514da80f971362f9267c600b6d459bfbbc549cffc2cef8e47474fddc9b45b1"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a34d557a42aa28bd5c48a023c570219ba2593bcbbb8dc1b98d8cf5d529ab1434"}, + {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:93df1de2f7f7239dc9cc5a4a12408ee1598725036bd2dedadc14d94525192fc3"}, + {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:34b18ba135c687f4dac449aa5157d36e2cbb7c03cbea4ddbd88604e076aa836e"}, + {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c0b5dcf9193625afd8ecc92312d6ed78781c46ecbf39af9ad4681fc9f464af88"}, + {file = "rpds_py-0.18.0-cp310-none-win32.whl", hash = "sha256:c4325ff0442a12113a6379af66978c3fe562f846763287ef66bdc1d57925d337"}, + {file = "rpds_py-0.18.0-cp310-none-win_amd64.whl", hash = "sha256:7223a2a5fe0d217e60a60cdae28d6949140dde9c3bcc714063c5b463065e3d66"}, + {file = "rpds_py-0.18.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3a96e0c6a41dcdba3a0a581bbf6c44bb863f27c541547fb4b9711fd8cf0ffad4"}, + {file = "rpds_py-0.18.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30f43887bbae0d49113cbaab729a112251a940e9b274536613097ab8b4899cf6"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fcb25daa9219b4cf3a0ab24b0eb9a5cc8949ed4dc72acb8fa16b7e1681aa3c58"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d68c93e381010662ab873fea609bf6c0f428b6d0bb00f2c6939782e0818d37bf"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b34b7aa8b261c1dbf7720b5d6f01f38243e9b9daf7e6b8bc1fd4657000062f2c"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e6d75ab12b0bbab7215e5d40f1e5b738aa539598db27ef83b2ec46747df90e1"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b8612cd233543a3781bc659c731b9d607de65890085098986dfd573fc2befe5"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aec493917dd45e3c69d00a8874e7cbed844efd935595ef78a0f25f14312e33c6"}, + {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:661d25cbffaf8cc42e971dd570d87cb29a665f49f4abe1f9e76be9a5182c4688"}, + {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1df3659d26f539ac74fb3b0c481cdf9d725386e3552c6fa2974f4d33d78e544b"}, + {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a1ce3ba137ed54f83e56fb983a5859a27d43a40188ba798993812fed73c70836"}, + {file = "rpds_py-0.18.0-cp311-none-win32.whl", hash = "sha256:69e64831e22a6b377772e7fb337533c365085b31619005802a79242fee620bc1"}, + {file = "rpds_py-0.18.0-cp311-none-win_amd64.whl", hash = "sha256:998e33ad22dc7ec7e030b3df701c43630b5bc0d8fbc2267653577e3fec279afa"}, + {file = "rpds_py-0.18.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7f2facbd386dd60cbbf1a794181e6aa0bd429bd78bfdf775436020172e2a23f0"}, + {file = "rpds_py-0.18.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1d9a5be316c15ffb2b3c405c4ff14448c36b4435be062a7f578ccd8b01f0c4d8"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd5bf1af8efe569654bbef5a3e0a56eca45f87cfcffab31dd8dde70da5982475"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5417558f6887e9b6b65b4527232553c139b57ec42c64570569b155262ac0754f"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:56a737287efecafc16f6d067c2ea0117abadcd078d58721f967952db329a3e5c"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8f03bccbd8586e9dd37219bce4d4e0d3ab492e6b3b533e973fa08a112cb2ffc9"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4457a94da0d5c53dc4b3e4de1158bdab077db23c53232f37a3cb7afdb053a4e3"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0ab39c1ba9023914297dd88ec3b3b3c3f33671baeb6acf82ad7ce883f6e8e157"}, + {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9d54553c1136b50fd12cc17e5b11ad07374c316df307e4cfd6441bea5fb68496"}, + {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0af039631b6de0397ab2ba16eaf2872e9f8fca391b44d3d8cac317860a700a3f"}, + {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:84ffab12db93b5f6bad84c712c92060a2d321b35c3c9960b43d08d0f639d60d7"}, + {file = "rpds_py-0.18.0-cp312-none-win32.whl", hash = "sha256:685537e07897f173abcf67258bee3c05c374fa6fff89d4c7e42fb391b0605e98"}, + {file = "rpds_py-0.18.0-cp312-none-win_amd64.whl", hash = "sha256:e003b002ec72c8d5a3e3da2989c7d6065b47d9eaa70cd8808b5384fbb970f4ec"}, + {file = "rpds_py-0.18.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:08f9ad53c3f31dfb4baa00da22f1e862900f45908383c062c27628754af2e88e"}, + {file = "rpds_py-0.18.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0013fe6b46aa496a6749c77e00a3eb07952832ad6166bd481c74bda0dcb6d58"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e32a92116d4f2a80b629778280103d2a510a5b3f6314ceccd6e38006b5e92dcb"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e541ec6f2ec456934fd279a3120f856cd0aedd209fc3852eca563f81738f6861"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bed88b9a458e354014d662d47e7a5baafd7ff81c780fd91584a10d6ec842cb73"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2644e47de560eb7bd55c20fc59f6daa04682655c58d08185a9b95c1970fa1e07"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e8916ae4c720529e18afa0b879473049e95949bf97042e938530e072fde061d"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:465a3eb5659338cf2a9243e50ad9b2296fa15061736d6e26240e713522b6235c"}, + {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ea7d4a99f3b38c37eac212dbd6ec42b7a5ec51e2c74b5d3223e43c811609e65f"}, + {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:67071a6171e92b6da534b8ae326505f7c18022c6f19072a81dcf40db2638767c"}, + {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:41ef53e7c58aa4ef281da975f62c258950f54b76ec8e45941e93a3d1d8580594"}, + {file = "rpds_py-0.18.0-cp38-none-win32.whl", hash = "sha256:fdea4952db2793c4ad0bdccd27c1d8fdd1423a92f04598bc39425bcc2b8ee46e"}, + {file = "rpds_py-0.18.0-cp38-none-win_amd64.whl", hash = "sha256:7cd863afe7336c62ec78d7d1349a2f34c007a3cc6c2369d667c65aeec412a5b1"}, + {file = "rpds_py-0.18.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:5307def11a35f5ae4581a0b658b0af8178c65c530e94893345bebf41cc139d33"}, + {file = "rpds_py-0.18.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:77f195baa60a54ef9d2de16fbbfd3ff8b04edc0c0140a761b56c267ac11aa467"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39f5441553f1c2aed4de4377178ad8ff8f9d733723d6c66d983d75341de265ab"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a00312dea9310d4cb7dbd7787e722d2e86a95c2db92fbd7d0155f97127bcb40"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f2fc11e8fe034ee3c34d316d0ad8808f45bc3b9ce5857ff29d513f3ff2923a1"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:586f8204935b9ec884500498ccc91aa869fc652c40c093bd9e1471fbcc25c022"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddc2f4dfd396c7bfa18e6ce371cba60e4cf9d2e5cdb71376aa2da264605b60b9"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ddcba87675b6d509139d1b521e0c8250e967e63b5909a7e8f8944d0f90ff36f"}, + {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7bd339195d84439cbe5771546fe8a4e8a7a045417d8f9de9a368c434e42a721e"}, + {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:d7c36232a90d4755b720fbd76739d8891732b18cf240a9c645d75f00639a9024"}, + {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6b0817e34942b2ca527b0e9298373e7cc75f429e8da2055607f4931fded23e20"}, + {file = "rpds_py-0.18.0-cp39-none-win32.whl", hash = "sha256:99f70b740dc04d09e6b2699b675874367885217a2e9f782bdf5395632ac663b7"}, + {file = "rpds_py-0.18.0-cp39-none-win_amd64.whl", hash = "sha256:6ef687afab047554a2d366e112dd187b62d261d49eb79b77e386f94644363294"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ad36cfb355e24f1bd37cac88c112cd7730873f20fb0bdaf8ba59eedf8216079f"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:36b3ee798c58ace201289024b52788161e1ea133e4ac93fba7d49da5fec0ef9e"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8a2f084546cc59ea99fda8e070be2fd140c3092dc11524a71aa8f0f3d5a55ca"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e4461d0f003a0aa9be2bdd1b798a041f177189c1a0f7619fe8c95ad08d9a45d7"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8db715ebe3bb7d86d77ac1826f7d67ec11a70dbd2376b7cc214199360517b641"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:793968759cd0d96cac1e367afd70c235867831983f876a53389ad869b043c948"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66e6a3af5a75363d2c9a48b07cb27c4ea542938b1a2e93b15a503cdfa8490795"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ef0befbb5d79cf32d0266f5cff01545602344eda89480e1dd88aca964260b18"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1d4acf42190d449d5e89654d5c1ed3a4f17925eec71f05e2a41414689cda02d1"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:a5f446dd5055667aabaee78487f2b5ab72e244f9bc0b2ffebfeec79051679984"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:9dbbeb27f4e70bfd9eec1be5477517365afe05a9b2c441a0b21929ee61048124"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:22806714311a69fd0af9b35b7be97c18a0fc2826e6827dbb3a8c94eac6cf7eeb"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:b34ae4636dfc4e76a438ab826a0d1eed2589ca7d9a1b2d5bb546978ac6485461"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c8370641f1a7f0e0669ddccca22f1da893cef7628396431eb445d46d893e5cd"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c8362467a0fdeccd47935f22c256bec5e6abe543bf0d66e3d3d57a8fb5731863"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11a8c85ef4a07a7638180bf04fe189d12757c696eb41f310d2426895356dcf05"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b316144e85316da2723f9d8dc75bada12fa58489a527091fa1d5a612643d1a0e"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf1ea2e34868f6fbf070e1af291c8180480310173de0b0c43fc38a02929fc0e3"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e546e768d08ad55b20b11dbb78a745151acbd938f8f00d0cfbabe8b0199b9880"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4901165d170a5fde6f589acb90a6b33629ad1ec976d4529e769c6f3d885e3e80"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:618a3d6cae6ef8ec88bb76dd80b83cfe415ad4f1d942ca2a903bf6b6ff97a2da"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ed4eb745efbff0a8e9587d22a84be94a5eb7d2d99c02dacf7bd0911713ed14dd"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6c81e5f372cd0dc5dc4809553d34f832f60a46034a5f187756d9b90586c2c307"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:43fbac5f22e25bee1d482c97474f930a353542855f05c1161fd804c9dc74a09d"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d7faa6f14017c0b1e69f5e2c357b998731ea75a442ab3841c0dbbbfe902d2c4"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:08231ac30a842bd04daabc4d71fddd7e6d26189406d5a69535638e4dcb88fe76"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:044a3e61a7c2dafacae99d1e722cc2d4c05280790ec5a05031b3876809d89a5c"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f26b5bd1079acdb0c7a5645e350fe54d16b17bfc5e71f371c449383d3342e17"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:482103aed1dfe2f3b71a58eff35ba105289b8d862551ea576bd15479aba01f66"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1374f4129f9bcca53a1bba0bb86bf78325a0374577cf7e9e4cd046b1e6f20e24"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:635dc434ff724b178cb192c70016cc0ad25a275228f749ee0daf0eddbc8183b1"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:bc362ee4e314870a70f4ae88772d72d877246537d9f8cb8f7eacf10884862432"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:4832d7d380477521a8c1644bbab6588dfedea5e30a7d967b5fb75977c45fd77f"}, + {file = "rpds_py-0.18.0.tar.gz", hash = "sha256:42821446ee7a76f5d9f71f9e33a4fb2ffd724bb3e7f93386150b61a43115788d"}, ] [[package]] @@ -1455,13 +1461,13 @@ files = [ [[package]] name = "sniffio" -version = "1.3.0" +version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" files = [ - {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, - {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] [[package]] @@ -1513,24 +1519,24 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] name = "tzdata" -version = "2023.4" +version = "2024.1" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" files = [ - {file = "tzdata-2023.4-py2.py3-none-any.whl", hash = "sha256:aa3ace4329eeacda5b7beb7ea08ece826c28d761cda36e747cfbf97996d39bf3"}, - {file = "tzdata-2023.4.tar.gz", hash = "sha256:dd54c94f294765522c77399649b4fefd95522479a664a0cec87f41bebc6148c9"}, + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, ] [[package]] @@ -1688,4 +1694,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "9d53af4fe5cca16b6ce5a61f3f7d286b561af9920f77163e00e4e59eacc9e4f6" +content-hash = "b013b7907a98562bd2ca96b4597f005a32651e3d9d3721b7b500ab3dc168cd3d" diff --git a/airbyte-integrations/bases/connector-acceptance-test/pyproject.toml b/airbyte-integrations/bases/connector-acceptance-test/pyproject.toml index 41522609d0cbd..5ddea88b8b220 100644 --- a/airbyte-integrations/bases/connector-acceptance-test/pyproject.toml +++ b/airbyte-integrations/bases/connector-acceptance-test/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "connector-acceptance-test" -version = "3.6.0" +version = "3.7.0" description = "Contains acceptance tests for connectors." authors = ["Airbyte "] license = "MIT" @@ -14,6 +14,7 @@ homepage = "https://github.com/airbytehq/airbyte" python = "^3.10" airbyte-protocol-models = "<1.0.0" dagger-io = "==0.9.6" +beartype = "<0.17.2" # dagger 0.9.6 doesn't pin this but doesn't play well with it. We should probably upgrade dagger PyYAML = "~=6.0" icdiff = "~=1.9" inflection = "~=0.5" @@ -38,7 +39,7 @@ docker = ">=6,<7" # Pinning requests and urllib3 to avoid an issue with dockerpy and requests 2. # Related issue: https://github.com/docker/docker-py/issues/3113 urllib3 = "<2.0" -requests = "<2.29.0" +requests = "^2.31" pytest-xdist = "^3.3.1" [tool.poe.tasks] diff --git a/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_backward_compatibility.py b/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_backward_compatibility.py index 306622325f2e3..3119a8d43511e 100644 --- a/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_backward_compatibility.py +++ b/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_backward_compatibility.py @@ -1597,6 +1597,28 @@ def test_validate_previous_configs(previous_connector_spec, actual_connector_spe ) }, ), + Transition( + name="Given the same types, the order does not matter", + should_fail=False, + previous={ + "test_stream": AirbyteStream.parse_obj( + { + "name": "test_stream", + "json_schema": {"properties": {"user": {"type": "object", "properties": {"username": {"type": ["null", "string"]}}}}}, + "supported_sync_modes": ["full_refresh"], + } + ) + }, + current={ + "test_stream": AirbyteStream.parse_obj( + { + "name": "test_stream", + "json_schema": {"properties": {"user": {"type": "object", "properties": {"username": {"type": ["string", "null"]}}}}}, + "supported_sync_modes": ["full_refresh"], + } + ) + }, + ), Transition( name="Changing 'type' field to list should not fail.", should_fail=False, diff --git a/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_core.py b/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_core.py index 22395658e043d..9b6d1c6ea45f0 100644 --- a/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_core.py +++ b/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_core.py @@ -12,7 +12,12 @@ AirbyteLogMessage, AirbyteMessage, AirbyteRecordMessage, + AirbyteStateBlob, + AirbyteStateMessage, + AirbyteStateStats, + AirbyteStateType, AirbyteStream, + AirbyteStreamState, AirbyteStreamStatus, AirbyteStreamStatusTraceMessage, AirbyteTraceMessage, @@ -783,6 +788,7 @@ async def test_read(mocker, schema, ignored_fields, expect_records_config, recor should_validate_schema=True, should_validate_data_points=False, should_validate_stream_statuses=False, + should_validate_state_messages=False, should_fail_on_extra_columns=False, empty_streams=set(), expected_records_by_stream=expected_records_by_stream, @@ -1482,6 +1488,7 @@ async def test_read_validate_async_output_stream_statuses(mocker): should_validate_schema=False, should_validate_data_points=False, should_validate_stream_statuses=True, + should_validate_state_messages=False, should_fail_on_extra_columns=False, empty_streams=set(), expected_records_by_stream={}, @@ -1582,6 +1589,7 @@ async def test_read_validate_stream_statuses_exceptions(mocker, output): should_validate_schema=False, should_validate_data_points=False, should_validate_stream_statuses=True, + should_validate_state_messages=False, should_fail_on_extra_columns=False, empty_streams=set(), expected_records_by_stream={}, @@ -1699,3 +1707,105 @@ async def test_all_supported_file_types_present(mocker, file_types_found, should await t.test_all_supported_file_types_present(certified_file_based_connector=True, inputs=config) else: await t.test_all_supported_file_types_present(certified_file_based_connector=True, inputs=config) + +@pytest.mark.parametrize( + ("state_message_params", "should_fail"), + ( + ({"type": AirbyteStateType.STREAM, "sourceStats": AirbyteStateStats(recordCount=1.0)}, False), + ({"type": AirbyteStateType.STREAM}, True), + ({"type": AirbyteStateType.LEGACY}, True), + ({}, True), # Case where state was not emitted + + ), +) +async def test_read_validate_async_output_state_messages(mocker, state_message_params, should_fail): + configured_catalog = ConfiguredAirbyteCatalog( + streams=[ + ConfiguredAirbyteStream( + stream=AirbyteStream.parse_obj({"name": f"test_stream_0", "json_schema": {}, "supported_sync_modes": ["full_refresh"]}), + sync_mode="full_refresh", + destination_sync_mode="overwrite", + ) + ] + ) + stream = AirbyteStreamState( + stream_descriptor=StreamDescriptor(name='test_stream_0', namespace=None), + stream_state=AirbyteStateBlob(__ab_full_refresh_state_message=True) + ) + async_stream_output = [ + AirbyteMessage( + type=Type.TRACE, + trace=AirbyteTraceMessage( + type=TraceType.STREAM_STATUS, + emitted_at=1, + stream_status=AirbyteStreamStatusTraceMessage( + stream_descriptor=StreamDescriptor(name="test_stream_0"), status=AirbyteStreamStatus.STARTED + ), + ), + ), + AirbyteMessage( + type=Type.TRACE, + trace=AirbyteTraceMessage( + type=TraceType.STREAM_STATUS, + emitted_at=114, + stream_status=AirbyteStreamStatusTraceMessage( + stream_descriptor=StreamDescriptor(name="test_stream_0"), status=AirbyteStreamStatus.RUNNING + ), + ), + ), + AirbyteMessage(type=Type.RECORD, record=AirbyteRecordMessage(stream="test_stream_0", data={"a": 1}, emitted_at=111)), + AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(stream=stream, **state_message_params)), + AirbyteMessage( + type=Type.TRACE, + trace=AirbyteTraceMessage( + type=TraceType.STREAM_STATUS, + emitted_at=120, + stream_status=AirbyteStreamStatusTraceMessage( + stream_descriptor=StreamDescriptor(name="test_stream_0"), status=AirbyteStreamStatus.COMPLETE + ), + ), + ) + ] + + if not state_message_params: + async_stream_output.pop() + print(async_stream_output) + docker_runner_mock = mocker.MagicMock(call_read=mocker.AsyncMock(return_value=async_stream_output)) + + t = test_core.TestBasicRead() + + if should_fail: + with pytest.raises((AssertionError, AttributeError)): + await t.test_read( + connector_config=None, + configured_catalog=configured_catalog, + expect_records_config=_DEFAULT_RECORD_CONFIG, + should_validate_schema=False, + should_validate_data_points=False, + should_validate_stream_statuses=True, + should_validate_state_messages=True, + should_fail_on_extra_columns=False, + empty_streams=set(), + expected_records_by_stream={}, + docker_runner=docker_runner_mock, + ignored_fields=None, + detailed_logger=MagicMock(), + certified_file_based_connector=False + ) + else: + await t.test_read( + connector_config=None, + configured_catalog=configured_catalog, + expect_records_config=_DEFAULT_RECORD_CONFIG, + should_validate_schema=False, + should_validate_data_points=False, + should_validate_stream_statuses=True, + should_validate_state_messages=True, + should_fail_on_extra_columns=False, + empty_streams=set(), + expected_records_by_stream={}, + docker_runner=docker_runner_mock, + ignored_fields=None, + detailed_logger=MagicMock(), + certified_file_based_connector=False + ) diff --git a/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_json_schema_helper.py b/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_json_schema_helper.py index a8f2f884d1f34..89536ae245421 100644 --- a/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_json_schema_helper.py +++ b/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_json_schema_helper.py @@ -183,7 +183,7 @@ class Root(BaseModel): @pytest.mark.parametrize( - "object, pathes", + "object, paths", [ ({}, []), ({"a": 12}, ["/a"]), @@ -197,12 +197,12 @@ class Root(BaseModel): ({"a": [[[{"b": 12}, {"b": 15}]]]}, ["/a", "/a/[]", "/a/[]/[]", "/a/[]/[]/[]", "/a/[]/[]/[]/b"]), ], ) -def test_get_object_strucutre(object, pathes): - assert get_object_structure(object) == pathes +def test_get_object_strucutre(object, paths): + assert get_object_structure(object) == paths @pytest.mark.parametrize( - "schema, pathes", + "schema, paths", [ ({"type": "object", "properties": {"a": {"type": "string"}}}, ["/a"]), ({"properties": {"a": {"type": "string"}}}, ["/a"]), @@ -229,8 +229,8 @@ def test_get_object_strucutre(object, pathes): ({"type": "array", "items": {"type": "object", "additionalProperties": {"type": "string"}}}, ["/[]"]), ], ) -def test_get_expected_schema_structure(schema, pathes): - assert get_expected_schema_structure(schema) == pathes +def test_get_expected_schema_structure(schema, paths): + assert paths == get_expected_schema_structure(schema) @pytest.mark.parametrize( diff --git a/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_utils.py b/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_utils.py index b99ae8389b22c..4316d871ca715 100644 --- a/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_utils.py +++ b/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_utils.py @@ -217,14 +217,14 @@ def test_load_json(self): f.write(json.dumps(self.VALID_SPEC)) f.flush() actual = common.load_yaml_or_json_path(Path(f.name)) - assert self.VALID_SPEC == actual + assert actual == self.VALID_SPEC def test_load_yaml(self): with tempfile.NamedTemporaryFile("w", suffix=".yaml") as f: f.write(yaml.dump(self.VALID_SPEC)) f.flush() actual = common.load_yaml_or_json_path(Path(f.name)) - assert self.VALID_SPEC == actual + assert actual == self.VALID_SPEC def test_load_other(self): with tempfile.NamedTemporaryFile("w", suffix=".txt") as f: diff --git a/airbyte-integrations/bases/readme.md b/airbyte-integrations/bases/readme.md deleted file mode 100644 index 46f0e1c3b87b0..0000000000000 --- a/airbyte-integrations/bases/readme.md +++ /dev/null @@ -1,6 +0,0 @@ -# airbyte-integrations:bases - -This directory contains modules that contain shared code or can be inherited when writing connectors. - -## Key Files -todo (cgardens) - each of these submodules in this directory should have their own readmes. diff --git a/airbyte-integrations/connector-templates/destination-java/.dockerignore.hbs b/airbyte-integrations/connector-templates/destination-java/.dockerignore.hbs deleted file mode 100644 index 65c7d0ad3e73c..0000000000000 --- a/airbyte-integrations/connector-templates/destination-java/.dockerignore.hbs +++ /dev/null @@ -1,3 +0,0 @@ -* -!Dockerfile -!build diff --git a/airbyte-integrations/connector-templates/destination-java/Destination.java.hbs b/airbyte-integrations/connector-templates/destination-java/Destination.java.hbs deleted file mode 100644 index f5785a3be5926..0000000000000 --- a/airbyte-integrations/connector-templates/destination-java/Destination.java.hbs +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.{{snakeCase name}}; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.BaseConnector; -import io.airbyte.cdk.integrations.base.AirbyteMessageConsumer; -import io.airbyte.cdk.integrations.base.Destination; -import io.airbyte.cdk.integrations.base.IntegrationRunner; -import io.airbyte.protocol.models.AirbyteConnectionStatus; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.util.function.Consumer; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class {{properCase name}}Destination extends BaseConnector implements Destination { - - private static final Logger LOGGER = LoggerFactory.getLogger({{properCase name}}Destination.class); - - public static void main(String[] args) throws Exception { - new IntegrationRunner(new {{properCase name}}Destination()).run(args); - } - - @Override - public AirbyteConnectionStatus check(JsonNode config) throws Exception { - // TODO - return null; - } - - @Override - public AirbyteMessageConsumer getConsumer(JsonNode config, - ConfiguredAirbyteCatalog configuredCatalog, - Consumer outputRecordCollector) throws Exception{ - // TODO - return null; - } - -} diff --git a/airbyte-integrations/connector-templates/destination-java/DestinationAcceptanceTest.java.hbs b/airbyte-integrations/connector-templates/destination-java/DestinationAcceptanceTest.java.hbs deleted file mode 100644 index 1663f10664290..0000000000000 --- a/airbyte-integrations/connector-templates/destination-java/DestinationAcceptanceTest.java.hbs +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.{{snakeCase name}}; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.standardtest.destination.DestinationAcceptanceTest; -import java.io.IOException; -import java.util.List; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class {{properCase name}}DestinationAcceptanceTest extends DestinationAcceptanceTest { - - private static final Logger LOGGER = LoggerFactory.getLogger({{properCase name}}DestinationAcceptanceTest.class); - - private JsonNode configJson; - - @Override - protected String getImageName() { - return "airbyte/destination-{{snakeCase name}}:dev"; - } - - @Override - protected JsonNode getConfig() { - // TODO: Generate the configuration JSON file to be used for running the destination during the test - // configJson can either be static and read from secrets/config.json directly - // or created in the setup method - return configJson; - } - - @Override - protected JsonNode getFailCheckConfig() { - // TODO return an invalid config which, when used to run the connector's check connection operation, - // should result in a failed connection check - return null; - } - - @Override - protected List retrieveRecords(TestDestinationEnv testEnv, - String streamName, - String namespace, - JsonNode streamSchema) - throws IOException { - // TODO Implement this method to retrieve records which written to the destination by the connector. - // Records returned from this method will be compared against records provided to the connector - // to verify they were written correctly - return null; - } - - @Override - protected void setup(TestDestinationEnv testEnv) { - // TODO Implement this method to run any setup actions needed before every test case - } - - @Override - protected void tearDown(TestDestinationEnv testEnv) { - // TODO Implement this method to run any cleanup actions needed after every test case - } - -} diff --git a/airbyte-integrations/connector-templates/destination-java/README.md.hbs b/airbyte-integrations/connector-templates/destination-java/README.md.hbs deleted file mode 100644 index 390002dca65d1..0000000000000 --- a/airbyte-integrations/connector-templates/destination-java/README.md.hbs +++ /dev/null @@ -1,71 +0,0 @@ -# Destination {{capitalCase name}} - -This is the repository for the {{capitalCase name}} destination connector in Java. -For information about how to use this connector within Airbyte, see [the User Documentation](https://docs.airbyte.com/integrations/destinations/{{dashCase name}}). - -## Local development - -#### Building via Gradle -From the Airbyte repository root, run: -``` -./gradlew :airbyte-integrations:connectors:destination-{{dashCase name}}:build -``` - -#### Create credentials -**If you are a community contributor**, generate the necessary credentials and place them in `secrets/config.json` conforming to the spec file in `src/main/resources/spec.json`. -Note that the `secrets` directory is git-ignored by default, so there is no danger of accidentally checking in sensitive information. - -**If you are an Airbyte core member**, follow the [instructions](https://docs.airbyte.com/connector-development#using-credentials-in-ci) to set up the credentials. - -### Locally running the connector docker image - -#### Build -Build the connector image via Gradle: -``` -./gradlew :airbyte-integrations:connectors:destination-{{dashCase name}}:buildConnectorImage -``` -Once built, the docker image name and tag will be `airbyte/source-{{dashCase name}}:dev`. - - -#### Run -Then run any of the connector commands as follows: -``` -docker run --rm airbyte/destination-{{dashCase name}}:dev spec -docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-{{dashCase name}}:dev check --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-{{dashCase name}}:dev discover --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-{{dashCase name}}:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json -``` - -## Testing -We use `JUnit` for Java tests. - -### Unit and Integration Tests -Place unit tests under `src/test/io/airbyte/integrations/destinations/{{snakeCase name}}`. - -#### Acceptance Tests -Airbyte has a standard test suite that all destination connectors must pass. Implement the `TODO`s in -`src/test-integration/java/io/airbyte/integrations/destinations/{{snakeCase name}}DestinationAcceptanceTest.java`. - -### Using gradle to run tests -All commands should be run from airbyte project root. -To run unit tests: -``` -./gradlew :airbyte-integrations:connectors:destination-{{dashCase name}}:check -``` -To run acceptance and custom integration tests: -``` -./gradlew :airbyte-integrations:connectors:destination-{{dashCase name}}:integrationTest -``` - -## Dependency Management - -### Publishing a new version of the connector -You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? -1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=destination-{{dashCase name}} test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). -3. Make sure the `metadata.yaml` content is up to date. -4. Make the connector documentation and its changelog is up to date (`docs/integrations/destinations/{{dashCase name}}.md`). -5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). -6. Pat yourself on the back for being an awesome contributor. -7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. - diff --git a/airbyte-integrations/connector-templates/destination-java/build.gradle.hbs b/airbyte-integrations/connector-templates/destination-java/build.gradle.hbs deleted file mode 100644 index a999413ffe2a5..0000000000000 --- a/airbyte-integrations/connector-templates/destination-java/build.gradle.hbs +++ /dev/null @@ -1,20 +0,0 @@ -plugins { - id 'application' - id 'airbyte-java-connector' -} - -airbyteJavaConnector { - cdkVersionRequired = '0.2.0' - features = ['db-destinations'] - useLocalCdk = true -} - -airbyteJavaConnector.addCdkDependencies() - -application { - mainClass = 'io.airbyte.integrations.destination.{{snakeCase name}}.{{properCase name}}Destination' -} - -dependencies { - implementation libs.airbyte.protocol -} diff --git a/airbyte-integrations/connector-templates/destination-java/doc.md.hbs b/airbyte-integrations/connector-templates/destination-java/doc.md.hbs deleted file mode 100644 index 85a8697b44a66..0000000000000 --- a/airbyte-integrations/connector-templates/destination-java/doc.md.hbs +++ /dev/null @@ -1,52 +0,0 @@ -# {{capitalCase name}} - -TODO: update this doc - -## Sync overview - -### Output schema - -Is the output schema fixed (e.g: for an API like Stripe)? If so, point to the connector's schema (e.g: link to Stripe’s documentation) or describe the schema here directly (e.g: include a diagram or paragraphs describing the schema). - -Describe how the connector's schema is mapped to Airbyte concepts. An example description might be: "MagicDB tables become Airbyte Streams and MagicDB columns become Airbyte Fields. In addition, an extracted\_at column is appended to each row being read." - -### Data type mapping - -This section should contain a table mapping each of the connector's data types to Airbyte types. At the moment, Airbyte uses the same types used by [JSONSchema](https://json-schema.org/understanding-json-schema/reference/index.html). `string`, `date-time`, `object`, `array`, `boolean`, `integer`, and `number` are the most commonly used data types. - -| Integration Type | Airbyte Type | Notes | -| :--- | :--- | :--- | - - -### Features - -This section should contain a table with the following format: - -| Feature | Supported?(Yes/No) | Notes | -| :--- | :--- | :--- | -| Full Refresh Sync | | | -| Incremental Sync | | | -| Replicate Incremental Deletes | | | -| For databases, WAL/Logical replication | | | -| SSL connection | | | -| SSH Tunnel Support | | | -| (Any other source-specific features) | | | - -### Performance considerations - -Could this connector hurt the user's database/API/etc... or put too much strain on it in certain circumstances? For example, if there are a lot of tables or rows in a table? What is the breaking point (e.g: 100mm> records)? What can the user do to prevent this? (e.g: use a read-only replica, or schedule frequent syncs, etc..) - -## Getting started - -### Requirements - -* What versions of this connector does this implementation support? (e.g: `postgres v3.14 and above`) -* What configurations, if any, are required on the connector? (e.g: `buffer_size > 1024`) -* Network accessibility requirements -* Credentials/authentication requirements? (e.g: A DB user with read permissions on certain tables) - -### Setup guide - -For each of the above high-level requirements as appropriate, add or point to a follow-along guide. See existing source or destination guides for an example. - -For each major cloud provider we support, also add a follow-along guide for setting up Airbyte to connect to that destination. See the Postgres destination guide for an example of what this should look like. diff --git a/airbyte-integrations/connector-templates/destination-java/metadata.yaml.hbs b/airbyte-integrations/connector-templates/destination-java/metadata.yaml.hbs deleted file mode 100644 index 5f67617a6fe24..0000000000000 --- a/airbyte-integrations/connector-templates/destination-java/metadata.yaml.hbs +++ /dev/null @@ -1,25 +0,0 @@ -data: - allowedHosts: - hosts: - - TODO # Please change to the hostname of the source. - registries: - oss: - enabled: true - cloud: - enabled: false - connectorSubtype: database - connectorType: destination - definitionId: {{generateDefinitionId}} - dockerImageTag: 0.1.0 - dockerRepository: airbyte/destination-{{dashCase name}} - githubIssueLabel: destination-{{dashCase name}} - icon: {{dashCase name}}.svg - license: MIT - name: {{capitalCase name}} - releaseDate: TODO - releaseStage: alpha - supportLevel: community - documentationUrl: https://docs.airbyte.com/integrations/destinations/{{dashCase name}} - tags: - - language:python -metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connector-templates/destination-java/spec.json.hbs b/airbyte-integrations/connector-templates/destination-java/spec.json.hbs deleted file mode 100644 index b56ea8a843fc0..0000000000000 --- a/airbyte-integrations/connector-templates/destination-java/spec.json.hbs +++ /dev/null @@ -1,21 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.com/integrations/destinations/{{dashCase name}}", - "supportsIncremental": TODO, - "supported_destination_sync_modes": ["TODO"], - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "TODO", - "type": "object", - "required": [ - "TODO" - ], - "properties": { - "TODO_sample_field": { - "title": "Sample Field", - "type": "string", - "description": "", - "examples": [""] - } - } - } -} diff --git a/airbyte-integrations/connector-templates/destination-python/metadata.yaml.hbs b/airbyte-integrations/connector-templates/destination-python/metadata.yaml.hbs index 170fee939b3f6..72b14c97ae7dc 100644 --- a/airbyte-integrations/connector-templates/destination-python/metadata.yaml.hbs +++ b/airbyte-integrations/connector-templates/destination-python/metadata.yaml.hbs @@ -11,7 +11,7 @@ data: # Please update to the latest version of the connector base image. # Please use the full address with sha256 hash to guarantee build reproducibility. # https://hub.docker.com/r/airbyte/python-connector-base - baseImage: docker.io/airbyte/python-connector-base:1.0.0@sha256:dd17e347fbda94f7c3abff539be298a65af2d7fc27a307d89297df1081a45c27 + baseImage: docker.io/airbyte/python-connector-base:1.2.0@sha256:c22a9d97464b69d6ef01898edf3f8612dc11614f05a84984451dde195f337db9 connectorSubtype: database connectorType: destination definitionId: {{generateDefinitionId}} diff --git a/airbyte-integrations/connector-templates/destination-python/setup.py b/airbyte-integrations/connector-templates/destination-python/setup.py index ac2c4de6f740e..7fb4bace9d9e1 100644 --- a/airbyte-integrations/connector-templates/destination-python/setup.py +++ b/airbyte-integrations/connector-templates/destination-python/setup.py @@ -9,7 +9,7 @@ "airbyte-cdk", ] -TEST_REQUIREMENTS = ["pytest~=6.2"] +TEST_REQUIREMENTS = ["pytest"] setup( name="destination_{{snakeCase name}}", diff --git a/airbyte-integrations/connector-templates/generator/build.gradle b/airbyte-integrations/connector-templates/generator/build.gradle index 2a93a2f67a08d..aef81870c0548 100644 --- a/airbyte-integrations/connector-templates/generator/build.gradle +++ b/airbyte-integrations/connector-templates/generator/build.gradle @@ -26,10 +26,5 @@ def addScaffoldTemplateTask(name, packageName, outputDirName, scaffoldParams=[]) generateScaffolds.configure { dependsOn task } } - addScaffoldTemplateTask('Python Source', 'scaffold-source-python', 'source-scaffold-source-python') -addScaffoldTemplateTask('Python HTTP API Source', 'scaffold-source-http', 'source-scaffold-source-http') -addScaffoldTemplateTask('Java JDBC Source', 'scaffold-java-jdbc', 'source-scaffold-java-jdbc') addScaffoldTemplateTask('Python Destination', 'scaffold-destination-python', 'destination-scaffold-destination-python') -// TODO: enable Singer template testing -//addScaffoldTask('source-python-singer', ['tap-exchangeratesapi']) diff --git a/airbyte-integrations/connector-templates/generator/plopfile.js b/airbyte-integrations/connector-templates/generator/plopfile.js index f1a97cc3f9429..055afa300f7f6 100644 --- a/airbyte-integrations/connector-templates/generator/plopfile.js +++ b/airbyte-integrations/connector-templates/generator/plopfile.js @@ -1,15 +1,21 @@ -'use strict'; -const path = require('path'); -const uuid = require('uuid'); -const capitalCase = require('capital-case'); -const changeCase = require('change-case') -const getSuccessMessage = function(connectorName, outputPath, additionalMessage){ - return ` +"use strict"; +const path = require("path"); +const uuid = require("uuid"); +const capitalCase = require("capital-case"); +const changeCase = require("change-case"); +const getSuccessMessage = function ( + connectorName, + outputPath, + additionalMessage +) { + return ` 🚀 🚀 🚀 🚀 🚀 🚀 Success! -Your ${connectorName} connector has been created at .${path.resolve(outputPath)}. +Your ${connectorName} connector has been created at .${path.resolve( + outputPath +)}. Follow the TODOs in the generated module to implement your connector. @@ -19,129 +25,137 @@ https://discuss.airbyte.io/c/connector-development/16 We're always happy to provide any support! ${additionalMessage || ""} -` -} +`; +}; module.exports = function (plop) { - const docRoot = '../../../docs/integrations'; + const connectorAcceptanceTestFilesInputRoot = + "../connector_acceptance_test_files"; - const connectorAcceptanceTestFilesInputRoot = '../connector_acceptance_test_files'; + const pythonSourceInputRoot = "../source-python"; + const lowCodeSourceInputRoot = "../source-low-code"; + const pythonDestinationInputRoot = "../destination-python"; - const pythonSourceInputRoot = '../source-python'; - const singerSourceInputRoot = '../source-singer'; - const genericSourceInputRoot = '../source-generic'; - const genericJdbcSourceInputRoot = '../source-java-jdbc'; - const httpApiInputRoot = '../source-python-http-api'; - const lowCodeSourceInputRoot = '../source-configuration-based'; - const javaDestinationInput = '../destination-java'; - const pythonDestinationInputRoot = '../destination-python'; + const outputDir = "../../connectors"; - const outputDir = '../../connectors'; const pythonSourceOutputRoot = `${outputDir}/source-{{dashCase name}}`; - const singerSourceOutputRoot = `${outputDir}/source-{{dashCase name}}-singer`; - const genericSourceOutputRoot = `${outputDir}/source-{{dashCase name}}`; - const genericJdbcSourceOutputRoot = `${outputDir}/source-{{dashCase name}}`; - const httpApiOutputRoot = `${outputDir}/source-{{dashCase name}}`; - const javaDestinationOutputRoot = `${outputDir}/destination-{{dashCase name}}`; const pythonDestinationOutputRoot = `${outputDir}/destination-{{dashCase name}}`; - const sourceConnectorImagePrefix = 'airbyte/source-' - const sourceConnectorImageTag = 'dev' - const defaultSpecPathFolderPrefix = 'source_' - const specFileName = 'spec.yaml' + const sourceConnectorImagePrefix = "airbyte/source-"; + const sourceConnectorImageTag = "dev"; + const defaultSpecPathFolderPrefix = "source_"; + + const specFileName = "spec.yaml"; - plop.setHelper('capitalCase', function(name) { + plop.setHelper("capitalCase", function (name) { return capitalCase.capitalCase(name); }); - plop.setHelper('generateDefinitionId', function() { + plop.setHelper("generateDefinitionId", function () { // if the env var CI is set then return a fixed FAKE uuid so that the tests are deterministic if (process.env.CI) { - return 'FAKE-UUID-0000-0000-000000000000'; + return "FAKE-UUID-0000-0000-000000000000"; } return uuid.v4().toLowerCase(); }); - plop.setHelper('connectorImage', function() { - let suffix = "" - if (typeof this.connectorImageNameSuffix !== 'undefined') { - suffix = this.connectorImageNameSuffix + plop.setHelper("connectorImage", function () { + let suffix = ""; + if (typeof this.connectorImageNameSuffix !== "undefined") { + suffix = this.connectorImageNameSuffix; } - return `${sourceConnectorImagePrefix}${changeCase.paramCase(this.name)}${suffix}:${sourceConnectorImageTag}` + return `${sourceConnectorImagePrefix}${changeCase.paramCase(this.name)}${suffix}:${sourceConnectorImageTag}`; }); - plop.setHelper('specPath', function() { - let suffix = "" - if (typeof this.specPathFolderSuffix !== 'undefined') { - suffix = this.specPathFolderSuffix + plop.setHelper("specPath", function () { + let suffix = ""; + if (typeof this.specPathFolderSuffix !== "undefined") { + suffix = this.specPathFolderSuffix; } - let inSubFolder = true - if (typeof this.inSubFolder !== 'undefined') { - inSubFolder = this.inSubFolder + let inSubFolder = true; + if (typeof this.inSubFolder !== "undefined") { + inSubFolder = this.inSubFolder; } if (inSubFolder) { - return `${defaultSpecPathFolderPrefix}${changeCase.snakeCase(this.name)}${suffix}/${specFileName}` + return `${defaultSpecPathFolderPrefix}${changeCase.snakeCase( + this.name + )}${suffix}/${specFileName}`; } else { - return specFileName + return specFileName; } }); - - plop.setActionType('emitSuccess', function(answers, config, plopApi){ - console.log(getSuccessMessage(answers.name, plopApi.renderString(config.outputPath, answers), config.message)); + plop.setActionType("emitSuccess", function (answers, config, plopApi) { + console.log( + getSuccessMessage( + answers.name, + plopApi.renderString(config.outputPath, answers), + config.message + ) + ); }); - plop.setGenerator('Python Destination', { - description: 'Generate a destination connector written in Python', + plop.setGenerator("Python CDK Destination", { + description: "Generate a destination connector based on Python CDK.", prompts: [ - {type:'input', name:'name', 'message': 'Connector name e.g: redis'}, + { type: "input", name: "name", message: "Connector name e.g: redis" }, ], actions: [ { abortOnFail: true, - type:'addMany', + type: "addMany", destination: pythonDestinationOutputRoot, base: pythonDestinationInputRoot, templateFiles: `${pythonDestinationInputRoot}/**/**`, }, - {type: 'emitSuccess', outputPath: pythonDestinationOutputRoot} - ] - }) + { type: "emitSuccess", outputPath: pythonDestinationOutputRoot }, + ], + }); - plop.setGenerator('Python HTTP API Source', { - description: 'Generate a Source that pulls data from a synchronous HTTP API.', + plop.setGenerator("Python CDK Source", { + description: + "Generate a source connector based on Python CDK.", prompts: [ - {type: 'input', name: 'name', message: 'Source name e.g: "google-analytics"'}, + { + type: "input", + name: "name", + message: 'Source name e.g: "google-analytics"', + }, ], actions: [ { abortOnFail: true, - type:'addMany', - destination: httpApiOutputRoot, - base: httpApiInputRoot, - templateFiles: `${httpApiInputRoot}/**/**`, + type: "addMany", + destination: pythonSourceOutputRoot, + base: pythonSourceInputRoot, + templateFiles: `${pythonSourceInputRoot}/**/**`, }, // common acceptance tests { abortOnFail: true, - type:'addMany', - destination: httpApiOutputRoot, + type: "addMany", + destination: pythonSourceOutputRoot, base: connectorAcceptanceTestFilesInputRoot, templateFiles: `${connectorAcceptanceTestFilesInputRoot}/**/**`, }, - {type: 'emitSuccess', outputPath: httpApiOutputRoot} - ] + { type: "emitSuccess", outputPath: pythonSourceOutputRoot }, + ], }); - plop.setGenerator('Configuration Based Source', { - description: 'Generate a Source that is described using a low code configuration file', + plop.setGenerator("Low-code Source", { + description: + "Generate a source based on the low-code CDK.", prompts: [ - {type: 'input', name: 'name', message: 'Source name e.g: "google-analytics"'}, + { + type: "input", + name: "name", + message: 'Source name e.g: "google-analytics"', + }, ], - actions: [ + actions: [ { abortOnFail: true, - type:'addMany', + type: "addMany", destination: pythonSourceOutputRoot, base: lowCodeSourceInputRoot, templateFiles: `${lowCodeSourceInputRoot}/**/**`, @@ -149,189 +163,12 @@ module.exports = function (plop) { // common acceptance tests { abortOnFail: true, - type:'addMany', + type: "addMany", destination: pythonSourceOutputRoot, base: connectorAcceptanceTestFilesInputRoot, templateFiles: `${connectorAcceptanceTestFilesInputRoot}/**/**`, }, - {type: 'emitSuccess', outputPath: pythonSourceOutputRoot} - ] - }); - - plop.setGenerator('Python Singer Source', { - description: 'Generate a Singer-tap-based Airbyte Source.', - prompts: [ - {type: 'input', name: 'name', message: 'Source name, without the "source-" prefix e.g: "google-analytics"', filter: function (name) { - return name.endsWith('-singer') ? name.replace(/-singer$/, '') : name; - }}, - {type: 'input', name: 'tap_name', message: 'Singer tap package e.g "tap-mixpanel"'}, - ], - actions: [ - { - abortOnFail: true, - type:'addMany', - destination: singerSourceOutputRoot, - base: singerSourceInputRoot, - templateFiles: `${singerSourceInputRoot}/**/**`, - }, - // common acceptance tests - { - abortOnFail: true, - type:'addMany', - destination: singerSourceOutputRoot, - base: connectorAcceptanceTestFilesInputRoot, - templateFiles: `${connectorAcceptanceTestFilesInputRoot}/**/**`, - data: { - connectorImageNameSuffix: "-singer", - specPathFolderSuffix: "_singer" - } - }, - { - type:'add', - abortOnFail: true, - templateFile: `${singerSourceInputRoot}/.gitignore.hbs`, - path: `${singerSourceOutputRoot}/.gitignore` - }, - { - type:'add', - abortOnFail: true, - templateFile: `${singerSourceInputRoot}/.dockerignore.hbs`, - path: `${singerSourceOutputRoot}/.dockerignore` - }, - {type: 'emitSuccess', outputPath: singerSourceOutputRoot}, - ] - }); - - plop.setGenerator('Python Source', { - description: 'Generate a minimal Python Airbyte Source Connector that works with any kind of data source. Use this if none of the other Python templates serve your use case.', - prompts: [ - {type: 'input', name: 'name', message: 'Source name, without the "source-" prefix e.g: "google-analytics"'}, - ], - actions: [ - { - abortOnFail: true, - type:'addMany', - destination: pythonSourceOutputRoot, - base: pythonSourceInputRoot, - templateFiles: `${pythonSourceInputRoot}/**/**`, - }, - // common acceptance tests - { - abortOnFail: true, - type:'addMany', - destination: pythonSourceOutputRoot, - base: connectorAcceptanceTestFilesInputRoot, - templateFiles: `${connectorAcceptanceTestFilesInputRoot}/**/**`, - }, - {type: 'emitSuccess', outputPath: pythonSourceOutputRoot, message: "For a checklist of what to do next go to https://docs.airbyte.com/connector-development/tutorials/building-a-python-source"}] - }); - - plop.setGenerator('Java JDBC Source', { - description: 'Generate a minimal Java JDBC Airbyte Source Connector.', - prompts: [ - {type: 'input', name: 'name', message: 'Source name, without the "source-" prefix e.g: "mysql"'}, - ], - actions: [ - { - abortOnFail: true, - type:'addMany', - destination: genericJdbcSourceOutputRoot, - base: genericJdbcSourceInputRoot, - templateFiles: `${genericJdbcSourceInputRoot}/**/**`, - }, - {type: 'emitSuccess', outputPath: genericJdbcSourceOutputRoot} - ] - }); - - plop.setGenerator('Generic Source', { - description: 'Use if none of the other templates apply to your use case.', - prompts: [ - {type: 'input', name: 'name', message: 'Source name, without the "source-" prefix e.g: "google-analytics"'}, - ], - actions: [ - { - abortOnFail: true, - type:'addMany', - destination: genericSourceOutputRoot, - base: genericSourceInputRoot, - templateFiles: `${genericSourceInputRoot}/**/**`, - }, - // common acceptance tests - { - abortOnFail: true, - type:'addMany', - destination: genericSourceOutputRoot, - base: connectorAcceptanceTestFilesInputRoot, - templateFiles: `${connectorAcceptanceTestFilesInputRoot}/**/**`, - data: { - inSubFolder: false - } - }, - {type: 'emitSuccess', outputPath: genericSourceOutputRoot} - ] - }); - - plop.setGenerator('Java Destination', { - description: 'Generate a Java Destination Connector.', - prompts: [ - { - type: 'input', - name: 'name', - message: 'Destination name, without the "destination-" prefix e.g: "google-pubsub"', - }, + { type: "emitSuccess", outputPath: pythonSourceOutputRoot }, ], - actions: [ - // Gradle - { - type: 'add', - abortOnFail: true, - templateFile: `${javaDestinationInput}/build.gradle.hbs`, - path: `${javaDestinationOutputRoot}/build.gradle` - }, - // Docker - { - type: 'add', - abortOnFail: true, - templateFile: `${javaDestinationInput}/.dockerignore.hbs`, - path: `${javaDestinationOutputRoot}/.dockerignore` - }, - // Java - { - type: 'add', - abortOnFail: true, - templateFile: `${javaDestinationInput}/Destination.java.hbs`, - path: `${javaDestinationOutputRoot}/src/main/java/io/airbyte/integrations/destination/{{snakeCase name}}/{{properCase name}}Destination.java` - }, - { - type: 'add', - abortOnFail: true, - templateFile: `${javaDestinationInput}/DestinationAcceptanceTest.java.hbs`, - path: `${javaDestinationOutputRoot}/src/test-integration/java/io/airbyte/integrations/destination/{{snakeCase name}}/{{properCase name}}DestinationAcceptanceTest.java` - }, - // Doc - { - type: 'add', - abortOnFail: true, - templateFile: `${javaDestinationInput}/README.md.hbs`, - path: `${javaDestinationOutputRoot}/README.md` - }, - { - type: 'add', - abortOnFail: true, - templateFile: `${javaDestinationInput}/doc.md.hbs`, - path: `${docRoot}/destinations/{{dashCase name}}.md` - }, - // Definition - { - type: 'add', - abortOnFail: true, - templateFile: `${javaDestinationInput}/spec.json.hbs`, - path: `${javaDestinationOutputRoot}/src/main/resources/spec.json` - }, - { - type: 'emitSuccess', - outputPath: javaDestinationOutputRoot, - } - ] }); }; diff --git a/airbyte-integrations/connector-templates/source-configuration-based/metadata.yaml.hbs b/airbyte-integrations/connector-templates/source-configuration-based/metadata.yaml.hbs deleted file mode 100644 index abb927715e3a4..0000000000000 --- a/airbyte-integrations/connector-templates/source-configuration-based/metadata.yaml.hbs +++ /dev/null @@ -1,35 +0,0 @@ -data: - allowedHosts: - hosts: - - TODO # Please change to the hostname of the source. - registries: - oss: - enabled: true - cloud: - enabled: false - remoteRegistries: - pypi: - enabled: true - packageName: airbyte-source-{{dashCase name}} - connectorBuildOptions: - # Please update to the latest version of the connector base image. - # https://hub.docker.com/r/airbyte/python-connector-base - # Please use the full address with sha256 hash to guarantee build reproducibility. - baseImage: docker.io/airbyte/python-connector-base:1.0.0@sha256:dd17e347fbda94f7c3abff539be298a65af2d7fc27a307d89297df1081a45c27 - connectorSubtype: api - connectorType: source - definitionId: {{generateDefinitionId}} - dockerImageTag: 0.1.0 - dockerRepository: airbyte/source-{{dashCase name}} - githubIssueLabel: source-{{dashCase name}} - icon: {{dashCase name}}.svg - license: MIT - name: {{capitalCase name}} - releaseDate: TODO - releaseStage: alpha - supportLevel: community - documentationUrl: https://docs.airbyte.com/integrations/sources/{{dashCase name}} - tags: - - language:python - - cdk:low-code -metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connector-templates/source-configuration-based/pyproject.toml.hbs b/airbyte-integrations/connector-templates/source-configuration-based/pyproject.toml.hbs deleted file mode 100644 index 40bbebd8c0909..0000000000000 --- a/airbyte-integrations/connector-templates/source-configuration-based/pyproject.toml.hbs +++ /dev/null @@ -1,27 +0,0 @@ -[build-system] -requires = [ "poetry-core>=1.0.0",] -build-backend = "poetry.core.masonry.api" - -[tool.poetry] -version = "0.1.0" -name = "source-{{dashCase name}}" -description = "Source implementation for {{dashCase name}}." -authors = [ "Airbyte ",] -license = "MIT" -readme = "README.md" -documentation = "https://docs.airbyte.com/integrations/sources/{{dashCase name}}" -homepage = "https://airbyte.com" -repository = "https://github.com/airbytehq/airbyte" -packages = [ { include = "source_{{snakeCase name}}", from="src"}, {include = "main.py", from = "src"} ] - -[tool.poetry.dependencies] -python = "^3.9,<3.12" -airbyte-cdk = "^0" - -[tool.poetry.scripts] -source-{{dashCase name}} = "source_{{snakeCase name}}.run:run" - -[tool.poetry.group.dev.dependencies] -requests-mock = "^1.9.3" -pytest-mock = "^3.6.1" -pytest = "^6.1" diff --git a/airbyte-integrations/connector-templates/source-generic/Dockerfile b/airbyte-integrations/connector-templates/source-generic/Dockerfile deleted file mode 100644 index 846e34ccc9617..0000000000000 --- a/airbyte-integrations/connector-templates/source-generic/Dockerfile +++ /dev/null @@ -1,9 +0,0 @@ -FROM scratch - -## TODO Add your dockerfile instructions here -## TODO uncomment the below line. This is required for Kubernetes compatibility. -# ENV AIRBYTE_ENTRYPOINT="update this with the command you use for an entrypoint" - -# Airbyte's build system uses these labels to know what to name and tag the docker images produced by this Dockerfile. -LABEL io.airbyte.name=airbyte/source-{{dashCase name}} -LABEL io.airbyte.version=0.1.0 diff --git a/airbyte-integrations/connector-templates/source-generic/README.md b/airbyte-integrations/connector-templates/source-generic/README.md deleted file mode 100644 index 80f439a81ec53..0000000000000 --- a/airbyte-integrations/connector-templates/source-generic/README.md +++ /dev/null @@ -1,45 +0,0 @@ -# {{capitalCase name}} Source - -This is the repository for the {{capitalCase name}} source connector. -For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.com/integrations/sources/{{dashCase name}}). - -## Local development - -### Prerequisites -* If you are using Python for connector development, minimal required version `= 3.7.0` -* Valid credentials (see the "Create credentials section for instructions) -TODO: _which languages and tools does a user need to develop on this connector? add them to the bullet list above_ - -### Iteration -TODO: _which commands should a developer use to run this connector locally?_ - -### Testing -#### Unit Tests -TODO: _how can a user run unit tests?_ - -#### Integration Tests -TODO: _how can a user run integration tests?_ -_this section is currently under construction -- please reach out to us on Slack for help with setting up Airbyte's standard test suite_ - - -### Locally running the connector docker image - -First, make sure you build the latest Docker image: -``` -docker build . -t airbyte/{{dashCase name}}:dev -``` - -Then run any of the connector commands as follows: -``` -docker run --rm airbyte/source-{{dashCase name}}:dev spec -docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-{{dashCase name}}:dev check --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-{{dashCase name}}:dev discover --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/sample_files:/sample_files airbyte/source-{{dashCase name}}:dev read --config /secrets/config.json --catalog /sample_files/configured_catalog.json -``` - -#### Create credentials -**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/sources/{{dashCase name}}) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `spec.json` file. `secrets` is gitignored by default. - -**If you are an Airbyte core member**, copy the credentials from Lastpass under the secret name `source {{dashCase name}} test creds` -and place them into `secrets/config.json`. diff --git a/airbyte-integrations/connector-templates/source-generic/metadata.yaml.hbs b/airbyte-integrations/connector-templates/source-generic/metadata.yaml.hbs deleted file mode 100644 index 59f8bcf9b5805..0000000000000 --- a/airbyte-integrations/connector-templates/source-generic/metadata.yaml.hbs +++ /dev/null @@ -1,23 +0,0 @@ -data: - allowedHosts: - hosts: - - TODO # Please change to the hostname of the source. - registries: - oss: - enabled: true - cloud: - enabled: false - connectorSubtype: api - connectorType: source - definitionId: {{generateDefinitionId}} - dockerImageTag: 0.1.0 - dockerRepository: airbyte/source-{{dashCase name}} - githubIssueLabel: source-{{dashCase name}} - icon: {{dashCase name}}.svg - license: MIT - name: {{capitalCase name}} - releaseDate: TODO - releaseStage: alpha - supportLevel: community - documentationUrl: https://docs.airbyte.com/integrations/sources/{{dashCase name}} -metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connector-templates/source-java-jdbc/.dockerignore b/airbyte-integrations/connector-templates/source-java-jdbc/.dockerignore deleted file mode 100644 index e4fbece78752f..0000000000000 --- a/airbyte-integrations/connector-templates/source-java-jdbc/.dockerignore +++ /dev/null @@ -1,3 +0,0 @@ -* -!Dockerfile -!build/distributions diff --git a/airbyte-integrations/connector-templates/source-java-jdbc/README.md b/airbyte-integrations/connector-templates/source-java-jdbc/README.md deleted file mode 100644 index 47e23b91d2613..0000000000000 --- a/airbyte-integrations/connector-templates/source-java-jdbc/README.md +++ /dev/null @@ -1,72 +0,0 @@ -# Source {{capitalCase name}} - -This is the repository for the {{capitalCase name}} source connector in Java. -For information about how to use this connector within Airbyte, see [the User Documentation](https://docs.airbyte.com/integrations/sources/{{dashCase name}}). - -## Local development - -#### Building via Gradle -From the Airbyte repository root, run: -``` -./gradlew :airbyte-integrations:connectors:source-{{dashCase name}}:build -``` - -#### Create credentials -**If you are a community contributor**, generate the necessary credentials and place them in `secrets/config.json` conforming to the spec file in `src/main/resources/spec.json`. -Note that the `secrets` directory is git-ignored by default, so there is no danger of accidentally checking in sensitive information. - -**If you are an Airbyte core member**, follow the [instructions](https://docs.airbyte.com/connector-development#using-credentials-in-ci) to set up the credentials. - -### Locally running the connector docker image - -#### Build -Build the connector image via Gradle: -``` -./gradlew :airbyte-integrations:connectors:source-{{dashCase name}}:buildConnectorImage -``` - -Once built, the docker image name and tag will be `airbyte/source-{{dashCase name}}:dev`. - -#### Run -Then run any of the connector commands as follows: -``` -docker run --rm airbyte/source-{{dashCase name}}:dev spec -docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-{{dashCase name}}:dev check --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-{{dashCase name}}:dev discover --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-{{dashCase name}}:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json -``` - -## Testing -We use `JUnit` for Java tests. - -### Unit and Integration Tests -Place unit tests under `src/test/...` -Place integration tests in `src/test-integration/...` - -#### Acceptance Tests -Airbyte has a standard test suite that all source connectors must pass. Implement the `TODO`s in -`src/test-integration/java/io/airbyte/integrations/sources/{{snakeCase name}}SourceAcceptanceTest.java`. - -### Using gradle to run tests -All commands should be run from airbyte project root. -To run unit tests: -``` -./gradlew :airbyte-integrations:connectors:source-{{dashCase name}}:unitTest -``` -To run acceptance and custom integration tests: -``` -./gradlew :airbyte-integrations:connectors:source-{{dashCase name}}:integrationTest -``` - -## Dependency Management - -### Publishing a new version of the connector -You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? -1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-{{dashCase name}} test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). -3. Make sure the `metadata.yaml` content is up to date. -4. Make the connector documentation and its changelog is up to date (`docs/integrations/sources/{{dashCase name}}.md`). -5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). -6. Pat yourself on the back for being an awesome contributor. -7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. - diff --git a/airbyte-integrations/connector-templates/source-java-jdbc/acceptance-test-config.yml.hbs b/airbyte-integrations/connector-templates/source-java-jdbc/acceptance-test-config.yml.hbs deleted file mode 100644 index 314351c4290ee..0000000000000 --- a/airbyte-integrations/connector-templates/source-java-jdbc/acceptance-test-config.yml.hbs +++ /dev/null @@ -1,8 +0,0 @@ -# See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) -# for more information about how to configure these tests -connector_image: airbyte/source-{{dashCase name}}:dev -acceptance_tests: - spec: - tests: - - spec_path: "src/test-integration/resources/expected_spec.json" - config_path: "src/test-integration/resources/dummy_config.json" diff --git a/airbyte-integrations/connector-templates/source-java-jdbc/build.gradle b/airbyte-integrations/connector-templates/source-java-jdbc/build.gradle deleted file mode 100644 index c1991bdadd160..0000000000000 --- a/airbyte-integrations/connector-templates/source-java-jdbc/build.gradle +++ /dev/null @@ -1,25 +0,0 @@ -plugins { - id 'application' - id 'airbyte-java-connector' -} - -airbyteJavaConnector { - cdkVersionRequired = '0.2.0' - features = ['db-sources'] - useLocalCdk = true -} - -airbyteJavaConnector.addCdkDependencies() - -application { - mainClass = 'io.airbyte.integrations.source.{{dashCase name}}.{{pascalCase name}}Source' -} - -dependencies { - - //TODO Add jdbc driver import here. Ex: implementation 'com.microsoft.sqlserver:mssql-jdbc:8.4.1.jre14' - - testImplementation 'org.apache.commons:commons-lang3:3.11' - - integrationTestJavaImplementation project(':airbyte-integrations:connectors:source-{{dashCase name}}') -} diff --git a/airbyte-integrations/connector-templates/source-java-jdbc/metadata.yaml.hbs b/airbyte-integrations/connector-templates/source-java-jdbc/metadata.yaml.hbs deleted file mode 100644 index 5e325ba27e88b..0000000000000 --- a/airbyte-integrations/connector-templates/source-java-jdbc/metadata.yaml.hbs +++ /dev/null @@ -1,25 +0,0 @@ -data: - allowedHosts: - hosts: - - TODO # Please change to the hostname of the source. - registries: - oss: - enabled: true - cloud: - enabled: false - connectorSubtype: database - connectorType: source - definitionId: {{generateDefinitionId}} - dockerImageTag: 0.1.0 - dockerRepository: airbyte/source-{{dashCase name}} - githubIssueLabel: source-{{dashCase name}} - icon: {{dashCase name}}.svg - license: MIT - name: {{capitalCase name}} - releaseDate: TODO - supportLevel: community - releaseStage: alpha - documentationUrl: https://docs.airbyte.com/integrations/sources/{{dashCase name}} - tags: - - language:java -metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connector-templates/source-java-jdbc/src/main/java/io/airbyte/integrations/source/{{snakeCase name}}/{{pascalCase name}}Source.java.hbs b/airbyte-integrations/connector-templates/source-java-jdbc/src/main/java/io/airbyte/integrations/source/{{snakeCase name}}/{{pascalCase name}}Source.java.hbs deleted file mode 100644 index a6b2045864392..0000000000000 --- a/airbyte-integrations/connector-templates/source-java-jdbc/src/main/java/io/airbyte/integrations/source/{{snakeCase name}}/{{pascalCase name}}Source.java.hbs +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.source.{{snakeCase name}}; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.db.jdbc.JdbcUtils; -import io.airbyte.cdk.db.jdbc.streaming.AdaptiveStreamingQueryConfig; -import io.airbyte.cdk.integrations.base.IntegrationRunner; -import io.airbyte.cdk.integrations.base.Source; -import io.airbyte.cdk.integrations.source.jdbc.AbstractJdbcSource; -import java.sql.JDBCType; -import java.util.Set; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class {{pascalCase name}}Source extends AbstractJdbcSource implements Source { - - private static final Logger LOGGER = LoggerFactory.getLogger({{pascalCase name}}Source.class); - - // TODO insert your driver name. Ex: "com.microsoft.sqlserver.jdbc.SQLServerDriver" - static final String DRIVER_CLASS = "driver_name_here"; - - public {{pascalCase name}}Source() { - // TODO: if the JDBC driver does not support custom fetch size, use NoOpStreamingQueryConfig - // instead of AdaptiveStreamingQueryConfig. - super(DRIVER_CLASS, AdaptiveStreamingQueryConfig::new, JdbcUtils.getDefaultSourceOperations()); - } - - // TODO The config is based on spec.json, update according to your DB - @Override - public JsonNode toDatabaseConfig(final JsonNode config) { - // TODO create DB config. Ex: "Jsons.jsonNode(ImmutableMap.builder().put("username", - // userName).put("password", pas)...build()); - return null; - } - - @Override - public Set getExcludedInternalNameSpaces() { - // TODO Add tables to exclude, Ex "INFORMATION_SCHEMA", "sys", "spt_fallback_db", etc - return Set.of(""); - } - - public static void main(final String[] args) throws Exception { - final Source source = new {{pascalCase name}}Source(); - LOGGER.info("starting source: {}", {{pascalCase name}}Source.class); - new IntegrationRunner(source).run(args); - LOGGER.info("completed source: {}", {{pascalCase name}}Source.class); - } - -} diff --git a/airbyte-integrations/connector-templates/source-java-jdbc/src/main/resources/spec.json.hbs b/airbyte-integrations/connector-templates/source-java-jdbc/src/main/resources/spec.json.hbs deleted file mode 100644 index c839b40a716bd..0000000000000 --- a/airbyte-integrations/connector-templates/source-java-jdbc/src/main/resources/spec.json.hbs +++ /dev/null @@ -1,60 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.com/integrations/sources/{{snakeCase name}}", - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "{{pascalCase name}} Source Spec", - "type": "object", - "required": ["host", "port", "database", "username", "replication_method"], - "properties": { - "host": { - "title": "Host", - "description": "Hostname of the database.", - "type": "string", - "order": 0 - }, - "port": { - "title": "Port", - "description": "Port of the database.", - "type": "integer", - "minimum": 0, - "maximum": 65536, - "default": 3306, - "examples": ["3306"], - "order": 1 - }, - "database": { - "title": "Database", - "description": "Name of the database.", - "type": "string", - "order": 2 - }, - "username": { - "title": "Username", - "description": "Username to use to access the database.", - "type": "string", - "order": 3 - }, - "password": { - "title": "Password", - "description": "Password associated with the username.", - "type": "string", - "airbyte_secret": true, - "order": 4 - }, - "jdbc_url_params": { - "title": "JDBC URL params", - "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)", - "type": "string", - "order": 5 - }, - "replication_method": { - "title": "Replication method", - "description": "Replication method to use for extracting data from the database. STANDARD replication requires no setup on the DB side but will not be able to represent deletions incrementally. CDC uses the Binlog to detect inserts, updates, and deletes. This needs to be configured on the source database itself.", - "type": "string", - "order": 6, - "default": "STANDARD", - "enum": ["STANDARD", "CDC"] - } - } - } -} diff --git a/airbyte-integrations/connector-templates/source-java-jdbc/src/test-integration/java/io/airbyte/integrations/source/{{snakeCase name}}/{{pascalCase name}}SourceAcceptanceTest.java.hbs b/airbyte-integrations/connector-templates/source-java-jdbc/src/test-integration/java/io/airbyte/integrations/source/{{snakeCase name}}/{{pascalCase name}}SourceAcceptanceTest.java.hbs deleted file mode 100644 index eba3f8c53e74e..0000000000000 --- a/airbyte-integrations/connector-templates/source-java-jdbc/src/test-integration/java/io/airbyte/integrations/source/{{snakeCase name}}/{{pascalCase name}}SourceAcceptanceTest.java.hbs +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.source.{{snakeCase name}}; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.cdk.integrations.standardtest.source.SourceAcceptanceTest; -import io.airbyte.cdk.integrations.standardtest.source.TestDestinationEnv; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConnectorSpecification; -import java.util.HashMap; - -public class {{pascalCase name}}SourceAcceptanceTest extends SourceAcceptanceTest { - - private JsonNode config; - - @Override - protected void setupEnvironment(final TestDestinationEnv testEnv) { - // TODO create new container. Ex: "new OracleContainer("epiclabs/docker-oracle-xe-11g");" - // TODO make container started. Ex: "container.start();" - // TODO init JsonNode config - // TODO crete airbyte Database object "Databases.createJdbcDatabase(...)" - // TODO insert test data to DB. Ex: "database.execute(connection-> ...)" - // TODO close Database. Ex: "database.close();" - } - - @Override - protected void tearDown(final TestDestinationEnv testEnv) { - // TODO close container that was initialized in setup() method. Ex: "container.close();" - } - - @Override - protected String getImageName() { - return "airbyte/source-{{dashCase name}}:dev"; - } - - @Override - protected ConnectorSpecification getSpec() throws Exception { - return Jsons.deserialize(MoreResources.readResource("spec.json"), ConnectorSpecification.class); - } - - @Override - protected JsonNode getConfig() { - return config; - } - - @Override - protected ConfiguredAirbyteCatalog getConfiguredCatalog() { - // TODO Return the ConfiguredAirbyteCatalog with ConfiguredAirbyteStream objects - return null; - } - - @Override - protected JsonNode getState() { - return Jsons.jsonNode(new HashMap<>()); - } - -} diff --git a/airbyte-integrations/connector-templates/source-java-jdbc/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connector-templates/source-java-jdbc/src/test-integration/resources/dummy_config.json deleted file mode 100644 index 483d12bc3cd11..0000000000000 --- a/airbyte-integrations/connector-templates/source-java-jdbc/src/test-integration/resources/dummy_config.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "host": "default", - "port": 5555, - "database": "default", - "username": "default", - "replication_method": "STANDARD" -} diff --git a/airbyte-integrations/connector-templates/source-java-jdbc/src/test-integration/resources/expected_spec.json.hbs b/airbyte-integrations/connector-templates/source-java-jdbc/src/test-integration/resources/expected_spec.json.hbs deleted file mode 100644 index 5d3ba7fc2413e..0000000000000 --- a/airbyte-integrations/connector-templates/source-java-jdbc/src/test-integration/resources/expected_spec.json.hbs +++ /dev/null @@ -1,61 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.com/integrations/sources/{{snakeCase name}}", - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "{{pascalCase name}} Source Spec", - "type": "object", - "required": ["host", "port", "database", "username", "replication_method"], - "properties": { - "host": { - "title": "Host", - "description": "Hostname of the database.", - "type": "string", - "order": 0 - }, - "port": { - "title": "Port", - "description": "Port of the database.", - "type": "integer", - "minimum": 0, - "maximum": 65536, - "default": 3306, - "examples": ["3306"], - "order": 1 - }, - "database": { - "title": "Database", - "description": "Name of the database.", - "type": "string", - "order": 2 - }, - "username": { - "title": "Username", - "description": "Username to use to access the database.", - "type": "string", - "order": 3 - }, - "password": { - "title": "Password", - "description": "Password associated with the username.", - "type": "string", - "airbyte_secret": true, - "order": 4 - }, - "jdbc_url_params": { - "title": "JDBC URL params", - "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)", - "type": "string", - "order": 5 - }, - "replication_method": { - "title": "Replication method", - "description": "Replication method to use for extracting data from the database. STANDARD replication requires no setup on the DB side but will not be able to represent deletions incrementally. CDC uses the Binlog to detect inserts, updates, and deletes. This needs to be configured on the source database itself.", - "type": "string", - "order": 6, - "default": "STANDARD", - "enum": ["STANDARD", "CDC"] - } - } - }, - "supported_destination_sync_modes": [] -} diff --git a/airbyte-integrations/connector-templates/source-java-jdbc/src/test/java/io/airbyte/integrations/source/{{snakeCase name}}/{{pascalCase name}}JdbcSourceAcceptanceTest.java.hbs b/airbyte-integrations/connector-templates/source-java-jdbc/src/test/java/io/airbyte/integrations/source/{{snakeCase name}}/{{pascalCase name}}JdbcSourceAcceptanceTest.java.hbs deleted file mode 100644 index c2046c3a49da5..0000000000000 --- a/airbyte-integrations/connector-templates/source-java-jdbc/src/test/java/io/airbyte/integrations/source/{{snakeCase name}}/{{pascalCase name}}JdbcSourceAcceptanceTest.java.hbs +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.source.{{snakeCase name}}; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.source.jdbc.AbstractJdbcSource; -import io.airbyte.cdk.integrations.source.jdbc.test.JdbcSourceAcceptanceTest; -import java.sql.JDBCType; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.BeforeEach; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -class {{pascalCase name}}JdbcSourceAcceptanceTest extends JdbcSourceAcceptanceTest { - - private static final Logger LOGGER = LoggerFactory.getLogger({{pascalCase name}}JdbcSourceAcceptanceTest.class); - - // TODO declare a test container for DB. EX: org.testcontainers.containers.OracleContainer - - @BeforeAll - static void init() { - // Oracle returns uppercase values - // TODO init test container. Ex: "new OracleContainer("epiclabs/docker-oracle-xe-11g")" - // TODO start container. Ex: "container.start();" - } - - @BeforeEach - public void setup() throws Exception { - // TODO init config. Ex: "config = Jsons.jsonNode(ImmutableMap.builder().put("host", - // host).put("port", port)....build()); - super.setup(); - } - - @AfterEach - public void tearDown() { - // TODO clean used resources - } - - @Override - public AbstractJdbcSource getSource() { - return new {{pascalCase name}}Source(); - } - - @Override - public boolean supportsSchemas() { - // TODO check if your db supports it and update method accordingly - return false; - } - - @Override - public JsonNode getConfig() { - return config; - } - - @Override - public String getDriverClass() { - return {{pascalCase name}}Source.DRIVER_CLASS; - } - - @Override - public AbstractJdbcSource getJdbcSource() { - // TODO - return null; - } - - @AfterAll - static void cleanUp() { - // TODO close the container. Ex: "container.close();" - } - -} diff --git a/airbyte-integrations/connector-templates/source-java-jdbc/src/test/java/io/airbyte/integrations/source/{{snakeCase name}}/{{pascalCase name}}SourceTests.java.hbs b/airbyte-integrations/connector-templates/source-java-jdbc/src/test/java/io/airbyte/integrations/source/{{snakeCase name}}/{{pascalCase name}}SourceTests.java.hbs deleted file mode 100644 index 082504f2e4b85..0000000000000 --- a/airbyte-integrations/connector-templates/source-java-jdbc/src/test/java/io/airbyte/integrations/source/{{snakeCase name}}/{{pascalCase name}}SourceTests.java.hbs +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.source.{{snakeCase name}}; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.db.Database; -import org.junit.jupiter.api.Test; - -public class {{pascalCase name}}SourceTests { - - private JsonNode config; - private Database database; - - @Test - public void testSettingTimezones() throws Exception { - // TODO init your container. Ex: "new - // org.testcontainers.containers.MSSQLServerContainer<>("mcr.microsoft.com/mssql/server:2019-latest").acceptLicense();" - // TODO start the container. Ex: "container.start();" - // TODO prepare DB config. Ex: "config = getConfig(container, dbName, - // "serverTimezone=Europe/London");" - // TODO create DB, grant all privileges, etc. - // TODO check connection status. Ex: "AirbyteConnectionStatus check = new - // ScaffoldJavaJdbcGenericSource().check(config);" - // TODO assert connection status. Ex: "assertEquals(AirbyteConnectionStatus.Status.SUCCEEDED, - // check.getStatus());" - // TODO cleanup used resources and close used container. Ex: "container.close();" - } - -} diff --git a/airbyte-integrations/connector-templates/source-configuration-based/README.md.hbs b/airbyte-integrations/connector-templates/source-low-code/README.md.hbs similarity index 100% rename from airbyte-integrations/connector-templates/source-configuration-based/README.md.hbs rename to airbyte-integrations/connector-templates/source-low-code/README.md.hbs diff --git a/airbyte-ci/connectors/qa-engine/tests/__init__.py b/airbyte-integrations/connector-templates/source-low-code/__init__.py similarity index 100% rename from airbyte-ci/connectors/qa-engine/tests/__init__.py rename to airbyte-integrations/connector-templates/source-low-code/__init__.py diff --git a/airbyte-integrations/connector-templates/source-configuration-based/__init__.py b/airbyte-integrations/connector-templates/source-low-code/integration_tests/__init__.py similarity index 100% rename from airbyte-integrations/connector-templates/source-configuration-based/__init__.py rename to airbyte-integrations/connector-templates/source-low-code/integration_tests/__init__.py diff --git a/airbyte-integrations/connector-templates/source-configuration-based/integration_tests/abnormal_state.json b/airbyte-integrations/connector-templates/source-low-code/integration_tests/abnormal_state.json similarity index 100% rename from airbyte-integrations/connector-templates/source-configuration-based/integration_tests/abnormal_state.json rename to airbyte-integrations/connector-templates/source-low-code/integration_tests/abnormal_state.json diff --git a/airbyte-integrations/connector-templates/source-configuration-based/integration_tests/acceptance.py b/airbyte-integrations/connector-templates/source-low-code/integration_tests/acceptance.py similarity index 100% rename from airbyte-integrations/connector-templates/source-configuration-based/integration_tests/acceptance.py rename to airbyte-integrations/connector-templates/source-low-code/integration_tests/acceptance.py diff --git a/airbyte-integrations/connector-templates/source-configuration-based/integration_tests/configured_catalog.json b/airbyte-integrations/connector-templates/source-low-code/integration_tests/configured_catalog.json similarity index 100% rename from airbyte-integrations/connector-templates/source-configuration-based/integration_tests/configured_catalog.json rename to airbyte-integrations/connector-templates/source-low-code/integration_tests/configured_catalog.json diff --git a/airbyte-integrations/connector-templates/source-configuration-based/integration_tests/invalid_config.json b/airbyte-integrations/connector-templates/source-low-code/integration_tests/invalid_config.json similarity index 100% rename from airbyte-integrations/connector-templates/source-configuration-based/integration_tests/invalid_config.json rename to airbyte-integrations/connector-templates/source-low-code/integration_tests/invalid_config.json diff --git a/airbyte-integrations/connector-templates/source-configuration-based/integration_tests/sample_config.json b/airbyte-integrations/connector-templates/source-low-code/integration_tests/sample_config.json similarity index 100% rename from airbyte-integrations/connector-templates/source-configuration-based/integration_tests/sample_config.json rename to airbyte-integrations/connector-templates/source-low-code/integration_tests/sample_config.json diff --git a/airbyte-integrations/connector-templates/source-configuration-based/integration_tests/sample_state.json b/airbyte-integrations/connector-templates/source-low-code/integration_tests/sample_state.json similarity index 100% rename from airbyte-integrations/connector-templates/source-configuration-based/integration_tests/sample_state.json rename to airbyte-integrations/connector-templates/source-low-code/integration_tests/sample_state.json diff --git a/airbyte-integrations/connector-templates/source-configuration-based/src/main.py.hbs b/airbyte-integrations/connector-templates/source-low-code/main.py.hbs similarity index 100% rename from airbyte-integrations/connector-templates/source-configuration-based/src/main.py.hbs rename to airbyte-integrations/connector-templates/source-low-code/main.py.hbs diff --git a/airbyte-integrations/connector-templates/source-low-code/metadata.yaml.hbs b/airbyte-integrations/connector-templates/source-low-code/metadata.yaml.hbs new file mode 100644 index 0000000000000..869f859bfcd22 --- /dev/null +++ b/airbyte-integrations/connector-templates/source-low-code/metadata.yaml.hbs @@ -0,0 +1,35 @@ +data: + allowedHosts: + hosts: + - TODO # Please change to the hostname of the source. + registries: + oss: + enabled: true + cloud: + enabled: false + remoteRegistries: + pypi: + enabled: true + packageName: airbyte-source-{{dashCase name}} + connectorBuildOptions: + # Please update to the latest version of the connector base image. + # https://hub.docker.com/r/airbyte/python-connector-base + # Please use the full address with sha256 hash to guarantee build reproducibility. + baseImage: docker.io/airbyte/python-connector-base:1.2.0@sha256:c22a9d97464b69d6ef01898edf3f8612dc11614f05a84984451dde195f337db9 + connectorSubtype: api + connectorType: source + definitionId: {{generateDefinitionId}} + dockerImageTag: 0.1.0 + dockerRepository: airbyte/source-{{dashCase name}} + githubIssueLabel: source-{{dashCase name}} + icon: {{dashCase name}}.svg + license: MIT + name: {{capitalCase name}} + releaseDate: TODO + releaseStage: alpha + supportLevel: community + documentationUrl: https://docs.airbyte.com/integrations/sources/{{dashCase name}} + tags: + - language:python + - cdk:low-code +metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connector-templates/source-low-code/pyproject.toml.hbs b/airbyte-integrations/connector-templates/source-low-code/pyproject.toml.hbs new file mode 100644 index 0000000000000..f8c24cb2f0f69 --- /dev/null +++ b/airbyte-integrations/connector-templates/source-low-code/pyproject.toml.hbs @@ -0,0 +1,27 @@ +[build-system] +requires = [ "poetry-core>=1.0.0",] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +version = "0.1.0" +name = "source-{{dashCase name}}" +description = "Source implementation for {{dashCase name}}." +authors = [ "Airbyte ",] +license = "MIT" +readme = "README.md" +documentation = "https://docs.airbyte.com/integrations/sources/{{dashCase name}}" +homepage = "https://airbyte.com" +repository = "https://github.com/airbytehq/airbyte" +packages = [ { include = "source_{{snakeCase name}}" }, {include = "main.py" } ] + +[tool.poetry.dependencies] +python = "^3.9,<3.12" +airbyte-cdk = "^0" + +[tool.poetry.scripts] +source-{{dashCase name}} = "source_{{snakeCase name}}.run:run" + +[tool.poetry.group.dev.dependencies] +requests-mock = "*" +pytest-mock = "*" +pytest = "*" diff --git a/airbyte-integrations/connector-templates/source-configuration-based/secrets/config.json.hbs b/airbyte-integrations/connector-templates/source-low-code/secrets/config.json.hbs similarity index 100% rename from airbyte-integrations/connector-templates/source-configuration-based/secrets/config.json.hbs rename to airbyte-integrations/connector-templates/source-low-code/secrets/config.json.hbs diff --git a/airbyte-integrations/connector-templates/source-configuration-based/src/source_{{snakeCase name}}/__init__.py.hbs b/airbyte-integrations/connector-templates/source-low-code/source_{{snakeCase name}}/__init__.py.hbs similarity index 100% rename from airbyte-integrations/connector-templates/source-configuration-based/src/source_{{snakeCase name}}/__init__.py.hbs rename to airbyte-integrations/connector-templates/source-low-code/source_{{snakeCase name}}/__init__.py.hbs diff --git a/airbyte-integrations/connector-templates/source-configuration-based/src/source_{{snakeCase name}}/manifest.yaml.hbs b/airbyte-integrations/connector-templates/source-low-code/source_{{snakeCase name}}/manifest.yaml.hbs similarity index 100% rename from airbyte-integrations/connector-templates/source-configuration-based/src/source_{{snakeCase name}}/manifest.yaml.hbs rename to airbyte-integrations/connector-templates/source-low-code/source_{{snakeCase name}}/manifest.yaml.hbs diff --git a/airbyte-integrations/connector-templates/source-configuration-based/src/source_{{snakeCase name}}/run.py.hbs b/airbyte-integrations/connector-templates/source-low-code/source_{{snakeCase name}}/run.py.hbs similarity index 100% rename from airbyte-integrations/connector-templates/source-configuration-based/src/source_{{snakeCase name}}/run.py.hbs rename to airbyte-integrations/connector-templates/source-low-code/source_{{snakeCase name}}/run.py.hbs diff --git a/airbyte-integrations/connector-templates/source-configuration-based/src/source_{{snakeCase name}}/schemas/TODO.md.hbs b/airbyte-integrations/connector-templates/source-low-code/source_{{snakeCase name}}/schemas/TODO.md.hbs similarity index 100% rename from airbyte-integrations/connector-templates/source-configuration-based/src/source_{{snakeCase name}}/schemas/TODO.md.hbs rename to airbyte-integrations/connector-templates/source-low-code/source_{{snakeCase name}}/schemas/TODO.md.hbs diff --git a/airbyte-integrations/connector-templates/source-configuration-based/src/source_{{snakeCase name}}/schemas/customers.json b/airbyte-integrations/connector-templates/source-low-code/source_{{snakeCase name}}/schemas/customers.json similarity index 100% rename from airbyte-integrations/connector-templates/source-configuration-based/src/source_{{snakeCase name}}/schemas/customers.json rename to airbyte-integrations/connector-templates/source-low-code/source_{{snakeCase name}}/schemas/customers.json diff --git a/airbyte-integrations/connector-templates/source-configuration-based/src/source_{{snakeCase name}}/schemas/employees.json b/airbyte-integrations/connector-templates/source-low-code/source_{{snakeCase name}}/schemas/employees.json similarity index 100% rename from airbyte-integrations/connector-templates/source-configuration-based/src/source_{{snakeCase name}}/schemas/employees.json rename to airbyte-integrations/connector-templates/source-low-code/source_{{snakeCase name}}/schemas/employees.json diff --git a/airbyte-integrations/connector-templates/source-configuration-based/src/source_{{snakeCase name}}/source.py.hbs b/airbyte-integrations/connector-templates/source-low-code/source_{{snakeCase name}}/source.py.hbs similarity index 100% rename from airbyte-integrations/connector-templates/source-configuration-based/src/source_{{snakeCase name}}/source.py.hbs rename to airbyte-integrations/connector-templates/source-low-code/source_{{snakeCase name}}/source.py.hbs diff --git a/airbyte-integrations/connector-templates/source-python-http-api/README.md.hbs b/airbyte-integrations/connector-templates/source-python-http-api/README.md.hbs deleted file mode 100644 index 28e5231c1cd16..0000000000000 --- a/airbyte-integrations/connector-templates/source-python-http-api/README.md.hbs +++ /dev/null @@ -1,105 +0,0 @@ -# {{capitalCase name}} Source - -This is the repository for the {{capitalCase name}} source connector, written in Python. -For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.com/integrations/sources/{{dashCase name}}). - -## Local development - -### Prerequisites - -* Python (`^3.9`) -* Poetry (`^1.7`) - installation instructions [here](https://python-poetry.org/docs/#installation) - - - -### Installing the connector - -From this connector directory, run: -```bash -poetry install --with dev -``` - - -### Create credentials - -**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/sources/{{dashCase name}}) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `src/source_{{snakeCase name}}/spec.yaml` file. -Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. -See `sample_files/sample_config.json` for a sample config file. - - -### Locally running the connector - -``` -poetry run source-{{dashCase name}} spec -poetry run source-{{dashCase name}} check --config secrets/config.json -poetry run source-{{dashCase name}} discover --config secrets/config.json -poetry run source-{{dashCase name}} read --config secrets/config.json --catalog sample_files/configured_catalog.json -``` - -### Running tests - -To run tests locally, from the connector directory run: - -``` -poetry run pytest tests -``` - -### Building the docker image - -1. Install [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) -2. Run the following command to build the docker image: -```bash -airbyte-ci connectors --name=source-{{dashCase name}} build -``` - -An image will be available on your host with the tag `airbyte/source-{{dashCase name}}:dev`. - - -### Running as a docker container - -Then run any of the connector commands as follows: -``` -docker run --rm airbyte/source-{{dashCase name}}:dev spec -docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-{{dashCase name}}:dev check --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-{{dashCase name}}:dev discover --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-{{dashCase name}}:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json -``` - -### Running our CI test suite - -You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): - -```bash -airbyte-ci connectors --name=source-{{dashCase name}} test -``` - -### Customizing acceptance Tests - -Customize `acceptance-test-config.yml` file to configure acceptance tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. -If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. - -### Dependency Management - -All of your dependencies should be managed via Poetry. -To add a new dependency, run: - -```bash -poetry add -``` - -Please commit the changes to `pyproject.toml` and `poetry.lock` files. - -## Publishing a new version of the connector - -You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? -1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-{{dashCase name}} test` -2. Bump the connector version (please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors)): - - bump the `dockerImageTag` value in in `metadata.yaml` - - bump the `version` value in `pyproject.toml` -3. Make sure the `metadata.yaml` content is up to date. -4. Make sure the connector documentation and its changelog is up to date (`docs/integrations/sources/{{dashCase name}}.md`). -5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). -6. Pat yourself on the back for being an awesome contributor. -7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. -8. Once your PR is merged, the new version of the connector will be automatically published to Docker Hub and our connector registry. \ No newline at end of file diff --git a/airbyte-integrations/connector-templates/source-python-http-api/integration_tests/acceptance.py b/airbyte-integrations/connector-templates/source-python-http-api/integration_tests/acceptance.py deleted file mode 100644 index 9e6409236281f..0000000000000 --- a/airbyte-integrations/connector-templates/source-python-http-api/integration_tests/acceptance.py +++ /dev/null @@ -1,16 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -import pytest - -pytest_plugins = ("connector_acceptance_test.plugin",) - - -@pytest.fixture(scope="session", autouse=True) -def connector_setup(): - """This fixture is a placeholder for external resources that acceptance test might require.""" - # TODO: setup test dependencies if needed. otherwise remove the TODO comments - yield - # TODO: clean up test dependencies diff --git a/airbyte-integrations/connector-templates/source-python-http-api/integration_tests/configured_catalog.json b/airbyte-integrations/connector-templates/source-python-http-api/integration_tests/configured_catalog.json deleted file mode 100644 index 36f0468db0d8f..0000000000000 --- a/airbyte-integrations/connector-templates/source-python-http-api/integration_tests/configured_catalog.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "streams": [ - { - "stream": { - "name": "customers", - "json_schema": {}, - "supported_sync_modes": ["full_refresh"] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "employees", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"] - }, - "sync_mode": "incremental", - "destination_sync_mode": "append" - } - ] -} diff --git a/airbyte-integrations/connector-templates/source-python-http-api/integration_tests/invalid_config.json b/airbyte-integrations/connector-templates/source-python-http-api/integration_tests/invalid_config.json deleted file mode 100644 index f3732995784f2..0000000000000 --- a/airbyte-integrations/connector-templates/source-python-http-api/integration_tests/invalid_config.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "todo-wrong-field": "this should be an incomplete config file, used in standard tests" -} diff --git a/airbyte-integrations/connector-templates/source-python-http-api/integration_tests/sample_config.json b/airbyte-integrations/connector-templates/source-python-http-api/integration_tests/sample_config.json deleted file mode 100644 index ecc4913b84c74..0000000000000 --- a/airbyte-integrations/connector-templates/source-python-http-api/integration_tests/sample_config.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "fix-me": "TODO" -} diff --git a/airbyte-integrations/connector-templates/source-python-http-api/metadata.yaml.hbs b/airbyte-integrations/connector-templates/source-python-http-api/metadata.yaml.hbs deleted file mode 100644 index 8d952455ab14b..0000000000000 --- a/airbyte-integrations/connector-templates/source-python-http-api/metadata.yaml.hbs +++ /dev/null @@ -1,34 +0,0 @@ -data: - allowedHosts: - hosts: - - TODO # Please change to the hostname of the source. - registries: - oss: - enabled: true - cloud: - enabled: false - remoteRegistries: - pypi: - enabled: true - packageName: airbyte-source-{{dashCase name}} - connectorBuildOptions: - # Please update to the latest version of the connector base image. - # https://hub.docker.com/r/airbyte/python-connector-base - # Please use the full address with sha256 hash to guarantee build reproducibility. - baseImage: docker.io/airbyte/python-connector-base:1.0.0@sha256:dd17e347fbda94f7c3abff539be298a65af2d7fc27a307d89297df1081a45c27 - connectorSubtype: api - connectorType: source - definitionId: {{generateDefinitionId}} - dockerImageTag: 0.1.0 - dockerRepository: airbyte/source-{{dashCase name}} - githubIssueLabel: source-{{dashCase name}} - icon: {{dashCase name}}.svg - license: MIT - name: {{capitalCase name}} - releaseDate: TODO - supportLevel: community - releaseStage: alpha - documentationUrl: https://docs.airbyte.com/integrations/sources/{{dashCase name}} - tags: - - language:python -metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connector-templates/source-python-http-api/pyproject.toml.hbs b/airbyte-integrations/connector-templates/source-python-http-api/pyproject.toml.hbs deleted file mode 100644 index 40bbebd8c0909..0000000000000 --- a/airbyte-integrations/connector-templates/source-python-http-api/pyproject.toml.hbs +++ /dev/null @@ -1,27 +0,0 @@ -[build-system] -requires = [ "poetry-core>=1.0.0",] -build-backend = "poetry.core.masonry.api" - -[tool.poetry] -version = "0.1.0" -name = "source-{{dashCase name}}" -description = "Source implementation for {{dashCase name}}." -authors = [ "Airbyte ",] -license = "MIT" -readme = "README.md" -documentation = "https://docs.airbyte.com/integrations/sources/{{dashCase name}}" -homepage = "https://airbyte.com" -repository = "https://github.com/airbytehq/airbyte" -packages = [ { include = "source_{{snakeCase name}}", from="src"}, {include = "main.py", from = "src"} ] - -[tool.poetry.dependencies] -python = "^3.9,<3.12" -airbyte-cdk = "^0" - -[tool.poetry.scripts] -source-{{dashCase name}} = "source_{{snakeCase name}}.run:run" - -[tool.poetry.group.dev.dependencies] -requests-mock = "^1.9.3" -pytest-mock = "^3.6.1" -pytest = "^6.1" diff --git a/airbyte-integrations/connector-templates/source-python-http-api/secrets/config.json.hbs b/airbyte-integrations/connector-templates/source-python-http-api/secrets/config.json.hbs deleted file mode 100644 index f5f8933895aae..0000000000000 --- a/airbyte-integrations/connector-templates/source-python-http-api/secrets/config.json.hbs +++ /dev/null @@ -1,3 +0,0 @@ -{ - "fix-me": "TODO populate with needed configuration for integration tests or delete this file and any references to it. The schema of this file should match what is in your spec.yaml" -} diff --git a/airbyte-integrations/connector-templates/source-python/README.md.hbs b/airbyte-integrations/connector-templates/source-python/README.md.hbs index 919b24fa5f355..28e5231c1cd16 100644 --- a/airbyte-integrations/connector-templates/source-python/README.md.hbs +++ b/airbyte-integrations/connector-templates/source-python/README.md.hbs @@ -67,7 +67,9 @@ docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integrat ``` ### Running our CI test suite + You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): + ```bash airbyte-ci connectors --name=source-{{dashCase name}} test ``` @@ -81,6 +83,7 @@ If your connector requires to create or destroy resources for use during accepta All of your dependencies should be managed via Poetry. To add a new dependency, run: + ```bash poetry add ``` diff --git a/airbyte-integrations/connector-templates/source-python/integration_tests/acceptance.py b/airbyte-integrations/connector-templates/source-python/integration_tests/acceptance.py index 43ce950d77caa..9e6409236281f 100644 --- a/airbyte-integrations/connector-templates/source-python/integration_tests/acceptance.py +++ b/airbyte-integrations/connector-templates/source-python/integration_tests/acceptance.py @@ -11,6 +11,6 @@ @pytest.fixture(scope="session", autouse=True) def connector_setup(): """This fixture is a placeholder for external resources that acceptance test might require.""" - # TODO: setup test dependencies + # TODO: setup test dependencies if needed. otherwise remove the TODO comments yield # TODO: clean up test dependencies diff --git a/airbyte-integrations/connector-templates/source-python/integration_tests/configured_catalog.json b/airbyte-integrations/connector-templates/source-python/integration_tests/configured_catalog.json index b999c2ba3abf7..36f0468db0d8f 100644 --- a/airbyte-integrations/connector-templates/source-python/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connector-templates/source-python/integration_tests/configured_catalog.json @@ -2,14 +2,21 @@ "streams": [ { "stream": { - "name": "table_name", + "name": "customers", "json_schema": {}, - "supported_sync_modes": ["full_refresh"], - "source_defined_cursor": false, - "default_cursor_field": ["column_name"] + "supported_sync_modes": ["full_refresh"] }, "sync_mode": "full_refresh", "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "employees", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" } ] } diff --git a/airbyte-integrations/connector-templates/source-python-http-api/src/main.py.hbs b/airbyte-integrations/connector-templates/source-python/main.py.hbs similarity index 100% rename from airbyte-integrations/connector-templates/source-python-http-api/src/main.py.hbs rename to airbyte-integrations/connector-templates/source-python/main.py.hbs diff --git a/airbyte-integrations/connector-templates/source-python/metadata.yaml.hbs b/airbyte-integrations/connector-templates/source-python/metadata.yaml.hbs index ba39befbfce39..1e5f739561fc1 100644 --- a/airbyte-integrations/connector-templates/source-python/metadata.yaml.hbs +++ b/airbyte-integrations/connector-templates/source-python/metadata.yaml.hbs @@ -2,20 +2,20 @@ data: allowedHosts: hosts: - TODO # Please change to the hostname of the source. - remoteRegistries: - pypi: - enabled: true - packageName: airbyte-source-{{dashCase name}} registries: oss: enabled: true cloud: enabled: false + remoteRegistries: + pypi: + enabled: true + packageName: airbyte-source-{{dashCase name}} connectorBuildOptions: # Please update to the latest version of the connector base image. # https://hub.docker.com/r/airbyte/python-connector-base # Please use the full address with sha256 hash to guarantee build reproducibility. - baseImage: docker.io/airbyte/python-connector-base:1.0.0@sha256:dd17e347fbda94f7c3abff539be298a65af2d7fc27a307d89297df1081a45c27 + baseImage: docker.io/airbyte/python-connector-base:1.2.0@sha256:c22a9d97464b69d6ef01898edf3f8612dc11614f05a84984451dde195f337db9 connectorSubtype: api connectorType: source definitionId: {{generateDefinitionId}} diff --git a/airbyte-integrations/connector-templates/source-python/pyproject.toml.hbs b/airbyte-integrations/connector-templates/source-python/pyproject.toml.hbs index 40bbebd8c0909..f647009895d03 100644 --- a/airbyte-integrations/connector-templates/source-python/pyproject.toml.hbs +++ b/airbyte-integrations/connector-templates/source-python/pyproject.toml.hbs @@ -12,7 +12,7 @@ readme = "README.md" documentation = "https://docs.airbyte.com/integrations/sources/{{dashCase name}}" homepage = "https://airbyte.com" repository = "https://github.com/airbytehq/airbyte" -packages = [ { include = "source_{{snakeCase name}}", from="src"}, {include = "main.py", from = "src"} ] +packages = [ { include = "source_{{snakeCase name}}" }, {include = "main.py" } ] [tool.poetry.dependencies] python = "^3.9,<3.12" @@ -22,6 +22,7 @@ airbyte-cdk = "^0" source-{{dashCase name}} = "source_{{snakeCase name}}.run:run" [tool.poetry.group.dev.dependencies] -requests-mock = "^1.9.3" -pytest-mock = "^3.6.1" -pytest = "^6.1" +requests-mock = "*" +pytest-mock = "*" +pytest = "*" + diff --git a/airbyte-integrations/connector-templates/source-python/secrets/config.json.hbs b/airbyte-integrations/connector-templates/source-python/secrets/config.json.hbs index b494c8d9344b4..f5f8933895aae 100644 --- a/airbyte-integrations/connector-templates/source-python/secrets/config.json.hbs +++ b/airbyte-integrations/connector-templates/source-python/secrets/config.json.hbs @@ -1,3 +1,3 @@ { - "fix-me": "TODO populate with needed configuration for integration tests or delete this file and any references to it. The schema of this file should match what is in your spec.yaml" + "fix-me": "TODO populate with needed configuration for integration tests or delete this file and any references to it. The schema of this file should match what is in your spec.yaml" } diff --git a/airbyte-integrations/connector-templates/source-python-http-api/src/source_{{snakeCase name}}/__init__.py.hbs b/airbyte-integrations/connector-templates/source-python/source_{{snakeCase name}}/__init__.py.hbs similarity index 100% rename from airbyte-integrations/connector-templates/source-python-http-api/src/source_{{snakeCase name}}/__init__.py.hbs rename to airbyte-integrations/connector-templates/source-python/source_{{snakeCase name}}/__init__.py.hbs diff --git a/airbyte-integrations/connector-templates/source-python-http-api/src/source_{{snakeCase name}}/run.py.hbs b/airbyte-integrations/connector-templates/source-python/source_{{snakeCase name}}/run.py.hbs similarity index 100% rename from airbyte-integrations/connector-templates/source-python-http-api/src/source_{{snakeCase name}}/run.py.hbs rename to airbyte-integrations/connector-templates/source-python/source_{{snakeCase name}}/run.py.hbs diff --git a/airbyte-integrations/connector-templates/source-python-http-api/src/source_{{snakeCase name}}/schemas/TODO.md b/airbyte-integrations/connector-templates/source-python/source_{{snakeCase name}}/schemas/TODO.md similarity index 100% rename from airbyte-integrations/connector-templates/source-python-http-api/src/source_{{snakeCase name}}/schemas/TODO.md rename to airbyte-integrations/connector-templates/source-python/source_{{snakeCase name}}/schemas/TODO.md diff --git a/airbyte-integrations/connector-templates/source-python-http-api/src/source_{{snakeCase name}}/schemas/customers.json b/airbyte-integrations/connector-templates/source-python/source_{{snakeCase name}}/schemas/customers.json similarity index 100% rename from airbyte-integrations/connector-templates/source-python-http-api/src/source_{{snakeCase name}}/schemas/customers.json rename to airbyte-integrations/connector-templates/source-python/source_{{snakeCase name}}/schemas/customers.json diff --git a/airbyte-integrations/connector-templates/source-python-http-api/src/source_{{snakeCase name}}/schemas/employees.json b/airbyte-integrations/connector-templates/source-python/source_{{snakeCase name}}/schemas/employees.json similarity index 100% rename from airbyte-integrations/connector-templates/source-python-http-api/src/source_{{snakeCase name}}/schemas/employees.json rename to airbyte-integrations/connector-templates/source-python/source_{{snakeCase name}}/schemas/employees.json diff --git a/airbyte-integrations/connector-templates/source-python-http-api/src/source_{{snakeCase name}}/source.py.hbs b/airbyte-integrations/connector-templates/source-python/source_{{snakeCase name}}/source.py.hbs similarity index 100% rename from airbyte-integrations/connector-templates/source-python-http-api/src/source_{{snakeCase name}}/source.py.hbs rename to airbyte-integrations/connector-templates/source-python/source_{{snakeCase name}}/source.py.hbs diff --git a/airbyte-integrations/connector-templates/source-python-http-api/src/source_{{snakeCase name}}/spec.yaml.hbs b/airbyte-integrations/connector-templates/source-python/source_{{snakeCase name}}/spec.yaml.hbs similarity index 100% rename from airbyte-integrations/connector-templates/source-python-http-api/src/source_{{snakeCase name}}/spec.yaml.hbs rename to airbyte-integrations/connector-templates/source-python/source_{{snakeCase name}}/spec.yaml.hbs diff --git a/airbyte-integrations/connector-templates/source-python/src/main.py.hbs b/airbyte-integrations/connector-templates/source-python/src/main.py.hbs deleted file mode 100644 index 202f3973567d7..0000000000000 --- a/airbyte-integrations/connector-templates/source-python/src/main.py.hbs +++ /dev/null @@ -1,8 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from source_{{snakeCase name}}.run import run - -if __name__ == "__main__": - run() diff --git a/airbyte-integrations/connector-templates/source-python/src/source_{{snakeCase name}}/__init__.py.hbs b/airbyte-integrations/connector-templates/source-python/src/source_{{snakeCase name}}/__init__.py.hbs deleted file mode 100644 index 09f02ce623ca5..0000000000000 --- a/airbyte-integrations/connector-templates/source-python/src/source_{{snakeCase name}}/__init__.py.hbs +++ /dev/null @@ -1,8 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from .source import Source{{properCase name}} - -__all__ = ["Source{{properCase name}}"] diff --git a/airbyte-integrations/connector-templates/source-python/src/source_{{snakeCase name}}/run.py.hbs b/airbyte-integrations/connector-templates/source-python/src/source_{{snakeCase name}}/run.py.hbs deleted file mode 100644 index 25c9400301f9b..0000000000000 --- a/airbyte-integrations/connector-templates/source-python/src/source_{{snakeCase name}}/run.py.hbs +++ /dev/null @@ -1,13 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -import sys - -from airbyte_cdk.entrypoint import launch -from .source import Source{{properCase name}} - -def run(): - source = Source{{properCase name}}() - launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connector-templates/source-python/src/source_{{snakeCase name}}/source.py.hbs b/airbyte-integrations/connector-templates/source-python/src/source_{{snakeCase name}}/source.py.hbs deleted file mode 100644 index 1e017d1252188..0000000000000 --- a/airbyte-integrations/connector-templates/source-python/src/source_{{snakeCase name}}/source.py.hbs +++ /dev/null @@ -1,105 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -import json -from datetime import datetime -from typing import Dict, Generator - -from airbyte_cdk.logger import AirbyteLogger -from airbyte_cdk.models import ( - AirbyteCatalog, - AirbyteConnectionStatus, - AirbyteMessage, - AirbyteRecordMessage, - AirbyteStream, - ConfiguredAirbyteCatalog, - Status, - Type, -) -from airbyte_cdk.sources import Source - - -class Source{{properCase name}}(Source): - def check(self, logger: AirbyteLogger, config: json) -> AirbyteConnectionStatus: - """ - Tests if the input configuration can be used to successfully connect to the integration - e.g: if a provided Stripe API token can be used to connect to the Stripe API. - - :param logger: Logging object to display debug/info/error to the logs - (logs will not be accessible via airbyte UI if they are not passed to this logger) - :param config: Json object containing the configuration of this source, content of this json is as specified in - the properties of the spec.yaml file - - :return: AirbyteConnectionStatus indicating a Success or Failure - """ - try: - # Not Implemented - - return AirbyteConnectionStatus(status=Status.SUCCEEDED) - except Exception as e: - return AirbyteConnectionStatus(status=Status.FAILED, message=f"An exception occurred: {str(e)}") - - def discover(self, logger: AirbyteLogger, config: json) -> AirbyteCatalog: - """ - Returns an AirbyteCatalog representing the available streams and fields in this integration. - For example, given valid credentials to a Postgres database, - returns an Airbyte catalog where each postgres table is a stream, and each table column is a field. - - :param logger: Logging object to display debug/info/error to the logs - (logs will not be accessible via airbyte UI if they are not passed to this logger) - :param config: Json object containing the configuration of this source, content of this json is as specified in - the properties of the spec.yaml file - - :return: AirbyteCatalog is an object describing a list of all available streams in this source. - A stream is an AirbyteStream object that includes: - - its stream name (or table name in the case of Postgres) - - json_schema providing the specifications of expected schema for this stream (a list of columns described - by their names and types) - """ - streams = [] - - stream_name = "TableName" # Example - json_schema = { # Example - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": {"columnName": {"type": "string"}}, - } - - # Not Implemented - - streams.append(AirbyteStream(name=stream_name, json_schema=json_schema)) - return AirbyteCatalog(streams=streams) - - def read( - self, logger: AirbyteLogger, config: json, catalog: ConfiguredAirbyteCatalog, state: Dict[str, any] - ) -> Generator[AirbyteMessage, None, None]: - """ - Returns a generator of the AirbyteMessages generated by reading the source with the given configuration, - catalog, and state. - - :param logger: Logging object to display debug/info/error to the logs - (logs will not be accessible via airbyte UI if they are not passed to this logger) - :param config: Json object containing the configuration of this source, content of this json is as specified in - the properties of the spec.yaml file - :param catalog: The input catalog is a ConfiguredAirbyteCatalog which is almost the same as AirbyteCatalog - returned by discover(), but - in addition, it's been configured in the UI! For each particular stream and field, there may have been provided - with extra modifications such as: filtering streams and/or columns out, renaming some entities, etc - :param state: When a Airbyte reads data from a source, it might need to keep a checkpoint cursor to resume - replication in the future from that saved checkpoint. - This is the object that is provided with state from previous runs and avoid replicating the entire set of - data everytime. - - :return: A generator that produces a stream of AirbyteRecordMessage contained in AirbyteMessage object. - """ - stream_name = "TableName" # Example - data = {"columnName": "Hello World"} # Example - - # Not Implemented - - yield AirbyteMessage( - type=Type.RECORD, - record=AirbyteRecordMessage(stream=stream_name, data=data, emitted_at=int(datetime.now().timestamp()) * 1000), - ) diff --git a/airbyte-integrations/connector-templates/source-python/src/source_{{snakeCase name}}/spec.yaml.hbs b/airbyte-integrations/connector-templates/source-python/src/source_{{snakeCase name}}/spec.yaml.hbs deleted file mode 100644 index 22a7ba6d7749a..0000000000000 --- a/airbyte-integrations/connector-templates/source-python/src/source_{{snakeCase name}}/spec.yaml.hbs +++ /dev/null @@ -1,11 +0,0 @@ -documentationUrl: https://docsurl.com -connectionSpecification: - $schema: http://json-schema.org/draft-07/schema# - title: {{capitalCase name}} Spec - type: object - required: - - fix-me - properties: - fix-me: - type: string - description: describe me diff --git a/airbyte-integrations/connector-templates/source-configuration-based/integration_tests/__init__.py b/airbyte-integrations/connector-templates/source-python/unit_tests/__init__.py similarity index 100% rename from airbyte-integrations/connector-templates/source-configuration-based/integration_tests/__init__.py rename to airbyte-integrations/connector-templates/source-python/unit_tests/__init__.py diff --git a/airbyte-integrations/connector-templates/source-python-http-api/unit_tests/test_incremental_streams.py.hbs b/airbyte-integrations/connector-templates/source-python/unit_tests/test_incremental_streams.py.hbs similarity index 100% rename from airbyte-integrations/connector-templates/source-python-http-api/unit_tests/test_incremental_streams.py.hbs rename to airbyte-integrations/connector-templates/source-python/unit_tests/test_incremental_streams.py.hbs diff --git a/airbyte-integrations/connector-templates/source-python-http-api/unit_tests/test_source.py.hbs b/airbyte-integrations/connector-templates/source-python/unit_tests/test_source.py.hbs similarity index 100% rename from airbyte-integrations/connector-templates/source-python-http-api/unit_tests/test_source.py.hbs rename to airbyte-integrations/connector-templates/source-python/unit_tests/test_source.py.hbs diff --git a/airbyte-integrations/connector-templates/source-python-http-api/unit_tests/test_streams.py.hbs b/airbyte-integrations/connector-templates/source-python/unit_tests/test_streams.py.hbs similarity index 100% rename from airbyte-integrations/connector-templates/source-python-http-api/unit_tests/test_streams.py.hbs rename to airbyte-integrations/connector-templates/source-python/unit_tests/test_streams.py.hbs diff --git a/airbyte-integrations/connector-templates/source-python/unit_tests/unit_test.py.hbs b/airbyte-integrations/connector-templates/source-python/unit_tests/unit_test.py.hbs deleted file mode 100644 index 219ae0142c724..0000000000000 --- a/airbyte-integrations/connector-templates/source-python/unit_tests/unit_test.py.hbs +++ /dev/null @@ -1,7 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -def test_example_method(): - assert True diff --git a/airbyte-integrations/connector-templates/source-singer/.dockerignore.hbs b/airbyte-integrations/connector-templates/source-singer/.dockerignore.hbs deleted file mode 100644 index 5142f5b3bb632..0000000000000 --- a/airbyte-integrations/connector-templates/source-singer/.dockerignore.hbs +++ /dev/null @@ -1,6 +0,0 @@ -* -!Dockerfile -!main.py -!source_{{snakeCase name}}_singer -!setup.py -!secrets diff --git a/airbyte-integrations/connector-templates/source-singer/.gitignore.hbs b/airbyte-integrations/connector-templates/source-singer/.gitignore.hbs deleted file mode 100644 index 29fffc6a50cc9..0000000000000 --- a/airbyte-integrations/connector-templates/source-singer/.gitignore.hbs +++ /dev/null @@ -1 +0,0 @@ -NEW_SOURCE_CHECKLIST.md diff --git a/airbyte-integrations/connector-templates/source-singer/Dockerfile b/airbyte-integrations/connector-templates/source-singer/Dockerfile deleted file mode 100644 index 08618b9730936..0000000000000 --- a/airbyte-integrations/connector-templates/source-singer/Dockerfile +++ /dev/null @@ -1,16 +0,0 @@ -FROM python:3.9-slim - -# Bash is installed for more convenient debugging. -RUN apt-get update && apt-get install -y bash && apt-get install -y gcc && rm -rf /var/lib/apt/lists/* - -WORKDIR /airbyte/integration_code -COPY source_{{snakeCase name}}_singer ./source_{{snakeCase name}}_singer -COPY main.py ./ -COPY setup.py ./ -RUN pip install . - -ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] - -LABEL io.airbyte.version=0.1.0 -LABEL io.airbyte.name=airbyte/source-{{dashCase name}} diff --git a/airbyte-integrations/connector-templates/source-singer/README.md.hbs b/airbyte-integrations/connector-templates/source-singer/README.md.hbs deleted file mode 100644 index 92f666a72d265..0000000000000 --- a/airbyte-integrations/connector-templates/source-singer/README.md.hbs +++ /dev/null @@ -1,129 +0,0 @@ -# Source {{capitalCase name}} Singer - -This is the repository for the {{capitalCase name}} source connector, based on a Singer tap. -For information about how to use this connector within Airbyte, see [the User Documentation](https://docs.airbyte.com/integrations/sources/{{dashCase name}}). - -## Local development - -### Prerequisites -**To iterate on this connector, make sure to complete this prerequisites section.** - -#### Minimum Python version required `= 3.9.0` - -#### Build & Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python -m venv .venv -``` - -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt -``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. - -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. - -#### Building via Gradle -From the Airbyte repository root, run: -``` -./gradlew :airbyte-integrations:connectors:source-{{dashCase name}}:build -``` - -#### Create credentials -**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/sources/{{dashCase name}}) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_{{snakeCase name}}_singer/spec.yaml` file. -Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. -See `integration_tests/sample_config.json` for a sample config file. - -**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source {{dashCase name}} test creds` -and place them into `secrets/config.json`. - -### Locally running the connector -``` -python main.py spec -python main.py check --config secrets/config.json -python main.py discover --config secrets/config.json -python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json -``` - -### Locally running the connector docker image - -#### Build -First, make sure you build the latest Docker image: -``` -docker build . -t airbyte/source-{{dashCase name}}-singer:dev -``` - -You can also build the connector image via Gradle: -``` -./gradlew :airbyte-integrations:connectors:source-{{dashCase name}}:airbyteDocker -``` -When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in -the Dockerfile. - -#### Run -Then run any of the connector commands as follows: -``` -docker run --rm airbyte/source-{{dashCase name}}-singer:dev spec -docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-{{dashCase name}}-singer:dev check --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-{{dashCase name}}-singer:dev discover --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-{{dashCase name}}-singer:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json -``` -## Testing - Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. -First install test dependencies into your virtual environment: -``` -pip install .[tests] -``` -### Unit Tests -To run unit tests locally, from the connector directory run: -``` -python -m pytest unit_tests -``` - -### Integration Tests -There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all source connectors) and custom integration tests (which are specific to this connector). -#### Custom Integration tests -Place custom tests inside `integration_tests/` folder, then, from the connector root, run -``` -python -m pytest integration_tests -``` -#### Acceptance Tests -Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. -If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. -To run your integration tests with acceptance tests, from the connector root, run -``` -python -m pytest integration_tests -p integration_tests.acceptance -``` -To run your integration tests with docker - -### Using gradle to run tests -All commands should be run from airbyte project root. -To run unit tests: -``` -./gradlew :airbyte-integrations:connectors:source-{{dashCase name}}-singer:check -``` -To run acceptance and custom integration tests: -``` -./gradlew :airbyte-integrations:connectors:source-{{dashCase name}}-singer:integrationTest -``` - -## Dependency Management -All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. -We split dependencies between two groups, dependencies that are: -* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. -* required for the testing need to go to `TEST_REQUIREMENTS` list - -### Publishing a new version of the connector -You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? -1. Make sure your changes are passing unit and integration tests. -1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). -1. Create a Pull Request. -1. Pat yourself on the back for being an awesome contributor. -1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connector-templates/source-singer/integration_tests/acceptance.py b/airbyte-integrations/connector-templates/source-singer/integration_tests/acceptance.py deleted file mode 100644 index 43ce950d77caa..0000000000000 --- a/airbyte-integrations/connector-templates/source-singer/integration_tests/acceptance.py +++ /dev/null @@ -1,16 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -import pytest - -pytest_plugins = ("connector_acceptance_test.plugin",) - - -@pytest.fixture(scope="session", autouse=True) -def connector_setup(): - """This fixture is a placeholder for external resources that acceptance test might require.""" - # TODO: setup test dependencies - yield - # TODO: clean up test dependencies diff --git a/airbyte-integrations/connector-templates/source-singer/integration_tests/configured_catalog.json b/airbyte-integrations/connector-templates/source-singer/integration_tests/configured_catalog.json deleted file mode 100644 index b999c2ba3abf7..0000000000000 --- a/airbyte-integrations/connector-templates/source-singer/integration_tests/configured_catalog.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "streams": [ - { - "stream": { - "name": "table_name", - "json_schema": {}, - "supported_sync_modes": ["full_refresh"], - "source_defined_cursor": false, - "default_cursor_field": ["column_name"] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" - } - ] -} diff --git a/airbyte-integrations/connector-templates/source-singer/integration_tests/invalid_config.json b/airbyte-integrations/connector-templates/source-singer/integration_tests/invalid_config.json deleted file mode 100644 index f3732995784f2..0000000000000 --- a/airbyte-integrations/connector-templates/source-singer/integration_tests/invalid_config.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "todo-wrong-field": "this should be an incomplete config file, used in standard tests" -} diff --git a/airbyte-integrations/connector-templates/source-singer/integration_tests/sample_config.json b/airbyte-integrations/connector-templates/source-singer/integration_tests/sample_config.json deleted file mode 100644 index ecc4913b84c74..0000000000000 --- a/airbyte-integrations/connector-templates/source-singer/integration_tests/sample_config.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "fix-me": "TODO" -} diff --git a/airbyte-integrations/connector-templates/source-singer/main.py.hbs b/airbyte-integrations/connector-templates/source-singer/main.py.hbs deleted file mode 100644 index b3553b4ee38ba..0000000000000 --- a/airbyte-integrations/connector-templates/source-singer/main.py.hbs +++ /dev/null @@ -1,14 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -import sys - -from airbyte_cdk.entrypoint import launch - -from source_{{snakeCase name}}_singer import Source{{properCase name}}Singer - -if __name__ == "__main__": - source = Source{{properCase name}}Singer() - launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connector-templates/source-singer/metadata.yaml.hbs b/airbyte-integrations/connector-templates/source-singer/metadata.yaml.hbs deleted file mode 100644 index f20c375e60657..0000000000000 --- a/airbyte-integrations/connector-templates/source-singer/metadata.yaml.hbs +++ /dev/null @@ -1,25 +0,0 @@ -data: - allowedHosts: - hosts: - - TODO # Please change to the hostname of the source. - registries: - oss: - enabled: true - cloud: - enabled: false - connectorSubtype: api - connectorType: source - definitionId: {{generateDefinitionId}} - dockerImageTag: 0.1.0 - dockerRepository: airbyte/source-{{dashCase name}} - githubIssueLabel: source-{{dashCase name}} - icon: {{dashCase name}}.svg - license: MIT - name: {{capitalCase name}} - releaseDate: TODO - supportLevel: community - releaseStage: alpha - documentationUrl: https://docs.airbyte.com/integrations/sources/{{dashCase name}} - tags: - - language:python -metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connector-templates/source-singer/requirements.txt.hbs b/airbyte-integrations/connector-templates/source-singer/requirements.txt.hbs deleted file mode 100644 index 7b9114ed5867e..0000000000000 --- a/airbyte-integrations/connector-templates/source-singer/requirements.txt.hbs +++ /dev/null @@ -1,2 +0,0 @@ -# This file is autogenerated -- only edit if you know what you are doing. Use setup.py for declaring dependencies. --e . diff --git a/airbyte-integrations/connector-templates/source-singer/secrets/config.json.hbs b/airbyte-integrations/connector-templates/source-singer/secrets/config.json.hbs deleted file mode 100644 index f5f8933895aae..0000000000000 --- a/airbyte-integrations/connector-templates/source-singer/secrets/config.json.hbs +++ /dev/null @@ -1,3 +0,0 @@ -{ - "fix-me": "TODO populate with needed configuration for integration tests or delete this file and any references to it. The schema of this file should match what is in your spec.yaml" -} diff --git a/airbyte-integrations/connector-templates/source-singer/setup.py.hbs b/airbyte-integrations/connector-templates/source-singer/setup.py.hbs deleted file mode 100644 index adfdf1ecdbf6b..0000000000000 --- a/airbyte-integrations/connector-templates/source-singer/setup.py.hbs +++ /dev/null @@ -1,28 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = [ - "{{ tap_name }}", - "airbyte-cdk~=0.1.56", -] - -TEST_REQUIREMENTS = [ - "pytest~=6.2", -] - -setup( - name="source_{{snakeCase name}}_singer", - description="Source implementation for {{capitalCase name}}, built on the Singer tap implementation.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={"": ["*.json", "*.yaml"]}, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connector-templates/source-singer/source_{{snakeCase name}}_singer/__init__.py.hbs b/airbyte-integrations/connector-templates/source-singer/source_{{snakeCase name}}_singer/__init__.py.hbs deleted file mode 100644 index a5ec1bbc8309c..0000000000000 --- a/airbyte-integrations/connector-templates/source-singer/source_{{snakeCase name}}_singer/__init__.py.hbs +++ /dev/null @@ -1,8 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from .source import Source{{properCase name}}Singer - -__all__ = ["Source{{properCase name}}Singer"] diff --git a/airbyte-integrations/connector-templates/source-singer/source_{{snakeCase name}}_singer/source.py.hbs b/airbyte-integrations/connector-templates/source-singer/source_{{snakeCase name}}_singer/source.py.hbs deleted file mode 100644 index d73445fac3061..0000000000000 --- a/airbyte-integrations/connector-templates/source-singer/source_{{snakeCase name}}_singer/source.py.hbs +++ /dev/null @@ -1,50 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -import logging - -from airbyte_cdk.models import AirbyteConnectionStatus, Status -from airbyte_cdk.sources.singer import ConfigContainer, SingerSource - - -class Source{{properCase name}}Singer(SingerSource): - TAP_CMD = "{{ tap_name }}" - - def check_config(self, logger: logging.Logger, config_path: str, config: ConfigContainer) -> AirbyteConnectionStatus: - """ - Tests if the input configuration can be used to successfully connect to the integration - e.g: if a provided Stripe API token can be used to connect to the Stripe API. - - :param logger: Logging object to display debug/info/error to the logs - (logs will not be accessible via airbyte UI if they are not passed to this logger) - :param config_path: Path to the file containing the configuration json config - :param config: Json object containing the configuration of this source, content of this json is as specified in - the properties of the spec.yaml file - - :return: AirbyteConnectionStatus indicating a Success or Failure - """ - try: - # TODO Not Implemented - - return AirbyteConnectionStatus(status=Status.SUCCEEDED) - except Exception as e: - return AirbyteConnectionStatus(status=Status.FAILED, message=f"An exception occurred: {str(e)}") - - def discover_cmd(self, logger: logging.Logger, config_path: str) -> str: - """ - Return the string commands to invoke the tap with the --discover flag and the right configuration options - """ - # TODO update the command below if needed. Otherwise you're good to go - return f"{self.TAP_CMD} -c {config_path} --discover" - - def read_cmd(self, logger: logging.Logger, config_path: str, catalog_path: str, state_path: str = None) -> str: - """ - Return the string commands to invoke the tap with the right configuration options to read data from the source - """ - # TODO update the command below if needed. Otherwise you're good to go - config_option = f"--config {config_path}" - properties_option = f"--properties {catalog_path}" - state_option = f"--state {state_path}" if state_path else "" - return f"{self.TAP_CMD} {config_option} {properties_option} {state_option}" diff --git a/airbyte-integrations/connector-templates/source-singer/source_{{snakeCase name}}_singer/spec.yaml.hbs b/airbyte-integrations/connector-templates/source-singer/source_{{snakeCase name}}_singer/spec.yaml.hbs deleted file mode 100644 index 9525c8f90c61a..0000000000000 --- a/airbyte-integrations/connector-templates/source-singer/source_{{snakeCase name}}_singer/spec.yaml.hbs +++ /dev/null @@ -1,12 +0,0 @@ -documentationUrl: https://docs.airbyte.com/integrations/sources/{{snakeCase name}} -connectionSpecification: - $schema: http://json-schema.org/draft-07/schema# - title: Source {{capitalCase name}} Singer Spec - type: object - required: - - TODO - properties: - # TODO -- add all the properties required to configure this tap e.g: username, password, api token, etc. - TODO: - type: string - description: describe me diff --git a/airbyte-integrations/connector-templates/source-singer/unit_tests/unit_test.py.hbs b/airbyte-integrations/connector-templates/source-singer/unit_tests/unit_test.py.hbs deleted file mode 100644 index 219ae0142c724..0000000000000 --- a/airbyte-integrations/connector-templates/source-singer/unit_tests/unit_test.py.hbs +++ /dev/null @@ -1,7 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -def test_example_method(): - assert True diff --git a/airbyte-integrations/connectors-performance/destination-harness/build.gradle b/airbyte-integrations/connectors-performance/destination-harness/build.gradle index fd6bb54d0aed3..ab1d3ce7c0990 100644 --- a/airbyte-integrations/connectors-performance/destination-harness/build.gradle +++ b/airbyte-integrations/connectors-performance/destination-harness/build.gradle @@ -8,7 +8,7 @@ application { } dependencies { - implementation project(':airbyte-cdk:java:airbyte-cdk:dependencies') + implementation project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-dependencies') implementation 'io.fabric8:kubernetes-client:5.12.2' implementation 'org.apache.commons:commons-lang3:3.11' diff --git a/airbyte-integrations/connectors-performance/destination-harness/src/main/java/io/airbyte/integrations/destination_performance/PerformanceHarness.java b/airbyte-integrations/connectors-performance/destination-harness/src/main/java/io/airbyte/integrations/destination_performance/PerformanceHarness.java index 2d62460909bc8..91dd067c36a70 100644 --- a/airbyte-integrations/connectors-performance/destination-harness/src/main/java/io/airbyte/integrations/destination_performance/PerformanceHarness.java +++ b/airbyte-integrations/connectors-performance/destination-harness/src/main/java/io/airbyte/integrations/destination_performance/PerformanceHarness.java @@ -145,7 +145,7 @@ void runTest() throws Exception { log.info("End of datasource after {} lines", counter); break; } - final List row; + final List row; try { row = Arrays.asList(pattern.split(line)); } catch (final NullPointerException npe) { @@ -226,7 +226,7 @@ private AirbyteIntegrationLauncher getAirbyteIntegrationLauncher() throws Unknow allowedHosts, false, new EnvVariableFeatureFlags()); } - private String buildRecordString(final List columns, final List row) { + private String buildRecordString(final List columns, final List row) { final StringBuilder sb = new StringBuilder(); sb.append("{"); final Iterator rowIterator = row.iterator(); diff --git a/airbyte-integrations/connectors-performance/source-harness/build.gradle b/airbyte-integrations/connectors-performance/source-harness/build.gradle index 667e5af0a9a5f..4ff2afdcfb0bd 100644 --- a/airbyte-integrations/connectors-performance/source-harness/build.gradle +++ b/airbyte-integrations/connectors-performance/source-harness/build.gradle @@ -8,7 +8,7 @@ application { } dependencies { - implementation project(':airbyte-cdk:java:airbyte-cdk:dependencies') + implementation project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-dependencies') implementation 'io.fabric8:kubernetes-client:5.12.2' implementation 'org.apache.commons:commons-lang3:3.11' diff --git a/airbyte-integrations/connectors/destination-astra/README.md b/airbyte-integrations/connectors/destination-astra/README.md index 2fa995b22593a..94fea87af407e 100644 --- a/airbyte-integrations/connectors/destination-astra/README.md +++ b/airbyte-integrations/connectors/destination-astra/README.md @@ -10,24 +10,11 @@ For information about how to use this connector within Airbyte, see [the documen #### Minimum Python version required `= 3.9.0` -#### Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python -m venv .venv -``` - -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt +### Installing the connector +From this connector directory, run: +```bash +poetry install --with dev ``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. - -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. #### Create credentials @@ -43,8 +30,7 @@ and place them into `secrets/config.json`. ``` python main.py spec python main.py check --config secrets/config.json -python main.py discover --config secrets/config.json -python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +python main.py write --config secrets/config.json --catalog integration_tests/configured_catalog.json ``` ### Locally running the connector docker image @@ -118,17 +104,15 @@ docker run --rm airbyte/destination-astra:dev spec docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-astra:dev check --config /secrets/config.json # messages.jsonl is a file containing line-separated JSON representing AirbyteMessages cat messages.jsonl | docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-astra:dev write --config /secrets/config.json --catalog /integration_tests/configured_catalog.json -``` + + ## Testing Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. -First install test dependencies into your virtual environment: -``` -pip install .[tests] -``` + ### Unit Tests To run unit tests locally, from the connector directory run: ``` -python -m pytest unit_tests +poetry run pytest -s unit_tests ``` ### Integration Tests @@ -136,7 +120,7 @@ There are two types of integration tests: Acceptance Tests (Airbyte's test suite #### Custom Integration tests Place custom tests inside `integration_tests/` folder, then, from the connector root, run ``` -python -m pytest integration_tests +poetry run pytest -s integration_tests ``` #### Acceptance Tests Coming soon: diff --git a/airbyte-integrations/connectors/destination-astra/metadata.yaml b/airbyte-integrations/connectors/destination-astra/metadata.yaml index ffd7abf1f6589..4b539f25105d3 100644 --- a/airbyte-integrations/connectors/destination-astra/metadata.yaml +++ b/airbyte-integrations/connectors/destination-astra/metadata.yaml @@ -15,7 +15,7 @@ data: connectorSubtype: database connectorType: destination definitionId: 042ce96f-1158-4662-9543-e2ff015be97a - dockerImageTag: 0.1.1 + dockerImageTag: 0.1.2 dockerRepository: airbyte/destination-astra githubIssueLabel: destination-astra icon: astra.svg diff --git a/airbyte-integrations/connectors/destination-astra/poetry.lock b/airbyte-integrations/connectors/destination-astra/poetry.lock new file mode 100644 index 0000000000000..747a931557379 --- /dev/null +++ b/airbyte-integrations/connectors/destination-astra/poetry.lock @@ -0,0 +1,2923 @@ +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. + +[[package]] +name = "aiohttp" +version = "3.9.4" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "aiohttp-3.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:76d32588ef7e4a3f3adff1956a0ba96faabbdee58f2407c122dd45aa6e34f372"}, + {file = "aiohttp-3.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:56181093c10dbc6ceb8a29dfeea1e815e1dfdc020169203d87fd8d37616f73f9"}, + {file = "aiohttp-3.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7a5b676d3c65e88b3aca41816bf72831898fcd73f0cbb2680e9d88e819d1e4d"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1df528a85fb404899d4207a8d9934cfd6be626e30e5d3a5544a83dbae6d8a7e"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f595db1bceabd71c82e92df212dd9525a8a2c6947d39e3c994c4f27d2fe15b11"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c0b09d76e5a4caac3d27752027fbd43dc987b95f3748fad2b924a03fe8632ad"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:689eb4356649ec9535b3686200b231876fb4cab4aca54e3bece71d37f50c1d13"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3666cf4182efdb44d73602379a66f5fdfd5da0db5e4520f0ac0dcca644a3497"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b65b0f8747b013570eea2f75726046fa54fa8e0c5db60f3b98dd5d161052004a"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1885d2470955f70dfdd33a02e1749613c5a9c5ab855f6db38e0b9389453dce7"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0593822dcdb9483d41f12041ff7c90d4d1033ec0e880bcfaf102919b715f47f1"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:47f6eb74e1ecb5e19a78f4a4228aa24df7fbab3b62d4a625d3f41194a08bd54f"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c8b04a3dbd54de6ccb7604242fe3ad67f2f3ca558f2d33fe19d4b08d90701a89"}, + {file = "aiohttp-3.9.4-cp310-cp310-win32.whl", hash = "sha256:8a78dfb198a328bfb38e4308ca8167028920fb747ddcf086ce706fbdd23b2926"}, + {file = "aiohttp-3.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:e78da6b55275987cbc89141a1d8e75f5070e577c482dd48bd9123a76a96f0bbb"}, + {file = "aiohttp-3.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c111b3c69060d2bafc446917534150fd049e7aedd6cbf21ba526a5a97b4402a5"}, + {file = "aiohttp-3.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:efbdd51872cf170093998c87ccdf3cb5993add3559341a8e5708bcb311934c94"}, + {file = "aiohttp-3.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7bfdb41dc6e85d8535b00d73947548a748e9534e8e4fddd2638109ff3fb081df"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bd9d334412961125e9f68d5b73c1d0ab9ea3f74a58a475e6b119f5293eee7ba"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:35d78076736f4a668d57ade00c65d30a8ce28719d8a42471b2a06ccd1a2e3063"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:824dff4f9f4d0f59d0fa3577932ee9a20e09edec8a2f813e1d6b9f89ced8293f"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52b8b4e06fc15519019e128abedaeb56412b106ab88b3c452188ca47a25c4093"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eae569fb1e7559d4f3919965617bb39f9e753967fae55ce13454bec2d1c54f09"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:69b97aa5792428f321f72aeb2f118e56893371f27e0b7d05750bcad06fc42ca1"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4d79aad0ad4b980663316f26d9a492e8fab2af77c69c0f33780a56843ad2f89e"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:d6577140cd7db19e430661e4b2653680194ea8c22c994bc65b7a19d8ec834403"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:9860d455847cd98eb67897f5957b7cd69fbcb436dd3f06099230f16a66e66f79"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:69ff36d3f8f5652994e08bd22f093e11cfd0444cea310f92e01b45a4e46b624e"}, + {file = "aiohttp-3.9.4-cp311-cp311-win32.whl", hash = "sha256:e27d3b5ed2c2013bce66ad67ee57cbf614288bda8cdf426c8d8fe548316f1b5f"}, + {file = "aiohttp-3.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d6a67e26daa686a6fbdb600a9af8619c80a332556245fa8e86c747d226ab1a1e"}, + {file = "aiohttp-3.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c5ff8ff44825736a4065d8544b43b43ee4c6dd1530f3a08e6c0578a813b0aa35"}, + {file = "aiohttp-3.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d12a244627eba4e9dc52cbf924edef905ddd6cafc6513849b4876076a6f38b0e"}, + {file = "aiohttp-3.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dcad56c8d8348e7e468899d2fb3b309b9bc59d94e6db08710555f7436156097f"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f7e69a7fd4b5ce419238388e55abd220336bd32212c673ceabc57ccf3d05b55"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4870cb049f10d7680c239b55428916d84158798eb8f353e74fa2c98980dcc0b"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2feaf1b7031ede1bc0880cec4b0776fd347259a723d625357bb4b82f62687b"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:939393e8c3f0a5bcd33ef7ace67680c318dc2ae406f15e381c0054dd658397de"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d2334e387b2adcc944680bebcf412743f2caf4eeebd550f67249c1c3696be04"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e0198ea897680e480845ec0ffc5a14e8b694e25b3f104f63676d55bf76a82f1a"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e40d2cd22914d67c84824045861a5bb0fb46586b15dfe4f046c7495bf08306b2"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:aba80e77c227f4234aa34a5ff2b6ff30c5d6a827a91d22ff6b999de9175d71bd"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:fb68dc73bc8ac322d2e392a59a9e396c4f35cb6fdbdd749e139d1d6c985f2527"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f3460a92638dce7e47062cf088d6e7663adb135e936cb117be88d5e6c48c9d53"}, + {file = "aiohttp-3.9.4-cp312-cp312-win32.whl", hash = "sha256:32dc814ddbb254f6170bca198fe307920f6c1308a5492f049f7f63554b88ef36"}, + {file = "aiohttp-3.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:63f41a909d182d2b78fe3abef557fcc14da50c7852f70ae3be60e83ff64edba5"}, + {file = "aiohttp-3.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c3770365675f6be220032f6609a8fbad994d6dcf3ef7dbcf295c7ee70884c9af"}, + {file = "aiohttp-3.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:305edae1dea368ce09bcb858cf5a63a064f3bff4767dec6fa60a0cc0e805a1d3"}, + {file = "aiohttp-3.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6f121900131d116e4a93b55ab0d12ad72573f967b100e49086e496a9b24523ea"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b71e614c1ae35c3d62a293b19eface83d5e4d194e3eb2fabb10059d33e6e8cbf"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:419f009fa4cfde4d16a7fc070d64f36d70a8d35a90d71aa27670bba2be4fd039"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b39476ee69cfe64061fd77a73bf692c40021f8547cda617a3466530ef63f947"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b33f34c9c7decdb2ab99c74be6443942b730b56d9c5ee48fb7df2c86492f293c"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c78700130ce2dcebb1a8103202ae795be2fa8c9351d0dd22338fe3dac74847d9"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:268ba22d917655d1259af2d5659072b7dc11b4e1dc2cb9662fdd867d75afc6a4"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:17e7c051f53a0d2ebf33013a9cbf020bb4e098c4bc5bce6f7b0c962108d97eab"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:7be99f4abb008cb38e144f85f515598f4c2c8932bf11b65add0ff59c9c876d99"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:d58a54d6ff08d2547656356eea8572b224e6f9bbc0cf55fa9966bcaac4ddfb10"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7673a76772bda15d0d10d1aa881b7911d0580c980dbd16e59d7ba1422b2d83cd"}, + {file = "aiohttp-3.9.4-cp38-cp38-win32.whl", hash = "sha256:e4370dda04dc8951012f30e1ce7956a0a226ac0714a7b6c389fb2f43f22a250e"}, + {file = "aiohttp-3.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:eb30c4510a691bb87081192a394fb661860e75ca3896c01c6d186febe7c88530"}, + {file = "aiohttp-3.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:84e90494db7df3be5e056f91412f9fa9e611fbe8ce4aaef70647297f5943b276"}, + {file = "aiohttp-3.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7d4845f8501ab28ebfdbeab980a50a273b415cf69e96e4e674d43d86a464df9d"}, + {file = "aiohttp-3.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:69046cd9a2a17245c4ce3c1f1a4ff8c70c7701ef222fce3d1d8435f09042bba1"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b73a06bafc8dcc508420db43b4dd5850e41e69de99009d0351c4f3007960019"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:418bb0038dfafeac923823c2e63226179976c76f981a2aaad0ad5d51f2229bca"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:71a8f241456b6c2668374d5d28398f8e8cdae4cce568aaea54e0f39359cd928d"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:935c369bf8acc2dc26f6eeb5222768aa7c62917c3554f7215f2ead7386b33748"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74e4e48c8752d14ecfb36d2ebb3d76d614320570e14de0a3aa7a726ff150a03c"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:916b0417aeddf2c8c61291238ce25286f391a6acb6f28005dd9ce282bd6311b6"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9b6787b6d0b3518b2ee4cbeadd24a507756ee703adbac1ab6dc7c4434b8c572a"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:221204dbda5ef350e8db6287937621cf75e85778b296c9c52260b522231940ed"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:10afd99b8251022ddf81eaed1d90f5a988e349ee7d779eb429fb07b670751e8c"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2506d9f7a9b91033201be9ffe7d89c6a54150b0578803cce5cb84a943d075bc3"}, + {file = "aiohttp-3.9.4-cp39-cp39-win32.whl", hash = "sha256:e571fdd9efd65e86c6af2f332e0e95dad259bfe6beb5d15b3c3eca3a6eb5d87b"}, + {file = "aiohttp-3.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:7d29dd5319d20aa3b7749719ac9685fbd926f71ac8c77b2477272725f882072d"}, + {file = "aiohttp-3.9.4.tar.gz", hash = "sha256:6ff71ede6d9a5a58cfb7b6fffc83ab5d4a63138276c771ac91ceaaddf5459644"}, +] + +[package.dependencies] +aiosignal = ">=1.1.2" +async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} +attrs = ">=17.3.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +yarl = ">=1.0,<2.0" + +[package.extras] +speedups = ["Brotli", "aiodns", "brotlicffi"] + +[[package]] +name = "aiosignal" +version = "1.3.1" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = false +python-versions = ">=3.7" +files = [ + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, + {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" + +[[package]] +name = "airbyte-cdk" +version = "0.81.6" +description = "A framework for writing Airbyte Connectors." +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "airbyte_cdk-0.81.6-py3-none-any.whl", hash = "sha256:456a301edae4f8c99b77d89aaa2016827c43acfd7f9fb61e7a961760b954695b"}, + {file = "airbyte_cdk-0.81.6.tar.gz", hash = "sha256:3d4e2f3dc8177a109cb70e1d701583c870e2c31168dad7498cd2c84b0cc34637"}, +] + +[package.dependencies] +airbyte-protocol-models = "*" +backoff = "*" +cachetools = "*" +cohere = {version = "4.21", optional = true, markers = "extra == \"vector-db-based\""} +Deprecated = ">=1.2,<1.3" +dpath = ">=2.0.1,<2.1.0" +genson = "1.2.2" +isodate = ">=0.6.1,<0.7.0" +Jinja2 = ">=3.1.2,<3.2.0" +jsonref = ">=0.2,<0.3" +jsonschema = ">=3.2.0,<3.3.0" +langchain = {version = "0.1.16", optional = true, markers = "extra == \"vector-db-based\""} +langchain_core = "0.1.42" +openai = {version = "0.27.9", extras = ["embeddings"], optional = true, markers = "extra == \"vector-db-based\""} +pendulum = "<3.0.0" +pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" +python-dateutil = "*" +PyYAML = ">=6.0.1,<7.0.0" +requests = "*" +requests_cache = "*" +tiktoken = {version = "0.4.0", optional = true, markers = "extra == \"vector-db-based\""} +wcmatch = "8.4" + +[package.extras] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] +vector-db-based = ["cohere (==4.21)", "langchain (==0.1.16)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] + +[[package]] +name = "airbyte-protocol-models" +version = "0.9.0" +description = "Declares the Airbyte Protocol." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte_protocol_models-0.9.0-py3-none-any.whl", hash = "sha256:e972e140b5efd1edad5a338bcae8fdee9fc12545caf2c321e0f61b151c163a9b"}, + {file = "airbyte_protocol_models-0.9.0.tar.gz", hash = "sha256:40b69c33df23fe82d7078e84beb123bd604480e4d73cb277a890fcc92aedc8d2"}, +] + +[package.dependencies] +pydantic = ">=1.9.2,<2.0.0" + +[[package]] +name = "async-timeout" +version = "4.0.3" +description = "Timeout context manager for asyncio programs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, +] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "bracex" +version = "2.4" +description = "Bash style brace expander." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + +[[package]] +name = "cachetools" +version = "5.3.3" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "cohere" +version = "4.21" +description = "" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "cohere-4.21-py3-none-any.whl", hash = "sha256:5eb81db62e78b3156e734421cc3e657054f9d9f1d68b9f38cf48fe3a8ae40dbc"}, + {file = "cohere-4.21.tar.gz", hash = "sha256:f611438f409dfc5d5a0a153a585349f5a80b169c7102b5994d9999ecf8440866"}, +] + +[package.dependencies] +aiohttp = ">=3.0,<4.0" +backoff = ">=2.0,<3.0" +fastavro = {version = "1.8.2", markers = "python_version >= \"3.8\""} +importlib_metadata = ">=6.0,<7.0" +requests = ">=2.25.0,<3.0.0" +urllib3 = ">=1.26,<3" + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "contourpy" +version = "1.2.1" +description = "Python library for calculating contours of 2D quadrilateral grids" +optional = false +python-versions = ">=3.9" +files = [ + {file = "contourpy-1.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bd7c23df857d488f418439686d3b10ae2fbf9bc256cd045b37a8c16575ea1040"}, + {file = "contourpy-1.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5b9eb0ca724a241683c9685a484da9d35c872fd42756574a7cfbf58af26677fd"}, + {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c75507d0a55378240f781599c30e7776674dbaf883a46d1c90f37e563453480"}, + {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11959f0ce4a6f7b76ec578576a0b61a28bdc0696194b6347ba3f1c53827178b9"}, + {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eb3315a8a236ee19b6df481fc5f997436e8ade24a9f03dfdc6bd490fea20c6da"}, + {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39f3ecaf76cd98e802f094e0d4fbc6dc9c45a8d0c4d185f0f6c2234e14e5f75b"}, + {file = "contourpy-1.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:94b34f32646ca0414237168d68a9157cb3889f06b096612afdd296003fdd32fd"}, + {file = "contourpy-1.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:457499c79fa84593f22454bbd27670227874cd2ff5d6c84e60575c8b50a69619"}, + {file = "contourpy-1.2.1-cp310-cp310-win32.whl", hash = "sha256:ac58bdee53cbeba2ecad824fa8159493f0bf3b8ea4e93feb06c9a465d6c87da8"}, + {file = "contourpy-1.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:9cffe0f850e89d7c0012a1fb8730f75edd4320a0a731ed0c183904fe6ecfc3a9"}, + {file = "contourpy-1.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6022cecf8f44e36af10bd9118ca71f371078b4c168b6e0fab43d4a889985dbb5"}, + {file = "contourpy-1.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ef5adb9a3b1d0c645ff694f9bca7702ec2c70f4d734f9922ea34de02294fdf72"}, + {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6150ffa5c767bc6332df27157d95442c379b7dce3a38dff89c0f39b63275696f"}, + {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c863140fafc615c14a4bf4efd0f4425c02230eb8ef02784c9a156461e62c965"}, + {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:00e5388f71c1a0610e6fe56b5c44ab7ba14165cdd6d695429c5cd94021e390b2"}, + {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4492d82b3bc7fbb7e3610747b159869468079fe149ec5c4d771fa1f614a14df"}, + {file = "contourpy-1.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:49e70d111fee47284d9dd867c9bb9a7058a3c617274900780c43e38d90fe1205"}, + {file = "contourpy-1.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b59c0ffceff8d4d3996a45f2bb6f4c207f94684a96bf3d9728dbb77428dd8cb8"}, + {file = "contourpy-1.2.1-cp311-cp311-win32.whl", hash = "sha256:7b4182299f251060996af5249c286bae9361fa8c6a9cda5efc29fe8bfd6062ec"}, + {file = "contourpy-1.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2855c8b0b55958265e8b5888d6a615ba02883b225f2227461aa9127c578a4922"}, + {file = "contourpy-1.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:62828cada4a2b850dbef89c81f5a33741898b305db244904de418cc957ff05dc"}, + {file = "contourpy-1.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:309be79c0a354afff9ff7da4aaed7c3257e77edf6c1b448a779329431ee79d7e"}, + {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e785e0f2ef0d567099b9ff92cbfb958d71c2d5b9259981cd9bee81bd194c9a4"}, + {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1cac0a8f71a041aa587410424ad46dfa6a11f6149ceb219ce7dd48f6b02b87a7"}, + {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af3f4485884750dddd9c25cb7e3915d83c2db92488b38ccb77dd594eac84c4a0"}, + {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ce6889abac9a42afd07a562c2d6d4b2b7134f83f18571d859b25624a331c90b"}, + {file = "contourpy-1.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a1eea9aecf761c661d096d39ed9026574de8adb2ae1c5bd7b33558af884fb2ce"}, + {file = "contourpy-1.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:187fa1d4c6acc06adb0fae5544c59898ad781409e61a926ac7e84b8f276dcef4"}, + {file = "contourpy-1.2.1-cp312-cp312-win32.whl", hash = "sha256:c2528d60e398c7c4c799d56f907664673a807635b857df18f7ae64d3e6ce2d9f"}, + {file = "contourpy-1.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:1a07fc092a4088ee952ddae19a2b2a85757b923217b7eed584fdf25f53a6e7ce"}, + {file = "contourpy-1.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bb6834cbd983b19f06908b45bfc2dad6ac9479ae04abe923a275b5f48f1a186b"}, + {file = "contourpy-1.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1d59e739ab0e3520e62a26c60707cc3ab0365d2f8fecea74bfe4de72dc56388f"}, + {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd3db01f59fdcbce5b22afad19e390260d6d0222f35a1023d9adc5690a889364"}, + {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a12a813949e5066148712a0626895c26b2578874e4cc63160bb007e6df3436fe"}, + {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe0ccca550bb8e5abc22f530ec0466136379c01321fd94f30a22231e8a48d985"}, + {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1d59258c3c67c865435d8fbeb35f8c59b8bef3d6f46c1f29f6123556af28445"}, + {file = "contourpy-1.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f32c38afb74bd98ce26de7cc74a67b40afb7b05aae7b42924ea990d51e4dac02"}, + {file = "contourpy-1.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d31a63bc6e6d87f77d71e1abbd7387ab817a66733734883d1fc0021ed9bfa083"}, + {file = "contourpy-1.2.1-cp39-cp39-win32.whl", hash = "sha256:ddcb8581510311e13421b1f544403c16e901c4e8f09083c881fab2be80ee31ba"}, + {file = "contourpy-1.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:10a37ae557aabf2509c79715cd20b62e4c7c28b8cd62dd7d99e5ed3ce28c3fd9"}, + {file = "contourpy-1.2.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a31f94983fecbac95e58388210427d68cd30fe8a36927980fab9c20062645609"}, + {file = "contourpy-1.2.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef2b055471c0eb466033760a521efb9d8a32b99ab907fc8358481a1dd29e3bd3"}, + {file = "contourpy-1.2.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b33d2bc4f69caedcd0a275329eb2198f560b325605810895627be5d4b876bf7f"}, + {file = "contourpy-1.2.1.tar.gz", hash = "sha256:4d8908b3bee1c889e547867ca4cdc54e5ab6be6d3e078556814a22457f49423c"}, +] + +[package.dependencies] +numpy = ">=1.20" + +[package.extras] +bokeh = ["bokeh", "selenium"] +docs = ["furo", "sphinx (>=7.2)", "sphinx-copybutton"] +mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.8.0)", "types-Pillow"] +test = ["Pillow", "contourpy[test-no-images]", "matplotlib"] +test-no-images = ["pytest", "pytest-cov", "pytest-xdist", "wurlitzer"] + +[[package]] +name = "cycler" +version = "0.12.1" +description = "Composable style cycles" +optional = false +python-versions = ">=3.8" +files = [ + {file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"}, + {file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"}, +] + +[package.extras] +docs = ["ipython", "matplotlib", "numpydoc", "sphinx"] +tests = ["pytest", "pytest-cov", "pytest-xdist"] + +[[package]] +name = "dataclasses-json" +version = "0.6.4" +description = "Easily serialize dataclasses to and from JSON." +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "dataclasses_json-0.6.4-py3-none-any.whl", hash = "sha256:f90578b8a3177f7552f4e1a6e535e84293cd5da421fcce0642d49c0d7bdf8df2"}, + {file = "dataclasses_json-0.6.4.tar.gz", hash = "sha256:73696ebf24936560cca79a2430cbc4f3dd23ac7bf46ed17f38e5e5e7657a6377"}, +] + +[package.dependencies] +marshmallow = ">=3.18.0,<4.0.0" +typing-inspect = ">=0.4.0,<1" + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "dpath" +version = "2.0.8" +description = "Filesystem-like pathing and searching for dictionaries" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, + {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, +] + +[[package]] +name = "et-xmlfile" +version = "1.1.0" +description = "An implementation of lxml.xmlfile for the standard library" +optional = false +python-versions = ">=3.6" +files = [ + {file = "et_xmlfile-1.1.0-py3-none-any.whl", hash = "sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"}, + {file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "fastavro" +version = "1.8.2" +description = "Fast read/write of AVRO files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fastavro-1.8.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:0e08964b2e9a455d831f2557402a683d4c4d45206f2ab9ade7c69d3dc14e0e58"}, + {file = "fastavro-1.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:401a70b1e5c7161420c6019e0c8afa88f7c8a373468591f5ec37639a903c2509"}, + {file = "fastavro-1.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef1ed3eaa4240c05698d02d8d0c010b9a03780eda37b492da6cd4c9d37e04ec"}, + {file = "fastavro-1.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:543185a672ff6306beb329b57a7b8a3a2dd1eb21a5ccc530150623d58d48bb98"}, + {file = "fastavro-1.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ffbf8bae1edb50fe7beeffc3afa8e684686550c2e5d31bf01c25cfa213f581e1"}, + {file = "fastavro-1.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:bb545eb9d876bc7b785e27e98e7720ada7eee7d7a1729798d2ed51517f13500a"}, + {file = "fastavro-1.8.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2b837d3038c651046252bc92c1b9899bf21c7927a148a1ff89599c36c2a331ca"}, + {file = "fastavro-1.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3510e96c0a47e4e914bd1a29c954eb662bfa24849ad92e597cb97cc79f21af7"}, + {file = "fastavro-1.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccc0e74f2c2ab357f39bb73d67fcdb6dc10e23fdbbd399326139f72ec0fb99a3"}, + {file = "fastavro-1.8.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:add51c70d0ab1175601c75cd687bbe9d16ae312cd8899b907aafe0d79ee2bc1d"}, + {file = "fastavro-1.8.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d9e2662f57e6453e9a2c9fb4f54b2a9e62e3e46f5a412ac00558112336d23883"}, + {file = "fastavro-1.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:fea75cf53a93c56dd56e68abce8d314ef877b27451c870cd7ede7582d34c08a7"}, + {file = "fastavro-1.8.2-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:f489020bb8664c2737c03457ad5dbd490579ddab6f0a7b5c17fecfe982715a89"}, + {file = "fastavro-1.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a547625c138efd5e61300119241041906ee8cb426fc7aa789900f87af7ed330d"}, + {file = "fastavro-1.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53beb458f30c9ad4aa7bff4a42243ff990ffb713b6ce0cd9b360cbc3d648fe52"}, + {file = "fastavro-1.8.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7b1b2cbd2dd851452306beed0ab9bdaeeab1cc8ad46f84b47cd81eeaff6dd6b8"}, + {file = "fastavro-1.8.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d29e9baee0b2f37ecd09bde3b487cf900431fd548c85be3e4fe1b9a0b2a917f1"}, + {file = "fastavro-1.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:66e132c710663230292bc63e2cb79cf95b16ccb94a5fc99bb63694b24e312fc5"}, + {file = "fastavro-1.8.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:38aca63ce604039bcdf2edd14912d00287bdbf8b76f9aa42b28e6ca0bf950092"}, + {file = "fastavro-1.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9787835f6449ee94713e7993a700432fce3763024791ffa8a58dc91ef9d1f950"}, + {file = "fastavro-1.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:536cb448bc83811056be02749fd9df37a69621678f02597d272970a769e9b40c"}, + {file = "fastavro-1.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e9d5027cf7d9968f8f819958b41bfedb933323ea6d6a0485eefacaa1afd91f54"}, + {file = "fastavro-1.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:792adfc0c80c7f1109e0ab4b0decef20691fdf0a45091d397a0563872eb56d42"}, + {file = "fastavro-1.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:650b22766259f7dd7519dfa4e4658f0e233c319efa130b9cf0c36a500e09cc57"}, + {file = "fastavro-1.8.2.tar.gz", hash = "sha256:ab9d9226d4b66b6b3d0661a57cd45259b0868fed1c0cd4fac95249b9e0973320"}, +] + +[package.extras] +codecs = ["lz4", "python-snappy", "zstandard"] +lz4 = ["lz4"] +snappy = ["python-snappy"] +zstandard = ["zstandard"] + +[[package]] +name = "fonttools" +version = "4.51.0" +description = "Tools to manipulate font files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fonttools-4.51.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:84d7751f4468dd8cdd03ddada18b8b0857a5beec80bce9f435742abc9a851a74"}, + {file = "fonttools-4.51.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8b4850fa2ef2cfbc1d1f689bc159ef0f45d8d83298c1425838095bf53ef46308"}, + {file = "fonttools-4.51.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5b48a1121117047d82695d276c2af2ee3a24ffe0f502ed581acc2673ecf1037"}, + {file = "fonttools-4.51.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:180194c7fe60c989bb627d7ed5011f2bef1c4d36ecf3ec64daec8302f1ae0716"}, + {file = "fonttools-4.51.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:96a48e137c36be55e68845fc4284533bda2980f8d6f835e26bca79d7e2006438"}, + {file = "fonttools-4.51.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:806e7912c32a657fa39d2d6eb1d3012d35f841387c8fc6cf349ed70b7c340039"}, + {file = "fonttools-4.51.0-cp310-cp310-win32.whl", hash = "sha256:32b17504696f605e9e960647c5f64b35704782a502cc26a37b800b4d69ff3c77"}, + {file = "fonttools-4.51.0-cp310-cp310-win_amd64.whl", hash = "sha256:c7e91abdfae1b5c9e3a543f48ce96013f9a08c6c9668f1e6be0beabf0a569c1b"}, + {file = "fonttools-4.51.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a8feca65bab31479d795b0d16c9a9852902e3a3c0630678efb0b2b7941ea9c74"}, + {file = "fonttools-4.51.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ac27f436e8af7779f0bb4d5425aa3535270494d3bc5459ed27de3f03151e4c2"}, + {file = "fonttools-4.51.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e19bd9e9964a09cd2433a4b100ca7f34e34731e0758e13ba9a1ed6e5468cc0f"}, + {file = "fonttools-4.51.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2b92381f37b39ba2fc98c3a45a9d6383bfc9916a87d66ccb6553f7bdd129097"}, + {file = "fonttools-4.51.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5f6bc991d1610f5c3bbe997b0233cbc234b8e82fa99fc0b2932dc1ca5e5afec0"}, + {file = "fonttools-4.51.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9696fe9f3f0c32e9a321d5268208a7cc9205a52f99b89479d1b035ed54c923f1"}, + {file = "fonttools-4.51.0-cp311-cp311-win32.whl", hash = "sha256:3bee3f3bd9fa1d5ee616ccfd13b27ca605c2b4270e45715bd2883e9504735034"}, + {file = "fonttools-4.51.0-cp311-cp311-win_amd64.whl", hash = "sha256:0f08c901d3866a8905363619e3741c33f0a83a680d92a9f0e575985c2634fcc1"}, + {file = "fonttools-4.51.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4060acc2bfa2d8e98117828a238889f13b6f69d59f4f2d5857eece5277b829ba"}, + {file = "fonttools-4.51.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1250e818b5f8a679ad79660855528120a8f0288f8f30ec88b83db51515411fcc"}, + {file = "fonttools-4.51.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76f1777d8b3386479ffb4a282e74318e730014d86ce60f016908d9801af9ca2a"}, + {file = "fonttools-4.51.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b5ad456813d93b9c4b7ee55302208db2b45324315129d85275c01f5cb7e61a2"}, + {file = "fonttools-4.51.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:68b3fb7775a923be73e739f92f7e8a72725fd333eab24834041365d2278c3671"}, + {file = "fonttools-4.51.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8e2f1a4499e3b5ee82c19b5ee57f0294673125c65b0a1ff3764ea1f9db2f9ef5"}, + {file = "fonttools-4.51.0-cp312-cp312-win32.whl", hash = "sha256:278e50f6b003c6aed19bae2242b364e575bcb16304b53f2b64f6551b9c000e15"}, + {file = "fonttools-4.51.0-cp312-cp312-win_amd64.whl", hash = "sha256:b3c61423f22165541b9403ee39874dcae84cd57a9078b82e1dce8cb06b07fa2e"}, + {file = "fonttools-4.51.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:1621ee57da887c17312acc4b0e7ac30d3a4fb0fec6174b2e3754a74c26bbed1e"}, + {file = "fonttools-4.51.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d9298be7a05bb4801f558522adbe2feea1b0b103d5294ebf24a92dd49b78e5"}, + {file = "fonttools-4.51.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee1af4be1c5afe4c96ca23badd368d8dc75f611887fb0c0dac9f71ee5d6f110e"}, + {file = "fonttools-4.51.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c18b49adc721a7d0b8dfe7c3130c89b8704baf599fb396396d07d4aa69b824a1"}, + {file = "fonttools-4.51.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de7c29bdbdd35811f14493ffd2534b88f0ce1b9065316433b22d63ca1cd21f14"}, + {file = "fonttools-4.51.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cadf4e12a608ef1d13e039864f484c8a968840afa0258b0b843a0556497ea9ed"}, + {file = "fonttools-4.51.0-cp38-cp38-win32.whl", hash = "sha256:aefa011207ed36cd280babfaa8510b8176f1a77261833e895a9d96e57e44802f"}, + {file = "fonttools-4.51.0-cp38-cp38-win_amd64.whl", hash = "sha256:865a58b6e60b0938874af0968cd0553bcd88e0b2cb6e588727117bd099eef836"}, + {file = "fonttools-4.51.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:60a3409c9112aec02d5fb546f557bca6efa773dcb32ac147c6baf5f742e6258b"}, + {file = "fonttools-4.51.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f7e89853d8bea103c8e3514b9f9dc86b5b4120afb4583b57eb10dfa5afbe0936"}, + {file = "fonttools-4.51.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56fc244f2585d6c00b9bcc59e6593e646cf095a96fe68d62cd4da53dd1287b55"}, + {file = "fonttools-4.51.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d145976194a5242fdd22df18a1b451481a88071feadf251221af110ca8f00ce"}, + {file = "fonttools-4.51.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5b8cab0c137ca229433570151b5c1fc6af212680b58b15abd797dcdd9dd5051"}, + {file = "fonttools-4.51.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:54dcf21a2f2d06ded676e3c3f9f74b2bafded3a8ff12f0983160b13e9f2fb4a7"}, + {file = "fonttools-4.51.0-cp39-cp39-win32.whl", hash = "sha256:0118ef998a0699a96c7b28457f15546815015a2710a1b23a7bf6c1be60c01636"}, + {file = "fonttools-4.51.0-cp39-cp39-win_amd64.whl", hash = "sha256:599bdb75e220241cedc6faebfafedd7670335d2e29620d207dd0378a4e9ccc5a"}, + {file = "fonttools-4.51.0-py3-none-any.whl", hash = "sha256:15c94eeef6b095831067f72c825eb0e2d48bb4cea0647c1b05c981ecba2bf39f"}, + {file = "fonttools-4.51.0.tar.gz", hash = "sha256:dc0673361331566d7a663d7ce0f6fdcbfbdc1f59c6e3ed1165ad7202ca183c68"}, +] + +[package.extras] +all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "pycairo", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0)", "xattr", "zopfli (>=0.1.4)"] +graphite = ["lz4 (>=1.7.4.2)"] +interpolatable = ["munkres", "pycairo", "scipy"] +lxml = ["lxml (>=4.0)"] +pathops = ["skia-pathops (>=0.5.0)"] +plot = ["matplotlib"] +repacker = ["uharfbuzz (>=0.23.0)"] +symfont = ["sympy"] +type1 = ["xattr"] +ufo = ["fs (>=2.2.0,<3)"] +unicode = ["unicodedata2 (>=15.1.0)"] +woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] + +[[package]] +name = "frozenlist" +version = "1.4.1" +description = "A list-like structure which implements collections.abc.MutableSequence" +optional = false +python-versions = ">=3.8" +files = [ + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, + {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, + {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, + {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, + {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, + {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, + {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, + {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, + {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, + {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, + {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, + {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, + {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, +] + +[[package]] +name = "genson" +version = "1.2.2" +description = "GenSON is a powerful, user-friendly JSON Schema generator." +optional = false +python-versions = "*" +files = [ + {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, +] + +[[package]] +name = "greenlet" +version = "3.0.3" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.7" +files = [ + {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, + {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, + {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, + {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, + {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, + {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, + {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, + {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, + {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, + {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, + {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, + {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, + {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, + {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, + {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, + {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil"] + +[[package]] +name = "idna" +version = "3.7" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, +] + +[[package]] +name = "importlib-metadata" +version = "6.11.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-6.11.0-py3-none-any.whl", hash = "sha256:f0afba6205ad8f8947c7d338b5342d5db2afbfd82f9cbef7879a9539cc12eb9b"}, + {file = "importlib_metadata-6.11.0.tar.gz", hash = "sha256:1231cf92d825c9e03cfc4da076a16de6422c863558229ea0b22b675657463443"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] + +[[package]] +name = "importlib-resources" +version = "6.4.0" +description = "Read resources from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_resources-6.4.0-py3-none-any.whl", hash = "sha256:50d10f043df931902d4194ea07ec57960f66a80449ff867bfe782b4c486ba78c"}, + {file = "importlib_resources-6.4.0.tar.gz", hash = "sha256:cdb2b453b8046ca4e3798eb1d84f3cce1446a0e8e7b5ef4efb600f19fc398145"}, +] + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["jaraco.test (>=5.4)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "joblib" +version = "1.4.0" +description = "Lightweight pipelining with Python functions" +optional = false +python-versions = ">=3.8" +files = [ + {file = "joblib-1.4.0-py3-none-any.whl", hash = "sha256:42942470d4062537be4d54c83511186da1fc14ba354961a2114da91efa9a4ed7"}, + {file = "joblib-1.4.0.tar.gz", hash = "sha256:1eb0dc091919cd384490de890cb5dfd538410a6d4b3b54eef09fb8c50b409b1c"}, +] + +[[package]] +name = "jsonpatch" +version = "1.33" +description = "Apply JSON-Patches (RFC 6902)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, + {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, +] + +[package.dependencies] +jsonpointer = ">=1.9" + +[[package]] +name = "jsonpointer" +version = "2.4" +description = "Identify specific nodes in a JSON document (RFC 6901)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, + {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, +] + +[[package]] +name = "jsonref" +version = "0.2" +description = "An implementation of JSON Reference for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, +] + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0" +setuptools = "*" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] + +[[package]] +name = "kiwisolver" +version = "1.4.5" +description = "A fast implementation of the Cassowary constraint solver" +optional = false +python-versions = ">=3.7" +files = [ + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:05703cf211d585109fcd72207a31bb170a0f22144d68298dc5e61b3c946518af"}, + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:146d14bebb7f1dc4d5fbf74f8a6cb15ac42baadee8912eb84ac0b3b2a3dc6ac3"}, + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ef7afcd2d281494c0a9101d5c571970708ad911d028137cd558f02b851c08b4"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9eaa8b117dc8337728e834b9c6e2611f10c79e38f65157c4c38e9400286f5cb1"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ec20916e7b4cbfb1f12380e46486ec4bcbaa91a9c448b97023fde0d5bbf9e4ff"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39b42c68602539407884cf70d6a480a469b93b81b7701378ba5e2328660c847a"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa12042de0171fad672b6c59df69106d20d5596e4f87b5e8f76df757a7c399aa"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a40773c71d7ccdd3798f6489aaac9eee213d566850a9533f8d26332d626b82c"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:19df6e621f6d8b4b9c4d45f40a66839294ff2bb235e64d2178f7522d9170ac5b"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:83d78376d0d4fd884e2c114d0621624b73d2aba4e2788182d286309ebdeed770"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e391b1f0a8a5a10ab3b9bb6afcfd74f2175f24f8975fb87ecae700d1503cdee0"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:852542f9481f4a62dbb5dd99e8ab7aedfeb8fb6342349a181d4036877410f525"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59edc41b24031bc25108e210c0def6f6c2191210492a972d585a06ff246bb79b"}, + {file = "kiwisolver-1.4.5-cp310-cp310-win32.whl", hash = "sha256:a6aa6315319a052b4ee378aa171959c898a6183f15c1e541821c5c59beaa0238"}, + {file = "kiwisolver-1.4.5-cp310-cp310-win_amd64.whl", hash = "sha256:d0ef46024e6a3d79c01ff13801cb19d0cad7fd859b15037aec74315540acc276"}, + {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:11863aa14a51fd6ec28688d76f1735f8f69ab1fabf388851a595d0721af042f5"}, + {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ab3919a9997ab7ef2fbbed0cc99bb28d3c13e6d4b1ad36e97e482558a91be90"}, + {file = "kiwisolver-1.4.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fcc700eadbbccbf6bc1bcb9dbe0786b4b1cb91ca0dcda336eef5c2beed37b797"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dfdd7c0b105af050eb3d64997809dc21da247cf44e63dc73ff0fd20b96be55a9"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76c6a5964640638cdeaa0c359382e5703e9293030fe730018ca06bc2010c4437"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbea0db94288e29afcc4c28afbf3a7ccaf2d7e027489c449cf7e8f83c6346eb9"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ceec1a6bc6cab1d6ff5d06592a91a692f90ec7505d6463a88a52cc0eb58545da"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:040c1aebeda72197ef477a906782b5ab0d387642e93bda547336b8957c61022e"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f91de7223d4c7b793867797bacd1ee53bfe7359bd70d27b7b58a04efbb9436c8"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:faae4860798c31530dd184046a900e652c95513796ef51a12bc086710c2eec4d"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0157420efcb803e71d1b28e2c287518b8808b7cf1ab8af36718fd0a2c453eb0"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:06f54715b7737c2fecdbf140d1afb11a33d59508a47bf11bb38ecf21dc9ab79f"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fdb7adb641a0d13bdcd4ef48e062363d8a9ad4a182ac7647ec88f695e719ae9f"}, + {file = "kiwisolver-1.4.5-cp311-cp311-win32.whl", hash = "sha256:bb86433b1cfe686da83ce32a9d3a8dd308e85c76b60896d58f082136f10bffac"}, + {file = "kiwisolver-1.4.5-cp311-cp311-win_amd64.whl", hash = "sha256:6c08e1312a9cf1074d17b17728d3dfce2a5125b2d791527f33ffbe805200a355"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:32d5cf40c4f7c7b3ca500f8985eb3fb3a7dfc023215e876f207956b5ea26632a"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f846c260f483d1fd217fe5ed7c173fb109efa6b1fc8381c8b7552c5781756192"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5ff5cf3571589b6d13bfbfd6bcd7a3f659e42f96b5fd1c4830c4cf21d4f5ef45"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7269d9e5f1084a653d575c7ec012ff57f0c042258bf5db0954bf551c158466e7"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da802a19d6e15dffe4b0c24b38b3af68e6c1a68e6e1d8f30148c83864f3881db"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3aba7311af82e335dd1e36ffff68aaca609ca6290c2cb6d821a39aa075d8e3ff"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:763773d53f07244148ccac5b084da5adb90bfaee39c197554f01b286cf869228"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2270953c0d8cdab5d422bee7d2007f043473f9d2999631c86a223c9db56cbd16"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d099e745a512f7e3bbe7249ca835f4d357c586d78d79ae8f1dcd4d8adeb9bda9"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:74db36e14a7d1ce0986fa104f7d5637aea5c82ca6326ed0ec5694280942d1162"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e5bab140c309cb3a6ce373a9e71eb7e4873c70c2dda01df6820474f9889d6d4"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0f114aa76dc1b8f636d077979c0ac22e7cd8f3493abbab152f20eb8d3cda71f3"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:88a2df29d4724b9237fc0c6eaf2a1adae0cdc0b3e9f4d8e7dc54b16812d2d81a"}, + {file = "kiwisolver-1.4.5-cp312-cp312-win32.whl", hash = "sha256:72d40b33e834371fd330fb1472ca19d9b8327acb79a5821d4008391db8e29f20"}, + {file = "kiwisolver-1.4.5-cp312-cp312-win_amd64.whl", hash = "sha256:2c5674c4e74d939b9d91dda0fae10597ac7521768fec9e399c70a1f27e2ea2d9"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3a2b053a0ab7a3960c98725cfb0bf5b48ba82f64ec95fe06f1d06c99b552e130"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cd32d6c13807e5c66a7cbb79f90b553642f296ae4518a60d8d76243b0ad2898"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59ec7b7c7e1a61061850d53aaf8e93db63dce0c936db1fda2658b70e4a1be709"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da4cfb373035def307905d05041c1d06d8936452fe89d464743ae7fb8371078b"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2400873bccc260b6ae184b2b8a4fec0e4082d30648eadb7c3d9a13405d861e89"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1b04139c4236a0f3aff534479b58f6f849a8b351e1314826c2d230849ed48985"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:4e66e81a5779b65ac21764c295087de82235597a2293d18d943f8e9e32746265"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7931d8f1f67c4be9ba1dd9c451fb0eeca1a25b89e4d3f89e828fe12a519b782a"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b3f7e75f3015df442238cca659f8baa5f42ce2a8582727981cbfa15fee0ee205"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:bbf1d63eef84b2e8c89011b7f2235b1e0bf7dacc11cac9431fc6468e99ac77fb"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4c380469bd3f970ef677bf2bcba2b6b0b4d5c75e7a020fb863ef75084efad66f"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-win32.whl", hash = "sha256:9408acf3270c4b6baad483865191e3e582b638b1654a007c62e3efe96f09a9a3"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-win_amd64.whl", hash = "sha256:5b94529f9b2591b7af5f3e0e730a4e0a41ea174af35a4fd067775f9bdfeee01a"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:11c7de8f692fc99816e8ac50d1d1aef4f75126eefc33ac79aac02c099fd3db71"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:53abb58632235cd154176ced1ae8f0d29a6657aa1aa9decf50b899b755bc2b93"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:88b9f257ca61b838b6f8094a62418421f87ac2a1069f7e896c36a7d86b5d4c29"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3195782b26fc03aa9c6913d5bad5aeb864bdc372924c093b0f1cebad603dd712"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc579bf0f502e54926519451b920e875f433aceb4624a3646b3252b5caa9e0b6"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a580c91d686376f0f7c295357595c5a026e6cbc3d77b7c36e290201e7c11ecb"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cfe6ab8da05c01ba6fbea630377b5da2cd9bcbc6338510116b01c1bc939a2c18"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d2e5a98f0ec99beb3c10e13b387f8db39106d53993f498b295f0c914328b1333"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a51a263952b1429e429ff236d2f5a21c5125437861baeed77f5e1cc2d2c7c6da"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3edd2fa14e68c9be82c5b16689e8d63d89fe927e56debd6e1dbce7a26a17f81b"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:74d1b44c6cfc897df648cc9fdaa09bc3e7679926e6f96df05775d4fb3946571c"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:76d9289ed3f7501012e05abb8358bbb129149dbd173f1f57a1bf1c22d19ab7cc"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:92dea1ffe3714fa8eb6a314d2b3c773208d865a0e0d35e713ec54eea08a66250"}, + {file = "kiwisolver-1.4.5-cp38-cp38-win32.whl", hash = "sha256:5c90ae8c8d32e472be041e76f9d2f2dbff4d0b0be8bd4041770eddb18cf49a4e"}, + {file = "kiwisolver-1.4.5-cp38-cp38-win_amd64.whl", hash = "sha256:c7940c1dc63eb37a67721b10d703247552416f719c4188c54e04334321351ced"}, + {file = "kiwisolver-1.4.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9407b6a5f0d675e8a827ad8742e1d6b49d9c1a1da5d952a67d50ef5f4170b18d"}, + {file = "kiwisolver-1.4.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15568384086b6df3c65353820a4473575dbad192e35010f622c6ce3eebd57af9"}, + {file = "kiwisolver-1.4.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0dc9db8e79f0036e8173c466d21ef18e1befc02de8bf8aa8dc0813a6dc8a7046"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cdc8a402aaee9a798b50d8b827d7ecf75edc5fb35ea0f91f213ff927c15f4ff0"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6c3bd3cde54cafb87d74d8db50b909705c62b17c2099b8f2e25b461882e544ff"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:955e8513d07a283056b1396e9a57ceddbd272d9252c14f154d450d227606eb54"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:346f5343b9e3f00b8db8ba359350eb124b98c99efd0b408728ac6ebf38173958"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9098e0049e88c6a24ff64545cdfc50807818ba6c1b739cae221bbbcbc58aad3"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:00bd361b903dc4bbf4eb165f24d1acbee754fce22ded24c3d56eec268658a5cf"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7b8b454bac16428b22560d0a1cf0a09875339cab69df61d7805bf48919415901"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f1d072c2eb0ad60d4c183f3fb44ac6f73fb7a8f16a2694a91f988275cbf352f9"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:31a82d498054cac9f6d0b53d02bb85811185bcb477d4b60144f915f3b3126342"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6512cb89e334e4700febbffaaa52761b65b4f5a3cf33f960213d5656cea36a77"}, + {file = "kiwisolver-1.4.5-cp39-cp39-win32.whl", hash = "sha256:9db8ea4c388fdb0f780fe91346fd438657ea602d58348753d9fb265ce1bca67f"}, + {file = "kiwisolver-1.4.5-cp39-cp39-win_amd64.whl", hash = "sha256:59415f46a37f7f2efeec758353dd2eae1b07640d8ca0f0c42548ec4125492635"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5c7b3b3a728dc6faf3fc372ef24f21d1e3cee2ac3e9596691d746e5a536de920"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:620ced262a86244e2be10a676b646f29c34537d0d9cc8eb26c08f53d98013390"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:378a214a1e3bbf5ac4a8708304318b4f890da88c9e6a07699c4ae7174c09a68d"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf7be1207676ac608a50cd08f102f6742dbfc70e8d60c4db1c6897f62f71523"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ba55dce0a9b8ff59495ddd050a0225d58bd0983d09f87cfe2b6aec4f2c1234e4"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fd32ea360bcbb92d28933fc05ed09bffcb1704ba3fc7942e81db0fd4f81a7892"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5e7139af55d1688f8b960ee9ad5adafc4ac17c1c473fe07133ac092310d76544"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dced8146011d2bc2e883f9bd68618b8247387f4bbec46d7392b3c3b032640126"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9bf3325c47b11b2e51bca0824ea217c7cd84491d8ac4eefd1e409705ef092bd"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5794cf59533bc3f1b1c821f7206a3617999db9fbefc345360aafe2e067514929"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e368f200bbc2e4f905b8e71eb38b3c04333bddaa6a2464a6355487b02bb7fb09"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5d706eba36b4c4d5bc6c6377bb6568098765e990cfc21ee16d13963fab7b3e7"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85267bd1aa8880a9c88a8cb71e18d3d64d2751a790e6ca6c27b8ccc724bcd5ad"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:210ef2c3a1f03272649aff1ef992df2e724748918c4bc2d5a90352849eb40bea"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:11d011a7574eb3b82bcc9c1a1d35c1d7075677fdd15de527d91b46bd35e935ee"}, + {file = "kiwisolver-1.4.5.tar.gz", hash = "sha256:e57e563a57fb22a142da34f38acc2fc1a5c864bc29ca1517a88abc963e60d6ec"}, +] + +[[package]] +name = "langchain" +version = "0.1.16" +description = "Building applications with LLMs through composability" +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langchain-0.1.16-py3-none-any.whl", hash = "sha256:bc074cc5e51fad79b9ead1572fc3161918d0f614a6c8f0460543d505ad249ac7"}, + {file = "langchain-0.1.16.tar.gz", hash = "sha256:b6bce78f8c071baa898884accfff15c3d81da2f0dd86c20e2f4c80b41463f49f"}, +] + +[package.dependencies] +aiohttp = ">=3.8.3,<4.0.0" +async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""} +dataclasses-json = ">=0.5.7,<0.7" +jsonpatch = ">=1.33,<2.0" +langchain-community = ">=0.0.32,<0.1" +langchain-core = ">=0.1.42,<0.2.0" +langchain-text-splitters = ">=0.0.1,<0.1" +langsmith = ">=0.1.17,<0.2.0" +numpy = ">=1,<2" +pydantic = ">=1,<3" +PyYAML = ">=5.3" +requests = ">=2,<3" +SQLAlchemy = ">=1.4,<3" +tenacity = ">=8.1.0,<9.0.0" + +[package.extras] +azure = ["azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-textanalytics (>=5.3.0,<6.0.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-core (>=1.26.4,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "azure-search-documents (==11.4.0b8)", "openai (<2)"] +clarifai = ["clarifai (>=9.1.0)"] +cli = ["typer (>=0.9.0,<0.10.0)"] +cohere = ["cohere (>=4,<6)"] +docarray = ["docarray[hnswlib] (>=0.32.0,<0.33.0)"] +embeddings = ["sentence-transformers (>=2,<3)"] +extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<6)", "couchbase (>=4.1.9,<5.0.0)", "dashvector (>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "langchain-openai (>=0.0.2,<0.1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] +javascript = ["esprima (>=4.0.1,<5.0.0)"] +llms = ["clarifai (>=9.1.0)", "cohere (>=4,<6)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (<2)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)"] +openai = ["openai (<2)", "tiktoken (>=0.3.2,<0.6.0)"] +qdrant = ["qdrant-client (>=1.3.1,<2.0.0)"] +text-helpers = ["chardet (>=5.1.0,<6.0.0)"] + +[[package]] +name = "langchain-community" +version = "0.0.32" +description = "Community contributed LangChain integrations." +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langchain_community-0.0.32-py3-none-any.whl", hash = "sha256:406977009999952d0705de3806de2b4867e9bb8eda8ca154a59c7a8ed58da38d"}, + {file = "langchain_community-0.0.32.tar.gz", hash = "sha256:1510217d646c8380f54e9850351f6d2a0b0dd73c501b666c6f4b40baa8160b29"}, +] + +[package.dependencies] +aiohttp = ">=3.8.3,<4.0.0" +dataclasses-json = ">=0.5.7,<0.7" +langchain-core = ">=0.1.41,<0.2.0" +langsmith = ">=0.1.0,<0.2.0" +numpy = ">=1,<2" +PyYAML = ">=5.3" +requests = ">=2,<3" +SQLAlchemy = ">=1.4,<3" +tenacity = ">=8.1.0,<9.0.0" + +[package.extras] +cli = ["typer (>=0.9.0,<0.10.0)"] +extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cloudpickle (>=2.0.0)", "cohere (>=4,<5)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "friendli-client (>=1.2.4,<2.0.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "httpx (>=0.24.1,<0.25.0)", "httpx-sse (>=0.4.0,<0.5.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "nvidia-riva-client (>=2.14.0,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "premai (>=0.3.25,<0.4.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pyjwt (>=2.8.0,<3.0.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "tidb-vector (>=0.0.3,<1.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "tree-sitter (>=0.20.2,<0.21.0)", "tree-sitter-languages (>=1.8.0,<2.0.0)", "upstash-redis (>=0.15.0,<0.16.0)", "vdms (>=0.0.20,<0.0.21)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] + +[[package]] +name = "langchain-core" +version = "0.1.42" +description = "Building applications with LLMs through composability" +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langchain_core-0.1.42-py3-none-any.whl", hash = "sha256:c5653ffa08a44f740295c157a24c0def4a753333f6a2c41f76bf431cd00be8b5"}, + {file = "langchain_core-0.1.42.tar.gz", hash = "sha256:40751bf60ea5d8e2b2efe65290db434717ee3834870c002e40e2811f09d814e6"}, +] + +[package.dependencies] +jsonpatch = ">=1.33,<2.0" +langsmith = ">=0.1.0,<0.2.0" +packaging = ">=23.2,<24.0" +pydantic = ">=1,<3" +PyYAML = ">=5.3" +tenacity = ">=8.1.0,<9.0.0" + +[package.extras] +extended-testing = ["jinja2 (>=3,<4)"] + +[[package]] +name = "langchain-text-splitters" +version = "0.0.1" +description = "LangChain text splitting utilities" +optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "langchain_text_splitters-0.0.1-py3-none-any.whl", hash = "sha256:f5b802f873f5ff6a8b9259ff34d53ed989666ef4e1582e6d1adb3b5520e3839a"}, + {file = "langchain_text_splitters-0.0.1.tar.gz", hash = "sha256:ac459fa98799f5117ad5425a9330b21961321e30bc19a2a2f9f761ddadd62aa1"}, +] + +[package.dependencies] +langchain-core = ">=0.1.28,<0.2.0" + +[package.extras] +extended-testing = ["lxml (>=5.1.0,<6.0.0)"] + +[[package]] +name = "langsmith" +version = "0.1.48" +description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langsmith-0.1.48-py3-none-any.whl", hash = "sha256:2f8967e2aaaed8881efe6f346590681243b315af8ba8a037d969c299d42071d3"}, + {file = "langsmith-0.1.48.tar.gz", hash = "sha256:9cd21cd0928123b2bd2363f03515cb1f6a833d9a9f00420240d5132861d15fcc"}, +] + +[package.dependencies] +orjson = ">=3.9.14,<4.0.0" +pydantic = ">=1,<3" +requests = ">=2,<3" + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "marshmallow" +version = "3.21.1" +description = "A lightweight library for converting complex datatypes to and from native Python datatypes." +optional = false +python-versions = ">=3.8" +files = [ + {file = "marshmallow-3.21.1-py3-none-any.whl", hash = "sha256:f085493f79efb0644f270a9bf2892843142d80d7174bbbd2f3713f2a589dc633"}, + {file = "marshmallow-3.21.1.tar.gz", hash = "sha256:4e65e9e0d80fc9e609574b9983cf32579f305c718afb30d7233ab818571768c3"}, +] + +[package.dependencies] +packaging = ">=17.0" + +[package.extras] +dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] +docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.2.6)", "sphinx-issues (==4.0.0)", "sphinx-version-warning (==1.1.2)"] +tests = ["pytest", "pytz", "simplejson"] + +[[package]] +name = "matplotlib" +version = "3.8.4" +description = "Python plotting package" +optional = false +python-versions = ">=3.9" +files = [ + {file = "matplotlib-3.8.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:abc9d838f93583650c35eca41cfcec65b2e7cb50fd486da6f0c49b5e1ed23014"}, + {file = "matplotlib-3.8.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f65c9f002d281a6e904976007b2d46a1ee2bcea3a68a8c12dda24709ddc9106"}, + {file = "matplotlib-3.8.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce1edd9f5383b504dbc26eeea404ed0a00656c526638129028b758fd43fc5f10"}, + {file = "matplotlib-3.8.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ecd79298550cba13a43c340581a3ec9c707bd895a6a061a78fa2524660482fc0"}, + {file = "matplotlib-3.8.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:90df07db7b599fe7035d2f74ab7e438b656528c68ba6bb59b7dc46af39ee48ef"}, + {file = "matplotlib-3.8.4-cp310-cp310-win_amd64.whl", hash = "sha256:ac24233e8f2939ac4fd2919eed1e9c0871eac8057666070e94cbf0b33dd9c338"}, + {file = "matplotlib-3.8.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:72f9322712e4562e792b2961971891b9fbbb0e525011e09ea0d1f416c4645661"}, + {file = "matplotlib-3.8.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:232ce322bfd020a434caaffbd9a95333f7c2491e59cfc014041d95e38ab90d1c"}, + {file = "matplotlib-3.8.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6addbd5b488aedb7f9bc19f91cd87ea476206f45d7116fcfe3d31416702a82fa"}, + {file = "matplotlib-3.8.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc4ccdc64e3039fc303defd119658148f2349239871db72cd74e2eeaa9b80b71"}, + {file = "matplotlib-3.8.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b7a2a253d3b36d90c8993b4620183b55665a429da8357a4f621e78cd48b2b30b"}, + {file = "matplotlib-3.8.4-cp311-cp311-win_amd64.whl", hash = "sha256:8080d5081a86e690d7688ffa542532e87f224c38a6ed71f8fbed34dd1d9fedae"}, + {file = "matplotlib-3.8.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6485ac1f2e84676cff22e693eaa4fbed50ef5dc37173ce1f023daef4687df616"}, + {file = "matplotlib-3.8.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c89ee9314ef48c72fe92ce55c4e95f2f39d70208f9f1d9db4e64079420d8d732"}, + {file = "matplotlib-3.8.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50bac6e4d77e4262c4340d7a985c30912054745ec99756ce213bfbc3cb3808eb"}, + {file = "matplotlib-3.8.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f51c4c869d4b60d769f7b4406eec39596648d9d70246428745a681c327a8ad30"}, + {file = "matplotlib-3.8.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b12ba985837e4899b762b81f5b2845bd1a28f4fdd1a126d9ace64e9c4eb2fb25"}, + {file = "matplotlib-3.8.4-cp312-cp312-win_amd64.whl", hash = "sha256:7a6769f58ce51791b4cb8b4d7642489df347697cd3e23d88266aaaee93b41d9a"}, + {file = "matplotlib-3.8.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:843cbde2f0946dadd8c5c11c6d91847abd18ec76859dc319362a0964493f0ba6"}, + {file = "matplotlib-3.8.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1c13f041a7178f9780fb61cc3a2b10423d5e125480e4be51beaf62b172413b67"}, + {file = "matplotlib-3.8.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb44f53af0a62dc80bba4443d9b27f2fde6acfdac281d95bc872dc148a6509cc"}, + {file = "matplotlib-3.8.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:606e3b90897554c989b1e38a258c626d46c873523de432b1462f295db13de6f9"}, + {file = "matplotlib-3.8.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9bb0189011785ea794ee827b68777db3ca3f93f3e339ea4d920315a0e5a78d54"}, + {file = "matplotlib-3.8.4-cp39-cp39-win_amd64.whl", hash = "sha256:6209e5c9aaccc056e63b547a8152661324404dd92340a6e479b3a7f24b42a5d0"}, + {file = "matplotlib-3.8.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c7064120a59ce6f64103c9cefba8ffe6fba87f2c61d67c401186423c9a20fd35"}, + {file = "matplotlib-3.8.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0e47eda4eb2614300fc7bb4657fced3e83d6334d03da2173b09e447418d499f"}, + {file = "matplotlib-3.8.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:493e9f6aa5819156b58fce42b296ea31969f2aab71c5b680b4ea7a3cb5c07d94"}, + {file = "matplotlib-3.8.4.tar.gz", hash = "sha256:8aac397d5e9ec158960e31c381c5ffc52ddd52bd9a47717e2a694038167dffea"}, +] + +[package.dependencies] +contourpy = ">=1.0.1" +cycler = ">=0.10" +fonttools = ">=4.22.0" +importlib-resources = {version = ">=3.2.0", markers = "python_version < \"3.10\""} +kiwisolver = ">=1.3.1" +numpy = ">=1.21" +packaging = ">=20.0" +pillow = ">=8" +pyparsing = ">=2.3.1" +python-dateutil = ">=2.7" + +[[package]] +name = "multidict" +version = "6.0.5" +description = "multidict implementation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, + {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, + {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, + {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, + {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, + {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, + {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, + {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, + {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, + {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, + {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, + {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, + {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, + {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, + {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, + {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, +] + +[[package]] +name = "mypy" +version = "1.9.0" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"}, + {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"}, + {file = "mypy-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150"}, + {file = "mypy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374"}, + {file = "mypy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc"}, + {file = "mypy-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129"}, + {file = "mypy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612"}, + {file = "mypy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6"}, + {file = "mypy-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185"}, + {file = "mypy-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913"}, + {file = "mypy-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"}, + {file = "mypy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b"}, + {file = "mypy-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2"}, + {file = "mypy-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e"}, + {file = "mypy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04"}, + {file = "mypy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89"}, + {file = "mypy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02"}, + {file = "mypy-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4"}, + {file = "mypy-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d"}, + {file = "mypy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf"}, + {file = "mypy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9"}, + {file = "mypy-1.9.0-py3-none-any.whl", hash = "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e"}, + {file = "mypy-1.9.0.tar.gz", hash = "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.1.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "numpy" +version = "1.26.4" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, +] + +[[package]] +name = "openai" +version = "0.27.9" +description = "Python client library for the OpenAI API" +optional = false +python-versions = ">=3.7.1" +files = [ + {file = "openai-0.27.9-py3-none-any.whl", hash = "sha256:6a3cf8e276d1a6262b50562fbc0cba7967cfebb78ed827d375986b48fdad6475"}, + {file = "openai-0.27.9.tar.gz", hash = "sha256:b687761c82f5ebb6f61efc791b2083d2d068277b94802d4d1369efe39851813d"}, +] + +[package.dependencies] +aiohttp = "*" +matplotlib = {version = "*", optional = true, markers = "extra == \"embeddings\""} +numpy = {version = "*", optional = true, markers = "extra == \"embeddings\""} +openpyxl = {version = ">=3.0.7", optional = true, markers = "extra == \"embeddings\""} +pandas = {version = ">=1.2.3", optional = true, markers = "extra == \"embeddings\""} +pandas-stubs = {version = ">=1.1.0.11", optional = true, markers = "extra == \"embeddings\""} +plotly = {version = "*", optional = true, markers = "extra == \"embeddings\""} +requests = ">=2.20" +scikit-learn = {version = ">=1.0.2", optional = true, markers = "extra == \"embeddings\""} +scipy = {version = "*", optional = true, markers = "extra == \"embeddings\""} +tenacity = {version = ">=8.0.1", optional = true, markers = "extra == \"embeddings\""} +tqdm = "*" + +[package.extras] +datalib = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] +dev = ["black (>=21.6b0,<22.0)", "pytest (==6.*)", "pytest-asyncio", "pytest-mock"] +embeddings = ["matplotlib", "numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "plotly", "scikit-learn (>=1.0.2)", "scipy", "tenacity (>=8.0.1)"] +wandb = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "wandb"] + +[[package]] +name = "openpyxl" +version = "3.1.2" +description = "A Python library to read/write Excel 2010 xlsx/xlsm files" +optional = false +python-versions = ">=3.6" +files = [ + {file = "openpyxl-3.1.2-py2.py3-none-any.whl", hash = "sha256:f91456ead12ab3c6c2e9491cf33ba6d08357d802192379bb482f1033ade496f5"}, + {file = "openpyxl-3.1.2.tar.gz", hash = "sha256:a6f5977418eff3b2d5500d54d9db50c8277a368436f4e4f8ddb1be3422870184"}, +] + +[package.dependencies] +et-xmlfile = "*" + +[[package]] +name = "orjson" +version = "3.10.1" +description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +optional = false +python-versions = ">=3.8" +files = [ + {file = "orjson-3.10.1-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:8ec2fc456d53ea4a47768f622bb709be68acd455b0c6be57e91462259741c4f3"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e900863691d327758be14e2a491931605bd0aded3a21beb6ce133889830b659"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ab6ecbd6fe57785ebc86ee49e183f37d45f91b46fc601380c67c5c5e9c0014a2"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8af7c68b01b876335cccfb4eee0beef2b5b6eae1945d46a09a7c24c9faac7a77"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:915abfb2e528677b488a06eba173e9d7706a20fdfe9cdb15890b74ef9791b85e"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe3fd4a36eff9c63d25503b439531d21828da9def0059c4f472e3845a081aa0b"}, + {file = "orjson-3.10.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d229564e72cfc062e6481a91977a5165c5a0fdce11ddc19ced8471847a67c517"}, + {file = "orjson-3.10.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9e00495b18304173ac843b5c5fbea7b6f7968564d0d49bef06bfaeca4b656f4e"}, + {file = "orjson-3.10.1-cp310-none-win32.whl", hash = "sha256:fd78ec55179545c108174ba19c1795ced548d6cac4d80d014163033c047ca4ea"}, + {file = "orjson-3.10.1-cp310-none-win_amd64.whl", hash = "sha256:50ca42b40d5a442a9e22eece8cf42ba3d7cd4cd0f2f20184b4d7682894f05eec"}, + {file = "orjson-3.10.1-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b345a3d6953628df2f42502297f6c1e1b475cfbf6268013c94c5ac80e8abc04c"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:caa7395ef51af4190d2c70a364e2f42138e0e5fcb4bc08bc9b76997659b27dab"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b01d701decd75ae092e5f36f7b88a1e7a1d3bb7c9b9d7694de850fb155578d5a"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b5028981ba393f443d8fed9049211b979cadc9d0afecf162832f5a5b152c6297"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:31ff6a222ea362b87bf21ff619598a4dc1106aaafaea32b1c4876d692891ec27"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e852a83d7803d3406135fb7a57cf0c1e4a3e73bac80ec621bd32f01c653849c5"}, + {file = "orjson-3.10.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2567bc928ed3c3fcd90998009e8835de7c7dc59aabcf764b8374d36044864f3b"}, + {file = "orjson-3.10.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4ce98cac60b7bb56457bdd2ed7f0d5d7f242d291fdc0ca566c83fa721b52e92d"}, + {file = "orjson-3.10.1-cp311-none-win32.whl", hash = "sha256:813905e111318acb356bb8029014c77b4c647f8b03f314e7b475bd9ce6d1a8ce"}, + {file = "orjson-3.10.1-cp311-none-win_amd64.whl", hash = "sha256:03a3ca0b3ed52bed1a869163a4284e8a7b0be6a0359d521e467cdef7e8e8a3ee"}, + {file = "orjson-3.10.1-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:f02c06cee680b1b3a8727ec26c36f4b3c0c9e2b26339d64471034d16f74f4ef5"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1aa2f127ac546e123283e437cc90b5ecce754a22306c7700b11035dad4ccf85"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2cf29b4b74f585225196944dffdebd549ad2af6da9e80db7115984103fb18a96"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1b130c20b116f413caf6059c651ad32215c28500dce9cd029a334a2d84aa66f"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d31f9a709e6114492136e87c7c6da5e21dfedebefa03af85f3ad72656c493ae9"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d1d169461726f271ab31633cf0e7e7353417e16fb69256a4f8ecb3246a78d6e"}, + {file = "orjson-3.10.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:57c294d73825c6b7f30d11c9e5900cfec9a814893af7f14efbe06b8d0f25fba9"}, + {file = "orjson-3.10.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d7f11dbacfa9265ec76b4019efffabaabba7a7ebf14078f6b4df9b51c3c9a8ea"}, + {file = "orjson-3.10.1-cp312-none-win32.whl", hash = "sha256:d89e5ed68593226c31c76ab4de3e0d35c760bfd3fbf0a74c4b2be1383a1bf123"}, + {file = "orjson-3.10.1-cp312-none-win_amd64.whl", hash = "sha256:aa76c4fe147fd162107ce1692c39f7189180cfd3a27cfbc2ab5643422812da8e"}, + {file = "orjson-3.10.1-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a2c6a85c92d0e494c1ae117befc93cf8e7bca2075f7fe52e32698da650b2c6d1"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9813f43da955197d36a7365eb99bed42b83680801729ab2487fef305b9ced866"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ec917b768e2b34b7084cb6c68941f6de5812cc26c6f1a9fecb728e36a3deb9e8"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5252146b3172d75c8a6d27ebca59c9ee066ffc5a277050ccec24821e68742fdf"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:536429bb02791a199d976118b95014ad66f74c58b7644d21061c54ad284e00f4"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7dfed3c3e9b9199fb9c3355b9c7e4649b65f639e50ddf50efdf86b45c6de04b5"}, + {file = "orjson-3.10.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2b230ec35f188f003f5b543644ae486b2998f6afa74ee3a98fc8ed2e45960afc"}, + {file = "orjson-3.10.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:01234249ba19c6ab1eb0b8be89f13ea21218b2d72d496ef085cfd37e1bae9dd8"}, + {file = "orjson-3.10.1-cp38-none-win32.whl", hash = "sha256:8a884fbf81a3cc22d264ba780920d4885442144e6acaa1411921260416ac9a54"}, + {file = "orjson-3.10.1-cp38-none-win_amd64.whl", hash = "sha256:dab5f802d52b182163f307d2b1f727d30b1762e1923c64c9c56dd853f9671a49"}, + {file = "orjson-3.10.1-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a51fd55d4486bc5293b7a400f9acd55a2dc3b5fc8420d5ffe9b1d6bb1a056a5e"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53521542a6db1411b3bfa1b24ddce18605a3abdc95a28a67b33f9145f26aa8f2"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:27d610df96ac18ace4931411d489637d20ab3b8f63562b0531bba16011998db0"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79244b1456e5846d44e9846534bd9e3206712936d026ea8e6a55a7374d2c0694"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d751efaa8a49ae15cbebdda747a62a9ae521126e396fda8143858419f3b03610"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27ff69c620a4fff33267df70cfd21e0097c2a14216e72943bd5414943e376d77"}, + {file = "orjson-3.10.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ebc58693464146506fde0c4eb1216ff6d4e40213e61f7d40e2f0dde9b2f21650"}, + {file = "orjson-3.10.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5be608c3972ed902e0143a5b8776d81ac1059436915d42defe5c6ae97b3137a4"}, + {file = "orjson-3.10.1-cp39-none-win32.whl", hash = "sha256:4ae10753e7511d359405aadcbf96556c86e9dbf3a948d26c2c9f9a150c52b091"}, + {file = "orjson-3.10.1-cp39-none-win_amd64.whl", hash = "sha256:fb5bc4caa2c192077fdb02dce4e5ef8639e7f20bec4e3a834346693907362932"}, + {file = "orjson-3.10.1.tar.gz", hash = "sha256:a883b28d73370df23ed995c466b4f6c708c1f7a9bdc400fe89165c96c7603204"}, +] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pandas" +version = "2.2.2" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, + {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, + {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, + {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, + {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"}, + {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"}, + {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.22.4", markers = "python_version < \"3.11\""}, + {version = ">=1.23.2", markers = "python_version == \"3.11\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.7" + +[package.extras] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] + +[[package]] +name = "pandas-stubs" +version = "2.2.1.240316" +description = "Type annotations for pandas" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pandas_stubs-2.2.1.240316-py3-none-any.whl", hash = "sha256:0126a26451a37cb893ea62357ca87ba3d181bd999ec8ba2ca5602e20207d6682"}, + {file = "pandas_stubs-2.2.1.240316.tar.gz", hash = "sha256:236a4f812fb6b1922e9607ff09e427f6d8540c421c9e5a40e3e4ddf7adac7f05"}, +] + +[package.dependencies] +numpy = {version = ">=1.26.0", markers = "python_version < \"3.13\""} +types-pytz = ">=2022.1.1" + +[[package]] +name = "pendulum" +version = "2.1.2" +description = "Python datetimes made easy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, +] + +[package.dependencies] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" + +[[package]] +name = "pillow" +version = "10.3.0" +description = "Python Imaging Library (Fork)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pillow-10.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:90b9e29824800e90c84e4022dd5cc16eb2d9605ee13f05d47641eb183cd73d45"}, + {file = "pillow-10.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a2c405445c79c3f5a124573a051062300936b0281fee57637e706453e452746c"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78618cdbccaa74d3f88d0ad6cb8ac3007f1a6fa5c6f19af64b55ca170bfa1edf"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261ddb7ca91fcf71757979534fb4c128448b5b4c55cb6152d280312062f69599"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ce49c67f4ea0609933d01c0731b34b8695a7a748d6c8d186f95e7d085d2fe475"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b14f16f94cbc61215115b9b1236f9c18403c15dd3c52cf629072afa9d54c1cbf"}, + {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d33891be6df59d93df4d846640f0e46f1a807339f09e79a8040bc887bdcd7ed3"}, + {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b50811d664d392f02f7761621303eba9d1b056fb1868c8cdf4231279645c25f5"}, + {file = "pillow-10.3.0-cp310-cp310-win32.whl", hash = "sha256:ca2870d5d10d8726a27396d3ca4cf7976cec0f3cb706debe88e3a5bd4610f7d2"}, + {file = "pillow-10.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:f0d0591a0aeaefdaf9a5e545e7485f89910c977087e7de2b6c388aec32011e9f"}, + {file = "pillow-10.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:ccce24b7ad89adb5a1e34a6ba96ac2530046763912806ad4c247356a8f33a67b"}, + {file = "pillow-10.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:5f77cf66e96ae734717d341c145c5949c63180842a545c47a0ce7ae52ca83795"}, + {file = "pillow-10.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4b878386c4bf293578b48fc570b84ecfe477d3b77ba39a6e87150af77f40c57"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdcbb4068117dfd9ce0138d068ac512843c52295ed996ae6dd1faf537b6dbc27"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9797a6c8fe16f25749b371c02e2ade0efb51155e767a971c61734b1bf6293994"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:9e91179a242bbc99be65e139e30690e081fe6cb91a8e77faf4c409653de39451"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:1b87bd9d81d179bd8ab871603bd80d8645729939f90b71e62914e816a76fc6bd"}, + {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:81d09caa7b27ef4e61cb7d8fbf1714f5aec1c6b6c5270ee53504981e6e9121ad"}, + {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:048ad577748b9fa4a99a0548c64f2cb8d672d5bf2e643a739ac8faff1164238c"}, + {file = "pillow-10.3.0-cp311-cp311-win32.whl", hash = "sha256:7161ec49ef0800947dc5570f86568a7bb36fa97dd09e9827dc02b718c5643f09"}, + {file = "pillow-10.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:8eb0908e954d093b02a543dc963984d6e99ad2b5e36503d8a0aaf040505f747d"}, + {file = "pillow-10.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:4e6f7d1c414191c1199f8996d3f2282b9ebea0945693fb67392c75a3a320941f"}, + {file = "pillow-10.3.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:e46f38133e5a060d46bd630faa4d9fa0202377495df1f068a8299fd78c84de84"}, + {file = "pillow-10.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:50b8eae8f7334ec826d6eeffaeeb00e36b5e24aa0b9df322c247539714c6df19"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d3bea1c75f8c53ee4d505c3e67d8c158ad4df0d83170605b50b64025917f338"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19aeb96d43902f0a783946a0a87dbdad5c84c936025b8419da0a0cd7724356b1"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:74d28c17412d9caa1066f7a31df8403ec23d5268ba46cd0ad2c50fb82ae40462"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:ff61bfd9253c3915e6d41c651d5f962da23eda633cf02262990094a18a55371a"}, + {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d886f5d353333b4771d21267c7ecc75b710f1a73d72d03ca06df49b09015a9ef"}, + {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b5ec25d8b17217d635f8935dbc1b9aa5907962fae29dff220f2659487891cd3"}, + {file = "pillow-10.3.0-cp312-cp312-win32.whl", hash = "sha256:51243f1ed5161b9945011a7360e997729776f6e5d7005ba0c6879267d4c5139d"}, + {file = "pillow-10.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:412444afb8c4c7a6cc11a47dade32982439925537e483be7c0ae0cf96c4f6a0b"}, + {file = "pillow-10.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:798232c92e7665fe82ac085f9d8e8ca98826f8e27859d9a96b41d519ecd2e49a"}, + {file = "pillow-10.3.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:4eaa22f0d22b1a7e93ff0a596d57fdede2e550aecffb5a1ef1106aaece48e96b"}, + {file = "pillow-10.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cd5e14fbf22a87321b24c88669aad3a51ec052eb145315b3da3b7e3cc105b9a2"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1530e8f3a4b965eb6a7785cf17a426c779333eb62c9a7d1bbcf3ffd5bf77a4aa"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d512aafa1d32efa014fa041d38868fda85028e3f930a96f85d49c7d8ddc0383"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:339894035d0ede518b16073bdc2feef4c991ee991a29774b33e515f1d308e08d"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:aa7e402ce11f0885305bfb6afb3434b3cd8f53b563ac065452d9d5654c7b86fd"}, + {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0ea2a783a2bdf2a561808fe4a7a12e9aa3799b701ba305de596bc48b8bdfce9d"}, + {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c78e1b00a87ce43bb37642c0812315b411e856a905d58d597750eb79802aaaa3"}, + {file = "pillow-10.3.0-cp38-cp38-win32.whl", hash = "sha256:72d622d262e463dfb7595202d229f5f3ab4b852289a1cd09650362db23b9eb0b"}, + {file = "pillow-10.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:2034f6759a722da3a3dbd91a81148cf884e91d1b747992ca288ab88c1de15999"}, + {file = "pillow-10.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2ed854e716a89b1afcedea551cd85f2eb2a807613752ab997b9974aaa0d56936"}, + {file = "pillow-10.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dc1a390a82755a8c26c9964d457d4c9cbec5405896cba94cf51f36ea0d855002"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4203efca580f0dd6f882ca211f923168548f7ba334c189e9eab1178ab840bf60"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3102045a10945173d38336f6e71a8dc71bcaeed55c3123ad4af82c52807b9375"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6fb1b30043271ec92dc65f6d9f0b7a830c210b8a96423074b15c7bc999975f57"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:1dfc94946bc60ea375cc39cff0b8da6c7e5f8fcdc1d946beb8da5c216156ddd8"}, + {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b09b86b27a064c9624d0a6c54da01c1beaf5b6cadfa609cf63789b1d08a797b9"}, + {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d3b2348a78bc939b4fed6552abfd2e7988e0f81443ef3911a4b8498ca084f6eb"}, + {file = "pillow-10.3.0-cp39-cp39-win32.whl", hash = "sha256:45ebc7b45406febf07fef35d856f0293a92e7417ae7933207e90bf9090b70572"}, + {file = "pillow-10.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:0ba26351b137ca4e0db0342d5d00d2e355eb29372c05afd544ebf47c0956ffeb"}, + {file = "pillow-10.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:50fd3f6b26e3441ae07b7c979309638b72abc1a25da31a81a7fbd9495713ef4f"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6b02471b72526ab8a18c39cb7967b72d194ec53c1fd0a70b050565a0f366d355"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8ab74c06ffdab957d7670c2a5a6e1a70181cd10b727cd788c4dd9005b6a8acd9"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:048eeade4c33fdf7e08da40ef402e748df113fd0b4584e32c4af74fe78baaeb2"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e2ec1e921fd07c7cda7962bad283acc2f2a9ccc1b971ee4b216b75fad6f0463"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c8e73e99da7db1b4cad7f8d682cf6abad7844da39834c288fbfa394a47bbced"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:16563993329b79513f59142a6b02055e10514c1a8e86dca8b48a893e33cf91e3"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:dd78700f5788ae180b5ee8902c6aea5a5726bac7c364b202b4b3e3ba2d293170"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:aff76a55a8aa8364d25400a210a65ff59d0168e0b4285ba6bf2bd83cf675ba32"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b7bc2176354defba3edc2b9a777744462da2f8e921fbaf61e52acb95bafa9828"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:793b4e24db2e8742ca6423d3fde8396db336698c55cd34b660663ee9e45ed37f"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d93480005693d247f8346bc8ee28c72a2191bdf1f6b5db469c096c0c867ac015"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c83341b89884e2b2e55886e8fbbf37c3fa5efd6c8907124aeb72f285ae5696e5"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1a1d1915db1a4fdb2754b9de292642a39a7fb28f1736699527bb649484fb966a"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a0eaa93d054751ee9964afa21c06247779b90440ca41d184aeb5d410f20ff591"}, + {file = "pillow-10.3.0.tar.gz", hash = "sha256:9d2455fbf44c914840c793e89aa82d0e1763a14253a000743719ae5946814b2d"}, +] + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] +fpx = ["olefile"] +mic = ["olefile"] +tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] +typing = ["typing-extensions"] +xmp = ["defusedxml"] + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "plotly" +version = "5.20.0" +description = "An open-source, interactive data visualization library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "plotly-5.20.0-py3-none-any.whl", hash = "sha256:837a9c8aa90f2c0a2f0d747b82544d014dc2a2bdde967b5bb1da25b53932d1a9"}, + {file = "plotly-5.20.0.tar.gz", hash = "sha256:bf901c805d22032cfa534b2ff7c5aa6b0659e037f19ec1e0cca7f585918b5c89"}, +] + +[package.dependencies] +packaging = "*" +tenacity = ">=6.2.0" + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pydantic" +version = "1.10.15" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50f1666a9940d3d68683c9d96e39640f709d7a72ff8702987dab1761036206bb"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82790d4753ee5d00739d6cb5cf56bceb186d9d6ce134aca3ba7befb1eedbc2c8"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d207d5b87f6cbefbdb1198154292faee8017d7495a54ae58db06762004500d00"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e49db944fad339b2ccb80128ffd3f8af076f9f287197a480bf1e4ca053a866f0"}, + {file = "pydantic-1.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:d3b5c4cbd0c9cb61bbbb19ce335e1f8ab87a811f6d589ed52b0254cf585d709c"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3d5731a120752248844676bf92f25a12f6e45425e63ce22e0849297a093b5b0"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c365ad9c394f9eeffcb30a82f4246c0006417f03a7c0f8315d6211f25f7cb654"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3287e1614393119c67bd4404f46e33ae3be3ed4cd10360b48d0a4459f420c6a3"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be51dd2c8596b25fe43c0a4a59c2bee4f18d88efb8031188f9e7ddc6b469cf44"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6a51a1dd4aa7b3f1317f65493a182d3cff708385327c1c82c81e4a9d6d65b2e4"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4e316e54b5775d1eb59187f9290aeb38acf620e10f7fd2f776d97bb788199e53"}, + {file = "pydantic-1.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:0d142fa1b8f2f0ae11ddd5e3e317dcac060b951d605fda26ca9b234b92214986"}, + {file = "pydantic-1.10.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7ea210336b891f5ea334f8fc9f8f862b87acd5d4a0cbc9e3e208e7aa1775dabf"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3453685ccd7140715e05f2193d64030101eaad26076fad4e246c1cc97e1bb30d"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bea1f03b8d4e8e86702c918ccfd5d947ac268f0f0cc6ed71782e4b09353b26f"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:af9850d98fc21e5bc24ea9e35dd80a29faf6462c608728a110c0a30b595e58b7"}, + {file = "pydantic-1.10.15-cp37-cp37m-win_amd64.whl", hash = "sha256:d31ee5b14a82c9afe2bd26aaa405293d4237d0591527d9129ce36e58f19f95c1"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5e09c19df304b8123938dc3c53d3d3be6ec74b9d7d0d80f4f4b5432ae16c2022"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7ac9237cd62947db00a0d16acf2f3e00d1ae9d3bd602b9c415f93e7a9fc10528"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:584f2d4c98ffec420e02305cf675857bae03c9d617fcfdc34946b1160213a948"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbc6989fad0c030bd70a0b6f626f98a862224bc2b1e36bfc531ea2facc0a340c"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d573082c6ef99336f2cb5b667b781d2f776d4af311574fb53d908517ba523c22"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6bd7030c9abc80134087d8b6e7aa957e43d35714daa116aced57269a445b8f7b"}, + {file = "pydantic-1.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:3350f527bb04138f8aff932dc828f154847fbdc7a1a44c240fbfff1b57f49a12"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51d405b42f1b86703555797270e4970a9f9bd7953f3990142e69d1037f9d9e51"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a980a77c52723b0dc56640ced396b73a024d4b74f02bcb2d21dbbac1debbe9d0"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f1a1fb467d3f49e1708a3f632b11c69fccb4e748a325d5a491ddc7b5d22383"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:676ed48f2c5bbad835f1a8ed8a6d44c1cd5a21121116d2ac40bd1cd3619746ed"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92229f73400b80c13afcd050687f4d7e88de9234d74b27e6728aa689abcf58cc"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2746189100c646682eff0bce95efa7d2e203420d8e1c613dc0c6b4c1d9c1fde4"}, + {file = "pydantic-1.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:394f08750bd8eaad714718812e7fab615f873b3cdd0b9d84e76e51ef3b50b6b7"}, + {file = "pydantic-1.10.15-py3-none-any.whl", hash = "sha256:28e552a060ba2740d0d2aabe35162652c1459a0b9069fe0db7f4ee0e18e74d58"}, + {file = "pydantic-1.10.15.tar.gz", hash = "sha256:ca832e124eda231a60a041da4f013e3ff24949d94a01154b137fc2f2a43c3ffb"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pyparsing" +version = "3.1.2" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +optional = false +python-versions = ">=3.6.8" +files = [ + {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, + {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, +] + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + +[[package]] +name = "pyrsistent" +version = "0.20.0" +description = "Persistent/Functional/Immutable data structures" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, +] + +[[package]] +name = "pytest" +version = "7.4.4" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "regex" +version = "2023.12.25" +description = "Alternative regular expression module, to replace re." +optional = false +python-versions = ">=3.7" +files = [ + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"}, + {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"}, + {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"}, + {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"}, + {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"}, + {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"}, + {file = "regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"}, + {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"}, + {file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = "sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"}, + {file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"}, + {file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"}, + {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"}, + {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"}, + {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"}, + {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-cache" +version = "1.2.0" +description = "A persistent cache for python requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, +] + +[package.dependencies] +attrs = ">=21.2" +cattrs = ">=22.2" +platformdirs = ">=2.5" +requests = ">=2.22" +url-normalize = ">=1.4" +urllib3 = ">=1.25.5" + +[package.extras] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] +bson = ["bson (>=0.5)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] +dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] +json = ["ujson (>=5.4)"] +mongodb = ["pymongo (>=3)"] +redis = ["redis (>=3)"] +security = ["itsdangerous (>=2.0)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "ruff" +version = "0.3.7" +description = "An extremely fast Python linter and code formatter, written in Rust." +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0e8377cccb2f07abd25e84fc5b2cbe48eeb0fea9f1719cad7caedb061d70e5ce"}, + {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:15a4d1cc1e64e556fa0d67bfd388fed416b7f3b26d5d1c3e7d192c897e39ba4b"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d28bdf3d7dc71dd46929fafeec98ba89b7c3550c3f0978e36389b5631b793663"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:379b67d4f49774ba679593b232dcd90d9e10f04d96e3c8ce4a28037ae473f7bb"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c060aea8ad5ef21cdfbbe05475ab5104ce7827b639a78dd55383a6e9895b7c51"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:ebf8f615dde968272d70502c083ebf963b6781aacd3079081e03b32adfe4d58a"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d48098bd8f5c38897b03604f5428901b65e3c97d40b3952e38637b5404b739a2"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da8a4fda219bf9024692b1bc68c9cff4b80507879ada8769dc7e985755d662ea"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c44e0149f1d8b48c4d5c33d88c677a4aa22fd09b1683d6a7ff55b816b5d074f"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3050ec0af72b709a62ecc2aca941b9cd479a7bf2b36cc4562f0033d688e44fa1"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a29cc38e4c1ab00da18a3f6777f8b50099d73326981bb7d182e54a9a21bb4ff7"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5b15cc59c19edca917f51b1956637db47e200b0fc5e6e1878233d3a938384b0b"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e491045781b1e38b72c91247cf4634f040f8d0cb3e6d3d64d38dcf43616650b4"}, + {file = "ruff-0.3.7-py3-none-win32.whl", hash = "sha256:bc931de87593d64fad3a22e201e55ad76271f1d5bfc44e1a1887edd0903c7d9f"}, + {file = "ruff-0.3.7-py3-none-win_amd64.whl", hash = "sha256:5ef0e501e1e39f35e03c2acb1d1238c595b8bb36cf7a170e7c1df1b73da00e74"}, + {file = "ruff-0.3.7-py3-none-win_arm64.whl", hash = "sha256:789e144f6dc7019d1f92a812891c645274ed08af6037d11fc65fcbc183b7d59f"}, + {file = "ruff-0.3.7.tar.gz", hash = "sha256:d5c1aebee5162c2226784800ae031f660c350e7a3402c4d1f8ea4e97e232e3ba"}, +] + +[[package]] +name = "scikit-learn" +version = "1.4.2" +description = "A set of python modules for machine learning and data mining" +optional = false +python-versions = ">=3.9" +files = [ + {file = "scikit-learn-1.4.2.tar.gz", hash = "sha256:daa1c471d95bad080c6e44b4946c9390a4842adc3082572c20e4f8884e39e959"}, + {file = "scikit_learn-1.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8539a41b3d6d1af82eb629f9c57f37428ff1481c1e34dddb3b9d7af8ede67ac5"}, + {file = "scikit_learn-1.4.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:68b8404841f944a4a1459b07198fa2edd41a82f189b44f3e1d55c104dbc2e40c"}, + {file = "scikit_learn-1.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81bf5d8bbe87643103334032dd82f7419bc8c8d02a763643a6b9a5c7288c5054"}, + {file = "scikit_learn-1.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36f0ea5d0f693cb247a073d21a4123bdf4172e470e6d163c12b74cbb1536cf38"}, + {file = "scikit_learn-1.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:87440e2e188c87db80ea4023440923dccbd56fbc2d557b18ced00fef79da0727"}, + {file = "scikit_learn-1.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:45dee87ac5309bb82e3ea633955030df9bbcb8d2cdb30383c6cd483691c546cc"}, + {file = "scikit_learn-1.4.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:1d0b25d9c651fd050555aadd57431b53d4cf664e749069da77f3d52c5ad14b3b"}, + {file = "scikit_learn-1.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0203c368058ab92efc6168a1507d388d41469c873e96ec220ca8e74079bf62e"}, + {file = "scikit_learn-1.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44c62f2b124848a28fd695db5bc4da019287abf390bfce602ddc8aa1ec186aae"}, + {file = "scikit_learn-1.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:5cd7b524115499b18b63f0c96f4224eb885564937a0b3477531b2b63ce331904"}, + {file = "scikit_learn-1.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:90378e1747949f90c8f385898fff35d73193dfcaec3dd75d6b542f90c4e89755"}, + {file = "scikit_learn-1.4.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:ff4effe5a1d4e8fed260a83a163f7dbf4f6087b54528d8880bab1d1377bd78be"}, + {file = "scikit_learn-1.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:671e2f0c3f2c15409dae4f282a3a619601fa824d2c820e5b608d9d775f91780c"}, + {file = "scikit_learn-1.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d36d0bc983336bbc1be22f9b686b50c964f593c8a9a913a792442af9bf4f5e68"}, + {file = "scikit_learn-1.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:d762070980c17ba3e9a4a1e043ba0518ce4c55152032f1af0ca6f39b376b5928"}, + {file = "scikit_learn-1.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d9993d5e78a8148b1d0fdf5b15ed92452af5581734129998c26f481c46586d68"}, + {file = "scikit_learn-1.4.2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:426d258fddac674fdf33f3cb2d54d26f49406e2599dbf9a32b4d1696091d4256"}, + {file = "scikit_learn-1.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5460a1a5b043ae5ae4596b3126a4ec33ccba1b51e7ca2c5d36dac2169f62ab1d"}, + {file = "scikit_learn-1.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49d64ef6cb8c093d883e5a36c4766548d974898d378e395ba41a806d0e824db8"}, + {file = "scikit_learn-1.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:c97a50b05c194be9146d61fe87dbf8eac62b203d9e87a3ccc6ae9aed2dfaf361"}, +] + +[package.dependencies] +joblib = ">=1.2.0" +numpy = ">=1.19.5" +scipy = ">=1.6.0" +threadpoolctl = ">=2.0.0" + +[package.extras] +benchmark = ["matplotlib (>=3.3.4)", "memory-profiler (>=0.57.0)", "pandas (>=1.1.5)"] +docs = ["Pillow (>=7.1.2)", "matplotlib (>=3.3.4)", "memory-profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)", "sphinx (>=6.0.0)", "sphinx-copybutton (>=0.5.2)", "sphinx-gallery (>=0.15.0)", "sphinx-prompt (>=1.3.0)", "sphinxext-opengraph (>=0.4.2)"] +examples = ["matplotlib (>=3.3.4)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)"] +tests = ["black (>=23.3.0)", "matplotlib (>=3.3.4)", "mypy (>=1.3)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "polars (>=0.19.12)", "pooch (>=1.6.0)", "pyamg (>=4.0.0)", "pyarrow (>=12.0.0)", "pytest (>=7.1.2)", "pytest-cov (>=2.9.0)", "ruff (>=0.0.272)", "scikit-image (>=0.17.2)"] + +[[package]] +name = "scipy" +version = "1.13.0" +description = "Fundamental algorithms for scientific computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "scipy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ba419578ab343a4e0a77c0ef82f088238a93eef141b2b8017e46149776dfad4d"}, + {file = "scipy-1.13.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:22789b56a999265431c417d462e5b7f2b487e831ca7bef5edeb56efe4c93f86e"}, + {file = "scipy-1.13.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05f1432ba070e90d42d7fd836462c50bf98bd08bed0aa616c359eed8a04e3922"}, + {file = "scipy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8434f6f3fa49f631fae84afee424e2483289dfc30a47755b4b4e6b07b2633a4"}, + {file = "scipy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:dcbb9ea49b0167de4167c40eeee6e167caeef11effb0670b554d10b1e693a8b9"}, + {file = "scipy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:1d2f7bb14c178f8b13ebae93f67e42b0a6b0fc50eba1cd8021c9b6e08e8fb1cd"}, + {file = "scipy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fbcf8abaf5aa2dc8d6400566c1a727aed338b5fe880cde64907596a89d576fa"}, + {file = "scipy-1.13.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:5e4a756355522eb60fcd61f8372ac2549073c8788f6114449b37e9e8104f15a5"}, + {file = "scipy-1.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5acd8e1dbd8dbe38d0004b1497019b2dbbc3d70691e65d69615f8a7292865d7"}, + {file = "scipy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ff7dad5d24a8045d836671e082a490848e8639cabb3dbdacb29f943a678683d"}, + {file = "scipy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4dca18c3ffee287ddd3bc8f1dabaf45f5305c5afc9f8ab9cbfab855e70b2df5c"}, + {file = "scipy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:a2f471de4d01200718b2b8927f7d76b5d9bde18047ea0fa8bd15c5ba3f26a1d6"}, + {file = "scipy-1.13.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d0de696f589681c2802f9090fff730c218f7c51ff49bf252b6a97ec4a5d19e8b"}, + {file = "scipy-1.13.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:b2a3ff461ec4756b7e8e42e1c681077349a038f0686132d623fa404c0bee2551"}, + {file = "scipy-1.13.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf9fe63e7a4bf01d3645b13ff2aa6dea023d38993f42aaac81a18b1bda7a82a"}, + {file = "scipy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e7626dfd91cdea5714f343ce1176b6c4745155d234f1033584154f60ef1ff42"}, + {file = "scipy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:109d391d720fcebf2fbe008621952b08e52907cf4c8c7efc7376822151820820"}, + {file = "scipy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:8930ae3ea371d6b91c203b1032b9600d69c568e537b7988a3073dfe4d4774f21"}, + {file = "scipy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5407708195cb38d70fd2d6bb04b1b9dd5c92297d86e9f9daae1576bd9e06f602"}, + {file = "scipy-1.13.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:ac38c4c92951ac0f729c4c48c9e13eb3675d9986cc0c83943784d7390d540c78"}, + {file = "scipy-1.13.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09c74543c4fbeb67af6ce457f6a6a28e5d3739a87f62412e4a16e46f164f0ae5"}, + {file = "scipy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28e286bf9ac422d6beb559bc61312c348ca9b0f0dae0d7c5afde7f722d6ea13d"}, + {file = "scipy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:33fde20efc380bd23a78a4d26d59fc8704e9b5fd9b08841693eb46716ba13d86"}, + {file = "scipy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:45c08bec71d3546d606989ba6e7daa6f0992918171e2a6f7fbedfa7361c2de1e"}, + {file = "scipy-1.13.0.tar.gz", hash = "sha256:58569af537ea29d3f78e5abd18398459f195546bb3be23d16677fb26616cc11e"}, +] + +[package.dependencies] +numpy = ">=1.22.4,<2.3" + +[package.extras] +dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy", "pycodestyle", "pydevtool", "rich-click", "ruff", "types-psutil", "typing_extensions"] +doc = ["jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.12.0)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0)", "sphinx-design (>=0.4.0)"] +test = ["array-api-strict", "asv", "gmpy2", "hypothesis (>=6.30)", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] + +[[package]] +name = "setuptools" +version = "69.5.1" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"}, + {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "sqlalchemy" +version = "2.0.29" +description = "Database Abstraction Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "SQLAlchemy-2.0.29-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4c142852ae192e9fe5aad5c350ea6befe9db14370b34047e1f0f7cf99e63c63b"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:99a1e69d4e26f71e750e9ad6fdc8614fbddb67cfe2173a3628a2566034e223c7"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ef3fbccb4058355053c51b82fd3501a6e13dd808c8d8cd2561e610c5456013c"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d6753305936eddc8ed190e006b7bb33a8f50b9854823485eed3a886857ab8d1"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0f3ca96af060a5250a8ad5a63699180bc780c2edf8abf96c58af175921df847a"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c4520047006b1d3f0d89e0532978c0688219857eb2fee7c48052560ae76aca1e"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-win32.whl", hash = "sha256:b2a0e3cf0caac2085ff172c3faacd1e00c376e6884b5bc4dd5b6b84623e29e4f"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-win_amd64.whl", hash = "sha256:01d10638a37460616708062a40c7b55f73e4d35eaa146781c683e0fa7f6c43fb"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:308ef9cb41d099099fffc9d35781638986870b29f744382904bf9c7dadd08513"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:296195df68326a48385e7a96e877bc19aa210e485fa381c5246bc0234c36c78e"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a13b917b4ffe5a0a31b83d051d60477819ddf18276852ea68037a144a506efb9"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f6d971255d9ddbd3189e2e79d743ff4845c07f0633adfd1de3f63d930dbe673"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:61405ea2d563407d316c63a7b5271ae5d274a2a9fbcd01b0aa5503635699fa1e"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:de7202ffe4d4a8c1e3cde1c03e01c1a3772c92858837e8f3879b497158e4cb44"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-win32.whl", hash = "sha256:b5d7ed79df55a731749ce65ec20d666d82b185fa4898430b17cb90c892741520"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-win_amd64.whl", hash = "sha256:205f5a2b39d7c380cbc3b5dcc8f2762fb5bcb716838e2d26ccbc54330775b003"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d96710d834a6fb31e21381c6d7b76ec729bd08c75a25a5184b1089141356171f"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:52de4736404e53c5c6a91ef2698c01e52333988ebdc218f14c833237a0804f1b"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c7b02525ede2a164c5fa5014915ba3591730f2cc831f5be9ff3b7fd3e30958e"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dfefdb3e54cd15f5d56fd5ae32f1da2d95d78319c1f6dfb9bcd0eb15d603d5d"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a88913000da9205b13f6f195f0813b6ffd8a0c0c2bd58d499e00a30eb508870c"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fecd5089c4be1bcc37c35e9aa678938d2888845a134dd016de457b942cf5a758"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-win32.whl", hash = "sha256:8197d6f7a3d2b468861ebb4c9f998b9df9e358d6e1cf9c2a01061cb9b6cf4e41"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-win_amd64.whl", hash = "sha256:9b19836ccca0d321e237560e475fd99c3d8655d03da80c845c4da20dda31b6e1"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:87a1d53a5382cdbbf4b7619f107cc862c1b0a4feb29000922db72e5a66a5ffc0"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a0732dffe32333211801b28339d2a0babc1971bc90a983e3035e7b0d6f06b93"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90453597a753322d6aa770c5935887ab1fc49cc4c4fdd436901308383d698b4b"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ea311d4ee9a8fa67f139c088ae9f905fcf0277d6cd75c310a21a88bf85e130f5"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5f20cb0a63a3e0ec4e169aa8890e32b949c8145983afa13a708bc4b0a1f30e03"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-win32.whl", hash = "sha256:e5bbe55e8552019c6463709b39634a5fc55e080d0827e2a3a11e18eb73f5cdbd"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-win_amd64.whl", hash = "sha256:c2f9c762a2735600654c654bf48dad388b888f8ce387b095806480e6e4ff6907"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7e614d7a25a43a9f54fcce4675c12761b248547f3d41b195e8010ca7297c369c"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:471fcb39c6adf37f820350c28aac4a7df9d3940c6548b624a642852e727ea586"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:988569c8732f54ad3234cf9c561364221a9e943b78dc7a4aaf35ccc2265f1930"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dddaae9b81c88083e6437de95c41e86823d150f4ee94bf24e158a4526cbead01"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:334184d1ab8f4c87f9652b048af3f7abea1c809dfe526fb0435348a6fef3d380"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:38b624e5cf02a69b113c8047cf7f66b5dfe4a2ca07ff8b8716da4f1b3ae81567"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-win32.whl", hash = "sha256:bab41acf151cd68bc2b466deae5deeb9e8ae9c50ad113444151ad965d5bf685b"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-win_amd64.whl", hash = "sha256:52c8011088305476691b8750c60e03b87910a123cfd9ad48576d6414b6ec2a1d"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3071ad498896907a5ef756206b9dc750f8e57352113c19272bdfdc429c7bd7de"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dba622396a3170974f81bad49aacebd243455ec3cc70615aeaef9e9613b5bca5"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b184e3de58009cc0bf32e20f137f1ec75a32470f5fede06c58f6c355ed42a72"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c37f1050feb91f3d6c32f864d8e114ff5545a4a7afe56778d76a9aec62638ba"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bda7ce59b06d0f09afe22c56714c65c957b1068dee3d5e74d743edec7daba552"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:25664e18bef6dc45015b08f99c63952a53a0a61f61f2e48a9e70cec27e55f699"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-win32.whl", hash = "sha256:77d29cb6c34b14af8a484e831ab530c0f7188f8efed1c6a833a2c674bf3c26ec"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-win_amd64.whl", hash = "sha256:04c487305ab035a9548f573763915189fc0fe0824d9ba28433196f8436f1449c"}, + {file = "SQLAlchemy-2.0.29-py3-none-any.whl", hash = "sha256:dc4ee2d4ee43251905f88637d5281a8d52e916a021384ec10758826f5cbae305"}, + {file = "SQLAlchemy-2.0.29.tar.gz", hash = "sha256:bd9566b8e58cabd700bc367b60e90d9349cd16f0984973f98a9a09f9c64e86f0"}, +] + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} +typing-extensions = ">=4.6.0" + +[package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=8)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3_binary"] + +[[package]] +name = "tenacity" +version = "8.2.3" +description = "Retry code until it succeeds" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tenacity-8.2.3-py3-none-any.whl", hash = "sha256:ce510e327a630c9e1beaf17d42e6ffacc88185044ad85cf74c0a8887c6a0f88c"}, + {file = "tenacity-8.2.3.tar.gz", hash = "sha256:5398ef0d78e63f40007c1fb4c0bff96e1911394d2fa8d194f77619c05ff6cc8a"}, +] + +[package.extras] +doc = ["reno", "sphinx", "tornado (>=4.5)"] + +[[package]] +name = "threadpoolctl" +version = "3.4.0" +description = "threadpoolctl" +optional = false +python-versions = ">=3.8" +files = [ + {file = "threadpoolctl-3.4.0-py3-none-any.whl", hash = "sha256:8f4c689a65b23e5ed825c8436a92b818aac005e0f3715f6a1664d7c7ee29d262"}, + {file = "threadpoolctl-3.4.0.tar.gz", hash = "sha256:f11b491a03661d6dd7ef692dd422ab34185d982466c49c8f98c8f716b5c93196"}, +] + +[[package]] +name = "tiktoken" +version = "0.4.0" +description = "tiktoken is a fast BPE tokeniser for use with OpenAI's models" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tiktoken-0.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:176cad7f053d2cc82ce7e2a7c883ccc6971840a4b5276740d0b732a2b2011f8a"}, + {file = "tiktoken-0.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:450d504892b3ac80207700266ee87c932df8efea54e05cefe8613edc963c1285"}, + {file = "tiktoken-0.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00d662de1e7986d129139faf15e6a6ee7665ee103440769b8dedf3e7ba6ac37f"}, + {file = "tiktoken-0.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5727d852ead18b7927b8adf558a6f913a15c7766725b23dbe21d22e243041b28"}, + {file = "tiktoken-0.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c06cd92b09eb0404cedce3702fa866bf0d00e399439dad3f10288ddc31045422"}, + {file = "tiktoken-0.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9ec161e40ed44e4210d3b31e2ff426b4a55e8254f1023e5d2595cb60044f8ea6"}, + {file = "tiktoken-0.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:1e8fa13cf9889d2c928b9e258e9dbbbf88ab02016e4236aae76e3b4f82dd8288"}, + {file = "tiktoken-0.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bb2341836b725c60d0ab3c84970b9b5f68d4b733a7bcb80fb25967e5addb9920"}, + {file = "tiktoken-0.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ca30367ad750ee7d42fe80079d3092bd35bb266be7882b79c3bd159b39a17b0"}, + {file = "tiktoken-0.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3dc3df19ddec79435bb2a94ee46f4b9560d0299c23520803d851008445671197"}, + {file = "tiktoken-0.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d980fa066e962ef0f4dad0222e63a484c0c993c7a47c7dafda844ca5aded1f3"}, + {file = "tiktoken-0.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:329f548a821a2f339adc9fbcfd9fc12602e4b3f8598df5593cfc09839e9ae5e4"}, + {file = "tiktoken-0.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b1a038cee487931a5caaef0a2e8520e645508cde21717eacc9af3fbda097d8bb"}, + {file = "tiktoken-0.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:08efa59468dbe23ed038c28893e2a7158d8c211c3dd07f2bbc9a30e012512f1d"}, + {file = "tiktoken-0.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f3020350685e009053829c1168703c346fb32c70c57d828ca3742558e94827a9"}, + {file = "tiktoken-0.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba16698c42aad8190e746cd82f6a06769ac7edd415d62ba027ea1d99d958ed93"}, + {file = "tiktoken-0.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c15d9955cc18d0d7ffcc9c03dc51167aedae98542238b54a2e659bd25fe77ed"}, + {file = "tiktoken-0.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64e1091c7103100d5e2c6ea706f0ec9cd6dc313e6fe7775ef777f40d8c20811e"}, + {file = "tiktoken-0.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e87751b54eb7bca580126353a9cf17a8a8eaadd44edaac0e01123e1513a33281"}, + {file = "tiktoken-0.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e063b988b8ba8b66d6cc2026d937557437e79258095f52eaecfafb18a0a10c03"}, + {file = "tiktoken-0.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9c6dd439e878172dc163fced3bc7b19b9ab549c271b257599f55afc3a6a5edef"}, + {file = "tiktoken-0.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8d1d97f83697ff44466c6bef5d35b6bcdb51e0125829a9c0ed1e6e39fb9a08fb"}, + {file = "tiktoken-0.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1b6bce7c68aa765f666474c7c11a7aebda3816b58ecafb209afa59c799b0dd2d"}, + {file = "tiktoken-0.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a73286c35899ca51d8d764bc0b4d60838627ce193acb60cc88aea60bddec4fd"}, + {file = "tiktoken-0.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0394967d2236a60fd0aacef26646b53636423cc9c70c32f7c5124ebe86f3093"}, + {file = "tiktoken-0.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:dae2af6f03ecba5f679449fa66ed96585b2fa6accb7fd57d9649e9e398a94f44"}, + {file = "tiktoken-0.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:55e251b1da3c293432179cf7c452cfa35562da286786be5a8b1ee3405c2b0dd2"}, + {file = "tiktoken-0.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:c835d0ee1f84a5aa04921717754eadbc0f0a56cf613f78dfc1cf9ad35f6c3fea"}, + {file = "tiktoken-0.4.0.tar.gz", hash = "sha256:59b20a819969735b48161ced9b92f05dc4519c17be4015cfb73b65270a243620"}, +] + +[package.dependencies] +regex = ">=2022.1.18" +requests = ">=2.26.0" + +[package.extras] +blobfile = ["blobfile (>=2)"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "tqdm" +version = "4.66.2" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tqdm-4.66.2-py3-none-any.whl", hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"}, + {file = "tqdm-4.66.2.tar.gz", hash = "sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + +[[package]] +name = "types-pytz" +version = "2024.1.0.20240203" +description = "Typing stubs for pytz" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-pytz-2024.1.0.20240203.tar.gz", hash = "sha256:c93751ee20dfc6e054a0148f8f5227b9a00b79c90a4d3c9f464711a73179c89e"}, + {file = "types_pytz-2024.1.0.20240203-py3-none-any.whl", hash = "sha256:9679eef0365db3af91ef7722c199dbb75ee5c1b67e3c4dd7bfbeb1b8a71c21a3"}, +] + +[[package]] +name = "typing-extensions" +version = "4.11.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, + {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, +] + +[[package]] +name = "typing-inspect" +version = "0.9.0" +description = "Runtime inspection utilities for typing module." +optional = false +python-versions = "*" +files = [ + {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, + {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, +] + +[package.dependencies] +mypy-extensions = ">=0.3.0" +typing-extensions = ">=3.7.4" + +[[package]] +name = "tzdata" +version = "2024.1" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, +] + +[[package]] +name = "url-normalize" +version = "1.4.3" +description = "URL normalization for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, + {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wcmatch" +version = "8.4" +description = "Wildcard/glob file name matcher." +optional = false +python-versions = ">=3.7" +files = [ + {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, + {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, +] + +[package.dependencies] +bracex = ">=2.1.1" + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[[package]] +name = "yarl" +version = "1.9.4" +description = "Yet another URL library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, + {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, + {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, + {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, + {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, + {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, + {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, + {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, + {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, + {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, + {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, + {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, + {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, + {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, + {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, + {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" + +[[package]] +name = "zipp" +version = "3.18.1" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, + {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9,<3.12" +content-hash = "0d5dfd58633fb8fbcd32ce71909d8742bcf6c8848346bc0deab3aaca1aaa87be" diff --git a/airbyte-integrations/connectors/destination-astra/pyproject.toml b/airbyte-integrations/connectors/destination-astra/pyproject.toml new file mode 100644 index 0000000000000..13a10d5a1d425 --- /dev/null +++ b/airbyte-integrations/connectors/destination-astra/pyproject.toml @@ -0,0 +1,29 @@ +[build-system] +requires = [ "poetry-core>=1.0.0",] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +name = "airbyte-destination-astra" +version = "0.1.2" +description = "Airbyte destination implementation for Astra DB." +authors = ["Airbyte "] +license = "MIT" +readme = "README.md" +documentation = "https://docs.airbyte.com/integrations/destinations/astra" +homepage = "https://airbyte.com" +repository = "https://github.com/airbytehq/airbyte" + +[[tool.poetry.packages]] +include = "destination_astra" + +[tool.poetry.dependencies] +python = "^3.9,<3.12" +airbyte-cdk = {version = "0.81.6", extras = ["vector-db-based"]} + +[tool.poetry.group.dev.dependencies] +pytest = "^7.2" +ruff = "^0.3.2" +mypy = "^1.9.0" + +[tool.poetry.scripts] +destination-astra = "destination_astra.run:run" diff --git a/airbyte-integrations/connectors/destination-astra/setup.py b/airbyte-integrations/connectors/destination-astra/setup.py deleted file mode 100644 index 8bd1a185b52e7..0000000000000 --- a/airbyte-integrations/connectors/destination-astra/setup.py +++ /dev/null @@ -1,23 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = ["airbyte-cdk[vector-db-based]==0.57.0"] - -TEST_REQUIREMENTS = ["pytest~=6.2"] - -setup( - name="destination_astra", - description="Destination implementation for Astra.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={"": ["*.json"]}, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/destination-aws-datalake/Dockerfile b/airbyte-integrations/connectors/destination-aws-datalake/Dockerfile deleted file mode 100644 index 73d0a933e1c59..0000000000000 --- a/airbyte-integrations/connectors/destination-aws-datalake/Dockerfile +++ /dev/null @@ -1,17 +0,0 @@ -FROM python:3.10-slim - -# Bash is installed for more convenient debugging. -# RUN apt-get update && apt-get install -y bash && rm -rf /var/lib/apt/lists/* - -WORKDIR /airbyte/integration_code -COPY main.py ./ -COPY setup.py ./ -RUN pip install . - -COPY destination_aws_datalake ./destination_aws_datalake - -ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] - -LABEL io.airbyte.version=0.1.5 -LABEL io.airbyte.name=airbyte/destination-aws-datalake diff --git a/airbyte-integrations/connectors/destination-aws-datalake/destination_aws_datalake/destination.py b/airbyte-integrations/connectors/destination-aws-datalake/destination_aws_datalake/destination.py index 95bce879685b4..f1d5d3a03a46e 100644 --- a/airbyte-integrations/connectors/destination-aws-datalake/destination_aws_datalake/destination.py +++ b/airbyte-integrations/connectors/destination-aws-datalake/destination_aws_datalake/destination.py @@ -10,7 +10,7 @@ import pandas as pd from airbyte_cdk import AirbyteLogger from airbyte_cdk.destinations import Destination -from airbyte_cdk.models import AirbyteConnectionStatus, AirbyteMessage, ConfiguredAirbyteCatalog, Status, Type +from airbyte_cdk.models import AirbyteConnectionStatus, AirbyteMessage, AirbyteStateType, ConfiguredAirbyteCatalog, Status, Type from botocore.exceptions import ClientError, InvalidRegionError from .aws import AwsHandler @@ -29,7 +29,7 @@ def _flush_streams(self, streams: Dict[str, StreamWriter]) -> None: streams[stream].flush() @staticmethod - def _get_random_string(length): + def _get_random_string(length: int) -> str: return "".join(random.choice(string.ascii_letters) for i in range(length)) def write( @@ -65,29 +65,25 @@ def write( } for message in input_messages: - if message.type == Type.STATE: - if not message.state.data: - - if message.state.stream: - stream = message.state.stream.stream_descriptor.name - logger.info(f"Received empty state for stream {stream}, resetting stream") - if stream in streams: - streams[stream].reset() - else: - logger.warning(f"Trying to reset stream {stream} that is not in the configured catalog") - - if not message.state.stream: - logger.info("Received empty state for, resetting all streams including non-incremental streams") - for stream in streams: - streams[stream].reset() + if message.type == Type.STATE and message.state.type == AirbyteStateType.STREAM: + state_stream = message.state.stream + + if not state_stream.stream_state: + stream = state_stream.stream_descriptor.name + logger.info(f"Received empty state for stream {stream}, resetting stream") + if stream in streams: + streams[stream].reset() + else: + logger.warning(f"Trying to reset stream {stream} that is not in the configured catalog") # Flush records when state is received - if message.state.stream: - if message.state.stream.stream_state and hasattr(message.state.stream.stream_state, "stream_name"): - stream_name = message.state.stream.stream_state.stream_name - if stream_name in streams: - logger.info(f"Got state message from source: flushing records for {stream_name}") - streams[stream_name].flush(partial=True) + else: + stream = state_stream.stream_descriptor.name + if stream in streams: + logger.info(f"Got state message from source: flushing records for {stream}") + streams[stream].flush(partial=True) + else: + logger.warning(f"Trying to flush stream {stream} that is not in the configured catalog") yield message diff --git a/airbyte-integrations/connectors/destination-aws-datalake/integration_tests/integration_test.py b/airbyte-integrations/connectors/destination-aws-datalake/integration_tests/integration_test.py index 8e8c9fd72532b..29f792fbc02e8 100644 --- a/airbyte-integrations/connectors/destination-aws-datalake/integration_tests/integration_test.py +++ b/airbyte-integrations/connectors/destination-aws-datalake/integration_tests/integration_test.py @@ -13,11 +13,14 @@ AirbyteMessage, AirbyteRecordMessage, AirbyteStateMessage, + AirbyteStateType, AirbyteStream, + AirbyteStreamState, ConfiguredAirbyteCatalog, ConfiguredAirbyteStream, DestinationSyncMode, Status, + StreamDescriptor, SyncMode, Type, ) @@ -91,8 +94,14 @@ def test_check_invalid_aws_account_config(invalid_account_config: Mapping): assert outcome.status == Status.FAILED -def _state(data: Dict[str, Any]) -> AirbyteMessage: - return AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(data=data)) +def _state(stream: str, data: Dict[str, Any]) -> AirbyteMessage: + return AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage( + type=AirbyteStateType.STREAM, + stream=AirbyteStreamState( + stream_state=data, + stream_descriptor=StreamDescriptor(name=stream, namespace=None) + ) + )) def _record(stream: str, str_value: str, int_value: int, date_value: datetime) -> AirbyteMessage: @@ -122,13 +131,13 @@ def test_write(config: Mapping, configured_catalog: ConfiguredAirbyteCatalog): for tbl in [append_stream, overwrite_stream]: aws_handler.reset_table(database, tbl) - first_state_message = _state({"state": "1"}) + first_state_message = _state(append_stream, {"state": "1"}) first_record_chunk = [_record(append_stream, str(i), i, datetime.now()) for i in range(5)] + [ _record(overwrite_stream, str(i), i, datetime.now()) for i in range(5) ] - second_state_message = _state({"state": "2"}) + second_state_message = _state(append_stream, {"state": "2"}) second_record_chunk = [_record(append_stream, str(i), i, datetime.now()) for i in range(5, 10)] + [ _record(overwrite_stream, str(i), i, datetime.now()) for i in range(5, 10) ] diff --git a/airbyte-integrations/connectors/destination-aws-datalake/metadata.yaml b/airbyte-integrations/connectors/destination-aws-datalake/metadata.yaml index 86e208bc2562a..2b32398d55c8a 100644 --- a/airbyte-integrations/connectors/destination-aws-datalake/metadata.yaml +++ b/airbyte-integrations/connectors/destination-aws-datalake/metadata.yaml @@ -2,7 +2,9 @@ data: connectorSubtype: database connectorType: destination definitionId: 99878c90-0fbd-46d3-9d98-ffde879d17fc - dockerImageTag: 0.1.5 + connectorBuildOptions: + baseImage: docker.io/airbyte/python-connector-base:1.1.0@sha256:bd98f6505c6764b1b5f99d3aedc23dfc9e9af631a62533f60eb32b1d3dbab20c + dockerImageTag: 0.1.6 dockerRepository: airbyte/destination-aws-datalake githubIssueLabel: destination-aws-datalake icon: awsdatalake.svg diff --git a/airbyte-integrations/connectors/destination-aws-datalake/poetry.lock b/airbyte-integrations/connectors/destination-aws-datalake/poetry.lock new file mode 100644 index 0000000000000..c8c0670d6f1a9 --- /dev/null +++ b/airbyte-integrations/connectors/destination-aws-datalake/poetry.lock @@ -0,0 +1,1370 @@ +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. + +[[package]] +name = "airbyte-cdk" +version = "0.57.0" +description = "A framework for writing Airbyte Connectors." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte-cdk-0.57.0.tar.gz", hash = "sha256:b163c0294ee4f1cd17776e9fafabfa1ec8f0c52796bb22e50288e84752f3cfd6"}, + {file = "airbyte_cdk-0.57.0-py3-none-any.whl", hash = "sha256:cfd22e7a81bb4e9c57a3d9ea35e13752aeefbdc1632fc2aeb99a0c6b02c75eac"}, +] + +[package.dependencies] +airbyte-protocol-models = "0.4.2" +backoff = "*" +cachetools = "*" +Deprecated = ">=1.2,<2.0" +dpath = ">=2.0.1,<2.1.0" +genson = "1.2.2" +isodate = ">=0.6.1,<0.7.0" +Jinja2 = ">=3.1.2,<3.2.0" +jsonref = ">=0.2,<1.0" +jsonschema = ">=3.2.0,<3.3.0" +pendulum = "*" +pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" +python-dateutil = "*" +PyYAML = ">=6.0.1" +requests = "*" +requests-cache = "*" +wcmatch = "8.4" + +[package.extras] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] +vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] + +[[package]] +name = "airbyte-protocol-models" +version = "0.4.2" +description = "Declares the Airbyte Protocol." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte_protocol_models-0.4.2-py3-none-any.whl", hash = "sha256:d3bbb14d4af9483bd7b08f5eb06f87e7113553bf4baed3998af95be873a0d821"}, + {file = "airbyte_protocol_models-0.4.2.tar.gz", hash = "sha256:67b149d4812f8fdb88396b161274aa73cf0e16f22e35ce44f2bfc4d47e51915c"}, +] + +[package.dependencies] +pydantic = ">=1.9.2,<2.0.0" + +[[package]] +name = "atomicwrites" +version = "1.4.1" +description = "Atomic file writes." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, +] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "awswrangler" +version = "3.7.1" +description = "Pandas on AWS." +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "awswrangler-3.7.1-py3-none-any.whl", hash = "sha256:0ddf439d1d84e240e33a510e6f0bee25fdadb1788ea58a12c098eb122c725f7b"}, + {file = "awswrangler-3.7.1.tar.gz", hash = "sha256:b9bb6c8e953c08e22c9b4056ff703be5c418dca1b1a16c3dd820458766a872cf"}, +] + +[package.dependencies] +boto3 = ">=1.20.32,<2.0.0" +botocore = ">=1.23.32,<2.0.0" +numpy = {version = ">=1.18,<2.0", markers = "python_version < \"3.12\""} +packaging = ">=21.1,<24.0" +pandas = {version = ">=1.2.0,<3.0.0", markers = "python_version >= \"3.9\""} +pyarrow = ">=8.0.0" +typing-extensions = ">=4.4.0,<5.0.0" + +[package.extras] +deltalake = ["deltalake (>=0.6.4,<0.16.0)"] +geopandas = ["geopandas (>=0.13.2,<0.14.0)", "geopandas (>=0.14.1,<0.15.0)"] +gremlin = ["aiohttp (>=3.9.0,<4.0.0)", "async-timeout (>=4.0.3,<5.0.0)", "gremlinpython (>=3.7.1,<4.0.0)", "requests (>=2.0.0,<3.0.0)"] +modin = ["modin (==0.23.1post0)", "modin (>=0.26.0,<0.27.0)"] +mysql = ["pymysql (>=1.0.0,<2.0.0)"] +opencypher = ["requests (>=2.0.0,<3.0.0)"] +openpyxl = ["openpyxl (>=3.0.0,<4.0.0)"] +opensearch = ["jsonpath-ng (>=1.5.3,<2.0.0)", "opensearch-py (>=2.0.0,<3.0.0)", "requests-aws4auth (>=1.1.1,<2.0.0)"] +oracle = ["oracledb (>=1,<3)"] +postgres = ["pg8000 (>=1.29.0,<2.0.0)"] +progressbar = ["progressbar2 (>=4.0.0,<5.0.0)"] +ray = ["ray[data,default] (>=2.9.0,<3.0.0)"] +redshift = ["redshift-connector (>=2.0.0,<3.0.0)"] +sparql = ["SPARQLWrapper (>=2.0.0,<3.0.0)", "requests (>=2.0.0,<3.0.0)"] +sqlserver = ["pyodbc (>=4,<6)"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "boto3" +version = "1.34.72" +description = "The AWS SDK for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "boto3-1.34.72-py3-none-any.whl", hash = "sha256:a33585ef0d811ee0dffd92a96108344997a3059262c57349be0761d7885f6ae7"}, + {file = "boto3-1.34.72.tar.gz", hash = "sha256:cbfabd99c113bbb1708c2892e864b6dd739593b97a76fbb2e090a7d965b63b82"}, +] + +[package.dependencies] +botocore = ">=1.34.72,<1.35.0" +jmespath = ">=0.7.1,<2.0.0" +s3transfer = ">=0.10.0,<0.11.0" + +[package.extras] +crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] + +[[package]] +name = "botocore" +version = "1.34.72" +description = "Low-level, data-driven core of boto 3." +optional = false +python-versions = ">=3.8" +files = [ + {file = "botocore-1.34.72-py3-none-any.whl", hash = "sha256:a6b92735a73c19a7e540d77320420da3af3f32c91fa661c738c0b8c9f912d782"}, + {file = "botocore-1.34.72.tar.gz", hash = "sha256:342edb6f91d5839e790411822fc39f9c712c87cdaa7f3b1999f50b1ca16c4a14"}, +] + +[package.dependencies] +jmespath = ">=0.7.1,<2.0.0" +python-dateutil = ">=2.1,<3.0.0" +urllib3 = [ + {version = ">=1.25.4,<1.27", markers = "python_version < \"3.10\""}, + {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""}, +] + +[package.extras] +crt = ["awscrt (==0.19.19)"] + +[[package]] +name = "bracex" +version = "2.4" +description = "Bash style brace expander." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + +[[package]] +name = "cachetools" +version = "5.3.3" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "dpath" +version = "2.0.8" +description = "Filesystem-like pathing and searching for dictionaries" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, + {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "genson" +version = "1.2.2" +description = "GenSON is a powerful, user-friendly JSON Schema generator." +optional = false +python-versions = "*" +files = [ + {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, +] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jmespath" +version = "1.0.1" +description = "JSON Matching Expressions" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] + +[[package]] +name = "jsonref" +version = "0.3.0" +description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." +optional = false +python-versions = ">=3.3,<4.0" +files = [ + {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, + {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, +] + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0" +setuptools = "*" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "numpy" +version = "1.26.4" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, +] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pandas" +version = "2.0.3" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pandas-2.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e4c7c9f27a4185304c7caf96dc7d91bc60bc162221152de697c98eb0b2648dd8"}, + {file = "pandas-2.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f167beed68918d62bffb6ec64f2e1d8a7d297a038f86d4aed056b9493fca407f"}, + {file = "pandas-2.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce0c6f76a0f1ba361551f3e6dceaff06bde7514a374aa43e33b588ec10420183"}, + {file = "pandas-2.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba619e410a21d8c387a1ea6e8a0e49bb42216474436245718d7f2e88a2f8d7c0"}, + {file = "pandas-2.0.3-cp310-cp310-win32.whl", hash = "sha256:3ef285093b4fe5058eefd756100a367f27029913760773c8bf1d2d8bebe5d210"}, + {file = "pandas-2.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:9ee1a69328d5c36c98d8e74db06f4ad518a1840e8ccb94a4ba86920986bb617e"}, + {file = "pandas-2.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b084b91d8d66ab19f5bb3256cbd5ea661848338301940e17f4492b2ce0801fe8"}, + {file = "pandas-2.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:37673e3bdf1551b95bf5d4ce372b37770f9529743d2498032439371fc7b7eb26"}, + {file = "pandas-2.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9cb1e14fdb546396b7e1b923ffaeeac24e4cedd14266c3497216dd4448e4f2d"}, + {file = "pandas-2.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9cd88488cceb7635aebb84809d087468eb33551097d600c6dad13602029c2df"}, + {file = "pandas-2.0.3-cp311-cp311-win32.whl", hash = "sha256:694888a81198786f0e164ee3a581df7d505024fbb1f15202fc7db88a71d84ebd"}, + {file = "pandas-2.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:6a21ab5c89dcbd57f78d0ae16630b090eec626360085a4148693def5452d8a6b"}, + {file = "pandas-2.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9e4da0d45e7f34c069fe4d522359df7d23badf83abc1d1cef398895822d11061"}, + {file = "pandas-2.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:32fca2ee1b0d93dd71d979726b12b61faa06aeb93cf77468776287f41ff8fdc5"}, + {file = "pandas-2.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:258d3624b3ae734490e4d63c430256e716f488c4fcb7c8e9bde2d3aa46c29089"}, + {file = "pandas-2.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eae3dc34fa1aa7772dd3fc60270d13ced7346fcbcfee017d3132ec625e23bb0"}, + {file = "pandas-2.0.3-cp38-cp38-win32.whl", hash = "sha256:f3421a7afb1a43f7e38e82e844e2bca9a6d793d66c1a7f9f0ff39a795bbc5e02"}, + {file = "pandas-2.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:69d7f3884c95da3a31ef82b7618af5710dba95bb885ffab339aad925c3e8ce78"}, + {file = "pandas-2.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5247fb1ba347c1261cbbf0fcfba4a3121fbb4029d95d9ef4dc45406620b25c8b"}, + {file = "pandas-2.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:81af086f4543c9d8bb128328b5d32e9986e0c84d3ee673a2ac6fb57fd14f755e"}, + {file = "pandas-2.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1994c789bf12a7c5098277fb43836ce090f1073858c10f9220998ac74f37c69b"}, + {file = "pandas-2.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ec591c48e29226bcbb316e0c1e9423622bc7a4eaf1ef7c3c9fa1a3981f89641"}, + {file = "pandas-2.0.3-cp39-cp39-win32.whl", hash = "sha256:04dbdbaf2e4d46ca8da896e1805bc04eb85caa9a82e259e8eed00254d5e0c682"}, + {file = "pandas-2.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:1168574b036cd8b93abc746171c9b4f1b83467438a5e45909fed645cf8692dbc"}, + {file = "pandas-2.0.3.tar.gz", hash = "sha256:c02f372a88e0d17f36d3093a644c73cfc1788e876a7c4bcb4020a77512e2043c"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.20.3", markers = "python_version < \"3.10\""}, + {version = ">=1.21.0", markers = "python_version >= \"3.10\""}, + {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.1" + +[package.extras] +all = ["PyQt5 (>=5.15.1)", "SQLAlchemy (>=1.4.16)", "beautifulsoup4 (>=4.9.3)", "bottleneck (>=1.3.2)", "brotlipy (>=0.7.0)", "fastparquet (>=0.6.3)", "fsspec (>=2021.07.0)", "gcsfs (>=2021.07.0)", "html5lib (>=1.1)", "hypothesis (>=6.34.2)", "jinja2 (>=3.0.0)", "lxml (>=4.6.3)", "matplotlib (>=3.6.1)", "numba (>=0.53.1)", "numexpr (>=2.7.3)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pandas-gbq (>=0.15.0)", "psycopg2 (>=2.8.6)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)", "python-snappy (>=0.6.0)", "pyxlsb (>=1.0.8)", "qtpy (>=2.2.0)", "s3fs (>=2021.08.0)", "scipy (>=1.7.1)", "tables (>=3.6.1)", "tabulate (>=0.8.9)", "xarray (>=0.21.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)", "zstandard (>=0.15.2)"] +aws = ["s3fs (>=2021.08.0)"] +clipboard = ["PyQt5 (>=5.15.1)", "qtpy (>=2.2.0)"] +compression = ["brotlipy (>=0.7.0)", "python-snappy (>=0.6.0)", "zstandard (>=0.15.2)"] +computation = ["scipy (>=1.7.1)", "xarray (>=0.21.0)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pyxlsb (>=1.0.8)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)"] +feather = ["pyarrow (>=7.0.0)"] +fss = ["fsspec (>=2021.07.0)"] +gcp = ["gcsfs (>=2021.07.0)", "pandas-gbq (>=0.15.0)"] +hdf5 = ["tables (>=3.6.1)"] +html = ["beautifulsoup4 (>=4.9.3)", "html5lib (>=1.1)", "lxml (>=4.6.3)"] +mysql = ["SQLAlchemy (>=1.4.16)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.0.0)", "tabulate (>=0.8.9)"] +parquet = ["pyarrow (>=7.0.0)"] +performance = ["bottleneck (>=1.3.2)", "numba (>=0.53.1)", "numexpr (>=2.7.1)"] +plot = ["matplotlib (>=3.6.1)"] +postgresql = ["SQLAlchemy (>=1.4.16)", "psycopg2 (>=2.8.6)"] +spss = ["pyreadstat (>=1.1.2)"] +sql-other = ["SQLAlchemy (>=1.4.16)"] +test = ["hypothesis (>=6.34.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.6.3)"] + +[[package]] +name = "pendulum" +version = "3.0.0" +description = "Python datetimes made easy" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pendulum-3.0.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2cf9e53ef11668e07f73190c805dbdf07a1939c3298b78d5a9203a86775d1bfd"}, + {file = "pendulum-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fb551b9b5e6059377889d2d878d940fd0bbb80ae4810543db18e6f77b02c5ef6"}, + {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c58227ac260d5b01fc1025176d7b31858c9f62595737f350d22124a9a3ad82d"}, + {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60fb6f415fea93a11c52578eaa10594568a6716602be8430b167eb0d730f3332"}, + {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b69f6b4dbcb86f2c2fe696ba991e67347bcf87fe601362a1aba6431454b46bde"}, + {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:138afa9c373ee450ede206db5a5e9004fd3011b3c6bbe1e57015395cd076a09f"}, + {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:83d9031f39c6da9677164241fd0d37fbfc9dc8ade7043b5d6d62f56e81af8ad2"}, + {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0c2308af4033fa534f089595bcd40a95a39988ce4059ccd3dc6acb9ef14ca44a"}, + {file = "pendulum-3.0.0-cp310-none-win_amd64.whl", hash = "sha256:9a59637cdb8462bdf2dbcb9d389518c0263799189d773ad5c11db6b13064fa79"}, + {file = "pendulum-3.0.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3725245c0352c95d6ca297193192020d1b0c0f83d5ee6bb09964edc2b5a2d508"}, + {file = "pendulum-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6c035f03a3e565ed132927e2c1b691de0dbf4eb53b02a5a3c5a97e1a64e17bec"}, + {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:597e66e63cbd68dd6d58ac46cb7a92363d2088d37ccde2dae4332ef23e95cd00"}, + {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99a0f8172e19f3f0c0e4ace0ad1595134d5243cf75985dc2233e8f9e8de263ca"}, + {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:77d8839e20f54706aed425bec82a83b4aec74db07f26acd039905d1237a5e1d4"}, + {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afde30e8146292b059020fbc8b6f8fd4a60ae7c5e6f0afef937bbb24880bdf01"}, + {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:660434a6fcf6303c4efd36713ca9212c753140107ee169a3fc6c49c4711c2a05"}, + {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dee9e5a48c6999dc1106eb7eea3e3a50e98a50651b72c08a87ee2154e544b33e"}, + {file = "pendulum-3.0.0-cp311-none-win_amd64.whl", hash = "sha256:d4cdecde90aec2d67cebe4042fd2a87a4441cc02152ed7ed8fb3ebb110b94ec4"}, + {file = "pendulum-3.0.0-cp311-none-win_arm64.whl", hash = "sha256:773c3bc4ddda2dda9f1b9d51fe06762f9200f3293d75c4660c19b2614b991d83"}, + {file = "pendulum-3.0.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:409e64e41418c49f973d43a28afe5df1df4f1dd87c41c7c90f1a63f61ae0f1f7"}, + {file = "pendulum-3.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a38ad2121c5ec7c4c190c7334e789c3b4624798859156b138fcc4d92295835dc"}, + {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fde4d0b2024b9785f66b7f30ed59281bd60d63d9213cda0eb0910ead777f6d37"}, + {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b2c5675769fb6d4c11238132962939b960fcb365436b6d623c5864287faa319"}, + {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8af95e03e066826f0f4c65811cbee1b3123d4a45a1c3a2b4fc23c4b0dff893b5"}, + {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2165a8f33cb15e06c67070b8afc87a62b85c5a273e3aaa6bc9d15c93a4920d6f"}, + {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ad5e65b874b5e56bd942546ea7ba9dd1d6a25121db1c517700f1c9de91b28518"}, + {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17fe4b2c844bbf5f0ece69cfd959fa02957c61317b2161763950d88fed8e13b9"}, + {file = "pendulum-3.0.0-cp312-none-win_amd64.whl", hash = "sha256:78f8f4e7efe5066aca24a7a57511b9c2119f5c2b5eb81c46ff9222ce11e0a7a5"}, + {file = "pendulum-3.0.0-cp312-none-win_arm64.whl", hash = "sha256:28f49d8d1e32aae9c284a90b6bb3873eee15ec6e1d9042edd611b22a94ac462f"}, + {file = "pendulum-3.0.0-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:d4e2512f4e1a4670284a153b214db9719eb5d14ac55ada5b76cbdb8c5c00399d"}, + {file = "pendulum-3.0.0-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:3d897eb50883cc58d9b92f6405245f84b9286cd2de6e8694cb9ea5cb15195a32"}, + {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e169cc2ca419517f397811bbe4589cf3cd13fca6dc38bb352ba15ea90739ebb"}, + {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f17c3084a4524ebefd9255513692f7e7360e23c8853dc6f10c64cc184e1217ab"}, + {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:826d6e258052715f64d05ae0fc9040c0151e6a87aae7c109ba9a0ed930ce4000"}, + {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2aae97087872ef152a0c40e06100b3665d8cb86b59bc8471ca7c26132fccd0f"}, + {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ac65eeec2250d03106b5e81284ad47f0d417ca299a45e89ccc69e36130ca8bc7"}, + {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a5346d08f3f4a6e9e672187faa179c7bf9227897081d7121866358af369f44f9"}, + {file = "pendulum-3.0.0-cp37-none-win_amd64.whl", hash = "sha256:235d64e87946d8f95c796af34818c76e0f88c94d624c268693c85b723b698aa9"}, + {file = "pendulum-3.0.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:6a881d9c2a7f85bc9adafcfe671df5207f51f5715ae61f5d838b77a1356e8b7b"}, + {file = "pendulum-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d7762d2076b9b1cb718a6631ad6c16c23fc3fac76cbb8c454e81e80be98daa34"}, + {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e8e36a8130819d97a479a0e7bf379b66b3b1b520e5dc46bd7eb14634338df8c"}, + {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7dc843253ac373358ffc0711960e2dd5b94ab67530a3e204d85c6e8cb2c5fa10"}, + {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a78ad3635d609ceb1e97d6aedef6a6a6f93433ddb2312888e668365908c7120"}, + {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b30a137e9e0d1f751e60e67d11fc67781a572db76b2296f7b4d44554761049d6"}, + {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c95984037987f4a457bb760455d9ca80467be792236b69d0084f228a8ada0162"}, + {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d29c6e578fe0f893766c0d286adbf0b3c726a4e2341eba0917ec79c50274ec16"}, + {file = "pendulum-3.0.0-cp38-none-win_amd64.whl", hash = "sha256:deaba8e16dbfcb3d7a6b5fabdd5a38b7c982809567479987b9c89572df62e027"}, + {file = "pendulum-3.0.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b11aceea5b20b4b5382962b321dbc354af0defe35daa84e9ff3aae3c230df694"}, + {file = "pendulum-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a90d4d504e82ad236afac9adca4d6a19e4865f717034fc69bafb112c320dcc8f"}, + {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:825799c6b66e3734227756fa746cc34b3549c48693325b8b9f823cb7d21b19ac"}, + {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad769e98dc07972e24afe0cff8d365cb6f0ebc7e65620aa1976fcfbcadc4c6f3"}, + {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6fc26907eb5fb8cc6188cc620bc2075a6c534d981a2f045daa5f79dfe50d512"}, + {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c717eab1b6d898c00a3e0fa7781d615b5c5136bbd40abe82be100bb06df7a56"}, + {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3ddd1d66d1a714ce43acfe337190be055cdc221d911fc886d5a3aae28e14b76d"}, + {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:822172853d7a9cf6da95d7b66a16c7160cb99ae6df55d44373888181d7a06edc"}, + {file = "pendulum-3.0.0-cp39-none-win_amd64.whl", hash = "sha256:840de1b49cf1ec54c225a2a6f4f0784d50bd47f68e41dc005b7f67c7d5b5f3ae"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3b1f74d1e6ffe5d01d6023870e2ce5c2191486928823196f8575dcc786e107b1"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:729e9f93756a2cdfa77d0fc82068346e9731c7e884097160603872686e570f07"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e586acc0b450cd21cbf0db6bae386237011b75260a3adceddc4be15334689a9a"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22e7944ffc1f0099a79ff468ee9630c73f8c7835cd76fdb57ef7320e6a409df4"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fa30af36bd8e50686846bdace37cf6707bdd044e5cb6e1109acbad3277232e04"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:440215347b11914ae707981b9a57ab9c7b6983ab0babde07063c6ee75c0dc6e7"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:314c4038dc5e6a52991570f50edb2f08c339debdf8cea68ac355b32c4174e820"}, + {file = "pendulum-3.0.0-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5acb1d386337415f74f4d1955c4ce8d0201978c162927d07df8eb0692b2d8533"}, + {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a789e12fbdefaffb7b8ac67f9d8f22ba17a3050ceaaa635cd1cc4645773a4b1e"}, + {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:860aa9b8a888e5913bd70d819306749e5eb488e6b99cd6c47beb701b22bdecf5"}, + {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:5ebc65ea033ef0281368217fbf59f5cb05b338ac4dd23d60959c7afcd79a60a0"}, + {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d9fef18ab0386ef6a9ac7bad7e43ded42c83ff7ad412f950633854f90d59afa8"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1c134ba2f0571d0b68b83f6972e2307a55a5a849e7dac8505c715c531d2a8795"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:385680812e7e18af200bb9b4a49777418c32422d05ad5a8eb85144c4a285907b"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eec91cd87c59fb32ec49eb722f375bd58f4be790cae11c1b70fac3ee4f00da0"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4386bffeca23c4b69ad50a36211f75b35a4deb6210bdca112ac3043deb7e494a"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dfbcf1661d7146d7698da4b86e7f04814221081e9fe154183e34f4c5f5fa3bf8"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:04a1094a5aa1daa34a6b57c865b25f691848c61583fb22722a4df5699f6bf74c"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5b0ec85b9045bd49dd3a3493a5e7ddfd31c36a2a60da387c419fa04abcaecb23"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0a15b90129765b705eb2039062a6daf4d22c4e28d1a54fa260892e8c3ae6e157"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:bb8f6d7acd67a67d6fedd361ad2958ff0539445ef51cbe8cd288db4306503cd0"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd69b15374bef7e4b4440612915315cc42e8575fcda2a3d7586a0d88192d0c88"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc00f8110db6898360c53c812872662e077eaf9c75515d53ecc65d886eec209a"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:83a44e8b40655d0ba565a5c3d1365d27e3e6778ae2a05b69124db9e471255c4a"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1a3604e9fbc06b788041b2a8b78f75c243021e0f512447806a6d37ee5214905d"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:92c307ae7accebd06cbae4729f0ba9fa724df5f7d91a0964b1b972a22baa482b"}, + {file = "pendulum-3.0.0.tar.gz", hash = "sha256:5d034998dea404ec31fae27af6b22cff1708f830a1ed7353be4d1019bb9f584e"}, +] + +[package.dependencies] +python-dateutil = ">=2.6" +tzdata = ">=2020.1" + +[package.extras] +test = ["time-machine (>=2.6.0)"] + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] + +[[package]] +name = "pyarrow" +version = "15.0.2" +description = "Python library for Apache Arrow" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyarrow-15.0.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:88b340f0a1d05b5ccc3d2d986279045655b1fe8e41aba6ca44ea28da0d1455d8"}, + {file = "pyarrow-15.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eaa8f96cecf32da508e6c7f69bb8401f03745c050c1dd42ec2596f2e98deecac"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23c6753ed4f6adb8461e7c383e418391b8d8453c5d67e17f416c3a5d5709afbd"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f639c059035011db8c0497e541a8a45d98a58dbe34dc8fadd0ef128f2cee46e5"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:290e36a59a0993e9a5224ed2fb3e53375770f07379a0ea03ee2fce2e6d30b423"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:06c2bb2a98bc792f040bef31ad3e9be6a63d0cb39189227c08a7d955db96816e"}, + {file = "pyarrow-15.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:f7a197f3670606a960ddc12adbe8075cea5f707ad7bf0dffa09637fdbb89f76c"}, + {file = "pyarrow-15.0.2-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:5f8bc839ea36b1f99984c78e06e7a06054693dc2af8920f6fb416b5bca9944e4"}, + {file = "pyarrow-15.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f5e81dfb4e519baa6b4c80410421528c214427e77ca0ea9461eb4097c328fa33"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a4f240852b302a7af4646c8bfe9950c4691a419847001178662a98915fd7ee7"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e7d9cfb5a1e648e172428c7a42b744610956f3b70f524aa3a6c02a448ba853e"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2d4f905209de70c0eb5b2de6763104d5a9a37430f137678edfb9a675bac9cd98"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:90adb99e8ce5f36fbecbbc422e7dcbcbed07d985eed6062e459e23f9e71fd197"}, + {file = "pyarrow-15.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:b116e7fd7889294cbd24eb90cd9bdd3850be3738d61297855a71ac3b8124ee38"}, + {file = "pyarrow-15.0.2-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:25335e6f1f07fdaa026a61c758ee7d19ce824a866b27bba744348fa73bb5a440"}, + {file = "pyarrow-15.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:90f19e976d9c3d8e73c80be84ddbe2f830b6304e4c576349d9360e335cd627fc"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a22366249bf5fd40ddacc4f03cd3160f2d7c247692945afb1899bab8a140ddfb"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2a335198f886b07e4b5ea16d08ee06557e07db54a8400cc0d03c7f6a22f785f"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:3e6d459c0c22f0b9c810a3917a1de3ee704b021a5fb8b3bacf968eece6df098f"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:033b7cad32198754d93465dcfb71d0ba7cb7cd5c9afd7052cab7214676eec38b"}, + {file = "pyarrow-15.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:29850d050379d6e8b5a693098f4de7fd6a2bea4365bfd073d7c57c57b95041ee"}, + {file = "pyarrow-15.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:7167107d7fb6dcadb375b4b691b7e316f4368f39f6f45405a05535d7ad5e5058"}, + {file = "pyarrow-15.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e85241b44cc3d365ef950432a1b3bd44ac54626f37b2e3a0cc89c20e45dfd8bf"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:248723e4ed3255fcd73edcecc209744d58a9ca852e4cf3d2577811b6d4b59818"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ff3bdfe6f1b81ca5b73b70a8d482d37a766433823e0c21e22d1d7dde76ca33f"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:f3d77463dee7e9f284ef42d341689b459a63ff2e75cee2b9302058d0d98fe142"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:8c1faf2482fb89766e79745670cbca04e7018497d85be9242d5350cba21357e1"}, + {file = "pyarrow-15.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:28f3016958a8e45a1069303a4a4f6a7d4910643fc08adb1e2e4a7ff056272ad3"}, + {file = "pyarrow-15.0.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:89722cb64286ab3d4daf168386f6968c126057b8c7ec3ef96302e81d8cdb8ae4"}, + {file = "pyarrow-15.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cd0ba387705044b3ac77b1b317165c0498299b08261d8122c96051024f953cd5"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad2459bf1f22b6a5cdcc27ebfd99307d5526b62d217b984b9f5c974651398832"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58922e4bfece8b02abf7159f1f53a8f4d9f8e08f2d988109126c17c3bb261f22"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:adccc81d3dc0478ea0b498807b39a8d41628fa9210729b2f718b78cb997c7c91"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:8bd2baa5fe531571847983f36a30ddbf65261ef23e496862ece83bdceb70420d"}, + {file = "pyarrow-15.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6669799a1d4ca9da9c7e06ef48368320f5856f36f9a4dd31a11839dda3f6cc8c"}, + {file = "pyarrow-15.0.2.tar.gz", hash = "sha256:9c9bc803cb3b7bfacc1e96ffbfd923601065d9d3f911179d81e72d99fd74a3d9"}, +] + +[package.dependencies] +numpy = ">=1.16.6,<2" + +[[package]] +name = "pydantic" +version = "1.10.14" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, + {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, + {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, + {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, + {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, + {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, + {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, + {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, + {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, + {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + +[[package]] +name = "pyrsistent" +version = "0.20.0" +description = "Persistent/Functional/Immutable data structures" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, +] + +[[package]] +name = "pytest" +version = "6.2.5" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, + {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, +] + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +toml = "*" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-cache" +version = "1.2.0" +description = "A persistent cache for python requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, +] + +[package.dependencies] +attrs = ">=21.2" +cattrs = ">=22.2" +platformdirs = ">=2.5" +requests = ">=2.22" +url-normalize = ">=1.4" +urllib3 = ">=1.25.5" + +[package.extras] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] +bson = ["bson (>=0.5)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] +dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] +json = ["ujson (>=5.4)"] +mongodb = ["pymongo (>=3)"] +redis = ["redis (>=3)"] +security = ["itsdangerous (>=2.0)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "retrying" +version = "1.3.4" +description = "Retrying" +optional = false +python-versions = "*" +files = [ + {file = "retrying-1.3.4-py3-none-any.whl", hash = "sha256:8cc4d43cb8e1125e0ff3344e9de678fefd85db3b750b81b2240dc0183af37b35"}, + {file = "retrying-1.3.4.tar.gz", hash = "sha256:345da8c5765bd982b1d1915deb9102fd3d1f7ad16bd84a9700b85f64d24e8f3e"}, +] + +[package.dependencies] +six = ">=1.7.0" + +[[package]] +name = "s3transfer" +version = "0.10.1" +description = "An Amazon S3 Transfer Manager" +optional = false +python-versions = ">= 3.8" +files = [ + {file = "s3transfer-0.10.1-py3-none-any.whl", hash = "sha256:ceb252b11bcf87080fb7850a224fb6e05c8a776bab8f2b64b7f25b969464839d"}, + {file = "s3transfer-0.10.1.tar.gz", hash = "sha256:5683916b4c724f799e600f41dd9e10a9ff19871bf87623cc8f491cb4f5fa0a19"}, +] + +[package.dependencies] +botocore = ">=1.33.2,<2.0a.0" + +[package.extras] +crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] + +[[package]] +name = "setuptools" +version = "69.2.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "typing-extensions" +version = "4.10.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, +] + +[[package]] +name = "tzdata" +version = "2024.1" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, +] + +[[package]] +name = "url-normalize" +version = "1.4.3" +description = "URL normalization for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, + {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "urllib3" +version = "1.26.18" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"}, + {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"}, +] + +[package.extras] +brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wcmatch" +version = "8.4" +description = "Wildcard/glob file name matcher." +optional = false +python-versions = ">=3.7" +files = [ + {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, + {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, +] + +[package.dependencies] +bracex = ">=2.1.1" + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9,<3.12" +content-hash = "c1af7e485e1870498e55f1a411f1339b97725d547038f08b9ba714d1b7ad31f4" diff --git a/airbyte-integrations/connectors/destination-aws-datalake/pyproject.toml b/airbyte-integrations/connectors/destination-aws-datalake/pyproject.toml new file mode 100644 index 0000000000000..0fd13aa1c0ee8 --- /dev/null +++ b/airbyte-integrations/connectors/destination-aws-datalake/pyproject.toml @@ -0,0 +1,29 @@ +[build-system] +requires = [ "poetry-core>=1.0.0",] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +version = "0.1.6" +name = "destination-aws-datalake" +description = "Destination Implementation for AWS Datalake." +authors = [ "Airbyte ",] +license = "MIT" +readme = "README.md" +documentation = "https://docs.airbyte.com/integrations/destination/aws-datalake" +homepage = "https://airbyte.com" +repository = "https://github.com/airbytehq/airbyte" +[[tool.poetry.packages]] +include = "destination_aws_datalake" + +[tool.poetry.dependencies] +python = "^3.9,<3.12" +airbyte-cdk = "==0.57.0" +retrying = "^1" +awswrangler = "==3.7.1" +pandas = "==2.0.3" + +[tool.poetry.scripts] +destination-aws-datalake = "destination_aws_datalake.run:run" + +[tool.poetry.group.dev.dependencies] +pytest = "^6.1" diff --git a/airbyte-integrations/connectors/destination-aws-datalake/setup.py b/airbyte-integrations/connectors/destination-aws-datalake/setup.py deleted file mode 100644 index ca86286554645..0000000000000 --- a/airbyte-integrations/connectors/destination-aws-datalake/setup.py +++ /dev/null @@ -1,28 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = [ - "airbyte-cdk~=0.1", - "retrying", - "awswrangler==3.3.0", - "pandas==2.0.3", -] - -TEST_REQUIREMENTS = ["pytest~=6.1"] - -setup( - name="destination_aws_datalake", - description="Destination implementation for AWS Datalake.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={"": ["*.json"]}, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/destination-bigquery/build.gradle b/airbyte-integrations/connectors/destination-bigquery/build.gradle index 14da5852eef09..af57ba604a3c5 100644 --- a/airbyte-integrations/connectors/destination-bigquery/build.gradle +++ b/airbyte-integrations/connectors/destination-bigquery/build.gradle @@ -1,6 +1,5 @@ plugins { id 'airbyte-java-connector' - id 'org.jetbrains.kotlin.jvm' version '1.9.22' } airbyteJavaConnector { diff --git a/airbyte-integrations/connectors/destination-chroma/.dockerignore b/airbyte-integrations/connectors/destination-chroma/.dockerignore index f89c3a5ca804f..943fe5d6d53d1 100644 --- a/airbyte-integrations/connectors/destination-chroma/.dockerignore +++ b/airbyte-integrations/connectors/destination-chroma/.dockerignore @@ -2,4 +2,5 @@ !Dockerfile !main.py !destination_chroma -!setup.py +!pyproject.toml +!poetry.lock diff --git a/airbyte-integrations/connectors/destination-chroma/Dockerfile b/airbyte-integrations/connectors/destination-chroma/Dockerfile deleted file mode 100644 index 6eec4a792d2a9..0000000000000 --- a/airbyte-integrations/connectors/destination-chroma/Dockerfile +++ /dev/null @@ -1,45 +0,0 @@ -FROM python:3.10-slim as base - -# build and load all requirements -FROM base as builder -WORKDIR /airbyte/integration_code - -RUN apt-get update \ - && pip install --upgrade pip \ - && apt-get install -y build-essential cmake g++ libffi-dev libstdc++6 - -# upgrade pip to the latest version -COPY setup.py ./ - -RUN pip install --upgrade pip - -# This is required because the current connector dependency is not compatible with the CDK version -# An older CDK version will be used, which depends on pyYAML 5.4, for which we need to pin Cython to <3.0 -# As of today the CDK version that satisfies the main dependency requirements, is 0.1.80 ... -RUN pip install --prefix=/install "Cython<3.0" "pyyaml~=5.4" --no-build-isolation - -# install necessary packages to a temporary folder -RUN pip install --prefix=/install . - -# build a clean environment -FROM base -WORKDIR /airbyte/integration_code - -# copy all loaded and built libraries to a pure basic image -COPY --from=builder /install /usr/local -# add default timezone settings -COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime -RUN echo "Etc/UTC" > /etc/timezone - -# bash is installed for more convenient debugging. -RUN apt-get install bash - -# copy payload code only -COPY main.py ./ -COPY destination_chroma ./destination_chroma - -ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] - -LABEL io.airbyte.version=0.0.9 -LABEL io.airbyte.name=airbyte/destination-chroma diff --git a/airbyte-integrations/connectors/destination-chroma/README.md b/airbyte-integrations/connectors/destination-chroma/README.md index 0a18a90bb5c93..eb27467da102f 100644 --- a/airbyte-integrations/connectors/destination-chroma/README.md +++ b/airbyte-integrations/connectors/destination-chroma/README.md @@ -10,24 +10,11 @@ For information about how to use this connector within Airbyte, see [the documen #### Minimum Python version required `= 3.7.0` -#### Build & Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python -m venv .venv -``` - -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt +### Installing the connector +From this connector directory, run: +```bash +poetry install --with dev ``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. - -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. #### Create credentials **If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/destinations/chroma) @@ -42,8 +29,7 @@ and place them into `secrets/config.json`. ``` python main.py spec python main.py check --config secrets/config.json -python main.py discover --config secrets/config.json -python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +python main.py write --config secrets/config.json --catalog integration_tests/configured_catalog.json ``` ### Locally running the connector docker image @@ -77,6 +63,18 @@ You can run our full test suite locally using [`airbyte-ci`](https://github.com/ airbyte-ci connectors --name=destination-chroma test ``` +### Unit Tests +To run unit tests locally, from the connector directory run: +``` +poetry run pytest -s unit_tests +``` + +### Integration Tests +To run integration tests locally, make sure you have a secrets/config.json as explained above, and then run: +``` +poetry run pytest -s integration_tests +``` + ### Customizing acceptance Tests Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. diff --git a/airbyte-integrations/connectors/destination-chroma/metadata.yaml b/airbyte-integrations/connectors/destination-chroma/metadata.yaml index 3fbf9ea9d1098..a0ebf0b625530 100644 --- a/airbyte-integrations/connectors/destination-chroma/metadata.yaml +++ b/airbyte-integrations/connectors/destination-chroma/metadata.yaml @@ -4,10 +4,12 @@ data: enabled: true cloud: enabled: false + connectorBuildOptions: + baseImage: docker.io/airbyte/python-connector-base:1.1.0@sha256:bd98f6505c6764b1b5f99d3aedc23dfc9e9af631a62533f60eb32b1d3dbab20c connectorSubtype: vectorstore connectorType: destination definitionId: 0b75218b-f702-4a28-85ac-34d3d84c0fc2 - dockerImageTag: 0.0.9 + dockerImageTag: 0.0.10 dockerRepository: airbyte/destination-chroma githubIssueLabel: destination-chroma icon: chroma.svg diff --git a/airbyte-integrations/connectors/destination-chroma/poetry.lock b/airbyte-integrations/connectors/destination-chroma/poetry.lock new file mode 100644 index 0000000000000..4429a15930580 --- /dev/null +++ b/airbyte-integrations/connectors/destination-chroma/poetry.lock @@ -0,0 +1,4523 @@ +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. + +[[package]] +name = "aiohttp" +version = "3.9.4" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "aiohttp-3.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:76d32588ef7e4a3f3adff1956a0ba96faabbdee58f2407c122dd45aa6e34f372"}, + {file = "aiohttp-3.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:56181093c10dbc6ceb8a29dfeea1e815e1dfdc020169203d87fd8d37616f73f9"}, + {file = "aiohttp-3.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7a5b676d3c65e88b3aca41816bf72831898fcd73f0cbb2680e9d88e819d1e4d"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1df528a85fb404899d4207a8d9934cfd6be626e30e5d3a5544a83dbae6d8a7e"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f595db1bceabd71c82e92df212dd9525a8a2c6947d39e3c994c4f27d2fe15b11"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c0b09d76e5a4caac3d27752027fbd43dc987b95f3748fad2b924a03fe8632ad"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:689eb4356649ec9535b3686200b231876fb4cab4aca54e3bece71d37f50c1d13"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3666cf4182efdb44d73602379a66f5fdfd5da0db5e4520f0ac0dcca644a3497"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b65b0f8747b013570eea2f75726046fa54fa8e0c5db60f3b98dd5d161052004a"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1885d2470955f70dfdd33a02e1749613c5a9c5ab855f6db38e0b9389453dce7"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0593822dcdb9483d41f12041ff7c90d4d1033ec0e880bcfaf102919b715f47f1"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:47f6eb74e1ecb5e19a78f4a4228aa24df7fbab3b62d4a625d3f41194a08bd54f"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c8b04a3dbd54de6ccb7604242fe3ad67f2f3ca558f2d33fe19d4b08d90701a89"}, + {file = "aiohttp-3.9.4-cp310-cp310-win32.whl", hash = "sha256:8a78dfb198a328bfb38e4308ca8167028920fb747ddcf086ce706fbdd23b2926"}, + {file = "aiohttp-3.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:e78da6b55275987cbc89141a1d8e75f5070e577c482dd48bd9123a76a96f0bbb"}, + {file = "aiohttp-3.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c111b3c69060d2bafc446917534150fd049e7aedd6cbf21ba526a5a97b4402a5"}, + {file = "aiohttp-3.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:efbdd51872cf170093998c87ccdf3cb5993add3559341a8e5708bcb311934c94"}, + {file = "aiohttp-3.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7bfdb41dc6e85d8535b00d73947548a748e9534e8e4fddd2638109ff3fb081df"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bd9d334412961125e9f68d5b73c1d0ab9ea3f74a58a475e6b119f5293eee7ba"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:35d78076736f4a668d57ade00c65d30a8ce28719d8a42471b2a06ccd1a2e3063"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:824dff4f9f4d0f59d0fa3577932ee9a20e09edec8a2f813e1d6b9f89ced8293f"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52b8b4e06fc15519019e128abedaeb56412b106ab88b3c452188ca47a25c4093"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eae569fb1e7559d4f3919965617bb39f9e753967fae55ce13454bec2d1c54f09"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:69b97aa5792428f321f72aeb2f118e56893371f27e0b7d05750bcad06fc42ca1"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4d79aad0ad4b980663316f26d9a492e8fab2af77c69c0f33780a56843ad2f89e"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:d6577140cd7db19e430661e4b2653680194ea8c22c994bc65b7a19d8ec834403"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:9860d455847cd98eb67897f5957b7cd69fbcb436dd3f06099230f16a66e66f79"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:69ff36d3f8f5652994e08bd22f093e11cfd0444cea310f92e01b45a4e46b624e"}, + {file = "aiohttp-3.9.4-cp311-cp311-win32.whl", hash = "sha256:e27d3b5ed2c2013bce66ad67ee57cbf614288bda8cdf426c8d8fe548316f1b5f"}, + {file = "aiohttp-3.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d6a67e26daa686a6fbdb600a9af8619c80a332556245fa8e86c747d226ab1a1e"}, + {file = "aiohttp-3.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c5ff8ff44825736a4065d8544b43b43ee4c6dd1530f3a08e6c0578a813b0aa35"}, + {file = "aiohttp-3.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d12a244627eba4e9dc52cbf924edef905ddd6cafc6513849b4876076a6f38b0e"}, + {file = "aiohttp-3.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dcad56c8d8348e7e468899d2fb3b309b9bc59d94e6db08710555f7436156097f"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f7e69a7fd4b5ce419238388e55abd220336bd32212c673ceabc57ccf3d05b55"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4870cb049f10d7680c239b55428916d84158798eb8f353e74fa2c98980dcc0b"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2feaf1b7031ede1bc0880cec4b0776fd347259a723d625357bb4b82f62687b"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:939393e8c3f0a5bcd33ef7ace67680c318dc2ae406f15e381c0054dd658397de"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d2334e387b2adcc944680bebcf412743f2caf4eeebd550f67249c1c3696be04"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e0198ea897680e480845ec0ffc5a14e8b694e25b3f104f63676d55bf76a82f1a"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e40d2cd22914d67c84824045861a5bb0fb46586b15dfe4f046c7495bf08306b2"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:aba80e77c227f4234aa34a5ff2b6ff30c5d6a827a91d22ff6b999de9175d71bd"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:fb68dc73bc8ac322d2e392a59a9e396c4f35cb6fdbdd749e139d1d6c985f2527"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f3460a92638dce7e47062cf088d6e7663adb135e936cb117be88d5e6c48c9d53"}, + {file = "aiohttp-3.9.4-cp312-cp312-win32.whl", hash = "sha256:32dc814ddbb254f6170bca198fe307920f6c1308a5492f049f7f63554b88ef36"}, + {file = "aiohttp-3.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:63f41a909d182d2b78fe3abef557fcc14da50c7852f70ae3be60e83ff64edba5"}, + {file = "aiohttp-3.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c3770365675f6be220032f6609a8fbad994d6dcf3ef7dbcf295c7ee70884c9af"}, + {file = "aiohttp-3.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:305edae1dea368ce09bcb858cf5a63a064f3bff4767dec6fa60a0cc0e805a1d3"}, + {file = "aiohttp-3.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6f121900131d116e4a93b55ab0d12ad72573f967b100e49086e496a9b24523ea"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b71e614c1ae35c3d62a293b19eface83d5e4d194e3eb2fabb10059d33e6e8cbf"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:419f009fa4cfde4d16a7fc070d64f36d70a8d35a90d71aa27670bba2be4fd039"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b39476ee69cfe64061fd77a73bf692c40021f8547cda617a3466530ef63f947"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b33f34c9c7decdb2ab99c74be6443942b730b56d9c5ee48fb7df2c86492f293c"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c78700130ce2dcebb1a8103202ae795be2fa8c9351d0dd22338fe3dac74847d9"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:268ba22d917655d1259af2d5659072b7dc11b4e1dc2cb9662fdd867d75afc6a4"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:17e7c051f53a0d2ebf33013a9cbf020bb4e098c4bc5bce6f7b0c962108d97eab"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:7be99f4abb008cb38e144f85f515598f4c2c8932bf11b65add0ff59c9c876d99"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:d58a54d6ff08d2547656356eea8572b224e6f9bbc0cf55fa9966bcaac4ddfb10"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7673a76772bda15d0d10d1aa881b7911d0580c980dbd16e59d7ba1422b2d83cd"}, + {file = "aiohttp-3.9.4-cp38-cp38-win32.whl", hash = "sha256:e4370dda04dc8951012f30e1ce7956a0a226ac0714a7b6c389fb2f43f22a250e"}, + {file = "aiohttp-3.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:eb30c4510a691bb87081192a394fb661860e75ca3896c01c6d186febe7c88530"}, + {file = "aiohttp-3.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:84e90494db7df3be5e056f91412f9fa9e611fbe8ce4aaef70647297f5943b276"}, + {file = "aiohttp-3.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7d4845f8501ab28ebfdbeab980a50a273b415cf69e96e4e674d43d86a464df9d"}, + {file = "aiohttp-3.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:69046cd9a2a17245c4ce3c1f1a4ff8c70c7701ef222fce3d1d8435f09042bba1"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b73a06bafc8dcc508420db43b4dd5850e41e69de99009d0351c4f3007960019"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:418bb0038dfafeac923823c2e63226179976c76f981a2aaad0ad5d51f2229bca"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:71a8f241456b6c2668374d5d28398f8e8cdae4cce568aaea54e0f39359cd928d"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:935c369bf8acc2dc26f6eeb5222768aa7c62917c3554f7215f2ead7386b33748"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74e4e48c8752d14ecfb36d2ebb3d76d614320570e14de0a3aa7a726ff150a03c"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:916b0417aeddf2c8c61291238ce25286f391a6acb6f28005dd9ce282bd6311b6"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9b6787b6d0b3518b2ee4cbeadd24a507756ee703adbac1ab6dc7c4434b8c572a"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:221204dbda5ef350e8db6287937621cf75e85778b296c9c52260b522231940ed"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:10afd99b8251022ddf81eaed1d90f5a988e349ee7d779eb429fb07b670751e8c"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2506d9f7a9b91033201be9ffe7d89c6a54150b0578803cce5cb84a943d075bc3"}, + {file = "aiohttp-3.9.4-cp39-cp39-win32.whl", hash = "sha256:e571fdd9efd65e86c6af2f332e0e95dad259bfe6beb5d15b3c3eca3a6eb5d87b"}, + {file = "aiohttp-3.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:7d29dd5319d20aa3b7749719ac9685fbd926f71ac8c77b2477272725f882072d"}, + {file = "aiohttp-3.9.4.tar.gz", hash = "sha256:6ff71ede6d9a5a58cfb7b6fffc83ab5d4a63138276c771ac91ceaaddf5459644"}, +] + +[package.dependencies] +aiosignal = ">=1.1.2" +async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} +attrs = ">=17.3.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +yarl = ">=1.0,<2.0" + +[package.extras] +speedups = ["Brotli", "aiodns", "brotlicffi"] + +[[package]] +name = "aiosignal" +version = "1.3.1" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = false +python-versions = ">=3.7" +files = [ + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, + {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" + +[[package]] +name = "airbyte-cdk" +version = "0.81.6" +description = "A framework for writing Airbyte Connectors." +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "airbyte_cdk-0.81.6-py3-none-any.whl", hash = "sha256:456a301edae4f8c99b77d89aaa2016827c43acfd7f9fb61e7a961760b954695b"}, + {file = "airbyte_cdk-0.81.6.tar.gz", hash = "sha256:3d4e2f3dc8177a109cb70e1d701583c870e2c31168dad7498cd2c84b0cc34637"}, +] + +[package.dependencies] +airbyte-protocol-models = "*" +backoff = "*" +cachetools = "*" +cohere = {version = "4.21", optional = true, markers = "extra == \"vector-db-based\""} +Deprecated = ">=1.2,<1.3" +dpath = ">=2.0.1,<2.1.0" +genson = "1.2.2" +isodate = ">=0.6.1,<0.7.0" +Jinja2 = ">=3.1.2,<3.2.0" +jsonref = ">=0.2,<0.3" +jsonschema = ">=3.2.0,<3.3.0" +langchain = {version = "0.1.16", optional = true, markers = "extra == \"vector-db-based\""} +langchain_core = "0.1.42" +openai = {version = "0.27.9", extras = ["embeddings"], optional = true, markers = "extra == \"vector-db-based\""} +pendulum = "<3.0.0" +pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" +python-dateutil = "*" +PyYAML = ">=6.0.1,<7.0.0" +requests = "*" +requests_cache = "*" +tiktoken = {version = "0.4.0", optional = true, markers = "extra == \"vector-db-based\""} +wcmatch = "8.4" + +[package.extras] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] +vector-db-based = ["cohere (==4.21)", "langchain (==0.1.16)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] + +[[package]] +name = "airbyte-protocol-models" +version = "0.9.0" +description = "Declares the Airbyte Protocol." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte_protocol_models-0.9.0-py3-none-any.whl", hash = "sha256:e972e140b5efd1edad5a338bcae8fdee9fc12545caf2c321e0f61b151c163a9b"}, + {file = "airbyte_protocol_models-0.9.0.tar.gz", hash = "sha256:40b69c33df23fe82d7078e84beb123bd604480e4d73cb277a890fcc92aedc8d2"}, +] + +[package.dependencies] +pydantic = ">=1.9.2,<2.0.0" + +[[package]] +name = "anyio" +version = "4.3.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.8" +files = [ + {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, + {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} + +[package.extras] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.23)"] + +[[package]] +name = "asgiref" +version = "3.8.1" +description = "ASGI specs, helper code, and adapters" +optional = false +python-versions = ">=3.8" +files = [ + {file = "asgiref-3.8.1-py3-none-any.whl", hash = "sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47"}, + {file = "asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""} + +[package.extras] +tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] + +[[package]] +name = "async-timeout" +version = "4.0.3" +description = "Timeout context manager for asyncio programs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, +] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "bcrypt" +version = "4.1.2" +description = "Modern password hashing for your software and your servers" +optional = false +python-versions = ">=3.7" +files = [ + {file = "bcrypt-4.1.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:ac621c093edb28200728a9cca214d7e838529e557027ef0581685909acd28b5e"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea505c97a5c465ab8c3ba75c0805a102ce526695cd6818c6de3b1a38f6f60da1"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57fa9442758da926ed33a91644649d3e340a71e2d0a5a8de064fb621fd5a3326"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:eb3bd3321517916696233b5e0c67fd7d6281f0ef48e66812db35fc963a422a1c"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6cad43d8c63f34b26aef462b6f5e44fdcf9860b723d2453b5d391258c4c8e966"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:44290ccc827d3a24604f2c8bcd00d0da349e336e6503656cb8192133e27335e2"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:732b3920a08eacf12f93e6b04ea276c489f1c8fb49344f564cca2adb663b3e4c"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1c28973decf4e0e69cee78c68e30a523be441972c826703bb93099868a8ff5b5"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b8df79979c5bae07f1db22dcc49cc5bccf08a0380ca5c6f391cbb5790355c0b0"}, + {file = "bcrypt-4.1.2-cp37-abi3-win32.whl", hash = "sha256:fbe188b878313d01b7718390f31528be4010fed1faa798c5a1d0469c9c48c369"}, + {file = "bcrypt-4.1.2-cp37-abi3-win_amd64.whl", hash = "sha256:9800ae5bd5077b13725e2e3934aa3c9c37e49d3ea3d06318010aa40f54c63551"}, + {file = "bcrypt-4.1.2-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:71b8be82bc46cedd61a9f4ccb6c1a493211d031415a34adde3669ee1b0afbb63"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e3c6642077b0c8092580c819c1684161262b2e30c4f45deb000c38947bf483"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:387e7e1af9a4dd636b9505a465032f2f5cb8e61ba1120e79a0e1cd0b512f3dfc"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f70d9c61f9c4ca7d57f3bfe88a5ccf62546ffbadf3681bb1e268d9d2e41c91a7"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2a298db2a8ab20056120b45e86c00a0a5eb50ec4075b6142db35f593b97cb3fb"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ba55e40de38a24e2d78d34c2d36d6e864f93e0d79d0b6ce915e4335aa81d01b1"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:3566a88234e8de2ccae31968127b0ecccbb4cddb629da744165db72b58d88ca4"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b90e216dc36864ae7132cb151ffe95155a37a14e0de3a8f64b49655dd959ff9c"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:69057b9fc5093ea1ab00dd24ede891f3e5e65bee040395fb1e66ee196f9c9b4a"}, + {file = "bcrypt-4.1.2-cp39-abi3-win32.whl", hash = "sha256:02d9ef8915f72dd6daaef40e0baeef8a017ce624369f09754baf32bb32dba25f"}, + {file = "bcrypt-4.1.2-cp39-abi3-win_amd64.whl", hash = "sha256:be3ab1071662f6065899fe08428e45c16aa36e28bc42921c4901a191fda6ee42"}, + {file = "bcrypt-4.1.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d75fc8cd0ba23f97bae88a6ec04e9e5351ff3c6ad06f38fe32ba50cbd0d11946"}, + {file = "bcrypt-4.1.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:a97e07e83e3262599434816f631cc4c7ca2aa8e9c072c1b1a7fec2ae809a1d2d"}, + {file = "bcrypt-4.1.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e51c42750b7585cee7892c2614be0d14107fad9581d1738d954a262556dd1aab"}, + {file = "bcrypt-4.1.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba4e4cc26610581a6329b3937e02d319f5ad4b85b074846bf4fef8a8cf51e7bb"}, + {file = "bcrypt-4.1.2.tar.gz", hash = "sha256:33313a1200a3ae90b75587ceac502b048b840fc69e7f7a0905b5f87fac7a1258"}, +] + +[package.extras] +tests = ["pytest (>=3.2.1,!=3.3.0)"] +typecheck = ["mypy"] + +[[package]] +name = "bracex" +version = "2.4" +description = "Bash style brace expander." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + +[[package]] +name = "build" +version = "1.2.1" +description = "A simple, correct Python build frontend" +optional = false +python-versions = ">=3.8" +files = [ + {file = "build-1.2.1-py3-none-any.whl", hash = "sha256:75e10f767a433d9a86e50d83f418e83efc18ede923ee5ff7df93b6cb0306c5d4"}, + {file = "build-1.2.1.tar.gz", hash = "sha256:526263f4870c26f26c433545579475377b2b7588b6f1eac76a001e873ae3e19d"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "os_name == \"nt\""} +importlib-metadata = {version = ">=4.6", markers = "python_full_version < \"3.10.2\""} +packaging = ">=19.1" +pyproject_hooks = "*" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} + +[package.extras] +docs = ["furo (>=2023.08.17)", "sphinx (>=7.0,<8.0)", "sphinx-argparse-cli (>=1.5)", "sphinx-autodoc-typehints (>=1.10)", "sphinx-issues (>=3.0.0)"] +test = ["build[uv,virtualenv]", "filelock (>=3)", "pytest (>=6.2.4)", "pytest-cov (>=2.12)", "pytest-mock (>=2)", "pytest-rerunfailures (>=9.1)", "pytest-xdist (>=1.34)", "setuptools (>=42.0.0)", "setuptools (>=56.0.0)", "setuptools (>=56.0.0)", "setuptools (>=67.8.0)", "wheel (>=0.36.0)"] +typing = ["build[uv]", "importlib-metadata (>=5.1)", "mypy (>=1.9.0,<1.10.0)", "tomli", "typing-extensions (>=3.7.4.3)"] +uv = ["uv (>=0.1.18)"] +virtualenv = ["virtualenv (>=20.0.35)"] + +[[package]] +name = "cachetools" +version = "5.3.3" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "chroma-hnswlib" +version = "0.7.3" +description = "Chromas fork of hnswlib" +optional = false +python-versions = "*" +files = [ + {file = "chroma-hnswlib-0.7.3.tar.gz", hash = "sha256:b6137bedde49fffda6af93b0297fe00429fc61e5a072b1ed9377f909ed95a932"}, + {file = "chroma_hnswlib-0.7.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:59d6a7c6f863c67aeb23e79a64001d537060b6995c3eca9a06e349ff7b0998ca"}, + {file = "chroma_hnswlib-0.7.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d71a3f4f232f537b6152947006bd32bc1629a8686df22fd97777b70f416c127a"}, + {file = "chroma_hnswlib-0.7.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c92dc1ebe062188e53970ba13f6b07e0ae32e64c9770eb7f7ffa83f149d4210"}, + {file = "chroma_hnswlib-0.7.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49da700a6656fed8753f68d44b8cc8ae46efc99fc8a22a6d970dc1697f49b403"}, + {file = "chroma_hnswlib-0.7.3-cp310-cp310-win_amd64.whl", hash = "sha256:108bc4c293d819b56476d8f7865803cb03afd6ca128a2a04d678fffc139af029"}, + {file = "chroma_hnswlib-0.7.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:11e7ca93fb8192214ac2b9c0943641ac0daf8f9d4591bb7b73be808a83835667"}, + {file = "chroma_hnswlib-0.7.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6f552e4d23edc06cdeb553cdc757d2fe190cdeb10d43093d6a3319f8d4bf1c6b"}, + {file = "chroma_hnswlib-0.7.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f96f4d5699e486eb1fb95849fe35ab79ab0901265805be7e60f4eaa83ce263ec"}, + {file = "chroma_hnswlib-0.7.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:368e57fe9ebae05ee5844840fa588028a023d1182b0cfdb1d13f607c9ea05756"}, + {file = "chroma_hnswlib-0.7.3-cp311-cp311-win_amd64.whl", hash = "sha256:b7dca27b8896b494456db0fd705b689ac6b73af78e186eb6a42fea2de4f71c6f"}, + {file = "chroma_hnswlib-0.7.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:70f897dc6218afa1d99f43a9ad5eb82f392df31f57ff514ccf4eeadecd62f544"}, + {file = "chroma_hnswlib-0.7.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5aef10b4952708f5a1381c124a29aead0c356f8d7d6e0b520b778aaa62a356f4"}, + {file = "chroma_hnswlib-0.7.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ee2d8d1529fca3898d512079144ec3e28a81d9c17e15e0ea4665697a7923253"}, + {file = "chroma_hnswlib-0.7.3-cp37-cp37m-win_amd64.whl", hash = "sha256:a4021a70e898783cd6f26e00008b494c6249a7babe8774e90ce4766dd288c8ba"}, + {file = "chroma_hnswlib-0.7.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a8f61fa1d417fda848e3ba06c07671f14806a2585272b175ba47501b066fe6b1"}, + {file = "chroma_hnswlib-0.7.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d7563be58bc98e8f0866907368e22ae218d6060601b79c42f59af4eccbbd2e0a"}, + {file = "chroma_hnswlib-0.7.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51b8d411486ee70d7b66ec08cc8b9b6620116b650df9c19076d2d8b6ce2ae914"}, + {file = "chroma_hnswlib-0.7.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d706782b628e4f43f1b8a81e9120ac486837fbd9bcb8ced70fe0d9b95c72d77"}, + {file = "chroma_hnswlib-0.7.3-cp38-cp38-win_amd64.whl", hash = "sha256:54f053dedc0e3ba657f05fec6e73dd541bc5db5b09aa8bc146466ffb734bdc86"}, + {file = "chroma_hnswlib-0.7.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e607c5a71c610a73167a517062d302c0827ccdd6e259af6e4869a5c1306ffb5d"}, + {file = "chroma_hnswlib-0.7.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c2358a795870156af6761890f9eb5ca8cade57eb10c5f046fe94dae1faa04b9e"}, + {file = "chroma_hnswlib-0.7.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7cea425df2e6b8a5e201fff0d922a1cc1d165b3cfe762b1408075723c8892218"}, + {file = "chroma_hnswlib-0.7.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:454df3dd3e97aa784fba7cf888ad191e0087eef0fd8c70daf28b753b3b591170"}, + {file = "chroma_hnswlib-0.7.3-cp39-cp39-win_amd64.whl", hash = "sha256:df587d15007ca701c6de0ee7d5585dd5e976b7edd2b30ac72bc376b3c3f85882"}, +] + +[package.dependencies] +numpy = "*" + +[[package]] +name = "chromadb" +version = "0.4.24" +description = "Chroma." +optional = false +python-versions = ">=3.8" +files = [ + {file = "chromadb-0.4.24-py3-none-any.whl", hash = "sha256:3a08e237a4ad28b5d176685bd22429a03717fe09d35022fb230d516108da01da"}, + {file = "chromadb-0.4.24.tar.gz", hash = "sha256:a5c80b4e4ad9b236ed2d4899a5b9e8002b489293f2881cb2cadab5b199ee1c72"}, +] + +[package.dependencies] +bcrypt = ">=4.0.1" +build = ">=1.0.3" +chroma-hnswlib = "0.7.3" +fastapi = ">=0.95.2" +grpcio = ">=1.58.0" +importlib-resources = "*" +kubernetes = ">=28.1.0" +mmh3 = ">=4.0.1" +numpy = ">=1.22.5" +onnxruntime = ">=1.14.1" +opentelemetry-api = ">=1.2.0" +opentelemetry-exporter-otlp-proto-grpc = ">=1.2.0" +opentelemetry-instrumentation-fastapi = ">=0.41b0" +opentelemetry-sdk = ">=1.2.0" +orjson = ">=3.9.12" +overrides = ">=7.3.1" +posthog = ">=2.4.0" +pulsar-client = ">=3.1.0" +pydantic = ">=1.9" +pypika = ">=0.48.9" +PyYAML = ">=6.0.0" +requests = ">=2.28" +tenacity = ">=8.2.3" +tokenizers = ">=0.13.2" +tqdm = ">=4.65.0" +typer = ">=0.9.0" +typing-extensions = ">=4.5.0" +uvicorn = {version = ">=0.18.3", extras = ["standard"]} + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "cohere" +version = "4.21" +description = "" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "cohere-4.21-py3-none-any.whl", hash = "sha256:5eb81db62e78b3156e734421cc3e657054f9d9f1d68b9f38cf48fe3a8ae40dbc"}, + {file = "cohere-4.21.tar.gz", hash = "sha256:f611438f409dfc5d5a0a153a585349f5a80b169c7102b5994d9999ecf8440866"}, +] + +[package.dependencies] +aiohttp = ">=3.0,<4.0" +backoff = ">=2.0,<3.0" +fastavro = {version = "1.8.2", markers = "python_version >= \"3.8\""} +importlib_metadata = ">=6.0,<7.0" +requests = ">=2.25.0,<3.0.0" +urllib3 = ">=1.26,<3" + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coloredlogs" +version = "15.0.1" +description = "Colored terminal output for Python's logging module" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934"}, + {file = "coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0"}, +] + +[package.dependencies] +humanfriendly = ">=9.1" + +[package.extras] +cron = ["capturer (>=2.4)"] + +[[package]] +name = "contourpy" +version = "1.2.1" +description = "Python library for calculating contours of 2D quadrilateral grids" +optional = false +python-versions = ">=3.9" +files = [ + {file = "contourpy-1.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bd7c23df857d488f418439686d3b10ae2fbf9bc256cd045b37a8c16575ea1040"}, + {file = "contourpy-1.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5b9eb0ca724a241683c9685a484da9d35c872fd42756574a7cfbf58af26677fd"}, + {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c75507d0a55378240f781599c30e7776674dbaf883a46d1c90f37e563453480"}, + {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11959f0ce4a6f7b76ec578576a0b61a28bdc0696194b6347ba3f1c53827178b9"}, + {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eb3315a8a236ee19b6df481fc5f997436e8ade24a9f03dfdc6bd490fea20c6da"}, + {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39f3ecaf76cd98e802f094e0d4fbc6dc9c45a8d0c4d185f0f6c2234e14e5f75b"}, + {file = "contourpy-1.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:94b34f32646ca0414237168d68a9157cb3889f06b096612afdd296003fdd32fd"}, + {file = "contourpy-1.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:457499c79fa84593f22454bbd27670227874cd2ff5d6c84e60575c8b50a69619"}, + {file = "contourpy-1.2.1-cp310-cp310-win32.whl", hash = "sha256:ac58bdee53cbeba2ecad824fa8159493f0bf3b8ea4e93feb06c9a465d6c87da8"}, + {file = "contourpy-1.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:9cffe0f850e89d7c0012a1fb8730f75edd4320a0a731ed0c183904fe6ecfc3a9"}, + {file = "contourpy-1.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6022cecf8f44e36af10bd9118ca71f371078b4c168b6e0fab43d4a889985dbb5"}, + {file = "contourpy-1.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ef5adb9a3b1d0c645ff694f9bca7702ec2c70f4d734f9922ea34de02294fdf72"}, + {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6150ffa5c767bc6332df27157d95442c379b7dce3a38dff89c0f39b63275696f"}, + {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c863140fafc615c14a4bf4efd0f4425c02230eb8ef02784c9a156461e62c965"}, + {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:00e5388f71c1a0610e6fe56b5c44ab7ba14165cdd6d695429c5cd94021e390b2"}, + {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4492d82b3bc7fbb7e3610747b159869468079fe149ec5c4d771fa1f614a14df"}, + {file = "contourpy-1.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:49e70d111fee47284d9dd867c9bb9a7058a3c617274900780c43e38d90fe1205"}, + {file = "contourpy-1.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b59c0ffceff8d4d3996a45f2bb6f4c207f94684a96bf3d9728dbb77428dd8cb8"}, + {file = "contourpy-1.2.1-cp311-cp311-win32.whl", hash = "sha256:7b4182299f251060996af5249c286bae9361fa8c6a9cda5efc29fe8bfd6062ec"}, + {file = "contourpy-1.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2855c8b0b55958265e8b5888d6a615ba02883b225f2227461aa9127c578a4922"}, + {file = "contourpy-1.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:62828cada4a2b850dbef89c81f5a33741898b305db244904de418cc957ff05dc"}, + {file = "contourpy-1.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:309be79c0a354afff9ff7da4aaed7c3257e77edf6c1b448a779329431ee79d7e"}, + {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e785e0f2ef0d567099b9ff92cbfb958d71c2d5b9259981cd9bee81bd194c9a4"}, + {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1cac0a8f71a041aa587410424ad46dfa6a11f6149ceb219ce7dd48f6b02b87a7"}, + {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af3f4485884750dddd9c25cb7e3915d83c2db92488b38ccb77dd594eac84c4a0"}, + {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ce6889abac9a42afd07a562c2d6d4b2b7134f83f18571d859b25624a331c90b"}, + {file = "contourpy-1.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a1eea9aecf761c661d096d39ed9026574de8adb2ae1c5bd7b33558af884fb2ce"}, + {file = "contourpy-1.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:187fa1d4c6acc06adb0fae5544c59898ad781409e61a926ac7e84b8f276dcef4"}, + {file = "contourpy-1.2.1-cp312-cp312-win32.whl", hash = "sha256:c2528d60e398c7c4c799d56f907664673a807635b857df18f7ae64d3e6ce2d9f"}, + {file = "contourpy-1.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:1a07fc092a4088ee952ddae19a2b2a85757b923217b7eed584fdf25f53a6e7ce"}, + {file = "contourpy-1.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bb6834cbd983b19f06908b45bfc2dad6ac9479ae04abe923a275b5f48f1a186b"}, + {file = "contourpy-1.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1d59e739ab0e3520e62a26c60707cc3ab0365d2f8fecea74bfe4de72dc56388f"}, + {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd3db01f59fdcbce5b22afad19e390260d6d0222f35a1023d9adc5690a889364"}, + {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a12a813949e5066148712a0626895c26b2578874e4cc63160bb007e6df3436fe"}, + {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe0ccca550bb8e5abc22f530ec0466136379c01321fd94f30a22231e8a48d985"}, + {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1d59258c3c67c865435d8fbeb35f8c59b8bef3d6f46c1f29f6123556af28445"}, + {file = "contourpy-1.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f32c38afb74bd98ce26de7cc74a67b40afb7b05aae7b42924ea990d51e4dac02"}, + {file = "contourpy-1.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d31a63bc6e6d87f77d71e1abbd7387ab817a66733734883d1fc0021ed9bfa083"}, + {file = "contourpy-1.2.1-cp39-cp39-win32.whl", hash = "sha256:ddcb8581510311e13421b1f544403c16e901c4e8f09083c881fab2be80ee31ba"}, + {file = "contourpy-1.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:10a37ae557aabf2509c79715cd20b62e4c7c28b8cd62dd7d99e5ed3ce28c3fd9"}, + {file = "contourpy-1.2.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a31f94983fecbac95e58388210427d68cd30fe8a36927980fab9c20062645609"}, + {file = "contourpy-1.2.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef2b055471c0eb466033760a521efb9d8a32b99ab907fc8358481a1dd29e3bd3"}, + {file = "contourpy-1.2.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b33d2bc4f69caedcd0a275329eb2198f560b325605810895627be5d4b876bf7f"}, + {file = "contourpy-1.2.1.tar.gz", hash = "sha256:4d8908b3bee1c889e547867ca4cdc54e5ab6be6d3e078556814a22457f49423c"}, +] + +[package.dependencies] +numpy = ">=1.20" + +[package.extras] +bokeh = ["bokeh", "selenium"] +docs = ["furo", "sphinx (>=7.2)", "sphinx-copybutton"] +mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.8.0)", "types-Pillow"] +test = ["Pillow", "contourpy[test-no-images]", "matplotlib"] +test-no-images = ["pytest", "pytest-cov", "pytest-xdist", "wurlitzer"] + +[[package]] +name = "cycler" +version = "0.12.1" +description = "Composable style cycles" +optional = false +python-versions = ">=3.8" +files = [ + {file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"}, + {file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"}, +] + +[package.extras] +docs = ["ipython", "matplotlib", "numpydoc", "sphinx"] +tests = ["pytest", "pytest-cov", "pytest-xdist"] + +[[package]] +name = "dataclasses-json" +version = "0.6.4" +description = "Easily serialize dataclasses to and from JSON." +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "dataclasses_json-0.6.4-py3-none-any.whl", hash = "sha256:f90578b8a3177f7552f4e1a6e535e84293cd5da421fcce0642d49c0d7bdf8df2"}, + {file = "dataclasses_json-0.6.4.tar.gz", hash = "sha256:73696ebf24936560cca79a2430cbc4f3dd23ac7bf46ed17f38e5e5e7657a6377"}, +] + +[package.dependencies] +marshmallow = ">=3.18.0,<4.0.0" +typing-inspect = ">=0.4.0,<1" + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "dpath" +version = "2.0.8" +description = "Filesystem-like pathing and searching for dictionaries" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, + {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, +] + +[[package]] +name = "et-xmlfile" +version = "1.1.0" +description = "An implementation of lxml.xmlfile for the standard library" +optional = false +python-versions = ">=3.6" +files = [ + {file = "et_xmlfile-1.1.0-py3-none-any.whl", hash = "sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"}, + {file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "fastapi" +version = "0.110.1" +description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fastapi-0.110.1-py3-none-any.whl", hash = "sha256:5df913203c482f820d31f48e635e022f8cbfe7350e4830ef05a3163925b1addc"}, + {file = "fastapi-0.110.1.tar.gz", hash = "sha256:6feac43ec359dfe4f45b2c18ec8c94edb8dc2dfc461d417d9e626590c071baad"}, +] + +[package.dependencies] +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" +starlette = ">=0.37.2,<0.38.0" +typing-extensions = ">=4.8.0" + +[package.extras] +all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] + +[[package]] +name = "fastavro" +version = "1.8.2" +description = "Fast read/write of AVRO files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fastavro-1.8.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:0e08964b2e9a455d831f2557402a683d4c4d45206f2ab9ade7c69d3dc14e0e58"}, + {file = "fastavro-1.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:401a70b1e5c7161420c6019e0c8afa88f7c8a373468591f5ec37639a903c2509"}, + {file = "fastavro-1.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef1ed3eaa4240c05698d02d8d0c010b9a03780eda37b492da6cd4c9d37e04ec"}, + {file = "fastavro-1.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:543185a672ff6306beb329b57a7b8a3a2dd1eb21a5ccc530150623d58d48bb98"}, + {file = "fastavro-1.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ffbf8bae1edb50fe7beeffc3afa8e684686550c2e5d31bf01c25cfa213f581e1"}, + {file = "fastavro-1.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:bb545eb9d876bc7b785e27e98e7720ada7eee7d7a1729798d2ed51517f13500a"}, + {file = "fastavro-1.8.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2b837d3038c651046252bc92c1b9899bf21c7927a148a1ff89599c36c2a331ca"}, + {file = "fastavro-1.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3510e96c0a47e4e914bd1a29c954eb662bfa24849ad92e597cb97cc79f21af7"}, + {file = "fastavro-1.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccc0e74f2c2ab357f39bb73d67fcdb6dc10e23fdbbd399326139f72ec0fb99a3"}, + {file = "fastavro-1.8.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:add51c70d0ab1175601c75cd687bbe9d16ae312cd8899b907aafe0d79ee2bc1d"}, + {file = "fastavro-1.8.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d9e2662f57e6453e9a2c9fb4f54b2a9e62e3e46f5a412ac00558112336d23883"}, + {file = "fastavro-1.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:fea75cf53a93c56dd56e68abce8d314ef877b27451c870cd7ede7582d34c08a7"}, + {file = "fastavro-1.8.2-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:f489020bb8664c2737c03457ad5dbd490579ddab6f0a7b5c17fecfe982715a89"}, + {file = "fastavro-1.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a547625c138efd5e61300119241041906ee8cb426fc7aa789900f87af7ed330d"}, + {file = "fastavro-1.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53beb458f30c9ad4aa7bff4a42243ff990ffb713b6ce0cd9b360cbc3d648fe52"}, + {file = "fastavro-1.8.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7b1b2cbd2dd851452306beed0ab9bdaeeab1cc8ad46f84b47cd81eeaff6dd6b8"}, + {file = "fastavro-1.8.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d29e9baee0b2f37ecd09bde3b487cf900431fd548c85be3e4fe1b9a0b2a917f1"}, + {file = "fastavro-1.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:66e132c710663230292bc63e2cb79cf95b16ccb94a5fc99bb63694b24e312fc5"}, + {file = "fastavro-1.8.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:38aca63ce604039bcdf2edd14912d00287bdbf8b76f9aa42b28e6ca0bf950092"}, + {file = "fastavro-1.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9787835f6449ee94713e7993a700432fce3763024791ffa8a58dc91ef9d1f950"}, + {file = "fastavro-1.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:536cb448bc83811056be02749fd9df37a69621678f02597d272970a769e9b40c"}, + {file = "fastavro-1.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e9d5027cf7d9968f8f819958b41bfedb933323ea6d6a0485eefacaa1afd91f54"}, + {file = "fastavro-1.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:792adfc0c80c7f1109e0ab4b0decef20691fdf0a45091d397a0563872eb56d42"}, + {file = "fastavro-1.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:650b22766259f7dd7519dfa4e4658f0e233c319efa130b9cf0c36a500e09cc57"}, + {file = "fastavro-1.8.2.tar.gz", hash = "sha256:ab9d9226d4b66b6b3d0661a57cd45259b0868fed1c0cd4fac95249b9e0973320"}, +] + +[package.extras] +codecs = ["lz4", "python-snappy", "zstandard"] +lz4 = ["lz4"] +snappy = ["python-snappy"] +zstandard = ["zstandard"] + +[[package]] +name = "filelock" +version = "3.13.4" +description = "A platform independent file lock." +optional = false +python-versions = ">=3.8" +files = [ + {file = "filelock-3.13.4-py3-none-any.whl", hash = "sha256:404e5e9253aa60ad457cae1be07c0f0ca90a63931200a47d9b6a6af84fd7b45f"}, + {file = "filelock-3.13.4.tar.gz", hash = "sha256:d13f466618bfde72bd2c18255e269f72542c6e70e7bac83a0232d6b1cc5c8cf4"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +typing = ["typing-extensions (>=4.8)"] + +[[package]] +name = "flatbuffers" +version = "24.3.25" +description = "The FlatBuffers serialization format for Python" +optional = false +python-versions = "*" +files = [ + {file = "flatbuffers-24.3.25-py2.py3-none-any.whl", hash = "sha256:8dbdec58f935f3765e4f7f3cf635ac3a77f83568138d6a2311f524ec96364812"}, + {file = "flatbuffers-24.3.25.tar.gz", hash = "sha256:de2ec5b203f21441716617f38443e0a8ebf3d25bf0d9c0bb0ce68fa00ad546a4"}, +] + +[[package]] +name = "fonttools" +version = "4.51.0" +description = "Tools to manipulate font files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fonttools-4.51.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:84d7751f4468dd8cdd03ddada18b8b0857a5beec80bce9f435742abc9a851a74"}, + {file = "fonttools-4.51.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8b4850fa2ef2cfbc1d1f689bc159ef0f45d8d83298c1425838095bf53ef46308"}, + {file = "fonttools-4.51.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5b48a1121117047d82695d276c2af2ee3a24ffe0f502ed581acc2673ecf1037"}, + {file = "fonttools-4.51.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:180194c7fe60c989bb627d7ed5011f2bef1c4d36ecf3ec64daec8302f1ae0716"}, + {file = "fonttools-4.51.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:96a48e137c36be55e68845fc4284533bda2980f8d6f835e26bca79d7e2006438"}, + {file = "fonttools-4.51.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:806e7912c32a657fa39d2d6eb1d3012d35f841387c8fc6cf349ed70b7c340039"}, + {file = "fonttools-4.51.0-cp310-cp310-win32.whl", hash = "sha256:32b17504696f605e9e960647c5f64b35704782a502cc26a37b800b4d69ff3c77"}, + {file = "fonttools-4.51.0-cp310-cp310-win_amd64.whl", hash = "sha256:c7e91abdfae1b5c9e3a543f48ce96013f9a08c6c9668f1e6be0beabf0a569c1b"}, + {file = "fonttools-4.51.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a8feca65bab31479d795b0d16c9a9852902e3a3c0630678efb0b2b7941ea9c74"}, + {file = "fonttools-4.51.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ac27f436e8af7779f0bb4d5425aa3535270494d3bc5459ed27de3f03151e4c2"}, + {file = "fonttools-4.51.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e19bd9e9964a09cd2433a4b100ca7f34e34731e0758e13ba9a1ed6e5468cc0f"}, + {file = "fonttools-4.51.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2b92381f37b39ba2fc98c3a45a9d6383bfc9916a87d66ccb6553f7bdd129097"}, + {file = "fonttools-4.51.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5f6bc991d1610f5c3bbe997b0233cbc234b8e82fa99fc0b2932dc1ca5e5afec0"}, + {file = "fonttools-4.51.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9696fe9f3f0c32e9a321d5268208a7cc9205a52f99b89479d1b035ed54c923f1"}, + {file = "fonttools-4.51.0-cp311-cp311-win32.whl", hash = "sha256:3bee3f3bd9fa1d5ee616ccfd13b27ca605c2b4270e45715bd2883e9504735034"}, + {file = "fonttools-4.51.0-cp311-cp311-win_amd64.whl", hash = "sha256:0f08c901d3866a8905363619e3741c33f0a83a680d92a9f0e575985c2634fcc1"}, + {file = "fonttools-4.51.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4060acc2bfa2d8e98117828a238889f13b6f69d59f4f2d5857eece5277b829ba"}, + {file = "fonttools-4.51.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1250e818b5f8a679ad79660855528120a8f0288f8f30ec88b83db51515411fcc"}, + {file = "fonttools-4.51.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76f1777d8b3386479ffb4a282e74318e730014d86ce60f016908d9801af9ca2a"}, + {file = "fonttools-4.51.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b5ad456813d93b9c4b7ee55302208db2b45324315129d85275c01f5cb7e61a2"}, + {file = "fonttools-4.51.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:68b3fb7775a923be73e739f92f7e8a72725fd333eab24834041365d2278c3671"}, + {file = "fonttools-4.51.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8e2f1a4499e3b5ee82c19b5ee57f0294673125c65b0a1ff3764ea1f9db2f9ef5"}, + {file = "fonttools-4.51.0-cp312-cp312-win32.whl", hash = "sha256:278e50f6b003c6aed19bae2242b364e575bcb16304b53f2b64f6551b9c000e15"}, + {file = "fonttools-4.51.0-cp312-cp312-win_amd64.whl", hash = "sha256:b3c61423f22165541b9403ee39874dcae84cd57a9078b82e1dce8cb06b07fa2e"}, + {file = "fonttools-4.51.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:1621ee57da887c17312acc4b0e7ac30d3a4fb0fec6174b2e3754a74c26bbed1e"}, + {file = "fonttools-4.51.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d9298be7a05bb4801f558522adbe2feea1b0b103d5294ebf24a92dd49b78e5"}, + {file = "fonttools-4.51.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee1af4be1c5afe4c96ca23badd368d8dc75f611887fb0c0dac9f71ee5d6f110e"}, + {file = "fonttools-4.51.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c18b49adc721a7d0b8dfe7c3130c89b8704baf599fb396396d07d4aa69b824a1"}, + {file = "fonttools-4.51.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de7c29bdbdd35811f14493ffd2534b88f0ce1b9065316433b22d63ca1cd21f14"}, + {file = "fonttools-4.51.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cadf4e12a608ef1d13e039864f484c8a968840afa0258b0b843a0556497ea9ed"}, + {file = "fonttools-4.51.0-cp38-cp38-win32.whl", hash = "sha256:aefa011207ed36cd280babfaa8510b8176f1a77261833e895a9d96e57e44802f"}, + {file = "fonttools-4.51.0-cp38-cp38-win_amd64.whl", hash = "sha256:865a58b6e60b0938874af0968cd0553bcd88e0b2cb6e588727117bd099eef836"}, + {file = "fonttools-4.51.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:60a3409c9112aec02d5fb546f557bca6efa773dcb32ac147c6baf5f742e6258b"}, + {file = "fonttools-4.51.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f7e89853d8bea103c8e3514b9f9dc86b5b4120afb4583b57eb10dfa5afbe0936"}, + {file = "fonttools-4.51.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56fc244f2585d6c00b9bcc59e6593e646cf095a96fe68d62cd4da53dd1287b55"}, + {file = "fonttools-4.51.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d145976194a5242fdd22df18a1b451481a88071feadf251221af110ca8f00ce"}, + {file = "fonttools-4.51.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5b8cab0c137ca229433570151b5c1fc6af212680b58b15abd797dcdd9dd5051"}, + {file = "fonttools-4.51.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:54dcf21a2f2d06ded676e3c3f9f74b2bafded3a8ff12f0983160b13e9f2fb4a7"}, + {file = "fonttools-4.51.0-cp39-cp39-win32.whl", hash = "sha256:0118ef998a0699a96c7b28457f15546815015a2710a1b23a7bf6c1be60c01636"}, + {file = "fonttools-4.51.0-cp39-cp39-win_amd64.whl", hash = "sha256:599bdb75e220241cedc6faebfafedd7670335d2e29620d207dd0378a4e9ccc5a"}, + {file = "fonttools-4.51.0-py3-none-any.whl", hash = "sha256:15c94eeef6b095831067f72c825eb0e2d48bb4cea0647c1b05c981ecba2bf39f"}, + {file = "fonttools-4.51.0.tar.gz", hash = "sha256:dc0673361331566d7a663d7ce0f6fdcbfbdc1f59c6e3ed1165ad7202ca183c68"}, +] + +[package.extras] +all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "pycairo", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0)", "xattr", "zopfli (>=0.1.4)"] +graphite = ["lz4 (>=1.7.4.2)"] +interpolatable = ["munkres", "pycairo", "scipy"] +lxml = ["lxml (>=4.0)"] +pathops = ["skia-pathops (>=0.5.0)"] +plot = ["matplotlib"] +repacker = ["uharfbuzz (>=0.23.0)"] +symfont = ["sympy"] +type1 = ["xattr"] +ufo = ["fs (>=2.2.0,<3)"] +unicode = ["unicodedata2 (>=15.1.0)"] +woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] + +[[package]] +name = "frozenlist" +version = "1.4.1" +description = "A list-like structure which implements collections.abc.MutableSequence" +optional = false +python-versions = ">=3.8" +files = [ + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, + {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, + {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, + {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, + {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, + {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, + {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, + {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, + {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, + {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, + {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, + {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, + {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, +] + +[[package]] +name = "fsspec" +version = "2024.3.1" +description = "File-system specification" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fsspec-2024.3.1-py3-none-any.whl", hash = "sha256:918d18d41bf73f0e2b261824baeb1b124bcf771767e3a26425cd7dec3332f512"}, + {file = "fsspec-2024.3.1.tar.gz", hash = "sha256:f39780e282d7d117ffb42bb96992f8a90795e4d0fb0f661a70ca39fe9c43ded9"}, +] + +[package.extras] +abfs = ["adlfs"] +adl = ["adlfs"] +arrow = ["pyarrow (>=1)"] +dask = ["dask", "distributed"] +devel = ["pytest", "pytest-cov"] +dropbox = ["dropbox", "dropboxdrivefs", "requests"] +full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"] +fuse = ["fusepy"] +gcs = ["gcsfs"] +git = ["pygit2"] +github = ["requests"] +gs = ["gcsfs"] +gui = ["panel"] +hdfs = ["pyarrow (>=1)"] +http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)"] +libarchive = ["libarchive-c"] +oci = ["ocifs"] +s3 = ["s3fs"] +sftp = ["paramiko"] +smb = ["smbprotocol"] +ssh = ["paramiko"] +tqdm = ["tqdm"] + +[[package]] +name = "genson" +version = "1.2.2" +description = "GenSON is a powerful, user-friendly JSON Schema generator." +optional = false +python-versions = "*" +files = [ + {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, +] + +[[package]] +name = "google-auth" +version = "2.29.0" +description = "Google Authentication Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-auth-2.29.0.tar.gz", hash = "sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360"}, + {file = "google_auth-2.29.0-py2.py3-none-any.whl", hash = "sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415"}, +] + +[package.dependencies] +cachetools = ">=2.0.0,<6.0" +pyasn1-modules = ">=0.2.1" +rsa = ">=3.1.4,<5" + +[package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] +enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] +pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] +reauth = ["pyu2f (>=0.1.5)"] +requests = ["requests (>=2.20.0,<3.0.0.dev0)"] + +[[package]] +name = "googleapis-common-protos" +version = "1.63.0" +description = "Common protobufs used in Google APIs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "googleapis-common-protos-1.63.0.tar.gz", hash = "sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e"}, + {file = "googleapis_common_protos-1.63.0-py2.py3-none-any.whl", hash = "sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632"}, +] + +[package.dependencies] +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" + +[package.extras] +grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] + +[[package]] +name = "greenlet" +version = "3.0.3" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.7" +files = [ + {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, + {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, + {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, + {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, + {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, + {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, + {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, + {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, + {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, + {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, + {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, + {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, + {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, + {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, + {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, + {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil"] + +[[package]] +name = "grpcio" +version = "1.62.1" +description = "HTTP/2-based RPC framework" +optional = false +python-versions = ">=3.7" +files = [ + {file = "grpcio-1.62.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:179bee6f5ed7b5f618844f760b6acf7e910988de77a4f75b95bbfaa8106f3c1e"}, + {file = "grpcio-1.62.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:48611e4fa010e823ba2de8fd3f77c1322dd60cb0d180dc6630a7e157b205f7ea"}, + {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:b2a0e71b0a2158aa4bce48be9f8f9eb45cbd17c78c7443616d00abbe2a509f6d"}, + {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fbe80577c7880911d3ad65e5ecc997416c98f354efeba2f8d0f9112a67ed65a5"}, + {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58f6c693d446964e3292425e1d16e21a97a48ba9172f2d0df9d7b640acb99243"}, + {file = "grpcio-1.62.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:77c339403db5a20ef4fed02e4d1a9a3d9866bf9c0afc77a42234677313ea22f3"}, + {file = "grpcio-1.62.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b5a4ea906db7dec694098435d84bf2854fe158eb3cd51e1107e571246d4d1d70"}, + {file = "grpcio-1.62.1-cp310-cp310-win32.whl", hash = "sha256:4187201a53f8561c015bc745b81a1b2d278967b8de35f3399b84b0695e281d5f"}, + {file = "grpcio-1.62.1-cp310-cp310-win_amd64.whl", hash = "sha256:844d1f3fb11bd1ed362d3fdc495d0770cfab75761836193af166fee113421d66"}, + {file = "grpcio-1.62.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:833379943d1728a005e44103f17ecd73d058d37d95783eb8f0b28ddc1f54d7b2"}, + {file = "grpcio-1.62.1-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:c7fcc6a32e7b7b58f5a7d27530669337a5d587d4066060bcb9dee7a8c833dfb7"}, + {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:fa7d28eb4d50b7cbe75bb8b45ed0da9a1dc5b219a0af59449676a29c2eed9698"}, + {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48f7135c3de2f298b833be8b4ae20cafe37091634e91f61f5a7eb3d61ec6f660"}, + {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71f11fd63365ade276c9d4a7b7df5c136f9030e3457107e1791b3737a9b9ed6a"}, + {file = "grpcio-1.62.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4b49fd8fe9f9ac23b78437da94c54aa7e9996fbb220bac024a67469ce5d0825f"}, + {file = "grpcio-1.62.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:482ae2ae78679ba9ed5752099b32e5fe580443b4f798e1b71df412abf43375db"}, + {file = "grpcio-1.62.1-cp311-cp311-win32.whl", hash = "sha256:1faa02530b6c7426404372515fe5ddf66e199c2ee613f88f025c6f3bd816450c"}, + {file = "grpcio-1.62.1-cp311-cp311-win_amd64.whl", hash = "sha256:5bd90b8c395f39bc82a5fb32a0173e220e3f401ff697840f4003e15b96d1befc"}, + {file = "grpcio-1.62.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:b134d5d71b4e0837fff574c00e49176051a1c532d26c052a1e43231f252d813b"}, + {file = "grpcio-1.62.1-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:d1f6c96573dc09d50dbcbd91dbf71d5cf97640c9427c32584010fbbd4c0e0037"}, + {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:359f821d4578f80f41909b9ee9b76fb249a21035a061a327f91c953493782c31"}, + {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a485f0c2010c696be269184bdb5ae72781344cb4e60db976c59d84dd6354fac9"}, + {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b50b09b4dc01767163d67e1532f948264167cd27f49e9377e3556c3cba1268e1"}, + {file = "grpcio-1.62.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3227c667dccbe38f2c4d943238b887bac588d97c104815aecc62d2fd976e014b"}, + {file = "grpcio-1.62.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3952b581eb121324853ce2b191dae08badb75cd493cb4e0243368aa9e61cfd41"}, + {file = "grpcio-1.62.1-cp312-cp312-win32.whl", hash = "sha256:83a17b303425104d6329c10eb34bba186ffa67161e63fa6cdae7776ff76df73f"}, + {file = "grpcio-1.62.1-cp312-cp312-win_amd64.whl", hash = "sha256:6696ffe440333a19d8d128e88d440f91fb92c75a80ce4b44d55800e656a3ef1d"}, + {file = "grpcio-1.62.1-cp37-cp37m-linux_armv7l.whl", hash = "sha256:e3393b0823f938253370ebef033c9fd23d27f3eae8eb9a8f6264900c7ea3fb5a"}, + {file = "grpcio-1.62.1-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:83e7ccb85a74beaeae2634f10eb858a0ed1a63081172649ff4261f929bacfd22"}, + {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:882020c87999d54667a284c7ddf065b359bd00251fcd70279ac486776dbf84ec"}, + {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a10383035e864f386fe096fed5c47d27a2bf7173c56a6e26cffaaa5a361addb1"}, + {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:960edebedc6b9ada1ef58e1c71156f28689978188cd8cff3b646b57288a927d9"}, + {file = "grpcio-1.62.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:23e2e04b83f347d0aadde0c9b616f4726c3d76db04b438fd3904b289a725267f"}, + {file = "grpcio-1.62.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:978121758711916d34fe57c1f75b79cdfc73952f1481bb9583399331682d36f7"}, + {file = "grpcio-1.62.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9084086190cc6d628f282e5615f987288b95457292e969b9205e45b442276407"}, + {file = "grpcio-1.62.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:22bccdd7b23c420a27fd28540fb5dcbc97dc6be105f7698cb0e7d7a420d0e362"}, + {file = "grpcio-1.62.1-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:8999bf1b57172dbc7c3e4bb3c732658e918f5c333b2942243f10d0d653953ba9"}, + {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:d9e52558b8b8c2f4ac05ac86344a7417ccdd2b460a59616de49eb6933b07a0bd"}, + {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1714e7bc935780bc3de1b3fcbc7674209adf5208ff825799d579ffd6cd0bd505"}, + {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8842ccbd8c0e253c1f189088228f9b433f7a93b7196b9e5b6f87dba393f5d5d"}, + {file = "grpcio-1.62.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1f1e7b36bdff50103af95a80923bf1853f6823dd62f2d2a2524b66ed74103e49"}, + {file = "grpcio-1.62.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bba97b8e8883a8038606480d6b6772289f4c907f6ba780fa1f7b7da7dfd76f06"}, + {file = "grpcio-1.62.1-cp38-cp38-win32.whl", hash = "sha256:a7f615270fe534548112a74e790cd9d4f5509d744dd718cd442bf016626c22e4"}, + {file = "grpcio-1.62.1-cp38-cp38-win_amd64.whl", hash = "sha256:e6c8c8693df718c5ecbc7babb12c69a4e3677fd11de8886f05ab22d4e6b1c43b"}, + {file = "grpcio-1.62.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:73db2dc1b201d20ab7083e7041946910bb991e7e9761a0394bbc3c2632326483"}, + {file = "grpcio-1.62.1-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:407b26b7f7bbd4f4751dbc9767a1f0716f9fe72d3d7e96bb3ccfc4aace07c8de"}, + {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:f8de7c8cef9261a2d0a62edf2ccea3d741a523c6b8a6477a340a1f2e417658de"}, + {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd5c8a1af40ec305d001c60236308a67e25419003e9bb3ebfab5695a8d0b369"}, + {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be0477cb31da67846a33b1a75c611f88bfbcd427fe17701b6317aefceee1b96f"}, + {file = "grpcio-1.62.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:60dcd824df166ba266ee0cfaf35a31406cd16ef602b49f5d4dfb21f014b0dedd"}, + {file = "grpcio-1.62.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:973c49086cabab773525f6077f95e5a993bfc03ba8fc32e32f2c279497780585"}, + {file = "grpcio-1.62.1-cp39-cp39-win32.whl", hash = "sha256:12859468e8918d3bd243d213cd6fd6ab07208195dc140763c00dfe901ce1e1b4"}, + {file = "grpcio-1.62.1-cp39-cp39-win_amd64.whl", hash = "sha256:b7209117bbeebdfa5d898205cc55153a51285757902dd73c47de498ad4d11332"}, + {file = "grpcio-1.62.1.tar.gz", hash = "sha256:6c455e008fa86d9e9a9d85bb76da4277c0d7d9668a3bfa70dbe86e9f3c759947"}, +] + +[package.extras] +protobuf = ["grpcio-tools (>=1.62.1)"] + +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "httptools" +version = "0.6.1" +description = "A collection of framework independent HTTP protocol utils." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d2f6c3c4cb1948d912538217838f6e9960bc4a521d7f9b323b3da579cd14532f"}, + {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:00d5d4b68a717765b1fabfd9ca755bd12bf44105eeb806c03d1962acd9b8e563"}, + {file = "httptools-0.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:639dc4f381a870c9ec860ce5c45921db50205a37cc3334e756269736ff0aac58"}, + {file = "httptools-0.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e57997ac7fb7ee43140cc03664de5f268813a481dff6245e0075925adc6aa185"}, + {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0ac5a0ae3d9f4fe004318d64b8a854edd85ab76cffbf7ef5e32920faef62f142"}, + {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3f30d3ce413088a98b9db71c60a6ada2001a08945cb42dd65a9a9fe228627658"}, + {file = "httptools-0.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:1ed99a373e327f0107cb513b61820102ee4f3675656a37a50083eda05dc9541b"}, + {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7a7ea483c1a4485c71cb5f38be9db078f8b0e8b4c4dc0210f531cdd2ddac1ef1"}, + {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:85ed077c995e942b6f1b07583e4eb0a8d324d418954fc6af913d36db7c05a5a0"}, + {file = "httptools-0.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b0bb634338334385351a1600a73e558ce619af390c2b38386206ac6a27fecfc"}, + {file = "httptools-0.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d9ceb2c957320def533671fc9c715a80c47025139c8d1f3797477decbc6edd2"}, + {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4f0f8271c0a4db459f9dc807acd0eadd4839934a4b9b892f6f160e94da309837"}, + {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6a4f5ccead6d18ec072ac0b84420e95d27c1cdf5c9f1bc8fbd8daf86bd94f43d"}, + {file = "httptools-0.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:5cceac09f164bcba55c0500a18fe3c47df29b62353198e4f37bbcc5d591172c3"}, + {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:75c8022dca7935cba14741a42744eee13ba05db00b27a4b940f0d646bd4d56d0"}, + {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:48ed8129cd9a0d62cf4d1575fcf90fb37e3ff7d5654d3a5814eb3d55f36478c2"}, + {file = "httptools-0.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f58e335a1402fb5a650e271e8c2d03cfa7cea46ae124649346d17bd30d59c90"}, + {file = "httptools-0.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93ad80d7176aa5788902f207a4e79885f0576134695dfb0fefc15b7a4648d503"}, + {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9bb68d3a085c2174c2477eb3ffe84ae9fb4fde8792edb7bcd09a1d8467e30a84"}, + {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b512aa728bc02354e5ac086ce76c3ce635b62f5fbc32ab7082b5e582d27867bb"}, + {file = "httptools-0.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:97662ce7fb196c785344d00d638fc9ad69e18ee4bfb4000b35a52efe5adcc949"}, + {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8e216a038d2d52ea13fdd9b9c9c7459fb80d78302b257828285eca1c773b99b3"}, + {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3e802e0b2378ade99cd666b5bffb8b2a7cc8f3d28988685dc300469ea8dd86cb"}, + {file = "httptools-0.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bd3e488b447046e386a30f07af05f9b38d3d368d1f7b4d8f7e10af85393db97"}, + {file = "httptools-0.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe467eb086d80217b7584e61313ebadc8d187a4d95bb62031b7bab4b205c3ba3"}, + {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3c3b214ce057c54675b00108ac42bacf2ab8f85c58e3f324a4e963bbc46424f4"}, + {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8ae5b97f690badd2ca27cbf668494ee1b6d34cf1c464271ef7bfa9ca6b83ffaf"}, + {file = "httptools-0.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:405784577ba6540fa7d6ff49e37daf104e04f4b4ff2d1ac0469eaa6a20fde084"}, + {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:95fb92dd3649f9cb139e9c56604cc2d7c7bf0fc2e7c8d7fbd58f96e35eddd2a3"}, + {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dcbab042cc3ef272adc11220517278519adf8f53fd3056d0e68f0a6f891ba94e"}, + {file = "httptools-0.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cf2372e98406efb42e93bfe10f2948e467edfd792b015f1b4ecd897903d3e8d"}, + {file = "httptools-0.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:678fcbae74477a17d103b7cae78b74800d795d702083867ce160fc202104d0da"}, + {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e0b281cf5a125c35f7f6722b65d8542d2e57331be573e9e88bc8b0115c4a7a81"}, + {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:95658c342529bba4e1d3d2b1a874db16c7cca435e8827422154c9da76ac4e13a"}, + {file = "httptools-0.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ebaec1bf683e4bf5e9fbb49b8cc36da482033596a415b3e4ebab5a4c0d7ec5e"}, + {file = "httptools-0.6.1.tar.gz", hash = "sha256:c6e26c30455600b95d94b1b836085138e82f177351454ee841c148f93a9bad5a"}, +] + +[package.extras] +test = ["Cython (>=0.29.24,<0.30.0)"] + +[[package]] +name = "huggingface-hub" +version = "0.22.2" +description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "huggingface_hub-0.22.2-py3-none-any.whl", hash = "sha256:3429e25f38ccb834d310804a3b711e7e4953db5a9e420cc147a5e194ca90fd17"}, + {file = "huggingface_hub-0.22.2.tar.gz", hash = "sha256:32e9a9a6843c92f253ff9ca16b9985def4d80a93fb357af5353f770ef74a81be"}, +] + +[package.dependencies] +filelock = "*" +fsspec = ">=2023.5.0" +packaging = ">=20.9" +pyyaml = ">=5.1" +requests = "*" +tqdm = ">=4.42.1" +typing-extensions = ">=3.7.4.3" + +[package.extras] +all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.3.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +cli = ["InquirerPy (==0.3.4)"] +dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.3.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] +hf-transfer = ["hf-transfer (>=0.1.4)"] +inference = ["aiohttp", "minijinja (>=1.0)"] +quality = ["mypy (==1.5.1)", "ruff (>=0.3.0)"] +tensorflow = ["graphviz", "pydot", "tensorflow"] +tensorflow-testing = ["keras (<3.0)", "tensorflow"] +testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "minijinja (>=1.0)", "numpy", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] +torch = ["safetensors", "torch"] +typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"] + +[[package]] +name = "humanfriendly" +version = "10.0" +description = "Human friendly output for text interfaces using Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477"}, + {file = "humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc"}, +] + +[package.dependencies] +pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_version >= \"3.8\""} + +[[package]] +name = "idna" +version = "3.7" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, +] + +[[package]] +name = "importlib-metadata" +version = "6.11.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-6.11.0-py3-none-any.whl", hash = "sha256:f0afba6205ad8f8947c7d338b5342d5db2afbfd82f9cbef7879a9539cc12eb9b"}, + {file = "importlib_metadata-6.11.0.tar.gz", hash = "sha256:1231cf92d825c9e03cfc4da076a16de6422c863558229ea0b22b675657463443"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] + +[[package]] +name = "importlib-resources" +version = "6.4.0" +description = "Read resources from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_resources-6.4.0-py3-none-any.whl", hash = "sha256:50d10f043df931902d4194ea07ec57960f66a80449ff867bfe782b4c486ba78c"}, + {file = "importlib_resources-6.4.0.tar.gz", hash = "sha256:cdb2b453b8046ca4e3798eb1d84f3cce1446a0e8e7b5ef4efb600f19fc398145"}, +] + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["jaraco.test (>=5.4)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "joblib" +version = "1.4.0" +description = "Lightweight pipelining with Python functions" +optional = false +python-versions = ">=3.8" +files = [ + {file = "joblib-1.4.0-py3-none-any.whl", hash = "sha256:42942470d4062537be4d54c83511186da1fc14ba354961a2114da91efa9a4ed7"}, + {file = "joblib-1.4.0.tar.gz", hash = "sha256:1eb0dc091919cd384490de890cb5dfd538410a6d4b3b54eef09fb8c50b409b1c"}, +] + +[[package]] +name = "jsonpatch" +version = "1.33" +description = "Apply JSON-Patches (RFC 6902)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, + {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, +] + +[package.dependencies] +jsonpointer = ">=1.9" + +[[package]] +name = "jsonpointer" +version = "2.4" +description = "Identify specific nodes in a JSON document (RFC 6901)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, + {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, +] + +[[package]] +name = "jsonref" +version = "0.2" +description = "An implementation of JSON Reference for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, +] + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0" +setuptools = "*" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] + +[[package]] +name = "kiwisolver" +version = "1.4.5" +description = "A fast implementation of the Cassowary constraint solver" +optional = false +python-versions = ">=3.7" +files = [ + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:05703cf211d585109fcd72207a31bb170a0f22144d68298dc5e61b3c946518af"}, + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:146d14bebb7f1dc4d5fbf74f8a6cb15ac42baadee8912eb84ac0b3b2a3dc6ac3"}, + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ef7afcd2d281494c0a9101d5c571970708ad911d028137cd558f02b851c08b4"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9eaa8b117dc8337728e834b9c6e2611f10c79e38f65157c4c38e9400286f5cb1"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ec20916e7b4cbfb1f12380e46486ec4bcbaa91a9c448b97023fde0d5bbf9e4ff"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39b42c68602539407884cf70d6a480a469b93b81b7701378ba5e2328660c847a"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa12042de0171fad672b6c59df69106d20d5596e4f87b5e8f76df757a7c399aa"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a40773c71d7ccdd3798f6489aaac9eee213d566850a9533f8d26332d626b82c"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:19df6e621f6d8b4b9c4d45f40a66839294ff2bb235e64d2178f7522d9170ac5b"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:83d78376d0d4fd884e2c114d0621624b73d2aba4e2788182d286309ebdeed770"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e391b1f0a8a5a10ab3b9bb6afcfd74f2175f24f8975fb87ecae700d1503cdee0"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:852542f9481f4a62dbb5dd99e8ab7aedfeb8fb6342349a181d4036877410f525"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59edc41b24031bc25108e210c0def6f6c2191210492a972d585a06ff246bb79b"}, + {file = "kiwisolver-1.4.5-cp310-cp310-win32.whl", hash = "sha256:a6aa6315319a052b4ee378aa171959c898a6183f15c1e541821c5c59beaa0238"}, + {file = "kiwisolver-1.4.5-cp310-cp310-win_amd64.whl", hash = "sha256:d0ef46024e6a3d79c01ff13801cb19d0cad7fd859b15037aec74315540acc276"}, + {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:11863aa14a51fd6ec28688d76f1735f8f69ab1fabf388851a595d0721af042f5"}, + {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ab3919a9997ab7ef2fbbed0cc99bb28d3c13e6d4b1ad36e97e482558a91be90"}, + {file = "kiwisolver-1.4.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fcc700eadbbccbf6bc1bcb9dbe0786b4b1cb91ca0dcda336eef5c2beed37b797"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dfdd7c0b105af050eb3d64997809dc21da247cf44e63dc73ff0fd20b96be55a9"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76c6a5964640638cdeaa0c359382e5703e9293030fe730018ca06bc2010c4437"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbea0db94288e29afcc4c28afbf3a7ccaf2d7e027489c449cf7e8f83c6346eb9"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ceec1a6bc6cab1d6ff5d06592a91a692f90ec7505d6463a88a52cc0eb58545da"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:040c1aebeda72197ef477a906782b5ab0d387642e93bda547336b8957c61022e"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f91de7223d4c7b793867797bacd1ee53bfe7359bd70d27b7b58a04efbb9436c8"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:faae4860798c31530dd184046a900e652c95513796ef51a12bc086710c2eec4d"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0157420efcb803e71d1b28e2c287518b8808b7cf1ab8af36718fd0a2c453eb0"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:06f54715b7737c2fecdbf140d1afb11a33d59508a47bf11bb38ecf21dc9ab79f"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fdb7adb641a0d13bdcd4ef48e062363d8a9ad4a182ac7647ec88f695e719ae9f"}, + {file = "kiwisolver-1.4.5-cp311-cp311-win32.whl", hash = "sha256:bb86433b1cfe686da83ce32a9d3a8dd308e85c76b60896d58f082136f10bffac"}, + {file = "kiwisolver-1.4.5-cp311-cp311-win_amd64.whl", hash = "sha256:6c08e1312a9cf1074d17b17728d3dfce2a5125b2d791527f33ffbe805200a355"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:32d5cf40c4f7c7b3ca500f8985eb3fb3a7dfc023215e876f207956b5ea26632a"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f846c260f483d1fd217fe5ed7c173fb109efa6b1fc8381c8b7552c5781756192"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5ff5cf3571589b6d13bfbfd6bcd7a3f659e42f96b5fd1c4830c4cf21d4f5ef45"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7269d9e5f1084a653d575c7ec012ff57f0c042258bf5db0954bf551c158466e7"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da802a19d6e15dffe4b0c24b38b3af68e6c1a68e6e1d8f30148c83864f3881db"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3aba7311af82e335dd1e36ffff68aaca609ca6290c2cb6d821a39aa075d8e3ff"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:763773d53f07244148ccac5b084da5adb90bfaee39c197554f01b286cf869228"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2270953c0d8cdab5d422bee7d2007f043473f9d2999631c86a223c9db56cbd16"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d099e745a512f7e3bbe7249ca835f4d357c586d78d79ae8f1dcd4d8adeb9bda9"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:74db36e14a7d1ce0986fa104f7d5637aea5c82ca6326ed0ec5694280942d1162"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e5bab140c309cb3a6ce373a9e71eb7e4873c70c2dda01df6820474f9889d6d4"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0f114aa76dc1b8f636d077979c0ac22e7cd8f3493abbab152f20eb8d3cda71f3"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:88a2df29d4724b9237fc0c6eaf2a1adae0cdc0b3e9f4d8e7dc54b16812d2d81a"}, + {file = "kiwisolver-1.4.5-cp312-cp312-win32.whl", hash = "sha256:72d40b33e834371fd330fb1472ca19d9b8327acb79a5821d4008391db8e29f20"}, + {file = "kiwisolver-1.4.5-cp312-cp312-win_amd64.whl", hash = "sha256:2c5674c4e74d939b9d91dda0fae10597ac7521768fec9e399c70a1f27e2ea2d9"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3a2b053a0ab7a3960c98725cfb0bf5b48ba82f64ec95fe06f1d06c99b552e130"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cd32d6c13807e5c66a7cbb79f90b553642f296ae4518a60d8d76243b0ad2898"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59ec7b7c7e1a61061850d53aaf8e93db63dce0c936db1fda2658b70e4a1be709"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da4cfb373035def307905d05041c1d06d8936452fe89d464743ae7fb8371078b"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2400873bccc260b6ae184b2b8a4fec0e4082d30648eadb7c3d9a13405d861e89"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1b04139c4236a0f3aff534479b58f6f849a8b351e1314826c2d230849ed48985"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:4e66e81a5779b65ac21764c295087de82235597a2293d18d943f8e9e32746265"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7931d8f1f67c4be9ba1dd9c451fb0eeca1a25b89e4d3f89e828fe12a519b782a"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b3f7e75f3015df442238cca659f8baa5f42ce2a8582727981cbfa15fee0ee205"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:bbf1d63eef84b2e8c89011b7f2235b1e0bf7dacc11cac9431fc6468e99ac77fb"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4c380469bd3f970ef677bf2bcba2b6b0b4d5c75e7a020fb863ef75084efad66f"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-win32.whl", hash = "sha256:9408acf3270c4b6baad483865191e3e582b638b1654a007c62e3efe96f09a9a3"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-win_amd64.whl", hash = "sha256:5b94529f9b2591b7af5f3e0e730a4e0a41ea174af35a4fd067775f9bdfeee01a"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:11c7de8f692fc99816e8ac50d1d1aef4f75126eefc33ac79aac02c099fd3db71"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:53abb58632235cd154176ced1ae8f0d29a6657aa1aa9decf50b899b755bc2b93"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:88b9f257ca61b838b6f8094a62418421f87ac2a1069f7e896c36a7d86b5d4c29"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3195782b26fc03aa9c6913d5bad5aeb864bdc372924c093b0f1cebad603dd712"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc579bf0f502e54926519451b920e875f433aceb4624a3646b3252b5caa9e0b6"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a580c91d686376f0f7c295357595c5a026e6cbc3d77b7c36e290201e7c11ecb"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cfe6ab8da05c01ba6fbea630377b5da2cd9bcbc6338510116b01c1bc939a2c18"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d2e5a98f0ec99beb3c10e13b387f8db39106d53993f498b295f0c914328b1333"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a51a263952b1429e429ff236d2f5a21c5125437861baeed77f5e1cc2d2c7c6da"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3edd2fa14e68c9be82c5b16689e8d63d89fe927e56debd6e1dbce7a26a17f81b"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:74d1b44c6cfc897df648cc9fdaa09bc3e7679926e6f96df05775d4fb3946571c"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:76d9289ed3f7501012e05abb8358bbb129149dbd173f1f57a1bf1c22d19ab7cc"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:92dea1ffe3714fa8eb6a314d2b3c773208d865a0e0d35e713ec54eea08a66250"}, + {file = "kiwisolver-1.4.5-cp38-cp38-win32.whl", hash = "sha256:5c90ae8c8d32e472be041e76f9d2f2dbff4d0b0be8bd4041770eddb18cf49a4e"}, + {file = "kiwisolver-1.4.5-cp38-cp38-win_amd64.whl", hash = "sha256:c7940c1dc63eb37a67721b10d703247552416f719c4188c54e04334321351ced"}, + {file = "kiwisolver-1.4.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9407b6a5f0d675e8a827ad8742e1d6b49d9c1a1da5d952a67d50ef5f4170b18d"}, + {file = "kiwisolver-1.4.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15568384086b6df3c65353820a4473575dbad192e35010f622c6ce3eebd57af9"}, + {file = "kiwisolver-1.4.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0dc9db8e79f0036e8173c466d21ef18e1befc02de8bf8aa8dc0813a6dc8a7046"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cdc8a402aaee9a798b50d8b827d7ecf75edc5fb35ea0f91f213ff927c15f4ff0"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6c3bd3cde54cafb87d74d8db50b909705c62b17c2099b8f2e25b461882e544ff"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:955e8513d07a283056b1396e9a57ceddbd272d9252c14f154d450d227606eb54"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:346f5343b9e3f00b8db8ba359350eb124b98c99efd0b408728ac6ebf38173958"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9098e0049e88c6a24ff64545cdfc50807818ba6c1b739cae221bbbcbc58aad3"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:00bd361b903dc4bbf4eb165f24d1acbee754fce22ded24c3d56eec268658a5cf"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7b8b454bac16428b22560d0a1cf0a09875339cab69df61d7805bf48919415901"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f1d072c2eb0ad60d4c183f3fb44ac6f73fb7a8f16a2694a91f988275cbf352f9"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:31a82d498054cac9f6d0b53d02bb85811185bcb477d4b60144f915f3b3126342"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6512cb89e334e4700febbffaaa52761b65b4f5a3cf33f960213d5656cea36a77"}, + {file = "kiwisolver-1.4.5-cp39-cp39-win32.whl", hash = "sha256:9db8ea4c388fdb0f780fe91346fd438657ea602d58348753d9fb265ce1bca67f"}, + {file = "kiwisolver-1.4.5-cp39-cp39-win_amd64.whl", hash = "sha256:59415f46a37f7f2efeec758353dd2eae1b07640d8ca0f0c42548ec4125492635"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5c7b3b3a728dc6faf3fc372ef24f21d1e3cee2ac3e9596691d746e5a536de920"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:620ced262a86244e2be10a676b646f29c34537d0d9cc8eb26c08f53d98013390"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:378a214a1e3bbf5ac4a8708304318b4f890da88c9e6a07699c4ae7174c09a68d"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf7be1207676ac608a50cd08f102f6742dbfc70e8d60c4db1c6897f62f71523"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ba55dce0a9b8ff59495ddd050a0225d58bd0983d09f87cfe2b6aec4f2c1234e4"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fd32ea360bcbb92d28933fc05ed09bffcb1704ba3fc7942e81db0fd4f81a7892"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5e7139af55d1688f8b960ee9ad5adafc4ac17c1c473fe07133ac092310d76544"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dced8146011d2bc2e883f9bd68618b8247387f4bbec46d7392b3c3b032640126"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9bf3325c47b11b2e51bca0824ea217c7cd84491d8ac4eefd1e409705ef092bd"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5794cf59533bc3f1b1c821f7206a3617999db9fbefc345360aafe2e067514929"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e368f200bbc2e4f905b8e71eb38b3c04333bddaa6a2464a6355487b02bb7fb09"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5d706eba36b4c4d5bc6c6377bb6568098765e990cfc21ee16d13963fab7b3e7"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85267bd1aa8880a9c88a8cb71e18d3d64d2751a790e6ca6c27b8ccc724bcd5ad"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:210ef2c3a1f03272649aff1ef992df2e724748918c4bc2d5a90352849eb40bea"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:11d011a7574eb3b82bcc9c1a1d35c1d7075677fdd15de527d91b46bd35e935ee"}, + {file = "kiwisolver-1.4.5.tar.gz", hash = "sha256:e57e563a57fb22a142da34f38acc2fc1a5c864bc29ca1517a88abc963e60d6ec"}, +] + +[[package]] +name = "kubernetes" +version = "29.0.0" +description = "Kubernetes python client" +optional = false +python-versions = ">=3.6" +files = [ + {file = "kubernetes-29.0.0-py2.py3-none-any.whl", hash = "sha256:ab8cb0e0576ccdfb71886366efb102c6a20f268d817be065ce7f9909c631e43e"}, + {file = "kubernetes-29.0.0.tar.gz", hash = "sha256:c4812e227ae74d07d53c88293e564e54b850452715a59a927e7e1bc6b9a60459"}, +] + +[package.dependencies] +certifi = ">=14.05.14" +google-auth = ">=1.0.1" +oauthlib = ">=3.2.2" +python-dateutil = ">=2.5.3" +pyyaml = ">=5.4.1" +requests = "*" +requests-oauthlib = "*" +six = ">=1.9.0" +urllib3 = ">=1.24.2" +websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" + +[package.extras] +adal = ["adal (>=1.0.2)"] + +[[package]] +name = "langchain" +version = "0.1.16" +description = "Building applications with LLMs through composability" +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langchain-0.1.16-py3-none-any.whl", hash = "sha256:bc074cc5e51fad79b9ead1572fc3161918d0f614a6c8f0460543d505ad249ac7"}, + {file = "langchain-0.1.16.tar.gz", hash = "sha256:b6bce78f8c071baa898884accfff15c3d81da2f0dd86c20e2f4c80b41463f49f"}, +] + +[package.dependencies] +aiohttp = ">=3.8.3,<4.0.0" +async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""} +dataclasses-json = ">=0.5.7,<0.7" +jsonpatch = ">=1.33,<2.0" +langchain-community = ">=0.0.32,<0.1" +langchain-core = ">=0.1.42,<0.2.0" +langchain-text-splitters = ">=0.0.1,<0.1" +langsmith = ">=0.1.17,<0.2.0" +numpy = ">=1,<2" +pydantic = ">=1,<3" +PyYAML = ">=5.3" +requests = ">=2,<3" +SQLAlchemy = ">=1.4,<3" +tenacity = ">=8.1.0,<9.0.0" + +[package.extras] +azure = ["azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-textanalytics (>=5.3.0,<6.0.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-core (>=1.26.4,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "azure-search-documents (==11.4.0b8)", "openai (<2)"] +clarifai = ["clarifai (>=9.1.0)"] +cli = ["typer (>=0.9.0,<0.10.0)"] +cohere = ["cohere (>=4,<6)"] +docarray = ["docarray[hnswlib] (>=0.32.0,<0.33.0)"] +embeddings = ["sentence-transformers (>=2,<3)"] +extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<6)", "couchbase (>=4.1.9,<5.0.0)", "dashvector (>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "langchain-openai (>=0.0.2,<0.1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] +javascript = ["esprima (>=4.0.1,<5.0.0)"] +llms = ["clarifai (>=9.1.0)", "cohere (>=4,<6)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (<2)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)"] +openai = ["openai (<2)", "tiktoken (>=0.3.2,<0.6.0)"] +qdrant = ["qdrant-client (>=1.3.1,<2.0.0)"] +text-helpers = ["chardet (>=5.1.0,<6.0.0)"] + +[[package]] +name = "langchain-community" +version = "0.0.32" +description = "Community contributed LangChain integrations." +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langchain_community-0.0.32-py3-none-any.whl", hash = "sha256:406977009999952d0705de3806de2b4867e9bb8eda8ca154a59c7a8ed58da38d"}, + {file = "langchain_community-0.0.32.tar.gz", hash = "sha256:1510217d646c8380f54e9850351f6d2a0b0dd73c501b666c6f4b40baa8160b29"}, +] + +[package.dependencies] +aiohttp = ">=3.8.3,<4.0.0" +dataclasses-json = ">=0.5.7,<0.7" +langchain-core = ">=0.1.41,<0.2.0" +langsmith = ">=0.1.0,<0.2.0" +numpy = ">=1,<2" +PyYAML = ">=5.3" +requests = ">=2,<3" +SQLAlchemy = ">=1.4,<3" +tenacity = ">=8.1.0,<9.0.0" + +[package.extras] +cli = ["typer (>=0.9.0,<0.10.0)"] +extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cloudpickle (>=2.0.0)", "cohere (>=4,<5)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "friendli-client (>=1.2.4,<2.0.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "httpx (>=0.24.1,<0.25.0)", "httpx-sse (>=0.4.0,<0.5.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "nvidia-riva-client (>=2.14.0,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "premai (>=0.3.25,<0.4.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pyjwt (>=2.8.0,<3.0.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "tidb-vector (>=0.0.3,<1.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "tree-sitter (>=0.20.2,<0.21.0)", "tree-sitter-languages (>=1.8.0,<2.0.0)", "upstash-redis (>=0.15.0,<0.16.0)", "vdms (>=0.0.20,<0.0.21)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] + +[[package]] +name = "langchain-core" +version = "0.1.42" +description = "Building applications with LLMs through composability" +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langchain_core-0.1.42-py3-none-any.whl", hash = "sha256:c5653ffa08a44f740295c157a24c0def4a753333f6a2c41f76bf431cd00be8b5"}, + {file = "langchain_core-0.1.42.tar.gz", hash = "sha256:40751bf60ea5d8e2b2efe65290db434717ee3834870c002e40e2811f09d814e6"}, +] + +[package.dependencies] +jsonpatch = ">=1.33,<2.0" +langsmith = ">=0.1.0,<0.2.0" +packaging = ">=23.2,<24.0" +pydantic = ">=1,<3" +PyYAML = ">=5.3" +tenacity = ">=8.1.0,<9.0.0" + +[package.extras] +extended-testing = ["jinja2 (>=3,<4)"] + +[[package]] +name = "langchain-text-splitters" +version = "0.0.1" +description = "LangChain text splitting utilities" +optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "langchain_text_splitters-0.0.1-py3-none-any.whl", hash = "sha256:f5b802f873f5ff6a8b9259ff34d53ed989666ef4e1582e6d1adb3b5520e3839a"}, + {file = "langchain_text_splitters-0.0.1.tar.gz", hash = "sha256:ac459fa98799f5117ad5425a9330b21961321e30bc19a2a2f9f761ddadd62aa1"}, +] + +[package.dependencies] +langchain-core = ">=0.1.28,<0.2.0" + +[package.extras] +extended-testing = ["lxml (>=5.1.0,<6.0.0)"] + +[[package]] +name = "langsmith" +version = "0.1.48" +description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langsmith-0.1.48-py3-none-any.whl", hash = "sha256:2f8967e2aaaed8881efe6f346590681243b315af8ba8a037d969c299d42071d3"}, + {file = "langsmith-0.1.48.tar.gz", hash = "sha256:9cd21cd0928123b2bd2363f03515cb1f6a833d9a9f00420240d5132861d15fcc"}, +] + +[package.dependencies] +orjson = ">=3.9.14,<4.0.0" +pydantic = ">=1,<3" +requests = ">=2,<3" + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" +optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "marshmallow" +version = "3.21.1" +description = "A lightweight library for converting complex datatypes to and from native Python datatypes." +optional = false +python-versions = ">=3.8" +files = [ + {file = "marshmallow-3.21.1-py3-none-any.whl", hash = "sha256:f085493f79efb0644f270a9bf2892843142d80d7174bbbd2f3713f2a589dc633"}, + {file = "marshmallow-3.21.1.tar.gz", hash = "sha256:4e65e9e0d80fc9e609574b9983cf32579f305c718afb30d7233ab818571768c3"}, +] + +[package.dependencies] +packaging = ">=17.0" + +[package.extras] +dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] +docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.2.6)", "sphinx-issues (==4.0.0)", "sphinx-version-warning (==1.1.2)"] +tests = ["pytest", "pytz", "simplejson"] + +[[package]] +name = "matplotlib" +version = "3.8.4" +description = "Python plotting package" +optional = false +python-versions = ">=3.9" +files = [ + {file = "matplotlib-3.8.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:abc9d838f93583650c35eca41cfcec65b2e7cb50fd486da6f0c49b5e1ed23014"}, + {file = "matplotlib-3.8.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f65c9f002d281a6e904976007b2d46a1ee2bcea3a68a8c12dda24709ddc9106"}, + {file = "matplotlib-3.8.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce1edd9f5383b504dbc26eeea404ed0a00656c526638129028b758fd43fc5f10"}, + {file = "matplotlib-3.8.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ecd79298550cba13a43c340581a3ec9c707bd895a6a061a78fa2524660482fc0"}, + {file = "matplotlib-3.8.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:90df07db7b599fe7035d2f74ab7e438b656528c68ba6bb59b7dc46af39ee48ef"}, + {file = "matplotlib-3.8.4-cp310-cp310-win_amd64.whl", hash = "sha256:ac24233e8f2939ac4fd2919eed1e9c0871eac8057666070e94cbf0b33dd9c338"}, + {file = "matplotlib-3.8.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:72f9322712e4562e792b2961971891b9fbbb0e525011e09ea0d1f416c4645661"}, + {file = "matplotlib-3.8.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:232ce322bfd020a434caaffbd9a95333f7c2491e59cfc014041d95e38ab90d1c"}, + {file = "matplotlib-3.8.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6addbd5b488aedb7f9bc19f91cd87ea476206f45d7116fcfe3d31416702a82fa"}, + {file = "matplotlib-3.8.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc4ccdc64e3039fc303defd119658148f2349239871db72cd74e2eeaa9b80b71"}, + {file = "matplotlib-3.8.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b7a2a253d3b36d90c8993b4620183b55665a429da8357a4f621e78cd48b2b30b"}, + {file = "matplotlib-3.8.4-cp311-cp311-win_amd64.whl", hash = "sha256:8080d5081a86e690d7688ffa542532e87f224c38a6ed71f8fbed34dd1d9fedae"}, + {file = "matplotlib-3.8.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6485ac1f2e84676cff22e693eaa4fbed50ef5dc37173ce1f023daef4687df616"}, + {file = "matplotlib-3.8.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c89ee9314ef48c72fe92ce55c4e95f2f39d70208f9f1d9db4e64079420d8d732"}, + {file = "matplotlib-3.8.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50bac6e4d77e4262c4340d7a985c30912054745ec99756ce213bfbc3cb3808eb"}, + {file = "matplotlib-3.8.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f51c4c869d4b60d769f7b4406eec39596648d9d70246428745a681c327a8ad30"}, + {file = "matplotlib-3.8.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b12ba985837e4899b762b81f5b2845bd1a28f4fdd1a126d9ace64e9c4eb2fb25"}, + {file = "matplotlib-3.8.4-cp312-cp312-win_amd64.whl", hash = "sha256:7a6769f58ce51791b4cb8b4d7642489df347697cd3e23d88266aaaee93b41d9a"}, + {file = "matplotlib-3.8.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:843cbde2f0946dadd8c5c11c6d91847abd18ec76859dc319362a0964493f0ba6"}, + {file = "matplotlib-3.8.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1c13f041a7178f9780fb61cc3a2b10423d5e125480e4be51beaf62b172413b67"}, + {file = "matplotlib-3.8.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb44f53af0a62dc80bba4443d9b27f2fde6acfdac281d95bc872dc148a6509cc"}, + {file = "matplotlib-3.8.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:606e3b90897554c989b1e38a258c626d46c873523de432b1462f295db13de6f9"}, + {file = "matplotlib-3.8.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9bb0189011785ea794ee827b68777db3ca3f93f3e339ea4d920315a0e5a78d54"}, + {file = "matplotlib-3.8.4-cp39-cp39-win_amd64.whl", hash = "sha256:6209e5c9aaccc056e63b547a8152661324404dd92340a6e479b3a7f24b42a5d0"}, + {file = "matplotlib-3.8.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c7064120a59ce6f64103c9cefba8ffe6fba87f2c61d67c401186423c9a20fd35"}, + {file = "matplotlib-3.8.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0e47eda4eb2614300fc7bb4657fced3e83d6334d03da2173b09e447418d499f"}, + {file = "matplotlib-3.8.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:493e9f6aa5819156b58fce42b296ea31969f2aab71c5b680b4ea7a3cb5c07d94"}, + {file = "matplotlib-3.8.4.tar.gz", hash = "sha256:8aac397d5e9ec158960e31c381c5ffc52ddd52bd9a47717e2a694038167dffea"}, +] + +[package.dependencies] +contourpy = ">=1.0.1" +cycler = ">=0.10" +fonttools = ">=4.22.0" +importlib-resources = {version = ">=3.2.0", markers = "python_version < \"3.10\""} +kiwisolver = ">=1.3.1" +numpy = ">=1.21" +packaging = ">=20.0" +pillow = ">=8" +pyparsing = ">=2.3.1" +python-dateutil = ">=2.7" + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "mmh3" +version = "4.1.0" +description = "Python extension for MurmurHash (MurmurHash3), a set of fast and robust hash functions." +optional = false +python-versions = "*" +files = [ + {file = "mmh3-4.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:be5ac76a8b0cd8095784e51e4c1c9c318c19edcd1709a06eb14979c8d850c31a"}, + {file = "mmh3-4.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:98a49121afdfab67cd80e912b36404139d7deceb6773a83620137aaa0da5714c"}, + {file = "mmh3-4.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5259ac0535874366e7d1a5423ef746e0d36a9e3c14509ce6511614bdc5a7ef5b"}, + {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5950827ca0453a2be357696da509ab39646044e3fa15cad364eb65d78797437"}, + {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1dd0f652ae99585b9dd26de458e5f08571522f0402155809fd1dc8852a613a39"}, + {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99d25548070942fab1e4a6f04d1626d67e66d0b81ed6571ecfca511f3edf07e6"}, + {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53db8d9bad3cb66c8f35cbc894f336273f63489ce4ac416634932e3cbe79eb5b"}, + {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75da0f615eb55295a437264cc0b736753f830b09d102aa4c2a7d719bc445ec05"}, + {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b926b07fd678ea84b3a2afc1fa22ce50aeb627839c44382f3d0291e945621e1a"}, + {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c5b053334f9b0af8559d6da9dc72cef0a65b325ebb3e630c680012323c950bb6"}, + {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:5bf33dc43cd6de2cb86e0aa73a1cc6530f557854bbbe5d59f41ef6de2e353d7b"}, + {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fa7eacd2b830727ba3dd65a365bed8a5c992ecd0c8348cf39a05cc77d22f4970"}, + {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:42dfd6742b9e3eec599f85270617debfa0bbb913c545bb980c8a4fa7b2d047da"}, + {file = "mmh3-4.1.0-cp310-cp310-win32.whl", hash = "sha256:2974ad343f0d39dcc88e93ee6afa96cedc35a9883bc067febd7ff736e207fa47"}, + {file = "mmh3-4.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:74699a8984ded645c1a24d6078351a056f5a5f1fe5838870412a68ac5e28d865"}, + {file = "mmh3-4.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:f0dc874cedc23d46fc488a987faa6ad08ffa79e44fb08e3cd4d4cf2877c00a00"}, + {file = "mmh3-4.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3280a463855b0eae64b681cd5b9ddd9464b73f81151e87bb7c91a811d25619e6"}, + {file = "mmh3-4.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:97ac57c6c3301769e757d444fa7c973ceb002cb66534b39cbab5e38de61cd896"}, + {file = "mmh3-4.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a7b6502cdb4dbd880244818ab363c8770a48cdccecf6d729ade0241b736b5ec0"}, + {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52ba2da04671a9621580ddabf72f06f0e72c1c9c3b7b608849b58b11080d8f14"}, + {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a5fef4c4ecc782e6e43fbeab09cff1bac82c998a1773d3a5ee6a3605cde343e"}, + {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5135358a7e00991f73b88cdc8eda5203bf9de22120d10a834c5761dbeb07dd13"}, + {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cff9ae76a54f7c6fe0167c9c4028c12c1f6de52d68a31d11b6790bb2ae685560"}, + {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f02576a4d106d7830ca90278868bf0983554dd69183b7bbe09f2fcd51cf54f"}, + {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:073d57425a23721730d3ff5485e2da489dd3c90b04e86243dd7211f889898106"}, + {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:71e32ddec7f573a1a0feb8d2cf2af474c50ec21e7a8263026e8d3b4b629805db"}, + {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7cbb20b29d57e76a58b40fd8b13a9130db495a12d678d651b459bf61c0714cea"}, + {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:a42ad267e131d7847076bb7e31050f6c4378cd38e8f1bf7a0edd32f30224d5c9"}, + {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4a013979fc9390abadc445ea2527426a0e7a4495c19b74589204f9b71bcaafeb"}, + {file = "mmh3-4.1.0-cp311-cp311-win32.whl", hash = "sha256:1d3b1cdad7c71b7b88966301789a478af142bddcb3a2bee563f7a7d40519a00f"}, + {file = "mmh3-4.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:0dc6dc32eb03727467da8e17deffe004fbb65e8b5ee2b502d36250d7a3f4e2ec"}, + {file = "mmh3-4.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:9ae3a5c1b32dda121c7dc26f9597ef7b01b4c56a98319a7fe86c35b8bc459ae6"}, + {file = "mmh3-4.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0033d60c7939168ef65ddc396611077a7268bde024f2c23bdc283a19123f9e9c"}, + {file = "mmh3-4.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d6af3e2287644b2b08b5924ed3a88c97b87b44ad08e79ca9f93d3470a54a41c5"}, + {file = "mmh3-4.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d82eb4defa245e02bb0b0dc4f1e7ee284f8d212633389c91f7fba99ba993f0a2"}, + {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba245e94b8d54765e14c2d7b6214e832557e7856d5183bc522e17884cab2f45d"}, + {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb04e2feeabaad6231e89cd43b3d01a4403579aa792c9ab6fdeef45cc58d4ec0"}, + {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e3b1a27def545ce11e36158ba5d5390cdbc300cfe456a942cc89d649cf7e3b2"}, + {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce0ab79ff736d7044e5e9b3bfe73958a55f79a4ae672e6213e92492ad5e734d5"}, + {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b02268be6e0a8eeb8a924d7db85f28e47344f35c438c1e149878bb1c47b1cd3"}, + {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:deb887f5fcdaf57cf646b1e062d56b06ef2f23421c80885fce18b37143cba828"}, + {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99dd564e9e2b512eb117bd0cbf0f79a50c45d961c2a02402787d581cec5448d5"}, + {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:08373082dfaa38fe97aa78753d1efd21a1969e51079056ff552e687764eafdfe"}, + {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:54b9c6a2ea571b714e4fe28d3e4e2db37abfd03c787a58074ea21ee9a8fd1740"}, + {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a7b1edf24c69e3513f879722b97ca85e52f9032f24a52284746877f6a7304086"}, + {file = "mmh3-4.1.0-cp312-cp312-win32.whl", hash = "sha256:411da64b951f635e1e2284b71d81a5a83580cea24994b328f8910d40bed67276"}, + {file = "mmh3-4.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:bebc3ecb6ba18292e3d40c8712482b4477abd6981c2ebf0e60869bd90f8ac3a9"}, + {file = "mmh3-4.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:168473dd608ade6a8d2ba069600b35199a9af837d96177d3088ca91f2b3798e3"}, + {file = "mmh3-4.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:372f4b7e1dcde175507640679a2a8790185bb71f3640fc28a4690f73da986a3b"}, + {file = "mmh3-4.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:438584b97f6fe13e944faf590c90fc127682b57ae969f73334040d9fa1c7ffa5"}, + {file = "mmh3-4.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6e27931b232fc676675fac8641c6ec6b596daa64d82170e8597f5a5b8bdcd3b6"}, + {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:571a92bad859d7b0330e47cfd1850b76c39b615a8d8e7aa5853c1f971fd0c4b1"}, + {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a69d6afe3190fa08f9e3a58e5145549f71f1f3fff27bd0800313426929c7068"}, + {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afb127be0be946b7630220908dbea0cee0d9d3c583fa9114a07156f98566dc28"}, + {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:940d86522f36348ef1a494cbf7248ab3f4a1638b84b59e6c9e90408bd11ad729"}, + {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3dcccc4935686619a8e3d1f7b6e97e3bd89a4a796247930ee97d35ea1a39341"}, + {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01bb9b90d61854dfc2407c5e5192bfb47222d74f29d140cb2dd2a69f2353f7cc"}, + {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:bcb1b8b951a2c0b0fb8a5426c62a22557e2ffc52539e0a7cc46eb667b5d606a9"}, + {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6477a05d5e5ab3168e82e8b106e316210ac954134f46ec529356607900aea82a"}, + {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:da5892287e5bea6977364b15712a2573c16d134bc5fdcdd4cf460006cf849278"}, + {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:99180d7fd2327a6fffbaff270f760576839dc6ee66d045fa3a450f3490fda7f5"}, + {file = "mmh3-4.1.0-cp38-cp38-win32.whl", hash = "sha256:9b0d4f3949913a9f9a8fb1bb4cc6ecd52879730aab5ff8c5a3d8f5b593594b73"}, + {file = "mmh3-4.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:598c352da1d945108aee0c3c3cfdd0e9b3edef74108f53b49d481d3990402169"}, + {file = "mmh3-4.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:475d6d1445dd080f18f0f766277e1237fa2914e5fe3307a3b2a3044f30892103"}, + {file = "mmh3-4.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5ca07c41e6a2880991431ac717c2a049056fff497651a76e26fc22224e8b5732"}, + {file = "mmh3-4.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ebe052fef4bbe30c0548d12ee46d09f1b69035ca5208a7075e55adfe091be44"}, + {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaefd42e85afb70f2b855a011f7b4d8a3c7e19c3f2681fa13118e4d8627378c5"}, + {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0ae43caae5a47afe1b63a1ae3f0986dde54b5fb2d6c29786adbfb8edc9edfb"}, + {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6218666f74c8c013c221e7f5f8a693ac9cf68e5ac9a03f2373b32d77c48904de"}, + {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ac59294a536ba447b5037f62d8367d7d93b696f80671c2c45645fa9f1109413c"}, + {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:086844830fcd1e5c84fec7017ea1ee8491487cfc877847d96f86f68881569d2e"}, + {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e42b38fad664f56f77f6fbca22d08450f2464baa68acdbf24841bf900eb98e87"}, + {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d08b790a63a9a1cde3b5d7d733ed97d4eb884bfbc92f075a091652d6bfd7709a"}, + {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:73ea4cc55e8aea28c86799ecacebca09e5f86500414870a8abaedfcbaf74d288"}, + {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:f90938ff137130e47bcec8dc1f4ceb02f10178c766e2ef58a9f657ff1f62d124"}, + {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:aa1f13e94b8631c8cd53259250556edcf1de71738936b60febba95750d9632bd"}, + {file = "mmh3-4.1.0-cp39-cp39-win32.whl", hash = "sha256:a3b680b471c181490cf82da2142029edb4298e1bdfcb67c76922dedef789868d"}, + {file = "mmh3-4.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:fefef92e9c544a8dbc08f77a8d1b6d48006a750c4375bbcd5ff8199d761e263b"}, + {file = "mmh3-4.1.0-cp39-cp39-win_arm64.whl", hash = "sha256:8e2c1f6a2b41723a4f82bd5a762a777836d29d664fc0095f17910bea0adfd4a6"}, + {file = "mmh3-4.1.0.tar.gz", hash = "sha256:a1cf25348b9acd229dda464a094d6170f47d2850a1fcb762a3b6172d2ce6ca4a"}, +] + +[package.extras] +test = ["mypy (>=1.0)", "pytest (>=7.0.0)"] + +[[package]] +name = "monotonic" +version = "1.6" +description = "An implementation of time.monotonic() for Python 2 & < 3.3" +optional = false +python-versions = "*" +files = [ + {file = "monotonic-1.6-py2.py3-none-any.whl", hash = "sha256:68687e19a14f11f26d140dd5c86f3dba4bf5df58003000ed467e0e2a69bca96c"}, + {file = "monotonic-1.6.tar.gz", hash = "sha256:3a55207bcfed53ddd5c5bae174524062935efed17792e9de2ad0205ce9ad63f7"}, +] + +[[package]] +name = "mpmath" +version = "1.3.0" +description = "Python library for arbitrary-precision floating-point arithmetic" +optional = false +python-versions = "*" +files = [ + {file = "mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"}, + {file = "mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"}, +] + +[package.extras] +develop = ["codecov", "pycodestyle", "pytest (>=4.6)", "pytest-cov", "wheel"] +docs = ["sphinx"] +gmpy = ["gmpy2 (>=2.1.0a4)"] +tests = ["pytest (>=4.6)"] + +[[package]] +name = "multidict" +version = "6.0.5" +description = "multidict implementation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, + {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, + {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, + {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, + {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, + {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, + {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, + {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, + {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, + {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, + {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, + {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, + {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, + {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, + {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, + {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, +] + +[[package]] +name = "mypy" +version = "1.9.0" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"}, + {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"}, + {file = "mypy-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150"}, + {file = "mypy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374"}, + {file = "mypy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc"}, + {file = "mypy-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129"}, + {file = "mypy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612"}, + {file = "mypy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6"}, + {file = "mypy-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185"}, + {file = "mypy-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913"}, + {file = "mypy-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"}, + {file = "mypy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b"}, + {file = "mypy-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2"}, + {file = "mypy-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e"}, + {file = "mypy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04"}, + {file = "mypy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89"}, + {file = "mypy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02"}, + {file = "mypy-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4"}, + {file = "mypy-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d"}, + {file = "mypy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf"}, + {file = "mypy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9"}, + {file = "mypy-1.9.0-py3-none-any.whl", hash = "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e"}, + {file = "mypy-1.9.0.tar.gz", hash = "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.1.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "numpy" +version = "1.26.4" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, +] + +[[package]] +name = "oauthlib" +version = "3.2.2" +description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" +optional = false +python-versions = ">=3.6" +files = [ + {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, + {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, +] + +[package.extras] +rsa = ["cryptography (>=3.0.0)"] +signals = ["blinker (>=1.4.0)"] +signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] + +[[package]] +name = "onnxruntime" +version = "1.17.3" +description = "ONNX Runtime is a runtime accelerator for Machine Learning models" +optional = false +python-versions = "*" +files = [ + {file = "onnxruntime-1.17.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:d86dde9c0bb435d709e51bd25991c9fe5b9a5b168df45ce119769edc4d198b15"}, + {file = "onnxruntime-1.17.3-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9d87b68bf931ac527b2d3c094ead66bb4381bac4298b65f46c54fe4d1e255865"}, + {file = "onnxruntime-1.17.3-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:26e950cf0333cf114a155f9142e71da344d2b08dfe202763a403ae81cc02ebd1"}, + {file = "onnxruntime-1.17.3-cp310-cp310-win32.whl", hash = "sha256:0962a4d0f5acebf62e1f0bf69b6e0adf16649115d8de854c1460e79972324d68"}, + {file = "onnxruntime-1.17.3-cp310-cp310-win_amd64.whl", hash = "sha256:468ccb8a0faa25c681a41787b1594bf4448b0252d3efc8b62fd8b2411754340f"}, + {file = "onnxruntime-1.17.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e8cd90c1c17d13d47b89ab076471e07fb85467c01dcd87a8b8b5cdfbcb40aa51"}, + {file = "onnxruntime-1.17.3-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a058b39801baefe454eeb8acf3ada298c55a06a4896fafc224c02d79e9037f60"}, + {file = "onnxruntime-1.17.3-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2f823d5eb4807007f3da7b27ca972263df6a1836e6f327384eb266274c53d05d"}, + {file = "onnxruntime-1.17.3-cp311-cp311-win32.whl", hash = "sha256:b66b23f9109e78ff2791628627a26f65cd335dcc5fbd67ff60162733a2f7aded"}, + {file = "onnxruntime-1.17.3-cp311-cp311-win_amd64.whl", hash = "sha256:570760ca53a74cdd751ee49f13de70d1384dcf73d9888b8deac0917023ccda6d"}, + {file = "onnxruntime-1.17.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:77c318178d9c16e9beadd9a4070d8aaa9f57382c3f509b01709f0f010e583b99"}, + {file = "onnxruntime-1.17.3-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:23da8469049b9759082e22c41a444f44a520a9c874b084711b6343672879f50b"}, + {file = "onnxruntime-1.17.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2949730215af3f9289008b2e31e9bbef952012a77035b911c4977edea06f3f9e"}, + {file = "onnxruntime-1.17.3-cp312-cp312-win32.whl", hash = "sha256:6c7555a49008f403fb3b19204671efb94187c5085976ae526cb625f6ede317bc"}, + {file = "onnxruntime-1.17.3-cp312-cp312-win_amd64.whl", hash = "sha256:58672cf20293a1b8a277a5c6c55383359fcdf6119b2f14df6ce3b140f5001c39"}, + {file = "onnxruntime-1.17.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:4395ba86e3c1e93c794a00619ef1aec597ab78f5a5039f3c6d2e9d0695c0a734"}, + {file = "onnxruntime-1.17.3-cp38-cp38-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bdf354c04344ec38564fc22394e1fe08aa6d70d790df00159205a0055c4a4d3f"}, + {file = "onnxruntime-1.17.3-cp38-cp38-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a94b600b7af50e922d44b95a57981e3e35103c6e3693241a03d3ca204740bbda"}, + {file = "onnxruntime-1.17.3-cp38-cp38-win32.whl", hash = "sha256:5a335c76f9c002a8586c7f38bc20fe4b3725ced21f8ead835c3e4e507e42b2ab"}, + {file = "onnxruntime-1.17.3-cp38-cp38-win_amd64.whl", hash = "sha256:8f56a86fbd0ddc8f22696ddeda0677b041381f4168a2ca06f712ef6ec6050d6d"}, + {file = "onnxruntime-1.17.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:e0ae39f5452278cd349520c296e7de3e90d62dc5b0157c6868e2748d7f28b871"}, + {file = "onnxruntime-1.17.3-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ff2dc012bd930578aff5232afd2905bf16620815f36783a941aafabf94b3702"}, + {file = "onnxruntime-1.17.3-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cf6c37483782e4785019b56e26224a25e9b9a35b849d0169ce69189867a22bb1"}, + {file = "onnxruntime-1.17.3-cp39-cp39-win32.whl", hash = "sha256:351bf5a1140dcc43bfb8d3d1a230928ee61fcd54b0ea664c8e9a889a8e3aa515"}, + {file = "onnxruntime-1.17.3-cp39-cp39-win_amd64.whl", hash = "sha256:57a3de15778da8d6cc43fbf6cf038e1e746146300b5f0b1fbf01f6f795dc6440"}, +] + +[package.dependencies] +coloredlogs = "*" +flatbuffers = "*" +numpy = ">=1.21.6" +packaging = "*" +protobuf = "*" +sympy = "*" + +[[package]] +name = "openai" +version = "0.27.9" +description = "Python client library for the OpenAI API" +optional = false +python-versions = ">=3.7.1" +files = [ + {file = "openai-0.27.9-py3-none-any.whl", hash = "sha256:6a3cf8e276d1a6262b50562fbc0cba7967cfebb78ed827d375986b48fdad6475"}, + {file = "openai-0.27.9.tar.gz", hash = "sha256:b687761c82f5ebb6f61efc791b2083d2d068277b94802d4d1369efe39851813d"}, +] + +[package.dependencies] +aiohttp = "*" +matplotlib = {version = "*", optional = true, markers = "extra == \"embeddings\""} +numpy = {version = "*", optional = true, markers = "extra == \"embeddings\""} +openpyxl = {version = ">=3.0.7", optional = true, markers = "extra == \"embeddings\""} +pandas = {version = ">=1.2.3", optional = true, markers = "extra == \"embeddings\""} +pandas-stubs = {version = ">=1.1.0.11", optional = true, markers = "extra == \"embeddings\""} +plotly = {version = "*", optional = true, markers = "extra == \"embeddings\""} +requests = ">=2.20" +scikit-learn = {version = ">=1.0.2", optional = true, markers = "extra == \"embeddings\""} +scipy = {version = "*", optional = true, markers = "extra == \"embeddings\""} +tenacity = {version = ">=8.0.1", optional = true, markers = "extra == \"embeddings\""} +tqdm = "*" + +[package.extras] +datalib = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] +dev = ["black (>=21.6b0,<22.0)", "pytest (==6.*)", "pytest-asyncio", "pytest-mock"] +embeddings = ["matplotlib", "numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "plotly", "scikit-learn (>=1.0.2)", "scipy", "tenacity (>=8.0.1)"] +wandb = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "wandb"] + +[[package]] +name = "openpyxl" +version = "3.1.2" +description = "A Python library to read/write Excel 2010 xlsx/xlsm files" +optional = false +python-versions = ">=3.6" +files = [ + {file = "openpyxl-3.1.2-py2.py3-none-any.whl", hash = "sha256:f91456ead12ab3c6c2e9491cf33ba6d08357d802192379bb482f1033ade496f5"}, + {file = "openpyxl-3.1.2.tar.gz", hash = "sha256:a6f5977418eff3b2d5500d54d9db50c8277a368436f4e4f8ddb1be3422870184"}, +] + +[package.dependencies] +et-xmlfile = "*" + +[[package]] +name = "opentelemetry-api" +version = "1.24.0" +description = "OpenTelemetry Python API" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_api-1.24.0-py3-none-any.whl", hash = "sha256:0f2c363d98d10d1ce93330015ca7fd3a65f60be64e05e30f557c61de52c80ca2"}, + {file = "opentelemetry_api-1.24.0.tar.gz", hash = "sha256:42719f10ce7b5a9a73b10a4baf620574fb8ad495a9cbe5c18d76b75d8689c67e"}, +] + +[package.dependencies] +deprecated = ">=1.2.6" +importlib-metadata = ">=6.0,<=7.0" + +[[package]] +name = "opentelemetry-exporter-otlp-proto-common" +version = "1.24.0" +description = "OpenTelemetry Protobuf encoding" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_exporter_otlp_proto_common-1.24.0-py3-none-any.whl", hash = "sha256:e51f2c9735054d598ad2df5d3eca830fecfb5b0bda0a2fa742c9c7718e12f641"}, + {file = "opentelemetry_exporter_otlp_proto_common-1.24.0.tar.gz", hash = "sha256:5d31fa1ff976cacc38be1ec4e3279a3f88435c75b38b1f7a099a1faffc302461"}, +] + +[package.dependencies] +opentelemetry-proto = "1.24.0" + +[[package]] +name = "opentelemetry-exporter-otlp-proto-grpc" +version = "1.24.0" +description = "OpenTelemetry Collector Protobuf over gRPC Exporter" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_exporter_otlp_proto_grpc-1.24.0-py3-none-any.whl", hash = "sha256:f40d62aa30a0a43cc1657428e59fcf82ad5f7ea8fff75de0f9d9cb6f739e0a3b"}, + {file = "opentelemetry_exporter_otlp_proto_grpc-1.24.0.tar.gz", hash = "sha256:217c6e30634f2c9797999ea9da29f7300479a94a610139b9df17433f915e7baa"}, +] + +[package.dependencies] +deprecated = ">=1.2.6" +googleapis-common-protos = ">=1.52,<2.0" +grpcio = ">=1.0.0,<2.0.0" +opentelemetry-api = ">=1.15,<2.0" +opentelemetry-exporter-otlp-proto-common = "1.24.0" +opentelemetry-proto = "1.24.0" +opentelemetry-sdk = ">=1.24.0,<1.25.0" + +[package.extras] +test = ["pytest-grpc"] + +[[package]] +name = "opentelemetry-instrumentation" +version = "0.45b0" +description = "Instrumentation Tools & Auto Instrumentation for OpenTelemetry Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_instrumentation-0.45b0-py3-none-any.whl", hash = "sha256:06c02e2c952c1b076e8eaedf1b82f715e2937ba7eeacab55913dd434fbcec258"}, + {file = "opentelemetry_instrumentation-0.45b0.tar.gz", hash = "sha256:6c47120a7970bbeb458e6a73686ee9ba84b106329a79e4a4a66761f933709c7e"}, +] + +[package.dependencies] +opentelemetry-api = ">=1.4,<2.0" +setuptools = ">=16.0" +wrapt = ">=1.0.0,<2.0.0" + +[[package]] +name = "opentelemetry-instrumentation-asgi" +version = "0.45b0" +description = "ASGI instrumentation for OpenTelemetry" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_instrumentation_asgi-0.45b0-py3-none-any.whl", hash = "sha256:8be1157ed62f0db24e45fdf7933c530c4338bd025c5d4af7830e903c0756021b"}, + {file = "opentelemetry_instrumentation_asgi-0.45b0.tar.gz", hash = "sha256:97f55620f163fd3d20323e9fd8dc3aacc826c03397213ff36b877e0f4b6b08a6"}, +] + +[package.dependencies] +asgiref = ">=3.0,<4.0" +opentelemetry-api = ">=1.12,<2.0" +opentelemetry-instrumentation = "0.45b0" +opentelemetry-semantic-conventions = "0.45b0" +opentelemetry-util-http = "0.45b0" + +[package.extras] +instruments = ["asgiref (>=3.0,<4.0)"] + +[[package]] +name = "opentelemetry-instrumentation-fastapi" +version = "0.45b0" +description = "OpenTelemetry FastAPI Instrumentation" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_instrumentation_fastapi-0.45b0-py3-none-any.whl", hash = "sha256:77d9c123a363129148f5f66d44094f3d67aaaa2b201396d94782b4a7f9ce4314"}, + {file = "opentelemetry_instrumentation_fastapi-0.45b0.tar.gz", hash = "sha256:5a6b91e1c08a01601845fcfcfdefd0a2aecdb3c356d4a436a3210cb58c21487e"}, +] + +[package.dependencies] +opentelemetry-api = ">=1.12,<2.0" +opentelemetry-instrumentation = "0.45b0" +opentelemetry-instrumentation-asgi = "0.45b0" +opentelemetry-semantic-conventions = "0.45b0" +opentelemetry-util-http = "0.45b0" + +[package.extras] +instruments = ["fastapi (>=0.58,<1.0)"] + +[[package]] +name = "opentelemetry-proto" +version = "1.24.0" +description = "OpenTelemetry Python Proto" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_proto-1.24.0-py3-none-any.whl", hash = "sha256:bcb80e1e78a003040db71ccf83f2ad2019273d1e0828089d183b18a1476527ce"}, + {file = "opentelemetry_proto-1.24.0.tar.gz", hash = "sha256:ff551b8ad63c6cabb1845ce217a6709358dfaba0f75ea1fa21a61ceddc78cab8"}, +] + +[package.dependencies] +protobuf = ">=3.19,<5.0" + +[[package]] +name = "opentelemetry-sdk" +version = "1.24.0" +description = "OpenTelemetry Python SDK" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_sdk-1.24.0-py3-none-any.whl", hash = "sha256:fa731e24efe832e98bcd90902085b359dcfef7d9c9c00eb5b9a18587dae3eb59"}, + {file = "opentelemetry_sdk-1.24.0.tar.gz", hash = "sha256:75bc0563affffa827700e0f4f4a68e1e257db0df13372344aebc6f8a64cde2e5"}, +] + +[package.dependencies] +opentelemetry-api = "1.24.0" +opentelemetry-semantic-conventions = "0.45b0" +typing-extensions = ">=3.7.4" + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.45b0" +description = "OpenTelemetry Semantic Conventions" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_semantic_conventions-0.45b0-py3-none-any.whl", hash = "sha256:a4a6fb9a7bacd9167c082aa4681009e9acdbfa28ffb2387af50c2fef3d30c864"}, + {file = "opentelemetry_semantic_conventions-0.45b0.tar.gz", hash = "sha256:7c84215a44ac846bc4b8e32d5e78935c5c43482e491812a0bb8aaf87e4d92118"}, +] + +[[package]] +name = "opentelemetry-util-http" +version = "0.45b0" +description = "Web util for OpenTelemetry" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_util_http-0.45b0-py3-none-any.whl", hash = "sha256:6628868b501b3004e1860f976f410eeb3d3499e009719d818000f24ce17b6e33"}, + {file = "opentelemetry_util_http-0.45b0.tar.gz", hash = "sha256:4ce08b6a7d52dd7c96b7705b5b4f06fdb6aa3eac1233b3b0bfef8a0cab9a92cd"}, +] + +[[package]] +name = "orjson" +version = "3.10.1" +description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +optional = false +python-versions = ">=3.8" +files = [ + {file = "orjson-3.10.1-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:8ec2fc456d53ea4a47768f622bb709be68acd455b0c6be57e91462259741c4f3"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e900863691d327758be14e2a491931605bd0aded3a21beb6ce133889830b659"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ab6ecbd6fe57785ebc86ee49e183f37d45f91b46fc601380c67c5c5e9c0014a2"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8af7c68b01b876335cccfb4eee0beef2b5b6eae1945d46a09a7c24c9faac7a77"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:915abfb2e528677b488a06eba173e9d7706a20fdfe9cdb15890b74ef9791b85e"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe3fd4a36eff9c63d25503b439531d21828da9def0059c4f472e3845a081aa0b"}, + {file = "orjson-3.10.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d229564e72cfc062e6481a91977a5165c5a0fdce11ddc19ced8471847a67c517"}, + {file = "orjson-3.10.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9e00495b18304173ac843b5c5fbea7b6f7968564d0d49bef06bfaeca4b656f4e"}, + {file = "orjson-3.10.1-cp310-none-win32.whl", hash = "sha256:fd78ec55179545c108174ba19c1795ced548d6cac4d80d014163033c047ca4ea"}, + {file = "orjson-3.10.1-cp310-none-win_amd64.whl", hash = "sha256:50ca42b40d5a442a9e22eece8cf42ba3d7cd4cd0f2f20184b4d7682894f05eec"}, + {file = "orjson-3.10.1-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b345a3d6953628df2f42502297f6c1e1b475cfbf6268013c94c5ac80e8abc04c"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:caa7395ef51af4190d2c70a364e2f42138e0e5fcb4bc08bc9b76997659b27dab"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b01d701decd75ae092e5f36f7b88a1e7a1d3bb7c9b9d7694de850fb155578d5a"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b5028981ba393f443d8fed9049211b979cadc9d0afecf162832f5a5b152c6297"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:31ff6a222ea362b87bf21ff619598a4dc1106aaafaea32b1c4876d692891ec27"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e852a83d7803d3406135fb7a57cf0c1e4a3e73bac80ec621bd32f01c653849c5"}, + {file = "orjson-3.10.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2567bc928ed3c3fcd90998009e8835de7c7dc59aabcf764b8374d36044864f3b"}, + {file = "orjson-3.10.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4ce98cac60b7bb56457bdd2ed7f0d5d7f242d291fdc0ca566c83fa721b52e92d"}, + {file = "orjson-3.10.1-cp311-none-win32.whl", hash = "sha256:813905e111318acb356bb8029014c77b4c647f8b03f314e7b475bd9ce6d1a8ce"}, + {file = "orjson-3.10.1-cp311-none-win_amd64.whl", hash = "sha256:03a3ca0b3ed52bed1a869163a4284e8a7b0be6a0359d521e467cdef7e8e8a3ee"}, + {file = "orjson-3.10.1-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:f02c06cee680b1b3a8727ec26c36f4b3c0c9e2b26339d64471034d16f74f4ef5"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1aa2f127ac546e123283e437cc90b5ecce754a22306c7700b11035dad4ccf85"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2cf29b4b74f585225196944dffdebd549ad2af6da9e80db7115984103fb18a96"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1b130c20b116f413caf6059c651ad32215c28500dce9cd029a334a2d84aa66f"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d31f9a709e6114492136e87c7c6da5e21dfedebefa03af85f3ad72656c493ae9"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d1d169461726f271ab31633cf0e7e7353417e16fb69256a4f8ecb3246a78d6e"}, + {file = "orjson-3.10.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:57c294d73825c6b7f30d11c9e5900cfec9a814893af7f14efbe06b8d0f25fba9"}, + {file = "orjson-3.10.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d7f11dbacfa9265ec76b4019efffabaabba7a7ebf14078f6b4df9b51c3c9a8ea"}, + {file = "orjson-3.10.1-cp312-none-win32.whl", hash = "sha256:d89e5ed68593226c31c76ab4de3e0d35c760bfd3fbf0a74c4b2be1383a1bf123"}, + {file = "orjson-3.10.1-cp312-none-win_amd64.whl", hash = "sha256:aa76c4fe147fd162107ce1692c39f7189180cfd3a27cfbc2ab5643422812da8e"}, + {file = "orjson-3.10.1-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a2c6a85c92d0e494c1ae117befc93cf8e7bca2075f7fe52e32698da650b2c6d1"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9813f43da955197d36a7365eb99bed42b83680801729ab2487fef305b9ced866"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ec917b768e2b34b7084cb6c68941f6de5812cc26c6f1a9fecb728e36a3deb9e8"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5252146b3172d75c8a6d27ebca59c9ee066ffc5a277050ccec24821e68742fdf"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:536429bb02791a199d976118b95014ad66f74c58b7644d21061c54ad284e00f4"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7dfed3c3e9b9199fb9c3355b9c7e4649b65f639e50ddf50efdf86b45c6de04b5"}, + {file = "orjson-3.10.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2b230ec35f188f003f5b543644ae486b2998f6afa74ee3a98fc8ed2e45960afc"}, + {file = "orjson-3.10.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:01234249ba19c6ab1eb0b8be89f13ea21218b2d72d496ef085cfd37e1bae9dd8"}, + {file = "orjson-3.10.1-cp38-none-win32.whl", hash = "sha256:8a884fbf81a3cc22d264ba780920d4885442144e6acaa1411921260416ac9a54"}, + {file = "orjson-3.10.1-cp38-none-win_amd64.whl", hash = "sha256:dab5f802d52b182163f307d2b1f727d30b1762e1923c64c9c56dd853f9671a49"}, + {file = "orjson-3.10.1-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a51fd55d4486bc5293b7a400f9acd55a2dc3b5fc8420d5ffe9b1d6bb1a056a5e"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53521542a6db1411b3bfa1b24ddce18605a3abdc95a28a67b33f9145f26aa8f2"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:27d610df96ac18ace4931411d489637d20ab3b8f63562b0531bba16011998db0"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79244b1456e5846d44e9846534bd9e3206712936d026ea8e6a55a7374d2c0694"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d751efaa8a49ae15cbebdda747a62a9ae521126e396fda8143858419f3b03610"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27ff69c620a4fff33267df70cfd21e0097c2a14216e72943bd5414943e376d77"}, + {file = "orjson-3.10.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ebc58693464146506fde0c4eb1216ff6d4e40213e61f7d40e2f0dde9b2f21650"}, + {file = "orjson-3.10.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5be608c3972ed902e0143a5b8776d81ac1059436915d42defe5c6ae97b3137a4"}, + {file = "orjson-3.10.1-cp39-none-win32.whl", hash = "sha256:4ae10753e7511d359405aadcbf96556c86e9dbf3a948d26c2c9f9a150c52b091"}, + {file = "orjson-3.10.1-cp39-none-win_amd64.whl", hash = "sha256:fb5bc4caa2c192077fdb02dce4e5ef8639e7f20bec4e3a834346693907362932"}, + {file = "orjson-3.10.1.tar.gz", hash = "sha256:a883b28d73370df23ed995c466b4f6c708c1f7a9bdc400fe89165c96c7603204"}, +] + +[[package]] +name = "overrides" +version = "7.7.0" +description = "A decorator to automatically detect mismatch when overriding a method." +optional = false +python-versions = ">=3.6" +files = [ + {file = "overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49"}, + {file = "overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a"}, +] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pandas" +version = "2.2.2" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, + {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, + {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, + {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, + {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"}, + {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"}, + {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.22.4", markers = "python_version < \"3.11\""}, + {version = ">=1.23.2", markers = "python_version == \"3.11\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.7" + +[package.extras] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] + +[[package]] +name = "pandas-stubs" +version = "2.2.1.240316" +description = "Type annotations for pandas" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pandas_stubs-2.2.1.240316-py3-none-any.whl", hash = "sha256:0126a26451a37cb893ea62357ca87ba3d181bd999ec8ba2ca5602e20207d6682"}, + {file = "pandas_stubs-2.2.1.240316.tar.gz", hash = "sha256:236a4f812fb6b1922e9607ff09e427f6d8540c421c9e5a40e3e4ddf7adac7f05"}, +] + +[package.dependencies] +numpy = {version = ">=1.26.0", markers = "python_version < \"3.13\""} +types-pytz = ">=2022.1.1" + +[[package]] +name = "pendulum" +version = "2.1.2" +description = "Python datetimes made easy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, +] + +[package.dependencies] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" + +[[package]] +name = "pillow" +version = "10.3.0" +description = "Python Imaging Library (Fork)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pillow-10.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:90b9e29824800e90c84e4022dd5cc16eb2d9605ee13f05d47641eb183cd73d45"}, + {file = "pillow-10.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a2c405445c79c3f5a124573a051062300936b0281fee57637e706453e452746c"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78618cdbccaa74d3f88d0ad6cb8ac3007f1a6fa5c6f19af64b55ca170bfa1edf"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261ddb7ca91fcf71757979534fb4c128448b5b4c55cb6152d280312062f69599"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ce49c67f4ea0609933d01c0731b34b8695a7a748d6c8d186f95e7d085d2fe475"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b14f16f94cbc61215115b9b1236f9c18403c15dd3c52cf629072afa9d54c1cbf"}, + {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d33891be6df59d93df4d846640f0e46f1a807339f09e79a8040bc887bdcd7ed3"}, + {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b50811d664d392f02f7761621303eba9d1b056fb1868c8cdf4231279645c25f5"}, + {file = "pillow-10.3.0-cp310-cp310-win32.whl", hash = "sha256:ca2870d5d10d8726a27396d3ca4cf7976cec0f3cb706debe88e3a5bd4610f7d2"}, + {file = "pillow-10.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:f0d0591a0aeaefdaf9a5e545e7485f89910c977087e7de2b6c388aec32011e9f"}, + {file = "pillow-10.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:ccce24b7ad89adb5a1e34a6ba96ac2530046763912806ad4c247356a8f33a67b"}, + {file = "pillow-10.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:5f77cf66e96ae734717d341c145c5949c63180842a545c47a0ce7ae52ca83795"}, + {file = "pillow-10.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4b878386c4bf293578b48fc570b84ecfe477d3b77ba39a6e87150af77f40c57"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdcbb4068117dfd9ce0138d068ac512843c52295ed996ae6dd1faf537b6dbc27"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9797a6c8fe16f25749b371c02e2ade0efb51155e767a971c61734b1bf6293994"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:9e91179a242bbc99be65e139e30690e081fe6cb91a8e77faf4c409653de39451"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:1b87bd9d81d179bd8ab871603bd80d8645729939f90b71e62914e816a76fc6bd"}, + {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:81d09caa7b27ef4e61cb7d8fbf1714f5aec1c6b6c5270ee53504981e6e9121ad"}, + {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:048ad577748b9fa4a99a0548c64f2cb8d672d5bf2e643a739ac8faff1164238c"}, + {file = "pillow-10.3.0-cp311-cp311-win32.whl", hash = "sha256:7161ec49ef0800947dc5570f86568a7bb36fa97dd09e9827dc02b718c5643f09"}, + {file = "pillow-10.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:8eb0908e954d093b02a543dc963984d6e99ad2b5e36503d8a0aaf040505f747d"}, + {file = "pillow-10.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:4e6f7d1c414191c1199f8996d3f2282b9ebea0945693fb67392c75a3a320941f"}, + {file = "pillow-10.3.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:e46f38133e5a060d46bd630faa4d9fa0202377495df1f068a8299fd78c84de84"}, + {file = "pillow-10.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:50b8eae8f7334ec826d6eeffaeeb00e36b5e24aa0b9df322c247539714c6df19"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d3bea1c75f8c53ee4d505c3e67d8c158ad4df0d83170605b50b64025917f338"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19aeb96d43902f0a783946a0a87dbdad5c84c936025b8419da0a0cd7724356b1"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:74d28c17412d9caa1066f7a31df8403ec23d5268ba46cd0ad2c50fb82ae40462"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:ff61bfd9253c3915e6d41c651d5f962da23eda633cf02262990094a18a55371a"}, + {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d886f5d353333b4771d21267c7ecc75b710f1a73d72d03ca06df49b09015a9ef"}, + {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b5ec25d8b17217d635f8935dbc1b9aa5907962fae29dff220f2659487891cd3"}, + {file = "pillow-10.3.0-cp312-cp312-win32.whl", hash = "sha256:51243f1ed5161b9945011a7360e997729776f6e5d7005ba0c6879267d4c5139d"}, + {file = "pillow-10.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:412444afb8c4c7a6cc11a47dade32982439925537e483be7c0ae0cf96c4f6a0b"}, + {file = "pillow-10.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:798232c92e7665fe82ac085f9d8e8ca98826f8e27859d9a96b41d519ecd2e49a"}, + {file = "pillow-10.3.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:4eaa22f0d22b1a7e93ff0a596d57fdede2e550aecffb5a1ef1106aaece48e96b"}, + {file = "pillow-10.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cd5e14fbf22a87321b24c88669aad3a51ec052eb145315b3da3b7e3cc105b9a2"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1530e8f3a4b965eb6a7785cf17a426c779333eb62c9a7d1bbcf3ffd5bf77a4aa"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d512aafa1d32efa014fa041d38868fda85028e3f930a96f85d49c7d8ddc0383"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:339894035d0ede518b16073bdc2feef4c991ee991a29774b33e515f1d308e08d"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:aa7e402ce11f0885305bfb6afb3434b3cd8f53b563ac065452d9d5654c7b86fd"}, + {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0ea2a783a2bdf2a561808fe4a7a12e9aa3799b701ba305de596bc48b8bdfce9d"}, + {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c78e1b00a87ce43bb37642c0812315b411e856a905d58d597750eb79802aaaa3"}, + {file = "pillow-10.3.0-cp38-cp38-win32.whl", hash = "sha256:72d622d262e463dfb7595202d229f5f3ab4b852289a1cd09650362db23b9eb0b"}, + {file = "pillow-10.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:2034f6759a722da3a3dbd91a81148cf884e91d1b747992ca288ab88c1de15999"}, + {file = "pillow-10.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2ed854e716a89b1afcedea551cd85f2eb2a807613752ab997b9974aaa0d56936"}, + {file = "pillow-10.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dc1a390a82755a8c26c9964d457d4c9cbec5405896cba94cf51f36ea0d855002"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4203efca580f0dd6f882ca211f923168548f7ba334c189e9eab1178ab840bf60"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3102045a10945173d38336f6e71a8dc71bcaeed55c3123ad4af82c52807b9375"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6fb1b30043271ec92dc65f6d9f0b7a830c210b8a96423074b15c7bc999975f57"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:1dfc94946bc60ea375cc39cff0b8da6c7e5f8fcdc1d946beb8da5c216156ddd8"}, + {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b09b86b27a064c9624d0a6c54da01c1beaf5b6cadfa609cf63789b1d08a797b9"}, + {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d3b2348a78bc939b4fed6552abfd2e7988e0f81443ef3911a4b8498ca084f6eb"}, + {file = "pillow-10.3.0-cp39-cp39-win32.whl", hash = "sha256:45ebc7b45406febf07fef35d856f0293a92e7417ae7933207e90bf9090b70572"}, + {file = "pillow-10.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:0ba26351b137ca4e0db0342d5d00d2e355eb29372c05afd544ebf47c0956ffeb"}, + {file = "pillow-10.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:50fd3f6b26e3441ae07b7c979309638b72abc1a25da31a81a7fbd9495713ef4f"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6b02471b72526ab8a18c39cb7967b72d194ec53c1fd0a70b050565a0f366d355"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8ab74c06ffdab957d7670c2a5a6e1a70181cd10b727cd788c4dd9005b6a8acd9"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:048eeade4c33fdf7e08da40ef402e748df113fd0b4584e32c4af74fe78baaeb2"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e2ec1e921fd07c7cda7962bad283acc2f2a9ccc1b971ee4b216b75fad6f0463"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c8e73e99da7db1b4cad7f8d682cf6abad7844da39834c288fbfa394a47bbced"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:16563993329b79513f59142a6b02055e10514c1a8e86dca8b48a893e33cf91e3"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:dd78700f5788ae180b5ee8902c6aea5a5726bac7c364b202b4b3e3ba2d293170"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:aff76a55a8aa8364d25400a210a65ff59d0168e0b4285ba6bf2bd83cf675ba32"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b7bc2176354defba3edc2b9a777744462da2f8e921fbaf61e52acb95bafa9828"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:793b4e24db2e8742ca6423d3fde8396db336698c55cd34b660663ee9e45ed37f"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d93480005693d247f8346bc8ee28c72a2191bdf1f6b5db469c096c0c867ac015"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c83341b89884e2b2e55886e8fbbf37c3fa5efd6c8907124aeb72f285ae5696e5"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1a1d1915db1a4fdb2754b9de292642a39a7fb28f1736699527bb649484fb966a"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a0eaa93d054751ee9964afa21c06247779b90440ca41d184aeb5d410f20ff591"}, + {file = "pillow-10.3.0.tar.gz", hash = "sha256:9d2455fbf44c914840c793e89aa82d0e1763a14253a000743719ae5946814b2d"}, +] + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] +fpx = ["olefile"] +mic = ["olefile"] +tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] +typing = ["typing-extensions"] +xmp = ["defusedxml"] + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "plotly" +version = "5.20.0" +description = "An open-source, interactive data visualization library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "plotly-5.20.0-py3-none-any.whl", hash = "sha256:837a9c8aa90f2c0a2f0d747b82544d014dc2a2bdde967b5bb1da25b53932d1a9"}, + {file = "plotly-5.20.0.tar.gz", hash = "sha256:bf901c805d22032cfa534b2ff7c5aa6b0659e037f19ec1e0cca7f585918b5c89"}, +] + +[package.dependencies] +packaging = "*" +tenacity = ">=6.2.0" + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "posthog" +version = "3.5.0" +description = "Integrate PostHog into any python application." +optional = false +python-versions = "*" +files = [ + {file = "posthog-3.5.0-py2.py3-none-any.whl", hash = "sha256:3c672be7ba6f95d555ea207d4486c171d06657eb34b3ce25eb043bfe7b6b5b76"}, + {file = "posthog-3.5.0.tar.gz", hash = "sha256:8f7e3b2c6e8714d0c0c542a2109b83a7549f63b7113a133ab2763a89245ef2ef"}, +] + +[package.dependencies] +backoff = ">=1.10.0" +monotonic = ">=1.5" +python-dateutil = ">2.1" +requests = ">=2.7,<3.0" +six = ">=1.5" + +[package.extras] +dev = ["black", "flake8", "flake8-print", "isort", "pre-commit"] +sentry = ["django", "sentry-sdk"] +test = ["coverage", "flake8", "freezegun (==0.3.15)", "mock (>=2.0.0)", "pylint", "pytest", "pytest-timeout"] + +[[package]] +name = "protobuf" +version = "4.25.3" +description = "" +optional = false +python-versions = ">=3.8" +files = [ + {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"}, + {file = "protobuf-4.25.3-cp310-abi3-win_amd64.whl", hash = "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8"}, + {file = "protobuf-4.25.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d"}, + {file = "protobuf-4.25.3-cp38-cp38-win32.whl", hash = "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"}, + {file = "protobuf-4.25.3-cp38-cp38-win_amd64.whl", hash = "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4"}, + {file = "protobuf-4.25.3-cp39-cp39-win32.whl", hash = "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4"}, + {file = "protobuf-4.25.3-cp39-cp39-win_amd64.whl", hash = "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c"}, + {file = "protobuf-4.25.3-py3-none-any.whl", hash = "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9"}, + {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"}, +] + +[[package]] +name = "pulsar-client" +version = "3.5.0" +description = "Apache Pulsar Python client library" +optional = false +python-versions = "*" +files = [ + {file = "pulsar_client-3.5.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:c18552edb2f785de85280fe624bc507467152bff810fc81d7660fa2dfa861f38"}, + {file = "pulsar_client-3.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18d438e456c146f01be41ef146f649dedc8f7bc714d9eaef94cff2e34099812b"}, + {file = "pulsar_client-3.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18a26a0719841103c7a89eb1492c4a8fedf89adaa386375baecbb4fa2707e88f"}, + {file = "pulsar_client-3.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ab0e1605dc5f44a126163fd06cd0a768494ad05123f6e0de89a2c71d6e2d2319"}, + {file = "pulsar_client-3.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdef720891b97656fdce3bf5913ea7729b2156b84ba64314f432c1e72c6117fa"}, + {file = "pulsar_client-3.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:a42544e38773191fe550644a90e8050579476bb2dcf17ac69a4aed62a6cb70e7"}, + {file = "pulsar_client-3.5.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:fd94432ea5d398ea78f8f2e09a217ec5058d26330c137a22690478c031e116da"}, + {file = "pulsar_client-3.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6252ae462e07ece4071213fdd9c76eab82ca522a749f2dc678037d4cbacd40b"}, + {file = "pulsar_client-3.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03b4d440b2d74323784328b082872ee2f206c440b5d224d7941eb3c083ec06c6"}, + {file = "pulsar_client-3.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f60af840b8d64a2fac5a0c1ce6ae0ddffec5f42267c6ded2c5e74bad8345f2a1"}, + {file = "pulsar_client-3.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2277a447c3b7f6571cb1eb9fc5c25da3fdd43d0b2fb91cf52054adfadc7d6842"}, + {file = "pulsar_client-3.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:f20f3e9dd50db2a37059abccad42078b7a4754b8bc1d3ae6502e71c1ad2209f0"}, + {file = "pulsar_client-3.5.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:d61f663d85308e12f44033ba95af88730f581a7e8da44f7a5c080a3aaea4878d"}, + {file = "pulsar_client-3.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a1ba0be25b6f747bcb28102b7d906ec1de48dc9f1a2d9eacdcc6f44ab2c9e17"}, + {file = "pulsar_client-3.5.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a181e3e60ac39df72ccb3c415d7aeac61ad0286497a6e02739a560d5af28393a"}, + {file = "pulsar_client-3.5.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3c72895ff7f51347e4f78b0375b2213fa70dd4790bbb78177b4002846f1fd290"}, + {file = "pulsar_client-3.5.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:547dba1b185a17eba915e51d0a3aca27c80747b6187e5cd7a71a3ca33921decc"}, + {file = "pulsar_client-3.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:443b786eed96bc86d2297a6a42e79f39d1abf217ec603e0bd303f3488c0234af"}, + {file = "pulsar_client-3.5.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:15b58f5d759dd6166db8a2d90ed05a38063b05cda76c36d190d86ef5c9249397"}, + {file = "pulsar_client-3.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af34bfe813dddf772a8a298117fa0a036ee963595d8bc8f00d969a0329ae6ed9"}, + {file = "pulsar_client-3.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27a0fec1dd74e1367d3742ce16679c1807994df60f5e666f440cf39323938fad"}, + {file = "pulsar_client-3.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbcd26ef9c03f96fb9cd91baec3bbd3c4b997834eb3556670d31f41cc25b5f64"}, + {file = "pulsar_client-3.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:afea1d0b6e793fd56e56463145751ff3aa79fdcd5b26e90d0da802a1bbabe07e"}, + {file = "pulsar_client-3.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:da1ab2fb1bef64b966e9403a0a186ebc90368d99e054ce2cae5b1128478f4ef4"}, + {file = "pulsar_client-3.5.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:9ad5dcc0eb8d2a7c0fb8e1fa146a0c6d4bdaf934f1169080b2c64b2f0573e086"}, + {file = "pulsar_client-3.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5870c6805b1a57962ed908d1173e97e13470415998393925c86a43694420389"}, + {file = "pulsar_client-3.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29cb5fedb969895b78301dc00a979133e69940812b8332e4de948bb0ad3db7cb"}, + {file = "pulsar_client-3.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e53c74bfa59b20c66adea95023169060f5048dd8d843e6ef9cd3b8ee2d23e93b"}, + {file = "pulsar_client-3.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:99dbadb13967f1add57010971ed36b5a77d24afcdaea01960d0e55e56cf4ba6f"}, + {file = "pulsar_client-3.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:058887661d438796f42307dcc8054c84dea88a37683dae36498b95d7e1c39b37"}, +] + +[package.dependencies] +certifi = "*" + +[package.extras] +all = ["apache-bookkeeper-client (>=4.16.1)", "fastavro (>=1.9.2)", "grpcio (>=1.60.0)", "prometheus-client", "protobuf (>=3.6.1,<=3.20.3)", "ratelimit"] +avro = ["fastavro (>=1.9.2)"] +functions = ["apache-bookkeeper-client (>=4.16.1)", "grpcio (>=1.60.0)", "prometheus-client", "protobuf (>=3.6.1,<=3.20.3)", "ratelimit"] + +[[package]] +name = "pyasn1" +version = "0.6.0" +description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"}, + {file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"}, +] + +[[package]] +name = "pyasn1-modules" +version = "0.4.0" +description = "A collection of ASN.1-based protocols modules" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"}, + {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"}, +] + +[package.dependencies] +pyasn1 = ">=0.4.6,<0.7.0" + +[[package]] +name = "pydantic" +version = "1.10.15" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50f1666a9940d3d68683c9d96e39640f709d7a72ff8702987dab1761036206bb"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82790d4753ee5d00739d6cb5cf56bceb186d9d6ce134aca3ba7befb1eedbc2c8"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d207d5b87f6cbefbdb1198154292faee8017d7495a54ae58db06762004500d00"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e49db944fad339b2ccb80128ffd3f8af076f9f287197a480bf1e4ca053a866f0"}, + {file = "pydantic-1.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:d3b5c4cbd0c9cb61bbbb19ce335e1f8ab87a811f6d589ed52b0254cf585d709c"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3d5731a120752248844676bf92f25a12f6e45425e63ce22e0849297a093b5b0"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c365ad9c394f9eeffcb30a82f4246c0006417f03a7c0f8315d6211f25f7cb654"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3287e1614393119c67bd4404f46e33ae3be3ed4cd10360b48d0a4459f420c6a3"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be51dd2c8596b25fe43c0a4a59c2bee4f18d88efb8031188f9e7ddc6b469cf44"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6a51a1dd4aa7b3f1317f65493a182d3cff708385327c1c82c81e4a9d6d65b2e4"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4e316e54b5775d1eb59187f9290aeb38acf620e10f7fd2f776d97bb788199e53"}, + {file = "pydantic-1.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:0d142fa1b8f2f0ae11ddd5e3e317dcac060b951d605fda26ca9b234b92214986"}, + {file = "pydantic-1.10.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7ea210336b891f5ea334f8fc9f8f862b87acd5d4a0cbc9e3e208e7aa1775dabf"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3453685ccd7140715e05f2193d64030101eaad26076fad4e246c1cc97e1bb30d"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bea1f03b8d4e8e86702c918ccfd5d947ac268f0f0cc6ed71782e4b09353b26f"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:af9850d98fc21e5bc24ea9e35dd80a29faf6462c608728a110c0a30b595e58b7"}, + {file = "pydantic-1.10.15-cp37-cp37m-win_amd64.whl", hash = "sha256:d31ee5b14a82c9afe2bd26aaa405293d4237d0591527d9129ce36e58f19f95c1"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5e09c19df304b8123938dc3c53d3d3be6ec74b9d7d0d80f4f4b5432ae16c2022"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7ac9237cd62947db00a0d16acf2f3e00d1ae9d3bd602b9c415f93e7a9fc10528"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:584f2d4c98ffec420e02305cf675857bae03c9d617fcfdc34946b1160213a948"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbc6989fad0c030bd70a0b6f626f98a862224bc2b1e36bfc531ea2facc0a340c"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d573082c6ef99336f2cb5b667b781d2f776d4af311574fb53d908517ba523c22"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6bd7030c9abc80134087d8b6e7aa957e43d35714daa116aced57269a445b8f7b"}, + {file = "pydantic-1.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:3350f527bb04138f8aff932dc828f154847fbdc7a1a44c240fbfff1b57f49a12"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51d405b42f1b86703555797270e4970a9f9bd7953f3990142e69d1037f9d9e51"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a980a77c52723b0dc56640ced396b73a024d4b74f02bcb2d21dbbac1debbe9d0"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f1a1fb467d3f49e1708a3f632b11c69fccb4e748a325d5a491ddc7b5d22383"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:676ed48f2c5bbad835f1a8ed8a6d44c1cd5a21121116d2ac40bd1cd3619746ed"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92229f73400b80c13afcd050687f4d7e88de9234d74b27e6728aa689abcf58cc"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2746189100c646682eff0bce95efa7d2e203420d8e1c613dc0c6b4c1d9c1fde4"}, + {file = "pydantic-1.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:394f08750bd8eaad714718812e7fab615f873b3cdd0b9d84e76e51ef3b50b6b7"}, + {file = "pydantic-1.10.15-py3-none-any.whl", hash = "sha256:28e552a060ba2740d0d2aabe35162652c1459a0b9069fe0db7f4ee0e18e74d58"}, + {file = "pydantic-1.10.15.tar.gz", hash = "sha256:ca832e124eda231a60a041da4f013e3ff24949d94a01154b137fc2f2a43c3ffb"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pygments" +version = "2.17.2" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, +] + +[package.extras] +plugins = ["importlib-metadata"] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pyparsing" +version = "3.1.2" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +optional = false +python-versions = ">=3.6.8" +files = [ + {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, + {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, +] + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + +[[package]] +name = "pypika" +version = "0.48.9" +description = "A SQL query builder API for Python" +optional = false +python-versions = "*" +files = [ + {file = "PyPika-0.48.9.tar.gz", hash = "sha256:838836a61747e7c8380cd1b7ff638694b7a7335345d0f559b04b2cd832ad5378"}, +] + +[[package]] +name = "pyproject-hooks" +version = "1.0.0" +description = "Wrappers to call pyproject.toml-based build backend hooks." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyproject_hooks-1.0.0-py3-none-any.whl", hash = "sha256:283c11acd6b928d2f6a7c73fa0d01cb2bdc5f07c57a2eeb6e83d5e56b97976f8"}, + {file = "pyproject_hooks-1.0.0.tar.gz", hash = "sha256:f271b298b97f5955d53fb12b72c1fb1948c22c1a6b70b315c54cedaca0264ef5"}, +] + +[package.dependencies] +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} + +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + +[[package]] +name = "pyreadline3" +version = "3.4.1" +description = "A python implementation of GNU readline." +optional = false +python-versions = "*" +files = [ + {file = "pyreadline3-3.4.1-py3-none-any.whl", hash = "sha256:b0efb6516fd4fb07b45949053826a62fa4cb353db5be2bbb4a7aa1fdd1e345fb"}, + {file = "pyreadline3-3.4.1.tar.gz", hash = "sha256:6f3d1f7b8a31ba32b73917cefc1f28cc660562f39aea8646d30bd6eff21f7bae"}, +] + +[[package]] +name = "pyrsistent" +version = "0.20.0" +description = "Persistent/Functional/Immutable data structures" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, +] + +[[package]] +name = "pytest" +version = "7.4.4" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-dotenv" +version = "1.0.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "regex" +version = "2023.12.25" +description = "Alternative regular expression module, to replace re." +optional = false +python-versions = ">=3.7" +files = [ + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"}, + {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"}, + {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"}, + {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"}, + {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"}, + {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"}, + {file = "regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"}, + {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"}, + {file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = "sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"}, + {file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"}, + {file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"}, + {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"}, + {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"}, + {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"}, + {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-cache" +version = "1.2.0" +description = "A persistent cache for python requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, +] + +[package.dependencies] +attrs = ">=21.2" +cattrs = ">=22.2" +platformdirs = ">=2.5" +requests = ">=2.22" +url-normalize = ">=1.4" +urllib3 = ">=1.25.5" + +[package.extras] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] +bson = ["bson (>=0.5)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] +dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] +json = ["ujson (>=5.4)"] +mongodb = ["pymongo (>=3)"] +redis = ["redis (>=3)"] +security = ["itsdangerous (>=2.0)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "requests-oauthlib" +version = "2.0.0" +description = "OAuthlib authentication support for Requests." +optional = false +python-versions = ">=3.4" +files = [ + {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"}, + {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"}, +] + +[package.dependencies] +oauthlib = ">=3.0.0" +requests = ">=2.0.0" + +[package.extras] +rsa = ["oauthlib[signedtoken] (>=3.0.0)"] + +[[package]] +name = "rich" +version = "13.7.1" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, + {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + +[[package]] +name = "rsa" +version = "4.9" +description = "Pure-Python RSA implementation" +optional = false +python-versions = ">=3.6,<4" +files = [ + {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, + {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, +] + +[package.dependencies] +pyasn1 = ">=0.1.3" + +[[package]] +name = "ruff" +version = "0.3.7" +description = "An extremely fast Python linter and code formatter, written in Rust." +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0e8377cccb2f07abd25e84fc5b2cbe48eeb0fea9f1719cad7caedb061d70e5ce"}, + {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:15a4d1cc1e64e556fa0d67bfd388fed416b7f3b26d5d1c3e7d192c897e39ba4b"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d28bdf3d7dc71dd46929fafeec98ba89b7c3550c3f0978e36389b5631b793663"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:379b67d4f49774ba679593b232dcd90d9e10f04d96e3c8ce4a28037ae473f7bb"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c060aea8ad5ef21cdfbbe05475ab5104ce7827b639a78dd55383a6e9895b7c51"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:ebf8f615dde968272d70502c083ebf963b6781aacd3079081e03b32adfe4d58a"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d48098bd8f5c38897b03604f5428901b65e3c97d40b3952e38637b5404b739a2"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da8a4fda219bf9024692b1bc68c9cff4b80507879ada8769dc7e985755d662ea"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c44e0149f1d8b48c4d5c33d88c677a4aa22fd09b1683d6a7ff55b816b5d074f"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3050ec0af72b709a62ecc2aca941b9cd479a7bf2b36cc4562f0033d688e44fa1"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a29cc38e4c1ab00da18a3f6777f8b50099d73326981bb7d182e54a9a21bb4ff7"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5b15cc59c19edca917f51b1956637db47e200b0fc5e6e1878233d3a938384b0b"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e491045781b1e38b72c91247cf4634f040f8d0cb3e6d3d64d38dcf43616650b4"}, + {file = "ruff-0.3.7-py3-none-win32.whl", hash = "sha256:bc931de87593d64fad3a22e201e55ad76271f1d5bfc44e1a1887edd0903c7d9f"}, + {file = "ruff-0.3.7-py3-none-win_amd64.whl", hash = "sha256:5ef0e501e1e39f35e03c2acb1d1238c595b8bb36cf7a170e7c1df1b73da00e74"}, + {file = "ruff-0.3.7-py3-none-win_arm64.whl", hash = "sha256:789e144f6dc7019d1f92a812891c645274ed08af6037d11fc65fcbc183b7d59f"}, + {file = "ruff-0.3.7.tar.gz", hash = "sha256:d5c1aebee5162c2226784800ae031f660c350e7a3402c4d1f8ea4e97e232e3ba"}, +] + +[[package]] +name = "scikit-learn" +version = "1.4.2" +description = "A set of python modules for machine learning and data mining" +optional = false +python-versions = ">=3.9" +files = [ + {file = "scikit-learn-1.4.2.tar.gz", hash = "sha256:daa1c471d95bad080c6e44b4946c9390a4842adc3082572c20e4f8884e39e959"}, + {file = "scikit_learn-1.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8539a41b3d6d1af82eb629f9c57f37428ff1481c1e34dddb3b9d7af8ede67ac5"}, + {file = "scikit_learn-1.4.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:68b8404841f944a4a1459b07198fa2edd41a82f189b44f3e1d55c104dbc2e40c"}, + {file = "scikit_learn-1.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81bf5d8bbe87643103334032dd82f7419bc8c8d02a763643a6b9a5c7288c5054"}, + {file = "scikit_learn-1.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36f0ea5d0f693cb247a073d21a4123bdf4172e470e6d163c12b74cbb1536cf38"}, + {file = "scikit_learn-1.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:87440e2e188c87db80ea4023440923dccbd56fbc2d557b18ced00fef79da0727"}, + {file = "scikit_learn-1.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:45dee87ac5309bb82e3ea633955030df9bbcb8d2cdb30383c6cd483691c546cc"}, + {file = "scikit_learn-1.4.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:1d0b25d9c651fd050555aadd57431b53d4cf664e749069da77f3d52c5ad14b3b"}, + {file = "scikit_learn-1.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0203c368058ab92efc6168a1507d388d41469c873e96ec220ca8e74079bf62e"}, + {file = "scikit_learn-1.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44c62f2b124848a28fd695db5bc4da019287abf390bfce602ddc8aa1ec186aae"}, + {file = "scikit_learn-1.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:5cd7b524115499b18b63f0c96f4224eb885564937a0b3477531b2b63ce331904"}, + {file = "scikit_learn-1.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:90378e1747949f90c8f385898fff35d73193dfcaec3dd75d6b542f90c4e89755"}, + {file = "scikit_learn-1.4.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:ff4effe5a1d4e8fed260a83a163f7dbf4f6087b54528d8880bab1d1377bd78be"}, + {file = "scikit_learn-1.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:671e2f0c3f2c15409dae4f282a3a619601fa824d2c820e5b608d9d775f91780c"}, + {file = "scikit_learn-1.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d36d0bc983336bbc1be22f9b686b50c964f593c8a9a913a792442af9bf4f5e68"}, + {file = "scikit_learn-1.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:d762070980c17ba3e9a4a1e043ba0518ce4c55152032f1af0ca6f39b376b5928"}, + {file = "scikit_learn-1.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d9993d5e78a8148b1d0fdf5b15ed92452af5581734129998c26f481c46586d68"}, + {file = "scikit_learn-1.4.2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:426d258fddac674fdf33f3cb2d54d26f49406e2599dbf9a32b4d1696091d4256"}, + {file = "scikit_learn-1.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5460a1a5b043ae5ae4596b3126a4ec33ccba1b51e7ca2c5d36dac2169f62ab1d"}, + {file = "scikit_learn-1.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49d64ef6cb8c093d883e5a36c4766548d974898d378e395ba41a806d0e824db8"}, + {file = "scikit_learn-1.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:c97a50b05c194be9146d61fe87dbf8eac62b203d9e87a3ccc6ae9aed2dfaf361"}, +] + +[package.dependencies] +joblib = ">=1.2.0" +numpy = ">=1.19.5" +scipy = ">=1.6.0" +threadpoolctl = ">=2.0.0" + +[package.extras] +benchmark = ["matplotlib (>=3.3.4)", "memory-profiler (>=0.57.0)", "pandas (>=1.1.5)"] +docs = ["Pillow (>=7.1.2)", "matplotlib (>=3.3.4)", "memory-profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)", "sphinx (>=6.0.0)", "sphinx-copybutton (>=0.5.2)", "sphinx-gallery (>=0.15.0)", "sphinx-prompt (>=1.3.0)", "sphinxext-opengraph (>=0.4.2)"] +examples = ["matplotlib (>=3.3.4)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)"] +tests = ["black (>=23.3.0)", "matplotlib (>=3.3.4)", "mypy (>=1.3)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "polars (>=0.19.12)", "pooch (>=1.6.0)", "pyamg (>=4.0.0)", "pyarrow (>=12.0.0)", "pytest (>=7.1.2)", "pytest-cov (>=2.9.0)", "ruff (>=0.0.272)", "scikit-image (>=0.17.2)"] + +[[package]] +name = "scipy" +version = "1.13.0" +description = "Fundamental algorithms for scientific computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "scipy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ba419578ab343a4e0a77c0ef82f088238a93eef141b2b8017e46149776dfad4d"}, + {file = "scipy-1.13.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:22789b56a999265431c417d462e5b7f2b487e831ca7bef5edeb56efe4c93f86e"}, + {file = "scipy-1.13.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05f1432ba070e90d42d7fd836462c50bf98bd08bed0aa616c359eed8a04e3922"}, + {file = "scipy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8434f6f3fa49f631fae84afee424e2483289dfc30a47755b4b4e6b07b2633a4"}, + {file = "scipy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:dcbb9ea49b0167de4167c40eeee6e167caeef11effb0670b554d10b1e693a8b9"}, + {file = "scipy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:1d2f7bb14c178f8b13ebae93f67e42b0a6b0fc50eba1cd8021c9b6e08e8fb1cd"}, + {file = "scipy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fbcf8abaf5aa2dc8d6400566c1a727aed338b5fe880cde64907596a89d576fa"}, + {file = "scipy-1.13.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:5e4a756355522eb60fcd61f8372ac2549073c8788f6114449b37e9e8104f15a5"}, + {file = "scipy-1.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5acd8e1dbd8dbe38d0004b1497019b2dbbc3d70691e65d69615f8a7292865d7"}, + {file = "scipy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ff7dad5d24a8045d836671e082a490848e8639cabb3dbdacb29f943a678683d"}, + {file = "scipy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4dca18c3ffee287ddd3bc8f1dabaf45f5305c5afc9f8ab9cbfab855e70b2df5c"}, + {file = "scipy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:a2f471de4d01200718b2b8927f7d76b5d9bde18047ea0fa8bd15c5ba3f26a1d6"}, + {file = "scipy-1.13.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d0de696f589681c2802f9090fff730c218f7c51ff49bf252b6a97ec4a5d19e8b"}, + {file = "scipy-1.13.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:b2a3ff461ec4756b7e8e42e1c681077349a038f0686132d623fa404c0bee2551"}, + {file = "scipy-1.13.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf9fe63e7a4bf01d3645b13ff2aa6dea023d38993f42aaac81a18b1bda7a82a"}, + {file = "scipy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e7626dfd91cdea5714f343ce1176b6c4745155d234f1033584154f60ef1ff42"}, + {file = "scipy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:109d391d720fcebf2fbe008621952b08e52907cf4c8c7efc7376822151820820"}, + {file = "scipy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:8930ae3ea371d6b91c203b1032b9600d69c568e537b7988a3073dfe4d4774f21"}, + {file = "scipy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5407708195cb38d70fd2d6bb04b1b9dd5c92297d86e9f9daae1576bd9e06f602"}, + {file = "scipy-1.13.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:ac38c4c92951ac0f729c4c48c9e13eb3675d9986cc0c83943784d7390d540c78"}, + {file = "scipy-1.13.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09c74543c4fbeb67af6ce457f6a6a28e5d3739a87f62412e4a16e46f164f0ae5"}, + {file = "scipy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28e286bf9ac422d6beb559bc61312c348ca9b0f0dae0d7c5afde7f722d6ea13d"}, + {file = "scipy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:33fde20efc380bd23a78a4d26d59fc8704e9b5fd9b08841693eb46716ba13d86"}, + {file = "scipy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:45c08bec71d3546d606989ba6e7daa6f0992918171e2a6f7fbedfa7361c2de1e"}, + {file = "scipy-1.13.0.tar.gz", hash = "sha256:58569af537ea29d3f78e5abd18398459f195546bb3be23d16677fb26616cc11e"}, +] + +[package.dependencies] +numpy = ">=1.22.4,<2.3" + +[package.extras] +dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy", "pycodestyle", "pydevtool", "rich-click", "ruff", "types-psutil", "typing_extensions"] +doc = ["jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.12.0)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0)", "sphinx-design (>=0.4.0)"] +test = ["array-api-strict", "asv", "gmpy2", "hypothesis (>=6.30)", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] + +[[package]] +name = "setuptools" +version = "69.5.1" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"}, + {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "shellingham" +version = "1.5.4" +description = "Tool to Detect Surrounding Shell" +optional = false +python-versions = ">=3.7" +files = [ + {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, + {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, +] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "sqlalchemy" +version = "2.0.29" +description = "Database Abstraction Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "SQLAlchemy-2.0.29-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4c142852ae192e9fe5aad5c350ea6befe9db14370b34047e1f0f7cf99e63c63b"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:99a1e69d4e26f71e750e9ad6fdc8614fbddb67cfe2173a3628a2566034e223c7"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ef3fbccb4058355053c51b82fd3501a6e13dd808c8d8cd2561e610c5456013c"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d6753305936eddc8ed190e006b7bb33a8f50b9854823485eed3a886857ab8d1"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0f3ca96af060a5250a8ad5a63699180bc780c2edf8abf96c58af175921df847a"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c4520047006b1d3f0d89e0532978c0688219857eb2fee7c48052560ae76aca1e"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-win32.whl", hash = "sha256:b2a0e3cf0caac2085ff172c3faacd1e00c376e6884b5bc4dd5b6b84623e29e4f"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-win_amd64.whl", hash = "sha256:01d10638a37460616708062a40c7b55f73e4d35eaa146781c683e0fa7f6c43fb"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:308ef9cb41d099099fffc9d35781638986870b29f744382904bf9c7dadd08513"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:296195df68326a48385e7a96e877bc19aa210e485fa381c5246bc0234c36c78e"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a13b917b4ffe5a0a31b83d051d60477819ddf18276852ea68037a144a506efb9"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f6d971255d9ddbd3189e2e79d743ff4845c07f0633adfd1de3f63d930dbe673"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:61405ea2d563407d316c63a7b5271ae5d274a2a9fbcd01b0aa5503635699fa1e"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:de7202ffe4d4a8c1e3cde1c03e01c1a3772c92858837e8f3879b497158e4cb44"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-win32.whl", hash = "sha256:b5d7ed79df55a731749ce65ec20d666d82b185fa4898430b17cb90c892741520"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-win_amd64.whl", hash = "sha256:205f5a2b39d7c380cbc3b5dcc8f2762fb5bcb716838e2d26ccbc54330775b003"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d96710d834a6fb31e21381c6d7b76ec729bd08c75a25a5184b1089141356171f"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:52de4736404e53c5c6a91ef2698c01e52333988ebdc218f14c833237a0804f1b"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c7b02525ede2a164c5fa5014915ba3591730f2cc831f5be9ff3b7fd3e30958e"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dfefdb3e54cd15f5d56fd5ae32f1da2d95d78319c1f6dfb9bcd0eb15d603d5d"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a88913000da9205b13f6f195f0813b6ffd8a0c0c2bd58d499e00a30eb508870c"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fecd5089c4be1bcc37c35e9aa678938d2888845a134dd016de457b942cf5a758"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-win32.whl", hash = "sha256:8197d6f7a3d2b468861ebb4c9f998b9df9e358d6e1cf9c2a01061cb9b6cf4e41"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-win_amd64.whl", hash = "sha256:9b19836ccca0d321e237560e475fd99c3d8655d03da80c845c4da20dda31b6e1"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:87a1d53a5382cdbbf4b7619f107cc862c1b0a4feb29000922db72e5a66a5ffc0"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a0732dffe32333211801b28339d2a0babc1971bc90a983e3035e7b0d6f06b93"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90453597a753322d6aa770c5935887ab1fc49cc4c4fdd436901308383d698b4b"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ea311d4ee9a8fa67f139c088ae9f905fcf0277d6cd75c310a21a88bf85e130f5"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5f20cb0a63a3e0ec4e169aa8890e32b949c8145983afa13a708bc4b0a1f30e03"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-win32.whl", hash = "sha256:e5bbe55e8552019c6463709b39634a5fc55e080d0827e2a3a11e18eb73f5cdbd"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-win_amd64.whl", hash = "sha256:c2f9c762a2735600654c654bf48dad388b888f8ce387b095806480e6e4ff6907"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7e614d7a25a43a9f54fcce4675c12761b248547f3d41b195e8010ca7297c369c"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:471fcb39c6adf37f820350c28aac4a7df9d3940c6548b624a642852e727ea586"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:988569c8732f54ad3234cf9c561364221a9e943b78dc7a4aaf35ccc2265f1930"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dddaae9b81c88083e6437de95c41e86823d150f4ee94bf24e158a4526cbead01"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:334184d1ab8f4c87f9652b048af3f7abea1c809dfe526fb0435348a6fef3d380"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:38b624e5cf02a69b113c8047cf7f66b5dfe4a2ca07ff8b8716da4f1b3ae81567"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-win32.whl", hash = "sha256:bab41acf151cd68bc2b466deae5deeb9e8ae9c50ad113444151ad965d5bf685b"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-win_amd64.whl", hash = "sha256:52c8011088305476691b8750c60e03b87910a123cfd9ad48576d6414b6ec2a1d"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3071ad498896907a5ef756206b9dc750f8e57352113c19272bdfdc429c7bd7de"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dba622396a3170974f81bad49aacebd243455ec3cc70615aeaef9e9613b5bca5"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b184e3de58009cc0bf32e20f137f1ec75a32470f5fede06c58f6c355ed42a72"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c37f1050feb91f3d6c32f864d8e114ff5545a4a7afe56778d76a9aec62638ba"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bda7ce59b06d0f09afe22c56714c65c957b1068dee3d5e74d743edec7daba552"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:25664e18bef6dc45015b08f99c63952a53a0a61f61f2e48a9e70cec27e55f699"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-win32.whl", hash = "sha256:77d29cb6c34b14af8a484e831ab530c0f7188f8efed1c6a833a2c674bf3c26ec"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-win_amd64.whl", hash = "sha256:04c487305ab035a9548f573763915189fc0fe0824d9ba28433196f8436f1449c"}, + {file = "SQLAlchemy-2.0.29-py3-none-any.whl", hash = "sha256:dc4ee2d4ee43251905f88637d5281a8d52e916a021384ec10758826f5cbae305"}, + {file = "SQLAlchemy-2.0.29.tar.gz", hash = "sha256:bd9566b8e58cabd700bc367b60e90d9349cd16f0984973f98a9a09f9c64e86f0"}, +] + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} +typing-extensions = ">=4.6.0" + +[package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=8)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3_binary"] + +[[package]] +name = "starlette" +version = "0.37.2" +description = "The little ASGI library that shines." +optional = false +python-versions = ">=3.8" +files = [ + {file = "starlette-0.37.2-py3-none-any.whl", hash = "sha256:6fe59f29268538e5d0d182f2791a479a0c64638e6935d1c6989e63fb2699c6ee"}, + {file = "starlette-0.37.2.tar.gz", hash = "sha256:9af890290133b79fc3db55474ade20f6220a364a0402e0b556e7cd5e1e093823"}, +] + +[package.dependencies] +anyio = ">=3.4.0,<5" +typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} + +[package.extras] +full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"] + +[[package]] +name = "sympy" +version = "1.12" +description = "Computer algebra system (CAS) in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sympy-1.12-py3-none-any.whl", hash = "sha256:c3588cd4295d0c0f603d0f2ae780587e64e2efeedb3521e46b9bb1d08d184fa5"}, + {file = "sympy-1.12.tar.gz", hash = "sha256:ebf595c8dac3e0fdc4152c51878b498396ec7f30e7a914d6071e674d49420fb8"}, +] + +[package.dependencies] +mpmath = ">=0.19" + +[[package]] +name = "tenacity" +version = "8.2.3" +description = "Retry code until it succeeds" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tenacity-8.2.3-py3-none-any.whl", hash = "sha256:ce510e327a630c9e1beaf17d42e6ffacc88185044ad85cf74c0a8887c6a0f88c"}, + {file = "tenacity-8.2.3.tar.gz", hash = "sha256:5398ef0d78e63f40007c1fb4c0bff96e1911394d2fa8d194f77619c05ff6cc8a"}, +] + +[package.extras] +doc = ["reno", "sphinx", "tornado (>=4.5)"] + +[[package]] +name = "threadpoolctl" +version = "3.4.0" +description = "threadpoolctl" +optional = false +python-versions = ">=3.8" +files = [ + {file = "threadpoolctl-3.4.0-py3-none-any.whl", hash = "sha256:8f4c689a65b23e5ed825c8436a92b818aac005e0f3715f6a1664d7c7ee29d262"}, + {file = "threadpoolctl-3.4.0.tar.gz", hash = "sha256:f11b491a03661d6dd7ef692dd422ab34185d982466c49c8f98c8f716b5c93196"}, +] + +[[package]] +name = "tiktoken" +version = "0.4.0" +description = "tiktoken is a fast BPE tokeniser for use with OpenAI's models" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tiktoken-0.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:176cad7f053d2cc82ce7e2a7c883ccc6971840a4b5276740d0b732a2b2011f8a"}, + {file = "tiktoken-0.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:450d504892b3ac80207700266ee87c932df8efea54e05cefe8613edc963c1285"}, + {file = "tiktoken-0.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00d662de1e7986d129139faf15e6a6ee7665ee103440769b8dedf3e7ba6ac37f"}, + {file = "tiktoken-0.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5727d852ead18b7927b8adf558a6f913a15c7766725b23dbe21d22e243041b28"}, + {file = "tiktoken-0.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c06cd92b09eb0404cedce3702fa866bf0d00e399439dad3f10288ddc31045422"}, + {file = "tiktoken-0.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9ec161e40ed44e4210d3b31e2ff426b4a55e8254f1023e5d2595cb60044f8ea6"}, + {file = "tiktoken-0.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:1e8fa13cf9889d2c928b9e258e9dbbbf88ab02016e4236aae76e3b4f82dd8288"}, + {file = "tiktoken-0.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bb2341836b725c60d0ab3c84970b9b5f68d4b733a7bcb80fb25967e5addb9920"}, + {file = "tiktoken-0.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ca30367ad750ee7d42fe80079d3092bd35bb266be7882b79c3bd159b39a17b0"}, + {file = "tiktoken-0.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3dc3df19ddec79435bb2a94ee46f4b9560d0299c23520803d851008445671197"}, + {file = "tiktoken-0.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d980fa066e962ef0f4dad0222e63a484c0c993c7a47c7dafda844ca5aded1f3"}, + {file = "tiktoken-0.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:329f548a821a2f339adc9fbcfd9fc12602e4b3f8598df5593cfc09839e9ae5e4"}, + {file = "tiktoken-0.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b1a038cee487931a5caaef0a2e8520e645508cde21717eacc9af3fbda097d8bb"}, + {file = "tiktoken-0.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:08efa59468dbe23ed038c28893e2a7158d8c211c3dd07f2bbc9a30e012512f1d"}, + {file = "tiktoken-0.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f3020350685e009053829c1168703c346fb32c70c57d828ca3742558e94827a9"}, + {file = "tiktoken-0.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba16698c42aad8190e746cd82f6a06769ac7edd415d62ba027ea1d99d958ed93"}, + {file = "tiktoken-0.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c15d9955cc18d0d7ffcc9c03dc51167aedae98542238b54a2e659bd25fe77ed"}, + {file = "tiktoken-0.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64e1091c7103100d5e2c6ea706f0ec9cd6dc313e6fe7775ef777f40d8c20811e"}, + {file = "tiktoken-0.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e87751b54eb7bca580126353a9cf17a8a8eaadd44edaac0e01123e1513a33281"}, + {file = "tiktoken-0.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e063b988b8ba8b66d6cc2026d937557437e79258095f52eaecfafb18a0a10c03"}, + {file = "tiktoken-0.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9c6dd439e878172dc163fced3bc7b19b9ab549c271b257599f55afc3a6a5edef"}, + {file = "tiktoken-0.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8d1d97f83697ff44466c6bef5d35b6bcdb51e0125829a9c0ed1e6e39fb9a08fb"}, + {file = "tiktoken-0.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1b6bce7c68aa765f666474c7c11a7aebda3816b58ecafb209afa59c799b0dd2d"}, + {file = "tiktoken-0.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a73286c35899ca51d8d764bc0b4d60838627ce193acb60cc88aea60bddec4fd"}, + {file = "tiktoken-0.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0394967d2236a60fd0aacef26646b53636423cc9c70c32f7c5124ebe86f3093"}, + {file = "tiktoken-0.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:dae2af6f03ecba5f679449fa66ed96585b2fa6accb7fd57d9649e9e398a94f44"}, + {file = "tiktoken-0.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:55e251b1da3c293432179cf7c452cfa35562da286786be5a8b1ee3405c2b0dd2"}, + {file = "tiktoken-0.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:c835d0ee1f84a5aa04921717754eadbc0f0a56cf613f78dfc1cf9ad35f6c3fea"}, + {file = "tiktoken-0.4.0.tar.gz", hash = "sha256:59b20a819969735b48161ced9b92f05dc4519c17be4015cfb73b65270a243620"}, +] + +[package.dependencies] +regex = ">=2022.1.18" +requests = ">=2.26.0" + +[package.extras] +blobfile = ["blobfile (>=2)"] + +[[package]] +name = "tokenizers" +version = "0.15.2" +description = "" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tokenizers-0.15.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:52f6130c9cbf70544287575a985bf44ae1bda2da7e8c24e97716080593638012"}, + {file = "tokenizers-0.15.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:054c1cc9c6d68f7ffa4e810b3d5131e0ba511b6e4be34157aa08ee54c2f8d9ee"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a9b9b070fdad06e347563b88c278995735292ded1132f8657084989a4c84a6d5"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea621a7eef4b70e1f7a4e84dd989ae3f0eeb50fc8690254eacc08acb623e82f1"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cf7fd9a5141634fa3aa8d6b7be362e6ae1b4cda60da81388fa533e0b552c98fd"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44f2a832cd0825295f7179eaf173381dc45230f9227ec4b44378322d900447c9"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8b9ec69247a23747669ec4b0ca10f8e3dfb3545d550258129bd62291aabe8605"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40b6a4c78da863ff26dbd5ad9a8ecc33d8a8d97b535172601cf00aee9d7ce9ce"}, + {file = "tokenizers-0.15.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5ab2a4d21dcf76af60e05af8063138849eb1d6553a0d059f6534357bce8ba364"}, + {file = "tokenizers-0.15.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a47acfac7e511f6bbfcf2d3fb8c26979c780a91e06fb5b9a43831b2c0153d024"}, + {file = "tokenizers-0.15.2-cp310-none-win32.whl", hash = "sha256:064ff87bb6acdbd693666de9a4b692add41308a2c0ec0770d6385737117215f2"}, + {file = "tokenizers-0.15.2-cp310-none-win_amd64.whl", hash = "sha256:3b919afe4df7eb6ac7cafd2bd14fb507d3f408db7a68c43117f579c984a73843"}, + {file = "tokenizers-0.15.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:89cd1cb93e4b12ff39bb2d626ad77e35209de9309a71e4d3d4672667b4b256e7"}, + {file = "tokenizers-0.15.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cfed5c64e5be23d7ee0f0e98081a25c2a46b0b77ce99a4f0605b1ec43dd481fa"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a907d76dcfda37023ba203ab4ceeb21bc5683436ebefbd895a0841fd52f6f6f2"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20ea60479de6fc7b8ae756b4b097572372d7e4032e2521c1bbf3d90c90a99ff0"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:48e2b9335be2bc0171df9281385c2ed06a15f5cf121c44094338306ab7b33f2c"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:112a1dd436d2cc06e6ffdc0b06d55ac019a35a63afd26475205cb4b1bf0bfbff"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4620cca5c2817177ee8706f860364cc3a8845bc1e291aaf661fb899e5d1c45b0"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccd73a82751c523b3fc31ff8194702e4af4db21dc20e55b30ecc2079c5d43cb7"}, + {file = "tokenizers-0.15.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:107089f135b4ae7817affe6264f8c7a5c5b4fd9a90f9439ed495f54fcea56fb4"}, + {file = "tokenizers-0.15.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0ff110ecc57b7aa4a594396525a3451ad70988e517237fe91c540997c4e50e29"}, + {file = "tokenizers-0.15.2-cp311-none-win32.whl", hash = "sha256:6d76f00f5c32da36c61f41c58346a4fa7f0a61be02f4301fd30ad59834977cc3"}, + {file = "tokenizers-0.15.2-cp311-none-win_amd64.whl", hash = "sha256:cc90102ed17271cf0a1262babe5939e0134b3890345d11a19c3145184b706055"}, + {file = "tokenizers-0.15.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f86593c18d2e6248e72fb91c77d413a815153b8ea4e31f7cd443bdf28e467670"}, + {file = "tokenizers-0.15.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0774bccc6608eca23eb9d620196687c8b2360624619623cf4ba9dc9bd53e8b51"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d0222c5b7c9b26c0b4822a82f6a7011de0a9d3060e1da176f66274b70f846b98"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3835738be1de66624fff2f4f6f6684775da4e9c00bde053be7564cbf3545cc66"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0143e7d9dcd811855c1ce1ab9bf5d96d29bf5e528fd6c7824d0465741e8c10fd"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db35825f6d54215f6b6009a7ff3eedee0848c99a6271c870d2826fbbedf31a38"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f5e64b0389a2be47091d8cc53c87859783b837ea1a06edd9d8e04004df55a5c"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e0480c452217edd35eca56fafe2029fb4d368b7c0475f8dfa3c5c9c400a7456"}, + {file = "tokenizers-0.15.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a33ab881c8fe70474980577e033d0bc9a27b7ab8272896e500708b212995d834"}, + {file = "tokenizers-0.15.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a308a607ca9de2c64c1b9ba79ec9a403969715a1b8ba5f998a676826f1a7039d"}, + {file = "tokenizers-0.15.2-cp312-none-win32.whl", hash = "sha256:b8fcfa81bcb9447df582c5bc96a031e6df4da2a774b8080d4f02c0c16b42be0b"}, + {file = "tokenizers-0.15.2-cp312-none-win_amd64.whl", hash = "sha256:38d7ab43c6825abfc0b661d95f39c7f8af2449364f01d331f3b51c94dcff7221"}, + {file = "tokenizers-0.15.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:38bfb0204ff3246ca4d5e726e8cc8403bfc931090151e6eede54d0e0cf162ef0"}, + {file = "tokenizers-0.15.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9c861d35e8286a53e06e9e28d030b5a05bcbf5ac9d7229e561e53c352a85b1fc"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:936bf3842db5b2048eaa53dade907b1160f318e7c90c74bfab86f1e47720bdd6"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:620beacc3373277700d0e27718aa8b25f7b383eb8001fba94ee00aeea1459d89"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2735ecbbf37e52db4ea970e539fd2d450d213517b77745114f92867f3fc246eb"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:473c83c5e2359bb81b0b6fde870b41b2764fcdd36d997485e07e72cc3a62264a"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:968fa1fb3c27398b28a4eca1cbd1e19355c4d3a6007f7398d48826bbe3a0f728"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:865c60ae6eaebdde7da66191ee9b7db52e542ed8ee9d2c653b6d190a9351b980"}, + {file = "tokenizers-0.15.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7c0d8b52664ab2d4a8d6686eb5effc68b78608a9008f086a122a7b2996befbab"}, + {file = "tokenizers-0.15.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:f33dfbdec3784093a9aebb3680d1f91336c56d86cc70ddf88708251da1fe9064"}, + {file = "tokenizers-0.15.2-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:d44ba80988ff9424e33e0a49445072ac7029d8c0e1601ad25a0ca5f41ed0c1d6"}, + {file = "tokenizers-0.15.2-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:dce74266919b892f82b1b86025a613956ea0ea62a4843d4c4237be2c5498ed3a"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0ef06b9707baeb98b316577acb04f4852239d856b93e9ec3a299622f6084e4be"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c73e2e74bbb07910da0d37c326869f34113137b23eadad3fc00856e6b3d9930c"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4eeb12daf02a59e29f578a865f55d87cd103ce62bd8a3a5874f8fdeaa82e336b"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ba9f6895af58487ca4f54e8a664a322f16c26bbb442effd01087eba391a719e"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ccec77aa7150e38eec6878a493bf8c263ff1fa8a62404e16c6203c64c1f16a26"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3f40604f5042ff210ba82743dda2b6aa3e55aa12df4e9f2378ee01a17e2855e"}, + {file = "tokenizers-0.15.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5645938a42d78c4885086767c70923abad047163d809c16da75d6b290cb30bbe"}, + {file = "tokenizers-0.15.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:05a77cbfebe28a61ab5c3891f9939cc24798b63fa236d84e5f29f3a85a200c00"}, + {file = "tokenizers-0.15.2-cp37-none-win32.whl", hash = "sha256:361abdc068e8afe9c5b818769a48624687fb6aaed49636ee39bec4e95e1a215b"}, + {file = "tokenizers-0.15.2-cp37-none-win_amd64.whl", hash = "sha256:7ef789f83eb0f9baeb4d09a86cd639c0a5518528f9992f38b28e819df397eb06"}, + {file = "tokenizers-0.15.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4fe1f74a902bee74a3b25aff180fbfbf4f8b444ab37c4d496af7afd13a784ed2"}, + {file = "tokenizers-0.15.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c4b89038a684f40a6b15d6b09f49650ac64d951ad0f2a3ea9169687bbf2a8ba"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d05a1b06f986d41aed5f2de464c003004b2df8aaf66f2b7628254bcbfb72a438"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:508711a108684111ec8af89d3a9e9e08755247eda27d0ba5e3c50e9da1600f6d"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:daa348f02d15160cb35439098ac96e3a53bacf35885072611cd9e5be7d333daa"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:494fdbe5932d3416de2a85fc2470b797e6f3226c12845cadf054dd906afd0442"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2d60f5246f4da9373f75ff18d64c69cbf60c3bca597290cea01059c336d2470"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93268e788825f52de4c7bdcb6ebc1fcd4a5442c02e730faa9b6b08f23ead0e24"}, + {file = "tokenizers-0.15.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6fc7083ab404019fc9acafe78662c192673c1e696bd598d16dc005bd663a5cf9"}, + {file = "tokenizers-0.15.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:41e39b41e5531d6b2122a77532dbea60e171ef87a3820b5a3888daa847df4153"}, + {file = "tokenizers-0.15.2-cp38-none-win32.whl", hash = "sha256:06cd0487b1cbfabefb2cc52fbd6b1f8d4c37799bd6c6e1641281adaa6b2504a7"}, + {file = "tokenizers-0.15.2-cp38-none-win_amd64.whl", hash = "sha256:5179c271aa5de9c71712e31cb5a79e436ecd0d7532a408fa42a8dbfa4bc23fd9"}, + {file = "tokenizers-0.15.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:82f8652a74cc107052328b87ea8b34291c0f55b96d8fb261b3880216a9f9e48e"}, + {file = "tokenizers-0.15.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:02458bee6f5f3139f1ebbb6d042b283af712c0981f5bc50edf771d6b762d5e4f"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c9a09cd26cca2e1c349f91aa665309ddb48d71636370749414fbf67bc83c5343"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:158be8ea8554e5ed69acc1ce3fbb23a06060bd4bbb09029431ad6b9a466a7121"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ddba9a2b0c8c81633eca0bb2e1aa5b3a15362b1277f1ae64176d0f6eba78ab1"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ef5dd1d39797044642dbe53eb2bc56435308432e9c7907728da74c69ee2adca"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:454c203164e07a860dbeb3b1f4a733be52b0edbb4dd2e5bd75023ffa8b49403a"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cf6b7f1d4dc59af960e6ffdc4faffe6460bbfa8dce27a58bf75755ffdb2526d"}, + {file = "tokenizers-0.15.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2ef09bbc16519f6c25d0c7fc0c6a33a6f62923e263c9d7cca4e58b8c61572afb"}, + {file = "tokenizers-0.15.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c9a2ebdd2ad4ec7a68e7615086e633857c85e2f18025bd05d2a4399e6c5f7169"}, + {file = "tokenizers-0.15.2-cp39-none-win32.whl", hash = "sha256:918fbb0eab96fe08e72a8c2b5461e9cce95585d82a58688e7f01c2bd546c79d0"}, + {file = "tokenizers-0.15.2-cp39-none-win_amd64.whl", hash = "sha256:524e60da0135e106b254bd71f0659be9f89d83f006ea9093ce4d1fab498c6d0d"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6a9b648a58281c4672212fab04e60648fde574877d0139cd4b4f93fe28ca8944"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:7c7d18b733be6bbca8a55084027f7be428c947ddf871c500ee603e375013ffba"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:13ca3611de8d9ddfbc4dc39ef54ab1d2d4aaa114ac8727dfdc6a6ec4be017378"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:237d1bf3361cf2e6463e6c140628e6406766e8b27274f5fcc62c747ae3c6f094"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67a0fe1e49e60c664915e9fb6b0cb19bac082ab1f309188230e4b2920230edb3"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4e022fe65e99230b8fd89ebdfea138c24421f91c1a4f4781a8f5016fd5cdfb4d"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d857be2df69763362ac699f8b251a8cd3fac9d21893de129bc788f8baaef2693"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:708bb3e4283177236309e698da5fcd0879ce8fd37457d7c266d16b550bcbbd18"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:64c35e09e9899b72a76e762f9854e8750213f67567787d45f37ce06daf57ca78"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1257f4394be0d3b00de8c9e840ca5601d0a4a8438361ce9c2b05c7d25f6057b"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02272fe48280e0293a04245ca5d919b2c94a48b408b55e858feae9618138aeda"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dc3ad9ebc76eabe8b1d7c04d38be884b8f9d60c0cdc09b0aa4e3bcf746de0388"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:32e16bdeffa7c4f46bf2152172ca511808b952701d13e7c18833c0b73cb5c23f"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:fb16ba563d59003028b678d2361a27f7e4ae0ab29c7a80690efa20d829c81fdb"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:2277c36d2d6cdb7876c274547921a42425b6810d38354327dd65a8009acf870c"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1cf75d32e8d250781940d07f7eece253f2fe9ecdb1dc7ba6e3833fa17b82fcbc"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1b3b31884dc8e9b21508bb76da80ebf7308fdb947a17affce815665d5c4d028"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b10122d8d8e30afb43bb1fe21a3619f62c3e2574bff2699cf8af8b0b6c5dc4a3"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d88b96ff0fe8e91f6ef01ba50b0d71db5017fa4e3b1d99681cec89a85faf7bf7"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:37aaec5a52e959892870a7c47cef80c53797c0db9149d458460f4f31e2fb250e"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e2ea752f2b0fe96eb6e2f3adbbf4d72aaa1272079b0dfa1145507bd6a5d537e6"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:4b19a808d8799fda23504a5cd31d2f58e6f52f140380082b352f877017d6342b"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:64c86e5e068ac8b19204419ed8ca90f9d25db20578f5881e337d203b314f4104"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de19c4dc503c612847edf833c82e9f73cd79926a384af9d801dcf93f110cea4e"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea09acd2fe3324174063d61ad620dec3bcf042b495515f27f638270a7d466e8b"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:cf27fd43472e07b57cf420eee1e814549203d56de00b5af8659cb99885472f1f"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:7ca22bd897537a0080521445d91a58886c8c04084a6a19e6c78c586e0cfa92a5"}, + {file = "tokenizers-0.15.2.tar.gz", hash = "sha256:e6e9c6e019dd5484be5beafc775ae6c925f4c69a3487040ed09b45e13df2cb91"}, +] + +[package.dependencies] +huggingface_hub = ">=0.16.4,<1.0" + +[package.extras] +dev = ["tokenizers[testing]"] +docs = ["setuptools_rust", "sphinx", "sphinx_rtd_theme"] +testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "tqdm" +version = "4.66.2" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tqdm-4.66.2-py3-none-any.whl", hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"}, + {file = "tqdm-4.66.2.tar.gz", hash = "sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + +[[package]] +name = "typer" +version = "0.12.3" +description = "Typer, build great CLIs. Easy to code. Based on Python type hints." +optional = false +python-versions = ">=3.7" +files = [ + {file = "typer-0.12.3-py3-none-any.whl", hash = "sha256:070d7ca53f785acbccba8e7d28b08dcd88f79f1fbda035ade0aecec71ca5c914"}, + {file = "typer-0.12.3.tar.gz", hash = "sha256:49e73131481d804288ef62598d97a1ceef3058905aa536a1134f90891ba35482"}, +] + +[package.dependencies] +click = ">=8.0.0" +rich = ">=10.11.0" +shellingham = ">=1.3.0" +typing-extensions = ">=3.7.4.3" + +[[package]] +name = "types-pytz" +version = "2024.1.0.20240203" +description = "Typing stubs for pytz" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-pytz-2024.1.0.20240203.tar.gz", hash = "sha256:c93751ee20dfc6e054a0148f8f5227b9a00b79c90a4d3c9f464711a73179c89e"}, + {file = "types_pytz-2024.1.0.20240203-py3-none-any.whl", hash = "sha256:9679eef0365db3af91ef7722c199dbb75ee5c1b67e3c4dd7bfbeb1b8a71c21a3"}, +] + +[[package]] +name = "typing-extensions" +version = "4.11.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, + {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, +] + +[[package]] +name = "typing-inspect" +version = "0.9.0" +description = "Runtime inspection utilities for typing module." +optional = false +python-versions = "*" +files = [ + {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, + {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, +] + +[package.dependencies] +mypy-extensions = ">=0.3.0" +typing-extensions = ">=3.7.4" + +[[package]] +name = "tzdata" +version = "2024.1" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, +] + +[[package]] +name = "url-normalize" +version = "1.4.3" +description = "URL normalization for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, + {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "uvicorn" +version = "0.29.0" +description = "The lightning-fast ASGI server." +optional = false +python-versions = ">=3.8" +files = [ + {file = "uvicorn-0.29.0-py3-none-any.whl", hash = "sha256:2c2aac7ff4f4365c206fd773a39bf4ebd1047c238f8b8268ad996829323473de"}, + {file = "uvicorn-0.29.0.tar.gz", hash = "sha256:6a69214c0b6a087462412670b3ef21224fa48cae0e452b5883e8e8bdfdd11dd0"}, +] + +[package.dependencies] +click = ">=7.0" +colorama = {version = ">=0.4", optional = true, markers = "sys_platform == \"win32\" and extra == \"standard\""} +h11 = ">=0.8" +httptools = {version = ">=0.5.0", optional = true, markers = "extra == \"standard\""} +python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} +pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""} +typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} +uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\" and extra == \"standard\""} +watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} +websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""} + +[package.extras] +standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] + +[[package]] +name = "uvloop" +version = "0.19.0" +description = "Fast implementation of asyncio event loop on top of libuv" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de4313d7f575474c8f5a12e163f6d89c0a878bc49219641d49e6f1444369a90e"}, + {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5588bd21cf1fcf06bded085f37e43ce0e00424197e7c10e77afd4bbefffef428"}, + {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b1fd71c3843327f3bbc3237bedcdb6504fd50368ab3e04d0410e52ec293f5b8"}, + {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a05128d315e2912791de6088c34136bfcdd0c7cbc1cf85fd6fd1bb321b7c849"}, + {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:cd81bdc2b8219cb4b2556eea39d2e36bfa375a2dd021404f90a62e44efaaf957"}, + {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5f17766fb6da94135526273080f3455a112f82570b2ee5daa64d682387fe0dcd"}, + {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4ce6b0af8f2729a02a5d1575feacb2a94fc7b2e983868b009d51c9a9d2149bef"}, + {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:31e672bb38b45abc4f26e273be83b72a0d28d074d5b370fc4dcf4c4eb15417d2"}, + {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:570fc0ed613883d8d30ee40397b79207eedd2624891692471808a95069a007c1"}, + {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5138821e40b0c3e6c9478643b4660bd44372ae1e16a322b8fc07478f92684e24"}, + {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:91ab01c6cd00e39cde50173ba4ec68a1e578fee9279ba64f5221810a9e786533"}, + {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:47bf3e9312f63684efe283f7342afb414eea4d3011542155c7e625cd799c3b12"}, + {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:da8435a3bd498419ee8c13c34b89b5005130a476bda1d6ca8cfdde3de35cd650"}, + {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:02506dc23a5d90e04d4f65c7791e65cf44bd91b37f24cfc3ef6cf2aff05dc7ec"}, + {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2693049be9d36fef81741fddb3f441673ba12a34a704e7b4361efb75cf30befc"}, + {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7010271303961c6f0fe37731004335401eb9075a12680738731e9c92ddd96ad6"}, + {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5daa304d2161d2918fa9a17d5635099a2f78ae5b5960e742b2fcfbb7aefaa593"}, + {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7207272c9520203fea9b93843bb775d03e1cf88a80a936ce760f60bb5add92f3"}, + {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:78ab247f0b5671cc887c31d33f9b3abfb88d2614b84e4303f1a63b46c046c8bd"}, + {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:472d61143059c84947aa8bb74eabbace30d577a03a1805b77933d6bd13ddebbd"}, + {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45bf4c24c19fb8a50902ae37c5de50da81de4922af65baf760f7c0c42e1088be"}, + {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:271718e26b3e17906b28b67314c45d19106112067205119dddbd834c2b7ce797"}, + {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:34175c9fd2a4bc3adc1380e1261f60306344e3407c20a4d684fd5f3be010fa3d"}, + {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e27f100e1ff17f6feeb1f33968bc185bf8ce41ca557deee9d9bbbffeb72030b7"}, + {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:13dfdf492af0aa0a0edf66807d2b465607d11c4fa48f4a1fd41cbea5b18e8e8b"}, + {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6e3d4e85ac060e2342ff85e90d0c04157acb210b9ce508e784a944f852a40e67"}, + {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ca4956c9ab567d87d59d49fa3704cf29e37109ad348f2d5223c9bf761a332e7"}, + {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f467a5fd23b4fc43ed86342641f3936a68ded707f4627622fa3f82a120e18256"}, + {file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:492e2c32c2af3f971473bc22f086513cedfc66a130756145a931a90c3958cb17"}, + {file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2df95fca285a9f5bfe730e51945ffe2fa71ccbfdde3b0da5772b4ee4f2e770d5"}, + {file = "uvloop-0.19.0.tar.gz", hash = "sha256:0246f4fd1bf2bf702e06b0d45ee91677ee5c31242f39aab4ea6fe0c51aedd0fd"}, +] + +[package.extras] +docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] +test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"] + +[[package]] +name = "watchfiles" +version = "0.21.0" +description = "Simple, modern and high performance file watching and code reload in python." +optional = false +python-versions = ">=3.8" +files = [ + {file = "watchfiles-0.21.0-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:27b4035013f1ea49c6c0b42d983133b136637a527e48c132d368eb19bf1ac6aa"}, + {file = "watchfiles-0.21.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c81818595eff6e92535ff32825f31c116f867f64ff8cdf6562cd1d6b2e1e8f3e"}, + {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6c107ea3cf2bd07199d66f156e3ea756d1b84dfd43b542b2d870b77868c98c03"}, + {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d9ac347653ebd95839a7c607608703b20bc07e577e870d824fa4801bc1cb124"}, + {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5eb86c6acb498208e7663ca22dbe68ca2cf42ab5bf1c776670a50919a56e64ab"}, + {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f564bf68404144ea6b87a78a3f910cc8de216c6b12a4cf0b27718bf4ec38d303"}, + {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d0f32ebfaa9c6011f8454994f86108c2eb9c79b8b7de00b36d558cadcedaa3d"}, + {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6d45d9b699ecbac6c7bd8e0a2609767491540403610962968d258fd6405c17c"}, + {file = "watchfiles-0.21.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:aff06b2cac3ef4616e26ba17a9c250c1fe9dd8a5d907d0193f84c499b1b6e6a9"}, + {file = "watchfiles-0.21.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d9792dff410f266051025ecfaa927078b94cc7478954b06796a9756ccc7e14a9"}, + {file = "watchfiles-0.21.0-cp310-none-win32.whl", hash = "sha256:214cee7f9e09150d4fb42e24919a1e74d8c9b8a9306ed1474ecaddcd5479c293"}, + {file = "watchfiles-0.21.0-cp310-none-win_amd64.whl", hash = "sha256:1ad7247d79f9f55bb25ab1778fd47f32d70cf36053941f07de0b7c4e96b5d235"}, + {file = "watchfiles-0.21.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:668c265d90de8ae914f860d3eeb164534ba2e836811f91fecc7050416ee70aa7"}, + {file = "watchfiles-0.21.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a23092a992e61c3a6a70f350a56db7197242f3490da9c87b500f389b2d01eef"}, + {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e7941bbcfdded9c26b0bf720cb7e6fd803d95a55d2c14b4bd1f6a2772230c586"}, + {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11cd0c3100e2233e9c53106265da31d574355c288e15259c0d40a4405cbae317"}, + {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d78f30cbe8b2ce770160d3c08cff01b2ae9306fe66ce899b73f0409dc1846c1b"}, + {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6674b00b9756b0af620aa2a3346b01f8e2a3dc729d25617e1b89cf6af4a54eb1"}, + {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd7ac678b92b29ba630d8c842d8ad6c555abda1b9ef044d6cc092dacbfc9719d"}, + {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c873345680c1b87f1e09e0eaf8cf6c891b9851d8b4d3645e7efe2ec20a20cc7"}, + {file = "watchfiles-0.21.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:49f56e6ecc2503e7dbe233fa328b2be1a7797d31548e7a193237dcdf1ad0eee0"}, + {file = "watchfiles-0.21.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:02d91cbac553a3ad141db016e3350b03184deaafeba09b9d6439826ee594b365"}, + {file = "watchfiles-0.21.0-cp311-none-win32.whl", hash = "sha256:ebe684d7d26239e23d102a2bad2a358dedf18e462e8808778703427d1f584400"}, + {file = "watchfiles-0.21.0-cp311-none-win_amd64.whl", hash = "sha256:4566006aa44cb0d21b8ab53baf4b9c667a0ed23efe4aaad8c227bfba0bf15cbe"}, + {file = "watchfiles-0.21.0-cp311-none-win_arm64.whl", hash = "sha256:c550a56bf209a3d987d5a975cdf2063b3389a5d16caf29db4bdddeae49f22078"}, + {file = "watchfiles-0.21.0-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:51ddac60b96a42c15d24fbdc7a4bfcd02b5a29c047b7f8bf63d3f6f5a860949a"}, + {file = "watchfiles-0.21.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:511f0b034120cd1989932bf1e9081aa9fb00f1f949fbd2d9cab6264916ae89b1"}, + {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cfb92d49dbb95ec7a07511bc9efb0faff8fe24ef3805662b8d6808ba8409a71a"}, + {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f92944efc564867bbf841c823c8b71bb0be75e06b8ce45c084b46411475a915"}, + {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:642d66b75eda909fd1112d35c53816d59789a4b38c141a96d62f50a3ef9b3360"}, + {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d23bcd6c8eaa6324fe109d8cac01b41fe9a54b8c498af9ce464c1aeeb99903d6"}, + {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18d5b4da8cf3e41895b34e8c37d13c9ed294954907929aacd95153508d5d89d7"}, + {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b8d1eae0f65441963d805f766c7e9cd092f91e0c600c820c764a4ff71a0764c"}, + {file = "watchfiles-0.21.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1fd9a5205139f3c6bb60d11f6072e0552f0a20b712c85f43d42342d162be1235"}, + {file = "watchfiles-0.21.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a1e3014a625bcf107fbf38eece0e47fa0190e52e45dc6eee5a8265ddc6dc5ea7"}, + {file = "watchfiles-0.21.0-cp312-none-win32.whl", hash = "sha256:9d09869f2c5a6f2d9df50ce3064b3391d3ecb6dced708ad64467b9e4f2c9bef3"}, + {file = "watchfiles-0.21.0-cp312-none-win_amd64.whl", hash = "sha256:18722b50783b5e30a18a8a5db3006bab146d2b705c92eb9a94f78c72beb94094"}, + {file = "watchfiles-0.21.0-cp312-none-win_arm64.whl", hash = "sha256:a3b9bec9579a15fb3ca2d9878deae789df72f2b0fdaf90ad49ee389cad5edab6"}, + {file = "watchfiles-0.21.0-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:4ea10a29aa5de67de02256a28d1bf53d21322295cb00bd2d57fcd19b850ebd99"}, + {file = "watchfiles-0.21.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:40bca549fdc929b470dd1dbfcb47b3295cb46a6d2c90e50588b0a1b3bd98f429"}, + {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9b37a7ba223b2f26122c148bb8d09a9ff312afca998c48c725ff5a0a632145f7"}, + {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec8c8900dc5c83650a63dd48c4d1d245343f904c4b64b48798c67a3767d7e165"}, + {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8ad3fe0a3567c2f0f629d800409cd528cb6251da12e81a1f765e5c5345fd0137"}, + {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d353c4cfda586db2a176ce42c88f2fc31ec25e50212650c89fdd0f560ee507b"}, + {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:83a696da8922314ff2aec02987eefb03784f473281d740bf9170181829133765"}, + {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a03651352fc20975ee2a707cd2d74a386cd303cc688f407296064ad1e6d1562"}, + {file = "watchfiles-0.21.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3ad692bc7792be8c32918c699638b660c0de078a6cbe464c46e1340dadb94c19"}, + {file = "watchfiles-0.21.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06247538e8253975bdb328e7683f8515ff5ff041f43be6c40bff62d989b7d0b0"}, + {file = "watchfiles-0.21.0-cp38-none-win32.whl", hash = "sha256:9a0aa47f94ea9a0b39dd30850b0adf2e1cd32a8b4f9c7aa443d852aacf9ca214"}, + {file = "watchfiles-0.21.0-cp38-none-win_amd64.whl", hash = "sha256:8d5f400326840934e3507701f9f7269247f7c026d1b6cfd49477d2be0933cfca"}, + {file = "watchfiles-0.21.0-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:7f762a1a85a12cc3484f77eee7be87b10f8c50b0b787bb02f4e357403cad0c0e"}, + {file = "watchfiles-0.21.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6e9be3ef84e2bb9710f3f777accce25556f4a71e15d2b73223788d528fcc2052"}, + {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4c48a10d17571d1275701e14a601e36959ffada3add8cdbc9e5061a6e3579a5d"}, + {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c889025f59884423428c261f212e04d438de865beda0b1e1babab85ef4c0f01"}, + {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:66fac0c238ab9a2e72d026b5fb91cb902c146202bbd29a9a1a44e8db7b710b6f"}, + {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b4a21f71885aa2744719459951819e7bf5a906a6448a6b2bbce8e9cc9f2c8128"}, + {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c9198c989f47898b2c22201756f73249de3748e0fc9de44adaf54a8b259cc0c"}, + {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8f57c4461cd24fda22493109c45b3980863c58a25b8bec885ca8bea6b8d4b28"}, + {file = "watchfiles-0.21.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:853853cbf7bf9408b404754b92512ebe3e3a83587503d766d23e6bf83d092ee6"}, + {file = "watchfiles-0.21.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d5b1dc0e708fad9f92c296ab2f948af403bf201db8fb2eb4c8179db143732e49"}, + {file = "watchfiles-0.21.0-cp39-none-win32.whl", hash = "sha256:59137c0c6826bd56c710d1d2bda81553b5e6b7c84d5a676747d80caf0409ad94"}, + {file = "watchfiles-0.21.0-cp39-none-win_amd64.whl", hash = "sha256:6cb8fdc044909e2078c248986f2fc76f911f72b51ea4a4fbbf472e01d14faa58"}, + {file = "watchfiles-0.21.0-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ab03a90b305d2588e8352168e8c5a1520b721d2d367f31e9332c4235b30b8994"}, + {file = "watchfiles-0.21.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:927c589500f9f41e370b0125c12ac9e7d3a2fd166b89e9ee2828b3dda20bfe6f"}, + {file = "watchfiles-0.21.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bd467213195e76f838caf2c28cd65e58302d0254e636e7c0fca81efa4a2e62c"}, + {file = "watchfiles-0.21.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02b73130687bc3f6bb79d8a170959042eb56eb3a42df3671c79b428cd73f17cc"}, + {file = "watchfiles-0.21.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:08dca260e85ffae975448e344834d765983237ad6dc308231aa16e7933db763e"}, + {file = "watchfiles-0.21.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:3ccceb50c611c433145502735e0370877cced72a6c70fd2410238bcbc7fe51d8"}, + {file = "watchfiles-0.21.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57d430f5fb63fea141ab71ca9c064e80de3a20b427ca2febcbfcef70ff0ce895"}, + {file = "watchfiles-0.21.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dd5fad9b9c0dd89904bbdea978ce89a2b692a7ee8a0ce19b940e538c88a809c"}, + {file = "watchfiles-0.21.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:be6dd5d52b73018b21adc1c5d28ac0c68184a64769052dfeb0c5d9998e7f56a2"}, + {file = "watchfiles-0.21.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b3cab0e06143768499384a8a5efb9c4dc53e19382952859e4802f294214f36ec"}, + {file = "watchfiles-0.21.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c6ed10c2497e5fedadf61e465b3ca12a19f96004c15dcffe4bd442ebadc2d85"}, + {file = "watchfiles-0.21.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43babacef21c519bc6631c5fce2a61eccdfc011b4bcb9047255e9620732c8097"}, + {file = "watchfiles-0.21.0.tar.gz", hash = "sha256:c76c635fabf542bb78524905718c39f736a98e5ab25b23ec6d4abede1a85a6a3"}, +] + +[package.dependencies] +anyio = ">=3.0.0" + +[[package]] +name = "wcmatch" +version = "8.4" +description = "Wildcard/glob file name matcher." +optional = false +python-versions = ">=3.7" +files = [ + {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, + {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, +] + +[package.dependencies] +bracex = ">=2.1.1" + +[[package]] +name = "websocket-client" +version = "1.7.0" +description = "WebSocket client for Python with low level API options" +optional = false +python-versions = ">=3.8" +files = [ + {file = "websocket-client-1.7.0.tar.gz", hash = "sha256:10e511ea3a8c744631d3bd77e61eb17ed09304c413ad42cf6ddfa4c7787e8fe6"}, + {file = "websocket_client-1.7.0-py3-none-any.whl", hash = "sha256:f4c3d22fec12a2461427a29957ff07d35098ee2d976d3ba244e688b8b4057588"}, +] + +[package.extras] +docs = ["Sphinx (>=6.0)", "sphinx-rtd-theme (>=1.1.0)"] +optional = ["python-socks", "wsaccel"] +test = ["websockets"] + +[[package]] +name = "websockets" +version = "12.0" +description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "websockets-12.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d554236b2a2006e0ce16315c16eaa0d628dab009c33b63ea03f41c6107958374"}, + {file = "websockets-12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2d225bb6886591b1746b17c0573e29804619c8f755b5598d875bb4235ea639be"}, + {file = "websockets-12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eb809e816916a3b210bed3c82fb88eaf16e8afcf9c115ebb2bacede1797d2547"}, + {file = "websockets-12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c588f6abc13f78a67044c6b1273a99e1cf31038ad51815b3b016ce699f0d75c2"}, + {file = "websockets-12.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5aa9348186d79a5f232115ed3fa9020eab66d6c3437d72f9d2c8ac0c6858c558"}, + {file = "websockets-12.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6350b14a40c95ddd53e775dbdbbbc59b124a5c8ecd6fbb09c2e52029f7a9f480"}, + {file = "websockets-12.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:70ec754cc2a769bcd218ed8d7209055667b30860ffecb8633a834dde27d6307c"}, + {file = "websockets-12.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e96f5ed1b83a8ddb07909b45bd94833b0710f738115751cdaa9da1fb0cb66e8"}, + {file = "websockets-12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4d87be612cbef86f994178d5186add3d94e9f31cc3cb499a0482b866ec477603"}, + {file = "websockets-12.0-cp310-cp310-win32.whl", hash = "sha256:befe90632d66caaf72e8b2ed4d7f02b348913813c8b0a32fae1cc5fe3730902f"}, + {file = "websockets-12.0-cp310-cp310-win_amd64.whl", hash = "sha256:363f57ca8bc8576195d0540c648aa58ac18cf85b76ad5202b9f976918f4219cf"}, + {file = "websockets-12.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5d873c7de42dea355d73f170be0f23788cf3fa9f7bed718fd2830eefedce01b4"}, + {file = "websockets-12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3f61726cae9f65b872502ff3c1496abc93ffbe31b278455c418492016e2afc8f"}, + {file = "websockets-12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed2fcf7a07334c77fc8a230755c2209223a7cc44fc27597729b8ef5425aa61a3"}, + {file = "websockets-12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e332c210b14b57904869ca9f9bf4ca32f5427a03eeb625da9b616c85a3a506c"}, + {file = "websockets-12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5693ef74233122f8ebab026817b1b37fe25c411ecfca084b29bc7d6efc548f45"}, + {file = "websockets-12.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e9e7db18b4539a29cc5ad8c8b252738a30e2b13f033c2d6e9d0549b45841c04"}, + {file = "websockets-12.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6e2df67b8014767d0f785baa98393725739287684b9f8d8a1001eb2839031447"}, + {file = "websockets-12.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bea88d71630c5900690fcb03161ab18f8f244805c59e2e0dc4ffadae0a7ee0ca"}, + {file = "websockets-12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dff6cdf35e31d1315790149fee351f9e52978130cef6c87c4b6c9b3baf78bc53"}, + {file = "websockets-12.0-cp311-cp311-win32.whl", hash = "sha256:3e3aa8c468af01d70332a382350ee95f6986db479ce7af14d5e81ec52aa2b402"}, + {file = "websockets-12.0-cp311-cp311-win_amd64.whl", hash = "sha256:25eb766c8ad27da0f79420b2af4b85d29914ba0edf69f547cc4f06ca6f1d403b"}, + {file = "websockets-12.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0e6e2711d5a8e6e482cacb927a49a3d432345dfe7dea8ace7b5790df5932e4df"}, + {file = "websockets-12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:dbcf72a37f0b3316e993e13ecf32f10c0e1259c28ffd0a85cee26e8549595fbc"}, + {file = "websockets-12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12743ab88ab2af1d17dd4acb4645677cb7063ef4db93abffbf164218a5d54c6b"}, + {file = "websockets-12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b645f491f3c48d3f8a00d1fce07445fab7347fec54a3e65f0725d730d5b99cb"}, + {file = "websockets-12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9893d1aa45a7f8b3bc4510f6ccf8db8c3b62120917af15e3de247f0780294b92"}, + {file = "websockets-12.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f38a7b376117ef7aff996e737583172bdf535932c9ca021746573bce40165ed"}, + {file = "websockets-12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f764ba54e33daf20e167915edc443b6f88956f37fb606449b4a5b10ba42235a5"}, + {file = "websockets-12.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1e4b3f8ea6a9cfa8be8484c9221ec0257508e3a1ec43c36acdefb2a9c3b00aa2"}, + {file = "websockets-12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9fdf06fd06c32205a07e47328ab49c40fc1407cdec801d698a7c41167ea45113"}, + {file = "websockets-12.0-cp312-cp312-win32.whl", hash = "sha256:baa386875b70cbd81798fa9f71be689c1bf484f65fd6fb08d051a0ee4e79924d"}, + {file = "websockets-12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ae0a5da8f35a5be197f328d4727dbcfafa53d1824fac3d96cdd3a642fe09394f"}, + {file = "websockets-12.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5f6ffe2c6598f7f7207eef9a1228b6f5c818f9f4d53ee920aacd35cec8110438"}, + {file = "websockets-12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9edf3fc590cc2ec20dc9d7a45108b5bbaf21c0d89f9fd3fd1685e223771dc0b2"}, + {file = "websockets-12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8572132c7be52632201a35f5e08348137f658e5ffd21f51f94572ca6c05ea81d"}, + {file = "websockets-12.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:604428d1b87edbf02b233e2c207d7d528460fa978f9e391bd8aaf9c8311de137"}, + {file = "websockets-12.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a9d160fd080c6285e202327aba140fc9a0d910b09e423afff4ae5cbbf1c7205"}, + {file = "websockets-12.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87b4aafed34653e465eb77b7c93ef058516cb5acf3eb21e42f33928616172def"}, + {file = "websockets-12.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b2ee7288b85959797970114deae81ab41b731f19ebcd3bd499ae9ca0e3f1d2c8"}, + {file = "websockets-12.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7fa3d25e81bfe6a89718e9791128398a50dec6d57faf23770787ff441d851967"}, + {file = "websockets-12.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a571f035a47212288e3b3519944f6bf4ac7bc7553243e41eac50dd48552b6df7"}, + {file = "websockets-12.0-cp38-cp38-win32.whl", hash = "sha256:3c6cc1360c10c17463aadd29dd3af332d4a1adaa8796f6b0e9f9df1fdb0bad62"}, + {file = "websockets-12.0-cp38-cp38-win_amd64.whl", hash = "sha256:1bf386089178ea69d720f8db6199a0504a406209a0fc23e603b27b300fdd6892"}, + {file = "websockets-12.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ab3d732ad50a4fbd04a4490ef08acd0517b6ae6b77eb967251f4c263011a990d"}, + {file = "websockets-12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1d9697f3337a89691e3bd8dc56dea45a6f6d975f92e7d5f773bc715c15dde28"}, + {file = "websockets-12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1df2fbd2c8a98d38a66f5238484405b8d1d16f929bb7a33ed73e4801222a6f53"}, + {file = "websockets-12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23509452b3bc38e3a057382c2e941d5ac2e01e251acce7adc74011d7d8de434c"}, + {file = "websockets-12.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e5fc14ec6ea568200ea4ef46545073da81900a2b67b3e666f04adf53ad452ec"}, + {file = "websockets-12.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46e71dbbd12850224243f5d2aeec90f0aaa0f2dde5aeeb8fc8df21e04d99eff9"}, + {file = "websockets-12.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b81f90dcc6c85a9b7f29873beb56c94c85d6f0dac2ea8b60d995bd18bf3e2aae"}, + {file = "websockets-12.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a02413bc474feda2849c59ed2dfb2cddb4cd3d2f03a2fedec51d6e959d9b608b"}, + {file = "websockets-12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bbe6013f9f791944ed31ca08b077e26249309639313fff132bfbf3ba105673b9"}, + {file = "websockets-12.0-cp39-cp39-win32.whl", hash = "sha256:cbe83a6bbdf207ff0541de01e11904827540aa069293696dd528a6640bd6a5f6"}, + {file = "websockets-12.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc4e7fa5414512b481a2483775a8e8be7803a35b30ca805afa4998a84f9fd9e8"}, + {file = "websockets-12.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:248d8e2446e13c1d4326e0a6a4e9629cb13a11195051a73acf414812700badbd"}, + {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44069528d45a933997a6fef143030d8ca8042f0dfaad753e2906398290e2870"}, + {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4e37d36f0d19f0a4413d3e18c0d03d0c268ada2061868c1e6f5ab1a6d575077"}, + {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d829f975fc2e527a3ef2f9c8f25e553eb7bc779c6665e8e1d52aa22800bb38b"}, + {file = "websockets-12.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2c71bd45a777433dd9113847af751aae36e448bc6b8c361a566cb043eda6ec30"}, + {file = "websockets-12.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0bee75f400895aef54157b36ed6d3b308fcab62e5260703add87f44cee9c82a6"}, + {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:423fc1ed29f7512fceb727e2d2aecb952c46aa34895e9ed96071821309951123"}, + {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27a5e9964ef509016759f2ef3f2c1e13f403725a5e6a1775555994966a66e931"}, + {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3181df4583c4d3994d31fb235dc681d2aaad744fbdbf94c4802485ececdecf2"}, + {file = "websockets-12.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b067cb952ce8bf40115f6c19f478dc71c5e719b7fbaa511359795dfd9d1a6468"}, + {file = "websockets-12.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:00700340c6c7ab788f176d118775202aadea7602c5cc6be6ae127761c16d6b0b"}, + {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e469d01137942849cff40517c97a30a93ae79917752b34029f0ec72df6b46399"}, + {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffefa1374cd508d633646d51a8e9277763a9b78ae71324183693959cf94635a7"}, + {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba0cab91b3956dfa9f512147860783a1829a8d905ee218a9837c18f683239611"}, + {file = "websockets-12.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2cb388a5bfb56df4d9a406783b7f9dbefb888c09b71629351cc6b036e9259370"}, + {file = "websockets-12.0-py3-none-any.whl", hash = "sha256:dc284bbc8d7c78a6c69e0c7325ab46ee5e40bb4d50e494d8131a07ef47500e9e"}, + {file = "websockets-12.0.tar.gz", hash = "sha256:81df9cbcbb6c260de1e007e58c011bfebe2dafc8435107b0537f393dd38c8b1b"}, +] + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[[package]] +name = "yarl" +version = "1.9.4" +description = "Yet another URL library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, + {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, + {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, + {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, + {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, + {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, + {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, + {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, + {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, + {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, + {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, + {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, + {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, + {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, + {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, + {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" + +[[package]] +name = "zipp" +version = "3.18.1" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, + {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9,<3.12" +content-hash = "0ce7a4a0ff7491770d8dc7e3524afb8288cf0467d47985bbcf978714ec5d1656" diff --git a/airbyte-integrations/connectors/destination-chroma/pyproject.toml b/airbyte-integrations/connectors/destination-chroma/pyproject.toml new file mode 100644 index 0000000000000..d9b091ca95c16 --- /dev/null +++ b/airbyte-integrations/connectors/destination-chroma/pyproject.toml @@ -0,0 +1,30 @@ +[build-system] +requires = [ "poetry-core>=1.0.0",] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +name = "airbyte-destination-chroma" +version = "0.0.10" +description = "Airbyte destination implementation for Chroma." +authors = ["Airbyte "] +license = "MIT" +readme = "README.md" +documentation = "https://docs.airbyte.com/integrations/destinations/chroma" +homepage = "https://airbyte.com" +repository = "https://github.com/airbytehq/airbyte" + +[[tool.poetry.packages]] +include = "destination_chroma" + +[tool.poetry.dependencies] +python = "^3.9,<3.12" +airbyte-cdk = {version = "0.81.6", extras = ["vector-db-based"]} +chromadb = "*" + +[tool.poetry.group.dev.dependencies] +pytest = "^7.2" +ruff = "^0.3.2" +mypy = "^1.9.0" + +[tool.poetry.scripts] +destination-chroma = "destination_chroma.run:run" diff --git a/airbyte-integrations/connectors/destination-chroma/requirements.txt b/airbyte-integrations/connectors/destination-chroma/requirements.txt deleted file mode 100644 index d6e1198b1ab1f..0000000000000 --- a/airbyte-integrations/connectors/destination-chroma/requirements.txt +++ /dev/null @@ -1 +0,0 @@ --e . diff --git a/airbyte-integrations/connectors/destination-chroma/setup.py b/airbyte-integrations/connectors/destination-chroma/setup.py deleted file mode 100644 index ae2f701634525..0000000000000 --- a/airbyte-integrations/connectors/destination-chroma/setup.py +++ /dev/null @@ -1,26 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = [ - "airbyte-cdk[vector-db-based]==0.57.0", - "chromadb", -] - -TEST_REQUIREMENTS = ["pytest~=6.2"] - -setup( - name="destination_chroma", - description="Destination implementation for Chroma.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={"": ["*.json"]}, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/destination-databricks/metadata.yaml b/airbyte-integrations/connectors/destination-databricks/metadata.yaml index 8d7eeeb33ee73..99acfabe8250c 100644 --- a/airbyte-integrations/connectors/destination-databricks/metadata.yaml +++ b/airbyte-integrations/connectors/destination-databricks/metadata.yaml @@ -2,7 +2,7 @@ data: connectorSubtype: database connectorType: destination definitionId: 072d5540-f236-4294-ba7c-ade8fd918496 - dockerImageTag: 1.1.1 + dockerImageTag: 1.1.2 dockerRepository: airbyte/destination-databricks githubIssueLabel: destination-databricks icon: databricks.svg diff --git a/airbyte-integrations/connectors/destination-databricks/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-databricks/src/main/resources/spec.json index 5331b730b2583..ae6a336dab77d 100644 --- a/airbyte-integrations/connectors/destination-databricks/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/destination-databricks/src/main/resources/spec.json @@ -156,7 +156,6 @@ "me-central-1", "me-south-1", "sa-east-1", - "sa-east-1", "us-east-1", "us-east-2", "us-gov-east-1", diff --git a/airbyte-integrations/connectors/destination-duckdb/README.md b/airbyte-integrations/connectors/destination-duckdb/README.md index b2dfc61e352ed..a43524f06dd38 100644 --- a/airbyte-integrations/connectors/destination-duckdb/README.md +++ b/airbyte-integrations/connectors/destination-duckdb/README.md @@ -54,9 +54,10 @@ cat integration_tests/messages.jsonl| python main.py write --config integration_ #### Build **Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):** ```bash -airbyte-ci connectors --name=destination-duckdb build +airbyte-ci connectors --name=destination-duckdb build [--architecture=...] ``` + An image will be built with the tag `airbyte/destination-duckdb:dev`. **Via `docker build`:** diff --git a/airbyte-integrations/connectors/destination-duckdb/destination_duckdb/destination.py b/airbyte-integrations/connectors/destination-duckdb/destination_duckdb/destination.py index 6e3d0882130f0..ede10d14d32ce 100644 --- a/airbyte-integrations/connectors/destination-duckdb/destination_duckdb/destination.py +++ b/airbyte-integrations/connectors/destination-duckdb/destination_duckdb/destination.py @@ -81,10 +81,12 @@ def write( # Get and register auth token if applicable motherduck_api_key = str(config.get(CONFIG_MOTHERDUCK_API_KEY, "")) + duckdb_config = {} if motherduck_api_key: - os.environ["motherduck_token"] = motherduck_api_key + duckdb_config["motherduck_token"] = motherduck_api_key + duckdb_config["custom_user_agent"] = "airbyte" - con = duckdb.connect(database=path, read_only=False) + con = duckdb.connect(database=path, read_only=False, config=duckdb_config) con.execute(f"CREATE SCHEMA IF NOT EXISTS {schema_name}") @@ -175,10 +177,12 @@ def check(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> AirbyteConn logger.info(f"Using DuckDB file at {path}") os.makedirs(os.path.dirname(path), exist_ok=True) + duckdb_config = {} if CONFIG_MOTHERDUCK_API_KEY in config: - os.environ["motherduck_token"] = str(config[CONFIG_MOTHERDUCK_API_KEY]) + duckdb_config["motherduck_token"] = str(config[CONFIG_MOTHERDUCK_API_KEY]) + duckdb_config["custom_user_agent"] = "airbyte" - con = duckdb.connect(database=path, read_only=False) + con = duckdb.connect(database=path, read_only=False, config=duckdb_config) con.execute("SELECT 1;") return AirbyteConnectionStatus(status=Status.SUCCEEDED) diff --git a/airbyte-integrations/connectors/destination-duckdb/destination_duckdb/run.py b/airbyte-integrations/connectors/destination-duckdb/destination_duckdb/run.py new file mode 100644 index 0000000000000..9eb783fd1908c --- /dev/null +++ b/airbyte-integrations/connectors/destination-duckdb/destination_duckdb/run.py @@ -0,0 +1,15 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +import sys + +from destination_duckdb import DestinationDuckdb + + +def run(): + DestinationDuckdb().run(sys.argv[1:]) + + +if __name__ == "__main__": + run() diff --git a/airbyte-integrations/connectors/destination-duckdb/integration_tests/integration_test.py b/airbyte-integrations/connectors/destination-duckdb/integration_tests/integration_test.py index e15a0b26136f5..54666923227e9 100644 --- a/airbyte-integrations/connectors/destination-duckdb/integration_tests/integration_test.py +++ b/airbyte-integrations/connectors/destination-duckdb/integration_tests/integration_test.py @@ -29,9 +29,12 @@ Type, ) from destination_duckdb import DestinationDuckdb +from destination_duckdb.destination import CONFIG_MOTHERDUCK_API_KEY CONFIG_PATH = "integration_tests/config.json" -SECRETS_CONFIG_PATH = "secrets/config.json" # Should contain a valid MotherDuck API token +SECRETS_CONFIG_PATH = ( + "secrets/config.json" # Should contain a valid MotherDuck API token +) def pytest_generate_tests(metafunc): @@ -42,7 +45,9 @@ def pytest_generate_tests(metafunc): if Path(SECRETS_CONFIG_PATH).is_file(): configs.append("motherduck_config") else: - print(f"Skipping MotherDuck tests because config file not found at: {SECRETS_CONFIG_PATH}") + print( + f"Skipping MotherDuck tests because config file not found at: {SECRETS_CONFIG_PATH}" + ) # for test_name in ["test_check_succeeds", "test_write"]: metafunc.parametrize("config", configs, indirect=True) @@ -98,7 +103,9 @@ def table_schema() -> str: @pytest.fixture -def configured_catalogue(test_table_name: str, table_schema: str) -> ConfiguredAirbyteCatalog: +def configured_catalogue( + test_table_name: str, table_schema: str +) -> ConfiguredAirbyteCatalog: append_stream = ConfiguredAirbyteStream( stream=AirbyteStream( name=test_table_name, @@ -137,7 +144,9 @@ def airbyte_message2(test_table_name: str): @pytest.fixture def airbyte_message3(): - return AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(data={"state": "1"})) + return AirbyteMessage( + type=Type.STATE, state=AirbyteStateMessage(data={"state": "1"}) + ) @pytest.mark.disable_autouse @@ -179,8 +188,14 @@ def test_write( result = list(generator) assert len(result) == 1 - - con = duckdb.connect(database=config.get("destination_path"), read_only=False) + motherduck_api_key = str(config.get(CONFIG_MOTHERDUCK_API_KEY, "")) + duckdb_config = {} + if motherduck_api_key: + duckdb_config["motherduck_token"] = motherduck_api_key + duckdb_config["custom_user_agent"] = "airbyte_intg_test" + con = duckdb.connect( + database=config.get("destination_path"), read_only=False, config=duckdb_config + ) with con: cursor = con.execute( "SELECT _airbyte_ab_id, _airbyte_emitted_at, _airbyte_data " diff --git a/airbyte-integrations/connectors/destination-duckdb/main.py b/airbyte-integrations/connectors/destination-duckdb/main.py index d02d2a60103b6..5aca3c1667452 100644 --- a/airbyte-integrations/connectors/destination-duckdb/main.py +++ b/airbyte-integrations/connectors/destination-duckdb/main.py @@ -1,11 +1,9 @@ # # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # +"""Code moved to `run.py`. This file is now a placeholder for legacy reasons.""" - -import sys - -from destination_duckdb import DestinationDuckdb +from destination_duckdb.run import run if __name__ == "__main__": - DestinationDuckdb().run(sys.argv[1:]) + run() diff --git a/airbyte-integrations/connectors/destination-duckdb/metadata.yaml b/airbyte-integrations/connectors/destination-duckdb/metadata.yaml index fd914fefbb781..571c386ab1ad7 100644 --- a/airbyte-integrations/connectors/destination-duckdb/metadata.yaml +++ b/airbyte-integrations/connectors/destination-duckdb/metadata.yaml @@ -4,7 +4,7 @@ data: connectorSubtype: database connectorType: destination definitionId: 94bd199c-2ff0-4aa2-b98e-17f0acb72610 - dockerImageTag: 0.3.1 + dockerImageTag: 0.3.3 dockerRepository: airbyte/destination-duckdb githubIssueLabel: destination-duckdb icon: duckdb.svg diff --git a/airbyte-integrations/connectors/destination-duckdb/poetry.lock b/airbyte-integrations/connectors/destination-duckdb/poetry.lock index 7562101a90b47..0b6a2708319ae 100644 --- a/airbyte-integrations/connectors/destination-duckdb/poetry.lock +++ b/airbyte-integrations/connectors/destination-duckdb/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. [[package]] name = "airbyte-cdk" @@ -52,21 +52,22 @@ pydantic = ">=1.9.2,<2.0.0" [[package]] name = "attrs" -version = "23.1.0" +version = "23.2.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" files = [ - {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, - {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, ] [package.extras] cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]", "pre-commit"] +dev = ["attrs[tests]", "pre-commit"] docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] [[package]] name = "backoff" @@ -79,31 +80,63 @@ files = [ {file = "backoff-1.11.1.tar.gz", hash = "sha256:ccb962a2378418c667b3c979b504fdeb7d9e0d29c0579e3b13b86467177728cb"}, ] +[[package]] +name = "backports-zoneinfo" +version = "0.2.1" +description = "Backport of the standard library zoneinfo module" +optional = false +python-versions = ">=3.6" +files = [ + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:da6013fd84a690242c310d77ddb8441a559e9cb3d3d59ebac9aca1a57b2e18bc"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:89a48c0d158a3cc3f654da4c2de1ceba85263fafb861b98b59040a5086259722"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:1c5742112073a563c81f786e77514969acb58649bcdf6cdf0b4ed31a348d4546"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-win32.whl", hash = "sha256:e8236383a20872c0cdf5a62b554b27538db7fa1bbec52429d8d106effbaeca08"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-win_amd64.whl", hash = "sha256:8439c030a11780786a2002261569bdf362264f605dfa4d65090b64b05c9f79a7"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:f04e857b59d9d1ccc39ce2da1021d196e47234873820cbeaad210724b1ee28ac"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:17746bd546106fa389c51dbea67c8b7c8f0d14b5526a579ca6ccf5ed72c526cf"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5c144945a7752ca544b4b78c8c41544cdfaf9786f25fe5ffb10e838e19a27570"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-win32.whl", hash = "sha256:e55b384612d93be96506932a786bbcde5a2db7a9e6a4bb4bffe8b733f5b9036b"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a76b38c52400b762e48131494ba26be363491ac4f9a04c1b7e92483d169f6582"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:8961c0f32cd0336fb8e8ead11a1f8cd99ec07145ec2931122faaac1c8f7fd987"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e81b76cace8eda1fca50e345242ba977f9be6ae3945af8d46326d776b4cf78d1"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7b0a64cda4145548fed9efc10322770f929b944ce5cee6c0dfe0c87bf4c0c8c9"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-win32.whl", hash = "sha256:1b13e654a55cd45672cb54ed12148cd33628f672548f373963b0bff67b217328"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:4a0f800587060bf8880f954dbef70de6c11bbe59c673c3d818921f042f9954a6"}, + {file = "backports.zoneinfo-0.2.1.tar.gz", hash = "sha256:fadbfe37f74051d024037f223b8e001611eac868b5c5b06144ef4d8b799862f2"}, +] + +[package.extras] +tzdata = ["tzdata"] + [[package]] name = "black" -version = "23.11.0" +version = "23.12.1" description = "The uncompromising code formatter." optional = false python-versions = ">=3.8" files = [ - {file = "black-23.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dbea0bb8575c6b6303cc65017b46351dc5953eea5c0a59d7b7e3a2d2f433a911"}, - {file = "black-23.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:412f56bab20ac85927f3a959230331de5614aecda1ede14b373083f62ec24e6f"}, - {file = "black-23.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d136ef5b418c81660ad847efe0e55c58c8208b77a57a28a503a5f345ccf01394"}, - {file = "black-23.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:6c1cac07e64433f646a9a838cdc00c9768b3c362805afc3fce341af0e6a9ae9f"}, - {file = "black-23.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cf57719e581cfd48c4efe28543fea3d139c6b6f1238b3f0102a9c73992cbb479"}, - {file = "black-23.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:698c1e0d5c43354ec5d6f4d914d0d553a9ada56c85415700b81dc90125aac244"}, - {file = "black-23.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:760415ccc20f9e8747084169110ef75d545f3b0932ee21368f63ac0fee86b221"}, - {file = "black-23.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:58e5f4d08a205b11800332920e285bd25e1a75c54953e05502052738fe16b3b5"}, - {file = "black-23.11.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:45aa1d4675964946e53ab81aeec7a37613c1cb71647b5394779e6efb79d6d187"}, - {file = "black-23.11.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c44b7211a3a0570cc097e81135faa5f261264f4dfaa22bd5ee2875a4e773bd6"}, - {file = "black-23.11.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a9acad1451632021ee0d146c8765782a0c3846e0e0ea46659d7c4f89d9b212b"}, - {file = "black-23.11.0-cp38-cp38-win_amd64.whl", hash = "sha256:fc7f6a44d52747e65a02558e1d807c82df1d66ffa80a601862040a43ec2e3142"}, - {file = "black-23.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7f622b6822f02bfaf2a5cd31fdb7cd86fcf33dab6ced5185c35f5db98260b055"}, - {file = "black-23.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:250d7e60f323fcfc8ea6c800d5eba12f7967400eb6c2d21ae85ad31c204fb1f4"}, - {file = "black-23.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5133f5507007ba08d8b7b263c7aa0f931af5ba88a29beacc4b2dc23fcefe9c06"}, - {file = "black-23.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:421f3e44aa67138ab1b9bfbc22ee3780b22fa5b291e4db8ab7eee95200726b07"}, - {file = "black-23.11.0-py3-none-any.whl", hash = "sha256:54caaa703227c6e0c87b76326d0862184729a69b73d3b7305b6288e1d830067e"}, - {file = "black-23.11.0.tar.gz", hash = "sha256:4c68855825ff432d197229846f971bc4d6666ce90492e5b02013bcaca4d9ab05"}, + {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, + {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, + {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"}, + {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"}, + {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"}, + {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"}, + {file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"}, + {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"}, + {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"}, + {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"}, + {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"}, + {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"}, + {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"}, + {file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"}, + {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"}, + {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"}, + {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"}, + {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"}, + {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"}, + {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"}, + {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"}, + {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"}, ] [package.dependencies] @@ -117,7 +150,7 @@ typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] @@ -134,24 +167,49 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + [[package]] name = "certifi" -version = "2023.7.22" +version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, - {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] [[package]] @@ -356,13 +414,13 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.1.3" +version = "1.2.0" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, - {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, ] [package.extras] @@ -380,15 +438,33 @@ files = [ [[package]] name = "idna" -version = "3.4" +version = "3.6" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.5" files = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "importlib-resources" +version = "6.4.0" +description = "Read resources from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_resources-6.4.0-py3-none-any.whl", hash = "sha256:50d10f043df931902d4194ea07ec57960f66a80449ff867bfe782b4c486ba78c"}, + {file = "importlib_resources-6.4.0.tar.gz", hash = "sha256:cdb2b453b8046ca4e3798eb1d84f3cce1446a0e8e7b5ef4efb600f19fc398145"}, ] +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["jaraco.test (>=5.4)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] + [[package]] name = "iniconfig" version = "2.0.0" @@ -414,26 +490,15 @@ files = [ [package.dependencies] six = "*" -[[package]] -name = "itsdangerous" -version = "2.1.2" -description = "Safely pass data to untrusted environments and back." -optional = false -python-versions = ">=3.7" -files = [ - {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"}, - {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"}, -] - [[package]] name = "jinja2" -version = "3.1.2" +version = "3.1.3" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" files = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, ] [package.dependencies] @@ -476,107 +541,107 @@ format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-va [[package]] name = "markupsafe" -version = "2.1.3" +version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, - {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] [[package]] name = "mypy" -version = "1.7.0" +version = "1.9.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5da84d7bf257fd8f66b4f759a904fd2c5a765f70d8b52dde62b521972a0a2357"}, - {file = "mypy-1.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a3637c03f4025f6405737570d6cbfa4f1400eb3c649317634d273687a09ffc2f"}, - {file = "mypy-1.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b633f188fc5ae1b6edca39dae566974d7ef4e9aaaae00bc36efe1f855e5173ac"}, - {file = "mypy-1.7.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d6ed9a3997b90c6f891138e3f83fb8f475c74db4ccaa942a1c7bf99e83a989a1"}, - {file = "mypy-1.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:1fe46e96ae319df21359c8db77e1aecac8e5949da4773c0274c0ef3d8d1268a9"}, - {file = "mypy-1.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:df67fbeb666ee8828f675fee724cc2cbd2e4828cc3df56703e02fe6a421b7401"}, - {file = "mypy-1.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a79cdc12a02eb526d808a32a934c6fe6df07b05f3573d210e41808020aed8b5d"}, - {file = "mypy-1.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f65f385a6f43211effe8c682e8ec3f55d79391f70a201575def73d08db68ead1"}, - {file = "mypy-1.7.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e81ffd120ee24959b449b647c4b2fbfcf8acf3465e082b8d58fd6c4c2b27e46"}, - {file = "mypy-1.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:f29386804c3577c83d76520abf18cfcd7d68264c7e431c5907d250ab502658ee"}, - {file = "mypy-1.7.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:87c076c174e2c7ef8ab416c4e252d94c08cd4980a10967754f91571070bf5fbe"}, - {file = "mypy-1.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6cb8d5f6d0fcd9e708bb190b224089e45902cacef6f6915481806b0c77f7786d"}, - {file = "mypy-1.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d93e76c2256aa50d9c82a88e2f569232e9862c9982095f6d54e13509f01222fc"}, - {file = "mypy-1.7.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cddee95dea7990e2215576fae95f6b78a8c12f4c089d7e4367564704e99118d3"}, - {file = "mypy-1.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:d01921dbd691c4061a3e2ecdbfbfad029410c5c2b1ee88946bf45c62c6c91210"}, - {file = "mypy-1.7.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:185cff9b9a7fec1f9f7d8352dff8a4c713b2e3eea9c6c4b5ff7f0edf46b91e41"}, - {file = "mypy-1.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7a7b1e399c47b18feb6f8ad4a3eef3813e28c1e871ea7d4ea5d444b2ac03c418"}, - {file = "mypy-1.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc9fe455ad58a20ec68599139ed1113b21f977b536a91b42bef3ffed5cce7391"}, - {file = "mypy-1.7.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d0fa29919d2e720c8dbaf07d5578f93d7b313c3e9954c8ec05b6d83da592e5d9"}, - {file = "mypy-1.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b53655a295c1ed1af9e96b462a736bf083adba7b314ae775563e3fb4e6795f5"}, - {file = "mypy-1.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c1b06b4b109e342f7dccc9efda965fc3970a604db70f8560ddfdee7ef19afb05"}, - {file = "mypy-1.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bf7a2f0a6907f231d5e41adba1a82d7d88cf1f61a70335889412dec99feeb0f8"}, - {file = "mypy-1.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:551d4a0cdcbd1d2cccdcc7cb516bb4ae888794929f5b040bb51aae1846062901"}, - {file = "mypy-1.7.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:55d28d7963bef00c330cb6461db80b0b72afe2f3c4e2963c99517cf06454e665"}, - {file = "mypy-1.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:870bd1ffc8a5862e593185a4c169804f2744112b4a7c55b93eb50f48e7a77010"}, - {file = "mypy-1.7.0-py3-none-any.whl", hash = "sha256:96650d9a4c651bc2a4991cf46f100973f656d69edc7faf91844e87fe627f7e96"}, - {file = "mypy-1.7.0.tar.gz", hash = "sha256:1e280b5697202efa698372d2f39e9a6713a0395a756b1c6bd48995f8d72690dc"}, + {file = "mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"}, + {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"}, + {file = "mypy-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150"}, + {file = "mypy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374"}, + {file = "mypy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc"}, + {file = "mypy-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129"}, + {file = "mypy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612"}, + {file = "mypy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6"}, + {file = "mypy-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185"}, + {file = "mypy-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913"}, + {file = "mypy-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"}, + {file = "mypy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b"}, + {file = "mypy-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2"}, + {file = "mypy-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e"}, + {file = "mypy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04"}, + {file = "mypy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89"}, + {file = "mypy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02"}, + {file = "mypy-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4"}, + {file = "mypy-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d"}, + {file = "mypy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf"}, + {file = "mypy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9"}, + {file = "mypy-1.9.0-py3-none-any.whl", hash = "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e"}, + {file = "mypy-1.9.0.tar.gz", hash = "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974"}, ] [package.dependencies] @@ -603,84 +668,151 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] name = "pathspec" -version = "0.11.2" +version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, - {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] [[package]] name = "pendulum" -version = "2.1.2" +version = "3.0.0" description = "Python datetimes made easy" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.8" files = [ - {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, - {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, - {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, - {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, - {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, - {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, - {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, - {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, - {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, - {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, - {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, - {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, - {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, - {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, - {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, - {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, - {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, - {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, - {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, - {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, - {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, + {file = "pendulum-3.0.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2cf9e53ef11668e07f73190c805dbdf07a1939c3298b78d5a9203a86775d1bfd"}, + {file = "pendulum-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fb551b9b5e6059377889d2d878d940fd0bbb80ae4810543db18e6f77b02c5ef6"}, + {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c58227ac260d5b01fc1025176d7b31858c9f62595737f350d22124a9a3ad82d"}, + {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60fb6f415fea93a11c52578eaa10594568a6716602be8430b167eb0d730f3332"}, + {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b69f6b4dbcb86f2c2fe696ba991e67347bcf87fe601362a1aba6431454b46bde"}, + {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:138afa9c373ee450ede206db5a5e9004fd3011b3c6bbe1e57015395cd076a09f"}, + {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:83d9031f39c6da9677164241fd0d37fbfc9dc8ade7043b5d6d62f56e81af8ad2"}, + {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0c2308af4033fa534f089595bcd40a95a39988ce4059ccd3dc6acb9ef14ca44a"}, + {file = "pendulum-3.0.0-cp310-none-win_amd64.whl", hash = "sha256:9a59637cdb8462bdf2dbcb9d389518c0263799189d773ad5c11db6b13064fa79"}, + {file = "pendulum-3.0.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3725245c0352c95d6ca297193192020d1b0c0f83d5ee6bb09964edc2b5a2d508"}, + {file = "pendulum-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6c035f03a3e565ed132927e2c1b691de0dbf4eb53b02a5a3c5a97e1a64e17bec"}, + {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:597e66e63cbd68dd6d58ac46cb7a92363d2088d37ccde2dae4332ef23e95cd00"}, + {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99a0f8172e19f3f0c0e4ace0ad1595134d5243cf75985dc2233e8f9e8de263ca"}, + {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:77d8839e20f54706aed425bec82a83b4aec74db07f26acd039905d1237a5e1d4"}, + {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afde30e8146292b059020fbc8b6f8fd4a60ae7c5e6f0afef937bbb24880bdf01"}, + {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:660434a6fcf6303c4efd36713ca9212c753140107ee169a3fc6c49c4711c2a05"}, + {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dee9e5a48c6999dc1106eb7eea3e3a50e98a50651b72c08a87ee2154e544b33e"}, + {file = "pendulum-3.0.0-cp311-none-win_amd64.whl", hash = "sha256:d4cdecde90aec2d67cebe4042fd2a87a4441cc02152ed7ed8fb3ebb110b94ec4"}, + {file = "pendulum-3.0.0-cp311-none-win_arm64.whl", hash = "sha256:773c3bc4ddda2dda9f1b9d51fe06762f9200f3293d75c4660c19b2614b991d83"}, + {file = "pendulum-3.0.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:409e64e41418c49f973d43a28afe5df1df4f1dd87c41c7c90f1a63f61ae0f1f7"}, + {file = "pendulum-3.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a38ad2121c5ec7c4c190c7334e789c3b4624798859156b138fcc4d92295835dc"}, + {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fde4d0b2024b9785f66b7f30ed59281bd60d63d9213cda0eb0910ead777f6d37"}, + {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b2c5675769fb6d4c11238132962939b960fcb365436b6d623c5864287faa319"}, + {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8af95e03e066826f0f4c65811cbee1b3123d4a45a1c3a2b4fc23c4b0dff893b5"}, + {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2165a8f33cb15e06c67070b8afc87a62b85c5a273e3aaa6bc9d15c93a4920d6f"}, + {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ad5e65b874b5e56bd942546ea7ba9dd1d6a25121db1c517700f1c9de91b28518"}, + {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17fe4b2c844bbf5f0ece69cfd959fa02957c61317b2161763950d88fed8e13b9"}, + {file = "pendulum-3.0.0-cp312-none-win_amd64.whl", hash = "sha256:78f8f4e7efe5066aca24a7a57511b9c2119f5c2b5eb81c46ff9222ce11e0a7a5"}, + {file = "pendulum-3.0.0-cp312-none-win_arm64.whl", hash = "sha256:28f49d8d1e32aae9c284a90b6bb3873eee15ec6e1d9042edd611b22a94ac462f"}, + {file = "pendulum-3.0.0-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:d4e2512f4e1a4670284a153b214db9719eb5d14ac55ada5b76cbdb8c5c00399d"}, + {file = "pendulum-3.0.0-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:3d897eb50883cc58d9b92f6405245f84b9286cd2de6e8694cb9ea5cb15195a32"}, + {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e169cc2ca419517f397811bbe4589cf3cd13fca6dc38bb352ba15ea90739ebb"}, + {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f17c3084a4524ebefd9255513692f7e7360e23c8853dc6f10c64cc184e1217ab"}, + {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:826d6e258052715f64d05ae0fc9040c0151e6a87aae7c109ba9a0ed930ce4000"}, + {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2aae97087872ef152a0c40e06100b3665d8cb86b59bc8471ca7c26132fccd0f"}, + {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ac65eeec2250d03106b5e81284ad47f0d417ca299a45e89ccc69e36130ca8bc7"}, + {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a5346d08f3f4a6e9e672187faa179c7bf9227897081d7121866358af369f44f9"}, + {file = "pendulum-3.0.0-cp37-none-win_amd64.whl", hash = "sha256:235d64e87946d8f95c796af34818c76e0f88c94d624c268693c85b723b698aa9"}, + {file = "pendulum-3.0.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:6a881d9c2a7f85bc9adafcfe671df5207f51f5715ae61f5d838b77a1356e8b7b"}, + {file = "pendulum-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d7762d2076b9b1cb718a6631ad6c16c23fc3fac76cbb8c454e81e80be98daa34"}, + {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e8e36a8130819d97a479a0e7bf379b66b3b1b520e5dc46bd7eb14634338df8c"}, + {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7dc843253ac373358ffc0711960e2dd5b94ab67530a3e204d85c6e8cb2c5fa10"}, + {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a78ad3635d609ceb1e97d6aedef6a6a6f93433ddb2312888e668365908c7120"}, + {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b30a137e9e0d1f751e60e67d11fc67781a572db76b2296f7b4d44554761049d6"}, + {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c95984037987f4a457bb760455d9ca80467be792236b69d0084f228a8ada0162"}, + {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d29c6e578fe0f893766c0d286adbf0b3c726a4e2341eba0917ec79c50274ec16"}, + {file = "pendulum-3.0.0-cp38-none-win_amd64.whl", hash = "sha256:deaba8e16dbfcb3d7a6b5fabdd5a38b7c982809567479987b9c89572df62e027"}, + {file = "pendulum-3.0.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b11aceea5b20b4b5382962b321dbc354af0defe35daa84e9ff3aae3c230df694"}, + {file = "pendulum-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a90d4d504e82ad236afac9adca4d6a19e4865f717034fc69bafb112c320dcc8f"}, + {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:825799c6b66e3734227756fa746cc34b3549c48693325b8b9f823cb7d21b19ac"}, + {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad769e98dc07972e24afe0cff8d365cb6f0ebc7e65620aa1976fcfbcadc4c6f3"}, + {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6fc26907eb5fb8cc6188cc620bc2075a6c534d981a2f045daa5f79dfe50d512"}, + {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c717eab1b6d898c00a3e0fa7781d615b5c5136bbd40abe82be100bb06df7a56"}, + {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3ddd1d66d1a714ce43acfe337190be055cdc221d911fc886d5a3aae28e14b76d"}, + {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:822172853d7a9cf6da95d7b66a16c7160cb99ae6df55d44373888181d7a06edc"}, + {file = "pendulum-3.0.0-cp39-none-win_amd64.whl", hash = "sha256:840de1b49cf1ec54c225a2a6f4f0784d50bd47f68e41dc005b7f67c7d5b5f3ae"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3b1f74d1e6ffe5d01d6023870e2ce5c2191486928823196f8575dcc786e107b1"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:729e9f93756a2cdfa77d0fc82068346e9731c7e884097160603872686e570f07"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e586acc0b450cd21cbf0db6bae386237011b75260a3adceddc4be15334689a9a"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22e7944ffc1f0099a79ff468ee9630c73f8c7835cd76fdb57ef7320e6a409df4"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fa30af36bd8e50686846bdace37cf6707bdd044e5cb6e1109acbad3277232e04"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:440215347b11914ae707981b9a57ab9c7b6983ab0babde07063c6ee75c0dc6e7"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:314c4038dc5e6a52991570f50edb2f08c339debdf8cea68ac355b32c4174e820"}, + {file = "pendulum-3.0.0-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5acb1d386337415f74f4d1955c4ce8d0201978c162927d07df8eb0692b2d8533"}, + {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a789e12fbdefaffb7b8ac67f9d8f22ba17a3050ceaaa635cd1cc4645773a4b1e"}, + {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:860aa9b8a888e5913bd70d819306749e5eb488e6b99cd6c47beb701b22bdecf5"}, + {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:5ebc65ea033ef0281368217fbf59f5cb05b338ac4dd23d60959c7afcd79a60a0"}, + {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d9fef18ab0386ef6a9ac7bad7e43ded42c83ff7ad412f950633854f90d59afa8"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1c134ba2f0571d0b68b83f6972e2307a55a5a849e7dac8505c715c531d2a8795"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:385680812e7e18af200bb9b4a49777418c32422d05ad5a8eb85144c4a285907b"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eec91cd87c59fb32ec49eb722f375bd58f4be790cae11c1b70fac3ee4f00da0"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4386bffeca23c4b69ad50a36211f75b35a4deb6210bdca112ac3043deb7e494a"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dfbcf1661d7146d7698da4b86e7f04814221081e9fe154183e34f4c5f5fa3bf8"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:04a1094a5aa1daa34a6b57c865b25f691848c61583fb22722a4df5699f6bf74c"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5b0ec85b9045bd49dd3a3493a5e7ddfd31c36a2a60da387c419fa04abcaecb23"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0a15b90129765b705eb2039062a6daf4d22c4e28d1a54fa260892e8c3ae6e157"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:bb8f6d7acd67a67d6fedd361ad2958ff0539445ef51cbe8cd288db4306503cd0"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd69b15374bef7e4b4440612915315cc42e8575fcda2a3d7586a0d88192d0c88"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc00f8110db6898360c53c812872662e077eaf9c75515d53ecc65d886eec209a"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:83a44e8b40655d0ba565a5c3d1365d27e3e6778ae2a05b69124db9e471255c4a"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1a3604e9fbc06b788041b2a8b78f75c243021e0f512447806a6d37ee5214905d"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:92c307ae7accebd06cbae4729f0ba9fa724df5f7d91a0964b1b972a22baa482b"}, + {file = "pendulum-3.0.0.tar.gz", hash = "sha256:5d034998dea404ec31fae27af6b22cff1708f830a1ed7353be4d1019bb9f584e"}, ] [package.dependencies] -python-dateutil = ">=2.6,<3.0" -pytzdata = ">=2020.1" +"backports.zoneinfo" = {version = ">=0.2.1", markers = "python_version < \"3.9\""} +importlib-resources = {version = ">=5.9.0", markers = "python_version < \"3.9\""} +python-dateutil = ">=2.6" +tzdata = ">=2020.1" + +[package.extras] +test = ["time-machine (>=2.6.0)"] [[package]] name = "platformdirs" -version = "4.0.0" +version = "4.2.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "platformdirs-4.0.0-py3-none-any.whl", hash = "sha256:118c954d7e949b35437270383a3f2531e99dd93cf7ce4dc8340d3356d30f173b"}, - {file = "platformdirs-4.0.0.tar.gz", hash = "sha256:cb633b2bcf10c51af60beb0ab06d2f1d69064b43abf4c185ca6b28865f3f9731"}, + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, ] [package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] [[package]] name = "pluggy" -version = "1.3.0" +version = "1.4.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, - {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, ] [package.extras] @@ -689,47 +821,47 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pydantic" -version = "1.10.13" +version = "1.10.15" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737"}, - {file = "pydantic-1.10.13-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01"}, - {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1740068fd8e2ef6eb27a20e5651df000978edce6da6803c2bef0bc74540f9548"}, - {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84bafe2e60b5e78bc64a2941b4c071a4b7404c5c907f5f5a99b0139781e69ed8"}, - {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bc0898c12f8e9c97f6cd44c0ed70d55749eaf783716896960b4ecce2edfd2d69"}, - {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:654db58ae399fe6434e55325a2c3e959836bd17a6f6a0b6ca8107ea0571d2e17"}, - {file = "pydantic-1.10.13-cp310-cp310-win_amd64.whl", hash = "sha256:75ac15385a3534d887a99c713aa3da88a30fbd6204a5cd0dc4dab3d770b9bd2f"}, - {file = "pydantic-1.10.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653"}, - {file = "pydantic-1.10.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe"}, - {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9"}, - {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80"}, - {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580"}, - {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0"}, - {file = "pydantic-1.10.13-cp311-cp311-win_amd64.whl", hash = "sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0"}, - {file = "pydantic-1.10.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4b03e42ec20286f052490423682016fd80fda830d8e4119f8ab13ec7464c0132"}, - {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f59ef915cac80275245824e9d771ee939133be38215555e9dc90c6cb148aaeb5"}, - {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a1f9f747851338933942db7af7b6ee8268568ef2ed86c4185c6ef4402e80ba8"}, - {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:97cce3ae7341f7620a0ba5ef6cf043975cd9d2b81f3aa5f4ea37928269bc1b87"}, - {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:854223752ba81e3abf663d685f105c64150873cc6f5d0c01d3e3220bcff7d36f"}, - {file = "pydantic-1.10.13-cp37-cp37m-win_amd64.whl", hash = "sha256:b97c1fac8c49be29486df85968682b0afa77e1b809aff74b83081cc115e52f33"}, - {file = "pydantic-1.10.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c958d053453a1c4b1c2062b05cd42d9d5c8eb67537b8d5a7e3c3032943ecd261"}, - {file = "pydantic-1.10.13-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c5370a7edaac06daee3af1c8b1192e305bc102abcbf2a92374b5bc793818599"}, - {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d6f6e7305244bddb4414ba7094ce910560c907bdfa3501e9db1a7fd7eaea127"}, - {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3a3c792a58e1622667a2837512099eac62490cdfd63bd407993aaf200a4cf1f"}, - {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c636925f38b8db208e09d344c7aa4f29a86bb9947495dd6b6d376ad10334fb78"}, - {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:678bcf5591b63cc917100dc50ab6caebe597ac67e8c9ccb75e698f66038ea953"}, - {file = "pydantic-1.10.13-cp38-cp38-win_amd64.whl", hash = "sha256:6cf25c1a65c27923a17b3da28a0bdb99f62ee04230c931d83e888012851f4e7f"}, - {file = "pydantic-1.10.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8ef467901d7a41fa0ca6db9ae3ec0021e3f657ce2c208e98cd511f3161c762c6"}, - {file = "pydantic-1.10.13-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:968ac42970f57b8344ee08837b62f6ee6f53c33f603547a55571c954a4225691"}, - {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9849f031cf8a2f0a928fe885e5a04b08006d6d41876b8bbd2fc68a18f9f2e3fd"}, - {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56e3ff861c3b9c6857579de282ce8baabf443f42ffba355bf070770ed63e11e1"}, - {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f00790179497767aae6bcdc36355792c79e7bbb20b145ff449700eb076c5f96"}, - {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:75b297827b59bc229cac1a23a2f7a4ac0031068e5be0ce385be1462e7e17a35d"}, - {file = "pydantic-1.10.13-cp39-cp39-win_amd64.whl", hash = "sha256:e70ca129d2053fb8b728ee7d1af8e553a928d7e301a311094b8a0501adc8763d"}, - {file = "pydantic-1.10.13-py3-none-any.whl", hash = "sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687"}, - {file = "pydantic-1.10.13.tar.gz", hash = "sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50f1666a9940d3d68683c9d96e39640f709d7a72ff8702987dab1761036206bb"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82790d4753ee5d00739d6cb5cf56bceb186d9d6ce134aca3ba7befb1eedbc2c8"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d207d5b87f6cbefbdb1198154292faee8017d7495a54ae58db06762004500d00"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e49db944fad339b2ccb80128ffd3f8af076f9f287197a480bf1e4ca053a866f0"}, + {file = "pydantic-1.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:d3b5c4cbd0c9cb61bbbb19ce335e1f8ab87a811f6d589ed52b0254cf585d709c"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3d5731a120752248844676bf92f25a12f6e45425e63ce22e0849297a093b5b0"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c365ad9c394f9eeffcb30a82f4246c0006417f03a7c0f8315d6211f25f7cb654"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3287e1614393119c67bd4404f46e33ae3be3ed4cd10360b48d0a4459f420c6a3"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be51dd2c8596b25fe43c0a4a59c2bee4f18d88efb8031188f9e7ddc6b469cf44"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6a51a1dd4aa7b3f1317f65493a182d3cff708385327c1c82c81e4a9d6d65b2e4"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4e316e54b5775d1eb59187f9290aeb38acf620e10f7fd2f776d97bb788199e53"}, + {file = "pydantic-1.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:0d142fa1b8f2f0ae11ddd5e3e317dcac060b951d605fda26ca9b234b92214986"}, + {file = "pydantic-1.10.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7ea210336b891f5ea334f8fc9f8f862b87acd5d4a0cbc9e3e208e7aa1775dabf"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3453685ccd7140715e05f2193d64030101eaad26076fad4e246c1cc97e1bb30d"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bea1f03b8d4e8e86702c918ccfd5d947ac268f0f0cc6ed71782e4b09353b26f"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:af9850d98fc21e5bc24ea9e35dd80a29faf6462c608728a110c0a30b595e58b7"}, + {file = "pydantic-1.10.15-cp37-cp37m-win_amd64.whl", hash = "sha256:d31ee5b14a82c9afe2bd26aaa405293d4237d0591527d9129ce36e58f19f95c1"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5e09c19df304b8123938dc3c53d3d3be6ec74b9d7d0d80f4f4b5432ae16c2022"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7ac9237cd62947db00a0d16acf2f3e00d1ae9d3bd602b9c415f93e7a9fc10528"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:584f2d4c98ffec420e02305cf675857bae03c9d617fcfdc34946b1160213a948"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbc6989fad0c030bd70a0b6f626f98a862224bc2b1e36bfc531ea2facc0a340c"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d573082c6ef99336f2cb5b667b781d2f776d4af311574fb53d908517ba523c22"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6bd7030c9abc80134087d8b6e7aa957e43d35714daa116aced57269a445b8f7b"}, + {file = "pydantic-1.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:3350f527bb04138f8aff932dc828f154847fbdc7a1a44c240fbfff1b57f49a12"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51d405b42f1b86703555797270e4970a9f9bd7953f3990142e69d1037f9d9e51"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a980a77c52723b0dc56640ced396b73a024d4b74f02bcb2d21dbbac1debbe9d0"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f1a1fb467d3f49e1708a3f632b11c69fccb4e748a325d5a491ddc7b5d22383"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:676ed48f2c5bbad835f1a8ed8a6d44c1cd5a21121116d2ac40bd1cd3619746ed"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92229f73400b80c13afcd050687f4d7e88de9234d74b27e6728aa689abcf58cc"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2746189100c646682eff0bce95efa7d2e203420d8e1c613dc0c6b4c1d9c1fde4"}, + {file = "pydantic-1.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:394f08750bd8eaad714718812e7fab615f873b3cdd0b9d84e76e51ef3b50b6b7"}, + {file = "pydantic-1.10.15-py3-none-any.whl", hash = "sha256:28e552a060ba2740d0d2aabe35162652c1459a0b9069fe0db7f4ee0e18e74d58"}, + {file = "pydantic-1.10.15.tar.gz", hash = "sha256:ca832e124eda231a60a041da4f013e3ff24949d94a01154b137fc2f2a43c3ffb"}, ] [package.dependencies] @@ -782,13 +914,13 @@ files = [ [[package]] name = "pytest" -version = "7.4.3" +version = "7.4.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, - {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, ] [package.dependencies] @@ -804,29 +936,18 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] six = ">=1.5" -[[package]] -name = "pytzdata" -version = "2020.1" -description = "The Olson timezone database for Python." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, - {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, -] - [[package]] name = "pyyaml" version = "6.0.1" @@ -839,7 +960,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -847,15 +967,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -872,7 +985,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -880,7 +992,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -909,26 +1020,33 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "0.6.4" -description = "Persistent cache for requests library" +version = "1.2.0" +description = "A persistent cache for python requests" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "requests-cache-0.6.4.tar.gz", hash = "sha256:dd9120a4ab7b8128cba9b6b120d8b5560d566a3cd0f828cced3d3fd60a42ec40"}, - {file = "requests_cache-0.6.4-py2.py3-none-any.whl", hash = "sha256:1102daa13a804abe23fad62d694e7dee58d6063a35d94bf6e8c9821e22e5a78b"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] -itsdangerous = "*" -requests = ">=2.0.0" +attrs = ">=21.2" +cattrs = ">=22.2" +platformdirs = ">=2.5" +requests = ">=2.22" url-normalize = ">=1.4" +urllib3 = ">=1.25.5" [package.extras] -backends = ["boto3", "pymongo", "redis"] -build = ["coveralls", "twine", "wheel"] -dev = ["Sphinx (>=3.5.3,<3.6.0)", "black (==20.8b1)", "boto3", "coveralls", "flake8", "flake8-comprehensions", "flake8-polyfill", "isort", "m2r2", "pre-commit", "psutil", "pymongo", "pytest (>=5.0)", "pytest-cov (>=2.11)", "pytest-order (>=0.11.0,<0.12.0)", "pytest-xdist", "radon", "redis", "requests-mock (>=1.8)", "sphinx-autodoc-typehints", "sphinx-copybutton", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-apidoc", "timeout-decorator", "twine", "wheel"] -docs = ["Sphinx (>=3.5.3,<3.6.0)", "m2r2", "sphinx-autodoc-typehints", "sphinx-copybutton", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-apidoc"] -test = ["black (==20.8b1)", "flake8", "flake8-comprehensions", "flake8-polyfill", "isort", "pre-commit", "psutil", "pytest (>=5.0)", "pytest-cov (>=2.11)", "pytest-order (>=0.11.0,<0.12.0)", "pytest-xdist", "radon", "requests-mock (>=1.8)", "timeout-decorator"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] +bson = ["bson (>=0.5)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] +dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] +json = ["ujson (>=5.4)"] +mongodb = ["pymongo (>=3)"] +redis = ["redis (>=3)"] +security = ["itsdangerous (>=2.0)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "ruff" @@ -958,19 +1076,19 @@ files = [ [[package]] name = "setuptools" -version = "68.2.2" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-68.2.2-py3-none-any.whl", hash = "sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a"}, - {file = "setuptools-68.2.2.tar.gz", hash = "sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -996,13 +1114,24 @@ files = [ [[package]] name = "typing-extensions" -version = "4.8.0" +version = "4.11.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, - {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, + {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, + {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, +] + +[[package]] +name = "tzdata" +version = "2024.1" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, ] [[package]] @@ -1021,17 +1150,18 @@ six = "*" [[package]] name = "urllib3" -version = "2.1.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, - {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] @@ -1128,6 +1258,21 @@ files = [ {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, ] +[[package]] +name = "zipp" +version = "3.18.1" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, + {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + [metadata] lock-version = "2.0" python-versions = ">=3.8" diff --git a/airbyte-integrations/connectors/destination-duckdb/pyproject.toml b/airbyte-integrations/connectors/destination-duckdb/pyproject.toml index 1d47a9651f97a..a9704acab9a92 100644 --- a/airbyte-integrations/connectors/destination-duckdb/pyproject.toml +++ b/airbyte-integrations/connectors/destination-duckdb/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "destination-duckdb" -version = "0.3.0" +version = "0.3.3" description = "Destination implementation for Duckdb." authors = ["Simon Späti, Airbyte"] license = "MIT" @@ -20,3 +20,6 @@ mypy = "^1.5.1" [build-system] requires = ["poetry-core"] build-backend = "poetry.core.masonry.api" + +[tool.poetry.scripts] +destination-duckdb = "destination_duckdb.run:run" diff --git a/airbyte-integrations/connectors/destination-e2e-test/metadata.yaml b/airbyte-integrations/connectors/destination-e2e-test/metadata.yaml index 381d1b6b1ecc0..142ee7791d417 100644 --- a/airbyte-integrations/connectors/destination-e2e-test/metadata.yaml +++ b/airbyte-integrations/connectors/destination-e2e-test/metadata.yaml @@ -2,7 +2,7 @@ data: connectorSubtype: unknown connectorType: destination definitionId: 2eb65e87-983a-4fd7-b3e3-9d9dc6eb8537 - dockerImageTag: 0.3.1 + dockerImageTag: 0.3.3 dockerRepository: airbyte/destination-e2e-test githubIssueLabel: destination-e2e-test icon: airbyte.svg diff --git a/airbyte-integrations/connectors/destination-e2e-test/src/main/java/io/airbyte/integrations/destination/e2e_test/logging/LoggingConsumer.java b/airbyte-integrations/connectors/destination-e2e-test/src/main/java/io/airbyte/integrations/destination/e2e_test/logging/LoggingConsumer.java index 4977f2b763f7c..2700adcd96e0c 100644 --- a/airbyte-integrations/connectors/destination-e2e-test/src/main/java/io/airbyte/integrations/destination/e2e_test/logging/LoggingConsumer.java +++ b/airbyte-integrations/connectors/destination-e2e-test/src/main/java/io/airbyte/integrations/destination/e2e_test/logging/LoggingConsumer.java @@ -53,6 +53,8 @@ public void accept(final AirbyteMessage message) { LOGGER.info("Emitting state: {}", message); outputRecordCollector.accept(message); return; + } else if (message.getType() == Type.TRACE) { + LOGGER.info("Received a trace: {}", message); } else if (message.getType() != Type.RECORD) { return; } diff --git a/airbyte-integrations/connectors/destination-iceberg/metadata.yaml b/airbyte-integrations/connectors/destination-iceberg/metadata.yaml index 64e40d6491fd2..9657f01cd844b 100644 --- a/airbyte-integrations/connectors/destination-iceberg/metadata.yaml +++ b/airbyte-integrations/connectors/destination-iceberg/metadata.yaml @@ -2,7 +2,7 @@ data: connectorSubtype: database connectorType: destination definitionId: df65a8f3-9908-451b-aa9b-445462803560 - dockerImageTag: 0.1.5 + dockerImageTag: 0.1.6 dockerRepository: airbyte/destination-iceberg githubIssueLabel: destination-iceberg license: MIT diff --git a/airbyte-integrations/connectors/destination-iceberg/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-iceberg/src/main/resources/spec.json index 245874d890e07..5f1f4c6a49ed3 100644 --- a/airbyte-integrations/connectors/destination-iceberg/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/destination-iceberg/src/main/resources/spec.json @@ -241,7 +241,6 @@ "me-central-1", "me-south-1", "sa-east-1", - "sa-east-1", "us-east-1", "us-east-2", "us-gov-east-1", diff --git a/airbyte-integrations/connectors/destination-milvus/.dockerignore b/airbyte-integrations/connectors/destination-milvus/.dockerignore index d18815658a44c..6673b92602ae1 100644 --- a/airbyte-integrations/connectors/destination-milvus/.dockerignore +++ b/airbyte-integrations/connectors/destination-milvus/.dockerignore @@ -2,4 +2,5 @@ !Dockerfile !main.py !destination_milvus -!setup.py +!pyproject.toml +!poetry.lock diff --git a/airbyte-integrations/connectors/destination-milvus/README.md b/airbyte-integrations/connectors/destination-milvus/README.md index be53efd50e02f..b37491365c320 100644 --- a/airbyte-integrations/connectors/destination-milvus/README.md +++ b/airbyte-integrations/connectors/destination-milvus/README.md @@ -8,26 +8,13 @@ For information about how to use this connector within Airbyte, see [the documen ### Prerequisites **To iterate on this connector, make sure to complete this prerequisites section.** -#### Minimum Python version required `= 3.10.0` +#### Minimum Python version required `= 3.9.0` -#### Build & Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python -m venv .venv -``` - -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt +### Installing the connector +From this connector directory, run: +```bash +poetry install --with dev ``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. - -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. #### Create credentials **If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/destinations/milvus) @@ -42,14 +29,12 @@ and place them into `secrets/config.json`. ``` python main.py spec python main.py check --config secrets/config.json -python main.py discover --config secrets/config.json -python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +python main.py write --config secrets/config.json --catalog integration_tests/configured_catalog.json ``` ### Locally running the connector docker image - #### Use `airbyte-ci` to build your connector The Airbyte way of building this connector is to use our `airbyte-ci` tool. You can follow install instructions [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md#L1). @@ -127,6 +112,18 @@ You can run our full test suite locally using [`airbyte-ci`](https://github.com/ airbyte-ci connectors --name=destination-milvus test ``` +### Unit Tests +To run unit tests locally, from the connector directory run: +``` +poetry run pytest -s unit_tests +``` + +### Integration Tests +To run integration tests locally, make sure you have a secrets/config.json as explained above, and then run: +``` +poetry run pytest -s integration_tests +``` + ### Customizing acceptance Tests Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. diff --git a/airbyte-integrations/connectors/destination-milvus/metadata.yaml b/airbyte-integrations/connectors/destination-milvus/metadata.yaml index db084c260dc20..c2dff2baff42e 100644 --- a/airbyte-integrations/connectors/destination-milvus/metadata.yaml +++ b/airbyte-integrations/connectors/destination-milvus/metadata.yaml @@ -22,7 +22,7 @@ data: connectorSubtype: vectorstore connectorType: destination definitionId: 65de8962-48c9-11ee-be56-0242ac120002 - dockerImageTag: 0.0.12 + dockerImageTag: 0.0.14 dockerRepository: airbyte/destination-milvus githubIssueLabel: destination-milvus icon: milvus.svg diff --git a/airbyte-integrations/connectors/destination-milvus/poetry.lock b/airbyte-integrations/connectors/destination-milvus/poetry.lock new file mode 100644 index 0000000000000..94e832a04eab4 --- /dev/null +++ b/airbyte-integrations/connectors/destination-milvus/poetry.lock @@ -0,0 +1,3127 @@ +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. + +[[package]] +name = "aiohttp" +version = "3.9.4" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "aiohttp-3.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:76d32588ef7e4a3f3adff1956a0ba96faabbdee58f2407c122dd45aa6e34f372"}, + {file = "aiohttp-3.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:56181093c10dbc6ceb8a29dfeea1e815e1dfdc020169203d87fd8d37616f73f9"}, + {file = "aiohttp-3.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7a5b676d3c65e88b3aca41816bf72831898fcd73f0cbb2680e9d88e819d1e4d"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1df528a85fb404899d4207a8d9934cfd6be626e30e5d3a5544a83dbae6d8a7e"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f595db1bceabd71c82e92df212dd9525a8a2c6947d39e3c994c4f27d2fe15b11"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c0b09d76e5a4caac3d27752027fbd43dc987b95f3748fad2b924a03fe8632ad"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:689eb4356649ec9535b3686200b231876fb4cab4aca54e3bece71d37f50c1d13"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3666cf4182efdb44d73602379a66f5fdfd5da0db5e4520f0ac0dcca644a3497"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b65b0f8747b013570eea2f75726046fa54fa8e0c5db60f3b98dd5d161052004a"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1885d2470955f70dfdd33a02e1749613c5a9c5ab855f6db38e0b9389453dce7"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0593822dcdb9483d41f12041ff7c90d4d1033ec0e880bcfaf102919b715f47f1"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:47f6eb74e1ecb5e19a78f4a4228aa24df7fbab3b62d4a625d3f41194a08bd54f"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c8b04a3dbd54de6ccb7604242fe3ad67f2f3ca558f2d33fe19d4b08d90701a89"}, + {file = "aiohttp-3.9.4-cp310-cp310-win32.whl", hash = "sha256:8a78dfb198a328bfb38e4308ca8167028920fb747ddcf086ce706fbdd23b2926"}, + {file = "aiohttp-3.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:e78da6b55275987cbc89141a1d8e75f5070e577c482dd48bd9123a76a96f0bbb"}, + {file = "aiohttp-3.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c111b3c69060d2bafc446917534150fd049e7aedd6cbf21ba526a5a97b4402a5"}, + {file = "aiohttp-3.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:efbdd51872cf170093998c87ccdf3cb5993add3559341a8e5708bcb311934c94"}, + {file = "aiohttp-3.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7bfdb41dc6e85d8535b00d73947548a748e9534e8e4fddd2638109ff3fb081df"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bd9d334412961125e9f68d5b73c1d0ab9ea3f74a58a475e6b119f5293eee7ba"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:35d78076736f4a668d57ade00c65d30a8ce28719d8a42471b2a06ccd1a2e3063"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:824dff4f9f4d0f59d0fa3577932ee9a20e09edec8a2f813e1d6b9f89ced8293f"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52b8b4e06fc15519019e128abedaeb56412b106ab88b3c452188ca47a25c4093"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eae569fb1e7559d4f3919965617bb39f9e753967fae55ce13454bec2d1c54f09"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:69b97aa5792428f321f72aeb2f118e56893371f27e0b7d05750bcad06fc42ca1"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4d79aad0ad4b980663316f26d9a492e8fab2af77c69c0f33780a56843ad2f89e"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:d6577140cd7db19e430661e4b2653680194ea8c22c994bc65b7a19d8ec834403"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:9860d455847cd98eb67897f5957b7cd69fbcb436dd3f06099230f16a66e66f79"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:69ff36d3f8f5652994e08bd22f093e11cfd0444cea310f92e01b45a4e46b624e"}, + {file = "aiohttp-3.9.4-cp311-cp311-win32.whl", hash = "sha256:e27d3b5ed2c2013bce66ad67ee57cbf614288bda8cdf426c8d8fe548316f1b5f"}, + {file = "aiohttp-3.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d6a67e26daa686a6fbdb600a9af8619c80a332556245fa8e86c747d226ab1a1e"}, + {file = "aiohttp-3.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c5ff8ff44825736a4065d8544b43b43ee4c6dd1530f3a08e6c0578a813b0aa35"}, + {file = "aiohttp-3.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d12a244627eba4e9dc52cbf924edef905ddd6cafc6513849b4876076a6f38b0e"}, + {file = "aiohttp-3.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dcad56c8d8348e7e468899d2fb3b309b9bc59d94e6db08710555f7436156097f"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f7e69a7fd4b5ce419238388e55abd220336bd32212c673ceabc57ccf3d05b55"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4870cb049f10d7680c239b55428916d84158798eb8f353e74fa2c98980dcc0b"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2feaf1b7031ede1bc0880cec4b0776fd347259a723d625357bb4b82f62687b"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:939393e8c3f0a5bcd33ef7ace67680c318dc2ae406f15e381c0054dd658397de"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d2334e387b2adcc944680bebcf412743f2caf4eeebd550f67249c1c3696be04"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e0198ea897680e480845ec0ffc5a14e8b694e25b3f104f63676d55bf76a82f1a"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e40d2cd22914d67c84824045861a5bb0fb46586b15dfe4f046c7495bf08306b2"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:aba80e77c227f4234aa34a5ff2b6ff30c5d6a827a91d22ff6b999de9175d71bd"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:fb68dc73bc8ac322d2e392a59a9e396c4f35cb6fdbdd749e139d1d6c985f2527"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f3460a92638dce7e47062cf088d6e7663adb135e936cb117be88d5e6c48c9d53"}, + {file = "aiohttp-3.9.4-cp312-cp312-win32.whl", hash = "sha256:32dc814ddbb254f6170bca198fe307920f6c1308a5492f049f7f63554b88ef36"}, + {file = "aiohttp-3.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:63f41a909d182d2b78fe3abef557fcc14da50c7852f70ae3be60e83ff64edba5"}, + {file = "aiohttp-3.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c3770365675f6be220032f6609a8fbad994d6dcf3ef7dbcf295c7ee70884c9af"}, + {file = "aiohttp-3.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:305edae1dea368ce09bcb858cf5a63a064f3bff4767dec6fa60a0cc0e805a1d3"}, + {file = "aiohttp-3.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6f121900131d116e4a93b55ab0d12ad72573f967b100e49086e496a9b24523ea"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b71e614c1ae35c3d62a293b19eface83d5e4d194e3eb2fabb10059d33e6e8cbf"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:419f009fa4cfde4d16a7fc070d64f36d70a8d35a90d71aa27670bba2be4fd039"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b39476ee69cfe64061fd77a73bf692c40021f8547cda617a3466530ef63f947"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b33f34c9c7decdb2ab99c74be6443942b730b56d9c5ee48fb7df2c86492f293c"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c78700130ce2dcebb1a8103202ae795be2fa8c9351d0dd22338fe3dac74847d9"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:268ba22d917655d1259af2d5659072b7dc11b4e1dc2cb9662fdd867d75afc6a4"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:17e7c051f53a0d2ebf33013a9cbf020bb4e098c4bc5bce6f7b0c962108d97eab"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:7be99f4abb008cb38e144f85f515598f4c2c8932bf11b65add0ff59c9c876d99"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:d58a54d6ff08d2547656356eea8572b224e6f9bbc0cf55fa9966bcaac4ddfb10"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7673a76772bda15d0d10d1aa881b7911d0580c980dbd16e59d7ba1422b2d83cd"}, + {file = "aiohttp-3.9.4-cp38-cp38-win32.whl", hash = "sha256:e4370dda04dc8951012f30e1ce7956a0a226ac0714a7b6c389fb2f43f22a250e"}, + {file = "aiohttp-3.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:eb30c4510a691bb87081192a394fb661860e75ca3896c01c6d186febe7c88530"}, + {file = "aiohttp-3.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:84e90494db7df3be5e056f91412f9fa9e611fbe8ce4aaef70647297f5943b276"}, + {file = "aiohttp-3.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7d4845f8501ab28ebfdbeab980a50a273b415cf69e96e4e674d43d86a464df9d"}, + {file = "aiohttp-3.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:69046cd9a2a17245c4ce3c1f1a4ff8c70c7701ef222fce3d1d8435f09042bba1"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b73a06bafc8dcc508420db43b4dd5850e41e69de99009d0351c4f3007960019"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:418bb0038dfafeac923823c2e63226179976c76f981a2aaad0ad5d51f2229bca"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:71a8f241456b6c2668374d5d28398f8e8cdae4cce568aaea54e0f39359cd928d"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:935c369bf8acc2dc26f6eeb5222768aa7c62917c3554f7215f2ead7386b33748"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74e4e48c8752d14ecfb36d2ebb3d76d614320570e14de0a3aa7a726ff150a03c"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:916b0417aeddf2c8c61291238ce25286f391a6acb6f28005dd9ce282bd6311b6"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9b6787b6d0b3518b2ee4cbeadd24a507756ee703adbac1ab6dc7c4434b8c572a"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:221204dbda5ef350e8db6287937621cf75e85778b296c9c52260b522231940ed"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:10afd99b8251022ddf81eaed1d90f5a988e349ee7d779eb429fb07b670751e8c"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2506d9f7a9b91033201be9ffe7d89c6a54150b0578803cce5cb84a943d075bc3"}, + {file = "aiohttp-3.9.4-cp39-cp39-win32.whl", hash = "sha256:e571fdd9efd65e86c6af2f332e0e95dad259bfe6beb5d15b3c3eca3a6eb5d87b"}, + {file = "aiohttp-3.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:7d29dd5319d20aa3b7749719ac9685fbd926f71ac8c77b2477272725f882072d"}, + {file = "aiohttp-3.9.4.tar.gz", hash = "sha256:6ff71ede6d9a5a58cfb7b6fffc83ab5d4a63138276c771ac91ceaaddf5459644"}, +] + +[package.dependencies] +aiosignal = ">=1.1.2" +async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} +attrs = ">=17.3.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +yarl = ">=1.0,<2.0" + +[package.extras] +speedups = ["Brotli", "aiodns", "brotlicffi"] + +[[package]] +name = "aiosignal" +version = "1.3.1" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = false +python-versions = ">=3.7" +files = [ + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, + {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" + +[[package]] +name = "airbyte-cdk" +version = "0.81.6" +description = "A framework for writing Airbyte Connectors." +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "airbyte_cdk-0.81.6-py3-none-any.whl", hash = "sha256:456a301edae4f8c99b77d89aaa2016827c43acfd7f9fb61e7a961760b954695b"}, + {file = "airbyte_cdk-0.81.6.tar.gz", hash = "sha256:3d4e2f3dc8177a109cb70e1d701583c870e2c31168dad7498cd2c84b0cc34637"}, +] + +[package.dependencies] +airbyte-protocol-models = "*" +backoff = "*" +cachetools = "*" +cohere = {version = "4.21", optional = true, markers = "extra == \"vector-db-based\""} +Deprecated = ">=1.2,<1.3" +dpath = ">=2.0.1,<2.1.0" +genson = "1.2.2" +isodate = ">=0.6.1,<0.7.0" +Jinja2 = ">=3.1.2,<3.2.0" +jsonref = ">=0.2,<0.3" +jsonschema = ">=3.2.0,<3.3.0" +langchain = {version = "0.1.16", optional = true, markers = "extra == \"vector-db-based\""} +langchain_core = "0.1.42" +openai = {version = "0.27.9", extras = ["embeddings"], optional = true, markers = "extra == \"vector-db-based\""} +pendulum = "<3.0.0" +pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" +python-dateutil = "*" +PyYAML = ">=6.0.1,<7.0.0" +requests = "*" +requests_cache = "*" +tiktoken = {version = "0.4.0", optional = true, markers = "extra == \"vector-db-based\""} +wcmatch = "8.4" + +[package.extras] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] +vector-db-based = ["cohere (==4.21)", "langchain (==0.1.16)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] + +[[package]] +name = "airbyte-protocol-models" +version = "0.9.0" +description = "Declares the Airbyte Protocol." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte_protocol_models-0.9.0-py3-none-any.whl", hash = "sha256:e972e140b5efd1edad5a338bcae8fdee9fc12545caf2c321e0f61b151c163a9b"}, + {file = "airbyte_protocol_models-0.9.0.tar.gz", hash = "sha256:40b69c33df23fe82d7078e84beb123bd604480e4d73cb277a890fcc92aedc8d2"}, +] + +[package.dependencies] +pydantic = ">=1.9.2,<2.0.0" + +[[package]] +name = "async-timeout" +version = "4.0.3" +description = "Timeout context manager for asyncio programs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, +] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "bracex" +version = "2.4" +description = "Bash style brace expander." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + +[[package]] +name = "cachetools" +version = "5.3.3" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "cohere" +version = "4.21" +description = "" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "cohere-4.21-py3-none-any.whl", hash = "sha256:5eb81db62e78b3156e734421cc3e657054f9d9f1d68b9f38cf48fe3a8ae40dbc"}, + {file = "cohere-4.21.tar.gz", hash = "sha256:f611438f409dfc5d5a0a153a585349f5a80b169c7102b5994d9999ecf8440866"}, +] + +[package.dependencies] +aiohttp = ">=3.0,<4.0" +backoff = ">=2.0,<3.0" +fastavro = {version = "1.8.2", markers = "python_version >= \"3.8\""} +importlib_metadata = ">=6.0,<7.0" +requests = ">=2.25.0,<3.0.0" +urllib3 = ">=1.26,<3" + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "contourpy" +version = "1.2.1" +description = "Python library for calculating contours of 2D quadrilateral grids" +optional = false +python-versions = ">=3.9" +files = [ + {file = "contourpy-1.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bd7c23df857d488f418439686d3b10ae2fbf9bc256cd045b37a8c16575ea1040"}, + {file = "contourpy-1.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5b9eb0ca724a241683c9685a484da9d35c872fd42756574a7cfbf58af26677fd"}, + {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c75507d0a55378240f781599c30e7776674dbaf883a46d1c90f37e563453480"}, + {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11959f0ce4a6f7b76ec578576a0b61a28bdc0696194b6347ba3f1c53827178b9"}, + {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eb3315a8a236ee19b6df481fc5f997436e8ade24a9f03dfdc6bd490fea20c6da"}, + {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39f3ecaf76cd98e802f094e0d4fbc6dc9c45a8d0c4d185f0f6c2234e14e5f75b"}, + {file = "contourpy-1.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:94b34f32646ca0414237168d68a9157cb3889f06b096612afdd296003fdd32fd"}, + {file = "contourpy-1.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:457499c79fa84593f22454bbd27670227874cd2ff5d6c84e60575c8b50a69619"}, + {file = "contourpy-1.2.1-cp310-cp310-win32.whl", hash = "sha256:ac58bdee53cbeba2ecad824fa8159493f0bf3b8ea4e93feb06c9a465d6c87da8"}, + {file = "contourpy-1.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:9cffe0f850e89d7c0012a1fb8730f75edd4320a0a731ed0c183904fe6ecfc3a9"}, + {file = "contourpy-1.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6022cecf8f44e36af10bd9118ca71f371078b4c168b6e0fab43d4a889985dbb5"}, + {file = "contourpy-1.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ef5adb9a3b1d0c645ff694f9bca7702ec2c70f4d734f9922ea34de02294fdf72"}, + {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6150ffa5c767bc6332df27157d95442c379b7dce3a38dff89c0f39b63275696f"}, + {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c863140fafc615c14a4bf4efd0f4425c02230eb8ef02784c9a156461e62c965"}, + {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:00e5388f71c1a0610e6fe56b5c44ab7ba14165cdd6d695429c5cd94021e390b2"}, + {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4492d82b3bc7fbb7e3610747b159869468079fe149ec5c4d771fa1f614a14df"}, + {file = "contourpy-1.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:49e70d111fee47284d9dd867c9bb9a7058a3c617274900780c43e38d90fe1205"}, + {file = "contourpy-1.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b59c0ffceff8d4d3996a45f2bb6f4c207f94684a96bf3d9728dbb77428dd8cb8"}, + {file = "contourpy-1.2.1-cp311-cp311-win32.whl", hash = "sha256:7b4182299f251060996af5249c286bae9361fa8c6a9cda5efc29fe8bfd6062ec"}, + {file = "contourpy-1.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2855c8b0b55958265e8b5888d6a615ba02883b225f2227461aa9127c578a4922"}, + {file = "contourpy-1.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:62828cada4a2b850dbef89c81f5a33741898b305db244904de418cc957ff05dc"}, + {file = "contourpy-1.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:309be79c0a354afff9ff7da4aaed7c3257e77edf6c1b448a779329431ee79d7e"}, + {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e785e0f2ef0d567099b9ff92cbfb958d71c2d5b9259981cd9bee81bd194c9a4"}, + {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1cac0a8f71a041aa587410424ad46dfa6a11f6149ceb219ce7dd48f6b02b87a7"}, + {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af3f4485884750dddd9c25cb7e3915d83c2db92488b38ccb77dd594eac84c4a0"}, + {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ce6889abac9a42afd07a562c2d6d4b2b7134f83f18571d859b25624a331c90b"}, + {file = "contourpy-1.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a1eea9aecf761c661d096d39ed9026574de8adb2ae1c5bd7b33558af884fb2ce"}, + {file = "contourpy-1.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:187fa1d4c6acc06adb0fae5544c59898ad781409e61a926ac7e84b8f276dcef4"}, + {file = "contourpy-1.2.1-cp312-cp312-win32.whl", hash = "sha256:c2528d60e398c7c4c799d56f907664673a807635b857df18f7ae64d3e6ce2d9f"}, + {file = "contourpy-1.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:1a07fc092a4088ee952ddae19a2b2a85757b923217b7eed584fdf25f53a6e7ce"}, + {file = "contourpy-1.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bb6834cbd983b19f06908b45bfc2dad6ac9479ae04abe923a275b5f48f1a186b"}, + {file = "contourpy-1.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1d59e739ab0e3520e62a26c60707cc3ab0365d2f8fecea74bfe4de72dc56388f"}, + {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd3db01f59fdcbce5b22afad19e390260d6d0222f35a1023d9adc5690a889364"}, + {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a12a813949e5066148712a0626895c26b2578874e4cc63160bb007e6df3436fe"}, + {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe0ccca550bb8e5abc22f530ec0466136379c01321fd94f30a22231e8a48d985"}, + {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1d59258c3c67c865435d8fbeb35f8c59b8bef3d6f46c1f29f6123556af28445"}, + {file = "contourpy-1.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f32c38afb74bd98ce26de7cc74a67b40afb7b05aae7b42924ea990d51e4dac02"}, + {file = "contourpy-1.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d31a63bc6e6d87f77d71e1abbd7387ab817a66733734883d1fc0021ed9bfa083"}, + {file = "contourpy-1.2.1-cp39-cp39-win32.whl", hash = "sha256:ddcb8581510311e13421b1f544403c16e901c4e8f09083c881fab2be80ee31ba"}, + {file = "contourpy-1.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:10a37ae557aabf2509c79715cd20b62e4c7c28b8cd62dd7d99e5ed3ce28c3fd9"}, + {file = "contourpy-1.2.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a31f94983fecbac95e58388210427d68cd30fe8a36927980fab9c20062645609"}, + {file = "contourpy-1.2.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef2b055471c0eb466033760a521efb9d8a32b99ab907fc8358481a1dd29e3bd3"}, + {file = "contourpy-1.2.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b33d2bc4f69caedcd0a275329eb2198f560b325605810895627be5d4b876bf7f"}, + {file = "contourpy-1.2.1.tar.gz", hash = "sha256:4d8908b3bee1c889e547867ca4cdc54e5ab6be6d3e078556814a22457f49423c"}, +] + +[package.dependencies] +numpy = ">=1.20" + +[package.extras] +bokeh = ["bokeh", "selenium"] +docs = ["furo", "sphinx (>=7.2)", "sphinx-copybutton"] +mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.8.0)", "types-Pillow"] +test = ["Pillow", "contourpy[test-no-images]", "matplotlib"] +test-no-images = ["pytest", "pytest-cov", "pytest-xdist", "wurlitzer"] + +[[package]] +name = "cycler" +version = "0.12.1" +description = "Composable style cycles" +optional = false +python-versions = ">=3.8" +files = [ + {file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"}, + {file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"}, +] + +[package.extras] +docs = ["ipython", "matplotlib", "numpydoc", "sphinx"] +tests = ["pytest", "pytest-cov", "pytest-xdist"] + +[[package]] +name = "dataclasses-json" +version = "0.6.4" +description = "Easily serialize dataclasses to and from JSON." +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "dataclasses_json-0.6.4-py3-none-any.whl", hash = "sha256:f90578b8a3177f7552f4e1a6e535e84293cd5da421fcce0642d49c0d7bdf8df2"}, + {file = "dataclasses_json-0.6.4.tar.gz", hash = "sha256:73696ebf24936560cca79a2430cbc4f3dd23ac7bf46ed17f38e5e5e7657a6377"}, +] + +[package.dependencies] +marshmallow = ">=3.18.0,<4.0.0" +typing-inspect = ">=0.4.0,<1" + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "dpath" +version = "2.0.8" +description = "Filesystem-like pathing and searching for dictionaries" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, + {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, +] + +[[package]] +name = "environs" +version = "9.5.0" +description = "simplified environment variable parsing" +optional = false +python-versions = ">=3.6" +files = [ + {file = "environs-9.5.0-py2.py3-none-any.whl", hash = "sha256:1e549569a3de49c05f856f40bce86979e7d5ffbbc4398e7f338574c220189124"}, + {file = "environs-9.5.0.tar.gz", hash = "sha256:a76307b36fbe856bdca7ee9161e6c466fd7fcffc297109a118c59b54e27e30c9"}, +] + +[package.dependencies] +marshmallow = ">=3.0.0" +python-dotenv = "*" + +[package.extras] +dev = ["dj-database-url", "dj-email-url", "django-cache-url", "flake8 (==4.0.1)", "flake8-bugbear (==21.9.2)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)", "pytest", "tox"] +django = ["dj-database-url", "dj-email-url", "django-cache-url"] +lint = ["flake8 (==4.0.1)", "flake8-bugbear (==21.9.2)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)"] +tests = ["dj-database-url", "dj-email-url", "django-cache-url", "pytest"] + +[[package]] +name = "et-xmlfile" +version = "1.1.0" +description = "An implementation of lxml.xmlfile for the standard library" +optional = false +python-versions = ">=3.6" +files = [ + {file = "et_xmlfile-1.1.0-py3-none-any.whl", hash = "sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"}, + {file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "fastavro" +version = "1.8.2" +description = "Fast read/write of AVRO files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fastavro-1.8.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:0e08964b2e9a455d831f2557402a683d4c4d45206f2ab9ade7c69d3dc14e0e58"}, + {file = "fastavro-1.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:401a70b1e5c7161420c6019e0c8afa88f7c8a373468591f5ec37639a903c2509"}, + {file = "fastavro-1.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef1ed3eaa4240c05698d02d8d0c010b9a03780eda37b492da6cd4c9d37e04ec"}, + {file = "fastavro-1.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:543185a672ff6306beb329b57a7b8a3a2dd1eb21a5ccc530150623d58d48bb98"}, + {file = "fastavro-1.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ffbf8bae1edb50fe7beeffc3afa8e684686550c2e5d31bf01c25cfa213f581e1"}, + {file = "fastavro-1.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:bb545eb9d876bc7b785e27e98e7720ada7eee7d7a1729798d2ed51517f13500a"}, + {file = "fastavro-1.8.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2b837d3038c651046252bc92c1b9899bf21c7927a148a1ff89599c36c2a331ca"}, + {file = "fastavro-1.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3510e96c0a47e4e914bd1a29c954eb662bfa24849ad92e597cb97cc79f21af7"}, + {file = "fastavro-1.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccc0e74f2c2ab357f39bb73d67fcdb6dc10e23fdbbd399326139f72ec0fb99a3"}, + {file = "fastavro-1.8.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:add51c70d0ab1175601c75cd687bbe9d16ae312cd8899b907aafe0d79ee2bc1d"}, + {file = "fastavro-1.8.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d9e2662f57e6453e9a2c9fb4f54b2a9e62e3e46f5a412ac00558112336d23883"}, + {file = "fastavro-1.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:fea75cf53a93c56dd56e68abce8d314ef877b27451c870cd7ede7582d34c08a7"}, + {file = "fastavro-1.8.2-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:f489020bb8664c2737c03457ad5dbd490579ddab6f0a7b5c17fecfe982715a89"}, + {file = "fastavro-1.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a547625c138efd5e61300119241041906ee8cb426fc7aa789900f87af7ed330d"}, + {file = "fastavro-1.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53beb458f30c9ad4aa7bff4a42243ff990ffb713b6ce0cd9b360cbc3d648fe52"}, + {file = "fastavro-1.8.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7b1b2cbd2dd851452306beed0ab9bdaeeab1cc8ad46f84b47cd81eeaff6dd6b8"}, + {file = "fastavro-1.8.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d29e9baee0b2f37ecd09bde3b487cf900431fd548c85be3e4fe1b9a0b2a917f1"}, + {file = "fastavro-1.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:66e132c710663230292bc63e2cb79cf95b16ccb94a5fc99bb63694b24e312fc5"}, + {file = "fastavro-1.8.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:38aca63ce604039bcdf2edd14912d00287bdbf8b76f9aa42b28e6ca0bf950092"}, + {file = "fastavro-1.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9787835f6449ee94713e7993a700432fce3763024791ffa8a58dc91ef9d1f950"}, + {file = "fastavro-1.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:536cb448bc83811056be02749fd9df37a69621678f02597d272970a769e9b40c"}, + {file = "fastavro-1.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e9d5027cf7d9968f8f819958b41bfedb933323ea6d6a0485eefacaa1afd91f54"}, + {file = "fastavro-1.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:792adfc0c80c7f1109e0ab4b0decef20691fdf0a45091d397a0563872eb56d42"}, + {file = "fastavro-1.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:650b22766259f7dd7519dfa4e4658f0e233c319efa130b9cf0c36a500e09cc57"}, + {file = "fastavro-1.8.2.tar.gz", hash = "sha256:ab9d9226d4b66b6b3d0661a57cd45259b0868fed1c0cd4fac95249b9e0973320"}, +] + +[package.extras] +codecs = ["lz4", "python-snappy", "zstandard"] +lz4 = ["lz4"] +snappy = ["python-snappy"] +zstandard = ["zstandard"] + +[[package]] +name = "fonttools" +version = "4.51.0" +description = "Tools to manipulate font files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fonttools-4.51.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:84d7751f4468dd8cdd03ddada18b8b0857a5beec80bce9f435742abc9a851a74"}, + {file = "fonttools-4.51.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8b4850fa2ef2cfbc1d1f689bc159ef0f45d8d83298c1425838095bf53ef46308"}, + {file = "fonttools-4.51.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5b48a1121117047d82695d276c2af2ee3a24ffe0f502ed581acc2673ecf1037"}, + {file = "fonttools-4.51.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:180194c7fe60c989bb627d7ed5011f2bef1c4d36ecf3ec64daec8302f1ae0716"}, + {file = "fonttools-4.51.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:96a48e137c36be55e68845fc4284533bda2980f8d6f835e26bca79d7e2006438"}, + {file = "fonttools-4.51.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:806e7912c32a657fa39d2d6eb1d3012d35f841387c8fc6cf349ed70b7c340039"}, + {file = "fonttools-4.51.0-cp310-cp310-win32.whl", hash = "sha256:32b17504696f605e9e960647c5f64b35704782a502cc26a37b800b4d69ff3c77"}, + {file = "fonttools-4.51.0-cp310-cp310-win_amd64.whl", hash = "sha256:c7e91abdfae1b5c9e3a543f48ce96013f9a08c6c9668f1e6be0beabf0a569c1b"}, + {file = "fonttools-4.51.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a8feca65bab31479d795b0d16c9a9852902e3a3c0630678efb0b2b7941ea9c74"}, + {file = "fonttools-4.51.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ac27f436e8af7779f0bb4d5425aa3535270494d3bc5459ed27de3f03151e4c2"}, + {file = "fonttools-4.51.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e19bd9e9964a09cd2433a4b100ca7f34e34731e0758e13ba9a1ed6e5468cc0f"}, + {file = "fonttools-4.51.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2b92381f37b39ba2fc98c3a45a9d6383bfc9916a87d66ccb6553f7bdd129097"}, + {file = "fonttools-4.51.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5f6bc991d1610f5c3bbe997b0233cbc234b8e82fa99fc0b2932dc1ca5e5afec0"}, + {file = "fonttools-4.51.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9696fe9f3f0c32e9a321d5268208a7cc9205a52f99b89479d1b035ed54c923f1"}, + {file = "fonttools-4.51.0-cp311-cp311-win32.whl", hash = "sha256:3bee3f3bd9fa1d5ee616ccfd13b27ca605c2b4270e45715bd2883e9504735034"}, + {file = "fonttools-4.51.0-cp311-cp311-win_amd64.whl", hash = "sha256:0f08c901d3866a8905363619e3741c33f0a83a680d92a9f0e575985c2634fcc1"}, + {file = "fonttools-4.51.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4060acc2bfa2d8e98117828a238889f13b6f69d59f4f2d5857eece5277b829ba"}, + {file = "fonttools-4.51.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1250e818b5f8a679ad79660855528120a8f0288f8f30ec88b83db51515411fcc"}, + {file = "fonttools-4.51.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76f1777d8b3386479ffb4a282e74318e730014d86ce60f016908d9801af9ca2a"}, + {file = "fonttools-4.51.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b5ad456813d93b9c4b7ee55302208db2b45324315129d85275c01f5cb7e61a2"}, + {file = "fonttools-4.51.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:68b3fb7775a923be73e739f92f7e8a72725fd333eab24834041365d2278c3671"}, + {file = "fonttools-4.51.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8e2f1a4499e3b5ee82c19b5ee57f0294673125c65b0a1ff3764ea1f9db2f9ef5"}, + {file = "fonttools-4.51.0-cp312-cp312-win32.whl", hash = "sha256:278e50f6b003c6aed19bae2242b364e575bcb16304b53f2b64f6551b9c000e15"}, + {file = "fonttools-4.51.0-cp312-cp312-win_amd64.whl", hash = "sha256:b3c61423f22165541b9403ee39874dcae84cd57a9078b82e1dce8cb06b07fa2e"}, + {file = "fonttools-4.51.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:1621ee57da887c17312acc4b0e7ac30d3a4fb0fec6174b2e3754a74c26bbed1e"}, + {file = "fonttools-4.51.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d9298be7a05bb4801f558522adbe2feea1b0b103d5294ebf24a92dd49b78e5"}, + {file = "fonttools-4.51.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee1af4be1c5afe4c96ca23badd368d8dc75f611887fb0c0dac9f71ee5d6f110e"}, + {file = "fonttools-4.51.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c18b49adc721a7d0b8dfe7c3130c89b8704baf599fb396396d07d4aa69b824a1"}, + {file = "fonttools-4.51.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de7c29bdbdd35811f14493ffd2534b88f0ce1b9065316433b22d63ca1cd21f14"}, + {file = "fonttools-4.51.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cadf4e12a608ef1d13e039864f484c8a968840afa0258b0b843a0556497ea9ed"}, + {file = "fonttools-4.51.0-cp38-cp38-win32.whl", hash = "sha256:aefa011207ed36cd280babfaa8510b8176f1a77261833e895a9d96e57e44802f"}, + {file = "fonttools-4.51.0-cp38-cp38-win_amd64.whl", hash = "sha256:865a58b6e60b0938874af0968cd0553bcd88e0b2cb6e588727117bd099eef836"}, + {file = "fonttools-4.51.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:60a3409c9112aec02d5fb546f557bca6efa773dcb32ac147c6baf5f742e6258b"}, + {file = "fonttools-4.51.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f7e89853d8bea103c8e3514b9f9dc86b5b4120afb4583b57eb10dfa5afbe0936"}, + {file = "fonttools-4.51.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56fc244f2585d6c00b9bcc59e6593e646cf095a96fe68d62cd4da53dd1287b55"}, + {file = "fonttools-4.51.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d145976194a5242fdd22df18a1b451481a88071feadf251221af110ca8f00ce"}, + {file = "fonttools-4.51.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5b8cab0c137ca229433570151b5c1fc6af212680b58b15abd797dcdd9dd5051"}, + {file = "fonttools-4.51.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:54dcf21a2f2d06ded676e3c3f9f74b2bafded3a8ff12f0983160b13e9f2fb4a7"}, + {file = "fonttools-4.51.0-cp39-cp39-win32.whl", hash = "sha256:0118ef998a0699a96c7b28457f15546815015a2710a1b23a7bf6c1be60c01636"}, + {file = "fonttools-4.51.0-cp39-cp39-win_amd64.whl", hash = "sha256:599bdb75e220241cedc6faebfafedd7670335d2e29620d207dd0378a4e9ccc5a"}, + {file = "fonttools-4.51.0-py3-none-any.whl", hash = "sha256:15c94eeef6b095831067f72c825eb0e2d48bb4cea0647c1b05c981ecba2bf39f"}, + {file = "fonttools-4.51.0.tar.gz", hash = "sha256:dc0673361331566d7a663d7ce0f6fdcbfbdc1f59c6e3ed1165ad7202ca183c68"}, +] + +[package.extras] +all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "pycairo", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0)", "xattr", "zopfli (>=0.1.4)"] +graphite = ["lz4 (>=1.7.4.2)"] +interpolatable = ["munkres", "pycairo", "scipy"] +lxml = ["lxml (>=4.0)"] +pathops = ["skia-pathops (>=0.5.0)"] +plot = ["matplotlib"] +repacker = ["uharfbuzz (>=0.23.0)"] +symfont = ["sympy"] +type1 = ["xattr"] +ufo = ["fs (>=2.2.0,<3)"] +unicode = ["unicodedata2 (>=15.1.0)"] +woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] + +[[package]] +name = "frozenlist" +version = "1.4.1" +description = "A list-like structure which implements collections.abc.MutableSequence" +optional = false +python-versions = ">=3.8" +files = [ + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, + {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, + {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, + {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, + {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, + {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, + {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, + {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, + {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, + {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, + {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, + {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, + {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, +] + +[[package]] +name = "genson" +version = "1.2.2" +description = "GenSON is a powerful, user-friendly JSON Schema generator." +optional = false +python-versions = "*" +files = [ + {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, +] + +[[package]] +name = "greenlet" +version = "3.0.3" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.7" +files = [ + {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, + {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, + {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, + {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, + {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, + {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, + {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, + {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, + {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, + {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, + {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, + {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, + {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, + {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, + {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, + {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil"] + +[[package]] +name = "grpcio" +version = "1.56.0" +description = "HTTP/2-based RPC framework" +optional = false +python-versions = ">=3.7" +files = [ + {file = "grpcio-1.56.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:fb34ace11419f1ae321c36ccaa18d81cd3f20728cd191250be42949d6845bb2d"}, + {file = "grpcio-1.56.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:008767c0aed4899e657b50f2e0beacbabccab51359eba547f860e7c55f2be6ba"}, + {file = "grpcio-1.56.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:17f47aeb9be0da5337f9ff33ebb8795899021e6c0741ee68bd69774a7804ca86"}, + {file = "grpcio-1.56.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43c50d810cc26349b093bf2cfe86756ab3e9aba3e7e681d360930c1268e1399a"}, + {file = "grpcio-1.56.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:187b8f71bad7d41eea15e0c9812aaa2b87adfb343895fffb704fb040ca731863"}, + {file = "grpcio-1.56.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:881575f240eb5db72ddca4dc5602898c29bc082e0d94599bf20588fb7d1ee6a0"}, + {file = "grpcio-1.56.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c243b158dd7585021d16c50498c4b2ec0a64a6119967440c5ff2d8c89e72330e"}, + {file = "grpcio-1.56.0-cp310-cp310-win32.whl", hash = "sha256:8b3b2c7b5feef90bc9a5fa1c7f97637e55ec3e76460c6d16c3013952ee479cd9"}, + {file = "grpcio-1.56.0-cp310-cp310-win_amd64.whl", hash = "sha256:03a80451530fd3b8b155e0c4480434f6be669daf7ecba56f73ef98f94222ee01"}, + {file = "grpcio-1.56.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:64bd3abcf9fb4a9fa4ede8d0d34686314a7075f62a1502217b227991d9ca4245"}, + {file = "grpcio-1.56.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:fdc3a895791af4addbb826808d4c9c35917c59bb5c430d729f44224e51c92d61"}, + {file = "grpcio-1.56.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:4f84a6fd4482e5fe73b297d4874b62a535bc75dc6aec8e9fe0dc88106cd40397"}, + {file = "grpcio-1.56.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14e70b4dda3183abea94c72d41d5930c333b21f8561c1904a372d80370592ef3"}, + {file = "grpcio-1.56.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b5ce42a5ebe3e04796246ba50357f1813c44a6efe17a37f8dc7a5c470377312"}, + {file = "grpcio-1.56.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8219f17baf069fe8e42bd8ca0b312b875595e43a70cabf397be4fda488e2f27d"}, + {file = "grpcio-1.56.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:defdd14b518e6e468466f799aaa69db0355bca8d3a5ea75fb912d28ba6f8af31"}, + {file = "grpcio-1.56.0-cp311-cp311-win32.whl", hash = "sha256:50f4daa698835accbbcc60e61e0bc29636c0156ddcafb3891c987e533a0031ba"}, + {file = "grpcio-1.56.0-cp311-cp311-win_amd64.whl", hash = "sha256:59c4e606993a47146fbeaf304b9e78c447f5b9ee5641cae013028c4cca784617"}, + {file = "grpcio-1.56.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:b1f4b6f25a87d80b28dd6d02e87d63fe1577fe6d04a60a17454e3f8077a38279"}, + {file = "grpcio-1.56.0-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:c2148170e01d464d41011a878088444c13413264418b557f0bdcd1bf1b674a0e"}, + {file = "grpcio-1.56.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:0409de787ebbf08c9d2bca2bcc7762c1efe72eada164af78b50567a8dfc7253c"}, + {file = "grpcio-1.56.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66f0369d27f4c105cd21059d635860bb2ea81bd593061c45fb64875103f40e4a"}, + {file = "grpcio-1.56.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38fdf5bd0a1c754ce6bf9311a3c2c7ebe56e88b8763593316b69e0e9a56af1de"}, + {file = "grpcio-1.56.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:79d4c5911d12a7aa671e5eb40cbb50a830396525014d2d6f254ea2ba180ce637"}, + {file = "grpcio-1.56.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5d2fc471668a7222e213f86ef76933b18cdda6a51ea1322034478df8c6519959"}, + {file = "grpcio-1.56.0-cp37-cp37m-win_amd64.whl", hash = "sha256:991224fd485e088d3cb5e34366053691a4848a6b7112b8f5625a411305c26691"}, + {file = "grpcio-1.56.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:c6f36621aabecbaff3e70c4d1d924c76c8e6a7ffec60c331893640a4af0a8037"}, + {file = "grpcio-1.56.0-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:1eadd6de258901929223f422ffed7f8b310c0323324caf59227f9899ea1b1674"}, + {file = "grpcio-1.56.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:72836b5a1d4f508ffbcfe35033d027859cc737972f9dddbe33fb75d687421e2e"}, + {file = "grpcio-1.56.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f92a99ab0c7772fb6859bf2e4f44ad30088d18f7c67b83205297bfb229e0d2cf"}, + {file = "grpcio-1.56.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa08affbf672d051cd3da62303901aeb7042a2c188c03b2c2a2d346fc5e81c14"}, + {file = "grpcio-1.56.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e2db108b4c8e29c145e95b0226973a66d73ae3e3e7fae00329294af4e27f1c42"}, + {file = "grpcio-1.56.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8674fdbd28266d8efbcddacf4ec3643f76fe6376f73283fd63a8374c14b0ef7c"}, + {file = "grpcio-1.56.0-cp38-cp38-win32.whl", hash = "sha256:bd55f743e654fb050c665968d7ec2c33f03578a4bbb163cfce38024775ff54cc"}, + {file = "grpcio-1.56.0-cp38-cp38-win_amd64.whl", hash = "sha256:c63bc5ac6c7e646c296fed9139097ae0f0e63f36f0864d7ce431cce61fe0118a"}, + {file = "grpcio-1.56.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:c0bc9dda550785d23f4f025be614b7faa8d0293e10811f0f8536cf50435b7a30"}, + {file = "grpcio-1.56.0-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:d596408bab632ec7b947761e83ce6b3e7632e26b76d64c239ba66b554b7ee286"}, + {file = "grpcio-1.56.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:76b6e6e1ee9bda32e6e933efd61c512e9a9f377d7c580977f090d1a9c78cca44"}, + {file = "grpcio-1.56.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7beb84ebd0a3f732625124b73969d12b7350c5d9d64ddf81ae739bbc63d5b1ed"}, + {file = "grpcio-1.56.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83ec714bbbe9b9502177c842417fde39f7a267031e01fa3cd83f1ca49688f537"}, + {file = "grpcio-1.56.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4feee75565d1b5ab09cb3a5da672b84ca7f6dd80ee07a50f5537207a9af543a4"}, + {file = "grpcio-1.56.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b4638a796778329cc8e142e4f57c705adb286b3ba64e00b0fa91eeb919611be8"}, + {file = "grpcio-1.56.0-cp39-cp39-win32.whl", hash = "sha256:437af5a7673bca89c4bc0a993382200592d104dd7bf55eddcd141cef91f40bab"}, + {file = "grpcio-1.56.0-cp39-cp39-win_amd64.whl", hash = "sha256:4241a1c2c76e748023c834995cd916570e7180ee478969c2d79a60ce007bc837"}, + {file = "grpcio-1.56.0.tar.gz", hash = "sha256:4c08ee21b3d10315b8dc26f6c13917b20ed574cdbed2d2d80c53d5508fdcc0f2"}, +] + +[package.extras] +protobuf = ["grpcio-tools (>=1.56.0)"] + +[[package]] +name = "idna" +version = "3.7" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, +] + +[[package]] +name = "importlib-metadata" +version = "6.11.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-6.11.0-py3-none-any.whl", hash = "sha256:f0afba6205ad8f8947c7d338b5342d5db2afbfd82f9cbef7879a9539cc12eb9b"}, + {file = "importlib_metadata-6.11.0.tar.gz", hash = "sha256:1231cf92d825c9e03cfc4da076a16de6422c863558229ea0b22b675657463443"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] + +[[package]] +name = "importlib-resources" +version = "6.4.0" +description = "Read resources from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_resources-6.4.0-py3-none-any.whl", hash = "sha256:50d10f043df931902d4194ea07ec57960f66a80449ff867bfe782b4c486ba78c"}, + {file = "importlib_resources-6.4.0.tar.gz", hash = "sha256:cdb2b453b8046ca4e3798eb1d84f3cce1446a0e8e7b5ef4efb600f19fc398145"}, +] + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["jaraco.test (>=5.4)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "joblib" +version = "1.4.0" +description = "Lightweight pipelining with Python functions" +optional = false +python-versions = ">=3.8" +files = [ + {file = "joblib-1.4.0-py3-none-any.whl", hash = "sha256:42942470d4062537be4d54c83511186da1fc14ba354961a2114da91efa9a4ed7"}, + {file = "joblib-1.4.0.tar.gz", hash = "sha256:1eb0dc091919cd384490de890cb5dfd538410a6d4b3b54eef09fb8c50b409b1c"}, +] + +[[package]] +name = "jsonpatch" +version = "1.33" +description = "Apply JSON-Patches (RFC 6902)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, + {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, +] + +[package.dependencies] +jsonpointer = ">=1.9" + +[[package]] +name = "jsonpointer" +version = "2.4" +description = "Identify specific nodes in a JSON document (RFC 6901)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, + {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, +] + +[[package]] +name = "jsonref" +version = "0.2" +description = "An implementation of JSON Reference for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, +] + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0" +setuptools = "*" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] + +[[package]] +name = "kiwisolver" +version = "1.4.5" +description = "A fast implementation of the Cassowary constraint solver" +optional = false +python-versions = ">=3.7" +files = [ + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:05703cf211d585109fcd72207a31bb170a0f22144d68298dc5e61b3c946518af"}, + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:146d14bebb7f1dc4d5fbf74f8a6cb15ac42baadee8912eb84ac0b3b2a3dc6ac3"}, + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ef7afcd2d281494c0a9101d5c571970708ad911d028137cd558f02b851c08b4"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9eaa8b117dc8337728e834b9c6e2611f10c79e38f65157c4c38e9400286f5cb1"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ec20916e7b4cbfb1f12380e46486ec4bcbaa91a9c448b97023fde0d5bbf9e4ff"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39b42c68602539407884cf70d6a480a469b93b81b7701378ba5e2328660c847a"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa12042de0171fad672b6c59df69106d20d5596e4f87b5e8f76df757a7c399aa"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a40773c71d7ccdd3798f6489aaac9eee213d566850a9533f8d26332d626b82c"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:19df6e621f6d8b4b9c4d45f40a66839294ff2bb235e64d2178f7522d9170ac5b"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:83d78376d0d4fd884e2c114d0621624b73d2aba4e2788182d286309ebdeed770"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e391b1f0a8a5a10ab3b9bb6afcfd74f2175f24f8975fb87ecae700d1503cdee0"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:852542f9481f4a62dbb5dd99e8ab7aedfeb8fb6342349a181d4036877410f525"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59edc41b24031bc25108e210c0def6f6c2191210492a972d585a06ff246bb79b"}, + {file = "kiwisolver-1.4.5-cp310-cp310-win32.whl", hash = "sha256:a6aa6315319a052b4ee378aa171959c898a6183f15c1e541821c5c59beaa0238"}, + {file = "kiwisolver-1.4.5-cp310-cp310-win_amd64.whl", hash = "sha256:d0ef46024e6a3d79c01ff13801cb19d0cad7fd859b15037aec74315540acc276"}, + {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:11863aa14a51fd6ec28688d76f1735f8f69ab1fabf388851a595d0721af042f5"}, + {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ab3919a9997ab7ef2fbbed0cc99bb28d3c13e6d4b1ad36e97e482558a91be90"}, + {file = "kiwisolver-1.4.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fcc700eadbbccbf6bc1bcb9dbe0786b4b1cb91ca0dcda336eef5c2beed37b797"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dfdd7c0b105af050eb3d64997809dc21da247cf44e63dc73ff0fd20b96be55a9"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76c6a5964640638cdeaa0c359382e5703e9293030fe730018ca06bc2010c4437"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbea0db94288e29afcc4c28afbf3a7ccaf2d7e027489c449cf7e8f83c6346eb9"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ceec1a6bc6cab1d6ff5d06592a91a692f90ec7505d6463a88a52cc0eb58545da"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:040c1aebeda72197ef477a906782b5ab0d387642e93bda547336b8957c61022e"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f91de7223d4c7b793867797bacd1ee53bfe7359bd70d27b7b58a04efbb9436c8"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:faae4860798c31530dd184046a900e652c95513796ef51a12bc086710c2eec4d"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0157420efcb803e71d1b28e2c287518b8808b7cf1ab8af36718fd0a2c453eb0"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:06f54715b7737c2fecdbf140d1afb11a33d59508a47bf11bb38ecf21dc9ab79f"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fdb7adb641a0d13bdcd4ef48e062363d8a9ad4a182ac7647ec88f695e719ae9f"}, + {file = "kiwisolver-1.4.5-cp311-cp311-win32.whl", hash = "sha256:bb86433b1cfe686da83ce32a9d3a8dd308e85c76b60896d58f082136f10bffac"}, + {file = "kiwisolver-1.4.5-cp311-cp311-win_amd64.whl", hash = "sha256:6c08e1312a9cf1074d17b17728d3dfce2a5125b2d791527f33ffbe805200a355"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:32d5cf40c4f7c7b3ca500f8985eb3fb3a7dfc023215e876f207956b5ea26632a"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f846c260f483d1fd217fe5ed7c173fb109efa6b1fc8381c8b7552c5781756192"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5ff5cf3571589b6d13bfbfd6bcd7a3f659e42f96b5fd1c4830c4cf21d4f5ef45"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7269d9e5f1084a653d575c7ec012ff57f0c042258bf5db0954bf551c158466e7"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da802a19d6e15dffe4b0c24b38b3af68e6c1a68e6e1d8f30148c83864f3881db"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3aba7311af82e335dd1e36ffff68aaca609ca6290c2cb6d821a39aa075d8e3ff"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:763773d53f07244148ccac5b084da5adb90bfaee39c197554f01b286cf869228"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2270953c0d8cdab5d422bee7d2007f043473f9d2999631c86a223c9db56cbd16"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d099e745a512f7e3bbe7249ca835f4d357c586d78d79ae8f1dcd4d8adeb9bda9"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:74db36e14a7d1ce0986fa104f7d5637aea5c82ca6326ed0ec5694280942d1162"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e5bab140c309cb3a6ce373a9e71eb7e4873c70c2dda01df6820474f9889d6d4"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0f114aa76dc1b8f636d077979c0ac22e7cd8f3493abbab152f20eb8d3cda71f3"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:88a2df29d4724b9237fc0c6eaf2a1adae0cdc0b3e9f4d8e7dc54b16812d2d81a"}, + {file = "kiwisolver-1.4.5-cp312-cp312-win32.whl", hash = "sha256:72d40b33e834371fd330fb1472ca19d9b8327acb79a5821d4008391db8e29f20"}, + {file = "kiwisolver-1.4.5-cp312-cp312-win_amd64.whl", hash = "sha256:2c5674c4e74d939b9d91dda0fae10597ac7521768fec9e399c70a1f27e2ea2d9"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3a2b053a0ab7a3960c98725cfb0bf5b48ba82f64ec95fe06f1d06c99b552e130"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cd32d6c13807e5c66a7cbb79f90b553642f296ae4518a60d8d76243b0ad2898"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59ec7b7c7e1a61061850d53aaf8e93db63dce0c936db1fda2658b70e4a1be709"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da4cfb373035def307905d05041c1d06d8936452fe89d464743ae7fb8371078b"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2400873bccc260b6ae184b2b8a4fec0e4082d30648eadb7c3d9a13405d861e89"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1b04139c4236a0f3aff534479b58f6f849a8b351e1314826c2d230849ed48985"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:4e66e81a5779b65ac21764c295087de82235597a2293d18d943f8e9e32746265"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7931d8f1f67c4be9ba1dd9c451fb0eeca1a25b89e4d3f89e828fe12a519b782a"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b3f7e75f3015df442238cca659f8baa5f42ce2a8582727981cbfa15fee0ee205"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:bbf1d63eef84b2e8c89011b7f2235b1e0bf7dacc11cac9431fc6468e99ac77fb"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4c380469bd3f970ef677bf2bcba2b6b0b4d5c75e7a020fb863ef75084efad66f"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-win32.whl", hash = "sha256:9408acf3270c4b6baad483865191e3e582b638b1654a007c62e3efe96f09a9a3"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-win_amd64.whl", hash = "sha256:5b94529f9b2591b7af5f3e0e730a4e0a41ea174af35a4fd067775f9bdfeee01a"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:11c7de8f692fc99816e8ac50d1d1aef4f75126eefc33ac79aac02c099fd3db71"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:53abb58632235cd154176ced1ae8f0d29a6657aa1aa9decf50b899b755bc2b93"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:88b9f257ca61b838b6f8094a62418421f87ac2a1069f7e896c36a7d86b5d4c29"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3195782b26fc03aa9c6913d5bad5aeb864bdc372924c093b0f1cebad603dd712"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc579bf0f502e54926519451b920e875f433aceb4624a3646b3252b5caa9e0b6"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a580c91d686376f0f7c295357595c5a026e6cbc3d77b7c36e290201e7c11ecb"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cfe6ab8da05c01ba6fbea630377b5da2cd9bcbc6338510116b01c1bc939a2c18"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d2e5a98f0ec99beb3c10e13b387f8db39106d53993f498b295f0c914328b1333"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a51a263952b1429e429ff236d2f5a21c5125437861baeed77f5e1cc2d2c7c6da"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3edd2fa14e68c9be82c5b16689e8d63d89fe927e56debd6e1dbce7a26a17f81b"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:74d1b44c6cfc897df648cc9fdaa09bc3e7679926e6f96df05775d4fb3946571c"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:76d9289ed3f7501012e05abb8358bbb129149dbd173f1f57a1bf1c22d19ab7cc"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:92dea1ffe3714fa8eb6a314d2b3c773208d865a0e0d35e713ec54eea08a66250"}, + {file = "kiwisolver-1.4.5-cp38-cp38-win32.whl", hash = "sha256:5c90ae8c8d32e472be041e76f9d2f2dbff4d0b0be8bd4041770eddb18cf49a4e"}, + {file = "kiwisolver-1.4.5-cp38-cp38-win_amd64.whl", hash = "sha256:c7940c1dc63eb37a67721b10d703247552416f719c4188c54e04334321351ced"}, + {file = "kiwisolver-1.4.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9407b6a5f0d675e8a827ad8742e1d6b49d9c1a1da5d952a67d50ef5f4170b18d"}, + {file = "kiwisolver-1.4.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15568384086b6df3c65353820a4473575dbad192e35010f622c6ce3eebd57af9"}, + {file = "kiwisolver-1.4.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0dc9db8e79f0036e8173c466d21ef18e1befc02de8bf8aa8dc0813a6dc8a7046"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cdc8a402aaee9a798b50d8b827d7ecf75edc5fb35ea0f91f213ff927c15f4ff0"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6c3bd3cde54cafb87d74d8db50b909705c62b17c2099b8f2e25b461882e544ff"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:955e8513d07a283056b1396e9a57ceddbd272d9252c14f154d450d227606eb54"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:346f5343b9e3f00b8db8ba359350eb124b98c99efd0b408728ac6ebf38173958"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9098e0049e88c6a24ff64545cdfc50807818ba6c1b739cae221bbbcbc58aad3"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:00bd361b903dc4bbf4eb165f24d1acbee754fce22ded24c3d56eec268658a5cf"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7b8b454bac16428b22560d0a1cf0a09875339cab69df61d7805bf48919415901"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f1d072c2eb0ad60d4c183f3fb44ac6f73fb7a8f16a2694a91f988275cbf352f9"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:31a82d498054cac9f6d0b53d02bb85811185bcb477d4b60144f915f3b3126342"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6512cb89e334e4700febbffaaa52761b65b4f5a3cf33f960213d5656cea36a77"}, + {file = "kiwisolver-1.4.5-cp39-cp39-win32.whl", hash = "sha256:9db8ea4c388fdb0f780fe91346fd438657ea602d58348753d9fb265ce1bca67f"}, + {file = "kiwisolver-1.4.5-cp39-cp39-win_amd64.whl", hash = "sha256:59415f46a37f7f2efeec758353dd2eae1b07640d8ca0f0c42548ec4125492635"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5c7b3b3a728dc6faf3fc372ef24f21d1e3cee2ac3e9596691d746e5a536de920"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:620ced262a86244e2be10a676b646f29c34537d0d9cc8eb26c08f53d98013390"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:378a214a1e3bbf5ac4a8708304318b4f890da88c9e6a07699c4ae7174c09a68d"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf7be1207676ac608a50cd08f102f6742dbfc70e8d60c4db1c6897f62f71523"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ba55dce0a9b8ff59495ddd050a0225d58bd0983d09f87cfe2b6aec4f2c1234e4"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fd32ea360bcbb92d28933fc05ed09bffcb1704ba3fc7942e81db0fd4f81a7892"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5e7139af55d1688f8b960ee9ad5adafc4ac17c1c473fe07133ac092310d76544"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dced8146011d2bc2e883f9bd68618b8247387f4bbec46d7392b3c3b032640126"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9bf3325c47b11b2e51bca0824ea217c7cd84491d8ac4eefd1e409705ef092bd"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5794cf59533bc3f1b1c821f7206a3617999db9fbefc345360aafe2e067514929"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e368f200bbc2e4f905b8e71eb38b3c04333bddaa6a2464a6355487b02bb7fb09"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5d706eba36b4c4d5bc6c6377bb6568098765e990cfc21ee16d13963fab7b3e7"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85267bd1aa8880a9c88a8cb71e18d3d64d2751a790e6ca6c27b8ccc724bcd5ad"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:210ef2c3a1f03272649aff1ef992df2e724748918c4bc2d5a90352849eb40bea"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:11d011a7574eb3b82bcc9c1a1d35c1d7075677fdd15de527d91b46bd35e935ee"}, + {file = "kiwisolver-1.4.5.tar.gz", hash = "sha256:e57e563a57fb22a142da34f38acc2fc1a5c864bc29ca1517a88abc963e60d6ec"}, +] + +[[package]] +name = "langchain" +version = "0.1.16" +description = "Building applications with LLMs through composability" +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langchain-0.1.16-py3-none-any.whl", hash = "sha256:bc074cc5e51fad79b9ead1572fc3161918d0f614a6c8f0460543d505ad249ac7"}, + {file = "langchain-0.1.16.tar.gz", hash = "sha256:b6bce78f8c071baa898884accfff15c3d81da2f0dd86c20e2f4c80b41463f49f"}, +] + +[package.dependencies] +aiohttp = ">=3.8.3,<4.0.0" +async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""} +dataclasses-json = ">=0.5.7,<0.7" +jsonpatch = ">=1.33,<2.0" +langchain-community = ">=0.0.32,<0.1" +langchain-core = ">=0.1.42,<0.2.0" +langchain-text-splitters = ">=0.0.1,<0.1" +langsmith = ">=0.1.17,<0.2.0" +numpy = ">=1,<2" +pydantic = ">=1,<3" +PyYAML = ">=5.3" +requests = ">=2,<3" +SQLAlchemy = ">=1.4,<3" +tenacity = ">=8.1.0,<9.0.0" + +[package.extras] +azure = ["azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-textanalytics (>=5.3.0,<6.0.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-core (>=1.26.4,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "azure-search-documents (==11.4.0b8)", "openai (<2)"] +clarifai = ["clarifai (>=9.1.0)"] +cli = ["typer (>=0.9.0,<0.10.0)"] +cohere = ["cohere (>=4,<6)"] +docarray = ["docarray[hnswlib] (>=0.32.0,<0.33.0)"] +embeddings = ["sentence-transformers (>=2,<3)"] +extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<6)", "couchbase (>=4.1.9,<5.0.0)", "dashvector (>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "langchain-openai (>=0.0.2,<0.1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] +javascript = ["esprima (>=4.0.1,<5.0.0)"] +llms = ["clarifai (>=9.1.0)", "cohere (>=4,<6)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (<2)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)"] +openai = ["openai (<2)", "tiktoken (>=0.3.2,<0.6.0)"] +qdrant = ["qdrant-client (>=1.3.1,<2.0.0)"] +text-helpers = ["chardet (>=5.1.0,<6.0.0)"] + +[[package]] +name = "langchain-community" +version = "0.0.32" +description = "Community contributed LangChain integrations." +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langchain_community-0.0.32-py3-none-any.whl", hash = "sha256:406977009999952d0705de3806de2b4867e9bb8eda8ca154a59c7a8ed58da38d"}, + {file = "langchain_community-0.0.32.tar.gz", hash = "sha256:1510217d646c8380f54e9850351f6d2a0b0dd73c501b666c6f4b40baa8160b29"}, +] + +[package.dependencies] +aiohttp = ">=3.8.3,<4.0.0" +dataclasses-json = ">=0.5.7,<0.7" +langchain-core = ">=0.1.41,<0.2.0" +langsmith = ">=0.1.0,<0.2.0" +numpy = ">=1,<2" +PyYAML = ">=5.3" +requests = ">=2,<3" +SQLAlchemy = ">=1.4,<3" +tenacity = ">=8.1.0,<9.0.0" + +[package.extras] +cli = ["typer (>=0.9.0,<0.10.0)"] +extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cloudpickle (>=2.0.0)", "cohere (>=4,<5)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "friendli-client (>=1.2.4,<2.0.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "httpx (>=0.24.1,<0.25.0)", "httpx-sse (>=0.4.0,<0.5.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "nvidia-riva-client (>=2.14.0,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "premai (>=0.3.25,<0.4.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pyjwt (>=2.8.0,<3.0.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "tidb-vector (>=0.0.3,<1.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "tree-sitter (>=0.20.2,<0.21.0)", "tree-sitter-languages (>=1.8.0,<2.0.0)", "upstash-redis (>=0.15.0,<0.16.0)", "vdms (>=0.0.20,<0.0.21)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] + +[[package]] +name = "langchain-core" +version = "0.1.42" +description = "Building applications with LLMs through composability" +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langchain_core-0.1.42-py3-none-any.whl", hash = "sha256:c5653ffa08a44f740295c157a24c0def4a753333f6a2c41f76bf431cd00be8b5"}, + {file = "langchain_core-0.1.42.tar.gz", hash = "sha256:40751bf60ea5d8e2b2efe65290db434717ee3834870c002e40e2811f09d814e6"}, +] + +[package.dependencies] +jsonpatch = ">=1.33,<2.0" +langsmith = ">=0.1.0,<0.2.0" +packaging = ">=23.2,<24.0" +pydantic = ">=1,<3" +PyYAML = ">=5.3" +tenacity = ">=8.1.0,<9.0.0" + +[package.extras] +extended-testing = ["jinja2 (>=3,<4)"] + +[[package]] +name = "langchain-text-splitters" +version = "0.0.1" +description = "LangChain text splitting utilities" +optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "langchain_text_splitters-0.0.1-py3-none-any.whl", hash = "sha256:f5b802f873f5ff6a8b9259ff34d53ed989666ef4e1582e6d1adb3b5520e3839a"}, + {file = "langchain_text_splitters-0.0.1.tar.gz", hash = "sha256:ac459fa98799f5117ad5425a9330b21961321e30bc19a2a2f9f761ddadd62aa1"}, +] + +[package.dependencies] +langchain-core = ">=0.1.28,<0.2.0" + +[package.extras] +extended-testing = ["lxml (>=5.1.0,<6.0.0)"] + +[[package]] +name = "langsmith" +version = "0.1.47" +description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langsmith-0.1.47-py3-none-any.whl", hash = "sha256:17b0a908b8d39b6da3ecff658c8c00304b0b62f59945a5e16c2da5a254ea21a6"}, + {file = "langsmith-0.1.47.tar.gz", hash = "sha256:f5ddd17628baa03a775525c5547a543a559313e425cdb2bf23579ffcf6056a76"}, +] + +[package.dependencies] +orjson = ">=3.9.14,<4.0.0" +pydantic = ">=1,<3" +requests = ">=2,<3" + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "marshmallow" +version = "3.21.1" +description = "A lightweight library for converting complex datatypes to and from native Python datatypes." +optional = false +python-versions = ">=3.8" +files = [ + {file = "marshmallow-3.21.1-py3-none-any.whl", hash = "sha256:f085493f79efb0644f270a9bf2892843142d80d7174bbbd2f3713f2a589dc633"}, + {file = "marshmallow-3.21.1.tar.gz", hash = "sha256:4e65e9e0d80fc9e609574b9983cf32579f305c718afb30d7233ab818571768c3"}, +] + +[package.dependencies] +packaging = ">=17.0" + +[package.extras] +dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] +docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.2.6)", "sphinx-issues (==4.0.0)", "sphinx-version-warning (==1.1.2)"] +tests = ["pytest", "pytz", "simplejson"] + +[[package]] +name = "matplotlib" +version = "3.8.4" +description = "Python plotting package" +optional = false +python-versions = ">=3.9" +files = [ + {file = "matplotlib-3.8.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:abc9d838f93583650c35eca41cfcec65b2e7cb50fd486da6f0c49b5e1ed23014"}, + {file = "matplotlib-3.8.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f65c9f002d281a6e904976007b2d46a1ee2bcea3a68a8c12dda24709ddc9106"}, + {file = "matplotlib-3.8.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce1edd9f5383b504dbc26eeea404ed0a00656c526638129028b758fd43fc5f10"}, + {file = "matplotlib-3.8.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ecd79298550cba13a43c340581a3ec9c707bd895a6a061a78fa2524660482fc0"}, + {file = "matplotlib-3.8.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:90df07db7b599fe7035d2f74ab7e438b656528c68ba6bb59b7dc46af39ee48ef"}, + {file = "matplotlib-3.8.4-cp310-cp310-win_amd64.whl", hash = "sha256:ac24233e8f2939ac4fd2919eed1e9c0871eac8057666070e94cbf0b33dd9c338"}, + {file = "matplotlib-3.8.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:72f9322712e4562e792b2961971891b9fbbb0e525011e09ea0d1f416c4645661"}, + {file = "matplotlib-3.8.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:232ce322bfd020a434caaffbd9a95333f7c2491e59cfc014041d95e38ab90d1c"}, + {file = "matplotlib-3.8.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6addbd5b488aedb7f9bc19f91cd87ea476206f45d7116fcfe3d31416702a82fa"}, + {file = "matplotlib-3.8.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc4ccdc64e3039fc303defd119658148f2349239871db72cd74e2eeaa9b80b71"}, + {file = "matplotlib-3.8.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b7a2a253d3b36d90c8993b4620183b55665a429da8357a4f621e78cd48b2b30b"}, + {file = "matplotlib-3.8.4-cp311-cp311-win_amd64.whl", hash = "sha256:8080d5081a86e690d7688ffa542532e87f224c38a6ed71f8fbed34dd1d9fedae"}, + {file = "matplotlib-3.8.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6485ac1f2e84676cff22e693eaa4fbed50ef5dc37173ce1f023daef4687df616"}, + {file = "matplotlib-3.8.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c89ee9314ef48c72fe92ce55c4e95f2f39d70208f9f1d9db4e64079420d8d732"}, + {file = "matplotlib-3.8.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50bac6e4d77e4262c4340d7a985c30912054745ec99756ce213bfbc3cb3808eb"}, + {file = "matplotlib-3.8.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f51c4c869d4b60d769f7b4406eec39596648d9d70246428745a681c327a8ad30"}, + {file = "matplotlib-3.8.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b12ba985837e4899b762b81f5b2845bd1a28f4fdd1a126d9ace64e9c4eb2fb25"}, + {file = "matplotlib-3.8.4-cp312-cp312-win_amd64.whl", hash = "sha256:7a6769f58ce51791b4cb8b4d7642489df347697cd3e23d88266aaaee93b41d9a"}, + {file = "matplotlib-3.8.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:843cbde2f0946dadd8c5c11c6d91847abd18ec76859dc319362a0964493f0ba6"}, + {file = "matplotlib-3.8.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1c13f041a7178f9780fb61cc3a2b10423d5e125480e4be51beaf62b172413b67"}, + {file = "matplotlib-3.8.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb44f53af0a62dc80bba4443d9b27f2fde6acfdac281d95bc872dc148a6509cc"}, + {file = "matplotlib-3.8.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:606e3b90897554c989b1e38a258c626d46c873523de432b1462f295db13de6f9"}, + {file = "matplotlib-3.8.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9bb0189011785ea794ee827b68777db3ca3f93f3e339ea4d920315a0e5a78d54"}, + {file = "matplotlib-3.8.4-cp39-cp39-win_amd64.whl", hash = "sha256:6209e5c9aaccc056e63b547a8152661324404dd92340a6e479b3a7f24b42a5d0"}, + {file = "matplotlib-3.8.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c7064120a59ce6f64103c9cefba8ffe6fba87f2c61d67c401186423c9a20fd35"}, + {file = "matplotlib-3.8.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0e47eda4eb2614300fc7bb4657fced3e83d6334d03da2173b09e447418d499f"}, + {file = "matplotlib-3.8.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:493e9f6aa5819156b58fce42b296ea31969f2aab71c5b680b4ea7a3cb5c07d94"}, + {file = "matplotlib-3.8.4.tar.gz", hash = "sha256:8aac397d5e9ec158960e31c381c5ffc52ddd52bd9a47717e2a694038167dffea"}, +] + +[package.dependencies] +contourpy = ">=1.0.1" +cycler = ">=0.10" +fonttools = ">=4.22.0" +importlib-resources = {version = ">=3.2.0", markers = "python_version < \"3.10\""} +kiwisolver = ">=1.3.1" +numpy = ">=1.21" +packaging = ">=20.0" +pillow = ">=8" +pyparsing = ">=2.3.1" +python-dateutil = ">=2.7" + +[[package]] +name = "multidict" +version = "6.0.5" +description = "multidict implementation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, + {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, + {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, + {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, + {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, + {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, + {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, + {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, + {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, + {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, + {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, + {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, + {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, + {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, + {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, + {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, +] + +[[package]] +name = "mypy" +version = "1.9.0" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"}, + {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"}, + {file = "mypy-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150"}, + {file = "mypy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374"}, + {file = "mypy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc"}, + {file = "mypy-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129"}, + {file = "mypy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612"}, + {file = "mypy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6"}, + {file = "mypy-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185"}, + {file = "mypy-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913"}, + {file = "mypy-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"}, + {file = "mypy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b"}, + {file = "mypy-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2"}, + {file = "mypy-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e"}, + {file = "mypy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04"}, + {file = "mypy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89"}, + {file = "mypy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02"}, + {file = "mypy-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4"}, + {file = "mypy-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d"}, + {file = "mypy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf"}, + {file = "mypy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9"}, + {file = "mypy-1.9.0-py3-none-any.whl", hash = "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e"}, + {file = "mypy-1.9.0.tar.gz", hash = "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.1.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "numpy" +version = "1.26.4" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, +] + +[[package]] +name = "openai" +version = "0.27.9" +description = "Python client library for the OpenAI API" +optional = false +python-versions = ">=3.7.1" +files = [ + {file = "openai-0.27.9-py3-none-any.whl", hash = "sha256:6a3cf8e276d1a6262b50562fbc0cba7967cfebb78ed827d375986b48fdad6475"}, + {file = "openai-0.27.9.tar.gz", hash = "sha256:b687761c82f5ebb6f61efc791b2083d2d068277b94802d4d1369efe39851813d"}, +] + +[package.dependencies] +aiohttp = "*" +matplotlib = {version = "*", optional = true, markers = "extra == \"embeddings\""} +numpy = {version = "*", optional = true, markers = "extra == \"embeddings\""} +openpyxl = {version = ">=3.0.7", optional = true, markers = "extra == \"embeddings\""} +pandas = {version = ">=1.2.3", optional = true, markers = "extra == \"embeddings\""} +pandas-stubs = {version = ">=1.1.0.11", optional = true, markers = "extra == \"embeddings\""} +plotly = {version = "*", optional = true, markers = "extra == \"embeddings\""} +requests = ">=2.20" +scikit-learn = {version = ">=1.0.2", optional = true, markers = "extra == \"embeddings\""} +scipy = {version = "*", optional = true, markers = "extra == \"embeddings\""} +tenacity = {version = ">=8.0.1", optional = true, markers = "extra == \"embeddings\""} +tqdm = "*" + +[package.extras] +datalib = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] +dev = ["black (>=21.6b0,<22.0)", "pytest (==6.*)", "pytest-asyncio", "pytest-mock"] +embeddings = ["matplotlib", "numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "plotly", "scikit-learn (>=1.0.2)", "scipy", "tenacity (>=8.0.1)"] +wandb = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "wandb"] + +[[package]] +name = "openpyxl" +version = "3.1.2" +description = "A Python library to read/write Excel 2010 xlsx/xlsm files" +optional = false +python-versions = ">=3.6" +files = [ + {file = "openpyxl-3.1.2-py2.py3-none-any.whl", hash = "sha256:f91456ead12ab3c6c2e9491cf33ba6d08357d802192379bb482f1033ade496f5"}, + {file = "openpyxl-3.1.2.tar.gz", hash = "sha256:a6f5977418eff3b2d5500d54d9db50c8277a368436f4e4f8ddb1be3422870184"}, +] + +[package.dependencies] +et-xmlfile = "*" + +[[package]] +name = "orjson" +version = "3.10.1" +description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +optional = false +python-versions = ">=3.8" +files = [ + {file = "orjson-3.10.1-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:8ec2fc456d53ea4a47768f622bb709be68acd455b0c6be57e91462259741c4f3"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e900863691d327758be14e2a491931605bd0aded3a21beb6ce133889830b659"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ab6ecbd6fe57785ebc86ee49e183f37d45f91b46fc601380c67c5c5e9c0014a2"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8af7c68b01b876335cccfb4eee0beef2b5b6eae1945d46a09a7c24c9faac7a77"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:915abfb2e528677b488a06eba173e9d7706a20fdfe9cdb15890b74ef9791b85e"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe3fd4a36eff9c63d25503b439531d21828da9def0059c4f472e3845a081aa0b"}, + {file = "orjson-3.10.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d229564e72cfc062e6481a91977a5165c5a0fdce11ddc19ced8471847a67c517"}, + {file = "orjson-3.10.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9e00495b18304173ac843b5c5fbea7b6f7968564d0d49bef06bfaeca4b656f4e"}, + {file = "orjson-3.10.1-cp310-none-win32.whl", hash = "sha256:fd78ec55179545c108174ba19c1795ced548d6cac4d80d014163033c047ca4ea"}, + {file = "orjson-3.10.1-cp310-none-win_amd64.whl", hash = "sha256:50ca42b40d5a442a9e22eece8cf42ba3d7cd4cd0f2f20184b4d7682894f05eec"}, + {file = "orjson-3.10.1-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b345a3d6953628df2f42502297f6c1e1b475cfbf6268013c94c5ac80e8abc04c"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:caa7395ef51af4190d2c70a364e2f42138e0e5fcb4bc08bc9b76997659b27dab"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b01d701decd75ae092e5f36f7b88a1e7a1d3bb7c9b9d7694de850fb155578d5a"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b5028981ba393f443d8fed9049211b979cadc9d0afecf162832f5a5b152c6297"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:31ff6a222ea362b87bf21ff619598a4dc1106aaafaea32b1c4876d692891ec27"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e852a83d7803d3406135fb7a57cf0c1e4a3e73bac80ec621bd32f01c653849c5"}, + {file = "orjson-3.10.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2567bc928ed3c3fcd90998009e8835de7c7dc59aabcf764b8374d36044864f3b"}, + {file = "orjson-3.10.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4ce98cac60b7bb56457bdd2ed7f0d5d7f242d291fdc0ca566c83fa721b52e92d"}, + {file = "orjson-3.10.1-cp311-none-win32.whl", hash = "sha256:813905e111318acb356bb8029014c77b4c647f8b03f314e7b475bd9ce6d1a8ce"}, + {file = "orjson-3.10.1-cp311-none-win_amd64.whl", hash = "sha256:03a3ca0b3ed52bed1a869163a4284e8a7b0be6a0359d521e467cdef7e8e8a3ee"}, + {file = "orjson-3.10.1-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:f02c06cee680b1b3a8727ec26c36f4b3c0c9e2b26339d64471034d16f74f4ef5"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1aa2f127ac546e123283e437cc90b5ecce754a22306c7700b11035dad4ccf85"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2cf29b4b74f585225196944dffdebd549ad2af6da9e80db7115984103fb18a96"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1b130c20b116f413caf6059c651ad32215c28500dce9cd029a334a2d84aa66f"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d31f9a709e6114492136e87c7c6da5e21dfedebefa03af85f3ad72656c493ae9"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d1d169461726f271ab31633cf0e7e7353417e16fb69256a4f8ecb3246a78d6e"}, + {file = "orjson-3.10.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:57c294d73825c6b7f30d11c9e5900cfec9a814893af7f14efbe06b8d0f25fba9"}, + {file = "orjson-3.10.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d7f11dbacfa9265ec76b4019efffabaabba7a7ebf14078f6b4df9b51c3c9a8ea"}, + {file = "orjson-3.10.1-cp312-none-win32.whl", hash = "sha256:d89e5ed68593226c31c76ab4de3e0d35c760bfd3fbf0a74c4b2be1383a1bf123"}, + {file = "orjson-3.10.1-cp312-none-win_amd64.whl", hash = "sha256:aa76c4fe147fd162107ce1692c39f7189180cfd3a27cfbc2ab5643422812da8e"}, + {file = "orjson-3.10.1-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a2c6a85c92d0e494c1ae117befc93cf8e7bca2075f7fe52e32698da650b2c6d1"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9813f43da955197d36a7365eb99bed42b83680801729ab2487fef305b9ced866"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ec917b768e2b34b7084cb6c68941f6de5812cc26c6f1a9fecb728e36a3deb9e8"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5252146b3172d75c8a6d27ebca59c9ee066ffc5a277050ccec24821e68742fdf"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:536429bb02791a199d976118b95014ad66f74c58b7644d21061c54ad284e00f4"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7dfed3c3e9b9199fb9c3355b9c7e4649b65f639e50ddf50efdf86b45c6de04b5"}, + {file = "orjson-3.10.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2b230ec35f188f003f5b543644ae486b2998f6afa74ee3a98fc8ed2e45960afc"}, + {file = "orjson-3.10.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:01234249ba19c6ab1eb0b8be89f13ea21218b2d72d496ef085cfd37e1bae9dd8"}, + {file = "orjson-3.10.1-cp38-none-win32.whl", hash = "sha256:8a884fbf81a3cc22d264ba780920d4885442144e6acaa1411921260416ac9a54"}, + {file = "orjson-3.10.1-cp38-none-win_amd64.whl", hash = "sha256:dab5f802d52b182163f307d2b1f727d30b1762e1923c64c9c56dd853f9671a49"}, + {file = "orjson-3.10.1-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a51fd55d4486bc5293b7a400f9acd55a2dc3b5fc8420d5ffe9b1d6bb1a056a5e"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53521542a6db1411b3bfa1b24ddce18605a3abdc95a28a67b33f9145f26aa8f2"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:27d610df96ac18ace4931411d489637d20ab3b8f63562b0531bba16011998db0"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79244b1456e5846d44e9846534bd9e3206712936d026ea8e6a55a7374d2c0694"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d751efaa8a49ae15cbebdda747a62a9ae521126e396fda8143858419f3b03610"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27ff69c620a4fff33267df70cfd21e0097c2a14216e72943bd5414943e376d77"}, + {file = "orjson-3.10.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ebc58693464146506fde0c4eb1216ff6d4e40213e61f7d40e2f0dde9b2f21650"}, + {file = "orjson-3.10.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5be608c3972ed902e0143a5b8776d81ac1059436915d42defe5c6ae97b3137a4"}, + {file = "orjson-3.10.1-cp39-none-win32.whl", hash = "sha256:4ae10753e7511d359405aadcbf96556c86e9dbf3a948d26c2c9f9a150c52b091"}, + {file = "orjson-3.10.1-cp39-none-win_amd64.whl", hash = "sha256:fb5bc4caa2c192077fdb02dce4e5ef8639e7f20bec4e3a834346693907362932"}, + {file = "orjson-3.10.1.tar.gz", hash = "sha256:a883b28d73370df23ed995c466b4f6c708c1f7a9bdc400fe89165c96c7603204"}, +] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pandas" +version = "2.2.2" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, + {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, + {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, + {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, + {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"}, + {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"}, + {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.22.4", markers = "python_version < \"3.11\""}, + {version = ">=1.23.2", markers = "python_version == \"3.11\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.7" + +[package.extras] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] + +[[package]] +name = "pandas-stubs" +version = "2.2.1.240316" +description = "Type annotations for pandas" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pandas_stubs-2.2.1.240316-py3-none-any.whl", hash = "sha256:0126a26451a37cb893ea62357ca87ba3d181bd999ec8ba2ca5602e20207d6682"}, + {file = "pandas_stubs-2.2.1.240316.tar.gz", hash = "sha256:236a4f812fb6b1922e9607ff09e427f6d8540c421c9e5a40e3e4ddf7adac7f05"}, +] + +[package.dependencies] +numpy = {version = ">=1.26.0", markers = "python_version < \"3.13\""} +types-pytz = ">=2022.1.1" + +[[package]] +name = "pendulum" +version = "2.1.2" +description = "Python datetimes made easy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, +] + +[package.dependencies] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" + +[[package]] +name = "pillow" +version = "10.3.0" +description = "Python Imaging Library (Fork)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pillow-10.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:90b9e29824800e90c84e4022dd5cc16eb2d9605ee13f05d47641eb183cd73d45"}, + {file = "pillow-10.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a2c405445c79c3f5a124573a051062300936b0281fee57637e706453e452746c"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78618cdbccaa74d3f88d0ad6cb8ac3007f1a6fa5c6f19af64b55ca170bfa1edf"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261ddb7ca91fcf71757979534fb4c128448b5b4c55cb6152d280312062f69599"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ce49c67f4ea0609933d01c0731b34b8695a7a748d6c8d186f95e7d085d2fe475"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b14f16f94cbc61215115b9b1236f9c18403c15dd3c52cf629072afa9d54c1cbf"}, + {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d33891be6df59d93df4d846640f0e46f1a807339f09e79a8040bc887bdcd7ed3"}, + {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b50811d664d392f02f7761621303eba9d1b056fb1868c8cdf4231279645c25f5"}, + {file = "pillow-10.3.0-cp310-cp310-win32.whl", hash = "sha256:ca2870d5d10d8726a27396d3ca4cf7976cec0f3cb706debe88e3a5bd4610f7d2"}, + {file = "pillow-10.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:f0d0591a0aeaefdaf9a5e545e7485f89910c977087e7de2b6c388aec32011e9f"}, + {file = "pillow-10.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:ccce24b7ad89adb5a1e34a6ba96ac2530046763912806ad4c247356a8f33a67b"}, + {file = "pillow-10.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:5f77cf66e96ae734717d341c145c5949c63180842a545c47a0ce7ae52ca83795"}, + {file = "pillow-10.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4b878386c4bf293578b48fc570b84ecfe477d3b77ba39a6e87150af77f40c57"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdcbb4068117dfd9ce0138d068ac512843c52295ed996ae6dd1faf537b6dbc27"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9797a6c8fe16f25749b371c02e2ade0efb51155e767a971c61734b1bf6293994"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:9e91179a242bbc99be65e139e30690e081fe6cb91a8e77faf4c409653de39451"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:1b87bd9d81d179bd8ab871603bd80d8645729939f90b71e62914e816a76fc6bd"}, + {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:81d09caa7b27ef4e61cb7d8fbf1714f5aec1c6b6c5270ee53504981e6e9121ad"}, + {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:048ad577748b9fa4a99a0548c64f2cb8d672d5bf2e643a739ac8faff1164238c"}, + {file = "pillow-10.3.0-cp311-cp311-win32.whl", hash = "sha256:7161ec49ef0800947dc5570f86568a7bb36fa97dd09e9827dc02b718c5643f09"}, + {file = "pillow-10.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:8eb0908e954d093b02a543dc963984d6e99ad2b5e36503d8a0aaf040505f747d"}, + {file = "pillow-10.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:4e6f7d1c414191c1199f8996d3f2282b9ebea0945693fb67392c75a3a320941f"}, + {file = "pillow-10.3.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:e46f38133e5a060d46bd630faa4d9fa0202377495df1f068a8299fd78c84de84"}, + {file = "pillow-10.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:50b8eae8f7334ec826d6eeffaeeb00e36b5e24aa0b9df322c247539714c6df19"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d3bea1c75f8c53ee4d505c3e67d8c158ad4df0d83170605b50b64025917f338"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19aeb96d43902f0a783946a0a87dbdad5c84c936025b8419da0a0cd7724356b1"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:74d28c17412d9caa1066f7a31df8403ec23d5268ba46cd0ad2c50fb82ae40462"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:ff61bfd9253c3915e6d41c651d5f962da23eda633cf02262990094a18a55371a"}, + {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d886f5d353333b4771d21267c7ecc75b710f1a73d72d03ca06df49b09015a9ef"}, + {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b5ec25d8b17217d635f8935dbc1b9aa5907962fae29dff220f2659487891cd3"}, + {file = "pillow-10.3.0-cp312-cp312-win32.whl", hash = "sha256:51243f1ed5161b9945011a7360e997729776f6e5d7005ba0c6879267d4c5139d"}, + {file = "pillow-10.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:412444afb8c4c7a6cc11a47dade32982439925537e483be7c0ae0cf96c4f6a0b"}, + {file = "pillow-10.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:798232c92e7665fe82ac085f9d8e8ca98826f8e27859d9a96b41d519ecd2e49a"}, + {file = "pillow-10.3.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:4eaa22f0d22b1a7e93ff0a596d57fdede2e550aecffb5a1ef1106aaece48e96b"}, + {file = "pillow-10.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cd5e14fbf22a87321b24c88669aad3a51ec052eb145315b3da3b7e3cc105b9a2"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1530e8f3a4b965eb6a7785cf17a426c779333eb62c9a7d1bbcf3ffd5bf77a4aa"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d512aafa1d32efa014fa041d38868fda85028e3f930a96f85d49c7d8ddc0383"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:339894035d0ede518b16073bdc2feef4c991ee991a29774b33e515f1d308e08d"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:aa7e402ce11f0885305bfb6afb3434b3cd8f53b563ac065452d9d5654c7b86fd"}, + {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0ea2a783a2bdf2a561808fe4a7a12e9aa3799b701ba305de596bc48b8bdfce9d"}, + {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c78e1b00a87ce43bb37642c0812315b411e856a905d58d597750eb79802aaaa3"}, + {file = "pillow-10.3.0-cp38-cp38-win32.whl", hash = "sha256:72d622d262e463dfb7595202d229f5f3ab4b852289a1cd09650362db23b9eb0b"}, + {file = "pillow-10.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:2034f6759a722da3a3dbd91a81148cf884e91d1b747992ca288ab88c1de15999"}, + {file = "pillow-10.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2ed854e716a89b1afcedea551cd85f2eb2a807613752ab997b9974aaa0d56936"}, + {file = "pillow-10.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dc1a390a82755a8c26c9964d457d4c9cbec5405896cba94cf51f36ea0d855002"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4203efca580f0dd6f882ca211f923168548f7ba334c189e9eab1178ab840bf60"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3102045a10945173d38336f6e71a8dc71bcaeed55c3123ad4af82c52807b9375"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6fb1b30043271ec92dc65f6d9f0b7a830c210b8a96423074b15c7bc999975f57"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:1dfc94946bc60ea375cc39cff0b8da6c7e5f8fcdc1d946beb8da5c216156ddd8"}, + {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b09b86b27a064c9624d0a6c54da01c1beaf5b6cadfa609cf63789b1d08a797b9"}, + {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d3b2348a78bc939b4fed6552abfd2e7988e0f81443ef3911a4b8498ca084f6eb"}, + {file = "pillow-10.3.0-cp39-cp39-win32.whl", hash = "sha256:45ebc7b45406febf07fef35d856f0293a92e7417ae7933207e90bf9090b70572"}, + {file = "pillow-10.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:0ba26351b137ca4e0db0342d5d00d2e355eb29372c05afd544ebf47c0956ffeb"}, + {file = "pillow-10.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:50fd3f6b26e3441ae07b7c979309638b72abc1a25da31a81a7fbd9495713ef4f"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6b02471b72526ab8a18c39cb7967b72d194ec53c1fd0a70b050565a0f366d355"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8ab74c06ffdab957d7670c2a5a6e1a70181cd10b727cd788c4dd9005b6a8acd9"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:048eeade4c33fdf7e08da40ef402e748df113fd0b4584e32c4af74fe78baaeb2"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e2ec1e921fd07c7cda7962bad283acc2f2a9ccc1b971ee4b216b75fad6f0463"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c8e73e99da7db1b4cad7f8d682cf6abad7844da39834c288fbfa394a47bbced"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:16563993329b79513f59142a6b02055e10514c1a8e86dca8b48a893e33cf91e3"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:dd78700f5788ae180b5ee8902c6aea5a5726bac7c364b202b4b3e3ba2d293170"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:aff76a55a8aa8364d25400a210a65ff59d0168e0b4285ba6bf2bd83cf675ba32"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b7bc2176354defba3edc2b9a777744462da2f8e921fbaf61e52acb95bafa9828"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:793b4e24db2e8742ca6423d3fde8396db336698c55cd34b660663ee9e45ed37f"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d93480005693d247f8346bc8ee28c72a2191bdf1f6b5db469c096c0c867ac015"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c83341b89884e2b2e55886e8fbbf37c3fa5efd6c8907124aeb72f285ae5696e5"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1a1d1915db1a4fdb2754b9de292642a39a7fb28f1736699527bb649484fb966a"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a0eaa93d054751ee9964afa21c06247779b90440ca41d184aeb5d410f20ff591"}, + {file = "pillow-10.3.0.tar.gz", hash = "sha256:9d2455fbf44c914840c793e89aa82d0e1763a14253a000743719ae5946814b2d"}, +] + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] +fpx = ["olefile"] +mic = ["olefile"] +tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] +typing = ["typing-extensions"] +xmp = ["defusedxml"] + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "plotly" +version = "5.20.0" +description = "An open-source, interactive data visualization library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "plotly-5.20.0-py3-none-any.whl", hash = "sha256:837a9c8aa90f2c0a2f0d747b82544d014dc2a2bdde967b5bb1da25b53932d1a9"}, + {file = "plotly-5.20.0.tar.gz", hash = "sha256:bf901c805d22032cfa534b2ff7c5aa6b0659e037f19ec1e0cca7f585918b5c89"}, +] + +[package.dependencies] +packaging = "*" +tenacity = ">=6.2.0" + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "protobuf" +version = "5.26.1" +description = "" +optional = false +python-versions = ">=3.8" +files = [ + {file = "protobuf-5.26.1-cp310-abi3-win32.whl", hash = "sha256:3c388ea6ddfe735f8cf69e3f7dc7611e73107b60bdfcf5d0f024c3ccd3794e23"}, + {file = "protobuf-5.26.1-cp310-abi3-win_amd64.whl", hash = "sha256:e6039957449cb918f331d32ffafa8eb9255769c96aa0560d9a5bf0b4e00a2a33"}, + {file = "protobuf-5.26.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:38aa5f535721d5bb99861166c445c4105c4e285c765fbb2ac10f116e32dcd46d"}, + {file = "protobuf-5.26.1-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:fbfe61e7ee8c1860855696e3ac6cfd1b01af5498facc6834fcc345c9684fb2ca"}, + {file = "protobuf-5.26.1-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:f7417703f841167e5a27d48be13389d52ad705ec09eade63dfc3180a959215d7"}, + {file = "protobuf-5.26.1-cp38-cp38-win32.whl", hash = "sha256:d693d2504ca96750d92d9de8a103102dd648fda04540495535f0fec7577ed8fc"}, + {file = "protobuf-5.26.1-cp38-cp38-win_amd64.whl", hash = "sha256:9b557c317ebe6836835ec4ef74ec3e994ad0894ea424314ad3552bc6e8835b4e"}, + {file = "protobuf-5.26.1-cp39-cp39-win32.whl", hash = "sha256:b9ba3ca83c2e31219ffbeb9d76b63aad35a3eb1544170c55336993d7a18ae72c"}, + {file = "protobuf-5.26.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ee014c2c87582e101d6b54260af03b6596728505c79f17c8586e7523aaa8f8c"}, + {file = "protobuf-5.26.1-py3-none-any.whl", hash = "sha256:da612f2720c0183417194eeaa2523215c4fcc1a1949772dc65f05047e08d5932"}, + {file = "protobuf-5.26.1.tar.gz", hash = "sha256:8ca2a1d97c290ec7b16e4e5dff2e5ae150cc1582f55b5ab300d45cb0dfa90e51"}, +] + +[[package]] +name = "pydantic" +version = "1.10.15" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50f1666a9940d3d68683c9d96e39640f709d7a72ff8702987dab1761036206bb"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82790d4753ee5d00739d6cb5cf56bceb186d9d6ce134aca3ba7befb1eedbc2c8"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d207d5b87f6cbefbdb1198154292faee8017d7495a54ae58db06762004500d00"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e49db944fad339b2ccb80128ffd3f8af076f9f287197a480bf1e4ca053a866f0"}, + {file = "pydantic-1.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:d3b5c4cbd0c9cb61bbbb19ce335e1f8ab87a811f6d589ed52b0254cf585d709c"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3d5731a120752248844676bf92f25a12f6e45425e63ce22e0849297a093b5b0"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c365ad9c394f9eeffcb30a82f4246c0006417f03a7c0f8315d6211f25f7cb654"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3287e1614393119c67bd4404f46e33ae3be3ed4cd10360b48d0a4459f420c6a3"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be51dd2c8596b25fe43c0a4a59c2bee4f18d88efb8031188f9e7ddc6b469cf44"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6a51a1dd4aa7b3f1317f65493a182d3cff708385327c1c82c81e4a9d6d65b2e4"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4e316e54b5775d1eb59187f9290aeb38acf620e10f7fd2f776d97bb788199e53"}, + {file = "pydantic-1.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:0d142fa1b8f2f0ae11ddd5e3e317dcac060b951d605fda26ca9b234b92214986"}, + {file = "pydantic-1.10.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7ea210336b891f5ea334f8fc9f8f862b87acd5d4a0cbc9e3e208e7aa1775dabf"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3453685ccd7140715e05f2193d64030101eaad26076fad4e246c1cc97e1bb30d"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bea1f03b8d4e8e86702c918ccfd5d947ac268f0f0cc6ed71782e4b09353b26f"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:af9850d98fc21e5bc24ea9e35dd80a29faf6462c608728a110c0a30b595e58b7"}, + {file = "pydantic-1.10.15-cp37-cp37m-win_amd64.whl", hash = "sha256:d31ee5b14a82c9afe2bd26aaa405293d4237d0591527d9129ce36e58f19f95c1"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5e09c19df304b8123938dc3c53d3d3be6ec74b9d7d0d80f4f4b5432ae16c2022"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7ac9237cd62947db00a0d16acf2f3e00d1ae9d3bd602b9c415f93e7a9fc10528"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:584f2d4c98ffec420e02305cf675857bae03c9d617fcfdc34946b1160213a948"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbc6989fad0c030bd70a0b6f626f98a862224bc2b1e36bfc531ea2facc0a340c"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d573082c6ef99336f2cb5b667b781d2f776d4af311574fb53d908517ba523c22"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6bd7030c9abc80134087d8b6e7aa957e43d35714daa116aced57269a445b8f7b"}, + {file = "pydantic-1.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:3350f527bb04138f8aff932dc828f154847fbdc7a1a44c240fbfff1b57f49a12"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51d405b42f1b86703555797270e4970a9f9bd7953f3990142e69d1037f9d9e51"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a980a77c52723b0dc56640ced396b73a024d4b74f02bcb2d21dbbac1debbe9d0"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f1a1fb467d3f49e1708a3f632b11c69fccb4e748a325d5a491ddc7b5d22383"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:676ed48f2c5bbad835f1a8ed8a6d44c1cd5a21121116d2ac40bd1cd3619746ed"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92229f73400b80c13afcd050687f4d7e88de9234d74b27e6728aa689abcf58cc"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2746189100c646682eff0bce95efa7d2e203420d8e1c613dc0c6b4c1d9c1fde4"}, + {file = "pydantic-1.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:394f08750bd8eaad714718812e7fab615f873b3cdd0b9d84e76e51ef3b50b6b7"}, + {file = "pydantic-1.10.15-py3-none-any.whl", hash = "sha256:28e552a060ba2740d0d2aabe35162652c1459a0b9069fe0db7f4ee0e18e74d58"}, + {file = "pydantic-1.10.15.tar.gz", hash = "sha256:ca832e124eda231a60a041da4f013e3ff24949d94a01154b137fc2f2a43c3ffb"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pymilvus" +version = "2.3.0" +description = "Python Sdk for Milvus" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pymilvus-2.3.0-py3-none-any.whl", hash = "sha256:0be97b387eb1ef16520d33b2ee876f369bafd8e8f383affd3e17ddb2cab0bbba"}, + {file = "pymilvus-2.3.0.tar.gz", hash = "sha256:0a1baafa99d37e6c2159bfb1462d2052f177b973375c60f487984130b036faf5"}, +] + +[package.dependencies] +environs = "<=9.5.0" +grpcio = ">=1.49.1,<=1.56.0" +pandas = ">=1.2.4" +protobuf = ">=3.20.0" +ujson = ">=2.0.0" + +[[package]] +name = "pyparsing" +version = "3.1.2" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +optional = false +python-versions = ">=3.6.8" +files = [ + {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, + {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, +] + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + +[[package]] +name = "pyrsistent" +version = "0.20.0" +description = "Persistent/Functional/Immutable data structures" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, +] + +[[package]] +name = "pytest" +version = "7.4.4" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-dotenv" +version = "1.0.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "regex" +version = "2023.12.25" +description = "Alternative regular expression module, to replace re." +optional = false +python-versions = ">=3.7" +files = [ + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"}, + {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"}, + {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"}, + {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"}, + {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"}, + {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"}, + {file = "regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"}, + {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"}, + {file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = "sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"}, + {file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"}, + {file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"}, + {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"}, + {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"}, + {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"}, + {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-cache" +version = "1.2.0" +description = "A persistent cache for python requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, +] + +[package.dependencies] +attrs = ">=21.2" +cattrs = ">=22.2" +platformdirs = ">=2.5" +requests = ">=2.22" +url-normalize = ">=1.4" +urllib3 = ">=1.25.5" + +[package.extras] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] +bson = ["bson (>=0.5)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] +dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] +json = ["ujson (>=5.4)"] +mongodb = ["pymongo (>=3)"] +redis = ["redis (>=3)"] +security = ["itsdangerous (>=2.0)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "ruff" +version = "0.3.7" +description = "An extremely fast Python linter and code formatter, written in Rust." +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0e8377cccb2f07abd25e84fc5b2cbe48eeb0fea9f1719cad7caedb061d70e5ce"}, + {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:15a4d1cc1e64e556fa0d67bfd388fed416b7f3b26d5d1c3e7d192c897e39ba4b"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d28bdf3d7dc71dd46929fafeec98ba89b7c3550c3f0978e36389b5631b793663"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:379b67d4f49774ba679593b232dcd90d9e10f04d96e3c8ce4a28037ae473f7bb"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c060aea8ad5ef21cdfbbe05475ab5104ce7827b639a78dd55383a6e9895b7c51"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:ebf8f615dde968272d70502c083ebf963b6781aacd3079081e03b32adfe4d58a"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d48098bd8f5c38897b03604f5428901b65e3c97d40b3952e38637b5404b739a2"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da8a4fda219bf9024692b1bc68c9cff4b80507879ada8769dc7e985755d662ea"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c44e0149f1d8b48c4d5c33d88c677a4aa22fd09b1683d6a7ff55b816b5d074f"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3050ec0af72b709a62ecc2aca941b9cd479a7bf2b36cc4562f0033d688e44fa1"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a29cc38e4c1ab00da18a3f6777f8b50099d73326981bb7d182e54a9a21bb4ff7"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5b15cc59c19edca917f51b1956637db47e200b0fc5e6e1878233d3a938384b0b"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e491045781b1e38b72c91247cf4634f040f8d0cb3e6d3d64d38dcf43616650b4"}, + {file = "ruff-0.3.7-py3-none-win32.whl", hash = "sha256:bc931de87593d64fad3a22e201e55ad76271f1d5bfc44e1a1887edd0903c7d9f"}, + {file = "ruff-0.3.7-py3-none-win_amd64.whl", hash = "sha256:5ef0e501e1e39f35e03c2acb1d1238c595b8bb36cf7a170e7c1df1b73da00e74"}, + {file = "ruff-0.3.7-py3-none-win_arm64.whl", hash = "sha256:789e144f6dc7019d1f92a812891c645274ed08af6037d11fc65fcbc183b7d59f"}, + {file = "ruff-0.3.7.tar.gz", hash = "sha256:d5c1aebee5162c2226784800ae031f660c350e7a3402c4d1f8ea4e97e232e3ba"}, +] + +[[package]] +name = "scikit-learn" +version = "1.4.2" +description = "A set of python modules for machine learning and data mining" +optional = false +python-versions = ">=3.9" +files = [ + {file = "scikit-learn-1.4.2.tar.gz", hash = "sha256:daa1c471d95bad080c6e44b4946c9390a4842adc3082572c20e4f8884e39e959"}, + {file = "scikit_learn-1.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8539a41b3d6d1af82eb629f9c57f37428ff1481c1e34dddb3b9d7af8ede67ac5"}, + {file = "scikit_learn-1.4.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:68b8404841f944a4a1459b07198fa2edd41a82f189b44f3e1d55c104dbc2e40c"}, + {file = "scikit_learn-1.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81bf5d8bbe87643103334032dd82f7419bc8c8d02a763643a6b9a5c7288c5054"}, + {file = "scikit_learn-1.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36f0ea5d0f693cb247a073d21a4123bdf4172e470e6d163c12b74cbb1536cf38"}, + {file = "scikit_learn-1.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:87440e2e188c87db80ea4023440923dccbd56fbc2d557b18ced00fef79da0727"}, + {file = "scikit_learn-1.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:45dee87ac5309bb82e3ea633955030df9bbcb8d2cdb30383c6cd483691c546cc"}, + {file = "scikit_learn-1.4.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:1d0b25d9c651fd050555aadd57431b53d4cf664e749069da77f3d52c5ad14b3b"}, + {file = "scikit_learn-1.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0203c368058ab92efc6168a1507d388d41469c873e96ec220ca8e74079bf62e"}, + {file = "scikit_learn-1.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44c62f2b124848a28fd695db5bc4da019287abf390bfce602ddc8aa1ec186aae"}, + {file = "scikit_learn-1.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:5cd7b524115499b18b63f0c96f4224eb885564937a0b3477531b2b63ce331904"}, + {file = "scikit_learn-1.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:90378e1747949f90c8f385898fff35d73193dfcaec3dd75d6b542f90c4e89755"}, + {file = "scikit_learn-1.4.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:ff4effe5a1d4e8fed260a83a163f7dbf4f6087b54528d8880bab1d1377bd78be"}, + {file = "scikit_learn-1.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:671e2f0c3f2c15409dae4f282a3a619601fa824d2c820e5b608d9d775f91780c"}, + {file = "scikit_learn-1.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d36d0bc983336bbc1be22f9b686b50c964f593c8a9a913a792442af9bf4f5e68"}, + {file = "scikit_learn-1.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:d762070980c17ba3e9a4a1e043ba0518ce4c55152032f1af0ca6f39b376b5928"}, + {file = "scikit_learn-1.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d9993d5e78a8148b1d0fdf5b15ed92452af5581734129998c26f481c46586d68"}, + {file = "scikit_learn-1.4.2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:426d258fddac674fdf33f3cb2d54d26f49406e2599dbf9a32b4d1696091d4256"}, + {file = "scikit_learn-1.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5460a1a5b043ae5ae4596b3126a4ec33ccba1b51e7ca2c5d36dac2169f62ab1d"}, + {file = "scikit_learn-1.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49d64ef6cb8c093d883e5a36c4766548d974898d378e395ba41a806d0e824db8"}, + {file = "scikit_learn-1.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:c97a50b05c194be9146d61fe87dbf8eac62b203d9e87a3ccc6ae9aed2dfaf361"}, +] + +[package.dependencies] +joblib = ">=1.2.0" +numpy = ">=1.19.5" +scipy = ">=1.6.0" +threadpoolctl = ">=2.0.0" + +[package.extras] +benchmark = ["matplotlib (>=3.3.4)", "memory-profiler (>=0.57.0)", "pandas (>=1.1.5)"] +docs = ["Pillow (>=7.1.2)", "matplotlib (>=3.3.4)", "memory-profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)", "sphinx (>=6.0.0)", "sphinx-copybutton (>=0.5.2)", "sphinx-gallery (>=0.15.0)", "sphinx-prompt (>=1.3.0)", "sphinxext-opengraph (>=0.4.2)"] +examples = ["matplotlib (>=3.3.4)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)"] +tests = ["black (>=23.3.0)", "matplotlib (>=3.3.4)", "mypy (>=1.3)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "polars (>=0.19.12)", "pooch (>=1.6.0)", "pyamg (>=4.0.0)", "pyarrow (>=12.0.0)", "pytest (>=7.1.2)", "pytest-cov (>=2.9.0)", "ruff (>=0.0.272)", "scikit-image (>=0.17.2)"] + +[[package]] +name = "scipy" +version = "1.13.0" +description = "Fundamental algorithms for scientific computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "scipy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ba419578ab343a4e0a77c0ef82f088238a93eef141b2b8017e46149776dfad4d"}, + {file = "scipy-1.13.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:22789b56a999265431c417d462e5b7f2b487e831ca7bef5edeb56efe4c93f86e"}, + {file = "scipy-1.13.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05f1432ba070e90d42d7fd836462c50bf98bd08bed0aa616c359eed8a04e3922"}, + {file = "scipy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8434f6f3fa49f631fae84afee424e2483289dfc30a47755b4b4e6b07b2633a4"}, + {file = "scipy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:dcbb9ea49b0167de4167c40eeee6e167caeef11effb0670b554d10b1e693a8b9"}, + {file = "scipy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:1d2f7bb14c178f8b13ebae93f67e42b0a6b0fc50eba1cd8021c9b6e08e8fb1cd"}, + {file = "scipy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fbcf8abaf5aa2dc8d6400566c1a727aed338b5fe880cde64907596a89d576fa"}, + {file = "scipy-1.13.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:5e4a756355522eb60fcd61f8372ac2549073c8788f6114449b37e9e8104f15a5"}, + {file = "scipy-1.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5acd8e1dbd8dbe38d0004b1497019b2dbbc3d70691e65d69615f8a7292865d7"}, + {file = "scipy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ff7dad5d24a8045d836671e082a490848e8639cabb3dbdacb29f943a678683d"}, + {file = "scipy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4dca18c3ffee287ddd3bc8f1dabaf45f5305c5afc9f8ab9cbfab855e70b2df5c"}, + {file = "scipy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:a2f471de4d01200718b2b8927f7d76b5d9bde18047ea0fa8bd15c5ba3f26a1d6"}, + {file = "scipy-1.13.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d0de696f589681c2802f9090fff730c218f7c51ff49bf252b6a97ec4a5d19e8b"}, + {file = "scipy-1.13.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:b2a3ff461ec4756b7e8e42e1c681077349a038f0686132d623fa404c0bee2551"}, + {file = "scipy-1.13.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf9fe63e7a4bf01d3645b13ff2aa6dea023d38993f42aaac81a18b1bda7a82a"}, + {file = "scipy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e7626dfd91cdea5714f343ce1176b6c4745155d234f1033584154f60ef1ff42"}, + {file = "scipy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:109d391d720fcebf2fbe008621952b08e52907cf4c8c7efc7376822151820820"}, + {file = "scipy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:8930ae3ea371d6b91c203b1032b9600d69c568e537b7988a3073dfe4d4774f21"}, + {file = "scipy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5407708195cb38d70fd2d6bb04b1b9dd5c92297d86e9f9daae1576bd9e06f602"}, + {file = "scipy-1.13.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:ac38c4c92951ac0f729c4c48c9e13eb3675d9986cc0c83943784d7390d540c78"}, + {file = "scipy-1.13.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09c74543c4fbeb67af6ce457f6a6a28e5d3739a87f62412e4a16e46f164f0ae5"}, + {file = "scipy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28e286bf9ac422d6beb559bc61312c348ca9b0f0dae0d7c5afde7f722d6ea13d"}, + {file = "scipy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:33fde20efc380bd23a78a4d26d59fc8704e9b5fd9b08841693eb46716ba13d86"}, + {file = "scipy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:45c08bec71d3546d606989ba6e7daa6f0992918171e2a6f7fbedfa7361c2de1e"}, + {file = "scipy-1.13.0.tar.gz", hash = "sha256:58569af537ea29d3f78e5abd18398459f195546bb3be23d16677fb26616cc11e"}, +] + +[package.dependencies] +numpy = ">=1.22.4,<2.3" + +[package.extras] +dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy", "pycodestyle", "pydevtool", "rich-click", "ruff", "types-psutil", "typing_extensions"] +doc = ["jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.12.0)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0)", "sphinx-design (>=0.4.0)"] +test = ["array-api-strict", "asv", "gmpy2", "hypothesis (>=6.30)", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] + +[[package]] +name = "setuptools" +version = "69.5.1" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"}, + {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "sqlalchemy" +version = "2.0.29" +description = "Database Abstraction Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "SQLAlchemy-2.0.29-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4c142852ae192e9fe5aad5c350ea6befe9db14370b34047e1f0f7cf99e63c63b"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:99a1e69d4e26f71e750e9ad6fdc8614fbddb67cfe2173a3628a2566034e223c7"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ef3fbccb4058355053c51b82fd3501a6e13dd808c8d8cd2561e610c5456013c"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d6753305936eddc8ed190e006b7bb33a8f50b9854823485eed3a886857ab8d1"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0f3ca96af060a5250a8ad5a63699180bc780c2edf8abf96c58af175921df847a"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c4520047006b1d3f0d89e0532978c0688219857eb2fee7c48052560ae76aca1e"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-win32.whl", hash = "sha256:b2a0e3cf0caac2085ff172c3faacd1e00c376e6884b5bc4dd5b6b84623e29e4f"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-win_amd64.whl", hash = "sha256:01d10638a37460616708062a40c7b55f73e4d35eaa146781c683e0fa7f6c43fb"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:308ef9cb41d099099fffc9d35781638986870b29f744382904bf9c7dadd08513"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:296195df68326a48385e7a96e877bc19aa210e485fa381c5246bc0234c36c78e"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a13b917b4ffe5a0a31b83d051d60477819ddf18276852ea68037a144a506efb9"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f6d971255d9ddbd3189e2e79d743ff4845c07f0633adfd1de3f63d930dbe673"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:61405ea2d563407d316c63a7b5271ae5d274a2a9fbcd01b0aa5503635699fa1e"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:de7202ffe4d4a8c1e3cde1c03e01c1a3772c92858837e8f3879b497158e4cb44"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-win32.whl", hash = "sha256:b5d7ed79df55a731749ce65ec20d666d82b185fa4898430b17cb90c892741520"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-win_amd64.whl", hash = "sha256:205f5a2b39d7c380cbc3b5dcc8f2762fb5bcb716838e2d26ccbc54330775b003"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d96710d834a6fb31e21381c6d7b76ec729bd08c75a25a5184b1089141356171f"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:52de4736404e53c5c6a91ef2698c01e52333988ebdc218f14c833237a0804f1b"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c7b02525ede2a164c5fa5014915ba3591730f2cc831f5be9ff3b7fd3e30958e"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dfefdb3e54cd15f5d56fd5ae32f1da2d95d78319c1f6dfb9bcd0eb15d603d5d"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a88913000da9205b13f6f195f0813b6ffd8a0c0c2bd58d499e00a30eb508870c"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fecd5089c4be1bcc37c35e9aa678938d2888845a134dd016de457b942cf5a758"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-win32.whl", hash = "sha256:8197d6f7a3d2b468861ebb4c9f998b9df9e358d6e1cf9c2a01061cb9b6cf4e41"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-win_amd64.whl", hash = "sha256:9b19836ccca0d321e237560e475fd99c3d8655d03da80c845c4da20dda31b6e1"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:87a1d53a5382cdbbf4b7619f107cc862c1b0a4feb29000922db72e5a66a5ffc0"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a0732dffe32333211801b28339d2a0babc1971bc90a983e3035e7b0d6f06b93"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90453597a753322d6aa770c5935887ab1fc49cc4c4fdd436901308383d698b4b"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ea311d4ee9a8fa67f139c088ae9f905fcf0277d6cd75c310a21a88bf85e130f5"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5f20cb0a63a3e0ec4e169aa8890e32b949c8145983afa13a708bc4b0a1f30e03"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-win32.whl", hash = "sha256:e5bbe55e8552019c6463709b39634a5fc55e080d0827e2a3a11e18eb73f5cdbd"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-win_amd64.whl", hash = "sha256:c2f9c762a2735600654c654bf48dad388b888f8ce387b095806480e6e4ff6907"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7e614d7a25a43a9f54fcce4675c12761b248547f3d41b195e8010ca7297c369c"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:471fcb39c6adf37f820350c28aac4a7df9d3940c6548b624a642852e727ea586"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:988569c8732f54ad3234cf9c561364221a9e943b78dc7a4aaf35ccc2265f1930"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dddaae9b81c88083e6437de95c41e86823d150f4ee94bf24e158a4526cbead01"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:334184d1ab8f4c87f9652b048af3f7abea1c809dfe526fb0435348a6fef3d380"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:38b624e5cf02a69b113c8047cf7f66b5dfe4a2ca07ff8b8716da4f1b3ae81567"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-win32.whl", hash = "sha256:bab41acf151cd68bc2b466deae5deeb9e8ae9c50ad113444151ad965d5bf685b"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-win_amd64.whl", hash = "sha256:52c8011088305476691b8750c60e03b87910a123cfd9ad48576d6414b6ec2a1d"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3071ad498896907a5ef756206b9dc750f8e57352113c19272bdfdc429c7bd7de"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dba622396a3170974f81bad49aacebd243455ec3cc70615aeaef9e9613b5bca5"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b184e3de58009cc0bf32e20f137f1ec75a32470f5fede06c58f6c355ed42a72"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c37f1050feb91f3d6c32f864d8e114ff5545a4a7afe56778d76a9aec62638ba"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bda7ce59b06d0f09afe22c56714c65c957b1068dee3d5e74d743edec7daba552"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:25664e18bef6dc45015b08f99c63952a53a0a61f61f2e48a9e70cec27e55f699"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-win32.whl", hash = "sha256:77d29cb6c34b14af8a484e831ab530c0f7188f8efed1c6a833a2c674bf3c26ec"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-win_amd64.whl", hash = "sha256:04c487305ab035a9548f573763915189fc0fe0824d9ba28433196f8436f1449c"}, + {file = "SQLAlchemy-2.0.29-py3-none-any.whl", hash = "sha256:dc4ee2d4ee43251905f88637d5281a8d52e916a021384ec10758826f5cbae305"}, + {file = "SQLAlchemy-2.0.29.tar.gz", hash = "sha256:bd9566b8e58cabd700bc367b60e90d9349cd16f0984973f98a9a09f9c64e86f0"}, +] + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} +typing-extensions = ">=4.6.0" + +[package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=8)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3_binary"] + +[[package]] +name = "tenacity" +version = "8.2.3" +description = "Retry code until it succeeds" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tenacity-8.2.3-py3-none-any.whl", hash = "sha256:ce510e327a630c9e1beaf17d42e6ffacc88185044ad85cf74c0a8887c6a0f88c"}, + {file = "tenacity-8.2.3.tar.gz", hash = "sha256:5398ef0d78e63f40007c1fb4c0bff96e1911394d2fa8d194f77619c05ff6cc8a"}, +] + +[package.extras] +doc = ["reno", "sphinx", "tornado (>=4.5)"] + +[[package]] +name = "threadpoolctl" +version = "3.4.0" +description = "threadpoolctl" +optional = false +python-versions = ">=3.8" +files = [ + {file = "threadpoolctl-3.4.0-py3-none-any.whl", hash = "sha256:8f4c689a65b23e5ed825c8436a92b818aac005e0f3715f6a1664d7c7ee29d262"}, + {file = "threadpoolctl-3.4.0.tar.gz", hash = "sha256:f11b491a03661d6dd7ef692dd422ab34185d982466c49c8f98c8f716b5c93196"}, +] + +[[package]] +name = "tiktoken" +version = "0.4.0" +description = "tiktoken is a fast BPE tokeniser for use with OpenAI's models" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tiktoken-0.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:176cad7f053d2cc82ce7e2a7c883ccc6971840a4b5276740d0b732a2b2011f8a"}, + {file = "tiktoken-0.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:450d504892b3ac80207700266ee87c932df8efea54e05cefe8613edc963c1285"}, + {file = "tiktoken-0.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00d662de1e7986d129139faf15e6a6ee7665ee103440769b8dedf3e7ba6ac37f"}, + {file = "tiktoken-0.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5727d852ead18b7927b8adf558a6f913a15c7766725b23dbe21d22e243041b28"}, + {file = "tiktoken-0.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c06cd92b09eb0404cedce3702fa866bf0d00e399439dad3f10288ddc31045422"}, + {file = "tiktoken-0.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9ec161e40ed44e4210d3b31e2ff426b4a55e8254f1023e5d2595cb60044f8ea6"}, + {file = "tiktoken-0.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:1e8fa13cf9889d2c928b9e258e9dbbbf88ab02016e4236aae76e3b4f82dd8288"}, + {file = "tiktoken-0.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bb2341836b725c60d0ab3c84970b9b5f68d4b733a7bcb80fb25967e5addb9920"}, + {file = "tiktoken-0.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ca30367ad750ee7d42fe80079d3092bd35bb266be7882b79c3bd159b39a17b0"}, + {file = "tiktoken-0.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3dc3df19ddec79435bb2a94ee46f4b9560d0299c23520803d851008445671197"}, + {file = "tiktoken-0.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d980fa066e962ef0f4dad0222e63a484c0c993c7a47c7dafda844ca5aded1f3"}, + {file = "tiktoken-0.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:329f548a821a2f339adc9fbcfd9fc12602e4b3f8598df5593cfc09839e9ae5e4"}, + {file = "tiktoken-0.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b1a038cee487931a5caaef0a2e8520e645508cde21717eacc9af3fbda097d8bb"}, + {file = "tiktoken-0.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:08efa59468dbe23ed038c28893e2a7158d8c211c3dd07f2bbc9a30e012512f1d"}, + {file = "tiktoken-0.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f3020350685e009053829c1168703c346fb32c70c57d828ca3742558e94827a9"}, + {file = "tiktoken-0.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba16698c42aad8190e746cd82f6a06769ac7edd415d62ba027ea1d99d958ed93"}, + {file = "tiktoken-0.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c15d9955cc18d0d7ffcc9c03dc51167aedae98542238b54a2e659bd25fe77ed"}, + {file = "tiktoken-0.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64e1091c7103100d5e2c6ea706f0ec9cd6dc313e6fe7775ef777f40d8c20811e"}, + {file = "tiktoken-0.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e87751b54eb7bca580126353a9cf17a8a8eaadd44edaac0e01123e1513a33281"}, + {file = "tiktoken-0.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e063b988b8ba8b66d6cc2026d937557437e79258095f52eaecfafb18a0a10c03"}, + {file = "tiktoken-0.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9c6dd439e878172dc163fced3bc7b19b9ab549c271b257599f55afc3a6a5edef"}, + {file = "tiktoken-0.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8d1d97f83697ff44466c6bef5d35b6bcdb51e0125829a9c0ed1e6e39fb9a08fb"}, + {file = "tiktoken-0.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1b6bce7c68aa765f666474c7c11a7aebda3816b58ecafb209afa59c799b0dd2d"}, + {file = "tiktoken-0.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a73286c35899ca51d8d764bc0b4d60838627ce193acb60cc88aea60bddec4fd"}, + {file = "tiktoken-0.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0394967d2236a60fd0aacef26646b53636423cc9c70c32f7c5124ebe86f3093"}, + {file = "tiktoken-0.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:dae2af6f03ecba5f679449fa66ed96585b2fa6accb7fd57d9649e9e398a94f44"}, + {file = "tiktoken-0.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:55e251b1da3c293432179cf7c452cfa35562da286786be5a8b1ee3405c2b0dd2"}, + {file = "tiktoken-0.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:c835d0ee1f84a5aa04921717754eadbc0f0a56cf613f78dfc1cf9ad35f6c3fea"}, + {file = "tiktoken-0.4.0.tar.gz", hash = "sha256:59b20a819969735b48161ced9b92f05dc4519c17be4015cfb73b65270a243620"}, +] + +[package.dependencies] +regex = ">=2022.1.18" +requests = ">=2.26.0" + +[package.extras] +blobfile = ["blobfile (>=2)"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "tqdm" +version = "4.66.2" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tqdm-4.66.2-py3-none-any.whl", hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"}, + {file = "tqdm-4.66.2.tar.gz", hash = "sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + +[[package]] +name = "types-pytz" +version = "2024.1.0.20240203" +description = "Typing stubs for pytz" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-pytz-2024.1.0.20240203.tar.gz", hash = "sha256:c93751ee20dfc6e054a0148f8f5227b9a00b79c90a4d3c9f464711a73179c89e"}, + {file = "types_pytz-2024.1.0.20240203-py3-none-any.whl", hash = "sha256:9679eef0365db3af91ef7722c199dbb75ee5c1b67e3c4dd7bfbeb1b8a71c21a3"}, +] + +[[package]] +name = "typing-extensions" +version = "4.11.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, + {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, +] + +[[package]] +name = "typing-inspect" +version = "0.9.0" +description = "Runtime inspection utilities for typing module." +optional = false +python-versions = "*" +files = [ + {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, + {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, +] + +[package.dependencies] +mypy-extensions = ">=0.3.0" +typing-extensions = ">=3.7.4" + +[[package]] +name = "tzdata" +version = "2024.1" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, +] + +[[package]] +name = "ujson" +version = "5.9.0" +description = "Ultra fast JSON encoder and decoder for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "ujson-5.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ab71bf27b002eaf7d047c54a68e60230fbd5cd9da60de7ca0aa87d0bccead8fa"}, + {file = "ujson-5.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7a365eac66f5aa7a7fdf57e5066ada6226700884fc7dce2ba5483538bc16c8c5"}, + {file = "ujson-5.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e015122b337858dba5a3dc3533af2a8fc0410ee9e2374092f6a5b88b182e9fcc"}, + {file = "ujson-5.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:779a2a88c53039bebfbccca934430dabb5c62cc179e09a9c27a322023f363e0d"}, + {file = "ujson-5.9.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10ca3c41e80509fd9805f7c149068fa8dbee18872bbdc03d7cca928926a358d5"}, + {file = "ujson-5.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4a566e465cb2fcfdf040c2447b7dd9718799d0d90134b37a20dff1e27c0e9096"}, + {file = "ujson-5.9.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f833c529e922577226a05bc25b6a8b3eb6c4fb155b72dd88d33de99d53113124"}, + {file = "ujson-5.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b68a0caab33f359b4cbbc10065c88e3758c9f73a11a65a91f024b2e7a1257106"}, + {file = "ujson-5.9.0-cp310-cp310-win32.whl", hash = "sha256:7cc7e605d2aa6ae6b7321c3ae250d2e050f06082e71ab1a4200b4ae64d25863c"}, + {file = "ujson-5.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:a6d3f10eb8ccba4316a6b5465b705ed70a06011c6f82418b59278fbc919bef6f"}, + {file = "ujson-5.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b23bbb46334ce51ddb5dded60c662fbf7bb74a37b8f87221c5b0fec1ec6454b"}, + {file = "ujson-5.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6974b3a7c17bbf829e6c3bfdc5823c67922e44ff169851a755eab79a3dd31ec0"}, + {file = "ujson-5.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5964ea916edfe24af1f4cc68488448fbb1ec27a3ddcddc2b236da575c12c8ae"}, + {file = "ujson-5.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ba7cac47dd65ff88571eceeff48bf30ed5eb9c67b34b88cb22869b7aa19600d"}, + {file = "ujson-5.9.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6bbd91a151a8f3358c29355a491e915eb203f607267a25e6ab10531b3b157c5e"}, + {file = "ujson-5.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:829a69d451a49c0de14a9fecb2a2d544a9b2c884c2b542adb243b683a6f15908"}, + {file = "ujson-5.9.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a807ae73c46ad5db161a7e883eec0fbe1bebc6a54890152ccc63072c4884823b"}, + {file = "ujson-5.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8fc2aa18b13d97b3c8ccecdf1a3c405f411a6e96adeee94233058c44ff92617d"}, + {file = "ujson-5.9.0-cp311-cp311-win32.whl", hash = "sha256:70e06849dfeb2548be48fdd3ceb53300640bc8100c379d6e19d78045e9c26120"}, + {file = "ujson-5.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:7309d063cd392811acc49b5016728a5e1b46ab9907d321ebbe1c2156bc3c0b99"}, + {file = "ujson-5.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:20509a8c9f775b3a511e308bbe0b72897ba6b800767a7c90c5cca59d20d7c42c"}, + {file = "ujson-5.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b28407cfe315bd1b34f1ebe65d3bd735d6b36d409b334100be8cdffae2177b2f"}, + {file = "ujson-5.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d302bd17989b6bd90d49bade66943c78f9e3670407dbc53ebcf61271cadc399"}, + {file = "ujson-5.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f21315f51e0db8ee245e33a649dd2d9dce0594522de6f278d62f15f998e050e"}, + {file = "ujson-5.9.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5635b78b636a54a86fdbf6f027e461aa6c6b948363bdf8d4fbb56a42b7388320"}, + {file = "ujson-5.9.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:82b5a56609f1235d72835ee109163c7041b30920d70fe7dac9176c64df87c164"}, + {file = "ujson-5.9.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:5ca35f484622fd208f55041b042d9d94f3b2c9c5add4e9af5ee9946d2d30db01"}, + {file = "ujson-5.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:829b824953ebad76d46e4ae709e940bb229e8999e40881338b3cc94c771b876c"}, + {file = "ujson-5.9.0-cp312-cp312-win32.whl", hash = "sha256:25fa46e4ff0a2deecbcf7100af3a5d70090b461906f2299506485ff31d9ec437"}, + {file = "ujson-5.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:60718f1720a61560618eff3b56fd517d107518d3c0160ca7a5a66ac949c6cf1c"}, + {file = "ujson-5.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d581db9db9e41d8ea0b2705c90518ba623cbdc74f8d644d7eb0d107be0d85d9c"}, + {file = "ujson-5.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ff741a5b4be2d08fceaab681c9d4bc89abf3c9db600ab435e20b9b6d4dfef12e"}, + {file = "ujson-5.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdcb02cabcb1e44381221840a7af04433c1dc3297af76fde924a50c3054c708c"}, + {file = "ujson-5.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e208d3bf02c6963e6ef7324dadf1d73239fb7008491fdf523208f60be6437402"}, + {file = "ujson-5.9.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4b3917296630a075e04d3d07601ce2a176479c23af838b6cf90a2d6b39b0d95"}, + {file = "ujson-5.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0c4d6adb2c7bb9eb7c71ad6f6f612e13b264942e841f8cc3314a21a289a76c4e"}, + {file = "ujson-5.9.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0b159efece9ab5c01f70b9d10bbb77241ce111a45bc8d21a44c219a2aec8ddfd"}, + {file = "ujson-5.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f0cb4a7814940ddd6619bdce6be637a4b37a8c4760de9373bac54bb7b229698b"}, + {file = "ujson-5.9.0-cp38-cp38-win32.whl", hash = "sha256:dc80f0f5abf33bd7099f7ac94ab1206730a3c0a2d17549911ed2cb6b7aa36d2d"}, + {file = "ujson-5.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:506a45e5fcbb2d46f1a51fead991c39529fc3737c0f5d47c9b4a1d762578fc30"}, + {file = "ujson-5.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d0fd2eba664a22447102062814bd13e63c6130540222c0aa620701dd01f4be81"}, + {file = "ujson-5.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bdf7fc21a03bafe4ba208dafa84ae38e04e5d36c0e1c746726edf5392e9f9f36"}, + {file = "ujson-5.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2f909bc08ce01f122fd9c24bc6f9876aa087188dfaf3c4116fe6e4daf7e194f"}, + {file = "ujson-5.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd4ea86c2afd41429751d22a3ccd03311c067bd6aeee2d054f83f97e41e11d8f"}, + {file = "ujson-5.9.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:63fb2e6599d96fdffdb553af0ed3f76b85fda63281063f1cb5b1141a6fcd0617"}, + {file = "ujson-5.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:32bba5870c8fa2a97f4a68f6401038d3f1922e66c34280d710af00b14a3ca562"}, + {file = "ujson-5.9.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:37ef92e42535a81bf72179d0e252c9af42a4ed966dc6be6967ebfb929a87bc60"}, + {file = "ujson-5.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f69f16b8f1c69da00e38dc5f2d08a86b0e781d0ad3e4cc6a13ea033a439c4844"}, + {file = "ujson-5.9.0-cp39-cp39-win32.whl", hash = "sha256:3382a3ce0ccc0558b1c1668950008cece9bf463ebb17463ebf6a8bfc060dae34"}, + {file = "ujson-5.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:6adef377ed583477cf005b58c3025051b5faa6b8cc25876e594afbb772578f21"}, + {file = "ujson-5.9.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ffdfebd819f492e48e4f31c97cb593b9c1a8251933d8f8972e81697f00326ff1"}, + {file = "ujson-5.9.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4eec2ddc046360d087cf35659c7ba0cbd101f32035e19047013162274e71fcf"}, + {file = "ujson-5.9.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fbb90aa5c23cb3d4b803c12aa220d26778c31b6e4b7a13a1f49971f6c7d088e"}, + {file = "ujson-5.9.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba0823cb70866f0d6a4ad48d998dd338dce7314598721bc1b7986d054d782dfd"}, + {file = "ujson-5.9.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:4e35d7885ed612feb6b3dd1b7de28e89baaba4011ecdf995e88be9ac614765e9"}, + {file = "ujson-5.9.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b048aa93eace8571eedbd67b3766623e7f0acbf08ee291bef7d8106210432427"}, + {file = "ujson-5.9.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:323279e68c195110ef85cbe5edce885219e3d4a48705448720ad925d88c9f851"}, + {file = "ujson-5.9.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9ac92d86ff34296f881e12aa955f7014d276895e0e4e868ba7fddebbde38e378"}, + {file = "ujson-5.9.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:6eecbd09b316cea1fd929b1e25f70382917542ab11b692cb46ec9b0a26c7427f"}, + {file = "ujson-5.9.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:473fb8dff1d58f49912323d7cb0859df5585cfc932e4b9c053bf8cf7f2d7c5c4"}, + {file = "ujson-5.9.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f91719c6abafe429c1a144cfe27883eace9fb1c09a9c5ef1bcb3ae80a3076a4e"}, + {file = "ujson-5.9.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b1c0991c4fe256f5fdb19758f7eac7f47caac29a6c57d0de16a19048eb86bad"}, + {file = "ujson-5.9.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a8ea0f55a1396708e564595aaa6696c0d8af532340f477162ff6927ecc46e21"}, + {file = "ujson-5.9.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:07e0cfdde5fd91f54cd2d7ffb3482c8ff1bf558abf32a8b953a5d169575ae1cd"}, + {file = "ujson-5.9.0.tar.gz", hash = "sha256:89cc92e73d5501b8a7f48575eeb14ad27156ad092c2e9fc7e3cf949f07e75532"}, +] + +[[package]] +name = "url-normalize" +version = "1.4.3" +description = "URL normalization for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, + {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wcmatch" +version = "8.4" +description = "Wildcard/glob file name matcher." +optional = false +python-versions = ">=3.7" +files = [ + {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, + {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, +] + +[package.dependencies] +bracex = ">=2.1.1" + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[[package]] +name = "yarl" +version = "1.9.4" +description = "Yet another URL library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, + {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, + {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, + {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, + {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, + {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, + {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, + {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, + {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, + {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, + {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, + {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, + {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, + {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, + {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, + {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" + +[[package]] +name = "zipp" +version = "3.18.1" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, + {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9,<3.12" +content-hash = "445d0273610512c1ca8aca3d7e6973dd2587f531a896c717409d231aa0aea5ab" diff --git a/airbyte-integrations/connectors/destination-milvus/pyproject.toml b/airbyte-integrations/connectors/destination-milvus/pyproject.toml new file mode 100644 index 0000000000000..a73f5df7aedae --- /dev/null +++ b/airbyte-integrations/connectors/destination-milvus/pyproject.toml @@ -0,0 +1,30 @@ +[build-system] +requires = [ "poetry-core>=1.0.0",] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +name = "airbyte-destination-milvus" +version = "0.0.14" +description = "Airbyte destination implementation for Milvus." +authors = ["Airbyte "] +license = "MIT" +readme = "README.md" +documentation = "https://docs.airbyte.com/integrations/destinations/milvus" +homepage = "https://airbyte.com" +repository = "https://github.com/airbytehq/airbyte" + +[[tool.poetry.packages]] +include = "destination_milvus" + +[tool.poetry.dependencies] +python = "^3.9,<3.12" +airbyte-cdk = {version = "0.81.6", extras = ["vector-db-based"]} +pymilvus = "2.3.0" + +[tool.poetry.group.dev.dependencies] +pytest = "^7.2" +ruff = "^0.3.2" +mypy = "^1.9.0" + +[tool.poetry.scripts] +destination-milvus = "destination_milvus.run:run" \ No newline at end of file diff --git a/airbyte-integrations/connectors/destination-milvus/requirements.txt b/airbyte-integrations/connectors/destination-milvus/requirements.txt deleted file mode 100644 index d6e1198b1ab1f..0000000000000 --- a/airbyte-integrations/connectors/destination-milvus/requirements.txt +++ /dev/null @@ -1 +0,0 @@ --e . diff --git a/airbyte-integrations/connectors/destination-milvus/setup.py b/airbyte-integrations/connectors/destination-milvus/setup.py deleted file mode 100644 index e5c0cf315a835..0000000000000 --- a/airbyte-integrations/connectors/destination-milvus/setup.py +++ /dev/null @@ -1,23 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = ["airbyte-cdk[vector-db-based]==0.57.0", "pymilvus==2.3.0"] - -TEST_REQUIREMENTS = ["pytest~=6.2"] - -setup( - name="destination_milvus", - description="Destination implementation for Milvus.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={"": ["*.json"]}, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/destination-mysql-strict-encrypt/build.gradle b/airbyte-integrations/connectors/destination-mysql-strict-encrypt/build.gradle index 4f4da7b4aab92..4695103edad9f 100644 --- a/airbyte-integrations/connectors/destination-mysql-strict-encrypt/build.gradle +++ b/airbyte-integrations/connectors/destination-mysql-strict-encrypt/build.gradle @@ -4,8 +4,8 @@ plugins { } airbyteJavaConnector { - cdkVersionRequired = '0.8.0' - features = ['db-destinations'] + cdkVersionRequired = '0.30.2' + features = ['db-destinations', 'typing-deduping'] useLocalCdk = false } @@ -28,10 +28,6 @@ dependencies { implementation 'mysql:mysql-connector-java:8.0.22' - // TODO: declare typing-deduping as a CDK feature instead of importing from source. - implementation project(':airbyte-cdk:java:airbyte-cdk:typing-deduping') - integrationTestJavaImplementation testFixtures(project(':airbyte-cdk:java:airbyte-cdk:typing-deduping')) - integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-mysql') integrationTestJavaImplementation libs.testcontainers.mysql } diff --git a/airbyte-integrations/connectors/destination-mysql-strict-encrypt/metadata.yaml b/airbyte-integrations/connectors/destination-mysql-strict-encrypt/metadata.yaml index e03ea32c8f73d..32964d1a1fe4c 100644 --- a/airbyte-integrations/connectors/destination-mysql-strict-encrypt/metadata.yaml +++ b/airbyte-integrations/connectors/destination-mysql-strict-encrypt/metadata.yaml @@ -1,15 +1,13 @@ data: registries: cloud: - dockerImageTag: 0.2.0 enabled: false # strict encrypt connectors are deployed to Cloud by their non strict encrypt sibling. oss: - dockerImageTag: 0.2.0 enabled: false # strict encrypt connectors are not used on OSS. connectorSubtype: database connectorType: destination definitionId: ca81ee7c-3163-4246-af40-094cc31e5e42 - dockerImageTag: 0.3.0 + dockerImageTag: 0.3.1 dockerRepository: airbyte/destination-mysql-strict-encrypt githubIssueLabel: destination-mysql icon: mysql.svg diff --git a/airbyte-integrations/connectors/destination-mysql-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/mysql/MySQLStrictEncryptDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-mysql-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/mysql/MySQLStrictEncryptDestinationAcceptanceTest.java index 49e7776c7d114..0fd243edc6570 100644 --- a/airbyte-integrations/connectors/destination-mysql-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/mysql/MySQLStrictEncryptDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-mysql-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/mysql/MySQLStrictEncryptDestinationAcceptanceTest.java @@ -17,6 +17,7 @@ import io.airbyte.cdk.integrations.base.JavaBaseConstants; import io.airbyte.cdk.integrations.destination.StandardNameTransformer; import io.airbyte.cdk.integrations.standardtest.destination.JdbcDestinationAcceptanceTest; +import io.airbyte.cdk.integrations.standardtest.destination.argproviders.DataTypeTestArgumentProvider; import io.airbyte.cdk.integrations.standardtest.destination.comparator.TestDataComparator; import io.airbyte.cdk.integrations.util.HostPortResolver; import io.airbyte.commons.json.Jsons; @@ -34,6 +35,7 @@ import java.util.stream.Collectors; import org.jooq.DSLContext; import org.jooq.SQLDialect; +import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.testcontainers.containers.MySQLContainer; @@ -239,6 +241,20 @@ public void testLineBreakCharacters() { // overrides test with a no-op until we handle full UTF-8 in the destination } + /** + * Legacy mysql normalization is broken, and uses the FLOAT type for numbers. This rounds off e.g. + * 12345.678 to 12345.7. We can fix this in DV2, but will not fix legacy normalization. As such, + * disabling the test case. + */ + @Override + @Disabled("MySQL normalization uses the wrong datatype for numbers. This will not be fixed, because we intend to replace normalization with DV2.") + public void testDataTypeTestWithNormalization(final String messagesFilename, + final String catalogFilename, + final DataTypeTestArgumentProvider.TestCompatibility testCompatibility) + throws Exception { + super.testDataTypeTestWithNormalization(messagesFilename, catalogFilename, testCompatibility); + } + protected void assertSameValue(final JsonNode expectedValue, final JsonNode actualValue) { if (expectedValue.isBoolean()) { // Boolean in MySQL are stored as TINYINT (0 or 1) so we force them to boolean values here diff --git a/airbyte-integrations/connectors/destination-mysql/build.gradle b/airbyte-integrations/connectors/destination-mysql/build.gradle index 7c04b7df44472..e9f09a5e94b95 100644 --- a/airbyte-integrations/connectors/destination-mysql/build.gradle +++ b/airbyte-integrations/connectors/destination-mysql/build.gradle @@ -4,8 +4,8 @@ plugins { } airbyteJavaConnector { - cdkVersionRequired = '0.8.0' - features = ['db-destinations'] + cdkVersionRequired = '0.30.2' + features = ['db-destinations', 'typing-deduping'] useLocalCdk = false } @@ -26,10 +26,6 @@ application { dependencies { implementation 'mysql:mysql-connector-java:8.0.22' integrationTestJavaImplementation libs.testcontainers.mysql - - // TODO: declare typing-deduping as a CDK feature instead of importing from source. - implementation project(':airbyte-cdk:java:airbyte-cdk:typing-deduping') - integrationTestJavaImplementation testFixtures(project(':airbyte-cdk:java:airbyte-cdk:typing-deduping')) } configurations.all { diff --git a/airbyte-integrations/connectors/destination-mysql/metadata.yaml b/airbyte-integrations/connectors/destination-mysql/metadata.yaml index 9e6ee1de71e21..985de4396ef07 100644 --- a/airbyte-integrations/connectors/destination-mysql/metadata.yaml +++ b/airbyte-integrations/connectors/destination-mysql/metadata.yaml @@ -2,7 +2,7 @@ data: connectorSubtype: database connectorType: destination definitionId: ca81ee7c-3163-4246-af40-094cc31e5e42 - dockerImageTag: 0.3.0 + dockerImageTag: 0.3.1 dockerRepository: airbyte/destination-mysql githubIssueLabel: destination-mysql icon: mysql.svg diff --git a/airbyte-integrations/connectors/destination-mysql/src/main/java/io/airbyte/integrations/destination/mysql/MySQLDestination.java b/airbyte-integrations/connectors/destination-mysql/src/main/java/io/airbyte/integrations/destination/mysql/MySQLDestination.java index 438086bd8b389..31a2ec66d19b7 100644 --- a/airbyte-integrations/connectors/destination-mysql/src/main/java/io/airbyte/integrations/destination/mysql/MySQLDestination.java +++ b/airbyte-integrations/connectors/destination-mysql/src/main/java/io/airbyte/integrations/destination/mysql/MySQLDestination.java @@ -16,20 +16,31 @@ import io.airbyte.cdk.integrations.base.Destination; import io.airbyte.cdk.integrations.base.IntegrationRunner; import io.airbyte.cdk.integrations.base.ssh.SshWrappedDestination; +import io.airbyte.cdk.integrations.destination.PropertyNameSimplifyingDataTransformer; +import io.airbyte.cdk.integrations.destination.async.deser.StreamAwareDataTransformer; import io.airbyte.cdk.integrations.destination.jdbc.AbstractJdbcDestination; +import io.airbyte.cdk.integrations.destination.jdbc.typing_deduping.JdbcDestinationHandler; import io.airbyte.cdk.integrations.destination.jdbc.typing_deduping.JdbcSqlGenerator; import io.airbyte.commons.exceptions.ConnectionErrorException; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.map.MoreMaps; +import io.airbyte.integrations.base.destination.typing_deduping.DestinationHandler; +import io.airbyte.integrations.base.destination.typing_deduping.ParsedCatalog; +import io.airbyte.integrations.base.destination.typing_deduping.SqlGenerator; +import io.airbyte.integrations.base.destination.typing_deduping.migrators.Migration; +import io.airbyte.integrations.base.destination.typing_deduping.migrators.MinimumDestinationState; import io.airbyte.integrations.destination.mysql.MySQLSqlOperations.VersionCompatibility; import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; import io.airbyte.protocol.models.v0.AirbyteConnectionStatus.Status; +import java.util.Collections; +import java.util.List; import java.util.Map; import javax.sql.DataSource; +import org.jetbrains.annotations.NotNull; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class MySQLDestination extends AbstractJdbcDestination implements Destination { +public class MySQLDestination extends AbstractJdbcDestination implements Destination { private static final Logger LOGGER = LoggerFactory.getLogger(MySQLDestination.class); public static final String DRIVER_CLASS = DatabaseDriver.MYSQL.getDriverClassName(); @@ -129,10 +140,15 @@ public JsonNode toJdbcConfig(final JsonNode config) { } @Override - protected JdbcSqlGenerator getSqlGenerator() { + protected JdbcSqlGenerator getSqlGenerator(final JsonNode config) { throw new UnsupportedOperationException("mysql does not yet support DV2"); } + @Override + protected StreamAwareDataTransformer getDataTransformer(ParsedCatalog parsedCatalog, String defaultNamespace) { + return new PropertyNameSimplifyingDataTransformer(); + } + public static void main(final String[] args) throws Exception { final Destination destination = MySQLDestination.sshWrappedDestination(); LOGGER.info("starting destination: {}", MySQLDestination.class); @@ -140,4 +156,21 @@ public static void main(final String[] args) throws Exception { LOGGER.info("completed destination: {}", MySQLDestination.class); } + @NotNull + @Override + protected JdbcDestinationHandler getDestinationHandler(@NotNull String databaseName, + @NotNull JdbcDatabase database, + @NotNull String rawTableSchema) { + throw new UnsupportedOperationException("Mysql does not yet support DV2"); + } + + @NotNull + @Override + protected List> getMigrations(@NotNull JdbcDatabase database, + @NotNull String databaseName, + @NotNull SqlGenerator sqlGenerator, + @NotNull DestinationHandler destinationHandler) { + return Collections.emptyList(); + } + } diff --git a/airbyte-integrations/connectors/destination-mysql/src/main/java/io/airbyte/integrations/destination/mysql/MySQLSqlOperations.java b/airbyte-integrations/connectors/destination-mysql/src/main/java/io/airbyte/integrations/destination/mysql/MySQLSqlOperations.java index 2fa5ec8dd5722..f6537252b7621 100644 --- a/airbyte-integrations/connectors/destination-mysql/src/main/java/io/airbyte/integrations/destination/mysql/MySQLSqlOperations.java +++ b/airbyte-integrations/connectors/destination-mysql/src/main/java/io/airbyte/integrations/destination/mysql/MySQLSqlOperations.java @@ -4,13 +4,11 @@ package io.airbyte.integrations.destination.mysql; -import com.fasterxml.jackson.databind.JsonNode; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import io.airbyte.cdk.db.jdbc.JdbcDatabase; import io.airbyte.cdk.integrations.base.JavaBaseConstants; -import io.airbyte.cdk.integrations.destination.StandardNameTransformer; +import io.airbyte.cdk.integrations.destination.async.model.PartialAirbyteMessage; import io.airbyte.cdk.integrations.destination.jdbc.JdbcSqlOperations; -import io.airbyte.cdk.integrations.destination_async.partial_messages.PartialAirbyteMessage; import java.io.File; import java.io.IOException; import java.nio.file.Files; @@ -86,11 +84,6 @@ private void loadDataIntoTable(final JdbcDatabase database, }); } - @Override - protected JsonNode formatData(final JsonNode data) { - return StandardNameTransformer.formatJsonPath(data); - } - void verifyLocalFileEnabled(final JdbcDatabase database) throws SQLException { final boolean localFileEnabled = isLocalFileEnabled || checkIfLocalFileIsEnabled(database); if (!localFileEnabled) { diff --git a/airbyte-integrations/connectors/destination-mysql/src/test-integration/java/io/airbyte/integrations/destination/mysql/MySQLDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-mysql/src/test-integration/java/io/airbyte/integrations/destination/mysql/MySQLDestinationAcceptanceTest.java index b6d83448bf466..a299c51a84aa8 100644 --- a/airbyte-integrations/connectors/destination-mysql/src/test-integration/java/io/airbyte/integrations/destination/mysql/MySQLDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-mysql/src/test-integration/java/io/airbyte/integrations/destination/mysql/MySQLDestinationAcceptanceTest.java @@ -4,6 +4,7 @@ package io.airbyte.integrations.destination.mysql; +import static java.util.concurrent.TimeUnit.SECONDS; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -39,6 +40,7 @@ import org.jooq.SQLDialect; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.testcontainers.containers.MySQLContainer; public class MySQLDestinationAcceptanceTest extends JdbcDestinationAcceptanceTest { @@ -267,6 +269,10 @@ protected void assertSameValue(final JsonNode expectedValue, final JsonNode actu } } + // Something is very weird in our connection check code. A wrong password takes >1 minute to return. + // TODO investigate why invalid creds take so long to detect + @Timeout(value = 300, + unit = SECONDS) @Test void testCheckIncorrectPasswordFailure() { final JsonNode config = ((ObjectNode) getConfigForBareMetalConnection()).put(JdbcUtils.PASSWORD_KEY, "fake"); @@ -276,6 +282,8 @@ void testCheckIncorrectPasswordFailure() { assertStringContains(status.getMessage(), "State code: 28000; Error code: 1045;"); } + @Timeout(value = 300, + unit = SECONDS) @Test public void testCheckIncorrectUsernameFailure() { final JsonNode config = ((ObjectNode) getConfigForBareMetalConnection()).put(JdbcUtils.USERNAME_KEY, "fake"); @@ -285,6 +293,8 @@ public void testCheckIncorrectUsernameFailure() { assertStringContains(status.getMessage(), "State code: 28000; Error code: 1045;"); } + @Timeout(value = 300, + unit = SECONDS) @Test public void testCheckIncorrectHostFailure() { final JsonNode config = ((ObjectNode) getConfigForBareMetalConnection()).put(JdbcUtils.HOST_KEY, "localhost2"); @@ -294,6 +304,8 @@ public void testCheckIncorrectHostFailure() { assertStringContains(status.getMessage(), "State code: 08S01;"); } + @Timeout(value = 300, + unit = SECONDS) @Test public void testCheckIncorrectPortFailure() { final JsonNode config = ((ObjectNode) getConfigForBareMetalConnection()).put(JdbcUtils.PORT_KEY, "0000"); @@ -303,6 +315,8 @@ public void testCheckIncorrectPortFailure() { assertStringContains(status.getMessage(), "State code: 08S01;"); } + @Timeout(value = 300, + unit = SECONDS) @Test public void testCheckIncorrectDataBaseFailure() { final JsonNode config = ((ObjectNode) getConfigForBareMetalConnection()).put(JdbcUtils.DATABASE_KEY, "wrongdatabase"); @@ -312,6 +326,8 @@ public void testCheckIncorrectDataBaseFailure() { assertStringContains(status.getMessage(), "State code: 42000; Error code: 1049;"); } + @Timeout(value = 300, + unit = SECONDS) @Test public void testUserHasNoPermissionToDataBase() { executeQuery("create user '" + USERNAME_WITHOUT_PERMISSION + "'@'%' IDENTIFIED BY '" + PASSWORD_WITHOUT_PERMISSION + "';\n"); diff --git a/airbyte-integrations/connectors/destination-mysql/src/test-integration/java/io/airbyte/integrations/destination/mysql/SshMySQLDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-mysql/src/test-integration/java/io/airbyte/integrations/destination/mysql/SshMySQLDestinationAcceptanceTest.java index 0750f3393ae30..0f637280b0bb8 100644 --- a/airbyte-integrations/connectors/destination-mysql/src/test-integration/java/io/airbyte/integrations/destination/mysql/SshMySQLDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-mysql/src/test-integration/java/io/airbyte/integrations/destination/mysql/SshMySQLDestinationAcceptanceTest.java @@ -31,6 +31,9 @@ /** * Abstract class that allows us to avoid duplicating testing logic for testing SSH with a key file * or with a password. + *

    + * This class probably should extend {@link MySQLDestinationAcceptanceTest} to further reduce code + * duplication though. */ public abstract class SshMySQLDestinationAcceptanceTest extends JdbcDestinationAcceptanceTest { @@ -109,6 +112,14 @@ protected List retrieveNormalizedRecords(final TestDestinationEnv env, return retrieveRecordsFromTable(tableName, schema); } + @Override + protected String getDefaultSchema(final JsonNode config) { + if (config.get(JdbcUtils.DATABASE_KEY) == null) { + return null; + } + return config.get(JdbcUtils.DATABASE_KEY).asText(); + } + private static Database getDatabaseFromConfig(final JsonNode config) { final DSLContext dslContext = DSLContextFactory.create( config.get(JdbcUtils.USERNAME_KEY).asText(), diff --git a/airbyte-integrations/connectors/destination-oracle-strict-encrypt/build.gradle b/airbyte-integrations/connectors/destination-oracle-strict-encrypt/build.gradle index 0e940345ab009..c14752b64f8fa 100644 --- a/airbyte-integrations/connectors/destination-oracle-strict-encrypt/build.gradle +++ b/airbyte-integrations/connectors/destination-oracle-strict-encrypt/build.gradle @@ -4,8 +4,8 @@ plugins { } airbyteJavaConnector { - cdkVersionRequired = '0.2.0' - features = ['db-destinations'] + cdkVersionRequired = '0.29.10' + features = ['db-destinations', 's3-destinations', 'typing-deduping'] useLocalCdk = false } diff --git a/airbyte-integrations/connectors/destination-oracle-strict-encrypt/metadata.yaml b/airbyte-integrations/connectors/destination-oracle-strict-encrypt/metadata.yaml index bd323c6fdbc51..d37ac24b2b47d 100644 --- a/airbyte-integrations/connectors/destination-oracle-strict-encrypt/metadata.yaml +++ b/airbyte-integrations/connectors/destination-oracle-strict-encrypt/metadata.yaml @@ -7,17 +7,23 @@ data: connectorSubtype: database connectorType: destination definitionId: 3986776d-2319-4de9-8af8-db14c0996e72 - dockerImageTag: 0.2.0 + dockerImageTag: 1.0.0 dockerRepository: airbyte/destination-oracle-strict-encrypt githubIssueLabel: destination-oracle icon: oracle.svg license: ELv2 name: Oracle - normalizationConfig: - normalizationIntegrationType: oracle - normalizationRepository: airbyte/normalization-oracle - normalizationTag: 0.4.1 releaseStage: alpha + releases: + breakingChanges: + 1.0.0: + upgradeDeadline: "2024-05-01" + message: > + This version removes the option to use "normalization" with Oracle. It also changes + the schema and database of Airbyte's "raw" tables to be compatible with the new + [Destinations V2](https://docs.airbyte.com/release_notes/upgrading_to_destinations_v2/#what-is-destinations-v2) + format. These changes will likely require updates to downstream dbt / SQL models. + Selecting `Upgrade` will upgrade **all** connections using this destination at their next sync. documentationUrl: https://docs.airbyte.com/integrations/destinations/oracle supportsDbt: true tags: diff --git a/airbyte-integrations/connectors/destination-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/oracle_strict_encrypt/OracleStrictEncryptDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/oracle_strict_encrypt/OracleStrictEncryptDestinationAcceptanceTest.java index 72e2a11ce32f9..20cb8a31c8d04 100644 --- a/airbyte-integrations/connectors/destination-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/oracle_strict_encrypt/OracleStrictEncryptDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/oracle_strict_encrypt/OracleStrictEncryptDestinationAcceptanceTest.java @@ -17,13 +17,14 @@ import io.airbyte.cdk.db.jdbc.DefaultJdbcDatabase; import io.airbyte.cdk.db.jdbc.JdbcDatabase; import io.airbyte.cdk.db.jdbc.JdbcUtils; +import io.airbyte.cdk.integrations.base.JavaBaseConstants; import io.airbyte.cdk.integrations.destination.StandardNameTransformer; import io.airbyte.cdk.integrations.standardtest.destination.DestinationAcceptanceTest; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.string.Strings; -import io.airbyte.integrations.destination.oracle.OracleDestination; import io.airbyte.integrations.destination.oracle.OracleNameTransformer; import java.sql.SQLException; +import java.time.Duration; import java.util.ArrayList; import java.util.HashSet; import java.util.List; @@ -73,7 +74,7 @@ protected List retrieveRecords(final TestDestinationEnv env, return retrieveRecordsFromTable(namingResolver.getRawTableName(streamName), namespace) .stream() .map(r -> Jsons.deserialize( - r.get(OracleDestination.COLUMN_NAME_DATA.replace("\"", "")).asText())) + r.get(JavaBaseConstants.COLUMN_NAME_DATA).asText())) .collect(Collectors.toList()); } @@ -113,16 +114,15 @@ protected List resolveIdentifier(final String identifier) { private List retrieveRecordsFromTable(final String tableName, final String schemaName) throws SQLException { - final String query = String.format("SELECT * FROM %s.%s ORDER BY %s ASC", schemaName, tableName, OracleDestination.COLUMN_NAME_EMITTED_AT); - - try (final DSLContext dslContext = getDslContext(config)) { - final List result = getDatabase(dslContext).query(ctx -> ctx.fetch(query).stream().toList()); - return result - .stream() - .map(r -> r.formatJSON(JSON_FORMAT)) - .map(Jsons::deserialize) - .collect(Collectors.toList()); - } + final String query = + String.format("SELECT * FROM %s.%s ORDER BY %s ASC", schemaName, tableName, JavaBaseConstants.COLUMN_NAME_AB_EXTRACTED_AT.toUpperCase()); + final DSLContext dslContext = getDslContext(config); + final List result = getDatabase(dslContext).query(ctx -> ctx.fetch(query).stream().toList()); + return result + .stream() + .map(r -> r.formatJSON(JSON_FORMAT)) + .map(Jsons::deserialize) + .collect(Collectors.toList()); } private static Database getDatabase(final DSLContext dslContext) { @@ -151,15 +151,13 @@ protected void setup(final TestDestinationEnv testEnv, final HashSet TES db.start(); config = getConfig(db); + final DSLContext dslContext = getDslContext(config); + final Database database = getDatabase(dslContext); + database.query( + ctx -> ctx.fetch(String.format("CREATE USER %s IDENTIFIED BY %s", schemaName, schemaName))); + database.query(ctx -> ctx.fetch(String.format("GRANT ALL PRIVILEGES TO %s", schemaName))); - try (final DSLContext dslContext = getDslContext(config)) { - final Database database = getDatabase(dslContext); - database.query( - ctx -> ctx.fetch(String.format("CREATE USER %s IDENTIFIED BY %s", schemaName, schemaName))); - database.query(ctx -> ctx.fetch(String.format("GRANT ALL PRIVILEGES TO %s", schemaName))); - - ((ObjectNode) config).put(JdbcUtils.SCHEMA_KEY, dbName); - } + ((ObjectNode) config).put(JdbcUtils.SCHEMA_KEY, dbName); } @Override @@ -182,7 +180,8 @@ public void testEncryption() throws SQLException { config.get(JdbcUtils.PORT_KEY).asInt(), config.get("sid").asText()), JdbcUtils.parseJdbcParameters("oracle.net.encryption_client=REQUIRED;" + - "oracle.net.encryption_types_client=( " + algorithm + " )", ";")); + "oracle.net.encryption_types_client=( " + algorithm + " )", ";"), + Duration.ofMinutes(5)); final JdbcDatabase database = new DefaultJdbcDatabase(dataSource); final String networkServiceBanner = @@ -208,7 +207,8 @@ public void testCheckProtocol() throws SQLException { config.get(JdbcUtils.PORT_KEY).asInt(), config.get("sid").asText()), JdbcUtils.parseJdbcParameters("oracle.net.encryption_client=REQUIRED;" + - "oracle.net.encryption_types_client=( " + algorithm + " )", ";")); + "oracle.net.encryption_types_client=( " + algorithm + " )", ";"), + Duration.ofMinutes(5)); final JdbcDatabase database = new DefaultJdbcDatabase(dataSource); final String networkServiceBanner = "SELECT sys_context('USERENV', 'NETWORK_PROTOCOL') as network_protocol FROM dual"; diff --git a/airbyte-integrations/connectors/destination-oracle-strict-encrypt/src/test/resources/expected_spec.json b/airbyte-integrations/connectors/destination-oracle-strict-encrypt/src/test/resources/expected_spec.json index 86b2da9a042e1..b6badb3b72281 100644 --- a/airbyte-integrations/connectors/destination-oracle-strict-encrypt/src/test/resources/expected_spec.json +++ b/airbyte-integrations/connectors/destination-oracle-strict-encrypt/src/test/resources/expected_spec.json @@ -59,6 +59,126 @@ "examples": ["airbyte"], "default": "airbyte", "order": 6 + }, + "raw_data_schema": { + "type": "string", + "description": "The schema to write raw tables into (default: airbyte_internal)", + "title": "Raw Table Schema Name", + "order": 7 + }, + "tunnel_method": { + "type": "object", + "title": "SSH Tunnel Method", + "description": "Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.", + "oneOf": [ + { + "title": "No Tunnel", + "required": ["tunnel_method"], + "properties": { + "tunnel_method": { + "description": "No ssh tunnel needed to connect to database", + "type": "string", + "const": "NO_TUNNEL", + "order": 0 + } + } + }, + { + "title": "SSH Key Authentication", + "required": [ + "tunnel_method", + "tunnel_host", + "tunnel_port", + "tunnel_user", + "ssh_key" + ], + "properties": { + "tunnel_method": { + "description": "Connect through a jump server tunnel host using username and ssh key", + "type": "string", + "const": "SSH_KEY_AUTH", + "order": 0 + }, + "tunnel_host": { + "title": "SSH Tunnel Jump Server Host", + "description": "Hostname of the jump server host that allows inbound ssh tunnel.", + "type": "string", + "order": 1 + }, + "tunnel_port": { + "title": "SSH Connection Port", + "description": "Port on the proxy/jump server that accepts inbound ssh connections.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 22, + "examples": ["22"], + "order": 2 + }, + "tunnel_user": { + "title": "SSH Login Username", + "description": "OS-level username for logging into the jump server host.", + "type": "string", + "order": 3 + }, + "ssh_key": { + "title": "SSH Private Key", + "description": "OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )", + "type": "string", + "airbyte_secret": true, + "multiline": true, + "order": 4 + } + } + }, + { + "title": "Password Authentication", + "required": [ + "tunnel_method", + "tunnel_host", + "tunnel_port", + "tunnel_user", + "tunnel_user_password" + ], + "properties": { + "tunnel_method": { + "description": "Connect through a jump server tunnel host using username and password authentication", + "type": "string", + "const": "SSH_PASSWORD_AUTH", + "order": 0 + }, + "tunnel_host": { + "title": "SSH Tunnel Jump Server Host", + "description": "Hostname of the jump server host that allows inbound ssh tunnel.", + "type": "string", + "order": 1 + }, + "tunnel_port": { + "title": "SSH Connection Port", + "description": "Port on the proxy/jump server that accepts inbound ssh connections.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 22, + "examples": ["22"], + "order": 2 + }, + "tunnel_user": { + "title": "SSH Login Username", + "description": "OS-level username for logging into the jump server host", + "type": "string", + "order": 3 + }, + "tunnel_user_password": { + "title": "Password", + "description": "OS-level password for logging into the jump server host", + "type": "string", + "airbyte_secret": true, + "order": 4 + } + } + } + ] } } } diff --git a/airbyte-integrations/connectors/destination-oracle/build.gradle b/airbyte-integrations/connectors/destination-oracle/build.gradle index a192ee34744a3..861a1174ff4fd 100644 --- a/airbyte-integrations/connectors/destination-oracle/build.gradle +++ b/airbyte-integrations/connectors/destination-oracle/build.gradle @@ -4,8 +4,8 @@ plugins { } airbyteJavaConnector { - cdkVersionRequired = '0.2.0' - features = ['db-destinations'] + cdkVersionRequired = '0.29.10' + features = ['db-destinations', 's3-destinations', 'typing-deduping'] useLocalCdk = false } diff --git a/airbyte-integrations/connectors/destination-oracle/metadata.yaml b/airbyte-integrations/connectors/destination-oracle/metadata.yaml index f6f1acf44c377..c280427bd530b 100644 --- a/airbyte-integrations/connectors/destination-oracle/metadata.yaml +++ b/airbyte-integrations/connectors/destination-oracle/metadata.yaml @@ -2,16 +2,12 @@ data: connectorSubtype: database connectorType: destination definitionId: 3986776d-2319-4de9-8af8-db14c0996e72 - dockerImageTag: 0.2.0 + dockerImageTag: 1.0.0 dockerRepository: airbyte/destination-oracle githubIssueLabel: destination-oracle icon: oracle.svg license: ELv2 name: Oracle - normalizationConfig: - normalizationIntegrationType: oracle - normalizationRepository: airbyte/normalization-oracle - normalizationTag: 0.4.3 registries: cloud: dockerRepository: airbyte/destination-oracle-strict-encrypt @@ -21,6 +17,16 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/destinations/oracle supportsDbt: true + releases: + breakingChanges: + 1.0.0: + upgradeDeadline: "2024-05-01" + message: > + This version removes the option to use "normalization" with Oracle. It also changes + the schema and database of Airbyte's "raw" tables to be compatible with the new + [Destinations V2](https://docs.airbyte.com/release_notes/upgrading_to_destinations_v2/#what-is-destinations-v2) + format. These changes will likely require updates to downstream dbt / SQL models. + Selecting `Upgrade` will upgrade **all** connections using this destination at their next sync. tags: - language:java ab_internal: diff --git a/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleDestination.java b/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleDestination.java index 9a515ef1f74e6..783e059b645b5 100644 --- a/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleDestination.java +++ b/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleDestination.java @@ -7,35 +7,39 @@ import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.ImmutableMap; import io.airbyte.cdk.db.factory.DatabaseDriver; +import io.airbyte.cdk.db.jdbc.JdbcDatabase; import io.airbyte.cdk.db.jdbc.JdbcUtils; import io.airbyte.cdk.integrations.base.Destination; import io.airbyte.cdk.integrations.base.IntegrationRunner; -import io.airbyte.cdk.integrations.base.JavaBaseConstants; import io.airbyte.cdk.integrations.base.ssh.SshWrappedDestination; import io.airbyte.cdk.integrations.destination.jdbc.AbstractJdbcDestination; +import io.airbyte.cdk.integrations.destination.jdbc.typing_deduping.JdbcDestinationHandler; +import io.airbyte.cdk.integrations.destination.jdbc.typing_deduping.JdbcSqlGenerator; +import io.airbyte.cdk.integrations.destination.jdbc.typing_deduping.NoOpJdbcDestinationHandler; +import io.airbyte.cdk.integrations.destination.jdbc.typing_deduping.RawOnlySqlGenerator; import io.airbyte.commons.json.Jsons; +import io.airbyte.integrations.base.destination.typing_deduping.DestinationHandler; +import io.airbyte.integrations.base.destination.typing_deduping.SqlGenerator; +import io.airbyte.integrations.base.destination.typing_deduping.migrators.Migration; +import io.airbyte.integrations.base.destination.typing_deduping.migrators.MinimumDestinationState; import java.io.IOException; import java.io.PrintWriter; import java.nio.charset.StandardCharsets; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; import org.apache.commons.lang3.RandomStringUtils; +import org.jetbrains.annotations.NotNull; +import org.jooq.SQLDialect; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class OracleDestination extends AbstractJdbcDestination implements Destination { +public class OracleDestination extends AbstractJdbcDestination implements Destination { private static final Logger LOGGER = LoggerFactory.getLogger(OracleDestination.class); public static final String DRIVER_CLASS = DatabaseDriver.ORACLE.getDriverClassName(); - public static final String COLUMN_NAME_AB_ID = - "\"" + JavaBaseConstants.COLUMN_NAME_AB_ID.toUpperCase() + "\""; - public static final String COLUMN_NAME_DATA = - "\"" + JavaBaseConstants.COLUMN_NAME_DATA.toUpperCase() + "\""; - public static final String COLUMN_NAME_EMITTED_AT = - "\"" + JavaBaseConstants.COLUMN_NAME_EMITTED_AT.toUpperCase() + "\""; - protected static final String KEY_STORE_FILE_PATH = "clientkeystore.jks"; private static final String KEY_STORE_PASS = RandomStringUtils.randomAlphanumeric(8); public static final String ENCRYPTION_METHOD_KEY = "encryption_method"; @@ -84,8 +88,9 @@ protected Map getDefaultConnectionProperties(final JsonNode conf return properties; } + @NotNull @Override - public JsonNode toJdbcConfig(final JsonNode config) { + public JsonNode toJdbcConfig(@NotNull final JsonNode config) { final Protocol protocol = obtainConnectionProtocol(config); final String connectionString = String.format( "jdbc:oracle:thin:@(DESCRIPTION=(ADDRESS=(PROTOCOL=%s)(HOST=%s)(PORT=%s))(CONNECT_DATA=(SID=%s)))", @@ -134,6 +139,45 @@ private static void tryConvertAndImportCertificate(final String certificate) { } } + @Override + public boolean isV2Destination() { + return true; + } + + @Override + protected boolean shouldAlwaysDisableTypeDedupe() { + return true; + } + + @NotNull + @Override + protected String getDatabaseName(@NotNull final JsonNode config) { + return config.get("sid").asText(); + } + + @NotNull + @Override + protected JdbcSqlGenerator getSqlGenerator() { + return new RawOnlySqlGenerator(new OracleNameTransformer()); + } + + @NotNull + @Override + protected JdbcDestinationHandler getDestinationHandler(final String databaseName, + final JdbcDatabase database, + final String rawTableSchema) { + return new NoOpJdbcDestinationHandler<>(databaseName, database, rawTableSchema, SQLDialect.DEFAULT); + } + + @NotNull + @Override + protected List> getMigrations(@NotNull final JdbcDatabase database, + @NotNull final String databaseName, + @NotNull final SqlGenerator sqlGenerator, + @NotNull final DestinationHandler destinationHandler) { + return List.of(); + } + private static void convertAndImportCertificate(final String certificate) throws IOException, InterruptedException { final Runtime run = Runtime.getRuntime(); diff --git a/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleNameTransformer.java b/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleNameTransformer.java index ace5753550503..18fbe69950717 100644 --- a/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleNameTransformer.java +++ b/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleNameTransformer.java @@ -17,6 +17,7 @@ public String applyDefaultCase(final String input) { } @Override + @Deprecated public String getRawTableName(final String streamName) { return convertStreamName("airbyte_raw_" + streamName); } diff --git a/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleOperations.java b/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleOperations.java index 468e33bd7345d..fb25e07bc666f 100644 --- a/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleOperations.java +++ b/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleOperations.java @@ -4,25 +4,33 @@ package io.airbyte.integrations.destination.oracle; +import static io.airbyte.cdk.integrations.base.JavaBaseConstantsKt.upperQuoted; + +import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; import io.airbyte.cdk.db.jdbc.JdbcDatabase; +import io.airbyte.cdk.integrations.base.JavaBaseConstants; import io.airbyte.cdk.integrations.destination.StandardNameTransformer; +import io.airbyte.cdk.integrations.destination.async.model.PartialAirbyteMessage; import io.airbyte.cdk.integrations.destination.jdbc.SqlOperations; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; import java.sql.PreparedStatement; -import java.sql.SQLException; import java.sql.Timestamp; +import java.sql.Types; import java.time.Instant; import java.util.List; +import java.util.Optional; import java.util.UUID; import java.util.function.Supplier; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class OracleOperations implements SqlOperations { private static final Logger LOGGER = LoggerFactory.getLogger(OracleOperations.class); + private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); private final String tablespace; @@ -58,18 +66,25 @@ public void createTableIfNotExists(final JdbcDatabase database, final String sch @Override public String createTableQuery(final JdbcDatabase database, final String schemaName, final String tableName) { return String.format( - "CREATE TABLE %s.%s ( \n" - + "%s VARCHAR(64) PRIMARY KEY,\n" - + "%s NCLOB,\n" - + "%s TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP\n" - + ")", + """ + CREATE TABLE %s.%s ( + %s VARCHAR(64) PRIMARY KEY, + %s JSON, + %s TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, + %s TIMESTAMP WITH TIME ZONE DEFAULT NULL, + %s JSON + ) + """, schemaName, tableName, - OracleDestination.COLUMN_NAME_AB_ID, OracleDestination.COLUMN_NAME_DATA, OracleDestination.COLUMN_NAME_EMITTED_AT, - OracleDestination.COLUMN_NAME_DATA); + upperQuoted(JavaBaseConstants.COLUMN_NAME_AB_RAW_ID), + upperQuoted(JavaBaseConstants.COLUMN_NAME_DATA), + upperQuoted(JavaBaseConstants.COLUMN_NAME_AB_EXTRACTED_AT), + upperQuoted(JavaBaseConstants.COLUMN_NAME_AB_LOADED_AT), + upperQuoted(JavaBaseConstants.COLUMN_NAME_AB_META)); } private boolean tableExists(final JdbcDatabase database, final String schemaName, final String tableName) throws Exception { - final Integer count = database.queryInt("select count(*) \n from all_tables\n where upper(owner) = upper(?) and upper(table_name) = upper(?)", + final int count = database.queryInt("select count(*) \n from all_tables\n where upper(owner) = upper(?) and upper(table_name) = upper(?)", schemaName, tableName); return count == 1; } @@ -92,27 +107,13 @@ public String truncateTableQuery(final JdbcDatabase database, final String schem return String.format("DELETE FROM %s.%s\n", schemaName, tableName); } - @Override - public void insertRecords(final JdbcDatabase database, - final List records, - final String schemaName, - final String tempTableName) - throws Exception { - final String tableName = String.format("%s.%s", schemaName, tempTableName); - final String columns = String.format("(%s, %s, %s)", - OracleDestination.COLUMN_NAME_AB_ID, OracleDestination.COLUMN_NAME_DATA, OracleDestination.COLUMN_NAME_EMITTED_AT); - final String recordQueryComponent = "(?, ?, ?)\n"; - insertRawRecordsInSingleQuery(tableName, columns, recordQueryComponent, database, records, UUID::randomUUID); - } - // Adapted from SqlUtils.insertRawRecordsInSingleQuery to meet some needs specific to Oracle syntax private static void insertRawRecordsInSingleQuery(final String tableName, final String columns, - final String recordQueryComponent, final JdbcDatabase jdbcDatabase, - final List records, + final List records, final Supplier uuidSupplier) - throws SQLException { + throws Exception { if (records.isEmpty()) { return; } @@ -129,23 +130,34 @@ private static void insertRawRecordsInSingleQuery(final String tableName, // The "SELECT 1 FROM DUAL" at the end is a formality to satisfy the needs of the Oracle syntax. // (see https://stackoverflow.com/a/93724 for details) final StringBuilder sql = new StringBuilder("INSERT ALL "); - records.forEach(r -> sql.append(String.format("INTO %s %s VALUES %s", tableName, columns, recordQueryComponent))); + records.forEach(r -> sql.append(String.format("INTO %s %s VALUES %s", tableName, columns, "(?, ?, ?, ?, ?)\n"))); sql.append(" SELECT 1 FROM DUAL"); final String query = sql.toString(); try (final PreparedStatement statement = connection.prepareStatement(query)) { // second loop: bind values to the SQL string. + // 1-indexed int i = 1; - for (final AirbyteRecordMessage message : records) { - // 1-indexed - final JsonNode formattedData = StandardNameTransformer.formatJsonPath(message.getData()); - statement.setString(i, uuidSupplier.get().toString()); - statement.setString(i + 1, Jsons.serialize(formattedData)); - statement.setTimestamp(i + 2, Timestamp.from(Instant.ofEpochMilli(message.getEmittedAt()))); - i += 3; + for (final PartialAirbyteMessage message : records) { + final Optional jsonData = Optional.ofNullable(message).map(PartialAirbyteMessage::getSerialized); + if (jsonData.isPresent()) { + // This is inefficient and brings me shame but it works + final String formattedData = OBJECT_MAPPER.writeValueAsString( + StandardNameTransformer.formatJsonPath(OBJECT_MAPPER.readTree(jsonData.get()))); + statement.setString(i++, uuidSupplier.get().toString()); + statement.setString(i++, formattedData); + statement.setTimestamp(i++, Timestamp.from(Instant.ofEpochMilli(message.getRecord().getEmittedAt()))); + statement.setNull(i++, Types.TIMESTAMP); + statement.setString(i++, OBJECT_MAPPER.writeValueAsString(message.getRecord().getMeta())); + } else { + LOGGER.warn("Record data is empty, skipping record: {}", message); + } + } statement.execute(); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); } }); } @@ -160,6 +172,9 @@ public String insertTableQuery(final JdbcDatabase database, @Override public void executeTransaction(final JdbcDatabase database, final List queries) throws Exception { + if (queries.isEmpty()) { + return; + } final String SQL = "BEGIN\n COMMIT;\n" + String.join(";\n", queries) + "; \nCOMMIT; \nEND;"; database.execute(SQL); } @@ -174,4 +189,25 @@ public boolean isSchemaRequired() { return true; } + @Override + public boolean isSchemaExists(@Nullable final JdbcDatabase database, @Nullable final String schemaName) throws Exception { + return SqlOperations.super.isSchemaExists(database, schemaName); + } + + @Override + public void insertRecords(@NotNull final JdbcDatabase database, + @NotNull final List records, + @Nullable final String schemaName, + @Nullable final String tableName) + throws Exception { + final String formattedTableName = String.format("%s.%s", schemaName, tableName); + final String columns = String.format("(%s, %s, %s, %s, %s)", + upperQuoted(JavaBaseConstants.COLUMN_NAME_AB_RAW_ID), + upperQuoted(JavaBaseConstants.COLUMN_NAME_DATA), + upperQuoted(JavaBaseConstants.COLUMN_NAME_AB_EXTRACTED_AT), + upperQuoted(JavaBaseConstants.COLUMN_NAME_AB_LOADED_AT), + upperQuoted(JavaBaseConstants.COLUMN_NAME_AB_META)); + insertRawRecordsInSingleQuery(formattedTableName, columns, database, records, UUID::randomUUID); + } + } diff --git a/airbyte-integrations/connectors/destination-oracle/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-oracle/src/main/resources/spec.json index 35aa4090b786d..c5cd4f20adf26 100644 --- a/airbyte-integrations/connectors/destination-oracle/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/destination-oracle/src/main/resources/spec.json @@ -120,6 +120,12 @@ } } ] + }, + "raw_data_schema": { + "type": "string", + "description": "The schema to write raw tables into (default: airbyte_internal)", + "title": "Raw Table Schema Name", + "order": 7 } } } diff --git a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/NneOracleDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/NneOracleDestinationAcceptanceTest.java index fc946fdddae9d..599c50b76a5bf 100644 --- a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/NneOracleDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/NneOracleDestinationAcceptanceTest.java @@ -18,6 +18,7 @@ import io.airbyte.cdk.db.jdbc.JdbcDatabase; import io.airbyte.commons.json.Jsons; import java.sql.SQLException; +import java.time.Duration; import java.util.List; import java.util.Map; import org.junit.jupiter.api.Test; @@ -43,7 +44,8 @@ public void testEncryption() throws SQLException { config.get("host").asText(), config.get("port").asInt(), config.get("sid").asText()), - getAdditionalProperties(algorithm))); + getAdditionalProperties(algorithm), + Duration.ofMinutes(5))); final String networkServiceBanner = "select network_service_banner from v$session_connect_info where sid in (select distinct sid from v$mystat)"; @@ -78,7 +80,7 @@ public void testCheckProtocol() throws SQLException { clone.get("host").asText(), clone.get("port").asInt(), clone.get("sid").asText()), - getAdditionalProperties(algorithm))); + getAdditionalProperties(algorithm), Duration.ofMinutes(5))); final String networkServiceBanner = "SELECT sys_context('USERENV', 'NETWORK_PROTOCOL') as network_protocol FROM dual"; final List collect = database.queryJsons(networkServiceBanner); diff --git a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/SshOracleDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/SshOracleDestinationAcceptanceTest.java index 10ce0fde6c7ac..454e7e339b367 100644 --- a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/SshOracleDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/SshOracleDestinationAcceptanceTest.java @@ -4,6 +4,8 @@ package io.airbyte.integrations.destination.oracle; +import static io.airbyte.cdk.integrations.base.JavaBaseConstantsKt.upperQuoted; + import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.collect.ImmutableMap; @@ -124,7 +126,8 @@ private List retrieveRecordsFromTable(final String tableName, final St (CheckedFunction, Exception>) mangledConfig -> getDatabaseFromConfig(mangledConfig) .query( ctx -> ctx - .fetch(String.format("SELECT * FROM %s.%s ORDER BY %s ASC", schemaName, tableName, OracleDestination.COLUMN_NAME_EMITTED_AT))) + .fetch(String.format("SELECT * FROM %s.%s ORDER BY %s ASC", schemaName, tableName, + upperQuoted(JavaBaseConstants.COLUMN_NAME_AB_EXTRACTED_AT)))) .stream() .map(r -> r.formatJSON(JdbcUtils.getDefaultJSONFormat())) .map(Jsons::deserialize) diff --git a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/UnencryptedOracleDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/UnencryptedOracleDestinationAcceptanceTest.java index d87d36041168f..34e72f9eeedf9 100644 --- a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/UnencryptedOracleDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/UnencryptedOracleDestinationAcceptanceTest.java @@ -4,6 +4,7 @@ package io.airbyte.integrations.destination.oracle; +import static io.airbyte.cdk.integrations.base.JavaBaseConstantsKt.upperQuoted; import static io.airbyte.cdk.integrations.util.HostPortResolver.resolveHost; import static io.airbyte.cdk.integrations.util.HostPortResolver.resolvePort; import static org.hamcrest.CoreMatchers.equalTo; @@ -20,11 +21,13 @@ import io.airbyte.cdk.db.jdbc.DefaultJdbcDatabase; import io.airbyte.cdk.db.jdbc.JdbcDatabase; import io.airbyte.cdk.db.jdbc.JdbcUtils; +import io.airbyte.cdk.integrations.base.JavaBaseConstants; import io.airbyte.cdk.integrations.destination.StandardNameTransformer; import io.airbyte.cdk.integrations.standardtest.destination.DestinationAcceptanceTest; import io.airbyte.cdk.integrations.standardtest.destination.comparator.TestDataComparator; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.string.Strings; +import io.airbyte.integrations.base.destination.typing_deduping.StreamId; import java.sql.SQLException; import java.util.ArrayList; import java.util.HashSet; @@ -72,10 +75,10 @@ protected List retrieveRecords(final TestDestinationEnv env, final String namespace, final JsonNode streamSchema) throws Exception { - return retrieveRecordsFromTable(namingResolver.getRawTableName(streamName), namespace) + return retrieveRecordsFromTable(namingResolver.convertStreamName(StreamId.concatenateRawTableName(namespace, streamName)), namespace) .stream() .map(r -> Jsons.deserialize( - r.get(OracleDestination.COLUMN_NAME_DATA.replace("\"", "")).asText())) + r.get(JavaBaseConstants.COLUMN_NAME_DATA.toUpperCase()).asText())) .collect(Collectors.toList()); } @@ -122,17 +125,16 @@ protected JsonNode getFailCheckConfig() { private List retrieveRecordsFromTable(final String tableName, final String schemaName) throws SQLException { - try (final DSLContext dslContext = getDSLContext(config)) { - final List result = getDatabase(dslContext) - .query(ctx -> new ArrayList<>(ctx.fetch( - String.format("SELECT * FROM %s.%s ORDER BY %s ASC", schemaName, tableName, - OracleDestination.COLUMN_NAME_EMITTED_AT)))); - return result - .stream() - .map(r -> r.formatJSON(JdbcUtils.getDefaultJSONFormat())) - .map(Jsons::deserialize) - .collect(Collectors.toList()); - } + final DSLContext dslContext = getDSLContext(config); + final List result = getDatabase(dslContext) + .query(ctx -> new ArrayList<>(ctx.fetch( + String.format("SELECT * FROM %s.%s ORDER BY %s ASC", schemaName, tableName, + upperQuoted(JavaBaseConstants.COLUMN_NAME_AB_EXTRACTED_AT))))); + return result + .stream() + .map(r -> r.formatJSON(JdbcUtils.getDefaultJSONFormat())) + .map(Jsons::deserialize) + .collect(Collectors.toList()); } private static DSLContext getDSLContext(final JsonNode config) { @@ -159,15 +161,13 @@ protected void setup(final TestDestinationEnv testEnv, HashSet TEST_SCHE db.start(); config = getConfig(db); + final DSLContext dslContext = getDSLContext(config); + final Database database = getDatabase(dslContext); + database.query( + ctx -> ctx.fetch(String.format("CREATE USER %s IDENTIFIED BY %s", schemaName, schemaName))); + database.query(ctx -> ctx.fetch(String.format("GRANT ALL PRIVILEGES TO %s", schemaName))); - try (final DSLContext dslContext = getDSLContext(config)) { - final Database database = getDatabase(dslContext); - database.query( - ctx -> ctx.fetch(String.format("CREATE USER %s IDENTIFIED BY %s", schemaName, schemaName))); - database.query(ctx -> ctx.fetch(String.format("GRANT ALL PRIVILEGES TO %s", schemaName))); - - ((ObjectNode) config).put(JdbcUtils.SCHEMA_KEY, dbName); - } + ((ObjectNode) config).put(JdbcUtils.SCHEMA_KEY, dbName); } @Override diff --git a/airbyte-integrations/connectors/destination-pinecone/.dockerignore b/airbyte-integrations/connectors/destination-pinecone/.dockerignore index e494245da5390..a598b7b8f5cc8 100644 --- a/airbyte-integrations/connectors/destination-pinecone/.dockerignore +++ b/airbyte-integrations/connectors/destination-pinecone/.dockerignore @@ -2,4 +2,5 @@ !Dockerfile !main.py !destination_pinecone -!setup.py +!pyproject.toml +!poetry.lock diff --git a/airbyte-integrations/connectors/destination-pinecone/README.md b/airbyte-integrations/connectors/destination-pinecone/README.md index b4a52f0ff1778..6ea64e53430ef 100644 --- a/airbyte-integrations/connectors/destination-pinecone/README.md +++ b/airbyte-integrations/connectors/destination-pinecone/README.md @@ -7,26 +7,13 @@ This is the repository for the Pinecone destination connector, written in Python ### Prerequisites **To iterate on this connector, make sure to complete this prerequisites section.** -#### Minimum Python version required `= 3.10.0` +#### Minimum Python version required `= 3.9.0` -#### Build & Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python -m venv .venv -``` - -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt +### Installing the connector +From this connector directory, run: +```bash +poetry install --with dev ``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. - -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. #### Create credentials **If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/destinations/pinecone) @@ -41,14 +28,12 @@ and place them into `secrets/config.json`. ``` python main.py spec python main.py check --config secrets/config.json -python main.py discover --config secrets/config.json -python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +python main.py write --config secrets/config.json --catalog integration_tests/configured_catalog.json ``` ### Locally running the connector docker image - #### Use `airbyte-ci` to build your connector The Airbyte way of building this connector is to use our `airbyte-ci` tool. You can follow install instructions [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md#L1). @@ -126,6 +111,18 @@ You can run our full test suite locally using [`airbyte-ci`](https://github.com/ airbyte-ci connectors --name=destination-pinecone test ``` +### Unit Tests +To run unit tests locally, from the connector directory run: +``` +poetry run pytest -s unit_tests +``` + +### Integration Tests +To run integration tests locally, make sure you have a secrets/config.json as explained above, and then run: +``` +poetry run pytest -s integration_tests +``` + ### Customizing acceptance Tests Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. diff --git a/airbyte-integrations/connectors/destination-pinecone/metadata.yaml b/airbyte-integrations/connectors/destination-pinecone/metadata.yaml index 4d26762da8ea9..f7202798af974 100644 --- a/airbyte-integrations/connectors/destination-pinecone/metadata.yaml +++ b/airbyte-integrations/connectors/destination-pinecone/metadata.yaml @@ -13,13 +13,17 @@ data: connectorSubtype: vectorstore connectorType: destination definitionId: 3d2b6f84-7f0d-4e3f-a5e5-7c7d4b50eabd - dockerImageTag: 0.0.22 + dockerImageTag: 0.0.24 dockerRepository: airbyte/destination-pinecone documentationUrl: https://docs.airbyte.com/integrations/destinations/pinecone githubIssueLabel: destination-pinecone icon: pinecone.svg license: MIT name: Pinecone + remoteRegistries: + pypi: + enabled: false # TODO: enable once the CLI is working + packageName: airbyte-destination-pinecone registries: cloud: enabled: true diff --git a/airbyte-integrations/connectors/destination-pinecone/poetry.lock b/airbyte-integrations/connectors/destination-pinecone/poetry.lock new file mode 100644 index 0000000000000..a3da1fd378a46 --- /dev/null +++ b/airbyte-integrations/connectors/destination-pinecone/poetry.lock @@ -0,0 +1,3183 @@ +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. + +[[package]] +name = "aiohttp" +version = "3.9.4" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "aiohttp-3.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:76d32588ef7e4a3f3adff1956a0ba96faabbdee58f2407c122dd45aa6e34f372"}, + {file = "aiohttp-3.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:56181093c10dbc6ceb8a29dfeea1e815e1dfdc020169203d87fd8d37616f73f9"}, + {file = "aiohttp-3.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7a5b676d3c65e88b3aca41816bf72831898fcd73f0cbb2680e9d88e819d1e4d"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1df528a85fb404899d4207a8d9934cfd6be626e30e5d3a5544a83dbae6d8a7e"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f595db1bceabd71c82e92df212dd9525a8a2c6947d39e3c994c4f27d2fe15b11"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c0b09d76e5a4caac3d27752027fbd43dc987b95f3748fad2b924a03fe8632ad"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:689eb4356649ec9535b3686200b231876fb4cab4aca54e3bece71d37f50c1d13"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3666cf4182efdb44d73602379a66f5fdfd5da0db5e4520f0ac0dcca644a3497"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b65b0f8747b013570eea2f75726046fa54fa8e0c5db60f3b98dd5d161052004a"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1885d2470955f70dfdd33a02e1749613c5a9c5ab855f6db38e0b9389453dce7"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0593822dcdb9483d41f12041ff7c90d4d1033ec0e880bcfaf102919b715f47f1"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:47f6eb74e1ecb5e19a78f4a4228aa24df7fbab3b62d4a625d3f41194a08bd54f"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c8b04a3dbd54de6ccb7604242fe3ad67f2f3ca558f2d33fe19d4b08d90701a89"}, + {file = "aiohttp-3.9.4-cp310-cp310-win32.whl", hash = "sha256:8a78dfb198a328bfb38e4308ca8167028920fb747ddcf086ce706fbdd23b2926"}, + {file = "aiohttp-3.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:e78da6b55275987cbc89141a1d8e75f5070e577c482dd48bd9123a76a96f0bbb"}, + {file = "aiohttp-3.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c111b3c69060d2bafc446917534150fd049e7aedd6cbf21ba526a5a97b4402a5"}, + {file = "aiohttp-3.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:efbdd51872cf170093998c87ccdf3cb5993add3559341a8e5708bcb311934c94"}, + {file = "aiohttp-3.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7bfdb41dc6e85d8535b00d73947548a748e9534e8e4fddd2638109ff3fb081df"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bd9d334412961125e9f68d5b73c1d0ab9ea3f74a58a475e6b119f5293eee7ba"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:35d78076736f4a668d57ade00c65d30a8ce28719d8a42471b2a06ccd1a2e3063"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:824dff4f9f4d0f59d0fa3577932ee9a20e09edec8a2f813e1d6b9f89ced8293f"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52b8b4e06fc15519019e128abedaeb56412b106ab88b3c452188ca47a25c4093"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eae569fb1e7559d4f3919965617bb39f9e753967fae55ce13454bec2d1c54f09"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:69b97aa5792428f321f72aeb2f118e56893371f27e0b7d05750bcad06fc42ca1"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4d79aad0ad4b980663316f26d9a492e8fab2af77c69c0f33780a56843ad2f89e"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:d6577140cd7db19e430661e4b2653680194ea8c22c994bc65b7a19d8ec834403"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:9860d455847cd98eb67897f5957b7cd69fbcb436dd3f06099230f16a66e66f79"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:69ff36d3f8f5652994e08bd22f093e11cfd0444cea310f92e01b45a4e46b624e"}, + {file = "aiohttp-3.9.4-cp311-cp311-win32.whl", hash = "sha256:e27d3b5ed2c2013bce66ad67ee57cbf614288bda8cdf426c8d8fe548316f1b5f"}, + {file = "aiohttp-3.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d6a67e26daa686a6fbdb600a9af8619c80a332556245fa8e86c747d226ab1a1e"}, + {file = "aiohttp-3.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c5ff8ff44825736a4065d8544b43b43ee4c6dd1530f3a08e6c0578a813b0aa35"}, + {file = "aiohttp-3.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d12a244627eba4e9dc52cbf924edef905ddd6cafc6513849b4876076a6f38b0e"}, + {file = "aiohttp-3.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dcad56c8d8348e7e468899d2fb3b309b9bc59d94e6db08710555f7436156097f"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f7e69a7fd4b5ce419238388e55abd220336bd32212c673ceabc57ccf3d05b55"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4870cb049f10d7680c239b55428916d84158798eb8f353e74fa2c98980dcc0b"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2feaf1b7031ede1bc0880cec4b0776fd347259a723d625357bb4b82f62687b"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:939393e8c3f0a5bcd33ef7ace67680c318dc2ae406f15e381c0054dd658397de"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d2334e387b2adcc944680bebcf412743f2caf4eeebd550f67249c1c3696be04"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e0198ea897680e480845ec0ffc5a14e8b694e25b3f104f63676d55bf76a82f1a"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e40d2cd22914d67c84824045861a5bb0fb46586b15dfe4f046c7495bf08306b2"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:aba80e77c227f4234aa34a5ff2b6ff30c5d6a827a91d22ff6b999de9175d71bd"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:fb68dc73bc8ac322d2e392a59a9e396c4f35cb6fdbdd749e139d1d6c985f2527"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f3460a92638dce7e47062cf088d6e7663adb135e936cb117be88d5e6c48c9d53"}, + {file = "aiohttp-3.9.4-cp312-cp312-win32.whl", hash = "sha256:32dc814ddbb254f6170bca198fe307920f6c1308a5492f049f7f63554b88ef36"}, + {file = "aiohttp-3.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:63f41a909d182d2b78fe3abef557fcc14da50c7852f70ae3be60e83ff64edba5"}, + {file = "aiohttp-3.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c3770365675f6be220032f6609a8fbad994d6dcf3ef7dbcf295c7ee70884c9af"}, + {file = "aiohttp-3.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:305edae1dea368ce09bcb858cf5a63a064f3bff4767dec6fa60a0cc0e805a1d3"}, + {file = "aiohttp-3.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6f121900131d116e4a93b55ab0d12ad72573f967b100e49086e496a9b24523ea"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b71e614c1ae35c3d62a293b19eface83d5e4d194e3eb2fabb10059d33e6e8cbf"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:419f009fa4cfde4d16a7fc070d64f36d70a8d35a90d71aa27670bba2be4fd039"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b39476ee69cfe64061fd77a73bf692c40021f8547cda617a3466530ef63f947"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b33f34c9c7decdb2ab99c74be6443942b730b56d9c5ee48fb7df2c86492f293c"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c78700130ce2dcebb1a8103202ae795be2fa8c9351d0dd22338fe3dac74847d9"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:268ba22d917655d1259af2d5659072b7dc11b4e1dc2cb9662fdd867d75afc6a4"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:17e7c051f53a0d2ebf33013a9cbf020bb4e098c4bc5bce6f7b0c962108d97eab"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:7be99f4abb008cb38e144f85f515598f4c2c8932bf11b65add0ff59c9c876d99"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:d58a54d6ff08d2547656356eea8572b224e6f9bbc0cf55fa9966bcaac4ddfb10"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7673a76772bda15d0d10d1aa881b7911d0580c980dbd16e59d7ba1422b2d83cd"}, + {file = "aiohttp-3.9.4-cp38-cp38-win32.whl", hash = "sha256:e4370dda04dc8951012f30e1ce7956a0a226ac0714a7b6c389fb2f43f22a250e"}, + {file = "aiohttp-3.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:eb30c4510a691bb87081192a394fb661860e75ca3896c01c6d186febe7c88530"}, + {file = "aiohttp-3.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:84e90494db7df3be5e056f91412f9fa9e611fbe8ce4aaef70647297f5943b276"}, + {file = "aiohttp-3.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7d4845f8501ab28ebfdbeab980a50a273b415cf69e96e4e674d43d86a464df9d"}, + {file = "aiohttp-3.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:69046cd9a2a17245c4ce3c1f1a4ff8c70c7701ef222fce3d1d8435f09042bba1"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b73a06bafc8dcc508420db43b4dd5850e41e69de99009d0351c4f3007960019"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:418bb0038dfafeac923823c2e63226179976c76f981a2aaad0ad5d51f2229bca"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:71a8f241456b6c2668374d5d28398f8e8cdae4cce568aaea54e0f39359cd928d"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:935c369bf8acc2dc26f6eeb5222768aa7c62917c3554f7215f2ead7386b33748"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74e4e48c8752d14ecfb36d2ebb3d76d614320570e14de0a3aa7a726ff150a03c"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:916b0417aeddf2c8c61291238ce25286f391a6acb6f28005dd9ce282bd6311b6"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9b6787b6d0b3518b2ee4cbeadd24a507756ee703adbac1ab6dc7c4434b8c572a"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:221204dbda5ef350e8db6287937621cf75e85778b296c9c52260b522231940ed"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:10afd99b8251022ddf81eaed1d90f5a988e349ee7d779eb429fb07b670751e8c"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2506d9f7a9b91033201be9ffe7d89c6a54150b0578803cce5cb84a943d075bc3"}, + {file = "aiohttp-3.9.4-cp39-cp39-win32.whl", hash = "sha256:e571fdd9efd65e86c6af2f332e0e95dad259bfe6beb5d15b3c3eca3a6eb5d87b"}, + {file = "aiohttp-3.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:7d29dd5319d20aa3b7749719ac9685fbd926f71ac8c77b2477272725f882072d"}, + {file = "aiohttp-3.9.4.tar.gz", hash = "sha256:6ff71ede6d9a5a58cfb7b6fffc83ab5d4a63138276c771ac91ceaaddf5459644"}, +] + +[package.dependencies] +aiosignal = ">=1.1.2" +async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} +attrs = ">=17.3.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +yarl = ">=1.0,<2.0" + +[package.extras] +speedups = ["Brotli", "aiodns", "brotlicffi"] + +[[package]] +name = "aiosignal" +version = "1.3.1" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = false +python-versions = ">=3.7" +files = [ + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, + {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" + +[[package]] +name = "airbyte-cdk" +version = "0.81.6" +description = "A framework for writing Airbyte Connectors." +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "airbyte_cdk-0.81.6-py3-none-any.whl", hash = "sha256:456a301edae4f8c99b77d89aaa2016827c43acfd7f9fb61e7a961760b954695b"}, + {file = "airbyte_cdk-0.81.6.tar.gz", hash = "sha256:3d4e2f3dc8177a109cb70e1d701583c870e2c31168dad7498cd2c84b0cc34637"}, +] + +[package.dependencies] +airbyte-protocol-models = "*" +backoff = "*" +cachetools = "*" +cohere = {version = "4.21", optional = true, markers = "extra == \"vector-db-based\""} +Deprecated = ">=1.2,<1.3" +dpath = ">=2.0.1,<2.1.0" +genson = "1.2.2" +isodate = ">=0.6.1,<0.7.0" +Jinja2 = ">=3.1.2,<3.2.0" +jsonref = ">=0.2,<0.3" +jsonschema = ">=3.2.0,<3.3.0" +langchain = {version = "0.1.16", optional = true, markers = "extra == \"vector-db-based\""} +langchain_core = "0.1.42" +openai = {version = "0.27.9", extras = ["embeddings"], optional = true, markers = "extra == \"vector-db-based\""} +pendulum = "<3.0.0" +pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" +python-dateutil = "*" +PyYAML = ">=6.0.1,<7.0.0" +requests = "*" +requests_cache = "*" +tiktoken = {version = "0.4.0", optional = true, markers = "extra == \"vector-db-based\""} +wcmatch = "8.4" + +[package.extras] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] +vector-db-based = ["cohere (==4.21)", "langchain (==0.1.16)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] + +[[package]] +name = "airbyte-protocol-models" +version = "0.9.0" +description = "Declares the Airbyte Protocol." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte_protocol_models-0.9.0-py3-none-any.whl", hash = "sha256:e972e140b5efd1edad5a338bcae8fdee9fc12545caf2c321e0f61b151c163a9b"}, + {file = "airbyte_protocol_models-0.9.0.tar.gz", hash = "sha256:40b69c33df23fe82d7078e84beb123bd604480e4d73cb277a890fcc92aedc8d2"}, +] + +[package.dependencies] +pydantic = ">=1.9.2,<2.0.0" + +[[package]] +name = "async-timeout" +version = "4.0.3" +description = "Timeout context manager for asyncio programs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, +] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "bracex" +version = "2.4" +description = "Bash style brace expander." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + +[[package]] +name = "cachetools" +version = "5.3.3" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "cohere" +version = "4.21" +description = "" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "cohere-4.21-py3-none-any.whl", hash = "sha256:5eb81db62e78b3156e734421cc3e657054f9d9f1d68b9f38cf48fe3a8ae40dbc"}, + {file = "cohere-4.21.tar.gz", hash = "sha256:f611438f409dfc5d5a0a153a585349f5a80b169c7102b5994d9999ecf8440866"}, +] + +[package.dependencies] +aiohttp = ">=3.0,<4.0" +backoff = ">=2.0,<3.0" +fastavro = {version = "1.8.2", markers = "python_version >= \"3.8\""} +importlib_metadata = ">=6.0,<7.0" +requests = ">=2.25.0,<3.0.0" +urllib3 = ">=1.26,<3" + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "contourpy" +version = "1.2.1" +description = "Python library for calculating contours of 2D quadrilateral grids" +optional = false +python-versions = ">=3.9" +files = [ + {file = "contourpy-1.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bd7c23df857d488f418439686d3b10ae2fbf9bc256cd045b37a8c16575ea1040"}, + {file = "contourpy-1.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5b9eb0ca724a241683c9685a484da9d35c872fd42756574a7cfbf58af26677fd"}, + {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c75507d0a55378240f781599c30e7776674dbaf883a46d1c90f37e563453480"}, + {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11959f0ce4a6f7b76ec578576a0b61a28bdc0696194b6347ba3f1c53827178b9"}, + {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eb3315a8a236ee19b6df481fc5f997436e8ade24a9f03dfdc6bd490fea20c6da"}, + {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39f3ecaf76cd98e802f094e0d4fbc6dc9c45a8d0c4d185f0f6c2234e14e5f75b"}, + {file = "contourpy-1.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:94b34f32646ca0414237168d68a9157cb3889f06b096612afdd296003fdd32fd"}, + {file = "contourpy-1.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:457499c79fa84593f22454bbd27670227874cd2ff5d6c84e60575c8b50a69619"}, + {file = "contourpy-1.2.1-cp310-cp310-win32.whl", hash = "sha256:ac58bdee53cbeba2ecad824fa8159493f0bf3b8ea4e93feb06c9a465d6c87da8"}, + {file = "contourpy-1.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:9cffe0f850e89d7c0012a1fb8730f75edd4320a0a731ed0c183904fe6ecfc3a9"}, + {file = "contourpy-1.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6022cecf8f44e36af10bd9118ca71f371078b4c168b6e0fab43d4a889985dbb5"}, + {file = "contourpy-1.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ef5adb9a3b1d0c645ff694f9bca7702ec2c70f4d734f9922ea34de02294fdf72"}, + {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6150ffa5c767bc6332df27157d95442c379b7dce3a38dff89c0f39b63275696f"}, + {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c863140fafc615c14a4bf4efd0f4425c02230eb8ef02784c9a156461e62c965"}, + {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:00e5388f71c1a0610e6fe56b5c44ab7ba14165cdd6d695429c5cd94021e390b2"}, + {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4492d82b3bc7fbb7e3610747b159869468079fe149ec5c4d771fa1f614a14df"}, + {file = "contourpy-1.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:49e70d111fee47284d9dd867c9bb9a7058a3c617274900780c43e38d90fe1205"}, + {file = "contourpy-1.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b59c0ffceff8d4d3996a45f2bb6f4c207f94684a96bf3d9728dbb77428dd8cb8"}, + {file = "contourpy-1.2.1-cp311-cp311-win32.whl", hash = "sha256:7b4182299f251060996af5249c286bae9361fa8c6a9cda5efc29fe8bfd6062ec"}, + {file = "contourpy-1.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2855c8b0b55958265e8b5888d6a615ba02883b225f2227461aa9127c578a4922"}, + {file = "contourpy-1.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:62828cada4a2b850dbef89c81f5a33741898b305db244904de418cc957ff05dc"}, + {file = "contourpy-1.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:309be79c0a354afff9ff7da4aaed7c3257e77edf6c1b448a779329431ee79d7e"}, + {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e785e0f2ef0d567099b9ff92cbfb958d71c2d5b9259981cd9bee81bd194c9a4"}, + {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1cac0a8f71a041aa587410424ad46dfa6a11f6149ceb219ce7dd48f6b02b87a7"}, + {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af3f4485884750dddd9c25cb7e3915d83c2db92488b38ccb77dd594eac84c4a0"}, + {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ce6889abac9a42afd07a562c2d6d4b2b7134f83f18571d859b25624a331c90b"}, + {file = "contourpy-1.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a1eea9aecf761c661d096d39ed9026574de8adb2ae1c5bd7b33558af884fb2ce"}, + {file = "contourpy-1.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:187fa1d4c6acc06adb0fae5544c59898ad781409e61a926ac7e84b8f276dcef4"}, + {file = "contourpy-1.2.1-cp312-cp312-win32.whl", hash = "sha256:c2528d60e398c7c4c799d56f907664673a807635b857df18f7ae64d3e6ce2d9f"}, + {file = "contourpy-1.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:1a07fc092a4088ee952ddae19a2b2a85757b923217b7eed584fdf25f53a6e7ce"}, + {file = "contourpy-1.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bb6834cbd983b19f06908b45bfc2dad6ac9479ae04abe923a275b5f48f1a186b"}, + {file = "contourpy-1.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1d59e739ab0e3520e62a26c60707cc3ab0365d2f8fecea74bfe4de72dc56388f"}, + {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd3db01f59fdcbce5b22afad19e390260d6d0222f35a1023d9adc5690a889364"}, + {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a12a813949e5066148712a0626895c26b2578874e4cc63160bb007e6df3436fe"}, + {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe0ccca550bb8e5abc22f530ec0466136379c01321fd94f30a22231e8a48d985"}, + {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1d59258c3c67c865435d8fbeb35f8c59b8bef3d6f46c1f29f6123556af28445"}, + {file = "contourpy-1.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f32c38afb74bd98ce26de7cc74a67b40afb7b05aae7b42924ea990d51e4dac02"}, + {file = "contourpy-1.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d31a63bc6e6d87f77d71e1abbd7387ab817a66733734883d1fc0021ed9bfa083"}, + {file = "contourpy-1.2.1-cp39-cp39-win32.whl", hash = "sha256:ddcb8581510311e13421b1f544403c16e901c4e8f09083c881fab2be80ee31ba"}, + {file = "contourpy-1.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:10a37ae557aabf2509c79715cd20b62e4c7c28b8cd62dd7d99e5ed3ce28c3fd9"}, + {file = "contourpy-1.2.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a31f94983fecbac95e58388210427d68cd30fe8a36927980fab9c20062645609"}, + {file = "contourpy-1.2.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef2b055471c0eb466033760a521efb9d8a32b99ab907fc8358481a1dd29e3bd3"}, + {file = "contourpy-1.2.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b33d2bc4f69caedcd0a275329eb2198f560b325605810895627be5d4b876bf7f"}, + {file = "contourpy-1.2.1.tar.gz", hash = "sha256:4d8908b3bee1c889e547867ca4cdc54e5ab6be6d3e078556814a22457f49423c"}, +] + +[package.dependencies] +numpy = ">=1.20" + +[package.extras] +bokeh = ["bokeh", "selenium"] +docs = ["furo", "sphinx (>=7.2)", "sphinx-copybutton"] +mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.8.0)", "types-Pillow"] +test = ["Pillow", "contourpy[test-no-images]", "matplotlib"] +test-no-images = ["pytest", "pytest-cov", "pytest-xdist", "wurlitzer"] + +[[package]] +name = "cycler" +version = "0.12.1" +description = "Composable style cycles" +optional = false +python-versions = ">=3.8" +files = [ + {file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"}, + {file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"}, +] + +[package.extras] +docs = ["ipython", "matplotlib", "numpydoc", "sphinx"] +tests = ["pytest", "pytest-cov", "pytest-xdist"] + +[[package]] +name = "dataclasses-json" +version = "0.6.4" +description = "Easily serialize dataclasses to and from JSON." +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "dataclasses_json-0.6.4-py3-none-any.whl", hash = "sha256:f90578b8a3177f7552f4e1a6e535e84293cd5da421fcce0642d49c0d7bdf8df2"}, + {file = "dataclasses_json-0.6.4.tar.gz", hash = "sha256:73696ebf24936560cca79a2430cbc4f3dd23ac7bf46ed17f38e5e5e7657a6377"}, +] + +[package.dependencies] +marshmallow = ">=3.18.0,<4.0.0" +typing-inspect = ">=0.4.0,<1" + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "dnspython" +version = "2.6.1" +description = "DNS toolkit" +optional = false +python-versions = ">=3.8" +files = [ + {file = "dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"}, + {file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"}, +] + +[package.extras] +dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "sphinx (>=7.2.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] +dnssec = ["cryptography (>=41)"] +doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] +doq = ["aioquic (>=0.9.25)"] +idna = ["idna (>=3.6)"] +trio = ["trio (>=0.23)"] +wmi = ["wmi (>=1.5.1)"] + +[[package]] +name = "dpath" +version = "2.0.8" +description = "Filesystem-like pathing and searching for dictionaries" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, + {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, +] + +[[package]] +name = "et-xmlfile" +version = "1.1.0" +description = "An implementation of lxml.xmlfile for the standard library" +optional = false +python-versions = ">=3.6" +files = [ + {file = "et_xmlfile-1.1.0-py3-none-any.whl", hash = "sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"}, + {file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "fastavro" +version = "1.8.2" +description = "Fast read/write of AVRO files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fastavro-1.8.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:0e08964b2e9a455d831f2557402a683d4c4d45206f2ab9ade7c69d3dc14e0e58"}, + {file = "fastavro-1.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:401a70b1e5c7161420c6019e0c8afa88f7c8a373468591f5ec37639a903c2509"}, + {file = "fastavro-1.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef1ed3eaa4240c05698d02d8d0c010b9a03780eda37b492da6cd4c9d37e04ec"}, + {file = "fastavro-1.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:543185a672ff6306beb329b57a7b8a3a2dd1eb21a5ccc530150623d58d48bb98"}, + {file = "fastavro-1.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ffbf8bae1edb50fe7beeffc3afa8e684686550c2e5d31bf01c25cfa213f581e1"}, + {file = "fastavro-1.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:bb545eb9d876bc7b785e27e98e7720ada7eee7d7a1729798d2ed51517f13500a"}, + {file = "fastavro-1.8.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2b837d3038c651046252bc92c1b9899bf21c7927a148a1ff89599c36c2a331ca"}, + {file = "fastavro-1.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3510e96c0a47e4e914bd1a29c954eb662bfa24849ad92e597cb97cc79f21af7"}, + {file = "fastavro-1.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccc0e74f2c2ab357f39bb73d67fcdb6dc10e23fdbbd399326139f72ec0fb99a3"}, + {file = "fastavro-1.8.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:add51c70d0ab1175601c75cd687bbe9d16ae312cd8899b907aafe0d79ee2bc1d"}, + {file = "fastavro-1.8.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d9e2662f57e6453e9a2c9fb4f54b2a9e62e3e46f5a412ac00558112336d23883"}, + {file = "fastavro-1.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:fea75cf53a93c56dd56e68abce8d314ef877b27451c870cd7ede7582d34c08a7"}, + {file = "fastavro-1.8.2-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:f489020bb8664c2737c03457ad5dbd490579ddab6f0a7b5c17fecfe982715a89"}, + {file = "fastavro-1.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a547625c138efd5e61300119241041906ee8cb426fc7aa789900f87af7ed330d"}, + {file = "fastavro-1.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53beb458f30c9ad4aa7bff4a42243ff990ffb713b6ce0cd9b360cbc3d648fe52"}, + {file = "fastavro-1.8.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7b1b2cbd2dd851452306beed0ab9bdaeeab1cc8ad46f84b47cd81eeaff6dd6b8"}, + {file = "fastavro-1.8.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d29e9baee0b2f37ecd09bde3b487cf900431fd548c85be3e4fe1b9a0b2a917f1"}, + {file = "fastavro-1.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:66e132c710663230292bc63e2cb79cf95b16ccb94a5fc99bb63694b24e312fc5"}, + {file = "fastavro-1.8.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:38aca63ce604039bcdf2edd14912d00287bdbf8b76f9aa42b28e6ca0bf950092"}, + {file = "fastavro-1.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9787835f6449ee94713e7993a700432fce3763024791ffa8a58dc91ef9d1f950"}, + {file = "fastavro-1.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:536cb448bc83811056be02749fd9df37a69621678f02597d272970a769e9b40c"}, + {file = "fastavro-1.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e9d5027cf7d9968f8f819958b41bfedb933323ea6d6a0485eefacaa1afd91f54"}, + {file = "fastavro-1.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:792adfc0c80c7f1109e0ab4b0decef20691fdf0a45091d397a0563872eb56d42"}, + {file = "fastavro-1.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:650b22766259f7dd7519dfa4e4658f0e233c319efa130b9cf0c36a500e09cc57"}, + {file = "fastavro-1.8.2.tar.gz", hash = "sha256:ab9d9226d4b66b6b3d0661a57cd45259b0868fed1c0cd4fac95249b9e0973320"}, +] + +[package.extras] +codecs = ["lz4", "python-snappy", "zstandard"] +lz4 = ["lz4"] +snappy = ["python-snappy"] +zstandard = ["zstandard"] + +[[package]] +name = "fonttools" +version = "4.51.0" +description = "Tools to manipulate font files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fonttools-4.51.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:84d7751f4468dd8cdd03ddada18b8b0857a5beec80bce9f435742abc9a851a74"}, + {file = "fonttools-4.51.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8b4850fa2ef2cfbc1d1f689bc159ef0f45d8d83298c1425838095bf53ef46308"}, + {file = "fonttools-4.51.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5b48a1121117047d82695d276c2af2ee3a24ffe0f502ed581acc2673ecf1037"}, + {file = "fonttools-4.51.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:180194c7fe60c989bb627d7ed5011f2bef1c4d36ecf3ec64daec8302f1ae0716"}, + {file = "fonttools-4.51.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:96a48e137c36be55e68845fc4284533bda2980f8d6f835e26bca79d7e2006438"}, + {file = "fonttools-4.51.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:806e7912c32a657fa39d2d6eb1d3012d35f841387c8fc6cf349ed70b7c340039"}, + {file = "fonttools-4.51.0-cp310-cp310-win32.whl", hash = "sha256:32b17504696f605e9e960647c5f64b35704782a502cc26a37b800b4d69ff3c77"}, + {file = "fonttools-4.51.0-cp310-cp310-win_amd64.whl", hash = "sha256:c7e91abdfae1b5c9e3a543f48ce96013f9a08c6c9668f1e6be0beabf0a569c1b"}, + {file = "fonttools-4.51.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a8feca65bab31479d795b0d16c9a9852902e3a3c0630678efb0b2b7941ea9c74"}, + {file = "fonttools-4.51.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ac27f436e8af7779f0bb4d5425aa3535270494d3bc5459ed27de3f03151e4c2"}, + {file = "fonttools-4.51.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e19bd9e9964a09cd2433a4b100ca7f34e34731e0758e13ba9a1ed6e5468cc0f"}, + {file = "fonttools-4.51.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2b92381f37b39ba2fc98c3a45a9d6383bfc9916a87d66ccb6553f7bdd129097"}, + {file = "fonttools-4.51.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5f6bc991d1610f5c3bbe997b0233cbc234b8e82fa99fc0b2932dc1ca5e5afec0"}, + {file = "fonttools-4.51.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9696fe9f3f0c32e9a321d5268208a7cc9205a52f99b89479d1b035ed54c923f1"}, + {file = "fonttools-4.51.0-cp311-cp311-win32.whl", hash = "sha256:3bee3f3bd9fa1d5ee616ccfd13b27ca605c2b4270e45715bd2883e9504735034"}, + {file = "fonttools-4.51.0-cp311-cp311-win_amd64.whl", hash = "sha256:0f08c901d3866a8905363619e3741c33f0a83a680d92a9f0e575985c2634fcc1"}, + {file = "fonttools-4.51.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4060acc2bfa2d8e98117828a238889f13b6f69d59f4f2d5857eece5277b829ba"}, + {file = "fonttools-4.51.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1250e818b5f8a679ad79660855528120a8f0288f8f30ec88b83db51515411fcc"}, + {file = "fonttools-4.51.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76f1777d8b3386479ffb4a282e74318e730014d86ce60f016908d9801af9ca2a"}, + {file = "fonttools-4.51.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b5ad456813d93b9c4b7ee55302208db2b45324315129d85275c01f5cb7e61a2"}, + {file = "fonttools-4.51.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:68b3fb7775a923be73e739f92f7e8a72725fd333eab24834041365d2278c3671"}, + {file = "fonttools-4.51.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8e2f1a4499e3b5ee82c19b5ee57f0294673125c65b0a1ff3764ea1f9db2f9ef5"}, + {file = "fonttools-4.51.0-cp312-cp312-win32.whl", hash = "sha256:278e50f6b003c6aed19bae2242b364e575bcb16304b53f2b64f6551b9c000e15"}, + {file = "fonttools-4.51.0-cp312-cp312-win_amd64.whl", hash = "sha256:b3c61423f22165541b9403ee39874dcae84cd57a9078b82e1dce8cb06b07fa2e"}, + {file = "fonttools-4.51.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:1621ee57da887c17312acc4b0e7ac30d3a4fb0fec6174b2e3754a74c26bbed1e"}, + {file = "fonttools-4.51.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d9298be7a05bb4801f558522adbe2feea1b0b103d5294ebf24a92dd49b78e5"}, + {file = "fonttools-4.51.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee1af4be1c5afe4c96ca23badd368d8dc75f611887fb0c0dac9f71ee5d6f110e"}, + {file = "fonttools-4.51.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c18b49adc721a7d0b8dfe7c3130c89b8704baf599fb396396d07d4aa69b824a1"}, + {file = "fonttools-4.51.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de7c29bdbdd35811f14493ffd2534b88f0ce1b9065316433b22d63ca1cd21f14"}, + {file = "fonttools-4.51.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cadf4e12a608ef1d13e039864f484c8a968840afa0258b0b843a0556497ea9ed"}, + {file = "fonttools-4.51.0-cp38-cp38-win32.whl", hash = "sha256:aefa011207ed36cd280babfaa8510b8176f1a77261833e895a9d96e57e44802f"}, + {file = "fonttools-4.51.0-cp38-cp38-win_amd64.whl", hash = "sha256:865a58b6e60b0938874af0968cd0553bcd88e0b2cb6e588727117bd099eef836"}, + {file = "fonttools-4.51.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:60a3409c9112aec02d5fb546f557bca6efa773dcb32ac147c6baf5f742e6258b"}, + {file = "fonttools-4.51.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f7e89853d8bea103c8e3514b9f9dc86b5b4120afb4583b57eb10dfa5afbe0936"}, + {file = "fonttools-4.51.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56fc244f2585d6c00b9bcc59e6593e646cf095a96fe68d62cd4da53dd1287b55"}, + {file = "fonttools-4.51.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d145976194a5242fdd22df18a1b451481a88071feadf251221af110ca8f00ce"}, + {file = "fonttools-4.51.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5b8cab0c137ca229433570151b5c1fc6af212680b58b15abd797dcdd9dd5051"}, + {file = "fonttools-4.51.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:54dcf21a2f2d06ded676e3c3f9f74b2bafded3a8ff12f0983160b13e9f2fb4a7"}, + {file = "fonttools-4.51.0-cp39-cp39-win32.whl", hash = "sha256:0118ef998a0699a96c7b28457f15546815015a2710a1b23a7bf6c1be60c01636"}, + {file = "fonttools-4.51.0-cp39-cp39-win_amd64.whl", hash = "sha256:599bdb75e220241cedc6faebfafedd7670335d2e29620d207dd0378a4e9ccc5a"}, + {file = "fonttools-4.51.0-py3-none-any.whl", hash = "sha256:15c94eeef6b095831067f72c825eb0e2d48bb4cea0647c1b05c981ecba2bf39f"}, + {file = "fonttools-4.51.0.tar.gz", hash = "sha256:dc0673361331566d7a663d7ce0f6fdcbfbdc1f59c6e3ed1165ad7202ca183c68"}, +] + +[package.extras] +all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "pycairo", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0)", "xattr", "zopfli (>=0.1.4)"] +graphite = ["lz4 (>=1.7.4.2)"] +interpolatable = ["munkres", "pycairo", "scipy"] +lxml = ["lxml (>=4.0)"] +pathops = ["skia-pathops (>=0.5.0)"] +plot = ["matplotlib"] +repacker = ["uharfbuzz (>=0.23.0)"] +symfont = ["sympy"] +type1 = ["xattr"] +ufo = ["fs (>=2.2.0,<3)"] +unicode = ["unicodedata2 (>=15.1.0)"] +woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] + +[[package]] +name = "frozenlist" +version = "1.4.1" +description = "A list-like structure which implements collections.abc.MutableSequence" +optional = false +python-versions = ">=3.8" +files = [ + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, + {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, + {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, + {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, + {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, + {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, + {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, + {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, + {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, + {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, + {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, + {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, + {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, +] + +[[package]] +name = "genson" +version = "1.2.2" +description = "GenSON is a powerful, user-friendly JSON Schema generator." +optional = false +python-versions = "*" +files = [ + {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, +] + +[[package]] +name = "googleapis-common-protos" +version = "1.63.0" +description = "Common protobufs used in Google APIs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "googleapis-common-protos-1.63.0.tar.gz", hash = "sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e"}, + {file = "googleapis_common_protos-1.63.0-py2.py3-none-any.whl", hash = "sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632"}, +] + +[package.dependencies] +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" + +[package.extras] +grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] + +[[package]] +name = "greenlet" +version = "3.0.3" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.7" +files = [ + {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, + {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, + {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, + {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, + {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, + {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, + {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, + {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, + {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, + {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, + {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, + {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, + {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, + {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, + {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, + {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil"] + +[[package]] +name = "grpc-gateway-protoc-gen-openapiv2" +version = "0.1.0" +description = "Provides the missing pieces for gRPC Gateway." +optional = false +python-versions = ">=3.6" +files = [ + {file = "grpc-gateway-protoc-gen-openapiv2-0.1.0.tar.gz", hash = "sha256:03b8934080ae81f709af041e4f89694db586a95ff35abba05d033d499811d4f6"}, + {file = "grpc_gateway_protoc_gen_openapiv2-0.1.0-py3-none-any.whl", hash = "sha256:45ba00a6e9df13d35fe46d4149c62361a63c27e61fb08faa192aea0f4fbed609"}, +] + +[package.dependencies] +googleapis-common-protos = "*" + +[[package]] +name = "grpcio" +version = "1.62.1" +description = "HTTP/2-based RPC framework" +optional = false +python-versions = ">=3.7" +files = [ + {file = "grpcio-1.62.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:179bee6f5ed7b5f618844f760b6acf7e910988de77a4f75b95bbfaa8106f3c1e"}, + {file = "grpcio-1.62.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:48611e4fa010e823ba2de8fd3f77c1322dd60cb0d180dc6630a7e157b205f7ea"}, + {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:b2a0e71b0a2158aa4bce48be9f8f9eb45cbd17c78c7443616d00abbe2a509f6d"}, + {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fbe80577c7880911d3ad65e5ecc997416c98f354efeba2f8d0f9112a67ed65a5"}, + {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58f6c693d446964e3292425e1d16e21a97a48ba9172f2d0df9d7b640acb99243"}, + {file = "grpcio-1.62.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:77c339403db5a20ef4fed02e4d1a9a3d9866bf9c0afc77a42234677313ea22f3"}, + {file = "grpcio-1.62.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b5a4ea906db7dec694098435d84bf2854fe158eb3cd51e1107e571246d4d1d70"}, + {file = "grpcio-1.62.1-cp310-cp310-win32.whl", hash = "sha256:4187201a53f8561c015bc745b81a1b2d278967b8de35f3399b84b0695e281d5f"}, + {file = "grpcio-1.62.1-cp310-cp310-win_amd64.whl", hash = "sha256:844d1f3fb11bd1ed362d3fdc495d0770cfab75761836193af166fee113421d66"}, + {file = "grpcio-1.62.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:833379943d1728a005e44103f17ecd73d058d37d95783eb8f0b28ddc1f54d7b2"}, + {file = "grpcio-1.62.1-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:c7fcc6a32e7b7b58f5a7d27530669337a5d587d4066060bcb9dee7a8c833dfb7"}, + {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:fa7d28eb4d50b7cbe75bb8b45ed0da9a1dc5b219a0af59449676a29c2eed9698"}, + {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48f7135c3de2f298b833be8b4ae20cafe37091634e91f61f5a7eb3d61ec6f660"}, + {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71f11fd63365ade276c9d4a7b7df5c136f9030e3457107e1791b3737a9b9ed6a"}, + {file = "grpcio-1.62.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4b49fd8fe9f9ac23b78437da94c54aa7e9996fbb220bac024a67469ce5d0825f"}, + {file = "grpcio-1.62.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:482ae2ae78679ba9ed5752099b32e5fe580443b4f798e1b71df412abf43375db"}, + {file = "grpcio-1.62.1-cp311-cp311-win32.whl", hash = "sha256:1faa02530b6c7426404372515fe5ddf66e199c2ee613f88f025c6f3bd816450c"}, + {file = "grpcio-1.62.1-cp311-cp311-win_amd64.whl", hash = "sha256:5bd90b8c395f39bc82a5fb32a0173e220e3f401ff697840f4003e15b96d1befc"}, + {file = "grpcio-1.62.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:b134d5d71b4e0837fff574c00e49176051a1c532d26c052a1e43231f252d813b"}, + {file = "grpcio-1.62.1-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:d1f6c96573dc09d50dbcbd91dbf71d5cf97640c9427c32584010fbbd4c0e0037"}, + {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:359f821d4578f80f41909b9ee9b76fb249a21035a061a327f91c953493782c31"}, + {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a485f0c2010c696be269184bdb5ae72781344cb4e60db976c59d84dd6354fac9"}, + {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b50b09b4dc01767163d67e1532f948264167cd27f49e9377e3556c3cba1268e1"}, + {file = "grpcio-1.62.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3227c667dccbe38f2c4d943238b887bac588d97c104815aecc62d2fd976e014b"}, + {file = "grpcio-1.62.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3952b581eb121324853ce2b191dae08badb75cd493cb4e0243368aa9e61cfd41"}, + {file = "grpcio-1.62.1-cp312-cp312-win32.whl", hash = "sha256:83a17b303425104d6329c10eb34bba186ffa67161e63fa6cdae7776ff76df73f"}, + {file = "grpcio-1.62.1-cp312-cp312-win_amd64.whl", hash = "sha256:6696ffe440333a19d8d128e88d440f91fb92c75a80ce4b44d55800e656a3ef1d"}, + {file = "grpcio-1.62.1-cp37-cp37m-linux_armv7l.whl", hash = "sha256:e3393b0823f938253370ebef033c9fd23d27f3eae8eb9a8f6264900c7ea3fb5a"}, + {file = "grpcio-1.62.1-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:83e7ccb85a74beaeae2634f10eb858a0ed1a63081172649ff4261f929bacfd22"}, + {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:882020c87999d54667a284c7ddf065b359bd00251fcd70279ac486776dbf84ec"}, + {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a10383035e864f386fe096fed5c47d27a2bf7173c56a6e26cffaaa5a361addb1"}, + {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:960edebedc6b9ada1ef58e1c71156f28689978188cd8cff3b646b57288a927d9"}, + {file = "grpcio-1.62.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:23e2e04b83f347d0aadde0c9b616f4726c3d76db04b438fd3904b289a725267f"}, + {file = "grpcio-1.62.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:978121758711916d34fe57c1f75b79cdfc73952f1481bb9583399331682d36f7"}, + {file = "grpcio-1.62.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9084086190cc6d628f282e5615f987288b95457292e969b9205e45b442276407"}, + {file = "grpcio-1.62.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:22bccdd7b23c420a27fd28540fb5dcbc97dc6be105f7698cb0e7d7a420d0e362"}, + {file = "grpcio-1.62.1-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:8999bf1b57172dbc7c3e4bb3c732658e918f5c333b2942243f10d0d653953ba9"}, + {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:d9e52558b8b8c2f4ac05ac86344a7417ccdd2b460a59616de49eb6933b07a0bd"}, + {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1714e7bc935780bc3de1b3fcbc7674209adf5208ff825799d579ffd6cd0bd505"}, + {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8842ccbd8c0e253c1f189088228f9b433f7a93b7196b9e5b6f87dba393f5d5d"}, + {file = "grpcio-1.62.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1f1e7b36bdff50103af95a80923bf1853f6823dd62f2d2a2524b66ed74103e49"}, + {file = "grpcio-1.62.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bba97b8e8883a8038606480d6b6772289f4c907f6ba780fa1f7b7da7dfd76f06"}, + {file = "grpcio-1.62.1-cp38-cp38-win32.whl", hash = "sha256:a7f615270fe534548112a74e790cd9d4f5509d744dd718cd442bf016626c22e4"}, + {file = "grpcio-1.62.1-cp38-cp38-win_amd64.whl", hash = "sha256:e6c8c8693df718c5ecbc7babb12c69a4e3677fd11de8886f05ab22d4e6b1c43b"}, + {file = "grpcio-1.62.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:73db2dc1b201d20ab7083e7041946910bb991e7e9761a0394bbc3c2632326483"}, + {file = "grpcio-1.62.1-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:407b26b7f7bbd4f4751dbc9767a1f0716f9fe72d3d7e96bb3ccfc4aace07c8de"}, + {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:f8de7c8cef9261a2d0a62edf2ccea3d741a523c6b8a6477a340a1f2e417658de"}, + {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd5c8a1af40ec305d001c60236308a67e25419003e9bb3ebfab5695a8d0b369"}, + {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be0477cb31da67846a33b1a75c611f88bfbcd427fe17701b6317aefceee1b96f"}, + {file = "grpcio-1.62.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:60dcd824df166ba266ee0cfaf35a31406cd16ef602b49f5d4dfb21f014b0dedd"}, + {file = "grpcio-1.62.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:973c49086cabab773525f6077f95e5a993bfc03ba8fc32e32f2c279497780585"}, + {file = "grpcio-1.62.1-cp39-cp39-win32.whl", hash = "sha256:12859468e8918d3bd243d213cd6fd6ab07208195dc140763c00dfe901ce1e1b4"}, + {file = "grpcio-1.62.1-cp39-cp39-win_amd64.whl", hash = "sha256:b7209117bbeebdfa5d898205cc55153a51285757902dd73c47de498ad4d11332"}, + {file = "grpcio-1.62.1.tar.gz", hash = "sha256:6c455e008fa86d9e9a9d85bb76da4277c0d7d9668a3bfa70dbe86e9f3c759947"}, +] + +[package.extras] +protobuf = ["grpcio-tools (>=1.62.1)"] + +[[package]] +name = "idna" +version = "3.7" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, +] + +[[package]] +name = "importlib-metadata" +version = "6.11.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-6.11.0-py3-none-any.whl", hash = "sha256:f0afba6205ad8f8947c7d338b5342d5db2afbfd82f9cbef7879a9539cc12eb9b"}, + {file = "importlib_metadata-6.11.0.tar.gz", hash = "sha256:1231cf92d825c9e03cfc4da076a16de6422c863558229ea0b22b675657463443"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] + +[[package]] +name = "importlib-resources" +version = "6.4.0" +description = "Read resources from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_resources-6.4.0-py3-none-any.whl", hash = "sha256:50d10f043df931902d4194ea07ec57960f66a80449ff867bfe782b4c486ba78c"}, + {file = "importlib_resources-6.4.0.tar.gz", hash = "sha256:cdb2b453b8046ca4e3798eb1d84f3cce1446a0e8e7b5ef4efb600f19fc398145"}, +] + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["jaraco.test (>=5.4)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "joblib" +version = "1.4.0" +description = "Lightweight pipelining with Python functions" +optional = false +python-versions = ">=3.8" +files = [ + {file = "joblib-1.4.0-py3-none-any.whl", hash = "sha256:42942470d4062537be4d54c83511186da1fc14ba354961a2114da91efa9a4ed7"}, + {file = "joblib-1.4.0.tar.gz", hash = "sha256:1eb0dc091919cd384490de890cb5dfd538410a6d4b3b54eef09fb8c50b409b1c"}, +] + +[[package]] +name = "jsonpatch" +version = "1.33" +description = "Apply JSON-Patches (RFC 6902)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, + {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, +] + +[package.dependencies] +jsonpointer = ">=1.9" + +[[package]] +name = "jsonpointer" +version = "2.4" +description = "Identify specific nodes in a JSON document (RFC 6901)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, + {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, +] + +[[package]] +name = "jsonref" +version = "0.2" +description = "An implementation of JSON Reference for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, +] + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0" +setuptools = "*" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] + +[[package]] +name = "kiwisolver" +version = "1.4.5" +description = "A fast implementation of the Cassowary constraint solver" +optional = false +python-versions = ">=3.7" +files = [ + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:05703cf211d585109fcd72207a31bb170a0f22144d68298dc5e61b3c946518af"}, + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:146d14bebb7f1dc4d5fbf74f8a6cb15ac42baadee8912eb84ac0b3b2a3dc6ac3"}, + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ef7afcd2d281494c0a9101d5c571970708ad911d028137cd558f02b851c08b4"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9eaa8b117dc8337728e834b9c6e2611f10c79e38f65157c4c38e9400286f5cb1"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ec20916e7b4cbfb1f12380e46486ec4bcbaa91a9c448b97023fde0d5bbf9e4ff"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39b42c68602539407884cf70d6a480a469b93b81b7701378ba5e2328660c847a"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa12042de0171fad672b6c59df69106d20d5596e4f87b5e8f76df757a7c399aa"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a40773c71d7ccdd3798f6489aaac9eee213d566850a9533f8d26332d626b82c"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:19df6e621f6d8b4b9c4d45f40a66839294ff2bb235e64d2178f7522d9170ac5b"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:83d78376d0d4fd884e2c114d0621624b73d2aba4e2788182d286309ebdeed770"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e391b1f0a8a5a10ab3b9bb6afcfd74f2175f24f8975fb87ecae700d1503cdee0"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:852542f9481f4a62dbb5dd99e8ab7aedfeb8fb6342349a181d4036877410f525"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59edc41b24031bc25108e210c0def6f6c2191210492a972d585a06ff246bb79b"}, + {file = "kiwisolver-1.4.5-cp310-cp310-win32.whl", hash = "sha256:a6aa6315319a052b4ee378aa171959c898a6183f15c1e541821c5c59beaa0238"}, + {file = "kiwisolver-1.4.5-cp310-cp310-win_amd64.whl", hash = "sha256:d0ef46024e6a3d79c01ff13801cb19d0cad7fd859b15037aec74315540acc276"}, + {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:11863aa14a51fd6ec28688d76f1735f8f69ab1fabf388851a595d0721af042f5"}, + {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ab3919a9997ab7ef2fbbed0cc99bb28d3c13e6d4b1ad36e97e482558a91be90"}, + {file = "kiwisolver-1.4.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fcc700eadbbccbf6bc1bcb9dbe0786b4b1cb91ca0dcda336eef5c2beed37b797"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dfdd7c0b105af050eb3d64997809dc21da247cf44e63dc73ff0fd20b96be55a9"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76c6a5964640638cdeaa0c359382e5703e9293030fe730018ca06bc2010c4437"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbea0db94288e29afcc4c28afbf3a7ccaf2d7e027489c449cf7e8f83c6346eb9"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ceec1a6bc6cab1d6ff5d06592a91a692f90ec7505d6463a88a52cc0eb58545da"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:040c1aebeda72197ef477a906782b5ab0d387642e93bda547336b8957c61022e"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f91de7223d4c7b793867797bacd1ee53bfe7359bd70d27b7b58a04efbb9436c8"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:faae4860798c31530dd184046a900e652c95513796ef51a12bc086710c2eec4d"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0157420efcb803e71d1b28e2c287518b8808b7cf1ab8af36718fd0a2c453eb0"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:06f54715b7737c2fecdbf140d1afb11a33d59508a47bf11bb38ecf21dc9ab79f"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fdb7adb641a0d13bdcd4ef48e062363d8a9ad4a182ac7647ec88f695e719ae9f"}, + {file = "kiwisolver-1.4.5-cp311-cp311-win32.whl", hash = "sha256:bb86433b1cfe686da83ce32a9d3a8dd308e85c76b60896d58f082136f10bffac"}, + {file = "kiwisolver-1.4.5-cp311-cp311-win_amd64.whl", hash = "sha256:6c08e1312a9cf1074d17b17728d3dfce2a5125b2d791527f33ffbe805200a355"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:32d5cf40c4f7c7b3ca500f8985eb3fb3a7dfc023215e876f207956b5ea26632a"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f846c260f483d1fd217fe5ed7c173fb109efa6b1fc8381c8b7552c5781756192"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5ff5cf3571589b6d13bfbfd6bcd7a3f659e42f96b5fd1c4830c4cf21d4f5ef45"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7269d9e5f1084a653d575c7ec012ff57f0c042258bf5db0954bf551c158466e7"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da802a19d6e15dffe4b0c24b38b3af68e6c1a68e6e1d8f30148c83864f3881db"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3aba7311af82e335dd1e36ffff68aaca609ca6290c2cb6d821a39aa075d8e3ff"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:763773d53f07244148ccac5b084da5adb90bfaee39c197554f01b286cf869228"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2270953c0d8cdab5d422bee7d2007f043473f9d2999631c86a223c9db56cbd16"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d099e745a512f7e3bbe7249ca835f4d357c586d78d79ae8f1dcd4d8adeb9bda9"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:74db36e14a7d1ce0986fa104f7d5637aea5c82ca6326ed0ec5694280942d1162"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e5bab140c309cb3a6ce373a9e71eb7e4873c70c2dda01df6820474f9889d6d4"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0f114aa76dc1b8f636d077979c0ac22e7cd8f3493abbab152f20eb8d3cda71f3"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:88a2df29d4724b9237fc0c6eaf2a1adae0cdc0b3e9f4d8e7dc54b16812d2d81a"}, + {file = "kiwisolver-1.4.5-cp312-cp312-win32.whl", hash = "sha256:72d40b33e834371fd330fb1472ca19d9b8327acb79a5821d4008391db8e29f20"}, + {file = "kiwisolver-1.4.5-cp312-cp312-win_amd64.whl", hash = "sha256:2c5674c4e74d939b9d91dda0fae10597ac7521768fec9e399c70a1f27e2ea2d9"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3a2b053a0ab7a3960c98725cfb0bf5b48ba82f64ec95fe06f1d06c99b552e130"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cd32d6c13807e5c66a7cbb79f90b553642f296ae4518a60d8d76243b0ad2898"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59ec7b7c7e1a61061850d53aaf8e93db63dce0c936db1fda2658b70e4a1be709"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da4cfb373035def307905d05041c1d06d8936452fe89d464743ae7fb8371078b"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2400873bccc260b6ae184b2b8a4fec0e4082d30648eadb7c3d9a13405d861e89"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1b04139c4236a0f3aff534479b58f6f849a8b351e1314826c2d230849ed48985"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:4e66e81a5779b65ac21764c295087de82235597a2293d18d943f8e9e32746265"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7931d8f1f67c4be9ba1dd9c451fb0eeca1a25b89e4d3f89e828fe12a519b782a"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b3f7e75f3015df442238cca659f8baa5f42ce2a8582727981cbfa15fee0ee205"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:bbf1d63eef84b2e8c89011b7f2235b1e0bf7dacc11cac9431fc6468e99ac77fb"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4c380469bd3f970ef677bf2bcba2b6b0b4d5c75e7a020fb863ef75084efad66f"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-win32.whl", hash = "sha256:9408acf3270c4b6baad483865191e3e582b638b1654a007c62e3efe96f09a9a3"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-win_amd64.whl", hash = "sha256:5b94529f9b2591b7af5f3e0e730a4e0a41ea174af35a4fd067775f9bdfeee01a"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:11c7de8f692fc99816e8ac50d1d1aef4f75126eefc33ac79aac02c099fd3db71"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:53abb58632235cd154176ced1ae8f0d29a6657aa1aa9decf50b899b755bc2b93"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:88b9f257ca61b838b6f8094a62418421f87ac2a1069f7e896c36a7d86b5d4c29"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3195782b26fc03aa9c6913d5bad5aeb864bdc372924c093b0f1cebad603dd712"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc579bf0f502e54926519451b920e875f433aceb4624a3646b3252b5caa9e0b6"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a580c91d686376f0f7c295357595c5a026e6cbc3d77b7c36e290201e7c11ecb"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cfe6ab8da05c01ba6fbea630377b5da2cd9bcbc6338510116b01c1bc939a2c18"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d2e5a98f0ec99beb3c10e13b387f8db39106d53993f498b295f0c914328b1333"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a51a263952b1429e429ff236d2f5a21c5125437861baeed77f5e1cc2d2c7c6da"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3edd2fa14e68c9be82c5b16689e8d63d89fe927e56debd6e1dbce7a26a17f81b"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:74d1b44c6cfc897df648cc9fdaa09bc3e7679926e6f96df05775d4fb3946571c"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:76d9289ed3f7501012e05abb8358bbb129149dbd173f1f57a1bf1c22d19ab7cc"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:92dea1ffe3714fa8eb6a314d2b3c773208d865a0e0d35e713ec54eea08a66250"}, + {file = "kiwisolver-1.4.5-cp38-cp38-win32.whl", hash = "sha256:5c90ae8c8d32e472be041e76f9d2f2dbff4d0b0be8bd4041770eddb18cf49a4e"}, + {file = "kiwisolver-1.4.5-cp38-cp38-win_amd64.whl", hash = "sha256:c7940c1dc63eb37a67721b10d703247552416f719c4188c54e04334321351ced"}, + {file = "kiwisolver-1.4.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9407b6a5f0d675e8a827ad8742e1d6b49d9c1a1da5d952a67d50ef5f4170b18d"}, + {file = "kiwisolver-1.4.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15568384086b6df3c65353820a4473575dbad192e35010f622c6ce3eebd57af9"}, + {file = "kiwisolver-1.4.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0dc9db8e79f0036e8173c466d21ef18e1befc02de8bf8aa8dc0813a6dc8a7046"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cdc8a402aaee9a798b50d8b827d7ecf75edc5fb35ea0f91f213ff927c15f4ff0"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6c3bd3cde54cafb87d74d8db50b909705c62b17c2099b8f2e25b461882e544ff"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:955e8513d07a283056b1396e9a57ceddbd272d9252c14f154d450d227606eb54"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:346f5343b9e3f00b8db8ba359350eb124b98c99efd0b408728ac6ebf38173958"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9098e0049e88c6a24ff64545cdfc50807818ba6c1b739cae221bbbcbc58aad3"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:00bd361b903dc4bbf4eb165f24d1acbee754fce22ded24c3d56eec268658a5cf"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7b8b454bac16428b22560d0a1cf0a09875339cab69df61d7805bf48919415901"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f1d072c2eb0ad60d4c183f3fb44ac6f73fb7a8f16a2694a91f988275cbf352f9"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:31a82d498054cac9f6d0b53d02bb85811185bcb477d4b60144f915f3b3126342"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6512cb89e334e4700febbffaaa52761b65b4f5a3cf33f960213d5656cea36a77"}, + {file = "kiwisolver-1.4.5-cp39-cp39-win32.whl", hash = "sha256:9db8ea4c388fdb0f780fe91346fd438657ea602d58348753d9fb265ce1bca67f"}, + {file = "kiwisolver-1.4.5-cp39-cp39-win_amd64.whl", hash = "sha256:59415f46a37f7f2efeec758353dd2eae1b07640d8ca0f0c42548ec4125492635"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5c7b3b3a728dc6faf3fc372ef24f21d1e3cee2ac3e9596691d746e5a536de920"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:620ced262a86244e2be10a676b646f29c34537d0d9cc8eb26c08f53d98013390"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:378a214a1e3bbf5ac4a8708304318b4f890da88c9e6a07699c4ae7174c09a68d"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf7be1207676ac608a50cd08f102f6742dbfc70e8d60c4db1c6897f62f71523"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ba55dce0a9b8ff59495ddd050a0225d58bd0983d09f87cfe2b6aec4f2c1234e4"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fd32ea360bcbb92d28933fc05ed09bffcb1704ba3fc7942e81db0fd4f81a7892"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5e7139af55d1688f8b960ee9ad5adafc4ac17c1c473fe07133ac092310d76544"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dced8146011d2bc2e883f9bd68618b8247387f4bbec46d7392b3c3b032640126"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9bf3325c47b11b2e51bca0824ea217c7cd84491d8ac4eefd1e409705ef092bd"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5794cf59533bc3f1b1c821f7206a3617999db9fbefc345360aafe2e067514929"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e368f200bbc2e4f905b8e71eb38b3c04333bddaa6a2464a6355487b02bb7fb09"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5d706eba36b4c4d5bc6c6377bb6568098765e990cfc21ee16d13963fab7b3e7"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85267bd1aa8880a9c88a8cb71e18d3d64d2751a790e6ca6c27b8ccc724bcd5ad"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:210ef2c3a1f03272649aff1ef992df2e724748918c4bc2d5a90352849eb40bea"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:11d011a7574eb3b82bcc9c1a1d35c1d7075677fdd15de527d91b46bd35e935ee"}, + {file = "kiwisolver-1.4.5.tar.gz", hash = "sha256:e57e563a57fb22a142da34f38acc2fc1a5c864bc29ca1517a88abc963e60d6ec"}, +] + +[[package]] +name = "langchain" +version = "0.1.16" +description = "Building applications with LLMs through composability" +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langchain-0.1.16-py3-none-any.whl", hash = "sha256:bc074cc5e51fad79b9ead1572fc3161918d0f614a6c8f0460543d505ad249ac7"}, + {file = "langchain-0.1.16.tar.gz", hash = "sha256:b6bce78f8c071baa898884accfff15c3d81da2f0dd86c20e2f4c80b41463f49f"}, +] + +[package.dependencies] +aiohttp = ">=3.8.3,<4.0.0" +async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""} +dataclasses-json = ">=0.5.7,<0.7" +jsonpatch = ">=1.33,<2.0" +langchain-community = ">=0.0.32,<0.1" +langchain-core = ">=0.1.42,<0.2.0" +langchain-text-splitters = ">=0.0.1,<0.1" +langsmith = ">=0.1.17,<0.2.0" +numpy = ">=1,<2" +pydantic = ">=1,<3" +PyYAML = ">=5.3" +requests = ">=2,<3" +SQLAlchemy = ">=1.4,<3" +tenacity = ">=8.1.0,<9.0.0" + +[package.extras] +azure = ["azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-textanalytics (>=5.3.0,<6.0.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-core (>=1.26.4,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "azure-search-documents (==11.4.0b8)", "openai (<2)"] +clarifai = ["clarifai (>=9.1.0)"] +cli = ["typer (>=0.9.0,<0.10.0)"] +cohere = ["cohere (>=4,<6)"] +docarray = ["docarray[hnswlib] (>=0.32.0,<0.33.0)"] +embeddings = ["sentence-transformers (>=2,<3)"] +extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<6)", "couchbase (>=4.1.9,<5.0.0)", "dashvector (>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "langchain-openai (>=0.0.2,<0.1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] +javascript = ["esprima (>=4.0.1,<5.0.0)"] +llms = ["clarifai (>=9.1.0)", "cohere (>=4,<6)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (<2)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)"] +openai = ["openai (<2)", "tiktoken (>=0.3.2,<0.6.0)"] +qdrant = ["qdrant-client (>=1.3.1,<2.0.0)"] +text-helpers = ["chardet (>=5.1.0,<6.0.0)"] + +[[package]] +name = "langchain-community" +version = "0.0.32" +description = "Community contributed LangChain integrations." +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langchain_community-0.0.32-py3-none-any.whl", hash = "sha256:406977009999952d0705de3806de2b4867e9bb8eda8ca154a59c7a8ed58da38d"}, + {file = "langchain_community-0.0.32.tar.gz", hash = "sha256:1510217d646c8380f54e9850351f6d2a0b0dd73c501b666c6f4b40baa8160b29"}, +] + +[package.dependencies] +aiohttp = ">=3.8.3,<4.0.0" +dataclasses-json = ">=0.5.7,<0.7" +langchain-core = ">=0.1.41,<0.2.0" +langsmith = ">=0.1.0,<0.2.0" +numpy = ">=1,<2" +PyYAML = ">=5.3" +requests = ">=2,<3" +SQLAlchemy = ">=1.4,<3" +tenacity = ">=8.1.0,<9.0.0" + +[package.extras] +cli = ["typer (>=0.9.0,<0.10.0)"] +extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cloudpickle (>=2.0.0)", "cohere (>=4,<5)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "friendli-client (>=1.2.4,<2.0.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "httpx (>=0.24.1,<0.25.0)", "httpx-sse (>=0.4.0,<0.5.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "nvidia-riva-client (>=2.14.0,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "premai (>=0.3.25,<0.4.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pyjwt (>=2.8.0,<3.0.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "tidb-vector (>=0.0.3,<1.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "tree-sitter (>=0.20.2,<0.21.0)", "tree-sitter-languages (>=1.8.0,<2.0.0)", "upstash-redis (>=0.15.0,<0.16.0)", "vdms (>=0.0.20,<0.0.21)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] + +[[package]] +name = "langchain-core" +version = "0.1.42" +description = "Building applications with LLMs through composability" +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langchain_core-0.1.42-py3-none-any.whl", hash = "sha256:c5653ffa08a44f740295c157a24c0def4a753333f6a2c41f76bf431cd00be8b5"}, + {file = "langchain_core-0.1.42.tar.gz", hash = "sha256:40751bf60ea5d8e2b2efe65290db434717ee3834870c002e40e2811f09d814e6"}, +] + +[package.dependencies] +jsonpatch = ">=1.33,<2.0" +langsmith = ">=0.1.0,<0.2.0" +packaging = ">=23.2,<24.0" +pydantic = ">=1,<3" +PyYAML = ">=5.3" +tenacity = ">=8.1.0,<9.0.0" + +[package.extras] +extended-testing = ["jinja2 (>=3,<4)"] + +[[package]] +name = "langchain-text-splitters" +version = "0.0.1" +description = "LangChain text splitting utilities" +optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "langchain_text_splitters-0.0.1-py3-none-any.whl", hash = "sha256:f5b802f873f5ff6a8b9259ff34d53ed989666ef4e1582e6d1adb3b5520e3839a"}, + {file = "langchain_text_splitters-0.0.1.tar.gz", hash = "sha256:ac459fa98799f5117ad5425a9330b21961321e30bc19a2a2f9f761ddadd62aa1"}, +] + +[package.dependencies] +langchain-core = ">=0.1.28,<0.2.0" + +[package.extras] +extended-testing = ["lxml (>=5.1.0,<6.0.0)"] + +[[package]] +name = "langsmith" +version = "0.1.48" +description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langsmith-0.1.48-py3-none-any.whl", hash = "sha256:2f8967e2aaaed8881efe6f346590681243b315af8ba8a037d969c299d42071d3"}, + {file = "langsmith-0.1.48.tar.gz", hash = "sha256:9cd21cd0928123b2bd2363f03515cb1f6a833d9a9f00420240d5132861d15fcc"}, +] + +[package.dependencies] +orjson = ">=3.9.14,<4.0.0" +pydantic = ">=1,<3" +requests = ">=2,<3" + +[[package]] +name = "loguru" +version = "0.7.2" +description = "Python logging made (stupidly) simple" +optional = false +python-versions = ">=3.5" +files = [ + {file = "loguru-0.7.2-py3-none-any.whl", hash = "sha256:003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb"}, + {file = "loguru-0.7.2.tar.gz", hash = "sha256:e671a53522515f34fd406340ee968cb9ecafbc4b36c679da03c18fd8d0bd51ac"}, +] + +[package.dependencies] +colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} +win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} + +[package.extras] +dev = ["Sphinx (==7.2.5)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptiongroup (==1.1.3)", "freezegun (==1.1.0)", "freezegun (==1.2.2)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v1.4.1)", "mypy (==v1.5.1)", "pre-commit (==3.4.0)", "pytest (==6.1.2)", "pytest (==7.4.0)", "pytest-cov (==2.12.1)", "pytest-cov (==4.1.0)", "pytest-mypy-plugins (==1.9.3)", "pytest-mypy-plugins (==3.0.0)", "sphinx-autobuild (==2021.3.14)", "sphinx-rtd-theme (==1.3.0)", "tox (==3.27.1)", "tox (==4.11.0)"] + +[[package]] +name = "lz4" +version = "4.3.3" +description = "LZ4 Bindings for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "lz4-4.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b891880c187e96339474af2a3b2bfb11a8e4732ff5034be919aa9029484cd201"}, + {file = "lz4-4.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:222a7e35137d7539c9c33bb53fcbb26510c5748779364014235afc62b0ec797f"}, + {file = "lz4-4.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f76176492ff082657ada0d0f10c794b6da5800249ef1692b35cf49b1e93e8ef7"}, + {file = "lz4-4.3.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1d18718f9d78182c6b60f568c9a9cec8a7204d7cb6fad4e511a2ef279e4cb05"}, + {file = "lz4-4.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6cdc60e21ec70266947a48839b437d46025076eb4b12c76bd47f8e5eb8a75dcc"}, + {file = "lz4-4.3.3-cp310-cp310-win32.whl", hash = "sha256:c81703b12475da73a5d66618856d04b1307e43428a7e59d98cfe5a5d608a74c6"}, + {file = "lz4-4.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:43cf03059c0f941b772c8aeb42a0813d68d7081c009542301637e5782f8a33e2"}, + {file = "lz4-4.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:30e8c20b8857adef7be045c65f47ab1e2c4fabba86a9fa9a997d7674a31ea6b6"}, + {file = "lz4-4.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2f7b1839f795315e480fb87d9bc60b186a98e3e5d17203c6e757611ef7dcef61"}, + {file = "lz4-4.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edfd858985c23523f4e5a7526ca6ee65ff930207a7ec8a8f57a01eae506aaee7"}, + {file = "lz4-4.3.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e9c410b11a31dbdc94c05ac3c480cb4b222460faf9231f12538d0074e56c563"}, + {file = "lz4-4.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d2507ee9c99dbddd191c86f0e0c8b724c76d26b0602db9ea23232304382e1f21"}, + {file = "lz4-4.3.3-cp311-cp311-win32.whl", hash = "sha256:f180904f33bdd1e92967923a43c22899e303906d19b2cf8bb547db6653ea6e7d"}, + {file = "lz4-4.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:b14d948e6dce389f9a7afc666d60dd1e35fa2138a8ec5306d30cd2e30d36b40c"}, + {file = "lz4-4.3.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e36cd7b9d4d920d3bfc2369840da506fa68258f7bb176b8743189793c055e43d"}, + {file = "lz4-4.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:31ea4be9d0059c00b2572d700bf2c1bc82f241f2c3282034a759c9a4d6ca4dc2"}, + {file = "lz4-4.3.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33c9a6fd20767ccaf70649982f8f3eeb0884035c150c0b818ea660152cf3c809"}, + {file = "lz4-4.3.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca8fccc15e3add173da91be8f34121578dc777711ffd98d399be35487c934bf"}, + {file = "lz4-4.3.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d84b479ddf39fe3ea05387f10b779155fc0990125f4fb35d636114e1c63a2e"}, + {file = "lz4-4.3.3-cp312-cp312-win32.whl", hash = "sha256:337cb94488a1b060ef1685187d6ad4ba8bc61d26d631d7ba909ee984ea736be1"}, + {file = "lz4-4.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:5d35533bf2cee56f38ced91f766cd0038b6abf46f438a80d50c52750088be93f"}, + {file = "lz4-4.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:363ab65bf31338eb364062a15f302fc0fab0a49426051429866d71c793c23394"}, + {file = "lz4-4.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0a136e44a16fc98b1abc404fbabf7f1fada2bdab6a7e970974fb81cf55b636d0"}, + {file = "lz4-4.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abc197e4aca8b63f5ae200af03eb95fb4b5055a8f990079b5bdf042f568469dd"}, + {file = "lz4-4.3.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56f4fe9c6327adb97406f27a66420b22ce02d71a5c365c48d6b656b4aaeb7775"}, + {file = "lz4-4.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0e822cd7644995d9ba248cb4b67859701748a93e2ab7fc9bc18c599a52e4604"}, + {file = "lz4-4.3.3-cp38-cp38-win32.whl", hash = "sha256:24b3206de56b7a537eda3a8123c644a2b7bf111f0af53bc14bed90ce5562d1aa"}, + {file = "lz4-4.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:b47839b53956e2737229d70714f1d75f33e8ac26e52c267f0197b3189ca6de24"}, + {file = "lz4-4.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6756212507405f270b66b3ff7f564618de0606395c0fe10a7ae2ffcbbe0b1fba"}, + {file = "lz4-4.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ee9ff50557a942d187ec85462bb0960207e7ec5b19b3b48949263993771c6205"}, + {file = "lz4-4.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b901c7784caac9a1ded4555258207d9e9697e746cc8532129f150ffe1f6ba0d"}, + {file = "lz4-4.3.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6d9ec061b9eca86e4dcc003d93334b95d53909afd5a32c6e4f222157b50c071"}, + {file = "lz4-4.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4c7bf687303ca47d69f9f0133274958fd672efaa33fb5bcde467862d6c621f0"}, + {file = "lz4-4.3.3-cp39-cp39-win32.whl", hash = "sha256:054b4631a355606e99a42396f5db4d22046a3397ffc3269a348ec41eaebd69d2"}, + {file = "lz4-4.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:eac9af361e0d98335a02ff12fb56caeb7ea1196cf1a49dbf6f17828a131da807"}, + {file = "lz4-4.3.3.tar.gz", hash = "sha256:01fe674ef2889dbb9899d8a67361e0c4a2c833af5aeb37dd505727cf5d2a131e"}, +] + +[package.extras] +docs = ["sphinx (>=1.6.0)", "sphinx-bootstrap-theme"] +flake8 = ["flake8"] +tests = ["psutil", "pytest (!=3.3.0)", "pytest-cov"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "marshmallow" +version = "3.21.1" +description = "A lightweight library for converting complex datatypes to and from native Python datatypes." +optional = false +python-versions = ">=3.8" +files = [ + {file = "marshmallow-3.21.1-py3-none-any.whl", hash = "sha256:f085493f79efb0644f270a9bf2892843142d80d7174bbbd2f3713f2a589dc633"}, + {file = "marshmallow-3.21.1.tar.gz", hash = "sha256:4e65e9e0d80fc9e609574b9983cf32579f305c718afb30d7233ab818571768c3"}, +] + +[package.dependencies] +packaging = ">=17.0" + +[package.extras] +dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] +docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.2.6)", "sphinx-issues (==4.0.0)", "sphinx-version-warning (==1.1.2)"] +tests = ["pytest", "pytz", "simplejson"] + +[[package]] +name = "matplotlib" +version = "3.8.4" +description = "Python plotting package" +optional = false +python-versions = ">=3.9" +files = [ + {file = "matplotlib-3.8.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:abc9d838f93583650c35eca41cfcec65b2e7cb50fd486da6f0c49b5e1ed23014"}, + {file = "matplotlib-3.8.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f65c9f002d281a6e904976007b2d46a1ee2bcea3a68a8c12dda24709ddc9106"}, + {file = "matplotlib-3.8.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce1edd9f5383b504dbc26eeea404ed0a00656c526638129028b758fd43fc5f10"}, + {file = "matplotlib-3.8.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ecd79298550cba13a43c340581a3ec9c707bd895a6a061a78fa2524660482fc0"}, + {file = "matplotlib-3.8.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:90df07db7b599fe7035d2f74ab7e438b656528c68ba6bb59b7dc46af39ee48ef"}, + {file = "matplotlib-3.8.4-cp310-cp310-win_amd64.whl", hash = "sha256:ac24233e8f2939ac4fd2919eed1e9c0871eac8057666070e94cbf0b33dd9c338"}, + {file = "matplotlib-3.8.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:72f9322712e4562e792b2961971891b9fbbb0e525011e09ea0d1f416c4645661"}, + {file = "matplotlib-3.8.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:232ce322bfd020a434caaffbd9a95333f7c2491e59cfc014041d95e38ab90d1c"}, + {file = "matplotlib-3.8.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6addbd5b488aedb7f9bc19f91cd87ea476206f45d7116fcfe3d31416702a82fa"}, + {file = "matplotlib-3.8.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc4ccdc64e3039fc303defd119658148f2349239871db72cd74e2eeaa9b80b71"}, + {file = "matplotlib-3.8.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b7a2a253d3b36d90c8993b4620183b55665a429da8357a4f621e78cd48b2b30b"}, + {file = "matplotlib-3.8.4-cp311-cp311-win_amd64.whl", hash = "sha256:8080d5081a86e690d7688ffa542532e87f224c38a6ed71f8fbed34dd1d9fedae"}, + {file = "matplotlib-3.8.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6485ac1f2e84676cff22e693eaa4fbed50ef5dc37173ce1f023daef4687df616"}, + {file = "matplotlib-3.8.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c89ee9314ef48c72fe92ce55c4e95f2f39d70208f9f1d9db4e64079420d8d732"}, + {file = "matplotlib-3.8.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50bac6e4d77e4262c4340d7a985c30912054745ec99756ce213bfbc3cb3808eb"}, + {file = "matplotlib-3.8.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f51c4c869d4b60d769f7b4406eec39596648d9d70246428745a681c327a8ad30"}, + {file = "matplotlib-3.8.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b12ba985837e4899b762b81f5b2845bd1a28f4fdd1a126d9ace64e9c4eb2fb25"}, + {file = "matplotlib-3.8.4-cp312-cp312-win_amd64.whl", hash = "sha256:7a6769f58ce51791b4cb8b4d7642489df347697cd3e23d88266aaaee93b41d9a"}, + {file = "matplotlib-3.8.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:843cbde2f0946dadd8c5c11c6d91847abd18ec76859dc319362a0964493f0ba6"}, + {file = "matplotlib-3.8.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1c13f041a7178f9780fb61cc3a2b10423d5e125480e4be51beaf62b172413b67"}, + {file = "matplotlib-3.8.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb44f53af0a62dc80bba4443d9b27f2fde6acfdac281d95bc872dc148a6509cc"}, + {file = "matplotlib-3.8.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:606e3b90897554c989b1e38a258c626d46c873523de432b1462f295db13de6f9"}, + {file = "matplotlib-3.8.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9bb0189011785ea794ee827b68777db3ca3f93f3e339ea4d920315a0e5a78d54"}, + {file = "matplotlib-3.8.4-cp39-cp39-win_amd64.whl", hash = "sha256:6209e5c9aaccc056e63b547a8152661324404dd92340a6e479b3a7f24b42a5d0"}, + {file = "matplotlib-3.8.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c7064120a59ce6f64103c9cefba8ffe6fba87f2c61d67c401186423c9a20fd35"}, + {file = "matplotlib-3.8.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0e47eda4eb2614300fc7bb4657fced3e83d6334d03da2173b09e447418d499f"}, + {file = "matplotlib-3.8.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:493e9f6aa5819156b58fce42b296ea31969f2aab71c5b680b4ea7a3cb5c07d94"}, + {file = "matplotlib-3.8.4.tar.gz", hash = "sha256:8aac397d5e9ec158960e31c381c5ffc52ddd52bd9a47717e2a694038167dffea"}, +] + +[package.dependencies] +contourpy = ">=1.0.1" +cycler = ">=0.10" +fonttools = ">=4.22.0" +importlib-resources = {version = ">=3.2.0", markers = "python_version < \"3.10\""} +kiwisolver = ">=1.3.1" +numpy = ">=1.21" +packaging = ">=20.0" +pillow = ">=8" +pyparsing = ">=2.3.1" +python-dateutil = ">=2.7" + +[[package]] +name = "multidict" +version = "6.0.5" +description = "multidict implementation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, + {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, + {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, + {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, + {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, + {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, + {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, + {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, + {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, + {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, + {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, + {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, + {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, + {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, + {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, + {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, +] + +[[package]] +name = "mypy" +version = "1.9.0" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"}, + {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"}, + {file = "mypy-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150"}, + {file = "mypy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374"}, + {file = "mypy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc"}, + {file = "mypy-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129"}, + {file = "mypy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612"}, + {file = "mypy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6"}, + {file = "mypy-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185"}, + {file = "mypy-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913"}, + {file = "mypy-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"}, + {file = "mypy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b"}, + {file = "mypy-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2"}, + {file = "mypy-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e"}, + {file = "mypy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04"}, + {file = "mypy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89"}, + {file = "mypy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02"}, + {file = "mypy-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4"}, + {file = "mypy-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d"}, + {file = "mypy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf"}, + {file = "mypy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9"}, + {file = "mypy-1.9.0-py3-none-any.whl", hash = "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e"}, + {file = "mypy-1.9.0.tar.gz", hash = "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.1.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "numpy" +version = "1.26.4" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, +] + +[[package]] +name = "openai" +version = "0.27.9" +description = "Python client library for the OpenAI API" +optional = false +python-versions = ">=3.7.1" +files = [ + {file = "openai-0.27.9-py3-none-any.whl", hash = "sha256:6a3cf8e276d1a6262b50562fbc0cba7967cfebb78ed827d375986b48fdad6475"}, + {file = "openai-0.27.9.tar.gz", hash = "sha256:b687761c82f5ebb6f61efc791b2083d2d068277b94802d4d1369efe39851813d"}, +] + +[package.dependencies] +aiohttp = "*" +matplotlib = {version = "*", optional = true, markers = "extra == \"embeddings\""} +numpy = {version = "*", optional = true, markers = "extra == \"embeddings\""} +openpyxl = {version = ">=3.0.7", optional = true, markers = "extra == \"embeddings\""} +pandas = {version = ">=1.2.3", optional = true, markers = "extra == \"embeddings\""} +pandas-stubs = {version = ">=1.1.0.11", optional = true, markers = "extra == \"embeddings\""} +plotly = {version = "*", optional = true, markers = "extra == \"embeddings\""} +requests = ">=2.20" +scikit-learn = {version = ">=1.0.2", optional = true, markers = "extra == \"embeddings\""} +scipy = {version = "*", optional = true, markers = "extra == \"embeddings\""} +tenacity = {version = ">=8.0.1", optional = true, markers = "extra == \"embeddings\""} +tqdm = "*" + +[package.extras] +datalib = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] +dev = ["black (>=21.6b0,<22.0)", "pytest (==6.*)", "pytest-asyncio", "pytest-mock"] +embeddings = ["matplotlib", "numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "plotly", "scikit-learn (>=1.0.2)", "scipy", "tenacity (>=8.0.1)"] +wandb = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "wandb"] + +[[package]] +name = "openpyxl" +version = "3.1.2" +description = "A Python library to read/write Excel 2010 xlsx/xlsm files" +optional = false +python-versions = ">=3.6" +files = [ + {file = "openpyxl-3.1.2-py2.py3-none-any.whl", hash = "sha256:f91456ead12ab3c6c2e9491cf33ba6d08357d802192379bb482f1033ade496f5"}, + {file = "openpyxl-3.1.2.tar.gz", hash = "sha256:a6f5977418eff3b2d5500d54d9db50c8277a368436f4e4f8ddb1be3422870184"}, +] + +[package.dependencies] +et-xmlfile = "*" + +[[package]] +name = "orjson" +version = "3.10.1" +description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +optional = false +python-versions = ">=3.8" +files = [ + {file = "orjson-3.10.1-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:8ec2fc456d53ea4a47768f622bb709be68acd455b0c6be57e91462259741c4f3"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e900863691d327758be14e2a491931605bd0aded3a21beb6ce133889830b659"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ab6ecbd6fe57785ebc86ee49e183f37d45f91b46fc601380c67c5c5e9c0014a2"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8af7c68b01b876335cccfb4eee0beef2b5b6eae1945d46a09a7c24c9faac7a77"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:915abfb2e528677b488a06eba173e9d7706a20fdfe9cdb15890b74ef9791b85e"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe3fd4a36eff9c63d25503b439531d21828da9def0059c4f472e3845a081aa0b"}, + {file = "orjson-3.10.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d229564e72cfc062e6481a91977a5165c5a0fdce11ddc19ced8471847a67c517"}, + {file = "orjson-3.10.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9e00495b18304173ac843b5c5fbea7b6f7968564d0d49bef06bfaeca4b656f4e"}, + {file = "orjson-3.10.1-cp310-none-win32.whl", hash = "sha256:fd78ec55179545c108174ba19c1795ced548d6cac4d80d014163033c047ca4ea"}, + {file = "orjson-3.10.1-cp310-none-win_amd64.whl", hash = "sha256:50ca42b40d5a442a9e22eece8cf42ba3d7cd4cd0f2f20184b4d7682894f05eec"}, + {file = "orjson-3.10.1-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b345a3d6953628df2f42502297f6c1e1b475cfbf6268013c94c5ac80e8abc04c"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:caa7395ef51af4190d2c70a364e2f42138e0e5fcb4bc08bc9b76997659b27dab"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b01d701decd75ae092e5f36f7b88a1e7a1d3bb7c9b9d7694de850fb155578d5a"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b5028981ba393f443d8fed9049211b979cadc9d0afecf162832f5a5b152c6297"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:31ff6a222ea362b87bf21ff619598a4dc1106aaafaea32b1c4876d692891ec27"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e852a83d7803d3406135fb7a57cf0c1e4a3e73bac80ec621bd32f01c653849c5"}, + {file = "orjson-3.10.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2567bc928ed3c3fcd90998009e8835de7c7dc59aabcf764b8374d36044864f3b"}, + {file = "orjson-3.10.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4ce98cac60b7bb56457bdd2ed7f0d5d7f242d291fdc0ca566c83fa721b52e92d"}, + {file = "orjson-3.10.1-cp311-none-win32.whl", hash = "sha256:813905e111318acb356bb8029014c77b4c647f8b03f314e7b475bd9ce6d1a8ce"}, + {file = "orjson-3.10.1-cp311-none-win_amd64.whl", hash = "sha256:03a3ca0b3ed52bed1a869163a4284e8a7b0be6a0359d521e467cdef7e8e8a3ee"}, + {file = "orjson-3.10.1-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:f02c06cee680b1b3a8727ec26c36f4b3c0c9e2b26339d64471034d16f74f4ef5"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1aa2f127ac546e123283e437cc90b5ecce754a22306c7700b11035dad4ccf85"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2cf29b4b74f585225196944dffdebd549ad2af6da9e80db7115984103fb18a96"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1b130c20b116f413caf6059c651ad32215c28500dce9cd029a334a2d84aa66f"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d31f9a709e6114492136e87c7c6da5e21dfedebefa03af85f3ad72656c493ae9"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d1d169461726f271ab31633cf0e7e7353417e16fb69256a4f8ecb3246a78d6e"}, + {file = "orjson-3.10.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:57c294d73825c6b7f30d11c9e5900cfec9a814893af7f14efbe06b8d0f25fba9"}, + {file = "orjson-3.10.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d7f11dbacfa9265ec76b4019efffabaabba7a7ebf14078f6b4df9b51c3c9a8ea"}, + {file = "orjson-3.10.1-cp312-none-win32.whl", hash = "sha256:d89e5ed68593226c31c76ab4de3e0d35c760bfd3fbf0a74c4b2be1383a1bf123"}, + {file = "orjson-3.10.1-cp312-none-win_amd64.whl", hash = "sha256:aa76c4fe147fd162107ce1692c39f7189180cfd3a27cfbc2ab5643422812da8e"}, + {file = "orjson-3.10.1-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a2c6a85c92d0e494c1ae117befc93cf8e7bca2075f7fe52e32698da650b2c6d1"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9813f43da955197d36a7365eb99bed42b83680801729ab2487fef305b9ced866"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ec917b768e2b34b7084cb6c68941f6de5812cc26c6f1a9fecb728e36a3deb9e8"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5252146b3172d75c8a6d27ebca59c9ee066ffc5a277050ccec24821e68742fdf"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:536429bb02791a199d976118b95014ad66f74c58b7644d21061c54ad284e00f4"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7dfed3c3e9b9199fb9c3355b9c7e4649b65f639e50ddf50efdf86b45c6de04b5"}, + {file = "orjson-3.10.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2b230ec35f188f003f5b543644ae486b2998f6afa74ee3a98fc8ed2e45960afc"}, + {file = "orjson-3.10.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:01234249ba19c6ab1eb0b8be89f13ea21218b2d72d496ef085cfd37e1bae9dd8"}, + {file = "orjson-3.10.1-cp38-none-win32.whl", hash = "sha256:8a884fbf81a3cc22d264ba780920d4885442144e6acaa1411921260416ac9a54"}, + {file = "orjson-3.10.1-cp38-none-win_amd64.whl", hash = "sha256:dab5f802d52b182163f307d2b1f727d30b1762e1923c64c9c56dd853f9671a49"}, + {file = "orjson-3.10.1-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a51fd55d4486bc5293b7a400f9acd55a2dc3b5fc8420d5ffe9b1d6bb1a056a5e"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53521542a6db1411b3bfa1b24ddce18605a3abdc95a28a67b33f9145f26aa8f2"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:27d610df96ac18ace4931411d489637d20ab3b8f63562b0531bba16011998db0"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79244b1456e5846d44e9846534bd9e3206712936d026ea8e6a55a7374d2c0694"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d751efaa8a49ae15cbebdda747a62a9ae521126e396fda8143858419f3b03610"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27ff69c620a4fff33267df70cfd21e0097c2a14216e72943bd5414943e376d77"}, + {file = "orjson-3.10.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ebc58693464146506fde0c4eb1216ff6d4e40213e61f7d40e2f0dde9b2f21650"}, + {file = "orjson-3.10.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5be608c3972ed902e0143a5b8776d81ac1059436915d42defe5c6ae97b3137a4"}, + {file = "orjson-3.10.1-cp39-none-win32.whl", hash = "sha256:4ae10753e7511d359405aadcbf96556c86e9dbf3a948d26c2c9f9a150c52b091"}, + {file = "orjson-3.10.1-cp39-none-win_amd64.whl", hash = "sha256:fb5bc4caa2c192077fdb02dce4e5ef8639e7f20bec4e3a834346693907362932"}, + {file = "orjson-3.10.1.tar.gz", hash = "sha256:a883b28d73370df23ed995c466b4f6c708c1f7a9bdc400fe89165c96c7603204"}, +] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pandas" +version = "2.2.2" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, + {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, + {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, + {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, + {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"}, + {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"}, + {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.22.4", markers = "python_version < \"3.11\""}, + {version = ">=1.23.2", markers = "python_version == \"3.11\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.7" + +[package.extras] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] + +[[package]] +name = "pandas-stubs" +version = "2.2.1.240316" +description = "Type annotations for pandas" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pandas_stubs-2.2.1.240316-py3-none-any.whl", hash = "sha256:0126a26451a37cb893ea62357ca87ba3d181bd999ec8ba2ca5602e20207d6682"}, + {file = "pandas_stubs-2.2.1.240316.tar.gz", hash = "sha256:236a4f812fb6b1922e9607ff09e427f6d8540c421c9e5a40e3e4ddf7adac7f05"}, +] + +[package.dependencies] +numpy = {version = ">=1.26.0", markers = "python_version < \"3.13\""} +types-pytz = ">=2022.1.1" + +[[package]] +name = "pendulum" +version = "2.1.2" +description = "Python datetimes made easy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, +] + +[package.dependencies] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" + +[[package]] +name = "pillow" +version = "10.3.0" +description = "Python Imaging Library (Fork)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pillow-10.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:90b9e29824800e90c84e4022dd5cc16eb2d9605ee13f05d47641eb183cd73d45"}, + {file = "pillow-10.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a2c405445c79c3f5a124573a051062300936b0281fee57637e706453e452746c"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78618cdbccaa74d3f88d0ad6cb8ac3007f1a6fa5c6f19af64b55ca170bfa1edf"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261ddb7ca91fcf71757979534fb4c128448b5b4c55cb6152d280312062f69599"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ce49c67f4ea0609933d01c0731b34b8695a7a748d6c8d186f95e7d085d2fe475"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b14f16f94cbc61215115b9b1236f9c18403c15dd3c52cf629072afa9d54c1cbf"}, + {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d33891be6df59d93df4d846640f0e46f1a807339f09e79a8040bc887bdcd7ed3"}, + {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b50811d664d392f02f7761621303eba9d1b056fb1868c8cdf4231279645c25f5"}, + {file = "pillow-10.3.0-cp310-cp310-win32.whl", hash = "sha256:ca2870d5d10d8726a27396d3ca4cf7976cec0f3cb706debe88e3a5bd4610f7d2"}, + {file = "pillow-10.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:f0d0591a0aeaefdaf9a5e545e7485f89910c977087e7de2b6c388aec32011e9f"}, + {file = "pillow-10.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:ccce24b7ad89adb5a1e34a6ba96ac2530046763912806ad4c247356a8f33a67b"}, + {file = "pillow-10.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:5f77cf66e96ae734717d341c145c5949c63180842a545c47a0ce7ae52ca83795"}, + {file = "pillow-10.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4b878386c4bf293578b48fc570b84ecfe477d3b77ba39a6e87150af77f40c57"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdcbb4068117dfd9ce0138d068ac512843c52295ed996ae6dd1faf537b6dbc27"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9797a6c8fe16f25749b371c02e2ade0efb51155e767a971c61734b1bf6293994"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:9e91179a242bbc99be65e139e30690e081fe6cb91a8e77faf4c409653de39451"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:1b87bd9d81d179bd8ab871603bd80d8645729939f90b71e62914e816a76fc6bd"}, + {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:81d09caa7b27ef4e61cb7d8fbf1714f5aec1c6b6c5270ee53504981e6e9121ad"}, + {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:048ad577748b9fa4a99a0548c64f2cb8d672d5bf2e643a739ac8faff1164238c"}, + {file = "pillow-10.3.0-cp311-cp311-win32.whl", hash = "sha256:7161ec49ef0800947dc5570f86568a7bb36fa97dd09e9827dc02b718c5643f09"}, + {file = "pillow-10.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:8eb0908e954d093b02a543dc963984d6e99ad2b5e36503d8a0aaf040505f747d"}, + {file = "pillow-10.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:4e6f7d1c414191c1199f8996d3f2282b9ebea0945693fb67392c75a3a320941f"}, + {file = "pillow-10.3.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:e46f38133e5a060d46bd630faa4d9fa0202377495df1f068a8299fd78c84de84"}, + {file = "pillow-10.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:50b8eae8f7334ec826d6eeffaeeb00e36b5e24aa0b9df322c247539714c6df19"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d3bea1c75f8c53ee4d505c3e67d8c158ad4df0d83170605b50b64025917f338"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19aeb96d43902f0a783946a0a87dbdad5c84c936025b8419da0a0cd7724356b1"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:74d28c17412d9caa1066f7a31df8403ec23d5268ba46cd0ad2c50fb82ae40462"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:ff61bfd9253c3915e6d41c651d5f962da23eda633cf02262990094a18a55371a"}, + {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d886f5d353333b4771d21267c7ecc75b710f1a73d72d03ca06df49b09015a9ef"}, + {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b5ec25d8b17217d635f8935dbc1b9aa5907962fae29dff220f2659487891cd3"}, + {file = "pillow-10.3.0-cp312-cp312-win32.whl", hash = "sha256:51243f1ed5161b9945011a7360e997729776f6e5d7005ba0c6879267d4c5139d"}, + {file = "pillow-10.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:412444afb8c4c7a6cc11a47dade32982439925537e483be7c0ae0cf96c4f6a0b"}, + {file = "pillow-10.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:798232c92e7665fe82ac085f9d8e8ca98826f8e27859d9a96b41d519ecd2e49a"}, + {file = "pillow-10.3.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:4eaa22f0d22b1a7e93ff0a596d57fdede2e550aecffb5a1ef1106aaece48e96b"}, + {file = "pillow-10.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cd5e14fbf22a87321b24c88669aad3a51ec052eb145315b3da3b7e3cc105b9a2"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1530e8f3a4b965eb6a7785cf17a426c779333eb62c9a7d1bbcf3ffd5bf77a4aa"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d512aafa1d32efa014fa041d38868fda85028e3f930a96f85d49c7d8ddc0383"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:339894035d0ede518b16073bdc2feef4c991ee991a29774b33e515f1d308e08d"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:aa7e402ce11f0885305bfb6afb3434b3cd8f53b563ac065452d9d5654c7b86fd"}, + {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0ea2a783a2bdf2a561808fe4a7a12e9aa3799b701ba305de596bc48b8bdfce9d"}, + {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c78e1b00a87ce43bb37642c0812315b411e856a905d58d597750eb79802aaaa3"}, + {file = "pillow-10.3.0-cp38-cp38-win32.whl", hash = "sha256:72d622d262e463dfb7595202d229f5f3ab4b852289a1cd09650362db23b9eb0b"}, + {file = "pillow-10.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:2034f6759a722da3a3dbd91a81148cf884e91d1b747992ca288ab88c1de15999"}, + {file = "pillow-10.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2ed854e716a89b1afcedea551cd85f2eb2a807613752ab997b9974aaa0d56936"}, + {file = "pillow-10.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dc1a390a82755a8c26c9964d457d4c9cbec5405896cba94cf51f36ea0d855002"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4203efca580f0dd6f882ca211f923168548f7ba334c189e9eab1178ab840bf60"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3102045a10945173d38336f6e71a8dc71bcaeed55c3123ad4af82c52807b9375"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6fb1b30043271ec92dc65f6d9f0b7a830c210b8a96423074b15c7bc999975f57"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:1dfc94946bc60ea375cc39cff0b8da6c7e5f8fcdc1d946beb8da5c216156ddd8"}, + {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b09b86b27a064c9624d0a6c54da01c1beaf5b6cadfa609cf63789b1d08a797b9"}, + {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d3b2348a78bc939b4fed6552abfd2e7988e0f81443ef3911a4b8498ca084f6eb"}, + {file = "pillow-10.3.0-cp39-cp39-win32.whl", hash = "sha256:45ebc7b45406febf07fef35d856f0293a92e7417ae7933207e90bf9090b70572"}, + {file = "pillow-10.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:0ba26351b137ca4e0db0342d5d00d2e355eb29372c05afd544ebf47c0956ffeb"}, + {file = "pillow-10.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:50fd3f6b26e3441ae07b7c979309638b72abc1a25da31a81a7fbd9495713ef4f"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6b02471b72526ab8a18c39cb7967b72d194ec53c1fd0a70b050565a0f366d355"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8ab74c06ffdab957d7670c2a5a6e1a70181cd10b727cd788c4dd9005b6a8acd9"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:048eeade4c33fdf7e08da40ef402e748df113fd0b4584e32c4af74fe78baaeb2"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e2ec1e921fd07c7cda7962bad283acc2f2a9ccc1b971ee4b216b75fad6f0463"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c8e73e99da7db1b4cad7f8d682cf6abad7844da39834c288fbfa394a47bbced"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:16563993329b79513f59142a6b02055e10514c1a8e86dca8b48a893e33cf91e3"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:dd78700f5788ae180b5ee8902c6aea5a5726bac7c364b202b4b3e3ba2d293170"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:aff76a55a8aa8364d25400a210a65ff59d0168e0b4285ba6bf2bd83cf675ba32"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b7bc2176354defba3edc2b9a777744462da2f8e921fbaf61e52acb95bafa9828"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:793b4e24db2e8742ca6423d3fde8396db336698c55cd34b660663ee9e45ed37f"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d93480005693d247f8346bc8ee28c72a2191bdf1f6b5db469c096c0c867ac015"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c83341b89884e2b2e55886e8fbbf37c3fa5efd6c8907124aeb72f285ae5696e5"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1a1d1915db1a4fdb2754b9de292642a39a7fb28f1736699527bb649484fb966a"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a0eaa93d054751ee9964afa21c06247779b90440ca41d184aeb5d410f20ff591"}, + {file = "pillow-10.3.0.tar.gz", hash = "sha256:9d2455fbf44c914840c793e89aa82d0e1763a14253a000743719ae5946814b2d"}, +] + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] +fpx = ["olefile"] +mic = ["olefile"] +tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] +typing = ["typing-extensions"] +xmp = ["defusedxml"] + +[[package]] +name = "pinecone-client" +version = "2.2.4" +description = "Pinecone client and SDK" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pinecone-client-2.2.4.tar.gz", hash = "sha256:2c1cc1d6648b2be66e944db2ffa59166a37b9164d1135ad525d9cd8b1e298168"}, + {file = "pinecone_client-2.2.4-py3-none-any.whl", hash = "sha256:5bf496c01c2f82f4e5c2dc977cc5062ecd7168b8ed90743b09afcc8c7eb242ec"}, +] + +[package.dependencies] +dnspython = ">=2.0.0" +googleapis-common-protos = {version = ">=1.53.0", optional = true, markers = "extra == \"grpc\""} +grpc-gateway-protoc-gen-openapiv2 = {version = "0.1.0", optional = true, markers = "extra == \"grpc\""} +grpcio = {version = ">=1.44.0", optional = true, markers = "extra == \"grpc\""} +loguru = ">=0.5.0" +lz4 = {version = ">=3.1.3", optional = true, markers = "extra == \"grpc\""} +numpy = ">=1.22.0" +protobuf = {version = ">=3.20.0,<3.21.0", optional = true, markers = "extra == \"grpc\""} +python-dateutil = ">=2.5.3" +pyyaml = ">=5.4" +requests = ">=2.19.0" +tqdm = ">=4.64.1" +typing-extensions = ">=3.7.4" +urllib3 = ">=1.21.1" + +[package.extras] +grpc = ["googleapis-common-protos (>=1.53.0)", "grpc-gateway-protoc-gen-openapiv2 (==0.1.0)", "grpcio (>=1.44.0)", "lz4 (>=3.1.3)", "protobuf (>=3.20.0,<3.21.0)"] + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "plotly" +version = "5.20.0" +description = "An open-source, interactive data visualization library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "plotly-5.20.0-py3-none-any.whl", hash = "sha256:837a9c8aa90f2c0a2f0d747b82544d014dc2a2bdde967b5bb1da25b53932d1a9"}, + {file = "plotly-5.20.0.tar.gz", hash = "sha256:bf901c805d22032cfa534b2ff7c5aa6b0659e037f19ec1e0cca7f585918b5c89"}, +] + +[package.dependencies] +packaging = "*" +tenacity = ">=6.2.0" + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "protobuf" +version = "3.20.3" +description = "Protocol Buffers" +optional = false +python-versions = ">=3.7" +files = [ + {file = "protobuf-3.20.3-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99"}, + {file = "protobuf-3.20.3-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e"}, + {file = "protobuf-3.20.3-cp310-cp310-win32.whl", hash = "sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c"}, + {file = "protobuf-3.20.3-cp310-cp310-win_amd64.whl", hash = "sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7"}, + {file = "protobuf-3.20.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469"}, + {file = "protobuf-3.20.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4"}, + {file = "protobuf-3.20.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4"}, + {file = "protobuf-3.20.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454"}, + {file = "protobuf-3.20.3-cp37-cp37m-win32.whl", hash = "sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905"}, + {file = "protobuf-3.20.3-cp37-cp37m-win_amd64.whl", hash = "sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c"}, + {file = "protobuf-3.20.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7"}, + {file = "protobuf-3.20.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee"}, + {file = "protobuf-3.20.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050"}, + {file = "protobuf-3.20.3-cp38-cp38-win32.whl", hash = "sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86"}, + {file = "protobuf-3.20.3-cp38-cp38-win_amd64.whl", hash = "sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9"}, + {file = "protobuf-3.20.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b"}, + {file = "protobuf-3.20.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b"}, + {file = "protobuf-3.20.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402"}, + {file = "protobuf-3.20.3-cp39-cp39-win32.whl", hash = "sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480"}, + {file = "protobuf-3.20.3-cp39-cp39-win_amd64.whl", hash = "sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7"}, + {file = "protobuf-3.20.3-py2.py3-none-any.whl", hash = "sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db"}, + {file = "protobuf-3.20.3.tar.gz", hash = "sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2"}, +] + +[[package]] +name = "pydantic" +version = "1.10.15" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50f1666a9940d3d68683c9d96e39640f709d7a72ff8702987dab1761036206bb"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82790d4753ee5d00739d6cb5cf56bceb186d9d6ce134aca3ba7befb1eedbc2c8"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d207d5b87f6cbefbdb1198154292faee8017d7495a54ae58db06762004500d00"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e49db944fad339b2ccb80128ffd3f8af076f9f287197a480bf1e4ca053a866f0"}, + {file = "pydantic-1.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:d3b5c4cbd0c9cb61bbbb19ce335e1f8ab87a811f6d589ed52b0254cf585d709c"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3d5731a120752248844676bf92f25a12f6e45425e63ce22e0849297a093b5b0"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c365ad9c394f9eeffcb30a82f4246c0006417f03a7c0f8315d6211f25f7cb654"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3287e1614393119c67bd4404f46e33ae3be3ed4cd10360b48d0a4459f420c6a3"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be51dd2c8596b25fe43c0a4a59c2bee4f18d88efb8031188f9e7ddc6b469cf44"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6a51a1dd4aa7b3f1317f65493a182d3cff708385327c1c82c81e4a9d6d65b2e4"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4e316e54b5775d1eb59187f9290aeb38acf620e10f7fd2f776d97bb788199e53"}, + {file = "pydantic-1.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:0d142fa1b8f2f0ae11ddd5e3e317dcac060b951d605fda26ca9b234b92214986"}, + {file = "pydantic-1.10.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7ea210336b891f5ea334f8fc9f8f862b87acd5d4a0cbc9e3e208e7aa1775dabf"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3453685ccd7140715e05f2193d64030101eaad26076fad4e246c1cc97e1bb30d"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bea1f03b8d4e8e86702c918ccfd5d947ac268f0f0cc6ed71782e4b09353b26f"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:af9850d98fc21e5bc24ea9e35dd80a29faf6462c608728a110c0a30b595e58b7"}, + {file = "pydantic-1.10.15-cp37-cp37m-win_amd64.whl", hash = "sha256:d31ee5b14a82c9afe2bd26aaa405293d4237d0591527d9129ce36e58f19f95c1"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5e09c19df304b8123938dc3c53d3d3be6ec74b9d7d0d80f4f4b5432ae16c2022"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7ac9237cd62947db00a0d16acf2f3e00d1ae9d3bd602b9c415f93e7a9fc10528"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:584f2d4c98ffec420e02305cf675857bae03c9d617fcfdc34946b1160213a948"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbc6989fad0c030bd70a0b6f626f98a862224bc2b1e36bfc531ea2facc0a340c"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d573082c6ef99336f2cb5b667b781d2f776d4af311574fb53d908517ba523c22"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6bd7030c9abc80134087d8b6e7aa957e43d35714daa116aced57269a445b8f7b"}, + {file = "pydantic-1.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:3350f527bb04138f8aff932dc828f154847fbdc7a1a44c240fbfff1b57f49a12"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51d405b42f1b86703555797270e4970a9f9bd7953f3990142e69d1037f9d9e51"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a980a77c52723b0dc56640ced396b73a024d4b74f02bcb2d21dbbac1debbe9d0"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f1a1fb467d3f49e1708a3f632b11c69fccb4e748a325d5a491ddc7b5d22383"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:676ed48f2c5bbad835f1a8ed8a6d44c1cd5a21121116d2ac40bd1cd3619746ed"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92229f73400b80c13afcd050687f4d7e88de9234d74b27e6728aa689abcf58cc"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2746189100c646682eff0bce95efa7d2e203420d8e1c613dc0c6b4c1d9c1fde4"}, + {file = "pydantic-1.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:394f08750bd8eaad714718812e7fab615f873b3cdd0b9d84e76e51ef3b50b6b7"}, + {file = "pydantic-1.10.15-py3-none-any.whl", hash = "sha256:28e552a060ba2740d0d2aabe35162652c1459a0b9069fe0db7f4ee0e18e74d58"}, + {file = "pydantic-1.10.15.tar.gz", hash = "sha256:ca832e124eda231a60a041da4f013e3ff24949d94a01154b137fc2f2a43c3ffb"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pyparsing" +version = "3.1.2" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +optional = false +python-versions = ">=3.6.8" +files = [ + {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, + {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, +] + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + +[[package]] +name = "pyrsistent" +version = "0.20.0" +description = "Persistent/Functional/Immutable data structures" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, +] + +[[package]] +name = "pytest" +version = "7.4.4" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "regex" +version = "2023.12.25" +description = "Alternative regular expression module, to replace re." +optional = false +python-versions = ">=3.7" +files = [ + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"}, + {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"}, + {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"}, + {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"}, + {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"}, + {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"}, + {file = "regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"}, + {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"}, + {file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = "sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"}, + {file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"}, + {file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"}, + {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"}, + {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"}, + {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"}, + {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-cache" +version = "1.2.0" +description = "A persistent cache for python requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, +] + +[package.dependencies] +attrs = ">=21.2" +cattrs = ">=22.2" +platformdirs = ">=2.5" +requests = ">=2.22" +url-normalize = ">=1.4" +urllib3 = ">=1.25.5" + +[package.extras] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] +bson = ["bson (>=0.5)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] +dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] +json = ["ujson (>=5.4)"] +mongodb = ["pymongo (>=3)"] +redis = ["redis (>=3)"] +security = ["itsdangerous (>=2.0)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "ruff" +version = "0.3.7" +description = "An extremely fast Python linter and code formatter, written in Rust." +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0e8377cccb2f07abd25e84fc5b2cbe48eeb0fea9f1719cad7caedb061d70e5ce"}, + {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:15a4d1cc1e64e556fa0d67bfd388fed416b7f3b26d5d1c3e7d192c897e39ba4b"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d28bdf3d7dc71dd46929fafeec98ba89b7c3550c3f0978e36389b5631b793663"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:379b67d4f49774ba679593b232dcd90d9e10f04d96e3c8ce4a28037ae473f7bb"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c060aea8ad5ef21cdfbbe05475ab5104ce7827b639a78dd55383a6e9895b7c51"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:ebf8f615dde968272d70502c083ebf963b6781aacd3079081e03b32adfe4d58a"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d48098bd8f5c38897b03604f5428901b65e3c97d40b3952e38637b5404b739a2"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da8a4fda219bf9024692b1bc68c9cff4b80507879ada8769dc7e985755d662ea"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c44e0149f1d8b48c4d5c33d88c677a4aa22fd09b1683d6a7ff55b816b5d074f"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3050ec0af72b709a62ecc2aca941b9cd479a7bf2b36cc4562f0033d688e44fa1"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a29cc38e4c1ab00da18a3f6777f8b50099d73326981bb7d182e54a9a21bb4ff7"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5b15cc59c19edca917f51b1956637db47e200b0fc5e6e1878233d3a938384b0b"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e491045781b1e38b72c91247cf4634f040f8d0cb3e6d3d64d38dcf43616650b4"}, + {file = "ruff-0.3.7-py3-none-win32.whl", hash = "sha256:bc931de87593d64fad3a22e201e55ad76271f1d5bfc44e1a1887edd0903c7d9f"}, + {file = "ruff-0.3.7-py3-none-win_amd64.whl", hash = "sha256:5ef0e501e1e39f35e03c2acb1d1238c595b8bb36cf7a170e7c1df1b73da00e74"}, + {file = "ruff-0.3.7-py3-none-win_arm64.whl", hash = "sha256:789e144f6dc7019d1f92a812891c645274ed08af6037d11fc65fcbc183b7d59f"}, + {file = "ruff-0.3.7.tar.gz", hash = "sha256:d5c1aebee5162c2226784800ae031f660c350e7a3402c4d1f8ea4e97e232e3ba"}, +] + +[[package]] +name = "scikit-learn" +version = "1.4.2" +description = "A set of python modules for machine learning and data mining" +optional = false +python-versions = ">=3.9" +files = [ + {file = "scikit-learn-1.4.2.tar.gz", hash = "sha256:daa1c471d95bad080c6e44b4946c9390a4842adc3082572c20e4f8884e39e959"}, + {file = "scikit_learn-1.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8539a41b3d6d1af82eb629f9c57f37428ff1481c1e34dddb3b9d7af8ede67ac5"}, + {file = "scikit_learn-1.4.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:68b8404841f944a4a1459b07198fa2edd41a82f189b44f3e1d55c104dbc2e40c"}, + {file = "scikit_learn-1.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81bf5d8bbe87643103334032dd82f7419bc8c8d02a763643a6b9a5c7288c5054"}, + {file = "scikit_learn-1.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36f0ea5d0f693cb247a073d21a4123bdf4172e470e6d163c12b74cbb1536cf38"}, + {file = "scikit_learn-1.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:87440e2e188c87db80ea4023440923dccbd56fbc2d557b18ced00fef79da0727"}, + {file = "scikit_learn-1.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:45dee87ac5309bb82e3ea633955030df9bbcb8d2cdb30383c6cd483691c546cc"}, + {file = "scikit_learn-1.4.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:1d0b25d9c651fd050555aadd57431b53d4cf664e749069da77f3d52c5ad14b3b"}, + {file = "scikit_learn-1.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0203c368058ab92efc6168a1507d388d41469c873e96ec220ca8e74079bf62e"}, + {file = "scikit_learn-1.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44c62f2b124848a28fd695db5bc4da019287abf390bfce602ddc8aa1ec186aae"}, + {file = "scikit_learn-1.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:5cd7b524115499b18b63f0c96f4224eb885564937a0b3477531b2b63ce331904"}, + {file = "scikit_learn-1.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:90378e1747949f90c8f385898fff35d73193dfcaec3dd75d6b542f90c4e89755"}, + {file = "scikit_learn-1.4.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:ff4effe5a1d4e8fed260a83a163f7dbf4f6087b54528d8880bab1d1377bd78be"}, + {file = "scikit_learn-1.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:671e2f0c3f2c15409dae4f282a3a619601fa824d2c820e5b608d9d775f91780c"}, + {file = "scikit_learn-1.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d36d0bc983336bbc1be22f9b686b50c964f593c8a9a913a792442af9bf4f5e68"}, + {file = "scikit_learn-1.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:d762070980c17ba3e9a4a1e043ba0518ce4c55152032f1af0ca6f39b376b5928"}, + {file = "scikit_learn-1.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d9993d5e78a8148b1d0fdf5b15ed92452af5581734129998c26f481c46586d68"}, + {file = "scikit_learn-1.4.2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:426d258fddac674fdf33f3cb2d54d26f49406e2599dbf9a32b4d1696091d4256"}, + {file = "scikit_learn-1.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5460a1a5b043ae5ae4596b3126a4ec33ccba1b51e7ca2c5d36dac2169f62ab1d"}, + {file = "scikit_learn-1.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49d64ef6cb8c093d883e5a36c4766548d974898d378e395ba41a806d0e824db8"}, + {file = "scikit_learn-1.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:c97a50b05c194be9146d61fe87dbf8eac62b203d9e87a3ccc6ae9aed2dfaf361"}, +] + +[package.dependencies] +joblib = ">=1.2.0" +numpy = ">=1.19.5" +scipy = ">=1.6.0" +threadpoolctl = ">=2.0.0" + +[package.extras] +benchmark = ["matplotlib (>=3.3.4)", "memory-profiler (>=0.57.0)", "pandas (>=1.1.5)"] +docs = ["Pillow (>=7.1.2)", "matplotlib (>=3.3.4)", "memory-profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)", "sphinx (>=6.0.0)", "sphinx-copybutton (>=0.5.2)", "sphinx-gallery (>=0.15.0)", "sphinx-prompt (>=1.3.0)", "sphinxext-opengraph (>=0.4.2)"] +examples = ["matplotlib (>=3.3.4)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)"] +tests = ["black (>=23.3.0)", "matplotlib (>=3.3.4)", "mypy (>=1.3)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "polars (>=0.19.12)", "pooch (>=1.6.0)", "pyamg (>=4.0.0)", "pyarrow (>=12.0.0)", "pytest (>=7.1.2)", "pytest-cov (>=2.9.0)", "ruff (>=0.0.272)", "scikit-image (>=0.17.2)"] + +[[package]] +name = "scipy" +version = "1.13.0" +description = "Fundamental algorithms for scientific computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "scipy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ba419578ab343a4e0a77c0ef82f088238a93eef141b2b8017e46149776dfad4d"}, + {file = "scipy-1.13.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:22789b56a999265431c417d462e5b7f2b487e831ca7bef5edeb56efe4c93f86e"}, + {file = "scipy-1.13.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05f1432ba070e90d42d7fd836462c50bf98bd08bed0aa616c359eed8a04e3922"}, + {file = "scipy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8434f6f3fa49f631fae84afee424e2483289dfc30a47755b4b4e6b07b2633a4"}, + {file = "scipy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:dcbb9ea49b0167de4167c40eeee6e167caeef11effb0670b554d10b1e693a8b9"}, + {file = "scipy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:1d2f7bb14c178f8b13ebae93f67e42b0a6b0fc50eba1cd8021c9b6e08e8fb1cd"}, + {file = "scipy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fbcf8abaf5aa2dc8d6400566c1a727aed338b5fe880cde64907596a89d576fa"}, + {file = "scipy-1.13.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:5e4a756355522eb60fcd61f8372ac2549073c8788f6114449b37e9e8104f15a5"}, + {file = "scipy-1.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5acd8e1dbd8dbe38d0004b1497019b2dbbc3d70691e65d69615f8a7292865d7"}, + {file = "scipy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ff7dad5d24a8045d836671e082a490848e8639cabb3dbdacb29f943a678683d"}, + {file = "scipy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4dca18c3ffee287ddd3bc8f1dabaf45f5305c5afc9f8ab9cbfab855e70b2df5c"}, + {file = "scipy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:a2f471de4d01200718b2b8927f7d76b5d9bde18047ea0fa8bd15c5ba3f26a1d6"}, + {file = "scipy-1.13.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d0de696f589681c2802f9090fff730c218f7c51ff49bf252b6a97ec4a5d19e8b"}, + {file = "scipy-1.13.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:b2a3ff461ec4756b7e8e42e1c681077349a038f0686132d623fa404c0bee2551"}, + {file = "scipy-1.13.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf9fe63e7a4bf01d3645b13ff2aa6dea023d38993f42aaac81a18b1bda7a82a"}, + {file = "scipy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e7626dfd91cdea5714f343ce1176b6c4745155d234f1033584154f60ef1ff42"}, + {file = "scipy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:109d391d720fcebf2fbe008621952b08e52907cf4c8c7efc7376822151820820"}, + {file = "scipy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:8930ae3ea371d6b91c203b1032b9600d69c568e537b7988a3073dfe4d4774f21"}, + {file = "scipy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5407708195cb38d70fd2d6bb04b1b9dd5c92297d86e9f9daae1576bd9e06f602"}, + {file = "scipy-1.13.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:ac38c4c92951ac0f729c4c48c9e13eb3675d9986cc0c83943784d7390d540c78"}, + {file = "scipy-1.13.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09c74543c4fbeb67af6ce457f6a6a28e5d3739a87f62412e4a16e46f164f0ae5"}, + {file = "scipy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28e286bf9ac422d6beb559bc61312c348ca9b0f0dae0d7c5afde7f722d6ea13d"}, + {file = "scipy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:33fde20efc380bd23a78a4d26d59fc8704e9b5fd9b08841693eb46716ba13d86"}, + {file = "scipy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:45c08bec71d3546d606989ba6e7daa6f0992918171e2a6f7fbedfa7361c2de1e"}, + {file = "scipy-1.13.0.tar.gz", hash = "sha256:58569af537ea29d3f78e5abd18398459f195546bb3be23d16677fb26616cc11e"}, +] + +[package.dependencies] +numpy = ">=1.22.4,<2.3" + +[package.extras] +dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy", "pycodestyle", "pydevtool", "rich-click", "ruff", "types-psutil", "typing_extensions"] +doc = ["jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.12.0)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0)", "sphinx-design (>=0.4.0)"] +test = ["array-api-strict", "asv", "gmpy2", "hypothesis (>=6.30)", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] + +[[package]] +name = "setuptools" +version = "69.5.1" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"}, + {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "sqlalchemy" +version = "2.0.29" +description = "Database Abstraction Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "SQLAlchemy-2.0.29-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4c142852ae192e9fe5aad5c350ea6befe9db14370b34047e1f0f7cf99e63c63b"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:99a1e69d4e26f71e750e9ad6fdc8614fbddb67cfe2173a3628a2566034e223c7"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ef3fbccb4058355053c51b82fd3501a6e13dd808c8d8cd2561e610c5456013c"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d6753305936eddc8ed190e006b7bb33a8f50b9854823485eed3a886857ab8d1"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0f3ca96af060a5250a8ad5a63699180bc780c2edf8abf96c58af175921df847a"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c4520047006b1d3f0d89e0532978c0688219857eb2fee7c48052560ae76aca1e"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-win32.whl", hash = "sha256:b2a0e3cf0caac2085ff172c3faacd1e00c376e6884b5bc4dd5b6b84623e29e4f"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-win_amd64.whl", hash = "sha256:01d10638a37460616708062a40c7b55f73e4d35eaa146781c683e0fa7f6c43fb"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:308ef9cb41d099099fffc9d35781638986870b29f744382904bf9c7dadd08513"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:296195df68326a48385e7a96e877bc19aa210e485fa381c5246bc0234c36c78e"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a13b917b4ffe5a0a31b83d051d60477819ddf18276852ea68037a144a506efb9"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f6d971255d9ddbd3189e2e79d743ff4845c07f0633adfd1de3f63d930dbe673"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:61405ea2d563407d316c63a7b5271ae5d274a2a9fbcd01b0aa5503635699fa1e"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:de7202ffe4d4a8c1e3cde1c03e01c1a3772c92858837e8f3879b497158e4cb44"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-win32.whl", hash = "sha256:b5d7ed79df55a731749ce65ec20d666d82b185fa4898430b17cb90c892741520"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-win_amd64.whl", hash = "sha256:205f5a2b39d7c380cbc3b5dcc8f2762fb5bcb716838e2d26ccbc54330775b003"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d96710d834a6fb31e21381c6d7b76ec729bd08c75a25a5184b1089141356171f"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:52de4736404e53c5c6a91ef2698c01e52333988ebdc218f14c833237a0804f1b"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c7b02525ede2a164c5fa5014915ba3591730f2cc831f5be9ff3b7fd3e30958e"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dfefdb3e54cd15f5d56fd5ae32f1da2d95d78319c1f6dfb9bcd0eb15d603d5d"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a88913000da9205b13f6f195f0813b6ffd8a0c0c2bd58d499e00a30eb508870c"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fecd5089c4be1bcc37c35e9aa678938d2888845a134dd016de457b942cf5a758"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-win32.whl", hash = "sha256:8197d6f7a3d2b468861ebb4c9f998b9df9e358d6e1cf9c2a01061cb9b6cf4e41"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-win_amd64.whl", hash = "sha256:9b19836ccca0d321e237560e475fd99c3d8655d03da80c845c4da20dda31b6e1"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:87a1d53a5382cdbbf4b7619f107cc862c1b0a4feb29000922db72e5a66a5ffc0"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a0732dffe32333211801b28339d2a0babc1971bc90a983e3035e7b0d6f06b93"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90453597a753322d6aa770c5935887ab1fc49cc4c4fdd436901308383d698b4b"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ea311d4ee9a8fa67f139c088ae9f905fcf0277d6cd75c310a21a88bf85e130f5"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5f20cb0a63a3e0ec4e169aa8890e32b949c8145983afa13a708bc4b0a1f30e03"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-win32.whl", hash = "sha256:e5bbe55e8552019c6463709b39634a5fc55e080d0827e2a3a11e18eb73f5cdbd"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-win_amd64.whl", hash = "sha256:c2f9c762a2735600654c654bf48dad388b888f8ce387b095806480e6e4ff6907"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7e614d7a25a43a9f54fcce4675c12761b248547f3d41b195e8010ca7297c369c"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:471fcb39c6adf37f820350c28aac4a7df9d3940c6548b624a642852e727ea586"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:988569c8732f54ad3234cf9c561364221a9e943b78dc7a4aaf35ccc2265f1930"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dddaae9b81c88083e6437de95c41e86823d150f4ee94bf24e158a4526cbead01"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:334184d1ab8f4c87f9652b048af3f7abea1c809dfe526fb0435348a6fef3d380"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:38b624e5cf02a69b113c8047cf7f66b5dfe4a2ca07ff8b8716da4f1b3ae81567"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-win32.whl", hash = "sha256:bab41acf151cd68bc2b466deae5deeb9e8ae9c50ad113444151ad965d5bf685b"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-win_amd64.whl", hash = "sha256:52c8011088305476691b8750c60e03b87910a123cfd9ad48576d6414b6ec2a1d"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3071ad498896907a5ef756206b9dc750f8e57352113c19272bdfdc429c7bd7de"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dba622396a3170974f81bad49aacebd243455ec3cc70615aeaef9e9613b5bca5"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b184e3de58009cc0bf32e20f137f1ec75a32470f5fede06c58f6c355ed42a72"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c37f1050feb91f3d6c32f864d8e114ff5545a4a7afe56778d76a9aec62638ba"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bda7ce59b06d0f09afe22c56714c65c957b1068dee3d5e74d743edec7daba552"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:25664e18bef6dc45015b08f99c63952a53a0a61f61f2e48a9e70cec27e55f699"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-win32.whl", hash = "sha256:77d29cb6c34b14af8a484e831ab530c0f7188f8efed1c6a833a2c674bf3c26ec"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-win_amd64.whl", hash = "sha256:04c487305ab035a9548f573763915189fc0fe0824d9ba28433196f8436f1449c"}, + {file = "SQLAlchemy-2.0.29-py3-none-any.whl", hash = "sha256:dc4ee2d4ee43251905f88637d5281a8d52e916a021384ec10758826f5cbae305"}, + {file = "SQLAlchemy-2.0.29.tar.gz", hash = "sha256:bd9566b8e58cabd700bc367b60e90d9349cd16f0984973f98a9a09f9c64e86f0"}, +] + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} +typing-extensions = ">=4.6.0" + +[package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=8)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3_binary"] + +[[package]] +name = "tenacity" +version = "8.2.3" +description = "Retry code until it succeeds" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tenacity-8.2.3-py3-none-any.whl", hash = "sha256:ce510e327a630c9e1beaf17d42e6ffacc88185044ad85cf74c0a8887c6a0f88c"}, + {file = "tenacity-8.2.3.tar.gz", hash = "sha256:5398ef0d78e63f40007c1fb4c0bff96e1911394d2fa8d194f77619c05ff6cc8a"}, +] + +[package.extras] +doc = ["reno", "sphinx", "tornado (>=4.5)"] + +[[package]] +name = "threadpoolctl" +version = "3.4.0" +description = "threadpoolctl" +optional = false +python-versions = ">=3.8" +files = [ + {file = "threadpoolctl-3.4.0-py3-none-any.whl", hash = "sha256:8f4c689a65b23e5ed825c8436a92b818aac005e0f3715f6a1664d7c7ee29d262"}, + {file = "threadpoolctl-3.4.0.tar.gz", hash = "sha256:f11b491a03661d6dd7ef692dd422ab34185d982466c49c8f98c8f716b5c93196"}, +] + +[[package]] +name = "tiktoken" +version = "0.4.0" +description = "tiktoken is a fast BPE tokeniser for use with OpenAI's models" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tiktoken-0.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:176cad7f053d2cc82ce7e2a7c883ccc6971840a4b5276740d0b732a2b2011f8a"}, + {file = "tiktoken-0.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:450d504892b3ac80207700266ee87c932df8efea54e05cefe8613edc963c1285"}, + {file = "tiktoken-0.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00d662de1e7986d129139faf15e6a6ee7665ee103440769b8dedf3e7ba6ac37f"}, + {file = "tiktoken-0.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5727d852ead18b7927b8adf558a6f913a15c7766725b23dbe21d22e243041b28"}, + {file = "tiktoken-0.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c06cd92b09eb0404cedce3702fa866bf0d00e399439dad3f10288ddc31045422"}, + {file = "tiktoken-0.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9ec161e40ed44e4210d3b31e2ff426b4a55e8254f1023e5d2595cb60044f8ea6"}, + {file = "tiktoken-0.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:1e8fa13cf9889d2c928b9e258e9dbbbf88ab02016e4236aae76e3b4f82dd8288"}, + {file = "tiktoken-0.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bb2341836b725c60d0ab3c84970b9b5f68d4b733a7bcb80fb25967e5addb9920"}, + {file = "tiktoken-0.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ca30367ad750ee7d42fe80079d3092bd35bb266be7882b79c3bd159b39a17b0"}, + {file = "tiktoken-0.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3dc3df19ddec79435bb2a94ee46f4b9560d0299c23520803d851008445671197"}, + {file = "tiktoken-0.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d980fa066e962ef0f4dad0222e63a484c0c993c7a47c7dafda844ca5aded1f3"}, + {file = "tiktoken-0.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:329f548a821a2f339adc9fbcfd9fc12602e4b3f8598df5593cfc09839e9ae5e4"}, + {file = "tiktoken-0.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b1a038cee487931a5caaef0a2e8520e645508cde21717eacc9af3fbda097d8bb"}, + {file = "tiktoken-0.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:08efa59468dbe23ed038c28893e2a7158d8c211c3dd07f2bbc9a30e012512f1d"}, + {file = "tiktoken-0.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f3020350685e009053829c1168703c346fb32c70c57d828ca3742558e94827a9"}, + {file = "tiktoken-0.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba16698c42aad8190e746cd82f6a06769ac7edd415d62ba027ea1d99d958ed93"}, + {file = "tiktoken-0.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c15d9955cc18d0d7ffcc9c03dc51167aedae98542238b54a2e659bd25fe77ed"}, + {file = "tiktoken-0.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64e1091c7103100d5e2c6ea706f0ec9cd6dc313e6fe7775ef777f40d8c20811e"}, + {file = "tiktoken-0.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e87751b54eb7bca580126353a9cf17a8a8eaadd44edaac0e01123e1513a33281"}, + {file = "tiktoken-0.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e063b988b8ba8b66d6cc2026d937557437e79258095f52eaecfafb18a0a10c03"}, + {file = "tiktoken-0.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9c6dd439e878172dc163fced3bc7b19b9ab549c271b257599f55afc3a6a5edef"}, + {file = "tiktoken-0.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8d1d97f83697ff44466c6bef5d35b6bcdb51e0125829a9c0ed1e6e39fb9a08fb"}, + {file = "tiktoken-0.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1b6bce7c68aa765f666474c7c11a7aebda3816b58ecafb209afa59c799b0dd2d"}, + {file = "tiktoken-0.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a73286c35899ca51d8d764bc0b4d60838627ce193acb60cc88aea60bddec4fd"}, + {file = "tiktoken-0.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0394967d2236a60fd0aacef26646b53636423cc9c70c32f7c5124ebe86f3093"}, + {file = "tiktoken-0.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:dae2af6f03ecba5f679449fa66ed96585b2fa6accb7fd57d9649e9e398a94f44"}, + {file = "tiktoken-0.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:55e251b1da3c293432179cf7c452cfa35562da286786be5a8b1ee3405c2b0dd2"}, + {file = "tiktoken-0.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:c835d0ee1f84a5aa04921717754eadbc0f0a56cf613f78dfc1cf9ad35f6c3fea"}, + {file = "tiktoken-0.4.0.tar.gz", hash = "sha256:59b20a819969735b48161ced9b92f05dc4519c17be4015cfb73b65270a243620"}, +] + +[package.dependencies] +regex = ">=2022.1.18" +requests = ">=2.26.0" + +[package.extras] +blobfile = ["blobfile (>=2)"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "tqdm" +version = "4.66.2" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tqdm-4.66.2-py3-none-any.whl", hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"}, + {file = "tqdm-4.66.2.tar.gz", hash = "sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + +[[package]] +name = "types-pytz" +version = "2024.1.0.20240203" +description = "Typing stubs for pytz" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-pytz-2024.1.0.20240203.tar.gz", hash = "sha256:c93751ee20dfc6e054a0148f8f5227b9a00b79c90a4d3c9f464711a73179c89e"}, + {file = "types_pytz-2024.1.0.20240203-py3-none-any.whl", hash = "sha256:9679eef0365db3af91ef7722c199dbb75ee5c1b67e3c4dd7bfbeb1b8a71c21a3"}, +] + +[[package]] +name = "typing-extensions" +version = "4.11.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, + {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, +] + +[[package]] +name = "typing-inspect" +version = "0.9.0" +description = "Runtime inspection utilities for typing module." +optional = false +python-versions = "*" +files = [ + {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, + {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, +] + +[package.dependencies] +mypy-extensions = ">=0.3.0" +typing-extensions = ">=3.7.4" + +[[package]] +name = "tzdata" +version = "2024.1" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, +] + +[[package]] +name = "url-normalize" +version = "1.4.3" +description = "URL normalization for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, + {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wcmatch" +version = "8.4" +description = "Wildcard/glob file name matcher." +optional = false +python-versions = ">=3.7" +files = [ + {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, + {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, +] + +[package.dependencies] +bracex = ">=2.1.1" + +[[package]] +name = "win32-setctime" +version = "1.1.0" +description = "A small Python utility to set file creation time on Windows" +optional = false +python-versions = ">=3.5" +files = [ + {file = "win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"}, + {file = "win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2"}, +] + +[package.extras] +dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[[package]] +name = "yarl" +version = "1.9.4" +description = "Yet another URL library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, + {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, + {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, + {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, + {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, + {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, + {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, + {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, + {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, + {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, + {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, + {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, + {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, + {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, + {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, + {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" + +[[package]] +name = "zipp" +version = "3.18.1" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, + {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9,<3.12" +content-hash = "d889a207a4f74be7d21811863564840a0253e405684ebbc8806435fc502e563d" diff --git a/airbyte-integrations/connectors/destination-pinecone/pyproject.toml b/airbyte-integrations/connectors/destination-pinecone/pyproject.toml new file mode 100644 index 0000000000000..e5201b7aa74c3 --- /dev/null +++ b/airbyte-integrations/connectors/destination-pinecone/pyproject.toml @@ -0,0 +1,30 @@ +[build-system] +requires = [ "poetry-core>=1.0.0",] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +name = "airbyte-destination-pinecone" +version = "0.0.24" +description = "Airbyte destination implementation for Pinecone." +authors = ["Airbyte "] +license = "MIT" +readme = "README.md" +documentation = "https://docs.airbyte.com/integrations/destinations/pinecone" +homepage = "https://airbyte.com" +repository = "https://github.com/airbytehq/airbyte" + +[[tool.poetry.packages]] +include = "destination_pinecone" + +[tool.poetry.dependencies] +python = "^3.9,<3.12" +airbyte-cdk = {version = "0.81.6", extras = ["vector-db-based"]} +pinecone-client = {version = "2.2.4", extras = ["grpc"]} + +[tool.poetry.group.dev.dependencies] +pytest = "^7.2" +ruff = "^0.3.2" +mypy = "^1.9.0" + +[tool.poetry.scripts] +destination-pinecone = "destination_pinecone.run:run" diff --git a/airbyte-integrations/connectors/destination-pinecone/requirements.txt b/airbyte-integrations/connectors/destination-pinecone/requirements.txt deleted file mode 100644 index d6e1198b1ab1f..0000000000000 --- a/airbyte-integrations/connectors/destination-pinecone/requirements.txt +++ /dev/null @@ -1 +0,0 @@ --e . diff --git a/airbyte-integrations/connectors/destination-pinecone/setup.py b/airbyte-integrations/connectors/destination-pinecone/setup.py deleted file mode 100644 index 3e1fbd33d1dca..0000000000000 --- a/airbyte-integrations/connectors/destination-pinecone/setup.py +++ /dev/null @@ -1,26 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = [ - "airbyte-cdk[vector-db-based]==0.57.0", - "pinecone-client[grpc]", -] - -TEST_REQUIREMENTS = ["pytest~=6.2"] - -setup( - name="destination_pinecone", - description="Destination implementation for Pinecone.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={"": ["*.json"]}, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/build.gradle b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/build.gradle index 211f167e7fbfa..875c0b1f001d2 100644 --- a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/build.gradle +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/build.gradle @@ -1,10 +1,9 @@ plugins { id 'airbyte-java-connector' - id 'org.jetbrains.kotlin.jvm' version '1.9.22' } airbyteJavaConnector { - cdkVersionRequired = '0.23.18' + cdkVersionRequired = '0.30.0' features = ['db-destinations', 'typing-deduping', 'datastore-postgres'] useLocalCdk = false } diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/metadata.yaml b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/metadata.yaml index e788d32190edc..f731131ac2e39 100644 --- a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/metadata.yaml +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/metadata.yaml @@ -2,7 +2,7 @@ data: connectorSubtype: database connectorType: destination definitionId: 25c5221d-dce2-4163-ade9-739ef790f503 - dockerImageTag: 2.0.4 + dockerImageTag: 2.0.9 dockerRepository: airbyte/destination-postgres-strict-encrypt documentationUrl: https://docs.airbyte.com/integrations/destinations/postgres githubIssueLabel: destination-postgres @@ -18,8 +18,8 @@ data: breakingChanges: 2.0.0: message: > - This version introduces [Destinations V2](https://docs.airbyte.com/release_notes/upgrading_to_destinations_v2/#what-is-destinations-v2), which provides better error handling, incremental delivery of data for large syncs, and improved final table structures. - To review the breaking changes, and how to upgrade, see [here](https://docs.airbyte.com/release_notes/upgrading_to_destinations_v2/#quick-start-to-upgrading). + This version introduces [Destinations V2](https://docs.airbyte.com/release_notes/upgrading_to_destinations_v2/#what-is-destinations-v2), which provides better error handling, incremental delivery of data for large syncs, and improved final table structures. + To review the breaking changes, and how to upgrade, see [here](https://docs.airbyte.com/release_notes/upgrading_to_destinations_v2/#quick-start-to-upgrading). These changes will likely require updates to downstream dbt / SQL models, which we walk through [here](https://docs.airbyte.com/release_notes/upgrading_to_destinations_v2/#updating-downstream-transformations). Selecting `Upgrade` will upgrade **all** connections using this destination at their next sync. For more controlled upgrade [see instructions](https://docs.airbyte.com/release_notes/upgrading_to_destinations_v2/#upgrading-connections-one-by-one-with-dual-writing). upgradeDeadline: "2024-05-31" diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync1_cursorchange_expectedrecords_dedup_final.jsonl b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync1_cursorchange_expectedrecords_dedup_final.jsonl index 9f11b2293a95b..1710a288dde5f 100644 --- a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync1_cursorchange_expectedrecords_dedup_final.jsonl +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync1_cursorchange_expectedrecords_dedup_final.jsonl @@ -1,3 +1,3 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 200, "old_cursor": 1, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 201, "old_cursor": 2, "name": "Bob", "address": {"city": "Boston", "state": "MA"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":["Problem with `age`", "Problem with `registration_date`"]}, "id1": 2, "id2": 200, "old_cursor": 3, "name": "Charlie"} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 200, "old_cursor": 1, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 201, "old_cursor": 2, "name": "Bob", "address": {"city": "Boston", "state": "MA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[{"field":"age","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"registration_date","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"}]}, "id1": 2, "id2": 200, "old_cursor": 3, "name": "Charlie"} diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync1_cursorchange_expectedrecords_dedup_raw.jsonl b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync1_cursorchange_expectedrecords_dedup_raw.jsonl index 7f75f0f804e25..fdd7c1d4723dc 100644 --- a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync1_cursorchange_expectedrecords_dedup_raw.jsonl +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync1_cursorchange_expectedrecords_dedup_raw.jsonl @@ -1,4 +1,4 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "old_cursor": 0, "_ab_cdc_deleted_at": null, "name" :"Alice", "address": {"city": "San Francisco", "state": "CA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "old_cursor": 1, "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "old_cursor": 2, "name": "Bob", "address": {"city": "Boston", "state": "MA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 2, "id2": 200, "old_cursor": 3, "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "old_cursor": 0, "_ab_cdc_deleted_at": null, "name" :"Alice", "address": {"city": "San Francisco", "state": "CA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "old_cursor": 1, "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "old_cursor": 2, "name": "Bob", "address": {"city": "Boston", "state": "MA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 2, "id2": 200, "old_cursor": 3, "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}, "_airbyte_meta": {"changes": []}} diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync1_expectedrecords_dedup_final.jsonl b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync1_expectedrecords_dedup_final.jsonl index 61024be7867d0..8f75c02eb4603 100644 --- a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync1_expectedrecords_dedup_final.jsonl +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync1_expectedrecords_dedup_final.jsonl @@ -1,5 +1,5 @@ // Keep the Alice record with more recent updated_at -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00.000000Z", "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00.000000Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":["Problem with `age`", "Problem with `registration_date`"]}, "id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00.000000Z", "name": "Charlie"} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00.000000Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00.000000Z", "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00.000000Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[{"field":"age","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"registration_date","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"address","change":"NULLED","reason":"SOURCE_RETRIEVAL_ERROR"}]}, "id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00.000000Z", "name": "Charlie"} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00.000000Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync1_expectedrecords_dedup_final2.jsonl b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync1_expectedrecords_dedup_final2.jsonl index b2bf47df66c11..1b29b504aadd4 100644 --- a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync1_expectedrecords_dedup_final2.jsonl +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync1_expectedrecords_dedup_final2.jsonl @@ -1 +1 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2001-01-01T00:00:00.000000Z", "name": "Someone completely different"} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2001-01-01T00:00:00.000000Z", "name": "Someone completely different"} diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync1_expectedrecords_nondedup_final.jsonl b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync1_expectedrecords_nondedup_final.jsonl index f3a225756cedf..1164999f33e0e 100644 --- a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync1_expectedrecords_nondedup_final.jsonl +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync1_expectedrecords_nondedup_final.jsonl @@ -1,6 +1,6 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00.000000Z", "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00.000000Z", "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00.000000Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00.000000Z", "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00.000000Z", "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00.000000Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}} // Invalid columns are nulled out (i.e. SQL null, not JSON null) -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":["Problem with `age`", "Problem with `registration_date`"]}, "id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00.000000Z", "name": "Charlie"} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00.000000Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} \ No newline at end of file +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[{"field":"age","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"registration_date","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"address","change":"NULLED","reason":"SOURCE_RETRIEVAL_ERROR"}]}, "id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00.000000Z", "name": "Charlie"} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00.000000Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} \ No newline at end of file diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync1_expectedrecords_raw.jsonl b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync1_expectedrecords_raw.jsonl index 4012c086a9e61..c31f21a205c7d 100644 --- a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync1_expectedrecords_raw.jsonl +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync1_expectedrecords_raw.jsonl @@ -1,6 +1,6 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}}, "_airbyte_meta": {"changes": []}} // Invalid data is still allowed in the raw table. -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00Z", "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00Z", "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}, "_airbyte_meta": {"changes": [{"field": "address", "change": "NULLED", "reason": "SOURCE_RETRIEVAL_ERROR"}]}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"}, "_airbyte_meta": {"changes": []}} diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync1_expectedrecords_raw2.jsonl b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync1_expectedrecords_raw2.jsonl index b489accda1bb7..45c12cb18720a 100644 --- a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync1_expectedrecords_raw2.jsonl +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync1_expectedrecords_raw2.jsonl @@ -1 +1 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2001-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Someone completely different"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2001-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Someone completely different"}, "_airbyte_meta": {"changes": []}} diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_cursorchange_expectedrecords_incremental_dedup_final.jsonl b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_cursorchange_expectedrecords_incremental_dedup_final.jsonl index c26d4a49aacd7..b346de4404284 100644 --- a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_cursorchange_expectedrecords_incremental_dedup_final.jsonl +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_cursorchange_expectedrecords_incremental_dedup_final.jsonl @@ -1,3 +1,3 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Alice", "address": {"city": "Seattle", "state": "WA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Alice", "address": {"city": "Seattle", "state": "WA"}} // Charlie wasn't reemitted with updated_at, so it still has a null cursor -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":["Problem with `age`", "Problem with `registration_date`"]}, "id1": 2, "id2": 200, "name": "Charlie"} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[{"field":"age","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"registration_date","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"}]}, "id1": 2, "id2": 200, "name": "Charlie"} diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_cursorchange_expectedrecords_incremental_dedup_raw.jsonl b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_cursorchange_expectedrecords_incremental_dedup_raw.jsonl index 03f28e155af53..10ba449ad2e4a 100644 --- a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_cursorchange_expectedrecords_incremental_dedup_raw.jsonl +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_cursorchange_expectedrecords_incremental_dedup_raw.jsonl @@ -1,7 +1,7 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "old_cursor": 0, "_ab_cdc_deleted_at": null, "name" :"Alice", "address": {"city": "San Francisco", "state": "CA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "old_cursor": 1, "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "old_cursor": 2, "name": "Bob", "address": {"city": "Boston", "state": "MA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 2, "id2": 200, "old_cursor": 3, "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Seattle", "state": "WA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Bob", "address": {"city": "New York", "state": "NY"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00Z"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "old_cursor": 0, "_ab_cdc_deleted_at": null, "name" :"Alice", "address": {"city": "San Francisco", "state": "CA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "old_cursor": 1, "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "old_cursor": 2, "name": "Bob", "address": {"city": "Boston", "state": "MA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 2, "id2": 200, "old_cursor": 3, "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Seattle", "state": "WA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Bob", "address": {"city": "New York", "state": "NY"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00Z"}, "_airbyte_meta": {"changes": []}} diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_append_final.jsonl b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_append_final.jsonl index 0989dfc17ed07..413807314c8a2 100644 --- a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_append_final.jsonl +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_append_final.jsonl @@ -1,9 +1,9 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00.000000Z", "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00.000000Z", "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00.000000Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":["Problem with `age`", "Problem with `registration_date`"]}, "id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00.000000Z", "name": "Charlie"} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00.000000Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00.000000Z", "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00.000000Z", "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00.000000Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[{"field":"age","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"registration_date","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"address","change":"NULLED","reason":"SOURCE_RETRIEVAL_ERROR"}]}, "id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00.000000Z", "name": "Charlie"} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00.000000Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Alice", "address": {"city": "Seattle", "state": "WA"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"errors":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Bob", "address": {"city": "New York", "state": "NY"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"errors":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00.000000Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00.000000Z"} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Alice", "address": {"city": "Seattle", "state": "WA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Bob", "address": {"city": "New York", "state": "NY"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00.000000Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00.000000Z"} diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_append_mixed_meta_final.jsonl b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_append_mixed_meta_final.jsonl new file mode 100644 index 0000000000000..315e3707930d5 --- /dev/null +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_append_mixed_meta_final.jsonl @@ -0,0 +1,10 @@ +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00.000000Z", "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00.000000Z", "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00.000000Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":["Problem with `age`","Problem with `registration_date`"]}, "id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00.000000Z", "name": "Charlie"} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00.000000Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} + +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Alice", "address": {"city": "Seattle", "state": "WA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"changes":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Bob", "address": {"city": "New York", "state": "NY"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"changes":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00.000000Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00.000000Z"} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"changes":[{"field":"age","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"registration_date","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"address","change":"NULLED","reason":"SOURCE_RETRIEVAL_ERROR"}]}, "id1": 2, "id2": 200, "updated_at": "2000-01-02T00:03:00.000000Z", "name": "Charlie", "address": {"city": "San Francisco", "state": "CA"}} \ No newline at end of file diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_overwrite_final.jsonl b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_overwrite_final.jsonl index 9d1f1499469fc..0a4deced5cefc 100644 --- a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_overwrite_final.jsonl +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_overwrite_final.jsonl @@ -1,3 +1,3 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Alice", "address": {"city": "Seattle", "state": "WA"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"errors":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Bob", "address": {"city": "New York", "state": "NY"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"errors":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00.000000Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00.000000Z"} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Alice", "address": {"city": "Seattle", "state": "WA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"changes":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Bob", "address": {"city": "New York", "state": "NY"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"changes":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00.000000Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00.000000Z"} diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_overwrite_raw.jsonl b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_overwrite_raw.jsonl index 33bc3280be274..55d0a3af55826 100644 --- a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_overwrite_raw.jsonl +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_overwrite_raw.jsonl @@ -1,3 +1,3 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Seattle", "state": "WA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Bob", "address": {"city": "New York", "state": "NY"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00Z"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Seattle", "state": "WA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Bob", "address": {"city": "New York", "state": "NY"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00Z"}, "_airbyte_meta": {"changes": []}} diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_final.jsonl b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_final.jsonl index fd2a4b3adbf37..aeba20f60e2a5 100644 --- a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_final.jsonl +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_final.jsonl @@ -1,4 +1,4 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Alice", "address": {"city": "Seattle", "state": "WA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Alice", "address": {"city": "Seattle", "state": "WA"}} // Delete Bob, keep Charlie -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":["Problem with `age`", "Problem with `registration_date`"]}, "id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00.000000Z", "name": "Charlie"} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00.000000Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} \ No newline at end of file +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[{"field":"age","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"registration_date","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"address","change":"NULLED","reason":"SOURCE_RETRIEVAL_ERROR"}]}, "id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00.000000Z", "name": "Charlie"} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00.000000Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} \ No newline at end of file diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_final2.jsonl b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_final2.jsonl index 53c304c89d311..69eeec6bab90b 100644 --- a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_final2.jsonl +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_final2.jsonl @@ -1 +1 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2001-01-02T00:00:00.000000Z", "name": "Someone completely different v2"} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2001-01-02T00:00:00.000000Z", "name": "Someone completely different v2"} diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_meta_final.jsonl b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_meta_final.jsonl new file mode 100644 index 0000000000000..eb63a8d0a8bf0 --- /dev/null +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_meta_final.jsonl @@ -0,0 +1,5 @@ +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Alice", "address": {"city": "Seattle", "state": "WA"}} +// Delete Bob, updated Charlie +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"changes":[{"field":"age","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"registration_date","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"address","change":"NULLED","reason":"SOURCE_RETRIEVAL_ERROR"}]}, "id1": 2, "id2": 200, "updated_at": "2000-01-02T00:03:00.000000Z", "name": "Charlie", "address": {"city": "San Francisco", "state": "CA"}} +// Record before meta in raw table will continue to have errors. +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00.000000Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_expectedrecords_mixed_meta_raw.jsonl b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_expectedrecords_mixed_meta_raw.jsonl new file mode 100644 index 0000000000000..a1112818b1387 --- /dev/null +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_expectedrecords_mixed_meta_raw.jsonl @@ -0,0 +1,11 @@ +// We keep the records from the first sync +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00Z", "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"}} +// And append the records from the second sync +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Seattle", "state": "WA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Bob", "address": {"city": "New York", "state": "NY"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00Z"}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 2, "id2": 200, "updated_at": "2000-01-02T00:03:00Z", "name":"Charlie", "age":"this is not an integer", "registration_date":"this is not a date", "address": {"city": "San Francisco", "state": "CA"}}, "_airbyte_meta":{"changes":[{"field":"address","change":"NULLED","reason":"SOURCE_RETRIEVAL_ERROR"}]}} diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_expectedrecords_raw.jsonl b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_expectedrecords_raw.jsonl index 2f634c6ad4e95..f0c2f3b4c342a 100644 --- a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_expectedrecords_raw.jsonl +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_expectedrecords_raw.jsonl @@ -1,10 +1,10 @@ // We keep the records from the first sync -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00Z", "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00Z", "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}, "_airbyte_meta": {"changes":[{"field":"address","change":"NULLED","reason":"SOURCE_RETRIEVAL_ERROR"}]}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"}, "_airbyte_meta": {"changes": []}} // And append the records from the second sync -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Seattle", "state": "WA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Bob", "address": {"city": "New York", "state": "NY"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00Z"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Seattle", "state": "WA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Bob", "address": {"city": "New York", "state": "NY"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00Z"}, "_airbyte_meta": {"changes": []}} diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_expectedrecords_raw2.jsonl b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_expectedrecords_raw2.jsonl index 88b8ee7746c1c..b7c4206c7898d 100644 --- a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_expectedrecords_raw2.jsonl +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_expectedrecords_raw2.jsonl @@ -1,2 +1,2 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2001-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Someone completely different"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2001-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Someone completely different v2"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2001-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Someone completely different"}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2001-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Someone completely different v2"}, "_airbyte_meta": {"changes": []}} diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_messages_after_meta.jsonl b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_messages_after_meta.jsonl new file mode 100644 index 0000000000000..c31da6b35ae7e --- /dev/null +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_messages_after_meta.jsonl @@ -0,0 +1,8 @@ +{"type": "RECORD", "record": {"emitted_at": 2000, "data": {"id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Seattle", "state": "WA"}}}} +{"type": "RECORD", "record": {"emitted_at": 2000, "data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Bob", "address": {"city": "New York", "state": "NY"}}}} +// Set deleted_at to something non-null. Again, T+D doesn't check the actual _value_ of deleted_at (i.e. the fact that it's in the past is irrelevant). +// It only cares whether deleted_at is non-null. So this should delete Bob from the final table (in dedup mode). +{"type": "RECORD", "record": {"emitted_at": 2000, "data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00Z"}}} +// Emit earlier message with _airbyte_meta again with one fixed column. +// Emit a record with an invalid age & address nulled at source. +{"type": "RECORD", "record": {"emitted_at": 2000, "data": {"id1": 2, "id2": 200, "updated_at": "2000-01-02T00:03:00Z", "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date", "address": {"city": "San Francisco", "state": "CA"}}, "meta": {"changes": [{"field": "address", "change": "NULLED", "reason": "SOURCE_RETRIEVAL_ERROR"}]}}} \ No newline at end of file diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_mixedcase_expectedrecords_fullrefresh_append_final.jsonl b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_mixedcase_expectedrecords_fullrefresh_append_final.jsonl new file mode 100644 index 0000000000000..4dff86fcc890b --- /dev/null +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_mixedcase_expectedrecords_fullrefresh_append_final.jsonl @@ -0,0 +1,9 @@ +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00.000000Z", "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00.000000Z", "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00.000000Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[{"field":"age","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"registration_date","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"}]}, "id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00.000000Z", "name": "Charlie"} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00.000000Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} + +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Alice", "address": {"city": "Seattle", "state": "WA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Bob", "address": {"city": "New York", "state": "NY"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00.000000Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00.000000Z"} diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_mixedcase_expectedrecords_raw.jsonl b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_mixedcase_expectedrecords_raw.jsonl new file mode 100644 index 0000000000000..78da60c83138d --- /dev/null +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test-integration/resources/dat/sync2_mixedcase_expectedrecords_raw.jsonl @@ -0,0 +1,10 @@ +// We keep the records from the first sync, _airbyte_meta in raw didn't exist in that version +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00Z", "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"}} +// And append the records from the second sync, _airbyte_meta was added in this version +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Seattle", "state": "WA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Bob", "address": {"city": "New York", "state": "NY"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00Z"}, "_airbyte_meta": {"changes": []}} diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test/resources/expected_spec.json b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test/resources/expected_spec.json index c076616ec0b19..283fe9f9f96e9 100644 --- a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test/resources/expected_spec.json +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test/resources/expected_spec.json @@ -335,6 +335,13 @@ } } ] + }, + "drop_cascade": { + "type": "boolean", + "default": false, + "description": "Drop tables with CASCADE. WARNING! This will delete all data in all dependent objects (views, etc.). Use with caution. This option is intended for usecases which can easily rebuild the dependent objects.", + "title": "Drop tables with CASCADE. (WARNING! Risk of unrecoverable data loss)", + "order": 11 } } } diff --git a/airbyte-integrations/connectors/destination-postgres/build.gradle b/airbyte-integrations/connectors/destination-postgres/build.gradle index 3ccc03586ae55..fb63ffcb088d5 100644 --- a/airbyte-integrations/connectors/destination-postgres/build.gradle +++ b/airbyte-integrations/connectors/destination-postgres/build.gradle @@ -1,10 +1,9 @@ plugins { id 'airbyte-java-connector' - id 'org.jetbrains.kotlin.jvm' version '1.9.22' } airbyteJavaConnector { - cdkVersionRequired = '0.23.18' + cdkVersionRequired = '0.30.0' features = ['db-destinations', 'datastore-postgres', 'typing-deduping'] useLocalCdk = false } diff --git a/airbyte-integrations/connectors/destination-postgres/gradle.properties b/airbyte-integrations/connectors/destination-postgres/gradle.properties index 23da4989675e6..c985d1e8f1753 100644 --- a/airbyte-integrations/connectors/destination-postgres/gradle.properties +++ b/airbyte-integrations/connectors/destination-postgres/gradle.properties @@ -1,3 +1,5 @@ # our testcontainer has issues with too much concurrency. # 4 threads seems to be the sweet spot. testExecutionConcurrency=4 +# large sync test takes a while, add 15m timeout. +JunitMethodExecutionTimeout=15 m diff --git a/airbyte-integrations/connectors/destination-postgres/metadata.yaml b/airbyte-integrations/connectors/destination-postgres/metadata.yaml index 6d4b8efd93264..c6ddb50373424 100644 --- a/airbyte-integrations/connectors/destination-postgres/metadata.yaml +++ b/airbyte-integrations/connectors/destination-postgres/metadata.yaml @@ -5,7 +5,7 @@ data: connectorSubtype: database connectorType: destination definitionId: 25c5221d-dce2-4163-ade9-739ef790f503 - dockerImageTag: 2.0.4 + dockerImageTag: 2.0.9 dockerRepository: airbyte/destination-postgres documentationUrl: https://docs.airbyte.com/integrations/destinations/postgres githubIssueLabel: destination-postgres @@ -22,8 +22,8 @@ data: breakingChanges: 2.0.0: message: > - This version introduces [Destinations V2](https://docs.airbyte.com/release_notes/upgrading_to_destinations_v2/#what-is-destinations-v2), which provides better error handling, incremental delivery of data for large syncs, and improved final table structures. - To review the breaking changes, and how to upgrade, see [here](https://docs.airbyte.com/release_notes/upgrading_to_destinations_v2/#quick-start-to-upgrading). + This version introduces [Destinations V2](https://docs.airbyte.com/release_notes/upgrading_to_destinations_v2/#what-is-destinations-v2), which provides better error handling, incremental delivery of data for large syncs, and improved final table structures. + To review the breaking changes, and how to upgrade, see [here](https://docs.airbyte.com/release_notes/upgrading_to_destinations_v2/#quick-start-to-upgrading). These changes will likely require updates to downstream dbt / SQL models, which we walk through [here](https://docs.airbyte.com/release_notes/upgrading_to_destinations_v2/#updating-downstream-transformations). Selecting `Upgrade` will upgrade **all** connections using this destination at their next sync. For more controlled upgrade [see instructions](https://docs.airbyte.com/release_notes/upgrading_to_destinations_v2/#upgrading-connections-one-by-one-with-dual-writing). upgradeDeadline: "2024-05-31" diff --git a/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresDataAdapter.java b/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresDataAdapter.java deleted file mode 100644 index 2793af8cd1e6e..0000000000000 --- a/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresDataAdapter.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.postgres; - -import io.airbyte.cdk.integrations.destination.jdbc.DataAdapter; -import io.airbyte.commons.json.Jsons; - -public class PostgresDataAdapter extends DataAdapter { - - public PostgresDataAdapter() { - super(jsonNode -> jsonNode.isTextual() && jsonNode.textValue().contains("\u0000"), - jsonNode -> { - final String textValue = jsonNode.textValue().replaceAll("\\u0000", ""); - return Jsons.jsonNode(textValue); - }); - } - -} diff --git a/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresDestination.java b/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresDestination.java index 5e7cfa265968a..a00435bea67e9 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresDestination.java +++ b/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresDestination.java @@ -8,7 +8,6 @@ import static io.airbyte.cdk.integrations.util.PostgresSslConnectionUtils.PARAM_MODE; import static io.airbyte.cdk.integrations.util.PostgresSslConnectionUtils.PARAM_SSL; import static io.airbyte.cdk.integrations.util.PostgresSslConnectionUtils.PARAM_SSL_MODE; -import static io.airbyte.cdk.integrations.util.PostgresSslConnectionUtils.obtainConnectionOptions; import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.ImmutableMap; @@ -20,29 +19,40 @@ import io.airbyte.cdk.integrations.base.Destination; import io.airbyte.cdk.integrations.base.IntegrationRunner; import io.airbyte.cdk.integrations.base.ssh.SshWrappedDestination; +import io.airbyte.cdk.integrations.destination.async.deser.StreamAwareDataTransformer; import io.airbyte.cdk.integrations.destination.jdbc.AbstractJdbcDestination; import io.airbyte.cdk.integrations.destination.jdbc.typing_deduping.JdbcDestinationHandler; import io.airbyte.cdk.integrations.destination.jdbc.typing_deduping.JdbcSqlGenerator; +import io.airbyte.cdk.integrations.util.PostgresSslConnectionUtils; import io.airbyte.commons.json.Jsons; +import io.airbyte.integrations.base.destination.typing_deduping.DestinationHandler; +import io.airbyte.integrations.base.destination.typing_deduping.ParsedCatalog; +import io.airbyte.integrations.base.destination.typing_deduping.SqlGenerator; +import io.airbyte.integrations.base.destination.typing_deduping.migrators.Migration; +import io.airbyte.integrations.destination.postgres.typing_deduping.PostgresDataTransformer; import io.airbyte.integrations.destination.postgres.typing_deduping.PostgresDestinationHandler; +import io.airbyte.integrations.destination.postgres.typing_deduping.PostgresRawTableAirbyteMetaMigration; import io.airbyte.integrations.destination.postgres.typing_deduping.PostgresSqlGenerator; import io.airbyte.integrations.destination.postgres.typing_deduping.PostgresState; import java.net.URLEncoder; import java.nio.charset.StandardCharsets; import java.time.Duration; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.Optional; import org.postgresql.util.PSQLException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class PostgresDestination extends AbstractJdbcDestination implements Destination { +public class PostgresDestination extends AbstractJdbcDestination implements Destination { private static final Logger LOGGER = LoggerFactory.getLogger(PostgresDestination.class); public static final String DRIVER_CLASS = DatabaseDriver.POSTGRESQL.getDriverClassName(); + private static final String DROP_CASCADE_OPTION = "drop_cascade"; + public static Destination sshWrappedDestination() { return new SshWrappedDestination(new PostgresDestination(), JdbcUtils.HOST_LIST_KEY, JdbcUtils.PORT_LIST_KEY); } @@ -84,7 +94,7 @@ protected Map getDefaultConnectionProperties(final JsonNode conf if (DISABLE.equals(config.get(PARAM_SSL_MODE).get(PARAM_MODE).asText())) { additionalParameters.put("sslmode", DISABLE); } else { - additionalParameters.putAll(obtainConnectionOptions(config.get(PARAM_SSL_MODE))); + additionalParameters.putAll(PostgresSslConnectionUtils.obtainConnectionOptions(config.get(PARAM_SSL_MODE))); } } else { additionalParameters.put(JdbcUtils.SSL_KEY, "true"); @@ -124,8 +134,10 @@ public JsonNode toJdbcConfig(final JsonNode config) { } @Override - protected JdbcSqlGenerator getSqlGenerator() { - return new PostgresSqlGenerator(new PostgresSQLNameTransformer()); + protected JdbcSqlGenerator getSqlGenerator(final JsonNode config) { + final JsonNode dropCascadeNode = config.get(DROP_CASCADE_OPTION); + final boolean dropCascade = dropCascadeNode != null && dropCascadeNode.asBoolean(); + return new PostgresSqlGenerator(new PostgresSQLNameTransformer(), dropCascade); } @Override @@ -133,6 +145,19 @@ protected JdbcDestinationHandler getDestinationHandler(String dat return new PostgresDestinationHandler(databaseName, database, rawTableSchema); } + @Override + protected List> getMigrations(JdbcDatabase database, + String databaseName, + SqlGenerator sqlGenerator, + DestinationHandler destinationHandler) { + return List.of(new PostgresRawTableAirbyteMetaMigration(database, databaseName)); + } + + @Override + protected StreamAwareDataTransformer getDataTransformer(ParsedCatalog parsedCatalog, String defaultNamespace) { + return new PostgresDataTransformer(); + } + @Override public boolean isV2Destination() { return true; diff --git a/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresSQLNameTransformer.java b/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresSQLNameTransformer.java index e586f4047995d..0fd0b950fdd7c 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresSQLNameTransformer.java +++ b/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresSQLNameTransformer.java @@ -8,9 +8,36 @@ public class PostgresSQLNameTransformer extends StandardNameTransformer { + // I _think_ overriding these two methods is sufficient to apply the truncation logic everywhere + // but this interface + our superclass are weirdly complicated, so plausibly something is missing + @Override + public String getIdentifier(final String name) { + return truncate(super.getIdentifier(name)); + } + + public String convertStreamName(String input) { + return truncate(super.convertStreamName(input)); + } + @Override public String applyDefaultCase(final String input) { return input.toLowerCase(); } + @Override + // @Deprecated see https://github.com/airbytehq/airbyte/issues/35333 + // We cannot delete these method until connectors don't need old v1 raw table references for + // migration + public String getRawTableName(final String streamName) { + return convertStreamName("_airbyte_raw_" + streamName.toLowerCase()); + } + + /** + * Postgres silently truncates identifiers to 63 characters. Utility method to do that truncation + * explicitly, so that we can detect e.g. name collisions. + */ + private String truncate(String str) { + return str.substring(0, Math.min(str.length(), 63)); + } + } diff --git a/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresSqlOperations.java b/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresSqlOperations.java index 210cc6d9bc4de..933e9af80e8dc 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresSqlOperations.java +++ b/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresSqlOperations.java @@ -8,8 +8,8 @@ import io.airbyte.cdk.db.jdbc.JdbcDatabase; import io.airbyte.cdk.integrations.base.TypingAndDedupingFlag; +import io.airbyte.cdk.integrations.destination.async.model.PartialAirbyteMessage; import io.airbyte.cdk.integrations.destination.jdbc.JdbcSqlOperations; -import io.airbyte.cdk.integrations.destination_async.partial_messages.PartialAirbyteMessage; import java.io.BufferedReader; import java.io.File; import java.io.FileReader; @@ -25,9 +25,7 @@ public class PostgresSqlOperations extends JdbcSqlOperations { - public PostgresSqlOperations() { - super(new PostgresDataAdapter()); - } + public PostgresSqlOperations() {} @Override protected List postCreateTableQueries(final String schemaName, final String tableName) { @@ -56,7 +54,8 @@ protected void insertRecordsInternalV2(final JdbcDatabase database, COLUMN_NAME_AB_RAW_ID, COLUMN_NAME_DATA, COLUMN_NAME_AB_EXTRACTED_AT, - COLUMN_NAME_AB_LOADED_AT); + COLUMN_NAME_AB_LOADED_AT, + COLUMN_NAME_AB_META); } @Override diff --git a/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/typing_deduping/PostgresDataTransformer.java b/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/typing_deduping/PostgresDataTransformer.java new file mode 100644 index 0000000000000..4eb886098e50c --- /dev/null +++ b/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/typing_deduping/PostgresDataTransformer.java @@ -0,0 +1,87 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.postgres.typing_deduping; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import io.airbyte.cdk.integrations.destination.async.deser.StreamAwareDataTransformer; +import io.airbyte.commons.json.Jsons; +import io.airbyte.protocol.models.v0.AirbyteRecordMessageMeta; +import io.airbyte.protocol.models.v0.AirbyteRecordMessageMetaChange; +import io.airbyte.protocol.models.v0.StreamDescriptor; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Function; +import java.util.function.Predicate; +import kotlin.Pair; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; + +public class PostgresDataTransformer implements StreamAwareDataTransformer { + + /* + * This class is copied in its entirety from DataAdapter class to unify logic into one single + * transformer invocation before serializing to string in AsyncStreamConsumer. + */ + + final Predicate filterValueNode; + final Function valueNodeAdapter; + + public PostgresDataTransformer() { + this.filterValueNode = jsonNode -> jsonNode.isTextual() && jsonNode.textValue().contains("\u0000"); + this.valueNodeAdapter = jsonNode -> { + final String textValue = jsonNode.textValue().replaceAll("\\u0000", ""); + return Jsons.jsonNode(textValue); + }; + } + + @NotNull + @Override + public Pair transform(@Nullable StreamDescriptor streamDescriptor, + @Nullable JsonNode data, + @Nullable AirbyteRecordMessageMeta meta) { + final List metaChanges = new ArrayList<>(); + if (meta != null && meta.getChanges() != null) { + metaChanges.addAll(meta.getChanges()); + } + // Does inplace changes in the actual JsonNode reference. + adapt(data); + return new Pair<>(data, new AirbyteRecordMessageMeta().withChanges(metaChanges)); + } + + public void adapt(final JsonNode messageData) { + if (messageData != null) { + adaptAllValueNodes(messageData); + } + } + + private void adaptAllValueNodes(final JsonNode rootNode) { + adaptValueNodes(null, rootNode, null); + } + + /** + * The method inspects json node. In case, it's a value node we check the node by CheckFunction and + * apply ValueNodeAdapter. Filtered nodes will be updated by adapted version. If element is an array + * or an object, this we run the method recursively for them. + * + * @param fieldName Name of a json node + * @param node Json node + * @param parentNode Parent json node + */ + private void adaptValueNodes(final String fieldName, final JsonNode node, final JsonNode parentNode) { + if (node.isValueNode() && filterValueNode.test(node)) { + if (fieldName != null) { + final var adaptedNode = valueNodeAdapter.apply(node); + ((ObjectNode) parentNode).set(fieldName, adaptedNode); + } else + throw new RuntimeException("Unexpected value node without fieldName. Node: " + node); + } else if (node.isArray()) { + node.elements().forEachRemaining(arrayNode -> adaptValueNodes(null, arrayNode, node)); + } else { + node.fields().forEachRemaining(stringJsonNodeEntry -> adaptValueNodes(stringJsonNodeEntry.getKey(), stringJsonNodeEntry.getValue(), node)); + } + } + +} diff --git a/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/typing_deduping/PostgresDestinationHandler.java b/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/typing_deduping/PostgresDestinationHandler.java index ac235039aae13..4462dbffe13ad 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/typing_deduping/PostgresDestinationHandler.java +++ b/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/typing_deduping/PostgresDestinationHandler.java @@ -38,7 +38,8 @@ protected String toJdbcTypeName(AirbyteType airbyteType) { @Override protected PostgresState toDestinationState(JsonNode json) { return new PostgresState( - json.hasNonNull("needsSoftReset") && json.get("needsSoftReset").asBoolean()); + json.hasNonNull("needsSoftReset") && json.get("needsSoftReset").asBoolean(), + json.hasNonNull("isAirbyteMetaPresentInRaw") && json.get("isAirbyteMetaPresentInRaw").asBoolean()); } private String toJdbcTypeName(final AirbyteProtocolType airbyteProtocolType) { diff --git a/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/typing_deduping/PostgresRawTableAirbyteMetaMigration.kt b/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/typing_deduping/PostgresRawTableAirbyteMetaMigration.kt new file mode 100644 index 0000000000000..63fabd788be81 --- /dev/null +++ b/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/typing_deduping/PostgresRawTableAirbyteMetaMigration.kt @@ -0,0 +1,85 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.postgres.typing_deduping + +import io.airbyte.cdk.db.jdbc.JdbcDatabase +import io.airbyte.cdk.integrations.base.JavaBaseConstants +import io.airbyte.cdk.integrations.destination.jdbc.typing_deduping.JdbcDestinationHandler +import io.airbyte.integrations.base.destination.typing_deduping.DestinationHandler +import io.airbyte.integrations.base.destination.typing_deduping.DestinationInitialStatus +import io.airbyte.integrations.base.destination.typing_deduping.Sql +import io.airbyte.integrations.base.destination.typing_deduping.StreamConfig +import io.airbyte.integrations.base.destination.typing_deduping.migrators.Migration +import io.airbyte.integrations.destination.postgres.typing_deduping.PostgresSqlGenerator.* +import org.jooq.conf.ParamType +import org.jooq.impl.DSL +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +class PostgresRawTableAirbyteMetaMigration( + private val database: JdbcDatabase, + private val databaseName: String +) : Migration { + private val logger: Logger = LoggerFactory.getLogger(this.javaClass) + + // TODO: This class is almost similar to RedshiftAirbyteMetaMigration except the JSONB type. + // try to unify later. + override fun migrateIfNecessary( + destinationHandler: DestinationHandler, + stream: StreamConfig, + state: DestinationInitialStatus + ): Migration.MigrationResult { + if (!state.initialRawTableStatus.rawTableExists) { + // The raw table doesn't exist. No migration necessary. Update the state. + logger.info( + "Skipping RawTableAirbyteMetaMigration for ${stream.id.originalNamespace}.${stream.id.originalName} because the raw table doesn't exist" + ) + return Migration.MigrationResult( + state.destinationState.copy(isAirbyteMetaPresentInRaw = true), + false + ) + } + + // The table should exist because we checked for it above, so safe to get it. + val existingRawTable = + JdbcDestinationHandler.findExistingTable( + database, + databaseName, + stream.id.rawNamespace, + stream.id.rawName + ) + .get() + + if (existingRawTable.columns[JavaBaseConstants.COLUMN_NAME_AB_META] != null) { + // The raw table already has the _airbyte_meta column. No migration necessary. Update + // the state. + return Migration.MigrationResult( + state.destinationState.copy(isAirbyteMetaPresentInRaw = true), + false + ) + } + + logger.info( + "Executing RawTableAirbyteMetaMigration for ${stream.id.rawNamespace}.${stream.id.rawName} for real" + ) + + destinationHandler.execute( + Sql.of( + DSL.alterTable(DSL.name(stream.id.rawNamespace, stream.id.rawName)) + .addColumn(DSL.name(JavaBaseConstants.COLUMN_NAME_AB_META), JSONB_TYPE) + .getSQL(ParamType.INLINED) + ) + ) + + // Update the state. We didn't modify the table in a relevant way, so don't invalidate the + // InitialState. + // We will not do a soft reset since it could be time-consuming, instead we leave the old + // data i.e. `errors` instead of `changes` as is since this column is controlled by us. + return Migration.MigrationResult( + state.destinationState.copy(needsSoftReset = false, isAirbyteMetaPresentInRaw = true), + false + ) + } +} diff --git a/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/typing_deduping/PostgresSqlGenerator.java b/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/typing_deduping/PostgresSqlGenerator.java index 9d7217e3f8266..85c30179c0af4 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/typing_deduping/PostgresSqlGenerator.java +++ b/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/typing_deduping/PostgresSqlGenerator.java @@ -30,6 +30,8 @@ import io.airbyte.integrations.base.destination.typing_deduping.StreamConfig; import io.airbyte.integrations.base.destination.typing_deduping.StreamId; import io.airbyte.integrations.base.destination.typing_deduping.Struct; +import io.airbyte.protocol.models.AirbyteRecordMessageMetaChange.Change; +import io.airbyte.protocol.models.AirbyteRecordMessageMetaChange.Reason; import io.airbyte.protocol.models.v0.DestinationSyncMode; import java.util.ArrayList; import java.util.LinkedHashMap; @@ -48,10 +50,17 @@ public class PostgresSqlGenerator extends JdbcSqlGenerator { - public static final DataType JSONB_TYPE = new DefaultDataType<>(null, Object.class, "jsonb"); + public static final DataType JSONB_TYPE = new DefaultDataType<>(SQLDialect.POSTGRES, Object.class, "jsonb"); - public PostgresSqlGenerator(final NamingConventionTransformer namingTransformer) { - super(namingTransformer); + public static final String CASE_STATEMENT_SQL_TEMPLATE = "CASE WHEN {0} THEN {1} ELSE {2} END "; + + private static final String AB_META_COLUMN_CHANGES_KEY = "changes"; + private static final String AB_META_CHANGES_FIELD_KEY = "field"; + private static final String AB_META_CHANGES_CHANGE_KEY = "change"; + private static final String AB_META_CHANGES_REASON_KEY = "reason"; + + public PostgresSqlGenerator(final NamingConventionTransformer namingTransformer, final boolean cascadeDrop) { + super(namingTransformer, cascadeDrop); } @Override @@ -62,11 +71,12 @@ public StreamId buildStreamId(final String namespace, final String name, final S // To keep it consistent when querying raw table in T+D query, convert it to lowercase. // TODO: This logic should be unified across Raw and final table operations in a single class // operating on a StreamId. + final String streamName = getNamingTransformer().getIdentifier(StreamId.concatenateRawTableName(namespace, name)).toLowerCase(); return new StreamId( - namingTransformer.getNamespace(namespace), - namingTransformer.convertStreamName(name), - namingTransformer.getNamespace(rawNamespaceOverride).toLowerCase(), - namingTransformer.convertStreamName(StreamId.concatenateRawTableName(namespace, name)).toLowerCase(), + getNamingTransformer().getNamespace(namespace), + getNamingTransformer().convertStreamName(name), + getNamingTransformer().getNamespace(rawNamespaceOverride).toLowerCase(), + streamName, namespace, name); } @@ -107,14 +117,14 @@ public DataType toDialectType(AirbyteProtocolType airbyteProtocolType) { @Override public Sql createTable(final StreamConfig stream, final String suffix, final boolean force) { final List statements = new ArrayList<>(); - final Name finalTableName = name(stream.id().finalNamespace(), stream.id().finalName() + suffix); + final Name finalTableName = name(stream.getId().getFinalNamespace(), stream.getId().getFinalName() + suffix); statements.add(super.createTable(stream, suffix, force)); - if (stream.destinationSyncMode() == DestinationSyncMode.APPEND_DEDUP) { + if (stream.getDestinationSyncMode() == DestinationSyncMode.APPEND_DEDUP) { // An index for our ROW_NUMBER() PARTITION BY pk ORDER BY cursor, extracted_at function - final List pkNames = stream.primaryKey().stream() - .map(pk -> quotedName(pk.name())) + final List pkNames = stream.getPrimaryKey().stream() + .map(pk -> quotedName(pk.getName())) .toList(); statements.add(Sql.of(getDslContext().createIndex().on( finalTableName, @@ -122,7 +132,7 @@ public Sql createTable(final StreamConfig stream, final String suffix, final boo pkNames.stream(), // if cursor is present, then a stream containing its name // but if no cursor, then empty stream - stream.cursor().stream().map(cursor -> quotedName(cursor.name())), + stream.getCursor().stream().map(cursor -> quotedName(cursor.getName())), Stream.of(name(COLUMN_NAME_AB_EXTRACTED_AT))).flatMap(Function.identity()).toList()) .getSQL())); } @@ -141,12 +151,12 @@ public Sql createTable(final StreamConfig stream, final String suffix, final boo @Override protected List createIndexSql(final StreamConfig stream, final String suffix) { - if (stream.destinationSyncMode() == DestinationSyncMode.APPEND_DEDUP && !stream.primaryKey().isEmpty()) { + if (stream.getDestinationSyncMode() == DestinationSyncMode.APPEND_DEDUP && !stream.getPrimaryKey().isEmpty()) { return List.of( getDslContext().createIndex().on( - name(stream.id().finalNamespace(), stream.id().finalName() + suffix), - stream.primaryKey().stream() - .map(pk -> quotedName(pk.name())) + name(stream.getId().getFinalNamespace(), stream.getId().getFinalName() + suffix), + stream.getPrimaryKey().stream() + .map(pk -> quotedName(pk.getName())) .toList()) .getSQL()); } else { @@ -162,7 +172,7 @@ protected List> extractRawDataFields(final LinkedHashMap castedField( extractColumnAsJson(column.getKey()), column.getValue(), - column.getKey().name(), + column.getKey().getName(), useExpensiveSaferCasting)) .collect(Collectors.toList()); } @@ -218,66 +228,71 @@ protected Field castedField( } } - // TODO this isn't actually used right now... can we refactor this out? - // (redshift is doing something interesting with this method, so leaving it for now) @Override protected Field castedField(final Field field, final AirbyteProtocolType type, final boolean useExpensiveSaferCasting) { return cast(field, toDialectType(type)); } + private Field jsonBuildObject(Field... arguments) { + return function("JSONB_BUILD_OBJECT", JSONB_TYPE, arguments); + } + @Override protected Field buildAirbyteMetaColumn(final LinkedHashMap columns) { - final Field[] dataFieldErrors = columns + final List> dataFieldErrors = columns .entrySet() .stream() .map(column -> toCastingErrorCaseStmt(column.getKey(), column.getValue())) - .toArray(Field[]::new); - return function( - "JSONB_BUILD_OBJECT", - JSONB_TYPE, - val("errors"), - function("ARRAY_REMOVE", JSONB_TYPE, array(dataFieldErrors), val((String) null))).as(COLUMN_NAME_AB_META); + .toList(); + final Field rawTableChangesArray = + field("ARRAY(SELECT jsonb_array_elements_text({0}#>'{changes}'))::jsonb[]", field(name(COLUMN_NAME_AB_META))); + + // Jooq is inferring and casting as int[] for empty fields array call. So explicitly casting it to + // jsonb[] on empty array + final Field finalTableChangesArray = dataFieldErrors.isEmpty() ? field("ARRAY[]::jsonb[]") + : function("ARRAY_REMOVE", JSONB_TYPE, array(dataFieldErrors).cast(JSONB_TYPE.getArrayDataType()), val((String) null)); + return jsonBuildObject(val(AB_META_COLUMN_CHANGES_KEY), + field("ARRAY_CAT({0}, {1})", finalTableChangesArray, rawTableChangesArray)).as(COLUMN_NAME_AB_META); + } + + private Field nulledChangeObject(String fieldName) { + return jsonBuildObject(val(AB_META_CHANGES_FIELD_KEY), val(fieldName), + val(AB_META_CHANGES_CHANGE_KEY), val(Change.NULLED), + val(AB_META_CHANGES_REASON_KEY), val(Reason.DESTINATION_TYPECAST_ERROR)); } - private Field toCastingErrorCaseStmt(final ColumnId column, final AirbyteType type) { + private Field toCastingErrorCaseStmt(final ColumnId column, final AirbyteType type) { final Field extract = extractColumnAsJson(column); - if (type instanceof Struct) { - // If this field is a struct, verify that the raw data is an object or null. - return case_() - .when( - extract.isNotNull() - .and(jsonTypeof(extract).notIn("object", "null")), - val("Problem with `" + column.originalName() + "`")) - .else_(val((String) null)); - } else if (type instanceof Array) { - // Do the same for arrays. - return case_() - .when( - extract.isNotNull() - .and(jsonTypeof(extract).notIn("array", "null")), - val("Problem with `" + column.originalName() + "`")) - .else_(val((String) null)); - } else if (type == AirbyteProtocolType.UNKNOWN || type == AirbyteProtocolType.STRING) { + + // If this field is a struct, verify that the raw data is an object or null. + // Do the same for arrays. + return switch (type) { + case Struct ignored -> field(CASE_STATEMENT_SQL_TEMPLATE, + extract.isNotNull().and(jsonTypeof(extract).notIn("object", "null")), + nulledChangeObject(column.getOriginalName()), + cast(val((Object) null), JSONB_TYPE)); + case Array ignored -> field(CASE_STATEMENT_SQL_TEMPLATE, + extract.isNotNull().and(jsonTypeof(extract).notIn("array", "null")), + nulledChangeObject(column.getOriginalName()), + cast(val((Object) null), JSONB_TYPE)); // Unknown types require no casting, so there's never an error. // Similarly, everything can cast to string without error. - return val((String) null); - } else { - // For other type: If the raw data is not NULL or 'null', but the casted data is NULL, - // then we have a typing error. - return case_() - .when( - extract.isNotNull() - .and(jsonTypeof(extract).ne("null")) - .and(castedField(extract, type, true).isNull()), - val("Problem with `" + column.originalName() + "`")) - .else_(val((String) null)); - } + case AirbyteProtocolType airbyteProtocolType + when (airbyteProtocolType == AirbyteProtocolType.UNKNOWN || airbyteProtocolType == AirbyteProtocolType.STRING) -> + cast(val((Object) null), JSONB_TYPE); + default -> field(CASE_STATEMENT_SQL_TEMPLATE, + extract.isNotNull() + .and(jsonTypeof(extract).ne("null")) + .and(castedField(extract, type, true).isNull()), + nulledChangeObject(column.getOriginalName()), + cast(val((Object) null), JSONB_TYPE)); + }; } @Override protected Condition cdcDeletedAtNotNullCondition() { return field(name(COLUMN_NAME_AB_LOADED_AT)).isNotNull() - .and(jsonTypeof(extractColumnAsJson(cdcDeletedAtColumn)).ne("null")); + .and(jsonTypeof(extractColumnAsJson(getCdcDeletedAtColumn())).ne("null")); } @Override @@ -285,12 +300,12 @@ protected Field getRowNumber(final List primaryKeys, final Op // literally identical to redshift's getRowNumber implementation, changes here probably should // be reflected there final List> primaryKeyFields = - primaryKeys != null ? primaryKeys.stream().map(columnId -> field(quotedName(columnId.name()))).collect(Collectors.toList()) + primaryKeys != null ? primaryKeys.stream().map(columnId -> field(quotedName(columnId.getName()))).collect(Collectors.toList()) : new ArrayList<>(); final List> orderedFields = new ArrayList<>(); // We can still use Jooq's field to get the quoted name with raw sql templating. // jooq's .desc returns SortField instead of Field and NULLS LAST doesn't work with it - cursor.ifPresent(columnId -> orderedFields.add(field("{0} desc NULLS LAST", field(quotedName(columnId.name()))))); + cursor.ifPresent(columnId -> orderedFields.add(field("{0} desc NULLS LAST", field(quotedName(columnId.getName()))))); orderedFields.add(field("{0} desc", quotedName(COLUMN_NAME_AB_EXTRACTED_AT))); return rowNumber() .over() @@ -302,7 +317,7 @@ protected Field getRowNumber(final List primaryKeys, final Op * Extract a raw field, leaving it as jsonb */ private Field extractColumnAsJson(final ColumnId column) { - return field("{0} -> {1}", name(COLUMN_NAME_DATA), val(column.originalName())); + return field("{0} -> {1}", name(COLUMN_NAME_DATA), val(column.getOriginalName())); } private Field jsonTypeof(final Field field) { diff --git a/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/typing_deduping/PostgresState.kt b/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/typing_deduping/PostgresState.kt index b9380fe033d6f..7c31a29296b3f 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/typing_deduping/PostgresState.kt +++ b/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/typing_deduping/PostgresState.kt @@ -6,11 +6,13 @@ package io.airbyte.integrations.destination.postgres.typing_deduping import io.airbyte.integrations.base.destination.typing_deduping.migrators.MinimumDestinationState -data class PostgresState(val needsSoftReset: Boolean) : MinimumDestinationState { +data class PostgresState(val needsSoftReset: Boolean, val isAirbyteMetaPresentInRaw: Boolean) : + MinimumDestinationState { override fun needsSoftReset(): Boolean { return needsSoftReset } + @Suppress("UNCHECKED_CAST") override fun withSoftReset(needsSoftReset: Boolean): T { return copy(needsSoftReset = needsSoftReset) as T } diff --git a/airbyte-integrations/connectors/destination-postgres/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-postgres/src/main/resources/spec.json index 4c775be8b887f..90ce568111c60 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/destination-postgres/src/main/resources/spec.json @@ -228,6 +228,13 @@ "description": "Disable Writing Final Tables. WARNING! The data format in _airbyte_data is likely stable but there are no guarantees that other metadata columns will remain the same in future versions", "title": "Disable Final Tables. (WARNING! Unstable option; Columns in raw table schema might change between versions)", "order": 10 + }, + "drop_cascade": { + "type": "boolean", + "default": false, + "description": "Drop tables with CASCADE. WARNING! This will delete all data in all dependent objects (views, etc.). Use with caution. This option is intended for usecases which can easily rebuild the dependent objects.", + "title": "Drop tables with CASCADE. (WARNING! Risk of unrecoverable data loss)", + "order": 11 } } } diff --git a/airbyte-integrations/connectors/destination-postgres/src/test-integration/java/io/airbyte/integrations/destination/postgres/typing_deduping/PostgresSqlGeneratorIntegrationTest.java b/airbyte-integrations/connectors/destination-postgres/src/test-integration/java/io/airbyte/integrations/destination/postgres/typing_deduping/PostgresSqlGeneratorIntegrationTest.java index 23cbda8a5b05d..bafad1ba0f565 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/test-integration/java/io/airbyte/integrations/destination/postgres/typing_deduping/PostgresSqlGeneratorIntegrationTest.java +++ b/airbyte-integrations/connectors/destination-postgres/src/test-integration/java/io/airbyte/integrations/destination/postgres/typing_deduping/PostgresSqlGeneratorIntegrationTest.java @@ -5,6 +5,10 @@ package io.airbyte.integrations.destination.postgres.typing_deduping; import static io.airbyte.integrations.destination.postgres.typing_deduping.PostgresSqlGenerator.JSONB_TYPE; +import static org.jooq.impl.DSL.createView; +import static org.jooq.impl.DSL.quotedName; +import static org.jooq.impl.DSL.select; +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -26,6 +30,7 @@ import org.jooq.DataType; import org.jooq.Field; import org.jooq.SQLDialect; +import org.jooq.conf.ParamType; import org.jooq.impl.DSL; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; @@ -71,12 +76,12 @@ protected DataType getStructType() { @Override protected JdbcSqlGenerator getSqlGenerator() { - return new PostgresSqlGenerator(new PostgresSQLNameTransformer()); + return new PostgresSqlGenerator(new PostgresSQLNameTransformer(), false); } @Override protected DestinationHandler getDestinationHandler() { - return new PostgresDestinationHandler(databaseName, database, namespace); + return new PostgresDestinationHandler(databaseName, database, getNamespace()); } @Override @@ -92,14 +97,33 @@ protected Field toJsonValue(final String valueAsString) { @Test @Override public void testCreateTableIncremental() throws Exception { - final Sql sql = generator.createTable(incrementalDedupStream, "", false); - destinationHandler.execute(sql); + final Sql sql = getGenerator().createTable(getIncrementalDedupStream(), "", false); + getDestinationHandler().execute(sql); - List> initialStatuses = destinationHandler.gatherInitialState(List.of(incrementalDedupStream)); + List> initialStatuses = getDestinationHandler().gatherInitialState(List.of(getIncrementalDedupStream())); assertEquals(1, initialStatuses.size()); final DestinationInitialStatus initialStatus = initialStatuses.getFirst(); assertTrue(initialStatus.isFinalTablePresent()); assertFalse(initialStatus.isSchemaMismatch()); } + /** + * Verify that we correctly DROP...CASCADE the final table when cascadeDrop is enabled. + */ + @Test + public void testCascadeDrop() throws Exception { + // Explicitly create a sqlgenerator with cascadeDrop=true + final PostgresSqlGenerator generator = new PostgresSqlGenerator(new PostgresSQLNameTransformer(), true); + // Create a table, then create a view referencing it + getDestinationHandler().execute(generator.createTable(getIncrementalAppendStream(), "", false)); + database.execute(createView(quotedName(getIncrementalAppendStream().getId().getFinalNamespace(), "example_view")) + .as(select().from(quotedName(getIncrementalAppendStream().getId().getFinalNamespace(), getIncrementalAppendStream().getId().getFinalName()))) + .getSQL(ParamType.INLINED)); + // Create a "soft reset" table + getDestinationHandler().execute(generator.createTable(getIncrementalDedupStream(), "_soft_reset", false)); + + // Overwriting the first table with the second table should succeed. + assertDoesNotThrow(() -> getDestinationHandler().execute(generator.overwriteFinalTable(getIncrementalDedupStream().getId(), "_soft_reset"))); + } + } diff --git a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync1_cursorchange_expectedrecords_dedup_final.jsonl b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync1_cursorchange_expectedrecords_dedup_final.jsonl index 9f11b2293a95b..1710a288dde5f 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync1_cursorchange_expectedrecords_dedup_final.jsonl +++ b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync1_cursorchange_expectedrecords_dedup_final.jsonl @@ -1,3 +1,3 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 200, "old_cursor": 1, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 201, "old_cursor": 2, "name": "Bob", "address": {"city": "Boston", "state": "MA"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":["Problem with `age`", "Problem with `registration_date`"]}, "id1": 2, "id2": 200, "old_cursor": 3, "name": "Charlie"} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 200, "old_cursor": 1, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 201, "old_cursor": 2, "name": "Bob", "address": {"city": "Boston", "state": "MA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[{"field":"age","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"registration_date","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"}]}, "id1": 2, "id2": 200, "old_cursor": 3, "name": "Charlie"} diff --git a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync1_cursorchange_expectedrecords_dedup_raw.jsonl b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync1_cursorchange_expectedrecords_dedup_raw.jsonl index 7f75f0f804e25..fdd7c1d4723dc 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync1_cursorchange_expectedrecords_dedup_raw.jsonl +++ b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync1_cursorchange_expectedrecords_dedup_raw.jsonl @@ -1,4 +1,4 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "old_cursor": 0, "_ab_cdc_deleted_at": null, "name" :"Alice", "address": {"city": "San Francisco", "state": "CA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "old_cursor": 1, "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "old_cursor": 2, "name": "Bob", "address": {"city": "Boston", "state": "MA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 2, "id2": 200, "old_cursor": 3, "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "old_cursor": 0, "_ab_cdc_deleted_at": null, "name" :"Alice", "address": {"city": "San Francisco", "state": "CA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "old_cursor": 1, "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "old_cursor": 2, "name": "Bob", "address": {"city": "Boston", "state": "MA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 2, "id2": 200, "old_cursor": 3, "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}, "_airbyte_meta": {"changes": []}} diff --git a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync1_expectedrecords_dedup_final.jsonl b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync1_expectedrecords_dedup_final.jsonl index 61024be7867d0..8f75c02eb4603 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync1_expectedrecords_dedup_final.jsonl +++ b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync1_expectedrecords_dedup_final.jsonl @@ -1,5 +1,5 @@ // Keep the Alice record with more recent updated_at -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00.000000Z", "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00.000000Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":["Problem with `age`", "Problem with `registration_date`"]}, "id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00.000000Z", "name": "Charlie"} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00.000000Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00.000000Z", "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00.000000Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[{"field":"age","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"registration_date","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"address","change":"NULLED","reason":"SOURCE_RETRIEVAL_ERROR"}]}, "id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00.000000Z", "name": "Charlie"} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00.000000Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} diff --git a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync1_expectedrecords_dedup_final2.jsonl b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync1_expectedrecords_dedup_final2.jsonl index b2bf47df66c11..1b29b504aadd4 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync1_expectedrecords_dedup_final2.jsonl +++ b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync1_expectedrecords_dedup_final2.jsonl @@ -1 +1 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2001-01-01T00:00:00.000000Z", "name": "Someone completely different"} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2001-01-01T00:00:00.000000Z", "name": "Someone completely different"} diff --git a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync1_expectedrecords_nondedup_final.jsonl b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync1_expectedrecords_nondedup_final.jsonl index f3a225756cedf..1164999f33e0e 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync1_expectedrecords_nondedup_final.jsonl +++ b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync1_expectedrecords_nondedup_final.jsonl @@ -1,6 +1,6 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00.000000Z", "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00.000000Z", "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00.000000Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00.000000Z", "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00.000000Z", "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00.000000Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}} // Invalid columns are nulled out (i.e. SQL null, not JSON null) -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":["Problem with `age`", "Problem with `registration_date`"]}, "id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00.000000Z", "name": "Charlie"} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00.000000Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} \ No newline at end of file +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[{"field":"age","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"registration_date","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"address","change":"NULLED","reason":"SOURCE_RETRIEVAL_ERROR"}]}, "id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00.000000Z", "name": "Charlie"} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00.000000Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} \ No newline at end of file diff --git a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync1_expectedrecords_raw.jsonl b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync1_expectedrecords_raw.jsonl index 4012c086a9e61..c31f21a205c7d 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync1_expectedrecords_raw.jsonl +++ b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync1_expectedrecords_raw.jsonl @@ -1,6 +1,6 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}}, "_airbyte_meta": {"changes": []}} // Invalid data is still allowed in the raw table. -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00Z", "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00Z", "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}, "_airbyte_meta": {"changes": [{"field": "address", "change": "NULLED", "reason": "SOURCE_RETRIEVAL_ERROR"}]}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"}, "_airbyte_meta": {"changes": []}} diff --git a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync1_expectedrecords_raw2.jsonl b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync1_expectedrecords_raw2.jsonl index b489accda1bb7..45c12cb18720a 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync1_expectedrecords_raw2.jsonl +++ b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync1_expectedrecords_raw2.jsonl @@ -1 +1 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2001-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Someone completely different"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2001-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Someone completely different"}, "_airbyte_meta": {"changes": []}} diff --git a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync1_messages_before_meta.jsonl b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync1_messages_before_meta.jsonl new file mode 100644 index 0000000000000..a37e8a603749e --- /dev/null +++ b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync1_messages_before_meta.jsonl @@ -0,0 +1,14 @@ +// emitted_at:1000 is equal to 1970-01-01 00:00:01Z, which is what you'll see in the expected records. +// This obviously makes no sense in relation to updated_at being in the year 2000, but that's OK +// because (from destinations POV) updated_at has no relation to emitted_at. +{"type": "RECORD", "record": {"emitted_at": 1000, "data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}}}} +// Emit a second record for id=(1,200) with a different updated_at. This generally doesn't happen +// in full refresh syncs - but if T+D is implemented correctly, it shouldn't matter +// (i.e. both records should be written to the final table). +{"type": "RECORD", "record": {"emitted_at": 1000, "data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}}} +// Emit a record with no _ab_cdc_deleted_at field. CDC sources typically emit an explicit null, but we should handle both cases. +{"type": "RECORD", "record": {"emitted_at": 1000, "data": {"id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}}}} +// Emit a record with an invalid age & address nulled at source. +{"type": "RECORD", "record": {"emitted_at": 1000, "data": {"id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00Z", "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}, "meta": {"changes": [{"field": "address", "change": "NULLED", "reason": "SOURCE_RETRIEVAL_ERROR"}]}}} +// Emit a record with interesting characters in one of the values. +{"type": "RECORD", "record": {"emitted_at": 1000, "data": {"id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"}}} diff --git a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_cursorchange_expectedrecords_incremental_dedup_final.jsonl b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_cursorchange_expectedrecords_incremental_dedup_final.jsonl index c26d4a49aacd7..b346de4404284 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_cursorchange_expectedrecords_incremental_dedup_final.jsonl +++ b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_cursorchange_expectedrecords_incremental_dedup_final.jsonl @@ -1,3 +1,3 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Alice", "address": {"city": "Seattle", "state": "WA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Alice", "address": {"city": "Seattle", "state": "WA"}} // Charlie wasn't reemitted with updated_at, so it still has a null cursor -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":["Problem with `age`", "Problem with `registration_date`"]}, "id1": 2, "id2": 200, "name": "Charlie"} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[{"field":"age","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"registration_date","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"}]}, "id1": 2, "id2": 200, "name": "Charlie"} diff --git a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_cursorchange_expectedrecords_incremental_dedup_raw.jsonl b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_cursorchange_expectedrecords_incremental_dedup_raw.jsonl index 03f28e155af53..10ba449ad2e4a 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_cursorchange_expectedrecords_incremental_dedup_raw.jsonl +++ b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_cursorchange_expectedrecords_incremental_dedup_raw.jsonl @@ -1,7 +1,7 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "old_cursor": 0, "_ab_cdc_deleted_at": null, "name" :"Alice", "address": {"city": "San Francisco", "state": "CA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "old_cursor": 1, "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "old_cursor": 2, "name": "Bob", "address": {"city": "Boston", "state": "MA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 2, "id2": 200, "old_cursor": 3, "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Seattle", "state": "WA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Bob", "address": {"city": "New York", "state": "NY"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00Z"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "old_cursor": 0, "_ab_cdc_deleted_at": null, "name" :"Alice", "address": {"city": "San Francisco", "state": "CA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "old_cursor": 1, "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "old_cursor": 2, "name": "Bob", "address": {"city": "Boston", "state": "MA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 2, "id2": 200, "old_cursor": 3, "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Seattle", "state": "WA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Bob", "address": {"city": "New York", "state": "NY"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00Z"}, "_airbyte_meta": {"changes": []}} diff --git a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_append_final.jsonl b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_append_final.jsonl index 0989dfc17ed07..413807314c8a2 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_append_final.jsonl +++ b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_append_final.jsonl @@ -1,9 +1,9 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00.000000Z", "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00.000000Z", "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00.000000Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":["Problem with `age`", "Problem with `registration_date`"]}, "id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00.000000Z", "name": "Charlie"} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00.000000Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00.000000Z", "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00.000000Z", "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00.000000Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[{"field":"age","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"registration_date","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"address","change":"NULLED","reason":"SOURCE_RETRIEVAL_ERROR"}]}, "id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00.000000Z", "name": "Charlie"} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00.000000Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Alice", "address": {"city": "Seattle", "state": "WA"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"errors":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Bob", "address": {"city": "New York", "state": "NY"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"errors":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00.000000Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00.000000Z"} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Alice", "address": {"city": "Seattle", "state": "WA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Bob", "address": {"city": "New York", "state": "NY"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00.000000Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00.000000Z"} diff --git a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_append_mixed_meta_final.jsonl b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_append_mixed_meta_final.jsonl new file mode 100644 index 0000000000000..315e3707930d5 --- /dev/null +++ b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_append_mixed_meta_final.jsonl @@ -0,0 +1,10 @@ +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00.000000Z", "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00.000000Z", "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00.000000Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":["Problem with `age`","Problem with `registration_date`"]}, "id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00.000000Z", "name": "Charlie"} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00.000000Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} + +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Alice", "address": {"city": "Seattle", "state": "WA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"changes":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Bob", "address": {"city": "New York", "state": "NY"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"changes":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00.000000Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00.000000Z"} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"changes":[{"field":"age","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"registration_date","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"address","change":"NULLED","reason":"SOURCE_RETRIEVAL_ERROR"}]}, "id1": 2, "id2": 200, "updated_at": "2000-01-02T00:03:00.000000Z", "name": "Charlie", "address": {"city": "San Francisco", "state": "CA"}} \ No newline at end of file diff --git a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_overwrite_final.jsonl b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_overwrite_final.jsonl index 9d1f1499469fc..0a4deced5cefc 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_overwrite_final.jsonl +++ b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_overwrite_final.jsonl @@ -1,3 +1,3 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Alice", "address": {"city": "Seattle", "state": "WA"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"errors":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Bob", "address": {"city": "New York", "state": "NY"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"errors":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00.000000Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00.000000Z"} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Alice", "address": {"city": "Seattle", "state": "WA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"changes":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Bob", "address": {"city": "New York", "state": "NY"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"changes":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00.000000Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00.000000Z"} diff --git a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_overwrite_raw.jsonl b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_overwrite_raw.jsonl index 33bc3280be274..55d0a3af55826 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_overwrite_raw.jsonl +++ b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_overwrite_raw.jsonl @@ -1,3 +1,3 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Seattle", "state": "WA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Bob", "address": {"city": "New York", "state": "NY"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00Z"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Seattle", "state": "WA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Bob", "address": {"city": "New York", "state": "NY"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00Z"}, "_airbyte_meta": {"changes": []}} diff --git a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_final.jsonl b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_final.jsonl index fd2a4b3adbf37..aeba20f60e2a5 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_final.jsonl +++ b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_final.jsonl @@ -1,4 +1,4 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Alice", "address": {"city": "Seattle", "state": "WA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Alice", "address": {"city": "Seattle", "state": "WA"}} // Delete Bob, keep Charlie -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":["Problem with `age`", "Problem with `registration_date`"]}, "id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00.000000Z", "name": "Charlie"} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00.000000Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} \ No newline at end of file +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[{"field":"age","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"registration_date","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"address","change":"NULLED","reason":"SOURCE_RETRIEVAL_ERROR"}]}, "id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00.000000Z", "name": "Charlie"} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00.000000Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} \ No newline at end of file diff --git a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_final2.jsonl b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_final2.jsonl index 53c304c89d311..69eeec6bab90b 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_final2.jsonl +++ b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_final2.jsonl @@ -1 +1 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2001-01-02T00:00:00.000000Z", "name": "Someone completely different v2"} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2001-01-02T00:00:00.000000Z", "name": "Someone completely different v2"} diff --git a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_meta_final.jsonl b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_meta_final.jsonl new file mode 100644 index 0000000000000..eb63a8d0a8bf0 --- /dev/null +++ b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_meta_final.jsonl @@ -0,0 +1,5 @@ +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Alice", "address": {"city": "Seattle", "state": "WA"}} +// Delete Bob, updated Charlie +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"changes":[{"field":"age","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"registration_date","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"address","change":"NULLED","reason":"SOURCE_RETRIEVAL_ERROR"}]}, "id1": 2, "id2": 200, "updated_at": "2000-01-02T00:03:00.000000Z", "name": "Charlie", "address": {"city": "San Francisco", "state": "CA"}} +// Record before meta in raw table will continue to have errors. +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00.000000Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} diff --git a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_expectedrecords_mixed_meta_raw.jsonl b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_expectedrecords_mixed_meta_raw.jsonl new file mode 100644 index 0000000000000..a1112818b1387 --- /dev/null +++ b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_expectedrecords_mixed_meta_raw.jsonl @@ -0,0 +1,11 @@ +// We keep the records from the first sync +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00Z", "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"}} +// And append the records from the second sync +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Seattle", "state": "WA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Bob", "address": {"city": "New York", "state": "NY"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00Z"}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 2, "id2": 200, "updated_at": "2000-01-02T00:03:00Z", "name":"Charlie", "age":"this is not an integer", "registration_date":"this is not a date", "address": {"city": "San Francisco", "state": "CA"}}, "_airbyte_meta":{"changes":[{"field":"address","change":"NULLED","reason":"SOURCE_RETRIEVAL_ERROR"}]}} diff --git a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_expectedrecords_raw.jsonl b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_expectedrecords_raw.jsonl index 2f634c6ad4e95..f0c2f3b4c342a 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_expectedrecords_raw.jsonl +++ b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_expectedrecords_raw.jsonl @@ -1,10 +1,10 @@ // We keep the records from the first sync -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00Z", "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00Z", "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}, "_airbyte_meta": {"changes":[{"field":"address","change":"NULLED","reason":"SOURCE_RETRIEVAL_ERROR"}]}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"}, "_airbyte_meta": {"changes": []}} // And append the records from the second sync -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Seattle", "state": "WA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Bob", "address": {"city": "New York", "state": "NY"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00Z"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Seattle", "state": "WA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Bob", "address": {"city": "New York", "state": "NY"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00Z"}, "_airbyte_meta": {"changes": []}} diff --git a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_expectedrecords_raw2.jsonl b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_expectedrecords_raw2.jsonl index 88b8ee7746c1c..b7c4206c7898d 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_expectedrecords_raw2.jsonl +++ b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_expectedrecords_raw2.jsonl @@ -1,2 +1,2 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2001-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Someone completely different"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2001-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Someone completely different v2"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2001-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Someone completely different"}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2001-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Someone completely different v2"}, "_airbyte_meta": {"changes": []}} diff --git a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_messages_after_meta.jsonl b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_messages_after_meta.jsonl new file mode 100644 index 0000000000000..c31da6b35ae7e --- /dev/null +++ b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_messages_after_meta.jsonl @@ -0,0 +1,8 @@ +{"type": "RECORD", "record": {"emitted_at": 2000, "data": {"id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Seattle", "state": "WA"}}}} +{"type": "RECORD", "record": {"emitted_at": 2000, "data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Bob", "address": {"city": "New York", "state": "NY"}}}} +// Set deleted_at to something non-null. Again, T+D doesn't check the actual _value_ of deleted_at (i.e. the fact that it's in the past is irrelevant). +// It only cares whether deleted_at is non-null. So this should delete Bob from the final table (in dedup mode). +{"type": "RECORD", "record": {"emitted_at": 2000, "data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00Z"}}} +// Emit earlier message with _airbyte_meta again with one fixed column. +// Emit a record with an invalid age & address nulled at source. +{"type": "RECORD", "record": {"emitted_at": 2000, "data": {"id1": 2, "id2": 200, "updated_at": "2000-01-02T00:03:00Z", "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date", "address": {"city": "San Francisco", "state": "CA"}}, "meta": {"changes": [{"field": "address", "change": "NULLED", "reason": "SOURCE_RETRIEVAL_ERROR"}]}}} \ No newline at end of file diff --git a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_mixedcase_expectedrecords_fullrefresh_append_final.jsonl b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_mixedcase_expectedrecords_fullrefresh_append_final.jsonl new file mode 100644 index 0000000000000..4dff86fcc890b --- /dev/null +++ b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_mixedcase_expectedrecords_fullrefresh_append_final.jsonl @@ -0,0 +1,9 @@ +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00.000000Z", "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00.000000Z", "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00.000000Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[{"field":"age","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"registration_date","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"}]}, "id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00.000000Z", "name": "Charlie"} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00.000000Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} + +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Alice", "address": {"city": "Seattle", "state": "WA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Bob", "address": {"city": "New York", "state": "NY"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00.000000Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00.000000Z"} diff --git a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_mixedcase_expectedrecords_raw.jsonl b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_mixedcase_expectedrecords_raw.jsonl new file mode 100644 index 0000000000000..78da60c83138d --- /dev/null +++ b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/dat/sync2_mixedcase_expectedrecords_raw.jsonl @@ -0,0 +1,10 @@ +// We keep the records from the first sync, _airbyte_meta in raw didn't exist in that version +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00Z", "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"}} +// And append the records from the second sync, _airbyte_meta was added in this version +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Seattle", "state": "WA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Bob", "address": {"city": "New York", "state": "NY"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00Z"}, "_airbyte_meta": {"changes": []}} diff --git a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/sqlgenerator/alltypes_expectedrecords_final.jsonl b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/sqlgenerator/alltypes_expectedrecords_final.jsonl index 76d0442ebe798..fe43974cd78e7 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/sqlgenerator/alltypes_expectedrecords_final.jsonl +++ b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/sqlgenerator/alltypes_expectedrecords_final.jsonl @@ -1,8 +1,8 @@ -{"id1": 1, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "array": ["foo"], "struct": {"foo": "bar"}, "string": "foo", "number": 42.1, "integer": 42, "boolean": true, "timestamp_with_timezone": "2023-01-23T12:34:56.000000Z", "timestamp_without_timezone": "2023-01-23T12:34:56", "time_with_timezone": "12:34:56Z", "time_without_timezone": "12:34:56", "date": "2023-01-23", "unknown": {}, "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}} -{"id1": 2, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "unknown": null, "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}} -{"id1": 3, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}} -{"id1": 4, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "unknown": null, "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": ["Problem with `struct`", "Problem with `array`", "Problem with `number`", "Problem with `integer`", "Problem with `boolean`","Problem with `timestamp_with_timezone`", "Problem with `timestamp_without_timezone`", "Problem with `time_with_timezone`","Problem with `time_without_timezone`", "Problem with `date`"]}} +{"id1": 1, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "array": ["foo"], "struct": {"foo": "bar"}, "string": "foo", "number": 42.1, "integer": 42, "boolean": true, "timestamp_with_timezone": "2023-01-23T12:34:56.000000Z", "timestamp_without_timezone": "2023-01-23T12:34:56", "time_with_timezone": "12:34:56Z", "time_without_timezone": "12:34:56", "date": "2023-01-23", "unknown": {}, "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}} +{"id1": 2, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "unknown": null, "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}} +{"id1": 3, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}} +{"id1": 4, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "unknown": null, "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes":[{"field":"struct","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"array","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"number","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"integer","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"boolean","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"timestamp_with_timezone","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"timestamp_without_timezone","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"time_with_timezone","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"time_without_timezone","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"date","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"string","change":"NULLED","reason":"SOURCE_SERIALIZATION_ERROR"}]}} // Note that for numbers where we parse the value to JSON (struct, array, unknown) we lose precision. // But for numbers where we create a NUMBER column, we do not lose precision (see the `number` column). -{"id1": 5, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "number": 67.174118, "struct": {"nested_number": 67.174118}, "array": [67.174118], "unknown": 67.174118, "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}} -{"id1": 6, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "IamACaseSensitiveColumnName": "Case senstive value", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}} +{"id1": 5, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "number": 67.174118, "struct": {"nested_number": 67.174118}, "array": [67.174118], "unknown": 67.174118, "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}} +{"id1": 6, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "IamACaseSensitiveColumnName": "Case senstive value", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}} diff --git a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/sqlgenerator/alltypes_expectedrecords_raw.jsonl b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/sqlgenerator/alltypes_expectedrecords_raw.jsonl index 6b99169ececf1..ed12fd09bccee 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/sqlgenerator/alltypes_expectedrecords_raw.jsonl +++ b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/sqlgenerator/alltypes_expectedrecords_raw.jsonl @@ -1,6 +1,6 @@ {"_airbyte_raw_id": "14ba7c7f-e398-4e69-ac22-28d578400dbc", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_data": {"id1": 1, "id2": 100, "updated_at": "2023-01-01T01:00:00Z", "array": ["foo"], "struct": {"foo": "bar"}, "string": "foo", "number": 42.1, "integer": 42, "boolean": true, "timestamp_with_timezone": "2023-01-23T12:34:56Z", "timestamp_without_timezone": "2023-01-23T12:34:56", "time_with_timezone": "12:34:56Z", "time_without_timezone": "12:34:56", "date": "2023-01-23", "unknown": {}}} {"_airbyte_raw_id": "53ce75a5-5bcc-47a3-b45c-96c2015cfe35", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_data": {"id1": 2, "id2": 100, "updated_at": "2023-01-01T01:00:00Z", "array": null, "struct": null, "string": null, "number": null, "integer": null, "boolean": null, "timestamp_with_timezone": null, "timestamp_without_timezone": null, "time_with_timezone": null, "time_without_timezone": null, "date": null, "unknown": null}} {"_airbyte_raw_id": "7e1fac0c-017e-4ad6-bc78-334a34d64fbe", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_data": {"id1": 3, "id2": 100, "updated_at": "2023-01-01T01:00:00Z"}} -{"_airbyte_raw_id": "84242b60-3a34-4531-ad75-a26702960a9a", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_data": {"id1": 4, "id2": 100, "updated_at": "2023-01-01T01:00:00Z", "array": {}, "struct": [], "string": null, "number": "foo", "integer": "bar", "boolean": "fizz", "timestamp_with_timezone": {}, "timestamp_without_timezone": {}, "time_with_timezone": {}, "time_without_timezone": {}, "date": "airbyte", "unknown": null}} +{"_airbyte_raw_id": "84242b60-3a34-4531-ad75-a26702960a9a", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_data": {"id1": 4, "id2": 100, "updated_at": "2023-01-01T01:00:00Z", "array": {}, "struct": [], "string": null, "number": "foo", "integer": "bar", "boolean": "fizz", "timestamp_with_timezone": {}, "timestamp_without_timezone": {}, "time_with_timezone": {}, "time_without_timezone": {}, "date": "airbyte", "unknown": null}, "_airbyte_meta": {"changes": [{"field": "string", "change": "NULLED", "reason": "SOURCE_SERIALIZATION_ERROR"}]}} {"_airbyte_raw_id": "a4a783b5-7729-4d0b-b659-48ceb08713f1", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_data": {"id1": 5, "id2": 100, "updated_at": "2023-01-01T01:00:00Z", "number": 67.174118, "struct": {"nested_number": 67.174118}, "array": [67.174118], "unknown": 67.174118}} {"_airbyte_raw_id": "7e1fac0c-017e-4ad6-bc78-334a34d64fce", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_data": {"id1": 6, "id2": 100, "updated_at": "2023-01-01T01:00:00Z", "IamACaseSensitiveColumnName": "Case senstive value"}} diff --git a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/sqlgenerator/alltypes_v1v2_expectedrecords_final.jsonl b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/sqlgenerator/alltypes_v1v2_expectedrecords_final.jsonl new file mode 100644 index 0000000000000..6e5f1175b4fe5 --- /dev/null +++ b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/sqlgenerator/alltypes_v1v2_expectedrecords_final.jsonl @@ -0,0 +1,8 @@ +{"id1": 1, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "array": ["foo"], "struct": {"foo": "bar"}, "string": "foo", "number": 42.1, "integer": 42, "boolean": true, "timestamp_with_timezone": "2023-01-23T12:34:56.000000Z", "timestamp_without_timezone": "2023-01-23T12:34:56", "time_with_timezone": "12:34:56Z", "time_without_timezone": "12:34:56", "date": "2023-01-23", "unknown": {}, "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}} +{"id1": 2, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "unknown": null, "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}} +{"id1": 3, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}} +{"id1": 4, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "unknown": null, "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes":[{"field":"struct","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"array","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"number","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"integer","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"boolean","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"timestamp_with_timezone","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"timestamp_without_timezone","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"time_with_timezone","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"time_without_timezone","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"date","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"}]}} +// Note that for numbers where we parse the value to JSON (struct, array, unknown) we lose precision. +// But for numbers where we create a NUMBER column, we do not lose precision (see the `number` column). +{"id1": 5, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "number": 67.174118, "struct": {"nested_number": 67.174118}, "array": [67.174118], "unknown": 67.174118, "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}} +{"id1": 6, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "IamACaseSensitiveColumnName": "Case senstive value", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}} diff --git a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/sqlgenerator/alltypes_v1v2_expectedrecords_raw.jsonl b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/sqlgenerator/alltypes_v1v2_expectedrecords_raw.jsonl new file mode 100644 index 0000000000000..6b99169ececf1 --- /dev/null +++ b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/sqlgenerator/alltypes_v1v2_expectedrecords_raw.jsonl @@ -0,0 +1,6 @@ +{"_airbyte_raw_id": "14ba7c7f-e398-4e69-ac22-28d578400dbc", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_data": {"id1": 1, "id2": 100, "updated_at": "2023-01-01T01:00:00Z", "array": ["foo"], "struct": {"foo": "bar"}, "string": "foo", "number": 42.1, "integer": 42, "boolean": true, "timestamp_with_timezone": "2023-01-23T12:34:56Z", "timestamp_without_timezone": "2023-01-23T12:34:56", "time_with_timezone": "12:34:56Z", "time_without_timezone": "12:34:56", "date": "2023-01-23", "unknown": {}}} +{"_airbyte_raw_id": "53ce75a5-5bcc-47a3-b45c-96c2015cfe35", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_data": {"id1": 2, "id2": 100, "updated_at": "2023-01-01T01:00:00Z", "array": null, "struct": null, "string": null, "number": null, "integer": null, "boolean": null, "timestamp_with_timezone": null, "timestamp_without_timezone": null, "time_with_timezone": null, "time_without_timezone": null, "date": null, "unknown": null}} +{"_airbyte_raw_id": "7e1fac0c-017e-4ad6-bc78-334a34d64fbe", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_data": {"id1": 3, "id2": 100, "updated_at": "2023-01-01T01:00:00Z"}} +{"_airbyte_raw_id": "84242b60-3a34-4531-ad75-a26702960a9a", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_data": {"id1": 4, "id2": 100, "updated_at": "2023-01-01T01:00:00Z", "array": {}, "struct": [], "string": null, "number": "foo", "integer": "bar", "boolean": "fizz", "timestamp_with_timezone": {}, "timestamp_without_timezone": {}, "time_with_timezone": {}, "time_without_timezone": {}, "date": "airbyte", "unknown": null}} +{"_airbyte_raw_id": "a4a783b5-7729-4d0b-b659-48ceb08713f1", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_data": {"id1": 5, "id2": 100, "updated_at": "2023-01-01T01:00:00Z", "number": 67.174118, "struct": {"nested_number": 67.174118}, "array": [67.174118], "unknown": 67.174118}} +{"_airbyte_raw_id": "7e1fac0c-017e-4ad6-bc78-334a34d64fce", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_data": {"id1": 6, "id2": 100, "updated_at": "2023-01-01T01:00:00Z", "IamACaseSensitiveColumnName": "Case senstive value"}} diff --git a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/sqlgenerator/incrementaldedup_expectedrecords_final.jsonl b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/sqlgenerator/incrementaldedup_expectedrecords_final.jsonl index 5842f7b37e42b..c59f838544eec 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/sqlgenerator/incrementaldedup_expectedrecords_final.jsonl +++ b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/sqlgenerator/incrementaldedup_expectedrecords_final.jsonl @@ -1,2 +1,2 @@ -{"_airbyte_raw_id": "80c99b54-54b4-43bd-b51b-1f67dafa2c52", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}, "id1": 1, "id2": 100, "updated_at": "2023-01-01T02:00:00.000000Z", "string": "Alice", "struct": {"city": "San Diego", "state": "CA"}, "integer": 84} -{"_airbyte_raw_id": "ad690bfb-c2c2-4172-bd73-a16c86ccbb67", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": ["Problem with `integer`"]}, "id1": 2, "id2": 100, "updated_at": "2023-01-01T03:00:00.000000Z", "string": "Bob"} +{"_airbyte_raw_id": "80c99b54-54b4-43bd-b51b-1f67dafa2c52", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}, "id1": 1, "id2": 100, "updated_at": "2023-01-01T02:00:00.000000Z", "string": "Alice", "struct": {"city": "San Diego", "state": "CA"}, "integer": 84} +{"_airbyte_raw_id": "ad690bfb-c2c2-4172-bd73-a16c86ccbb67", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": [{"field":"integer","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"}]}, "id1": 2, "id2": 100, "updated_at": "2023-01-01T03:00:00.000000Z", "string": "Bob"} diff --git a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/sqlgenerator/json_types_in_string_expectedrecords_final.jsonl b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/sqlgenerator/json_types_in_string_expectedrecords_final.jsonl index edcc0cc462d6b..0a59916461c0d 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/sqlgenerator/json_types_in_string_expectedrecords_final.jsonl +++ b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/sqlgenerator/json_types_in_string_expectedrecords_final.jsonl @@ -1,5 +1,5 @@ -{"id1": 1, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "array": ["foo"], "struct": {"foo": "bar"}, "string": "[\"I\", \"am\", \"an\", \"array\"]", "number": 42.1, "integer": 42, "boolean": true, "timestamp_with_timezone": "2023-01-23T12:34:56.000000Z", "timestamp_without_timezone": "2023-01-23T12:34:56", "time_with_timezone": "12:34:56Z", "time_without_timezone": "12:34:56", "date": "2023-01-23", "unknown": {}, "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}} -{"id1": 2, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "array": ["foo"], "struct": {"foo": "bar"}, "string": "{\"I\": \"am\", \"an\": \"object\"}", "number": 42.1, "integer": 42, "boolean": true, "timestamp_with_timezone": "2023-01-23T12:34:56.000000Z", "timestamp_without_timezone": "2023-01-23T12:34:56", "time_with_timezone": "12:34:56Z", "time_without_timezone": "12:34:56", "date": "2023-01-23", "unknown": {}, "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}} -{"id1": 3, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "array": ["foo"], "struct": {"foo": "bar"}, "string": "true", "number": 42.1, "integer": 42, "boolean": true, "timestamp_with_timezone": "2023-01-23T12:34:56.000000Z", "timestamp_without_timezone": "2023-01-23T12:34:56", "time_with_timezone": "12:34:56Z", "time_without_timezone": "12:34:56", "date": "2023-01-23", "unknown": {}, "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}} -{"id1": 4, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "array": ["foo"], "struct": {"foo": "bar"}, "string": "3.14", "number": 42.1, "integer": 42, "boolean": true, "timestamp_with_timezone": "2023-01-23T12:34:56.000000Z", "timestamp_without_timezone": "2023-01-23T12:34:56", "time_with_timezone": "12:34:56Z", "time_without_timezone": "12:34:56", "date": "2023-01-23", "unknown": {}, "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}} -{"id1": 5, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "array": ["foo"], "struct": {"foo": "bar"}, "string": "I am a valid json string", "number": 42.1, "integer": 42, "boolean": true, "timestamp_with_timezone": "2023-01-23T12:34:56.000000Z", "timestamp_without_timezone": "2023-01-23T12:34:56", "time_with_timezone": "12:34:56Z", "time_without_timezone": "12:34:56", "date": "2023-01-23", "unknown": {}, "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}} +{"id1": 1, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "array": ["foo"], "struct": {"foo": "bar"}, "string": "[\"I\", \"am\", \"an\", \"array\"]", "number": 42.1, "integer": 42, "boolean": true, "timestamp_with_timezone": "2023-01-23T12:34:56.000000Z", "timestamp_without_timezone": "2023-01-23T12:34:56", "time_with_timezone": "12:34:56Z", "time_without_timezone": "12:34:56", "date": "2023-01-23", "unknown": {}, "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}} +{"id1": 2, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "array": ["foo"], "struct": {"foo": "bar"}, "string": "{\"I\": \"am\", \"an\": \"object\"}", "number": 42.1, "integer": 42, "boolean": true, "timestamp_with_timezone": "2023-01-23T12:34:56.000000Z", "timestamp_without_timezone": "2023-01-23T12:34:56", "time_with_timezone": "12:34:56Z", "time_without_timezone": "12:34:56", "date": "2023-01-23", "unknown": {}, "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}} +{"id1": 3, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "array": ["foo"], "struct": {"foo": "bar"}, "string": "true", "number": 42.1, "integer": 42, "boolean": true, "timestamp_with_timezone": "2023-01-23T12:34:56.000000Z", "timestamp_without_timezone": "2023-01-23T12:34:56", "time_with_timezone": "12:34:56Z", "time_without_timezone": "12:34:56", "date": "2023-01-23", "unknown": {}, "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}} +{"id1": 4, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "array": ["foo"], "struct": {"foo": "bar"}, "string": "3.14", "number": 42.1, "integer": 42, "boolean": true, "timestamp_with_timezone": "2023-01-23T12:34:56.000000Z", "timestamp_without_timezone": "2023-01-23T12:34:56", "time_with_timezone": "12:34:56Z", "time_without_timezone": "12:34:56", "date": "2023-01-23", "unknown": {}, "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}} +{"id1": 5, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "array": ["foo"], "struct": {"foo": "bar"}, "string": "I am a valid json string", "number": 42.1, "integer": 42, "boolean": true, "timestamp_with_timezone": "2023-01-23T12:34:56.000000Z", "timestamp_without_timezone": "2023-01-23T12:34:56", "time_with_timezone": "12:34:56Z", "time_without_timezone": "12:34:56", "date": "2023-01-23", "unknown": {}, "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}} diff --git a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/sqlgenerator/nocolumns_expectedrecords_final.jsonl b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/sqlgenerator/nocolumns_expectedrecords_final.jsonl index 4ecd95d83b637..d14bcddf132f6 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/sqlgenerator/nocolumns_expectedrecords_final.jsonl +++ b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/sqlgenerator/nocolumns_expectedrecords_final.jsonl @@ -1 +1 @@ -{"_airbyte_raw_id": "14ba7c7f-e398-4e69-ac22-28d578400dbc", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}} +{"_airbyte_raw_id": "14ba7c7f-e398-4e69-ac22-28d578400dbc", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}} diff --git a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/sqlgenerator/reservedkeywords_expectedrecords_final.jsonl b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/sqlgenerator/reservedkeywords_expectedrecords_final.jsonl index b34ad054ab33c..8ffcc0c73bdc6 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/sqlgenerator/reservedkeywords_expectedrecords_final.jsonl +++ b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/sqlgenerator/reservedkeywords_expectedrecords_final.jsonl @@ -1 +1 @@ -{"_airbyte_raw_id":"b2e0efc4-38a8-47ba-970c-8103f09f08d5","_airbyte_extracted_at":"2023-01-01T00:00:00.000000Z","_airbyte_meta":{"errors":[]}, "current_date": "foo", "join": "bar"} +{"_airbyte_raw_id":"b2e0efc4-38a8-47ba-970c-8103f09f08d5","_airbyte_extracted_at":"2023-01-01T00:00:00.000000Z","_airbyte_meta":{"changes":[]}, "current_date": "foo", "join": "bar"} diff --git a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/sqlgenerator/timestampformats_expectedrecords_final.jsonl b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/sqlgenerator/timestampformats_expectedrecords_final.jsonl index 78ded5f99d0e9..2f6919ab547db 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/sqlgenerator/timestampformats_expectedrecords_final.jsonl +++ b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/sqlgenerator/timestampformats_expectedrecords_final.jsonl @@ -2,15 +2,15 @@ // TIME, TIMETZ, TIMESTAMP, TIMESTAMPTZ values are UTC in user tables. // Note that redshift stores precision to microseconds. Java deserialization in tests preserves them only for non-zero values // except for timestamp with time zone where Z is required at end for even zero values -{"_airbyte_raw_id": "14ba7c7f-e398-4e69-ac22-28d578400dbc", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}, "timestamp_with_timezone": "2023-01-23T12:34:56.000000Z", "time_with_timezone": "12:34:56Z"} -{"_airbyte_raw_id": "05028c5f-7813-4e9c-bd4b-387d1f8ba435", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}, "timestamp_with_timezone": "2023-01-23T20:34:56.000000Z", "time_with_timezone": "12:34:56-08:00"} -{"_airbyte_raw_id": "95dfb0c6-6a67-4ba0-9935-643bebc90437", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}, "timestamp_with_timezone": "2023-01-23T20:34:56.000000Z", "time_with_timezone": "12:34:56-08:00"} -{"_airbyte_raw_id": "f3d8abe2-bb0f-4caf-8ddc-0641df02f3a9", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}, "timestamp_with_timezone": "2023-01-23T20:34:56.000000Z", "time_with_timezone": "12:34:56-08:00"} -{"_airbyte_raw_id": "a81ed40a-2a49-488d-9714-d53e8b052968", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}, "timestamp_with_timezone": "2023-01-23T04:34:56.000000Z", "time_with_timezone": "12:34:56+08:00"} -{"_airbyte_raw_id": "c07763a0-89e6-4cb7-b7d0-7a34a7c9918a", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}, "timestamp_with_timezone": "2023-01-23T04:34:56.000000Z", "time_with_timezone": "12:34:56+08:00"} -{"_airbyte_raw_id": "358d3b52-50ab-4e06-9094-039386f9bf0d", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}, "timestamp_with_timezone": "2023-01-23T04:34:56.000000Z", "time_with_timezone": "12:34:56+08:00"} -{"_airbyte_raw_id": "db8200ac-b2b9-4b95-a053-8a0343042751", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}, "timestamp_with_timezone": "2023-01-23T12:34:56.123000Z", "time_with_timezone": "12:34:56.123Z"} +{"_airbyte_raw_id": "14ba7c7f-e398-4e69-ac22-28d578400dbc", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}, "timestamp_with_timezone": "2023-01-23T12:34:56.000000Z", "time_with_timezone": "12:34:56Z"} +{"_airbyte_raw_id": "05028c5f-7813-4e9c-bd4b-387d1f8ba435", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}, "timestamp_with_timezone": "2023-01-23T20:34:56.000000Z", "time_with_timezone": "12:34:56-08:00"} +{"_airbyte_raw_id": "95dfb0c6-6a67-4ba0-9935-643bebc90437", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}, "timestamp_with_timezone": "2023-01-23T20:34:56.000000Z", "time_with_timezone": "12:34:56-08:00"} +{"_airbyte_raw_id": "f3d8abe2-bb0f-4caf-8ddc-0641df02f3a9", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}, "timestamp_with_timezone": "2023-01-23T20:34:56.000000Z", "time_with_timezone": "12:34:56-08:00"} +{"_airbyte_raw_id": "a81ed40a-2a49-488d-9714-d53e8b052968", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}, "timestamp_with_timezone": "2023-01-23T04:34:56.000000Z", "time_with_timezone": "12:34:56+08:00"} +{"_airbyte_raw_id": "c07763a0-89e6-4cb7-b7d0-7a34a7c9918a", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}, "timestamp_with_timezone": "2023-01-23T04:34:56.000000Z", "time_with_timezone": "12:34:56+08:00"} +{"_airbyte_raw_id": "358d3b52-50ab-4e06-9094-039386f9bf0d", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}, "timestamp_with_timezone": "2023-01-23T04:34:56.000000Z", "time_with_timezone": "12:34:56+08:00"} +{"_airbyte_raw_id": "db8200ac-b2b9-4b95-a053-8a0343042751", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}, "timestamp_with_timezone": "2023-01-23T12:34:56.123000Z", "time_with_timezone": "12:34:56.123Z"} -{"_airbyte_raw_id": "10ce5d93-6923-4217-a46f-103833837038", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}, "timestamp_without_timezone": "2023-01-23T12:34:56", "time_without_timezone": "12:34:56", "date": "2023-01-23"} +{"_airbyte_raw_id": "10ce5d93-6923-4217-a46f-103833837038", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}, "timestamp_without_timezone": "2023-01-23T12:34:56", "time_without_timezone": "12:34:56", "date": "2023-01-23"} // Bigquery returns 6 decimal places if there are any decimal places... but not for timestamp_with_timezone -{"_airbyte_raw_id": "a7a6e176-7464-4a0b-b55c-b4f936e8d5a1", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}, "timestamp_without_timezone": "2023-01-23T12:34:56.123", "time_without_timezone": "12:34:56.123"} +{"_airbyte_raw_id": "a7a6e176-7464-4a0b-b55c-b4f936e8d5a1", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}, "timestamp_without_timezone": "2023-01-23T12:34:56.123", "time_without_timezone": "12:34:56.123"} diff --git a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/sqlgenerator/weirdcolumnnames_expectedrecords_final.jsonl b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/sqlgenerator/weirdcolumnnames_expectedrecords_final.jsonl index adfbd06d6a55a..9d73b0601264a 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/sqlgenerator/weirdcolumnnames_expectedrecords_final.jsonl +++ b/airbyte-integrations/connectors/destination-postgres/src/test-integration/resources/sqlgenerator/weirdcolumnnames_expectedrecords_final.jsonl @@ -6,4 +6,4 @@ // * includes$$doubledollar -> includes__doubledollar // * includes.period -> includes_period // * endswithbackslash\ -> endswithbackslash_ -{"_airbyte_raw_id": "7e7330a1-42fb-41ec-a955-52f18bd61964", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}, "id1": 1, "id2": 100, "updated_at": "2023-01-01T02:00:00.000000Z", "_starts_with_dollar_sign": "foo", "includes_doublequote": "foo", "includes_singlequote": "foo", "includes_backtick": "foo", "includes_period": "foo", "includes__doubledollar": "foo", "endswithbackslash_": "foo"} +{"_airbyte_raw_id": "7e7330a1-42fb-41ec-a955-52f18bd61964", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}, "id1": 1, "id2": 100, "updated_at": "2023-01-01T02:00:00.000000Z", "_starts_with_dollar_sign": "foo", "includes_doublequote": "foo", "includes_singlequote": "foo", "includes_backtick": "foo", "includes_period": "foo", "includes__doubledollar": "foo", "endswithbackslash_": "foo"} diff --git a/airbyte-integrations/connectors/destination-postgres/src/testFixtures/java/io/airbyte/integrations/destination/postgres/PostgresTestDatabase.java b/airbyte-integrations/connectors/destination-postgres/src/testFixtures/java/io/airbyte/integrations/destination/postgres/PostgresTestDatabase.java index 6849af062f50d..afc0ea8d59c6c 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/testFixtures/java/io/airbyte/integrations/destination/postgres/PostgresTestDatabase.java +++ b/airbyte-integrations/connectors/destination-postgres/src/testFixtures/java/io/airbyte/integrations/destination/postgres/PostgresTestDatabase.java @@ -190,9 +190,9 @@ public PostgresConfigBuilder withCdcReplication(String LsnCommitBehaviour) { .with("is_test", true) .with("replication_method", Jsons.jsonNode(ImmutableMap.builder() .put("method", "CDC") - .put("replication_slot", testDatabase.getReplicationSlotName()) - .put("publication", testDatabase.getPublicationName()) - .put("initial_waiting_seconds", DEFAULT_CDC_REPLICATION_INITIAL_WAIT.getSeconds()) + .put("replication_slot", getTestDatabase().getReplicationSlotName()) + .put("publication", getTestDatabase().getPublicationName()) + .put("initial_waiting_seconds", ConfigBuilder.DEFAULT_CDC_REPLICATION_INITIAL_WAIT.getSeconds()) .put("lsn_commit_behaviour", LsnCommitBehaviour) .build())); } diff --git a/airbyte-integrations/connectors/destination-postgres/src/testFixtures/java/io/airbyte/integrations/destination/postgres/typing_deduping/AbstractPostgresTypingDedupingTest.java b/airbyte-integrations/connectors/destination-postgres/src/testFixtures/java/io/airbyte/integrations/destination/postgres/typing_deduping/AbstractPostgresTypingDedupingTest.java index 128d8d2de1cf0..971b1c978e86a 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/testFixtures/java/io/airbyte/integrations/destination/postgres/typing_deduping/AbstractPostgresTypingDedupingTest.java +++ b/airbyte-integrations/connectors/destination-postgres/src/testFixtures/java/io/airbyte/integrations/destination/postgres/typing_deduping/AbstractPostgresTypingDedupingTest.java @@ -11,6 +11,7 @@ import io.airbyte.cdk.db.JdbcCompatibleSourceOperations; import io.airbyte.cdk.integrations.standardtest.destination.typing_deduping.JdbcTypingDedupingTest; import io.airbyte.commons.json.Jsons; +import io.airbyte.commons.text.Names; import io.airbyte.integrations.base.destination.typing_deduping.SqlGenerator; import io.airbyte.integrations.destination.postgres.PostgresSQLNameTransformer; import io.airbyte.protocol.models.v0.AirbyteMessage; @@ -25,6 +26,7 @@ import java.util.List; import java.util.Map; import java.util.Random; +import org.jooq.impl.DSL; import org.junit.jupiter.api.Test; public abstract class AbstractPostgresTypingDedupingTest extends JdbcTypingDedupingTest { @@ -45,7 +47,7 @@ private String generateBigString() { @Override protected SqlGenerator getSqlGenerator() { - return new PostgresSqlGenerator(new PostgresSQLNameTransformer()); + return new PostgresSqlGenerator(new PostgresSQLNameTransformer(), false); } @Override @@ -55,15 +57,15 @@ protected JdbcCompatibleSourceOperations getSourceOperations() { @Test public void testMixedCasedSchema() throws Exception { - streamName = "MixedCaseSchema" + streamName; + setStreamName("MixedCaseSchema" + getStreamName()); final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withStreams(List.of( new ConfiguredAirbyteStream() .withSyncMode(SyncMode.FULL_REFRESH) .withDestinationSyncMode(DestinationSyncMode.OVERWRITE) .withStream(new AirbyteStream() - .withNamespace(streamNamespace) - .withName(streamName) - .withJsonSchema(SCHEMA)))); + .withNamespace(getStreamNamespace()) + .withName(getStreamName()) + .withJsonSchema(getSchema())))); // First sync final List messages1 = readMessages("dat/sync1_messages.jsonl"); @@ -75,6 +77,83 @@ public void testMixedCasedSchema() throws Exception { verifySyncResult(expectedRawRecords1, expectedFinalRecords1, disableFinalTableComparison()); } + @Test + public void testMixedCaseRawTableV1V2Migration() throws Exception { + setStreamName("Mixed Case Table" + getStreamName()); + final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withStreams(List.of( + new ConfiguredAirbyteStream() + .withSyncMode(SyncMode.FULL_REFRESH) + .withDestinationSyncMode(DestinationSyncMode.APPEND) + .withStream(new AirbyteStream() + .withNamespace(getStreamNamespace()) + .withName(getStreamName()) + .withJsonSchema(getSchema())))); + + // First sync + final List messages1 = readMessages("dat/sync1_messages.jsonl"); + + runSync(catalog, messages1, "airbyte/destination-postgres:0.6.3"); + // Special case to retrieve raw records pre DV2 using the same logic as actual code. + final List rawActualRecords = getDatabase().queryJsons( + DSL.selectFrom(DSL.name(getStreamNamespace(), "_airbyte_raw_" + Names.toAlphanumericAndUnderscore(getStreamName()).toLowerCase())).getSQL()); + // Just verify the size of raw pre DV2, postgres was lower casing the MixedCaseSchema so above + // retrieval should give 5 records from sync1 + assertEquals(5, rawActualRecords.size()); + final List messages2 = readMessages("dat/sync2_messages.jsonl"); + runSync(catalog, messages2); + final List expectedRawRecords2 = readRecords("dat/sync2_mixedcase_expectedrecords_raw.jsonl"); + final List expectedFinalRecords2 = readRecords("dat/sync2_mixedcase_expectedrecords_fullrefresh_append_final.jsonl"); + verifySyncResult(expectedRawRecords2, expectedFinalRecords2, disableFinalTableComparison()); + } + + @Test + public void testRawTableMetaMigration_append() throws Exception { + final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withStreams(List.of( + new ConfiguredAirbyteStream() + .withSyncMode(SyncMode.FULL_REFRESH) + .withDestinationSyncMode(DestinationSyncMode.APPEND) + .withStream(new AirbyteStream() + .withNamespace(getStreamNamespace()) + .withName(getStreamName()) + .withJsonSchema(getSchema())))); + + // First sync without _airbyte_meta + final List messages1 = readMessages("dat/sync1_messages.jsonl"); + runSync(catalog, messages1, "airbyte/destination-postgres:2.0.4"); + // Second sync + final List messages2 = readMessages("dat/sync2_messages_after_meta.jsonl"); + runSync(catalog, messages2); + + final List expectedRawRecords2 = readRecords("dat/sync2_expectedrecords_mixed_meta_raw.jsonl"); + final List expectedFinalRecords2 = readRecords("dat/sync2_expectedrecords_fullrefresh_append_mixed_meta_final.jsonl"); + verifySyncResult(expectedRawRecords2, expectedFinalRecords2, disableFinalTableComparison()); + } + + @Test + public void testRawTableMetaMigration_incrementalDedupe() throws Exception { + final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withStreams(List.of( + new ConfiguredAirbyteStream() + .withSyncMode(SyncMode.INCREMENTAL) + .withCursorField(List.of("updated_at")) + .withDestinationSyncMode(DestinationSyncMode.APPEND_DEDUP) + .withPrimaryKey(List.of(List.of("id1"), List.of("id2"))) + .withStream(new AirbyteStream() + .withNamespace(getStreamNamespace()) + .withName(getStreamName()) + .withJsonSchema(getSchema())))); + + // First sync without _airbyte_meta + final List messages1 = readMessages("dat/sync1_messages.jsonl"); + runSync(catalog, messages1, "airbyte/destination-postgres:2.0.4"); + // Second sync + final List messages2 = readMessages("dat/sync2_messages_after_meta.jsonl"); + runSync(catalog, messages2); + + final List expectedRawRecords2 = readRecords("dat/sync2_expectedrecords_mixed_meta_raw.jsonl"); + final List expectedFinalRecords2 = readRecords("dat/sync2_expectedrecords_incremental_dedup_meta_final.jsonl"); + verifySyncResult(expectedRawRecords2, expectedFinalRecords2, disableFinalTableComparison()); + } + @Override protected List dumpRawTableRecords(String streamNamespace, String streamName) throws Exception { return super.dumpRawTableRecords(streamNamespace, streamName.toLowerCase()); @@ -87,9 +166,9 @@ public void testVarcharLimitOver64K() throws Exception { .withSyncMode(SyncMode.FULL_REFRESH) .withDestinationSyncMode(DestinationSyncMode.OVERWRITE) .withStream(new AirbyteStream() - .withNamespace(streamNamespace) - .withName(streamName) - .withJsonSchema(SCHEMA)))); + .withNamespace(getStreamNamespace()) + .withName(getStreamName()) + .withJsonSchema(getSchema())))); final AirbyteMessage message = new AirbyteMessage(); final String largeString = generateBigString(); @@ -100,8 +179,8 @@ public void testVarcharLimitOver64K() throws Exception { "name", largeString); message.setType(Type.RECORD); message.setRecord(new AirbyteRecordMessage() - .withNamespace(streamNamespace) - .withStream(streamName) + .withNamespace(getStreamNamespace()) + .withStream(getStreamName()) .withData(Jsons.jsonNode(data)) .withEmittedAt(1000L)); final List messages1 = new ArrayList<>(); @@ -110,7 +189,7 @@ public void testVarcharLimitOver64K() throws Exception { // Only assert on the large varchar string landing in final table. // Rest of the fields' correctness is tested by other means in other tests. - final List actualFinalRecords = dumpFinalTableRecords(streamNamespace, streamName); + final List actualFinalRecords = dumpFinalTableRecords(getStreamNamespace(), getStreamName()); assertEquals(1, actualFinalRecords.size()); assertEquals(largeString, actualFinalRecords.get(0).get("name").asText()); diff --git a/airbyte-integrations/connectors/destination-qdrant/.dockerignore b/airbyte-integrations/connectors/destination-qdrant/.dockerignore index b423c7670a8c4..d5255d7d6cc56 100644 --- a/airbyte-integrations/connectors/destination-qdrant/.dockerignore +++ b/airbyte-integrations/connectors/destination-qdrant/.dockerignore @@ -3,4 +3,6 @@ !main.py !destination_qdrant !airbyte-cdk -!setup.py +!pyproject.toml +!poetry.lock + diff --git a/airbyte-integrations/connectors/destination-qdrant/Dockerfile b/airbyte-integrations/connectors/destination-qdrant/Dockerfile deleted file mode 100644 index bbbdae6005c1f..0000000000000 --- a/airbyte-integrations/connectors/destination-qdrant/Dockerfile +++ /dev/null @@ -1,45 +0,0 @@ -FROM python:3.10-slim as base - -# build and load all requirements -FROM base as builder -WORKDIR /airbyte/integration_code - -RUN apt-get update \ - && pip install --upgrade pip \ - && apt-get install -y build-essential cmake g++ libffi-dev libstdc++6 - -# upgrade pip to the latest version -COPY setup.py ./ - -RUN pip install --upgrade pip - -# This is required because the current connector dependency is not compatible with the CDK version -# An older CDK version will be used, which depends on pyYAML 5.4, for which we need to pin Cython to <3.0 -# As of today the CDK version that satisfies the main dependency requirements, is 0.1.80 ... -RUN pip install --prefix=/install "Cython<3.0" "pyyaml~=5.4" --no-build-isolation - -# install necessary packages to a temporary folder -RUN pip install --prefix=/install . - -# build a clean environment -FROM base -WORKDIR /airbyte/integration_code - -# copy all loaded and built libraries to a pure basic image -COPY --from=builder /install /usr/local -# add default timezone settings -COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime -RUN echo "Etc/UTC" > /etc/timezone - -# bash is installed for more convenient debugging. -RUN apt-get install bash - -# copy payload code only -COPY main.py ./ -COPY destination_qdrant ./destination_qdrant - -ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] - -LABEL io.airbyte.version=0.0.10 -LABEL io.airbyte.name=airbyte/destination-qdrant diff --git a/airbyte-integrations/connectors/destination-qdrant/README.md b/airbyte-integrations/connectors/destination-qdrant/README.md index 462eea0f76b7d..45a3f2ff1881b 100644 --- a/airbyte-integrations/connectors/destination-qdrant/README.md +++ b/airbyte-integrations/connectors/destination-qdrant/README.md @@ -10,24 +10,11 @@ For information about how to use this connector within Airbyte, see [the documen #### Minimum Python version required `= 3.10.0` -#### Build & Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python -m venv .venv -``` - -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt +### Installing the connector +From this connector directory, run: +```bash +poetry install --with dev ``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. - -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. #### Create credentials **If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/destinations/qdrant) @@ -42,8 +29,7 @@ and place them into `secrets/config.json`. ``` python main.py spec python main.py check --config secrets/config.json -python main.py discover --config secrets/config.json -python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +python main.py write --config secrets/config.json --catalog integration_tests/configured_catalog.json ``` ### Locally running the connector docker image @@ -77,6 +63,18 @@ You can run our full test suite locally using [`airbyte-ci`](https://github.com/ airbyte-ci connectors --name=destination-qdrant test ``` +### Unit Tests +To run unit tests locally, from the connector directory run: +``` +poetry run pytest -s unit_tests +``` + +### Integration Tests +To run integration tests locally, make sure you have a secrets/config.json as explained above, and then run: +``` +poetry run pytest -s integration_tests +``` + ### Customizing acceptance Tests Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. diff --git a/airbyte-integrations/connectors/destination-qdrant/metadata.yaml b/airbyte-integrations/connectors/destination-qdrant/metadata.yaml index bf8195a39bf89..06db4f2ad27d3 100644 --- a/airbyte-integrations/connectors/destination-qdrant/metadata.yaml +++ b/airbyte-integrations/connectors/destination-qdrant/metadata.yaml @@ -17,10 +17,12 @@ data: resourceRequirements: memory_limit: 2Gi memory_request: 2Gi + connectorBuildOptions: + baseImage: docker.io/airbyte/python-connector-base:1.1.0@sha256:bd98f6505c6764b1b5f99d3aedc23dfc9e9af631a62533f60eb32b1d3dbab20c connectorSubtype: vectorstore connectorType: destination definitionId: 6eb1198a-6d38-43e5-aaaa-dccd8f71db2b - dockerImageTag: 0.0.10 + dockerImageTag: 0.0.11 dockerRepository: airbyte/destination-qdrant githubIssueLabel: destination-qdrant icon: qdrant.svg diff --git a/airbyte-integrations/connectors/destination-qdrant/poetry.lock b/airbyte-integrations/connectors/destination-qdrant/poetry.lock new file mode 100644 index 0000000000000..613439feaa771 --- /dev/null +++ b/airbyte-integrations/connectors/destination-qdrant/poetry.lock @@ -0,0 +1,3705 @@ +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. + +[[package]] +name = "aiohttp" +version = "3.9.4" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "aiohttp-3.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:76d32588ef7e4a3f3adff1956a0ba96faabbdee58f2407c122dd45aa6e34f372"}, + {file = "aiohttp-3.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:56181093c10dbc6ceb8a29dfeea1e815e1dfdc020169203d87fd8d37616f73f9"}, + {file = "aiohttp-3.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7a5b676d3c65e88b3aca41816bf72831898fcd73f0cbb2680e9d88e819d1e4d"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1df528a85fb404899d4207a8d9934cfd6be626e30e5d3a5544a83dbae6d8a7e"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f595db1bceabd71c82e92df212dd9525a8a2c6947d39e3c994c4f27d2fe15b11"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c0b09d76e5a4caac3d27752027fbd43dc987b95f3748fad2b924a03fe8632ad"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:689eb4356649ec9535b3686200b231876fb4cab4aca54e3bece71d37f50c1d13"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3666cf4182efdb44d73602379a66f5fdfd5da0db5e4520f0ac0dcca644a3497"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b65b0f8747b013570eea2f75726046fa54fa8e0c5db60f3b98dd5d161052004a"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1885d2470955f70dfdd33a02e1749613c5a9c5ab855f6db38e0b9389453dce7"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0593822dcdb9483d41f12041ff7c90d4d1033ec0e880bcfaf102919b715f47f1"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:47f6eb74e1ecb5e19a78f4a4228aa24df7fbab3b62d4a625d3f41194a08bd54f"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c8b04a3dbd54de6ccb7604242fe3ad67f2f3ca558f2d33fe19d4b08d90701a89"}, + {file = "aiohttp-3.9.4-cp310-cp310-win32.whl", hash = "sha256:8a78dfb198a328bfb38e4308ca8167028920fb747ddcf086ce706fbdd23b2926"}, + {file = "aiohttp-3.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:e78da6b55275987cbc89141a1d8e75f5070e577c482dd48bd9123a76a96f0bbb"}, + {file = "aiohttp-3.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c111b3c69060d2bafc446917534150fd049e7aedd6cbf21ba526a5a97b4402a5"}, + {file = "aiohttp-3.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:efbdd51872cf170093998c87ccdf3cb5993add3559341a8e5708bcb311934c94"}, + {file = "aiohttp-3.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7bfdb41dc6e85d8535b00d73947548a748e9534e8e4fddd2638109ff3fb081df"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bd9d334412961125e9f68d5b73c1d0ab9ea3f74a58a475e6b119f5293eee7ba"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:35d78076736f4a668d57ade00c65d30a8ce28719d8a42471b2a06ccd1a2e3063"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:824dff4f9f4d0f59d0fa3577932ee9a20e09edec8a2f813e1d6b9f89ced8293f"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52b8b4e06fc15519019e128abedaeb56412b106ab88b3c452188ca47a25c4093"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eae569fb1e7559d4f3919965617bb39f9e753967fae55ce13454bec2d1c54f09"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:69b97aa5792428f321f72aeb2f118e56893371f27e0b7d05750bcad06fc42ca1"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4d79aad0ad4b980663316f26d9a492e8fab2af77c69c0f33780a56843ad2f89e"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:d6577140cd7db19e430661e4b2653680194ea8c22c994bc65b7a19d8ec834403"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:9860d455847cd98eb67897f5957b7cd69fbcb436dd3f06099230f16a66e66f79"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:69ff36d3f8f5652994e08bd22f093e11cfd0444cea310f92e01b45a4e46b624e"}, + {file = "aiohttp-3.9.4-cp311-cp311-win32.whl", hash = "sha256:e27d3b5ed2c2013bce66ad67ee57cbf614288bda8cdf426c8d8fe548316f1b5f"}, + {file = "aiohttp-3.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d6a67e26daa686a6fbdb600a9af8619c80a332556245fa8e86c747d226ab1a1e"}, + {file = "aiohttp-3.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c5ff8ff44825736a4065d8544b43b43ee4c6dd1530f3a08e6c0578a813b0aa35"}, + {file = "aiohttp-3.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d12a244627eba4e9dc52cbf924edef905ddd6cafc6513849b4876076a6f38b0e"}, + {file = "aiohttp-3.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dcad56c8d8348e7e468899d2fb3b309b9bc59d94e6db08710555f7436156097f"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f7e69a7fd4b5ce419238388e55abd220336bd32212c673ceabc57ccf3d05b55"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4870cb049f10d7680c239b55428916d84158798eb8f353e74fa2c98980dcc0b"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2feaf1b7031ede1bc0880cec4b0776fd347259a723d625357bb4b82f62687b"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:939393e8c3f0a5bcd33ef7ace67680c318dc2ae406f15e381c0054dd658397de"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d2334e387b2adcc944680bebcf412743f2caf4eeebd550f67249c1c3696be04"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e0198ea897680e480845ec0ffc5a14e8b694e25b3f104f63676d55bf76a82f1a"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e40d2cd22914d67c84824045861a5bb0fb46586b15dfe4f046c7495bf08306b2"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:aba80e77c227f4234aa34a5ff2b6ff30c5d6a827a91d22ff6b999de9175d71bd"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:fb68dc73bc8ac322d2e392a59a9e396c4f35cb6fdbdd749e139d1d6c985f2527"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f3460a92638dce7e47062cf088d6e7663adb135e936cb117be88d5e6c48c9d53"}, + {file = "aiohttp-3.9.4-cp312-cp312-win32.whl", hash = "sha256:32dc814ddbb254f6170bca198fe307920f6c1308a5492f049f7f63554b88ef36"}, + {file = "aiohttp-3.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:63f41a909d182d2b78fe3abef557fcc14da50c7852f70ae3be60e83ff64edba5"}, + {file = "aiohttp-3.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c3770365675f6be220032f6609a8fbad994d6dcf3ef7dbcf295c7ee70884c9af"}, + {file = "aiohttp-3.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:305edae1dea368ce09bcb858cf5a63a064f3bff4767dec6fa60a0cc0e805a1d3"}, + {file = "aiohttp-3.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6f121900131d116e4a93b55ab0d12ad72573f967b100e49086e496a9b24523ea"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b71e614c1ae35c3d62a293b19eface83d5e4d194e3eb2fabb10059d33e6e8cbf"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:419f009fa4cfde4d16a7fc070d64f36d70a8d35a90d71aa27670bba2be4fd039"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b39476ee69cfe64061fd77a73bf692c40021f8547cda617a3466530ef63f947"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b33f34c9c7decdb2ab99c74be6443942b730b56d9c5ee48fb7df2c86492f293c"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c78700130ce2dcebb1a8103202ae795be2fa8c9351d0dd22338fe3dac74847d9"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:268ba22d917655d1259af2d5659072b7dc11b4e1dc2cb9662fdd867d75afc6a4"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:17e7c051f53a0d2ebf33013a9cbf020bb4e098c4bc5bce6f7b0c962108d97eab"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:7be99f4abb008cb38e144f85f515598f4c2c8932bf11b65add0ff59c9c876d99"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:d58a54d6ff08d2547656356eea8572b224e6f9bbc0cf55fa9966bcaac4ddfb10"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7673a76772bda15d0d10d1aa881b7911d0580c980dbd16e59d7ba1422b2d83cd"}, + {file = "aiohttp-3.9.4-cp38-cp38-win32.whl", hash = "sha256:e4370dda04dc8951012f30e1ce7956a0a226ac0714a7b6c389fb2f43f22a250e"}, + {file = "aiohttp-3.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:eb30c4510a691bb87081192a394fb661860e75ca3896c01c6d186febe7c88530"}, + {file = "aiohttp-3.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:84e90494db7df3be5e056f91412f9fa9e611fbe8ce4aaef70647297f5943b276"}, + {file = "aiohttp-3.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7d4845f8501ab28ebfdbeab980a50a273b415cf69e96e4e674d43d86a464df9d"}, + {file = "aiohttp-3.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:69046cd9a2a17245c4ce3c1f1a4ff8c70c7701ef222fce3d1d8435f09042bba1"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b73a06bafc8dcc508420db43b4dd5850e41e69de99009d0351c4f3007960019"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:418bb0038dfafeac923823c2e63226179976c76f981a2aaad0ad5d51f2229bca"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:71a8f241456b6c2668374d5d28398f8e8cdae4cce568aaea54e0f39359cd928d"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:935c369bf8acc2dc26f6eeb5222768aa7c62917c3554f7215f2ead7386b33748"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74e4e48c8752d14ecfb36d2ebb3d76d614320570e14de0a3aa7a726ff150a03c"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:916b0417aeddf2c8c61291238ce25286f391a6acb6f28005dd9ce282bd6311b6"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9b6787b6d0b3518b2ee4cbeadd24a507756ee703adbac1ab6dc7c4434b8c572a"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:221204dbda5ef350e8db6287937621cf75e85778b296c9c52260b522231940ed"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:10afd99b8251022ddf81eaed1d90f5a988e349ee7d779eb429fb07b670751e8c"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2506d9f7a9b91033201be9ffe7d89c6a54150b0578803cce5cb84a943d075bc3"}, + {file = "aiohttp-3.9.4-cp39-cp39-win32.whl", hash = "sha256:e571fdd9efd65e86c6af2f332e0e95dad259bfe6beb5d15b3c3eca3a6eb5d87b"}, + {file = "aiohttp-3.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:7d29dd5319d20aa3b7749719ac9685fbd926f71ac8c77b2477272725f882072d"}, + {file = "aiohttp-3.9.4.tar.gz", hash = "sha256:6ff71ede6d9a5a58cfb7b6fffc83ab5d4a63138276c771ac91ceaaddf5459644"}, +] + +[package.dependencies] +aiosignal = ">=1.1.2" +async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} +attrs = ">=17.3.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +yarl = ">=1.0,<2.0" + +[package.extras] +speedups = ["Brotli", "aiodns", "brotlicffi"] + +[[package]] +name = "aiosignal" +version = "1.3.1" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = false +python-versions = ">=3.7" +files = [ + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, + {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" + +[[package]] +name = "airbyte-cdk" +version = "0.81.6" +description = "A framework for writing Airbyte Connectors." +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "airbyte_cdk-0.81.6-py3-none-any.whl", hash = "sha256:456a301edae4f8c99b77d89aaa2016827c43acfd7f9fb61e7a961760b954695b"}, + {file = "airbyte_cdk-0.81.6.tar.gz", hash = "sha256:3d4e2f3dc8177a109cb70e1d701583c870e2c31168dad7498cd2c84b0cc34637"}, +] + +[package.dependencies] +airbyte-protocol-models = "*" +backoff = "*" +cachetools = "*" +cohere = {version = "4.21", optional = true, markers = "extra == \"vector-db-based\""} +Deprecated = ">=1.2,<1.3" +dpath = ">=2.0.1,<2.1.0" +genson = "1.2.2" +isodate = ">=0.6.1,<0.7.0" +Jinja2 = ">=3.1.2,<3.2.0" +jsonref = ">=0.2,<0.3" +jsonschema = ">=3.2.0,<3.3.0" +langchain = {version = "0.1.16", optional = true, markers = "extra == \"vector-db-based\""} +langchain_core = "0.1.42" +openai = {version = "0.27.9", extras = ["embeddings"], optional = true, markers = "extra == \"vector-db-based\""} +pendulum = "<3.0.0" +pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" +python-dateutil = "*" +PyYAML = ">=6.0.1,<7.0.0" +requests = "*" +requests_cache = "*" +tiktoken = {version = "0.4.0", optional = true, markers = "extra == \"vector-db-based\""} +wcmatch = "8.4" + +[package.extras] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] +vector-db-based = ["cohere (==4.21)", "langchain (==0.1.16)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] + +[[package]] +name = "airbyte-protocol-models" +version = "0.9.0" +description = "Declares the Airbyte Protocol." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte_protocol_models-0.9.0-py3-none-any.whl", hash = "sha256:e972e140b5efd1edad5a338bcae8fdee9fc12545caf2c321e0f61b151c163a9b"}, + {file = "airbyte_protocol_models-0.9.0.tar.gz", hash = "sha256:40b69c33df23fe82d7078e84beb123bd604480e4d73cb277a890fcc92aedc8d2"}, +] + +[package.dependencies] +pydantic = ">=1.9.2,<2.0.0" + +[[package]] +name = "anyio" +version = "4.3.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.8" +files = [ + {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, + {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} + +[package.extras] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.23)"] + +[[package]] +name = "async-timeout" +version = "4.0.3" +description = "Timeout context manager for asyncio programs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, +] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "bracex" +version = "2.4" +description = "Bash style brace expander." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + +[[package]] +name = "cachetools" +version = "5.3.3" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "cohere" +version = "4.21" +description = "" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "cohere-4.21-py3-none-any.whl", hash = "sha256:5eb81db62e78b3156e734421cc3e657054f9d9f1d68b9f38cf48fe3a8ae40dbc"}, + {file = "cohere-4.21.tar.gz", hash = "sha256:f611438f409dfc5d5a0a153a585349f5a80b169c7102b5994d9999ecf8440866"}, +] + +[package.dependencies] +aiohttp = ">=3.0,<4.0" +backoff = ">=2.0,<3.0" +fastavro = {version = "1.8.2", markers = "python_version >= \"3.8\""} +importlib_metadata = ">=6.0,<7.0" +requests = ">=2.25.0,<3.0.0" +urllib3 = ">=1.26,<3" + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coloredlogs" +version = "15.0.1" +description = "Colored terminal output for Python's logging module" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934"}, + {file = "coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0"}, +] + +[package.dependencies] +humanfriendly = ">=9.1" + +[package.extras] +cron = ["capturer (>=2.4)"] + +[[package]] +name = "contourpy" +version = "1.2.1" +description = "Python library for calculating contours of 2D quadrilateral grids" +optional = false +python-versions = ">=3.9" +files = [ + {file = "contourpy-1.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bd7c23df857d488f418439686d3b10ae2fbf9bc256cd045b37a8c16575ea1040"}, + {file = "contourpy-1.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5b9eb0ca724a241683c9685a484da9d35c872fd42756574a7cfbf58af26677fd"}, + {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c75507d0a55378240f781599c30e7776674dbaf883a46d1c90f37e563453480"}, + {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11959f0ce4a6f7b76ec578576a0b61a28bdc0696194b6347ba3f1c53827178b9"}, + {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eb3315a8a236ee19b6df481fc5f997436e8ade24a9f03dfdc6bd490fea20c6da"}, + {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39f3ecaf76cd98e802f094e0d4fbc6dc9c45a8d0c4d185f0f6c2234e14e5f75b"}, + {file = "contourpy-1.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:94b34f32646ca0414237168d68a9157cb3889f06b096612afdd296003fdd32fd"}, + {file = "contourpy-1.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:457499c79fa84593f22454bbd27670227874cd2ff5d6c84e60575c8b50a69619"}, + {file = "contourpy-1.2.1-cp310-cp310-win32.whl", hash = "sha256:ac58bdee53cbeba2ecad824fa8159493f0bf3b8ea4e93feb06c9a465d6c87da8"}, + {file = "contourpy-1.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:9cffe0f850e89d7c0012a1fb8730f75edd4320a0a731ed0c183904fe6ecfc3a9"}, + {file = "contourpy-1.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6022cecf8f44e36af10bd9118ca71f371078b4c168b6e0fab43d4a889985dbb5"}, + {file = "contourpy-1.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ef5adb9a3b1d0c645ff694f9bca7702ec2c70f4d734f9922ea34de02294fdf72"}, + {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6150ffa5c767bc6332df27157d95442c379b7dce3a38dff89c0f39b63275696f"}, + {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c863140fafc615c14a4bf4efd0f4425c02230eb8ef02784c9a156461e62c965"}, + {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:00e5388f71c1a0610e6fe56b5c44ab7ba14165cdd6d695429c5cd94021e390b2"}, + {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4492d82b3bc7fbb7e3610747b159869468079fe149ec5c4d771fa1f614a14df"}, + {file = "contourpy-1.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:49e70d111fee47284d9dd867c9bb9a7058a3c617274900780c43e38d90fe1205"}, + {file = "contourpy-1.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b59c0ffceff8d4d3996a45f2bb6f4c207f94684a96bf3d9728dbb77428dd8cb8"}, + {file = "contourpy-1.2.1-cp311-cp311-win32.whl", hash = "sha256:7b4182299f251060996af5249c286bae9361fa8c6a9cda5efc29fe8bfd6062ec"}, + {file = "contourpy-1.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2855c8b0b55958265e8b5888d6a615ba02883b225f2227461aa9127c578a4922"}, + {file = "contourpy-1.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:62828cada4a2b850dbef89c81f5a33741898b305db244904de418cc957ff05dc"}, + {file = "contourpy-1.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:309be79c0a354afff9ff7da4aaed7c3257e77edf6c1b448a779329431ee79d7e"}, + {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e785e0f2ef0d567099b9ff92cbfb958d71c2d5b9259981cd9bee81bd194c9a4"}, + {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1cac0a8f71a041aa587410424ad46dfa6a11f6149ceb219ce7dd48f6b02b87a7"}, + {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af3f4485884750dddd9c25cb7e3915d83c2db92488b38ccb77dd594eac84c4a0"}, + {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ce6889abac9a42afd07a562c2d6d4b2b7134f83f18571d859b25624a331c90b"}, + {file = "contourpy-1.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a1eea9aecf761c661d096d39ed9026574de8adb2ae1c5bd7b33558af884fb2ce"}, + {file = "contourpy-1.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:187fa1d4c6acc06adb0fae5544c59898ad781409e61a926ac7e84b8f276dcef4"}, + {file = "contourpy-1.2.1-cp312-cp312-win32.whl", hash = "sha256:c2528d60e398c7c4c799d56f907664673a807635b857df18f7ae64d3e6ce2d9f"}, + {file = "contourpy-1.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:1a07fc092a4088ee952ddae19a2b2a85757b923217b7eed584fdf25f53a6e7ce"}, + {file = "contourpy-1.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bb6834cbd983b19f06908b45bfc2dad6ac9479ae04abe923a275b5f48f1a186b"}, + {file = "contourpy-1.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1d59e739ab0e3520e62a26c60707cc3ab0365d2f8fecea74bfe4de72dc56388f"}, + {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd3db01f59fdcbce5b22afad19e390260d6d0222f35a1023d9adc5690a889364"}, + {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a12a813949e5066148712a0626895c26b2578874e4cc63160bb007e6df3436fe"}, + {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe0ccca550bb8e5abc22f530ec0466136379c01321fd94f30a22231e8a48d985"}, + {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1d59258c3c67c865435d8fbeb35f8c59b8bef3d6f46c1f29f6123556af28445"}, + {file = "contourpy-1.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f32c38afb74bd98ce26de7cc74a67b40afb7b05aae7b42924ea990d51e4dac02"}, + {file = "contourpy-1.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d31a63bc6e6d87f77d71e1abbd7387ab817a66733734883d1fc0021ed9bfa083"}, + {file = "contourpy-1.2.1-cp39-cp39-win32.whl", hash = "sha256:ddcb8581510311e13421b1f544403c16e901c4e8f09083c881fab2be80ee31ba"}, + {file = "contourpy-1.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:10a37ae557aabf2509c79715cd20b62e4c7c28b8cd62dd7d99e5ed3ce28c3fd9"}, + {file = "contourpy-1.2.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a31f94983fecbac95e58388210427d68cd30fe8a36927980fab9c20062645609"}, + {file = "contourpy-1.2.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef2b055471c0eb466033760a521efb9d8a32b99ab907fc8358481a1dd29e3bd3"}, + {file = "contourpy-1.2.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b33d2bc4f69caedcd0a275329eb2198f560b325605810895627be5d4b876bf7f"}, + {file = "contourpy-1.2.1.tar.gz", hash = "sha256:4d8908b3bee1c889e547867ca4cdc54e5ab6be6d3e078556814a22457f49423c"}, +] + +[package.dependencies] +numpy = ">=1.20" + +[package.extras] +bokeh = ["bokeh", "selenium"] +docs = ["furo", "sphinx (>=7.2)", "sphinx-copybutton"] +mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.8.0)", "types-Pillow"] +test = ["Pillow", "contourpy[test-no-images]", "matplotlib"] +test-no-images = ["pytest", "pytest-cov", "pytest-xdist", "wurlitzer"] + +[[package]] +name = "cycler" +version = "0.12.1" +description = "Composable style cycles" +optional = false +python-versions = ">=3.8" +files = [ + {file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"}, + {file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"}, +] + +[package.extras] +docs = ["ipython", "matplotlib", "numpydoc", "sphinx"] +tests = ["pytest", "pytest-cov", "pytest-xdist"] + +[[package]] +name = "dataclasses-json" +version = "0.6.4" +description = "Easily serialize dataclasses to and from JSON." +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "dataclasses_json-0.6.4-py3-none-any.whl", hash = "sha256:f90578b8a3177f7552f4e1a6e535e84293cd5da421fcce0642d49c0d7bdf8df2"}, + {file = "dataclasses_json-0.6.4.tar.gz", hash = "sha256:73696ebf24936560cca79a2430cbc4f3dd23ac7bf46ed17f38e5e5e7657a6377"}, +] + +[package.dependencies] +marshmallow = ">=3.18.0,<4.0.0" +typing-inspect = ">=0.4.0,<1" + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "dpath" +version = "2.0.8" +description = "Filesystem-like pathing and searching for dictionaries" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, + {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, +] + +[[package]] +name = "et-xmlfile" +version = "1.1.0" +description = "An implementation of lxml.xmlfile for the standard library" +optional = false +python-versions = ">=3.6" +files = [ + {file = "et_xmlfile-1.1.0-py3-none-any.whl", hash = "sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"}, + {file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "fastavro" +version = "1.8.2" +description = "Fast read/write of AVRO files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fastavro-1.8.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:0e08964b2e9a455d831f2557402a683d4c4d45206f2ab9ade7c69d3dc14e0e58"}, + {file = "fastavro-1.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:401a70b1e5c7161420c6019e0c8afa88f7c8a373468591f5ec37639a903c2509"}, + {file = "fastavro-1.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef1ed3eaa4240c05698d02d8d0c010b9a03780eda37b492da6cd4c9d37e04ec"}, + {file = "fastavro-1.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:543185a672ff6306beb329b57a7b8a3a2dd1eb21a5ccc530150623d58d48bb98"}, + {file = "fastavro-1.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ffbf8bae1edb50fe7beeffc3afa8e684686550c2e5d31bf01c25cfa213f581e1"}, + {file = "fastavro-1.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:bb545eb9d876bc7b785e27e98e7720ada7eee7d7a1729798d2ed51517f13500a"}, + {file = "fastavro-1.8.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2b837d3038c651046252bc92c1b9899bf21c7927a148a1ff89599c36c2a331ca"}, + {file = "fastavro-1.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3510e96c0a47e4e914bd1a29c954eb662bfa24849ad92e597cb97cc79f21af7"}, + {file = "fastavro-1.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccc0e74f2c2ab357f39bb73d67fcdb6dc10e23fdbbd399326139f72ec0fb99a3"}, + {file = "fastavro-1.8.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:add51c70d0ab1175601c75cd687bbe9d16ae312cd8899b907aafe0d79ee2bc1d"}, + {file = "fastavro-1.8.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d9e2662f57e6453e9a2c9fb4f54b2a9e62e3e46f5a412ac00558112336d23883"}, + {file = "fastavro-1.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:fea75cf53a93c56dd56e68abce8d314ef877b27451c870cd7ede7582d34c08a7"}, + {file = "fastavro-1.8.2-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:f489020bb8664c2737c03457ad5dbd490579ddab6f0a7b5c17fecfe982715a89"}, + {file = "fastavro-1.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a547625c138efd5e61300119241041906ee8cb426fc7aa789900f87af7ed330d"}, + {file = "fastavro-1.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53beb458f30c9ad4aa7bff4a42243ff990ffb713b6ce0cd9b360cbc3d648fe52"}, + {file = "fastavro-1.8.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7b1b2cbd2dd851452306beed0ab9bdaeeab1cc8ad46f84b47cd81eeaff6dd6b8"}, + {file = "fastavro-1.8.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d29e9baee0b2f37ecd09bde3b487cf900431fd548c85be3e4fe1b9a0b2a917f1"}, + {file = "fastavro-1.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:66e132c710663230292bc63e2cb79cf95b16ccb94a5fc99bb63694b24e312fc5"}, + {file = "fastavro-1.8.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:38aca63ce604039bcdf2edd14912d00287bdbf8b76f9aa42b28e6ca0bf950092"}, + {file = "fastavro-1.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9787835f6449ee94713e7993a700432fce3763024791ffa8a58dc91ef9d1f950"}, + {file = "fastavro-1.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:536cb448bc83811056be02749fd9df37a69621678f02597d272970a769e9b40c"}, + {file = "fastavro-1.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e9d5027cf7d9968f8f819958b41bfedb933323ea6d6a0485eefacaa1afd91f54"}, + {file = "fastavro-1.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:792adfc0c80c7f1109e0ab4b0decef20691fdf0a45091d397a0563872eb56d42"}, + {file = "fastavro-1.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:650b22766259f7dd7519dfa4e4658f0e233c319efa130b9cf0c36a500e09cc57"}, + {file = "fastavro-1.8.2.tar.gz", hash = "sha256:ab9d9226d4b66b6b3d0661a57cd45259b0868fed1c0cd4fac95249b9e0973320"}, +] + +[package.extras] +codecs = ["lz4", "python-snappy", "zstandard"] +lz4 = ["lz4"] +snappy = ["python-snappy"] +zstandard = ["zstandard"] + +[[package]] +name = "fastembed" +version = "0.2.6" +description = "Fast, light, accurate library built for retrieval embedding generation" +optional = false +python-versions = "<3.13,>=3.8.0" +files = [ + {file = "fastembed-0.2.6-py3-none-any.whl", hash = "sha256:3e18633291722087abebccccd7fcdffafef643cb22d203370d7fad4fa83c10fb"}, + {file = "fastembed-0.2.6.tar.gz", hash = "sha256:adaed5b46e19cc1bbe5f98f2b3ffecfc4d2a48d27512e28ff5bfe92a42649a66"}, +] + +[package.dependencies] +huggingface-hub = ">=0.20,<0.21" +loguru = ">=0.7.2,<0.8.0" +numpy = {version = ">=1.21", markers = "python_version < \"3.12\""} +onnx = ">=1.15.0,<2.0.0" +onnxruntime = ">=1.17.0,<2.0.0" +requests = ">=2.31,<3.0" +tokenizers = ">=0.15.1,<0.16.0" +tqdm = ">=4.66,<5.0" + +[[package]] +name = "filelock" +version = "3.13.4" +description = "A platform independent file lock." +optional = false +python-versions = ">=3.8" +files = [ + {file = "filelock-3.13.4-py3-none-any.whl", hash = "sha256:404e5e9253aa60ad457cae1be07c0f0ca90a63931200a47d9b6a6af84fd7b45f"}, + {file = "filelock-3.13.4.tar.gz", hash = "sha256:d13f466618bfde72bd2c18255e269f72542c6e70e7bac83a0232d6b1cc5c8cf4"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +typing = ["typing-extensions (>=4.8)"] + +[[package]] +name = "flatbuffers" +version = "24.3.25" +description = "The FlatBuffers serialization format for Python" +optional = false +python-versions = "*" +files = [ + {file = "flatbuffers-24.3.25-py2.py3-none-any.whl", hash = "sha256:8dbdec58f935f3765e4f7f3cf635ac3a77f83568138d6a2311f524ec96364812"}, + {file = "flatbuffers-24.3.25.tar.gz", hash = "sha256:de2ec5b203f21441716617f38443e0a8ebf3d25bf0d9c0bb0ce68fa00ad546a4"}, +] + +[[package]] +name = "fonttools" +version = "4.51.0" +description = "Tools to manipulate font files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fonttools-4.51.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:84d7751f4468dd8cdd03ddada18b8b0857a5beec80bce9f435742abc9a851a74"}, + {file = "fonttools-4.51.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8b4850fa2ef2cfbc1d1f689bc159ef0f45d8d83298c1425838095bf53ef46308"}, + {file = "fonttools-4.51.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5b48a1121117047d82695d276c2af2ee3a24ffe0f502ed581acc2673ecf1037"}, + {file = "fonttools-4.51.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:180194c7fe60c989bb627d7ed5011f2bef1c4d36ecf3ec64daec8302f1ae0716"}, + {file = "fonttools-4.51.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:96a48e137c36be55e68845fc4284533bda2980f8d6f835e26bca79d7e2006438"}, + {file = "fonttools-4.51.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:806e7912c32a657fa39d2d6eb1d3012d35f841387c8fc6cf349ed70b7c340039"}, + {file = "fonttools-4.51.0-cp310-cp310-win32.whl", hash = "sha256:32b17504696f605e9e960647c5f64b35704782a502cc26a37b800b4d69ff3c77"}, + {file = "fonttools-4.51.0-cp310-cp310-win_amd64.whl", hash = "sha256:c7e91abdfae1b5c9e3a543f48ce96013f9a08c6c9668f1e6be0beabf0a569c1b"}, + {file = "fonttools-4.51.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a8feca65bab31479d795b0d16c9a9852902e3a3c0630678efb0b2b7941ea9c74"}, + {file = "fonttools-4.51.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ac27f436e8af7779f0bb4d5425aa3535270494d3bc5459ed27de3f03151e4c2"}, + {file = "fonttools-4.51.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e19bd9e9964a09cd2433a4b100ca7f34e34731e0758e13ba9a1ed6e5468cc0f"}, + {file = "fonttools-4.51.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2b92381f37b39ba2fc98c3a45a9d6383bfc9916a87d66ccb6553f7bdd129097"}, + {file = "fonttools-4.51.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5f6bc991d1610f5c3bbe997b0233cbc234b8e82fa99fc0b2932dc1ca5e5afec0"}, + {file = "fonttools-4.51.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9696fe9f3f0c32e9a321d5268208a7cc9205a52f99b89479d1b035ed54c923f1"}, + {file = "fonttools-4.51.0-cp311-cp311-win32.whl", hash = "sha256:3bee3f3bd9fa1d5ee616ccfd13b27ca605c2b4270e45715bd2883e9504735034"}, + {file = "fonttools-4.51.0-cp311-cp311-win_amd64.whl", hash = "sha256:0f08c901d3866a8905363619e3741c33f0a83a680d92a9f0e575985c2634fcc1"}, + {file = "fonttools-4.51.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4060acc2bfa2d8e98117828a238889f13b6f69d59f4f2d5857eece5277b829ba"}, + {file = "fonttools-4.51.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1250e818b5f8a679ad79660855528120a8f0288f8f30ec88b83db51515411fcc"}, + {file = "fonttools-4.51.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76f1777d8b3386479ffb4a282e74318e730014d86ce60f016908d9801af9ca2a"}, + {file = "fonttools-4.51.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b5ad456813d93b9c4b7ee55302208db2b45324315129d85275c01f5cb7e61a2"}, + {file = "fonttools-4.51.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:68b3fb7775a923be73e739f92f7e8a72725fd333eab24834041365d2278c3671"}, + {file = "fonttools-4.51.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8e2f1a4499e3b5ee82c19b5ee57f0294673125c65b0a1ff3764ea1f9db2f9ef5"}, + {file = "fonttools-4.51.0-cp312-cp312-win32.whl", hash = "sha256:278e50f6b003c6aed19bae2242b364e575bcb16304b53f2b64f6551b9c000e15"}, + {file = "fonttools-4.51.0-cp312-cp312-win_amd64.whl", hash = "sha256:b3c61423f22165541b9403ee39874dcae84cd57a9078b82e1dce8cb06b07fa2e"}, + {file = "fonttools-4.51.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:1621ee57da887c17312acc4b0e7ac30d3a4fb0fec6174b2e3754a74c26bbed1e"}, + {file = "fonttools-4.51.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d9298be7a05bb4801f558522adbe2feea1b0b103d5294ebf24a92dd49b78e5"}, + {file = "fonttools-4.51.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee1af4be1c5afe4c96ca23badd368d8dc75f611887fb0c0dac9f71ee5d6f110e"}, + {file = "fonttools-4.51.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c18b49adc721a7d0b8dfe7c3130c89b8704baf599fb396396d07d4aa69b824a1"}, + {file = "fonttools-4.51.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de7c29bdbdd35811f14493ffd2534b88f0ce1b9065316433b22d63ca1cd21f14"}, + {file = "fonttools-4.51.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cadf4e12a608ef1d13e039864f484c8a968840afa0258b0b843a0556497ea9ed"}, + {file = "fonttools-4.51.0-cp38-cp38-win32.whl", hash = "sha256:aefa011207ed36cd280babfaa8510b8176f1a77261833e895a9d96e57e44802f"}, + {file = "fonttools-4.51.0-cp38-cp38-win_amd64.whl", hash = "sha256:865a58b6e60b0938874af0968cd0553bcd88e0b2cb6e588727117bd099eef836"}, + {file = "fonttools-4.51.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:60a3409c9112aec02d5fb546f557bca6efa773dcb32ac147c6baf5f742e6258b"}, + {file = "fonttools-4.51.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f7e89853d8bea103c8e3514b9f9dc86b5b4120afb4583b57eb10dfa5afbe0936"}, + {file = "fonttools-4.51.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56fc244f2585d6c00b9bcc59e6593e646cf095a96fe68d62cd4da53dd1287b55"}, + {file = "fonttools-4.51.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d145976194a5242fdd22df18a1b451481a88071feadf251221af110ca8f00ce"}, + {file = "fonttools-4.51.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5b8cab0c137ca229433570151b5c1fc6af212680b58b15abd797dcdd9dd5051"}, + {file = "fonttools-4.51.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:54dcf21a2f2d06ded676e3c3f9f74b2bafded3a8ff12f0983160b13e9f2fb4a7"}, + {file = "fonttools-4.51.0-cp39-cp39-win32.whl", hash = "sha256:0118ef998a0699a96c7b28457f15546815015a2710a1b23a7bf6c1be60c01636"}, + {file = "fonttools-4.51.0-cp39-cp39-win_amd64.whl", hash = "sha256:599bdb75e220241cedc6faebfafedd7670335d2e29620d207dd0378a4e9ccc5a"}, + {file = "fonttools-4.51.0-py3-none-any.whl", hash = "sha256:15c94eeef6b095831067f72c825eb0e2d48bb4cea0647c1b05c981ecba2bf39f"}, + {file = "fonttools-4.51.0.tar.gz", hash = "sha256:dc0673361331566d7a663d7ce0f6fdcbfbdc1f59c6e3ed1165ad7202ca183c68"}, +] + +[package.extras] +all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "pycairo", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0)", "xattr", "zopfli (>=0.1.4)"] +graphite = ["lz4 (>=1.7.4.2)"] +interpolatable = ["munkres", "pycairo", "scipy"] +lxml = ["lxml (>=4.0)"] +pathops = ["skia-pathops (>=0.5.0)"] +plot = ["matplotlib"] +repacker = ["uharfbuzz (>=0.23.0)"] +symfont = ["sympy"] +type1 = ["xattr"] +ufo = ["fs (>=2.2.0,<3)"] +unicode = ["unicodedata2 (>=15.1.0)"] +woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] + +[[package]] +name = "frozenlist" +version = "1.4.1" +description = "A list-like structure which implements collections.abc.MutableSequence" +optional = false +python-versions = ">=3.8" +files = [ + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, + {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, + {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, + {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, + {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, + {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, + {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, + {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, + {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, + {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, + {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, + {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, + {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, +] + +[[package]] +name = "fsspec" +version = "2024.3.1" +description = "File-system specification" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fsspec-2024.3.1-py3-none-any.whl", hash = "sha256:918d18d41bf73f0e2b261824baeb1b124bcf771767e3a26425cd7dec3332f512"}, + {file = "fsspec-2024.3.1.tar.gz", hash = "sha256:f39780e282d7d117ffb42bb96992f8a90795e4d0fb0f661a70ca39fe9c43ded9"}, +] + +[package.extras] +abfs = ["adlfs"] +adl = ["adlfs"] +arrow = ["pyarrow (>=1)"] +dask = ["dask", "distributed"] +devel = ["pytest", "pytest-cov"] +dropbox = ["dropbox", "dropboxdrivefs", "requests"] +full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"] +fuse = ["fusepy"] +gcs = ["gcsfs"] +git = ["pygit2"] +github = ["requests"] +gs = ["gcsfs"] +gui = ["panel"] +hdfs = ["pyarrow (>=1)"] +http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)"] +libarchive = ["libarchive-c"] +oci = ["ocifs"] +s3 = ["s3fs"] +sftp = ["paramiko"] +smb = ["smbprotocol"] +ssh = ["paramiko"] +tqdm = ["tqdm"] + +[[package]] +name = "genson" +version = "1.2.2" +description = "GenSON is a powerful, user-friendly JSON Schema generator." +optional = false +python-versions = "*" +files = [ + {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, +] + +[[package]] +name = "greenlet" +version = "3.0.3" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.7" +files = [ + {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, + {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, + {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, + {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, + {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, + {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, + {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, + {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, + {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, + {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, + {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, + {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, + {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, + {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, + {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, + {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil"] + +[[package]] +name = "grpcio" +version = "1.62.1" +description = "HTTP/2-based RPC framework" +optional = false +python-versions = ">=3.7" +files = [ + {file = "grpcio-1.62.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:179bee6f5ed7b5f618844f760b6acf7e910988de77a4f75b95bbfaa8106f3c1e"}, + {file = "grpcio-1.62.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:48611e4fa010e823ba2de8fd3f77c1322dd60cb0d180dc6630a7e157b205f7ea"}, + {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:b2a0e71b0a2158aa4bce48be9f8f9eb45cbd17c78c7443616d00abbe2a509f6d"}, + {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fbe80577c7880911d3ad65e5ecc997416c98f354efeba2f8d0f9112a67ed65a5"}, + {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58f6c693d446964e3292425e1d16e21a97a48ba9172f2d0df9d7b640acb99243"}, + {file = "grpcio-1.62.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:77c339403db5a20ef4fed02e4d1a9a3d9866bf9c0afc77a42234677313ea22f3"}, + {file = "grpcio-1.62.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b5a4ea906db7dec694098435d84bf2854fe158eb3cd51e1107e571246d4d1d70"}, + {file = "grpcio-1.62.1-cp310-cp310-win32.whl", hash = "sha256:4187201a53f8561c015bc745b81a1b2d278967b8de35f3399b84b0695e281d5f"}, + {file = "grpcio-1.62.1-cp310-cp310-win_amd64.whl", hash = "sha256:844d1f3fb11bd1ed362d3fdc495d0770cfab75761836193af166fee113421d66"}, + {file = "grpcio-1.62.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:833379943d1728a005e44103f17ecd73d058d37d95783eb8f0b28ddc1f54d7b2"}, + {file = "grpcio-1.62.1-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:c7fcc6a32e7b7b58f5a7d27530669337a5d587d4066060bcb9dee7a8c833dfb7"}, + {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:fa7d28eb4d50b7cbe75bb8b45ed0da9a1dc5b219a0af59449676a29c2eed9698"}, + {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48f7135c3de2f298b833be8b4ae20cafe37091634e91f61f5a7eb3d61ec6f660"}, + {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71f11fd63365ade276c9d4a7b7df5c136f9030e3457107e1791b3737a9b9ed6a"}, + {file = "grpcio-1.62.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4b49fd8fe9f9ac23b78437da94c54aa7e9996fbb220bac024a67469ce5d0825f"}, + {file = "grpcio-1.62.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:482ae2ae78679ba9ed5752099b32e5fe580443b4f798e1b71df412abf43375db"}, + {file = "grpcio-1.62.1-cp311-cp311-win32.whl", hash = "sha256:1faa02530b6c7426404372515fe5ddf66e199c2ee613f88f025c6f3bd816450c"}, + {file = "grpcio-1.62.1-cp311-cp311-win_amd64.whl", hash = "sha256:5bd90b8c395f39bc82a5fb32a0173e220e3f401ff697840f4003e15b96d1befc"}, + {file = "grpcio-1.62.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:b134d5d71b4e0837fff574c00e49176051a1c532d26c052a1e43231f252d813b"}, + {file = "grpcio-1.62.1-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:d1f6c96573dc09d50dbcbd91dbf71d5cf97640c9427c32584010fbbd4c0e0037"}, + {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:359f821d4578f80f41909b9ee9b76fb249a21035a061a327f91c953493782c31"}, + {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a485f0c2010c696be269184bdb5ae72781344cb4e60db976c59d84dd6354fac9"}, + {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b50b09b4dc01767163d67e1532f948264167cd27f49e9377e3556c3cba1268e1"}, + {file = "grpcio-1.62.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3227c667dccbe38f2c4d943238b887bac588d97c104815aecc62d2fd976e014b"}, + {file = "grpcio-1.62.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3952b581eb121324853ce2b191dae08badb75cd493cb4e0243368aa9e61cfd41"}, + {file = "grpcio-1.62.1-cp312-cp312-win32.whl", hash = "sha256:83a17b303425104d6329c10eb34bba186ffa67161e63fa6cdae7776ff76df73f"}, + {file = "grpcio-1.62.1-cp312-cp312-win_amd64.whl", hash = "sha256:6696ffe440333a19d8d128e88d440f91fb92c75a80ce4b44d55800e656a3ef1d"}, + {file = "grpcio-1.62.1-cp37-cp37m-linux_armv7l.whl", hash = "sha256:e3393b0823f938253370ebef033c9fd23d27f3eae8eb9a8f6264900c7ea3fb5a"}, + {file = "grpcio-1.62.1-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:83e7ccb85a74beaeae2634f10eb858a0ed1a63081172649ff4261f929bacfd22"}, + {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:882020c87999d54667a284c7ddf065b359bd00251fcd70279ac486776dbf84ec"}, + {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a10383035e864f386fe096fed5c47d27a2bf7173c56a6e26cffaaa5a361addb1"}, + {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:960edebedc6b9ada1ef58e1c71156f28689978188cd8cff3b646b57288a927d9"}, + {file = "grpcio-1.62.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:23e2e04b83f347d0aadde0c9b616f4726c3d76db04b438fd3904b289a725267f"}, + {file = "grpcio-1.62.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:978121758711916d34fe57c1f75b79cdfc73952f1481bb9583399331682d36f7"}, + {file = "grpcio-1.62.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9084086190cc6d628f282e5615f987288b95457292e969b9205e45b442276407"}, + {file = "grpcio-1.62.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:22bccdd7b23c420a27fd28540fb5dcbc97dc6be105f7698cb0e7d7a420d0e362"}, + {file = "grpcio-1.62.1-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:8999bf1b57172dbc7c3e4bb3c732658e918f5c333b2942243f10d0d653953ba9"}, + {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:d9e52558b8b8c2f4ac05ac86344a7417ccdd2b460a59616de49eb6933b07a0bd"}, + {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1714e7bc935780bc3de1b3fcbc7674209adf5208ff825799d579ffd6cd0bd505"}, + {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8842ccbd8c0e253c1f189088228f9b433f7a93b7196b9e5b6f87dba393f5d5d"}, + {file = "grpcio-1.62.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1f1e7b36bdff50103af95a80923bf1853f6823dd62f2d2a2524b66ed74103e49"}, + {file = "grpcio-1.62.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bba97b8e8883a8038606480d6b6772289f4c907f6ba780fa1f7b7da7dfd76f06"}, + {file = "grpcio-1.62.1-cp38-cp38-win32.whl", hash = "sha256:a7f615270fe534548112a74e790cd9d4f5509d744dd718cd442bf016626c22e4"}, + {file = "grpcio-1.62.1-cp38-cp38-win_amd64.whl", hash = "sha256:e6c8c8693df718c5ecbc7babb12c69a4e3677fd11de8886f05ab22d4e6b1c43b"}, + {file = "grpcio-1.62.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:73db2dc1b201d20ab7083e7041946910bb991e7e9761a0394bbc3c2632326483"}, + {file = "grpcio-1.62.1-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:407b26b7f7bbd4f4751dbc9767a1f0716f9fe72d3d7e96bb3ccfc4aace07c8de"}, + {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:f8de7c8cef9261a2d0a62edf2ccea3d741a523c6b8a6477a340a1f2e417658de"}, + {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd5c8a1af40ec305d001c60236308a67e25419003e9bb3ebfab5695a8d0b369"}, + {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be0477cb31da67846a33b1a75c611f88bfbcd427fe17701b6317aefceee1b96f"}, + {file = "grpcio-1.62.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:60dcd824df166ba266ee0cfaf35a31406cd16ef602b49f5d4dfb21f014b0dedd"}, + {file = "grpcio-1.62.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:973c49086cabab773525f6077f95e5a993bfc03ba8fc32e32f2c279497780585"}, + {file = "grpcio-1.62.1-cp39-cp39-win32.whl", hash = "sha256:12859468e8918d3bd243d213cd6fd6ab07208195dc140763c00dfe901ce1e1b4"}, + {file = "grpcio-1.62.1-cp39-cp39-win_amd64.whl", hash = "sha256:b7209117bbeebdfa5d898205cc55153a51285757902dd73c47de498ad4d11332"}, + {file = "grpcio-1.62.1.tar.gz", hash = "sha256:6c455e008fa86d9e9a9d85bb76da4277c0d7d9668a3bfa70dbe86e9f3c759947"}, +] + +[package.extras] +protobuf = ["grpcio-tools (>=1.62.1)"] + +[[package]] +name = "grpcio-tools" +version = "1.62.1" +description = "Protobuf code generator for gRPC" +optional = false +python-versions = ">=3.7" +files = [ + {file = "grpcio-tools-1.62.1.tar.gz", hash = "sha256:a4991e5ee8a97ab791296d3bf7e8700b1445635cc1828cc98df945ca1802d7f2"}, + {file = "grpcio_tools-1.62.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:f2b404bcae7e2ef9b0b9803b2a95119eb7507e6dc80ea4a64a78be052c30cebc"}, + {file = "grpcio_tools-1.62.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:fdd987a580b4474769adfd40144486f54bcc73838d5ec5d3647a17883ea78e76"}, + {file = "grpcio_tools-1.62.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:07af1a6442e2313cff22af93c2c4dd37ae32b5239b38e0d99e2cbf93de65429f"}, + {file = "grpcio_tools-1.62.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:41384c9ee18e61ef20cad2774ef71bd8854b63efce263b5177aa06fccb84df1f"}, + {file = "grpcio_tools-1.62.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c38006f7702d2ff52122e4c77a47348709374050c76216e84b30a9f06e45afa"}, + {file = "grpcio_tools-1.62.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:08fecc3c5b4e6dd3278f2b9d12837e423c7dcff551ca1e587018b4a0fc5f8019"}, + {file = "grpcio_tools-1.62.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a01e8dcd0f041f6fa6d815c54a2017d032950e310c41d514a8bc041e872c4d12"}, + {file = "grpcio_tools-1.62.1-cp310-cp310-win32.whl", hash = "sha256:dd933b8e0b3c13fe3543d58f849a6a5e0d7987688cb6801834278378c724f695"}, + {file = "grpcio_tools-1.62.1-cp310-cp310-win_amd64.whl", hash = "sha256:2b04844a9382f1bde4b4174e476e654ab3976168d2469cb4b29e352f4f35a5aa"}, + {file = "grpcio_tools-1.62.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:024380536ba71a96cdf736f0954f6ad03f5da609c09edbcc2ca02fdd639e0eed"}, + {file = "grpcio_tools-1.62.1-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:21f14b99e0cd38ad56754cc0b62b2bf3cf75f9f7fc40647da54669e0da0726fe"}, + {file = "grpcio_tools-1.62.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:975ac5fb482c23f3608c16e06a43c8bab4d79c2e2564cdbc25cf753c6e998775"}, + {file = "grpcio_tools-1.62.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50739aaab0c8076ad5957204e71f2e0c9876e11fd8338f7f09de12c2d75163c5"}, + {file = "grpcio_tools-1.62.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:598c54318f0326cf5020aa43fc95a15e933aba4a71943d3bff2677d2d21ddfa1"}, + {file = "grpcio_tools-1.62.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f309bdb33a61f8e049480d41498ee2e525cfb5e959958b326abfdf552bf9b9cb"}, + {file = "grpcio_tools-1.62.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f358effd3c11d66c150e0227f983d54a5cd30e14038566dadcf25f9f6844e6e8"}, + {file = "grpcio_tools-1.62.1-cp311-cp311-win32.whl", hash = "sha256:b76aead9b73f1650a091870fe4e9ed15ac4d8ed136f962042367255199c23594"}, + {file = "grpcio_tools-1.62.1-cp311-cp311-win_amd64.whl", hash = "sha256:d66a5d47eaa427039752fa0a83a425ff2a487b6a0ac30556fd3be2f3a27a0130"}, + {file = "grpcio_tools-1.62.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:575535d039b97d63e6a9abee626d6c7cd47bd8cb73dd00a5c84a98254a2164a4"}, + {file = "grpcio_tools-1.62.1-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:22644c90e43d1a888477899af917979e17364fdd6e9bbb92679cd6a54c4d36c3"}, + {file = "grpcio_tools-1.62.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:156d3e1b227c16e903003a56881dbe60e40f2b4bd66f0bc3b27c53e466e6384d"}, + {file = "grpcio_tools-1.62.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ad7c5691625a85327e5b683443baf73ae790fd5afc938252041ed5cd665e377"}, + {file = "grpcio_tools-1.62.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e140bbc08eea8abf51c0274f45fb1e8350220e64758998d7f3c7f985a0b2496"}, + {file = "grpcio_tools-1.62.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:7444fcab861911525470d398e5638b70d5cbea3b4674a3de92b5c58c5c515d4d"}, + {file = "grpcio_tools-1.62.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e643cd14a5d1e59865cba68a5a6f0175d987f36c5f4cb0db80dee9ed60b4c174"}, + {file = "grpcio_tools-1.62.1-cp312-cp312-win32.whl", hash = "sha256:1344a773d2caa9bb7fbea7e879b84f33740c808c34a5bd2a2768e526117a6b44"}, + {file = "grpcio_tools-1.62.1-cp312-cp312-win_amd64.whl", hash = "sha256:2eea1db3748b2f37b4dce84d8e0c15d9bc811094807cabafe7b0ea47f424dfd5"}, + {file = "grpcio_tools-1.62.1-cp37-cp37m-linux_armv7l.whl", hash = "sha256:45d2e6cf04d27286b6f73e6e20ba3f0a1f6d8f5535e5dcb1356200419bb457f4"}, + {file = "grpcio_tools-1.62.1-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:46ae58e6926773e7315e9005f0f17aacedbc0895a8752bec087d24efa2f1fb21"}, + {file = "grpcio_tools-1.62.1-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:4c28086df31478023a36f45e50767872ab3aed2419afff09814cb61c88b77db4"}, + {file = "grpcio_tools-1.62.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4fba5b339f4797548591036c9481e6895bf920fab7d3dc664d2697f8fb7c0bf"}, + {file = "grpcio_tools-1.62.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23eb3d47f78f509fcd201749b1f1e44b76f447913f7fbb3b8bae20f109086295"}, + {file = "grpcio_tools-1.62.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:fd5d47707bd6bc2b707ece765c362d2a1d2e8f6cd92b04c99fab49a929f3610c"}, + {file = "grpcio_tools-1.62.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d1924a6a943df7c73b9ef0048302327c75962b567451479710da729ead241228"}, + {file = "grpcio_tools-1.62.1-cp37-cp37m-win_amd64.whl", hash = "sha256:fe71ca30aabe42591e84ecb9694c0297dc699cc20c5b24d2cb267fb0fc01f947"}, + {file = "grpcio_tools-1.62.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:1819fd055c1ae672d1d725ec75eefd1f700c18acba0ed9332202be31d69c401d"}, + {file = "grpcio_tools-1.62.1-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:5dbe1f7481dd14b6d477b4bace96d275090bc7636b9883975a08b802c94e7b78"}, + {file = "grpcio_tools-1.62.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:771c051c5ece27ad03e4f2e33624a925f0ad636c01757ab7dbb04a37964af4ba"}, + {file = "grpcio_tools-1.62.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:98209c438b38b6f1276dbc27b1c04e346a75bfaafe72a25a548f2dc5ce71d226"}, + {file = "grpcio_tools-1.62.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2152308e5321cb90fb45aaa84d03d6dedb19735a8779aaf36c624f97b831842d"}, + {file = "grpcio_tools-1.62.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ed1f27dc2b2262c8b8d9036276619c1bb18791311c16ccbf1f31b660f2aad7cf"}, + {file = "grpcio_tools-1.62.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2744947b6c5e907af21133431809ccca535a037356864e32c122efed8cb9de1f"}, + {file = "grpcio_tools-1.62.1-cp38-cp38-win32.whl", hash = "sha256:13b20e269d14ad629ff9a2c9a2450f3dbb119d5948de63b27ffe624fa7aea85a"}, + {file = "grpcio_tools-1.62.1-cp38-cp38-win_amd64.whl", hash = "sha256:999823758e9eacd0095863d06cd6d388be769f80c9abb65cdb11c4f2cfce3fea"}, + {file = "grpcio_tools-1.62.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:941f8a5c31986053e75fa466bcfa743c2bf1b513b7978cf1f4ab4e96a8219d27"}, + {file = "grpcio_tools-1.62.1-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:b9c02c88c77ef6057c6cbeea8922d7c2424aabf46bfc40ddf42a32765ba91061"}, + {file = "grpcio_tools-1.62.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:6abd4eb3ccb444383a40156139acc3aaa73745d395139cb6bc8e2a3429e1e627"}, + {file = "grpcio_tools-1.62.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:449503213d142f8470b331a1c2f346f8457f16c7fe20f531bc2500e271f7c14c"}, + {file = "grpcio_tools-1.62.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a11bcf609d00cfc9baed77ab308223cabc1f0b22a05774a26dd4c94c0c80f1f"}, + {file = "grpcio_tools-1.62.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:5d7bdea33354b55acf40bb4dd3ba7324d6f1ef6b4a1a4da0807591f8c7e87b9a"}, + {file = "grpcio_tools-1.62.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d03b645852d605f43003020e78fe6d573cae6ee6b944193e36b8b317e7549a20"}, + {file = "grpcio_tools-1.62.1-cp39-cp39-win32.whl", hash = "sha256:52b185dfc3bf32e70929310367dbc66185afba60492a6a75a9b1141d407e160c"}, + {file = "grpcio_tools-1.62.1-cp39-cp39-win_amd64.whl", hash = "sha256:63a273b70896d3640b7a883eb4a080c3c263d91662d870a2e9c84b7bbd978e7b"}, +] + +[package.dependencies] +grpcio = ">=1.62.1" +protobuf = ">=4.21.6,<5.0dev" +setuptools = "*" + +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "h2" +version = "4.1.0" +description = "HTTP/2 State-Machine based protocol implementation" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "h2-4.1.0-py3-none-any.whl", hash = "sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d"}, + {file = "h2-4.1.0.tar.gz", hash = "sha256:a83aca08fbe7aacb79fec788c9c0bac936343560ed9ec18b82a13a12c28d2abb"}, +] + +[package.dependencies] +hpack = ">=4.0,<5" +hyperframe = ">=6.0,<7" + +[[package]] +name = "hpack" +version = "4.0.0" +description = "Pure-Python HPACK header compression" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "hpack-4.0.0-py3-none-any.whl", hash = "sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c"}, + {file = "hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"}, +] + +[[package]] +name = "httpcore" +version = "1.0.5" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, + {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<0.26.0)"] + +[[package]] +name = "httpx" +version = "0.27.0" +description = "The next generation HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, + {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +h2 = {version = ">=3,<5", optional = true, markers = "extra == \"http2\""} +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] + +[[package]] +name = "huggingface-hub" +version = "0.20.3" +description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "huggingface_hub-0.20.3-py3-none-any.whl", hash = "sha256:d988ae4f00d3e307b0c80c6a05ca6dbb7edba8bba3079f74cda7d9c2e562a7b6"}, + {file = "huggingface_hub-0.20.3.tar.gz", hash = "sha256:94e7f8e074475fbc67d6a71957b678e1b4a74ff1b64a644fd6cbb83da962d05d"}, +] + +[package.dependencies] +filelock = "*" +fsspec = ">=2023.5.0" +packaging = ">=20.9" +pyyaml = ">=5.1" +requests = "*" +tqdm = ">=4.42.1" +typing-extensions = ">=3.7.4.3" + +[package.extras] +all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "mypy (==1.5.1)", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.1.3)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +cli = ["InquirerPy (==0.3.4)"] +dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "mypy (==1.5.1)", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.1.3)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] +inference = ["aiohttp", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)"] +quality = ["mypy (==1.5.1)", "ruff (>=0.1.3)"] +tensorflow = ["graphviz", "pydot", "tensorflow"] +testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] +torch = ["torch"] +typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"] + +[[package]] +name = "humanfriendly" +version = "10.0" +description = "Human friendly output for text interfaces using Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477"}, + {file = "humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc"}, +] + +[package.dependencies] +pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_version >= \"3.8\""} + +[[package]] +name = "hyperframe" +version = "6.0.1" +description = "HTTP/2 framing layer for Python" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15"}, + {file = "hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914"}, +] + +[[package]] +name = "idna" +version = "3.7" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, +] + +[[package]] +name = "importlib-metadata" +version = "6.11.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-6.11.0-py3-none-any.whl", hash = "sha256:f0afba6205ad8f8947c7d338b5342d5db2afbfd82f9cbef7879a9539cc12eb9b"}, + {file = "importlib_metadata-6.11.0.tar.gz", hash = "sha256:1231cf92d825c9e03cfc4da076a16de6422c863558229ea0b22b675657463443"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] + +[[package]] +name = "importlib-resources" +version = "6.4.0" +description = "Read resources from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_resources-6.4.0-py3-none-any.whl", hash = "sha256:50d10f043df931902d4194ea07ec57960f66a80449ff867bfe782b4c486ba78c"}, + {file = "importlib_resources-6.4.0.tar.gz", hash = "sha256:cdb2b453b8046ca4e3798eb1d84f3cce1446a0e8e7b5ef4efb600f19fc398145"}, +] + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["jaraco.test (>=5.4)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "joblib" +version = "1.4.0" +description = "Lightweight pipelining with Python functions" +optional = false +python-versions = ">=3.8" +files = [ + {file = "joblib-1.4.0-py3-none-any.whl", hash = "sha256:42942470d4062537be4d54c83511186da1fc14ba354961a2114da91efa9a4ed7"}, + {file = "joblib-1.4.0.tar.gz", hash = "sha256:1eb0dc091919cd384490de890cb5dfd538410a6d4b3b54eef09fb8c50b409b1c"}, +] + +[[package]] +name = "jsonpatch" +version = "1.33" +description = "Apply JSON-Patches (RFC 6902)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, + {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, +] + +[package.dependencies] +jsonpointer = ">=1.9" + +[[package]] +name = "jsonpointer" +version = "2.4" +description = "Identify specific nodes in a JSON document (RFC 6901)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, + {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, +] + +[[package]] +name = "jsonref" +version = "0.2" +description = "An implementation of JSON Reference for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, +] + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0" +setuptools = "*" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] + +[[package]] +name = "kiwisolver" +version = "1.4.5" +description = "A fast implementation of the Cassowary constraint solver" +optional = false +python-versions = ">=3.7" +files = [ + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:05703cf211d585109fcd72207a31bb170a0f22144d68298dc5e61b3c946518af"}, + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:146d14bebb7f1dc4d5fbf74f8a6cb15ac42baadee8912eb84ac0b3b2a3dc6ac3"}, + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ef7afcd2d281494c0a9101d5c571970708ad911d028137cd558f02b851c08b4"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9eaa8b117dc8337728e834b9c6e2611f10c79e38f65157c4c38e9400286f5cb1"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ec20916e7b4cbfb1f12380e46486ec4bcbaa91a9c448b97023fde0d5bbf9e4ff"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39b42c68602539407884cf70d6a480a469b93b81b7701378ba5e2328660c847a"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa12042de0171fad672b6c59df69106d20d5596e4f87b5e8f76df757a7c399aa"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a40773c71d7ccdd3798f6489aaac9eee213d566850a9533f8d26332d626b82c"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:19df6e621f6d8b4b9c4d45f40a66839294ff2bb235e64d2178f7522d9170ac5b"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:83d78376d0d4fd884e2c114d0621624b73d2aba4e2788182d286309ebdeed770"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e391b1f0a8a5a10ab3b9bb6afcfd74f2175f24f8975fb87ecae700d1503cdee0"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:852542f9481f4a62dbb5dd99e8ab7aedfeb8fb6342349a181d4036877410f525"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59edc41b24031bc25108e210c0def6f6c2191210492a972d585a06ff246bb79b"}, + {file = "kiwisolver-1.4.5-cp310-cp310-win32.whl", hash = "sha256:a6aa6315319a052b4ee378aa171959c898a6183f15c1e541821c5c59beaa0238"}, + {file = "kiwisolver-1.4.5-cp310-cp310-win_amd64.whl", hash = "sha256:d0ef46024e6a3d79c01ff13801cb19d0cad7fd859b15037aec74315540acc276"}, + {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:11863aa14a51fd6ec28688d76f1735f8f69ab1fabf388851a595d0721af042f5"}, + {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ab3919a9997ab7ef2fbbed0cc99bb28d3c13e6d4b1ad36e97e482558a91be90"}, + {file = "kiwisolver-1.4.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fcc700eadbbccbf6bc1bcb9dbe0786b4b1cb91ca0dcda336eef5c2beed37b797"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dfdd7c0b105af050eb3d64997809dc21da247cf44e63dc73ff0fd20b96be55a9"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76c6a5964640638cdeaa0c359382e5703e9293030fe730018ca06bc2010c4437"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbea0db94288e29afcc4c28afbf3a7ccaf2d7e027489c449cf7e8f83c6346eb9"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ceec1a6bc6cab1d6ff5d06592a91a692f90ec7505d6463a88a52cc0eb58545da"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:040c1aebeda72197ef477a906782b5ab0d387642e93bda547336b8957c61022e"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f91de7223d4c7b793867797bacd1ee53bfe7359bd70d27b7b58a04efbb9436c8"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:faae4860798c31530dd184046a900e652c95513796ef51a12bc086710c2eec4d"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0157420efcb803e71d1b28e2c287518b8808b7cf1ab8af36718fd0a2c453eb0"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:06f54715b7737c2fecdbf140d1afb11a33d59508a47bf11bb38ecf21dc9ab79f"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fdb7adb641a0d13bdcd4ef48e062363d8a9ad4a182ac7647ec88f695e719ae9f"}, + {file = "kiwisolver-1.4.5-cp311-cp311-win32.whl", hash = "sha256:bb86433b1cfe686da83ce32a9d3a8dd308e85c76b60896d58f082136f10bffac"}, + {file = "kiwisolver-1.4.5-cp311-cp311-win_amd64.whl", hash = "sha256:6c08e1312a9cf1074d17b17728d3dfce2a5125b2d791527f33ffbe805200a355"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:32d5cf40c4f7c7b3ca500f8985eb3fb3a7dfc023215e876f207956b5ea26632a"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f846c260f483d1fd217fe5ed7c173fb109efa6b1fc8381c8b7552c5781756192"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5ff5cf3571589b6d13bfbfd6bcd7a3f659e42f96b5fd1c4830c4cf21d4f5ef45"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7269d9e5f1084a653d575c7ec012ff57f0c042258bf5db0954bf551c158466e7"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da802a19d6e15dffe4b0c24b38b3af68e6c1a68e6e1d8f30148c83864f3881db"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3aba7311af82e335dd1e36ffff68aaca609ca6290c2cb6d821a39aa075d8e3ff"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:763773d53f07244148ccac5b084da5adb90bfaee39c197554f01b286cf869228"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2270953c0d8cdab5d422bee7d2007f043473f9d2999631c86a223c9db56cbd16"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d099e745a512f7e3bbe7249ca835f4d357c586d78d79ae8f1dcd4d8adeb9bda9"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:74db36e14a7d1ce0986fa104f7d5637aea5c82ca6326ed0ec5694280942d1162"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e5bab140c309cb3a6ce373a9e71eb7e4873c70c2dda01df6820474f9889d6d4"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0f114aa76dc1b8f636d077979c0ac22e7cd8f3493abbab152f20eb8d3cda71f3"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:88a2df29d4724b9237fc0c6eaf2a1adae0cdc0b3e9f4d8e7dc54b16812d2d81a"}, + {file = "kiwisolver-1.4.5-cp312-cp312-win32.whl", hash = "sha256:72d40b33e834371fd330fb1472ca19d9b8327acb79a5821d4008391db8e29f20"}, + {file = "kiwisolver-1.4.5-cp312-cp312-win_amd64.whl", hash = "sha256:2c5674c4e74d939b9d91dda0fae10597ac7521768fec9e399c70a1f27e2ea2d9"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3a2b053a0ab7a3960c98725cfb0bf5b48ba82f64ec95fe06f1d06c99b552e130"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cd32d6c13807e5c66a7cbb79f90b553642f296ae4518a60d8d76243b0ad2898"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59ec7b7c7e1a61061850d53aaf8e93db63dce0c936db1fda2658b70e4a1be709"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da4cfb373035def307905d05041c1d06d8936452fe89d464743ae7fb8371078b"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2400873bccc260b6ae184b2b8a4fec0e4082d30648eadb7c3d9a13405d861e89"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1b04139c4236a0f3aff534479b58f6f849a8b351e1314826c2d230849ed48985"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:4e66e81a5779b65ac21764c295087de82235597a2293d18d943f8e9e32746265"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7931d8f1f67c4be9ba1dd9c451fb0eeca1a25b89e4d3f89e828fe12a519b782a"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b3f7e75f3015df442238cca659f8baa5f42ce2a8582727981cbfa15fee0ee205"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:bbf1d63eef84b2e8c89011b7f2235b1e0bf7dacc11cac9431fc6468e99ac77fb"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4c380469bd3f970ef677bf2bcba2b6b0b4d5c75e7a020fb863ef75084efad66f"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-win32.whl", hash = "sha256:9408acf3270c4b6baad483865191e3e582b638b1654a007c62e3efe96f09a9a3"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-win_amd64.whl", hash = "sha256:5b94529f9b2591b7af5f3e0e730a4e0a41ea174af35a4fd067775f9bdfeee01a"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:11c7de8f692fc99816e8ac50d1d1aef4f75126eefc33ac79aac02c099fd3db71"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:53abb58632235cd154176ced1ae8f0d29a6657aa1aa9decf50b899b755bc2b93"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:88b9f257ca61b838b6f8094a62418421f87ac2a1069f7e896c36a7d86b5d4c29"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3195782b26fc03aa9c6913d5bad5aeb864bdc372924c093b0f1cebad603dd712"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc579bf0f502e54926519451b920e875f433aceb4624a3646b3252b5caa9e0b6"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a580c91d686376f0f7c295357595c5a026e6cbc3d77b7c36e290201e7c11ecb"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cfe6ab8da05c01ba6fbea630377b5da2cd9bcbc6338510116b01c1bc939a2c18"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d2e5a98f0ec99beb3c10e13b387f8db39106d53993f498b295f0c914328b1333"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a51a263952b1429e429ff236d2f5a21c5125437861baeed77f5e1cc2d2c7c6da"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3edd2fa14e68c9be82c5b16689e8d63d89fe927e56debd6e1dbce7a26a17f81b"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:74d1b44c6cfc897df648cc9fdaa09bc3e7679926e6f96df05775d4fb3946571c"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:76d9289ed3f7501012e05abb8358bbb129149dbd173f1f57a1bf1c22d19ab7cc"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:92dea1ffe3714fa8eb6a314d2b3c773208d865a0e0d35e713ec54eea08a66250"}, + {file = "kiwisolver-1.4.5-cp38-cp38-win32.whl", hash = "sha256:5c90ae8c8d32e472be041e76f9d2f2dbff4d0b0be8bd4041770eddb18cf49a4e"}, + {file = "kiwisolver-1.4.5-cp38-cp38-win_amd64.whl", hash = "sha256:c7940c1dc63eb37a67721b10d703247552416f719c4188c54e04334321351ced"}, + {file = "kiwisolver-1.4.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9407b6a5f0d675e8a827ad8742e1d6b49d9c1a1da5d952a67d50ef5f4170b18d"}, + {file = "kiwisolver-1.4.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15568384086b6df3c65353820a4473575dbad192e35010f622c6ce3eebd57af9"}, + {file = "kiwisolver-1.4.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0dc9db8e79f0036e8173c466d21ef18e1befc02de8bf8aa8dc0813a6dc8a7046"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cdc8a402aaee9a798b50d8b827d7ecf75edc5fb35ea0f91f213ff927c15f4ff0"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6c3bd3cde54cafb87d74d8db50b909705c62b17c2099b8f2e25b461882e544ff"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:955e8513d07a283056b1396e9a57ceddbd272d9252c14f154d450d227606eb54"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:346f5343b9e3f00b8db8ba359350eb124b98c99efd0b408728ac6ebf38173958"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9098e0049e88c6a24ff64545cdfc50807818ba6c1b739cae221bbbcbc58aad3"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:00bd361b903dc4bbf4eb165f24d1acbee754fce22ded24c3d56eec268658a5cf"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7b8b454bac16428b22560d0a1cf0a09875339cab69df61d7805bf48919415901"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f1d072c2eb0ad60d4c183f3fb44ac6f73fb7a8f16a2694a91f988275cbf352f9"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:31a82d498054cac9f6d0b53d02bb85811185bcb477d4b60144f915f3b3126342"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6512cb89e334e4700febbffaaa52761b65b4f5a3cf33f960213d5656cea36a77"}, + {file = "kiwisolver-1.4.5-cp39-cp39-win32.whl", hash = "sha256:9db8ea4c388fdb0f780fe91346fd438657ea602d58348753d9fb265ce1bca67f"}, + {file = "kiwisolver-1.4.5-cp39-cp39-win_amd64.whl", hash = "sha256:59415f46a37f7f2efeec758353dd2eae1b07640d8ca0f0c42548ec4125492635"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5c7b3b3a728dc6faf3fc372ef24f21d1e3cee2ac3e9596691d746e5a536de920"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:620ced262a86244e2be10a676b646f29c34537d0d9cc8eb26c08f53d98013390"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:378a214a1e3bbf5ac4a8708304318b4f890da88c9e6a07699c4ae7174c09a68d"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf7be1207676ac608a50cd08f102f6742dbfc70e8d60c4db1c6897f62f71523"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ba55dce0a9b8ff59495ddd050a0225d58bd0983d09f87cfe2b6aec4f2c1234e4"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fd32ea360bcbb92d28933fc05ed09bffcb1704ba3fc7942e81db0fd4f81a7892"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5e7139af55d1688f8b960ee9ad5adafc4ac17c1c473fe07133ac092310d76544"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dced8146011d2bc2e883f9bd68618b8247387f4bbec46d7392b3c3b032640126"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9bf3325c47b11b2e51bca0824ea217c7cd84491d8ac4eefd1e409705ef092bd"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5794cf59533bc3f1b1c821f7206a3617999db9fbefc345360aafe2e067514929"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e368f200bbc2e4f905b8e71eb38b3c04333bddaa6a2464a6355487b02bb7fb09"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5d706eba36b4c4d5bc6c6377bb6568098765e990cfc21ee16d13963fab7b3e7"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85267bd1aa8880a9c88a8cb71e18d3d64d2751a790e6ca6c27b8ccc724bcd5ad"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:210ef2c3a1f03272649aff1ef992df2e724748918c4bc2d5a90352849eb40bea"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:11d011a7574eb3b82bcc9c1a1d35c1d7075677fdd15de527d91b46bd35e935ee"}, + {file = "kiwisolver-1.4.5.tar.gz", hash = "sha256:e57e563a57fb22a142da34f38acc2fc1a5c864bc29ca1517a88abc963e60d6ec"}, +] + +[[package]] +name = "langchain" +version = "0.1.16" +description = "Building applications with LLMs through composability" +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langchain-0.1.16-py3-none-any.whl", hash = "sha256:bc074cc5e51fad79b9ead1572fc3161918d0f614a6c8f0460543d505ad249ac7"}, + {file = "langchain-0.1.16.tar.gz", hash = "sha256:b6bce78f8c071baa898884accfff15c3d81da2f0dd86c20e2f4c80b41463f49f"}, +] + +[package.dependencies] +aiohttp = ">=3.8.3,<4.0.0" +async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""} +dataclasses-json = ">=0.5.7,<0.7" +jsonpatch = ">=1.33,<2.0" +langchain-community = ">=0.0.32,<0.1" +langchain-core = ">=0.1.42,<0.2.0" +langchain-text-splitters = ">=0.0.1,<0.1" +langsmith = ">=0.1.17,<0.2.0" +numpy = ">=1,<2" +pydantic = ">=1,<3" +PyYAML = ">=5.3" +requests = ">=2,<3" +SQLAlchemy = ">=1.4,<3" +tenacity = ">=8.1.0,<9.0.0" + +[package.extras] +azure = ["azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-textanalytics (>=5.3.0,<6.0.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-core (>=1.26.4,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "azure-search-documents (==11.4.0b8)", "openai (<2)"] +clarifai = ["clarifai (>=9.1.0)"] +cli = ["typer (>=0.9.0,<0.10.0)"] +cohere = ["cohere (>=4,<6)"] +docarray = ["docarray[hnswlib] (>=0.32.0,<0.33.0)"] +embeddings = ["sentence-transformers (>=2,<3)"] +extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<6)", "couchbase (>=4.1.9,<5.0.0)", "dashvector (>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "langchain-openai (>=0.0.2,<0.1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] +javascript = ["esprima (>=4.0.1,<5.0.0)"] +llms = ["clarifai (>=9.1.0)", "cohere (>=4,<6)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (<2)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)"] +openai = ["openai (<2)", "tiktoken (>=0.3.2,<0.6.0)"] +qdrant = ["qdrant-client (>=1.3.1,<2.0.0)"] +text-helpers = ["chardet (>=5.1.0,<6.0.0)"] + +[[package]] +name = "langchain-community" +version = "0.0.32" +description = "Community contributed LangChain integrations." +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langchain_community-0.0.32-py3-none-any.whl", hash = "sha256:406977009999952d0705de3806de2b4867e9bb8eda8ca154a59c7a8ed58da38d"}, + {file = "langchain_community-0.0.32.tar.gz", hash = "sha256:1510217d646c8380f54e9850351f6d2a0b0dd73c501b666c6f4b40baa8160b29"}, +] + +[package.dependencies] +aiohttp = ">=3.8.3,<4.0.0" +dataclasses-json = ">=0.5.7,<0.7" +langchain-core = ">=0.1.41,<0.2.0" +langsmith = ">=0.1.0,<0.2.0" +numpy = ">=1,<2" +PyYAML = ">=5.3" +requests = ">=2,<3" +SQLAlchemy = ">=1.4,<3" +tenacity = ">=8.1.0,<9.0.0" + +[package.extras] +cli = ["typer (>=0.9.0,<0.10.0)"] +extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cloudpickle (>=2.0.0)", "cohere (>=4,<5)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "friendli-client (>=1.2.4,<2.0.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "httpx (>=0.24.1,<0.25.0)", "httpx-sse (>=0.4.0,<0.5.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "nvidia-riva-client (>=2.14.0,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "premai (>=0.3.25,<0.4.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pyjwt (>=2.8.0,<3.0.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "tidb-vector (>=0.0.3,<1.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "tree-sitter (>=0.20.2,<0.21.0)", "tree-sitter-languages (>=1.8.0,<2.0.0)", "upstash-redis (>=0.15.0,<0.16.0)", "vdms (>=0.0.20,<0.0.21)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] + +[[package]] +name = "langchain-core" +version = "0.1.42" +description = "Building applications with LLMs through composability" +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langchain_core-0.1.42-py3-none-any.whl", hash = "sha256:c5653ffa08a44f740295c157a24c0def4a753333f6a2c41f76bf431cd00be8b5"}, + {file = "langchain_core-0.1.42.tar.gz", hash = "sha256:40751bf60ea5d8e2b2efe65290db434717ee3834870c002e40e2811f09d814e6"}, +] + +[package.dependencies] +jsonpatch = ">=1.33,<2.0" +langsmith = ">=0.1.0,<0.2.0" +packaging = ">=23.2,<24.0" +pydantic = ">=1,<3" +PyYAML = ">=5.3" +tenacity = ">=8.1.0,<9.0.0" + +[package.extras] +extended-testing = ["jinja2 (>=3,<4)"] + +[[package]] +name = "langchain-text-splitters" +version = "0.0.1" +description = "LangChain text splitting utilities" +optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "langchain_text_splitters-0.0.1-py3-none-any.whl", hash = "sha256:f5b802f873f5ff6a8b9259ff34d53ed989666ef4e1582e6d1adb3b5520e3839a"}, + {file = "langchain_text_splitters-0.0.1.tar.gz", hash = "sha256:ac459fa98799f5117ad5425a9330b21961321e30bc19a2a2f9f761ddadd62aa1"}, +] + +[package.dependencies] +langchain-core = ">=0.1.28,<0.2.0" + +[package.extras] +extended-testing = ["lxml (>=5.1.0,<6.0.0)"] + +[[package]] +name = "langsmith" +version = "0.1.48" +description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langsmith-0.1.48-py3-none-any.whl", hash = "sha256:2f8967e2aaaed8881efe6f346590681243b315af8ba8a037d969c299d42071d3"}, + {file = "langsmith-0.1.48.tar.gz", hash = "sha256:9cd21cd0928123b2bd2363f03515cb1f6a833d9a9f00420240d5132861d15fcc"}, +] + +[package.dependencies] +orjson = ">=3.9.14,<4.0.0" +pydantic = ">=1,<3" +requests = ">=2,<3" + +[[package]] +name = "loguru" +version = "0.7.2" +description = "Python logging made (stupidly) simple" +optional = false +python-versions = ">=3.5" +files = [ + {file = "loguru-0.7.2-py3-none-any.whl", hash = "sha256:003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb"}, + {file = "loguru-0.7.2.tar.gz", hash = "sha256:e671a53522515f34fd406340ee968cb9ecafbc4b36c679da03c18fd8d0bd51ac"}, +] + +[package.dependencies] +colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} +win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} + +[package.extras] +dev = ["Sphinx (==7.2.5)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptiongroup (==1.1.3)", "freezegun (==1.1.0)", "freezegun (==1.2.2)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v1.4.1)", "mypy (==v1.5.1)", "pre-commit (==3.4.0)", "pytest (==6.1.2)", "pytest (==7.4.0)", "pytest-cov (==2.12.1)", "pytest-cov (==4.1.0)", "pytest-mypy-plugins (==1.9.3)", "pytest-mypy-plugins (==3.0.0)", "sphinx-autobuild (==2021.3.14)", "sphinx-rtd-theme (==1.3.0)", "tox (==3.27.1)", "tox (==4.11.0)"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "marshmallow" +version = "3.21.1" +description = "A lightweight library for converting complex datatypes to and from native Python datatypes." +optional = false +python-versions = ">=3.8" +files = [ + {file = "marshmallow-3.21.1-py3-none-any.whl", hash = "sha256:f085493f79efb0644f270a9bf2892843142d80d7174bbbd2f3713f2a589dc633"}, + {file = "marshmallow-3.21.1.tar.gz", hash = "sha256:4e65e9e0d80fc9e609574b9983cf32579f305c718afb30d7233ab818571768c3"}, +] + +[package.dependencies] +packaging = ">=17.0" + +[package.extras] +dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] +docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.2.6)", "sphinx-issues (==4.0.0)", "sphinx-version-warning (==1.1.2)"] +tests = ["pytest", "pytz", "simplejson"] + +[[package]] +name = "matplotlib" +version = "3.8.4" +description = "Python plotting package" +optional = false +python-versions = ">=3.9" +files = [ + {file = "matplotlib-3.8.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:abc9d838f93583650c35eca41cfcec65b2e7cb50fd486da6f0c49b5e1ed23014"}, + {file = "matplotlib-3.8.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f65c9f002d281a6e904976007b2d46a1ee2bcea3a68a8c12dda24709ddc9106"}, + {file = "matplotlib-3.8.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce1edd9f5383b504dbc26eeea404ed0a00656c526638129028b758fd43fc5f10"}, + {file = "matplotlib-3.8.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ecd79298550cba13a43c340581a3ec9c707bd895a6a061a78fa2524660482fc0"}, + {file = "matplotlib-3.8.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:90df07db7b599fe7035d2f74ab7e438b656528c68ba6bb59b7dc46af39ee48ef"}, + {file = "matplotlib-3.8.4-cp310-cp310-win_amd64.whl", hash = "sha256:ac24233e8f2939ac4fd2919eed1e9c0871eac8057666070e94cbf0b33dd9c338"}, + {file = "matplotlib-3.8.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:72f9322712e4562e792b2961971891b9fbbb0e525011e09ea0d1f416c4645661"}, + {file = "matplotlib-3.8.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:232ce322bfd020a434caaffbd9a95333f7c2491e59cfc014041d95e38ab90d1c"}, + {file = "matplotlib-3.8.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6addbd5b488aedb7f9bc19f91cd87ea476206f45d7116fcfe3d31416702a82fa"}, + {file = "matplotlib-3.8.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc4ccdc64e3039fc303defd119658148f2349239871db72cd74e2eeaa9b80b71"}, + {file = "matplotlib-3.8.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b7a2a253d3b36d90c8993b4620183b55665a429da8357a4f621e78cd48b2b30b"}, + {file = "matplotlib-3.8.4-cp311-cp311-win_amd64.whl", hash = "sha256:8080d5081a86e690d7688ffa542532e87f224c38a6ed71f8fbed34dd1d9fedae"}, + {file = "matplotlib-3.8.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6485ac1f2e84676cff22e693eaa4fbed50ef5dc37173ce1f023daef4687df616"}, + {file = "matplotlib-3.8.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c89ee9314ef48c72fe92ce55c4e95f2f39d70208f9f1d9db4e64079420d8d732"}, + {file = "matplotlib-3.8.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50bac6e4d77e4262c4340d7a985c30912054745ec99756ce213bfbc3cb3808eb"}, + {file = "matplotlib-3.8.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f51c4c869d4b60d769f7b4406eec39596648d9d70246428745a681c327a8ad30"}, + {file = "matplotlib-3.8.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b12ba985837e4899b762b81f5b2845bd1a28f4fdd1a126d9ace64e9c4eb2fb25"}, + {file = "matplotlib-3.8.4-cp312-cp312-win_amd64.whl", hash = "sha256:7a6769f58ce51791b4cb8b4d7642489df347697cd3e23d88266aaaee93b41d9a"}, + {file = "matplotlib-3.8.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:843cbde2f0946dadd8c5c11c6d91847abd18ec76859dc319362a0964493f0ba6"}, + {file = "matplotlib-3.8.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1c13f041a7178f9780fb61cc3a2b10423d5e125480e4be51beaf62b172413b67"}, + {file = "matplotlib-3.8.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb44f53af0a62dc80bba4443d9b27f2fde6acfdac281d95bc872dc148a6509cc"}, + {file = "matplotlib-3.8.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:606e3b90897554c989b1e38a258c626d46c873523de432b1462f295db13de6f9"}, + {file = "matplotlib-3.8.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9bb0189011785ea794ee827b68777db3ca3f93f3e339ea4d920315a0e5a78d54"}, + {file = "matplotlib-3.8.4-cp39-cp39-win_amd64.whl", hash = "sha256:6209e5c9aaccc056e63b547a8152661324404dd92340a6e479b3a7f24b42a5d0"}, + {file = "matplotlib-3.8.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c7064120a59ce6f64103c9cefba8ffe6fba87f2c61d67c401186423c9a20fd35"}, + {file = "matplotlib-3.8.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0e47eda4eb2614300fc7bb4657fced3e83d6334d03da2173b09e447418d499f"}, + {file = "matplotlib-3.8.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:493e9f6aa5819156b58fce42b296ea31969f2aab71c5b680b4ea7a3cb5c07d94"}, + {file = "matplotlib-3.8.4.tar.gz", hash = "sha256:8aac397d5e9ec158960e31c381c5ffc52ddd52bd9a47717e2a694038167dffea"}, +] + +[package.dependencies] +contourpy = ">=1.0.1" +cycler = ">=0.10" +fonttools = ">=4.22.0" +importlib-resources = {version = ">=3.2.0", markers = "python_version < \"3.10\""} +kiwisolver = ">=1.3.1" +numpy = ">=1.21" +packaging = ">=20.0" +pillow = ">=8" +pyparsing = ">=2.3.1" +python-dateutil = ">=2.7" + +[[package]] +name = "mpmath" +version = "1.3.0" +description = "Python library for arbitrary-precision floating-point arithmetic" +optional = false +python-versions = "*" +files = [ + {file = "mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"}, + {file = "mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"}, +] + +[package.extras] +develop = ["codecov", "pycodestyle", "pytest (>=4.6)", "pytest-cov", "wheel"] +docs = ["sphinx"] +gmpy = ["gmpy2 (>=2.1.0a4)"] +tests = ["pytest (>=4.6)"] + +[[package]] +name = "multidict" +version = "6.0.5" +description = "multidict implementation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, + {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, + {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, + {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, + {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, + {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, + {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, + {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, + {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, + {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, + {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, + {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, + {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, + {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, + {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, + {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, +] + +[[package]] +name = "mypy" +version = "1.9.0" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"}, + {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"}, + {file = "mypy-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150"}, + {file = "mypy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374"}, + {file = "mypy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc"}, + {file = "mypy-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129"}, + {file = "mypy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612"}, + {file = "mypy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6"}, + {file = "mypy-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185"}, + {file = "mypy-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913"}, + {file = "mypy-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"}, + {file = "mypy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b"}, + {file = "mypy-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2"}, + {file = "mypy-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e"}, + {file = "mypy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04"}, + {file = "mypy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89"}, + {file = "mypy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02"}, + {file = "mypy-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4"}, + {file = "mypy-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d"}, + {file = "mypy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf"}, + {file = "mypy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9"}, + {file = "mypy-1.9.0-py3-none-any.whl", hash = "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e"}, + {file = "mypy-1.9.0.tar.gz", hash = "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.1.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "numpy" +version = "1.26.4" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, +] + +[[package]] +name = "onnx" +version = "1.16.0" +description = "Open Neural Network Exchange" +optional = false +python-versions = ">=3.8" +files = [ + {file = "onnx-1.16.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:9eadbdce25b19d6216f426d6d99b8bc877a65ed92cbef9707751c6669190ba4f"}, + {file = "onnx-1.16.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:034ae21a2aaa2e9c14119a840d2926d213c27aad29e5e3edaa30145a745048e1"}, + {file = "onnx-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec22a43d74eb1f2303373e2fbe7fbcaa45fb225f4eb146edfed1356ada7a9aea"}, + {file = "onnx-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:298f28a2b5ac09145fa958513d3d1e6b349ccf86a877dbdcccad57713fe360b3"}, + {file = "onnx-1.16.0-cp310-cp310-win32.whl", hash = "sha256:66300197b52beca08bc6262d43c103289c5d45fde43fb51922ed1eb83658cf0c"}, + {file = "onnx-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:ae0029f5e47bf70a1a62e7f88c80bca4ef39b844a89910039184221775df5e43"}, + {file = "onnx-1.16.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:f51179d4af3372b4f3800c558d204b592c61e4b4a18b8f61e0eea7f46211221a"}, + {file = "onnx-1.16.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:5202559070afec5144332db216c20f2fff8323cf7f6512b0ca11b215eacc5bf3"}, + {file = "onnx-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77579e7c15b4df39d29465b216639a5f9b74026bdd9e4b6306cd19a32dcfe67c"}, + {file = "onnx-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e60ca76ac24b65c25860d0f2d2cdd96d6320d062a01dd8ce87c5743603789b8"}, + {file = "onnx-1.16.0-cp311-cp311-win32.whl", hash = "sha256:81b4ee01bc554e8a2b11ac6439882508a5377a1c6b452acd69a1eebb83571117"}, + {file = "onnx-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:7449241e70b847b9c3eb8dae622df8c1b456d11032a9d7e26e0ee8a698d5bf86"}, + {file = "onnx-1.16.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:03a627488b1a9975d95d6a55582af3e14c7f3bb87444725b999935ddd271d352"}, + {file = "onnx-1.16.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:c392faeabd9283ee344ccb4b067d1fea9dfc614fa1f0de7c47589efd79e15e78"}, + {file = "onnx-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0efeb46985de08f0efe758cb54ad3457e821a05c2eaf5ba2ccb8cd1602c08084"}, + {file = "onnx-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddf14a3d32234f23e44abb73a755cb96a423fac7f004e8f046f36b10214151ee"}, + {file = "onnx-1.16.0-cp312-cp312-win32.whl", hash = "sha256:62a2e27ae8ba5fc9b4a2620301446a517b5ffaaf8566611de7a7c2160f5bcf4c"}, + {file = "onnx-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:3e0860fea94efde777e81a6f68f65761ed5e5f3adea2e050d7fbe373a9ae05b3"}, + {file = "onnx-1.16.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:70a90649318f3470985439ea078277c9fb2a2e6e2fd7c8f3f2b279402ad6c7e6"}, + {file = "onnx-1.16.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:71839546b7f93be4fa807995b182ab4b4414c9dbf049fee11eaaced16fcf8df2"}, + {file = "onnx-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7665217c45a61eb44718c8e9349d2ad004efa0cb9fbc4be5c6d5e18b9fe12b52"}, + {file = "onnx-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5752bbbd5717304a7643643dba383a2fb31e8eb0682f4e7b7d141206328a73b"}, + {file = "onnx-1.16.0-cp38-cp38-win32.whl", hash = "sha256:257858cbcb2055284f09fa2ae2b1cfd64f5850367da388d6e7e7b05920a40c90"}, + {file = "onnx-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:209fe84995a28038e29ae8369edd35f33e0ef1ebc3bddbf6584629823469deb1"}, + {file = "onnx-1.16.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:8cf3e518b1b1b960be542e7c62bed4e5219e04c85d540817b7027029537dec92"}, + {file = "onnx-1.16.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:30f02beaf081c7d9fa3a8c566a912fc4408e28fc33b1452d58f890851691d364"}, + {file = "onnx-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fb29a9a692b522deef1f6b8f2145da62c0c43ea1ed5b4c0f66f827fdc28847d"}, + {file = "onnx-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7755cbd5f4e47952e37276ea5978a46fc8346684392315902b5ed4a719d87d06"}, + {file = "onnx-1.16.0-cp39-cp39-win32.whl", hash = "sha256:7532343dc5b8b5e7c3e3efa441a3100552f7600155c4db9120acd7574f64ffbf"}, + {file = "onnx-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:d7886c05aa6d583ec42f6287678923c1e343afc4350e49d5b36a0023772ffa22"}, + {file = "onnx-1.16.0.tar.gz", hash = "sha256:237c6987c6c59d9f44b6136f5819af79574f8d96a760a1fa843bede11f3822f7"}, +] + +[package.dependencies] +numpy = ">=1.20" +protobuf = ">=3.20.2" + +[package.extras] +reference = ["Pillow", "google-re2"] + +[[package]] +name = "onnxruntime" +version = "1.17.3" +description = "ONNX Runtime is a runtime accelerator for Machine Learning models" +optional = false +python-versions = "*" +files = [ + {file = "onnxruntime-1.17.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:d86dde9c0bb435d709e51bd25991c9fe5b9a5b168df45ce119769edc4d198b15"}, + {file = "onnxruntime-1.17.3-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9d87b68bf931ac527b2d3c094ead66bb4381bac4298b65f46c54fe4d1e255865"}, + {file = "onnxruntime-1.17.3-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:26e950cf0333cf114a155f9142e71da344d2b08dfe202763a403ae81cc02ebd1"}, + {file = "onnxruntime-1.17.3-cp310-cp310-win32.whl", hash = "sha256:0962a4d0f5acebf62e1f0bf69b6e0adf16649115d8de854c1460e79972324d68"}, + {file = "onnxruntime-1.17.3-cp310-cp310-win_amd64.whl", hash = "sha256:468ccb8a0faa25c681a41787b1594bf4448b0252d3efc8b62fd8b2411754340f"}, + {file = "onnxruntime-1.17.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e8cd90c1c17d13d47b89ab076471e07fb85467c01dcd87a8b8b5cdfbcb40aa51"}, + {file = "onnxruntime-1.17.3-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a058b39801baefe454eeb8acf3ada298c55a06a4896fafc224c02d79e9037f60"}, + {file = "onnxruntime-1.17.3-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2f823d5eb4807007f3da7b27ca972263df6a1836e6f327384eb266274c53d05d"}, + {file = "onnxruntime-1.17.3-cp311-cp311-win32.whl", hash = "sha256:b66b23f9109e78ff2791628627a26f65cd335dcc5fbd67ff60162733a2f7aded"}, + {file = "onnxruntime-1.17.3-cp311-cp311-win_amd64.whl", hash = "sha256:570760ca53a74cdd751ee49f13de70d1384dcf73d9888b8deac0917023ccda6d"}, + {file = "onnxruntime-1.17.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:77c318178d9c16e9beadd9a4070d8aaa9f57382c3f509b01709f0f010e583b99"}, + {file = "onnxruntime-1.17.3-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:23da8469049b9759082e22c41a444f44a520a9c874b084711b6343672879f50b"}, + {file = "onnxruntime-1.17.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2949730215af3f9289008b2e31e9bbef952012a77035b911c4977edea06f3f9e"}, + {file = "onnxruntime-1.17.3-cp312-cp312-win32.whl", hash = "sha256:6c7555a49008f403fb3b19204671efb94187c5085976ae526cb625f6ede317bc"}, + {file = "onnxruntime-1.17.3-cp312-cp312-win_amd64.whl", hash = "sha256:58672cf20293a1b8a277a5c6c55383359fcdf6119b2f14df6ce3b140f5001c39"}, + {file = "onnxruntime-1.17.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:4395ba86e3c1e93c794a00619ef1aec597ab78f5a5039f3c6d2e9d0695c0a734"}, + {file = "onnxruntime-1.17.3-cp38-cp38-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bdf354c04344ec38564fc22394e1fe08aa6d70d790df00159205a0055c4a4d3f"}, + {file = "onnxruntime-1.17.3-cp38-cp38-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a94b600b7af50e922d44b95a57981e3e35103c6e3693241a03d3ca204740bbda"}, + {file = "onnxruntime-1.17.3-cp38-cp38-win32.whl", hash = "sha256:5a335c76f9c002a8586c7f38bc20fe4b3725ced21f8ead835c3e4e507e42b2ab"}, + {file = "onnxruntime-1.17.3-cp38-cp38-win_amd64.whl", hash = "sha256:8f56a86fbd0ddc8f22696ddeda0677b041381f4168a2ca06f712ef6ec6050d6d"}, + {file = "onnxruntime-1.17.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:e0ae39f5452278cd349520c296e7de3e90d62dc5b0157c6868e2748d7f28b871"}, + {file = "onnxruntime-1.17.3-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ff2dc012bd930578aff5232afd2905bf16620815f36783a941aafabf94b3702"}, + {file = "onnxruntime-1.17.3-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cf6c37483782e4785019b56e26224a25e9b9a35b849d0169ce69189867a22bb1"}, + {file = "onnxruntime-1.17.3-cp39-cp39-win32.whl", hash = "sha256:351bf5a1140dcc43bfb8d3d1a230928ee61fcd54b0ea664c8e9a889a8e3aa515"}, + {file = "onnxruntime-1.17.3-cp39-cp39-win_amd64.whl", hash = "sha256:57a3de15778da8d6cc43fbf6cf038e1e746146300b5f0b1fbf01f6f795dc6440"}, +] + +[package.dependencies] +coloredlogs = "*" +flatbuffers = "*" +numpy = ">=1.21.6" +packaging = "*" +protobuf = "*" +sympy = "*" + +[[package]] +name = "openai" +version = "0.27.9" +description = "Python client library for the OpenAI API" +optional = false +python-versions = ">=3.7.1" +files = [ + {file = "openai-0.27.9-py3-none-any.whl", hash = "sha256:6a3cf8e276d1a6262b50562fbc0cba7967cfebb78ed827d375986b48fdad6475"}, + {file = "openai-0.27.9.tar.gz", hash = "sha256:b687761c82f5ebb6f61efc791b2083d2d068277b94802d4d1369efe39851813d"}, +] + +[package.dependencies] +aiohttp = "*" +matplotlib = {version = "*", optional = true, markers = "extra == \"embeddings\""} +numpy = {version = "*", optional = true, markers = "extra == \"embeddings\""} +openpyxl = {version = ">=3.0.7", optional = true, markers = "extra == \"embeddings\""} +pandas = {version = ">=1.2.3", optional = true, markers = "extra == \"embeddings\""} +pandas-stubs = {version = ">=1.1.0.11", optional = true, markers = "extra == \"embeddings\""} +plotly = {version = "*", optional = true, markers = "extra == \"embeddings\""} +requests = ">=2.20" +scikit-learn = {version = ">=1.0.2", optional = true, markers = "extra == \"embeddings\""} +scipy = {version = "*", optional = true, markers = "extra == \"embeddings\""} +tenacity = {version = ">=8.0.1", optional = true, markers = "extra == \"embeddings\""} +tqdm = "*" + +[package.extras] +datalib = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] +dev = ["black (>=21.6b0,<22.0)", "pytest (==6.*)", "pytest-asyncio", "pytest-mock"] +embeddings = ["matplotlib", "numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "plotly", "scikit-learn (>=1.0.2)", "scipy", "tenacity (>=8.0.1)"] +wandb = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "wandb"] + +[[package]] +name = "openpyxl" +version = "3.1.2" +description = "A Python library to read/write Excel 2010 xlsx/xlsm files" +optional = false +python-versions = ">=3.6" +files = [ + {file = "openpyxl-3.1.2-py2.py3-none-any.whl", hash = "sha256:f91456ead12ab3c6c2e9491cf33ba6d08357d802192379bb482f1033ade496f5"}, + {file = "openpyxl-3.1.2.tar.gz", hash = "sha256:a6f5977418eff3b2d5500d54d9db50c8277a368436f4e4f8ddb1be3422870184"}, +] + +[package.dependencies] +et-xmlfile = "*" + +[[package]] +name = "orjson" +version = "3.10.1" +description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +optional = false +python-versions = ">=3.8" +files = [ + {file = "orjson-3.10.1-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:8ec2fc456d53ea4a47768f622bb709be68acd455b0c6be57e91462259741c4f3"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e900863691d327758be14e2a491931605bd0aded3a21beb6ce133889830b659"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ab6ecbd6fe57785ebc86ee49e183f37d45f91b46fc601380c67c5c5e9c0014a2"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8af7c68b01b876335cccfb4eee0beef2b5b6eae1945d46a09a7c24c9faac7a77"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:915abfb2e528677b488a06eba173e9d7706a20fdfe9cdb15890b74ef9791b85e"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe3fd4a36eff9c63d25503b439531d21828da9def0059c4f472e3845a081aa0b"}, + {file = "orjson-3.10.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d229564e72cfc062e6481a91977a5165c5a0fdce11ddc19ced8471847a67c517"}, + {file = "orjson-3.10.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9e00495b18304173ac843b5c5fbea7b6f7968564d0d49bef06bfaeca4b656f4e"}, + {file = "orjson-3.10.1-cp310-none-win32.whl", hash = "sha256:fd78ec55179545c108174ba19c1795ced548d6cac4d80d014163033c047ca4ea"}, + {file = "orjson-3.10.1-cp310-none-win_amd64.whl", hash = "sha256:50ca42b40d5a442a9e22eece8cf42ba3d7cd4cd0f2f20184b4d7682894f05eec"}, + {file = "orjson-3.10.1-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b345a3d6953628df2f42502297f6c1e1b475cfbf6268013c94c5ac80e8abc04c"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:caa7395ef51af4190d2c70a364e2f42138e0e5fcb4bc08bc9b76997659b27dab"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b01d701decd75ae092e5f36f7b88a1e7a1d3bb7c9b9d7694de850fb155578d5a"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b5028981ba393f443d8fed9049211b979cadc9d0afecf162832f5a5b152c6297"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:31ff6a222ea362b87bf21ff619598a4dc1106aaafaea32b1c4876d692891ec27"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e852a83d7803d3406135fb7a57cf0c1e4a3e73bac80ec621bd32f01c653849c5"}, + {file = "orjson-3.10.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2567bc928ed3c3fcd90998009e8835de7c7dc59aabcf764b8374d36044864f3b"}, + {file = "orjson-3.10.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4ce98cac60b7bb56457bdd2ed7f0d5d7f242d291fdc0ca566c83fa721b52e92d"}, + {file = "orjson-3.10.1-cp311-none-win32.whl", hash = "sha256:813905e111318acb356bb8029014c77b4c647f8b03f314e7b475bd9ce6d1a8ce"}, + {file = "orjson-3.10.1-cp311-none-win_amd64.whl", hash = "sha256:03a3ca0b3ed52bed1a869163a4284e8a7b0be6a0359d521e467cdef7e8e8a3ee"}, + {file = "orjson-3.10.1-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:f02c06cee680b1b3a8727ec26c36f4b3c0c9e2b26339d64471034d16f74f4ef5"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1aa2f127ac546e123283e437cc90b5ecce754a22306c7700b11035dad4ccf85"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2cf29b4b74f585225196944dffdebd549ad2af6da9e80db7115984103fb18a96"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1b130c20b116f413caf6059c651ad32215c28500dce9cd029a334a2d84aa66f"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d31f9a709e6114492136e87c7c6da5e21dfedebefa03af85f3ad72656c493ae9"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d1d169461726f271ab31633cf0e7e7353417e16fb69256a4f8ecb3246a78d6e"}, + {file = "orjson-3.10.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:57c294d73825c6b7f30d11c9e5900cfec9a814893af7f14efbe06b8d0f25fba9"}, + {file = "orjson-3.10.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d7f11dbacfa9265ec76b4019efffabaabba7a7ebf14078f6b4df9b51c3c9a8ea"}, + {file = "orjson-3.10.1-cp312-none-win32.whl", hash = "sha256:d89e5ed68593226c31c76ab4de3e0d35c760bfd3fbf0a74c4b2be1383a1bf123"}, + {file = "orjson-3.10.1-cp312-none-win_amd64.whl", hash = "sha256:aa76c4fe147fd162107ce1692c39f7189180cfd3a27cfbc2ab5643422812da8e"}, + {file = "orjson-3.10.1-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a2c6a85c92d0e494c1ae117befc93cf8e7bca2075f7fe52e32698da650b2c6d1"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9813f43da955197d36a7365eb99bed42b83680801729ab2487fef305b9ced866"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ec917b768e2b34b7084cb6c68941f6de5812cc26c6f1a9fecb728e36a3deb9e8"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5252146b3172d75c8a6d27ebca59c9ee066ffc5a277050ccec24821e68742fdf"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:536429bb02791a199d976118b95014ad66f74c58b7644d21061c54ad284e00f4"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7dfed3c3e9b9199fb9c3355b9c7e4649b65f639e50ddf50efdf86b45c6de04b5"}, + {file = "orjson-3.10.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2b230ec35f188f003f5b543644ae486b2998f6afa74ee3a98fc8ed2e45960afc"}, + {file = "orjson-3.10.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:01234249ba19c6ab1eb0b8be89f13ea21218b2d72d496ef085cfd37e1bae9dd8"}, + {file = "orjson-3.10.1-cp38-none-win32.whl", hash = "sha256:8a884fbf81a3cc22d264ba780920d4885442144e6acaa1411921260416ac9a54"}, + {file = "orjson-3.10.1-cp38-none-win_amd64.whl", hash = "sha256:dab5f802d52b182163f307d2b1f727d30b1762e1923c64c9c56dd853f9671a49"}, + {file = "orjson-3.10.1-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a51fd55d4486bc5293b7a400f9acd55a2dc3b5fc8420d5ffe9b1d6bb1a056a5e"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53521542a6db1411b3bfa1b24ddce18605a3abdc95a28a67b33f9145f26aa8f2"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:27d610df96ac18ace4931411d489637d20ab3b8f63562b0531bba16011998db0"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79244b1456e5846d44e9846534bd9e3206712936d026ea8e6a55a7374d2c0694"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d751efaa8a49ae15cbebdda747a62a9ae521126e396fda8143858419f3b03610"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27ff69c620a4fff33267df70cfd21e0097c2a14216e72943bd5414943e376d77"}, + {file = "orjson-3.10.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ebc58693464146506fde0c4eb1216ff6d4e40213e61f7d40e2f0dde9b2f21650"}, + {file = "orjson-3.10.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5be608c3972ed902e0143a5b8776d81ac1059436915d42defe5c6ae97b3137a4"}, + {file = "orjson-3.10.1-cp39-none-win32.whl", hash = "sha256:4ae10753e7511d359405aadcbf96556c86e9dbf3a948d26c2c9f9a150c52b091"}, + {file = "orjson-3.10.1-cp39-none-win_amd64.whl", hash = "sha256:fb5bc4caa2c192077fdb02dce4e5ef8639e7f20bec4e3a834346693907362932"}, + {file = "orjson-3.10.1.tar.gz", hash = "sha256:a883b28d73370df23ed995c466b4f6c708c1f7a9bdc400fe89165c96c7603204"}, +] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pandas" +version = "2.2.2" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, + {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, + {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, + {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, + {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"}, + {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"}, + {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.22.4", markers = "python_version < \"3.11\""}, + {version = ">=1.23.2", markers = "python_version == \"3.11\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.7" + +[package.extras] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] + +[[package]] +name = "pandas-stubs" +version = "2.2.1.240316" +description = "Type annotations for pandas" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pandas_stubs-2.2.1.240316-py3-none-any.whl", hash = "sha256:0126a26451a37cb893ea62357ca87ba3d181bd999ec8ba2ca5602e20207d6682"}, + {file = "pandas_stubs-2.2.1.240316.tar.gz", hash = "sha256:236a4f812fb6b1922e9607ff09e427f6d8540c421c9e5a40e3e4ddf7adac7f05"}, +] + +[package.dependencies] +numpy = {version = ">=1.26.0", markers = "python_version < \"3.13\""} +types-pytz = ">=2022.1.1" + +[[package]] +name = "pendulum" +version = "2.1.2" +description = "Python datetimes made easy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, +] + +[package.dependencies] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" + +[[package]] +name = "pillow" +version = "10.3.0" +description = "Python Imaging Library (Fork)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pillow-10.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:90b9e29824800e90c84e4022dd5cc16eb2d9605ee13f05d47641eb183cd73d45"}, + {file = "pillow-10.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a2c405445c79c3f5a124573a051062300936b0281fee57637e706453e452746c"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78618cdbccaa74d3f88d0ad6cb8ac3007f1a6fa5c6f19af64b55ca170bfa1edf"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261ddb7ca91fcf71757979534fb4c128448b5b4c55cb6152d280312062f69599"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ce49c67f4ea0609933d01c0731b34b8695a7a748d6c8d186f95e7d085d2fe475"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b14f16f94cbc61215115b9b1236f9c18403c15dd3c52cf629072afa9d54c1cbf"}, + {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d33891be6df59d93df4d846640f0e46f1a807339f09e79a8040bc887bdcd7ed3"}, + {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b50811d664d392f02f7761621303eba9d1b056fb1868c8cdf4231279645c25f5"}, + {file = "pillow-10.3.0-cp310-cp310-win32.whl", hash = "sha256:ca2870d5d10d8726a27396d3ca4cf7976cec0f3cb706debe88e3a5bd4610f7d2"}, + {file = "pillow-10.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:f0d0591a0aeaefdaf9a5e545e7485f89910c977087e7de2b6c388aec32011e9f"}, + {file = "pillow-10.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:ccce24b7ad89adb5a1e34a6ba96ac2530046763912806ad4c247356a8f33a67b"}, + {file = "pillow-10.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:5f77cf66e96ae734717d341c145c5949c63180842a545c47a0ce7ae52ca83795"}, + {file = "pillow-10.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4b878386c4bf293578b48fc570b84ecfe477d3b77ba39a6e87150af77f40c57"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdcbb4068117dfd9ce0138d068ac512843c52295ed996ae6dd1faf537b6dbc27"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9797a6c8fe16f25749b371c02e2ade0efb51155e767a971c61734b1bf6293994"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:9e91179a242bbc99be65e139e30690e081fe6cb91a8e77faf4c409653de39451"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:1b87bd9d81d179bd8ab871603bd80d8645729939f90b71e62914e816a76fc6bd"}, + {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:81d09caa7b27ef4e61cb7d8fbf1714f5aec1c6b6c5270ee53504981e6e9121ad"}, + {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:048ad577748b9fa4a99a0548c64f2cb8d672d5bf2e643a739ac8faff1164238c"}, + {file = "pillow-10.3.0-cp311-cp311-win32.whl", hash = "sha256:7161ec49ef0800947dc5570f86568a7bb36fa97dd09e9827dc02b718c5643f09"}, + {file = "pillow-10.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:8eb0908e954d093b02a543dc963984d6e99ad2b5e36503d8a0aaf040505f747d"}, + {file = "pillow-10.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:4e6f7d1c414191c1199f8996d3f2282b9ebea0945693fb67392c75a3a320941f"}, + {file = "pillow-10.3.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:e46f38133e5a060d46bd630faa4d9fa0202377495df1f068a8299fd78c84de84"}, + {file = "pillow-10.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:50b8eae8f7334ec826d6eeffaeeb00e36b5e24aa0b9df322c247539714c6df19"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d3bea1c75f8c53ee4d505c3e67d8c158ad4df0d83170605b50b64025917f338"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19aeb96d43902f0a783946a0a87dbdad5c84c936025b8419da0a0cd7724356b1"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:74d28c17412d9caa1066f7a31df8403ec23d5268ba46cd0ad2c50fb82ae40462"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:ff61bfd9253c3915e6d41c651d5f962da23eda633cf02262990094a18a55371a"}, + {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d886f5d353333b4771d21267c7ecc75b710f1a73d72d03ca06df49b09015a9ef"}, + {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b5ec25d8b17217d635f8935dbc1b9aa5907962fae29dff220f2659487891cd3"}, + {file = "pillow-10.3.0-cp312-cp312-win32.whl", hash = "sha256:51243f1ed5161b9945011a7360e997729776f6e5d7005ba0c6879267d4c5139d"}, + {file = "pillow-10.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:412444afb8c4c7a6cc11a47dade32982439925537e483be7c0ae0cf96c4f6a0b"}, + {file = "pillow-10.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:798232c92e7665fe82ac085f9d8e8ca98826f8e27859d9a96b41d519ecd2e49a"}, + {file = "pillow-10.3.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:4eaa22f0d22b1a7e93ff0a596d57fdede2e550aecffb5a1ef1106aaece48e96b"}, + {file = "pillow-10.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cd5e14fbf22a87321b24c88669aad3a51ec052eb145315b3da3b7e3cc105b9a2"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1530e8f3a4b965eb6a7785cf17a426c779333eb62c9a7d1bbcf3ffd5bf77a4aa"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d512aafa1d32efa014fa041d38868fda85028e3f930a96f85d49c7d8ddc0383"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:339894035d0ede518b16073bdc2feef4c991ee991a29774b33e515f1d308e08d"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:aa7e402ce11f0885305bfb6afb3434b3cd8f53b563ac065452d9d5654c7b86fd"}, + {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0ea2a783a2bdf2a561808fe4a7a12e9aa3799b701ba305de596bc48b8bdfce9d"}, + {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c78e1b00a87ce43bb37642c0812315b411e856a905d58d597750eb79802aaaa3"}, + {file = "pillow-10.3.0-cp38-cp38-win32.whl", hash = "sha256:72d622d262e463dfb7595202d229f5f3ab4b852289a1cd09650362db23b9eb0b"}, + {file = "pillow-10.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:2034f6759a722da3a3dbd91a81148cf884e91d1b747992ca288ab88c1de15999"}, + {file = "pillow-10.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2ed854e716a89b1afcedea551cd85f2eb2a807613752ab997b9974aaa0d56936"}, + {file = "pillow-10.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dc1a390a82755a8c26c9964d457d4c9cbec5405896cba94cf51f36ea0d855002"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4203efca580f0dd6f882ca211f923168548f7ba334c189e9eab1178ab840bf60"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3102045a10945173d38336f6e71a8dc71bcaeed55c3123ad4af82c52807b9375"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6fb1b30043271ec92dc65f6d9f0b7a830c210b8a96423074b15c7bc999975f57"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:1dfc94946bc60ea375cc39cff0b8da6c7e5f8fcdc1d946beb8da5c216156ddd8"}, + {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b09b86b27a064c9624d0a6c54da01c1beaf5b6cadfa609cf63789b1d08a797b9"}, + {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d3b2348a78bc939b4fed6552abfd2e7988e0f81443ef3911a4b8498ca084f6eb"}, + {file = "pillow-10.3.0-cp39-cp39-win32.whl", hash = "sha256:45ebc7b45406febf07fef35d856f0293a92e7417ae7933207e90bf9090b70572"}, + {file = "pillow-10.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:0ba26351b137ca4e0db0342d5d00d2e355eb29372c05afd544ebf47c0956ffeb"}, + {file = "pillow-10.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:50fd3f6b26e3441ae07b7c979309638b72abc1a25da31a81a7fbd9495713ef4f"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6b02471b72526ab8a18c39cb7967b72d194ec53c1fd0a70b050565a0f366d355"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8ab74c06ffdab957d7670c2a5a6e1a70181cd10b727cd788c4dd9005b6a8acd9"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:048eeade4c33fdf7e08da40ef402e748df113fd0b4584e32c4af74fe78baaeb2"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e2ec1e921fd07c7cda7962bad283acc2f2a9ccc1b971ee4b216b75fad6f0463"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c8e73e99da7db1b4cad7f8d682cf6abad7844da39834c288fbfa394a47bbced"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:16563993329b79513f59142a6b02055e10514c1a8e86dca8b48a893e33cf91e3"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:dd78700f5788ae180b5ee8902c6aea5a5726bac7c364b202b4b3e3ba2d293170"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:aff76a55a8aa8364d25400a210a65ff59d0168e0b4285ba6bf2bd83cf675ba32"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b7bc2176354defba3edc2b9a777744462da2f8e921fbaf61e52acb95bafa9828"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:793b4e24db2e8742ca6423d3fde8396db336698c55cd34b660663ee9e45ed37f"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d93480005693d247f8346bc8ee28c72a2191bdf1f6b5db469c096c0c867ac015"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c83341b89884e2b2e55886e8fbbf37c3fa5efd6c8907124aeb72f285ae5696e5"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1a1d1915db1a4fdb2754b9de292642a39a7fb28f1736699527bb649484fb966a"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a0eaa93d054751ee9964afa21c06247779b90440ca41d184aeb5d410f20ff591"}, + {file = "pillow-10.3.0.tar.gz", hash = "sha256:9d2455fbf44c914840c793e89aa82d0e1763a14253a000743719ae5946814b2d"}, +] + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] +fpx = ["olefile"] +mic = ["olefile"] +tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] +typing = ["typing-extensions"] +xmp = ["defusedxml"] + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "plotly" +version = "5.20.0" +description = "An open-source, interactive data visualization library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "plotly-5.20.0-py3-none-any.whl", hash = "sha256:837a9c8aa90f2c0a2f0d747b82544d014dc2a2bdde967b5bb1da25b53932d1a9"}, + {file = "plotly-5.20.0.tar.gz", hash = "sha256:bf901c805d22032cfa534b2ff7c5aa6b0659e037f19ec1e0cca7f585918b5c89"}, +] + +[package.dependencies] +packaging = "*" +tenacity = ">=6.2.0" + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "portalocker" +version = "2.8.2" +description = "Wraps the portalocker recipe for easy usage" +optional = false +python-versions = ">=3.8" +files = [ + {file = "portalocker-2.8.2-py3-none-any.whl", hash = "sha256:cfb86acc09b9aa7c3b43594e19be1345b9d16af3feb08bf92f23d4dce513a28e"}, + {file = "portalocker-2.8.2.tar.gz", hash = "sha256:2b035aa7828e46c58e9b31390ee1f169b98e1066ab10b9a6a861fe7e25ee4f33"}, +] + +[package.dependencies] +pywin32 = {version = ">=226", markers = "platform_system == \"Windows\""} + +[package.extras] +docs = ["sphinx (>=1.7.1)"] +redis = ["redis"] +tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-timeout (>=2.1.0)", "redis", "sphinx (>=6.0.0)", "types-redis"] + +[[package]] +name = "protobuf" +version = "4.25.3" +description = "" +optional = false +python-versions = ">=3.8" +files = [ + {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"}, + {file = "protobuf-4.25.3-cp310-abi3-win_amd64.whl", hash = "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8"}, + {file = "protobuf-4.25.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d"}, + {file = "protobuf-4.25.3-cp38-cp38-win32.whl", hash = "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"}, + {file = "protobuf-4.25.3-cp38-cp38-win_amd64.whl", hash = "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4"}, + {file = "protobuf-4.25.3-cp39-cp39-win32.whl", hash = "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4"}, + {file = "protobuf-4.25.3-cp39-cp39-win_amd64.whl", hash = "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c"}, + {file = "protobuf-4.25.3-py3-none-any.whl", hash = "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9"}, + {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"}, +] + +[[package]] +name = "pydantic" +version = "1.10.15" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50f1666a9940d3d68683c9d96e39640f709d7a72ff8702987dab1761036206bb"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82790d4753ee5d00739d6cb5cf56bceb186d9d6ce134aca3ba7befb1eedbc2c8"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d207d5b87f6cbefbdb1198154292faee8017d7495a54ae58db06762004500d00"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e49db944fad339b2ccb80128ffd3f8af076f9f287197a480bf1e4ca053a866f0"}, + {file = "pydantic-1.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:d3b5c4cbd0c9cb61bbbb19ce335e1f8ab87a811f6d589ed52b0254cf585d709c"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3d5731a120752248844676bf92f25a12f6e45425e63ce22e0849297a093b5b0"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c365ad9c394f9eeffcb30a82f4246c0006417f03a7c0f8315d6211f25f7cb654"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3287e1614393119c67bd4404f46e33ae3be3ed4cd10360b48d0a4459f420c6a3"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be51dd2c8596b25fe43c0a4a59c2bee4f18d88efb8031188f9e7ddc6b469cf44"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6a51a1dd4aa7b3f1317f65493a182d3cff708385327c1c82c81e4a9d6d65b2e4"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4e316e54b5775d1eb59187f9290aeb38acf620e10f7fd2f776d97bb788199e53"}, + {file = "pydantic-1.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:0d142fa1b8f2f0ae11ddd5e3e317dcac060b951d605fda26ca9b234b92214986"}, + {file = "pydantic-1.10.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7ea210336b891f5ea334f8fc9f8f862b87acd5d4a0cbc9e3e208e7aa1775dabf"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3453685ccd7140715e05f2193d64030101eaad26076fad4e246c1cc97e1bb30d"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bea1f03b8d4e8e86702c918ccfd5d947ac268f0f0cc6ed71782e4b09353b26f"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:af9850d98fc21e5bc24ea9e35dd80a29faf6462c608728a110c0a30b595e58b7"}, + {file = "pydantic-1.10.15-cp37-cp37m-win_amd64.whl", hash = "sha256:d31ee5b14a82c9afe2bd26aaa405293d4237d0591527d9129ce36e58f19f95c1"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5e09c19df304b8123938dc3c53d3d3be6ec74b9d7d0d80f4f4b5432ae16c2022"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7ac9237cd62947db00a0d16acf2f3e00d1ae9d3bd602b9c415f93e7a9fc10528"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:584f2d4c98ffec420e02305cf675857bae03c9d617fcfdc34946b1160213a948"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbc6989fad0c030bd70a0b6f626f98a862224bc2b1e36bfc531ea2facc0a340c"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d573082c6ef99336f2cb5b667b781d2f776d4af311574fb53d908517ba523c22"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6bd7030c9abc80134087d8b6e7aa957e43d35714daa116aced57269a445b8f7b"}, + {file = "pydantic-1.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:3350f527bb04138f8aff932dc828f154847fbdc7a1a44c240fbfff1b57f49a12"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51d405b42f1b86703555797270e4970a9f9bd7953f3990142e69d1037f9d9e51"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a980a77c52723b0dc56640ced396b73a024d4b74f02bcb2d21dbbac1debbe9d0"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f1a1fb467d3f49e1708a3f632b11c69fccb4e748a325d5a491ddc7b5d22383"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:676ed48f2c5bbad835f1a8ed8a6d44c1cd5a21121116d2ac40bd1cd3619746ed"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92229f73400b80c13afcd050687f4d7e88de9234d74b27e6728aa689abcf58cc"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2746189100c646682eff0bce95efa7d2e203420d8e1c613dc0c6b4c1d9c1fde4"}, + {file = "pydantic-1.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:394f08750bd8eaad714718812e7fab615f873b3cdd0b9d84e76e51ef3b50b6b7"}, + {file = "pydantic-1.10.15-py3-none-any.whl", hash = "sha256:28e552a060ba2740d0d2aabe35162652c1459a0b9069fe0db7f4ee0e18e74d58"}, + {file = "pydantic-1.10.15.tar.gz", hash = "sha256:ca832e124eda231a60a041da4f013e3ff24949d94a01154b137fc2f2a43c3ffb"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pyparsing" +version = "3.1.2" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +optional = false +python-versions = ">=3.6.8" +files = [ + {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, + {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, +] + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + +[[package]] +name = "pyreadline3" +version = "3.4.1" +description = "A python implementation of GNU readline." +optional = false +python-versions = "*" +files = [ + {file = "pyreadline3-3.4.1-py3-none-any.whl", hash = "sha256:b0efb6516fd4fb07b45949053826a62fa4cb353db5be2bbb4a7aa1fdd1e345fb"}, + {file = "pyreadline3-3.4.1.tar.gz", hash = "sha256:6f3d1f7b8a31ba32b73917cefc1f28cc660562f39aea8646d30bd6eff21f7bae"}, +] + +[[package]] +name = "pyrsistent" +version = "0.20.0" +description = "Persistent/Functional/Immutable data structures" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, +] + +[[package]] +name = "pytest" +version = "7.4.4" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + +[[package]] +name = "pywin32" +version = "306" +description = "Python for Window Extensions" +optional = false +python-versions = "*" +files = [ + {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, + {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, + {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, + {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, + {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, + {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, + {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, + {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, + {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, + {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, + {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, + {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, + {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, + {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "qdrant-client" +version = "1.8.2" +description = "Client library for the Qdrant vector search engine" +optional = false +python-versions = ">=3.8" +files = [ + {file = "qdrant_client-1.8.2-py3-none-any.whl", hash = "sha256:ee5341c0486d09e4346b0f5ef7781436e6d8cdbf1d5ecddfde7adb3647d353a8"}, + {file = "qdrant_client-1.8.2.tar.gz", hash = "sha256:65078d5328bc0393f42a46a31cd319a989b8285bf3958360acf1dffffdf4cc4e"}, +] + +[package.dependencies] +grpcio = ">=1.41.0" +grpcio-tools = ">=1.41.0" +httpx = {version = ">=0.20.0", extras = ["http2"]} +numpy = {version = ">=1.21", markers = "python_version >= \"3.8\" and python_version < \"3.12\""} +portalocker = ">=2.7.0,<3.0.0" +pydantic = ">=1.10.8" +urllib3 = ">=1.26.14,<3" + +[package.extras] +fastembed = ["fastembed (==0.2.5)"] + +[[package]] +name = "regex" +version = "2023.12.25" +description = "Alternative regular expression module, to replace re." +optional = false +python-versions = ">=3.7" +files = [ + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"}, + {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"}, + {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"}, + {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"}, + {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"}, + {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"}, + {file = "regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"}, + {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"}, + {file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = "sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"}, + {file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"}, + {file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"}, + {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"}, + {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"}, + {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"}, + {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-cache" +version = "1.2.0" +description = "A persistent cache for python requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, +] + +[package.dependencies] +attrs = ">=21.2" +cattrs = ">=22.2" +platformdirs = ">=2.5" +requests = ">=2.22" +url-normalize = ">=1.4" +urllib3 = ">=1.25.5" + +[package.extras] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] +bson = ["bson (>=0.5)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] +dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] +json = ["ujson (>=5.4)"] +mongodb = ["pymongo (>=3)"] +redis = ["redis (>=3)"] +security = ["itsdangerous (>=2.0)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "ruff" +version = "0.3.7" +description = "An extremely fast Python linter and code formatter, written in Rust." +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0e8377cccb2f07abd25e84fc5b2cbe48eeb0fea9f1719cad7caedb061d70e5ce"}, + {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:15a4d1cc1e64e556fa0d67bfd388fed416b7f3b26d5d1c3e7d192c897e39ba4b"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d28bdf3d7dc71dd46929fafeec98ba89b7c3550c3f0978e36389b5631b793663"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:379b67d4f49774ba679593b232dcd90d9e10f04d96e3c8ce4a28037ae473f7bb"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c060aea8ad5ef21cdfbbe05475ab5104ce7827b639a78dd55383a6e9895b7c51"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:ebf8f615dde968272d70502c083ebf963b6781aacd3079081e03b32adfe4d58a"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d48098bd8f5c38897b03604f5428901b65e3c97d40b3952e38637b5404b739a2"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da8a4fda219bf9024692b1bc68c9cff4b80507879ada8769dc7e985755d662ea"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c44e0149f1d8b48c4d5c33d88c677a4aa22fd09b1683d6a7ff55b816b5d074f"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3050ec0af72b709a62ecc2aca941b9cd479a7bf2b36cc4562f0033d688e44fa1"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a29cc38e4c1ab00da18a3f6777f8b50099d73326981bb7d182e54a9a21bb4ff7"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5b15cc59c19edca917f51b1956637db47e200b0fc5e6e1878233d3a938384b0b"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e491045781b1e38b72c91247cf4634f040f8d0cb3e6d3d64d38dcf43616650b4"}, + {file = "ruff-0.3.7-py3-none-win32.whl", hash = "sha256:bc931de87593d64fad3a22e201e55ad76271f1d5bfc44e1a1887edd0903c7d9f"}, + {file = "ruff-0.3.7-py3-none-win_amd64.whl", hash = "sha256:5ef0e501e1e39f35e03c2acb1d1238c595b8bb36cf7a170e7c1df1b73da00e74"}, + {file = "ruff-0.3.7-py3-none-win_arm64.whl", hash = "sha256:789e144f6dc7019d1f92a812891c645274ed08af6037d11fc65fcbc183b7d59f"}, + {file = "ruff-0.3.7.tar.gz", hash = "sha256:d5c1aebee5162c2226784800ae031f660c350e7a3402c4d1f8ea4e97e232e3ba"}, +] + +[[package]] +name = "scikit-learn" +version = "1.4.2" +description = "A set of python modules for machine learning and data mining" +optional = false +python-versions = ">=3.9" +files = [ + {file = "scikit-learn-1.4.2.tar.gz", hash = "sha256:daa1c471d95bad080c6e44b4946c9390a4842adc3082572c20e4f8884e39e959"}, + {file = "scikit_learn-1.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8539a41b3d6d1af82eb629f9c57f37428ff1481c1e34dddb3b9d7af8ede67ac5"}, + {file = "scikit_learn-1.4.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:68b8404841f944a4a1459b07198fa2edd41a82f189b44f3e1d55c104dbc2e40c"}, + {file = "scikit_learn-1.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81bf5d8bbe87643103334032dd82f7419bc8c8d02a763643a6b9a5c7288c5054"}, + {file = "scikit_learn-1.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36f0ea5d0f693cb247a073d21a4123bdf4172e470e6d163c12b74cbb1536cf38"}, + {file = "scikit_learn-1.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:87440e2e188c87db80ea4023440923dccbd56fbc2d557b18ced00fef79da0727"}, + {file = "scikit_learn-1.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:45dee87ac5309bb82e3ea633955030df9bbcb8d2cdb30383c6cd483691c546cc"}, + {file = "scikit_learn-1.4.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:1d0b25d9c651fd050555aadd57431b53d4cf664e749069da77f3d52c5ad14b3b"}, + {file = "scikit_learn-1.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0203c368058ab92efc6168a1507d388d41469c873e96ec220ca8e74079bf62e"}, + {file = "scikit_learn-1.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44c62f2b124848a28fd695db5bc4da019287abf390bfce602ddc8aa1ec186aae"}, + {file = "scikit_learn-1.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:5cd7b524115499b18b63f0c96f4224eb885564937a0b3477531b2b63ce331904"}, + {file = "scikit_learn-1.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:90378e1747949f90c8f385898fff35d73193dfcaec3dd75d6b542f90c4e89755"}, + {file = "scikit_learn-1.4.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:ff4effe5a1d4e8fed260a83a163f7dbf4f6087b54528d8880bab1d1377bd78be"}, + {file = "scikit_learn-1.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:671e2f0c3f2c15409dae4f282a3a619601fa824d2c820e5b608d9d775f91780c"}, + {file = "scikit_learn-1.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d36d0bc983336bbc1be22f9b686b50c964f593c8a9a913a792442af9bf4f5e68"}, + {file = "scikit_learn-1.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:d762070980c17ba3e9a4a1e043ba0518ce4c55152032f1af0ca6f39b376b5928"}, + {file = "scikit_learn-1.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d9993d5e78a8148b1d0fdf5b15ed92452af5581734129998c26f481c46586d68"}, + {file = "scikit_learn-1.4.2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:426d258fddac674fdf33f3cb2d54d26f49406e2599dbf9a32b4d1696091d4256"}, + {file = "scikit_learn-1.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5460a1a5b043ae5ae4596b3126a4ec33ccba1b51e7ca2c5d36dac2169f62ab1d"}, + {file = "scikit_learn-1.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49d64ef6cb8c093d883e5a36c4766548d974898d378e395ba41a806d0e824db8"}, + {file = "scikit_learn-1.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:c97a50b05c194be9146d61fe87dbf8eac62b203d9e87a3ccc6ae9aed2dfaf361"}, +] + +[package.dependencies] +joblib = ">=1.2.0" +numpy = ">=1.19.5" +scipy = ">=1.6.0" +threadpoolctl = ">=2.0.0" + +[package.extras] +benchmark = ["matplotlib (>=3.3.4)", "memory-profiler (>=0.57.0)", "pandas (>=1.1.5)"] +docs = ["Pillow (>=7.1.2)", "matplotlib (>=3.3.4)", "memory-profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)", "sphinx (>=6.0.0)", "sphinx-copybutton (>=0.5.2)", "sphinx-gallery (>=0.15.0)", "sphinx-prompt (>=1.3.0)", "sphinxext-opengraph (>=0.4.2)"] +examples = ["matplotlib (>=3.3.4)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)"] +tests = ["black (>=23.3.0)", "matplotlib (>=3.3.4)", "mypy (>=1.3)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "polars (>=0.19.12)", "pooch (>=1.6.0)", "pyamg (>=4.0.0)", "pyarrow (>=12.0.0)", "pytest (>=7.1.2)", "pytest-cov (>=2.9.0)", "ruff (>=0.0.272)", "scikit-image (>=0.17.2)"] + +[[package]] +name = "scipy" +version = "1.13.0" +description = "Fundamental algorithms for scientific computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "scipy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ba419578ab343a4e0a77c0ef82f088238a93eef141b2b8017e46149776dfad4d"}, + {file = "scipy-1.13.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:22789b56a999265431c417d462e5b7f2b487e831ca7bef5edeb56efe4c93f86e"}, + {file = "scipy-1.13.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05f1432ba070e90d42d7fd836462c50bf98bd08bed0aa616c359eed8a04e3922"}, + {file = "scipy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8434f6f3fa49f631fae84afee424e2483289dfc30a47755b4b4e6b07b2633a4"}, + {file = "scipy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:dcbb9ea49b0167de4167c40eeee6e167caeef11effb0670b554d10b1e693a8b9"}, + {file = "scipy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:1d2f7bb14c178f8b13ebae93f67e42b0a6b0fc50eba1cd8021c9b6e08e8fb1cd"}, + {file = "scipy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fbcf8abaf5aa2dc8d6400566c1a727aed338b5fe880cde64907596a89d576fa"}, + {file = "scipy-1.13.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:5e4a756355522eb60fcd61f8372ac2549073c8788f6114449b37e9e8104f15a5"}, + {file = "scipy-1.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5acd8e1dbd8dbe38d0004b1497019b2dbbc3d70691e65d69615f8a7292865d7"}, + {file = "scipy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ff7dad5d24a8045d836671e082a490848e8639cabb3dbdacb29f943a678683d"}, + {file = "scipy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4dca18c3ffee287ddd3bc8f1dabaf45f5305c5afc9f8ab9cbfab855e70b2df5c"}, + {file = "scipy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:a2f471de4d01200718b2b8927f7d76b5d9bde18047ea0fa8bd15c5ba3f26a1d6"}, + {file = "scipy-1.13.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d0de696f589681c2802f9090fff730c218f7c51ff49bf252b6a97ec4a5d19e8b"}, + {file = "scipy-1.13.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:b2a3ff461ec4756b7e8e42e1c681077349a038f0686132d623fa404c0bee2551"}, + {file = "scipy-1.13.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf9fe63e7a4bf01d3645b13ff2aa6dea023d38993f42aaac81a18b1bda7a82a"}, + {file = "scipy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e7626dfd91cdea5714f343ce1176b6c4745155d234f1033584154f60ef1ff42"}, + {file = "scipy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:109d391d720fcebf2fbe008621952b08e52907cf4c8c7efc7376822151820820"}, + {file = "scipy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:8930ae3ea371d6b91c203b1032b9600d69c568e537b7988a3073dfe4d4774f21"}, + {file = "scipy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5407708195cb38d70fd2d6bb04b1b9dd5c92297d86e9f9daae1576bd9e06f602"}, + {file = "scipy-1.13.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:ac38c4c92951ac0f729c4c48c9e13eb3675d9986cc0c83943784d7390d540c78"}, + {file = "scipy-1.13.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09c74543c4fbeb67af6ce457f6a6a28e5d3739a87f62412e4a16e46f164f0ae5"}, + {file = "scipy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28e286bf9ac422d6beb559bc61312c348ca9b0f0dae0d7c5afde7f722d6ea13d"}, + {file = "scipy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:33fde20efc380bd23a78a4d26d59fc8704e9b5fd9b08841693eb46716ba13d86"}, + {file = "scipy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:45c08bec71d3546d606989ba6e7daa6f0992918171e2a6f7fbedfa7361c2de1e"}, + {file = "scipy-1.13.0.tar.gz", hash = "sha256:58569af537ea29d3f78e5abd18398459f195546bb3be23d16677fb26616cc11e"}, +] + +[package.dependencies] +numpy = ">=1.22.4,<2.3" + +[package.extras] +dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy", "pycodestyle", "pydevtool", "rich-click", "ruff", "types-psutil", "typing_extensions"] +doc = ["jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.12.0)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0)", "sphinx-design (>=0.4.0)"] +test = ["array-api-strict", "asv", "gmpy2", "hypothesis (>=6.30)", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] + +[[package]] +name = "setuptools" +version = "69.5.1" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"}, + {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "sqlalchemy" +version = "2.0.29" +description = "Database Abstraction Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "SQLAlchemy-2.0.29-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4c142852ae192e9fe5aad5c350ea6befe9db14370b34047e1f0f7cf99e63c63b"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:99a1e69d4e26f71e750e9ad6fdc8614fbddb67cfe2173a3628a2566034e223c7"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ef3fbccb4058355053c51b82fd3501a6e13dd808c8d8cd2561e610c5456013c"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d6753305936eddc8ed190e006b7bb33a8f50b9854823485eed3a886857ab8d1"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0f3ca96af060a5250a8ad5a63699180bc780c2edf8abf96c58af175921df847a"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c4520047006b1d3f0d89e0532978c0688219857eb2fee7c48052560ae76aca1e"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-win32.whl", hash = "sha256:b2a0e3cf0caac2085ff172c3faacd1e00c376e6884b5bc4dd5b6b84623e29e4f"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-win_amd64.whl", hash = "sha256:01d10638a37460616708062a40c7b55f73e4d35eaa146781c683e0fa7f6c43fb"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:308ef9cb41d099099fffc9d35781638986870b29f744382904bf9c7dadd08513"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:296195df68326a48385e7a96e877bc19aa210e485fa381c5246bc0234c36c78e"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a13b917b4ffe5a0a31b83d051d60477819ddf18276852ea68037a144a506efb9"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f6d971255d9ddbd3189e2e79d743ff4845c07f0633adfd1de3f63d930dbe673"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:61405ea2d563407d316c63a7b5271ae5d274a2a9fbcd01b0aa5503635699fa1e"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:de7202ffe4d4a8c1e3cde1c03e01c1a3772c92858837e8f3879b497158e4cb44"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-win32.whl", hash = "sha256:b5d7ed79df55a731749ce65ec20d666d82b185fa4898430b17cb90c892741520"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-win_amd64.whl", hash = "sha256:205f5a2b39d7c380cbc3b5dcc8f2762fb5bcb716838e2d26ccbc54330775b003"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d96710d834a6fb31e21381c6d7b76ec729bd08c75a25a5184b1089141356171f"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:52de4736404e53c5c6a91ef2698c01e52333988ebdc218f14c833237a0804f1b"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c7b02525ede2a164c5fa5014915ba3591730f2cc831f5be9ff3b7fd3e30958e"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dfefdb3e54cd15f5d56fd5ae32f1da2d95d78319c1f6dfb9bcd0eb15d603d5d"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a88913000da9205b13f6f195f0813b6ffd8a0c0c2bd58d499e00a30eb508870c"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fecd5089c4be1bcc37c35e9aa678938d2888845a134dd016de457b942cf5a758"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-win32.whl", hash = "sha256:8197d6f7a3d2b468861ebb4c9f998b9df9e358d6e1cf9c2a01061cb9b6cf4e41"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-win_amd64.whl", hash = "sha256:9b19836ccca0d321e237560e475fd99c3d8655d03da80c845c4da20dda31b6e1"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:87a1d53a5382cdbbf4b7619f107cc862c1b0a4feb29000922db72e5a66a5ffc0"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a0732dffe32333211801b28339d2a0babc1971bc90a983e3035e7b0d6f06b93"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90453597a753322d6aa770c5935887ab1fc49cc4c4fdd436901308383d698b4b"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ea311d4ee9a8fa67f139c088ae9f905fcf0277d6cd75c310a21a88bf85e130f5"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5f20cb0a63a3e0ec4e169aa8890e32b949c8145983afa13a708bc4b0a1f30e03"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-win32.whl", hash = "sha256:e5bbe55e8552019c6463709b39634a5fc55e080d0827e2a3a11e18eb73f5cdbd"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-win_amd64.whl", hash = "sha256:c2f9c762a2735600654c654bf48dad388b888f8ce387b095806480e6e4ff6907"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7e614d7a25a43a9f54fcce4675c12761b248547f3d41b195e8010ca7297c369c"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:471fcb39c6adf37f820350c28aac4a7df9d3940c6548b624a642852e727ea586"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:988569c8732f54ad3234cf9c561364221a9e943b78dc7a4aaf35ccc2265f1930"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dddaae9b81c88083e6437de95c41e86823d150f4ee94bf24e158a4526cbead01"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:334184d1ab8f4c87f9652b048af3f7abea1c809dfe526fb0435348a6fef3d380"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:38b624e5cf02a69b113c8047cf7f66b5dfe4a2ca07ff8b8716da4f1b3ae81567"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-win32.whl", hash = "sha256:bab41acf151cd68bc2b466deae5deeb9e8ae9c50ad113444151ad965d5bf685b"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-win_amd64.whl", hash = "sha256:52c8011088305476691b8750c60e03b87910a123cfd9ad48576d6414b6ec2a1d"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3071ad498896907a5ef756206b9dc750f8e57352113c19272bdfdc429c7bd7de"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dba622396a3170974f81bad49aacebd243455ec3cc70615aeaef9e9613b5bca5"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b184e3de58009cc0bf32e20f137f1ec75a32470f5fede06c58f6c355ed42a72"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c37f1050feb91f3d6c32f864d8e114ff5545a4a7afe56778d76a9aec62638ba"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bda7ce59b06d0f09afe22c56714c65c957b1068dee3d5e74d743edec7daba552"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:25664e18bef6dc45015b08f99c63952a53a0a61f61f2e48a9e70cec27e55f699"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-win32.whl", hash = "sha256:77d29cb6c34b14af8a484e831ab530c0f7188f8efed1c6a833a2c674bf3c26ec"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-win_amd64.whl", hash = "sha256:04c487305ab035a9548f573763915189fc0fe0824d9ba28433196f8436f1449c"}, + {file = "SQLAlchemy-2.0.29-py3-none-any.whl", hash = "sha256:dc4ee2d4ee43251905f88637d5281a8d52e916a021384ec10758826f5cbae305"}, + {file = "SQLAlchemy-2.0.29.tar.gz", hash = "sha256:bd9566b8e58cabd700bc367b60e90d9349cd16f0984973f98a9a09f9c64e86f0"}, +] + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} +typing-extensions = ">=4.6.0" + +[package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=8)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3_binary"] + +[[package]] +name = "sympy" +version = "1.12" +description = "Computer algebra system (CAS) in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sympy-1.12-py3-none-any.whl", hash = "sha256:c3588cd4295d0c0f603d0f2ae780587e64e2efeedb3521e46b9bb1d08d184fa5"}, + {file = "sympy-1.12.tar.gz", hash = "sha256:ebf595c8dac3e0fdc4152c51878b498396ec7f30e7a914d6071e674d49420fb8"}, +] + +[package.dependencies] +mpmath = ">=0.19" + +[[package]] +name = "tenacity" +version = "8.2.3" +description = "Retry code until it succeeds" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tenacity-8.2.3-py3-none-any.whl", hash = "sha256:ce510e327a630c9e1beaf17d42e6ffacc88185044ad85cf74c0a8887c6a0f88c"}, + {file = "tenacity-8.2.3.tar.gz", hash = "sha256:5398ef0d78e63f40007c1fb4c0bff96e1911394d2fa8d194f77619c05ff6cc8a"}, +] + +[package.extras] +doc = ["reno", "sphinx", "tornado (>=4.5)"] + +[[package]] +name = "threadpoolctl" +version = "3.4.0" +description = "threadpoolctl" +optional = false +python-versions = ">=3.8" +files = [ + {file = "threadpoolctl-3.4.0-py3-none-any.whl", hash = "sha256:8f4c689a65b23e5ed825c8436a92b818aac005e0f3715f6a1664d7c7ee29d262"}, + {file = "threadpoolctl-3.4.0.tar.gz", hash = "sha256:f11b491a03661d6dd7ef692dd422ab34185d982466c49c8f98c8f716b5c93196"}, +] + +[[package]] +name = "tiktoken" +version = "0.4.0" +description = "tiktoken is a fast BPE tokeniser for use with OpenAI's models" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tiktoken-0.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:176cad7f053d2cc82ce7e2a7c883ccc6971840a4b5276740d0b732a2b2011f8a"}, + {file = "tiktoken-0.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:450d504892b3ac80207700266ee87c932df8efea54e05cefe8613edc963c1285"}, + {file = "tiktoken-0.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00d662de1e7986d129139faf15e6a6ee7665ee103440769b8dedf3e7ba6ac37f"}, + {file = "tiktoken-0.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5727d852ead18b7927b8adf558a6f913a15c7766725b23dbe21d22e243041b28"}, + {file = "tiktoken-0.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c06cd92b09eb0404cedce3702fa866bf0d00e399439dad3f10288ddc31045422"}, + {file = "tiktoken-0.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9ec161e40ed44e4210d3b31e2ff426b4a55e8254f1023e5d2595cb60044f8ea6"}, + {file = "tiktoken-0.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:1e8fa13cf9889d2c928b9e258e9dbbbf88ab02016e4236aae76e3b4f82dd8288"}, + {file = "tiktoken-0.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bb2341836b725c60d0ab3c84970b9b5f68d4b733a7bcb80fb25967e5addb9920"}, + {file = "tiktoken-0.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ca30367ad750ee7d42fe80079d3092bd35bb266be7882b79c3bd159b39a17b0"}, + {file = "tiktoken-0.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3dc3df19ddec79435bb2a94ee46f4b9560d0299c23520803d851008445671197"}, + {file = "tiktoken-0.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d980fa066e962ef0f4dad0222e63a484c0c993c7a47c7dafda844ca5aded1f3"}, + {file = "tiktoken-0.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:329f548a821a2f339adc9fbcfd9fc12602e4b3f8598df5593cfc09839e9ae5e4"}, + {file = "tiktoken-0.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b1a038cee487931a5caaef0a2e8520e645508cde21717eacc9af3fbda097d8bb"}, + {file = "tiktoken-0.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:08efa59468dbe23ed038c28893e2a7158d8c211c3dd07f2bbc9a30e012512f1d"}, + {file = "tiktoken-0.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f3020350685e009053829c1168703c346fb32c70c57d828ca3742558e94827a9"}, + {file = "tiktoken-0.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba16698c42aad8190e746cd82f6a06769ac7edd415d62ba027ea1d99d958ed93"}, + {file = "tiktoken-0.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c15d9955cc18d0d7ffcc9c03dc51167aedae98542238b54a2e659bd25fe77ed"}, + {file = "tiktoken-0.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64e1091c7103100d5e2c6ea706f0ec9cd6dc313e6fe7775ef777f40d8c20811e"}, + {file = "tiktoken-0.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e87751b54eb7bca580126353a9cf17a8a8eaadd44edaac0e01123e1513a33281"}, + {file = "tiktoken-0.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e063b988b8ba8b66d6cc2026d937557437e79258095f52eaecfafb18a0a10c03"}, + {file = "tiktoken-0.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9c6dd439e878172dc163fced3bc7b19b9ab549c271b257599f55afc3a6a5edef"}, + {file = "tiktoken-0.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8d1d97f83697ff44466c6bef5d35b6bcdb51e0125829a9c0ed1e6e39fb9a08fb"}, + {file = "tiktoken-0.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1b6bce7c68aa765f666474c7c11a7aebda3816b58ecafb209afa59c799b0dd2d"}, + {file = "tiktoken-0.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a73286c35899ca51d8d764bc0b4d60838627ce193acb60cc88aea60bddec4fd"}, + {file = "tiktoken-0.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0394967d2236a60fd0aacef26646b53636423cc9c70c32f7c5124ebe86f3093"}, + {file = "tiktoken-0.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:dae2af6f03ecba5f679449fa66ed96585b2fa6accb7fd57d9649e9e398a94f44"}, + {file = "tiktoken-0.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:55e251b1da3c293432179cf7c452cfa35562da286786be5a8b1ee3405c2b0dd2"}, + {file = "tiktoken-0.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:c835d0ee1f84a5aa04921717754eadbc0f0a56cf613f78dfc1cf9ad35f6c3fea"}, + {file = "tiktoken-0.4.0.tar.gz", hash = "sha256:59b20a819969735b48161ced9b92f05dc4519c17be4015cfb73b65270a243620"}, +] + +[package.dependencies] +regex = ">=2022.1.18" +requests = ">=2.26.0" + +[package.extras] +blobfile = ["blobfile (>=2)"] + +[[package]] +name = "tokenizers" +version = "0.15.2" +description = "" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tokenizers-0.15.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:52f6130c9cbf70544287575a985bf44ae1bda2da7e8c24e97716080593638012"}, + {file = "tokenizers-0.15.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:054c1cc9c6d68f7ffa4e810b3d5131e0ba511b6e4be34157aa08ee54c2f8d9ee"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a9b9b070fdad06e347563b88c278995735292ded1132f8657084989a4c84a6d5"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea621a7eef4b70e1f7a4e84dd989ae3f0eeb50fc8690254eacc08acb623e82f1"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cf7fd9a5141634fa3aa8d6b7be362e6ae1b4cda60da81388fa533e0b552c98fd"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44f2a832cd0825295f7179eaf173381dc45230f9227ec4b44378322d900447c9"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8b9ec69247a23747669ec4b0ca10f8e3dfb3545d550258129bd62291aabe8605"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40b6a4c78da863ff26dbd5ad9a8ecc33d8a8d97b535172601cf00aee9d7ce9ce"}, + {file = "tokenizers-0.15.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5ab2a4d21dcf76af60e05af8063138849eb1d6553a0d059f6534357bce8ba364"}, + {file = "tokenizers-0.15.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a47acfac7e511f6bbfcf2d3fb8c26979c780a91e06fb5b9a43831b2c0153d024"}, + {file = "tokenizers-0.15.2-cp310-none-win32.whl", hash = "sha256:064ff87bb6acdbd693666de9a4b692add41308a2c0ec0770d6385737117215f2"}, + {file = "tokenizers-0.15.2-cp310-none-win_amd64.whl", hash = "sha256:3b919afe4df7eb6ac7cafd2bd14fb507d3f408db7a68c43117f579c984a73843"}, + {file = "tokenizers-0.15.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:89cd1cb93e4b12ff39bb2d626ad77e35209de9309a71e4d3d4672667b4b256e7"}, + {file = "tokenizers-0.15.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cfed5c64e5be23d7ee0f0e98081a25c2a46b0b77ce99a4f0605b1ec43dd481fa"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a907d76dcfda37023ba203ab4ceeb21bc5683436ebefbd895a0841fd52f6f6f2"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20ea60479de6fc7b8ae756b4b097572372d7e4032e2521c1bbf3d90c90a99ff0"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:48e2b9335be2bc0171df9281385c2ed06a15f5cf121c44094338306ab7b33f2c"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:112a1dd436d2cc06e6ffdc0b06d55ac019a35a63afd26475205cb4b1bf0bfbff"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4620cca5c2817177ee8706f860364cc3a8845bc1e291aaf661fb899e5d1c45b0"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccd73a82751c523b3fc31ff8194702e4af4db21dc20e55b30ecc2079c5d43cb7"}, + {file = "tokenizers-0.15.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:107089f135b4ae7817affe6264f8c7a5c5b4fd9a90f9439ed495f54fcea56fb4"}, + {file = "tokenizers-0.15.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0ff110ecc57b7aa4a594396525a3451ad70988e517237fe91c540997c4e50e29"}, + {file = "tokenizers-0.15.2-cp311-none-win32.whl", hash = "sha256:6d76f00f5c32da36c61f41c58346a4fa7f0a61be02f4301fd30ad59834977cc3"}, + {file = "tokenizers-0.15.2-cp311-none-win_amd64.whl", hash = "sha256:cc90102ed17271cf0a1262babe5939e0134b3890345d11a19c3145184b706055"}, + {file = "tokenizers-0.15.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f86593c18d2e6248e72fb91c77d413a815153b8ea4e31f7cd443bdf28e467670"}, + {file = "tokenizers-0.15.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0774bccc6608eca23eb9d620196687c8b2360624619623cf4ba9dc9bd53e8b51"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d0222c5b7c9b26c0b4822a82f6a7011de0a9d3060e1da176f66274b70f846b98"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3835738be1de66624fff2f4f6f6684775da4e9c00bde053be7564cbf3545cc66"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0143e7d9dcd811855c1ce1ab9bf5d96d29bf5e528fd6c7824d0465741e8c10fd"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db35825f6d54215f6b6009a7ff3eedee0848c99a6271c870d2826fbbedf31a38"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f5e64b0389a2be47091d8cc53c87859783b837ea1a06edd9d8e04004df55a5c"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e0480c452217edd35eca56fafe2029fb4d368b7c0475f8dfa3c5c9c400a7456"}, + {file = "tokenizers-0.15.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a33ab881c8fe70474980577e033d0bc9a27b7ab8272896e500708b212995d834"}, + {file = "tokenizers-0.15.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a308a607ca9de2c64c1b9ba79ec9a403969715a1b8ba5f998a676826f1a7039d"}, + {file = "tokenizers-0.15.2-cp312-none-win32.whl", hash = "sha256:b8fcfa81bcb9447df582c5bc96a031e6df4da2a774b8080d4f02c0c16b42be0b"}, + {file = "tokenizers-0.15.2-cp312-none-win_amd64.whl", hash = "sha256:38d7ab43c6825abfc0b661d95f39c7f8af2449364f01d331f3b51c94dcff7221"}, + {file = "tokenizers-0.15.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:38bfb0204ff3246ca4d5e726e8cc8403bfc931090151e6eede54d0e0cf162ef0"}, + {file = "tokenizers-0.15.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9c861d35e8286a53e06e9e28d030b5a05bcbf5ac9d7229e561e53c352a85b1fc"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:936bf3842db5b2048eaa53dade907b1160f318e7c90c74bfab86f1e47720bdd6"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:620beacc3373277700d0e27718aa8b25f7b383eb8001fba94ee00aeea1459d89"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2735ecbbf37e52db4ea970e539fd2d450d213517b77745114f92867f3fc246eb"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:473c83c5e2359bb81b0b6fde870b41b2764fcdd36d997485e07e72cc3a62264a"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:968fa1fb3c27398b28a4eca1cbd1e19355c4d3a6007f7398d48826bbe3a0f728"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:865c60ae6eaebdde7da66191ee9b7db52e542ed8ee9d2c653b6d190a9351b980"}, + {file = "tokenizers-0.15.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7c0d8b52664ab2d4a8d6686eb5effc68b78608a9008f086a122a7b2996befbab"}, + {file = "tokenizers-0.15.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:f33dfbdec3784093a9aebb3680d1f91336c56d86cc70ddf88708251da1fe9064"}, + {file = "tokenizers-0.15.2-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:d44ba80988ff9424e33e0a49445072ac7029d8c0e1601ad25a0ca5f41ed0c1d6"}, + {file = "tokenizers-0.15.2-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:dce74266919b892f82b1b86025a613956ea0ea62a4843d4c4237be2c5498ed3a"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0ef06b9707baeb98b316577acb04f4852239d856b93e9ec3a299622f6084e4be"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c73e2e74bbb07910da0d37c326869f34113137b23eadad3fc00856e6b3d9930c"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4eeb12daf02a59e29f578a865f55d87cd103ce62bd8a3a5874f8fdeaa82e336b"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ba9f6895af58487ca4f54e8a664a322f16c26bbb442effd01087eba391a719e"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ccec77aa7150e38eec6878a493bf8c263ff1fa8a62404e16c6203c64c1f16a26"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3f40604f5042ff210ba82743dda2b6aa3e55aa12df4e9f2378ee01a17e2855e"}, + {file = "tokenizers-0.15.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5645938a42d78c4885086767c70923abad047163d809c16da75d6b290cb30bbe"}, + {file = "tokenizers-0.15.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:05a77cbfebe28a61ab5c3891f9939cc24798b63fa236d84e5f29f3a85a200c00"}, + {file = "tokenizers-0.15.2-cp37-none-win32.whl", hash = "sha256:361abdc068e8afe9c5b818769a48624687fb6aaed49636ee39bec4e95e1a215b"}, + {file = "tokenizers-0.15.2-cp37-none-win_amd64.whl", hash = "sha256:7ef789f83eb0f9baeb4d09a86cd639c0a5518528f9992f38b28e819df397eb06"}, + {file = "tokenizers-0.15.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4fe1f74a902bee74a3b25aff180fbfbf4f8b444ab37c4d496af7afd13a784ed2"}, + {file = "tokenizers-0.15.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c4b89038a684f40a6b15d6b09f49650ac64d951ad0f2a3ea9169687bbf2a8ba"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d05a1b06f986d41aed5f2de464c003004b2df8aaf66f2b7628254bcbfb72a438"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:508711a108684111ec8af89d3a9e9e08755247eda27d0ba5e3c50e9da1600f6d"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:daa348f02d15160cb35439098ac96e3a53bacf35885072611cd9e5be7d333daa"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:494fdbe5932d3416de2a85fc2470b797e6f3226c12845cadf054dd906afd0442"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2d60f5246f4da9373f75ff18d64c69cbf60c3bca597290cea01059c336d2470"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93268e788825f52de4c7bdcb6ebc1fcd4a5442c02e730faa9b6b08f23ead0e24"}, + {file = "tokenizers-0.15.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6fc7083ab404019fc9acafe78662c192673c1e696bd598d16dc005bd663a5cf9"}, + {file = "tokenizers-0.15.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:41e39b41e5531d6b2122a77532dbea60e171ef87a3820b5a3888daa847df4153"}, + {file = "tokenizers-0.15.2-cp38-none-win32.whl", hash = "sha256:06cd0487b1cbfabefb2cc52fbd6b1f8d4c37799bd6c6e1641281adaa6b2504a7"}, + {file = "tokenizers-0.15.2-cp38-none-win_amd64.whl", hash = "sha256:5179c271aa5de9c71712e31cb5a79e436ecd0d7532a408fa42a8dbfa4bc23fd9"}, + {file = "tokenizers-0.15.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:82f8652a74cc107052328b87ea8b34291c0f55b96d8fb261b3880216a9f9e48e"}, + {file = "tokenizers-0.15.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:02458bee6f5f3139f1ebbb6d042b283af712c0981f5bc50edf771d6b762d5e4f"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c9a09cd26cca2e1c349f91aa665309ddb48d71636370749414fbf67bc83c5343"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:158be8ea8554e5ed69acc1ce3fbb23a06060bd4bbb09029431ad6b9a466a7121"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ddba9a2b0c8c81633eca0bb2e1aa5b3a15362b1277f1ae64176d0f6eba78ab1"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ef5dd1d39797044642dbe53eb2bc56435308432e9c7907728da74c69ee2adca"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:454c203164e07a860dbeb3b1f4a733be52b0edbb4dd2e5bd75023ffa8b49403a"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cf6b7f1d4dc59af960e6ffdc4faffe6460bbfa8dce27a58bf75755ffdb2526d"}, + {file = "tokenizers-0.15.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2ef09bbc16519f6c25d0c7fc0c6a33a6f62923e263c9d7cca4e58b8c61572afb"}, + {file = "tokenizers-0.15.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c9a2ebdd2ad4ec7a68e7615086e633857c85e2f18025bd05d2a4399e6c5f7169"}, + {file = "tokenizers-0.15.2-cp39-none-win32.whl", hash = "sha256:918fbb0eab96fe08e72a8c2b5461e9cce95585d82a58688e7f01c2bd546c79d0"}, + {file = "tokenizers-0.15.2-cp39-none-win_amd64.whl", hash = "sha256:524e60da0135e106b254bd71f0659be9f89d83f006ea9093ce4d1fab498c6d0d"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6a9b648a58281c4672212fab04e60648fde574877d0139cd4b4f93fe28ca8944"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:7c7d18b733be6bbca8a55084027f7be428c947ddf871c500ee603e375013ffba"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:13ca3611de8d9ddfbc4dc39ef54ab1d2d4aaa114ac8727dfdc6a6ec4be017378"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:237d1bf3361cf2e6463e6c140628e6406766e8b27274f5fcc62c747ae3c6f094"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67a0fe1e49e60c664915e9fb6b0cb19bac082ab1f309188230e4b2920230edb3"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4e022fe65e99230b8fd89ebdfea138c24421f91c1a4f4781a8f5016fd5cdfb4d"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d857be2df69763362ac699f8b251a8cd3fac9d21893de129bc788f8baaef2693"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:708bb3e4283177236309e698da5fcd0879ce8fd37457d7c266d16b550bcbbd18"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:64c35e09e9899b72a76e762f9854e8750213f67567787d45f37ce06daf57ca78"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1257f4394be0d3b00de8c9e840ca5601d0a4a8438361ce9c2b05c7d25f6057b"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02272fe48280e0293a04245ca5d919b2c94a48b408b55e858feae9618138aeda"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dc3ad9ebc76eabe8b1d7c04d38be884b8f9d60c0cdc09b0aa4e3bcf746de0388"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:32e16bdeffa7c4f46bf2152172ca511808b952701d13e7c18833c0b73cb5c23f"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:fb16ba563d59003028b678d2361a27f7e4ae0ab29c7a80690efa20d829c81fdb"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:2277c36d2d6cdb7876c274547921a42425b6810d38354327dd65a8009acf870c"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1cf75d32e8d250781940d07f7eece253f2fe9ecdb1dc7ba6e3833fa17b82fcbc"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1b3b31884dc8e9b21508bb76da80ebf7308fdb947a17affce815665d5c4d028"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b10122d8d8e30afb43bb1fe21a3619f62c3e2574bff2699cf8af8b0b6c5dc4a3"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d88b96ff0fe8e91f6ef01ba50b0d71db5017fa4e3b1d99681cec89a85faf7bf7"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:37aaec5a52e959892870a7c47cef80c53797c0db9149d458460f4f31e2fb250e"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e2ea752f2b0fe96eb6e2f3adbbf4d72aaa1272079b0dfa1145507bd6a5d537e6"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:4b19a808d8799fda23504a5cd31d2f58e6f52f140380082b352f877017d6342b"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:64c86e5e068ac8b19204419ed8ca90f9d25db20578f5881e337d203b314f4104"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de19c4dc503c612847edf833c82e9f73cd79926a384af9d801dcf93f110cea4e"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea09acd2fe3324174063d61ad620dec3bcf042b495515f27f638270a7d466e8b"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:cf27fd43472e07b57cf420eee1e814549203d56de00b5af8659cb99885472f1f"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:7ca22bd897537a0080521445d91a58886c8c04084a6a19e6c78c586e0cfa92a5"}, + {file = "tokenizers-0.15.2.tar.gz", hash = "sha256:e6e9c6e019dd5484be5beafc775ae6c925f4c69a3487040ed09b45e13df2cb91"}, +] + +[package.dependencies] +huggingface_hub = ">=0.16.4,<1.0" + +[package.extras] +dev = ["tokenizers[testing]"] +docs = ["setuptools_rust", "sphinx", "sphinx_rtd_theme"] +testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "tqdm" +version = "4.66.2" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tqdm-4.66.2-py3-none-any.whl", hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"}, + {file = "tqdm-4.66.2.tar.gz", hash = "sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + +[[package]] +name = "types-pytz" +version = "2024.1.0.20240203" +description = "Typing stubs for pytz" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-pytz-2024.1.0.20240203.tar.gz", hash = "sha256:c93751ee20dfc6e054a0148f8f5227b9a00b79c90a4d3c9f464711a73179c89e"}, + {file = "types_pytz-2024.1.0.20240203-py3-none-any.whl", hash = "sha256:9679eef0365db3af91ef7722c199dbb75ee5c1b67e3c4dd7bfbeb1b8a71c21a3"}, +] + +[[package]] +name = "typing-extensions" +version = "4.11.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, + {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, +] + +[[package]] +name = "typing-inspect" +version = "0.9.0" +description = "Runtime inspection utilities for typing module." +optional = false +python-versions = "*" +files = [ + {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, + {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, +] + +[package.dependencies] +mypy-extensions = ">=0.3.0" +typing-extensions = ">=3.7.4" + +[[package]] +name = "tzdata" +version = "2024.1" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, +] + +[[package]] +name = "url-normalize" +version = "1.4.3" +description = "URL normalization for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, + {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wcmatch" +version = "8.4" +description = "Wildcard/glob file name matcher." +optional = false +python-versions = ">=3.7" +files = [ + {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, + {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, +] + +[package.dependencies] +bracex = ">=2.1.1" + +[[package]] +name = "win32-setctime" +version = "1.1.0" +description = "A small Python utility to set file creation time on Windows" +optional = false +python-versions = ">=3.5" +files = [ + {file = "win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"}, + {file = "win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2"}, +] + +[package.extras] +dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[[package]] +name = "yarl" +version = "1.9.4" +description = "Yet another URL library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, + {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, + {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, + {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, + {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, + {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, + {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, + {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, + {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, + {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, + {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, + {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, + {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, + {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, + {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, + {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" + +[[package]] +name = "zipp" +version = "3.18.1" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, + {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9,<3.12" +content-hash = "dcfdd9900b799461993fac12e0132bb16d8ae59ed5d0a07c1fb3c78deea7deff" diff --git a/airbyte-integrations/connectors/destination-qdrant/pyproject.toml b/airbyte-integrations/connectors/destination-qdrant/pyproject.toml new file mode 100644 index 0000000000000..d64d840b12aec --- /dev/null +++ b/airbyte-integrations/connectors/destination-qdrant/pyproject.toml @@ -0,0 +1,31 @@ +[build-system] +requires = [ "poetry-core>=1.0.0",] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +name = "airbyte-destination-qdrant" +version = "0.0.11" +description = "Airbyte destination implementation for Qdrant." +authors = ["Airbyte "] +license = "MIT" +readme = "README.md" +documentation = "https://docs.airbyte.com/integrations/destinations/qdrant" +homepage = "https://airbyte.com" +repository = "https://github.com/airbytehq/airbyte" + +[[tool.poetry.packages]] +include = "destination_qdrant" + +[tool.poetry.dependencies] +python = "^3.9,<3.12" +airbyte-cdk = {version = "0.81.6", extras = ["vector-db-based"]} +qdrant-client = "*" +fastembed = "*" + +[tool.poetry.group.dev.dependencies] +pytest = "^7.2" +ruff = "^0.3.2" +mypy = "^1.9.0" + +[tool.poetry.scripts] +destination-qdrant = "destination_qdrant.run:run" diff --git a/airbyte-integrations/connectors/destination-qdrant/requirements.txt b/airbyte-integrations/connectors/destination-qdrant/requirements.txt deleted file mode 100644 index d6e1198b1ab1f..0000000000000 --- a/airbyte-integrations/connectors/destination-qdrant/requirements.txt +++ /dev/null @@ -1 +0,0 @@ --e . diff --git a/airbyte-integrations/connectors/destination-qdrant/setup.py b/airbyte-integrations/connectors/destination-qdrant/setup.py deleted file mode 100644 index f30ca62213c4d..0000000000000 --- a/airbyte-integrations/connectors/destination-qdrant/setup.py +++ /dev/null @@ -1,23 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = ["airbyte-cdk[vector-db-based]==0.57.0", "qdrant-client", "fastembed"] - -TEST_REQUIREMENTS = ["pytest~=6.2"] - -setup( - name="destination_qdrant", - description="Destination implementation for Qdrant.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={"": ["*.json"]}, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/destination-rabbitmq/Dockerfile b/airbyte-integrations/connectors/destination-rabbitmq/Dockerfile index a3e040a707872..e616ffec70a3a 100644 --- a/airbyte-integrations/connectors/destination-rabbitmq/Dockerfile +++ b/airbyte-integrations/connectors/destination-rabbitmq/Dockerfile @@ -34,5 +34,5 @@ COPY destination_rabbitmq ./destination_rabbitmq ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.2 +LABEL io.airbyte.version=0.1.3 LABEL io.airbyte.name=airbyte/destination-rabbitmq diff --git a/airbyte-integrations/connectors/destination-rabbitmq/icon.svg b/airbyte-integrations/connectors/destination-rabbitmq/icon.svg index dacd88fc7b23d..820c90361a9e3 100644 --- a/airbyte-integrations/connectors/destination-rabbitmq/icon.svg +++ b/airbyte-integrations/connectors/destination-rabbitmq/icon.svg @@ -1 +1 @@ - \ No newline at end of file + \ No newline at end of file diff --git a/airbyte-integrations/connectors/destination-rabbitmq/metadata.yaml b/airbyte-integrations/connectors/destination-rabbitmq/metadata.yaml index d19aa31ab4c31..c9abadca011aa 100644 --- a/airbyte-integrations/connectors/destination-rabbitmq/metadata.yaml +++ b/airbyte-integrations/connectors/destination-rabbitmq/metadata.yaml @@ -2,7 +2,7 @@ data: connectorSubtype: database connectorType: destination definitionId: e06ad785-ad6f-4647-b2e8-3027a5c59454 - dockerImageTag: 0.1.2 + dockerImageTag: 0.1.3 dockerRepository: airbyte/destination-rabbitmq githubIssueLabel: destination-rabbitmq icon: pulsar.svg @@ -10,9 +10,9 @@ data: name: RabbitMQ registries: cloud: - enabled: false + enabled: false # hide RabbitMQ Destination https://github.com/airbytehq/airbyte/issues/16315 oss: - enabled: false + enabled: true releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/destinations/rabbitmq tags: diff --git a/airbyte-integrations/connectors/destination-redshift/build.gradle b/airbyte-integrations/connectors/destination-redshift/build.gradle index bf3009392619d..38a06d8b272fa 100644 --- a/airbyte-integrations/connectors/destination-redshift/build.gradle +++ b/airbyte-integrations/connectors/destination-redshift/build.gradle @@ -1,11 +1,10 @@ plugins { id 'application' id 'airbyte-java-connector' - id 'org.jetbrains.kotlin.jvm' version '1.9.22' } airbyteJavaConnector { - cdkVersionRequired = '0.23.18' + cdkVersionRequired = '0.29.12' features = ['db-destinations', 's3-destinations', 'typing-deduping'] useLocalCdk = false } diff --git a/airbyte-integrations/connectors/destination-redshift/metadata.yaml b/airbyte-integrations/connectors/destination-redshift/metadata.yaml index de2f7e7d91eac..bd5bcce7736e3 100644 --- a/airbyte-integrations/connectors/destination-redshift/metadata.yaml +++ b/airbyte-integrations/connectors/destination-redshift/metadata.yaml @@ -5,7 +5,7 @@ data: connectorSubtype: database connectorType: destination definitionId: f7a7d195-377f-cf5b-70a5-be6b819019dc - dockerImageTag: 2.1.10 + dockerImageTag: 2.4.3 dockerRepository: airbyte/destination-redshift documentationUrl: https://docs.airbyte.com/integrations/destinations/redshift githubIssueLabel: destination-redshift @@ -24,14 +24,14 @@ data: This version introduces [Destinations V2](https://docs.airbyte.com/release_notes/upgrading_to_destinations_v2/#what-is-destinations-v2), which provides better error handling, incremental delivery of data for large syncs, and improved final table structures. To review the breaking changes, and how to upgrade, see [here](https://docs.airbyte.com/release_notes/upgrading_to_destinations_v2/#quick-start-to-upgrading). These changes will likely require updates to downstream dbt / SQL models, which we walk through [here](https://docs.airbyte.com/release_notes/upgrading_to_destinations_v2/#updating-downstream-transformations). Selecting `Upgrade` will upgrade **all** connections using this destination at their next sync. You can manually sync existing connections prior to the next scheduled sync to start the upgrade early. upgradeDeadline: "2024-03-15" - releaseStage: beta resourceRequirements: jobSpecific: - jobType: sync resourceRequirements: memory_limit: 1Gi memory_request: 1Gi - supportLevel: community + releaseStage: generally_available + supportLevel: certified supportsDbt: true tags: - language:java diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftInsertDestination.java b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftInsertDestination.java index 81521b03b9fa9..ebd28fd395816 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftInsertDestination.java +++ b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftInsertDestination.java @@ -15,34 +15,49 @@ import io.airbyte.cdk.db.jdbc.JdbcUtils; import io.airbyte.cdk.integrations.base.Destination; import io.airbyte.cdk.integrations.base.ssh.SshWrappedDestination; +import io.airbyte.cdk.integrations.destination.async.deser.StreamAwareDataTransformer; import io.airbyte.cdk.integrations.destination.jdbc.AbstractJdbcDestination; import io.airbyte.cdk.integrations.destination.jdbc.typing_deduping.JdbcDestinationHandler; import io.airbyte.cdk.integrations.destination.jdbc.typing_deduping.JdbcSqlGenerator; import io.airbyte.commons.json.Jsons; +import io.airbyte.integrations.base.destination.typing_deduping.DestinationHandler; +import io.airbyte.integrations.base.destination.typing_deduping.ParsedCatalog; +import io.airbyte.integrations.base.destination.typing_deduping.SqlGenerator; +import io.airbyte.integrations.base.destination.typing_deduping.migrators.Migration; import io.airbyte.integrations.destination.redshift.operations.RedshiftSqlOperations; import io.airbyte.integrations.destination.redshift.typing_deduping.RedshiftDestinationHandler; +import io.airbyte.integrations.destination.redshift.typing_deduping.RedshiftRawTableAirbyteMetaMigration; import io.airbyte.integrations.destination.redshift.typing_deduping.RedshiftSqlGenerator; import io.airbyte.integrations.destination.redshift.typing_deduping.RedshiftState; +import io.airbyte.integrations.destination.redshift.typing_deduping.RedshiftSuperLimitationTransformer; import io.airbyte.integrations.destination.redshift.util.RedshiftUtil; import java.time.Duration; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.Optional; import javax.sql.DataSource; -public class RedshiftInsertDestination extends AbstractJdbcDestination { +public class RedshiftInsertDestination extends AbstractJdbcDestination { public static final String DRIVER_CLASS = DatabaseDriver.REDSHIFT.getDriverClassName(); public static final Map SSL_JDBC_PARAMETERS = ImmutableMap.of( JdbcUtils.SSL_KEY, "true", "sslfactory", "com.amazon.redshift.ssl.NonValidatingFactory"); + // insert into stmt has ~200 bytes + // Per record overhead of ~150 bytes for strings in statement like JSON_PARSE.. uuid etc + // If the flush size allows the max batch of 10k records, then net overhead is ~1.5MB. + // Lets round it to 2MB for wiggle room and keep a max buffer of 14MB per flush. + // This will allow not sending record set larger than 14M limiting the batch insert statement. + private static final Long REDSHIFT_OPTIMAL_BATCH_SIZE_FOR_FLUSH = 14 * 1024 * 1024L; + public static Destination sshWrappedDestination() { return new SshWrappedDestination(new RedshiftInsertDestination(), JdbcUtils.HOST_LIST_KEY, JdbcUtils.PORT_LIST_KEY); } public RedshiftInsertDestination() { - super(DRIVER_CLASS, new RedshiftSQLNameTransformer(), new RedshiftSqlOperations()); + super(DRIVER_CLASS, REDSHIFT_OPTIMAL_BATCH_SIZE_FOR_FLUSH, new RedshiftSQLNameTransformer(), new RedshiftSqlOperations()); } @Override @@ -85,6 +100,8 @@ protected Map getDefaultConnectionProperties(final JsonNode conf // connectTimeout is different from Hikari pool's connectionTimout, driver defaults to 10seconds so // increase it to match hikari's default connectionOptions.put("connectTimeout", "120"); + // See RedshiftProperty.LOG_SERVER_ERROR_DETAIL, defaults to true + connectionOptions.put("logservererrordetail", "false"); // HikariPool properties // https://github.com/brettwooldridge/HikariCP?tab=readme-ov-file#frequently-used // TODO: Change data source factory to configure these properties @@ -122,4 +139,17 @@ protected JdbcDestinationHandler getDestinationHandler(final Stri return new RedshiftDestinationHandler(databaseName, database, rawTableSchema); } + @Override + protected List> getMigrations(JdbcDatabase database, + String databaseName, + SqlGenerator sqlGenerator, + DestinationHandler destinationHandler) { + return List.of(new RedshiftRawTableAirbyteMetaMigration(database, databaseName)); + } + + @Override + protected StreamAwareDataTransformer getDataTransformer(ParsedCatalog parsedCatalog, String defaultNamespace) { + return new RedshiftSuperLimitationTransformer(parsedCatalog, defaultNamespace); + } + } diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftStagingS3Destination.java b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftStagingS3Destination.java index 97e8b4393890a..4d7d96db0bac6 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftStagingS3Destination.java +++ b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftStagingS3Destination.java @@ -12,7 +12,6 @@ import static io.airbyte.integrations.destination.redshift.util.RedshiftUtil.findS3Options; import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; import io.airbyte.cdk.db.factory.DataSourceFactory; import io.airbyte.cdk.db.jdbc.DefaultJdbcDatabase; import io.airbyte.cdk.db.jdbc.JdbcDatabase; @@ -25,11 +24,11 @@ import io.airbyte.cdk.integrations.base.TypingAndDedupingFlag; import io.airbyte.cdk.integrations.base.ssh.SshWrappedDestination; import io.airbyte.cdk.integrations.destination.NamingConventionTransformer; +import io.airbyte.cdk.integrations.destination.async.deser.StreamAwareDataTransformer; import io.airbyte.cdk.integrations.destination.jdbc.AbstractJdbcDestination; import io.airbyte.cdk.integrations.destination.jdbc.typing_deduping.JdbcDestinationHandler; import io.airbyte.cdk.integrations.destination.jdbc.typing_deduping.JdbcSqlGenerator; import io.airbyte.cdk.integrations.destination.jdbc.typing_deduping.JdbcV1V2Migrator; -import io.airbyte.cdk.integrations.destination.record_buffer.FileBuffer; import io.airbyte.cdk.integrations.destination.s3.AesCbcEnvelopeEncryption; import io.airbyte.cdk.integrations.destination.s3.AesCbcEnvelopeEncryption.KeyType; import io.airbyte.cdk.integrations.destination.s3.EncryptionConfig; @@ -42,16 +41,21 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.base.destination.typing_deduping.CatalogParser; import io.airbyte.integrations.base.destination.typing_deduping.DefaultTyperDeduper; +import io.airbyte.integrations.base.destination.typing_deduping.DestinationHandler; import io.airbyte.integrations.base.destination.typing_deduping.NoOpTyperDeduperWithV1V2Migrations; import io.airbyte.integrations.base.destination.typing_deduping.NoopV2TableMigrator; import io.airbyte.integrations.base.destination.typing_deduping.ParsedCatalog; +import io.airbyte.integrations.base.destination.typing_deduping.SqlGenerator; import io.airbyte.integrations.base.destination.typing_deduping.TypeAndDedupeOperationValve; import io.airbyte.integrations.base.destination.typing_deduping.TyperDeduper; +import io.airbyte.integrations.base.destination.typing_deduping.migrators.Migration; import io.airbyte.integrations.destination.redshift.operations.RedshiftS3StagingSqlOperations; import io.airbyte.integrations.destination.redshift.operations.RedshiftSqlOperations; import io.airbyte.integrations.destination.redshift.typing_deduping.RedshiftDestinationHandler; +import io.airbyte.integrations.destination.redshift.typing_deduping.RedshiftRawTableAirbyteMetaMigration; import io.airbyte.integrations.destination.redshift.typing_deduping.RedshiftSqlGenerator; import io.airbyte.integrations.destination.redshift.typing_deduping.RedshiftState; +import io.airbyte.integrations.destination.redshift.typing_deduping.RedshiftSuperLimitationTransformer; import io.airbyte.integrations.destination.redshift.util.RedshiftUtil; import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; import io.airbyte.protocol.models.v0.AirbyteConnectionStatus.Status; @@ -69,7 +73,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class RedshiftStagingS3Destination extends AbstractJdbcDestination implements Destination { +public class RedshiftStagingS3Destination extends AbstractJdbcDestination implements Destination { private static final Logger LOGGER = LoggerFactory.getLogger(RedshiftStagingS3Destination.class); @@ -89,7 +93,8 @@ private boolean isEphemeralKeysAndPurgingStagingData(final JsonNode config, fina public AirbyteConnectionStatus check(final JsonNode config) { final S3DestinationConfig s3Config = getS3DestinationConfig(findS3Options(config)); final EncryptionConfig encryptionConfig = - config.has(UPLOADING_METHOD) ? EncryptionConfig.fromJson(config.get(UPLOADING_METHOD).get(JdbcUtils.ENCRYPTION_KEY)) : new NoEncryption(); + config.has(UPLOADING_METHOD) ? EncryptionConfig.fromJson(config.get(UPLOADING_METHOD).get(JdbcUtils.ENCRYPTION_KEY)) + : new NoEncryption(); if (isEphemeralKeysAndPurgingStagingData(config, encryptionConfig)) { return new AirbyteConnectionStatus() .withStatus(Status.FAILED) @@ -185,6 +190,21 @@ protected JdbcDestinationHandler getDestinationHandler(final Stri return new RedshiftDestinationHandler(databaseName, database, rawTableSchema); } + @Override + protected List> getMigrations(JdbcDatabase database, + String databaseName, + SqlGenerator sqlGenerator, + DestinationHandler destinationHandler) { + return List.of(new RedshiftRawTableAirbyteMetaMigration(database, databaseName)); + } + + @Override + protected StreamAwareDataTransformer getDataTransformer(ParsedCatalog parsedCatalog, String defaultNamespace) { + // Redundant override to keep in consistent with InsertDestination. TODO: Unify these 2 classes with + // composition. + return new RedshiftSuperLimitationTransformer(parsedCatalog, defaultNamespace); + } + @Override @Deprecated public AirbyteMessageConsumer getConsumer(final JsonNode config, @@ -199,17 +219,10 @@ public SerializedAirbyteMessageConsumer getSerializedMessageConsumer(final JsonN final Consumer outputRecordCollector) throws Exception { final EncryptionConfig encryptionConfig = - config.has(UPLOADING_METHOD) ? EncryptionConfig.fromJson(config.get(UPLOADING_METHOD).get(JdbcUtils.ENCRYPTION_KEY)) : new NoEncryption(); + config.has(UPLOADING_METHOD) ? EncryptionConfig.fromJson(config.get(UPLOADING_METHOD).get(JdbcUtils.ENCRYPTION_KEY)) + : new NoEncryption(); final JsonNode s3Options = findS3Options(config); final S3DestinationConfig s3Config = getS3DestinationConfig(s3Options); - final int numberOfFileBuffers = getNumberOfFileBuffers(s3Options); - if (numberOfFileBuffers > FileBuffer.SOFT_CAP_CONCURRENT_STREAM_IN_BUFFER) { - LOGGER.warn(""" - Increasing the number of file buffers past {} can lead to increased performance but - leads to increased memory usage. If the number of file buffers exceeds the number - of streams {} this will create more buffers than necessary, leading to nonexistent gains - """, FileBuffer.SOFT_CAP_CONCURRENT_STREAM_IN_BUFFER, catalog.getStreams().size()); - } final String defaultNamespace = config.get("schema").asText(); for (final ConfiguredAirbyteStream stream : catalog.getStreams()) { @@ -236,13 +249,16 @@ public SerializedAirbyteMessageConsumer getSerializedMessageConsumer(final JsonN final JdbcV1V2Migrator migrator = new JdbcV1V2Migrator(getNamingResolver(), database, databaseName); final NoopV2TableMigrator v2TableMigrator = new NoopV2TableMigrator(); final boolean disableTypeDedupe = config.has(DISABLE_TYPE_DEDUPE) && config.get(DISABLE_TYPE_DEDUPE).asBoolean(false); + List> redshiftMigrations = getMigrations(database, databaseName, sqlGenerator, redshiftDestinationHandler); if (disableTypeDedupe) { typerDeduper = - new NoOpTyperDeduperWithV1V2Migrations<>(sqlGenerator, redshiftDestinationHandler, parsedCatalog, migrator, v2TableMigrator, List.of()); + new NoOpTyperDeduperWithV1V2Migrations<>(sqlGenerator, redshiftDestinationHandler, parsedCatalog, + migrator, v2TableMigrator, redshiftMigrations); } else { typerDeduper = - new DefaultTyperDeduper<>(sqlGenerator, redshiftDestinationHandler, parsedCatalog, migrator, v2TableMigrator, List.of()); + new DefaultTyperDeduper<>(sqlGenerator, redshiftDestinationHandler, parsedCatalog, migrator, v2TableMigrator, redshiftMigrations); } + return StagingConsumerFactory.builder( outputRecordCollector, database, @@ -255,27 +271,10 @@ public SerializedAirbyteMessageConsumer getSerializedMessageConsumer(final JsonN typerDeduper, parsedCatalog, defaultNamespace, - true).build().createAsync(); - } - - /** - * Retrieves user configured file buffer amount so as long it doesn't exceed the maximum number of - * file buffers and sets the minimum number to the default - *

    - * NOTE: If Out Of Memory Exceptions (OOME) occur, this can be a likely cause as this hard limit has - * not been thoroughly load tested across all instance sizes - * - * @param config user configurations - * @return number of file buffers if configured otherwise default - */ - @VisibleForTesting - public int getNumberOfFileBuffers(final JsonNode config) { - int numOfFileBuffers = FileBuffer.DEFAULT_MAX_CONCURRENT_STREAM_IN_BUFFER; - if (config.has(FileBuffer.FILE_BUFFER_COUNT_KEY)) { - numOfFileBuffers = Math.min(config.get(FileBuffer.FILE_BUFFER_COUNT_KEY).asInt(), FileBuffer.MAX_CONCURRENT_STREAM_IN_BUFFER); - } - // Only allows for values 10 <= numOfFileBuffers <= 50 - return Math.max(numOfFileBuffers, FileBuffer.DEFAULT_MAX_CONCURRENT_STREAM_IN_BUFFER); + true) + .setDataTransformer(getDataTransformer(parsedCatalog, defaultNamespace)) + .build() + .createAsync(); } private boolean isPurgeStagingData(final JsonNode config) { diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/constants/RedshiftDestinationConstants.java b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/constants/RedshiftDestinationConstants.java index 6ddd4bd094194..2d600d6813900 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/constants/RedshiftDestinationConstants.java +++ b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/constants/RedshiftDestinationConstants.java @@ -4,6 +4,9 @@ package io.airbyte.integrations.destination.redshift.constants; +import org.jooq.DataType; +import org.jooq.impl.DefaultDataType; + /** * Constant holder for Redshift Destination */ @@ -13,4 +16,6 @@ private RedshiftDestinationConstants() {} public static final String UPLOADING_METHOD = "uploading_method"; + public static final DataType SUPER_TYPE = new DefaultDataType<>(null, String.class, "super"); + } diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/operations/RedshiftS3StagingSqlOperations.java b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/operations/RedshiftS3StagingSqlOperations.java index c2b4da5c97ef9..59064ed591efd 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/operations/RedshiftS3StagingSqlOperations.java +++ b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/operations/RedshiftS3StagingSqlOperations.java @@ -28,6 +28,8 @@ import java.util.Optional; import java.util.UUID; import java.util.stream.Collectors; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class RedshiftS3StagingSqlOperations extends RedshiftSqlOperations implements StagingOperations { @@ -38,6 +40,8 @@ public class RedshiftS3StagingSqlOperations extends RedshiftSqlOperations implem private final ObjectMapper objectMapper; private final byte[] keyEncryptingKey; + private static final Logger LOGGER = LoggerFactory.getLogger(RedshiftS3StagingSqlOperations.class); + public RedshiftS3StagingSqlOperations(final NamingConventionTransformer nameTransformer, final AmazonS3 s3Client, final S3DestinationConfig s3Config, diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/operations/RedshiftSqlOperations.java b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/operations/RedshiftSqlOperations.java index 4c8927098ea83..a0b261982fac4 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/operations/RedshiftSqlOperations.java +++ b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/operations/RedshiftSqlOperations.java @@ -6,8 +6,10 @@ import static io.airbyte.cdk.integrations.base.JavaBaseConstants.COLUMN_NAME_AB_EXTRACTED_AT; import static io.airbyte.cdk.integrations.base.JavaBaseConstants.COLUMN_NAME_AB_LOADED_AT; +import static io.airbyte.cdk.integrations.base.JavaBaseConstants.COLUMN_NAME_AB_META; import static io.airbyte.cdk.integrations.base.JavaBaseConstants.COLUMN_NAME_AB_RAW_ID; import static io.airbyte.cdk.integrations.base.JavaBaseConstants.COLUMN_NAME_DATA; +import static io.airbyte.integrations.destination.redshift.constants.RedshiftDestinationConstants.*; import static org.jooq.impl.DSL.field; import static org.jooq.impl.DSL.function; import static org.jooq.impl.DSL.name; @@ -18,9 +20,11 @@ import com.google.common.collect.Iterables; import io.airbyte.cdk.db.jdbc.JdbcDatabase; import io.airbyte.cdk.integrations.base.JavaBaseConstants; +import io.airbyte.cdk.integrations.destination.async.model.PartialAirbyteMessage; import io.airbyte.cdk.integrations.destination.jdbc.JdbcSqlOperations; import io.airbyte.cdk.integrations.destination.jdbc.SqlOperationsUtils; -import io.airbyte.cdk.integrations.destination_async.partial_messages.PartialAirbyteMessage; +import io.airbyte.commons.json.Jsons; +import java.nio.charset.StandardCharsets; import java.sql.SQLException; import java.time.Instant; import java.time.OffsetDateTime; @@ -28,12 +32,12 @@ import java.util.List; import java.util.UUID; import org.jooq.DSLContext; -import org.jooq.InsertValuesStep4; +import org.jooq.InsertValuesStep5; import org.jooq.Record; import org.jooq.SQLDialect; +import org.jooq.conf.ParamType; import org.jooq.conf.Settings; import org.jooq.conf.StatementType; -import org.jooq.impl.DefaultDataType; import org.jooq.impl.SQLDataType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -42,7 +46,6 @@ public class RedshiftSqlOperations extends JdbcSqlOperations { private static final Logger LOGGER = LoggerFactory.getLogger(RedshiftSqlOperations.class); public static final int REDSHIFT_VARCHAR_MAX_BYTE_SIZE = 65535; - public static final int REDSHIFT_SUPER_MAX_BYTE_SIZE = 1000000; public RedshiftSqlOperations() {} @@ -71,7 +74,8 @@ protected String createTableQueryV2(final String schemaName, final String tableN .column(COLUMN_NAME_AB_EXTRACTED_AT, SQLDataType.TIMESTAMPWITHTIMEZONE.defaultValue(function("GETDATE", SQLDataType.TIMESTAMPWITHTIMEZONE))) .column(COLUMN_NAME_AB_LOADED_AT, SQLDataType.TIMESTAMPWITHTIMEZONE) - .column(COLUMN_NAME_DATA, new DefaultDataType<>(null, String.class, "super").nullable(false)) + .column(COLUMN_NAME_DATA, SUPER_TYPE.nullable(false)) + .column(COLUMN_NAME_AB_META, SUPER_TYPE.nullable(true)) .getSQL(); } @@ -112,7 +116,6 @@ protected void insertRecordsInternalV2(final JdbcDatabase database, // > TODO(sherif) this should use a smarter, destination-aware partitioning scheme instead of 10k by // > default for (final List batch : Iterables.partition(records, 10_000)) { - LOGGER.info("Prepared batch size: {}, {}, {}", batch.size(), schemaName, tableName); final DSLContext create = using( connection, SQLDialect.POSTGRES, @@ -138,21 +141,30 @@ protected void insertRecordsInternalV2(final JdbcDatabase database, // and // https://github.com/airbytehq/airbyte/blob/f73827eb43f62ee30093451c434ad5815053f32d/airbyte-cdk/java/airbyte-cdk/db-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/SqlOperationsUtils.java#L62 // for how DV1 did this in pure JDBC. - InsertValuesStep4 insert = create + InsertValuesStep5 insert = create .insertInto(table(name(schemaName, tableName)), field(COLUMN_NAME_AB_RAW_ID, SQLDataType.VARCHAR(36)), - field(COLUMN_NAME_DATA, new DefaultDataType<>(null, String.class, "super")), + field(COLUMN_NAME_DATA, SUPER_TYPE), + field(COLUMN_NAME_AB_META, SUPER_TYPE), field(COLUMN_NAME_AB_EXTRACTED_AT, SQLDataType.TIMESTAMPWITHTIMEZONE), field(COLUMN_NAME_AB_LOADED_AT, SQLDataType.TIMESTAMPWITHTIMEZONE)); for (final PartialAirbyteMessage record : batch) { insert = insert.values( val(UUID.randomUUID().toString()), function("JSON_PARSE", String.class, val(escapeStringLiteral(record.getSerialized()))), + function("JSON_PARSE", String.class, val(Jsons.serialize(record.getRecord().getMeta()))), val(Instant.ofEpochMilli(record.getRecord().getEmittedAt()).atOffset(ZoneOffset.UTC)), val((OffsetDateTime) null)); } - insert.execute(); - LOGGER.info("Executed batch size: {}, {}, {}", batch.size(), schemaName, tableName); + final String insertSQL = insert.getSQL(ParamType.INLINED); + LOGGER.info("Prepared batch size: {}, Schema: {}, Table: {}, SQL statement size {} MB", batch.size(), schemaName, tableName, + (insertSQL.getBytes(StandardCharsets.UTF_8).length) / (1024 * 1024L)); + final long startTime = System.currentTimeMillis(); + // Intentionally not using Jooq's insert.execute() as it was hiding the actual RedshiftException + // and also leaking the insert record values in the exception message. + connection.createStatement().execute(insertSQL); + LOGGER.info("Executed batch size: {}, Schema: {}, Table: {} in {} ms", batch.size(), schemaName, tableName, + (System.currentTimeMillis() - startTime)); } }); } catch (final Exception e) { diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftDestinationHandler.java b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftDestinationHandler.java index 497d6469cd05e..3e4af2021bb8b 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftDestinationHandler.java +++ b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftDestinationHandler.java @@ -46,7 +46,7 @@ public void execute(final Sql sql) throws Exception { // see https://github.com/airbytehq/airbyte/issues/33900 modifiedStatements.add("SET enable_case_sensitive_identifier to TRUE;\n"); modifiedStatements.addAll(transaction); - jdbcDatabase.executeWithinTransaction(modifiedStatements); + getJdbcDatabase().executeWithinTransaction(modifiedStatements); } catch (final SQLException e) { log.error("Sql {}-{} failed", queryId, transactionId, e); throw e; @@ -73,7 +73,8 @@ protected String toJdbcTypeName(AirbyteType airbyteType) { @Override protected RedshiftState toDestinationState(JsonNode json) { return new RedshiftState( - json.hasNonNull("needsSoftReset") && json.get("needsSoftReset").asBoolean()); + json.hasNonNull("needsSoftReset") && json.get("needsSoftReset").asBoolean(), + json.hasNonNull("isAirbyteMetaPresentInRaw") && json.get("isAirbyteMetaPresentInRaw").asBoolean()); } private String toJdbcTypeName(final AirbyteProtocolType airbyteProtocolType) { diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftRawTableAirbyteMetaMigration.kt b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftRawTableAirbyteMetaMigration.kt new file mode 100644 index 0000000000000..b459683174cef --- /dev/null +++ b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftRawTableAirbyteMetaMigration.kt @@ -0,0 +1,87 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.redshift.typing_deduping + +import io.airbyte.cdk.db.jdbc.JdbcDatabase +import io.airbyte.cdk.integrations.base.JavaBaseConstants +import io.airbyte.cdk.integrations.destination.jdbc.typing_deduping.JdbcDestinationHandler +import io.airbyte.integrations.base.destination.typing_deduping.DestinationHandler +import io.airbyte.integrations.base.destination.typing_deduping.DestinationInitialStatus +import io.airbyte.integrations.base.destination.typing_deduping.Sql +import io.airbyte.integrations.base.destination.typing_deduping.StreamConfig +import io.airbyte.integrations.base.destination.typing_deduping.migrators.Migration +import io.airbyte.integrations.destination.redshift.constants.RedshiftDestinationConstants.SUPER_TYPE +import org.jooq.conf.ParamType +import org.jooq.impl.DSL +import org.jooq.impl.DSL.name +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +class RedshiftRawTableAirbyteMetaMigration( + private val database: JdbcDatabase, + private val databaseName: String +) : Migration { + private val logger: Logger = + LoggerFactory.getLogger(RedshiftRawTableAirbyteMetaMigration::class.java) + + override fun migrateIfNecessary( + destinationHandler: DestinationHandler, + stream: StreamConfig, + state: DestinationInitialStatus + ): Migration.MigrationResult { + if (!state.initialRawTableStatus.rawTableExists) { + // The raw table doesn't exist. No migration necessary. Update the state. + logger.info( + "Skipping RawTableAirbyteMetaMigration for ${stream.id.originalNamespace}.${stream.id.originalName} because the raw table doesn't exist" + ) + return Migration.MigrationResult( + state.destinationState.copy(isAirbyteMetaPresentInRaw = true), + false + ) + } + + val existingRawTable = + JdbcDestinationHandler.findExistingTable( + database, + databaseName, + stream.id.rawNamespace, + stream.id.rawName + ) + // The table should exist because we checked for it above + .get() + if (existingRawTable.columns[JavaBaseConstants.COLUMN_NAME_AB_META] != null) { + // The raw table already has the _airbyte_meta column. No migration necessary. Update + // the state. + return Migration.MigrationResult( + state.destinationState.copy(isAirbyteMetaPresentInRaw = true), + false + ) + } + + logger.info( + "Executing RawTableAirbyteMetaMigration for ${stream.id.originalNamespace}.${stream.id.originalName} for real" + ) + destinationHandler.execute( + getRawTableMetaColumnAddDdl(stream.id.rawNamespace!!, stream.id.rawName!!) + ) + + // Update the state. We didn't modify the table in a relevant way, so don't invalidate the + // InitialState. + // We will not do a soft reset since it could be time-consuming, instead we leave the old + // data i.e. `errors` instead of `changes` as is since this column is controlled by us. + return Migration.MigrationResult( + state.destinationState.copy(needsSoftReset = false, isAirbyteMetaPresentInRaw = true), + false + ) + } + + fun getRawTableMetaColumnAddDdl(namespace: String, name: String): Sql { + return Sql.of( + DSL.alterTable(name(namespace, name)) + .addColumn(name(JavaBaseConstants.COLUMN_NAME_AB_META), SUPER_TYPE) + .getSQL(ParamType.INLINED) + ) + } +} diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftSqlGenerator.java b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftSqlGenerator.java index 37f72c21c9f1a..d009eeba528de 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftSqlGenerator.java +++ b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftSqlGenerator.java @@ -25,6 +25,9 @@ import io.airbyte.integrations.base.destination.typing_deduping.Struct; import io.airbyte.integrations.base.destination.typing_deduping.Union; import io.airbyte.integrations.base.destination.typing_deduping.UnsupportedOneOf; +import io.airbyte.integrations.destination.redshift.constants.RedshiftDestinationConstants; +import io.airbyte.protocol.models.AirbyteRecordMessageMetaChange.Change; +import io.airbyte.protocol.models.AirbyteRecordMessageMetaChange.Reason; import java.sql.Timestamp; import java.util.ArrayList; import java.util.LinkedHashMap; @@ -35,15 +38,16 @@ import org.jooq.DataType; import org.jooq.Field; import org.jooq.SQLDialect; -import org.jooq.impl.DefaultDataType; import org.jooq.impl.SQLDataType; public class RedshiftSqlGenerator extends JdbcSqlGenerator { public static final String CASE_STATEMENT_SQL_TEMPLATE = "CASE WHEN {0} THEN {1} ELSE {2} END "; public static final String CASE_STATEMENT_NO_ELSE_SQL_TEMPLATE = "CASE WHEN {0} THEN {1} END "; - private static final String COLUMN_ERROR_MESSAGE_FORMAT = "Problem with `%s`"; - private static final String AIRBYTE_META_COLUMN_ERRORS_KEY = "errors"; + + private static final String CHANGE_TRACKER_JSON_TEMPLATE = "{\"field\": \"{0}\", \"change\": \"{1}\", \"reason\": \"{2}\"}"; + + private static final String AIRBYTE_META_COLUMN_CHANGES_KEY = "changes"; public RedshiftSqlGenerator(final NamingConventionTransformer namingTransformer) { super(namingTransformer); @@ -56,7 +60,7 @@ public RedshiftSqlGenerator(final NamingConventionTransformer namingTransformer) * @return */ private DataType getSuperType() { - return new DefaultDataType<>(null, String.class, "super"); + return RedshiftDestinationConstants.SUPER_TYPE; } @Override @@ -129,9 +133,9 @@ protected List> extractRawDataFields(final LinkedHashMap castedField( - field(quotedName(COLUMN_NAME_DATA, column.getKey().originalName())), + field(quotedName(COLUMN_NAME_DATA, column.getKey().getOriginalName())), column.getValue(), - column.getKey().name(), + column.getKey().getName(), useExpensiveSaferCasting)) .collect(Collectors.toList()); } @@ -166,14 +170,19 @@ Field arrayConcatStmt(final List> arrays) { } Field toCastingErrorCaseStmt(final ColumnId column, final AirbyteType type) { - final Field field = field(quotedName(COLUMN_NAME_DATA, column.originalName())); + final Field field = field(quotedName(COLUMN_NAME_DATA, column.getOriginalName())); // Just checks if data is not null but casted data is null. This also accounts for conditional // casting result of array and struct. // TODO: Timestamp format issues can result in null values when cast, add regex check if destination // supports regex functions. return field(CASE_STATEMENT_SQL_TEMPLATE, - field.isNotNull().and(castedField(field, type, column.name(), true).isNull()), - function("ARRAY", getSuperType(), val(COLUMN_ERROR_MESSAGE_FORMAT.formatted(column.name()))), field("ARRAY()")); + field.isNotNull().and(castedField(field, type, column.getName(), true).isNull()), + function("ARRAY", getSuperType(), + function("JSON_PARSE", getSuperType(), val( + "{\"field\": \"" + column.getName() + "\", " + + "\"change\": \"" + Change.NULLED.value() + "\", " + + "\"reason\": \"" + Reason.DESTINATION_TYPECAST_ERROR + "\"}"))), + field("ARRAY()")); } @Override @@ -183,7 +192,17 @@ protected Field buildAirbyteMetaColumn(final LinkedHashMap toCastingErrorCaseStmt(column.getKey(), column.getValue())) .collect(Collectors.toList()); - return function("OBJECT", getSuperType(), val(AIRBYTE_META_COLUMN_ERRORS_KEY), arrayConcatStmt(dataFields)).as(COLUMN_NAME_AB_META); + final Condition rawTableAirbyteMetaExists = + field(quotedName(COLUMN_NAME_AB_META)).isNotNull() + .and(function("IS_OBJECT", SQLDataType.BOOLEAN, field(quotedName(COLUMN_NAME_AB_META)))) + .and(field(quotedName(COLUMN_NAME_AB_META, AIRBYTE_META_COLUMN_CHANGES_KEY)).isNotNull()) + .and(function("IS_ARRAY", SQLDataType.BOOLEAN, field(quotedName(COLUMN_NAME_AB_META, AIRBYTE_META_COLUMN_CHANGES_KEY)))); + final Field airbyteMetaChangesArray = function("ARRAY_CONCAT", getSuperType(), + arrayConcatStmt(dataFields), field(CASE_STATEMENT_SQL_TEMPLATE, + rawTableAirbyteMetaExists, + field(quotedName(COLUMN_NAME_AB_META, AIRBYTE_META_COLUMN_CHANGES_KEY)), + field("ARRAY()"))); + return function("OBJECT", getSuperType(), val(AIRBYTE_META_COLUMN_CHANGES_KEY), airbyteMetaChangesArray).as(COLUMN_NAME_AB_META); } @@ -200,12 +219,12 @@ protected Field getRowNumber(final List primaryKeys, final Op // literally identical to postgres's getRowNumber implementation, changes here probably should // be reflected there final List> primaryKeyFields = - primaryKeys != null ? primaryKeys.stream().map(columnId -> field(quotedName(columnId.name()))).collect(Collectors.toList()) + primaryKeys != null ? primaryKeys.stream().map(columnId -> field(quotedName(columnId.getName()))).collect(Collectors.toList()) : new ArrayList<>(); final List> orderedFields = new ArrayList<>(); // We can still use Jooq's field to get the quoted name with raw sql templating. // jooq's .desc returns SortField instead of Field and NULLS LAST doesn't work with it - cursor.ifPresent(columnId -> orderedFields.add(field("{0} desc NULLS LAST", field(quotedName(columnId.name()))))); + cursor.ifPresent(columnId -> orderedFields.add(field("{0} desc NULLS LAST", field(quotedName(columnId.getName()))))); orderedFields.add(field("{0} desc", quotedName(COLUMN_NAME_AB_EXTRACTED_AT))); return rowNumber() .over() @@ -216,7 +235,7 @@ protected Field getRowNumber(final List primaryKeys, final Op @Override protected Condition cdcDeletedAtNotNullCondition() { return field(name(COLUMN_NAME_AB_LOADED_AT)).isNotNull() - .and(function("JSON_TYPEOF", SQLDataType.VARCHAR, field(quotedName(COLUMN_NAME_DATA, cdcDeletedAtColumn.name()))) + .and(function("JSON_TYPEOF", SQLDataType.VARCHAR, field(quotedName(COLUMN_NAME_DATA, getCdcDeletedAtColumn().getName()))) .ne("null")); } diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftState.kt b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftState.kt index d2200ea9a60c2..056b3323046ea 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftState.kt +++ b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftState.kt @@ -6,11 +6,13 @@ package io.airbyte.integrations.destination.redshift.typing_deduping import io.airbyte.integrations.base.destination.typing_deduping.migrators.MinimumDestinationState -data class RedshiftState(val needsSoftReset: Boolean) : MinimumDestinationState { +data class RedshiftState(val needsSoftReset: Boolean, val isAirbyteMetaPresentInRaw: Boolean) : + MinimumDestinationState { override fun needsSoftReset(): Boolean { return needsSoftReset } + @Suppress("UNCHECKED_CAST") override fun withSoftReset(needsSoftReset: Boolean): T { return copy(needsSoftReset = needsSoftReset) as T } diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftSuperLimitationTransformer.java b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftSuperLimitationTransformer.java new file mode 100644 index 0000000000000..bdb123fb41d8b --- /dev/null +++ b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftSuperLimitationTransformer.java @@ -0,0 +1,272 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.redshift.typing_deduping; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.google.common.annotations.VisibleForTesting; +import io.airbyte.cdk.integrations.destination.async.deser.StreamAwareDataTransformer; +import io.airbyte.commons.json.Jsons; +import io.airbyte.integrations.base.destination.typing_deduping.ColumnId; +import io.airbyte.integrations.base.destination.typing_deduping.ParsedCatalog; +import io.airbyte.integrations.base.destination.typing_deduping.StreamConfig; +import io.airbyte.protocol.models.v0.AirbyteRecordMessageMeta; +import io.airbyte.protocol.models.v0.AirbyteRecordMessageMetaChange; +import io.airbyte.protocol.models.v0.AirbyteRecordMessageMetaChange.Change; +import io.airbyte.protocol.models.v0.AirbyteRecordMessageMetaChange.Reason; +import io.airbyte.protocol.models.v0.DestinationSyncMode; +import io.airbyte.protocol.models.v0.StreamDescriptor; +import java.nio.charset.StandardCharsets; +import java.util.ArrayDeque; +import java.util.ArrayList; +import java.util.Deque; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Objects; +import java.util.Optional; +import java.util.Set; +import java.util.function.Predicate; +import java.util.stream.Collectors; +import kotlin.Pair; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.tuple.ImmutablePair; +import org.jetbrains.annotations.NotNull; + +@Slf4j +public class RedshiftSuperLimitationTransformer implements StreamAwareDataTransformer { + + private record ScalarNodeModification(int size, int removedSize, boolean shouldNull) {} + + public record TransformationInfo(int originalBytes, int removedBytes, JsonNode node, AirbyteRecordMessageMeta meta) {} + + public static final int REDSHIFT_VARCHAR_MAX_BYTE_SIZE = 65535; + public static final int REDSHIFT_SUPER_MAX_BYTE_SIZE = 16 * 1024 * 1024; + + static final Predicate DEFAULT_PREDICATE_VARCHAR_GREATER_THAN_64K = text -> getByteSize(text) > REDSHIFT_VARCHAR_MAX_BYTE_SIZE; + static final Predicate DEFAULT_PREDICATE_RECORD_SIZE_GT_THAN_16M = size -> size > REDSHIFT_SUPER_MAX_BYTE_SIZE; + + private static final int CURLY_BRACES_BYTE_SIZE = getByteSize("{}"); + private static final int SQUARE_BRACKETS_BYTE_SIZE = getByteSize("[]"); + private static final int OBJECT_COLON_QUOTES_COMMA_BYTE_SIZE = getByteSize("\"\":,"); + + private final ParsedCatalog parsedCatalog; + + private final String defaultNamespace; + + public RedshiftSuperLimitationTransformer(final ParsedCatalog parsedCatalog, final String defaultNamespace) { + this.parsedCatalog = parsedCatalog; + Objects.requireNonNull(defaultNamespace); + this.defaultNamespace = defaultNamespace; + + } + + /* + * This method walks the Json tree nodes and does the following + * + * 1. Collect the original bytes using UTF-8 charset. This is to avoid double walking the tree if + * the total size > 16MB This is to optimize for best case (see worst case as 4 below) that most of + * the data will be < 16MB and only few offending varchars > 64KB. + * + * 2. Replace all TextNodes with Null nodes if they are greater than 64K. + * + * 3. Verify if replacing the varchars with NULLs brought the record size down to < 16MB. This + * includes verifying the original bytes and transformed bytes are below the record size limit. + * + * 4. If 3 is false, this is the worst case scenarios where we try to resurrect PKs and cursors and + * trash the rest of the record. + * + */ + @NotNull + @Override + public Pair transform(final StreamDescriptor streamDescriptor, + final JsonNode jsonNode, + final AirbyteRecordMessageMeta airbyteRecordMessageMeta) { + final long startTime = System.currentTimeMillis(); + log.debug("Traversing the record to NULL fields for redshift size limitations"); + final String namespace = + (streamDescriptor.getNamespace() != null && !streamDescriptor.getNamespace().isEmpty()) ? streamDescriptor.getNamespace() : defaultNamespace; + final StreamConfig streamConfig = parsedCatalog.getStream(namespace, streamDescriptor.getName()); + final Optional cursorField = streamConfig.getCursor().map(ColumnId::getOriginalName); + // convert List to Set for faster lookup + final Set primaryKeys = streamConfig.getPrimaryKey().stream().map(ColumnId::getOriginalName).collect(Collectors.toSet()); + final DestinationSyncMode syncMode = streamConfig.getDestinationSyncMode(); + final TransformationInfo transformationInfo = transformNodes(jsonNode, DEFAULT_PREDICATE_VARCHAR_GREATER_THAN_64K); + final int originalBytes = transformationInfo.originalBytes; + final int transformedBytes = transformationInfo.originalBytes - transformationInfo.removedBytes; + // We check if the transformedBytes has solved the record limit. + log.debug("Traversal complete in {} ms", System.currentTimeMillis() - startTime); + if (DEFAULT_PREDICATE_RECORD_SIZE_GT_THAN_16M.test(originalBytes) + && DEFAULT_PREDICATE_RECORD_SIZE_GT_THAN_16M.test(transformedBytes)) { + // If we have reached here with a bunch of small varchars constituted to becoming a large record, + // person using Redshift for this data should re-evaluate life choices. + log.warn("Record size before transformation {}, after transformation {} bytes exceeds 16MB limit", originalBytes, transformedBytes); + final JsonNode minimalNode = constructMinimalJsonWithPks(jsonNode, primaryKeys, cursorField); + if (minimalNode.isEmpty() && syncMode == DestinationSyncMode.APPEND_DEDUP) { + // Fail the sync if PKs are missing in DEDUPE, no point sending an empty record to destination. + throw new RuntimeException("Record exceeds size limit, cannot transform without PrimaryKeys in DEDUPE sync"); + } + // Preserve original changes + final List changes = new ArrayList<>(); + changes.add(new AirbyteRecordMessageMetaChange() + .withField("all").withChange(Change.NULLED) + .withReason(Reason.DESTINATION_RECORD_SIZE_LIMITATION)); + if (airbyteRecordMessageMeta != null && airbyteRecordMessageMeta.getChanges() != null) { + changes.addAll(airbyteRecordMessageMeta.getChanges()); + } + return new Pair<>(minimalNode, new AirbyteRecordMessageMeta().withChanges(changes)); + } + if (airbyteRecordMessageMeta != null && airbyteRecordMessageMeta.getChanges() != null) { + // The underlying list of AirbyteRecordMessageMeta is mutable + transformationInfo.meta.getChanges().addAll(airbyteRecordMessageMeta.getChanges()); + } + // We intentionally don't deep copy for transformation to avoid memory bloat. + // The caller already has the reference of original jsonNode but returning again in + // case we choose to deepCopy in future for thread-safety. + return new Pair<>(jsonNode, transformationInfo.meta); + } + + private ScalarNodeModification shouldTransformScalarNode(final JsonNode node, + final Predicate textNodePredicate) { + final int bytes; + if (node.isTextual()) { + final int originalBytes = getByteSize(node.asText()) + 2; // for quotes + if (textNodePredicate.test(node.asText())) { + return new ScalarNodeModification(originalBytes, // size before nulling + originalBytes - 4, // account 4 bytes for null string + true); + } + bytes = originalBytes; + } else if (node.isNumber()) { + // Serialize exactly for numbers to account for Scientific notation converted to full value. + // This is what we send over wire for persistence. + bytes = getByteSize(Jsons.serialize(node)); + } else if (node.isBoolean()) { + bytes = getByteSize(node.toString()); + } else if (node.isNull()) { + bytes = 4; // for "null" + } else { + bytes = 0; + } + return new ScalarNodeModification(bytes, // For all other types, just return bytes + 0, + false); + } + + private static int getByteSize(final String value) { + return value.getBytes(StandardCharsets.UTF_8).length; + } + + @VisibleForTesting + TransformationInfo transformNodes(final JsonNode rootNode, + final Predicate textNodePredicate) { + + // Walk the tree and transform Varchars that exceed the limit + // We are intentionally not checking the whole size upfront to check if it exceeds 16MB limit to + // optimize for best case. + int originalBytes = 0; + int removedBytes = 0; + // We accumulate nested keys in jsonPath format for adding to airbyte changes. + final Deque> stack = new ArrayDeque<>(); + final List changes = new ArrayList<>(); + + // This was intentionally done using Iterative DFS to avoid stack overflow for large records. + // This will ensure we are allocating on heap and not on stack. + stack.push(ImmutablePair.of("$", rootNode)); + while (!stack.isEmpty()) { + final ImmutablePair jsonPathNodePair = stack.pop(); + final JsonNode currentNode = jsonPathNodePair.right; + if (currentNode.isObject()) { + originalBytes += CURLY_BRACES_BYTE_SIZE; + final Iterator> fields = currentNode.fields(); + while (fields.hasNext()) { + final Map.Entry field = fields.next(); + originalBytes += getByteSize(field.getKey()) + OBJECT_COLON_QUOTES_COMMA_BYTE_SIZE; // for quotes, colon, comma + final String jsonPathKey = String.format("%s.%s", jsonPathNodePair.left, field.getKey()); + // TODO: Little difficult to unify this logic in Object & Array, find a way later + // Push only non-scalar nodes to stack. For scalar nodes, we need reference of parent to do in-place + // update. + if (field.getValue().isContainerNode()) { + stack.push(ImmutablePair.of(jsonPathKey, field.getValue())); + } else { + final ScalarNodeModification shouldTransform = shouldTransformScalarNode(field.getValue(), textNodePredicate); + if (shouldTransform.shouldNull()) { + removedBytes += shouldTransform.removedSize; + // DO NOT do this if this code every modified to a multithreading call stack + field.setValue(Jsons.jsonNode(null)); + changes.add(new AirbyteRecordMessageMetaChange() + .withField(jsonPathKey) + .withChange(Change.NULLED) + .withReason(Reason.DESTINATION_FIELD_SIZE_LIMITATION)); + } + originalBytes += shouldTransform.size; + } + } + originalBytes -= 1; // remove extra comma from last key-value pair + } else if (currentNode.isArray()) { + originalBytes += SQUARE_BRACKETS_BYTE_SIZE; + final ArrayNode arrayNode = (ArrayNode) currentNode; + // We cannot use foreach here as we need to update the array in place. + for (int i = 0; i < arrayNode.size(); i++) { + final JsonNode childNode = arrayNode.get(i); + final String jsonPathKey = String.format("%s[%d]", jsonPathNodePair.left, i); + if (childNode.isContainerNode()) + stack.push(ImmutablePair.of(jsonPathKey, childNode)); + else { + final ScalarNodeModification shouldTransform = shouldTransformScalarNode(childNode, textNodePredicate); + if (shouldTransform.shouldNull()) { + removedBytes += shouldTransform.removedSize; + // DO NOT do this if this code every modified to a multithreading call stack + arrayNode.set(i, Jsons.jsonNode(null)); + changes.add(new AirbyteRecordMessageMetaChange() + .withField(jsonPathKey) + .withChange(Change.NULLED) + .withReason(Reason.DESTINATION_FIELD_SIZE_LIMITATION)); + } + originalBytes += shouldTransform.size; + } + } + originalBytes += !currentNode.isEmpty() ? currentNode.size() - 1 : 0; // for commas + } else { // Top level scalar node is a valid json + originalBytes += shouldTransformScalarNode(currentNode, textNodePredicate).size(); + } + } + + if (removedBytes != 0) { + log.info("Original record size {} bytes, Modified record size {} bytes", originalBytes, (originalBytes - removedBytes)); + } + return new TransformationInfo(originalBytes, removedBytes, rootNode, new AirbyteRecordMessageMeta().withChanges(changes)); + } + + @SuppressWarnings("OptionalUsedAsFieldOrParameterType") + private JsonNode constructMinimalJsonWithPks(JsonNode rootNode, Set primaryKeys, Optional cursorField) { + final ObjectNode minimalNode = (ObjectNode) Jsons.emptyObject(); + // We only iterate for top-level fields in the root object, since we only support PKs and cursor in + // top level keys. + if (rootNode.isObject()) { + final Iterator> fields = rootNode.fields(); + while (fields.hasNext()) { + final Map.Entry field = fields.next(); + if (!field.getValue().isContainerNode()) { + if (primaryKeys.contains(field.getKey()) || cursorField.isPresent() && cursorField.get().equals(field.getKey())) { + // Make a deepcopy into minimalNode of PKs and cursor fields and values, + // without deepcopy, we will re-reference the original Tree's nodes. + // god help us if someone set a PK on non-scalar field, and it reached this point, only do at root + // level + minimalNode.set(field.getKey(), field.getValue().deepCopy()); + } + } + } + } else { + log.error("Encountered {} as top level JSON field, this is not supported", rootNode.getNodeType()); + // This should have caught way before it reaches here. Just additional safety. + throw new RuntimeException("Encountered " + rootNode.getNodeType() + " as top level JSON field, this is not supported"); + } + return minimalNode; + } + +} diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-redshift/src/main/resources/spec.json index 55cb60c52a62f..b68874c9fe552 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/destination-redshift/src/main/resources/spec.json @@ -138,7 +138,6 @@ "me-central-1", "me-south-1", "sa-east-1", - "sa-east-1", "us-east-1", "us-east-2", "us-gov-east-1", @@ -224,15 +223,6 @@ } ], "order": 7 - }, - "file_buffer_count": { - "title": "File Buffer Count", - "type": "integer", - "minimum": 10, - "maximum": 50, - "default": 10, - "description": "Number of file buffers allocated for writing data. Increasing this number is beneficial for connections using Change Data Capture (CDC) and up to the number of streams within a connection. Increasing the number of file buffers past the maximum number of streams has deteriorating effects", - "examples": ["10"] } } }, diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftDestinationAcceptanceTest.java index de31216ed2221..4ea666f811dac 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftDestinationAcceptanceTest.java @@ -211,9 +211,9 @@ private void removeOldNamespaces() { @Override protected void tearDown(final TestDestinationEnv testEnv) throws Exception { - System.out.println("TEARING_DOWN_SCHEMAS: " + TEST_SCHEMAS); + System.out.println("TEARING_DOWN_SCHEMAS: " + getTestSchemas()); getDatabase().query(ctx -> ctx.execute(String.format("DROP SCHEMA IF EXISTS %s CASCADE", config.get("schema").asText()))); - for (final String schema : TEST_SCHEMAS) { + for (final String schema : getTestSchemas()) { getDatabase().query(ctx -> ctx.execute(String.format("DROP SCHEMA IF EXISTS %s CASCADE", schema))); } getDatabase().query(ctx -> ctx.execute(String.format("drop user if exists %s;", USER_WITHOUT_CREDS))); diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftFileBufferTest.java b/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftFileBufferTest.java deleted file mode 100644 index bbeab71e6be0c..0000000000000 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftFileBufferTest.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.redshift; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import io.airbyte.cdk.integrations.destination.record_buffer.FileBuffer; -import io.airbyte.commons.io.IOs; -import io.airbyte.commons.json.Jsons; -import java.nio.file.Path; -import org.junit.jupiter.api.Test; - -public class RedshiftFileBufferTest { - - private final JsonNode config = Jsons.deserialize(IOs.readFile(Path.of("secrets/config_staging.json"))); - private final RedshiftStagingS3Destination destination = new RedshiftStagingS3Destination(); - - @Test - public void testGetFileBufferDefault() { - assertEquals(destination.getNumberOfFileBuffers(config), FileBuffer.DEFAULT_MAX_CONCURRENT_STREAM_IN_BUFFER); - } - - @Test - public void testGetFileBufferMaxLimited() { - ((ObjectNode) config).put(FileBuffer.FILE_BUFFER_COUNT_KEY, 100); - assertEquals(destination.getNumberOfFileBuffers(config), FileBuffer.MAX_CONCURRENT_STREAM_IN_BUFFER); - } - - @Test - public void testGetMinimumFileBufferCount() { - ((ObjectNode) config).put(FileBuffer.FILE_BUFFER_COUNT_KEY, 1); - // User cannot set number of file counts below the default file buffer count, which is existing - // behavior - assertEquals(destination.getNumberOfFileBuffers(config), FileBuffer.DEFAULT_MAX_CONCURRENT_STREAM_IN_BUFFER); - } - -} diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/typing_deduping/AbstractRedshiftTypingDedupingTest.java b/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/typing_deduping/AbstractRedshiftTypingDedupingTest.java index 514fd14363a8b..93053a4f0fd80 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/typing_deduping/AbstractRedshiftTypingDedupingTest.java +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/typing_deduping/AbstractRedshiftTypingDedupingTest.java @@ -4,20 +4,35 @@ package io.airbyte.integrations.destination.redshift.typing_deduping; +import static io.airbyte.integrations.destination.redshift.typing_deduping.RedshiftSuperLimitationTransformer.*; + import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ObjectNode; import io.airbyte.cdk.db.JdbcCompatibleSourceOperations; import io.airbyte.cdk.integrations.standardtest.destination.typing_deduping.JdbcTypingDedupingTest; +import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.base.destination.typing_deduping.SqlGenerator; import io.airbyte.integrations.destination.redshift.RedshiftInsertDestination; import io.airbyte.integrations.destination.redshift.RedshiftSQLNameTransformer; import io.airbyte.integrations.destination.redshift.typing_deduping.RedshiftSqlGeneratorIntegrationTest.RedshiftSourceOperations; +import io.airbyte.protocol.models.v0.AirbyteMessage; +import io.airbyte.protocol.models.v0.AirbyteStream; +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; +import io.airbyte.protocol.models.v0.DestinationSyncMode; +import io.airbyte.protocol.models.v0.SyncMode; +import java.util.List; +import java.util.Random; import javax.sql.DataSource; import org.jooq.DSLContext; import org.jooq.conf.Settings; import org.jooq.impl.DSL; +import org.junit.jupiter.api.Test; public abstract class AbstractRedshiftTypingDedupingTest extends JdbcTypingDedupingTest { + private static final Random RANDOM = new Random(); + @Override protected String getImageName() { return "airbyte/destination-redshift:dev"; @@ -47,4 +62,122 @@ protected DSLContext getDslContext() { }; } + @Test + public void testRawTableMetaMigration_append() throws Exception { + final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withStreams(List.of( + new ConfiguredAirbyteStream() + .withSyncMode(SyncMode.FULL_REFRESH) + .withDestinationSyncMode(DestinationSyncMode.APPEND) + .withStream(new AirbyteStream() + .withNamespace(getStreamNamespace()) + .withName(getStreamName()) + .withJsonSchema(getSchema())))); + + // First sync without _airbyte_meta + final List messages1 = readMessages("dat/sync1_messages_before_meta.jsonl"); + runSync(catalog, messages1, "airbyte/destination-redshift:2.1.10"); + // Second sync + final List messages2 = readMessages("dat/sync2_messages_after_meta.jsonl"); + runSync(catalog, messages2); + + final List expectedRawRecords2 = readRecords("dat/sync2_expectedrecords_mixed_meta_raw.jsonl"); + final List expectedFinalRecords2 = readRecords("dat/sync2_expectedrecords_fullrefresh_append_mixed_meta_final.jsonl"); + verifySyncResult(expectedRawRecords2, expectedFinalRecords2, disableFinalTableComparison()); + } + + @Test + public void testRawTableMetaMigration_incrementalDedupe() throws Exception { + final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withStreams(List.of( + new ConfiguredAirbyteStream() + .withSyncMode(SyncMode.INCREMENTAL) + .withCursorField(List.of("updated_at")) + .withDestinationSyncMode(DestinationSyncMode.APPEND_DEDUP) + .withPrimaryKey(List.of(List.of("id1"), List.of("id2"))) + .withStream(new AirbyteStream() + .withNamespace(getStreamNamespace()) + .withName(getStreamName()) + .withJsonSchema(getSchema())))); + + // First sync without _airbyte_meta + final List messages1 = readMessages("dat/sync1_messages_before_meta.jsonl"); + runSync(catalog, messages1, "airbyte/destination-redshift:2.1.10"); + // Second sync + final List messages2 = readMessages("dat/sync2_messages_after_meta.jsonl"); + runSync(catalog, messages2); + + final List expectedRawRecords2 = readRecords("dat/sync2_expectedrecords_mixed_meta_raw.jsonl"); + final List expectedFinalRecords2 = readRecords("dat/sync2_expectedrecords_incremental_dedup_meta_final.jsonl"); + verifySyncResult(expectedRawRecords2, expectedFinalRecords2, disableFinalTableComparison()); + } + + @Test + public void testRawTableLoadWithSuperVarcharLimitation() throws Exception { + final String record1 = """ + {"type": "RECORD", + "record":{ + "emitted_at": 1000, + "data": { + "id1": 1, + "id2": 200, + "updated_at": "2000-01-01T00:00:00Z", + "_ab_cdc_deleted_at": null, + "name": "PLACE_HOLDER", + "address": {"city": "San Francisco", "state": "CA"}} + } + } + """; + final String record2 = """ + {"type": "RECORD", + "record":{ + "emitted_at": 1000, + "data": { + "id1": 2, + "id2": 201, + "updated_at": "2000-01-01T00:00:00Z", + "_ab_cdc_deleted_at": null, + "name": "PLACE_HOLDER", + "address": {"city": "New York", "state": "NY"}} + } + } + """; + final String largeString1 = generateRandomString(REDSHIFT_VARCHAR_MAX_BYTE_SIZE); + final String largeString2 = generateRandomString(REDSHIFT_VARCHAR_MAX_BYTE_SIZE + 2); + final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withStreams(List.of( + new ConfiguredAirbyteStream() + .withSyncMode(SyncMode.FULL_REFRESH) + .withDestinationSyncMode(DestinationSyncMode.OVERWRITE) + .withStream(new AirbyteStream() + .withNamespace(getStreamNamespace()) + .withName(getStreamName()) + .withJsonSchema(getSchema())))); + final AirbyteMessage message1 = Jsons.deserialize(record1, AirbyteMessage.class); + message1.getRecord().setNamespace(getStreamNamespace()); + message1.getRecord().setStream(getStreamName()); + ((ObjectNode) message1.getRecord().getData()).put("name", largeString1); + final AirbyteMessage message2 = Jsons.deserialize(record2, AirbyteMessage.class); + message2.getRecord().setNamespace(getStreamNamespace()); + message2.getRecord().setStream(getStreamName()); + ((ObjectNode) message2.getRecord().getData()).put("name", largeString2); + + // message1 should be preserved which is just on limit, message2 should be nulled. + runSync(catalog, List.of(message1, message2)); + + // Add verification. + final List expectedRawRecords = readRecords("dat/sync1_recordnull_expectedrecords_raw.jsonl"); + final List expectedFinalRecords = readRecords("dat/sync1_recordnull_expectedrecords_final.jsonl"); + // Only replace for first record, second record should be nulled by transformer. + ((ObjectNode) expectedRawRecords.get(0).get("_airbyte_data")).put("name", largeString1); + ((ObjectNode) expectedFinalRecords.get(0)).put("name", largeString1); + verifySyncResult(expectedRawRecords, expectedFinalRecords, disableFinalTableComparison()); + + } + + protected String generateRandomString(final int totalLength) { + return RANDOM + .ints('a', 'z' + 1) + .limit(totalLength) + .collect(StringBuilder::new, StringBuilder::appendCodePoint, StringBuilder::append) + .toString(); + } + } diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftSqlGeneratorIntegrationTest.java b/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftSqlGeneratorIntegrationTest.java index 75515b5130b7c..7fdf96b1e4217 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftSqlGeneratorIntegrationTest.java +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftSqlGeneratorIntegrationTest.java @@ -152,7 +152,7 @@ protected DSLContext getDslContext() { @Override protected DestinationHandler getDestinationHandler() { - return new RedshiftDestinationHandler(databaseName, database, namespace); + return new RedshiftDestinationHandler(databaseName, database, getNamespace()); } @Override @@ -178,9 +178,9 @@ protected Field toJsonValue(final String valueAsString) { @Override @Test public void testCreateTableIncremental() throws Exception { - final Sql sql = generator.createTable(incrementalDedupStream, "", false); - destinationHandler.execute(sql); - List> initialStatuses = destinationHandler.gatherInitialState(List.of(incrementalDedupStream)); + final Sql sql = getGenerator().createTable(getIncrementalDedupStream(), "", false); + getDestinationHandler().execute(sql); + List> initialStatuses = getDestinationHandler().gatherInitialState(List.of(getIncrementalDedupStream())); assertEquals(1, initialStatuses.size()); final DestinationInitialStatus initialStatus = initialStatuses.getFirst(); assertTrue(initialStatus.isFinalTablePresent()); diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftStandardInsertsTypingDedupingTest.java b/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftStandardInsertsTypingDedupingTest.java index d99d597e45100..7f99362777dde 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftStandardInsertsTypingDedupingTest.java +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftStandardInsertsTypingDedupingTest.java @@ -4,10 +4,22 @@ package io.airbyte.integrations.destination.redshift.typing_deduping; +import static org.junit.jupiter.api.Assertions.*; + +import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; import io.airbyte.commons.io.IOs; import io.airbyte.commons.json.Jsons; +import io.airbyte.protocol.models.v0.AirbyteMessage; +import io.airbyte.protocol.models.v0.AirbyteStream; +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; +import io.airbyte.protocol.models.v0.DestinationSyncMode; +import io.airbyte.protocol.models.v0.SyncMode; import java.nio.file.Path; +import java.util.ArrayList; +import java.util.List; +import org.junit.jupiter.api.Test; public class RedshiftStandardInsertsTypingDedupingTest extends AbstractRedshiftTypingDedupingTest { @@ -16,4 +28,61 @@ protected ObjectNode getBaseConfig() { return (ObjectNode) Jsons.deserialize(IOs.readFile(Path.of("secrets/1s1t_config.json"))); } + @Test + public void testStandardInsertBatchSizeGtThan16Mb() throws Exception { + final String placeholderRecord = """ + {"type": "RECORD", + "record":{ + "emitted_at": 1000, + "data": { + "id1": 1, + "id2": 200, + "updated_at": "2000-01-01T00:00:00Z", + "_ab_cdc_deleted_at": null, + "name": "PLACE_HOLDER", + "address": {"city": "San Francisco", "state": "CA"}} + } + } + """; + final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withStreams(List.of( + new ConfiguredAirbyteStream() + .withSyncMode(SyncMode.FULL_REFRESH) + .withDestinationSyncMode(DestinationSyncMode.OVERWRITE) + .withStream(new AirbyteStream() + .withNamespace(getStreamNamespace()) + .withName(getStreamName()) + .withJsonSchema(getSchema())))); + List messages = new ArrayList<>(); + final int numberOfRecords = 1000; + for (int i = 0; i < numberOfRecords; ++i) { + // Stuff the record with 40Kb string, making the total record size to 41233 bytes + // Total sync generates ~39MB in 1000 records. + // Standard insert should not fail and chunk it into smaller inserts < 16MB statement length + final AirbyteMessage placeHolderMessage = Jsons.deserialize(placeholderRecord, AirbyteMessage.class); + placeHolderMessage.getRecord().setNamespace(getStreamNamespace()); + placeHolderMessage.getRecord().setStream(getStreamName()); + ((ObjectNode) placeHolderMessage.getRecord().getData()).put("id1", i); + ((ObjectNode) placeHolderMessage.getRecord().getData()).put("id2", 200 + i); + ((ObjectNode) placeHolderMessage.getRecord().getData()).put("name", generateRandomString(40 * 1024)); + messages.add(placeHolderMessage); + } + runSync(catalog, messages); + // we just need to iterate over final tables to verify the count and confirm they are inserted + // properly. + List finalTableResults = dumpFinalTableRecords(getStreamNamespace(), getStreamName()); + assertEquals(1000, finalTableResults.size()); + // getJsons query doesn't have order by clause, so using sum of n-numbers math to assert all IDs are + // inserted + int id1sum = 0; + int id2sum = 0; + int id1ExpectedSum = ((numberOfRecords - 1) * (numberOfRecords)) / 2; // n(n+1)/2 + int id2ExpectedSum = (200 * numberOfRecords) + id1ExpectedSum; // 200*n + id1Sum + for (JsonNode record : finalTableResults) { + id1sum += record.get("id1").asInt(); + id2sum += record.get("id2").asInt(); + } + assertEquals(id1ExpectedSum, id1sum); + assertEquals(id2ExpectedSum, id2sum); + } + } diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync1_cursorchange_expectedrecords_dedup_final.jsonl b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync1_cursorchange_expectedrecords_dedup_final.jsonl index 9f11b2293a95b..1710a288dde5f 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync1_cursorchange_expectedrecords_dedup_final.jsonl +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync1_cursorchange_expectedrecords_dedup_final.jsonl @@ -1,3 +1,3 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 200, "old_cursor": 1, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 201, "old_cursor": 2, "name": "Bob", "address": {"city": "Boston", "state": "MA"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":["Problem with `age`", "Problem with `registration_date`"]}, "id1": 2, "id2": 200, "old_cursor": 3, "name": "Charlie"} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 200, "old_cursor": 1, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 201, "old_cursor": 2, "name": "Bob", "address": {"city": "Boston", "state": "MA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[{"field":"age","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"registration_date","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"}]}, "id1": 2, "id2": 200, "old_cursor": 3, "name": "Charlie"} diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync1_cursorchange_expectedrecords_dedup_raw.jsonl b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync1_cursorchange_expectedrecords_dedup_raw.jsonl index 7f75f0f804e25..0b446fd740a07 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync1_cursorchange_expectedrecords_dedup_raw.jsonl +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync1_cursorchange_expectedrecords_dedup_raw.jsonl @@ -1,4 +1,4 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "old_cursor": 0, "_ab_cdc_deleted_at": null, "name" :"Alice", "address": {"city": "San Francisco", "state": "CA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "old_cursor": 1, "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "old_cursor": 2, "name": "Bob", "address": {"city": "Boston", "state": "MA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 2, "id2": 200, "old_cursor": 3, "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "old_cursor": 0, "_ab_cdc_deleted_at": null, "name" :"Alice", "address": {"city": "San Francisco", "state": "CA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "old_cursor": 1, "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "old_cursor": 2, "name": "Bob", "address": {"city": "Boston", "state": "MA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 2, "id2": 200, "old_cursor": 3, "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}, "_airbyte_meta": {"changes": []}} diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync1_expectedrecords_dedup_final.jsonl b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync1_expectedrecords_dedup_final.jsonl index 61024be7867d0..8f75c02eb4603 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync1_expectedrecords_dedup_final.jsonl +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync1_expectedrecords_dedup_final.jsonl @@ -1,5 +1,5 @@ // Keep the Alice record with more recent updated_at -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00.000000Z", "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00.000000Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":["Problem with `age`", "Problem with `registration_date`"]}, "id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00.000000Z", "name": "Charlie"} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00.000000Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00.000000Z", "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00.000000Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[{"field":"age","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"registration_date","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"address","change":"NULLED","reason":"SOURCE_RETRIEVAL_ERROR"}]}, "id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00.000000Z", "name": "Charlie"} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00.000000Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync1_expectedrecords_dedup_final2.jsonl b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync1_expectedrecords_dedup_final2.jsonl index b2bf47df66c11..1b29b504aadd4 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync1_expectedrecords_dedup_final2.jsonl +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync1_expectedrecords_dedup_final2.jsonl @@ -1 +1 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2001-01-01T00:00:00.000000Z", "name": "Someone completely different"} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2001-01-01T00:00:00.000000Z", "name": "Someone completely different"} diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync1_expectedrecords_nondedup_final.jsonl b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync1_expectedrecords_nondedup_final.jsonl index 6f53b9f3c12dd..08b7b6d1003a0 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync1_expectedrecords_nondedup_final.jsonl +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync1_expectedrecords_nondedup_final.jsonl @@ -1,6 +1,6 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00.000000Z", "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00.000000Z", "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00.000000Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00.000000Z", "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00.000000Z", "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00.000000Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}} // Invalid columns are nulled out (i.e. SQL null, not JSON null) -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":["Problem with `age`", "Problem with `registration_date`"]}, "id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00.000000Z", "name": "Charlie"} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00.000000Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[{"field":"age","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"registration_date","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"address","change":"NULLED","reason":"SOURCE_RETRIEVAL_ERROR"}]}, "id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00.000000Z", "name": "Charlie"} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00.000000Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync1_expectedrecords_raw.jsonl b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync1_expectedrecords_raw.jsonl index 4012c086a9e61..fb6986690b1f4 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync1_expectedrecords_raw.jsonl +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync1_expectedrecords_raw.jsonl @@ -1,6 +1,6 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}}, "_airbyte_meta": {"changes": []}} // Invalid data is still allowed in the raw table. -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00Z", "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00Z", "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}, "_airbyte_meta": {"changes": [{"field": "address", "change": "NULLED", "reason": "SOURCE_RETRIEVAL_ERROR"}]}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"}, "_airbyte_meta": {"changes": []}} diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync1_expectedrecords_raw2.jsonl b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync1_expectedrecords_raw2.jsonl index b489accda1bb7..abbb44d6df67f 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync1_expectedrecords_raw2.jsonl +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync1_expectedrecords_raw2.jsonl @@ -1 +1 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2001-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Someone completely different"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2001-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Someone completely different"}, "_airbyte_meta": {"changes": []}} diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync1_messages_before_meta.jsonl b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync1_messages_before_meta.jsonl new file mode 100644 index 0000000000000..a37e8a603749e --- /dev/null +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync1_messages_before_meta.jsonl @@ -0,0 +1,14 @@ +// emitted_at:1000 is equal to 1970-01-01 00:00:01Z, which is what you'll see in the expected records. +// This obviously makes no sense in relation to updated_at being in the year 2000, but that's OK +// because (from destinations POV) updated_at has no relation to emitted_at. +{"type": "RECORD", "record": {"emitted_at": 1000, "data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}}}} +// Emit a second record for id=(1,200) with a different updated_at. This generally doesn't happen +// in full refresh syncs - but if T+D is implemented correctly, it shouldn't matter +// (i.e. both records should be written to the final table). +{"type": "RECORD", "record": {"emitted_at": 1000, "data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}}} +// Emit a record with no _ab_cdc_deleted_at field. CDC sources typically emit an explicit null, but we should handle both cases. +{"type": "RECORD", "record": {"emitted_at": 1000, "data": {"id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}}}} +// Emit a record with an invalid age & address nulled at source. +{"type": "RECORD", "record": {"emitted_at": 1000, "data": {"id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00Z", "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}, "meta": {"changes": [{"field": "address", "change": "NULLED", "reason": "SOURCE_RETRIEVAL_ERROR"}]}}} +// Emit a record with interesting characters in one of the values. +{"type": "RECORD", "record": {"emitted_at": 1000, "data": {"id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"}}} diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync1_recordnull_expectedrecords_final.jsonl b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync1_recordnull_expectedrecords_final.jsonl new file mode 100644 index 0000000000000..4b2592985d1fe --- /dev/null +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync1_recordnull_expectedrecords_final.jsonl @@ -0,0 +1,3 @@ +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00.000000Z", "name": "PLACE_HOLDER", "address": {"city": "San Francisco", "state": "CA"}} +// name is SQL null after nulling the record before persisting it. +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[{"field":"$.name","change":"NULLED","reason":"DESTINATION_FIELD_SIZE_LIMITATION"}]}, "id1": 2, "id2": 201, "updated_at": "2000-01-01T00:00:00.000000Z", "address": {"city": "New York", "state": "NY"}} \ No newline at end of file diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync1_recordnull_expectedrecords_raw.jsonl b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync1_recordnull_expectedrecords_raw.jsonl new file mode 100644 index 0000000000000..b6746d3f906ee --- /dev/null +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync1_recordnull_expectedrecords_raw.jsonl @@ -0,0 +1,2 @@ +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "PLACE_HOLDER", "address": {"city": "San Francisco", "state": "CA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 2, "id2": 201, "updated_at": "2000-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": null, "address": {"city": "New York", "state": "NY"}}, "_airbyte_meta":{"changes":[{"field":"$.name","change":"NULLED","reason":"DESTINATION_FIELD_SIZE_LIMITATION"}]}} \ No newline at end of file diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_cursorchange_expectedrecords_incremental_dedup_final.jsonl b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_cursorchange_expectedrecords_incremental_dedup_final.jsonl index c26d4a49aacd7..d48e1c0b78457 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_cursorchange_expectedrecords_incremental_dedup_final.jsonl +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_cursorchange_expectedrecords_incremental_dedup_final.jsonl @@ -1,3 +1,3 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Alice", "address": {"city": "Seattle", "state": "WA"}} -// Charlie wasn't reemitted with updated_at, so it still has a null cursor -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":["Problem with `age`", "Problem with `registration_date`"]}, "id1": 2, "id2": 200, "name": "Charlie"} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Alice", "address": {"city": "Seattle", "state": "WA"}} +// Charlie wasn't re-emitted with updated_at, so it still has a null cursor +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[{"field":"age","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"registration_date","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"}]}, "id1": 2, "id2": 200, "name": "Charlie"} diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_cursorchange_expectedrecords_incremental_dedup_raw.jsonl b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_cursorchange_expectedrecords_incremental_dedup_raw.jsonl index 03f28e155af53..85c770abacea8 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_cursorchange_expectedrecords_incremental_dedup_raw.jsonl +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_cursorchange_expectedrecords_incremental_dedup_raw.jsonl @@ -1,7 +1,7 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "old_cursor": 0, "_ab_cdc_deleted_at": null, "name" :"Alice", "address": {"city": "San Francisco", "state": "CA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "old_cursor": 1, "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "old_cursor": 2, "name": "Bob", "address": {"city": "Boston", "state": "MA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 2, "id2": 200, "old_cursor": 3, "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Seattle", "state": "WA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Bob", "address": {"city": "New York", "state": "NY"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00Z"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "old_cursor": 0, "_ab_cdc_deleted_at": null, "name" :"Alice", "address": {"city": "San Francisco", "state": "CA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "old_cursor": 1, "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "old_cursor": 2, "name": "Bob", "address": {"city": "Boston", "state": "MA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 2, "id2": 200, "old_cursor": 3, "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Seattle", "state": "WA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Bob", "address": {"city": "New York", "state": "NY"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00Z"}, "_airbyte_meta": {"changes": []}} diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_append_final.jsonl b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_append_final.jsonl index 0989dfc17ed07..d8bf8c063422f 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_append_final.jsonl +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_append_final.jsonl @@ -1,9 +1,9 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00.000000Z", "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00.000000Z", "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00.000000Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":["Problem with `age`", "Problem with `registration_date`"]}, "id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00.000000Z", "name": "Charlie"} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00.000000Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00.000000Z", "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00.000000Z", "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00.000000Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[{"field":"age","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"registration_date","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"address","change":"NULLED","reason":"SOURCE_RETRIEVAL_ERROR"}]}, "id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00.000000Z", "name": "Charlie"} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00.000000Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Alice", "address": {"city": "Seattle", "state": "WA"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"errors":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Bob", "address": {"city": "New York", "state": "NY"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"errors":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00.000000Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00.000000Z"} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Alice", "address": {"city": "Seattle", "state": "WA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"changes":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Bob", "address": {"city": "New York", "state": "NY"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"changes":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00.000000Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00.000000Z"} diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_append_mixed_meta_final.jsonl b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_append_mixed_meta_final.jsonl new file mode 100644 index 0000000000000..315e3707930d5 --- /dev/null +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_append_mixed_meta_final.jsonl @@ -0,0 +1,10 @@ +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00.000000Z", "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00.000000Z", "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00.000000Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":["Problem with `age`","Problem with `registration_date`"]}, "id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00.000000Z", "name": "Charlie"} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00.000000Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} + +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Alice", "address": {"city": "Seattle", "state": "WA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"changes":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Bob", "address": {"city": "New York", "state": "NY"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"changes":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00.000000Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00.000000Z"} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"changes":[{"field":"age","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"registration_date","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"address","change":"NULLED","reason":"SOURCE_RETRIEVAL_ERROR"}]}, "id1": 2, "id2": 200, "updated_at": "2000-01-02T00:03:00.000000Z", "name": "Charlie", "address": {"city": "San Francisco", "state": "CA"}} \ No newline at end of file diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_overwrite_final.jsonl b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_overwrite_final.jsonl index 9d1f1499469fc..0a4deced5cefc 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_overwrite_final.jsonl +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_overwrite_final.jsonl @@ -1,3 +1,3 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Alice", "address": {"city": "Seattle", "state": "WA"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"errors":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Bob", "address": {"city": "New York", "state": "NY"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"errors":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00.000000Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00.000000Z"} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Alice", "address": {"city": "Seattle", "state": "WA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"changes":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Bob", "address": {"city": "New York", "state": "NY"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"changes":[]}, "id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00.000000Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00.000000Z"} diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_overwrite_raw.jsonl b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_overwrite_raw.jsonl index 33bc3280be274..fbf2611fe68e3 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_overwrite_raw.jsonl +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_overwrite_raw.jsonl @@ -1,3 +1,3 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Seattle", "state": "WA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Bob", "address": {"city": "New York", "state": "NY"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00Z"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Seattle", "state": "WA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Bob", "address": {"city": "New York", "state": "NY"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00Z"}, "_airbyte_meta": {"changes": []}} diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_final.jsonl b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_final.jsonl index 1187ca159d722..1ad09d77383bf 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_final.jsonl +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_final.jsonl @@ -1,4 +1,4 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Alice", "address": {"city": "Seattle", "state": "WA"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Alice", "address": {"city": "Seattle", "state": "WA"}} // Delete Bob, keep Charlie -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":["Problem with `age`", "Problem with `registration_date`"]}, "id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00.000000Z", "name": "Charlie"} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00.000000Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[{"field":"age","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"registration_date","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"address","change":"NULLED","reason":"SOURCE_RETRIEVAL_ERROR"}]}, "id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00.000000Z", "name": "Charlie"} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"changes":[]}, "id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00.000000Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_final2.jsonl b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_final2.jsonl index 53c304c89d311..69eeec6bab90b 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_final2.jsonl +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_final2.jsonl @@ -1 +1 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"errors":[]}, "id1": 1, "id2": 200, "updated_at": "2001-01-02T00:00:00.000000Z", "name": "Someone completely different v2"} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2001-01-02T00:00:00.000000Z", "name": "Someone completely different v2"} diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_meta_final.jsonl b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_meta_final.jsonl new file mode 100644 index 0000000000000..eb63a8d0a8bf0 --- /dev/null +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_meta_final.jsonl @@ -0,0 +1,5 @@ +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"changes":[]}, "id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00.000000Z", "name": "Alice", "address": {"city": "Seattle", "state": "WA"}} +// Delete Bob, updated Charlie +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_meta":{"changes":[{"field":"age","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"registration_date","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"address","change":"NULLED","reason":"SOURCE_RETRIEVAL_ERROR"}]}, "id1": 2, "id2": 200, "updated_at": "2000-01-02T00:03:00.000000Z", "name": "Charlie", "address": {"city": "San Francisco", "state": "CA"}} +// Record before meta in raw table will continue to have errors. +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_meta": {"errors":[]}, "id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00.000000Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_expectedrecords_mixed_meta_raw.jsonl b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_expectedrecords_mixed_meta_raw.jsonl new file mode 100644 index 0000000000000..a1112818b1387 --- /dev/null +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_expectedrecords_mixed_meta_raw.jsonl @@ -0,0 +1,11 @@ +// We keep the records from the first sync +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00Z", "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"}} +// And append the records from the second sync +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Seattle", "state": "WA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Bob", "address": {"city": "New York", "state": "NY"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00Z"}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 2, "id2": 200, "updated_at": "2000-01-02T00:03:00Z", "name":"Charlie", "age":"this is not an integer", "registration_date":"this is not a date", "address": {"city": "San Francisco", "state": "CA"}}, "_airbyte_meta":{"changes":[{"field":"address","change":"NULLED","reason":"SOURCE_RETRIEVAL_ERROR"}]}} diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_expectedrecords_raw.jsonl b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_expectedrecords_raw.jsonl index 2f634c6ad4e95..65ba566c64269 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_expectedrecords_raw.jsonl +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_expectedrecords_raw.jsonl @@ -1,10 +1,10 @@ // We keep the records from the first sync -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00Z", "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00Z", "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}, "_airbyte_meta": {"changes":[{"field":"address","change":"NULLED","reason":"SOURCE_RETRIEVAL_ERROR"}]}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"}, "_airbyte_meta": {"changes": []}} // And append the records from the second sync -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Seattle", "state": "WA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Bob", "address": {"city": "New York", "state": "NY"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00Z"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Seattle", "state": "WA"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Bob", "address": {"city": "New York", "state": "NY"}}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00Z", "_airbyte_meta": {"changes": []}}} diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_expectedrecords_raw2.jsonl b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_expectedrecords_raw2.jsonl index 88b8ee7746c1c..6ae7bc9030ad7 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_expectedrecords_raw2.jsonl +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_expectedrecords_raw2.jsonl @@ -1,2 +1,2 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2001-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Someone completely different"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2001-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Someone completely different v2"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2001-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Someone completely different"}, "_airbyte_meta": {"changes": []}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2001-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Someone completely different v2"}, "_airbyte_meta": {"changes": []}} diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_messages_after_meta.jsonl b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_messages_after_meta.jsonl new file mode 100644 index 0000000000000..c31da6b35ae7e --- /dev/null +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/dat/sync2_messages_after_meta.jsonl @@ -0,0 +1,8 @@ +{"type": "RECORD", "record": {"emitted_at": 2000, "data": {"id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Seattle", "state": "WA"}}}} +{"type": "RECORD", "record": {"emitted_at": 2000, "data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Bob", "address": {"city": "New York", "state": "NY"}}}} +// Set deleted_at to something non-null. Again, T+D doesn't check the actual _value_ of deleted_at (i.e. the fact that it's in the past is irrelevant). +// It only cares whether deleted_at is non-null. So this should delete Bob from the final table (in dedup mode). +{"type": "RECORD", "record": {"emitted_at": 2000, "data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00Z"}}} +// Emit earlier message with _airbyte_meta again with one fixed column. +// Emit a record with an invalid age & address nulled at source. +{"type": "RECORD", "record": {"emitted_at": 2000, "data": {"id1": 2, "id2": 200, "updated_at": "2000-01-02T00:03:00Z", "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date", "address": {"city": "San Francisco", "state": "CA"}}, "meta": {"changes": [{"field": "address", "change": "NULLED", "reason": "SOURCE_RETRIEVAL_ERROR"}]}}} \ No newline at end of file diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/sqlgenerator/alltypes_expectedrecords_final.jsonl b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/sqlgenerator/alltypes_expectedrecords_final.jsonl index f6441416658b4..22b1a4e28c21e 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/sqlgenerator/alltypes_expectedrecords_final.jsonl +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/sqlgenerator/alltypes_expectedrecords_final.jsonl @@ -1,9 +1,9 @@ -{"id1": 1, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "array": ["foo"], "struct": {"foo": "bar"}, "string": "foo", "number": 42.1, "integer": 42, "boolean": true, "timestamp_with_timezone": "2023-01-23T12:34:56.000000Z", "timestamp_without_timezone": "2023-01-23T12:34:56", "time_with_timezone": "12:34:56Z", "time_without_timezone": "12:34:56", "date": "2023-01-23", "unknown": {}, "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}} -{"id1": 2, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}} -{"id1": 3, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}} -{"id1": 4, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": ["Problem with `struct`", "Problem with `array`", "Problem with `number`", "Problem with `integer`", "Problem with `boolean`","Problem with `timestamp_with_timezone`", "Problem with `timestamp_without_timezone`", "Problem with `time_with_timezone`","Problem with `time_without_timezone`", "Problem with `date`"]}} +{"id1": 1, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "array": ["foo"], "struct": {"foo": "bar"}, "string": "foo", "number": 42.1, "integer": 42, "boolean": true, "timestamp_with_timezone": "2023-01-23T12:34:56.000000Z", "timestamp_without_timezone": "2023-01-23T12:34:56", "time_with_timezone": "12:34:56Z", "time_without_timezone": "12:34:56", "date": "2023-01-23", "unknown": {}, "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}} +{"id1": 2, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}} +{"id1": 3, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}} +{"id1": 4, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": [{"field":"struct","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"array","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"number","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"integer","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"boolean","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"timestamp_with_timezone","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"timestamp_without_timezone","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"time_with_timezone","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"time_without_timezone","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"date","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"string","change":"NULLED","reason":"SOURCE_SERIALIZATION_ERROR"}]}} // Note that for numbers where we parse the value to JSON (struct, array, unknown) we lose precision. // But for numbers where we create a NUMBER column, we do not lose precision (see the `number` column). -{"id1": 5, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "number": 67.174118, "struct": {"nested_number": 67.174118}, "array": [67.174118], "unknown": 67.174118, "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}} +{"id1": 5, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "number": 67.174118, "struct": {"nested_number": 67.174118}, "array": [67.174118], "unknown": 67.174118, "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}} // Note that redshift downcases IAmACaseSensitiveColumnName to all lowercase -{"id1": 6, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "iamacasesensitivecolumnname": "Case senstive value", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}} \ No newline at end of file +{"id1": 6, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "iamacasesensitivecolumnname": "Case senstive value", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}} \ No newline at end of file diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/sqlgenerator/alltypes_expectedrecords_raw.jsonl b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/sqlgenerator/alltypes_expectedrecords_raw.jsonl index 6b99169ececf1..ed12fd09bccee 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/sqlgenerator/alltypes_expectedrecords_raw.jsonl +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/sqlgenerator/alltypes_expectedrecords_raw.jsonl @@ -1,6 +1,6 @@ {"_airbyte_raw_id": "14ba7c7f-e398-4e69-ac22-28d578400dbc", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_data": {"id1": 1, "id2": 100, "updated_at": "2023-01-01T01:00:00Z", "array": ["foo"], "struct": {"foo": "bar"}, "string": "foo", "number": 42.1, "integer": 42, "boolean": true, "timestamp_with_timezone": "2023-01-23T12:34:56Z", "timestamp_without_timezone": "2023-01-23T12:34:56", "time_with_timezone": "12:34:56Z", "time_without_timezone": "12:34:56", "date": "2023-01-23", "unknown": {}}} {"_airbyte_raw_id": "53ce75a5-5bcc-47a3-b45c-96c2015cfe35", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_data": {"id1": 2, "id2": 100, "updated_at": "2023-01-01T01:00:00Z", "array": null, "struct": null, "string": null, "number": null, "integer": null, "boolean": null, "timestamp_with_timezone": null, "timestamp_without_timezone": null, "time_with_timezone": null, "time_without_timezone": null, "date": null, "unknown": null}} {"_airbyte_raw_id": "7e1fac0c-017e-4ad6-bc78-334a34d64fbe", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_data": {"id1": 3, "id2": 100, "updated_at": "2023-01-01T01:00:00Z"}} -{"_airbyte_raw_id": "84242b60-3a34-4531-ad75-a26702960a9a", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_data": {"id1": 4, "id2": 100, "updated_at": "2023-01-01T01:00:00Z", "array": {}, "struct": [], "string": null, "number": "foo", "integer": "bar", "boolean": "fizz", "timestamp_with_timezone": {}, "timestamp_without_timezone": {}, "time_with_timezone": {}, "time_without_timezone": {}, "date": "airbyte", "unknown": null}} +{"_airbyte_raw_id": "84242b60-3a34-4531-ad75-a26702960a9a", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_data": {"id1": 4, "id2": 100, "updated_at": "2023-01-01T01:00:00Z", "array": {}, "struct": [], "string": null, "number": "foo", "integer": "bar", "boolean": "fizz", "timestamp_with_timezone": {}, "timestamp_without_timezone": {}, "time_with_timezone": {}, "time_without_timezone": {}, "date": "airbyte", "unknown": null}, "_airbyte_meta": {"changes": [{"field": "string", "change": "NULLED", "reason": "SOURCE_SERIALIZATION_ERROR"}]}} {"_airbyte_raw_id": "a4a783b5-7729-4d0b-b659-48ceb08713f1", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_data": {"id1": 5, "id2": 100, "updated_at": "2023-01-01T01:00:00Z", "number": 67.174118, "struct": {"nested_number": 67.174118}, "array": [67.174118], "unknown": 67.174118}} {"_airbyte_raw_id": "7e1fac0c-017e-4ad6-bc78-334a34d64fce", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_data": {"id1": 6, "id2": 100, "updated_at": "2023-01-01T01:00:00Z", "IamACaseSensitiveColumnName": "Case senstive value"}} diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/sqlgenerator/alltypes_v1v2_expectedrecords_final.jsonl b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/sqlgenerator/alltypes_v1v2_expectedrecords_final.jsonl new file mode 100644 index 0000000000000..b4bfef19579ec --- /dev/null +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/sqlgenerator/alltypes_v1v2_expectedrecords_final.jsonl @@ -0,0 +1,10 @@ +// Same as alltypes_expected but the meta didn't exist in v1 raw tables, so that information is not resurrected to the final. +{"id1": 1, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "array": ["foo"], "struct": {"foo": "bar"}, "string": "foo", "number": 42.1, "integer": 42, "boolean": true, "timestamp_with_timezone": "2023-01-23T12:34:56.000000Z", "timestamp_without_timezone": "2023-01-23T12:34:56", "time_with_timezone": "12:34:56Z", "time_without_timezone": "12:34:56", "date": "2023-01-23", "unknown": {}, "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}} +{"id1": 2, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}} +{"id1": 3, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}} +{"id1": 4, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": [{"field":"struct","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"array","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"number","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"integer","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"boolean","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"timestamp_with_timezone","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"timestamp_without_timezone","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"time_with_timezone","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"time_without_timezone","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"},{"field":"date","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"}]}} +// Note that for numbers where we parse the value to JSON (struct, array, unknown) we lose precision. +// But for numbers where we create a NUMBER column, we do not lose precision (see the `number` column). +{"id1": 5, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "number": 67.174118, "struct": {"nested_number": 67.174118}, "array": [67.174118], "unknown": 67.174118, "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}} +// Note that redshift downcases IAmACaseSensitiveColumnName to all lowercase +{"id1": 6, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "iamacasesensitivecolumnname": "Case senstive value", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}} \ No newline at end of file diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/sqlgenerator/alltypes_v1v2_expectedrecords_raw.jsonl b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/sqlgenerator/alltypes_v1v2_expectedrecords_raw.jsonl new file mode 100644 index 0000000000000..6b99169ececf1 --- /dev/null +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/sqlgenerator/alltypes_v1v2_expectedrecords_raw.jsonl @@ -0,0 +1,6 @@ +{"_airbyte_raw_id": "14ba7c7f-e398-4e69-ac22-28d578400dbc", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_data": {"id1": 1, "id2": 100, "updated_at": "2023-01-01T01:00:00Z", "array": ["foo"], "struct": {"foo": "bar"}, "string": "foo", "number": 42.1, "integer": 42, "boolean": true, "timestamp_with_timezone": "2023-01-23T12:34:56Z", "timestamp_without_timezone": "2023-01-23T12:34:56", "time_with_timezone": "12:34:56Z", "time_without_timezone": "12:34:56", "date": "2023-01-23", "unknown": {}}} +{"_airbyte_raw_id": "53ce75a5-5bcc-47a3-b45c-96c2015cfe35", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_data": {"id1": 2, "id2": 100, "updated_at": "2023-01-01T01:00:00Z", "array": null, "struct": null, "string": null, "number": null, "integer": null, "boolean": null, "timestamp_with_timezone": null, "timestamp_without_timezone": null, "time_with_timezone": null, "time_without_timezone": null, "date": null, "unknown": null}} +{"_airbyte_raw_id": "7e1fac0c-017e-4ad6-bc78-334a34d64fbe", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_data": {"id1": 3, "id2": 100, "updated_at": "2023-01-01T01:00:00Z"}} +{"_airbyte_raw_id": "84242b60-3a34-4531-ad75-a26702960a9a", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_data": {"id1": 4, "id2": 100, "updated_at": "2023-01-01T01:00:00Z", "array": {}, "struct": [], "string": null, "number": "foo", "integer": "bar", "boolean": "fizz", "timestamp_with_timezone": {}, "timestamp_without_timezone": {}, "time_with_timezone": {}, "time_without_timezone": {}, "date": "airbyte", "unknown": null}} +{"_airbyte_raw_id": "a4a783b5-7729-4d0b-b659-48ceb08713f1", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_data": {"id1": 5, "id2": 100, "updated_at": "2023-01-01T01:00:00Z", "number": 67.174118, "struct": {"nested_number": 67.174118}, "array": [67.174118], "unknown": 67.174118}} +{"_airbyte_raw_id": "7e1fac0c-017e-4ad6-bc78-334a34d64fce", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_data": {"id1": 6, "id2": 100, "updated_at": "2023-01-01T01:00:00Z", "IamACaseSensitiveColumnName": "Case senstive value"}} diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/sqlgenerator/incrementaldedup_expectedrecords_final.jsonl b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/sqlgenerator/incrementaldedup_expectedrecords_final.jsonl index 5842f7b37e42b..c59f838544eec 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/sqlgenerator/incrementaldedup_expectedrecords_final.jsonl +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/sqlgenerator/incrementaldedup_expectedrecords_final.jsonl @@ -1,2 +1,2 @@ -{"_airbyte_raw_id": "80c99b54-54b4-43bd-b51b-1f67dafa2c52", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}, "id1": 1, "id2": 100, "updated_at": "2023-01-01T02:00:00.000000Z", "string": "Alice", "struct": {"city": "San Diego", "state": "CA"}, "integer": 84} -{"_airbyte_raw_id": "ad690bfb-c2c2-4172-bd73-a16c86ccbb67", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": ["Problem with `integer`"]}, "id1": 2, "id2": 100, "updated_at": "2023-01-01T03:00:00.000000Z", "string": "Bob"} +{"_airbyte_raw_id": "80c99b54-54b4-43bd-b51b-1f67dafa2c52", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}, "id1": 1, "id2": 100, "updated_at": "2023-01-01T02:00:00.000000Z", "string": "Alice", "struct": {"city": "San Diego", "state": "CA"}, "integer": 84} +{"_airbyte_raw_id": "ad690bfb-c2c2-4172-bd73-a16c86ccbb67", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": [{"field":"integer","change":"NULLED","reason":"DESTINATION_TYPECAST_ERROR"}]}, "id1": 2, "id2": 100, "updated_at": "2023-01-01T03:00:00.000000Z", "string": "Bob"} diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/sqlgenerator/json_types_in_string_expectedrecords_final.jsonl b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/sqlgenerator/json_types_in_string_expectedrecords_final.jsonl index 52a9c10fcc47d..e015923deeb79 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/sqlgenerator/json_types_in_string_expectedrecords_final.jsonl +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/sqlgenerator/json_types_in_string_expectedrecords_final.jsonl @@ -1,5 +1,5 @@ -{"id1": 1, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "array": ["foo"], "struct": {"foo": "bar"}, "string": "[\"I\",\"am\",\"an\",\"array\"]", "number": 42.1, "integer": 42, "boolean": true, "timestamp_with_timezone": "2023-01-23T12:34:56.000000Z", "timestamp_without_timezone": "2023-01-23T12:34:56", "time_with_timezone": "12:34:56Z", "time_without_timezone": "12:34:56", "date": "2023-01-23", "unknown": {}, "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}} -{"id1": 2, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "array": ["foo"], "struct": {"foo": "bar"}, "string": "{\"I\":\"am\",\"an\":\"object\"}", "number": 42.1, "integer": 42, "boolean": true, "timestamp_with_timezone": "2023-01-23T12:34:56.000000Z", "timestamp_without_timezone": "2023-01-23T12:34:56", "time_with_timezone": "12:34:56Z", "time_without_timezone": "12:34:56", "date": "2023-01-23", "unknown": {}, "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}} -{"id1": 3, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "array": ["foo"], "struct": {"foo": "bar"}, "string": "true", "number": 42.1, "integer": 42, "boolean": true, "timestamp_with_timezone": "2023-01-23T12:34:56.000000Z", "timestamp_without_timezone": "2023-01-23T12:34:56", "time_with_timezone": "12:34:56Z", "time_without_timezone": "12:34:56", "date": "2023-01-23", "unknown": {}, "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}} -{"id1": 4, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "array": ["foo"], "struct": {"foo": "bar"}, "string": "3.14", "number": 42.1, "integer": 42, "boolean": true, "timestamp_with_timezone": "2023-01-23T12:34:56.000000Z", "timestamp_without_timezone": "2023-01-23T12:34:56", "time_with_timezone": "12:34:56Z", "time_without_timezone": "12:34:56", "date": "2023-01-23", "unknown": {}, "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}} -{"id1": 5, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "array": ["foo"], "struct": {"foo": "bar"}, "string": "I am a valid json string", "number": 42.1, "integer": 42, "boolean": true, "timestamp_with_timezone": "2023-01-23T12:34:56.000000Z", "timestamp_without_timezone": "2023-01-23T12:34:56", "time_with_timezone": "12:34:56Z", "time_without_timezone": "12:34:56", "date": "2023-01-23", "unknown": {}, "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}} +{"id1": 1, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "array": ["foo"], "struct": {"foo": "bar"}, "string": "[\"I\",\"am\",\"an\",\"array\"]", "number": 42.1, "integer": 42, "boolean": true, "timestamp_with_timezone": "2023-01-23T12:34:56.000000Z", "timestamp_without_timezone": "2023-01-23T12:34:56", "time_with_timezone": "12:34:56Z", "time_without_timezone": "12:34:56", "date": "2023-01-23", "unknown": {}, "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}} +{"id1": 2, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "array": ["foo"], "struct": {"foo": "bar"}, "string": "{\"I\":\"am\",\"an\":\"object\"}", "number": 42.1, "integer": 42, "boolean": true, "timestamp_with_timezone": "2023-01-23T12:34:56.000000Z", "timestamp_without_timezone": "2023-01-23T12:34:56", "time_with_timezone": "12:34:56Z", "time_without_timezone": "12:34:56", "date": "2023-01-23", "unknown": {}, "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}} +{"id1": 3, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "array": ["foo"], "struct": {"foo": "bar"}, "string": "true", "number": 42.1, "integer": 42, "boolean": true, "timestamp_with_timezone": "2023-01-23T12:34:56.000000Z", "timestamp_without_timezone": "2023-01-23T12:34:56", "time_with_timezone": "12:34:56Z", "time_without_timezone": "12:34:56", "date": "2023-01-23", "unknown": {}, "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}} +{"id1": 4, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "array": ["foo"], "struct": {"foo": "bar"}, "string": "3.14", "number": 42.1, "integer": 42, "boolean": true, "timestamp_with_timezone": "2023-01-23T12:34:56.000000Z", "timestamp_without_timezone": "2023-01-23T12:34:56", "time_with_timezone": "12:34:56Z", "time_without_timezone": "12:34:56", "date": "2023-01-23", "unknown": {}, "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}} +{"id1": 5, "id2": 100, "updated_at": "2023-01-01T01:00:00.000000Z", "array": ["foo"], "struct": {"foo": "bar"}, "string": "I am a valid json string", "number": 42.1, "integer": 42, "boolean": true, "timestamp_with_timezone": "2023-01-23T12:34:56.000000Z", "timestamp_without_timezone": "2023-01-23T12:34:56", "time_with_timezone": "12:34:56Z", "time_without_timezone": "12:34:56", "date": "2023-01-23", "unknown": {}, "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}} diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/sqlgenerator/nocolumns_expectedrecords_final.jsonl b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/sqlgenerator/nocolumns_expectedrecords_final.jsonl index 4ecd95d83b637..d14bcddf132f6 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/sqlgenerator/nocolumns_expectedrecords_final.jsonl +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/sqlgenerator/nocolumns_expectedrecords_final.jsonl @@ -1 +1 @@ -{"_airbyte_raw_id": "14ba7c7f-e398-4e69-ac22-28d578400dbc", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}} +{"_airbyte_raw_id": "14ba7c7f-e398-4e69-ac22-28d578400dbc", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}} diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/sqlgenerator/reservedkeywords_expectedrecords_final.jsonl b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/sqlgenerator/reservedkeywords_expectedrecords_final.jsonl index b34ad054ab33c..8ffcc0c73bdc6 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/sqlgenerator/reservedkeywords_expectedrecords_final.jsonl +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/sqlgenerator/reservedkeywords_expectedrecords_final.jsonl @@ -1 +1 @@ -{"_airbyte_raw_id":"b2e0efc4-38a8-47ba-970c-8103f09f08d5","_airbyte_extracted_at":"2023-01-01T00:00:00.000000Z","_airbyte_meta":{"errors":[]}, "current_date": "foo", "join": "bar"} +{"_airbyte_raw_id":"b2e0efc4-38a8-47ba-970c-8103f09f08d5","_airbyte_extracted_at":"2023-01-01T00:00:00.000000Z","_airbyte_meta":{"changes":[]}, "current_date": "foo", "join": "bar"} diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/sqlgenerator/timestampformats_expectedrecords_final.jsonl b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/sqlgenerator/timestampformats_expectedrecords_final.jsonl index 5a4bfc33d9060..33a87e1f47487 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/sqlgenerator/timestampformats_expectedrecords_final.jsonl +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/sqlgenerator/timestampformats_expectedrecords_final.jsonl @@ -2,15 +2,15 @@ // TIME, TIMETZ, TIMESTAMP, TIMESTAMPTZ values are UTC in user tables. // Note that redshift stores precision to microseconds. Java deserialization in tests preserves them only for non-zero values // except for timestamp with time zone where Z is required at end for even zero values -{"_airbyte_raw_id": "14ba7c7f-e398-4e69-ac22-28d578400dbc", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}, "timestamp_with_timezone": "2023-01-23T12:34:56.000000Z", "time_with_timezone": "12:34:56Z"} -{"_airbyte_raw_id": "05028c5f-7813-4e9c-bd4b-387d1f8ba435", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}, "timestamp_with_timezone": "2023-01-23T20:34:56.000000Z", "time_with_timezone": "20:34:56Z"} -{"_airbyte_raw_id": "95dfb0c6-6a67-4ba0-9935-643bebc90437", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}, "timestamp_with_timezone": "2023-01-23T20:34:56.000000Z", "time_with_timezone": "20:34:56Z"} -{"_airbyte_raw_id": "f3d8abe2-bb0f-4caf-8ddc-0641df02f3a9", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}, "timestamp_with_timezone": "2023-01-23T20:34:56.000000Z", "time_with_timezone": "20:34:56Z"} -{"_airbyte_raw_id": "a81ed40a-2a49-488d-9714-d53e8b052968", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}, "timestamp_with_timezone": "2023-01-23T04:34:56.000000Z", "time_with_timezone": "04:34:56Z"} -{"_airbyte_raw_id": "c07763a0-89e6-4cb7-b7d0-7a34a7c9918a", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}, "timestamp_with_timezone": "2023-01-23T04:34:56.000000Z", "time_with_timezone": "04:34:56Z"} -{"_airbyte_raw_id": "358d3b52-50ab-4e06-9094-039386f9bf0d", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}, "timestamp_with_timezone": "2023-01-23T04:34:56.000000Z", "time_with_timezone": "04:34:56Z"} -{"_airbyte_raw_id": "db8200ac-b2b9-4b95-a053-8a0343042751", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}, "timestamp_with_timezone": "2023-01-23T12:34:56.123000Z", "time_with_timezone": "12:34:56.123Z"} +{"_airbyte_raw_id": "14ba7c7f-e398-4e69-ac22-28d578400dbc", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}, "timestamp_with_timezone": "2023-01-23T12:34:56.000000Z", "time_with_timezone": "12:34:56Z"} +{"_airbyte_raw_id": "05028c5f-7813-4e9c-bd4b-387d1f8ba435", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}, "timestamp_with_timezone": "2023-01-23T20:34:56.000000Z", "time_with_timezone": "20:34:56Z"} +{"_airbyte_raw_id": "95dfb0c6-6a67-4ba0-9935-643bebc90437", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}, "timestamp_with_timezone": "2023-01-23T20:34:56.000000Z", "time_with_timezone": "20:34:56Z"} +{"_airbyte_raw_id": "f3d8abe2-bb0f-4caf-8ddc-0641df02f3a9", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}, "timestamp_with_timezone": "2023-01-23T20:34:56.000000Z", "time_with_timezone": "20:34:56Z"} +{"_airbyte_raw_id": "a81ed40a-2a49-488d-9714-d53e8b052968", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}, "timestamp_with_timezone": "2023-01-23T04:34:56.000000Z", "time_with_timezone": "04:34:56Z"} +{"_airbyte_raw_id": "c07763a0-89e6-4cb7-b7d0-7a34a7c9918a", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}, "timestamp_with_timezone": "2023-01-23T04:34:56.000000Z", "time_with_timezone": "04:34:56Z"} +{"_airbyte_raw_id": "358d3b52-50ab-4e06-9094-039386f9bf0d", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}, "timestamp_with_timezone": "2023-01-23T04:34:56.000000Z", "time_with_timezone": "04:34:56Z"} +{"_airbyte_raw_id": "db8200ac-b2b9-4b95-a053-8a0343042751", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}, "timestamp_with_timezone": "2023-01-23T12:34:56.123000Z", "time_with_timezone": "12:34:56.123Z"} -{"_airbyte_raw_id": "10ce5d93-6923-4217-a46f-103833837038", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}, "timestamp_without_timezone": "2023-01-23T12:34:56", "time_without_timezone": "12:34:56", "date": "2023-01-23"} +{"_airbyte_raw_id": "10ce5d93-6923-4217-a46f-103833837038", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}, "timestamp_without_timezone": "2023-01-23T12:34:56", "time_without_timezone": "12:34:56", "date": "2023-01-23"} // Bigquery returns 6 decimal places if there are any decimal places... but not for timestamp_with_timezone -{"_airbyte_raw_id": "a7a6e176-7464-4a0b-b55c-b4f936e8d5a1", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}, "timestamp_without_timezone": "2023-01-23T12:34:56.123", "time_without_timezone": "12:34:56.123"} +{"_airbyte_raw_id": "a7a6e176-7464-4a0b-b55c-b4f936e8d5a1", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}, "timestamp_without_timezone": "2023-01-23T12:34:56.123", "time_without_timezone": "12:34:56.123"} diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/sqlgenerator/weirdcolumnnames_expectedrecords_final.jsonl b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/sqlgenerator/weirdcolumnnames_expectedrecords_final.jsonl index adfbd06d6a55a..9d73b0601264a 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/sqlgenerator/weirdcolumnnames_expectedrecords_final.jsonl +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/resources/sqlgenerator/weirdcolumnnames_expectedrecords_final.jsonl @@ -6,4 +6,4 @@ // * includes$$doubledollar -> includes__doubledollar // * includes.period -> includes_period // * endswithbackslash\ -> endswithbackslash_ -{"_airbyte_raw_id": "7e7330a1-42fb-41ec-a955-52f18bd61964", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"errors": []}, "id1": 1, "id2": 100, "updated_at": "2023-01-01T02:00:00.000000Z", "_starts_with_dollar_sign": "foo", "includes_doublequote": "foo", "includes_singlequote": "foo", "includes_backtick": "foo", "includes_period": "foo", "includes__doubledollar": "foo", "endswithbackslash_": "foo"} +{"_airbyte_raw_id": "7e7330a1-42fb-41ec-a955-52f18bd61964", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000Z", "_airbyte_meta": {"changes": []}, "id1": 1, "id2": 100, "updated_at": "2023-01-01T02:00:00.000000Z", "_starts_with_dollar_sign": "foo", "includes_doublequote": "foo", "includes_singlequote": "foo", "includes_backtick": "foo", "includes_period": "foo", "includes__doubledollar": "foo", "endswithbackslash_": "foo"} diff --git a/airbyte-integrations/connectors/destination-redshift/src/test/java/io/airbyte/integrations/destination/redshift/operations/RedshiftSqlOperationsTest.java b/airbyte-integrations/connectors/destination-redshift/src/test/java/io/airbyte/integrations/destination/redshift/operations/RedshiftSqlOperationsTest.java deleted file mode 100644 index 1ff61d389c767..0000000000000 --- a/airbyte-integrations/connectors/destination-redshift/src/test/java/io/airbyte/integrations/destination/redshift/operations/RedshiftSqlOperationsTest.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.redshift.operations; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import java.util.Random; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - -@DisplayName("RedshiftSqlOperations") -public class RedshiftSqlOperationsTest { - - private static final Random RANDOM = new Random(); - - private String generateBigString(final int addExtraCharacters) { - final int length = RedshiftSqlOperations.REDSHIFT_VARCHAR_MAX_BYTE_SIZE + addExtraCharacters; - return RANDOM - .ints('a', 'z' + 1) - .limit(length) - .collect(StringBuilder::new, StringBuilder::appendCodePoint, StringBuilder::append) - .toString(); - } - - @Test - @DisplayName("isValidData should return true for valid data") - public void isValidDataForValid() { - JsonNode testNode = Jsons.jsonNode(ImmutableMap.builder() - .put("id", 3) - .put("currency", generateBigString(0)) - .put("date", "2020-10-10T00:00:00Z") - .put("HKD", 10.5) - .put("NZD", 1.14) - .build()); - - RedshiftSqlOperations uut = new RedshiftSqlOperations(); - boolean isValid = uut.isValidData(testNode); - assertEquals(true, isValid); - } - -} diff --git a/airbyte-integrations/connectors/destination-redshift/src/test/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftSqlGeneratorTest.java b/airbyte-integrations/connectors/destination-redshift/src/test/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftSqlGeneratorTest.java index 341c7df14ced1..e40bbcd309f25 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftSqlGeneratorTest.java +++ b/airbyte-integrations/connectors/destination-redshift/src/test/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftSqlGeneratorTest.java @@ -104,8 +104,10 @@ public void testTypingAndDeduping() throws IOException { .map(String::trim) .filter(line -> !line.isEmpty()) .toList(); - System.out.println(generatedSql); - assertEquals(expectedSqlLines, generatedSqlLines); + assertEquals(expectedSqlLines.size(), generatedSqlLines.size()); + for (int i = 0; i < expectedSqlLines.size(); i++) { + assertEquals(expectedSqlLines.get(i), generatedSqlLines.get(i)); + } } @Test diff --git a/airbyte-integrations/connectors/destination-redshift/src/test/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftSuperLimitationTransformerTest.java b/airbyte-integrations/connectors/destination-redshift/src/test/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftSuperLimitationTransformerTest.java new file mode 100644 index 0000000000000..3f011d39780bb --- /dev/null +++ b/airbyte-integrations/connectors/destination-redshift/src/test/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftSuperLimitationTransformerTest.java @@ -0,0 +1,176 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.redshift.typing_deduping; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.commons.json.Jsons; +import io.airbyte.commons.resources.MoreResources; +import io.airbyte.integrations.base.destination.typing_deduping.AirbyteProtocolType; +import io.airbyte.integrations.base.destination.typing_deduping.AirbyteType; +import io.airbyte.integrations.base.destination.typing_deduping.ColumnId; +import io.airbyte.integrations.base.destination.typing_deduping.ParsedCatalog; +import io.airbyte.integrations.base.destination.typing_deduping.StreamConfig; +import io.airbyte.integrations.base.destination.typing_deduping.StreamId; +import io.airbyte.integrations.destination.redshift.RedshiftSQLNameTransformer; +import io.airbyte.integrations.destination.redshift.typing_deduping.RedshiftSuperLimitationTransformer.TransformationInfo; +import io.airbyte.protocol.models.v0.AirbyteRecordMessageMeta; +import io.airbyte.protocol.models.v0.AirbyteRecordMessageMetaChange; +import io.airbyte.protocol.models.v0.AirbyteRecordMessageMetaChange.Change; +import io.airbyte.protocol.models.v0.AirbyteRecordMessageMetaChange.Reason; +import io.airbyte.protocol.models.v0.DestinationSyncMode; +import io.airbyte.protocol.models.v0.StreamDescriptor; +import io.airbyte.protocol.models.v0.SyncMode; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.UUID; +import java.util.stream.IntStream; +import kotlin.Pair; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +public class RedshiftSuperLimitationTransformerTest { + + private RedshiftSuperLimitationTransformer transformer; + private static final RedshiftSqlGenerator redshiftSqlGenerator = new RedshiftSqlGenerator(new RedshiftSQLNameTransformer()); + + @BeforeEach + public void setup() { + final ColumnId column1 = redshiftSqlGenerator.buildColumnId("column1"); + final ColumnId column2 = redshiftSqlGenerator.buildColumnId("column2"); + final List primaryKey = List.of(column1, column2); + final LinkedHashMap columns = new LinkedHashMap<>(); + // Generate columnIds from 3 to 1024 and add to columns map + IntStream.range(3, 1025).forEach(i -> columns.put(redshiftSqlGenerator.buildColumnId("column" + i), AirbyteProtocolType.STRING)); + + final StreamId streamId = new StreamId("test_schema", "users_final", "test_schema", "users_raw", "test_schema", "users_final"); + StreamConfig streamConfig = new StreamConfig( + streamId, + SyncMode.INCREMENTAL, + DestinationSyncMode.APPEND_DEDUP, + primaryKey, + Optional.empty(), + columns); + final ParsedCatalog parsedCatalog = new ParsedCatalog(List.of(streamConfig)); + transformer = new RedshiftSuperLimitationTransformer(parsedCatalog, "test_schema"); + } + + @Test + public void testVarcharNulling() throws IOException { + final String jsonString = MoreResources.readResource("test.json"); + final JsonNode jsonNode = Jsons.deserializeExact(jsonString); + // Calculate the size of the json before transformation, note that the original JsonNode is altered + // so + // serializing after transformation will return modified size. + final int jacksonDeserializationSize = Jsons.serialize(jsonNode).getBytes(StandardCharsets.UTF_8).length; + // Add a short length as predicate. + final TransformationInfo transformationInfo = + transformer.transformNodes(jsonNode, text -> text.length() > 10); + // Calculate the size of the json after transformation + final int jacksonDeserializeSizeAfterTransform = Jsons.serialize(jsonNode).getBytes(StandardCharsets.UTF_8).length; + assertEquals(jacksonDeserializationSize, transformationInfo.originalBytes()); + assertEquals(jacksonDeserializeSizeAfterTransform, transformationInfo.originalBytes() - transformationInfo.removedBytes()); + System.out.println(transformationInfo.meta()); + System.out.println(Jsons.serialize(jsonNode)); + } + + @Test + public void testRedshiftSuperLimit_ShouldRemovePartialRecord() throws IOException { + // We generate 1020 16Kb strings and 1 64Kb string + 2 uuids. + // Removing the 64kb will make it fall below the 16MB limit & offending varchar removed too. + final Map testData = new HashMap<>(); + testData.put("column1", UUID.randomUUID().toString()); + testData.put("column2", UUID.randomUUID().toString()); + testData.put("column3", getLargeString(64)); + // Add 16Kb strings from column 3 to 1024 in testData + IntStream.range(4, 1025).forEach(i -> testData.put("column" + i, getLargeString(16))); + + AirbyteRecordMessageMeta upstreamMeta = new AirbyteRecordMessageMeta() + .withChanges(List.of( + new AirbyteRecordMessageMetaChange() + .withField("upstream_field") + .withChange(Change.NULLED) + .withReason(Reason.PLATFORM_SERIALIZATION_ERROR))); + final Pair transformed = + transformer.transform(new StreamDescriptor().withNamespace("test_schema").withName("users_final"), Jsons.jsonNode(testData), upstreamMeta); + assertTrue( + Jsons.serialize(transformed.getFirst()) + .getBytes(StandardCharsets.UTF_8).length < RedshiftSuperLimitationTransformer.REDSHIFT_SUPER_MAX_BYTE_SIZE); + assertEquals(2, transformed.getSecond().getChanges().size()); + // Assert that transformation added the change + assertEquals("$.column3", transformed.getSecond().getChanges().getFirst().getField()); + assertEquals(Change.NULLED, transformed.getSecond().getChanges().getFirst().getChange()); + assertEquals(Reason.DESTINATION_FIELD_SIZE_LIMITATION, transformed.getSecond().getChanges().getFirst().getReason()); + // Assert that upstream changes are preserved (appended last) + assertEquals("upstream_field", transformed.getSecond().getChanges().getLast().getField()); + } + + @Test + public void testRedshiftSuperLimit_ShouldRemoveWholeRecord() { + final Map testData = new HashMap<>(); + // Add 16Kb strings from column 1 to 1024 in testData where total > 16MB + IntStream.range(1, 1025).forEach(i -> testData.put("column" + i, getLargeString(16))); + + AirbyteRecordMessageMeta upstreamMeta = new AirbyteRecordMessageMeta() + .withChanges(List.of( + new AirbyteRecordMessageMetaChange() + .withField("upstream_field") + .withChange(Change.NULLED) + .withReason(Reason.PLATFORM_SERIALIZATION_ERROR))); + final Pair transformed = + transformer.transform(new StreamDescriptor().withNamespace("test_schema").withName("users_final"), Jsons.jsonNode(testData), upstreamMeta); + // Verify PKs are preserved. + assertNotNull(transformed.getFirst().get("column1")); + assertNotNull(transformed.getFirst().get("column1")); + assertTrue( + Jsons.serialize(transformed.getSecond()) + .getBytes(StandardCharsets.UTF_8).length < RedshiftSuperLimitationTransformer.REDSHIFT_SUPER_MAX_BYTE_SIZE); + assertEquals(2, transformed.getSecond().getChanges().size()); + // Assert that transformation added the change + assertEquals("all", transformed.getSecond().getChanges().getFirst().getField()); + assertEquals(Change.NULLED, transformed.getSecond().getChanges().getFirst().getChange()); + assertEquals(Reason.DESTINATION_RECORD_SIZE_LIMITATION, transformed.getSecond().getChanges().getFirst().getReason()); + // Assert that upstream changes are preserved (appended last) + assertEquals("upstream_field", transformed.getSecond().getChanges().getLast().getField()); + } + + @Test + public void testRedshiftSuperLimit_ShouldFailOnPKMissing() { + final Map testData = new HashMap<>(); + // Add 16Kb strings from column 3 to 1027 in testData, 1 & 2 are pks missing + IntStream.range(3, 1028).forEach(i -> testData.put("column" + i, getLargeString(16))); + + AirbyteRecordMessageMeta upstreamMeta = new AirbyteRecordMessageMeta() + .withChanges(List.of( + new AirbyteRecordMessageMetaChange() + .withField("upstream_field") + .withChange(Change.NULLED) + .withReason(Reason.PLATFORM_SERIALIZATION_ERROR))); + final Exception ex = assertThrows(RuntimeException.class, + () -> transformer.transform( + new StreamDescriptor().withNamespace("test_schema").withName("users_final"), Jsons.jsonNode(testData), + upstreamMeta)); + + assertEquals("Record exceeds size limit, cannot transform without PrimaryKeys in DEDUPE sync", ex.getMessage()); + } + + private String getLargeString(int kbSize) { + StringBuilder longString = new StringBuilder(); + while (longString.length() < 1024 * kbSize) { // Repeat until the given KB size + longString.append("Lorem ipsum dolor sit amet, consectetur adipiscing elit. "); + } + return longString.toString(); + } + +} diff --git a/airbyte-integrations/connectors/destination-redshift/src/test/resources/test.json b/airbyte-integrations/connectors/destination-redshift/src/test/resources/test.json new file mode 100644 index 0000000000000..82956666435b2 --- /dev/null +++ b/airbyte-integrations/connectors/destination-redshift/src/test/resources/test.json @@ -0,0 +1,55 @@ +{ + "employees": [ + { + "id": 1, + "name": "John Doe", + "age": 35, + "salary": 7.50005e4, + "performance_rating": 4.5, + "department": "Engineering", + "skills": ["Java", "Python", "C++"], + "manager": { + "id": 101, + "name": "Jane Smith" + } + }, + { + "id": 2, + "name": "Alice Johnson", + "age": 28, + "salary": 5.500075e4, + "performance_rating": 4.2, + "department": "Marketing", + "skills": ["Marketing Strategy", "Social Media"], + "manager": { + "id": 102, + "name": "Bob Brown" + } + }, + { + "id": 3, + "name": "Michael Clark", + "age": 40, + "salary": 9.000025e4, + "performance_rating": 4.8, + "department": "Finance", + "skills": ["Accounting", "Financial Analysis"], + "manager": { + "id": 103, + "name": "Emily Davis" + } + } + ], + "company": { + "name": "Acme Corporation", + "address": "123 Main St, Anytown, USA", + "founded_year": 2000, + "revenue": 1.23456789987654321e8, + "stock_price": 5.25e3, + "employees_count": 5000, + "ceo": { + "id": 1001, + "name": "David Johnson" + } + } +} diff --git a/airbyte-integrations/connectors/destination-redshift/src/test/resources/typing_deduping_with_cdc.sql b/airbyte-integrations/connectors/destination-redshift/src/test/resources/typing_deduping_with_cdc.sql index 371b189e4856a..e224ff77babe7 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test/resources/typing_deduping_with_cdc.sql +++ b/airbyte-integrations/connectors/destination-redshift/src/test/resources/typing_deduping_with_cdc.sql @@ -45,7 +45,7 @@ with "_airbyte_raw_id", "_airbyte_extracted_at", OBJECT( - 'errors', + 'changes', ARRAY_CONCAT( ARRAY_CONCAT( ARRAY_CONCAT( @@ -61,84 +61,92 @@ with ARRAY_CONCAT( ARRAY_CONCAT( ARRAY_CONCAT( + ARRAY_CONCAT( + CASE WHEN ( + "_airbyte_data"."id1" is not null + and "id1" is null + ) THEN ARRAY(JSON_PARSE('{"field": "id1", "change": "NULLED", "reason": "DESTINATION_TYPECAST_ERROR"}')) ELSE ARRAY() END , + CASE WHEN ( + "_airbyte_data"."id2" is not null + and "id2" is null + ) THEN ARRAY(JSON_PARSE('{"field": "id2", "change": "NULLED", "reason": "DESTINATION_TYPECAST_ERROR"}')) ELSE ARRAY() END + ), CASE WHEN ( - "_airbyte_data"."id1" is not null - and "id1" is null - ) THEN ARRAY('Problem with `id1`') ELSE ARRAY() END , - CASE WHEN ( - "_airbyte_data"."id2" is not null - and "id2" is null - ) THEN ARRAY('Problem with `id2`') ELSE ARRAY() END + "_airbyte_data"."updated_at" is not null + and "updated_at" is null + ) THEN ARRAY(JSON_PARSE('{"field": "updated_at", "change": "NULLED", "reason": "DESTINATION_TYPECAST_ERROR"}')) ELSE ARRAY() END ), CASE WHEN ( - "_airbyte_data"."updated_at" is not null - and "updated_at" is null - ) THEN ARRAY('Problem with `updated_at`') ELSE ARRAY() END + "_airbyte_data"."struct" is not null + and "struct" is null + ) THEN ARRAY(JSON_PARSE('{"field": "struct", "change": "NULLED", "reason": "DESTINATION_TYPECAST_ERROR"}')) ELSE ARRAY() END ), CASE WHEN ( - "_airbyte_data"."struct" is not null - and "struct" is null - ) THEN ARRAY('Problem with `struct`') ELSE ARRAY() END + "_airbyte_data"."array" is not null + and "array" is null + ) THEN ARRAY(JSON_PARSE('{"field": "array", "change": "NULLED", "reason": "DESTINATION_TYPECAST_ERROR"}')) ELSE ARRAY() END ), CASE WHEN ( - "_airbyte_data"."array" is not null - and "array" is null - ) THEN ARRAY('Problem with `array`') ELSE ARRAY() END + "_airbyte_data"."string" is not null + and "string" is null + ) THEN ARRAY(JSON_PARSE('{"field": "string", "change": "NULLED", "reason": "DESTINATION_TYPECAST_ERROR"}')) ELSE ARRAY() END ), CASE WHEN ( - "_airbyte_data"."string" is not null - and "string" is null - ) THEN ARRAY('Problem with `string`') ELSE ARRAY() END + "_airbyte_data"."number" is not null + and "number" is null + ) THEN ARRAY(JSON_PARSE('{"field": "number", "change": "NULLED", "reason": "DESTINATION_TYPECAST_ERROR"}')) ELSE ARRAY() END ), CASE WHEN ( - "_airbyte_data"."number" is not null - and "number" is null - ) THEN ARRAY('Problem with `number`') ELSE ARRAY() END + "_airbyte_data"."integer" is not null + and "integer" is null + ) THEN ARRAY(JSON_PARSE('{"field": "integer", "change": "NULLED", "reason": "DESTINATION_TYPECAST_ERROR"}')) ELSE ARRAY() END ), CASE WHEN ( - "_airbyte_data"."integer" is not null - and "integer" is null - ) THEN ARRAY('Problem with `integer`') ELSE ARRAY() END + "_airbyte_data"."boolean" is not null + and "boolean" is null + ) THEN ARRAY(JSON_PARSE('{"field": "boolean", "change": "NULLED", "reason": "DESTINATION_TYPECAST_ERROR"}')) ELSE ARRAY() END ), CASE WHEN ( - "_airbyte_data"."boolean" is not null - and "boolean" is null - ) THEN ARRAY('Problem with `boolean`') ELSE ARRAY() END + "_airbyte_data"."timestamp_with_timezone" is not null + and "timestamp_with_timezone" is null + ) THEN ARRAY(JSON_PARSE('{"field": "timestamp_with_timezone", "change": "NULLED", "reason": "DESTINATION_TYPECAST_ERROR"}')) ELSE ARRAY() END ), CASE WHEN ( - "_airbyte_data"."timestamp_with_timezone" is not null - and "timestamp_with_timezone" is null - ) THEN ARRAY('Problem with `timestamp_with_timezone`') ELSE ARRAY() END + "_airbyte_data"."timestamp_without_timezone" is not null + and "timestamp_without_timezone" is null + ) THEN ARRAY(JSON_PARSE('{"field": "timestamp_without_timezone", "change": "NULLED", "reason": "DESTINATION_TYPECAST_ERROR"}')) ELSE ARRAY() END ), CASE WHEN ( - "_airbyte_data"."timestamp_without_timezone" is not null - and "timestamp_without_timezone" is null - ) THEN ARRAY('Problem with `timestamp_without_timezone`') ELSE ARRAY() END + "_airbyte_data"."time_with_timezone" is not null + and "time_with_timezone" is null + ) THEN ARRAY(JSON_PARSE('{"field": "time_with_timezone", "change": "NULLED", "reason": "DESTINATION_TYPECAST_ERROR"}')) ELSE ARRAY() END ), CASE WHEN ( - "_airbyte_data"."time_with_timezone" is not null - and "time_with_timezone" is null - ) THEN ARRAY('Problem with `time_with_timezone`') ELSE ARRAY() END + "_airbyte_data"."time_without_timezone" is not null + and "time_without_timezone" is null + ) THEN ARRAY(JSON_PARSE('{"field": "time_without_timezone", "change": "NULLED", "reason": "DESTINATION_TYPECAST_ERROR"}')) ELSE ARRAY() END ), CASE WHEN ( - "_airbyte_data"."time_without_timezone" is not null - and "time_without_timezone" is null - ) THEN ARRAY('Problem with `time_without_timezone`') ELSE ARRAY() END + "_airbyte_data"."date" is not null + and "date" is null + ) THEN ARRAY(JSON_PARSE('{"field": "date", "change": "NULLED", "reason": "DESTINATION_TYPECAST_ERROR"}')) ELSE ARRAY() END ), CASE WHEN ( - "_airbyte_data"."date" is not null - and "date" is null - ) THEN ARRAY('Problem with `date`') ELSE ARRAY() END + "_airbyte_data"."unknown" is not null + and "unknown" is null + ) THEN ARRAY(JSON_PARSE('{"field": "unknown", "change": "NULLED", "reason": "DESTINATION_TYPECAST_ERROR"}')) ELSE ARRAY() END ), CASE WHEN ( - "_airbyte_data"."unknown" is not null - and "unknown" is null - ) THEN ARRAY('Problem with `unknown`') ELSE ARRAY() END + "_airbyte_data"."_ab_cdc_deleted_at" is not null + and "_ab_cdc_deleted_at" is null + ) THEN ARRAY(JSON_PARSE('{"field": "_ab_cdc_deleted_at", "change": "NULLED", "reason": "DESTINATION_TYPECAST_ERROR"}')) ELSE ARRAY() END ), CASE WHEN ( - "_airbyte_data"."_ab_cdc_deleted_at" is not null - and "_ab_cdc_deleted_at" is null - ) THEN ARRAY('Problem with `_ab_cdc_deleted_at`') ELSE ARRAY() END + "_airbyte_meta" is not null + and IS_OBJECT("_airbyte_meta") + and "_airbyte_meta"."changes" is not null + and IS_ARRAY("_airbyte_meta"."changes") + ) THEN "_airbyte_meta"."changes" ELSE ARRAY() END ) ) as "_airbyte_meta" from "test_schema"."users_raw" diff --git a/airbyte-integrations/connectors/destination-s3/build.gradle b/airbyte-integrations/connectors/destination-s3/build.gradle index 1e53b23dced0c..2fa42a7f454d1 100644 --- a/airbyte-integrations/connectors/destination-s3/build.gradle +++ b/airbyte-integrations/connectors/destination-s3/build.gradle @@ -4,7 +4,7 @@ plugins { } airbyteJavaConnector { - cdkVersionRequired = '0.10.2' + cdkVersionRequired = '0.29.8' features = ['db-destinations', 's3-destinations'] useLocalCdk = false } diff --git a/airbyte-integrations/connectors/destination-s3/metadata.yaml b/airbyte-integrations/connectors/destination-s3/metadata.yaml index 617cf229c6bb3..91954395b8e12 100644 --- a/airbyte-integrations/connectors/destination-s3/metadata.yaml +++ b/airbyte-integrations/connectors/destination-s3/metadata.yaml @@ -2,7 +2,7 @@ data: connectorSubtype: file connectorType: destination definitionId: 4816b78f-1489-44c1-9060-4b19d5fa9362 - dockerImageTag: 0.5.9 + dockerImageTag: 0.6.0 dockerRepository: airbyte/destination-s3 githubIssueLabel: destination-s3 icon: s3.svg diff --git a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3DestinationStrictEncrypt.java b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3DestinationStrictEncrypt.java index 8258bbcb7d04d..bdbbdecc42f53 100644 --- a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3DestinationStrictEncrypt.java +++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3DestinationStrictEncrypt.java @@ -25,7 +25,7 @@ protected S3DestinationStrictEncrypt(final S3DestinationConfigFactory configFact @Override public AirbyteConnectionStatus check(final JsonNode config) { - final S3DestinationConfig destinationConfig = this.configFactory.getS3DestinationConfig(config, super.storageProvider()); + final S3DestinationConfig destinationConfig = this.getConfigFactory().getS3DestinationConfig(config, super.storageProvider()); // Fails early to avoid extraneous validations checks if custom endpoint is not secure if (!S3BaseChecks.testCustomEndpointSecured(destinationConfig.getEndpoint())) { diff --git a/airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/S3DestinationStrictEncryptTest.java b/airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/S3DestinationStrictEncryptTest.java index ee0b99fdbadb1..cb4dfdf2e1cdc 100644 --- a/airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/S3DestinationStrictEncryptTest.java +++ b/airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/S3DestinationStrictEncryptTest.java @@ -18,6 +18,7 @@ import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfig; import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfigFactory; import io.airbyte.cdk.integrations.destination.s3.StorageProvider; +import io.airbyte.commons.json.Jsons; import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; import io.airbyte.protocol.models.v0.AirbyteConnectionStatus.Status; import org.junit.jupiter.api.BeforeEach; @@ -55,7 +56,7 @@ public S3DestinationConfig getS3DestinationConfig(final JsonNode config, final S @Test public void checksCustomEndpointIsHttpsOnly() { final S3Destination destinationWithHttpsOnlyEndpoint = new S3DestinationStrictEncrypt(factoryConfig); - final AirbyteConnectionStatus status = destinationWithHttpsOnlyEndpoint.check(null); + final AirbyteConnectionStatus status = destinationWithHttpsOnlyEndpoint.check(Jsons.emptyObject()); assertEquals(Status.SUCCEEDED, status.getStatus(), "custom endpoint did not contain `s3-accesspoint`"); } @@ -79,7 +80,7 @@ public S3DestinationConfig getS3DestinationConfig(final JsonNode config, final S } }); - final AirbyteConnectionStatus status = destinationWithStandardUnsecuredEndpoint.check(null); + final AirbyteConnectionStatus status = destinationWithStandardUnsecuredEndpoint.check(Jsons.emptyObject()); assertEquals(Status.FAILED, status.getStatus()); } diff --git a/airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/S3DestinationTest.java b/airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/S3DestinationTest.java index 888e2a225f54f..e7aafd84279bf 100644 --- a/airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/S3DestinationTest.java +++ b/airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/S3DestinationTest.java @@ -27,6 +27,7 @@ import io.airbyte.cdk.integrations.destination.s3.S3DestinationConfigFactory; import io.airbyte.cdk.integrations.destination.s3.S3StorageOperations; import io.airbyte.cdk.integrations.destination.s3.StorageProvider; +import io.airbyte.commons.json.Jsons; import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; import io.airbyte.protocol.models.v0.AirbyteConnectionStatus.Status; import org.junit.jupiter.api.BeforeEach; @@ -75,7 +76,7 @@ public S3DestinationConfig getS3DestinationConfig(final JsonNode config, final S public void checksS3WithoutListObjectPermission() { final S3Destination destinationFail = new S3Destination(factoryConfig); doThrow(new AmazonS3Exception("Access Denied")).when(s3).listObjects(any(ListObjectsRequest.class)); - final AirbyteConnectionStatus status = destinationFail.check(null); + final AirbyteConnectionStatus status = destinationFail.check(Jsons.emptyObject()); assertEquals(Status.FAILED, status.getStatus(), "Connection check should have failed"); assertTrue(status.getMessage().indexOf("Access Denied") > 0, "Connection check returned wrong failure message"); } @@ -86,7 +87,7 @@ public void checksS3WithoutListObjectPermission() { */ public void checksS3WithListObjectPermission() { final S3Destination destinationSuccess = new S3Destination(factoryConfig); - final AirbyteConnectionStatus status = destinationSuccess.check(null); + final AirbyteConnectionStatus status = destinationSuccess.check(Jsons.emptyObject()); assertEquals(Status.SUCCEEDED, status.getStatus(), "Connection check should have succeeded"); } diff --git a/airbyte-integrations/connectors/destination-snowflake/build.gradle b/airbyte-integrations/connectors/destination-snowflake/build.gradle index 77922be6f7779..b05e26d234e0d 100644 --- a/airbyte-integrations/connectors/destination-snowflake/build.gradle +++ b/airbyte-integrations/connectors/destination-snowflake/build.gradle @@ -1,10 +1,9 @@ plugins { id 'airbyte-java-connector' - id 'org.jetbrains.kotlin.jvm' version '1.9.22' } airbyteJavaConnector { - cdkVersionRequired = '0.23.18' + cdkVersionRequired = '0.27.7' features = ['db-destinations', 's3-destinations', 'typing-deduping'] useLocalCdk = false } diff --git a/airbyte-integrations/connectors/destination-snowflake/metadata.yaml b/airbyte-integrations/connectors/destination-snowflake/metadata.yaml index a72861cd17942..2bd24d353aa54 100644 --- a/airbyte-integrations/connectors/destination-snowflake/metadata.yaml +++ b/airbyte-integrations/connectors/destination-snowflake/metadata.yaml @@ -5,7 +5,7 @@ data: connectorSubtype: database connectorType: destination definitionId: 424892c4-daac-4491-b35d-c6688ba547ba - dockerImageTag: 3.6.1 + dockerImageTag: 3.7.0 dockerRepository: airbyte/destination-snowflake documentationUrl: https://docs.airbyte.com/integrations/destinations/snowflake githubIssueLabel: destination-snowflake diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeDatabase.java b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeDatabase.java index 7c2bcdf8d2e76..8a2745fa29ea4 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeDatabase.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeDatabase.java @@ -41,7 +41,6 @@ public class SnowflakeDatabase { private static final int PAUSE_BETWEEN_TOKEN_REFRESH_MIN = 7; // snowflake access token TTL is 10min and can't be modified private static final Duration NETWORK_TIMEOUT = Duration.ofMinutes(1); - private static final Duration QUERY_TIMEOUT = Duration.ofHours(3); private static final SnowflakeSQLNameTransformer nameTransformer = new SnowflakeSQLNameTransformer(); private static final String DRIVER_CLASS_NAME = "net.snowflake.client.jdbc.SnowflakeDriver"; @@ -123,7 +122,6 @@ public static HikariDataSource createDataSource(final JsonNode config, final Str properties.put(JdbcUtils.SCHEMA_KEY, nameTransformer.getIdentifier(config.get(JdbcUtils.SCHEMA_KEY).asText())); properties.put("networkTimeout", Math.toIntExact(NETWORK_TIMEOUT.toSeconds())); - properties.put("queryTimeout", Math.toIntExact(QUERY_TIMEOUT.toSeconds())); // allows queries to contain any number of statements. properties.put("MULTI_STATEMENT_COUNT", 0); diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingDestination.java b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingDestination.java index 29eb9175e988a..ff7acac1da01b 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingDestination.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingDestination.java @@ -20,8 +20,10 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.base.destination.typing_deduping.CatalogParser; import io.airbyte.integrations.base.destination.typing_deduping.DefaultTyperDeduper; +import io.airbyte.integrations.base.destination.typing_deduping.DestinationHandler; import io.airbyte.integrations.base.destination.typing_deduping.NoOpTyperDeduperWithV1V2Migrations; import io.airbyte.integrations.base.destination.typing_deduping.ParsedCatalog; +import io.airbyte.integrations.base.destination.typing_deduping.SqlGenerator; import io.airbyte.integrations.base.destination.typing_deduping.TypeAndDedupeOperationValve; import io.airbyte.integrations.base.destination.typing_deduping.TyperDeduper; import io.airbyte.integrations.base.destination.typing_deduping.migrators.Migration; @@ -45,7 +47,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class SnowflakeInternalStagingDestination extends AbstractJdbcDestination implements Destination { +public class SnowflakeInternalStagingDestination extends AbstractJdbcDestination implements Destination { private static final Logger LOGGER = LoggerFactory.getLogger(SnowflakeInternalStagingDestination.class); public static final String RAW_SCHEMA_OVERRIDE = "raw_data_schema"; @@ -139,6 +141,14 @@ protected JdbcDestinationHandler getDestinationHandler(String da throw new UnsupportedOperationException("Snowflake does not yet use the native JDBC DV2 interface"); } + @Override + protected List> getMigrations(JdbcDatabase database, + String databaseName, + SqlGenerator sqlGenerator, + DestinationHandler destinationHandler) { + return List.of(); + } + @Override public SerializedAirbyteMessageConsumer getSerializedMessageConsumer(final JsonNode config, final ConfiguredAirbyteCatalog catalog, @@ -150,7 +160,10 @@ public SerializedAirbyteMessageConsumer getSerializedMessageConsumer(final JsonN } } - final SnowflakeSqlGenerator sqlGenerator = new SnowflakeSqlGenerator(); + final int retentionPeriodDays = SnowflakeSqlOperations.getRetentionPeriodDays( + config.get(SnowflakeSqlOperations.RETENTION_PERIOD_DAYS_CONFIG_KEY)); + + final SnowflakeSqlGenerator sqlGenerator = new SnowflakeSqlGenerator(retentionPeriodDays); final ParsedCatalog parsedCatalog; final TyperDeduper typerDeduper; final JdbcDatabase database = getDatabase(getDataSource(config)); @@ -169,7 +182,7 @@ public SerializedAirbyteMessageConsumer getSerializedMessageConsumer(final JsonN final SnowflakeV1V2Migrator migrator = new SnowflakeV1V2Migrator(getNamingResolver(), database, databaseName); final SnowflakeV2TableMigrator v2TableMigrator = new SnowflakeV2TableMigrator(database, databaseName, sqlGenerator, snowflakeDestinationHandler); final boolean disableTypeDedupe = config.has(DISABLE_TYPE_DEDUPE) && config.get(DISABLE_TYPE_DEDUPE).asBoolean(false); - final List> migrations = List.of(); + final List> migrations = getMigrations(database, databaseName, sqlGenerator, snowflakeDestinationHandler); if (disableTypeDedupe) { typerDeduper = new NoOpTyperDeduperWithV1V2Migrations<>(sqlGenerator, snowflakeDestinationHandler, parsedCatalog, migrator, v2TableMigrator, migrations); diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingSqlOperations.java b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingSqlOperations.java index 6e8f888ef3941..cf996fbb0f5df 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingSqlOperations.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingSqlOperations.java @@ -30,6 +30,10 @@ public class SnowflakeInternalStagingSqlOperations extends SnowflakeSqlStagingOp private static final String PUT_FILE_QUERY = "PUT file://%s @%s/%s PARALLEL = %d;"; private static final String LIST_STAGE_QUERY = "LIST @%s/%s/%s;"; // the 1s1t copy query explicitly quotes the raw table+schema name. + // we set error_on_column_count_mismatch because (at time of writing), we haven't yet added + // the airbyte_meta column to the raw table. + // See also https://github.com/airbytehq/airbyte/issues/36410 for improved error handling. + // TODO remove error_on_column_count_mismatch once snowflake has airbyte_meta in raw data. private static final String COPY_QUERY_1S1T = """ COPY INTO "%s"."%s" FROM '@%s/%s' @@ -40,6 +44,7 @@ public class SnowflakeInternalStagingSqlOperations extends SnowflakeSqlStagingOp skip_header = 0 FIELD_OPTIONALLY_ENCLOSED_BY = '"' NULL_IF=('') + error_on_column_count_mismatch=false )"""; private static final String DROP_STAGE_QUERY = "DROP STAGE IF EXISTS %s;"; private static final String REMOVE_QUERY = "REMOVE @%s;"; diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeSqlOperations.java b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeSqlOperations.java index be9ff16282f73..bf709f6f8904d 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeSqlOperations.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeSqlOperations.java @@ -6,11 +6,12 @@ import com.fasterxml.jackson.databind.JsonNode; import io.airbyte.cdk.db.jdbc.JdbcDatabase; +import io.airbyte.cdk.integrations.base.DestinationConfig; import io.airbyte.cdk.integrations.base.JavaBaseConstants; +import io.airbyte.cdk.integrations.destination.async.partial_messages.PartialAirbyteMessage; import io.airbyte.cdk.integrations.destination.jdbc.JdbcSqlOperations; import io.airbyte.cdk.integrations.destination.jdbc.SqlOperations; import io.airbyte.cdk.integrations.destination.jdbc.SqlOperationsUtils; -import io.airbyte.cdk.integrations.destination_async.partial_messages.PartialAirbyteMessage; import io.airbyte.commons.exceptions.ConfigErrorException; import java.sql.SQLException; import java.util.List; @@ -21,7 +22,9 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -class SnowflakeSqlOperations extends JdbcSqlOperations implements SqlOperations { +public class SnowflakeSqlOperations extends JdbcSqlOperations implements SqlOperations { + + public static final String RETENTION_PERIOD_DAYS_CONFIG_KEY = "retention_period_days"; private static final Logger LOGGER = LoggerFactory.getLogger(SnowflakeSqlOperations.class); private static final int MAX_FILES_IN_LOADING_QUERY_LIMIT = 1000; @@ -46,6 +49,7 @@ public void createSchemaIfNotExists(final JdbcDatabase database, final String sc @Override public String createTableQuery(final JdbcDatabase database, final String schemaName, final String tableName) { + int retentionPeriodDays = getRetentionPeriodDaysFromConfigSingleton(); return String.format( """ CREATE TABLE IF NOT EXISTS "%s"."%s" ( @@ -53,13 +57,34 @@ public String createTableQuery(final JdbcDatabase database, final String schemaN "%s" TIMESTAMP WITH TIME ZONE DEFAULT current_timestamp(), "%s" TIMESTAMP WITH TIME ZONE DEFAULT NULL, "%s" VARIANT - ) data_retention_time_in_days = 0;""", + ) data_retention_time_in_days = %d;""", schemaName, tableName, JavaBaseConstants.COLUMN_NAME_AB_RAW_ID, JavaBaseConstants.COLUMN_NAME_AB_EXTRACTED_AT, JavaBaseConstants.COLUMN_NAME_AB_LOADED_AT, - JavaBaseConstants.COLUMN_NAME_DATA); + JavaBaseConstants.COLUMN_NAME_DATA, + retentionPeriodDays); + } + + /** + * Sort of hacky. The problem is that SnowflakeSqlOperations is constructed in the + * SnowflakeDestination constructor, but we don't have the JsonNode config until we try to call + * check/getSerializedConsumer on the SnowflakeDestination. So we can't actually inject the config + * normally. Instead, we just use the singleton object. :( + */ + private static int getRetentionPeriodDaysFromConfigSingleton() { + return getRetentionPeriodDays(DestinationConfig.getInstance().getNodeValue(RETENTION_PERIOD_DAYS_CONFIG_KEY)); + } + + public static int getRetentionPeriodDays(final JsonNode node) { + int retentionPeriodDays; + if (node == null || node.isNull()) { + retentionPeriodDays = 1; + } else { + retentionPeriodDays = node.asInt(); + } + return retentionPeriodDays; } @Override diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeSqlGenerator.java b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeSqlGenerator.java index 9c87733e66114..28198233a9480 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeSqlGenerator.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeSqlGenerator.java @@ -49,6 +49,12 @@ public class SnowflakeSqlGenerator implements SqlGenerator { "LOCALTIME", "LOCALTIMESTAMP"); + private final int retentionPeriodDays; + + public SnowflakeSqlGenerator(int retentionPeriodDays) { + this.retentionPeriodDays = retentionPeriodDays; + } + @Override public StreamId buildStreamId(final String namespace, final String name, final String rawNamespaceOverride) { return new StreamId( @@ -119,14 +125,15 @@ public Sql createTable(final StreamConfig stream, final String suffix, final boo return Sql.of(new StringSubstitutor(Map.of( "final_table_id", stream.id().finalTableId(QUOTE, suffix.toUpperCase()), "force_create_table", forceCreateTable, - "column_declarations", columnDeclarations)).replace( + "column_declarations", columnDeclarations, + "retention_period_days", retentionPeriodDays)).replace( """ CREATE ${force_create_table} TABLE ${final_table_id} ( "_AIRBYTE_RAW_ID" TEXT NOT NULL, "_AIRBYTE_EXTRACTED_AT" TIMESTAMP_TZ NOT NULL, "_AIRBYTE_META" VARIANT NOT NULL ${column_declarations} - ); + ) data_retention_time_in_days = ${retention_period_days}; """)); } diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/typing_deduping/migrations/SnowflakeState.kt b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/typing_deduping/migrations/SnowflakeState.kt index d6648acb142be..0978d591d9753 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/typing_deduping/migrations/SnowflakeState.kt +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/typing_deduping/migrations/SnowflakeState.kt @@ -13,6 +13,7 @@ data class SnowflakeState(val needsSoftReset: Boolean) : MinimumDestinationState return needsSoftReset } + @Suppress("UNCHECKED_CAST") override fun withSoftReset(needsSoftReset: Boolean): T { return copy(needsSoftReset = needsSoftReset) as T } diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-snowflake/src/main/resources/spec.json index 53abb997a8b8c..87067870a0790 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/resources/spec.json @@ -118,6 +118,7 @@ "type": "object", "order": 2, "required": ["access_token", "refresh_token"], + "airbyte_hidden": true, "properties": { "auth_type": { "type": "string", @@ -180,6 +181,13 @@ "description": "When enabled your data will load into your final tables incrementally while your data is still being synced. When Disabled (the default), your data loads into your final tables once at the end of a sync. Note that this option only applies if you elect to create Final tables", "title": "Enable Loading Data Incrementally to Final Tables", "order": 12 + }, + "retention_period_days": { + "type": "integer", + "default": 1, + "description": "The number of days of Snowflake Time Travel to enable on the tables. See Snowflake's documentation for more information. Setting a nonzero value will incur increased storage costs in your Snowflake instance.", + "title": "Data Retention Period (days)", + "order": 13 } } }, diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/SnowflakeInsertDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/SnowflakeInsertDestinationAcceptanceTest.java index 0fb53312d3d87..cd105b663b79a 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/SnowflakeInsertDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/SnowflakeInsertDestinationAcceptanceTest.java @@ -122,7 +122,7 @@ protected List retrieveRecords(final TestDestinationEnv env, final String namespace, final JsonNode streamSchema) throws Exception { - final StreamId streamId = new SnowflakeSqlGenerator().buildStreamId(namespace, streamName, JavaBaseConstants.DEFAULT_AIRBYTE_INTERNAL_NAMESPACE); + final StreamId streamId = new SnowflakeSqlGenerator(0).buildStreamId(namespace, streamName, JavaBaseConstants.DEFAULT_AIRBYTE_INTERNAL_NAMESPACE); return retrieveRecordsFromTable(streamId.rawName(), streamId.rawNamespace()) .stream() .map(r -> r.get(JavaBaseConstants.COLUMN_NAME_DATA)) diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/typing_deduping/AbstractSnowflakeTypingDedupingTest.java b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/typing_deduping/AbstractSnowflakeTypingDedupingTest.java index de6f4f8498684..3bac3538f5bca 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/typing_deduping/AbstractSnowflakeTypingDedupingTest.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/typing_deduping/AbstractSnowflakeTypingDedupingTest.java @@ -99,6 +99,9 @@ protected void teardownStreamAndNamespace(String streamNamespace, final String s // Raw table is still lowercase. StreamId.concatenateRawTableName(streamNamespace, streamName), streamNamespace.toUpperCase())); + database.execute( + String.format("DELETE FROM \"airbyte_internal\".\"_airbyte_destination_state\" WHERE \"name\"='%s' AND \"namespace\"='%s'", streamName, + streamNamespace)); } @Override @@ -108,7 +111,7 @@ protected void globalTeardown() throws Exception { @Override protected SqlGenerator getSqlGenerator() { - return new SnowflakeSqlGenerator(); + return new SnowflakeSqlGenerator(0); } @Override diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeInternalStagingCaseInsensitiveTypingDedupingTest.java b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeInternalStagingCaseInsensitiveTypingDedupingTest.java new file mode 100644 index 0000000000000..f9d137734ad37 --- /dev/null +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeInternalStagingCaseInsensitiveTypingDedupingTest.java @@ -0,0 +1,44 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.snowflake.typing_deduping; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import io.airbyte.commons.json.Jsons; +import java.util.List; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; + +public class SnowflakeInternalStagingCaseInsensitiveTypingDedupingTest extends AbstractSnowflakeTypingDedupingTest { + + @Override + protected String getConfigPath() { + return "secrets/1s1t_case_insensitive.json"; + } + + @Override + protected List dumpRawTableRecords(String streamNamespace, final String streamName) throws Exception { + List records = super.dumpRawTableRecords(streamNamespace, streamName); + return records.stream() + .map(record -> { + // Downcase the column names. + // RecordDiffer expects the raw table column names to be lowercase. + // TODO we should probably provide a way to mutate the expected data? + ObjectNode mutatedRecord = (ObjectNode) Jsons.emptyObject(); + record.fields().forEachRemaining(entry -> { + mutatedRecord.set(entry.getKey().toLowerCase(), entry.getValue()); + }); + return (JsonNode) mutatedRecord; + }) + .toList(); + } + + @Disabled("This test assumes the ability to create case-sensitive tables, which is by definition not available with QUOTED_IDENTIFIERS_IGNORE_CASE=TRUE") + @Test + public void testFinalTableUppercasingMigration_append() throws Exception { + super.testFinalTableUppercasingMigration_append(); + } + +} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeSqlGeneratorIntegrationTest.java b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeSqlGeneratorIntegrationTest.java index 7277f5991957e..53c725094c080 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeSqlGeneratorIntegrationTest.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeSqlGeneratorIntegrationTest.java @@ -69,7 +69,7 @@ public static void teardownSnowflake() throws Exception { @Override protected SnowflakeSqlGenerator getSqlGenerator() { - return new SnowflakeSqlGenerator(); + return new SnowflakeSqlGenerator(0); } @Override diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/sqlgenerator/alltypes_v1v2_expectedrecords_final.jsonl b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/sqlgenerator/alltypes_v1v2_expectedrecords_final.jsonl new file mode 100644 index 0000000000000..6a9cb02645378 --- /dev/null +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/sqlgenerator/alltypes_v1v2_expectedrecords_final.jsonl @@ -0,0 +1,9 @@ +// Snowflake doesn't yet support the new airbyte_meta format / copying airbyte_meta from raw table, so we still have the old `Problem with...` format. +{"ID1": 1, "ID2": 100, "UPDATED_AT": "2023-01-01T01:00:00.000000000Z", "ARRAY": ["foo"], "STRUCT": {"foo": "bar"}, "STRING": "foo", "NUMBER": 42.1, "INTEGER": 42, "BOOLEAN": true, "TIMESTAMP_WITH_TIMEZONE": "2023-01-23T12:34:56.000000000Z", "TIMESTAMP_WITHOUT_TIMEZONE": "2023-01-23T12:34:56.000000000", "TIME_WITH_TIMEZONE": "12:34:56Z", "TIME_WITHOUT_TIMEZONE": "12:34:56.000000000", "DATE": "2023-01-23", "UNKNOWN": {}, "_AIRBYTE_EXTRACTED_AT": "2023-01-01T00:00:00.000000000Z", "_AIRBYTE_META": {"errors": []}} +{"ID1": 2, "ID2": 100, "UPDATED_AT": "2023-01-01T01:00:00.000000000Z", "UNKNOWN": null, "_AIRBYTE_EXTRACTED_AT": "2023-01-01T00:00:00.000000000Z", "_AIRBYTE_META": {"errors": []}} +{"ID1": 3, "ID2": 100, "UPDATED_AT": "2023-01-01T01:00:00.000000000Z", "_AIRBYTE_EXTRACTED_AT": "2023-01-01T00:00:00.000000000Z", "_AIRBYTE_META": {"errors": []}} +{"ID1": 4, "ID2": 100, "UPDATED_AT": "2023-01-01T01:00:00.000000000Z", "UNKNOWN": null, "_AIRBYTE_EXTRACTED_AT": "2023-01-01T00:00:00.000000000Z", "_AIRBYTE_META": {"errors":["Problem with `struct`","Problem with `array`","Problem with `number`","Problem with `integer`","Problem with `boolean`","Problem with `timestamp_with_timezone`","Problem with `timestamp_without_timezone`","Problem with `time_with_timezone`","Problem with `time_without_timezone`","Problem with `date`"]}} +// Note: no loss of precision on the `number` column anywhere. +{"ID1": 5, "ID2": 100, "UPDATED_AT": "2023-01-01T01:00:00.000000000Z", "NUMBER": 67.174118, "STRUCT": {"nested_number": 67.174118}, "ARRAY": [67.174118], "UNKNOWN": 67.174118, "_AIRBYTE_EXTRACTED_AT": "2023-01-01T00:00:00.000000000Z", "_AIRBYTE_META": {"errors": []}} +// Note that we unconditionally upcase IAmACaseSensitiveColumnName +{"ID1": 6, "ID2": 100, "UPDATED_AT": "2023-01-01T01:00:00.000000000Z", "IAMACASESENSITIVECOLUMNNAME": "Case senstive value", "_AIRBYTE_EXTRACTED_AT": "2023-01-01T00:00:00.000000000Z", "_AIRBYTE_META": {"errors": []}} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/sqlgenerator/alltypes_v1v2_expectedrecords_raw.jsonl b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/sqlgenerator/alltypes_v1v2_expectedrecords_raw.jsonl new file mode 100644 index 0000000000000..d2a23f103ede3 --- /dev/null +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/sqlgenerator/alltypes_v1v2_expectedrecords_raw.jsonl @@ -0,0 +1,6 @@ +{"_airbyte_raw_id": "14ba7c7f-e398-4e69-ac22-28d578400dbc", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000000Z", "_airbyte_data": {"id1": 1, "id2": 100, "updated_at": "2023-01-01T01:00:00Z", "array": ["foo"], "struct": {"foo": "bar"}, "string": "foo", "number": 42.1, "integer": 42, "boolean": true, "timestamp_with_timezone": "2023-01-23T12:34:56Z", "timestamp_without_timezone": "2023-01-23T12:34:56", "time_with_timezone": "12:34:56Z", "time_without_timezone": "12:34:56", "date": "2023-01-23", "unknown": {}}} +{"_airbyte_raw_id": "53ce75a5-5bcc-47a3-b45c-96c2015cfe35", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000000Z", "_airbyte_data": {"id1": 2, "id2": 100, "updated_at": "2023-01-01T01:00:00Z", "array": null, "struct": null, "string": null, "number": null, "integer": null, "boolean": null, "timestamp_with_timezone": null, "timestamp_without_timezone": null, "time_with_timezone": null, "time_without_timezone": null, "date": null, "unknown": null}} +{"_airbyte_raw_id": "7e1fac0c-017e-4ad6-bc78-334a34d64fbe", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000000Z", "_airbyte_data": {"id1": 3, "id2": 100, "updated_at": "2023-01-01T01:00:00Z"}} +{"_airbyte_raw_id": "84242b60-3a34-4531-ad75-a26702960a9a", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000000Z", "_airbyte_data": {"id1": 4, "id2": 100, "updated_at": "2023-01-01T01:00:00Z", "array": {}, "struct": [], "string": null, "number": "foo", "integer": "bar", "boolean": "fizz", "timestamp_with_timezone": {}, "timestamp_without_timezone": {}, "time_with_timezone": {}, "time_without_timezone": {}, "date": "airbyte", "unknown": null}} +{"_airbyte_raw_id": "a4a783b5-7729-4d0b-b659-48ceb08713f1", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000000Z", "_airbyte_data": {"id1": 5, "id2": 100, "updated_at": "2023-01-01T01:00:00Z", "number": 67.174118, "struct": {"nested_number": 67.174118}, "array": [67.174118], "unknown": 67.174118}} +{"_airbyte_raw_id": "7e1fac0c-017e-4ad6-bc78-334a34d64fce", "_airbyte_extracted_at": "2023-01-01T00:00:00.000000000Z", "_airbyte_data": {"id1": 6, "id2": 100, "updated_at": "2023-01-01T01:00:00Z", "IamACaseSensitiveColumnName": "Case senstive value"}} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationTest.java b/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationTest.java index 8b15f51409001..220369768f192 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationTest.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationTest.java @@ -10,7 +10,7 @@ import com.fasterxml.jackson.databind.JsonNode; import io.airbyte.cdk.integrations.base.DestinationConfig; import io.airbyte.cdk.integrations.base.SerializedAirbyteMessageConsumer; -import io.airbyte.cdk.integrations.destination_async.AsyncStreamConsumer; +import io.airbyte.cdk.integrations.destination.async.AsyncStreamConsumer; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.resources.MoreResources; import io.airbyte.integrations.destination.snowflake.SnowflakeDestination.DestinationType; @@ -90,7 +90,7 @@ private static Stream destinationTypeToConfig() { void testWriteSnowflakeInternal() throws Exception { final JsonNode config = Jsons.deserialize(MoreResources.readResource("internal_staging_config.json"), JsonNode.class); final SerializedAirbyteMessageConsumer consumer = new SnowflakeDestination(OssCloudEnvVarConsts.AIRBYTE_OSS) - .getSerializedMessageConsumer(config, new ConfiguredAirbyteCatalog(), null); + .getSerializedMessageConsumer(config, new ConfiguredAirbyteCatalog(), message -> {}); assertEquals(AsyncStreamConsumer.class, consumer.getClass()); } diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingSqlOperationsTest.java b/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingSqlOperationsTest.java index 1d8f16da008d6..f158e79148f40 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingSqlOperationsTest.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingSqlOperationsTest.java @@ -63,6 +63,7 @@ void copyIntoTmpTableFromStage() { skip_header = 0 FIELD_OPTIONALLY_ENCLOSED_BY = '"' NULL_IF=('') + error_on_column_count_mismatch=false ) files = ('filename1','filename2');"""; final String actualCopyQuery = snowflakeStagingSqlOperations.getCopyQuery(STAGE_NAME, STAGE_PATH, List.of("filename1", "filename2"), "tableName", SCHEMA_NAME); diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeSqlOperationsTest.java b/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeSqlOperationsTest.java index 66fa8866f2337..39756ec68f1b1 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeSqlOperationsTest.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeSqlOperationsTest.java @@ -14,7 +14,7 @@ import io.airbyte.cdk.db.jdbc.JdbcDatabase; import io.airbyte.cdk.integrations.base.DestinationConfig; import io.airbyte.cdk.integrations.base.JavaBaseConstants; -import io.airbyte.cdk.integrations.destination_async.partial_messages.PartialAirbyteMessage; +import io.airbyte.cdk.integrations.destination.async.partial_messages.PartialAirbyteMessage; import io.airbyte.commons.functional.CheckedConsumer; import io.airbyte.commons.json.Jsons; import java.sql.SQLException; @@ -44,7 +44,7 @@ void createTableQuery() { "%s" TIMESTAMP WITH TIME ZONE DEFAULT current_timestamp(), "%s" TIMESTAMP WITH TIME ZONE DEFAULT NULL, "%s" VARIANT - ) data_retention_time_in_days = 0;""", + ) data_retention_time_in_days = 1;""", SCHEMA_NAME, TABLE_NAME, JavaBaseConstants.COLUMN_NAME_AB_RAW_ID, diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeSqlGeneratorTest.java b/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeSqlGeneratorTest.java index 534cf31251e0f..0189f29744f37 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeSqlGeneratorTest.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeSqlGeneratorTest.java @@ -24,7 +24,7 @@ public class SnowflakeSqlGeneratorTest { - private final SnowflakeSqlGenerator generator = new SnowflakeSqlGenerator(); + private final SnowflakeSqlGenerator generator = new SnowflakeSqlGenerator(0); @Test void columnNameSpecialCharacterHandling() { diff --git a/airbyte-integrations/connectors/destination-typesense/Dockerfile b/airbyte-integrations/connectors/destination-typesense/Dockerfile deleted file mode 100644 index 0a9d49772a9f0..0000000000000 --- a/airbyte-integrations/connectors/destination-typesense/Dockerfile +++ /dev/null @@ -1,38 +0,0 @@ -FROM python:3.9.11-alpine3.15 as base - -# build and load all requirements -FROM base as builder -WORKDIR /airbyte/integration_code - -# upgrade pip to the latest version -RUN apk --no-cache upgrade \ - && pip install --upgrade pip \ - && apk --no-cache add tzdata build-base - - -COPY setup.py ./ -# install necessary packages to a temporary folder -RUN pip install --prefix=/install . - -# build a clean environment -FROM base -WORKDIR /airbyte/integration_code - -# copy all loaded and built libraries to a pure basic image -COPY --from=builder /install /usr/local -# add default timezone settings -COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime -RUN echo "Etc/UTC" > /etc/timezone - -# bash is installed for more convenient debugging. -RUN apk --no-cache add bash - -# copy payload code only -COPY main.py ./ -COPY destination_typesense ./destination_typesense - -ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] - -LABEL io.airbyte.version=0.1.3 -LABEL io.airbyte.name=airbyte/destination-typesense diff --git a/airbyte-integrations/connectors/destination-typesense/destination_typesense/destination.py b/airbyte-integrations/connectors/destination-typesense/destination_typesense/destination.py index 0f03edf9fe7a9..4572a5118da37 100644 --- a/airbyte-integrations/connectors/destination-typesense/destination_typesense/destination.py +++ b/airbyte-integrations/connectors/destination-typesense/destination_typesense/destination.py @@ -3,6 +3,7 @@ # +import time from typing import Any, Iterable, Mapping from airbyte_cdk import AirbyteLogger @@ -13,12 +14,11 @@ def get_client(config: Mapping[str, Any]) -> Client: - api_key = config.get("api_key") - host = config.get("host") - port = config.get("port") or "8108" - protocol = config.get("protocol") or "https" - - client = Client({"api_key": api_key, "nodes": [{"host": host, "port": port, "protocol": protocol}], "connection_timeout_seconds": 3600}) + node = {"host": config.get("host"), "port": config.get("port") or "8108", "protocol": config.get("protocol") or "https"} + path = config.get("path") + if path: + node["path"] = path + client = Client({"api_key": config.get("api_key"), "nodes": [node], "connection_timeout_seconds": 3600}) return client @@ -56,6 +56,7 @@ def check(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> AirbyteConn client = get_client(config=config) client.collections.create({"name": "_airbyte", "fields": [{"name": "title", "type": "string"}]}) client.collections["_airbyte"].documents.create({"id": "1", "title": "The Hunger Games"}) + time.sleep(3) client.collections["_airbyte"].documents["1"].retrieve() client.collections["_airbyte"].delete() return AirbyteConnectionStatus(status=Status.SUCCEEDED) diff --git a/airbyte-integrations/connectors/destination-typesense/destination_typesense/spec.json b/airbyte-integrations/connectors/destination-typesense/destination_typesense/spec.json index 46462eb37b937..797457d6b4720 100644 --- a/airbyte-integrations/connectors/destination-typesense/destination_typesense/spec.json +++ b/airbyte-integrations/connectors/destination-typesense/destination_typesense/spec.json @@ -38,6 +38,12 @@ "type": "integer", "description": "How many documents should be imported together. Default 1000", "order": 4 + }, + "path": { + "title": "Path", + "type": "string", + "description": "Path of the Typesense instance. Default is none", + "order": 5 } } } diff --git a/airbyte-integrations/connectors/destination-typesense/metadata.yaml b/airbyte-integrations/connectors/destination-typesense/metadata.yaml index 0b0f9562a2ffa..d954d016d1987 100644 --- a/airbyte-integrations/connectors/destination-typesense/metadata.yaml +++ b/airbyte-integrations/connectors/destination-typesense/metadata.yaml @@ -2,8 +2,10 @@ data: connectorSubtype: database connectorType: destination definitionId: 36be8dc6-9851-49af-b776-9d4c30e4ab6a - dockerImageTag: 0.1.3 + dockerImageTag: 0.1.4 dockerRepository: airbyte/destination-typesense + connectorBuildOptions: + baseImage: docker.io/airbyte/python-connector-base:1.1.0@sha256:bd98f6505c6764b1b5f99d3aedc23dfc9e9af631a62533f60eb32b1d3dbab20c githubIssueLabel: destination-typesense icon: typesense.svg license: MIT diff --git a/airbyte-integrations/connectors/destination-typesense/poetry.lock b/airbyte-integrations/connectors/destination-typesense/poetry.lock new file mode 100644 index 0000000000000..2e64cfa7d4996 --- /dev/null +++ b/airbyte-integrations/connectors/destination-typesense/poetry.lock @@ -0,0 +1,1045 @@ +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. + +[[package]] +name = "airbyte-cdk" +version = "0.74.0" +description = "A framework for writing Airbyte Connectors." +optional = false +python-versions = ">=3.9" +files = [ + {file = "airbyte-cdk-0.74.0.tar.gz", hash = "sha256:74241a055c205403a951383f43801067b7f451370e14d553d13d0cc476cbfff7"}, + {file = "airbyte_cdk-0.74.0-py3-none-any.whl", hash = "sha256:7e5b201d69ec0e7daab7e627dbc6add4dbba4a2f779132e86aaf6713650ff4d5"}, +] + +[package.dependencies] +airbyte-protocol-models = "0.5.1" +backoff = "*" +cachetools = "*" +Deprecated = ">=1.2,<2.0" +dpath = ">=2.0.1,<2.1.0" +genson = "1.2.2" +isodate = ">=0.6.1,<0.7.0" +Jinja2 = ">=3.1.2,<3.2.0" +jsonref = ">=0.2,<1.0" +jsonschema = ">=3.2.0,<3.3.0" +pendulum = "<3.0.0" +pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" +python-dateutil = "*" +PyYAML = ">=6.0.1" +requests = "*" +requests-cache = "*" +wcmatch = "8.4" + +[package.extras] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] +vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] + +[[package]] +name = "airbyte-protocol-models" +version = "0.5.1" +description = "Declares the Airbyte Protocol." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, + {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, +] + +[package.dependencies] +pydantic = ">=1.9.2,<2.0.0" + +[[package]] +name = "atomicwrites" +version = "1.4.1" +description = "Atomic file writes." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, +] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "bracex" +version = "2.4" +description = "Bash style brace expander." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + +[[package]] +name = "cachetools" +version = "5.3.3" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "dpath" +version = "2.0.8" +description = "Filesystem-like pathing and searching for dictionaries" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, + {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "genson" +version = "1.2.2" +description = "GenSON is a powerful, user-friendly JSON Schema generator." +optional = false +python-versions = "*" +files = [ + {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, +] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jsonref" +version = "0.3.0" +description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." +optional = false +python-versions = ">=3.3,<4.0" +files = [ + {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, + {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, +] + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0" +setuptools = "*" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "packaging" +version = "24.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, +] + +[[package]] +name = "pendulum" +version = "2.1.2" +description = "Python datetimes made easy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, +] + +[package.dependencies] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] + +[[package]] +name = "pydantic" +version = "1.10.14" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, + {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, + {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, + {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, + {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, + {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, + {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, + {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, + {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, + {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + +[[package]] +name = "pyrsistent" +version = "0.20.0" +description = "Persistent/Functional/Immutable data structures" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, +] + +[[package]] +name = "pytest" +version = "6.2.5" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, + {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, +] + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +toml = "*" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] + +[[package]] +name = "pytest-mock" +version = "3.14.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, +] + +[package.dependencies] +pytest = ">=6.2.5" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-cache" +version = "1.2.0" +description = "A persistent cache for python requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, +] + +[package.dependencies] +attrs = ">=21.2" +cattrs = ">=22.2" +platformdirs = ">=2.5" +requests = ">=2.22" +url-normalize = ">=1.4" +urllib3 = ">=1.25.5" + +[package.extras] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] +bson = ["bson (>=0.5)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] +dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] +json = ["ujson (>=5.4)"] +mongodb = ["pymongo (>=3)"] +redis = ["redis (>=3)"] +security = ["itsdangerous (>=2.0)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "requests-mock" +version = "1.12.0" +description = "Mock out responses from the requests package" +optional = false +python-versions = "*" +files = [ + {file = "requests-mock-1.12.0.tar.gz", hash = "sha256:4e34f2a2752f0b78397fb414526605d95fcdeab021ac1f26d18960e7eb41f6a8"}, + {file = "requests_mock-1.12.0-py2.py3-none-any.whl", hash = "sha256:4f6fdf956de568e0bac99eee4ad96b391c602e614cc0ad33e7f5c72edd699e70"}, +] + +[package.dependencies] +requests = ">=2.22,<3" + +[package.extras] +fixture = ["fixtures"] + +[[package]] +name = "setuptools" +version = "69.2.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "typesense" +version = "0.14.0" +description = "Python client for Typesense, an open source and typo tolerant search engine." +optional = false +python-versions = ">=3" +files = [ + {file = "typesense-0.14.0-py2.py3-none-any.whl", hash = "sha256:0ee444351d59243b51d1ea7502dc41e14a3997f954269519c3a445b39b137bba"}, +] + +[package.dependencies] +requests = "*" + +[[package]] +name = "typing-extensions" +version = "4.10.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, +] + +[[package]] +name = "url-normalize" +version = "1.4.3" +description = "URL normalization for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, + {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wcmatch" +version = "8.4" +description = "Wildcard/glob file name matcher." +optional = false +python-versions = ">=3.7" +files = [ + {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, + {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, +] + +[package.dependencies] +bracex = ">=2.1.1" + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9,<3.12" +content-hash = "246231af6d2f6144787a8e320f4daf7f5a57f5a2a2f82564a0dfcd93be315c05" diff --git a/airbyte-integrations/connectors/destination-typesense/pyproject.toml b/airbyte-integrations/connectors/destination-typesense/pyproject.toml new file mode 100644 index 0000000000000..acb75a653816e --- /dev/null +++ b/airbyte-integrations/connectors/destination-typesense/pyproject.toml @@ -0,0 +1,32 @@ +[build-system] +requires = [ "poetry-core>=1.0.0",] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +version = "0.1.4" +name = "destination-typesense" +description = "Destination Implementation for Typesense." +authors = [ "Airbyte ",] +license = "MIT" +readme = "README.md" +documentation = "https://docs.airbyte.com/integrations/destination/typesense" +homepage = "https://airbyte.com" +repository = "https://github.com/airbytehq/airbyte" +[[tool.poetry.packages]] +include = "destination_typesense" + +[tool.poetry.dependencies] +python = "^3.9,<3.12" +airbyte-cdk = "==0.74.0" +typesense = "^0.14.0" + + +[tool.poetry.scripts] +destination-typesense = "destination_typesense.run:run" + +[tool.poetry.group.dev.dependencies] +requests-mock = "^1.9.3" +typesense = "^0.14.0" +pytest-mock = "^3.6.1" +pytest = "^6.1" + diff --git a/airbyte-integrations/connectors/destination-typesense/setup.py b/airbyte-integrations/connectors/destination-typesense/setup.py deleted file mode 100644 index 2e3a927b43f0b..0000000000000 --- a/airbyte-integrations/connectors/destination-typesense/setup.py +++ /dev/null @@ -1,23 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = ["airbyte-cdk", "typesense>=0.14.0"] - -TEST_REQUIREMENTS = ["pytest~=6.1", "typesense>=0.14.0"] - -setup( - name="destination_typesense", - description="Destination implementation for Typesense.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={"": ["*.json"]}, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/destination-vectara/.dockerignore b/airbyte-integrations/connectors/destination-vectara/.dockerignore index f784000e19e24..f38dda3faba3a 100644 --- a/airbyte-integrations/connectors/destination-vectara/.dockerignore +++ b/airbyte-integrations/connectors/destination-vectara/.dockerignore @@ -2,4 +2,5 @@ !Dockerfile !main.py !destination_vectara -!setup.py +!pyproject.toml +!poetry.lock \ No newline at end of file diff --git a/airbyte-integrations/connectors/destination-vectara/Dockerfile b/airbyte-integrations/connectors/destination-vectara/Dockerfile deleted file mode 100644 index 09e12723300f6..0000000000000 --- a/airbyte-integrations/connectors/destination-vectara/Dockerfile +++ /dev/null @@ -1,38 +0,0 @@ -FROM python:3.9.11-alpine3.15 as base - -# build and load all requirements -FROM base as builder -WORKDIR /airbyte/integration_code - -# upgrade pip to the latest version -RUN apk --no-cache upgrade \ - && pip install --upgrade pip \ - && apk --no-cache add tzdata build-base - - -COPY setup.py ./ -# install necessary packages to a temporary folder -RUN pip install --prefix=/install . - -# build a clean environment -FROM base -WORKDIR /airbyte/integration_code - -# copy all loaded and built libraries to a pure basic image -COPY --from=builder /install /usr/local -# add default timezone settings -COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime -RUN echo "Etc/UTC" > /etc/timezone - -# bash is installed for more convenient debugging. -RUN apk --no-cache add bash - -# copy payload code only -COPY main.py ./ -COPY destination_vectara ./destination_vectara - -ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] - -LABEL io.airbyte.version=0.2.1 -LABEL io.airbyte.name=airbyte/destination-vectara diff --git a/airbyte-integrations/connectors/destination-vectara/README.md b/airbyte-integrations/connectors/destination-vectara/README.md index 2c68229551bcf..13fd46d9fd08a 100644 --- a/airbyte-integrations/connectors/destination-vectara/README.md +++ b/airbyte-integrations/connectors/destination-vectara/README.md @@ -8,31 +8,12 @@ For information about how to use this connector within Airbyte, see [the documen ### Prerequisites **To iterate on this connector, make sure to complete this prerequisites section.** -#### Minimum Python version required `= 3.7.0` +#### Minimum Python version required `= 3.9` -#### Build & Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python -m venv .venv -``` - -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt -``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. - -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. - -#### Building via Gradle -From the Airbyte repository root, run: -``` -./gradlew :airbyte-integrations:connectors:destination-vectara:build +### Installing the connector +From this connector directory, run: +```bash +poetry install --with dev ``` #### Create credentials @@ -44,28 +25,80 @@ See `integration_tests/sample_config.json` for a sample config file. **If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `destination vectara test creds` and place them into `secrets/config.json`. + ### Locally running the connector ``` python main.py spec python main.py check --config secrets/config.json -python main.py discover --config secrets/config.json -python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +python main.py write --config secrets/config.json --catalog integration_tests/configured_catalog.json ``` + ### Locally running the connector docker image -#### Build -First, make sure you build the latest Docker image: + +#### Use `airbyte-ci` to build your connector +The Airbyte way of building this connector is to use our `airbyte-ci` tool. +You can follow install instructions [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md#L1). +Then running the following command will build your connector: + +```bash +airbyte-ci connectors --name=destination-vectara build ``` -docker build . -t airbyte/destination-vectara:dev +Once the command is done, you will find your connector image in your local docker registry: `airbyte/destination-vectara:dev`. + +##### Customizing our build process +When contributing on our connector you might need to customize the build process to add a system dependency or set an env var. +You can customize our build process by adding a `build_customization.py` module to your connector. +This module should contain a `pre_connector_install` and `post_connector_install` async function that will mutate the base image and the connector container respectively. +It will be imported at runtime by our build process and the functions will be called if they exist. + +Here is an example of a `build_customization.py` module: +```python +from __future__ import annotations + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + # Feel free to check the dagger documentation for more information on the Container object and its methods. + # https://dagger-io.readthedocs.io/en/sdk-python-v0.6.4/ + from dagger import Container + + +async def pre_connector_install(base_image_container: Container) -> Container: + return await base_image_container.with_env_variable("MY_PRE_BUILD_ENV_VAR", "my_pre_build_env_var_value") + +async def post_connector_install(connector_container: Container) -> Container: + return await connector_container.with_env_variable("MY_POST_BUILD_ENV_VAR", "my_post_build_env_var_value") ``` -You can also build the connector image via Gradle: +#### Build your own connector image +This connector is built using our dynamic built process in `airbyte-ci`. +The base image used to build it is defined within the metadata.yaml file under the `connectorBuildOptions`. +The build logic is defined using [Dagger](https://dagger.io/) [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/pipelines/builds/python_connectors.py). +It does not rely on a Dockerfile. + +If you would like to patch our connector and build your own a simple approach would be to: + +1. Create your own Dockerfile based on the latest version of the connector image. +```Dockerfile +FROM airbyte/destination-vectara:latest + +COPY . ./airbyte/integration_code +RUN pip install ./airbyte/integration_code + +# The entrypoint and default env vars are already set in the base image +# ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +# ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] ``` -./gradlew :airbyte-integrations:connectors:destination-vectara:airbyteDocker +Please use this as an example. This is not optimized. + +2. Build your image: +```bash +docker build -t airbyte/destination-vectara:dev . +# Running the spec command against your patched connector +docker run airbyte/destination-vectara:dev spec ``` -When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in -the Dockerfile. #### Run Then run any of the connector commands as follows: @@ -75,16 +108,17 @@ docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-vectara:dev check # messages.jsonl is a file containing line-separated JSON representing AirbyteMessages cat messages.jsonl | docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-vectara:dev write --config /secrets/config.json --catalog /integration_tests/configured_catalog.json ``` + ## Testing - Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. -First install test dependencies into your virtual environment: -``` -pip install .[tests] +You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): +```bash +airbyte-ci connectors --name=destination-vectara test ``` + ### Unit Tests To run unit tests locally, from the connector directory run: ``` -python -m pytest unit_tests +poetry run pytest -s unit_tests ``` ### Integration Tests @@ -92,32 +126,28 @@ There are two types of integration tests: Acceptance Tests (Airbyte's test suite #### Custom Integration tests Place custom tests inside `integration_tests/` folder, then, from the connector root, run ``` -python -m pytest integration_tests +poetry run pytest -s integration_tests ``` + #### Acceptance Tests Coming soon: -### Using gradle to run tests -All commands should be run from airbyte project root. -To run unit tests: -``` -./gradlew :airbyte-integrations:connectors:destination-vectara:unitTest -``` -To run acceptance and custom integration tests: -``` -./gradlew :airbyte-integrations:connectors:destination-vectara:integrationTest -``` + ## Dependency Management -All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +All of your dependencies should go in `pyproject.toml`. + We split dependencies between two groups, dependencies that are: -* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. -* required for the testing need to go to `TEST_REQUIREMENTS` list +* required for your connector to work need to go to `[tool.poetry.dependencies]` list. +* required for the testing need to go to `[tool.poetry.group.dev.dependencies]` list ### Publishing a new version of the connector You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? -1. Make sure your changes are passing unit and integration tests. -1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). -1. Create a Pull Request. -1. Pat yourself on the back for being an awesome contributor. -1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. +1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=destination-vectara test` +2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). +3. Make sure the `metadata.yaml` content is up to date. +4. Make the connector documentation and its changelog is up to date (`docs/integrations/destinations/vectara.md`). +5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). +6. Pat yourself on the back for being an awesome contributor. +7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. + diff --git a/airbyte-integrations/connectors/destination-vectara/metadata.yaml b/airbyte-integrations/connectors/destination-vectara/metadata.yaml index a144309a09882..4b8e7ef76e3ee 100644 --- a/airbyte-integrations/connectors/destination-vectara/metadata.yaml +++ b/airbyte-integrations/connectors/destination-vectara/metadata.yaml @@ -8,15 +8,21 @@ data: enabled: true cloud: enabled: true + connectorBuildOptions: + baseImage: docker.io/airbyte/python-connector-base:1.1.0@sha256:bd98f6505c6764b1b5f99d3aedc23dfc9e9af631a62533f60eb32b1d3dbab20c connectorSubtype: database connectorType: destination definitionId: 102900e7-a236-4c94-83e4-a4189b99adc2 - dockerImageTag: 0.2.1 + dockerImageTag: 0.2.3 dockerRepository: airbyte/destination-vectara githubIssueLabel: destination-vectara icon: vectara.svg license: MIT name: Vectara + remoteRegistries: + pypi: + enabled: false # TODO: enable once the CLI is working + packageName: airbyte-destination-vectara releaseDate: 2023-12-16 releaseStage: alpha supportLevel: community diff --git a/airbyte-integrations/connectors/destination-vectara/poetry.lock b/airbyte-integrations/connectors/destination-vectara/poetry.lock new file mode 100644 index 0000000000000..eda7307366f35 --- /dev/null +++ b/airbyte-integrations/connectors/destination-vectara/poetry.lock @@ -0,0 +1,1196 @@ +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. + +[[package]] +name = "airbyte-cdk" +version = "0.81.6" +description = "A framework for writing Airbyte Connectors." +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "airbyte_cdk-0.81.6-py3-none-any.whl", hash = "sha256:456a301edae4f8c99b77d89aaa2016827c43acfd7f9fb61e7a961760b954695b"}, + {file = "airbyte_cdk-0.81.6.tar.gz", hash = "sha256:3d4e2f3dc8177a109cb70e1d701583c870e2c31168dad7498cd2c84b0cc34637"}, +] + +[package.dependencies] +airbyte-protocol-models = "*" +backoff = "*" +cachetools = "*" +Deprecated = ">=1.2,<1.3" +dpath = ">=2.0.1,<2.1.0" +genson = "1.2.2" +isodate = ">=0.6.1,<0.7.0" +Jinja2 = ">=3.1.2,<3.2.0" +jsonref = ">=0.2,<0.3" +jsonschema = ">=3.2.0,<3.3.0" +langchain_core = "0.1.42" +pendulum = "<3.0.0" +pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" +python-dateutil = "*" +PyYAML = ">=6.0.1,<7.0.0" +requests = "*" +requests_cache = "*" +wcmatch = "8.4" + +[package.extras] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] +vector-db-based = ["cohere (==4.21)", "langchain (==0.1.16)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] + +[[package]] +name = "airbyte-protocol-models" +version = "0.9.0" +description = "Declares the Airbyte Protocol." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte_protocol_models-0.9.0-py3-none-any.whl", hash = "sha256:e972e140b5efd1edad5a338bcae8fdee9fc12545caf2c321e0f61b151c163a9b"}, + {file = "airbyte_protocol_models-0.9.0.tar.gz", hash = "sha256:40b69c33df23fe82d7078e84beb123bd604480e4d73cb277a890fcc92aedc8d2"}, +] + +[package.dependencies] +pydantic = ">=1.9.2,<2.0.0" + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "bracex" +version = "2.4" +description = "Bash style brace expander." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + +[[package]] +name = "cachetools" +version = "5.3.3" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "dpath" +version = "2.0.8" +description = "Filesystem-like pathing and searching for dictionaries" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, + {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "genson" +version = "1.2.2" +description = "GenSON is a powerful, user-friendly JSON Schema generator." +optional = false +python-versions = "*" +files = [ + {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, +] + +[[package]] +name = "idna" +version = "3.7" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jsonpatch" +version = "1.33" +description = "Apply JSON-Patches (RFC 6902)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, + {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, +] + +[package.dependencies] +jsonpointer = ">=1.9" + +[[package]] +name = "jsonpointer" +version = "2.4" +description = "Identify specific nodes in a JSON document (RFC 6901)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, + {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, +] + +[[package]] +name = "jsonref" +version = "0.2" +description = "An implementation of JSON Reference for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, +] + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0" +setuptools = "*" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] + +[[package]] +name = "langchain-core" +version = "0.1.42" +description = "Building applications with LLMs through composability" +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langchain_core-0.1.42-py3-none-any.whl", hash = "sha256:c5653ffa08a44f740295c157a24c0def4a753333f6a2c41f76bf431cd00be8b5"}, + {file = "langchain_core-0.1.42.tar.gz", hash = "sha256:40751bf60ea5d8e2b2efe65290db434717ee3834870c002e40e2811f09d814e6"}, +] + +[package.dependencies] +jsonpatch = ">=1.33,<2.0" +langsmith = ">=0.1.0,<0.2.0" +packaging = ">=23.2,<24.0" +pydantic = ">=1,<3" +PyYAML = ">=5.3" +tenacity = ">=8.1.0,<9.0.0" + +[package.extras] +extended-testing = ["jinja2 (>=3,<4)"] + +[[package]] +name = "langsmith" +version = "0.1.47" +description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langsmith-0.1.47-py3-none-any.whl", hash = "sha256:17b0a908b8d39b6da3ecff658c8c00304b0b62f59945a5e16c2da5a254ea21a6"}, + {file = "langsmith-0.1.47.tar.gz", hash = "sha256:f5ddd17628baa03a775525c5547a543a559313e425cdb2bf23579ffcf6056a76"}, +] + +[package.dependencies] +orjson = ">=3.9.14,<4.0.0" +pydantic = ">=1,<3" +requests = ">=2,<3" + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "mypy" +version = "1.9.0" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"}, + {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"}, + {file = "mypy-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150"}, + {file = "mypy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374"}, + {file = "mypy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc"}, + {file = "mypy-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129"}, + {file = "mypy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612"}, + {file = "mypy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6"}, + {file = "mypy-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185"}, + {file = "mypy-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913"}, + {file = "mypy-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"}, + {file = "mypy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b"}, + {file = "mypy-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2"}, + {file = "mypy-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e"}, + {file = "mypy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04"}, + {file = "mypy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89"}, + {file = "mypy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02"}, + {file = "mypy-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4"}, + {file = "mypy-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d"}, + {file = "mypy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf"}, + {file = "mypy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9"}, + {file = "mypy-1.9.0-py3-none-any.whl", hash = "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e"}, + {file = "mypy-1.9.0.tar.gz", hash = "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.1.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "orjson" +version = "3.10.1" +description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +optional = false +python-versions = ">=3.8" +files = [ + {file = "orjson-3.10.1-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:8ec2fc456d53ea4a47768f622bb709be68acd455b0c6be57e91462259741c4f3"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e900863691d327758be14e2a491931605bd0aded3a21beb6ce133889830b659"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ab6ecbd6fe57785ebc86ee49e183f37d45f91b46fc601380c67c5c5e9c0014a2"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8af7c68b01b876335cccfb4eee0beef2b5b6eae1945d46a09a7c24c9faac7a77"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:915abfb2e528677b488a06eba173e9d7706a20fdfe9cdb15890b74ef9791b85e"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe3fd4a36eff9c63d25503b439531d21828da9def0059c4f472e3845a081aa0b"}, + {file = "orjson-3.10.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d229564e72cfc062e6481a91977a5165c5a0fdce11ddc19ced8471847a67c517"}, + {file = "orjson-3.10.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9e00495b18304173ac843b5c5fbea7b6f7968564d0d49bef06bfaeca4b656f4e"}, + {file = "orjson-3.10.1-cp310-none-win32.whl", hash = "sha256:fd78ec55179545c108174ba19c1795ced548d6cac4d80d014163033c047ca4ea"}, + {file = "orjson-3.10.1-cp310-none-win_amd64.whl", hash = "sha256:50ca42b40d5a442a9e22eece8cf42ba3d7cd4cd0f2f20184b4d7682894f05eec"}, + {file = "orjson-3.10.1-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b345a3d6953628df2f42502297f6c1e1b475cfbf6268013c94c5ac80e8abc04c"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:caa7395ef51af4190d2c70a364e2f42138e0e5fcb4bc08bc9b76997659b27dab"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b01d701decd75ae092e5f36f7b88a1e7a1d3bb7c9b9d7694de850fb155578d5a"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b5028981ba393f443d8fed9049211b979cadc9d0afecf162832f5a5b152c6297"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:31ff6a222ea362b87bf21ff619598a4dc1106aaafaea32b1c4876d692891ec27"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e852a83d7803d3406135fb7a57cf0c1e4a3e73bac80ec621bd32f01c653849c5"}, + {file = "orjson-3.10.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2567bc928ed3c3fcd90998009e8835de7c7dc59aabcf764b8374d36044864f3b"}, + {file = "orjson-3.10.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4ce98cac60b7bb56457bdd2ed7f0d5d7f242d291fdc0ca566c83fa721b52e92d"}, + {file = "orjson-3.10.1-cp311-none-win32.whl", hash = "sha256:813905e111318acb356bb8029014c77b4c647f8b03f314e7b475bd9ce6d1a8ce"}, + {file = "orjson-3.10.1-cp311-none-win_amd64.whl", hash = "sha256:03a3ca0b3ed52bed1a869163a4284e8a7b0be6a0359d521e467cdef7e8e8a3ee"}, + {file = "orjson-3.10.1-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:f02c06cee680b1b3a8727ec26c36f4b3c0c9e2b26339d64471034d16f74f4ef5"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1aa2f127ac546e123283e437cc90b5ecce754a22306c7700b11035dad4ccf85"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2cf29b4b74f585225196944dffdebd549ad2af6da9e80db7115984103fb18a96"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1b130c20b116f413caf6059c651ad32215c28500dce9cd029a334a2d84aa66f"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d31f9a709e6114492136e87c7c6da5e21dfedebefa03af85f3ad72656c493ae9"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d1d169461726f271ab31633cf0e7e7353417e16fb69256a4f8ecb3246a78d6e"}, + {file = "orjson-3.10.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:57c294d73825c6b7f30d11c9e5900cfec9a814893af7f14efbe06b8d0f25fba9"}, + {file = "orjson-3.10.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d7f11dbacfa9265ec76b4019efffabaabba7a7ebf14078f6b4df9b51c3c9a8ea"}, + {file = "orjson-3.10.1-cp312-none-win32.whl", hash = "sha256:d89e5ed68593226c31c76ab4de3e0d35c760bfd3fbf0a74c4b2be1383a1bf123"}, + {file = "orjson-3.10.1-cp312-none-win_amd64.whl", hash = "sha256:aa76c4fe147fd162107ce1692c39f7189180cfd3a27cfbc2ab5643422812da8e"}, + {file = "orjson-3.10.1-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a2c6a85c92d0e494c1ae117befc93cf8e7bca2075f7fe52e32698da650b2c6d1"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9813f43da955197d36a7365eb99bed42b83680801729ab2487fef305b9ced866"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ec917b768e2b34b7084cb6c68941f6de5812cc26c6f1a9fecb728e36a3deb9e8"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5252146b3172d75c8a6d27ebca59c9ee066ffc5a277050ccec24821e68742fdf"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:536429bb02791a199d976118b95014ad66f74c58b7644d21061c54ad284e00f4"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7dfed3c3e9b9199fb9c3355b9c7e4649b65f639e50ddf50efdf86b45c6de04b5"}, + {file = "orjson-3.10.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2b230ec35f188f003f5b543644ae486b2998f6afa74ee3a98fc8ed2e45960afc"}, + {file = "orjson-3.10.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:01234249ba19c6ab1eb0b8be89f13ea21218b2d72d496ef085cfd37e1bae9dd8"}, + {file = "orjson-3.10.1-cp38-none-win32.whl", hash = "sha256:8a884fbf81a3cc22d264ba780920d4885442144e6acaa1411921260416ac9a54"}, + {file = "orjson-3.10.1-cp38-none-win_amd64.whl", hash = "sha256:dab5f802d52b182163f307d2b1f727d30b1762e1923c64c9c56dd853f9671a49"}, + {file = "orjson-3.10.1-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a51fd55d4486bc5293b7a400f9acd55a2dc3b5fc8420d5ffe9b1d6bb1a056a5e"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53521542a6db1411b3bfa1b24ddce18605a3abdc95a28a67b33f9145f26aa8f2"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:27d610df96ac18ace4931411d489637d20ab3b8f63562b0531bba16011998db0"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79244b1456e5846d44e9846534bd9e3206712936d026ea8e6a55a7374d2c0694"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d751efaa8a49ae15cbebdda747a62a9ae521126e396fda8143858419f3b03610"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27ff69c620a4fff33267df70cfd21e0097c2a14216e72943bd5414943e376d77"}, + {file = "orjson-3.10.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ebc58693464146506fde0c4eb1216ff6d4e40213e61f7d40e2f0dde9b2f21650"}, + {file = "orjson-3.10.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5be608c3972ed902e0143a5b8776d81ac1059436915d42defe5c6ae97b3137a4"}, + {file = "orjson-3.10.1-cp39-none-win32.whl", hash = "sha256:4ae10753e7511d359405aadcbf96556c86e9dbf3a948d26c2c9f9a150c52b091"}, + {file = "orjson-3.10.1-cp39-none-win_amd64.whl", hash = "sha256:fb5bc4caa2c192077fdb02dce4e5ef8639e7f20bec4e3a834346693907362932"}, + {file = "orjson-3.10.1.tar.gz", hash = "sha256:a883b28d73370df23ed995c466b4f6c708c1f7a9bdc400fe89165c96c7603204"}, +] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pendulum" +version = "2.1.2" +description = "Python datetimes made easy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, +] + +[package.dependencies] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pydantic" +version = "1.10.15" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50f1666a9940d3d68683c9d96e39640f709d7a72ff8702987dab1761036206bb"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82790d4753ee5d00739d6cb5cf56bceb186d9d6ce134aca3ba7befb1eedbc2c8"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d207d5b87f6cbefbdb1198154292faee8017d7495a54ae58db06762004500d00"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e49db944fad339b2ccb80128ffd3f8af076f9f287197a480bf1e4ca053a866f0"}, + {file = "pydantic-1.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:d3b5c4cbd0c9cb61bbbb19ce335e1f8ab87a811f6d589ed52b0254cf585d709c"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3d5731a120752248844676bf92f25a12f6e45425e63ce22e0849297a093b5b0"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c365ad9c394f9eeffcb30a82f4246c0006417f03a7c0f8315d6211f25f7cb654"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3287e1614393119c67bd4404f46e33ae3be3ed4cd10360b48d0a4459f420c6a3"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be51dd2c8596b25fe43c0a4a59c2bee4f18d88efb8031188f9e7ddc6b469cf44"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6a51a1dd4aa7b3f1317f65493a182d3cff708385327c1c82c81e4a9d6d65b2e4"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4e316e54b5775d1eb59187f9290aeb38acf620e10f7fd2f776d97bb788199e53"}, + {file = "pydantic-1.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:0d142fa1b8f2f0ae11ddd5e3e317dcac060b951d605fda26ca9b234b92214986"}, + {file = "pydantic-1.10.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7ea210336b891f5ea334f8fc9f8f862b87acd5d4a0cbc9e3e208e7aa1775dabf"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3453685ccd7140715e05f2193d64030101eaad26076fad4e246c1cc97e1bb30d"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bea1f03b8d4e8e86702c918ccfd5d947ac268f0f0cc6ed71782e4b09353b26f"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:af9850d98fc21e5bc24ea9e35dd80a29faf6462c608728a110c0a30b595e58b7"}, + {file = "pydantic-1.10.15-cp37-cp37m-win_amd64.whl", hash = "sha256:d31ee5b14a82c9afe2bd26aaa405293d4237d0591527d9129ce36e58f19f95c1"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5e09c19df304b8123938dc3c53d3d3be6ec74b9d7d0d80f4f4b5432ae16c2022"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7ac9237cd62947db00a0d16acf2f3e00d1ae9d3bd602b9c415f93e7a9fc10528"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:584f2d4c98ffec420e02305cf675857bae03c9d617fcfdc34946b1160213a948"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbc6989fad0c030bd70a0b6f626f98a862224bc2b1e36bfc531ea2facc0a340c"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d573082c6ef99336f2cb5b667b781d2f776d4af311574fb53d908517ba523c22"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6bd7030c9abc80134087d8b6e7aa957e43d35714daa116aced57269a445b8f7b"}, + {file = "pydantic-1.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:3350f527bb04138f8aff932dc828f154847fbdc7a1a44c240fbfff1b57f49a12"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51d405b42f1b86703555797270e4970a9f9bd7953f3990142e69d1037f9d9e51"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a980a77c52723b0dc56640ced396b73a024d4b74f02bcb2d21dbbac1debbe9d0"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f1a1fb467d3f49e1708a3f632b11c69fccb4e748a325d5a491ddc7b5d22383"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:676ed48f2c5bbad835f1a8ed8a6d44c1cd5a21121116d2ac40bd1cd3619746ed"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92229f73400b80c13afcd050687f4d7e88de9234d74b27e6728aa689abcf58cc"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2746189100c646682eff0bce95efa7d2e203420d8e1c613dc0c6b4c1d9c1fde4"}, + {file = "pydantic-1.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:394f08750bd8eaad714718812e7fab615f873b3cdd0b9d84e76e51ef3b50b6b7"}, + {file = "pydantic-1.10.15-py3-none-any.whl", hash = "sha256:28e552a060ba2740d0d2aabe35162652c1459a0b9069fe0db7f4ee0e18e74d58"}, + {file = "pydantic-1.10.15.tar.gz", hash = "sha256:ca832e124eda231a60a041da4f013e3ff24949d94a01154b137fc2f2a43c3ffb"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + +[[package]] +name = "pyrsistent" +version = "0.20.0" +description = "Persistent/Functional/Immutable data structures" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, +] + +[[package]] +name = "pytest" +version = "7.4.4" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-cache" +version = "1.2.0" +description = "A persistent cache for python requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, +] + +[package.dependencies] +attrs = ">=21.2" +cattrs = ">=22.2" +platformdirs = ">=2.5" +requests = ">=2.22" +url-normalize = ">=1.4" +urllib3 = ">=1.25.5" + +[package.extras] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] +bson = ["bson (>=0.5)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] +dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] +json = ["ujson (>=5.4)"] +mongodb = ["pymongo (>=3)"] +redis = ["redis (>=3)"] +security = ["itsdangerous (>=2.0)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "ruff" +version = "0.3.7" +description = "An extremely fast Python linter and code formatter, written in Rust." +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0e8377cccb2f07abd25e84fc5b2cbe48eeb0fea9f1719cad7caedb061d70e5ce"}, + {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:15a4d1cc1e64e556fa0d67bfd388fed416b7f3b26d5d1c3e7d192c897e39ba4b"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d28bdf3d7dc71dd46929fafeec98ba89b7c3550c3f0978e36389b5631b793663"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:379b67d4f49774ba679593b232dcd90d9e10f04d96e3c8ce4a28037ae473f7bb"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c060aea8ad5ef21cdfbbe05475ab5104ce7827b639a78dd55383a6e9895b7c51"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:ebf8f615dde968272d70502c083ebf963b6781aacd3079081e03b32adfe4d58a"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d48098bd8f5c38897b03604f5428901b65e3c97d40b3952e38637b5404b739a2"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da8a4fda219bf9024692b1bc68c9cff4b80507879ada8769dc7e985755d662ea"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c44e0149f1d8b48c4d5c33d88c677a4aa22fd09b1683d6a7ff55b816b5d074f"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3050ec0af72b709a62ecc2aca941b9cd479a7bf2b36cc4562f0033d688e44fa1"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a29cc38e4c1ab00da18a3f6777f8b50099d73326981bb7d182e54a9a21bb4ff7"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5b15cc59c19edca917f51b1956637db47e200b0fc5e6e1878233d3a938384b0b"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e491045781b1e38b72c91247cf4634f040f8d0cb3e6d3d64d38dcf43616650b4"}, + {file = "ruff-0.3.7-py3-none-win32.whl", hash = "sha256:bc931de87593d64fad3a22e201e55ad76271f1d5bfc44e1a1887edd0903c7d9f"}, + {file = "ruff-0.3.7-py3-none-win_amd64.whl", hash = "sha256:5ef0e501e1e39f35e03c2acb1d1238c595b8bb36cf7a170e7c1df1b73da00e74"}, + {file = "ruff-0.3.7-py3-none-win_arm64.whl", hash = "sha256:789e144f6dc7019d1f92a812891c645274ed08af6037d11fc65fcbc183b7d59f"}, + {file = "ruff-0.3.7.tar.gz", hash = "sha256:d5c1aebee5162c2226784800ae031f660c350e7a3402c4d1f8ea4e97e232e3ba"}, +] + +[[package]] +name = "setuptools" +version = "69.5.1" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"}, + {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "tenacity" +version = "8.2.3" +description = "Retry code until it succeeds" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tenacity-8.2.3-py3-none-any.whl", hash = "sha256:ce510e327a630c9e1beaf17d42e6ffacc88185044ad85cf74c0a8887c6a0f88c"}, + {file = "tenacity-8.2.3.tar.gz", hash = "sha256:5398ef0d78e63f40007c1fb4c0bff96e1911394d2fa8d194f77619c05ff6cc8a"}, +] + +[package.extras] +doc = ["reno", "sphinx", "tornado (>=4.5)"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "typing-extensions" +version = "4.11.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, + {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, +] + +[[package]] +name = "url-normalize" +version = "1.4.3" +description = "URL normalization for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, + {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wcmatch" +version = "8.4" +description = "Wildcard/glob file name matcher." +optional = false +python-versions = ">=3.7" +files = [ + {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, + {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, +] + +[package.dependencies] +bracex = ">=2.1.1" + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9,<3.12" +content-hash = "939bcd29a8074f61c5b6baddd802d75fd45ef73b7b5368feeb13d4bd7ee2df5f" diff --git a/airbyte-integrations/connectors/destination-vectara/pyproject.toml b/airbyte-integrations/connectors/destination-vectara/pyproject.toml new file mode 100644 index 0000000000000..c88e6f7a6e680 --- /dev/null +++ b/airbyte-integrations/connectors/destination-vectara/pyproject.toml @@ -0,0 +1,31 @@ +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +name = "airbyte-destination-vectara" +version = "0.2.3" +description = "Airbyte destination implementation for Vectara" +authors = ["Airbyte "] +license = "MIT" +readme = "README.md" +documentation = "https://docs.airbyte.com/integrations/destinations/vectara" +homepage = "https://airbyte.com" +repository = "https://github.com/airbytehq/airbyte" + +[[tool.poetry.packages]] +include = "destination_vectara" + +[tool.poetry.dependencies] +python = "^3.9,<3.12" +# Note: Maintainers can bump this to the latest via `poetry add airbyte-cdk@latest` +airbyte-cdk = "0.81.6" + +[tool.poetry.group.dev.dependencies] +pytest = "^7.2" +ruff = "^0.3.2" +mypy = "^1.9.0" + +[tool.poetry.scripts] +destination-vectara = "destination_vectara.run:run" + diff --git a/airbyte-integrations/connectors/destination-vectara/requirements.txt b/airbyte-integrations/connectors/destination-vectara/requirements.txt deleted file mode 100644 index d6e1198b1ab1f..0000000000000 --- a/airbyte-integrations/connectors/destination-vectara/requirements.txt +++ /dev/null @@ -1 +0,0 @@ --e . diff --git a/airbyte-integrations/connectors/destination-vectara/setup.py b/airbyte-integrations/connectors/destination-vectara/setup.py deleted file mode 100644 index ab10a8c60fb93..0000000000000 --- a/airbyte-integrations/connectors/destination-vectara/setup.py +++ /dev/null @@ -1,25 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = [ - "airbyte-cdk==0.57.8", -] - -TEST_REQUIREMENTS = ["pytest~=6.2"] - -setup( - name="destination_vectara", - description="Destination implementation for Vectara.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={"": ["*.json"]}, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/destination-weaviate/.dockerignore b/airbyte-integrations/connectors/destination-weaviate/.dockerignore index 0c91a8221067c..b87d6472952a7 100644 --- a/airbyte-integrations/connectors/destination-weaviate/.dockerignore +++ b/airbyte-integrations/connectors/destination-weaviate/.dockerignore @@ -2,4 +2,5 @@ !Dockerfile !main.py !destination_weaviate -!setup.py +!pyproject.toml +!poetry.lock diff --git a/airbyte-integrations/connectors/destination-weaviate/README.md b/airbyte-integrations/connectors/destination-weaviate/README.md index c14faa8242805..24aaea31bcea5 100644 --- a/airbyte-integrations/connectors/destination-weaviate/README.md +++ b/airbyte-integrations/connectors/destination-weaviate/README.md @@ -10,24 +10,11 @@ For information about how to use this connector within Airbyte, see [the documen #### Minimum Python version required `= 3.7.0` -#### Build & Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python -m venv .venv -``` - -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt +### Installing the connector +From this connector directory, run: +```bash +poetry install --with dev ``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. - -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. #### Create credentials **If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/destinations/weaviate) @@ -42,14 +29,12 @@ and place them into `secrets/config.json`. ``` python main.py spec python main.py check --config secrets/config.json -python main.py discover --config secrets/config.json -python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +python main.py write --config secrets/config.json --catalog integration_tests/configured_catalog.json ``` ### Locally running the connector docker image - #### Use `airbyte-ci` to build your connector The Airbyte way of building this connector is to use our `airbyte-ci` tool. You can follow install instructions [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md#L1). @@ -127,6 +112,18 @@ You can run our full test suite locally using [`airbyte-ci`](https://github.com/ airbyte-ci connectors --name=destination-weaviate test ``` +### Unit Tests +To run unit tests locally, from the connector directory run: +``` +poetry run pytest -s unit_tests +``` + +### Integration Tests +To run integration tests locally, make sure you create a secrets/config.json as explained above, and then run: +``` +poetry run pytest -s integration_tests +``` + ### Customizing acceptance Tests Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. diff --git a/airbyte-integrations/connectors/destination-weaviate/metadata.yaml b/airbyte-integrations/connectors/destination-weaviate/metadata.yaml index 8fb3245d933f1..f823b680d3ace 100644 --- a/airbyte-integrations/connectors/destination-weaviate/metadata.yaml +++ b/airbyte-integrations/connectors/destination-weaviate/metadata.yaml @@ -13,7 +13,7 @@ data: connectorSubtype: vectorstore connectorType: destination definitionId: 7b7d7a0d-954c-45a0-bcfc-39a634b97736 - dockerImageTag: 0.2.15 + dockerImageTag: 0.2.17 dockerRepository: airbyte/destination-weaviate documentationUrl: https://docs.airbyte.com/integrations/destinations/weaviate githubIssueLabel: destination-weaviate diff --git a/airbyte-integrations/connectors/destination-weaviate/poetry.lock b/airbyte-integrations/connectors/destination-weaviate/poetry.lock new file mode 100644 index 0000000000000..abc0d8aca53fa --- /dev/null +++ b/airbyte-integrations/connectors/destination-weaviate/poetry.lock @@ -0,0 +1,3159 @@ +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. + +[[package]] +name = "aiohttp" +version = "3.9.4" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "aiohttp-3.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:76d32588ef7e4a3f3adff1956a0ba96faabbdee58f2407c122dd45aa6e34f372"}, + {file = "aiohttp-3.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:56181093c10dbc6ceb8a29dfeea1e815e1dfdc020169203d87fd8d37616f73f9"}, + {file = "aiohttp-3.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7a5b676d3c65e88b3aca41816bf72831898fcd73f0cbb2680e9d88e819d1e4d"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1df528a85fb404899d4207a8d9934cfd6be626e30e5d3a5544a83dbae6d8a7e"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f595db1bceabd71c82e92df212dd9525a8a2c6947d39e3c994c4f27d2fe15b11"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c0b09d76e5a4caac3d27752027fbd43dc987b95f3748fad2b924a03fe8632ad"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:689eb4356649ec9535b3686200b231876fb4cab4aca54e3bece71d37f50c1d13"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3666cf4182efdb44d73602379a66f5fdfd5da0db5e4520f0ac0dcca644a3497"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b65b0f8747b013570eea2f75726046fa54fa8e0c5db60f3b98dd5d161052004a"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1885d2470955f70dfdd33a02e1749613c5a9c5ab855f6db38e0b9389453dce7"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0593822dcdb9483d41f12041ff7c90d4d1033ec0e880bcfaf102919b715f47f1"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:47f6eb74e1ecb5e19a78f4a4228aa24df7fbab3b62d4a625d3f41194a08bd54f"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c8b04a3dbd54de6ccb7604242fe3ad67f2f3ca558f2d33fe19d4b08d90701a89"}, + {file = "aiohttp-3.9.4-cp310-cp310-win32.whl", hash = "sha256:8a78dfb198a328bfb38e4308ca8167028920fb747ddcf086ce706fbdd23b2926"}, + {file = "aiohttp-3.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:e78da6b55275987cbc89141a1d8e75f5070e577c482dd48bd9123a76a96f0bbb"}, + {file = "aiohttp-3.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c111b3c69060d2bafc446917534150fd049e7aedd6cbf21ba526a5a97b4402a5"}, + {file = "aiohttp-3.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:efbdd51872cf170093998c87ccdf3cb5993add3559341a8e5708bcb311934c94"}, + {file = "aiohttp-3.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7bfdb41dc6e85d8535b00d73947548a748e9534e8e4fddd2638109ff3fb081df"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bd9d334412961125e9f68d5b73c1d0ab9ea3f74a58a475e6b119f5293eee7ba"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:35d78076736f4a668d57ade00c65d30a8ce28719d8a42471b2a06ccd1a2e3063"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:824dff4f9f4d0f59d0fa3577932ee9a20e09edec8a2f813e1d6b9f89ced8293f"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52b8b4e06fc15519019e128abedaeb56412b106ab88b3c452188ca47a25c4093"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eae569fb1e7559d4f3919965617bb39f9e753967fae55ce13454bec2d1c54f09"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:69b97aa5792428f321f72aeb2f118e56893371f27e0b7d05750bcad06fc42ca1"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4d79aad0ad4b980663316f26d9a492e8fab2af77c69c0f33780a56843ad2f89e"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:d6577140cd7db19e430661e4b2653680194ea8c22c994bc65b7a19d8ec834403"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:9860d455847cd98eb67897f5957b7cd69fbcb436dd3f06099230f16a66e66f79"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:69ff36d3f8f5652994e08bd22f093e11cfd0444cea310f92e01b45a4e46b624e"}, + {file = "aiohttp-3.9.4-cp311-cp311-win32.whl", hash = "sha256:e27d3b5ed2c2013bce66ad67ee57cbf614288bda8cdf426c8d8fe548316f1b5f"}, + {file = "aiohttp-3.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d6a67e26daa686a6fbdb600a9af8619c80a332556245fa8e86c747d226ab1a1e"}, + {file = "aiohttp-3.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c5ff8ff44825736a4065d8544b43b43ee4c6dd1530f3a08e6c0578a813b0aa35"}, + {file = "aiohttp-3.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d12a244627eba4e9dc52cbf924edef905ddd6cafc6513849b4876076a6f38b0e"}, + {file = "aiohttp-3.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dcad56c8d8348e7e468899d2fb3b309b9bc59d94e6db08710555f7436156097f"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f7e69a7fd4b5ce419238388e55abd220336bd32212c673ceabc57ccf3d05b55"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4870cb049f10d7680c239b55428916d84158798eb8f353e74fa2c98980dcc0b"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2feaf1b7031ede1bc0880cec4b0776fd347259a723d625357bb4b82f62687b"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:939393e8c3f0a5bcd33ef7ace67680c318dc2ae406f15e381c0054dd658397de"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d2334e387b2adcc944680bebcf412743f2caf4eeebd550f67249c1c3696be04"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e0198ea897680e480845ec0ffc5a14e8b694e25b3f104f63676d55bf76a82f1a"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e40d2cd22914d67c84824045861a5bb0fb46586b15dfe4f046c7495bf08306b2"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:aba80e77c227f4234aa34a5ff2b6ff30c5d6a827a91d22ff6b999de9175d71bd"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:fb68dc73bc8ac322d2e392a59a9e396c4f35cb6fdbdd749e139d1d6c985f2527"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f3460a92638dce7e47062cf088d6e7663adb135e936cb117be88d5e6c48c9d53"}, + {file = "aiohttp-3.9.4-cp312-cp312-win32.whl", hash = "sha256:32dc814ddbb254f6170bca198fe307920f6c1308a5492f049f7f63554b88ef36"}, + {file = "aiohttp-3.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:63f41a909d182d2b78fe3abef557fcc14da50c7852f70ae3be60e83ff64edba5"}, + {file = "aiohttp-3.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c3770365675f6be220032f6609a8fbad994d6dcf3ef7dbcf295c7ee70884c9af"}, + {file = "aiohttp-3.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:305edae1dea368ce09bcb858cf5a63a064f3bff4767dec6fa60a0cc0e805a1d3"}, + {file = "aiohttp-3.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6f121900131d116e4a93b55ab0d12ad72573f967b100e49086e496a9b24523ea"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b71e614c1ae35c3d62a293b19eface83d5e4d194e3eb2fabb10059d33e6e8cbf"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:419f009fa4cfde4d16a7fc070d64f36d70a8d35a90d71aa27670bba2be4fd039"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b39476ee69cfe64061fd77a73bf692c40021f8547cda617a3466530ef63f947"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b33f34c9c7decdb2ab99c74be6443942b730b56d9c5ee48fb7df2c86492f293c"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c78700130ce2dcebb1a8103202ae795be2fa8c9351d0dd22338fe3dac74847d9"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:268ba22d917655d1259af2d5659072b7dc11b4e1dc2cb9662fdd867d75afc6a4"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:17e7c051f53a0d2ebf33013a9cbf020bb4e098c4bc5bce6f7b0c962108d97eab"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:7be99f4abb008cb38e144f85f515598f4c2c8932bf11b65add0ff59c9c876d99"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:d58a54d6ff08d2547656356eea8572b224e6f9bbc0cf55fa9966bcaac4ddfb10"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7673a76772bda15d0d10d1aa881b7911d0580c980dbd16e59d7ba1422b2d83cd"}, + {file = "aiohttp-3.9.4-cp38-cp38-win32.whl", hash = "sha256:e4370dda04dc8951012f30e1ce7956a0a226ac0714a7b6c389fb2f43f22a250e"}, + {file = "aiohttp-3.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:eb30c4510a691bb87081192a394fb661860e75ca3896c01c6d186febe7c88530"}, + {file = "aiohttp-3.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:84e90494db7df3be5e056f91412f9fa9e611fbe8ce4aaef70647297f5943b276"}, + {file = "aiohttp-3.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7d4845f8501ab28ebfdbeab980a50a273b415cf69e96e4e674d43d86a464df9d"}, + {file = "aiohttp-3.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:69046cd9a2a17245c4ce3c1f1a4ff8c70c7701ef222fce3d1d8435f09042bba1"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b73a06bafc8dcc508420db43b4dd5850e41e69de99009d0351c4f3007960019"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:418bb0038dfafeac923823c2e63226179976c76f981a2aaad0ad5d51f2229bca"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:71a8f241456b6c2668374d5d28398f8e8cdae4cce568aaea54e0f39359cd928d"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:935c369bf8acc2dc26f6eeb5222768aa7c62917c3554f7215f2ead7386b33748"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74e4e48c8752d14ecfb36d2ebb3d76d614320570e14de0a3aa7a726ff150a03c"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:916b0417aeddf2c8c61291238ce25286f391a6acb6f28005dd9ce282bd6311b6"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9b6787b6d0b3518b2ee4cbeadd24a507756ee703adbac1ab6dc7c4434b8c572a"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:221204dbda5ef350e8db6287937621cf75e85778b296c9c52260b522231940ed"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:10afd99b8251022ddf81eaed1d90f5a988e349ee7d779eb429fb07b670751e8c"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2506d9f7a9b91033201be9ffe7d89c6a54150b0578803cce5cb84a943d075bc3"}, + {file = "aiohttp-3.9.4-cp39-cp39-win32.whl", hash = "sha256:e571fdd9efd65e86c6af2f332e0e95dad259bfe6beb5d15b3c3eca3a6eb5d87b"}, + {file = "aiohttp-3.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:7d29dd5319d20aa3b7749719ac9685fbd926f71ac8c77b2477272725f882072d"}, + {file = "aiohttp-3.9.4.tar.gz", hash = "sha256:6ff71ede6d9a5a58cfb7b6fffc83ab5d4a63138276c771ac91ceaaddf5459644"}, +] + +[package.dependencies] +aiosignal = ">=1.1.2" +async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} +attrs = ">=17.3.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +yarl = ">=1.0,<2.0" + +[package.extras] +speedups = ["Brotli", "aiodns", "brotlicffi"] + +[[package]] +name = "aiosignal" +version = "1.3.1" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = false +python-versions = ">=3.7" +files = [ + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, + {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" + +[[package]] +name = "airbyte-cdk" +version = "0.81.6" +description = "A framework for writing Airbyte Connectors." +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "airbyte_cdk-0.81.6-py3-none-any.whl", hash = "sha256:456a301edae4f8c99b77d89aaa2016827c43acfd7f9fb61e7a961760b954695b"}, + {file = "airbyte_cdk-0.81.6.tar.gz", hash = "sha256:3d4e2f3dc8177a109cb70e1d701583c870e2c31168dad7498cd2c84b0cc34637"}, +] + +[package.dependencies] +airbyte-protocol-models = "*" +backoff = "*" +cachetools = "*" +cohere = {version = "4.21", optional = true, markers = "extra == \"vector-db-based\""} +Deprecated = ">=1.2,<1.3" +dpath = ">=2.0.1,<2.1.0" +genson = "1.2.2" +isodate = ">=0.6.1,<0.7.0" +Jinja2 = ">=3.1.2,<3.2.0" +jsonref = ">=0.2,<0.3" +jsonschema = ">=3.2.0,<3.3.0" +langchain = {version = "0.1.16", optional = true, markers = "extra == \"vector-db-based\""} +langchain_core = "0.1.42" +openai = {version = "0.27.9", extras = ["embeddings"], optional = true, markers = "extra == \"vector-db-based\""} +pendulum = "<3.0.0" +pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" +python-dateutil = "*" +PyYAML = ">=6.0.1,<7.0.0" +requests = "*" +requests_cache = "*" +tiktoken = {version = "0.4.0", optional = true, markers = "extra == \"vector-db-based\""} +wcmatch = "8.4" + +[package.extras] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] +vector-db-based = ["cohere (==4.21)", "langchain (==0.1.16)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] + +[[package]] +name = "airbyte-protocol-models" +version = "0.9.0" +description = "Declares the Airbyte Protocol." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte_protocol_models-0.9.0-py3-none-any.whl", hash = "sha256:e972e140b5efd1edad5a338bcae8fdee9fc12545caf2c321e0f61b151c163a9b"}, + {file = "airbyte_protocol_models-0.9.0.tar.gz", hash = "sha256:40b69c33df23fe82d7078e84beb123bd604480e4d73cb277a890fcc92aedc8d2"}, +] + +[package.dependencies] +pydantic = ">=1.9.2,<2.0.0" + +[[package]] +name = "async-timeout" +version = "4.0.3" +description = "Timeout context manager for asyncio programs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, +] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "authlib" +version = "1.3.0" +description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients." +optional = false +python-versions = ">=3.8" +files = [ + {file = "Authlib-1.3.0-py2.py3-none-any.whl", hash = "sha256:9637e4de1fb498310a56900b3e2043a206b03cb11c05422014b0302cbc814be3"}, + {file = "Authlib-1.3.0.tar.gz", hash = "sha256:959ea62a5b7b5123c5059758296122b57cd2585ae2ed1c0622c21b371ffdae06"}, +] + +[package.dependencies] +cryptography = "*" + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "bracex" +version = "2.4" +description = "Bash style brace expander." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + +[[package]] +name = "cachetools" +version = "5.3.3" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "cffi" +version = "1.16.0" +description = "Foreign Function Interface for Python calling C code." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, + {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, + {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, + {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, + {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "cohere" +version = "4.21" +description = "" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "cohere-4.21-py3-none-any.whl", hash = "sha256:5eb81db62e78b3156e734421cc3e657054f9d9f1d68b9f38cf48fe3a8ae40dbc"}, + {file = "cohere-4.21.tar.gz", hash = "sha256:f611438f409dfc5d5a0a153a585349f5a80b169c7102b5994d9999ecf8440866"}, +] + +[package.dependencies] +aiohttp = ">=3.0,<4.0" +backoff = ">=2.0,<3.0" +fastavro = {version = "1.8.2", markers = "python_version >= \"3.8\""} +importlib_metadata = ">=6.0,<7.0" +requests = ">=2.25.0,<3.0.0" +urllib3 = ">=1.26,<3" + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "contourpy" +version = "1.2.1" +description = "Python library for calculating contours of 2D quadrilateral grids" +optional = false +python-versions = ">=3.9" +files = [ + {file = "contourpy-1.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bd7c23df857d488f418439686d3b10ae2fbf9bc256cd045b37a8c16575ea1040"}, + {file = "contourpy-1.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5b9eb0ca724a241683c9685a484da9d35c872fd42756574a7cfbf58af26677fd"}, + {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c75507d0a55378240f781599c30e7776674dbaf883a46d1c90f37e563453480"}, + {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11959f0ce4a6f7b76ec578576a0b61a28bdc0696194b6347ba3f1c53827178b9"}, + {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eb3315a8a236ee19b6df481fc5f997436e8ade24a9f03dfdc6bd490fea20c6da"}, + {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39f3ecaf76cd98e802f094e0d4fbc6dc9c45a8d0c4d185f0f6c2234e14e5f75b"}, + {file = "contourpy-1.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:94b34f32646ca0414237168d68a9157cb3889f06b096612afdd296003fdd32fd"}, + {file = "contourpy-1.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:457499c79fa84593f22454bbd27670227874cd2ff5d6c84e60575c8b50a69619"}, + {file = "contourpy-1.2.1-cp310-cp310-win32.whl", hash = "sha256:ac58bdee53cbeba2ecad824fa8159493f0bf3b8ea4e93feb06c9a465d6c87da8"}, + {file = "contourpy-1.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:9cffe0f850e89d7c0012a1fb8730f75edd4320a0a731ed0c183904fe6ecfc3a9"}, + {file = "contourpy-1.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6022cecf8f44e36af10bd9118ca71f371078b4c168b6e0fab43d4a889985dbb5"}, + {file = "contourpy-1.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ef5adb9a3b1d0c645ff694f9bca7702ec2c70f4d734f9922ea34de02294fdf72"}, + {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6150ffa5c767bc6332df27157d95442c379b7dce3a38dff89c0f39b63275696f"}, + {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c863140fafc615c14a4bf4efd0f4425c02230eb8ef02784c9a156461e62c965"}, + {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:00e5388f71c1a0610e6fe56b5c44ab7ba14165cdd6d695429c5cd94021e390b2"}, + {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4492d82b3bc7fbb7e3610747b159869468079fe149ec5c4d771fa1f614a14df"}, + {file = "contourpy-1.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:49e70d111fee47284d9dd867c9bb9a7058a3c617274900780c43e38d90fe1205"}, + {file = "contourpy-1.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b59c0ffceff8d4d3996a45f2bb6f4c207f94684a96bf3d9728dbb77428dd8cb8"}, + {file = "contourpy-1.2.1-cp311-cp311-win32.whl", hash = "sha256:7b4182299f251060996af5249c286bae9361fa8c6a9cda5efc29fe8bfd6062ec"}, + {file = "contourpy-1.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2855c8b0b55958265e8b5888d6a615ba02883b225f2227461aa9127c578a4922"}, + {file = "contourpy-1.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:62828cada4a2b850dbef89c81f5a33741898b305db244904de418cc957ff05dc"}, + {file = "contourpy-1.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:309be79c0a354afff9ff7da4aaed7c3257e77edf6c1b448a779329431ee79d7e"}, + {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e785e0f2ef0d567099b9ff92cbfb958d71c2d5b9259981cd9bee81bd194c9a4"}, + {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1cac0a8f71a041aa587410424ad46dfa6a11f6149ceb219ce7dd48f6b02b87a7"}, + {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af3f4485884750dddd9c25cb7e3915d83c2db92488b38ccb77dd594eac84c4a0"}, + {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ce6889abac9a42afd07a562c2d6d4b2b7134f83f18571d859b25624a331c90b"}, + {file = "contourpy-1.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a1eea9aecf761c661d096d39ed9026574de8adb2ae1c5bd7b33558af884fb2ce"}, + {file = "contourpy-1.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:187fa1d4c6acc06adb0fae5544c59898ad781409e61a926ac7e84b8f276dcef4"}, + {file = "contourpy-1.2.1-cp312-cp312-win32.whl", hash = "sha256:c2528d60e398c7c4c799d56f907664673a807635b857df18f7ae64d3e6ce2d9f"}, + {file = "contourpy-1.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:1a07fc092a4088ee952ddae19a2b2a85757b923217b7eed584fdf25f53a6e7ce"}, + {file = "contourpy-1.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bb6834cbd983b19f06908b45bfc2dad6ac9479ae04abe923a275b5f48f1a186b"}, + {file = "contourpy-1.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1d59e739ab0e3520e62a26c60707cc3ab0365d2f8fecea74bfe4de72dc56388f"}, + {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd3db01f59fdcbce5b22afad19e390260d6d0222f35a1023d9adc5690a889364"}, + {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a12a813949e5066148712a0626895c26b2578874e4cc63160bb007e6df3436fe"}, + {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe0ccca550bb8e5abc22f530ec0466136379c01321fd94f30a22231e8a48d985"}, + {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1d59258c3c67c865435d8fbeb35f8c59b8bef3d6f46c1f29f6123556af28445"}, + {file = "contourpy-1.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f32c38afb74bd98ce26de7cc74a67b40afb7b05aae7b42924ea990d51e4dac02"}, + {file = "contourpy-1.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d31a63bc6e6d87f77d71e1abbd7387ab817a66733734883d1fc0021ed9bfa083"}, + {file = "contourpy-1.2.1-cp39-cp39-win32.whl", hash = "sha256:ddcb8581510311e13421b1f544403c16e901c4e8f09083c881fab2be80ee31ba"}, + {file = "contourpy-1.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:10a37ae557aabf2509c79715cd20b62e4c7c28b8cd62dd7d99e5ed3ce28c3fd9"}, + {file = "contourpy-1.2.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a31f94983fecbac95e58388210427d68cd30fe8a36927980fab9c20062645609"}, + {file = "contourpy-1.2.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef2b055471c0eb466033760a521efb9d8a32b99ab907fc8358481a1dd29e3bd3"}, + {file = "contourpy-1.2.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b33d2bc4f69caedcd0a275329eb2198f560b325605810895627be5d4b876bf7f"}, + {file = "contourpy-1.2.1.tar.gz", hash = "sha256:4d8908b3bee1c889e547867ca4cdc54e5ab6be6d3e078556814a22457f49423c"}, +] + +[package.dependencies] +numpy = ">=1.20" + +[package.extras] +bokeh = ["bokeh", "selenium"] +docs = ["furo", "sphinx (>=7.2)", "sphinx-copybutton"] +mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.8.0)", "types-Pillow"] +test = ["Pillow", "contourpy[test-no-images]", "matplotlib"] +test-no-images = ["pytest", "pytest-cov", "pytest-xdist", "wurlitzer"] + +[[package]] +name = "cryptography" +version = "42.0.5" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, + {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, + {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, + {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, + {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, + {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = "sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, + {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, +] + +[package.dependencies] +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] +nox = ["nox"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "cycler" +version = "0.12.1" +description = "Composable style cycles" +optional = false +python-versions = ">=3.8" +files = [ + {file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"}, + {file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"}, +] + +[package.extras] +docs = ["ipython", "matplotlib", "numpydoc", "sphinx"] +tests = ["pytest", "pytest-cov", "pytest-xdist"] + +[[package]] +name = "dataclasses-json" +version = "0.6.4" +description = "Easily serialize dataclasses to and from JSON." +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "dataclasses_json-0.6.4-py3-none-any.whl", hash = "sha256:f90578b8a3177f7552f4e1a6e535e84293cd5da421fcce0642d49c0d7bdf8df2"}, + {file = "dataclasses_json-0.6.4.tar.gz", hash = "sha256:73696ebf24936560cca79a2430cbc4f3dd23ac7bf46ed17f38e5e5e7657a6377"}, +] + +[package.dependencies] +marshmallow = ">=3.18.0,<4.0.0" +typing-inspect = ">=0.4.0,<1" + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "docker" +version = "7.0.0" +description = "A Python library for the Docker Engine API." +optional = false +python-versions = ">=3.8" +files = [ + {file = "docker-7.0.0-py3-none-any.whl", hash = "sha256:12ba681f2777a0ad28ffbcc846a69c31b4dfd9752b47eb425a274ee269c5e14b"}, + {file = "docker-7.0.0.tar.gz", hash = "sha256:323736fb92cd9418fc5e7133bc953e11a9da04f4483f828b527db553f1e7e5a3"}, +] + +[package.dependencies] +packaging = ">=14.0" +pywin32 = {version = ">=304", markers = "sys_platform == \"win32\""} +requests = ">=2.26.0" +urllib3 = ">=1.26.0" + +[package.extras] +ssh = ["paramiko (>=2.4.3)"] +websockets = ["websocket-client (>=1.3.0)"] + +[[package]] +name = "dpath" +version = "2.0.8" +description = "Filesystem-like pathing and searching for dictionaries" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, + {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, +] + +[[package]] +name = "et-xmlfile" +version = "1.1.0" +description = "An implementation of lxml.xmlfile for the standard library" +optional = false +python-versions = ">=3.6" +files = [ + {file = "et_xmlfile-1.1.0-py3-none-any.whl", hash = "sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"}, + {file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "fastavro" +version = "1.8.2" +description = "Fast read/write of AVRO files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fastavro-1.8.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:0e08964b2e9a455d831f2557402a683d4c4d45206f2ab9ade7c69d3dc14e0e58"}, + {file = "fastavro-1.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:401a70b1e5c7161420c6019e0c8afa88f7c8a373468591f5ec37639a903c2509"}, + {file = "fastavro-1.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef1ed3eaa4240c05698d02d8d0c010b9a03780eda37b492da6cd4c9d37e04ec"}, + {file = "fastavro-1.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:543185a672ff6306beb329b57a7b8a3a2dd1eb21a5ccc530150623d58d48bb98"}, + {file = "fastavro-1.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ffbf8bae1edb50fe7beeffc3afa8e684686550c2e5d31bf01c25cfa213f581e1"}, + {file = "fastavro-1.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:bb545eb9d876bc7b785e27e98e7720ada7eee7d7a1729798d2ed51517f13500a"}, + {file = "fastavro-1.8.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2b837d3038c651046252bc92c1b9899bf21c7927a148a1ff89599c36c2a331ca"}, + {file = "fastavro-1.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3510e96c0a47e4e914bd1a29c954eb662bfa24849ad92e597cb97cc79f21af7"}, + {file = "fastavro-1.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccc0e74f2c2ab357f39bb73d67fcdb6dc10e23fdbbd399326139f72ec0fb99a3"}, + {file = "fastavro-1.8.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:add51c70d0ab1175601c75cd687bbe9d16ae312cd8899b907aafe0d79ee2bc1d"}, + {file = "fastavro-1.8.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d9e2662f57e6453e9a2c9fb4f54b2a9e62e3e46f5a412ac00558112336d23883"}, + {file = "fastavro-1.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:fea75cf53a93c56dd56e68abce8d314ef877b27451c870cd7ede7582d34c08a7"}, + {file = "fastavro-1.8.2-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:f489020bb8664c2737c03457ad5dbd490579ddab6f0a7b5c17fecfe982715a89"}, + {file = "fastavro-1.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a547625c138efd5e61300119241041906ee8cb426fc7aa789900f87af7ed330d"}, + {file = "fastavro-1.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53beb458f30c9ad4aa7bff4a42243ff990ffb713b6ce0cd9b360cbc3d648fe52"}, + {file = "fastavro-1.8.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7b1b2cbd2dd851452306beed0ab9bdaeeab1cc8ad46f84b47cd81eeaff6dd6b8"}, + {file = "fastavro-1.8.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d29e9baee0b2f37ecd09bde3b487cf900431fd548c85be3e4fe1b9a0b2a917f1"}, + {file = "fastavro-1.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:66e132c710663230292bc63e2cb79cf95b16ccb94a5fc99bb63694b24e312fc5"}, + {file = "fastavro-1.8.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:38aca63ce604039bcdf2edd14912d00287bdbf8b76f9aa42b28e6ca0bf950092"}, + {file = "fastavro-1.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9787835f6449ee94713e7993a700432fce3763024791ffa8a58dc91ef9d1f950"}, + {file = "fastavro-1.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:536cb448bc83811056be02749fd9df37a69621678f02597d272970a769e9b40c"}, + {file = "fastavro-1.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e9d5027cf7d9968f8f819958b41bfedb933323ea6d6a0485eefacaa1afd91f54"}, + {file = "fastavro-1.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:792adfc0c80c7f1109e0ab4b0decef20691fdf0a45091d397a0563872eb56d42"}, + {file = "fastavro-1.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:650b22766259f7dd7519dfa4e4658f0e233c319efa130b9cf0c36a500e09cc57"}, + {file = "fastavro-1.8.2.tar.gz", hash = "sha256:ab9d9226d4b66b6b3d0661a57cd45259b0868fed1c0cd4fac95249b9e0973320"}, +] + +[package.extras] +codecs = ["lz4", "python-snappy", "zstandard"] +lz4 = ["lz4"] +snappy = ["python-snappy"] +zstandard = ["zstandard"] + +[[package]] +name = "fonttools" +version = "4.51.0" +description = "Tools to manipulate font files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fonttools-4.51.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:84d7751f4468dd8cdd03ddada18b8b0857a5beec80bce9f435742abc9a851a74"}, + {file = "fonttools-4.51.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8b4850fa2ef2cfbc1d1f689bc159ef0f45d8d83298c1425838095bf53ef46308"}, + {file = "fonttools-4.51.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5b48a1121117047d82695d276c2af2ee3a24ffe0f502ed581acc2673ecf1037"}, + {file = "fonttools-4.51.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:180194c7fe60c989bb627d7ed5011f2bef1c4d36ecf3ec64daec8302f1ae0716"}, + {file = "fonttools-4.51.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:96a48e137c36be55e68845fc4284533bda2980f8d6f835e26bca79d7e2006438"}, + {file = "fonttools-4.51.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:806e7912c32a657fa39d2d6eb1d3012d35f841387c8fc6cf349ed70b7c340039"}, + {file = "fonttools-4.51.0-cp310-cp310-win32.whl", hash = "sha256:32b17504696f605e9e960647c5f64b35704782a502cc26a37b800b4d69ff3c77"}, + {file = "fonttools-4.51.0-cp310-cp310-win_amd64.whl", hash = "sha256:c7e91abdfae1b5c9e3a543f48ce96013f9a08c6c9668f1e6be0beabf0a569c1b"}, + {file = "fonttools-4.51.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a8feca65bab31479d795b0d16c9a9852902e3a3c0630678efb0b2b7941ea9c74"}, + {file = "fonttools-4.51.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ac27f436e8af7779f0bb4d5425aa3535270494d3bc5459ed27de3f03151e4c2"}, + {file = "fonttools-4.51.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e19bd9e9964a09cd2433a4b100ca7f34e34731e0758e13ba9a1ed6e5468cc0f"}, + {file = "fonttools-4.51.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2b92381f37b39ba2fc98c3a45a9d6383bfc9916a87d66ccb6553f7bdd129097"}, + {file = "fonttools-4.51.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5f6bc991d1610f5c3bbe997b0233cbc234b8e82fa99fc0b2932dc1ca5e5afec0"}, + {file = "fonttools-4.51.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9696fe9f3f0c32e9a321d5268208a7cc9205a52f99b89479d1b035ed54c923f1"}, + {file = "fonttools-4.51.0-cp311-cp311-win32.whl", hash = "sha256:3bee3f3bd9fa1d5ee616ccfd13b27ca605c2b4270e45715bd2883e9504735034"}, + {file = "fonttools-4.51.0-cp311-cp311-win_amd64.whl", hash = "sha256:0f08c901d3866a8905363619e3741c33f0a83a680d92a9f0e575985c2634fcc1"}, + {file = "fonttools-4.51.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4060acc2bfa2d8e98117828a238889f13b6f69d59f4f2d5857eece5277b829ba"}, + {file = "fonttools-4.51.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1250e818b5f8a679ad79660855528120a8f0288f8f30ec88b83db51515411fcc"}, + {file = "fonttools-4.51.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76f1777d8b3386479ffb4a282e74318e730014d86ce60f016908d9801af9ca2a"}, + {file = "fonttools-4.51.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b5ad456813d93b9c4b7ee55302208db2b45324315129d85275c01f5cb7e61a2"}, + {file = "fonttools-4.51.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:68b3fb7775a923be73e739f92f7e8a72725fd333eab24834041365d2278c3671"}, + {file = "fonttools-4.51.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8e2f1a4499e3b5ee82c19b5ee57f0294673125c65b0a1ff3764ea1f9db2f9ef5"}, + {file = "fonttools-4.51.0-cp312-cp312-win32.whl", hash = "sha256:278e50f6b003c6aed19bae2242b364e575bcb16304b53f2b64f6551b9c000e15"}, + {file = "fonttools-4.51.0-cp312-cp312-win_amd64.whl", hash = "sha256:b3c61423f22165541b9403ee39874dcae84cd57a9078b82e1dce8cb06b07fa2e"}, + {file = "fonttools-4.51.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:1621ee57da887c17312acc4b0e7ac30d3a4fb0fec6174b2e3754a74c26bbed1e"}, + {file = "fonttools-4.51.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d9298be7a05bb4801f558522adbe2feea1b0b103d5294ebf24a92dd49b78e5"}, + {file = "fonttools-4.51.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee1af4be1c5afe4c96ca23badd368d8dc75f611887fb0c0dac9f71ee5d6f110e"}, + {file = "fonttools-4.51.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c18b49adc721a7d0b8dfe7c3130c89b8704baf599fb396396d07d4aa69b824a1"}, + {file = "fonttools-4.51.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de7c29bdbdd35811f14493ffd2534b88f0ce1b9065316433b22d63ca1cd21f14"}, + {file = "fonttools-4.51.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cadf4e12a608ef1d13e039864f484c8a968840afa0258b0b843a0556497ea9ed"}, + {file = "fonttools-4.51.0-cp38-cp38-win32.whl", hash = "sha256:aefa011207ed36cd280babfaa8510b8176f1a77261833e895a9d96e57e44802f"}, + {file = "fonttools-4.51.0-cp38-cp38-win_amd64.whl", hash = "sha256:865a58b6e60b0938874af0968cd0553bcd88e0b2cb6e588727117bd099eef836"}, + {file = "fonttools-4.51.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:60a3409c9112aec02d5fb546f557bca6efa773dcb32ac147c6baf5f742e6258b"}, + {file = "fonttools-4.51.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f7e89853d8bea103c8e3514b9f9dc86b5b4120afb4583b57eb10dfa5afbe0936"}, + {file = "fonttools-4.51.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56fc244f2585d6c00b9bcc59e6593e646cf095a96fe68d62cd4da53dd1287b55"}, + {file = "fonttools-4.51.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d145976194a5242fdd22df18a1b451481a88071feadf251221af110ca8f00ce"}, + {file = "fonttools-4.51.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5b8cab0c137ca229433570151b5c1fc6af212680b58b15abd797dcdd9dd5051"}, + {file = "fonttools-4.51.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:54dcf21a2f2d06ded676e3c3f9f74b2bafded3a8ff12f0983160b13e9f2fb4a7"}, + {file = "fonttools-4.51.0-cp39-cp39-win32.whl", hash = "sha256:0118ef998a0699a96c7b28457f15546815015a2710a1b23a7bf6c1be60c01636"}, + {file = "fonttools-4.51.0-cp39-cp39-win_amd64.whl", hash = "sha256:599bdb75e220241cedc6faebfafedd7670335d2e29620d207dd0378a4e9ccc5a"}, + {file = "fonttools-4.51.0-py3-none-any.whl", hash = "sha256:15c94eeef6b095831067f72c825eb0e2d48bb4cea0647c1b05c981ecba2bf39f"}, + {file = "fonttools-4.51.0.tar.gz", hash = "sha256:dc0673361331566d7a663d7ce0f6fdcbfbdc1f59c6e3ed1165ad7202ca183c68"}, +] + +[package.extras] +all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "pycairo", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0)", "xattr", "zopfli (>=0.1.4)"] +graphite = ["lz4 (>=1.7.4.2)"] +interpolatable = ["munkres", "pycairo", "scipy"] +lxml = ["lxml (>=4.0)"] +pathops = ["skia-pathops (>=0.5.0)"] +plot = ["matplotlib"] +repacker = ["uharfbuzz (>=0.23.0)"] +symfont = ["sympy"] +type1 = ["xattr"] +ufo = ["fs (>=2.2.0,<3)"] +unicode = ["unicodedata2 (>=15.1.0)"] +woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] + +[[package]] +name = "frozenlist" +version = "1.4.1" +description = "A list-like structure which implements collections.abc.MutableSequence" +optional = false +python-versions = ">=3.8" +files = [ + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, + {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, + {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, + {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, + {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, + {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, + {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, + {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, + {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, + {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, + {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, + {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, + {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, +] + +[[package]] +name = "genson" +version = "1.2.2" +description = "GenSON is a powerful, user-friendly JSON Schema generator." +optional = false +python-versions = "*" +files = [ + {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, +] + +[[package]] +name = "greenlet" +version = "3.0.3" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.7" +files = [ + {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, + {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, + {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, + {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, + {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, + {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, + {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, + {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, + {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, + {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, + {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, + {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, + {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, + {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, + {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, + {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil"] + +[[package]] +name = "idna" +version = "3.7" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, +] + +[[package]] +name = "importlib-metadata" +version = "6.11.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-6.11.0-py3-none-any.whl", hash = "sha256:f0afba6205ad8f8947c7d338b5342d5db2afbfd82f9cbef7879a9539cc12eb9b"}, + {file = "importlib_metadata-6.11.0.tar.gz", hash = "sha256:1231cf92d825c9e03cfc4da076a16de6422c863558229ea0b22b675657463443"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] + +[[package]] +name = "importlib-resources" +version = "6.4.0" +description = "Read resources from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_resources-6.4.0-py3-none-any.whl", hash = "sha256:50d10f043df931902d4194ea07ec57960f66a80449ff867bfe782b4c486ba78c"}, + {file = "importlib_resources-6.4.0.tar.gz", hash = "sha256:cdb2b453b8046ca4e3798eb1d84f3cce1446a0e8e7b5ef4efb600f19fc398145"}, +] + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["jaraco.test (>=5.4)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "joblib" +version = "1.4.0" +description = "Lightweight pipelining with Python functions" +optional = false +python-versions = ">=3.8" +files = [ + {file = "joblib-1.4.0-py3-none-any.whl", hash = "sha256:42942470d4062537be4d54c83511186da1fc14ba354961a2114da91efa9a4ed7"}, + {file = "joblib-1.4.0.tar.gz", hash = "sha256:1eb0dc091919cd384490de890cb5dfd538410a6d4b3b54eef09fb8c50b409b1c"}, +] + +[[package]] +name = "jsonpatch" +version = "1.33" +description = "Apply JSON-Patches (RFC 6902)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, + {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, +] + +[package.dependencies] +jsonpointer = ">=1.9" + +[[package]] +name = "jsonpointer" +version = "2.4" +description = "Identify specific nodes in a JSON document (RFC 6901)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, + {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, +] + +[[package]] +name = "jsonref" +version = "0.2" +description = "An implementation of JSON Reference for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, +] + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0" +setuptools = "*" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] + +[[package]] +name = "kiwisolver" +version = "1.4.5" +description = "A fast implementation of the Cassowary constraint solver" +optional = false +python-versions = ">=3.7" +files = [ + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:05703cf211d585109fcd72207a31bb170a0f22144d68298dc5e61b3c946518af"}, + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:146d14bebb7f1dc4d5fbf74f8a6cb15ac42baadee8912eb84ac0b3b2a3dc6ac3"}, + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ef7afcd2d281494c0a9101d5c571970708ad911d028137cd558f02b851c08b4"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9eaa8b117dc8337728e834b9c6e2611f10c79e38f65157c4c38e9400286f5cb1"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ec20916e7b4cbfb1f12380e46486ec4bcbaa91a9c448b97023fde0d5bbf9e4ff"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39b42c68602539407884cf70d6a480a469b93b81b7701378ba5e2328660c847a"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa12042de0171fad672b6c59df69106d20d5596e4f87b5e8f76df757a7c399aa"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a40773c71d7ccdd3798f6489aaac9eee213d566850a9533f8d26332d626b82c"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:19df6e621f6d8b4b9c4d45f40a66839294ff2bb235e64d2178f7522d9170ac5b"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:83d78376d0d4fd884e2c114d0621624b73d2aba4e2788182d286309ebdeed770"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e391b1f0a8a5a10ab3b9bb6afcfd74f2175f24f8975fb87ecae700d1503cdee0"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:852542f9481f4a62dbb5dd99e8ab7aedfeb8fb6342349a181d4036877410f525"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59edc41b24031bc25108e210c0def6f6c2191210492a972d585a06ff246bb79b"}, + {file = "kiwisolver-1.4.5-cp310-cp310-win32.whl", hash = "sha256:a6aa6315319a052b4ee378aa171959c898a6183f15c1e541821c5c59beaa0238"}, + {file = "kiwisolver-1.4.5-cp310-cp310-win_amd64.whl", hash = "sha256:d0ef46024e6a3d79c01ff13801cb19d0cad7fd859b15037aec74315540acc276"}, + {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:11863aa14a51fd6ec28688d76f1735f8f69ab1fabf388851a595d0721af042f5"}, + {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ab3919a9997ab7ef2fbbed0cc99bb28d3c13e6d4b1ad36e97e482558a91be90"}, + {file = "kiwisolver-1.4.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fcc700eadbbccbf6bc1bcb9dbe0786b4b1cb91ca0dcda336eef5c2beed37b797"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dfdd7c0b105af050eb3d64997809dc21da247cf44e63dc73ff0fd20b96be55a9"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76c6a5964640638cdeaa0c359382e5703e9293030fe730018ca06bc2010c4437"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbea0db94288e29afcc4c28afbf3a7ccaf2d7e027489c449cf7e8f83c6346eb9"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ceec1a6bc6cab1d6ff5d06592a91a692f90ec7505d6463a88a52cc0eb58545da"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:040c1aebeda72197ef477a906782b5ab0d387642e93bda547336b8957c61022e"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f91de7223d4c7b793867797bacd1ee53bfe7359bd70d27b7b58a04efbb9436c8"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:faae4860798c31530dd184046a900e652c95513796ef51a12bc086710c2eec4d"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0157420efcb803e71d1b28e2c287518b8808b7cf1ab8af36718fd0a2c453eb0"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:06f54715b7737c2fecdbf140d1afb11a33d59508a47bf11bb38ecf21dc9ab79f"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fdb7adb641a0d13bdcd4ef48e062363d8a9ad4a182ac7647ec88f695e719ae9f"}, + {file = "kiwisolver-1.4.5-cp311-cp311-win32.whl", hash = "sha256:bb86433b1cfe686da83ce32a9d3a8dd308e85c76b60896d58f082136f10bffac"}, + {file = "kiwisolver-1.4.5-cp311-cp311-win_amd64.whl", hash = "sha256:6c08e1312a9cf1074d17b17728d3dfce2a5125b2d791527f33ffbe805200a355"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:32d5cf40c4f7c7b3ca500f8985eb3fb3a7dfc023215e876f207956b5ea26632a"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f846c260f483d1fd217fe5ed7c173fb109efa6b1fc8381c8b7552c5781756192"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5ff5cf3571589b6d13bfbfd6bcd7a3f659e42f96b5fd1c4830c4cf21d4f5ef45"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7269d9e5f1084a653d575c7ec012ff57f0c042258bf5db0954bf551c158466e7"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da802a19d6e15dffe4b0c24b38b3af68e6c1a68e6e1d8f30148c83864f3881db"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3aba7311af82e335dd1e36ffff68aaca609ca6290c2cb6d821a39aa075d8e3ff"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:763773d53f07244148ccac5b084da5adb90bfaee39c197554f01b286cf869228"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2270953c0d8cdab5d422bee7d2007f043473f9d2999631c86a223c9db56cbd16"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d099e745a512f7e3bbe7249ca835f4d357c586d78d79ae8f1dcd4d8adeb9bda9"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:74db36e14a7d1ce0986fa104f7d5637aea5c82ca6326ed0ec5694280942d1162"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e5bab140c309cb3a6ce373a9e71eb7e4873c70c2dda01df6820474f9889d6d4"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0f114aa76dc1b8f636d077979c0ac22e7cd8f3493abbab152f20eb8d3cda71f3"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:88a2df29d4724b9237fc0c6eaf2a1adae0cdc0b3e9f4d8e7dc54b16812d2d81a"}, + {file = "kiwisolver-1.4.5-cp312-cp312-win32.whl", hash = "sha256:72d40b33e834371fd330fb1472ca19d9b8327acb79a5821d4008391db8e29f20"}, + {file = "kiwisolver-1.4.5-cp312-cp312-win_amd64.whl", hash = "sha256:2c5674c4e74d939b9d91dda0fae10597ac7521768fec9e399c70a1f27e2ea2d9"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3a2b053a0ab7a3960c98725cfb0bf5b48ba82f64ec95fe06f1d06c99b552e130"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cd32d6c13807e5c66a7cbb79f90b553642f296ae4518a60d8d76243b0ad2898"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59ec7b7c7e1a61061850d53aaf8e93db63dce0c936db1fda2658b70e4a1be709"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da4cfb373035def307905d05041c1d06d8936452fe89d464743ae7fb8371078b"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2400873bccc260b6ae184b2b8a4fec0e4082d30648eadb7c3d9a13405d861e89"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1b04139c4236a0f3aff534479b58f6f849a8b351e1314826c2d230849ed48985"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:4e66e81a5779b65ac21764c295087de82235597a2293d18d943f8e9e32746265"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7931d8f1f67c4be9ba1dd9c451fb0eeca1a25b89e4d3f89e828fe12a519b782a"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b3f7e75f3015df442238cca659f8baa5f42ce2a8582727981cbfa15fee0ee205"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:bbf1d63eef84b2e8c89011b7f2235b1e0bf7dacc11cac9431fc6468e99ac77fb"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4c380469bd3f970ef677bf2bcba2b6b0b4d5c75e7a020fb863ef75084efad66f"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-win32.whl", hash = "sha256:9408acf3270c4b6baad483865191e3e582b638b1654a007c62e3efe96f09a9a3"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-win_amd64.whl", hash = "sha256:5b94529f9b2591b7af5f3e0e730a4e0a41ea174af35a4fd067775f9bdfeee01a"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:11c7de8f692fc99816e8ac50d1d1aef4f75126eefc33ac79aac02c099fd3db71"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:53abb58632235cd154176ced1ae8f0d29a6657aa1aa9decf50b899b755bc2b93"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:88b9f257ca61b838b6f8094a62418421f87ac2a1069f7e896c36a7d86b5d4c29"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3195782b26fc03aa9c6913d5bad5aeb864bdc372924c093b0f1cebad603dd712"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc579bf0f502e54926519451b920e875f433aceb4624a3646b3252b5caa9e0b6"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a580c91d686376f0f7c295357595c5a026e6cbc3d77b7c36e290201e7c11ecb"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cfe6ab8da05c01ba6fbea630377b5da2cd9bcbc6338510116b01c1bc939a2c18"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d2e5a98f0ec99beb3c10e13b387f8db39106d53993f498b295f0c914328b1333"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a51a263952b1429e429ff236d2f5a21c5125437861baeed77f5e1cc2d2c7c6da"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3edd2fa14e68c9be82c5b16689e8d63d89fe927e56debd6e1dbce7a26a17f81b"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:74d1b44c6cfc897df648cc9fdaa09bc3e7679926e6f96df05775d4fb3946571c"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:76d9289ed3f7501012e05abb8358bbb129149dbd173f1f57a1bf1c22d19ab7cc"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:92dea1ffe3714fa8eb6a314d2b3c773208d865a0e0d35e713ec54eea08a66250"}, + {file = "kiwisolver-1.4.5-cp38-cp38-win32.whl", hash = "sha256:5c90ae8c8d32e472be041e76f9d2f2dbff4d0b0be8bd4041770eddb18cf49a4e"}, + {file = "kiwisolver-1.4.5-cp38-cp38-win_amd64.whl", hash = "sha256:c7940c1dc63eb37a67721b10d703247552416f719c4188c54e04334321351ced"}, + {file = "kiwisolver-1.4.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9407b6a5f0d675e8a827ad8742e1d6b49d9c1a1da5d952a67d50ef5f4170b18d"}, + {file = "kiwisolver-1.4.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15568384086b6df3c65353820a4473575dbad192e35010f622c6ce3eebd57af9"}, + {file = "kiwisolver-1.4.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0dc9db8e79f0036e8173c466d21ef18e1befc02de8bf8aa8dc0813a6dc8a7046"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cdc8a402aaee9a798b50d8b827d7ecf75edc5fb35ea0f91f213ff927c15f4ff0"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6c3bd3cde54cafb87d74d8db50b909705c62b17c2099b8f2e25b461882e544ff"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:955e8513d07a283056b1396e9a57ceddbd272d9252c14f154d450d227606eb54"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:346f5343b9e3f00b8db8ba359350eb124b98c99efd0b408728ac6ebf38173958"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9098e0049e88c6a24ff64545cdfc50807818ba6c1b739cae221bbbcbc58aad3"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:00bd361b903dc4bbf4eb165f24d1acbee754fce22ded24c3d56eec268658a5cf"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7b8b454bac16428b22560d0a1cf0a09875339cab69df61d7805bf48919415901"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f1d072c2eb0ad60d4c183f3fb44ac6f73fb7a8f16a2694a91f988275cbf352f9"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:31a82d498054cac9f6d0b53d02bb85811185bcb477d4b60144f915f3b3126342"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6512cb89e334e4700febbffaaa52761b65b4f5a3cf33f960213d5656cea36a77"}, + {file = "kiwisolver-1.4.5-cp39-cp39-win32.whl", hash = "sha256:9db8ea4c388fdb0f780fe91346fd438657ea602d58348753d9fb265ce1bca67f"}, + {file = "kiwisolver-1.4.5-cp39-cp39-win_amd64.whl", hash = "sha256:59415f46a37f7f2efeec758353dd2eae1b07640d8ca0f0c42548ec4125492635"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5c7b3b3a728dc6faf3fc372ef24f21d1e3cee2ac3e9596691d746e5a536de920"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:620ced262a86244e2be10a676b646f29c34537d0d9cc8eb26c08f53d98013390"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:378a214a1e3bbf5ac4a8708304318b4f890da88c9e6a07699c4ae7174c09a68d"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf7be1207676ac608a50cd08f102f6742dbfc70e8d60c4db1c6897f62f71523"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ba55dce0a9b8ff59495ddd050a0225d58bd0983d09f87cfe2b6aec4f2c1234e4"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fd32ea360bcbb92d28933fc05ed09bffcb1704ba3fc7942e81db0fd4f81a7892"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5e7139af55d1688f8b960ee9ad5adafc4ac17c1c473fe07133ac092310d76544"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dced8146011d2bc2e883f9bd68618b8247387f4bbec46d7392b3c3b032640126"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9bf3325c47b11b2e51bca0824ea217c7cd84491d8ac4eefd1e409705ef092bd"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5794cf59533bc3f1b1c821f7206a3617999db9fbefc345360aafe2e067514929"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e368f200bbc2e4f905b8e71eb38b3c04333bddaa6a2464a6355487b02bb7fb09"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5d706eba36b4c4d5bc6c6377bb6568098765e990cfc21ee16d13963fab7b3e7"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85267bd1aa8880a9c88a8cb71e18d3d64d2751a790e6ca6c27b8ccc724bcd5ad"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:210ef2c3a1f03272649aff1ef992df2e724748918c4bc2d5a90352849eb40bea"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:11d011a7574eb3b82bcc9c1a1d35c1d7075677fdd15de527d91b46bd35e935ee"}, + {file = "kiwisolver-1.4.5.tar.gz", hash = "sha256:e57e563a57fb22a142da34f38acc2fc1a5c864bc29ca1517a88abc963e60d6ec"}, +] + +[[package]] +name = "langchain" +version = "0.1.16" +description = "Building applications with LLMs through composability" +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langchain-0.1.16-py3-none-any.whl", hash = "sha256:bc074cc5e51fad79b9ead1572fc3161918d0f614a6c8f0460543d505ad249ac7"}, + {file = "langchain-0.1.16.tar.gz", hash = "sha256:b6bce78f8c071baa898884accfff15c3d81da2f0dd86c20e2f4c80b41463f49f"}, +] + +[package.dependencies] +aiohttp = ">=3.8.3,<4.0.0" +async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""} +dataclasses-json = ">=0.5.7,<0.7" +jsonpatch = ">=1.33,<2.0" +langchain-community = ">=0.0.32,<0.1" +langchain-core = ">=0.1.42,<0.2.0" +langchain-text-splitters = ">=0.0.1,<0.1" +langsmith = ">=0.1.17,<0.2.0" +numpy = ">=1,<2" +pydantic = ">=1,<3" +PyYAML = ">=5.3" +requests = ">=2,<3" +SQLAlchemy = ">=1.4,<3" +tenacity = ">=8.1.0,<9.0.0" + +[package.extras] +azure = ["azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-textanalytics (>=5.3.0,<6.0.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-core (>=1.26.4,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "azure-search-documents (==11.4.0b8)", "openai (<2)"] +clarifai = ["clarifai (>=9.1.0)"] +cli = ["typer (>=0.9.0,<0.10.0)"] +cohere = ["cohere (>=4,<6)"] +docarray = ["docarray[hnswlib] (>=0.32.0,<0.33.0)"] +embeddings = ["sentence-transformers (>=2,<3)"] +extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<6)", "couchbase (>=4.1.9,<5.0.0)", "dashvector (>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "langchain-openai (>=0.0.2,<0.1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] +javascript = ["esprima (>=4.0.1,<5.0.0)"] +llms = ["clarifai (>=9.1.0)", "cohere (>=4,<6)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (<2)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)"] +openai = ["openai (<2)", "tiktoken (>=0.3.2,<0.6.0)"] +qdrant = ["qdrant-client (>=1.3.1,<2.0.0)"] +text-helpers = ["chardet (>=5.1.0,<6.0.0)"] + +[[package]] +name = "langchain-community" +version = "0.0.32" +description = "Community contributed LangChain integrations." +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langchain_community-0.0.32-py3-none-any.whl", hash = "sha256:406977009999952d0705de3806de2b4867e9bb8eda8ca154a59c7a8ed58da38d"}, + {file = "langchain_community-0.0.32.tar.gz", hash = "sha256:1510217d646c8380f54e9850351f6d2a0b0dd73c501b666c6f4b40baa8160b29"}, +] + +[package.dependencies] +aiohttp = ">=3.8.3,<4.0.0" +dataclasses-json = ">=0.5.7,<0.7" +langchain-core = ">=0.1.41,<0.2.0" +langsmith = ">=0.1.0,<0.2.0" +numpy = ">=1,<2" +PyYAML = ">=5.3" +requests = ">=2,<3" +SQLAlchemy = ">=1.4,<3" +tenacity = ">=8.1.0,<9.0.0" + +[package.extras] +cli = ["typer (>=0.9.0,<0.10.0)"] +extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cloudpickle (>=2.0.0)", "cohere (>=4,<5)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "friendli-client (>=1.2.4,<2.0.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "httpx (>=0.24.1,<0.25.0)", "httpx-sse (>=0.4.0,<0.5.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "nvidia-riva-client (>=2.14.0,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "premai (>=0.3.25,<0.4.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pyjwt (>=2.8.0,<3.0.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "tidb-vector (>=0.0.3,<1.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "tree-sitter (>=0.20.2,<0.21.0)", "tree-sitter-languages (>=1.8.0,<2.0.0)", "upstash-redis (>=0.15.0,<0.16.0)", "vdms (>=0.0.20,<0.0.21)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] + +[[package]] +name = "langchain-core" +version = "0.1.42" +description = "Building applications with LLMs through composability" +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langchain_core-0.1.42-py3-none-any.whl", hash = "sha256:c5653ffa08a44f740295c157a24c0def4a753333f6a2c41f76bf431cd00be8b5"}, + {file = "langchain_core-0.1.42.tar.gz", hash = "sha256:40751bf60ea5d8e2b2efe65290db434717ee3834870c002e40e2811f09d814e6"}, +] + +[package.dependencies] +jsonpatch = ">=1.33,<2.0" +langsmith = ">=0.1.0,<0.2.0" +packaging = ">=23.2,<24.0" +pydantic = ">=1,<3" +PyYAML = ">=5.3" +tenacity = ">=8.1.0,<9.0.0" + +[package.extras] +extended-testing = ["jinja2 (>=3,<4)"] + +[[package]] +name = "langchain-text-splitters" +version = "0.0.1" +description = "LangChain text splitting utilities" +optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "langchain_text_splitters-0.0.1-py3-none-any.whl", hash = "sha256:f5b802f873f5ff6a8b9259ff34d53ed989666ef4e1582e6d1adb3b5520e3839a"}, + {file = "langchain_text_splitters-0.0.1.tar.gz", hash = "sha256:ac459fa98799f5117ad5425a9330b21961321e30bc19a2a2f9f761ddadd62aa1"}, +] + +[package.dependencies] +langchain-core = ">=0.1.28,<0.2.0" + +[package.extras] +extended-testing = ["lxml (>=5.1.0,<6.0.0)"] + +[[package]] +name = "langsmith" +version = "0.1.48" +description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langsmith-0.1.48-py3-none-any.whl", hash = "sha256:2f8967e2aaaed8881efe6f346590681243b315af8ba8a037d969c299d42071d3"}, + {file = "langsmith-0.1.48.tar.gz", hash = "sha256:9cd21cd0928123b2bd2363f03515cb1f6a833d9a9f00420240d5132861d15fcc"}, +] + +[package.dependencies] +orjson = ">=3.9.14,<4.0.0" +pydantic = ">=1,<3" +requests = ">=2,<3" + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "marshmallow" +version = "3.21.1" +description = "A lightweight library for converting complex datatypes to and from native Python datatypes." +optional = false +python-versions = ">=3.8" +files = [ + {file = "marshmallow-3.21.1-py3-none-any.whl", hash = "sha256:f085493f79efb0644f270a9bf2892843142d80d7174bbbd2f3713f2a589dc633"}, + {file = "marshmallow-3.21.1.tar.gz", hash = "sha256:4e65e9e0d80fc9e609574b9983cf32579f305c718afb30d7233ab818571768c3"}, +] + +[package.dependencies] +packaging = ">=17.0" + +[package.extras] +dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] +docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.2.6)", "sphinx-issues (==4.0.0)", "sphinx-version-warning (==1.1.2)"] +tests = ["pytest", "pytz", "simplejson"] + +[[package]] +name = "matplotlib" +version = "3.8.4" +description = "Python plotting package" +optional = false +python-versions = ">=3.9" +files = [ + {file = "matplotlib-3.8.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:abc9d838f93583650c35eca41cfcec65b2e7cb50fd486da6f0c49b5e1ed23014"}, + {file = "matplotlib-3.8.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f65c9f002d281a6e904976007b2d46a1ee2bcea3a68a8c12dda24709ddc9106"}, + {file = "matplotlib-3.8.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce1edd9f5383b504dbc26eeea404ed0a00656c526638129028b758fd43fc5f10"}, + {file = "matplotlib-3.8.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ecd79298550cba13a43c340581a3ec9c707bd895a6a061a78fa2524660482fc0"}, + {file = "matplotlib-3.8.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:90df07db7b599fe7035d2f74ab7e438b656528c68ba6bb59b7dc46af39ee48ef"}, + {file = "matplotlib-3.8.4-cp310-cp310-win_amd64.whl", hash = "sha256:ac24233e8f2939ac4fd2919eed1e9c0871eac8057666070e94cbf0b33dd9c338"}, + {file = "matplotlib-3.8.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:72f9322712e4562e792b2961971891b9fbbb0e525011e09ea0d1f416c4645661"}, + {file = "matplotlib-3.8.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:232ce322bfd020a434caaffbd9a95333f7c2491e59cfc014041d95e38ab90d1c"}, + {file = "matplotlib-3.8.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6addbd5b488aedb7f9bc19f91cd87ea476206f45d7116fcfe3d31416702a82fa"}, + {file = "matplotlib-3.8.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc4ccdc64e3039fc303defd119658148f2349239871db72cd74e2eeaa9b80b71"}, + {file = "matplotlib-3.8.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b7a2a253d3b36d90c8993b4620183b55665a429da8357a4f621e78cd48b2b30b"}, + {file = "matplotlib-3.8.4-cp311-cp311-win_amd64.whl", hash = "sha256:8080d5081a86e690d7688ffa542532e87f224c38a6ed71f8fbed34dd1d9fedae"}, + {file = "matplotlib-3.8.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6485ac1f2e84676cff22e693eaa4fbed50ef5dc37173ce1f023daef4687df616"}, + {file = "matplotlib-3.8.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c89ee9314ef48c72fe92ce55c4e95f2f39d70208f9f1d9db4e64079420d8d732"}, + {file = "matplotlib-3.8.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50bac6e4d77e4262c4340d7a985c30912054745ec99756ce213bfbc3cb3808eb"}, + {file = "matplotlib-3.8.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f51c4c869d4b60d769f7b4406eec39596648d9d70246428745a681c327a8ad30"}, + {file = "matplotlib-3.8.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b12ba985837e4899b762b81f5b2845bd1a28f4fdd1a126d9ace64e9c4eb2fb25"}, + {file = "matplotlib-3.8.4-cp312-cp312-win_amd64.whl", hash = "sha256:7a6769f58ce51791b4cb8b4d7642489df347697cd3e23d88266aaaee93b41d9a"}, + {file = "matplotlib-3.8.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:843cbde2f0946dadd8c5c11c6d91847abd18ec76859dc319362a0964493f0ba6"}, + {file = "matplotlib-3.8.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1c13f041a7178f9780fb61cc3a2b10423d5e125480e4be51beaf62b172413b67"}, + {file = "matplotlib-3.8.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb44f53af0a62dc80bba4443d9b27f2fde6acfdac281d95bc872dc148a6509cc"}, + {file = "matplotlib-3.8.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:606e3b90897554c989b1e38a258c626d46c873523de432b1462f295db13de6f9"}, + {file = "matplotlib-3.8.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9bb0189011785ea794ee827b68777db3ca3f93f3e339ea4d920315a0e5a78d54"}, + {file = "matplotlib-3.8.4-cp39-cp39-win_amd64.whl", hash = "sha256:6209e5c9aaccc056e63b547a8152661324404dd92340a6e479b3a7f24b42a5d0"}, + {file = "matplotlib-3.8.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c7064120a59ce6f64103c9cefba8ffe6fba87f2c61d67c401186423c9a20fd35"}, + {file = "matplotlib-3.8.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0e47eda4eb2614300fc7bb4657fced3e83d6334d03da2173b09e447418d499f"}, + {file = "matplotlib-3.8.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:493e9f6aa5819156b58fce42b296ea31969f2aab71c5b680b4ea7a3cb5c07d94"}, + {file = "matplotlib-3.8.4.tar.gz", hash = "sha256:8aac397d5e9ec158960e31c381c5ffc52ddd52bd9a47717e2a694038167dffea"}, +] + +[package.dependencies] +contourpy = ">=1.0.1" +cycler = ">=0.10" +fonttools = ">=4.22.0" +importlib-resources = {version = ">=3.2.0", markers = "python_version < \"3.10\""} +kiwisolver = ">=1.3.1" +numpy = ">=1.21" +packaging = ">=20.0" +pillow = ">=8" +pyparsing = ">=2.3.1" +python-dateutil = ">=2.7" + +[[package]] +name = "multidict" +version = "6.0.5" +description = "multidict implementation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, + {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, + {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, + {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, + {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, + {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, + {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, + {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, + {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, + {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, + {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, + {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, + {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, + {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, + {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, + {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, +] + +[[package]] +name = "mypy" +version = "1.9.0" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"}, + {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"}, + {file = "mypy-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150"}, + {file = "mypy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374"}, + {file = "mypy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc"}, + {file = "mypy-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129"}, + {file = "mypy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612"}, + {file = "mypy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6"}, + {file = "mypy-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185"}, + {file = "mypy-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913"}, + {file = "mypy-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"}, + {file = "mypy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b"}, + {file = "mypy-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2"}, + {file = "mypy-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e"}, + {file = "mypy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04"}, + {file = "mypy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89"}, + {file = "mypy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02"}, + {file = "mypy-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4"}, + {file = "mypy-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d"}, + {file = "mypy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf"}, + {file = "mypy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9"}, + {file = "mypy-1.9.0-py3-none-any.whl", hash = "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e"}, + {file = "mypy-1.9.0.tar.gz", hash = "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.1.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "numpy" +version = "1.26.4" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, +] + +[[package]] +name = "openai" +version = "0.27.9" +description = "Python client library for the OpenAI API" +optional = false +python-versions = ">=3.7.1" +files = [ + {file = "openai-0.27.9-py3-none-any.whl", hash = "sha256:6a3cf8e276d1a6262b50562fbc0cba7967cfebb78ed827d375986b48fdad6475"}, + {file = "openai-0.27.9.tar.gz", hash = "sha256:b687761c82f5ebb6f61efc791b2083d2d068277b94802d4d1369efe39851813d"}, +] + +[package.dependencies] +aiohttp = "*" +matplotlib = {version = "*", optional = true, markers = "extra == \"embeddings\""} +numpy = {version = "*", optional = true, markers = "extra == \"embeddings\""} +openpyxl = {version = ">=3.0.7", optional = true, markers = "extra == \"embeddings\""} +pandas = {version = ">=1.2.3", optional = true, markers = "extra == \"embeddings\""} +pandas-stubs = {version = ">=1.1.0.11", optional = true, markers = "extra == \"embeddings\""} +plotly = {version = "*", optional = true, markers = "extra == \"embeddings\""} +requests = ">=2.20" +scikit-learn = {version = ">=1.0.2", optional = true, markers = "extra == \"embeddings\""} +scipy = {version = "*", optional = true, markers = "extra == \"embeddings\""} +tenacity = {version = ">=8.0.1", optional = true, markers = "extra == \"embeddings\""} +tqdm = "*" + +[package.extras] +datalib = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] +dev = ["black (>=21.6b0,<22.0)", "pytest (==6.*)", "pytest-asyncio", "pytest-mock"] +embeddings = ["matplotlib", "numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "plotly", "scikit-learn (>=1.0.2)", "scipy", "tenacity (>=8.0.1)"] +wandb = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "wandb"] + +[[package]] +name = "openpyxl" +version = "3.1.2" +description = "A Python library to read/write Excel 2010 xlsx/xlsm files" +optional = false +python-versions = ">=3.6" +files = [ + {file = "openpyxl-3.1.2-py2.py3-none-any.whl", hash = "sha256:f91456ead12ab3c6c2e9491cf33ba6d08357d802192379bb482f1033ade496f5"}, + {file = "openpyxl-3.1.2.tar.gz", hash = "sha256:a6f5977418eff3b2d5500d54d9db50c8277a368436f4e4f8ddb1be3422870184"}, +] + +[package.dependencies] +et-xmlfile = "*" + +[[package]] +name = "orjson" +version = "3.10.1" +description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +optional = false +python-versions = ">=3.8" +files = [ + {file = "orjson-3.10.1-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:8ec2fc456d53ea4a47768f622bb709be68acd455b0c6be57e91462259741c4f3"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e900863691d327758be14e2a491931605bd0aded3a21beb6ce133889830b659"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ab6ecbd6fe57785ebc86ee49e183f37d45f91b46fc601380c67c5c5e9c0014a2"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8af7c68b01b876335cccfb4eee0beef2b5b6eae1945d46a09a7c24c9faac7a77"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:915abfb2e528677b488a06eba173e9d7706a20fdfe9cdb15890b74ef9791b85e"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe3fd4a36eff9c63d25503b439531d21828da9def0059c4f472e3845a081aa0b"}, + {file = "orjson-3.10.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d229564e72cfc062e6481a91977a5165c5a0fdce11ddc19ced8471847a67c517"}, + {file = "orjson-3.10.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9e00495b18304173ac843b5c5fbea7b6f7968564d0d49bef06bfaeca4b656f4e"}, + {file = "orjson-3.10.1-cp310-none-win32.whl", hash = "sha256:fd78ec55179545c108174ba19c1795ced548d6cac4d80d014163033c047ca4ea"}, + {file = "orjson-3.10.1-cp310-none-win_amd64.whl", hash = "sha256:50ca42b40d5a442a9e22eece8cf42ba3d7cd4cd0f2f20184b4d7682894f05eec"}, + {file = "orjson-3.10.1-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b345a3d6953628df2f42502297f6c1e1b475cfbf6268013c94c5ac80e8abc04c"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:caa7395ef51af4190d2c70a364e2f42138e0e5fcb4bc08bc9b76997659b27dab"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b01d701decd75ae092e5f36f7b88a1e7a1d3bb7c9b9d7694de850fb155578d5a"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b5028981ba393f443d8fed9049211b979cadc9d0afecf162832f5a5b152c6297"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:31ff6a222ea362b87bf21ff619598a4dc1106aaafaea32b1c4876d692891ec27"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e852a83d7803d3406135fb7a57cf0c1e4a3e73bac80ec621bd32f01c653849c5"}, + {file = "orjson-3.10.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2567bc928ed3c3fcd90998009e8835de7c7dc59aabcf764b8374d36044864f3b"}, + {file = "orjson-3.10.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4ce98cac60b7bb56457bdd2ed7f0d5d7f242d291fdc0ca566c83fa721b52e92d"}, + {file = "orjson-3.10.1-cp311-none-win32.whl", hash = "sha256:813905e111318acb356bb8029014c77b4c647f8b03f314e7b475bd9ce6d1a8ce"}, + {file = "orjson-3.10.1-cp311-none-win_amd64.whl", hash = "sha256:03a3ca0b3ed52bed1a869163a4284e8a7b0be6a0359d521e467cdef7e8e8a3ee"}, + {file = "orjson-3.10.1-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:f02c06cee680b1b3a8727ec26c36f4b3c0c9e2b26339d64471034d16f74f4ef5"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1aa2f127ac546e123283e437cc90b5ecce754a22306c7700b11035dad4ccf85"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2cf29b4b74f585225196944dffdebd549ad2af6da9e80db7115984103fb18a96"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1b130c20b116f413caf6059c651ad32215c28500dce9cd029a334a2d84aa66f"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d31f9a709e6114492136e87c7c6da5e21dfedebefa03af85f3ad72656c493ae9"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d1d169461726f271ab31633cf0e7e7353417e16fb69256a4f8ecb3246a78d6e"}, + {file = "orjson-3.10.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:57c294d73825c6b7f30d11c9e5900cfec9a814893af7f14efbe06b8d0f25fba9"}, + {file = "orjson-3.10.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d7f11dbacfa9265ec76b4019efffabaabba7a7ebf14078f6b4df9b51c3c9a8ea"}, + {file = "orjson-3.10.1-cp312-none-win32.whl", hash = "sha256:d89e5ed68593226c31c76ab4de3e0d35c760bfd3fbf0a74c4b2be1383a1bf123"}, + {file = "orjson-3.10.1-cp312-none-win_amd64.whl", hash = "sha256:aa76c4fe147fd162107ce1692c39f7189180cfd3a27cfbc2ab5643422812da8e"}, + {file = "orjson-3.10.1-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a2c6a85c92d0e494c1ae117befc93cf8e7bca2075f7fe52e32698da650b2c6d1"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9813f43da955197d36a7365eb99bed42b83680801729ab2487fef305b9ced866"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ec917b768e2b34b7084cb6c68941f6de5812cc26c6f1a9fecb728e36a3deb9e8"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5252146b3172d75c8a6d27ebca59c9ee066ffc5a277050ccec24821e68742fdf"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:536429bb02791a199d976118b95014ad66f74c58b7644d21061c54ad284e00f4"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7dfed3c3e9b9199fb9c3355b9c7e4649b65f639e50ddf50efdf86b45c6de04b5"}, + {file = "orjson-3.10.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2b230ec35f188f003f5b543644ae486b2998f6afa74ee3a98fc8ed2e45960afc"}, + {file = "orjson-3.10.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:01234249ba19c6ab1eb0b8be89f13ea21218b2d72d496ef085cfd37e1bae9dd8"}, + {file = "orjson-3.10.1-cp38-none-win32.whl", hash = "sha256:8a884fbf81a3cc22d264ba780920d4885442144e6acaa1411921260416ac9a54"}, + {file = "orjson-3.10.1-cp38-none-win_amd64.whl", hash = "sha256:dab5f802d52b182163f307d2b1f727d30b1762e1923c64c9c56dd853f9671a49"}, + {file = "orjson-3.10.1-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a51fd55d4486bc5293b7a400f9acd55a2dc3b5fc8420d5ffe9b1d6bb1a056a5e"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53521542a6db1411b3bfa1b24ddce18605a3abdc95a28a67b33f9145f26aa8f2"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:27d610df96ac18ace4931411d489637d20ab3b8f63562b0531bba16011998db0"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79244b1456e5846d44e9846534bd9e3206712936d026ea8e6a55a7374d2c0694"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d751efaa8a49ae15cbebdda747a62a9ae521126e396fda8143858419f3b03610"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27ff69c620a4fff33267df70cfd21e0097c2a14216e72943bd5414943e376d77"}, + {file = "orjson-3.10.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ebc58693464146506fde0c4eb1216ff6d4e40213e61f7d40e2f0dde9b2f21650"}, + {file = "orjson-3.10.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5be608c3972ed902e0143a5b8776d81ac1059436915d42defe5c6ae97b3137a4"}, + {file = "orjson-3.10.1-cp39-none-win32.whl", hash = "sha256:4ae10753e7511d359405aadcbf96556c86e9dbf3a948d26c2c9f9a150c52b091"}, + {file = "orjson-3.10.1-cp39-none-win_amd64.whl", hash = "sha256:fb5bc4caa2c192077fdb02dce4e5ef8639e7f20bec4e3a834346693907362932"}, + {file = "orjson-3.10.1.tar.gz", hash = "sha256:a883b28d73370df23ed995c466b4f6c708c1f7a9bdc400fe89165c96c7603204"}, +] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pandas" +version = "2.2.2" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, + {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, + {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, + {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, + {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"}, + {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"}, + {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.22.4", markers = "python_version < \"3.11\""}, + {version = ">=1.23.2", markers = "python_version == \"3.11\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.7" + +[package.extras] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] + +[[package]] +name = "pandas-stubs" +version = "2.2.1.240316" +description = "Type annotations for pandas" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pandas_stubs-2.2.1.240316-py3-none-any.whl", hash = "sha256:0126a26451a37cb893ea62357ca87ba3d181bd999ec8ba2ca5602e20207d6682"}, + {file = "pandas_stubs-2.2.1.240316.tar.gz", hash = "sha256:236a4f812fb6b1922e9607ff09e427f6d8540c421c9e5a40e3e4ddf7adac7f05"}, +] + +[package.dependencies] +numpy = {version = ">=1.26.0", markers = "python_version < \"3.13\""} +types-pytz = ">=2022.1.1" + +[[package]] +name = "pendulum" +version = "2.1.2" +description = "Python datetimes made easy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, +] + +[package.dependencies] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" + +[[package]] +name = "pillow" +version = "10.3.0" +description = "Python Imaging Library (Fork)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pillow-10.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:90b9e29824800e90c84e4022dd5cc16eb2d9605ee13f05d47641eb183cd73d45"}, + {file = "pillow-10.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a2c405445c79c3f5a124573a051062300936b0281fee57637e706453e452746c"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78618cdbccaa74d3f88d0ad6cb8ac3007f1a6fa5c6f19af64b55ca170bfa1edf"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261ddb7ca91fcf71757979534fb4c128448b5b4c55cb6152d280312062f69599"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ce49c67f4ea0609933d01c0731b34b8695a7a748d6c8d186f95e7d085d2fe475"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b14f16f94cbc61215115b9b1236f9c18403c15dd3c52cf629072afa9d54c1cbf"}, + {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d33891be6df59d93df4d846640f0e46f1a807339f09e79a8040bc887bdcd7ed3"}, + {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b50811d664d392f02f7761621303eba9d1b056fb1868c8cdf4231279645c25f5"}, + {file = "pillow-10.3.0-cp310-cp310-win32.whl", hash = "sha256:ca2870d5d10d8726a27396d3ca4cf7976cec0f3cb706debe88e3a5bd4610f7d2"}, + {file = "pillow-10.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:f0d0591a0aeaefdaf9a5e545e7485f89910c977087e7de2b6c388aec32011e9f"}, + {file = "pillow-10.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:ccce24b7ad89adb5a1e34a6ba96ac2530046763912806ad4c247356a8f33a67b"}, + {file = "pillow-10.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:5f77cf66e96ae734717d341c145c5949c63180842a545c47a0ce7ae52ca83795"}, + {file = "pillow-10.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4b878386c4bf293578b48fc570b84ecfe477d3b77ba39a6e87150af77f40c57"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdcbb4068117dfd9ce0138d068ac512843c52295ed996ae6dd1faf537b6dbc27"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9797a6c8fe16f25749b371c02e2ade0efb51155e767a971c61734b1bf6293994"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:9e91179a242bbc99be65e139e30690e081fe6cb91a8e77faf4c409653de39451"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:1b87bd9d81d179bd8ab871603bd80d8645729939f90b71e62914e816a76fc6bd"}, + {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:81d09caa7b27ef4e61cb7d8fbf1714f5aec1c6b6c5270ee53504981e6e9121ad"}, + {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:048ad577748b9fa4a99a0548c64f2cb8d672d5bf2e643a739ac8faff1164238c"}, + {file = "pillow-10.3.0-cp311-cp311-win32.whl", hash = "sha256:7161ec49ef0800947dc5570f86568a7bb36fa97dd09e9827dc02b718c5643f09"}, + {file = "pillow-10.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:8eb0908e954d093b02a543dc963984d6e99ad2b5e36503d8a0aaf040505f747d"}, + {file = "pillow-10.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:4e6f7d1c414191c1199f8996d3f2282b9ebea0945693fb67392c75a3a320941f"}, + {file = "pillow-10.3.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:e46f38133e5a060d46bd630faa4d9fa0202377495df1f068a8299fd78c84de84"}, + {file = "pillow-10.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:50b8eae8f7334ec826d6eeffaeeb00e36b5e24aa0b9df322c247539714c6df19"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d3bea1c75f8c53ee4d505c3e67d8c158ad4df0d83170605b50b64025917f338"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19aeb96d43902f0a783946a0a87dbdad5c84c936025b8419da0a0cd7724356b1"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:74d28c17412d9caa1066f7a31df8403ec23d5268ba46cd0ad2c50fb82ae40462"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:ff61bfd9253c3915e6d41c651d5f962da23eda633cf02262990094a18a55371a"}, + {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d886f5d353333b4771d21267c7ecc75b710f1a73d72d03ca06df49b09015a9ef"}, + {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b5ec25d8b17217d635f8935dbc1b9aa5907962fae29dff220f2659487891cd3"}, + {file = "pillow-10.3.0-cp312-cp312-win32.whl", hash = "sha256:51243f1ed5161b9945011a7360e997729776f6e5d7005ba0c6879267d4c5139d"}, + {file = "pillow-10.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:412444afb8c4c7a6cc11a47dade32982439925537e483be7c0ae0cf96c4f6a0b"}, + {file = "pillow-10.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:798232c92e7665fe82ac085f9d8e8ca98826f8e27859d9a96b41d519ecd2e49a"}, + {file = "pillow-10.3.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:4eaa22f0d22b1a7e93ff0a596d57fdede2e550aecffb5a1ef1106aaece48e96b"}, + {file = "pillow-10.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cd5e14fbf22a87321b24c88669aad3a51ec052eb145315b3da3b7e3cc105b9a2"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1530e8f3a4b965eb6a7785cf17a426c779333eb62c9a7d1bbcf3ffd5bf77a4aa"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d512aafa1d32efa014fa041d38868fda85028e3f930a96f85d49c7d8ddc0383"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:339894035d0ede518b16073bdc2feef4c991ee991a29774b33e515f1d308e08d"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:aa7e402ce11f0885305bfb6afb3434b3cd8f53b563ac065452d9d5654c7b86fd"}, + {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0ea2a783a2bdf2a561808fe4a7a12e9aa3799b701ba305de596bc48b8bdfce9d"}, + {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c78e1b00a87ce43bb37642c0812315b411e856a905d58d597750eb79802aaaa3"}, + {file = "pillow-10.3.0-cp38-cp38-win32.whl", hash = "sha256:72d622d262e463dfb7595202d229f5f3ab4b852289a1cd09650362db23b9eb0b"}, + {file = "pillow-10.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:2034f6759a722da3a3dbd91a81148cf884e91d1b747992ca288ab88c1de15999"}, + {file = "pillow-10.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2ed854e716a89b1afcedea551cd85f2eb2a807613752ab997b9974aaa0d56936"}, + {file = "pillow-10.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dc1a390a82755a8c26c9964d457d4c9cbec5405896cba94cf51f36ea0d855002"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4203efca580f0dd6f882ca211f923168548f7ba334c189e9eab1178ab840bf60"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3102045a10945173d38336f6e71a8dc71bcaeed55c3123ad4af82c52807b9375"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6fb1b30043271ec92dc65f6d9f0b7a830c210b8a96423074b15c7bc999975f57"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:1dfc94946bc60ea375cc39cff0b8da6c7e5f8fcdc1d946beb8da5c216156ddd8"}, + {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b09b86b27a064c9624d0a6c54da01c1beaf5b6cadfa609cf63789b1d08a797b9"}, + {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d3b2348a78bc939b4fed6552abfd2e7988e0f81443ef3911a4b8498ca084f6eb"}, + {file = "pillow-10.3.0-cp39-cp39-win32.whl", hash = "sha256:45ebc7b45406febf07fef35d856f0293a92e7417ae7933207e90bf9090b70572"}, + {file = "pillow-10.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:0ba26351b137ca4e0db0342d5d00d2e355eb29372c05afd544ebf47c0956ffeb"}, + {file = "pillow-10.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:50fd3f6b26e3441ae07b7c979309638b72abc1a25da31a81a7fbd9495713ef4f"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6b02471b72526ab8a18c39cb7967b72d194ec53c1fd0a70b050565a0f366d355"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8ab74c06ffdab957d7670c2a5a6e1a70181cd10b727cd788c4dd9005b6a8acd9"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:048eeade4c33fdf7e08da40ef402e748df113fd0b4584e32c4af74fe78baaeb2"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e2ec1e921fd07c7cda7962bad283acc2f2a9ccc1b971ee4b216b75fad6f0463"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c8e73e99da7db1b4cad7f8d682cf6abad7844da39834c288fbfa394a47bbced"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:16563993329b79513f59142a6b02055e10514c1a8e86dca8b48a893e33cf91e3"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:dd78700f5788ae180b5ee8902c6aea5a5726bac7c364b202b4b3e3ba2d293170"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:aff76a55a8aa8364d25400a210a65ff59d0168e0b4285ba6bf2bd83cf675ba32"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b7bc2176354defba3edc2b9a777744462da2f8e921fbaf61e52acb95bafa9828"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:793b4e24db2e8742ca6423d3fde8396db336698c55cd34b660663ee9e45ed37f"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d93480005693d247f8346bc8ee28c72a2191bdf1f6b5db469c096c0c867ac015"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c83341b89884e2b2e55886e8fbbf37c3fa5efd6c8907124aeb72f285ae5696e5"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1a1d1915db1a4fdb2754b9de292642a39a7fb28f1736699527bb649484fb966a"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a0eaa93d054751ee9964afa21c06247779b90440ca41d184aeb5d410f20ff591"}, + {file = "pillow-10.3.0.tar.gz", hash = "sha256:9d2455fbf44c914840c793e89aa82d0e1763a14253a000743719ae5946814b2d"}, +] + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] +fpx = ["olefile"] +mic = ["olefile"] +tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] +typing = ["typing-extensions"] +xmp = ["defusedxml"] + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "plotly" +version = "5.20.0" +description = "An open-source, interactive data visualization library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "plotly-5.20.0-py3-none-any.whl", hash = "sha256:837a9c8aa90f2c0a2f0d747b82544d014dc2a2bdde967b5bb1da25b53932d1a9"}, + {file = "plotly-5.20.0.tar.gz", hash = "sha256:bf901c805d22032cfa534b2ff7c5aa6b0659e037f19ec1e0cca7f585918b5c89"}, +] + +[package.dependencies] +packaging = "*" +tenacity = ">=6.2.0" + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pycparser" +version = "2.22" +description = "C parser in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, +] + +[[package]] +name = "pydantic" +version = "1.10.15" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50f1666a9940d3d68683c9d96e39640f709d7a72ff8702987dab1761036206bb"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82790d4753ee5d00739d6cb5cf56bceb186d9d6ce134aca3ba7befb1eedbc2c8"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d207d5b87f6cbefbdb1198154292faee8017d7495a54ae58db06762004500d00"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e49db944fad339b2ccb80128ffd3f8af076f9f287197a480bf1e4ca053a866f0"}, + {file = "pydantic-1.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:d3b5c4cbd0c9cb61bbbb19ce335e1f8ab87a811f6d589ed52b0254cf585d709c"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3d5731a120752248844676bf92f25a12f6e45425e63ce22e0849297a093b5b0"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c365ad9c394f9eeffcb30a82f4246c0006417f03a7c0f8315d6211f25f7cb654"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3287e1614393119c67bd4404f46e33ae3be3ed4cd10360b48d0a4459f420c6a3"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be51dd2c8596b25fe43c0a4a59c2bee4f18d88efb8031188f9e7ddc6b469cf44"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6a51a1dd4aa7b3f1317f65493a182d3cff708385327c1c82c81e4a9d6d65b2e4"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4e316e54b5775d1eb59187f9290aeb38acf620e10f7fd2f776d97bb788199e53"}, + {file = "pydantic-1.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:0d142fa1b8f2f0ae11ddd5e3e317dcac060b951d605fda26ca9b234b92214986"}, + {file = "pydantic-1.10.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7ea210336b891f5ea334f8fc9f8f862b87acd5d4a0cbc9e3e208e7aa1775dabf"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3453685ccd7140715e05f2193d64030101eaad26076fad4e246c1cc97e1bb30d"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bea1f03b8d4e8e86702c918ccfd5d947ac268f0f0cc6ed71782e4b09353b26f"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:af9850d98fc21e5bc24ea9e35dd80a29faf6462c608728a110c0a30b595e58b7"}, + {file = "pydantic-1.10.15-cp37-cp37m-win_amd64.whl", hash = "sha256:d31ee5b14a82c9afe2bd26aaa405293d4237d0591527d9129ce36e58f19f95c1"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5e09c19df304b8123938dc3c53d3d3be6ec74b9d7d0d80f4f4b5432ae16c2022"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7ac9237cd62947db00a0d16acf2f3e00d1ae9d3bd602b9c415f93e7a9fc10528"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:584f2d4c98ffec420e02305cf675857bae03c9d617fcfdc34946b1160213a948"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbc6989fad0c030bd70a0b6f626f98a862224bc2b1e36bfc531ea2facc0a340c"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d573082c6ef99336f2cb5b667b781d2f776d4af311574fb53d908517ba523c22"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6bd7030c9abc80134087d8b6e7aa957e43d35714daa116aced57269a445b8f7b"}, + {file = "pydantic-1.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:3350f527bb04138f8aff932dc828f154847fbdc7a1a44c240fbfff1b57f49a12"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51d405b42f1b86703555797270e4970a9f9bd7953f3990142e69d1037f9d9e51"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a980a77c52723b0dc56640ced396b73a024d4b74f02bcb2d21dbbac1debbe9d0"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f1a1fb467d3f49e1708a3f632b11c69fccb4e748a325d5a491ddc7b5d22383"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:676ed48f2c5bbad835f1a8ed8a6d44c1cd5a21121116d2ac40bd1cd3619746ed"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92229f73400b80c13afcd050687f4d7e88de9234d74b27e6728aa689abcf58cc"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2746189100c646682eff0bce95efa7d2e203420d8e1c613dc0c6b4c1d9c1fde4"}, + {file = "pydantic-1.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:394f08750bd8eaad714718812e7fab615f873b3cdd0b9d84e76e51ef3b50b6b7"}, + {file = "pydantic-1.10.15-py3-none-any.whl", hash = "sha256:28e552a060ba2740d0d2aabe35162652c1459a0b9069fe0db7f4ee0e18e74d58"}, + {file = "pydantic-1.10.15.tar.gz", hash = "sha256:ca832e124eda231a60a041da4f013e3ff24949d94a01154b137fc2f2a43c3ffb"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pyparsing" +version = "3.1.2" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +optional = false +python-versions = ">=3.6.8" +files = [ + {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, + {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, +] + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + +[[package]] +name = "pyrsistent" +version = "0.20.0" +description = "Persistent/Functional/Immutable data structures" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, +] + +[[package]] +name = "pytest" +version = "7.4.4" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-docker" +version = "2.0.1" +description = "Simple pytest fixtures for Docker and Docker Compose based tests" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pytest-docker-2.0.1.tar.gz", hash = "sha256:1c17e9202a566f85ed5ef269fe2815bd4899e90eb639622e5d14277372ca7524"}, + {file = "pytest_docker-2.0.1-py3-none-any.whl", hash = "sha256:7103f97b8c479c826b63d73cfb83383dc1970d35105ed1ce78a722c90c7fe650"}, +] + +[package.dependencies] +attrs = ">=19.2.0" +pytest = ">=4.0,<8.0" + +[package.extras] +docker-compose-v1 = ["docker-compose (>=1.27.3,<2.0)"] +tests = ["pytest-pycodestyle (>=2.0.0,<3.0)", "pytest-pylint (>=0.14.1,<1.0)", "requests (>=2.22.0,<3.0)"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + +[[package]] +name = "pywin32" +version = "306" +description = "Python for Window Extensions" +optional = false +python-versions = "*" +files = [ + {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, + {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, + {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, + {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, + {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, + {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, + {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, + {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, + {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, + {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, + {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, + {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, + {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, + {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "regex" +version = "2023.12.25" +description = "Alternative regular expression module, to replace re." +optional = false +python-versions = ">=3.7" +files = [ + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"}, + {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"}, + {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"}, + {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"}, + {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"}, + {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"}, + {file = "regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"}, + {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"}, + {file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = "sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"}, + {file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"}, + {file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"}, + {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"}, + {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"}, + {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"}, + {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-cache" +version = "1.2.0" +description = "A persistent cache for python requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, +] + +[package.dependencies] +attrs = ">=21.2" +cattrs = ">=22.2" +platformdirs = ">=2.5" +requests = ">=2.22" +url-normalize = ">=1.4" +urllib3 = ">=1.25.5" + +[package.extras] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] +bson = ["bson (>=0.5)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] +dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] +json = ["ujson (>=5.4)"] +mongodb = ["pymongo (>=3)"] +redis = ["redis (>=3)"] +security = ["itsdangerous (>=2.0)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "ruff" +version = "0.3.7" +description = "An extremely fast Python linter and code formatter, written in Rust." +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0e8377cccb2f07abd25e84fc5b2cbe48eeb0fea9f1719cad7caedb061d70e5ce"}, + {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:15a4d1cc1e64e556fa0d67bfd388fed416b7f3b26d5d1c3e7d192c897e39ba4b"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d28bdf3d7dc71dd46929fafeec98ba89b7c3550c3f0978e36389b5631b793663"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:379b67d4f49774ba679593b232dcd90d9e10f04d96e3c8ce4a28037ae473f7bb"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c060aea8ad5ef21cdfbbe05475ab5104ce7827b639a78dd55383a6e9895b7c51"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:ebf8f615dde968272d70502c083ebf963b6781aacd3079081e03b32adfe4d58a"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d48098bd8f5c38897b03604f5428901b65e3c97d40b3952e38637b5404b739a2"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da8a4fda219bf9024692b1bc68c9cff4b80507879ada8769dc7e985755d662ea"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c44e0149f1d8b48c4d5c33d88c677a4aa22fd09b1683d6a7ff55b816b5d074f"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3050ec0af72b709a62ecc2aca941b9cd479a7bf2b36cc4562f0033d688e44fa1"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a29cc38e4c1ab00da18a3f6777f8b50099d73326981bb7d182e54a9a21bb4ff7"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5b15cc59c19edca917f51b1956637db47e200b0fc5e6e1878233d3a938384b0b"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e491045781b1e38b72c91247cf4634f040f8d0cb3e6d3d64d38dcf43616650b4"}, + {file = "ruff-0.3.7-py3-none-win32.whl", hash = "sha256:bc931de87593d64fad3a22e201e55ad76271f1d5bfc44e1a1887edd0903c7d9f"}, + {file = "ruff-0.3.7-py3-none-win_amd64.whl", hash = "sha256:5ef0e501e1e39f35e03c2acb1d1238c595b8bb36cf7a170e7c1df1b73da00e74"}, + {file = "ruff-0.3.7-py3-none-win_arm64.whl", hash = "sha256:789e144f6dc7019d1f92a812891c645274ed08af6037d11fc65fcbc183b7d59f"}, + {file = "ruff-0.3.7.tar.gz", hash = "sha256:d5c1aebee5162c2226784800ae031f660c350e7a3402c4d1f8ea4e97e232e3ba"}, +] + +[[package]] +name = "scikit-learn" +version = "1.4.2" +description = "A set of python modules for machine learning and data mining" +optional = false +python-versions = ">=3.9" +files = [ + {file = "scikit-learn-1.4.2.tar.gz", hash = "sha256:daa1c471d95bad080c6e44b4946c9390a4842adc3082572c20e4f8884e39e959"}, + {file = "scikit_learn-1.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8539a41b3d6d1af82eb629f9c57f37428ff1481c1e34dddb3b9d7af8ede67ac5"}, + {file = "scikit_learn-1.4.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:68b8404841f944a4a1459b07198fa2edd41a82f189b44f3e1d55c104dbc2e40c"}, + {file = "scikit_learn-1.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81bf5d8bbe87643103334032dd82f7419bc8c8d02a763643a6b9a5c7288c5054"}, + {file = "scikit_learn-1.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36f0ea5d0f693cb247a073d21a4123bdf4172e470e6d163c12b74cbb1536cf38"}, + {file = "scikit_learn-1.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:87440e2e188c87db80ea4023440923dccbd56fbc2d557b18ced00fef79da0727"}, + {file = "scikit_learn-1.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:45dee87ac5309bb82e3ea633955030df9bbcb8d2cdb30383c6cd483691c546cc"}, + {file = "scikit_learn-1.4.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:1d0b25d9c651fd050555aadd57431b53d4cf664e749069da77f3d52c5ad14b3b"}, + {file = "scikit_learn-1.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0203c368058ab92efc6168a1507d388d41469c873e96ec220ca8e74079bf62e"}, + {file = "scikit_learn-1.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44c62f2b124848a28fd695db5bc4da019287abf390bfce602ddc8aa1ec186aae"}, + {file = "scikit_learn-1.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:5cd7b524115499b18b63f0c96f4224eb885564937a0b3477531b2b63ce331904"}, + {file = "scikit_learn-1.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:90378e1747949f90c8f385898fff35d73193dfcaec3dd75d6b542f90c4e89755"}, + {file = "scikit_learn-1.4.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:ff4effe5a1d4e8fed260a83a163f7dbf4f6087b54528d8880bab1d1377bd78be"}, + {file = "scikit_learn-1.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:671e2f0c3f2c15409dae4f282a3a619601fa824d2c820e5b608d9d775f91780c"}, + {file = "scikit_learn-1.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d36d0bc983336bbc1be22f9b686b50c964f593c8a9a913a792442af9bf4f5e68"}, + {file = "scikit_learn-1.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:d762070980c17ba3e9a4a1e043ba0518ce4c55152032f1af0ca6f39b376b5928"}, + {file = "scikit_learn-1.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d9993d5e78a8148b1d0fdf5b15ed92452af5581734129998c26f481c46586d68"}, + {file = "scikit_learn-1.4.2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:426d258fddac674fdf33f3cb2d54d26f49406e2599dbf9a32b4d1696091d4256"}, + {file = "scikit_learn-1.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5460a1a5b043ae5ae4596b3126a4ec33ccba1b51e7ca2c5d36dac2169f62ab1d"}, + {file = "scikit_learn-1.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49d64ef6cb8c093d883e5a36c4766548d974898d378e395ba41a806d0e824db8"}, + {file = "scikit_learn-1.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:c97a50b05c194be9146d61fe87dbf8eac62b203d9e87a3ccc6ae9aed2dfaf361"}, +] + +[package.dependencies] +joblib = ">=1.2.0" +numpy = ">=1.19.5" +scipy = ">=1.6.0" +threadpoolctl = ">=2.0.0" + +[package.extras] +benchmark = ["matplotlib (>=3.3.4)", "memory-profiler (>=0.57.0)", "pandas (>=1.1.5)"] +docs = ["Pillow (>=7.1.2)", "matplotlib (>=3.3.4)", "memory-profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)", "sphinx (>=6.0.0)", "sphinx-copybutton (>=0.5.2)", "sphinx-gallery (>=0.15.0)", "sphinx-prompt (>=1.3.0)", "sphinxext-opengraph (>=0.4.2)"] +examples = ["matplotlib (>=3.3.4)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)"] +tests = ["black (>=23.3.0)", "matplotlib (>=3.3.4)", "mypy (>=1.3)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "polars (>=0.19.12)", "pooch (>=1.6.0)", "pyamg (>=4.0.0)", "pyarrow (>=12.0.0)", "pytest (>=7.1.2)", "pytest-cov (>=2.9.0)", "ruff (>=0.0.272)", "scikit-image (>=0.17.2)"] + +[[package]] +name = "scipy" +version = "1.13.0" +description = "Fundamental algorithms for scientific computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "scipy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ba419578ab343a4e0a77c0ef82f088238a93eef141b2b8017e46149776dfad4d"}, + {file = "scipy-1.13.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:22789b56a999265431c417d462e5b7f2b487e831ca7bef5edeb56efe4c93f86e"}, + {file = "scipy-1.13.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05f1432ba070e90d42d7fd836462c50bf98bd08bed0aa616c359eed8a04e3922"}, + {file = "scipy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8434f6f3fa49f631fae84afee424e2483289dfc30a47755b4b4e6b07b2633a4"}, + {file = "scipy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:dcbb9ea49b0167de4167c40eeee6e167caeef11effb0670b554d10b1e693a8b9"}, + {file = "scipy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:1d2f7bb14c178f8b13ebae93f67e42b0a6b0fc50eba1cd8021c9b6e08e8fb1cd"}, + {file = "scipy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fbcf8abaf5aa2dc8d6400566c1a727aed338b5fe880cde64907596a89d576fa"}, + {file = "scipy-1.13.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:5e4a756355522eb60fcd61f8372ac2549073c8788f6114449b37e9e8104f15a5"}, + {file = "scipy-1.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5acd8e1dbd8dbe38d0004b1497019b2dbbc3d70691e65d69615f8a7292865d7"}, + {file = "scipy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ff7dad5d24a8045d836671e082a490848e8639cabb3dbdacb29f943a678683d"}, + {file = "scipy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4dca18c3ffee287ddd3bc8f1dabaf45f5305c5afc9f8ab9cbfab855e70b2df5c"}, + {file = "scipy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:a2f471de4d01200718b2b8927f7d76b5d9bde18047ea0fa8bd15c5ba3f26a1d6"}, + {file = "scipy-1.13.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d0de696f589681c2802f9090fff730c218f7c51ff49bf252b6a97ec4a5d19e8b"}, + {file = "scipy-1.13.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:b2a3ff461ec4756b7e8e42e1c681077349a038f0686132d623fa404c0bee2551"}, + {file = "scipy-1.13.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf9fe63e7a4bf01d3645b13ff2aa6dea023d38993f42aaac81a18b1bda7a82a"}, + {file = "scipy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e7626dfd91cdea5714f343ce1176b6c4745155d234f1033584154f60ef1ff42"}, + {file = "scipy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:109d391d720fcebf2fbe008621952b08e52907cf4c8c7efc7376822151820820"}, + {file = "scipy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:8930ae3ea371d6b91c203b1032b9600d69c568e537b7988a3073dfe4d4774f21"}, + {file = "scipy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5407708195cb38d70fd2d6bb04b1b9dd5c92297d86e9f9daae1576bd9e06f602"}, + {file = "scipy-1.13.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:ac38c4c92951ac0f729c4c48c9e13eb3675d9986cc0c83943784d7390d540c78"}, + {file = "scipy-1.13.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09c74543c4fbeb67af6ce457f6a6a28e5d3739a87f62412e4a16e46f164f0ae5"}, + {file = "scipy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28e286bf9ac422d6beb559bc61312c348ca9b0f0dae0d7c5afde7f722d6ea13d"}, + {file = "scipy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:33fde20efc380bd23a78a4d26d59fc8704e9b5fd9b08841693eb46716ba13d86"}, + {file = "scipy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:45c08bec71d3546d606989ba6e7daa6f0992918171e2a6f7fbedfa7361c2de1e"}, + {file = "scipy-1.13.0.tar.gz", hash = "sha256:58569af537ea29d3f78e5abd18398459f195546bb3be23d16677fb26616cc11e"}, +] + +[package.dependencies] +numpy = ">=1.22.4,<2.3" + +[package.extras] +dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy", "pycodestyle", "pydevtool", "rich-click", "ruff", "types-psutil", "typing_extensions"] +doc = ["jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.12.0)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0)", "sphinx-design (>=0.4.0)"] +test = ["array-api-strict", "asv", "gmpy2", "hypothesis (>=6.30)", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] + +[[package]] +name = "setuptools" +version = "69.5.1" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"}, + {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "sqlalchemy" +version = "2.0.29" +description = "Database Abstraction Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "SQLAlchemy-2.0.29-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4c142852ae192e9fe5aad5c350ea6befe9db14370b34047e1f0f7cf99e63c63b"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:99a1e69d4e26f71e750e9ad6fdc8614fbddb67cfe2173a3628a2566034e223c7"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ef3fbccb4058355053c51b82fd3501a6e13dd808c8d8cd2561e610c5456013c"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d6753305936eddc8ed190e006b7bb33a8f50b9854823485eed3a886857ab8d1"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0f3ca96af060a5250a8ad5a63699180bc780c2edf8abf96c58af175921df847a"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c4520047006b1d3f0d89e0532978c0688219857eb2fee7c48052560ae76aca1e"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-win32.whl", hash = "sha256:b2a0e3cf0caac2085ff172c3faacd1e00c376e6884b5bc4dd5b6b84623e29e4f"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-win_amd64.whl", hash = "sha256:01d10638a37460616708062a40c7b55f73e4d35eaa146781c683e0fa7f6c43fb"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:308ef9cb41d099099fffc9d35781638986870b29f744382904bf9c7dadd08513"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:296195df68326a48385e7a96e877bc19aa210e485fa381c5246bc0234c36c78e"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a13b917b4ffe5a0a31b83d051d60477819ddf18276852ea68037a144a506efb9"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f6d971255d9ddbd3189e2e79d743ff4845c07f0633adfd1de3f63d930dbe673"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:61405ea2d563407d316c63a7b5271ae5d274a2a9fbcd01b0aa5503635699fa1e"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:de7202ffe4d4a8c1e3cde1c03e01c1a3772c92858837e8f3879b497158e4cb44"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-win32.whl", hash = "sha256:b5d7ed79df55a731749ce65ec20d666d82b185fa4898430b17cb90c892741520"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-win_amd64.whl", hash = "sha256:205f5a2b39d7c380cbc3b5dcc8f2762fb5bcb716838e2d26ccbc54330775b003"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d96710d834a6fb31e21381c6d7b76ec729bd08c75a25a5184b1089141356171f"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:52de4736404e53c5c6a91ef2698c01e52333988ebdc218f14c833237a0804f1b"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c7b02525ede2a164c5fa5014915ba3591730f2cc831f5be9ff3b7fd3e30958e"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dfefdb3e54cd15f5d56fd5ae32f1da2d95d78319c1f6dfb9bcd0eb15d603d5d"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a88913000da9205b13f6f195f0813b6ffd8a0c0c2bd58d499e00a30eb508870c"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fecd5089c4be1bcc37c35e9aa678938d2888845a134dd016de457b942cf5a758"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-win32.whl", hash = "sha256:8197d6f7a3d2b468861ebb4c9f998b9df9e358d6e1cf9c2a01061cb9b6cf4e41"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-win_amd64.whl", hash = "sha256:9b19836ccca0d321e237560e475fd99c3d8655d03da80c845c4da20dda31b6e1"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:87a1d53a5382cdbbf4b7619f107cc862c1b0a4feb29000922db72e5a66a5ffc0"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a0732dffe32333211801b28339d2a0babc1971bc90a983e3035e7b0d6f06b93"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90453597a753322d6aa770c5935887ab1fc49cc4c4fdd436901308383d698b4b"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ea311d4ee9a8fa67f139c088ae9f905fcf0277d6cd75c310a21a88bf85e130f5"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5f20cb0a63a3e0ec4e169aa8890e32b949c8145983afa13a708bc4b0a1f30e03"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-win32.whl", hash = "sha256:e5bbe55e8552019c6463709b39634a5fc55e080d0827e2a3a11e18eb73f5cdbd"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-win_amd64.whl", hash = "sha256:c2f9c762a2735600654c654bf48dad388b888f8ce387b095806480e6e4ff6907"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7e614d7a25a43a9f54fcce4675c12761b248547f3d41b195e8010ca7297c369c"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:471fcb39c6adf37f820350c28aac4a7df9d3940c6548b624a642852e727ea586"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:988569c8732f54ad3234cf9c561364221a9e943b78dc7a4aaf35ccc2265f1930"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dddaae9b81c88083e6437de95c41e86823d150f4ee94bf24e158a4526cbead01"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:334184d1ab8f4c87f9652b048af3f7abea1c809dfe526fb0435348a6fef3d380"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:38b624e5cf02a69b113c8047cf7f66b5dfe4a2ca07ff8b8716da4f1b3ae81567"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-win32.whl", hash = "sha256:bab41acf151cd68bc2b466deae5deeb9e8ae9c50ad113444151ad965d5bf685b"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-win_amd64.whl", hash = "sha256:52c8011088305476691b8750c60e03b87910a123cfd9ad48576d6414b6ec2a1d"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3071ad498896907a5ef756206b9dc750f8e57352113c19272bdfdc429c7bd7de"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dba622396a3170974f81bad49aacebd243455ec3cc70615aeaef9e9613b5bca5"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b184e3de58009cc0bf32e20f137f1ec75a32470f5fede06c58f6c355ed42a72"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c37f1050feb91f3d6c32f864d8e114ff5545a4a7afe56778d76a9aec62638ba"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bda7ce59b06d0f09afe22c56714c65c957b1068dee3d5e74d743edec7daba552"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:25664e18bef6dc45015b08f99c63952a53a0a61f61f2e48a9e70cec27e55f699"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-win32.whl", hash = "sha256:77d29cb6c34b14af8a484e831ab530c0f7188f8efed1c6a833a2c674bf3c26ec"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-win_amd64.whl", hash = "sha256:04c487305ab035a9548f573763915189fc0fe0824d9ba28433196f8436f1449c"}, + {file = "SQLAlchemy-2.0.29-py3-none-any.whl", hash = "sha256:dc4ee2d4ee43251905f88637d5281a8d52e916a021384ec10758826f5cbae305"}, + {file = "SQLAlchemy-2.0.29.tar.gz", hash = "sha256:bd9566b8e58cabd700bc367b60e90d9349cd16f0984973f98a9a09f9c64e86f0"}, +] + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} +typing-extensions = ">=4.6.0" + +[package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=8)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3_binary"] + +[[package]] +name = "tenacity" +version = "8.2.3" +description = "Retry code until it succeeds" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tenacity-8.2.3-py3-none-any.whl", hash = "sha256:ce510e327a630c9e1beaf17d42e6ffacc88185044ad85cf74c0a8887c6a0f88c"}, + {file = "tenacity-8.2.3.tar.gz", hash = "sha256:5398ef0d78e63f40007c1fb4c0bff96e1911394d2fa8d194f77619c05ff6cc8a"}, +] + +[package.extras] +doc = ["reno", "sphinx", "tornado (>=4.5)"] + +[[package]] +name = "threadpoolctl" +version = "3.4.0" +description = "threadpoolctl" +optional = false +python-versions = ">=3.8" +files = [ + {file = "threadpoolctl-3.4.0-py3-none-any.whl", hash = "sha256:8f4c689a65b23e5ed825c8436a92b818aac005e0f3715f6a1664d7c7ee29d262"}, + {file = "threadpoolctl-3.4.0.tar.gz", hash = "sha256:f11b491a03661d6dd7ef692dd422ab34185d982466c49c8f98c8f716b5c93196"}, +] + +[[package]] +name = "tiktoken" +version = "0.4.0" +description = "tiktoken is a fast BPE tokeniser for use with OpenAI's models" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tiktoken-0.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:176cad7f053d2cc82ce7e2a7c883ccc6971840a4b5276740d0b732a2b2011f8a"}, + {file = "tiktoken-0.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:450d504892b3ac80207700266ee87c932df8efea54e05cefe8613edc963c1285"}, + {file = "tiktoken-0.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00d662de1e7986d129139faf15e6a6ee7665ee103440769b8dedf3e7ba6ac37f"}, + {file = "tiktoken-0.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5727d852ead18b7927b8adf558a6f913a15c7766725b23dbe21d22e243041b28"}, + {file = "tiktoken-0.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c06cd92b09eb0404cedce3702fa866bf0d00e399439dad3f10288ddc31045422"}, + {file = "tiktoken-0.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9ec161e40ed44e4210d3b31e2ff426b4a55e8254f1023e5d2595cb60044f8ea6"}, + {file = "tiktoken-0.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:1e8fa13cf9889d2c928b9e258e9dbbbf88ab02016e4236aae76e3b4f82dd8288"}, + {file = "tiktoken-0.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bb2341836b725c60d0ab3c84970b9b5f68d4b733a7bcb80fb25967e5addb9920"}, + {file = "tiktoken-0.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ca30367ad750ee7d42fe80079d3092bd35bb266be7882b79c3bd159b39a17b0"}, + {file = "tiktoken-0.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3dc3df19ddec79435bb2a94ee46f4b9560d0299c23520803d851008445671197"}, + {file = "tiktoken-0.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d980fa066e962ef0f4dad0222e63a484c0c993c7a47c7dafda844ca5aded1f3"}, + {file = "tiktoken-0.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:329f548a821a2f339adc9fbcfd9fc12602e4b3f8598df5593cfc09839e9ae5e4"}, + {file = "tiktoken-0.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b1a038cee487931a5caaef0a2e8520e645508cde21717eacc9af3fbda097d8bb"}, + {file = "tiktoken-0.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:08efa59468dbe23ed038c28893e2a7158d8c211c3dd07f2bbc9a30e012512f1d"}, + {file = "tiktoken-0.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f3020350685e009053829c1168703c346fb32c70c57d828ca3742558e94827a9"}, + {file = "tiktoken-0.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba16698c42aad8190e746cd82f6a06769ac7edd415d62ba027ea1d99d958ed93"}, + {file = "tiktoken-0.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c15d9955cc18d0d7ffcc9c03dc51167aedae98542238b54a2e659bd25fe77ed"}, + {file = "tiktoken-0.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64e1091c7103100d5e2c6ea706f0ec9cd6dc313e6fe7775ef777f40d8c20811e"}, + {file = "tiktoken-0.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e87751b54eb7bca580126353a9cf17a8a8eaadd44edaac0e01123e1513a33281"}, + {file = "tiktoken-0.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e063b988b8ba8b66d6cc2026d937557437e79258095f52eaecfafb18a0a10c03"}, + {file = "tiktoken-0.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9c6dd439e878172dc163fced3bc7b19b9ab549c271b257599f55afc3a6a5edef"}, + {file = "tiktoken-0.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8d1d97f83697ff44466c6bef5d35b6bcdb51e0125829a9c0ed1e6e39fb9a08fb"}, + {file = "tiktoken-0.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1b6bce7c68aa765f666474c7c11a7aebda3816b58ecafb209afa59c799b0dd2d"}, + {file = "tiktoken-0.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a73286c35899ca51d8d764bc0b4d60838627ce193acb60cc88aea60bddec4fd"}, + {file = "tiktoken-0.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0394967d2236a60fd0aacef26646b53636423cc9c70c32f7c5124ebe86f3093"}, + {file = "tiktoken-0.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:dae2af6f03ecba5f679449fa66ed96585b2fa6accb7fd57d9649e9e398a94f44"}, + {file = "tiktoken-0.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:55e251b1da3c293432179cf7c452cfa35562da286786be5a8b1ee3405c2b0dd2"}, + {file = "tiktoken-0.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:c835d0ee1f84a5aa04921717754eadbc0f0a56cf613f78dfc1cf9ad35f6c3fea"}, + {file = "tiktoken-0.4.0.tar.gz", hash = "sha256:59b20a819969735b48161ced9b92f05dc4519c17be4015cfb73b65270a243620"}, +] + +[package.dependencies] +regex = ">=2022.1.18" +requests = ">=2.26.0" + +[package.extras] +blobfile = ["blobfile (>=2)"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "tqdm" +version = "4.66.2" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tqdm-4.66.2-py3-none-any.whl", hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"}, + {file = "tqdm-4.66.2.tar.gz", hash = "sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + +[[package]] +name = "types-pytz" +version = "2024.1.0.20240203" +description = "Typing stubs for pytz" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-pytz-2024.1.0.20240203.tar.gz", hash = "sha256:c93751ee20dfc6e054a0148f8f5227b9a00b79c90a4d3c9f464711a73179c89e"}, + {file = "types_pytz-2024.1.0.20240203-py3-none-any.whl", hash = "sha256:9679eef0365db3af91ef7722c199dbb75ee5c1b67e3c4dd7bfbeb1b8a71c21a3"}, +] + +[[package]] +name = "typing-extensions" +version = "4.11.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, + {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, +] + +[[package]] +name = "typing-inspect" +version = "0.9.0" +description = "Runtime inspection utilities for typing module." +optional = false +python-versions = "*" +files = [ + {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, + {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, +] + +[package.dependencies] +mypy-extensions = ">=0.3.0" +typing-extensions = ">=3.7.4" + +[[package]] +name = "tzdata" +version = "2024.1" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, +] + +[[package]] +name = "url-normalize" +version = "1.4.3" +description = "URL normalization for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, + {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "validators" +version = "0.28.0" +description = "Python Data Validation for Humans™" +optional = false +python-versions = ">=3.8" +files = [ + {file = "validators-0.28.0-py3-none-any.whl", hash = "sha256:e0184691dea3ba82b52c161ba81d3ec1d8be8da9609f0137d1430b395b366521"}, + {file = "validators-0.28.0.tar.gz", hash = "sha256:85bc82511f6ccd0800f4c15d8c0dc546c15e369640c5ea1f24349ba0b3b17815"}, +] + +[[package]] +name = "wcmatch" +version = "8.4" +description = "Wildcard/glob file name matcher." +optional = false +python-versions = ">=3.7" +files = [ + {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, + {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, +] + +[package.dependencies] +bracex = ">=2.1.1" + +[[package]] +name = "weaviate-client" +version = "3.25.2" +description = "A python native Weaviate client" +optional = false +python-versions = ">=3.8" +files = [ + {file = "weaviate-client-3.25.2.tar.gz", hash = "sha256:47b2beb0a986960e692b1ad99bd165f27c4ff15658d0c26f3dd58ab90f093064"}, + {file = "weaviate_client-3.25.2-py3-none-any.whl", hash = "sha256:881717258b9510835f7643f4bb94fbae03cd98ae328e74e8c1bed5ec6872e91e"}, +] + +[package.dependencies] +authlib = ">=1.2.1,<2.0.0" +requests = ">=2.30.0,<3.0.0" +validators = ">=0.21.2,<1.0.0" + +[package.extras] +grpc = ["grpcio (>=1.57.0,<2.0.0)", "grpcio-tools (>=1.57.0,<2.0.0)"] + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[[package]] +name = "yarl" +version = "1.9.4" +description = "Yet another URL library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, + {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, + {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, + {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, + {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, + {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, + {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, + {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, + {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, + {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, + {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, + {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, + {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, + {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, + {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, + {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" + +[[package]] +name = "zipp" +version = "3.18.1" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, + {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9,<3.12" +content-hash = "4e8d8d52d67662b1c495b65de8830b684360719e89d301919c50a207ec773c21" diff --git a/airbyte-integrations/connectors/destination-weaviate/pyproject.toml b/airbyte-integrations/connectors/destination-weaviate/pyproject.toml new file mode 100644 index 0000000000000..140161db2ad56 --- /dev/null +++ b/airbyte-integrations/connectors/destination-weaviate/pyproject.toml @@ -0,0 +1,32 @@ +[build-system] +requires = [ "poetry-core>=1.0.0",] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +name = "airbyte-destination-weaviate" +version = "0.2.17" +description = "Airbyte destination implementation for Weaviate." +authors = ["Airbyte "] +license = "MIT" +readme = "README.md" +documentation = "https://docs.airbyte.com/integrations/destinations/weaviate" +homepage = "https://airbyte.com" +repository = "https://github.com/airbytehq/airbyte" + +[[tool.poetry.packages]] +include = "destination_weaviate" + +[tool.poetry.dependencies] +python = "^3.9,<3.12" +airbyte-cdk = {version = "0.81.6", extras = ["vector-db-based"]} +weaviate-client = "3.25.2" + +[tool.poetry.group.dev.dependencies] +pytest = "^7.2" +pytest-docker = "2.0.1" +docker = "*" +ruff = "^0.3.2" +mypy = "^1.9.0" + +[tool.poetry.scripts] +destination-weaviate = "destination_weaviate.run:run" \ No newline at end of file diff --git a/airbyte-integrations/connectors/destination-weaviate/requirements.txt b/airbyte-integrations/connectors/destination-weaviate/requirements.txt deleted file mode 100644 index d6e1198b1ab1f..0000000000000 --- a/airbyte-integrations/connectors/destination-weaviate/requirements.txt +++ /dev/null @@ -1 +0,0 @@ --e . diff --git a/airbyte-integrations/connectors/destination-weaviate/setup.py b/airbyte-integrations/connectors/destination-weaviate/setup.py deleted file mode 100644 index 0a49aa856b74a..0000000000000 --- a/airbyte-integrations/connectors/destination-weaviate/setup.py +++ /dev/null @@ -1,23 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = ["airbyte-cdk[vector-db-based]==0.57.0", "weaviate-client==3.25.2"] - -TEST_REQUIREMENTS = ["pytest~=6.2", "docker", "pytest-docker==2.0.1"] - -setup( - name="destination_weaviate", - description="Destination implementation for Weaviate.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={"": ["*.json"]}, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/requirements.txt b/airbyte-integrations/connectors/requirements.txt deleted file mode 100644 index 924cc59de2957..0000000000000 --- a/airbyte-integrations/connectors/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -invoke~=1.6.0 -virtualenv~=20.10.0 diff --git a/airbyte-integrations/connectors/source-airtable/metadata.yaml b/airbyte-integrations/connectors/source-airtable/metadata.yaml index e6c3777e43c48..1f1bb77ddcfee 100644 --- a/airbyte-integrations/connectors/source-airtable/metadata.yaml +++ b/airbyte-integrations/connectors/source-airtable/metadata.yaml @@ -11,12 +11,13 @@ data: connectorSubtype: api connectorType: source definitionId: 14c6e7ea-97ed-4f5e-a7b5-25e9a80b8212 - dockerImageTag: 4.1.6 + dockerImageTag: 4.2.0 dockerRepository: airbyte/source-airtable documentationUrl: https://docs.airbyte.com/integrations/sources/airtable githubIssueLabel: source-airtable icon: airtable.svg license: MIT + maxSecondsBetweenMessages: 5400 name: Airtable remoteRegistries: pypi: diff --git a/airbyte-integrations/connectors/source-airtable/poetry.lock b/airbyte-integrations/connectors/source-airtable/poetry.lock index 226b83b4d7cfd..7cfdd7f18c76a 100644 --- a/airbyte-integrations/connectors/source-airtable/poetry.lock +++ b/airbyte-integrations/connectors/source-airtable/poetry.lock @@ -1,18 +1,18 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.51.41" +version = "0.72.1" description = "A framework for writing Airbyte Connectors." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte-cdk-0.51.41.tar.gz", hash = "sha256:cce614d67872cf66a151e5b72d70f4bf26e2a1ce672c7abfc15a5cb4e45d8429"}, - {file = "airbyte_cdk-0.51.41-py3-none-any.whl", hash = "sha256:bbf82a45d9ec97c4a92b85e3312b327f8060fffec1f7c7ea7dfa720f9adcc13b"}, + {file = "airbyte-cdk-0.72.1.tar.gz", hash = "sha256:1dbd0a11f3784cfdd5afa9f40315c9a6123e803be91f9f861642a78e7ee14cd9"}, + {file = "airbyte_cdk-0.72.1-py3-none-any.whl", hash = "sha256:849077805442286de99f589ecba4be82491a3d9d3f516ce1a8b0cbaf303db9a4"}, ] [package.dependencies] -airbyte-protocol-models = "0.4.2" +airbyte-protocol-models = "0.5.1" backoff = "*" cachetools = "*" Deprecated = ">=1.2,<2.0" @@ -22,8 +22,9 @@ isodate = ">=0.6.1,<0.7.0" Jinja2 = ">=3.1.2,<3.2.0" jsonref = ">=0.2,<1.0" jsonschema = ">=3.2.0,<3.3.0" -pendulum = "*" +pendulum = "<3.0.0" pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" python-dateutil = "*" PyYAML = ">=6.0.1" requests = "*" @@ -31,20 +32,20 @@ requests-cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pyarrow (==12.0.1)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "pyarrow (==12.0.1)"] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] [[package]] name = "airbyte-protocol-models" -version = "0.4.2" +version = "0.5.1" description = "Declares the Airbyte Protocol." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte_protocol_models-0.4.2-py3-none-any.whl", hash = "sha256:d3bbb14d4af9483bd7b08f5eb06f87e7113553bf4baed3998af95be873a0d821"}, - {file = "airbyte_protocol_models-0.4.2.tar.gz", hash = "sha256:67b149d4812f8fdb88396b161274aa73cf0e16f22e35ce44f2bfc4d47e51915c"}, + {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, + {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, ] [package.dependencies] @@ -103,13 +104,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -466,113 +467,48 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] name = "pendulum" -version = "3.0.0" +version = "2.1.2" description = "Python datetimes made easy" optional = false -python-versions = ">=3.8" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ - {file = "pendulum-3.0.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2cf9e53ef11668e07f73190c805dbdf07a1939c3298b78d5a9203a86775d1bfd"}, - {file = "pendulum-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fb551b9b5e6059377889d2d878d940fd0bbb80ae4810543db18e6f77b02c5ef6"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c58227ac260d5b01fc1025176d7b31858c9f62595737f350d22124a9a3ad82d"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60fb6f415fea93a11c52578eaa10594568a6716602be8430b167eb0d730f3332"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b69f6b4dbcb86f2c2fe696ba991e67347bcf87fe601362a1aba6431454b46bde"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:138afa9c373ee450ede206db5a5e9004fd3011b3c6bbe1e57015395cd076a09f"}, - {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:83d9031f39c6da9677164241fd0d37fbfc9dc8ade7043b5d6d62f56e81af8ad2"}, - {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0c2308af4033fa534f089595bcd40a95a39988ce4059ccd3dc6acb9ef14ca44a"}, - {file = "pendulum-3.0.0-cp310-none-win_amd64.whl", hash = "sha256:9a59637cdb8462bdf2dbcb9d389518c0263799189d773ad5c11db6b13064fa79"}, - {file = "pendulum-3.0.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3725245c0352c95d6ca297193192020d1b0c0f83d5ee6bb09964edc2b5a2d508"}, - {file = "pendulum-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6c035f03a3e565ed132927e2c1b691de0dbf4eb53b02a5a3c5a97e1a64e17bec"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:597e66e63cbd68dd6d58ac46cb7a92363d2088d37ccde2dae4332ef23e95cd00"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99a0f8172e19f3f0c0e4ace0ad1595134d5243cf75985dc2233e8f9e8de263ca"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:77d8839e20f54706aed425bec82a83b4aec74db07f26acd039905d1237a5e1d4"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afde30e8146292b059020fbc8b6f8fd4a60ae7c5e6f0afef937bbb24880bdf01"}, - {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:660434a6fcf6303c4efd36713ca9212c753140107ee169a3fc6c49c4711c2a05"}, - {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dee9e5a48c6999dc1106eb7eea3e3a50e98a50651b72c08a87ee2154e544b33e"}, - {file = "pendulum-3.0.0-cp311-none-win_amd64.whl", hash = "sha256:d4cdecde90aec2d67cebe4042fd2a87a4441cc02152ed7ed8fb3ebb110b94ec4"}, - {file = "pendulum-3.0.0-cp311-none-win_arm64.whl", hash = "sha256:773c3bc4ddda2dda9f1b9d51fe06762f9200f3293d75c4660c19b2614b991d83"}, - {file = "pendulum-3.0.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:409e64e41418c49f973d43a28afe5df1df4f1dd87c41c7c90f1a63f61ae0f1f7"}, - {file = "pendulum-3.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a38ad2121c5ec7c4c190c7334e789c3b4624798859156b138fcc4d92295835dc"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fde4d0b2024b9785f66b7f30ed59281bd60d63d9213cda0eb0910ead777f6d37"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b2c5675769fb6d4c11238132962939b960fcb365436b6d623c5864287faa319"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8af95e03e066826f0f4c65811cbee1b3123d4a45a1c3a2b4fc23c4b0dff893b5"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2165a8f33cb15e06c67070b8afc87a62b85c5a273e3aaa6bc9d15c93a4920d6f"}, - {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ad5e65b874b5e56bd942546ea7ba9dd1d6a25121db1c517700f1c9de91b28518"}, - {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17fe4b2c844bbf5f0ece69cfd959fa02957c61317b2161763950d88fed8e13b9"}, - {file = "pendulum-3.0.0-cp312-none-win_amd64.whl", hash = "sha256:78f8f4e7efe5066aca24a7a57511b9c2119f5c2b5eb81c46ff9222ce11e0a7a5"}, - {file = "pendulum-3.0.0-cp312-none-win_arm64.whl", hash = "sha256:28f49d8d1e32aae9c284a90b6bb3873eee15ec6e1d9042edd611b22a94ac462f"}, - {file = "pendulum-3.0.0-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:d4e2512f4e1a4670284a153b214db9719eb5d14ac55ada5b76cbdb8c5c00399d"}, - {file = "pendulum-3.0.0-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:3d897eb50883cc58d9b92f6405245f84b9286cd2de6e8694cb9ea5cb15195a32"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e169cc2ca419517f397811bbe4589cf3cd13fca6dc38bb352ba15ea90739ebb"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f17c3084a4524ebefd9255513692f7e7360e23c8853dc6f10c64cc184e1217ab"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:826d6e258052715f64d05ae0fc9040c0151e6a87aae7c109ba9a0ed930ce4000"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2aae97087872ef152a0c40e06100b3665d8cb86b59bc8471ca7c26132fccd0f"}, - {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ac65eeec2250d03106b5e81284ad47f0d417ca299a45e89ccc69e36130ca8bc7"}, - {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a5346d08f3f4a6e9e672187faa179c7bf9227897081d7121866358af369f44f9"}, - {file = "pendulum-3.0.0-cp37-none-win_amd64.whl", hash = "sha256:235d64e87946d8f95c796af34818c76e0f88c94d624c268693c85b723b698aa9"}, - {file = "pendulum-3.0.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:6a881d9c2a7f85bc9adafcfe671df5207f51f5715ae61f5d838b77a1356e8b7b"}, - {file = "pendulum-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d7762d2076b9b1cb718a6631ad6c16c23fc3fac76cbb8c454e81e80be98daa34"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e8e36a8130819d97a479a0e7bf379b66b3b1b520e5dc46bd7eb14634338df8c"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7dc843253ac373358ffc0711960e2dd5b94ab67530a3e204d85c6e8cb2c5fa10"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a78ad3635d609ceb1e97d6aedef6a6a6f93433ddb2312888e668365908c7120"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b30a137e9e0d1f751e60e67d11fc67781a572db76b2296f7b4d44554761049d6"}, - {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c95984037987f4a457bb760455d9ca80467be792236b69d0084f228a8ada0162"}, - {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d29c6e578fe0f893766c0d286adbf0b3c726a4e2341eba0917ec79c50274ec16"}, - {file = "pendulum-3.0.0-cp38-none-win_amd64.whl", hash = "sha256:deaba8e16dbfcb3d7a6b5fabdd5a38b7c982809567479987b9c89572df62e027"}, - {file = "pendulum-3.0.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b11aceea5b20b4b5382962b321dbc354af0defe35daa84e9ff3aae3c230df694"}, - {file = "pendulum-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a90d4d504e82ad236afac9adca4d6a19e4865f717034fc69bafb112c320dcc8f"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:825799c6b66e3734227756fa746cc34b3549c48693325b8b9f823cb7d21b19ac"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad769e98dc07972e24afe0cff8d365cb6f0ebc7e65620aa1976fcfbcadc4c6f3"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6fc26907eb5fb8cc6188cc620bc2075a6c534d981a2f045daa5f79dfe50d512"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c717eab1b6d898c00a3e0fa7781d615b5c5136bbd40abe82be100bb06df7a56"}, - {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3ddd1d66d1a714ce43acfe337190be055cdc221d911fc886d5a3aae28e14b76d"}, - {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:822172853d7a9cf6da95d7b66a16c7160cb99ae6df55d44373888181d7a06edc"}, - {file = "pendulum-3.0.0-cp39-none-win_amd64.whl", hash = "sha256:840de1b49cf1ec54c225a2a6f4f0784d50bd47f68e41dc005b7f67c7d5b5f3ae"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3b1f74d1e6ffe5d01d6023870e2ce5c2191486928823196f8575dcc786e107b1"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:729e9f93756a2cdfa77d0fc82068346e9731c7e884097160603872686e570f07"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e586acc0b450cd21cbf0db6bae386237011b75260a3adceddc4be15334689a9a"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22e7944ffc1f0099a79ff468ee9630c73f8c7835cd76fdb57ef7320e6a409df4"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fa30af36bd8e50686846bdace37cf6707bdd044e5cb6e1109acbad3277232e04"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:440215347b11914ae707981b9a57ab9c7b6983ab0babde07063c6ee75c0dc6e7"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:314c4038dc5e6a52991570f50edb2f08c339debdf8cea68ac355b32c4174e820"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5acb1d386337415f74f4d1955c4ce8d0201978c162927d07df8eb0692b2d8533"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a789e12fbdefaffb7b8ac67f9d8f22ba17a3050ceaaa635cd1cc4645773a4b1e"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:860aa9b8a888e5913bd70d819306749e5eb488e6b99cd6c47beb701b22bdecf5"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:5ebc65ea033ef0281368217fbf59f5cb05b338ac4dd23d60959c7afcd79a60a0"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d9fef18ab0386ef6a9ac7bad7e43ded42c83ff7ad412f950633854f90d59afa8"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1c134ba2f0571d0b68b83f6972e2307a55a5a849e7dac8505c715c531d2a8795"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:385680812e7e18af200bb9b4a49777418c32422d05ad5a8eb85144c4a285907b"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eec91cd87c59fb32ec49eb722f375bd58f4be790cae11c1b70fac3ee4f00da0"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4386bffeca23c4b69ad50a36211f75b35a4deb6210bdca112ac3043deb7e494a"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dfbcf1661d7146d7698da4b86e7f04814221081e9fe154183e34f4c5f5fa3bf8"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:04a1094a5aa1daa34a6b57c865b25f691848c61583fb22722a4df5699f6bf74c"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5b0ec85b9045bd49dd3a3493a5e7ddfd31c36a2a60da387c419fa04abcaecb23"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0a15b90129765b705eb2039062a6daf4d22c4e28d1a54fa260892e8c3ae6e157"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:bb8f6d7acd67a67d6fedd361ad2958ff0539445ef51cbe8cd288db4306503cd0"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd69b15374bef7e4b4440612915315cc42e8575fcda2a3d7586a0d88192d0c88"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc00f8110db6898360c53c812872662e077eaf9c75515d53ecc65d886eec209a"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:83a44e8b40655d0ba565a5c3d1365d27e3e6778ae2a05b69124db9e471255c4a"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1a3604e9fbc06b788041b2a8b78f75c243021e0f512447806a6d37ee5214905d"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:92c307ae7accebd06cbae4729f0ba9fa724df5f7d91a0964b1b972a22baa482b"}, - {file = "pendulum-3.0.0.tar.gz", hash = "sha256:5d034998dea404ec31fae27af6b22cff1708f830a1ed7353be4d1019bb9f584e"}, + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, ] [package.dependencies] -python-dateutil = ">=2.6" -tzdata = ">=2020.1" - -[package.extras] -test = ["time-machine (>=2.6.0)"] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" [[package]] name = "platformdirs" @@ -667,6 +603,21 @@ typing-extensions = ">=4.2.0" dotenv = ["python-dotenv (>=0.10.4)"] email = ["email-validator (>=1.0.3)"] +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + [[package]] name = "pyrsistent" version = "0.20.0" @@ -751,18 +702,29 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] six = ">=1.5" +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + [[package]] name = "pyyaml" version = "6.0.1" @@ -788,7 +750,6 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -846,13 +807,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -864,15 +825,15 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" @@ -895,19 +856,19 @@ test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "tes [[package]] name = "setuptools" -version = "69.1.0" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -933,24 +894,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, -] - -[[package]] -name = "tzdata" -version = "2024.1" -description = "Provider of IANA time zone data" -optional = false -python-versions = ">=2" -files = [ - {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, - {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -969,13 +919,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1080,4 +1030,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "77f1e32173eb6c3117ba3b83b24e6d36db3109cc6ff5c9f292c590998617b18e" +content-hash = "25d79195c052c9654e64e6cd73809188b3aa16bd228841f214ff871a895c9c6c" diff --git a/airbyte-integrations/connectors/source-airtable/pyproject.toml b/airbyte-integrations/connectors/source-airtable/pyproject.toml index abfae85d96bd4..0941b5fc061fe 100644 --- a/airbyte-integrations/connectors/source-airtable/pyproject.toml +++ b/airbyte-integrations/connectors/source-airtable/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "4.1.6" +version = "4.2.0" name = "source-airtable" description = "Source implementation for Airtable." authors = [ "Airbyte ",] @@ -17,7 +17,7 @@ include = "source_airtable" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "==0.51.41" +airbyte-cdk = "^0" [tool.poetry.scripts] source-airtable = "source_airtable.run:run" diff --git a/airbyte-integrations/connectors/source-amazon-ads/acceptance-test-config.yml b/airbyte-integrations/connectors/source-amazon-ads/acceptance-test-config.yml index eff7c3273bf15..50127adf3e5d5 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-amazon-ads/acceptance-test-config.yml @@ -29,6 +29,8 @@ acceptance_tests: bypass_reason: "can't populate stream because it requires real ad campaign" - name: sponsored_display_creatives bypass_reason: "can't populate stream because it requires real ad campaign" + - name: sponsored_product_ad_group_bid_recommendations + bypass_reason: "data is updated frequently" timeout_seconds: 2400 expect_records: path: integration_tests/expected_records.jsonl diff --git a/airbyte-integrations/connectors/source-amazon-ads/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-amazon-ads/integration_tests/expected_records.jsonl index bf609dee5420b..126530502c8a0 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/integration_tests/expected_records.jsonl +++ b/airbyte-integrations/connectors/source-amazon-ads/integration_tests/expected_records.jsonl @@ -48,86 +48,31 @@ {"stream":"sponsored_display_product_ads","data":{"adId":195948665185008,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B000BNQBOA","state":"enabled"},"emitted_at":1659020219614} {"stream":"sponsored_display_product_ads","data":{"adId":130802512011075,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B091G1HT4P","state":"enabled"},"emitted_at":1659020219614} {"stream":"sponsored_display_targetings","data":{"adGroupId":239470166910761,"bid":0.4,"expression":[{"type":"similarProduct"}],"expressionType":"auto","resolvedExpression":[{"type":"similarProduct"}],"state":"enabled","targetId":124150067548052,"campaignId": 25934734632378},"emitted_at":1659020220625} -{"stream":"sponsored_product_campaigns","data":{"campaignId":39413387973397,"name":"Test campaging for profileId 1861552880916640","campaignType":"sponsoredProducts","targetingType":"manual","premiumBidAdjustment":true,"dailyBudget":10,"ruleBasedBudget":{"isProcessing":false},"startDate":"20220705","endDate":"20220712","state":"paused","bidding":{"strategy":"legacyForSales","adjustments":[{"predicate":"placementTop","percentage":50}]},"tags":{"PONumber":"examplePONumber","accountManager":"exampleAccountManager"}},"emitted_at":1687524797996} -{"stream":"sponsored_product_campaigns","data":{"campaignId":135264288913079,"name":"Campaign - 7/5/2022 18:14:02","campaignType":"sponsoredProducts","targetingType":"auto","premiumBidAdjustment":false,"dailyBudget":10,"startDate":"20220705","state":"enabled","bidding":{"strategy":"legacyForSales","adjustments":[]},"portfolioId":270076898441727},"emitted_at":1687524798170} -{"stream":"sponsored_product_campaigns","data":{"campaignId":191249325250025,"name":"Campaign - 7/8/2022 13:57:48","campaignType":"sponsoredProducts","targetingType":"auto","premiumBidAdjustment":true,"dailyBudget":50,"ruleBasedBudget":{"isProcessing":false},"startDate":"20220708","state":"enabled","bidding":{"strategy":"legacyForSales","adjustments":[{"predicate":"placementProductPage","percentage":100},{"predicate":"placementTop","percentage":100}]},"portfolioId":253945852845204},"emitted_at":1687524798171} -{"stream":"sponsored_product_campaigns","data":{"campaignId":146003174711486,"name":"Test campaging for profileId 3039403378822505","campaignType":"sponsoredProducts","targetingType":"manual","premiumBidAdjustment":true,"dailyBudget":2,"startDate":"20220705","endDate":"20231111","state":"enabled","bidding":{"strategy":"legacyForSales","adjustments":[{"predicate":"placementTop","percentage":50}]},"tags":{"PONumber":"examplePONumber","accountManager":"exampleAccountManager"}},"emitted_at":1687524798327} -{"stream":"sponsored_product_ad_groups","data":{"adGroupId":226404883721634,"name":"My AdGroup for Campaign 39413387973397","campaignId":39413387973397,"defaultBid":10,"state":"enabled"},"emitted_at":1659020222108} -{"stream":"sponsored_product_ad_groups","data":{"adGroupId":183961953969922,"name":"Ad group - 7/5/2022 18:14:02","campaignId":135264288913079,"defaultBid":0.75,"state":"enabled"},"emitted_at":1659020222276} -{"stream":"sponsored_product_ad_groups","data":{"adGroupId":108551155050351,"name":"Ad group - 7/8/2022 13:57:48","campaignId":191249325250025,"defaultBid":1,"state":"enabled"},"emitted_at":1659020222276} -{"stream":"sponsored_product_ad_groups","data":{"adGroupId":103188883625219,"name":"My AdGroup for Campaign 146003174711486","campaignId":146003174711486,"defaultBid":10,"state":"enabled"},"emitted_at":1659020222593} -{"stream":"sponsored_product_keywords","data":{"keywordId":88368653576677,"adGroupId":226404883721634,"campaignId":39413387973397,"keywordText":"keyword1","matchType":"exact","state":"enabled","bid":1.12},"emitted_at":1659020223173} -{"stream":"sponsored_product_keywords","data":{"keywordId":256414981667762,"adGroupId":226404883721634,"campaignId":39413387973397,"keywordText":"test book","matchType":"broad","state":"enabled","bid":1.12},"emitted_at":1659020223174} -{"stream":"sponsored_product_keywords","data":{"keywordId":162522197737998,"adGroupId":226404883721634,"campaignId":39413387973397,"keywordText":"test book","matchType":"phrase","state":"enabled","bid":2.85},"emitted_at":1659020223175} -{"stream":"sponsored_product_keywords","data":{"keywordId":156474025571250,"adGroupId":226404883721634,"campaignId":39413387973397,"keywordText":"test book","matchType":"exact","state":"enabled","bid":1.12},"emitted_at":1659020223175} -{"stream":"sponsored_product_keywords","data":{"keywordId":97960974522677,"adGroupId":226404883721634,"campaignId":39413387973397,"keywordText":"exam book","matchType":"broad","state":"enabled","bid":0.83},"emitted_at":1659020223175} -{"stream":"sponsored_product_keywords","data":{"keywordId":21494218191267,"adGroupId":226404883721634,"campaignId":39413387973397,"keywordText":"exam book","matchType":"phrase","state":"enabled","bid":4.06},"emitted_at":1659020223175} -{"stream":"sponsored_product_keywords","data":{"keywordId":122265145299463,"adGroupId":226404883721634,"campaignId":39413387973397,"keywordText":"exam book","matchType":"exact","state":"enabled","bid":1.12},"emitted_at":1659020223176} -{"stream":"sponsored_product_keywords","data":{"keywordId":105707339702386,"adGroupId":226404883721634,"campaignId":39413387973397,"keywordText":"smartphone","matchType":"broad","state":"enabled","bid":3.52},"emitted_at":1659020223176} -{"stream":"sponsored_product_keywords","data":{"keywordId":185938124401124,"adGroupId":226404883721634,"campaignId":39413387973397,"keywordText":"smartphone","matchType":"phrase","state":"enabled","bid":3.44},"emitted_at":1659020223176} -{"stream":"sponsored_product_keywords","data":{"keywordId":16455263285469,"adGroupId":226404883721634,"campaignId":39413387973397,"keywordText":"smartphone","matchType":"exact","state":"enabled","bid":3.69},"emitted_at":1659020223177} -{"stream":"sponsored_product_negative_keywords","data":{"keywordId":32531566025493,"adGroupId":226404883721634,"campaignId":39413387973397,"keywordText":"negkeyword1","matchType":"negativeExact","state":"enabled"},"emitted_at":1659020224091} -{"stream":"sponsored_product_ads","data":{"adId":134721479349712,"adGroupId":226404883721634,"campaignId":39413387973397,"asin":"B09X3NTQ5S","state":"enabled"},"emitted_at":1659020225056} -{"stream":"sponsored_product_ads","data":{"adId":265970953521535,"adGroupId":226404883721634,"campaignId":39413387973397,"asin":"B09X3QCS24","state":"enabled"},"emitted_at":1659020225057} -{"stream":"sponsored_product_ads","data":{"adId":253366527049144,"adGroupId":226404883721634,"campaignId":39413387973397,"asin":"B09X3P7D6Z","state":"enabled"},"emitted_at":1659020225057} -{"stream":"sponsored_product_ads","data":{"adId":44137758141732,"adGroupId":183961953969922,"campaignId":135264288913079,"asin":"B000VHYM2E","sku":"0R-4KDA-Z2U8","state":"enabled"},"emitted_at":1659020225248} -{"stream":"sponsored_product_ads","data":{"adId":126456292487945,"adGroupId":108551155050351,"campaignId":191249325250025,"asin":"B074K5MDLW","sku":"2J-D6V7-C8XI","state":"enabled"},"emitted_at":1659020225248} -{"stream":"sponsored_product_ads","data":{"adId":125773733335504,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B000BNT390","state":"enabled"},"emitted_at":1659020225461} -{"stream":"sponsored_product_ads","data":{"adId":22923447445879,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B000BNQBJK","state":"enabled"},"emitted_at":1659020225461} -{"stream":"sponsored_product_ads","data":{"adId":174434781640143,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B006K1JR0W","state":"enabled"},"emitted_at":1659020225462} -{"stream":"sponsored_product_ads","data":{"adId":209576432984926,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B000BNV58W","state":"enabled"},"emitted_at":1659020225462} -{"stream":"sponsored_product_ads","data":{"adId":78757678617297,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B00U2E0BD0","state":"enabled"},"emitted_at":1659020225462} -{"stream":"sponsored_product_ads","data":{"adId":193756923178712,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B000BNQBNG","state":"enabled"},"emitted_at":1659020225462} -{"stream":"sponsored_product_ads","data":{"adId":31271769792588,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B000BNT38G","state":"enabled"},"emitted_at":1659020225463} -{"stream":"sponsored_product_ads","data":{"adId":150153237605370,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B000BNV596","state":"enabled"},"emitted_at":1659020225463} -{"stream":"sponsored_product_ads","data":{"adId":2074333536480,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B00U2E0R66","state":"enabled"},"emitted_at":1659020225463} -{"stream":"sponsored_product_ads","data":{"adId":123533571549424,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B000BNRD2O","state":"enabled"},"emitted_at":1659020225463} -{"stream":"sponsored_product_ads","data":{"adId":217260138761504,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B091FZ92NV","state":"enabled"},"emitted_at":1659020225464} -{"stream":"sponsored_product_ads","data":{"adId":145457886517316,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B000BNRD1U","state":"enabled"},"emitted_at":1659020225464} -{"stream":"sponsored_product_ads","data":{"adId":203822232798249,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B00U2E9VEK","state":"enabled"},"emitted_at":1659020225464} -{"stream":"sponsored_product_ads","data":{"adId":117735697461953,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B000BNQBNQ","state":"enabled"},"emitted_at":1659020225464} -{"stream":"sponsored_product_ads","data":{"adId":142089319699283,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B091G29WN9","state":"enabled"},"emitted_at":1659020225465} -{"stream":"sponsored_product_ads","data":{"adId":95431347262692,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B00U2E1JCM","state":"enabled"},"emitted_at":1659020225465} -{"stream":"sponsored_product_ads","data":{"adId":155014902487440,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B000BNQBJU","state":"enabled"},"emitted_at":1659020225465} -{"stream":"sponsored_product_ads","data":{"adId":11743222321360,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B000BNT3AE","state":"enabled"},"emitted_at":1659020225465} -{"stream":"sponsored_product_ads","data":{"adId":103439653344998,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B00RW78E52","state":"enabled"},"emitted_at":1659020225466} -{"stream":"sponsored_product_ads","data":{"adId":265969657657801,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B000BNT39K","state":"enabled"},"emitted_at":1659020225466} -{"stream":"sponsored_product_ads","data":{"adId":109412610635634,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B000BNT39U","state":"enabled"},"emitted_at":1659020225466} -{"stream":"sponsored_product_ads","data":{"adId":136393331771998,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B000BNV59Q","state":"enabled"},"emitted_at":1659020225466} -{"stream":"sponsored_product_ads","data":{"adId":186420999434919,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B000BNRD2E","state":"enabled"},"emitted_at":1659020225467} -{"stream":"sponsored_product_ads","data":{"adId":278853238562368,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B091G35ZDQ","state":"enabled"},"emitted_at":1659020225467} -{"stream":"sponsored_product_ads","data":{"adId":166899201791771,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B00U2E1SLE","state":"enabled"},"emitted_at":1659020225467} -{"stream":"sponsored_product_ads","data":{"adId":109280751164007,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B091G3QCHL","state":"enabled"},"emitted_at":1659020225467} -{"stream":"sponsored_product_ads","data":{"adId":151372475824008,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B000BNT39A","state":"enabled"},"emitted_at":1659020225467} -{"stream":"sponsored_product_ads","data":{"adId":111491538035732,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B00CKZKG20","state":"enabled"},"emitted_at":1659020225468} -{"stream":"sponsored_product_ads","data":{"adId":61045475129398,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B00U2E3HUO","state":"enabled"},"emitted_at":1659020225468} -{"stream":"sponsored_product_ads","data":{"adId":125617015283672,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B000BNQBO0","state":"enabled"},"emitted_at":1659020225468} -{"stream":"sponsored_product_ads","data":{"adId":183608040922804,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B000BNQBMW","state":"enabled"},"emitted_at":1659020225468} -{"stream":"sponsored_product_ads","data":{"adId":252975632234287,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B000BNV58M","state":"enabled"},"emitted_at":1659020225469} -{"stream":"sponsored_product_ads","data":{"adId":223374763750850,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B000BNRD2Y","state":"enabled"},"emitted_at":1659020225469} -{"stream":"sponsored_product_ads","data":{"adId":155052344322362,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B000BNT37M","state":"enabled"},"emitted_at":1659020225469} -{"stream":"sponsored_product_ads","data":{"adId":210510170479158,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B000BNT3AY","state":"enabled"},"emitted_at":1659020225470} -{"stream":"sponsored_product_ads","data":{"adId":179517989169690,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B000BNT37W","state":"enabled"},"emitted_at":1659020225470} -{"stream":"sponsored_product_ads","data":{"adId":163992879107492,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B000BNV5AA","state":"enabled"},"emitted_at":1659020225470} -{"stream":"sponsored_product_ads","data":{"adId":103527738992867,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B000BNT386","state":"enabled"},"emitted_at":1659020225470} -{"stream":"sponsored_product_ads","data":{"adId":195948665185008,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B000BNQBOA","state":"enabled"},"emitted_at":1659020225470} -{"stream":"sponsored_product_ads","data":{"adId":130802512011075,"adGroupId":239470166910761,"campaignId":25934734632378,"asin":"B091G1HT4P","state":"enabled"},"emitted_at":1659020225471} -{"stream":"sponsored_product_targetings","data":{"targetId":50319181484813,"adGroupId":183961953969922,"campaignId":135264288913079,"expressionType":"auto","state":"enabled","expression":[{"type":"queryHighRelMatches"}],"resolvedExpression":[{"type":"queryHighRelMatches"}]},"emitted_at":1659020226434} -{"stream":"sponsored_product_targetings","data":{"targetId":27674318672023,"adGroupId":183961953969922,"campaignId":135264288913079,"expressionType":"auto","state":"enabled","expression":[{"type":"queryBroadRelMatches"}],"resolvedExpression":[{"type":"queryBroadRelMatches"}]},"emitted_at":1659020226435} -{"stream":"sponsored_product_targetings","data":{"targetId":231060819625654,"adGroupId":183961953969922,"campaignId":135264288913079,"expressionType":"auto","state":"enabled","expression":[{"type":"asinAccessoryRelated"}],"resolvedExpression":[{"type":"asinAccessoryRelated"}]},"emitted_at":1659020226435} -{"stream":"sponsored_product_targetings","data":{"targetId":223980840024498,"adGroupId":183961953969922,"campaignId":135264288913079,"expressionType":"auto","state":"enabled","expression":[{"type":"asinSubstituteRelated"}],"resolvedExpression":[{"type":"asinSubstituteRelated"}]},"emitted_at":1659020226436} -{"stream":"sponsored_product_targetings","data":{"targetId":62579800516352,"adGroupId":108551155050351,"campaignId":191249325250025,"expressionType":"auto","state":"enabled","expression":[{"type":"queryHighRelMatches"}],"resolvedExpression":[{"type":"queryHighRelMatches"}]},"emitted_at":1659020226436} -{"stream":"sponsored_product_targetings","data":{"targetId":232221427954900,"adGroupId":108551155050351,"campaignId":191249325250025,"expressionType":"auto","state":"enabled","expression":[{"type":"queryBroadRelMatches"}],"resolvedExpression":[{"type":"queryBroadRelMatches"}]},"emitted_at":1659020226436} -{"stream":"sponsored_product_targetings","data":{"targetId":12739477778779,"adGroupId":108551155050351,"campaignId":191249325250025,"expressionType":"auto","state":"enabled","expression":[{"type":"asinAccessoryRelated"}],"resolvedExpression":[{"type":"asinAccessoryRelated"}]},"emitted_at":1659020226436} -{"stream":"sponsored_product_targetings","data":{"targetId":1189452552122,"adGroupId":108551155050351,"campaignId":191249325250025,"expressionType":"auto","state":"enabled","expression":[{"type":"asinSubstituteRelated"}],"resolvedExpression":[{"type":"asinSubstituteRelated"}]},"emitted_at":1659020226437} -{"stream":"sponsored_product_campaign_negative_keywords","data":{"keywordId":367623300145491,"campaignId":39413387973397,"keywordText":"campaign negative keyword","matchType":"negativeExact","state":"enabled"},"emitted_at":1687197089168} -{"stream":"sponsored_product_campaign_negative_keywords","data":{"keywordId":313227767817048,"campaignId":39413387973397,"keywordText":"campaign negative keyword2","matchType":"negativeExact","state":"enabled"},"emitted_at":1687197089170} -{"stream":"sponsored_product_campaign_negative_keywords","data":{"keywordId":362264772936192,"campaignId":191249325250025,"keywordText":"negative","matchType":"negativeExact","state":"enabled"},"emitted_at":1687197089322} -{"stream":"sponsored_product_campaign_negative_keywords","data":{"keywordId":318852596875190,"campaignId":191249325250025,"keywordText":"negative phrase","matchType":"negativeExact","state":"enabled"},"emitted_at":1687197089323} -{"stream":"sponsored_product_campaign_negative_keywords","data":{"keywordId":435410875929367,"campaignId":191249325250025,"keywordText":"another negative phrase","matchType":"negativeExact","state":"enabled"},"emitted_at":1687197089324} {"stream":"sponsored_display_budget_rules","data":{"createdDate":1657024512836,"lastUpdatedDate":1657024512836,"ruleDetails":{"budgetIncreaseBy":{"type":"PERCENT","value":32},"duration":{"dateRangeTypeRuleDuration":null,"eventTypeRuleDuration":{"endDate":"20220713","eventId":"ae0226d3-9f97-5122-a749-2e9ba741a2dc","eventName":"Prime Day","startDate":"20220712"}},"name":"ex","performanceMeasureCondition":null,"recurrence":{"daysOfWeek":null,"intraDaySchedule":null,"type":"DAILY"},"ruleType":"SCHEDULE"},"ruleId":"b5abeec6-7624-49e7-8571-97b8ba61551e","ruleState":"ACTIVE","ruleStatus":"EXPIRED","ruleStatusDetails":null},"emitted_at":1687254964816} {"stream":"sponsored_display_budget_rules","data":{"createdDate":1686765545918,"lastUpdatedDate":1686765545918,"ruleDetails":{"budgetIncreaseBy":{"type":"PERCENT","value":1},"duration":{"dateRangeTypeRuleDuration":null,"eventTypeRuleDuration":{"endDate":"20230619","eventId":"553ddee0-8178-544b-a54a-f8918d21ad5f","eventName":"Father's Day","startDate":"20230611"}},"name":"Rule for vadim","performanceMeasureCondition":null,"recurrence":{"daysOfWeek":null,"intraDaySchedule":null,"type":"DAILY"},"ruleType":"SCHEDULE"},"ruleId":"039ff522-f785-4409-8f3a-f6f884ec1750","ruleState":"ACTIVE","ruleStatus":"EXPIRED","ruleStatusDetails":null},"emitted_at":1687254965077} {"stream": "portfolios", "data": {"portfolioId": 253945852845204, "name": "Test Portfolio 2", "inBudget": true, "state": "enabled", "creationDate": 1687510907465, "lastUpdatedDate": 1687510907465, "servingStatus": "PORTFOLIO_STATUS_ENABLED"}, "emitted_at": 1688475309870} {"stream": "portfolios", "data": {"portfolioId": 270076898441727, "name": "Test Portfolio", "budget": {"amount": 1.0, "currencyCode": "USD", "policy": "dateRange", "startDate": "20230623", "endDate": "20230624"}, "inBudget": true, "state": "enabled", "creationDate": 1687510616329, "lastUpdatedDate": 1687514774484, "servingStatus": "PORTFOLIO_ENDED"}, "emitted_at": 1688475309871} -{"stream":"sponsored_product_ad_group_suggested_keywords","data":{"adGroupId":103188883625219,"suggestedKeywords":[{"keywordText":"disposable hotel slippers","matchType":"broad"},{"keywordText":"hotel slippers women","matchType":"broad"},{"keywordText":"slippers bulk","matchType":"broad"},{"keywordText":"spa slipper","matchType":"broad"},{"keywordText":"disposable guest slippers","matchType":"broad"},{"keywordText":"hotel slipper","matchType":"broad"},{"keywordText":"black bulk slippers","matchType":"broad"},{"keywordText":"disposable black slippers","matchType":"broad"},{"keywordText":"toothbrush oral b medium","matchType":"broad"},{"keywordText":"toothbrush soft oral b","matchType":"broad"},{"keywordText":"diamond cat food wet","matchType":"broad"},{"keywordText":"toothbrush 1 count","matchType":"broad"},{"keywordText":"black slipper pack","matchType":"broad"},{"keywordText":"toothbrush medium","matchType":"broad"},{"keywordText":"peach mango propel water","matchType":"broad"},{"keywordText":"black guest slippers","matchType":"broad"},{"keywordText":"black hotel slippers","matchType":"broad"},{"keywordText":"black house slippers bulk","matchType":"broad"},{"keywordText":"black spa slippers","matchType":"broad"},{"keywordText":"diamond natural wet cat food","matchType":"broad"},{"keywordText":"house slippers 6 pack","matchType":"broad"},{"keywordText":"house slippers guests bulk","matchType":"broad"},{"keywordText":"single toothbrush","matchType":"broad"},{"keywordText":"spa slippers women black","matchType":"broad"},{"keywordText":"toothbrush oral b manual","matchType":"broad"},{"keywordText":"medium toothbrush single","matchType":"broad"},{"keywordText":"toothbrush charcoal","matchType":"broad"},{"keywordText":"toothbrush hard","matchType":"broad"},{"keywordText":"tooth brush medium","matchType":"broad"},{"keywordText":"tooth brush soft","matchType":"broad"},{"keywordText":"house slippers disposable","matchType":"broad"},{"keywordText":"toothbrush oral b","matchType":"broad"},{"keywordText":"toothbrush soft extra","matchType":"broad"},{"keywordText":"black disposable slippers guests","matchType":"broad"},{"keywordText":"bulk slippers guests washable","matchType":"broad"},{"keywordText":"crest toothbrush","matchType":"broad"},{"keywordText":"toothbrush travel","matchType":"broad"},{"keywordText":"black spa slippers bulk","matchType":"broad"},{"keywordText":"house slippers bulk","matchType":"broad"},{"keywordText":"house slippers visitor","matchType":"broad"},{"keywordText":"disposable slipper","matchType":"broad"},{"keywordText":"spa slippers disposable black","matchType":"broad"},{"keywordText":"toothbrush small","matchType":"broad"},{"keywordText":"cepillo de dientes","matchType":"broad"},{"keywordText":"guest slippers bulk","matchType":"broad"},{"keywordText":"soft toothbrush","matchType":"broad"},{"keywordText":"spa house slippers","matchType":"broad"},{"keywordText":"toothbrush firm","matchType":"broad"},{"keywordText":"toothbrush sensitive","matchType":"broad"},{"keywordText":"bulk pack slippers","matchType":"broad"},{"keywordText":"house guest slippers","matchType":"broad"},{"keywordText":"propel peach water","matchType":"broad"},{"keywordText":"teeth brush","matchType":"broad"},{"keywordText":"tooth brush oral b","matchType":"broad"},{"keywordText":"toothbrush amazon fresh","matchType":"broad"},{"keywordText":"toothbrush oralb","matchType":"broad"},{"keywordText":"toothbrush whitening","matchType":"broad"},{"keywordText":"toothbrusj","matchType":"broad"},{"keywordText":"water propel","matchType":"broad"},{"keywordText":"extra soft tooth brush","matchType":"broad"},{"keywordText":"house slippers guests washable","matchType":"broad"},{"keywordText":"propel peach mango","matchType":"broad"},{"keywordText":"tootbrush","matchType":"broad"},{"keywordText":"toothbrush","matchType":"broad"},{"keywordText":"toothbrush bamboo","matchType":"broad"},{"keywordText":"diamond naturals canned cat food","matchType":"broad"},{"keywordText":"organic potato","matchType":"broad"},{"keywordText":"spa slippers bulk","matchType":"broad"},{"keywordText":"toothbrush oral b white","matchType":"broad"},{"keywordText":"toothbrush soft bristle","matchType":"broad"},{"keywordText":"slippers 12 pair","matchType":"broad"},{"keywordText":"black house guest slippers","matchType":"broad"},{"keywordText":"black house slippers pack","matchType":"broad"},{"keywordText":"black slippers set","matchType":"broad"},{"keywordText":"black washable slippers","matchType":"broad"},{"keywordText":"black washable spa slippers","matchType":"broad"},{"keywordText":"bulk house shoes guests","matchType":"broad"},{"keywordText":"diamond naturals cat food can","matchType":"broad"},{"keywordText":"diamond naturals kitten food wet","matchType":"broad"},{"keywordText":"dispisable slippers","matchType":"broad"},{"keywordText":"disposable house slippers black","matchType":"broad"},{"keywordText":"disposable spa slippers bulk","matchType":"broad"},{"keywordText":"disposable washable slippers","matchType":"broad"},{"keywordText":"disposal house slippers","matchType":"broad"},{"keywordText":"fisposable slippers","matchType":"broad"},{"keywordText":"guest slippers washable set","matchType":"broad"},{"keywordText":"hoise slippers","matchType":"broad"},{"keywordText":"home slipper set","matchType":"broad"},{"keywordText":"house alippers guests","matchType":"broad"},{"keywordText":"house shoes guests washable","matchType":"broad"},{"keywordText":"house slipeprs","matchType":"broad"},{"keywordText":"disposable house slippers guest","matchType":"broad"},{"keywordText":"disposable slippers women","matchType":"broad"},{"keywordText":"disposable spa slippers","matchType":"broad"},{"keywordText":"hotel slippers bulk","matchType":"broad"},{"keywordText":"disposable slippers travel","matchType":"broad"},{"keywordText":"one time use slippers","matchType":"broad"},{"keywordText":"pack slippers guest","matchType":"broad"},{"keywordText":"guest slipper","matchType":"broad"},{"keywordText":"guest slippers washable","matchType":"broad"}]},"emitted_at":1688632533382} -{"stream":"sponsored_product_ad_group_bid_recommendations","data":{"adGroupId":183961953969922,"suggestedBid":{"rangeEnd":1.71,"rangeStart":0.14,"suggested":0.62}},"emitted_at":1688632722904} +{"stream": "sponsored_product_campaigns", "data": {"budget": {"budget": 10.0, "budgetType": "DAILY"}, "campaignId": "135264288913079", "dynamicBidding": {"placementBidding": [], "strategy": "LEGACY_FOR_SALES"}, "name": "Campaign - 7/5/2022 18:14:02", "portfolioId": "270076898441727", "startDate": "2022-07-05", "state": "ENABLED", "tags": {}, "targetingType": "AUTO"}, "emitted_at": 1710888592084} +{"stream": "sponsored_product_campaigns", "data": {"budget": {"budget": 50.0, "budgetType": "DAILY"}, "campaignId": "191249325250025", "dynamicBidding": {"placementBidding": [{"percentage": 100, "placement": "PLACEMENT_PRODUCT_PAGE"}, {"percentage": 100, "placement": "PLACEMENT_TOP"}], "strategy": "LEGACY_FOR_SALES"}, "name": "Campaign - 7/8/2022 13:57:48", "portfolioId": "253945852845204", "startDate": "2022-07-08", "state": "ENABLED", "tags": {}, "targetingType": "AUTO"}, "emitted_at": 1710888592085} +{"stream": "sponsored_product_campaigns", "data": {"budget": {"budget": 10.0, "budgetType": "DAILY"}, "campaignId": "39413387973397", "dynamicBidding": {"placementBidding": [{"percentage": 50, "placement": "PLACEMENT_TOP"}], "strategy": "LEGACY_FOR_SALES"}, "endDate": "2022-07-12", "name": "Test campaging for profileId 1861552880916640#l1iwuw7s954", "startDate": "2022-07-05", "state": "PAUSED", "tags": {"PONumber": "examplePONumber", "accountManager": "exampleAccountManager"}, "targetingType": "MANUAL"}, "emitted_at": 1710888591878} +{"stream": "sponsored_product_ad_groups", "data": {"adGroupId": "183961953969922", "campaignId": "135264288913079", "defaultBid": 0.75, "name": "Ad group - 7/5/2022 18:14:02", "state": "ENABLED"}, "emitted_at": 1710888605592} +{"stream": "sponsored_product_ad_groups", "data": {"adGroupId": "226404883721634", "campaignId": "39413387973397", "defaultBid": 10.0, "name": "My AdGroup for Campaign 39413387973397", "state": "ENABLED"}, "emitted_at": 1710888605408} +{"stream": "sponsored_product_ad_groups", "data": {"adGroupId": "108551155050351", "campaignId": "191249325250025", "defaultBid": 1.0, "name": "Ad group - 7/8/2022 13:57:48", "state": "ENABLED"}, "emitted_at": 1710888605592} +{"stream": "sponsored_product_ad_groups", "data": {"adGroupId": "475489269904624", "campaignId": "556045554720184", "defaultBid": 0.75, "name": "Ad group - 7/5/2023 20:50:20.159", "state": "ENABLED"}, "emitted_at": 1710891690912} +{"stream": "sponsored_product_ad_groups", "data": {"adGroupId": "103188883625219", "campaignId": "146003174711486", "defaultBid": 10.0, "name": "My AdGroup for Campaign 146003174711486", "state": "ENABLED"}, "emitted_at": 1710891691112} +{"stream": "sponsored_product_keywords", "data": {"adGroupId": "226404883721634", "bid": 1.12, "campaignId": "39413387973397", "keywordId": "88368653576677", "keywordText": "keyword1", "matchType": "EXACT", "state": "ENABLED"}, "emitted_at": 1710888676159} +{"stream": "sponsored_product_keywords", "data": {"adGroupId": "226404883721634", "bid": 2.85, "campaignId": "39413387973397", "keywordId": "162522197737998", "keywordText": "test book", "matchType": "PHRASE", "state": "ENABLED"}, "emitted_at": 1710888676160} +{"stream": "sponsored_product_keywords", "data": {"adGroupId": "226404883721634", "bid": 1.12, "campaignId": "39413387973397", "keywordId": "256414981667762", "keywordText": "test book", "matchType": "BROAD", "state": "ENABLED"}, "emitted_at": 1710888676160} +{"stream": "sponsored_product_negative_keywords", "data": {"adGroupId": "226404883721634", "campaignId": "39413387973397", "keywordId": "32531566025493", "keywordText": "negkeyword1", "matchType": "NEGATIVE_EXACT", "state": "ENABLED"}, "emitted_at": 1710888687416} +{"stream": "sponsored_product_ads", "data": {"adGroupId": "226404883721634", "adId": "134721479349712", "asin": "B09X3NTQ5S", "campaignId": "39413387973397", "state": "ENABLED"}, "emitted_at": 1710888734046} +{"stream": "sponsored_product_ads", "data": {"adGroupId": "226404883721634", "adId": "253366527049144", "asin": "B09X3P7D6Z", "campaignId": "39413387973397", "state": "ENABLED"}, "emitted_at": 1710888734047} +{"stream": "sponsored_product_ads", "data": {"adGroupId": "226404883721634", "adId": "265970953521535", "asin": "B09X3QCS24", "campaignId": "39413387973397", "state": "ENABLED"}, "emitted_at": 1710888734047} +{"stream": "sponsored_product_targetings", "data": {"adGroupId": "183961953969922", "campaignId": "135264288913079", "expression": [{"type": "ASIN_ACCESSORY_RELATED"}], "expressionType": "AUTO", "resolvedExpression": [{"type": "ASIN_ACCESSORY_RELATED"}], "state": "ENABLED", "targetId": "231060819625654"}, "emitted_at": 1710888741880} +{"stream": "sponsored_product_targetings", "data": {"adGroupId": "183961953969922", "campaignId": "135264288913079", "expression": [{"type": "QUERY_BROAD_REL_MATCHES"}], "expressionType": "AUTO", "resolvedExpression": [{"type": "QUERY_BROAD_REL_MATCHES"}], "state": "ENABLED", "targetId": "27674318672023"}, "emitted_at": 1710888741880} +{"stream": "sponsored_product_targetings", "data": {"adGroupId": "183961953969922", "campaignId": "135264288913079", "expression": [{"type": "QUERY_HIGH_REL_MATCHES"}], "expressionType": "AUTO", "resolvedExpression": [{"type": "QUERY_HIGH_REL_MATCHES"}], "state": "ENABLED", "targetId": "50319181484813"}, "emitted_at": 1710888741879} +{"stream": "sponsored_product_campaign_negative_keywords", "data": {"campaignId": "191249325250025", "keywordId": "362264772936192", "keywordText": "negative", "matchType": "NEGATIVE_EXACT", "state": "ENABLED"}, "emitted_at": 1710888790876} +{"stream": "sponsored_product_campaign_negative_keywords", "data": {"campaignId": "39413387973397", "keywordId": "313227767817048", "keywordText": "campaign negative keyword2", "matchType": "NEGATIVE_EXACT", "state": "ENABLED"}, "emitted_at": 1710888790672} +{"stream": "sponsored_product_campaign_negative_keywords", "data": {"campaignId": "39413387973397", "keywordId": "367623300145491", "keywordText": "campaign negative keyword", "matchType": "NEGATIVE_EXACT", "state": "ENABLED"}, "emitted_at": 1710888790671} +{"stream": "sponsored_product_ad_group_suggested_keywords", "data": {"adGroupId": 475489269904624, "suggestedKeywords": []}, "emitted_at": 1710889475987} +{"stream": "sponsored_product_ad_group_suggested_keywords", "data": {"adGroupId": 103188883625219, "suggestedKeywords": [{"keywordText": "guest slipper", "matchType": "broad"}, {"keywordText": "bulk hotel slippers", "matchType": "broad"}, {"keywordText": "hotel slippers women", "matchType": "broad"}, {"keywordText": "slippers bulk", "matchType": "broad"}, {"keywordText": "spa slipper", "matchType": "broad"}, {"keywordText": "disposable slipper", "matchType": "broad"}, {"keywordText": "hotel slippers", "matchType": "broad"}, {"keywordText": "disposable guest slippers", "matchType": "broad"}, {"keywordText": "house guest slippers", "matchType": "broad"}, {"keywordText": "oral b stain eraser toothbrush", "matchType": "broad"}, {"keywordText": "black bulk slippers", "matchType": "broad"}, {"keywordText": "disposable black slippers", "matchType": "broad"}, {"keywordText": "toothbrush oral b medium", "matchType": "broad"}, {"keywordText": "toothbrush soft oral b", "matchType": "broad"}, {"keywordText": "diamond cat food wet", "matchType": "broad"}, {"keywordText": "toothbrush 1 count", "matchType": "broad"}, {"keywordText": "black slipper pack", "matchType": "broad"}, {"keywordText": "toothbrush medium", "matchType": "broad"}, {"keywordText": "peach mango propel water", "matchType": "broad"}, {"keywordText": "black guest slippers", "matchType": "broad"}, {"keywordText": "black hotel slippers", "matchType": "broad"}, {"keywordText": "black house slippers bulk", "matchType": "broad"}, {"keywordText": "black spa slippers", "matchType": "broad"}, {"keywordText": "diamond natural wet cat food", "matchType": "broad"}, {"keywordText": "house slippers 6 pack", "matchType": "broad"}, {"keywordText": "house slippers guests bulk", "matchType": "broad"}, {"keywordText": "single toothbrush", "matchType": "broad"}, {"keywordText": "spa slippers women black", "matchType": "broad"}, {"keywordText": "toothbrush oral b manual", "matchType": "broad"}, {"keywordText": "medium toothbrush single", "matchType": "broad"}, {"keywordText": "toothbrush charcoal", "matchType": "broad"}, {"keywordText": "toothbrush hard", "matchType": "broad"}, {"keywordText": "tooth brush medium", "matchType": "broad"}, {"keywordText": "tooth brush soft", "matchType": "broad"}, {"keywordText": "house slippers disposable", "matchType": "broad"}, {"keywordText": "toothbrush oral b", "matchType": "broad"}, {"keywordText": "toothbrush soft extra", "matchType": "broad"}, {"keywordText": "black disposable slippers guests", "matchType": "broad"}, {"keywordText": "bulk slippers guests washable", "matchType": "broad"}, {"keywordText": "crest toothbrush", "matchType": "broad"}, {"keywordText": "toothbrush travel", "matchType": "broad"}, {"keywordText": "black spa slippers bulk", "matchType": "broad"}, {"keywordText": "house slippers bulk", "matchType": "broad"}, {"keywordText": "house slippers visitor", "matchType": "broad"}, {"keywordText": "spa slippers disposable black", "matchType": "broad"}, {"keywordText": "toothbrush small", "matchType": "broad"}, {"keywordText": "cepillo de dientes", "matchType": "broad"}, {"keywordText": "guest slippers bulk", "matchType": "broad"}, {"keywordText": "soft toothbrush", "matchType": "broad"}, {"keywordText": "spa house slippers", "matchType": "broad"}, {"keywordText": "toothbrush firm", "matchType": "broad"}, {"keywordText": "toothbrush sensitive", "matchType": "broad"}, {"keywordText": "bulk pack slippers", "matchType": "broad"}, {"keywordText": "propel peach water", "matchType": "broad"}, {"keywordText": "teeth brush", "matchType": "broad"}, {"keywordText": "tooth brush oral b", "matchType": "broad"}, {"keywordText": "toothbrush amazon fresh", "matchType": "broad"}, {"keywordText": "toothbrush oralb", "matchType": "broad"}, {"keywordText": "toothbrush whitening", "matchType": "broad"}, {"keywordText": "toothbrusj", "matchType": "broad"}, {"keywordText": "water propel", "matchType": "broad"}, {"keywordText": "extra soft tooth brush", "matchType": "broad"}, {"keywordText": "house slippers guests washable", "matchType": "broad"}, {"keywordText": "propel peach mango", "matchType": "broad"}, {"keywordText": "tootbrush", "matchType": "broad"}, {"keywordText": "toothbrush", "matchType": "broad"}, {"keywordText": "toothbrush bamboo", "matchType": "broad"}, {"keywordText": "diamond naturals canned cat food", "matchType": "broad"}, {"keywordText": "organic potato", "matchType": "broad"}, {"keywordText": "spa slippers bulk", "matchType": "broad"}, {"keywordText": "toothbrush oral b white", "matchType": "broad"}, {"keywordText": "toothbrush soft bristle", "matchType": "broad"}, {"keywordText": "12 pairs slippers", "matchType": "broad"}, {"keywordText": "black house guest slippers", "matchType": "broad"}, {"keywordText": "black house slippers pack", "matchType": "broad"}, {"keywordText": "black slippers set", "matchType": "broad"}, {"keywordText": "black washable slippers", "matchType": "broad"}, {"keywordText": "black washable spa slippers", "matchType": "broad"}, {"keywordText": "bulk house shoes guests", "matchType": "broad"}, {"keywordText": "diamond naturals cat food can", "matchType": "broad"}, {"keywordText": "diamond naturals kitten food wet", "matchType": "broad"}, {"keywordText": "dispisable slippers", "matchType": "broad"}, {"keywordText": "disposable house slippers black", "matchType": "broad"}, {"keywordText": "disposable spa slippers bulk", "matchType": "broad"}, {"keywordText": "disposable washable slippers", "matchType": "broad"}, {"keywordText": "disposal house slippers", "matchType": "broad"}, {"keywordText": "fisposable slippers", "matchType": "broad"}, {"keywordText": "guest slippers washable set", "matchType": "broad"}, {"keywordText": "hoise slippers", "matchType": "broad"}, {"keywordText": "home slipper set", "matchType": "broad"}, {"keywordText": "house alippers guests", "matchType": "broad"}, {"keywordText": "guest house shoes washable", "matchType": "broad"}, {"keywordText": "house slipeprs", "matchType": "broad"}, {"keywordText": "disposable hotel slippers", "matchType": "broad"}, {"keywordText": "disposable house slippers guest", "matchType": "broad"}, {"keywordText": "disposable slippers women", "matchType": "broad"}, {"keywordText": "disposable spa slippers", "matchType": "broad"}, {"keywordText": "disposable slippers travel", "matchType": "broad"}, {"keywordText": "one time use slippers", "matchType": "broad"}, {"keywordText": "pack slippers guest", "matchType": "broad"}]}, "emitted_at": 1710889477445} +{"stream": "sponsored_product_ad_group_suggested_keywords", "data": {"adGroupId": 226404883721634, "suggestedKeywords": []}, "emitted_at": 1710889474524} diff --git a/airbyte-integrations/connectors/source-amazon-ads/metadata.yaml b/airbyte-integrations/connectors/source-amazon-ads/metadata.yaml index 1325ac9d18daf..8ea5d10583011 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/metadata.yaml +++ b/airbyte-integrations/connectors/source-amazon-ads/metadata.yaml @@ -13,12 +13,13 @@ data: connectorSubtype: api connectorType: source definitionId: c6b0a29e-1da9-4512-9002-7bfd0cba2246 - dockerImageTag: 4.0.4 + dockerImageTag: 5.0.0 dockerRepository: airbyte/source-amazon-ads documentationUrl: https://docs.airbyte.com/integrations/sources/amazon-ads githubIssueLabel: source-amazon-ads icon: amazonads.svg license: MIT + maxSecondsBetweenMessages: 5400 name: Amazon Ads remoteRegistries: pypi: @@ -32,6 +33,18 @@ data: releaseStage: generally_available releases: breakingChanges: + 5.0.0: + message: "`SponsoredBrandCampaigns`, `SponsoredBrandsAdGroups`, `SponsoredProductCampaigns`, and `SponsoredProductAdGroupBidRecommendations` streams have updated schemas and must be reset." + upgradeDeadline: "2024-03-27" + scopedImpact: + - scopeType: stream + impactedScopes: + [ + "sponsored_brands_campaigns", + "sponsored_brands_ad_groups", + "sponsored_product_campaigns", + "sponsored_product_ad_group_bid_recommendations", + ] 4.0.0: message: "Streams `SponsoredBrandsAdGroups` and `SponsoredBrandsKeywords` now have updated schemas." upgradeDeadline: "2024-01-17" @@ -52,5 +65,5 @@ data: supportLevel: certified tags: - language:python - - cdk:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-amazon-ads/poetry.lock b/airbyte-integrations/connectors/source-amazon-ads/poetry.lock index 2b1ea15309139..525a551c189fe 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/poetry.lock +++ b/airbyte-integrations/connectors/source-amazon-ads/poetry.lock @@ -1,14 +1,14 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.62.0" +version = "0.72.1" description = "A framework for writing Airbyte Connectors." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte-cdk-0.62.0.tar.gz", hash = "sha256:622f56bd7101493a74f11c33a45a31c251032333989996f137cac8370873c614"}, - {file = "airbyte_cdk-0.62.0-py3-none-any.whl", hash = "sha256:b21330a566b33dbdddde33243eb9855f086ad4272e3585ca626be1225451a3b8"}, + {file = "airbyte-cdk-0.72.1.tar.gz", hash = "sha256:1dbd0a11f3784cfdd5afa9f40315c9a6123e803be91f9f861642a78e7ee14cd9"}, + {file = "airbyte_cdk-0.72.1-py3-none-any.whl", hash = "sha256:849077805442286de99f589ecba4be82491a3d9d3f516ce1a8b0cbaf303db9a4"}, ] [package.dependencies] @@ -32,8 +32,8 @@ requests-cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] @@ -104,13 +104,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -497,13 +497,13 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -732,13 +732,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -780,7 +780,6 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -838,13 +837,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -856,15 +855,15 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" @@ -925,19 +924,19 @@ tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asy [[package]] name = "setuptools" -version = "69.1.0" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -963,24 +962,24 @@ files = [ [[package]] name = "types-pyyaml" -version = "6.0.12.12" +version = "6.0.12.20240311" description = "Typing stubs for PyYAML" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "types-PyYAML-6.0.12.12.tar.gz", hash = "sha256:334373d392fde0fdf95af5c3f1661885fa10c52167b14593eb856289e1855062"}, - {file = "types_PyYAML-6.0.12.12-py3-none-any.whl", hash = "sha256:c05bc6c158facb0676674b7f11fe3960db4f389718e19e62bd2b84d6205cfd24"}, + {file = "types-PyYAML-6.0.12.20240311.tar.gz", hash = "sha256:a9e0f0f88dc835739b0c1ca51ee90d04ca2a897a71af79de9aec5f38cb0a5342"}, + {file = "types_PyYAML-6.0.12.20240311-py3-none-any.whl", hash = "sha256:b845b06a1c7e54b8e5b4c683043de0d9caf205e7434b3edc678ff2411979b8f6"}, ] [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -999,13 +998,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1110,4 +1109,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "f7a78e82f7679c556ca51fda798b61e4de3b8b78ac2125427c85dcf24542adf0" +content-hash = "61062ec976deec5a1ef8646cbfa9341eb383ee54271f8cff5fac1b65f1ab7d3b" diff --git a/airbyte-integrations/connectors/source-amazon-ads/pyproject.toml b/airbyte-integrations/connectors/source-amazon-ads/pyproject.toml index e281ad72d5ab7..38d0e64b3876c 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/pyproject.toml +++ b/airbyte-integrations/connectors/source-amazon-ads/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "4.0.4" +version = "5.0.0" name = "source-amazon-ads" description = "Source implementation for Amazon Ads." authors = [ "Airbyte ",] @@ -18,7 +18,7 @@ include = "source_amazon_ads" [tool.poetry.dependencies] python = "^3.9,<3.12" requests-oauthlib = "==1.3.1" -airbyte-cdk = "==0.62.0" +airbyte-cdk = "^0" pendulum = "==2.1.2" [tool.poetry.scripts] diff --git a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/schemas/__init__.py b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/schemas/__init__.py index e9c8aae725fc2..053c733ac2fff 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/schemas/__init__.py +++ b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/schemas/__init__.py @@ -2,9 +2,18 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # from .attribution_report import AttributionReportModel -from .common import CatalogModel, Keywords, MetricsReport, NegativeKeywords, Portfolio +from .common import ( + CatalogModel, + Keywords, + MetricsReport, + NegativeKeywords, + Portfolio +) from .profile import Profile -from .sponsored_brands import BrandsAdGroup, BrandsCampaign +from .sponsored_brands import ( + BrandsAdGroup, + BrandsCampaign, +) from .sponsored_display import DisplayAdGroup, DisplayBudgetRules, DisplayCampaign, DisplayCreatives, DisplayProductAds, DisplayTargeting from .sponsored_products import ( ProductAd, @@ -13,6 +22,9 @@ ProductAdGroupSuggestedKeywords, ProductCampaign, ProductTargeting, + SponsoredProductCampaignNegativeKeywordsModel, + SponsoredProductKeywordsModel, + SponsoredProductNegativeKeywordsModel ) __all__ = [ @@ -28,6 +40,7 @@ "DisplayCreatives", "MetricsReport", "NegativeKeywords", + "CampaignNegativeKeywords", "Portfolio", "ProductAd", "ProductAdGroups", @@ -37,4 +50,7 @@ "ProductTargeting", "Profile", "AttributionReportModel", + "SponsoredProductCampaignNegativeKeywordsModel", + "SponsoredProductKeywordsModel", + "SponsoredProductNegativeKeywordsModel" ] diff --git a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/schemas/sponsored_brands.py b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/schemas/sponsored_brands.py index e7e2fa7cd07c3..51d8f091e81c0 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/schemas/sponsored_brands.py +++ b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/schemas/sponsored_brands.py @@ -3,7 +3,7 @@ # from decimal import Decimal -from typing import Dict, List, Optional +from typing import Any, Dict, List, Optional from .common import CatalogModel @@ -27,7 +27,7 @@ class Creative(CatalogModel): class BrandsCampaign(CatalogModel): - campaignId: Decimal + campaignId: str name: str tags: Dict[str, str] budget: Decimal @@ -35,25 +35,19 @@ class BrandsCampaign(CatalogModel): startDate: str endDate: str state: str - servingStatus: str brandEntityId: str - portfolioId: int - bidOptimization: bool = None - bidMultiplier: Decimal = None - adFormat: str - bidAdjustments: Optional[List[BidAdjustment]] - creative: Optional[Creative] - landingPage: Optional[LandingPage] - supplySource: Optional[str] + portfolioId: str + ruleBasedBudget: Optional[Dict[str, Any]] + bidding: Optional[Dict[str, Any]] + productLocation: Optional[str] + costType: Optional[str] + smartDefault: Optional[List[str]] + extendedData: Optional[Dict[str, Any]] class BrandsAdGroup(CatalogModel): - campaignId: Decimal - adGroupId: Decimal + campaignId: str + adGroupId: str name: str - bid: Decimal - keywordId: Decimal - keywordText: str - nativeLanguageKeyword: str - matchType: str state: str + extendedData: Dict[str, Any] diff --git a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/schemas/sponsored_products.py b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/schemas/sponsored_products.py index b5ca604e06b69..6ef9a7b5ff1d5 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/schemas/sponsored_products.py +++ b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/schemas/sponsored_products.py @@ -3,9 +3,9 @@ # from decimal import Decimal -from typing import Dict, List, Optional +from typing import Any, Dict, List, Optional -from .common import CatalogModel, Targeting +from .common import CatalogModel, KeywordsBase class Adjustments(CatalogModel): @@ -19,39 +19,38 @@ class Bidding(CatalogModel): class ProductCampaign(CatalogModel): - portfolioId: int - campaignId: Decimal + portfolioId: str + campaignId: str name: str tags: Dict[str, str] - campaignType: str targetingType: str state: str - dailyBudget: Decimal - ruleBasedBudget: Dict[str, str] + dynamicBidding: Dict[str, Any] startDate: str - endDate: str = None - premiumBidAdjustment: bool - bidding: Bidding - networks: str + endDate: str + budget: Dict[str, Any] + extendedData: Optional[Dict[str, Any]] class ProductAdGroups(CatalogModel): - adGroupId: Decimal + adGroupId: str name: str - campaignId: Decimal + campaignId: str defaultBid: Decimal state: str + extendedData: dict -class SuggestedBid(CatalogModel): - suggested: Decimal - rangeStart: Decimal - rangeEnd: Decimal +class BidRecommendations(CatalogModel): + bidValues: List[Dict[str, str]] + targetingExpression: Dict[str, str] class ProductAdGroupBidRecommendations(CatalogModel): - adGroupId: Decimal - suggestedBid: Optional[SuggestedBid] = None + adGroupId: str + campaignId: str + theme: str + bidRecommendationsForTargetingExpressions: List[BidRecommendations] class SuggestedKeyword(CatalogModel): @@ -60,20 +59,56 @@ class SuggestedKeyword(CatalogModel): class ProductAdGroupSuggestedKeywords(CatalogModel): - adGroupId: Decimal + adGroupId: int suggestedKeywords: List[SuggestedKeyword] = None class ProductAd(CatalogModel): - adId: Decimal - campaignId: Decimal - adGroupId: Decimal - sku: str + adId: str + campaignId: str + customText: str asin: str state: str + sku: str + adGroupId: str + extendedData: Optional[Dict[str, Any]] -class ProductTargeting(Targeting): - campaignId: Decimal +class ProductTargeting(CatalogModel): expression: List[Dict[str, str]] + targetId: str resolvedExpression: List[Dict[str, str]] + campaignId: str + expressionType: str + state: str + bid: float + adGroupId: str + extendedData: Optional[Dict[str, Any]] + + +class SponsoredProductCampaignNegativeKeywordsModel(KeywordsBase): + keywordId: str + campaignId: str + state: str + keywordText: str + extendedData: Optional[Dict[str, Any]] + + +class SponsoredProductKeywordsModel(KeywordsBase): + keywordId: str + nativeLanguageLocale: str + campaignId: str + state: str + adGroupId: str + keywordText: str + extendedData: Optional[Dict[str, Any]] + + +class SponsoredProductNegativeKeywordsModel(KeywordsBase): + keywordId: str + nativeLanguageLocale: str + campaignId: str + state: str + adGroupId: str + keywordText: str + extendedData: Optional[Dict[str, Any]] diff --git a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/__init__.py b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/__init__.py index a449faafedb60..caa39bccfc91c 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/__init__.py +++ b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/__init__.py @@ -16,7 +16,11 @@ SponsoredDisplayReportStream, SponsoredProductsReportStream, ) -from .sponsored_brands import SponsoredBrandsAdGroups, SponsoredBrandsCampaigns, SponsoredBrandsKeywords +from .sponsored_brands import ( + SponsoredBrandsAdGroups, + SponsoredBrandsCampaigns, + SponsoredBrandsKeywords +) from .sponsored_display import ( SponsoredDisplayAdGroups, SponsoredDisplayBudgetRules, diff --git a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/attribution_report.py b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/attribution_report.py index 38d2073e5e2a7..4c683846e6d26 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/attribution_report.py +++ b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/attribution_report.py @@ -82,7 +82,7 @@ def metrics(self): def http_method(self) -> str: return "POST" - def path(self, **kvargs) -> str: + def path(self, **kwargs) -> str: return "/attribution/report" def get_json_schema(self): diff --git a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/common.py b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/common.py index 247122c1e9c20..9ad9bb481f94f 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/common.py +++ b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/common.py @@ -30,14 +30,16 @@ class to provide explanation why it had been done in this way. │ ├── SponsoredDisplayCampaigns │ ├── SponsoredDisplayProductAds │ ├── SponsoredDisplayTargetings - │ ├── SponsoredProductAdGroups - │ ├── SponsoredProductAds - │ ├── SponsoredProductCampaigns - │ ├── SponsoredProductKeywords - │ ├── SponsoredProductNegativeKeywords - │ ├── SponsoredProductTargetings - │ ├── SponsoredBrandsCampaigns - │ ├── SponsoredBrandsAdGroups + │ ├── SponsoredProductsV3 + │ | ├── SponsoredProductAdGroups + │ | ├── SponsoredProductAds + │ | ├── SponsoredProductCampaigns + │ | ├── SponsoredProductKeywords + │ | ├── SponsoredProductNegativeKeywords + │ | └── SponsoredProductTargetings + │ ├── SponsoredBrandsV4 + │ | ├── SponsoredBrandsCampaigns + │ | └── SponsoredBrandsAdGroups │ └── SponsoredBrandsKeywords └── ReportStream ├── SponsoredBrandsReportStream @@ -117,7 +119,7 @@ def raise_on_http_errors(self): def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: return None - def request_headers(self, *args, **kvargs) -> MutableMapping[str, Any]: + def request_headers(self, *args, **kwargs) -> MutableMapping[str, Any]: return {"Amazon-Advertising-API-ClientId": self._client_id} def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: @@ -171,9 +173,9 @@ class SubProfilesStream(AmazonAdsStream): page_size = 100 - def __init__(self, *args, **kvargs): + def __init__(self, *args, **kwargs): self._current_offset = 0 - super().__init__(*args, **kvargs) + super().__init__(*args, **kwargs) def next_page_token(self, response: requests.Response) -> Optional[int]: if not response: @@ -199,15 +201,15 @@ def request_params( "count": self.page_size, } - def read_records(self, *args, **kvargs) -> Iterable[Mapping[str, Any]]: + def read_records(self, *args, **kwargs) -> Iterable[Mapping[str, Any]]: """ Iterate through self._profiles list and send read all records for each profile. """ for profile in self._profiles: self._current_profile_id = profile.profileId - yield from super().read_records(*args, **kvargs) + yield from super().read_records(*args, **kwargs) - def request_headers(self, *args, **kvargs) -> MutableMapping[str, Any]: - headers = super().request_headers(*args, **kvargs) + def request_headers(self, *args, **kwargs) -> MutableMapping[str, Any]: + headers = super().request_headers(*args, **kwargs) headers["Amazon-Advertising-API-Scope"] = str(self._current_profile_id) return headers diff --git a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/portfolios.py b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/portfolios.py index 6892d8ffa8963..1d253fda57e66 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/portfolios.py +++ b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/portfolios.py @@ -17,18 +17,18 @@ class Portfolios(AmazonAdsStream): primary_key = "portfolioId" model = Portfolio - def path(self, **kvargs) -> str: + def path(self, **kwargs) -> str: return "v2/portfolios/extended" - def read_records(self, *args, **kvargs) -> Iterable[Mapping[str, Any]]: + def read_records(self, *args, **kwargs) -> Iterable[Mapping[str, Any]]: """ Iterate through self._profiles list and send read all records for each profile. """ for profile in self._profiles: self._current_profile_id = profile.profileId - yield from super().read_records(*args, **kvargs) + yield from super().read_records(*args, **kwargs) - def request_headers(self, *args, **kvargs) -> MutableMapping[str, Any]: - headers = super().request_headers(*args, **kvargs) + def request_headers(self, *args, **kwargs) -> MutableMapping[str, Any]: + headers = super().request_headers(*args, **kwargs) headers["Amazon-Advertising-API-Scope"] = str(self._current_profile_id) return headers diff --git a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/profiles.py b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/profiles.py index 9663656f3689e..c6491c052acfa 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/profiles.py +++ b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/profiles.py @@ -19,7 +19,7 @@ class Profiles(AmazonAdsStream): primary_key = "profileId" model = Profile - def path(self, **kvargs) -> str: + def path(self, **kwargs) -> str: return "v2/profiles?profileTypeFilter=seller,vendor" def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: @@ -30,13 +30,13 @@ def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapp self._profiles.append(profile_id_obj) yield record - def read_records(self, *args, **kvargs) -> Iterable[Mapping[str, Any]]: + def read_records(self, *args, **kwargs) -> Iterable[Mapping[str, Any]]: if self._profiles: # In case if we have _profiles populated we can use it instead of making API call. yield from [profile.dict(exclude_unset=True) for profile in self._profiles] else: # Make API call by the means of basic HttpStream class. - yield from super().read_records(*args, **kvargs) + yield from super().read_records(*args, **kwargs) def get_all_profiles(self) -> List[Profile]: """ diff --git a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/sponsored_brands.py b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/sponsored_brands.py index 025fb44d3d330..030ae17f94e03 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/sponsored_brands.py +++ b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/sponsored_brands.py @@ -1,15 +1,56 @@ # # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # +from typing import Any, Mapping, MutableMapping +from requests import Response from source_amazon_ads.schemas import BrandsAdGroup, BrandsCampaign from source_amazon_ads.streams.common import SubProfilesStream -class SponsoredBrandsCampaigns(SubProfilesStream): +class SponsoredBrandsV4(SubProfilesStream): """ - This stream corresponds to Amazon Advertising API - Sponsored Brands Campaigns - https://advertising.amazon.com/API/docs/en-us/sponsored-brands/3-0/openapi#/Campaigns + This Stream supports the Sponsored Brands V4 API, which requires POST methods + https://advertising.amazon.com/API/docs/en-us/sponsored-brands/3-0/openapi/prod + """ + + @property + def http_method(self, **kwargs) -> str: + return "POST" + + def request_headers(self, profile_id: str = None, *args, **kwargs) -> MutableMapping[str, Any]: + headers = super().request_headers(*args, **kwargs) + headers["Accept"] = self.content_type + headers["Content-Type"] = self.content_type + return headers + + def request_body_json( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> Mapping[str, Any]: + request_body = {} + request_body["maxResults"] = self.page_size + if next_page_token: + request_body["nextToken"] = next_page_token + return request_body + + def next_page_token(self, response: Response) -> str: + if not response: + return None + return response.json().get("nextToken", None) + + def request_params( + self, + stream_state: Mapping[str, Any], + stream_slice: Mapping[str, Any] = None, + next_page_token: int = None, + ) -> MutableMapping[str, Any]: + return {} + + +class SponsoredBrandsCampaigns(SponsoredBrandsV4): + """ + This stream corresponds to Amazon Ads API - Sponsored Brands Campaigns v4 + https://advertising.amazon.com/API/docs/en-us/sponsored-brands/3-0/openapi/prod#tag/Campaigns/operation/ListSponsoredBrandsCampaigns """ def __init__(self, *args, **kwargs): @@ -17,30 +58,36 @@ def __init__(self, *args, **kwargs): self.state_filter = kwargs.get("config", {}).get("state_filter") primary_key = "campaignId" + data_field = "campaigns" state_filter = None + content_type = "application/vnd.sbcampaignresource.v4+json" model = BrandsCampaign - def path(self, **kvargs) -> str: - return "sb/campaigns" + def path(self, **kwargs) -> str: + return "sb/v4/campaigns/list" - def request_params(self, *args, **kwargs): - params = super().request_params(*args, **kwargs) + def request_body_json( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> Mapping[str, Any]: + request_body = super().request_body_json(stream_state, stream_slice, next_page_token) if self.state_filter: - params["stateFilter"] = ",".join(self.state_filter) - return params + request_body["stateFilter"] = {"include": self.state_filter} + return request_body -class SponsoredBrandsAdGroups(SubProfilesStream): +class SponsoredBrandsAdGroups(SponsoredBrandsV4): """ - This stream corresponds to Amazon Advertising API - Sponsored Brands Ad groups - https://advertising.amazon.com/API/docs/en-us/sponsored-brands/3-0/openapi#/Ad%20groups + This stream corresponds to Amazon Ads API - Sponsored Brands Ad Groups v4 + https://advertising.amazon.com/API/docs/en-us/sponsored-brands/3-0/openapi/prod#tag/Ad-groups/operation/ListSponsoredBrandsAdGroups """ primary_key = "adGroupId" + data_field = "adGroups" model = BrandsAdGroup + content_type = "application/vnd.sbadgroupresource.v4+json" - def path(self, **kvargs) -> str: - return "sb/adGroups" + def path(self, **kwargs) -> str: + return "sb/v4/adGroups/list" class SponsoredBrandsKeywords(SubProfilesStream): @@ -52,5 +99,5 @@ class SponsoredBrandsKeywords(SubProfilesStream): primary_key = "adGroupId" model = BrandsAdGroup - def path(self, **kvargs) -> str: + def path(self, **kwargs) -> str: return "sb/keywords" diff --git a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/sponsored_products.py b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/sponsored_products.py index c3b596ae0490c..3b465e9ce2b67 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/sponsored_products.py +++ b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/sponsored_products.py @@ -2,29 +2,70 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # +import json from abc import ABC from http import HTTPStatus from typing import Any, Iterable, List, Mapping, MutableMapping, Optional -import requests as requests from airbyte_protocol.models import SyncMode +from requests import Response from source_amazon_ads.schemas import ( - Keywords, - NegativeKeywords, ProductAd, ProductAdGroupBidRecommendations, ProductAdGroups, ProductAdGroupSuggestedKeywords, ProductCampaign, ProductTargeting, + SponsoredProductCampaignNegativeKeywordsModel, + SponsoredProductKeywordsModel, + SponsoredProductNegativeKeywordsModel, ) -from source_amazon_ads.streams.common import AmazonAdsStream, SubProfilesStream +from source_amazon_ads.streams.common import SubProfilesStream -class SponsoredProductCampaigns(SubProfilesStream): +class SponsoredProductsV3(SubProfilesStream): """ - This stream corresponds to Amazon Advertising API - Sponsored Products Campaigns - https://advertising.amazon.com/API/docs/en-us/sponsored-display/3-0/openapi#/Campaigns + This Stream supports the Sponsored Products v3 API, which requires POST methods + https://advertising.amazon.com/API/docs/en-us/sponsored-products/3-0/openapi/prod + """ + + @property + def http_method(self, **kwargs) -> str: + return "POST" + + def request_headers(self, profile_id: str = None, *args, **kwargs) -> MutableMapping[str, Any]: + headers = super().request_headers(*args, **kwargs) + headers["Accept"] = self.content_type + headers["Content-Type"] = self.content_type + return headers + + def next_page_token(self, response: Response) -> str: + if not response: + return None + return response.json().get("nextToken", None) + + def request_body_json( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> Mapping[str, Any]: + request_body = {} + request_body["maxResults"] = self.page_size + if next_page_token: + request_body["nextToken"] = next_page_token + return request_body + + def request_params( + self, + stream_state: Mapping[str, Any], + stream_slice: Mapping[str, Any] = None, + next_page_token: int = None, + ) -> MutableMapping[str, Any]: + return {} + + +class SponsoredProductCampaigns(SponsoredProductsV3): + """ + This stream corresponds to Amazon Ads API - Sponsored Products (v3) Campaigns + https://advertising.amazon.com/API/docs/en-us/sponsored-products/3-0/openapi/prod#tag/Campaigns/operation/ListSponsoredProductsCampaigns """ def __init__(self, *args, **kwargs): @@ -32,42 +73,39 @@ def __init__(self, *args, **kwargs): self.state_filter = kwargs.get("config", {}).get("state_filter") primary_key = "campaignId" + data_field = "campaigns" state_filter = None model = ProductCampaign + content_type = "application/vnd.spCampaign.v3+json" - def path(self, **kvargs) -> str: - return "v2/sp/campaigns" + def path(self, **kwargs) -> str: + return "sp/campaigns/list" - def request_params(self, *args, **kwargs): - params = super().request_params(*args, **kwargs) + def request_body_json( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> Mapping[str, Any]: + request_body = super().request_body_json(stream_state, stream_slice, next_page_token) if self.state_filter: - params["stateFilter"] = ",".join(self.state_filter) - return params + request_body["stateFilter"] = {"include": self.state_filter} + return request_body -class SponsoredProductAdGroups(SubProfilesStream): +class SponsoredProductAdGroups(SponsoredProductsV3): """ - This stream corresponds to Amazon Advertising API - Sponsored Products Ad groups - https://advertising.amazon.com/API/docs/en-us/sponsored-products/2-0/openapi#/Ad%20groups + This stream corresponds to Amazon Ads API - Sponsored Products (v3) Ad groups + https://advertising.amazon.com/API/docs/en-us/sponsored-products/3-0/openapi/prod#tag/Ad-groups/operation/ListSponsoredProductsAdGroups """ primary_key = "adGroupId" + data_field = "adGroups" + content_type = "application/vnd.spAdGroup.v3+json" model = ProductAdGroups - def path(self, **kvargs) -> str: - return "v2/sp/adGroups" - + def path(self, **kwargs) -> str: + return "/sp/adGroups/list" -class SponsoredProductAdGroupsWithProfileId(SponsoredProductAdGroups): - """Add profileId attr for each records in SponsoredProductAdGroups stream""" - def parse_response(self, *args, **kwargs) -> Iterable[Mapping]: - for record in super().parse_response(*args, **kwargs): - record["profileId"] = self._current_profile_id - yield record - - -class SponsoredProductAdGroupWithSlicesABC(AmazonAdsStream, ABC): +class SponsoredProductAdGroupWithSlicesABC(SponsoredProductsV3, ABC): """ABC Class for extraction of additional information for each known sp ad group""" primary_key = "adGroupId" @@ -77,19 +115,14 @@ def __init__(self, *args, **kwargs): self.__kwargs = kwargs super().__init__(*args, **kwargs) - def request_headers(self, *args, **kvargs) -> MutableMapping[str, Any]: - headers = super().request_headers(*args, **kvargs) - headers["Amazon-Advertising-API-Scope"] = str(kvargs["stream_slice"]["profileId"]) - return headers - def stream_slices( self, *, sync_mode: SyncMode, cursor_field: List[str] = None, stream_state: Mapping[str, Any] = None ) -> Iterable[Optional[Mapping[str, Any]]]: - yield from SponsoredProductAdGroupsWithProfileId(*self.__args, **self.__kwargs).read_records( + yield from SponsoredProductAdGroups(*self.__args, **self.__kwargs).read_records( sync_mode=sync_mode, cursor_field=cursor_field, stream_slice=None, stream_state=stream_state ) - def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: + def parse_response(self, response: Response, **kwargs) -> Iterable[Mapping]: resp = response.json() if response.status_code == HTTPStatus.OK: @@ -104,6 +137,12 @@ def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapp f"Skip current AdGroup because it does not support request {response.request.url} for " f"{response.request.headers['Amazon-Advertising-API-Scope']} profile: {response.text}" ) + elif response.status_code == HTTPStatus.UNPROCESSABLE_ENTITY: + # 422 error message for bids recommendation: + # No recommendations can be provided as the input ad group does not have any asins. + self.logger.warning( + f"Skip current AdGroup because the ad group {json.loads(response.request.body)['adGroupId']} does not have any asins {response.request.url}" + ) elif response.status_code == HTTPStatus.NOT_FOUND: # 404 Either the specified ad group identifier was not found, # or the specified ad group was found but no associated bid was found. @@ -117,98 +156,148 @@ def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapp class SponsoredProductAdGroupBidRecommendations(SponsoredProductAdGroupWithSlicesABC): - """Docs: - Latest API: - https://advertising.amazon.com/API/docs/en-us/sponsored-display/3-0/openapi#/Bid%20Recommendations/getTargetBidRecommendations - POST /sd/targets/bid/recommendations - Note: does not work, always get "403 Forbidden" - - V2 API: - https://advertising.amazon.com/API/docs/en-us/sponsored-products/2-0/openapi#/Bid%20recommendations/getAdGroupBidRecommendations - GET /v2/sp/adGroups/{adGroupId}/bidRecommendations + """ + This stream corresponds to Amazon Ads API - Sponsored Products (v3) Ad group bid recommendations, now referred to as "Target Bid Recommendations" by Amazon Ads + https://advertising.amazon.com/API/docs/en-us/sponsored-display/3-0/openapi#tag/Bid-Recommendations/operation/getTargetBidRecommendations """ + primary_key = None + data_field = "bidRecommendations" + content_type = "application/vnd.spthemebasedbidrecommendation.v4+json" model = ProductAdGroupBidRecommendations def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: - return f"v2/sp/adGroups/{stream_slice['adGroupId']}/bidRecommendations" + return "/sp/targets/bid/recommendations" + + def request_body_json( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> Mapping[str, Any]: + self.current_ad_group_id = stream_slice["adGroupId"] + self.current_campaign_id = stream_slice["campaignId"] + + request_body = {} + request_body["targetingExpressions"] = [ + {"type": "CLOSE_MATCH"}, + {"type": "LOOSE_MATCH"}, + {"type": "SUBSTITUTES"}, + {"type": "COMPLEMENTS"}, + ] + request_body["adGroupId"] = stream_slice["adGroupId"] + request_body["campaignId"] = stream_slice["campaignId"] + request_body["recommendationType"] = "BIDS_FOR_EXISTING_AD_GROUP" + return request_body + + def parse_response(self, response: Response, **kwargs) -> Iterable[Mapping]: + for record in super().parse_response(response, **kwargs): + record["adGroupId"] = self.current_ad_group_id + record["campaignId"] = self.current_campaign_id + yield record class SponsoredProductAdGroupSuggestedKeywords(SponsoredProductAdGroupWithSlicesABC): """Docs: - Latest API: - https://advertising.amazon.com/API/docs/en-us/sponsored-products/3-0/openapi/prod#/Keyword%20Targets/getRankedKeywordRecommendation - POST /sp/targets/keywords/recommendations - Note: does not work, always get "403 Forbidden" - V2 API: https://advertising.amazon.com/API/docs/en-us/sponsored-products/2-0/openapi#/Suggested%20keywords GET /v2/sp/adGroups/{{adGroupId}}>/suggested/keywords """ + primary_key = None + data_field = "" model = ProductAdGroupSuggestedKeywords + @property + def http_method(self, **kwargs) -> str: + return "GET" + def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: return f"v2/sp/adGroups/{stream_slice['adGroupId']}/suggested/keywords" + def request_params( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: int = None + ) -> MutableMapping[str, Any]: + return {"maxNumSuggestions": 100} -class SponsoredProductKeywords(SubProfilesStream): + def request_headers(self, profile_id: str = None, *args, **kwargs) -> MutableMapping[str, Any]: + headers = {} + headers["Amazon-Advertising-API-Scope"] = str(self._current_profile_id) + headers["Amazon-Advertising-API-ClientId"] = self._client_id + return headers + + def request_body_json( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> Mapping[str, Any]: + return {} + + +class SponsoredProductKeywords(SponsoredProductsV3): """ - This stream corresponds to Amazon Advertising API - Sponsored Products Keywords - https://advertising.amazon.com/API/docs/en-us/sponsored-products/2-0/openapi#/Keywords + This stream corresponds to Amazon Ads Sponsored Products v3 API - Sponsored Products Keywords + https://advertising.amazon.com/API/docs/en-us/sponsored-products/3-0/openapi/prod#tag/Keywords/operation/ListSponsoredProductsKeywords """ primary_key = "keywordId" - model = Keywords + data_field = "keywords" + content_type = "application/vnd.spKeyword.v3+json" + model = SponsoredProductKeywordsModel - def path(self, **kvargs) -> str: - return "v2/sp/keywords" + def path(self, **kwargs) -> str: + return "sp/keywords/list" -class SponsoredProductNegativeKeywords(SubProfilesStream): +class SponsoredProductNegativeKeywords(SponsoredProductsV3): """ - This stream corresponds to Amazon Advertising API - Sponsored Products Negative Keywords - https://advertising.amazon.com/API/docs/en-us/sponsored-products/2-0/openapi#/Negative%20keywords + This stream corresponds to Amazon Ads Sponsored Products v3 API - Sponsored Products Negative Keywords + https://advertising.amazon.com/API/docs/en-us/sponsored-products/3-0/openapi/prod#tag/Negative-keywords/operation/ListSponsoredProductsNegativeKeywords """ primary_key = "keywordId" - model = NegativeKeywords + data_field = "negativeKeywords" + content_type = "application/vnd.spNegativeKeyword.v3+json" + model = SponsoredProductNegativeKeywordsModel - def path(self, **kvargs) -> str: - return "v2/sp/negativeKeywords" + def path(self, **kwargs) -> str: + return "sp/negativeKeywords/list" -class SponsoredProductCampaignNegativeKeywords(SponsoredProductNegativeKeywords): +class SponsoredProductCampaignNegativeKeywords(SponsoredProductsV3): """ - This stream corresponds to Amazon Advertising API - Sponsored Products Negative Keywords - https://advertising.amazon.com/API/docs/en-us/sponsored-products/2-0/openapi#/Negative%20keywords + This stream corresponds to Amazon Ads Sponsored Products v3 API - Sponsored Products Negative Keywords + https://advertising.amazon.com/API/docs/en-us/sponsored-products/3-0/openapi/prod#tag/Campaign-negative-keywords/operation/ListSponsoredProductsCampaignNegativeKeywords """ - def path(self, **kvargs) -> str: - return "v2/sp/campaignNegativeKeywords" + primary_key = "keywordId" + data_field = "campaignNegativeKeywords" + content_type = "application/vnd.spCampaignNegativeKeyword.v3+json" + model = SponsoredProductCampaignNegativeKeywordsModel + + def path(self, **kwargs) -> str: + return "sp/campaignNegativeKeywords/list" -class SponsoredProductAds(SubProfilesStream): +class SponsoredProductAds(SponsoredProductsV3): """ - This stream corresponds to Amazon Advertising API - Sponsored Products Ads - https://advertising.amazon.com/API/docs/en-us/sponsored-products/2-0/openapi#/Product%20ads + This stream corresponds to Amazon Ads v3 API - Sponsored Products Ads + https://advertising.amazon.com/API/docs/en-us/sponsored-products/3-0/openapi/prod#tag/Product-ads/operation/ListSponsoredProductsProductAds """ primary_key = "adId" + data_field = "productAds" + content_type = "application/vnd.spProductAd.v3+json" model = ProductAd - def path(self, **kvargs) -> str: - return "v2/sp/productAds" + def path(self, **kwargs) -> str: + return "sp/productAds/list" -class SponsoredProductTargetings(SubProfilesStream): +class SponsoredProductTargetings(SponsoredProductsV3): """ - This stream corresponds to Amazon Advertising API - Sponsored Products Targetings - https://advertising.amazon.com/API/docs/en-us/sponsored-products/2-0/openapi#/Product%20targeting + This stream corresponds to Amazon Ads Sponsored Products v3 API - Sponsored Products Targeting Clauses """ primary_key = "targetId" + data_field = "targetingClauses" + content_type = "application/vnd.spTargetingClause.v3+json" model = ProductTargeting - def path(self, **kvargs) -> str: - return "v2/sp/targets" + def path(self, **kwargs) -> str: + return "sp/targets/list" diff --git a/airbyte-integrations/connectors/source-amazon-ads/unit_tests/integrations/ad_requests/sponsored_brands_request_builder.py b/airbyte-integrations/connectors/source-amazon-ads/unit_tests/integrations/ad_requests/sponsored_brands_request_builder.py index 400fe018e55f7..65b947483c666 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/unit_tests/integrations/ad_requests/sponsored_brands_request_builder.py +++ b/airbyte-integrations/connectors/source-amazon-ads/unit_tests/integrations/ad_requests/sponsored_brands_request_builder.py @@ -8,14 +8,12 @@ class SponsoredBrandsRequestBuilder(AmazonAdsBaseRequestBuilder): @classmethod def ad_groups_endpoint( - cls, client_id: str, client_access_token: str, profile_id: str, limit: Optional[int] = 100, start_index: Optional[int] = 0 + cls, client_id: str, client_access_token: str, profile_id: str ) -> "SponsoredBrandsRequestBuilder": - return cls("sb/adGroups") \ + return cls("sb/v4/adGroups/list") \ .with_client_id(client_id) \ .with_client_access_token(client_access_token) \ - .with_profile_id(profile_id) \ - .with_limit(limit) \ - .with_start_index(start_index) + .with_profile_id(profile_id) @classmethod def keywords_endpoint( @@ -30,19 +28,18 @@ def keywords_endpoint( @classmethod def campaigns_endpoint( - cls, client_id: str, client_access_token: str, profile_id: str, limit: Optional[int] = 100, start_index: Optional[int] = 0 + cls, client_id: str, client_access_token: str, profile_id: str ) -> "SponsoredBrandsRequestBuilder": - return cls("sb/campaigns") \ + return cls("sb/v4/campaigns/list") \ .with_client_id(client_id) \ .with_client_access_token(client_access_token) \ - .with_profile_id(profile_id) \ - .with_limit(limit) \ - .with_start_index(start_index) + .with_profile_id(profile_id) def __init__(self, resource: str) -> None: super().__init__(resource) self._limit: Optional[int] = None self._start_index: Optional[int] = None + self._body: dict = None @property def query_params(self) -> Dict[str, Any]: @@ -55,7 +52,7 @@ def query_params(self) -> Dict[str, Any]: @property def request_body(self) ->Optional[str]: - return None + return self._body def with_limit(self, limit: int) -> "SponsoredBrandsRequestBuilder": self._limit: int = limit @@ -64,3 +61,7 @@ def with_limit(self, limit: int) -> "SponsoredBrandsRequestBuilder": def with_start_index(self, offset: int) -> "SponsoredBrandsRequestBuilder": self._start_index: int = offset return self + + def with_request_body(self, body: dict) -> "SponsoredBrandsRequestBuilder": + self._body: dict = body + return self diff --git a/airbyte-integrations/connectors/source-amazon-ads/unit_tests/integrations/ad_responses/pagination_strategies/__init__.py b/airbyte-integrations/connectors/source-amazon-ads/unit_tests/integrations/ad_responses/pagination_strategies/__init__.py index b7884e21612c9..692ce0bff949f 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/unit_tests/integrations/ad_responses/pagination_strategies/__init__.py +++ b/airbyte-integrations/connectors/source-amazon-ads/unit_tests/integrations/ad_responses/pagination_strategies/__init__.py @@ -1,2 +1,3 @@ from .count_based_pagination_strategy import CountBasedPaginationStrategy from .cursor_based_pagination_strategy import CursorBasedPaginationStrategy +from .sponsored_cursor_based_pagination_strategy import SponsoredCursorBasedPaginationStrategy diff --git a/airbyte-integrations/connectors/source-amazon-ads/unit_tests/integrations/ad_responses/pagination_strategies/sponsored_cursor_based_pagination_strategy.py b/airbyte-integrations/connectors/source-amazon-ads/unit_tests/integrations/ad_responses/pagination_strategies/sponsored_cursor_based_pagination_strategy.py new file mode 100644 index 0000000000000..04501edbb36f2 --- /dev/null +++ b/airbyte-integrations/connectors/source-amazon-ads/unit_tests/integrations/ad_responses/pagination_strategies/sponsored_cursor_based_pagination_strategy.py @@ -0,0 +1,11 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from typing import Any, Dict + +from airbyte_cdk.test.mock_http.response_builder import PaginationStrategy + + +class SponsoredCursorBasedPaginationStrategy(PaginationStrategy): + @staticmethod + def update(response: Dict[str, Any]) -> None: + response["nextToken"] = "next-page-token" diff --git a/airbyte-integrations/connectors/source-amazon-ads/unit_tests/integrations/test_sponsored_streams.py b/airbyte-integrations/connectors/source-amazon-ads/unit_tests/integrations/test_sponsored_streams.py index ffad23c720241..e5096bcb13518 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/unit_tests/integrations/test_sponsored_streams.py +++ b/airbyte-integrations/connectors/source-amazon-ads/unit_tests/integrations/test_sponsored_streams.py @@ -1,19 +1,48 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. +import json from unittest import TestCase from unittest.mock import patch from airbyte_cdk.test.mock_http import HttpMocker +from airbyte_cdk.test.mock_http.response_builder import ( + FieldPath, + HttpResponseBuilder, + NestedPath, + PaginationStrategy, + RecordBuilder, + create_record_builder, + create_response_builder, + find_template, +) from airbyte_protocol.models import Level as LogLevel from airbyte_protocol.models import SyncMode from .ad_requests import OAuthRequestBuilder, ProfilesRequestBuilder, SponsoredBrandsRequestBuilder from .ad_responses import ErrorResponseBuilder, OAuthResponseBuilder, ProfilesResponseBuilder, SponsoredBrandsResponseBuilder -from .ad_responses.pagination_strategies import CountBasedPaginationStrategy +from .ad_responses.pagination_strategies import CountBasedPaginationStrategy, SponsoredCursorBasedPaginationStrategy from .ad_responses.records import ErrorRecordBuilder, ProfilesRecordBuilder, SponsoredBrandsRecordBuilder from .config import ConfigBuilder from .utils import get_log_messages_by_log_level, read_stream +_DEFAULT_REQUEST_BODY = json.dumps({ + "maxResults": 100 +}) + +def _a_record(stream_name: str, data_field: str, record_id_path: str) -> RecordBuilder: + return create_record_builder( + find_template(stream_name, __file__), + FieldPath(data_field), + record_id_path=FieldPath(record_id_path), + record_cursor_path=None + ) + +def _a_response(stream_name: str, data_field: str, pagination_strategy: PaginationStrategy = None) -> HttpResponseBuilder: + return create_response_builder( + find_template(stream_name, __file__), + FieldPath(data_field), + pagination_strategy=pagination_strategy + ) class TestSponsoredBrandsStreamsFullRefresh(TestCase): @property @@ -34,7 +63,7 @@ def _given_oauth_and_profiles(self, http_mocker: HttpMocker, config: dict) -> No ) @HttpMocker() - def test_given_non_breaking_error_when_read_ad_groups_then_stream_is_ignored(self, http_mocker): + def test_given_non_breaking_error_when_read_ad_groups_then_stream_is_ignored(self, http_mocker: HttpMocker): """ Check ad groups stream: non-breaking errors are ignored When error of this kind happen, we warn and then keep syncing another streams @@ -42,8 +71,8 @@ def test_given_non_breaking_error_when_read_ad_groups_then_stream_is_ignored(sel self._given_oauth_and_profiles(http_mocker, self._config) non_breaking_error = ErrorRecordBuilder.non_breaking_error() - http_mocker.get( - SponsoredBrandsRequestBuilder.ad_groups_endpoint(self._config["client_id"], self._config["access_token"], self._config["profiles"][0], limit=100).build(), + http_mocker.post( + SponsoredBrandsRequestBuilder.ad_groups_endpoint(self._config["client_id"], self._config["access_token"], self._config["profiles"][0]).with_request_body(_DEFAULT_REQUEST_BODY).build(), ErrorResponseBuilder.non_breaking_error_response().with_record(non_breaking_error).with_status_code(400).build() ) output = read_stream("sponsored_brands_ad_groups", SyncMode.full_refresh, self._config) @@ -53,15 +82,15 @@ def test_given_non_breaking_error_when_read_ad_groups_then_stream_is_ignored(sel assert any([non_breaking_error.build().get("details") in worning for worning in warning_logs]) @HttpMocker() - def test_given_breaking_error_when_read_ad_groups_then_stream_stop_syncing(self, http_mocker): + def test_given_breaking_error_when_read_ad_groups_then_stream_stop_syncing(self, http_mocker: HttpMocker): """ Check ad groups stream: when unknown error happen we stop syncing with raising the error """ self._given_oauth_and_profiles(http_mocker, self._config) breaking_error = ErrorRecordBuilder.breaking_error() - http_mocker.get( - SponsoredBrandsRequestBuilder.ad_groups_endpoint(self._config["client_id"], self._config["access_token"], self._config["profiles"][0], limit=100).build(), + http_mocker.post( + SponsoredBrandsRequestBuilder.ad_groups_endpoint(self._config["client_id"], self._config["access_token"], self._config["profiles"][0]).with_request_body(_DEFAULT_REQUEST_BODY).build(), ErrorResponseBuilder.breaking_error_response().with_record(breaking_error).with_status_code(500).build() ) with patch('time.sleep', return_value=None): @@ -72,45 +101,57 @@ def test_given_breaking_error_when_read_ad_groups_then_stream_stop_syncing(self, assert any([breaking_error.build().get("message") in error for error in error_logs]) @HttpMocker() - def test_given_one_page_when_read_ad_groups_then_return_records(self, http_mocker): + def test_given_one_page_when_read_ad_groups_then_return_records(self, http_mocker: HttpMocker): """ Check ad groups stream: normal full refresh sync without pagination """ + stream_name = "sponsored_brands_ad_groups" + data_field = "adGroups" + record_id_path = "adGroupId" + self._given_oauth_and_profiles(http_mocker, self._config) - http_mocker.get( - SponsoredBrandsRequestBuilder.ad_groups_endpoint(self._config["client_id"], self._config["access_token"], self._config["profiles"][0], limit=100).build(), - SponsoredBrandsResponseBuilder.ad_groups_response().with_record(SponsoredBrandsRecordBuilder.ad_groups_record()).build() + http_mocker.post( + SponsoredBrandsRequestBuilder.ad_groups_endpoint(self._config["client_id"], self._config["access_token"], self._config["profiles"][0]).with_request_body(_DEFAULT_REQUEST_BODY).build(), + _a_response(stream_name, data_field, None).with_record(_a_record(stream_name, data_field, record_id_path)).build() ) output = read_stream("sponsored_brands_ad_groups", SyncMode.full_refresh, self._config) + print(output.records) assert len(output.records) == 1 @HttpMocker() - def test_given_many_pages_when_read_ad_groups_then_return_records(self, http_mocker): + def test_given_many_pages_when_read_ad_groups_then_return_records(self, http_mocker: HttpMocker): """ Check ad groups stream: normal full refresh sync with pagination """ + + stream_name = "sponsored_brands_ad_groups" + data_field = "adGroups" + record_id_path = "adGroupId" + pagination_strategy = SponsoredCursorBasedPaginationStrategy() + + paginated_request_body = json.dumps({ + "maxResults": 100, + "nextToken": "next-page-token" + }) + self._given_oauth_and_profiles(http_mocker, self._config) - http_mocker.get( - SponsoredBrandsRequestBuilder.ad_groups_endpoint(self._config["client_id"], self._config["access_token"], self._config["profiles"][0], limit=100).build(), - SponsoredBrandsResponseBuilder.ad_groups_response(CountBasedPaginationStrategy()).with_record(SponsoredBrandsRecordBuilder.ad_groups_record()).with_pagination().build() - ) - http_mocker.get( - SponsoredBrandsRequestBuilder.ad_groups_endpoint(self._config["client_id"], self._config["access_token"], self._config["profiles"][0], limit=100, start_index=100).build(), - SponsoredBrandsResponseBuilder.ad_groups_response(CountBasedPaginationStrategy()).with_record(SponsoredBrandsRecordBuilder.ad_groups_record()).with_pagination().build() + http_mocker.post( + SponsoredBrandsRequestBuilder.ad_groups_endpoint(self._config["client_id"], self._config["access_token"], self._config["profiles"][0]).with_request_body(_DEFAULT_REQUEST_BODY).build(), + _a_response(stream_name, data_field, pagination_strategy).with_record(_a_record(stream_name, data_field, record_id_path)).with_pagination().build() ) - http_mocker.get( - SponsoredBrandsRequestBuilder.ad_groups_endpoint(self._config["client_id"], self._config["access_token"], self._config["profiles"][0], limit=100, start_index=200).build(), - SponsoredBrandsResponseBuilder.ad_groups_response().with_record(SponsoredBrandsRecordBuilder.ad_groups_record()).build() + http_mocker.post( + SponsoredBrandsRequestBuilder.ad_groups_endpoint(self._config["client_id"], self._config["access_token"], self._config["profiles"][0]).with_request_body(paginated_request_body).build(), + _a_response(stream_name, data_field, pagination_strategy).with_record(_a_record(stream_name, data_field, record_id_path)).build() ) output = read_stream("sponsored_brands_ad_groups", SyncMode.full_refresh, self._config) - assert len(output.records) == 201 + assert len(output.records) == 2 @HttpMocker() - def test_given_non_breaking_error_when_read_campaigns_then_stream_is_ignored(self, http_mocker): + def test_given_non_breaking_error_when_read_campaigns_then_stream_is_ignored(self, http_mocker: HttpMocker): """ Check campaigns stream: non-breaking errors are ignored When error of this kind happen, we warn and then keep syncing another streams @@ -118,8 +159,8 @@ def test_given_non_breaking_error_when_read_campaigns_then_stream_is_ignored(sel self._given_oauth_and_profiles(http_mocker, self._config) non_breaking_error = ErrorRecordBuilder.non_breaking_error() - http_mocker.get( - SponsoredBrandsRequestBuilder.campaigns_endpoint(self._config["client_id"], self._config["access_token"], self._config["profiles"][0], limit=100).build(), + http_mocker.post( + SponsoredBrandsRequestBuilder.campaigns_endpoint(self._config["client_id"], self._config["access_token"], self._config["profiles"][0]).with_request_body(_DEFAULT_REQUEST_BODY).build(), ErrorResponseBuilder.non_breaking_error_response().with_record(non_breaking_error).with_status_code(400).build() ) output = read_stream("sponsored_brands_campaigns", SyncMode.full_refresh, self._config) @@ -129,15 +170,15 @@ def test_given_non_breaking_error_when_read_campaigns_then_stream_is_ignored(sel assert any([non_breaking_error.build().get("details") in worning for worning in warning_logs]) @HttpMocker() - def test_given_breaking_error_when_read_campaigns_then_stream_stop_syncing(self, http_mocker): + def test_given_breaking_error_when_read_campaigns_then_stream_stop_syncing(self, http_mocker: HttpMocker): """ Check campaigns stream: when unknown error happen we stop syncing with raising the error """ self._given_oauth_and_profiles(http_mocker, self._config) breaking_error = ErrorRecordBuilder.breaking_error() - http_mocker.get( - SponsoredBrandsRequestBuilder.campaigns_endpoint(self._config["client_id"], self._config["access_token"], self._config["profiles"][0], limit=100).build(), + http_mocker.post( + SponsoredBrandsRequestBuilder.campaigns_endpoint(self._config["client_id"], self._config["access_token"], self._config["profiles"][0]).with_request_body(_DEFAULT_REQUEST_BODY).build(), ErrorResponseBuilder.breaking_error_response().with_record(breaking_error).with_status_code(500).build() ) with patch('time.sleep', return_value=None): @@ -148,45 +189,56 @@ def test_given_breaking_error_when_read_campaigns_then_stream_stop_syncing(self, assert any([breaking_error.build().get("message") in error for error in error_logs]) @HttpMocker() - def test_given_one_page_when_read_campaigns_then_return_records(self, http_mocker): + def test_given_one_page_when_read_campaigns_then_return_records(self, http_mocker: HttpMocker): """ Check campaigns stream: normal full refresh sync without pagination """ self._given_oauth_and_profiles(http_mocker, self._config) - http_mocker.get( - SponsoredBrandsRequestBuilder.campaigns_endpoint(self._config["client_id"], self._config["access_token"], self._config["profiles"][0], limit=100).build(), - SponsoredBrandsResponseBuilder.campaigns_response().with_record(SponsoredBrandsRecordBuilder.campaigns_record()).build() + stream_name = "sponsored_brands_campaigns" + data_field = "campaigns" + record_id_path = "campaignId" + + http_mocker.post( + SponsoredBrandsRequestBuilder.campaigns_endpoint(self._config["client_id"], self._config["access_token"], self._config["profiles"][0]).with_request_body(_DEFAULT_REQUEST_BODY).build(), + _a_response(stream_name, data_field, None).with_record(_a_record(stream_name, data_field, record_id_path)).build() ) output = read_stream("sponsored_brands_campaigns", SyncMode.full_refresh, self._config) assert len(output.records) == 1 @HttpMocker() - def test_given_many_pages_when_read_campaigns_then_return_records(self, http_mocker): + def test_given_many_pages_when_read_campaigns_then_return_records(self, http_mocker: HttpMocker): """ Check campaigns stream: normal full refresh sync with pagination """ + + stream_name = "sponsored_brands_campaigns" + data_field = "campaigns" + record_id_path = "campaignId" + pagination_strategy = SponsoredCursorBasedPaginationStrategy() + + paginated_request_body = json.dumps({ + "maxResults": 100, + "nextToken": "next-page-token" + }) + self._given_oauth_and_profiles(http_mocker, self._config) - http_mocker.get( - SponsoredBrandsRequestBuilder.campaigns_endpoint(self._config["client_id"], self._config["access_token"], self._config["profiles"][0], limit=100).build(), - SponsoredBrandsResponseBuilder.campaigns_response(CountBasedPaginationStrategy()).with_record(SponsoredBrandsRecordBuilder.campaigns_record()).with_pagination().build() - ) - http_mocker.get( - SponsoredBrandsRequestBuilder.campaigns_endpoint(self._config["client_id"], self._config["access_token"], self._config["profiles"][0], limit=100, start_index=100).build(), - SponsoredBrandsResponseBuilder.campaigns_response(CountBasedPaginationStrategy()).with_record(SponsoredBrandsRecordBuilder.campaigns_record()).with_pagination().build() + http_mocker.post( + SponsoredBrandsRequestBuilder.campaigns_endpoint(self._config["client_id"], self._config["access_token"], self._config["profiles"][0]).with_request_body(_DEFAULT_REQUEST_BODY).build(), + _a_response(stream_name, data_field, pagination_strategy).with_record(_a_record(stream_name, data_field, record_id_path)).with_pagination().build() ) - http_mocker.get( - SponsoredBrandsRequestBuilder.campaigns_endpoint(self._config["client_id"], self._config["access_token"], self._config["profiles"][0], limit=100, start_index=200).build(), - SponsoredBrandsResponseBuilder.campaigns_response().with_record(SponsoredBrandsRecordBuilder.campaigns_record()).build() + http_mocker.post( + SponsoredBrandsRequestBuilder.campaigns_endpoint(self._config["client_id"], self._config["access_token"], self._config["profiles"][0]).with_request_body(paginated_request_body).build(), + _a_response(stream_name, data_field, pagination_strategy).with_record(_a_record(stream_name, data_field, record_id_path)).build() ) output = read_stream("sponsored_brands_campaigns", SyncMode.full_refresh, self._config) - assert len(output.records) == 201 + assert len(output.records) == 2 @HttpMocker() - def test_given_non_breaking_error_when_read_keywords_then_stream_is_ignored(self, http_mocker): + def test_given_non_breaking_error_when_read_keywords_then_stream_is_ignored(self, http_mocker: HttpMocker): """ Check keywords stream: non-breaking errors are ignored When error of this kind happen, we warn and then keep syncing another streams @@ -205,7 +257,7 @@ def test_given_non_breaking_error_when_read_keywords_then_stream_is_ignored(self assert any([non_breaking_error.build().get("details") in worning for worning in warning_logs]) @HttpMocker() - def test_given_breaking_error_when_read_keywords_then_stream_stop_syncing(self, http_mocker): + def test_given_breaking_error_when_read_keywords_then_stream_stop_syncing(self, http_mocker: HttpMocker): """ Check keywords stream: when unknown error happen we stop syncing with raising the error """ @@ -224,7 +276,7 @@ def test_given_breaking_error_when_read_keywords_then_stream_stop_syncing(self, assert any([breaking_error.build().get("message") in error for error in error_logs]) @HttpMocker() - def test_given_one_page_when_read_keywords_then_return_records(self, http_mocker): + def test_given_one_page_when_read_keywords_then_return_records(self, http_mocker: HttpMocker): """ Check keywords stream: normal full refresh sync without pagination """ @@ -239,7 +291,7 @@ def test_given_one_page_when_read_keywords_then_return_records(self, http_mocker assert len(output.records) == 1 @HttpMocker() - def test_given_many_pages_when_read_keywords_then_return_records(self, http_mocker): + def test_given_many_pages_when_read_keywords_then_return_records(self, http_mocker: HttpMocker): """ Check keywords stream: normal full refresh sync with pagination """ diff --git a/airbyte-integrations/connectors/source-amazon-ads/unit_tests/resource/http/response/sponsored_brands_ad_groups.json b/airbyte-integrations/connectors/source-amazon-ads/unit_tests/resource/http/response/sponsored_brands_ad_groups.json index e7ef472e37834..35f0f155282c2 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/unit_tests/resource/http/response/sponsored_brands_ad_groups.json +++ b/airbyte-integrations/connectors/source-amazon-ads/unit_tests/resource/http/response/sponsored_brands_ad_groups.json @@ -1,7 +1,12 @@ -[ - { - "campaignId": 1, - "adGroupId": 1, - "name": "string" - } -] +{ + "adGroups": [ + { + "campaignId": "string", + "name": "string", + "state": "ENABLED", + "adGroupId": "string", + "extendedData": {} + } + ], + "totalResults": 100 +} diff --git a/airbyte-integrations/connectors/source-amazon-ads/unit_tests/resource/http/response/sponsored_brands_campaigns.json b/airbyte-integrations/connectors/source-amazon-ads/unit_tests/resource/http/response/sponsored_brands_campaigns.json index 51b88a4d08950..07abeb5404d3e 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/unit_tests/resource/http/response/sponsored_brands_campaigns.json +++ b/airbyte-integrations/connectors/source-amazon-ads/unit_tests/resource/http/response/sponsored_brands_campaigns.json @@ -1,42 +1,53 @@ -[ - { - "campaignId": 1, - "name": "string", - "budget": 0, - "budgetType": "lifetime", - "startDate": "string", - "endDate": "string", - "state": "enabled", - "servingStatus": "asinNotBuyable", - "portfolioId": 0, - "bidOptimization": true, - "bidMultiplier": 0, - "bidAdjustments": [ - { - "bidAdjustmentPredicate": "placementGroupHome", - "bidAdjustmentPercent": 50 +{ + "campaigns": [ + { + "budgetType": "DAILY", + "ruleBasedBudget": { + "isProcessing": true, + "applicableRuleName": "string", + "value": 0.1, + "applicableRuleId": "string" }, - { - "bidAdjustmentPredicate": "placementGroupDetailPage", - "bidAdjustmentPercent": 50 + "brandEntityId": "string", + "isMultiAdGroupsEnabled": true, + "goal": "string", + "bidding": { + "bidOptimization": true, + "bidAdjustmentsByShopperSegment": [ + { + "percentage": 900, + "shopperSegment": "NEW_TO_BRAND_PURCHASE" + } + ], + "bidAdjustmentsByPlacement": [ + { + "percentage": -99, + "placement": "HOME" + } + ], + "bidOptimizationStrategy": "MAXIMIZE_IMMEDIATE_SALES" }, - { - "bidAdjustmentPredicate": "placementGroupOther", - "bidAdjustmentPercent": 50 + "endDate": "string", + "campaignId": "string", + "productLocation": "SOLD_ON_AMAZON", + "tags": { + "property1": "string", + "property2": "string" + }, + "portfolioId": "string", + "costType": "string", + "smartDefault": ["string"], + "name": "string", + "state": "ENABLED", + "startDate": "string", + "budget": 0.1, + "extendedData": { + "servingStatus": "ADVERTISER_STATUS_ENABLED", + "lastUpdateDate": 0, + "servingStatusDetails": ["string"], + "creationDate": 0 } - ], - "adFormat": "productCollection", - "creative": { - "brandName": "string", - "brandLogoAssetID": "string", - "brandLogoUrl": "string", - "headline": "string", - "asins": ["string"], - "shouldOptimizeAsins": false - }, - "landingPage": { - "pageType": "productList", - "url": "string" } - } -] + ], + "totalCount": 100 +} diff --git a/airbyte-integrations/connectors/source-amazon-ads/unit_tests/test_report_streams.py b/airbyte-integrations/connectors/source-amazon-ads/unit_tests/test_report_streams.py index 13783837a56ad..45c48b39d0919 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/unit_tests/test_report_streams.py +++ b/airbyte-integrations/connectors/source-amazon-ads/unit_tests/test_report_streams.py @@ -708,50 +708,66 @@ def test_read_incremental_with_records_start_date(config): [ ( ["enabled", "archived", "paused"], - SponsoredBrandsCampaigns, + SponsoredDisplayCampaigns, ), ( ["enabled"], - SponsoredBrandsCampaigns, + SponsoredDisplayCampaigns, ), ( None, - SponsoredBrandsCampaigns, + SponsoredDisplayCampaigns, ), + ], +) +def test_streams_state_filter(mocker, config, state_filter, stream_class): + profiles = make_profiles() + mocker.patch.object(stream_class, "state_filter", new_callable=mocker.PropertyMock, return_value=state_filter) + + stream = stream_class(config, profiles) + params = stream.request_params(stream_state=None, stream_slice=None, next_page_token=None) + if "stateFilter" in params: + assert params["stateFilter"] == ",".join(state_filter) + else: + assert state_filter is None + +@pytest.mark.parametrize( + "state_filter, stream_class", + [ ( ["enabled", "archived", "paused"], - SponsoredProductCampaigns, + SponsoredBrandsCampaigns, ), ( ["enabled"], - SponsoredProductCampaigns, + SponsoredBrandsCampaigns, ), ( None, - SponsoredProductCampaigns, + SponsoredBrandsCampaigns, ), ( ["enabled", "archived", "paused"], - SponsoredDisplayCampaigns, + SponsoredProductCampaigns, ), ( ["enabled"], - SponsoredDisplayCampaigns, + SponsoredProductCampaigns, ), ( None, - SponsoredDisplayCampaigns, + SponsoredProductCampaigns, ), ], ) -def test_streams_state_filter(mocker, config, state_filter, stream_class): +def test_sponsored_brand_and_products_streams_state_filter(mocker, config, state_filter, stream_class): profiles = make_profiles() mocker.patch.object(stream_class, "state_filter", new_callable=mocker.PropertyMock, return_value=state_filter) stream = stream_class(config, profiles) - params = stream.request_params(stream_state=None, stream_slice=None, next_page_token=None) - if "stateFilter" in params: - assert params["stateFilter"] == ",".join(state_filter) + request_body = stream.request_body_json(stream_state=None, stream_slice=None, next_page_token=None) + if "stateFilter" in request_body: + assert request_body["stateFilter"]["include"] == state_filter else: assert state_filter is None diff --git a/airbyte-integrations/connectors/source-amazon-ads/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-amazon-ads/unit_tests/test_streams.py index 1eb1a45d3ac18..31264c0210083 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/unit_tests/test_streams.py +++ b/airbyte-integrations/connectors/source-amazon-ads/unit_tests/test_streams.py @@ -23,6 +23,7 @@ def setup_responses( product_ads_response=None, generic_response=None, creatives_response=None, + post_response=None, ): responses.add( responses.POST, @@ -77,6 +78,12 @@ def setup_responses( f"https://advertising-api.amazon.com/{generic_response}", json=[], ) + if post_response: + responses.add( + responses.POST, + f"https://advertising-api.amazon.com/{post_response}", + json={}, + ) def get_all_stream_records(stream, stream_slice=None): @@ -254,20 +261,23 @@ def test_streams_displays( @pytest.mark.parametrize( ("stream_name", "endpoint"), [ - ("sponsored_brands_campaigns", "sb/campaigns"), - ("sponsored_brands_ad_groups", "sb/adGroups"), + ("sponsored_brands_campaigns", "sb/v4/campaigns/list"), + ("sponsored_brands_ad_groups", "sb/v4/adGroups/list"), ("sponsored_brands_keywords", "sb/keywords"), - ("sponsored_product_campaigns", "v2/sp/campaigns"), - ("sponsored_product_ad_groups", "v2/sp/adGroups"), - ("sponsored_product_keywords", "v2/sp/keywords"), - ("sponsored_product_negative_keywords", "v2/sp/negativeKeywords"), - ("sponsored_product_ads", "v2/sp/productAds"), - ("sponsored_product_targetings", "v2/sp/targets"), + ("sponsored_product_campaigns", "sp/campaigns/list"), + ("sponsored_product_ad_groups", "sp/adGroups/list"), + ("sponsored_product_keywords", "sp/keywords/list"), + ("sponsored_product_negative_keywords", "sp/negativeKeywords/list"), + ("sponsored_product_ads", "sp/productAds/list"), + ("sponsored_product_targetings", "sp/targets/list"), ], ) @responses.activate def test_streams_brands_and_products(config, stream_name, endpoint, profiles_response): - setup_responses(profiles_response=profiles_response, generic_response=endpoint) + if endpoint != "sb/keywords": + setup_responses(profiles_response=profiles_response, post_response=endpoint) + else: + setup_responses(profiles_response=profiles_response, generic_response=endpoint) source = SourceAmazonAds() streams = source.streams(config) @@ -282,8 +292,8 @@ def test_streams_brands_and_products(config, stream_name, endpoint, profiles_res def test_sponsored_product_ad_group_bid_recommendations_404_error(caplog, config, profiles_response): setup_responses(profiles_response=profiles_response) responses.add( - responses.GET, - "https://advertising-api.amazon.com/v2/sp/adGroups/xxx/bidRecommendations", + responses.POST, + "https://advertising-api.amazon.com/sp/targets/bid/recommendations", json={ "code": "404", "details": "404 Either the specified ad group identifier was not found or the specified ad group was found but no associated bid was found.", @@ -293,6 +303,6 @@ def test_sponsored_product_ad_group_bid_recommendations_404_error(caplog, config source = SourceAmazonAds() streams = source.streams(config) test_stream = get_stream_by_name(streams, "sponsored_product_ad_group_bid_recommendations") - records = get_all_stream_records(test_stream, stream_slice={"profileId": "1231", "adGroupId": "xxx"}) + records = get_all_stream_records(test_stream, stream_slice={"campaignId": "1231", "adGroupId": "xxx"}) assert records == [] assert "Skip current AdGroup because the specified ad group has no associated bid" in caplog.text diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/acceptance-test-config.yml b/airbyte-integrations/connectors/source-amazon-seller-partner/acceptance-test-config.yml index 2d84d9683a7fc..4aea83c726f61 100644 --- a/airbyte-integrations/connectors/source-amazon-seller-partner/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-amazon-seller-partner/acceptance-test-config.yml @@ -99,6 +99,10 @@ acceptance_tests: bypass_reason: "Data cannot be seeded in the test account, integration tests added for the stream instead" - name: VendorOrders bypass_reason: "Data cannot be seeded in the test account, integration tests added for the stream instead" + - name: GET_VENDOR_FORECASTING_FRESH_REPORT + bypass_reason: "Data cannot be seeded in the test account, integration tests added for the stream instead" + - name: GET_VENDOR_FORECASTING_RETAIL_REPORT + bypass_reason: "Data cannot be seeded in the test account, integration tests added for the stream instead" incremental: tests: - config_path: "secrets/config.json" diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/metadata.yaml b/airbyte-integrations/connectors/source-amazon-seller-partner/metadata.yaml index 59fa239a7adc8..ecaabe8a28139 100644 --- a/airbyte-integrations/connectors/source-amazon-seller-partner/metadata.yaml +++ b/airbyte-integrations/connectors/source-amazon-seller-partner/metadata.yaml @@ -15,7 +15,7 @@ data: connectorSubtype: api connectorType: source definitionId: e55879a8-0ef8-4557-abcf-ab34c53ec460 - dockerImageTag: 4.0.0 + dockerImageTag: 4.2.1 dockerRepository: airbyte/source-amazon-seller-partner documentationUrl: https://docs.airbyte.com/integrations/sources/amazon-seller-partner githubIssueLabel: source-amazon-seller-partner diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/poetry.lock b/airbyte-integrations/connectors/source-amazon-seller-partner/poetry.lock index bd41472c15b0b..6c0b1a20cad94 100644 --- a/airbyte-integrations/connectors/source-amazon-seller-partner/poetry.lock +++ b/airbyte-integrations/connectors/source-amazon-seller-partner/poetry.lock @@ -1,14 +1,14 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.63.2" +version = "0.72.1" description = "A framework for writing Airbyte Connectors." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte-cdk-0.63.2.tar.gz", hash = "sha256:b2edc160f560352a816f3a266b5dfa6dfe37868add1e3a0a2628eb19ba771ed1"}, - {file = "airbyte_cdk-0.63.2-py3-none-any.whl", hash = "sha256:8698cb94514f35577123520954503cb2da407423af109dffd03644ba8b0093cd"}, + {file = "airbyte-cdk-0.72.1.tar.gz", hash = "sha256:1dbd0a11f3784cfdd5afa9f40315c9a6123e803be91f9f861642a78e7ee14cd9"}, + {file = "airbyte_cdk-0.72.1-py3-none-any.whl", hash = "sha256:849077805442286de99f589ecba4be82491a3d9d3f516ce1a8b0cbaf303db9a4"}, ] [package.dependencies] @@ -32,8 +32,8 @@ requests-cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] @@ -104,13 +104,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -503,13 +503,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -738,13 +738,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -797,7 +797,6 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -1006,19 +1005,19 @@ test = ["fixtures", "mock", "purl", "pytest", "sphinx", "testrepository (>=0.0.1 [[package]] name = "setuptools" -version = "69.1.0" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -1044,13 +1043,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -1219,4 +1218,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "d3855ad9303c1e3d5d8d2a3eb082e9a4aa11c293d0c11fddd3e9aaa986afeb4a" +content-hash = "194e43daaa993a09718bb9ed1d5414c4538cf485a4ba2bf01901145d55940915" diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/pyproject.toml b/airbyte-integrations/connectors/source-amazon-seller-partner/pyproject.toml index 467e46d1c1950..9190fec443ea1 100644 --- a/airbyte-integrations/connectors/source-amazon-seller-partner/pyproject.toml +++ b/airbyte-integrations/connectors/source-amazon-seller-partner/pyproject.toml @@ -3,7 +3,7 @@ requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "4.0.0" +version = "4.2.1" name = "source-amazon-seller-partner" description = "Source implementation for Amazon Seller Partner." authors = ["Airbyte "] @@ -17,7 +17,7 @@ include = "source_amazon_seller_partner" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "*" +airbyte-cdk = "^0" xmltodict = "~=0.12" dateparser = "==1.2.0" diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/schemas/GET_VENDOR_FORECASTING_FRESH_REPORT.json b/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/schemas/GET_VENDOR_FORECASTING_FRESH_REPORT.json new file mode 100644 index 0000000000000..3a1cd743901ec --- /dev/null +++ b/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/schemas/GET_VENDOR_FORECASTING_FRESH_REPORT.json @@ -0,0 +1,7 @@ +{ + "title": "Vendor Forecasting Fresh Report", + "description": "A report with forward looking mean, P70, P80, and P90 weekly customer demand forecasts. Data is reported at the ASIN level for the most recent weekly forecast generation date.", + "type": "object", + "$schema": "http://json-schema.org/draft-07/schema#", + "properties": { "$ref": "GET_VENDOR_FORECASTING_REPORT.json" } +} diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/schemas/GET_VENDOR_FORECASTING_RETAIL_REPORT.json b/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/schemas/GET_VENDOR_FORECASTING_RETAIL_REPORT.json new file mode 100644 index 0000000000000..37002421ae3a1 --- /dev/null +++ b/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/schemas/GET_VENDOR_FORECASTING_RETAIL_REPORT.json @@ -0,0 +1,7 @@ +{ + "title": "Vendor Forecasting Retail Report", + "description": "A report with forward looking mean, P70, P80, and P90 weekly customer demand forecasts. Data is reported at the ASIN level for the most recent weekly forecast generation date.", + "type": "object", + "$schema": "http://json-schema.org/draft-07/schema#", + "properties": { "$ref": "GET_VENDOR_FORECASTING_REPORT.json" } +} diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/schemas/shared/GET_VENDOR_FORECASTING_REPORT.json b/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/schemas/shared/GET_VENDOR_FORECASTING_REPORT.json new file mode 100644 index 0000000000000..9890c14ef486f --- /dev/null +++ b/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/schemas/shared/GET_VENDOR_FORECASTING_REPORT.json @@ -0,0 +1,33 @@ +{ + "forecastGenerationDate": { + "type": ["null", "string"], + "format": "date" + }, + "asin": { + "type": ["null", "string"] + }, + "startDate": { + "type": ["null", "string"], + "format": "date" + }, + "endDate": { + "type": ["null", "string"], + "format": "date" + }, + "meanForecastUnits": { + "type": ["null", "number"] + }, + "p70ForecastUnits": { + "type": ["null", "number"] + }, + "p80ForecastUnits": { + "type": ["null", "number"] + }, + "p90ForecastUnits": { + "type": ["null", "number"] + }, + "dataEndTime": { + "type": ["null", "string"], + "format": "date" + } +} diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/source.py b/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/source.py index 91c2e1bd80ec0..71a866be6dc70 100644 --- a/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/source.py +++ b/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/source.py @@ -3,7 +3,6 @@ # -import traceback from os import getenv from typing import Any, List, Mapping, Optional, Tuple @@ -63,6 +62,8 @@ SellerFeedbackReports, StrandedInventoryUiReport, VendorDirectFulfillmentShipping, + VendorForecastingFreshReport, + VendorForecastingRetailReport, VendorInventoryReports, VendorOrders, VendorSalesReports, @@ -184,6 +185,8 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: LedgerSummaryViewReport, FbaReimbursementsReports, VendorOrders, + VendorForecastingFreshReport, + VendorForecastingRetailReport, ] # TODO: Remove after Brand Analytics will be enabled in CLOUD: https://github.com/airbytehq/airbyte/issues/32353 diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/streams.py b/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/streams.py index 83cd4b4663f4c..bef06abc8a305 100644 --- a/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/streams.py +++ b/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/streams.py @@ -409,13 +409,13 @@ def read_records( logging.error(f"Failed to retrieve the report result document for stream '{self.name}'. Exception: {e}") error_response = "Failed to retrieve the report result document." - raise AirbyteTracedException( - internal_message=( - f"Failed to retrieve the report '{self.name}' for period " - f"{stream_slice['dataStartTime']}-{stream_slice['dataEndTime']}. " + exception_message = f"Failed to retrieve the report '{self.name}'" + if stream_slice and "dataStartTime" in stream_slice: + exception_message += ( + f" for period {stream_slice['dataStartTime']}-{stream_slice['dataEndTime']}. " f"This will be read during the next sync. Error: {error_response}" ) - ) + raise AirbyteTracedException(internal_message=exception_message) elif processing_status == ReportProcessingStatus.CANCELLED: logger.warning(f"The report for stream '{self.name}' was cancelled or there is no data to return.") else: @@ -708,7 +708,7 @@ def _report_data( ) -> Mapping[str, Any]: data = super()._report_data(sync_mode, cursor_field, stream_slice, stream_state) options = self.report_options() - if options and options.get("reportPeriod") is not None: + if options and options.get("reportPeriod"): data.update(self._augmented_data(options)) return data @@ -752,24 +752,6 @@ class IncrementalAnalyticsStream(AnalyticsStream): def cursor_field(self) -> Union[str, List[str]]: return "endDate" - def _report_data( - self, - sync_mode: SyncMode, - cursor_field: List[str] = None, - stream_slice: Mapping[str, Any] = None, - stream_state: Mapping[str, Any] = None, - ) -> Mapping[str, Any]: - data = super()._report_data(sync_mode, cursor_field, stream_slice, stream_state) - if stream_slice: - data_times = {} - if stream_slice.get("dataStartTime"): - data_times["dataStartTime"] = stream_slice["dataStartTime"] - if stream_slice.get("dataEndTime"): - data_times["dataEndTime"] = stream_slice["dataEndTime"] - data.update(data_times) - - return data - def parse_response( self, response: requests.Response, @@ -891,6 +873,52 @@ class VendorSalesReports(IncrementalAnalyticsStream): availability_sla_days = 4 # Data is only available after 4 days +class VendorForecastingReport(AnalyticsStream, ABC): + """ + Field definitions: + https://github.com/amzn/selling-partner-api-models/blob/main/schemas/reports/vendorForecastingReport.json + Docs: https://developer-docs.amazon.com/sp-api/docs/report-type-values-analytics#vendor-retail-analytics-reports + """ + + result_key = "forecastByAsin" + + @property + @abstractmethod + def selling_program(self) -> str: + pass + + @property + def name(self) -> str: + return f"GET_VENDOR_FORECASTING_{self.selling_program}_REPORT" + + def stream_slices( + self, sync_mode: SyncMode, cursor_field: List[str] = None, stream_state: Mapping[str, Any] = None + ) -> Iterable[Optional[Mapping[str, Any]]]: + return [None] + + def _report_data( + self, + sync_mode: SyncMode, + cursor_field: List[str] = None, + stream_slice: Mapping[str, Any] = None, + stream_state: Mapping[str, Any] = None, + ) -> Mapping[str, Any]: + # This report supports the `sellingProgram` parameter only + return { + "reportType": "GET_VENDOR_FORECASTING_REPORT", + "marketplaceIds": [self.marketplace_id], + "reportOptions": {"sellingProgram": self.selling_program}, + } + + +class VendorForecastingFreshReport(VendorForecastingReport): + selling_program = "FRESH" + + +class VendorForecastingRetailReport(VendorForecastingReport): + selling_program = "RETAIL" + + class SellerFeedbackReports(IncrementalReportsAmazonSPStream): """ Field definitions: https://sellercentral.amazon.com/help/hub/reference/G202125660 diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/unit_tests/integration/test_report_based_streams.py b/airbyte-integrations/connectors/source-amazon-seller-partner/unit_tests/integration/test_report_based_streams.py index bfdbfa241e73d..d50bb9de9b72c 100644 --- a/airbyte-integrations/connectors/source-amazon-seller-partner/unit_tests/integration/test_report_based_streams.py +++ b/airbyte-integrations/connectors/source-amazon-seller-partner/unit_tests/integration/test_report_based_streams.py @@ -4,6 +4,7 @@ import gzip +import json from http import HTTPStatus from typing import List, Optional @@ -170,7 +171,6 @@ def _read(stream_name: str, config_: ConfigBuilder, expecting_exception: bool = @HttpMocker() def test_given_report_when_read_then_return_records(self, stream_name: str, data_format: str, http_mocker: HttpMocker) -> None: mock_auth(http_mocker) - http_mocker.post(_create_report_request(stream_name).build(), _create_report_response(_REPORT_ID)) http_mocker.get( _check_report_status_request(_REPORT_ID).build(), @@ -194,7 +194,6 @@ def test_given_compressed_report_when_read_then_return_records( self, stream_name: str, data_format: str, http_mocker: HttpMocker ) -> None: mock_auth(http_mocker) - http_mocker.post(_create_report_request(stream_name).build(), _create_report_response(_REPORT_ID)) http_mocker.get( _check_report_status_request(_REPORT_ID).build(), @@ -510,6 +509,313 @@ def test_given_report_when_read_then_state_message_produced_and_state_match_late assert len(output.state_messages) == 1 cursor_field = get_stream_by_name(stream_name, _config.build()).cursor_field - cursor_value_from_state_message = output.most_recent_state.get(stream_name, {}).get(cursor_field) cursor_value_from_latest_record = output.records[-1].record.data.get(cursor_field) - assert cursor_value_from_state_message == cursor_value_from_latest_record + + most_recent_state = output.most_recent_state.stream_state + assert most_recent_state == {cursor_field: cursor_value_from_latest_record} + + +@freezegun.freeze_time(NOW.isoformat()) +class TestVendorSalesReportsFullRefresh: + data_format = "json" + selling_program = ("RETAIL", "FRESH") + + @staticmethod + def _read(stream_name: str, config_: ConfigBuilder, expecting_exception: bool = False) -> EntrypointOutput: + return read_output( + config_builder=config_, + stream_name=stream_name, + sync_mode=SyncMode.full_refresh, + expecting_exception=expecting_exception, + ) + + @staticmethod + def _get_stream_name(selling_program: str) -> str: + return f"GET_VENDOR_FORECASTING_{selling_program}_REPORT" + + @staticmethod + def _get_report_request_body(selling_program: str) -> str: + return json.dumps( + { + "reportType": "GET_VENDOR_FORECASTING_REPORT", + "marketplaceIds": [MARKETPLACE_ID], + "reportOptions": {"sellingProgram": selling_program}, + } + ) + + @pytest.mark.parametrize("selling_program", selling_program) + @HttpMocker() + def test_given_report_when_read_then_return_records(self, selling_program: str, http_mocker: HttpMocker) -> None: + mock_auth(http_mocker) + stream_name = self._get_stream_name(selling_program) + create_report_request_body = self._get_report_request_body(selling_program) + http_mocker.post( + _create_report_request(stream_name).with_body(create_report_request_body).build(), + _create_report_response(_REPORT_ID), + ) + http_mocker.get( + _check_report_status_request(_REPORT_ID).build(), + _check_report_status_response(stream_name, report_document_id=_REPORT_DOCUMENT_ID), + ) + http_mocker.get( + _get_document_download_url_request(_REPORT_DOCUMENT_ID).build(), + _get_document_download_url_response(_DOCUMENT_DOWNLOAD_URL, _REPORT_DOCUMENT_ID), + ) + http_mocker.get( + _download_document_request(_DOCUMENT_DOWNLOAD_URL).build(), + _download_document_response(stream_name, data_format=self.data_format), + ) + + output = self._read(stream_name, config()) + assert len(output.records) == DEFAULT_EXPECTED_NUMBER_OF_RECORDS + + @pytest.mark.parametrize("selling_program", selling_program) + @HttpMocker() + def test_given_compressed_report_when_read_then_return_records(self, selling_program: str, http_mocker: HttpMocker) -> None: + mock_auth(http_mocker) + stream_name = self._get_stream_name(selling_program) + create_report_request_body = self._get_report_request_body(selling_program) + http_mocker.post( + _create_report_request(stream_name).with_body(create_report_request_body).build(), + _create_report_response(_REPORT_ID), + ) + http_mocker.get( + _check_report_status_request(_REPORT_ID).build(), + _check_report_status_response(stream_name, report_document_id=_REPORT_DOCUMENT_ID), + ) + http_mocker.get( + _get_document_download_url_request(_REPORT_DOCUMENT_ID).build(), + _get_document_download_url_response(_DOCUMENT_DOWNLOAD_URL, _REPORT_DOCUMENT_ID, compressed=True), + ) + + # a workaround to pass compressed document to the mocked response + document_request = _download_document_request(_DOCUMENT_DOWNLOAD_URL).build() + document_response = _download_document_response(stream_name, data_format=self.data_format, compressed=True) + document_request_matcher = HttpRequestMatcher(document_request, minimum_number_of_expected_match=1) + http_mocker._matchers.append(document_request_matcher) + + http_mocker._mocker.get( + requests_mock.ANY, + additional_matcher=http_mocker._matches_wrapper(document_request_matcher), + response_list=[{"content": document_response.body, "status_code": document_response.status_code}], + ) + + output = self._read(stream_name, config()) + assert len(output.records) == DEFAULT_EXPECTED_NUMBER_OF_RECORDS + + @pytest.mark.parametrize("selling_program", selling_program) + @HttpMocker() + def test_given_http_status_500_then_200_when_create_report_then_retry_and_return_records( + self, selling_program: str, http_mocker: HttpMocker + ) -> None: + mock_auth(http_mocker) + stream_name = self._get_stream_name(selling_program) + create_report_request_body = self._get_report_request_body(selling_program) + http_mocker.post( + _create_report_request(stream_name).with_body(create_report_request_body).build(), + [response_with_status(status_code=HTTPStatus.INTERNAL_SERVER_ERROR), _create_report_response(_REPORT_ID)], + ) + http_mocker.get( + _check_report_status_request(_REPORT_ID).build(), + _check_report_status_response(stream_name, report_document_id=_REPORT_DOCUMENT_ID), + ) + http_mocker.get( + _get_document_download_url_request(_REPORT_DOCUMENT_ID).build(), + _get_document_download_url_response(_DOCUMENT_DOWNLOAD_URL, _REPORT_DOCUMENT_ID), + ) + http_mocker.get( + _download_document_request(_DOCUMENT_DOWNLOAD_URL).build(), + _download_document_response(stream_name, data_format=self.data_format), + ) + + output = self._read(stream_name, config()) + assert len(output.records) == DEFAULT_EXPECTED_NUMBER_OF_RECORDS + + @pytest.mark.parametrize("selling_program", selling_program) + @HttpMocker() + def test_given_http_status_500_then_200_when_retrieve_report_then_retry_and_return_records( + self, selling_program: str, http_mocker: HttpMocker + ) -> None: + mock_auth(http_mocker) + stream_name = self._get_stream_name(selling_program) + create_report_request_body = self._get_report_request_body(selling_program) + http_mocker.post( + _create_report_request(stream_name).with_body(create_report_request_body).build(), + _create_report_response(_REPORT_ID), + ) + http_mocker.get( + _check_report_status_request(_REPORT_ID).build(), + [ + response_with_status(status_code=HTTPStatus.INTERNAL_SERVER_ERROR), + _check_report_status_response(stream_name, report_document_id=_REPORT_DOCUMENT_ID), + ], + ) + http_mocker.get( + _get_document_download_url_request(_REPORT_DOCUMENT_ID).build(), + _get_document_download_url_response(_DOCUMENT_DOWNLOAD_URL, _REPORT_DOCUMENT_ID), + ) + http_mocker.get( + _download_document_request(_DOCUMENT_DOWNLOAD_URL).build(), + _download_document_response(stream_name, data_format=self.data_format), + ) + + output = self._read(stream_name, config()) + assert len(output.records) == DEFAULT_EXPECTED_NUMBER_OF_RECORDS + + @pytest.mark.parametrize("selling_program", selling_program) + @HttpMocker() + def test_given_http_status_500_then_200_when_get_document_url_then_retry_and_return_records( + self, selling_program: str, http_mocker: HttpMocker + ) -> None: + mock_auth(http_mocker) + stream_name = self._get_stream_name(selling_program) + create_report_request_body = self._get_report_request_body(selling_program) + http_mocker.post( + _create_report_request(stream_name).with_body(create_report_request_body).build(), + _create_report_response(_REPORT_ID), + ) + http_mocker.get( + _check_report_status_request(_REPORT_ID).build(), + _check_report_status_response(stream_name, report_document_id=_REPORT_DOCUMENT_ID), + ) + http_mocker.get( + _get_document_download_url_request(_REPORT_DOCUMENT_ID).build(), + [ + response_with_status(status_code=HTTPStatus.INTERNAL_SERVER_ERROR), + _get_document_download_url_response(_DOCUMENT_DOWNLOAD_URL, _REPORT_DOCUMENT_ID), + ], + ) + http_mocker.get( + _download_document_request(_DOCUMENT_DOWNLOAD_URL).build(), + _download_document_response(stream_name, data_format=self.data_format), + ) + + output = self._read(stream_name, config()) + assert len(output.records) == DEFAULT_EXPECTED_NUMBER_OF_RECORDS + + @pytest.mark.parametrize("selling_program", selling_program) + @HttpMocker() + def test_given_http_status_500_then_200_when_download_document_then_retry_and_return_records( + self, selling_program: str, http_mocker: HttpMocker + ) -> None: + mock_auth(http_mocker) + stream_name = self._get_stream_name(selling_program) + create_report_request_body = self._get_report_request_body(selling_program) + http_mocker.post( + _create_report_request(stream_name).with_body(create_report_request_body).build(), + _create_report_response(_REPORT_ID), + ) + http_mocker.get( + _check_report_status_request(_REPORT_ID).build(), + _check_report_status_response(stream_name, report_document_id=_REPORT_DOCUMENT_ID), + ) + http_mocker.get( + _get_document_download_url_request(_REPORT_DOCUMENT_ID).build(), + _get_document_download_url_response(_DOCUMENT_DOWNLOAD_URL, _REPORT_DOCUMENT_ID), + ) + http_mocker.get( + _download_document_request(_DOCUMENT_DOWNLOAD_URL).build(), + [ + response_with_status(status_code=HTTPStatus.INTERNAL_SERVER_ERROR), + _download_document_response(stream_name, data_format=self.data_format), + ], + ) + + output = self._read(stream_name, config()) + assert len(output.records) == DEFAULT_EXPECTED_NUMBER_OF_RECORDS + + @pytest.mark.parametrize("selling_program", selling_program) + @HttpMocker() + def test_given_report_access_forbidden_when_read_then_no_records_and_error_logged( + self, selling_program: str, http_mocker: HttpMocker + ) -> None: + mock_auth(http_mocker) + stream_name = self._get_stream_name(selling_program) + create_report_request_body = self._get_report_request_body(selling_program) + http_mocker.post( + _create_report_request(stream_name).with_body(create_report_request_body).build(), + response_with_status(status_code=HTTPStatus.FORBIDDEN), + ) + + output = self._read(stream_name, config()) + message_on_access_forbidden = ( + "This is most likely due to insufficient permissions on the credentials in use. " + "Try to grant required permissions/scopes or re-authenticate." + ) + assert_message_in_log_output(message_on_access_forbidden, output) + assert len(output.records) == 0 + + @pytest.mark.parametrize("selling_program", selling_program) + @HttpMocker() + def test_given_report_status_cancelled_when_read_then_stream_completed_successfully_and_warn_about_cancellation( + self, selling_program: str, http_mocker: HttpMocker + ) -> None: + mock_auth(http_mocker) + stream_name = self._get_stream_name(selling_program) + create_report_request_body = self._get_report_request_body(selling_program) + http_mocker.post( + _create_report_request(stream_name).with_body(create_report_request_body).build(), + _create_report_response(_REPORT_ID), + ) + http_mocker.get( + _check_report_status_request(_REPORT_ID).build(), + _check_report_status_response(stream_name, processing_status=ReportProcessingStatus.CANCELLED), + ) + + message_on_report_cancelled = f"The report for stream '{stream_name}' was cancelled or there is no data to return." + + output = self._read(stream_name, config()) + assert_message_in_log_output(message_on_report_cancelled, output) + assert len(output.records) == 0 + + @pytest.mark.parametrize("selling_program", selling_program) + @HttpMocker() + def test_given_report_status_fatal_when_read_then_exception_raised(self, selling_program: str, http_mocker: HttpMocker) -> None: + mock_auth(http_mocker) + stream_name = self._get_stream_name(selling_program) + create_report_request_body = self._get_report_request_body(selling_program) + http_mocker.post( + _create_report_request(stream_name).with_body(create_report_request_body).build(), + _create_report_response(_REPORT_ID), + ) + http_mocker.get( + _check_report_status_request(_REPORT_ID).build(), + _check_report_status_response( + stream_name, processing_status=ReportProcessingStatus.FATAL, report_document_id=_REPORT_DOCUMENT_ID + ), + ) + + http_mocker.get( + _get_document_download_url_request(_REPORT_DOCUMENT_ID).build(), + _get_document_download_url_response(_DOCUMENT_DOWNLOAD_URL, _REPORT_DOCUMENT_ID), + ) + http_mocker.get( + _download_document_request(_DOCUMENT_DOWNLOAD_URL).build(), + [ + response_with_status(status_code=HTTPStatus.INTERNAL_SERVER_ERROR), + _download_document_error_response(), + ], + ) + + output = self._read(stream_name, config(), expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error + assert f"Failed to retrieve the report '{stream_name}'" in output.errors[-1].trace.error.message + + @pytest.mark.parametrize("selling_program", selling_program) + @HttpMocker() + def test_given_http_error_500_on_create_report_when_read_then_no_records_and_error_logged( + self, selling_program: str, http_mocker: HttpMocker + ) -> None: + mock_auth(http_mocker) + stream_name = self._get_stream_name(selling_program) + create_report_request_body = self._get_report_request_body(selling_program) + http_mocker.post( + _create_report_request(stream_name).with_body(create_report_request_body).build(), + response_with_status(status_code=HTTPStatus.INTERNAL_SERVER_ERROR), + ) + + message_on_backoff_exception = f"The report for stream '{stream_name}' was cancelled due to several failed retry attempts." + + output = self._read(stream_name, config()) + assert_message_in_log_output(message_on_backoff_exception, output) + assert len(output.records) == 0 diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/unit_tests/integration/test_vendor_direct_fulfillment_shipping.py b/airbyte-integrations/connectors/source-amazon-seller-partner/unit_tests/integration/test_vendor_direct_fulfillment_shipping.py index 7706c715d3c42..f3eccb1b26159 100644 --- a/airbyte-integrations/connectors/source-amazon-seller-partner/unit_tests/integration/test_vendor_direct_fulfillment_shipping.py +++ b/airbyte-integrations/connectors/source-amazon-seller-partner/unit_tests/integration/test_vendor_direct_fulfillment_shipping.py @@ -198,9 +198,10 @@ def test_when_read_then_state_message_produced_and_state_match_latest_record(sel output = self._read(config().with_start_date(_START_DATE).with_end_date(_END_DATE)) assert len(output.state_messages) == 1 - cursor_value_from_state_message = output.most_recent_state.get(_STREAM_NAME, {}).get(_CURSOR_FIELD) cursor_value_from_latest_record = output.records[-1].record.data.get(_CURSOR_FIELD) - assert cursor_value_from_state_message == cursor_value_from_latest_record + + most_recent_state = output.most_recent_state.stream_state + assert most_recent_state == {_CURSOR_FIELD: cursor_value_from_latest_record} @HttpMocker() def test_given_state_when_read_then_state_value_is_created_after_query_param(self, http_mocker: HttpMocker) -> None: @@ -232,4 +233,4 @@ def test_given_state_when_read_then_state_value_is_created_after_query_param(sel config_=config().with_start_date(_START_DATE).with_end_date(_END_DATE), state=StateBuilder().with_stream_state(_STREAM_NAME, {_CURSOR_FIELD: state_value}).build(), ) - assert output.most_recent_state == {_STREAM_NAME: {_CURSOR_FIELD: _END_DATE.strftime(TIME_FORMAT)}} + assert output.most_recent_state.stream_state == {_CURSOR_FIELD: _END_DATE.strftime(TIME_FORMAT)} diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/unit_tests/integration/test_vendor_orders.py b/airbyte-integrations/connectors/source-amazon-seller-partner/unit_tests/integration/test_vendor_orders.py index 691bd32608deb..a1416a505328d 100644 --- a/airbyte-integrations/connectors/source-amazon-seller-partner/unit_tests/integration/test_vendor_orders.py +++ b/airbyte-integrations/connectors/source-amazon-seller-partner/unit_tests/integration/test_vendor_orders.py @@ -183,9 +183,10 @@ def test_when_read_then_state_message_produced_and_state_match_latest_record(sel output = self._read(config().with_start_date(_START_DATE).with_end_date(_END_DATE)) assert len(output.state_messages) == 1 - cursor_value_from_state_message = output.most_recent_state.get(_STREAM_NAME, {}).get(_CURSOR_FIELD) cursor_value_from_latest_record = output.records[-1].record.data.get(_CURSOR_FIELD) - assert cursor_value_from_state_message == cursor_value_from_latest_record + + most_recent_state = output.most_recent_state.stream_state + assert most_recent_state == {_CURSOR_FIELD: cursor_value_from_latest_record} @HttpMocker() def test_given_state_when_read_then_state_value_is_created_after_query_param(self, http_mocker: HttpMocker) -> None: @@ -211,4 +212,4 @@ def test_given_state_when_read_then_state_value_is_created_after_query_param(sel config_=config().with_start_date(_START_DATE).with_end_date(_END_DATE), state=StateBuilder().with_stream_state(_STREAM_NAME, {_CURSOR_FIELD: state_value}).build(), ) - assert output.most_recent_state == {_STREAM_NAME: {_CURSOR_FIELD: _END_DATE.strftime(TIME_FORMAT)}} + assert output.most_recent_state.stream_state == {_CURSOR_FIELD: _END_DATE.strftime(TIME_FORMAT)} diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/unit_tests/resource/http/response/GET_VENDOR_FORECASTING_FRESH_REPORT.json b/airbyte-integrations/connectors/source-amazon-seller-partner/unit_tests/resource/http/response/GET_VENDOR_FORECASTING_FRESH_REPORT.json new file mode 100644 index 0000000000000..5cbf2cdff1690 --- /dev/null +++ b/airbyte-integrations/connectors/source-amazon-seller-partner/unit_tests/resource/http/response/GET_VENDOR_FORECASTING_FRESH_REPORT.json @@ -0,0 +1,31 @@ +{ + "reportSpecification": { + "reportType": "GET_VENDOR_FORECASTING_REPORT", + "reportOptions": { + "sellingProgram": "FRESH" + }, + "marketplaceIds": ["ATVPDKIKX0DER"] + }, + "forecastByAsin": [ + { + "forecastGenerationDate": "2021-06-06", + "asin": "B123456789", + "startDate": "2021-06-06", + "endDate": "2021-06-12", + "meanForecastUnits": 3.1, + "p70ForecastUnits": 3.9, + "p80ForecastUnits": 30.3, + "p90ForecastUnits": 300.7 + }, + { + "forecastGenerationDate": "2021-06-06", + "asin": "B123456789", + "startDate": "2021-06-13", + "endDate": "2021-06-19", + "meanForecastUnits": 3.1, + "p70ForecastUnits": 3.9, + "p80ForecastUnits": 30.3, + "p90ForecastUnits": 300.7 + } + ] +} diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/unit_tests/resource/http/response/GET_VENDOR_FORECASTING_RETAIL_REPORT.json b/airbyte-integrations/connectors/source-amazon-seller-partner/unit_tests/resource/http/response/GET_VENDOR_FORECASTING_RETAIL_REPORT.json new file mode 100644 index 0000000000000..059c5301d8d6e --- /dev/null +++ b/airbyte-integrations/connectors/source-amazon-seller-partner/unit_tests/resource/http/response/GET_VENDOR_FORECASTING_RETAIL_REPORT.json @@ -0,0 +1,31 @@ +{ + "reportSpecification": { + "reportType": "GET_VENDOR_FORECASTING_REPORT", + "reportOptions": { + "sellingProgram": "RETAIL" + }, + "marketplaceIds": ["ATVPDKIKX0DER"] + }, + "forecastByAsin": [ + { + "forecastGenerationDate": "2021-06-06", + "asin": "B123456789", + "startDate": "2021-06-06", + "endDate": "2021-06-12", + "meanForecastUnits": 3.1, + "p70ForecastUnits": 3.9, + "p80ForecastUnits": 30.3, + "p90ForecastUnits": 300.7 + }, + { + "forecastGenerationDate": "2021-06-06", + "asin": "B123456789", + "startDate": "2021-06-13", + "endDate": "2021-06-19", + "meanForecastUnits": 3.1, + "p70ForecastUnits": 3.9, + "p80ForecastUnits": 30.3, + "p90ForecastUnits": 300.7 + } + ] +} diff --git a/airbyte-integrations/connectors/source-amplitude/integration_tests/integration_test.py b/airbyte-integrations/connectors/source-amplitude/integration_tests/integration_test.py old mode 100644 new mode 100755 index 63ad0a9063ec9..cb2230f94ead1 --- a/airbyte-integrations/connectors/source-amplitude/integration_tests/integration_test.py +++ b/airbyte-integrations/connectors/source-amplitude/integration_tests/integration_test.py @@ -8,6 +8,7 @@ import pytest from airbyte_cdk.models import SyncMode +from airbyte_cdk.sources.declarative.types import StreamSlice from source_amplitude.source import SourceAmplitude @@ -85,7 +86,8 @@ def test_empty_streams(stream_fixture_name, url, expected_records, request, requ due to free subscription plan for the sandbox """ stream = request.getfixturevalue(stream_fixture_name) - records_reader = stream.read_records(sync_mode=SyncMode.full_refresh, cursor_field=None, stream_slice={}) + empty_stream_slice = StreamSlice(partition={}, cursor_slice={}) + records_reader = stream.read_records(sync_mode=SyncMode.full_refresh, cursor_field=None, stream_slice=empty_stream_slice) requests_mock.get(url, status_code=200, json={"data": expected_records}) # Sort actual and expected records by ID. diff --git a/airbyte-integrations/connectors/source-amplitude/metadata.yaml b/airbyte-integrations/connectors/source-amplitude/metadata.yaml index 59a49c0f5fc0b..9a175d6a31b32 100644 --- a/airbyte-integrations/connectors/source-amplitude/metadata.yaml +++ b/airbyte-integrations/connectors/source-amplitude/metadata.yaml @@ -11,12 +11,13 @@ data: connectorSubtype: api connectorType: source definitionId: fa9f58c6-2d03-4237-aaa4-07d75e0c1396 - dockerImageTag: 0.3.7 + dockerImageTag: 0.3.8 dockerRepository: airbyte/source-amplitude documentationUrl: https://docs.airbyte.com/integrations/sources/amplitude githubIssueLabel: source-amplitude icon: amplitude.svg license: MIT + maxSecondsBetweenMessages: 86400 name: Amplitude remoteRegistries: pypi: diff --git a/airbyte-integrations/connectors/source-amplitude/poetry.lock b/airbyte-integrations/connectors/source-amplitude/poetry.lock index 647f6526bac0f..47a96853cf9f1 100644 --- a/airbyte-integrations/connectors/source-amplitude/poetry.lock +++ b/airbyte-integrations/connectors/source-amplitude/poetry.lock @@ -1,18 +1,18 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.52.0" +version = "0.69.1" description = "A framework for writing Airbyte Connectors." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte-cdk-0.52.0.tar.gz", hash = "sha256:760b5bb279e5b06455bc33c9744dd9facbc0b203ccc4ac48e1e2877807e3c845"}, - {file = "airbyte_cdk-0.52.0-py3-none-any.whl", hash = "sha256:bf7c82b2a7ec3cc4ddedd17cd6cd6e2385991af965729f23ffbdb0515388a8e2"}, + {file = "airbyte-cdk-0.69.1.tar.gz", hash = "sha256:f30fc6d3756b43b5fc1e50f076861de42f032efde803df07083d1e17b94ca0d8"}, + {file = "airbyte_cdk-0.69.1-py3-none-any.whl", hash = "sha256:dfb3008cbf609c907f8a03c4625de3540812734d8570dec83eae8940929ead4e"}, ] [package.dependencies] -airbyte-protocol-models = "0.4.2" +airbyte-protocol-models = "0.5.1" backoff = "*" cachetools = "*" Deprecated = ">=1.2,<2.0" @@ -22,8 +22,9 @@ isodate = ">=0.6.1,<0.7.0" Jinja2 = ">=3.1.2,<3.2.0" jsonref = ">=0.2,<1.0" jsonschema = ">=3.2.0,<3.3.0" -pendulum = "*" +pendulum = "<3.0.0" pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" python-dateutil = "*" PyYAML = ">=6.0.1" requests = "*" @@ -31,20 +32,20 @@ requests-cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pyarrow (==12.0.1)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "pyarrow (==12.0.1)"] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] [[package]] name = "airbyte-protocol-models" -version = "0.4.2" +version = "0.5.1" description = "Declares the Airbyte Protocol." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte_protocol_models-0.4.2-py3-none-any.whl", hash = "sha256:d3bbb14d4af9483bd7b08f5eb06f87e7113553bf4baed3998af95be873a0d821"}, - {file = "airbyte_protocol_models-0.4.2.tar.gz", hash = "sha256:67b149d4812f8fdb88396b161274aa73cf0e16f22e35ce44f2bfc4d47e51915c"}, + {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, + {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, ] [package.dependencies] @@ -103,13 +104,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -466,113 +467,48 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] name = "pendulum" -version = "3.0.0" +version = "2.1.2" description = "Python datetimes made easy" optional = false -python-versions = ">=3.8" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ - {file = "pendulum-3.0.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2cf9e53ef11668e07f73190c805dbdf07a1939c3298b78d5a9203a86775d1bfd"}, - {file = "pendulum-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fb551b9b5e6059377889d2d878d940fd0bbb80ae4810543db18e6f77b02c5ef6"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c58227ac260d5b01fc1025176d7b31858c9f62595737f350d22124a9a3ad82d"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60fb6f415fea93a11c52578eaa10594568a6716602be8430b167eb0d730f3332"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b69f6b4dbcb86f2c2fe696ba991e67347bcf87fe601362a1aba6431454b46bde"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:138afa9c373ee450ede206db5a5e9004fd3011b3c6bbe1e57015395cd076a09f"}, - {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:83d9031f39c6da9677164241fd0d37fbfc9dc8ade7043b5d6d62f56e81af8ad2"}, - {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0c2308af4033fa534f089595bcd40a95a39988ce4059ccd3dc6acb9ef14ca44a"}, - {file = "pendulum-3.0.0-cp310-none-win_amd64.whl", hash = "sha256:9a59637cdb8462bdf2dbcb9d389518c0263799189d773ad5c11db6b13064fa79"}, - {file = "pendulum-3.0.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3725245c0352c95d6ca297193192020d1b0c0f83d5ee6bb09964edc2b5a2d508"}, - {file = "pendulum-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6c035f03a3e565ed132927e2c1b691de0dbf4eb53b02a5a3c5a97e1a64e17bec"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:597e66e63cbd68dd6d58ac46cb7a92363d2088d37ccde2dae4332ef23e95cd00"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99a0f8172e19f3f0c0e4ace0ad1595134d5243cf75985dc2233e8f9e8de263ca"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:77d8839e20f54706aed425bec82a83b4aec74db07f26acd039905d1237a5e1d4"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afde30e8146292b059020fbc8b6f8fd4a60ae7c5e6f0afef937bbb24880bdf01"}, - {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:660434a6fcf6303c4efd36713ca9212c753140107ee169a3fc6c49c4711c2a05"}, - {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dee9e5a48c6999dc1106eb7eea3e3a50e98a50651b72c08a87ee2154e544b33e"}, - {file = "pendulum-3.0.0-cp311-none-win_amd64.whl", hash = "sha256:d4cdecde90aec2d67cebe4042fd2a87a4441cc02152ed7ed8fb3ebb110b94ec4"}, - {file = "pendulum-3.0.0-cp311-none-win_arm64.whl", hash = "sha256:773c3bc4ddda2dda9f1b9d51fe06762f9200f3293d75c4660c19b2614b991d83"}, - {file = "pendulum-3.0.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:409e64e41418c49f973d43a28afe5df1df4f1dd87c41c7c90f1a63f61ae0f1f7"}, - {file = "pendulum-3.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a38ad2121c5ec7c4c190c7334e789c3b4624798859156b138fcc4d92295835dc"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fde4d0b2024b9785f66b7f30ed59281bd60d63d9213cda0eb0910ead777f6d37"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b2c5675769fb6d4c11238132962939b960fcb365436b6d623c5864287faa319"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8af95e03e066826f0f4c65811cbee1b3123d4a45a1c3a2b4fc23c4b0dff893b5"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2165a8f33cb15e06c67070b8afc87a62b85c5a273e3aaa6bc9d15c93a4920d6f"}, - {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ad5e65b874b5e56bd942546ea7ba9dd1d6a25121db1c517700f1c9de91b28518"}, - {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17fe4b2c844bbf5f0ece69cfd959fa02957c61317b2161763950d88fed8e13b9"}, - {file = "pendulum-3.0.0-cp312-none-win_amd64.whl", hash = "sha256:78f8f4e7efe5066aca24a7a57511b9c2119f5c2b5eb81c46ff9222ce11e0a7a5"}, - {file = "pendulum-3.0.0-cp312-none-win_arm64.whl", hash = "sha256:28f49d8d1e32aae9c284a90b6bb3873eee15ec6e1d9042edd611b22a94ac462f"}, - {file = "pendulum-3.0.0-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:d4e2512f4e1a4670284a153b214db9719eb5d14ac55ada5b76cbdb8c5c00399d"}, - {file = "pendulum-3.0.0-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:3d897eb50883cc58d9b92f6405245f84b9286cd2de6e8694cb9ea5cb15195a32"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e169cc2ca419517f397811bbe4589cf3cd13fca6dc38bb352ba15ea90739ebb"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f17c3084a4524ebefd9255513692f7e7360e23c8853dc6f10c64cc184e1217ab"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:826d6e258052715f64d05ae0fc9040c0151e6a87aae7c109ba9a0ed930ce4000"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2aae97087872ef152a0c40e06100b3665d8cb86b59bc8471ca7c26132fccd0f"}, - {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ac65eeec2250d03106b5e81284ad47f0d417ca299a45e89ccc69e36130ca8bc7"}, - {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a5346d08f3f4a6e9e672187faa179c7bf9227897081d7121866358af369f44f9"}, - {file = "pendulum-3.0.0-cp37-none-win_amd64.whl", hash = "sha256:235d64e87946d8f95c796af34818c76e0f88c94d624c268693c85b723b698aa9"}, - {file = "pendulum-3.0.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:6a881d9c2a7f85bc9adafcfe671df5207f51f5715ae61f5d838b77a1356e8b7b"}, - {file = "pendulum-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d7762d2076b9b1cb718a6631ad6c16c23fc3fac76cbb8c454e81e80be98daa34"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e8e36a8130819d97a479a0e7bf379b66b3b1b520e5dc46bd7eb14634338df8c"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7dc843253ac373358ffc0711960e2dd5b94ab67530a3e204d85c6e8cb2c5fa10"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a78ad3635d609ceb1e97d6aedef6a6a6f93433ddb2312888e668365908c7120"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b30a137e9e0d1f751e60e67d11fc67781a572db76b2296f7b4d44554761049d6"}, - {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c95984037987f4a457bb760455d9ca80467be792236b69d0084f228a8ada0162"}, - {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d29c6e578fe0f893766c0d286adbf0b3c726a4e2341eba0917ec79c50274ec16"}, - {file = "pendulum-3.0.0-cp38-none-win_amd64.whl", hash = "sha256:deaba8e16dbfcb3d7a6b5fabdd5a38b7c982809567479987b9c89572df62e027"}, - {file = "pendulum-3.0.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b11aceea5b20b4b5382962b321dbc354af0defe35daa84e9ff3aae3c230df694"}, - {file = "pendulum-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a90d4d504e82ad236afac9adca4d6a19e4865f717034fc69bafb112c320dcc8f"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:825799c6b66e3734227756fa746cc34b3549c48693325b8b9f823cb7d21b19ac"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad769e98dc07972e24afe0cff8d365cb6f0ebc7e65620aa1976fcfbcadc4c6f3"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6fc26907eb5fb8cc6188cc620bc2075a6c534d981a2f045daa5f79dfe50d512"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c717eab1b6d898c00a3e0fa7781d615b5c5136bbd40abe82be100bb06df7a56"}, - {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3ddd1d66d1a714ce43acfe337190be055cdc221d911fc886d5a3aae28e14b76d"}, - {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:822172853d7a9cf6da95d7b66a16c7160cb99ae6df55d44373888181d7a06edc"}, - {file = "pendulum-3.0.0-cp39-none-win_amd64.whl", hash = "sha256:840de1b49cf1ec54c225a2a6f4f0784d50bd47f68e41dc005b7f67c7d5b5f3ae"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3b1f74d1e6ffe5d01d6023870e2ce5c2191486928823196f8575dcc786e107b1"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:729e9f93756a2cdfa77d0fc82068346e9731c7e884097160603872686e570f07"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e586acc0b450cd21cbf0db6bae386237011b75260a3adceddc4be15334689a9a"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22e7944ffc1f0099a79ff468ee9630c73f8c7835cd76fdb57ef7320e6a409df4"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fa30af36bd8e50686846bdace37cf6707bdd044e5cb6e1109acbad3277232e04"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:440215347b11914ae707981b9a57ab9c7b6983ab0babde07063c6ee75c0dc6e7"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:314c4038dc5e6a52991570f50edb2f08c339debdf8cea68ac355b32c4174e820"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5acb1d386337415f74f4d1955c4ce8d0201978c162927d07df8eb0692b2d8533"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a789e12fbdefaffb7b8ac67f9d8f22ba17a3050ceaaa635cd1cc4645773a4b1e"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:860aa9b8a888e5913bd70d819306749e5eb488e6b99cd6c47beb701b22bdecf5"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:5ebc65ea033ef0281368217fbf59f5cb05b338ac4dd23d60959c7afcd79a60a0"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d9fef18ab0386ef6a9ac7bad7e43ded42c83ff7ad412f950633854f90d59afa8"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1c134ba2f0571d0b68b83f6972e2307a55a5a849e7dac8505c715c531d2a8795"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:385680812e7e18af200bb9b4a49777418c32422d05ad5a8eb85144c4a285907b"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eec91cd87c59fb32ec49eb722f375bd58f4be790cae11c1b70fac3ee4f00da0"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4386bffeca23c4b69ad50a36211f75b35a4deb6210bdca112ac3043deb7e494a"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dfbcf1661d7146d7698da4b86e7f04814221081e9fe154183e34f4c5f5fa3bf8"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:04a1094a5aa1daa34a6b57c865b25f691848c61583fb22722a4df5699f6bf74c"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5b0ec85b9045bd49dd3a3493a5e7ddfd31c36a2a60da387c419fa04abcaecb23"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0a15b90129765b705eb2039062a6daf4d22c4e28d1a54fa260892e8c3ae6e157"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:bb8f6d7acd67a67d6fedd361ad2958ff0539445ef51cbe8cd288db4306503cd0"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd69b15374bef7e4b4440612915315cc42e8575fcda2a3d7586a0d88192d0c88"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc00f8110db6898360c53c812872662e077eaf9c75515d53ecc65d886eec209a"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:83a44e8b40655d0ba565a5c3d1365d27e3e6778ae2a05b69124db9e471255c4a"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1a3604e9fbc06b788041b2a8b78f75c243021e0f512447806a6d37ee5214905d"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:92c307ae7accebd06cbae4729f0ba9fa724df5f7d91a0964b1b972a22baa482b"}, - {file = "pendulum-3.0.0.tar.gz", hash = "sha256:5d034998dea404ec31fae27af6b22cff1708f830a1ed7353be4d1019bb9f584e"}, + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, ] [package.dependencies] -python-dateutil = ">=2.6" -tzdata = ">=2020.1" - -[package.extras] -test = ["time-machine (>=2.6.0)"] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" [[package]] name = "platformdirs" @@ -667,6 +603,21 @@ typing-extensions = ">=4.2.0" dotenv = ["python-dotenv (>=0.10.4)"] email = ["email-validator (>=1.0.3)"] +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + [[package]] name = "pyrsistent" version = "0.20.0" @@ -751,18 +702,29 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] six = ">=1.5" +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + [[package]] name = "pyyaml" version = "6.0.1" @@ -846,13 +808,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -864,15 +826,15 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" @@ -895,19 +857,19 @@ test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "tes [[package]] name = "setuptools" -version = "69.1.0" +version = "69.1.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.1.1-py3-none-any.whl", hash = "sha256:02fa291a0471b3a18b2b2481ed902af520c69e8ae0919c13da936542754b4c56"}, + {file = "setuptools-69.1.1.tar.gz", hash = "sha256:5c0806c7d9af348e6dd3777b4f4dbb42c7ad85b190104837488eab9a7c945cf8"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -933,24 +895,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, -] - -[[package]] -name = "tzdata" -version = "2024.1" -description = "Provider of IANA time zone data" -optional = false -python-versions = ">=2" -files = [ - {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, - {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -969,13 +920,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1080,4 +1031,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "71149e8c9b376cbd538e039f53fb1be4ceb6562766a6221a6a95d15a2dab08e3" +content-hash = "a7a96e2b3330d2b39e398d386ac5724f0ddb92f7862e5029789b59942d9ba36d" diff --git a/airbyte-integrations/connectors/source-amplitude/pyproject.toml b/airbyte-integrations/connectors/source-amplitude/pyproject.toml index e610b3f4642f4..65d9582863d78 100644 --- a/airbyte-integrations/connectors/source-amplitude/pyproject.toml +++ b/airbyte-integrations/connectors/source-amplitude/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "0.3.7" +version = "0.3.8" name = "source-amplitude" description = "Source implementation for Amplitude." authors = [ "Airbyte ",] @@ -17,7 +17,7 @@ include = "source_amplitude" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "==0.52.0" +airbyte-cdk = "^0" [tool.poetry.scripts] source-amplitude = "source_amplitude.run:run" diff --git a/airbyte-integrations/connectors/source-avni/README.md b/airbyte-integrations/connectors/source-avni/README.md new file mode 100644 index 0000000000000..075de8d477962 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/README.md @@ -0,0 +1,82 @@ +# Avni Source + +This is the repository for the Avni configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.com/integrations/sources/avni). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-avni:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/sources/avni) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_avni/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source avni test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-avni:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-avni:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-avni:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-avni:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-avni:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-avni:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. + +To run your integration tests with Docker, run: +``` +./acceptance-test-docker.sh +``` + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-avni:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-avni:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connector-templates/source-python-http-api/integration_tests/__init__.py b/airbyte-integrations/connectors/source-avni/__init__.py similarity index 100% rename from airbyte-integrations/connector-templates/source-python-http-api/integration_tests/__init__.py rename to airbyte-integrations/connectors/source-avni/__init__.py diff --git a/airbyte-integrations/connectors/source-avni/acceptance-test-config.yml b/airbyte-integrations/connectors/source-avni/acceptance-test-config.yml new file mode 100644 index 0000000000000..ffd159d73dc17 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/acceptance-test-config.yml @@ -0,0 +1,31 @@ +# See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-avni:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_avni/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + incremental: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + future_state: + future_state_path: "integration_tests/abnormal_state.json" + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-avni/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-avni/acceptance-test-docker.sh new file mode 100755 index 0000000000000..b6d65deeccb43 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/acceptance-test-docker.sh @@ -0,0 +1,3 @@ +#!/usr/bin/env sh + +source "$(git rev-parse --show-toplevel)/airbyte-integrations/bases/connector-acceptance-test/acceptance-test-docker.sh" diff --git a/airbyte-integrations/connectors/source-avni/icon.svg b/airbyte-integrations/connectors/source-avni/icon.svg new file mode 100644 index 0000000000000..df3f9ddda0c96 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/icon.svg @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/airbyte-integrations/connector-templates/source-python-http-api/unit_tests/__init__.py b/airbyte-integrations/connectors/source-avni/integration_tests/__init__.py similarity index 100% rename from airbyte-integrations/connector-templates/source-python-http-api/unit_tests/__init__.py rename to airbyte-integrations/connectors/source-avni/integration_tests/__init__.py diff --git a/airbyte-integrations/connectors/source-avni/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-avni/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..195c2660ca8b7 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/integration_tests/abnormal_state.json @@ -0,0 +1,14 @@ +{ + "subjects": { + "last_modified_at": "2200-06-27T04:18:36.914Z" + }, + "program_enrolments": { + "last_modified_at": "2200-06-27T04:18:36.914Z" + }, + "program_encounters": { + "last_modified_at": "2200-06-27T04:18:36.914Z" + }, + "encounters": { + "last_modified_at": "2200-06-27T04:18:36.914Z" + } +} diff --git a/airbyte-integrations/connector-templates/source-java-jdbc/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-avni/integration_tests/acceptance.py similarity index 100% rename from airbyte-integrations/connector-templates/source-java-jdbc/integration_tests/acceptance.py rename to airbyte-integrations/connectors/source-avni/integration_tests/acceptance.py diff --git a/airbyte-integrations/connectors/source-avni/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-avni/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..1e435deb4481f --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/integration_tests/configured_catalog.json @@ -0,0 +1,68 @@ +{ + "streams": [ + { + "stream": { + "name": "subjects", + "json_schema": { + "properties": {} + }, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["last_modified_at"], + "source_defined_primary_key": [["ID"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append_dedup", + "cursor_field": ["last_modified_at"], + "primary_key": [["ID"]] + }, + { + "stream": { + "name": "program_enrolments", + "json_schema": { + "properties": {} + }, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["last_modified_at"], + "source_defined_primary_key": [["ID"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append_dedup", + "cursor_field": ["last_modified_at"], + "primary_key": [["ID"]] + }, + { + "stream": { + "name": "program_encounters", + "json_schema": { + "properties": {} + }, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["last_modified_at"], + "source_defined_primary_key": [["ID"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append_dedup", + "cursor_field": ["last_modified_at"], + "primary_key": [["ID"]] + }, + { + "stream": { + "name": "encounters", + "json_schema": { + "properties": {} + }, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["last_modified_at"], + "source_defined_primary_key": [["ID"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append_dedup", + "cursor_field": ["last_modified_at"], + "primary_key": [["ID"]] + } + ] +} diff --git a/airbyte-integrations/connectors/source-avni/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-avni/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..6ab0009045de1 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/integration_tests/invalid_config.json @@ -0,0 +1,5 @@ +{ + "username": "avni", + "password": "test", + "start_date": "2000-06-27T04:18:36.914Z" +} diff --git a/airbyte-integrations/connectors/source-avni/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-avni/integration_tests/sample_config.json new file mode 100644 index 0000000000000..37c2f075deaa4 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/integration_tests/sample_config.json @@ -0,0 +1,5 @@ +{ + "username": "Username", + "password": "password", + "start_date": "2000-06-27T04:18:36.914Z" +} diff --git a/airbyte-integrations/connectors/source-avni/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-avni/integration_tests/sample_state.json new file mode 100644 index 0000000000000..812c9b530911f --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/integration_tests/sample_state.json @@ -0,0 +1,14 @@ +{ + "subjects": { + "last_modified_at": "2000-06-27T04:18:36.914Z" + }, + "program_enrolments": { + "last_modified_at": "2000-06-27T04:18:36.914Z" + }, + "program_encounters": { + "last_modified_at": "2000-06-27T04:18:36.914Z" + }, + "encounters": { + "last_modified_at": "2000-06-27T04:18:36.914Z" + } +} diff --git a/airbyte-integrations/connectors/source-avni/main.py b/airbyte-integrations/connectors/source-avni/main.py new file mode 100644 index 0000000000000..5ab8e86addc58 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_avni import SourceAvni + +if __name__ == "__main__": + source = SourceAvni() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-avni/metadata.yaml b/airbyte-integrations/connectors/source-avni/metadata.yaml new file mode 100644 index 0000000000000..7a0ccb35aff26 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/metadata.yaml @@ -0,0 +1,32 @@ +data: + allowedHosts: + hosts: + - "*" + registries: + oss: + enabled: true + cloud: + enabled: false + remoteRegistries: + pypi: + enabled: true + packageName: airbyte-source-avni + connectorSubtype: api + connectorBuildOptions: + baseImage: docker.io/airbyte/python-connector-base:1.2.0@sha256:c22a9d97464b69d6ef01898edf3f8612dc11614f05a84984451dde195f337db9 + connectorType: source + definitionId: 5d297ac7-355e-4a04-be75-a5e7e175fc4e + dockerImageTag: 0.1.0 + dockerRepository: airbyte/source-avni + githubIssueLabel: source-avni + icon: avni.svg + license: MIT + name: Avni + releaseDate: "2024-03-21" + releaseStage: alpha + supportLevel: community + documentationUrl: https://docs.airbyte.com/integrations/sources/avni + tags: + - cdk:low-code + - language:python +metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-avni/poetry.lock b/airbyte-integrations/connectors/source-avni/poetry.lock new file mode 100644 index 0000000000000..68f4fd8bd1ca5 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/poetry.lock @@ -0,0 +1,1096 @@ +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. + +[[package]] +name = "airbyte-cdk" +version = "0.73.0" +description = "A framework for writing Airbyte Connectors." +optional = false +python-versions = ">=3.9" +files = [ + {file = "airbyte-cdk-0.73.0.tar.gz", hash = "sha256:a03e0265a8a4afb1378d285993624659d9f481404aaf69cf7c0a5ddad3568ea2"}, + {file = "airbyte_cdk-0.73.0-py3-none-any.whl", hash = "sha256:339e42a7602461073a69bf0c4e11be26a7eea3157def43ffecdf9d0d73f32c6f"}, +] + +[package.dependencies] +airbyte-protocol-models = "0.5.1" +backoff = "*" +cachetools = "*" +Deprecated = ">=1.2,<2.0" +dpath = ">=2.0.1,<2.1.0" +genson = "1.2.2" +isodate = ">=0.6.1,<0.7.0" +Jinja2 = ">=3.1.2,<3.2.0" +jsonref = ">=0.2,<1.0" +jsonschema = ">=3.2.0,<3.3.0" +pendulum = "<3.0.0" +pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" +python-dateutil = "*" +PyYAML = ">=6.0.1" +requests = "*" +requests-cache = "*" +wcmatch = "8.4" + +[package.extras] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] +vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] + +[[package]] +name = "airbyte-protocol-models" +version = "0.5.1" +description = "Declares the Airbyte Protocol." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, + {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, +] + +[package.dependencies] +pydantic = ">=1.9.2,<2.0.0" + +[[package]] +name = "atomicwrites" +version = "1.4.1" +description = "Atomic file writes." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, +] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "boto3" +version = "1.18.0" +description = "The AWS SDK for Python" +optional = false +python-versions = ">= 3.6" +files = [ + {file = "boto3-1.18.0-py3-none-any.whl", hash = "sha256:22802b3b4806cafff41ed591e578f048789a8dd0deeff07d055cd6f59a7c6076"}, + {file = "boto3-1.18.0.tar.gz", hash = "sha256:054e347824064b7cd77616f35596420eb4f6aca049ecc131a2aec23bcf4cf6ba"}, +] + +[package.dependencies] +botocore = ">=1.21.0,<1.22.0" +jmespath = ">=0.7.1,<1.0.0" +s3transfer = ">=0.5.0,<0.6.0" + +[[package]] +name = "botocore" +version = "1.21.65" +description = "Low-level, data-driven core of boto 3." +optional = false +python-versions = ">= 3.6" +files = [ + {file = "botocore-1.21.65-py3-none-any.whl", hash = "sha256:3bd0e3d6daee6afcc747d596b52158519abe1ce36f906d556b9f8b54faa081e8"}, + {file = "botocore-1.21.65.tar.gz", hash = "sha256:6437d6a3999a189e7d45b3fcd8f794a46670fb255ae670c946d3f224caa8b46a"}, +] + +[package.dependencies] +jmespath = ">=0.7.1,<1.0.0" +python-dateutil = ">=2.1,<3.0.0" +urllib3 = ">=1.25.4,<1.27" + +[package.extras] +crt = ["awscrt (==0.12.5)"] + +[[package]] +name = "bracex" +version = "2.4" +description = "Bash style brace expander." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + +[[package]] +name = "cachetools" +version = "5.3.3" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "dpath" +version = "2.0.8" +description = "Filesystem-like pathing and searching for dictionaries" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, + {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "genson" +version = "1.2.2" +description = "GenSON is a powerful, user-friendly JSON Schema generator." +optional = false +python-versions = "*" +files = [ + {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, +] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jmespath" +version = "0.10.0" +description = "JSON Matching Expressions" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "jmespath-0.10.0-py2.py3-none-any.whl", hash = "sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f"}, + {file = "jmespath-0.10.0.tar.gz", hash = "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9"}, +] + +[[package]] +name = "jsonref" +version = "0.3.0" +description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." +optional = false +python-versions = ">=3.3,<4.0" +files = [ + {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, + {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, +] + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0" +setuptools = "*" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "packaging" +version = "24.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, +] + +[[package]] +name = "pendulum" +version = "2.1.2" +description = "Python datetimes made easy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, +] + +[package.dependencies] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] + +[[package]] +name = "pydantic" +version = "1.10.14" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, + {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, + {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, + {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, + {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, + {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, + {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, + {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, + {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, + {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + +[[package]] +name = "pyrsistent" +version = "0.20.0" +description = "Persistent/Functional/Immutable data structures" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, +] + +[[package]] +name = "pytest" +version = "6.2.5" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, + {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, +] + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +toml = "*" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] + +[[package]] +name = "pytest-mock" +version = "3.12.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, + {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, +] + +[package.dependencies] +pytest = ">=5.0" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-cache" +version = "1.2.0" +description = "A persistent cache for python requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, +] + +[package.dependencies] +attrs = ">=21.2" +cattrs = ">=22.2" +platformdirs = ">=2.5" +requests = ">=2.22" +url-normalize = ">=1.4" +urllib3 = ">=1.25.5" + +[package.extras] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] +bson = ["bson (>=0.5)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] +dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] +json = ["ujson (>=5.4)"] +mongodb = ["pymongo (>=3)"] +redis = ["redis (>=3)"] +security = ["itsdangerous (>=2.0)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "requests-mock" +version = "1.11.0" +description = "Mock out responses from the requests package" +optional = false +python-versions = "*" +files = [ + {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"}, + {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"}, +] + +[package.dependencies] +requests = ">=2.3,<3" +six = "*" + +[package.extras] +fixture = ["fixtures"] +test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] + +[[package]] +name = "s3transfer" +version = "0.5.2" +description = "An Amazon S3 Transfer Manager" +optional = false +python-versions = ">= 3.6" +files = [ + {file = "s3transfer-0.5.2-py3-none-any.whl", hash = "sha256:7a6f4c4d1fdb9a2b640244008e142cbc2cd3ae34b386584ef044dd0f27101971"}, + {file = "s3transfer-0.5.2.tar.gz", hash = "sha256:95c58c194ce657a5f4fb0b9e60a84968c808888aed628cd98ab8771fe1db98ed"}, +] + +[package.dependencies] +botocore = ">=1.12.36,<2.0a.0" + +[package.extras] +crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"] + +[[package]] +name = "setuptools" +version = "69.2.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "typing-extensions" +version = "4.10.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, +] + +[[package]] +name = "url-normalize" +version = "1.4.3" +description = "URL normalization for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, + {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "urllib3" +version = "1.26.18" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"}, + {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"}, +] + +[package.extras] +brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "wcmatch" +version = "8.4" +description = "Wildcard/glob file name matcher." +optional = false +python-versions = ">=3.7" +files = [ + {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, + {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, +] + +[package.dependencies] +bracex = ">=2.1.1" + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9,<3.12" +content-hash = "08ea68b960bdcc8f59d9c3331bf6aab97d5df71bcdb73df21c0d17f4aa22ff20" diff --git a/airbyte-integrations/connectors/source-avni/pyproject.toml b/airbyte-integrations/connectors/source-avni/pyproject.toml new file mode 100644 index 0000000000000..c473512b06766 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/pyproject.toml @@ -0,0 +1,29 @@ +[build-system] +requires = [ "poetry-core>=1.0.0",] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +version = "0.1.0" +name = "source-avni" +description = "Source implementation for Avni." +authors = [ "Airbyte ",] +license = "MIT" +readme = "README.md" +documentation = "https://docs.airbyte.com/integrations/sources/avni" +homepage = "https://airbyte.com" +repository = "https://github.com/airbytehq/airbyte" +[[tool.poetry.packages]] +include = "source_avni" + +[tool.poetry.dependencies] +python = "^3.9,<3.12" +airbyte-cdk = "^0" +boto3 = "==1.18.0" + +[tool.poetry.scripts] +source-avni = "source_avni.run:run" + +[tool.poetry.group.dev.dependencies] +requests-mock = "^1.9.3" +pytest-mock = "^3.6.1" +pytest = "^6.2" diff --git a/airbyte-integrations/connectors/destination-astra/requirements.txt b/airbyte-integrations/connectors/source-avni/requirements.txt similarity index 100% rename from airbyte-integrations/connectors/destination-astra/requirements.txt rename to airbyte-integrations/connectors/source-avni/requirements.txt diff --git a/airbyte-integrations/connectors/source-avni/source_avni/__init__.py b/airbyte-integrations/connectors/source-avni/source_avni/__init__.py new file mode 100644 index 0000000000000..93eb8dbfdf506 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/source_avni/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +from .source import SourceAvni + +__all__ = ["SourceAvni"] diff --git a/airbyte-integrations/connectors/source-avni/source_avni/components.py b/airbyte-integrations/connectors/source-avni/source_avni/components.py new file mode 100644 index 0000000000000..d47cbf7654f6f --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/source_avni/components.py @@ -0,0 +1,37 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + +from dataclasses import dataclass + +import boto3 +import requests +from airbyte_cdk.sources.declarative.auth.token import BasicHttpAuthenticator + + +@dataclass +class CustomAuthenticator(BasicHttpAuthenticator): + @property + def token(self) -> str: + + username = self._username.eval(self.config) + password = self._password.eval(self.config) + + app_client_id = self.get_client_id() + + client = boto3.client("cognito-idp", region_name="ap-south-1") + response = client.initiate_auth( + ClientId=app_client_id, AuthFlow="USER_PASSWORD_AUTH", AuthParameters={"USERNAME": username, "PASSWORD": password} + ) + token = response["AuthenticationResult"]["IdToken"] + return token + + @property + def auth_header(self) -> str: + return "auth-token" + + def get_client_id(self): + + url_client = "https://app.avniproject.org/idp-details" + response = requests.get(url_client) + response.raise_for_status() + client = response.json() + return client["cognito"]["clientId"] diff --git a/airbyte-integrations/connectors/source-avni/source_avni/manifest.yaml b/airbyte-integrations/connectors/source-avni/source_avni/manifest.yaml new file mode 100644 index 0000000000000..5f8566c5b0ad7 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/source_avni/manifest.yaml @@ -0,0 +1,140 @@ +version: "0.52.0" + +definitions: + selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: ["content"] + + requester: + type: HttpRequester + url_base: "https://app.avniproject.org/api" + http_method: "GET" + authenticator: + class_name: source_avni.components.CustomAuthenticator + username: "{{config['username']}}" + password: "{{config['password']}}" + + retriever: + type: SimpleRetriever + record_selector: + $ref: "#/definitions/selector" + paginator: + type: "DefaultPaginator" + page_size_option: + type: "RequestOption" + inject_into: "request_parameter" + field_name: "size" + pagination_strategy: + type: "PageIncrement" + page_size: 100 + page_token_option: + type: "RequestOption" + inject_into: "request_parameter" + field_name: "page" + requester: + $ref: "#/definitions/requester" + + incremental_base: + type: DatetimeBasedCursor + cursor_field: "last_modified_at" + datetime_format: "%Y-%m-%dT%H:%M:%S.%fZ" + start_datetime: + datetime: "{{ config['start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%S.%fZ" + start_time_option: + field_name: "lastModifiedDateTime" + inject_into: "request_parameter" + + transformations_base: + - type: AddFields + fields: + - path: ["last_modified_at"] + value: "{{ record['audit']['Last modified at'] }}" + + base_stream: + type: DeclarativeStream + retriever: + $ref: "#/definitions/retriever" + + subjects_stream: + $ref: "#/definitions/base_stream" + name: "subjects" + primary_key: "ID" + incremental_sync: + $ref: "#/definitions/incremental_base" + transformations: + $ref: "#/definitions/transformations_base" + $parameters: + path: "/subjects" + + program_encounters_stream: + $ref: "#/definitions/base_stream" + name: "program_encounters" + primary_key: "ID" + incremental_sync: + $ref: "#/definitions/incremental_base" + transformations: + $ref: "#/definitions/transformations_base" + $parameters: + path: "/programEncounters" + + program_enrolments_stream: + $ref: "#/definitions/base_stream" + name: "program_enrolments" + primary_key: "ID" + incremental_sync: + $ref: "#/definitions/incremental_base" + transformations: + $ref: "#/definitions/transformations_base" + $parameters: + path: "/programEnrolments" + + encounters_stream: + $ref: "#/definitions/base_stream" + name: "encounters" + primary_key: "ID" + incremental_sync: + $ref: "#/definitions/incremental_base" + transformations: + $ref: "#/definitions/transformations_base" + $parameters: + path: "/encounters" + +streams: + - "#/definitions/subjects_stream" + - "#/definitions/program_enrolments_stream" + - "#/definitions/program_encounters_stream" + - "#/definitions/encounters_stream" + +check: + type: CheckStream + stream_names: + - "subjects" + +spec: + type: Spec + documentation_url: https://docs.airbyte.com/integrations/sources/avni + connection_specification: + title: Avni Spec + type: object + required: + - username + - password + - start_date + additionalProperties: true + properties: + username: + type: string + description: Your avni platform Username + password: + type: string + description: Your avni platform password + airbyte_secret: true + start_date: + type: string + default: "2000-06-23T01:30:00.000Z" + description: Specify Date and time from which you want to fetch data + examples: + - "2000-10-31T01:30:00.000Z" diff --git a/airbyte-integrations/connectors/source-avni/source_avni/run.py b/airbyte-integrations/connectors/source-avni/source_avni/run.py new file mode 100644 index 0000000000000..636ce2134b8d2 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/source_avni/run.py @@ -0,0 +1,14 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_avni import SourceAvni + + +def run(): + source = SourceAvni() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-avni/source_avni/schemas/encounters.json b/airbyte-integrations/connectors/source-avni/source_avni/schemas/encounters.json new file mode 100644 index 0000000000000..4e76035f17c07 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/source_avni/schemas/encounters.json @@ -0,0 +1,101 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "additionalProperties": true, + "properties": { + "ID": { + "type": "string" + }, + "External ID": { + "type": ["null", "string"] + }, + "Voided": { + "type": "boolean" + }, + "Encounter type": { + "type": ["null", "string"] + }, + "Subject ID": { + "type": "string" + }, + "Subject type": { + "type": "string" + }, + "Subject external ID": { + "type": ["null", "string"] + }, + "Encounter date time": { + "type": ["null", "string"], + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "Encounter location": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "X": { + "type": ["null", "number"] + }, + "Y": { + "type": ["null", "number"] + } + } + }, + "Earliest scheduled date": { + "type": ["null", "string"], + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "Max scheduled date": { + "type": ["null", "string"], + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "observations": { + "type": ["null", "object"], + "additionalProperties": true + }, + "Cancel location": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "X": { + "type": ["null", "number"] + }, + "Y": { + "type": ["null", "number"], + "example": 74.7364501 + } + } + }, + "Cancel date time": { + "type": ["null", "string"], + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "cancelObservations": { + "type": ["null", "object"], + "additionalProperties": true + }, + "last_modified_at": { + "type": "string", + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "audit": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "Created at": { + "type": ["null", "string"], + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "Last modified at": { + "type": ["null", "string"], + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "Created by": { + "type": ["null", "string"] + }, + "Last modified by": { + "type": ["null", "string"] + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-avni/source_avni/schemas/program_encounters.json b/airbyte-integrations/connectors/source-avni/source_avni/schemas/program_encounters.json new file mode 100644 index 0000000000000..ce32347b7495f --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/source_avni/schemas/program_encounters.json @@ -0,0 +1,110 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "additionalProperties": true, + "properties": { + "ID": { + "type": ["null", "string"] + }, + "External ID": { + "type": ["null", "string"] + }, + "Voided": { + "type": "boolean" + }, + "Subject ID": { + "type": ["null", "string"] + }, + "Subject type": { + "type": ["null", "string"] + }, + "Subject external ID": { + "type": ["null", "string"] + }, + "Enrolment ID": { + "type": ["null", "string"] + }, + "Enrolment external ID": { + "type": ["null", "string"] + }, + "Program": { + "type": ["null", "string"] + }, + "Encounter type": { + "type": ["null", "string"] + }, + "Encounter date time": { + "type": ["null", "string"], + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "Encounter location": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "X": { + "type": ["null", "number"] + }, + "Y": { + "type": ["null", "number"], + "example": 74.7364501 + } + } + }, + "Earliest scheduled date": { + "type": ["null", "string"], + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "Max scheduled date": { + "type": ["null", "string"], + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "observations": { + "type": ["null", "object"], + "additionalProperties": true + }, + "Cancel location": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "X": { + "type": ["null", "number"] + }, + "Y": { + "type": ["null", "number"], + "example": 74.7364501 + } + } + }, + "Cancel date time": { + "type": ["null", "string"], + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "cancelObservations": { + "type": ["null", "object"], + "additionalProperties": true + }, + "last_modified_at": { + "type": "string", + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "audit": { + "type": ["null", "object"], + "properties": { + "Created at": { + "type": ["null", "string"], + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "Last modified at": { + "type": ["null", "string"], + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "Created by": { + "type": ["null", "string"] + }, + "Last modified by": { + "type": ["null", "string"] + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-avni/source_avni/schemas/program_enrolments.json b/airbyte-integrations/connectors/source-avni/source_avni/schemas/program_enrolments.json new file mode 100644 index 0000000000000..bbfe5d262c559 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/source_avni/schemas/program_enrolments.json @@ -0,0 +1,96 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "additionalProperties": true, + "properties": { + "ID": { + "type": "string" + }, + "External ID": { + "type": ["null", "string"] + }, + "Voided": { + "type": "boolean" + }, + "Subject ID": { + "type": "string" + }, + "Subject type": { + "type": "string" + }, + "Subject external ID": { + "type": ["null", "string"] + }, + "Program": { + "type": ["null", "string"] + }, + "Enrolment datetime": { + "type": ["null", "string"] + }, + "Enrolment location": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "X": { + "type": ["null", "number"] + }, + "Y": { + "type": ["null", "number"] + } + } + }, + "Exit datetime": { + "type": ["null", "string"] + }, + "Exit location": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "X": { + "type": ["null", "number"] + }, + "Y": { + "type": ["null", "number"] + } + } + }, + "observations": { + "type": ["null", "object"], + "additionalProperties": true + }, + "exitObservations": { + "type": ["null", "object"], + "additionalProperties": true + }, + "encounters": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"] + } + }, + "last_modified_at": { + "type": "string", + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "audit": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "Created at": { + "type": ["null", "string"], + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "Last modified at": { + "type": ["null", "string"], + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "Created by": { + "type": ["null", "string"] + }, + "Last modified by": { + "type": ["null", "string"] + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-avni/source_avni/schemas/subjects.json b/airbyte-integrations/connectors/source-avni/source_avni/schemas/subjects.json new file mode 100644 index 0000000000000..9b1d74bb7f0f6 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/source_avni/schemas/subjects.json @@ -0,0 +1,120 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "additionalProperties": true, + "properties": { + "ID": { + "type": "string" + }, + "External ID": { + "type": ["null", "string"] + }, + "Voided": { + "type": "boolean" + }, + "Subject type": { + "type": ["null", "string"] + }, + "Registration location": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "X": { + "type": ["null", "number"] + }, + "Y": { + "type": ["null", "number"], + "example": 74.7364501 + } + } + }, + "Registration date": { + "type": ["null", "string"] + }, + "location": { + "type": ["null", "object"], + "additionalProperties": true + }, + "relatives": { + "type": ["null", "array"], + "items": { + "type": "object", + "additionalProperties": true, + "properties": { + "Voided": { + "type": "boolean" + }, + "Relationship type": { + "type": ["null", "string"] + }, + "Relative ID": { + "type": ["null", "string"] + }, + "Relative external ID": { + "type": ["null", "string"] + }, + "Enter date": { + "type": ["null", "string"] + }, + "Exit date": { + "type": ["null", "string"] + } + } + } + }, + "observations": { + "type": ["null", "object"], + "additionalProperties": true + }, + "encounters": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"], + "format": "uuid" + } + }, + "enrolments": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"], + "format": "uuid" + } + }, + "last_modified_at": { + "type": "string", + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "catchments": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"] + } + }, + "audit": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "Created at": { + "type": ["null", "string"], + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "Last modified at": { + "type": ["null", "string"], + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "Created by": { + "type": ["null", "string"] + }, + "Last modified by": { + "type": ["null", "string"] + } + } + }, + "Groups": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"] + } + } + } +} diff --git a/airbyte-integrations/connectors/source-avni/source_avni/source.py b/airbyte-integrations/connectors/source-avni/source_avni/source.py new file mode 100644 index 0000000000000..e6c65ceadb7d3 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/source_avni/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. +""" + + +# Declarative Source +class SourceAvni(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "manifest.yaml"}) diff --git a/airbyte-integrations/connectors/source-avni/unit_tests/test_components.py b/airbyte-integrations/connectors/source-avni/unit_tests/test_components.py new file mode 100644 index 0000000000000..49f77bed58ec9 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/unit_tests/test_components.py @@ -0,0 +1,45 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from unittest.mock import Mock, patch + +from source_avni.components import CustomAuthenticator + + +@patch('boto3.client') +def test_token_property(mock_boto3_client): + + mock_cognito_client = Mock() + mock_boto3_client.return_value = mock_cognito_client + + config= { "username": "example@gmail.com", "api_key": "api_key" } + source = CustomAuthenticator(config=config,username="example@gmail.com",password="api_key",parameters="") + source._username = Mock() + source._username.eval.return_value = "test_username" + source._password = Mock() + source._password.eval.return_value = "test_password" + source.get_client_id = Mock() + source.get_client_id.return_value = "test_client_id" + + mock_cognito_client.initiate_auth.return_value = { + "AuthenticationResult": { + "IdToken": "test_id_token" + } + } + token = source.token + mock_boto3_client.assert_called_once_with("cognito-idp", region_name="ap-south-1") + mock_cognito_client.initiate_auth.assert_called_once_with( + ClientId="test_client_id", + AuthFlow="USER_PASSWORD_AUTH", + AuthParameters={"USERNAME": "test_username", "PASSWORD": "test_password"} + ) + assert token == "test_id_token" + +def test_get_client_id(mocker): + + config= { "username": "example@gmail.com", "api_key": "api_key" } + source = CustomAuthenticator(config=config,username="example@gmail.com",password="api_key",parameters="") + client_id = source.get_client_id() + expected_length = 26 + assert len(client_id) == expected_length \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-azure-blob-storage/.coveragerc b/airbyte-integrations/connectors/source-azure-blob-storage/.coveragerc new file mode 100644 index 0000000000000..8f397ab098ee7 --- /dev/null +++ b/airbyte-integrations/connectors/source-azure-blob-storage/.coveragerc @@ -0,0 +1,3 @@ +[run] +omit = + source_azure_blob_storage/run.py \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-azure-blob-storage/README.md b/airbyte-integrations/connectors/source-azure-blob-storage/README.md index 9e2f70b6ed52c..ac99c3b6e0b51 100644 --- a/airbyte-integrations/connectors/source-azure-blob-storage/README.md +++ b/airbyte-integrations/connectors/source-azure-blob-storage/README.md @@ -1,118 +1,75 @@ -# Azure Blob Storage Source +# Azure-Blob-Storage source connector -This is the repository for the Azure Blob Storage source connector, written in Python. + +This is the repository for the Azure-Blob-Storage source connector, written in Python. For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.com/integrations/sources/azure-blob-storage). ## Local development ### Prerequisites -**To iterate on this connector, make sure to complete this prerequisites section.** +* Python (~=3.9) +* Poetry (~=1.7) - installation instructions [here](https://python-poetry.org/docs/#installation) -#### Minimum Python version required `= 3.9.0` -#### Build & Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python -m venv .venv -``` +### Generate new oauth token -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt +Tenant id should be provided by user, reason: +https://learn.microsoft.com/en-us/answers/questions/1531138/which-tenant-id-do-i-have-to-use-to-get-tokens-and + +1. GET https://login.microsoftonline.com//oauth2/v2.0/authorize + ?response_type=code + &client_id= + &scope=offline_access https://storage.azure.com/.default + &redirect_uri=http://localhost:8000/auth_flow + &response_mode=query + &state=1234 + +2. POST https://login.microsoftonline.com//oauth2/v2.0/token +client_id: +code: +redirect_uri:http://localhost:8000/auth_flow +grant_type:authorization_code +client_secret: + +### Installing the connector +From this connector directory, run: +```bash +poetry install --with dev ``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. -#### Create credentials +### Create credentials **If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/sources/azure-blob-storage) to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_azure_blob_storage/spec.yaml` file. -Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. -See `integration_tests/sample_config.json` for a sample config file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `sample_files/sample_config.json` for a sample config file. -**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source azure-blob-storage test creds` -and place them into `secrets/config.json`. ### Locally running the connector ``` -python main.py spec -python main.py check --config secrets/config.json -python main.py discover --config secrets/config.json -python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +poetry run source-azure-blob-storage spec +poetry run source-azure-blob-storage check --config secrets/config.json +poetry run source-azure-blob-storage discover --config secrets/config.json +poetry run source-azure-blob-storage read --config secrets/config.json --catalog sample_files/configured_catalog.json ``` -### Locally running the connector docker image - - - -#### Use `airbyte-ci` to build your connector -The Airbyte way of building this connector is to use our `airbyte-ci` tool. -You can follow install instructions [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md#L1). -Then running the following command will build your connector: +### Running unit tests +To run unit tests locally, from the connector directory run: +``` +poetry run pytest unit_tests +``` +### Building the docker image +1. Install [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) +2. Run the following command to build the docker image: ```bash airbyte-ci connectors --name=source-azure-blob-storage build ``` -Once the command is done, you will find your connector image in your local docker registry: `airbyte/source-azure-blob-storage:dev`. - -##### Customizing our build process -When contributing on our connector you might need to customize the build process to add a system dependency or set an env var. -You can customize our build process by adding a `build_customization.py` module to your connector. -This module should contain a `pre_connector_install` and `post_connector_install` async function that will mutate the base image and the connector container respectively. -It will be imported at runtime by our build process and the functions will be called if they exist. - -Here is an example of a `build_customization.py` module: -```python -from __future__ import annotations - -from typing import TYPE_CHECKING -if TYPE_CHECKING: - # Feel free to check the dagger documentation for more information on the Container object and its methods. - # https://dagger-io.readthedocs.io/en/sdk-python-v0.6.4/ - from dagger import Container +An image will be available on your host with the tag `airbyte/source-azure-blob-storage:dev`. -async def pre_connector_install(base_image_container: Container) -> Container: - return await base_image_container.with_env_variable("MY_PRE_BUILD_ENV_VAR", "my_pre_build_env_var_value") - -async def post_connector_install(connector_container: Container) -> Container: - return await connector_container.with_env_variable("MY_POST_BUILD_ENV_VAR", "my_post_build_env_var_value") -``` - -#### Build your own connector image -This connector is built using our dynamic built process in `airbyte-ci`. -The base image used to build it is defined within the metadata.yaml file under the `connectorBuildOptions`. -The build logic is defined using [Dagger](https://dagger.io/) [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/pipelines/builds/python_connectors.py). -It does not rely on a Dockerfile. - -If you would like to patch our connector and build your own a simple approach would be to: - -1. Create your own Dockerfile based on the latest version of the connector image. -```Dockerfile -FROM airbyte/source-azure-blob-storage:latest - -COPY . ./airbyte/integration_code -RUN pip install ./airbyte/integration_code - -# The entrypoint and default env vars are already set in the base image -# ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -# ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -``` -Please use this as an example. This is not optimized. - -2. Build your image: -```bash -docker build -t airbyte/source-azure-blob-storage:dev . -# Running the spec command against your patched connector -docker run airbyte/source-azure-blob-storage:dev spec -``` -#### Run +### Running as a docker container Then run any of the connector commands as follows: ``` docker run --rm airbyte/source-azure-blob-storage:dev spec @@ -121,29 +78,34 @@ docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-azure-blob-storage:dev docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-azure-blob-storage:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json ``` -## Testing +### Running our CI test suite You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): ```bash airbyte-ci connectors --name=source-azure-blob-storage test ``` ### Customizing acceptance Tests -Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. +Customize `acceptance-test-config.yml` file to configure acceptance tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. -## Dependency Management -All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. -We split dependencies between two groups, dependencies that are: -* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. -* required for the testing need to go to `TEST_REQUIREMENTS` list +### Dependency Management +All of your dependencies should be managed via Poetry. +To add a new dependency, run: +```bash +poetry add +``` + +Please commit the changes to `pyproject.toml` and `poetry.lock` files. -### Publishing a new version of the connector +## Publishing a new version of the connector You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? 1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-azure-blob-storage test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). +2. Bump the connector version (please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors)): + - bump the `dockerImageTag` value in in `metadata.yaml` + - bump the `version` value in `pyproject.toml` 3. Make sure the `metadata.yaml` content is up to date. -4. Make the connector documentation and its changelog is up to date (`docs/integrations/sources/azure-blob-storage.md`). +4. Make sure the connector documentation and its changelog is up to date (`docs/integrations/sources/azure-blob-storage.md`). 5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). 6. Pat yourself on the back for being an awesome contributor. 7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. - +8. Once your PR is merged, the new version of the connector will be automatically published to Docker Hub and our connector registry. \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-azure-blob-storage/acceptance-test-config.yml b/airbyte-integrations/connectors/source-azure-blob-storage/acceptance-test-config.yml index 71d40148b88f3..0d69624f3c795 100644 --- a/airbyte-integrations/connectors/source-azure-blob-storage/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-azure-blob-storage/acceptance-test-config.yml @@ -9,7 +9,7 @@ acceptance_tests: expect_records: path: integration_tests/expected_records/csv_custom_encoding.jsonl exact_order: true - - config_path: secrets/csv_custom_format_config.json + - config_path: secrets/csv_custom_format_encoding_config.json expect_records: path: integration_tests/expected_records/csv_custom_format.jsonl exact_order: true @@ -64,7 +64,7 @@ acceptance_tests: status: succeed - config_path: secrets/csv_custom_encoding_config.json status: succeed - - config_path: secrets/csv_custom_format_config.json + - config_path: secrets/csv_custom_format_encoding_config.json status: succeed - config_path: secrets/csv_user_schema_config.json status: succeed @@ -92,7 +92,7 @@ acceptance_tests: tests: - config_path: secrets/config.json - config_path: secrets/csv_custom_encoding_config.json - - config_path: secrets/csv_custom_format_config.json + - config_path: secrets/csv_custom_format_encoding_config.json - config_path: secrets/csv_user_schema_config.json - config_path: secrets/csv_no_header_config.json - config_path: secrets/csv_skip_rows_config.json diff --git a/airbyte-integrations/connectors/source-azure-blob-storage/build_customization.py b/airbyte-integrations/connectors/source-azure-blob-storage/build_customization.py new file mode 100644 index 0000000000000..ef8859cc8c2bc --- /dev/null +++ b/airbyte-integrations/connectors/source-azure-blob-storage/build_customization.py @@ -0,0 +1,23 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + +from __future__ import annotations + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from dagger import Container + + +async def pre_connector_install(base_image_container: Container) -> Container: + """ + Docker compose is required to run the integration tests so we install Docker on top of the base image. + """ + return ( + base_image_container.with_exec(["sh", "-c", "apt-get update && apt-get install -y curl jq"]) + # Download install-docker.sh script + .with_exec(["curl", "-fsSL", "https://get.docker.com", "-o", "/tmp/install-docker.sh"]) + # Run the install-docker.sh script with a pinned Docker version + .with_exec(["sh", "/tmp/install-docker.sh", "--version", "25.0"]) + # Remove the install-docker.sh script + .with_exec(["rm", "/tmp/install-docker.sh"]) + ) diff --git a/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/configs/config_integration_avro.json b/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/configs/config_integration_avro.json new file mode 100644 index 0000000000000..e3b547a791f2f --- /dev/null +++ b/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/configs/config_integration_avro.json @@ -0,0 +1,38 @@ +{ + "azure_blob_storage_endpoint": "http://localhost:10000/account1", + "azure_blob_storage_account_name": "account1", + "credentials": { + "auth_type": "storage_account_key", + "azure_blob_storage_account_key": "key1" + }, + "azure_blob_storage_container_name": "testcontainer", + "streams": [ + { + "name": "users", + "file_type": "avro", + "globs": ["**/test_avro_users*.avro"], + "validation_policy": "Emit Record", + "format": { + "filetype": "avro" + } + }, + { + "name": "purchases", + "file_type": "avro", + "globs": ["**/test_avro_purchases*.avro"], + "validation_policy": "Emit Record", + "format": { + "filetype": "avro" + } + }, + { + "name": "products", + "file_type": "avro", + "globs": ["**/test_avro_products*.avro"], + "validation_policy": "Emit Record", + "format": { + "filetype": "avro" + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/configs/config_integration_csv.json b/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/configs/config_integration_csv.json new file mode 100644 index 0000000000000..67098b73a0709 --- /dev/null +++ b/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/configs/config_integration_csv.json @@ -0,0 +1,131 @@ +{ + "azure_blob_storage_endpoint": "http://localhost:10000/account1", + "azure_blob_storage_account_name": "account1", + "credentials": { + "auth_type": "storage_account_key", + "azure_blob_storage_account_key": "key1" + }, + "azure_blob_storage_container_name": "testcontainer", + "streams": [ + { + "name": "users", + "file_type": "csv", + "globs": ["**/test_csv_users*.csv"], + "legacy_prefix": "", + "validation_policy": "Emit Record", + "format": { + "filetype": "csv", + "delimiter": ",", + "quote_char": "\"", + "double_quote": true, + "null_values": [ + "", + "#N/A", + "#N/A N/A", + "#NA", + "-1.#IND", + "-1.#QNAN", + "-NaN", + "-nan", + "1.#IND", + "1.#QNAN", + "N/A", + "NA", + "NULL", + "NaN", + "n/a", + "nan", + "null" + ], + "true_values": ["1", "True", "TRUE", "true"], + "false_values": ["0", "False", "FALSE", "false"], + "inference_type": "Primitive Types Only", + "strings_can_be_null": false, + "encoding": "utf8", + "header_definition": { + "header_definition_type": "From CSV" + } + } + }, + { + "name": "purchases", + "file_type": "csv", + "globs": ["**/test_csv_purchases*.csv"], + "legacy_prefix": "", + "validation_policy": "Emit Record", + "format": { + "filetype": "csv", + "delimiter": ",", + "quote_char": "\"", + "double_quote": true, + "null_values": [ + "", + "#N/A", + "#N/A N/A", + "#NA", + "-1.#IND", + "-1.#QNAN", + "-NaN", + "-nan", + "1.#IND", + "1.#QNAN", + "N/A", + "NA", + "NULL", + "NaN", + "n/a", + "nan", + "null" + ], + "true_values": ["1", "True", "TRUE", "true"], + "false_values": ["0", "False", "FALSE", "false"], + "inference_type": "Primitive Types Only", + "strings_can_be_null": false, + "encoding": "utf8", + "header_definition": { + "header_definition_type": "From CSV" + } + } + }, + { + "name": "products", + "file_type": "csv", + "globs": ["**/test_csv_products*.csv"], + "legacy_prefix": "", + "validation_policy": "Emit Record", + "format": { + "filetype": "csv", + "delimiter": ",", + "quote_char": "\"", + "double_quote": true, + "null_values": [ + "", + "#N/A", + "#N/A N/A", + "#NA", + "-1.#IND", + "-1.#QNAN", + "-NaN", + "-nan", + "1.#IND", + "1.#QNAN", + "N/A", + "NA", + "NULL", + "NaN", + "n/a", + "nan", + "null" + ], + "true_values": ["1", "True", "TRUE", "true"], + "false_values": ["0", "False", "FALSE", "false"], + "inference_type": "Primitive Types Only", + "strings_can_be_null": false, + "encoding": "utf8", + "header_definition": { + "header_definition_type": "From CSV" + } + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/configs/config_integration_jsonl.json b/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/configs/config_integration_jsonl.json new file mode 100644 index 0000000000000..b17a2e14d33db --- /dev/null +++ b/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/configs/config_integration_jsonl.json @@ -0,0 +1,44 @@ +{ + "azure_blob_storage_endpoint": "http://localhost:10000/account1", + "azure_blob_storage_account_name": "account1", + "credentials": { + "auth_type": "storage_account_key", + "azure_blob_storage_account_key": "key1" + }, + "azure_blob_storage_container_name": "testcontainer", + "streams": [ + { + "name": "users", + "file_type": "jsonl", + "globs": ["**/test_jsonl_users*.jsonl"], + "legacy_prefix": "", + "newlines_in_values": true, + "validation_policy": "Emit Record", + "format": { + "filetype": "jsonl" + } + }, + { + "name": "purchases", + "file_type": "jsonl", + "globs": ["**/test_jsonl_purchases*.jsonl"], + "legacy_prefix": "", + "newlines_in_values": true, + "validation_policy": "Emit Record", + "format": { + "filetype": "jsonl" + } + }, + { + "name": "products", + "file_type": "jsonl", + "globs": ["**/test_jsonl_products*.jsonl"], + "legacy_prefix": "", + "newlines_in_values": true, + "validation_policy": "Emit Record", + "format": { + "filetype": "jsonl" + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/configs/config_integration_parquet.json b/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/configs/config_integration_parquet.json new file mode 100644 index 0000000000000..eb0d9d808f37c --- /dev/null +++ b/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/configs/config_integration_parquet.json @@ -0,0 +1,38 @@ +{ + "azure_blob_storage_endpoint": "http://localhost:10000/account1", + "azure_blob_storage_account_name": "account1", + "credentials": { + "auth_type": "storage_account_key", + "azure_blob_storage_account_key": "key1" + }, + "azure_blob_storage_container_name": "testcontainer", + "streams": [ + { + "name": "users", + "file_type": "jsonl", + "globs": ["**/test_parquet_users*.parquet"], + "validation_policy": "Emit Record", + "format": { + "filetype": "parquet" + } + }, + { + "name": "purchases", + "file_type": "jsonl", + "globs": ["**/test_parquet_purchases*.parquet"], + "validation_policy": "Emit Record", + "format": { + "filetype": "parquet" + } + }, + { + "name": "products", + "file_type": "jsonl", + "globs": ["**/test_parquet_products*.parquet"], + "validation_policy": "Emit Record", + "format": { + "filetype": "parquet" + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/conftest.py b/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/conftest.py new file mode 100644 index 0000000000000..86910ce227f34 --- /dev/null +++ b/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/conftest.py @@ -0,0 +1,175 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + +import logging +import os +import subprocess +import time +import uuid +from typing import Any, Mapping + +import azure +import docker +import pytest +from airbyte_protocol.models import ConfiguredAirbyteCatalog +from azure.storage.blob import BlobServiceClient, ContainerClient +from azure.storage.blob._shared.authentication import SharedKeyCredentialPolicy +from fastavro import parse_schema, writer +from pandas import read_csv +from source_azure_blob_storage import SourceAzureBlobStorage + +from .utils import get_docker_ip, load_config + +logger = logging.getLogger("airbyte") + +JSON_TO_AVRO_TYPES = {"string": "string", "integer": "long", "number": "float", "object": "record"} + + +# Monkey patch credentials method to make it work with "global-docker-host" inside dagger +# (original method handles only localhost and 127.0.0.1 addresses) +def _format_shared_key_credential(account_name, credential): + credentials = {"account_key": "key1", "account_name": "account1"} + return SharedKeyCredentialPolicy(**credentials) + + +azure.storage.blob._shared.base_client._format_shared_key_credential = _format_shared_key_credential + + +@pytest.fixture(scope="session") +def docker_client() -> docker.client.DockerClient: + return docker.from_env() + + +def get_container_client() -> ContainerClient: + docker_ip = get_docker_ip() + blob_service_client = BlobServiceClient(f"http://{docker_ip}:10000/account1", credential="key1") + container_client = blob_service_client.get_container_client("testcontainer") + return container_client + + +def generate_random_csv_with_source_faker(): + """Generate csv files using source-faker and save output to folder: /tmp/csv""" + subprocess.run(f"{os.path.dirname(__file__)}/csv_export/main.sh") + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup_fixture(docker_client) -> None: + generate_random_csv_with_source_faker() + container = docker_client.containers.run( + image="mcr.microsoft.com/azure-storage/azurite", + command="azurite-blob --blobHost 0.0.0.0 -l /data --loose", + name=f"azurite_integration_{uuid.uuid4().hex}", + hostname="azurite", + ports={10000: ("0.0.0.0", 10000), 10001: ("0.0.0.0", 10001), 10002: ("0.0.0.0", 10002)}, + environment={"AZURITE_ACCOUNTS": "account1:key1"}, + detach=True, + ) + time.sleep(10) + container_client = get_container_client() + container_client.create_container() + + yield + + container.kill() + container.remove() + + +def upload_csv_files(container_client: ContainerClient) -> None: + """upload 30 csv files""" + for table in ("products", "purchases", "users"): + csv_large_file = open(f"/tmp/csv/{table}.csv", "rb").read() + for i in range(10): + container_client.upload_blob(f"test_csv_{table}_{i}.csv", csv_large_file, validate_content=False) + + +@pytest.fixture(name="configured_catalog") +def configured_catalog_fixture() -> ConfiguredAirbyteCatalog: + return SourceAzureBlobStorage.read_catalog(f"{os.path.dirname(__file__)}/integration_configured_catalog/configured_catalog.json") + + +@pytest.fixture(name="config_csv", scope="function") +def config_csv_fixture() -> Mapping[str, Any]: + config = load_config("config_integration_csv.json") + config["azure_blob_storage_endpoint"] = config["azure_blob_storage_endpoint"].replace("localhost", get_docker_ip()) + container_client = get_container_client() + upload_csv_files(container_client) + yield config + for blob in container_client.list_blobs(): + container_client.delete_blob(blob.name) + + +def upload_jsonl_files(container_client: ContainerClient) -> None: + """upload 30 csv files""" + for table in ("products", "purchases", "users"): + df = read_csv(f"/tmp/csv/{table}.csv") + df.to_json(f"/tmp/csv/{table}.jsonl", orient="records", lines=True) + jsonl_file = open(f"/tmp/csv/{table}.jsonl", "rb").read() + for i in range(10): + container_client.upload_blob(f"test_jsonl_{table}_{i}.jsonl", jsonl_file, validate_content=False) + + +@pytest.fixture(name="config_jsonl", scope="function") +def config_jsonl_fixture() -> Mapping[str, Any]: + config = load_config("config_integration_jsonl.json") + config["azure_blob_storage_endpoint"] = config["azure_blob_storage_endpoint"].replace("localhost", get_docker_ip()) + container_client = get_container_client() + upload_jsonl_files(container_client) + yield config + for blob in container_client.list_blobs(): + container_client.delete_blob(blob.name) + + +def upload_parquet_files(container_client: ContainerClient) -> None: + """upload 30 parquet files""" + for table in ("products", "purchases", "users"): + df = read_csv(f"/tmp/csv/{table}.csv") + parquet_file = df.to_parquet() + for i in range(10): + container_client.upload_blob(f"test_parquet_{table}_{i}.parquet", parquet_file, validate_content=False) + + +@pytest.fixture(name="config_parquet", scope="function") +def config_parquet_fixture() -> Mapping[str, Any]: + config = load_config("config_integration_parquet.json") + config["azure_blob_storage_endpoint"] = config["azure_blob_storage_endpoint"].replace("localhost", get_docker_ip()) + container_client = get_container_client() + upload_parquet_files(container_client) + yield config + for blob in container_client.list_blobs(): + container_client.delete_blob(blob.name) + + +def upload_avro_files(container_client: ContainerClient, json_schemas: Mapping) -> None: + """upload 30 avro files""" + for table in ("products", "purchases", "users"): + schema = { + "name": table, + "namespace": "test", + "type": "record", + "fields": [ + { + "name": k, + "type": JSON_TO_AVRO_TYPES.get(v.get("type")[1] if isinstance(v.get("type"), list) else v.get("type")), + "default": "" if (v.get("type")[1] if isinstance(v.get("type"), list) else v.get("type")) == "string" else 0, + } + for k, v in json_schemas.get(table)["properties"].items() + ], + } + df_records = read_csv(f"/tmp/csv/{table}.csv").fillna("").to_dict("records") + parsed_schema = parse_schema(schema) + with open(f"/tmp/csv/{table}.avro", "wb") as out: + writer(out, parsed_schema, df_records) + avro_file = open(f"/tmp/csv/{table}.avro", "rb").read() + for i in range(10): + container_client.upload_blob(f"test_avro_{table}_{i}.avro", avro_file, validate_content=False) + + +@pytest.fixture(name="config_avro", scope="function") +def config_avro_fixture(configured_catalog) -> Mapping[str, Any]: + schemas = {x.stream.name: x.stream.json_schema for x in configured_catalog.streams} + config = load_config("config_integration_avro.json") + config["azure_blob_storage_endpoint"] = config["azure_blob_storage_endpoint"].replace("localhost", get_docker_ip()) + container_client = get_container_client() + upload_avro_files(container_client, schemas) + yield config + for blob in container_client.list_blobs(): + container_client.delete_blob(blob.name) diff --git a/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/csv_export/README.md b/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/csv_export/README.md new file mode 100644 index 0000000000000..cc1e11d8d426e --- /dev/null +++ b/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/csv_export/README.md @@ -0,0 +1,75 @@ +# Python Source CSV Export + +This collection of tools is used to run the source and capture it's AirbyteMessages and convert them into CSV files. This is useful if you want to manually inspect this data or load it into a database manually. + +To be fast, we make use of parallel processing per-stream and only using command-line tools. This works by the main file (`main.sh`) running the source via python and tee-ing the output of RECORDS to sub-scripts which use `jq` to convert the records into CSV-delimited output, which we finally write to disk. + +As we read the connector config files, e.g. `--config secrets/config.json --state secrets/state.json --catalog integration_tests/configured_catalog.json`, you can manually step forward your sync if you need to read and store the input in chunks. + +## The road to 1TB of faker data + +There's commentary on this at https://github.com/airbytehq/airbyte/pull/20558, along with some cool SQL tricks. + +- 2 Billion faker users for 1TB: `10,000,000*(1024/5.02) = 2,039,840,637` +- 200 Million faker users for 100GB: `10,000,000*(100/5.02) = 199,203,187` +- 20 Million faker users for 10GB: `10,000,000*(10/5.02) = 19,920,318` + +But let's assume we don't have 1TB of local hard disk. So, we want to make 10 chunks of data, each around 100GB in size. + +**`config.json`** + +```json +{ + "count": 2039840637, + "seed": 0 +} +``` + +**`state.json`** + +At the end of every sync, increment the `id` in the users stream and the `user_id` in the purchases stream by `203984064` + +```json +[ + { + "type": "STREAM", + "stream": { + "stream_state": { + "id": 0 + }, + "stream_descriptor": { + "name": "users" + } + } + }, + { + "type": "STREAM", + "stream": { + "stream_state": { + "id": 0, + "user_id": 0 + }, + "stream_descriptor": { + "name": "purchases" + } + } + }, + { + "type": "STREAM", + "stream": { + "stream_state": { + "id": 0 + }, + "stream_descriptor": { + "name": "products" + } + } + } +] +``` + +Finally, ensure that you've opted-into all the streams in `integration_tests/configured_catalog.json` + +## TODO + +- This is currently set up very manually, in that we build bash scripts for each stream and manually populate the header information. This information all already lives in the connector's catalog. We probably could build these bash files on-demand with a python script... diff --git a/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/csv_export/configured_catalog/configured_catalog.json b/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/csv_export/configured_catalog/configured_catalog.json new file mode 100644 index 0000000000000..1b6f30efdf190 --- /dev/null +++ b/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/csv_export/configured_catalog/configured_catalog.json @@ -0,0 +1,115 @@ +{ + "streams": [ + { + "stream": { + "name": "users", + "json_schema": { + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { "type": "integer" }, + "created_at": { + "type": "string", + "format": "date-time", + "airbyte_type": "timestamp_with_timezone" + }, + "updated_at": { + "type": "string", + "format": "date-time", + "airbyte_type": "timestamp_with_timezone" + }, + "name": { "type": "string" }, + "title": { "type": "string" }, + "age": { "type": "integer" }, + "email": { "type": "string" }, + "telephone": { "type": "string" }, + "gender": { "type": "string" }, + "language": { "type": "string" }, + "academic_degree": { "type": "string" }, + "nationality": { "type": "string" }, + "occupation": { "type": "string" }, + "height": { "type": "string" }, + "blood_type": { "type": "string" }, + "weight": { "type": "integer" } + } + }, + "supported_sync_modes": ["incremental", "full_refresh"], + "source_defined_cursor": true, + "default_cursor_field": ["created_at"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "purchases", + "json_schema": { + "properties": { + "id": { "type": "integer" }, + "user_id": { "type": "integer" }, + "product_id": { "type": "integer" }, + "created_at": { + "type": "string", + "format": "date-time", + "airbyte_type": "timestamp_with_timezone" + }, + "updated_at": { + "type": "string", + "format": "date-time", + "airbyte_type": "timestamp_with_timezone" + }, + "added_to_cart_at": { + "type": ["null", "string"], + "format": "date-time", + "airbyte_type": "timestamp_with_timezone" + }, + "purchased_at": { + "type": ["null", "string"], + "format": "date-time", + "airbyte_type": "timestamp_with_timezone" + }, + "returned_at": { + "type": ["null", "string"], + "format": "date-time", + "airbyte_type": "timestamp_with_timezone" + } + } + }, + "supported_sync_modes": ["incremental", "full_refresh"], + "source_defined_cursor": true, + "default_cursor_field": ["created_at"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "products", + "json_schema": { + "properties": { + "id": { "type": "integer" }, + "make": { "type": "string" }, + "model": { "type": "string" }, + "year": { "type": "integer" }, + "price": { "type": "number" }, + "created_at": { + "type": "string", + "format": "date-time", + "airbyte_type": "timestamp_with_timezone" + }, + "updated_at": { + "type": "string", + "format": "date-time", + "airbyte_type": "timestamp_with_timezone" + } + } + }, + "supported_sync_modes": ["incremental", "full_refresh"], + "source_defined_cursor": true, + "default_cursor_field": ["created_at"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/csv_export/main.sh b/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/csv_export/main.sh new file mode 100755 index 0000000000000..65b6f922f9b60 --- /dev/null +++ b/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/csv_export/main.sh @@ -0,0 +1,14 @@ +#!/usr/bin/env bash + +cd "$(dirname "$0")" + +mkdir -p /tmp/csv +cp -r $(pwd) /tmp/csv + +docker run --rm \ + -v /tmp/csv/csv_export/secret_faker:/secrets \ + -v /tmp/csv/csv_export/configured_catalog:/integration_tests \ + airbyte/source-faker:latest read \ + --config /secrets/secret_faker.json \ + --catalog /integration_tests/configured_catalog.json \ + | tee >(./purchases.sh) >(./products.sh) >(./users.sh) > /dev/null diff --git a/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/csv_export/products.sh b/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/csv_export/products.sh new file mode 100755 index 0000000000000..e0c869876d5eb --- /dev/null +++ b/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/csv_export/products.sh @@ -0,0 +1,12 @@ +#!/usr/bin/env bash + +cd "$(dirname "$0")" + +FILE="/tmp/csv/products.csv" + +rm -rf $FILE +echo "id,make,year,model,price,created_at,updated_at" >> $FILE + +jq -c 'select((.type | contains("RECORD")) and (.record.stream | contains("products"))) .record.data' \ + | jq -r '[.id, .make, .year, .model, .price, .created_at, .updated_at] | @csv' \ + >> $FILE diff --git a/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/csv_export/purchases.sh b/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/csv_export/purchases.sh new file mode 100755 index 0000000000000..af7d265a75478 --- /dev/null +++ b/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/csv_export/purchases.sh @@ -0,0 +1,13 @@ +#!/usr/bin/env bash + +cd "$(dirname "$0")" + +FILE="/tmp/csv/purchases.csv" + +rm -rf $FILE + +echo "id,product_id,user_id,added_to_cart_at,purchased_at,returned_at" >> $FILE + +jq -c 'select((.type | contains("RECORD")) and (.record.stream | contains("purchases"))) .record.data' \ + | jq -r '[.id, .product_id, .user_id, .added_to_cart_at, .purchased_at, .returned_at] | @csv' \ + >> $FILE diff --git a/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/csv_export/secret_faker/secret_faker.json b/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/csv_export/secret_faker/secret_faker.json new file mode 100644 index 0000000000000..5fcb7717c5c70 --- /dev/null +++ b/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/csv_export/secret_faker/secret_faker.json @@ -0,0 +1,6 @@ +{ + "count": 100000, + "seed": 0, + "parallelism": 2, + "always_updated": false +} diff --git a/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/csv_export/users.sh b/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/csv_export/users.sh new file mode 100755 index 0000000000000..1c70d8b7c9ba5 --- /dev/null +++ b/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/csv_export/users.sh @@ -0,0 +1,13 @@ +#!/usr/bin/env bash + +cd "$(dirname "$0")" + +FILE="/tmp/csv/users.csv" + +rm -rf $FILE + +echo "id,created_at,updated_at,name,title,age,email,telephone,gender,language,academic_degree,nationality,occupation,height,blood_type,weight" >> $FILE + +jq -c 'select((.type | contains("RECORD")) and (.record.stream | contains("users"))) .record.data' \ + | jq -r '[.id, .created_at, .updated_at, .name, .title, .age, .email, .telephone, .gender, .language, .academic_degree, .nationality, .occupation, .height, .blood_type, .weight] | @csv' \ + >> $FILE diff --git a/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/docker-compose.yaml b/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/docker-compose.yaml new file mode 100644 index 0000000000000..46fa079be0dfc --- /dev/null +++ b/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/docker-compose.yaml @@ -0,0 +1,16 @@ +version: "3.9" +services: + azurite: + image: mcr.microsoft.com/azure-storage/azurite + container_name: "azurite" + hostname: azurite + ports: + - "10000:10000" + - "10001:10001" + - "10002:10002" + volumes: + - ./data:/data + environment: + - AZURITE_ACCOUNTS=account1:key1 + - UseDevelopmentStorage=true + command: "azurite-blob --blobHost 0.0.0.0 --location /data --loose" diff --git a/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/integration_configured_catalog/configured_catalog.json b/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/integration_configured_catalog/configured_catalog.json new file mode 100644 index 0000000000000..553b6c862d853 --- /dev/null +++ b/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/integration_configured_catalog/configured_catalog.json @@ -0,0 +1,115 @@ +{ + "streams": [ + { + "stream": { + "name": "users", + "json_schema": { + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { "type": "integer" }, + "created_at": { + "type": "string", + "format": "date-time", + "airbyte_type": "timestamp_with_timezone" + }, + "updated_at": { + "type": "string", + "format": "date-time", + "airbyte_type": "timestamp_with_timezone" + }, + "name": { "type": "string" }, + "title": { "type": "string" }, + "age": { "type": "integer" }, + "email": { "type": "string" }, + "telephone": { "type": "string" }, + "gender": { "type": "string" }, + "language": { "type": "string" }, + "academic_degree": { "type": "string" }, + "nationality": { "type": "string" }, + "occupation": { "type": "string" }, + "height": { "type": "number" }, + "blood_type": { "type": "string" }, + "weight": { "type": "integer" } + } + }, + "supported_sync_modes": ["incremental", "full_refresh"], + "source_defined_cursor": true, + "default_cursor_field": ["created_at"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "purchases", + "json_schema": { + "properties": { + "id": { "type": "integer" }, + "user_id": { "type": "integer" }, + "product_id": { "type": "integer" }, + "created_at": { + "type": "string", + "format": "date-time", + "airbyte_type": "timestamp_with_timezone" + }, + "updated_at": { + "type": "string", + "format": "date-time", + "airbyte_type": "timestamp_with_timezone" + }, + "added_to_cart_at": { + "type": ["null", "string"], + "format": "date-time", + "airbyte_type": "timestamp_with_timezone" + }, + "purchased_at": { + "type": ["null", "string"], + "format": "date-time", + "airbyte_type": "timestamp_with_timezone" + }, + "returned_at": { + "type": ["null", "string"], + "format": "date-time", + "airbyte_type": "timestamp_with_timezone" + } + } + }, + "supported_sync_modes": ["incremental", "full_refresh"], + "source_defined_cursor": true, + "default_cursor_field": ["created_at"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "products", + "json_schema": { + "properties": { + "id": { "type": "integer" }, + "make": { "type": "string" }, + "model": { "type": "string" }, + "year": { "type": "integer" }, + "price": { "type": "number" }, + "created_at": { + "type": "string", + "format": "date-time", + "airbyte_type": "timestamp_with_timezone" + }, + "updated_at": { + "type": "string", + "format": "date-time", + "airbyte_type": "timestamp_with_timezone" + } + } + }, + "supported_sync_modes": ["incremental", "full_refresh"], + "source_defined_cursor": true, + "default_cursor_field": ["created_at"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/integration_test.py b/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/integration_test.py new file mode 100644 index 0000000000000..e994330ee99c5 --- /dev/null +++ b/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/integration_test.py @@ -0,0 +1,33 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + +from typing import Any, Mapping + +import pytest +from airbyte_cdk.sources.file_based.stream.cursor import DefaultFileBasedCursor +from airbyte_cdk.test.entrypoint_wrapper import read +from airbyte_protocol.models import ConfiguredAirbyteCatalog +from source_azure_blob_storage import Config, SourceAzureBlobStorage, SourceAzureBlobStorageStreamReader + + +@pytest.mark.parametrize( + "config", + [ + "config_csv", + "config_jsonl", + "config_parquet", + "config_avro", + ], +) +def test_read_files(configured_catalog: ConfiguredAirbyteCatalog, config: Mapping[str, Any], request): + """Read 2_001_000 records in 30 files""" + config = request.getfixturevalue(config) + source = SourceAzureBlobStorage( + SourceAzureBlobStorageStreamReader(), + spec_class=Config, + catalog=configured_catalog, + config=config, + state=None, + cursor_cls=DefaultFileBasedCursor, + ) + output = read(source=source, config=config, catalog=configured_catalog) + assert sum(x.state.sourceStats.recordCount for x in output.state_messages) == 2_001_000 diff --git a/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/spec.json b/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/spec.json index 81b04111ee81b..75815501c5e74 100644 --- a/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/spec.json +++ b/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/spec.json @@ -31,9 +31,9 @@ }, "globs": { "title": "Globs", + "description": "The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here.", "default": ["**"], "order": 1, - "description": "The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here.", "type": "array", "items": { "type": "string" @@ -59,8 +59,8 @@ "primary_key": { "title": "Primary Key", "description": "The column or columns (for a composite key) that serves as the unique identifier of a record. If empty, the primary key will default to the parser's default primary key.", - "type": "string", - "airbyte_hidden": true + "airbyte_hidden": true, + "type": "string" }, "days_to_sync_if_history_is_full": { "title": "Days To Sync If History Is Full", @@ -242,6 +242,12 @@ "default": "None", "airbyte_hidden": true, "enum": ["None", "Primitive Types Only"] + }, + "ignore_errors_on_fields_mismatch": { + "title": "Ignore errors on field mismatch", + "description": "Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema.", + "default": false, + "type": "boolean" } }, "required": ["filetype"] @@ -289,20 +295,20 @@ "type": "string" }, "skip_unprocessable_files": { - "type": "boolean", - "default": true, "title": "Skip Unprocessable Files", "description": "If true, skip files that cannot be parsed and pass the error message along as the _ab_source_file_parse_error field. If false, fail the sync.", - "always_show": true + "default": true, + "always_show": true, + "type": "boolean" }, "strategy": { - "type": "string", + "title": "Parsing Strategy", + "description": "The strategy used to parse documents. `fast` extracts text directly from the document which doesn't work for all files. `ocr_only` is more reliable, but slower. `hi_res` is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf", + "default": "auto", "always_show": true, "order": 0, - "default": "auto", - "title": "Parsing Strategy", "enum": ["auto", "fast", "ocr_only", "hi_res"], - "description": "The strategy used to parse documents. `fast` extracts text directly from the document which doesn't work for all files. `ocr_only` is more reliable, but slower. `hi_res` is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + "type": "string" }, "processing": { "title": "Processing", @@ -352,15 +358,81 @@ "order": 2, "type": "string" }, - "azure_blob_storage_account_key": { - "title": "Azure Blob Storage account key", - "description": "The Azure blob storage account key.", - "airbyte_secret": true, - "examples": [ - "Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd==" - ], + "credentials": { + "title": "Authentication", + "description": "Credentials for connecting to the Azure Blob Storage", + "type": "object", "order": 3, - "type": "string" + "oneOf": [ + { + "title": "Authenticate via Oauth2", + "type": "object", + "properties": { + "auth_type": { + "title": "Auth Type", + "default": "oauth2", + "const": "oauth2", + "enum": ["oauth2"], + "type": "string" + }, + "tenant_id": { + "title": "Tenant ID", + "description": "Tenant ID of the Microsoft Azure Application user", + "airbyte_secret": true, + "type": "string" + }, + "client_id": { + "title": "Client ID", + "description": "Client ID of your Microsoft developer application", + "airbyte_secret": true, + "type": "string" + }, + "client_secret": { + "title": "Client Secret", + "description": "Client Secret of your Microsoft developer application", + "airbyte_secret": true, + "type": "string" + }, + "refresh_token": { + "title": "Refresh Token", + "description": "Refresh Token of your Microsoft developer application", + "airbyte_secret": true, + "type": "string" + } + }, + "required": [ + "tenant_id", + "client_id", + "client_secret", + "refresh_token", + "auth_type" + ] + }, + { + "title": "Authenticate via Storage Account Key", + "type": "object", + "properties": { + "auth_type": { + "title": "Auth Type", + "default": "storage_account_key", + "const": "storage_account_key", + "enum": ["storage_account_key"], + "type": "string" + }, + "azure_blob_storage_account_key": { + "title": "Azure Blob Storage account key", + "description": "The Azure blob storage account key.", + "airbyte_secret": true, + "examples": [ + "Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd==" + ], + "order": 3, + "type": "string" + } + }, + "required": ["azure_blob_storage_account_key", "auth_type"] + } + ] }, "azure_blob_storage_container_name": { "title": "Azure blob storage container (Bucket) Name", @@ -380,8 +452,61 @@ "required": [ "streams", "azure_blob_storage_account_name", - "azure_blob_storage_account_key", + "credentials", "azure_blob_storage_container_name" ] + }, + "advanced_auth": { + "auth_flow_type": "oauth2.0", + "predicate_key": ["credentials", "auth_type"], + "predicate_value": "oauth2", + "oauth_config_specification": { + "oauth_user_input_from_connector_config_specification": { + "type": "object", + "additionalProperties": false, + "properties": { + "tenant_id": { + "type": "string", + "path_in_connector_config": ["credentials", "tenant_id"] + } + } + }, + "complete_oauth_output_specification": { + "type": "object", + "additionalProperties": false, + "properties": { + "refresh_token": { + "type": "string", + "path_in_connector_config": ["credentials", "refresh_token"] + } + } + }, + "complete_oauth_server_input_specification": { + "type": "object", + "additionalProperties": false, + "properties": { + "client_id": { + "type": "string" + }, + "client_secret": { + "type": "string" + } + } + }, + "complete_oauth_server_output_specification": { + "type": "object", + "additionalProperties": false, + "properties": { + "client_id": { + "type": "string", + "path_in_connector_config": ["credentials", "client_id"] + }, + "client_secret": { + "type": "string", + "path_in_connector_config": ["credentials", "client_secret"] + } + } + } + } } } diff --git a/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/utils.py b/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/utils.py new file mode 100644 index 0000000000000..045e97177c8ad --- /dev/null +++ b/airbyte-integrations/connectors/source-azure-blob-storage/integration_tests/utils.py @@ -0,0 +1,25 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + + +import json +import os +import re +from typing import Any, Mapping, Union + + +def load_config(config_path: str) -> Mapping[str, Any]: + with open(f"{os.path.dirname(__file__)}/configs/{config_path}", "r") as config: + return json.load(config) + + +def get_docker_ip() -> Union[str, Any]: + # When talking to the Docker daemon via a UNIX socket, route all TCP + # traffic to docker containers via the TCP loopback interface. + docker_host = os.environ.get("DOCKER_HOST", "").strip() + if not docker_host or docker_host.startswith("unix://"): + return "127.0.0.1" + + match = re.match(r"^tcp://(.+?):\d+$", docker_host) + if not match: + raise ValueError('Invalid value for DOCKER_HOST: "%s".' % (docker_host,)) + return match.group(1) diff --git a/airbyte-integrations/connectors/source-azure-blob-storage/metadata.yaml b/airbyte-integrations/connectors/source-azure-blob-storage/metadata.yaml index 32b5318b628ba..3a1ba434ca9d8 100644 --- a/airbyte-integrations/connectors/source-azure-blob-storage/metadata.yaml +++ b/airbyte-integrations/connectors/source-azure-blob-storage/metadata.yaml @@ -7,7 +7,7 @@ data: connectorSubtype: file connectorType: source definitionId: fdaaba68-4875-4ed9-8fcd-4ae1e0a25093 - dockerImageTag: 0.3.4 + dockerImageTag: 0.4.0 dockerRepository: airbyte/source-azure-blob-storage documentationUrl: https://docs.airbyte.com/integrations/sources/azure-blob-storage githubIssueLabel: source-azure-blob-storage diff --git a/airbyte-integrations/connectors/source-azure-blob-storage/poetry.lock b/airbyte-integrations/connectors/source-azure-blob-storage/poetry.lock new file mode 100644 index 0000000000000..d02d13d1fe7dd --- /dev/null +++ b/airbyte-integrations/connectors/source-azure-blob-storage/poetry.lock @@ -0,0 +1,2443 @@ +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. + +[[package]] +name = "airbyte-cdk" +version = "0.78.3" +description = "A framework for writing Airbyte Connectors." +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "airbyte_cdk-0.78.3-py3-none-any.whl", hash = "sha256:699d61ace9f8ca4477e06af3ff1bc56856e955a444081a1701c41d94629dcd74"}, + {file = "airbyte_cdk-0.78.3.tar.gz", hash = "sha256:192c2594d0e93140a7ec635fea3d4644318faada6aa986805752adf4caf9b126"}, +] + +[package.dependencies] +airbyte-protocol-models = "0.5.1" +avro = {version = ">=1.11.2,<1.12.0", optional = true, markers = "extra == \"file-based\""} +backoff = "*" +cachetools = "*" +Deprecated = ">=1.2,<1.3" +dpath = ">=2.0.1,<2.1.0" +fastavro = {version = ">=1.8.0,<1.9.0", optional = true, markers = "extra == \"file-based\""} +genson = "1.2.2" +isodate = ">=0.6.1,<0.7.0" +Jinja2 = ">=3.1.2,<3.2.0" +jsonref = ">=0.2,<0.3" +jsonschema = ">=3.2.0,<3.3.0" +markdown = {version = "*", optional = true, markers = "extra == \"file-based\""} +pdf2image = {version = "1.16.3", optional = true, markers = "extra == \"file-based\""} +"pdfminer.six" = {version = "20221105", optional = true, markers = "extra == \"file-based\""} +pendulum = "<3.0.0" +pyarrow = {version = ">=15.0.0,<15.1.0", optional = true, markers = "extra == \"file-based\""} +pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" +pytesseract = {version = "0.3.10", optional = true, markers = "extra == \"file-based\""} +python-dateutil = "*" +PyYAML = ">=6.0.1,<7.0.0" +requests = "*" +requests_cache = "*" +unstructured = {version = "0.10.27", extras = ["docx", "pptx"], optional = true, markers = "extra == \"file-based\""} +"unstructured.pytesseract" = {version = ">=0.3.12", optional = true, markers = "extra == \"file-based\""} +wcmatch = "8.4" + +[package.extras] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] +vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] + +[[package]] +name = "airbyte-protocol-models" +version = "0.5.1" +description = "Declares the Airbyte Protocol." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, + {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, +] + +[package.dependencies] +pydantic = ">=1.9.2,<2.0.0" + +[[package]] +name = "atomicwrites" +version = "1.4.1" +description = "Atomic file writes." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, +] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "avro" +version = "1.11.3" +description = "Avro is a serialization and RPC framework." +optional = false +python-versions = ">=3.6" +files = [ + {file = "avro-1.11.3.tar.gz", hash = "sha256:3393bb5139f9cf0791d205756ce1e39a5b58586af5b153d6a3b5a199610e9d17"}, +] + +[package.extras] +snappy = ["python-snappy"] +zstandard = ["zstandard"] + +[[package]] +name = "azure-common" +version = "1.1.28" +description = "Microsoft Azure Client Library for Python (Common)" +optional = false +python-versions = "*" +files = [ + {file = "azure-common-1.1.28.zip", hash = "sha256:4ac0cd3214e36b6a1b6a442686722a5d8cc449603aa833f3f0f40bda836704a3"}, + {file = "azure_common-1.1.28-py2.py3-none-any.whl", hash = "sha256:5c12d3dcf4ec20599ca6b0d3e09e86e146353d443e7fcc050c9a19c1f9df20ad"}, +] + +[[package]] +name = "azure-core" +version = "1.30.1" +description = "Microsoft Azure Core Library for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "azure-core-1.30.1.tar.gz", hash = "sha256:26273a254131f84269e8ea4464f3560c731f29c0c1f69ac99010845f239c1a8f"}, + {file = "azure_core-1.30.1-py3-none-any.whl", hash = "sha256:7c5ee397e48f281ec4dd773d67a0a47a0962ed6fa833036057f9ea067f688e74"}, +] + +[package.dependencies] +requests = ">=2.21.0" +six = ">=1.11.0" +typing-extensions = ">=4.6.0" + +[package.extras] +aio = ["aiohttp (>=3.0)"] + +[[package]] +name = "azure-storage-blob" +version = "12.19.1" +description = "Microsoft Azure Blob Storage Client Library for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "azure-storage-blob-12.19.1.tar.gz", hash = "sha256:13e16ba42fc54ac2c7e8f976062173a5c82b9ec0594728e134aac372965a11b0"}, + {file = "azure_storage_blob-12.19.1-py3-none-any.whl", hash = "sha256:c5530dc51c21c9564e4eb706cd499befca8819b10dd89716d3fc90d747556243"}, +] + +[package.dependencies] +azure-core = ">=1.28.0,<2.0.0" +cryptography = ">=2.1.4" +isodate = ">=0.6.1" +typing-extensions = ">=4.3.0" + +[package.extras] +aio = ["azure-core[aio] (>=1.28.0,<2.0.0)"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "beautifulsoup4" +version = "4.12.3" +description = "Screen-scraping library" +optional = false +python-versions = ">=3.6.0" +files = [ + {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, + {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, +] + +[package.dependencies] +soupsieve = ">1.2" + +[package.extras] +cchardet = ["cchardet"] +chardet = ["chardet"] +charset-normalizer = ["charset-normalizer"] +html5lib = ["html5lib"] +lxml = ["lxml"] + +[[package]] +name = "bracex" +version = "2.4" +description = "Bash style brace expander." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + +[[package]] +name = "cachetools" +version = "5.3.3" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "cffi" +version = "1.16.0" +description = "Foreign Function Interface for Python calling C code." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, + {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, + {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, + {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, + {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "chardet" +version = "5.2.0" +description = "Universal encoding detector for Python 3" +optional = false +python-versions = ">=3.7" +files = [ + {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"}, + {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "cryptography" +version = "42.0.5" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, + {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, + {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, + {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, + {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, + {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = "sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, + {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, +] + +[package.dependencies] +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] +nox = ["nox"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "dataclasses-json" +version = "0.6.4" +description = "Easily serialize dataclasses to and from JSON." +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "dataclasses_json-0.6.4-py3-none-any.whl", hash = "sha256:f90578b8a3177f7552f4e1a6e535e84293cd5da421fcce0642d49c0d7bdf8df2"}, + {file = "dataclasses_json-0.6.4.tar.gz", hash = "sha256:73696ebf24936560cca79a2430cbc4f3dd23ac7bf46ed17f38e5e5e7657a6377"}, +] + +[package.dependencies] +marshmallow = ">=3.18.0,<4.0.0" +typing-inspect = ">=0.4.0,<1" + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "docker" +version = "7.0.0" +description = "A Python library for the Docker Engine API." +optional = false +python-versions = ">=3.8" +files = [ + {file = "docker-7.0.0-py3-none-any.whl", hash = "sha256:12ba681f2777a0ad28ffbcc846a69c31b4dfd9752b47eb425a274ee269c5e14b"}, + {file = "docker-7.0.0.tar.gz", hash = "sha256:323736fb92cd9418fc5e7133bc953e11a9da04f4483f828b527db553f1e7e5a3"}, +] + +[package.dependencies] +packaging = ">=14.0" +pywin32 = {version = ">=304", markers = "sys_platform == \"win32\""} +requests = ">=2.26.0" +urllib3 = ">=1.26.0" + +[package.extras] +ssh = ["paramiko (>=2.4.3)"] +websockets = ["websocket-client (>=1.3.0)"] + +[[package]] +name = "dpath" +version = "2.0.8" +description = "Filesystem-like pathing and searching for dictionaries" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, + {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, +] + +[[package]] +name = "emoji" +version = "2.11.0" +description = "Emoji for Python" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +files = [ + {file = "emoji-2.11.0-py2.py3-none-any.whl", hash = "sha256:63fc9107f06c6c2e48e5078ce9575cef98518f5ac09474f6148a43e989989582"}, + {file = "emoji-2.11.0.tar.gz", hash = "sha256:772eaa30f4e0b1ce95148a092df4c7dc97644532c03225326b0fd05e8a9f72a3"}, +] + +[package.extras] +dev = ["coverage", "coveralls", "pytest"] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "fastavro" +version = "1.8.4" +description = "Fast read/write of AVRO files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fastavro-1.8.4-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:7afe1475e8a967c04e2b0ef4d33bc10bffa66b4fa6e08bd2ee9d91b6768cba2a"}, + {file = "fastavro-1.8.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5fd73609f3c1ac0d90ae3179d2fb9d788f842245db2656ff9225fce871fc5b7"}, + {file = "fastavro-1.8.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78fdf1ba47e43146af72ac48d7b2247a06c4f2d95dfdaad6129c481014b07a6b"}, + {file = "fastavro-1.8.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d950542b3263653f00b695cbc728b5c60ab9ea6df32a7017ad9a6a67235386e7"}, + {file = "fastavro-1.8.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ce2ccfa9aff8df6da683c48542b7b2a216dde6d3a4d1c505c5e1b8ca2ec0abbb"}, + {file = "fastavro-1.8.4-cp310-cp310-win_amd64.whl", hash = "sha256:f12f9914d6196695d3208ea348145a80d0defefe16b8a226373fe8ce68f66139"}, + {file = "fastavro-1.8.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d353aec9c000b96c33ad285651a2cba0f87fe50fcdecc6120689996af427194d"}, + {file = "fastavro-1.8.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1eaed91d6e1fb06c172e0aaf4b1ca1fd019c3f4a481e314bf783a4c74f6b7015"}, + {file = "fastavro-1.8.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9293b303955acd34a6f69dd4ef3465bd575dbde0cd3e3f00227a0ba5012430b4"}, + {file = "fastavro-1.8.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b79baefd61554d9f03c4beaebbe638ef175d0efc1fb01f25e88ee6ae97985ab3"}, + {file = "fastavro-1.8.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:14d7cd3df019d41c66733b8bf5d983af9e1f601d4cb162853a49238a4087d6b0"}, + {file = "fastavro-1.8.4-cp311-cp311-win_amd64.whl", hash = "sha256:c8fb27001b7023910969f15bee2c9205c4e9f40713929d6c1dca8f470fc8fc80"}, + {file = "fastavro-1.8.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e331229acef15f858d9863ced7b629ebef4bd5f80766d367255e51cbf44f8dab"}, + {file = "fastavro-1.8.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04e26b3ba288bd423f25630a3b9bd70cc61b46c6f6161de35e398a6fc8f260f0"}, + {file = "fastavro-1.8.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6281f4555659ed658b195d1618a637504013e57b680d6cbad7c726e9a4e2cf0b"}, + {file = "fastavro-1.8.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3201880149e1fb807d616ab46b338a26788173a9f4e8a3396ae145e86af878a1"}, + {file = "fastavro-1.8.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:39771719fa04b8321eeebfb0813eaa2723c20e5bf570bcca3f53f1169099a0d7"}, + {file = "fastavro-1.8.4-cp312-cp312-win_amd64.whl", hash = "sha256:7095ae37a5c46dacb7ef430092e5f94650f576be281487b72050c1cf12e4ee20"}, + {file = "fastavro-1.8.4-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:eb76f5bfcde91cde240c93594dae47670cdf1a95d7e5d0dc3ccdef57c6c1c183"}, + {file = "fastavro-1.8.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71ebe1cf090f800ca7d4c64d50c81c2a88c56e6ef6aa5eb61ec425e7ae723617"}, + {file = "fastavro-1.8.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9f0ef601943ea11cd02a59c57f5588cea3e300ac67608f53c904ec7aeddd232"}, + {file = "fastavro-1.8.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1060318f3ab31bcc4b2184cea3763305b773163381601e304000da81a2f7e11f"}, + {file = "fastavro-1.8.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:01c8c7f22172174f2c2c0922801b552fbca75758f84b0ad3cd6f3e505a76ed05"}, + {file = "fastavro-1.8.4-cp38-cp38-win_amd64.whl", hash = "sha256:bc8a1af80b8face4a41d8526a34b6474a874f7367a900d0b14752eacebb7a2b8"}, + {file = "fastavro-1.8.4-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:687a2f8fa83a76906c4ec35c9d0500e13a567fc631845f0e47646c48233c7725"}, + {file = "fastavro-1.8.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b921c63fcfb9149a32c40a9cd27b0e900fcda602455cbce4d773300019b9ce2"}, + {file = "fastavro-1.8.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2610a8683b10be7aaa532ddddbcb719883ee2d6f09dafd4a4a7b46d5d719fc07"}, + {file = "fastavro-1.8.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:94448dc749d098f846f6a6d82d59f85483bd6fcdecfb6234daac5f4494ae4156"}, + {file = "fastavro-1.8.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2d39c6b5db7014a3722a7d206310874430486f4895161911b6b6574cb1a6c48f"}, + {file = "fastavro-1.8.4-cp39-cp39-win_amd64.whl", hash = "sha256:3b73472e8da33bcbf08ec989996637aea04eaca71058bb6d45def6fa4168f541"}, + {file = "fastavro-1.8.4.tar.gz", hash = "sha256:dae6118da27e81abf5957dc79a6d778888fc1bbf67645f52959cb2faba95beff"}, +] + +[package.extras] +codecs = ["lz4", "python-snappy", "zstandard"] +lz4 = ["lz4"] +snappy = ["python-snappy"] +zstandard = ["zstandard"] + +[[package]] +name = "filetype" +version = "1.2.0" +description = "Infer file type and MIME type of any file/buffer. No external dependencies." +optional = false +python-versions = "*" +files = [ + {file = "filetype-1.2.0-py2.py3-none-any.whl", hash = "sha256:7ce71b6880181241cf7ac8697a2f1eb6a8bd9b429f7ad6d27b8db9ba5f1c2d25"}, + {file = "filetype-1.2.0.tar.gz", hash = "sha256:66b56cd6474bf41d8c54660347d37afcc3f7d1970648de365c102ef77548aadb"}, +] + +[[package]] +name = "genson" +version = "1.2.2" +description = "GenSON is a powerful, user-friendly JSON Schema generator." +optional = false +python-versions = "*" +files = [ + {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, +] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "importlib-metadata" +version = "7.1.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"}, + {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "joblib" +version = "1.3.2" +description = "Lightweight pipelining with Python functions" +optional = false +python-versions = ">=3.7" +files = [ + {file = "joblib-1.3.2-py3-none-any.whl", hash = "sha256:ef4331c65f239985f3f2220ecc87db222f08fd22097a3dd5698f693875f8cbb9"}, + {file = "joblib-1.3.2.tar.gz", hash = "sha256:92f865e621e17784e7955080b6d042489e3b8e294949cc44c6eac304f59772b1"}, +] + +[[package]] +name = "jsonref" +version = "0.2" +description = "An implementation of JSON Reference for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, +] + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0" +setuptools = "*" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] + +[[package]] +name = "langdetect" +version = "1.0.9" +description = "Language detection library ported from Google's language-detection." +optional = false +python-versions = "*" +files = [ + {file = "langdetect-1.0.9-py2-none-any.whl", hash = "sha256:7cbc0746252f19e76f77c0b1690aadf01963be835ef0cd4b56dddf2a8f1dfc2a"}, + {file = "langdetect-1.0.9.tar.gz", hash = "sha256:cbc1fef89f8d062739774bd51eda3da3274006b3661d199c2655f6b3f6d605a0"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "lxml" +version = "5.2.0" +description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." +optional = false +python-versions = ">=3.6" +files = [ + {file = "lxml-5.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:c54f8d6160080831a76780d850302fdeb0e8d0806f661777b0714dfb55d9a08a"}, + {file = "lxml-5.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0e95ae029396382a0d2e8174e4077f96befcd4a2184678db363ddc074eb4d3b2"}, + {file = "lxml-5.2.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5810fa80e64a0c689262a71af999c5735f48c0da0affcbc9041d1ef5ef3920be"}, + {file = "lxml-5.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae69524fd6a68b288574013f8fadac23cacf089c75cd3fc5b216277a445eb736"}, + {file = "lxml-5.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fadda215e32fe375d65e560b7f7e2a37c7f9c4ecee5315bb1225ca6ac9bf5838"}, + {file = "lxml-5.2.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:f1f164e4cc6bc646b1fc86664c3543bf4a941d45235797279b120dc740ee7af5"}, + {file = "lxml-5.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:3603a8a41097daf7672cae22cc4a860ab9ea5597f1c5371cb21beca3398b8d6a"}, + {file = "lxml-5.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b3b4bb89a785f4fd60e05f3c3a526c07d0d68e3536f17f169ca13bf5b5dd75a5"}, + {file = "lxml-5.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1effc10bf782f0696e76ecfeba0720ea02c0c31d5bffb7b29ba10debd57d1c3d"}, + {file = "lxml-5.2.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b03531f6cd6ce4b511dcece060ca20aa5412f8db449274b44f4003f282e6272f"}, + {file = "lxml-5.2.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7fac15090bb966719df06f0c4f8139783746d1e60e71016d8a65db2031ca41b8"}, + {file = "lxml-5.2.0-cp310-cp310-win32.whl", hash = "sha256:92bb37c96215c4b2eb26f3c791c0bf02c64dd251effa532b43ca5049000c4478"}, + {file = "lxml-5.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:b0181c22fdb89cc19e70240a850e5480817c3e815b1eceb171b3d7a3aa3e596a"}, + {file = "lxml-5.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ada8ce9e6e1d126ef60d215baaa0c81381ba5841c25f1d00a71cdafdc038bd27"}, + {file = "lxml-5.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3cefb133c859f06dab2ae63885d9f405000c4031ec516e0ed4f9d779f690d8e3"}, + {file = "lxml-5.2.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1ede2a7a86a977b0c741654efaeca0af7860a9b1ae39f9268f0936246a977ee0"}, + {file = "lxml-5.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d46df6f0b1a0cda39d12c5c4615a7d92f40342deb8001c7b434d7c8c78352e58"}, + {file = "lxml-5.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2259243ee734cc736e237719037efb86603c891fd363cc7973a2d0ac8a0e3f"}, + {file = "lxml-5.2.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:c53164f29ed3c3868787144e8ea8a399ffd7d8215f59500a20173593c19e96eb"}, + {file = "lxml-5.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:371aab9a397dcc76625ad3b02fa9b21be63406d69237b773156e7d1fc2ce0cae"}, + {file = "lxml-5.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e08784288a179b59115b5e57abf6d387528b39abb61105fe17510a199a277a40"}, + {file = "lxml-5.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4c232726f7b6df5143415a06323faaa998ef8abbe1c0ed00d718755231d76f08"}, + {file = "lxml-5.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e4366e58c0508da4dee4c7c70cee657e38553d73abdffa53abbd7d743711ee11"}, + {file = "lxml-5.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c84dce8fb2e900d4fb094e76fdad34a5fd06de53e41bddc1502c146eb11abd74"}, + {file = "lxml-5.2.0-cp311-cp311-win32.whl", hash = "sha256:0947d1114e337dc2aae2fa14bbc9ed5d9ca1a0acd6d2f948df9926aef65305e9"}, + {file = "lxml-5.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:1eace37a9f4a1bef0bb5c849434933fd6213008ec583c8e31ee5b8e99c7c8500"}, + {file = "lxml-5.2.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f2cb157e279d28c66b1c27e0948687dc31dc47d1ab10ce0cd292a8334b7de3d5"}, + {file = "lxml-5.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:53c0e56f41ef68c1ce4e96f27ecdc2df389730391a2fd45439eb3facb02d36c8"}, + {file = "lxml-5.2.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:703d60e59ab45c17485c2c14b11880e4f7f0eab07134afa9007573fa5a779a5a"}, + {file = "lxml-5.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaf5e308a5e50bc0548c4fdca0117a31ec9596f8cfc96592db170bcecc71a957"}, + {file = "lxml-5.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af64df85fecd3cf3b2e792f0b5b4d92740905adfa8ce3b24977a55415f1a0c40"}, + {file = "lxml-5.2.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:df7dfbdef11702fd22c2eaf042d7098d17edbc62d73f2199386ad06cbe466f6d"}, + {file = "lxml-5.2.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:7250030a7835bfd5ba6ca7d1ad483ec90f9cbc29978c5e75c1cc3e031d3c4160"}, + {file = "lxml-5.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:be5faa2d5c8c8294d770cfd09d119fb27b5589acc59635b0cf90f145dbe81dca"}, + {file = "lxml-5.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:347ec08250d5950f5b016caa3e2e13fb2cb9714fe6041d52e3716fb33c208663"}, + {file = "lxml-5.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:dc7b630c4fb428b8a40ddd0bfc4bc19de11bb3c9b031154f77360e48fe8b4451"}, + {file = "lxml-5.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ae550cbd7f229cdf2841d9b01406bcca379a5fb327b9efb53ba620a10452e835"}, + {file = "lxml-5.2.0-cp312-cp312-win32.whl", hash = "sha256:7c61ce3cdd6e6c9f4003ac118be7eb3036d0ce2afdf23929e533e54482780f74"}, + {file = "lxml-5.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:f90c36ca95a44d2636bbf55a51ca30583b59b71b6547b88d954e029598043551"}, + {file = "lxml-5.2.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:1cce2eaad7e38b985b0f91f18468dda0d6b91862d32bec945b0e46e2ffe7222e"}, + {file = "lxml-5.2.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:60a3983d32f722a8422c01e4dc4badc7a307ca55c59e2485d0e14244a52c482f"}, + {file = "lxml-5.2.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60847dfbdfddf08a56c4eefe48234e8c1ab756c7eda4a2a7c1042666a5516564"}, + {file = "lxml-5.2.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bbe335f0d1a86391671d975a1b5e9b08bb72fba6b567c43bdc2e55ca6e6c086"}, + {file = "lxml-5.2.0-cp36-cp36m-manylinux_2_28_aarch64.whl", hash = "sha256:3ac7c8a60b8ad51fe7bca99a634dd625d66492c502fd548dc6dc769ce7d94b6a"}, + {file = "lxml-5.2.0-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:73e69762cf740ac3ae81137ef9d6f15f93095f50854e233d50b29e7b8a91dbc6"}, + {file = "lxml-5.2.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:281ee1ffeb0ab06204dfcd22a90e9003f0bb2dab04101ad983d0b1773bc10588"}, + {file = "lxml-5.2.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:ba3a86b0d5a5c93104cb899dff291e3ae13729c389725a876d00ef9696de5425"}, + {file = "lxml-5.2.0-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:356f8873b1e27b81793e30144229adf70f6d3e36e5cb7b6d289da690f4398953"}, + {file = "lxml-5.2.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:2a34e74ffe92c413f197ff4967fb1611d938ee0691b762d062ef0f73814f3aa4"}, + {file = "lxml-5.2.0-cp36-cp36m-win32.whl", hash = "sha256:6f0d2b97a5a06c00c963d4542793f3e486b1ed3a957f8c19f6006ed39d104bb0"}, + {file = "lxml-5.2.0-cp36-cp36m-win_amd64.whl", hash = "sha256:35e39c6fd089ad6674eb52d93aa874d6027b3ae44d2381cca6e9e4c2e102c9c8"}, + {file = "lxml-5.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5f6e4e5a62114ae76690c4a04c5108d067442d0a41fd092e8abd25af1288c450"}, + {file = "lxml-5.2.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93eede9bcc842f891b2267c7f0984d811940d1bc18472898a1187fe560907a99"}, + {file = "lxml-5.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ad364026c2cebacd7e01d1138bd53639822fefa8f7da90fc38cd0e6319a2699"}, + {file = "lxml-5.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f06e4460e76468d99cc36d5b9bc6fc5f43e6662af44960e13e3f4e040aacb35"}, + {file = "lxml-5.2.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:ca3236f31d565555139d5b00b790ed2a98ac6f0c4470c4032f8b5e5a5dba3c1a"}, + {file = "lxml-5.2.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:a9b67b850ab1d304cb706cf71814b0e0c3875287083d7ec55ee69504a9c48180"}, + {file = "lxml-5.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5261c858c390ae9a19aba96796948b6a2d56649cbd572968970dc8da2b2b2a42"}, + {file = "lxml-5.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e8359fb610c8c444ac473cfd82dae465f405ff807cabb98a9b9712bbd0028751"}, + {file = "lxml-5.2.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:f9e27841cddfaebc4e3ffbe5dbdff42891051acf5befc9f5323944b2c61cef16"}, + {file = "lxml-5.2.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:641a8da145aca67671205f3e89bfec9815138cf2fe06653c909eab42e486d373"}, + {file = "lxml-5.2.0-cp37-cp37m-win32.whl", hash = "sha256:931a3a13e0f574abce8f3152b207938a54304ccf7a6fd7dff1fdb2f6691d08af"}, + {file = "lxml-5.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:246c93e2503c710cf02c7e9869dc0258223cbefe5e8f9ecded0ac0aa07fd2bf8"}, + {file = "lxml-5.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:11acfcdf5a38cf89c48662123a5d02ae0a7d99142c7ee14ad90de5c96a9b6f06"}, + {file = "lxml-5.2.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:200f70b5d95fc79eb9ed7f8c4888eef4e274b9bf380b829d3d52e9ed962e9231"}, + {file = "lxml-5.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba4d02aed47c25be6775a40d55c5774327fdedba79871b7c2485e80e45750cb2"}, + {file = "lxml-5.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e283b24c14361fe9e04026a1d06c924450415491b83089951d469509900d9f32"}, + {file = "lxml-5.2.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:03e3962d6ad13a862dacd5b3a3ea60b4d092a550f36465234b8639311fd60989"}, + {file = "lxml-5.2.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:6e45fd5213e5587a610b7e7c8c5319a77591ab21ead42df46bb342e21bc1418d"}, + {file = "lxml-5.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:27877732946843f4b6bfc56eb40d865653eef34ad2edeed16b015d5c29c248df"}, + {file = "lxml-5.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4d16b44ad0dd8c948129639e34c8d301ad87ebc852568ace6fe9a5ad9ce67ee1"}, + {file = "lxml-5.2.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:b8f842df9ba26135c5414e93214e04fe0af259bb4f96a32f756f89467f7f3b45"}, + {file = "lxml-5.2.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c74e77df9e36c8c91157853e6cd400f6f9ca7a803ba89981bfe3f3fc7e5651ef"}, + {file = "lxml-5.2.0-cp38-cp38-win32.whl", hash = "sha256:1459a998c10a99711ac532abe5cc24ba354e4396dafef741c7797f8830712d56"}, + {file = "lxml-5.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:a00f5931b7cccea775123c3c0a2513aee58afdad8728550cc970bff32280bdd2"}, + {file = "lxml-5.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ddda5ba8831f258ac7e6364be03cb27aa62f50c67fd94bc1c3b6247959cc0369"}, + {file = "lxml-5.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:56835b9e9a7767202fae06310c6b67478963e535fe185bed3bf9af5b18d2b67e"}, + {file = "lxml-5.2.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25fef8794f0dc89f01bdd02df6a7fec4bcb2fbbe661d571e898167a83480185e"}, + {file = "lxml-5.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32d44af078485c4da9a7ec460162392d49d996caf89516fa0b75ad0838047122"}, + {file = "lxml-5.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f354d62345acdf22aa3e171bd9723790324a66fafe61bfe3873b86724cf6daaa"}, + {file = "lxml-5.2.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6a7e0935f05e1cf1a3aa1d49a87505773b04f128660eac2a24a5594ea6b1baa7"}, + {file = "lxml-5.2.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:75a4117b43694c72a0d89f6c18a28dc57407bde4650927d4ef5fd384bdf6dcc7"}, + {file = "lxml-5.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:57402d6cdd8a897ce21cf8d1ff36683583c17a16322a321184766c89a1980600"}, + {file = "lxml-5.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:56591e477bea531e5e1854f5dfb59309d5708669bc921562a35fd9ca5182bdcd"}, + {file = "lxml-5.2.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7efbce96719aa275d49ad5357886845561328bf07e1d5ab998f4e3066c5ccf15"}, + {file = "lxml-5.2.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a3c39def0965e8fb5c8d50973e0c7b4ce429a2fa730f3f9068a7f4f9ce78410b"}, + {file = "lxml-5.2.0-cp39-cp39-win32.whl", hash = "sha256:5188f22c00381cb44283ecb28c8d85c2db4a3035774dd851876c8647cb809c27"}, + {file = "lxml-5.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:ed1fe80e1fcdd1205a443bddb1ad3c3135bb1cd3f36cc996a1f4aed35960fbe8"}, + {file = "lxml-5.2.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d2b339fb790fc923ae2e9345c8633e3d0064d37ea7920c027f20c8ae6f65a91f"}, + {file = "lxml-5.2.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06036d60fccb21e22dd167f6d0e422b9cbdf3588a7e999a33799f9cbf01e41a5"}, + {file = "lxml-5.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a1611fb9de0a269c05575c024e6d8cdf2186e3fa52b364e3b03dcad82514d57"}, + {file = "lxml-5.2.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:05fc3720250d221792b6e0d150afc92d20cb10c9cdaa8c8f93c2a00fbdd16015"}, + {file = "lxml-5.2.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:11e41ffd3cd27b0ca1c76073b27bd860f96431d9b70f383990f1827ca19f2f52"}, + {file = "lxml-5.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0382e6a3eefa3f6699b14fa77c2eb32af2ada261b75120eaf4fc028a20394975"}, + {file = "lxml-5.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:be5c8e776ecbcf8c1bce71a7d90e3a3680c9ceae516cac0be08b47e9fac0ca43"}, + {file = "lxml-5.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da12b4efc93d53068888cb3b58e355b31839f2428b8f13654bd25d68b201c240"}, + {file = "lxml-5.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f46f8033da364bacc74aca5e319509a20bb711c8a133680ca5f35020f9eaf025"}, + {file = "lxml-5.2.0-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:50a26f68d090594477df8572babac64575cd5c07373f7a8319c527c8e56c0f99"}, + {file = "lxml-5.2.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:57cbadf028727705086047994d2e50124650e63ce5a035b0aa79ab50f001989f"}, + {file = "lxml-5.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:8aa11638902ac23f944f16ce45c9f04c9d5d57bb2da66822abb721f4efe5fdbb"}, + {file = "lxml-5.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b7150e630b879390e02121e71ceb1807f682b88342e2ea2082e2c8716cf8bd93"}, + {file = "lxml-5.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4add722393c99da4d51c8d9f3e1ddf435b30677f2d9ba9aeaa656f23c1b7b580"}, + {file = "lxml-5.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd0f25a431cd16f70ec1c47c10b413e7ddfe1ccaaddd1a7abd181e507c012374"}, + {file = "lxml-5.2.0-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:883e382695f346c2ea3ad96bdbdf4ca531788fbeedb4352be3a8fcd169fc387d"}, + {file = "lxml-5.2.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:80cc2b55bb6e35d3cb40936b658837eb131e9f16357241cd9ba106ae1e9c5ecb"}, + {file = "lxml-5.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:59ec2948385336e9901008fdf765780fe30f03e7fdba8090aafdbe5d1b7ea0cd"}, + {file = "lxml-5.2.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ddbea6e58cce1a640d9d65947f1e259423fc201c9cf9761782f355f53b7f3097"}, + {file = "lxml-5.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52d6cdea438eb7282c41c5ac00bd6d47d14bebb6e8a8d2a1c168ed9e0cacfbab"}, + {file = "lxml-5.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c556bbf88a8b667c849d326dd4dd9c6290ede5a33383ffc12b0ed17777f909d"}, + {file = "lxml-5.2.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:947fa8bf15d1c62c6db36c6ede9389cac54f59af27010251747f05bddc227745"}, + {file = "lxml-5.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e6cb8f7a332eaa2d876b649a748a445a38522e12f2168e5e838d1505a91cdbb7"}, + {file = "lxml-5.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:16e65223f34fd3d65259b174f0f75a4bb3d9893698e5e7d01e54cd8c5eb98d85"}, + {file = "lxml-5.2.0.tar.gz", hash = "sha256:21dc490cdb33047bc7f7ad76384f3366fa8f5146b86cc04c4af45de901393b90"}, +] + +[package.extras] +cssselect = ["cssselect (>=0.7)"] +html-clean = ["lxml-html-clean"] +html5 = ["html5lib"] +htmlsoup = ["BeautifulSoup4"] +source = ["Cython (>=3.0.10)"] + +[[package]] +name = "markdown" +version = "3.6" +description = "Python implementation of John Gruber's Markdown." +optional = false +python-versions = ">=3.8" +files = [ + {file = "Markdown-3.6-py3-none-any.whl", hash = "sha256:48f276f4d8cfb8ce6527c8f79e2ee29708508bf4d40aa410fbc3b4ee832c850f"}, + {file = "Markdown-3.6.tar.gz", hash = "sha256:ed4f41f6daecbeeb96e576ce414c41d2d876daa9a16cb35fa8ed8c2ddfad0224"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] +testing = ["coverage", "pyyaml"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "marshmallow" +version = "3.21.1" +description = "A lightweight library for converting complex datatypes to and from native Python datatypes." +optional = false +python-versions = ">=3.8" +files = [ + {file = "marshmallow-3.21.1-py3-none-any.whl", hash = "sha256:f085493f79efb0644f270a9bf2892843142d80d7174bbbd2f3713f2a589dc633"}, + {file = "marshmallow-3.21.1.tar.gz", hash = "sha256:4e65e9e0d80fc9e609574b9983cf32579f305c718afb30d7233ab818571768c3"}, +] + +[package.dependencies] +packaging = ">=17.0" + +[package.extras] +dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] +docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.2.6)", "sphinx-issues (==4.0.0)", "sphinx-version-warning (==1.1.2)"] +tests = ["pytest", "pytz", "simplejson"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "nltk" +version = "3.8.1" +description = "Natural Language Toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "nltk-3.8.1-py3-none-any.whl", hash = "sha256:fd5c9109f976fa86bcadba8f91e47f5e9293bd034474752e92a520f81c93dda5"}, + {file = "nltk-3.8.1.zip", hash = "sha256:1834da3d0682cba4f2cede2f9aad6b0fafb6461ba451db0efb6f9c39798d64d3"}, +] + +[package.dependencies] +click = "*" +joblib = "*" +regex = ">=2021.8.3" +tqdm = "*" + +[package.extras] +all = ["matplotlib", "numpy", "pyparsing", "python-crfsuite", "requests", "scikit-learn", "scipy", "twython"] +corenlp = ["requests"] +machine-learning = ["numpy", "python-crfsuite", "scikit-learn", "scipy"] +plot = ["matplotlib"] +tgrep = ["pyparsing"] +twitter = ["twython"] + +[[package]] +name = "numpy" +version = "1.26.4" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, +] + +[[package]] +name = "packaging" +version = "24.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, +] + +[[package]] +name = "pandas" +version = "2.2.1" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pandas-2.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8df8612be9cd1c7797c93e1c5df861b2ddda0b48b08f2c3eaa0702cf88fb5f88"}, + {file = "pandas-2.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0f573ab277252ed9aaf38240f3b54cfc90fff8e5cab70411ee1d03f5d51f3944"}, + {file = "pandas-2.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f02a3a6c83df4026e55b63c1f06476c9aa3ed6af3d89b4f04ea656ccdaaaa359"}, + {file = "pandas-2.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c38ce92cb22a4bea4e3929429aa1067a454dcc9c335799af93ba9be21b6beb51"}, + {file = "pandas-2.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c2ce852e1cf2509a69e98358e8458775f89599566ac3775e70419b98615f4b06"}, + {file = "pandas-2.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:53680dc9b2519cbf609c62db3ed7c0b499077c7fefda564e330286e619ff0dd9"}, + {file = "pandas-2.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:94e714a1cca63e4f5939cdce5f29ba8d415d85166be3441165edd427dc9f6bc0"}, + {file = "pandas-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f821213d48f4ab353d20ebc24e4faf94ba40d76680642fb7ce2ea31a3ad94f9b"}, + {file = "pandas-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c70e00c2d894cb230e5c15e4b1e1e6b2b478e09cf27cc593a11ef955b9ecc81a"}, + {file = "pandas-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e97fbb5387c69209f134893abc788a6486dbf2f9e511070ca05eed4b930b1b02"}, + {file = "pandas-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101d0eb9c5361aa0146f500773395a03839a5e6ecde4d4b6ced88b7e5a1a6403"}, + {file = "pandas-2.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7d2ed41c319c9fb4fd454fe25372028dfa417aacb9790f68171b2e3f06eae8cd"}, + {file = "pandas-2.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:af5d3c00557d657c8773ef9ee702c61dd13b9d7426794c9dfeb1dc4a0bf0ebc7"}, + {file = "pandas-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:06cf591dbaefb6da9de8472535b185cba556d0ce2e6ed28e21d919704fef1a9e"}, + {file = "pandas-2.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:88ecb5c01bb9ca927ebc4098136038519aa5d66b44671861ffab754cae75102c"}, + {file = "pandas-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:04f6ec3baec203c13e3f8b139fb0f9f86cd8c0b94603ae3ae8ce9a422e9f5bee"}, + {file = "pandas-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a935a90a76c44fe170d01e90a3594beef9e9a6220021acfb26053d01426f7dc2"}, + {file = "pandas-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c391f594aae2fd9f679d419e9a4d5ba4bce5bb13f6a989195656e7dc4b95c8f0"}, + {file = "pandas-2.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9d1265545f579edf3f8f0cb6f89f234f5e44ba725a34d86535b1a1d38decbccc"}, + {file = "pandas-2.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:11940e9e3056576ac3244baef2fedade891977bcc1cb7e5cc8f8cc7d603edc89"}, + {file = "pandas-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:4acf681325ee1c7f950d058b05a820441075b0dd9a2adf5c4835b9bc056bf4fb"}, + {file = "pandas-2.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9bd8a40f47080825af4317d0340c656744f2bfdb6819f818e6ba3cd24c0e1397"}, + {file = "pandas-2.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:df0c37ebd19e11d089ceba66eba59a168242fc6b7155cba4ffffa6eccdfb8f16"}, + {file = "pandas-2.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:739cc70eaf17d57608639e74d63387b0d8594ce02f69e7a0b046f117974b3019"}, + {file = "pandas-2.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9d3558d263073ed95e46f4650becff0c5e1ffe0fc3a015de3c79283dfbdb3df"}, + {file = "pandas-2.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4aa1d8707812a658debf03824016bf5ea0d516afdea29b7dc14cf687bc4d4ec6"}, + {file = "pandas-2.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:76f27a809cda87e07f192f001d11adc2b930e93a2b0c4a236fde5429527423be"}, + {file = "pandas-2.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:1ba21b1d5c0e43416218db63037dbe1a01fc101dc6e6024bcad08123e48004ab"}, + {file = "pandas-2.2.1.tar.gz", hash = "sha256:0ab90f87093c13f3e8fa45b48ba9f39181046e8f3317d3aadb2fffbb1b978572"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.22.4,<2", markers = "python_version < \"3.11\""}, + {version = ">=1.23.2,<2", markers = "python_version == \"3.11\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.7" + +[package.extras] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] + +[[package]] +name = "pdf2image" +version = "1.16.3" +description = "A wrapper around the pdftoppm and pdftocairo command line tools to convert PDF to a PIL Image list." +optional = false +python-versions = "*" +files = [ + {file = "pdf2image-1.16.3-py3-none-any.whl", hash = "sha256:b6154164af3677211c22cbb38b2bd778b43aca02758e962fe1e231f6d3b0e380"}, + {file = "pdf2image-1.16.3.tar.gz", hash = "sha256:74208810c2cef4d9e347769b8e62a52303982ddb4f2dfd744c7ab4b940ae287e"}, +] + +[package.dependencies] +pillow = "*" + +[[package]] +name = "pdfminer-six" +version = "20221105" +description = "PDF parser and analyzer" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pdfminer.six-20221105-py3-none-any.whl", hash = "sha256:1eaddd712d5b2732f8ac8486824533514f8ba12a0787b3d5fe1e686cd826532d"}, + {file = "pdfminer.six-20221105.tar.gz", hash = "sha256:8448ab7b939d18b64820478ecac5394f482d7a79f5f7eaa7703c6c959c175e1d"}, +] + +[package.dependencies] +charset-normalizer = ">=2.0.0" +cryptography = ">=36.0.0" + +[package.extras] +dev = ["black", "mypy (==0.931)", "nox", "pytest"] +docs = ["sphinx", "sphinx-argparse"] +image = ["Pillow"] + +[[package]] +name = "pendulum" +version = "2.1.2" +description = "Python datetimes made easy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, +] + +[package.dependencies] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" + +[[package]] +name = "pillow" +version = "10.3.0" +description = "Python Imaging Library (Fork)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pillow-10.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:90b9e29824800e90c84e4022dd5cc16eb2d9605ee13f05d47641eb183cd73d45"}, + {file = "pillow-10.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a2c405445c79c3f5a124573a051062300936b0281fee57637e706453e452746c"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78618cdbccaa74d3f88d0ad6cb8ac3007f1a6fa5c6f19af64b55ca170bfa1edf"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261ddb7ca91fcf71757979534fb4c128448b5b4c55cb6152d280312062f69599"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ce49c67f4ea0609933d01c0731b34b8695a7a748d6c8d186f95e7d085d2fe475"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b14f16f94cbc61215115b9b1236f9c18403c15dd3c52cf629072afa9d54c1cbf"}, + {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d33891be6df59d93df4d846640f0e46f1a807339f09e79a8040bc887bdcd7ed3"}, + {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b50811d664d392f02f7761621303eba9d1b056fb1868c8cdf4231279645c25f5"}, + {file = "pillow-10.3.0-cp310-cp310-win32.whl", hash = "sha256:ca2870d5d10d8726a27396d3ca4cf7976cec0f3cb706debe88e3a5bd4610f7d2"}, + {file = "pillow-10.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:f0d0591a0aeaefdaf9a5e545e7485f89910c977087e7de2b6c388aec32011e9f"}, + {file = "pillow-10.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:ccce24b7ad89adb5a1e34a6ba96ac2530046763912806ad4c247356a8f33a67b"}, + {file = "pillow-10.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:5f77cf66e96ae734717d341c145c5949c63180842a545c47a0ce7ae52ca83795"}, + {file = "pillow-10.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4b878386c4bf293578b48fc570b84ecfe477d3b77ba39a6e87150af77f40c57"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdcbb4068117dfd9ce0138d068ac512843c52295ed996ae6dd1faf537b6dbc27"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9797a6c8fe16f25749b371c02e2ade0efb51155e767a971c61734b1bf6293994"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:9e91179a242bbc99be65e139e30690e081fe6cb91a8e77faf4c409653de39451"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:1b87bd9d81d179bd8ab871603bd80d8645729939f90b71e62914e816a76fc6bd"}, + {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:81d09caa7b27ef4e61cb7d8fbf1714f5aec1c6b6c5270ee53504981e6e9121ad"}, + {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:048ad577748b9fa4a99a0548c64f2cb8d672d5bf2e643a739ac8faff1164238c"}, + {file = "pillow-10.3.0-cp311-cp311-win32.whl", hash = "sha256:7161ec49ef0800947dc5570f86568a7bb36fa97dd09e9827dc02b718c5643f09"}, + {file = "pillow-10.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:8eb0908e954d093b02a543dc963984d6e99ad2b5e36503d8a0aaf040505f747d"}, + {file = "pillow-10.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:4e6f7d1c414191c1199f8996d3f2282b9ebea0945693fb67392c75a3a320941f"}, + {file = "pillow-10.3.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:e46f38133e5a060d46bd630faa4d9fa0202377495df1f068a8299fd78c84de84"}, + {file = "pillow-10.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:50b8eae8f7334ec826d6eeffaeeb00e36b5e24aa0b9df322c247539714c6df19"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d3bea1c75f8c53ee4d505c3e67d8c158ad4df0d83170605b50b64025917f338"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19aeb96d43902f0a783946a0a87dbdad5c84c936025b8419da0a0cd7724356b1"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:74d28c17412d9caa1066f7a31df8403ec23d5268ba46cd0ad2c50fb82ae40462"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:ff61bfd9253c3915e6d41c651d5f962da23eda633cf02262990094a18a55371a"}, + {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d886f5d353333b4771d21267c7ecc75b710f1a73d72d03ca06df49b09015a9ef"}, + {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b5ec25d8b17217d635f8935dbc1b9aa5907962fae29dff220f2659487891cd3"}, + {file = "pillow-10.3.0-cp312-cp312-win32.whl", hash = "sha256:51243f1ed5161b9945011a7360e997729776f6e5d7005ba0c6879267d4c5139d"}, + {file = "pillow-10.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:412444afb8c4c7a6cc11a47dade32982439925537e483be7c0ae0cf96c4f6a0b"}, + {file = "pillow-10.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:798232c92e7665fe82ac085f9d8e8ca98826f8e27859d9a96b41d519ecd2e49a"}, + {file = "pillow-10.3.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:4eaa22f0d22b1a7e93ff0a596d57fdede2e550aecffb5a1ef1106aaece48e96b"}, + {file = "pillow-10.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cd5e14fbf22a87321b24c88669aad3a51ec052eb145315b3da3b7e3cc105b9a2"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1530e8f3a4b965eb6a7785cf17a426c779333eb62c9a7d1bbcf3ffd5bf77a4aa"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d512aafa1d32efa014fa041d38868fda85028e3f930a96f85d49c7d8ddc0383"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:339894035d0ede518b16073bdc2feef4c991ee991a29774b33e515f1d308e08d"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:aa7e402ce11f0885305bfb6afb3434b3cd8f53b563ac065452d9d5654c7b86fd"}, + {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0ea2a783a2bdf2a561808fe4a7a12e9aa3799b701ba305de596bc48b8bdfce9d"}, + {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c78e1b00a87ce43bb37642c0812315b411e856a905d58d597750eb79802aaaa3"}, + {file = "pillow-10.3.0-cp38-cp38-win32.whl", hash = "sha256:72d622d262e463dfb7595202d229f5f3ab4b852289a1cd09650362db23b9eb0b"}, + {file = "pillow-10.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:2034f6759a722da3a3dbd91a81148cf884e91d1b747992ca288ab88c1de15999"}, + {file = "pillow-10.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2ed854e716a89b1afcedea551cd85f2eb2a807613752ab997b9974aaa0d56936"}, + {file = "pillow-10.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dc1a390a82755a8c26c9964d457d4c9cbec5405896cba94cf51f36ea0d855002"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4203efca580f0dd6f882ca211f923168548f7ba334c189e9eab1178ab840bf60"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3102045a10945173d38336f6e71a8dc71bcaeed55c3123ad4af82c52807b9375"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6fb1b30043271ec92dc65f6d9f0b7a830c210b8a96423074b15c7bc999975f57"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:1dfc94946bc60ea375cc39cff0b8da6c7e5f8fcdc1d946beb8da5c216156ddd8"}, + {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b09b86b27a064c9624d0a6c54da01c1beaf5b6cadfa609cf63789b1d08a797b9"}, + {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d3b2348a78bc939b4fed6552abfd2e7988e0f81443ef3911a4b8498ca084f6eb"}, + {file = "pillow-10.3.0-cp39-cp39-win32.whl", hash = "sha256:45ebc7b45406febf07fef35d856f0293a92e7417ae7933207e90bf9090b70572"}, + {file = "pillow-10.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:0ba26351b137ca4e0db0342d5d00d2e355eb29372c05afd544ebf47c0956ffeb"}, + {file = "pillow-10.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:50fd3f6b26e3441ae07b7c979309638b72abc1a25da31a81a7fbd9495713ef4f"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6b02471b72526ab8a18c39cb7967b72d194ec53c1fd0a70b050565a0f366d355"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8ab74c06ffdab957d7670c2a5a6e1a70181cd10b727cd788c4dd9005b6a8acd9"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:048eeade4c33fdf7e08da40ef402e748df113fd0b4584e32c4af74fe78baaeb2"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e2ec1e921fd07c7cda7962bad283acc2f2a9ccc1b971ee4b216b75fad6f0463"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c8e73e99da7db1b4cad7f8d682cf6abad7844da39834c288fbfa394a47bbced"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:16563993329b79513f59142a6b02055e10514c1a8e86dca8b48a893e33cf91e3"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:dd78700f5788ae180b5ee8902c6aea5a5726bac7c364b202b4b3e3ba2d293170"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:aff76a55a8aa8364d25400a210a65ff59d0168e0b4285ba6bf2bd83cf675ba32"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b7bc2176354defba3edc2b9a777744462da2f8e921fbaf61e52acb95bafa9828"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:793b4e24db2e8742ca6423d3fde8396db336698c55cd34b660663ee9e45ed37f"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d93480005693d247f8346bc8ee28c72a2191bdf1f6b5db469c096c0c867ac015"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c83341b89884e2b2e55886e8fbbf37c3fa5efd6c8907124aeb72f285ae5696e5"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1a1d1915db1a4fdb2754b9de292642a39a7fb28f1736699527bb649484fb966a"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a0eaa93d054751ee9964afa21c06247779b90440ca41d184aeb5d410f20ff591"}, + {file = "pillow-10.3.0.tar.gz", hash = "sha256:9d2455fbf44c914840c793e89aa82d0e1763a14253a000743719ae5946814b2d"}, +] + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] +fpx = ["olefile"] +mic = ["olefile"] +tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] +typing = ["typing-extensions"] +xmp = ["defusedxml"] + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] + +[[package]] +name = "pyarrow" +version = "15.0.2" +description = "Python library for Apache Arrow" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyarrow-15.0.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:88b340f0a1d05b5ccc3d2d986279045655b1fe8e41aba6ca44ea28da0d1455d8"}, + {file = "pyarrow-15.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eaa8f96cecf32da508e6c7f69bb8401f03745c050c1dd42ec2596f2e98deecac"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23c6753ed4f6adb8461e7c383e418391b8d8453c5d67e17f416c3a5d5709afbd"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f639c059035011db8c0497e541a8a45d98a58dbe34dc8fadd0ef128f2cee46e5"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:290e36a59a0993e9a5224ed2fb3e53375770f07379a0ea03ee2fce2e6d30b423"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:06c2bb2a98bc792f040bef31ad3e9be6a63d0cb39189227c08a7d955db96816e"}, + {file = "pyarrow-15.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:f7a197f3670606a960ddc12adbe8075cea5f707ad7bf0dffa09637fdbb89f76c"}, + {file = "pyarrow-15.0.2-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:5f8bc839ea36b1f99984c78e06e7a06054693dc2af8920f6fb416b5bca9944e4"}, + {file = "pyarrow-15.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f5e81dfb4e519baa6b4c80410421528c214427e77ca0ea9461eb4097c328fa33"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a4f240852b302a7af4646c8bfe9950c4691a419847001178662a98915fd7ee7"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e7d9cfb5a1e648e172428c7a42b744610956f3b70f524aa3a6c02a448ba853e"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2d4f905209de70c0eb5b2de6763104d5a9a37430f137678edfb9a675bac9cd98"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:90adb99e8ce5f36fbecbbc422e7dcbcbed07d985eed6062e459e23f9e71fd197"}, + {file = "pyarrow-15.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:b116e7fd7889294cbd24eb90cd9bdd3850be3738d61297855a71ac3b8124ee38"}, + {file = "pyarrow-15.0.2-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:25335e6f1f07fdaa026a61c758ee7d19ce824a866b27bba744348fa73bb5a440"}, + {file = "pyarrow-15.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:90f19e976d9c3d8e73c80be84ddbe2f830b6304e4c576349d9360e335cd627fc"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a22366249bf5fd40ddacc4f03cd3160f2d7c247692945afb1899bab8a140ddfb"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2a335198f886b07e4b5ea16d08ee06557e07db54a8400cc0d03c7f6a22f785f"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:3e6d459c0c22f0b9c810a3917a1de3ee704b021a5fb8b3bacf968eece6df098f"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:033b7cad32198754d93465dcfb71d0ba7cb7cd5c9afd7052cab7214676eec38b"}, + {file = "pyarrow-15.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:29850d050379d6e8b5a693098f4de7fd6a2bea4365bfd073d7c57c57b95041ee"}, + {file = "pyarrow-15.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:7167107d7fb6dcadb375b4b691b7e316f4368f39f6f45405a05535d7ad5e5058"}, + {file = "pyarrow-15.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e85241b44cc3d365ef950432a1b3bd44ac54626f37b2e3a0cc89c20e45dfd8bf"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:248723e4ed3255fcd73edcecc209744d58a9ca852e4cf3d2577811b6d4b59818"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ff3bdfe6f1b81ca5b73b70a8d482d37a766433823e0c21e22d1d7dde76ca33f"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:f3d77463dee7e9f284ef42d341689b459a63ff2e75cee2b9302058d0d98fe142"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:8c1faf2482fb89766e79745670cbca04e7018497d85be9242d5350cba21357e1"}, + {file = "pyarrow-15.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:28f3016958a8e45a1069303a4a4f6a7d4910643fc08adb1e2e4a7ff056272ad3"}, + {file = "pyarrow-15.0.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:89722cb64286ab3d4daf168386f6968c126057b8c7ec3ef96302e81d8cdb8ae4"}, + {file = "pyarrow-15.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cd0ba387705044b3ac77b1b317165c0498299b08261d8122c96051024f953cd5"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad2459bf1f22b6a5cdcc27ebfd99307d5526b62d217b984b9f5c974651398832"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58922e4bfece8b02abf7159f1f53a8f4d9f8e08f2d988109126c17c3bb261f22"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:adccc81d3dc0478ea0b498807b39a8d41628fa9210729b2f718b78cb997c7c91"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:8bd2baa5fe531571847983f36a30ddbf65261ef23e496862ece83bdceb70420d"}, + {file = "pyarrow-15.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6669799a1d4ca9da9c7e06ef48368320f5856f36f9a4dd31a11839dda3f6cc8c"}, + {file = "pyarrow-15.0.2.tar.gz", hash = "sha256:9c9bc803cb3b7bfacc1e96ffbfd923601065d9d3f911179d81e72d99fd74a3d9"}, +] + +[package.dependencies] +numpy = ">=1.16.6,<2" + +[[package]] +name = "pycparser" +version = "2.22" +description = "C parser in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, +] + +[[package]] +name = "pydantic" +version = "1.10.14" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, + {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, + {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, + {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, + {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, + {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, + {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, + {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, + {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, + {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + +[[package]] +name = "pyrsistent" +version = "0.20.0" +description = "Persistent/Functional/Immutable data structures" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, +] + +[[package]] +name = "pytesseract" +version = "0.3.10" +description = "Python-tesseract is a python wrapper for Google's Tesseract-OCR" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytesseract-0.3.10-py3-none-any.whl", hash = "sha256:8f22cc98f765bf13517ead0c70effedb46c153540d25783e04014f28b55a5fc6"}, + {file = "pytesseract-0.3.10.tar.gz", hash = "sha256:f1c3a8b0f07fd01a1085d451f5b8315be6eec1d5577a6796d46dc7a62bd4120f"}, +] + +[package.dependencies] +packaging = ">=21.3" +Pillow = ">=8.0.0" + +[[package]] +name = "pytest" +version = "6.2.5" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, + {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, +] + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +toml = "*" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] + +[[package]] +name = "pytest-mock" +version = "3.14.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, +] + +[package.dependencies] +pytest = ">=6.2.5" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-docx" +version = "1.1.0" +description = "Create, read, and update Microsoft Word .docx files." +optional = false +python-versions = ">=3.7" +files = [ + {file = "python-docx-1.1.0.tar.gz", hash = "sha256:5829b722141cf1ab79aedf0c34d9fe9924b29764584c0f2164eb2b02dcdf17c9"}, + {file = "python_docx-1.1.0-py3-none-any.whl", hash = "sha256:bac9773278098a1ddc43a52d84e22f5909c4a3080a624530b3ecb3771b07c6cd"}, +] + +[package.dependencies] +lxml = ">=3.1.0" +typing-extensions = "*" + +[[package]] +name = "python-iso639" +version = "2024.2.7" +description = "Look-up utilities for ISO 639 language codes and names" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-iso639-2024.2.7.tar.gz", hash = "sha256:c323233348c34d57c601e3e6d824088e492896bcb97a61a87f7d93401a305377"}, + {file = "python_iso639-2024.2.7-py3-none-any.whl", hash = "sha256:7b149623ff74230f4ee3061fb01d18e57a8d07c5fee2aa72907f39b7f6d16cbc"}, +] + +[package.extras] +dev = ["black (==24.1.1)", "build (==1.0.3)", "flake8 (==7.0.0)", "pytest (==8.0.0)", "twine (==4.0.2)"] + +[[package]] +name = "python-magic" +version = "0.4.27" +description = "File type identification using libmagic" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "python-magic-0.4.27.tar.gz", hash = "sha256:c1ba14b08e4a5f5c31a302b7721239695b2f0f058d125bd5ce1ee36b9d9d3c3b"}, + {file = "python_magic-0.4.27-py2.py3-none-any.whl", hash = "sha256:c212960ad306f700aa0d01e5d7a325d20548ff97eb9920dcd29513174f0294d3"}, +] + +[[package]] +name = "python-pptx" +version = "0.6.21" +description = "Generate and manipulate Open XML PowerPoint (.pptx) files" +optional = false +python-versions = "*" +files = [ + {file = "python-pptx-0.6.21.tar.gz", hash = "sha256:7798a2aaf89563565b3c7120c0acfe9aff775db0db3580544e3bf4840c2e378f"}, +] + +[package.dependencies] +lxml = ">=3.1.0" +Pillow = ">=3.3.2" +XlsxWriter = ">=0.5.7" + +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + +[[package]] +name = "pywin32" +version = "306" +description = "Python for Window Extensions" +optional = false +python-versions = "*" +files = [ + {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, + {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, + {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, + {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, + {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, + {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, + {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, + {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, + {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, + {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, + {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, + {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, + {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, + {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "rapidfuzz" +version = "3.7.0" +description = "rapid fuzzy string matching" +optional = false +python-versions = ">=3.8" +files = [ + {file = "rapidfuzz-3.7.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:860f438238f1807532aa5c5c25e74c284232ccc115fe84697b78e25d48f364f7"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4bb9285abeb0477cdb2f8ea0cf7fd4b5f72ed5a9a7d3f0c0bb4a5239db2fc1ed"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:08671280e0c04d2bb3f39511f13cae5914e6690036fd1eefc3d47a47f9fae634"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04bae4d9c16ce1bab6447d196fb8258d98139ed8f9b288a38b84887985e4227b"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1efa2268b51b68156fb84d18ca1720311698a58051c4a19c40d670057ce60519"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:600b4d4315f33ec0356c0dab3991a5d5761102420bcff29e0773706aa48936e8"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18bc2f13c73d5d34499ff6ada55b052c445d3aa64d22c2639e5ab45472568046"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e11c5e6593be41a555475c9c20320342c1f5585d635a064924956944c465ad4"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d7878025248b99ccca3285891899373f98548f2ca13835d83619ffc42241c626"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b4a7e37fe136022d944374fcd8a2f72b8a19f7b648d2cdfb946667e9ede97f9f"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b5881856f830351aaabd869151124f64a80bf61560546d9588a630a4e933a5de"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:c788b11565cc176fab8fab6dfcd469031e906927db94bf7e422afd8ef8f88a5a"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9e17a3092e74025d896ef1d67ac236c83494da37a78ef84c712e4e2273c115f1"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-win32.whl", hash = "sha256:e499c823206c9ffd9d89aa11f813a4babdb9219417d4efe4c8a6f8272da00e98"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:91f798cc00cd94a0def43e9befc6e867c9bd8fa8f882d1eaa40042f528b7e2c7"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-win_arm64.whl", hash = "sha256:d5a3872f35bec89f07b993fa1c5401d11b9e68bcdc1b9737494e279308a38a5f"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ef6b6ab64c4c91c57a6b58e1d690b59453bfa1f1e9757a7e52e59b4079e36631"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2f9070b42c0ba030b045bba16a35bdb498a0d6acb0bdb3ff4e325960e685e290"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:63044c63565f50818d885bfcd40ac369947da4197de56b4d6c26408989d48edf"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49b0c47860c733a3d73a4b70b97b35c8cbf24ef24f8743732f0d1c412a8c85de"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1b14489b038f007f425a06fcf28ac6313c02cb603b54e3a28d9cfae82198cc0"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be08f39e397a618aab907887465d7fabc2d1a4d15d1a67cb8b526a7fb5202a3e"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16895dc62a7b92028f9c8b6d22830f1cbc77306ee794f461afc6028e1a8d7539"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:579cce49dfa57ffd8c8227b3fb53cced54b4df70cec502e63e9799b4d1f44004"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:40998c8dc35fdd221790b8b5134a8d7499adbfab9a5dd9ec626c7e92e17a43ed"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:dc3fdb4738a6b83ae27f1d8923b00d3a9c2b5c50da75b9f8b81841839c6e3e1f"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:92b8146fbfb37ac358ef7e0f6b79619e4f793fbbe894b99ea87920f9c0a9d77d"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:1dfceaa7c2914585bb8a043265c39ec09078f13fbf53b5525722fc074306b6fa"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f332d61f51b0b9c8b55a0fb052b4764b6ad599ea8ce948ac47a4388e9083c35e"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-win32.whl", hash = "sha256:dfd1e4819f1f3c47141f86159b44b7360ecb19bf675080b3b40437bf97273ab9"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:594b9c33fc1a86784962043ee3fbaaed875fbaadff72e467c2f7a83cd6c5d69d"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-win_arm64.whl", hash = "sha256:0b13a6823a1b83ae43f8bf35955df35032bee7bec0daf9b5ab836e0286067434"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:075a419a0ec29be44b3d7f4bcfa5cb7e91e419379a85fc05eb33de68315bd96f"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:51a5b96d2081c3afbef1842a61d63e55d0a5a201473e6975a80190ff2d6f22ca"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a9460d8fddac7ea46dff9298eee9aa950dbfe79f2eb509a9f18fbaefcd10894c"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f39eb1513ee139ba6b5c01fe47ddf2d87e9560dd7fdee1068f7f6efbae70de34"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eace9fdde58a425d4c9a93021b24a0cac830df167a5b2fc73299e2acf9f41493"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0cc77237242303733de47829028a0a8b6ab9188b23ec9d9ff0a674fdcd3c8e7f"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:74e692357dd324dff691d379ef2c094c9ec526c0ce83ed43a066e4e68fe70bf6"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f2075ac9ee5c15d33d24a1efc8368d095602b5fd9634c5b5f24d83e41903528"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5a8ba64d72329a940ff6c74b721268c2004eecc48558f648a38e96915b5d1c1b"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a1f268a2a37cd22573b4a06eccd481c04504b246d3cadc2d8e8dfa64b575636d"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:42c2e8a2341363c7caf276efdbe1a673fc5267a02568c47c8e980f12e9bc8727"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:a9acca34b34fb895ee6a84c436bb919f3b9cd8f43e7003d43e9573a1d990ff74"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9bad6a0fe3bc1753dacaa6229a8ba7d9844eb7ae24d44d17c5f4c51c91a8a95e"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-win32.whl", hash = "sha256:c86bc4b1d2380739e6485396195e30021df509b4923f3f757914e171587bce7c"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:d7361608c8e73a1dc0203a87d151cddebdade0098a047c46da43c469c07df964"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-win_arm64.whl", hash = "sha256:8fdc26e7863e0f63c2185d53bb61f5173ad4451c1c8287b535b30ea25a419a5a"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9b6167468f76779a14b9af66210f68741af94d32d086f19118de4e919f00585c"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5bd394e28ff221557ea4d8152fcec3e66d9f620557feca5f2bedc4c21f8cf2f9"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8e70f876ca89a6df344f8157ac60384e8c05a0dfb442da2490c3f1c45238ccf5"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c837f89d86a5affe9ee6574dad6b195475676a6ab171a67920fc99966f2ab2c"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cda4550a98658f9a8bcdc03d0498ed1565c1563880e3564603a9eaae28d51b2a"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecd70212fd9f1f8b1d3bdd8bcb05acc143defebd41148bdab43e573b043bb241"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:187db4cc8fb54f8c49c67b7f38ef3a122ce23be273032fa2ff34112a2694c3d8"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4604dfc1098920c4eb6d0c6b5cc7bdd4bf95b48633e790c1d3f100a25870691d"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01581b688c5f4f6665b779135e32db0edab1d78028abf914bb91469928efa383"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0828b55ec8ad084febdf4ab0c942eb1f81c97c0935f1cb0be0b4ea84ce755988"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:150c98b65faff17b917b9d36bff8a4d37b6173579c6bc2e38ff2044e209d37a4"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7e4eea225d2bff1aff4c85fcc44716596d3699374d99eb5906b7a7560297460e"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7bc944d7e830cfce0f8b4813875f05904207017b66e25ab7ee757507001310a9"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-win32.whl", hash = "sha256:3e55f02105c451ab6ff0edaaba57cab1b6c0a0241cfb2b306d4e8e1503adba50"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:41851620d2900791d66d9b6092fc163441d7dd91a460c73b07957ff1c517bc30"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e8041c6b2d339766efe6298fa272f79d6dd799965df364ef4e50f488c101c899"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4e09d81008e212fc824ea23603ff5270d75886e72372fa6c7c41c1880bcb57ed"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:419c8961e861fb5fc5590056c66a279623d1ea27809baea17e00cdc313f1217a"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1522eaab91b9400b3ef16eebe445940a19e70035b5bc5d98aef23d66e9ac1df0"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:611278ce3136f4544d596af18ab8849827d64372e1d8888d9a8d071bf4a3f44d"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4efa9bfc5b955b6474ee077eee154e240441842fa304f280b06e6b6aa58a1d1e"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0cc9d3c8261457af3f8756b1f71a9fdc4892978a9e8b967976d2803e08bf972"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce728e2b582fd396bc2559160ee2e391e6a4b5d2e455624044699d96abe8a396"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3a6a36c9299e059e0bee3409218bc5235a46570c20fc980cdee5ed21ea6110ad"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9ea720db8def684c1eb71dadad1f61c9b52f4d979263eb5d443f2b22b0d5430a"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:358692f1df3f8aebcd48e69c77c948c9283b44c0efbaf1eeea01739efe3cd9a6"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:faded69ffe79adcefa8da08f414a0fd52375e2b47f57be79471691dad9656b5a"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7f9f3dc14fadbd553975f824ac48c381f42192cec9d7e5711b528357662a8d8e"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-win32.whl", hash = "sha256:7be5f460ff42d7d27729115bfe8a02e83fa0284536d8630ee900d17b75c29e65"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:dd5ad2c12dab2b98340c4b7b9592c8f349730bda9a2e49675ea592bbcbc1360b"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-win_arm64.whl", hash = "sha256:aa163257a0ac4e70f9009d25e5030bdd83a8541dfa3ba78dc86b35c9e16a80b4"}, + {file = "rapidfuzz-3.7.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4e50840a8a8e0229563eeaf22e21a203359859557db8829f4d0285c17126c5fb"}, + {file = "rapidfuzz-3.7.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:632f09e19365ace5ff2670008adc8bf23d03d668b03a30230e5b60ff9317ee93"}, + {file = "rapidfuzz-3.7.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:209dda6ae66b702f74a78cef555397cdc2a83d7f48771774a20d2fc30808b28c"}, + {file = "rapidfuzz-3.7.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bc0b78572626af6ab134895e4dbfe4f4d615d18dcc43b8d902d8e45471aabba"}, + {file = "rapidfuzz-3.7.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7ba14850cc8258b3764ea16b8a4409ac2ba16d229bde7a5f495dd479cd9ccd56"}, + {file = "rapidfuzz-3.7.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b917764fd2b267addc9d03a96d26f751f6117a95f617428c44a069057653b528"}, + {file = "rapidfuzz-3.7.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1252ca156e1b053e84e5ae1c8e9e062ee80468faf23aa5c543708212a42795fd"}, + {file = "rapidfuzz-3.7.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:86c7676a32d7524e40bc73546e511a408bc831ae5b163029d325ea3a2027d089"}, + {file = "rapidfuzz-3.7.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20e7d729af2e5abb29caa070ec048aba042f134091923d9ca2ac662b5604577e"}, + {file = "rapidfuzz-3.7.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:86eea3e6c314a9238de568254a9c591ec73c2985f125675ed5f171d869c47773"}, + {file = "rapidfuzz-3.7.0.tar.gz", hash = "sha256:620df112c39c6d27316dc1e22046dc0382d6d91fd60d7c51bd41ca0333d867e9"}, +] + +[package.extras] +full = ["numpy"] + +[[package]] +name = "regex" +version = "2023.12.25" +description = "Alternative regular expression module, to replace re." +optional = false +python-versions = ">=3.7" +files = [ + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"}, + {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"}, + {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"}, + {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"}, + {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"}, + {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"}, + {file = "regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"}, + {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"}, + {file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = "sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"}, + {file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"}, + {file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"}, + {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"}, + {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"}, + {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"}, + {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-cache" +version = "1.2.0" +description = "A persistent cache for python requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, +] + +[package.dependencies] +attrs = ">=21.2" +cattrs = ">=22.2" +platformdirs = ">=2.5" +requests = ">=2.22" +url-normalize = ">=1.4" +urllib3 = ">=1.25.5" + +[package.extras] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] +bson = ["bson (>=0.5)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] +dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] +json = ["ujson (>=5.4)"] +mongodb = ["pymongo (>=3)"] +redis = ["redis (>=3)"] +security = ["itsdangerous (>=2.0)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "requests-mock" +version = "1.12.1" +description = "Mock out responses from the requests package" +optional = false +python-versions = ">=3.5" +files = [ + {file = "requests-mock-1.12.1.tar.gz", hash = "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401"}, + {file = "requests_mock-1.12.1-py2.py3-none-any.whl", hash = "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563"}, +] + +[package.dependencies] +requests = ">=2.22,<3" + +[package.extras] +fixture = ["fixtures"] + +[[package]] +name = "setuptools" +version = "69.2.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "smart-open" +version = "6.4.0" +description = "Utils for streaming large files (S3, HDFS, GCS, Azure Blob Storage, gzip, bz2...)" +optional = false +python-versions = ">=3.6,<4.0" +files = [ + {file = "smart_open-6.4.0-py3-none-any.whl", hash = "sha256:8d3ef7e6997e8e42dd55c74166ed21e6ac70664caa32dd940b26d54a8f6b4142"}, + {file = "smart_open-6.4.0.tar.gz", hash = "sha256:be3c92c246fbe80ebce8fbacb180494a481a77fcdcb7c1aadb2ea5b9c2bee8b9"}, +] + +[package.dependencies] +azure-common = {version = "*", optional = true, markers = "extra == \"azure\""} +azure-core = {version = "*", optional = true, markers = "extra == \"azure\""} +azure-storage-blob = {version = "*", optional = true, markers = "extra == \"azure\""} + +[package.extras] +all = ["azure-common", "azure-core", "azure-storage-blob", "boto3", "google-cloud-storage (>=2.6.0)", "paramiko", "requests"] +azure = ["azure-common", "azure-core", "azure-storage-blob"] +gcs = ["google-cloud-storage (>=2.6.0)"] +http = ["requests"] +s3 = ["boto3"] +ssh = ["paramiko"] +test = ["azure-common", "azure-core", "azure-storage-blob", "boto3", "google-cloud-storage (>=2.6.0)", "moto[server]", "paramiko", "pytest", "pytest-rerunfailures", "requests", "responses"] +webhdfs = ["requests"] + +[[package]] +name = "soupsieve" +version = "2.5" +description = "A modern CSS selector implementation for Beautiful Soup." +optional = false +python-versions = ">=3.8" +files = [ + {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, + {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, +] + +[[package]] +name = "tabulate" +version = "0.9.0" +description = "Pretty-print tabular data" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, + {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, +] + +[package.extras] +widechars = ["wcwidth"] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "tqdm" +version = "4.66.2" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tqdm-4.66.2-py3-none-any.whl", hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"}, + {file = "tqdm-4.66.2.tar.gz", hash = "sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + +[[package]] +name = "typing-extensions" +version = "4.10.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, +] + +[[package]] +name = "typing-inspect" +version = "0.9.0" +description = "Runtime inspection utilities for typing module." +optional = false +python-versions = "*" +files = [ + {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, + {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, +] + +[package.dependencies] +mypy-extensions = ">=0.3.0" +typing-extensions = ">=3.7.4" + +[[package]] +name = "tzdata" +version = "2024.1" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, +] + +[[package]] +name = "unstructured" +version = "0.10.27" +description = "A library that prepares raw documents for downstream ML tasks." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "unstructured-0.10.27-py3-none-any.whl", hash = "sha256:3a8a8e44302388ddc39c184059e8b4458f1cdc58032540b9af7d85f6c3eca3be"}, + {file = "unstructured-0.10.27.tar.gz", hash = "sha256:f567b5c4385993a9ab48db5563dd7b413aac4f2002bb22e6250496ea8f440f5e"}, +] + +[package.dependencies] +backoff = "*" +beautifulsoup4 = "*" +chardet = "*" +dataclasses-json = "*" +emoji = "*" +filetype = "*" +langdetect = "*" +lxml = "*" +nltk = "*" +numpy = "*" +python-docx = {version = ">=1.0.1", optional = true, markers = "extra == \"docx\""} +python-iso639 = "*" +python-magic = "*" +python-pptx = {version = "<=0.6.21", optional = true, markers = "extra == \"pptx\""} +rapidfuzz = "*" +requests = "*" +tabulate = "*" +typing-extensions = "*" + +[package.extras] +airtable = ["pyairtable"] +all-docs = ["markdown", "msg-parser", "networkx", "onnx", "openpyxl", "pandas", "pdf2image", "pdfminer.six", "pypandoc", "python-docx (>=1.0.1)", "python-pptx (<=0.6.21)", "unstructured-inference (==0.7.10)", "unstructured.pytesseract (>=0.3.12)", "xlrd"] +azure = ["adlfs", "fsspec (==2023.9.1)"] +azure-cognitive-search = ["azure-search-documents"] +bedrock = ["boto3", "langchain"] +biomed = ["bs4"] +box = ["boxfs", "fsspec (==2023.9.1)"] +confluence = ["atlassian-python-api"] +csv = ["pandas"] +delta-table = ["deltalake", "fsspec (==2023.9.1)"] +discord = ["discord-py"] +doc = ["python-docx (>=1.0.1)"] +docx = ["python-docx (>=1.0.1)"] +dropbox = ["dropboxdrivefs", "fsspec (==2023.9.1)"] +elasticsearch = ["elasticsearch", "jq"] +embed-huggingface = ["huggingface", "langchain", "sentence-transformers"] +epub = ["pypandoc"] +gcs = ["bs4", "fsspec (==2023.9.1)", "gcsfs"] +github = ["pygithub (>1.58.0)"] +gitlab = ["python-gitlab"] +google-drive = ["google-api-python-client"] +huggingface = ["langdetect", "sacremoses", "sentencepiece", "torch", "transformers"] +image = ["onnx", "pdf2image", "pdfminer.six", "unstructured-inference (==0.7.10)", "unstructured.pytesseract (>=0.3.12)"] +jira = ["atlassian-python-api"] +local-inference = ["markdown", "msg-parser", "networkx", "onnx", "openpyxl", "pandas", "pdf2image", "pdfminer.six", "pypandoc", "python-docx (>=1.0.1)", "python-pptx (<=0.6.21)", "unstructured-inference (==0.7.10)", "unstructured.pytesseract (>=0.3.12)", "xlrd"] +md = ["markdown"] +msg = ["msg-parser"] +notion = ["htmlBuilder", "notion-client"] +odt = ["pypandoc", "python-docx (>=1.0.1)"] +onedrive = ["Office365-REST-Python-Client (<2.4.3)", "bs4", "msal"] +openai = ["langchain", "openai", "tiktoken"] +org = ["pypandoc"] +outlook = ["Office365-REST-Python-Client (<2.4.3)", "msal"] +paddleocr = ["unstructured.paddleocr (==2.6.1.3)"] +pdf = ["onnx", "pdf2image", "pdfminer.six", "unstructured-inference (==0.7.10)", "unstructured.pytesseract (>=0.3.12)"] +ppt = ["python-pptx (<=0.6.21)"] +pptx = ["python-pptx (<=0.6.21)"] +reddit = ["praw"] +rst = ["pypandoc"] +rtf = ["pypandoc"] +s3 = ["fsspec (==2023.9.1)", "s3fs"] +salesforce = ["simple-salesforce"] +sharepoint = ["Office365-REST-Python-Client (<2.4.3)", "msal"] +slack = ["slack-sdk"] +tsv = ["pandas"] +wikipedia = ["wikipedia"] +xlsx = ["networkx", "openpyxl", "pandas", "xlrd"] + +[[package]] +name = "unstructured-pytesseract" +version = "0.3.12" +description = "Python-tesseract is a python wrapper for Google's Tesseract-OCR" +optional = false +python-versions = ">=3.8" +files = [ + {file = "unstructured.pytesseract-0.3.12-py3-none-any.whl", hash = "sha256:6ed42530fc697bb08d1ae4884cc517ee808620c1c1414efe8d5d90334da068d3"}, + {file = "unstructured.pytesseract-0.3.12.tar.gz", hash = "sha256:751a21d67b1f109036bf4daf796d3e04631697a355efd650f3373412b249de2e"}, +] + +[package.dependencies] +packaging = ">=21.3" +Pillow = ">=8.0.0" + +[[package]] +name = "url-normalize" +version = "1.4.3" +description = "URL normalization for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, + {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wcmatch" +version = "8.4" +description = "Wildcard/glob file name matcher." +optional = false +python-versions = ">=3.7" +files = [ + {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, + {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, +] + +[package.dependencies] +bracex = ">=2.1.1" + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[[package]] +name = "xlsxwriter" +version = "3.2.0" +description = "A Python module for creating Excel XLSX files." +optional = false +python-versions = ">=3.6" +files = [ + {file = "XlsxWriter-3.2.0-py3-none-any.whl", hash = "sha256:ecfd5405b3e0e228219bcaf24c2ca0915e012ca9464a14048021d21a995d490e"}, + {file = "XlsxWriter-3.2.0.tar.gz", hash = "sha256:9977d0c661a72866a61f9f7a809e25ebbb0fb7036baa3b9fe74afcfca6b3cb8c"}, +] + +[[package]] +name = "zipp" +version = "3.18.1" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, + {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9,<3.12" +content-hash = "9f60ba4978f0e4d71316fa93c59dbaa0103f50be582641f436b0eade8a5ba0e4" diff --git a/airbyte-integrations/connectors/source-azure-blob-storage/pyproject.toml b/airbyte-integrations/connectors/source-azure-blob-storage/pyproject.toml new file mode 100644 index 0000000000000..2c559d57dc328 --- /dev/null +++ b/airbyte-integrations/connectors/source-azure-blob-storage/pyproject.toml @@ -0,0 +1,34 @@ +[build-system] +requires = [ "poetry-core>=1.0.0",] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +version = "0.4.0" +name = "source-azure-blob-storage" +description = "Source implementation for Azure Blob Storage." +authors = [ "Airbyte ",] +license = "MIT" +readme = "README.md" +documentation = "https://docs.airbyte.com/integrations/sources/azure-blob-storage" +homepage = "https://airbyte.com" +repository = "https://github.com/airbytehq/airbyte" +[[tool.poetry.packages]] +include = "source_azure_blob_storage" + +[tool.poetry.dependencies] +python = "^3.9,<3.12" +pytz = "==2024.1" +airbyte-cdk = {extras = ["file-based"], version = "^0"} +[tool.poetry.dependencies.smart-open] +extras = [ "azure",] +version = "==6.4.0" + +[tool.poetry.scripts] +source-azure-blob-storage = "source_azure_blob_storage.run:run" + +[tool.poetry.group.dev.dependencies] +docker = "^7.0.0" +pytest-mock = "^3.6.1" +requests-mock = "^1.9.3" +pandas = "2.2.1" +pytest = "^6.2" diff --git a/airbyte-integrations/connectors/source-azure-blob-storage/setup.py b/airbyte-integrations/connectors/source-azure-blob-storage/setup.py deleted file mode 100644 index 97e8173bf2b2b..0000000000000 --- a/airbyte-integrations/connectors/source-azure-blob-storage/setup.py +++ /dev/null @@ -1,44 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = [ - "airbyte-cdk[file-based]>=0.61.0", - "smart_open[azure]", - "pytz", -] - -TEST_REQUIREMENTS = ["requests-mock~=1.9.3", "pytest-mock~=3.6.1", "pytest~=6.2"] - -setup( - entry_points={ - "console_scripts": [ - "source-azure-blob-storage=source_azure_blob_storage.run:run", - ], - }, - name="source_azure_blob_storage", - description="Source implementation for Azure Blob Storage.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={ - "": [ - # Include yaml files in the package (if any) - "*.yml", - "*.yaml", - # Include all json files in the package, up to 4 levels deep - "*.json", - "*/*.json", - "*/*/*.json", - "*/*/*/*.json", - "*/*/*/*/*.json", - ] - }, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/source-azure-blob-storage/source_azure_blob_storage/config.py b/airbyte-integrations/connectors/source-azure-blob-storage/source_azure_blob_storage/config.py index 9955603ba74bd..8243cd0ac7e0e 100644 --- a/airbyte-integrations/connectors/source-azure-blob-storage/source_azure_blob_storage/config.py +++ b/airbyte-integrations/connectors/source-azure-blob-storage/source_azure_blob_storage/config.py @@ -2,11 +2,51 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # -from typing import Any, Dict, Optional +from typing import Any, Dict, Literal, Optional, Union import dpath.util from airbyte_cdk.sources.file_based.config.abstract_file_based_spec import AbstractFileBasedSpec -from pydantic import AnyUrl, Field +from airbyte_cdk.utils.oneof_option_config import OneOfOptionConfig +from pydantic import AnyUrl, BaseModel, Field + + +class Oauth2(BaseModel): + class Config(OneOfOptionConfig): + title = "Authenticate via Oauth2" + discriminator = "auth_type" + + auth_type: Literal["oauth2"] = Field("oauth2", const=True) + tenant_id: str = Field(title="Tenant ID", description="Tenant ID of the Microsoft Azure Application user", airbyte_secret=True) + client_id: str = Field( + title="Client ID", + description="Client ID of your Microsoft developer application", + airbyte_secret=True, + ) + client_secret: str = Field( + title="Client Secret", + description="Client Secret of your Microsoft developer application", + airbyte_secret=True, + ) + refresh_token: str = Field( + title="Refresh Token", + description="Refresh Token of your Microsoft developer application", + airbyte_secret=True, + ) + + +class StorageAccountKey(BaseModel): + class Config(OneOfOptionConfig): + title = "Authenticate via Storage Account Key" + discriminator = "auth_type" + + auth_type: Literal["storage_account_key"] = Field("storage_account_key", const=True) + azure_blob_storage_account_key: str = Field( + title="Azure Blob Storage account key", + description="The Azure blob storage account key.", + airbyte_secret=True, + examples=["Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd=="], + order=3, + ) class Config(AbstractFileBasedSpec): @@ -25,11 +65,11 @@ def documentation_url(cls) -> AnyUrl: examples=["airbyte5storage"], order=2, ) - azure_blob_storage_account_key: str = Field( - title="Azure Blob Storage account key", - description="The Azure blob storage account key.", - airbyte_secret=True, - examples=["Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd=="], + credentials: Union[Oauth2, StorageAccountKey] = Field( + title="Authentication", + description="Credentials for connecting to the Azure Blob Storage", + discriminator="auth_type", + type="object", order=3, ) azure_blob_storage_container_name: str = Field( diff --git a/airbyte-integrations/connectors/source-azure-blob-storage/source_azure_blob_storage/config_migrations.py b/airbyte-integrations/connectors/source-azure-blob-storage/source_azure_blob_storage/config_migrations.py new file mode 100644 index 0000000000000..50fa6cef874f6 --- /dev/null +++ b/airbyte-integrations/connectors/source-azure-blob-storage/source_azure_blob_storage/config_migrations.py @@ -0,0 +1,77 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +import logging +from typing import Any, List, Mapping + +from airbyte_cdk.config_observation import create_connector_config_control_message +from airbyte_cdk.entrypoint import AirbyteEntrypoint +from airbyte_cdk.sources import Source + +logger = logging.getLogger("airbyte_logger") + + +class MigrateCredentials: + """ + This class stands for migrating the config azure_blob_storage_account_key inside object `credentials` + """ + + @classmethod + def should_migrate(cls, config: Mapping[str, Any]) -> bool: + return "credentials" not in config + + @classmethod + def set_azure_blob_storage_account_key(cls, config: Mapping[str, Any]) -> Mapping[str, Any]: + config["credentials"] = { + "auth_type": "storage_account_key", + "azure_blob_storage_account_key": config.pop("azure_blob_storage_account_key"), + } + return config + + @classmethod + def modify_and_save(cls, config_path: str, source: Source, config: Mapping[str, Any]) -> Mapping[str, Any]: + """ + Modifies the configuration and then saves it back to the source. + + Args: + - config_path (str): The path where the configuration is stored. + - source (Source): The data source. + - config (Mapping[str, Any]): The current configuration. + + Returns: + - Mapping[str, Any]: The updated configuration. + """ + migrated_config = cls.set_azure_blob_storage_account_key(config) + source.write_config(migrated_config, config_path) + return migrated_config + + @classmethod + def emit_control_message(cls, migrated_config: Mapping[str, Any]) -> None: + """ + Emits the control messages related to configuration migration. + + Args: + - migrated_config (Mapping[str, Any]): The migrated configuration. + """ + print(create_connector_config_control_message(migrated_config).json(exclude_unset=True)) + + @classmethod + def migrate(cls, args: List[str], source: Source) -> None: + """ + Orchestrates the configuration migration process. + + It first checks if the `--config` argument is provided, and if so, + determines whether migration is needed, and then performs the migration + if required. + + Args: + - args (List[str]): List of command-line arguments. + - source (Source): The data source. + """ + config_path = AirbyteEntrypoint(source).extract_config(args) + if config_path: + config = source.read_config(config_path) + if cls.should_migrate(config): + cls.emit_control_message(cls.modify_and_save(config_path, source, config)) diff --git a/airbyte-integrations/connectors/source-azure-blob-storage/source_azure_blob_storage/run.py b/airbyte-integrations/connectors/source-azure-blob-storage/source_azure_blob_storage/run.py index a671d836526a5..dc1ed3c66c80d 100644 --- a/airbyte-integrations/connectors/source-azure-blob-storage/source_azure_blob_storage/run.py +++ b/airbyte-integrations/connectors/source-azure-blob-storage/source_azure_blob_storage/run.py @@ -8,7 +8,9 @@ from airbyte_cdk.entrypoint import AirbyteEntrypoint, launch from airbyte_cdk.models import AirbyteErrorTraceMessage, AirbyteMessage, AirbyteTraceMessage, TraceType, Type +from airbyte_cdk.sources.file_based.stream.cursor import DefaultFileBasedCursor from source_azure_blob_storage import Config, SourceAzureBlobStorage, SourceAzureBlobStorageStreamReader +from source_azure_blob_storage.config_migrations import MigrateCredentials def run(): @@ -23,7 +25,9 @@ def run(): SourceAzureBlobStorage.read_catalog(catalog_path) if catalog_path else None, SourceAzureBlobStorage.read_config(config_path) if catalog_path else None, SourceAzureBlobStorage.read_state(state_path) if catalog_path else None, + cursor_cls=DefaultFileBasedCursor, ) + MigrateCredentials.migrate(sys.argv[1:], source) except Exception: print( AirbyteMessage( diff --git a/airbyte-integrations/connectors/source-azure-blob-storage/source_azure_blob_storage/source.py b/airbyte-integrations/connectors/source-azure-blob-storage/source_azure_blob_storage/source.py index 792dcdfe2221b..cac72123e7805 100644 --- a/airbyte-integrations/connectors/source-azure-blob-storage/source_azure_blob_storage/source.py +++ b/airbyte-integrations/connectors/source-azure-blob-storage/source_azure_blob_storage/source.py @@ -5,7 +5,9 @@ from typing import Any, Mapping from airbyte_cdk.config_observation import emit_configuration_as_airbyte_control_message +from airbyte_cdk.sources.declarative.models import OAuthConfigSpecification from airbyte_cdk.sources.file_based.file_based_source import FileBasedSource +from airbyte_protocol.models import AdvancedAuth, ConnectorSpecification from .legacy_config_transformer import LegacyConfigTransformer @@ -28,3 +30,43 @@ def read_config(cls, config_path: str) -> Mapping[str, Any]: @staticmethod def _is_v1_config(config: Mapping[str, Any]) -> bool: return "streams" in config + + def spec(self, *args: Any, **kwargs: Any) -> ConnectorSpecification: + """ + Returns the specification describing what fields can be configured by a user when setting up a file-based source. + """ + + return ConnectorSpecification( + documentationUrl=self.spec_class.documentation_url(), + connectionSpecification=self.spec_class.schema(), + advanced_auth=AdvancedAuth( + auth_flow_type="oauth2.0", + predicate_key=["credentials", "auth_type"], + predicate_value="oauth2", + oauth_config_specification=OAuthConfigSpecification( + complete_oauth_output_specification={ + "type": "object", + "additionalProperties": False, + "properties": {"refresh_token": {"type": "string", "path_in_connector_config": ["credentials", "refresh_token"]}}, + }, + complete_oauth_server_input_specification={ + "type": "object", + "additionalProperties": False, + "properties": {"client_id": {"type": "string"}, "client_secret": {"type": "string"}}, + }, + complete_oauth_server_output_specification={ + "type": "object", + "additionalProperties": False, + "properties": { + "client_id": {"type": "string", "path_in_connector_config": ["credentials", "client_id"]}, + "client_secret": {"type": "string", "path_in_connector_config": ["credentials", "client_secret"]}, + }, + }, + oauth_user_input_from_connector_config_specification={ + "type": "object", + "additionalProperties": False, + "properties": {"tenant_id": {"type": "string", "path_in_connector_config": ["credentials", "tenant_id"]}}, + }, + ), + ), + ) diff --git a/airbyte-integrations/connectors/source-azure-blob-storage/source_azure_blob_storage/stream_reader.py b/airbyte-integrations/connectors/source-azure-blob-storage/source_azure_blob_storage/stream_reader.py index c751b72403bd8..43070ebcba5e1 100644 --- a/airbyte-integrations/connectors/source-azure-blob-storage/source_azure_blob_storage/stream_reader.py +++ b/airbyte-integrations/connectors/source-azure-blob-storage/source_azure_blob_storage/stream_reader.py @@ -1,20 +1,35 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. import logging -from contextlib import contextmanager from io import IOBase -from typing import Iterable, List, Optional +from typing import Iterable, List, Optional, Union import pytz from airbyte_cdk.sources.file_based.file_based_stream_reader import AbstractFileBasedStreamReader, FileReadMode from airbyte_cdk.sources.file_based.remote_file import RemoteFile +from airbyte_cdk.sources.streams.http.requests_native_auth import Oauth2Authenticator +from azure.core.credentials import AccessToken from azure.storage.blob import BlobServiceClient, ContainerClient from smart_open import open from .config import Config +class AzureOauth2Authenticator(Oauth2Authenticator): + """ + Authenticator for Azure Blob Storage SDK to align with azure.core.credentials.TokenCredential protocol + """ + + def get_token(self, *args, **kwargs) -> AccessToken: + """Parent class handles Oauth Refresh token logic. + `expires_on` is ignored and set to year 2222 to align with protocol. + """ + return AccessToken(token=self.get_access_token(), expires_on=7952342400) + + class SourceAzureBlobStorageStreamReader(AbstractFileBasedStreamReader): + _credentials = None + def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self._config = None @@ -36,14 +51,26 @@ def account_url(self) -> str: @property def azure_container_client(self): return ContainerClient( - self.account_url, - container_name=self.config.azure_blob_storage_container_name, - credential=self.config.azure_blob_storage_account_key, + self.account_url, container_name=self.config.azure_blob_storage_container_name, credential=self.azure_credentials ) @property def azure_blob_service_client(self): - return BlobServiceClient(self.account_url, credential=self.config.azure_blob_storage_account_key) + return BlobServiceClient(self.account_url, credential=self._credentials) + + @property + def azure_credentials(self) -> Union[str, AzureOauth2Authenticator]: + if not self._credentials: + if self.config.credentials.auth_type == "storage_account_key": + self._credentials = self.config.credentials.azure_blob_storage_account_key + else: + self._credentials = AzureOauth2Authenticator( + token_refresh_endpoint=f"https://login.microsoftonline.com/{self.config.credentials.tenant_id}/oauth2/v2.0/token", + client_id=self.config.credentials.client_id, + client_secret=self.config.credentials.client_secret, + refresh_token=self.config.credentials.refresh_token, + ) + return self._credentials def get_matching_files( self, diff --git a/airbyte-integrations/connectors/source-azure-blob-storage/unit_tests/test_config_migration.py b/airbyte-integrations/connectors/source-azure-blob-storage/unit_tests/test_config_migration.py new file mode 100644 index 0000000000000..f1699bfc67cd7 --- /dev/null +++ b/airbyte-integrations/connectors/source-azure-blob-storage/unit_tests/test_config_migration.py @@ -0,0 +1,32 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + +import json +import os +from typing import Any, Mapping + +from airbyte_cdk.sources.file_based.stream.cursor import DefaultFileBasedCursor +from source_azure_blob_storage import Config, SourceAzureBlobStorage, SourceAzureBlobStorageStreamReader +from source_azure_blob_storage.config_migrations import MigrateCredentials + + +# HELPERS +def load_config(config_path: str) -> Mapping[str, Any]: + with open(config_path, "r") as config: + return json.load(config) + + +def test_mailchimp_config_migration(): + config_path = f"{os.path.dirname(__file__)}/test_configs/test_config_without_credentials.json" + initial_config = load_config(config_path) + migration_instance = MigrateCredentials + source = SourceAzureBlobStorage( + SourceAzureBlobStorageStreamReader(), + spec_class=Config, + catalog={}, + config=load_config(config_path), + state=None, + cursor_cls=DefaultFileBasedCursor, + ) + migration_instance.migrate(["check", "--config", config_path], source) + test_migrated_config = load_config(config_path) + assert test_migrated_config["credentials"]["azure_blob_storage_account_key"] == initial_config["azure_blob_storage_account_key"] diff --git a/airbyte-integrations/connectors/source-azure-blob-storage/unit_tests/test_configs/test_config_without_credentials.json b/airbyte-integrations/connectors/source-azure-blob-storage/unit_tests/test_configs/test_config_without_credentials.json new file mode 100644 index 0000000000000..4a381bd3878d9 --- /dev/null +++ b/airbyte-integrations/connectors/source-azure-blob-storage/unit_tests/test_configs/test_config_without_credentials.json @@ -0,0 +1,49 @@ +{ + "azure_blob_storage_endpoint": "https://test.blob.core.windows.net", + "azure_blob_storage_account_name": "teststorage", + "azure_blob_storage_container_name": "source-azure-blob-storage-test", + "azure_blob_storage_account_key": "test_key", + "streams": [ + { + "name": "source-azure-blob-storage-test", + "file_type": "csv", + "globs": ["csv_tests/user_schema.csv"], + "legacy_prefix": "", + "validation_policy": "Emit Record", + "input_schema": "{\"id\": \"number\", \"name\": \"string\", \"valid\": \"boolean\", \"valid_string\": \"string\", \"array\": \"string\", \"dict\": \"string\"}", + "format": { + "filetype": "csv", + "delimiter": ",", + "quote_char": "\"", + "double_quote": true, + "null_values": [ + "", + "#N/A", + "#N/A N/A", + "#NA", + "-1.#IND", + "-1.#QNAN", + "-NaN", + "-nan", + "1.#IND", + "1.#QNAN", + "N/A", + "NA", + "NULL", + "NaN", + "n/a", + "nan", + "null" + ], + "true_values": ["1", "True", "TRUE", "true"], + "false_values": ["0", "False", "FALSE", "false"], + "inference_type": "Primitive Types Only", + "strings_can_be_null": false, + "encoding": "utf8", + "header_definition": { + "header_definition_type": "From CSV" + } + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-bing-ads/acceptance-test-config.yml b/airbyte-integrations/connectors/source-bing-ads/acceptance-test-config.yml index 93c943f6e7699..67346c82fcbd8 100644 --- a/airbyte-integrations/connectors/source-bing-ads/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-bing-ads/acceptance-test-config.yml @@ -42,35 +42,35 @@ acceptance_tests: - name: product_search_query_performance_report_monthly bypass_reason: "Test Account doesn't have Merchant Center configured to add Products, testing in integration test" - name: audience_performance_report_daily - bypass_reason: "Test Account doesn't have audiences associated with any ad groups in the campaign." + bypass_reason: "Test Account doesn't have audiences associated with any ad groups in the campaign, testing in integration test" - name: audience_performance_report_hourly - bypass_reason: "Test Account doesn't have audiences associated with any ad groups in the campaign." + bypass_reason: "Test Account doesn't have audiences associated with any ad groups in the campaign, testing in integration test" - name: audience_performance_report_weekly - bypass_reason: "Test Account doesn't have audiences associated with any ad groups in the campaign." + bypass_reason: "Test Account doesn't have audiences associated with any ad groups in the campaign, testing in integration test" - name: audience_performance_report_monthly - bypass_reason: "Test Account doesn't have audiences associated with any ad groups in the campaign." + bypass_reason: "Test Account doesn't have audiences associated with any ad groups in the campaign, testing in integration test" - name: goals_and_funnels_report_daily - bypass_reason: "Test Account doesn't have audiences associated with any ad groups in the campaign." + bypass_reason: "Test Account doesn't have audiences associated with any ad groups in the campaign, testing in integration test" - name: goals_and_funnels_report_hourly - bypass_reason: "Test Account doesn't have audiences associated with any ad groups in the campaign." + bypass_reason: "Test Account doesn't have audiences associated with any ad groups in the campaign, testing in integration test" - name: goals_and_funnels_report_weekly - bypass_reason: "Test Account doesn't have audiences associated with any ad groups in the campaign." + bypass_reason: "Test Account doesn't have audiences associated with any ad groups in the campaign, testing in integration test" - name: goals_and_funnels_report_monthly - bypass_reason: "Test Account doesn't have audiences associated with any ad groups in the campaign." + bypass_reason: "Test Account doesn't have audiences associated with any ad groups in the campaign, testing in integration test" - name: account_performance_report_hourly - bypass_reason: "Hourly reports are disabled, because sync is too long" + bypass_reason: "Hourly reports are disabled, because sync is too long. Testing in integration tests." - name: ad_group_performance_report_hourly - bypass_reason: "Hourly reports are disabled, because sync is too long" + bypass_reason: "Hourly reports are disabled, because sync is too long. Testing in integration tests." - name: ad_performance_report_hourly - bypass_reason: "Hourly reports are disabled, because sync is too long" + bypass_reason: "Hourly reports are disabled, because sync is too long. Testing in integration tests." - name: campaign_performance_report_hourly - bypass_reason: "Hourly reports are disabled, because sync is too long" + bypass_reason: "Hourly reports are disabled, because sync is too long. Testing in integration tests." - name: campaign_impression_performance_report_hourly bypass_reason: "Empty report; hourly data fetched is limited to 180 days" - name: keyword_performance_report_hourly - bypass_reason: "Hourly reports are disabled, because sync is too long" + bypass_reason: "Hourly reports are disabled, because sync is too long. Testing in integration tests." - name: geographic_performance_report_hourly - bypass_reason: "Hourly reports are disabled, because sync is too long" + bypass_reason: "Hourly reports are disabled, because sync is too long. Testing in integration tests." - name: age_gender_audience_report_hourly bypass_reason: "Empty report; hourly data fetched is limited to 180 days" - name: search_query_performance_report_hourly @@ -85,31 +85,6 @@ acceptance_tests: bypass_reason: "Can not populate; new campaign with link to app needed; feature is not available yet" - name: app_install_ad_labels bypass_reason: "Can not populate; depends on stream app_install_ads" - #### TODO: remove *_report_monthly after all become populated on December, 1 - - name: account_performance_report_monthly - bypass_reason: "Campaign is still in progress" - - name: ad_group_performance_report_monthly - bypass_reason: "Campaign is still in progress" - - name: ad_group_impression_performance_report_monthly - bypass_reason: "Campaign is still in progress" - - name: ad_performance_report_monthly - bypass_reason: "Campaign is still in progress" - - name: campaign_performance_report_monthly - bypass_reason: "Campaign is still in progress" - - name: campaign_impression_performance_report_monthly - bypass_reason: "Campaign is still in progress" - - name: keyword_performance_report_monthly - bypass_reason: "Campaign is still in progress" - - name: geographic_performance_report_monthly - bypass_reason: "Campaign is still in progress" - - name: age_gender_audience_report_monthly - bypass_reason: "Campaign is still in progress" - - name: search_query_performance_report_monthly - bypass_reason: "Campaign is still in progress" - - name: user_location_performance_report_monthly - bypass_reason: "Campaign is still in progress" - - name: account_impression_performance_report_monthly - bypass_reason: "Campaign is still in progress" ### For stream below start date in config is not relevant, empty data - name: keyword_labels bypass_reason: "This stream is tested without start date" @@ -159,59 +134,58 @@ acceptance_tests: - name: goals_and_funnels_report_monthly bypass_reason: "Test Account doesn't have audiences associated with any ad groups in the campaign." - name: app_install_ads - bypass_reason: "Can not populate; new campaign with link to app needed; feature is not available yet" + bypass_reason: "Can not populate; new campaign with link to app needed; feature is not available yet. Testing in integration tests." - name: app_install_ad_labels - bypass_reason: "Can not populate; depends on stream app_install_ads" + bypass_reason: "Can not populate; depends on stream app_install_ads. Testing in integration tests." - name: age_gender_audience_report_hourly - bypass_reason: "Empty report; hourly data fetched is limited to 180 days" + bypass_reason: "Hourly reports are disabled, because sync is too long. Testing in integration tests." - name: user_location_performance_report_hourly - bypass_reason: "Empty report; hourly data fetched is limited to 180 days" + bypass_reason: "Hourly reports are disabled, because sync is too long. Testing in integration tests." - name: account_performance_report_hourly - bypass_reason: "Hourly reports are disabled, because sync is too long" + bypass_reason: "Hourly reports are disabled, because sync is too long. Testing in integration tests." - name: account_impression_performance_report_hourly - bypass_reason: "Empty report; hourly data fetched is limited to 180 days" + bypass_reason: "Hourly reports are disabled, because sync is too long. Testing in integration tests." - name: campaign_performance_report_hourly - bypass_reason: "Hourly reports are disabled, because sync is too long" + bypass_reason: "Hourly reports are disabled, because sync is too long. Testing in integration tests." - name: ad_group_performance_report_hourly - bypass_reason: "Hourly reports are disabled, because sync is too long" + bypass_reason: "Hourly reports are disabled, because sync is too long. Testing in integration tests." - name: ad_performance_report_hourly - bypass_reason: "Hourly reports are disabled, because sync is too long" + bypass_reason: "Hourly reports are disabled, because sync is too long. Testing in integration tests." - name: ad_group_impression_performance_report_hourly - bypass_reason: "Empty report; hourly data fetched is limited to 180 days" + bypass_reason: "Hourly reports are disabled, because sync is too long. Testing in integration tests." - name: keyword_performance_report_hourly - bypass_reason: "Hourly reports are disabled, because sync is too long" + bypass_reason: "Hourly reports are disabled, because sync is too long. Testing in integration tests." - name: geographic_performance_report_hourly - bypass_reason: "Hourly reports are disabled, because sync is too long" + bypass_reason: "Hourly reports are disabled, because sync is too long. Testing in integration tests." - name: campaign_impression_performance_report_hourly - bypass_reason: "Empty report; hourly data fetched is limited to 180 days" + bypass_reason: "Hourly reports are disabled, because sync is too long. Testing in integration tests." - name: search_query_performance_report_hourly - bypass_reason: "Empty report; hourly data fetched is limited to 180 days" - #### TODO: remove *_report_monthly after all become populated on December, 1 + bypass_reason: "Hourly reports are disabled, because sync is too long. Testing in integration tests." + #### Streams below sync takes a lot of time if start date is not provided - name: account_performance_report_monthly - bypass_reason: "Campaign is still in progress" + bypass_reason: "This stream is tested with config with start date" - name: ad_group_performance_report_monthly - bypass_reason: "Campaign is still in progress" + bypass_reason: "This stream is tested with config with start date" - name: ad_group_impression_performance_report_monthly - bypass_reason: "Campaign is still in progress" + bypass_reason: "This stream is tested with config with start date" - name: ad_performance_report_monthly - bypass_reason: "Campaign is still in progress" + bypass_reason: "This stream is tested with config with start date" - name: campaign_performance_report_monthly - bypass_reason: "Campaign is still in progress" + bypass_reason: "This stream is tested with config with start date" - name: campaign_impression_performance_report_monthly - bypass_reason: "Campaign is still in progress" + bypass_reason: "This stream is tested with config with start date" - name: keyword_performance_report_monthly - bypass_reason: "Campaign is still in progress" + bypass_reason: "This stream is tested with config with start date" - name: geographic_performance_report_monthly - bypass_reason: "Campaign is still in progress" + bypass_reason: "This stream is tested with config with start date" - name: age_gender_audience_report_monthly - bypass_reason: "Campaign is still in progress" + bypass_reason: "This stream is tested with config with start date" - name: search_query_performance_report_monthly - bypass_reason: "Campaign is still in progress" + bypass_reason: "This stream is tested with config with start date" - name: user_location_performance_report_monthly - bypass_reason: "Campaign is still in progress" + bypass_reason: "This stream is tested with config with start date" - name: account_impression_performance_report_monthly - bypass_reason: "Campaign is still in progress" - #### Streams below sync takes a lot of time if start date is not provided + bypass_reason: "This stream is tested with config with start date" - name: ad_groups bypass_reason: "This stream is tested with config with start date" - name: ads diff --git a/airbyte-integrations/connectors/source-bing-ads/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-bing-ads/integration_tests/expected_records.jsonl index 2da8a26461d36..f7e62e8a35817 100644 --- a/airbyte-integrations/connectors/source-bing-ads/integration_tests/expected_records.jsonl +++ b/airbyte-integrations/connectors/source-bing-ads/integration_tests/expected_records.jsonl @@ -28,3 +28,16 @@ {"stream":"account_impression_performance_report_daily","data":{"AccountName":"Airbyte","AccountNumber":"F149MJ18","AccountId":180519267,"TimePeriod":"2023-12-18","CurrencyCode":"USD","AdDistribution":"Search","Impressions":22,"Clicks":0,"Ctr":0.0,"AverageCpc":0.0,"Spend":0.0,"AveragePosition":0.0,"Conversions":0,"ConversionRate":null,"CostPerConversion":null,"LowQualityClicks":0,"LowQualityClicksPercent":null,"LowQualityImpressions":6,"LowQualityImpressionsPercent":21.43,"LowQualityConversions":0,"LowQualityConversionRate":null,"DeviceType":"Computer","ImpressionSharePercent":34.92,"ImpressionLostToBudgetPercent":1.59,"ImpressionLostToRankAggPercent":63.49,"PhoneImpressions":0,"PhoneCalls":0,"Ptr":null,"Network":"Syndicated search partners","Assists":0,"Revenue":0.0,"ReturnOnAdSpend":null,"CostPerAssist":null,"RevenuePerConversion":null,"RevenuePerAssist":null,"AccountStatus":"Active","LowQualityGeneralClicks":0,"LowQualitySophisticatedClicks":0,"ExactMatchImpressionSharePercent":5.26,"ClickSharePercent":null,"AbsoluteTopImpressionSharePercent":10.2,"TopImpressionShareLostToRankPercent":68.0,"TopImpressionShareLostToBudgetPercent":0.0,"AbsoluteTopImpressionShareLostToRankPercent":89.8,"AbsoluteTopImpressionShareLostToBudgetPercent":0.0,"TopImpressionSharePercent":32.0,"AbsoluteTopImpressionRatePercent":22.73,"TopImpressionRatePercent":72.73,"AllConversions":0,"AllRevenue":0.0,"AllConversionRate":null,"AllCostPerConversion":null,"AllReturnOnAdSpend":null,"AllRevenuePerConversion":null,"ViewThroughConversions":0,"AudienceImpressionSharePercent":null,"AudienceImpressionLostToRankPercent":null,"AudienceImpressionLostToBudgetPercent":null,"AverageCpm":0.0,"ConversionsQualified":0.0,"LowQualityConversionsQualified":0.0,"AllConversionsQualified":0.0,"ViewThroughConversionsQualified":null,"ViewThroughRevenue":0.0,"VideoViews":0,"ViewThroughRate":0.0,"AverageCPV":null,"VideoViewsAt25Percent":0,"VideoViewsAt50Percent":0,"VideoViewsAt75Percent":0,"CompletedVideoViews":0,"VideoCompletionRate":0.0,"TotalWatchTimeInMS":0,"AverageWatchTimePerVideoView":null,"AverageWatchTimePerImpression":0.0,"Sales":0,"CostPerSale":null,"RevenuePerSale":null,"Installs":0,"CostPerInstall":null,"RevenuePerInstall":null},"emitted_at":1704833886551} {"stream":"account_impression_performance_report_weekly","data":{"AccountName":"Airbyte","AccountNumber":"F149MJ18","AccountId":180519267,"TimePeriod":"2023-12-17","CurrencyCode":"USD","AdDistribution":"Search","Impressions":639,"Clicks":14,"Ctr":2.19,"AverageCpc":0.12,"Spend":1.74,"AveragePosition":0.0,"Conversions":0,"ConversionRate":0.0,"CostPerConversion":null,"LowQualityClicks":6,"LowQualityClicksPercent":30.0,"LowQualityImpressions":53,"LowQualityImpressionsPercent":7.66,"LowQualityConversions":0,"LowQualityConversionRate":0.0,"DeviceType":"Computer","ImpressionSharePercent":13.57,"ImpressionLostToBudgetPercent":17.96,"ImpressionLostToRankAggPercent":68.47,"PhoneImpressions":0,"PhoneCalls":0,"Ptr":null,"Network":"Syndicated search partners","Assists":0,"Revenue":0.0,"ReturnOnAdSpend":0.0,"CostPerAssist":null,"RevenuePerConversion":null,"RevenuePerAssist":null,"AccountStatus":"Active","LowQualityGeneralClicks":0,"LowQualitySophisticatedClicks":6,"ExactMatchImpressionSharePercent":17.65,"ClickSharePercent":1.28,"AbsoluteTopImpressionSharePercent":3.2,"TopImpressionShareLostToRankPercent":74.15,"TopImpressionShareLostToBudgetPercent":18.25,"AbsoluteTopImpressionShareLostToRankPercent":78.51,"AbsoluteTopImpressionShareLostToBudgetPercent":18.29,"TopImpressionSharePercent":7.6,"AbsoluteTopImpressionRatePercent":22.69,"TopImpressionRatePercent":53.99,"AllConversions":0,"AllRevenue":0.0,"AllConversionRate":0.0,"AllCostPerConversion":null,"AllReturnOnAdSpend":0.0,"AllRevenuePerConversion":null,"ViewThroughConversions":0,"AudienceImpressionSharePercent":null,"AudienceImpressionLostToRankPercent":null,"AudienceImpressionLostToBudgetPercent":null,"AverageCpm":2.72,"ConversionsQualified":0.0,"LowQualityConversionsQualified":0.0,"AllConversionsQualified":0.0,"ViewThroughConversionsQualified":null,"ViewThroughRevenue":0.0,"VideoViews":0,"ViewThroughRate":0.0,"AverageCPV":null,"VideoViewsAt25Percent":0,"VideoViewsAt50Percent":0,"VideoViewsAt75Percent":0,"CompletedVideoViews":0,"VideoCompletionRate":0.0,"TotalWatchTimeInMS":0,"AverageWatchTimePerVideoView":null,"AverageWatchTimePerImpression":0.0,"Sales":0,"CostPerSale":null,"RevenuePerSale":null,"Installs":0,"CostPerInstall":null,"RevenuePerInstall":null},"emitted_at":1704833908003} {"stream": "budget", "data": {"Type": "Budget", "Status": "Active", "Id": 10239202868095, "Parent Id": 180519267, "Client Id": null, "Modified Time": "2024-02-28T17:52:08.900+00:00", "Budget Id": null, "Budget Name": "Test Shared Budget", "Budget": 2.0, "Budget Type": "DailyBudgetStandard", "Account Id": 180519267}, "emitted_at": 1709228203331} +{"stream": "account_performance_report_monthly", "data": {"AccountId": 180519267, "TimePeriod": "2024-03-01", "CurrencyCode": "USD", "AdDistribution": "Audience", "DeviceType": "Tablet", "Network": "Audience", "DeliveredMatchType": "Exact", "DeviceOS": "Android", "TopVsOther": "Audience network", "BidMatchType": "Broad", "AccountName": "Airbyte", "AccountNumber": "F149MJ18", "PhoneImpressions": 0, "PhoneCalls": 0, "Clicks": 4, "Ctr": 2.6, "Spend": 0.06, "Impressions": 154, "CostPerConversion": null, "Ptr": null, "Assists": 0, "ReturnOnAdSpend": 0.0, "CostPerAssist": null, "AverageCpc": 0.02, "AveragePosition": 0.0, "AverageCpm": 0.39, "Conversions": 0.0, "ConversionsQualified": 0.0, "ConversionRate": 0.0, "LowQualityClicks": 4, "LowQualityClicksPercent": 50.0, "LowQualityImpressions": 13, "LowQualitySophisticatedClicks": 4, "LowQualityConversions": 0, "LowQualityConversionRate": 0.0, "Revenue": 0.0, "RevenuePerConversion": null, "RevenuePerAssist": null}, "emitted_at": 1709910004377} +{"stream": "ad_group_performance_report_monthly", "data": {"AccountId": 180519267, "CampaignId": 531016227, "AdGroupId": 1356799861840328, "TimePeriod": "2024-03-01", "CurrencyCode": "USD", "AdDistribution": "Audience", "DeviceType": "Tablet", "Network": "Audience", "DeliveredMatchType": "Exact", "DeviceOS": "Android", "TopVsOther": "Audience network", "BidMatchType": "Broad", "Language": "English", "AccountName": "Airbyte", "CampaignName": "Airbyte test", "CampaignType": "Search & content", "AdGroupName": "keywords", "AdGroupType": "Standard", "Impressions": 149, "Clicks": 4, "Ctr": 2.68, "Spend": 0.06, "CostPerConversion": null, "QualityScore": 7.0, "ExpectedCtr": "2", "AdRelevance": 2.0, "LandingPageExperience": 2.0, "PhoneImpressions": 0, "PhoneCalls": 0, "Ptr": null, "Assists": 0, "CostPerAssist": null, "CustomParameters": null, "FinalUrlSuffix": null, "ViewThroughConversions": 0, "AllCostPerConversion": null, "AllReturnOnAdSpend": 0.0, "AllConversions": 0, "AllConversionRate": 0.0, "AllRevenue": 0.0, "AllRevenuePerConversion": null, "AverageCpc": 0.02, "AveragePosition": 0.0, "AverageCpm": 0.4, "Conversions": 0.0, "ConversionRate": 0.0, "ConversionsQualified": 0.0, "HistoricalQualityScore": 7.0, "HistoricalExpectedCtr": 2.0, "HistoricalAdRelevance": 2.0, "HistoricalLandingPageExperience": 2.0, "Revenue": 0.0, "RevenuePerConversion": null, "RevenuePerAssist": null}, "emitted_at": 1709910084639} +{"stream": "ad_group_impression_performance_report_monthly", "data": {"AccountName": "Airbyte", "AccountNumber": "F149MJ18", "AccountId": 180519267, "TimePeriod": "2024-03-01", "Status": "Active", "CampaignName": "Airbyte test", "CampaignId": 531016227, "AdGroupName": "keywords", "AdGroupId": 1356799861840328, "CurrencyCode": "USD", "AdDistribution": "Audience", "Impressions": 151, "Clicks": 4, "Ctr": 2.65, "AverageCpc": 0.02, "Spend": 0.06, "AveragePosition": 0.0, "Conversions": 0, "ConversionRate": 0.0, "CostPerConversion": null, "DeviceType": "Tablet", "Language": "English", "ImpressionSharePercent": null, "ImpressionLostToBudgetPercent": null, "ImpressionLostToRankAggPercent": null, "QualityScore": 7, "ExpectedCtr": 2.0, "AdRelevance": 2, "LandingPageExperience": 2, "HistoricalQualityScore": 7, "HistoricalExpectedCtr": 2, "HistoricalAdRelevance": 2, "HistoricalLandingPageExperience": 2, "PhoneImpressions": 0, "PhoneCalls": 0, "Ptr": null, "Network": "Audience", "Assists": 0, "Revenue": 0.0, "ReturnOnAdSpend": 0.0, "CostPerAssist": null, "RevenuePerConversion": null, "RevenuePerAssist": null, "TrackingTemplate": null, "CustomParameters": null, "AccountStatus": "Active", "CampaignStatus": "Active", "AdGroupLabels": null, "ExactMatchImpressionSharePercent": null, "ClickSharePercent": null, "AbsoluteTopImpressionSharePercent": null, "FinalUrlSuffix": null, "CampaignType": "Search & content", "TopImpressionShareLostToRankPercent": null, "TopImpressionShareLostToBudgetPercent": null, "AbsoluteTopImpressionShareLostToRankPercent": null, "AbsoluteTopImpressionShareLostToBudgetPercent": null, "TopImpressionSharePercent": null, "AbsoluteTopImpressionRatePercent": null, "TopImpressionRatePercent": null, "BaseCampaignId": 531016227, "AllConversions": 0, "AllRevenue": 0.0, "AllConversionRate": 0.0, "AllCostPerConversion": null, "AllReturnOnAdSpend": 0.0, "AllRevenuePerConversion": null, "ViewThroughConversions": 0, "AudienceImpressionSharePercent": null, "AudienceImpressionLostToRankPercent": null, "AudienceImpressionLostToBudgetPercent": null, "RelativeCtr": null, "AdGroupType": "Standard", "AverageCpm": 0.4, "ConversionsQualified": 0.0, "AllConversionsQualified": 0.0, "ViewThroughConversionsQualified": null, "ViewThroughRevenue": 0.0, "VideoViews": 0, "ViewThroughRate": 0.0, "AverageCPV": null, "VideoViewsAt25Percent": 0, "VideoViewsAt50Percent": 0, "VideoViewsAt75Percent": 0, "CompletedVideoViews": 0, "VideoCompletionRate": 0.0, "TotalWatchTimeInMS": 0, "AverageWatchTimePerVideoView": null, "AverageWatchTimePerImpression": 0.0, "Sales": 0, "CostPerSale": null, "RevenuePerSale": null, "Installs": 0, "CostPerInstall": null, "RevenuePerInstall": null}, "emitted_at": 1709910137241} +{"stream": "ad_performance_report_monthly", "data": {"AccountId": 180519267, "CampaignId": 531016227, "AdGroupId": 1356799861840328, "AdId": 84800390693061, "TimePeriod": "2024-03-01", "AbsoluteTopImpressionRatePercent": 0.0, "TopImpressionRatePercent": 0.0, "CurrencyCode": "USD", "AdDistribution": "Audience", "DeviceType": "Tablet", "Language": "English", "Network": "Audience", "DeviceOS": "Android", "TopVsOther": "Audience network", "BidMatchType": "Broad", "DeliveredMatchType": "Exact", "AccountName": "Airbyte", "CampaignName": "Airbyte test", "CampaignType": "Search & content", "AdGroupName": "keywords", "Impressions": 154, "Clicks": 4, "Ctr": 2.6, "Spend": 0.06, "CostPerConversion": null, "DestinationUrl": null, "Assists": 0, "ReturnOnAdSpend": 0.0, "CostPerAssist": null, "CustomParameters": null, "FinalAppUrl": null, "AdDescription": null, "AdDescription2": null, "ViewThroughConversions": 0, "ViewThroughConversionsQualified": null, "AllCostPerConversion": null, "AllReturnOnAdSpend": 0.0, "Conversions": 0.0, "ConversionRate": 0.0, "ConversionsQualified": 0.0, "AverageCpc": 0.02, "AveragePosition": 0.0, "AverageCpm": 0.39, "AllConversions": 0, "AllConversionRate": 0.0, "AllRevenue": 0.0, "AllRevenuePerConversion": null, "Revenue": 0.0, "RevenuePerConversion": null, "RevenuePerAssist": null}, "emitted_at": 1709910200316} +{"stream": "campaign_performance_report_monthly", "data": {"AccountId": 180519267, "CampaignId": 531016227, "TimePeriod": "2024-03-01", "CurrencyCode": "USD", "AdDistribution": "Audience", "DeviceType": "Tablet", "Network": "Audience", "DeliveredMatchType": "Exact", "DeviceOS": "Android", "TopVsOther": "Audience network", "BidMatchType": "Broad", "AccountName": "Airbyte", "CampaignName": "Airbyte test", "CampaignType": "Search & content", "CampaignStatus": "Active", "CampaignLabels": null, "Impressions": 149, "Clicks": 4, "Ctr": 2.68, "Spend": 0.06, "CostPerConversion": null, "QualityScore": 7.0, "AdRelevance": 2.0, "LandingPageExperience": 2.0, "PhoneImpressions": 0, "PhoneCalls": 0, "Ptr": null, "Assists": 0, "ReturnOnAdSpend": 0.0, "CostPerAssist": null, "CustomParameters": null, "ViewThroughConversions": 0, "AllCostPerConversion": null, "AllReturnOnAdSpend": 0.0, "AllConversions": 0, "ConversionsQualified": 0.0, "AllConversionRate": null, "AllRevenue": 0.0, "AllRevenuePerConversion": null, "AverageCpc": 0.02, "AveragePosition": 0.0, "AverageCpm": 0.4, "Conversions": 0.0, "ConversionRate": null, "LowQualityClicks": 4, "LowQualityClicksPercent": 50.0, "LowQualityImpressions": 9, "LowQualitySophisticatedClicks": 4, "LowQualityConversions": 0, "LowQualityConversionRate": 0.0, "HistoricalQualityScore": 7.0, "HistoricalExpectedCtr": 2.0, "HistoricalAdRelevance": 2.0, "HistoricalLandingPageExperience": 2.0, "Revenue": 0.0, "RevenuePerConversion": null, "RevenuePerAssist": null, "BudgetName": null, "BudgetStatus": null, "BudgetAssociationStatus": "Current"}, "emitted_at": 1709910271450} +{"stream": "campaign_impression_performance_report_monthly", "data": {"AccountName": "Airbyte", "AccountNumber": "F149MJ18", "AccountId": 180519267, "TimePeriod": "2024-03-01", "CampaignStatus": "Active", "CampaignName": "Airbyte test", "CampaignId": 531016227, "CurrencyCode": "USD", "AdDistribution": "Audience", "Impressions": 151, "Clicks": 4, "Ctr": 2.65, "AverageCpc": 0.02, "Spend": 0.06, "AveragePosition": 0.0, "Conversions": 0, "ConversionRate": null, "CostPerConversion": null, "LowQualityClicks": 4, "LowQualityClicksPercent": 50.0, "LowQualityImpressions": 9, "LowQualityImpressionsPercent": 5.63, "LowQualityConversions": 0, "LowQualityConversionRate": 0.0, "DeviceType": "Tablet", "ImpressionSharePercent": null, "ImpressionLostToBudgetPercent": null, "ImpressionLostToRankAggPercent": null, "QualityScore": 7.0, "ExpectedCtr": "2", "AdRelevance": 2.0, "LandingPageExperience": 2.0, "HistoricalQualityScore": 7, "HistoricalExpectedCtr": 2, "HistoricalAdRelevance": 2, "HistoricalLandingPageExperience": 2, "PhoneImpressions": 0, "PhoneCalls": 0, "Ptr": null, "Network": "Audience", "Assists": 0, "Revenue": 0.0, "ReturnOnAdSpend": 0.0, "CostPerAssist": null, "RevenuePerConversion": null, "RevenuePerAssist": null, "TrackingTemplate": null, "CustomParameters": null, "AccountStatus": "Active", "LowQualityGeneralClicks": 0, "LowQualitySophisticatedClicks": 4, "CampaignLabels": null, "ExactMatchImpressionSharePercent": null, "ClickSharePercent": null, "AbsoluteTopImpressionSharePercent": null, "FinalUrlSuffix": null, "CampaignType": "Search & content", "TopImpressionShareLostToRankPercent": null, "TopImpressionShareLostToBudgetPercent": null, "AbsoluteTopImpressionShareLostToRankPercent": null, "AbsoluteTopImpressionShareLostToBudgetPercent": null, "TopImpressionSharePercent": null, "AbsoluteTopImpressionRatePercent": 0.0, "TopImpressionRatePercent": 0.0, "BaseCampaignId": 531016227, "AllConversions": 0, "AllRevenue": 0.0, "AllConversionRate": null, "AllCostPerConversion": null, "AllReturnOnAdSpend": 0.0, "AllRevenuePerConversion": null, "ViewThroughConversions": 0, "AudienceImpressionSharePercent": null, "AudienceImpressionLostToRankPercent": null, "AudienceImpressionLostToBudgetPercent": null, "RelativeCtr": null, "AverageCpm": 0.4, "ConversionsQualified": 0.0, "LowQualityConversionsQualified": 0.0, "AllConversionsQualified": 0.0, "ViewThroughConversionsQualified": null, "ViewThroughRevenue": 0.0, "VideoViews": 0, "ViewThroughRate": 0.0, "AverageCPV": null, "VideoViewsAt25Percent": 0, "VideoViewsAt50Percent": 0, "VideoViewsAt75Percent": 0, "CompletedVideoViews": 0, "VideoCompletionRate": 0.0, "TotalWatchTimeInMS": 0, "AverageWatchTimePerVideoView": null, "AverageWatchTimePerImpression": 0.0, "Sales": 0, "CostPerSale": null, "RevenuePerSale": null, "Installs": 0, "CostPerInstall": null, "RevenuePerInstall": null}, "emitted_at": 1709910330070} +{"stream": "keyword_performance_report_monthly", "data": {"AccountId": 180519267, "CampaignId": 531016227, "AdGroupId": 1356799861840328, "KeywordId": 84801135055370, "Keyword": "Airbyte", "AdId": 84800390693061, "TimePeriod": "2024-03-01", "CurrencyCode": "USD", "DeliveredMatchType": "Broad", "AdDistribution": "Search", "DeviceType": "Computer", "Language": "Portuguese", "Network": "Microsoft sites and select traffic", "DeviceOS": "Windows", "TopVsOther": "Microsoft sites and select traffic - top", "BidMatchType": "Broad", "AccountName": "Airbyte", "CampaignName": "Airbyte test", "AdGroupName": "keywords", "KeywordStatus": "Active", "Impressions": 1, "Clicks": 0, "Ctr": 0.0, "CurrentMaxCpc": 2.27, "Spend": 0.0, "CostPerConversion": null, "QualityScore": 10.0, "ExpectedCtr": "3", "AdRelevance": 3.0, "LandingPageExperience": 3.0, "QualityImpact": 0.0, "Assists": 0, "ReturnOnAdSpend": null, "CostPerAssist": null, "CustomParameters": null, "FinalAppUrl": null, "Mainline1Bid": null, "MainlineBid": 0.47, "FirstPageBid": 0.26, "FinalUrlSuffix": null, "ViewThroughConversions": 0, "ViewThroughConversionsQualified": null, "AllCostPerConversion": null, "AllReturnOnAdSpend": null, "Conversions": 0.0, "ConversionRate": null, "ConversionsQualified": 0.0, "AverageCpc": 0.0, "AveragePosition": 0.0, "AverageCpm": 0.0, "AllConversions": 0, "AllConversionRate": null, "AllRevenue": 0.0, "AllRevenuePerConversion": null, "Revenue": 0.0, "RevenuePerConversion": null, "RevenuePerAssist": null}, "emitted_at": 1709910365826} +{"stream": "geographic_performance_report_monthly", "data": {"AccountId": 180519267, "CampaignId": 531016227, "AdGroupId": 1356799861840328, "TimePeriod": "2024-03-01", "AccountNumber": "F149MJ18", "Country": "Zimbabwe", "State": "Harare", "MetroArea": null, "City": "Harare", "ProximityTargetLocation": null, "Radius": "0", "LocationType": "Physical location", "MostSpecificLocation": "Harare", "AccountStatus": "Active", "CampaignStatus": "Active", "AdGroupStatus": "Active", "County": null, "PostalCode": null, "LocationId": "153436", "BaseCampaignId": "531016227", "Goal": null, "GoalType": null, "AbsoluteTopImpressionRatePercent": 0.0, "TopImpressionRatePercent": "0.00", "AllConversionsQualified": "0.00", "Neighborhood": null, "ViewThroughRevenue": "0.00", "CampaignType": "Search & content", "AssetGroupId": null, "AssetGroupName": null, "AssetGroupStatus": null, "CurrencyCode": "USD", "DeliveredMatchType": "Exact", "AdDistribution": "Audience", "DeviceType": "Computer", "Language": "English", "Network": "Audience", "DeviceOS": "Windows", "TopVsOther": "Audience network", "BidMatchType": "Broad", "AccountName": "Airbyte", "CampaignName": "Airbyte test", "AdGroupName": "keywords", "Impressions": 2, "Clicks": 0, "Ctr": 0.0, "Spend": 0.0, "CostPerConversion": null, "Assists": 0, "ReturnOnAdSpend": null, "CostPerAssist": null, "ViewThroughConversions": 0, "ViewThroughConversionsQualified": null, "AllCostPerConversion": null, "AllReturnOnAdSpend": null, "Conversions": 0.0, "ConversionRate": null, "ConversionsQualified": 0.0, "AverageCpc": 0.0, "AveragePosition": 0.0, "AverageCpm": 0.0, "AllConversions": 0, "AllConversionRate": null, "AllRevenue": 0.0, "AllRevenuePerConversion": null, "Revenue": 0.0, "RevenuePerConversion": null, "RevenuePerAssist": null}, "emitted_at": 1709910490360} +{"stream": "age_gender_audience_report_monthly", "data": {"AccountId": 180519267, "AgeGroup": "65+", "Gender": "Female", "TimePeriod": "2024-02-01", "AllConversions": 0, "AccountName": "Airbyte", "AccountNumber": "F149MJ18", "CampaignName": "Airbyte test", "CampaignId": 531016227, "AdGroupName": "keywords", "AdGroupId": 1356799861840328, "AdDistribution": "Audience", "Impressions": 35, "Clicks": 0, "Conversions": 0.0, "Spend": 0.0, "Revenue": 0.0, "ExtendedCost": 0.0, "Assists": 0, "Language": "English", "AccountStatus": "Active", "CampaignStatus": "Active", "AdGroupStatus": "Active", "BaseCampaignId": "531016227", "AllRevenue": 0.0, "ViewThroughConversions": 0, "Goal": null, "GoalType": null, "AbsoluteTopImpressionRatePercent": 0.0, "TopImpressionRatePercent": 0.0, "ConversionsQualified": 0.0, "AllConversionsQualified": 0.0, "ViewThroughConversionsQualified": null, "ViewThroughRevenue": 0.0}, "emitted_at": 1709910536226} +{"stream": "age_gender_audience_report_monthly", "data": {"AccountId": 180519267, "AgeGroup": "65+", "Gender": "Female", "TimePeriod": "2024-03-01", "AllConversions": 0, "AccountName": "Airbyte", "AccountNumber": "F149MJ18", "CampaignName": "Airbyte test", "CampaignId": 531016227, "AdGroupName": "keywords", "AdGroupId": 1356799861840328, "AdDistribution": "Audience", "Impressions": 37, "Clicks": 0, "Conversions": 0.0, "Spend": 0.0, "Revenue": 0.0, "ExtendedCost": 0.0, "Assists": 0, "Language": "English", "AccountStatus": "Active", "CampaignStatus": "Active", "AdGroupStatus": "Active", "BaseCampaignId": "531016227", "AllRevenue": 0.0, "ViewThroughConversions": 0, "Goal": null, "GoalType": null, "AbsoluteTopImpressionRatePercent": 0.0, "TopImpressionRatePercent": 0.0, "ConversionsQualified": 0.0, "AllConversionsQualified": 0.0, "ViewThroughConversionsQualified": null, "ViewThroughRevenue": 0.0}, "emitted_at": 1709910536228} +{"stream": "search_query_performance_report_monthly", "data": {"AccountName": "Airbyte", "AccountNumber": "F149MJ18", "AccountId": 180519267, "TimePeriod": "2024-03-01", "CampaignName": "Airbyte test", "CampaignId": 531016227, "AdGroupName": "keywords", "AdGroupId": 1356799861840328, "AdId": 84800390693061, "AdType": "Responsive search ad", "DestinationUrl": null, "BidMatchType": "Broad", "DeliveredMatchType": "Broad", "CampaignStatus": "Active", "AdStatus": "Active", "Impressions": 1, "Clicks": 1, "Ctr": 100.0, "AverageCpc": 1.29, "Spend": 1.29, "AveragePosition": 0.0, "SearchQuery": "test data management", "Keyword": "ELT infrastructure", "AdGroupCriterionId": null, "Conversions": 0, "ConversionRate": 0.0, "CostPerConversion": null, "Language": "English", "KeywordId": 84801135055369, "Network": "Microsoft sites and select traffic", "TopVsOther": "Microsoft sites and select traffic - other", "DeviceType": "Computer", "DeviceOS": "Windows", "Assists": 0, "Revenue": 0.0, "ReturnOnAdSpend": 0.0, "CostPerAssist": null, "RevenuePerConversion": null, "RevenuePerAssist": null, "AccountStatus": "Active", "AdGroupStatus": "Active", "KeywordStatus": "Active", "CampaignType": "Search & content", "CustomerId": 251186883, "CustomerName": "Daxtarity Inc.", "AllConversions": 0, "AllRevenue": 0.0, "AllConversionRate": 0.0, "AllCostPerConversion": null, "AllReturnOnAdSpend": 0.0, "AllRevenuePerConversion": null, "Goal": null, "GoalType": null, "AbsoluteTopImpressionRatePercent": 0.0, "TopImpressionRatePercent": 0.0, "AverageCpm": 1290.0, "ConversionsQualified": 0.0, "AllConversionsQualified": 0.0}, "emitted_at": 1709910585743} +{"stream": "user_location_performance_report_monthly", "data": {"AccountName": "Airbyte", "AccountNumber": "F149MJ18", "AccountId": 180519267, "TimePeriod": "2024-03-01", "CampaignName": "Airbyte test", "CampaignId": 531016227, "AdGroupName": "keywords", "AdGroupId": 1356799861840328, "Country": "Zimbabwe", "State": "Harare", "MetroArea": null, "CurrencyCode": "USD", "AdDistribution": "Audience", "Impressions": 2, "Clicks": 0, "Ctr": 0.0, "AverageCpc": 0.0, "Spend": 0.0, "AveragePosition": 0.0, "ProximityTargetLocation": null, "Radius": 0, "Language": "English", "City": "Harare", "QueryIntentCountry": "Zimbabwe", "QueryIntentState": null, "QueryIntentCity": null, "QueryIntentDMA": null, "BidMatchType": "Broad", "DeliveredMatchType": "Exact", "Network": "Audience", "TopVsOther": "Audience network", "DeviceType": "Computer", "DeviceOS": "Windows", "Assists": 0, "Conversions": 0, "ConversionRate": null, "Revenue": 0.0, "ReturnOnAdSpend": null, "CostPerConversion": null, "CostPerAssist": null, "RevenuePerConversion": null, "RevenuePerAssist": null, "County": null, "PostalCode": null, "QueryIntentCounty": null, "QueryIntentPostalCode": null, "LocationId": 153436, "QueryIntentLocationId": 169, "AllConversions": 0, "AllRevenue": 0.0, "AllConversionRate": null, "AllCostPerConversion": null, "AllReturnOnAdSpend": null, "AllRevenuePerConversion": null, "ViewThroughConversions": 0, "Goal": null, "GoalType": null, "AbsoluteTopImpressionRatePercent": 0.0, "TopImpressionRatePercent": 0.0, "AverageCpm": 0.0, "ConversionsQualified": 0.0, "AllConversionsQualified": 0.0, "ViewThroughConversionsQualified": null, "Neighborhood": null, "QueryIntentNeighborhood": null, "ViewThroughRevenue": 0.0, "CampaignType": "Search & content", "AssetGroupId": null, "AssetGroupName": null}, "emitted_at": 1709910728070} +{"stream": "account_impression_performance_report_monthly", "data": {"AccountName": "Airbyte", "AccountNumber": "F149MJ18", "AccountId": 180519267, "TimePeriod": "2024-03-01", "CurrencyCode": "USD", "AdDistribution": "Audience", "Impressions": 156, "Clicks": 4, "Ctr": 2.56, "AverageCpc": 0.02, "Spend": 0.06, "AveragePosition": 0.0, "Conversions": 0, "ConversionRate": 0.0, "CostPerConversion": null, "LowQualityClicks": 4, "LowQualityClicksPercent": 50.0, "LowQualityImpressions": 13, "LowQualityImpressionsPercent": 7.69, "LowQualityConversions": 0, "LowQualityConversionRate": 0.0, "DeviceType": "Tablet", "ImpressionSharePercent": null, "ImpressionLostToBudgetPercent": null, "ImpressionLostToRankAggPercent": null, "PhoneImpressions": 0, "PhoneCalls": 0, "Ptr": null, "Network": "Audience", "Assists": 0, "Revenue": 0.0, "ReturnOnAdSpend": 0.0, "CostPerAssist": null, "RevenuePerConversion": null, "RevenuePerAssist": null, "AccountStatus": "Active", "LowQualityGeneralClicks": 0, "LowQualitySophisticatedClicks": 4, "ExactMatchImpressionSharePercent": null, "ClickSharePercent": null, "AbsoluteTopImpressionSharePercent": null, "TopImpressionShareLostToRankPercent": null, "TopImpressionShareLostToBudgetPercent": null, "AbsoluteTopImpressionShareLostToRankPercent": null, "AbsoluteTopImpressionShareLostToBudgetPercent": null, "TopImpressionSharePercent": null, "AbsoluteTopImpressionRatePercent": null, "TopImpressionRatePercent": null, "AllConversions": 0, "AllRevenue": 0.0, "AllConversionRate": 0.0, "AllCostPerConversion": null, "AllReturnOnAdSpend": 0.0, "AllRevenuePerConversion": null, "ViewThroughConversions": 0, "AudienceImpressionSharePercent": null, "AudienceImpressionLostToRankPercent": null, "AudienceImpressionLostToBudgetPercent": null, "AverageCpm": 0.38, "ConversionsQualified": 0.0, "LowQualityConversionsQualified": 0.0, "AllConversionsQualified": 0.0, "ViewThroughConversionsQualified": null, "ViewThroughRevenue": 0.0, "VideoViews": 0, "ViewThroughRate": 0.0, "AverageCPV": null, "VideoViewsAt25Percent": 0, "VideoViewsAt50Percent": 0, "VideoViewsAt75Percent": 0, "CompletedVideoViews": 0, "VideoCompletionRate": 0.0, "TotalWatchTimeInMS": 0, "AverageWatchTimePerVideoView": null, "AverageWatchTimePerImpression": 0.0, "Sales": 0, "CostPerSale": null, "RevenuePerSale": null, "Installs": 0, "CostPerInstall": null, "RevenuePerInstall": null}, "emitted_at": 1709910794831} diff --git a/airbyte-integrations/connectors/source-bing-ads/metadata.yaml b/airbyte-integrations/connectors/source-bing-ads/metadata.yaml index 127fe231e9f95..12da1841ed8d8 100644 --- a/airbyte-integrations/connectors/source-bing-ads/metadata.yaml +++ b/airbyte-integrations/connectors/source-bing-ads/metadata.yaml @@ -16,12 +16,13 @@ data: connectorSubtype: api connectorType: source definitionId: 47f25999-dd5e-4636-8c39-e7cea2453331 - dockerImageTag: 2.3.0 + dockerImageTag: 2.5.0 dockerRepository: airbyte/source-bing-ads documentationUrl: https://docs.airbyte.com/integrations/sources/bing-ads githubIssueLabel: source-bing-ads icon: bingads.svg license: MIT + maxSecondsBetweenMessages: 60 name: Bing Ads remoteRegistries: pypi: diff --git a/airbyte-integrations/connectors/source-bing-ads/poetry.lock b/airbyte-integrations/connectors/source-bing-ads/poetry.lock index 0200423f5f0fc..260ff306ee98c 100644 --- a/airbyte-integrations/connectors/source-bing-ads/poetry.lock +++ b/airbyte-integrations/connectors/source-bing-ads/poetry.lock @@ -1,14 +1,14 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.60.1" +version = "0.72.1" description = "A framework for writing Airbyte Connectors." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte-cdk-0.60.1.tar.gz", hash = "sha256:fc5212b2962c1dc6aca9cc6f1c2000d7636b7509915846c126420c2b0c814317"}, - {file = "airbyte_cdk-0.60.1-py3-none-any.whl", hash = "sha256:94b33c0f6851d1e2546eac3cec54c67489239595d9e0a496ef57c3fc808e89e3"}, + {file = "airbyte-cdk-0.72.1.tar.gz", hash = "sha256:1dbd0a11f3784cfdd5afa9f40315c9a6123e803be91f9f861642a78e7ee14cd9"}, + {file = "airbyte_cdk-0.72.1-py3-none-any.whl", hash = "sha256:849077805442286de99f589ecba4be82491a3d9d3f516ce1a8b0cbaf303db9a4"}, ] [package.dependencies] @@ -32,8 +32,8 @@ requests-cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] @@ -129,13 +129,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -551,13 +551,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -857,13 +857,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -916,7 +916,6 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -974,13 +973,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -992,15 +991,15 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" @@ -1023,19 +1022,19 @@ test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "tes [[package]] name = "setuptools" -version = "69.1.0" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -1072,13 +1071,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -1218,4 +1217,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "7ffb684bece88c37d64d09e4ce2e340727c0ad7811c8d9ffe988779870a9204c" +content-hash = "ccf3207003fb6ca7231f702f05457d3630808ea9ec4bcfc9e6c8cce71a9cea21" diff --git a/airbyte-integrations/connectors/source-bing-ads/pyproject.toml b/airbyte-integrations/connectors/source-bing-ads/pyproject.toml index 9da2f525ae07c..9cec659742a15 100644 --- a/airbyte-integrations/connectors/source-bing-ads/pyproject.toml +++ b/airbyte-integrations/connectors/source-bing-ads/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "2.3.0" +version = "2.5.0" name = "source-bing-ads" description = "Source implementation for Bing Ads." authors = [ "Airbyte ",] @@ -20,7 +20,7 @@ python = "^3.9,<3.12" bingads = "==13.0.18.1" pandas = "==2.2.0" urllib3 = "==1.26.18" -airbyte-cdk = "==0.60.1" +airbyte-cdk = "^0" cached-property = "==1.5.2" [tool.poetry.scripts] diff --git a/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/base_streams.py b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/base_streams.py index 7294f453deb19..6993542c57e1f 100644 --- a/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/base_streams.py +++ b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/base_streams.py @@ -206,13 +206,21 @@ def request_params( "ReturnAdditionalFields": self.additional_fields, } + def _transform_tax_fields(self, record: Mapping[str, Any]) -> Mapping[str, Any]: + tax_certificates = record["TaxCertificate"].get("TaxCertificates", {}) if record.get("TaxCertificate") is not None else {} + if tax_certificates and not isinstance(tax_certificates, list): + tax_certificate_pairs = tax_certificates.get("KeyValuePairOfstringbase64Binary") + if tax_certificate_pairs: + record["TaxCertificate"]["TaxCertificates"] = tax_certificate_pairs + return record + def parse_response(self, response: sudsobject.Object, **kwargs) -> Iterable[Mapping]: if response is not None and hasattr(response, self.data_field): records = self.client.asdict(response)[self.data_field] for record in records: if record["Id"] not in self._unique_account_ids: self._unique_account_ids.add(record["Id"]) - yield record + yield self._transform_tax_fields(record) class Campaigns(BingAdsCampaignManagementStream): diff --git a/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/accounts.json b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/accounts.json index 63bf3d699add3..610377700cf8e 100644 --- a/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/accounts.json +++ b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/accounts.json @@ -127,6 +127,32 @@ }, "TimeStamp": { "type": ["null", "string"] + }, + "TaxCertificate": { + "type": ["null", "object"], + "properties": { + "TaxCertificateBlobContainerName": { + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"], + "enum": ["Invalid", "Pending", "Valid"] + }, + "TaxCertificates": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "key": { + "type": ["null", "string"] + }, + "value": { + "type": ["null", "string"] + } + } + } + } + } } } } diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/base_test.py b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/base_test.py index 6554e36840092..129b81b076a8f 100644 --- a/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/base_test.py +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/base_test.py @@ -21,7 +21,6 @@ class BaseTest(TestCase): - @property def service_manager(self) -> Union[ReportingServiceManager, BulkServiceManager]: pass @@ -40,21 +39,20 @@ def _state(self, file: str, stream_name: str) -> list[AirbyteStateMessage]: return StateBuilder().with_stream_state(stream_name, state).build() def auth_client(self, http_mocker: HttpMocker) -> None: - http_mocker.post( - request=build_request(self._config), - responses=response_with_status("oauth", 200) - ) + http_mocker.post(request=build_request(self._config), responses=response_with_status("oauth", 200)) def read_stream( - self, - stream_name: str, - sync_mode: SyncMode, - config: Dict[str, Any], - stream_data_file: str = None, - state: Optional[Dict[str, Any]] = None, - expecting_exception: bool = False, + self, + stream_name: str, + sync_mode: SyncMode, + config: Dict[str, Any], + stream_data_file: str = None, + state: Optional[Dict[str, Any]] = None, + expecting_exception: bool = False, ) -> Tuple[EntrypointOutput, MagicMock]: with patch.object(HttpAuthenticated, "send", mock_http_authenticated_send): - with patch.object(self.service_manager, "download_file", return_value=self._download_file(stream_data_file)) as service_call_mock: + with patch.object( + self.service_manager, "download_file", return_value=self._download_file(stream_data_file) + ) as service_call_mock: catalog = CatalogBuilder().with_stream(stream_name, sync_mode).build() return read(SourceBingAds(), config, catalog, state, expecting_exception), service_call_mock diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/client_builder.py b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/client_builder.py index 5efab804e5569..18036b1f0229e 100644 --- a/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/client_builder.py +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/client_builder.py @@ -25,7 +25,5 @@ def build_request(config: Dict[str, Any]) -> HttpRequest: url="https://login.microsoftonline.com/common/oauth2/v2.0/token", query_params={}, body=body, - headers={ - "Content-Type": "application/x-www-form-urlencoded" - }, + headers={"Content-Type": "application/x-www-form-urlencoded"}, ) diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/suds_response_mock.py b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/suds_response_mock.py index 224a810a402ae..59b3bea844eab 100644 --- a/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/suds_response_mock.py +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/suds_response_mock.py @@ -52,6 +52,16 @@ Inactive + + Test Container Name + + + test_key + test_value + + + Active + Expert @@ -137,10 +147,10 @@ def mock_http_authenticated_send(transport: HttpAuthenticated, request: Request) -> Reply: - if request.headers.get('SOAPAction').decode() == '"GetUser"': + if request.headers.get("SOAPAction").decode() == '"GetUser"': return Reply(code=200, headers={}, message=GET_USER_RESPONSE) - if request.headers.get('SOAPAction').decode() == '"SearchAccounts"': + if request.headers.get("SOAPAction").decode() == '"SearchAccounts"': return Reply(code=200, headers={}, message=SEARCH_ACCOUNTS_RESPONSE) raise Exception(f"Unexpected SOAPAction provided for mock SOAP client: {request.headers.get('SOAPAction').decode()}") diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_accounts_stream.py b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_accounts_stream.py new file mode 100644 index 0000000000000..d97762fc0bf56 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_accounts_stream.py @@ -0,0 +1,39 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +from typing import Any, Dict, Optional, Tuple +from unittest.mock import MagicMock, patch + +from airbyte_cdk.models import SyncMode +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, read +from airbyte_cdk.test.mock_http import HttpMocker +from base_test import BaseTest +from source_bing_ads.source import SourceBingAds +from suds.transport.https import HttpAuthenticated +from suds_response_mock import mock_http_authenticated_send + + +class TestAccountsStream(BaseTest): + stream_name = "accounts" + + def read_stream( + self, + stream_name: str, + sync_mode: SyncMode, + config: Dict[str, Any], + state: Optional[Dict[str, Any]] = None, + expecting_exception: bool = False, + ) -> Tuple[EntrypointOutput, MagicMock]: + with patch.object(HttpAuthenticated, "send", mock_http_authenticated_send): + catalog = CatalogBuilder().with_stream(stream_name, sync_mode).build() + return read(SourceBingAds(), config, catalog, state, expecting_exception) + + @HttpMocker() + def test_read_accounts_tax_certificate_data(self, http_mocker): + # Our account doesn't have configured Tax certificate. + self.auth_client(http_mocker) + output = self.read_stream(self.stream_name, SyncMode.full_refresh, self._config) + assert output.records[0].record.data["TaxCertificate"] == { + "Status": "Active", + "TaxCertificateBlobContainerName": "Test Container Name", + "TaxCertificates": [{"key": "test_key", "value": "test_value"}], + } diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_app_install_ad_labels_stream.py b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_app_install_ad_labels_stream.py new file mode 100644 index 0000000000000..025773e4a35c0 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_app_install_ad_labels_stream.py @@ -0,0 +1,59 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +import pendulum +from airbyte_cdk.models import SyncMode +from airbyte_cdk.test.mock_http import HttpMocker +from freezegun import freeze_time +from test_bulk_stream import TestBulkStream + + +class TestAppInstallAdLabelsStream(TestBulkStream): + stream_name = "app_install_ad_labels" + account_id = "180535609" + cursor_field = "Modified Time" + + @HttpMocker() + def test_return_records_from_given_csv_file(self, http_mocker: HttpMocker): + self.auth_client(http_mocker) + output, _ = self.read_stream(self.stream_name, SyncMode.full_refresh, self._config, "app_install_ad_labels") + assert len(output.records) == 1 + + @HttpMocker() + def test_return_logged_info_for_empty_csv_file(self, http_mocker: HttpMocker): + self.auth_client(http_mocker) + output, _ = self.read_stream(self.stream_name, SyncMode.full_refresh, self._config, "app_install_ad_labels_empty") + assert len(output.records) == 0 + assert len(output.logs) == 10 + + @HttpMocker() + def test_transform_records(self, http_mocker: HttpMocker): + self.auth_client(http_mocker) + output, _ = self.read_stream(self.stream_name, SyncMode.full_refresh, self._config, "app_install_ad_labels") + assert output.records + for record in output.records: + assert "Account Id" in record.record.data.keys() + assert isinstance(record.record.data["Account Id"], int) + + @HttpMocker() + def test_incremental_read_cursor_value_matches_value_from_most_recent_record(self, http_mocker: HttpMocker): + self.auth_client(http_mocker) + output, _ = self.read_stream(self.stream_name, SyncMode.incremental, self._config, "app_install_ad_labels_with_cursor_value") + assert len(output.records) == 4 + assert dict(output.most_recent_state.stream_state).get(self.account_id, {}) == {self.cursor_field: "2024-01-04T12:12:12.028+00:00"} + + @HttpMocker() + @freeze_time("2024-02-26") # mock current time as stream data available for 30 days only + def test_incremental_read_with_state(self, http_mocker: HttpMocker): + state = self._state("app_install_ad_labels_state", self.stream_name) + self.auth_client(http_mocker) + output, service_call_mock = self.read_stream( + self.stream_name, + SyncMode.incremental, + self._config, + "app_install_ad_labels_with_state", + state + ) + assert dict(output.most_recent_state.stream_state).get(self.account_id, {}) == {self.cursor_field: "2024-01-29T12:55:12.028+00:00"} + + previous_state = state[0].stream.stream_state.dict() + # gets DownloadParams object + assert service_call_mock.call_args.args[0].last_sync_time_in_utc == pendulum.parse(previous_state[self.account_id][self.cursor_field]) diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_app_install_ads_stream.py b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_app_install_ads_stream.py new file mode 100644 index 0000000000000..831428bf11d96 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_app_install_ads_stream.py @@ -0,0 +1,53 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +import pendulum +from airbyte_cdk.models import SyncMode +from airbyte_cdk.test.mock_http import HttpMocker +from freezegun import freeze_time +from test_bulk_stream import TestBulkStream + + +class TestAppInstallAdsStream(TestBulkStream): + stream_name = "app_install_ads" + account_id = "180535609" + cursor_field = "Modified Time" + + @HttpMocker() + def test_return_records_from_given_csv_file(self, http_mocker: HttpMocker): + self.auth_client(http_mocker) + output, _ = self.read_stream(self.stream_name, SyncMode.full_refresh, self._config, "app_install_ads") + assert len(output.records) == 1 + + @HttpMocker() + def test_return_logged_info_for_empty_csv_file(self, http_mocker: HttpMocker): + self.auth_client(http_mocker) + output, _ = self.read_stream(self.stream_name, SyncMode.full_refresh, self._config, "app_install_ads_empty") + assert len(output.records) == 0 + assert len(output.logs) == 10 + + @HttpMocker() + def test_transform_records(self, http_mocker: HttpMocker): + self.auth_client(http_mocker) + output, _ = self.read_stream(self.stream_name, SyncMode.full_refresh, self._config, "app_install_ads") + assert output.records + for record in output.records: + assert "Account Id" in record.record.data.keys() + assert isinstance(record.record.data["Account Id"], int) + + @HttpMocker() + def test_incremental_read_cursor_value_matches_value_from_most_recent_record(self, http_mocker: HttpMocker): + self.auth_client(http_mocker) + output, _ = self.read_stream(self.stream_name, SyncMode.incremental, self._config, "app_install_ads_with_cursor_value") + assert len(output.records) == 4 + assert dict(output.most_recent_state.stream_state).get(self.account_id, {}) == {self.cursor_field: "2024-03-01T12:49:12.028+00:00"} + + @HttpMocker() + @freeze_time("2023-12-29") # mock current time as stream data available for 30 days only + def test_incremental_read_with_state(self, http_mocker: HttpMocker): + state = self._state("app_install_ads_state", self.stream_name) + self.auth_client(http_mocker) + output, service_call_mock = self.read_stream(self.stream_name, SyncMode.incremental, self._config, "app_install_ads_with_state", state) + assert dict(output.most_recent_state.stream_state).get(self.account_id, {}) == {self.cursor_field: "2024-01-01T10:55:12.028+00:00"} + + previous_state = state[0].stream.stream_state.dict() + # gets DownloadParams object + assert service_call_mock.call_args.args[0].last_sync_time_in_utc == pendulum.parse(previous_state[self.account_id][self.cursor_field]) diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_audience_performance_report.py b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_audience_performance_report.py index f8075e01c2be6..7d10bf1dc3f42 100644 --- a/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_audience_performance_report.py +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_audience_performance_report.py @@ -9,8 +9,8 @@ class TestAudiencePerformanceReportDailyStream(TestSuiteReportStream): records_number = 8 state_file = "audience_performance_report_daily_state" incremental_report_file = "audience_performance_report_daily_incremental" - first_read_state = {"audience_performance_report_daily": {"180535609": {"TimePeriod": "2023-12-17"}}} - second_read_state = {"audience_performance_report_daily": {"180535609": {"TimePeriod": "2023-12-24"}}} + first_read_state = {"180535609": {"TimePeriod": "2023-12-17"}} + second_read_state = {"180535609": {"TimePeriod": "2023-12-24"}} class TestAudiencePerformanceReportHourlyStream(TestSuiteReportStream): @@ -19,8 +19,8 @@ class TestAudiencePerformanceReportHourlyStream(TestSuiteReportStream): records_number = 24 state_file = "audience_performance_report_hourly_state" incremental_report_file = "audience_performance_report_hourly_incremental" - first_read_state = {"audience_performance_report_hourly": {"180535609": {"TimePeriod": "2023-11-12T00:00:00+00:00"}}} - second_read_state = {"audience_performance_report_hourly": {"180535609": {"TimePeriod": "2023-11-13T00:00:00+00:00"}}} + first_read_state = {"180535609": {"TimePeriod": "2023-11-12T00:00:00+00:00"}} + second_read_state = {"180535609": {"TimePeriod": "2023-11-13T00:00:00+00:00"}} class TestAudiencePerformanceReportWeeklyStream(TestSuiteReportStream): @@ -30,8 +30,8 @@ class TestAudiencePerformanceReportWeeklyStream(TestSuiteReportStream): second_read_records_number = 5 state_file = "audience_performance_report_weekly_state" incremental_report_file = "audience_performance_report_weekly_incremental" - first_read_state = {"audience_performance_report_weekly": {"180535609": {"TimePeriod": "2023-12-25"}}} - second_read_state = {"audience_performance_report_weekly": {"180535609": {"TimePeriod": "2024-01-29"}}} + first_read_state = {"180535609": {"TimePeriod": "2023-12-25"}} + second_read_state = {"180535609": {"TimePeriod": "2024-01-29"}} class TestAudiencePerformanceReportMonthlyStream(TestSuiteReportStream): @@ -40,5 +40,5 @@ class TestAudiencePerformanceReportMonthlyStream(TestSuiteReportStream): records_number = 6 state_file = "audience_performance_report_monthly_state" incremental_report_file = "audience_performance_report_monthly_incremental" - first_read_state = {"audience_performance_report_monthly": {"180535609": {"TimePeriod": "2023-09-01"}}} - second_read_state = {"audience_performance_report_monthly": {"180535609": {"TimePeriod": "2024-03-01"}}} + first_read_state = {"180535609": {"TimePeriod": "2023-09-01"}} + second_read_state = {"180535609": {"TimePeriod": "2024-03-01"}} diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_budget_stream.py b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_budget_stream.py index 5f7bd48026139..5c6aebdd00a8b 100644 --- a/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_budget_stream.py +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_budget_stream.py @@ -38,17 +38,19 @@ def test_incremental_read_cursor_value_matches_value_from_most_recent_record(sel self.auth_client(http_mocker) output, _ = self.read_stream(self.stream_name, SyncMode.incremental, self._config, "budget_with_cursor_value") assert len(output.records) == 8 - assert output.most_recent_state.get(self.stream_name, {}).get(self.account_id, {}) == {self.cursor_field: "2024-01-01T12:54:12.028+00:00"} + assert output.most_recent_state.stream_state.dict().get(self.account_id) == {self.cursor_field: "2024-01-01T12:54:12.028+00:00"} @HttpMocker() - @freeze_time("204-02-26") # mock current time as stream data available for 30 days only + @freeze_time("2024-02-26") # mock current time as stream data available for 30 days only def test_incremental_read_with_state(self, http_mocker: HttpMocker): state = self._state("budget_state", self.stream_name) self.auth_client(http_mocker) output, service_call_mock = self.read_stream(self.stream_name, SyncMode.incremental, self._config, "budget_with_state", state) assert len(output.records) == 8 - assert output.most_recent_state.get(self.stream_name, {}).get(self.account_id, {}) == {self.cursor_field: "2024-01-30T12:54:12.028+00:00"} + assert output.most_recent_state.stream_state.dict().get(self.account_id) == {self.cursor_field: "2024-01-30T12:54:12.028+00:00"} previous_state = state[0].stream.stream_state.dict() # gets DownloadParams object - assert service_call_mock.call_args.args[0].last_sync_time_in_utc == pendulum.parse(previous_state[self.account_id][self.cursor_field]) + assert service_call_mock.call_args.args[0].last_sync_time_in_utc == pendulum.parse( + previous_state[self.account_id][self.cursor_field] + ) diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_bulk_stream.py b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_bulk_stream.py index 9435295921b27..92fcb2e8a2430 100644 --- a/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_bulk_stream.py +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_bulk_stream.py @@ -8,7 +8,6 @@ class TestBulkStream(BaseTest): - @property def service_manager(self) -> BulkServiceManager: return BulkServiceManager diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_goals_and_funnels_report.py b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_goals_and_funnels_report.py index 7affe69b4f4c4..53eb739802d65 100644 --- a/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_goals_and_funnels_report.py +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_goals_and_funnels_report.py @@ -9,8 +9,8 @@ class TestGoalsAndFunnelsReportDailyStream(TestSuiteReportStream): records_number = 8 state_file = "goals_and_funnels_report_daily_state" incremental_report_file = "goals_and_funnels_report_daily_incremental" - first_read_state = {"goals_and_funnels_report_daily": {"180535609": {"TimePeriod": "2023-12-17"}}} - second_read_state = {"goals_and_funnels_report_daily": {"180535609": {"TimePeriod": "2023-12-24"}}} + first_read_state = {"180535609": {"TimePeriod": "2023-12-17"}} + second_read_state = {"180535609": {"TimePeriod": "2023-12-24"}} class TestGoalsAndFunnelsReportHourlyStream(TestSuiteReportStream): @@ -19,8 +19,8 @@ class TestGoalsAndFunnelsReportHourlyStream(TestSuiteReportStream): records_number = 24 state_file = "goals_and_funnels_report_hourly_state" incremental_report_file = "goals_and_funnels_report_hourly_incremental" - first_read_state = {"goals_and_funnels_report_hourly": {"180535609": {"TimePeriod": "2023-11-12T00:00:00+00:00"}}} - second_read_state = {"goals_and_funnels_report_hourly": {"180535609": {"TimePeriod": "2023-11-13T00:00:00+00:00"}}} + first_read_state = {"180535609": {"TimePeriod": "2023-11-12T00:00:00+00:00"}} + second_read_state = {"180535609": {"TimePeriod": "2023-11-13T00:00:00+00:00"}} class TestGoalsAndFunnelsReportWeeklyStream(TestSuiteReportStream): @@ -30,8 +30,8 @@ class TestGoalsAndFunnelsReportWeeklyStream(TestSuiteReportStream): second_read_records_number = 5 state_file = "goals_and_funnels_report_weekly_state" incremental_report_file = "goals_and_funnels_report_weekly_incremental" - first_read_state = {"goals_and_funnels_report_weekly": {"180535609": {"TimePeriod": "2023-12-25"}}} - second_read_state = {"goals_and_funnels_report_weekly": {"180535609": {"TimePeriod": "2024-01-29"}}} + first_read_state = {"180535609": {"TimePeriod": "2023-12-25"}} + second_read_state = {"180535609": {"TimePeriod": "2024-01-29"}} class TestGoalsAndFunnelsReportMonthlyStream(TestSuiteReportStream): @@ -40,5 +40,5 @@ class TestGoalsAndFunnelsReportMonthlyStream(TestSuiteReportStream): records_number = 6 state_file = "goals_and_funnels_report_monthly_state" incremental_report_file = "goals_and_funnels_report_monthly_incremental" - first_read_state = {"goals_and_funnels_report_monthly": {"180535609": {"TimePeriod": "2023-09-01"}}} - second_read_state = {"goals_and_funnels_report_monthly": {"180535609": {"TimePeriod": "2024-03-01"}}} + first_read_state = {"180535609": {"TimePeriod": "2023-09-01"}} + second_read_state = {"180535609": {"TimePeriod": "2024-03-01"}} diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_hourly_reports.py b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_hourly_reports.py new file mode 100644 index 0000000000000..ce40374eed82d --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_hourly_reports.py @@ -0,0 +1,107 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from test_report_stream import TestSuiteReportStream + +FIRST_STATE = {"180535609": {"TimePeriod": "2023-11-12T00:00:00+00:00"}} +SECOND_STATE = {"180535609": {"TimePeriod": "2023-11-13T00:00:00+00:00"}} + + +class HourlyReportsTest(TestSuiteReportStream): + first_read_state = FIRST_STATE + second_read_state = SECOND_STATE + + +class TestAgeGenderAudienceReportHourlyStream(HourlyReportsTest): + stream_name = "age_gender_audience_report_hourly" + report_file = "age_gender_audience_report_hourly" + records_number = 24 + state_file = "hourly_reports_state" + incremental_report_file = "age_gender_audience_report_hourly_incremental" + + +class TestAccountImpressionPerformanceReportHourlyStream(HourlyReportsTest): + stream_name = "account_impression_performance_report_hourly" + report_file = "account_impression_performance_report_hourly" + records_number = 24 + state_file = "hourly_reports_state" + incremental_report_file = "account_impression_performance_report_hourly_incremental" + + +class TestKeywordPerformanceReportHourlyStream(HourlyReportsTest): + stream_name = "keyword_performance_report_hourly" + report_file = "keyword_performance_report_hourly" + records_number = 24 + state_file = "hourly_reports_state" + incremental_report_file = "keyword_performance_report_hourly_incremental" + + +class TestAdPerformanceReportHourlyStream(HourlyReportsTest): + stream_name = "ad_performance_report_hourly" + report_file = "ad_performance_report_hourly" + records_number = 24 + state_file = "hourly_reports_state" + incremental_report_file = "ad_performance_report_hourly_incremental" + + +class TestAdGroupImpressionPerformanceReportHourlyStream(HourlyReportsTest): + stream_name = "ad_group_impression_performance_report_hourly" + report_file = "ad_group_impression_performance_report_hourly" + records_number = 24 + state_file = "hourly_reports_state" + incremental_report_file = "ad_group_impression_performance_report_hourly_incremental" + + +class TestCampaignPerformanceReportHourlyStream(HourlyReportsTest): + stream_name = "campaign_performance_report_hourly" + report_file = "campaign_performance_report_hourly" + records_number = 24 + state_file = "hourly_reports_state" + incremental_report_file = "campaign_performance_report_hourly_incremental" + + +class TestCampaignImpressionPerformanceReportHourlyStream(HourlyReportsTest): + stream_name = "campaign_impression_performance_report_hourly" + report_file = "campaign_impression_performance_report_hourly" + records_number = 24 + state_file = "hourly_reports_state" + incremental_report_file = "campaign_impression_performance_report_hourly_incremental" + + +class TestGeographicPerformanceReportHourlyStream(HourlyReportsTest): + stream_name = "geographic_performance_report_hourly" + report_file = "geographic_performance_report_hourly" + records_number = 24 + state_file = "hourly_reports_state" + incremental_report_file = "geographic_performance_report_hourly_incremental" + + +class TestSearchQueryPerformanceReportHourlyStream(HourlyReportsTest): + stream_name = "search_query_performance_report_hourly" + report_file = "search_query_performance_report_hourly" + records_number = 24 + state_file = "hourly_reports_state" + incremental_report_file = "search_query_performance_report_hourly_incremental" + + +class TestUserLocationPerformanceReportHourlyStream(HourlyReportsTest): + stream_name = "user_location_performance_report_hourly" + report_file = "user_location_performance_report_hourly" + records_number = 24 + state_file = "hourly_reports_state" + incremental_report_file = "user_location_performance_report_hourly_incremental" + + +class TestAdGroupPerformanceReportHourlyStream(HourlyReportsTest): + stream_name = "ad_group_performance_report_hourly" + report_file = "ad_group_performance_report_hourly" + records_number = 24 + state_file = "hourly_reports_state" + incremental_report_file = "ad_group_performance_report_hourly_incremental" + + +class TestAccountPerformanceReportHourlyStream(HourlyReportsTest): + stream_name = "account_performance_report_hourly" + report_file = "account_performance_report_hourly" + records_number = 24 + state_file = "hourly_reports_state" + incremental_report_file = "account_performance_report_hourly_incremental" diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_product_dimension_performance_report.py b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_product_dimension_performance_report.py index 89968b69c67b3..8edc6d392cdf5 100644 --- a/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_product_dimension_performance_report.py +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_product_dimension_performance_report.py @@ -9,8 +9,8 @@ class TestProductDimensionPerformanceReportDailyStream(TestSuiteReportStream): records_number = 8 state_file = "product_dimension_performance_report_daily_state" incremental_report_file = "product_dimension_performance_report_daily_incremental" - first_read_state = {"product_dimension_performance_report_daily": {"180535609": {"TimePeriod": "2023-12-17"}}} - second_read_state = {"product_dimension_performance_report_daily": {"180535609": {"TimePeriod": "2023-12-25"}}} + first_read_state = {"180535609": {"TimePeriod": "2023-12-17"}} + second_read_state = {"180535609": {"TimePeriod": "2023-12-25"}} class TestProductDimensionPerformanceReportHourlyStream(TestSuiteReportStream): @@ -19,8 +19,8 @@ class TestProductDimensionPerformanceReportHourlyStream(TestSuiteReportStream): records_number = 8 state_file = "product_dimension_performance_report_hourly_state" incremental_report_file = "product_dimension_performance_report_hourly_incremental" - first_read_state = {"product_dimension_performance_report_hourly": {"180535609": {"TimePeriod": "2023-11-11T01:00:00+00:00"}}} - second_read_state = {"product_dimension_performance_report_hourly": {"180535609": {"TimePeriod": "2023-11-12T01:00:00+00:00"}}} + first_read_state = {"180535609": {"TimePeriod": "2023-11-11T01:00:00+00:00"}} + second_read_state = {"180535609": {"TimePeriod": "2023-11-12T01:00:00+00:00"}} class TestProductDimensionPerformanceReportWeeklyStream(TestSuiteReportStream): @@ -29,8 +29,8 @@ class TestProductDimensionPerformanceReportWeeklyStream(TestSuiteReportStream): records_number = 8 state_file = "product_dimension_performance_report_weekly_state" incremental_report_file = "product_dimension_performance_report_weekly_incremental" - first_read_state = {"product_dimension_performance_report_weekly": {"180535609": {"TimePeriod": "2023-12-17"}}} - second_read_state = {"product_dimension_performance_report_weekly": {"180535609": {"TimePeriod": "2023-12-25"}}} + first_read_state = {"180535609": {"TimePeriod": "2023-12-17"}} + second_read_state = {"180535609": {"TimePeriod": "2023-12-25"}} class TestProductDimensionPerformanceReportMonthlyStream(TestSuiteReportStream): @@ -39,5 +39,5 @@ class TestProductDimensionPerformanceReportMonthlyStream(TestSuiteReportStream): records_number = 8 state_file = "product_dimension_performance_report_monthly_state" incremental_report_file = "product_dimension_performance_report_monthly_incremental" - first_read_state = {"product_dimension_performance_report_monthly": {"180535609": {"TimePeriod": "2023-12-01"}}} - second_read_state = {"product_dimension_performance_report_monthly": {"180535609": {"TimePeriod": "2024-01-01"}}} + first_read_state = {"180535609": {"TimePeriod": "2023-12-01"}} + second_read_state = {"180535609": {"TimePeriod": "2024-01-01"}} diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_product_search_query_performance_report.py b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_product_search_query_performance_report.py index a7fabbf1e1c4f..df07d6552ab5c 100644 --- a/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_product_search_query_performance_report.py +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_product_search_query_performance_report.py @@ -9,8 +9,8 @@ class TestProductSearchPerformanceReportDailyStream(TestSuiteReportStream): records_number = 8 state_file = "product_search_query_performance_report_daily_state" incremental_report_file = "product_search_query_performance_report_daily_incremental" - first_read_state = {"product_search_query_performance_report_daily": {"180535609": {"TimePeriod": "2023-12-17"}}} - second_read_state = {"product_search_query_performance_report_daily": {"180535609": {"TimePeriod": "2023-12-24"}}} + first_read_state = {"180535609": {"TimePeriod": "2023-12-17"}} + second_read_state = {"180535609": {"TimePeriod": "2023-12-24"}} class TestProductSearchQueryPerformanceReportHourlyStream(TestSuiteReportStream): @@ -19,8 +19,8 @@ class TestProductSearchQueryPerformanceReportHourlyStream(TestSuiteReportStream) records_number = 24 state_file = "product_search_query_performance_report_hourly_state" incremental_report_file = "product_search_query_performance_report_hourly_incremental" - first_read_state = {"product_search_query_performance_report_hourly": {"180535609": {"TimePeriod": "2023-11-12T00:00:00+00:00"}}} - second_read_state = {"product_search_query_performance_report_hourly": {"180535609": {"TimePeriod": "2023-11-13T00:00:00+00:00"}}} + first_read_state = {"180535609": {"TimePeriod": "2023-11-12T00:00:00+00:00"}} + second_read_state = {"180535609": {"TimePeriod": "2023-11-13T00:00:00+00:00"}} class TestProductSearchQueryPerformanceReportWeeklyStream(TestSuiteReportStream): @@ -30,8 +30,8 @@ class TestProductSearchQueryPerformanceReportWeeklyStream(TestSuiteReportStream) second_read_records_number = 5 state_file = "product_dimension_performance_report_weekly_state" incremental_report_file = "product_search_query_performance_report_weekly_incremental" - first_read_state = {"product_search_query_performance_report_weekly": {"180535609": {"TimePeriod": "2023-12-25"}}} - second_read_state = {"product_search_query_performance_report_weekly": {"180535609": {"TimePeriod": "2024-01-29"}}} + first_read_state = {"180535609": {"TimePeriod": "2023-12-25"}} + second_read_state = {"180535609": {"TimePeriod": "2024-01-29"}} class TestProductSearchQueryPerformanceReportMonthlyStream(TestSuiteReportStream): @@ -40,5 +40,5 @@ class TestProductSearchQueryPerformanceReportMonthlyStream(TestSuiteReportStream records_number = 6 state_file = "product_search_query_performance_report_monthly_state" incremental_report_file = "product_search_query_performance_report_monthly_incremental" - first_read_state = {"product_search_query_performance_report_monthly": {"180535609": {"TimePeriod": "2023-09-01"}}} - second_read_state = {"product_search_query_performance_report_monthly": {"180535609": {"TimePeriod": "2024-03-01"}}} + first_read_state = {"180535609": {"TimePeriod": "2023-09-01"}} + second_read_state = {"180535609": {"TimePeriod": "2024-03-01"}} diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_report_stream.py b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_report_stream.py index a0d7feb18261e..f4be42fef8795 100644 --- a/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_report_stream.py +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_report_stream.py @@ -53,23 +53,13 @@ def setUp(self): @HttpMocker() def test_return_records_from_given_csv_file(self, http_mocker: HttpMocker): self.auth_client(http_mocker) - output, _ = self.read_stream( - self.stream_name, - SyncMode.full_refresh, - self._config, - self.report_file - ) + output, _ = self.read_stream(self.stream_name, SyncMode.full_refresh, self._config, self.report_file) assert len(output.records) == self.records_number @HttpMocker() def test_transform_records_from_given_csv_file(self, http_mocker: HttpMocker): self.auth_client(http_mocker) - output, _ = self.read_stream( - self.stream_name, - SyncMode.full_refresh, - self._config, - self.report_file - ) + output, _ = self.read_stream(self.stream_name, SyncMode.full_refresh, self._config, self.report_file) assert len(output.records) == self.records_number for record in output.records: @@ -78,33 +68,24 @@ def test_transform_records_from_given_csv_file(self, http_mocker: HttpMocker): @HttpMocker() def test_incremental_read_returns_records(self, http_mocker: HttpMocker): self.auth_client(http_mocker) - output, _ = self.read_stream( - self.stream_name, - SyncMode.incremental, - self._config, - self.report_file - ) + output, _ = self.read_stream(self.stream_name, SyncMode.incremental, self._config, self.report_file) assert len(output.records) == self.records_number - assert output.most_recent_state == self.first_read_state + assert dict(output.most_recent_state.stream_state) == self.first_read_state @HttpMocker() def test_incremental_read_with_state_returns_records(self, http_mocker: HttpMocker): state = self._state(self.state_file, self.stream_name) self.auth_client(http_mocker) output, service_call_mock = self.read_stream( - self.stream_name, - SyncMode.incremental, - self._config, - self.incremental_report_file, - state + self.stream_name, SyncMode.incremental, self._config, self.incremental_report_file, state ) if not self.second_read_records_number: assert len(output.records) == self.records_number else: assert len(output.records) == self.second_read_records_number - actual_cursor = output.most_recent_state.get(self.stream_name).get(self.account_id) - expected_cursor = self.second_read_state.get(self.stream_name).get(self.account_id) + actual_cursor = dict(output.most_recent_state.stream_state).get(self.account_id) + expected_cursor = self.second_read_state.get(self.account_id) assert actual_cursor == expected_cursor provided_state = state[0].stream.stream_state.dict()[self.account_id][self.cursor_field] diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/account_impression_performance_report_hourly.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/account_impression_performance_report_hourly.csv new file mode 100644 index 0000000000000..bf44c0fd6307c --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/account_impression_performance_report_hourly.csv @@ -0,0 +1,25 @@ +"AccountName","AccountNumber","AccountId","TimePeriod","CurrencyCode","AdDistribution","Impressions","Clicks","Ctr","AverageCpc","Spend","AveragePosition","Conversions","ConversionRate","CostPerConversion","LowQualityClicks","LowQualityClicksPercent","LowQualityImpressions","LowQualityImpressionsPercent","LowQualityConversions","LowQualityConversionRate","DeviceType","PhoneImpressions","PhoneCalls","Ptr","Network","Assists","Revenue","ReturnOnAdSpend","CostPerAssist","RevenuePerConversion","RevenuePerAssist","AccountStatus","LowQualityGeneralClicks","LowQualitySophisticatedClicks","TopImpressionRatePercent","AllConversions","AllRevenue","AllConversionRate","AllCostPerConversion","AllReturnOnAdSpend","AllRevenuePerConversion","ViewThroughConversions","AverageCpm","ConversionsQualified","LowQualityConversionsQualified","AllConversionsQualified","ViewThroughConversionsQualified","ViewThroughRevenue","VideoViews","ViewThroughRate","AverageCPV","VideoViewsAt25Percent","VideoViewsAt50Percent","VideoViewsAt75Percent","CompletedVideoViews","VideoCompletionRate","TotalWatchTimeInMS","AverageWatchTimePerVideoView","AverageWatchTimePerImpression","Sales","CostPerSale","RevenuePerSale","Installs","CostPerInstall","RevenuePerInstall" +"Airbyte","F149MJ18","180535609","2023-11-11|01","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|02","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|03","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|04","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|05","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|06","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|07","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|08","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|09","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|10","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|11","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|12","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|13","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|14","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|15","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|16","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|17","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|18","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|19","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|20","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|21","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|22","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|23","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|00","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/account_impression_performance_report_hourly_incremental.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/account_impression_performance_report_hourly_incremental.csv new file mode 100644 index 0000000000000..7c609d5ce6bac --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/account_impression_performance_report_hourly_incremental.csv @@ -0,0 +1,25 @@ +"AccountName","AccountNumber","AccountId","TimePeriod","CurrencyCode","AdDistribution","Impressions","Clicks","Ctr","AverageCpc","Spend","AveragePosition","Conversions","ConversionRate","CostPerConversion","LowQualityClicks","LowQualityClicksPercent","LowQualityImpressions","LowQualityImpressionsPercent","LowQualityConversions","LowQualityConversionRate","DeviceType","PhoneImpressions","PhoneCalls","Ptr","Network","Assists","Revenue","ReturnOnAdSpend","CostPerAssist","RevenuePerConversion","RevenuePerAssist","AccountStatus","LowQualityGeneralClicks","LowQualitySophisticatedClicks","TopImpressionRatePercent","AllConversions","AllRevenue","AllConversionRate","AllCostPerConversion","AllReturnOnAdSpend","AllRevenuePerConversion","ViewThroughConversions","AverageCpm","ConversionsQualified","LowQualityConversionsQualified","AllConversionsQualified","ViewThroughConversionsQualified","ViewThroughRevenue","VideoViews","ViewThroughRate","AverageCPV","VideoViewsAt25Percent","VideoViewsAt50Percent","VideoViewsAt75Percent","CompletedVideoViews","VideoCompletionRate","TotalWatchTimeInMS","AverageWatchTimePerVideoView","AverageWatchTimePerImpression","Sales","CostPerSale","RevenuePerSale","Installs","CostPerInstall","RevenuePerInstall" +"Airbyte","F149MJ18","180535609","2023-11-12|01","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|02","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|03","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|04","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|05","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|06","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|07","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|08","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|09","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|10","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|11","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|12","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|13","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|14","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|15","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|16","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|17","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|18","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|19","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|20","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|21","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|22","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|23","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-13|00","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","0","0.00%","1","50.00%","0","","Computer","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","Active","0","0","100.00%","0","0.00","0.00%","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/account_performance_report_hourly.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/account_performance_report_hourly.csv new file mode 100644 index 0000000000000..bcdbb7d481663 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/account_performance_report_hourly.csv @@ -0,0 +1,25 @@ +"AccountId","TimePeriod","CurrencyCode","AdDistribution","DeviceType","Network","DeliveredMatchType","DeviceOS","TopVsOther","BidMatchType","AccountName","AccountNumber","PhoneImpressions","PhoneCalls","Clicks","Ctr","Spend","Impressions","CostPerConversion","Ptr","Assists","ReturnOnAdSpend","CostPerAssist","AverageCpc","AveragePosition","AverageCpm","Conversions","ConversionsQualified","ConversionRate","LowQualityClicks","LowQualityClicksPercent","LowQualityImpressions","LowQualitySophisticatedClicks","LowQualityConversions","LowQualityConversionRate","Revenue","RevenuePerConversion","RevenuePerAssist" +"180535609","2023-11-11|01","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-11|02","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-11|03","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-11|04","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-11|05","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-11|06","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-11|07","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-11|08","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-11|09","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-11|10","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-11|11","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-11|12","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-11|13","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-11|14","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-11|15","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-11|16","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-11|17","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-11|18","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-11|19","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-11|20","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-11|21","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-11|22","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-11|23","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-12|00","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/account_performance_report_hourly_incremental.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/account_performance_report_hourly_incremental.csv new file mode 100644 index 0000000000000..8eabe2f670414 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/account_performance_report_hourly_incremental.csv @@ -0,0 +1,25 @@ +"AccountId","TimePeriod","CurrencyCode","AdDistribution","DeviceType","Network","DeliveredMatchType","DeviceOS","TopVsOther","BidMatchType","AccountName","AccountNumber","PhoneImpressions","PhoneCalls","Clicks","Ctr","Spend","Impressions","CostPerConversion","Ptr","Assists","ReturnOnAdSpend","CostPerAssist","AverageCpc","AveragePosition","AverageCpm","Conversions","ConversionsQualified","ConversionRate","LowQualityClicks","LowQualityClicksPercent","LowQualityImpressions","LowQualitySophisticatedClicks","LowQualityConversions","LowQualityConversionRate","Revenue","RevenuePerConversion","RevenuePerAssist" +"180535609","2023-11-12|01","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-12|02","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-12|03","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-12|04","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-12|05","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-12|06","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-12|07","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-12|08","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-12|09","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-12|10","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-12|11","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-12|12","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-12|13","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-12|14","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-12|15","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-12|16","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-12|17","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-12|18","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-12|19","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-12|20","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-12|21","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-12|22","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-12|23","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" +"180535609","2023-11-13|00","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","F149MJ18","0","0","1","100.00%","1.48","1","","","0","0.00","","1.48","0.00","1480.00","0","0.00","0.00%","0","0.00%","0","0","0","","0.00","","" diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/ad_group_impression_performance_report_hourly.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/ad_group_impression_performance_report_hourly.csv new file mode 100644 index 0000000000000..9374142e07fd4 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/ad_group_impression_performance_report_hourly.csv @@ -0,0 +1,25 @@ +"AccountName","AccountNumber","AccountId","TimePeriod","Status","CampaignName","CampaignId","AdGroupName","AdGroupId","CurrencyCode","AdDistribution","Impressions","Clicks","Ctr","AverageCpc","Spend","AveragePosition","Conversions","ConversionRate","CostPerConversion","DeviceType","Language","QualityScore","ExpectedCtr","AdRelevance","LandingPageExperience","PhoneImpressions","PhoneCalls","Ptr","Network","Assists","Revenue","ReturnOnAdSpend","CostPerAssist","RevenuePerConversion","RevenuePerAssist","TrackingTemplate","CustomParameters","AccountStatus","CampaignStatus","AdGroupLabels","FinalUrlSuffix","CampaignType","TopImpressionSharePercent","AbsoluteTopImpressionRatePercent","TopImpressionRatePercent","BaseCampaignId","AllConversions","AllRevenue","AllConversionRate","AllCostPerConversion","AllReturnOnAdSpend","AllRevenuePerConversion","ViewThroughConversions","AdGroupType","AverageCpm","ConversionsQualified","AllConversionsQualified","ViewThroughConversionsQualified","ViewThroughRevenue","VideoViews","ViewThroughRate","AverageCPV","VideoViewsAt25Percent","VideoViewsAt50Percent","VideoViewsAt75Percent","CompletedVideoViews","VideoCompletionRate","TotalWatchTimeInMS","AverageWatchTimePerVideoView","AverageWatchTimePerImpression","Sales","CostPerSale","RevenuePerSale","Installs","CostPerInstall","RevenuePerInstall" +"Airbyte","F149MJ18","180535609","2023-11-11|01","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|02","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|03","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|04","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|05","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|06","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|07","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|08","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|09","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|10","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|11","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|12","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|13","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|14","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|15","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|16","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|17","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|18","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|19","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|20","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|21","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|22","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|23","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|00","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/ad_group_impression_performance_report_hourly_incremental.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/ad_group_impression_performance_report_hourly_incremental.csv new file mode 100644 index 0000000000000..76b293f8d904c --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/ad_group_impression_performance_report_hourly_incremental.csv @@ -0,0 +1,25 @@ +"AccountName","AccountNumber","AccountId","TimePeriod","Status","CampaignName","CampaignId","AdGroupName","AdGroupId","CurrencyCode","AdDistribution","Impressions","Clicks","Ctr","AverageCpc","Spend","AveragePosition","Conversions","ConversionRate","CostPerConversion","DeviceType","Language","QualityScore","ExpectedCtr","AdRelevance","LandingPageExperience","PhoneImpressions","PhoneCalls","Ptr","Network","Assists","Revenue","ReturnOnAdSpend","CostPerAssist","RevenuePerConversion","RevenuePerAssist","TrackingTemplate","CustomParameters","AccountStatus","CampaignStatus","AdGroupLabels","FinalUrlSuffix","CampaignType","TopImpressionSharePercent","AbsoluteTopImpressionRatePercent","TopImpressionRatePercent","BaseCampaignId","AllConversions","AllRevenue","AllConversionRate","AllCostPerConversion","AllReturnOnAdSpend","AllRevenuePerConversion","ViewThroughConversions","AdGroupType","AverageCpm","ConversionsQualified","AllConversionsQualified","ViewThroughConversionsQualified","ViewThroughRevenue","VideoViews","ViewThroughRate","AverageCPV","VideoViewsAt25Percent","VideoViewsAt50Percent","VideoViewsAt75Percent","CompletedVideoViews","VideoCompletionRate","TotalWatchTimeInMS","AverageWatchTimePerVideoView","AverageWatchTimePerImpression","Sales","CostPerSale","RevenuePerSale","Installs","CostPerInstall","RevenuePerInstall" +"Airbyte","F149MJ18","180535609","2023-11-12|01","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|02","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|03","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|04","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|05","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|06","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|07","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|08","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|09","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|10","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|11","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|12","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|13","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|14","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|15","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|16","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|17","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|18","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|19","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|20","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|21","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|22","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|23","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-13|00","Active","Airbyte test","531016227","keywords","1356799861840328","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","0.00%","","Computer","English","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","Active","","","Search & content","","100.00%","100.00%","531016227","0","0.00","0.00%","","0.00","","0","Standard","1480.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/ad_group_performance_report_hourly.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/ad_group_performance_report_hourly.csv new file mode 100644 index 0000000000000..11f3b4f21b2f9 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/ad_group_performance_report_hourly.csv @@ -0,0 +1,25 @@ +"AccountId","CampaignId","AdGroupId","TimePeriod","CurrencyCode","AdDistribution","DeviceType","Network","DeliveredMatchType","DeviceOS","TopVsOther","BidMatchType","Language","AccountName","CampaignName","CampaignType","AdGroupName","AdGroupType","Impressions","Clicks","Ctr","Spend","CostPerConversion","QualityScore","ExpectedCtr","AdRelevance","LandingPageExperience","PhoneImpressions","PhoneCalls","Ptr","Assists","CostPerAssist","CustomParameters","FinalUrlSuffix","ViewThroughConversions","AllCostPerConversion","AllReturnOnAdSpend","AllConversions","AllConversionRate","AllRevenue","AllRevenuePerConversion","AverageCpc","AveragePosition","AverageCpm","Conversions","ConversionRate","ConversionsQualified","Revenue","RevenuePerConversion","RevenuePerAssist" +"180535609","531016227","1356799861840328","2023-11-11|01","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|02","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|03","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|04","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|05","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|06","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|07","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|08","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|09","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|10","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|11","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|12","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|13","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|14","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|15","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|16","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|17","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|18","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|19","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|20","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|21","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|22","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|23","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|00","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/ad_group_performance_report_hourly_incremental.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/ad_group_performance_report_hourly_incremental.csv new file mode 100644 index 0000000000000..c608b9df8f488 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/ad_group_performance_report_hourly_incremental.csv @@ -0,0 +1,25 @@ +"AccountId","CampaignId","AdGroupId","TimePeriod","CurrencyCode","AdDistribution","DeviceType","Network","DeliveredMatchType","DeviceOS","TopVsOther","BidMatchType","Language","AccountName","CampaignName","CampaignType","AdGroupName","AdGroupType","Impressions","Clicks","Ctr","Spend","CostPerConversion","QualityScore","ExpectedCtr","AdRelevance","LandingPageExperience","PhoneImpressions","PhoneCalls","Ptr","Assists","CostPerAssist","CustomParameters","FinalUrlSuffix","ViewThroughConversions","AllCostPerConversion","AllReturnOnAdSpend","AllConversions","AllConversionRate","AllRevenue","AllRevenuePerConversion","AverageCpc","AveragePosition","AverageCpm","Conversions","ConversionRate","ConversionsQualified","Revenue","RevenuePerConversion","RevenuePerAssist" +"180535609","531016227","1356799861840328","2023-11-12|01","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|02","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|03","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|04","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|05","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|06","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|07","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|08","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|09","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|10","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|11","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|12","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|13","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|14","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|15","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|16","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|17","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|18","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|19","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|20","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|21","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|22","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|23","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" +"180535609","531016227","1356799861840328","2023-11-13|00","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","English","Airbyte","Airbyte test","Search & content","keywords","Standard","1","1","100.00%","1.48","","7","2","2","2","0","0","","0","","","","0","","0.00","0","0.00%","0.00","","1.48","0.00","1480.00","0","0.00%","0.00","0.00","","" diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/ad_performance_report_hourly.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/ad_performance_report_hourly.csv new file mode 100644 index 0000000000000..286c0de86c7e8 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/ad_performance_report_hourly.csv @@ -0,0 +1,25 @@ +"AccountId","CampaignId","AdGroupId","AdId","TimePeriod","CurrencyCode","AdDistribution","DeviceType","Language","Network","DeviceOS","TopVsOther","BidMatchType","DeliveredMatchType","AccountName","CampaignName","CampaignType","AdGroupName","Impressions","Clicks","Ctr","Spend","CostPerConversion","DestinationUrl","Assists","ReturnOnAdSpend","CostPerAssist","CustomParameters","FinalAppUrl","AdDescription","AdDescription2","ViewThroughConversions","ViewThroughConversionsQualified","AllCostPerConversion","AllReturnOnAdSpend","Conversions","ConversionRate","ConversionsQualified","AverageCpc","AveragePosition","AverageCpm","AllConversions","AllConversionRate","AllRevenue","AllRevenuePerConversion","Revenue","RevenuePerConversion","RevenuePerAssist" +"180535609","531016227","1356799861840328","84800390693061","2023-11-11|01","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-11|02","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-11|03","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-11|04","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-11|05","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-11|06","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-11|07","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-11|08","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-11|09","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-11|10","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-11|11","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-11|12","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-11|13","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-11|14","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-11|15","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-11|16","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-11|17","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-11|18","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-11|19","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-11|20","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-11|21","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-11|22","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-11|23","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-12|00","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/ad_performance_report_hourly_incremental.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/ad_performance_report_hourly_incremental.csv new file mode 100644 index 0000000000000..9e28637eea8f4 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/ad_performance_report_hourly_incremental.csv @@ -0,0 +1,25 @@ +"AccountId","CampaignId","AdGroupId","AdId","TimePeriod","CurrencyCode","AdDistribution","DeviceType","Language","Network","DeviceOS","TopVsOther","BidMatchType","DeliveredMatchType","AccountName","CampaignName","CampaignType","AdGroupName","Impressions","Clicks","Ctr","Spend","CostPerConversion","DestinationUrl","Assists","ReturnOnAdSpend","CostPerAssist","CustomParameters","FinalAppUrl","AdDescription","AdDescription2","ViewThroughConversions","ViewThroughConversionsQualified","AllCostPerConversion","AllReturnOnAdSpend","Conversions","ConversionRate","ConversionsQualified","AverageCpc","AveragePosition","AverageCpm","AllConversions","AllConversionRate","AllRevenue","AllRevenuePerConversion","Revenue","RevenuePerConversion","RevenuePerAssist" +"180535609","531016227","1356799861840328","84800390693061","2023-11-12|01","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-12|02","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-12|03","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-12|04","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-12|05","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-12|06","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-12|07","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-12|08","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-12|09","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-12|10","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-12|11","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-12|12","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-12|13","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-12|14","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-12|15","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-12|16","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-12|17","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-12|18","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-12|19","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-12|20","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-12|21","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-12|22","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-12|23","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84800390693061","2023-11-13|00","USD","Search","Computer","English","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Phrase","Airbyte","Airbyte test","Search & content","keywords","1","1","100.00%","1.48","","","0","0.00","","","","","","0","","","0.00","0","0.00%","0.00","1.48","0.00","1480.00","0","0.00%","0.00","","0.00","","" diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/age_gender_audience_report_hourly.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/age_gender_audience_report_hourly.csv new file mode 100644 index 0000000000000..d806ce9ff39e2 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/age_gender_audience_report_hourly.csv @@ -0,0 +1,25 @@ +"AccountId","AgeGroup","Gender","TimePeriod","AllConversions","AccountName","AccountNumber","CampaignName","CampaignId","AdGroupName","AdGroupId","AdDistribution","Impressions","Clicks","Conversions","Spend","Revenue","ExtendedCost","Assists","Language","AccountStatus","CampaignStatus","AdGroupStatus","BaseCampaignId","AllRevenue","ViewThroughConversions","Goal","GoalType","AbsoluteTopImpressionRatePercent","TopImpressionRatePercent","ConversionsQualified","AllConversionsQualified","ViewThroughConversionsQualified","ViewThroughRevenue" +"180535609","Unknown","Unknown","2023-11-11|01","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-11|02","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-11|03","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-11|04","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-11|05","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-11|06","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-11|07","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-11|08","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-11|09","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-11|10","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-11|11","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-11|12","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-11|13","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-11|14","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-11|15","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-11|16","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-11|17","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-11|18","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-11|19","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-11|20","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-11|21","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-11|22","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-11|23","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-12|00","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/age_gender_audience_report_hourly_incremental.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/age_gender_audience_report_hourly_incremental.csv new file mode 100644 index 0000000000000..be83f8337a3e6 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/age_gender_audience_report_hourly_incremental.csv @@ -0,0 +1,25 @@ +"AccountId","AgeGroup","Gender","TimePeriod","AllConversions","AccountName","AccountNumber","CampaignName","CampaignId","AdGroupName","AdGroupId","AdDistribution","Impressions","Clicks","Conversions","Spend","Revenue","ExtendedCost","Assists","Language","AccountStatus","CampaignStatus","AdGroupStatus","BaseCampaignId","AllRevenue","ViewThroughConversions","Goal","GoalType","AbsoluteTopImpressionRatePercent","TopImpressionRatePercent","ConversionsQualified","AllConversionsQualified","ViewThroughConversionsQualified","ViewThroughRevenue" +"180535609","Unknown","Unknown","2023-11-12|01","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-12|02","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-12|03","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-12|04","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-12|05","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-12|06","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-12|07","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-12|08","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-12|09","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-12|10","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-12|11","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-12|12","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-12|13","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-12|14","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-12|15","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-12|16","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-12|17","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-12|18","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-12|19","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-12|20","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-12|21","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-12|22","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-12|23","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" +"180535609","Unknown","Unknown","2023-11-13|00","0","Airbyte","F149MJ18","Airbyte test","531016227","keywords","1356799861840328","Search","1","0","0","0.00","0.00","0.00","0","Czech","Active","Active","Active","531016227","0.00","0","","","100.00%","100.00%","0.00","0.00","","0.00" diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/app_install_ad_labels.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/app_install_ad_labels.csv new file mode 100644 index 0000000000000..48e9371037191 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/app_install_ad_labels.csv @@ -0,0 +1,3 @@ +Type,Status,Id,Parent Id,Campaign,Ad Group,Client Id,Modified Time,Name,Description,Label,Color +Format Version,,,,,,,,6.0,,, +App Install Ad Label,,-22,-11112,,,ClientIdGoesHere,,,,, \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/files/empty/empty.json b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/app_install_ad_labels_empty.csv similarity index 100% rename from airbyte-integrations/connectors/source-sftp-bulk/integration_tests/files/empty/empty.json rename to airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/app_install_ad_labels_empty.csv diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/app_install_ad_labels_with_cursor_value.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/app_install_ad_labels_with_cursor_value.csv new file mode 100644 index 0000000000000..84c863913d752 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/app_install_ad_labels_with_cursor_value.csv @@ -0,0 +1,6 @@ +Type,Status,Id,Parent Id,Campaign,Ad Group,Client Id,Modified Time,Name,Description,Label,Color +Format Version,,,,,,,,6.0,,, +App Install Ad Label,,-22,-11112,,,ClientIdGoesHere,01/01/2024 12:12:12.02837,,,, +App Install Ad Label,,-22,-11112,,,ClientIdGoesHere,01/02/2024 12:12:12.02837,,,, +App Install Ad Label,,-22,-11112,,,ClientIdGoesHere,01/03/2024 12:12:12.02837,,,, +App Install Ad Label,,-22,-11112,,,ClientIdGoesHere,01/04/2024 12:12:12.02837,,,, \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/app_install_ad_labels_with_state.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/app_install_ad_labels_with_state.csv new file mode 100644 index 0000000000000..453f13997d4cf --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/app_install_ad_labels_with_state.csv @@ -0,0 +1,6 @@ +Type,Status,Id,Parent Id,Campaign,Ad Group,Client Id,Modified Time,Name,Description,Label,Color +Format Version,,,,,,,,6.0,,, +App Install Ad Label,,-22,-11112,,,ClientIdGoesHere,01/28/2024 12:11:12.02837,,,, +App Install Ad Label,,-22,-11112,,,ClientIdGoesHere,01/28/2024 12:12:12.02837,,,, +App Install Ad Label,,-22,-11112,,,ClientIdGoesHere,01/28/2024 12:13:12.02837,,,, +App Install Ad Label,,-22,-11112,,,ClientIdGoesHere,01/29/2024 12:55:12.02837,,,, \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/app_install_ads.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/app_install_ads.csv new file mode 100644 index 0000000000000..7585a9fb65dbf --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/app_install_ads.csv @@ -0,0 +1,3 @@ +Type,Status,Id,Parent Id,Campaign,Ad Group,Client Id,Modified Time,Title,Text,Display Url,Destination Url,Promotion,Device Preference,Name,App Platform,App Id,Final Url,Mobile Final Url,Tracking Template,Final Url Suffix,Custom Parameter +Format Version,,,,,,,,,,,,,,6.0,,,,,,, +App Install Ad,Active,,-1111,ParentCampaignNameGoesHere,AdGroupNameGoesHere,11111111,,Contoso Quick Setup,Find New Customers & Increase Sales!,,,,All,,Android,111111111,FinalUrlGoesHere,,,,{_promoCode}=PROMO1; {_season}=summer \ No newline at end of file diff --git a/airbyte-lib/airbyte_lib/py.typed b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/app_install_ads_empty.csv similarity index 100% rename from airbyte-lib/airbyte_lib/py.typed rename to airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/app_install_ads_empty.csv diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/app_install_ads_with_cursor_value.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/app_install_ads_with_cursor_value.csv new file mode 100644 index 0000000000000..64fc81d0aa9bb --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/app_install_ads_with_cursor_value.csv @@ -0,0 +1,6 @@ +Type,Status,Id,Parent Id,Campaign,Ad Group,Client Id,Modified Time,Title,Text,Display Url,Destination Url,Promotion,Device Preference,Name,App Platform,App Id,Final Url,Mobile Final Url,Tracking Template,Final Url Suffix,Custom Parameter +Format Version,,,,,,,,,,,,,,6.0,,,,,,, +App Install Ad,Active,,-1111,ParentCampaignNameGoesHere,AdGroupNameGoesHere,111111,03/01/2024 12:12:12.02837,Contoso Quick Setup,Find New Customers & Increase Sales!,,,,All,,Android,111111,FinalUrlGoesHere,,,,{_promoCode}=PROMO1; {_season}=summer +App Install Ad,Active,,-1111,ParentCampaignNameGoesHere,AdGroupNameGoesHere,111111,03/01/2024 12:40:12.02837,Contoso Quick Setup,Find New Customers & Increase Sales!,,,,All,,Android,111111,FinalUrlGoesHere,,,,{_promoCode}=PROMO1; {_season}=summer +App Install Ad,Active,,-1111,ParentCampaignNameGoesHere,AdGroupNameGoesHere,111111,03/01/2024 12:45:12.02837,Contoso Quick Setup,Find New Customers & Increase Sales!,,,,All,,Android,111111,FinalUrlGoesHere,,,,{_promoCode}=PROMO1; {_season}=summer +App Install Ad,Active,,-1111,ParentCampaignNameGoesHere,AdGroupNameGoesHere,111111,03/01/2024 12:49:12.02837,Contoso Quick Setup,Find New Customers & Increase Sales!,,,,All,,Android,111111,FinalUrlGoesHere,,,,{_promoCode}=PROMO1; {_season}=summer \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/app_install_ads_with_state.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/app_install_ads_with_state.csv new file mode 100644 index 0000000000000..1c33f677ca4d3 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/app_install_ads_with_state.csv @@ -0,0 +1,6 @@ +Type,Status,Id,Parent Id,Campaign,Ad Group,Client Id,Modified Time,Title,Text,Display Url,Destination Url,Promotion,Device Preference,Name,App Platform,App Id,Final Url,Mobile Final Url,Tracking Template,Final Url Suffix,Custom Parameter +Format Version,,,,,,,,,,,,,,6.0,,,,,,, +App Install Ad,Active,,-1111,ParentCampaignNameGoesHere,AdGroupNameGoesHere,111111,01/01/2024 10:12:12.02837,Contoso Quick Setup,Find New Customers & Increase Sales!,,,,All,,Android,111111,FinalUrlGoesHere,,,,{_promoCode}=PROMO1; {_season}=summer +App Install Ad,Active,,-1111,ParentCampaignNameGoesHere,AdGroupNameGoesHere,111111,01/01/2024 10:39:12.02837,Contoso Quick Setup,Find New Customers & Increase Sales!,,,,All,,Android,111111,FinalUrlGoesHere,,,,{_promoCode}=PROMO1; {_season}=summer +App Install Ad,Active,,-1111,ParentCampaignNameGoesHere,AdGroupNameGoesHere,111111,01/01/2024 10:46:12.02837,Contoso Quick Setup,Find New Customers & Increase Sales!,,,,All,,Android,111111,FinalUrlGoesHere,,,,{_promoCode}=PROMO1; {_season}=summer +App Install Ad,Active,,-1111,ParentCampaignNameGoesHere,AdGroupNameGoesHere,111111,01/01/2024 10:55:12.02837,Contoso Quick Setup,Find New Customers & Increase Sales!,,,,All,,Android,111111,FinalUrlGoesHere,,,,{_promoCode}=PROMO1; {_season}=summer \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/campaign_impression_performance_report_hourly.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/campaign_impression_performance_report_hourly.csv new file mode 100644 index 0000000000000..0ec81559849bc --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/campaign_impression_performance_report_hourly.csv @@ -0,0 +1,25 @@ +"AccountName","AccountNumber","AccountId","TimePeriod","CampaignStatus","CampaignName","CampaignId","CurrencyCode","AdDistribution","Impressions","Clicks","Ctr","AverageCpc","Spend","AveragePosition","Conversions","ConversionRate","CostPerConversion","LowQualityClicks","LowQualityClicksPercent","LowQualityImpressions","LowQualityImpressionsPercent","LowQualityConversions","LowQualityConversionRate","DeviceType","QualityScore","ExpectedCtr","AdRelevance","LandingPageExperience","PhoneImpressions","PhoneCalls","Ptr","Network","Assists","Revenue","ReturnOnAdSpend","CostPerAssist","RevenuePerConversion","RevenuePerAssist","TrackingTemplate","CustomParameters","AccountStatus","LowQualityGeneralClicks","LowQualitySophisticatedClicks","CampaignLabels","FinalUrlSuffix","CampaignType","AbsoluteTopImpressionRatePercent","TopImpressionRatePercent","BaseCampaignId","AllConversions","AllRevenue","AllConversionRate","AllCostPerConversion","AllReturnOnAdSpend","AllRevenuePerConversion","ViewThroughConversions","AverageCpm","ConversionsQualified","LowQualityConversionsQualified","AllConversionsQualified","ViewThroughConversionsQualified","ViewThroughRevenue","VideoViews","ViewThroughRate","AverageCPV","VideoViewsAt25Percent","VideoViewsAt50Percent","VideoViewsAt75Percent","CompletedVideoViews","VideoCompletionRate","TotalWatchTimeInMS","AverageWatchTimePerVideoView","AverageWatchTimePerImpression","Sales","CostPerSale","RevenuePerSale","Installs","CostPerInstall","RevenuePerInstall" +"Airbyte","F149MJ18","180535609","2023-11-11|01","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|02","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|03","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|04","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|05","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|06","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|07","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|08","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|09","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|10","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|11","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|12","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|13","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|14","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|15","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|16","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|17","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|18","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|19","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|20","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|21","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|22","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-11|23","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|00","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/campaign_impression_performance_report_hourly_incremental.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/campaign_impression_performance_report_hourly_incremental.csv new file mode 100644 index 0000000000000..ee93f33d5c2dd --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/campaign_impression_performance_report_hourly_incremental.csv @@ -0,0 +1,25 @@ +"AccountName","AccountNumber","AccountId","TimePeriod","CampaignStatus","CampaignName","CampaignId","CurrencyCode","AdDistribution","Impressions","Clicks","Ctr","AverageCpc","Spend","AveragePosition","Conversions","ConversionRate","CostPerConversion","LowQualityClicks","LowQualityClicksPercent","LowQualityImpressions","LowQualityImpressionsPercent","LowQualityConversions","LowQualityConversionRate","DeviceType","QualityScore","ExpectedCtr","AdRelevance","LandingPageExperience","PhoneImpressions","PhoneCalls","Ptr","Network","Assists","Revenue","ReturnOnAdSpend","CostPerAssist","RevenuePerConversion","RevenuePerAssist","TrackingTemplate","CustomParameters","AccountStatus","LowQualityGeneralClicks","LowQualitySophisticatedClicks","CampaignLabels","FinalUrlSuffix","CampaignType","AbsoluteTopImpressionRatePercent","TopImpressionRatePercent","BaseCampaignId","AllConversions","AllRevenue","AllConversionRate","AllCostPerConversion","AllReturnOnAdSpend","AllRevenuePerConversion","ViewThroughConversions","AverageCpm","ConversionsQualified","LowQualityConversionsQualified","AllConversionsQualified","ViewThroughConversionsQualified","ViewThroughRevenue","VideoViews","ViewThroughRate","AverageCPV","VideoViewsAt25Percent","VideoViewsAt50Percent","VideoViewsAt75Percent","CompletedVideoViews","VideoCompletionRate","TotalWatchTimeInMS","AverageWatchTimePerVideoView","AverageWatchTimePerImpression","Sales","CostPerSale","RevenuePerSale","Installs","CostPerInstall","RevenuePerInstall" +"Airbyte","F149MJ18","180535609","2023-11-12|01","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|02","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|03","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|04","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|05","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|06","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|07","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|08","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|09","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|10","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|11","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|12","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|13","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|14","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|15","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|16","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|17","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|18","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|19","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|20","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|21","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|22","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-12|23","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" +"Airbyte","F149MJ18","180535609","2023-11-13|00","Active","Airbyte test","531016227","USD","Search","1","1","100.00%","1.48","1.48","0.00","0","","","0","0.00%","1","50.00%","0","","Computer","7","2","2","2","0","0","","Microsoft sites and select traffic","0","0.00","0.00","","","","","","Active","0","0","","","Search & content","100.00%","100.00%","531016227","0","0.00","","","0.00","","0","1480.00","0.00","0.00","0.00","","0.00","0","0.00","","0","0","0","0","0.00","0","","0.00","0","","","0","","" diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/campaign_performance_report_hourly.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/campaign_performance_report_hourly.csv new file mode 100644 index 0000000000000..f5aa7513c3f67 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/campaign_performance_report_hourly.csv @@ -0,0 +1,25 @@ +"AccountId","CampaignId","TimePeriod","CurrencyCode","AdDistribution","DeviceType","Network","DeliveredMatchType","DeviceOS","TopVsOther","BidMatchType","AccountName","CampaignName","CampaignType","CampaignStatus","CampaignLabels","Impressions","Clicks","Ctr","Spend","CostPerConversion","QualityScore","AdRelevance","LandingPageExperience","PhoneImpressions","PhoneCalls","Ptr","Assists","ReturnOnAdSpend","CostPerAssist","CustomParameters","ViewThroughConversions","AllCostPerConversion","AllReturnOnAdSpend","AllConversions","ConversionsQualified","AllConversionRate","AllRevenue","AllRevenuePerConversion","AverageCpc","AveragePosition","AverageCpm","Conversions","ConversionRate","LowQualityClicks","LowQualityClicksPercent","LowQualityImpressions","LowQualitySophisticatedClicks","LowQualityConversions","LowQualityConversionRate","Revenue","RevenuePerConversion","RevenuePerAssist","BudgetName","BudgetStatus","BudgetAssociationStatus" +"180535609","531016227","2023-11-11|01","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-11|02","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-11|03","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-11|04","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-11|05","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-11|06","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-11|07","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-11|08","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-11|09","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-11|10","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-11|11","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-11|12","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-11|13","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-11|14","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-11|15","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-11|16","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-11|17","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-11|18","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-11|19","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-11|20","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-11|21","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-11|22","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-11|23","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-12|00","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/campaign_performance_report_hourly_incremental.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/campaign_performance_report_hourly_incremental.csv new file mode 100644 index 0000000000000..170d854d58648 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/campaign_performance_report_hourly_incremental.csv @@ -0,0 +1,25 @@ +"AccountId","CampaignId","TimePeriod","CurrencyCode","AdDistribution","DeviceType","Network","DeliveredMatchType","DeviceOS","TopVsOther","BidMatchType","AccountName","CampaignName","CampaignType","CampaignStatus","CampaignLabels","Impressions","Clicks","Ctr","Spend","CostPerConversion","QualityScore","AdRelevance","LandingPageExperience","PhoneImpressions","PhoneCalls","Ptr","Assists","ReturnOnAdSpend","CostPerAssist","CustomParameters","ViewThroughConversions","AllCostPerConversion","AllReturnOnAdSpend","AllConversions","ConversionsQualified","AllConversionRate","AllRevenue","AllRevenuePerConversion","AverageCpc","AveragePosition","AverageCpm","Conversions","ConversionRate","LowQualityClicks","LowQualityClicksPercent","LowQualityImpressions","LowQualitySophisticatedClicks","LowQualityConversions","LowQualityConversionRate","Revenue","RevenuePerConversion","RevenuePerAssist","BudgetName","BudgetStatus","BudgetAssociationStatus" +"180535609","531016227","2023-11-12|01","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-12|02","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-12|03","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-12|04","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-12|05","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-12|06","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-12|07","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-12|08","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-12|09","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-12|10","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-12|11","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-12|12","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-12|13","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-12|14","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-12|15","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-12|16","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-12|17","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-12|18","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-12|19","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-12|20","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-12|21","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-12|22","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-12|23","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" +"180535609","531016227","2023-11-13|00","USD","Search","Computer","Microsoft sites and select traffic","Phrase","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","Search & content","Active","","1","1","100.00%","1.48","","7","2","2","0","0","","0","0.00","","","0","","0.00","0","0.00","","0.00","","1.48","0.00","1480.00","0","","0","0.00%","0","0","0","","0.00","","","","","Current" diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/geographic_performance_report_hourly.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/geographic_performance_report_hourly.csv new file mode 100644 index 0000000000000..a5d9e37f47800 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/geographic_performance_report_hourly.csv @@ -0,0 +1,25 @@ +"AccountId","CampaignId","AdGroupId","TimePeriod","AccountNumber","Country","State","MetroArea","City","ProximityTargetLocation","Radius","LocationType","MostSpecificLocation","AccountStatus","CampaignStatus","AdGroupStatus","County","PostalCode","LocationId","BaseCampaignId","Goal","GoalType","AbsoluteTopImpressionRatePercent","TopImpressionRatePercent","AllConversionsQualified","Neighborhood","ViewThroughRevenue","CampaignType","AssetGroupId","AssetGroupName","AssetGroupStatus","CurrencyCode","DeliveredMatchType","AdDistribution","DeviceType","Language","Network","DeviceOS","TopVsOther","BidMatchType","AccountName","CampaignName","AdGroupName","Impressions","Clicks","Ctr","Spend","CostPerConversion","Assists","ReturnOnAdSpend","CostPerAssist","ViewThroughConversions","ViewThroughConversionsQualified","AllCostPerConversion","AllReturnOnAdSpend","Conversions","ConversionRate","ConversionsQualified","AverageCpc","AveragePosition","AverageCpm","AllConversions","AllConversionRate","AllRevenue","AllRevenuePerConversion","Revenue","RevenuePerConversion","RevenuePerAssist" +"180535609","531016227","1356799861840328","2023-11-11|01","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|02","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|03","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|04","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|05","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|06","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|07","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|08","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|09","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|10","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|11","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|12","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|13","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|14","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|15","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|16","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|17","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|18","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|19","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|20","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|21","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|22","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-11|23","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|00","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/geographic_performance_report_hourly_incremental.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/geographic_performance_report_hourly_incremental.csv new file mode 100644 index 0000000000000..43f65106a4c42 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/geographic_performance_report_hourly_incremental.csv @@ -0,0 +1,25 @@ +"AccountId","CampaignId","AdGroupId","TimePeriod","AccountNumber","Country","State","MetroArea","City","ProximityTargetLocation","Radius","LocationType","MostSpecificLocation","AccountStatus","CampaignStatus","AdGroupStatus","County","PostalCode","LocationId","BaseCampaignId","Goal","GoalType","AbsoluteTopImpressionRatePercent","TopImpressionRatePercent","AllConversionsQualified","Neighborhood","ViewThroughRevenue","CampaignType","AssetGroupId","AssetGroupName","AssetGroupStatus","CurrencyCode","DeliveredMatchType","AdDistribution","DeviceType","Language","Network","DeviceOS","TopVsOther","BidMatchType","AccountName","CampaignName","AdGroupName","Impressions","Clicks","Ctr","Spend","CostPerConversion","Assists","ReturnOnAdSpend","CostPerAssist","ViewThroughConversions","ViewThroughConversionsQualified","AllCostPerConversion","AllReturnOnAdSpend","Conversions","ConversionRate","ConversionsQualified","AverageCpc","AveragePosition","AverageCpm","AllConversions","AllConversionRate","AllRevenue","AllRevenuePerConversion","Revenue","RevenuePerConversion","RevenuePerAssist" +"180535609","531016227","1356799861840328","2023-11-12|01","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|02","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|03","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|04","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|05","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|06","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|07","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|08","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|09","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|10","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|11","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|12","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|13","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|14","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|15","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|16","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|17","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|18","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|19","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|20","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|21","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|22","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-12|23","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","2023-11-13|00","F149MJ18","Spain","","","","","0","Physical location","Spain","Active","Active","Active","","","170","531016227","","","0.00%","0.00%","0.00","","0.00","Search & content","","","","USD","Exact","Audience","Computer","English","Audience","Windows","Audience network","Broad","Airbyte","Airbyte test","keywords","1","0","0.00%","0.00","","0","","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/keyword_performance_report_hourly.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/keyword_performance_report_hourly.csv new file mode 100644 index 0000000000000..2ad855da877cf --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/keyword_performance_report_hourly.csv @@ -0,0 +1,25 @@ +"AccountId","CampaignId","AdGroupId","KeywordId","Keyword","AdId","TimePeriod","CurrencyCode","DeliveredMatchType","AdDistribution","DeviceType","Language","Network","DeviceOS","TopVsOther","BidMatchType","AccountName","CampaignName","AdGroupName","KeywordStatus","Impressions","Clicks","Ctr","CurrentMaxCpc","Spend","CostPerConversion","QualityScore","ExpectedCtr","AdRelevance","LandingPageExperience","QualityImpact","Assists","ReturnOnAdSpend","CostPerAssist","CustomParameters","FinalAppUrl","Mainline1Bid","MainlineBid","FirstPageBid","FinalUrlSuffix","ViewThroughConversions","ViewThroughConversionsQualified","AllCostPerConversion","AllReturnOnAdSpend","Conversions","ConversionRate","ConversionsQualified","AverageCpc","AveragePosition","AverageCpm","AllConversions","AllConversionRate","AllRevenue","AllRevenuePerConversion","Revenue","RevenuePerConversion","RevenuePerAssist" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-11|01","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-11|02","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-11|03","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-11|04","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-11|05","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-11|06","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-11|07","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-11|08","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-11|09","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-11|10","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-11|11","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-11|12","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-11|13","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-11|14","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-11|15","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-11|16","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-11|17","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-11|18","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-11|19","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-11|20","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-11|21","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-11|22","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-11|23","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-12|00","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/keyword_performance_report_hourly_incremental.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/keyword_performance_report_hourly_incremental.csv new file mode 100644 index 0000000000000..cf408cf45c472 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/keyword_performance_report_hourly_incremental.csv @@ -0,0 +1,25 @@ +"AccountId","CampaignId","AdGroupId","KeywordId","Keyword","AdId","TimePeriod","CurrencyCode","DeliveredMatchType","AdDistribution","DeviceType","Language","Network","DeviceOS","TopVsOther","BidMatchType","AccountName","CampaignName","AdGroupName","KeywordStatus","Impressions","Clicks","Ctr","CurrentMaxCpc","Spend","CostPerConversion","QualityScore","ExpectedCtr","AdRelevance","LandingPageExperience","QualityImpact","Assists","ReturnOnAdSpend","CostPerAssist","CustomParameters","FinalAppUrl","Mainline1Bid","MainlineBid","FirstPageBid","FinalUrlSuffix","ViewThroughConversions","ViewThroughConversionsQualified","AllCostPerConversion","AllReturnOnAdSpend","Conversions","ConversionRate","ConversionsQualified","AverageCpc","AveragePosition","AverageCpm","AllConversions","AllConversionRate","AllRevenue","AllRevenuePerConversion","Revenue","RevenuePerConversion","RevenuePerAssist" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-12|01","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-12|02","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-12|03","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-12|04","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-12|05","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-12|06","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-12|07","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-12|08","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-12|09","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-12|10","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-12|11","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-12|12","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-12|13","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-12|14","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-12|15","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-12|16","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-12|17","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-12|18","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-12|19","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-12|20","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-12|21","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-12|22","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-12|23","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" +"180535609","531016227","1356799861840328","84801135055365","connector","84800390693061","2023-11-13|00","USD","Phrase","Search","Computer","Hebrew","Microsoft sites and select traffic","Windows","Microsoft sites and select traffic - top","Broad","Airbyte","Airbyte test","keywords","Active","1","0","0.00%","2.27","0.00","","4","2","2","1","0","0","","","","","","1.50","0.62","","0","","","","0","","0.00","0.00","0.00","0.00","0","","0.00","","0.00","","" diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/search_query_performance_report_hourly.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/search_query_performance_report_hourly.csv new file mode 100644 index 0000000000000..eef3913f573a6 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/search_query_performance_report_hourly.csv @@ -0,0 +1,25 @@ +"AccountName","AccountNumber","AccountId","TimePeriod","CampaignName","CampaignId","AdGroupName","AdGroupId","AdId","AdType","DestinationUrl","BidMatchType","DeliveredMatchType","CampaignStatus","AdStatus","Impressions","Clicks","Ctr","AverageCpc","Spend","AveragePosition","SearchQuery","Keyword","AdGroupCriterionId","Conversions","ConversionRate","CostPerConversion","Language","KeywordId","Network","TopVsOther","DeviceType","DeviceOS","Assists","Revenue","ReturnOnAdSpend","CostPerAssist","RevenuePerConversion","RevenuePerAssist","AccountStatus","AdGroupStatus","KeywordStatus","CampaignType","CustomerId","CustomerName","AllConversions","AllRevenue","AllConversionRate","AllCostPerConversion","AllReturnOnAdSpend","AllRevenuePerConversion","Goal","GoalType","AbsoluteTopImpressionRatePercent","TopImpressionRatePercent","AverageCpm","ConversionsQualified","AllConversionsQualified" +"Airbyte","F149MJ18","180535609","2023-11-11|01","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-11|02","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-11|03","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-11|04","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-11|05","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-11|06","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-11|07","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-11|08","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-11|09","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-11|10","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-11|11","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-11|12","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-11|13","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-11|14","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-11|15","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-11|16","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-11|17","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-11|18","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-11|19","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-11|20","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-11|21","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-11|22","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-11|23","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-12|00","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/search_query_performance_report_hourly_incremental.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/search_query_performance_report_hourly_incremental.csv new file mode 100644 index 0000000000000..e5b6829005238 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/search_query_performance_report_hourly_incremental.csv @@ -0,0 +1,25 @@ +"AccountName","AccountNumber","AccountId","TimePeriod","CampaignName","CampaignId","AdGroupName","AdGroupId","AdId","AdType","DestinationUrl","BidMatchType","DeliveredMatchType","CampaignStatus","AdStatus","Impressions","Clicks","Ctr","AverageCpc","Spend","AveragePosition","SearchQuery","Keyword","AdGroupCriterionId","Conversions","ConversionRate","CostPerConversion","Language","KeywordId","Network","TopVsOther","DeviceType","DeviceOS","Assists","Revenue","ReturnOnAdSpend","CostPerAssist","RevenuePerConversion","RevenuePerAssist","AccountStatus","AdGroupStatus","KeywordStatus","CampaignType","CustomerId","CustomerName","AllConversions","AllRevenue","AllConversionRate","AllCostPerConversion","AllReturnOnAdSpend","AllRevenuePerConversion","Goal","GoalType","AbsoluteTopImpressionRatePercent","TopImpressionRatePercent","AverageCpm","ConversionsQualified","AllConversionsQualified" +"Airbyte","F149MJ18","180535609","2023-11-12|01","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-12|02","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-12|03","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-12|04","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-12|05","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-12|06","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-12|07","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-12|08","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-12|09","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-12|10","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-12|11","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-12|12","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-12|13","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-12|14","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-12|15","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-12|16","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-12|17","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-12|18","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-12|19","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-12|20","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-12|21","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-12|22","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-12|23","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" +"Airbyte","F149MJ18","180535609","2023-11-13|00","Airbyte test","531016227","keywords","1356799861840328","84800390693061","Responsive search ad","","Broad","Exact","Active","Active","1","1","100.00%","0.05","0.05","0.00","airbyte","Airbyte","","0","0.00%","","German","84801135055370","Microsoft sites and select traffic","Microsoft sites and select traffic - top","Computer","Unknown","0","0.00","0.00","","","","Active","Active","Active","Search & content","251186883","Daxtarity Inc.","0","0.00","0.00%","","0.00","","","","0.00%","100.00%","50.00","0.00","0.00" diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/user_location_performance_report_hourly.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/user_location_performance_report_hourly.csv new file mode 100644 index 0000000000000..f9e6d6efe2d62 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/user_location_performance_report_hourly.csv @@ -0,0 +1,25 @@ +"AccountName","AccountNumber","AccountId","TimePeriod","CampaignName","CampaignId","AdGroupName","AdGroupId","Country","State","MetroArea","CurrencyCode","AdDistribution","Impressions","Clicks","Ctr","AverageCpc","Spend","AveragePosition","ProximityTargetLocation","Radius","Language","City","QueryIntentCountry","QueryIntentState","QueryIntentCity","QueryIntentDMA","BidMatchType","DeliveredMatchType","Network","TopVsOther","DeviceType","DeviceOS","Assists","Conversions","ConversionRate","Revenue","ReturnOnAdSpend","CostPerConversion","CostPerAssist","RevenuePerConversion","RevenuePerAssist","County","PostalCode","QueryIntentCounty","QueryIntentPostalCode","LocationId","QueryIntentLocationId","AllConversions","AllRevenue","AllConversionRate","AllCostPerConversion","AllReturnOnAdSpend","AllRevenuePerConversion","ViewThroughConversions","Goal","GoalType","AbsoluteTopImpressionRatePercent","TopImpressionRatePercent","AverageCpm","ConversionsQualified","AllConversionsQualified","ViewThroughConversionsQualified","Neighborhood","QueryIntentNeighborhood","ViewThroughRevenue","CampaignType","AssetGroupId","AssetGroupName" +"Airbyte","F149MJ18","180535609","2023-11-11|01","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-11|02","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-11|03","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-11|04","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-11|05","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-11|06","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-11|07","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-11|08","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-11|09","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-11|10","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-11|11","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-11|12","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-11|13","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-11|14","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-11|15","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-11|16","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-11|17","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-11|18","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-11|19","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-11|20","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-11|21","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-11|22","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-11|23","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-12|00","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/user_location_performance_report_hourly_incremental.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/user_location_performance_report_hourly_incremental.csv new file mode 100644 index 0000000000000..7e10fb38f3e96 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/user_location_performance_report_hourly_incremental.csv @@ -0,0 +1,25 @@ +"AccountName","AccountNumber","AccountId","TimePeriod","CampaignName","CampaignId","AdGroupName","AdGroupId","Country","State","MetroArea","CurrencyCode","AdDistribution","Impressions","Clicks","Ctr","AverageCpc","Spend","AveragePosition","ProximityTargetLocation","Radius","Language","City","QueryIntentCountry","QueryIntentState","QueryIntentCity","QueryIntentDMA","BidMatchType","DeliveredMatchType","Network","TopVsOther","DeviceType","DeviceOS","Assists","Conversions","ConversionRate","Revenue","ReturnOnAdSpend","CostPerConversion","CostPerAssist","RevenuePerConversion","RevenuePerAssist","County","PostalCode","QueryIntentCounty","QueryIntentPostalCode","LocationId","QueryIntentLocationId","AllConversions","AllRevenue","AllConversionRate","AllCostPerConversion","AllReturnOnAdSpend","AllRevenuePerConversion","ViewThroughConversions","Goal","GoalType","AbsoluteTopImpressionRatePercent","TopImpressionRatePercent","AverageCpm","ConversionsQualified","AllConversionsQualified","ViewThroughConversionsQualified","Neighborhood","QueryIntentNeighborhood","ViewThroughRevenue","CampaignType","AssetGroupId","AssetGroupName" +"Airbyte","F149MJ18","180535609","2023-11-12|01","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-12|02","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-12|03","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-12|04","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-12|05","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-12|06","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-12|07","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-12|08","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-12|09","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-12|10","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-12|11","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-12|12","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-12|13","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-12|14","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-12|15","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-12|16","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-12|17","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-12|18","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-12|19","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-12|20","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-12|21","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-12|22","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-12|23","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" +"Airbyte","F149MJ18","180535609","2023-11-13|00","Airbyte test","531016227","keywords","1356799861840328","Spain","","","USD","Audience","1","0","0.00%","0.00","0.00","0.00","","0","English","","Indonesia","","","","Broad","Exact","Audience","Audience network","Computer","Windows","0","0","","0.00","","","","","","","","","","170","91","0","0.00","","","","","0","","","0.00%","0.00%","0.00","0.00","0.00","","","","0.00","Search & content","","" diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/app_install_ad_labels_state.json b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/app_install_ad_labels_state.json new file mode 100644 index 0000000000000..7c234188ff04b --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/app_install_ad_labels_state.json @@ -0,0 +1,5 @@ +{ + "180535609": { + "Modified Time": "2024-01-29T12:54:12.028+00:00" + } +} diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/app_install_ads_state.json b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/app_install_ads_state.json new file mode 100644 index 0000000000000..9ee221513cd01 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/app_install_ads_state.json @@ -0,0 +1,5 @@ +{ + "180535609": { + "Modified Time": "2024-01-01T09:54:12.028+00:00" + } +} diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/hourly_reports_state.json b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/hourly_reports_state.json new file mode 100644 index 0000000000000..ceac901a39aca --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/hourly_reports_state.json @@ -0,0 +1,5 @@ +{ + "180535609": { + "TimePeriod": "2023-11-12T00:00:00+00:00" + } +} diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/test_base_streams.py b/airbyte-integrations/connectors/source-bing-ads/unit_tests/test_base_streams.py new file mode 100644 index 0000000000000..8f7720fbc5847 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/test_base_streams.py @@ -0,0 +1,75 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from unittest.mock import patch + +import pytest +import source_bing_ads +from source_bing_ads.base_streams import Accounts + + +@patch.object(source_bing_ads.source, "Client") +@pytest.mark.parametrize( + "record, expected", + [ + ( + { + "AccountId": 16253412, + "TaxCertificate": { + "TaxCertificates": {"KeyValuePairOfstringbase64Binary": [{"key": "test key", "value": "test value"}]}, + "Status": "Active", + "TaxCertificateBlobContainerName": "Test Container Name", + }, + }, + { + "AccountId": 16253412, + "TaxCertificate": { + "TaxCertificates": [{"key": "test key", "value": "test value"}], + "Status": "Active", + "TaxCertificateBlobContainerName": "Test Container Name", + }, + }, + ), + ( + { + "AccountId": 16253412, + "TaxCertificate": { + "TaxCertificates": [{"key": "test key", "value": "test value"}], + "Status": "Active", + "TaxCertificateBlobContainerName": "Test Container Name", + }, + }, + { + "AccountId": 16253412, + "TaxCertificate": { + "TaxCertificates": [{"key": "test key", "value": "test value"}], + "Status": "Active", + "TaxCertificateBlobContainerName": "Test Container Name", + }, + }, + ), + ( + { + "AccountId": 16253412, + }, + { + "AccountId": 16253412, + }, + ), + ( + {"AccountId": 16253412, "TaxCertificate": None}, + {"AccountId": 16253412, "TaxCertificate": None}, + ), + ], + ids=[ + "record_with_KeyValuePairOfstringbase64Binary_field", + "record_without_KeyValuePairOfstringbase64Binary_field", + "record_without_TaxCertificate_field", + "record_with_TaxCertificate_is_None", + ], +) +def test_accounts_transform_tax_fields(mocked_client, config, record, expected): + stream = Accounts(mocked_client, config) + actual = stream._transform_tax_fields(record) + assert actual == expected diff --git a/airbyte-integrations/connectors/source-chargebee/acceptance-test-config.yml b/airbyte-integrations/connectors/source-chargebee/acceptance-test-config.yml index 850da25aec466..18575b4a9c14a 100644 --- a/airbyte-integrations/connectors/source-chargebee/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-chargebee/acceptance-test-config.yml @@ -23,26 +23,27 @@ acceptance_tests: timeout_seconds: 1200 empty_streams: - name: "addon" - bypass_reason: "Not available for Product Catalog 2.0 sites." + bypass_reason: "Not available for Product Catalog 2.0 sites. Tested with mocker server tests." - name: "plan" - bypass_reason: "Not available for Product Catalog 2.0 sites." + bypass_reason: "Not available for Product Catalog 2.0 sites. Tested with mocker server tests." - name: "virtual_bank_account" - bypass_reason: "Cannot populate with test data" + bypass_reason: "Tested with mocker server tests." - name: "event" - bypass_reason: "Unstable data. Test data is not persistent." + bypass_reason: "Unstable data. Test data is not persistent. Tested with mocker server tests." - name: "site_migration_detail" - bypass_reason: "Cannot populate with test data." + bypass_reason: "Cannnot populate with test data." - name: "customer" - bypass_reason: "To be tested with integration tests." + bypass_reason: "Tested with mocker server tests." - name: "subscription" - bypass_reason: "To be tested with integration tests." + bypass_reason: "Tested with mocker server tests." - name: "coupon" - bypass_reason: "To be tested with integration tests." + bypass_reason: "Tested with mocker server tests." - name: "hosted_page" - bypass_reason: "To be tested with integration tests." + bypass_reason: "Tested with mocker server tests." expect_records: path: "integration_tests/expected_records.jsonl" exact_order: no + validate_state_messages: False fail_on_extra_columns: true incremental: tests: diff --git a/airbyte-integrations/connectors/source-chargebee/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-chargebee/integration_tests/configured_catalog.json index 8adc9742557e9..1f502d4c73953 100644 --- a/airbyte-integrations/connectors/source-chargebee/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-chargebee/integration_tests/configured_catalog.json @@ -121,11 +121,11 @@ "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, "source_defined_primary_key": [["id"]], - "default_cursor_field": ["created_at"] + "default_cursor_field": ["updated_at"] }, "sync_mode": "incremental", "destination_sync_mode": "append", - "cursor_field": ["created_at"] + "cursor_field": ["updated_at"] }, { "stream": { @@ -227,6 +227,19 @@ "sync_mode": "incremental", "destination_sync_mode": "append", "cursor_field": ["updated_at"] + }, + { + "stream": { + "name": "site_migration_detail", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["migrated_at"], + "source_defined_primary_key": [["entity_id"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append", + "cursor_field": ["migrated_at"] } ] } diff --git a/airbyte-integrations/connectors/source-chargebee/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-chargebee/integration_tests/expected_records.jsonl index 4576e894a548c..738efeb22afa9 100644 --- a/airbyte-integrations/connectors/source-chargebee/integration_tests/expected_records.jsonl +++ b/airbyte-integrations/connectors/source-chargebee/integration_tests/expected_records.jsonl @@ -7,10 +7,6 @@ {"stream": "item", "data": {"id": "cbdemo_advanced", "name": "Advanced", "external_name": "Advanced", "description": "Uncover hidden insights and carry out deeper analytics for your enterprise with this advanced plan.", "status": "active", "resource_version": 1674035640445, "updated_at": 1674035640, "item_family_id": "cbdemo_pf_analytics", "type": "plan", "is_shippable": true, "is_giftable": false, "enabled_for_checkout": true, "enabled_in_portal": true, "item_applicability": "all", "metered": false, "channel": "web", "metadata": {}, "object": "item", "custom_fields": []}, "emitted_at": 1678971136879} {"stream": "item", "data": {"id": "cbdemo_basic", "name": "Basic", "external_name": "Basic", "description": "Starter plan for all your basic reporting requirements.", "status": "active", "resource_version": 1674035673162, "updated_at": 1674035673, "item_family_id": "cbdemo_pf_analytics", "type": "plan", "is_shippable": true, "is_giftable": false, "enabled_for_checkout": true, "enabled_in_portal": true, "item_applicability": "all", "metered": false, "channel": "web", "metadata": {}, "object": "item", "custom_fields": []}, "emitted_at": 1678971136891} {"stream": "item", "data": {"id": "cbdemo_intermediary", "name": "Intermediary", "external_name": "Intermediary", "description": "Smart plan with the right mix of basic and slightly advanced reporting tools.", "status": "active", "resource_version": 1674035686971, "updated_at": 1674035686, "item_family_id": "cbdemo_pf_analytics", "type": "plan", "is_shippable": true, "is_giftable": false, "enabled_for_checkout": true, "enabled_in_portal": true, "item_applicability": "all", "metered": false, "channel": "web", "metadata": {}, "object": "item", "custom_fields": []}, "emitted_at": 1678971136900} -{"stream": "attached_item", "data": {"id": "e49c6ed7-9f1b-4c79-9235-549ce8ae9a1f", "parent_item_id": "cbdemo_advanced", "item_id": "cbdemo_setup_charge", "status": "active", "charge_on_event": "subscription_trial_start", "charge_once": false, "created_at": 1674032839, "resource_version": 1674032839573, "updated_at": 1674032839, "object": "attached_item", "custom_fields": []}, "emitted_at": 1676569205846} -{"stream": "attached_item", "data": {"id": "25976ccf-8e44-4fce-8eab-2a1658eb0a2b", "parent_item_id": "cbdemo_advanced", "item_id": "cbdemo_analytics_additionalusers", "type": "mandatory", "status": "active", "quantity": 1, "created_at": 1674032827, "resource_version": 1674032827801, "updated_at": 1674032827, "object": "attached_item", "custom_fields": []}, "emitted_at": 1676569205849} -{"stream": "attached_item", "data": {"id": "69b451b1-e00a-4522-ab6f-027586d24b85", "parent_item_id": "cbdemo_basic", "item_id": "cbdemo_setup_charge", "status": "active", "charge_on_event": "subscription_creation", "charge_once": false, "created_at": 1674032880, "resource_version": 1674032880261, "updated_at": 1674032880, "object": "attached_item", "custom_fields": []}, "emitted_at": 1676569206020} -{"stream": "item_price", "data": {"id": "Test-Plan-1-USD-Daily", "name": "Test Plan 1 USD Daily", "item_family_id": "cbdemo_pf_analytics", "item_id": "Test-Plan-1", "description": "Test", "status": "active", "external_name": "Test Plan 1", "pricing_model": "flat_fee", "price": 1000, "period": 1, "currency_code": "USD", "period_unit": "day", "shipping_period": 1, "shipping_period_unit": "day", "free_quantity": 0, "channel": "web", "resource_version": 1674036400224, "updated_at": 1674036400, "created_at": 1674036400, "invoice_notes": "Test", "is_taxable": true, "item_type": "plan", "show_description_in_invoices": true, "show_description_in_quotes": true, "object": "item_price", "custom_fields": []}, "emitted_at": 1678971392306} {"stream": "item_price", "data": {"id": "Test-Gift-Plan-1-USD-Daily", "name": "Test Gift Plan 1 USD Daily", "item_family_id": "cbdemo_pf_crm", "item_id": "Test-Gift-Plan-1", "description": "Test gift", "status": "active", "external_name": "Test Gift Plan 1", "pricing_model": "flat_fee", "price": 1500, "period": 1, "currency_code": "USD", "period_unit": "day", "shipping_period": 1, "shipping_period_unit": "day", "billing_cycles": 1, "free_quantity": 0, "channel": "web", "resource_version": 1674055340456, "updated_at": 1674055340, "created_at": 1674055340, "invoice_notes": "Test gift", "is_taxable": true, "item_type": "plan", "show_description_in_invoices": true, "show_description_in_quotes": true, "object": "item_price", "custom_fields": []}, "emitted_at": 1678971392312} {"stream": "item_price", "data": {"id": "Test-Gift-Plan-1-USD-Weekly", "name": "Test Gift Plan 1 USD Weekly", "item_family_id": "cbdemo_pf_crm", "item_id": "Test-Gift-Plan-1", "description": "Test", "status": "active", "external_name": "Test Gift Plan 1", "pricing_model": "flat_fee", "price": 20000, "period": 1, "currency_code": "USD", "period_unit": "week", "shipping_period": 1, "shipping_period_unit": "week", "billing_cycles": 1, "free_quantity": 0, "channel": "web", "resource_version": 1674056134136, "updated_at": 1674056134, "created_at": 1674056134, "is_taxable": true, "item_type": "plan", "show_description_in_invoices": true, "show_description_in_quotes": true, "object": "item_price", "custom_fields": []}, "emitted_at": 1678971392319} {"stream": "payment_source", "data": {"id": "pm_Azz5jBTTJ96QflvC", "updated_at": 1674057604, "resource_version": 1674057604123, "deleted": false, "object": "payment_source", "customer_id": "Azz5jBTTJ96Mjlv5", "type": "card", "reference_id": "tok_Azz5jBTTJ96QSlvA", "status": "valid", "gateway": "chargebee", "gateway_account_id": "gw_16CKmRSb2oGddH4", "ip_address": "85.209.47.207", "created_at": 1674057604, "card": {"iin": "411111", "last4": "1111", "funding_type": "credit", "expiry_month": 12, "expiry_year": 2029, "masked_number": "************1111", "object": "card", "brand": "visa"}, "custom_fields": []}, "emitted_at": 1678971627515} @@ -45,4 +41,7 @@ {"stream": "item_family", "data": {"id": "test-4", "name": "test item family 4", "status": "active", "resource_version": 1705960880668, "updated_at": 1705960880, "object": "item_family", "custom_fields": []}, "emitted_at": 1705960929497} {"stream": "item_family", "data": {"id": "test-3", "name": "test item family 3", "status": "active", "resource_version": 1705956309899, "updated_at": 1705956309, "object": "item_family", "custom_fields": []}, "emitted_at": 1705960929501} {"stream": "item_family", "data": {"id": "test-2", "name": "test item family 2", "status": "active", "resource_version": 1705956286577, "updated_at": 1705956286, "object": "item_family", "custom_fields": []}, "emitted_at": 1705960929506} -{"stream": "item_family", "data": {"id": "test-1", "name": "test item family 1", "status": "active", "resource_version": 1705956260965, "updated_at": 1705956260, "object": "item_family", "custom_fields": []}, "emitted_at": 1705960929509} \ No newline at end of file +{"stream": "item_family", "data": {"id": "test-1", "name": "test item family 1", "status": "active", "resource_version": 1705956260965, "updated_at": 1705956260, "object": "item_family", "custom_fields": []}, "emitted_at": 1705960929509} +{"stream": "attached_item", "data": {"id": "25976ccf-8e44-4fce-8eab-2a1658eb0a2b", "parent_item_id": "cbdemo_advanced", "item_id": "cbdemo_analytics_additionalusers", "type": "mandatory", "status": "active", "quantity": 1, "created_at": 1674032827, "resource_version": 1674032827801, "updated_at": 1674032827, "object": "attached_item", "custom_fields": []}, "emitted_at": 1708468907178} +{"stream": "attached_item", "data": {"id": "e49c6ed7-9f1b-4c79-9235-549ce8ae9a1f", "parent_item_id": "cbdemo_advanced", "item_id": "cbdemo_setup_charge", "status": "active", "charge_on_event": "subscription_trial_start", "charge_once": false, "created_at": 1674032839, "resource_version": 1674032839573, "updated_at": 1674032839, "object": "attached_item", "custom_fields": []}, "emitted_at": 1708468907177} +{"stream": "attached_item", "data": {"id": "69b451b1-e00a-4522-ab6f-027586d24b85", "parent_item_id": "cbdemo_basic", "item_id": "cbdemo_setup_charge", "status": "active", "charge_on_event": "subscription_creation", "charge_once": false, "created_at": 1674032880, "resource_version": 1674032880261, "updated_at": 1674032880, "object": "attached_item", "custom_fields": []}, "emitted_at": 1708468907297} diff --git a/airbyte-integrations/connectors/source-chargebee/integration_tests/future_state.json b/airbyte-integrations/connectors/source-chargebee/integration_tests/future_state.json index 24299a54ae846..73696eb5b8e58 100644 --- a/airbyte-integrations/connectors/source-chargebee/integration_tests/future_state.json +++ b/airbyte-integrations/connectors/source-chargebee/integration_tests/future_state.json @@ -48,6 +48,13 @@ "stream_descriptor": { "name": "item" } } }, + { + "type": "STREAM", + "stream": { + "stream_state": { "updated_at": 2147483647 }, + "stream_descriptor": { "name": "attached_item" } + } + }, { "type": "STREAM", "stream": { @@ -124,5 +131,19 @@ "stream_state": { "updated_at": 2147483647 }, "stream_descriptor": { "name": "differential_price" } } + }, + { + "type": "STREAM", + "stream": { + "stream_state": { "migrated_at": 2147483647 }, + "stream_descriptor": { "name": "site_migration_detail" } + } + }, + { + "type": "STREAM", + "stream": { + "stream_state": { "occurred_at": 2147483647 }, + "stream_descriptor": { "name": "event" } + } } ] diff --git a/airbyte-integrations/connectors/source-chargebee/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-chargebee/integration_tests/sample_state.json index f8a43e7e1d9ab..49e8b43ee928a 100644 --- a/airbyte-integrations/connectors/source-chargebee/integration_tests/sample_state.json +++ b/airbyte-integrations/connectors/source-chargebee/integration_tests/sample_state.json @@ -48,6 +48,13 @@ "stream_descriptor": { "name": "item" } } }, + { + "type": "STREAM", + "stream": { + "stream_state": { "updated_at": 1625596058 }, + "stream_descriptor": { "name": "attached_item" } + } + }, { "type": "STREAM", "stream": { diff --git a/airbyte-integrations/connectors/source-chargebee/metadata.yaml b/airbyte-integrations/connectors/source-chargebee/metadata.yaml index 82b51e27cfae8..5583661e7c6e0 100644 --- a/airbyte-integrations/connectors/source-chargebee/metadata.yaml +++ b/airbyte-integrations/connectors/source-chargebee/metadata.yaml @@ -1,6 +1,6 @@ data: ab_internal: - ql: 200 + ql: 400 sl: 200 allowedHosts: hosts: @@ -10,12 +10,13 @@ data: connectorSubtype: api connectorType: source definitionId: 686473f1-76d9-4994-9cc7-9b13da46147c - dockerImageTag: 0.4.0 + dockerImageTag: 0.5.0 dockerRepository: airbyte/source-chargebee documentationUrl: https://docs.airbyte.com/integrations/sources/chargebee githubIssueLabel: source-chargebee icon: chargebee.svg license: MIT + maxSecondsBetweenMessages: 300 name: Chargebee remoteRegistries: pypi: diff --git a/airbyte-integrations/connectors/source-chargebee/poetry.lock b/airbyte-integrations/connectors/source-chargebee/poetry.lock index 4f28624f1368a..ea8b5d73f21d7 100644 --- a/airbyte-integrations/connectors/source-chargebee/poetry.lock +++ b/airbyte-integrations/connectors/source-chargebee/poetry.lock @@ -2,39 +2,38 @@ [[package]] name = "airbyte-cdk" -version = "0.58.1" +version = "0.77.2" description = "A framework for writing Airbyte Connectors." optional = false -python-versions = ">=3.8" +python-versions = "<4.0,>=3.9" files = [ - {file = "airbyte-cdk-0.58.1.tar.gz", hash = "sha256:0725c63184c37c2caf89faa2c9972e759d73877d03715b9e3eb56a132a6764a8"}, - {file = "airbyte_cdk-0.58.1-py3-none-any.whl", hash = "sha256:605299228e8838cbe6ea39c6d89c38c9674f3997e7b9b77f1dfb7577d84e0874"}, + {file = "airbyte_cdk-0.77.2-py3-none-any.whl", hash = "sha256:6dffbe0c4b3454a5cdd20525b4f1e9cfef2e80c005b6b30473fc5bf6f75af64e"}, + {file = "airbyte_cdk-0.77.2.tar.gz", hash = "sha256:84aeb27862a18e135c7bc3a5dfc363037665d428e7495e8824673f853adcca70"}, ] [package.dependencies] airbyte-protocol-models = "0.5.1" backoff = "*" cachetools = "*" -Deprecated = ">=1.2,<2.0" +Deprecated = ">=1.2,<1.3" dpath = ">=2.0.1,<2.1.0" genson = "1.2.2" isodate = ">=0.6.1,<0.7.0" Jinja2 = ">=3.1.2,<3.2.0" -jsonref = ">=0.2,<1.0" +jsonref = ">=0.2,<0.3" jsonschema = ">=3.2.0,<3.3.0" pendulum = "<3.0.0" pydantic = ">=1.10.8,<2.0.0" pyrate-limiter = ">=3.1.0,<3.2.0" python-dateutil = "*" -PyYAML = ">=6.0.1" +PyYAML = ">=6.0.1,<7.0.0" requests = "*" -requests-cache = "*" +requests_cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] [[package]] @@ -104,13 +103,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -301,6 +300,20 @@ files = [ [package.extras] test = ["pytest (>=6)"] +[[package]] +name = "freezegun" +version = "1.4.0" +description = "Let your Python tests travel through time" +optional = false +python-versions = ">=3.7" +files = [ + {file = "freezegun-1.4.0-py3-none-any.whl", hash = "sha256:55e0fc3c84ebf0a96a5aa23ff8b53d70246479e9a68863f1fcac5a3e52f19dd6"}, + {file = "freezegun-1.4.0.tar.gz", hash = "sha256:10939b0ba0ff5adaecf3b06a5c2f73071d9678e507c5eaedb23c761d56ac774b"}, +] + +[package.dependencies] +python-dateutil = ">=2.7" + [[package]] name = "genson" version = "1.2.2" @@ -366,13 +379,13 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jsonref" -version = "0.3.0" -description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." +version = "0.2" +description = "An implementation of JSON Reference for Python" optional = false -python-versions = ">=3.3,<4.0" +python-versions = "*" files = [ - {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, - {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, ] [[package]] @@ -467,13 +480,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -685,30 +698,30 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xm [[package]] name = "pytest-mock" -version = "3.12.0" +version = "3.14.0" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, - {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, ] [package.dependencies] -pytest = ">=5.0" +pytest = ">=6.2.5" [package.extras] dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -808,13 +821,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -826,50 +839,48 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" -version = "1.11.0" +version = "1.12.0" description = "Mock out responses from the requests package" optional = false python-versions = "*" files = [ - {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"}, - {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"}, + {file = "requests-mock-1.12.0.tar.gz", hash = "sha256:4e34f2a2752f0b78397fb414526605d95fcdeab021ac1f26d18960e7eb41f6a8"}, + {file = "requests_mock-1.12.0-py2.py3-none-any.whl", hash = "sha256:4f6fdf956de568e0bac99eee4ad96b391c602e614cc0ad33e7f5c72edd699e70"}, ] [package.dependencies] -requests = ">=2.3,<3" -six = "*" +requests = ">=2.22,<3" [package.extras] fixture = ["fixtures"] -test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] [[package]] name = "setuptools" -version = "69.1.0" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -895,13 +906,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -920,13 +931,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1031,4 +1042,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "71e2453d758f0222900531815381ff9c55f1d1a6a68f4c64b00c16de6727c8da" +content-hash = "af61ac8416c3cd7be48ea49deab390ed2103fd41bf434cd601ceb79e8bc0916d" diff --git a/airbyte-integrations/connectors/source-chargebee/pyproject.toml b/airbyte-integrations/connectors/source-chargebee/pyproject.toml index a3926727b9ff4..1d8763712c02d 100644 --- a/airbyte-integrations/connectors/source-chargebee/pyproject.toml +++ b/airbyte-integrations/connectors/source-chargebee/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "0.4.0" +version = "0.5.0" name = "source-chargebee" description = "Source implementation for Chargebee." authors = [ "Airbyte ",] @@ -17,7 +17,7 @@ include = "source_chargebee" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "==0.58.1" +airbyte-cdk = "^0" [tool.poetry.scripts] source-chargebee = "source_chargebee.run:run" @@ -26,3 +26,4 @@ source-chargebee = "source_chargebee.run:run" requests-mock = "^1.9.3" pytest-mock = "^3.6.1" pytest = "^6.2" +freezegun = "^1.4.0" diff --git a/airbyte-integrations/connectors/source-chargebee/source_chargebee/components.py b/airbyte-integrations/connectors/source-chargebee/source_chargebee/components.py index 9e71e8c97066d..2e879f3b84b04 100644 --- a/airbyte-integrations/connectors/source-chargebee/source_chargebee/components.py +++ b/airbyte-integrations/connectors/source-chargebee/source_chargebee/components.py @@ -47,18 +47,20 @@ def transform( ... } """ - record["custom_fields"] = [{"name": k, "value": v} for k, v in record.items() if k.startswith("cf_")] + record["custom_fields"] = [{"name": k, "value": record.pop(k)} for k in record.copy() if k.startswith("cf_")] return record @dataclass class IncrementalSingleSliceCursor(Cursor): + cursor_field: Union[InterpolatedString, str] config: Config parameters: InitVar[Mapping[str, Any]] def __post_init__(self, parameters: Mapping[str, Any]): self._state = {} + self._cursor = None self.cursor_field = InterpolatedString.create(self.cursor_field, parameters=parameters) def get_request_params( @@ -113,15 +115,29 @@ def set_initial_state(self, stream_state: StreamState): if cursor_value: self._state[cursor_field] = cursor_value self._state["prior_state"] = self._state.copy() + self._cursor = cursor_value + + def observe(self, stream_slice: StreamSlice, record: Record) -> None: + """ + Register a record with the cursor; the cursor instance can then use it to manage the state of the in-progress stream read. + + :param stream_slice: The current slice, which may or may not contain the most recently observed record + :param record: the most recently-read record, which the cursor can use to update the stream state. Outwardly-visible changes to the + stream state may need to be deferred depending on whether the source reliably orders records by the cursor field. + """ + record_cursor_value = record.get(self.cursor_field.eval(self.config)) + if not record_cursor_value: + return - def close_slice(self, stream_slice: StreamSlice, most_recent_record: Optional[Record]) -> None: - latest_record = self._state if self.is_greater_than_or_equal(self._state, most_recent_record) else most_recent_record - if latest_record: - cursor_field = self.cursor_field.eval(self.config) - self._state[cursor_field] = latest_record[cursor_field] + if self.is_greater_than_or_equal(record, self._state): + self._cursor = record_cursor_value + + def close_slice(self, stream_slice: StreamSlice) -> None: + cursor_field = self.cursor_field.eval(self.config) + self._state[cursor_field] = self._cursor def stream_slices(self) -> Iterable[Mapping[str, Any]]: - yield {} + yield StreamSlice(partition={}, cursor_slice={}) def should_be_synced(self, record: Record) -> bool: """ diff --git a/airbyte-integrations/connectors/source-chargebee/source_chargebee/manifest.yaml b/airbyte-integrations/connectors/source-chargebee/source_chargebee/manifest.yaml index 9b4fd4f7f2790..bbc097629c145 100644 --- a/airbyte-integrations/connectors/source-chargebee/source_chargebee/manifest.yaml +++ b/airbyte-integrations/connectors/source-chargebee/source_chargebee/manifest.yaml @@ -168,8 +168,6 @@ definitions: requester: $ref: "#/definitions/base_incremental_stream/retriever/requester" request_parameters: - sort_by[asc]: created_at - include_deleted: "true" updated_at[between]: "'[{{stream_slice['start_time']}}, {{stream_slice['end_time']}}]'" $parameters: name: "coupon" @@ -198,8 +196,7 @@ definitions: requester: $ref: "#/definitions/base_incremental_stream/retriever/requester" request_parameters: - sort_by[asc]: "occurred_at" - include_deleted: "true" + sort_by[asc]: occurred_at occurred_at[between]: "'[{{stream_slice['start_time']}}, {{stream_slice['end_time']}}]'" $parameters: name: "event" @@ -270,7 +267,6 @@ definitions: $ref: "#/definitions/base_incremental_stream/retriever/requester" request_parameters: sort_by[asc]: created_at - include_deleted: "true" created_at[between]: "'[{{stream_slice['start_time']}}, {{stream_slice['end_time']}}]'" $parameters: name: "promotional_credit" @@ -363,7 +359,7 @@ definitions: record_selector: $ref: "#/definitions/nested_selector" record_filter: - condition: "{{ record['updated_at'] >= ( stream_state.get('prior_state', {}).get('updated_at', 0) if stream_state else stream_slice.get('prior_state', {}).get('updated_at', 0) ) }}" + condition: "{{ record['migrated_at'] >= ( stream_state.get('prior_state', {}).get('migrated_at', 0) if stream_state else stream_slice.get('prior_state', {}).get('migrated_at', 0) ) }}" $parameters: name: "site_migration_detail" primary_key: "entity_id" @@ -377,7 +373,6 @@ definitions: $ref: "#/definitions/retriever/requester" request_parameters: sort_by[asc]: created_at - include_deleted: "true" created_at[between]: "'[{{stream_slice['start_time']}}, {{stream_slice['end_time']}}]'" incremental_sync: $ref: "#/definitions/date_stream_slicer" diff --git a/airbyte-integrations/connectors/source-chargebee/source_chargebee/schemas/coupon.json b/airbyte-integrations/connectors/source-chargebee/source_chargebee/schemas/coupon.json index 512860182a479..565859eab9aa4 100644 --- a/airbyte-integrations/connectors/source-chargebee/source_chargebee/schemas/coupon.json +++ b/airbyte-integrations/connectors/source-chargebee/source_chargebee/schemas/coupon.json @@ -113,6 +113,23 @@ } } }, + "coupon_constraints": { + "type": ["array", "null"], + "items": { + "type": ["object", "null"], + "properties": { + "entity_type": { + "type": ["string", "null"] + }, + "type": { + "type": ["string", "null"] + }, + "vlaue": { + "type": ["string", "null"] + } + } + } + }, "custom_fields": { "$ref": "_definitions.json#/definitions/custom_fields" } diff --git a/airbyte-integrations/connectors/source-chargebee/source_chargebee/schemas/customer.json b/airbyte-integrations/connectors/source-chargebee/source_chargebee/schemas/customer.json index 74ea11e349887..6f37b98d81ce6 100644 --- a/airbyte-integrations/connectors/source-chargebee/source_chargebee/schemas/customer.json +++ b/airbyte-integrations/connectors/source-chargebee/source_chargebee/schemas/customer.json @@ -88,6 +88,9 @@ "billing_day_of_week_mode": { "type": ["string", "null"] }, + "billing_month": { + "type": ["integer", "null"] + }, "pii_cleared": { "type": ["string", "null"] }, diff --git a/airbyte-integrations/connectors/source-chargebee/source_chargebee/schemas/transaction.json b/airbyte-integrations/connectors/source-chargebee/source_chargebee/schemas/transaction.json index 3a54a5e2782c4..6aedd8352cac9 100644 --- a/airbyte-integrations/connectors/source-chargebee/source_chargebee/schemas/transaction.json +++ b/airbyte-integrations/connectors/source-chargebee/source_chargebee/schemas/transaction.json @@ -66,9 +66,6 @@ "error_code": { "type": ["string", "null"] }, - "error-text": { - "type": ["string", "null"] - }, "voided_at": { "type": ["integer", "null"] }, @@ -126,6 +123,9 @@ "error_text": { "type": ["string", "null"] }, + "error_detail": { + "type": ["string", "null"] + }, "payment_method_details": { "type": ["string", "null"] }, diff --git a/airbyte-lib/tests/integration_tests/__init__.py b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/__init__.py similarity index 100% rename from airbyte-lib/tests/integration_tests/__init__.py rename to airbyte-integrations/connectors/source-chargebee/unit_tests/integration/__init__.py diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/config.py b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/config.py new file mode 100644 index 0000000000000..85f0de928865a --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/config.py @@ -0,0 +1,33 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from datetime import datetime +from typing import Any, Dict + + +class ConfigBuilder: + def __init__(self) -> None: + self._config: Dict[str, Any] = { + "site": "ConfigBuilder default site", + "site_api_key": "ConfigBuilder default site api key", + "start_date": "2023-01-01T06:57:44Z", + "product_catalog": "2.0" + } + + def with_site(self, site: str) -> "ConfigBuilder": + self._config["site"] = site + return self + + def with_site_api_key(self, site_api_key: str) -> "ConfigBuilder": + self._config["site_api_key"] = site_api_key + return self + + def with_start_date(self, start_datetime: datetime) -> "ConfigBuilder": + self._config["start_date"] = start_datetime.strftime("%Y-%m-%dT%H:%M:%SZ") + return self + + def with_product_catalog(self, product_catalog: str) -> "ConfigBuilder": + self._config["product_catalog"] = product_catalog or "2.0" + return self + + def build(self) -> Dict[str, Any]: + return self._config \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/pagination.py b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/pagination.py new file mode 100644 index 0000000000000..0cf9d9d5a5bcd --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/pagination.py @@ -0,0 +1,11 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from typing import Any, Dict + +from airbyte_cdk.test.mock_http.response_builder import PaginationStrategy + + +class ChargebeePaginationStrategy(PaginationStrategy): + @staticmethod + def update(response: Dict[str, Any]) -> None: + response["next_offset"] = "[1707076198000,57873868]" \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/request_builder.py b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/request_builder.py new file mode 100644 index 0000000000000..f9b15d847be5f --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/request_builder.py @@ -0,0 +1,127 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +import base64 +from datetime import datetime +from typing import List, Optional, Union + +from airbyte_cdk.test.mock_http import HttpRequest +from airbyte_cdk.test.mock_http.request import ANY_QUERY_PARAMS + + +class ChargebeeRequestBuilder: + + @classmethod + def addon_endpoint(cls, site: str, site_api_key: str) -> "ChargebeeRequestBuilder": + return cls("addons", site, site_api_key) + + @classmethod + def plan_endpoint(cls, site: str, site_api_key: str) -> "ChargebeeRequestBuilder": + return cls("plans", site, site_api_key) + + @classmethod + def virtual_bank_account_endpoint(cls, site: str, site_api_key: str) -> "ChargebeeRequestBuilder": + return cls("virtual_bank_accounts", site, site_api_key) + + @classmethod + def event_endpoint(cls, site: str, site_api_key: str) -> "ChargebeeRequestBuilder": + return cls("events", site, site_api_key) + + @classmethod + def site_migration_detail_endpoint(cls, site: str, site_api_key: str) -> "ChargebeeRequestBuilder": + return cls("site_migration_details", site, site_api_key) + + @classmethod + def customer_endpoint(cls, site: str, site_api_key: str) -> "ChargebeeRequestBuilder": + return cls("customers", site, site_api_key) + + @classmethod + def coupon_endpoint(cls, site: str, site_api_key: str) -> "ChargebeeRequestBuilder": + return cls("coupons", site, site_api_key) + + @classmethod + def subscription_endpoint(cls, site: str, site_api_key: str) -> "ChargebeeRequestBuilder": + return cls("subscriptions", site, site_api_key) + + @classmethod + def hosted_page_endpoint(cls, site: str, site_api_key: str) -> "ChargebeeRequestBuilder": + return cls("hosted_pages", site, site_api_key) + + def __init__(self, resource: str, site: str, site_api_key: str) -> "ChargebeeRequestBuilder": + self._resource: str = resource + self._site: str = site + self._site_api_key: str = site_api_key + self._any_query_params: bool = False + self._include_deleted: Optional[str] = None + self._created_at_btw: Optional[str] = None + self._updated_at_btw: Optional[str] = None + self._occurred_at_btw: Optional[str] = None + self._sort_by_asc: Optional[str] = None + self._sort_by_desc: Optional[str] = None + self._offset: Optional[str] = None + self._limit: Optional[str] = None + + def with_any_query_params(self) -> "ChargebeeRequestBuilder": + self._any_query_params = True + return self + + def with_include_deleted(self, include_deleted: bool) -> "ChargebeeRequestBuilder": + self._include_deleted = str(include_deleted).lower() + return self + + def with_created_at_btw(self, created_at_btw: List[int]) -> "ChargebeeRequestBuilder": + self._created_at_btw = f'{created_at_btw}' + return self + + def with_updated_at_btw(self, updated_at_btw: List[int]) -> "ChargebeeRequestBuilder": + self._updated_at_btw = f"{updated_at_btw}" + return self + + def with_occurred_at_btw(self, occurred_at_btw: List[int]) -> "ChargebeeRequestBuilder": + self._occurred_at_btw = f"{occurred_at_btw}" + return self + + def with_sort_by_asc(self, sort_by_asc: str) -> "ChargebeeRequestBuilder": + self._sort_by_asc = sort_by_asc + return self + + def with_sort_by_desc(self, sort_by_desc: str) -> "ChargebeeRequestBuilder": + self._sort_by_desc = sort_by_desc + return self + + def with_offset(self, offset: str) -> "ChargebeeRequestBuilder": + self._offset = offset + return self + + def with_limit(self, limit: int) -> "ChargebeeRequestBuilder": + self._limit = limit + return self + + def build(self) -> HttpRequest: + query_params= {} + if self._sort_by_asc: + query_params["sort_by[asc]"] = self._sort_by_asc + if self._sort_by_desc: + query_params["sort_by[desc]"] = self._sort_by_desc + if self._include_deleted: + query_params["include_deleted"] = self._include_deleted + if self._created_at_btw: + query_params["created_at[between]"] = self._created_at_btw + if self._updated_at_btw: + query_params["updated_at[between]"] = self._updated_at_btw + if self._occurred_at_btw: + query_params["occurred_at[between]"] = self._occurred_at_btw + if self._offset: + query_params["offset"] = self._offset + if self._limit: + query_params["limit"] = self._limit + + if self._any_query_params: + if query_params: + raise ValueError(f"Both `any_query_params` and {list(query_params.keys())} were configured. Provide only one of none but not both.") + query_params = ANY_QUERY_PARAMS + + return HttpRequest( + url=f"https://{self._site}.chargebee.com/api/v2/{self._resource}", + query_params=query_params, + headers={"Authorization": f"Basic {base64.b64encode((str(self._site_api_key) + ':').encode('utf-8')).decode('utf-8')}"}, + ) \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/response_builder.py b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/response_builder.py new file mode 100644 index 0000000000000..f9163b6be3a87 --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/response_builder.py @@ -0,0 +1,14 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +import json +from typing import Mapping + +from airbyte_cdk.test.mock_http import HttpResponse +from airbyte_cdk.test.mock_http.response_builder import find_template + + +def a_response_with_status(status_code: int) -> HttpResponse: + return HttpResponse(json.dumps(find_template(str(status_code), __file__)), status_code) + +def a_response_with_status_and_header(status_code: int, header: Mapping[str, str]) -> HttpResponse: + return HttpResponse(json.dumps(find_template(str(status_code), __file__)), status_code, header) \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_addon.py b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_addon.py new file mode 100644 index 0000000000000..21ec3a51ac9a2 --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_addon.py @@ -0,0 +1,206 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from datetime import datetime, timedelta, timezone +from typing import Any, Dict, Optional +from unittest import TestCase + +import freezegun +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, read +from airbyte_cdk.test.mock_http import HttpMocker +from airbyte_cdk.test.mock_http.response_builder import ( + FieldPath, + HttpResponseBuilder, + NestedPath, + RecordBuilder, + create_record_builder, + create_response_builder, + find_template, +) +from airbyte_cdk.test.state_builder import StateBuilder +from airbyte_protocol.models import AirbyteStateBlob, ConfiguredAirbyteCatalog, FailureType, StreamDescriptor, SyncMode +from source_chargebee import SourceChargebee + +from .config import ConfigBuilder +from .pagination import ChargebeePaginationStrategy +from .request_builder import ChargebeeRequestBuilder +from .response_builder import a_response_with_status, a_response_with_status_and_header + +_STREAM_NAME = "addon" +_SITE = "test-site" +_SITE_API_KEY = "test-api-key" +_PRODUCT_CATALOG = "1.0" +_PRIMARY_KEY = "id" +_CURSOR_FIELD = "updated_at" +_NO_STATE = {} +_NOW = datetime.now(timezone.utc) + +def _a_request() -> ChargebeeRequestBuilder: + return ChargebeeRequestBuilder.addon_endpoint(_SITE, _SITE_API_KEY) + +def _config() -> ConfigBuilder: + return ConfigBuilder().with_site(_SITE).with_site_api_key(_SITE_API_KEY).with_product_catalog(_PRODUCT_CATALOG) + +def _catalog(sync_mode: SyncMode) -> ConfiguredAirbyteCatalog: + return CatalogBuilder().with_stream(_STREAM_NAME, sync_mode).build() + +def _source() -> SourceChargebee: + return SourceChargebee() + +def _a_record() -> RecordBuilder: + return create_record_builder( + find_template(_STREAM_NAME, __file__), + FieldPath("list"), + record_id_path=NestedPath([_STREAM_NAME, _PRIMARY_KEY]), + record_cursor_path=NestedPath([_STREAM_NAME, _CURSOR_FIELD]) + ) + +def _a_response() -> HttpResponseBuilder: + return create_response_builder( + find_template(_STREAM_NAME, __file__), + FieldPath("list"), + pagination_strategy=ChargebeePaginationStrategy() + ) + +def _read( + config_builder: ConfigBuilder, + sync_mode: SyncMode, + state: Optional[Dict[str, Any]] = None, + expecting_exception: bool = False +) -> EntrypointOutput: + catalog = _catalog(sync_mode) + config = config_builder.build() + return read(_source(), config, catalog, state, expecting_exception) + +@freezegun.freeze_time(_NOW.isoformat()) +class FullRefreshTest(TestCase): + + def setUp(self) -> None: + self._now = _NOW + self._now_in_seconds = int(self._now.timestamp()) + self._start_date = _NOW - timedelta(days=28) + self._start_date_in_seconds = int(self._start_date.timestamp()) + + @staticmethod + def _read(config: ConfigBuilder, expecting_exception: bool = False) -> EntrypointOutput: + return _read(config, SyncMode.full_refresh, expecting_exception=expecting_exception) + + @HttpMocker() + def test_given_valid_response_records_are_extracted_and_returned(self, http_mocker: HttpMocker) -> None: + # Tests simple read and record extraction + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record()).with_record(_a_record()).build() + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 2 + + @HttpMocker() + def test_given_multiple_pages_of_records_read_and_returned(self, http_mocker: HttpMocker) -> None: + # Tests pagination + http_mocker.get( + _a_request().with_sort_by_asc(_CURSOR_FIELD).with_include_deleted(True).with_updated_at_btw([self._start_date_in_seconds, self._now_in_seconds]).build(), + _a_response().with_record(_a_record()).with_pagination().build() + ) + http_mocker.get( + _a_request().with_sort_by_asc(_CURSOR_FIELD).with_include_deleted(True).with_updated_at_btw([self._start_date_in_seconds, self._now_in_seconds]).with_offset("[1707076198000,57873868]").build(), + _a_response().with_record(_a_record()).build() + ) + + self._read(_config().with_start_date(self._start_date)) + # HTTPMocker ensures call are performed + + @HttpMocker() + def test_given_http_status_400_when_read_then_stream_is_ignored(self, http_mocker: HttpMocker) -> None: + # Tests 400 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status(400) + ) + output = self._read(_config().with_start_date(self._start_date), expecting_exception=True) + assert len(output.get_stream_statuses(f"{_STREAM_NAME}s")) == 0 + + + @HttpMocker() + def test_given_http_status_401_when_the_stream_is_incomplete(self, http_mocker: HttpMocker) -> None: + # Test 401 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status(401), + ) + output = self._read(_config().with_start_date(self._start_date), expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error + + @HttpMocker() + def test_given_rate_limited_when_read_then_retry_and_return_records(self, http_mocker: HttpMocker) -> None: + # Tests backoff/retry with rate limiting + http_mocker.get( + _a_request().with_any_query_params().build(), + [ + a_response_with_status_and_header(429, {"Retry-After": "0.01"}), + _a_response().with_record(_a_record()).build(), + ], + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_http_status_500_then_retry_returns_200_and_extracted_records(self, http_mocker: HttpMocker) -> None: + # Tests retry with 500 status + http_mocker.get( + _a_request().with_any_query_params().build(), + [a_response_with_status_and_header(500, {"Retry-After": "0.01"}), _a_response().with_record(_a_record()).build()], + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_http_status_500_after_max_retries_raises_config_error(self, http_mocker: HttpMocker) -> None: + # Tests 500 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status_and_header(500, {"Retry-After": "0.01"}), + ) + output = self._read(_config(), expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error + +@freezegun.freeze_time(_NOW.isoformat()) +class IncrementalTest(TestCase): + + def setUp(self) -> None: + self._now = _NOW + self._now_in_seconds = int(self._now.timestamp()) + self._start_date = _NOW - timedelta(days=60) + self._start_date_in_seconds = int(self._start_date.timestamp()) + + @staticmethod + def _read(config: ConfigBuilder, state: Dict[str, Any], expecting_exception: bool = False) -> EntrypointOutput: + return _read(config, SyncMode.incremental, state, expecting_exception=expecting_exception) + + @HttpMocker() + def test_given_no_initial_state_when_read_then_return_state_based_on_most_recently_read_slice(self, http_mocker: HttpMocker) -> None: + # Tests setting state when no initial state is provided + cursor_value = self._start_date_in_seconds + 1 + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record().with_cursor(cursor_value)).build() + ) + output = self._read(_config().with_start_date(self._start_date - timedelta(hours=8)), _NO_STATE) + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated_at=cursor_value) + + @HttpMocker() + def test_given_initial_state_use_state_for_query_params(self, http_mocker: HttpMocker) -> None: + # Tests updating query param with state + state_cursor_value = int((self._now - timedelta(days=5)).timestamp()) + record_cursor_value = self._now_in_seconds - 1 + state = StateBuilder().with_stream_state(_STREAM_NAME, {_CURSOR_FIELD: state_cursor_value}).build() + http_mocker.get( + _a_request().with_sort_by_asc(_CURSOR_FIELD).with_include_deleted(True).with_updated_at_btw([state_cursor_value, self._now_in_seconds]).build(), + _a_response().with_record(_a_record().with_cursor(record_cursor_value)).build(), + ) + output = self._read(_config().with_start_date(self._start_date - timedelta(hours=8)), state) + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated_at=record_cursor_value) \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_coupon.py b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_coupon.py new file mode 100644 index 0000000000000..9d938c97176cd --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_coupon.py @@ -0,0 +1,217 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from datetime import datetime, timedelta, timezone +from typing import Any, Dict, Optional +from unittest import TestCase + +import freezegun +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, read +from airbyte_cdk.test.mock_http import HttpMocker +from airbyte_cdk.test.mock_http.response_builder import ( + FieldPath, + HttpResponseBuilder, + NestedPath, + RecordBuilder, + create_record_builder, + create_response_builder, + find_template, +) +from airbyte_cdk.test.state_builder import StateBuilder +from airbyte_protocol.models import AirbyteStateBlob, ConfiguredAirbyteCatalog, FailureType, StreamDescriptor, SyncMode +from source_chargebee import SourceChargebee + +from .config import ConfigBuilder +from .pagination import ChargebeePaginationStrategy +from .request_builder import ChargebeeRequestBuilder +from .response_builder import a_response_with_status, a_response_with_status_and_header + +_STREAM_NAME = "coupon" +_SITE = "test-site" +_SITE_API_KEY = "test-api-key" +_PRODUCT_CATALOG = "2.0" +_PRIMARY_KEY = "id" +_CURSOR_FIELD = "updated_at" +_NO_STATE = {} +_NOW = datetime.now(timezone.utc) + +def _a_request() -> ChargebeeRequestBuilder: + return ChargebeeRequestBuilder.coupon_endpoint(_SITE, _SITE_API_KEY) + +def _config() -> ConfigBuilder: + return ConfigBuilder().with_site(_SITE).with_site_api_key(_SITE_API_KEY).with_product_catalog(_PRODUCT_CATALOG) + +def _catalog(sync_mode: SyncMode) -> ConfiguredAirbyteCatalog: + return CatalogBuilder().with_stream(_STREAM_NAME, sync_mode).build() + +def _source() -> SourceChargebee: + return SourceChargebee() + +def _a_record() -> RecordBuilder: + return create_record_builder( + find_template(_STREAM_NAME, __file__), + FieldPath("list"), + record_id_path=NestedPath([_STREAM_NAME, _PRIMARY_KEY]), + record_cursor_path=NestedPath([_STREAM_NAME, _CURSOR_FIELD]) + ) + +def _a_response() -> HttpResponseBuilder: + return create_response_builder( + find_template(_STREAM_NAME, __file__), + FieldPath("list"), + pagination_strategy=ChargebeePaginationStrategy() + ) + +def _read( + config_builder: ConfigBuilder, + sync_mode: SyncMode, + state: Optional[Dict[str, Any]] = None, + expecting_exception: bool = False +) -> EntrypointOutput: + catalog = _catalog(sync_mode) + config = config_builder.build() + return read(_source(), config, catalog, state, expecting_exception) + +@freezegun.freeze_time(_NOW.isoformat()) +class FullRefreshTest(TestCase): + + def setUp(self) -> None: + self._now = _NOW + self._now_in_seconds = int(self._now.timestamp()) + self._start_date = _NOW - timedelta(days=28) + self._start_date_in_seconds = int(self._start_date.timestamp()) + + @staticmethod + def _read(config: ConfigBuilder, expecting_exception: bool = False) -> EntrypointOutput: + return _read(config, SyncMode.full_refresh, expecting_exception=expecting_exception) + + @HttpMocker() + def test_given_valid_response_records_are_extracted_and_returned(self, http_mocker: HttpMocker) -> None: + # Tests simple read and record extraction + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record()).with_record(_a_record()).build() + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 2 + + @HttpMocker() + def test_given_multiple_pages_of_records_read_and_returned(self, http_mocker: HttpMocker) -> None: + # Tests pagination + http_mocker.get( + _a_request().with_updated_at_btw([self._start_date_in_seconds, self._now_in_seconds]).build(), + _a_response().with_record(_a_record()).with_pagination().build() + ) + http_mocker.get( + _a_request().with_updated_at_btw([self._start_date_in_seconds, self._now_in_seconds]).with_offset("[1707076198000,57873868]").build(), + _a_response().with_record(_a_record()).build() + ) + + self._read(_config().with_start_date(self._start_date)) + # HTTPMocker ensures call are performed + + @HttpMocker() + def test_given_records_returned_with_custom_field_transformation(self, http_mocker: HttpMocker) -> None: + # Tests custom field transformation + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record().with_field(NestedPath([_STREAM_NAME, "cf_my_custom_field"]), "my_custom_value")).build() + ) + output = self._read(_config().with_start_date(self._start_date)) + assert output.records[0].record.data["custom_fields"][0]["name"] == "cf_my_custom_field" + assert output.records[0].record.data["custom_fields"][0]["value"] == "my_custom_value" + + @HttpMocker() + def test_given_http_status_400_when_read_then_stream_is_ignored(self, http_mocker: HttpMocker) -> None: + # Tests 400 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status(400) + ) + output = self._read(_config().with_start_date(self._start_date), expecting_exception=True) + assert len(output.get_stream_statuses(f"{_STREAM_NAME}s")) == 0 + + @HttpMocker() + def test_given_http_status_401_when_the_stream_is_incomplete(self, http_mocker: HttpMocker) -> None: + # Test 401 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status(401), + ) + output = self._read(_config().with_start_date(self._start_date), expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error + + @HttpMocker() + def test_given_rate_limited_when_read_then_retry_and_return_records(self, http_mocker: HttpMocker) -> None: + # Tests backoff/retry with rate limiting + http_mocker.get( + _a_request().with_any_query_params().build(), + [ + a_response_with_status_and_header(429, {"Retry-After": "0.01"}), + _a_response().with_record(_a_record()).build(), + ], + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_http_status_500_then_retry_returns_200_and_extracted_records(self, http_mocker: HttpMocker) -> None: + # Tests retry with 500 status + http_mocker.get( + _a_request().with_any_query_params().build(), + [a_response_with_status_and_header(500, {"Retry-After": "0.01"}), _a_response().with_record(_a_record()).build()], + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_http_status_500_after_max_retries_raises_config_error(self, http_mocker: HttpMocker) -> None: + # Tests 500 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status_and_header(500, {"Retry-After": "0.01"}), + ) + output = self._read(_config(), expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error + + +@freezegun.freeze_time(_NOW.isoformat()) +class IncrementalTest(TestCase): + + def setUp(self) -> None: + self._now = _NOW + self._now_in_seconds = int(self._now.timestamp()) + self._start_date = _NOW - timedelta(days=60) + self._start_date_in_seconds = int(self._start_date.timestamp()) + + @staticmethod + def _read(config: ConfigBuilder, state: Dict[str, Any], expecting_exception: bool = False) -> EntrypointOutput: + return _read(config, SyncMode.incremental, state, expecting_exception=expecting_exception) + + @HttpMocker() + def test_given_no_initial_state_when_read_then_return_state_based_on_most_recently_read_slice(self, http_mocker: HttpMocker) -> None: + # Tests setting state when no initial state is provided + cursor_value = self._start_date_in_seconds + 1 + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record().with_cursor(cursor_value)).build() + ) + output = self._read(_config().with_start_date(self._start_date - timedelta(hours=8)), _NO_STATE) + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated_at=cursor_value) + + @HttpMocker() + def test_given_initial_state_use_state_for_query_params(self, http_mocker: HttpMocker) -> None: + # Tests updating query param with state + state_cursor_value = int((self._now - timedelta(days=5)).timestamp()) + record_cursor_value = self._now_in_seconds - 1 + state = StateBuilder().with_stream_state(_STREAM_NAME, {_CURSOR_FIELD: state_cursor_value}).build() + http_mocker.get( + _a_request().with_updated_at_btw([state_cursor_value, self._now_in_seconds]).build(), + _a_response().with_record(_a_record().with_cursor(record_cursor_value)).build(), + ) + output = self._read(_config().with_start_date(self._start_date - timedelta(hours=8)), state) + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated_at=record_cursor_value) diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_customer.py b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_customer.py new file mode 100644 index 0000000000000..31a24fe7f38f5 --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_customer.py @@ -0,0 +1,216 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from datetime import datetime, timedelta, timezone +from typing import Any, Dict, Optional +from unittest import TestCase + +import freezegun +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, read +from airbyte_cdk.test.mock_http import HttpMocker +from airbyte_cdk.test.mock_http.response_builder import ( + FieldPath, + HttpResponseBuilder, + NestedPath, + RecordBuilder, + create_record_builder, + create_response_builder, + find_template, +) +from airbyte_cdk.test.state_builder import StateBuilder +from airbyte_protocol.models import AirbyteStateBlob, ConfiguredAirbyteCatalog, FailureType, StreamDescriptor, SyncMode +from source_chargebee import SourceChargebee + +from .config import ConfigBuilder +from .pagination import ChargebeePaginationStrategy +from .request_builder import ChargebeeRequestBuilder +from .response_builder import a_response_with_status, a_response_with_status_and_header + +_STREAM_NAME = "customer" +_SITE = "test-site" +_SITE_API_KEY = "test-api-key" +_PRODUCT_CATALOG = "2.0" +_PRIMARY_KEY = "id" +_CURSOR_FIELD = "updated_at" +_NO_STATE = {} +_NOW = datetime.now(timezone.utc) + +def _a_request() -> ChargebeeRequestBuilder: + return ChargebeeRequestBuilder.customer_endpoint(_SITE, _SITE_API_KEY) + +def _config() -> ConfigBuilder: + return ConfigBuilder().with_site(_SITE).with_site_api_key(_SITE_API_KEY).with_product_catalog(_PRODUCT_CATALOG) + +def _catalog(sync_mode: SyncMode) -> ConfiguredAirbyteCatalog: + return CatalogBuilder().with_stream(_STREAM_NAME, sync_mode).build() + +def _source() -> SourceChargebee: + return SourceChargebee() + +def _a_record() -> RecordBuilder: + return create_record_builder( + find_template(_STREAM_NAME, __file__), + FieldPath("list"), + record_id_path=NestedPath([_STREAM_NAME, _PRIMARY_KEY]), + record_cursor_path=NestedPath([_STREAM_NAME, _CURSOR_FIELD]) + ) + +def _a_response() -> HttpResponseBuilder: + return create_response_builder( + find_template(_STREAM_NAME, __file__), + FieldPath("list"), + pagination_strategy=ChargebeePaginationStrategy() + ) + +def _read( + config_builder: ConfigBuilder, + sync_mode: SyncMode, + state: Optional[Dict[str, Any]] = None, + expecting_exception: bool = False +) -> EntrypointOutput: + catalog = _catalog(sync_mode) + config = config_builder.build() + return read(_source(), config, catalog, state, expecting_exception) + +@freezegun.freeze_time(_NOW.isoformat()) +class FullRefreshTest(TestCase): + + def setUp(self) -> None: + self._now = _NOW + self._now_in_seconds = int(self._now.timestamp()) + self._start_date = _NOW - timedelta(days=28) + self._start_date_in_seconds = int(self._start_date.timestamp()) + + @staticmethod + def _read(config: ConfigBuilder, expecting_exception: bool = False) -> EntrypointOutput: + return _read(config, SyncMode.full_refresh, expecting_exception=expecting_exception) + + @HttpMocker() + def test_given_valid_response_records_are_extracted_and_returned(self, http_mocker: HttpMocker) -> None: + # Tests simple read and record extraction + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record()).with_record(_a_record()).build() + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 2 + + @HttpMocker() + def test_given_multiple_pages_of_records_read_and_returned(self, http_mocker: HttpMocker) -> None: + # Tests pagination + http_mocker.get( + _a_request().with_sort_by_asc(_CURSOR_FIELD).with_include_deleted(True).with_updated_at_btw([self._start_date_in_seconds, self._now_in_seconds]).build(), + _a_response().with_record(_a_record()).with_pagination().build() + ) + http_mocker.get( + _a_request().with_sort_by_asc(_CURSOR_FIELD).with_include_deleted(True).with_updated_at_btw([self._start_date_in_seconds, self._now_in_seconds]).with_offset("[1707076198000,57873868]").build(), + _a_response().with_record(_a_record()).build() + ) + + self._read(_config().with_start_date(self._start_date)) + # HTTPMocker ensures call are performed + + @HttpMocker() + def test_given_records_returned_with_custom_field_transformation(self, http_mocker: HttpMocker) -> None: + # Tests custom field transformation + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record().with_field(NestedPath([_STREAM_NAME, "cf_my_custom_field"]), "my_custom_value")).build() + ) + output = self._read(_config().with_start_date(self._start_date)) + assert output.records[0].record.data["custom_fields"][0]["name"] == "cf_my_custom_field" + assert output.records[0].record.data["custom_fields"][0]["value"] == "my_custom_value" + + @HttpMocker() + def test_given_http_status_400_when_read_then_stream_is_ignored(self, http_mocker: HttpMocker) -> None: + # Tests 400 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status(400) + ) + output = self._read(_config().with_start_date(self._start_date), expecting_exception=True) + assert len(output.get_stream_statuses(f"{_STREAM_NAME}s")) == 0 + + @HttpMocker() + def test_given_http_status_401_when_the_stream_is_incomplete(self, http_mocker: HttpMocker) -> None: + # Test 401 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status(401), + ) + output = self._read(_config().with_start_date(self._start_date), expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error + + @HttpMocker() + def test_given_rate_limited_when_read_then_retry_and_return_records(self, http_mocker: HttpMocker) -> None: + # Tests backoff/retry with rate limiting + http_mocker.get( + _a_request().with_any_query_params().build(), + [ + a_response_with_status_and_header(429, {"Retry-After": "0.01"}), + _a_response().with_record(_a_record()).build(), + ], + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_http_status_500_then_retry_returns_200_and_extracted_records(self, http_mocker: HttpMocker) -> None: + # Tests retry with 500 status + http_mocker.get( + _a_request().with_any_query_params().build(), + [a_response_with_status_and_header(500, {"Retry-After": "0.01"}), _a_response().with_record(_a_record()).build()], + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_http_status_500_after_max_retries_raises_config_error(self, http_mocker: HttpMocker) -> None: + # Tests 500 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status_and_header(500, {"Retry-After": "0.01"}), + ) + output = self._read(_config(), expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error + +@freezegun.freeze_time(_NOW.isoformat()) +class IncrementalTest(TestCase): + + def setUp(self) -> None: + self._now = _NOW + self._now_in_seconds = int(self._now.timestamp()) + self._start_date = _NOW - timedelta(days=60) + self._start_date_in_seconds = int(self._start_date.timestamp()) + + @staticmethod + def _read(config: ConfigBuilder, state: Dict[str, Any], expecting_exception: bool = False) -> EntrypointOutput: + return _read(config, SyncMode.incremental, state, expecting_exception=expecting_exception) + + @HttpMocker() + def test_given_no_initial_state_when_read_then_return_state_based_on_most_recently_read_slice(self, http_mocker: HttpMocker) -> None: + # Tests setting state when no initial state is provided + cursor_value = self._start_date_in_seconds + 1 + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record().with_cursor(cursor_value)).build() + ) + output = self._read(_config().with_start_date(self._start_date - timedelta(hours=8)), _NO_STATE) + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated_at=cursor_value) + + @HttpMocker() + def test_given_initial_state_use_state_for_query_params(self, http_mocker: HttpMocker) -> None: + # Tests updating query param with state + state_cursor_value = int((self._now - timedelta(days=5)).timestamp()) + record_cursor_value = self._now_in_seconds - 1 + state = StateBuilder().with_stream_state(_STREAM_NAME, {_CURSOR_FIELD: state_cursor_value}).build() + http_mocker.get( + _a_request().with_sort_by_asc(_CURSOR_FIELD).with_include_deleted(True).with_updated_at_btw([state_cursor_value, self._now_in_seconds]).build(), + _a_response().with_record(_a_record().with_cursor(record_cursor_value)).build(), + ) + output = self._read(_config().with_start_date(self._start_date - timedelta(hours=8)), state) + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated_at=record_cursor_value) diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_event.py b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_event.py new file mode 100644 index 0000000000000..030bf4a5a5e5b --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_event.py @@ -0,0 +1,202 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from datetime import datetime, timedelta, timezone +from typing import Any, Dict, Optional +from unittest import TestCase + +import freezegun +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, read +from airbyte_cdk.test.mock_http import HttpMocker +from airbyte_cdk.test.mock_http.response_builder import ( + FieldPath, + HttpResponseBuilder, + NestedPath, + RecordBuilder, + create_record_builder, + create_response_builder, + find_template, +) +from airbyte_cdk.test.state_builder import StateBuilder +from airbyte_protocol.models import AirbyteStateBlob, ConfiguredAirbyteCatalog, FailureType, StreamDescriptor, SyncMode +from source_chargebee import SourceChargebee + +from .config import ConfigBuilder +from .pagination import ChargebeePaginationStrategy +from .request_builder import ChargebeeRequestBuilder +from .response_builder import a_response_with_status, a_response_with_status_and_header + +_STREAM_NAME = "event" +_SITE = "test-site" +_SITE_API_KEY = "test-api-key" +_PRODUCT_CATALOG = "2.0" +_PRIMARY_KEY = "id" +_CURSOR_FIELD = "occurred_at" +_NO_STATE = {} +_NOW = datetime.now(timezone.utc) + +def _a_request() -> ChargebeeRequestBuilder: + return ChargebeeRequestBuilder.event_endpoint(_SITE, _SITE_API_KEY) + +def _config() -> ConfigBuilder: + return ConfigBuilder().with_site(_SITE).with_site_api_key(_SITE_API_KEY).with_product_catalog(_PRODUCT_CATALOG) + +def _catalog(sync_mode: SyncMode) -> ConfiguredAirbyteCatalog: + return CatalogBuilder().with_stream(_STREAM_NAME, sync_mode).build() + +def _source() -> SourceChargebee: + return SourceChargebee() + +def _a_record() -> RecordBuilder: + return create_record_builder( + find_template(_STREAM_NAME, __file__), + FieldPath("list"), + record_id_path=NestedPath([_STREAM_NAME, _PRIMARY_KEY]), + record_cursor_path=NestedPath([_STREAM_NAME, _CURSOR_FIELD]) + ) + +def _a_response() -> HttpResponseBuilder: + return create_response_builder( + find_template(_STREAM_NAME, __file__), + FieldPath("list"), + pagination_strategy=ChargebeePaginationStrategy() + ) + +def _read( + config_builder: ConfigBuilder, + sync_mode: SyncMode, + state: Optional[Dict[str, Any]] = None, + expecting_exception: bool = False +) -> EntrypointOutput: + catalog = _catalog(sync_mode) + config = config_builder.build() + return read(_source(), config, catalog, state, expecting_exception) + +@freezegun.freeze_time(_NOW.isoformat()) +class FullRefreshTest(TestCase): + + def setUp(self) -> None: + self._now = _NOW + self._now_in_seconds = int(self._now.timestamp()) + self._start_date = _NOW - timedelta(days=28) + self._start_date_in_seconds = int(self._start_date.timestamp()) + + @staticmethod + def _read(config: ConfigBuilder, expecting_exception: bool = False) -> EntrypointOutput: + return _read(config, SyncMode.full_refresh, expecting_exception=expecting_exception) + + @HttpMocker() + def test_given_valid_response_records_are_extracted_and_returned(self, http_mocker: HttpMocker) -> None: + # Tests simple read and record extraction + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record()).with_record(_a_record()).build() + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 2 + + @HttpMocker() + def test_given_multiple_pages_of_records_read_and_returned(self, http_mocker: HttpMocker) -> None: + # Tests pagination + http_mocker.get( + _a_request().with_sort_by_asc(_CURSOR_FIELD).with_occurred_at_btw([self._start_date_in_seconds, self._now_in_seconds]).build(), + _a_response().with_record(_a_record()).with_pagination().build() + ) + http_mocker.get( + _a_request().with_sort_by_asc(_CURSOR_FIELD).with_occurred_at_btw([self._start_date_in_seconds, self._now_in_seconds]).with_offset("[1707076198000,57873868]").build(), + _a_response().with_record(_a_record()).build() + ) + + self._read(_config().with_start_date(self._start_date)) + # HTTPMocker ensures call are performed + + @HttpMocker() + def test_given_http_status_400_when_read_then_stream_is_ignored(self, http_mocker: HttpMocker) -> None: + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status(400), + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.get_stream_statuses(f"{_STREAM_NAME}s")) == 0 + + @HttpMocker() + def test_given_http_status_401_when_the_stream_is_incomplete(self, http_mocker: HttpMocker) -> None: + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status(401), + ) + output = self._read(_config().with_start_date(self._start_date), expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error + + @HttpMocker() + def test_given_rate_limited_when_read_then_retry_and_return_records(self, http_mocker: HttpMocker) -> None: + http_mocker.get( + _a_request().with_any_query_params().build(), + [ + a_response_with_status_and_header(429, {"Retry-After": "0.01"}), + _a_response().with_record(_a_record()).build(), + ], + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_http_status_500_then_retry_returns_200_and_extracted_records(self, http_mocker: HttpMocker) -> None: + http_mocker.get( + _a_request().with_any_query_params().build(), + [a_response_with_status_and_header(500, {"Retry-After": "0.01"}), _a_response().with_record(_a_record()).build()], + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_http_status_500_after_max_retries_raises_config_error(self, http_mocker: HttpMocker) -> None: + # Tests 500 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status_and_header(500, {"Retry-After": "0.01"}), + ) + output = self._read(_config(), expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error + + +@freezegun.freeze_time(_NOW.isoformat()) +class IncrementalTest(TestCase): + + def setUp(self) -> None: + self._now = _NOW + self._now_in_seconds = int(self._now.timestamp()) + self._start_date = _NOW - timedelta(days=60) + self._start_date_in_seconds = int(self._start_date.timestamp()) + + @staticmethod + def _read(config: ConfigBuilder, state: Dict[str, Any], expecting_exception: bool = False) -> EntrypointOutput: + return _read(config, SyncMode.incremental, state, expecting_exception=expecting_exception) + + @HttpMocker() + def test_given_no_initial_state_when_read_then_return_state_based_on_most_recently_read_slice(self, http_mocker: HttpMocker) -> None: + # Tests setting state when no initial state is provided + cursor_value = self._start_date_in_seconds + 1 + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record().with_cursor(cursor_value)).build() + ) + output = self._read(_config().with_start_date(self._start_date - timedelta(hours=8)), _NO_STATE) + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(occurred_at=cursor_value) + + @HttpMocker() + def test_given_initial_state_use_state_for_query_params(self, http_mocker: HttpMocker) -> None: + # Tests updating query param with state + state_cursor_value = int((self._now - timedelta(days=5)).timestamp()) + record_cursor_value = self._now_in_seconds - 1 + state = StateBuilder().with_stream_state(_STREAM_NAME, {_CURSOR_FIELD: state_cursor_value}).build() + http_mocker.get( + _a_request().with_sort_by_asc(_CURSOR_FIELD).with_occurred_at_btw([state_cursor_value, self._now_in_seconds]).build(), + _a_response().with_record(_a_record().with_cursor(record_cursor_value)).build(), + ) + output = self._read(_config().with_start_date(self._start_date - timedelta(hours=8)), state) + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(occurred_at=record_cursor_value) diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_hosted_page.py b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_hosted_page.py new file mode 100644 index 0000000000000..f0acbbb098f0b --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_hosted_page.py @@ -0,0 +1,201 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from datetime import datetime, timedelta, timezone +from typing import Any, Dict, Optional +from unittest import TestCase + +import freezegun +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, read +from airbyte_cdk.test.mock_http import HttpMocker +from airbyte_cdk.test.mock_http.response_builder import ( + FieldPath, + HttpResponseBuilder, + NestedPath, + RecordBuilder, + create_record_builder, + create_response_builder, + find_template, +) +from airbyte_cdk.test.state_builder import StateBuilder +from airbyte_protocol.models import AirbyteStateBlob, ConfiguredAirbyteCatalog, FailureType, StreamDescriptor, SyncMode +from source_chargebee import SourceChargebee + +from .config import ConfigBuilder +from .pagination import ChargebeePaginationStrategy +from .request_builder import ChargebeeRequestBuilder +from .response_builder import a_response_with_status, a_response_with_status_and_header + +_STREAM_NAME = "hosted_page" +_SITE = "test-site" +_SITE_API_KEY = "test-api-key" +_PRODUCT_CATALOG = "2.0" +_PRIMARY_KEY = "id" +_CURSOR_FIELD = "updated_at" +_NO_STATE = {} +_NOW = datetime.now(timezone.utc) + +def _a_request() -> ChargebeeRequestBuilder: + return ChargebeeRequestBuilder.hosted_page_endpoint(_SITE, _SITE_API_KEY) + +def _config() -> ConfigBuilder: + return ConfigBuilder().with_site(_SITE).with_site_api_key(_SITE_API_KEY).with_product_catalog(_PRODUCT_CATALOG) + +def _catalog(sync_mode: SyncMode) -> ConfiguredAirbyteCatalog: + return CatalogBuilder().with_stream(_STREAM_NAME, sync_mode).build() + +def _source() -> SourceChargebee: + return SourceChargebee() + +def _a_record() -> RecordBuilder: + return create_record_builder( + find_template(_STREAM_NAME, __file__), + FieldPath("list"), + record_id_path=NestedPath([_STREAM_NAME, _PRIMARY_KEY]), + record_cursor_path=NestedPath([_STREAM_NAME, _CURSOR_FIELD]) + ) + +def _a_response() -> HttpResponseBuilder: + return create_response_builder( + find_template(_STREAM_NAME, __file__), + FieldPath("list"), + pagination_strategy=ChargebeePaginationStrategy() + ) + +def _read( + config_builder: ConfigBuilder, + sync_mode: SyncMode, + state: Optional[Dict[str, Any]] = None, + expecting_exception: bool = False +) -> EntrypointOutput: + catalog = _catalog(sync_mode) + config = config_builder.build() + return read(_source(), config, catalog, state, expecting_exception) + +@freezegun.freeze_time(_NOW.isoformat()) +class FullRefreshTest(TestCase): + + def setUp(self) -> None: + self._now = _NOW + self._now_in_seconds = int(self._now.timestamp()) + self._start_date = _NOW - timedelta(days=28) + self._start_date_in_seconds = int(self._start_date.timestamp()) + + @staticmethod + def _read(config: ConfigBuilder, expecting_exception: bool = False) -> EntrypointOutput: + return _read(config, SyncMode.full_refresh, expecting_exception=expecting_exception) + + @HttpMocker() + def test_given_valid_response_records_are_extracted_and_returned(self, http_mocker: HttpMocker) -> None: + # Tests simple read and record extraction + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record()).with_record(_a_record()).build() + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 2 + + @HttpMocker() + def test_given_multiple_pages_of_records_read_and_returned(self, http_mocker: HttpMocker) -> None: + # Tests pagination + http_mocker.get( + _a_request().with_sort_by_asc(_CURSOR_FIELD).with_include_deleted(True).with_updated_at_btw([self._start_date_in_seconds, self._now_in_seconds]).build(), + _a_response().with_record(_a_record()).with_pagination().build() + ) + http_mocker.get( + _a_request().with_sort_by_asc(_CURSOR_FIELD).with_include_deleted(True).with_updated_at_btw([self._start_date_in_seconds, self._now_in_seconds]).with_offset("[1707076198000,57873868]").build(), + _a_response().with_record(_a_record()).build() + ) + + self._read(_config().with_start_date(self._start_date)) + # HTTPMocker ensures call are performed + + @HttpMocker() + def test_given_http_status_400_when_read_then_stream_is_ignored(self, http_mocker: HttpMocker) -> None: + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status(400), + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.get_stream_statuses(f"{_STREAM_NAME}s")) == 0 + + @HttpMocker() + def test_given_http_status_401_when_the_stream_is_incomplete(self, http_mocker: HttpMocker) -> None: + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status(401), + ) + output = self._read(_config().with_start_date(self._start_date), expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error + + @HttpMocker() + def test_given_rate_limited_when_read_then_retry_and_return_records(self, http_mocker: HttpMocker) -> None: + http_mocker.get( + _a_request().with_any_query_params().build(), + [ + a_response_with_status_and_header(429, {"Retry-After": "0.01"}), + _a_response().with_record(_a_record()).build(), + ], + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_http_status_500_then_retry_returns_200_and_extracted_records(self, http_mocker: HttpMocker) -> None: + http_mocker.get( + _a_request().with_any_query_params().build(), + [a_response_with_status_and_header(500, {"Retry-After": "0.01"}), _a_response().with_record(_a_record()).build()], + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_http_status_500_after_max_retries_raises_config_error(self, http_mocker: HttpMocker) -> None: + # Tests 500 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status_and_header(500, {"Retry-After": "0.01"}), + ) + output = self._read(_config(), expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error + +@freezegun.freeze_time(_NOW.isoformat()) +class IncrementalTest(TestCase): + + def setUp(self) -> None: + self._now = _NOW + self._now_in_seconds = int(self._now.timestamp()) + self._start_date = _NOW - timedelta(days=60) + self._start_date_in_seconds = int(self._start_date.timestamp()) + + @staticmethod + def _read(config: ConfigBuilder, state: Dict[str, Any], expecting_exception: bool = False) -> EntrypointOutput: + return _read(config, SyncMode.incremental, state, expecting_exception=expecting_exception) + + @HttpMocker() + def test_given_no_initial_state_when_read_then_return_state_based_on_most_recently_read_slice(self, http_mocker: HttpMocker) -> None: + # Tests setting state when no initial state is provided + cursor_value = self._start_date_in_seconds + 1 + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record().with_cursor(cursor_value)).build() + ) + output = self._read(_config().with_start_date(self._start_date - timedelta(hours=8)), _NO_STATE) + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated_at=cursor_value) + + @HttpMocker() + def test_given_initial_state_use_state_for_query_params(self, http_mocker: HttpMocker) -> None: + # Tests updating query param with state + state_cursor_value = int((self._now - timedelta(days=5)).timestamp()) + record_cursor_value = self._now_in_seconds - 1 + state = StateBuilder().with_stream_state(_STREAM_NAME, {_CURSOR_FIELD: state_cursor_value}).build() + http_mocker.get( + _a_request().with_sort_by_asc(_CURSOR_FIELD).with_include_deleted(True).with_updated_at_btw([state_cursor_value, self._now_in_seconds]).build(), + _a_response().with_record(_a_record().with_cursor(record_cursor_value)).build(), + ) + output = self._read(_config().with_start_date(self._start_date - timedelta(hours=8)), state) + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated_at=record_cursor_value) diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_plan.py b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_plan.py new file mode 100644 index 0000000000000..5f67711df72ec --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_plan.py @@ -0,0 +1,206 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from datetime import datetime, timedelta, timezone +from typing import Any, Dict, Optional +from unittest import TestCase + +import freezegun +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, read +from airbyte_cdk.test.mock_http import HttpMocker +from airbyte_cdk.test.mock_http.response_builder import ( + FieldPath, + HttpResponseBuilder, + NestedPath, + RecordBuilder, + create_record_builder, + create_response_builder, + find_template, +) +from airbyte_cdk.test.state_builder import StateBuilder +from airbyte_protocol.models import AirbyteStateBlob, ConfiguredAirbyteCatalog, FailureType, StreamDescriptor, SyncMode +from source_chargebee import SourceChargebee + +from .config import ConfigBuilder +from .pagination import ChargebeePaginationStrategy +from .request_builder import ChargebeeRequestBuilder +from .response_builder import a_response_with_status, a_response_with_status_and_header + +_STREAM_NAME = "plan" +_SITE = "test-site" +_SITE_API_KEY = "test-api-key" +_PRODUCT_CATALOG = "1.0" +_PRIMARY_KEY = "id" +_CURSOR_FIELD = "updated_at" +_NO_STATE = {} +_NOW = datetime.now(timezone.utc) + +def _a_request() -> ChargebeeRequestBuilder: + return ChargebeeRequestBuilder.plan_endpoint(_SITE, _SITE_API_KEY) + +def _config() -> ConfigBuilder: + return ConfigBuilder().with_site(_SITE).with_site_api_key(_SITE_API_KEY).with_product_catalog(_PRODUCT_CATALOG) + +def _catalog(sync_mode: SyncMode) -> ConfiguredAirbyteCatalog: + return CatalogBuilder().with_stream(_STREAM_NAME, sync_mode).build() + +def _source() -> SourceChargebee: + return SourceChargebee() + +def _a_record() -> RecordBuilder: + return create_record_builder( + find_template(_STREAM_NAME, __file__), + FieldPath("list"), + record_id_path=NestedPath([_STREAM_NAME, _PRIMARY_KEY]), + record_cursor_path=NestedPath([_STREAM_NAME, _CURSOR_FIELD]) + ) + +def _a_response() -> HttpResponseBuilder: + return create_response_builder( + find_template(_STREAM_NAME, __file__), + FieldPath("list"), + pagination_strategy=ChargebeePaginationStrategy() + ) + +def _read( + config_builder: ConfigBuilder, + sync_mode: SyncMode, + state: Optional[Dict[str, Any]] = None, + expecting_exception: bool = False +) -> EntrypointOutput: + catalog = _catalog(sync_mode) + config = config_builder.build() + return read(_source(), config, catalog, state, expecting_exception) + +@freezegun.freeze_time(_NOW.isoformat()) +class FullRefreshTest(TestCase): + + def setUp(self) -> None: + self._now = _NOW + self._now_in_seconds = int(self._now.timestamp()) + self._start_date = _NOW - timedelta(days=28) + self._start_date_in_seconds = int(self._start_date.timestamp()) + + @staticmethod + def _read(config: ConfigBuilder, expecting_exception: bool = False) -> EntrypointOutput: + return _read(config, SyncMode.full_refresh, expecting_exception=expecting_exception) + + @HttpMocker() + def test_given_valid_response_records_are_extracted_and_returned(self, http_mocker: HttpMocker) -> None: + # Tests simple read and record extraction + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record()).with_record(_a_record()).build() + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 2 + + @HttpMocker() + def test_given_multiple_pages_of_records_read_and_returned(self, http_mocker: HttpMocker) -> None: + # Tests pagination + http_mocker.get( + _a_request().with_sort_by_asc(_CURSOR_FIELD).with_include_deleted(True).with_updated_at_btw([self._start_date_in_seconds, self._now_in_seconds]).build(), + _a_response().with_record(_a_record()).with_pagination().build() + ) + http_mocker.get( + _a_request().with_sort_by_asc(_CURSOR_FIELD).with_include_deleted(True).with_updated_at_btw([self._start_date_in_seconds, self._now_in_seconds]).with_offset("[1707076198000,57873868]").build(), + _a_response().with_record(_a_record()).build() + ) + + self._read(_config().with_start_date(self._start_date)) + # HTTPMocker ensures call are performed + + @HttpMocker() + def test_given_http_status_400_when_read_then_stream_is_ignored(self, http_mocker: HttpMocker) -> None: + # Tests 400 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status(400) + ) + output = self._read(_config().with_start_date(self._start_date), expecting_exception=True) + assert len(output.get_stream_statuses(f"{_STREAM_NAME}s")) == 0 + + @HttpMocker() + def test_given_http_status_401_when_the_stream_is_incomplete(self, http_mocker: HttpMocker) -> None: + # Test 401 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status(401), + ) + output = self._read(_config().with_start_date(self._start_date), expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error + + @HttpMocker() + def test_given_rate_limited_when_read_then_retry_and_return_records(self, http_mocker: HttpMocker) -> None: + # Tests backoff/retry with rate limiting + http_mocker.get( + _a_request().with_any_query_params().build(), + [ + a_response_with_status_and_header(429, {"Retry-After": "0.01"}), + _a_response().with_record(_a_record()).build(), + ], + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_http_status_500_then_retry_returns_200_and_extracted_records(self, http_mocker: HttpMocker) -> None: + # Tests retry with 500 status + http_mocker.get( + _a_request().with_any_query_params().build(), + [a_response_with_status_and_header(500, {"Retry-After": "0.01"}), _a_response().with_record(_a_record()).build()], + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_http_status_500_after_max_retries_raises_config_error(self, http_mocker: HttpMocker) -> None: + # Tests 500 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status_and_header(500, {"Retry-After": "0.01"}), + ) + output = self._read(_config(), expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error + + +@freezegun.freeze_time(_NOW.isoformat()) +class IncrementalTest(TestCase): + + def setUp(self) -> None: + self._now = _NOW + self._now_in_seconds = int(self._now.timestamp()) + self._start_date = _NOW - timedelta(days=60) + self._start_date_in_seconds = int(self._start_date.timestamp()) + + @staticmethod + def _read(config: ConfigBuilder, state: Dict[str, Any], expecting_exception: bool = False) -> EntrypointOutput: + return _read(config, SyncMode.incremental, state, expecting_exception=expecting_exception) + + @HttpMocker() + def test_given_no_initial_state_when_read_then_return_state_based_on_most_recently_read_slice(self, http_mocker: HttpMocker) -> None: + # Tests setting state when no initial state is provided + cursor_value = self._start_date_in_seconds + 1 + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record().with_cursor(cursor_value)).build() + ) + output = self._read(_config().with_start_date(self._start_date - timedelta(hours=8)), _NO_STATE) + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated_at=cursor_value) + + @HttpMocker() + def test_given_initial_state_use_state_for_query_params(self, http_mocker: HttpMocker) -> None: + # Tests updating query param with state + state_cursor_value = int((self._now - timedelta(days=5)).timestamp()) + record_cursor_value = self._now_in_seconds - 1 + state = StateBuilder().with_stream_state(_STREAM_NAME, {_CURSOR_FIELD: state_cursor_value}).build() + http_mocker.get( + _a_request().with_sort_by_asc(_CURSOR_FIELD).with_include_deleted(True).with_updated_at_btw([state_cursor_value, self._now_in_seconds]).build(), + _a_response().with_record(_a_record().with_cursor(record_cursor_value)).build(), + ) + output = self._read(_config().with_start_date(self._start_date - timedelta(hours=8)), state) + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated_at=record_cursor_value) diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_site_migration_detail.py b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_site_migration_detail.py new file mode 100644 index 0000000000000..35f8cad27b2df --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_site_migration_detail.py @@ -0,0 +1,232 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from datetime import datetime, timedelta, timezone +from typing import Any, Dict, Optional +from unittest import TestCase + +import freezegun +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, read +from airbyte_cdk.test.mock_http import HttpMocker +from airbyte_cdk.test.mock_http.response_builder import ( + FieldPath, + HttpResponseBuilder, + NestedPath, + RecordBuilder, + create_record_builder, + create_response_builder, + find_template, +) +from airbyte_cdk.test.state_builder import StateBuilder +from airbyte_protocol.models import AirbyteStateBlob, ConfiguredAirbyteCatalog, FailureType, StreamDescriptor, SyncMode +from source_chargebee import SourceChargebee + +from .config import ConfigBuilder +from .pagination import ChargebeePaginationStrategy +from .request_builder import ChargebeeRequestBuilder +from .response_builder import a_response_with_status, a_response_with_status_and_header + +_STREAM_NAME = "site_migration_detail" +_SITE = "test-site" +_SITE_API_KEY = "test-api-key" +_PRODUCT_CATALOG = "2.0" +_PRIMARY_KEY = "entity_id" +_CURSOR_FIELD = "migrated_at" +_NO_STATE = {} +_NOW = datetime.now(timezone.utc) + +''' +Note that this is a semi-incremental stream and tests will need to be adapated accordingly +''' + + +def _a_request() -> ChargebeeRequestBuilder: + return ChargebeeRequestBuilder.site_migration_detail_endpoint(_SITE, _SITE_API_KEY) + +def _config() -> ConfigBuilder: + return ConfigBuilder().with_site(_SITE).with_site_api_key(_SITE_API_KEY).with_product_catalog(_PRODUCT_CATALOG) + +def _catalog(sync_mode: SyncMode) -> ConfiguredAirbyteCatalog: + return CatalogBuilder().with_stream(_STREAM_NAME, sync_mode).build() + +def _source() -> SourceChargebee: + return SourceChargebee() + +def _a_record() -> RecordBuilder: + return create_record_builder( + find_template(_STREAM_NAME, __file__), + FieldPath("list"), + record_id_path=NestedPath([_STREAM_NAME, _PRIMARY_KEY]), + record_cursor_path=NestedPath([_STREAM_NAME, _CURSOR_FIELD]) + ) + +def _a_response() -> HttpResponseBuilder: + return create_response_builder( + find_template(_STREAM_NAME, __file__), + FieldPath("list"), + pagination_strategy=ChargebeePaginationStrategy() + ) + +def _read( + config_builder: ConfigBuilder, + sync_mode: SyncMode, + state: Optional[Dict[str, Any]] = None, + expecting_exception: bool = False +) -> EntrypointOutput: + catalog = _catalog(sync_mode) + config = config_builder.build() + return read(_source(), config, catalog, state, expecting_exception) + +@freezegun.freeze_time(_NOW.isoformat()) +class FullRefreshTest(TestCase): + + def setUp(self) -> None: + self._now = _NOW + self._now_in_seconds = int(self._now.timestamp()) + self._start_date = _NOW - timedelta(days=28) + self._start_date_in_seconds = int(self._start_date.timestamp()) + + @staticmethod + def _read(config: ConfigBuilder, expecting_exception: bool = False) -> EntrypointOutput: + return _read(config, SyncMode.full_refresh, expecting_exception=expecting_exception) + + @HttpMocker() + def test_given_valid_response_records_are_extracted_and_returned(self, http_mocker: HttpMocker) -> None: + # Tests simple read and record extraction + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record()).with_record(_a_record()).build() + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 2 + + @HttpMocker() + def test_given_multiple_pages_of_records_read_and_returned(self, http_mocker: HttpMocker) -> None: + # Tests pagination + http_mocker.get( + _a_request().build(), + _a_response().with_record(_a_record()).with_pagination().build() + ) + http_mocker.get( + _a_request().with_offset("[1707076198000,57873868]").build(), + _a_response().with_record(_a_record()).build() + ) + + self._read(_config().with_start_date(self._start_date)) + # HTTPMocker ensures call are performed + + + @HttpMocker() + def test_given_http_status_400_when_read_then_stream_is_ignored(self, http_mocker: HttpMocker) -> None: + # Tests 400 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status(400) + ) + output = self._read(_config().with_start_date(self._start_date), expecting_exception=True) + assert len(output.get_stream_statuses(f"{_STREAM_NAME}s")) == 0 + + @HttpMocker() + def test_given_http_status_401_when_the_stream_is_incomplete(self, http_mocker: HttpMocker) -> None: + # Test 401 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status(401), + ) + output = self._read(_config().with_start_date(self._start_date), expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error + + @HttpMocker() + def test_given_rate_limited_when_read_then_retry_and_return_records(self, http_mocker: HttpMocker) -> None: + # Tests backoff/retry with rate limiting + http_mocker.get( + _a_request().with_any_query_params().build(), + [ + a_response_with_status_and_header(429, {"Retry-After": "0.01"}), + _a_response().with_record(_a_record()).build(), + ], + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_http_status_500_then_retry_returns_200_and_extracted_records(self, http_mocker: HttpMocker) -> None: + # Tests retry with 500 status + http_mocker.get( + _a_request().with_any_query_params().build(), + [a_response_with_status_and_header(500, {"Retry-After": "0.01"}), _a_response().with_record(_a_record()).build()], + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_http_status_500_after_max_retries_raises_config_error(self, http_mocker: HttpMocker) -> None: + # Tests 500 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status_and_header(500, {"Retry-After": "0.01"}), + ) + output = self._read(_config(), expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error + + +@freezegun.freeze_time(_NOW.isoformat()) +class IncrementalTest(TestCase): + + # Site Migration Detail stream is a semi-incremental stream and therefore state acts differently than typical declarative incremental implementation -- state is updated to most recent cursor value read + + def setUp(self) -> None: + self._now = _NOW + self._now_in_seconds = int(self._now.timestamp()) + self._start_date = _NOW - timedelta(days=60) + self._start_date_in_seconds = int(self._start_date.timestamp()) + + @staticmethod + def _read(config: ConfigBuilder, state: Dict[str, Any], expecting_exception: bool = False) -> EntrypointOutput: + return _read(config, SyncMode.incremental, state, expecting_exception) + + @HttpMocker() + def test_given_no_initial_state_when_read_then_return_state_based_on_cursor_field(self, http_mocker: HttpMocker) -> None: + # Tests setting state when no initial state is provided + cursor_value = self._start_date_in_seconds + 1 + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record().with_cursor(cursor_value)).build() + ) + output = self._read(_config().with_start_date(self._start_date), _NO_STATE) + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(migrated_at=cursor_value) + + @HttpMocker() + def test_given_initial_state_value_when_read_then_state_is_updated_to_most_recent_cursor_value(self, http_mocker: HttpMocker) -> None: + state_cursor_value = self._start_date_in_seconds + 1 + record_cursor_value = state_cursor_value + 1 + state = StateBuilder().with_stream_state(_STREAM_NAME, {_CURSOR_FIELD: state_cursor_value}).build() + + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record().with_cursor(record_cursor_value)).build(), + ) + + output = self._read(_config().with_start_date(self._start_date), state) + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(migrated_at=record_cursor_value, prior_state={_CURSOR_FIELD: state_cursor_value}) + + @HttpMocker() + def test_given_record_returned_with_cursor_value_before_state_record_is_not_read_and_state_not_updated(self, http_mocker: HttpMocker) -> None: + state_cursor_value = self._start_date_in_seconds + record_cursor_value = self._start_date_in_seconds - 1 + state = StateBuilder().with_stream_state(_STREAM_NAME, {_CURSOR_FIELD: state_cursor_value}).build() + + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record().with_cursor(record_cursor_value)).build(), + ) + + output = self._read(_config().with_start_date(self._start_date), state) + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(migrated_at=state_cursor_value, prior_state={_CURSOR_FIELD: state_cursor_value}) + assert len(output.records) == 0 diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_subscription.py b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_subscription.py new file mode 100644 index 0000000000000..cfaa742fbb6dc --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_subscription.py @@ -0,0 +1,217 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from datetime import datetime, timedelta, timezone +from typing import Any, Dict, Optional +from unittest import TestCase + +import freezegun +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, read +from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse +from airbyte_cdk.test.mock_http.response_builder import ( + FieldPath, + HttpResponseBuilder, + NestedPath, + RecordBuilder, + create_record_builder, + create_response_builder, + find_template, +) +from airbyte_cdk.test.state_builder import StateBuilder +from airbyte_protocol.models import AirbyteStateBlob, ConfiguredAirbyteCatalog, FailureType, StreamDescriptor, SyncMode +from source_chargebee import SourceChargebee + +from .config import ConfigBuilder +from .pagination import ChargebeePaginationStrategy +from .request_builder import ChargebeeRequestBuilder +from .response_builder import a_response_with_status, a_response_with_status_and_header + +_STREAM_NAME = "subscription" +_SITE = "test-site" +_SITE_API_KEY = "test-api-key" +_PRODUCT_CATALOG = "2.0" +_PRIMARY_KEY = "id" +_CURSOR_FIELD = "updated_at" +_NO_STATE = {} +_NOW = datetime.now(timezone.utc) + +def _a_request() -> ChargebeeRequestBuilder: + return ChargebeeRequestBuilder.subscription_endpoint(_SITE, _SITE_API_KEY) + +def _config() -> ConfigBuilder: + return ConfigBuilder().with_site(_SITE).with_site_api_key(_SITE_API_KEY).with_product_catalog(_PRODUCT_CATALOG) + +def _catalog(sync_mode: SyncMode) -> ConfiguredAirbyteCatalog: + return CatalogBuilder().with_stream(_STREAM_NAME, sync_mode).build() + +def _source() -> SourceChargebee: + return SourceChargebee() + +def _a_record() -> RecordBuilder: + return create_record_builder( + find_template(_STREAM_NAME, __file__), + FieldPath("list"), + record_id_path=NestedPath([_STREAM_NAME, _PRIMARY_KEY]), + record_cursor_path=NestedPath([_STREAM_NAME, _CURSOR_FIELD]) + ) + +def _a_response() -> HttpResponseBuilder: + return create_response_builder( + find_template(_STREAM_NAME, __file__), + FieldPath("list"), + pagination_strategy=ChargebeePaginationStrategy() + ) + +def _read( + config_builder: ConfigBuilder, + sync_mode: SyncMode, + state: Optional[Dict[str, Any]] = None, + expecting_exception: bool = False +) -> EntrypointOutput: + catalog = _catalog(sync_mode) + config = config_builder.build() + return read(_source(), config, catalog, state, expecting_exception) + +@freezegun.freeze_time(_NOW.isoformat()) +class FullRefreshTest(TestCase): + + def setUp(self) -> None: + self._now = _NOW + self._now_in_seconds = int(self._now.timestamp()) + self._start_date = _NOW - timedelta(days=28) + self._start_date_in_seconds = int(self._start_date.timestamp()) + + @staticmethod + def _read(config: ConfigBuilder, expecting_exception: bool = False) -> EntrypointOutput: + return _read(config, SyncMode.full_refresh, expecting_exception=expecting_exception) + + @HttpMocker() + def test_given_valid_response_records_are_extracted_and_returned(self, http_mocker: HttpMocker) -> None: + # Tests simple read and record extraction + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record()).with_record(_a_record()).build() + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 2 + + @HttpMocker() + def test_given_multiple_pages_of_records_read_and_returned(self, http_mocker: HttpMocker) -> None: + # Tests pagination + http_mocker.get( + _a_request().with_sort_by_asc(_CURSOR_FIELD).with_include_deleted(True).with_updated_at_btw([self._start_date_in_seconds, self._now_in_seconds]).build(), + _a_response().with_record(_a_record()).with_pagination().build() + ) + http_mocker.get( + _a_request().with_sort_by_asc(_CURSOR_FIELD).with_include_deleted(True).with_updated_at_btw([self._start_date_in_seconds, self._now_in_seconds]).with_offset("[1707076198000,57873868]").build(), + _a_response().with_record(_a_record()).build() + ) + + self._read(_config().with_start_date(self._start_date)) + # HTTPMocker ensures call are performed + + @HttpMocker() + def test_given_records_returned_with_custom_field_transformation(self, http_mocker: HttpMocker) -> None: + # Tests custom field transformation + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record().with_field(NestedPath([_STREAM_NAME, "cf_my_custom_field"]), "my_custom_value")).build() + ) + output = self._read(_config().with_start_date(self._start_date)) + assert output.records[0].record.data["custom_fields"][0]["name"] == "cf_my_custom_field" + assert output.records[0].record.data["custom_fields"][0]["value"] == "my_custom_value" + + @HttpMocker() + def test_given_http_status_400_when_read_then_stream_is_ignored(self, http_mocker: HttpMocker) -> None: + # Tests 400 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status(400) + ) + output = self._read(_config().with_start_date(self._start_date), expecting_exception=True) + assert len(output.get_stream_statuses(f"{_STREAM_NAME}s")) == 0 + + @HttpMocker() + def test_given_http_status_401_when_the_stream_is_incomplete(self, http_mocker: HttpMocker) -> None: + # Test 401 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status(401), + ) + output = self._read(_config().with_start_date(self._start_date), expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error + + @HttpMocker() + def test_given_rate_limited_when_read_then_retry_and_return_records(self, http_mocker: HttpMocker) -> None: + # Tests backoff/retry with rate limiting + http_mocker.get( + _a_request().with_any_query_params().build(), + [ + a_response_with_status_and_header(429, {"Retry-After": "0.01"}), + _a_response().with_record(_a_record()).build(), + ], + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_http_status_500_then_retry_returns_200_and_extracted_records(self, http_mocker: HttpMocker) -> None: + # Tests retry with 500 status + http_mocker.get( + _a_request().with_any_query_params().build(), + [a_response_with_status_and_header(500, {"Retry-After": "0.01"}), _a_response().with_record(_a_record()).build()], + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_http_status_500_after_max_retries_raises_config_error(self, http_mocker: HttpMocker) -> None: + # Tests 500 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status_and_header(500, {"Retry-After": "0.01"}), + ) + output = self._read(_config(), expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error + + +@freezegun.freeze_time(_NOW.isoformat()) +class IncrementalTest(TestCase): + + def setUp(self) -> None: + self._now = _NOW + self._now_in_seconds = int(self._now.timestamp()) + self._start_date = _NOW - timedelta(days=60) + self._start_date_in_seconds = int(self._start_date.timestamp()) + + @staticmethod + def _read(config: ConfigBuilder, state: Dict[str, Any], expecting_exception: bool = False) -> EntrypointOutput: + return _read(config, SyncMode.incremental, state, expecting_exception=expecting_exception) + + @HttpMocker() + def test_given_no_initial_state_when_read_then_return_state_based_on_most_recently_read_slice(self, http_mocker: HttpMocker) -> None: + # Tests setting state when no initial state is provided + cursor_value = self._start_date_in_seconds + 1 + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record().with_cursor(cursor_value)).build() + ) + output = self._read(_config().with_start_date(self._start_date - timedelta(hours=8)), _NO_STATE) + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated_at=cursor_value) + + @HttpMocker() + def test_given_initial_state_use_state_for_query_params(self, http_mocker: HttpMocker) -> None: + # Tests updating query param with state + state_cursor_value = int((self._now - timedelta(days=5)).timestamp()) + record_cursor_value = self._now_in_seconds - 1 + state = StateBuilder().with_stream_state(_STREAM_NAME, {_CURSOR_FIELD: state_cursor_value}).build() + http_mocker.get( + _a_request().with_sort_by_asc(_CURSOR_FIELD).with_include_deleted(True).with_updated_at_btw([state_cursor_value, self._now_in_seconds]).build(), + _a_response().with_record(_a_record().with_cursor(record_cursor_value)).build(), + ) + output = self._read(_config().with_start_date(self._start_date - timedelta(hours=8)), state) + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated_at=record_cursor_value) diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_virtual_bank_account.py b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_virtual_bank_account.py new file mode 100644 index 0000000000000..a1c908f0c2056 --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_virtual_bank_account.py @@ -0,0 +1,206 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from datetime import datetime, timedelta, timezone +from typing import Any, Dict, Optional +from unittest import TestCase + +import freezegun +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, read +from airbyte_cdk.test.mock_http import HttpMocker +from airbyte_cdk.test.mock_http.response_builder import ( + FieldPath, + HttpResponseBuilder, + NestedPath, + RecordBuilder, + create_record_builder, + create_response_builder, + find_template, +) +from airbyte_cdk.test.state_builder import StateBuilder +from airbyte_protocol.models import AirbyteStateBlob, ConfiguredAirbyteCatalog, FailureType, StreamDescriptor, SyncMode +from source_chargebee import SourceChargebee + +from .config import ConfigBuilder +from .pagination import ChargebeePaginationStrategy +from .request_builder import ChargebeeRequestBuilder +from .response_builder import a_response_with_status, a_response_with_status_and_header + +_STREAM_NAME = "virtual_bank_account" +_SITE = "test-site" +_SITE_API_KEY = "test-api-key" +_PRODUCT_CATALOG = "2.0" +_PRIMARY_KEY = "id" +_CURSOR_FIELD = "updated_at" +_NO_STATE = {} +_NOW = datetime.now(timezone.utc) + +def _a_request() -> ChargebeeRequestBuilder: + return ChargebeeRequestBuilder.virtual_bank_account_endpoint(_SITE, _SITE_API_KEY) + +def _config() -> ConfigBuilder: + return ConfigBuilder().with_site(_SITE).with_site_api_key(_SITE_API_KEY).with_product_catalog(_PRODUCT_CATALOG) + +def _catalog(sync_mode: SyncMode) -> ConfiguredAirbyteCatalog: + return CatalogBuilder().with_stream(_STREAM_NAME, sync_mode).build() + +def _source() -> SourceChargebee: + return SourceChargebee() + +def _a_record() -> RecordBuilder: + return create_record_builder( + find_template(_STREAM_NAME, __file__), + FieldPath("list"), + record_id_path=NestedPath([_STREAM_NAME, _PRIMARY_KEY]), + record_cursor_path=NestedPath([_STREAM_NAME, _CURSOR_FIELD]) + ) + +def _a_response() -> HttpResponseBuilder: + return create_response_builder( + find_template(_STREAM_NAME, __file__), + FieldPath("list"), + pagination_strategy=ChargebeePaginationStrategy() + ) + +def _read( + config_builder: ConfigBuilder, + sync_mode: SyncMode, + state: Optional[Dict[str, Any]] = None, + expecting_exception: bool = False +) -> EntrypointOutput: + catalog = _catalog(sync_mode) + config = config_builder.build() + return read(_source(), config, catalog, state, expecting_exception) + +@freezegun.freeze_time(_NOW.isoformat()) +class FullRefreshTest(TestCase): + + def setUp(self) -> None: + self._now = _NOW + self._now_in_seconds = int(self._now.timestamp()) + self._start_date = _NOW - timedelta(days=28) + self._start_date_in_seconds = int(self._start_date.timestamp()) + + @staticmethod + def _read(config: ConfigBuilder, expecting_exception: bool = False) -> EntrypointOutput: + return _read(config, SyncMode.full_refresh, expecting_exception=expecting_exception) + + @HttpMocker() + def test_given_valid_response_records_are_extracted_and_returned(self, http_mocker: HttpMocker) -> None: + # Tests simple read and record extraction + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record()).with_record(_a_record()).build() + ) + output = self._read(_config().with_start_date(self._start_date - timedelta(hours=8))) + assert len(output.records) == 2 + + @HttpMocker() + def test_given_multiple_pages_of_records_read_and_returned(self, http_mocker: HttpMocker) -> None: + # Tests pagination + http_mocker.get( + _a_request().with_sort_by_asc(_CURSOR_FIELD).with_include_deleted(True).with_updated_at_btw([self._start_date_in_seconds, self._now_in_seconds]).build(), + _a_response().with_record(_a_record()).with_pagination().build() + ) + http_mocker.get( + _a_request().with_sort_by_asc(_CURSOR_FIELD).with_include_deleted(True).with_updated_at_btw([self._start_date_in_seconds, self._now_in_seconds]).with_offset("[1707076198000,57873868]").build(), + _a_response().with_record(_a_record()).build() + ) + + self._read(_config().with_start_date(self._start_date)) + # HTTPMocker ensures call are performed + + @HttpMocker() + def test_given_http_status_400_when_read_then_stream_is_ignored(self, http_mocker: HttpMocker) -> None: + # Tests 400 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status(400) + ) + output = self._read(_config().with_start_date(self._start_date), expecting_exception=True) + assert len(output.get_stream_statuses(f"{_STREAM_NAME}s")) == 0 + + + @HttpMocker() + def test_given_http_status_401_when_the_stream_is_incomplete(self, http_mocker: HttpMocker) -> None: + # Test 401 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status(401), + ) + output = self._read(_config().with_start_date(self._start_date), expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error + + @HttpMocker() + def test_given_rate_limited_when_read_then_retry_and_return_records(self, http_mocker: HttpMocker) -> None: + # Tests backoff/retry with rate limiting + http_mocker.get( + _a_request().with_any_query_params().build(), + [ + a_response_with_status_and_header(429, {"Retry-After": "0.01"}), + _a_response().with_record(_a_record()).build(), + ], + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_http_status_500_then_retry_returns_200_and_extracted_records(self, http_mocker: HttpMocker) -> None: + # Tests retry with 500 status + http_mocker.get( + _a_request().with_any_query_params().build(), + [a_response_with_status_and_header(500, {"Retry-After": "0.01"}), _a_response().with_record(_a_record()).build()], + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_http_status_500_after_max_retries_raises_config_error(self, http_mocker: HttpMocker) -> None: + # Tests 500 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status_and_header(500, {"Retry-After": "0.01"}), + ) + output = self._read(_config(), expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error + +@freezegun.freeze_time(_NOW.isoformat()) +class IncrementalTest(TestCase): + + def setUp(self) -> None: + self._now = _NOW + self._now_in_seconds = int(self._now.timestamp()) + self._start_date = _NOW - timedelta(days=60) + self._start_date_in_seconds = int(self._start_date.timestamp()) + + @staticmethod + def _read(config: ConfigBuilder, state: Dict[str, Any], expecting_exception: bool = False) -> EntrypointOutput: + return _read(config, SyncMode.incremental, state, expecting_exception=expecting_exception) + + @HttpMocker() + def test_given_no_initial_state_when_read_then_return_state_based_on_most_recently_read_slice(self, http_mocker: HttpMocker) -> None: + # Tests setting state when no initial state is provided + cursor_value = self._start_date_in_seconds + 1 + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record().with_cursor(cursor_value)).build() + ) + output = self._read(_config().with_start_date(self._start_date - timedelta(hours=8)), _NO_STATE) + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated_at=cursor_value) + + @HttpMocker() + def test_given_initial_state_use_state_for_query_params(self, http_mocker: HttpMocker) -> None: + # Tests updating query param with state + state_cursor_value = int((self._now - timedelta(days=5)).timestamp()) + record_cursor_value = self._now_in_seconds - 1 + state = StateBuilder().with_stream_state(_STREAM_NAME, {_CURSOR_FIELD: state_cursor_value}).build() + http_mocker.get( + _a_request().with_sort_by_asc(_CURSOR_FIELD).with_include_deleted(True).with_updated_at_btw([state_cursor_value, self._now_in_seconds]).build(), + _a_response().with_record(_a_record().with_cursor(record_cursor_value)).build(), + ) + output = self._read(_config().with_start_date(self._start_date - timedelta(hours=8)), state) + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated_at=record_cursor_value) diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/400.json b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/400.json new file mode 100644 index 0000000000000..b28d57efa8f76 --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/400.json @@ -0,0 +1,7 @@ +{ + "message": "Resource has expired", + "api_error_code": "resource_limit_exhausted", + "error_code": "resource_limit_exhausted", + "error_msg": "Resource has expired", + "http_status_code": 400 +} diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/401.json b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/401.json new file mode 100644 index 0000000000000..f491037b2b519 --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/401.json @@ -0,0 +1,7 @@ +{ + "message": "Sorry,authentication failed.The basic authentication header has invalid format.", + "api_error_code": "api_authentication_failed", + "error_code": "api_authentication_wrong_format", + "error_msg": "Sorry,authentication failed.The basic authentication header has invalid format.", + "http_status_code": 401 +} diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/403.json b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/403.json new file mode 100644 index 0000000000000..3a2689b762fed --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/403.json @@ -0,0 +1,7 @@ +{ + "message": "Sorry, authorization failed. The key does not have the required permissions.", + "api_error_code": "api_authorization_failed", + "error_code": "api_authorization_failed", + "error_msg": "Sorry, authorization failed. The key does not have the required permissions.", + "http_status_code": 403 +} diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/429.json b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/429.json new file mode 100644 index 0000000000000..e962e573e293c --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/429.json @@ -0,0 +1,7 @@ +{ + "message": "Sorry, access has been blocked temporarily due to request count exceeding acceptable limits. Please try after some time.", + "api_error_code": "api_request_limit_exceeded", + "error_code": "api_request_limit_exceeded", + "error_msg": "Sorry, access has been blocked temporarily due to request count exceeding acceptable limits. Please try after some time.", + "http_status_code": 429 +} diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/500.json b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/500.json new file mode 100644 index 0000000000000..ad57f127be9e2 --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/500.json @@ -0,0 +1,7 @@ +{ + "message": "Sorry,Something went wrong when trying to process the request.", + "api_error_code": "internal_error", + "error_code": "internal_error", + "error_msg": "Sorry,Something went wrong when trying to process the request.", + "http_status_code": 500 +} diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/addon.json b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/addon.json new file mode 100644 index 0000000000000..90e6f366d97f4 --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/addon.json @@ -0,0 +1,33 @@ +{ + "list": [ + { + "addon": { + "charge_type": "recurring", + "currency_code": "USD", + "enabled_in_portal": true, + "id": "tiered_addon", + "is_shippable": false, + "name": "Tiered Addon", + "object": "addon", + "period": 1, + "period_unit": "month", + "pricing_model": "tiered", + "resource_version": 1517505776000, + "show_description_in_invoices": false, + "show_description_in_quotes": false, + "status": "active", + "taxable": true, + "tiers": [ + { + "ending_unit": 10, + "object": "tier", + "price": 100, + "starting_unit": 1 + } + ], + "type": "tiered", + "updated_at": 1517505776 + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/coupon.json b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/coupon.json new file mode 100644 index 0000000000000..2ed681b35de3c --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/coupon.json @@ -0,0 +1,27 @@ +{ + "list": [ + { + "coupon": { + "apply_discount_on": "not_applicable", + "apply_on": "each_specified_item", + "created_at": 1517495314, + "discount_percentage": 10, + "discount_type": "percentage", + "duration_type": "forever", + "id": "summer_offer", + "item_constraints": [ + { + "constraint": "all", + "item_type": "plan" + } + ], + "name": "Summer Offer", + "object": "coupon", + "redemptions": 0, + "resource_version": 1517495314967, + "status": "active", + "updated_at": 1517495314 + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/customer.json b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/customer.json new file mode 100644 index 0000000000000..23465a7b7d51f --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/customer.json @@ -0,0 +1,28 @@ +{ + "list": [ + { + "customer": { + "allow_direct_debit": false, + "auto_collection": "on", + "card_status": "no_card", + "created_at": 1517505747, + "deleted": false, + "email": "john@test.com", + "excess_payments": 0, + "first_name": "John", + "id": "__test__KyVnHhSBWlC1T2cj", + "last_name": "Doe", + "net_term_days": 0, + "object": "customer", + "pii_cleared": "active", + "preferred_currency_code": "USD", + "promotional_credits": 0, + "refundable_credits": 0, + "resource_version": 1517505747000, + "taxability": "taxable", + "unbilled_charges": 0, + "updated_at": 1517505747 + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/event.json b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/event.json new file mode 100644 index 0000000000000..58587184027c6 --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/event.json @@ -0,0 +1,204 @@ +{ + "list": [ + { + "event": { + "id": "ev_16BPgETyVrQbiGhA", + "occurred_at": 1706822167, + "source": "admin_console", + "user": "sarah@sarah.com", + "object": "event", + "api_version": "v2", + "content": { + "subscription": { + "id": "16BPgETyVrQVHGh1", + "billing_period": 1, + "billing_period_unit": "month", + "customer_id": "sarah", + "status": "active", + "current_term_start": 1702578600, + "current_term_end": 1705256999, + "next_billing_at": 1705257000, + "created_at": 1702645601, + "started_at": 1702578600, + "activated_at": 1702578600, + "created_from_ip": "10.0.0.1", + "updated_at": 1702645601, + "has_scheduled_changes": false, + "channel": "web", + "resource_version": 1702645601793, + "deleted": false, + "object": "subscription", + "currency_code": "INR", + "subscription_items": [ + { + "item_price_id": "cross-train-advanced-INR-1_MONTH", + "item_type": "plan", + "quantity": 1, + "quantity_in_decimal": "1.0000", + "unit_price": 11667, + "unit_price_in_decimal": "116.66667", + "amount": 11667, + "amount_in_decimal": "116.66667", + "free_quantity": 0, + "free_quantity_in_decimal": "0.0000", + "object": "subscription_item" + } + ], + "due_invoices_count": 0, + "mrr": 0, + "has_scheduled_advance_invoices": false, + "override_relationship": false, + "create_pending_invoices": false, + "auto_close_invoices": true, + "business_entity_id": "16CQtCTrgrYwi9n2E" + }, + "customer": { + "id": "sarah", + "auto_collection": "on", + "net_term_days": 0, + "allow_direct_debit": false, + "created_at": 1700038561, + "created_from_ip": "10.0.0.2", + "taxability": "taxable", + "updated_at": 1702645580, + "pii_cleared": "active", + "channel": "web", + "resource_version": 1702645580741, + "deleted": false, + "object": "customer", + "card_status": "valid", + "promotional_credits": 0, + "refundable_credits": 0, + "excess_payments": 0, + "unbilled_charges": 0, + "preferred_currency_code": "INR", + "mrr": 0, + "primary_payment_source_id": "pm_169vujTyVrL5fFDl", + "payment_method": { + "object": "payment_method", + "type": "card", + "reference_id": "tok_169vujTyVrL5LFDk", + "gateway": "chargebee", + "gateway_account_id": "gw_1mk51R4QrLmQtYMht", + "status": "valid" + }, + "business_entity_id": "16CQtCTrgrYwi9n2E", + "tax_providers_fields": [], + "auto_close_invoices": true + }, + "card": { + "status": "valid", + "gateway": "chargebee", + "gateway_account_id": "gw_1mk51R4QrLmQtYMht", + "iin": "411111", + "last4": "1111", + "card_type": "visa", + "funding_type": "credit", + "expiry_month": 12, + "expiry_year": 2024, + "created_at": 1702645580, + "updated_at": 1702645580, + "ip_address": "10.0.0.1", + "resource_version": 1702645580740, + "object": "card", + "masked_number": "************1111", + "customer_id": "boom", + "payment_source_id": "pm_169vujTyVrL5fFDl" + }, + "invoice": { + "id": "203", + "customer_id": "boom", + "subscription_id": "16BPgETyVrQVHGh1", + "recurring": true, + "status": "paid", + "price_type": "tax_exclusive", + "date": 1702578600, + "due_date": 1702578600, + "net_term_days": 0, + "exchange_rate": 83.283543, + "total": 11667, + "amount_paid": 11667, + "amount_adjusted": 0, + "write_off_amount": 0, + "credits_applied": 0, + "amount_due": 0, + "paid_at": 1702645601, + "updated_at": 1702645601, + "resource_version": 1702645601783, + "deleted": false, + "object": "invoice", + "first_invoice": true, + "amount_to_collect": 0, + "round_off_amount": 0, + "new_sales_amount": 11667, + "has_advance_charges": false, + "currency_code": "INR", + "base_currency_code": "USD", + "generated_at": 1702578600, + "is_gifted": false, + "term_finalized": true, + "channel": "web", + "tax": 0, + "line_items": [ + { + "id": "li_16BPgETyVrQWBGh3", + "date_from": 1702578600, + "date_to": 1705256999, + "unit_amount": 11667, + "quantity": 1, + "amount": 11667, + "pricing_model": "per_unit", + "is_taxed": false, + "tax_amount": 0, + "unit_amount_in_decimal": "116.66667", + "quantity_in_decimal": "1.0000", + "amount_in_decimal": "116.66667", + "object": "line_item", + "subscription_id": "16BPgETyVrQVHGh1", + "customer_id": "boom", + "description": "cross-train-advanced-INR-1_MONTH", + "entity_type": "plan_item_price", + "entity_id": "cross-train-advanced-INR-1_MONTH", + "metered": false, + "tax_exempt_reason": "export", + "discount_amount": 0, + "item_level_discount_amount": 0 + } + ], + "sub_total": 11667, + "linked_payments": [ + { + "txn_id": "txn_16BPgETyVrQXVGh4", + "applied_amount": 11667, + "applied_at": 1702645601, + "txn_status": "success", + "txn_date": 1702645601, + "txn_amount": 11667 + } + ], + "applied_credits": [], + "adjustment_credit_notes": [], + "issued_credit_notes": [], + "linked_orders": [], + "dunning_attempts": [], + "notes": [ + { + "note": "You can pay card." + } + ], + "business_entity_id": "16CQtCTrgrYwi9n2E" + } + }, + "event_type": "subscription_created", + "webhook_status": "not_configured", + "webhooks": [ + { + "id": "whv2_Azz5aITsMVdKtVWV", + "webhook_status": "not_applicable", + "object": "webhook" + } + ] + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/hosted_page.json b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/hosted_page.json new file mode 100644 index 0000000000000..7846ef2df8d5e --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/hosted_page.json @@ -0,0 +1,18 @@ +{ + "list": [ + { + "hosted_page": { + "created_at": 1517678804, + "embed": false, + "expires_at": 1517682404, + "id": "__test__yRVH4Pr8siRXJEPsjeJXlcd8Aq1fDqVzd", + "object": "hosted_page", + "resource_version": 1517678804000, + "state": "created", + "type": "claim_gift", + "updated_at": 1517678804, + "url": "https://yourapp.chargebee.com/pages/v3/__test__yRVH4Pr8siRXJEPsjeJXlcd8Aq1fDqVzd/claim_gift" + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/plan.json b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/plan.json new file mode 100644 index 0000000000000..9a3e8adcf359a --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/plan.json @@ -0,0 +1,37 @@ +{ + "list": [ + { + "plan": { + "addon_applicability": "all", + "charge_model": "tiered", + "currency_code": "USD", + "enabled_in_hosted_pages": true, + "enabled_in_portal": true, + "free_quantity": 0, + "giftable": false, + "id": "tiered_plan", + "invoice_name": "sample Tiered Plan", + "is_shippable": false, + "name": "Tiered Plan", + "object": "plan", + "period": 1, + "period_unit": "month", + "pricing_model": "tiered", + "resource_version": 1517505798000, + "show_description_in_invoices": false, + "show_description_in_quotes": false, + "status": "active", + "taxable": true, + "tiers": [ + { + "ending_unit": 10, + "object": "tier", + "price": 100, + "starting_unit": 1 + } + ], + "updated_at": 1517505798 + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/site_migration_detail.json b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/site_migration_detail.json new file mode 100644 index 0000000000000..1a0a3effd3fb1 --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/site_migration_detail.json @@ -0,0 +1,15 @@ +{ + "list": [ + { + "site_migration_detail": { + "entity_id": "__test__KyVnHhSBWT9AW8j", + "entity_id_at_other_site": "__test__KyVnHhSBWT9AW8j", + "entity_type": "customer", + "migrated_at": 1600704658, + "object": "site_migration_detail", + "other_site_name": "mannar", + "status": "moved_in" + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/subscription.json b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/subscription.json new file mode 100644 index 0000000000000..064a2121250d6 --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/subscription.json @@ -0,0 +1,63 @@ +{ + "list": [ + { + "customer": { + "allow_direct_debit": false, + "auto_collection": "off", + "card_status": "no_card", + "created_at": 1612890919, + "deleted": false, + "excess_payments": 0, + "first_name": "John", + "id": "__test__8asukSOXdvg4PD", + "last_name": "Doe", + "net_term_days": 0, + "object": "customer", + "pii_cleared": "active", + "preferred_currency_code": "USD", + "promotional_credits": 0, + "refundable_credits": 0, + "resource_version": 1612890919000, + "taxability": "taxable", + "unbilled_charges": 0, + "updated_at": 1612890919 + }, + "subscription": { + "activated_at": 1612890920, + "billing_period": 1, + "billing_period_unit": "month", + "created_at": 1612890920, + "currency_code": "USD", + "current_term_end": 1615310120, + "current_term_start": 1612890920, + "customer_id": "__test__8asukSOXdvg4PD", + "deleted": false, + "due_invoices_count": 1, + "due_since": 1612890920, + "has_scheduled_changes": false, + "id": "__test__8asukSOXdvliPG", + "mrr": 0, + "next_billing_at": 1615310120, + "object": "subscription", + "remaining_billing_cycles": 1, + "resource_version": 1612890920000, + "started_at": 1612890920, + "status": "active", + "subscription_items": [ + { + "amount": 1000, + "billing_cycles": 1, + "free_quantity": 0, + "item_price_id": "basic-USD", + "item_type": "plan", + "object": "subscription_item", + "quantity": 1, + "unit_price": 1000 + } + ], + "total_dues": 1100, + "updated_at": 1612890920 + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/virtual_bank_account.json b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/virtual_bank_account.json new file mode 100644 index 0000000000000..ac8667ffee032 --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/virtual_bank_account.json @@ -0,0 +1,24 @@ +{ + "list": [ + { + "virtual_bank_account": { + "account_number": "test_5a576cb69dc2", + "bank_name": "TEST BANK", + "created_at": 1517501396, + "customer_id": "__test__KyVnHhSBWSvsr5M", + "deleted": false, + "email": "Duncan@ac.com", + "gateway": "stripe", + "gateway_account_id": "gw___test__KyVnGlSBWSv3GHt", + "id": "vba___test__KyVnHhSBWSw7J5O", + "object": "virtual_bank_account", + "reference_id": "cus_I57FLcFhampr4H/src_1HUx16Jv9j0DyntJh6X59egJ", + "resource_version": 1517501396000, + "routing_number": "110000000", + "scheme": "ach_credit", + "swift_code": "TSTEZ122", + "updated_at": 1705697624 + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/test_component.py b/airbyte-integrations/connectors/source-chargebee/unit_tests/test_component.py index 2d7752d6dabf5..431370e560cbf 100644 --- a/airbyte-integrations/connectors/source-chargebee/unit_tests/test_component.py +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/test_component.py @@ -1,10 +1,8 @@ # # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # - -from typing import Any, MutableMapping - import pytest +from airbyte_cdk.sources.declarative.types import Record, StreamSlice from source_chargebee.components import CustomFieldTransformation, IncrementalSingleSliceCursor @@ -17,8 +15,6 @@ { "pk": 1, "name": "example", - "cf_field1": "val1", - "cf_field2": "val2", "custom_fields": [{"name": "cf_field1", "value": "val1"}, {"name": "cf_field2", "value": "val2"}], }, ), @@ -30,16 +26,27 @@ def test_field_transformation(record, expected_record): transformed_record = transformer.transform(record) assert transformed_record == expected_record -def test_slicer(): +@pytest.mark.parametrize( + "record_data, expected", + [ + ({"pk": 1, "name": "example", "updated_at": 1662459011}, True), + ] +) +def test_slicer(record_data, expected): date_time_dict = {"updated_at": 1662459010} - slicer = IncrementalSingleSliceCursor(config={}, parameters={}, cursor_field="updated_at") - slicer.close_slice(date_time_dict, date_time_dict) - assert slicer.get_stream_state() == date_time_dict + new_state = {"updated_at": 1662459011} + slicer = IncrementalSingleSliceCursor(cursor_field="updated_at", config={}, parameters={}) + stream_slice = StreamSlice(partition={}, cursor_slice=date_time_dict) + record = Record(data=record_data, associated_slice=stream_slice) + slicer.observe(StreamSlice(partition={}, cursor_slice=date_time_dict), record) + slicer.close_slice(stream_slice) + assert slicer.get_stream_state() == new_state assert slicer.get_request_headers() == {} assert slicer.get_request_body_data() == {} assert slicer.get_request_params() == {} assert slicer.get_request_body_json() == {} + @pytest.mark.parametrize( "first_record, second_record, expected", [ @@ -84,4 +91,4 @@ def test_stream_slices(): slicer = IncrementalSingleSliceCursor(config={}, parameters={}, cursor_field="updated_at") stream_slices_instance = slicer.stream_slices() actual = next(stream_slices_instance) - assert actual == {} \ No newline at end of file + assert actual == {} diff --git a/airbyte-integrations/connectors/source-coda/Dockerfile b/airbyte-integrations/connectors/source-coda/Dockerfile deleted file mode 100644 index 681122ca5ce29..0000000000000 --- a/airbyte-integrations/connectors/source-coda/Dockerfile +++ /dev/null @@ -1,38 +0,0 @@ -FROM python:3.9.11-alpine3.15 as base - -# build and load all requirements -FROM base as builder -WORKDIR /airbyte/integration_code - -# upgrade pip to the latest version -RUN apk --no-cache upgrade \ - && pip install --upgrade pip \ - && apk --no-cache add tzdata build-base - - -COPY setup.py ./ -# install necessary packages to a temporary folder -RUN pip install --prefix=/install . - -# build a clean environment -FROM base -WORKDIR /airbyte/integration_code - -# copy all loaded and built libraries to a pure basic image -COPY --from=builder /install /usr/local -# add default timezone settings -COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime -RUN echo "Etc/UTC" > /etc/timezone - -# bash is installed for more convenient debugging. -RUN apk --no-cache add bash - -# copy payload code only -COPY main.py ./ -COPY source_coda ./source_coda - -ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] - -LABEL io.airbyte.version=1.2.0 -LABEL io.airbyte.name=airbyte/source-coda diff --git a/airbyte-integrations/connectors/source-coda/README.md b/airbyte-integrations/connectors/source-coda/README.md index a38c1310f4d20..848b2fc3d7696 100644 --- a/airbyte-integrations/connectors/source-coda/README.md +++ b/airbyte-integrations/connectors/source-coda/README.md @@ -1,69 +1,55 @@ -# Coda Source +# Coda source connector + This is the repository for the Coda source connector, written in Python. -For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/coda). +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.com/integrations/sources/coda). ## Local development ### Prerequisites -**To iterate on this connector, make sure to complete this prerequisites section.** - -#### Minimum Python version required `= 3.9.0` +* Python (~=3.9) +* Poetry (~=1.7) - installation instructions [here](https://python-poetry.org/docs/#installation) -#### Build & Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python -m venv .venv -``` -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt -pip install '.[tests]' +### Installing the connector +From this connector directory, run: +```bash +poetry install --with dev ``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. -#### Create credentials -**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/coda) +### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/sources/coda) to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_coda/spec.yaml` file. Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. -See `integration_tests/sample_config.json` for a sample config file. +See `sample_files/sample_config.json` for a sample config file. -**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source coda test creds` -and place them into `secrets/config.json`. ### Locally running the connector ``` -python main.py spec -python main.py check --config secrets/config.json -python main.py discover --config secrets/config.json -python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +poetry run source-coda spec +poetry run source-coda check --config secrets/config.json +poetry run source-coda discover --config secrets/config.json +poetry run source-coda read --config secrets/config.json --catalog sample_files/configured_catalog.json ``` -### Locally running the connector docker image - +### Running unit tests +To run unit tests locally, from the connector directory run: +``` +poetry run pytest unit_tests +``` -#### Build -**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):** +### Building the docker image +1. Install [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) +2. Run the following command to build the docker image: ```bash airbyte-ci connectors --name=source-coda build ``` -An image will be built with the tag `airbyte/source-coda:dev`. +An image will be available on your host with the tag `airbyte/source-coda:dev`. -**Via `docker build`:** -```bash -docker build -t airbyte/source-coda:dev . -``` -#### Run +### Running as a docker container Then run any of the connector commands as follows: ``` docker run --rm airbyte/source-coda:dev spec @@ -72,29 +58,34 @@ docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-coda:dev discover --co docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-coda:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json ``` -## Testing +### Running our CI test suite You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): ```bash airbyte-ci connectors --name=source-coda test ``` ### Customizing acceptance Tests -Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. +Customize `acceptance-test-config.yml` file to configure acceptance tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. -## Dependency Management -All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. -We split dependencies between two groups, dependencies that are: -* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. -* required for the testing need to go to `TEST_REQUIREMENTS` list +### Dependency Management +All of your dependencies should be managed via Poetry. +To add a new dependency, run: +```bash +poetry add +``` + +Please commit the changes to `pyproject.toml` and `poetry.lock` files. -### Publishing a new version of the connector +## Publishing a new version of the connector You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? 1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-coda test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). +2. Bump the connector version (please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors)): + - bump the `dockerImageTag` value in in `metadata.yaml` + - bump the `version` value in `pyproject.toml` 3. Make sure the `metadata.yaml` content is up to date. -4. Make the connector documentation and its changelog is up to date (`docs/integrations/sources/coda.md`). +4. Make sure the connector documentation and its changelog is up to date (`docs/integrations/sources/coda.md`). 5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). 6. Pat yourself on the back for being an awesome contributor. 7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. - +8. Once your PR is merged, the new version of the connector will be automatically published to Docker Hub and our connector registry. \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-coda/metadata.yaml b/airbyte-integrations/connectors/source-coda/metadata.yaml index d32bea1373c3c..6afb0f3555272 100644 --- a/airbyte-integrations/connectors/source-coda/metadata.yaml +++ b/airbyte-integrations/connectors/source-coda/metadata.yaml @@ -1,33 +1,35 @@ data: + ab_internal: + ql: 100 + sl: 100 allowedHosts: hosts: - https://coda.io/ - remoteRegistries: - pypi: - enabled: true - packageName: airbyte-source-coda - registries: - oss: - enabled: true - cloud: - enabled: true + connectorBuildOptions: + baseImage: docker.io/airbyte/python-connector-base:1.2.0@sha256:c22a9d97464b69d6ef01898edf3f8612dc11614f05a84984451dde195f337db9 connectorSubtype: api connectorType: source definitionId: 27f910fd-f832-4b2e-bcfd-6ab342e434d8 - dockerImageTag: 1.2.0 + dockerImageTag: 1.2.1 dockerRepository: airbyte/source-coda + documentationUrl: https://docs.airbyte.com/integrations/sources/coda githubIssueLabel: source-coda icon: coda.svg license: MIT name: Coda + registries: + cloud: + enabled: true + oss: + enabled: true releaseDate: 2023-08-19 releaseStage: alpha + remoteRegistries: + pypi: + enabled: true + packageName: airbyte-source-coda supportLevel: community - documentationUrl: https://docs.airbyte.com/integrations/sources/coda tags: - language:python - cdk:low-code - ab_internal: - sl: 100 - ql: 100 metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-coda/poetry.lock b/airbyte-integrations/connectors/source-coda/poetry.lock new file mode 100644 index 0000000000000..9cd31bde97cd3 --- /dev/null +++ b/airbyte-integrations/connectors/source-coda/poetry.lock @@ -0,0 +1,1014 @@ +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. + +[[package]] +name = "airbyte-cdk" +version = "0.78.6" +description = "A framework for writing Airbyte Connectors." +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "airbyte_cdk-0.78.6-py3-none-any.whl", hash = "sha256:e5f44c6da6d5b5d6f3f6a7f41a3f4a5e2dfc6fefb4c6823af6302c34c6fb4a87"}, + {file = "airbyte_cdk-0.78.6.tar.gz", hash = "sha256:0178f3cefa705f600d51f09e1313024a89cd1c99f2f1f796e8e0181d8e02ad2f"}, +] + +[package.dependencies] +airbyte-protocol-models = "0.5.1" +backoff = "*" +cachetools = "*" +Deprecated = ">=1.2,<1.3" +dpath = ">=2.0.1,<2.1.0" +genson = "1.2.2" +isodate = ">=0.6.1,<0.7.0" +Jinja2 = ">=3.1.2,<3.2.0" +jsonref = ">=0.2,<0.3" +jsonschema = ">=3.2.0,<3.3.0" +pendulum = "<3.0.0" +pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" +python-dateutil = "*" +PyYAML = ">=6.0.1,<7.0.0" +requests = "*" +requests_cache = "*" +wcmatch = "8.4" + +[package.extras] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] +vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] + +[[package]] +name = "airbyte-protocol-models" +version = "0.5.1" +description = "Declares the Airbyte Protocol." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, + {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, +] + +[package.dependencies] +pydantic = ">=1.9.2,<2.0.0" + +[[package]] +name = "atomicwrites" +version = "1.4.1" +description = "Atomic file writes." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, +] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "bracex" +version = "2.4" +description = "Bash style brace expander." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + +[[package]] +name = "cachetools" +version = "5.3.3" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "dpath" +version = "2.0.8" +description = "Filesystem-like pathing and searching for dictionaries" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, + {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "genson" +version = "1.2.2" +description = "GenSON is a powerful, user-friendly JSON Schema generator." +optional = false +python-versions = "*" +files = [ + {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, +] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jsonref" +version = "0.2" +description = "An implementation of JSON Reference for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, +] + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0" +setuptools = "*" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "packaging" +version = "24.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, +] + +[[package]] +name = "pendulum" +version = "2.1.2" +description = "Python datetimes made easy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, +] + +[package.dependencies] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] + +[[package]] +name = "pydantic" +version = "1.10.14" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, + {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, + {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, + {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, + {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, + {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, + {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, + {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, + {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, + {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + +[[package]] +name = "pyrsistent" +version = "0.20.0" +description = "Persistent/Functional/Immutable data structures" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, +] + +[[package]] +name = "pytest" +version = "6.2.5" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, + {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, +] + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +toml = "*" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] + +[[package]] +name = "pytest-mock" +version = "3.14.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, +] + +[package.dependencies] +pytest = ">=6.2.5" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-cache" +version = "1.2.0" +description = "A persistent cache for python requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, +] + +[package.dependencies] +attrs = ">=21.2" +cattrs = ">=22.2" +platformdirs = ">=2.5" +requests = ">=2.22" +url-normalize = ">=1.4" +urllib3 = ">=1.25.5" + +[package.extras] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] +bson = ["bson (>=0.5)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] +dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] +json = ["ujson (>=5.4)"] +mongodb = ["pymongo (>=3)"] +redis = ["redis (>=3)"] +security = ["itsdangerous (>=2.0)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "setuptools" +version = "69.2.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "typing-extensions" +version = "4.10.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, +] + +[[package]] +name = "url-normalize" +version = "1.4.3" +description = "URL normalization for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, + {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wcmatch" +version = "8.4" +description = "Wildcard/glob file name matcher." +optional = false +python-versions = ">=3.7" +files = [ + {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, + {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, +] + +[package.dependencies] +bracex = ">=2.1.1" + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9,<3.12" +content-hash = "b45c7da2b07fd3a9a79c2ffac21f4db7af48b0884a6e1c9f41f17035161a5fab" diff --git a/airbyte-integrations/connectors/source-coda/pyproject.toml b/airbyte-integrations/connectors/source-coda/pyproject.toml new file mode 100644 index 0000000000000..3cd6a82e821f6 --- /dev/null +++ b/airbyte-integrations/connectors/source-coda/pyproject.toml @@ -0,0 +1,27 @@ +[build-system] +requires = [ "poetry-core>=1.0.0",] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +version = "1.2.1" +name = "source-coda" +description = "Source implementation for Coda." +authors = [ "Airbyte ",] +license = "MIT" +readme = "README.md" +documentation = "https://docs.airbyte.com/integrations/sources/coda" +homepage = "https://airbyte.com" +repository = "https://github.com/airbytehq/airbyte" +[[tool.poetry.packages]] +include = "source_coda" + +[tool.poetry.dependencies] +python = "^3.9,<3.12" +airbyte-cdk = "^0" + +[tool.poetry.scripts] +source-coda = "source_coda.run:run" + +[tool.poetry.group.dev.dependencies] +pytest-mock = "^3.6.1" +pytest = "^6.2" diff --git a/airbyte-integrations/connectors/source-coda/setup.py b/airbyte-integrations/connectors/source-coda/setup.py deleted file mode 100644 index 92e0b6526fe7c..0000000000000 --- a/airbyte-integrations/connectors/source-coda/setup.py +++ /dev/null @@ -1,45 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = [ - "airbyte-cdk~=0.1", -] - -TEST_REQUIREMENTS = [ - "pytest~=6.2", - "pytest-mock~=3.6.1", -] - -setup( - entry_points={ - "console_scripts": [ - "source-coda=source_coda.run:run", - ], - }, - name="source_coda", - description="Source implementation for Coda.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={ - "": [ - # Include yaml files in the package (if any) - "*.yml", - "*.yaml", - # Include all json files in the package, up to 4 levels deep - "*.json", - "*/*.json", - "*/*/*.json", - "*/*/*/*.json", - "*/*/*/*/*.json", - ] - }, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/source-coda/source_coda/manifest.yaml b/airbyte-integrations/connectors/source-coda/source_coda/manifest.yaml index 2cae1c1e38f77..337e3b4e0fb47 100644 --- a/airbyte-integrations/connectors/source-coda/source_coda/manifest.yaml +++ b/airbyte-integrations/connectors/source-coda/source_coda/manifest.yaml @@ -53,7 +53,7 @@ definitions: type: "DefaultPaginator" pagination_strategy: type: "CursorPagination" - cursor_value: "{{ last_records['href'] }}" + cursor_value: "{{ last_record['href'] }}" page_token_option: type: "RequestPath" field_name: "from" diff --git a/airbyte-integrations/connectors/source-confluence/Dockerfile b/airbyte-integrations/connectors/source-confluence/Dockerfile deleted file mode 100644 index cf556f1a8c5de..0000000000000 --- a/airbyte-integrations/connectors/source-confluence/Dockerfile +++ /dev/null @@ -1,38 +0,0 @@ -FROM python:3.9.11-alpine3.15 as base - -# build and load all requirements -FROM base as builder -WORKDIR /airbyte/integration_code - -# upgrade pip to the latest version -RUN apk --no-cache upgrade \ - && pip install --upgrade pip \ - && apk --no-cache add tzdata build-base - - -COPY setup.py ./ -# install necessary packages to a temporary folder -RUN pip install --prefix=/install . - -# build a clean environment -FROM base -WORKDIR /airbyte/integration_code - -# copy all loaded and built libraries to a pure basic image -COPY --from=builder /install /usr/local -# add default timezone settings -COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime -RUN echo "Etc/UTC" > /etc/timezone - -# bash is installed for more convenient debugging. -RUN apk --no-cache add bash - -# copy payload code only -COPY main.py ./ -COPY source_confluence ./source_confluence - -ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] - -LABEL io.airbyte.version=0.2.0 -LABEL io.airbyte.name=airbyte/source-confluence diff --git a/airbyte-integrations/connectors/source-confluence/acceptance-test-config.yml b/airbyte-integrations/connectors/source-confluence/acceptance-test-config.yml index a43c6f34c3048..9b3f20d8885c4 100644 --- a/airbyte-integrations/connectors/source-confluence/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-confluence/acceptance-test-config.yml @@ -1,6 +1,7 @@ # See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) # for more information about how to configure these tests connector_image: airbyte/source-confluence:dev +test_strictness_level: low acceptance_tests: spec: tests: @@ -18,9 +19,6 @@ acceptance_tests: tests: - config_path: "secrets/config.json" configured_catalog_path: "integration_tests/configured_catalog.json" - expect_records: - path: "integration_tests/expected_records.jsonl" - fail_on_extra_columns: false full_refresh: tests: - config_path: "secrets/config.json" diff --git a/airbyte-integrations/connectors/source-confluence/metadata.yaml b/airbyte-integrations/connectors/source-confluence/metadata.yaml index cef482a1cbfc7..7225de02c39ae 100644 --- a/airbyte-integrations/connectors/source-confluence/metadata.yaml +++ b/airbyte-integrations/connectors/source-confluence/metadata.yaml @@ -16,8 +16,10 @@ data: enabled: true connectorSubtype: api connectorType: source + connectorBuildOptions: + baseImage: docker.io/airbyte/python-connector-base:1.2.0@sha256:c22a9d97464b69d6ef01898edf3f8612dc11614f05a84984451dde195f337db9 definitionId: cf40a7f8-71f8-45ce-a7fa-fca053e4028c - dockerImageTag: 0.2.0 + dockerImageTag: 0.2.1 dockerRepository: airbyte/source-confluence documentationUrl: https://docs.airbyte.com/integrations/sources/confluence githubIssueLabel: source-confluence diff --git a/airbyte-integrations/connectors/source-confluence/poetry.lock b/airbyte-integrations/connectors/source-confluence/poetry.lock new file mode 100644 index 0000000000000..018562e5170e9 --- /dev/null +++ b/airbyte-integrations/connectors/source-confluence/poetry.lock @@ -0,0 +1,1051 @@ +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. + +[[package]] +name = "airbyte-cdk" +version = "0.73.0" +description = "A framework for writing Airbyte Connectors." +optional = false +python-versions = ">=3.9" +files = [ + {file = "airbyte-cdk-0.73.0.tar.gz", hash = "sha256:a03e0265a8a4afb1378d285993624659d9f481404aaf69cf7c0a5ddad3568ea2"}, + {file = "airbyte_cdk-0.73.0-py3-none-any.whl", hash = "sha256:339e42a7602461073a69bf0c4e11be26a7eea3157def43ffecdf9d0d73f32c6f"}, +] + +[package.dependencies] +airbyte-protocol-models = "0.5.1" +backoff = "*" +cachetools = "*" +Deprecated = ">=1.2,<2.0" +dpath = ">=2.0.1,<2.1.0" +genson = "1.2.2" +isodate = ">=0.6.1,<0.7.0" +Jinja2 = ">=3.1.2,<3.2.0" +jsonref = ">=0.2,<1.0" +jsonschema = ">=3.2.0,<3.3.0" +pendulum = "<3.0.0" +pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" +python-dateutil = "*" +PyYAML = ">=6.0.1" +requests = "*" +requests-cache = "*" +wcmatch = "8.4" + +[package.extras] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] +vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] + +[[package]] +name = "airbyte-protocol-models" +version = "0.5.1" +description = "Declares the Airbyte Protocol." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, + {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, +] + +[package.dependencies] +pydantic = ">=1.9.2,<2.0.0" + +[[package]] +name = "atomicwrites" +version = "1.4.1" +description = "Atomic file writes." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, +] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "bracex" +version = "2.4" +description = "Bash style brace expander." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + +[[package]] +name = "cachetools" +version = "5.3.3" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "dpath" +version = "2.0.8" +description = "Filesystem-like pathing and searching for dictionaries" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, + {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "genson" +version = "1.2.2" +description = "GenSON is a powerful, user-friendly JSON Schema generator." +optional = false +python-versions = "*" +files = [ + {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, +] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jsonref" +version = "0.3.0" +description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." +optional = false +python-versions = ">=3.3,<4.0" +files = [ + {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, + {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, +] + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0" +setuptools = "*" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "packaging" +version = "24.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, +] + +[[package]] +name = "pendulum" +version = "2.1.2" +description = "Python datetimes made easy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, +] + +[package.dependencies] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] + +[[package]] +name = "pydantic" +version = "1.10.14" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, + {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, + {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, + {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, + {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, + {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, + {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, + {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, + {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, + {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + +[[package]] +name = "pyrsistent" +version = "0.20.0" +description = "Persistent/Functional/Immutable data structures" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, +] + +[[package]] +name = "pytest" +version = "6.2.5" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, + {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, +] + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +toml = "*" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] + +[[package]] +name = "pytest-mock" +version = "3.14.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, +] + +[package.dependencies] +pytest = ">=6.2.5" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-cache" +version = "1.2.0" +description = "A persistent cache for python requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, +] + +[package.dependencies] +attrs = ">=21.2" +cattrs = ">=22.2" +platformdirs = ">=2.5" +requests = ">=2.22" +url-normalize = ">=1.4" +urllib3 = ">=1.25.5" + +[package.extras] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] +bson = ["bson (>=0.5)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] +dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] +json = ["ujson (>=5.4)"] +mongodb = ["pymongo (>=3)"] +redis = ["redis (>=3)"] +security = ["itsdangerous (>=2.0)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "requests-mock" +version = "1.12.0" +description = "Mock out responses from the requests package" +optional = false +python-versions = "*" +files = [ + {file = "requests-mock-1.12.0.tar.gz", hash = "sha256:4e34f2a2752f0b78397fb414526605d95fcdeab021ac1f26d18960e7eb41f6a8"}, + {file = "requests_mock-1.12.0-py2.py3-none-any.whl", hash = "sha256:4f6fdf956de568e0bac99eee4ad96b391c602e614cc0ad33e7f5c72edd699e70"}, +] + +[package.dependencies] +requests = ">=2.22,<3" + +[package.extras] +fixture = ["fixtures"] + +[[package]] +name = "responses" +version = "0.13.4" +description = "A utility library for mocking out the `requests` Python library." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "responses-0.13.4-py2.py3-none-any.whl", hash = "sha256:d8d0f655710c46fd3513b9202a7f0dcedd02ca0f8cf4976f27fa8ab5b81e656d"}, + {file = "responses-0.13.4.tar.gz", hash = "sha256:9476775d856d3c24ae660bbebe29fb6d789d4ad16acd723efbfb6ee20990b899"}, +] + +[package.dependencies] +requests = ">=2.0" +six = "*" +urllib3 = ">=1.25.10" + +[package.extras] +tests = ["coverage (>=3.7.1,<6.0.0)", "flake8", "mypy", "pytest (>=4.6)", "pytest (>=4.6,<5.0)", "pytest-cov", "pytest-localserver", "types-mock", "types-requests", "types-six"] + +[[package]] +name = "setuptools" +version = "69.2.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "typing-extensions" +version = "4.10.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, +] + +[[package]] +name = "url-normalize" +version = "1.4.3" +description = "URL normalization for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, + {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wcmatch" +version = "8.4" +description = "Wildcard/glob file name matcher." +optional = false +python-versions = ">=3.7" +files = [ + {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, + {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, +] + +[package.dependencies] +bracex = ">=2.1.1" + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9,<3.12" +content-hash = "148e0faeecb4dd4fa984afe32de3eed1921426977c15e7bf2de3c18f75ad700a" diff --git a/airbyte-integrations/connectors/source-confluence/pyproject.toml b/airbyte-integrations/connectors/source-confluence/pyproject.toml new file mode 100644 index 0000000000000..ddeea4160686f --- /dev/null +++ b/airbyte-integrations/connectors/source-confluence/pyproject.toml @@ -0,0 +1,30 @@ +[build-system] +requires = [ "poetry-core>=1.0.0",] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +version = "0.2.1" +name = "source-confluence" +description = "Source implementation for Confluence." +authors = [ "Airbyte ",] +license = "MIT" +readme = "README.md" +documentation = "https://docs.airbyte.com/integrations/sources/confluence" +homepage = "https://airbyte.com" +repository = "https://github.com/airbytehq/airbyte" +[[tool.poetry.packages]] +include = "source_confluence" + +[tool.poetry.dependencies] +python = "^3.9,<3.12" +airbyte-cdk = "^0.73.0" + + +[tool.poetry.scripts] +source-confluence = "source_confluence.run:run" + +[tool.poetry.group.dev.dependencies] +requests-mock = "^1.9.3" +pytest-mock = "^3.6.1" +pytest = "^6.2" +responses = "^0.13.3" diff --git a/airbyte-integrations/connectors/source-confluence/setup.py b/airbyte-integrations/connectors/source-confluence/setup.py deleted file mode 100644 index 993c131b35f92..0000000000000 --- a/airbyte-integrations/connectors/source-confluence/setup.py +++ /dev/null @@ -1,47 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = [ - "airbyte-cdk~=0.1", -] - -TEST_REQUIREMENTS = [ - "requests-mock~=1.9.3", - "pytest~=6.1", - "pytest-mock~=3.6.1", - "responses~=0.13.3", -] - -setup( - entry_points={ - "console_scripts": [ - "source-confluence=source_confluence.run:run", - ], - }, - name="source_confluence", - description="Source implementation for Confluence.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={ - "": [ - # Include yaml files in the package (if any) - "*.yml", - "*.yaml", - # Include all json files in the package, up to 4 levels deep - "*.json", - "*/*.json", - "*/*/*.json", - "*/*/*/*.json", - "*/*/*/*/*.json", - ] - }, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/source-confluence/source_confluence/manifest.yaml b/airbyte-integrations/connectors/source-confluence/source_confluence/manifest.yaml index 1af4e0e43f9be..4bf251c57efef 100644 --- a/airbyte-integrations/connectors/source-confluence/source_confluence/manifest.yaml +++ b/airbyte-integrations/connectors/source-confluence/source_confluence/manifest.yaml @@ -75,8 +75,7 @@ definitions: requester: $ref: "#/definitions/requester" request_parameters: - expand: >- - ["history","history.lastUpdated","history.previousVersion","history.contributors","restrictions.read.restrictions.user","version","descendants.comment","body","body.storage","body.view",] + expand: "history,history.lastUpdated,history.previousVersion,history.contributors,restrictions.read.restrictions.user,version,descendants.comment,body,body.storage,body.view" primary_key: "id" $parameters: name: "blog_posts" @@ -101,8 +100,7 @@ definitions: requester: $ref: "#/definitions/requester" request_parameters: - expand: >- - ["history","history.lastUpdated","history.previousVersion","history.contributors","restrictions.read.restrictions.user","version","descendants.comment","body","body.storage","body.view",] + expand: "history,history.lastUpdated,history.previousVersion,history.contributors,restrictions.read.restrictions.user,version,descendants.comment,body,body.storage,body.view" primary_key: "id" $parameters: name: "pages" @@ -115,7 +113,7 @@ definitions: requester: $ref: "#/definitions/requester" request_parameters: - expand: '["permissions","icon","description.plain","description.view"]' + expand: "permissions,icon,description.plain,description.view" primary_key: "id" $parameters: name: "space" diff --git a/airbyte-integrations/connectors/source-declarative-manifest/README.md b/airbyte-integrations/connectors/source-declarative-manifest/README.md new file mode 100644 index 0000000000000..f7cfc6b502b1a --- /dev/null +++ b/airbyte-integrations/connectors/source-declarative-manifest/README.md @@ -0,0 +1,84 @@ +# Declarative-Manifest source connector + +This is the repository for the Declarative-Manifest source connector, written in Python. +The declarative manifest source connector is a special connector that can create an arbitrary source +connector from a declarative manifest file. This allows users to create a source connector without writing any code. + +**Note**: This connector is managed by the Airbyte Python CDK release process. It can be run as a standalone connector +in Docker and PyAirbyte, but is not yet meant to be run in the platform as a standalone connector. This source is +an interface to the low-code CDK and as such, should not be modified without a corresponding CDK change. + +## Local development + +### Prerequisites +* Python (~=3.9) +* Poetry (~=1.7) - installation instructions [here](https://python-poetry.org/docs/#installation) + + +### Installing the connector +From this connector directory, run: +```bash +poetry install +``` + + +### Create credentials +The credentials for source-declarative-manifest are a little different. Your `config` will need to contain the +injected declarative manifest, as indicated in the `spec`. It will also need to contain the fields that the spec +coming out of the manifest requires. An example is available in `integration_tests/pokeapi_config.json`. To use +this example in the following instructions, copy this file to `secrets/config.json`. + + +### Locally running the connector +``` +poetry run source-declarative-manifest spec +poetry run source-declarative-manifest check --config secrets/config.json +poetry run source-declarative-manifest discover --config secrets/config.json +poetry run source-declarative-manifest read --config secrets/config.json --catalog sample_files/configured_catalog.json +``` + +### Running unit tests +To run unit tests locally, from the connector directory run: +``` +poetry run pytest unit_tests +``` + +### Building the docker image +1. Install [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) +2. Run the following command to build the docker image: +```bash +airbyte-ci connectors --name=source-declarative-manifest build +``` + +An image will be available on your host with the tag `airbyte/source-declarative-manifest:dev`. + + +### Running as a docker container +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-declarative-manifest:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-declarative-manifest:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-declarative-manifest:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-declarative-manifest:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` + +### Running our CI test suite +You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): +```bash +airbyte-ci connectors --name=source-declarative-manifest test +``` +This source does not currently pass the full test suite. + + +### Dependency Management +The manifest declarative source is built to be an interface to the low-code CDK source. This means that +this source should not have any production dependencies other than the Airbyte Python CDK. If for some reason +you feel that a new dependency is needed, you likely want to add it to the CDK instead. It is expected +that a given version of the source-declarative-manifest connector corresponds to the same version in +its CDK dependency. + + +## Publishing a new version of the connector +New versions of this connector should only be published (automatically) via the manual Airbyte CDK release process. +If you want to make a change to this connector that is not a result of a CDK change and a corresponding +CDK dependency bump, please reach out to the Connector Extensibility team for guidance. \ No newline at end of file diff --git a/airbyte-lib/tests/integration_tests/fixtures/source-test/source_test/__init__.py b/airbyte-integrations/connectors/source-declarative-manifest/__init__.py similarity index 100% rename from airbyte-lib/tests/integration_tests/fixtures/source-test/source_test/__init__.py rename to airbyte-integrations/connectors/source-declarative-manifest/__init__.py diff --git a/airbyte-integrations/connectors/source-declarative-manifest/integration_tests/pokeapi_config.json b/airbyte-integrations/connectors/source-declarative-manifest/integration_tests/pokeapi_config.json new file mode 100644 index 0000000000000..54133f883b273 --- /dev/null +++ b/airbyte-integrations/connectors/source-declarative-manifest/integration_tests/pokeapi_config.json @@ -0,0 +1,973 @@ +{ + "__injected_declarative_manifest": { + "version": "0.29.0", + "definitions": { + "selector": { + "type": "RecordSelector", + "extractor": { + "type": "DpathExtractor", + "field_path": [] + } + }, + "requester": { + "type": "HttpRequester", + "url_base": "https://pokeapi.co/api/v2/pokemon", + "http_method": "GET", + "authenticator": { + "type": "NoAuth" + } + }, + "retriever": { + "type": "SimpleRetriever", + "record_selector": { + "$ref": "#/definitions/selector" + }, + "paginator": { + "type": "NoPagination" + }, + "requester": { + "$ref": "#/definitions/requester" + } + }, + "base_stream": { + "type": "DeclarativeStream", + "retriever": { + "$ref": "#/definitions/retriever" + } + }, + "pokemon_stream": { + "$ref": "#/definitions/base_stream", + "name": "pokemon", + "primary_key": "id", + "$parameters": { + "path": "/{{config['pokemon_name']}}" + } + } + }, + "streams": ["#/definitions/pokemon_stream"], + "check": { + "type": "CheckStream", + "stream_names": ["pokemon"] + }, + "spec": { + "type": "Spec", + "documentationUrl": "https://docs.airbyte.com/integrations/sources/pokeapi", + "connection_specification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Pokeapi Spec", + "type": "object", + "required": ["pokemon_name"], + "properties": { + "pokemon_name": { + "type": "string", + "title": "Pokemon Name", + "description": "Pokemon requested from the API.", + "pattern": "^[a-z0-9_\\-]+$", + "enum": [ + "bulbasaur", + "ivysaur", + "venusaur", + "charmander", + "charmeleon", + "charizard", + "squirtle", + "wartortle", + "blastoise", + "caterpie", + "metapod", + "butterfree", + "weedle", + "kakuna", + "beedrill", + "pidgey", + "pidgeotto", + "pidgeot", + "rattata", + "raticate", + "spearow", + "fearow", + "ekans", + "arbok", + "pikachu", + "raichu", + "sandshrew", + "sandslash", + "nidoranf", + "nidorina", + "nidoqueen", + "nidoranm", + "nidorino", + "nidoking", + "clefairy", + "clefable", + "vulpix", + "ninetales", + "jigglypuff", + "wigglytuff", + "zubat", + "golbat", + "oddish", + "gloom", + "vileplume", + "paras", + "parasect", + "venonat", + "venomoth", + "diglett", + "dugtrio", + "meowth", + "persian", + "psyduck", + "golduck", + "mankey", + "primeape", + "growlithe", + "arcanine", + "poliwag", + "poliwhirl", + "poliwrath", + "abra", + "kadabra", + "alakazam", + "machop", + "machoke", + "machamp", + "bellsprout", + "weepinbell", + "victreebel", + "tentacool", + "tentacruel", + "geodude", + "graveler", + "golem", + "ponyta", + "rapidash", + "slowpoke", + "slowbro", + "magnemite", + "magneton", + "farfetchd", + "doduo", + "dodrio", + "seel", + "dewgong", + "grimer", + "muk", + "shellder", + "cloyster", + "gastly", + "haunter", + "gengar", + "onix", + "drowzee", + "hypno", + "krabby", + "kingler", + "voltorb", + "electrode", + "exeggcute", + "exeggutor", + "cubone", + "marowak", + "hitmonlee", + "hitmonchan", + "lickitung", + "koffing", + "weezing", + "rhyhorn", + "rhydon", + "chansey", + "tangela", + "kangaskhan", + "horsea", + "seadra", + "goldeen", + "seaking", + "staryu", + "starmie", + "mrmime", + "scyther", + "jynx", + "electabuzz", + "magmar", + "pinsir", + "tauros", + "magikarp", + "gyarados", + "lapras", + "ditto", + "eevee", + "vaporeon", + "jolteon", + "flareon", + "porygon", + "omanyte", + "omastar", + "kabuto", + "kabutops", + "aerodactyl", + "snorlax", + "articuno", + "zapdos", + "moltres", + "dratini", + "dragonair", + "dragonite", + "mewtwo", + "mew", + "chikorita", + "bayleef", + "meganium", + "cyndaquil", + "quilava", + "typhlosion", + "totodile", + "croconaw", + "feraligatr", + "sentret", + "furret", + "hoothoot", + "noctowl", + "ledyba", + "ledian", + "spinarak", + "ariados", + "crobat", + "chinchou", + "lanturn", + "pichu", + "cleffa", + "igglybuff", + "togepi", + "togetic", + "natu", + "xatu", + "mareep", + "flaaffy", + "ampharos", + "bellossom", + "marill", + "azumarill", + "sudowoodo", + "politoed", + "hoppip", + "skiploom", + "jumpluff", + "aipom", + "sunkern", + "sunflora", + "yanma", + "wooper", + "quagsire", + "espeon", + "umbreon", + "murkrow", + "slowking", + "misdreavus", + "unown", + "wobbuffet", + "girafarig", + "pineco", + "forretress", + "dunsparce", + "gligar", + "steelix", + "snubbull", + "granbull", + "qwilfish", + "scizor", + "shuckle", + "heracross", + "sneasel", + "teddiursa", + "ursaring", + "slugma", + "magcargo", + "swinub", + "piloswine", + "corsola", + "remoraid", + "octillery", + "delibird", + "mantine", + "skarmory", + "houndour", + "houndoom", + "kingdra", + "phanpy", + "donphan", + "porygon2", + "stantler", + "smeargle", + "tyrogue", + "hitmontop", + "smoochum", + "elekid", + "magby", + "miltank", + "blissey", + "raikou", + "entei", + "suicune", + "larvitar", + "pupitar", + "tyranitar", + "lugia", + "ho-oh", + "celebi", + "treecko", + "grovyle", + "sceptile", + "torchic", + "combusken", + "blaziken", + "mudkip", + "marshtomp", + "swampert", + "poochyena", + "mightyena", + "zigzagoon", + "linoone", + "wurmple", + "silcoon", + "beautifly", + "cascoon", + "dustox", + "lotad", + "lombre", + "ludicolo", + "seedot", + "nuzleaf", + "shiftry", + "taillow", + "swellow", + "wingull", + "pelipper", + "ralts", + "kirlia", + "gardevoir", + "surskit", + "masquerain", + "shroomish", + "breloom", + "slakoth", + "vigoroth", + "slaking", + "nincada", + "ninjask", + "shedinja", + "whismur", + "loudred", + "exploud", + "makuhita", + "hariyama", + "azurill", + "nosepass", + "skitty", + "delcatty", + "sableye", + "mawile", + "aron", + "lairon", + "aggron", + "meditite", + "medicham", + "electrike", + "manectric", + "plusle", + "minun", + "volbeat", + "illumise", + "roselia", + "gulpin", + "swalot", + "carvanha", + "sharpedo", + "wailmer", + "wailord", + "numel", + "camerupt", + "torkoal", + "spoink", + "grumpig", + "spinda", + "trapinch", + "vibrava", + "flygon", + "cacnea", + "cacturne", + "swablu", + "altaria", + "zangoose", + "seviper", + "lunatone", + "solrock", + "barboach", + "whiscash", + "corphish", + "crawdaunt", + "baltoy", + "claydol", + "lileep", + "cradily", + "anorith", + "armaldo", + "feebas", + "milotic", + "castform", + "kecleon", + "shuppet", + "banette", + "duskull", + "dusclops", + "tropius", + "chimecho", + "absol", + "wynaut", + "snorunt", + "glalie", + "spheal", + "sealeo", + "walrein", + "clamperl", + "huntail", + "gorebyss", + "relicanth", + "luvdisc", + "bagon", + "shelgon", + "salamence", + "beldum", + "metang", + "metagross", + "regirock", + "regice", + "registeel", + "latias", + "latios", + "kyogre", + "groudon", + "rayquaza", + "jirachi", + "deoxys", + "turtwig", + "grotle", + "torterra", + "chimchar", + "monferno", + "infernape", + "piplup", + "prinplup", + "empoleon", + "starly", + "staravia", + "staraptor", + "bidoof", + "bibarel", + "kricketot", + "kricketune", + "shinx", + "luxio", + "luxray", + "budew", + "roserade", + "cranidos", + "rampardos", + "shieldon", + "bastiodon", + "burmy", + "wormadam", + "mothim", + "combee", + "vespiquen", + "pachirisu", + "buizel", + "floatzel", + "cherubi", + "cherrim", + "shellos", + "gastrodon", + "ambipom", + "drifloon", + "drifblim", + "buneary", + "lopunny", + "mismagius", + "honchkrow", + "glameow", + "purugly", + "chingling", + "stunky", + "skuntank", + "bronzor", + "bronzong", + "bonsly", + "mimejr", + "happiny", + "chatot", + "spiritomb", + "gible", + "gabite", + "garchomp", + "munchlax", + "riolu", + "lucario", + "hippopotas", + "hippowdon", + "skorupi", + "drapion", + "croagunk", + "toxicroak", + "carnivine", + "finneon", + "lumineon", + "mantyke", + "snover", + "abomasnow", + "weavile", + "magnezone", + "lickilicky", + "rhyperior", + "tangrowth", + "electivire", + "magmortar", + "togekiss", + "yanmega", + "leafeon", + "glaceon", + "gliscor", + "mamoswine", + "porygon-z", + "gallade", + "probopass", + "dusknoir", + "froslass", + "rotom", + "uxie", + "mesprit", + "azelf", + "dialga", + "palkia", + "heatran", + "regigigas", + "giratina", + "cresselia", + "phione", + "manaphy", + "darkrai", + "shaymin", + "arceus", + "victini", + "snivy", + "servine", + "serperior", + "tepig", + "pignite", + "emboar", + "oshawott", + "dewott", + "samurott", + "patrat", + "watchog", + "lillipup", + "herdier", + "stoutland", + "purrloin", + "liepard", + "pansage", + "simisage", + "pansear", + "simisear", + "panpour", + "simipour", + "munna", + "musharna", + "pidove", + "tranquill", + "unfezant", + "blitzle", + "zebstrika", + "roggenrola", + "boldore", + "gigalith", + "woobat", + "swoobat", + "drilbur", + "excadrill", + "audino", + "timburr", + "gurdurr", + "conkeldurr", + "tympole", + "palpitoad", + "seismitoad", + "throh", + "sawk", + "sewaddle", + "swadloon", + "leavanny", + "venipede", + "whirlipede", + "scolipede", + "cottonee", + "whimsicott", + "petilil", + "lilligant", + "basculin", + "sandile", + "krokorok", + "krookodile", + "darumaka", + "darmanitan", + "maractus", + "dwebble", + "crustle", + "scraggy", + "scrafty", + "sigilyph", + "yamask", + "cofagrigus", + "tirtouga", + "carracosta", + "archen", + "archeops", + "trubbish", + "garbodor", + "zorua", + "zoroark", + "minccino", + "cinccino", + "gothita", + "gothorita", + "gothitelle", + "solosis", + "duosion", + "reuniclus", + "ducklett", + "swanna", + "vanillite", + "vanillish", + "vanilluxe", + "deerling", + "sawsbuck", + "emolga", + "karrablast", + "escavalier", + "foongus", + "amoonguss", + "frillish", + "jellicent", + "alomomola", + "joltik", + "galvantula", + "ferroseed", + "ferrothorn", + "klink", + "klang", + "klinklang", + "tynamo", + "eelektrik", + "eelektross", + "elgyem", + "beheeyem", + "litwick", + "lampent", + "chandelure", + "axew", + "fraxure", + "haxorus", + "cubchoo", + "beartic", + "cryogonal", + "shelmet", + "accelgor", + "stunfisk", + "mienfoo", + "mienshao", + "druddigon", + "golett", + "golurk", + "pawniard", + "bisharp", + "bouffalant", + "rufflet", + "braviary", + "vullaby", + "mandibuzz", + "heatmor", + "durant", + "deino", + "zweilous", + "hydreigon", + "larvesta", + "volcarona", + "cobalion", + "terrakion", + "virizion", + "tornadus", + "thundurus", + "reshiram", + "zekrom", + "landorus", + "kyurem", + "keldeo", + "meloetta", + "genesect", + "chespin", + "quilladin", + "chesnaught", + "fennekin", + "braixen", + "delphox", + "froakie", + "frogadier", + "greninja", + "bunnelby", + "diggersby", + "fletchling", + "fletchinder", + "talonflame", + "scatterbug", + "spewpa", + "vivillon", + "litleo", + "pyroar", + "flabebe", + "floette", + "florges", + "skiddo", + "gogoat", + "pancham", + "pangoro", + "furfrou", + "espurr", + "meowstic", + "honedge", + "doublade", + "aegislash", + "spritzee", + "aromatisse", + "swirlix", + "slurpuff", + "inkay", + "malamar", + "binacle", + "barbaracle", + "skrelp", + "dragalge", + "clauncher", + "clawitzer", + "helioptile", + "heliolisk", + "tyrunt", + "tyrantrum", + "amaura", + "aurorus", + "sylveon", + "hawlucha", + "dedenne", + "carbink", + "goomy", + "sliggoo", + "goodra", + "klefki", + "phantump", + "trevenant", + "pumpkaboo", + "gourgeist", + "bergmite", + "avalugg", + "noibat", + "noivern", + "xerneas", + "yveltal", + "zygarde", + "diancie", + "hoopa", + "volcanion", + "rowlet", + "dartrix", + "decidueye", + "litten", + "torracat", + "incineroar", + "popplio", + "brionne", + "primarina", + "pikipek", + "trumbeak", + "toucannon", + "yungoos", + "gumshoos", + "grubbin", + "charjabug", + "vikavolt", + "crabrawler", + "crabominable", + "oricorio", + "cutiefly", + "ribombee", + "rockruff", + "lycanroc", + "wishiwashi", + "mareanie", + "toxapex", + "mudbray", + "mudsdale", + "dewpider", + "araquanid", + "fomantis", + "lurantis", + "morelull", + "shiinotic", + "salandit", + "salazzle", + "stufful", + "bewear", + "bounsweet", + "steenee", + "tsareena", + "comfey", + "oranguru", + "passimian", + "wimpod", + "golisopod", + "sandygast", + "palossand", + "pyukumuku", + "typenull", + "silvally", + "minior", + "komala", + "turtonator", + "togedemaru", + "mimikyu", + "bruxish", + "drampa", + "dhelmise", + "jangmo-o", + "hakamo-o", + "kommo-o", + "tapukoko", + "tapulele", + "tapubulu", + "tapufini", + "cosmog", + "cosmoem", + "solgaleo", + "lunala", + "nihilego", + "buzzwole", + "pheromosa", + "xurkitree", + "celesteela", + "kartana", + "guzzlord", + "necrozma", + "magearna", + "marshadow", + "poipole", + "naganadel", + "stakataka", + "blacephalon", + "zeraora", + "meltan", + "melmetal", + "grookey", + "thwackey", + "rillaboom", + "scorbunny", + "raboot", + "cinderace", + "sobble", + "drizzile", + "inteleon", + "skwovet", + "greedent", + "rookidee", + "corvisquire", + "corviknight", + "blipbug", + "dottler", + "orbeetle", + "nickit", + "thievul", + "gossifleur", + "eldegoss", + "wooloo", + "dubwool", + "chewtle", + "drednaw", + "yamper", + "boltund", + "rolycoly", + "carkol", + "coalossal", + "applin", + "flapple", + "appletun", + "silicobra", + "sandaconda", + "cramorant", + "arrokuda", + "barraskewda", + "toxel", + "toxtricity", + "sizzlipede", + "centiskorch", + "clobbopus", + "grapploct", + "sinistea", + "polteageist", + "hatenna", + "hattrem", + "hatterene", + "impidimp", + "morgrem", + "grimmsnarl", + "obstagoon", + "perrserker", + "cursola", + "sirfetchd", + "mrrime", + "runerigus", + "milcery", + "alcremie", + "falinks", + "pincurchin", + "snom", + "frosmoth", + "stonjourner", + "eiscue", + "indeedee", + "morpeko", + "cufant", + "copperajah", + "dracozolt", + "arctozolt", + "dracovish", + "arctovish", + "duraludon", + "dreepy", + "drakloak", + "dragapult", + "zacian", + "zamazenta", + "eternatus", + "kubfu", + "urshifu", + "zarude", + "regieleki", + "regidrago", + "glastrier", + "spectrier", + "calyrex" + ], + "examples": ["ditto", "luxray", "snorlax"] + } + } + } + } + }, + "pokemon_name": "ditto" +} diff --git a/airbyte-integrations/connectors/source-declarative-manifest/main.py b/airbyte-integrations/connectors/source-declarative-manifest/main.py new file mode 100644 index 0000000000000..fb1e853213d7a --- /dev/null +++ b/airbyte-integrations/connectors/source-declarative-manifest/main.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from source_declarative_manifest.run import run + +if __name__ == "__main__": + run() diff --git a/airbyte-integrations/connectors/source-declarative-manifest/metadata.yaml b/airbyte-integrations/connectors/source-declarative-manifest/metadata.yaml new file mode 100644 index 0000000000000..cc49331729c07 --- /dev/null +++ b/airbyte-integrations/connectors/source-declarative-manifest/metadata.yaml @@ -0,0 +1,33 @@ +data: + ab_internal: + ql: 100 + sl: 100 + connectorBuildOptions: + baseImage: docker.io/airbyte/python-connector-base:1.2.0@sha256:c22a9d97464b69d6ef01898edf3f8612dc11614f05a84984451dde195f337db9 + connectorSubtype: api + connectorType: source + definitionId: 64a2f99c-542f-4af8-9a6f-355f1217b436 + # This version should not be updated manually - it is updated by the CDK release workflow. + dockerImageTag: 0.81.6 + dockerRepository: airbyte/source-declarative-manifest + # This page is hidden from the docs for now, since the connector is not in any Airbyte registries. + documentationUrl: https://docs.airbyte.com/integrations/sources/low-code + githubIssueLabel: source-declarative-manifest + license: MIT + name: Low-Code Source + registries: + # The path for using this source in the Airbyte UI is still with the connector builder for now. + cloud: + enabled: false + oss: + enabled: false + releaseDate: 2023-03-01 + releaseStage: alpha + remoteRegistries: + pypi: + enabled: true + packageName: airbyte-source-declarative-manifest + supportLevel: community + tags: + - language:python +metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-declarative-manifest/poetry.lock b/airbyte-integrations/connectors/source-declarative-manifest/poetry.lock new file mode 100644 index 0000000000000..d177b29081aa9 --- /dev/null +++ b/airbyte-integrations/connectors/source-declarative-manifest/poetry.lock @@ -0,0 +1,1169 @@ +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. + +[[package]] +name = "airbyte-cdk" +version = "0.81.6" +description = "A framework for writing Airbyte Connectors." +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "airbyte_cdk-0.81.6-py3-none-any.whl", hash = "sha256:456a301edae4f8c99b77d89aaa2016827c43acfd7f9fb61e7a961760b954695b"}, + {file = "airbyte_cdk-0.81.6.tar.gz", hash = "sha256:3d4e2f3dc8177a109cb70e1d701583c870e2c31168dad7498cd2c84b0cc34637"}, +] + +[package.dependencies] +airbyte-protocol-models = "*" +backoff = "*" +cachetools = "*" +Deprecated = ">=1.2,<1.3" +dpath = ">=2.0.1,<2.1.0" +genson = "1.2.2" +isodate = ">=0.6.1,<0.7.0" +Jinja2 = ">=3.1.2,<3.2.0" +jsonref = ">=0.2,<0.3" +jsonschema = ">=3.2.0,<3.3.0" +langchain_core = "0.1.42" +pendulum = "<3.0.0" +pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" +python-dateutil = "*" +PyYAML = ">=6.0.1,<7.0.0" +requests = "*" +requests_cache = "*" +wcmatch = "8.4" + +[package.extras] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] +vector-db-based = ["cohere (==4.21)", "langchain (==0.1.16)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] + +[[package]] +name = "airbyte-protocol-models" +version = "0.9.0" +description = "Declares the Airbyte Protocol." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte_protocol_models-0.9.0-py3-none-any.whl", hash = "sha256:e972e140b5efd1edad5a338bcae8fdee9fc12545caf2c321e0f61b151c163a9b"}, + {file = "airbyte_protocol_models-0.9.0.tar.gz", hash = "sha256:40b69c33df23fe82d7078e84beb123bd604480e4d73cb277a890fcc92aedc8d2"}, +] + +[package.dependencies] +pydantic = ">=1.9.2,<2.0.0" + +[[package]] +name = "atomicwrites" +version = "1.4.1" +description = "Atomic file writes." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, +] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "bracex" +version = "2.4" +description = "Bash style brace expander." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + +[[package]] +name = "cachetools" +version = "5.3.3" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "dpath" +version = "2.0.8" +description = "Filesystem-like pathing and searching for dictionaries" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, + {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "genson" +version = "1.2.2" +description = "GenSON is a powerful, user-friendly JSON Schema generator." +optional = false +python-versions = "*" +files = [ + {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, +] + +[[package]] +name = "idna" +version = "3.7" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jsonpatch" +version = "1.33" +description = "Apply JSON-Patches (RFC 6902)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, + {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, +] + +[package.dependencies] +jsonpointer = ">=1.9" + +[[package]] +name = "jsonpointer" +version = "2.4" +description = "Identify specific nodes in a JSON document (RFC 6901)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, + {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, +] + +[[package]] +name = "jsonref" +version = "0.2" +description = "An implementation of JSON Reference for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, +] + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0" +setuptools = "*" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] + +[[package]] +name = "langchain-core" +version = "0.1.42" +description = "Building applications with LLMs through composability" +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langchain_core-0.1.42-py3-none-any.whl", hash = "sha256:c5653ffa08a44f740295c157a24c0def4a753333f6a2c41f76bf431cd00be8b5"}, + {file = "langchain_core-0.1.42.tar.gz", hash = "sha256:40751bf60ea5d8e2b2efe65290db434717ee3834870c002e40e2811f09d814e6"}, +] + +[package.dependencies] +jsonpatch = ">=1.33,<2.0" +langsmith = ">=0.1.0,<0.2.0" +packaging = ">=23.2,<24.0" +pydantic = ">=1,<3" +PyYAML = ">=5.3" +tenacity = ">=8.1.0,<9.0.0" + +[package.extras] +extended-testing = ["jinja2 (>=3,<4)"] + +[[package]] +name = "langsmith" +version = "0.1.47" +description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langsmith-0.1.47-py3-none-any.whl", hash = "sha256:17b0a908b8d39b6da3ecff658c8c00304b0b62f59945a5e16c2da5a254ea21a6"}, + {file = "langsmith-0.1.47.tar.gz", hash = "sha256:f5ddd17628baa03a775525c5547a543a559313e425cdb2bf23579ffcf6056a76"}, +] + +[package.dependencies] +orjson = ">=3.9.14,<4.0.0" +pydantic = ">=1,<3" +requests = ">=2,<3" + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "orjson" +version = "3.10.1" +description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +optional = false +python-versions = ">=3.8" +files = [ + {file = "orjson-3.10.1-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:8ec2fc456d53ea4a47768f622bb709be68acd455b0c6be57e91462259741c4f3"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e900863691d327758be14e2a491931605bd0aded3a21beb6ce133889830b659"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ab6ecbd6fe57785ebc86ee49e183f37d45f91b46fc601380c67c5c5e9c0014a2"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8af7c68b01b876335cccfb4eee0beef2b5b6eae1945d46a09a7c24c9faac7a77"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:915abfb2e528677b488a06eba173e9d7706a20fdfe9cdb15890b74ef9791b85e"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe3fd4a36eff9c63d25503b439531d21828da9def0059c4f472e3845a081aa0b"}, + {file = "orjson-3.10.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d229564e72cfc062e6481a91977a5165c5a0fdce11ddc19ced8471847a67c517"}, + {file = "orjson-3.10.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9e00495b18304173ac843b5c5fbea7b6f7968564d0d49bef06bfaeca4b656f4e"}, + {file = "orjson-3.10.1-cp310-none-win32.whl", hash = "sha256:fd78ec55179545c108174ba19c1795ced548d6cac4d80d014163033c047ca4ea"}, + {file = "orjson-3.10.1-cp310-none-win_amd64.whl", hash = "sha256:50ca42b40d5a442a9e22eece8cf42ba3d7cd4cd0f2f20184b4d7682894f05eec"}, + {file = "orjson-3.10.1-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b345a3d6953628df2f42502297f6c1e1b475cfbf6268013c94c5ac80e8abc04c"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:caa7395ef51af4190d2c70a364e2f42138e0e5fcb4bc08bc9b76997659b27dab"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b01d701decd75ae092e5f36f7b88a1e7a1d3bb7c9b9d7694de850fb155578d5a"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b5028981ba393f443d8fed9049211b979cadc9d0afecf162832f5a5b152c6297"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:31ff6a222ea362b87bf21ff619598a4dc1106aaafaea32b1c4876d692891ec27"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e852a83d7803d3406135fb7a57cf0c1e4a3e73bac80ec621bd32f01c653849c5"}, + {file = "orjson-3.10.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2567bc928ed3c3fcd90998009e8835de7c7dc59aabcf764b8374d36044864f3b"}, + {file = "orjson-3.10.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4ce98cac60b7bb56457bdd2ed7f0d5d7f242d291fdc0ca566c83fa721b52e92d"}, + {file = "orjson-3.10.1-cp311-none-win32.whl", hash = "sha256:813905e111318acb356bb8029014c77b4c647f8b03f314e7b475bd9ce6d1a8ce"}, + {file = "orjson-3.10.1-cp311-none-win_amd64.whl", hash = "sha256:03a3ca0b3ed52bed1a869163a4284e8a7b0be6a0359d521e467cdef7e8e8a3ee"}, + {file = "orjson-3.10.1-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:f02c06cee680b1b3a8727ec26c36f4b3c0c9e2b26339d64471034d16f74f4ef5"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1aa2f127ac546e123283e437cc90b5ecce754a22306c7700b11035dad4ccf85"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2cf29b4b74f585225196944dffdebd549ad2af6da9e80db7115984103fb18a96"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1b130c20b116f413caf6059c651ad32215c28500dce9cd029a334a2d84aa66f"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d31f9a709e6114492136e87c7c6da5e21dfedebefa03af85f3ad72656c493ae9"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d1d169461726f271ab31633cf0e7e7353417e16fb69256a4f8ecb3246a78d6e"}, + {file = "orjson-3.10.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:57c294d73825c6b7f30d11c9e5900cfec9a814893af7f14efbe06b8d0f25fba9"}, + {file = "orjson-3.10.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d7f11dbacfa9265ec76b4019efffabaabba7a7ebf14078f6b4df9b51c3c9a8ea"}, + {file = "orjson-3.10.1-cp312-none-win32.whl", hash = "sha256:d89e5ed68593226c31c76ab4de3e0d35c760bfd3fbf0a74c4b2be1383a1bf123"}, + {file = "orjson-3.10.1-cp312-none-win_amd64.whl", hash = "sha256:aa76c4fe147fd162107ce1692c39f7189180cfd3a27cfbc2ab5643422812da8e"}, + {file = "orjson-3.10.1-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a2c6a85c92d0e494c1ae117befc93cf8e7bca2075f7fe52e32698da650b2c6d1"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9813f43da955197d36a7365eb99bed42b83680801729ab2487fef305b9ced866"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ec917b768e2b34b7084cb6c68941f6de5812cc26c6f1a9fecb728e36a3deb9e8"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5252146b3172d75c8a6d27ebca59c9ee066ffc5a277050ccec24821e68742fdf"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:536429bb02791a199d976118b95014ad66f74c58b7644d21061c54ad284e00f4"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7dfed3c3e9b9199fb9c3355b9c7e4649b65f639e50ddf50efdf86b45c6de04b5"}, + {file = "orjson-3.10.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2b230ec35f188f003f5b543644ae486b2998f6afa74ee3a98fc8ed2e45960afc"}, + {file = "orjson-3.10.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:01234249ba19c6ab1eb0b8be89f13ea21218b2d72d496ef085cfd37e1bae9dd8"}, + {file = "orjson-3.10.1-cp38-none-win32.whl", hash = "sha256:8a884fbf81a3cc22d264ba780920d4885442144e6acaa1411921260416ac9a54"}, + {file = "orjson-3.10.1-cp38-none-win_amd64.whl", hash = "sha256:dab5f802d52b182163f307d2b1f727d30b1762e1923c64c9c56dd853f9671a49"}, + {file = "orjson-3.10.1-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a51fd55d4486bc5293b7a400f9acd55a2dc3b5fc8420d5ffe9b1d6bb1a056a5e"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53521542a6db1411b3bfa1b24ddce18605a3abdc95a28a67b33f9145f26aa8f2"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:27d610df96ac18ace4931411d489637d20ab3b8f63562b0531bba16011998db0"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79244b1456e5846d44e9846534bd9e3206712936d026ea8e6a55a7374d2c0694"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d751efaa8a49ae15cbebdda747a62a9ae521126e396fda8143858419f3b03610"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27ff69c620a4fff33267df70cfd21e0097c2a14216e72943bd5414943e376d77"}, + {file = "orjson-3.10.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ebc58693464146506fde0c4eb1216ff6d4e40213e61f7d40e2f0dde9b2f21650"}, + {file = "orjson-3.10.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5be608c3972ed902e0143a5b8776d81ac1059436915d42defe5c6ae97b3137a4"}, + {file = "orjson-3.10.1-cp39-none-win32.whl", hash = "sha256:4ae10753e7511d359405aadcbf96556c86e9dbf3a948d26c2c9f9a150c52b091"}, + {file = "orjson-3.10.1-cp39-none-win_amd64.whl", hash = "sha256:fb5bc4caa2c192077fdb02dce4e5ef8639e7f20bec4e3a834346693907362932"}, + {file = "orjson-3.10.1.tar.gz", hash = "sha256:a883b28d73370df23ed995c466b4f6c708c1f7a9bdc400fe89165c96c7603204"}, +] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pendulum" +version = "2.1.2" +description = "Python datetimes made easy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, +] + +[package.dependencies] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] + +[[package]] +name = "pydantic" +version = "1.10.15" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50f1666a9940d3d68683c9d96e39640f709d7a72ff8702987dab1761036206bb"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82790d4753ee5d00739d6cb5cf56bceb186d9d6ce134aca3ba7befb1eedbc2c8"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d207d5b87f6cbefbdb1198154292faee8017d7495a54ae58db06762004500d00"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e49db944fad339b2ccb80128ffd3f8af076f9f287197a480bf1e4ca053a866f0"}, + {file = "pydantic-1.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:d3b5c4cbd0c9cb61bbbb19ce335e1f8ab87a811f6d589ed52b0254cf585d709c"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3d5731a120752248844676bf92f25a12f6e45425e63ce22e0849297a093b5b0"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c365ad9c394f9eeffcb30a82f4246c0006417f03a7c0f8315d6211f25f7cb654"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3287e1614393119c67bd4404f46e33ae3be3ed4cd10360b48d0a4459f420c6a3"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be51dd2c8596b25fe43c0a4a59c2bee4f18d88efb8031188f9e7ddc6b469cf44"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6a51a1dd4aa7b3f1317f65493a182d3cff708385327c1c82c81e4a9d6d65b2e4"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4e316e54b5775d1eb59187f9290aeb38acf620e10f7fd2f776d97bb788199e53"}, + {file = "pydantic-1.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:0d142fa1b8f2f0ae11ddd5e3e317dcac060b951d605fda26ca9b234b92214986"}, + {file = "pydantic-1.10.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7ea210336b891f5ea334f8fc9f8f862b87acd5d4a0cbc9e3e208e7aa1775dabf"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3453685ccd7140715e05f2193d64030101eaad26076fad4e246c1cc97e1bb30d"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bea1f03b8d4e8e86702c918ccfd5d947ac268f0f0cc6ed71782e4b09353b26f"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:af9850d98fc21e5bc24ea9e35dd80a29faf6462c608728a110c0a30b595e58b7"}, + {file = "pydantic-1.10.15-cp37-cp37m-win_amd64.whl", hash = "sha256:d31ee5b14a82c9afe2bd26aaa405293d4237d0591527d9129ce36e58f19f95c1"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5e09c19df304b8123938dc3c53d3d3be6ec74b9d7d0d80f4f4b5432ae16c2022"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7ac9237cd62947db00a0d16acf2f3e00d1ae9d3bd602b9c415f93e7a9fc10528"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:584f2d4c98ffec420e02305cf675857bae03c9d617fcfdc34946b1160213a948"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbc6989fad0c030bd70a0b6f626f98a862224bc2b1e36bfc531ea2facc0a340c"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d573082c6ef99336f2cb5b667b781d2f776d4af311574fb53d908517ba523c22"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6bd7030c9abc80134087d8b6e7aa957e43d35714daa116aced57269a445b8f7b"}, + {file = "pydantic-1.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:3350f527bb04138f8aff932dc828f154847fbdc7a1a44c240fbfff1b57f49a12"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51d405b42f1b86703555797270e4970a9f9bd7953f3990142e69d1037f9d9e51"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a980a77c52723b0dc56640ced396b73a024d4b74f02bcb2d21dbbac1debbe9d0"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f1a1fb467d3f49e1708a3f632b11c69fccb4e748a325d5a491ddc7b5d22383"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:676ed48f2c5bbad835f1a8ed8a6d44c1cd5a21121116d2ac40bd1cd3619746ed"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92229f73400b80c13afcd050687f4d7e88de9234d74b27e6728aa689abcf58cc"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2746189100c646682eff0bce95efa7d2e203420d8e1c613dc0c6b4c1d9c1fde4"}, + {file = "pydantic-1.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:394f08750bd8eaad714718812e7fab615f873b3cdd0b9d84e76e51ef3b50b6b7"}, + {file = "pydantic-1.10.15-py3-none-any.whl", hash = "sha256:28e552a060ba2740d0d2aabe35162652c1459a0b9069fe0db7f4ee0e18e74d58"}, + {file = "pydantic-1.10.15.tar.gz", hash = "sha256:ca832e124eda231a60a041da4f013e3ff24949d94a01154b137fc2f2a43c3ffb"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + +[[package]] +name = "pyrsistent" +version = "0.20.0" +description = "Persistent/Functional/Immutable data structures" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, +] + +[[package]] +name = "pytest" +version = "6.2.5" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, + {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, +] + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +toml = "*" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] + +[[package]] +name = "pytest-mock" +version = "3.14.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, +] + +[package.dependencies] +pytest = ">=6.2.5" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-cache" +version = "1.2.0" +description = "A persistent cache for python requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, +] + +[package.dependencies] +attrs = ">=21.2" +cattrs = ">=22.2" +platformdirs = ">=2.5" +requests = ">=2.22" +url-normalize = ">=1.4" +urllib3 = ">=1.25.5" + +[package.extras] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] +bson = ["bson (>=0.5)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] +dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] +json = ["ujson (>=5.4)"] +mongodb = ["pymongo (>=3)"] +redis = ["redis (>=3)"] +security = ["itsdangerous (>=2.0)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "requests-mock" +version = "1.12.1" +description = "Mock out responses from the requests package" +optional = false +python-versions = ">=3.5" +files = [ + {file = "requests-mock-1.12.1.tar.gz", hash = "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401"}, + {file = "requests_mock-1.12.1-py2.py3-none-any.whl", hash = "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563"}, +] + +[package.dependencies] +requests = ">=2.22,<3" + +[package.extras] +fixture = ["fixtures"] + +[[package]] +name = "setuptools" +version = "69.5.1" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"}, + {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "tenacity" +version = "8.2.3" +description = "Retry code until it succeeds" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tenacity-8.2.3-py3-none-any.whl", hash = "sha256:ce510e327a630c9e1beaf17d42e6ffacc88185044ad85cf74c0a8887c6a0f88c"}, + {file = "tenacity-8.2.3.tar.gz", hash = "sha256:5398ef0d78e63f40007c1fb4c0bff96e1911394d2fa8d194f77619c05ff6cc8a"}, +] + +[package.extras] +doc = ["reno", "sphinx", "tornado (>=4.5)"] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "typing-extensions" +version = "4.11.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, + {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, +] + +[[package]] +name = "url-normalize" +version = "1.4.3" +description = "URL normalization for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, + {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wcmatch" +version = "8.4" +description = "Wildcard/glob file name matcher." +optional = false +python-versions = ">=3.7" +files = [ + {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, + {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, +] + +[package.dependencies] +bracex = ">=2.1.1" + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9,<3.12" +content-hash = "52cf6254ff232eb2037282705ebae9ba61e0f90b83bd6eba0ffabe679010a12b" diff --git a/airbyte-integrations/connectors/source-declarative-manifest/pyproject.toml b/airbyte-integrations/connectors/source-declarative-manifest/pyproject.toml new file mode 100644 index 0000000000000..47693e5f9ff92 --- /dev/null +++ b/airbyte-integrations/connectors/source-declarative-manifest/pyproject.toml @@ -0,0 +1,28 @@ +[build-system] +requires = [ "poetry-core>=1.0.0",] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +version = "0.81.6" +name = "source-declarative-manifest" +description = "Base source implementation for low-code sources." +authors = [ "Airbyte ",] +license = "MIT" +readme = "README.md" +documentation = "https://docs.airbyte.com/integrations/sources/low-code" +homepage = "https://airbyte.com" +repository = "https://github.com/airbytehq/airbyte" +[[tool.poetry.packages]] +include = "source_declarative_manifest" + +[tool.poetry.dependencies] +python = "^3.9,<3.12" +airbyte-cdk = "0.81.6" + +[tool.poetry.scripts] +source-declarative-manifest = "source_declarative_manifest.run:run" + +[tool.poetry.group.dev.dependencies] +pytest-mock = "^3.6.1" +pytest = "^6.2" +requests-mock = "^1.9.3" diff --git a/airbyte-lib/tests/lint_tests/__init__.py b/airbyte-integrations/connectors/source-declarative-manifest/source_declarative_manifest/__init__.py similarity index 100% rename from airbyte-lib/tests/lint_tests/__init__.py rename to airbyte-integrations/connectors/source-declarative-manifest/source_declarative_manifest/__init__.py diff --git a/airbyte-integrations/connectors/source-declarative-manifest/source_declarative_manifest/run.py b/airbyte-integrations/connectors/source-declarative-manifest/source_declarative_manifest/run.py new file mode 100644 index 0000000000000..b33bc51cda31e --- /dev/null +++ b/airbyte-integrations/connectors/source-declarative-manifest/source_declarative_manifest/run.py @@ -0,0 +1,53 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# +import argparse +import json +import pkgutil +import sys +from typing import List + +from airbyte_cdk.connector import BaseConnector +from airbyte_cdk.entrypoint import AirbyteEntrypoint, launch +from airbyte_cdk.models import AirbyteMessage, ConnectorSpecification, Type +from airbyte_cdk.sources.declarative.manifest_declarative_source import ManifestDeclarativeSource + + +def handle_command(args: List[str]) -> None: + """Overrides the spec command to return the generalized spec for the declarative manifest source. + + This is different from a typical low-code, but built and published separately source built as a ManifestDeclarativeSource, + because that will have a spec method that returns the spec for that specific source. Other than spec, + the generalized connector behaves the same as any other, since the manifest is provided in the config. + """ + if args[0] == "spec": + json_spec = pkgutil.get_data("source_declarative_manifest", "spec.json") + spec_obj = json.loads(json_spec) + spec = ConnectorSpecification.parse_obj(spec_obj) + + message = AirbyteMessage(type=Type.SPEC, spec=spec) + print(AirbyteEntrypoint.airbyte_message_to_string(message)) + else: + source = create_manifest(args) + launch(source, sys.argv[1:]) + + +def create_manifest(args: List[str]) -> ManifestDeclarativeSource: + """Creates the source with the injected config. + + This essentially does what other low-code sources do at build time, but at runtime, + with a user-provided manifest in the config. This better reflects what happens in the + connector builder. + """ + parsed_args = AirbyteEntrypoint.parse_args(args) + config = BaseConnector.read_config(parsed_args.config) + if "__injected_declarative_manifest" not in config: + raise ValueError( + f"Invalid config: `__injected_declarative_manifest` should be provided at the root of the config but config only has keys {list(config.keys())}" + ) + return ManifestDeclarativeSource(config.get("__injected_declarative_manifest")) + + +def run(): + args = sys.argv[1:] + handle_command(args) diff --git a/airbyte-integrations/connectors/source-declarative-manifest/source_declarative_manifest/spec.json b/airbyte-integrations/connectors/source-declarative-manifest/source_declarative_manifest/spec.json new file mode 100644 index 0000000000000..73d6a81a5d6d9 --- /dev/null +++ b/airbyte-integrations/connectors/source-declarative-manifest/source_declarative_manifest/spec.json @@ -0,0 +1,17 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/sources/low-code", + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Low-code source spec", + "type": "object", + "required": ["__injected_declarative_manifest"], + "additionalProperties": true, + "properties": { + "__injected_declarative_manifest": { + "title": "Low-code manifest", + "type": "object", + "description": "The low-code manifest that defines the components of the source." + } + } + } +} diff --git a/airbyte-lib/tests/unit_tests/__init__.py b/airbyte-integrations/connectors/source-declarative-manifest/unit_tests/__init__.py similarity index 100% rename from airbyte-lib/tests/unit_tests/__init__.py rename to airbyte-integrations/connectors/source-declarative-manifest/unit_tests/__init__.py diff --git a/airbyte-cdk/python/unit_tests/test_source_declarative_manifest.py b/airbyte-integrations/connectors/source-declarative-manifest/unit_tests/test_source_declarative_manifest.py similarity index 92% rename from airbyte-cdk/python/unit_tests/test_source_declarative_manifest.py rename to airbyte-integrations/connectors/source-declarative-manifest/unit_tests/test_source_declarative_manifest.py index 7b1288c52a849..333eee4ed0cff 100644 --- a/airbyte-cdk/python/unit_tests/test_source_declarative_manifest.py +++ b/airbyte-integrations/connectors/source-declarative-manifest/unit_tests/test_source_declarative_manifest.py @@ -7,7 +7,7 @@ import pytest from airbyte_cdk.sources.declarative.manifest_declarative_source import ManifestDeclarativeSource -from source_declarative_manifest.main import create_manifest +from source_declarative_manifest.run import create_manifest, handle_command CONFIG = { "__injected_declarative_manifest": { @@ -59,9 +59,8 @@ def config_file_without_injection(tmp_path): return config_file -def test_on_spec_command_then_raise_value_error(): - with pytest.raises(ValueError): - create_manifest(["spec"]) +def test_spec_does_not_raise_value_error(): + handle_command(["spec"]) def test_given_no_injected_declarative_manifest_then_raise_value_error(config_file_without_injection): diff --git a/airbyte-integrations/connectors/source-facebook-marketing/README.md b/airbyte-integrations/connectors/source-facebook-marketing/README.md index be9a7cc5e992b..1d2f74775dfa8 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/README.md +++ b/airbyte-integrations/connectors/source-facebook-marketing/README.md @@ -30,7 +30,7 @@ See `sample_files/sample_config.json` for a sample config file. poetry run source-facebook-marketing spec poetry run source-facebook-marketing check --config secrets/config.json poetry run source-facebook-marketing discover --config secrets/config.json -poetry run source-facebook-marketing read --config secrets/config.json --catalog sample_files/configured_catalog.json +poetry run source-facebook-marketing read --config secrets/config.json --catalog integration_tests/configured_catalog.json ``` ### Running unit tests diff --git a/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/expected_records.jsonl index 4859b89843622..b960b32a3c993 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/expected_records.jsonl +++ b/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/expected_records.jsonl @@ -1,29 +1,49 @@ -{"stream": "ad_account", "data": {"id": "act_212551616838260", "account_id": "212551616838260", "account_status": 1, "age": 1402.6937847222, "amount_spent": "39125", "balance": "0", "business": {"id": "1506473679510495", "name": "Airbyte"}, "business_city": "", "business_country_code": "US", "business_name": "", "business_street": "", "business_street2": "", "can_create_brand_lift_study": false, "capabilities": ["CAN_CREATE_CALL_ADS", "CAN_SEE_GROWTH_OPPORTUNITY_DATA", "ENABLE_IA_RECIRC_AD_DISPLAY_FORMAT", "CAN_USE_MOBILE_EXTERNAL_PAGE_TYPE", "CAN_USE_FB_FEED_POSITION_IN_VIDEO_VIEW_15S", "ENABLE_BIZ_DISCO_ADS", "ENABLE_BRAND_OBJECTIVES_FOR_BIZ_DISCO_ADS", "ENABLE_DIRECT_REACH_FOR_BIZ_DISCO_ADS", "ENABLE_DYNAMIC_ADS_ON_IG_STORIES_ADS", "ENABLE_IG_STORIES_ADS_PPE_OBJECTIVE", "ENABLE_IG_STORIES_ADS_MESSENGER_DESTINATION", "ENABLE_PAC_FOR_BIZ_DISCO_ADS", "CAN_USE_FB_INSTREAM_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_FB_STORY_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_AN_INSTREAM_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_IG_STORY_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_FB_IA_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_FB_SUG_VIDEO_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_FB_MKT_PLACE_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_IG_FEED_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_IG_EXPLORE_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_AN_CLASSIC_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_AN_REWARD_VIDEO_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_REACH_AND_FREQUENCY", "CAN_USE_RECURRING_BUDGET", "HAS_VALID_PAYMENT_METHODS", "CAN_USE_LINK_CLICK_BILLING_EVENT", "CAN_USE_CPA_BILLING_EVENT", "CAN_SEE_NEW_CONVERSION_WINDOW_NUX", "ADS_INSTREAM_INTERFACE_INTEGRITY", "ADS_INSTREAM_LINK_CLICK", "ADS_INSTREAM_LINK_CLICK_IMAGE", "ADS_IN_OBJECTIVES_DEPRECATION", "MESSENGER_INBOX_ADS_PRODUCT_CATALOG_SALES", "CAN_SHOW_MESSENGER_DUPLICSTION_UPSELL", "ALLOW_INSTREAM_ONLY_FOR_REACH", "ADS_INSTREAM_VIDEO_PLACEMENT_CONVERSIONS", "CAN_CREATE_INSTAGRAM_EXPLORE_ADS", "ALLOW_INSTREAM_VIDEOS_PLACEMENT_ONLY", "ALLOW_INSTREAM_NON_INTERRUPTIVE_LEADGEN", "INSTREAM_VIDEO_AD_DESKTOP_CONVERSION_AD_PREVIEW", "ALLOW_INSTREAM_ONLY_FOR_BRAND_AWARENESS_AUCTION", "ALLOW_SUGGESTED_VIDEOS_PLACEMENT_ONLY", "WHATSAPP_DESTINATION_ADS", "CTM_ADS_CREATION_CLICK_TO_DIRECT", "CTW_ADS_ENABLE_IG_FEED_PLACEMENT", "CTW_ADS_FOR_NON_MESSAGES_OBJECTIVE", "CTW_ADS_TRUSTED_TIER_2_PLUS_ADVERTISER", "CTW_ADS_TRUSTED_TIER_ADVERTISER", "ADS_PLACEMENT_MARKETPLACE", "ADNW_DISABLE_INSTREAM_AND_WEB_PLACEMENT", "CAN_CHANGE_BILLING_THRESHOLD", "CAN_USE_APP_EVENT_AVERAGE_COST_BIDDING", "CAN_USE_LEAD_GEN_AVERAGE_COST_BIDDING", "ADS_VALUE_OPTIMIZATION_DYNAMIC_ADS_1D", "ADS_DELIVERY_INSIGHTS_IN_BIDDING_PRESET_EXPERIMENT", "ADS_DELIVERY_INSIGHTS_OPTIMIZATION_PRESET", "CAN_SEE_APP_AD_EVENTS", "CAN_SEE_NEW_STANDARD_EVENTS_BETA", "CAN_SEE_VCK_HOLIDAY_TEMPLATES", "ENABLE_DCO_FOR_FB_STORY_ADS", "CAN_USE_IG_EXPLORE_GRID_HOME_PLACEMENT", "CAN_USE_IG_EXPLORE_HOME_IN_REACH_AND_FREQUENCY", "CAN_USE_IG_EXPLORE_HOME_POST_ENGAGEMENT_MESSAGES", "CAN_USE_IG_SEARCH_PLACEMENT", "CAN_USE_IG_SEARCH_RESULTS_AUTO_PLACEMENT", "CAN_USE_IG_REELS_PAC_CAROUSEL", "CAN_USE_IG_REELS_POSITION", "CAN_SEE_CONVERSION_LIFT_SUMMARY", "CAN_USE_IG_PROFILE_FEED_POSITION", "CAN_USE_IG_REELS_REACH_AND_FREQUENCY", "CAN_USE_IG_REELS_OVERLAY_POSITION", "CAN_USE_IG_REELS_OVERLAY_PAC", "CAN_USE_IG_SHOP_TAB_PAC", "CAN_SEE_LEARNING_STAGE", "ENABLE_WEBSITE_CONVERSIONS_FOR_FB_STORY_ADS", "ENABLE_MESSENGER_INBOX_VIDEO_ADS", "ENABLE_VIDEO_VIEWS_FOR_FB_STORY_ADS", "ENABLE_LINK_CLICKS_FOR_FB_STORY_ADS", "ENABLE_REACH_FOR_FB_STORY_ADS", "CAN_USE_CALL_TO_ACTION_LINK_IMPORT_EXPORT", "ADS_INSTREAM_VIDEO_ENABLE_SLIDE_SHOW", "ALLOW_INSTREAM_VIDEOS_PLACEMENT_ONLY_IN_VV_REACH_AND_FREQUENCY", "ENABLE_MOBILE_APP_INSTALLS_FOR_FB_STORY_ADS", "ENABLE_LEAD_GEN_FOR_FB_STORY_ADS", "CAN_USE_FB_MKT_PLACE_POSITION_IN_REACH", "CAN_USE_FB_MKT_PLACE_POSITION_IN_VIDEO_VIEW", "CAN_USE_FB_MKT_PLACE_POSITION_IN_STORE_VISIT", "ENABLE_MOBILE_APP_ENGAGEMENT_FOR_FB_STORY_ADS", "CAN_USE_FB_MKT_PLACE_POSITION_IN_BRAND_AWARENESS", "CAN_USE_FB_MKT_PLACE_POSITION_IN_APP_INSTALLS", "CAN_USE_FB_MKT_PLACE_POSITION_IN_LEAD_GENERATION", "CAN_USE_FB_MKT_PLACE_POSITION_IN_MESSAGE", "CAN_USE_FB_MKT_PLACE_POSITION_IN_PAGE_LIKE", "CAN_USE_FB_MKT_PLACE_POSITION_IN_POST_ENGAGEMENT", "RF_ALLOW_MARKETPLACE_ACCOUNT", "RF_ALLOW_SEARCH_ACCOUNT", "VERTICAL_VIDEO_PAC_INSTREAM_UPSELL", "IX_COLLECTION_ENABLED_FOR_BAO_AND_REACH", "ADS_BM_REQUIREMENTS_OCT_15_RELEASE", "ENABLE_POST_ENGAGEMENT_FOR_FB_STORY", "ENBABLE_CATALOG_SALES_FOR_FB_STORY", "CAN_USE_WHATSAPP_DESTINATION_ON_LINK_CLICKS_AND_CONVERSIONS", "CAN_USE_WHATSAPP_DESTINATION_ON_CONVERSIONS", "IS_NON_TAIL_AD_ACCOUNT", "IS_IN_IG_EXISTING_POST_CTA_DEFAULTING_EXPERIMENT", "IS_IN_SHORT_WA_LINK_CTWA_UNCONV_TRAFFIC_EXPERIMENT", "IS_IN_ODAX_EXPERIENCE", "IS_IN_REACH_BRAND_AWARENESS_WHATSAPP_L1_DESTINATION_EXPERIMENT", "IS_IN_VIDEO_VIEWS_WHATSAPP_L1_DESTINATION_EXPERIMENT", "IS_IN_WHATSAPP_DESTINATION_DEFAULTING_EXPERIMENT", "CAN_USE_MARKETPLACE_DESKTOP", "ADS_MERCHANT_OVERLAYS_DEPRECATION", "CONNECTIONS_DEPRECATION_V2", "CAN_USE_LIVE_VIDEO_FOR_THRUPLAY", "CAN_SEE_HEC_AM_FLOW", "CAN_SEE_POLITICAL_FLOW", "ADS_INSTREAM_PLACEMENT_CATALOG_SALES", "ENABLE_CONVERSIONS_FOR_FB_GROUP_TAB_ADS", "ENABLE_LINK_CLICK_FOR_FB_GROUP_TAB_ADS", "ENABLE_REACH_FOR_FB_GROUP_TAB_ADS", "CAN_USE_CONVERSATIONS_OPTIMIZATION", "ENABLE_THRUPLAY_OPTIMIZATION_MESSENGER_STORY_ADS", "CAN_USE_IG_STORY_POLLS_PAC_CREATION", "IOS14_CEO_CAMPAIGN_CREATION", "ENABLE_VIDEO_CHANNEL_PLACEMENT_FOR_RSVP_ADS", "DIGITAL_CIRCULAR_ADS", "CAN_SEE_SAFR_V3_FLOW", "CAN_USE_FB_REELS_POSITION", "CAN_USE_ADS_ON_FB_REELS_POSITION", "CAN_USE_FB_REELS_AUTO_PLACEMENT", "ENABLE_FB_REELS_CREATION_PAC_ADS", "ENABLE_FB_REELS_CREATION_DCO_ADS", "ENABLE_FB_REELS_POSTLOOP_CREATION_DCO_ADS", "ENABLE_FB_REELS_POSTLOOP_CREATION_PAC_ADS", "RF_CPA_BILLING_DEPRECATION_PHASE_2", "ENABLE_APP_INSTALL_CUSTOM_PRODUCT_PAGES", "ENABLE_ADS_ON_FB_REELS_PLACEMENT_UNIFICATION", "ADS_RF_FB_REELS_PLACEMENT", "REELS_DM_ADS_ENABLE_REACH_AND_FREQUENCY", "ELIGIBLE_FOR_TEXT_GEN", "CAN_USE_BUDGET_SCHEDULING_API", "ADS_AEMV2_HAS_LAUNCHED"], "created_time": "2020-04-13T18:04:59-0700", "currency": "USD", "disable_reason": 0.0, "end_advertiser": 1506473679510495.0, "end_advertiser_name": "Airbyte", "fb_entity": 85.0, "funding_source": 2825262454257003.0, "funding_source_details": {"id": "2825262454257003", "type": 1}, "has_migrated_permissions": true, "is_attribution_spec_system_default": true, "is_direct_deals_enabled": false, "is_in_3ds_authorization_enabled_market": false, "is_notifications_enabled": true, "is_personal": 0.0, "is_prepay_account": false, "is_tax_id_required": false, "min_campaign_group_spend_cap": 10000.0, "min_daily_budget": 100.0, "name": "Airbyte", "offsite_pixels_tos_accepted": true, "owner": 1506473679510495.0, "rf_spec": {"min_reach_limits": {"US": 200000, "CA": 200000, "GB": 200000, "AR": 200000, "AU": 200000, "AT": 200000, "BE": 200000, "BR": 200000, "CL": 200000, "CN": 200000, "CO": 200000, "HR": 200000, "DK": 200000, "DO": 200000, "EG": 200000, "FI": 200000, "FR": 200000, "DE": 200000, "GR": 200000, "HK": 200000, "IN": 200000, "ID": 200000, "IE": 200000, "IL": 200000, "IT": 200000, "JP": 200000, "JO": 200000, "KW": 200000, "LB": 200000, "MY": 200000, "MX": 200000, "NL": 200000, "NZ": 200000, "NG": 200000, "NO": 200000, "PK": 200000, "PA": 200000, "PE": 200000, "PH": 200000, "PL": 200000, "RU": 200000, "SA": 200000, "RS": 200000, "SG": 200000, "ZA": 200000, "KR": 200000, "ES": 200000, "SE": 200000, "CH": 200000, "TW": 200000, "TH": 200000, "TR": 200000, "AE": 200000, "VE": 200000, "PT": 200000, "LU": 200000, "BG": 200000, "CZ": 200000, "SI": 200000, "IS": 200000, "SK": 200000, "LT": 200000, "TT": 200000, "BD": 200000, "LK": 200000, "KE": 200000, "HU": 200000, "MA": 200000, "CY": 200000, "JM": 200000, "EC": 200000, "RO": 200000, "BO": 200000, "GT": 200000, "CR": 200000, "QA": 200000, "SV": 200000, "HN": 200000, "NI": 200000, "PY": 200000, "UY": 200000, "PR": 200000, "BA": 200000, "PS": 200000, "TN": 200000, "BH": 200000, "VN": 200000, "GH": 200000, "MU": 200000, "UA": 200000, "MT": 200000, "BS": 200000, "MV": 200000, "OM": 200000, "MK": 200000, "LV": 200000, "EE": 200000, "IQ": 200000, "DZ": 200000, "AL": 200000, "NP": 200000, "MO": 200000, "ME": 200000, "SN": 200000, "GE": 200000, "BN": 200000, "UG": 200000, "GP": 200000, "BB": 200000, "AZ": 200000, "TZ": 200000, "LY": 200000, "MQ": 200000, "CM": 200000, "BW": 200000, "ET": 200000, "KZ": 200000, "NA": 200000, "MG": 200000, "NC": 200000, "MD": 200000, "FJ": 200000, "BY": 200000, "JE": 200000, "GU": 200000, "YE": 200000, "ZM": 200000, "IM": 200000, "HT": 200000, "KH": 200000, "AW": 200000, "PF": 200000, "AF": 200000, "BM": 200000, "GY": 200000, "AM": 200000, "MW": 200000, "AG": 200000, "RW": 200000, "GG": 200000, "GM": 200000, "FO": 200000, "LC": 200000, "KY": 200000, "BJ": 200000, "AD": 200000, "GD": 200000, "VI": 200000, "BZ": 200000, "VC": 200000, "MN": 200000, "MZ": 200000, "ML": 200000, "AO": 200000, "GF": 200000, "UZ": 200000, "DJ": 200000, "BF": 200000, "MC": 200000, "TG": 200000, "GL": 200000, "GA": 200000, "GI": 200000, "CD": 200000, "KG": 200000, "PG": 200000, "BT": 200000, "KN": 200000, "SZ": 200000, "LS": 200000, "LA": 200000, "LI": 200000, "MP": 200000, "SR": 200000, "SC": 200000, "VG": 200000, "TC": 200000, "DM": 200000, "MR": 200000, "AX": 200000, "SM": 200000, "SL": 200000, "NE": 200000, "CG": 200000, "AI": 200000, "YT": 200000, "CV": 200000, "GN": 200000, "TM": 200000, "BI": 200000, "TJ": 200000, "VU": 200000, "SB": 200000, "ER": 200000, "WS": 200000, "AS": 200000, "FK": 200000, "GQ": 200000, "TO": 200000, "KM": 200000, "PW": 200000, "FM": 200000, "CF": 200000, "SO": 200000, "MH": 200000, "VA": 200000, "TD": 200000, "KI": 200000, "ST": 200000, "TV": 200000, "NR": 200000, "RE": 200000, "LR": 200000, "ZW": 200000, "CI": 200000, "MM": 200000, "AN": 200000, "AQ": 200000, "BQ": 200000, "BV": 200000, "IO": 200000, "CX": 200000, "CC": 200000, "CK": 200000, "CW": 200000, "TF": 200000, "GW": 200000, "HM": 200000, "XK": 200000, "MS": 200000, "NU": 200000, "NF": 200000, "PN": 200000, "BL": 200000, "SH": 200000, "MF": 200000, "PM": 200000, "SX": 200000, "GS": 200000, "SS": 200000, "SJ": 200000, "TL": 200000, "TK": 200000, "UM": 200000, "WF": 200000, "EH": 200000}, "countries": ["US", "CA", "GB", "AR", "AU", "AT", "BE", "BR", "CL", "CN", "CO", "HR", "DK", "DO", "EG", "FI", "FR", "DE", "GR", "HK", "IN", "ID", "IE", "IL", "IT", "JP", "JO", "KW", "LB", "MY", "MX", "NL", "NZ", "NG", "NO", "PK", "PA", "PE", "PH", "PL", "RU", "SA", "RS", "SG", "ZA", "KR", "ES", "SE", "CH", "TW", "TH", "TR", "AE", "VE", "PT", "LU", "BG", "CZ", "SI", "IS", "SK", "LT", "TT", "BD", "LK", "KE", "HU", "MA", "CY", "JM", "EC", "RO", "BO", "GT", "CR", "QA", "SV", "HN", "NI", "PY", "UY", "PR", "BA", "PS", "TN", "BH", "VN", "GH", "MU", "UA", "MT", "BS", "MV", "OM", "MK", "EE", "LV", "IQ", "DZ", "AL", "NP", "MO", "ME", "SN", "GE", "BN", "UG", "GP", "BB", "ZW", "CI", "AZ", "TZ", "LY", "MQ", "MM", "CM", "BW", "ET", "KZ", "NA", "MG", "NC", "MD", "FJ", "BY", "JE", "GU", "YE", "ZM", "IM", "HT", "KH", "AW", "PF", "AF", "BM", "GY", "AM", "MW", "AG", "RW", "GG", "GM", "FO", "LC", "KY", "BJ", "AD", "GD", "VI", "BZ", "VC", "MN", "MZ", "ML", "AO", "GF", "UZ", "DJ", "BF", "MC", "TG", "GL", "GA", "GI", "CD", "KG", "PG", "BT", "KN", "SZ", "LS", "LA", "LI", "MP", "SR", "SC", "VG", "TC", "DM", "MR", "AX", "SM", "SL", "NE", "CG", "AI", "YT", "LR", "CV", "GN", "TM", "BI", "TJ", "VU", "SB", "ER", "WS", "AS", "FK", "GQ", "TO", "KM", "PW", "FM", "CF", "SO", "MH", "VA", "TD", "KI", "ST", "TV", "NR", "RE", "AN", "AQ", "BQ", "BV", "IO", "CX", "CC", "CK", "CW", "TF", "GW", "HM", "XK", "MS", "NU", "NF", "PN", "BL", "SH", "MF", "PM", "SX", "GS", "SS", "SJ", "TL", "TK", "UM", "WF", "EH"], "min_campaign_duration": {"US": 1, "CA": 1, "GB": 1, "AR": 1, "AU": 1, "AT": 1, "BE": 1, "BR": 1, "CL": 1, "CN": 1, "CO": 1, "HR": 1, "DK": 1, "DO": 1, "EG": 1, "FI": 1, "FR": 1, "DE": 1, "GR": 1, "HK": 1, "IN": 1, "ID": 1, "IE": 1, "IL": 1, "IT": 1, "JP": 1, "JO": 1, "KW": 1, "LB": 1, "MY": 1, "MX": 1, "NL": 1, "NZ": 1, "NG": 1, "NO": 1, "PK": 1, "PA": 1, "PE": 1, "PH": 1, "PL": 1, "RU": 1, "SA": 1, "RS": 1, "SG": 1, "ZA": 1, "KR": 1, "ES": 1, "SE": 1, "CH": 1, "TW": 1, "TH": 1, "TR": 1, "AE": 1, "VE": 1, "PT": 1, "LU": 1, "BG": 1, "CZ": 1, "SI": 1, "IS": 1, "SK": 1, "LT": 1, "TT": 1, "BD": 1, "LK": 1, "KE": 1, "HU": 1, "MA": 1, "CY": 1, "JM": 1, "EC": 1, "RO": 1, "BO": 1, "GT": 1, "CR": 1, "QA": 1, "SV": 1, "HN": 1, "NI": 1, "PY": 1, "UY": 1, "PR": 1, "BA": 1, "PS": 1, "TN": 1, "BH": 1, "VN": 1, "GH": 1, "MU": 1, "UA": 1, "MT": 1, "BS": 1, "MV": 1, "OM": 1, "MK": 1, "LV": 1, "EE": 1, "IQ": 1, "DZ": 1, "AL": 1, "NP": 1, "MO": 1, "ME": 1, "SN": 1, "GE": 1, "BN": 1, "UG": 1, "GP": 1, "BB": 1, "AZ": 1, "TZ": 1, "LY": 1, "MQ": 1, "CM": 1, "BW": 1, "ET": 1, "KZ": 1, "NA": 1, "MG": 1, "NC": 1, "MD": 1, "FJ": 1, "BY": 1, "JE": 1, "GU": 1, "YE": 1, "ZM": 1, "IM": 1, "HT": 1, "KH": 1, "AW": 1, "PF": 1, "AF": 1, "BM": 1, "GY": 1, "AM": 1, "MW": 1, "AG": 1, "RW": 1, "GG": 1, "GM": 1, "FO": 1, "LC": 1, "KY": 1, "BJ": 1, "AD": 1, "GD": 1, "VI": 1, "BZ": 1, "VC": 1, "MN": 1, "MZ": 1, "ML": 1, "AO": 1, "GF": 1, "UZ": 1, "DJ": 1, "BF": 1, "MC": 1, "TG": 1, "GL": 1, "GA": 1, "GI": 1, "CD": 1, "KG": 1, "PG": 1, "BT": 1, "KN": 1, "SZ": 1, "LS": 1, "LA": 1, "LI": 1, "MP": 1, "SR": 1, "SC": 1, "VG": 1, "TC": 1, "DM": 1, "MR": 1, "AX": 1, "SM": 1, "SL": 1, "NE": 1, "CG": 1, "AI": 1, "YT": 1, "CV": 1, "GN": 1, "TM": 1, "BI": 1, "TJ": 1, "VU": 1, "SB": 1, "ER": 1, "WS": 1, "AS": 1, "FK": 1, "GQ": 1, "TO": 1, "KM": 1, "PW": 1, "FM": 1, "CF": 1, "SO": 1, "MH": 1, "VA": 1, "TD": 1, "KI": 1, "ST": 1, "TV": 1, "NR": 1, "RE": 1, "LR": 1, "ZW": 1, "CI": 1, "MM": 1, "AN": 1, "AQ": 1, "BQ": 1, "BV": 1, "IO": 1, "CX": 1, "CC": 1, "CK": 1, "CW": 1, "TF": 1, "GW": 1, "HM": 1, "XK": 1, "MS": 1, "NU": 1, "NF": 1, "PN": 1, "BL": 1, "SH": 1, "MF": 1, "PM": 1, "SX": 1, "GS": 1, "SS": 1, "SJ": 1, "TL": 1, "TK": 1, "UM": 1, "WF": 1, "EH": 1}, "max_campaign_duration": {"US": 90, "CA": 90, "GB": 90, "AR": 90, "AU": 90, "AT": 90, "BE": 90, "BR": 90, "CL": 90, "CN": 90, "CO": 90, "HR": 90, "DK": 90, "DO": 90, "EG": 90, "FI": 90, "FR": 90, "DE": 90, "GR": 90, "HK": 90, "IN": 90, "ID": 90, "IE": 90, "IL": 90, "IT": 90, "JP": 90, "JO": 90, "KW": 90, "LB": 90, "MY": 90, "MX": 90, "NL": 90, "NZ": 90, "NG": 90, "NO": 90, "PK": 90, "PA": 90, "PE": 90, "PH": 90, "PL": 90, "RU": 90, "SA": 90, "RS": 90, "SG": 90, "ZA": 90, "KR": 90, "ES": 90, "SE": 90, "CH": 90, "TW": 90, "TH": 90, "TR": 90, "AE": 90, "VE": 90, "PT": 90, "LU": 90, "BG": 90, "CZ": 90, "SI": 90, "IS": 90, "SK": 90, "LT": 90, "TT": 90, "BD": 90, "LK": 90, "KE": 90, "HU": 90, "MA": 90, "CY": 90, "JM": 90, "EC": 90, "RO": 90, "BO": 90, "GT": 90, "CR": 90, "QA": 90, "SV": 90, "HN": 90, "NI": 90, "PY": 90, "UY": 90, "PR": 90, "BA": 90, "PS": 90, "TN": 90, "BH": 90, "VN": 90, "GH": 90, "MU": 90, "UA": 90, "MT": 90, "BS": 90, "MV": 90, "OM": 90, "MK": 90, "LV": 90, "EE": 90, "IQ": 90, "DZ": 90, "AL": 90, "NP": 90, "MO": 90, "ME": 90, "SN": 90, "GE": 90, "BN": 90, "UG": 90, "GP": 90, "BB": 90, "AZ": 90, "TZ": 90, "LY": 90, "MQ": 90, "CM": 90, "BW": 90, "ET": 90, "KZ": 90, "NA": 90, "MG": 90, "NC": 90, "MD": 90, "FJ": 90, "BY": 90, "JE": 90, "GU": 90, "YE": 90, "ZM": 90, "IM": 90, "HT": 90, "KH": 90, "AW": 90, "PF": 90, "AF": 90, "BM": 90, "GY": 90, "AM": 90, "MW": 90, "AG": 90, "RW": 90, "GG": 90, "GM": 90, "FO": 90, "LC": 90, "KY": 90, "BJ": 90, "AD": 90, "GD": 90, "VI": 90, "BZ": 90, "VC": 90, "MN": 90, "MZ": 90, "ML": 90, "AO": 90, "GF": 90, "UZ": 90, "DJ": 90, "BF": 90, "MC": 90, "TG": 90, "GL": 90, "GA": 90, "GI": 90, "CD": 90, "KG": 90, "PG": 90, "BT": 90, "KN": 90, "SZ": 90, "LS": 90, "LA": 90, "LI": 90, "MP": 90, "SR": 90, "SC": 90, "VG": 90, "TC": 90, "DM": 90, "MR": 90, "AX": 90, "SM": 90, "SL": 90, "NE": 90, "CG": 90, "AI": 90, "YT": 90, "CV": 90, "GN": 90, "TM": 90, "BI": 90, "TJ": 90, "VU": 90, "SB": 90, "ER": 90, "WS": 90, "AS": 90, "FK": 90, "GQ": 90, "TO": 90, "KM": 90, "PW": 90, "FM": 90, "CF": 90, "SO": 90, "MH": 90, "VA": 90, "TD": 90, "KI": 90, "ST": 90, "TV": 90, "NR": 90, "RE": 90, "LR": 90, "ZW": 90, "CI": 90, "MM": 90, "AN": 90, "AQ": 90, "BQ": 90, "BV": 90, "IO": 90, "CX": 90, "CC": 90, "CK": 90, "CW": 90, "TF": 90, "GW": 90, "HM": 90, "XK": 90, "MS": 90, "NU": 90, "NF": 90, "PN": 90, "BL": 90, "SH": 90, "MF": 90, "PM": 90, "SX": 90, "GS": 90, "SS": 90, "SJ": 90, "TL": 90, "TK": 90, "UM": 90, "WF": 90, "EH": 90}, "max_days_to_finish": {"US": 180, "CA": 180, "GB": 180, "AR": 180, "AU": 180, "AT": 180, "BE": 180, "BR": 180, "CL": 180, "CN": 180, "CO": 180, "HR": 180, "DK": 180, "DO": 180, "EG": 180, "FI": 180, "FR": 180, "DE": 180, "GR": 180, "HK": 180, "IN": 180, "ID": 180, "IE": 180, "IL": 180, "IT": 180, "JP": 180, "JO": 180, "KW": 180, "LB": 180, "MY": 180, "MX": 180, "NL": 180, "NZ": 180, "NG": 180, "NO": 180, "PK": 180, "PA": 180, "PE": 180, "PH": 180, "PL": 180, "RU": 180, "SA": 180, "RS": 180, "SG": 180, "ZA": 180, "KR": 180, "ES": 180, "SE": 180, "CH": 180, "TW": 180, "TH": 180, "TR": 180, "AE": 180, "VE": 180, "PT": 180, "LU": 180, "BG": 180, "CZ": 180, "SI": 180, "IS": 180, "SK": 180, "LT": 180, "TT": 180, "BD": 180, "LK": 180, "KE": 180, "HU": 180, "MA": 180, "CY": 180, "JM": 180, "EC": 180, "RO": 180, "BO": 180, "GT": 180, "CR": 180, "QA": 180, "SV": 180, "HN": 180, "NI": 180, "PY": 180, "UY": 180, "PR": 180, "BA": 180, "PS": 180, "TN": 180, "BH": 180, "VN": 180, "GH": 180, "MU": 180, "UA": 180, "MT": 180, "BS": 180, "MV": 180, "OM": 180, "MK": 180, "LV": 180, "EE": 180, "IQ": 180, "DZ": 180, "AL": 180, "NP": 180, "MO": 180, "ME": 180, "SN": 180, "GE": 180, "BN": 180, "UG": 180, "GP": 180, "BB": 180, "AZ": 180, "TZ": 180, "LY": 180, "MQ": 180, "CM": 180, "BW": 180, "ET": 180, "KZ": 180, "NA": 180, "MG": 180, "NC": 180, "MD": 180, "FJ": 180, "BY": 180, "JE": 180, "GU": 180, "YE": 180, "ZM": 180, "IM": 180, "HT": 180, "KH": 180, "AW": 180, "PF": 180, "AF": 180, "BM": 180, "GY": 180, "AM": 180, "MW": 180, "AG": 180, "RW": 180, "GG": 180, "GM": 180, "FO": 180, "LC": 180, "KY": 180, "BJ": 180, "AD": 180, "GD": 180, "VI": 180, "BZ": 180, "VC": 180, "MN": 180, "MZ": 180, "ML": 180, "AO": 180, "GF": 180, "UZ": 180, "DJ": 180, "BF": 180, "MC": 180, "TG": 180, "GL": 180, "GA": 180, "GI": 180, "CD": 180, "KG": 180, "PG": 180, "BT": 180, "KN": 180, "SZ": 180, "LS": 180, "LA": 180, "LI": 180, "MP": 180, "SR": 180, "SC": 180, "VG": 180, "TC": 180, "DM": 180, "MR": 180, "AX": 180, "SM": 180, "SL": 180, "NE": 180, "CG": 180, "AI": 180, "YT": 180, "CV": 180, "GN": 180, "TM": 180, "BI": 180, "TJ": 180, "VU": 180, "SB": 180, "ER": 180, "WS": 180, "AS": 180, "FK": 180, "GQ": 180, "TO": 180, "KM": 180, "PW": 180, "FM": 180, "CF": 180, "SO": 180, "MH": 180, "VA": 180, "TD": 180, "KI": 180, "ST": 180, "TV": 180, "NR": 180, "RE": 180, "LR": 180, "ZW": 180, "CI": 180, "MM": 180, "AN": 180, "AQ": 180, "BQ": 180, "BV": 180, "IO": 180, "CX": 180, "CC": 180, "CK": 180, "CW": 180, "TF": 180, "GW": 180, "HM": 180, "XK": 180, "MS": 180, "NU": 180, "NF": 180, "PN": 180, "BL": 180, "SH": 180, "MF": 180, "PM": 180, "SX": 180, "GS": 180, "SS": 180, "SJ": 180, "TL": 180, "TK": 180, "UM": 180, "WF": 180, "EH": 180}, "global_io_max_campaign_duration": 100}, "spend_cap": "0", "tax_id_status": 0.0, "tax_id_type": "0", "timezone_id": 1.0, "timezone_name": "America/Los_Angeles", "timezone_offset_hours_utc": -8.0, "tos_accepted": {"web_custom_audience_tos": 1}, "user_tasks": ["DRAFT", "ANALYZE", "ADVERTISE", "MANAGE"]}, "emitted_at": 1708020062150} -{"stream": "ads", "data": {"id": "23853620229650398", "bid_type": "ABSOLUTE_OCPM", "account_id": "212551616838260", "campaign_id": "23853619670350398", "adset_id": "23853619670380398", "status": "ACTIVE", "creative": {"id": "23853666124230398"}, "updated_time": "2023-03-21T22:41:46-0700", "created_time": "2023-03-17T08:04:31-0700", "name": "With The Highest Standard for Reliability", "targeting": {"age_max": 60, "age_min": 18, "custom_audiences": [{"id": "23853630753300398", "name": "Lookalike (US, 10%) - Airbyte Cloud Users"}, {"id": "23853683587660398", "name": "Web Traffic [ALL] - _copy"}], "geo_locations": {"countries": ["US"], "location_types": ["home", "recent"]}, "brand_safety_content_filter_levels": ["FACEBOOK_STANDARD", "AN_STANDARD"], "targeting_relaxation_types": {"lookalike": 1, "custom_audience": 1}, "publisher_platforms": ["facebook", "instagram", "audience_network", "messenger"], "facebook_positions": ["feed", "biz_disco_feed", "facebook_reels", "facebook_reels_overlay", "right_hand_column", "video_feeds", "instant_article", "instream_video", "marketplace", "story", "search"], "instagram_positions": ["stream", "story", "explore", "reels", "shop", "explore_home", "profile_feed"], "device_platforms": ["mobile", "desktop"], "messenger_positions": ["story"], "audience_network_positions": ["classic", "instream_video", "rewarded_video"]}, "effective_status": "ACTIVE", "last_updated_by_app_id": "119211728144504", "source_ad_id": "0", "tracking_specs": [{"action.type": ["offsite_conversion"], "fb_pixel": ["917042523049733"]}, {"action.type": ["link_click"], "post": ["662226902575095"], "post.wall": ["112704783733939"]}, {"action.type": ["post_engagement"], "page": ["112704783733939"], "post": ["662226902575095"]}], "conversion_specs": [{"action.type": ["offsite_conversion"], "conversion_id": ["6015304265216283"]}]}, "emitted_at": 1707135365030} -{"stream": "ad_sets", "data": {"id": "23853619670380398", "name": "Lookalike audience_Free Connector Program", "promoted_object": {"pixel_id": "917042523049733", "custom_event_type": "COMPLETE_REGISTRATION"}, "account_id": "212551616838260", "updated_time": "2023-03-21T14:20:51-0700", "daily_budget": 2000.0, "budget_remaining": 2000.0, "effective_status": "ACTIVE", "campaign_id": "23853619670350398", "created_time": "2023-03-17T08:04:28-0700", "start_time": "2023-03-17T08:04:28-0700", "lifetime_budget": 0.0, "targeting": {"age_max": 60, "age_min": 18, "custom_audiences": [{"id": "23853630753300398", "name": "Lookalike (US, 10%) - Airbyte Cloud Users"}, {"id": "23853683587660398", "name": "Web Traffic [ALL] - _copy"}], "geo_locations": {"countries": ["US"], "location_types": ["home", "recent"]}, "brand_safety_content_filter_levels": ["FACEBOOK_STANDARD", "AN_STANDARD"], "targeting_relaxation_types": {"lookalike": 1, "custom_audience": 1}, "publisher_platforms": ["facebook", "instagram", "audience_network", "messenger"], "facebook_positions": ["feed", "biz_disco_feed", "facebook_reels", "facebook_reels_overlay", "right_hand_column", "video_feeds", "instant_article", "instream_video", "marketplace", "story", "search"], "instagram_positions": ["stream", "story", "explore", "reels", "shop", "explore_home", "profile_feed"], "device_platforms": ["mobile", "desktop"], "messenger_positions": ["story"], "audience_network_positions": ["classic", "instream_video", "rewarded_video"]}, "bid_strategy": "LOWEST_COST_WITHOUT_CAP"}, "emitted_at": 1707135364623} -{"stream":"campaigns","data":{"id":"23846542053890398","account_id":"212551616838260","budget_rebalance_flag":false,"budget_remaining":0.0,"buying_type":"AUCTION","created_time":"2021-01-18T21:36:42-0800","configured_status":"PAUSED","effective_status":"PAUSED","name":"Fake Campaign 0","objective":"MESSAGES","smart_promotion_type":"GUIDED_CREATION","source_campaign_id":0.0,"special_ad_category":"NONE","start_time":"1969-12-31T15:59:59-0800","status":"PAUSED","updated_time":"2021-02-18T01:00:02-0800"},"emitted_at":1694795155769} -{"stream": "custom_audiences", "data": {"id": "23853683587660398", "account_id": "212551616838260", "approximate_count_lower_bound": 4700, "approximate_count_upper_bound": 5500, "customer_file_source": "PARTNER_PROVIDED_ONLY", "data_source": {"type": "UNKNOWN", "sub_type": "ANYTHING", "creation_params": "[]"}, "delivery_status": {"code": 200, "description": "This audience is ready for use."}, "description": "Custom Audience-Web Traffic [ALL] - _copy", "is_value_based": false, "name": "Web Traffic [ALL] - _copy", "operation_status": {"code": 200, "description": "Normal"}, "permission_for_actions": {"can_edit": true, "can_see_insight": "True", "can_share": "True", "subtype_supports_lookalike": "True", "supports_recipient_lookalike": "False"}, "retention_days": 0, "subtype": "CUSTOM", "time_content_updated": 1679433484, "time_created": 1679433479, "time_updated": 1679433484}, "emitted_at": 1698925454024} -{"stream": "ad_creatives", "data": {"id": "23853630774830398", "body": "Until a connector meets our GA reliability standards, you don't pay for it.", "image_url": "https://scontent.fiev6-1.fna.fbcdn.net/v/t45.1600-4/333773383_23853620180320398_4214441850420455541_n.png?_nc_cat=109&ccb=1-7&_nc_sid=c0a1f7&_nc_ohc=qbTWMi-gWi8AX8hFZLQ&_nc_ht=scontent.fiev6-1.fna&edm=ALjApogEAAAA&oh=00_AfC9KndALRjbR5Z4Xz_ZytJTb9rsS_S4_SDvmiegih69vQ&oe=65C8B50F", "account_id": "212551616838260", "actor_id": "112704783733939", "asset_feed_spec": {"bodies": [{"text": "Until a connector meets our GA reliability standards, you don't pay for it."}, {"text": "Reliability is the cornerstone of having an ELT tool you trust."}, {"text": "Don't compromise between cost and connector reliability."}, {"text": "Limitless data movement with free Alpha and Beta connectors"}], "descriptions": [{"text": "Until a connector meets our GA reliability standards, you don't pay for it. "}], "titles": [{"text": "Introducing: our free connector program"}], "optimization_type": "DEGREES_OF_FREEDOM"}, "call_to_action_type": "SIGN_UP", "effective_instagram_story_id": "5605802859523550", "effective_object_story_id": "112704783733939_660115876119531", "title": "Introducing: our free connector program", "name": "Introducing: our free connector program 2023-03-17-ccf7ed52a98e5e699299861a8a323194", "instagram_actor_id": "2185696824778148", "instagram_permalink_url": "https://www.instagram.com/p/Cp5PgWrjU8V/", "object_story_spec": {"page_id": "112704783733939", "instagram_actor_id": "2185696824778148", "link_data": {"link": "https://airbyte.com/free-connector-program?utm_medium=paid_social&utm_source=facebook&utm_campaign=q1_freeconnectorprogram_t", "image_hash": "970937d2f16de20c0a99e598aa876ac0", "call_to_action": {"type": "SIGN_UP"}}}, "object_type": "SHARE", "status": "ACTIVE", "thumbnail_url": "https://external.fiev6-1.fna.fbcdn.net/emg1/v/t13/8568826884261823966?url=https%3A%2F%2Fwww.facebook.com%2Fads%2Fimage%2F%3Fd%3DAQL3nBsTZ0CoQ_uD_vAVwqZKjwi7X3zsqa8EbE4S1aY7w8cjJ7x6BihYqZkQTgC3BzwY5Y_dxv11UvkOL0cMER5tPch9x6_Q2p3xtHYED2DHLT6v9o9CnYB8S5FMSQ91vMBQCbLFVHh_bSr0OT_4bW4V&fb_obo=1&utld=facebook.com&stp=c0.5000x0.5000f_dst-emg0_p64x64_q75&ccb=13-1&oh=06_AbE-j6xf-dGVCh9dJcOJdFM5v4Sydw74rDQJWynPZayneA&oe=65C511DE&_nc_sid=58080a", "image_hash": "970937d2f16de20c0a99e598aa876ac0"}, "emitted_at": 1707288372517} -{"stream":"activities","data":{"account_id":"212551616838260","actor_id":"122043039268043192","actor_name":"Payments RTU Processor","application_id":"0","date_time_in_timezone":"03/13/2023 at 6:30 AM","event_time":"2023-03-13T13:30:47+0000","event_type":"ad_account_billing_charge","extra_data":"{\"currency\":\"USD\",\"new_value\":1188,\"transaction_id\":\"5885578541558696-11785530\",\"action\":67,\"type\":\"payment_amount\"}","object_id":"212551616838260","object_name":"Airbyte","object_type":"ACCOUNT","translated_event_type":"Account billed"},"emitted_at":1696931251153} -{"stream":"custom_conversions","data":{"id":"694166388077667","account_id":"212551616838260","creation_time":"2020-04-22T01:36:00+0000","custom_event_type":"CONTACT","data_sources":[{"id":"2667253716886462","source_type":"PIXEL","name":"Dataline's Pixel"}],"default_conversion_value":0,"event_source_type":"pixel","is_archived":true,"is_unavailable":false,"name":"SubscribedButtonClick","retention_days":0,"rule":"{\"and\":[{\"event\":{\"eq\":\"PageView\"}},{\"or\":[{\"URL\":{\"i_contains\":\"SubscribedButtonClick\"}}]}]}"},"emitted_at":1692180839174} -{"stream":"images","data":{"id":"212551616838260:c1e94a8768a405f0f212d71fe8336647","account_id":"212551616838260","name":"Audience_1_Ad_3_1200x1200_blue_CTA_arrow.png_105","creatives":["23853630775340398","23853630871360398","23853666124200398"],"original_height":1200,"original_width":1200,"permalink_url":"https://www.facebook.com/ads/image/?d=AQIDNjjLb7VzVJ26jXb_HpudCEUJqbV_lLF2JVsdruDcBxnXQEKfzzd21VVJnkm0B-JLosUXNNg1BH78y7FxnK3AH-0D_lnk7kn39_bIcOMK7Z9HYyFInfsVY__adup3A5zGTIcHC9Y98Je5qK-yD8F6","status":"ACTIVE","url":"https://scontent-dus1-1.xx.fbcdn.net/v/t45.1600-4/335907140_23853620220420398_4375584095210967511_n.png?_nc_cat=104&ccb=1-7&_nc_sid=2aac32&_nc_ohc=xdjrPpbRGNAAX8Dck01&_nc_ht=scontent-dus1-1.xx&edm=AJcBmwoEAAAA&oh=00_AfDCqQ6viqrgLcfbO3O5-n030Usq7Zyt2c1TmsatqnYf7Q&oe=64E2779A","created_time":"2023-03-16T13:13:17-0700","hash":"c1e94a8768a405f0f212d71fe8336647","url_128":"https://scontent-dus1-1.xx.fbcdn.net/v/t45.1600-4/335907140_23853620220420398_4375584095210967511_n.png?stp=dst-png_s128x128&_nc_cat=104&ccb=1-7&_nc_sid=2aac32&_nc_ohc=xdjrPpbRGNAAX8Dck01&_nc_ht=scontent-dus1-1.xx&edm=AJcBmwoEAAAA&oh=00_AfAY50CMpox2s4w_f18IVx7sZuXlg4quF6YNIJJ8D4PZew&oe=64E2779A","is_associated_creatives_in_adgroups":true,"updated_time":"2023-03-17T08:09:56-0700","height":1200,"width":1200},"emitted_at":1692180839582} -{"stream":"ads_insights","data":{"account_currency":"USD","account_id":"212551616838260","account_name":"Airbyte","actions":[{"action_destination":"244953057175777","action_target_id":"244953057175777","action_type":"page_engagement","value":3.0,"1d_click":3.0,"7d_click":3.0,"28d_click":3.0},{"action_destination":"244953057175777","action_target_id":"244953057175777","action_type":"post_engagement","value":3.0,"1d_click":3.0,"7d_click":3.0,"28d_click":3.0},{"action_destination":"244953057175777","action_target_id":"244953057175777","action_type":"link_click","value":3.0,"1d_click":3.0,"7d_click":3.0,"28d_click":3.0}],"ad_id":"23846765228310398","ad_name":"Airbyte Ad","adset_id":"23846765228280398","adset_name":"Vanilla awareness ad set","buying_type":"AUCTION","campaign_id":"23846765228240398","campaign_name":"Airbyte Awareness Campaign 1 (sherif)","clicks":3,"conversion_rate_ranking":"UNKNOWN","cost_per_estimated_ad_recallers":0.007,"cost_per_inline_link_click":0.396667,"cost_per_inline_post_engagement":0.396667,"cost_per_unique_click":0.396667,"cost_per_unique_inline_link_click":0.396667,"cpc":0.396667,"cpm":0.902199,"cpp":0.948207,"created_time":"2021-02-09","ctr":0.227445,"date_start":"2021-02-15","date_stop":"2021-02-15","engagement_rate_ranking":"UNKNOWN","estimated_ad_recall_rate":13.545817,"estimated_ad_recallers":170.0,"frequency":1.050996,"impressions":1319,"inline_link_click_ctr":0.227445,"inline_link_clicks":3,"inline_post_engagement":3,"instant_experience_clicks_to_open":1.0,"instant_experience_clicks_to_start":1.0,"objective":"BRAND_AWARENESS","optimization_goal":"AD_RECALL_LIFT","outbound_clicks":[{"action_destination":"244953057175777","action_target_id":"244953057175777","action_type":"outbound_click","value":3.0}],"quality_ranking":"UNKNOWN","reach":1255,"social_spend":0.0,"spend":1.19,"unique_actions":[{"action_destination":"244953057175777","action_target_id":"244953057175777","action_type":"page_engagement","value":3.0,"1d_click":3.0,"7d_click":3.0,"28d_click":3.0},{"action_destination":"244953057175777","action_target_id":"244953057175777","action_type":"post_engagement","value":3.0,"1d_click":3.0,"7d_click":3.0,"28d_click":3.0},{"action_destination":"244953057175777","action_target_id":"244953057175777","action_type":"link_click","value":3.0,"1d_click":3.0,"7d_click":3.0,"28d_click":3.0}],"unique_clicks":3,"unique_ctr":0.239044,"unique_inline_link_click_ctr":0.239044,"unique_inline_link_clicks":3,"unique_link_clicks_ctr":0.239044,"unique_outbound_clicks":[{"action_destination":"244953057175777","action_target_id":"244953057175777","action_type":"outbound_click","value":3.0}],"updated_time":"2021-08-27","video_play_curve_actions":[{"action_type":"video_view"}],"website_ctr":[{"action_type":"link_click","value":0.227445}],"wish_bid":0.0},"emitted_at":1682686057366} -{"stream":"ads_insights_action_carousel_card","data":{"account_currency":"USD","account_id":"212551616838260","account_name":"Airbyte","ad_id":"23846765228310398","ad_name":"Airbyte Ad","adset_id":"23846765228280398","adset_name":"Vanilla awareness ad set","buying_type":"AUCTION","campaign_id":"23846765228240398","campaign_name":"Airbyte Awareness Campaign 1 (sherif)","clicks":3,"conversion_rate_ranking":"UNKNOWN","cost_per_estimated_ad_recallers":0.007,"cost_per_inline_link_click":0.396667,"cost_per_inline_post_engagement":0.396667,"cost_per_unique_click":0.396667,"cost_per_unique_inline_link_click":0.396667,"cpc":0.396667,"cpm":0.902199,"cpp":0.948207,"created_time":"2021-02-09","ctr":0.227445,"date_start":"2021-02-15","date_stop":"2021-02-15","engagement_rate_ranking":"UNKNOWN","estimated_ad_recall_rate":13.545817,"estimated_ad_recallers":170.0,"frequency":1.050996,"impressions":1319,"inline_link_click_ctr":0.227445,"inline_post_engagement":3,"objective":"BRAND_AWARENESS","optimization_goal":"AD_RECALL_LIFT","quality_ranking":"UNKNOWN","reach":1255,"social_spend":0.0,"spend":1.19,"unique_actions":[{"action_type":"page_engagement","value":3.0,"1d_click":3.0,"7d_click":3.0,"28d_click":3.0},{"action_type":"post_engagement","value":3.0,"1d_click":3.0,"7d_click":3.0,"28d_click":3.0},{"action_type":"link_click","value":3.0,"1d_click":3.0,"7d_click":3.0,"28d_click":3.0}],"unique_clicks":3,"unique_ctr":0.239044,"unique_inline_link_click_ctr":0.239044,"unique_inline_link_clicks":3,"updated_time":"2021-08-27","video_play_curve_actions":[{"action_type":"video_view"}],"website_ctr":[{"action_type":"link_click","value":0.227445}],"wish_bid":0.0},"emitted_at":1692180857757} -{"stream":"ads_insights_action_conversion_device","data":{"account_currency":"USD","account_id":"212551616838260","account_name":"Airbyte","ad_id":"23846784938030398","ad_name":"Stock photo ad 2","adset_id":"23846765228280398","adset_name":"Vanilla awareness ad set","buying_type":"AUCTION","campaign_id":"23846765228240398","campaign_name":"Airbyte Awareness Campaign 1 (sherif)","clicks":0,"cost_per_estimated_ad_recallers":0.004,"cpm":0.754717,"cpp":0.784314,"created_time":"2021-02-11","ctr":0.0,"date_start":"2021-02-15","date_stop":"2021-02-15","estimated_ad_recall_rate":19.607843,"estimated_ad_recallers":10.0,"frequency":1.039216,"impressions":53,"objective":"BRAND_AWARENESS","optimization_goal":"AD_RECALL_LIFT","reach":51,"spend":0.04,"unique_clicks":0,"updated_time":"2021-08-27","device_platform":"desktop"},"emitted_at":1696936270620} -{"stream":"ads_insights_action_reaction","data":{"account_currency":"USD","account_id":"212551616838260","account_name":"Airbyte","ad_id":"23846784938030398","ad_name":"Stock photo ad 2","adset_id":"23846765228280398","adset_name":"Vanilla awareness ad set","buying_type":"AUCTION","campaign_id":"23846765228240398","campaign_name":"Airbyte Awareness Campaign 1 (sherif)","clicks":1,"conversion_rate_ranking":"UNKNOWN","cost_per_estimated_ad_recallers":0.008889,"cost_per_unique_click":0.8,"cpc":0.8,"cpm":1.255887,"cpp":1.296596,"created_time":"2021-02-11","ctr":0.156986,"date_start":"2021-02-14","date_stop":"2021-02-14","engagement_rate_ranking":"UNKNOWN","estimated_ad_recall_rate":14.58671,"estimated_ad_recallers":90.0,"frequency":1.032415,"impressions":637,"objective":"BRAND_AWARENESS","optimization_goal":"AD_RECALL_LIFT","quality_ranking":"UNKNOWN","reach":617,"social_spend":0.0,"spend":0.8,"unique_clicks":1,"unique_ctr":0.162075,"updated_time":"2021-08-27","video_play_curve_actions":[{"action_type":"video_view"}],"wish_bid":0.0},"emitted_at":1696936287351} -{"stream":"ads_insights_action_type","data":{"account_currency":"USD","account_id":"212551616838260","account_name":"Airbyte","ad_id":"23846784938030398","ad_name":"Stock photo ad 2","adset_id":"23846765228280398","adset_name":"Vanilla awareness ad set","buying_type":"AUCTION","campaign_id":"23846765228240398","campaign_name":"Airbyte Awareness Campaign 1 (sherif)","clicks":1,"conversion_rate_ranking":"UNKNOWN","cost_per_estimated_ad_recallers":0.008889,"cost_per_unique_click":0.8,"cpc":0.8,"cpm":1.255887,"cpp":1.296596,"created_time":"2021-02-11","ctr":0.156986,"date_start":"2021-02-14","date_stop":"2021-02-14","engagement_rate_ranking":"UNKNOWN","estimated_ad_recall_rate":14.58671,"estimated_ad_recallers":90.0,"frequency":1.032415,"impressions":637,"objective":"BRAND_AWARENESS","optimization_goal":"AD_RECALL_LIFT","quality_ranking":"UNKNOWN","reach":617,"social_spend":0.0,"spend":0.8,"unique_clicks":1,"unique_ctr":0.162075,"updated_time":"2021-08-27","video_play_curve_actions":[{"action_type":"video_view"}],"wish_bid":0.0},"emitted_at":1696936315908} -{"stream":"ads_insights_action_video_sound","data":{"account_currency":"USD","account_id":"212551616838260","account_name":"Airbyte","ad_id":"23846784938030398","ad_name":"Stock photo ad 2","adset_id":"23846765228280398","adset_name":"Vanilla awareness ad set","buying_type":"AUCTION","campaign_id":"23846765228240398","campaign_name":"Airbyte Awareness Campaign 1 (sherif)","clicks":1,"conversion_rate_ranking":"UNKNOWN","cost_per_estimated_ad_recallers":0.008889,"cost_per_unique_click":0.8,"cpc":0.8,"cpm":1.255887,"cpp":1.296596,"created_time":"2021-02-11","ctr":0.156986,"date_start":"2021-02-14","date_stop":"2021-02-14","engagement_rate_ranking":"UNKNOWN","estimated_ad_recall_rate":14.58671,"estimated_ad_recallers":90.0,"frequency":1.032415,"impressions":637,"objective":"BRAND_AWARENESS","optimization_goal":"AD_RECALL_LIFT","quality_ranking":"UNKNOWN","reach":617,"social_spend":0.0,"spend":0.8,"unique_clicks":1,"unique_ctr":0.162075,"updated_time":"2021-08-27","video_play_curve_actions": [{"action_type": "video_view"}],"wish_bid":0.0},"emitted_at":1696936296894} -{"stream":"ads_insights_action_video_type","data":{"account_currency":"USD","account_id":"212551616838260","account_name":"Airbyte","ad_id":"23846784938030398","ad_name":"Stock photo ad 2","adset_id":"23846765228280398","adset_name":"Vanilla awareness ad set","buying_type":"AUCTION","campaign_id":"23846765228240398","campaign_name":"Airbyte Awareness Campaign 1 (sherif)","clicks":1,"conversion_rate_ranking":"UNKNOWN","cost_per_estimated_ad_recallers":0.008889,"cost_per_unique_click":0.8,"cpc":0.8,"cpm":1.255887,"cpp":1.296596,"created_time":"2021-02-11","ctr":0.156986,"date_start":"2021-02-14","date_stop":"2021-02-14","engagement_rate_ranking":"UNKNOWN","estimated_ad_recall_rate":14.58671,"estimated_ad_recallers":90.0,"frequency":1.032415,"impressions":637,"objective":"BRAND_AWARENESS","optimization_goal":"AD_RECALL_LIFT","quality_ranking":"UNKNOWN","reach":617,"social_spend":0.0,"spend":0.8,"unique_clicks":1,"unique_ctr":0.162075,"updated_time":"2021-08-27","video_play_curve_actions":[{"action_type":"video_view"}],"wish_bid":0.0},"emitted_at":1696936306631} -{"stream":"ads_insights_age_and_gender","data":{"account_currency":"USD","account_id":"212551616838260","account_name":"Airbyte","ad_id":"23846784938030398","ad_name":"Stock photo ad 2","adset_id":"23846765228280398","adset_name":"Vanilla awareness ad set","buying_type":"AUCTION","campaign_id":"23846765228240398","campaign_name":"Airbyte Awareness Campaign 1 (sherif)","clicks":0,"cost_per_estimated_ad_recallers":0.02,"cpm":0.869565,"cpp":0.952381,"created_time":"2021-02-11","ctr":0.0,"date_start":"2021-02-15","date_stop":"2021-02-15","estimated_ad_recall_rate":4.761905,"estimated_ad_recallers":1.0,"frequency":1.095238,"gender_targeting":"female","impressions":23,"instant_experience_clicks_to_open":1.0,"instant_experience_clicks_to_start":1.0,"objective":"BRAND_AWARENESS","optimization_goal":"AD_RECALL_LIFT","reach":21,"spend":0.02,"unique_clicks":0,"updated_time":"2021-08-27","age":"55-64","gender":"female"},"emitted_at":1696939548058} -{"stream":"ads_insights_country","data":{"account_currency":"USD","account_id":"212551616838260","account_name":"Airbyte","ad_id":"23846784938030398","ad_name":"Stock photo ad 2","adset_id":"23846765228280398","adset_name":"Vanilla awareness ad set","buying_type":"AUCTION","campaign_id":"23846765228240398","campaign_name":"Airbyte Awareness Campaign 1 (sherif)","clicks":1,"cost_per_estimated_ad_recallers":0.008889,"cost_per_unique_click":0.8,"cpc":0.8,"cpm":1.255887,"cpp":1.296596,"created_time":"2021-02-11","ctr":0.156986,"date_start":"2021-02-14","date_stop":"2021-02-14","estimated_ad_recall_rate":14.58671,"estimated_ad_recallers":90.0,"frequency":1.032415,"impressions":637,"instant_experience_clicks_to_open":1.0,"instant_experience_clicks_to_start":1.0,"objective":"BRAND_AWARENESS","optimization_goal":"AD_RECALL_LIFT","reach":617,"spend":0.8,"unique_clicks":1,"unique_ctr":0.162075,"updated_time":"2021-08-27","country":"US"},"emitted_at":1696936565587} -{"stream":"ads_insights_delivery_device","data":{"account_currency":"USD","account_id":"212551616838260","account_name":"Airbyte","ad_id":"23846765228310398","ad_name":"Airbyte Ad","adset_id":"23846765228280398","adset_name":"Vanilla awareness ad set","buying_type":"AUCTION","campaign_id":"23846765228240398","campaign_name":"Airbyte Awareness Campaign 1 (sherif)","clicks":0,"cost_per_estimated_ad_recallers":0.0075,"cpm":1.630435,"cpp":1.744186,"created_time":"2021-02-09","ctr":0.0,"date_start":"2021-02-15","date_stop":"2021-02-15","estimated_ad_recall_rate":23.255814,"estimated_ad_recallers":20.0,"frequency":1.069767,"impressions":92,"objective":"BRAND_AWARENESS","optimization_goal":"AD_RECALL_LIFT","reach":86,"spend":0.15,"unique_clicks":0,"updated_time":"2021-08-27","device_platform":"desktop"},"emitted_at":1696936327621} -{"stream":"ads_insights_delivery_platform","data":{"account_currency":"USD","account_id":"212551616838260","account_name":"Airbyte","actions":[{"action_type":"page_engagement","value":3.0,"1d_click":3.0,"7d_click":3.0,"28d_click":3.0},{"action_type":"post_engagement","value":3.0,"1d_click":3.0,"7d_click":3.0,"28d_click":3.0},{"action_type":"link_click","value":3.0,"1d_click":3.0,"7d_click":3.0,"28d_click":3.0}],"ad_id":"23846765228310398","ad_name":"Airbyte Ad","adset_id":"23846765228280398","adset_name":"Vanilla awareness ad set","buying_type":"AUCTION","campaign_id":"23846765228240398","campaign_name":"Airbyte Awareness Campaign 1 (sherif)","clicks":3,"cost_per_action_type":[{"action_type":"post_engagement","value":0.39,"1d_click":0.39,"7d_click":0.39,"28d_click":0.39},{"action_type":"page_engagement","value":0.39,"1d_click":0.39,"7d_click":0.39,"28d_click":0.39},{"action_type":"link_click","value":0.39,"1d_click":0.39,"7d_click":0.39,"28d_click":0.39}],"cost_per_estimated_ad_recallers":0.006882,"cost_per_inline_link_click":0.39,"cost_per_inline_post_engagement":0.39,"cost_per_outbound_click":[{"action_type":"outbound_click","value":0.39}],"cost_per_unique_action_type":[{"action_type":"post_engagement","value":0.39,"1d_click":0.39,"7d_click":0.39,"28d_click":0.39},{"action_type":"page_engagement","value":0.39,"1d_click":0.39,"7d_click":0.39,"28d_click":0.39},{"action_type":"link_click","value":0.39,"1d_click":0.39,"7d_click":0.39,"28d_click":0.39}],"cost_per_unique_click":0.39,"cost_per_unique_inline_link_click":0.39,"cost_per_unique_outbound_click":[{"action_type":"outbound_click","value":0.39}],"cpc":0.39,"cpm":0.922713,"cpp":0.971761,"created_time":"2021-02-09","ctr":0.236593,"date_start":"2021-02-15","date_stop":"2021-02-15","estimated_ad_recall_rate":14.119601,"estimated_ad_recallers":170.0,"frequency":1.053156,"impressions":1268,"inline_link_click_ctr":0.236593,"inline_link_clicks":3,"inline_post_engagement":3,"objective":"BRAND_AWARENESS","optimization_goal":"AD_RECALL_LIFT","outbound_clicks":[{"action_type":"outbound_click","value":3.0}],"outbound_clicks_ctr":[{"action_type":"outbound_click","value":0.236593}],"reach":1204,"spend":1.17,"unique_actions":[{"action_type":"page_engagement","value":3.0,"1d_click":3.0,"7d_click":3.0,"28d_click":3.0},{"action_type":"post_engagement","value":3.0,"1d_click":3.0,"7d_click":3.0,"28d_click":3.0},{"action_type":"link_click","value":3.0,"1d_click":3.0,"7d_click":3.0,"28d_click":3.0}],"unique_clicks":3,"unique_ctr":0.249169,"unique_inline_link_click_ctr":0.249169,"unique_inline_link_clicks":3,"unique_link_clicks_ctr":0.249169,"unique_outbound_clicks":[{"action_type":"outbound_click","value":3.0}],"unique_outbound_clicks_ctr":[{"action_type":"outbound_click","value":0.249169}],"updated_time":"2021-08-27","website_ctr":[{"action_type":"link_click","value":0.236593}],"publisher_platform":"facebook"},"emitted_at":1696936337306} -{"stream":"ads_insights_delivery_platform_and_device_platform","data":{"account_currency":"USD","account_id":"212551616838260","account_name":"Airbyte","ad_id":"23846765228310398","ad_name":"Airbyte Ad","adset_id":"23846765228280398","adset_name":"Vanilla awareness ad set","buying_type":"AUCTION","campaign_id":"23846765228240398","campaign_name":"Airbyte Awareness Campaign 1 (sherif)","clicks":0,"cost_per_estimated_ad_recallers":0.002,"cpm":0.392157,"cpp":0.392157,"created_time":"2021-02-09","ctr":0.0,"date_start":"2021-02-15","date_stop":"2021-02-15","estimated_ad_recall_rate":19.607843,"estimated_ad_recallers":10.0,"frequency":1.0,"impressions":51,"objective":"BRAND_AWARENESS","optimization_goal":"AD_RECALL_LIFT","reach":51,"spend":0.02,"unique_clicks":0,"updated_time":"2021-08-27","publisher_platform":"instagram","device_platform":"mobile_app"},"emitted_at":1696967644628} -{"stream":"ads_insights_demographics_age","data":{"account_currency":"USD","account_id":"212551616838260","account_name":"Airbyte","ad_id":"23846784938030398","ad_name":"Stock photo ad 2","adset_id":"23846765228280398","adset_name":"Vanilla awareness ad set","buying_type":"AUCTION","campaign_id":"23846765228240398","campaign_name":"Airbyte Awareness Campaign 1 (sherif)","clicks":0,"cost_per_estimated_ad_recallers":0.0085,"cpm":1.14094,"cpp":1.188811,"created_time":"2021-02-11","ctr":0.0,"date_start":"2021-02-15","date_stop":"2021-02-15","estimated_ad_recall_rate":13.986014,"estimated_ad_recallers":20.0,"frequency":1.041958,"impressions":149,"objective":"BRAND_AWARENESS","optimization_goal":"AD_RECALL_LIFT","reach":143,"spend":0.17,"unique_clicks":0,"updated_time":"2021-08-27","age":"25-34"},"emitted_at":1696936389857} -{"stream":"ads_insights_demographics_country","data":{"account_currency":"USD","account_id":"212551616838260","account_name":"Airbyte","ad_id":"23846784938030398","ad_name":"Stock photo ad 2","adset_id":"23846765228280398","adset_name":"Vanilla awareness ad set","buying_type":"AUCTION","campaign_id":"23846765228240398","campaign_name":"Airbyte Awareness Campaign 1 (sherif)","clicks":1,"cost_per_estimated_ad_recallers":0.008889,"cost_per_unique_click":0.8,"cpc":0.8,"cpm":1.255887,"cpp":1.296596,"created_time":"2021-02-11","ctr":0.156986,"date_start":"2021-02-14","date_stop":"2021-02-14","estimated_ad_recall_rate":14.58671,"estimated_ad_recallers":90.0,"frequency":1.032415,"impressions":637,"objective":"BRAND_AWARENESS","optimization_goal":"AD_RECALL_LIFT","reach":617,"spend":0.8,"unique_clicks":1,"unique_ctr":0.162075,"updated_time":"2021-08-27","country":"US"},"emitted_at":1696936440731} -{"stream":"ads_insights_demographics_dma_region","data":{"account_currency":"USD","account_id":"212551616838260","account_name":"Airbyte","ad_id":"23846784938030398","ad_name":"Stock photo ad 2","adset_id":"23846765228280398","adset_name":"Vanilla awareness ad set","campaign_id":"23846765228240398","campaign_name":"Airbyte Awareness Campaign 1 (sherif)","clicks":0,"cost_per_estimated_ad_recallers":0.0,"cpm":0.0,"created_time":"2021-02-11","ctr":0.0,"date_start":"2021-02-15","date_stop":"2021-02-15","estimated_ad_recallers":1.0,"frequency":1.0,"impressions":1,"objective":"BRAND_AWARENESS","optimization_goal":"AD_RECALL_LIFT","reach":1,"spend":0.0,"unique_clicks":0,"updated_time":"2021-08-27","dma":"Anchorage"},"emitted_at":1696936491393} -{"stream":"ads_insights_demographics_gender","data":{"account_currency":"USD","account_id":"212551616838260","account_name":"Airbyte","ad_id":"23846784938030398","ad_name":"Stock photo ad 2","adset_id":"23846765228280398","adset_name":"Vanilla awareness ad set","buying_type":"AUCTION","campaign_id":"23846765228240398","campaign_name":"Airbyte Awareness Campaign 1 (sherif)","clicks":0,"cost_per_estimated_ad_recallers":0.0085,"cpm":1.268657,"cpp":1.338583,"created_time":"2021-02-11","ctr":0.0,"date_start":"2021-02-14","date_stop":"2021-02-14","estimated_ad_recall_rate":15.748032,"estimated_ad_recallers":20.0,"frequency":1.055118,"gender_targeting":"female","impressions":134,"objective":"BRAND_AWARENESS","optimization_goal":"AD_RECALL_LIFT","reach":127,"spend":0.17,"unique_clicks":0,"updated_time":"2021-08-27","gender":"female"},"emitted_at":1696967753477} -{"stream":"ads_insights_dma","data":{"account_currency":"USD","account_id":"212551616838260","account_name":"Airbyte","ad_id":"23846784938030398","ad_name":"Stock photo ad 2","adset_id":"23846765228280398","adset_name":"Vanilla awareness ad set","campaign_id":"23846765228240398","campaign_name":"Airbyte Awareness Campaign 1 (sherif)","clicks":0,"cost_per_estimated_ad_recallers":0.0,"cpm":0.0,"created_time":"2021-02-11","ctr":0.0,"date_start":"2021-02-15","date_stop":"2021-02-15","estimated_ad_recallers":1.0,"frequency":1.0,"impressions":1,"objective":"BRAND_AWARENESS","optimization_goal":"AD_RECALL_LIFT","reach":1,"spend":0.0,"unique_clicks":0,"updated_time":"2021-08-27","dma":"West Palm Beach-Ft. Pierce"},"emitted_at":1696936556045} -{"stream":"ads_insights_platform_and_device","data":{"account_currency":"USD","account_id":"212551616838260","account_name":"Airbyte","ad_id":"23846784938030398","ad_name":"Stock photo ad 2","adset_id":"23846765228280398","adset_name":"Vanilla awareness ad set","buying_type":"AUCTION","campaign_id":"23846765228240398","campaign_name":"Airbyte Awareness Campaign 1 (sherif)","clicks":0,"cost_per_estimated_ad_recallers":0.0,"cpm":0.0,"cpp":0.0,"created_time":"2021-02-11","ctr":0.0,"date_start":"2021-02-15","date_stop":"2021-02-15","estimated_ad_recall_rate":12.5,"estimated_ad_recallers":1.0,"frequency":1.0,"impressions":8,"objective":"BRAND_AWARENESS","optimization_goal":"AD_RECALL_LIFT","reach":8,"spend":0.0,"unique_clicks":0,"updated_time":"2021-08-27","publisher_platform":"instagram","platform_position":"feed","impression_device":"android_smartphone"},"emitted_at":1696936579028} -{"stream":"ads_insights_region","data":{"account_currency":"USD","account_id":"212551616838260","account_name":"Airbyte","ad_id":"23846784938030398","ad_name":"Stock photo ad 2","adset_id":"23846765228280398","adset_name":"Vanilla awareness ad set","buying_type":"AUCTION","campaign_id":"23846765228240398","campaign_name":"Airbyte Awareness Campaign 1 (sherif)","clicks":0,"cost_per_estimated_ad_recallers":0.02,"cpm":1.111111,"cpp":1.111111,"created_time":"2021-02-11","ctr":0.0,"date_start":"2021-02-15","date_stop":"2021-02-15","estimated_ad_recall_rate":5.555556,"estimated_ad_recallers":1.0,"frequency":1.0,"impressions":18,"instant_experience_clicks_to_open":1.0,"instant_experience_clicks_to_start":1.0,"objective":"BRAND_AWARENESS","optimization_goal":"AD_RECALL_LIFT","reach":18,"spend":0.02,"unique_clicks":0,"updated_time":"2021-08-27","region":"New York"},"emitted_at":1696936621899} -{"stream":"customcustom_insight_stream","data":{"account_id":"212551616838260","cpc":0.27,"ad_id":"23846765228310398","clicks":1,"account_name":"Airbyte","date_start":"2021-02-15","date_stop":"2021-02-15","gender":"female"},"emitted_at":1695385890508} +{"stream": "ad_account", "data": {"id": "act_212551616838260", "account_id": "212551616838260", "account_status": 1, "age": 1456.3683101852, "amount_spent": "39125", "balance": "0", "business": {"id": "1506473679510495", "name": "Airbyte"}, "business_city": "", "business_country_code": "US", "business_name": "", "business_street": "", "business_street2": "", "can_create_brand_lift_study": false, "capabilities": ["CAN_CREATE_CALL_ADS", "CAN_SEE_GROWTH_OPPORTUNITY_DATA", "ENABLE_IA_RECIRC_AD_DISPLAY_FORMAT", "CAN_USE_MOBILE_EXTERNAL_PAGE_TYPE", "CAN_USE_FB_FEED_POSITION_IN_VIDEO_VIEW_15S", "ENABLE_BIZ_DISCO_ADS", "ENABLE_BRAND_OBJECTIVES_FOR_BIZ_DISCO_ADS", "ENABLE_DIRECT_REACH_FOR_BIZ_DISCO_ADS", "ENABLE_DYNAMIC_ADS_ON_IG_STORIES_ADS", "ENABLE_IG_STORIES_ADS_PPE_OBJECTIVE", "ENABLE_IG_STORIES_ADS_MESSENGER_DESTINATION", "ENABLE_PAC_FOR_BIZ_DISCO_ADS", "CAN_USE_FB_INSTREAM_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_FB_STORY_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_AN_INSTREAM_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_IG_STORY_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_FB_IA_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_FB_SUG_VIDEO_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_FB_MKT_PLACE_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_IG_FEED_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_IG_EXPLORE_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_AN_CLASSIC_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_AN_REWARD_VIDEO_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_REACH_AND_FREQUENCY", "CAN_USE_RECURRING_BUDGET", "HAS_VALID_PAYMENT_METHODS", "CAN_USE_LINK_CLICK_BILLING_EVENT", "CAN_USE_CPA_BILLING_EVENT", "CAN_SEE_NEW_CONVERSION_WINDOW_NUX", "ADS_INSTREAM_INTERFACE_INTEGRITY", "ADS_INSTREAM_LINK_CLICK", "ADS_INSTREAM_LINK_CLICK_IMAGE", "ADS_IN_OBJECTIVES_DEPRECATION", "MESSENGER_INBOX_ADS_PRODUCT_CATALOG_SALES", "CAN_SHOW_MESSENGER_DUPLICSTION_UPSELL", "ALLOW_INSTREAM_ONLY_FOR_REACH", "ADS_INSTREAM_VIDEO_PLACEMENT_CONVERSIONS", "CAN_CREATE_INSTAGRAM_EXPLORE_ADS", "ALLOW_INSTREAM_VIDEOS_PLACEMENT_ONLY", "ALLOW_INSTREAM_NON_INTERRUPTIVE_LEADGEN", "INSTREAM_VIDEO_AD_DESKTOP_CONVERSION_AD_PREVIEW", "ALLOW_INSTREAM_ONLY_FOR_BRAND_AWARENESS_AUCTION", "ALLOW_SUGGESTED_VIDEOS_PLACEMENT_ONLY", "WHATSAPP_DESTINATION_ADS", "CTM_ADS_CREATION_CLICK_TO_DIRECT", "CTW_ADS_ENABLE_IG_FEED_PLACEMENT", "CTW_ADS_FOR_NON_MESSAGES_OBJECTIVE", "CTW_ADS_TRUSTED_TIER_2_PLUS_ADVERTISER", "CTW_ADS_TRUSTED_TIER_ADVERTISER", "ADS_PLACEMENT_MARKETPLACE", "ADNW_DISABLE_INSTREAM_AND_WEB_PLACEMENT", "CAN_CHANGE_BILLING_THRESHOLD", "CAN_USE_APP_EVENT_AVERAGE_COST_BIDDING", "CAN_USE_LEAD_GEN_AVERAGE_COST_BIDDING", "ADS_VALUE_OPTIMIZATION_DYNAMIC_ADS_1D", "ADS_DELIVERY_INSIGHTS_IN_BIDDING_PRESET_EXPERIMENT", "ADS_DELIVERY_INSIGHTS_OPTIMIZATION_PRESET", "CAN_SEE_APP_AD_EVENTS", "CAN_SEE_NEW_STANDARD_EVENTS_BETA", "CAN_SEE_VCK_HOLIDAY_TEMPLATES", "ENABLE_DCO_FOR_FB_STORY_ADS", "CAN_USE_IG_EXPLORE_GRID_HOME_PLACEMENT", "CAN_USE_IG_EXPLORE_HOME_IN_REACH_AND_FREQUENCY", "CAN_USE_IG_EXPLORE_HOME_POST_ENGAGEMENT_MESSAGES", "CAN_USE_IG_SEARCH_PLACEMENT", "CAN_USE_IG_SEARCH_RESULTS_AUTO_PLACEMENT", "CAN_USE_IG_REELS_PAC_CAROUSEL", "CAN_USE_IG_REELS_POSITION", "CAN_SEE_CONVERSION_LIFT_SUMMARY", "CAN_USE_IG_PROFILE_FEED_POSITION", "CAN_USE_IG_REELS_REACH_AND_FREQUENCY", "CAN_USE_IG_REELS_OVERLAY_POSITION", "CAN_USE_IG_REELS_OVERLAY_PAC", "CAN_USE_IG_SHOP_TAB_PAC", "CAN_SEE_LEARNING_STAGE", "ENABLE_WEBSITE_CONVERSIONS_FOR_FB_STORY_ADS", "ENABLE_MESSENGER_INBOX_VIDEO_ADS", "ENABLE_VIDEO_VIEWS_FOR_FB_STORY_ADS", "ENABLE_LINK_CLICKS_FOR_FB_STORY_ADS", "ENABLE_REACH_FOR_FB_STORY_ADS", "CAN_USE_CALL_TO_ACTION_LINK_IMPORT_EXPORT", "ADS_INSTREAM_VIDEO_ENABLE_SLIDE_SHOW", "ALLOW_INSTREAM_VIDEOS_PLACEMENT_ONLY_IN_VV_REACH_AND_FREQUENCY", "ENABLE_MOBILE_APP_INSTALLS_FOR_FB_STORY_ADS", "ENABLE_LEAD_GEN_FOR_FB_STORY_ADS", "CAN_USE_FB_MKT_PLACE_POSITION_IN_REACH", "CAN_USE_FB_MKT_PLACE_POSITION_IN_VIDEO_VIEW", "CAN_USE_FB_MKT_PLACE_POSITION_IN_STORE_VISIT", "ENABLE_MOBILE_APP_ENGAGEMENT_FOR_FB_STORY_ADS", "CAN_USE_FB_MKT_PLACE_POSITION_IN_BRAND_AWARENESS", "CAN_USE_FB_MKT_PLACE_POSITION_IN_APP_INSTALLS", "CAN_USE_FB_MKT_PLACE_POSITION_IN_LEAD_GENERATION", "CAN_USE_FB_MKT_PLACE_POSITION_IN_MESSAGE", "CAN_USE_FB_MKT_PLACE_POSITION_IN_PAGE_LIKE", "CAN_USE_FB_MKT_PLACE_POSITION_IN_POST_ENGAGEMENT", "RF_ALLOW_MARKETPLACE_ACCOUNT", "RF_ALLOW_SEARCH_ACCOUNT", "VERTICAL_VIDEO_PAC_INSTREAM_UPSELL", "IX_COLLECTION_ENABLED_FOR_BAO_AND_REACH", "ADS_BM_REQUIREMENTS_OCT_15_RELEASE", "ENABLE_POST_ENGAGEMENT_FOR_FB_STORY", "ENBABLE_CATALOG_SALES_FOR_FB_STORY", "CAN_USE_WHATSAPP_DESTINATION_ON_LINK_CLICKS_AND_CONVERSIONS", "CAN_USE_WHATSAPP_DESTINATION_ON_CONVERSIONS", "IS_NON_TAIL_AD_ACCOUNT", "IS_IN_IG_EXISTING_POST_CTA_DEFAULTING_EXPERIMENT", "IS_IN_SHORT_WA_LINK_CTWA_UNCONV_TRAFFIC_EXPERIMENT", "IS_IN_ODAX_EXPERIENCE", "IS_IN_REACH_BRAND_AWARENESS_WHATSAPP_L1_DESTINATION_EXPERIMENT", "IS_IN_VIDEO_VIEWS_WHATSAPP_L1_DESTINATION_EXPERIMENT", "IS_IN_WHATSAPP_DESTINATION_DEFAULTING_EXPERIMENT", "CAN_USE_MARKETPLACE_DESKTOP", "ADS_MERCHANT_OVERLAYS_DEPRECATION", "CONNECTIONS_DEPRECATION_V2", "CAN_USE_LIVE_VIDEO_FOR_THRUPLAY", "CAN_SEE_HEC_AM_FLOW", "CAN_SEE_POLITICAL_FLOW", "ADS_INSTREAM_PLACEMENT_CATALOG_SALES", "ENABLE_CONVERSIONS_FOR_FB_GROUP_TAB_ADS", "ENABLE_LINK_CLICK_FOR_FB_GROUP_TAB_ADS", "ENABLE_REACH_FOR_FB_GROUP_TAB_ADS", "CAN_USE_CONVERSATIONS_OPTIMIZATION", "ENABLE_THRUPLAY_OPTIMIZATION_MESSENGER_STORY_ADS", "CAN_USE_IG_STORY_POLLS_PAC_CREATION", "IOS14_CEO_CAMPAIGN_CREATION", "ENABLE_VIDEO_CHANNEL_PLACEMENT_FOR_RSVP_ADS", "DIGITAL_CIRCULAR_ADS", "CAN_SEE_SAFR_V3_FLOW", "CAN_USE_FB_REELS_POSITION", "CAN_USE_ADS_ON_FB_REELS_POSITION", "CAN_USE_FB_REELS_AUTO_PLACEMENT", "ENABLE_FB_REELS_CREATION_PAC_ADS", "ENABLE_FB_REELS_CREATION_DCO_ADS", "ENABLE_FB_REELS_POSTLOOP_CREATION_DCO_ADS", "ENABLE_FB_REELS_POSTLOOP_CREATION_PAC_ADS", "RF_CPA_BILLING_DEPRECATION_PHASE_2", "ENABLE_APP_INSTALL_CUSTOM_PRODUCT_PAGES", "ENABLE_ADS_ON_FB_REELS_PLACEMENT_UNIFICATION", "ADS_RF_FB_REELS_PLACEMENT", "REELS_DM_ADS_ENABLE_REACH_AND_FREQUENCY", "ELIGIBLE_FOR_TEXT_GEN", "CAN_USE_BUDGET_SCHEDULING_API"], "created_time": "2020-04-13T18:04:59-0700", "currency": "USD", "disable_reason": 0.0, "end_advertiser": 1506473679510495.0, "end_advertiser_name": "Airbyte", "fb_entity": 85.0, "funding_source": 2825262454257003.0, "funding_source_details": {"id": "2825262454257003", "type": 1}, "has_migrated_permissions": true, "is_attribution_spec_system_default": true, "is_direct_deals_enabled": false, "is_in_3ds_authorization_enabled_market": false, "is_notifications_enabled": true, "is_personal": 0.0, "is_prepay_account": false, "is_tax_id_required": false, "min_campaign_group_spend_cap": 10000.0, "min_daily_budget": 100.0, "name": "Airbyte", "offsite_pixels_tos_accepted": true, "owner": 1506473679510495.0, "rf_spec": {"min_reach_limits": {"US": 200000, "CA": 200000, "GB": 200000, "AR": 200000, "AU": 200000, "AT": 200000, "BE": 200000, "BR": 200000, "CL": 200000, "CN": 200000, "CO": 200000, "HR": 200000, "DK": 200000, "DO": 200000, "EG": 200000, "FI": 200000, "FR": 200000, "DE": 200000, "GR": 200000, "HK": 200000, "IN": 200000, "ID": 200000, "IE": 200000, "IL": 200000, "IT": 200000, "JP": 200000, "JO": 200000, "KW": 200000, "LB": 200000, "MY": 200000, "MX": 200000, "NL": 200000, "NZ": 200000, "NG": 200000, "NO": 200000, "PK": 200000, "PA": 200000, "PE": 200000, "PH": 200000, "PL": 200000, "RU": 200000, "SA": 200000, "RS": 200000, "SG": 200000, "ZA": 200000, "KR": 200000, "ES": 200000, "SE": 200000, "CH": 200000, "TW": 200000, "TH": 200000, "TR": 200000, "AE": 200000, "VE": 200000, "PT": 200000, "LU": 200000, "BG": 200000, "CZ": 200000, "SI": 200000, "IS": 200000, "SK": 200000, "LT": 200000, "TT": 200000, "BD": 200000, "LK": 200000, "KE": 200000, "HU": 200000, "MA": 200000, "CY": 200000, "JM": 200000, "EC": 200000, "RO": 200000, "BO": 200000, "GT": 200000, "CR": 200000, "QA": 200000, "SV": 200000, "HN": 200000, "NI": 200000, "PY": 200000, "UY": 200000, "PR": 200000, "BA": 200000, "PS": 200000, "TN": 200000, "BH": 200000, "VN": 200000, "GH": 200000, "MU": 200000, "UA": 200000, "MT": 200000, "BS": 200000, "MV": 200000, "OM": 200000, "MK": 200000, "LV": 200000, "EE": 200000, "IQ": 200000, "DZ": 200000, "AL": 200000, "NP": 200000, "MO": 200000, "ME": 200000, "SN": 200000, "GE": 200000, "BN": 200000, "UG": 200000, "GP": 200000, "BB": 200000, "AZ": 200000, "TZ": 200000, "LY": 200000, "MQ": 200000, "CM": 200000, "BW": 200000, "ET": 200000, "KZ": 200000, "NA": 200000, "MG": 200000, "NC": 200000, "MD": 200000, "FJ": 200000, "BY": 200000, "JE": 200000, "GU": 200000, "YE": 200000, "ZM": 200000, "IM": 200000, "HT": 200000, "KH": 200000, "AW": 200000, "PF": 200000, "AF": 200000, "BM": 200000, "GY": 200000, "AM": 200000, "MW": 200000, "AG": 200000, "RW": 200000, "GG": 200000, "GM": 200000, "FO": 200000, "LC": 200000, "KY": 200000, "BJ": 200000, "AD": 200000, "GD": 200000, "VI": 200000, "BZ": 200000, "VC": 200000, "MN": 200000, "MZ": 200000, "ML": 200000, "AO": 200000, "GF": 200000, "UZ": 200000, "DJ": 200000, "BF": 200000, "MC": 200000, "TG": 200000, "GL": 200000, "GA": 200000, "GI": 200000, "CD": 200000, "KG": 200000, "PG": 200000, "BT": 200000, "KN": 200000, "SZ": 200000, "LS": 200000, "LA": 200000, "LI": 200000, "MP": 200000, "SR": 200000, "SC": 200000, "VG": 200000, "TC": 200000, "DM": 200000, "MR": 200000, "AX": 200000, "SM": 200000, "SL": 200000, "NE": 200000, "CG": 200000, "AI": 200000, "YT": 200000, "CV": 200000, "GN": 200000, "TM": 200000, "BI": 200000, "TJ": 200000, "VU": 200000, "SB": 200000, "ER": 200000, "WS": 200000, "AS": 200000, "FK": 200000, "GQ": 200000, "TO": 200000, "KM": 200000, "PW": 200000, "FM": 200000, "CF": 200000, "SO": 200000, "MH": 200000, "VA": 200000, "TD": 200000, "KI": 200000, "ST": 200000, "TV": 200000, "NR": 200000, "RE": 200000, "LR": 200000, "ZW": 200000, "CI": 200000, "MM": 200000, "AN": 200000, "AQ": 200000, "BQ": 200000, "BV": 200000, "IO": 200000, "CX": 200000, "CC": 200000, "CK": 200000, "CW": 200000, "TF": 200000, "GW": 200000, "HM": 200000, "XK": 200000, "MS": 200000, "NU": 200000, "NF": 200000, "PN": 200000, "BL": 200000, "SH": 200000, "MF": 200000, "PM": 200000, "SX": 200000, "GS": 200000, "SS": 200000, "SJ": 200000, "TL": 200000, "TK": 200000, "UM": 200000, "WF": 200000, "EH": 200000}, "countries": ["US", "CA", "GB", "AR", "AU", "AT", "BE", "BR", "CL", "CN", "CO", "HR", "DK", "DO", "EG", "FI", "FR", "DE", "GR", "HK", "IN", "ID", "IE", "IL", "IT", "JP", "JO", "KW", "LB", "MY", "MX", "NL", "NZ", "NG", "NO", "PK", "PA", "PE", "PH", "PL", "RU", "SA", "RS", "SG", "ZA", "KR", "ES", "SE", "CH", "TW", "TH", "TR", "AE", "VE", "PT", "LU", "BG", "CZ", "SI", "IS", "SK", "LT", "TT", "BD", "LK", "KE", "HU", "MA", "CY", "JM", "EC", "RO", "BO", "GT", "CR", "QA", "SV", "HN", "NI", "PY", "UY", "PR", "BA", "PS", "TN", "BH", "VN", "GH", "MU", "UA", "MT", "BS", "MV", "OM", "MK", "EE", "LV", "IQ", "DZ", "AL", "NP", "MO", "ME", "SN", "GE", "BN", "UG", "GP", "BB", "ZW", "CI", "AZ", "TZ", "LY", "MQ", "MM", "CM", "BW", "ET", "KZ", "NA", "MG", "NC", "MD", "FJ", "BY", "JE", "GU", "YE", "ZM", "IM", "HT", "KH", "AW", "PF", "AF", "BM", "GY", "AM", "MW", "AG", "RW", "GG", "GM", "FO", "LC", "KY", "BJ", "AD", "GD", "VI", "BZ", "VC", "MN", "MZ", "ML", "AO", "GF", "UZ", "DJ", "BF", "MC", "TG", "GL", "GA", "GI", "CD", "KG", "PG", "BT", "KN", "SZ", "LS", "LA", "LI", "MP", "SR", "SC", "VG", "TC", "DM", "MR", "AX", "SM", "SL", "NE", "CG", "AI", "YT", "LR", "CV", "GN", "TM", "BI", "TJ", "VU", "SB", "ER", "WS", "AS", "FK", "GQ", "TO", "KM", "PW", "FM", "CF", "SO", "MH", "VA", "TD", "KI", "ST", "TV", "NR", "RE", "AN", "AQ", "BQ", "BV", "IO", "CX", "CC", "CK", "CW", "TF", "GW", "HM", "XK", "MS", "NU", "NF", "PN", "BL", "SH", "MF", "PM", "SX", "GS", "SS", "SJ", "TL", "TK", "UM", "WF", "EH"], "min_campaign_duration": {"US": 1, "CA": 1, "GB": 1, "AR": 1, "AU": 1, "AT": 1, "BE": 1, "BR": 1, "CL": 1, "CN": 1, "CO": 1, "HR": 1, "DK": 1, "DO": 1, "EG": 1, "FI": 1, "FR": 1, "DE": 1, "GR": 1, "HK": 1, "IN": 1, "ID": 1, "IE": 1, "IL": 1, "IT": 1, "JP": 1, "JO": 1, "KW": 1, "LB": 1, "MY": 1, "MX": 1, "NL": 1, "NZ": 1, "NG": 1, "NO": 1, "PK": 1, "PA": 1, "PE": 1, "PH": 1, "PL": 1, "RU": 1, "SA": 1, "RS": 1, "SG": 1, "ZA": 1, "KR": 1, "ES": 1, "SE": 1, "CH": 1, "TW": 1, "TH": 1, "TR": 1, "AE": 1, "VE": 1, "PT": 1, "LU": 1, "BG": 1, "CZ": 1, "SI": 1, "IS": 1, "SK": 1, "LT": 1, "TT": 1, "BD": 1, "LK": 1, "KE": 1, "HU": 1, "MA": 1, "CY": 1, "JM": 1, "EC": 1, "RO": 1, "BO": 1, "GT": 1, "CR": 1, "QA": 1, "SV": 1, "HN": 1, "NI": 1, "PY": 1, "UY": 1, "PR": 1, "BA": 1, "PS": 1, "TN": 1, "BH": 1, "VN": 1, "GH": 1, "MU": 1, "UA": 1, "MT": 1, "BS": 1, "MV": 1, "OM": 1, "MK": 1, "LV": 1, "EE": 1, "IQ": 1, "DZ": 1, "AL": 1, "NP": 1, "MO": 1, "ME": 1, "SN": 1, "GE": 1, "BN": 1, "UG": 1, "GP": 1, "BB": 1, "AZ": 1, "TZ": 1, "LY": 1, "MQ": 1, "CM": 1, "BW": 1, "ET": 1, "KZ": 1, "NA": 1, "MG": 1, "NC": 1, "MD": 1, "FJ": 1, "BY": 1, "JE": 1, "GU": 1, "YE": 1, "ZM": 1, "IM": 1, "HT": 1, "KH": 1, "AW": 1, "PF": 1, "AF": 1, "BM": 1, "GY": 1, "AM": 1, "MW": 1, "AG": 1, "RW": 1, "GG": 1, "GM": 1, "FO": 1, "LC": 1, "KY": 1, "BJ": 1, "AD": 1, "GD": 1, "VI": 1, "BZ": 1, "VC": 1, "MN": 1, "MZ": 1, "ML": 1, "AO": 1, "GF": 1, "UZ": 1, "DJ": 1, "BF": 1, "MC": 1, "TG": 1, "GL": 1, "GA": 1, "GI": 1, "CD": 1, "KG": 1, "PG": 1, "BT": 1, "KN": 1, "SZ": 1, "LS": 1, "LA": 1, "LI": 1, "MP": 1, "SR": 1, "SC": 1, "VG": 1, "TC": 1, "DM": 1, "MR": 1, "AX": 1, "SM": 1, "SL": 1, "NE": 1, "CG": 1, "AI": 1, "YT": 1, "CV": 1, "GN": 1, "TM": 1, "BI": 1, "TJ": 1, "VU": 1, "SB": 1, "ER": 1, "WS": 1, "AS": 1, "FK": 1, "GQ": 1, "TO": 1, "KM": 1, "PW": 1, "FM": 1, "CF": 1, "SO": 1, "MH": 1, "VA": 1, "TD": 1, "KI": 1, "ST": 1, "TV": 1, "NR": 1, "RE": 1, "LR": 1, "ZW": 1, "CI": 1, "MM": 1, "AN": 1, "AQ": 1, "BQ": 1, "BV": 1, "IO": 1, "CX": 1, "CC": 1, "CK": 1, "CW": 1, "TF": 1, "GW": 1, "HM": 1, "XK": 1, "MS": 1, "NU": 1, "NF": 1, "PN": 1, "BL": 1, "SH": 1, "MF": 1, "PM": 1, "SX": 1, "GS": 1, "SS": 1, "SJ": 1, "TL": 1, "TK": 1, "UM": 1, "WF": 1, "EH": 1}, "max_campaign_duration": {"US": 90, "CA": 90, "GB": 90, "AR": 90, "AU": 90, "AT": 90, "BE": 90, "BR": 90, "CL": 90, "CN": 90, "CO": 90, "HR": 90, "DK": 90, "DO": 90, "EG": 90, "FI": 90, "FR": 90, "DE": 90, "GR": 90, "HK": 90, "IN": 90, "ID": 90, "IE": 90, "IL": 90, "IT": 90, "JP": 90, "JO": 90, "KW": 90, "LB": 90, "MY": 90, "MX": 90, "NL": 90, "NZ": 90, "NG": 90, "NO": 90, "PK": 90, "PA": 90, "PE": 90, "PH": 90, "PL": 90, "RU": 90, "SA": 90, "RS": 90, "SG": 90, "ZA": 90, "KR": 90, "ES": 90, "SE": 90, "CH": 90, "TW": 90, "TH": 90, "TR": 90, "AE": 90, "VE": 90, "PT": 90, "LU": 90, "BG": 90, "CZ": 90, "SI": 90, "IS": 90, "SK": 90, "LT": 90, "TT": 90, "BD": 90, "LK": 90, "KE": 90, "HU": 90, "MA": 90, "CY": 90, "JM": 90, "EC": 90, "RO": 90, "BO": 90, "GT": 90, "CR": 90, "QA": 90, "SV": 90, "HN": 90, "NI": 90, "PY": 90, "UY": 90, "PR": 90, "BA": 90, "PS": 90, "TN": 90, "BH": 90, "VN": 90, "GH": 90, "MU": 90, "UA": 90, "MT": 90, "BS": 90, "MV": 90, "OM": 90, "MK": 90, "LV": 90, "EE": 90, "IQ": 90, "DZ": 90, "AL": 90, "NP": 90, "MO": 90, "ME": 90, "SN": 90, "GE": 90, "BN": 90, "UG": 90, "GP": 90, "BB": 90, "AZ": 90, "TZ": 90, "LY": 90, "MQ": 90, "CM": 90, "BW": 90, "ET": 90, "KZ": 90, "NA": 90, "MG": 90, "NC": 90, "MD": 90, "FJ": 90, "BY": 90, "JE": 90, "GU": 90, "YE": 90, "ZM": 90, "IM": 90, "HT": 90, "KH": 90, "AW": 90, "PF": 90, "AF": 90, "BM": 90, "GY": 90, "AM": 90, "MW": 90, "AG": 90, "RW": 90, "GG": 90, "GM": 90, "FO": 90, "LC": 90, "KY": 90, "BJ": 90, "AD": 90, "GD": 90, "VI": 90, "BZ": 90, "VC": 90, "MN": 90, "MZ": 90, "ML": 90, "AO": 90, "GF": 90, "UZ": 90, "DJ": 90, "BF": 90, "MC": 90, "TG": 90, "GL": 90, "GA": 90, "GI": 90, "CD": 90, "KG": 90, "PG": 90, "BT": 90, "KN": 90, "SZ": 90, "LS": 90, "LA": 90, "LI": 90, "MP": 90, "SR": 90, "SC": 90, "VG": 90, "TC": 90, "DM": 90, "MR": 90, "AX": 90, "SM": 90, "SL": 90, "NE": 90, "CG": 90, "AI": 90, "YT": 90, "CV": 90, "GN": 90, "TM": 90, "BI": 90, "TJ": 90, "VU": 90, "SB": 90, "ER": 90, "WS": 90, "AS": 90, "FK": 90, "GQ": 90, "TO": 90, "KM": 90, "PW": 90, "FM": 90, "CF": 90, "SO": 90, "MH": 90, "VA": 90, "TD": 90, "KI": 90, "ST": 90, "TV": 90, "NR": 90, "RE": 90, "LR": 90, "ZW": 90, "CI": 90, "MM": 90, "AN": 90, "AQ": 90, "BQ": 90, "BV": 90, "IO": 90, "CX": 90, "CC": 90, "CK": 90, "CW": 90, "TF": 90, "GW": 90, "HM": 90, "XK": 90, "MS": 90, "NU": 90, "NF": 90, "PN": 90, "BL": 90, "SH": 90, "MF": 90, "PM": 90, "SX": 90, "GS": 90, "SS": 90, "SJ": 90, "TL": 90, "TK": 90, "UM": 90, "WF": 90, "EH": 90}, "max_days_to_finish": {"US": 180, "CA": 180, "GB": 180, "AR": 180, "AU": 180, "AT": 180, "BE": 180, "BR": 180, "CL": 180, "CN": 180, "CO": 180, "HR": 180, "DK": 180, "DO": 180, "EG": 180, "FI": 180, "FR": 180, "DE": 180, "GR": 180, "HK": 180, "IN": 180, "ID": 180, "IE": 180, "IL": 180, "IT": 180, "JP": 180, "JO": 180, "KW": 180, "LB": 180, "MY": 180, "MX": 180, "NL": 180, "NZ": 180, "NG": 180, "NO": 180, "PK": 180, "PA": 180, "PE": 180, "PH": 180, "PL": 180, "RU": 180, "SA": 180, "RS": 180, "SG": 180, "ZA": 180, "KR": 180, "ES": 180, "SE": 180, "CH": 180, "TW": 180, "TH": 180, "TR": 180, "AE": 180, "VE": 180, "PT": 180, "LU": 180, "BG": 180, "CZ": 180, "SI": 180, "IS": 180, "SK": 180, "LT": 180, "TT": 180, "BD": 180, "LK": 180, "KE": 180, "HU": 180, "MA": 180, "CY": 180, "JM": 180, "EC": 180, "RO": 180, "BO": 180, "GT": 180, "CR": 180, "QA": 180, "SV": 180, "HN": 180, "NI": 180, "PY": 180, "UY": 180, "PR": 180, "BA": 180, "PS": 180, "TN": 180, "BH": 180, "VN": 180, "GH": 180, "MU": 180, "UA": 180, "MT": 180, "BS": 180, "MV": 180, "OM": 180, "MK": 180, "LV": 180, "EE": 180, "IQ": 180, "DZ": 180, "AL": 180, "NP": 180, "MO": 180, "ME": 180, "SN": 180, "GE": 180, "BN": 180, "UG": 180, "GP": 180, "BB": 180, "AZ": 180, "TZ": 180, "LY": 180, "MQ": 180, "CM": 180, "BW": 180, "ET": 180, "KZ": 180, "NA": 180, "MG": 180, "NC": 180, "MD": 180, "FJ": 180, "BY": 180, "JE": 180, "GU": 180, "YE": 180, "ZM": 180, "IM": 180, "HT": 180, "KH": 180, "AW": 180, "PF": 180, "AF": 180, "BM": 180, "GY": 180, "AM": 180, "MW": 180, "AG": 180, "RW": 180, "GG": 180, "GM": 180, "FO": 180, "LC": 180, "KY": 180, "BJ": 180, "AD": 180, "GD": 180, "VI": 180, "BZ": 180, "VC": 180, "MN": 180, "MZ": 180, "ML": 180, "AO": 180, "GF": 180, "UZ": 180, "DJ": 180, "BF": 180, "MC": 180, "TG": 180, "GL": 180, "GA": 180, "GI": 180, "CD": 180, "KG": 180, "PG": 180, "BT": 180, "KN": 180, "SZ": 180, "LS": 180, "LA": 180, "LI": 180, "MP": 180, "SR": 180, "SC": 180, "VG": 180, "TC": 180, "DM": 180, "MR": 180, "AX": 180, "SM": 180, "SL": 180, "NE": 180, "CG": 180, "AI": 180, "YT": 180, "CV": 180, "GN": 180, "TM": 180, "BI": 180, "TJ": 180, "VU": 180, "SB": 180, "ER": 180, "WS": 180, "AS": 180, "FK": 180, "GQ": 180, "TO": 180, "KM": 180, "PW": 180, "FM": 180, "CF": 180, "SO": 180, "MH": 180, "VA": 180, "TD": 180, "KI": 180, "ST": 180, "TV": 180, "NR": 180, "RE": 180, "LR": 180, "ZW": 180, "CI": 180, "MM": 180, "AN": 180, "AQ": 180, "BQ": 180, "BV": 180, "IO": 180, "CX": 180, "CC": 180, "CK": 180, "CW": 180, "TF": 180, "GW": 180, "HM": 180, "XK": 180, "MS": 180, "NU": 180, "NF": 180, "PN": 180, "BL": 180, "SH": 180, "MF": 180, "PM": 180, "SX": 180, "GS": 180, "SS": 180, "SJ": 180, "TL": 180, "TK": 180, "UM": 180, "WF": 180, "EH": 180}, "global_io_max_campaign_duration": 100}, "spend_cap": "0", "tax_id_status": 0.0, "tax_id_type": "0", "timezone_id": 1.0, "timezone_name": "America/Los_Angeles", "timezone_offset_hours_utc": -7.0, "tos_accepted": {"web_custom_audience_tos": 1}, "user_tasks": ["DRAFT", "ANALYZE", "ADVERTISE", "MANAGE"]}, "emitted_at": 1712657541038} +{"stream": "ads", "data": {"id": "23853620229650398", "bid_type": "ABSOLUTE_OCPM", "account_id": "212551616838260", "campaign_id": "23853619670350398", "adset_id": "23853619670380398", "status": "ACTIVE", "creative": {"id": "23853666124230398"}, "updated_time": "2023-03-21T22:41:46-0700", "created_time": "2023-03-17T08:04:31-0700", "name": "With The Highest Standard for Reliability", "targeting": {"age_max": 60, "age_min": 18, "custom_audiences": [{"id": "23853630753300398", "name": "Lookalike (US, 10%) - Airbyte Cloud Users"}, {"id": "23853683587660398", "name": "Web Traffic [ALL] - _copy"}], "geo_locations": {"countries": ["US"], "location_types": ["home", "recent"]}, "brand_safety_content_filter_levels": ["FACEBOOK_STANDARD", "AN_STANDARD"], "targeting_relaxation_types": {"lookalike": 1, "custom_audience": 1}, "publisher_platforms": ["facebook", "instagram", "audience_network", "messenger"], "facebook_positions": ["feed", "biz_disco_feed", "facebook_reels", "facebook_reels_overlay", "right_hand_column", "video_feeds", "instant_article", "instream_video", "marketplace", "story", "search"], "instagram_positions": ["stream", "story", "explore", "reels", "shop", "explore_home", "profile_feed"], "device_platforms": ["mobile", "desktop"], "messenger_positions": ["story"], "audience_network_positions": ["classic", "instream_video", "rewarded_video"]}, "effective_status": "ACTIVE", "last_updated_by_app_id": "119211728144504", "source_ad_id": "0", "tracking_specs": [{"action.type": ["offsite_conversion"], "fb_pixel": ["917042523049733"]}, {"action.type": ["link_click"], "post": ["662226902575095"], "post.wall": ["112704783733939"]}, {"action.type": ["post_engagement"], "page": ["112704783733939"], "post": ["662226902575095"]}], "conversion_specs": [{"action.type": ["offsite_conversion"], "conversion_id": ["6015304265216283"]}]}, "emitted_at": 1712658287112} +{"stream": "ad_sets", "data": {"id": "120208325579470399", "name": "New Awareness Ad Set", "promoted_object": {"page_id": "112704783733939"}, "account_id": "212551616838260", "updated_time": "2024-03-12T15:02:47-0700", "daily_budget": 500.0, "budget_remaining": 500.0, "effective_status": "ACTIVE", "campaign_id": "120208325579370399", "created_time": "2024-03-12T15:02:47-0700", "start_time": "2024-03-12T15:02:47-0700", "lifetime_budget": 0.0, "targeting": {"age_max": 65, "age_min": 18, "geo_locations": {"countries": ["US"], "location_types": ["home", "recent"]}, "brand_safety_content_filter_levels": ["FACEBOOK_STANDARD", "AN_STANDARD"], "targeting_automation": {"advantage_audience": 1}}, "bid_info": {"REACH": 100}, "bid_strategy": "COST_CAP", "bid_amount": 100.0}, "emitted_at": 1712658345592} +{"stream": "campaigns", "data": {"id": "120208325579370399", "account_id": "212551616838260", "budget_rebalance_flag": false, "budget_remaining": 0.0, "buying_type": "AUCTION", "created_time": "2024-03-12T15:02:47-0700", "configured_status": "ACTIVE", "effective_status": "ACTIVE", "name": "New Awareness Campaign", "objective": "OUTCOME_AWARENESS", "smart_promotion_type": "GUIDED_CREATION", "source_campaign_id": 0.0, "special_ad_category": "NONE", "start_time": "2024-03-12T15:02:47-0700", "status": "ACTIVE", "updated_time": "2024-03-12T15:02:47-0700"}, "emitted_at": 1712658375093} +{"stream": "custom_audiences", "data": {"id": "23853683587660398", "account_id": "212551616838260", "approximate_count_lower_bound": 4700, "approximate_count_upper_bound": 5500, "customer_file_source": "PARTNER_PROVIDED_ONLY", "data_source": {"type": "UNKNOWN", "sub_type": "ANYTHING", "creation_params": "[]"}, "delivery_status": {"code": 200, "description": "This audience is ready for use."}, "description": "Custom Audience-Web Traffic [ALL] - _copy", "is_value_based": false, "name": "Web Traffic [ALL] - _copy", "operation_status": {"code": 200, "description": "Normal"}, "permission_for_actions": {"can_edit": true, "can_see_insight": "True", "can_share": "True", "subtype_supports_lookalike": "True", "supports_recipient_lookalike": "False"}, "retention_days": 0, "subtype": "CUSTOM", "time_content_updated": 1679433484, "time_created": 1679433479, "time_updated": 1679433484}, "emitted_at": 1712658595103} +{"stream": "ad_creatives", "data": {"id": "120208325594230399", "body": "The new open-source standard for data integration platform that syncs data from applications, APIs &", "image_url": "https://scontent.fiev6-1.fna.fbcdn.net/v/t45.1600-4/328822007_23853286818270398_3080145649137672390_n.png?stp=dst-jpg&_nc_cat=107&ccb=1-7&_nc_sid=5f2048&_nc_ohc=9ZAypKWXItIAb7mcCyl&_nc_ht=scontent.fiev6-1.fna&edm=ALjApogEAAAA&oh=00_AfBaau1h5A91_FllubHbSAjTZrrrE4iSShMuwEdOfCEeUA&oe=661AD1C7", "account_id": "212551616838260", "actor_id": "112704783733939", "effective_instagram_story_id": "7022050467905225", "effective_object_story_id": "112704783733939_909690077828775", "name": "The new open-source standard for data integration platform that syncs data from applications, APIs 2024-03-12-5a549cef294fb066e3a89015beefaa89", "instagram_actor_id": "3437258706290825", "instagram_permalink_url": "https://www.instagram.com/p/C4biWAhN22o/", "object_story_spec": {"page_id": "112704783733939", "instagram_actor_id": "3437258706290825", "photo_data": {"caption": "The new open-source standard for data integration platform that syncs data from applications, APIs &", "url": "https://scontent-lax3-2.xx.fbcdn.net/v/t39.30808-6/215641270_329693965368352_281138117304804673_n.png?_nc_cat=101&ccb=1-7&_nc_sid=5f2048&_nc_ohc=XUxh3539kg4AX_-P5ET&_nc_ht=scontent-lax3-2.xx&oh=00_AfC_FAkHtEDyPxE51H4tYqsSerhu4kh03iDs-8JAfSnBnA&oe=65F5F3B6", "image_hash": "0780208b440d95dbae5b5efe724de659"}}, "object_type": "PHOTO", "status": "ACTIVE", "thumbnail_url": "https://scontent.fiev6-1.fna.fbcdn.net/v/t39.30808-6/432222512_909690074495442_1200292255622979228_n.jpg?_nc_cat=100&ccb=1-7&_nc_ohc=ilMFP66G4mkAb5j0-j_&_nc_oc=Adi0qJd21AHY5LwYxTLkyjHkBFo7QNf3QTyuPndms3oSKao1nQu_-30Vv81oaU0JM4s&_nc_ht=scontent.fiev6-1.fna&edm=ALjApogEAAAA&stp=c0.5000x0.5000f_dst-emg0_p64x64_q75&ur=5f2048&_nc_sid=58080a&oh=00_AfBpKgsZpePn6a9m1_u-WvMxDXxck3EhlQXCohfzm-d9NQ&oe=661AFDEF", "image_hash": "0780208b440d95dbae5b5efe724de659"}, "emitted_at": 1712658644930} +{"stream": "activities", "data": {"actor_id": "10230353728925538", "actor_name": "Yowan Ramchoreeter", "application_id": "119211728144504", "application_name": "Power Editor", "date_time_in_timezone": "03/06/2023 at 1:25\u202fPM", "event_time": "2023-03-06T21:25:23+0000", "event_type": "update_campaign_run_status", "extra_data": "{\"run_status\":{\"old_value\":15,\"new_value\":1},\"old_value\":\"Inactive\",\"new_value\":\"Active\",\"type\":\"run_status\"}", "object_id": "23846765228240398", "object_name": "Airbyte Awareness Campaign 1 (sherif)", "object_type": "CAMPAIGN_GROUP", "translated_event_type": "Campaign status updated", "account_id": "212551616838260"}, "emitted_at": 1712658679389} +{"stream": "custom_conversions", "data": {"id": "694166388077667", "account_id": "212551616838260", "creation_time": "2020-04-22T01:36:00+0000", "custom_event_type": "CONTACT", "data_sources": [{"id": "2667253716886462", "source_type": "PIXEL", "name": "Dataline's Pixel"}], "default_conversion_value": 0.0, "event_source_type": "pixel", "is_archived": true, "is_unavailable": false, "name": "SubscribedButtonClick", "retention_days": 0.0, "rule": "{\"and\":[{\"event\":{\"eq\":\"PageView\"}},{\"or\":[{\"URL\":{\"i_contains\":\"SubscribedButtonClick\"}}]}]}"}, "emitted_at": 1712658865429} +{"stream": "images", "data": {"id": "212551616838260:88c2a788337bc15d94961d64b015cba0", "account_id": "212551616838260", "name": "untitled", "original_height": 900, "original_width": 900, "permalink_url": "https://www.facebook.com/ads/image/?d=AQKB_qBTd-uhSkLMyEUS3U61uVDspP0tZKm-sSzViHSXaRXvKWshHWpWGCt7KwCo3_16f-lGRJGqUVHXewu0yDrVgBvLEzk1rxiOczURsIB_hBSDYlmPj3cagCA1W3J_EiXcAoxd-y1qIOHE3iHlKpM-", "status": "ACTIVE", "url": "https://scontent.fiev6-1.fna.fbcdn.net/v/t45.1600-4/93114868_23844521652670398_7298890443847106560_n.png?stp=dst-jpg&_nc_cat=105&ccb=1-7&_nc_sid=5f2048&_nc_ohc=8hOMJcGgmPkAb5fEBc-&_nc_ht=scontent.fiev6-1.fna&edm=AJcBmwoEAAAA&oh=00_AfDDJ0NFo-BTe1FumaStJALbty9ADvWFvgvMyy9i7OSTLQ&oe=661B0269", "created_time": "2020-04-13T18:26:38-0700", "hash": "88c2a788337bc15d94961d64b015cba0", "url_128": "https://scontent.fiev6-1.fna.fbcdn.net/v/t45.1600-4/93114868_23844521652670398_7298890443847106560_n.png?stp=dst-jpg_s168x128&_nc_cat=105&ccb=1-7&_nc_sid=5f2048&_nc_ohc=8hOMJcGgmPkAb5fEBc-&_nc_ht=scontent.fiev6-1.fna&edm=AJcBmwoEAAAA&oh=00_AfCcMeXp4l0PYtZwVk4Zo5yjFdm6qyvx164bg-Huk5PLRA&oe=661B0269", "is_associated_creatives_in_adgroups": false, "updated_time": "2020-04-13T18:26:38-0700", "height": 900, "width": 900}, "emitted_at": 1712658893820} +{"stream": "ads_insights", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846765228310398", "ad_name": "Airbyte Ad", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 0, "conversion_rate_ranking": "UNKNOWN", "cost_per_estimated_ad_recallers": 0.012, "cpm": 2.086957, "cpp": 2.093023, "created_time": "2021-02-09", "ctr": 0.0, "date_start": "2021-09-03", "date_stop": "2021-09-03", "engagement_rate_ranking": "UNKNOWN", "estimated_ad_recallers": 60.0, "frequency": 1.002907, "impressions": 345, "instant_experience_clicks_to_open": 1.0, "instant_experience_clicks_to_start": 1.0, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "quality_ranking": "UNKNOWN", "reach": 344, "social_spend": 0.0, "spend": 0.72, "unique_clicks": 0, "updated_time": "2021-08-27", "video_play_curve_actions": [{"action_type": "video_view"}], "wish_bid": 0.0}, "emitted_at": 1712658959880} +{"stream": "ads_insights", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "actions": [{"action_type": "landing_page_view", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_destination": "243077367363346", "action_target_id": "243077367363346", "action_type": "page_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_destination": "243077367363346", "action_target_id": "243077367363346", "action_type": "post_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_destination": "243077367363346", "action_target_id": "243077367363346", "action_type": "link_click", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}], "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 1, "conversion_rate_ranking": "UNKNOWN", "cost_per_estimated_ad_recallers": 0.012167, "cost_per_inline_link_click": 0.73, "cost_per_inline_post_engagement": 0.73, "cost_per_unique_click": 0.73, "cost_per_unique_inline_link_click": 0.73, "cpc": 0.73, "cpm": 1.733967, "cpp": 1.742243, "created_time": "2021-02-11", "ctr": 0.23753, "date_start": "2021-09-03", "date_stop": "2021-09-03", "engagement_rate_ranking": "UNKNOWN", "estimated_ad_recallers": 60.0, "frequency": 1.004773, "impressions": 421, "inline_link_click_ctr": 0.23753, "inline_link_clicks": 1, "inline_post_engagement": 1, "instant_experience_clicks_to_open": 1.0, "instant_experience_clicks_to_start": 1.0, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "outbound_clicks": [{"action_destination": "243077367363346", "action_target_id": "243077367363346", "action_type": "outbound_click", "value": 1.0}], "quality_ranking": "UNKNOWN", "reach": 419, "social_spend": 0.0, "spend": 0.73, "unique_actions": [{"action_destination": "243077367363346", "action_target_id": "243077367363346", "action_type": "link_click", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_destination": "243077367363346", "action_target_id": "243077367363346", "action_type": "page_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_destination": "243077367363346", "action_target_id": "243077367363346", "action_type": "post_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}], "unique_clicks": 1, "unique_ctr": 0.238663, "unique_inline_link_click_ctr": 0.238663, "unique_inline_link_clicks": 1, "unique_link_clicks_ctr": 0.238663, "unique_outbound_clicks": [{"action_destination": "243077367363346", "action_target_id": "243077367363346", "action_type": "outbound_click", "value": 1.0}], "updated_time": "2021-08-27", "video_play_curve_actions": [{"action_type": "video_view"}], "website_ctr": [{"action_type": "link_click", "value": 0.23753}], "wish_bid": 0.0}, "emitted_at": 1712658959889} +{"stream": "ads_insights_action_carousel_card", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846765228310398", "ad_name": "Airbyte Ad", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 0, "conversion_rate_ranking": "UNKNOWN", "cost_per_estimated_ad_recallers": 0.012, "cpm": 2.086957, "cpp": 2.093023, "created_time": "2021-02-09", "ctr": 0.0, "date_start": "2021-09-03", "date_stop": "2021-09-03", "engagement_rate_ranking": "UNKNOWN", "estimated_ad_recallers": 60.0, "frequency": 1.002907, "impressions": 345, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "quality_ranking": "UNKNOWN", "reach": 344, "social_spend": 0.0, "spend": 0.72, "unique_clicks": 0, "updated_time": "2021-08-27", "video_play_curve_actions": [{"action_type": "video_view"}], "wish_bid": 0.0}, "emitted_at": 1712659054759} +{"stream": "ads_insights_action_carousel_card", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 1, "conversion_rate_ranking": "UNKNOWN", "cost_per_estimated_ad_recallers": 0.012167, "cost_per_inline_link_click": 0.73, "cost_per_inline_post_engagement": 0.73, "cost_per_unique_click": 0.73, "cost_per_unique_inline_link_click": 0.73, "cpc": 0.73, "cpm": 1.733967, "cpp": 1.742243, "created_time": "2021-02-11", "ctr": 0.23753, "date_start": "2021-09-03", "date_stop": "2021-09-03", "engagement_rate_ranking": "UNKNOWN", "estimated_ad_recallers": 60.0, "frequency": 1.004773, "impressions": 421, "inline_link_click_ctr": 0.23753, "inline_post_engagement": 1, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "quality_ranking": "UNKNOWN", "reach": 419, "social_spend": 0.0, "spend": 0.73, "unique_actions": [{"action_type": "link_click", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_type": "page_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_type": "post_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}], "unique_clicks": 1, "unique_ctr": 0.238663, "unique_inline_link_click_ctr": 0.238663, "unique_inline_link_clicks": 1, "updated_time": "2021-08-27", "video_play_curve_actions": [{"action_type": "video_view"}], "website_ctr": [{"action_type": "link_click", "value": 0.23753}], "wish_bid": 0.0}, "emitted_at": 1712659054767} +{"stream": "ads_insights_action_conversion_device", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "actions": [{"action_type": "page_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_type": "post_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_type": "link_click", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}], "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 1, "cost_per_action_type": [{"action_type": "link_click", "value": 0.62, "1d_click": 0.62, "7d_click": 0.62, "28d_click": 0.62}, {"action_type": "page_engagement", "value": 0.62, "1d_click": 0.62, "7d_click": 0.62, "28d_click": 0.62}, {"action_type": "post_engagement", "value": 0.62, "1d_click": 0.62, "7d_click": 0.62, "28d_click": 0.62}], "cost_per_estimated_ad_recallers": 0.010333, "cost_per_inline_link_click": 0.62, "cost_per_inline_post_engagement": 0.62, "cost_per_outbound_click": [{"action_type": "outbound_click", "value": 0.62}], "cost_per_unique_action_type": [{"action_type": "link_click", "value": 0.62, "1d_click": 0.62, "7d_click": 0.62, "28d_click": 0.62}, {"action_type": "post_engagement", "value": 0.62, "1d_click": 0.62, "7d_click": 0.62, "28d_click": 0.62}, {"action_type": "page_engagement", "value": 0.62, "1d_click": 0.62, "7d_click": 0.62, "28d_click": 0.62}], "cost_per_unique_click": 0.62, "cost_per_unique_inline_link_click": 0.62, "cost_per_unique_outbound_click": [{"action_type": "outbound_click", "value": 0.62}], "cpc": 0.62, "cpm": 1.597938, "cpp": 1.606218, "created_time": "2021-02-11", "ctr": 0.257732, "date_start": "2021-09-03", "date_stop": "2021-09-03", "estimated_ad_recallers": 60.0, "frequency": 1.005181, "impressions": 388, "inline_link_click_ctr": 0.257732, "inline_link_clicks": 1, "inline_post_engagement": 1, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "outbound_clicks": [{"action_type": "outbound_click", "value": 1.0}], "outbound_clicks_ctr": [{"action_type": "outbound_click", "value": 0.257732}], "reach": 386, "spend": 0.62, "unique_actions": [{"action_type": "link_click", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_type": "page_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_type": "post_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}], "unique_clicks": 1, "unique_ctr": 0.259067, "unique_inline_link_click_ctr": 0.259067, "unique_inline_link_clicks": 1, "unique_link_clicks_ctr": 0.259067, "unique_outbound_clicks": [{"action_type": "outbound_click", "value": 1.0}], "unique_outbound_clicks_ctr": [{"action_type": "outbound_click", "value": 0.259067}], "updated_time": "2021-08-27", "website_ctr": [{"action_type": "link_click", "value": 0.257732}], "device_platform": "mobile_app"}, "emitted_at": 1712659139260} +{"stream": "ads_insights_action_conversion_device", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 0, "cost_per_estimated_ad_recallers": 0.01, "cpm": 2.5, "cpp": 2.5, "created_time": "2021-02-11", "ctr": 0.0, "date_start": "2021-09-03", "date_stop": "2021-09-03", "estimated_ad_recallers": 1.0, "frequency": 1.0, "impressions": 4, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "reach": 4, "spend": 0.01, "unique_clicks": 0, "updated_time": "2021-08-27", "device_platform": "mobile_web"}, "emitted_at": 1712659139265} +{"stream": "ads_insights_action_reaction", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846765228310398", "ad_name": "Airbyte Ad", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 0, "conversion_rate_ranking": "UNKNOWN", "cost_per_estimated_ad_recallers": 0.012, "cpm": 2.086957, "cpp": 2.093023, "created_time": "2021-02-09", "ctr": 0.0, "date_start": "2021-09-03", "date_stop": "2021-09-03", "engagement_rate_ranking": "UNKNOWN", "estimated_ad_recallers": 60.0, "frequency": 1.002907, "impressions": 345, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "quality_ranking": "UNKNOWN", "reach": 344, "social_spend": 0.0, "spend": 0.72, "unique_clicks": 0, "updated_time": "2021-08-27", "video_play_curve_actions": [{"action_type": "video_view"}], "wish_bid": 0.0}, "emitted_at": 1712659226670} +{"stream": "ads_insights_action_reaction", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "actions": [{"action_type": "landing_page_view", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_type": "page_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_type": "post_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_type": "link_click", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}], "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 1, "conversion_rate_ranking": "UNKNOWN", "cost_per_estimated_ad_recallers": 0.012167, "cost_per_inline_link_click": 0.73, "cost_per_inline_post_engagement": 0.73, "cost_per_unique_click": 0.73, "cost_per_unique_inline_link_click": 0.73, "cpc": 0.73, "cpm": 1.733967, "cpp": 1.742243, "created_time": "2021-02-11", "ctr": 0.23753, "date_start": "2021-09-03", "date_stop": "2021-09-03", "engagement_rate_ranking": "UNKNOWN", "estimated_ad_recallers": 60.0, "frequency": 1.004773, "impressions": 421, "inline_link_click_ctr": 0.23753, "inline_link_clicks": 1, "inline_post_engagement": 1, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "outbound_clicks": [{"action_type": "outbound_click", "value": 1.0}], "quality_ranking": "UNKNOWN", "reach": 419, "social_spend": 0.0, "spend": 0.73, "unique_actions": [{"action_type": "link_click", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_type": "page_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_type": "post_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}], "unique_clicks": 1, "unique_ctr": 0.238663, "unique_inline_link_click_ctr": 0.238663, "unique_inline_link_clicks": 1, "unique_link_clicks_ctr": 0.238663, "unique_outbound_clicks": [{"action_type": "outbound_click", "value": 1.0}], "updated_time": "2021-08-27", "video_play_curve_actions": [{"action_type": "video_view"}], "website_ctr": [{"action_type": "link_click", "value": 0.23753}], "wish_bid": 0.0}, "emitted_at": 1712659226677} +{"stream": "ads_insights_action_type", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846765228310398", "ad_name": "Airbyte Ad", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 1, "conversion_rate_ranking": "UNKNOWN", "cost_per_estimated_ad_recallers": 0.04525, "cost_per_unique_click": 1.81, "cpc": 1.81, "cpm": 9.095477, "cpp": 9.141414, "created_time": "2021-02-09", "ctr": 0.502513, "date_start": "2021-09-02", "date_stop": "2021-09-02", "engagement_rate_ranking": "UNKNOWN", "estimated_ad_recallers": 40.0, "frequency": 1.005051, "impressions": 199, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "quality_ranking": "UNKNOWN", "reach": 198, "social_spend": 0.0, "spend": 1.81, "unique_clicks": 1, "unique_ctr": 0.505051, "updated_time": "2021-08-27", "video_play_curve_actions": [{"action_type": "video_view"}], "wish_bid": 0.0}, "emitted_at": 1712659336652} +{"stream": "ads_insights_action_type", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 0, "conversion_rate_ranking": "UNKNOWN", "cost_per_estimated_ad_recallers": 0.018, "cpm": 2.337662, "cpp": 2.337662, "created_time": "2021-02-11", "ctr": 0.0, "date_start": "2021-09-02", "date_stop": "2021-09-02", "engagement_rate_ranking": "UNKNOWN", "estimated_ad_recallers": 20.0, "frequency": 1.0, "impressions": 154, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "quality_ranking": "UNKNOWN", "reach": 154, "social_spend": 0.0, "spend": 0.36, "unique_clicks": 0, "updated_time": "2021-08-27", "video_play_curve_actions": [{"action_type": "video_view"}], "wish_bid": 0.0}, "emitted_at": 1712659336661} +{"stream": "ads_insights_action_video_sound", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846765228310398", "ad_name": "Airbyte Ad", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 0, "conversion_rate_ranking": "UNKNOWN", "cost_per_estimated_ad_recallers": 0.012, "cpm": 2.086957, "cpp": 2.093023, "created_time": "2021-02-09", "ctr": 0.0, "date_start": "2021-09-03", "date_stop": "2021-09-03", "engagement_rate_ranking": "UNKNOWN", "estimated_ad_recallers": 60.0, "frequency": 1.002907, "impressions": 345, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "quality_ranking": "UNKNOWN", "reach": 344, "social_spend": 0.0, "spend": 0.72, "unique_clicks": 0, "updated_time": "2021-08-27", "video_play_curve_actions": [{"action_type": "video_view"}], "wish_bid": 0.0}, "emitted_at": 1712659437488} +{"stream": "ads_insights_action_video_sound", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "actions": [{"action_type": "landing_page_view", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_type": "page_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_type": "post_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_type": "link_click", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}], "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 1, "conversion_rate_ranking": "UNKNOWN", "cost_per_estimated_ad_recallers": 0.012167, "cost_per_inline_link_click": 0.73, "cost_per_inline_post_engagement": 0.73, "cost_per_unique_click": 0.73, "cost_per_unique_inline_link_click": 0.73, "cpc": 0.73, "cpm": 1.733967, "cpp": 1.742243, "created_time": "2021-02-11", "ctr": 0.23753, "date_start": "2021-09-03", "date_stop": "2021-09-03", "engagement_rate_ranking": "UNKNOWN", "estimated_ad_recallers": 60.0, "frequency": 1.004773, "impressions": 421, "inline_link_click_ctr": 0.23753, "inline_link_clicks": 1, "inline_post_engagement": 1, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "outbound_clicks": [{"action_type": "outbound_click", "value": 1.0}], "quality_ranking": "UNKNOWN", "reach": 419, "social_spend": 0.0, "spend": 0.73, "unique_clicks": 1, "unique_ctr": 0.238663, "unique_inline_link_click_ctr": 0.238663, "unique_inline_link_clicks": 1, "unique_link_clicks_ctr": 0.238663, "unique_outbound_clicks": [{"action_type": "outbound_click", "value": 1.0}], "updated_time": "2021-08-27", "video_play_curve_actions": [{"action_type": "video_view"}], "website_ctr": [{"action_type": "link_click", "value": 0.23753}], "wish_bid": 0.0}, "emitted_at": 1712659437493} +{"stream": "ads_insights_action_video_type", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846765228310398", "ad_name": "Airbyte Ad", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 0, "conversion_rate_ranking": "UNKNOWN", "cost_per_estimated_ad_recallers": 0.012, "cpm": 2.086957, "cpp": 2.093023, "created_time": "2021-02-09", "ctr": 0.0, "date_start": "2021-09-03", "date_stop": "2021-09-03", "engagement_rate_ranking": "UNKNOWN", "estimated_ad_recallers": 60.0, "frequency": 1.002907, "impressions": 345, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "quality_ranking": "UNKNOWN", "reach": 344, "social_spend": 0.0, "spend": 0.72, "unique_clicks": 0, "updated_time": "2021-08-27", "video_play_curve_actions": [{"action_type": "video_view"}], "wish_bid": 0.0}, "emitted_at": 1712659552212} +{"stream": "ads_insights_action_video_type", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "actions": [{"action_type": "landing_page_view", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_type": "page_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_type": "post_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_type": "link_click", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}], "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 1, "conversion_rate_ranking": "UNKNOWN", "cost_per_estimated_ad_recallers": 0.012167, "cost_per_inline_link_click": 0.73, "cost_per_inline_post_engagement": 0.73, "cost_per_unique_click": 0.73, "cost_per_unique_inline_link_click": 0.73, "cpc": 0.73, "cpm": 1.733967, "cpp": 1.742243, "created_time": "2021-02-11", "ctr": 0.23753, "date_start": "2021-09-03", "date_stop": "2021-09-03", "engagement_rate_ranking": "UNKNOWN", "estimated_ad_recallers": 60.0, "frequency": 1.004773, "impressions": 421, "inline_link_click_ctr": 0.23753, "inline_link_clicks": 1, "inline_post_engagement": 1, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "outbound_clicks": [{"action_type": "outbound_click", "value": 1.0}], "quality_ranking": "UNKNOWN", "reach": 419, "social_spend": 0.0, "spend": 0.73, "unique_actions": [{"action_type": "link_click", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_type": "page_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_type": "post_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}], "unique_clicks": 1, "unique_ctr": 0.238663, "unique_inline_link_click_ctr": 0.238663, "unique_inline_link_clicks": 1, "unique_link_clicks_ctr": 0.238663, "unique_outbound_clicks": [{"action_type": "outbound_click", "value": 1.0}], "updated_time": "2021-08-27", "video_play_curve_actions": [{"action_type": "video_view"}], "website_ctr": [{"action_type": "link_click", "value": 0.23753}], "wish_bid": 0.0}, "emitted_at": 1712659552219} +{"stream": "ads_insights_age_and_gender", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 0, "cost_per_estimated_ad_recallers": 0.0, "cpm": 0.0, "cpp": 0.0, "created_time": "2021-02-11", "ctr": 0.0, "date_start": "2021-09-03", "date_stop": "2021-09-03", "estimated_ad_recallers": 1.0, "frequency": 1.0, "impressions": 10, "instant_experience_clicks_to_open": 1.0, "instant_experience_clicks_to_start": 1.0, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "reach": 10, "spend": 0.0, "unique_clicks": 0, "updated_time": "2021-08-27", "age": "65+", "gender": "female"}, "emitted_at": 1712659638071} +{"stream": "ads_insights_age_and_gender", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 0, "cost_per_estimated_ad_recallers": 0.05, "cpm": 1.785714, "cpp": 1.785714, "created_time": "2021-02-11", "ctr": 0.0, "date_start": "2021-09-03", "date_stop": "2021-09-03", "estimated_ad_recallers": 1.0, "frequency": 1.0, "impressions": 28, "instant_experience_clicks_to_open": 1.0, "instant_experience_clicks_to_start": 1.0, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "reach": 28, "spend": 0.05, "unique_clicks": 0, "updated_time": "2021-08-27", "age": "65+", "gender": "male"}, "emitted_at": 1712659638074} +{"stream": "ads_insights_country", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846765228310398", "ad_name": "Airbyte Ad", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 0, "cost_per_estimated_ad_recallers": 0.012, "cpm": 2.086957, "cpp": 2.093023, "created_time": "2021-02-09", "ctr": 0.0, "date_start": "2021-09-03", "date_stop": "2021-09-03", "estimated_ad_recallers": 60.0, "frequency": 1.002907, "impressions": 345, "instant_experience_clicks_to_open": 1.0, "instant_experience_clicks_to_start": 1.0, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "reach": 344, "spend": 0.72, "unique_clicks": 0, "updated_time": "2021-08-27", "country": "US"}, "emitted_at": 1712659720257} +{"stream": "ads_insights_country", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "actions": [{"action_destination": "243077367363346", "action_target_id": "243077367363346", "action_type": "page_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_destination": "243077367363346", "action_target_id": "243077367363346", "action_type": "post_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_destination": "243077367363346", "action_target_id": "243077367363346", "action_type": "link_click", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}], "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 1, "cost_per_estimated_ad_recallers": 0.012167, "cost_per_inline_link_click": 0.73, "cost_per_inline_post_engagement": 0.73, "cost_per_unique_click": 0.73, "cost_per_unique_inline_link_click": 0.73, "cpc": 0.73, "cpm": 1.733967, "cpp": 1.742243, "created_time": "2021-02-11", "ctr": 0.23753, "date_start": "2021-09-03", "date_stop": "2021-09-03", "estimated_ad_recallers": 60.0, "frequency": 1.004773, "impressions": 421, "inline_link_click_ctr": 0.23753, "inline_link_clicks": 1, "inline_post_engagement": 1, "instant_experience_clicks_to_open": 1.0, "instant_experience_clicks_to_start": 1.0, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "outbound_clicks": [{"action_destination": "243077367363346", "action_target_id": "243077367363346", "action_type": "outbound_click", "value": 1.0}], "reach": 419, "spend": 0.73, "unique_actions": [{"action_destination": "243077367363346", "action_target_id": "243077367363346", "action_type": "link_click", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_destination": "243077367363346", "action_target_id": "243077367363346", "action_type": "page_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_destination": "243077367363346", "action_target_id": "243077367363346", "action_type": "post_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}], "unique_clicks": 1, "unique_ctr": 0.238663, "unique_inline_link_click_ctr": 0.238663, "unique_inline_link_clicks": 1, "unique_link_clicks_ctr": 0.238663, "unique_outbound_clicks": [{"action_destination": "243077367363346", "action_target_id": "243077367363346", "action_type": "outbound_click", "value": 1.0}], "updated_time": "2021-08-27", "website_ctr": [{"action_type": "link_click", "value": 0.23753}], "country": "US"}, "emitted_at": 1712659720263} +{"stream": "ads_insights_delivery_device", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "actions": [{"action_type": "page_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_type": "post_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_type": "link_click", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}], "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 1, "cost_per_action_type": [{"action_type": "link_click", "value": 0.62, "1d_click": 0.62, "7d_click": 0.62, "28d_click": 0.62}, {"action_type": "page_engagement", "value": 0.62, "1d_click": 0.62, "7d_click": 0.62, "28d_click": 0.62}, {"action_type": "post_engagement", "value": 0.62, "1d_click": 0.62, "7d_click": 0.62, "28d_click": 0.62}], "cost_per_estimated_ad_recallers": 0.010333, "cost_per_inline_link_click": 0.62, "cost_per_inline_post_engagement": 0.62, "cost_per_outbound_click": [{"action_type": "outbound_click", "value": 0.62}], "cost_per_unique_action_type": [{"action_type": "link_click", "value": 0.62, "1d_click": 0.62, "7d_click": 0.62, "28d_click": 0.62}, {"action_type": "post_engagement", "value": 0.62, "1d_click": 0.62, "7d_click": 0.62, "28d_click": 0.62}, {"action_type": "page_engagement", "value": 0.62, "1d_click": 0.62, "7d_click": 0.62, "28d_click": 0.62}], "cost_per_unique_click": 0.62, "cost_per_unique_inline_link_click": 0.62, "cost_per_unique_outbound_click": [{"action_type": "outbound_click", "value": 0.62}], "cpc": 0.62, "cpm": 1.597938, "cpp": 1.606218, "created_time": "2021-02-11", "ctr": 0.257732, "date_start": "2021-09-03", "date_stop": "2021-09-03", "estimated_ad_recallers": 60.0, "frequency": 1.005181, "impressions": 388, "inline_link_click_ctr": 0.257732, "inline_link_clicks": 1, "inline_post_engagement": 1, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "outbound_clicks": [{"action_type": "outbound_click", "value": 1.0}], "outbound_clicks_ctr": [{"action_type": "outbound_click", "value": 0.257732}], "reach": 386, "spend": 0.62, "unique_actions": [{"action_type": "link_click", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_type": "page_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_type": "post_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}], "unique_clicks": 1, "unique_ctr": 0.259067, "unique_inline_link_click_ctr": 0.259067, "unique_inline_link_clicks": 1, "unique_link_clicks_ctr": 0.259067, "unique_outbound_clicks": [{"action_type": "outbound_click", "value": 1.0}], "unique_outbound_clicks_ctr": [{"action_type": "outbound_click", "value": 0.259067}], "updated_time": "2021-08-27", "website_ctr": [{"action_type": "link_click", "value": 0.257732}], "device_platform": "mobile_app"}, "emitted_at": 1712659806097} +{"stream": "ads_insights_delivery_device", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 0, "cost_per_estimated_ad_recallers": 0.01, "cpm": 2.5, "cpp": 2.5, "created_time": "2021-02-11", "ctr": 0.0, "date_start": "2021-09-03", "date_stop": "2021-09-03", "estimated_ad_recallers": 1.0, "frequency": 1.0, "impressions": 4, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "reach": 4, "spend": 0.01, "unique_clicks": 0, "updated_time": "2021-08-27", "device_platform": "mobile_web"}, "emitted_at": 1712659806103} +{"stream": "ads_insights_delivery_platform", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "actions": [{"action_type": "page_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_type": "post_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_type": "link_click", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}], "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 1, "cost_per_action_type": [{"action_type": "link_click", "value": 0.73, "1d_click": 0.73, "7d_click": 0.73, "28d_click": 0.73}, {"action_type": "page_engagement", "value": 0.73, "1d_click": 0.73, "7d_click": 0.73, "28d_click": 0.73}, {"action_type": "post_engagement", "value": 0.73, "1d_click": 0.73, "7d_click": 0.73, "28d_click": 0.73}], "cost_per_estimated_ad_recallers": 0.012167, "cost_per_inline_link_click": 0.73, "cost_per_inline_post_engagement": 0.73, "cost_per_outbound_click": [{"action_type": "outbound_click", "value": 0.73}], "cost_per_unique_action_type": [{"action_type": "link_click", "value": 0.73, "1d_click": 0.73, "7d_click": 0.73, "28d_click": 0.73}, {"action_type": "post_engagement", "value": 0.73, "1d_click": 0.73, "7d_click": 0.73, "28d_click": 0.73}, {"action_type": "page_engagement", "value": 0.73, "1d_click": 0.73, "7d_click": 0.73, "28d_click": 0.73}], "cost_per_unique_click": 0.73, "cost_per_unique_inline_link_click": 0.73, "cost_per_unique_outbound_click": [{"action_type": "outbound_click", "value": 0.73}], "cpc": 0.73, "cpm": 1.793612, "cpp": 1.802469, "created_time": "2021-02-11", "ctr": 0.2457, "date_start": "2021-09-03", "date_stop": "2021-09-03", "estimated_ad_recallers": 60.0, "frequency": 1.004938, "impressions": 407, "inline_link_click_ctr": 0.2457, "inline_link_clicks": 1, "inline_post_engagement": 1, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "outbound_clicks": [{"action_type": "outbound_click", "value": 1.0}], "outbound_clicks_ctr": [{"action_type": "outbound_click", "value": 0.2457}], "reach": 405, "spend": 0.73, "unique_actions": [{"action_type": "link_click", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_type": "page_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_type": "post_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}], "unique_clicks": 1, "unique_ctr": 0.246914, "unique_inline_link_click_ctr": 0.246914, "unique_inline_link_clicks": 1, "unique_link_clicks_ctr": 0.246914, "unique_outbound_clicks": [{"action_type": "outbound_click", "value": 1.0}], "unique_outbound_clicks_ctr": [{"action_type": "outbound_click", "value": 0.246914}], "updated_time": "2021-08-27", "website_ctr": [{"action_type": "link_click", "value": 0.2457}], "publisher_platform": "facebook"}, "emitted_at": 1712659882782} +{"stream": "ads_insights_delivery_platform", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 0, "cost_per_estimated_ad_recallers": 0.0, "cpm": 0.0, "cpp": 0.0, "created_time": "2021-02-11", "ctr": 0.0, "date_start": "2021-09-03", "date_stop": "2021-09-03", "estimated_ad_recallers": 1.0, "frequency": 1.0, "impressions": 14, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "reach": 14, "spend": 0.0, "unique_clicks": 0, "updated_time": "2021-08-27", "publisher_platform": "instagram"}, "emitted_at": 1712659882790} +{"stream": "ads_insights_delivery_platform_and_device_platform", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 0, "cost_per_estimated_ad_recallers": 0.01, "cpm": 2.5, "cpp": 2.5, "created_time": "2021-02-11", "ctr": 0.0, "date_start": "2021-09-03", "date_stop": "2021-09-03", "estimated_ad_recallers": 1.0, "frequency": 1.0, "impressions": 4, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "reach": 4, "spend": 0.01, "unique_clicks": 0, "updated_time": "2021-08-27", "publisher_platform": "facebook", "device_platform": "mobile_web"}, "emitted_at": 1712659965465} +{"stream": "ads_insights_delivery_platform_and_device_platform", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 0, "cost_per_estimated_ad_recallers": 0.0, "cpm": 0.0, "cpp": 0.0, "created_time": "2021-02-11", "ctr": 0.0, "date_start": "2021-09-03", "date_stop": "2021-09-03", "estimated_ad_recallers": 1.0, "frequency": 1.0, "impressions": 14, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "reach": 14, "spend": 0.0, "unique_clicks": 0, "updated_time": "2021-08-27", "publisher_platform": "instagram", "device_platform": "mobile_app"}, "emitted_at": 1712659965469} +{"stream": "ads_insights_demographics_age", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 0, "cost_per_estimated_ad_recallers": 0.013, "cpm": 1.511628, "cpp": 1.511628, "created_time": "2021-02-11", "ctr": 0.0, "date_start": "2021-09-03", "date_stop": "2021-09-03", "estimated_ad_recallers": 10.0, "frequency": 1.0, "impressions": 86, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "reach": 86, "spend": 0.13, "unique_clicks": 0, "updated_time": "2021-08-27", "age": "55-64"}, "emitted_at": 1712660632747} +{"stream": "ads_insights_demographics_age", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 0, "cost_per_estimated_ad_recallers": 0.005, "cpm": 1.315789, "cpp": 1.315789, "created_time": "2021-02-11", "ctr": 0.0, "date_start": "2021-09-03", "date_stop": "2021-09-03", "estimated_ad_recallers": 10.0, "frequency": 1.0, "impressions": 38, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "reach": 38, "spend": 0.05, "unique_clicks": 0, "updated_time": "2021-08-27", "age": "65+"}, "emitted_at": 1712660632751} +{"stream": "ads_insights_demographics_country", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846765228310398", "ad_name": "Airbyte Ad", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 0, "cost_per_estimated_ad_recallers": 0.012, "cpm": 2.086957, "cpp": 2.093023, "created_time": "2021-02-09", "ctr": 0.0, "date_start": "2021-09-03", "date_stop": "2021-09-03", "estimated_ad_recallers": 60.0, "frequency": 1.002907, "impressions": 345, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "reach": 344, "spend": 0.72, "unique_clicks": 0, "updated_time": "2021-08-27", "country": "US"}, "emitted_at": 1712660729540} +{"stream": "ads_insights_demographics_country", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "actions": [{"action_type": "page_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_type": "post_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_type": "link_click", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}], "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 1, "cost_per_action_type": [{"action_type": "link_click", "value": 0.73, "1d_click": 0.73, "7d_click": 0.73, "28d_click": 0.73}, {"action_type": "page_engagement", "value": 0.73, "1d_click": 0.73, "7d_click": 0.73, "28d_click": 0.73}, {"action_type": "post_engagement", "value": 0.73, "1d_click": 0.73, "7d_click": 0.73, "28d_click": 0.73}], "cost_per_estimated_ad_recallers": 0.012167, "cost_per_inline_link_click": 0.73, "cost_per_inline_post_engagement": 0.73, "cost_per_outbound_click": [{"action_type": "outbound_click", "value": 0.73}], "cost_per_unique_action_type": [{"action_type": "link_click", "value": 0.73, "1d_click": 0.73, "7d_click": 0.73, "28d_click": 0.73}, {"action_type": "post_engagement", "value": 0.73, "1d_click": 0.73, "7d_click": 0.73, "28d_click": 0.73}, {"action_type": "page_engagement", "value": 0.73, "1d_click": 0.73, "7d_click": 0.73, "28d_click": 0.73}], "cost_per_unique_click": 0.73, "cost_per_unique_inline_link_click": 0.73, "cost_per_unique_outbound_click": [{"action_type": "outbound_click", "value": 0.73}], "cpc": 0.73, "cpm": 1.733967, "cpp": 1.742243, "created_time": "2021-02-11", "ctr": 0.23753, "date_start": "2021-09-03", "date_stop": "2021-09-03", "estimated_ad_recallers": 60.0, "frequency": 1.004773, "impressions": 421, "inline_link_click_ctr": 0.23753, "inline_link_clicks": 1, "inline_post_engagement": 1, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "outbound_clicks": [{"action_type": "outbound_click", "value": 1.0}], "outbound_clicks_ctr": [{"action_type": "outbound_click", "value": 0.23753}], "reach": 419, "spend": 0.73, "unique_actions": [{"action_type": "link_click", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_type": "page_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_type": "post_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}], "unique_clicks": 1, "unique_ctr": 0.238663, "unique_inline_link_click_ctr": 0.238663, "unique_inline_link_clicks": 1, "unique_link_clicks_ctr": 0.238663, "unique_outbound_clicks": [{"action_type": "outbound_click", "value": 1.0}], "unique_outbound_clicks_ctr": [{"action_type": "outbound_click", "value": 0.238663}], "updated_time": "2021-08-27", "website_ctr": [{"action_type": "link_click", "value": 0.23753}], "country": "US"}, "emitted_at": 1712660729547} +{"stream": "ads_insights_demographics_dma_region", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 0, "cost_per_estimated_ad_recallers": 0.0, "cpm": 0.0, "created_time": "2021-02-11", "ctr": 0.0, "date_start": "2021-09-03", "date_stop": "2021-09-03", "estimated_ad_recallers": 1.0, "frequency": 1.0, "impressions": 1, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "reach": 1, "spend": 0.0, "unique_clicks": 0, "updated_time": "2021-08-27", "dma": "Yakima-Pasco-Rchlnd-Knnwck"}, "emitted_at": 1712660822624} +{"stream": "ads_insights_demographics_dma_region", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 0, "cost_per_estimated_ad_recallers": 0.0, "cpm": 0.0, "created_time": "2021-02-11", "ctr": 0.0, "date_start": "2021-09-03", "date_stop": "2021-09-03", "estimated_ad_recallers": 1.0, "frequency": 1.0, "impressions": 1, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "reach": 1, "spend": 0.0, "unique_clicks": 0, "updated_time": "2021-08-27", "dma": "Yuma-El Centro"}, "emitted_at": 1712660822627} +{"stream": "ads_insights_demographics_gender", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "actions": [{"action_type": "page_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_type": "post_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_type": "link_click", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}], "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 1, "cost_per_action_type": [{"action_type": "link_click", "value": 0.59, "1d_click": 0.59, "7d_click": 0.59, "28d_click": 0.59}, {"action_type": "page_engagement", "value": 0.59, "1d_click": 0.59, "7d_click": 0.59, "28d_click": 0.59}, {"action_type": "post_engagement", "value": 0.59, "1d_click": 0.59, "7d_click": 0.59, "28d_click": 0.59}], "cost_per_estimated_ad_recallers": 0.0118, "cost_per_inline_link_click": 0.59, "cost_per_inline_post_engagement": 0.59, "cost_per_outbound_click": [{"action_type": "outbound_click", "value": 0.59}], "cost_per_unique_action_type": [{"action_type": "link_click", "value": 0.59, "1d_click": 0.59, "7d_click": 0.59, "28d_click": 0.59}, {"action_type": "post_engagement", "value": 0.59, "1d_click": 0.59, "7d_click": 0.59, "28d_click": 0.59}, {"action_type": "page_engagement", "value": 0.59, "1d_click": 0.59, "7d_click": 0.59, "28d_click": 0.59}], "cost_per_unique_click": 0.59, "cost_per_unique_inline_link_click": 0.59, "cost_per_unique_outbound_click": [{"action_type": "outbound_click", "value": 0.59}], "cpc": 0.59, "cpm": 1.657303, "cpp": 1.666667, "created_time": "2021-02-11", "ctr": 0.280899, "date_start": "2021-09-03", "date_stop": "2021-09-03", "estimated_ad_recallers": 50.0, "frequency": 1.00565, "impressions": 356, "inline_link_click_ctr": 0.280899, "inline_link_clicks": 1, "inline_post_engagement": 1, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "outbound_clicks": [{"action_type": "outbound_click", "value": 1.0}], "outbound_clicks_ctr": [{"action_type": "outbound_click", "value": 0.280899}], "reach": 354, "spend": 0.59, "unique_actions": [{"action_type": "link_click", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_type": "page_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}, {"action_type": "post_engagement", "value": 1.0, "1d_click": 1.0, "7d_click": 1.0, "28d_click": 1.0}], "unique_clicks": 1, "unique_ctr": 0.282486, "unique_inline_link_click_ctr": 0.282486, "unique_inline_link_clicks": 1, "unique_link_clicks_ctr": 0.282486, "unique_outbound_clicks": [{"action_type": "outbound_click", "value": 1.0}], "unique_outbound_clicks_ctr": [{"action_type": "outbound_click", "value": 0.282486}], "updated_time": "2021-08-27", "website_ctr": [{"action_type": "link_click", "value": 0.280899}], "gender": "male"}, "emitted_at": 1712660890317} +{"stream": "ads_insights_demographics_gender", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 0, "cost_per_estimated_ad_recallers": 0.0, "cpm": 0.0, "cpp": 0.0, "created_time": "2021-02-11", "ctr": 0.0, "date_start": "2021-09-03", "date_stop": "2021-09-03", "estimated_ad_recallers": 1.0, "frequency": 1.0, "impressions": 3, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "reach": 3, "spend": 0.0, "unique_clicks": 0, "updated_time": "2021-08-27", "gender": "unknown"}, "emitted_at": 1712660890323} +{"stream": "ads_insights_dma", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 0, "cost_per_estimated_ad_recallers": 0.0, "cpm": 0.0, "created_time": "2021-02-11", "ctr": 0.0, "date_start": "2021-09-03", "date_stop": "2021-09-03", "estimated_ad_recallers": 1.0, "frequency": 1.0, "impressions": 1, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "reach": 1, "spend": 0.0, "unique_clicks": 0, "updated_time": "2021-08-27", "dma": "Yakima-Pasco-Rchlnd-Knnwck"}, "emitted_at": 1712660987377} +{"stream": "ads_insights_dma", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 0, "cost_per_estimated_ad_recallers": 0.0, "cpm": 0.0, "created_time": "2021-02-11", "ctr": 0.0, "date_start": "2021-09-03", "date_stop": "2021-09-03", "estimated_ad_recallers": 1.0, "frequency": 1.0, "impressions": 1, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "reach": 1, "spend": 0.0, "unique_clicks": 0, "updated_time": "2021-08-27", "dma": "Yuma-El Centro"}, "emitted_at": 1712660987380} +{"stream": "ads_insights_platform_and_device", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 0, "cost_per_estimated_ad_recallers": 0.0, "cpm": 0.0, "cpp": 0.0, "created_time": "2021-02-11", "ctr": 0.0, "date_start": "2021-09-03", "date_stop": "2021-09-03", "estimated_ad_recallers": 1.0, "frequency": 1.0, "impressions": 1, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "reach": 1, "spend": 0.0, "unique_clicks": 0, "updated_time": "2021-08-27", "publisher_platform": "instagram", "platform_position": "instagram_explore", "impression_device": "android_smartphone"}, "emitted_at": 1712661063894} +{"stream": "ads_insights_platform_and_device", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 0, "cost_per_estimated_ad_recallers": 0.0, "cpm": 0.0, "cpp": 0.0, "created_time": "2021-02-11", "ctr": 0.0, "date_start": "2021-09-03", "date_stop": "2021-09-03", "estimated_ad_recallers": 1.0, "frequency": 1.0, "impressions": 1, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "reach": 1, "spend": 0.0, "unique_clicks": 0, "updated_time": "2021-08-27", "publisher_platform": "instagram", "platform_position": "instagram_stories", "impression_device": "iphone"}, "emitted_at": 1712661063897} +{"stream": "ads_insights_region", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 0, "cost_per_estimated_ad_recallers": 0.01, "cpm": 2.0, "cpp": 2.0, "created_time": "2021-02-11", "ctr": 0.0, "date_start": "2021-09-03", "date_stop": "2021-09-03", "estimated_ad_recallers": 1.0, "frequency": 1.0, "impressions": 5, "instant_experience_clicks_to_open": 1.0, "instant_experience_clicks_to_start": 1.0, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "reach": 5, "spend": 0.01, "unique_clicks": 0, "updated_time": "2021-08-27", "region": "West Virginia"}, "emitted_at": 1712661136056} +{"stream": "ads_insights_region", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 0, "cost_per_estimated_ad_recallers": 0.01, "cpm": 1.0, "cpp": 1.0, "created_time": "2021-02-11", "ctr": 0.0, "date_start": "2021-09-03", "date_stop": "2021-09-03", "estimated_ad_recallers": 1.0, "frequency": 1.0, "impressions": 10, "instant_experience_clicks_to_open": 1.0, "instant_experience_clicks_to_start": 1.0, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "reach": 10, "spend": 0.01, "unique_clicks": 0, "updated_time": "2021-08-27", "region": "Wisconsin"}, "emitted_at": 1712661136060} +{"stream": "customcustom_insight_stream", "data": {"ad_id": "23846784938030398", "account_id": "212551616838260", "clicks": 1, "account_name": "Airbyte", "cpc": 0.59, "date_start": "2021-09-03", "date_stop": "2021-09-03", "gender": "male"}, "emitted_at": 1712661206440} +{"stream": "customcustom_insight_stream", "data": {"ad_id": "23846784938030398", "account_id": "212551616838260", "clicks": 0, "account_name": "Airbyte", "date_start": "2021-09-03", "date_stop": "2021-09-03", "gender": "unknown"}, "emitted_at": 1712661206445} diff --git a/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/test_streams.py b/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/test_streams.py index 073577cea7860..e3508bc24a01e 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/test_streams.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/test_streams.py @@ -8,7 +8,15 @@ from typing import Any, List, MutableMapping, Set, Tuple import pytest -from airbyte_cdk.models import AirbyteMessage, ConfiguredAirbyteCatalog, ConfiguredAirbyteStream, DestinationSyncMode, SyncMode, Type +from airbyte_cdk.models import ( + AirbyteMessage, + ConfiguredAirbyteCatalog, + ConfiguredAirbyteStream, + DestinationSyncMode, + StreamDescriptor, + SyncMode, + Type, +) from source_facebook_marketing.source import SourceFacebookMarketing @@ -47,7 +55,7 @@ class TestFacebookMarketingSource: @pytest.mark.parametrize( "stream_name, deleted_id", [ - ("ads", "23846756820320398"), + # ("ads", "23846756820320398"), ("campaigns", "23846541919710398"), ("ad_sets", "23846541706990398"), ], @@ -60,36 +68,39 @@ def test_streams_with_include_deleted(self, stream_name, deleted_id, config_with account_id = config_with_include_deleted["account_id"] assert states, "incremental read should produce states" - for name, state in states[-1].state.data.items(): - assert "filter_statuses" in state[account_id], f"State for {name} should include `filter_statuses` flag" + actual_stream_name = states[-1].state.stream.stream_descriptor.name + assert states[-1].state.stream.stream_descriptor == StreamDescriptor(name=stream_name) + assert "filter_statuses" in states[-1].state.stream.stream_state.dict()[account_id], f"State for {actual_stream_name} should include `filter_statuses` flag" - assert deleted_records, f"{stream_name} stream should have deleted records returned" - assert is_specific_deleted_pulled, f"{stream_name} stream should have a deleted record with id={deleted_id}" + # TODO: This should be converted into a mock server test. There is a 37 month query window and our deleted records + # can fall outside the window and affect these tests which hit the real Meta Graph API + # assert deleted_records, f"{stream_name} stream should have deleted records returned" + # assert is_specific_deleted_pulled, f"{stream_name} stream should have a deleted record with id={deleted_id}" @pytest.mark.parametrize( "stream_name, deleted_num, filter_statuses", [ - ("ads", 2, False), + # ("ads", 2, False), ("campaigns", 3, False), ("ad_sets", 1, False), - ( - "ads", - 0, - [ - "ACTIVE", - "ADSET_PAUSED", - "ARCHIVED", - "CAMPAIGN_PAUSED", - "DELETED", - "DISAPPROVED", - "IN_PROCESS", - "PAUSED", - "PENDING_BILLING_INFO", - "PENDING_REVIEW", - "PREAPPROVED", - "WITH_ISSUES", - ], - ), + # ( + # "ads", + # 0, + # [ + # "ACTIVE", + # "ADSET_PAUSED", + # "ARCHIVED", + # "CAMPAIGN_PAUSED", + # "DELETED", + # "DISAPPROVED", + # "IN_PROCESS", + # "PAUSED", + # "PENDING_BILLING_INFO", + # "PENDING_REVIEW", + # "PREAPPROVED", + # "WITH_ISSUES", + # ], + # ), ( "campaigns", 0, @@ -134,10 +145,13 @@ def test_streams_with_include_deleted_and_state( value["filter_statuses"] = filter_statuses catalog = self._slice_catalog(configured_catalog, {stream_name}) - records, states = self._read_records(config_with_include_deleted, catalog, state=state) - deleted_records = list(filter(self._deleted_record, records)) + # TODO: This should be converted into a mock server test. There is a 37 month query window and our deleted records + # can fall outside the window and affect these tests which hit the real Meta Graph API + self._read_records(config_with_include_deleted, catalog, state=state) + # records, states = self._read_records(config_with_include_deleted, catalog, state=state) + # deleted_records = list(filter(self._deleted_record, records)) - assert len(deleted_records) == deleted_num, f"{stream_name} should have {deleted_num} deleted records returned" + # assert len(deleted_records) == deleted_num, f"{stream_name} should have {deleted_num} deleted records returned" @staticmethod def _deleted_record(record: AirbyteMessage) -> bool: diff --git a/airbyte-integrations/connectors/source-facebook-marketing/metadata.yaml b/airbyte-integrations/connectors/source-facebook-marketing/metadata.yaml index fa14a5309711f..9fad163e60013 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/metadata.yaml +++ b/airbyte-integrations/connectors/source-facebook-marketing/metadata.yaml @@ -10,12 +10,13 @@ data: connectorSubtype: api connectorType: source definitionId: e7778cfc-e97c-4458-9ecb-b4f2bba8946c - dockerImageTag: 2.0.1 + dockerImageTag: 2.1.3 dockerRepository: airbyte/source-facebook-marketing documentationUrl: https://docs.airbyte.com/integrations/sources/facebook-marketing githubIssueLabel: source-facebook-marketing icon: facebook.svg license: ELv2 + maxSecondsBetweenMessages: 3600 name: Facebook Marketing remoteRegistries: pypi: @@ -30,7 +31,7 @@ data: releases: breakingChanges: 2.0.0: - message: "All Ads-Insights-* streams now have updated schemas. Users will need to retest source confguration, refresh the source schema and reset affected streams after upgrading. For more information [visit](https://docs.airbyte.com/integrations/sources/facebook-marketing-migrations)" + message: "All Ads-Insights-* streams now have updated schemas. Users will need to retest source configuration, refresh the source schema and reset affected streams after upgrading. Please pay attention that data older than 37 months will become unavailable due to FaceBook limitations. For more information [visit](https://docs.airbyte.com/integrations/sources/facebook-marketing-migrations)" upgradeDeadline: "2024-03-17" scopedImpact: - scopeType: stream diff --git a/airbyte-integrations/connectors/source-facebook-marketing/poetry.lock b/airbyte-integrations/connectors/source-facebook-marketing/poetry.lock index 64bb0c4e79270..d561c19e6fb9c 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/poetry.lock +++ b/airbyte-integrations/connectors/source-facebook-marketing/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "aiohttp" @@ -112,13 +112,13 @@ frozenlist = ">=1.1.0" [[package]] name = "airbyte-cdk" -version = "0.62.1" +version = "0.70.0" description = "A framework for writing Airbyte Connectors." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte-cdk-0.62.1.tar.gz", hash = "sha256:3c934dd8b045079a9c807f699ca2012eaa5df755606e3f5b8b16247cbbd7e8c6"}, - {file = "airbyte_cdk-0.62.1-py3-none-any.whl", hash = "sha256:792399a602b7f5c3cd4ed2a5fce5910cfe3676b9b9199b9208f2d5236f5f42d3"}, + {file = "airbyte-cdk-0.70.0.tar.gz", hash = "sha256:09849c157946058cac3ff5023cb29f31c00aa899be668254968510414543ec2c"}, + {file = "airbyte_cdk-0.70.0-py3-none-any.whl", hash = "sha256:aac9c605b3de341b303ebf45b60148c3b35732383030cc5aab5cede40316bc00"}, ] [package.dependencies] @@ -142,8 +142,8 @@ requests-cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] @@ -236,13 +236,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -842,13 +842,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -1088,13 +1088,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -1194,13 +1194,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -1212,15 +1212,15 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" @@ -1243,19 +1243,19 @@ test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "tes [[package]] name = "setuptools" -version = "69.1.0" +version = "69.1.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.1.1-py3-none-any.whl", hash = "sha256:02fa291a0471b3a18b2b2481ed902af520c69e8ae0919c13da936542754b4c56"}, + {file = "setuptools-69.1.1.tar.gz", hash = "sha256:5c0806c7d9af348e6dd3777b4f4dbb42c7ad85b190104837488eab9a7c945cf8"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -1281,13 +1281,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -1306,13 +1306,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1520,4 +1520,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "cac4564b0e204ad1f4b5d0d3abce8cb436e80193351a8253cf3c27b677ee908e" +content-hash = "617998c72c122de3e2a110f98e24d540d822a00975f1df5a3b0033659e12cd94" diff --git a/airbyte-integrations/connectors/source-facebook-marketing/pyproject.toml b/airbyte-integrations/connectors/source-facebook-marketing/pyproject.toml index 15fd8ec79f6f4..00ab841e93e01 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/pyproject.toml +++ b/airbyte-integrations/connectors/source-facebook-marketing/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "2.0.1" +version = "2.1.3" name = "source-facebook-marketing" description = "Source implementation for Facebook Marketing." authors = [ "Airbyte ",] @@ -17,7 +17,7 @@ include = "source_facebook_marketing" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "==0.62.1" +airbyte-cdk = "^0" facebook-business = "19.0.0" cached-property = "==1.5.2" diff --git a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/base_insight_streams.py b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/base_insight_streams.py index 7a4962a30bf26..dc7e1836b9813 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/base_insight_streams.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/base_insight_streams.py @@ -116,6 +116,10 @@ def insights_job_timeout(self): def list_objects(self, params: Mapping[str, Any]) -> Iterable: """Because insights has very different read_records we don't need this method anymore""" + def _add_account_id(self, record: dict[str, Any], account_id: str): + if "account_id" not in record: + record["account_id"] = account_id + def read_records( self, sync_mode: SyncMode, @@ -131,6 +135,7 @@ def read_records( for obj in job.get_result(): data = obj.export_all_data() if self._response_data_is_valid(data): + self._add_account_id(data, account_id) yield data except FacebookBadObjectError as e: raise AirbyteTracedException( @@ -314,6 +319,10 @@ def _get_start_date(self) -> Mapping[str, pendulum.Date]: start_date = min(start_date, refresh_date) else: start_date = self._start_date + + if start_date < self._start_date: + logger.warning(f"Ignore provided state and start sync from start_date ({self._start_date}).") + start_date = max(start_date, self._start_date) if start_date < oldest_date: logger.warning( f"Loading insights older then {self.INSIGHTS_RETENTION_PERIOD} is not possible. Start sync from {oldest_date}." diff --git a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/common.py b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/common.py index b6e20f2c60a79..8578bb4714efc 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/common.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/common.py @@ -4,6 +4,7 @@ import http.client import logging +import re import sys from typing import Any @@ -84,6 +85,12 @@ def revert_request_record_limit(details): # set the flag to the api class that the `limit` param is restored details.get("args")[0].request_record_limit_is_reduced = False + def is_transient_cannot_include_error(exc: FacebookRequestError) -> bool: + """After migration to API v19.0, some customers randomly face a BAD_REQUEST error (OAuthException) with the pattern:"Cannot include ..." + According to the last comment in https://developers.facebook.com/community/threads/286697364476462/, this might be a transient issue that can be solved with a retry.""" + pattern = r"Cannot include .* in summary param because they weren't there while creating the report run." + return bool(exc.http_status() == http.client.BAD_REQUEST and re.search(pattern, exc.api_error_message())) + def should_retry_api_error(exc): if isinstance(exc, FacebookRequestError): call_rate_limit_error = exc.api_error_code() in FACEBOOK_RATE_LIMIT_ERROR_CODES @@ -98,6 +105,7 @@ def should_retry_api_error(exc): unknown_error, call_rate_limit_error, batch_timeout_error, + is_transient_cannot_include_error(exc), connection_reset_error, temporary_oauth_error, server_error, diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/integration/test_ads_insights_action_product_id.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/integration/test_ads_insights_action_product_id.py index bc0907575c56b..3fdcc12f0860d 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/integration/test_ads_insights_action_product_id.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/integration/test_ads_insights_action_product_id.py @@ -22,7 +22,7 @@ create_response_builder, find_template, ) -from airbyte_protocol.models import AirbyteStateMessage, SyncMode +from airbyte_protocol.models import AirbyteStateMessage, StreamDescriptor, SyncMode from source_facebook_marketing.streams.async_job import Status from .config import ACCESS_TOKEN, ACCOUNT_ID, DATE_FORMAT, END_DATE, NOW, START_DATE, ConfigBuilder @@ -419,7 +419,8 @@ def test_when_read_then_state_message_produced_and_state_match_start_interval(se ) output = self._read(config().with_account_ids([account_id]).with_start_date(start_date).with_end_date(end_date)) - cursor_value_from_state_message = output.most_recent_state.get(_STREAM_NAME, {}).get(account_id, {}).get(_CURSOR_FIELD) + cursor_value_from_state_message = output.most_recent_state.stream_state.dict().get(account_id, {}).get(_CURSOR_FIELD) + assert output.most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) assert cursor_value_from_state_message == start_date.strftime(DATE_FORMAT) @HttpMocker() @@ -462,8 +463,9 @@ def test_given_multiple_account_ids_when_read_then_state_produced_by_account_id_ ) output = self._read(config().with_account_ids([account_id_1, account_id_2]).with_start_date(start_date).with_end_date(end_date)) - cursor_value_from_state_account_1 = output.most_recent_state.get(_STREAM_NAME, {}).get(account_id_1, {}).get(_CURSOR_FIELD) - cursor_value_from_state_account_2 = output.most_recent_state.get(_STREAM_NAME, {}).get(account_id_2, {}).get(_CURSOR_FIELD) + cursor_value_from_state_account_1 = output.most_recent_state.stream_state.dict().get(account_id_1, {}).get(_CURSOR_FIELD) + cursor_value_from_state_account_2 = output.most_recent_state.stream_state.dict().get(account_id_2, {}).get(_CURSOR_FIELD) expected_cursor_value = start_date.strftime(DATE_FORMAT) + assert output.most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) assert cursor_value_from_state_account_1 == expected_cursor_value assert cursor_value_from_state_account_2 == expected_cursor_value diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/integration/test_videos.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/integration/test_videos.py index 17c88f1c9c61d..ee814fd201f0b 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/integration/test_videos.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/integration/test_videos.py @@ -224,7 +224,7 @@ def test_when_read_then_state_message_produced_and_state_match_latest_record(sel ) output = self._read(config().with_account_ids([account_id])) - cursor_value_from_state_message = output.most_recent_state.get(_STREAM_NAME, {}).get(account_id, {}).get(_CURSOR_FIELD) + cursor_value_from_state_message = output.most_recent_state.stream_state.dict().get(account_id, {}).get(_CURSOR_FIELD) assert cursor_value_from_state_message == max_cursor_value @HttpMocker() @@ -256,8 +256,8 @@ def test_given_multiple_account_ids_when_read_then_state_produced_by_account_id_ ) output = self._read(config().with_account_ids([account_id_1, account_id_2])) - cursor_value_from_state_account_1 = output.most_recent_state.get(_STREAM_NAME, {}).get(account_id_1, {}).get(_CURSOR_FIELD) - cursor_value_from_state_account_2 = output.most_recent_state.get(_STREAM_NAME, {}).get(account_id_2, {}).get(_CURSOR_FIELD) + cursor_value_from_state_account_1 = output.most_recent_state.stream_state.dict().get(account_id_1, {}).get(_CURSOR_FIELD) + cursor_value_from_state_account_2 = output.most_recent_state.stream_state.dict().get(account_id_2, {}).get(_CURSOR_FIELD) assert cursor_value_from_state_account_1 == max_cursor_value_account_id_1 assert cursor_value_from_state_account_2 == max_cursor_value_account_id_2 diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_base_insight_streams.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_base_insight_streams.py index 592a5c1eda689..ffa6850e66bb6 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_base_insight_streams.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_base_insight_streams.py @@ -97,7 +97,9 @@ def test_read_records_all(self, mocker, api, some_config): if read slice 2, 3, 1 state changed to 3 """ job = mocker.Mock(spec=InsightAsyncJob) - job.get_result.return_value = [mocker.Mock(), mocker.Mock(), mocker.Mock()] + rec = mocker.Mock() + rec.export_all_data.return_value = {} + job.get_result.return_value = [rec, rec, rec] job.interval = pendulum.Period(pendulum.date(2010, 1, 1), pendulum.date(2010, 1, 1)) stream = AdsInsights( api=api, @@ -124,8 +126,11 @@ def test_read_records_random_order(self, mocker, api, some_config): 2. if read slice 2, 3 state not changed if read slice 2, 3, 1 state changed to 3 """ + rec = mocker.Mock() + rec.export_all_data.return_value = {} + job = mocker.Mock(spec=AsyncJob) - job.get_result.return_value = [mocker.Mock(), mocker.Mock(), mocker.Mock()] + job.get_result.return_value = [rec, rec, rec] job.interval = pendulum.Period(pendulum.date(2010, 1, 1), pendulum.date(2010, 1, 1)) stream = AdsInsights( api=api, @@ -147,6 +152,38 @@ def test_read_records_random_order(self, mocker, api, some_config): assert len(records) == 3 + def test_read_records_add_account_id(self, mocker, api, some_config): + rec_without_account = mocker.Mock() + rec_without_account.export_all_data.return_value = {} + + rec_with_account = mocker.Mock() + rec_with_account.export_all_data.return_value = {"account_id": "some_account_id"} + + job = mocker.Mock(spec=AsyncJob) + job.get_result.return_value = [rec_without_account, rec_with_account] + job.interval = pendulum.Period(pendulum.date(2010, 1, 1), pendulum.date(2010, 1, 1)) + stream = AdsInsights( + api=api, + account_ids=some_config["account_ids"], + start_date=datetime(2010, 1, 1), + end_date=datetime(2011, 1, 1), + insights_lookback_window=28, + ) + + records = list( + stream.read_records( + sync_mode=SyncMode.incremental, + stream_slice={ + "insight_job": job, + "account_id": some_config["account_ids"][0], + }, + ) + ) + + assert len(records) == 2 + for record in records: + assert record.get("account_id") + @pytest.mark.parametrize( "state,result_state", [ @@ -334,9 +371,9 @@ def test_stream_slices_with_state(self, api, async_manager_mock, start_date, som args, kwargs = async_manager_mock.call_args generated_jobs = list(kwargs["jobs"]) # assert that we sync all periods including insight_lookback_period - assert len(generated_jobs) == (end_date.date() - (cursor_value.date() - stream.insights_lookback_period)).days + 1 - assert generated_jobs[0].interval.start == cursor_value.date() - stream.insights_lookback_period - assert generated_jobs[1].interval.start == cursor_value.date() - stream.insights_lookback_period + duration(days=1) + assert len(generated_jobs) == (end_date.date() - start_date).days + 1 + assert generated_jobs[0].interval.start == start_date.date() + assert generated_jobs[1].interval.start == start_date.date() + duration(days=1) def test_stream_slices_with_state_close_to_now(self, api, async_manager_mock, recent_start_date, some_config): """Stream will use start_date when close to now and start_date close to now""" @@ -363,15 +400,15 @@ def test_stream_slices_with_state_close_to_now(self, api, async_manager_mock, re async_manager_mock.assert_called_once() args, kwargs = async_manager_mock.call_args generated_jobs = list(kwargs["jobs"]) - assert len(generated_jobs) == (end_date.date() - (cursor_value.date() - stream.insights_lookback_period)).days + 1 - assert generated_jobs[0].interval.start == cursor_value.date() - stream.insights_lookback_period - assert generated_jobs[1].interval.start == cursor_value.date() - stream.insights_lookback_period + duration(days=1) + assert len(generated_jobs) == (end_date.date() - start_date).days + 1 + assert generated_jobs[0].interval.start == start_date.date() + assert generated_jobs[1].interval.start == start_date.date() + duration(days=1) @pytest.mark.parametrize("state_format", ["old_format", "new_format"]) def test_stream_slices_with_state_and_slices(self, api, async_manager_mock, start_date, some_config, state_format): """Stream will use cursor_value from state, but will skip saved slices""" - end_date = start_date + duration(days=10) - cursor_value = start_date + duration(days=5) + end_date = start_date + duration(days=40) + cursor_value = start_date + duration(days=32) if state_format == "old_format": state = { @@ -410,7 +447,7 @@ def test_stream_slices_with_state_and_slices(self, api, async_manager_mock, star async_manager_mock.assert_called_once() args, kwargs = async_manager_mock.call_args generated_jobs = list(kwargs["jobs"]) - assert len(generated_jobs) == (end_date.date() - (cursor_value.date() - stream.insights_lookback_period)).days + 1, "should be 34 slices because we ignore slices which are within insights_lookback_period" + assert len(generated_jobs) == (end_date.date() - (cursor_value.date() - stream.insights_lookback_period)).days + 1, "should be 37 slices because we ignore slices which are within insights_lookback_period" assert generated_jobs[0].interval.start == cursor_value.date() - stream.insights_lookback_period assert generated_jobs[1].interval.start == cursor_value.date() - stream.insights_lookback_period + duration(days=1) diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_errors.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_errors.py index cf748cee6b317..2ee4fa3b30a50 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_errors.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_errors.py @@ -346,7 +346,6 @@ def test_config_error_during_account_info_read(self, requests_mock, name, friend assert error.failure_type == FailureType.config_error assert friendly_msg in error.message - # @pytest.mark.parametrize("name, friendly_msg, config_error_response", [CONFIG_ERRORS[-1]]) @pytest.mark.parametrize("name, friendly_msg, config_error_response", CONFIG_ERRORS) def test_config_error_during_actual_nodes_read(self, requests_mock, name, friendly_msg, config_error_response): """Error raised during actual nodes read""" @@ -422,6 +421,37 @@ def test_config_error_insights_during_actual_nodes_read(self, requests_mock, nam assert error.failure_type == FailureType.config_error assert friendly_msg in error.message + def test_retry_for_cannot_include_error(self, requests_mock): + """Error raised randomly for insights stream. Oncall: https://github.com/airbytehq/oncall/issues/4868 """ + + api = API(access_token=some_config["access_token"], page_size=100) + stream = AdsInsights( + api=api, + account_ids=some_config["account_ids"], + start_date=datetime(2010, 1, 1), + end_date=datetime(2011, 1, 1), + fields=["account_id", "account_currency"], + insights_lookback_window=28, + ) + requests_mock.register_uri("GET", f"{act_url}", [ad_account_response]) + response = { + "status_code": 400, + "json": { + "error": { + "message": "(#100) Cannot include video_avg_time_watched_actions, video_continuous_2_sec_watched_actions in summary param because they weren't there while creating the report run.", + "type": "OAuthException", + "code": 100 + } + }, + } + call_insights = requests_mock.register_uri("GET", f"{act_url}insights", [response]) + + try: + slice = list(stream.stream_slices(sync_mode=SyncMode.full_refresh, stream_state={}))[0] + list(stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice=slice, stream_state={})) + except Exception: + assert call_insights.call_count == 5 + @pytest.mark.parametrize( "failure_response", ( diff --git a/airbyte-integrations/connectors/source-facebook-pages/Dockerfile b/airbyte-integrations/connectors/source-facebook-pages/Dockerfile deleted file mode 100644 index e2436d7d20f98..0000000000000 --- a/airbyte-integrations/connectors/source-facebook-pages/Dockerfile +++ /dev/null @@ -1,38 +0,0 @@ -FROM python:3.9.11-alpine3.15 as base - -# build and load all requirements -FROM base as builder -WORKDIR /airbyte/integration_code - -# upgrade pip to the latest version -RUN apk --no-cache upgrade \ - && pip install --upgrade pip \ - && apk --no-cache add tzdata build-base - - -COPY setup.py ./ -# install necessary packages to a temporary folder -RUN pip install --prefix=/install . - -# build a clean environment -FROM base -WORKDIR /airbyte/integration_code - -# copy all loaded and built libraries to a pure basic image -COPY --from=builder /install /usr/local -# add default timezone settings -COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime -RUN echo "Etc/UTC" > /etc/timezone - -# bash is installed for more convenient debugging. -RUN apk --no-cache add bash - -# copy payload code only -COPY main.py ./ -COPY source_facebook_pages ./source_facebook_pages - -ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] - -LABEL io.airbyte.version=0.3.0 -LABEL io.airbyte.name=airbyte/source-facebook-pages diff --git a/airbyte-integrations/connectors/source-facebook-pages/README.md b/airbyte-integrations/connectors/source-facebook-pages/README.md index 98a52a206125a..9349965b5e1ad 100644 --- a/airbyte-integrations/connectors/source-facebook-pages/README.md +++ b/airbyte-integrations/connectors/source-facebook-pages/README.md @@ -17,19 +17,70 @@ and place them into `secrets/config.json`. ### Locally running the connector docker image -#### Build -**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):** + + +#### Use `airbyte-ci` to build your connector +The Airbyte way of building this connector is to use our `airbyte-ci` tool. +You can follow install instructions [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md#L1). +Then running the following command will build your connector: + ```bash -airbyte-ci connectors --name=source-facebook-pages build +airbyte-ci connectors --name source-facebook-pages build +``` +Once the command is done, you will find your connector image in your local docker registry: `airbyte/source-facebook-pages:dev`. + +##### Customizing our build process +When contributing on our connector you might need to customize the build process to add a system dependency or set an env var. +You can customize our build process by adding a `build_customization.py` module to your connector. +This module should contain a `pre_connector_install` and `post_connector_install` async function that will mutate the base image and the connector container respectively. +It will be imported at runtime by our build process and the functions will be called if they exist. + +Here is an example of a `build_customization.py` module: +```python +from __future__ import annotations + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + # Feel free to check the dagger documentation for more information on the Container object and its methods. + # https://dagger-io.readthedocs.io/en/sdk-python-v0.6.4/ + from dagger import Container + + +async def pre_connector_install(base_image_container: Container) -> Container: + return await base_image_container.with_env_variable("MY_PRE_BUILD_ENV_VAR", "my_pre_build_env_var_value") + +async def post_connector_install(connector_container: Container) -> Container: + return await connector_container.with_env_variable("MY_POST_BUILD_ENV_VAR", "my_post_build_env_var_value") ``` -An image will be built with the tag `airbyte/source-facebook-pages:dev`. +#### Build your own connector image +This connector is built using our dynamic built process in `airbyte-ci`. +The base image used to build it is defined within the metadata.yaml file under the `connectorBuildOptions`. +The build logic is defined using [Dagger](https://dagger.io/) [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/pipelines/builds/python_connectors.py). +It does not rely on a Dockerfile. + +If you would like to patch our connector and build your own a simple approach would be to: + +1. Create your own Dockerfile based on the latest version of the connector image. +```Dockerfile +FROM airbyte/source-facebook-pages:latest + +COPY . ./airbyte/integration_code +RUN pip install ./airbyte/integration_code -**Via `docker build`:** +# The entrypoint and default env vars are already set in the base image +# ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +# ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] +``` +Please use this as an example. This is not optimized. + +2. Build your image: ```bash docker build -t airbyte/source-facebook-pages:dev . +# Running the spec command against your patched connector +docker run airbyte/source-facebook-pages:dev spec ``` - #### Run Then run any of the connector commands as follows: ``` @@ -64,4 +115,3 @@ You've checked out the repo, implemented a million dollar feature, and you're re 5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). 6. Pat yourself on the back for being an awesome contributor. 7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. - diff --git a/airbyte-integrations/connectors/source-facebook-pages/metadata.yaml b/airbyte-integrations/connectors/source-facebook-pages/metadata.yaml index 2a73177518457..f05663b2da97f 100644 --- a/airbyte-integrations/connectors/source-facebook-pages/metadata.yaml +++ b/airbyte-integrations/connectors/source-facebook-pages/metadata.yaml @@ -5,27 +5,38 @@ data: allowedHosts: hosts: - graph.facebook.com + connectorBuildOptions: + baseImage: docker.io/airbyte/python-connector-base:1.2.0@sha256:c22a9d97464b69d6ef01898edf3f8612dc11614f05a84984451dde195f337db9 connectorSubtype: api connectorType: source definitionId: 010eb12f-837b-4685-892d-0a39f76a98f5 - dockerImageTag: 0.3.0 + dockerImageTag: 1.0.0 dockerRepository: airbyte/source-facebook-pages documentationUrl: https://docs.airbyte.com/integrations/sources/facebook-pages githubIssueLabel: source-facebook-pages icon: facebook.svg license: ELv2 name: Facebook Pages - remoteRegistries: - pypi: - enabled: true - packageName: airbyte-source-facebook-pages registries: cloud: - enabled: false # hide from cloud until https://github.com/airbytehq/airbyte/issues/25515 is finished + enabled: false oss: enabled: true releaseStage: beta + remoteRegistries: + pypi: + enabled: true + packageName: airbyte-source-facebook-pages supportLevel: community + releases: + breakingChanges: + 1.0.0: + message: >- + This version updates the API version from v15 to v19. Deprecated fields have been removed and new fields have been added for the stream `Page`. + upgradeDeadline: 2024-04-18 + scopedImpact: + - scopeType: stream + impactedScopes: ["page"] tags: - language:python - cdk:low-code diff --git a/airbyte-integrations/connectors/source-facebook-pages/poetry.lock b/airbyte-integrations/connectors/source-facebook-pages/poetry.lock new file mode 100644 index 0000000000000..528be9ed40c62 --- /dev/null +++ b/airbyte-integrations/connectors/source-facebook-pages/poetry.lock @@ -0,0 +1,1033 @@ +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. + +[[package]] +name = "airbyte-cdk" +version = "0.70.1" +description = "A framework for writing Airbyte Connectors." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte-cdk-0.70.1.tar.gz", hash = "sha256:fd27815350b8155fc42afd43d005a8d321c9f309c1adaedabbb0b74e9788648f"}, + {file = "airbyte_cdk-0.70.1-py3-none-any.whl", hash = "sha256:856b51c988c8e348f53df2806d8bf929919f220f5784696cf9a9578d7eb16e72"}, +] + +[package.dependencies] +airbyte-protocol-models = "0.5.1" +backoff = "*" +cachetools = "*" +Deprecated = ">=1.2,<2.0" +dpath = ">=2.0.1,<2.1.0" +genson = "1.2.2" +isodate = ">=0.6.1,<0.7.0" +Jinja2 = ">=3.1.2,<3.2.0" +jsonref = ">=0.2,<1.0" +jsonschema = ">=3.2.0,<3.3.0" +pendulum = "<3.0.0" +pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" +python-dateutil = "*" +PyYAML = ">=6.0.1" +requests = "*" +requests-cache = "*" +wcmatch = "8.4" + +[package.extras] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] +vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] + +[[package]] +name = "airbyte-protocol-models" +version = "0.5.1" +description = "Declares the Airbyte Protocol." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, + {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, +] + +[package.dependencies] +pydantic = ">=1.9.2,<2.0.0" + +[[package]] +name = "atomicwrites" +version = "1.4.1" +description = "Atomic file writes." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, +] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "bracex" +version = "2.4" +description = "Bash style brace expander." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + +[[package]] +name = "cachetools" +version = "5.3.3" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "dpath" +version = "2.0.8" +description = "Filesystem-like pathing and searching for dictionaries" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, + {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "genson" +version = "1.2.2" +description = "GenSON is a powerful, user-friendly JSON Schema generator." +optional = false +python-versions = "*" +files = [ + {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, +] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jsonref" +version = "0.3.0" +description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." +optional = false +python-versions = ">=3.3,<4.0" +files = [ + {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, + {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, +] + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0" +setuptools = "*" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "packaging" +version = "24.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, +] + +[[package]] +name = "pendulum" +version = "2.1.2" +description = "Python datetimes made easy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, +] + +[package.dependencies] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] + +[[package]] +name = "pydantic" +version = "1.10.14" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, + {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, + {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, + {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, + {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, + {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, + {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, + {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, + {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, + {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + +[[package]] +name = "pyrsistent" +version = "0.20.0" +description = "Persistent/Functional/Immutable data structures" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, +] + +[[package]] +name = "pytest" +version = "6.2.5" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, + {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, +] + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +toml = "*" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] + +[[package]] +name = "pytest-mock" +version = "3.12.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, + {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, +] + +[package.dependencies] +pytest = ">=5.0" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-cache" +version = "1.2.0" +description = "A persistent cache for python requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, +] + +[package.dependencies] +attrs = ">=21.2" +cattrs = ">=22.2" +platformdirs = ">=2.5" +requests = ">=2.22" +url-normalize = ">=1.4" +urllib3 = ">=1.25.5" + +[package.extras] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] +bson = ["bson (>=0.5)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] +dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] +json = ["ujson (>=5.4)"] +mongodb = ["pymongo (>=3)"] +redis = ["redis (>=3)"] +security = ["itsdangerous (>=2.0)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "requests-mock" +version = "1.11.0" +description = "Mock out responses from the requests package" +optional = false +python-versions = "*" +files = [ + {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"}, + {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"}, +] + +[package.dependencies] +requests = ">=2.3,<3" +six = "*" + +[package.extras] +fixture = ["fixtures"] +test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] + +[[package]] +name = "setuptools" +version = "69.2.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "typing-extensions" +version = "4.10.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, +] + +[[package]] +name = "url-normalize" +version = "1.4.3" +description = "URL normalization for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, + {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wcmatch" +version = "8.4" +description = "Wildcard/glob file name matcher." +optional = false +python-versions = ">=3.7" +files = [ + {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, + {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, +] + +[package.dependencies] +bracex = ">=2.1.1" + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9,<3.12" +content-hash = "4624f76d4dc767d9b8cf0fe2a56e8b4b407596942e16b39cd0d2940baa2ec59b" diff --git a/airbyte-integrations/connectors/source-facebook-pages/pyproject.toml b/airbyte-integrations/connectors/source-facebook-pages/pyproject.toml new file mode 100644 index 0000000000000..d6239fc95d51c --- /dev/null +++ b/airbyte-integrations/connectors/source-facebook-pages/pyproject.toml @@ -0,0 +1,28 @@ +[build-system] +requires = [ "poetry-core>=1.0.0",] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +version = "1.0.0" +name = "source-facebook-pages" +description = "Source implementation for Facebook Pages." +authors = [ "Airbyte ",] +license = "ELv2" +readme = "README.md" +documentation = "https://docs.airbyte.com/integrations/sources/facebook-pages" +homepage = "https://airbyte.com" +repository = "https://github.com/airbytehq/airbyte" +[[tool.poetry.packages]] +include = "source_facebook_pages" + +[tool.poetry.dependencies] +python = "^3.9,<3.12" +airbyte-cdk = "^0" + +[tool.poetry.scripts] +source-facebook-pages = "source_facebook_pages.run:run" + +[tool.poetry.group.dev.dependencies] +pytest = "^6.1" +pytest-mock = "^3.6.1" +requests-mock = "^1.11.0" diff --git a/airbyte-integrations/connectors/source-facebook-pages/setup.py b/airbyte-integrations/connectors/source-facebook-pages/setup.py deleted file mode 100644 index 808c6ffb27c7a..0000000000000 --- a/airbyte-integrations/connectors/source-facebook-pages/setup.py +++ /dev/null @@ -1,42 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = [ - "airbyte-cdk", -] - -TEST_REQUIREMENTS = ["pytest~=6.1", "pytest-mock~=3.6.1", "requests-mock"] - -setup( - entry_points={ - "console_scripts": [ - "source-facebook-pages=source_facebook_pages.run:run", - ], - }, - name="source_facebook_pages", - description="Source implementation for Facebook Pages.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={ - "": [ - # Include yaml files in the package (if any) - "*.yml", - "*.yaml", - # Include all json files in the package, up to 4 levels deep - "*.json", - "*/*.json", - "*/*/*.json", - "*/*/*/*.json", - "*/*/*/*/*.json", - ] - }, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/source-facebook-pages/source_facebook_pages/manifest.yaml b/airbyte-integrations/connectors/source-facebook-pages/source_facebook_pages/manifest.yaml index 51f1081cae506..5f72679f58ea3 100644 --- a/airbyte-integrations/connectors/source-facebook-pages/source_facebook_pages/manifest.yaml +++ b/airbyte-integrations/connectors/source-facebook-pages/source_facebook_pages/manifest.yaml @@ -8,7 +8,7 @@ definitions: extractor: field_path: ["data"] requester: - url_base: "https://graph.facebook.com/v15.0" + url_base: "https://graph.facebook.com/v19.0" http_method: "GET" error_handler: type: CompositeErrorHandler @@ -142,7 +142,6 @@ definitions: 'members', 'merchant_review_status', 'messenger_ads_default_icebreakers', - 'messenger_ads_default_page_welcome_message', 'messenger_ads_default_quick_replies', 'messenger_ads_quick_replies_type', 'mission', @@ -200,6 +199,7 @@ definitions: 'whatsapp_number', 'written_by', 'albums', + 'call_to_actions', 'canvas_elements', 'events', 'feed', @@ -210,7 +210,12 @@ definitions: 'likes', 'live_videos', 'photos', + 'posts', 'product_catalogs', + 'published_posts', + 'ratings', + 'tabs', + 'tagged', 'rtb_dynamic_posts', 'video_lists', 'videos', diff --git a/airbyte-integrations/connectors/source-facebook-pages/source_facebook_pages/schemas/page.json b/airbyte-integrations/connectors/source-facebook-pages/source_facebook_pages/schemas/page.json index 7e317e9824295..b6d4be6011f8e 100644 --- a/airbyte-integrations/connectors/source-facebook-pages/source_facebook_pages/schemas/page.json +++ b/airbyte-integrations/connectors/source-facebook-pages/source_facebook_pages/schemas/page.json @@ -197,9 +197,6 @@ "instagram_business_account": { "$ref": "iguser.json" }, - "instant_articles_review_status": { - "type": ["string", "null"] - }, "is_always_open": { "type": ["boolean", "null"] }, @@ -230,15 +227,9 @@ "is_unclaimed": { "type": ["boolean", "null"] }, - "is_verified": { - "type": ["boolean", "null"] - }, "is_webhooks_subscribed": { "type": ["boolean", "null"] }, - "keywords": { - "type": ["string", "null"] - }, "leadgen_tos_acceptance_time": { "type": ["string", "null"], "format": "date-time" @@ -258,9 +249,6 @@ "members": { "type": ["string", "null"] }, - "merchant_id": { - "type": ["string", "null"] - }, "merchant_review_status": { "type": ["string", "null"] }, @@ -273,9 +261,6 @@ "type": ["string", "null"] } }, - "messenger_ads_default_page_welcome_message": { - "$ref": "messengerdestinationpagewelcomemessage.json" - }, "messenger_ads_default_quick_replies": { "type": ["array", "null"], "items": { @@ -285,9 +270,6 @@ "messenger_ads_quick_replies_type": { "type": ["string", "null"] }, - "mini_shop_storefront": { - "$ref": "shop.json" - }, "mission": { "type": ["string", "null"] }, @@ -426,9 +408,6 @@ "supports_donate_button_in_live_video": { "type": ["boolean", "null"] }, - "supports_instant_articles": { - "type": ["boolean", "null"] - }, "talking_about_count": { "type": ["integer", "null"] }, @@ -465,39 +444,6 @@ "written_by": { "type": ["string", "null"] }, - "admin_notes": { - "type": ["object", "null"], - "properties": { - "data": { - "type": ["array", "null"], - "items": { - "$ref": "pageadminnote.json" - } - }, - "paging": { - "type": ["object", "null"], - "properties": { - "previous": { - "type": ["string", "null"] - }, - "next": { - "type": ["string", "null"] - }, - "cursors": { - "type": "object", - "properties": { - "before": { - "type": ["string", "null"] - }, - "after": { - "type": ["string", "null"] - } - } - } - } - } - } - }, "ads_posts": { "type": ["object", "null"], "properties": { @@ -795,105 +741,6 @@ } } }, - "claimed_urls": { - "type": ["object", "null"], - "properties": { - "data": { - "type": ["array", "null"], - "items": { - "$ref": "url.json" - } - }, - "paging": { - "type": ["object", "null"], - "properties": { - "previous": { - "type": ["string", "null"] - }, - "next": { - "type": ["string", "null"] - }, - "cursors": { - "type": "object", - "properties": { - "before": { - "type": ["string", "null"] - }, - "after": { - "type": ["string", "null"] - } - } - } - } - } - } - }, - "commerce_eligibility": { - "type": ["object", "null"], - "properties": { - "data": { - "type": ["array", "null"], - "items": { - "$ref": "pagecommerceeligibility.json" - } - }, - "paging": { - "type": ["object", "null"], - "properties": { - "previous": { - "type": ["string", "null"] - }, - "next": { - "type": ["string", "null"] - }, - "cursors": { - "type": "object", - "properties": { - "before": { - "type": ["string", "null"] - }, - "after": { - "type": ["string", "null"] - } - } - } - } - } - } - }, - "commerce_merchant_settings": { - "type": ["object", "null"], - "properties": { - "data": { - "type": ["array", "null"], - "items": { - "$ref": "commercemerchantsettings.json" - } - }, - "paging": { - "type": ["object", "null"], - "properties": { - "previous": { - "type": ["string", "null"] - }, - "next": { - "type": ["string", "null"] - }, - "cursors": { - "type": "object", - "properties": { - "before": { - "type": ["string", "null"] - }, - "after": { - "type": ["string", "null"] - } - } - } - } - } - } - }, "commerce_orders": { "type": ["object", "null"], "properties": { @@ -927,72 +774,6 @@ } } }, - "commerce_payouts": { - "type": ["object", "null"], - "properties": { - "data": { - "type": ["array", "null"], - "items": { - "$ref": "commercepayout.json" - } - }, - "paging": { - "type": ["object", "null"], - "properties": { - "previous": { - "type": ["string", "null"] - }, - "next": { - "type": ["string", "null"] - }, - "cursors": { - "type": "object", - "properties": { - "before": { - "type": ["string", "null"] - }, - "after": { - "type": ["string", "null"] - } - } - } - } - } - } - }, - "commerce_transactions": { - "type": ["object", "null"], - "properties": { - "data": { - "type": ["array", "null"], - "items": { - "$ref": "commerceordertransactiondetail.json" - } - }, - "paging": { - "type": ["object", "null"], - "properties": { - "previous": { - "type": ["string", "null"] - }, - "next": { - "type": ["string", "null"] - }, - "cursors": { - "type": "object", - "properties": { - "before": { - "type": ["string", "null"] - }, - "after": { - "type": ["string", "null"] - } - } - } - } - } - } - }, "conversations": { "type": ["object", "null"], "properties": { @@ -1026,39 +807,6 @@ } } }, - "copyright_whitelisted_partners": { - "type": ["object", "null"], - "properties": { - "data": { - "type": ["array", "null"], - "items": { - "$ref": "profile.json" - } - }, - "paging": { - "type": ["object", "null"], - "properties": { - "previous": { - "type": ["string", "null"] - }, - "next": { - "type": ["string", "null"] - }, - "cursors": { - "type": "object", - "properties": { - "before": { - "type": ["string", "null"] - }, - "after": { - "type": ["string", "null"] - } - } - } - } - } - } - }, "crosspost_whitelisted_pages": { "type": ["object", "null"], "properties": { @@ -1422,72 +1170,6 @@ } } }, - "instant_articles": { - "type": ["object", "null"], - "properties": { - "data": { - "type": ["array", "null"], - "items": { - "$ref": "instantarticle.json" - } - }, - "paging": { - "type": ["object", "null"], - "properties": { - "previous": { - "type": ["string", "null"] - }, - "next": { - "type": ["string", "null"] - }, - "cursors": { - "type": "object", - "properties": { - "before": { - "type": ["string", "null"] - }, - "after": { - "type": ["string", "null"] - } - } - } - } - } - } - }, - "instant_articles_insights": { - "type": ["object", "null"], - "properties": { - "data": { - "type": ["array", "null"], - "items": { - "$ref": "instantarticleinsightsqueryresult.json" - } - }, - "paging": { - "type": ["object", "null"], - "properties": { - "previous": { - "type": ["string", "null"] - }, - "next": { - "type": ["string", "null"] - }, - "cursors": { - "type": "object", - "properties": { - "before": { - "type": ["string", "null"] - }, - "after": { - "type": ["string", "null"] - } - } - } - } - } - } - }, "leadgen_forms": { "type": ["object", "null"], "properties": { @@ -1719,39 +1401,6 @@ } } }, - "nativeoffers": { - "type": ["object", "null"], - "properties": { - "data": { - "type": ["array", "null"], - "items": { - "$ref": "nativeoffer.json" - } - }, - "paging": { - "type": ["object", "null"], - "properties": { - "previous": { - "type": ["string", "null"] - }, - "next": { - "type": ["string", "null"] - }, - "cursors": { - "type": "object", - "properties": { - "before": { - "type": ["string", "null"] - }, - "after": { - "type": ["string", "null"] - } - } - } - } - } - } - }, "page_backed_instagram_accounts": { "type": ["object", "null"], "properties": { diff --git a/airbyte-integrations/connectors/source-facebook-pages/source_facebook_pages/schemas/shared/commercemerchantsettings.json b/airbyte-integrations/connectors/source-facebook-pages/source_facebook_pages/schemas/shared/commercemerchantsettings.json deleted file mode 100644 index 9566ea9ed0d87..0000000000000 --- a/airbyte-integrations/connectors/source-facebook-pages/source_facebook_pages/schemas/shared/commercemerchantsettings.json +++ /dev/null @@ -1,86 +0,0 @@ -{ - "type": ["object", "null"], - "properties": { - "braintree_merchant_id": { - "type": ["string", "null"] - }, - "checkout_message": { - "type": ["string", "null"] - }, - "contact_email": { - "type": ["string", "null"] - }, - "cta": { - "type": ["string", "null"] - }, - "disable_checkout_urls": { - "type": ["boolean", "null"] - }, - "display_name": { - "type": ["string", "null"] - }, - "external_merchant_id": { - "type": ["string", "null"] - }, - "facebook_channel": { - "type": ["string", "null"] - }, - "feature_eligibility": { - "type": ["string", "null"] - }, - "has_discount_code": { - "type": ["boolean", "null"] - }, - "has_onsite_intent": { - "type": ["boolean", "null"] - }, - "id": { - "type": ["string", "null"] - }, - "instagram_channel": { - "type": ["string", "null"] - }, - "merchant_alert_email": { - "type": ["string", "null"] - }, - "merchant_status": { - "type": ["string", "null"] - }, - "onsite_commerce_merchant": { - "type": ["string", "null"] - }, - "payment_provider": { - "type": ["string", "null"] - }, - "privacy_url_by_locale": { - "type": ["string", "null"] - }, - "review_rejection_messages": { - "type": ["array", "null"], - "items": { - "type": ["string", "null"] - } - }, - "review_rejection_reasons": { - "type": ["array", "null"], - "items": { - "type": ["string", "null"] - } - }, - "supported_card_types": { - "type": ["array", "null"], - "items": { - "type": ["string", "null"] - } - }, - "terms": { - "type": ["string", "null"] - }, - "terms_url_by_locale": { - "type": ["string", "null"] - }, - "whatsapp_channel": { - "type": ["string", "null"] - } - } -} diff --git a/airbyte-integrations/connectors/source-facebook-pages/source_facebook_pages/schemas/shared/commerceordertransactiondetail.json b/airbyte-integrations/connectors/source-facebook-pages/source_facebook_pages/schemas/shared/commerceordertransactiondetail.json deleted file mode 100644 index 2a6f76a68408b..0000000000000 --- a/airbyte-integrations/connectors/source-facebook-pages/source_facebook_pages/schemas/shared/commerceordertransactiondetail.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "type": ["object", "null"], - "properties": { - "net_payment_amount": { - "type": ["string", "null"] - }, - "payout_reference_id": { - "type": ["string", "null"] - }, - "processing_fee": { - "type": ["string", "null"] - }, - "tax_rate": { - "type": ["string", "null"] - }, - "transaction_date": { - "type": ["string", "null"] - }, - "transaction_type": { - "type": ["string", "null"] - }, - "transfer_id": { - "type": ["string", "null"] - } - } -} diff --git a/airbyte-integrations/connectors/source-facebook-pages/source_facebook_pages/schemas/shared/commercepayout.json b/airbyte-integrations/connectors/source-facebook-pages/source_facebook_pages/schemas/shared/commercepayout.json deleted file mode 100644 index 5919931090602..0000000000000 --- a/airbyte-integrations/connectors/source-facebook-pages/source_facebook_pages/schemas/shared/commercepayout.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "type": ["object", "null"], - "properties": { - "amount": { - "type": ["string", "null"] - }, - "payout_date": { - "type": ["string", "null"] - }, - "payout_reference_id": { - "type": ["string", "null"] - }, - "status": { - "type": ["string", "null"] - }, - "transfer_id": { - "type": ["string", "null"] - } - } -} diff --git a/airbyte-integrations/connectors/source-facebook-pages/source_facebook_pages/schemas/shared/instantarticle.json b/airbyte-integrations/connectors/source-facebook-pages/source_facebook_pages/schemas/shared/instantarticle.json deleted file mode 100644 index df63505d41cef..0000000000000 --- a/airbyte-integrations/connectors/source-facebook-pages/source_facebook_pages/schemas/shared/instantarticle.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "type": ["object", "null"], - "properties": { - "canonical_url": { - "type": ["string", "null"] - }, - "development_mode": { - "type": ["boolean", "null"] - }, - "html_source": { - "type": ["string", "null"] - }, - "id": { - "type": ["string", "null"] - }, - "most_recent_import_status": { - "type": ["string", "null"] - }, - "publish_status": { - "type": ["string", "null"] - }, - "published": { - "type": ["boolean", "null"] - } - } -} diff --git a/airbyte-integrations/connectors/source-facebook-pages/source_facebook_pages/schemas/shared/instantarticleinsightsqueryresult.json b/airbyte-integrations/connectors/source-facebook-pages/source_facebook_pages/schemas/shared/instantarticleinsightsqueryresult.json deleted file mode 100644 index 74d5ddf67633e..0000000000000 --- a/airbyte-integrations/connectors/source-facebook-pages/source_facebook_pages/schemas/shared/instantarticleinsightsqueryresult.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "type": ["object", "null"], - "properties": { - "breakdowns": { - "type": ["string", "null"] - }, - "name": { - "type": ["string", "null"] - }, - "time": { - "type": ["string", "null"], - "format": "date-time" - }, - "value": { - "type": ["string", "null"] - } - } -} diff --git a/airbyte-integrations/connectors/source-facebook-pages/source_facebook_pages/schemas/shared/pagecommerceeligibility.json b/airbyte-integrations/connectors/source-facebook-pages/source_facebook_pages/schemas/shared/pagecommerceeligibility.json deleted file mode 100644 index ac48611309c20..0000000000000 --- a/airbyte-integrations/connectors/source-facebook-pages/source_facebook_pages/schemas/shared/pagecommerceeligibility.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "type": ["object", "null"], - "properties": { - "offsite": { - "type": ["string", "null"] - }, - "onsite": { - "type": ["string", "null"] - } - } -} diff --git a/airbyte-integrations/connectors/source-facebook-pages/unit_tests/initial_record.json b/airbyte-integrations/connectors/source-facebook-pages/unit_tests/initial_record.json index 0193cd73819ba..e1d164ae8c8e0 100644 --- a/airbyte-integrations/connectors/source-facebook-pages/unit_tests/initial_record.json +++ b/airbyte-integrations/connectors/source-facebook-pages/unit_tests/initial_record.json @@ -231,7 +231,7 @@ "before": "cursor", "after": "cursor" }, - "next": "https://graph.facebook.com/v15.0/" + "next": "https://graph.facebook.com/v19.0/" } }, "indexed_videos": { diff --git a/airbyte-integrations/connectors/source-facebook-pages/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-facebook-pages/unit_tests/test_streams.py index ddad5194e5e30..2de8bd6921215 100644 --- a/airbyte-integrations/connectors/source-facebook-pages/unit_tests/test_streams.py +++ b/airbyte-integrations/connectors/source-facebook-pages/unit_tests/test_streams.py @@ -10,7 +10,7 @@ def test_retries(mocker, requests_mock, error_code): mocker.patch("time.sleep") requests_mock.get("https://graph.facebook.com/1?fields=access_token&access_token=token", json={"access_token": "access"}) - requests_mock.get("https://graph.facebook.com/v15.0/1", [{"status_code": error_code}, {"json": {"data": {}}}]) + requests_mock.get("https://graph.facebook.com/v19.0/1", [{"status_code": error_code}, {"json": {"data": {}}}]) source = SourceFacebookPages() stream = source.streams({"page_id": 1, "access_token": "token"})[0] for slice_ in stream.stream_slices(sync_mode="full_refresh"): diff --git a/airbyte-integrations/connectors/source-facebook-pages/unit_tests/transformed_record.json b/airbyte-integrations/connectors/source-facebook-pages/unit_tests/transformed_record.json index 93a6a6a74a674..c7066f059a0cf 100644 --- a/airbyte-integrations/connectors/source-facebook-pages/unit_tests/transformed_record.json +++ b/airbyte-integrations/connectors/source-facebook-pages/unit_tests/transformed_record.json @@ -231,7 +231,7 @@ "before": "cursor", "after": "cursor" }, - "next": "https://graph.facebook.com/v15.0/" + "next": "https://graph.facebook.com/v19.0/" } }, "indexed_videos": { diff --git a/airbyte-integrations/connectors/source-faker/metadata.yaml b/airbyte-integrations/connectors/source-faker/metadata.yaml index 63d6337577ec3..e49c21817e3f6 100644 --- a/airbyte-integrations/connectors/source-faker/metadata.yaml +++ b/airbyte-integrations/connectors/source-faker/metadata.yaml @@ -9,7 +9,7 @@ data: connectorSubtype: api connectorType: source definitionId: dfd88b22-b603-4c3d-aad7-3701784586b1 - dockerImageTag: 6.0.2 + dockerImageTag: 6.1.0 dockerRepository: airbyte/source-faker documentationUrl: https://docs.airbyte.com/integrations/sources/faker githubIssueLabel: source-faker diff --git a/airbyte-integrations/connectors/source-faker/poetry.lock b/airbyte-integrations/connectors/source-faker/poetry.lock index e68b5b3b7c47e..f326061421f89 100644 --- a/airbyte-integrations/connectors/source-faker/poetry.lock +++ b/airbyte-integrations/connectors/source-faker/poetry.lock @@ -2,13 +2,13 @@ [[package]] name = "airbyte-cdk" -version = "0.62.1" +version = "0.73.0" description = "A framework for writing Airbyte Connectors." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "airbyte-cdk-0.62.1.tar.gz", hash = "sha256:3c934dd8b045079a9c807f699ca2012eaa5df755606e3f5b8b16247cbbd7e8c6"}, - {file = "airbyte_cdk-0.62.1-py3-none-any.whl", hash = "sha256:792399a602b7f5c3cd4ed2a5fce5910cfe3676b9b9199b9208f2d5236f5f42d3"}, + {file = "airbyte-cdk-0.73.0.tar.gz", hash = "sha256:a03e0265a8a4afb1378d285993624659d9f481404aaf69cf7c0a5ddad3568ea2"}, + {file = "airbyte_cdk-0.73.0-py3-none-any.whl", hash = "sha256:339e42a7602461073a69bf0c4e11be26a7eea3157def43ffecdf9d0d73f32c6f"}, ] [package.dependencies] @@ -32,8 +32,8 @@ requests-cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] @@ -104,13 +104,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -478,13 +478,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -713,13 +713,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -748,7 +748,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -756,16 +755,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -782,7 +773,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -790,7 +780,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -819,13 +808,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -837,15 +826,15 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" @@ -868,19 +857,19 @@ test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "tes [[package]] name = "setuptools" -version = "69.1.0" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -906,13 +895,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -931,13 +920,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1042,4 +1031,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "2714d95fc3a63bfd137182625175cb88f55987b73bfad9cb2a2859c9be10ed44" +content-hash = "2cb19e13746801bc7484acb7ea3e52a1c3834a56d6708d23af52f944eb2c89d0" diff --git a/airbyte-integrations/connectors/source-faker/pyproject.toml b/airbyte-integrations/connectors/source-faker/pyproject.toml index d0c56fe9b1775..295b71f188a9f 100644 --- a/airbyte-integrations/connectors/source-faker/pyproject.toml +++ b/airbyte-integrations/connectors/source-faker/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "6.0.2" +version = "6.1.0" name = "source-faker" description = "Source implementation for fake but realistic looking data." authors = [ "Airbyte ",] @@ -17,7 +17,7 @@ include = "source_faker" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "==0.62.1" +airbyte-cdk = "^0.73.0" mimesis = "==6.1.1" [tool.poetry.scripts] diff --git a/airbyte-integrations/connectors/source-faker/source_faker/record_data/products.json b/airbyte-integrations/connectors/source-faker/source_faker/record_data/products.json index 3969d502111a4..4b7650ddf91f7 100644 --- a/airbyte-integrations/connectors/source-faker/source_faker/record_data/products.json +++ b/airbyte-integrations/connectors/source-faker/source_faker/record_data/products.json @@ -3,800 +3,800 @@ "id": 1, "make": "Mazda", "model": "MX-5", - "year": 2008, - "price": 2869, - "created_at": "2022-02-01T17:02:19+00:00" + "year": 2023, + "price": 15001, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 2, "make": "Mercedes-Benz", "model": "C-Class", - "year": 2009, - "price": 42397, - "created_at": "2021-01-25T14:31:33+00:00" + "year": 2023, + "price": 34239, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 3, "make": "Honda", "model": "Accord Crosstour", - "year": 2011, - "price": 63293, - "created_at": "2021-02-11T05:36:03+00:00" + "year": 2023, + "price": 28895, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 4, "make": "GMC", "model": "Jimmy", - "year": 1998, - "price": 34079, - "created_at": "2022-01-24T03:00:03+00:00" + "year": 2023, + "price": 25054, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 5, "make": "Infiniti", "model": "FX", - "year": 2004, - "price": 17036, - "created_at": "2021-10-02T03:55:44+00:00" + "year": 2023, + "price": 23572, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 6, "make": "Dodge", "model": "Intrepid", - "year": 2002, - "price": 65498, - "created_at": "2022-01-18T00:41:08+00:00" + "year": 2023, + "price": 17942, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 7, "make": "Nissan", "model": "Frontier", - "year": 2005, - "price": 14516, - "created_at": "2021-04-22T16:37:44+00:00" + "year": 2023, + "price": 16398, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 8, "make": "Chevrolet", "model": "Express 1500", - "year": 2007, - "price": 13023, - "created_at": "2021-07-12T07:13:04+00:00" + "year": 2023, + "price": 16548, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 9, "make": "Bentley", "model": "Continental GTC", - "year": 2008, - "price": 43458, - "created_at": "2021-03-17T05:43:15+00:00" + "year": 2023, + "price": 34971, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 10, "make": "Cadillac", "model": "DTS", - "year": 2008, - "price": 43859, - "created_at": "2021-08-12T07:33:58+00:00" + "year": 2023, + "price": 37400, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 11, "make": "Dodge", "model": "Ram 2500", - "year": 2000, - "price": 82904, - "created_at": "2021-09-03T10:51:16+00:00" + "year": 2023, + "price": 19536, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 12, "make": "Suzuki", "model": "SJ 410", - "year": 1984, - "price": 38667, - "created_at": "2021-01-11T00:15:46+00:00" + "year": 2023, + "price": 29729, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 13, "make": "Audi", "model": "S4", - "year": 2005, - "price": 2391, - "created_at": "2021-09-06T03:31:10+00:00" + "year": 2023, + "price": 42323, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 14, "make": "Chevrolet", "model": "Suburban 2500", - "year": 1998, - "price": 55733, - "created_at": "2021-10-18T17:26:05+00:00" + "year": 2023, + "price": 21559, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 15, "make": "Ford", "model": "Ranger", - "year": 2000, - "price": 20228, - "created_at": "2022-03-24T04:03:19+00:00" + "year": 2023, + "price": 18192, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 16, "make": "Chevrolet", "model": "Corvette", - "year": 2009, - "price": 75052, - "created_at": "2021-12-31T03:38:21+00:00" + "year": 2023, + "price": 14126, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 17, "make": "Mitsubishi", "model": "Pajero", - "year": 1993, - "price": 84058, - "created_at": "2021-10-15T00:25:34+00:00" + "year": 2023, + "price": 32060, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 18, "make": "Lincoln", "model": "LS", - "year": 2002, - "price": 34081, - "created_at": "2022-02-14T22:12:01+00:00" + "year": 2023, + "price": 25142, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 19, "make": "Dodge", "model": "Magnum", - "year": 2005, - "price": 85545, - "created_at": "2021-07-25T22:49:48+00:00" + "year": 2023, + "price": 17701, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 20, "make": "Pontiac", "model": "Grand Am", - "year": 2001, - "price": 54837, - "created_at": "2021-10-15T14:08:30+00:00" + "year": 2023, + "price": 26205, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 21, "make": "Chevrolet", "model": "Suburban 1500", - "year": 2006, - "price": 89410, - "created_at": "2021-03-23T15:40:43+00:00" + "year": 2023, + "price": 14982, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 22, "make": "GMC", "model": "Sierra 1500", - "year": 2005, - "price": 14288, - "created_at": "2021-08-30T13:40:04+00:00" + "year": 2023, + "price": 22367, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 23, "make": "GMC", "model": "3500", - "year": 1995, - "price": 12011, - "created_at": "2022-04-24T13:11:08+00:00" + "year": 2023, + "price": 28438, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 24, "make": "Mazda", "model": "Mazda5", - "year": 2006, - "price": 6393, - "created_at": "2021-07-07T14:14:33+00:00" + "year": 2023, + "price": 17242, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 25, "make": "Chevrolet", "model": "Camaro", - "year": 1967, - "price": 71590, - "created_at": "2021-01-10T21:50:22+00:00" + "year": 2023, + "price": 23442, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 26, "make": "Ford", "model": "Explorer Sport Trac", - "year": 2010, - "price": 23498, - "created_at": "2022-04-20T00:52:20+00:00" + "year": 2023, + "price": 27677, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 27, "make": "Dodge", "model": "Caravan", - "year": 1985, - "price": 50071, - "created_at": "2022-01-05T10:13:31+00:00" + "year": 2023, + "price": 23033, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 28, "make": "Nissan", "model": "240SX", - "year": 1992, - "price": 38379, - "created_at": "2022-04-07T04:48:48+00:00" + "year": 2023, + "price": 19681, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 29, "make": "Oldsmobile", "model": "Intrigue", - "year": 2002, - "price": 21376, - "created_at": "2021-10-01T13:30:49+00:00" + "year": 2023, + "price": 27033, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 30, "make": "Audi", "model": "TT", - "year": 2011, - "price": 40893, - "created_at": "2021-02-28T23:06:37+00:00" + "year": 2023, + "price": 70136, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 31, "make": "Ford", "model": "Crown Victoria", - "year": 2006, - "price": 86225, - "created_at": "2021-01-28T23:33:27+00:00" + "year": 2023, + "price": 16564, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 32, "make": "Toyota", "model": "Tacoma", - "year": 2003, - "price": 73558, - "created_at": "2022-01-28T22:02:04+00:00" + "year": 2023, + "price": 17930, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 33, "make": "Buick", "model": "Regal", - "year": 1994, - "price": 32279, - "created_at": "2022-04-04T13:35:49+00:00" + "year": 2023, + "price": 31593, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 34, "make": "Mercedes-Benz", "model": "C-Class", - "year": 2001, - "price": 98732, - "created_at": "2021-03-30T23:16:05+00:00" + "year": 2023, + "price": 44508, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 35, "make": "GMC", "model": "Sierra 3500", - "year": 2002, - "price": 48267, - "created_at": "2021-07-30T20:29:51+00:00" + "year": 2023, + "price": 25102, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 36, "make": "Pontiac", "model": "G6", - "year": 2005, - "price": 16766, - "created_at": "2021-03-24T07:53:33+00:00" + "year": 2023, + "price": 22911, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 37, "make": "Subaru", "model": "Outback Sport", - "year": 2002, - "price": 34523, - "created_at": "2021-12-23T22:47:32+00:00" + "year": 2023, + "price": 26498, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 38, "make": "Ferrari", "model": "F430", - "year": 2007, - "price": 31677, - "created_at": "2021-01-11T04:49:57+00:00" + "year": 2023, + "price": 34657, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 39, "make": "Mitsubishi", "model": "Montero", - "year": 2003, - "price": 67136, - "created_at": "2021-05-10T07:37:56+00:00" + "year": 2023, + "price": 23593, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 40, "make": "Nissan", "model": "Sentra", - "year": 1993, - "price": 78236, - "created_at": "2021-11-10T23:48:26+00:00" + "year": 2023, + "price": 21899, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 41, "make": "Mitsubishi", "model": "3000GT", - "year": 1993, - "price": 58150, - "created_at": "2021-09-08T06:55:22+00:00" + "year": 2023, + "price": 31061, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 42, "make": "Ford", "model": "E350", - "year": 2012, - "price": 55270, - "created_at": "2021-03-24T13:17:37+00:00" + "year": 2023, + "price": 26650, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 43, "make": "Ford", "model": "Taurus", - "year": 1987, - "price": 13522, - "created_at": "2021-10-27T21:03:59+00:00" + "year": 2023, + "price": 23853, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 44, "make": "Chevrolet", "model": "Avalanche", - "year": 2012, - "price": 9862, - "created_at": "2021-07-13T12:22:26+00:00" + "year": 2023, + "price": 27412, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 45, "make": "Dodge", "model": "Charger", - "year": 2012, - "price": 81887, - "created_at": "2021-04-24T01:48:24+00:00" + "year": 2023, + "price": 27804, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 46, "make": "Jaguar", "model": "S-Type", - "year": 2005, - "price": 34372, - "created_at": "2021-04-03T08:56:17+00:00" + "year": 2023, + "price": 39852, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 47, "make": "Plymouth", "model": "Grand Voyager", - "year": 1994, - "price": 90637, - "created_at": "2022-04-21T09:21:08+00:00" + "year": 2023, + "price": 31205, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 48, "make": "Pontiac", "model": "6000", - "year": 1989, - "price": 65165, - "created_at": "2021-10-30T13:03:07+00:00" + "year": 2023, + "price": 32539, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 49, "make": "Lexus", "model": "IS", - "year": 2006, - "price": 22434, - "created_at": "2021-01-16T10:45:52+00:00" + "year": 2023, + "price": 35712, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 50, "make": "Isuzu", "model": "VehiCROSS", - "year": 2001, - "price": 38180, - "created_at": "2021-12-13T16:29:27+00:00" + "year": 2023, + "price": 23578, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 51, "make": "Buick", "model": "Regal", - "year": 2000, - "price": 38680, - "created_at": "2021-12-29T22:25:54+00:00" + "year": 2023, + "price": 26548, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 52, "make": "Mercedes-Benz", "model": "E-Class", - "year": 2007, - "price": 51556, - "created_at": "2021-07-06T11:42:23+00:00" + "year": 2023, + "price": 39967, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 53, "make": "Buick", "model": "LeSabre", - "year": 2001, - "price": 10904, - "created_at": "2022-01-05T18:23:35+00:00" + "year": 2023, + "price": 24308, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 54, "make": "Porsche", "model": "928", - "year": 1989, - "price": 70917, - "created_at": "2022-01-02T23:16:45+00:00" + "year": 2023, + "price": 53861, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 55, "make": "Lexus", "model": "RX", - "year": 2007, - "price": 5212, - "created_at": "2021-07-10T15:02:53+00:00" + "year": 2023, + "price": 38988, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 56, "make": "Ford", "model": "Econoline E250", - "year": 1996, - "price": 75095, - "created_at": "2021-02-04T16:17:18+00:00" + "year": 2023, + "price": 19533, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 57, "make": "Chevrolet", "model": "Blazer", - "year": 2001, - "price": 61918, - "created_at": "2021-12-08T07:25:30+00:00" + "year": 2023, + "price": 18865, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 58, "make": "GMC", "model": "Savana 3500", - "year": 2003, - "price": 30307, - "created_at": "2021-11-21T23:11:45+00:00" + "year": 2023, + "price": 25586, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 59, "make": "BMW", "model": "M", - "year": 2002, - "price": 24598, - "created_at": "2021-05-28T04:08:53+00:00" + "year": 2023, + "price": 40478, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 60, "make": "Saturn", "model": "S-Series", - "year": 1992, - "price": 96288, - "created_at": "2021-08-24T04:43:43+00:00" + "year": 2023, + "price": 27890, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 61, "make": "Chrysler", "model": "Sebring", - "year": 2003, - "price": 34753, - "created_at": "2021-02-11T11:25:35+00:00" + "year": 2023, + "price": 22732, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 62, "make": "Lotus", "model": "Evora", - "year": 2010, - "price": 42760, - "created_at": "2021-08-31T00:29:05+00:00" + "year": 2023, + "price": 22620, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 63, "make": "Jeep", "model": "Wrangler", - "year": 2011, - "price": 8684, - "created_at": "2021-06-24T10:38:05+00:00" + "year": 2023, + "price": 22206, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 64, "make": "Ford", "model": "Expedition", - "year": 2012, - "price": 25653, - "created_at": "2021-07-01T16:13:20+00:00" + "year": 2023, + "price": 30490, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 65, "make": "Chevrolet", "model": "Avalanche 2500", - "year": 2006, - "price": 3158, - "created_at": "2021-08-14T10:55:13+00:00" + "year": 2023, + "price": 17804, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 66, "make": "Mazda", "model": "Mazda3", - "year": 2012, - "price": 79820, - "created_at": "2021-05-25T21:55:52+00:00" + "year": 2023, + "price": 29136, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 67, "make": "Toyota", "model": "Tacoma", - "year": 2005, - "price": 73572, - "created_at": "2021-01-22T09:56:02+00:00" + "year": 2023, + "price": 17090, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 68, "make": "Ford", "model": "Explorer Sport", - "year": 2000, - "price": 64579, - "created_at": "2021-02-16T06:56:06+00:00" + "year": 2023, + "price": 20030, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 69, "make": "GMC", "model": "Savana Cargo Van", - "year": 2006, - "price": 65944, - "created_at": "2021-09-12T14:08:53+00:00" + "year": 2023, + "price": 23481, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 70, "make": "Chevrolet", "model": "HHR", - "year": 2009, - "price": 8953, - "created_at": "2021-08-17T04:25:43+00:00" + "year": 2023, + "price": 14739, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 71, "make": "Ford", "model": "Bronco II", - "year": 1989, - "price": 41811, - "created_at": "2021-07-14T14:20:28+00:00" + "year": 2023, + "price": 23330, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 72, "make": "Chevrolet", "model": "Suburban 2500", - "year": 2011, - "price": 57488, - "created_at": "2021-09-22T12:32:57+00:00" + "year": 2023, + "price": 27447, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 73, "make": "Suzuki", "model": "Grand Vitara", - "year": 2008, - "price": 6408, - "created_at": "2021-11-12T23:19:52+00:00" + "year": 2023, + "price": 20474, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 74, "make": "Mazda", "model": "Mazda6", - "year": 2012, - "price": 14805, - "created_at": "2021-06-01T01:55:32+00:00" + "year": 2023, + "price": 28367, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 75, "make": "Chevrolet", "model": "Tahoe", - "year": 1998, - "price": 33585, - "created_at": "2022-01-09T04:28:54+00:00" + "year": 2023, + "price": 18820, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 76, "make": "Ford", "model": "Explorer Sport Trac", - "year": 2010, - "price": 2087, - "created_at": "2022-03-28T00:28:16+00:00" + "year": 2023, + "price": 32124, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 77, "make": "Ford", "model": "F150", - "year": 2007, - "price": 17621, - "created_at": "2021-03-23T15:08:10+00:00" + "year": 2023, + "price": 15639, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 78, "make": "Ford", "model": "Taurus", - "year": 1995, - "price": 16478, - "created_at": "2021-06-07T22:29:50+00:00" + "year": 2023, + "price": 20215, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 79, "make": "Mitsubishi", "model": "Truck", - "year": 1992, - "price": 70616, - "created_at": "2022-01-30T05:14:02+00:00" + "year": 2023, + "price": 31651, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 80, "make": "Dodge", "model": "Colt", - "year": 1994, - "price": 34163, - "created_at": "2022-04-02T18:06:30+00:00" + "year": 2023, + "price": 21590, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 81, "make": "Mazda", "model": "RX-7", - "year": 1991, - "price": 29634, - "created_at": "2021-01-06T10:30:59+00:00" + "year": 2023, + "price": 19443, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 82, "make": "Pontiac", "model": "Grand Prix", - "year": 1984, - "price": 88575, - "created_at": "2021-02-24T06:06:57+00:00" + "year": 2023, + "price": 31330, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 83, "make": "Mazda", "model": "Mazdaspeed 3", - "year": 2012, - "price": 77723, - "created_at": "2021-11-11T22:48:05+00:00" + "year": 2023, + "price": 26400, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 84, "make": "Alfa Romeo", "model": "Spider", - "year": 1992, - "price": 64288, - "created_at": "2021-01-06T03:50:27+00:00" + "year": 2023, + "price": 53177, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 85, "make": "Audi", "model": "S8", - "year": 2002, - "price": 33718, - "created_at": "2021-07-21T11:14:54+00:00" + "year": 2023, + "price": 36694, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 86, "make": "Isuzu", "model": "Amigo", - "year": 1992, - "price": 53335, - "created_at": "2022-03-02T10:42:21+00:00" + "year": 2023, + "price": 27523, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 87, "make": "Toyota", "model": "Paseo", - "year": 1996, - "price": 74558, - "created_at": "2021-10-02 14:54:58+00:00" + "year": 2023, + "price": 21909, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 88, "make": "Lincoln", "model": "Continental Mark VII", - "year": 1986, - "price": 42150, - "created_at": "2021-10-02T04:48:53+00:00" + "year": 2023, + "price": 31219, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 89, "make": "Dodge", "model": "Dakota", - "year": 1997, - "price": 64516, - "created_at": "2021-09-09T23:13:26+00:00" + "year": 2023, + "price": 19825, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 90, "make": "Chevrolet", "model": "Tahoe", - "year": 1998, - "price": 51461, - "created_at": "2021-04-06T08:29:19+00:00" + "year": 2023, + "price": 18151, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 91, "make": "Pontiac", "model": "Vibe", - "year": 2006, - "price": 12134, - "created_at": "2021-01-11T22:30:14+00:00" + "year": 2023, + "price": 24722, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 92, "make": "Volkswagen", "model": "Eos", - "year": 2011, - "price": 53128, - "created_at": "2021-01-12T23:25:06+00:00" + "year": 2023, + "price": 31509, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 93, "make": "Mazda", "model": "Mazdaspeed6", - "year": 2007, - "price": 90902, - "created_at": "2021-12-29T14:29:03+00:00" + "year": 2023, + "price": 15108, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 94, "make": "Nissan", "model": "Xterra", - "year": 2005, - "price": 41532, - "created_at": "2021-09-07 09:00:49+00:00" + "year": 2023, + "price": 17102, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 95, "make": "Mercury", "model": "Sable", - "year": 2005, - "price": 71337, - "created_at": "2021-01-31T22:13:44+00:00" + "year": 2023, + "price": 23376, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 96, "make": "BMW", "model": "330", - "year": 2006, - "price": 14494, - "created_at": "2021-09-17T20:52:48+00:00" + "year": 2023, + "price": 38978, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 97, "make": "Audi", "model": "R8", - "year": 2008, - "price": 17642, - "created_at": "2021-09-21T11:56:24+00:00" + "year": 2023, + "price": 39638, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 98, "make": "Cadillac", "model": "CTS-V", - "year": 2007, - "price": 19914, - "created_at": "2021-09-02T15:38:46+00:00" + "year": 2023, + "price": 34590, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 99, "make": "GMC", "model": "1500 Club Coupe", - "year": 1997, - "price": 82288, - "created_at": "2021-04-20T18:58:15+00:00" + "year": 2023, + "price": 30154, + "created_at": "2024-04-08T18:07:20+00:00" }, { "id": 100, "make": "Buick", "model": "Somerset", - "year": 1986, - "price": 64148, - "created_at": "2021-06-10T19:07:38+00:00" + "year": 2023, + "price": 29981, + "created_at": "2024-04-08T18:07:20+00:00" } ] diff --git a/airbyte-integrations/connectors/source-faker/source_faker/source.py b/airbyte-integrations/connectors/source-faker/source_faker/source.py index e191687782b2a..2da213aea02cb 100644 --- a/airbyte-integrations/connectors/source-faker/source_faker/source.py +++ b/airbyte-integrations/connectors/source-faker/source_faker/source.py @@ -10,6 +10,8 @@ from .streams import Products, Purchases, Users +DEFAULT_COUNT = 1_000 + class SourceFaker(AbstractSource): def check_connection(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> Tuple[bool, Any]: @@ -19,7 +21,7 @@ def check_connection(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> return False, "Count option is missing" def streams(self, config: Mapping[str, Any]) -> List[Stream]: - count: int = config["count"] if "count" in config else 0 + count: int = config["count"] if "count" in config else DEFAULT_COUNT seed: int = config["seed"] if "seed" in config else None records_per_slice: int = config["records_per_slice"] if "records_per_slice" in config else 100 always_updated: bool = config["always_updated"] if "always_updated" in config else True diff --git a/airbyte-integrations/connectors/source-faker/source_faker/spec.json b/airbyte-integrations/connectors/source-faker/source_faker/spec.json index 82759547a3a03..482010ec350a3 100644 --- a/airbyte-integrations/connectors/source-faker/source_faker/spec.json +++ b/airbyte-integrations/connectors/source-faker/source_faker/spec.json @@ -4,12 +4,12 @@ "$schema": "http://json-schema.org/draft-07/schema#", "title": "Faker Source Spec", "type": "object", - "required": ["count"], + "required": [], "additionalProperties": true, "properties": { "count": { "title": "Count", - "description": "How many users should be generated in total. This setting does not apply to the purchases or products stream.", + "description": "How many users should be generated in total. The purchases table will be scaled to match, with 10 purchases created per 10 users. This setting does not apply to the products stream.", "type": "integer", "minimum": 1, "default": 1000, diff --git a/airbyte-integrations/connectors/source-file/acceptance-test-config.yml b/airbyte-integrations/connectors/source-file/acceptance-test-config.yml index 733a50b5aaa0e..f42f8ce80f8df 100644 --- a/airbyte-integrations/connectors/source-file/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-file/acceptance-test-config.yml @@ -26,6 +26,7 @@ acceptance_tests: expect_records: path: "integration_tests/expected_records.jsonl" exact_order: no + validate_state_messages: False file_types: skip_test: yes bypass_reason: "Source is not based on file based CDK" diff --git a/airbyte-integrations/connectors/source-file/metadata.yaml b/airbyte-integrations/connectors/source-file/metadata.yaml index 2f1f4c97e1575..fcce3e7eda64d 100644 --- a/airbyte-integrations/connectors/source-file/metadata.yaml +++ b/airbyte-integrations/connectors/source-file/metadata.yaml @@ -10,12 +10,13 @@ data: connectorSubtype: file connectorType: source definitionId: 778daa7c-feaf-4db6-96f3-70fd645acc77 - dockerImageTag: 0.4.1 + dockerImageTag: 0.5.0 dockerRepository: airbyte/source-file documentationUrl: https://docs.airbyte.com/integrations/sources/file githubIssueLabel: source-file icon: file.svg license: MIT + maxSecondsBetweenMessages: 5400 name: File (CSV, JSON, Excel, Feather, Parquet) remoteRegistries: pypi: diff --git a/airbyte-integrations/connectors/source-file/poetry.lock b/airbyte-integrations/connectors/source-file/poetry.lock index e59ccf50645dc..e116c4fde20be 100644 --- a/airbyte-integrations/connectors/source-file/poetry.lock +++ b/airbyte-integrations/connectors/source-file/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "aiobotocore" @@ -147,17 +147,17 @@ frozenlist = ">=1.1.0" [[package]] name = "airbyte-cdk" -version = "0.51.41" +version = "0.72.1" description = "A framework for writing Airbyte Connectors." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte-cdk-0.51.41.tar.gz", hash = "sha256:cce614d67872cf66a151e5b72d70f4bf26e2a1ce672c7abfc15a5cb4e45d8429"}, - {file = "airbyte_cdk-0.51.41-py3-none-any.whl", hash = "sha256:bbf82a45d9ec97c4a92b85e3312b327f8060fffec1f7c7ea7dfa720f9adcc13b"}, + {file = "airbyte-cdk-0.72.1.tar.gz", hash = "sha256:1dbd0a11f3784cfdd5afa9f40315c9a6123e803be91f9f861642a78e7ee14cd9"}, + {file = "airbyte_cdk-0.72.1-py3-none-any.whl", hash = "sha256:849077805442286de99f589ecba4be82491a3d9d3f516ce1a8b0cbaf303db9a4"}, ] [package.dependencies] -airbyte-protocol-models = "0.4.2" +airbyte-protocol-models = "0.5.1" backoff = "*" cachetools = "*" Deprecated = ">=1.2,<2.0" @@ -167,8 +167,9 @@ isodate = ">=0.6.1,<0.7.0" Jinja2 = ">=3.1.2,<3.2.0" jsonref = ">=0.2,<1.0" jsonschema = ">=3.2.0,<3.3.0" -pendulum = "*" +pendulum = "<3.0.0" pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" python-dateutil = "*" PyYAML = ">=6.0.1" requests = "*" @@ -176,20 +177,20 @@ requests-cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pyarrow (==12.0.1)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "pyarrow (==12.0.1)"] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] [[package]] name = "airbyte-protocol-models" -version = "0.4.2" +version = "0.5.1" description = "Declares the Airbyte Protocol." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte_protocol_models-0.4.2-py3-none-any.whl", hash = "sha256:d3bbb14d4af9483bd7b08f5eb06f87e7113553bf4baed3998af95be873a0d821"}, - {file = "airbyte_protocol_models-0.4.2.tar.gz", hash = "sha256:67b149d4812f8fdb88396b161274aa73cf0e16f22e35ce44f2bfc4d47e51915c"}, + {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, + {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, ] [package.dependencies] @@ -248,13 +249,13 @@ files = [ [[package]] name = "azure-core" -version = "1.30.0" +version = "1.30.1" description = "Microsoft Azure Core Library for Python" optional = false python-versions = ">=3.7" files = [ - {file = "azure-core-1.30.0.tar.gz", hash = "sha256:6f3a7883ef184722f6bd997262eddaf80cfe7e5b3e0caaaf8db1695695893d35"}, - {file = "azure_core-1.30.0-py3-none-any.whl", hash = "sha256:3dae7962aad109610e68c9a7abb31d79720e1d982ddf61363038d175a5025e89"}, + {file = "azure-core-1.30.1.tar.gz", hash = "sha256:26273a254131f84269e8ea4464f3560c731f29c0c1f69ac99010845f239c1a8f"}, + {file = "azure_core-1.30.1-py3-none-any.whl", hash = "sha256:7c5ee397e48f281ec4dd773d67a0a47a0962ed6fa833036057f9ea067f688e74"}, ] [package.dependencies] @@ -267,13 +268,13 @@ aio = ["aiohttp (>=3.0)"] [[package]] name = "azure-storage-blob" -version = "12.19.0" +version = "12.19.1" description = "Microsoft Azure Blob Storage Client Library for Python" optional = false python-versions = ">=3.7" files = [ - {file = "azure-storage-blob-12.19.0.tar.gz", hash = "sha256:26c0a4320a34a3c2a1b74528ba6812ebcb632a04cd67b1c7377232c4b01a5897"}, - {file = "azure_storage_blob-12.19.0-py3-none-any.whl", hash = "sha256:7bbc2c9c16678f7a420367fef6b172ba8730a7e66df7f4d7a55d5b3c8216615b"}, + {file = "azure-storage-blob-12.19.1.tar.gz", hash = "sha256:13e16ba42fc54ac2c7e8f976062173a5c82b9ec0594728e134aac372965a11b0"}, + {file = "azure_storage_blob-12.19.1-py3-none-any.whl", hash = "sha256:c5530dc51c21c9564e4eb706cd499befca8819b10dd89716d3fc90d747556243"}, ] [package.dependencies] @@ -405,13 +406,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -626,43 +627,43 @@ files = [ [[package]] name = "cryptography" -version = "42.0.2" +version = "42.0.5" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-42.0.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:701171f825dcab90969596ce2af253143b93b08f1a716d4b2a9d2db5084ef7be"}, - {file = "cryptography-42.0.2-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:61321672b3ac7aade25c40449ccedbc6db72c7f5f0fdf34def5e2f8b51ca530d"}, - {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea2c3ffb662fec8bbbfce5602e2c159ff097a4631d96235fcf0fb00e59e3ece4"}, - {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b15c678f27d66d247132cbf13df2f75255627bcc9b6a570f7d2fd08e8c081d2"}, - {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8e88bb9eafbf6a4014d55fb222e7360eef53e613215085e65a13290577394529"}, - {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a047682d324ba56e61b7ea7c7299d51e61fd3bca7dad2ccc39b72bd0118d60a1"}, - {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:36d4b7c4be6411f58f60d9ce555a73df8406d484ba12a63549c88bd64f7967f1"}, - {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a00aee5d1b6c20620161984f8ab2ab69134466c51f58c052c11b076715e72929"}, - {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b97fe7d7991c25e6a31e5d5e795986b18fbbb3107b873d5f3ae6dc9a103278e9"}, - {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5fa82a26f92871eca593b53359c12ad7949772462f887c35edaf36f87953c0e2"}, - {file = "cryptography-42.0.2-cp37-abi3-win32.whl", hash = "sha256:4b063d3413f853e056161eb0c7724822a9740ad3caa24b8424d776cebf98e7ee"}, - {file = "cryptography-42.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:841ec8af7a8491ac76ec5a9522226e287187a3107e12b7d686ad354bb78facee"}, - {file = "cryptography-42.0.2-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:55d1580e2d7e17f45d19d3b12098e352f3a37fe86d380bf45846ef257054b242"}, - {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28cb2c41f131a5758d6ba6a0504150d644054fd9f3203a1e8e8d7ac3aea7f73a"}, - {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9097a208875fc7bbeb1286d0125d90bdfed961f61f214d3f5be62cd4ed8a446"}, - {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:44c95c0e96b3cb628e8452ec060413a49002a247b2b9938989e23a2c8291fc90"}, - {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2f9f14185962e6a04ab32d1abe34eae8a9001569ee4edb64d2304bf0d65c53f3"}, - {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:09a77e5b2e8ca732a19a90c5bca2d124621a1edb5438c5daa2d2738bfeb02589"}, - {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad28cff53f60d99a928dfcf1e861e0b2ceb2bc1f08a074fdd601b314e1cc9e0a"}, - {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:130c0f77022b2b9c99d8cebcdd834d81705f61c68e91ddd614ce74c657f8b3ea"}, - {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:fa3dec4ba8fb6e662770b74f62f1a0c7d4e37e25b58b2bf2c1be4c95372b4a33"}, - {file = "cryptography-42.0.2-cp39-abi3-win32.whl", hash = "sha256:3dbd37e14ce795b4af61b89b037d4bc157f2cb23e676fa16932185a04dfbf635"}, - {file = "cryptography-42.0.2-cp39-abi3-win_amd64.whl", hash = "sha256:8a06641fb07d4e8f6c7dda4fc3f8871d327803ab6542e33831c7ccfdcb4d0ad6"}, - {file = "cryptography-42.0.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:087887e55e0b9c8724cf05361357875adb5c20dec27e5816b653492980d20380"}, - {file = "cryptography-42.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a7ef8dd0bf2e1d0a27042b231a3baac6883cdd5557036f5e8df7139255feaac6"}, - {file = "cryptography-42.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4383b47f45b14459cab66048d384614019965ba6c1a1a141f11b5a551cace1b2"}, - {file = "cryptography-42.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:fbeb725c9dc799a574518109336acccaf1303c30d45c075c665c0793c2f79a7f"}, - {file = "cryptography-42.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:320948ab49883557a256eab46149df79435a22d2fefd6a66fe6946f1b9d9d008"}, - {file = "cryptography-42.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5ef9bc3d046ce83c4bbf4c25e1e0547b9c441c01d30922d812e887dc5f125c12"}, - {file = "cryptography-42.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:52ed9ebf8ac602385126c9a2fe951db36f2cb0c2538d22971487f89d0de4065a"}, - {file = "cryptography-42.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:141e2aa5ba100d3788c0ad7919b288f89d1fe015878b9659b307c9ef867d3a65"}, - {file = "cryptography-42.0.2.tar.gz", hash = "sha256:e0ec52ba3c7f1b7d813cd52649a5b3ef1fc0d433219dc8c93827c57eab6cf888"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, + {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, + {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, + {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, + {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, + {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = "sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, + {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, ] [package.dependencies] @@ -898,13 +899,13 @@ files = [ [[package]] name = "google-api-core" -version = "2.17.0" +version = "2.17.1" description = "Google API client core library" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-core-2.17.0.tar.gz", hash = "sha256:de7ef0450faec7c75e0aea313f29ac870fdc44cfaec9d6499a9a17305980ef66"}, - {file = "google_api_core-2.17.0-py3-none-any.whl", hash = "sha256:08ed79ed8e93e329de5e3e7452746b734e6bf8438d8d64dd3319d21d3164890c"}, + {file = "google-api-core-2.17.1.tar.gz", hash = "sha256:9df18a1f87ee0df0bc4eea2770ebc4228392d8cc4066655b320e2cfccb15db95"}, + {file = "google_api_core-2.17.1-py3-none-any.whl", hash = "sha256:610c5b90092c360736baccf17bd3efbcb30dd380e7a6dc28a71059edb8bd0d8e"}, ] [package.dependencies] @@ -920,13 +921,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-auth" -version = "2.27.0" +version = "2.28.2" description = "Google Authentication Library" optional = false python-versions = ">=3.7" files = [ - {file = "google-auth-2.27.0.tar.gz", hash = "sha256:e863a56ccc2d8efa83df7a80272601e43487fa9a728a376205c86c26aaefa821"}, - {file = "google_auth-2.27.0-py2.py3-none-any.whl", hash = "sha256:8e4bad367015430ff253fe49d500fdc3396c1a434db5740828c728e45bcce245"}, + {file = "google-auth-2.28.2.tar.gz", hash = "sha256:80b8b4969aa9ed5938c7828308f20f035bc79f9d8fb8120bf9dc8db20b41ba30"}, + {file = "google_auth-2.28.2-py2.py3-none-any.whl", hash = "sha256:9fd67bbcd40f16d9d42f950228e9cf02a2ded4ae49198b27432d0cded5a74c38"}, ] [package.dependencies] @@ -1098,13 +1099,13 @@ requests = ["requests (>=2.18.0,<3.0.0dev)"] [[package]] name = "googleapis-common-protos" -version = "1.62.0" +version = "1.63.0" description = "Common protobufs used in Google APIs" optional = false python-versions = ">=3.7" files = [ - {file = "googleapis-common-protos-1.62.0.tar.gz", hash = "sha256:83f0ece9f94e5672cced82f592d2a5edf527a96ed1794f0bab36d5735c996277"}, - {file = "googleapis_common_protos-1.62.0-py2.py3-none-any.whl", hash = "sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07"}, + {file = "googleapis-common-protos-1.63.0.tar.gz", hash = "sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e"}, + {file = "googleapis_common_protos-1.63.0-py2.py3-none-any.whl", hash = "sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632"}, ] [package.dependencies] @@ -1560,13 +1561,13 @@ et-xmlfile = "*" [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -1637,102 +1638,37 @@ invoke = ["invoke (>=1.3)"] [[package]] name = "pendulum" -version = "3.0.0" +version = "2.1.2" description = "Python datetimes made easy" optional = false -python-versions = ">=3.8" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ - {file = "pendulum-3.0.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2cf9e53ef11668e07f73190c805dbdf07a1939c3298b78d5a9203a86775d1bfd"}, - {file = "pendulum-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fb551b9b5e6059377889d2d878d940fd0bbb80ae4810543db18e6f77b02c5ef6"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c58227ac260d5b01fc1025176d7b31858c9f62595737f350d22124a9a3ad82d"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60fb6f415fea93a11c52578eaa10594568a6716602be8430b167eb0d730f3332"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b69f6b4dbcb86f2c2fe696ba991e67347bcf87fe601362a1aba6431454b46bde"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:138afa9c373ee450ede206db5a5e9004fd3011b3c6bbe1e57015395cd076a09f"}, - {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:83d9031f39c6da9677164241fd0d37fbfc9dc8ade7043b5d6d62f56e81af8ad2"}, - {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0c2308af4033fa534f089595bcd40a95a39988ce4059ccd3dc6acb9ef14ca44a"}, - {file = "pendulum-3.0.0-cp310-none-win_amd64.whl", hash = "sha256:9a59637cdb8462bdf2dbcb9d389518c0263799189d773ad5c11db6b13064fa79"}, - {file = "pendulum-3.0.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3725245c0352c95d6ca297193192020d1b0c0f83d5ee6bb09964edc2b5a2d508"}, - {file = "pendulum-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6c035f03a3e565ed132927e2c1b691de0dbf4eb53b02a5a3c5a97e1a64e17bec"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:597e66e63cbd68dd6d58ac46cb7a92363d2088d37ccde2dae4332ef23e95cd00"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99a0f8172e19f3f0c0e4ace0ad1595134d5243cf75985dc2233e8f9e8de263ca"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:77d8839e20f54706aed425bec82a83b4aec74db07f26acd039905d1237a5e1d4"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afde30e8146292b059020fbc8b6f8fd4a60ae7c5e6f0afef937bbb24880bdf01"}, - {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:660434a6fcf6303c4efd36713ca9212c753140107ee169a3fc6c49c4711c2a05"}, - {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dee9e5a48c6999dc1106eb7eea3e3a50e98a50651b72c08a87ee2154e544b33e"}, - {file = "pendulum-3.0.0-cp311-none-win_amd64.whl", hash = "sha256:d4cdecde90aec2d67cebe4042fd2a87a4441cc02152ed7ed8fb3ebb110b94ec4"}, - {file = "pendulum-3.0.0-cp311-none-win_arm64.whl", hash = "sha256:773c3bc4ddda2dda9f1b9d51fe06762f9200f3293d75c4660c19b2614b991d83"}, - {file = "pendulum-3.0.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:409e64e41418c49f973d43a28afe5df1df4f1dd87c41c7c90f1a63f61ae0f1f7"}, - {file = "pendulum-3.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a38ad2121c5ec7c4c190c7334e789c3b4624798859156b138fcc4d92295835dc"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fde4d0b2024b9785f66b7f30ed59281bd60d63d9213cda0eb0910ead777f6d37"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b2c5675769fb6d4c11238132962939b960fcb365436b6d623c5864287faa319"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8af95e03e066826f0f4c65811cbee1b3123d4a45a1c3a2b4fc23c4b0dff893b5"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2165a8f33cb15e06c67070b8afc87a62b85c5a273e3aaa6bc9d15c93a4920d6f"}, - {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ad5e65b874b5e56bd942546ea7ba9dd1d6a25121db1c517700f1c9de91b28518"}, - {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17fe4b2c844bbf5f0ece69cfd959fa02957c61317b2161763950d88fed8e13b9"}, - {file = "pendulum-3.0.0-cp312-none-win_amd64.whl", hash = "sha256:78f8f4e7efe5066aca24a7a57511b9c2119f5c2b5eb81c46ff9222ce11e0a7a5"}, - {file = "pendulum-3.0.0-cp312-none-win_arm64.whl", hash = "sha256:28f49d8d1e32aae9c284a90b6bb3873eee15ec6e1d9042edd611b22a94ac462f"}, - {file = "pendulum-3.0.0-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:d4e2512f4e1a4670284a153b214db9719eb5d14ac55ada5b76cbdb8c5c00399d"}, - {file = "pendulum-3.0.0-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:3d897eb50883cc58d9b92f6405245f84b9286cd2de6e8694cb9ea5cb15195a32"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e169cc2ca419517f397811bbe4589cf3cd13fca6dc38bb352ba15ea90739ebb"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f17c3084a4524ebefd9255513692f7e7360e23c8853dc6f10c64cc184e1217ab"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:826d6e258052715f64d05ae0fc9040c0151e6a87aae7c109ba9a0ed930ce4000"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2aae97087872ef152a0c40e06100b3665d8cb86b59bc8471ca7c26132fccd0f"}, - {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ac65eeec2250d03106b5e81284ad47f0d417ca299a45e89ccc69e36130ca8bc7"}, - {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a5346d08f3f4a6e9e672187faa179c7bf9227897081d7121866358af369f44f9"}, - {file = "pendulum-3.0.0-cp37-none-win_amd64.whl", hash = "sha256:235d64e87946d8f95c796af34818c76e0f88c94d624c268693c85b723b698aa9"}, - {file = "pendulum-3.0.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:6a881d9c2a7f85bc9adafcfe671df5207f51f5715ae61f5d838b77a1356e8b7b"}, - {file = "pendulum-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d7762d2076b9b1cb718a6631ad6c16c23fc3fac76cbb8c454e81e80be98daa34"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e8e36a8130819d97a479a0e7bf379b66b3b1b520e5dc46bd7eb14634338df8c"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7dc843253ac373358ffc0711960e2dd5b94ab67530a3e204d85c6e8cb2c5fa10"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a78ad3635d609ceb1e97d6aedef6a6a6f93433ddb2312888e668365908c7120"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b30a137e9e0d1f751e60e67d11fc67781a572db76b2296f7b4d44554761049d6"}, - {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c95984037987f4a457bb760455d9ca80467be792236b69d0084f228a8ada0162"}, - {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d29c6e578fe0f893766c0d286adbf0b3c726a4e2341eba0917ec79c50274ec16"}, - {file = "pendulum-3.0.0-cp38-none-win_amd64.whl", hash = "sha256:deaba8e16dbfcb3d7a6b5fabdd5a38b7c982809567479987b9c89572df62e027"}, - {file = "pendulum-3.0.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b11aceea5b20b4b5382962b321dbc354af0defe35daa84e9ff3aae3c230df694"}, - {file = "pendulum-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a90d4d504e82ad236afac9adca4d6a19e4865f717034fc69bafb112c320dcc8f"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:825799c6b66e3734227756fa746cc34b3549c48693325b8b9f823cb7d21b19ac"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad769e98dc07972e24afe0cff8d365cb6f0ebc7e65620aa1976fcfbcadc4c6f3"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6fc26907eb5fb8cc6188cc620bc2075a6c534d981a2f045daa5f79dfe50d512"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c717eab1b6d898c00a3e0fa7781d615b5c5136bbd40abe82be100bb06df7a56"}, - {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3ddd1d66d1a714ce43acfe337190be055cdc221d911fc886d5a3aae28e14b76d"}, - {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:822172853d7a9cf6da95d7b66a16c7160cb99ae6df55d44373888181d7a06edc"}, - {file = "pendulum-3.0.0-cp39-none-win_amd64.whl", hash = "sha256:840de1b49cf1ec54c225a2a6f4f0784d50bd47f68e41dc005b7f67c7d5b5f3ae"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3b1f74d1e6ffe5d01d6023870e2ce5c2191486928823196f8575dcc786e107b1"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:729e9f93756a2cdfa77d0fc82068346e9731c7e884097160603872686e570f07"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e586acc0b450cd21cbf0db6bae386237011b75260a3adceddc4be15334689a9a"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22e7944ffc1f0099a79ff468ee9630c73f8c7835cd76fdb57ef7320e6a409df4"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fa30af36bd8e50686846bdace37cf6707bdd044e5cb6e1109acbad3277232e04"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:440215347b11914ae707981b9a57ab9c7b6983ab0babde07063c6ee75c0dc6e7"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:314c4038dc5e6a52991570f50edb2f08c339debdf8cea68ac355b32c4174e820"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5acb1d386337415f74f4d1955c4ce8d0201978c162927d07df8eb0692b2d8533"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a789e12fbdefaffb7b8ac67f9d8f22ba17a3050ceaaa635cd1cc4645773a4b1e"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:860aa9b8a888e5913bd70d819306749e5eb488e6b99cd6c47beb701b22bdecf5"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:5ebc65ea033ef0281368217fbf59f5cb05b338ac4dd23d60959c7afcd79a60a0"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d9fef18ab0386ef6a9ac7bad7e43ded42c83ff7ad412f950633854f90d59afa8"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1c134ba2f0571d0b68b83f6972e2307a55a5a849e7dac8505c715c531d2a8795"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:385680812e7e18af200bb9b4a49777418c32422d05ad5a8eb85144c4a285907b"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eec91cd87c59fb32ec49eb722f375bd58f4be790cae11c1b70fac3ee4f00da0"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4386bffeca23c4b69ad50a36211f75b35a4deb6210bdca112ac3043deb7e494a"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dfbcf1661d7146d7698da4b86e7f04814221081e9fe154183e34f4c5f5fa3bf8"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:04a1094a5aa1daa34a6b57c865b25f691848c61583fb22722a4df5699f6bf74c"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5b0ec85b9045bd49dd3a3493a5e7ddfd31c36a2a60da387c419fa04abcaecb23"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0a15b90129765b705eb2039062a6daf4d22c4e28d1a54fa260892e8c3ae6e157"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:bb8f6d7acd67a67d6fedd361ad2958ff0539445ef51cbe8cd288db4306503cd0"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd69b15374bef7e4b4440612915315cc42e8575fcda2a3d7586a0d88192d0c88"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc00f8110db6898360c53c812872662e077eaf9c75515d53ecc65d886eec209a"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:83a44e8b40655d0ba565a5c3d1365d27e3e6778ae2a05b69124db9e471255c4a"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1a3604e9fbc06b788041b2a8b78f75c243021e0f512447806a6d37ee5214905d"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:92c307ae7accebd06cbae4729f0ba9fa724df5f7d91a0964b1b972a22baa482b"}, - {file = "pendulum-3.0.0.tar.gz", hash = "sha256:5d034998dea404ec31fae27af6b22cff1708f830a1ed7353be4d1019bb9f584e"}, + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, ] [package.dependencies] -python-dateutil = ">=2.6" -tzdata = ">=2020.1" - -[package.extras] -test = ["time-machine (>=2.6.0)"] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" [[package]] name = "platformdirs" @@ -1766,22 +1702,22 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "protobuf" -version = "4.25.2" +version = "4.25.3" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "protobuf-4.25.2-cp310-abi3-win32.whl", hash = "sha256:b50c949608682b12efb0b2717f53256f03636af5f60ac0c1d900df6213910fd6"}, - {file = "protobuf-4.25.2-cp310-abi3-win_amd64.whl", hash = "sha256:8f62574857ee1de9f770baf04dde4165e30b15ad97ba03ceac65f760ff018ac9"}, - {file = "protobuf-4.25.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:2db9f8fa64fbdcdc93767d3cf81e0f2aef176284071507e3ede160811502fd3d"}, - {file = "protobuf-4.25.2-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:10894a2885b7175d3984f2be8d9850712c57d5e7587a2410720af8be56cdaf62"}, - {file = "protobuf-4.25.2-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:fc381d1dd0516343f1440019cedf08a7405f791cd49eef4ae1ea06520bc1c020"}, - {file = "protobuf-4.25.2-cp38-cp38-win32.whl", hash = "sha256:33a1aeef4b1927431d1be780e87b641e322b88d654203a9e9d93f218ee359e61"}, - {file = "protobuf-4.25.2-cp38-cp38-win_amd64.whl", hash = "sha256:47f3de503fe7c1245f6f03bea7e8d3ec11c6c4a2ea9ef910e3221c8a15516d62"}, - {file = "protobuf-4.25.2-cp39-cp39-win32.whl", hash = "sha256:5e5c933b4c30a988b52e0b7c02641760a5ba046edc5e43d3b94a74c9fc57c1b3"}, - {file = "protobuf-4.25.2-cp39-cp39-win_amd64.whl", hash = "sha256:d66a769b8d687df9024f2985d5137a337f957a0916cf5464d1513eee96a63ff0"}, - {file = "protobuf-4.25.2-py3-none-any.whl", hash = "sha256:a8b7a98d4ce823303145bf3c1a8bdb0f2f4642a414b196f04ad9853ed0c8f830"}, - {file = "protobuf-4.25.2.tar.gz", hash = "sha256:fe599e175cb347efc8ee524bcd4b902d11f7262c0e569ececcb89995c15f0a5e"}, + {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"}, + {file = "protobuf-4.25.3-cp310-abi3-win_amd64.whl", hash = "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8"}, + {file = "protobuf-4.25.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d"}, + {file = "protobuf-4.25.3-cp38-cp38-win32.whl", hash = "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"}, + {file = "protobuf-4.25.3-cp38-cp38-win_amd64.whl", hash = "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4"}, + {file = "protobuf-4.25.3-cp39-cp39-win32.whl", hash = "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4"}, + {file = "protobuf-4.25.3-cp39-cp39-win_amd64.whl", hash = "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c"}, + {file = "protobuf-4.25.3-py3-none-any.whl", hash = "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9"}, + {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"}, ] [[package]] @@ -1957,6 +1893,21 @@ cffi = ">=1.4.1" docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"] tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + [[package]] name = "pyrsistent" version = "0.20.0" @@ -2060,13 +2011,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -2083,6 +2034,17 @@ files = [ {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, ] +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + [[package]] name = "pyxlsb" version = "1.0.9" @@ -2106,6 +2068,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -2113,8 +2076,15 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -2131,6 +2101,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -2138,6 +2109,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -2166,13 +2138,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -2184,15 +2156,15 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" @@ -2215,13 +2187,13 @@ test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "tes [[package]] name = "requests-oauthlib" -version = "1.3.1" +version = "1.4.0" description = "OAuthlib authentication support for Requests." optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ - {file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"}, - {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"}, + {file = "requests-oauthlib-1.4.0.tar.gz", hash = "sha256:acee623221e4a39abcbb919312c8ff04bd44e7e417087fb4bd5e2a2f53d5e79a"}, + {file = "requests_oauthlib-1.4.0-py2.py3-none-any.whl", hash = "sha256:7a3130d94a17520169e38db6c8d75f2c974643788465ecc2e4b36d288bf13033"}, ] [package.dependencies] @@ -2284,19 +2256,19 @@ crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"] [[package]] name = "setuptools" -version = "69.1.0" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -2361,24 +2333,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, -] - -[[package]] -name = "tzdata" -version = "2024.1" -description = "Provider of IANA time zone data" -optional = false -python-versions = ">=2" -files = [ - {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, - {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -2637,4 +2598,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "97d01109bc5d8b65e2ddbc2e85f378e0b8eeda49dc2344a79d9956167c0b1be6" +content-hash = "0341f923ef67294ebdbbfd917792aa140fbd4dee35f8e3f31aa967b0799ba2d2" diff --git a/airbyte-integrations/connectors/source-file/pyproject.toml b/airbyte-integrations/connectors/source-file/pyproject.toml index aaa8033925085..6ff58dbded253 100644 --- a/airbyte-integrations/connectors/source-file/pyproject.toml +++ b/airbyte-integrations/connectors/source-file/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "0.4.1" +version = "0.5.0" name = "source-file" description = "Source implementation for File" authors = [ "Airbyte ",] @@ -22,7 +22,7 @@ beautifulsoup4 = "==4.11.1" openpyxl = "==3.0.10" google-cloud-storage = "==2.5.0" pandas = "==1.4.3" -airbyte-cdk = "==0.51.41" +airbyte-cdk = "^0" paramiko = "==2.11.0" xlrd = "==2.0.1" boto3 = "==1.21.21" diff --git a/airbyte-integrations/connectors/source-file/source_file/source.py b/airbyte-integrations/connectors/source-file/source_file/source.py index 6c0ce4b1ba9dd..971af8a51bbda 100644 --- a/airbyte-integrations/connectors/source-file/source_file/source.py +++ b/airbyte-integrations/connectors/source-file/source_file/source.py @@ -173,11 +173,11 @@ def read( fields = self.selected_fields(catalog, config) name = client.stream_name - configured_stream = catalog.streams[0] + airbyte_stream = catalog.streams[0].stream logger.info(f"Syncing stream: {name} ({client.reader.full_url})...") - yield stream_status_as_airbyte_message(configured_stream, AirbyteStreamStatus.STARTED) + yield stream_status_as_airbyte_message(airbyte_stream, AirbyteStreamStatus.STARTED) record_counter = 0 try: @@ -187,19 +187,19 @@ def read( record_counter += 1 if record_counter == 1: logger.info(f"Marking stream {name} as RUNNING") - yield stream_status_as_airbyte_message(configured_stream, AirbyteStreamStatus.RUNNING) + yield stream_status_as_airbyte_message(airbyte_stream, AirbyteStreamStatus.RUNNING) yield AirbyteMessage(type=Type.RECORD, record=record) logger.info(f"Marking stream {name} as STOPPED") - yield stream_status_as_airbyte_message(configured_stream, AirbyteStreamStatus.COMPLETE) + yield stream_status_as_airbyte_message(airbyte_stream, AirbyteStreamStatus.COMPLETE) except Exception as err: reason = f"Failed to read data of {name} at {client.reader.full_url}: {repr(err)}\n{traceback.format_exc()}" logger.error(reason) logger.exception(f"Encountered an exception while reading stream {name}") logger.info(f"Marking stream {name} as STOPPED") - yield stream_status_as_airbyte_message(configured_stream, AirbyteStreamStatus.INCOMPLETE) + yield stream_status_as_airbyte_message(airbyte_stream, AirbyteStreamStatus.INCOMPLETE) raise err @staticmethod diff --git a/airbyte-integrations/connectors/source-freshdesk/README.md b/airbyte-integrations/connectors/source-freshdesk/README.md index 077bb798e93f9..fd99951c7739c 100644 --- a/airbyte-integrations/connectors/source-freshdesk/README.md +++ b/airbyte-integrations/connectors/source-freshdesk/README.md @@ -1,31 +1,35 @@ # Freshdesk source connector - -This is the repository for the Freshdesk source connector, written in Python. -For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.com/integrations/sources/freshdesk). +This is the repository for the Freshdesk source connector, written in Python. For information about +how to use this connector within Airbyte, see +[the documentation](https://docs.airbyte.com/integrations/sources/freshdesk). ## Local development ### Prerequisites -* Python (~=3.9) -* Poetry (~=1.7) - installation instructions [here](https://python-poetry.org/docs/#installation) +- Python (~=3.9) +- Poetry (~=1.7) - installation instructions [here](https://python-poetry.org/docs/#installation) ### Installing the connector + From this connector directory, run: + ```bash poetry install --with dev ``` - ### Create credentials -**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/sources/freshdesk) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_freshdesk/spec.yaml` file. -Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. -See `sample_files/sample_config.json` for a sample config file. +**If you are a community contributor**, follow the instructions in the +[documentation](https://docs.airbyte.com/integrations/sources/freshdesk) to generate the necessary +credentials. Then create a file `secrets/config.json` conforming to the `source_freshdesk/spec.yaml` +file. Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there +is no danger of accidentally checking in sensitive information. See +`sample_files/sample_config.json` for a sample config file. ### Locally running the connector + ``` poetry run source-freshdesk spec poetry run source-freshdesk check --config secrets/config.json @@ -34,23 +38,29 @@ poetry run source-freshdesk read --config secrets/config.json --catalog sample_f ``` ### Running unit tests + To run unit tests locally, from the connector directory run: + ``` poetry run pytest unit_tests ``` ### Building the docker image -1. Install [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) + +1. Install + [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) 2. Run the following command to build the docker image: + ```bash airbyte-ci connectors --name=source-freshdesk build ``` An image will be available on your host with the tag `airbyte/source-freshdesk:dev`. - ### Running as a docker container + Then run any of the connector commands as follows: + ``` docker run --rm airbyte/source-freshdesk:dev spec docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-freshdesk:dev check --config /secrets/config.json @@ -59,18 +69,25 @@ docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integrat ``` ### Running our CI test suite -You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): + +You can run our full test suite locally using +[`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): + ```bash airbyte-ci connectors --name=source-freshdesk test ``` ### Customizing acceptance Tests -Customize `acceptance-test-config.yml` file to configure acceptance tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. -If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. + +Customize `acceptance-test-config.yml` file to configure acceptance tests. See +[Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) +for more information. If your connector requires to create or destroy resources for use during +acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. ### Dependency Management -All of your dependencies should be managed via Poetry. -To add a new dependency, run: + +All of your dependencies should be managed via Poetry. To add a new dependency, run: + ```bash poetry add ``` @@ -78,14 +95,22 @@ poetry add Please commit the changes to `pyproject.toml` and `poetry.lock` files. ## Publishing a new version of the connector -You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? -1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-freshdesk test` -2. Bump the connector version (please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors)): - - bump the `dockerImageTag` value in in `metadata.yaml` - - bump the `version` value in `pyproject.toml` + +You've checked out the repo, implemented a million dollar feature, and you're ready to share your +changes with the world. Now what? + +1. Make sure your changes are passing our test suite: + `airbyte-ci connectors --name=source-freshdesk test` +2. Bump the connector version (please follow + [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors)): + - bump the `dockerImageTag` value in in `metadata.yaml` + - bump the `version` value in `pyproject.toml` 3. Make sure the `metadata.yaml` content is up to date. -4. Make sure the connector documentation and its changelog is up to date (`docs/integrations/sources/freshdesk.md`). -5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). +4. Make sure the connector documentation and its changelog is up to date + (`docs/integrations/sources/freshdesk.md`). +5. Create a Pull Request: use + [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). 6. Pat yourself on the back for being an awesome contributor. 7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. -8. Once your PR is merged, the new version of the connector will be automatically published to Docker Hub and our connector registry. \ No newline at end of file +8. Once your PR is merged, the new version of the connector will be automatically published to + Docker Hub and our connector registry. diff --git a/airbyte-integrations/connectors/source-freshdesk/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-freshdesk/integration_tests/configured_catalog.json index 7ca90be2f13b2..7017168de5d8d 100644 --- a/airbyte-integrations/connectors/source-freshdesk/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-freshdesk/integration_tests/configured_catalog.json @@ -226,10 +226,13 @@ "stream": { "name": "satisfaction_ratings", "json_schema": {}, - "supported_sync_modes": ["full_refresh"] + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updated_at"] }, - "sync_mode": "full_refresh", - "destination_sync_mode": "append" + "sync_mode": "incremental", + "destination_sync_mode": "append", + "cursor_field": ["updated_at"] }, { "stream": { diff --git a/airbyte-integrations/connectors/source-freshdesk/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-freshdesk/integration_tests/sample_state.json index 6e1d23b66dc43..85d6575172f9d 100644 --- a/airbyte-integrations/connectors/source-freshdesk/integration_tests/sample_state.json +++ b/airbyte-integrations/connectors/source-freshdesk/integration_tests/sample_state.json @@ -1,26 +1,4 @@ [ - { - "type": "STREAM", - "stream": { - "stream_state": { - "updated_at": "2021-01-01T00:00:00Z" - }, - "stream_descriptor": { - "name": "agents" - } - } - }, - { - "type": "STREAM", - "stream": { - "stream_state": { - "updated_at": "2021-11-01T00:00:00Z" - }, - "stream_descriptor": { - "name": "companies" - } - } - }, { "type": "STREAM", "stream": { @@ -32,39 +10,6 @@ } } }, - { - "type": "STREAM", - "stream": { - "stream_state": { - "updated_at": "2021-11-01T00:00:00Z" - }, - "stream_descriptor": { - "name": "conversations" - } - } - }, - { - "type": "STREAM", - "stream": { - "stream_state": { - "updated_at": "2021-11-01T00:00:00Z" - }, - "stream_descriptor": { - "name": "groups" - } - } - }, - { - "type": "STREAM", - "stream": { - "stream_state": { - "updated_at": "2021-11-01T00:00:00Z" - }, - "stream_descriptor": { - "name": "roles" - } - } - }, { "type": "STREAM", "stream": { @@ -76,28 +21,6 @@ } } }, - { - "type": "STREAM", - "stream": { - "stream_state": { - "updated_at": "2021-11-01T00:00:00Z" - }, - "stream_descriptor": { - "name": "skills" - } - } - }, - { - "type": "STREAM", - "stream": { - "stream_state": { - "updated_at": "2021-11-01T00:00:00Z" - }, - "stream_descriptor": { - "name": "surveys" - } - } - }, { "type": "STREAM", "stream": { @@ -108,16 +31,5 @@ "name": "tickets" } } - }, - { - "type": "STREAM", - "stream": { - "stream_state": { - "updated_at": "2021-11-01T00:00:00Z" - }, - "stream_descriptor": { - "name": "time_entries" - } - } } ] diff --git a/airbyte-integrations/connectors/source-freshdesk/metadata.yaml b/airbyte-integrations/connectors/source-freshdesk/metadata.yaml index 03e6358bd4991..58c602b135576 100644 --- a/airbyte-integrations/connectors/source-freshdesk/metadata.yaml +++ b/airbyte-integrations/connectors/source-freshdesk/metadata.yaml @@ -10,12 +10,13 @@ data: connectorSubtype: api connectorType: source definitionId: ec4b9503-13cb-48ab-a4ab-6ade4be46567 - dockerImageTag: 3.0.7 + dockerImageTag: 3.1.0 dockerRepository: airbyte/source-freshdesk documentationUrl: https://docs.airbyte.com/integrations/sources/freshdesk githubIssueLabel: source-freshdesk icon: freshdesk.svg license: MIT + maxSecondsBetweenMessages: 60 name: Freshdesk remoteRegistries: pypi: @@ -30,5 +31,5 @@ data: supportLevel: certified tags: - language:python - - cdk:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-freshdesk/poetry.lock b/airbyte-integrations/connectors/source-freshdesk/poetry.lock index 1e53023de5b92..de660b2968515 100644 --- a/airbyte-integrations/connectors/source-freshdesk/poetry.lock +++ b/airbyte-integrations/connectors/source-freshdesk/poetry.lock @@ -1,14 +1,14 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.58.7" +version = "0.72.2" description = "A framework for writing Airbyte Connectors." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte-cdk-0.58.7.tar.gz", hash = "sha256:00e379e2379b38683992027114a2190f49befec8cbac67d0a2c907786111e77b"}, - {file = "airbyte_cdk-0.58.7-py3-none-any.whl", hash = "sha256:09b31d32899cc6dc91e39716e8d1601503a7884d837752e683d1e3ef7dfe73be"}, + {file = "airbyte-cdk-0.72.2.tar.gz", hash = "sha256:3c06ed9c1436967ffde77b51814772dbbd79745d610bc2fe400dff9c4d7a9877"}, + {file = "airbyte_cdk-0.72.2-py3-none-any.whl", hash = "sha256:8d50773fe9ffffe9be8d6c2d2fcb10c50153833053b3ef4283fcb39c544dc4b9"}, ] [package.dependencies] @@ -32,8 +32,8 @@ requests-cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] @@ -104,13 +104,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -467,13 +467,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -702,13 +702,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -750,7 +750,6 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -808,13 +807,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -826,15 +825,15 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" @@ -857,19 +856,19 @@ test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "tes [[package]] name = "setuptools" -version = "69.1.0" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -895,13 +894,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -920,13 +919,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1031,4 +1030,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "639f5cfef16b8664432e1a27660a1c5dd290c0103e4e3741e2ece95f35f425e4" +content-hash = "a7ec155d95a0a4cf4f6ab83f2738051dfa051e54821acf19b5570813652d80a5" diff --git a/airbyte-integrations/connectors/source-freshdesk/pyproject.toml b/airbyte-integrations/connectors/source-freshdesk/pyproject.toml index 48527aa9bc86a..5ac4cb37eee2f 100644 --- a/airbyte-integrations/connectors/source-freshdesk/pyproject.toml +++ b/airbyte-integrations/connectors/source-freshdesk/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "3.0.7" +version = "3.1.0" name = "source-freshdesk" description = "Source implementation for Freshdesk." authors = [ "Airbyte ",] @@ -18,7 +18,7 @@ include = "source_freshdesk" [tool.poetry.dependencies] python = "^3.9,<3.12" backoff = "==1.10.0" -airbyte-cdk = "==0.58.7" +airbyte-cdk = "^0" [tool.poetry.scripts] source-freshdesk = "source_freshdesk.run:run" diff --git a/airbyte-integrations/connectors/source-freshdesk/source_freshdesk/availability_strategy.py b/airbyte-integrations/connectors/source-freshdesk/source_freshdesk/availability_strategy.py deleted file mode 100644 index 9ace31cb97758..0000000000000 --- a/airbyte-integrations/connectors/source-freshdesk/source_freshdesk/availability_strategy.py +++ /dev/null @@ -1,18 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import requests -from airbyte_cdk.sources.streams.http.availability_strategy import HttpAvailabilityStrategy - - -class FreshdeskAvailabilityStrategy(HttpAvailabilityStrategy): - def reasons_for_unavailable_status_codes(self, stream, logger, source, error): - unauthorized_error_message = f"The endpoint to access stream '{stream.name}' returned 401: Unauthorized. " - unauthorized_error_message += "This is most likely due to wrong credentials. " - unauthorized_error_message += self._visit_docs_message(logger, source) - - reasons = super(FreshdeskAvailabilityStrategy, self).reasons_for_unavailable_status_codes(stream, logger, source, error) - reasons[requests.codes.UNAUTHORIZED] = unauthorized_error_message - - return reasons diff --git a/airbyte-integrations/connectors/source-freshdesk/source_freshdesk/components.py b/airbyte-integrations/connectors/source-freshdesk/source_freshdesk/components.py new file mode 100644 index 0000000000000..3c87fa809e6a1 --- /dev/null +++ b/airbyte-integrations/connectors/source-freshdesk/source_freshdesk/components.py @@ -0,0 +1,137 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from dataclasses import dataclass +from typing import Any, List, Mapping, MutableMapping, Optional + +import requests +from airbyte_cdk.sources.declarative.incremental import DatetimeBasedCursor +from airbyte_cdk.sources.declarative.requesters.http_requester import HttpRequester +from airbyte_cdk.sources.declarative.requesters.paginators.strategies.page_increment import PageIncrement +from airbyte_cdk.sources.declarative.requesters.request_option import RequestOptionType +from airbyte_cdk.sources.declarative.requesters.request_options.interpolated_request_options_provider import ( + InterpolatedRequestOptionsProvider, + RequestInput, +) +from airbyte_cdk.sources.declarative.types import StreamSlice, StreamState +from source_freshdesk.utils import CallCredit + + +@dataclass +class FreshdeskRequester(HttpRequester): + """ + This class is created to add call throttling using the optional requests_per_minute parameter + """ + + request_body_json: Optional[RequestInput] = None + request_headers: Optional[RequestInput] = None + request_parameters: Optional[RequestInput] = None + request_body_data: Optional[RequestInput] = None + + def __post_init__(self, parameters: Mapping[str, Any]) -> None: + requests_per_minute = self.config.get("requests_per_minute") + self._call_credit = CallCredit(balance=requests_per_minute) if requests_per_minute else None + + self.request_options_provider = InterpolatedRequestOptionsProvider( + request_body_data=self.request_body_data, + request_body_json=self.request_body_json, + request_headers=self.request_headers, + request_parameters=self.request_parameters, + config=self.config, + parameters=parameters or {}, + ) + super().__post_init__(parameters) + + def _consume_credit(self, credit): + """Consume call credit, if there is no credit left within current window will sleep til next period""" + if self._call_credit: + self._call_credit.consume(credit) + + def send_request( + self, + **kwargs, + ) -> Optional[requests.Response]: + call_credit_cost = kwargs.pop("call_credit_cost", 1) + self._consume_credit(call_credit_cost) + return super().send_request(**kwargs) + + +@dataclass +class FreshdeskTicketsIncrementalRequester(FreshdeskRequester): + """ + This class is created for the Tickets stream to modify parameters produced by stream slicer and paginator + When the paginator hit the page limit it will return the latest record cursor for the next_page_token + next_page_token will be used in the stream slicer to get updated cursor filter + """ + + def send_request( + self, + **kwargs, + ) -> Optional[requests.Response]: + # pagination strategy returns cursor_filter based on the latest record instead of page when the page limit is hit + if type(kwargs["request_params"].get("page")) == str: + kwargs["request_params"].pop("page") + # set correct call credit cost for Tickets stream + kwargs["call_credit_cost"] = 3 + return super().send_request(**kwargs) + + +@dataclass +class FreshdeskTicketsIncrementalSync(DatetimeBasedCursor): + """ + This class is created for Tickets stream. When paginator hit the page limit it will return latest record cursor as next_page_token + Request parameters will be updated with the next_page_token to continue iterating over results + """ + + def __post_init__(self, parameters: Mapping[str, Any]): + super().__post_init__(parameters=parameters) + self.updated_slice = None + + def get_request_params( + self, + *, + stream_state: Optional[StreamState] = None, + stream_slice: Optional[StreamSlice] = None, + next_page_token: Optional[Mapping[str, Any]] = None, + ) -> Mapping[str, Any]: + # if next_page_token is str it is the latest record cursor from the paginator that will be used for updated cursor filter + # if next_page_token is int it is the page number + next_page_token = next_page_token.get("next_page_token") if next_page_token else None + if type(next_page_token) == str: + self.updated_slice = next_page_token + + # _get_request_options is modified to return updated cursor filter if exist + option_type = RequestOptionType.request_parameter + options: MutableMapping[str, Any] = {} + if not stream_slice: + return options + + if self.start_time_option and self.start_time_option.inject_into == option_type: + start_time = stream_slice.get(self._partition_field_start.eval(self.config)) if not self.updated_slice else self.updated_slice + options[self.start_time_option.field_name.eval(config=self.config)] = start_time # type: ignore # field_name is always casted to an interpolated string + if self.end_time_option and self.end_time_option.inject_into == option_type: + options[self.end_time_option.field_name.eval(config=self.config)] = stream_slice.get(self._partition_field_end.eval(self.config)) # type: ignore # field_name is always casted to an interpolated string + return options + + +@dataclass +class FreshdeskTicketsPaginationStrategy(PageIncrement): + """ + This pagination strategy will return latest record cursor for the next_page_token after hitting page count limit + """ + + PAGE_LIMIT = 300 + + def next_page_token(self, response: requests.Response, last_records: List[Mapping[str, Any]]) -> Optional[Any]: + # Stop paginating when there are fewer records than the page size or the current page has no records, or maximum page number is hit + if (self._page_size and len(last_records) < self._page_size) or len(last_records) == 0: + return None + elif self._page >= self.PAGE_LIMIT: + # reset page count as cursor parameter will be updated in the stream slicer + self.reset() + # get last_record from latest batch, pos. -1, because of ACS order of records + last_record_updated_at = last_records[-1]["updated_at"] + # updating slicer request parameters with last_record state + return last_record_updated_at + else: + self._page += 1 + return self._page diff --git a/airbyte-integrations/connectors/source-freshdesk/source_freshdesk/manifest.yaml b/airbyte-integrations/connectors/source-freshdesk/source_freshdesk/manifest.yaml new file mode 100644 index 0000000000000..07bc67b490962 --- /dev/null +++ b/airbyte-integrations/connectors/source-freshdesk/source_freshdesk/manifest.yaml @@ -0,0 +1,503 @@ +version: 0.72.2 +type: DeclarativeSource +check: + type: CheckStream + stream_names: + - agents + +spec: + type: Spec + connection_specification: + type: object + $schema: http://json-schema.org/draft-07/schema# + required: + - api_key + - domain + properties: + api_key: + type: string + order: 1 + title: API Key + description: 'Freshdesk API Key. See the docs for more information on how to obtain this key.' + airbyte_secret: true + domain: + type: string + order: 2 + title: Domain + description: "Freshdesk domain" + examples: + - "myaccount.freshdesk.com" + pattern: "^[a-zA-Z0-9._-]*\\.freshdesk\\.com$" + requests_per_minute: + type: integer + order: 3 + title: "Requests per minute" + description: "The number of requests per minute that this source allowed to use. There is a rate limit of 50 requests per minute per app per account." + start_date: + title: "Start Date" + type: "string" + order: 4 + description: "UTC date and time. Any data created after this date will be replicated. If this parameter is not set, all data will be replicated." + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2020-12-01T00:00:00Z" + lookback_window_in_days: + type: integer + order: 5 + title: Lookback Window + default: 14 + description: Number of days for lookback window for the stream Satisfaction Ratings + additionalProperties: true + +definitions: + schema_loader: + type: JsonFileSchemaLoader + file_path: "./source_freshdesk/schemas/{{ parameters['name'] }}.json" + + basic_authenticator: + type: BasicHttpAuthenticator + username: "{{ config.get('api_key')}}" + + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: [] + + paginator: + type: DefaultPaginator + pagination_strategy: + type: CursorPagination + cursor_value: "{{ headers['link']['next']['url'] }}" + stop_condition: "{{ 'next' not in headers['link'] }}" + page_size: 100 + page_size_option: + field_name: "per_page" + inject_into: "request_parameter" + page_token_option: + type: RequestPath + + error_handler: + type: CompositeErrorHandler + error_handlers: + - type: DefaultErrorHandler + response_filters: + - http_codes: [401] + action: FAIL + error_message: "The endpoint to access stream '{{ parameters['name'] }}' returned 401: Unauthorized. This is most likely due to wrong credentials." + - type: DefaultErrorHandler + backoff_strategies: + - type: WaitTimeFromHeader + header: Retry-After + + requester: + # This requester is used to count call credits used for requests to Freshdesk + type: CustomRequester + class_name: source_freshdesk.components.FreshdeskRequester + path: "{{ parameters['path'] or parameters['name'] }}" + url_base: "https://{{ config['domain'] }}/api/v2/" + http_method: GET + request_headers: {} + authenticator: + $ref: "#/definitions/basic_authenticator" + error_handler: + $ref: "#/definitions/error_handler" + + base_stream: + schema_loader: + $ref: "#/definitions/schema_loader" + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/requester" + record_selector: + $ref: "#/definitions/record_selector" + paginator: + $ref: "#/definitions/paginator" + + incremental_sync: + type: DatetimeBasedCursor + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%SZ" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + cursor_field: "{{ parameters['cursor_field'] }}" + start_datetime: + datetime: "{{ config.get('start_date') or day_delta(-3650, '%Y-%m-%dT%H:%M:%SZ') }}" + start_time_option: + inject_into: request_parameter + field_name: "{{ parameters['cursor_filter'] }}" + type: RequestOption + + base_incremental_stream: + $ref: "#/definitions/base_stream" + incremental_sync: + $ref: "#/definitions/incremental_sync" + + agents: + type: DeclarativeStream + $ref: "#/definitions/base_stream" + $parameters: + name: "agents" + primary_key: "id" + + business_hours: + type: DeclarativeStream + $ref: "#/definitions/base_stream" + $parameters: + name: "business_hours" + primary_key: "id" + transformations: + - type: AddFields + fields: + - path: ["working_hours"] + value: "{{ record.get('business_hours') }}" + - type: RemoveFields + field_pointers: + - ["business_hours"] + + canned_response_folders: + type: DeclarativeStream + $ref: "#/definitions/base_stream" + $parameters: + name: "canned_response_folders" + primary_key: "id" + + companies: + type: DeclarativeStream + $ref: "#/definitions/base_stream" + $parameters: + name: "companies" + primary_key: "id" + + discussion_categories: + type: DeclarativeStream + $ref: "#/definitions/base_stream" + $parameters: + name: "discussion_categories" + path: "discussions/categories" + primary_key: "id" + + email_configs: + type: DeclarativeStream + $ref: "#/definitions/base_stream" + $parameters: + name: "email_configs" + primary_key: "id" + + email_mailboxes: + type: DeclarativeStream + $ref: "#/definitions/base_stream" + $parameters: + name: "email_mailboxes" + path: "email/mailboxes" + primary_key: "id" + + groups: + type: DeclarativeStream + $ref: "#/definitions/base_stream" + $parameters: + name: "groups" + primary_key: "id" + + products: + type: DeclarativeStream + $ref: "#/definitions/base_stream" + $parameters: + name: "products" + primary_key: "id" + + roles: + type: DeclarativeStream + $ref: "#/definitions/base_stream" + $parameters: + name: "roles" + primary_key: "id" + + scenario_automations: + type: DeclarativeStream + $ref: "#/definitions/base_stream" + $parameters: + name: "scenario_automations" + primary_key: "id" + + settings: + type: DeclarativeStream + $ref: "#/definitions/base_stream" + $parameters: + name: "settings" + path: "settings/helpdesk" + primary_key: "primary_language" + + skills: + type: DeclarativeStream + $ref: "#/definitions/base_stream" + $parameters: + name: "skills" + primary_key: "id" + + sla_policies: + type: DeclarativeStream + $ref: "#/definitions/base_stream" + $parameters: + name: "sla_policies" + primary_key: "id" + + solution_categories: + type: DeclarativeStream + $ref: "#/definitions/base_stream" + $parameters: + name: "solution_categories" + path: "solutions/categories" + primary_key: "id" + + surveys: + type: DeclarativeStream + $ref: "#/definitions/base_stream" + $parameters: + name: "surveys" + primary_key: "id" + + time_entries: + type: DeclarativeStream + $ref: "#/definitions/base_stream" + $parameters: + name: "time_entries" + primary_key: "id" + + ticket_fields: + type: DeclarativeStream + $ref: "#/definitions/base_stream" + $parameters: + name: "ticket_fields" + primary_key: "id" + + contacts: + type: DeclarativeStream + $ref: "#/definitions/base_incremental_stream" + incremental_sync: + $ref: "#/definitions/incremental_sync" + start_time_option: + $ref: "#/definitions/incremental_sync/start_time_option" + field_name: "_updated_since" + $parameters: + name: "contacts" + cursor_field: "updated_at" + primary_key: "id" + cursor_filter: "_updated_since" + + satisfaction_ratings: + type: DeclarativeStream + $ref: "#/definitions/base_incremental_stream" + incremental_sync: + $ref: "#/definitions/incremental_sync" + start_time_option: + $ref: "#/definitions/incremental_sync/start_time_option" + field_name: "created_since" + lookback_window: "P{{ config['lookback_window_in_days'] or 14 }}D" + $parameters: + name: "satisfaction_ratings" + primary_key: "id" + path: "surveys/satisfaction_ratings" + cursor_field: "updated_at" + cursor_filter: "created_since" + + tickets: + type: DeclarativeStream + retriever: + $ref: "#/definitions/base_stream/retriever" + requester: + $ref: "#/definitions/base_stream/retriever/requester" + class_name: source_freshdesk.components.FreshdeskTicketsIncrementalRequester + request_parameters: + order_type: "asc" + order_by: "updated_at" + include: "description,requester,stats" + paginator: + type: "DefaultPaginator" + pagination_strategy: + type: "CustomPaginationStrategy" + class_name: source_freshdesk.components.FreshdeskTicketsPaginationStrategy + page_size: 100 + start_from_page: 1 + page_size_option: + type: "RequestOption" + inject_into: "request_parameter" + field_name: "per_page" + page_token_option: + type: "RequestOption" + inject_into: "request_parameter" + field_name: "page" + incremental_sync: + $ref: "#/definitions/incremental_sync" + type: CustomIncrementalSync + class_name: source_freshdesk.components.FreshdeskTicketsIncrementalSync + start_time_option: + $ref: "#/definitions/incremental_sync/start_time_option" + field_name: "updated_since" + $parameters: + name: "tickets" + primary_key: "id" + cursor_field: "updated_at" + cursor_filter: "updated_since" + + canned_responses: + type: DeclarativeStream + $ref: "#/definitions/base_stream" + retriever: + $ref: "#/definitions/base_stream/retriever" + requester: + $ref: "#/definitions/base_stream/retriever/requester" + path: "canned_response_folders/{{ stream_slice.id }}/responses" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - stream: "#/definitions/canned_response_folders" + parent_key: id + partition_field: id + $parameters: + name: "canned_responses" + primary_key: "id" + + conversations: + type: DeclarativeStream + $ref: "#/definitions/base_stream" + retriever: + $ref: "#/definitions/base_stream/retriever" + requester: + $ref: "#/definitions/requester" + path: "tickets/{{ stream_slice.id }}/conversations" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - stream: "#/definitions/tickets" + parent_key: id + partition_field: id + $parameters: + name: "conversations" + primary_key: "id" + + discussion_forums: + type: DeclarativeStream + $ref: "#/definitions/base_stream" + retriever: + $ref: "#/definitions/base_stream/retriever" + requester: + $ref: "#/definitions/requester" + path: "discussions/categories/{{ stream_slice.id }}/forums" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - stream: "#/definitions/discussion_categories" + parent_key: id + partition_field: id + $parameters: + name: "discussion_forums" + primary_key: "id" + + discussion_topics: + type: DeclarativeStream + $ref: "#/definitions/base_stream" + retriever: + $ref: "#/definitions/base_stream/retriever" + requester: + $ref: "#/definitions/requester" + path: "discussions/forums/{{ stream_slice.id }}/topics" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - stream: "#/definitions/discussion_forums" + parent_key: id + partition_field: id + $parameters: + name: "discussion_topics" + primary_key: "id" + + discussion_comments: + type: DeclarativeStream + $ref: "#/definitions/base_stream" + retriever: + $ref: "#/definitions/base_stream/retriever" + requester: + $ref: "#/definitions/requester" + path: "discussions/topics/{{ stream_slice.id }}/comments" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - stream: "#/definitions/discussion_topics" + parent_key: id + partition_field: id + $parameters: + name: "discussion_comments" + primary_key: "id" + + solution_folders: + type: DeclarativeStream + $ref: "#/definitions/base_stream" + retriever: + $ref: "#/definitions/base_stream/retriever" + requester: + $ref: "#/definitions/requester" + path: "solutions/categories/{{ stream_slice.id }}/folders" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - stream: "#/definitions/solution_categories" + parent_key: id + partition_field: id + transformations: + - type: AddFields + fields: + - path: ["category_id"] + value: "{{ record.get('category_id') or stream_slice.get('id') }}" + $parameters: + name: "solution_folders" + primary_key: "id" + + solution_articles: + type: DeclarativeStream + $ref: "#/definitions/base_stream" + retriever: + $ref: "#/definitions/base_stream/retriever" + requester: + $ref: "#/definitions/requester" + path: "solutions/folders/{{ stream_slice.id }}/articles" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - stream: "#/definitions/solution_folders" + parent_key: id + partition_field: id + $parameters: + name: "solution_articles" + primary_key: "id" + +streams: + - $ref: "#/definitions/agents" + - $ref: "#/definitions/business_hours" + - $ref: "#/definitions/canned_response_folders" + - $ref: "#/definitions/canned_responses" + - $ref: "#/definitions/companies" + - $ref: "#/definitions/conversations" + - $ref: "#/definitions/contacts" + - $ref: "#/definitions/discussion_categories" + - $ref: "#/definitions/discussion_comments" + - $ref: "#/definitions/discussion_forums" + - $ref: "#/definitions/discussion_topics" + - $ref: "#/definitions/email_configs" + - $ref: "#/definitions/email_mailboxes" + - $ref: "#/definitions/groups" + - $ref: "#/definitions/products" + - $ref: "#/definitions/roles" + - $ref: "#/definitions/satisfaction_ratings" + - $ref: "#/definitions/scenario_automations" + - $ref: "#/definitions/settings" + - $ref: "#/definitions/skills" + - $ref: "#/definitions/sla_policies" + - $ref: "#/definitions/solution_articles" + - $ref: "#/definitions/solution_categories" + - $ref: "#/definitions/solution_folders" + - $ref: "#/definitions/surveys" + - $ref: "#/definitions/ticket_fields" + - $ref: "#/definitions/tickets" + - $ref: "#/definitions/time_entries" diff --git a/airbyte-integrations/connectors/source-freshdesk/source_freshdesk/source.py b/airbyte-integrations/connectors/source-freshdesk/source_freshdesk/source.py index 2efc9e06dc5a3..733f11f34cb4a 100644 --- a/airbyte-integrations/connectors/source-freshdesk/source_freshdesk/source.py +++ b/airbyte-integrations/connectors/source-freshdesk/source_freshdesk/source.py @@ -2,94 +2,10 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # -import logging -from typing import Any, List, Mapping -from airbyte_cdk.sources import AbstractSource -from airbyte_cdk.sources.declarative.checks import CheckStream -from airbyte_cdk.sources.streams import Stream -from requests.auth import HTTPBasicAuth -from source_freshdesk.streams import ( - Agents, - BusinessHours, - CannedResponseFolders, - CannedResponses, - Companies, - Contacts, - Conversations, - DiscussionCategories, - DiscussionComments, - DiscussionForums, - DiscussionTopics, - EmailConfigs, - EmailMailboxes, - Groups, - Products, - Roles, - SatisfactionRatings, - ScenarioAutomations, - Settings, - Skills, - SlaPolicies, - SolutionArticles, - SolutionCategories, - SolutionFolders, - Surveys, - TicketFields, - Tickets, - TimeEntries, -) +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource -class FreshdeskAuth(HTTPBasicAuth): - def __init__(self, api_key: str) -> None: - """ - Freshdesk expects the user to provide an api_key. Any string can be used as password: - https://developers.freshdesk.com/api/#authentication - """ - super().__init__(username=api_key, password="unused_with_api_key") - - -class SourceFreshdesk(AbstractSource): - @staticmethod - def _get_stream_kwargs(config: Mapping[str, Any]) -> dict: - return {"authenticator": FreshdeskAuth(config["api_key"]), "config": config} - - def check_connection(self, logger: logging.Logger, config: Mapping[str, Any]): - try: - check_stream = CheckStream(stream_names=["settings"], parameters={}) - return check_stream.check_connection(self, logger, config) - except Exception as error: - return False, repr(error) - - def streams(self, config: Mapping[str, Any]) -> List[Stream]: - return [ - Agents(**self._get_stream_kwargs(config)), - BusinessHours(**self._get_stream_kwargs(config)), - CannedResponseFolders(**self._get_stream_kwargs(config)), - CannedResponses(**self._get_stream_kwargs(config)), - Companies(**self._get_stream_kwargs(config)), - Contacts(**self._get_stream_kwargs(config)), - Conversations(**self._get_stream_kwargs(config)), - DiscussionCategories(**self._get_stream_kwargs(config)), - DiscussionComments(**self._get_stream_kwargs(config)), - DiscussionForums(**self._get_stream_kwargs(config)), - DiscussionTopics(**self._get_stream_kwargs(config)), - EmailConfigs(**self._get_stream_kwargs(config)), - EmailMailboxes(**self._get_stream_kwargs(config)), - Groups(**self._get_stream_kwargs(config)), - Products(**self._get_stream_kwargs(config)), - Roles(**self._get_stream_kwargs(config)), - ScenarioAutomations(**self._get_stream_kwargs(config)), - Settings(**self._get_stream_kwargs(config)), - Skills(**self._get_stream_kwargs(config)), - SlaPolicies(**self._get_stream_kwargs(config)), - SolutionArticles(**self._get_stream_kwargs(config)), - SolutionCategories(**self._get_stream_kwargs(config)), - SolutionFolders(**self._get_stream_kwargs(config)), - TimeEntries(**self._get_stream_kwargs(config)), - TicketFields(**self._get_stream_kwargs(config)), - Tickets(**self._get_stream_kwargs(config)), - SatisfactionRatings(**self._get_stream_kwargs(config)), - Surveys(**self._get_stream_kwargs(config)), - ] +class SourceFreshdesk(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "manifest.yaml"}) diff --git a/airbyte-integrations/connectors/source-freshdesk/source_freshdesk/spec.json b/airbyte-integrations/connectors/source-freshdesk/source_freshdesk/spec.json deleted file mode 100644 index a1113b9a26f1d..0000000000000 --- a/airbyte-integrations/connectors/source-freshdesk/source_freshdesk/spec.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.com/integrations/sources/freshdesk", - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Freshdesk Spec", - "type": "object", - "required": ["domain", "api_key"], - "additionalProperties": true, - "properties": { - "domain": { - "type": "string", - "description": "Freshdesk domain", - "title": "Domain", - "examples": ["myaccount.freshdesk.com"], - "pattern": "^[a-zA-Z0-9._-]*\\.freshdesk\\.com$" - }, - "api_key": { - "type": "string", - "title": "API Key", - "description": "Freshdesk API Key. See the docs for more information on how to obtain this key.", - "airbyte_secret": true - }, - "requests_per_minute": { - "title": "Requests per minute", - "type": "integer", - "description": "The number of requests per minute that this source allowed to use. There is a rate limit of 50 requests per minute per app per account." - }, - "start_date": { - "title": "Start Date", - "type": "string", - "description": "UTC date and time. Any data created after this date will be replicated. If this parameter is not set, all data will be replicated.", - "format": "date-time", - "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$", - "examples": ["2020-12-01T00:00:00Z"] - } - } - } -} diff --git a/airbyte-integrations/connectors/source-freshdesk/source_freshdesk/streams.py b/airbyte-integrations/connectors/source-freshdesk/source_freshdesk/streams.py deleted file mode 100644 index 463cefae85173..0000000000000 --- a/airbyte-integrations/connectors/source-freshdesk/source_freshdesk/streams.py +++ /dev/null @@ -1,396 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import logging -import re -from abc import ABC -from typing import Any, Iterable, List, Mapping, MutableMapping, Optional -from urllib import parse - -import pendulum -import requests -from airbyte_cdk.models import SyncMode -from airbyte_cdk.sources.streams.availability_strategy import AvailabilityStrategy -from airbyte_cdk.sources.streams.core import IncrementalMixin -from airbyte_cdk.sources.streams.http import HttpStream, HttpSubStream -from airbyte_cdk.sources.utils.transform import TransformConfig, TypeTransformer -from requests.auth import AuthBase -from source_freshdesk.availability_strategy import FreshdeskAvailabilityStrategy -from source_freshdesk.utils import CallCredit - -logger = logging.getLogger("airbyte") - - -class FreshdeskStream(HttpStream, ABC): - """Basic stream API that allows to iterate over entities""" - - call_credit = 1 # see https://developers.freshdesk.com/api/#embedding - result_return_limit = 100 - primary_key = "id" - link_regex = re.compile(r'<(.*?)>;\s*rel="next"') - raise_on_http_errors = True - forbidden_stream = False - - # regestring the default schema transformation - transformer: TypeTransformer = TypeTransformer(TransformConfig.DefaultSchemaNormalization) - - def __init__(self, authenticator: AuthBase, config: Mapping[str, Any], *args, **kwargs): - super().__init__(authenticator=authenticator) - requests_per_minute = config.get("requests_per_minute") - self.domain = config["domain"] - self._call_credit = CallCredit(balance=requests_per_minute) if requests_per_minute else None - # By default, only tickets that have been created within the past 30 days will be returned. - # Since this logic rely not on updated tickets, it can break tickets dependant streams - conversations. - # So updated_since parameter will be always used in tickets streams. And start_date will be used too - # with default value 30 days look back. - self.start_date = config.get("start_date") or (pendulum.now() - pendulum.duration(days=30)).strftime("%Y-%m-%dT%H:%M:%SZ") - - @property - def url_base(self) -> str: - return parse.urljoin(f"https://{self.domain.rstrip('/')}", "/api/v2/") - - @property - def availability_strategy(self) -> Optional[AvailabilityStrategy]: - return FreshdeskAvailabilityStrategy() - - def backoff_time(self, response: requests.Response) -> Optional[float]: - if response.status_code == requests.codes.too_many_requests: - return float(response.headers.get("Retry-After", 0)) - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - link_header = response.headers.get("Link") - if not link_header: - return {} - match = self.link_regex.search(link_header) - next_url = match.group(1) - params = parse.parse_qs(parse.urlparse(next_url).query) - return self.parse_link_params(link_query_params=params) - - def parse_link_params(self, link_query_params: Mapping[str, List[str]]) -> Mapping[str, Any]: - return {"per_page": link_query_params["per_page"][0], "page": link_query_params["page"][0]} - - def request_params( - self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None - ) -> MutableMapping[str, Any]: - params = {"per_page": self.result_return_limit} - if next_page_token and "page" in next_page_token: - params["page"] = next_page_token["page"] - return params - - def _consume_credit(self, credit): - """Consume call credit, if there is no credit left within current window will sleep til next period""" - if self._call_credit: - self._call_credit.consume(credit) - - def read_records( - self, - sync_mode: SyncMode, - cursor_field: List[str] = None, - stream_slice: Mapping[str, Any] = None, - stream_state: Mapping[str, Any] = None, - ) -> Iterable[Mapping[str, Any]]: - self._consume_credit(self.call_credit) - yield from super().read_records( - sync_mode=sync_mode, cursor_field=cursor_field, stream_slice=stream_slice, stream_state=stream_state - ) - - def parse_response(self, response: requests.Response, **kwargs) -> Iterable[MutableMapping]: - if self.forbidden_stream: - return [] - return response.json() or [] - - def should_retry(self, response: requests.Response) -> bool: - if response.status_code == requests.codes.FORBIDDEN: - # Issue: https://github.com/airbytehq/airbyte/issues/26717 - # we should skip the stream if subscription level had changed during sync - self.forbidden_stream = True - setattr(self, "raise_on_http_errors", False) - logger.warning(f"Stream `{self.name}` is not available. {response.text}") - return super().should_retry(response) - - -class IncrementalFreshdeskStream(FreshdeskStream, IncrementalMixin): - - cursor_filter = "updated_since" # Name of filter that corresponds to the state - state_checkpoint_interval = 100 - - def __init__(self, authenticator: AuthBase, config: Mapping[str, Any], *args, **kwargs): - super().__init__(authenticator=authenticator, config=config, *args, **kwargs) - self._cursor_value = "" - - @property - def cursor_field(self) -> str: - return "updated_at" - - @property - def state(self) -> MutableMapping[str, Any]: - return {self.cursor_field: self._cursor_value} if self._cursor_value else {} - - @state.setter - def state(self, value: MutableMapping[str, Any]): - self._cursor_value = value.get(self.cursor_field, self.start_date) - - def request_params( - self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None - ) -> MutableMapping[str, Any]: - params = super().request_params(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token) - params[self.cursor_filter] = stream_state.get(self.cursor_field, self.start_date) - return params - - def read_records( - self, - sync_mode: SyncMode, - cursor_field: List[str] = None, - stream_slice: Mapping[str, Any] = None, - stream_state: Mapping[str, Any] = None, - ) -> Iterable[Mapping[str, Any]]: - for record in super().read_records( - sync_mode=sync_mode, cursor_field=cursor_field, stream_slice=stream_slice, stream_state=stream_state - ): - yield record - self._cursor_value = max(record[self.cursor_field], self._cursor_value) - - -class Agents(FreshdeskStream): - def path(self, **kwargs) -> str: - return "agents" - - -class BusinessHours(FreshdeskStream): - def path(self, **kwargs) -> str: - return "business_hours" - - def parse_response(self, response: requests.Response, **kwargs) -> Iterable[MutableMapping]: - for record in super().parse_response(response, **kwargs): - record["working_hours"] = record.pop("business_hours", None) - yield record - - -class CannedResponseFolders(FreshdeskStream): - def path(self, **kwargs) -> str: - return "canned_response_folders" - - -class CannedResponses(HttpSubStream, FreshdeskStream): - def __init__(self, authenticator: AuthBase, config: Mapping[str, Any], **kwargs): - super().__init__( - authenticator=authenticator, config=config, parent=CannedResponseFolders(authenticator=authenticator, config=config, **kwargs) - ) - - def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: - return f"canned_response_folders/{stream_slice['parent']['id']}/responses" - - -class Companies(FreshdeskStream): - def path(self, **kwargs) -> str: - return "companies" - - -class Contacts(IncrementalFreshdeskStream): - cursor_filter = "_updated_since" - - def path(self, **kwargs) -> str: - return "contacts" - - -class DiscussionCategories(FreshdeskStream): - def path(self, **kwargs) -> str: - return "discussions/categories" - - -class DiscussionForums(HttpSubStream, FreshdeskStream): - def __init__(self, authenticator: AuthBase, config: Mapping[str, Any], **kwargs): - super().__init__( - authenticator=authenticator, config=config, parent=DiscussionCategories(authenticator=authenticator, config=config, **kwargs) - ) - - def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: - return f"discussions/categories/{stream_slice['parent']['id']}/forums" - - -class DiscussionTopics(HttpSubStream, FreshdeskStream): - def __init__(self, authenticator: AuthBase, config: Mapping[str, Any], **kwargs): - super().__init__( - authenticator=authenticator, config=config, parent=DiscussionForums(authenticator=authenticator, config=config, **kwargs) - ) - - def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: - return f"discussions/forums/{stream_slice['parent']['id']}/topics" - - -class DiscussionComments(HttpSubStream, FreshdeskStream): - def __init__(self, authenticator: AuthBase, config: Mapping[str, Any], **kwargs): - super().__init__( - authenticator=authenticator, config=config, parent=DiscussionTopics(authenticator=authenticator, config=config, **kwargs) - ) - - def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: - return f"discussions/topics/{stream_slice['parent']['id']}/comments" - - -class EmailConfigs(FreshdeskStream): - def path(self, **kwargs) -> str: - return "email_configs" - - -class EmailMailboxes(FreshdeskStream): - def path(self, **kwargs) -> str: - return "email/mailboxes" - - -class Groups(FreshdeskStream): - def path(self, **kwargs) -> str: - return "groups" - - -class Products(FreshdeskStream): - def path(self, **kwargs) -> str: - return "products" - - -class Roles(FreshdeskStream): - def path(self, **kwargs) -> str: - return "roles" - - -class ScenarioAutomations(FreshdeskStream): - def path(self, **kwargs) -> str: - return "scenario_automations" - - -class Settings(FreshdeskStream): - primary_key = "primary_language" - - def path(self, **kwargs) -> str: - return "settings/helpdesk" - - def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - yield response.json() - - -class Skills(FreshdeskStream): - def path(self, **kwargs) -> str: - return "skills" - - -class SlaPolicies(FreshdeskStream): - def path(self, **kwargs) -> str: - return "sla_policies" - - -class SolutionCategories(FreshdeskStream): - def path(self, **kwargs) -> str: - return "solutions/categories" - - -class SolutionFolders(HttpSubStream, FreshdeskStream): - def __init__(self, authenticator: AuthBase, config: Mapping[str, Any], **kwargs): - super().__init__( - authenticator=authenticator, config=config, parent=SolutionCategories(authenticator=authenticator, config=config, **kwargs) - ) - - def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: - return f"solutions/categories/{stream_slice['parent']['id']}/folders" - - def parse_response(self, response: requests.Response, stream_slice: Mapping[str, Any] = None, **kwargs) -> Iterable[Mapping]: - records = response.json() - category_id = stream_slice["parent"]["id"] - for record in records: - record.setdefault("category_id", category_id) - yield record - - -class SolutionArticles(HttpSubStream, FreshdeskStream): - def __init__(self, authenticator: AuthBase, config: Mapping[str, Any], **kwargs): - super().__init__( - authenticator=authenticator, config=config, parent=SolutionFolders(authenticator=authenticator, config=config, **kwargs) - ) - - def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: - return f"solutions/folders/{stream_slice['parent']['id']}/articles" - - -class TimeEntries(FreshdeskStream): - def path(self, **kwargs) -> str: - return "time_entries" - - -class TicketFields(FreshdeskStream): - def path(self, **kwargs) -> str: - return "ticket_fields" - - -class Tickets(IncrementalFreshdeskStream): - ticket_paginate_limit = 300 - call_credit = 3 # each include consumes 2 additional credits - use_cache = True - - def path(self, **kwargs) -> str: - return "tickets" - - def request_params( - self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None - ) -> MutableMapping[str, Any]: - params = super().request_params(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token) - includes = ["description", "requester", "stats"] - params.update( - {"order_type": "asc", "order_by": self.cursor_field, "include": ",".join(includes)} # ASC order, to get the old records first - ) - if next_page_token and self.cursor_filter in next_page_token: - params[self.cursor_filter] = next_page_token[self.cursor_filter] - return params - - def parse_link_params(self, link_query_params: Mapping[str, List[str]]) -> Mapping[str, Any]: - params = super().parse_link_params(link_query_params) - if self.cursor_filter in link_query_params: - params[self.cursor_filter] = link_query_params[self.cursor_filter][0] - return params - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - """ - This block extends Incremental stream to overcome '300 page' server error. - Since the Ticket endpoint has a 300 page pagination limit, after 300 pages, update the parameters with - query using 'updated_since' = last_record, if there is more data remaining. - """ - next_page_token = super().next_page_token(response=response) - - if next_page_token and int(next_page_token["page"]) > self.ticket_paginate_limit: - # get last_record from latest batch, pos. -1, because of ACS order of records - last_record_updated_at = response.json()[-1]["updated_at"] - # updating request parameters with last_record state - next_page_token[self.cursor_filter] = last_record_updated_at - next_page_token.pop("page") - - return next_page_token - - -class Conversations(FreshdeskStream): - """Notes and Replies""" - - def __init__(self, authenticator: AuthBase, config: Mapping[str, Any], *args, **kwargs): - super().__init__(authenticator=authenticator, config=config, args=args, kwargs=kwargs) - self.tickets_stream = Tickets(authenticator=authenticator, config=config) - - def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: - return f"tickets/{stream_slice['id']}/conversations" - - def stream_slices( - self, *, sync_mode: SyncMode, cursor_field: List[str] = None, stream_state: Mapping[str, Any] = None - ) -> Iterable[Optional[Mapping[str, Any]]]: - for ticket in self.tickets_stream.read_records( - sync_mode=SyncMode.full_refresh, cursor_field=cursor_field, stream_slice={}, stream_state={} - ): - yield {"id": ticket["id"]} - - -class SatisfactionRatings(IncrementalFreshdeskStream): - cursor_filter = "created_since" - - def path(self, **kwargs) -> str: - return "surveys/satisfaction_ratings" - - -class Surveys(FreshdeskStream): - def path(self, **kwargs) -> str: - return "surveys" diff --git a/airbyte-integrations/connectors/source-freshdesk/unit_tests/conftest.py b/airbyte-integrations/connectors/source-freshdesk/unit_tests/conftest.py index dcfce4871d990..617f00bb1f29c 100644 --- a/airbyte-integrations/connectors/source-freshdesk/unit_tests/conftest.py +++ b/airbyte-integrations/connectors/source-freshdesk/unit_tests/conftest.py @@ -1,9 +1,9 @@ # # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # - import pytest from requests.auth import HTTPBasicAuth +from source_freshdesk.source import SourceFreshdesk @pytest.fixture(name="config") @@ -14,3 +14,17 @@ def config_fixture(): @pytest.fixture(name="authenticator") def authenticator_fixture(config): return HTTPBasicAuth(username=config["api_key"], password="unused_with_api_key") + + +def find_stream(stream_name, config): + streams = SourceFreshdesk().streams(config=config) + + # cache should be disabled once this issue is fixed https://github.com/airbytehq/airbyte-internal-issues/issues/6513 + for stream in streams: + stream.retriever.requester.use_cache = True + + # find by name + for stream in streams: + if stream.name == stream_name: + return stream + raise ValueError(f"Stream {stream_name} not found") diff --git a/airbyte-integrations/connectors/source-freshdesk/unit_tests/test_300_page.py b/airbyte-integrations/connectors/source-freshdesk/unit_tests/test_300_page.py index ea13ea2a86601..982a82aa8163b 100644 --- a/airbyte-integrations/connectors/source-freshdesk/unit_tests/test_300_page.py +++ b/airbyte-integrations/connectors/source-freshdesk/unit_tests/test_300_page.py @@ -4,79 +4,83 @@ import pytest from airbyte_cdk.models import SyncMode -from source_freshdesk.streams import Tickets +from conftest import find_stream @pytest.fixture(name="responses") def responses_fixtures(): return [ { - "url": "/api/v2/tickets?per_page=1&updated_since=2002-02-10T22%3A21%3A44Z", + "url": "https://test.freshdesk.com/api/v2/tickets?per_page=1&updated_since=2002-02-10T22%3A21%3A44Z", "json": [{"id": 1, "updated_at": "2018-01-02T00:00:00Z"}], "headers": { "Link": '; rel="next"' }, }, { - "url": "/api/v2/tickets?per_page=1&page=2&updated_since=2002-02-10T22%3A21%3A44Z", + "url": "https://test.freshdesk.com/api/v2/tickets?per_page=1&page=2&updated_since=2002-02-10T22%3A21%3A44Z", "json": [{"id": 2, "updated_at": "2018-02-02T00:00:00Z"}], "headers": { "Link": '; rel="next"' }, }, { - "url": "/api/v2/tickets?per_page=1&updated_since=2018-02-02T00%3A00%3A00Z", + "url": "https://test.freshdesk.com/api/v2/tickets?per_page=1&updated_since=2018-02-02T00%3A00%3A00Z", "json": [{"id": 2, "updated_at": "2018-02-02T00:00:00Z"}], "headers": { "Link": '; rel="next"' }, }, { - "url": "/api/v2/tickets?per_page=1&page=2&updated_since=2018-02-02T00%3A00%3A00Z", + "url": "https://test.freshdesk.com/api/v2/tickets?per_page=1&page=2&updated_since=2018-02-02T00%3A00%3A00Z", "json": [{"id": 3, "updated_at": "2018-03-02T00:00:00Z"}], "headers": { "Link": '; rel="next"' }, }, { - "url": "/api/v2/tickets?per_page=1&updated_since=2018-03-02T00%3A00%3A00Z", + "url": "https://test.freshdesk.com/api/v2/tickets?per_page=1&updated_since=2018-03-02T00%3A00%3A00Z", "json": [{"id": 3, "updated_at": "2018-03-02T00:00:00Z"}], "headers": { "Link": '; rel="next"' }, }, { - "url": "/api/v2/tickets?per_page=1&page=2&updated_since=2018-03-02T00%3A00%3A00Z", + "url": "https://test.freshdesk.com/api/v2/tickets?per_page=1&page=2&updated_since=2018-03-02T00%3A00%3A00Z", "json": [{"id": 4, "updated_at": "2019-01-03T00:00:00Z"}], "headers": { "Link": '; rel="next"' }, }, { - "url": "/api/v2/tickets?per_page=1&updated_since=2019-01-03T00%3A00%3A00Z", + "url": "https://test.freshdesk.com/api/v2/tickets?per_page=1&updated_since=2019-01-03T00%3A00%3A00Z", "json": [{"id": 4, "updated_at": "2019-01-03T00:00:00Z"}], "headers": { "Link": '; rel="next"' }, }, { - "url": "/api/v2/tickets?per_page=1&page=2&updated_since=2019-01-03T00%3A00%3A00Z", + "url": "https://test.freshdesk.com/api/v2/tickets?per_page=1&page=2&updated_since=2019-01-03T00%3A00%3A00Z", "json": [{"id": 5, "updated_at": "2019-02-03T00:00:00Z"}], "headers": { "Link": '; rel="next"' }, }, { - "url": "/api/v2/tickets?per_page=1&updated_since=2019-02-03T00%3A00%3A00Z", + "url": "https://test.freshdesk.com/api/v2/tickets?per_page=1&updated_since=2019-02-03T00%3A00%3A00Z", "json": [{"id": 5, "updated_at": "2019-02-03T00:00:00Z"}], "headers": { "Link": '; rel="next"' }, }, { - "url": "/api/v2/tickets?per_page=1&page=2&updated_since=2019-02-03T00%3A00%3A00Z", + "url": "https://test.freshdesk.com/api/v2/tickets?per_page=1&page=2&updated_since=2019-02-03T00%3A00%3A00Z", "json": [{"id": 6, "updated_at": "2019-03-03T00:00:00Z"}], }, + { + "url": "https://test.freshdesk.com/api/v2/tickets?per_page=1&updated_since=2019-03-03T00%3A00%3A00Z", + "json": [], + }, ] @@ -113,16 +117,14 @@ def test_not_all_records(self, requests_mock, authenticator, config, responses): {"id": 6, "updated_at": "2019-03-03T00:00:00Z"}, ] + # Create test_stream instance. + test_stream = find_stream("tickets", config) + # INT value of page number where the switch state should be triggered. # in this test case values from: 1 - 4, assuming we want to switch state on this page. - ticket_paginate_limit = 2 + test_stream.retriever.paginator.pagination_strategy.PAGE_LIMIT = 2 # This parameter mocks the "per_page" parameter in the API Call - result_return_limit = 1 - - # Create test_stream instance. - test_stream = Tickets(authenticator=authenticator, config=config) - test_stream.ticket_paginate_limit = ticket_paginate_limit - test_stream.result_return_limit = result_return_limit + test_stream.retriever.paginator.pagination_strategy._page_size = 1 # Mocking Request for response in responses: @@ -133,7 +135,10 @@ def test_not_all_records(self, requests_mock, authenticator, config, responses): headers=response.get("headers", {}), ) - records = list(test_stream.read_records(sync_mode=SyncMode.full_refresh)) + records = [] + for slice in test_stream.stream_slices(sync_mode=SyncMode.full_refresh): + records_generator = test_stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice=slice) + records.extend([dict(record) for record in records_generator]) # We're expecting 6 records to return from the tickets_stream assert records == expected_output diff --git a/airbyte-integrations/connectors/source-freshdesk/unit_tests/test_incremental_sync.py b/airbyte-integrations/connectors/source-freshdesk/unit_tests/test_incremental_sync.py new file mode 100644 index 0000000000000..619ad510a5065 --- /dev/null +++ b/airbyte-integrations/connectors/source-freshdesk/unit_tests/test_incremental_sync.py @@ -0,0 +1,32 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + + +import pytest +from airbyte_cdk.sources.declarative.requesters.request_option import RequestOptionType +from source_freshdesk.components import FreshdeskTicketsIncrementalSync + + +class TestFreshdeskTicketsIncrementalSync: + @pytest.mark.parametrize( + "stream_state, stream_slice, next_page_token, expected_params", + [ + ({}, {"partition_field_start": "2022-01-01"}, {"next_page_token": 1}, {"partition_field_start": "2022-01-01"}), + ({}, {"partition_field_start": "2021-01-01"}, {"next_page_token": "2022-01-01"}, {"partition_field_start": "2022-01-01"}), + ], + ) + def test_initialization_and_inheritance(self, mocker, stream_state, stream_slice, next_page_token, expected_params): + sync = FreshdeskTicketsIncrementalSync("2022-01-01", "updated_at", "%Y-%m-%d", {}, {}) + + # Setup mock for start_time_option.field_name.eval + mock_field_name = mocker.MagicMock() + mock_field_name.eval.return_value = "partition_field_start" + + mock_start_time_option_field_name = mocker.patch.object(sync, "start_time_option") + mock_start_time_option_field_name.field_name = mock_field_name + mock_start_time_option_field_name.inject_into = RequestOptionType("request_parameter") + + mock_partition_field_start = mocker.patch.object(sync, "_partition_field_start") + mock_partition_field_start.eval.return_value = "partition_field_start" + + params = sync.get_request_params(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token) + assert params == expected_params diff --git a/airbyte-integrations/connectors/source-freshdesk/unit_tests/test_pagination_strategy.py b/airbyte-integrations/connectors/source-freshdesk/unit_tests/test_pagination_strategy.py new file mode 100644 index 0000000000000..4cdcf74fe6c1e --- /dev/null +++ b/airbyte-integrations/connectors/source-freshdesk/unit_tests/test_pagination_strategy.py @@ -0,0 +1,37 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +import pytest +import requests +from source_freshdesk.components import FreshdeskTicketsPaginationStrategy + + +class TestFreshdeskTicketsPaginationStrategy: + + # returns None when there are fewer records than the page size + @pytest.mark.parametrize( + "response, current_page, last_records, expected", + [ + (requests.Response(), 1, [], None), # No records + (requests.Response(), 1, [1, 2, 3], None), # Fewer records than page size + (requests.Response(), 3, [1, 2, 3, 4], 4), # Page size records + ( + requests.Response(), + 6, + [ + {"updated_at": "2022-01-01"}, + {"updated_at": "2022-01-02"}, + {"updated_at": "2022-01-03"}, + {"updated_at": "2022-01-03"}, + {"updated_at": "2022-01-05"}, + ], + "2022-01-05", + ), # Page limit is hit + ], + ) + def test_returns_none_when_fewer_records_than_page_size(self, response, current_page, last_records, expected, config): + pagination_strategy = FreshdeskTicketsPaginationStrategy(config=config, page_size=4, parameters={}) + pagination_strategy.PAGE_LIMIT = 5 + pagination_strategy._page = current_page + assert pagination_strategy.next_page_token(response, last_records) == expected diff --git a/airbyte-integrations/connectors/source-freshdesk/unit_tests/test_requester.py b/airbyte-integrations/connectors/source-freshdesk/unit_tests/test_requester.py new file mode 100644 index 0000000000000..c9a527df5512d --- /dev/null +++ b/airbyte-integrations/connectors/source-freshdesk/unit_tests/test_requester.py @@ -0,0 +1,78 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from unittest.mock import MagicMock, patch + +import pytest +from source_freshdesk.components import FreshdeskRequester, FreshdeskTicketsIncrementalRequester + + +@pytest.mark.parametrize( + "requests_per_minute, expected_call_credit_cost", + [ + (None, None), # No call to CallCredit.consume expected when requests_per_minute is None + (60, 1), # CallCredit.consume called with cost 1 when requests_per_minute is set + ], +) +def test_sends_request_with_default_parameters_and_receives_response(requests_per_minute, expected_call_credit_cost): + config = {"requests_per_minute": requests_per_minute} if requests_per_minute is not None else {} + parameters = {} + + # Patch CallCredit to monitor calls to its consume method + with patch("source_freshdesk.components.CallCredit") as MockCallCredit: + mock_call_credit_instance = MagicMock() + MockCallCredit.return_value = mock_call_credit_instance + + requester = FreshdeskRequester( + name="agents", url_base="https://freshdesk.com", path="/api/v2", parameters=parameters, config=config + ) + + # Patch the HttpRequester.send_request to prevent actual HTTP requests + with patch("source_freshdesk.components.HttpRequester.send_request", return_value=MagicMock()): + response = requester.send_request() + + # If requests_per_minute is None, _call_credit should not be created, thus CallCredit.consume should not be called + if expected_call_credit_cost is None: + mock_call_credit_instance.consume.assert_not_called() + else: + mock_call_credit_instance.consume.assert_called_once_with(expected_call_credit_cost) + + assert response is not None + + +@pytest.mark.parametrize( + "request_params, expected_modified_params, expected_call_credit_cost, requests_per_minute, consume_expected", + [ + ({"page": "1991-08-24"}, {}, 3, 60, True), # Rate limiting applied, expect _call_credit.consume to be called + ({"page": 1}, {"page": 1}, 3, 60, True), # Rate limiting applied, expect _call_credit.consume to be called + ({"page": "1991-08-24"}, {}, 3, None, False), # No rate limiting, do not expect _call_credit.consume to be called + ({"page": 1}, {"page": 1}, 3, None, False), # No rate limiting, do not expect _call_credit.consume to be called + ], +) +def test_freshdesk_tickets_incremental_requester_send_request( + request_params, expected_modified_params, expected_call_credit_cost, requests_per_minute, consume_expected +): + config = {"requests_per_minute": requests_per_minute} if requests_per_minute is not None else {} + + # Mock CallCredit to monitor calls to its consume method + with patch("source_freshdesk.components.CallCredit") as mock_call_credit: + mock_call_credit_instance = MagicMock() + mock_call_credit.return_value = mock_call_credit_instance + + # Initialize the requester with mock config + requester = FreshdeskTicketsIncrementalRequester( + name="tickets", url_base="https://example.com", path="/api/v2/tickets", parameters={}, config=config + ) + + # Patch the HttpRequester.send_request to prevent actual HTTP requests + with patch("source_freshdesk.components.HttpRequester.send_request", return_value=MagicMock()) as mock_super_send_request: + # Call send_request with test parameters + requester.send_request(request_params=request_params) + + # Check if _consume_credit was correctly handled based on requests_per_minute + if consume_expected: + mock_call_credit_instance.consume.assert_called_once_with(expected_call_credit_cost) + else: + mock_call_credit_instance.consume.assert_not_called() + + # Check if HttpRequester.send_request was called with the modified request_params + mock_super_send_request.assert_called_once_with(request_params=expected_modified_params) diff --git a/airbyte-integrations/connectors/source-freshdesk/unit_tests/test_source.py b/airbyte-integrations/connectors/source-freshdesk/unit_tests/test_source.py deleted file mode 100644 index 15cece664b686..0000000000000 --- a/airbyte-integrations/connectors/source-freshdesk/unit_tests/test_source.py +++ /dev/null @@ -1,60 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import logging - -from source_freshdesk import SourceFreshdesk - -logger = logging.getLogger("test_source") - - -def test_check_connection_ok(requests_mock, config): - json_resp = {"primary_language": "en", "supported_languages": [], "portal_languages": []} - - requests_mock.register_uri("GET", "/api/v2/settings/helpdesk", json=json_resp) - ok, error_msg = SourceFreshdesk().check_connection(logger, config=config) - - assert ok and not error_msg - - -def test_check_connection_invalid_api_key(requests_mock, config): - responses = [ - {"json": {"code": "invalid_credentials", "message": "You have to be logged in to perform this action."}, "status_code": 401} - ] - - requests_mock.register_uri("GET", "/api/v2/settings/helpdesk", responses) - ok, error_msg = SourceFreshdesk().check_connection(logger, config=config) - assert not ok - assert ( - "The endpoint to access stream 'settings' returned 401: Unauthorized. This is most likely due to wrong credentials. " in error_msg - ) - assert "You have to be logged in to perform this action." in error_msg - - -def test_check_connection_empty_config(config): - config = {} - - ok, error_msg = SourceFreshdesk().check_connection(logger, config=config) - - assert not ok and error_msg - - -def test_check_connection_invalid_config(config): - config.pop("api_key") - - ok, error_msg = SourceFreshdesk().check_connection(logger, config=config) - - assert not ok and error_msg - - -def test_check_connection_exception(requests_mock, config): - ok, error_msg = SourceFreshdesk().check_connection(logger, config=config) - - assert not ok and error_msg - - -def test_streams(config): - streams = SourceFreshdesk().streams(config) - - assert len(streams) == 28 diff --git a/airbyte-integrations/connectors/source-freshdesk/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-freshdesk/unit_tests/test_streams.py index 130e2eabd4eeb..1e5a1444574c3 100644 --- a/airbyte-integrations/connectors/source-freshdesk/unit_tests/test_streams.py +++ b/airbyte-integrations/connectors/source-freshdesk/unit_tests/test_streams.py @@ -4,46 +4,11 @@ import random from typing import Any, MutableMapping -from unittest.mock import PropertyMock import pytest from airbyte_cdk.models import SyncMode from airbyte_cdk.sources.streams import Stream -from source_freshdesk.streams import ( - Agents, - BusinessHours, - CannedResponseFolders, - CannedResponses, - Companies, - Contacts, - Conversations, - DiscussionCategories, - DiscussionComments, - DiscussionForums, - DiscussionTopics, - EmailConfigs, - EmailMailboxes, - Groups, - Products, - Roles, - SatisfactionRatings, - ScenarioAutomations, - Settings, - Skills, - SlaPolicies, - SolutionArticles, - SolutionCategories, - SolutionFolders, - Surveys, - TicketFields, - Tickets, - TimeEntries, -) - - -@pytest.fixture(autouse=True) -def mock_tickets_use_cache(mocker): - mocker.patch("source_freshdesk.streams.Tickets.use_cache", new_callable=PropertyMock, return_value=False) +from conftest import find_stream def _read_full_refresh(stream_instance: Stream): @@ -65,69 +30,73 @@ def _read_incremental(stream_instance: Stream, stream_state: MutableMapping[str, @pytest.mark.parametrize( - "stream, resource", + "stream_name, resource", [ - (Agents, "agents"), - (Companies, "companies"), - (Contacts, "contacts"), - (Groups, "groups"), - (Roles, "roles"), - (Skills, "skills"), - (TimeEntries, "time_entries"), - (SatisfactionRatings, "surveys/satisfaction_ratings"), - (BusinessHours, "business_hours"), - (CannedResponseFolders, "canned_response_folders"), - (DiscussionCategories, "discussions/categories"), - (EmailConfigs, "email_configs"), - (EmailMailboxes, "email/mailboxes"), - (Products, "products"), - (ScenarioAutomations, "scenario_automations"), - (SlaPolicies, "sla_policies"), - (SolutionCategories, "solutions/categories"), - (TicketFields, "ticket_fields"), - (Surveys, "surveys"), + ("agents", "agents"), + ("companies", "companies"), + ("contacts", "contacts"), + ("groups", "groups"), + ("roles", "roles"), + ("skills", "skills"), + ("time_entries", "time_entries"), + ("satisfaction_ratings", "surveys/satisfaction_ratings"), + ("business_hours", "business_hours"), + ("canned_response_folders", "canned_response_folders"), + ("discussion_categories", "discussions/categories"), + ("email_configs", "email_configs"), + ("email_mailboxes", "email/mailboxes"), + ("products", "products"), + ("scenario_automations", "scenario_automations"), + ("sla_policies", "sla_policies"), + ("solution_categories", "solutions/categories"), + ("ticket_fields", "ticket_fields"), + ("surveys", "surveys"), ], ) -def test_full_refresh(stream, resource, authenticator, config, requests_mock): +def test_full_refresh(stream_name, resource, config, requests_mock): + stream = find_stream(stream_name, config) requests_mock.register_uri("GET", f"/api/v2/{resource}", json=[{"id": x, "updated_at": "2022-05-05T00:00:00Z"} for x in range(25)]) - stream = stream(authenticator=authenticator, config=config) records = _read_full_refresh(stream) assert len(records) == 25 +# skipped due to https://github.com/airbytehq/airbyte-internal-issues/issues/6314 +@pytest.mark.skip def test_full_refresh_conversations(authenticator, config, requests_mock): requests_mock.register_uri("GET", "/api/v2/tickets", json=[{"id": x, "updated_at": "2022-05-05T00:00:00Z"} for x in range(5)]) for i in range(5): requests_mock.register_uri("GET", f"/api/v2/tickets/{i}/conversations", json=[{"id": x} for x in range(10)]) - stream = Conversations(authenticator=authenticator, config=config) + stream = find_stream("conversations", config) records = _read_full_refresh(stream) assert len(records) == 50 -def test_full_refresh_settings(authenticator, config, requests_mock): +def test_full_refresh_settings(config, requests_mock): json_resp = {"primary_language": "en", "supported_languages": [], "portal_languages": []} requests_mock.register_uri("GET", "/api/v2/settings/helpdesk", json=json_resp) - stream = Settings(authenticator=authenticator, config=config) + stream = find_stream("settings", config) records = _read_full_refresh(stream) assert len(records) == 1 - assert records[0] == json_resp + assert dict(records[0]) == json_resp +# skipped due to https://github.com/airbytehq/airbyte-internal-issues/issues/6314 +@pytest.mark.skip @pytest.mark.parametrize( - "stream, resource", + "stream_name, resource", [ - (Contacts, "contacts"), - (Tickets, "tickets"), - (SatisfactionRatings, "surveys/satisfaction_ratings"), + ("contacts", "contacts"), + ("tickets", "tickets"), + ("satisfaction_ratings", "surveys/satisfaction_ratings"), ], ) -def test_incremental(stream, resource, authenticator, config, requests_mock): +def test_incremental(stream_name, resource, config, requests_mock): highest_updated_at = "2022-04-25T22:00:00Z" other_updated_at = "2022-04-01T00:00:00Z" highest_index = random.randint(0, 24) @@ -138,7 +107,7 @@ def test_incremental(stream, resource, authenticator, config, requests_mock): json=[{"id": x, "updated_at": highest_updated_at if x == highest_index else other_updated_at} for x in range(25)], ) - stream = stream(authenticator=authenticator, config=config) + stream = find_stream(stream_name, config=config) records, state = _read_incremental(stream, {}) assert len(records) == 25 @@ -147,55 +116,57 @@ def test_incremental(stream, resource, authenticator, config, requests_mock): @pytest.mark.parametrize( - "stream_class, parent_path, sub_paths", + "stream_name, parent_path, sub_paths", [ - (CannedResponses, "canned_response_folders", [f"canned_response_folders/{x}/responses" for x in range(5)]), - (Conversations, "tickets", [f"tickets/{x}/conversations" for x in range(5)]), - (DiscussionForums, "discussions/categories", [f"discussions/categories/{x}/forums" for x in range(5)]), - (SolutionFolders, "solutions/categories", [f"solutions/categories/{x}/folders" for x in range(5)]), + ("canned_responses", "canned_response_folders", [f"canned_response_folders/{x}/responses" for x in range(5)]), + # ("conversations", "tickets", [f"tickets/{x}/conversations" for x in range(5)]), Disabled due to issue with caching + ("discussion_forums", "discussions/categories", [f"discussions/categories/{x}/forums" for x in range(5)]), + ("solution_folders", "solutions/categories", [f"solutions/categories/{x}/folders" for x in range(5)]), ], ) -def test_substream_full_refresh(requests_mock, stream_class, parent_path, sub_paths, authenticator, config): +def test_substream_full_refresh(requests_mock, stream_name, parent_path, sub_paths, authenticator, config): requests_mock.register_uri("GET", "/api/v2/" + parent_path, json=[{"id": x, "updated_at": "2022-05-05T00:00:00Z"} for x in range(5)]) for sub_path in sub_paths: requests_mock.register_uri("GET", "/api/v2/" + sub_path, json=[{"id": x, "updated_at": "2022-05-05T00:00:00Z"} for x in range(10)]) - stream = stream_class(authenticator=authenticator, config=config) + stream = find_stream(stream_name=stream_name, config=config) records = _read_full_refresh(stream) assert len(records) == 50 +@pytest.mark.skip # Disabled due to issue with caching @pytest.mark.parametrize( - "stream_class, parent_path, sub_paths, sub_sub_paths", + "stream_name, parent_path, sub_paths, sub_sub_paths", [ ( - DiscussionTopics, + "discussion_topics", "discussions/categories", [f"discussions/categories/{x}/forums" for x in range(5)], [f"discussions/forums/{x}/topics" for x in range(5)], ), ( - SolutionArticles, + "solution_articles", "solutions/categories", [f"solutions/categories/{x}/folders" for x in range(5)], [f"solutions/folders/{x}/articles" for x in range(5)], ), ], ) -def test_full_refresh_with_two_sub_levels(requests_mock, stream_class, parent_path, sub_paths, sub_sub_paths, authenticator, config): +def test_full_refresh_with_two_sub_levels(requests_mock, stream_name, parent_path, sub_paths, sub_sub_paths, config): requests_mock.register_uri("GET", f"/api/v2/{parent_path}", json=[{"id": x} for x in range(5)]) for sub_path in sub_paths: requests_mock.register_uri("GET", f"/api/v2/{sub_path}", json=[{"id": x} for x in range(5)]) for sub_sub_path in sub_sub_paths: requests_mock.register_uri("GET", f"/api/v2/{sub_sub_path}", json=[{"id": x} for x in range(10)]) - stream = stream_class(authenticator=authenticator, config=config) + stream = find_stream(stream_name=stream_name, config=config) records = _read_full_refresh(stream) assert len(records) == 250 +@pytest.mark.skip # Disabled due to issue with caching def test_full_refresh_discussion_comments(requests_mock, authenticator, config): requests_mock.register_uri("GET", "/api/v2/discussions/categories", json=[{"id": x} for x in range(2)]) for i in range(2): @@ -205,7 +176,7 @@ def test_full_refresh_discussion_comments(requests_mock, authenticator, config): for k in range(4): requests_mock.register_uri("GET", f"/api/v2/discussions/topics/{k}/comments", json=[{"id": x} for x in range(5)]) - stream = DiscussionComments(authenticator=authenticator, config=config) + stream = find_stream(stream_name="discussion_comments", config=config) records = _read_full_refresh(stream) assert len(records) == 120 diff --git a/airbyte-integrations/connectors/source-gcs/README.md b/airbyte-integrations/connectors/source-gcs/README.md index 6938e3d96c0ce..119172d7c35eb 100644 --- a/airbyte-integrations/connectors/source-gcs/README.md +++ b/airbyte-integrations/connectors/source-gcs/README.md @@ -1,4 +1,5 @@ -# Gcs Source +# Gcs source connector + This is the repository for the Gcs source connector, written in Python. For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.com/integrations/sources/gcs). @@ -6,63 +7,49 @@ For information about how to use this connector within Airbyte, see [the documen ## Local development ### Prerequisites -**To iterate on this connector, make sure to complete this prerequisites section.** - -#### Minimum Python version required `= 3.9.0` +* Python (~=3.9) +* Poetry (~=1.7) - installation instructions [here](https://python-poetry.org/docs/#installation) -#### Build & Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python -m venv .venv -``` -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt +### Installing the connector +From this connector directory, run: +```bash +poetry install --with dev ``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. -#### Create credentials +### Create credentials **If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/sources/gcs) to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_gcs/spec.yaml` file. -Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. -See `integration_tests/sample_config.json` for a sample config file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `sample_files/sample_config.json` for a sample config file. -**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source gcs test creds` -and place them into `secrets/config.json`. ### Locally running the connector ``` -python main.py spec -python main.py check --config secrets/config.json -python main.py discover --config secrets/config.json -python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +poetry run source-gcs spec +poetry run source-gcs check --config secrets/config.json +poetry run source-gcs discover --config secrets/config.json +poetry run source-gcs read --config secrets/config.json --catalog sample_files/configured_catalog.json ``` -### Locally running the connector docker image - +### Running unit tests +To run unit tests locally, from the connector directory run: +``` +poetry run pytest unit_tests +``` -#### Build -**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):** +### Building the docker image +1. Install [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) +2. Run the following command to build the docker image: ```bash airbyte-ci connectors --name=source-gcs build ``` -An image will be built with the tag `airbyte/source-gcs:dev`. +An image will be available on your host with the tag `airbyte/source-gcs:dev`. -**Via `docker build`:** -```bash -docker build -t airbyte/source-gcs:dev . -``` -#### Run +### Running as a docker container Then run any of the connector commands as follows: ``` docker run --rm airbyte/source-gcs:dev spec @@ -71,29 +58,34 @@ docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-gcs:dev discover --con docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-gcs:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json ``` -## Testing +### Running our CI test suite You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): ```bash airbyte-ci connectors --name=source-gcs test ``` ### Customizing acceptance Tests -Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. +Customize `acceptance-test-config.yml` file to configure acceptance tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. -## Dependency Management -All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. -We split dependencies between two groups, dependencies that are: -* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. -* required for the testing need to go to `TEST_REQUIREMENTS` list +### Dependency Management +All of your dependencies should be managed via Poetry. +To add a new dependency, run: +```bash +poetry add +``` + +Please commit the changes to `pyproject.toml` and `poetry.lock` files. -### Publishing a new version of the connector +## Publishing a new version of the connector You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? 1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-gcs test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). +2. Bump the connector version (please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors)): + - bump the `dockerImageTag` value in in `metadata.yaml` + - bump the `version` value in `pyproject.toml` 3. Make sure the `metadata.yaml` content is up to date. -4. Make the connector documentation and its changelog is up to date (`docs/integrations/sources/gcs.md`). +4. Make sure the connector documentation and its changelog is up to date (`docs/integrations/sources/gcs.md`). 5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). 6. Pat yourself on the back for being an awesome contributor. 7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. - +8. Once your PR is merged, the new version of the connector will be automatically published to Docker Hub and our connector registry. \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-gcs/acceptance-test-config.yml b/airbyte-integrations/connectors/source-gcs/acceptance-test-config.yml index 2114c72238d19..23756e607992e 100644 --- a/airbyte-integrations/connectors/source-gcs/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-gcs/acceptance-test-config.yml @@ -11,8 +11,6 @@ acceptance_tests: tests: - config_path: "secrets/config.json" status: succeed - - config_path: "secrets/old_config.json" - status: succeed - config_path: "integration_tests/invalid_config.json" status: exception discovery: @@ -21,11 +19,7 @@ acceptance_tests: timeout_seconds: 2400 basic_read: tests: - - config_path: "secrets/old_config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" - expect_trace_message_on_failure: false - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" expect_trace_message_on_failure: false incremental: tests: diff --git a/airbyte-integrations/connectors/source-gcs/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-gcs/integration_tests/abnormal_state.json index 18702dfa6966f..dd226c97b89e3 100644 --- a/airbyte-integrations/connectors/source-gcs/integration_tests/abnormal_state.json +++ b/airbyte-integrations/connectors/source-gcs/integration_tests/abnormal_state.json @@ -26,5 +26,19 @@ "name": "example_2" } } + }, + { + "type": "STREAM", + "stream": { + "stream_state": { + "_ab_source_file_last_modified": "2024-03-21T16:13:20.571000Z\"_https://storage.googleapis.com/airbyte-integration-test-source-gcs/test_folder/simple_test.csv.gz", + "history": { + "https://storage.googleapis.com/airbyte-integration-test-source-gcs/test_folder/simple_test.csv.gz": "2024-03-21T16:13:20.571000Z" + } + }, + "stream_descriptor": { + "name": "example_gzip" + } + } } ] diff --git a/airbyte-integrations/connectors/source-gcs/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-gcs/integration_tests/configured_catalog.json index f6dd1106608b1..b772480244ee2 100644 --- a/airbyte-integrations/connectors/source-gcs/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-gcs/integration_tests/configured_catalog.json @@ -17,6 +17,15 @@ }, "sync_mode": "incremental", "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "example_gzip", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "overwrite" } ] } diff --git a/airbyte-integrations/connectors/source-gcs/integration_tests/spec.json b/airbyte-integrations/connectors/source-gcs/integration_tests/spec.json index 5f69da41c02ac..f0b1dad433a4b 100644 --- a/airbyte-integrations/connectors/source-gcs/integration_tests/spec.json +++ b/airbyte-integrations/connectors/source-gcs/integration_tests/spec.json @@ -35,9 +35,7 @@ "description": "The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here.", "order": 1, "type": "array", - "items": { - "type": "string" - } + "items": { "type": "string" } }, "legacy_prefix": { "title": "Legacy Prefix", @@ -118,9 +116,7 @@ "description": "A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.", "default": [], "type": "array", - "items": { - "type": "string" - }, + "items": { "type": "string" }, "uniqueItems": true }, "strings_can_be_null": { @@ -144,9 +140,7 @@ "header_definition": { "title": "CSV Header Definition", "description": "How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.", - "default": { - "header_definition_type": "From CSV" - }, + "default": { "header_definition_type": "From CSV" }, "oneOf": [ { "title": "From CSV", @@ -188,9 +182,7 @@ "title": "Column Names", "description": "The column names that will be used while emitting the CSV records", "type": "array", - "items": { - "type": "string" - } + "items": { "type": "string" } } }, "required": ["column_names", "header_definition_type"] @@ -203,9 +195,7 @@ "description": "A set of case-sensitive strings that should be interpreted as true values.", "default": ["y", "yes", "t", "true", "on", "1"], "type": "array", - "items": { - "type": "string" - }, + "items": { "type": "string" }, "uniqueItems": true }, "false_values": { @@ -213,9 +203,7 @@ "description": "A set of case-sensitive strings that should be interpreted as false values.", "default": ["n", "no", "f", "false", "off", "0"], "type": "array", - "items": { - "type": "string" - }, + "items": { "type": "string" }, "uniqueItems": true }, "inference_type": { @@ -224,6 +212,12 @@ "default": "None", "airbyte_hidden": true, "enum": ["None", "Primitive Types Only"] + }, + "ignore_errors_on_fields_mismatch": { + "title": "Ignore errors on field mismatch", + "description": "Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema.", + "default": false, + "type": "boolean" } }, "required": ["filetype"] diff --git a/airbyte-integrations/connectors/source-gcs/metadata.yaml b/airbyte-integrations/connectors/source-gcs/metadata.yaml index 08c09f6bfa2e0..246a60c6212ed 100644 --- a/airbyte-integrations/connectors/source-gcs/metadata.yaml +++ b/airbyte-integrations/connectors/source-gcs/metadata.yaml @@ -7,7 +7,7 @@ data: connectorSubtype: file connectorType: source definitionId: 2a8c41ae-8c23-4be0-a73f-2ab10ca1a820 - dockerImageTag: 0.3.7 + dockerImageTag: 0.4.0 dockerRepository: airbyte/source-gcs documentationUrl: https://docs.airbyte.com/integrations/sources/gcs githubIssueLabel: source-gcs diff --git a/airbyte-integrations/connectors/source-gcs/poetry.lock b/airbyte-integrations/connectors/source-gcs/poetry.lock new file mode 100644 index 0000000000000..a88880257a965 --- /dev/null +++ b/airbyte-integrations/connectors/source-gcs/poetry.lock @@ -0,0 +1,2316 @@ +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. + +[[package]] +name = "airbyte-cdk" +version = "0.77.1" +description = "A framework for writing Airbyte Connectors." +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "airbyte_cdk-0.77.1-py3-none-any.whl", hash = "sha256:1530f4a5e44fc8a3e8f81132658222d9b89930385f7ecd9ef0a17a06cc16ea0b"}, + {file = "airbyte_cdk-0.77.1.tar.gz", hash = "sha256:5a4526c3e83cae8144170ec823093b51962c21db8038058e467574ad7574e6c5"}, +] + +[package.dependencies] +airbyte-protocol-models = "0.5.1" +avro = {version = ">=1.11.2,<1.12.0", optional = true, markers = "extra == \"file-based\""} +backoff = "*" +cachetools = "*" +Deprecated = ">=1.2,<1.3" +dpath = ">=2.0.1,<2.1.0" +fastavro = {version = ">=1.8.0,<1.9.0", optional = true, markers = "extra == \"file-based\""} +genson = "1.2.2" +isodate = ">=0.6.1,<0.7.0" +Jinja2 = ">=3.1.2,<3.2.0" +jsonref = ">=0.2,<0.3" +jsonschema = ">=3.2.0,<3.3.0" +markdown = {version = "*", optional = true, markers = "extra == \"file-based\""} +pendulum = "<3.0.0" +pyarrow = {version = ">=15.0.0,<15.1.0", optional = true, markers = "extra == \"file-based\""} +pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" +pytesseract = {version = "0.3.10", optional = true, markers = "extra == \"file-based\""} +python-dateutil = "*" +PyYAML = ">=6.0.1,<7.0.0" +requests = "*" +requests_cache = "*" +unstructured = {version = "0.10.27", extras = ["docx", "pptx"], optional = true, markers = "extra == \"file-based\""} +"unstructured.pytesseract" = {version = ">=0.3.12", optional = true, markers = "extra == \"file-based\""} +wcmatch = "8.4" + +[package.extras] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] +vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] + +[[package]] +name = "airbyte-protocol-models" +version = "0.5.1" +description = "Declares the Airbyte Protocol." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, + {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, +] + +[package.dependencies] +pydantic = ">=1.9.2,<2.0.0" + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "avro" +version = "1.11.3" +description = "Avro is a serialization and RPC framework." +optional = false +python-versions = ">=3.6" +files = [ + {file = "avro-1.11.3.tar.gz", hash = "sha256:3393bb5139f9cf0791d205756ce1e39a5b58586af5b153d6a3b5a199610e9d17"}, +] + +[package.extras] +snappy = ["python-snappy"] +zstandard = ["zstandard"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "beautifulsoup4" +version = "4.12.3" +description = "Screen-scraping library" +optional = false +python-versions = ">=3.6.0" +files = [ + {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, + {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, +] + +[package.dependencies] +soupsieve = ">1.2" + +[package.extras] +cchardet = ["cchardet"] +chardet = ["chardet"] +charset-normalizer = ["charset-normalizer"] +html5lib = ["html5lib"] +lxml = ["lxml"] + +[[package]] +name = "bracex" +version = "2.4" +description = "Bash style brace expander." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + +[[package]] +name = "cachetools" +version = "5.3.3" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "chardet" +version = "5.2.0" +description = "Universal encoding detector for Python 3" +optional = false +python-versions = ">=3.7" +files = [ + {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"}, + {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "dataclasses-json" +version = "0.6.4" +description = "Easily serialize dataclasses to and from JSON." +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "dataclasses_json-0.6.4-py3-none-any.whl", hash = "sha256:f90578b8a3177f7552f4e1a6e535e84293cd5da421fcce0642d49c0d7bdf8df2"}, + {file = "dataclasses_json-0.6.4.tar.gz", hash = "sha256:73696ebf24936560cca79a2430cbc4f3dd23ac7bf46ed17f38e5e5e7657a6377"}, +] + +[package.dependencies] +marshmallow = ">=3.18.0,<4.0.0" +typing-inspect = ">=0.4.0,<1" + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "dpath" +version = "2.0.8" +description = "Filesystem-like pathing and searching for dictionaries" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, + {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, +] + +[[package]] +name = "emoji" +version = "2.11.0" +description = "Emoji for Python" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +files = [ + {file = "emoji-2.11.0-py2.py3-none-any.whl", hash = "sha256:63fc9107f06c6c2e48e5078ce9575cef98518f5ac09474f6148a43e989989582"}, + {file = "emoji-2.11.0.tar.gz", hash = "sha256:772eaa30f4e0b1ce95148a092df4c7dc97644532c03225326b0fd05e8a9f72a3"}, +] + +[package.extras] +dev = ["coverage", "coveralls", "pytest"] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "fastavro" +version = "1.8.4" +description = "Fast read/write of AVRO files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fastavro-1.8.4-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:7afe1475e8a967c04e2b0ef4d33bc10bffa66b4fa6e08bd2ee9d91b6768cba2a"}, + {file = "fastavro-1.8.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5fd73609f3c1ac0d90ae3179d2fb9d788f842245db2656ff9225fce871fc5b7"}, + {file = "fastavro-1.8.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78fdf1ba47e43146af72ac48d7b2247a06c4f2d95dfdaad6129c481014b07a6b"}, + {file = "fastavro-1.8.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d950542b3263653f00b695cbc728b5c60ab9ea6df32a7017ad9a6a67235386e7"}, + {file = "fastavro-1.8.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ce2ccfa9aff8df6da683c48542b7b2a216dde6d3a4d1c505c5e1b8ca2ec0abbb"}, + {file = "fastavro-1.8.4-cp310-cp310-win_amd64.whl", hash = "sha256:f12f9914d6196695d3208ea348145a80d0defefe16b8a226373fe8ce68f66139"}, + {file = "fastavro-1.8.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d353aec9c000b96c33ad285651a2cba0f87fe50fcdecc6120689996af427194d"}, + {file = "fastavro-1.8.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1eaed91d6e1fb06c172e0aaf4b1ca1fd019c3f4a481e314bf783a4c74f6b7015"}, + {file = "fastavro-1.8.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9293b303955acd34a6f69dd4ef3465bd575dbde0cd3e3f00227a0ba5012430b4"}, + {file = "fastavro-1.8.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b79baefd61554d9f03c4beaebbe638ef175d0efc1fb01f25e88ee6ae97985ab3"}, + {file = "fastavro-1.8.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:14d7cd3df019d41c66733b8bf5d983af9e1f601d4cb162853a49238a4087d6b0"}, + {file = "fastavro-1.8.4-cp311-cp311-win_amd64.whl", hash = "sha256:c8fb27001b7023910969f15bee2c9205c4e9f40713929d6c1dca8f470fc8fc80"}, + {file = "fastavro-1.8.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e331229acef15f858d9863ced7b629ebef4bd5f80766d367255e51cbf44f8dab"}, + {file = "fastavro-1.8.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04e26b3ba288bd423f25630a3b9bd70cc61b46c6f6161de35e398a6fc8f260f0"}, + {file = "fastavro-1.8.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6281f4555659ed658b195d1618a637504013e57b680d6cbad7c726e9a4e2cf0b"}, + {file = "fastavro-1.8.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3201880149e1fb807d616ab46b338a26788173a9f4e8a3396ae145e86af878a1"}, + {file = "fastavro-1.8.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:39771719fa04b8321eeebfb0813eaa2723c20e5bf570bcca3f53f1169099a0d7"}, + {file = "fastavro-1.8.4-cp312-cp312-win_amd64.whl", hash = "sha256:7095ae37a5c46dacb7ef430092e5f94650f576be281487b72050c1cf12e4ee20"}, + {file = "fastavro-1.8.4-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:eb76f5bfcde91cde240c93594dae47670cdf1a95d7e5d0dc3ccdef57c6c1c183"}, + {file = "fastavro-1.8.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71ebe1cf090f800ca7d4c64d50c81c2a88c56e6ef6aa5eb61ec425e7ae723617"}, + {file = "fastavro-1.8.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9f0ef601943ea11cd02a59c57f5588cea3e300ac67608f53c904ec7aeddd232"}, + {file = "fastavro-1.8.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1060318f3ab31bcc4b2184cea3763305b773163381601e304000da81a2f7e11f"}, + {file = "fastavro-1.8.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:01c8c7f22172174f2c2c0922801b552fbca75758f84b0ad3cd6f3e505a76ed05"}, + {file = "fastavro-1.8.4-cp38-cp38-win_amd64.whl", hash = "sha256:bc8a1af80b8face4a41d8526a34b6474a874f7367a900d0b14752eacebb7a2b8"}, + {file = "fastavro-1.8.4-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:687a2f8fa83a76906c4ec35c9d0500e13a567fc631845f0e47646c48233c7725"}, + {file = "fastavro-1.8.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b921c63fcfb9149a32c40a9cd27b0e900fcda602455cbce4d773300019b9ce2"}, + {file = "fastavro-1.8.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2610a8683b10be7aaa532ddddbcb719883ee2d6f09dafd4a4a7b46d5d719fc07"}, + {file = "fastavro-1.8.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:94448dc749d098f846f6a6d82d59f85483bd6fcdecfb6234daac5f4494ae4156"}, + {file = "fastavro-1.8.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2d39c6b5db7014a3722a7d206310874430486f4895161911b6b6574cb1a6c48f"}, + {file = "fastavro-1.8.4-cp39-cp39-win_amd64.whl", hash = "sha256:3b73472e8da33bcbf08ec989996637aea04eaca71058bb6d45def6fa4168f541"}, + {file = "fastavro-1.8.4.tar.gz", hash = "sha256:dae6118da27e81abf5957dc79a6d778888fc1bbf67645f52959cb2faba95beff"}, +] + +[package.extras] +codecs = ["lz4", "python-snappy", "zstandard"] +lz4 = ["lz4"] +snappy = ["python-snappy"] +zstandard = ["zstandard"] + +[[package]] +name = "filetype" +version = "1.2.0" +description = "Infer file type and MIME type of any file/buffer. No external dependencies." +optional = false +python-versions = "*" +files = [ + {file = "filetype-1.2.0-py2.py3-none-any.whl", hash = "sha256:7ce71b6880181241cf7ac8697a2f1eb6a8bd9b429f7ad6d27b8db9ba5f1c2d25"}, + {file = "filetype-1.2.0.tar.gz", hash = "sha256:66b56cd6474bf41d8c54660347d37afcc3f7d1970648de365c102ef77548aadb"}, +] + +[[package]] +name = "genson" +version = "1.2.2" +description = "GenSON is a powerful, user-friendly JSON Schema generator." +optional = false +python-versions = "*" +files = [ + {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, +] + +[[package]] +name = "google-api-core" +version = "2.18.0" +description = "Google API client core library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-api-core-2.18.0.tar.gz", hash = "sha256:62d97417bfc674d6cef251e5c4d639a9655e00c45528c4364fbfebb478ce72a9"}, + {file = "google_api_core-2.18.0-py3-none-any.whl", hash = "sha256:5a63aa102e0049abe85b5b88cb9409234c1f70afcda21ce1e40b285b9629c1d6"}, +] + +[package.dependencies] +google-auth = ">=2.14.1,<3.0.dev0" +googleapis-common-protos = ">=1.56.2,<2.0.dev0" +proto-plus = ">=1.22.3,<2.0.0dev" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" +requests = ">=2.18.0,<3.0.0.dev0" + +[package.extras] +grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"] +grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] +grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] + +[[package]] +name = "google-auth" +version = "2.29.0" +description = "Google Authentication Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-auth-2.29.0.tar.gz", hash = "sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360"}, + {file = "google_auth-2.29.0-py2.py3-none-any.whl", hash = "sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415"}, +] + +[package.dependencies] +cachetools = ">=2.0.0,<6.0" +pyasn1-modules = ">=0.2.1" +rsa = ">=3.1.4,<5" + +[package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] +enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] +pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] +reauth = ["pyu2f (>=0.1.5)"] +requests = ["requests (>=2.20.0,<3.0.0.dev0)"] + +[[package]] +name = "google-cloud-core" +version = "2.4.1" +description = "Google Cloud API client core library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-cloud-core-2.4.1.tar.gz", hash = "sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073"}, + {file = "google_cloud_core-2.4.1-py2.py3-none-any.whl", hash = "sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61"}, +] + +[package.dependencies] +google-api-core = ">=1.31.6,<2.0.dev0 || >2.3.0,<3.0.0dev" +google-auth = ">=1.25.0,<3.0dev" + +[package.extras] +grpc = ["grpcio (>=1.38.0,<2.0dev)", "grpcio-status (>=1.38.0,<2.0.dev0)"] + +[[package]] +name = "google-cloud-storage" +version = "2.12.0" +description = "Google Cloud Storage API client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-cloud-storage-2.12.0.tar.gz", hash = "sha256:57c0bcda2f5e11f008a155d8636d8381d5abab46b58e0cae0e46dd5e595e6b46"}, + {file = "google_cloud_storage-2.12.0-py2.py3-none-any.whl", hash = "sha256:bc52563439d42981b6e21b071a76da2791672776eda3ba99d13a8061ebbd6e5e"}, +] + +[package.dependencies] +google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0dev" +google-auth = ">=2.23.3,<3.0dev" +google-cloud-core = ">=2.3.0,<3.0dev" +google-crc32c = ">=1.0,<2.0dev" +google-resumable-media = ">=2.6.0" +requests = ">=2.18.0,<3.0.0dev" + +[package.extras] +protobuf = ["protobuf (<5.0.0dev)"] + +[[package]] +name = "google-crc32c" +version = "1.5.0" +description = "A python wrapper of the C library 'Google CRC32C'" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-crc32c-1.5.0.tar.gz", hash = "sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7"}, + {file = "google_crc32c-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13"}, + {file = "google_crc32c-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346"}, + {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65"}, + {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b"}, + {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02"}, + {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4"}, + {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e"}, + {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c"}, + {file = "google_crc32c-1.5.0-cp310-cp310-win32.whl", hash = "sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee"}, + {file = "google_crc32c-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289"}, + {file = "google_crc32c-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273"}, + {file = "google_crc32c-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298"}, + {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57"}, + {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438"}, + {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906"}, + {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183"}, + {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd"}, + {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c"}, + {file = "google_crc32c-1.5.0-cp311-cp311-win32.whl", hash = "sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709"}, + {file = "google_crc32c-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-win32.whl", hash = "sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740"}, + {file = "google_crc32c-1.5.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8"}, + {file = "google_crc32c-1.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a"}, + {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946"}, + {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a"}, + {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d"}, + {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a"}, + {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37"}, + {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894"}, + {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a"}, + {file = "google_crc32c-1.5.0-cp38-cp38-win32.whl", hash = "sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4"}, + {file = "google_crc32c-1.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c"}, + {file = "google_crc32c-1.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7"}, + {file = "google_crc32c-1.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d"}, + {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100"}, + {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9"}, + {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57"}, + {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210"}, + {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd"}, + {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96"}, + {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61"}, + {file = "google_crc32c-1.5.0-cp39-cp39-win32.whl", hash = "sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c"}, + {file = "google_crc32c-1.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541"}, + {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325"}, + {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd"}, + {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091"}, + {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178"}, + {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2"}, + {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d"}, + {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2"}, + {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5"}, + {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462"}, + {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314"}, + {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728"}, + {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88"}, + {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb"}, + {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31"}, + {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93"}, +] + +[package.extras] +testing = ["pytest"] + +[[package]] +name = "google-resumable-media" +version = "2.7.0" +description = "Utilities for Google Media Downloads and Resumable Uploads" +optional = false +python-versions = ">= 3.7" +files = [ + {file = "google-resumable-media-2.7.0.tar.gz", hash = "sha256:5f18f5fa9836f4b083162064a1c2c98c17239bfda9ca50ad970ccf905f3e625b"}, + {file = "google_resumable_media-2.7.0-py2.py3-none-any.whl", hash = "sha256:79543cfe433b63fd81c0844b7803aba1bb8950b47bedf7d980c38fa123937e08"}, +] + +[package.dependencies] +google-crc32c = ">=1.0,<2.0dev" + +[package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)", "google-auth (>=1.22.0,<2.0dev)"] +requests = ["requests (>=2.18.0,<3.0.0dev)"] + +[[package]] +name = "googleapis-common-protos" +version = "1.63.0" +description = "Common protobufs used in Google APIs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "googleapis-common-protos-1.63.0.tar.gz", hash = "sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e"}, + {file = "googleapis_common_protos-1.63.0-py2.py3-none-any.whl", hash = "sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632"}, +] + +[package.dependencies] +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" + +[package.extras] +grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "importlib-metadata" +version = "7.1.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"}, + {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "joblib" +version = "1.3.2" +description = "Lightweight pipelining with Python functions" +optional = false +python-versions = ">=3.7" +files = [ + {file = "joblib-1.3.2-py3-none-any.whl", hash = "sha256:ef4331c65f239985f3f2220ecc87db222f08fd22097a3dd5698f693875f8cbb9"}, + {file = "joblib-1.3.2.tar.gz", hash = "sha256:92f865e621e17784e7955080b6d042489e3b8e294949cc44c6eac304f59772b1"}, +] + +[[package]] +name = "jsonref" +version = "0.2" +description = "An implementation of JSON Reference for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, +] + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0" +setuptools = "*" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] + +[[package]] +name = "langdetect" +version = "1.0.9" +description = "Language detection library ported from Google's language-detection." +optional = false +python-versions = "*" +files = [ + {file = "langdetect-1.0.9-py2-none-any.whl", hash = "sha256:7cbc0746252f19e76f77c0b1690aadf01963be835ef0cd4b56dddf2a8f1dfc2a"}, + {file = "langdetect-1.0.9.tar.gz", hash = "sha256:cbc1fef89f8d062739774bd51eda3da3274006b3661d199c2655f6b3f6d605a0"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "lxml" +version = "5.1.0" +description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." +optional = false +python-versions = ">=3.6" +files = [ + {file = "lxml-5.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:704f5572ff473a5f897745abebc6df40f22d4133c1e0a1f124e4f2bd3330ff7e"}, + {file = "lxml-5.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9d3c0f8567ffe7502d969c2c1b809892dc793b5d0665f602aad19895f8d508da"}, + {file = "lxml-5.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5fcfbebdb0c5d8d18b84118842f31965d59ee3e66996ac842e21f957eb76138c"}, + {file = "lxml-5.1.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f37c6d7106a9d6f0708d4e164b707037b7380fcd0b04c5bd9cae1fb46a856fb"}, + {file = "lxml-5.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2befa20a13f1a75c751f47e00929fb3433d67eb9923c2c0b364de449121f447c"}, + {file = "lxml-5.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22b7ee4c35f374e2c20337a95502057964d7e35b996b1c667b5c65c567d2252a"}, + {file = "lxml-5.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bf8443781533b8d37b295016a4b53c1494fa9a03573c09ca5104550c138d5c05"}, + {file = "lxml-5.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:82bddf0e72cb2af3cbba7cec1d2fd11fda0de6be8f4492223d4a268713ef2147"}, + {file = "lxml-5.1.0-cp310-cp310-win32.whl", hash = "sha256:b66aa6357b265670bb574f050ffceefb98549c721cf28351b748be1ef9577d93"}, + {file = "lxml-5.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:4946e7f59b7b6a9e27bef34422f645e9a368cb2be11bf1ef3cafc39a1f6ba68d"}, + {file = "lxml-5.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:14deca1460b4b0f6b01f1ddc9557704e8b365f55c63070463f6c18619ebf964f"}, + {file = "lxml-5.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ed8c3d2cd329bf779b7ed38db176738f3f8be637bb395ce9629fc76f78afe3d4"}, + {file = "lxml-5.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:436a943c2900bb98123b06437cdd30580a61340fbdb7b28aaf345a459c19046a"}, + {file = "lxml-5.1.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:acb6b2f96f60f70e7f34efe0c3ea34ca63f19ca63ce90019c6cbca6b676e81fa"}, + {file = "lxml-5.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af8920ce4a55ff41167ddbc20077f5698c2e710ad3353d32a07d3264f3a2021e"}, + {file = "lxml-5.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cfced4a069003d8913408e10ca8ed092c49a7f6cefee9bb74b6b3e860683b45"}, + {file = "lxml-5.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9e5ac3437746189a9b4121db2a7b86056ac8786b12e88838696899328fc44bb2"}, + {file = "lxml-5.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f4c9bda132ad108b387c33fabfea47866af87f4ea6ffb79418004f0521e63204"}, + {file = "lxml-5.1.0-cp311-cp311-win32.whl", hash = "sha256:bc64d1b1dab08f679fb89c368f4c05693f58a9faf744c4d390d7ed1d8223869b"}, + {file = "lxml-5.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:a5ab722ae5a873d8dcee1f5f45ddd93c34210aed44ff2dc643b5025981908cda"}, + {file = "lxml-5.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9aa543980ab1fbf1720969af1d99095a548ea42e00361e727c58a40832439114"}, + {file = "lxml-5.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6f11b77ec0979f7e4dc5ae081325a2946f1fe424148d3945f943ceaede98adb8"}, + {file = "lxml-5.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a36c506e5f8aeb40680491d39ed94670487ce6614b9d27cabe45d94cd5d63e1e"}, + {file = "lxml-5.1.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f643ffd2669ffd4b5a3e9b41c909b72b2a1d5e4915da90a77e119b8d48ce867a"}, + {file = "lxml-5.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16dd953fb719f0ffc5bc067428fc9e88f599e15723a85618c45847c96f11f431"}, + {file = "lxml-5.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16018f7099245157564d7148165132c70adb272fb5a17c048ba70d9cc542a1a1"}, + {file = "lxml-5.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:82cd34f1081ae4ea2ede3d52f71b7be313756e99b4b5f829f89b12da552d3aa3"}, + {file = "lxml-5.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:19a1bc898ae9f06bccb7c3e1dfd73897ecbbd2c96afe9095a6026016e5ca97b8"}, + {file = "lxml-5.1.0-cp312-cp312-win32.whl", hash = "sha256:13521a321a25c641b9ea127ef478b580b5ec82aa2e9fc076c86169d161798b01"}, + {file = "lxml-5.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:1ad17c20e3666c035db502c78b86e58ff6b5991906e55bdbef94977700c72623"}, + {file = "lxml-5.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:24ef5a4631c0b6cceaf2dbca21687e29725b7c4e171f33a8f8ce23c12558ded1"}, + {file = "lxml-5.1.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d2900b7f5318bc7ad8631d3d40190b95ef2aa8cc59473b73b294e4a55e9f30f"}, + {file = "lxml-5.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:601f4a75797d7a770daed8b42b97cd1bb1ba18bd51a9382077a6a247a12aa38d"}, + {file = "lxml-5.1.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4b68c961b5cc402cbd99cca5eb2547e46ce77260eb705f4d117fd9c3f932b95"}, + {file = "lxml-5.1.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:afd825e30f8d1f521713a5669b63657bcfe5980a916c95855060048b88e1adb7"}, + {file = "lxml-5.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:262bc5f512a66b527d026518507e78c2f9c2bd9eb5c8aeeb9f0eb43fcb69dc67"}, + {file = "lxml-5.1.0-cp36-cp36m-win32.whl", hash = "sha256:e856c1c7255c739434489ec9c8aa9cdf5179785d10ff20add308b5d673bed5cd"}, + {file = "lxml-5.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:c7257171bb8d4432fe9d6fdde4d55fdbe663a63636a17f7f9aaba9bcb3153ad7"}, + {file = "lxml-5.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b9e240ae0ba96477682aa87899d94ddec1cc7926f9df29b1dd57b39e797d5ab5"}, + {file = "lxml-5.1.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a96f02ba1bcd330807fc060ed91d1f7a20853da6dd449e5da4b09bfcc08fdcf5"}, + {file = "lxml-5.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3898ae2b58eeafedfe99e542a17859017d72d7f6a63de0f04f99c2cb125936"}, + {file = "lxml-5.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61c5a7edbd7c695e54fca029ceb351fc45cd8860119a0f83e48be44e1c464862"}, + {file = "lxml-5.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3aeca824b38ca78d9ee2ab82bd9883083d0492d9d17df065ba3b94e88e4d7ee6"}, + {file = "lxml-5.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8f52fe6859b9db71ee609b0c0a70fea5f1e71c3462ecf144ca800d3f434f0764"}, + {file = "lxml-5.1.0-cp37-cp37m-win32.whl", hash = "sha256:d42e3a3fc18acc88b838efded0e6ec3edf3e328a58c68fbd36a7263a874906c8"}, + {file = "lxml-5.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:eac68f96539b32fce2c9b47eb7c25bb2582bdaf1bbb360d25f564ee9e04c542b"}, + {file = "lxml-5.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ae15347a88cf8af0949a9872b57a320d2605ae069bcdf047677318bc0bba45b1"}, + {file = "lxml-5.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c26aab6ea9c54d3bed716b8851c8bfc40cb249b8e9880e250d1eddde9f709bf5"}, + {file = "lxml-5.1.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:342e95bddec3a698ac24378d61996b3ee5ba9acfeb253986002ac53c9a5f6f84"}, + {file = "lxml-5.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:725e171e0b99a66ec8605ac77fa12239dbe061482ac854d25720e2294652eeaa"}, + {file = "lxml-5.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d184e0d5c918cff04cdde9dbdf9600e960161d773666958c9d7b565ccc60c45"}, + {file = "lxml-5.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:98f3f020a2b736566c707c8e034945c02aa94e124c24f77ca097c446f81b01f1"}, + {file = "lxml-5.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d48fc57e7c1e3df57be5ae8614bab6d4e7b60f65c5457915c26892c41afc59e"}, + {file = "lxml-5.1.0-cp38-cp38-win32.whl", hash = "sha256:7ec465e6549ed97e9f1e5ed51c657c9ede767bc1c11552f7f4d022c4df4a977a"}, + {file = "lxml-5.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:b21b4031b53d25b0858d4e124f2f9131ffc1530431c6d1321805c90da78388d1"}, + {file = "lxml-5.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:52427a7eadc98f9e62cb1368a5079ae826f94f05755d2d567d93ee1bc3ceb354"}, + {file = "lxml-5.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6a2a2c724d97c1eb8cf966b16ca2915566a4904b9aad2ed9a09c748ffe14f969"}, + {file = "lxml-5.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:843b9c835580d52828d8f69ea4302537337a21e6b4f1ec711a52241ba4a824f3"}, + {file = "lxml-5.1.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b99f564659cfa704a2dd82d0684207b1aadf7d02d33e54845f9fc78e06b7581"}, + {file = "lxml-5.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f8b0c78e7aac24979ef09b7f50da871c2de2def043d468c4b41f512d831e912"}, + {file = "lxml-5.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9bcf86dfc8ff3e992fed847c077bd875d9e0ba2fa25d859c3a0f0f76f07f0c8d"}, + {file = "lxml-5.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:49a9b4af45e8b925e1cd6f3b15bbba2c81e7dba6dce170c677c9cda547411e14"}, + {file = "lxml-5.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:280f3edf15c2a967d923bcfb1f8f15337ad36f93525828b40a0f9d6c2ad24890"}, + {file = "lxml-5.1.0-cp39-cp39-win32.whl", hash = "sha256:ed7326563024b6e91fef6b6c7a1a2ff0a71b97793ac33dbbcf38f6005e51ff6e"}, + {file = "lxml-5.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:8d7b4beebb178e9183138f552238f7e6613162a42164233e2bda00cb3afac58f"}, + {file = "lxml-5.1.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9bd0ae7cc2b85320abd5e0abad5ccee5564ed5f0cc90245d2f9a8ef330a8deae"}, + {file = "lxml-5.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8c1d679df4361408b628f42b26a5d62bd3e9ba7f0c0e7969f925021554755aa"}, + {file = "lxml-5.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2ad3a8ce9e8a767131061a22cd28fdffa3cd2dc193f399ff7b81777f3520e372"}, + {file = "lxml-5.1.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:304128394c9c22b6569eba2a6d98392b56fbdfbad58f83ea702530be80d0f9df"}, + {file = "lxml-5.1.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d74fcaf87132ffc0447b3c685a9f862ffb5b43e70ea6beec2fb8057d5d2a1fea"}, + {file = "lxml-5.1.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:8cf5877f7ed384dabfdcc37922c3191bf27e55b498fecece9fd5c2c7aaa34c33"}, + {file = "lxml-5.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:877efb968c3d7eb2dad540b6cabf2f1d3c0fbf4b2d309a3c141f79c7e0061324"}, + {file = "lxml-5.1.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f14a4fb1c1c402a22e6a341a24c1341b4a3def81b41cd354386dcb795f83897"}, + {file = "lxml-5.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:25663d6e99659544ee8fe1b89b1a8c0aaa5e34b103fab124b17fa958c4a324a6"}, + {file = "lxml-5.1.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8b9f19df998761babaa7f09e6bc169294eefafd6149aaa272081cbddc7ba4ca3"}, + {file = "lxml-5.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e53d7e6a98b64fe54775d23a7c669763451340c3d44ad5e3a3b48a1efbdc96f"}, + {file = "lxml-5.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c3cd1fc1dc7c376c54440aeaaa0dcc803d2126732ff5c6b68ccd619f2e64be4f"}, + {file = "lxml-5.1.0.tar.gz", hash = "sha256:3eea6ed6e6c918e468e693c41ef07f3c3acc310b70ddd9cc72d9ef84bc9564ca"}, +] + +[package.extras] +cssselect = ["cssselect (>=0.7)"] +html5 = ["html5lib"] +htmlsoup = ["BeautifulSoup4"] +source = ["Cython (>=3.0.7)"] + +[[package]] +name = "markdown" +version = "3.6" +description = "Python implementation of John Gruber's Markdown." +optional = false +python-versions = ">=3.8" +files = [ + {file = "Markdown-3.6-py3-none-any.whl", hash = "sha256:48f276f4d8cfb8ce6527c8f79e2ee29708508bf4d40aa410fbc3b4ee832c850f"}, + {file = "Markdown-3.6.tar.gz", hash = "sha256:ed4f41f6daecbeeb96e576ce414c41d2d876daa9a16cb35fa8ed8c2ddfad0224"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] +testing = ["coverage", "pyyaml"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "marshmallow" +version = "3.21.1" +description = "A lightweight library for converting complex datatypes to and from native Python datatypes." +optional = false +python-versions = ">=3.8" +files = [ + {file = "marshmallow-3.21.1-py3-none-any.whl", hash = "sha256:f085493f79efb0644f270a9bf2892843142d80d7174bbbd2f3713f2a589dc633"}, + {file = "marshmallow-3.21.1.tar.gz", hash = "sha256:4e65e9e0d80fc9e609574b9983cf32579f305c718afb30d7233ab818571768c3"}, +] + +[package.dependencies] +packaging = ">=17.0" + +[package.extras] +dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] +docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.2.6)", "sphinx-issues (==4.0.0)", "sphinx-version-warning (==1.1.2)"] +tests = ["pytest", "pytz", "simplejson"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "nltk" +version = "3.8.1" +description = "Natural Language Toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "nltk-3.8.1-py3-none-any.whl", hash = "sha256:fd5c9109f976fa86bcadba8f91e47f5e9293bd034474752e92a520f81c93dda5"}, + {file = "nltk-3.8.1.zip", hash = "sha256:1834da3d0682cba4f2cede2f9aad6b0fafb6461ba451db0efb6f9c39798d64d3"}, +] + +[package.dependencies] +click = "*" +joblib = "*" +regex = ">=2021.8.3" +tqdm = "*" + +[package.extras] +all = ["matplotlib", "numpy", "pyparsing", "python-crfsuite", "requests", "scikit-learn", "scipy", "twython"] +corenlp = ["requests"] +machine-learning = ["numpy", "python-crfsuite", "scikit-learn", "scipy"] +plot = ["matplotlib"] +tgrep = ["pyparsing"] +twitter = ["twython"] + +[[package]] +name = "numpy" +version = "1.26.4" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, +] + +[[package]] +name = "packaging" +version = "24.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, +] + +[[package]] +name = "pendulum" +version = "2.1.2" +description = "Python datetimes made easy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, +] + +[package.dependencies] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" + +[[package]] +name = "pillow" +version = "10.2.0" +description = "Python Imaging Library (Fork)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pillow-10.2.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:7823bdd049099efa16e4246bdf15e5a13dbb18a51b68fa06d6c1d4d8b99a796e"}, + {file = "pillow-10.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:83b2021f2ade7d1ed556bc50a399127d7fb245e725aa0113ebd05cfe88aaf588"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fad5ff2f13d69b7e74ce5b4ecd12cc0ec530fcee76356cac6742785ff71c452"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da2b52b37dad6d9ec64e653637a096905b258d2fc2b984c41ae7d08b938a67e4"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:47c0995fc4e7f79b5cfcab1fc437ff2890b770440f7696a3ba065ee0fd496563"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:322bdf3c9b556e9ffb18f93462e5f749d3444ce081290352c6070d014c93feb2"}, + {file = "pillow-10.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:51f1a1bffc50e2e9492e87d8e09a17c5eea8409cda8d3f277eb6edc82813c17c"}, + {file = "pillow-10.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:69ffdd6120a4737710a9eee73e1d2e37db89b620f702754b8f6e62594471dee0"}, + {file = "pillow-10.2.0-cp310-cp310-win32.whl", hash = "sha256:c6dafac9e0f2b3c78df97e79af707cdc5ef8e88208d686a4847bab8266870023"}, + {file = "pillow-10.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:aebb6044806f2e16ecc07b2a2637ee1ef67a11840a66752751714a0d924adf72"}, + {file = "pillow-10.2.0-cp310-cp310-win_arm64.whl", hash = "sha256:7049e301399273a0136ff39b84c3678e314f2158f50f517bc50285fb5ec847ad"}, + {file = "pillow-10.2.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:35bb52c37f256f662abdfa49d2dfa6ce5d93281d323a9af377a120e89a9eafb5"}, + {file = "pillow-10.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c23f307202661071d94b5e384e1e1dc7dfb972a28a2310e4ee16103e66ddb67"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:773efe0603db30c281521a7c0214cad7836c03b8ccff897beae9b47c0b657d61"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11fa2e5984b949b0dd6d7a94d967743d87c577ff0b83392f17cb3990d0d2fd6e"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:716d30ed977be8b37d3ef185fecb9e5a1d62d110dfbdcd1e2a122ab46fddb03f"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a086c2af425c5f62a65e12fbf385f7c9fcb8f107d0849dba5839461a129cf311"}, + {file = "pillow-10.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c8de2789052ed501dd829e9cae8d3dcce7acb4777ea4a479c14521c942d395b1"}, + {file = "pillow-10.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:609448742444d9290fd687940ac0b57fb35e6fd92bdb65386e08e99af60bf757"}, + {file = "pillow-10.2.0-cp311-cp311-win32.whl", hash = "sha256:823ef7a27cf86df6597fa0671066c1b596f69eba53efa3d1e1cb8b30f3533068"}, + {file = "pillow-10.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:1da3b2703afd040cf65ec97efea81cfba59cdbed9c11d8efc5ab09df9509fc56"}, + {file = "pillow-10.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:edca80cbfb2b68d7b56930b84a0e45ae1694aeba0541f798e908a49d66b837f1"}, + {file = "pillow-10.2.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:1b5e1b74d1bd1b78bc3477528919414874748dd363e6272efd5abf7654e68bef"}, + {file = "pillow-10.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0eae2073305f451d8ecacb5474997c08569fb4eb4ac231ffa4ad7d342fdc25ac"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7c2286c23cd350b80d2fc9d424fc797575fb16f854b831d16fd47ceec078f2c"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e23412b5c41e58cec602f1135c57dfcf15482013ce6e5f093a86db69646a5aa"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:52a50aa3fb3acb9cf7213573ef55d31d6eca37f5709c69e6858fe3bc04a5c2a2"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:127cee571038f252a552760076407f9cff79761c3d436a12af6000cd182a9d04"}, + {file = "pillow-10.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8d12251f02d69d8310b046e82572ed486685c38f02176bd08baf216746eb947f"}, + {file = "pillow-10.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:54f1852cd531aa981bc0965b7d609f5f6cc8ce8c41b1139f6ed6b3c54ab82bfb"}, + {file = "pillow-10.2.0-cp312-cp312-win32.whl", hash = "sha256:257d8788df5ca62c980314053197f4d46eefedf4e6175bc9412f14412ec4ea2f"}, + {file = "pillow-10.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:154e939c5f0053a383de4fd3d3da48d9427a7e985f58af8e94d0b3c9fcfcf4f9"}, + {file = "pillow-10.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:f379abd2f1e3dddb2b61bc67977a6b5a0a3f7485538bcc6f39ec76163891ee48"}, + {file = "pillow-10.2.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8373c6c251f7ef8bda6675dd6d2b3a0fcc31edf1201266b5cf608b62a37407f9"}, + {file = "pillow-10.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:870ea1ada0899fd0b79643990809323b389d4d1d46c192f97342eeb6ee0b8483"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4b6b1e20608493548b1f32bce8cca185bf0480983890403d3b8753e44077129"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3031709084b6e7852d00479fd1d310b07d0ba82765f973b543c8af5061cf990e"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:3ff074fc97dd4e80543a3e91f69d58889baf2002b6be64347ea8cf5533188213"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:cb4c38abeef13c61d6916f264d4845fab99d7b711be96c326b84df9e3e0ff62d"}, + {file = "pillow-10.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b1b3020d90c2d8e1dae29cf3ce54f8094f7938460fb5ce8bc5c01450b01fbaf6"}, + {file = "pillow-10.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:170aeb00224ab3dc54230c797f8404507240dd868cf52066f66a41b33169bdbe"}, + {file = "pillow-10.2.0-cp38-cp38-win32.whl", hash = "sha256:c4225f5220f46b2fde568c74fca27ae9771536c2e29d7c04f4fb62c83275ac4e"}, + {file = "pillow-10.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:0689b5a8c5288bc0504d9fcee48f61a6a586b9b98514d7d29b840143d6734f39"}, + {file = "pillow-10.2.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:b792a349405fbc0163190fde0dc7b3fef3c9268292586cf5645598b48e63dc67"}, + {file = "pillow-10.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c570f24be1e468e3f0ce7ef56a89a60f0e05b30a3669a459e419c6eac2c35364"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8ecd059fdaf60c1963c58ceb8997b32e9dc1b911f5da5307aab614f1ce5c2fb"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c365fd1703040de1ec284b176d6af5abe21b427cb3a5ff68e0759e1e313a5e7e"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:70c61d4c475835a19b3a5aa42492409878bbca7438554a1f89d20d58a7c75c01"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b6f491cdf80ae540738859d9766783e3b3c8e5bd37f5dfa0b76abdecc5081f13"}, + {file = "pillow-10.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d189550615b4948f45252d7f005e53c2040cea1af5b60d6f79491a6e147eef7"}, + {file = "pillow-10.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:49d9ba1ed0ef3e061088cd1e7538a0759aab559e2e0a80a36f9fd9d8c0c21591"}, + {file = "pillow-10.2.0-cp39-cp39-win32.whl", hash = "sha256:babf5acfede515f176833ed6028754cbcd0d206f7f614ea3447d67c33be12516"}, + {file = "pillow-10.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:0304004f8067386b477d20a518b50f3fa658a28d44e4116970abfcd94fac34a8"}, + {file = "pillow-10.2.0-cp39-cp39-win_arm64.whl", hash = "sha256:0fb3e7fc88a14eacd303e90481ad983fd5b69c761e9e6ef94c983f91025da869"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:322209c642aabdd6207517e9739c704dc9f9db943015535783239022002f054a"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3eedd52442c0a5ff4f887fab0c1c0bb164d8635b32c894bc1faf4c618dd89df2"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb28c753fd5eb3dd859b4ee95de66cc62af91bcff5db5f2571d32a520baf1f04"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:33870dc4653c5017bf4c8873e5488d8f8d5f8935e2f1fb9a2208c47cdd66efd2"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3c31822339516fb3c82d03f30e22b1d038da87ef27b6a78c9549888f8ceda39a"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a2b56ba36e05f973d450582fb015594aaa78834fefe8dfb8fcd79b93e64ba4c6"}, + {file = "pillow-10.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d8e6aeb9201e655354b3ad049cb77d19813ad4ece0df1249d3c793de3774f8c7"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:2247178effb34a77c11c0e8ac355c7a741ceca0a732b27bf11e747bbc950722f"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15587643b9e5eb26c48e49a7b33659790d28f190fc514a322d55da2fb5c2950e"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753cd8f2086b2b80180d9b3010dd4ed147efc167c90d3bf593fe2af21265e5a5"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7c8f97e8e7a9009bcacbe3766a36175056c12f9a44e6e6f2d5caad06dcfbf03b"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d1b35bcd6c5543b9cb547dee3150c93008f8dd0f1fef78fc0cd2b141c5baf58a"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fe4c15f6c9285dc54ce6553a3ce908ed37c8f3825b5a51a15c91442bb955b868"}, + {file = "pillow-10.2.0.tar.gz", hash = "sha256:e87f0b2c78157e12d7686b27d63c070fd65d994e8ddae6f328e0dcf4a0cd007e"}, +] + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] +fpx = ["olefile"] +mic = ["olefile"] +tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] +typing = ["typing-extensions"] +xmp = ["defusedxml"] + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "proto-plus" +version = "1.23.0" +description = "Beautiful, Pythonic protocol buffers." +optional = false +python-versions = ">=3.6" +files = [ + {file = "proto-plus-1.23.0.tar.gz", hash = "sha256:89075171ef11988b3fa157f5dbd8b9cf09d65fffee97e29ce403cd8defba19d2"}, + {file = "proto_plus-1.23.0-py3-none-any.whl", hash = "sha256:a829c79e619e1cf632de091013a4173deed13a55f326ef84f05af6f50ff4c82c"}, +] + +[package.dependencies] +protobuf = ">=3.19.0,<5.0.0dev" + +[package.extras] +testing = ["google-api-core[grpc] (>=1.31.5)"] + +[[package]] +name = "protobuf" +version = "4.25.3" +description = "" +optional = false +python-versions = ">=3.8" +files = [ + {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"}, + {file = "protobuf-4.25.3-cp310-abi3-win_amd64.whl", hash = "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8"}, + {file = "protobuf-4.25.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d"}, + {file = "protobuf-4.25.3-cp38-cp38-win32.whl", hash = "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"}, + {file = "protobuf-4.25.3-cp38-cp38-win_amd64.whl", hash = "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4"}, + {file = "protobuf-4.25.3-cp39-cp39-win32.whl", hash = "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4"}, + {file = "protobuf-4.25.3-cp39-cp39-win_amd64.whl", hash = "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c"}, + {file = "protobuf-4.25.3-py3-none-any.whl", hash = "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9"}, + {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"}, +] + +[[package]] +name = "pyarrow" +version = "15.0.2" +description = "Python library for Apache Arrow" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyarrow-15.0.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:88b340f0a1d05b5ccc3d2d986279045655b1fe8e41aba6ca44ea28da0d1455d8"}, + {file = "pyarrow-15.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eaa8f96cecf32da508e6c7f69bb8401f03745c050c1dd42ec2596f2e98deecac"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23c6753ed4f6adb8461e7c383e418391b8d8453c5d67e17f416c3a5d5709afbd"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f639c059035011db8c0497e541a8a45d98a58dbe34dc8fadd0ef128f2cee46e5"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:290e36a59a0993e9a5224ed2fb3e53375770f07379a0ea03ee2fce2e6d30b423"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:06c2bb2a98bc792f040bef31ad3e9be6a63d0cb39189227c08a7d955db96816e"}, + {file = "pyarrow-15.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:f7a197f3670606a960ddc12adbe8075cea5f707ad7bf0dffa09637fdbb89f76c"}, + {file = "pyarrow-15.0.2-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:5f8bc839ea36b1f99984c78e06e7a06054693dc2af8920f6fb416b5bca9944e4"}, + {file = "pyarrow-15.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f5e81dfb4e519baa6b4c80410421528c214427e77ca0ea9461eb4097c328fa33"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a4f240852b302a7af4646c8bfe9950c4691a419847001178662a98915fd7ee7"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e7d9cfb5a1e648e172428c7a42b744610956f3b70f524aa3a6c02a448ba853e"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2d4f905209de70c0eb5b2de6763104d5a9a37430f137678edfb9a675bac9cd98"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:90adb99e8ce5f36fbecbbc422e7dcbcbed07d985eed6062e459e23f9e71fd197"}, + {file = "pyarrow-15.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:b116e7fd7889294cbd24eb90cd9bdd3850be3738d61297855a71ac3b8124ee38"}, + {file = "pyarrow-15.0.2-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:25335e6f1f07fdaa026a61c758ee7d19ce824a866b27bba744348fa73bb5a440"}, + {file = "pyarrow-15.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:90f19e976d9c3d8e73c80be84ddbe2f830b6304e4c576349d9360e335cd627fc"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a22366249bf5fd40ddacc4f03cd3160f2d7c247692945afb1899bab8a140ddfb"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2a335198f886b07e4b5ea16d08ee06557e07db54a8400cc0d03c7f6a22f785f"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:3e6d459c0c22f0b9c810a3917a1de3ee704b021a5fb8b3bacf968eece6df098f"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:033b7cad32198754d93465dcfb71d0ba7cb7cd5c9afd7052cab7214676eec38b"}, + {file = "pyarrow-15.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:29850d050379d6e8b5a693098f4de7fd6a2bea4365bfd073d7c57c57b95041ee"}, + {file = "pyarrow-15.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:7167107d7fb6dcadb375b4b691b7e316f4368f39f6f45405a05535d7ad5e5058"}, + {file = "pyarrow-15.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e85241b44cc3d365ef950432a1b3bd44ac54626f37b2e3a0cc89c20e45dfd8bf"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:248723e4ed3255fcd73edcecc209744d58a9ca852e4cf3d2577811b6d4b59818"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ff3bdfe6f1b81ca5b73b70a8d482d37a766433823e0c21e22d1d7dde76ca33f"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:f3d77463dee7e9f284ef42d341689b459a63ff2e75cee2b9302058d0d98fe142"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:8c1faf2482fb89766e79745670cbca04e7018497d85be9242d5350cba21357e1"}, + {file = "pyarrow-15.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:28f3016958a8e45a1069303a4a4f6a7d4910643fc08adb1e2e4a7ff056272ad3"}, + {file = "pyarrow-15.0.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:89722cb64286ab3d4daf168386f6968c126057b8c7ec3ef96302e81d8cdb8ae4"}, + {file = "pyarrow-15.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cd0ba387705044b3ac77b1b317165c0498299b08261d8122c96051024f953cd5"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad2459bf1f22b6a5cdcc27ebfd99307d5526b62d217b984b9f5c974651398832"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58922e4bfece8b02abf7159f1f53a8f4d9f8e08f2d988109126c17c3bb261f22"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:adccc81d3dc0478ea0b498807b39a8d41628fa9210729b2f718b78cb997c7c91"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:8bd2baa5fe531571847983f36a30ddbf65261ef23e496862ece83bdceb70420d"}, + {file = "pyarrow-15.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6669799a1d4ca9da9c7e06ef48368320f5856f36f9a4dd31a11839dda3f6cc8c"}, + {file = "pyarrow-15.0.2.tar.gz", hash = "sha256:9c9bc803cb3b7bfacc1e96ffbfd923601065d9d3f911179d81e72d99fd74a3d9"}, +] + +[package.dependencies] +numpy = ">=1.16.6,<2" + +[[package]] +name = "pyasn1" +version = "0.5.1" +description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "pyasn1-0.5.1-py2.py3-none-any.whl", hash = "sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58"}, + {file = "pyasn1-0.5.1.tar.gz", hash = "sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c"}, +] + +[[package]] +name = "pyasn1-modules" +version = "0.3.0" +description = "A collection of ASN.1-based protocols modules" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"}, + {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"}, +] + +[package.dependencies] +pyasn1 = ">=0.4.6,<0.6.0" + +[[package]] +name = "pydantic" +version = "1.10.14" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, + {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, + {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, + {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, + {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, + {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, + {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, + {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, + {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, + {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + +[[package]] +name = "pyrsistent" +version = "0.20.0" +description = "Persistent/Functional/Immutable data structures" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, +] + +[[package]] +name = "pytesseract" +version = "0.3.10" +description = "Python-tesseract is a python wrapper for Google's Tesseract-OCR" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytesseract-0.3.10-py3-none-any.whl", hash = "sha256:8f22cc98f765bf13517ead0c70effedb46c153540d25783e04014f28b55a5fc6"}, + {file = "pytesseract-0.3.10.tar.gz", hash = "sha256:f1c3a8b0f07fd01a1085d451f5b8315be6eec1d5577a6796d46dc7a62bd4120f"}, +] + +[package.dependencies] +packaging = ">=21.3" +Pillow = ">=8.0.0" + +[[package]] +name = "pytest" +version = "7.4.4" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-mock" +version = "3.14.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, +] + +[package.dependencies] +pytest = ">=6.2.5" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-docx" +version = "1.1.0" +description = "Create, read, and update Microsoft Word .docx files." +optional = false +python-versions = ">=3.7" +files = [ + {file = "python-docx-1.1.0.tar.gz", hash = "sha256:5829b722141cf1ab79aedf0c34d9fe9924b29764584c0f2164eb2b02dcdf17c9"}, + {file = "python_docx-1.1.0-py3-none-any.whl", hash = "sha256:bac9773278098a1ddc43a52d84e22f5909c4a3080a624530b3ecb3771b07c6cd"}, +] + +[package.dependencies] +lxml = ">=3.1.0" +typing-extensions = "*" + +[[package]] +name = "python-iso639" +version = "2024.2.7" +description = "Look-up utilities for ISO 639 language codes and names" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-iso639-2024.2.7.tar.gz", hash = "sha256:c323233348c34d57c601e3e6d824088e492896bcb97a61a87f7d93401a305377"}, + {file = "python_iso639-2024.2.7-py3-none-any.whl", hash = "sha256:7b149623ff74230f4ee3061fb01d18e57a8d07c5fee2aa72907f39b7f6d16cbc"}, +] + +[package.extras] +dev = ["black (==24.1.1)", "build (==1.0.3)", "flake8 (==7.0.0)", "pytest (==8.0.0)", "twine (==4.0.2)"] + +[[package]] +name = "python-magic" +version = "0.4.27" +description = "File type identification using libmagic" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "python-magic-0.4.27.tar.gz", hash = "sha256:c1ba14b08e4a5f5c31a302b7721239695b2f0f058d125bd5ce1ee36b9d9d3c3b"}, + {file = "python_magic-0.4.27-py2.py3-none-any.whl", hash = "sha256:c212960ad306f700aa0d01e5d7a325d20548ff97eb9920dcd29513174f0294d3"}, +] + +[[package]] +name = "python-pptx" +version = "0.6.21" +description = "Generate and manipulate Open XML PowerPoint (.pptx) files" +optional = false +python-versions = "*" +files = [ + {file = "python-pptx-0.6.21.tar.gz", hash = "sha256:7798a2aaf89563565b3c7120c0acfe9aff775db0db3580544e3bf4840c2e378f"}, +] + +[package.dependencies] +lxml = ">=3.1.0" +Pillow = ">=3.3.2" +XlsxWriter = ">=0.5.7" + +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "rapidfuzz" +version = "3.7.0" +description = "rapid fuzzy string matching" +optional = false +python-versions = ">=3.8" +files = [ + {file = "rapidfuzz-3.7.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:860f438238f1807532aa5c5c25e74c284232ccc115fe84697b78e25d48f364f7"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4bb9285abeb0477cdb2f8ea0cf7fd4b5f72ed5a9a7d3f0c0bb4a5239db2fc1ed"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:08671280e0c04d2bb3f39511f13cae5914e6690036fd1eefc3d47a47f9fae634"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04bae4d9c16ce1bab6447d196fb8258d98139ed8f9b288a38b84887985e4227b"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1efa2268b51b68156fb84d18ca1720311698a58051c4a19c40d670057ce60519"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:600b4d4315f33ec0356c0dab3991a5d5761102420bcff29e0773706aa48936e8"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18bc2f13c73d5d34499ff6ada55b052c445d3aa64d22c2639e5ab45472568046"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e11c5e6593be41a555475c9c20320342c1f5585d635a064924956944c465ad4"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d7878025248b99ccca3285891899373f98548f2ca13835d83619ffc42241c626"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b4a7e37fe136022d944374fcd8a2f72b8a19f7b648d2cdfb946667e9ede97f9f"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b5881856f830351aaabd869151124f64a80bf61560546d9588a630a4e933a5de"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:c788b11565cc176fab8fab6dfcd469031e906927db94bf7e422afd8ef8f88a5a"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9e17a3092e74025d896ef1d67ac236c83494da37a78ef84c712e4e2273c115f1"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-win32.whl", hash = "sha256:e499c823206c9ffd9d89aa11f813a4babdb9219417d4efe4c8a6f8272da00e98"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:91f798cc00cd94a0def43e9befc6e867c9bd8fa8f882d1eaa40042f528b7e2c7"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-win_arm64.whl", hash = "sha256:d5a3872f35bec89f07b993fa1c5401d11b9e68bcdc1b9737494e279308a38a5f"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ef6b6ab64c4c91c57a6b58e1d690b59453bfa1f1e9757a7e52e59b4079e36631"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2f9070b42c0ba030b045bba16a35bdb498a0d6acb0bdb3ff4e325960e685e290"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:63044c63565f50818d885bfcd40ac369947da4197de56b4d6c26408989d48edf"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49b0c47860c733a3d73a4b70b97b35c8cbf24ef24f8743732f0d1c412a8c85de"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1b14489b038f007f425a06fcf28ac6313c02cb603b54e3a28d9cfae82198cc0"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be08f39e397a618aab907887465d7fabc2d1a4d15d1a67cb8b526a7fb5202a3e"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16895dc62a7b92028f9c8b6d22830f1cbc77306ee794f461afc6028e1a8d7539"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:579cce49dfa57ffd8c8227b3fb53cced54b4df70cec502e63e9799b4d1f44004"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:40998c8dc35fdd221790b8b5134a8d7499adbfab9a5dd9ec626c7e92e17a43ed"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:dc3fdb4738a6b83ae27f1d8923b00d3a9c2b5c50da75b9f8b81841839c6e3e1f"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:92b8146fbfb37ac358ef7e0f6b79619e4f793fbbe894b99ea87920f9c0a9d77d"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:1dfceaa7c2914585bb8a043265c39ec09078f13fbf53b5525722fc074306b6fa"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f332d61f51b0b9c8b55a0fb052b4764b6ad599ea8ce948ac47a4388e9083c35e"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-win32.whl", hash = "sha256:dfd1e4819f1f3c47141f86159b44b7360ecb19bf675080b3b40437bf97273ab9"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:594b9c33fc1a86784962043ee3fbaaed875fbaadff72e467c2f7a83cd6c5d69d"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-win_arm64.whl", hash = "sha256:0b13a6823a1b83ae43f8bf35955df35032bee7bec0daf9b5ab836e0286067434"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:075a419a0ec29be44b3d7f4bcfa5cb7e91e419379a85fc05eb33de68315bd96f"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:51a5b96d2081c3afbef1842a61d63e55d0a5a201473e6975a80190ff2d6f22ca"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a9460d8fddac7ea46dff9298eee9aa950dbfe79f2eb509a9f18fbaefcd10894c"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f39eb1513ee139ba6b5c01fe47ddf2d87e9560dd7fdee1068f7f6efbae70de34"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eace9fdde58a425d4c9a93021b24a0cac830df167a5b2fc73299e2acf9f41493"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0cc77237242303733de47829028a0a8b6ab9188b23ec9d9ff0a674fdcd3c8e7f"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:74e692357dd324dff691d379ef2c094c9ec526c0ce83ed43a066e4e68fe70bf6"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f2075ac9ee5c15d33d24a1efc8368d095602b5fd9634c5b5f24d83e41903528"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5a8ba64d72329a940ff6c74b721268c2004eecc48558f648a38e96915b5d1c1b"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a1f268a2a37cd22573b4a06eccd481c04504b246d3cadc2d8e8dfa64b575636d"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:42c2e8a2341363c7caf276efdbe1a673fc5267a02568c47c8e980f12e9bc8727"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:a9acca34b34fb895ee6a84c436bb919f3b9cd8f43e7003d43e9573a1d990ff74"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9bad6a0fe3bc1753dacaa6229a8ba7d9844eb7ae24d44d17c5f4c51c91a8a95e"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-win32.whl", hash = "sha256:c86bc4b1d2380739e6485396195e30021df509b4923f3f757914e171587bce7c"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:d7361608c8e73a1dc0203a87d151cddebdade0098a047c46da43c469c07df964"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-win_arm64.whl", hash = "sha256:8fdc26e7863e0f63c2185d53bb61f5173ad4451c1c8287b535b30ea25a419a5a"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9b6167468f76779a14b9af66210f68741af94d32d086f19118de4e919f00585c"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5bd394e28ff221557ea4d8152fcec3e66d9f620557feca5f2bedc4c21f8cf2f9"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8e70f876ca89a6df344f8157ac60384e8c05a0dfb442da2490c3f1c45238ccf5"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c837f89d86a5affe9ee6574dad6b195475676a6ab171a67920fc99966f2ab2c"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cda4550a98658f9a8bcdc03d0498ed1565c1563880e3564603a9eaae28d51b2a"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecd70212fd9f1f8b1d3bdd8bcb05acc143defebd41148bdab43e573b043bb241"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:187db4cc8fb54f8c49c67b7f38ef3a122ce23be273032fa2ff34112a2694c3d8"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4604dfc1098920c4eb6d0c6b5cc7bdd4bf95b48633e790c1d3f100a25870691d"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01581b688c5f4f6665b779135e32db0edab1d78028abf914bb91469928efa383"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0828b55ec8ad084febdf4ab0c942eb1f81c97c0935f1cb0be0b4ea84ce755988"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:150c98b65faff17b917b9d36bff8a4d37b6173579c6bc2e38ff2044e209d37a4"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7e4eea225d2bff1aff4c85fcc44716596d3699374d99eb5906b7a7560297460e"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7bc944d7e830cfce0f8b4813875f05904207017b66e25ab7ee757507001310a9"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-win32.whl", hash = "sha256:3e55f02105c451ab6ff0edaaba57cab1b6c0a0241cfb2b306d4e8e1503adba50"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:41851620d2900791d66d9b6092fc163441d7dd91a460c73b07957ff1c517bc30"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e8041c6b2d339766efe6298fa272f79d6dd799965df364ef4e50f488c101c899"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4e09d81008e212fc824ea23603ff5270d75886e72372fa6c7c41c1880bcb57ed"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:419c8961e861fb5fc5590056c66a279623d1ea27809baea17e00cdc313f1217a"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1522eaab91b9400b3ef16eebe445940a19e70035b5bc5d98aef23d66e9ac1df0"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:611278ce3136f4544d596af18ab8849827d64372e1d8888d9a8d071bf4a3f44d"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4efa9bfc5b955b6474ee077eee154e240441842fa304f280b06e6b6aa58a1d1e"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0cc9d3c8261457af3f8756b1f71a9fdc4892978a9e8b967976d2803e08bf972"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce728e2b582fd396bc2559160ee2e391e6a4b5d2e455624044699d96abe8a396"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3a6a36c9299e059e0bee3409218bc5235a46570c20fc980cdee5ed21ea6110ad"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9ea720db8def684c1eb71dadad1f61c9b52f4d979263eb5d443f2b22b0d5430a"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:358692f1df3f8aebcd48e69c77c948c9283b44c0efbaf1eeea01739efe3cd9a6"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:faded69ffe79adcefa8da08f414a0fd52375e2b47f57be79471691dad9656b5a"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7f9f3dc14fadbd553975f824ac48c381f42192cec9d7e5711b528357662a8d8e"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-win32.whl", hash = "sha256:7be5f460ff42d7d27729115bfe8a02e83fa0284536d8630ee900d17b75c29e65"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:dd5ad2c12dab2b98340c4b7b9592c8f349730bda9a2e49675ea592bbcbc1360b"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-win_arm64.whl", hash = "sha256:aa163257a0ac4e70f9009d25e5030bdd83a8541dfa3ba78dc86b35c9e16a80b4"}, + {file = "rapidfuzz-3.7.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4e50840a8a8e0229563eeaf22e21a203359859557db8829f4d0285c17126c5fb"}, + {file = "rapidfuzz-3.7.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:632f09e19365ace5ff2670008adc8bf23d03d668b03a30230e5b60ff9317ee93"}, + {file = "rapidfuzz-3.7.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:209dda6ae66b702f74a78cef555397cdc2a83d7f48771774a20d2fc30808b28c"}, + {file = "rapidfuzz-3.7.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bc0b78572626af6ab134895e4dbfe4f4d615d18dcc43b8d902d8e45471aabba"}, + {file = "rapidfuzz-3.7.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7ba14850cc8258b3764ea16b8a4409ac2ba16d229bde7a5f495dd479cd9ccd56"}, + {file = "rapidfuzz-3.7.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b917764fd2b267addc9d03a96d26f751f6117a95f617428c44a069057653b528"}, + {file = "rapidfuzz-3.7.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1252ca156e1b053e84e5ae1c8e9e062ee80468faf23aa5c543708212a42795fd"}, + {file = "rapidfuzz-3.7.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:86c7676a32d7524e40bc73546e511a408bc831ae5b163029d325ea3a2027d089"}, + {file = "rapidfuzz-3.7.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20e7d729af2e5abb29caa070ec048aba042f134091923d9ca2ac662b5604577e"}, + {file = "rapidfuzz-3.7.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:86eea3e6c314a9238de568254a9c591ec73c2985f125675ed5f171d869c47773"}, + {file = "rapidfuzz-3.7.0.tar.gz", hash = "sha256:620df112c39c6d27316dc1e22046dc0382d6d91fd60d7c51bd41ca0333d867e9"}, +] + +[package.extras] +full = ["numpy"] + +[[package]] +name = "regex" +version = "2023.12.25" +description = "Alternative regular expression module, to replace re." +optional = false +python-versions = ">=3.7" +files = [ + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"}, + {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"}, + {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"}, + {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"}, + {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"}, + {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"}, + {file = "regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"}, + {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"}, + {file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = "sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"}, + {file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"}, + {file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"}, + {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"}, + {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"}, + {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"}, + {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-cache" +version = "1.2.0" +description = "A persistent cache for python requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, +] + +[package.dependencies] +attrs = ">=21.2" +cattrs = ">=22.2" +platformdirs = ">=2.5" +requests = ">=2.22" +url-normalize = ">=1.4" +urllib3 = ">=1.25.5" + +[package.extras] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] +bson = ["bson (>=0.5)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] +dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] +json = ["ujson (>=5.4)"] +mongodb = ["pymongo (>=3)"] +redis = ["redis (>=3)"] +security = ["itsdangerous (>=2.0)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "requests-mock" +version = "1.11.0" +description = "Mock out responses from the requests package" +optional = false +python-versions = "*" +files = [ + {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"}, + {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"}, +] + +[package.dependencies] +requests = ">=2.3,<3" +six = "*" + +[package.extras] +fixture = ["fixtures"] +test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] + +[[package]] +name = "rsa" +version = "4.9" +description = "Pure-Python RSA implementation" +optional = false +python-versions = ">=3.6,<4" +files = [ + {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, + {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, +] + +[package.dependencies] +pyasn1 = ">=0.1.3" + +[[package]] +name = "setuptools" +version = "69.2.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "smart-open" +version = "5.1.0" +description = "Utils for streaming large files (S3, HDFS, GCS, Azure Blob Storage, gzip, bz2...)" +optional = false +python-versions = ">=3.6.*" +files = [ + {file = "smart_open-5.1.0-py3-none-any.whl", hash = "sha256:2059b07f530c8c9e2158e4e1575309aacb74bd813da2325c1f348015d04f3bd6"}, + {file = "smart_open-5.1.0.tar.gz", hash = "sha256:e4dc1350b240ef0759e343e4e2f361bfd4e5477bb2619866e97f80240652e92e"}, +] + +[package.dependencies] +google-cloud-storage = {version = "*", optional = true, markers = "extra == \"gcs\""} + +[package.extras] +all = ["azure-common", "azure-core", "azure-storage-blob", "boto3", "google-cloud-storage", "requests"] +azure = ["azure-common", "azure-core", "azure-storage-blob"] +gcs = ["google-cloud-storage"] +http = ["requests"] +s3 = ["boto3"] +test = ["azure-common", "azure-core", "azure-storage-blob", "boto3", "google-cloud-storage", "moto[server] (==1.3.14)", "parameterizedtestcase", "paramiko", "pathlib2", "pytest", "pytest-rerunfailures", "requests", "responses"] +webhdfs = ["requests"] + +[[package]] +name = "soupsieve" +version = "2.5" +description = "A modern CSS selector implementation for Beautiful Soup." +optional = false +python-versions = ">=3.8" +files = [ + {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, + {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, +] + +[[package]] +name = "tabulate" +version = "0.9.0" +description = "Pretty-print tabular data" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, + {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, +] + +[package.extras] +widechars = ["wcwidth"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "tqdm" +version = "4.66.2" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tqdm-4.66.2-py3-none-any.whl", hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"}, + {file = "tqdm-4.66.2.tar.gz", hash = "sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + +[[package]] +name = "typing-extensions" +version = "4.10.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, +] + +[[package]] +name = "typing-inspect" +version = "0.9.0" +description = "Runtime inspection utilities for typing module." +optional = false +python-versions = "*" +files = [ + {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, + {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, +] + +[package.dependencies] +mypy-extensions = ">=0.3.0" +typing-extensions = ">=3.7.4" + +[[package]] +name = "unstructured" +version = "0.10.27" +description = "A library that prepares raw documents for downstream ML tasks." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "unstructured-0.10.27-py3-none-any.whl", hash = "sha256:3a8a8e44302388ddc39c184059e8b4458f1cdc58032540b9af7d85f6c3eca3be"}, + {file = "unstructured-0.10.27.tar.gz", hash = "sha256:f567b5c4385993a9ab48db5563dd7b413aac4f2002bb22e6250496ea8f440f5e"}, +] + +[package.dependencies] +backoff = "*" +beautifulsoup4 = "*" +chardet = "*" +dataclasses-json = "*" +emoji = "*" +filetype = "*" +langdetect = "*" +lxml = "*" +nltk = "*" +numpy = "*" +python-docx = {version = ">=1.0.1", optional = true, markers = "extra == \"docx\""} +python-iso639 = "*" +python-magic = "*" +python-pptx = {version = "<=0.6.21", optional = true, markers = "extra == \"pptx\""} +rapidfuzz = "*" +requests = "*" +tabulate = "*" +typing-extensions = "*" + +[package.extras] +airtable = ["pyairtable"] +all-docs = ["markdown", "msg-parser", "networkx", "onnx", "openpyxl", "pandas", "pdf2image", "pdfminer.six", "pypandoc", "python-docx (>=1.0.1)", "python-pptx (<=0.6.21)", "unstructured-inference (==0.7.10)", "unstructured.pytesseract (>=0.3.12)", "xlrd"] +azure = ["adlfs", "fsspec (==2023.9.1)"] +azure-cognitive-search = ["azure-search-documents"] +bedrock = ["boto3", "langchain"] +biomed = ["bs4"] +box = ["boxfs", "fsspec (==2023.9.1)"] +confluence = ["atlassian-python-api"] +csv = ["pandas"] +delta-table = ["deltalake", "fsspec (==2023.9.1)"] +discord = ["discord-py"] +doc = ["python-docx (>=1.0.1)"] +docx = ["python-docx (>=1.0.1)"] +dropbox = ["dropboxdrivefs", "fsspec (==2023.9.1)"] +elasticsearch = ["elasticsearch", "jq"] +embed-huggingface = ["huggingface", "langchain", "sentence-transformers"] +epub = ["pypandoc"] +gcs = ["bs4", "fsspec (==2023.9.1)", "gcsfs"] +github = ["pygithub (>1.58.0)"] +gitlab = ["python-gitlab"] +google-drive = ["google-api-python-client"] +huggingface = ["langdetect", "sacremoses", "sentencepiece", "torch", "transformers"] +image = ["onnx", "pdf2image", "pdfminer.six", "unstructured-inference (==0.7.10)", "unstructured.pytesseract (>=0.3.12)"] +jira = ["atlassian-python-api"] +local-inference = ["markdown", "msg-parser", "networkx", "onnx", "openpyxl", "pandas", "pdf2image", "pdfminer.six", "pypandoc", "python-docx (>=1.0.1)", "python-pptx (<=0.6.21)", "unstructured-inference (==0.7.10)", "unstructured.pytesseract (>=0.3.12)", "xlrd"] +md = ["markdown"] +msg = ["msg-parser"] +notion = ["htmlBuilder", "notion-client"] +odt = ["pypandoc", "python-docx (>=1.0.1)"] +onedrive = ["Office365-REST-Python-Client (<2.4.3)", "bs4", "msal"] +openai = ["langchain", "openai", "tiktoken"] +org = ["pypandoc"] +outlook = ["Office365-REST-Python-Client (<2.4.3)", "msal"] +paddleocr = ["unstructured.paddleocr (==2.6.1.3)"] +pdf = ["onnx", "pdf2image", "pdfminer.six", "unstructured-inference (==0.7.10)", "unstructured.pytesseract (>=0.3.12)"] +ppt = ["python-pptx (<=0.6.21)"] +pptx = ["python-pptx (<=0.6.21)"] +reddit = ["praw"] +rst = ["pypandoc"] +rtf = ["pypandoc"] +s3 = ["fsspec (==2023.9.1)", "s3fs"] +salesforce = ["simple-salesforce"] +sharepoint = ["Office365-REST-Python-Client (<2.4.3)", "msal"] +slack = ["slack-sdk"] +tsv = ["pandas"] +wikipedia = ["wikipedia"] +xlsx = ["networkx", "openpyxl", "pandas", "xlrd"] + +[[package]] +name = "unstructured-pytesseract" +version = "0.3.12" +description = "Python-tesseract is a python wrapper for Google's Tesseract-OCR" +optional = false +python-versions = ">=3.8" +files = [ + {file = "unstructured.pytesseract-0.3.12-py3-none-any.whl", hash = "sha256:6ed42530fc697bb08d1ae4884cc517ee808620c1c1414efe8d5d90334da068d3"}, + {file = "unstructured.pytesseract-0.3.12.tar.gz", hash = "sha256:751a21d67b1f109036bf4daf796d3e04631697a355efd650f3373412b249de2e"}, +] + +[package.dependencies] +packaging = ">=21.3" +Pillow = ">=8.0.0" + +[[package]] +name = "url-normalize" +version = "1.4.3" +description = "URL normalization for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, + {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wcmatch" +version = "8.4" +description = "Wildcard/glob file name matcher." +optional = false +python-versions = ">=3.7" +files = [ + {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, + {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, +] + +[package.dependencies] +bracex = ">=2.1.1" + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[[package]] +name = "xlsxwriter" +version = "3.2.0" +description = "A Python module for creating Excel XLSX files." +optional = false +python-versions = ">=3.6" +files = [ + {file = "XlsxWriter-3.2.0-py3-none-any.whl", hash = "sha256:ecfd5405b3e0e228219bcaf24c2ca0915e012ca9464a14048021d21a995d490e"}, + {file = "XlsxWriter-3.2.0.tar.gz", hash = "sha256:9977d0c661a72866a61f9f7a809e25ebbb0fb7036baa3b9fe74afcfca6b3cb8c"}, +] + +[[package]] +name = "zipp" +version = "3.18.1" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, + {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9,<3.12" +content-hash = "46bfd60dbae2ee4a399404806636b0b8583b8e58d6f4170068485ee348127e1d" diff --git a/airbyte-integrations/connectors/source-gcs/pyproject.toml b/airbyte-integrations/connectors/source-gcs/pyproject.toml new file mode 100644 index 0000000000000..3425b7cbf47e1 --- /dev/null +++ b/airbyte-integrations/connectors/source-gcs/pyproject.toml @@ -0,0 +1,31 @@ +[build-system] +requires = [ "poetry-core>=1.0.0",] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +version = "0.4.0" +name = "source-gcs" +description = "Source implementation for Gcs." +authors = [ "Airbyte ",] +license = "ELv2" +readme = "README.md" +documentation = "https://docs.airbyte.com/integrations/sources/gcs" +homepage = "https://airbyte.com" +repository = "https://github.com/airbytehq/airbyte" +[[tool.poetry.packages]] +include = "source_gcs" + +[tool.poetry.dependencies] +python = "^3.9,<3.12" +pytz = "==2024.1" +google-cloud-storage = "==2.12.0" +smart-open = {extras = ["gcs"], version = "==5.1.0"} +airbyte-cdk = {extras = ["file-based"], version = "^0"} + +[tool.poetry.scripts] +source-gcs = "source_gcs.run:run" + +[tool.poetry.group.dev.dependencies] +pytest-mock = "^3.12.0" +requests-mock = "^1.11.0" +pytest = "^7.4" diff --git a/airbyte-integrations/connectors/source-gcs/setup.py b/airbyte-integrations/connectors/source-gcs/setup.py deleted file mode 100644 index 3bb310b4fbf0f..0000000000000 --- a/airbyte-integrations/connectors/source-gcs/setup.py +++ /dev/null @@ -1,49 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = [ - "airbyte-cdk[file-based]>=0.61.0", - "google-cloud-storage==2.12.0", - "smart-open[s3]==5.1.0", - "pandas==1.5.3", -] - -TEST_REQUIREMENTS = [ - "requests-mock~=1.9.3", - "pytest-mock~=3.6.1", - "pytest~=6.2", -] - -setup( - entry_points={ - "console_scripts": [ - "source-gcs=source_gcs.run:run", - ], - }, - name="source_gcs", - description="Source implementation for Gcs.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={ - "": [ - # Include yaml files in the package (if any) - "*.yml", - "*.yaml", - # Include all json files in the package, up to 4 levels deep - "*.json", - "*/*.json", - "*/*/*.json", - "*/*/*/*.json", - "*/*/*/*/*.json", - ] - }, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/source-gcs/source_gcs/helpers.py b/airbyte-integrations/connectors/source-gcs/source_gcs/helpers.py index 3962cbef54589..35ac5411355e8 100644 --- a/airbyte-integrations/connectors/source-gcs/source_gcs/helpers.py +++ b/airbyte-integrations/connectors/source-gcs/source_gcs/helpers.py @@ -5,7 +5,6 @@ import io import json -import pandas as pd from google.cloud import storage from google.cloud.storage.blob import Blob from google.oauth2 import service_account @@ -26,30 +25,6 @@ def get_gcs_blobs(config): return blobs -def read_csv_file(blob: Blob, read_header_only=False): - file_obj = io.BytesIO() - blob.download_to_file(file_obj) - file_obj.seek(0) - if read_header_only: - df = pd.read_csv(file_obj, nrows=0) - else: - df = pd.read_csv(file_obj) - file_obj.close() - return df - - -def construct_file_schema(df): - # Fix all columns to string for maximum compability - - # Create a JSON schema object from the column data types - schema = { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": {col: {"type": "string"} for col in df.columns}, - } - return schema - - def get_stream_name(blob): blob_name = blob.name # Remove path from stream name diff --git a/airbyte-integrations/connectors/source-gcs/source_gcs/stream_reader.py b/airbyte-integrations/connectors/source-gcs/source_gcs/stream_reader.py index ec44dd27048ed..bb7018e0b714f 100644 --- a/airbyte-integrations/connectors/source-gcs/source_gcs/stream_reader.py +++ b/airbyte-integrations/connectors/source-gcs/source_gcs/stream_reader.py @@ -79,7 +79,9 @@ def get_matching_files(self, globs: List[str], prefix: Optional[str], logger: lo if FILE_FORMAT in blob.name.lower() and (not start_date or last_modified >= start_date): uri = blob.generate_signed_url(expiration=timedelta(hours=1), version="v4") - yield RemoteFile(uri=uri, last_modified=last_modified) + file_extension = ".".join(blob.name.split(".")[1:]) + + yield RemoteFile(uri=uri, last_modified=last_modified, mime_type=file_extension) except Exception as exc: self._handle_file_listing_error(exc, prefix, logger) @@ -98,8 +100,16 @@ def open_file(self, file: RemoteFile, mode: FileReadMode, encoding: Optional[str Open and yield a remote file from GCS for reading. """ logger.debug(f"Trying to open {file.uri}") + + # choose correct compression mode + file_extension = file.mime_type.split(".")[-1] + if file_extension in ["gz", "bz2"]: + compression = "." + file_extension + else: + compression = "disable" + try: - result = smart_open.open(file.uri, mode=mode.value, encoding=encoding) + result = smart_open.open(file.uri, mode=mode.value, compression=compression, encoding=encoding) except OSError as oe: logger.warning(ERROR_MESSAGE_ACCESS.format(uri=file.uri, bucket=self.config.bucket)) logger.exception(oe) diff --git a/airbyte-integrations/connectors/source-gcs/unit_tests/unit_test.py b/airbyte-integrations/connectors/source-gcs/unit_tests/unit_test.py deleted file mode 100644 index f9c7751344fa9..0000000000000 --- a/airbyte-integrations/connectors/source-gcs/unit_tests/unit_test.py +++ /dev/null @@ -1,26 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import unittest -from io import BytesIO - -import pandas as pd -from source_gcs.helpers import construct_file_schema - - -class TestGCSFunctions(unittest.TestCase): - def setUp(self): - # Initialize the mock config - self.config = {"service_account": '{"test_key": "test_value"}', "gcs_bucket": "test_bucket", "gcs_path": "test_path"} - - def test_construct_file_schema(self): - # Test that the function correctly constructs a JSON schema for a DataFrame - df = pd.read_csv(BytesIO(b"id,name\n1,Alice\n2,Bob\n3,Charlie\n")) - schema = construct_file_schema(df) - expected_schema = { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": {"id": {"type": "string"}, "name": {"type": "string"}}, - } - self.assertEqual(schema, expected_schema) diff --git a/airbyte-integrations/connectors/source-github/metadata.yaml b/airbyte-integrations/connectors/source-github/metadata.yaml index aeb83b414d076..9e45ad943bd3d 100644 --- a/airbyte-integrations/connectors/source-github/metadata.yaml +++ b/airbyte-integrations/connectors/source-github/metadata.yaml @@ -10,13 +10,13 @@ data: connectorSubtype: api connectorType: source definitionId: ef69ef6e-aa7f-4af1-a01d-ef775033524e - dockerImageTag: 1.6.4 + dockerImageTag: 1.7.1 dockerRepository: airbyte/source-github documentationUrl: https://docs.airbyte.com/integrations/sources/github githubIssueLabel: source-github icon: github.svg license: MIT - maxSecondsBetweenMessages: 5400 + maxSecondsBetweenMessages: 3600 name: GitHub remoteRegistries: pypi: diff --git a/airbyte-integrations/connectors/source-github/poetry.lock b/airbyte-integrations/connectors/source-github/poetry.lock index b5e7825c76f57..f185950f28bc7 100644 --- a/airbyte-integrations/connectors/source-github/poetry.lock +++ b/airbyte-integrations/connectors/source-github/poetry.lock @@ -1,14 +1,14 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.68.4" +version = "0.72.1" description = "A framework for writing Airbyte Connectors." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte-cdk-0.68.4.tar.gz", hash = "sha256:da4d923d9dac9f13fbd2e89a0094c58d440dac85552e8084d19cbb0a73efd9d7"}, - {file = "airbyte_cdk-0.68.4-py3-none-any.whl", hash = "sha256:3b6a9b6adf81a1d9c2d40acecfe9016e73197dd95f1e6027423aeee85d3a7ee1"}, + {file = "airbyte-cdk-0.72.1.tar.gz", hash = "sha256:1dbd0a11f3784cfdd5afa9f40315c9a6123e803be91f9f861642a78e7ee14cd9"}, + {file = "airbyte_cdk-0.72.1-py3-none-any.whl", hash = "sha256:849077805442286de99f589ecba4be82491a3d9d3f516ce1a8b0cbaf303db9a4"}, ] [package.dependencies] @@ -104,13 +104,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -492,13 +492,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -727,13 +727,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -775,7 +775,6 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -833,13 +832,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -851,15 +850,15 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" @@ -902,19 +901,19 @@ tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asy [[package]] name = "setuptools" -version = "69.0.3" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, - {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "sgqlc" @@ -958,24 +957,24 @@ files = [ [[package]] name = "types-pyyaml" -version = "6.0.12.12" +version = "6.0.12.20240311" description = "Typing stubs for PyYAML" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "types-PyYAML-6.0.12.12.tar.gz", hash = "sha256:334373d392fde0fdf95af5c3f1661885fa10c52167b14593eb856289e1855062"}, - {file = "types_PyYAML-6.0.12.12-py3-none-any.whl", hash = "sha256:c05bc6c158facb0676674b7f11fe3960db4f389718e19e62bd2b84d6205cfd24"}, + {file = "types-PyYAML-6.0.12.20240311.tar.gz", hash = "sha256:a9e0f0f88dc835739b0c1ca51ee90d04ca2a897a71af79de9aec5f38cb0a5342"}, + {file = "types_PyYAML-6.0.12.20240311-py3-none-any.whl", hash = "sha256:b845b06a1c7e54b8e5b4c683043de0d9caf205e7434b3edc678ff2411979b8f6"}, ] [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -994,13 +993,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1105,4 +1104,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "21dc716e53a3184f45aa9dd040b2fdd142daf95a2847aa6d9d2910c9ff637d5c" +content-hash = "1956fa861a3b061f4c5b4a84025ece587c69e0bada8df463e286cce19d696e7a" diff --git a/airbyte-integrations/connectors/source-github/pyproject.toml b/airbyte-integrations/connectors/source-github/pyproject.toml index 86d738db8f060..4b56030453c8c 100644 --- a/airbyte-integrations/connectors/source-github/pyproject.toml +++ b/airbyte-integrations/connectors/source-github/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "1.6.4" +version = "1.7.1" name = "source-github" description = "Source implementation for GitHub." authors = [ "Airbyte ",] @@ -17,7 +17,7 @@ include = "source_github" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "^0.68.4" +airbyte-cdk = "^0" sgqlc = "==16.3" [tool.poetry.scripts] diff --git a/airbyte-integrations/connectors/source-github/source_github/source.py b/airbyte-integrations/connectors/source-github/source_github/source.py index d3519052a0366..298e0c596a72a 100644 --- a/airbyte-integrations/connectors/source-github/source_github/source.py +++ b/airbyte-integrations/connectors/source-github/source_github/source.py @@ -3,7 +3,7 @@ # from os import getenv -from typing import Any, Dict, List, Mapping, MutableMapping, Tuple +from typing import Any, List, Mapping, MutableMapping, Optional, Tuple from urllib.parse import urlparse from airbyte_cdk import AirbyteLogger @@ -65,7 +65,9 @@ class SourceGithub(AbstractSource): continue_sync_on_stream_failure = True @staticmethod - def _get_org_repositories(config: Mapping[str, Any], authenticator: MultipleTokenAuthenticator) -> Tuple[List[str], List[str]]: + def _get_org_repositories( + config: Mapping[str, Any], authenticator: MultipleTokenAuthenticator + ) -> Tuple[List[str], List[str], Optional[str]]: """ Parse config/repositories and produce two lists: organizations, repositories. Args: @@ -78,16 +80,19 @@ def _get_org_repositories(config: Mapping[str, Any], authenticator: MultipleToke organizations = set() unchecked_repos = set() unchecked_orgs = set() + pattern = None for org_repos in config_repositories: - org, _, repos = org_repos.partition("/") - if repos == "*": - unchecked_orgs.add(org) + _, _, repos = org_repos.partition("/") + if "*" in repos: + unchecked_orgs.add(org_repos) else: unchecked_repos.add(org_repos) if unchecked_orgs: - stream = Repositories(authenticator=authenticator, organizations=unchecked_orgs, api_url=config.get("api_url")) + org_names = [org.split("/")[0] for org in unchecked_orgs] + pattern = "|".join([f"({org.replace('*', '.*')})" for org in unchecked_orgs]) + stream = Repositories(authenticator=authenticator, organizations=org_names, api_url=config.get("api_url"), pattern=pattern) for record in read_full_refresh(stream): repositories.add(record["full_name"]) organizations.add(record["organization"]) @@ -96,7 +101,7 @@ def _get_org_repositories(config: Mapping[str, Any], authenticator: MultipleToke if unchecked_repos: stream = RepositoryStats( authenticator=authenticator, - repositories=unchecked_repos, + repositories=list(unchecked_repos), api_url=config.get("api_url"), # This parameter is deprecated and in future will be used sane default, page_size: 10 page_size_for_large_streams=config.get("page_size_for_large_streams", constants.DEFAULT_PAGE_SIZE_FOR_LARGE_STREAM), @@ -107,7 +112,7 @@ def _get_org_repositories(config: Mapping[str, Any], authenticator: MultipleToke if organization: organizations.add(organization) - return list(organizations), list(repositories) + return list(organizations), list(repositories), pattern @staticmethod def get_access_token(config: Mapping[str, Any]): @@ -169,45 +174,6 @@ def _validate_branches(self, config: MutableMapping[str, Any]) -> MutableMapping def _is_http_allowed() -> bool: return getenv("DEPLOYMENT_MODE", "").upper() != "CLOUD" - @staticmethod - def _get_branches_data( - selected_branches: List, full_refresh_args: Dict[str, Any] = None - ) -> Tuple[Dict[str, str], Dict[str, List[str]]]: - selected_branches = set(selected_branches) - - # Get the default branch for each repository - default_branches = {} - repository_stats_stream = RepositoryStats(**full_refresh_args) - for stream_slice in repository_stats_stream.stream_slices(sync_mode=SyncMode.full_refresh): - default_branches.update( - { - repo_stats["full_name"]: repo_stats["default_branch"] - for repo_stats in repository_stats_stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice=stream_slice) - } - ) - - all_branches = [] - branches_stream = Branches(**full_refresh_args) - for stream_slice in branches_stream.stream_slices(sync_mode=SyncMode.full_refresh): - for branch in branches_stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice=stream_slice): - all_branches.append(f"{branch['repository']}/{branch['name']}") - - # Create mapping of repository to list of branches to pull commits for - # If no branches are specified for a repo, use its default branch - branches_to_pull: Dict[str, List[str]] = {} - for repo in full_refresh_args["repositories"]: - repo_branches = [] - for branch in selected_branches: - branch_parts = branch.split("/", 2) - if "/".join(branch_parts[:2]) == repo and branch in all_branches: - repo_branches.append(branch_parts[-1]) - if not repo_branches: - repo_branches = [default_branches[repo]] - - branches_to_pull[repo] = repo_branches - - return default_branches, branches_to_pull - def user_friendly_error_message(self, message: str) -> str: user_message = "" if "404 Client Error: Not Found for url: https://api.github.com/repos/" in message: @@ -229,7 +195,7 @@ def check_connection(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> config = self._validate_and_transform_config(config) try: authenticator = self._get_authenticator(config) - _, repositories = self._get_org_repositories(config=config, authenticator=authenticator) + _, repositories, _ = self._get_org_repositories(config=config, authenticator=authenticator) if not repositories: return ( False, @@ -246,7 +212,7 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: authenticator = self._get_authenticator(config) config = self._validate_and_transform_config(config) try: - organizations, repositories = self._get_org_repositories(config=config, authenticator=authenticator) + organizations, repositories, pattern = self._get_org_repositories(config=config, authenticator=authenticator) except Exception as e: message = repr(e) user_message = self.user_friendly_error_message(message) @@ -291,7 +257,6 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: } repository_args_with_start_date = {**repository_args, "start_date": start_date} - default_branches, branches_to_pull = self._get_branches_data(config.get("branch", []), repository_args) pull_requests_stream = PullRequests(**repository_args_with_start_date) projects_stream = Projects(**repository_args_with_start_date) project_columns_stream = ProjectColumns(projects_stream, **repository_args_with_start_date) @@ -307,7 +272,7 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: Comments(**repository_args_with_start_date), CommitCommentReactions(**repository_args_with_start_date), CommitComments(**repository_args_with_start_date), - Commits(**repository_args_with_start_date, branches_to_pull=branches_to_pull, default_branches=default_branches), + Commits(**repository_args_with_start_date, branches_to_pull=config.get("branches", [])), ContributorActivity(**repository_args), Deployments(**repository_args_with_start_date), Events(**repository_args_with_start_date), @@ -327,7 +292,7 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: ProjectsV2(**repository_args_with_start_date), pull_requests_stream, Releases(**repository_args_with_start_date), - Repositories(**organization_args_with_start_date), + Repositories(**organization_args_with_start_date, pattern=pattern), ReviewComments(**repository_args_with_start_date), Reviews(**repository_args_with_start_date), Stargazers(**repository_args_with_start_date), diff --git a/airbyte-integrations/connectors/source-github/source_github/spec.json b/airbyte-integrations/connectors/source-github/source_github/spec.json index edfb6f9a6c398..149bcc7f15706 100644 --- a/airbyte-integrations/connectors/source-github/source_github/spec.json +++ b/airbyte-integrations/connectors/source-github/source_github/spec.json @@ -81,18 +81,19 @@ "type": "array", "items": { "type": "string", - "pattern": "^([\\w.-]+/(\\*|[\\w.-]+(? Iter self.logger.warning(error_msg) except GitHubAPILimitException as e: - message = f"Stream: `{self.name}`, slice: `{stream_slice}`. Limits for all provided tokens are reached, please try again later" - raise AirbyteTracedException(message) from e + internal_message = ( + f"Stream: `{self.name}`, slice: `{stream_slice}`. Limits for all provided tokens are reached, please try again later" + ) + message = "Rate Limits for all provided tokens are reached. For more information please refer to documentation: https://docs.airbyte.com/integrations/sources/github#limitations--troubleshooting" + raise AirbyteTracedException(internal_message=internal_message, message=message, failure_type=FailureType.config_error) from e class GithubStream(GithubStreamABC): @@ -437,12 +442,18 @@ class Repositories(SemiIncrementalMixin, Organizations): "direction": "desc", } + def __init__(self, *args, pattern: Optional[str] = None, **kwargs): + self._pattern = re.compile(pattern) if pattern else pattern + super().__init__(*args, **kwargs) + def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: return f"orgs/{stream_slice['organization']}/repos" def parse_response(self, response: requests.Response, stream_slice: Mapping[str, Any] = None, **kwargs) -> Iterable[Mapping]: for record in response.json(): # GitHub puts records in an array. - yield self.transform(record=record, stream_slice=stream_slice) + record = self.transform(record=record, stream_slice=stream_slice) + if not self._pattern or self._pattern.match(record["full_name"]): + yield record class Tags(GithubStream): @@ -672,10 +683,13 @@ class Commits(IncrementalMixin, GithubStream): cursor_field = "created_at" slice_keys = ["repository", "branch"] - def __init__(self, branches_to_pull: Mapping[str, List[str]], default_branches: Mapping[str, str], **kwargs): + def __init__(self, branches_to_pull: List[str], **kwargs): super().__init__(**kwargs) - self.branches_to_pull = branches_to_pull - self.default_branches = default_branches + kwargs.pop("start_date") + self.branches_to_repos = {} + self.branches_to_pull = set(branches_to_pull) + self.branches_stream = Branches(**kwargs) + self.repositories_stream = RepositoryStats(**kwargs) def request_params(self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, **kwargs) -> MutableMapping[str, Any]: params = super(IncrementalMixin, self).request_params(stream_state=stream_state, stream_slice=stream_slice, **kwargs) @@ -686,9 +700,10 @@ def request_params(self, stream_state: Mapping[str, Any], stream_slice: Mapping[ return params def stream_slices(self, **kwargs) -> Iterable[Optional[Mapping[str, Any]]]: + self._validate_branches_to_pull() for stream_slice in super().stream_slices(**kwargs): repository = stream_slice["repository"] - for branch in self.branches_to_pull.get(repository, []): + for branch in self.branches_to_repos.get(repository, []): yield {"branch": branch, "repository": repository} def transform(self, record: MutableMapping[str, Any], stream_slice: Mapping[str, Any]) -> MutableMapping[str, Any]: @@ -714,6 +729,30 @@ def get_updated_state(self, current_stream_state: MutableMapping[str, Any], late current_stream_state.setdefault(repository, {}).setdefault(branch, {})[self.cursor_field] = updated_state return current_stream_state + def _validate_branches_to_pull(self): + # Get the default branch for each repository + default_branches = {} + for stream_slice in self.repositories_stream.stream_slices(sync_mode=SyncMode.full_refresh): + for repo_stats in self.repositories_stream.read_records(stream_slice=stream_slice, sync_mode=SyncMode.full_refresh): + default_branches[repo_stats["full_name"]] = repo_stats["default_branch"] + + all_branches = [] + for stream_slice in self.branches_stream.stream_slices(sync_mode=SyncMode.full_refresh): + for branch in self.branches_stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice=stream_slice): + all_branches.append(f"{branch['repository']}/{branch['name']}") + + # Create mapping of repository to list of branches to pull commits for + # If no branches are specified for a repo, use its default branch + for repo in self.repositories: + repo_branches = [] + for branch in self.branches_to_pull: + branch_parts = branch.split("/", 2) + if "/".join(branch_parts[:2]) == repo and branch in all_branches: + repo_branches.append(branch_parts[-1]) + if not repo_branches: + repo_branches = [default_branches[repo]] + self.branches_to_repos[repo] = repo_branches + class Issues(IncrementalMixin, GithubStream): """ diff --git a/airbyte-integrations/connectors/source-github/unit_tests/integration/test_events.py b/airbyte-integrations/connectors/source-github/unit_tests/integration/test_events.py index 6e5eb6ee100f3..9bba2d8d64c8e 100644 --- a/airbyte-integrations/connectors/source-github/unit_tests/integration/test_events.py +++ b/airbyte-integrations/connectors/source-github/unit_tests/integration/test_events.py @@ -177,7 +177,9 @@ def test_when_read_incrementally_then_emit_state_message(self): .with_stream_state("events", {"airbytehq/integration-test": {"created_at": "2020-06-09T10:00:00Z"}}) .build(), ) - assert actual_messages.state_messages[0].state.stream.stream_state == {'airbytehq/integration-test': {'created_at': '2022-06-09T12:47:28Z'}} + assert actual_messages.state_messages[0].state.stream.stream_state == { + "airbytehq/integration-test": {"created_at": "2022-06-09T12:47:28Z"} + } def test_read_handles_expected_error_correctly_and_exits_with_complete_status(self): """Ensure read() method does not raise an Exception and log message with error is in output""" @@ -193,5 +195,5 @@ def test_read_handles_expected_error_correctly_and_exits_with_complete_status(se assert Level.ERROR in [x.log.level for x in actual_messages.logs] events_stream_complete_message = [x for x in actual_messages.trace_messages if x.trace.type == TraceType.STREAM_STATUS][-1] - assert events_stream_complete_message.trace.stream_status.stream_descriptor.name == 'events' + assert events_stream_complete_message.trace.stream_status.stream_descriptor.name == "events" assert events_stream_complete_message.trace.stream_status.status == AirbyteStreamStatus.COMPLETE diff --git a/airbyte-integrations/connectors/source-github/unit_tests/test_multiple_token_authenticator.py b/airbyte-integrations/connectors/source-github/unit_tests/test_multiple_token_authenticator.py index f7fabd23c04d3..ebe8eb56c4a4a 100644 --- a/airbyte-integrations/connectors/source-github/unit_tests/test_multiple_token_authenticator.py +++ b/airbyte-integrations/connectors/source-github/unit_tests/test_multiple_token_authenticator.py @@ -9,6 +9,7 @@ import pytest import responses from airbyte_cdk.utils import AirbyteTracedException +from airbyte_protocol.models import FailureType from freezegun import freeze_time from source_github import SourceGithub from source_github.streams import Organizations @@ -89,6 +90,7 @@ def request_callback_orgs(request): message = ( "Stream: `organizations`, slice: `{'organization': 'org1'}`. Limits for all provided tokens are reached, please try again later" ) + assert e.value.failure_type == FailureType.config_error assert e.value.internal_message == message diff --git a/airbyte-integrations/connectors/source-github/unit_tests/test_source.py b/airbyte-integrations/connectors/source-github/unit_tests/test_source.py index 8942c27f93ac2..388e726e7dde0 100644 --- a/airbyte-integrations/connectors/source-github/unit_tests/test_source.py +++ b/airbyte-integrations/connectors/source-github/unit_tests/test_source.py @@ -105,7 +105,7 @@ def test_check_connection_repos_and_org_repos(rate_limit_mock_response): @responses.activate def test_check_connection_org_only(rate_limit_mock_response): - repos = [{"name": f"name {i}", "full_name": f"full name {i}", "updated_at": "2020-01-01T00:00:00Z"} for i in range(1000)] + repos = [{"name": f"name {i}", "full_name": f"airbytehq/full name {i}", "updated_at": "2020-01-01T00:00:00Z"} for i in range(1000)] responses.add("GET", "https://api.github.com/orgs/airbytehq/repos", json=repos) status = check_source("airbytehq/*") @@ -115,49 +115,6 @@ def test_check_connection_org_only(rate_limit_mock_response): assert len(responses.calls) == 2 -@responses.activate -def test_get_branches_data(): - - repository_args = {"repositories": ["airbytehq/integration-test"], "page_size_for_large_streams": 10} - - source = SourceGithub() - - responses.add( - "GET", - "https://api.github.com/repos/airbytehq/integration-test", - json={"full_name": "airbytehq/integration-test", "default_branch": "master"}, - ) - - responses.add( - "GET", - "https://api.github.com/repos/airbytehq/integration-test/branches", - json=[ - {"repository": "airbytehq/integration-test", "name": "feature/branch_0"}, - {"repository": "airbytehq/integration-test", "name": "feature/branch_1"}, - {"repository": "airbytehq/integration-test", "name": "feature/branch_2"}, - {"repository": "airbytehq/integration-test", "name": "master"}, - ], - ) - - default_branches, branches_to_pull = source._get_branches_data([], repository_args) - assert default_branches == {"airbytehq/integration-test": "master"} - assert branches_to_pull == {"airbytehq/integration-test": ["master"]} - - default_branches, branches_to_pull = source._get_branches_data( - [ - "airbytehq/integration-test/feature/branch_0", - "airbytehq/integration-test/feature/branch_1", - "airbytehq/integration-test/feature/branch_3", - ], - repository_args, - ) - - assert default_branches == {"airbytehq/integration-test": "master"} - assert len(branches_to_pull["airbytehq/integration-test"]) == 2 - assert "feature/branch_0" in branches_to_pull["airbytehq/integration-test"] - assert "feature/branch_1" in branches_to_pull["airbytehq/integration-test"] - - @responses.activate def test_get_org_repositories(): responses.add( @@ -178,7 +135,7 @@ def test_get_org_repositories(): config = {"repositories": ["airbytehq/integration-test", "docker/*"]} source = SourceGithub() config = source._ensure_default_values(config) - organisations, repositories = source._get_org_repositories(config, authenticator=None) + organisations, repositories, _ = source._get_org_repositories(config, authenticator=None) assert set(repositories) == {"airbytehq/integration-test", "docker/docker-py", "docker/compose"} assert set(organisations) == {"airbytehq", "docker"} @@ -186,7 +143,7 @@ def test_get_org_repositories(): @responses.activate def test_organization_or_repo_available(monkeypatch, rate_limit_mock_response): - monkeypatch.setattr(SourceGithub, "_get_org_repositories", MagicMock(return_value=(False, False))) + monkeypatch.setattr(SourceGithub, "_get_org_repositories", MagicMock(return_value=(False, False, None))) source = SourceGithub() with pytest.raises(Exception) as exc_info: config = {"access_token": "test_token", "repository": ""} @@ -209,6 +166,7 @@ def test_check_config_repository(): "airbyte_hq/airbyte", "airbytehq/123", "airbytehq/airbytexgit", + "airbytehq/a*", ] repos_fail = [ @@ -242,7 +200,7 @@ def test_check_config_repository(): @responses.activate def test_streams_no_streams_available_error(monkeypatch, rate_limit_mock_response): - monkeypatch.setattr(SourceGithub, "_get_org_repositories", MagicMock(return_value=(False, False))) + monkeypatch.setattr(SourceGithub, "_get_org_repositories", MagicMock(return_value=(False, False, None))) with pytest.raises(AirbyteTracedException) as e: SourceGithub().streams(config={"access_token": "test_token", "repository": "airbytehq/airbyte-test"}) assert str(e.value) == "No streams available. Please check permissions" diff --git a/airbyte-integrations/connectors/source-github/unit_tests/test_stream.py b/airbyte-integrations/connectors/source-github/unit_tests/test_stream.py index 24f02e254e1ab..8cd73886b8779 100644 --- a/airbyte-integrations/connectors/source-github/unit_tests/test_stream.py +++ b/airbyte-integrations/connectors/source-github/unit_tests/test_stream.py @@ -405,13 +405,12 @@ def test_stream_commits_incremental_read(): "start_date": "2022-02-02T10:10:03Z", } - default_branches = {"organization/repository": "master"} - branches_to_pull = {"organization/repository": ["branch"]} + branches_to_pull = ["organization/repository/branch"] - stream = Commits(**repository_args_with_start_date, branches_to_pull=branches_to_pull, default_branches=default_branches) + stream = Commits(**repository_args_with_start_date, branches_to_pull=branches_to_pull) stream.page_size = 2 - data = [ + commits_data = [ {"sha": 1, "commit": {"author": {"date": "2022-02-02T10:10:02Z"}}}, {"sha": 2, "commit": {"author": {"date": "2022-02-02T10:10:04Z"}}}, {"sha": 3, "commit": {"author": {"date": "2022-02-02T10:10:06Z"}}}, @@ -421,27 +420,57 @@ def test_stream_commits_incremental_read(): {"sha": 7, "commit": {"author": {"date": "2022-02-02T10:10:14Z"}}}, ] - api_url = "https://api.github.com/repos/organization/repository/commits" + repo_api_url = "https://api.github.com/repos/organization/repository" + branches_api_url = "https://api.github.com/repos/organization/repository/branches" + commits_api_url = "https://api.github.com/repos/organization/repository/commits" responses.add( "GET", - api_url, - json=data[0:3], + repo_api_url, + json={"id": 1, "updated_at": "2022-02-02T10:10:02Z", "default_branch": "main", "full_name": "organization/repository"}, + ) + responses.add( + responses.GET, + branches_api_url, + json=[ + { + "name": "branch", + "commit": { + "sha": "74445338726f0f8e1c27c10dce90ca00c5ae2858", + "url": "https://api.github.com/repos/airbytehq/airbyte/commits/74445338726f0f8e1c27c10dce90ca00c5ae2858" + }, + "protected": False + }, + { + "name": "main", + "commit": { + "sha": "c27c10dce90ca00c5ae285874445338726f0f8e1", + "url": "https://api.github.com/repos/airbytehq/airbyte/commits/c27c10dce90ca00c5ae285874445338726f0f8e1" + }, + "protected": False + } + ], + status=200, + ) + responses.add( + "GET", + commits_api_url, + json=commits_data[0:3], match=[matchers.query_param_matcher({"since": "2022-02-02T10:10:03Z", "sha": "branch", "per_page": "2"}, strict_match=False)], ) responses.add( "GET", - api_url, - json=data[3:5], + commits_api_url, + json=commits_data[3:5], headers={"Link": '; rel="next"'}, match=[matchers.query_param_matcher({"since": "2022-02-02T10:10:06Z", "sha": "branch", "per_page": "2"}, strict_match=False)], ) responses.add( "GET", - api_url, - json=data[5:7], + commits_api_url, + json=commits_data[5:7], match=[ matchers.query_param_matcher( {"since": "2022-02-02T10:10:06Z", "sha": "branch", "per_page": "2", "page": "2"}, strict_match=False diff --git a/airbyte-integrations/connectors/source-gitlab/.coveragerc b/airbyte-integrations/connectors/source-gitlab/.coveragerc new file mode 100644 index 0000000000000..1461a902bb0ca --- /dev/null +++ b/airbyte-integrations/connectors/source-gitlab/.coveragerc @@ -0,0 +1,3 @@ +[run] +omit = + source_gitlab/run.py diff --git a/airbyte-integrations/connectors/source-gitlab/README.md b/airbyte-integrations/connectors/source-gitlab/README.md index 67d7ce2a8c7a2..acb7de8147d0e 100644 --- a/airbyte-integrations/connectors/source-gitlab/README.md +++ b/airbyte-integrations/connectors/source-gitlab/README.md @@ -30,7 +30,7 @@ See `sample_files/sample_config.json` for a sample config file. poetry run source-gitlab spec poetry run source-gitlab check --config secrets/config.json poetry run source-gitlab discover --config secrets/config.json -poetry run source-gitlab read --config secrets/config.json --catalog sample_files/configured_catalog.json +poetry run source-gitlab read --config secrets/config.json --catalog integration_tests/configured_catalog.json ``` ### Running unit tests diff --git a/airbyte-integrations/connectors/source-gitlab/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-gitlab/integration_tests/abnormal_state.json index 55f73258b91e2..b21cf8c2eadce 100644 --- a/airbyte-integrations/connectors/source-gitlab/integration_tests/abnormal_state.json +++ b/airbyte-integrations/connectors/source-gitlab/integration_tests/abnormal_state.json @@ -2,52 +2,162 @@ { "type": "STREAM", "stream": { - "stream_state": { - "25157276": { - "created_at": "2121-03-18T12:51:05+00:00" - } - }, "stream_descriptor": { - "name": "commits" + "name": "commits", + "namespace": null + }, + "stream_state": { + "states": [ + { + "partition": { + "id": 25156633, + "parent_slice": { + "id": "airbyte.io%2Fci-test-project" + } + }, + "cursor": { + "created_at": "2124-03-15T09:54:11Z" + } + }, + { + "partition": { + "id": 41551658, + "parent_slice": { + "id": "new-group-airbute%2Ftest-public-sg%2Ftest-sg-public-2%2Ftest-private-subsubg-1%2Ftest_project_in_nested_subgroup" + } + }, + "cursor": { + "created_at": "2124-03-15T09:54:17Z" + } + }, + { + "partition": { + "id": 25157276, + "parent_slice": { + "id": "new-group-airbute%2Fnew-ci-test-project" + } + }, + "cursor": { + "created_at": "2124-03-15T09:54:20Z" + } + } + ] } } }, { "type": "STREAM", "stream": { - "stream_state": { - "25157276": { - "updated_at": "2121-03-15T16:08:06.041000+00:00" - } - }, "stream_descriptor": { - "name": "issues" + "name": "issues", + "namespace": null + }, + "stream_state": { + "states": [ + { + "partition": { + "id": 25156633, + "parent_slice": { + "id": "airbyte.io%2Fci-test-project" + } + }, + "cursor": { + "updated_at": "2124-03-15T09:54:11Z" + } + }, + { + "partition": { + "id": 41551658, + "parent_slice": { + "id": "new-group-airbute%2Ftest-public-sg%2Ftest-sg-public-2%2Ftest-private-subsubg-1%2Ftest_project_in_nested_subgroup" + } + }, + "cursor": { + "updated_at": "2124-03-15T09:54:17Z" + } + }, + { + "partition": { + "id": 25157276, + "parent_slice": { + "id": "new-group-airbute%2Fnew-ci-test-project" + } + }, + "cursor": { + "updated_at": "2124-03-15T09:54:20Z" + } + }, + { + "partition": { + "id": 25032440, + "parent_slice": { + "id": "airbyte.io%2Flearn-gitlab" + } + }, + "cursor": { + "updated_at": "2124-03-15T10:04:22Z" + } + } + ] } } }, { "type": "STREAM", "stream": { - "stream_state": { - "25157276": { - "updated_at": "2121-03-18T12:51:06.319000+00:00" - } - }, "stream_descriptor": { - "name": "merge_requests" + "name": "merge_requests", + "namespace": null + }, + "stream_state": { + "states": [ + { + "partition": { + "id": 25157276, + "parent_slice": { + "id": "new-group-airbute%2Fnew-ci-test-project" + } + }, + "cursor": { + "updated_at": "2121-03-18T12:51:06.319000+00:00" + } + }, + { + "partition": { + "id": 25156633, + "parent_slice": { + "id": "airbyte.io%2Fci-test-project" + } + }, + "cursor": { + "updated_at": "2121-03-18T12:51:06.319000+00:00" + } + } + ] } } }, { "type": "STREAM", "stream": { - "stream_state": { - "25157276": { - "updated_at": "2121-03-18T12:51:52.007000+00:00" - } - }, "stream_descriptor": { - "name": "pipelines" + "name": "pipelines", + "namespace": null + }, + "stream_state": { + "states": [ + { + "partition": { + "id": 25157276, + "parent_slice": { + "id": "new-group-airbute%2Fnew-ci-test-project" + } + }, + "cursor": { + "updated_at": "2121-03-18T12:51:52.007000+00:00" + } + } + ] } } } diff --git a/airbyte-integrations/connectors/source-gitlab/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-gitlab/integration_tests/configured_catalog.json index 67009e00a6da6..499fcc7e24f0c 100644 --- a/airbyte-integrations/connectors/source-gitlab/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-gitlab/integration_tests/configured_catalog.json @@ -27,11 +27,11 @@ "name": "branches", "json_schema": {}, "supported_sync_modes": ["full_refresh"], - "source_defined_primary_key": [["name"]] + "source_defined_primary_key": [["project_id"], ["name"]] }, "sync_mode": "full_refresh", "destination_sync_mode": "overwrite", - "primary_key": [["name"]] + "primary_key": [["project_id"], ["name"]] }, { "stream": { @@ -110,44 +110,44 @@ "name": "project_members", "json_schema": {}, "supported_sync_modes": ["full_refresh"], - "source_defined_primary_key": [["id"]] + "source_defined_primary_key": [["project_id"], ["id"]] }, "sync_mode": "full_refresh", "destination_sync_mode": "overwrite", - "primary_key": [["id"]] + "primary_key": [["project_id"], ["id"]] }, { "stream": { "name": "group_members", "json_schema": {}, "supported_sync_modes": ["full_refresh"], - "source_defined_primary_key": [["id"]] + "source_defined_primary_key": [["group_id"], ["id"]] }, "sync_mode": "full_refresh", "destination_sync_mode": "overwrite", - "primary_key": [["id"]] + "primary_key": [["group_id"], ["id"]] }, { "stream": { "name": "project_labels", "json_schema": {}, "supported_sync_modes": ["full_refresh"], - "source_defined_primary_key": [["id"]] + "source_defined_primary_key": [["project_id"], ["id"]] }, "sync_mode": "full_refresh", "destination_sync_mode": "overwrite", - "primary_key": [["id"]] + "primary_key": [["project_id"], ["id"]] }, { "stream": { "name": "group_labels", "json_schema": {}, "supported_sync_modes": ["full_refresh"], - "source_defined_primary_key": [["id"]] + "source_defined_primary_key": [["group_id"], ["id"]] }, "sync_mode": "full_refresh", "destination_sync_mode": "overwrite", - "primary_key": [["id"]] + "primary_key": [["group_id"], ["id"]] }, { "stream": { @@ -189,7 +189,7 @@ "stream": { "name": "deployments", "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], + "supported_sync_modes": ["full_refresh"], "source_defined_cursor": true, "default_cursor_field": ["updated_at"], "source_defined_primary_key": [["id"]] @@ -203,11 +203,11 @@ "name": "tags", "json_schema": {}, "supported_sync_modes": ["full_refresh"], - "source_defined_primary_key": [["name"]] + "source_defined_primary_key": [["project_id"], ["name"]] }, "sync_mode": "full_refresh", "destination_sync_mode": "overwrite", - "primary_key": [["name"]] + "primary_key": [["project_id"], ["name"]] }, { "stream": { diff --git a/airbyte-integrations/connectors/source-gitlab/metadata.yaml b/airbyte-integrations/connectors/source-gitlab/metadata.yaml index e656b3361f6e6..ea050729ad2f8 100644 --- a/airbyte-integrations/connectors/source-gitlab/metadata.yaml +++ b/airbyte-integrations/connectors/source-gitlab/metadata.yaml @@ -10,12 +10,13 @@ data: connectorSubtype: api connectorType: source definitionId: 5e6175e5-68e1-4c17-bff9-56103bbb0d80 - dockerImageTag: 3.0.0 + dockerImageTag: 4.0.0 dockerRepository: airbyte/source-gitlab documentationUrl: https://docs.airbyte.com/integrations/sources/gitlab githubIssueLabel: source-gitlab icon: gitlab.svg license: MIT + maxSecondsBetweenMessages: 60 name: Gitlab remoteRegistries: pypi: @@ -29,6 +30,25 @@ data: releaseStage: generally_available releases: breakingChanges: + 4.0.0: + message: + In this release, several changes have been made to the Gitlab connector. + The primary key was changed for streams `group_members`, `group_labels`, `project_members`, `project_labels`, `branches`, and `tags`. + Users will need to refresh schemas and reset the affected streams after upgrading. + upgradeDeadline: "2024-04-15" + scopedImpact: + - scopeType: stream + impactedScopes: + - "commits" + - "issues" + - "merge_requests" + - "pipelines" + - "group_members" + - "group_labels" + - "project_members" + - "project_labels" + - "branches" + - "tags" 3.0.0: message: In this release, merge_request_commits stream schema has been fixed so that it returns commits for each merge_request. @@ -53,5 +73,5 @@ data: supportLevel: certified tags: - language:python - - cdk:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-gitlab/poetry.lock b/airbyte-integrations/connectors/source-gitlab/poetry.lock index 7056f01bf6e9a..8a852b5a75047 100644 --- a/airbyte-integrations/connectors/source-gitlab/poetry.lock +++ b/airbyte-integrations/connectors/source-gitlab/poetry.lock @@ -1,40 +1,39 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.58.8" +version = "0.78.1" description = "A framework for writing Airbyte Connectors." optional = false -python-versions = ">=3.8" +python-versions = "<4.0,>=3.9" files = [ - {file = "airbyte-cdk-0.58.8.tar.gz", hash = "sha256:80cfad673302802e0f5d485879f1bd2f3679a4e3b12b2af42bd7bb37a3991a71"}, - {file = "airbyte_cdk-0.58.8-py3-none-any.whl", hash = "sha256:5b0b19745e96ba3f20683c48530d58a00be48361dfa34ec3c38cef8da03ba330"}, + {file = "airbyte_cdk-0.78.1-py3-none-any.whl", hash = "sha256:73dfc03e55a7107bf28b5bbc4e43572d448c60e9b34368d22cf48b6536aa2263"}, + {file = "airbyte_cdk-0.78.1.tar.gz", hash = "sha256:700e5526ae29db1e453b3def8682726f7d8aa653ee2f3056488d0a484f055133"}, ] [package.dependencies] airbyte-protocol-models = "0.5.1" backoff = "*" cachetools = "*" -Deprecated = ">=1.2,<2.0" +Deprecated = ">=1.2,<1.3" dpath = ">=2.0.1,<2.1.0" genson = "1.2.2" isodate = ">=0.6.1,<0.7.0" Jinja2 = ">=3.1.2,<3.2.0" -jsonref = ">=0.2,<1.0" +jsonref = ">=0.2,<0.3" jsonschema = ">=3.2.0,<3.3.0" pendulum = "<3.0.0" pydantic = ">=1.10.8,<2.0.0" pyrate-limiter = ">=3.1.0,<3.2.0" python-dateutil = "*" -PyYAML = ">=6.0.1" +PyYAML = ">=6.0.1,<7.0.0" requests = "*" -requests-cache = "*" +requests_cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] [[package]] @@ -104,13 +103,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -366,13 +365,13 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jsonref" -version = "0.3.0" -description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." +version = "0.2" +description = "An implementation of JSON Reference for Python" optional = false -python-versions = ">=3.3,<4.0" +python-versions = "*" files = [ - {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, - {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, ] [[package]] @@ -566,13 +565,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -784,30 +783,30 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xm [[package]] name = "pytest-mock" -version = "3.12.0" +version = "3.14.0" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, - {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, ] [package.dependencies] -pytest = ">=5.0" +pytest = ">=6.2.5" [package.extras] dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -907,13 +906,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -925,50 +924,48 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" -version = "1.11.0" +version = "1.12.1" description = "Mock out responses from the requests package" optional = false -python-versions = "*" +python-versions = ">=3.5" files = [ - {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"}, - {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"}, + {file = "requests-mock-1.12.1.tar.gz", hash = "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401"}, + {file = "requests_mock-1.12.1-py2.py3-none-any.whl", hash = "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563"}, ] [package.dependencies] -requests = ">=2.3,<3" -six = "*" +requests = ">=2.22,<3" [package.extras] fixture = ["fixtures"] -test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] [[package]] name = "setuptools" -version = "69.1.0" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -994,13 +991,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -1019,13 +1016,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1250,4 +1247,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "94cc27fe8a4e14f6d9cfaaa0281dce3fd7ac7082d63c6152fa24a455a9872070" +content-hash = "9ea3511234a2c6204be6a224d0e707a1c8c52e6793ed1419f5ceb64317e6c51d" diff --git a/airbyte-integrations/connectors/source-gitlab/pyproject.toml b/airbyte-integrations/connectors/source-gitlab/pyproject.toml index d7d58f8a08235..8f7a768d6e24a 100644 --- a/airbyte-integrations/connectors/source-gitlab/pyproject.toml +++ b/airbyte-integrations/connectors/source-gitlab/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "3.0.0" +version = "4.0.0" name = "source-gitlab" description = "Source implementation for GitLab." authors = [ "Airbyte ",] @@ -17,7 +17,7 @@ include = "source_gitlab" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "==0.58.8" +airbyte-cdk = "^0" vcrpy = "==4.1.1" [tool.poetry.scripts] diff --git a/airbyte-integrations/connectors/source-gitlab/source_gitlab/components/partition_routers.py b/airbyte-integrations/connectors/source-gitlab/source_gitlab/components/partition_routers.py new file mode 100644 index 0000000000000..ec770387995c6 --- /dev/null +++ b/airbyte-integrations/connectors/source-gitlab/source_gitlab/components/partition_routers.py @@ -0,0 +1,43 @@ +# +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. +# + + +from dataclasses import dataclass +from typing import Iterable + +from airbyte_cdk.models import SyncMode +from airbyte_cdk.sources.declarative.partition_routers.substream_partition_router import SubstreamPartitionRouter +from airbyte_cdk.sources.declarative.types import StreamSlice + + +@dataclass +class GroupStreamsPartitionRouter(SubstreamPartitionRouter): + def stream_slices(self) -> Iterable[StreamSlice]: + parent_streams = {stream.stream.name: stream.stream for stream in self.parent_stream_configs} + groups_list = self.config.get("groups_list") + selected_parent = parent_streams["include_descendant_groups"] if groups_list else parent_streams["groups_list"] + + for stream_slice in selected_parent.stream_slices(sync_mode=SyncMode.full_refresh): + for record in selected_parent.read_records(sync_mode=SyncMode.full_refresh, stream_slice=stream_slice): + yield StreamSlice(partition={"id": record["id"]}, cursor_slice={}) + + +@dataclass +class ProjectStreamsPartitionRouter(SubstreamPartitionRouter): + def stream_slices(self) -> Iterable[StreamSlice]: + parent_stream = self.parent_stream_configs[0].stream + projects_list = self.config.get("projects_list", []) + + group_project_ids = [] + for stream_slice in parent_stream.stream_slices(sync_mode=SyncMode.full_refresh): + for record in parent_stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice=stream_slice): + group_project_ids.extend([i["path_with_namespace"] for i in record["projects"]]) + + if group_project_ids: + for project_id in group_project_ids: + if not projects_list or projects_list and project_id in projects_list: + yield StreamSlice(partition={"id": project_id.replace("/", "%2F")}, cursor_slice={}) + else: + for project_id in projects_list: + yield StreamSlice(partition={"id": project_id.replace("/", "%2F")}, cursor_slice={}) diff --git a/airbyte-integrations/connectors/source-gitlab/source_gitlab/manifest.yaml b/airbyte-integrations/connectors/source-gitlab/source_gitlab/manifest.yaml new file mode 100644 index 0000000000000..a41d680cd3e30 --- /dev/null +++ b/airbyte-integrations/connectors/source-gitlab/source_gitlab/manifest.yaml @@ -0,0 +1,614 @@ +version: 0.78.1 +type: DeclarativeSource + +definitions: + # Authenticators + bearer_authenticator: + type: BearerAuthenticator + api_token: "{{ config['credentials']['access_token'] }}" + oauth_authenticator: + type: OAuthAuthenticator + client_id: "{{ config['credentials']['client_id'] }}" + client_secret: "{{ config['credentials']['client_secret'] }}" + refresh_token: "{{ config['credentials']['refresh_token'] }}" + token_refresh_endpoint: "https://{{ config.get('api_url', 'gitlab.com') }}/oauth/token" + refresh_token_updater: + refresh_token_error_status_codes: [400, 401] + refresh_token_error_key: "error" + refresh_token_error_values: ["invalid_grant"] + authenticator: + type: SelectiveAuthenticator + authenticator_selection_path: ["credentials", "auth_type"] + authenticators: + oauth2.0: "#/definitions/oauth_authenticator" + access_token: "#/definitions/bearer_authenticator" + + # Requester + requester: + type: HttpRequester + url_base: >- + {% set parts = config.get('api_url', 'gitlab.com').split('://') %} + {{ parts[0] if parts|length > 1 else 'https' }}://{{ parts[1] if parts[1] else parts[0] }}/api/v4/ + http_method: GET + authenticator: "#/definitions/authenticator" + use_cache: true + error_handler: + type: DefaultErrorHandler + response_filters: + - type: HttpResponseFilter + action: IGNORE + http_codes: [403] + - type: HttpResponseFilter + action: FAIL + http_codes: [401] + error_message: Unable to refresh the `access_token`, please re-authenticate in Sources > Settings. + - type: HttpResponseFilter + action: FAIL + http_codes: [500] + error_message: Unable to connect to Gitlab API with the provided credentials + - type: HttpResponseFilter + action: FAIL + http_codes: [404] + error_message: Groups and/or projects that you provide are invalid or you don't have permission to view it. + request_parameters: "{{ parameters.get('request_parameters', {}) }}" + + # Selector + selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: [] + + # Paginator + paginator: + type: DefaultPaginator + pagination_strategy: + type: PageIncrement + page_size: 50 + start_from_page: 1 + inject_on_first_request: false + page_size_option: + type: RequestOption + field_name: per_page + inject_into: request_parameter + page_token_option: + type: RequestOption + field_name: page + inject_into: request_parameter + + # Retrievers + retriever: + type: SimpleRetriever + record_selector: "#/definitions/selector" + requester: "#/definitions/requester" + paginator: "#/definitions/paginator" + + group_streams_retriever: + $ref: "#/definitions/retriever" + partition_router: + type: CustomPartitionRouter + class_name: source_gitlab.components.partition_routers.GroupStreamsPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: "id" + stream: "#/definitions/groups_list_stream" + partition_field: "id" + - type: ParentStreamConfig + parent_key: "id" + stream: "#/definitions/include_descendant_groups_stream" + partition_field: "id" + + group_child_streams_retriever: + $ref: "#/definitions/retriever" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: "id" + stream: "#/definitions/groups_stream" + partition_field: "id" + + projects_streams_retriever: + $ref: "#/definitions/retriever" + partition_router: + type: CustomPartitionRouter + class_name: source_gitlab.components.partition_routers.ProjectStreamsPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: "id" + stream: "#/definitions/groups_stream" + partition_field: "id" + + projects_child_streams_retriever: + $ref: "#/definitions/retriever" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: "id" + stream: "#/definitions/projects_stream" + partition_field: "id" + + pipelines_child_streams_retriever: + $ref: "#/definitions/retriever" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: "id" + stream: "#/definitions/pipelines_stream" + partition_field: "id" + + # Transformations + add_project_id_field: + type: AddFields + fields: + - type: AddedFieldDefinition + path: ["project_id"] + value: "{{ stream_slice.id }}" + + add_group_id_field: + type: AddFields + fields: + - type: AddedFieldDefinition + path: ["group_id"] + value: "{{ stream_slice.id }}" + + # Service streams + base_full_refresh_stream: + type: DeclarativeStream + primary_key: "id" + + base_groups_child_stream: + $ref: "#/definitions/base_full_refresh_stream" + retriever: "#/definitions/group_child_streams_retriever" + + base_projects_child_stream: + $ref: "#/definitions/base_full_refresh_stream" + retriever: "#/definitions/projects_child_streams_retriever" + + base_projects_incremental_child_stream: + $ref: "#/definitions/base_projects_child_stream" + incremental_sync: + type: DatetimeBasedCursor + cursor_field: "{{ parameters.get('cursor_field', 'updated_at') }}" + start_datetime: "{{ config.get('start_date', '2014-01-01T00:00:00Z') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%S.%f%z" + - "%Y-%m-%dT%H:%M:%S.%fZ" + - "%Y-%m-%dT%H:%M:%S%z" + - "%Y-%m-%dT%H:%M:%SZ" + cursor_granularity: "PT1S" + step: "P180DT1S" + start_time_option: + type: RequestOption + field_name: "{{ parameters.get('lower_bound_filter', 'updated_after') }}" + inject_into: request_parameter + end_time_option: + type: RequestOption + field_name: "{{ parameters.get('upper_bound_filter', 'updated_before') }}" + inject_into: request_parameter + + groups_list_stream: + name: "groups_list" + $ref: "#/definitions/base_full_refresh_stream" + retriever: "#/definitions/retriever" + $parameters: + path: "groups" + request_parameters: {} + + include_descendant_groups_stream: + name: "include_descendant_groups" + $ref: "#/definitions/base_full_refresh_stream" + retriever: + $ref: "#/definitions/retriever" + partition_router: + type: ListPartitionRouter + cursor_field: "slice" + values: | + {% set slices = [] %} + {% for group_id in config.get('groups_list', []) %} + {% set _ = slices.append({'path': 'groups/' + group_id}) %} + {% set _ = slices.append({'path': 'groups/' + group_id + '/descendant_groups'}) %} + {% endfor %} + {{ slices }} + $parameters: + path: "{{ stream_slice.slice.path }}" + request_parameters: {} + + # Full refresh streams + ## Groups-based streams + groups_stream: + name: "groups" + $ref: "#/definitions/base_full_refresh_stream" + retriever: "#/definitions/group_streams_retriever" + transformations: + - type: AddFields + fields: + - type: AddedFieldDefinition + path: + - projects + value: | + {% set projects = [] %} + {% for project in (record.get('projects') or []): %} + {% set _ = projects.append({'id': project['id'], 'path_with_namespace': project['path_with_namespace']}) %} + {% endfor %} + {{ projects }} + $parameters: + path: "groups/{{ stream_slice.id }}" + request_parameters: {} + + group_milestones_stream: + name: "group_milestones" + $ref: "#/definitions/base_groups_child_stream" + $parameters: + path: "groups/{{ stream_slice.id }}/milestones" + + group_members_stream: + name: "group_members" + $ref: "#/definitions/base_groups_child_stream" + primary_key: ["group_id", "id"] + transformations: + - "#/definitions/add_group_id_field" + $parameters: + path: "groups/{{ stream_slice.id }}/members" + + group_labels_stream: + name: "group_labels" + $ref: "#/definitions/base_groups_child_stream" + primary_key: ["group_id", "id"] + transformations: + - "#/definitions/add_group_id_field" + $parameters: + path: "groups/{{ stream_slice.id }}/labels" + + group_issue_boards_stream: + name: "group_issue_boards" + $ref: "#/definitions/base_groups_child_stream" + transformations: + - "#/definitions/add_group_id_field" + $parameters: + path: "groups/{{ stream_slice.id }}/boards" + + epics_stream: + name: "epics" + $ref: "#/definitions/base_groups_child_stream" + transformations: + - type: AddFields + fields: + - type: AddedFieldDefinition + path: ["author_id"] + value: "{{ (record.get('author') or {}).get('id') }}" + primary_key: "iid" + $parameters: + path: "groups/{{ stream_slice.id }}/epics" + + epic_issues_stream: + name: "epic_issues" + $ref: "#/definitions/base_groups_child_stream" + primary_key: "iid" + transformations: + - type: AddFields + fields: + - type: AddedFieldDefinition + path: ["milestone_id"] + value: "{{ (record.get('milestone') or {}).get('id') }}" + - type: AddedFieldDefinition + path: ["assignee_id"] + value: "{{ (record.get('assignee') or {}).get('id') }}" + - type: AddedFieldDefinition + path: ["author_id"] + value: "{{ (record.get('author') or {}).get('id') }}" + - type: AddedFieldDefinition + path: ["assignees"] + value: | + {% set ids = [] %} + {% for data in (record.get('assignees') or []) %} + {% set _ = ids.append(data.get('id')) %} + {% endfor %} + {{ ids }} + retriever: + $ref: "#/definitions/retriever" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: "iid" + stream: "#/definitions/epics_stream" + partition_field: "iid" + $parameters: + path: "groups/{{ stream_slice.parent_slice.id }}/epics/{{ stream_slice.iid }}/issues" + + ## Projects-based streams + projects_stream: + name: "projects" + $ref: "#/definitions/base_full_refresh_stream" + retriever: "#/definitions/projects_streams_retriever" + $parameters: + path: "projects/{{ stream_slice.id }}" + request_parameters: + statistics: 1 + + project_milestones_stream: + name: "project_milestones" + $ref: "#/definitions/base_projects_child_stream" + $parameters: + path: "projects/{{ stream_slice.id }}/milestones" + + project_members_stream: + name: "project_members" + $ref: "#/definitions/base_projects_child_stream" + primary_key: ["project_id", "id"] + transformations: + - "#/definitions/add_project_id_field" + $parameters: + path: "projects/{{ stream_slice.id }}/members" + + project_labels_stream: + name: "project_labels" + $ref: "#/definitions/base_projects_child_stream" + primary_key: ["project_id", "id"] + transformations: + - "#/definitions/add_project_id_field" + $parameters: + path: "projects/{{ stream_slice.id }}/labels" + + branches_stream: + name: "branches" + $ref: "#/definitions/base_projects_child_stream" + transformations: + - "#/definitions/add_project_id_field" + - type: AddFields + fields: + - type: AddedFieldDefinition + path: ["commit_id"] + value: "{{ (record.get('commit') or {}).get('id') }}" + primary_key: ["project_id", "name"] + $parameters: + path: "projects/{{ stream_slice.id }}/repository/branches" + + releases_stream: + name: "releases" + $ref: "#/definitions/base_projects_child_stream" + primary_key: "name" + transformations: + - "#/definitions/add_project_id_field" + - type: AddFields + fields: + - type: AddedFieldDefinition + path: ["author_id"] + value: "{{ (record.get('author') or {}).get('id') }}" + - type: AddedFieldDefinition + path: ["commit_id"] + value: "{{ (record.get('commit') or {}).get('id') }}" + - type: AddedFieldDefinition + path: ["milestones"] + value: | + {% set ids = [] %} + {% for data in record.get('milestones', []) %} + {% set _ = ids.append(data.get('id')) %} + {% endfor %} + {{ ids }} + $parameters: + path: "projects/{{ stream_slice.id }}/releases" + + tags_stream: + name: "tags" + $ref: "#/definitions/base_projects_child_stream" + primary_key: ["project_id", "name"] + transformations: + - "#/definitions/add_project_id_field" + - type: AddFields + fields: + - type: AddedFieldDefinition + path: ["commit_id"] + value: "{{ (record.get('commit') or {}).get('id') }}" + $parameters: + path: "projects/{{ stream_slice.id }}/repository/tags" + + users_stream: + name: "users" + $ref: "#/definitions/base_projects_child_stream" + primary_key: "name" + $parameters: + path: "projects/{{ stream_slice.id }}/users" + + deployments_stream: + name: "deployments" + $ref: "#/definitions/base_projects_child_stream" + transformations: + - "#/definitions/add_project_id_field" + - type: AddFields + fields: + - type: AddedFieldDefinition + path: ["user_username"] + value: "{{ (record.get('user') or {}).get('username') }}" + - type: AddedFieldDefinition + path: ["user_full_name"] + value: "{{ (record.get('user') or {}).get('name') }}" + - type: AddedFieldDefinition + path: ["environment_name"] + value: "{{ (record.get('environment') or {}).get('name') }}" + - type: AddedFieldDefinition + path: ["user_id"] + value: "{{ (record.get('user') or {}).get('id') }}" + - type: AddedFieldDefinition + path: ["environment_id"] + value: "{{ (record.get('environment') or {}).get('id') }}" + $parameters: + path: "projects/{{ stream_slice.id }}/deployments" + + merge_request_commits_stream: + name: "merge_request_commits" + $ref: "#/definitions/base_projects_child_stream" + retriever: + $ref: "#/definitions/retriever" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: "iid" + stream: "#/definitions/merge_requests_stream" + partition_field: "iid" + transformations: + - type: AddFields + fields: + - type: AddedFieldDefinition + path: ["project_id"] + value: "{{ stream_slice.parent_slice.id }}" + - type: AddedFieldDefinition + path: ["merge_request_iid"] + value: "{{ stream_slice.iid }}" + $parameters: + path: "projects/{{ stream_slice.parent_slice.id }}/merge_requests/{{ stream_slice.iid }}/commits" + + pipelines_extended_stream: + name: "pipelines_extended" + $ref: "#/definitions/base_projects_child_stream" + retriever: "#/definitions/pipelines_child_streams_retriever" + $parameters: + path: "projects/{{ stream_slice.parent_slice.id }}/pipelines/{{ stream_slice.id }}" + + jobs_stream: + name: "jobs" + $ref: "#/definitions/base_projects_child_stream" + retriever: "#/definitions/pipelines_child_streams_retriever" + transformations: + - type: AddFields + fields: + - type: AddedFieldDefinition + path: ["project_id"] + value: "{{ stream_slice.parent_slice.id }}" + - type: AddedFieldDefinition + path: ["user_id"] + value: "{{ (record.get('user') or {}).get('id') }}" + - type: AddedFieldDefinition + path: ["pipeline_id"] + value: "{{ (record.get('pipeline') or {}).get('id') }}" + - type: AddedFieldDefinition + path: ["runner_id"] + value: "{{ (record.get('runner') or {}).get('id') }}" + - type: AddedFieldDefinition + path: ["commit_id"] + value: "{{ (record.get('commit') or {}).get('id') }}" + $parameters: + path: "projects/{{ stream_slice.parent_slice.id }}/pipelines/{{ stream_slice.id }}/jobs" + + # Incremental streams + commits_stream: + name: "commits" + $ref: "#/definitions/base_projects_incremental_child_stream" + transformations: + - "#/definitions/add_project_id_field" + $parameters: + path: "projects/{{ stream_slice.id }}/repository/commits" + cursor_field: "created_at" + lower_bound_filter: "since" + upper_bound_filter: "until" + request_parameters: + with_stats: "true" + + issues_stream: + name: "issues" + $ref: "#/definitions/base_projects_incremental_child_stream" + transformations: + - type: AddFields + fields: + - type: AddedFieldDefinition + path: ["author_id"] + value: "{{ (record.get('author') or {}).get('id') }}" + - type: AddedFieldDefinition + path: ["assignee_id"] + value: "{{ (record.get('assignee') or {}).get('id') }}" + - type: AddedFieldDefinition + path: ["closed_by_id"] + value: "{{ (record.get('closed_by') or {}).get('id') }}" + - type: AddedFieldDefinition + path: ["milestone_id"] + value: "{{ (record.get('milestone') or {}).get('id') }}" + - type: AddedFieldDefinition + path: ["assignees"] + value: | + {% set ids = [] %} + {% for data in record.get('assignees', []) %} + {% set _ = ids.append(data.get('id')) %} + {% endfor %} + {{ ids }} + $parameters: + path: "projects/{{ stream_slice.id }}/issues" + request_parameters: + scope: "all" + + merge_requests_stream: + name: "merge_requests" + $ref: "#/definitions/base_projects_incremental_child_stream" + transformations: + - type: AddFields + fields: + - type: AddedFieldDefinition + path: ["author_id"] + value: "{{ (record.get('author') or {}).get('id') }}" + - type: AddedFieldDefinition + path: ["assignee_id"] + value: "{{ (record.get('assignee') or {}).get('id') }}" + - type: AddedFieldDefinition + path: ["closed_by_id"] + value: "{{ (record.get('closed_by') or {}).get('id') }}" + - type: AddedFieldDefinition + path: ["milestone_id"] + value: "{{ (record.get('milestone') or {}).get('id') }}" + - type: AddedFieldDefinition + path: ["merged_by_id"] + value: "{{ (record.get('merged_by') or {}).get('id') }}" + - type: AddedFieldDefinition + path: ["assignees"] + value: | + {% set ids = [] %} + {% for data in record.get('assignees', []) %} + {% set _ = ids.append(data.get('id')) %} + {% endfor %} + {{ ids }} + $parameters: + path: "projects/{{ stream_slice.id }}/merge_requests" + request_parameters: + scope: "all" + + pipelines_stream: + name: "pipelines" + $ref: "#/definitions/base_projects_incremental_child_stream" + $parameters: + path: "projects/{{ stream_slice.id }}/pipelines" + +streams: + # Groups-based streams + - "#/definitions/groups_stream" + - "#/definitions/group_milestones_stream" + - "#/definitions/group_members_stream" + - "#/definitions/group_labels_stream" + - "#/definitions/group_issue_boards_stream" + - "#/definitions/epics_stream" + - "#/definitions/epic_issues_stream" + + # Projects-based streams + - "#/definitions/projects_stream" + - "#/definitions/project_milestones_stream" + - "#/definitions/project_members_stream" + - "#/definitions/project_labels_stream" + - "#/definitions/branches_stream" + - "#/definitions/releases_stream" + - "#/definitions/tags_stream" + - "#/definitions/users_stream" + - "#/definitions/deployments_stream" + - "#/definitions/commits_stream" + - "#/definitions/issues_stream" + - "#/definitions/merge_requests_stream" + - "#/definitions/merge_request_commits_stream" + - "#/definitions/pipelines_stream" + - "#/definitions/pipelines_extended_stream" + - "#/definitions/jobs_stream" + +check: + type: CheckStream + stream_names: + - projects diff --git a/airbyte-integrations/connectors/source-gitlab/source_gitlab/source.py b/airbyte-integrations/connectors/source-gitlab/source_gitlab/source.py index 8a69c840b6164..4ce68d316f1ce 100644 --- a/airbyte-integrations/connectors/source-gitlab/source_gitlab/source.py +++ b/airbyte-integrations/connectors/source-gitlab/source_gitlab/source.py @@ -1,245 +1,30 @@ # -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. # -import os -from typing import Any, List, Mapping, MutableMapping, Optional, Tuple, Union +from typing import Any, MutableMapping, Tuple -import pendulum -from airbyte_cdk.config_observation import emit_configuration_as_airbyte_control_message -from airbyte_cdk.models import SyncMode -from airbyte_cdk.sources import AbstractSource -from airbyte_cdk.sources.streams import Stream -from airbyte_cdk.sources.streams.http.requests_native_auth.oauth import SingleUseRefreshTokenOauth2Authenticator -from airbyte_cdk.sources.streams.http.requests_native_auth.token import TokenAuthenticator -from airbyte_cdk.utils import AirbyteTracedException -from requests.auth import AuthBase -from requests.exceptions import HTTPError +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource +from airbyte_cdk.utils import is_cloud_environment -from .streams import ( - Branches, - Commits, - Deployments, - EpicIssues, - Epics, - GitlabStream, - GroupIssueBoards, - GroupLabels, - GroupMembers, - GroupMilestones, - GroupProjects, - Groups, - GroupsList, - IncludeDescendantGroups, - Issues, - Jobs, - MergeRequestCommits, - MergeRequests, - Pipelines, - PipelinesExtended, - ProjectLabels, - ProjectMembers, - ProjectMilestones, - Projects, - Releases, - Tags, - Users, -) from .utils import parse_url -class SingleUseRefreshTokenGitlabOAuth2Authenticator(SingleUseRefreshTokenOauth2Authenticator): - def __init__(self, *args, created_at_name: str = "created_at", **kwargs): - super().__init__(*args, **kwargs) - self._created_at_name = created_at_name - - def get_created_at_name(self) -> str: - return self._created_at_name - - def get_access_token(self) -> str: - if self.token_has_expired(): - new_access_token, access_token_expires_in, access_token_created_at, new_refresh_token = self.refresh_access_token() - new_token_expiry_date = self.get_new_token_expiry_date(access_token_expires_in, access_token_created_at) - self.access_token = new_access_token - self.set_refresh_token(new_refresh_token) - self.set_token_expiry_date(new_token_expiry_date) - emit_configuration_as_airbyte_control_message(self._connector_config) - return self.access_token - - @staticmethod - def get_new_token_expiry_date(access_token_expires_in: int, access_token_created_at: int) -> pendulum.DateTime: - return pendulum.from_timestamp(access_token_created_at + access_token_expires_in) - - def refresh_access_token(self) -> Tuple[str, int, int, str]: - response_json = self._get_refresh_access_token_response() - return ( - response_json[self.get_access_token_name()], - response_json[self.get_expires_in_name()], - response_json[self.get_created_at_name()], - response_json[self.get_refresh_token_name()], - ) - - -def get_authenticator(config: MutableMapping) -> AuthBase: - if config["credentials"]["auth_type"] == "access_token": - return TokenAuthenticator(token=config["credentials"]["access_token"]) - return SingleUseRefreshTokenGitlabOAuth2Authenticator( - config, - token_refresh_endpoint=f"https://{config['api_url']}/oauth/token", - refresh_token_error_status_codes=(400,), - refresh_token_error_key="error", - refresh_token_error_values="invalid_grant", - ) - - -class SourceGitlab(AbstractSource): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.__auth_params: Mapping[str, Any] = {} - self.__groups_stream: Optional[GitlabStream] = None - self.__projects_stream: Optional[GitlabStream] = None +class SourceGitlab(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "manifest.yaml"}) @staticmethod def _ensure_default_values(config: MutableMapping[str, Any]) -> MutableMapping[str, Any]: config["api_url"] = config.get("api_url") or "gitlab.com" return config - def _groups_stream(self, config: MutableMapping[str, Any]) -> Groups: - if not self.__groups_stream: - auth_params = self._auth_params(config) - group_ids = list(map(lambda x: x["id"], self._get_group_list(config))) - self.__groups_stream = Groups(group_ids=group_ids, **auth_params) - return self.__groups_stream - - def _projects_stream(self, config: MutableMapping[str, Any]) -> Union[Projects, GroupProjects]: - if not self.__projects_stream: - auth_params = self._auth_params(config) - project_ids = config.get("projects_list", []) - groups_stream = self._groups_stream(config) - if groups_stream.group_ids: - self.__projects_stream = GroupProjects(project_ids=project_ids, parent_stream=groups_stream, **auth_params) - return self.__projects_stream - self.__projects_stream = Projects(project_ids=project_ids, **auth_params) - return self.__projects_stream - - def _auth_params(self, config: MutableMapping[str, Any]) -> Mapping[str, Any]: - if not self.__auth_params: - auth = get_authenticator(config) - self.__auth_params = dict(authenticator=auth, api_url=config["api_url"]) - return self.__auth_params - - def _get_group_list(self, config: MutableMapping[str, Any]) -> List[str]: - group_ids = config.get("groups_list") - # Gitlab exposes different APIs to get a list of groups. - # We use https://docs.gitlab.com/ee/api/groups.html#list-groups in case there's no group IDs in the input config. - # This API provides full information about all available groups, including subgroups. - # - # In case there is a definitive list of groups IDs in the input config, the above API can not be used since - # it does not support filtering by group ID, so we use - # https://docs.gitlab.com/ee/api/groups.html#details-of-a-group and - # https: //docs.gitlab.com/ee/api/groups.html#list-a-groups-descendant-groups for each group ID. The latter one does not - # provide full group info so can only be used to retrieve alist of group IDs and pass it further to init a corresponding stream. - auth_params = self._auth_params(config) - stream = GroupsList(**auth_params) if not group_ids else IncludeDescendantGroups(group_ids=group_ids, **auth_params) - for stream_slice in stream.stream_slices(sync_mode=SyncMode.full_refresh): - yield from stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice=stream_slice) - - @staticmethod - def _is_http_allowed() -> bool: - return os.environ.get("DEPLOYMENT_MODE", "").upper() != "CLOUD" - - def _try_refresh_access_token(self, logger, config: Mapping[str, Any]) -> Mapping[str, Any]: - """ - This method attempts to refresh the expired `access_token`, while `refresh_token` is still valid. - In order to obtain the new `refresh_token`, the Customer should `re-auth` in the source settings. - """ - # get current authenticator - authenticator: Union[SingleUseRefreshTokenOauth2Authenticator, TokenAuthenticator] = self.__auth_params.get("authenticator") - if isinstance(authenticator, SingleUseRefreshTokenOauth2Authenticator): - try: - creds = authenticator.refresh_access_token() - # update the actual config values - config["credentials"]["access_token"] = creds[0] - config["credentials"]["refresh_token"] = creds[3] - config["credentials"]["token_expiry_date"] = authenticator.get_new_token_expiry_date(creds[1], creds[2]).to_rfc3339_string() - # update the config - emit_configuration_as_airbyte_control_message(config) - logger.info("The `access_token` was successfully refreshed.") - return config - except (AirbyteTracedException, HTTPError) as http_error: - raise http_error - except Exception as e: - raise Exception(f"Unknown error occurred while refreshing the `access_token`, details: {e}") - - def _handle_expired_access_token_error(self, logger, config: Mapping[str, Any]) -> Tuple[bool, Any]: - try: - return self.check_connection(logger, self._try_refresh_access_token(logger, config)) - except HTTPError as http_error: - return False, f"Unable to refresh the `access_token`, please re-authenticate in Sources > Settings. Details: {http_error}" - def check_connection(self, logger, config) -> Tuple[bool, Any]: config = self._ensure_default_values(config) is_valid, scheme, _ = parse_url(config["api_url"]) if not is_valid: return False, "Invalid API resource locator." - if scheme == "http" and not self._is_http_allowed(): + if scheme == "http" and is_cloud_environment(): return False, "Http scheme is not allowed in this environment. Please use `https` instead." - try: - projects = self._projects_stream(config) - for stream_slice in projects.stream_slices(sync_mode=SyncMode.full_refresh): - try: - next(projects.read_records(sync_mode=SyncMode.full_refresh, stream_slice=stream_slice)) - return True, None - except StopIteration: - # in case groups/projects provided and 404 occurs - return False, "Groups and/or projects that you provide are invalid or you don't have permission to view it." - return True, None # in case there's no projects - except HTTPError as http_error: - if config["credentials"]["auth_type"] == "oauth2.0": - if http_error.response.status_code == 401: - return self._handle_expired_access_token_error(logger, config) - elif http_error.response.status_code == 500: - return False, f"Unable to connect to Gitlab API with the provided credentials - {repr(http_error)}" - else: - return False, f"Unable to connect to Gitlab API with the provided Private Access Token - {repr(http_error)}" - except Exception as error: - return False, f"Unknown error occurred while checking the connection - {repr(error)}" - - def streams(self, config: MutableMapping[str, Any]) -> List[Stream]: - config = self._ensure_default_values(config) - auth_params = self._auth_params(config) - start_date = config.get("start_date") - - groups, projects = self._groups_stream(config), self._projects_stream(config) - pipelines = Pipelines(parent_stream=projects, start_date=start_date, **auth_params) - merge_requests = MergeRequests(parent_stream=projects, start_date=start_date, **auth_params) - epics = Epics(parent_stream=groups, **auth_params) - - streams = [ - groups, - projects, - Branches(parent_stream=projects, repository_part=True, **auth_params), - Commits(parent_stream=projects, repository_part=True, start_date=start_date, **auth_params), - epics, - Deployments(parent_stream=projects, **auth_params), - EpicIssues(parent_stream=epics, **auth_params), - GroupIssueBoards(parent_stream=groups, **auth_params), - Issues(parent_stream=projects, start_date=start_date, **auth_params), - Jobs(parent_stream=pipelines, **auth_params), - ProjectMilestones(parent_stream=projects, **auth_params), - GroupMilestones(parent_stream=groups, **auth_params), - ProjectMembers(parent_stream=projects, **auth_params), - GroupMembers(parent_stream=groups, **auth_params), - ProjectLabels(parent_stream=projects, **auth_params), - GroupLabels(parent_stream=groups, **auth_params), - merge_requests, - MergeRequestCommits(parent_stream=merge_requests, **auth_params), - Releases(parent_stream=projects, **auth_params), - Tags(parent_stream=projects, repository_part=True, **auth_params), - pipelines, - PipelinesExtended(parent_stream=pipelines, **auth_params), - Users(parent_stream=projects, **auth_params), - ] - - return streams + return super().check_connection(logger, config) diff --git a/airbyte-integrations/connectors/source-gitlab/source_gitlab/streams.py b/airbyte-integrations/connectors/source-gitlab/source_gitlab/streams.py deleted file mode 100644 index bf269f83ed827..0000000000000 --- a/airbyte-integrations/connectors/source-gitlab/source_gitlab/streams.py +++ /dev/null @@ -1,431 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import datetime -from abc import ABC -from typing import Any, Dict, Iterable, List, Mapping, MutableMapping, Optional, Tuple - -import pendulum -import requests -from airbyte_cdk.models import SyncMode -from airbyte_cdk.sources.streams.availability_strategy import AvailabilityStrategy -from airbyte_cdk.sources.streams.core import StreamData -from airbyte_cdk.sources.streams.http import HttpStream - -from .utils import parse_url - - -class GitlabStream(HttpStream, ABC): - primary_key = "id" - raise_on_http_errors = True - stream_base_params = {} - flatten_id_keys = [] - flatten_list_keys = [] - per_page = 50 - non_retriable_codes: List[int] = (403, 404) - - def __init__(self, api_url: str, **kwargs): - super().__init__(**kwargs) - self.api_url = api_url - self.page = 1 - - def read_records( - self, - sync_mode: SyncMode, - cursor_field: List[str] = None, - stream_slice: Mapping[str, Any] = None, - stream_state: Mapping[str, Any] = None, - ) -> Iterable[StreamData]: - self.page = 1 - yield from super().read_records(sync_mode, cursor_field, stream_slice, stream_state) - - def request_params( - self, - stream_state: Mapping[str, Any], - stream_slice: Mapping[str, Any] = None, - next_page_token: Mapping[str, Any] = None, - ) -> MutableMapping[str, Any]: - params = {"per_page": self.per_page} - if next_page_token: - params.update(next_page_token) - params.update(self.stream_base_params) - return params - - @property - def url_base(self) -> str: - _, scheme, host = parse_url(self.api_url) - return f"{scheme}://{host}/api/v4/" - - @property - def availability_strategy(self) -> Optional["AvailabilityStrategy"]: - return None - - def should_retry(self, response: requests.Response) -> bool: - # Gitlab API returns a 403 response in case a feature is disabled in a project (pipelines/jobs for instance). - if response.status_code in self.non_retriable_codes: - setattr(self, "raise_on_http_errors", False) - self.logger.warning( - f"Got {response.status_code} error when accessing URL {response.request.url}." - f" Very likely the feature is disabled for this project and/or group. Please double check it, or report a bug otherwise." - ) - return False - return super().should_retry(response) - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - if response.status_code in self.non_retriable_codes: - return - response_data = response.json() - if isinstance(response_data, dict): - return None - if len(response_data) == self.per_page: - self.page += 1 - return {"page": self.page} - - def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - if response.status_code in self.non_retriable_codes: - return [] - response_data = response.json() - if isinstance(response_data, list): - for record in response_data: - yield self.transform(record, **kwargs) - elif isinstance(response_data, dict): - yield self.transform(response_data, **kwargs) - else: - self.logger.info(f"Unsupported type of response data for stream {self.name}") - - def transform(self, record: Dict[str, Any], stream_slice: Mapping[str, Any] = None, **kwargs): - for key in self.flatten_id_keys: - self._flatten_id(record, key) - - for key in self.flatten_list_keys: - self._flatten_list(record, key) - - return record - - def _flatten_id(self, record: Dict[str, Any], target: str): - target_value = record.get(target, None) - record[target + "_id"] = target_value.get("id") if target_value else None - - def _flatten_list(self, record: Dict[str, Any], target: str): - record[target] = [target_data.get("id") for target_data in record.get(target, [])] - - -class GitlabChildStream(GitlabStream): - path_list = ["id"] - flatten_parent_id = False - - def __init__(self, parent_stream: GitlabStream, repository_part: bool = False, **kwargs): - super().__init__(**kwargs) - self.parent_stream = parent_stream - self.repo_url = repository_part - - @property - def path_template(self) -> str: - template = [self.parent_stream.name] + ["{" + path_key + "}" for path_key in self.path_list] - if self.repo_url: - template.append("repository") - return "/".join(template + [self.name]) - - def stream_slices( - self, sync_mode: SyncMode, cursor_field: List[str] = None, stream_state: Mapping[str, Any] = None - ) -> Iterable[Optional[Mapping[str, any]]]: - for slice in self.parent_stream.stream_slices(sync_mode=SyncMode.full_refresh): - for record in self.parent_stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice=slice): - yield {path_key: record[path_key] for path_key in self.path_list} - - def path(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwargs) -> str: - return self.path_template.format(**{path_key: stream_slice[path_key] for path_key in self.path_list}) - - def transform(self, record: Dict[str, Any], stream_slice: Mapping[str, Any] = None, **kwargs): - super().transform(record, stream_slice, **kwargs) - if self.flatten_parent_id: - record[f"{self.parent_stream.name[:-1]}_id"] = stream_slice["id"] - return record - - -class IncrementalGitlabChildStream(GitlabChildStream): - state_checkpoint_interval = 100 - cursor_field = "updated_at" - lower_bound_filter = "updated_after" - upper_bound_filter = "updated_before" - - def __init__(self, start_date, **kwargs): - super().__init__(**kwargs) - self._start_date = start_date - - def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: - """ - Return the latest state by comparing the cursor value in the latest record with the stream's most recent state object - and returning an updated state object. - """ - project_id = latest_record.get("project_id") - latest_cursor_value = latest_record.get(self.cursor_field) - current_state = current_stream_state.get(str(project_id)) - if current_state: - current_state = current_state.get(self.cursor_field) - current_state_value = current_state or latest_cursor_value - max_value = max(pendulum.parse(current_state_value), pendulum.parse(latest_cursor_value)) - current_stream_state[str(project_id)] = {self.cursor_field: max_value.to_iso8601_string()} - return current_stream_state - - @staticmethod - def _chunk_date_range(start_point: datetime.datetime) -> Iterable[Tuple[str, str]]: - end_point = datetime.datetime.now(datetime.timezone.utc) - if start_point > end_point: - return [] - current_start, current_end = start_point, start_point - while current_end < end_point: - current_end = current_start + datetime.timedelta(days=180) - current_end = min(current_end, end_point) - yield str(current_start), str(current_end) - current_start = current_end + datetime.timedelta(seconds=1) - - def stream_slices( - self, sync_mode: SyncMode, cursor_field: List[str] = None, stream_state: Mapping[str, Any] = None - ) -> Iterable[Optional[Mapping[str, Any]]]: - stream_state = stream_state or {} - super_slices = super().stream_slices(sync_mode, cursor_field, stream_state) - for super_slice in super_slices: - state_project_value = stream_state.get(str(super_slice["id"])) - if self._start_date or state_project_value: - start_point = self._start_date - if state_project_value: - state_value = state_project_value.get(self.cursor_field) - if state_value and start_point: - start_point = max(start_point, state_value) - else: - start_point = state_value or start_point - for start_dt, end_dt in self._chunk_date_range(pendulum.parse(start_point)): - stream_slice = {key: value for key, value in super_slice.items()} - stream_slice[self.lower_bound_filter] = start_dt - stream_slice[self.upper_bound_filter] = end_dt - yield stream_slice - else: - stream_slice = {key: value for key, value in super_slice.items()} - yield stream_slice - - def request_params(self, stream_state=None, stream_slice: Mapping[str, Any] = None, **kwargs): - params = super().request_params(stream_state, stream_slice, **kwargs) - lower_bound_filter = stream_slice.get(self.lower_bound_filter) - upper_bound_filter = stream_slice.get(self.upper_bound_filter) - if lower_bound_filter and upper_bound_filter: - params[self.lower_bound_filter] = lower_bound_filter - params[self.upper_bound_filter] = upper_bound_filter - return params - - -class Groups(GitlabStream): - use_cache = True - - def __init__(self, group_ids: List, **kwargs): - super().__init__(**kwargs) - self.group_ids = group_ids - - def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: - return f"groups/{stream_slice['id']}" - - def stream_slices(self, **kwargs) -> Iterable[Optional[Mapping[str, any]]]: - for gid in self.group_ids: - yield {"id": gid} - - def transform(self, record, stream_slice: Mapping[str, Any] = None, **kwargs): - record["projects"] = [ - {"id": project["id"], "path_with_namespace": project["path_with_namespace"]} for project in record.pop("projects", []) - ] - return record - - -class IncludeDescendantGroups(Groups): - def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: - return stream_slice["path"] - - def stream_slices(self, **kwargs) -> Iterable[Optional[Mapping[str, any]]]: - for gid in self.group_ids: - yield {"path": f"groups/{gid}"} - yield {"path": f"groups/{gid}/descendant_groups"} - - -class GroupsList(GitlabStream): - def path(self, **kwargs) -> str: - return "groups" - - -class Projects(GitlabStream): - stream_base_params = {"statistics": 1} - use_cache = True - - def __init__(self, project_ids: List = None, **kwargs): - super().__init__(**kwargs) - self.project_ids = project_ids - - def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: - return f"projects/{stream_slice['id']}" - - def stream_slices(self, **kwargs) -> Iterable[Optional[Mapping[str, any]]]: - for pid in self.project_ids: - yield {"id": pid.replace("/", "%2F")} - - -class GroupProjects(Projects): - name = "projects" - - def __init__(self, parent_stream: GitlabStream = None, **kwargs): - super().__init__(**kwargs) - self.parent_stream = parent_stream - - def stream_slices(self, **kwargs) -> Iterable[Optional[Mapping[str, any]]]: - group_project_ids = set() - for slice in self.parent_stream.stream_slices(sync_mode=SyncMode.full_refresh): - for record in self.parent_stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice=slice): - group_project_ids.update({i["path_with_namespace"] for i in record["projects"]}) - for pid in group_project_ids: - if not self.project_ids or self.project_ids and pid in self.project_ids: - yield {"id": pid.replace("/", "%2F")} - - -class GroupMilestones(GitlabChildStream): - path_template = "groups/{id}/milestones" - - -class ProjectMilestones(GitlabChildStream): - path_template = "projects/{id}/milestones" - - -class GroupMembers(GitlabChildStream): - path_template = "groups/{id}/members" - flatten_parent_id = True - - -class ProjectMembers(GitlabChildStream): - path_template = "projects/{id}/members" - flatten_parent_id = True - - -class GroupLabels(GitlabChildStream): - path_template = "groups/{id}/labels" - flatten_parent_id = True - - -class ProjectLabels(GitlabChildStream): - path_template = "projects/{id}/labels" - flatten_parent_id = True - - -class Branches(GitlabChildStream): - primary_key = "name" - flatten_id_keys = ["commit"] - flatten_parent_id = True - - -class Commits(IncrementalGitlabChildStream): - cursor_field = "created_at" - lower_bound_filter = "since" - upper_bound_filter = "until" - flatten_parent_id = True - stream_base_params = {"with_stats": True} - - -class Issues(IncrementalGitlabChildStream): - stream_base_params = {"scope": "all"} - flatten_id_keys = ["author", "assignee", "closed_by", "milestone"] - flatten_list_keys = ["assignees"] - - -class MergeRequests(IncrementalGitlabChildStream): - stream_base_params = {"scope": "all"} - flatten_id_keys = ["author", "assignee", "closed_by", "milestone", "merged_by"] - flatten_list_keys = ["assignees"] - - -class MergeRequestCommits(GitlabChildStream): - """Docs: https://docs.gitlab.com/ee/api/merge_requests.html#get-single-merge-request-commits""" - - path_list = ["project_id", "iid"] - path_template = "projects/{project_id}/merge_requests/{iid}/commits" - - def transform(self, record, stream_slice: Mapping[str, Any] = None, **kwargs): - super().transform(record, stream_slice, **kwargs) - record["project_id"] = stream_slice["project_id"] - record["merge_request_iid"] = stream_slice["iid"] - - return record - - -class Releases(GitlabChildStream): - primary_key = "name" - flatten_id_keys = ["author", "commit"] - flatten_list_keys = ["milestones"] - - def transform(self, record, stream_slice: Mapping[str, Any] = None, **kwargs): - super().transform(record, stream_slice, **kwargs) - record["project_id"] = stream_slice["id"] - - return record - - -class Tags(GitlabChildStream): - primary_key = "name" - flatten_id_keys = ["commit"] - - def transform(self, record, stream_slice: Mapping[str, Any] = None, **kwargs): - super().transform(record, stream_slice, **kwargs) - record["project_id"] = stream_slice["id"] - - return record - - -class Pipelines(IncrementalGitlabChildStream): - pass - - -class PipelinesExtended(GitlabChildStream): - path_list = ["project_id", "id"] - path_template = "projects/{project_id}/pipelines/{id}" - - -class Jobs(GitlabChildStream): - flatten_id_keys = ["user", "pipeline", "runner", "commit"] - path_list = ["project_id", "id"] - path_template = "projects/{project_id}/pipelines/{id}/jobs" - - def transform(self, record, stream_slice: Mapping[str, Any] = None, **kwargs): - super().transform(record, stream_slice, **kwargs) - record["project_id"] = stream_slice["project_id"] - return record - - -class GroupIssueBoards(GitlabChildStream): - path_template = "groups/{id}/boards" - flatten_parent_id = True - - -class Users(GitlabChildStream): - pass - - -class Epics(GitlabChildStream): - primary_key = "iid" - flatten_id_keys = ["author"] - - -class EpicIssues(GitlabChildStream): - primary_key = "epic_issue_id" - path_list = ["group_id", "iid"] - flatten_id_keys = ["milestone", "assignee", "author"] - flatten_list_keys = ["assignees"] - path_template = "groups/{group_id}/epics/{iid}/issues" - - -class Deployments(GitlabChildStream): - primary_key = "id" - flatten_id_keys = ["user", "environment"] - path_template = "projects/{id}/deployments" - - def transform(self, record, stream_slice: Mapping[str, Any] = None, **kwargs): - super().transform(record, stream_slice, **kwargs) - record["user_username"] = record["user"]["username"] - record["user_full_name"] = record["user"]["name"] - record["environment_name"] = record["environment"]["name"] - record["project_id"] = stream_slice["id"] - return record diff --git a/airbyte-integrations/connectors/source-gitlab/unit_tests/conftest.py b/airbyte-integrations/connectors/source-gitlab/unit_tests/conftest.py index 8471fb8e8b06a..c6f180e38de9b 100644 --- a/airbyte-integrations/connectors/source-gitlab/unit_tests/conftest.py +++ b/airbyte-integrations/connectors/source-gitlab/unit_tests/conftest.py @@ -1,29 +1,30 @@ # -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. # + +from typing import Any, Mapping + import pytest +from airbyte_cdk.sources.streams import Stream +from source_gitlab.source import SourceGitlab + +BASE_CONFIG = { + "start_date": "2021-01-01T00:00:00Z", + "api_url": "gitlab.com", + "credentials": {"auth_type": "access_token", "access_token": "token"}, +} +GROUPS_LIST_URL = "https://gitlab.com/api/v4/groups?per_page=50" @pytest.fixture(params=["gitlab.com", "http://gitlab.com", "https://gitlab.com"]) def config(request): - return { - "start_date": "2021-01-01T00:00:00Z", - "api_url": request.param, - "credentials": {"auth_type": "access_token", "access_token": "token"}, - } - - -@pytest.fixture(autouse=True) -def disable_cache(mocker): - mocker.patch("source_gitlab.streams.Projects.use_cache", new_callable=mocker.PropertyMock, return_value=False) - mocker.patch("source_gitlab.streams.Groups.use_cache", new_callable=mocker.PropertyMock, return_value=False) + return BASE_CONFIG | {"api_url": request.param} @pytest.fixture def oauth_config(): - return { - "api_url": "gitlab.com", + return BASE_CONFIG | { "credentials": { "auth_type": "oauth2.0", "client_id": "client_id", @@ -32,16 +33,17 @@ def oauth_config(): "token_expiry_date": "2023-01-01T00:00:00Z", "refresh_token": "refresh_token", }, - "start_date": "2021-01-01T00:00:00Z", } @pytest.fixture def config_with_project_groups(): - return { - "start_date": "2021-01-01T00:00:00Z", - "api_url": "https://gitlab.com", - "credentials": {"auth_type": "access_token", "access_token": "token"}, - "groups_list": ["g1"], - "projects_list": ["p1"], - } + return BASE_CONFIG | {"groups_list": ["g1"], "projects_list": ["p1"]} + + +def get_stream_by_name(stream_name: str, config: Mapping[str, Any]) -> Stream: + source = SourceGitlab() + matches_by_name = [stream_config for stream_config in source.streams(config) if stream_config.name == stream_name] + if not matches_by_name: + raise ValueError("Please provide a valid stream name.") + return matches_by_name[0] diff --git a/airbyte-integrations/connectors/source-gitlab/unit_tests/test_partition_routers.py b/airbyte-integrations/connectors/source-gitlab/unit_tests/test_partition_routers.py new file mode 100644 index 0000000000000..4e7e0b40483fc --- /dev/null +++ b/airbyte-integrations/connectors/source-gitlab/unit_tests/test_partition_routers.py @@ -0,0 +1,89 @@ +# +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. +# + + +from airbyte_cdk.models import SyncMode +from airbyte_cdk.sources.declarative.types import StreamSlice +from conftest import BASE_CONFIG, GROUPS_LIST_URL, get_stream_by_name + + +class TestGroupStreamsPartitionRouter: + def test_groups_stream_slices_without_group_ids_in_config(self, requests_mock): + requests_mock.get(url=GROUPS_LIST_URL, json=[{"id": "group_id_1"}, {"id": "group_id_2"}]) + groups_stream = get_stream_by_name("groups", BASE_CONFIG) + assert list(groups_stream.stream_slices(sync_mode=SyncMode.full_refresh)) == [ + StreamSlice(partition={"id": "group_id_1"}, cursor_slice={}), + StreamSlice(partition={"id": "group_id_2"}, cursor_slice={}), + ] + + def test_groups_stream_slices_with_group_ids_in_config(self, requests_mock): + groups_list = ["group_id_1", "group_id_2"] + expected_stream_slices = [] + + for group_id in groups_list: + requests_mock.get(url=f"https://gitlab.com/api/v4/groups/{group_id}?per_page=50", json=[{"id": group_id}]) + requests_mock.get( + url=f"https://gitlab.com/api/v4/groups/{group_id}/descendant_groups?per_page=50", + json=[{"id": f"descendant_{group_id}"}], + ) + expected_stream_slices.append(StreamSlice(partition={"id": group_id}, cursor_slice={})) + expected_stream_slices.append(StreamSlice(partition={"id": f"descendant_{group_id}"}, cursor_slice={})) + + groups_stream = get_stream_by_name("groups", BASE_CONFIG | {"groups_list": groups_list}) + assert list(groups_stream.stream_slices(sync_mode=SyncMode.full_refresh)) == expected_stream_slices + + +class TestProjectStreamsPartitionRouter: + projects_config = {"projects_list": ["group_id_1/project_id_1", "group_id_2/project_id_2"]} + + def test_projects_stream_slices_without_group_project_ids(self, requests_mock): + requests_mock.get(url=GROUPS_LIST_URL, json=[]) + projects_stream = get_stream_by_name("projects", BASE_CONFIG | self.projects_config) + assert list(projects_stream.stream_slices(sync_mode=SyncMode.full_refresh)) == [ + StreamSlice(partition={"id": "group_id_1%2Fproject_id_1"}, cursor_slice={}), + StreamSlice(partition={"id": "group_id_2%2Fproject_id_2"}, cursor_slice={}), + ] + + def test_projects_stream_slices_with_group_project_ids(self, requests_mock): + groups_list = ["group_id_1", "group_id_2"] + groups_list_response = [] + expected_stream_slices = [] + + for group_id, project_id in zip(groups_list, self.projects_config["projects_list"]): + groups_list_response.append({"id": group_id}) + requests_mock.get( + url=f"https://gitlab.com/api/v4/groups/{group_id}?per_page=50", + json=[{"id": group_id, "projects": [{"id": project_id, "path_with_namespace": project_id}]}], + ) + expected_stream_slices.append( + StreamSlice(partition={"id": project_id.replace("/", "%2F")}, cursor_slice={}) + ) + + requests_mock.get(url=GROUPS_LIST_URL, json=groups_list_response) + + projects_stream = get_stream_by_name("projects", BASE_CONFIG | self.projects_config) + assert list(projects_stream.stream_slices(sync_mode=SyncMode.full_refresh)) == expected_stream_slices + + def test_projects_stream_slices_with_group_project_ids_filtered_by_projects_list_config(self, requests_mock): + group_id = "group_id_1" + project_id = self.projects_config["projects_list"][0] + unknown_project_id = "unknown_project_id" + requests_mock.get(url=GROUPS_LIST_URL, json=[{"id": group_id}]) + requests_mock.get( + url=f"https://gitlab.com/api/v4/groups/{group_id}?per_page=50", + json=[ + { + "id": group_id, + "projects": [ + {"id": project_id, "path_with_namespace": project_id}, + {"id": unknown_project_id, "path_with_namespace": unknown_project_id}, + ], + }, + ], + ) + + projects_stream = get_stream_by_name("projects", BASE_CONFIG | self.projects_config) + assert list(projects_stream.stream_slices(sync_mode=SyncMode.full_refresh)) == [ + StreamSlice(partition={"id": project_id.replace("/", "%2F")}, cursor_slice={}) + ] diff --git a/airbyte-integrations/connectors/source-gitlab/unit_tests/test_source.py b/airbyte-integrations/connectors/source-gitlab/unit_tests/test_source.py index 5454ee1d7d763..516ed27af2689 100644 --- a/airbyte-integrations/connectors/source-gitlab/unit_tests/test_source.py +++ b/airbyte-integrations/connectors/source-gitlab/unit_tests/test_source.py @@ -1,39 +1,28 @@ # -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. # + import logging import pytest +from airbyte_cdk.sources.declarative.declarative_stream import DeclarativeStream from source_gitlab import SourceGitlab -from source_gitlab.streams import GitlabStream -def test_streams(config, requests_mock): - requests_mock.get("/api/v4/groups", json=[{"id": "g1"}, {"id": "g256"}]) +def test_streams(config): source = SourceGitlab() streams = source.streams(config) assert len(streams) == 23 - assert all([isinstance(stream, GitlabStream) for stream in streams]) - groups, projects, *_ = streams - assert groups.group_ids == ["g1", "g256"] - assert projects.project_ids == [] + assert all([isinstance(stream, DeclarativeStream) for stream in streams]) -@pytest.mark.parametrize( - "url_mocks", - ( - ( - {"url": "/api/v4/groups", "json": [{"id": "g1"}]}, - {"url": "/api/v4/groups/g1", "json": [{"id": "g1", "projects": [{"id": "p1", "path_with_namespace": "p1"}]}]}, - {"url": "/api/v4/projects/p1", "json": {"id": "p1"}}, - ), - ({"url": "/api/v4/groups", "json": []},), - ), -) -def test_connection_success(config, requests_mock, url_mocks): - for url_mock in url_mocks: - requests_mock.get(**url_mock) +def test_connection_success(config, requests_mock): + requests_mock.get(url="/api/v4/groups", json=[{"id": "g1"}]) + requests_mock.get( + url="/api/v4/groups/g1", json=[{"id": "g1", "projects": [{"id": "p1", "path_with_namespace": "p1"}]}] + ) + requests_mock.get(url="/api/v4/projects/p1", json={"id": "p1"}) source = SourceGitlab() status, msg = source.check_connection(logging.getLogger(), config) assert (status, msg) == (True, None) @@ -41,27 +30,27 @@ def test_connection_success(config, requests_mock, url_mocks): def test_connection_invalid_projects_and_projects(config_with_project_groups, requests_mock): requests_mock.register_uri("GET", "https://gitlab.com/api/v4/groups/g1?per_page=50", status_code=404) - requests_mock.register_uri("GET", "https://gitlab.com/api/v4/groups/g1/descendant_groups?per_page=50", status_code=404) + requests_mock.register_uri( + "GET", "https://gitlab.com/api/v4/groups/g1/descendant_groups?per_page=50", status_code=404 + ) requests_mock.register_uri("GET", "https://gitlab.com/api/v4/projects/p1?per_page=50&statistics=1", status_code=404) source = SourceGitlab() status, msg = source.check_connection(logging.getLogger(), config_with_project_groups) - assert (status, msg) == (False, "Groups and/or projects that you provide are invalid or you don't have permission to view it.") + assert status is False + assert msg == ( + "Unable to connect to stream projects - " + "Groups and/or projects that you provide are invalid or you don't have permission to view it." + ) -@pytest.mark.parametrize( - "errror_code, expected_status", - ( - (500, False), - (401, False), - ), -) -def test_connection_fail_due_to_api_error(errror_code, expected_status, config, mocker, requests_mock): +@pytest.mark.parametrize("error_code, expected_status", ((500, False), (401, False))) +def test_connection_fail_due_to_api_error(error_code, expected_status, config, mocker, requests_mock): mocker.patch("time.sleep") - requests_mock.get("/api/v4/groups", status_code=errror_code) + requests_mock.get("/api/v4/groups", status_code=error_code) source = SourceGitlab() status, msg = source.check_connection(logging.getLogger(), config) assert status is False - assert msg.startswith("Unable to connect to Gitlab API with the provided Private Access Token") + assert msg.startswith(f"Unable to connect to stream projects") def test_connection_fail_due_to_api_error_oauth(oauth_config, mocker, requests_mock): @@ -78,50 +67,19 @@ def test_connection_fail_due_to_api_error_oauth(oauth_config, mocker, requests_m source = SourceGitlab() status, msg = source.check_connection(logging.getLogger(), oauth_config) assert status is False - assert msg.startswith("Unable to connect to Gitlab API with the provided credentials") - - -def test_connection_fail_due_to_expired_access_token_error(oauth_config, requests_mock): - expected = "Unable to refresh the `access_token`, please re-authenticate in Sources > Settings." - requests_mock.post("https://gitlab.com/oauth/token", status_code=401) - source = SourceGitlab() - status, msg = source.check_connection(logging.getLogger("airbyte"), oauth_config) - assert status is False - assert expected in msg - - -def test_connection_refresh_access_token(oauth_config, requests_mock): - expected = "Unknown error occurred while checking the connection" - requests_mock.post("https://gitlab.com/oauth/token", status_code=200, json={"access_token": "new access token"}) - source = SourceGitlab() - status, msg = source.check_connection(logging.getLogger("airbyte"), oauth_config) - assert status is False - assert expected in msg - - -def test_refresh_expired_access_token_on_error(oauth_config, requests_mock): - test_response = { - "access_token": "new_access_token", - "expires_in": 7200, - "created_at": 1735689600, - # (7200 + 1735689600).timestamp().to_rfc3339_string() = "2025-01-01T02:00:00+00:00" - "refresh_token": "new_refresh_token", - } - expected_token_expiry_date = "2025-01-01T02:00:00+00:00" - requests_mock.post("https://gitlab.com/oauth/token", status_code=200, json=test_response) - requests_mock.get("https://gitlab.com/api/v4/groups?per_page=50", status_code=200, json=[]) - source = SourceGitlab() - source.check_connection(logging.getLogger("airbyte"), oauth_config) - # check the updated config values - assert test_response.get("access_token") == oauth_config.get("credentials").get("access_token") - assert test_response.get("refresh_token") == oauth_config.get("credentials").get("refresh_token") - assert expected_token_expiry_date == oauth_config.get("credentials").get("token_expiry_date") + assert msg.startswith( + "Unable to connect to stream projects - Unable to connect to Gitlab API with the provided credentials" + ) @pytest.mark.parametrize( "api_url, deployment_env, expected_message", ( - ("http://gitlab.my.company.org", "CLOUD", "Http scheme is not allowed in this environment. Please use `https` instead."), + ( + "http://gitlab.my.company.org", + "CLOUD", + "Http scheme is not allowed in this environment. Please use `https` instead.", + ), ("https://gitlab.com/api/v4", "CLOUD", "Invalid API resource locator."), ), ) @@ -135,27 +93,3 @@ def test_connection_fail_due_to_config_error(mocker, api_url, deployment_env, ex } status, msg = source.check_connection(logging.getLogger(), config) assert (status, msg) == (False, expected_message) - - -def test_try_refresh_access_token(oauth_config, requests_mock): - test_response = { - "access_token": "new_access_token", - "expires_in": 7200, - "created_at": 1735689600, - # (7200 + 1735689600).timestamp().to_rfc3339_string() = "2025-01-01T02:00:00+00:00" - "refresh_token": "new_refresh_token", - } - requests_mock.post("https://gitlab.com/oauth/token", status_code=200, json=test_response) - - expected = {"api_url": "gitlab.com", - "credentials": {"access_token": "new_access_token", - "auth_type": "oauth2.0", - "client_id": "client_id", - "client_secret": "client_secret", - "refresh_token": "new_refresh_token", - "token_expiry_date": "2025-01-01T02:00:00+00:00"}, - "start_date": "2021-01-01T00:00:00Z"} - - source = SourceGitlab() - source._auth_params(oauth_config) - assert source._try_refresh_access_token(logger=logging.getLogger(), config=oauth_config) == expected diff --git a/airbyte-integrations/connectors/source-gitlab/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-gitlab/unit_tests/test_streams.py index 9c29fa8808f93..c4d61694efafd 100644 --- a/airbyte-integrations/connectors/source-gitlab/unit_tests/test_streams.py +++ b/airbyte-integrations/connectors/source-gitlab/unit_tests/test_streams.py @@ -1,146 +1,76 @@ # -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. # -import datetime -from unittest.mock import MagicMock import pytest from airbyte_cdk.models import SyncMode -from airbyte_cdk.sources.streams.http.auth import NoAuth -from source_gitlab.streams import ( - Branches, - Commits, - Deployments, - Jobs, - MergeRequestCommits, - MergeRequests, - Pipelines, - Projects, - Releases, - Tags, -) - -auth_params = {"authenticator": NoAuth(), "api_url": "gitlab.com"} -start_date = datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(days=14) - - -@pytest.fixture() -def projects(): - return Projects(project_ids=["p_1"], **auth_params) - - -@pytest.fixture() -def pipelines(projects): - return Pipelines(parent_stream=projects, start_date=str(start_date), **auth_params) - - -@pytest.fixture() -def merge_requests(projects): - return MergeRequests(parent_stream=projects, start_date=str(start_date), **auth_params) - - -@pytest.fixture() -def tags(projects): - return Tags(parent_stream=projects, repository_part=True, **auth_params) - - -@pytest.fixture() -def releases(projects): - return Releases(parent_stream=projects, **auth_params) - - -@pytest.fixture() -def jobs(pipelines): - return Jobs(parent_stream=pipelines, **auth_params) - - -@pytest.fixture() -def deployments(projects): - return Deployments(parent_stream=projects, **auth_params) - +from conftest import BASE_CONFIG, GROUPS_LIST_URL, get_stream_by_name -@pytest.fixture() -def merge_request_commits(merge_requests): - return MergeRequestCommits(parent_stream=merge_requests, **auth_params) - - -@pytest.fixture() -def branches(projects): - return Branches(parent_stream=projects, **auth_params) - - -@pytest.fixture() -def commits(projects): - return Commits(parent_stream=projects, repository_part=True, start_date=str(start_date), **auth_params) +CONFIG = BASE_CONFIG | {"projects_list": ["p_1"]} @pytest.mark.parametrize( - "stream, extra_mocks, expected_call_count", + ("stream_name", "extra_mocks"), ( - ("projects", ({"url": "/api/v4/projects/p_1", "status_code": 403},), 1), - ("projects", ({"url": "/api/v4/projects/p_1", "status_code": 404},), 1), + ("projects", ({"url": "/api/v4/projects/p_1", "status_code": 403},)), ( "branches", - ({"url": "/api/v4/projects/p_1", "json": [{"id": "p_1"}]}, {"url": "/api/v4/projects/p_1/branches", "status_code": 403}), - 2, - ), - ( - "branches", - ({"url": "/api/v4/projects/p_1", "json": [{"id": "p_1"}]}, {"url": "/api/v4/projects/p_1/branches", "status_code": 404}), - 2, + ( + {"url": "/api/v4/projects/p_1", "json": [{"id": "p_1"}]}, + {"url": "/api/v4/projects/p_1/repository/branches", "status_code": 403}, + ), ), ), ) -def test_should_retry(mocker, requests_mock, stream, extra_mocks, expected_call_count, request): - mocker.patch("time.sleep") - stream = request.getfixturevalue(stream) +def test_should_retry(requests_mock, stream_name, extra_mocks): + requests_mock.get(url=GROUPS_LIST_URL, status_code=200) + stream = get_stream_by_name(stream_name, CONFIG) for extra_mock in extra_mocks: requests_mock.get(**extra_mock) - for stream_slice in stream.stream_slices(sync_mode="full_refresh"): - records = list(stream.read_records(sync_mode="full_refresh", stream_slice=stream_slice)) + records = [] + for stream_slice in stream.stream_slices(sync_mode=SyncMode.full_refresh): + records.extend(list(stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice=stream_slice))) assert records == [] - assert requests_mock.call_count == expected_call_count + assert requests_mock.call_count == len(extra_mocks) + 1 test_cases = ( ( "jobs", ( - ( - "/api/v4/projects/p_1/pipelines", - [{"project_id": "p_1", "id": "build_project_p1"}], - ), + ("/api/v4/projects/p_1/pipelines", [{"project_id": "p_1", "id": "build_project_p1"}]), ( "/api/v4/projects/p_1/pipelines/build_project_p1/jobs", - [{"id": "j_1", "user": {"id": "u_1"}, "pipeline": {"id": "p_17"}, "runner": None, "commit": {"id": "c_23"}}], + [ + { + "id": "j_1", + "user": {"id": "u_1"}, + "pipeline": {"id": "p_17"}, + "runner": None, + "commit": {"id": "c_23"}, + }, + ], ), ), - [ - { - "commit": {"id": "c_23"}, - "commit_id": "c_23", - "id": "j_1", - "pipeline": {"id": "p_17"}, - "pipeline_id": "p_17", - "project_id": "p_1", - "runner": None, - "runner_id": None, - "user": {"id": "u_1"}, - "user_id": "u_1", - } - ], + { + "commit": {"id": "c_23"}, + "commit_id": "c_23", + "id": "j_1", + "pipeline": {"id": "p_17"}, + "pipeline_id": "p_17", + "project_id": "p_1", + "runner": None, + "runner_id": None, + "user": {"id": "u_1"}, + "user_id": "u_1", + }, ), ( "tags", - ( - ( - "/api/v4/projects/p_1/repository/tags", - [{"commit": {"id": "c_1"}, "name": "t_1", "target": "ddc89"}], - ), - ), - [{"commit": {"id": "c_1"}, "commit_id": "c_1", "project_id": "p_1", "name": "t_1", "target": "ddc89"}], + (("/api/v4/projects/p_1/repository/tags", [{"commit": {"id": "c_1"}, "name": "t_1", "target": "ddc89"}]),), + {"commit": {"id": "c_1"}, "commit_id": "c_1", "project_id": "p_1", "name": "t_1", "target": "ddc89"}, ), ( "releases", @@ -150,24 +80,22 @@ def test_should_retry(mocker, requests_mock, stream, extra_mocks, expected_call_ [ { "id": "r_1", - "author": {"name": "John", "id": "666"}, + "author": {"name": "John", "id": "john"}, "commit": {"id": "abcd689"}, "milestones": [{"id": "m1", "title": "Q1"}, {"id": "m2", "title": "Q2"}], } ], ), ), - [ - { - "author": {"id": "666", "name": "John"}, - "author_id": "666", - "commit": {"id": "abcd689"}, - "commit_id": "abcd689", - "id": "r_1", - "milestones": ["m1", "m2"], - "project_id": "p_1", - } - ], + { + "author": {"id": "john", "name": "John"}, + "author_id": "john", + "commit": {"id": "abcd689"}, + "commit_id": "abcd689", + "id": "r_1", + "milestones": ["m1", "m2"], + "project_id": "p_1", + }, ), ( "deployments", @@ -177,154 +105,69 @@ def test_should_retry(mocker, requests_mock, stream, extra_mocks, expected_call_ [ { "id": "r_1", - "user": {"name": "John", "id": "666", "username": "john"}, + "user": {"name": "John", "id": "john_123", "username": "john"}, "environment": {"name": "dev"}, "commit": {"id": "abcd689"}, } ], ), ), - [ - { - "id": "r_1", - "user": {"name": "John", "id": "666", "username": "john"}, - "environment": {"name": "dev"}, - "commit": {"id": "abcd689"}, - "user_id": "666", - "environment_id": None, - "user_username": "john", - "user_full_name": "John", - "environment_name": "dev", - "project_id": "p_1", - } - ], + { + "id": "r_1", + "user": {"name": "John", "id": "john_123", "username": "john"}, + "environment": {"name": "dev"}, + "commit": {"id": "abcd689"}, + "user_id": "john_123", + "environment_id": None, + "user_username": "john", + "user_full_name": "John", + "environment_name": "dev", + "project_id": "p_1", + } ), ( "merge_request_commits", ( - ( - "/api/v4/projects/p_1/merge_requests", - [{"id": "mr_1", "iid": "mr_1", "project_id": "p_1"}], - ), - ( - "/api/v4/projects/p_1/merge_requests/mr_1/commits", - [ - { - "id": "mrc_1", - } - ], - ), + ("/api/v4/projects/p_1/merge_requests", [{"id": "mr_1", "iid": "mr_1", "project_id": "p_1"}]), + ("/api/v4/projects/p_1/merge_requests/mr_1/commits", [{"id": "mrc_1"}]), ), - [{"id": "mrc_1", "project_id": "p_1", "merge_request_iid": "mr_1"}], + {"id": "mrc_1", "project_id": "p_1", "merge_request_iid": "mr_1"}, ), ) -@pytest.mark.parametrize("stream, response_mocks, expected_records", test_cases) -def test_transform(requests_mock, stream, response_mocks, expected_records, request): - stream = request.getfixturevalue(stream) +@pytest.mark.parametrize(("stream_name", "response_mocks", "expected_record"), test_cases) +def test_transform(requests_mock, stream_name, response_mocks, expected_record): + requests_mock.get(url=GROUPS_LIST_URL, status_code=200) + stream = get_stream_by_name(stream_name, CONFIG) requests_mock.get("/api/v4/projects/p_1", json=[{"id": "p_1"}]) for url, json in response_mocks: requests_mock.get(url, json=json) - records_iter = iter(expected_records) - for stream_slice in stream.stream_slices(sync_mode="full_refresh"): - for record in stream.read_records(sync_mode="full_refresh", stream_slice=stream_slice): - assert record == next(records_iter) + for stream_slice in stream.stream_slices(sync_mode=SyncMode.full_refresh): + for record in stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice=stream_slice): + assert dict(record) == expected_record -@pytest.mark.parametrize( - "stream, current_state, latest_record, new_state", - ( - ( - "pipelines", - {"219445": {"updated_at": "2022-12-14T17:07:34.005675+02:00"}, "211378": {"updated_at": "2021-03-11T08:56:40.001+02:00"}}, - {"project_id": "219445", "updated_at": "2022-12-16T00:12:41.005675+02:00"}, - {"219445": {"updated_at": "2022-12-16T00:12:41.005675+02:00"}, "211378": {"updated_at": "2021-03-11T08:56:40.001+02:00"}}, - ), - ( - "pipelines", - {"219445": {"updated_at": "2022-12-14T17:07:34.005675+02:00"}, "211378": {"updated_at": "2021-03-11T08:56:40.012001+02:00"}}, - {"project_id": "211378", "updated_at": "2021-03-10T23:58:58.011+02:00"}, - {"219445": {"updated_at": "2022-12-14T17:07:34.005675+02:00"}, "211378": {"updated_at": "2021-03-11T08:56:40.012001+02:00"}}, - ), - ( - "pipelines", - {}, - {"project_id": "211378", "updated_at": "2021-03-10T23:58:58.010001+02:00"}, - {"211378": {"updated_at": "2021-03-10T23:58:58.010001+02:00"}}, - ), - ( - "commits", - {"219445": {"created_at": "2022-12-14T17:07:34.005675+02:00"}, "211378": {"created_at": "2021-03-11T08:56:40.001+02:00"}}, - {"project_id": "219445", "created_at": "2022-12-16T00:12:41.005675+02:00"}, - {"219445": {"created_at": "2022-12-16T00:12:41.005675+02:00"}, "211378": {"created_at": "2021-03-11T08:56:40.001+02:00"}}, - ), - ( - "commits", - {"219445": {"created_at": "2022-12-14T17:07:34.005675+02:00"}, "211378": {"created_at": "2021-03-11T08:56:40.012001+02:00"}}, - {"project_id": "211378", "created_at": "2021-03-10T23:58:58.011+02:00"}, - {"219445": {"created_at": "2022-12-14T17:07:34.005675+02:00"}, "211378": {"created_at": "2021-03-11T08:56:40.012001+02:00"}}, - ), - ( - "commits", - {}, - {"project_id": "211378", "created_at": "2021-03-10T23:58:58.010001+02:00"}, - {"211378": {"created_at": "2021-03-10T23:58:58.010001+02:00"}}, - ), - ), -) -def test_updated_state(stream, current_state, latest_record, new_state, request): - stream = request.getfixturevalue(stream) - assert stream.get_updated_state(current_state, latest_record) == new_state +def test_stream_slices_child_stream(requests_mock): + commits = get_stream_by_name("commits", CONFIG) + requests_mock.get(url=GROUPS_LIST_URL, status_code=200) + requests_mock.get( + url="https://gitlab.com/api/v4/projects/p_1?per_page=50&statistics=1", + json=[{"id": 13082000, "description": "", "name": "New CI Test Project"}], + ) + stream_state = {"13082000": {""'created_at': "2021-03-10T23:58:1213"}} - -def test_parse_response_unsuported_response_type(request, caplog): - stream = request.getfixturevalue("pipelines") - from unittest.mock import MagicMock - response = MagicMock() - response.status_code = 200 - response.json = MagicMock(return_value="") - list(stream.parse_response(response=response)) - assert "Unsupported type of response data for stream pipelines" in caplog.text - - -def test_stream_slices_child_stream(request, requests_mock): - commits = request.getfixturevalue("commits") - requests_mock.get("https://gitlab.com/api/v4/projects/p_1?per_page=50&statistics=1", - json=[{"id": 13082000, "description": "", "name": "New CI Test Project"}]) - - slices = list(commits.stream_slices(sync_mode=SyncMode.full_refresh, stream_state={"13082000": {""'created_at': "2021-03-10T23:58:1213"}})) + slices = list(commits.stream_slices(sync_mode=SyncMode.full_refresh, stream_state=stream_state)) assert slices -def test_next_page_token(request): - response = MagicMock() - response.status_code = 200 - response.json = MagicMock(return_value=["some data"]) - commits = request.getfixturevalue("commits") - assert not commits.next_page_token(response) - data = ["some data" for x in range(0, 50)] - response.json = MagicMock(return_value=data) - assert commits.next_page_token(response) == {'page': 2} - response.json = MagicMock(return_value={"data": "some data"}) - assert not commits.next_page_token(response) - - -def test_availability_strategy(request): - commits = request.getfixturevalue("commits") +def test_availability_strategy(): + commits = get_stream_by_name("commits", CONFIG) assert not commits.availability_strategy -def test_request_params(request): - commits = request.getfixturevalue("commits") - expected = {'per_page': 50, 'page': 2, 'with_stats': True} - assert commits.request_params(stream_slice={"updated_after": "2021-03-10T23:58:1213"}, next_page_token={'page': 2}) == expected - - -def test_chunk_date_range(request): - commits = request.getfixturevalue("commits") - # start point in future - start_point = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(days=1) - assert not list(commits._chunk_date_range(start_point)) +def test_request_params(): + commits = get_stream_by_name("commits", CONFIG) + assert commits.retriever.requester.get_request_params() == {"with_stats": "true"} diff --git a/airbyte-integrations/connectors/source-gitlab/unit_tests/test_utils.py b/airbyte-integrations/connectors/source-gitlab/unit_tests/test_utils.py index bd107e1a16dc5..5019d87b7b44c 100644 --- a/airbyte-integrations/connectors/source-gitlab/unit_tests/test_utils.py +++ b/airbyte-integrations/connectors/source-gitlab/unit_tests/test_utils.py @@ -7,10 +7,10 @@ @pytest.mark.parametrize( "url, expected", ( - ("http://example.com", (True, "http", "example.com")), - ("http://example", (True, "http", "example")), - ("test://example.com", (False, "", "")), - ("https://example.com/test/test2", (False, "", "")), + ("http://example.com", (True, "http", "example.com")), + ("http://example", (True, "http", "example")), + ("test://example.com", (False, "", "")), + ("https://example.com/test/test2", (False, "", "")), ) ) def test_parse_url(url, expected): diff --git a/airbyte-integrations/connectors/source-google-ads/README.md b/airbyte-integrations/connectors/source-google-ads/README.md index 8489101ea912e..57f91437a435e 100644 --- a/airbyte-integrations/connectors/source-google-ads/README.md +++ b/airbyte-integrations/connectors/source-google-ads/README.md @@ -30,7 +30,7 @@ See `sample_files/sample_config.json` for a sample config file. poetry run source-google-ads spec poetry run source-google-ads check --config secrets/config.json poetry run source-google-ads discover --config secrets/config.json -poetry run source-google-ads read --config secrets/config.json --catalog sample_files/configured_catalog.json +poetry run source-google-ads read --config secrets/config.json --catalog integration_tests/configured_catalog.json ``` ### Running unit tests diff --git a/airbyte-integrations/connectors/source-google-ads/integration_tests/expected_records_click.jsonl b/airbyte-integrations/connectors/source-google-ads/integration_tests/expected_records_click.jsonl index 62c6a529e5bc6..2e09109f203ce 100644 --- a/airbyte-integrations/connectors/source-google-ads/integration_tests/expected_records_click.jsonl +++ b/airbyte-integrations/connectors/source-google-ads/integration_tests/expected_records_click.jsonl @@ -7,9 +7,9 @@ {"stream": "ad_group_custom", "data": {"ad_group.targeting_setting.target_restrictions": ["targeting_dimension: AUDIENCE\nbid_only: true\n", "targeting_dimension: AGE_RANGE\nbid_only: true\n", "targeting_dimension: GENDER\nbid_only: true\n", "targeting_dimension: PARENTAL_STATUS\nbid_only: true\n", "targeting_dimension: INCOME_RANGE\nbid_only: true\n"], "segments.date": "2024-01-02"}, "emitted_at": 1704408105943} {"stream": "account_performance_report", "data": {"customer.currency_code": "USD", "customer.descriptive_name": "Airbyte", "customer.time_zone": "America/Los_Angeles", "metrics.active_view_cpm": 0.0, "metrics.active_view_ctr": 0.0, "metrics.active_view_impressions": 0, "metrics.active_view_measurability": 0.0, "metrics.active_view_measurable_cost_micros": 0, "metrics.active_view_measurable_impressions": 0, "metrics.active_view_viewability": 0.0, "segments.ad_network_type": "SEARCH", "metrics.all_conversions_from_interactions_rate": 2.9861930909090906, "metrics.all_conversions_value": 32.848124, "metrics.all_conversions": 32.848124, "metrics.average_cost": 1398181.8181818181, "metrics.average_cpc": 1398181.8181818181, "metrics.average_cpe": 0.0, "metrics.average_cpm": 640833333.3333334, "metrics.average_cpv": 0.0, "customer.manager": false, "metrics.clicks": 11, "metrics.content_budget_lost_impression_share": 0.0, "metrics.content_impression_share": 0.0, "metrics.content_rank_lost_impression_share": 0.0, "metrics.conversions_from_interactions_rate": 0.0, "metrics.conversions_value": 0.0, "metrics.conversions": 0.0, "metrics.cost_micros": 15380000, "metrics.cost_per_all_conversions": 468215.4755626227, "metrics.cost_per_conversion": 0.0, "metrics.cross_device_conversions": 0.0, "metrics.ctr": 0.4583333333333333, "segments.date": "2023-12-31", "segments.day_of_week": "SUNDAY", "segments.device": "MOBILE", "metrics.engagement_rate": 0.0, "metrics.engagements": 0, "customer.id": 4651612872, "metrics.impressions": 24, "metrics.interaction_rate": 0.4583333333333333, "metrics.interaction_event_types": ["InteractionEventType.CLICK"], "metrics.interactions": 11, "customer.auto_tagging_enabled": true, "customer.test_account": false, "segments.month": "2023-12-01", "segments.quarter": "2023-10-01", "metrics.search_budget_lost_impression_share": 0.0, "metrics.search_exact_match_impression_share": 0.6666666666666666, "metrics.search_impression_share": 0.6153846153846154, "metrics.search_rank_lost_impression_share": 0.38461538461538464, "metrics.value_per_all_conversions": 1.0, "metrics.value_per_conversion": 0.0, "metrics.video_view_rate": 0.0, "metrics.video_views": 0, "metrics.view_through_conversions": 0, "segments.week": "2023-12-25", "segments.year": 2023}, "emitted_at": 1704408106623} {"stream": "account_performance_report", "data": {"customer.currency_code": "USD", "customer.descriptive_name": "Airbyte", "customer.time_zone": "America/Los_Angeles", "metrics.active_view_cpm": 0.0, "metrics.active_view_ctr": 0.0, "metrics.active_view_impressions": 0, "metrics.active_view_measurability": 0.0, "metrics.active_view_measurable_cost_micros": 0, "metrics.active_view_measurable_impressions": 0, "metrics.active_view_viewability": 0.0, "segments.ad_network_type": "SEARCH", "metrics.all_conversions_from_interactions_rate": 0.0, "metrics.all_conversions_value": 0.0, "metrics.all_conversions": 0.0, "metrics.average_cost": 0.0, "metrics.average_cpc": 0.0, "metrics.average_cpe": 0.0, "metrics.average_cpm": 0.0, "metrics.average_cpv": 0.0, "customer.manager": false, "metrics.clicks": 0, "metrics.content_budget_lost_impression_share": 0.0, "metrics.content_impression_share": 0.0, "metrics.content_rank_lost_impression_share": 0.0, "metrics.conversions_from_interactions_rate": 0.0, "metrics.conversions_value": 0.0, "metrics.conversions": 0.0, "metrics.cost_micros": 0, "metrics.cost_per_all_conversions": 0.0, "metrics.cost_per_conversion": 0.0, "metrics.cross_device_conversions": 0.0, "metrics.ctr": 0.0, "segments.date": "2023-12-31", "segments.day_of_week": "SUNDAY", "segments.device": "TABLET", "metrics.engagement_rate": 0.0, "metrics.engagements": 0, "customer.id": 4651612872, "metrics.impressions": 2, "metrics.interaction_rate": 0.0, "metrics.interaction_event_types": [], "metrics.interactions": 0, "customer.auto_tagging_enabled": true, "customer.test_account": false, "segments.month": "2023-12-01", "segments.quarter": "2023-10-01", "metrics.search_budget_lost_impression_share": 0.0, "metrics.search_exact_match_impression_share": 1.0, "metrics.search_impression_share": 1.0, "metrics.search_rank_lost_impression_share": 0.0, "metrics.value_per_all_conversions": 0.0, "metrics.value_per_conversion": 0.0, "metrics.video_view_rate": 0.0, "metrics.video_views": 0, "metrics.view_through_conversions": 0, "segments.week": "2023-12-25", "segments.year": 2023}, "emitted_at": 1704408106623} -{"stream": "click_view", "data": {"ad_group.name": "Airbyte", "click_view.gclid": "Cj0KCQiAv8SsBhC7ARIsALIkVT0aoRchs-JIhSNfsaUU1GQLPOaNU15XNhGEkNLQ0kpOpYoV_VDNNogaAl-2EALw_wcB", "click_view.ad_group_ad": "customers/4651612872/adGroupAds/155311392438~676665180945", "click_view.keyword": "", "click_view.keyword_info.match_type": "UNSPECIFIED", "click_view.keyword_info.text": "", "campaign.id": 20643300404, "ad_group.id": 155311392438, "segments.date": "2023-12-31", "customer.id": 4651612872, "campaign.name": "mm_search_brand", "segments.ad_network_type": "SEARCH", "campaign.network_settings.target_content_network": false, "campaign.network_settings.target_google_search": true, "campaign.network_settings.target_partner_search_network": false, "campaign.network_settings.target_search_network": false}, "emitted_at": 1704408107339} -{"stream": "click_view", "data": {"ad_group.name": "Airbyte", "click_view.gclid": "Cj0KCQiAv8SsBhC7ARIsALIkVT17gRC4RsmoYczHLguLKTaojzCB4bPA0GjBSa3x44kKTbWVCvXEe58aAkeHEALw_wcB", "click_view.ad_group_ad": "customers/4651612872/adGroupAds/155311392438~676665180945", "click_view.keyword": "", "click_view.keyword_info.match_type": "UNSPECIFIED", "click_view.keyword_info.text": "", "campaign.id": 20643300404, "ad_group.id": 155311392438, "segments.date": "2023-12-31", "customer.id": 4651612872, "campaign.name": "mm_search_brand", "segments.ad_network_type": "SEARCH", "campaign.network_settings.target_content_network": false, "campaign.network_settings.target_google_search": true, "campaign.network_settings.target_partner_search_network": false, "campaign.network_settings.target_search_network": false}, "emitted_at": 1704408107340} -{"stream": "click_view", "data": {"ad_group.name": "Airbyte", "click_view.gclid": "Cj0KCQiAv8SsBhC7ARIsALIkVT1H36_GC-jRtw1xNj-9Y5IdIZWa-1j-BqhYt5JSB82QzNE5-7OxgB4aAlU4EALw_wcB", "click_view.ad_group_ad": "customers/4651612872/adGroupAds/155311392438~676665180945", "click_view.keyword": "", "click_view.keyword_info.match_type": "UNSPECIFIED", "click_view.keyword_info.text": "", "campaign.id": 20643300404, "ad_group.id": 155311392438, "segments.date": "2023-12-31", "customer.id": 4651612872, "campaign.name": "mm_search_brand", "segments.ad_network_type": "SEARCH", "campaign.network_settings.target_content_network": false, "campaign.network_settings.target_google_search": true, "campaign.network_settings.target_partner_search_network": false, "campaign.network_settings.target_search_network": false}, "emitted_at": 1704408107340} +{"stream": "click_view", "data": {"ad_group.name": "Google Analytics To BigQuery", "click_view.gclid": "Cj0KCQiAwbitBhDIARIsABfFYILKwUR2XNOZR2B8tlOU7_ErJTOiDlZf0sdkdQJ1fjAzMDLHP2WXq9caAj4tEALw_wcB", "click_view.ad_group_ad": "customers/4651612872/adGroupAds/154167257509~676946613078", "click_view.keyword": "customers/4651612872/adGroupCriteria/154167257509~1998751818607", "click_view.keyword_info.match_type": "PHRASE", "click_view.keyword_info.text": "Google Analytics To BigQuery", "campaign.id": 20656413085, "ad_group.id": 154167257509, "segments.date": "2024-01-22", "customer.id": 4651612872, "campaign.name": "mm_search_connections", "segments.ad_network_type": "SEARCH", "campaign.network_settings.target_content_network": false, "campaign.network_settings.target_google_search": true, "campaign.network_settings.target_partner_search_network": false, "campaign.network_settings.target_search_network": false}, "emitted_at": 1712576959037} +{"stream": "click_view", "data": {"ad_group.name": "Google Analytics To BigQuery", "click_view.gclid": "Cj0KCQiAwbitBhDIARIsABfFYILt7whM_OplFtlf-iB_7iHK4SpYVanmMRzXsZ0EJf7bo1X3Re1JB7MaAsvgEALw_wcB", "click_view.ad_group_ad": "customers/4651612872/adGroupAds/154167257509~676946613078", "click_view.keyword": "customers/4651612872/adGroupCriteria/154167257509~1998751818607", "click_view.keyword_info.match_type": "PHRASE", "click_view.keyword_info.text": "Google Analytics To BigQuery", "campaign.id": 20656413085, "ad_group.id": 154167257509, "segments.date": "2024-01-22", "customer.id": 4651612872, "campaign.name": "mm_search_connections", "segments.ad_network_type": "SEARCH", "campaign.network_settings.target_content_network": false, "campaign.network_settings.target_google_search": true, "campaign.network_settings.target_partner_search_network": false, "campaign.network_settings.target_search_network": false}, "emitted_at": 1712576959037} +{"stream": "click_view", "data": {"ad_group.name": "HubSpot To MySQL", "click_view.gclid": "Cj0KCQiAwbitBhDIARIsABfFYIIZfgaE_BPBTQ0qPcL2H9-eAPup5bGEbuSYLsXKCYTxTlLpFfgZfqgaAqLUEALw_wcB", "click_view.ad_group_ad": "customers/4651612872/adGroupAds/154167258909~676946613249", "click_view.keyword": "customers/4651612872/adGroupCriteria/154167258909~1945516745698", "click_view.keyword_info.match_type": "PHRASE", "click_view.keyword_info.text": "HubSpot To MySQL", "campaign.id": 20656413085, "ad_group.id": 154167258909, "segments.date": "2024-01-22", "customer.id": 4651612872, "campaign.name": "mm_search_connections", "segments.ad_network_type": "SEARCH", "campaign.network_settings.target_content_network": false, "campaign.network_settings.target_google_search": true, "campaign.network_settings.target_partner_search_network": false, "campaign.network_settings.target_search_network": false}, "emitted_at": 1712576959037} {"stream": "geographic_view", "data": {"customer.id": 4651612872, "customer.descriptive_name": "Airbyte", "geographic_view.country_criterion_id": 2840, "geographic_view.location_type": "AREA_OF_INTEREST", "ad_group.id": 155311392438, "segments.date": "2023-12-31"}, "emitted_at": 1704408109676} {"stream": "geographic_view", "data": {"customer.id": 4651612872, "customer.descriptive_name": "Airbyte", "geographic_view.country_criterion_id": 2840, "geographic_view.location_type": "LOCATION_OF_PRESENCE", "ad_group.id": 155311392438, "segments.date": "2023-12-31"}, "emitted_at": 1704408109677} {"stream": "geographic_view", "data": {"customer.id": 4651612872, "customer.descriptive_name": "Airbyte", "geographic_view.country_criterion_id": 2840, "geographic_view.location_type": "AREA_OF_INTEREST", "ad_group.id": 155311392438, "segments.date": "2024-01-01"}, "emitted_at": 1704408109677} diff --git a/airbyte-integrations/connectors/source-google-ads/metadata.yaml b/airbyte-integrations/connectors/source-google-ads/metadata.yaml index a7b4b77066db0..0050a81006d3f 100644 --- a/airbyte-integrations/connectors/source-google-ads/metadata.yaml +++ b/airbyte-integrations/connectors/source-google-ads/metadata.yaml @@ -11,12 +11,13 @@ data: connectorSubtype: api connectorType: source definitionId: 253487c0-2246-43ba-a21f-5116b20a2c50 - dockerImageTag: 3.3.6 + dockerImageTag: 3.4.1 dockerRepository: airbyte/source-google-ads documentationUrl: https://docs.airbyte.com/integrations/sources/google-ads githubIssueLabel: source-google-ads icon: google-adwords.svg license: Elv2 + maxSecondsBetweenMessages: 86400 name: Google Ads remoteRegistries: pypi: diff --git a/airbyte-integrations/connectors/source-google-ads/poetry.lock b/airbyte-integrations/connectors/source-google-ads/poetry.lock index 1b8c7d1afff5f..334659762fd7b 100644 --- a/airbyte-integrations/connectors/source-google-ads/poetry.lock +++ b/airbyte-integrations/connectors/source-google-ads/poetry.lock @@ -1,14 +1,14 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.58.8" +version = "0.72.1" description = "A framework for writing Airbyte Connectors." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte-cdk-0.58.8.tar.gz", hash = "sha256:80cfad673302802e0f5d485879f1bd2f3679a4e3b12b2af42bd7bb37a3991a71"}, - {file = "airbyte_cdk-0.58.8-py3-none-any.whl", hash = "sha256:5b0b19745e96ba3f20683c48530d58a00be48361dfa34ec3c38cef8da03ba330"}, + {file = "airbyte-cdk-0.72.1.tar.gz", hash = "sha256:1dbd0a11f3784cfdd5afa9f40315c9a6123e803be91f9f861642a78e7ee14cd9"}, + {file = "airbyte_cdk-0.72.1-py3-none-any.whl", hash = "sha256:849077805442286de99f589ecba4be82491a3d9d3f516ce1a8b0cbaf303db9a4"}, ] [package.dependencies] @@ -32,8 +32,8 @@ requests-cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] @@ -374,13 +374,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-auth" -version = "2.28.1" +version = "2.28.2" description = "Google Authentication Library" optional = false python-versions = ">=3.7" files = [ - {file = "google-auth-2.28.1.tar.gz", hash = "sha256:34fc3046c257cedcf1622fc4b31fc2be7923d9b4d44973d481125ecc50d83885"}, - {file = "google_auth-2.28.1-py2.py3-none-any.whl", hash = "sha256:25141e2d7a14bfcba945f5e9827f98092716e99482562f15306e5b026e21aa72"}, + {file = "google-auth-2.28.2.tar.gz", hash = "sha256:80b8b4969aa9ed5938c7828308f20f035bc79f9d8fb8120bf9dc8db20b41ba30"}, + {file = "google_auth-2.28.2-py2.py3-none-any.whl", hash = "sha256:9fd67bbcd40f16d9d42f950228e9cf02a2ded4ae49198b27432d0cded5a74c38"}, ] [package.dependencies] @@ -415,13 +415,13 @@ tool = ["click (>=6.0.0)"] [[package]] name = "googleapis-common-protos" -version = "1.62.0" +version = "1.63.0" description = "Common protobufs used in Google APIs" optional = false python-versions = ">=3.7" files = [ - {file = "googleapis-common-protos-1.62.0.tar.gz", hash = "sha256:83f0ece9f94e5672cced82f592d2a5edf527a96ed1794f0bab36d5735c996277"}, - {file = "googleapis_common_protos-1.62.0-py2.py3-none-any.whl", hash = "sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07"}, + {file = "googleapis-common-protos-1.63.0.tar.gz", hash = "sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e"}, + {file = "googleapis_common_protos-1.63.0-py2.py3-none-any.whl", hash = "sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632"}, ] [package.dependencies] @@ -432,84 +432,84 @@ grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] [[package]] name = "grpcio" -version = "1.62.0" +version = "1.62.1" description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.7" files = [ - {file = "grpcio-1.62.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:136ffd79791b1eddda8d827b607a6285474ff8a1a5735c4947b58c481e5e4271"}, - {file = "grpcio-1.62.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:d6a56ba703be6b6267bf19423d888600c3f574ac7c2cc5e6220af90662a4d6b0"}, - {file = "grpcio-1.62.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:4cd356211579043fce9f52acc861e519316fff93980a212c8109cca8f47366b6"}, - {file = "grpcio-1.62.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e803e9b58d8f9b4ff0ea991611a8d51b31c68d2e24572cd1fe85e99e8cc1b4f8"}, - {file = "grpcio-1.62.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4c04fe33039b35b97c02d2901a164bbbb2f21fb9c4e2a45a959f0b044c3512c"}, - {file = "grpcio-1.62.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:95370c71b8c9062f9ea033a0867c4c73d6f0ff35113ebd2618171ec1f1e903e0"}, - {file = "grpcio-1.62.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c912688acc05e4ff012c8891803659d6a8a8b5106f0f66e0aed3fb7e77898fa6"}, - {file = "grpcio-1.62.0-cp310-cp310-win32.whl", hash = "sha256:821a44bd63d0f04e33cf4ddf33c14cae176346486b0df08b41a6132b976de5fc"}, - {file = "grpcio-1.62.0-cp310-cp310-win_amd64.whl", hash = "sha256:81531632f93fece32b2762247c4c169021177e58e725494f9a746ca62c83acaa"}, - {file = "grpcio-1.62.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:3fa15850a6aba230eed06b236287c50d65a98f05054a0f01ccedf8e1cc89d57f"}, - {file = "grpcio-1.62.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:36df33080cd7897623feff57831eb83c98b84640b016ce443305977fac7566fb"}, - {file = "grpcio-1.62.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:7a195531828b46ea9c4623c47e1dc45650fc7206f8a71825898dd4c9004b0928"}, - {file = "grpcio-1.62.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab140a3542bbcea37162bdfc12ce0d47a3cda3f2d91b752a124cc9fe6776a9e2"}, - {file = "grpcio-1.62.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f9d6c3223914abb51ac564dc9c3782d23ca445d2864321b9059d62d47144021"}, - {file = "grpcio-1.62.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:fbe0c20ce9a1cff75cfb828b21f08d0a1ca527b67f2443174af6626798a754a4"}, - {file = "grpcio-1.62.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:38f69de9c28c1e7a8fd24e4af4264726637b72f27c2099eaea6e513e7142b47e"}, - {file = "grpcio-1.62.0-cp311-cp311-win32.whl", hash = "sha256:ce1aafdf8d3f58cb67664f42a617af0e34555fe955450d42c19e4a6ad41c84bd"}, - {file = "grpcio-1.62.0-cp311-cp311-win_amd64.whl", hash = "sha256:eef1d16ac26c5325e7d39f5452ea98d6988c700c427c52cbc7ce3201e6d93334"}, - {file = "grpcio-1.62.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:8aab8f90b2a41208c0a071ec39a6e5dbba16fd827455aaa070fec241624ccef8"}, - {file = "grpcio-1.62.0-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:62aa1659d8b6aad7329ede5d5b077e3d71bf488d85795db517118c390358d5f6"}, - {file = "grpcio-1.62.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:0d7ae7fc7dbbf2d78d6323641ded767d9ec6d121aaf931ec4a5c50797b886532"}, - {file = "grpcio-1.62.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f359d635ee9428f0294bea062bb60c478a8ddc44b0b6f8e1f42997e5dc12e2ee"}, - {file = "grpcio-1.62.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d48e5b1f8f4204889f1acf30bb57c30378e17c8d20df5acbe8029e985f735c"}, - {file = "grpcio-1.62.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:662d3df5314ecde3184cf87ddd2c3a66095b3acbb2d57a8cada571747af03873"}, - {file = "grpcio-1.62.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:92cdb616be44c8ac23a57cce0243af0137a10aa82234f23cd46e69e115071388"}, - {file = "grpcio-1.62.0-cp312-cp312-win32.whl", hash = "sha256:0b9179478b09ee22f4a36b40ca87ad43376acdccc816ce7c2193a9061bf35701"}, - {file = "grpcio-1.62.0-cp312-cp312-win_amd64.whl", hash = "sha256:614c3ed234208e76991992342bab725f379cc81c7dd5035ee1de2f7e3f7a9842"}, - {file = "grpcio-1.62.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:7e1f51e2a460b7394670fdb615e26d31d3260015154ea4f1501a45047abe06c9"}, - {file = "grpcio-1.62.0-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:bcff647e7fe25495e7719f779cc219bbb90b9e79fbd1ce5bda6aae2567f469f2"}, - {file = "grpcio-1.62.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:56ca7ba0b51ed0de1646f1735154143dcbdf9ec2dbe8cc6645def299bb527ca1"}, - {file = "grpcio-1.62.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e84bfb2a734e4a234b116be208d6f0214e68dcf7804306f97962f93c22a1839"}, - {file = "grpcio-1.62.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c1488b31a521fbba50ae86423f5306668d6f3a46d124f7819c603979fc538c4"}, - {file = "grpcio-1.62.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:98d8f4eb91f1ce0735bf0b67c3b2a4fea68b52b2fd13dc4318583181f9219b4b"}, - {file = "grpcio-1.62.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b3d3d755cfa331d6090e13aac276d4a3fb828bf935449dc16c3d554bf366136b"}, - {file = "grpcio-1.62.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a33f2bfd8a58a02aab93f94f6c61279be0f48f99fcca20ebaee67576cd57307b"}, - {file = "grpcio-1.62.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:5e709f7c8028ce0443bddc290fb9c967c1e0e9159ef7a030e8c21cac1feabd35"}, - {file = "grpcio-1.62.0-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:2f3d9a4d0abb57e5f49ed5039d3ed375826c2635751ab89dcc25932ff683bbb6"}, - {file = "grpcio-1.62.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:62ccb92f594d3d9fcd00064b149a0187c246b11e46ff1b7935191f169227f04c"}, - {file = "grpcio-1.62.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:921148f57c2e4b076af59a815467d399b7447f6e0ee10ef6d2601eb1e9c7f402"}, - {file = "grpcio-1.62.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f897b16190b46bc4d4aaf0a32a4b819d559a37a756d7c6b571e9562c360eed72"}, - {file = "grpcio-1.62.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1bc8449084fe395575ed24809752e1dc4592bb70900a03ca42bf236ed5bf008f"}, - {file = "grpcio-1.62.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:81d444e5e182be4c7856cd33a610154fe9ea1726bd071d07e7ba13fafd202e38"}, - {file = "grpcio-1.62.0-cp38-cp38-win32.whl", hash = "sha256:88f41f33da3840b4a9bbec68079096d4caf629e2c6ed3a72112159d570d98ebe"}, - {file = "grpcio-1.62.0-cp38-cp38-win_amd64.whl", hash = "sha256:fc2836cb829895ee190813446dce63df67e6ed7b9bf76060262c55fcd097d270"}, - {file = "grpcio-1.62.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:fcc98cff4084467839d0a20d16abc2a76005f3d1b38062464d088c07f500d170"}, - {file = "grpcio-1.62.0-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:0d3dee701e48ee76b7d6fbbba18ba8bc142e5b231ef7d3d97065204702224e0e"}, - {file = "grpcio-1.62.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:b7a6be562dd18e5d5bec146ae9537f20ae1253beb971c0164f1e8a2f5a27e829"}, - {file = "grpcio-1.62.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29cb592c4ce64a023712875368bcae13938c7f03e99f080407e20ffe0a9aa33b"}, - {file = "grpcio-1.62.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1eda79574aec8ec4d00768dcb07daba60ed08ef32583b62b90bbf274b3c279f7"}, - {file = "grpcio-1.62.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7eea57444a354ee217fda23f4b479a4cdfea35fb918ca0d8a0e73c271e52c09c"}, - {file = "grpcio-1.62.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0e97f37a3b7c89f9125b92d22e9c8323f4e76e7993ba7049b9f4ccbe8bae958a"}, - {file = "grpcio-1.62.0-cp39-cp39-win32.whl", hash = "sha256:39cd45bd82a2e510e591ca2ddbe22352e8413378852ae814549c162cf3992a93"}, - {file = "grpcio-1.62.0-cp39-cp39-win_amd64.whl", hash = "sha256:b71c65427bf0ec6a8b48c68c17356cb9fbfc96b1130d20a07cb462f4e4dcdcd5"}, - {file = "grpcio-1.62.0.tar.gz", hash = "sha256:748496af9238ac78dcd98cce65421f1adce28c3979393e3609683fcd7f3880d7"}, + {file = "grpcio-1.62.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:179bee6f5ed7b5f618844f760b6acf7e910988de77a4f75b95bbfaa8106f3c1e"}, + {file = "grpcio-1.62.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:48611e4fa010e823ba2de8fd3f77c1322dd60cb0d180dc6630a7e157b205f7ea"}, + {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:b2a0e71b0a2158aa4bce48be9f8f9eb45cbd17c78c7443616d00abbe2a509f6d"}, + {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fbe80577c7880911d3ad65e5ecc997416c98f354efeba2f8d0f9112a67ed65a5"}, + {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58f6c693d446964e3292425e1d16e21a97a48ba9172f2d0df9d7b640acb99243"}, + {file = "grpcio-1.62.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:77c339403db5a20ef4fed02e4d1a9a3d9866bf9c0afc77a42234677313ea22f3"}, + {file = "grpcio-1.62.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b5a4ea906db7dec694098435d84bf2854fe158eb3cd51e1107e571246d4d1d70"}, + {file = "grpcio-1.62.1-cp310-cp310-win32.whl", hash = "sha256:4187201a53f8561c015bc745b81a1b2d278967b8de35f3399b84b0695e281d5f"}, + {file = "grpcio-1.62.1-cp310-cp310-win_amd64.whl", hash = "sha256:844d1f3fb11bd1ed362d3fdc495d0770cfab75761836193af166fee113421d66"}, + {file = "grpcio-1.62.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:833379943d1728a005e44103f17ecd73d058d37d95783eb8f0b28ddc1f54d7b2"}, + {file = "grpcio-1.62.1-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:c7fcc6a32e7b7b58f5a7d27530669337a5d587d4066060bcb9dee7a8c833dfb7"}, + {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:fa7d28eb4d50b7cbe75bb8b45ed0da9a1dc5b219a0af59449676a29c2eed9698"}, + {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48f7135c3de2f298b833be8b4ae20cafe37091634e91f61f5a7eb3d61ec6f660"}, + {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71f11fd63365ade276c9d4a7b7df5c136f9030e3457107e1791b3737a9b9ed6a"}, + {file = "grpcio-1.62.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4b49fd8fe9f9ac23b78437da94c54aa7e9996fbb220bac024a67469ce5d0825f"}, + {file = "grpcio-1.62.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:482ae2ae78679ba9ed5752099b32e5fe580443b4f798e1b71df412abf43375db"}, + {file = "grpcio-1.62.1-cp311-cp311-win32.whl", hash = "sha256:1faa02530b6c7426404372515fe5ddf66e199c2ee613f88f025c6f3bd816450c"}, + {file = "grpcio-1.62.1-cp311-cp311-win_amd64.whl", hash = "sha256:5bd90b8c395f39bc82a5fb32a0173e220e3f401ff697840f4003e15b96d1befc"}, + {file = "grpcio-1.62.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:b134d5d71b4e0837fff574c00e49176051a1c532d26c052a1e43231f252d813b"}, + {file = "grpcio-1.62.1-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:d1f6c96573dc09d50dbcbd91dbf71d5cf97640c9427c32584010fbbd4c0e0037"}, + {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:359f821d4578f80f41909b9ee9b76fb249a21035a061a327f91c953493782c31"}, + {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a485f0c2010c696be269184bdb5ae72781344cb4e60db976c59d84dd6354fac9"}, + {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b50b09b4dc01767163d67e1532f948264167cd27f49e9377e3556c3cba1268e1"}, + {file = "grpcio-1.62.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3227c667dccbe38f2c4d943238b887bac588d97c104815aecc62d2fd976e014b"}, + {file = "grpcio-1.62.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3952b581eb121324853ce2b191dae08badb75cd493cb4e0243368aa9e61cfd41"}, + {file = "grpcio-1.62.1-cp312-cp312-win32.whl", hash = "sha256:83a17b303425104d6329c10eb34bba186ffa67161e63fa6cdae7776ff76df73f"}, + {file = "grpcio-1.62.1-cp312-cp312-win_amd64.whl", hash = "sha256:6696ffe440333a19d8d128e88d440f91fb92c75a80ce4b44d55800e656a3ef1d"}, + {file = "grpcio-1.62.1-cp37-cp37m-linux_armv7l.whl", hash = "sha256:e3393b0823f938253370ebef033c9fd23d27f3eae8eb9a8f6264900c7ea3fb5a"}, + {file = "grpcio-1.62.1-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:83e7ccb85a74beaeae2634f10eb858a0ed1a63081172649ff4261f929bacfd22"}, + {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:882020c87999d54667a284c7ddf065b359bd00251fcd70279ac486776dbf84ec"}, + {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a10383035e864f386fe096fed5c47d27a2bf7173c56a6e26cffaaa5a361addb1"}, + {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:960edebedc6b9ada1ef58e1c71156f28689978188cd8cff3b646b57288a927d9"}, + {file = "grpcio-1.62.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:23e2e04b83f347d0aadde0c9b616f4726c3d76db04b438fd3904b289a725267f"}, + {file = "grpcio-1.62.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:978121758711916d34fe57c1f75b79cdfc73952f1481bb9583399331682d36f7"}, + {file = "grpcio-1.62.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9084086190cc6d628f282e5615f987288b95457292e969b9205e45b442276407"}, + {file = "grpcio-1.62.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:22bccdd7b23c420a27fd28540fb5dcbc97dc6be105f7698cb0e7d7a420d0e362"}, + {file = "grpcio-1.62.1-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:8999bf1b57172dbc7c3e4bb3c732658e918f5c333b2942243f10d0d653953ba9"}, + {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:d9e52558b8b8c2f4ac05ac86344a7417ccdd2b460a59616de49eb6933b07a0bd"}, + {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1714e7bc935780bc3de1b3fcbc7674209adf5208ff825799d579ffd6cd0bd505"}, + {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8842ccbd8c0e253c1f189088228f9b433f7a93b7196b9e5b6f87dba393f5d5d"}, + {file = "grpcio-1.62.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1f1e7b36bdff50103af95a80923bf1853f6823dd62f2d2a2524b66ed74103e49"}, + {file = "grpcio-1.62.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bba97b8e8883a8038606480d6b6772289f4c907f6ba780fa1f7b7da7dfd76f06"}, + {file = "grpcio-1.62.1-cp38-cp38-win32.whl", hash = "sha256:a7f615270fe534548112a74e790cd9d4f5509d744dd718cd442bf016626c22e4"}, + {file = "grpcio-1.62.1-cp38-cp38-win_amd64.whl", hash = "sha256:e6c8c8693df718c5ecbc7babb12c69a4e3677fd11de8886f05ab22d4e6b1c43b"}, + {file = "grpcio-1.62.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:73db2dc1b201d20ab7083e7041946910bb991e7e9761a0394bbc3c2632326483"}, + {file = "grpcio-1.62.1-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:407b26b7f7bbd4f4751dbc9767a1f0716f9fe72d3d7e96bb3ccfc4aace07c8de"}, + {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:f8de7c8cef9261a2d0a62edf2ccea3d741a523c6b8a6477a340a1f2e417658de"}, + {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd5c8a1af40ec305d001c60236308a67e25419003e9bb3ebfab5695a8d0b369"}, + {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be0477cb31da67846a33b1a75c611f88bfbcd427fe17701b6317aefceee1b96f"}, + {file = "grpcio-1.62.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:60dcd824df166ba266ee0cfaf35a31406cd16ef602b49f5d4dfb21f014b0dedd"}, + {file = "grpcio-1.62.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:973c49086cabab773525f6077f95e5a993bfc03ba8fc32e32f2c279497780585"}, + {file = "grpcio-1.62.1-cp39-cp39-win32.whl", hash = "sha256:12859468e8918d3bd243d213cd6fd6ab07208195dc140763c00dfe901ce1e1b4"}, + {file = "grpcio-1.62.1-cp39-cp39-win_amd64.whl", hash = "sha256:b7209117bbeebdfa5d898205cc55153a51285757902dd73c47de498ad4d11332"}, + {file = "grpcio-1.62.1.tar.gz", hash = "sha256:6c455e008fa86d9e9a9d85bb76da4277c0d7d9668a3bfa70dbe86e9f3c759947"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.62.0)"] +protobuf = ["grpcio-tools (>=1.62.1)"] [[package]] name = "grpcio-status" -version = "1.62.0" +version = "1.62.1" description = "Status proto mapping for gRPC" optional = false python-versions = ">=3.6" files = [ - {file = "grpcio-status-1.62.0.tar.gz", hash = "sha256:0d693e9c09880daeaac060d0c3dba1ae470a43c99e5d20dfeafd62cf7e08a85d"}, - {file = "grpcio_status-1.62.0-py3-none-any.whl", hash = "sha256:3baac03fcd737310e67758c4082a188107f771d32855bce203331cd4c9aa687a"}, + {file = "grpcio-status-1.62.1.tar.gz", hash = "sha256:3431c8abbab0054912c41df5c72f03ddf3b7a67be8a287bb3c18a3456f96ff77"}, + {file = "grpcio_status-1.62.1-py3-none-any.whl", hash = "sha256:af0c3ab85da31669f21749e8d53d669c061ebc6ce5637be49a46edcb7aa8ab17"}, ] [package.dependencies] googleapis-common-protos = ">=1.5.5" -grpcio = ">=1.62.0" +grpcio = ">=1.62.1" protobuf = ">=4.21.6" [[package]] @@ -684,13 +684,13 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -981,13 +981,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.9.0" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.9.0.tar.gz", hash = "sha256:78e73e19c63f5b20ffa567001531680d939dc042bf7850431877645523c66709"}, - {file = "python_dateutil-2.9.0-py2.py3-none-any.whl", hash = "sha256:cbf2f1da5e6083ac2fbfd4da39a25f34312230110440f424a14c7558bb85d82e"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -1135,13 +1135,13 @@ test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "tes [[package]] name = "requests-oauthlib" -version = "1.3.1" +version = "1.4.0" description = "OAuthlib authentication support for Requests." optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ - {file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"}, - {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"}, + {file = "requests-oauthlib-1.4.0.tar.gz", hash = "sha256:acee623221e4a39abcbb919312c8ff04bd44e7e417087fb4bd5e2a2f53d5e79a"}, + {file = "requests_oauthlib-1.4.0-py2.py3-none-any.whl", hash = "sha256:7a3130d94a17520169e38db6c8d75f2c974643788465ecc2e4b36d288bf13033"}, ] [package.dependencies] @@ -1167,18 +1167,18 @@ pyasn1 = ">=0.1.3" [[package]] name = "setuptools" -version = "69.1.1" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.1-py3-none-any.whl", hash = "sha256:02fa291a0471b3a18b2b2481ed902af520c69e8ae0919c13da936542754b4c56"}, - {file = "setuptools-69.1.1.tar.gz", hash = "sha256:5c0806c7d9af348e6dd3777b4f4dbb42c7ad85b190104837488eab9a7c945cf8"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] @@ -1341,4 +1341,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "85de59ee85d7cb6a28483c561e60f5fd6315a509a920e34639a1e3199f8ef4cd" +content-hash = "a25463d63da174630849b25c6ddf6b19d56e562e4b5e6ecb5fd8763db4ad6fe9" diff --git a/airbyte-integrations/connectors/source-google-ads/pyproject.toml b/airbyte-integrations/connectors/source-google-ads/pyproject.toml index b91a9d9917256..e085def2d81f0 100644 --- a/airbyte-integrations/connectors/source-google-ads/pyproject.toml +++ b/airbyte-integrations/connectors/source-google-ads/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "3.3.6" +version = "3.4.1" name = "source-google-ads" description = "Source implementation for Google Ads." authors = [ "Airbyte ",] @@ -19,7 +19,7 @@ include = "source_google_ads" python = "^3.9,<3.12" google-ads = "==22.1.0" protobuf = "==4.25.2" -airbyte-cdk = "==0.58.8" +airbyte-cdk = "^0" [tool.poetry.scripts] source-google-ads = "source_google_ads.run:run" diff --git a/airbyte-integrations/connectors/source-google-ads/source_google_ads/config_migrations.py b/airbyte-integrations/connectors/source-google-ads/source_google_ads/config_migrations.py index be206ee13e623..81fcfb195f139 100644 --- a/airbyte-integrations/connectors/source-google-ads/source_google_ads/config_migrations.py +++ b/airbyte-integrations/connectors/source-google-ads/source_google_ads/config_migrations.py @@ -3,7 +3,6 @@ # -import logging from typing import Any, List, Mapping from airbyte_cdk.config_observation import create_connector_config_control_message @@ -15,8 +14,6 @@ from .utils import GAQL -logger = logging.getLogger("airbyte_logger") - FULL_REFRESH_CUSTOM_TABLE = [ "asset", "asset_group_listing_group_filter", diff --git a/airbyte-integrations/connectors/source-google-ads/source_google_ads/google_ads.py b/airbyte-integrations/connectors/source-google-ads/source_google_ads/google_ads.py index c34833154ce67..911f8d9351cb9 100644 --- a/airbyte-integrations/connectors/source-google-ads/source_google_ads/google_ads.py +++ b/airbyte-integrations/connectors/source-google-ads/source_google_ads/google_ads.py @@ -3,7 +3,6 @@ # -import logging from enum import Enum from typing import Any, Iterable, Iterator, List, Mapping, MutableMapping @@ -16,8 +15,9 @@ from google.auth import exceptions from proto.marshal.collections import Repeated, RepeatedComposite +from .utils import logger + API_VERSION = "v15" -logger = logging.getLogger("airbyte") class GoogleAds: @@ -74,7 +74,12 @@ def get_accessible_accounts(self): ), max_tries=5, ) - def send_request(self, query: str, customer_id: str, login_customer_id: str = "default") -> Iterator[SearchGoogleAdsResponse]: + def send_request( + self, + query: str, + customer_id: str, + login_customer_id: str = "default", + ) -> Iterator[SearchGoogleAdsResponse]: client = self.get_client(login_customer_id) search_request = client.get_type("SearchGoogleAdsRequest") search_request.query = query diff --git a/airbyte-integrations/connectors/source-google-ads/source_google_ads/source.py b/airbyte-integrations/connectors/source-google-ads/source_google_ads/source.py index a51fe215868ad..d82e7d6911866 100644 --- a/airbyte-integrations/connectors/source-google-ads/source_google_ads/source.py +++ b/airbyte-integrations/connectors/source-google-ads/source_google_ads/source.py @@ -45,9 +45,7 @@ UserInterest, UserLocationView, ) -from .utils import GAQL - -logger = logging.getLogger("airbyte") +from .utils import GAQL, logger, traced_exception class SourceGoogleAds(AbstractSource): @@ -189,8 +187,9 @@ def check_connection(self, logger: logging.Logger, config: Mapping[str, Any]) -> logger.info(f"Found {len(customers)} customers: {[customer.id for customer in customers]}") # Check custom query request validity by sending metric request with non-existent time window - for customer in customers: - for query in config.get("custom_queries_array", []): + for query in config.get("custom_queries_array", []): + for customer in customers: + table_name = query["table_name"] query = query["query"] if customer.is_manager_account and self.is_metrics_in_custom_query(query): logger.warning( @@ -205,10 +204,18 @@ def check_connection(self, logger: logging.Logger, config: Mapping[str, Any]) -> query = IncrementalCustomQuery.insert_segments_date_expr(query, "1980-01-01", "1980-01-01") query = query.set_limit(1) - response = google_api.send_request(str(query), customer_id=customer.id, login_customer_id=customer.login_customer_id) + try: + response = google_api.send_request( + str(query), + customer_id=customer.id, + login_customer_id=customer.login_customer_id, + ) + except Exception as exc: + traced_exception(exc, customer.id, False, table_name) # iterate over the response otherwise exceptions will not be raised! for _ in response: pass + break return True, None def streams(self, config: Mapping[str, Any]) -> List[Stream]: diff --git a/airbyte-integrations/connectors/source-google-ads/source_google_ads/utils.py b/airbyte-integrations/connectors/source-google-ads/source_google_ads/utils.py index ab54a6ee116c9..3be3e847c60c5 100644 --- a/airbyte-integrations/connectors/source-google-ads/source_google_ads/utils.py +++ b/airbyte-integrations/connectors/source-google-ads/source_google_ads/utils.py @@ -3,6 +3,7 @@ # import functools +import logging import queue import re import threading @@ -17,10 +18,12 @@ from google.ads.googleads.errors import GoogleAdsException from google.ads.googleads.v15.errors.types.authentication_error import AuthenticationErrorEnum from google.ads.googleads.v15.errors.types.authorization_error import AuthorizationErrorEnum +from google.ads.googleads.v15.errors.types.query_error import QueryErrorEnum from google.ads.googleads.v15.errors.types.quota_error import QuotaErrorEnum from google.ads.googleads.v15.errors.types.request_error import RequestErrorEnum from google.api_core.exceptions import Unauthenticated -from source_google_ads.google_ads import logger + +logger = logging.getLogger("airbyte") def get_resource_name(stream_name: str) -> str: @@ -54,7 +57,12 @@ def is_error_type(error_value, target_enum_value): return int(error_value) == int(target_enum_value) -def traced_exception(ga_exception: Union[GoogleAdsException, Unauthenticated], customer_id: str, catch_disabled_customer_error: bool): +def traced_exception( + ga_exception: Union[GoogleAdsException, Unauthenticated], + customer_id: str, + catch_disabled_customer_error: bool, + query_name: Optional[str] = None, +) -> None: """Add user-friendly message for GoogleAdsException""" messages = [] raise_exception = AirbyteTracedException @@ -100,6 +108,13 @@ def traced_exception(ga_exception: Union[GoogleAdsException, Unauthenticated], c "https://support.google.com/google-ads/answer/2375392." ) + elif is_error_type(query_error, QueryErrorEnum.QueryError.UNRECOGNIZED_FIELD): + message = ( + f"The Custom Query: `{query_name}` has {error.message.lower()} Please make sure the field exists or name entered is valid." + ) + # additionally log the error for visability during `check_connection` in UI. + logger.error(message) + elif query_error: message = f"Incorrect custom query. {error.message}" diff --git a/airbyte-integrations/connectors/source-google-ads/unit_tests/common.py b/airbyte-integrations/connectors/source-google-ads/unit_tests/common.py index b2bff404d6e27..680f2744c564f 100644 --- a/airbyte-integrations/connectors/source-google-ads/unit_tests/common.py +++ b/airbyte-integrations/connectors/source-google-ads/unit_tests/common.py @@ -92,6 +92,11 @@ def __init__(self, name): "failure_msg": "Error in query: unexpected end of query.", "error_type": "queryError", }, + "UNRECOGNIZED_FIELD": { + "failure_code": QueryErrorEnum.QueryError.UNRECOGNIZED_FIELD, + "failure_msg": "unrecognized field in the query.", + "error_type": "queryError", + }, "RESOURCE_EXHAUSTED": {"failure_code": QuotaErrorEnum.QuotaError.RESOURCE_EXHAUSTED, "failure_msg": "msg4", "error_type": "quotaError"}, "UNEXPECTED_ERROR": { "failure_code": AuthorizationErrorEnum.AuthorizationError.UNKNOWN, diff --git a/airbyte-integrations/connectors/source-google-ads/unit_tests/test_errors.py b/airbyte-integrations/connectors/source-google-ads/unit_tests/test_errors.py index e71263296007d..5765059854db3 100644 --- a/airbyte-integrations/connectors/source-google-ads/unit_tests/test_errors.py +++ b/airbyte-integrations/connectors/source-google-ads/unit_tests/test_errors.py @@ -35,6 +35,7 @@ def mock_get_customers(mocker): "Failed to access the customer '123'. Ensure the customer is linked to your manager account or check your permissions to access this customer account.", ), (["QUERY_ERROR"], "Incorrect custom query. Error in query: unexpected end of query."), + (["UNRECOGNIZED_FIELD"], "The Custom Query: `None` has unrecognized field in the query. Please make sure the field exists or name entered is valid."), ( ["RESOURCE_EXHAUSTED"], ( diff --git a/airbyte-integrations/connectors/source-google-analytics-data-api/metadata.yaml b/airbyte-integrations/connectors/source-google-analytics-data-api/metadata.yaml index 8cfca1eec1105..58f264244449e 100644 --- a/airbyte-integrations/connectors/source-google-analytics-data-api/metadata.yaml +++ b/airbyte-integrations/connectors/source-google-analytics-data-api/metadata.yaml @@ -12,7 +12,7 @@ data: connectorSubtype: api connectorType: source definitionId: 3cc2eafd-84aa-4dca-93af-322d9dfeec1a - dockerImageTag: 2.4.1 + dockerImageTag: 2.4.2 dockerRepository: airbyte/source-google-analytics-data-api documentationUrl: https://docs.airbyte.com/integrations/sources/google-analytics-data-api githubIssueLabel: source-google-analytics-data-api diff --git a/airbyte-integrations/connectors/source-google-analytics-data-api/poetry.lock b/airbyte-integrations/connectors/source-google-analytics-data-api/poetry.lock index fc68d9d8fe8f5..ddf883ab62793 100644 --- a/airbyte-integrations/connectors/source-google-analytics-data-api/poetry.lock +++ b/airbyte-integrations/connectors/source-google-analytics-data-api/poetry.lock @@ -1,14 +1,14 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.61.0" +version = "0.72.2" description = "A framework for writing Airbyte Connectors." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte-cdk-0.61.0.tar.gz", hash = "sha256:8beda008c5a177041ac02860a431ce7b1ecd00062a4a8f31fe6ac446cbed3e70"}, - {file = "airbyte_cdk-0.61.0-py3-none-any.whl", hash = "sha256:3f989bfe692c9519d61f9120ddb744ab82c432c2caf25374d4d6f5cdc374a1e9"}, + {file = "airbyte-cdk-0.72.2.tar.gz", hash = "sha256:3c06ed9c1436967ffde77b51814772dbbd79745d610bc2fe400dff9c4d7a9877"}, + {file = "airbyte_cdk-0.72.2-py3-none-any.whl", hash = "sha256:8d50773fe9ffffe9be8d6c2d2fcb10c50153833053b3ef4283fcb39c544dc4b9"}, ] [package.dependencies] @@ -32,8 +32,8 @@ requests-cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] @@ -104,13 +104,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -632,13 +632,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -966,13 +966,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -1083,13 +1083,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -1101,15 +1101,15 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" @@ -1132,19 +1132,19 @@ test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "tes [[package]] name = "setuptools" -version = "69.0.3" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, - {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -1170,24 +1170,24 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] name = "tzdata" -version = "2023.4" +version = "2024.1" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" files = [ - {file = "tzdata-2023.4-py2.py3-none-any.whl", hash = "sha256:aa3ace4329eeacda5b7beb7ea08ece826c28d761cda36e747cfbf97996d39bf3"}, - {file = "tzdata-2023.4.tar.gz", hash = "sha256:dd54c94f294765522c77399649b4fefd95522479a664a0cec87f41bebc6148c9"}, + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, ] [[package]] @@ -1206,13 +1206,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1317,4 +1317,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "e30751a43a938ad9b4dc31ecc79afd469721b19b982cf9b1470b4a45ea707b8e" +content-hash = "ebf656b11e5eb19a2305a7dcbcc6f2a69d02a6f4f42c0e5c5ec753b2f711e0cf" diff --git a/airbyte-integrations/connectors/source-google-analytics-data-api/pyproject.toml b/airbyte-integrations/connectors/source-google-analytics-data-api/pyproject.toml index b3a3b51f287c3..4c6f962a5a4cd 100644 --- a/airbyte-integrations/connectors/source-google-analytics-data-api/pyproject.toml +++ b/airbyte-integrations/connectors/source-google-analytics-data-api/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "2.4.1" +version = "2.4.2" name = "source-google-analytics-data-api" description = "Source implementation for Google Analytics Data Api." authors = [ "Airbyte ",] @@ -19,7 +19,7 @@ include = "source_google_analytics_data_api" python = "^3.9,<3.12" cryptography = "==37.0.4" requests = "==2.31.0" -airbyte-cdk = "==0.61.0" +airbyte-cdk = "^0" PyJWT = "==2.4.0" pandas = "==2.2.0" diff --git a/airbyte-integrations/connectors/source-google-analytics-data-api/source_google_analytics_data_api/source.py b/airbyte-integrations/connectors/source-google-analytics-data-api/source_google_analytics_data_api/source.py index b84a91cfafc68..89958f13139bb 100644 --- a/airbyte-integrations/connectors/source-google-analytics-data-api/source_google_analytics_data_api/source.py +++ b/airbyte-integrations/connectors/source-google-analytics-data-api/source_google_analytics_data_api/source.py @@ -283,7 +283,15 @@ def parse_response( record["endDate"] = stream_slice["endDate"] yield record - def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]): + def get_updated_state( + self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any] + ) -> MutableMapping[str, Any]: + if not self.cursor_field: + # Some implementations of the GoogleAnalyticsDataApiBaseStream might not have a cursor because it's + # based on the `dimensions` config setting. This results in a full_refresh only stream that implements + # get_updated_state(), but does not define a cursor. For this scenario, there is no state value to extract + return {} + updated_state = ( utils.string_to_date(latest_record[self.cursor_field], self._record_date_format) if self.cursor_field == "date" diff --git a/airbyte-integrations/connectors/source-google-analytics-data-api/unit_tests/test_migration_cohortspec/test_config_migration_cohortspec.py b/airbyte-integrations/connectors/source-google-analytics-data-api/unit_tests/test_migration_cohortspec/test_config_migration_cohortspec.py index de76bda4e8a4a..340d6db7660ea 100644 --- a/airbyte-integrations/connectors/source-google-analytics-data-api/unit_tests/test_migration_cohortspec/test_config_migration_cohortspec.py +++ b/airbyte-integrations/connectors/source-google-analytics-data-api/unit_tests/test_migration_cohortspec/test_config_migration_cohortspec.py @@ -32,7 +32,8 @@ def test_migrate_config(capsys): # migrate the test_config migration_instance.migrate(SOURCE_INPUT_ARGS, SOURCE) - control_msg = json.loads(capsys.readouterr().out) + what = capsys.readouterr().out + control_msg = json.loads(what) assert control_msg["type"] == Type.CONTROL.value assert control_msg["control"]["type"] == OrchestratorType.CONNECTOR_CONFIG.value diff --git a/airbyte-integrations/connectors/source-google-analytics-data-api/unit_tests/test_migrations/test_config.json b/airbyte-integrations/connectors/source-google-analytics-data-api/unit_tests/test_migrations/test_config.json index 9b00b00f949f4..4b03ebb2315eb 100644 --- a/airbyte-integrations/connectors/source-google-analytics-data-api/unit_tests/test_migrations/test_config.json +++ b/airbyte-integrations/connectors/source-google-analytics-data-api/unit_tests/test_migrations/test_config.json @@ -1,8 +1,5 @@ { - "credentials": { - "auth_type": "Service", - "credentials_json": "" - }, + "credentials": { "auth_type": "Service", "credentials_json": "" }, "custom_reports": "[{\"name\": \"custom_dimensions\", \"dimensions\": [\"date\", \"country\", \"device\"]}]", "date_ranges_start_date": "2023-09-01", "window_in_days": 30, diff --git a/airbyte-integrations/connectors/source-google-analytics-data-api/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-google-analytics-data-api/unit_tests/test_streams.py index 92b57153f5c8a..e61d3be7af15e 100644 --- a/airbyte-integrations/connectors/source-google-analytics-data-api/unit_tests/test_streams.py +++ b/airbyte-integrations/connectors/source-google-analytics-data-api/unit_tests/test_streams.py @@ -10,7 +10,7 @@ import pytest from freezegun import freeze_time -from source_google_analytics_data_api.source import GoogleAnalyticsDataApiBaseStream +from source_google_analytics_data_api.source import GoogleAnalyticsDataApiBaseStream, SourceGoogleAnalyticsDataApi from .utils import read_incremental @@ -391,3 +391,54 @@ def test_read_incremental(requests_mock): {"property_id": 123, "yearWeek": "202202", "totalUsers": 125, "startDate": "2022-01-09", "endDate": "2022-01-09"}, {"property_id": 123, "yearWeek": "202202", "totalUsers": 140, "startDate": "2022-01-10", "endDate": "2022-01-10"}, ] + +@pytest.mark.parametrize( + "config_dimensions, expected_state", + [ + pytest.param(["browser", "country", "language", "date"], {"date": "20240320"}, id="test_date_no_cursor_field_dimension"), + pytest.param(["browser", "country", "language"], {}, id="test_date_cursor_field_dimension"), + ] +) +def test_get_updated_state(config_dimensions, expected_state): + config = { + "credentials": { + "auth_type": "Service", + "credentials_json": "{ \"client_email\": \"a@gmail.com\", \"client_id\": \"1234\", \"client_secret\": \"5678\", \"private_key\": \"5678\"}" + }, + "date_ranges_start_date": "2023-04-01", + "window_in_days": 30, + "property_ids": ["123"], + "custom_reports_array": [ + { + "name": "pivot_report", + "dateRanges": [{"startDate": "2020-09-01", "endDate": "2020-09-15"}], + "dimensions": config_dimensions, + "metrics": ["sessions"], + "pivots": [ + { + "fieldNames": ["browser"], + "limit": 5 + }, + { + "fieldNames": ["country"], + "limit": 250 + }, + { + "fieldNames": ["language"], + "limit": 15 + } + ], + "cohortSpec": { + "enabled": "false" + } + } + ] + } + source = SourceGoogleAnalyticsDataApi() + config = source._validate_and_transform(config, report_names=set()) + config["authenticator"] = source.get_authenticator(config) + report_stream = source.instantiate_report_class(config["custom_reports_array"][0], False, config, page_size=100) + + actual_state = report_stream.get_updated_state(current_stream_state={}, latest_record={"date": "20240320"}) + + assert actual_state == expected_state diff --git a/airbyte-integrations/connectors/source-google-analytics-v4/metadata.yaml b/airbyte-integrations/connectors/source-google-analytics-v4/metadata.yaml index ea9c7148f538e..fb586b68d53cb 100644 --- a/airbyte-integrations/connectors/source-google-analytics-v4/metadata.yaml +++ b/airbyte-integrations/connectors/source-google-analytics-v4/metadata.yaml @@ -13,12 +13,13 @@ data: connectorSubtype: api connectorType: source definitionId: eff3616a-f9c3-11eb-9a03-0242ac130003 - dockerImageTag: 0.2.5 + dockerImageTag: 0.3.0 dockerRepository: airbyte/source-google-analytics-v4 documentationUrl: https://docs.airbyte.com/integrations/sources/google-analytics-v4 githubIssueLabel: source-google-analytics-v4 icon: google-analytics.svg license: Elv2 + maxSecondsBetweenMessages: 86400 name: Google Analytics (Universal Analytics) remoteRegistries: pypi: diff --git a/airbyte-integrations/connectors/source-google-analytics-v4/poetry.lock b/airbyte-integrations/connectors/source-google-analytics-v4/poetry.lock index e5efccf125507..ebc0f61075aa3 100644 --- a/airbyte-integrations/connectors/source-google-analytics-v4/poetry.lock +++ b/airbyte-integrations/connectors/source-google-analytics-v4/poetry.lock @@ -1,14 +1,14 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.59.2" +version = "0.72.1" description = "A framework for writing Airbyte Connectors." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte-cdk-0.59.2.tar.gz", hash = "sha256:cd8a2b679ddd01ac1db9d42e4326c4b4e815dcaf7e1654cbe327cfce7654f07b"}, - {file = "airbyte_cdk-0.59.2-py3-none-any.whl", hash = "sha256:90f9144f1519e0c66e260b68be94bb9b8f87130276353073c416df2e62ce7c7e"}, + {file = "airbyte-cdk-0.72.1.tar.gz", hash = "sha256:1dbd0a11f3784cfdd5afa9f40315c9a6123e803be91f9f861642a78e7ee14cd9"}, + {file = "airbyte_cdk-0.72.1-py3-none-any.whl", hash = "sha256:849077805442286de99f589ecba4be82491a3d9d3f516ce1a8b0cbaf303db9a4"}, ] [package.dependencies] @@ -32,8 +32,8 @@ requests-cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] @@ -104,13 +104,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -590,13 +590,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -853,13 +853,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -901,7 +901,6 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -959,13 +958,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -977,15 +976,15 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" @@ -1008,19 +1007,19 @@ test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "tes [[package]] name = "setuptools" -version = "69.0.3" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, - {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -1046,13 +1045,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -1071,13 +1070,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1182,4 +1181,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "71852fc65c462f57b22c7c55c2df234d3c27e121649a777a65dd74fd75c9d91b" +content-hash = "d68ea8c1a87cfb855bbfa6c6ec097c762368af2bac525b588c3f2fff9c48bc2c" diff --git a/airbyte-integrations/connectors/source-google-analytics-v4/pyproject.toml b/airbyte-integrations/connectors/source-google-analytics-v4/pyproject.toml index b0e774e072a9f..c0ad577ff23ab 100644 --- a/airbyte-integrations/connectors/source-google-analytics-v4/pyproject.toml +++ b/airbyte-integrations/connectors/source-google-analytics-v4/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "0.2.5" +version = "0.3.0" name = "source-google-analytics-v4" description = "Source implementation for Google Analytics V4." authors = [ "Airbyte ",] @@ -17,7 +17,7 @@ include = "source_google_analytics_v4" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "==0.59.2" +airbyte-cdk = "^0" PyJWT = "==2.8.0" requests = "==2.31.0" cryptography = "==41.0.4" diff --git a/airbyte-integrations/connectors/source-google-drive/README.md b/airbyte-integrations/connectors/source-google-drive/README.md index 586d5cdecae97..c93c464c64316 100644 --- a/airbyte-integrations/connectors/source-google-drive/README.md +++ b/airbyte-integrations/connectors/source-google-drive/README.md @@ -1,69 +1,55 @@ -# Google Drive Source +# Google Drive source connector + This is the repository for the Google Drive source connector, written in Python. -For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/google-drive). +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.com/integrations/sources/google-drive). ## Local development ### Prerequisites -**To iterate on this connector, make sure to complete this prerequisites section.** - -#### Minimum Python version required `= 3.10.0` +* Python (~=3.9) +* Poetry (~=1.7) - installation instructions [here](https://python-poetry.org/docs/#installation) -#### Build & Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python -m venv .venv -``` -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt -pip install '.[tests]' +### Installing the connector +From this connector directory, run: +```bash +poetry install --with dev ``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. -#### Create credentials -**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/google-drive) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_google_drive/spec.json` file. +### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/sources/google-drive) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_google_drive/spec.yaml` file. Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. -See `integration_tests/sample_config.json` for a sample config file. +See `sample_files/sample_config.json` for a sample config file. -**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source google-drive test creds` -and place them into `secrets/config.json`. ### Locally running the connector ``` -python main.py spec -python main.py check --config secrets/config.json -python main.py discover --config secrets/config.json -python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +poetry run source-google-drive spec +poetry run source-google-drive check --config secrets/config.json +poetry run source-google-drive discover --config secrets/config.json +poetry run source-google-drive read --config secrets/config.json --catalog sample_files/configured_catalog.json ``` -### Locally running the connector docker image - +### Running unit tests +To run unit tests locally, from the connector directory run: +``` +poetry run pytest unit_tests +``` -#### Build -**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):** +### Building the docker image +1. Install [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) +2. Run the following command to build the docker image: ```bash airbyte-ci connectors --name=source-google-drive build ``` -An image will be built with the tag `airbyte/source-google-drive:dev`. +An image will be available on your host with the tag `airbyte/source-google-drive:dev`. -**Via `docker build`:** -```bash -docker build -t airbyte/source-google-drive:dev . -``` -#### Run +### Running as a docker container Then run any of the connector commands as follows: ``` docker run --rm airbyte/source-google-drive:dev spec @@ -72,29 +58,34 @@ docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-google-drive:dev disco docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-google-drive:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json ``` -## Testing +### Running our CI test suite You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): ```bash airbyte-ci connectors --name=source-google-drive test ``` ### Customizing acceptance Tests -Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. +Customize `acceptance-test-config.yml` file to configure acceptance tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. -## Dependency Management -All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. -We split dependencies between two groups, dependencies that are: -* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. -* required for the testing need to go to `TEST_REQUIREMENTS` list +### Dependency Management +All of your dependencies should be managed via Poetry. +To add a new dependency, run: +```bash +poetry add +``` + +Please commit the changes to `pyproject.toml` and `poetry.lock` files. -### Publishing a new version of the connector +## Publishing a new version of the connector You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? 1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-google-drive test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). +2. Bump the connector version (please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors)): + - bump the `dockerImageTag` value in in `metadata.yaml` + - bump the `version` value in `pyproject.toml` 3. Make sure the `metadata.yaml` content is up to date. -4. Make the connector documentation and its changelog is up to date (`docs/integrations/sources/google-drive.md`). +4. Make sure the connector documentation and its changelog is up to date (`docs/integrations/sources/google-drive.md`). 5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). 6. Pat yourself on the back for being an awesome contributor. 7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. - +8. Once your PR is merged, the new version of the connector will be automatically published to Docker Hub and our connector registry. diff --git a/airbyte-integrations/connectors/source-google-drive/integration_tests/spec.json b/airbyte-integrations/connectors/source-google-drive/integration_tests/spec.json index e1341d1bbe272..451041e8afa61 100644 --- a/airbyte-integrations/connectors/source-google-drive/integration_tests/spec.json +++ b/airbyte-integrations/connectors/source-google-drive/integration_tests/spec.json @@ -31,9 +31,9 @@ }, "globs": { "title": "Globs", + "description": "The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here.", "default": ["**"], "order": 1, - "description": "The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here.", "type": "array", "items": { "type": "string" @@ -53,8 +53,8 @@ "primary_key": { "title": "Primary Key", "description": "The column or columns (for a composite key) that serves as the unique identifier of a record. If empty, the primary key will default to the parser's default primary key.", - "type": "string", - "airbyte_hidden": true + "airbyte_hidden": true, + "type": "string" }, "days_to_sync_if_history_is_full": { "title": "Days To Sync If History Is Full", @@ -229,6 +229,12 @@ "type": "string" }, "uniqueItems": true + }, + "ignore_errors_on_fields_mismatch": { + "title": "Ignore errors on field mismatch", + "description": "Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema.", + "default": false, + "type": "boolean" } }, "required": ["filetype"] @@ -276,20 +282,20 @@ "type": "string" }, "skip_unprocessable_files": { - "type": "boolean", - "default": true, "title": "Skip Unprocessable Files", "description": "If true, skip files that cannot be parsed and pass the error message along as the _ab_source_file_parse_error field. If false, fail the sync.", - "always_show": true + "default": true, + "always_show": true, + "type": "boolean" }, "strategy": { - "type": "string", + "title": "Parsing Strategy", + "description": "The strategy used to parse documents. `fast` extracts text directly from the document which doesn't work for all files. `ocr_only` is more reliable, but slower. `hi_res` is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf", + "default": "auto", "always_show": true, "order": 0, - "default": "auto", - "title": "Parsing Strategy", "enum": ["auto", "fast", "ocr_only", "hi_res"], - "description": "The strategy used to parse documents. `fast` extracts text directly from the document which doesn't work for all files. `ocr_only` is more reliable, but slower. `hi_res` is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + "type": "string" }, "processing": { "title": "Processing", diff --git a/airbyte-integrations/connectors/source-google-drive/metadata.yaml b/airbyte-integrations/connectors/source-google-drive/metadata.yaml index 4eaddbf2414d8..bfeab0fa033ab 100644 --- a/airbyte-integrations/connectors/source-google-drive/metadata.yaml +++ b/airbyte-integrations/connectors/source-google-drive/metadata.yaml @@ -7,7 +7,7 @@ data: connectorSubtype: file connectorType: source definitionId: 9f8dda77-1048-4368-815b-269bf54ee9b8 - dockerImageTag: 0.0.9 + dockerImageTag: 0.0.10 dockerRepository: airbyte/source-google-drive githubIssueLabel: source-google-drive icon: google-drive.svg diff --git a/airbyte-integrations/connectors/source-google-drive/poetry.lock b/airbyte-integrations/connectors/source-google-drive/poetry.lock new file mode 100644 index 0000000000000..ed38ba2ea8f9c --- /dev/null +++ b/airbyte-integrations/connectors/source-google-drive/poetry.lock @@ -0,0 +1,2508 @@ +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. + +[[package]] +name = "airbyte-cdk" +version = "0.78.6" +description = "A framework for writing Airbyte Connectors." +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "airbyte_cdk-0.78.6-py3-none-any.whl", hash = "sha256:e5f44c6da6d5b5d6f3f6a7f41a3f4a5e2dfc6fefb4c6823af6302c34c6fb4a87"}, + {file = "airbyte_cdk-0.78.6.tar.gz", hash = "sha256:0178f3cefa705f600d51f09e1313024a89cd1c99f2f1f796e8e0181d8e02ad2f"}, +] + +[package.dependencies] +airbyte-protocol-models = "0.5.1" +avro = {version = ">=1.11.2,<1.12.0", optional = true, markers = "extra == \"file-based\""} +backoff = "*" +cachetools = "*" +Deprecated = ">=1.2,<1.3" +dpath = ">=2.0.1,<2.1.0" +fastavro = {version = ">=1.8.0,<1.9.0", optional = true, markers = "extra == \"file-based\""} +genson = "1.2.2" +isodate = ">=0.6.1,<0.7.0" +Jinja2 = ">=3.1.2,<3.2.0" +jsonref = ">=0.2,<0.3" +jsonschema = ">=3.2.0,<3.3.0" +markdown = {version = "*", optional = true, markers = "extra == \"file-based\""} +pdf2image = {version = "1.16.3", optional = true, markers = "extra == \"file-based\""} +"pdfminer.six" = {version = "20221105", optional = true, markers = "extra == \"file-based\""} +pendulum = "<3.0.0" +pyarrow = {version = ">=15.0.0,<15.1.0", optional = true, markers = "extra == \"file-based\""} +pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" +pytesseract = {version = "0.3.10", optional = true, markers = "extra == \"file-based\""} +python-dateutil = "*" +PyYAML = ">=6.0.1,<7.0.0" +requests = "*" +requests_cache = "*" +unstructured = {version = "0.10.27", extras = ["docx", "pptx"], optional = true, markers = "extra == \"file-based\""} +"unstructured.pytesseract" = {version = ">=0.3.12", optional = true, markers = "extra == \"file-based\""} +wcmatch = "8.4" + +[package.extras] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] +vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] + +[[package]] +name = "airbyte-protocol-models" +version = "0.5.1" +description = "Declares the Airbyte Protocol." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, + {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, +] + +[package.dependencies] +pydantic = ">=1.9.2,<2.0.0" + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "avro" +version = "1.11.3" +description = "Avro is a serialization and RPC framework." +optional = false +python-versions = ">=3.6" +files = [ + {file = "avro-1.11.3.tar.gz", hash = "sha256:3393bb5139f9cf0791d205756ce1e39a5b58586af5b153d6a3b5a199610e9d17"}, +] + +[package.extras] +snappy = ["python-snappy"] +zstandard = ["zstandard"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "beautifulsoup4" +version = "4.12.3" +description = "Screen-scraping library" +optional = false +python-versions = ">=3.6.0" +files = [ + {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, + {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, +] + +[package.dependencies] +soupsieve = ">1.2" + +[package.extras] +cchardet = ["cchardet"] +chardet = ["chardet"] +charset-normalizer = ["charset-normalizer"] +html5lib = ["html5lib"] +lxml = ["lxml"] + +[[package]] +name = "bracex" +version = "2.4" +description = "Bash style brace expander." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + +[[package]] +name = "cachetools" +version = "5.3.3" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "cffi" +version = "1.16.0" +description = "Foreign Function Interface for Python calling C code." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, + {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, + {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, + {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, + {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "chardet" +version = "5.2.0" +description = "Universal encoding detector for Python 3" +optional = false +python-versions = ">=3.7" +files = [ + {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"}, + {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "cryptography" +version = "42.0.5" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, + {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, + {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, + {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, + {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, + {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = "sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, + {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, +] + +[package.dependencies] +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] +nox = ["nox"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "dataclasses-json" +version = "0.6.4" +description = "Easily serialize dataclasses to and from JSON." +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "dataclasses_json-0.6.4-py3-none-any.whl", hash = "sha256:f90578b8a3177f7552f4e1a6e535e84293cd5da421fcce0642d49c0d7bdf8df2"}, + {file = "dataclasses_json-0.6.4.tar.gz", hash = "sha256:73696ebf24936560cca79a2430cbc4f3dd23ac7bf46ed17f38e5e5e7657a6377"}, +] + +[package.dependencies] +marshmallow = ">=3.18.0,<4.0.0" +typing-inspect = ">=0.4.0,<1" + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "dpath" +version = "2.0.8" +description = "Filesystem-like pathing and searching for dictionaries" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, + {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, +] + +[[package]] +name = "emoji" +version = "2.11.0" +description = "Emoji for Python" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +files = [ + {file = "emoji-2.11.0-py2.py3-none-any.whl", hash = "sha256:63fc9107f06c6c2e48e5078ce9575cef98518f5ac09474f6148a43e989989582"}, + {file = "emoji-2.11.0.tar.gz", hash = "sha256:772eaa30f4e0b1ce95148a092df4c7dc97644532c03225326b0fd05e8a9f72a3"}, +] + +[package.extras] +dev = ["coverage", "coveralls", "pytest"] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "fastavro" +version = "1.8.4" +description = "Fast read/write of AVRO files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fastavro-1.8.4-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:7afe1475e8a967c04e2b0ef4d33bc10bffa66b4fa6e08bd2ee9d91b6768cba2a"}, + {file = "fastavro-1.8.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5fd73609f3c1ac0d90ae3179d2fb9d788f842245db2656ff9225fce871fc5b7"}, + {file = "fastavro-1.8.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78fdf1ba47e43146af72ac48d7b2247a06c4f2d95dfdaad6129c481014b07a6b"}, + {file = "fastavro-1.8.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d950542b3263653f00b695cbc728b5c60ab9ea6df32a7017ad9a6a67235386e7"}, + {file = "fastavro-1.8.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ce2ccfa9aff8df6da683c48542b7b2a216dde6d3a4d1c505c5e1b8ca2ec0abbb"}, + {file = "fastavro-1.8.4-cp310-cp310-win_amd64.whl", hash = "sha256:f12f9914d6196695d3208ea348145a80d0defefe16b8a226373fe8ce68f66139"}, + {file = "fastavro-1.8.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d353aec9c000b96c33ad285651a2cba0f87fe50fcdecc6120689996af427194d"}, + {file = "fastavro-1.8.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1eaed91d6e1fb06c172e0aaf4b1ca1fd019c3f4a481e314bf783a4c74f6b7015"}, + {file = "fastavro-1.8.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9293b303955acd34a6f69dd4ef3465bd575dbde0cd3e3f00227a0ba5012430b4"}, + {file = "fastavro-1.8.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b79baefd61554d9f03c4beaebbe638ef175d0efc1fb01f25e88ee6ae97985ab3"}, + {file = "fastavro-1.8.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:14d7cd3df019d41c66733b8bf5d983af9e1f601d4cb162853a49238a4087d6b0"}, + {file = "fastavro-1.8.4-cp311-cp311-win_amd64.whl", hash = "sha256:c8fb27001b7023910969f15bee2c9205c4e9f40713929d6c1dca8f470fc8fc80"}, + {file = "fastavro-1.8.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e331229acef15f858d9863ced7b629ebef4bd5f80766d367255e51cbf44f8dab"}, + {file = "fastavro-1.8.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04e26b3ba288bd423f25630a3b9bd70cc61b46c6f6161de35e398a6fc8f260f0"}, + {file = "fastavro-1.8.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6281f4555659ed658b195d1618a637504013e57b680d6cbad7c726e9a4e2cf0b"}, + {file = "fastavro-1.8.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3201880149e1fb807d616ab46b338a26788173a9f4e8a3396ae145e86af878a1"}, + {file = "fastavro-1.8.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:39771719fa04b8321eeebfb0813eaa2723c20e5bf570bcca3f53f1169099a0d7"}, + {file = "fastavro-1.8.4-cp312-cp312-win_amd64.whl", hash = "sha256:7095ae37a5c46dacb7ef430092e5f94650f576be281487b72050c1cf12e4ee20"}, + {file = "fastavro-1.8.4-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:eb76f5bfcde91cde240c93594dae47670cdf1a95d7e5d0dc3ccdef57c6c1c183"}, + {file = "fastavro-1.8.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71ebe1cf090f800ca7d4c64d50c81c2a88c56e6ef6aa5eb61ec425e7ae723617"}, + {file = "fastavro-1.8.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9f0ef601943ea11cd02a59c57f5588cea3e300ac67608f53c904ec7aeddd232"}, + {file = "fastavro-1.8.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1060318f3ab31bcc4b2184cea3763305b773163381601e304000da81a2f7e11f"}, + {file = "fastavro-1.8.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:01c8c7f22172174f2c2c0922801b552fbca75758f84b0ad3cd6f3e505a76ed05"}, + {file = "fastavro-1.8.4-cp38-cp38-win_amd64.whl", hash = "sha256:bc8a1af80b8face4a41d8526a34b6474a874f7367a900d0b14752eacebb7a2b8"}, + {file = "fastavro-1.8.4-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:687a2f8fa83a76906c4ec35c9d0500e13a567fc631845f0e47646c48233c7725"}, + {file = "fastavro-1.8.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b921c63fcfb9149a32c40a9cd27b0e900fcda602455cbce4d773300019b9ce2"}, + {file = "fastavro-1.8.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2610a8683b10be7aaa532ddddbcb719883ee2d6f09dafd4a4a7b46d5d719fc07"}, + {file = "fastavro-1.8.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:94448dc749d098f846f6a6d82d59f85483bd6fcdecfb6234daac5f4494ae4156"}, + {file = "fastavro-1.8.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2d39c6b5db7014a3722a7d206310874430486f4895161911b6b6574cb1a6c48f"}, + {file = "fastavro-1.8.4-cp39-cp39-win_amd64.whl", hash = "sha256:3b73472e8da33bcbf08ec989996637aea04eaca71058bb6d45def6fa4168f541"}, + {file = "fastavro-1.8.4.tar.gz", hash = "sha256:dae6118da27e81abf5957dc79a6d778888fc1bbf67645f52959cb2faba95beff"}, +] + +[package.extras] +codecs = ["lz4", "python-snappy", "zstandard"] +lz4 = ["lz4"] +snappy = ["python-snappy"] +zstandard = ["zstandard"] + +[[package]] +name = "filetype" +version = "1.2.0" +description = "Infer file type and MIME type of any file/buffer. No external dependencies." +optional = false +python-versions = "*" +files = [ + {file = "filetype-1.2.0-py2.py3-none-any.whl", hash = "sha256:7ce71b6880181241cf7ac8697a2f1eb6a8bd9b429f7ad6d27b8db9ba5f1c2d25"}, + {file = "filetype-1.2.0.tar.gz", hash = "sha256:66b56cd6474bf41d8c54660347d37afcc3f7d1970648de365c102ef77548aadb"}, +] + +[[package]] +name = "genson" +version = "1.2.2" +description = "GenSON is a powerful, user-friendly JSON Schema generator." +optional = false +python-versions = "*" +files = [ + {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, +] + +[[package]] +name = "google-api-core" +version = "2.18.0" +description = "Google API client core library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-api-core-2.18.0.tar.gz", hash = "sha256:62d97417bfc674d6cef251e5c4d639a9655e00c45528c4364fbfebb478ce72a9"}, + {file = "google_api_core-2.18.0-py3-none-any.whl", hash = "sha256:5a63aa102e0049abe85b5b88cb9409234c1f70afcda21ce1e40b285b9629c1d6"}, +] + +[package.dependencies] +google-auth = ">=2.14.1,<3.0.dev0" +googleapis-common-protos = ">=1.56.2,<2.0.dev0" +proto-plus = ">=1.22.3,<2.0.0dev" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" +requests = ">=2.18.0,<3.0.0.dev0" + +[package.extras] +grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"] +grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] +grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] + +[[package]] +name = "google-api-python-client" +version = "2.104.0" +description = "Google API Client Library for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-api-python-client-2.104.0.tar.gz", hash = "sha256:bbc66520e7fe9417b93fd113f2a0a1afa789d686de9009b6e94e48fdea50a60f"}, + {file = "google_api_python_client-2.104.0-py2.py3-none-any.whl", hash = "sha256:867061526aa6dc6c1481d118e913a8a38a02a01eed589413968397ebd77df71d"}, +] + +[package.dependencies] +google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0.dev0" +google-auth = ">=1.19.0,<3.0.0.dev0" +google-auth-httplib2 = ">=0.1.0" +httplib2 = ">=0.15.0,<1.dev0" +uritemplate = ">=3.0.1,<5" + +[[package]] +name = "google-api-python-client-stubs" +version = "1.18.0" +description = "Type stubs for google-api-python-client" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "google_api_python_client_stubs-1.18.0-py3-none-any.whl", hash = "sha256:1e42927ae6ba68aabb5a3791cacbd2ac63f98297d8eccb366086b27224bae3c8"}, + {file = "google_api_python_client_stubs-1.18.0.tar.gz", hash = "sha256:3166203a298b10c9f90b8974d513880c42ea026980e763a23719bbed4477f0a1"}, +] + +[package.dependencies] +google-api-python-client = ">=2.102.0" +typing-extensions = ">=3.10.0" + +[[package]] +name = "google-auth" +version = "2.29.0" +description = "Google Authentication Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-auth-2.29.0.tar.gz", hash = "sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360"}, + {file = "google_auth-2.29.0-py2.py3-none-any.whl", hash = "sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415"}, +] + +[package.dependencies] +cachetools = ">=2.0.0,<6.0" +pyasn1-modules = ">=0.2.1" +rsa = ">=3.1.4,<5" + +[package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] +enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] +pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] +reauth = ["pyu2f (>=0.1.5)"] +requests = ["requests (>=2.20.0,<3.0.0.dev0)"] + +[[package]] +name = "google-auth-httplib2" +version = "0.1.1" +description = "Google Authentication Library: httplib2 transport" +optional = false +python-versions = "*" +files = [ + {file = "google-auth-httplib2-0.1.1.tar.gz", hash = "sha256:c64bc555fdc6dd788ea62ecf7bccffcf497bf77244887a3f3d7a5a02f8e3fc29"}, + {file = "google_auth_httplib2-0.1.1-py2.py3-none-any.whl", hash = "sha256:42c50900b8e4dcdf8222364d1f0efe32b8421fb6ed72f2613f12f75cc933478c"}, +] + +[package.dependencies] +google-auth = "*" +httplib2 = ">=0.19.0" + +[[package]] +name = "google-auth-oauthlib" +version = "1.1.0" +description = "Google Authentication Library" +optional = false +python-versions = ">=3.6" +files = [ + {file = "google-auth-oauthlib-1.1.0.tar.gz", hash = "sha256:83ea8c3b0881e453790baff4448e8a6112ac8778d1de9da0b68010b843937afb"}, + {file = "google_auth_oauthlib-1.1.0-py2.py3-none-any.whl", hash = "sha256:089c6e587d36f4803ac7e0720c045c6a8b1fd1790088b8424975b90d0ee61c12"}, +] + +[package.dependencies] +google-auth = ">=2.15.0" +requests-oauthlib = ">=0.7.0" + +[package.extras] +tool = ["click (>=6.0.0)"] + +[[package]] +name = "googleapis-common-protos" +version = "1.63.0" +description = "Common protobufs used in Google APIs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "googleapis-common-protos-1.63.0.tar.gz", hash = "sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e"}, + {file = "googleapis_common_protos-1.63.0-py2.py3-none-any.whl", hash = "sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632"}, +] + +[package.dependencies] +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" + +[package.extras] +grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] + +[[package]] +name = "httplib2" +version = "0.22.0" +description = "A comprehensive HTTP client library." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "httplib2-0.22.0-py3-none-any.whl", hash = "sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc"}, + {file = "httplib2-0.22.0.tar.gz", hash = "sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81"}, +] + +[package.dependencies] +pyparsing = {version = ">=2.4.2,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.0.2 || >3.0.2,<3.0.3 || >3.0.3,<4", markers = "python_version > \"3.0\""} + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "importlib-metadata" +version = "7.1.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"}, + {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "joblib" +version = "1.3.2" +description = "Lightweight pipelining with Python functions" +optional = false +python-versions = ">=3.7" +files = [ + {file = "joblib-1.3.2-py3-none-any.whl", hash = "sha256:ef4331c65f239985f3f2220ecc87db222f08fd22097a3dd5698f693875f8cbb9"}, + {file = "joblib-1.3.2.tar.gz", hash = "sha256:92f865e621e17784e7955080b6d042489e3b8e294949cc44c6eac304f59772b1"}, +] + +[[package]] +name = "jsonref" +version = "0.2" +description = "An implementation of JSON Reference for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, +] + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0" +setuptools = "*" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] + +[[package]] +name = "langdetect" +version = "1.0.9" +description = "Language detection library ported from Google's language-detection." +optional = false +python-versions = "*" +files = [ + {file = "langdetect-1.0.9-py2-none-any.whl", hash = "sha256:7cbc0746252f19e76f77c0b1690aadf01963be835ef0cd4b56dddf2a8f1dfc2a"}, + {file = "langdetect-1.0.9.tar.gz", hash = "sha256:cbc1fef89f8d062739774bd51eda3da3274006b3661d199c2655f6b3f6d605a0"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "lxml" +version = "5.2.1" +description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." +optional = false +python-versions = ">=3.6" +files = [ + {file = "lxml-5.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1f7785f4f789fdb522729ae465adcaa099e2a3441519df750ebdccc481d961a1"}, + {file = "lxml-5.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6cc6ee342fb7fa2471bd9b6d6fdfc78925a697bf5c2bcd0a302e98b0d35bfad3"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:794f04eec78f1d0e35d9e0c36cbbb22e42d370dda1609fb03bcd7aeb458c6377"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817d420c60a5183953c783b0547d9eb43b7b344a2c46f69513d5952a78cddf3"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2213afee476546a7f37c7a9b4ad4d74b1e112a6fafffc9185d6d21f043128c81"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b070bbe8d3f0f6147689bed981d19bbb33070225373338df755a46893528104a"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e02c5175f63effbd7c5e590399c118d5db6183bbfe8e0d118bdb5c2d1b48d937"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:3dc773b2861b37b41a6136e0b72a1a44689a9c4c101e0cddb6b854016acc0aa8"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:d7520db34088c96cc0e0a3ad51a4fd5b401f279ee112aa2b7f8f976d8582606d"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:bcbf4af004f98793a95355980764b3d80d47117678118a44a80b721c9913436a"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a2b44bec7adf3e9305ce6cbfa47a4395667e744097faed97abb4728748ba7d47"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:1c5bb205e9212d0ebddf946bc07e73fa245c864a5f90f341d11ce7b0b854475d"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2c9d147f754b1b0e723e6afb7ba1566ecb162fe4ea657f53d2139bbf894d050a"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:3545039fa4779be2df51d6395e91a810f57122290864918b172d5dc7ca5bb433"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a91481dbcddf1736c98a80b122afa0f7296eeb80b72344d7f45dc9f781551f56"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2ddfe41ddc81f29a4c44c8ce239eda5ade4e7fc305fb7311759dd6229a080052"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:a7baf9ffc238e4bf401299f50e971a45bfcc10a785522541a6e3179c83eabf0a"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:31e9a882013c2f6bd2f2c974241bf4ba68c85eba943648ce88936d23209a2e01"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0a15438253b34e6362b2dc41475e7f80de76320f335e70c5528b7148cac253a1"}, + {file = "lxml-5.2.1-cp310-cp310-win32.whl", hash = "sha256:6992030d43b916407c9aa52e9673612ff39a575523c5f4cf72cdef75365709a5"}, + {file = "lxml-5.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:da052e7962ea2d5e5ef5bc0355d55007407087392cf465b7ad84ce5f3e25fe0f"}, + {file = "lxml-5.2.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:70ac664a48aa64e5e635ae5566f5227f2ab7f66a3990d67566d9907edcbbf867"}, + {file = "lxml-5.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1ae67b4e737cddc96c99461d2f75d218bdf7a0c3d3ad5604d1f5e7464a2f9ffe"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f18a5a84e16886898e51ab4b1d43acb3083c39b14c8caeb3589aabff0ee0b270"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6f2c8372b98208ce609c9e1d707f6918cc118fea4e2c754c9f0812c04ca116d"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:394ed3924d7a01b5bd9a0d9d946136e1c2f7b3dc337196d99e61740ed4bc6fe1"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d077bc40a1fe984e1a9931e801e42959a1e6598edc8a3223b061d30fbd26bbc"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:764b521b75701f60683500d8621841bec41a65eb739b8466000c6fdbc256c240"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:3a6b45da02336895da82b9d472cd274b22dc27a5cea1d4b793874eead23dd14f"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:5ea7b6766ac2dfe4bcac8b8595107665a18ef01f8c8343f00710b85096d1b53a"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:e196a4ff48310ba62e53a8e0f97ca2bca83cdd2fe2934d8b5cb0df0a841b193a"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:200e63525948e325d6a13a76ba2911f927ad399ef64f57898cf7c74e69b71095"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:dae0ed02f6b075426accbf6b2863c3d0a7eacc1b41fb40f2251d931e50188dad"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:ab31a88a651039a07a3ae327d68ebdd8bc589b16938c09ef3f32a4b809dc96ef"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:df2e6f546c4df14bc81f9498bbc007fbb87669f1bb707c6138878c46b06f6510"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5dd1537e7cc06efd81371f5d1a992bd5ab156b2b4f88834ca852de4a8ea523fa"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9b9ec9c9978b708d488bec36b9e4c94d88fd12ccac3e62134a9d17ddba910ea9"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:8e77c69d5892cb5ba71703c4057091e31ccf534bd7f129307a4d084d90d014b8"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:a8d5c70e04aac1eda5c829a26d1f75c6e5286c74743133d9f742cda8e53b9c2f"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c94e75445b00319c1fad60f3c98b09cd63fe1134a8a953dcd48989ef42318534"}, + {file = "lxml-5.2.1-cp311-cp311-win32.whl", hash = "sha256:4951e4f7a5680a2db62f7f4ab2f84617674d36d2d76a729b9a8be4b59b3659be"}, + {file = "lxml-5.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:5c670c0406bdc845b474b680b9a5456c561c65cf366f8db5a60154088c92d102"}, + {file = "lxml-5.2.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:abc25c3cab9ec7fcd299b9bcb3b8d4a1231877e425c650fa1c7576c5107ab851"}, + {file = "lxml-5.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6935bbf153f9a965f1e07c2649c0849d29832487c52bb4a5c5066031d8b44fd5"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d793bebb202a6000390a5390078e945bbb49855c29c7e4d56a85901326c3b5d9"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afd5562927cdef7c4f5550374acbc117fd4ecc05b5007bdfa57cc5355864e0a4"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0e7259016bc4345a31af861fdce942b77c99049d6c2107ca07dc2bba2435c1d9"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:530e7c04f72002d2f334d5257c8a51bf409db0316feee7c87e4385043be136af"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59689a75ba8d7ffca577aefd017d08d659d86ad4585ccc73e43edbfc7476781a"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f9737bf36262046213a28e789cc82d82c6ef19c85a0cf05e75c670a33342ac2c"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:3a74c4f27167cb95c1d4af1c0b59e88b7f3e0182138db2501c353555f7ec57f4"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:68a2610dbe138fa8c5826b3f6d98a7cfc29707b850ddcc3e21910a6fe51f6ca0"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:f0a1bc63a465b6d72569a9bba9f2ef0334c4e03958e043da1920299100bc7c08"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c2d35a1d047efd68027817b32ab1586c1169e60ca02c65d428ae815b593e65d4"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:79bd05260359170f78b181b59ce871673ed01ba048deef4bf49a36ab3e72e80b"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:865bad62df277c04beed9478fe665b9ef63eb28fe026d5dedcb89b537d2e2ea6"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:44f6c7caff88d988db017b9b0e4ab04934f11e3e72d478031efc7edcac6c622f"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:71e97313406ccf55d32cc98a533ee05c61e15d11b99215b237346171c179c0b0"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:057cdc6b86ab732cf361f8b4d8af87cf195a1f6dc5b0ff3de2dced242c2015e0"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:f3bbbc998d42f8e561f347e798b85513ba4da324c2b3f9b7969e9c45b10f6169"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:491755202eb21a5e350dae00c6d9a17247769c64dcf62d8c788b5c135e179dc4"}, + {file = "lxml-5.2.1-cp312-cp312-win32.whl", hash = "sha256:8de8f9d6caa7f25b204fc861718815d41cbcf27ee8f028c89c882a0cf4ae4134"}, + {file = "lxml-5.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:f2a9efc53d5b714b8df2b4b3e992accf8ce5bbdfe544d74d5c6766c9e1146a3a"}, + {file = "lxml-5.2.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:70a9768e1b9d79edca17890175ba915654ee1725975d69ab64813dd785a2bd5c"}, + {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c38d7b9a690b090de999835f0443d8aa93ce5f2064035dfc48f27f02b4afc3d0"}, + {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5670fb70a828663cc37552a2a85bf2ac38475572b0e9b91283dc09efb52c41d1"}, + {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:958244ad566c3ffc385f47dddde4145088a0ab893504b54b52c041987a8c1863"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:2a66bf12fbd4666dd023b6f51223aed3d9f3b40fef06ce404cb75bafd3d89536"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:9123716666e25b7b71c4e1789ec829ed18663152008b58544d95b008ed9e21e9"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:0c3f67e2aeda739d1cc0b1102c9a9129f7dc83901226cc24dd72ba275ced4218"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:5d5792e9b3fb8d16a19f46aa8208987cfeafe082363ee2745ea8b643d9cc5b45"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:88e22fc0a6684337d25c994381ed8a1580a6f5ebebd5ad41f89f663ff4ec2885"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_ppc64le.whl", hash = "sha256:21c2e6b09565ba5b45ae161b438e033a86ad1736b8c838c766146eff8ceffff9"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_s390x.whl", hash = "sha256:afbbdb120d1e78d2ba8064a68058001b871154cc57787031b645c9142b937a62"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:627402ad8dea044dde2eccde4370560a2b750ef894c9578e1d4f8ffd54000461"}, + {file = "lxml-5.2.1-cp36-cp36m-win32.whl", hash = "sha256:e89580a581bf478d8dcb97d9cd011d567768e8bc4095f8557b21c4d4c5fea7d0"}, + {file = "lxml-5.2.1-cp36-cp36m-win_amd64.whl", hash = "sha256:59565f10607c244bc4c05c0c5fa0c190c990996e0c719d05deec7030c2aa8289"}, + {file = "lxml-5.2.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:857500f88b17a6479202ff5fe5f580fc3404922cd02ab3716197adf1ef628029"}, + {file = "lxml-5.2.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56c22432809085b3f3ae04e6e7bdd36883d7258fcd90e53ba7b2e463efc7a6af"}, + {file = "lxml-5.2.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a55ee573116ba208932e2d1a037cc4b10d2c1cb264ced2184d00b18ce585b2c0"}, + {file = "lxml-5.2.1-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:6cf58416653c5901e12624e4013708b6e11142956e7f35e7a83f1ab02f3fe456"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:64c2baa7774bc22dd4474248ba16fe1a7f611c13ac6123408694d4cc93d66dbd"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:74b28c6334cca4dd704e8004cba1955af0b778cf449142e581e404bd211fb619"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:7221d49259aa1e5a8f00d3d28b1e0b76031655ca74bb287123ef56c3db92f213"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3dbe858ee582cbb2c6294dc85f55b5f19c918c2597855e950f34b660f1a5ede6"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:04ab5415bf6c86e0518d57240a96c4d1fcfc3cb370bb2ac2a732b67f579e5a04"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:6ab833e4735a7e5533711a6ea2df26459b96f9eec36d23f74cafe03631647c41"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f443cdef978430887ed55112b491f670bba6462cea7a7742ff8f14b7abb98d75"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:9e2addd2d1866fe112bc6f80117bcc6bc25191c5ed1bfbcf9f1386a884252ae8"}, + {file = "lxml-5.2.1-cp37-cp37m-win32.whl", hash = "sha256:f51969bac61441fd31f028d7b3b45962f3ecebf691a510495e5d2cd8c8092dbd"}, + {file = "lxml-5.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:b0b58fbfa1bf7367dde8a557994e3b1637294be6cf2169810375caf8571a085c"}, + {file = "lxml-5.2.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3e183c6e3298a2ed5af9d7a356ea823bccaab4ec2349dc9ed83999fd289d14d5"}, + {file = "lxml-5.2.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:804f74efe22b6a227306dd890eecc4f8c59ff25ca35f1f14e7482bbce96ef10b"}, + {file = "lxml-5.2.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:08802f0c56ed150cc6885ae0788a321b73505d2263ee56dad84d200cab11c07a"}, + {file = "lxml-5.2.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f8c09ed18ecb4ebf23e02b8e7a22a05d6411911e6fabef3a36e4f371f4f2585"}, + {file = "lxml-5.2.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3d30321949861404323c50aebeb1943461a67cd51d4200ab02babc58bd06a86"}, + {file = "lxml-5.2.1-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:b560e3aa4b1d49e0e6c847d72665384db35b2f5d45f8e6a5c0072e0283430533"}, + {file = "lxml-5.2.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:058a1308914f20784c9f4674036527e7c04f7be6fb60f5d61353545aa7fcb739"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:adfb84ca6b87e06bc6b146dc7da7623395db1e31621c4785ad0658c5028b37d7"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:417d14450f06d51f363e41cace6488519038f940676ce9664b34ebf5653433a5"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a2dfe7e2473f9b59496247aad6e23b405ddf2e12ef0765677b0081c02d6c2c0b"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bf2e2458345d9bffb0d9ec16557d8858c9c88d2d11fed53998512504cd9df49b"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:58278b29cb89f3e43ff3e0c756abbd1518f3ee6adad9e35b51fb101c1c1daaec"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:64641a6068a16201366476731301441ce93457eb8452056f570133a6ceb15fca"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:78bfa756eab503673991bdcf464917ef7845a964903d3302c5f68417ecdc948c"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:11a04306fcba10cd9637e669fd73aa274c1c09ca64af79c041aa820ea992b637"}, + {file = "lxml-5.2.1-cp38-cp38-win32.whl", hash = "sha256:66bc5eb8a323ed9894f8fa0ee6cb3e3fb2403d99aee635078fd19a8bc7a5a5da"}, + {file = "lxml-5.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:9676bfc686fa6a3fa10cd4ae6b76cae8be26eb5ec6811d2a325636c460da1806"}, + {file = "lxml-5.2.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cf22b41fdae514ee2f1691b6c3cdeae666d8b7fa9434de445f12bbeee0cf48dd"}, + {file = "lxml-5.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ec42088248c596dbd61d4ae8a5b004f97a4d91a9fd286f632e42e60b706718d7"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd53553ddad4a9c2f1f022756ae64abe16da1feb497edf4d9f87f99ec7cf86bd"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feaa45c0eae424d3e90d78823f3828e7dc42a42f21ed420db98da2c4ecf0a2cb"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddc678fb4c7e30cf830a2b5a8d869538bc55b28d6c68544d09c7d0d8f17694dc"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:853e074d4931dbcba7480d4dcab23d5c56bd9607f92825ab80ee2bd916edea53"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc4691d60512798304acb9207987e7b2b7c44627ea88b9d77489bbe3e6cc3bd4"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:beb72935a941965c52990f3a32d7f07ce869fe21c6af8b34bf6a277b33a345d3"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:6588c459c5627fefa30139be4d2e28a2c2a1d0d1c265aad2ba1935a7863a4913"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:588008b8497667f1ddca7c99f2f85ce8511f8f7871b4a06ceede68ab62dff64b"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b6787b643356111dfd4032b5bffe26d2f8331556ecb79e15dacb9275da02866e"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7c17b64b0a6ef4e5affae6a3724010a7a66bda48a62cfe0674dabd46642e8b54"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:27aa20d45c2e0b8cd05da6d4759649170e8dfc4f4e5ef33a34d06f2d79075d57"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:d4f2cc7060dc3646632d7f15fe68e2fa98f58e35dd5666cd525f3b35d3fed7f8"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff46d772d5f6f73564979cd77a4fffe55c916a05f3cb70e7c9c0590059fb29ef"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:96323338e6c14e958d775700ec8a88346014a85e5de73ac7967db0367582049b"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:52421b41ac99e9d91934e4d0d0fe7da9f02bfa7536bb4431b4c05c906c8c6919"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:7a7efd5b6d3e30d81ec68ab8a88252d7c7c6f13aaa875009fe3097eb4e30b84c"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0ed777c1e8c99b63037b91f9d73a6aad20fd035d77ac84afcc205225f8f41188"}, + {file = "lxml-5.2.1-cp39-cp39-win32.whl", hash = "sha256:644df54d729ef810dcd0f7732e50e5ad1bd0a135278ed8d6bcb06f33b6b6f708"}, + {file = "lxml-5.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:9ca66b8e90daca431b7ca1408cae085d025326570e57749695d6a01454790e95"}, + {file = "lxml-5.2.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9b0ff53900566bc6325ecde9181d89afadc59c5ffa39bddf084aaedfe3b06a11"}, + {file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd6037392f2d57793ab98d9e26798f44b8b4da2f2464388588f48ac52c489ea1"}, + {file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b9c07e7a45bb64e21df4b6aa623cb8ba214dfb47d2027d90eac197329bb5e94"}, + {file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3249cc2989d9090eeac5467e50e9ec2d40704fea9ab72f36b034ea34ee65ca98"}, + {file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f42038016852ae51b4088b2862126535cc4fc85802bfe30dea3500fdfaf1864e"}, + {file = "lxml-5.2.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:533658f8fbf056b70e434dff7e7aa611bcacb33e01f75de7f821810e48d1bb66"}, + {file = "lxml-5.2.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:622020d4521e22fb371e15f580d153134bfb68d6a429d1342a25f051ec72df1c"}, + {file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efa7b51824aa0ee957ccd5a741c73e6851de55f40d807f08069eb4c5a26b2baa"}, + {file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c6ad0fbf105f6bcc9300c00010a2ffa44ea6f555df1a2ad95c88f5656104817"}, + {file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e233db59c8f76630c512ab4a4daf5a5986da5c3d5b44b8e9fc742f2a24dbd460"}, + {file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6a014510830df1475176466b6087fc0c08b47a36714823e58d8b8d7709132a96"}, + {file = "lxml-5.2.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:d38c8f50ecf57f0463399569aa388b232cf1a2ffb8f0a9a5412d0db57e054860"}, + {file = "lxml-5.2.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5aea8212fb823e006b995c4dda533edcf98a893d941f173f6c9506126188860d"}, + {file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff097ae562e637409b429a7ac958a20aab237a0378c42dabaa1e3abf2f896e5f"}, + {file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f5d65c39f16717a47c36c756af0fb36144069c4718824b7533f803ecdf91138"}, + {file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3d0c3dd24bb4605439bf91068598d00c6370684f8de4a67c2992683f6c309d6b"}, + {file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e32be23d538753a8adb6c85bd539f5fd3b15cb987404327c569dfc5fd8366e85"}, + {file = "lxml-5.2.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:cc518cea79fd1e2f6c90baafa28906d4309d24f3a63e801d855e7424c5b34144"}, + {file = "lxml-5.2.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a0af35bd8ebf84888373630f73f24e86bf016642fb8576fba49d3d6b560b7cbc"}, + {file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8aca2e3a72f37bfc7b14ba96d4056244001ddcc18382bd0daa087fd2e68a354"}, + {file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ca1e8188b26a819387b29c3895c47a5e618708fe6f787f3b1a471de2c4a94d9"}, + {file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c8ba129e6d3b0136a0f50345b2cb3db53f6bda5dd8c7f5d83fbccba97fb5dcb5"}, + {file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e998e304036198b4f6914e6a1e2b6f925208a20e2042563d9734881150c6c246"}, + {file = "lxml-5.2.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d3be9b2076112e51b323bdf6d5a7f8a798de55fb8d95fcb64bd179460cdc0704"}, + {file = "lxml-5.2.1.tar.gz", hash = "sha256:3f7765e69bbce0906a7c74d5fe46d2c7a7596147318dbc08e4a2431f3060e306"}, +] + +[package.extras] +cssselect = ["cssselect (>=0.7)"] +html-clean = ["lxml-html-clean"] +html5 = ["html5lib"] +htmlsoup = ["BeautifulSoup4"] +source = ["Cython (>=3.0.10)"] + +[[package]] +name = "markdown" +version = "3.6" +description = "Python implementation of John Gruber's Markdown." +optional = false +python-versions = ">=3.8" +files = [ + {file = "Markdown-3.6-py3-none-any.whl", hash = "sha256:48f276f4d8cfb8ce6527c8f79e2ee29708508bf4d40aa410fbc3b4ee832c850f"}, + {file = "Markdown-3.6.tar.gz", hash = "sha256:ed4f41f6daecbeeb96e576ce414c41d2d876daa9a16cb35fa8ed8c2ddfad0224"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] +testing = ["coverage", "pyyaml"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "marshmallow" +version = "3.21.1" +description = "A lightweight library for converting complex datatypes to and from native Python datatypes." +optional = false +python-versions = ">=3.8" +files = [ + {file = "marshmallow-3.21.1-py3-none-any.whl", hash = "sha256:f085493f79efb0644f270a9bf2892843142d80d7174bbbd2f3713f2a589dc633"}, + {file = "marshmallow-3.21.1.tar.gz", hash = "sha256:4e65e9e0d80fc9e609574b9983cf32579f305c718afb30d7233ab818571768c3"}, +] + +[package.dependencies] +packaging = ">=17.0" + +[package.extras] +dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] +docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.2.6)", "sphinx-issues (==4.0.0)", "sphinx-version-warning (==1.1.2)"] +tests = ["pytest", "pytz", "simplejson"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "nltk" +version = "3.8.1" +description = "Natural Language Toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "nltk-3.8.1-py3-none-any.whl", hash = "sha256:fd5c9109f976fa86bcadba8f91e47f5e9293bd034474752e92a520f81c93dda5"}, + {file = "nltk-3.8.1.zip", hash = "sha256:1834da3d0682cba4f2cede2f9aad6b0fafb6461ba451db0efb6f9c39798d64d3"}, +] + +[package.dependencies] +click = "*" +joblib = "*" +regex = ">=2021.8.3" +tqdm = "*" + +[package.extras] +all = ["matplotlib", "numpy", "pyparsing", "python-crfsuite", "requests", "scikit-learn", "scipy", "twython"] +corenlp = ["requests"] +machine-learning = ["numpy", "python-crfsuite", "scikit-learn", "scipy"] +plot = ["matplotlib"] +tgrep = ["pyparsing"] +twitter = ["twython"] + +[[package]] +name = "numpy" +version = "1.26.4" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, +] + +[[package]] +name = "oauthlib" +version = "3.2.2" +description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" +optional = false +python-versions = ">=3.6" +files = [ + {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, + {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, +] + +[package.extras] +rsa = ["cryptography (>=3.0.0)"] +signals = ["blinker (>=1.4.0)"] +signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] + +[[package]] +name = "packaging" +version = "24.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, +] + +[[package]] +name = "pdf2image" +version = "1.16.3" +description = "A wrapper around the pdftoppm and pdftocairo command line tools to convert PDF to a PIL Image list." +optional = false +python-versions = "*" +files = [ + {file = "pdf2image-1.16.3-py3-none-any.whl", hash = "sha256:b6154164af3677211c22cbb38b2bd778b43aca02758e962fe1e231f6d3b0e380"}, + {file = "pdf2image-1.16.3.tar.gz", hash = "sha256:74208810c2cef4d9e347769b8e62a52303982ddb4f2dfd744c7ab4b940ae287e"}, +] + +[package.dependencies] +pillow = "*" + +[[package]] +name = "pdfminer-six" +version = "20221105" +description = "PDF parser and analyzer" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pdfminer.six-20221105-py3-none-any.whl", hash = "sha256:1eaddd712d5b2732f8ac8486824533514f8ba12a0787b3d5fe1e686cd826532d"}, + {file = "pdfminer.six-20221105.tar.gz", hash = "sha256:8448ab7b939d18b64820478ecac5394f482d7a79f5f7eaa7703c6c959c175e1d"}, +] + +[package.dependencies] +charset-normalizer = ">=2.0.0" +cryptography = ">=36.0.0" + +[package.extras] +dev = ["black", "mypy (==0.931)", "nox", "pytest"] +docs = ["sphinx", "sphinx-argparse"] +image = ["Pillow"] + +[[package]] +name = "pendulum" +version = "2.1.2" +description = "Python datetimes made easy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, +] + +[package.dependencies] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" + +[[package]] +name = "pillow" +version = "10.3.0" +description = "Python Imaging Library (Fork)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pillow-10.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:90b9e29824800e90c84e4022dd5cc16eb2d9605ee13f05d47641eb183cd73d45"}, + {file = "pillow-10.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a2c405445c79c3f5a124573a051062300936b0281fee57637e706453e452746c"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78618cdbccaa74d3f88d0ad6cb8ac3007f1a6fa5c6f19af64b55ca170bfa1edf"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261ddb7ca91fcf71757979534fb4c128448b5b4c55cb6152d280312062f69599"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ce49c67f4ea0609933d01c0731b34b8695a7a748d6c8d186f95e7d085d2fe475"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b14f16f94cbc61215115b9b1236f9c18403c15dd3c52cf629072afa9d54c1cbf"}, + {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d33891be6df59d93df4d846640f0e46f1a807339f09e79a8040bc887bdcd7ed3"}, + {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b50811d664d392f02f7761621303eba9d1b056fb1868c8cdf4231279645c25f5"}, + {file = "pillow-10.3.0-cp310-cp310-win32.whl", hash = "sha256:ca2870d5d10d8726a27396d3ca4cf7976cec0f3cb706debe88e3a5bd4610f7d2"}, + {file = "pillow-10.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:f0d0591a0aeaefdaf9a5e545e7485f89910c977087e7de2b6c388aec32011e9f"}, + {file = "pillow-10.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:ccce24b7ad89adb5a1e34a6ba96ac2530046763912806ad4c247356a8f33a67b"}, + {file = "pillow-10.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:5f77cf66e96ae734717d341c145c5949c63180842a545c47a0ce7ae52ca83795"}, + {file = "pillow-10.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4b878386c4bf293578b48fc570b84ecfe477d3b77ba39a6e87150af77f40c57"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdcbb4068117dfd9ce0138d068ac512843c52295ed996ae6dd1faf537b6dbc27"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9797a6c8fe16f25749b371c02e2ade0efb51155e767a971c61734b1bf6293994"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:9e91179a242bbc99be65e139e30690e081fe6cb91a8e77faf4c409653de39451"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:1b87bd9d81d179bd8ab871603bd80d8645729939f90b71e62914e816a76fc6bd"}, + {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:81d09caa7b27ef4e61cb7d8fbf1714f5aec1c6b6c5270ee53504981e6e9121ad"}, + {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:048ad577748b9fa4a99a0548c64f2cb8d672d5bf2e643a739ac8faff1164238c"}, + {file = "pillow-10.3.0-cp311-cp311-win32.whl", hash = "sha256:7161ec49ef0800947dc5570f86568a7bb36fa97dd09e9827dc02b718c5643f09"}, + {file = "pillow-10.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:8eb0908e954d093b02a543dc963984d6e99ad2b5e36503d8a0aaf040505f747d"}, + {file = "pillow-10.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:4e6f7d1c414191c1199f8996d3f2282b9ebea0945693fb67392c75a3a320941f"}, + {file = "pillow-10.3.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:e46f38133e5a060d46bd630faa4d9fa0202377495df1f068a8299fd78c84de84"}, + {file = "pillow-10.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:50b8eae8f7334ec826d6eeffaeeb00e36b5e24aa0b9df322c247539714c6df19"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d3bea1c75f8c53ee4d505c3e67d8c158ad4df0d83170605b50b64025917f338"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19aeb96d43902f0a783946a0a87dbdad5c84c936025b8419da0a0cd7724356b1"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:74d28c17412d9caa1066f7a31df8403ec23d5268ba46cd0ad2c50fb82ae40462"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:ff61bfd9253c3915e6d41c651d5f962da23eda633cf02262990094a18a55371a"}, + {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d886f5d353333b4771d21267c7ecc75b710f1a73d72d03ca06df49b09015a9ef"}, + {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b5ec25d8b17217d635f8935dbc1b9aa5907962fae29dff220f2659487891cd3"}, + {file = "pillow-10.3.0-cp312-cp312-win32.whl", hash = "sha256:51243f1ed5161b9945011a7360e997729776f6e5d7005ba0c6879267d4c5139d"}, + {file = "pillow-10.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:412444afb8c4c7a6cc11a47dade32982439925537e483be7c0ae0cf96c4f6a0b"}, + {file = "pillow-10.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:798232c92e7665fe82ac085f9d8e8ca98826f8e27859d9a96b41d519ecd2e49a"}, + {file = "pillow-10.3.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:4eaa22f0d22b1a7e93ff0a596d57fdede2e550aecffb5a1ef1106aaece48e96b"}, + {file = "pillow-10.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cd5e14fbf22a87321b24c88669aad3a51ec052eb145315b3da3b7e3cc105b9a2"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1530e8f3a4b965eb6a7785cf17a426c779333eb62c9a7d1bbcf3ffd5bf77a4aa"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d512aafa1d32efa014fa041d38868fda85028e3f930a96f85d49c7d8ddc0383"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:339894035d0ede518b16073bdc2feef4c991ee991a29774b33e515f1d308e08d"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:aa7e402ce11f0885305bfb6afb3434b3cd8f53b563ac065452d9d5654c7b86fd"}, + {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0ea2a783a2bdf2a561808fe4a7a12e9aa3799b701ba305de596bc48b8bdfce9d"}, + {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c78e1b00a87ce43bb37642c0812315b411e856a905d58d597750eb79802aaaa3"}, + {file = "pillow-10.3.0-cp38-cp38-win32.whl", hash = "sha256:72d622d262e463dfb7595202d229f5f3ab4b852289a1cd09650362db23b9eb0b"}, + {file = "pillow-10.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:2034f6759a722da3a3dbd91a81148cf884e91d1b747992ca288ab88c1de15999"}, + {file = "pillow-10.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2ed854e716a89b1afcedea551cd85f2eb2a807613752ab997b9974aaa0d56936"}, + {file = "pillow-10.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dc1a390a82755a8c26c9964d457d4c9cbec5405896cba94cf51f36ea0d855002"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4203efca580f0dd6f882ca211f923168548f7ba334c189e9eab1178ab840bf60"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3102045a10945173d38336f6e71a8dc71bcaeed55c3123ad4af82c52807b9375"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6fb1b30043271ec92dc65f6d9f0b7a830c210b8a96423074b15c7bc999975f57"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:1dfc94946bc60ea375cc39cff0b8da6c7e5f8fcdc1d946beb8da5c216156ddd8"}, + {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b09b86b27a064c9624d0a6c54da01c1beaf5b6cadfa609cf63789b1d08a797b9"}, + {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d3b2348a78bc939b4fed6552abfd2e7988e0f81443ef3911a4b8498ca084f6eb"}, + {file = "pillow-10.3.0-cp39-cp39-win32.whl", hash = "sha256:45ebc7b45406febf07fef35d856f0293a92e7417ae7933207e90bf9090b70572"}, + {file = "pillow-10.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:0ba26351b137ca4e0db0342d5d00d2e355eb29372c05afd544ebf47c0956ffeb"}, + {file = "pillow-10.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:50fd3f6b26e3441ae07b7c979309638b72abc1a25da31a81a7fbd9495713ef4f"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6b02471b72526ab8a18c39cb7967b72d194ec53c1fd0a70b050565a0f366d355"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8ab74c06ffdab957d7670c2a5a6e1a70181cd10b727cd788c4dd9005b6a8acd9"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:048eeade4c33fdf7e08da40ef402e748df113fd0b4584e32c4af74fe78baaeb2"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e2ec1e921fd07c7cda7962bad283acc2f2a9ccc1b971ee4b216b75fad6f0463"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c8e73e99da7db1b4cad7f8d682cf6abad7844da39834c288fbfa394a47bbced"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:16563993329b79513f59142a6b02055e10514c1a8e86dca8b48a893e33cf91e3"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:dd78700f5788ae180b5ee8902c6aea5a5726bac7c364b202b4b3e3ba2d293170"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:aff76a55a8aa8364d25400a210a65ff59d0168e0b4285ba6bf2bd83cf675ba32"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b7bc2176354defba3edc2b9a777744462da2f8e921fbaf61e52acb95bafa9828"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:793b4e24db2e8742ca6423d3fde8396db336698c55cd34b660663ee9e45ed37f"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d93480005693d247f8346bc8ee28c72a2191bdf1f6b5db469c096c0c867ac015"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c83341b89884e2b2e55886e8fbbf37c3fa5efd6c8907124aeb72f285ae5696e5"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1a1d1915db1a4fdb2754b9de292642a39a7fb28f1736699527bb649484fb966a"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a0eaa93d054751ee9964afa21c06247779b90440ca41d184aeb5d410f20ff591"}, + {file = "pillow-10.3.0.tar.gz", hash = "sha256:9d2455fbf44c914840c793e89aa82d0e1763a14253a000743719ae5946814b2d"}, +] + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] +fpx = ["olefile"] +mic = ["olefile"] +tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] +typing = ["typing-extensions"] +xmp = ["defusedxml"] + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "proto-plus" +version = "1.23.0" +description = "Beautiful, Pythonic protocol buffers." +optional = false +python-versions = ">=3.6" +files = [ + {file = "proto-plus-1.23.0.tar.gz", hash = "sha256:89075171ef11988b3fa157f5dbd8b9cf09d65fffee97e29ce403cd8defba19d2"}, + {file = "proto_plus-1.23.0-py3-none-any.whl", hash = "sha256:a829c79e619e1cf632de091013a4173deed13a55f326ef84f05af6f50ff4c82c"}, +] + +[package.dependencies] +protobuf = ">=3.19.0,<5.0.0dev" + +[package.extras] +testing = ["google-api-core[grpc] (>=1.31.5)"] + +[[package]] +name = "protobuf" +version = "4.25.3" +description = "" +optional = false +python-versions = ">=3.8" +files = [ + {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"}, + {file = "protobuf-4.25.3-cp310-abi3-win_amd64.whl", hash = "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8"}, + {file = "protobuf-4.25.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d"}, + {file = "protobuf-4.25.3-cp38-cp38-win32.whl", hash = "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"}, + {file = "protobuf-4.25.3-cp38-cp38-win_amd64.whl", hash = "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4"}, + {file = "protobuf-4.25.3-cp39-cp39-win32.whl", hash = "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4"}, + {file = "protobuf-4.25.3-cp39-cp39-win_amd64.whl", hash = "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c"}, + {file = "protobuf-4.25.3-py3-none-any.whl", hash = "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9"}, + {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"}, +] + +[[package]] +name = "pyarrow" +version = "15.0.2" +description = "Python library for Apache Arrow" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyarrow-15.0.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:88b340f0a1d05b5ccc3d2d986279045655b1fe8e41aba6ca44ea28da0d1455d8"}, + {file = "pyarrow-15.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eaa8f96cecf32da508e6c7f69bb8401f03745c050c1dd42ec2596f2e98deecac"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23c6753ed4f6adb8461e7c383e418391b8d8453c5d67e17f416c3a5d5709afbd"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f639c059035011db8c0497e541a8a45d98a58dbe34dc8fadd0ef128f2cee46e5"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:290e36a59a0993e9a5224ed2fb3e53375770f07379a0ea03ee2fce2e6d30b423"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:06c2bb2a98bc792f040bef31ad3e9be6a63d0cb39189227c08a7d955db96816e"}, + {file = "pyarrow-15.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:f7a197f3670606a960ddc12adbe8075cea5f707ad7bf0dffa09637fdbb89f76c"}, + {file = "pyarrow-15.0.2-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:5f8bc839ea36b1f99984c78e06e7a06054693dc2af8920f6fb416b5bca9944e4"}, + {file = "pyarrow-15.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f5e81dfb4e519baa6b4c80410421528c214427e77ca0ea9461eb4097c328fa33"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a4f240852b302a7af4646c8bfe9950c4691a419847001178662a98915fd7ee7"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e7d9cfb5a1e648e172428c7a42b744610956f3b70f524aa3a6c02a448ba853e"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2d4f905209de70c0eb5b2de6763104d5a9a37430f137678edfb9a675bac9cd98"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:90adb99e8ce5f36fbecbbc422e7dcbcbed07d985eed6062e459e23f9e71fd197"}, + {file = "pyarrow-15.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:b116e7fd7889294cbd24eb90cd9bdd3850be3738d61297855a71ac3b8124ee38"}, + {file = "pyarrow-15.0.2-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:25335e6f1f07fdaa026a61c758ee7d19ce824a866b27bba744348fa73bb5a440"}, + {file = "pyarrow-15.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:90f19e976d9c3d8e73c80be84ddbe2f830b6304e4c576349d9360e335cd627fc"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a22366249bf5fd40ddacc4f03cd3160f2d7c247692945afb1899bab8a140ddfb"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2a335198f886b07e4b5ea16d08ee06557e07db54a8400cc0d03c7f6a22f785f"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:3e6d459c0c22f0b9c810a3917a1de3ee704b021a5fb8b3bacf968eece6df098f"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:033b7cad32198754d93465dcfb71d0ba7cb7cd5c9afd7052cab7214676eec38b"}, + {file = "pyarrow-15.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:29850d050379d6e8b5a693098f4de7fd6a2bea4365bfd073d7c57c57b95041ee"}, + {file = "pyarrow-15.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:7167107d7fb6dcadb375b4b691b7e316f4368f39f6f45405a05535d7ad5e5058"}, + {file = "pyarrow-15.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e85241b44cc3d365ef950432a1b3bd44ac54626f37b2e3a0cc89c20e45dfd8bf"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:248723e4ed3255fcd73edcecc209744d58a9ca852e4cf3d2577811b6d4b59818"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ff3bdfe6f1b81ca5b73b70a8d482d37a766433823e0c21e22d1d7dde76ca33f"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:f3d77463dee7e9f284ef42d341689b459a63ff2e75cee2b9302058d0d98fe142"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:8c1faf2482fb89766e79745670cbca04e7018497d85be9242d5350cba21357e1"}, + {file = "pyarrow-15.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:28f3016958a8e45a1069303a4a4f6a7d4910643fc08adb1e2e4a7ff056272ad3"}, + {file = "pyarrow-15.0.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:89722cb64286ab3d4daf168386f6968c126057b8c7ec3ef96302e81d8cdb8ae4"}, + {file = "pyarrow-15.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cd0ba387705044b3ac77b1b317165c0498299b08261d8122c96051024f953cd5"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad2459bf1f22b6a5cdcc27ebfd99307d5526b62d217b984b9f5c974651398832"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58922e4bfece8b02abf7159f1f53a8f4d9f8e08f2d988109126c17c3bb261f22"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:adccc81d3dc0478ea0b498807b39a8d41628fa9210729b2f718b78cb997c7c91"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:8bd2baa5fe531571847983f36a30ddbf65261ef23e496862ece83bdceb70420d"}, + {file = "pyarrow-15.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6669799a1d4ca9da9c7e06ef48368320f5856f36f9a4dd31a11839dda3f6cc8c"}, + {file = "pyarrow-15.0.2.tar.gz", hash = "sha256:9c9bc803cb3b7bfacc1e96ffbfd923601065d9d3f911179d81e72d99fd74a3d9"}, +] + +[package.dependencies] +numpy = ">=1.16.6,<2" + +[[package]] +name = "pyasn1" +version = "0.6.0" +description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"}, + {file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"}, +] + +[[package]] +name = "pyasn1-modules" +version = "0.4.0" +description = "A collection of ASN.1-based protocols modules" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"}, + {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"}, +] + +[package.dependencies] +pyasn1 = ">=0.4.6,<0.7.0" + +[[package]] +name = "pycparser" +version = "2.22" +description = "C parser in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, +] + +[[package]] +name = "pydantic" +version = "1.10.14" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, + {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, + {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, + {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, + {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, + {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, + {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, + {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, + {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, + {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pyparsing" +version = "3.1.2" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +optional = false +python-versions = ">=3.6.8" +files = [ + {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, + {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, +] + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + +[[package]] +name = "pyrsistent" +version = "0.20.0" +description = "Persistent/Functional/Immutable data structures" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, +] + +[[package]] +name = "pytesseract" +version = "0.3.10" +description = "Python-tesseract is a python wrapper for Google's Tesseract-OCR" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytesseract-0.3.10-py3-none-any.whl", hash = "sha256:8f22cc98f765bf13517ead0c70effedb46c153540d25783e04014f28b55a5fc6"}, + {file = "pytesseract-0.3.10.tar.gz", hash = "sha256:f1c3a8b0f07fd01a1085d451f5b8315be6eec1d5577a6796d46dc7a62bd4120f"}, +] + +[package.dependencies] +packaging = ">=21.3" +Pillow = ">=8.0.0" + +[[package]] +name = "pytest" +version = "7.4.4" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-mock" +version = "3.14.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, +] + +[package.dependencies] +pytest = ">=6.2.5" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-docx" +version = "1.1.0" +description = "Create, read, and update Microsoft Word .docx files." +optional = false +python-versions = ">=3.7" +files = [ + {file = "python-docx-1.1.0.tar.gz", hash = "sha256:5829b722141cf1ab79aedf0c34d9fe9924b29764584c0f2164eb2b02dcdf17c9"}, + {file = "python_docx-1.1.0-py3-none-any.whl", hash = "sha256:bac9773278098a1ddc43a52d84e22f5909c4a3080a624530b3ecb3771b07c6cd"}, +] + +[package.dependencies] +lxml = ">=3.1.0" +typing-extensions = "*" + +[[package]] +name = "python-iso639" +version = "2024.2.7" +description = "Look-up utilities for ISO 639 language codes and names" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-iso639-2024.2.7.tar.gz", hash = "sha256:c323233348c34d57c601e3e6d824088e492896bcb97a61a87f7d93401a305377"}, + {file = "python_iso639-2024.2.7-py3-none-any.whl", hash = "sha256:7b149623ff74230f4ee3061fb01d18e57a8d07c5fee2aa72907f39b7f6d16cbc"}, +] + +[package.extras] +dev = ["black (==24.1.1)", "build (==1.0.3)", "flake8 (==7.0.0)", "pytest (==8.0.0)", "twine (==4.0.2)"] + +[[package]] +name = "python-magic" +version = "0.4.27" +description = "File type identification using libmagic" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "python-magic-0.4.27.tar.gz", hash = "sha256:c1ba14b08e4a5f5c31a302b7721239695b2f0f058d125bd5ce1ee36b9d9d3c3b"}, + {file = "python_magic-0.4.27-py2.py3-none-any.whl", hash = "sha256:c212960ad306f700aa0d01e5d7a325d20548ff97eb9920dcd29513174f0294d3"}, +] + +[[package]] +name = "python-pptx" +version = "0.6.21" +description = "Generate and manipulate Open XML PowerPoint (.pptx) files" +optional = false +python-versions = "*" +files = [ + {file = "python-pptx-0.6.21.tar.gz", hash = "sha256:7798a2aaf89563565b3c7120c0acfe9aff775db0db3580544e3bf4840c2e378f"}, +] + +[package.dependencies] +lxml = ">=3.1.0" +Pillow = ">=3.3.2" +XlsxWriter = ">=0.5.7" + +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "rapidfuzz" +version = "3.7.0" +description = "rapid fuzzy string matching" +optional = false +python-versions = ">=3.8" +files = [ + {file = "rapidfuzz-3.7.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:860f438238f1807532aa5c5c25e74c284232ccc115fe84697b78e25d48f364f7"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4bb9285abeb0477cdb2f8ea0cf7fd4b5f72ed5a9a7d3f0c0bb4a5239db2fc1ed"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:08671280e0c04d2bb3f39511f13cae5914e6690036fd1eefc3d47a47f9fae634"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04bae4d9c16ce1bab6447d196fb8258d98139ed8f9b288a38b84887985e4227b"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1efa2268b51b68156fb84d18ca1720311698a58051c4a19c40d670057ce60519"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:600b4d4315f33ec0356c0dab3991a5d5761102420bcff29e0773706aa48936e8"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18bc2f13c73d5d34499ff6ada55b052c445d3aa64d22c2639e5ab45472568046"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e11c5e6593be41a555475c9c20320342c1f5585d635a064924956944c465ad4"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d7878025248b99ccca3285891899373f98548f2ca13835d83619ffc42241c626"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b4a7e37fe136022d944374fcd8a2f72b8a19f7b648d2cdfb946667e9ede97f9f"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b5881856f830351aaabd869151124f64a80bf61560546d9588a630a4e933a5de"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:c788b11565cc176fab8fab6dfcd469031e906927db94bf7e422afd8ef8f88a5a"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9e17a3092e74025d896ef1d67ac236c83494da37a78ef84c712e4e2273c115f1"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-win32.whl", hash = "sha256:e499c823206c9ffd9d89aa11f813a4babdb9219417d4efe4c8a6f8272da00e98"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:91f798cc00cd94a0def43e9befc6e867c9bd8fa8f882d1eaa40042f528b7e2c7"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-win_arm64.whl", hash = "sha256:d5a3872f35bec89f07b993fa1c5401d11b9e68bcdc1b9737494e279308a38a5f"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ef6b6ab64c4c91c57a6b58e1d690b59453bfa1f1e9757a7e52e59b4079e36631"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2f9070b42c0ba030b045bba16a35bdb498a0d6acb0bdb3ff4e325960e685e290"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:63044c63565f50818d885bfcd40ac369947da4197de56b4d6c26408989d48edf"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49b0c47860c733a3d73a4b70b97b35c8cbf24ef24f8743732f0d1c412a8c85de"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1b14489b038f007f425a06fcf28ac6313c02cb603b54e3a28d9cfae82198cc0"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be08f39e397a618aab907887465d7fabc2d1a4d15d1a67cb8b526a7fb5202a3e"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16895dc62a7b92028f9c8b6d22830f1cbc77306ee794f461afc6028e1a8d7539"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:579cce49dfa57ffd8c8227b3fb53cced54b4df70cec502e63e9799b4d1f44004"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:40998c8dc35fdd221790b8b5134a8d7499adbfab9a5dd9ec626c7e92e17a43ed"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:dc3fdb4738a6b83ae27f1d8923b00d3a9c2b5c50da75b9f8b81841839c6e3e1f"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:92b8146fbfb37ac358ef7e0f6b79619e4f793fbbe894b99ea87920f9c0a9d77d"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:1dfceaa7c2914585bb8a043265c39ec09078f13fbf53b5525722fc074306b6fa"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f332d61f51b0b9c8b55a0fb052b4764b6ad599ea8ce948ac47a4388e9083c35e"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-win32.whl", hash = "sha256:dfd1e4819f1f3c47141f86159b44b7360ecb19bf675080b3b40437bf97273ab9"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:594b9c33fc1a86784962043ee3fbaaed875fbaadff72e467c2f7a83cd6c5d69d"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-win_arm64.whl", hash = "sha256:0b13a6823a1b83ae43f8bf35955df35032bee7bec0daf9b5ab836e0286067434"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:075a419a0ec29be44b3d7f4bcfa5cb7e91e419379a85fc05eb33de68315bd96f"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:51a5b96d2081c3afbef1842a61d63e55d0a5a201473e6975a80190ff2d6f22ca"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a9460d8fddac7ea46dff9298eee9aa950dbfe79f2eb509a9f18fbaefcd10894c"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f39eb1513ee139ba6b5c01fe47ddf2d87e9560dd7fdee1068f7f6efbae70de34"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eace9fdde58a425d4c9a93021b24a0cac830df167a5b2fc73299e2acf9f41493"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0cc77237242303733de47829028a0a8b6ab9188b23ec9d9ff0a674fdcd3c8e7f"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:74e692357dd324dff691d379ef2c094c9ec526c0ce83ed43a066e4e68fe70bf6"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f2075ac9ee5c15d33d24a1efc8368d095602b5fd9634c5b5f24d83e41903528"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5a8ba64d72329a940ff6c74b721268c2004eecc48558f648a38e96915b5d1c1b"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a1f268a2a37cd22573b4a06eccd481c04504b246d3cadc2d8e8dfa64b575636d"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:42c2e8a2341363c7caf276efdbe1a673fc5267a02568c47c8e980f12e9bc8727"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:a9acca34b34fb895ee6a84c436bb919f3b9cd8f43e7003d43e9573a1d990ff74"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9bad6a0fe3bc1753dacaa6229a8ba7d9844eb7ae24d44d17c5f4c51c91a8a95e"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-win32.whl", hash = "sha256:c86bc4b1d2380739e6485396195e30021df509b4923f3f757914e171587bce7c"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:d7361608c8e73a1dc0203a87d151cddebdade0098a047c46da43c469c07df964"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-win_arm64.whl", hash = "sha256:8fdc26e7863e0f63c2185d53bb61f5173ad4451c1c8287b535b30ea25a419a5a"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9b6167468f76779a14b9af66210f68741af94d32d086f19118de4e919f00585c"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5bd394e28ff221557ea4d8152fcec3e66d9f620557feca5f2bedc4c21f8cf2f9"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8e70f876ca89a6df344f8157ac60384e8c05a0dfb442da2490c3f1c45238ccf5"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c837f89d86a5affe9ee6574dad6b195475676a6ab171a67920fc99966f2ab2c"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cda4550a98658f9a8bcdc03d0498ed1565c1563880e3564603a9eaae28d51b2a"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecd70212fd9f1f8b1d3bdd8bcb05acc143defebd41148bdab43e573b043bb241"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:187db4cc8fb54f8c49c67b7f38ef3a122ce23be273032fa2ff34112a2694c3d8"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4604dfc1098920c4eb6d0c6b5cc7bdd4bf95b48633e790c1d3f100a25870691d"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01581b688c5f4f6665b779135e32db0edab1d78028abf914bb91469928efa383"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0828b55ec8ad084febdf4ab0c942eb1f81c97c0935f1cb0be0b4ea84ce755988"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:150c98b65faff17b917b9d36bff8a4d37b6173579c6bc2e38ff2044e209d37a4"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7e4eea225d2bff1aff4c85fcc44716596d3699374d99eb5906b7a7560297460e"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7bc944d7e830cfce0f8b4813875f05904207017b66e25ab7ee757507001310a9"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-win32.whl", hash = "sha256:3e55f02105c451ab6ff0edaaba57cab1b6c0a0241cfb2b306d4e8e1503adba50"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:41851620d2900791d66d9b6092fc163441d7dd91a460c73b07957ff1c517bc30"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e8041c6b2d339766efe6298fa272f79d6dd799965df364ef4e50f488c101c899"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4e09d81008e212fc824ea23603ff5270d75886e72372fa6c7c41c1880bcb57ed"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:419c8961e861fb5fc5590056c66a279623d1ea27809baea17e00cdc313f1217a"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1522eaab91b9400b3ef16eebe445940a19e70035b5bc5d98aef23d66e9ac1df0"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:611278ce3136f4544d596af18ab8849827d64372e1d8888d9a8d071bf4a3f44d"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4efa9bfc5b955b6474ee077eee154e240441842fa304f280b06e6b6aa58a1d1e"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0cc9d3c8261457af3f8756b1f71a9fdc4892978a9e8b967976d2803e08bf972"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce728e2b582fd396bc2559160ee2e391e6a4b5d2e455624044699d96abe8a396"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3a6a36c9299e059e0bee3409218bc5235a46570c20fc980cdee5ed21ea6110ad"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9ea720db8def684c1eb71dadad1f61c9b52f4d979263eb5d443f2b22b0d5430a"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:358692f1df3f8aebcd48e69c77c948c9283b44c0efbaf1eeea01739efe3cd9a6"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:faded69ffe79adcefa8da08f414a0fd52375e2b47f57be79471691dad9656b5a"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7f9f3dc14fadbd553975f824ac48c381f42192cec9d7e5711b528357662a8d8e"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-win32.whl", hash = "sha256:7be5f460ff42d7d27729115bfe8a02e83fa0284536d8630ee900d17b75c29e65"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:dd5ad2c12dab2b98340c4b7b9592c8f349730bda9a2e49675ea592bbcbc1360b"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-win_arm64.whl", hash = "sha256:aa163257a0ac4e70f9009d25e5030bdd83a8541dfa3ba78dc86b35c9e16a80b4"}, + {file = "rapidfuzz-3.7.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4e50840a8a8e0229563eeaf22e21a203359859557db8829f4d0285c17126c5fb"}, + {file = "rapidfuzz-3.7.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:632f09e19365ace5ff2670008adc8bf23d03d668b03a30230e5b60ff9317ee93"}, + {file = "rapidfuzz-3.7.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:209dda6ae66b702f74a78cef555397cdc2a83d7f48771774a20d2fc30808b28c"}, + {file = "rapidfuzz-3.7.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bc0b78572626af6ab134895e4dbfe4f4d615d18dcc43b8d902d8e45471aabba"}, + {file = "rapidfuzz-3.7.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7ba14850cc8258b3764ea16b8a4409ac2ba16d229bde7a5f495dd479cd9ccd56"}, + {file = "rapidfuzz-3.7.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b917764fd2b267addc9d03a96d26f751f6117a95f617428c44a069057653b528"}, + {file = "rapidfuzz-3.7.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1252ca156e1b053e84e5ae1c8e9e062ee80468faf23aa5c543708212a42795fd"}, + {file = "rapidfuzz-3.7.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:86c7676a32d7524e40bc73546e511a408bc831ae5b163029d325ea3a2027d089"}, + {file = "rapidfuzz-3.7.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20e7d729af2e5abb29caa070ec048aba042f134091923d9ca2ac662b5604577e"}, + {file = "rapidfuzz-3.7.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:86eea3e6c314a9238de568254a9c591ec73c2985f125675ed5f171d869c47773"}, + {file = "rapidfuzz-3.7.0.tar.gz", hash = "sha256:620df112c39c6d27316dc1e22046dc0382d6d91fd60d7c51bd41ca0333d867e9"}, +] + +[package.extras] +full = ["numpy"] + +[[package]] +name = "regex" +version = "2023.12.25" +description = "Alternative regular expression module, to replace re." +optional = false +python-versions = ">=3.7" +files = [ + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"}, + {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"}, + {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"}, + {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"}, + {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"}, + {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"}, + {file = "regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"}, + {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"}, + {file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = "sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"}, + {file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"}, + {file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"}, + {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"}, + {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"}, + {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"}, + {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-cache" +version = "1.2.0" +description = "A persistent cache for python requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, +] + +[package.dependencies] +attrs = ">=21.2" +cattrs = ">=22.2" +platformdirs = ">=2.5" +requests = ">=2.22" +url-normalize = ">=1.4" +urllib3 = ">=1.25.5" + +[package.extras] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] +bson = ["bson (>=0.5)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] +dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] +json = ["ujson (>=5.4)"] +mongodb = ["pymongo (>=3)"] +redis = ["redis (>=3)"] +security = ["itsdangerous (>=2.0)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "requests-oauthlib" +version = "2.0.0" +description = "OAuthlib authentication support for Requests." +optional = false +python-versions = ">=3.4" +files = [ + {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"}, + {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"}, +] + +[package.dependencies] +oauthlib = ">=3.0.0" +requests = ">=2.0.0" + +[package.extras] +rsa = ["oauthlib[signedtoken] (>=3.0.0)"] + +[[package]] +name = "rsa" +version = "4.9" +description = "Pure-Python RSA implementation" +optional = false +python-versions = ">=3.6,<4" +files = [ + {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, + {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, +] + +[package.dependencies] +pyasn1 = ">=0.1.3" + +[[package]] +name = "setuptools" +version = "69.2.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "soupsieve" +version = "2.5" +description = "A modern CSS selector implementation for Beautiful Soup." +optional = false +python-versions = ">=3.8" +files = [ + {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, + {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, +] + +[[package]] +name = "tabulate" +version = "0.9.0" +description = "Pretty-print tabular data" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, + {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, +] + +[package.extras] +widechars = ["wcwidth"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "tqdm" +version = "4.66.2" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tqdm-4.66.2-py3-none-any.whl", hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"}, + {file = "tqdm-4.66.2.tar.gz", hash = "sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + +[[package]] +name = "typing-extensions" +version = "4.10.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, +] + +[[package]] +name = "typing-inspect" +version = "0.9.0" +description = "Runtime inspection utilities for typing module." +optional = false +python-versions = "*" +files = [ + {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, + {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, +] + +[package.dependencies] +mypy-extensions = ">=0.3.0" +typing-extensions = ">=3.7.4" + +[[package]] +name = "unstructured" +version = "0.10.27" +description = "A library that prepares raw documents for downstream ML tasks." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "unstructured-0.10.27-py3-none-any.whl", hash = "sha256:3a8a8e44302388ddc39c184059e8b4458f1cdc58032540b9af7d85f6c3eca3be"}, + {file = "unstructured-0.10.27.tar.gz", hash = "sha256:f567b5c4385993a9ab48db5563dd7b413aac4f2002bb22e6250496ea8f440f5e"}, +] + +[package.dependencies] +backoff = "*" +beautifulsoup4 = "*" +chardet = "*" +dataclasses-json = "*" +emoji = "*" +filetype = "*" +langdetect = "*" +lxml = "*" +nltk = "*" +numpy = "*" +python-docx = {version = ">=1.0.1", optional = true, markers = "extra == \"docx\""} +python-iso639 = "*" +python-magic = "*" +python-pptx = {version = "<=0.6.21", optional = true, markers = "extra == \"pptx\""} +rapidfuzz = "*" +requests = "*" +tabulate = "*" +typing-extensions = "*" + +[package.extras] +airtable = ["pyairtable"] +all-docs = ["markdown", "msg-parser", "networkx", "onnx", "openpyxl", "pandas", "pdf2image", "pdfminer.six", "pypandoc", "python-docx (>=1.0.1)", "python-pptx (<=0.6.21)", "unstructured-inference (==0.7.10)", "unstructured.pytesseract (>=0.3.12)", "xlrd"] +azure = ["adlfs", "fsspec (==2023.9.1)"] +azure-cognitive-search = ["azure-search-documents"] +bedrock = ["boto3", "langchain"] +biomed = ["bs4"] +box = ["boxfs", "fsspec (==2023.9.1)"] +confluence = ["atlassian-python-api"] +csv = ["pandas"] +delta-table = ["deltalake", "fsspec (==2023.9.1)"] +discord = ["discord-py"] +doc = ["python-docx (>=1.0.1)"] +docx = ["python-docx (>=1.0.1)"] +dropbox = ["dropboxdrivefs", "fsspec (==2023.9.1)"] +elasticsearch = ["elasticsearch", "jq"] +embed-huggingface = ["huggingface", "langchain", "sentence-transformers"] +epub = ["pypandoc"] +gcs = ["bs4", "fsspec (==2023.9.1)", "gcsfs"] +github = ["pygithub (>1.58.0)"] +gitlab = ["python-gitlab"] +google-drive = ["google-api-python-client"] +huggingface = ["langdetect", "sacremoses", "sentencepiece", "torch", "transformers"] +image = ["onnx", "pdf2image", "pdfminer.six", "unstructured-inference (==0.7.10)", "unstructured.pytesseract (>=0.3.12)"] +jira = ["atlassian-python-api"] +local-inference = ["markdown", "msg-parser", "networkx", "onnx", "openpyxl", "pandas", "pdf2image", "pdfminer.six", "pypandoc", "python-docx (>=1.0.1)", "python-pptx (<=0.6.21)", "unstructured-inference (==0.7.10)", "unstructured.pytesseract (>=0.3.12)", "xlrd"] +md = ["markdown"] +msg = ["msg-parser"] +notion = ["htmlBuilder", "notion-client"] +odt = ["pypandoc", "python-docx (>=1.0.1)"] +onedrive = ["Office365-REST-Python-Client (<2.4.3)", "bs4", "msal"] +openai = ["langchain", "openai", "tiktoken"] +org = ["pypandoc"] +outlook = ["Office365-REST-Python-Client (<2.4.3)", "msal"] +paddleocr = ["unstructured.paddleocr (==2.6.1.3)"] +pdf = ["onnx", "pdf2image", "pdfminer.six", "unstructured-inference (==0.7.10)", "unstructured.pytesseract (>=0.3.12)"] +ppt = ["python-pptx (<=0.6.21)"] +pptx = ["python-pptx (<=0.6.21)"] +reddit = ["praw"] +rst = ["pypandoc"] +rtf = ["pypandoc"] +s3 = ["fsspec (==2023.9.1)", "s3fs"] +salesforce = ["simple-salesforce"] +sharepoint = ["Office365-REST-Python-Client (<2.4.3)", "msal"] +slack = ["slack-sdk"] +tsv = ["pandas"] +wikipedia = ["wikipedia"] +xlsx = ["networkx", "openpyxl", "pandas", "xlrd"] + +[[package]] +name = "unstructured-pytesseract" +version = "0.3.12" +description = "Python-tesseract is a python wrapper for Google's Tesseract-OCR" +optional = false +python-versions = ">=3.8" +files = [ + {file = "unstructured.pytesseract-0.3.12-py3-none-any.whl", hash = "sha256:6ed42530fc697bb08d1ae4884cc517ee808620c1c1414efe8d5d90334da068d3"}, + {file = "unstructured.pytesseract-0.3.12.tar.gz", hash = "sha256:751a21d67b1f109036bf4daf796d3e04631697a355efd650f3373412b249de2e"}, +] + +[package.dependencies] +packaging = ">=21.3" +Pillow = ">=8.0.0" + +[[package]] +name = "uritemplate" +version = "4.1.1" +description = "Implementation of RFC 6570 URI Templates" +optional = false +python-versions = ">=3.6" +files = [ + {file = "uritemplate-4.1.1-py2.py3-none-any.whl", hash = "sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e"}, + {file = "uritemplate-4.1.1.tar.gz", hash = "sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0"}, +] + +[[package]] +name = "url-normalize" +version = "1.4.3" +description = "URL normalization for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, + {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wcmatch" +version = "8.4" +description = "Wildcard/glob file name matcher." +optional = false +python-versions = ">=3.7" +files = [ + {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, + {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, +] + +[package.dependencies] +bracex = ">=2.1.1" + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[[package]] +name = "xlsxwriter" +version = "3.2.0" +description = "A Python module for creating Excel XLSX files." +optional = false +python-versions = ">=3.6" +files = [ + {file = "XlsxWriter-3.2.0-py3-none-any.whl", hash = "sha256:ecfd5405b3e0e228219bcaf24c2ca0915e012ca9464a14048021d21a995d490e"}, + {file = "XlsxWriter-3.2.0.tar.gz", hash = "sha256:9977d0c661a72866a61f9f7a809e25ebbb0fb7036baa3b9fe74afcfca6b3cb8c"}, +] + +[[package]] +name = "zipp" +version = "3.18.1" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, + {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9,<3.12" +content-hash = "740193014842eb714f947bdb93e482a8d31a8cca0402434e852d94e18c92ecde" diff --git a/airbyte-integrations/connectors/source-google-drive/pyproject.toml b/airbyte-integrations/connectors/source-google-drive/pyproject.toml new file mode 100644 index 0000000000000..6f43d85800d38 --- /dev/null +++ b/airbyte-integrations/connectors/source-google-drive/pyproject.toml @@ -0,0 +1,32 @@ +[build-system] +requires = [ "poetry-core>=1.0.0",] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +version = "0.0.10" +name = "source-google-drive" +description = "Source implementation for Google Drive." +authors = [ "Airbyte ",] +license = "ELv2" +readme = "README.md" +documentation = "https://docs.airbyte.com/integrations/sources/google-drive" +homepage = "https://airbyte.com" +repository = "https://github.com/airbytehq/airbyte" +[[tool.poetry.packages]] +include = "source_google_drive" + +[tool.poetry.dependencies] +python = "^3.9,<3.12" +google-api-python-client = "==2.104.0" +google-auth-httplib2 = "==0.1.1" +google-auth-oauthlib = "==1.1.0" +google-api-python-client-stubs = "==1.18.0" +airbyte-cdk = {extras = ["file-based"], version = "^0"} + + +[tool.poetry.scripts] +source-google-drive = "source_google_drive.run:run" + +[tool.poetry.group.dev.dependencies] +pytest-mock = "^3.12.0" +pytest = "^7.4" diff --git a/airbyte-integrations/connectors/source-google-drive/requirements.txt b/airbyte-integrations/connectors/source-google-drive/requirements.txt deleted file mode 100644 index d6e1198b1ab1f..0000000000000 --- a/airbyte-integrations/connectors/source-google-drive/requirements.txt +++ /dev/null @@ -1 +0,0 @@ --e . diff --git a/airbyte-integrations/connectors/source-google-drive/setup.py b/airbyte-integrations/connectors/source-google-drive/setup.py deleted file mode 100644 index 20f6da8ae9096..0000000000000 --- a/airbyte-integrations/connectors/source-google-drive/setup.py +++ /dev/null @@ -1,49 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = [ - "airbyte-cdk[file-based]>=0.61.0", - "google-api-python-client==2.104.0", - "google-auth-httplib2==0.1.1", - "google-auth-oauthlib==1.1.0", - "google-api-python-client-stubs==1.18.0", -] - -TEST_REQUIREMENTS = [ - "pytest-mock~=3.6.1", - "pytest~=6.1", -] - -setup( - name="source_google_drive", - description="Source implementation for Google Drive.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={ - "": [ - # Include yaml files in the package (if any) - "*.yml", - "*.yaml", - # Include all json files in the package, up to 4 levels deep - "*.json", - "*/*.json", - "*/*/*.json", - "*/*/*/*.json", - "*/*/*/*/*.json", - ] - }, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, - entry_points={ - "console_scripts": [ - "source-google-drive=source_google_drive.run:run", - ], - }, -) diff --git a/airbyte-integrations/connectors/source-google-search-console/acceptance-test-config.yml b/airbyte-integrations/connectors/source-google-search-console/acceptance-test-config.yml index aaf7caeb5c8ed..d73c05c4f894f 100755 --- a/airbyte-integrations/connectors/source-google-search-console/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-google-search-console/acceptance-test-config.yml @@ -28,19 +28,6 @@ acceptance_tests: path: "integration_tests/expected_records.jsonl" exact_order: no timeout_seconds: 3600 - empty_streams: - - name: search_analytics_page_report - bypass_reason: "Fast changing data" - - name: search_analytics_keyword_site_report_by_page - bypass_reason: "Fast changing data" - - name: search_analytics_keyword_site_report_by_site - bypass_reason: "Fast changing data" - - name: search_analytics_keyword_page_report - bypass_reason: "Fast changing data" - - name: search_analytics_site_report_by_page - bypass_reason: "Fast changing data" - - name: search_analytics_site_report_by_site - bypass_reason: "Fast changing data" full_refresh: tests: - config_path: "secrets/config.json" @@ -53,6 +40,3 @@ acceptance_tests: timeout_seconds: 3600 future_state: future_state_path: "integration_tests/abnormal_state.json" - # Incremental read with current config produces multiple empty state messages before emitting first record. - # This leads to identical consecutive sync results which fail the test - skip_comprehensive_incremental_tests: true diff --git a/airbyte-integrations/connectors/source-google-search-console/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-google-search-console/integration_tests/expected_records.jsonl index 457a58444b62a..03d98bca329ad 100644 --- a/airbyte-integrations/connectors/source-google-search-console/integration_tests/expected_records.jsonl +++ b/airbyte-integrations/connectors/source-google-search-console/integration_tests/expected_records.jsonl @@ -1,16 +1,29 @@ -{"stream": "sites", "data": {"siteUrl": "sc-domain:airbyte.io", "permissionLevel": "siteFullUser"}, "emitted_at": 1709211825229} -{"stream": "sitemaps", "data": {"path": "https://airbyte.io/sitemap.xml", "lastSubmitted": "2021-09-10T23:02:22.258Z", "isPending": false, "isSitemapsIndex": false, "type": "sitemap", "lastDownloaded": "2023-03-02T03:42:19.607Z", "warnings": "0", "errors": "0", "contents": [{"type": "web", "submitted": "5165", "indexed": "0"}]}, "emitted_at": 1677799185696} -{"stream": "sitemaps", "data": {"path": "https://airbyte.io/sitemap.xml", "lastSubmitted": "2021-09-10T23:02:22.258Z", "isPending": false, "isSitemapsIndex": false, "type": "sitemap", "lastDownloaded": "2023-03-02T03:42:19.607Z", "warnings": "0", "errors": "0", "contents": [{"type": "web", "submitted": "5165", "indexed": "0"}]}, "emitted_at": 1677799186044} -{"stream": "search_analytics_by_date", "data": {"clicks": 160, "impressions": 6097, "ctr": 0.026242414302115796, "position": 27.335410857798916, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-22"}, "emitted_at": 1709284338937} -{"stream": "search_analytics_by_date", "data": {"clicks": 227, "impressions": 7309, "ctr": 0.031057600218908195, "position": 25.308523737857435, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-23"}, "emitted_at": 1709284338938} -{"stream": "search_analytics_by_country", "data": {"clicks": 37, "impressions": 1246, "ctr": 0.02969502407704655, "position": 31.96548956661316, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-22", "country": "usa"}, "emitted_at": 1709558064452} -{"stream": "search_analytics_by_country", "data": {"clicks": 31, "impressions": 1282, "ctr": 0.02418096723868955, "position": 30.254290171606865, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-23", "country": "usa"}, "emitted_at": 1709558064452} -{"stream": "search_analytics_by_device", "data": {"clicks": 203, "impressions": 6206, "ctr": 0.03271028037383177, "position": 23.797937479858202, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-23", "device": "DESKTOP"}, "emitted_at": 1709558104602} -{"stream": "search_analytics_by_device", "data": {"clicks": 21, "impressions": 1084, "ctr": 0.01937269372693727, "position": 34.21678966789668, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-23", "device": "MOBILE"}, "emitted_at": 1709558104603} -{"stream": "search_analytics_by_page", "data": {"clicks": 8, "impressions": 197, "ctr": 0.04060913705583756, "position": 8.802030456852792, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-21", "page": "https://discuss.airbyte.io/t/kafka-connection-fails/723"}, "emitted_at": 1709558151837} -{"stream": "search_analytics_by_query", "data": {"clicks": 2, "impressions": 2, "ctr": 1, "position": 1, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-21", "query": "airbyte authentication"}, "emitted_at": 1709558202703} -{"stream": "search_analytics_by_query", "data": {"clicks": 2, "impressions": 11, "ctr": 0.18181818181818182, "position": 2.090909090909091, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-21", "query": "airbyte cloud"}, "emitted_at": 1709558202703} -{"stream": "search_analytics_all_fields", "data": {"clicks": 1, "impressions": 1, "ctr": 1, "position": 9, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-21", "country": "aut", "device": "DESKTOP", "page": "https://discuss.airbyte.io/t/cannot-build-docker-images-for-python-destination-connector/1454", "query": "fatal error: ffi.h: no such file or directory"}, "emitted_at": 1709558247944} -{"stream": "search_analytics_all_fields", "data": {"clicks": 1, "impressions": 1, "ctr": 1, "position": 1, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-21", "country": "bel", "device": "DESKTOP", "page": "https://cloud.airbyte.io/login", "query": "airbyte login"}, "emitted_at": 1709558247944} -{"stream": "custom_dimensions", "data": {"clicks": 29, "impressions": 521, "ctr": 0.05566218809980806, "position": 11.186180422264876, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-21", "country": "fra", "device": "DESKTOP"}, "emitted_at": 1709559198005} -{"stream": "custom_dimensions", "data": {"clicks": 27, "impressions": 421, "ctr": 0.06413301662707839, "position": 14.931116389548693, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-21", "country": "bra", "device": "DESKTOP"}, "emitted_at": 1709559198006} +{"stream": "sites", "data": {"siteUrl": "sc-domain:airbyte.io", "permissionLevel": "siteFullUser"}, "emitted_at": 1709913944973} +{"stream": "sitemaps", "data": {"path": "https://discuss.airbyte.io/sitemap.xml", "lastSubmitted": "2024-02-10T17:31:13.470Z", "isPending": false, "isSitemapsIndex": true, "lastDownloaded": "2024-03-08T04:51:33.425Z", "warnings": "0", "errors": "0", "contents": [{"type": "web", "submitted": "1778", "indexed": "0"}]}, "emitted_at": 1709913945327} +{"stream": "sitemaps", "data": {"path": "https://airbyte.io/sitemap.xml", "lastSubmitted": "2021-09-10T23:02:22.258Z", "isPending": false, "isSitemapsIndex": false, "type": "sitemap", "lastDownloaded": "2024-03-08T02:25:38.869Z", "warnings": "6", "errors": "0", "contents": [{"type": "web", "submitted": "30333", "indexed": "0"}]}, "emitted_at": 1709913945328} +{"stream": "search_analytics_by_date", "data": {"clicks": 120, "impressions": 5267, "ctr": 0.022783368141256883, "position": 35.45490791722043, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-01"}, "emitted_at": 1709913946169} +{"stream": "search_analytics_by_date", "data": {"clicks": 439, "impressions": 10076, "ctr": 0.043568876538308855, "position": 24.655617308455735, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-02"}, "emitted_at": 1709913946170} +{"stream": "search_analytics_by_country", "data": {"clicks": 98, "impressions": 2544, "ctr": 0.03852201257861635, "position": 25.294025157232703, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-03", "country": "usa"}, "emitted_at": 1709913947363} +{"stream": "search_analytics_by_country", "data": {"clicks": 84, "impressions": 2389, "ctr": 0.03516115529510255, "position": 28.137295939723735, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-02", "country": "usa"}, "emitted_at": 1709913947364} +{"stream": "search_analytics_by_device", "data": {"clicks": 453, "impressions": 10313, "ctr": 0.043925143023368564, "position": 22.476873848540677, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-03", "device": "DESKTOP"}, "emitted_at": 1709913947994} +{"stream": "search_analytics_by_device", "data": {"clicks": 415, "impressions": 9270, "ctr": 0.0447680690399137, "position": 23.817044228694716, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-02", "device": "DESKTOP"}, "emitted_at": 1709913947996} +{"stream": "search_analytics_by_page", "data": {"clicks": 14, "impressions": 178, "ctr": 0.07865168539325842, "position": 7.162921348314606, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-02", "page": "https://discuss.airbyte.io/t/nil-pointer-error-when-deploying-helm-chart/601"}, "emitted_at": 1709913949344} +{"stream": "search_analytics_by_page", "data": {"clicks": 14, "impressions": 59, "ctr": 0.23728813559322035, "position": 7.5423728813559325, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-03", "page": "https://discuss.airbyte.io/t/using-a-private-git-repo-for-transformations-the-selection-criterion-does-not-match-any-nodes/4170"}, "emitted_at": 1709913949345} +{"stream": "search_analytics_by_query", "data": {"clicks": 5, "impressions": 6, "ctr": 0.8333333333333334, "position": 1, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-03", "query": "internal server error: cannot invoke \"io.airbyte.api.model.generated.airbytecatalog.getstreams()\" because \"discovered\" is null"}, "emitted_at": 1709913950680} +{"stream": "search_analytics_by_query", "data": {"clicks": 3, "impressions": 4, "ctr": 0.75, "position": 2, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-02", "query": "the selection criterion does not match any nodes"}, "emitted_at": 1709913950680} +{"stream": "search_analytics_all_fields", "data": {"clicks": 2, "impressions": 3, "ctr": 0.6666666666666666, "position": 2, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-02", "country": "gbr", "device": "DESKTOP", "page": "https://discuss.airbyte.io/t/using-a-private-git-repo-for-transformations-the-selection-criterion-does-not-match-any-nodes/4170", "query": "the selection criterion does not match any nodes"}, "emitted_at": 1709913953146} +{"stream": "search_analytics_all_fields", "data": {"clicks": 2, "impressions": 2, "ctr": 1, "position": 2, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-02", "country": "usa", "device": "DESKTOP", "page": "https://discuss.airbyte.io/t/integrating-keycloak-iam-with-airbyte/2826", "query": "airbyte keycloak"}, "emitted_at": 1709913953146} +{"stream": "custom_dimensions", "data": {"clicks": 97, "impressions": 2392, "ctr": 0.040551839464882944, "position": 24.149247491638796, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-03", "country": "usa", "device": "DESKTOP"}, "emitted_at": 1709913970183} +{"stream": "custom_dimensions", "data": {"clicks": 81, "impressions": 2220, "ctr": 0.03648648648648649, "position": 27.025675675675675, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-02", "country": "usa", "device": "DESKTOP"}, "emitted_at": 1709913970184} +{"stream": "search_analytics_keyword_page_report", "data": {"clicks": 0, "impressions": 1, "ctr": 0, "position": 6, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-08", "country": "usa", "device": "DESKTOP", "query": "fatal: not a dbt project (or any of the parent directories). missing dbt_project.yml file", "page": "https://discuss.airbyte.io/t/how-to-set-workspace-folder-job-id-in-entrypoint-arguments-for-custom-dbt-transformation/2805"}, "emitted_at": 1709913956708} +{"stream": "search_analytics_keyword_page_report", "data": {"clicks": 0, "impressions": 1, "ctr": 0, "position": 4, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-09", "country": "usa", "device": "DESKTOP", "query": "could not find a version that satisfies the requirement comm>=0.1.3", "page": "https://discuss.airbyte.io/t/error-could-not-find-a-version-that-satisfies-the-requirement-airbyte-cdk-0-1-56/1397"}, "emitted_at": 1709913956709} +{"stream": "search_analytics_page_report", "data": {"clicks": 2, "impressions": 4, "ctr": 0.5, "position": 2.5, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-02", "country": "gbr", "device": "DESKTOP", "page": "https://discuss.airbyte.io/t/using-a-private-git-repo-for-transformations-the-selection-criterion-does-not-match-any-nodes/4170"}, "emitted_at": 1709913968085} +{"stream": "search_analytics_page_report", "data": {"clicks": 2, "impressions": 3, "ctr": 0.6666666666666666, "position": 1, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-03", "country": "deu", "device": "DESKTOP", "page": "https://discuss.airbyte.io/t/mixpanel-connector-issue-follow-up-on-previous-case/2814"}, "emitted_at": 1709913968086} +{"stream": "search_analytics_keyword_site_report_by_page", "data": {"clicks": 0, "impressions": 1, "ctr": 0, "position": 6, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-08", "country": "usa", "device": "DESKTOP", "query": "fatal: not a dbt project (or any of the parent directories). missing dbt_project.yml file"}, "emitted_at": 1709913961303} +{"stream": "search_analytics_keyword_site_report_by_page", "data": {"clicks": 0, "impressions": 1, "ctr": 0, "position": 4, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-09", "country": "usa", "device": "DESKTOP", "query": "could not find a version that satisfies the requirement comm>=0.1.3"}, "emitted_at": 1709913961305} +{"stream": "search_analytics_keyword_site_report_by_site", "data": {"clicks": 0, "impressions": 1, "ctr": 0, "position": 5, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-10", "country": "usa", "device": "DESKTOP", "query": "fatal: not a dbt project (or any of the parent directories). missing dbt_project.yml file"}, "emitted_at": 1709913966177} +{"stream": "search_analytics_keyword_site_report_by_site", "data": {"clicks": 0, "impressions": 1, "ctr": 0, "position": 5, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-11", "country": "usa", "device": "DESKTOP", "query": "dbt_project.yml not found"}, "emitted_at": 1709913966179} +{"stream": "search_analytics_site_report_by_page", "data": {"clicks": 105, "impressions": 2905, "ctr": 0.03614457831325301, "position": 21.6447504302926, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-03", "country": "usa", "device": "DESKTOP"}, "emitted_at": 1709913968684} +{"stream": "search_analytics_site_report_by_page", "data": {"clicks": 87, "impressions": 2598, "ctr": 0.03348729792147806, "position": 24.50269438029253, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-02", "country": "usa", "device": "DESKTOP"}, "emitted_at": 1709913968685} +{"stream": "search_analytics_site_report_by_site", "data": {"clicks": 97, "impressions": 2392, "ctr": 0.040551839464882944, "position": 24.149247491638796, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-03", "country": "usa", "device": "DESKTOP"}, "emitted_at": 1709913969703} +{"stream": "search_analytics_site_report_by_site", "data": {"clicks": 81, "impressions": 2220, "ctr": 0.03648648648648649, "position": 27.025675675675675, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-02", "country": "usa", "device": "DESKTOP"}, "emitted_at": 1709913969704} diff --git a/airbyte-integrations/connectors/source-google-search-console/metadata.yaml b/airbyte-integrations/connectors/source-google-search-console/metadata.yaml index d5393b3291630..a1a2137826c28 100644 --- a/airbyte-integrations/connectors/source-google-search-console/metadata.yaml +++ b/airbyte-integrations/connectors/source-google-search-console/metadata.yaml @@ -10,12 +10,13 @@ data: connectorSubtype: api connectorType: source definitionId: eb4c9e00-db83-4d63-a386-39cfa91012a8 - dockerImageTag: 1.3.7 + dockerImageTag: 1.4.0 dockerRepository: airbyte/source-google-search-console documentationUrl: https://docs.airbyte.com/integrations/sources/google-search-console githubIssueLabel: source-google-search-console icon: googlesearchconsole.svg license: Elv2 + maxSecondsBetweenMessages: 86400 name: Google Search Console remoteRegistries: pypi: diff --git a/airbyte-integrations/connectors/source-google-search-console/poetry.lock b/airbyte-integrations/connectors/source-google-search-console/poetry.lock index 88c8ca379b0f9..e47e78f8c7bdd 100644 --- a/airbyte-integrations/connectors/source-google-search-console/poetry.lock +++ b/airbyte-integrations/connectors/source-google-search-console/poetry.lock @@ -1,18 +1,18 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.52.4" +version = "0.72.1" description = "A framework for writing Airbyte Connectors." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte-cdk-0.52.4.tar.gz", hash = "sha256:d2d5d2c3a988259ed3e270b4d77ea7d6c0ca1a9f57aec8ae54ff64b99ad9b2e8"}, - {file = "airbyte_cdk-0.52.4-py3-none-any.whl", hash = "sha256:94219a67d125e80924a81bb809be90b045359159904c4905c38f68f69c8fd723"}, + {file = "airbyte-cdk-0.72.1.tar.gz", hash = "sha256:1dbd0a11f3784cfdd5afa9f40315c9a6123e803be91f9f861642a78e7ee14cd9"}, + {file = "airbyte_cdk-0.72.1-py3-none-any.whl", hash = "sha256:849077805442286de99f589ecba4be82491a3d9d3f516ce1a8b0cbaf303db9a4"}, ] [package.dependencies] -airbyte-protocol-models = "0.4.2" +airbyte-protocol-models = "0.5.1" backoff = "*" cachetools = "*" Deprecated = ">=1.2,<2.0" @@ -22,8 +22,9 @@ isodate = ">=0.6.1,<0.7.0" Jinja2 = ">=3.1.2,<3.2.0" jsonref = ">=0.2,<1.0" jsonschema = ">=3.2.0,<3.3.0" -pendulum = "*" +pendulum = "<3.0.0" pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" python-dateutil = "*" PyYAML = ">=6.0.1" requests = "*" @@ -31,20 +32,20 @@ requests-cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.19)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx] (==0.10.19)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.19)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx] (==0.10.19)"] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] [[package]] name = "airbyte-protocol-models" -version = "0.4.2" +version = "0.5.1" description = "Declares the Airbyte Protocol." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte_protocol_models-0.4.2-py3-none-any.whl", hash = "sha256:d3bbb14d4af9483bd7b08f5eb06f87e7113553bf4baed3998af95be873a0d821"}, - {file = "airbyte_protocol_models-0.4.2.tar.gz", hash = "sha256:67b149d4812f8fdb88396b161274aa73cf0e16f22e35ce44f2bfc4d47e51915c"}, + {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, + {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, ] [package.dependencies] @@ -103,13 +104,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -312,13 +313,13 @@ files = [ [[package]] name = "google-api-core" -version = "2.17.0" +version = "2.17.1" description = "Google API client core library" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-core-2.17.0.tar.gz", hash = "sha256:de7ef0450faec7c75e0aea313f29ac870fdc44cfaec9d6499a9a17305980ef66"}, - {file = "google_api_core-2.17.0-py3-none-any.whl", hash = "sha256:08ed79ed8e93e329de5e3e7452746b734e6bf8438d8d64dd3319d21d3164890c"}, + {file = "google-api-core-2.17.1.tar.gz", hash = "sha256:9df18a1f87ee0df0bc4eea2770ebc4228392d8cc4066655b320e2cfccb15db95"}, + {file = "google_api_core-2.17.1-py3-none-any.whl", hash = "sha256:610c5b90092c360736baccf17bd3efbcb30dd380e7a6dc28a71059edb8bd0d8e"}, ] [package.dependencies] @@ -390,13 +391,13 @@ httplib2 = ">=0.19.0" [[package]] name = "googleapis-common-protos" -version = "1.62.0" +version = "1.63.0" description = "Common protobufs used in Google APIs" optional = false python-versions = ">=3.7" files = [ - {file = "googleapis-common-protos-1.62.0.tar.gz", hash = "sha256:83f0ece9f94e5672cced82f592d2a5edf527a96ed1794f0bab36d5735c996277"}, - {file = "googleapis_common_protos-1.62.0-py2.py3-none-any.whl", hash = "sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07"}, + {file = "googleapis-common-protos-1.63.0.tar.gz", hash = "sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e"}, + {file = "googleapis_common_protos-1.63.0-py2.py3-none-any.whl", hash = "sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632"}, ] [package.dependencies] @@ -575,113 +576,48 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] name = "pendulum" -version = "3.0.0" +version = "2.1.2" description = "Python datetimes made easy" optional = false -python-versions = ">=3.8" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ - {file = "pendulum-3.0.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2cf9e53ef11668e07f73190c805dbdf07a1939c3298b78d5a9203a86775d1bfd"}, - {file = "pendulum-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fb551b9b5e6059377889d2d878d940fd0bbb80ae4810543db18e6f77b02c5ef6"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c58227ac260d5b01fc1025176d7b31858c9f62595737f350d22124a9a3ad82d"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60fb6f415fea93a11c52578eaa10594568a6716602be8430b167eb0d730f3332"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b69f6b4dbcb86f2c2fe696ba991e67347bcf87fe601362a1aba6431454b46bde"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:138afa9c373ee450ede206db5a5e9004fd3011b3c6bbe1e57015395cd076a09f"}, - {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:83d9031f39c6da9677164241fd0d37fbfc9dc8ade7043b5d6d62f56e81af8ad2"}, - {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0c2308af4033fa534f089595bcd40a95a39988ce4059ccd3dc6acb9ef14ca44a"}, - {file = "pendulum-3.0.0-cp310-none-win_amd64.whl", hash = "sha256:9a59637cdb8462bdf2dbcb9d389518c0263799189d773ad5c11db6b13064fa79"}, - {file = "pendulum-3.0.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3725245c0352c95d6ca297193192020d1b0c0f83d5ee6bb09964edc2b5a2d508"}, - {file = "pendulum-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6c035f03a3e565ed132927e2c1b691de0dbf4eb53b02a5a3c5a97e1a64e17bec"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:597e66e63cbd68dd6d58ac46cb7a92363d2088d37ccde2dae4332ef23e95cd00"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99a0f8172e19f3f0c0e4ace0ad1595134d5243cf75985dc2233e8f9e8de263ca"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:77d8839e20f54706aed425bec82a83b4aec74db07f26acd039905d1237a5e1d4"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afde30e8146292b059020fbc8b6f8fd4a60ae7c5e6f0afef937bbb24880bdf01"}, - {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:660434a6fcf6303c4efd36713ca9212c753140107ee169a3fc6c49c4711c2a05"}, - {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dee9e5a48c6999dc1106eb7eea3e3a50e98a50651b72c08a87ee2154e544b33e"}, - {file = "pendulum-3.0.0-cp311-none-win_amd64.whl", hash = "sha256:d4cdecde90aec2d67cebe4042fd2a87a4441cc02152ed7ed8fb3ebb110b94ec4"}, - {file = "pendulum-3.0.0-cp311-none-win_arm64.whl", hash = "sha256:773c3bc4ddda2dda9f1b9d51fe06762f9200f3293d75c4660c19b2614b991d83"}, - {file = "pendulum-3.0.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:409e64e41418c49f973d43a28afe5df1df4f1dd87c41c7c90f1a63f61ae0f1f7"}, - {file = "pendulum-3.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a38ad2121c5ec7c4c190c7334e789c3b4624798859156b138fcc4d92295835dc"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fde4d0b2024b9785f66b7f30ed59281bd60d63d9213cda0eb0910ead777f6d37"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b2c5675769fb6d4c11238132962939b960fcb365436b6d623c5864287faa319"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8af95e03e066826f0f4c65811cbee1b3123d4a45a1c3a2b4fc23c4b0dff893b5"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2165a8f33cb15e06c67070b8afc87a62b85c5a273e3aaa6bc9d15c93a4920d6f"}, - {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ad5e65b874b5e56bd942546ea7ba9dd1d6a25121db1c517700f1c9de91b28518"}, - {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17fe4b2c844bbf5f0ece69cfd959fa02957c61317b2161763950d88fed8e13b9"}, - {file = "pendulum-3.0.0-cp312-none-win_amd64.whl", hash = "sha256:78f8f4e7efe5066aca24a7a57511b9c2119f5c2b5eb81c46ff9222ce11e0a7a5"}, - {file = "pendulum-3.0.0-cp312-none-win_arm64.whl", hash = "sha256:28f49d8d1e32aae9c284a90b6bb3873eee15ec6e1d9042edd611b22a94ac462f"}, - {file = "pendulum-3.0.0-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:d4e2512f4e1a4670284a153b214db9719eb5d14ac55ada5b76cbdb8c5c00399d"}, - {file = "pendulum-3.0.0-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:3d897eb50883cc58d9b92f6405245f84b9286cd2de6e8694cb9ea5cb15195a32"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e169cc2ca419517f397811bbe4589cf3cd13fca6dc38bb352ba15ea90739ebb"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f17c3084a4524ebefd9255513692f7e7360e23c8853dc6f10c64cc184e1217ab"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:826d6e258052715f64d05ae0fc9040c0151e6a87aae7c109ba9a0ed930ce4000"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2aae97087872ef152a0c40e06100b3665d8cb86b59bc8471ca7c26132fccd0f"}, - {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ac65eeec2250d03106b5e81284ad47f0d417ca299a45e89ccc69e36130ca8bc7"}, - {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a5346d08f3f4a6e9e672187faa179c7bf9227897081d7121866358af369f44f9"}, - {file = "pendulum-3.0.0-cp37-none-win_amd64.whl", hash = "sha256:235d64e87946d8f95c796af34818c76e0f88c94d624c268693c85b723b698aa9"}, - {file = "pendulum-3.0.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:6a881d9c2a7f85bc9adafcfe671df5207f51f5715ae61f5d838b77a1356e8b7b"}, - {file = "pendulum-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d7762d2076b9b1cb718a6631ad6c16c23fc3fac76cbb8c454e81e80be98daa34"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e8e36a8130819d97a479a0e7bf379b66b3b1b520e5dc46bd7eb14634338df8c"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7dc843253ac373358ffc0711960e2dd5b94ab67530a3e204d85c6e8cb2c5fa10"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a78ad3635d609ceb1e97d6aedef6a6a6f93433ddb2312888e668365908c7120"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b30a137e9e0d1f751e60e67d11fc67781a572db76b2296f7b4d44554761049d6"}, - {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c95984037987f4a457bb760455d9ca80467be792236b69d0084f228a8ada0162"}, - {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d29c6e578fe0f893766c0d286adbf0b3c726a4e2341eba0917ec79c50274ec16"}, - {file = "pendulum-3.0.0-cp38-none-win_amd64.whl", hash = "sha256:deaba8e16dbfcb3d7a6b5fabdd5a38b7c982809567479987b9c89572df62e027"}, - {file = "pendulum-3.0.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b11aceea5b20b4b5382962b321dbc354af0defe35daa84e9ff3aae3c230df694"}, - {file = "pendulum-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a90d4d504e82ad236afac9adca4d6a19e4865f717034fc69bafb112c320dcc8f"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:825799c6b66e3734227756fa746cc34b3549c48693325b8b9f823cb7d21b19ac"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad769e98dc07972e24afe0cff8d365cb6f0ebc7e65620aa1976fcfbcadc4c6f3"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6fc26907eb5fb8cc6188cc620bc2075a6c534d981a2f045daa5f79dfe50d512"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c717eab1b6d898c00a3e0fa7781d615b5c5136bbd40abe82be100bb06df7a56"}, - {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3ddd1d66d1a714ce43acfe337190be055cdc221d911fc886d5a3aae28e14b76d"}, - {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:822172853d7a9cf6da95d7b66a16c7160cb99ae6df55d44373888181d7a06edc"}, - {file = "pendulum-3.0.0-cp39-none-win_amd64.whl", hash = "sha256:840de1b49cf1ec54c225a2a6f4f0784d50bd47f68e41dc005b7f67c7d5b5f3ae"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3b1f74d1e6ffe5d01d6023870e2ce5c2191486928823196f8575dcc786e107b1"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:729e9f93756a2cdfa77d0fc82068346e9731c7e884097160603872686e570f07"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e586acc0b450cd21cbf0db6bae386237011b75260a3adceddc4be15334689a9a"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22e7944ffc1f0099a79ff468ee9630c73f8c7835cd76fdb57ef7320e6a409df4"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fa30af36bd8e50686846bdace37cf6707bdd044e5cb6e1109acbad3277232e04"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:440215347b11914ae707981b9a57ab9c7b6983ab0babde07063c6ee75c0dc6e7"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:314c4038dc5e6a52991570f50edb2f08c339debdf8cea68ac355b32c4174e820"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5acb1d386337415f74f4d1955c4ce8d0201978c162927d07df8eb0692b2d8533"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a789e12fbdefaffb7b8ac67f9d8f22ba17a3050ceaaa635cd1cc4645773a4b1e"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:860aa9b8a888e5913bd70d819306749e5eb488e6b99cd6c47beb701b22bdecf5"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:5ebc65ea033ef0281368217fbf59f5cb05b338ac4dd23d60959c7afcd79a60a0"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d9fef18ab0386ef6a9ac7bad7e43ded42c83ff7ad412f950633854f90d59afa8"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1c134ba2f0571d0b68b83f6972e2307a55a5a849e7dac8505c715c531d2a8795"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:385680812e7e18af200bb9b4a49777418c32422d05ad5a8eb85144c4a285907b"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eec91cd87c59fb32ec49eb722f375bd58f4be790cae11c1b70fac3ee4f00da0"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4386bffeca23c4b69ad50a36211f75b35a4deb6210bdca112ac3043deb7e494a"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dfbcf1661d7146d7698da4b86e7f04814221081e9fe154183e34f4c5f5fa3bf8"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:04a1094a5aa1daa34a6b57c865b25f691848c61583fb22722a4df5699f6bf74c"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5b0ec85b9045bd49dd3a3493a5e7ddfd31c36a2a60da387c419fa04abcaecb23"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0a15b90129765b705eb2039062a6daf4d22c4e28d1a54fa260892e8c3ae6e157"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:bb8f6d7acd67a67d6fedd361ad2958ff0539445ef51cbe8cd288db4306503cd0"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd69b15374bef7e4b4440612915315cc42e8575fcda2a3d7586a0d88192d0c88"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc00f8110db6898360c53c812872662e077eaf9c75515d53ecc65d886eec209a"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:83a44e8b40655d0ba565a5c3d1365d27e3e6778ae2a05b69124db9e471255c4a"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1a3604e9fbc06b788041b2a8b78f75c243021e0f512447806a6d37ee5214905d"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:92c307ae7accebd06cbae4729f0ba9fa724df5f7d91a0964b1b972a22baa482b"}, - {file = "pendulum-3.0.0.tar.gz", hash = "sha256:5d034998dea404ec31fae27af6b22cff1708f830a1ed7353be4d1019bb9f584e"}, + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, ] [package.dependencies] -python-dateutil = ">=2.6" -tzdata = ">=2020.1" - -[package.extras] -test = ["time-machine (>=2.6.0)"] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" [[package]] name = "platformdirs" @@ -715,22 +651,22 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "protobuf" -version = "4.25.2" +version = "4.25.3" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "protobuf-4.25.2-cp310-abi3-win32.whl", hash = "sha256:b50c949608682b12efb0b2717f53256f03636af5f60ac0c1d900df6213910fd6"}, - {file = "protobuf-4.25.2-cp310-abi3-win_amd64.whl", hash = "sha256:8f62574857ee1de9f770baf04dde4165e30b15ad97ba03ceac65f760ff018ac9"}, - {file = "protobuf-4.25.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:2db9f8fa64fbdcdc93767d3cf81e0f2aef176284071507e3ede160811502fd3d"}, - {file = "protobuf-4.25.2-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:10894a2885b7175d3984f2be8d9850712c57d5e7587a2410720af8be56cdaf62"}, - {file = "protobuf-4.25.2-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:fc381d1dd0516343f1440019cedf08a7405f791cd49eef4ae1ea06520bc1c020"}, - {file = "protobuf-4.25.2-cp38-cp38-win32.whl", hash = "sha256:33a1aeef4b1927431d1be780e87b641e322b88d654203a9e9d93f218ee359e61"}, - {file = "protobuf-4.25.2-cp38-cp38-win_amd64.whl", hash = "sha256:47f3de503fe7c1245f6f03bea7e8d3ec11c6c4a2ea9ef910e3221c8a15516d62"}, - {file = "protobuf-4.25.2-cp39-cp39-win32.whl", hash = "sha256:5e5c933b4c30a988b52e0b7c02641760a5ba046edc5e43d3b94a74c9fc57c1b3"}, - {file = "protobuf-4.25.2-cp39-cp39-win_amd64.whl", hash = "sha256:d66a769b8d687df9024f2985d5137a337f957a0916cf5464d1513eee96a63ff0"}, - {file = "protobuf-4.25.2-py3-none-any.whl", hash = "sha256:a8b7a98d4ce823303145bf3c1a8bdb0f2f4642a414b196f04ad9853ed0c8f830"}, - {file = "protobuf-4.25.2.tar.gz", hash = "sha256:fe599e175cb347efc8ee524bcd4b902d11f7262c0e569ececcb89995c15f0a5e"}, + {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"}, + {file = "protobuf-4.25.3-cp310-abi3-win_amd64.whl", hash = "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8"}, + {file = "protobuf-4.25.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d"}, + {file = "protobuf-4.25.3-cp38-cp38-win32.whl", hash = "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"}, + {file = "protobuf-4.25.3-cp38-cp38-win_amd64.whl", hash = "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4"}, + {file = "protobuf-4.25.3-cp39-cp39-win32.whl", hash = "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4"}, + {file = "protobuf-4.25.3-cp39-cp39-win_amd64.whl", hash = "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c"}, + {file = "protobuf-4.25.3-py3-none-any.whl", hash = "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9"}, + {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"}, ] [[package]] @@ -823,18 +759,33 @@ email = ["email-validator (>=1.0.3)"] [[package]] name = "pyparsing" -version = "3.1.1" +version = "3.1.2" description = "pyparsing module - Classes and methods to define and execute parsing grammars" optional = false python-versions = ">=3.6.8" files = [ - {file = "pyparsing-3.1.1-py3-none-any.whl", hash = "sha256:32c7c0b711493c72ff18a981d24f28aaf9c1fb7ed5e9667c9e84e3db623bdbfb"}, - {file = "pyparsing-3.1.1.tar.gz", hash = "sha256:ede28a1a32462f5a9705e07aea48001a08f7cf81a021585011deba701581a0db"}, + {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, + {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, ] [package.extras] diagrams = ["jinja2", "railroad-diagrams"] +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + [[package]] name = "pyrsistent" version = "0.20.0" @@ -933,18 +884,29 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] six = ">=1.5" +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + [[package]] name = "pyyaml" version = "6.0.1" @@ -970,7 +932,6 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -1028,13 +989,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -1046,15 +1007,15 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" @@ -1091,19 +1052,19 @@ pyasn1 = ">=0.1.3" [[package]] name = "setuptools" -version = "69.1.0" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -1129,24 +1090,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, -] - -[[package]] -name = "tzdata" -version = "2024.1" -description = "Provider of IANA time zone data" -optional = false -python-versions = ">=2" -files = [ - {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, - {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -1176,13 +1126,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1287,4 +1237,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "5b3037c68d5c9a557b84ace020c5f76a13cd928610dfb0fbc2434d344695180b" +content-hash = "d68dc19392fa6e162b5c452e93bc7d92de3b500ad9e559f312c9b98b9c5c13d6" diff --git a/airbyte-integrations/connectors/source-google-search-console/pyproject.toml b/airbyte-integrations/connectors/source-google-search-console/pyproject.toml index d4c8989d1930c..f837584d80932 100644 --- a/airbyte-integrations/connectors/source-google-search-console/pyproject.toml +++ b/airbyte-integrations/connectors/source-google-search-console/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "1.3.7" +version = "1.4.0" name = "source-google-search-console" description = "Source implementation for Google Search Console." authors = [ "Airbyte ",] @@ -18,7 +18,7 @@ include = "source_google_search_console" [tool.poetry.dependencies] python = "^3.9,<3.12" google-api-python-client = "==2.105.0" -airbyte-cdk = "==0.52.4" +airbyte-cdk = "^0" google-auth = "==2.23.3" [tool.poetry.scripts] diff --git a/airbyte-integrations/connectors/source-google-sheets/acceptance-test-config.yml b/airbyte-integrations/connectors/source-google-sheets/acceptance-test-config.yml index d0e00aadd2800..3b54ea572a889 100644 --- a/airbyte-integrations/connectors/source-google-sheets/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-google-sheets/acceptance-test-config.yml @@ -6,6 +6,7 @@ acceptance_tests: - config_path: secrets/service_config.json expect_records: path: integration_tests/expected_records.txt + validate_state_messages: False file_types: skip_test: true bypass_reason: "The source only supports Google Sheets" diff --git a/airbyte-integrations/connectors/source-google-sheets/metadata.yaml b/airbyte-integrations/connectors/source-google-sheets/metadata.yaml index 8fe33f1026ce9..3ed0234b4c331 100644 --- a/airbyte-integrations/connectors/source-google-sheets/metadata.yaml +++ b/airbyte-integrations/connectors/source-google-sheets/metadata.yaml @@ -10,12 +10,13 @@ data: connectorSubtype: file connectorType: source definitionId: 71607ba1-c0ac-4799-8049-7f4b90dd50f7 - dockerImageTag: 0.3.17 + dockerImageTag: 0.5.1 dockerRepository: airbyte/source-google-sheets documentationUrl: https://docs.airbyte.com/integrations/sources/google-sheets githubIssueLabel: source-google-sheets icon: google-sheets.svg license: Elv2 + maxSecondsBetweenMessages: 60 name: Google Sheets remoteRegistries: pypi: diff --git a/airbyte-integrations/connectors/source-google-sheets/poetry.lock b/airbyte-integrations/connectors/source-google-sheets/poetry.lock index 099dc9c3eed6a..c61c62b407f22 100644 --- a/airbyte-integrations/connectors/source-google-sheets/poetry.lock +++ b/airbyte-integrations/connectors/source-google-sheets/poetry.lock @@ -2,49 +2,49 @@ [[package]] name = "airbyte-cdk" -version = "0.51.8" +version = "0.77.2" description = "A framework for writing Airbyte Connectors." optional = false -python-versions = ">=3.8" +python-versions = "<4.0,>=3.9" files = [ - {file = "airbyte-cdk-0.51.8.tar.gz", hash = "sha256:0f327408ea5d9e913dcd8601ba937489270366f23a2323b13a27bfd49360b371"}, - {file = "airbyte_cdk-0.51.8-py3-none-any.whl", hash = "sha256:ac841fbf20fcadd7b5d7ff4f0872dd70e56c951d63ab8ad02175f756ab0fb541"}, + {file = "airbyte_cdk-0.77.2-py3-none-any.whl", hash = "sha256:6dffbe0c4b3454a5cdd20525b4f1e9cfef2e80c005b6b30473fc5bf6f75af64e"}, + {file = "airbyte_cdk-0.77.2.tar.gz", hash = "sha256:84aeb27862a18e135c7bc3a5dfc363037665d428e7495e8824673f853adcca70"}, ] [package.dependencies] -airbyte-protocol-models = "0.4.0" +airbyte-protocol-models = "0.5.1" backoff = "*" cachetools = "*" -Deprecated = ">=1.2,<2.0" +Deprecated = ">=1.2,<1.3" dpath = ">=2.0.1,<2.1.0" genson = "1.2.2" isodate = ">=0.6.1,<0.7.0" Jinja2 = ">=3.1.2,<3.2.0" -jsonref = ">=0.2,<1.0" +jsonref = ">=0.2,<0.3" jsonschema = ">=3.2.0,<3.3.0" -pendulum = "*" -pydantic = ">=1.9.2,<2.0.0" +pendulum = "<3.0.0" +pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" python-dateutil = "*" -PyYAML = ">=6.0.1" +PyYAML = ">=6.0.1,<7.0.0" requests = "*" -requests-cache = "*" +requests_cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pyarrow (==12.0.1)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "pyarrow (==12.0.1)"] -sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] [[package]] name = "airbyte-protocol-models" -version = "0.4.0" +version = "0.5.1" description = "Declares the Airbyte Protocol." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte_protocol_models-0.4.0-py3-none-any.whl", hash = "sha256:e6a31fcd237504198a678d02c0040a8798f281c39203da61a5abce67842c5360"}, - {file = "airbyte_protocol_models-0.4.0.tar.gz", hash = "sha256:518736015c29ac60b6b8964a1b0d9b52e40020bcbd89e2545cc781f0b37d0f2b"}, + {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, + {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, ] [package.dependencies] @@ -103,13 +103,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -312,18 +312,19 @@ files = [ [[package]] name = "google-api-core" -version = "2.16.2" +version = "2.18.0" description = "Google API client core library" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-core-2.16.2.tar.gz", hash = "sha256:032d37b45d1d6bdaf68fb11ff621e2593263a239fa9246e2e94325f9c47876d2"}, - {file = "google_api_core-2.16.2-py3-none-any.whl", hash = "sha256:449ca0e3f14c179b4165b664256066c7861610f70b6ffe54bb01a04e9b466929"}, + {file = "google-api-core-2.18.0.tar.gz", hash = "sha256:62d97417bfc674d6cef251e5c4d639a9655e00c45528c4364fbfebb478ce72a9"}, + {file = "google_api_core-2.18.0-py3-none-any.whl", hash = "sha256:5a63aa102e0049abe85b5b88cb9409234c1f70afcda21ce1e40b285b9629c1d6"}, ] [package.dependencies] google-auth = ">=2.14.1,<3.0.dev0" googleapis-common-protos = ">=1.56.2,<2.0.dev0" +proto-plus = ">=1.22.3,<2.0.0dev" protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" requests = ">=2.18.0,<3.0.0.dev0" @@ -352,13 +353,13 @@ uritemplate = ">=3.0.1,<5" [[package]] name = "google-auth" -version = "2.27.0" +version = "2.29.0" description = "Google Authentication Library" optional = false python-versions = ">=3.7" files = [ - {file = "google-auth-2.27.0.tar.gz", hash = "sha256:e863a56ccc2d8efa83df7a80272601e43487fa9a728a376205c86c26aaefa821"}, - {file = "google_auth-2.27.0-py2.py3-none-any.whl", hash = "sha256:8e4bad367015430ff253fe49d500fdc3396c1a434db5740828c728e45bcce245"}, + {file = "google-auth-2.29.0.tar.gz", hash = "sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360"}, + {file = "google_auth-2.29.0-py2.py3-none-any.whl", hash = "sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415"}, ] [package.dependencies] @@ -390,13 +391,13 @@ httplib2 = ">=0.19.0" [[package]] name = "googleapis-common-protos" -version = "1.62.0" +version = "1.63.0" description = "Common protobufs used in Google APIs" optional = false python-versions = ">=3.7" files = [ - {file = "googleapis-common-protos-1.62.0.tar.gz", hash = "sha256:83f0ece9f94e5672cced82f592d2a5edf527a96ed1794f0bab36d5735c996277"}, - {file = "googleapis_common_protos-1.62.0-py2.py3-none-any.whl", hash = "sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07"}, + {file = "googleapis-common-protos-1.63.0.tar.gz", hash = "sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e"}, + {file = "googleapis_common_protos-1.63.0-py2.py3-none-any.whl", hash = "sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632"}, ] [package.dependencies] @@ -474,13 +475,13 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jsonref" -version = "0.3.0" -description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." +version = "0.2" +description = "An implementation of JSON Reference for Python" optional = false -python-versions = ">=3.3,<4.0" +python-versions = "*" files = [ - {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, - {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, ] [[package]] @@ -575,113 +576,48 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] name = "pendulum" -version = "3.0.0" +version = "2.1.2" description = "Python datetimes made easy" optional = false -python-versions = ">=3.8" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ - {file = "pendulum-3.0.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2cf9e53ef11668e07f73190c805dbdf07a1939c3298b78d5a9203a86775d1bfd"}, - {file = "pendulum-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fb551b9b5e6059377889d2d878d940fd0bbb80ae4810543db18e6f77b02c5ef6"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c58227ac260d5b01fc1025176d7b31858c9f62595737f350d22124a9a3ad82d"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60fb6f415fea93a11c52578eaa10594568a6716602be8430b167eb0d730f3332"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b69f6b4dbcb86f2c2fe696ba991e67347bcf87fe601362a1aba6431454b46bde"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:138afa9c373ee450ede206db5a5e9004fd3011b3c6bbe1e57015395cd076a09f"}, - {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:83d9031f39c6da9677164241fd0d37fbfc9dc8ade7043b5d6d62f56e81af8ad2"}, - {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0c2308af4033fa534f089595bcd40a95a39988ce4059ccd3dc6acb9ef14ca44a"}, - {file = "pendulum-3.0.0-cp310-none-win_amd64.whl", hash = "sha256:9a59637cdb8462bdf2dbcb9d389518c0263799189d773ad5c11db6b13064fa79"}, - {file = "pendulum-3.0.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3725245c0352c95d6ca297193192020d1b0c0f83d5ee6bb09964edc2b5a2d508"}, - {file = "pendulum-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6c035f03a3e565ed132927e2c1b691de0dbf4eb53b02a5a3c5a97e1a64e17bec"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:597e66e63cbd68dd6d58ac46cb7a92363d2088d37ccde2dae4332ef23e95cd00"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99a0f8172e19f3f0c0e4ace0ad1595134d5243cf75985dc2233e8f9e8de263ca"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:77d8839e20f54706aed425bec82a83b4aec74db07f26acd039905d1237a5e1d4"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afde30e8146292b059020fbc8b6f8fd4a60ae7c5e6f0afef937bbb24880bdf01"}, - {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:660434a6fcf6303c4efd36713ca9212c753140107ee169a3fc6c49c4711c2a05"}, - {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dee9e5a48c6999dc1106eb7eea3e3a50e98a50651b72c08a87ee2154e544b33e"}, - {file = "pendulum-3.0.0-cp311-none-win_amd64.whl", hash = "sha256:d4cdecde90aec2d67cebe4042fd2a87a4441cc02152ed7ed8fb3ebb110b94ec4"}, - {file = "pendulum-3.0.0-cp311-none-win_arm64.whl", hash = "sha256:773c3bc4ddda2dda9f1b9d51fe06762f9200f3293d75c4660c19b2614b991d83"}, - {file = "pendulum-3.0.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:409e64e41418c49f973d43a28afe5df1df4f1dd87c41c7c90f1a63f61ae0f1f7"}, - {file = "pendulum-3.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a38ad2121c5ec7c4c190c7334e789c3b4624798859156b138fcc4d92295835dc"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fde4d0b2024b9785f66b7f30ed59281bd60d63d9213cda0eb0910ead777f6d37"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b2c5675769fb6d4c11238132962939b960fcb365436b6d623c5864287faa319"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8af95e03e066826f0f4c65811cbee1b3123d4a45a1c3a2b4fc23c4b0dff893b5"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2165a8f33cb15e06c67070b8afc87a62b85c5a273e3aaa6bc9d15c93a4920d6f"}, - {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ad5e65b874b5e56bd942546ea7ba9dd1d6a25121db1c517700f1c9de91b28518"}, - {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17fe4b2c844bbf5f0ece69cfd959fa02957c61317b2161763950d88fed8e13b9"}, - {file = "pendulum-3.0.0-cp312-none-win_amd64.whl", hash = "sha256:78f8f4e7efe5066aca24a7a57511b9c2119f5c2b5eb81c46ff9222ce11e0a7a5"}, - {file = "pendulum-3.0.0-cp312-none-win_arm64.whl", hash = "sha256:28f49d8d1e32aae9c284a90b6bb3873eee15ec6e1d9042edd611b22a94ac462f"}, - {file = "pendulum-3.0.0-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:d4e2512f4e1a4670284a153b214db9719eb5d14ac55ada5b76cbdb8c5c00399d"}, - {file = "pendulum-3.0.0-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:3d897eb50883cc58d9b92f6405245f84b9286cd2de6e8694cb9ea5cb15195a32"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e169cc2ca419517f397811bbe4589cf3cd13fca6dc38bb352ba15ea90739ebb"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f17c3084a4524ebefd9255513692f7e7360e23c8853dc6f10c64cc184e1217ab"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:826d6e258052715f64d05ae0fc9040c0151e6a87aae7c109ba9a0ed930ce4000"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2aae97087872ef152a0c40e06100b3665d8cb86b59bc8471ca7c26132fccd0f"}, - {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ac65eeec2250d03106b5e81284ad47f0d417ca299a45e89ccc69e36130ca8bc7"}, - {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a5346d08f3f4a6e9e672187faa179c7bf9227897081d7121866358af369f44f9"}, - {file = "pendulum-3.0.0-cp37-none-win_amd64.whl", hash = "sha256:235d64e87946d8f95c796af34818c76e0f88c94d624c268693c85b723b698aa9"}, - {file = "pendulum-3.0.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:6a881d9c2a7f85bc9adafcfe671df5207f51f5715ae61f5d838b77a1356e8b7b"}, - {file = "pendulum-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d7762d2076b9b1cb718a6631ad6c16c23fc3fac76cbb8c454e81e80be98daa34"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e8e36a8130819d97a479a0e7bf379b66b3b1b520e5dc46bd7eb14634338df8c"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7dc843253ac373358ffc0711960e2dd5b94ab67530a3e204d85c6e8cb2c5fa10"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a78ad3635d609ceb1e97d6aedef6a6a6f93433ddb2312888e668365908c7120"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b30a137e9e0d1f751e60e67d11fc67781a572db76b2296f7b4d44554761049d6"}, - {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c95984037987f4a457bb760455d9ca80467be792236b69d0084f228a8ada0162"}, - {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d29c6e578fe0f893766c0d286adbf0b3c726a4e2341eba0917ec79c50274ec16"}, - {file = "pendulum-3.0.0-cp38-none-win_amd64.whl", hash = "sha256:deaba8e16dbfcb3d7a6b5fabdd5a38b7c982809567479987b9c89572df62e027"}, - {file = "pendulum-3.0.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b11aceea5b20b4b5382962b321dbc354af0defe35daa84e9ff3aae3c230df694"}, - {file = "pendulum-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a90d4d504e82ad236afac9adca4d6a19e4865f717034fc69bafb112c320dcc8f"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:825799c6b66e3734227756fa746cc34b3549c48693325b8b9f823cb7d21b19ac"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad769e98dc07972e24afe0cff8d365cb6f0ebc7e65620aa1976fcfbcadc4c6f3"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6fc26907eb5fb8cc6188cc620bc2075a6c534d981a2f045daa5f79dfe50d512"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c717eab1b6d898c00a3e0fa7781d615b5c5136bbd40abe82be100bb06df7a56"}, - {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3ddd1d66d1a714ce43acfe337190be055cdc221d911fc886d5a3aae28e14b76d"}, - {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:822172853d7a9cf6da95d7b66a16c7160cb99ae6df55d44373888181d7a06edc"}, - {file = "pendulum-3.0.0-cp39-none-win_amd64.whl", hash = "sha256:840de1b49cf1ec54c225a2a6f4f0784d50bd47f68e41dc005b7f67c7d5b5f3ae"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3b1f74d1e6ffe5d01d6023870e2ce5c2191486928823196f8575dcc786e107b1"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:729e9f93756a2cdfa77d0fc82068346e9731c7e884097160603872686e570f07"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e586acc0b450cd21cbf0db6bae386237011b75260a3adceddc4be15334689a9a"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22e7944ffc1f0099a79ff468ee9630c73f8c7835cd76fdb57ef7320e6a409df4"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fa30af36bd8e50686846bdace37cf6707bdd044e5cb6e1109acbad3277232e04"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:440215347b11914ae707981b9a57ab9c7b6983ab0babde07063c6ee75c0dc6e7"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:314c4038dc5e6a52991570f50edb2f08c339debdf8cea68ac355b32c4174e820"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5acb1d386337415f74f4d1955c4ce8d0201978c162927d07df8eb0692b2d8533"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a789e12fbdefaffb7b8ac67f9d8f22ba17a3050ceaaa635cd1cc4645773a4b1e"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:860aa9b8a888e5913bd70d819306749e5eb488e6b99cd6c47beb701b22bdecf5"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:5ebc65ea033ef0281368217fbf59f5cb05b338ac4dd23d60959c7afcd79a60a0"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d9fef18ab0386ef6a9ac7bad7e43ded42c83ff7ad412f950633854f90d59afa8"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1c134ba2f0571d0b68b83f6972e2307a55a5a849e7dac8505c715c531d2a8795"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:385680812e7e18af200bb9b4a49777418c32422d05ad5a8eb85144c4a285907b"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eec91cd87c59fb32ec49eb722f375bd58f4be790cae11c1b70fac3ee4f00da0"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4386bffeca23c4b69ad50a36211f75b35a4deb6210bdca112ac3043deb7e494a"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dfbcf1661d7146d7698da4b86e7f04814221081e9fe154183e34f4c5f5fa3bf8"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:04a1094a5aa1daa34a6b57c865b25f691848c61583fb22722a4df5699f6bf74c"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5b0ec85b9045bd49dd3a3493a5e7ddfd31c36a2a60da387c419fa04abcaecb23"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0a15b90129765b705eb2039062a6daf4d22c4e28d1a54fa260892e8c3ae6e157"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:bb8f6d7acd67a67d6fedd361ad2958ff0539445ef51cbe8cd288db4306503cd0"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd69b15374bef7e4b4440612915315cc42e8575fcda2a3d7586a0d88192d0c88"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc00f8110db6898360c53c812872662e077eaf9c75515d53ecc65d886eec209a"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:83a44e8b40655d0ba565a5c3d1365d27e3e6778ae2a05b69124db9e471255c4a"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1a3604e9fbc06b788041b2a8b78f75c243021e0f512447806a6d37ee5214905d"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:92c307ae7accebd06cbae4729f0ba9fa724df5f7d91a0964b1b972a22baa482b"}, - {file = "pendulum-3.0.0.tar.gz", hash = "sha256:5d034998dea404ec31fae27af6b22cff1708f830a1ed7353be4d1019bb9f584e"}, + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, ] [package.dependencies] -python-dateutil = ">=2.6" -tzdata = ">=2020.1" - -[package.extras] -test = ["time-machine (>=2.6.0)"] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" [[package]] name = "platformdirs" @@ -713,24 +649,41 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "proto-plus" +version = "1.23.0" +description = "Beautiful, Pythonic protocol buffers." +optional = false +python-versions = ">=3.6" +files = [ + {file = "proto-plus-1.23.0.tar.gz", hash = "sha256:89075171ef11988b3fa157f5dbd8b9cf09d65fffee97e29ce403cd8defba19d2"}, + {file = "proto_plus-1.23.0-py3-none-any.whl", hash = "sha256:a829c79e619e1cf632de091013a4173deed13a55f326ef84f05af6f50ff4c82c"}, +] + +[package.dependencies] +protobuf = ">=3.19.0,<5.0.0dev" + +[package.extras] +testing = ["google-api-core[grpc] (>=1.31.5)"] + [[package]] name = "protobuf" -version = "4.25.2" +version = "4.25.3" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "protobuf-4.25.2-cp310-abi3-win32.whl", hash = "sha256:b50c949608682b12efb0b2717f53256f03636af5f60ac0c1d900df6213910fd6"}, - {file = "protobuf-4.25.2-cp310-abi3-win_amd64.whl", hash = "sha256:8f62574857ee1de9f770baf04dde4165e30b15ad97ba03ceac65f760ff018ac9"}, - {file = "protobuf-4.25.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:2db9f8fa64fbdcdc93767d3cf81e0f2aef176284071507e3ede160811502fd3d"}, - {file = "protobuf-4.25.2-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:10894a2885b7175d3984f2be8d9850712c57d5e7587a2410720af8be56cdaf62"}, - {file = "protobuf-4.25.2-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:fc381d1dd0516343f1440019cedf08a7405f791cd49eef4ae1ea06520bc1c020"}, - {file = "protobuf-4.25.2-cp38-cp38-win32.whl", hash = "sha256:33a1aeef4b1927431d1be780e87b641e322b88d654203a9e9d93f218ee359e61"}, - {file = "protobuf-4.25.2-cp38-cp38-win_amd64.whl", hash = "sha256:47f3de503fe7c1245f6f03bea7e8d3ec11c6c4a2ea9ef910e3221c8a15516d62"}, - {file = "protobuf-4.25.2-cp39-cp39-win32.whl", hash = "sha256:5e5c933b4c30a988b52e0b7c02641760a5ba046edc5e43d3b94a74c9fc57c1b3"}, - {file = "protobuf-4.25.2-cp39-cp39-win_amd64.whl", hash = "sha256:d66a769b8d687df9024f2985d5137a337f957a0916cf5464d1513eee96a63ff0"}, - {file = "protobuf-4.25.2-py3-none-any.whl", hash = "sha256:a8b7a98d4ce823303145bf3c1a8bdb0f2f4642a414b196f04ad9853ed0c8f830"}, - {file = "protobuf-4.25.2.tar.gz", hash = "sha256:fe599e175cb347efc8ee524bcd4b902d11f7262c0e569ececcb89995c15f0a5e"}, + {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"}, + {file = "protobuf-4.25.3-cp310-abi3-win_amd64.whl", hash = "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8"}, + {file = "protobuf-4.25.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d"}, + {file = "protobuf-4.25.3-cp38-cp38-win32.whl", hash = "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"}, + {file = "protobuf-4.25.3-cp38-cp38-win_amd64.whl", hash = "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4"}, + {file = "protobuf-4.25.3-cp39-cp39-win32.whl", hash = "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4"}, + {file = "protobuf-4.25.3-cp39-cp39-win_amd64.whl", hash = "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c"}, + {file = "protobuf-4.25.3-py3-none-any.whl", hash = "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9"}, + {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"}, ] [[package]] @@ -746,75 +699,76 @@ files = [ [[package]] name = "pyasn1" -version = "0.5.1" +version = "0.6.0" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +python-versions = ">=3.8" files = [ - {file = "pyasn1-0.5.1-py2.py3-none-any.whl", hash = "sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58"}, - {file = "pyasn1-0.5.1.tar.gz", hash = "sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c"}, + {file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"}, + {file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"}, ] [[package]] name = "pyasn1-modules" -version = "0.3.0" +version = "0.4.0" description = "A collection of ASN.1-based protocols modules" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +python-versions = ">=3.8" files = [ - {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"}, - {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"}, + {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"}, + {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"}, ] [package.dependencies] -pyasn1 = ">=0.4.6,<0.6.0" +pyasn1 = ">=0.4.6,<0.7.0" [[package]] name = "pydantic" -version = "1.9.2" +version = "1.10.14" description = "Data validation and settings management using python type hints" optional = false -python-versions = ">=3.6.1" +python-versions = ">=3.7" files = [ - {file = "pydantic-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9c9e04a6cdb7a363d7cb3ccf0efea51e0abb48e180c0d31dca8d247967d85c6e"}, - {file = "pydantic-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fafe841be1103f340a24977f61dee76172e4ae5f647ab9e7fd1e1fca51524f08"}, - {file = "pydantic-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afacf6d2a41ed91fc631bade88b1d319c51ab5418870802cedb590b709c5ae3c"}, - {file = "pydantic-1.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ee0d69b2a5b341fc7927e92cae7ddcfd95e624dfc4870b32a85568bd65e6131"}, - {file = "pydantic-1.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ff68fc85355532ea77559ede81f35fff79a6a5543477e168ab3a381887caea76"}, - {file = "pydantic-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c0f5e142ef8217019e3eef6ae1b6b55f09a7a15972958d44fbd228214cede567"}, - {file = "pydantic-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:615661bfc37e82ac677543704437ff737418e4ea04bef9cf11c6d27346606044"}, - {file = "pydantic-1.9.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:328558c9f2eed77bd8fffad3cef39dbbe3edc7044517f4625a769d45d4cf7555"}, - {file = "pydantic-1.9.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bd446bdb7755c3a94e56d7bdfd3ee92396070efa8ef3a34fab9579fe6aa1d84"}, - {file = "pydantic-1.9.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0b214e57623a535936005797567231a12d0da0c29711eb3514bc2b3cd008d0f"}, - {file = "pydantic-1.9.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d8ce3fb0841763a89322ea0432f1f59a2d3feae07a63ea2c958b2315e1ae8adb"}, - {file = "pydantic-1.9.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b34ba24f3e2d0b39b43f0ca62008f7ba962cff51efa56e64ee25c4af6eed987b"}, - {file = "pydantic-1.9.2-cp36-cp36m-win_amd64.whl", hash = "sha256:84d76ecc908d917f4684b354a39fd885d69dd0491be175f3465fe4b59811c001"}, - {file = "pydantic-1.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4de71c718c9756d679420c69f216776c2e977459f77e8f679a4a961dc7304a56"}, - {file = "pydantic-1.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5803ad846cdd1ed0d97eb00292b870c29c1f03732a010e66908ff48a762f20e4"}, - {file = "pydantic-1.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a8c5360a0297a713b4123608a7909e6869e1b56d0e96eb0d792c27585d40757f"}, - {file = "pydantic-1.9.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:cdb4272678db803ddf94caa4f94f8672e9a46bae4a44f167095e4d06fec12979"}, - {file = "pydantic-1.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:19b5686387ea0d1ea52ecc4cffb71abb21702c5e5b2ac626fd4dbaa0834aa49d"}, - {file = "pydantic-1.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:32e0b4fb13ad4db4058a7c3c80e2569adbd810c25e6ca3bbd8b2a9cc2cc871d7"}, - {file = "pydantic-1.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:91089b2e281713f3893cd01d8e576771cd5bfdfbff5d0ed95969f47ef6d676c3"}, - {file = "pydantic-1.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e631c70c9280e3129f071635b81207cad85e6c08e253539467e4ead0e5b219aa"}, - {file = "pydantic-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b3946f87e5cef3ba2e7bd3a4eb5a20385fe36521d6cc1ebf3c08a6697c6cfb3"}, - {file = "pydantic-1.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5565a49effe38d51882cb7bac18bda013cdb34d80ac336428e8908f0b72499b0"}, - {file = "pydantic-1.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:bd67cb2c2d9602ad159389c29e4ca964b86fa2f35c2faef54c3eb28b4efd36c8"}, - {file = "pydantic-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4aafd4e55e8ad5bd1b19572ea2df546ccace7945853832bb99422a79c70ce9b8"}, - {file = "pydantic-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:d70916235d478404a3fa8c997b003b5f33aeac4686ac1baa767234a0f8ac2326"}, - {file = "pydantic-1.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f0ca86b525264daa5f6b192f216a0d1e860b7383e3da1c65a1908f9c02f42801"}, - {file = "pydantic-1.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1061c6ee6204f4f5a27133126854948e3b3d51fcc16ead2e5d04378c199b2f44"}, - {file = "pydantic-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e78578f0c7481c850d1c969aca9a65405887003484d24f6110458fb02cca7747"}, - {file = "pydantic-1.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5da164119602212a3fe7e3bc08911a89db4710ae51444b4224c2382fd09ad453"}, - {file = "pydantic-1.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ead3cd020d526f75b4188e0a8d71c0dbbe1b4b6b5dc0ea775a93aca16256aeb"}, - {file = "pydantic-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7d0f183b305629765910eaad707800d2f47c6ac5bcfb8c6397abdc30b69eeb15"}, - {file = "pydantic-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:f1a68f4f65a9ee64b6ccccb5bf7e17db07caebd2730109cb8a95863cfa9c4e55"}, - {file = "pydantic-1.9.2-py3-none-any.whl", hash = "sha256:78a4d6bdfd116a559aeec9a4cfe77dda62acc6233f8b56a716edad2651023e5e"}, - {file = "pydantic-1.9.2.tar.gz", hash = "sha256:8cb0bc509bfb71305d7a59d00163d5f9fc4530f0881ea32c74ff4f74c85f3d3d"}, + {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, + {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, + {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, + {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, + {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, + {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, + {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, + {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, + {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, + {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, ] [package.dependencies] -typing-extensions = ">=3.7.4.3" +typing-extensions = ">=4.2.0" [package.extras] dotenv = ["python-dotenv (>=0.10.4)"] @@ -822,18 +776,33 @@ email = ["email-validator (>=1.0.3)"] [[package]] name = "pyparsing" -version = "3.1.1" +version = "3.1.2" description = "pyparsing module - Classes and methods to define and execute parsing grammars" optional = false python-versions = ">=3.6.8" files = [ - {file = "pyparsing-3.1.1-py3-none-any.whl", hash = "sha256:32c7c0b711493c72ff18a981d24f28aaf9c1fb7ed5e9667c9e84e3db623bdbfb"}, - {file = "pyparsing-3.1.1.tar.gz", hash = "sha256:ede28a1a32462f5a9705e07aea48001a08f7cf81a021585011deba701581a0db"}, + {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, + {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, ] [package.extras] diagrams = ["jinja2", "railroad-diagrams"] +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + [[package]] name = "pyrsistent" version = "0.20.0" @@ -901,35 +870,46 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xm [[package]] name = "pytest-mock" -version = "3.12.0" +version = "3.14.0" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, - {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, ] [package.dependencies] -pytest = ">=5.0" +pytest = ">=6.2.5" [package.extras] dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] six = ">=1.5" +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + [[package]] name = "pyyaml" version = "6.0.1" @@ -955,6 +935,7 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -1012,13 +993,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -1030,34 +1011,32 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" -version = "1.11.0" +version = "1.12.0" description = "Mock out responses from the requests package" optional = false python-versions = "*" files = [ - {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"}, - {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"}, + {file = "requests-mock-1.12.0.tar.gz", hash = "sha256:4e34f2a2752f0b78397fb414526605d95fcdeab021ac1f26d18960e7eb41f6a8"}, + {file = "requests_mock-1.12.0-py2.py3-none-any.whl", hash = "sha256:4f6fdf956de568e0bac99eee4ad96b391c602e614cc0ad33e7f5c72edd699e70"}, ] [package.dependencies] -requests = ">=2.3,<3" -six = "*" +requests = ">=2.22,<3" [package.extras] fixture = ["fixtures"] -test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] [[package]] name = "rsa" @@ -1075,19 +1054,19 @@ pyasn1 = ">=0.1.3" [[package]] name = "setuptools" -version = "69.0.3" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, - {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -1113,24 +1092,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, -] - -[[package]] -name = "tzdata" -version = "2023.4" -description = "Provider of IANA time zone data" -optional = false -python-versions = ">=2" -files = [ - {file = "tzdata-2023.4-py2.py3-none-any.whl", hash = "sha256:aa3ace4329eeacda5b7beb7ea08ece826c28d761cda36e747cfbf97996d39bf3"}, - {file = "tzdata-2023.4.tar.gz", hash = "sha256:dd54c94f294765522c77399649b4fefd95522479a664a0cec87f41bebc6148c9"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -1171,13 +1139,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1282,4 +1250,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "07b6fe5e724aeac85999fbabc5f09f42f26e4037ec9abd1849dcd56883263b5a" +content-hash = "e36976dd2f219f11bfa33216fb46151880a01a9e5032c2f1880ba9bee03f8ce2" diff --git a/airbyte-integrations/connectors/source-google-sheets/pyproject.toml b/airbyte-integrations/connectors/source-google-sheets/pyproject.toml index 4629c02cf92e9..e212c5eada090 100644 --- a/airbyte-integrations/connectors/source-google-sheets/pyproject.toml +++ b/airbyte-integrations/connectors/source-google-sheets/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "0.3.17" +version = "0.5.1" name = "source-google-sheets" description = "Source implementation for Google Sheets." authors = [ "Airbyte ",] @@ -12,18 +12,15 @@ readme = "README.md" documentation = "https://docs.airbyte.com/integrations/sources/google-sheets" homepage = "https://airbyte.com" repository = "https://github.com/airbytehq/airbyte" -packages = [ { include = "source_google_sheets"}] +[[tool.poetry.packages]] +include = "source_google_sheets" [tool.poetry.dependencies] python = "^3.9" -requests = "==2.31.0" -backoff = "==2.2.1" +airbyte-cdk = "^0" google-auth-httplib2 = "==0.2.0" Unidecode = "==1.3.8" -pydantic = "==1.9.2" -airbyte-cdk = "==0.51.8" google-api-python-client = "==2.114.0" -PyYAML = "==6.0.1" [tool.poetry.scripts] source-google-sheets = "source_google_sheets.run:run" diff --git a/airbyte-integrations/connectors/source-google-sheets/source_google_sheets/client.py b/airbyte-integrations/connectors/source-google-sheets/source_google_sheets/client.py index 1a521ae2467fe..6dff3db5a41dc 100644 --- a/airbyte-integrations/connectors/source-google-sheets/source_google_sheets/client.py +++ b/airbyte-integrations/connectors/source-google-sheets/source_google_sheets/client.py @@ -21,7 +21,7 @@ class Backoff: @classmethod def increase_row_batch_size(cls, details): if details["exception"].status_code == status_codes.TOO_MANY_REQUESTS and cls.row_batch_size < 1000: - cls.row_batch_size = cls.row_batch_size + 10 + cls.row_batch_size = cls.row_batch_size + 100 logger.info(f"Increasing number of records fetching due to rate limits. Current value: {cls.row_batch_size}") @staticmethod diff --git a/airbyte-integrations/connectors/source-google-sheets/source_google_sheets/source.py b/airbyte-integrations/connectors/source-google-sheets/source_google_sheets/source.py index 1c98ffa6a5634..6926ac43bd94b 100644 --- a/airbyte-integrations/connectors/source-google-sheets/source_google_sheets/source.py +++ b/airbyte-integrations/connectors/source-google-sheets/source_google_sheets/source.py @@ -149,6 +149,7 @@ def _read( catalog: ConfiguredAirbyteCatalog, ) -> Generator[AirbyteMessage, None, None]: client = GoogleSheetsClient(self.get_credentials(config)) + client.Backoff.row_batch_size = config.get("batch_size", 200) sheet_to_column_name = Helpers.parse_sheet_and_column_names_from_catalog(catalog) stream_name_to_stream = {stream.stream.name: stream for stream in catalog.streams} @@ -164,7 +165,7 @@ def _read( logger.info(f"Row counts: {sheet_row_counts}") for sheet in sheet_to_column_index_to_name.keys(): logger.info(f"Syncing sheet {sheet}") - stream = stream_name_to_stream.get(sheet) + stream = stream_name_to_stream.get(sheet).stream yield as_airbyte_message(stream, AirbyteStreamStatus.STARTED) # We revalidate the sheet here to avoid errors in case the sheet was changed after the sync started is_valid, reason = Helpers.check_sheet_is_valid(client, spreadsheet_id, sheet) diff --git a/airbyte-integrations/connectors/source-google-sheets/source_google_sheets/spec.yaml b/airbyte-integrations/connectors/source-google-sheets/source_google_sheets/spec.yaml index 5a2541b0d1a5b..af52bc87a1260 100644 --- a/airbyte-integrations/connectors/source-google-sheets/source_google_sheets/spec.yaml +++ b/airbyte-integrations/connectors/source-google-sheets/source_google_sheets/spec.yaml @@ -8,6 +8,21 @@ connectionSpecification: - credentials additionalProperties: true properties: + batch_size: + type: integer + title: Row Batch Size + description: >- + Default value is 200. + An integer representing row batch size for each sent request to Google Sheets API. + Row batch size means how many rows are processed from the google sheet, for example default value 200 + would process rows 1-201, then 201-401 and so on. + Based on Google Sheets API limits documentation, + it is possible to send up to 300 requests per minute, but each individual request has to be processed under 180 seconds, + otherwise the request returns a timeout error. In regards to this information, consider network speed and + number of columns of the google sheet when deciding a batch_size value. + Default value should cover most of the cases, but if a google sheet has over 100,000 records or more, + consider increasing batch_size value. + default: 200 spreadsheet_id: type: string title: Spreadsheet Link diff --git a/airbyte-integrations/connectors/source-google-sheets/unit_tests/test_client.py b/airbyte-integrations/connectors/source-google-sheets/unit_tests/test_client.py index f4c3ed88dfaf3..eb588ef1cd3c3 100644 --- a/airbyte-integrations/connectors/source-google-sheets/unit_tests/test_client.py +++ b/airbyte-integrations/connectors/source-google-sheets/unit_tests/test_client.py @@ -24,8 +24,8 @@ def test_backoff_increase_row_batch_size(): e = requests.HTTPError("error") e.status_code = 429 client.Backoff.increase_row_batch_size({"exception": e}) - assert client.Backoff.row_batch_size == 210 - assert client._create_range("spreadsheet_id", 0) == "spreadsheet_id!0:210" + assert client.Backoff.row_batch_size == 300 + assert client._create_range("spreadsheet_id", 0) == "spreadsheet_id!0:300" client.Backoff.row_batch_size = 1000 client.Backoff.increase_row_batch_size({"exception": e}) assert client.Backoff.row_batch_size == 1000 @@ -57,7 +57,7 @@ def test_client_get_values_on_backoff(caplog): e = requests.HTTPError("error") e.status_code = 429 client_google_sheets.Backoff.increase_row_batch_size({"exception": e}) - assert client_google_sheets.Backoff.row_batch_size == 220 + assert client_google_sheets.Backoff.row_batch_size == 310 client_google_sheets.get_values( sheet="sheet", row_cursor=0, @@ -65,4 +65,4 @@ def test_client_get_values_on_backoff(caplog): majorDimension="ROWS", ) - assert "Fetching range sheet!0:220" in caplog.text + assert "Fetching range sheet!0:310" in caplog.text diff --git a/airbyte-integrations/connectors/source-google-workspace-admin-reports/.dockerignore b/airbyte-integrations/connectors/source-google-workspace-admin-reports/.dockerignore deleted file mode 100644 index 59ee1b829e955..0000000000000 --- a/airbyte-integrations/connectors/source-google-workspace-admin-reports/.dockerignore +++ /dev/null @@ -1,6 +0,0 @@ -* -!Dockerfile -!main.py -!source_google_workspace_admin_reports -!setup.py -!secrets diff --git a/airbyte-integrations/connectors/source-google-workspace-admin-reports/.gitignore b/airbyte-integrations/connectors/source-google-workspace-admin-reports/.gitignore deleted file mode 100644 index 29fffc6a50cc9..0000000000000 --- a/airbyte-integrations/connectors/source-google-workspace-admin-reports/.gitignore +++ /dev/null @@ -1 +0,0 @@ -NEW_SOURCE_CHECKLIST.md diff --git a/airbyte-integrations/connectors/source-google-workspace-admin-reports/Dockerfile b/airbyte-integrations/connectors/source-google-workspace-admin-reports/Dockerfile deleted file mode 100644 index 6a4be680b7761..0000000000000 --- a/airbyte-integrations/connectors/source-google-workspace-admin-reports/Dockerfile +++ /dev/null @@ -1,38 +0,0 @@ -FROM python:3.9.11-alpine3.15 as base - -# build and load all requirements -FROM base as builder -WORKDIR /airbyte/integration_code - -# upgrade pip to the latest version -RUN apk --no-cache upgrade \ - && pip install --upgrade pip \ - && apk --no-cache add tzdata build-base - - -COPY setup.py ./ -# install necessary packages to a temporary folder -RUN pip install --prefix=/install . - -# build a clean environment -FROM base -WORKDIR /airbyte/integration_code - -# copy all loaded and built libraries to a pure basic image -COPY --from=builder /install /usr/local -# add default timezone settings -COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime -RUN echo "Etc/UTC" > /etc/timezone - -# bash is installed for more convenient debugging. -RUN apk --no-cache add bash - -# copy payload code only -COPY main.py ./ -COPY source_google_workspace_admin_reports ./source_google_workspace_admin_reports - -ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] - -LABEL io.airbyte.version=0.1.8 -LABEL io.airbyte.name=airbyte/source-google-workspace-admin-reports diff --git a/airbyte-integrations/connectors/source-google-workspace-admin-reports/README.md b/airbyte-integrations/connectors/source-google-workspace-admin-reports/README.md deleted file mode 100644 index baf8c2eeffabe..0000000000000 --- a/airbyte-integrations/connectors/source-google-workspace-admin-reports/README.md +++ /dev/null @@ -1,67 +0,0 @@ -# Wikipedia Pageviews Source - -This is the repository for the Wikipedia Pageviews configuration based source connector. -For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/wikipedia-pageviews). - -## Local development - -#### Create credentials -**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/wikipedia-pageviews) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_wikipedia_pageviews/spec.yaml` file. -Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. -See `integration_tests/sample_config.json` for a sample config file. - -**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source wikipedia-pageviews test creds` -and place them into `secrets/config.json`. - -### Locally running the connector docker image - - -#### Build -**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):** -```bash -airbyte-ci connectors --name source-wikipedia-pageviews build -``` - -An image will be built with the tag `airbyte/source-wikipedia-pageviews:dev`. - -**Via `docker build`:** -```bash -docker build -t airbyte/source-wikipedia-pageviews:dev . -``` - -#### Run -Then run any of the connector commands as follows: -``` -docker run --rm airbyte/source-wikipedia-pageviews:dev spec -docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-wikipedia-pageviews:dev check --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-wikipedia-pageviews:dev discover --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-wikipedia-pageviews:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json -``` - -## Testing -You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): -```bash -airbyte-ci connectors --name=source-google-workspace-admin-reports test -``` - -### Customizing acceptance Tests -Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. -If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. - -## Dependency Management -All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. -We split dependencies between two groups, dependencies that are: -* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. -* required for the testing need to go to `TEST_REQUIREMENTS` list - -### Publishing a new version of the connector -You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? -1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-google-workspace-admin-reports test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). -3. Make sure the `metadata.yaml` content is up to date. -4. Make the connector documentation and its changelog is up to date (`docs/integrations/sources/google-workspace-admin-reports.md`). -5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). -6. Pat yourself on the back for being an awesome contributor. -7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. - diff --git a/airbyte-integrations/connectors/source-google-workspace-admin-reports/acceptance-test-config.yml b/airbyte-integrations/connectors/source-google-workspace-admin-reports/acceptance-test-config.yml deleted file mode 100755 index 78e2d1fe2aa67..0000000000000 --- a/airbyte-integrations/connectors/source-google-workspace-admin-reports/acceptance-test-config.yml +++ /dev/null @@ -1,26 +0,0 @@ -# See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) -# for more information about how to configure these tests -connector_image: airbyte/source-google-workspace-admin-reports:dev -tests: - spec: - - spec_path: "source_google_workspace_admin_reports/spec.json" - connection: - - config_path: "secrets/config.json" - status: "succeed" - - config_path: "integration_tests/invalid_config.json" - status: "failed" - discovery: - - config_path: "secrets/config.json" - basic_read: - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" - empty_streams: ["admin"] - # We have active test account. New records in reports appear frequently. - # Therefore, second activity differs from first, and it brakes test. - # full_refresh: - # - config_path: "secrets/config.json" - # configured_catalog_path: "integration_tests/configured_catalog.json" - incremental: - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" - future_state_path: "integration_tests/abnormal_state.json" diff --git a/airbyte-integrations/connectors/source-google-workspace-admin-reports/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-google-workspace-admin-reports/integration_tests/abnormal_state.json deleted file mode 100755 index 10417016c164c..0000000000000 --- a/airbyte-integrations/connectors/source-google-workspace-admin-reports/integration_tests/abnormal_state.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "drive": { - "time": "2121-04-04T22:01:22.313Z" - }, - "oauth_tokens": { - "time": "2121-04-05T03:06:30.849Z" - }, - "admin": { - "time": "2121-04-05T03:06:30.849Z" - }, - "logins": { - "time": "2121-04-05T03:06:30.849Z" - }, - "meet": { - "time": "2121-04-05T03:06:30.849Z" - }, - "mobile": { - "time": "2121-04-05T03:06:30.849Z" - } -} diff --git a/airbyte-integrations/connectors/source-google-workspace-admin-reports/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-google-workspace-admin-reports/integration_tests/acceptance.py deleted file mode 100755 index 82823254d2666..0000000000000 --- a/airbyte-integrations/connectors/source-google-workspace-admin-reports/integration_tests/acceptance.py +++ /dev/null @@ -1,14 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -import pytest - -pytest_plugins = ("connector_acceptance_test.plugin",) - - -@pytest.fixture(scope="session", autouse=True) -def connector_setup(): - """This fixture is a placeholder for external resources that acceptance test might require.""" - yield diff --git a/airbyte-integrations/connectors/source-google-workspace-admin-reports/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-google-workspace-admin-reports/integration_tests/configured_catalog.json deleted file mode 100755 index 2427cc906dbb6..0000000000000 --- a/airbyte-integrations/connectors/source-google-workspace-admin-reports/integration_tests/configured_catalog.json +++ /dev/null @@ -1,76 +0,0 @@ -{ - "streams": [ - { - "stream": { - "name": "admin", - "json_schema": {}, - "supported_sync_modes": ["incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["time"] - }, - "sync_mode": "incremental", - "cursor_field": ["time"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "drive", - "json_schema": {}, - "supported_sync_modes": ["incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["time"] - }, - "sync_mode": "incremental", - "cursor_field": ["time"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "logins", - "json_schema": {}, - "supported_sync_modes": ["incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["time"] - }, - "sync_mode": "incremental", - "cursor_field": ["time"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "meet", - "json_schema": {}, - "supported_sync_modes": ["incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["time"] - }, - "sync_mode": "incremental", - "cursor_field": ["time"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "mobile", - "json_schema": {}, - "supported_sync_modes": ["incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["time"] - }, - "sync_mode": "incremental", - "cursor_field": ["time"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "oauth_tokens", - "json_schema": {}, - "supported_sync_modes": ["incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["time"] - }, - "sync_mode": "incremental", - "cursor_field": ["time"], - "destination_sync_mode": "append" - } - ] -} diff --git a/airbyte-integrations/connectors/source-google-workspace-admin-reports/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-google-workspace-admin-reports/integration_tests/invalid_config.json deleted file mode 100755 index b774ecc0558b9..0000000000000 --- a/airbyte-integrations/connectors/source-google-workspace-admin-reports/integration_tests/invalid_config.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "credentials_json": "{\n \"type\": \"service_account\"}\n", - "email": "test_email", - "lookback": 0 -} diff --git a/airbyte-integrations/connectors/source-google-workspace-admin-reports/main.py b/airbyte-integrations/connectors/source-google-workspace-admin-reports/main.py deleted file mode 100644 index 1e88c29a963d2..0000000000000 --- a/airbyte-integrations/connectors/source-google-workspace-admin-reports/main.py +++ /dev/null @@ -1,8 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from source_google_workspace_admin_reports.run import run - -if __name__ == "__main__": - run() diff --git a/airbyte-integrations/connectors/source-google-workspace-admin-reports/metadata.yaml b/airbyte-integrations/connectors/source-google-workspace-admin-reports/metadata.yaml index e8d6d40b7ad68..9735413e3c63d 100644 --- a/airbyte-integrations/connectors/source-google-workspace-admin-reports/metadata.yaml +++ b/airbyte-integrations/connectors/source-google-workspace-admin-reports/metadata.yaml @@ -16,9 +16,9 @@ data: registries: cloud: dockerImageTag: 0.1.4 - enabled: true + enabled: false oss: - enabled: true + enabled: false releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/google-workspace-admin-reports tags: @@ -27,5 +27,5 @@ data: ab_internal: sl: 100 ql: 100 - supportLevel: community + supportLevel: archived metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-google-workspace-admin-reports/requirements.txt b/airbyte-integrations/connectors/source-google-workspace-admin-reports/requirements.txt deleted file mode 100644 index 7b9114ed5867e..0000000000000 --- a/airbyte-integrations/connectors/source-google-workspace-admin-reports/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -# This file is autogenerated -- only edit if you know what you are doing. Use setup.py for declaring dependencies. --e . diff --git a/airbyte-integrations/connectors/source-google-workspace-admin-reports/sample_files/configured_catalog.json b/airbyte-integrations/connectors/source-google-workspace-admin-reports/sample_files/configured_catalog.json deleted file mode 100644 index f1a5b1d583b75..0000000000000 --- a/airbyte-integrations/connectors/source-google-workspace-admin-reports/sample_files/configured_catalog.json +++ /dev/null @@ -1,1008 +0,0 @@ -{ - "streams": [ - { - "stream": { - "name": "admin", - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "kind": { - "type": "string" - }, - "id": { - "type": "object", - "properties": { - "time": { - "type": "string" - }, - "uniqueQualifier": { - "type": "string" - }, - "applicationName": { - "type": "string" - }, - "customerId": { - "type": "string" - } - }, - "required": [ - "time", - "uniqueQualifier", - "applicationName", - "customerId" - ] - }, - "actor": { - "type": "object", - "properties": { - "callerType": { - "type": "string" - }, - "email": { - "type": "string" - }, - "profileId": { - "type": "string" - }, - "key": { - "type": "string" - } - }, - "required": ["callerType", "email", "profileId", "key"] - }, - "ownerDomain": { - "type": "string" - }, - "ipAddress": { - "type": "string" - }, - "events": { - "type": "array", - "items": [ - { - "type": "object", - "properties": { - "type": { - "type": "string" - }, - "name": { - "type": "string" - }, - "parameters": { - "type": "array", - "items": [ - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "value": { - "type": "string" - }, - "intValue": { - "type": "string" - }, - "boolValue": { - "type": "string" - } - }, - "required": ["name", "value", "intValue", "boolValue"] - } - ] - } - }, - "required": ["type", "name", "parameters"] - } - ] - } - } - }, - "supported_sync_modes": ["incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["time"] - }, - "sync_mode": "incremental", - "cursor_field": ["time"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "drive", - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "kind": { - "type": "string" - }, - "id": { - "type": "object", - "properties": { - "time": { - "type": "string" - }, - "uniqQualifier": { - "type": "string" - }, - "applicationName": { - "type": "string" - }, - "customerId": { - "type": "string" - } - }, - "required": [ - "time", - "uniqQualifier", - "applicationName", - "customerId" - ] - }, - "actor": { - "type": "object", - "properties": { - "callerType": { - "type": "string" - }, - "email": { - "type": "string" - }, - "profileId": { - "type": "string" - }, - "key": { - "type": "string" - } - }, - "required": ["callerType", "email", "profileId", "key"] - }, - "ownerDomain": { - "type": "string" - }, - "ipAddress": { - "type": "string" - }, - "events": { - "type": "array", - "items": [ - { - "type": "object", - "properties": { - "type": { - "type": "string" - }, - "name": { - "type": "string" - }, - "parameters": { - "type": "array", - "items": [ - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "boolValue": { - "type": "boolean" - } - }, - "required": ["name", "boolValue"] - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "boolValue": { - "type": "boolean" - } - }, - "required": ["name", "boolValue"] - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "value": { - "type": "string" - } - }, - "required": ["name", "value"] - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "value": { - "type": "string" - } - }, - "required": ["name", "value"] - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "value": { - "type": "string" - } - }, - "required": ["name", "value"] - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "value": { - "type": "string" - } - }, - "required": ["name", "value"] - } - ] - } - }, - "required": ["type", "name", "parameters"] - } - ] - } - } - }, - "supported_sync_modes": ["incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["time"] - }, - "sync_mode": "incremental", - "cursor_field": ["time"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "logins", - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "kind": { - "type": "string" - }, - "id": { - "type": "object", - "properties": { - "time": { - "type": "string" - }, - "uniqueQualifier": { - "type": "string" - }, - "applicationName": { - "type": "string" - }, - "customerId": { - "type": "string" - } - }, - "required": [ - "time", - "uniqueQualifier", - "applicationName", - "customerId" - ] - }, - "etag": { - "type": "string" - }, - "actor": { - "type": "object", - "properties": { - "email": { - "type": "string" - }, - "profileId": { - "type": "string" - } - }, - "required": ["email", "profileId"] - }, - "ipAddress": { - "type": "string" - }, - "events": { - "type": "array", - "items": [ - { - "type": "object", - "properties": { - "type": { - "type": "string" - }, - "name": { - "type": "string" - }, - "parameters": { - "type": "array", - "items": [ - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "value": { - "type": "string" - } - }, - "required": ["name", "value"] - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "multiValue": { - "type": "array", - "items": [ - { - "type": "string" - } - ] - } - }, - "required": ["name", "multiValue"] - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "boolValue": { - "type": "boolean" - } - }, - "required": ["name", "boolValue"] - } - ] - } - }, - "required": ["type", "name", "parameters"] - } - ] - } - } - }, - "supported_sync_modes": ["incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["time"] - }, - "sync_mode": "incremental", - "cursor_field": ["time"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "meet", - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "kind": { - "type": "string" - }, - "id": { - "type": "object", - "properties": { - "time": { - "type": "string" - }, - "uniqueQualifier": { - "type": "string" - }, - "applicationName": { - "type": "string" - }, - "customerId": { - "type": "string" - } - }, - "required": [ - "time", - "uniqueQualifier", - "applicationName", - "customerId" - ] - }, - "etag": { - "type": "string" - }, - "actor": { - "type": "object", - "properties": { - "callerType": { - "type": "string" - }, - "email": { - "type": "string" - }, - "profileId": { - "type": "string" - } - }, - "required": ["callerType", "email", "profileId"] - }, - "events": { - "type": "array", - "items": [ - { - "type": "object", - "properties": { - "type": { - "type": "string" - }, - "name": { - "type": "string" - }, - "parameters": { - "type": "array", - "items": [ - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "value": { - "type": "string" - } - }, - "required": ["name", "value"] - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "value": { - "type": "string" - } - }, - "required": ["name", "value"] - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "value": { - "type": "string" - } - }, - "required": ["name", "value"] - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "value": { - "type": "string" - } - }, - "required": ["name", "value"] - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "value": { - "type": "string" - } - }, - "required": ["name", "value"] - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "value": { - "type": "string" - } - }, - "required": ["name", "value"] - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "value": { - "type": "string" - } - }, - "required": ["name", "value"] - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "intValue": { - "type": "string" - } - }, - "required": ["name", "intValue"] - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "value": { - "type": "string" - } - }, - "required": ["name", "value"] - } - ] - } - }, - "required": ["type", "name", "parameters"] - } - ] - } - } - }, - "supported_sync_modes": ["incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["time"] - }, - "sync_mode": "incremental", - "cursor_field": ["time"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "mobile", - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "kind": { - "type": "string" - }, - "id": { - "type": "object", - "properties": { - "time": { - "type": "string" - }, - "uniqueQualifier": { - "type": "string" - }, - "applicationName": { - "type": "string" - }, - "customerId": { - "type": "string" - } - }, - "required": [ - "time", - "uniqueQualifier", - "applicationName", - "customerId" - ] - }, - "etag": { - "type": "string" - }, - "actor": { - "type": "object", - "properties": { - "callerType": { - "type": "string" - }, - "email": { - "type": "string" - }, - "profileId": { - "type": "string" - } - }, - "required": ["callerType", "email", "profileId"] - }, - "events": { - "type": "array", - "items": [ - { - "type": "object", - "properties": { - "type": { - "type": "string" - }, - "name": { - "type": "string" - }, - "parameters": { - "type": "array", - "items": [ - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "value": { - "type": "string" - } - }, - "required": ["name", "value"] - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "value": { - "type": "string" - } - }, - "required": ["name", "value"] - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "value": { - "type": "string" - } - }, - "required": ["name", "value"] - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "value": { - "type": "string" - } - }, - "required": ["name", "value"] - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "value": { - "type": "string" - } - }, - "required": ["name", "value"] - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "value": { - "type": "string" - } - }, - "required": ["name", "value"] - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "value": { - "type": "string" - } - }, - "required": ["name", "value"] - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "intValue": { - "type": "string" - } - }, - "required": ["name", "intValue"] - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "value": { - "type": "string" - } - }, - "required": ["name", "value"] - } - ] - } - }, - "required": ["type", "name", "parameters"] - } - ] - } - } - }, - "supported_sync_modes": ["incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["time"] - }, - "sync_mode": "incremental", - "cursor_field": ["time"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "oauth_tokens", - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "kind": { - "type": "string" - }, - "id": { - "type": "object", - "properties": { - "time": { - "type": "string" - }, - "uniqueQualifier": { - "type": "string" - }, - "applicationName": { - "type": "string" - }, - "customerId": { - "type": "string" - } - }, - "required": [ - "time", - "uniqueQualifier", - "applicationName", - "customerId" - ] - }, - "etag": { - "type": "string" - }, - "actor": { - "type": "object", - "properties": { - "email": { - "type": "string" - }, - "profileId": { - "type": "string" - } - }, - "required": ["email", "profileId"] - }, - "ipAddress": { - "type": "string" - }, - "events": { - "type": "array", - "items": [ - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "parameters": { - "type": "array", - "items": [ - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "value": { - "type": "string" - } - }, - "required": ["name", "value"] - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "value": { - "type": "string" - } - }, - "required": ["name", "value"] - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "value": { - "type": "string" - } - }, - "required": ["name", "value"] - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "multiMessageValue": { - "type": "array", - "items": [ - { - "type": "object", - "properties": { - "parameter": { - "type": "array", - "items": [ - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "value": { - "type": "string" - } - }, - "required": ["name", "value"] - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "multiValue": { - "type": "array", - "items": [ - { - "type": "string" - } - ] - } - }, - "required": ["name", "multiValue"] - } - ] - } - }, - "required": ["parameter"] - }, - { - "type": "object", - "properties": { - "parameter": { - "type": "array", - "items": [ - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "value": { - "type": "string" - } - }, - "required": ["name", "value"] - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "multiValue": { - "type": "array", - "items": [ - { - "type": "string" - } - ] - } - }, - "required": ["name", "multiValue"] - } - ] - } - }, - "required": ["parameter"] - } - ] - } - }, - "required": ["name", "multiMessageValue"] - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "multiValue": { - "type": "array", - "items": [ - { - "type": "string" - }, - { - "type": "string" - } - ] - } - }, - "required": ["name", "multiValue"] - } - ] - } - }, - "required": ["name", "parameters"] - } - ] - } - } - }, - "supported_sync_modes": ["incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["time"] - }, - "sync_mode": "incremental", - "cursor_field": ["time"], - "destination_sync_mode": "append" - } - ] -} diff --git a/airbyte-integrations/connectors/source-google-workspace-admin-reports/setup.py b/airbyte-integrations/connectors/source-google-workspace-admin-reports/setup.py deleted file mode 100644 index 9d85298d1b15f..0000000000000 --- a/airbyte-integrations/connectors/source-google-workspace-admin-reports/setup.py +++ /dev/null @@ -1,51 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = [ - "airbyte-cdk~=0.1", - "google-api-python-client==2.0.2", - "google-auth-httplib2==0.1.0", - "google-auth-oauthlib==0.4.3", - "backoff==1.10.0", - "pendulum==2.1.2", -] - -TEST_REQUIREMENTS = [ - "requests-mock~=1.9.3", - "pytest~=6.1", - "pytest-mock~=3.6.1", -] - -setup( - entry_points={ - "console_scripts": [ - "source-google-workspace-admin-reports=source_google_workspace_admin_reports.run:run", - ], - }, - name="source_google_workspace_admin_reports", - description="Source implementation for Google Workspace Admin Reports.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={ - "": [ - # Include yaml files in the package (if any) - "*.yml", - "*.yaml", - # Include all json files in the package, up to 4 levels deep - "*.json", - "*/*.json", - "*/*/*.json", - "*/*/*/*.json", - "*/*/*/*/*.json", - ] - }, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/__init__.py b/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/__init__.py deleted file mode 100644 index 61310a7b08beb..0000000000000 --- a/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .source import SourceGoogleWorkspaceAdminReports - -__all__ = ["SourceGoogleWorkspaceAdminReports"] diff --git a/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/api.py b/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/api.py deleted file mode 100644 index 1fb9b723c15a5..0000000000000 --- a/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/api.py +++ /dev/null @@ -1,193 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -import json -import socket -from abc import ABC, abstractmethod -from datetime import datetime, timedelta -from functools import partial -from typing import Any, Callable, Dict, Iterator, Mapping, Optional, Sequence - -import backoff -import pendulum -import pytz -from airbyte_cdk.entrypoint import logger -from google.oauth2 import service_account -from googleapiclient.discovery import Resource, build -from googleapiclient.errors import HttpError as GoogleApiHttpError - -from .utils import rate_limit_handling - -SCOPES = ["https://www.googleapis.com/auth/admin.reports.audit.readonly", "https://www.googleapis.com/auth/admin.reports.usage.readonly"] - - -class API: - def __init__(self, credentials_json: str, email: str, lookback: Optional[int] = None): - self._creds = None - self._credentials_json = credentials_json - self._admin_email = email - self._resource = None - self.lookback = lookback - - def _load_account_info(self) -> Dict: - account_info = json.loads(self._credentials_json) - return account_info - - def _obtain_creds(self) -> service_account.Credentials: - account_info = self._load_account_info() - creds = service_account.Credentials.from_service_account_info(account_info, scopes=SCOPES) - self._creds = creds.with_subject(self._admin_email) - - def _construct_resource(self) -> Resource: - if not self._creds: - self._obtain_creds() - service = build("admin", "reports_v1", credentials=self._creds) - return service - - def _get_resource(self, name: str): - service = self._construct_resource() - return getattr(service, name) - - @backoff.on_exception(backoff.expo, (GoogleApiHttpError, socket.timeout), max_tries=7, giveup=rate_limit_handling) - def get(self, name: str, params: Dict = None) -> Dict: - if not self._resource: - self._resource = self._get_resource(name) - response = self._resource().list(**params).execute() - return response - - -class StreamAPI(ABC): - results_per_page = 100 - - def __init__(self, api: API, *args, **kwargs): - super().__init__(*args, **kwargs) - self._api = api - self._start_time = None - if self._api.lookback: - base_start_time = datetime.utcnow() - timedelta(self._api.lookback) - self._start_time = base_start_time.replace(tzinfo=pytz.UTC).isoformat() - - @property - @abstractmethod - def name(self): - """Name of the stream""" - - def _api_get(self, resource: str, params: Dict = None): - return self._api.get(resource, params=params) - - @abstractmethod - def list(self, fields: Sequence[str] = None) -> Iterator[dict]: - """Iterate over entities""" - - @abstractmethod - def process_response(self, response: Dict) -> Iterator[dict]: - """Process Google Workspace Admin SDK Reports API response""" - - def read(self, getter: Callable, params: Dict = None) -> Iterator: - """Read using getter""" - params = params or {} - params["maxResults"] = self.results_per_page - while True: - batch = getter(params={**params}) - yield from self.process_response(batch) - - if "nextPageToken" in batch: - params["pageToken"] = batch["nextPageToken"] - else: - break - - -class IncrementalStreamAPI(StreamAPI, ABC): - """Stream that supports state and incremental read""" - - state_pk = "time" - - @property - def state(self) -> Optional[Mapping[str, Any]]: - """Current state, if wasn't set return None""" - if self._state: - return {self.state_pk: self._state.isoformat()} - return None - - @state.setter - def state(self, value): - self._state = pendulum.parse(value[self.state_pk]) - self._start_time = self._state.to_iso8601_string() - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self._state = None - - def read(self, getter: Callable, params: Mapping[str, Any] = None) -> Iterator: - """Update cursor(state)""" - params = params or {} - cursor = None - for record in super().read(getter, params): - "Report API return records from newest to oldest" - if not cursor: - cursor = pendulum.parse(record[self.state_pk]) - record[self.state_pk] = pendulum.parse(record[self.state_pk]).isoformat() - yield record - - if cursor: - new_state = max(cursor, self._state) if self._state else cursor - if new_state != self._state: - logger.info(f"Advancing bookmark for {self.name} stream from {self._state} to {new_state}") - self._state = new_state - - -class ActivitiesAPI(IncrementalStreamAPI): - application_name = None - - def get_params(self) -> Dict: - params = {"userKey": "all", "applicationName": self.application_name} - - if self._start_time: - params["startTime"] = self._start_time - - return params - - def process_response(self, response: Dict) -> Iterator[dict]: - activities = response.get("items", []) - for activity in activities: - activity_id = activity.get("id", {}) - if "time" in activity_id: - # place time property in top level - activity["time"] = activity_id["time"] - yield activity - - def list(self, fields: Sequence[str] = None) -> Iterator[dict]: - params = self.get_params() - yield from self.read(partial(self._api_get, resource="activities"), params=params) - - -class AdminAPI(ActivitiesAPI): - name = "Admin" - application_name = "admin" - - -class DriveAPI(ActivitiesAPI): - name = "Drive" - application_name = "drive" - - -class LoginsAPI(ActivitiesAPI): - name = "Logins" - application_name = "login" - - -class MeetAPI(ActivitiesAPI): - name = "Meet" - application_name = "meet" - - -class MobileAPI(ActivitiesAPI): - name = "Mobile" - application_name = "mobile" - - -class OAuthTokensAPI(ActivitiesAPI): - name = "OAuth Tokens" - application_name = "token" diff --git a/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/client.py b/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/client.py deleted file mode 100644 index 8498f4a615119..0000000000000 --- a/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/client.py +++ /dev/null @@ -1,52 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from typing import Any, Mapping, Optional, Tuple - -from airbyte_cdk.sources.deprecated.client import BaseClient - -from .api import API, AdminAPI, DriveAPI, IncrementalStreamAPI, LoginsAPI, MeetAPI, MobileAPI, OAuthTokensAPI - - -class Client(BaseClient): - def __init__(self, credentials_json: str, email: str, lookback: Optional[int] = None): - self._api = API(credentials_json, email, lookback) - self._apis = { - "admin": AdminAPI(self._api), - "drive": DriveAPI(self._api), - "logins": LoginsAPI(self._api), - "meet": MeetAPI(self._api), - "mobile": MobileAPI(self._api), - "oauth_tokens": OAuthTokensAPI(self._api), - } - super().__init__() - - def stream_has_state(self, name: str) -> bool: - """Tell if stream supports incremental sync""" - return isinstance(self._apis[name], IncrementalStreamAPI) - - def get_stream_state(self, name: str) -> Any: - """Get state of stream with corresponding name""" - return self._apis[name].state - - def set_stream_state(self, name: str, state: Any): - """Set state of stream with corresponding name""" - self._apis[name].state = state - - def _enumerate_methods(self) -> Mapping[str, callable]: - return {name: api.list for name, api in self._apis.items()} - - def health_check(self) -> Tuple[bool, str]: - alive = True - error_msg = None - - try: - params = {"userKey": "all", "applicationName": "login"} - self._api.get(name="activities", params=params) - except Exception as error: - alive = False - error_msg = repr(error) - - return alive, error_msg diff --git a/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/run.py b/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/run.py deleted file mode 100644 index b5ecb1fabd637..0000000000000 --- a/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/run.py +++ /dev/null @@ -1,14 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -import sys - -from airbyte_cdk.entrypoint import launch -from source_google_workspace_admin_reports import SourceGoogleWorkspaceAdminReports - - -def run(): - source = SourceGoogleWorkspaceAdminReports() - launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/schemas/admin.json b/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/schemas/admin.json deleted file mode 100644 index 06589fde0fdbe..0000000000000 --- a/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/schemas/admin.json +++ /dev/null @@ -1,121 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "kind": { - "type": "string" - }, - "id": { - "type": "object", - "properties": { - "time": { - "type": "string" - }, - "uniqueQualifier": { - "type": "string" - }, - "applicationName": { - "type": "string" - }, - "customerId": { - "type": "string" - } - } - }, - "actor": { - "type": "object", - "properties": { - "callerType": { - "type": "string" - }, - "email": { - "type": "string" - }, - "profileId": { - "type": "string" - }, - "key": { - "type": "string" - } - } - }, - "ownerDomain": { - "type": "string" - }, - "ipAddress": { - "type": "string" - }, - "events": { - "type": "array", - "items": { - "type": "object", - "properties": { - "type": { - "type": "string" - }, - "name": { - "type": "string" - }, - "parameters": { - "type": "array", - "items": { - "type": "object", - "anyOf": [ - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "value": { - "type": "string" - } - } - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "intValue": { - "type": "string" - } - } - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "boolValue": { - "type": "boolean" - } - } - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "multiValue": { - "type": "array", - "items": { - "type": "string" - } - } - } - } - ] - } - } - } - } - }, - "time": { - "type": "string" - } - } -} diff --git a/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/schemas/drive.json b/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/schemas/drive.json deleted file mode 100644 index 80595ba3b7b0d..0000000000000 --- a/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/schemas/drive.json +++ /dev/null @@ -1,115 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "kind": { - "type": "string" - }, - "id": { - "type": "object", - "properties": { - "time": { - "type": "string" - }, - "uniqQualifier": { - "type": "string" - }, - "applicationName": { - "type": "string" - }, - "customerId": { - "type": "string" - } - } - }, - "etag": { - "type": ["string"] - }, - "actor": { - "type": "object", - "properties": { - "email": { - "type": "string" - }, - "profileId": { - "type": "string" - } - } - }, - "ipAddress": { - "type": "string" - }, - "events": { - "type": "array", - "items": { - "type": "object", - "properties": { - "type": { - "type": "string" - }, - "name": { - "type": "string" - }, - "parameters": { - "type": "array", - "items": { - "type": "object", - "anyOf": [ - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "value": { - "type": "string" - } - } - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "intValue": { - "type": "string" - } - } - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "boolValue": { - "type": "boolean" - } - } - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "multiValue": { - "type": "array", - "items": { - "type": "string" - } - } - } - } - ] - } - } - } - } - }, - "time": { - "type": "string" - } - } -} diff --git a/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/schemas/logins.json b/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/schemas/logins.json deleted file mode 100644 index fcd940b36634e..0000000000000 --- a/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/schemas/logins.json +++ /dev/null @@ -1,117 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "kind": { - "type": "string" - }, - "id": { - "type": "object", - "properties": { - "time": { - "type": "string" - }, - "uniqueQualifier": { - "type": "string" - }, - "applicationName": { - "type": "string" - }, - "customerId": { - "type": "string" - } - }, - "required": ["time", "uniqueQualifier", "applicationName", "customerId"] - }, - "etag": { - "type": "string" - }, - "actor": { - "type": "object", - "properties": { - "email": { - "type": "string" - }, - "profileId": { - "type": "string" - } - }, - "required": ["email"] - }, - "ipAddress": { - "type": "string" - }, - "events": { - "type": "array", - "items": { - "type": "object", - "properties": { - "type": { - "type": "string" - }, - "name": { - "type": "string" - }, - "parameters": { - "type": "array", - "items": { - "type": "object", - "anyOf": [ - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "value": { - "type": "string" - } - } - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "intValue": { - "type": "string" - } - } - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "boolValue": { - "type": "boolean" - } - } - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "multiValue": { - "type": "array", - "items": { - "type": "string" - } - } - } - } - ] - } - } - } - } - }, - "time": { - "type": "string" - } - } -} diff --git a/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/schemas/meet.json b/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/schemas/meet.json deleted file mode 100644 index d7599231bc2e8..0000000000000 --- a/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/schemas/meet.json +++ /dev/null @@ -1,116 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "kind": { - "type": "string" - }, - "id": { - "type": "object", - "properties": { - "time": { - "type": "string" - }, - "uniqueQualifier": { - "type": "string" - }, - "applicationName": { - "type": "string" - }, - "customerId": { - "type": "string" - } - }, - "required": ["time", "uniqueQualifier", "applicationName", "customerId"] - }, - "etag": { - "type": "string" - }, - "actor": { - "type": "object", - "properties": { - "callerType": { - "type": ["null", "string"] - }, - "email": { - "type": ["null", "string"] - }, - "profileId": { - "type": ["null", "string"] - } - } - }, - "events": { - "type": "array", - "items": { - "type": "object", - "properties": { - "type": { - "type": "string" - }, - "name": { - "type": "string" - }, - "parameters": { - "type": "array", - "items": { - "type": "object", - "anyOf": [ - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "value": { - "type": "string" - } - } - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "intValue": { - "type": "string" - } - } - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "boolValue": { - "type": "boolean" - } - } - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "multiValue": { - "type": "array", - "items": { - "type": "string" - } - } - } - } - ] - } - } - } - } - }, - "time": { - "type": "string" - } - } -} diff --git a/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/schemas/mobile.json b/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/schemas/mobile.json deleted file mode 100644 index e653311d6f851..0000000000000 --- a/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/schemas/mobile.json +++ /dev/null @@ -1,117 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "kind": { - "type": "string" - }, - "id": { - "type": "object", - "properties": { - "time": { - "type": "string" - }, - "uniqueQualifier": { - "type": "string" - }, - "applicationName": { - "type": "string" - }, - "customerId": { - "type": "string" - } - }, - "required": ["time", "uniqueQualifier", "applicationName", "customerId"] - }, - "etag": { - "type": "string" - }, - "actor": { - "type": "object", - "properties": { - "callerType": { - "type": "string" - }, - "email": { - "type": "string" - }, - "profileId": { - "type": "string" - } - }, - "required": ["callerType", "email", "profileId"] - }, - "events": { - "type": "array", - "items": { - "type": "object", - "properties": { - "type": { - "type": "string" - }, - "name": { - "type": "string" - }, - "parameters": { - "type": "array", - "items": { - "type": "object", - "anyOf": [ - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "value": { - "type": "string" - } - } - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "intValue": { - "type": "string" - } - } - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "boolValue": { - "type": "boolean" - } - } - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "multiValue": { - "type": "array", - "items": { - "type": "string" - } - } - } - } - ] - } - } - } - } - }, - "time": { - "type": "string" - } - } -} diff --git a/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/schemas/oauth_tokens.json b/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/schemas/oauth_tokens.json deleted file mode 100644 index 9acdc97e92eba..0000000000000 --- a/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/schemas/oauth_tokens.json +++ /dev/null @@ -1,185 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "kind": { - "type": "string" - }, - "id": { - "type": "object", - "properties": { - "time": { - "type": "string" - }, - "uniqueQualifier": { - "type": "string" - }, - "applicationName": { - "type": "string" - }, - "customerId": { - "type": "string" - } - }, - "required": ["time", "uniqueQualifier", "applicationName", "customerId"] - }, - "etag": { - "type": "string" - }, - "actor": { - "type": "object", - "properties": { - "email": { - "type": "string" - }, - "profileId": { - "type": "string" - } - }, - "required": ["email", "profileId"] - }, - "ipAddress": { - "type": "string" - }, - "events": { - "type": "array", - "items": { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "parameters": { - "type": "array", - "items": { - "type": "object", - "anyOf": [ - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "value": { - "type": "string" - } - } - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "intValue": { - "type": "string" - } - } - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "boolValue": { - "type": "boolean" - } - } - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "multiValue": { - "type": "array", - "items": { - "type": "string" - } - } - } - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "multiMessageValue": { - "type": "array", - "items": { - "type": "object", - "properties": { - "parameter": { - "type": "array", - "items": { - "type": "object", - "anyOf": [ - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "value": { - "type": "string" - } - } - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "intValue": { - "type": "string" - } - } - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "boolValue": { - "type": "boolean" - } - } - }, - { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "multiValue": { - "type": "array", - "items": { - "type": "string" - } - } - } - } - ] - } - } - } - } - } - } - } - ] - } - } - } - } - }, - "time": { - "type": "string" - } - } -} diff --git a/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/source.py b/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/source.py deleted file mode 100644 index cbe4964a0157e..0000000000000 --- a/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/source.py +++ /dev/null @@ -1,12 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from airbyte_cdk.sources.deprecated.base_source import BaseSource - -from .client import Client - - -class SourceGoogleWorkspaceAdminReports(BaseSource): - client_class = Client diff --git a/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/spec.json b/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/spec.json deleted file mode 100644 index 25a5263cedaca..0000000000000 --- a/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/spec.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.com/integrations/sources/google-workspace-admin-reports", - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Google Directory Spec", - "type": "object", - "required": ["credentials_json", "email"], - "additionalProperties": false, - "properties": { - "credentials_json": { - "type": "string", - "title": "Credentials JSON", - "description": "The contents of the JSON service account key. See the docs for more information on how to generate this key.", - "airbyte_secret": true - }, - "email": { - "type": "string", - "title": "Email", - "description": "The email of the user, who has permissions to access the Google Workspace Admin APIs." - }, - "lookback": { - "type": "integer", - "title": "Lookback Window in Days", - "minimum": 0, - "maximum": 180, - "description": "Sets the range of time shown in the report. The maximum value allowed by the Google API is 180 days." - } - } - } -} diff --git a/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/utils.py b/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/utils.py deleted file mode 100644 index 4eae3d9c218e0..0000000000000 --- a/airbyte-integrations/connectors/source-google-workspace-admin-reports/source_google_workspace_admin_reports/utils.py +++ /dev/null @@ -1,16 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from googleapiclient.errors import HttpError - - -def rate_limit_handling(error): - retried_cases = [ - (503,), - ] - - if error.__class__ == HttpError: - return (error.resp.status,) not in retried_cases - return False diff --git a/airbyte-integrations/connectors/source-google-workspace-admin-reports/unit_tests/unit_test.py b/airbyte-integrations/connectors/source-google-workspace-admin-reports/unit_tests/unit_test.py deleted file mode 100644 index 219ae0142c724..0000000000000 --- a/airbyte-integrations/connectors/source-google-workspace-admin-reports/unit_tests/unit_test.py +++ /dev/null @@ -1,7 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -def test_example_method(): - assert True diff --git a/airbyte-integrations/connectors/source-greenhouse/metadata.yaml b/airbyte-integrations/connectors/source-greenhouse/metadata.yaml index 60f85374bd96b..4e609c099ef77 100644 --- a/airbyte-integrations/connectors/source-greenhouse/metadata.yaml +++ b/airbyte-integrations/connectors/source-greenhouse/metadata.yaml @@ -10,12 +10,13 @@ data: connectorSubtype: api connectorType: source definitionId: 59f1e50a-331f-4f09-b3e8-2e8d4d355f44 - dockerImageTag: 0.5.0 + dockerImageTag: 0.5.1 dockerRepository: airbyte/source-greenhouse documentationUrl: https://docs.airbyte.com/integrations/sources/greenhouse githubIssueLabel: source-greenhouse icon: greenhouse.svg license: MIT + maxSecondsBetweenMessages: 10 name: Greenhouse remoteRegistries: pypi: diff --git a/airbyte-integrations/connectors/source-greenhouse/poetry.lock b/airbyte-integrations/connectors/source-greenhouse/poetry.lock index d00402201707a..c3ef42e0e3759 100644 --- a/airbyte-integrations/connectors/source-greenhouse/poetry.lock +++ b/airbyte-integrations/connectors/source-greenhouse/poetry.lock @@ -1,14 +1,14 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.63.2" +version = "0.70.0" description = "A framework for writing Airbyte Connectors." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte-cdk-0.63.2.tar.gz", hash = "sha256:b2edc160f560352a816f3a266b5dfa6dfe37868add1e3a0a2628eb19ba771ed1"}, - {file = "airbyte_cdk-0.63.2-py3-none-any.whl", hash = "sha256:8698cb94514f35577123520954503cb2da407423af109dffd03644ba8b0093cd"}, + {file = "airbyte-cdk-0.70.0.tar.gz", hash = "sha256:09849c157946058cac3ff5023cb29f31c00aa899be668254968510414543ec2c"}, + {file = "airbyte_cdk-0.70.0-py3-none-any.whl", hash = "sha256:aac9c605b3de341b303ebf45b60148c3b35732383030cc5aab5cede40316bc00"}, ] [package.dependencies] @@ -32,8 +32,8 @@ requests-cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] @@ -104,13 +104,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -486,13 +486,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -721,13 +721,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -876,19 +876,19 @@ test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "tes [[package]] name = "setuptools" -version = "69.1.0" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -914,13 +914,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -1050,4 +1050,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "ad41b784b46cb6a10f61c59bcdea7023d6326bd7d735a29e673ac23cf4ef9e0e" +content-hash = "78dcdf1b3175080151595804ca598d98f57880fb4ac8b267a9a60dc66beec03c" diff --git a/airbyte-integrations/connectors/source-greenhouse/pyproject.toml b/airbyte-integrations/connectors/source-greenhouse/pyproject.toml index 2da2664aef987..ca3a28c44b884 100644 --- a/airbyte-integrations/connectors/source-greenhouse/pyproject.toml +++ b/airbyte-integrations/connectors/source-greenhouse/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "0.5.0" +version = "0.5.1" name = "source-greenhouse" description = "Source implementation for Greenhouse." authors = [ "Airbyte ",] @@ -17,7 +17,7 @@ include = "source_greenhouse" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "==0.63.2" +airbyte-cdk = "^0" dataclasses-jsonschema = "==2.15.1" [tool.poetry.scripts] diff --git a/airbyte-integrations/connectors/source-greenhouse/source_greenhouse/components.py b/airbyte-integrations/connectors/source-greenhouse/source_greenhouse/components.py index fe355f0984bb3..f68b28703faa6 100644 --- a/airbyte-integrations/connectors/source-greenhouse/source_greenhouse/components.py +++ b/airbyte-integrations/connectors/source-greenhouse/source_greenhouse/components.py @@ -25,7 +25,7 @@ def __post_init__(self, parameters: Mapping[str, Any]): self._state = {} def stream_slices(self) -> Iterable[StreamSlice]: - yield {self.request_cursor_field: self._state.get(self.cursor_field, self.START_DATETIME)} + yield StreamSlice(partition={}, cursor_slice={self.request_cursor_field: self._state.get(self.cursor_field, self.START_DATETIME)}) def _max_dt_str(self, *args: str) -> Optional[str]: new_state_candidates = list(map(lambda x: datetime.datetime.strptime(x, self.DATETIME_FORMAT), filter(None, args))) @@ -108,12 +108,15 @@ def stream_slices(self) -> Iterable[StreamSlice]: for parent_record in self.parent_stream.read_records( sync_mode=SyncMode.full_refresh, cursor_field=None, stream_slice=parent_stream_slice, stream_state=None ): - parent_state_value = parent_record.get(self.parent_key) - yield { - self.stream_slice_field: parent_state_value, - self.request_cursor_field: self._state.get(str(parent_state_value), {}).get(self.cursor_field, self.START_DATETIME), + parent_primary_key = parent_record.get(self.parent_key) + + partition = {self.stream_slice_field: parent_primary_key} + cursor_slice = { + self.request_cursor_field: self._state.get(str(parent_primary_key), {}).get(self.cursor_field, self.START_DATETIME) } + yield StreamSlice(partition=partition, cursor_slice=cursor_slice) + def set_initial_state(self, stream_state: StreamState) -> None: if self.stream_slice_field in stream_state: return diff --git a/airbyte-integrations/connectors/source-greenhouse/source_greenhouse/manifest.yaml b/airbyte-integrations/connectors/source-greenhouse/source_greenhouse/manifest.yaml index f3fc8f6b5c77a..8b7f18e3c51f5 100644 --- a/airbyte-integrations/connectors/source-greenhouse/source_greenhouse/manifest.yaml +++ b/airbyte-integrations/connectors/source-greenhouse/source_greenhouse/manifest.yaml @@ -1,4 +1,4 @@ -version: "0.29.0" +version: "0.70.0" definitions: schema_loader: diff --git a/airbyte-integrations/connectors/source-harvest/README.md b/airbyte-integrations/connectors/source-harvest/README.md index a4e109d1f20c3..ed2dcbaaa64f5 100644 --- a/airbyte-integrations/connectors/source-harvest/README.md +++ b/airbyte-integrations/connectors/source-harvest/README.md @@ -30,7 +30,7 @@ See `sample_files/sample_config.json` for a sample config file. poetry run source-harvest spec poetry run source-harvest check --config secrets/config.json poetry run source-harvest discover --config secrets/config.json -poetry run source-harvest read --config secrets/config.json --catalog sample_files/configured_catalog.json +poetry run source-harvest read --config secrets/config.json --catalog integration_tests/configured_catalog.json ``` ### Running unit tests diff --git a/airbyte-integrations/connectors/source-harvest/acceptance-test-config.yml b/airbyte-integrations/connectors/source-harvest/acceptance-test-config.yml index 8333c153e8259..46c1472ab0e5c 100644 --- a/airbyte-integrations/connectors/source-harvest/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-harvest/acceptance-test-config.yml @@ -3,34 +3,30 @@ test_strictness_level: "high" acceptance_tests: spec: tests: - - spec_path: "source_harvest/spec.json" + - spec_path: "source_harvest/spec.yaml" connection: tests: - - config_path: "secrets/config.json" - status: "succeed" - - config_path: "secrets/old_config.json" - status: "succeed" - config_path: "secrets/config_oauth.json" status: "succeed" - config_path: "integration_tests/invalid_config.json" status: "failed" discovery: tests: - - config_path: "secrets/config.json" + - config_path: "secrets/config_oauth.json" basic_read: tests: - - config_path: "secrets/config.json" + - config_path: "secrets/config_oauth.json" expect_records: path: "integration_tests/expected_records.jsonl" fail_on_extra_columns: false incremental: tests: - - config_path: "secrets/config_with_date_range.json" + - config_path: "secrets/config_oauth.json" configured_catalog_path: "integration_tests/incremental_catalog.json" future_state: future_state_path: "integration_tests/abnormal_state.json" timeout_seconds: 2400 full_refresh: tests: - - config_path: "secrets/config.json" + - config_path: "secrets/config_oauth.json" configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-harvest/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-harvest/integration_tests/abnormal_state.json index cd404aa473ec7..d76c8db3df054 100644 --- a/airbyte-integrations/connectors/source-harvest/integration_tests/abnormal_state.json +++ b/airbyte-integrations/connectors/source-harvest/integration_tests/abnormal_state.json @@ -20,20 +20,6 @@ "stream_descriptor": { "name": "invoices" } } }, - { - "type": "STREAM", - "stream": { - "stream_state": { "updated_at": "2217-06-26T21:20:07Z" }, - "stream_descriptor": { "name": "invoice_messages" } - } - }, - { - "type": "STREAM", - "stream": { - "stream_state": { "updated_at": "2217-06-26T21:20:07Z" }, - "stream_descriptor": { "name": "invoice_payments" } - } - }, { "type": "STREAM", "stream": { @@ -48,13 +34,6 @@ "stream_descriptor": { "name": "estimates" } } }, - { - "type": "STREAM", - "stream": { - "stream_state": { "updated_at": "2217-06-26T21:20:07Z" }, - "stream_descriptor": { "name": "estimate_messages" } - } - }, { "type": "STREAM", "stream": { @@ -125,13 +104,6 @@ "stream_descriptor": { "name": "users" } } }, - { - "type": "STREAM", - "stream": { - "stream_state": { "updated_at": "2217-06-26T21:20:07Z" }, - "stream_descriptor": { "name": "project_assignments" } - } - }, { "type": "STREAM", "stream": { @@ -194,5 +166,81 @@ "stream_state": { "to": "22170626" }, "stream_descriptor": { "name": "time_team" } } + }, + { + "type": "STREAM", + "stream": { + "stream_state": { + "states": [ + { + "partition": { "id": 2695071, "parent_slice": {} }, + "cursor": { "updated_at": "2199-05-27T18:12:42Z" } + } + ] + }, + "stream_descriptor": { "name": "estimate_messages" } + } + }, + { + "type": "STREAM", + "stream": { + "stream_state": { + "states": [ + { + "partition": { "id": 3758384, "parent_slice": {} }, + "cursor": { "updated_at": "2199-05-05T08:19:35Z" } + }, + { + "partition": { "id": 3758383, "parent_slice": {} }, + "cursor": { "updated_at": "2199-05-05T08:19:35Z" } + }, + { + "partition": { "id": 3758382, "parent_slice": {} }, + "cursor": { "updated_at": "2199-05-05T08:19:35Z" } + }, + { + "partition": { "id": 3758381, "parent_slice": {} }, + "cursor": { "updated_at": "2199-05-05T08:19:35Z" } + }, + { + "partition": { "id": 3758380, "parent_slice": {} }, + "cursor": { "updated_at": "2199-05-05T12:52:20Z" } + } + ] + }, + "stream_descriptor": { "name": "project_assignments" } + } + }, + { + "type": "STREAM", + "stream": { + "stream_state": { + "states": [ + { + "partition": { "id": 28174531, "parent_slice": {} }, + "cursor": { "updated_at": "2199-05-25T16:46:28Z" } + } + ] + }, + "stream_descriptor": { "name": "invoice_messages" } + } + }, + { + "type": "STREAM", + "stream": { + "stream_state": { + "states": [ + { + "partition": { "id": 28174545, "parent_slice": {} }, + "cursor": { "updated_at": "2199-05-26T09:07:06Z" } + }, + { + "partition": { "id": 28174531, "parent_slice": {} }, + "cursor": { "updated_at": "2199-05-26T09:06:37Z" } + } + ] + }, + "stream_descriptor": { "name": "invoice_payments" } + } } ] diff --git a/airbyte-integrations/connectors/source-harvest/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-harvest/integration_tests/configured_catalog.json index 44d6c1d90a8b1..1e1bc72784a5d 100644 --- a/airbyte-integrations/connectors/source-harvest/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-harvest/integration_tests/configured_catalog.json @@ -6,7 +6,8 @@ "json_schema": {}, "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, - "default_cursor_field": ["updated_at"] + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": [["id"]] }, "sync_mode": "incremental", "cursor_field": ["updated_at"], @@ -18,7 +19,8 @@ "json_schema": {}, "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, - "default_cursor_field": ["updated_at"] + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": [["id"]] }, "sync_mode": "incremental", "cursor_field": ["updated_at"], @@ -39,7 +41,8 @@ "json_schema": {}, "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, - "default_cursor_field": ["updated_at"] + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": [["id"]] }, "sync_mode": "incremental", "cursor_field": ["updated_at"], @@ -51,7 +54,8 @@ "json_schema": {}, "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, - "default_cursor_field": ["updated_at"] + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": [["id"]] }, "sync_mode": "incremental", "cursor_field": ["updated_at"], @@ -63,7 +67,8 @@ "json_schema": {}, "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, - "default_cursor_field": ["updated_at"] + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": [["id"]] }, "sync_mode": "incremental", "cursor_field": ["updated_at"], @@ -75,7 +80,8 @@ "json_schema": {}, "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, - "default_cursor_field": ["updated_at"] + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": [["id"]] }, "sync_mode": "incremental", "cursor_field": ["updated_at"], @@ -87,7 +93,8 @@ "json_schema": {}, "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, - "default_cursor_field": ["updated_at"] + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": [["id"]] }, "sync_mode": "incremental", "cursor_field": ["updated_at"], @@ -99,7 +106,8 @@ "json_schema": {}, "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, - "default_cursor_field": ["updated_at"] + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": [["id"]] }, "sync_mode": "incremental", "cursor_field": ["updated_at"], @@ -111,7 +119,8 @@ "json_schema": {}, "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, - "default_cursor_field": ["updated_at"] + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": [["id"]] }, "sync_mode": "incremental", "cursor_field": ["updated_at"], @@ -123,7 +132,8 @@ "json_schema": {}, "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, - "default_cursor_field": ["updated_at"] + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": [["id"]] }, "sync_mode": "incremental", "cursor_field": ["updated_at"], @@ -135,7 +145,8 @@ "json_schema": {}, "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, - "default_cursor_field": ["updated_at"] + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": [["id"]] }, "sync_mode": "incremental", "cursor_field": ["updated_at"], @@ -147,7 +158,8 @@ "json_schema": {}, "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, - "default_cursor_field": ["updated_at"] + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": [["id"]] }, "sync_mode": "incremental", "cursor_field": ["updated_at"], @@ -159,7 +171,8 @@ "json_schema": {}, "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, - "default_cursor_field": ["updated_at"] + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": [["id"]] }, "sync_mode": "incremental", "cursor_field": ["updated_at"], @@ -171,7 +184,8 @@ "json_schema": {}, "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, - "default_cursor_field": ["updated_at"] + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": [["id"]] }, "sync_mode": "incremental", "cursor_field": ["updated_at"], @@ -183,7 +197,8 @@ "json_schema": {}, "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, - "default_cursor_field": ["updated_at"] + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": [["id"]] }, "sync_mode": "incremental", "cursor_field": ["updated_at"], @@ -195,7 +210,8 @@ "json_schema": {}, "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, - "default_cursor_field": ["updated_at"] + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": [["id"]] }, "sync_mode": "incremental", "cursor_field": ["updated_at"], @@ -207,7 +223,8 @@ "json_schema": {}, "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, - "default_cursor_field": ["updated_at"] + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": [["id"]] }, "sync_mode": "incremental", "cursor_field": ["updated_at"], @@ -219,7 +236,8 @@ "json_schema": {}, "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, - "default_cursor_field": ["updated_at"] + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": [["id"]] }, "sync_mode": "incremental", "cursor_field": ["updated_at"], @@ -249,7 +267,8 @@ "json_schema": {}, "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, - "default_cursor_field": ["updated_at"] + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": [["id"]] }, "sync_mode": "incremental", "cursor_field": ["updated_at"], @@ -307,10 +326,13 @@ "stream": { "name": "uninvoiced", "json_schema": {}, - "supported_sync_modes": ["full_refresh"] + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["to"] }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" + "sync_mode": "incremental", + "cursor_field": ["to"], + "destination_sync_mode": "append" }, { "stream": { diff --git a/airbyte-integrations/connectors/source-harvest/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-harvest/integration_tests/expected_records.jsonl index 131e7fe6b2eba..33df610e0e0ff 100644 --- a/airbyte-integrations/connectors/source-harvest/integration_tests/expected_records.jsonl +++ b/airbyte-integrations/connectors/source-harvest/integration_tests/expected_records.jsonl @@ -9,7 +9,6 @@ {"stream": "invoice_messages", "data": {"id": 57176997, "sent_by": "Airbyte Developer", "sent_by_email": "integration-test@airbyte.io", "sent_from": "Airbyte Developer", "sent_from_email": "integration-test@airbyte.io", "send_me_a_copy": false, "thank_you": false, "reminder": false, "send_reminder_on": null, "created_at": "2021-05-25T16:46:28Z", "updated_at": "2021-05-25T16:46:28Z", "attach_pdf": false, "event_type": null, "recipients": [{"name": "Airbyte Developer", "email": "integration-test@airbyte.io"}], "include_link_to_client_invoice": false, "subject": "Invoice #1 from Airbyte", "body": "---------------------------------------------\r\nInvoice Summary\r\n---------------------------------------------\r\nInvoice ID: 1\r\nIssue Date: 05/25/2021\r\nClient: First client\r\nP.O. Number: \r\nAmount: $76.90\r\nDue: 05/25/2021 (upon receipt)\r\n\r\nThank you!\r\n---------------------------------------------", "parent_id": 28174531}, "emitted_at": 1708017738014} {"stream": "invoice_messages", "data": {"id": 57176927, "sent_by": "Airbyte Developer", "sent_by_email": "integration-test@airbyte.io", "sent_from": "Airbyte Developer", "sent_from_email": "integration-test@airbyte.io", "send_me_a_copy": false, "thank_you": false, "reminder": false, "send_reminder_on": null, "created_at": "2021-05-25T16:43:30Z", "updated_at": "2021-05-25T16:43:30Z", "attach_pdf": true, "event_type": null, "recipients": [{"name": "Airbyte Developer", "email": "integration-test@airbyte.io"}], "include_link_to_client_invoice": false, "subject": "Invoice #1 from Airbyte", "body": "---------------------------------------------\r\nInvoice Summary\r\n---------------------------------------------\r\nInvoice ID: 1\r\nIssue Date: 05/25/2021\r\nClient: First client\r\nP.O. Number: \r\nAmount: $76.90\r\nDue: 05/25/2021 (upon receipt)\r\n\r\nThe detailed invoice is attached as a PDF.\r\n\r\nThank you!\r\n---------------------------------------------", "parent_id": 28174531}, "emitted_at": 1708017738015} {"stream": "invoice_payments", "data": {"id": 21857618, "amount": 500.0, "paid_at": "2021-05-26T00:00:00Z", "recorded_by": "Airbyte Developer", "recorded_by_email": "integration-test@airbyte.io", "notes": "", "transaction_id": null, "created_at": "2021-05-26T09:07:06Z", "updated_at": "2021-05-26T09:07:06Z", "paid_date": "2021-05-26", "payment_gateway": {"id": null, "name": null}, "parent_id": 28174545}, "emitted_at": 1690884275279} -{"stream": "invoice_payments", "data": {"id": 21857615, "amount": 76.9, "paid_at": "2021-05-25T00:00:00Z", "recorded_by": "Airbyte Developer", "recorded_by_email": "integration-test@airbyte.io", "notes": "Payed", "transaction_id": null, "created_at": "2021-05-26T09:06:37Z", "updated_at": "2021-05-26T09:06:37Z", "paid_date": "2021-05-25", "payment_gateway": {"id": null, "name": null}, "parent_id": 28174531}, "emitted_at": 1690884276439} {"stream": "invoice_item_categories", "data": {"id": 2732435, "name": "Product", "use_as_service": false, "use_as_expense": true, "created_at": "2021-05-05T08:17:57Z", "updated_at": "2021-05-05T08:17:57Z"}, "emitted_at": 1690884276919} {"stream": "invoice_item_categories", "data": {"id": 2732434, "name": "Service", "use_as_service": true, "use_as_expense": false, "created_at": "2021-05-05T08:17:57Z", "updated_at": "2021-05-05T08:17:57Z"}, "emitted_at": 1690884276920} {"stream": "estimates", "data": {"id": 2695071, "client_key": "de25b9eb3e82c0d5032777559e8ac0cfdfbf82b1", "number": "1", "purchase_order": "", "amount": 0.0, "tax": null, "tax_amount": 0.0, "tax2": null, "tax2_amount": 0.0, "discount": null, "discount_amount": 0.0, "subject": "", "notes": "", "state": "sent", "issue_date": "2021-05-27", "sent_at": "2021-05-27T18:12:42Z", "created_at": "2021-05-27T18:12:30Z", "updated_at": "2021-05-27T18:12:42Z", "accepted_at": null, "declined_at": null, "currency": "USD", "client": {"id": 10748670, "name": "[SAMPLE] Client A"}, "creator": {"id": 3758380, "name": "Airbyte Developer"}, "line_items": []}, "emitted_at": 1690884277393} diff --git a/airbyte-integrations/connectors/source-harvest/integration_tests/incremental_catalog.json b/airbyte-integrations/connectors/source-harvest/integration_tests/incremental_catalog.json index 088860b5a25e6..77ca3d48f8552 100644 --- a/airbyte-integrations/connectors/source-harvest/integration_tests/incremental_catalog.json +++ b/airbyte-integrations/connectors/source-harvest/integration_tests/incremental_catalog.json @@ -280,10 +280,13 @@ "stream": { "name": "uninvoiced", "json_schema": {}, - "supported_sync_modes": ["full_refresh"] + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["to"] }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" + "sync_mode": "incremental", + "cursor_field": ["to"], + "destination_sync_mode": "append" }, { "stream": { diff --git a/airbyte-integrations/connectors/source-harvest/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-harvest/integration_tests/invalid_config.json index 57b07556f3ff6..2026ab120b78b 100644 --- a/airbyte-integrations/connectors/source-harvest/integration_tests/invalid_config.json +++ b/airbyte-integrations/connectors/source-harvest/integration_tests/invalid_config.json @@ -1,5 +1,9 @@ { "api_token": "1111111.aa.wrong-api-token", "account_id": "1111111", - "replication_start_date": "1000-06-26T21:20:07Z" + "replication_start_date": "1000-06-26T21:20:07Z", + "credentials": { + "auth_type": "Token", + "api_token": "invalid_token" + } } diff --git a/airbyte-integrations/connectors/source-harvest/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-harvest/integration_tests/sample_config.json index a9d85c28758bc..9f8b462a425d1 100644 --- a/airbyte-integrations/connectors/source-harvest/integration_tests/sample_config.json +++ b/airbyte-integrations/connectors/source-harvest/integration_tests/sample_config.json @@ -1,5 +1,8 @@ { - "api_token": "", "account_id": "", - "replication_start_date": "2021-01-01T00:00:00Z" + "start_date": "2021-01-01T00:00:00Z", + "credentials": { + "auth_type": "bearer", + "api_token": "" + } } diff --git a/airbyte-integrations/connectors/source-harvest/integration_tests/sample_config_oauth.json b/airbyte-integrations/connectors/source-harvest/integration_tests/sample_config_oauth.json new file mode 100644 index 0000000000000..4638e73e93217 --- /dev/null +++ b/airbyte-integrations/connectors/source-harvest/integration_tests/sample_config_oauth.json @@ -0,0 +1,10 @@ +{ + "account_id": "", + "replication_start_date": "2021-01-01T00:00:00Z", + "credentials": { + "auth_type": "Client", + "client_id": "", + "client_secret": "", + "client_refresh_token": "" + } +} diff --git a/airbyte-integrations/connectors/source-harvest/metadata.yaml b/airbyte-integrations/connectors/source-harvest/metadata.yaml index c358af730a2d0..3ae663bf1050f 100644 --- a/airbyte-integrations/connectors/source-harvest/metadata.yaml +++ b/airbyte-integrations/connectors/source-harvest/metadata.yaml @@ -10,12 +10,13 @@ data: connectorSubtype: api connectorType: source definitionId: fe2b4084-3386-4d3b-9ad6-308f61a6f1e6 - dockerImageTag: 0.1.24 + dockerImageTag: 1.0.0 dockerRepository: airbyte/source-harvest documentationUrl: https://docs.airbyte.com/integrations/sources/harvest githubIssueLabel: source-harvest icon: harvest.svg license: MIT + maxSecondsBetweenMessages: 15 name: Harvest remoteRegistries: pypi: @@ -26,9 +27,32 @@ data: enabled: true oss: enabled: true + releases: + breakingChanges: + 1.0.0: + message: "Several changes have been made to the Harvest connector. This update requires a reset for the following streams to due an update in the format of state: `expenses_clients`, `expenses_categories`, `expenses_projects`, `expenses_team`, `time_clients`, `time_projects`, `time_tasks`, `time_team`, `uninvoiced`, `estimate_messages`, `invoice_payments`, `invoice_messages`, `project_assignments`." + upgradeDeadline: "2024-04-29" + scopedImpact: + - scopeType: stream + impactedScopes: + [ + "expenses_clients", + "expenses_categories", + "expenses_projects", + "expenses_team", + "time_clients", + "time_projects", + "time_tasks", + "time_team", + "uninvoiced", + "estimate_messages", + "invoice_payments", + "invoice_messages", + "project_assignments", + ] releaseStage: generally_available supportLevel: certified tags: - language:python - - cdk:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-harvest/poetry.lock b/airbyte-integrations/connectors/source-harvest/poetry.lock index 1ab272398503f..3f76d5f9b0e6b 100644 --- a/airbyte-integrations/connectors/source-harvest/poetry.lock +++ b/airbyte-integrations/connectors/source-harvest/poetry.lock @@ -2,39 +2,38 @@ [[package]] name = "airbyte-cdk" -version = "0.62.1" +version = "0.79.1" description = "A framework for writing Airbyte Connectors." optional = false -python-versions = ">=3.8" +python-versions = "<4.0,>=3.9" files = [ - {file = "airbyte-cdk-0.62.1.tar.gz", hash = "sha256:3c934dd8b045079a9c807f699ca2012eaa5df755606e3f5b8b16247cbbd7e8c6"}, - {file = "airbyte_cdk-0.62.1-py3-none-any.whl", hash = "sha256:792399a602b7f5c3cd4ed2a5fce5910cfe3676b9b9199b9208f2d5236f5f42d3"}, + {file = "airbyte_cdk-0.79.1-py3-none-any.whl", hash = "sha256:36c4b1fe98448b7d116f16c612982af8e22cbff28ea37da918c851d7feb1093c"}, + {file = "airbyte_cdk-0.79.1.tar.gz", hash = "sha256:a49d10b3c87770ab1e7b7ebf9a1e945d49274c18548756f93a841ebd4c195146"}, ] [package.dependencies] airbyte-protocol-models = "0.5.1" backoff = "*" cachetools = "*" -Deprecated = ">=1.2,<2.0" +Deprecated = ">=1.2,<1.3" dpath = ">=2.0.1,<2.1.0" genson = "1.2.2" isodate = ">=0.6.1,<0.7.0" Jinja2 = ">=3.1.2,<3.2.0" -jsonref = ">=0.2,<1.0" +jsonref = ">=0.2,<0.3" jsonschema = ">=3.2.0,<3.3.0" pendulum = "<3.0.0" pydantic = ">=1.10.8,<2.0.0" pyrate-limiter = ">=3.1.0,<3.2.0" python-dateutil = "*" -PyYAML = ">=6.0.1" +PyYAML = ">=6.0.1,<7.0.0" requests = "*" -requests-cache = "*" +requests_cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] [[package]] @@ -104,13 +103,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -301,6 +300,20 @@ files = [ [package.extras] test = ["pytest (>=6)"] +[[package]] +name = "freezegun" +version = "1.4.0" +description = "Let your Python tests travel through time" +optional = false +python-versions = ">=3.7" +files = [ + {file = "freezegun-1.4.0-py3-none-any.whl", hash = "sha256:55e0fc3c84ebf0a96a5aa23ff8b53d70246479e9a68863f1fcac5a3e52f19dd6"}, + {file = "freezegun-1.4.0.tar.gz", hash = "sha256:10939b0ba0ff5adaecf3b06a5c2f73071d9678e507c5eaedb23c761d56ac774b"}, +] + +[package.dependencies] +python-dateutil = ">=2.7" + [[package]] name = "genson" version = "1.2.2" @@ -366,13 +379,13 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jsonref" -version = "0.3.0" -description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." +version = "0.2" +description = "An implementation of JSON Reference for Python" optional = false -python-versions = ">=3.3,<4.0" +python-versions = "*" files = [ - {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, - {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, ] [[package]] @@ -467,13 +480,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -553,47 +566,47 @@ files = [ [[package]] name = "pydantic" -version = "1.10.14" +version = "1.10.15" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, - {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, - {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, - {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, - {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, - {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, - {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, - {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, - {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, - {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, - {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, - {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, - {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, - {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, - {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, - {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, - {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, - {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, - {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, - {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, - {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, - {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, - {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, - {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, - {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, - {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, - {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, - {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, - {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, - {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, - {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, - {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, - {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, - {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, - {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, - {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50f1666a9940d3d68683c9d96e39640f709d7a72ff8702987dab1761036206bb"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82790d4753ee5d00739d6cb5cf56bceb186d9d6ce134aca3ba7befb1eedbc2c8"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d207d5b87f6cbefbdb1198154292faee8017d7495a54ae58db06762004500d00"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e49db944fad339b2ccb80128ffd3f8af076f9f287197a480bf1e4ca053a866f0"}, + {file = "pydantic-1.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:d3b5c4cbd0c9cb61bbbb19ce335e1f8ab87a811f6d589ed52b0254cf585d709c"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3d5731a120752248844676bf92f25a12f6e45425e63ce22e0849297a093b5b0"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c365ad9c394f9eeffcb30a82f4246c0006417f03a7c0f8315d6211f25f7cb654"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3287e1614393119c67bd4404f46e33ae3be3ed4cd10360b48d0a4459f420c6a3"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be51dd2c8596b25fe43c0a4a59c2bee4f18d88efb8031188f9e7ddc6b469cf44"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6a51a1dd4aa7b3f1317f65493a182d3cff708385327c1c82c81e4a9d6d65b2e4"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4e316e54b5775d1eb59187f9290aeb38acf620e10f7fd2f776d97bb788199e53"}, + {file = "pydantic-1.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:0d142fa1b8f2f0ae11ddd5e3e317dcac060b951d605fda26ca9b234b92214986"}, + {file = "pydantic-1.10.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7ea210336b891f5ea334f8fc9f8f862b87acd5d4a0cbc9e3e208e7aa1775dabf"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3453685ccd7140715e05f2193d64030101eaad26076fad4e246c1cc97e1bb30d"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bea1f03b8d4e8e86702c918ccfd5d947ac268f0f0cc6ed71782e4b09353b26f"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:af9850d98fc21e5bc24ea9e35dd80a29faf6462c608728a110c0a30b595e58b7"}, + {file = "pydantic-1.10.15-cp37-cp37m-win_amd64.whl", hash = "sha256:d31ee5b14a82c9afe2bd26aaa405293d4237d0591527d9129ce36e58f19f95c1"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5e09c19df304b8123938dc3c53d3d3be6ec74b9d7d0d80f4f4b5432ae16c2022"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7ac9237cd62947db00a0d16acf2f3e00d1ae9d3bd602b9c415f93e7a9fc10528"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:584f2d4c98ffec420e02305cf675857bae03c9d617fcfdc34946b1160213a948"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbc6989fad0c030bd70a0b6f626f98a862224bc2b1e36bfc531ea2facc0a340c"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d573082c6ef99336f2cb5b667b781d2f776d4af311574fb53d908517ba523c22"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6bd7030c9abc80134087d8b6e7aa957e43d35714daa116aced57269a445b8f7b"}, + {file = "pydantic-1.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:3350f527bb04138f8aff932dc828f154847fbdc7a1a44c240fbfff1b57f49a12"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51d405b42f1b86703555797270e4970a9f9bd7953f3990142e69d1037f9d9e51"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a980a77c52723b0dc56640ced396b73a024d4b74f02bcb2d21dbbac1debbe9d0"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f1a1fb467d3f49e1708a3f632b11c69fccb4e748a325d5a491ddc7b5d22383"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:676ed48f2c5bbad835f1a8ed8a6d44c1cd5a21121116d2ac40bd1cd3619746ed"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92229f73400b80c13afcd050687f4d7e88de9234d74b27e6728aa689abcf58cc"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2746189100c646682eff0bce95efa7d2e203420d8e1c613dc0c6b4c1d9c1fde4"}, + {file = "pydantic-1.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:394f08750bd8eaad714718812e7fab615f873b3cdd0b9d84e76e51ef3b50b6b7"}, + {file = "pydantic-1.10.15-py3-none-any.whl", hash = "sha256:28e552a060ba2740d0d2aabe35162652c1459a0b9069fe0db7f4ee0e18e74d58"}, + {file = "pydantic-1.10.15.tar.gz", hash = "sha256:ca832e124eda231a60a041da4f013e3ff24949d94a01154b137fc2f2a43c3ffb"}, ] [package.dependencies] @@ -685,30 +698,30 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xm [[package]] name = "pytest-mock" -version = "3.12.0" +version = "3.14.0" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, - {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, ] [package.dependencies] -pytest = ">=5.0" +pytest = ">=6.2.5" [package.extras] dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -808,13 +821,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -826,50 +839,48 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" -version = "1.11.0" +version = "1.12.1" description = "Mock out responses from the requests package" optional = false -python-versions = "*" +python-versions = ">=3.5" files = [ - {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"}, - {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"}, + {file = "requests-mock-1.12.1.tar.gz", hash = "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401"}, + {file = "requests_mock-1.12.1-py2.py3-none-any.whl", hash = "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563"}, ] [package.dependencies] -requests = ">=2.3,<3" -six = "*" +requests = ">=2.22,<3" [package.extras] fixture = ["fixtures"] -test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] [[package]] name = "setuptools" -version = "69.1.0" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -895,13 +906,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.11.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, + {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, ] [[package]] @@ -920,13 +931,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1031,4 +1042,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "c286c47fbc557061975b5d48e9b98b439952f4a2678139b5584efa12f9735165" +content-hash = "34258a7e220323a05f6aadce404d78c226095a9fd5e0d7fe4db8f0ea9662b490" diff --git a/airbyte-integrations/connectors/source-harvest/pyproject.toml b/airbyte-integrations/connectors/source-harvest/pyproject.toml index 692575a4bd202..8a12c762d0d4c 100644 --- a/airbyte-integrations/connectors/source-harvest/pyproject.toml +++ b/airbyte-integrations/connectors/source-harvest/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "0.1.24" +version = "1.0.0" name = "source-harvest" description = "Source implementation for Harvest." authors = [ "Airbyte ",] @@ -17,7 +17,7 @@ include = "source_harvest" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = ">=0.62.1" +airbyte-cdk = "^0" [tool.poetry.scripts] source-harvest = "source_harvest.run:run" @@ -26,3 +26,4 @@ source-harvest = "source_harvest.run:run" requests-mock = "^1.11.0" pytest-mock = "^3.6.1" pytest = "^6.1" +freezegun = "^1.4.0" diff --git a/airbyte-integrations/connectors/source-harvest/source_harvest/auth.py b/airbyte-integrations/connectors/source-harvest/source_harvest/auth.py deleted file mode 100644 index bbb07dfba4c57..0000000000000 --- a/airbyte-integrations/connectors/source-harvest/source_harvest/auth.py +++ /dev/null @@ -1,36 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from typing import Any, Mapping - -from airbyte_cdk.sources.streams.http.auth import Oauth2Authenticator, TokenAuthenticator - - -class HarvestMixin: - """ - Mixin class for providing additional HTTP header for specifying account ID - https://help.getharvest.com/api-v2/authentication-api/authentication/authentication/ - """ - - def __init__(self, *, account_id: str, account_id_header: str = "Harvest-Account-ID", **kwargs): - super().__init__(**kwargs) - self.account_id = account_id - self.account_id_header = account_id_header - - def get_auth_header(self) -> Mapping[str, Any]: - return {**super().get_auth_header(), self.account_id_header: self.account_id} - - -class HarvestTokenAuthenticator(HarvestMixin, TokenAuthenticator): - """ - Auth class for Personal Access Token - https://help.getharvest.com/api-v2/authentication-api/authentication/authentication/#personal-access-tokens - """ - - -class HarvestOauth2Authenticator(HarvestMixin, Oauth2Authenticator): - """ - Auth class for OAuth2 - https://help.getharvest.com/api-v2/authentication-api/authentication/authentication/#for-server-side-applications - """ diff --git a/airbyte-integrations/connectors/source-harvest/source_harvest/availability_strategy.py b/airbyte-integrations/connectors/source-harvest/source_harvest/availability_strategy.py deleted file mode 100644 index c3d1a622c2891..0000000000000 --- a/airbyte-integrations/connectors/source-harvest/source_harvest/availability_strategy.py +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -import logging -from typing import Dict - -import requests -from airbyte_cdk.sources import Source -from airbyte_cdk.sources.streams import Stream -from airbyte_cdk.sources.streams.http.availability_strategy import HttpAvailabilityStrategy -from requests import HTTPError - - -class HarvestAvailabilityStrategy(HttpAvailabilityStrategy): - """ - This class is tested as part of test_source.check_connection - """ - - def reasons_for_unavailable_status_codes( - self, stream: Stream, logger: logging.Logger, source: Source, error: HTTPError - ) -> Dict[int, str]: - reasons_for_codes: Dict[int, str] = { - requests.codes.UNAUTHORIZED: "Please ensure your credentials are valid.", - requests.codes.FORBIDDEN: "This is most likely due to insufficient permissions on the credentials in use.", - requests.codes.NOT_FOUND: "Please ensure that your account ID is properly set. If it is the case and you are still seeing this error, please contact Airbyte support.", - } - return reasons_for_codes diff --git a/airbyte-integrations/connectors/source-harvest/source_harvest/config_migrations.py b/airbyte-integrations/connectors/source-harvest/source_harvest/config_migrations.py new file mode 100644 index 0000000000000..3d7d22a2eb10f --- /dev/null +++ b/airbyte-integrations/connectors/source-harvest/source_harvest/config_migrations.py @@ -0,0 +1,104 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +import logging +from typing import Any, List, Mapping + +from airbyte_cdk.config_observation import create_connector_config_control_message +from airbyte_cdk.entrypoint import AirbyteEntrypoint +from airbyte_cdk.sources import Source +from airbyte_cdk.sources.message import InMemoryMessageRepository, MessageRepository + +logger = logging.getLogger("airbyte_logger") + + +class MigrateAuthType: + """ + This class stands for migrating the config at runtime. + This migration is backwards compatible with the previous version, as new `auth_type` property will be created and populated. + When falling back to the previous source version connector will not require the `auth_type` field. + """ + + message_repository: MessageRepository = InMemoryMessageRepository() + + @classmethod + def should_migrate(cls, config: Mapping[str, Any]) -> bool: + """ + Determines if a configuration requires migration. + + Args: + - config (Mapping[str, Any]): The configuration data to check. + + Returns: + - True: If the configuration requires migration (i.e. "auth_type" does not exist in the credentials being read). + - False: Otherwise. + """ + return "auth_type" not in config["credentials"] + + @classmethod + def set_auth_type(cls, config: Mapping[str, Any], source: Source = None) -> Mapping[str, Any]: + """ + Sets `auth_type` to "Token" if api_token exists in the credentials, sets it to "Client" for when client_id exists. Otherwise does not set `auth_type` as user has not provided any credentials. + + Args: + - config (Mapping[str, Any]): The configuration from which the `auth_type` should be added and set. + - source (Source, optional): The data source. Defaults to None. + + Returns: + - Mapping[str, Any]: The configuration after removing the key. + """ + if "api_token" in config["credentials"]: + config["credentials"]["auth_type"] = "Token" + elif "client_id" in config["credentials"]: + config["credentials"]["auth_type"] = "Client" + return config + + @classmethod + def modify_and_save(cls, config_path: str, source: Source, config: Mapping[str, Any]) -> Mapping[str, Any]: + """ + Modifies the configuration and then saves it back to the source. + + Args: + - config_path (str): The path where the configuration is stored. + - source (Source): The data source. + - config (Mapping[str, Any]): The current configuration. + + Returns: + - Mapping[str, Any]: The updated configuration. + """ + migrated_config = cls.set_auth_type(config, source) + source.write_config(migrated_config, config_path) + return migrated_config + + @classmethod + def emit_control_message(cls, migrated_config: Mapping[str, Any]) -> None: + """ + Emits the control messages related to configuration migration. + + Args: + - migrated_config (Mapping[str, Any]): The migrated configuration. + """ + cls.message_repository.emit_message(create_connector_config_control_message(migrated_config)) + for message in cls.message_repository._message_queue: + print(message.json(exclude_unset=True)) + + @classmethod + def migrate(cls, args: List[str], source: Source) -> None: + """ + Orchestrates the configuration migration process. + + It first checks if the `--config` argument is provided, and if so, + determines whether migration is needed, and then performs the migration + if required. + + Args: + - args (List[str]): List of command-line arguments. + - source (Source): The data source. + """ + config_path = AirbyteEntrypoint(source).extract_config(args) + if config_path: + config = source.read_config(config_path) + if cls.should_migrate(config): + cls.emit_control_message(cls.modify_and_save(config_path, source, config)) diff --git a/airbyte-integrations/connectors/source-harvest/source_harvest/manifest.yaml b/airbyte-integrations/connectors/source-harvest/source_harvest/manifest.yaml new file mode 100644 index 0000000000000..8f7c9949726ac --- /dev/null +++ b/airbyte-integrations/connectors/source-harvest/source_harvest/manifest.yaml @@ -0,0 +1,2270 @@ +version: 0.61.2 + +definitions: + authenticator: + type: SelectiveAuthenticator + authenticator_selection_path: ["credentials", "auth_type"] + authenticators: + Token: + type: BearerAuthenticator + api_token: "{{ config['credentials']['api_token'] }}" + Client: + type: OAuthAuthenticator + client_id: "{{ config['credentials']['client_id'] }}" + grant_type: refresh_token + client_secret: "{{ config['credentials']['client_secret'] }}" + refresh_token: "{{ config['credentials']['refresh_token'] }}" + refresh_request_body: {} + token_refresh_endpoint: https://id.getharvest.com/api/v2/oauth2/token + composite_error_handler: + type: CompositeErrorHandler + error_handlers: + - type: DefaultErrorHandler + backoff_strategies: + - type: WaitTimeFromHeader + header: Retry-After + response_filters: + - http_codes: [401] + action: IGNORE + error_message: "Please ensure your credentials are valid." + - http_codes: [403] + action: IGNORE + error_message: "This is most likely due to insufficient permissions on the credentials in use." + - http_codes: [404] + action: IGNORE + error_message: "Please ensure that your account ID is properly set. If it is the case and you are still seeing this error, please contact Airbyte support." +spec: + connection_specification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Harvest Spec" + type: object + required: + - account_id + - replication_start_date + additionalProperties: true + properties: + account_id: + title: "Account ID" + description: "Harvest account ID. Required for all Harvest requests in pair with Personal Access Token" + airbyte_secret: true + type: string + order: 0 + replication_start_date: + title: "Start Date" + description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + type: string + order: 1 + format: date-time + replication_end_date: + title: "End Date" + description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any data after this date will not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + type: string + airbyte_hidden: true + order: 2 + format: date-time + credentials: + title: "Authentication mechanism" + description: "Choose how to authenticate to Harvest." + type: object + order: 3 + oneOf: + - type: object + title: "Authenticate via Harvest (OAuth)" + required: + - client_id + - client_secret + - refresh_token + additionalProperties: true + properties: + auth_type: + type: string + const: "Client" + order: 0 + client_id: + title: "Client ID" + type: string + description: "The Client ID of your Harvest developer application." + client_secret: + title: "Client Secret" + type: string + description: "The Client Secret of your Harvest developer application." + airbyte_secret: true + refresh_token: + title: "Refresh Token" + type: string + description: "Refresh Token to renew the expired Access Token." + airbyte_secret: true + - type: object + title: "Authenticate with Personal Access Token" + required: + - api_token + additionalProperties: true + properties: + auth_type: + type: string + const: "Token" + order: 0 + api_token: + title: "Personal Access Token" + description: 'Log into Harvest and then create new personal access token.' + type: string + airbyte_secret: true +type: DeclarativeSource +check: + type: CheckStream + stream_names: + - company +streams: + - type: DeclarativeStream + name: clients + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.harvestapp.com/v2/ + path: /clients + http_method: GET + request_parameters: + per_page: "50" + request_headers: + Harvest-Account-Id: "{{ config['account_id'] }}" + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + $ref: "#/definitions/composite_error_handler" + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - clients + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + page_size: 1 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: [] + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updated_at + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%SZ" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['replication_start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: updated_since + inject_into: request_parameter + - type: DeclarativeStream + name: company + primary_key: [] + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.harvestapp.com/v2/ + path: /company + http_method: GET + request_parameters: {} + request_headers: + Harvest-Account-Id: "{{ config['account_id'] }}" + authenticator: + $ref: "#/definitions/authenticator" + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: [] + paginator: + type: NoPagination + partition_router: [] + - type: DeclarativeStream + name: contacts + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.harvestapp.com/v2/ + path: /contacts + http_method: GET + request_parameters: + per_page: "50" + request_headers: + Harvest-Account-Id: "{{ config['account_id'] }}" + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + $ref: "#/definitions/composite_error_handler" + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - contacts + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + page_size: 1 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: [] + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updated_at + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%SZ" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['replication_start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: updated_since + inject_into: request_parameter + - type: DeclarativeStream + name: estimate_item_categories + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.harvestapp.com/v2/ + path: /estimate_item_categories + http_method: GET + request_parameters: + per_page: "50" + request_headers: + Harvest-Account-Id: "{{ config['account_id'] }}" + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + $ref: "#/definitions/composite_error_handler" + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - estimate_item_categories + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + page_size: 1 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: [] + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updated_at + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%SZ" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['replication_start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: updated_since + inject_into: request_parameter + - type: DeclarativeStream + name: estimates + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.harvestapp.com/v2/ + path: /estimates + http_method: GET + request_parameters: + per_page: "50" + request_headers: + Harvest-Account-Id: "{{ config['account_id'] }}" + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + $ref: "#/definitions/composite_error_handler" + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - estimates + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + page_size: 1 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: [] + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updated_at + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%SZ" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['replication_start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: updated_since + inject_into: request_parameter + - type: DeclarativeStream + name: expense_categories + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.harvestapp.com/v2/ + path: /expense_categories + http_method: GET + request_parameters: + per_page: "50" + request_headers: + Harvest-Account-Id: "{{ config['account_id'] }}" + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + $ref: "#/definitions/composite_error_handler" + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - expense_categories + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + page_size: 1 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: [] + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updated_at + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%SZ" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['replication_start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: updated_since + inject_into: request_parameter + - type: DeclarativeStream + name: expenses + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.harvestapp.com/v2/ + path: /expenses + http_method: GET + request_parameters: + per_page: "50" + request_headers: + Harvest-Account-Id: "{{ config['account_id'] }}" + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + $ref: "#/definitions/composite_error_handler" + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - expenses + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + page_size: 1 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: [] + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updated_at + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%SZ" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['replication_start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: updated_since + inject_into: request_parameter + - type: DeclarativeStream + name: invoice_item_categories + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.harvestapp.com/v2/ + path: /invoice_item_categories + http_method: GET + request_parameters: + per_page: "50" + request_headers: + Harvest-Account-Id: "{{ config['account_id'] }}" + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + $ref: "#/definitions/composite_error_handler" + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - invoice_item_categories + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + page_size: 1 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: [] + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updated_at + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%SZ" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['replication_start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: updated_since + inject_into: request_parameter + - type: DeclarativeStream + name: invoices + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.harvestapp.com/v2/ + path: /invoices + http_method: GET + request_parameters: + per_page: "50" + request_headers: + Harvest-Account-Id: "{{ config['account_id'] }}" + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + $ref: "#/definitions/composite_error_handler" + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - invoices + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + page_size: 1 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: [] + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updated_at + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%SZ" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['replication_start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: updated_since + inject_into: request_parameter + - type: DeclarativeStream + name: projects + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.harvestapp.com/v2/ + path: /projects + http_method: GET + request_parameters: + per_page: "50" + request_headers: + Harvest-Account-Id: "{{ config['account_id'] }}" + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + $ref: "#/definitions/composite_error_handler" + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - projects + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + page_size: 1 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: [] + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updated_at + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%SZ" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['replication_start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: updated_since + inject_into: request_parameter + - type: DeclarativeStream + name: roles + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.harvestapp.com/v2/ + path: /roles + http_method: GET + request_parameters: + per_page: "50" + request_headers: + Harvest-Account-Id: "{{ config['account_id'] }}" + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + $ref: "#/definitions/composite_error_handler" + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - roles + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + page_size: 1 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: [] + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updated_at + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%SZ" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['replication_start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: updated_since + inject_into: request_parameter + - type: DeclarativeStream + name: task_assignments + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.harvestapp.com/v2/ + path: /task_assignments + http_method: GET + request_parameters: + per_page: "50" + request_headers: + Harvest-Account-Id: "{{ config['account_id'] }}" + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + $ref: "#/definitions/composite_error_handler" + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - task_assignments + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + page_size: 1 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: [] + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updated_at + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%SZ" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['replication_start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: updated_since + inject_into: request_parameter + - type: DeclarativeStream + name: tasks + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.harvestapp.com/v2/ + path: /tasks + http_method: GET + request_parameters: + per_page: "50" + request_headers: + Harvest-Account-Id: "{{ config['account_id'] }}" + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + $ref: "#/definitions/composite_error_handler" + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - tasks + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + page_size: 1 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: [] + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updated_at + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%SZ" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['replication_start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: updated_since + inject_into: request_parameter + - type: DeclarativeStream + name: time_entries + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.harvestapp.com/v2/ + path: /time_entries + http_method: GET + request_parameters: + per_page: "50" + request_headers: + Harvest-Account-Id: "{{ config['account_id'] }}" + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + $ref: "#/definitions/composite_error_handler" + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - time_entries + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + page_size: 1 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: [] + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updated_at + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%SZ" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['replication_start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: updated_since + inject_into: request_parameter + - type: DeclarativeStream + name: user_assignments + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.harvestapp.com/v2/ + path: /user_assignments + http_method: GET + request_parameters: + per_page: "50" + request_headers: + Harvest-Account-Id: "{{ config['account_id'] }}" + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + $ref: "#/definitions/composite_error_handler" + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - user_assignments + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + page_size: 1 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: [] + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updated_at + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%SZ" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['replication_start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: updated_since + inject_into: request_parameter + - type: DeclarativeStream + name: users + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.harvestapp.com/v2/ + path: /users + http_method: GET + request_parameters: + per_page: "50" + request_headers: + Harvest-Account-Id: "{{ config['account_id'] }}" + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + $ref: "#/definitions/composite_error_handler" + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - users + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + page_size: 1 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: [] + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updated_at + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%SZ" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['replication_start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: updated_since + inject_into: request_parameter + - type: DeclarativeStream + name: expenses_categories + primary_key: [] + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.harvestapp.com/v2/ + path: /reports/expenses/categories + http_method: GET + request_parameters: + per_page: "50" + request_headers: + Harvest-Account-Id: "{{ config['account_id'] }}" + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + $ref: "#/definitions/composite_error_handler" + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - results + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + page_size: 1 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: [] + transformations: + - type: AddFields + fields: + - path: + - from + value: '"{{ stream_partition.start_time }}"' + - type: AddFields + fields: + - path: + - to + value: '"{{ stream_partition.end_time }}"' + incremental_sync: + type: DatetimeBasedCursor + cursor_field: to + cursor_datetime_formats: + - "%Y%m%d" + datetime_format: "%Y%m%d" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['replication_start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: from + inject_into: request_parameter + end_time_option: + type: RequestOption + field_name: to + inject_into: request_parameter + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + step: P365D + cursor_granularity: P1D + - type: DeclarativeStream + name: expenses_clients + primary_key: [] + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.harvestapp.com/v2/ + path: /reports/expenses/clients + http_method: GET + request_parameters: + per_page: "50" + request_headers: + Harvest-Account-Id: "{{ config['account_id'] }}" + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + $ref: "#/definitions/composite_error_handler" + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - results + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + page_size: 1 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: [] + transformations: + - type: AddFields + fields: + - path: + - from + value: '"{{ stream_partition.start_time }}"' + - type: AddFields + fields: + - path: + - to + value: '"{{ stream_partition.end_time }}"' + incremental_sync: + type: DatetimeBasedCursor + cursor_field: to + cursor_datetime_formats: + - "%Y%m%d" + datetime_format: "%Y%m%d" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['replication_start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: from + inject_into: request_parameter + end_time_option: + type: RequestOption + field_name: to + inject_into: request_parameter + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + step: P365D + cursor_granularity: P1D + - type: DeclarativeStream + name: expenses_projects + primary_key: [] + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.harvestapp.com/v2/ + path: /reports/expenses/projects + http_method: GET + request_parameters: + per_page: "50" + request_headers: + Harvest-Account-Id: "{{ config['account_id'] }}" + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + $ref: "#/definitions/composite_error_handler" + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - results + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + page_size: 1 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: [] + transformations: + - type: AddFields + fields: + - path: + - from + value: '"{{ stream_partition.start_time }}"' + - type: AddFields + fields: + - path: + - to + value: '"{{ stream_partition.end_time }}"' + incremental_sync: + type: DatetimeBasedCursor + cursor_field: to + cursor_datetime_formats: + - "%Y%m%d" + datetime_format: "%Y%m%d" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['replication_start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: from + inject_into: request_parameter + end_time_option: + type: RequestOption + field_name: to + inject_into: request_parameter + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + step: P365D + cursor_granularity: P1D + - type: DeclarativeStream + name: expenses_team + primary_key: [] + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.harvestapp.com/v2/ + path: /reports/expenses/team + http_method: GET + request_parameters: + per_page: "50" + request_headers: + Harvest-Account-Id: "{{ config['account_id'] }}" + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + $ref: "#/definitions/composite_error_handler" + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - results + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + page_size: 1 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: [] + transformations: + - type: AddFields + fields: + - path: + - from + value: '"{{ stream_partition.start_time }}"' + - type: AddFields + fields: + - path: + - to + value: '"{{ stream_partition.end_time }}"' + incremental_sync: + type: DatetimeBasedCursor + cursor_field: to + cursor_datetime_formats: + - "%Y%m%d" + datetime_format: "%Y%m%d" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['replication_start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: from + inject_into: request_parameter + end_time_option: + type: RequestOption + field_name: to + inject_into: request_parameter + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + step: P365D + cursor_granularity: P1D + - type: DeclarativeStream + name: project_budget + primary_key: [] + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.harvestapp.com/v2/ + path: /reports/project_budget + http_method: GET + request_parameters: + per_page: "50" + request_headers: + Harvest-Account-Id: "{{ config['account_id'] }}" + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + $ref: "#/definitions/composite_error_handler" + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - results + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + page_size: 1 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: [] + - type: DeclarativeStream + name: time_clients + primary_key: [] + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.harvestapp.com/v2/ + path: /reports/time/clients + http_method: GET + request_parameters: + per_page: "50" + request_headers: + Harvest-Account-Id: "{{ config['account_id'] }}" + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + $ref: "#/definitions/composite_error_handler" + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - results + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + page_size: 1 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: [] + transformations: + - type: AddFields + fields: + - path: + - from + value: '"{{ stream_partition.start_time }}"' + - type: AddFields + fields: + - path: + - to + value: '"{{ stream_partition.end_time }}"' + incremental_sync: + type: DatetimeBasedCursor + cursor_field: to + cursor_datetime_formats: + - "%Y%m%d" + datetime_format: "%Y%m%d" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['replication_start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: from + inject_into: request_parameter + end_time_option: + type: RequestOption + field_name: to + inject_into: request_parameter + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + step: P365D + cursor_granularity: P1D + - type: DeclarativeStream + name: time_projects + primary_key: [] + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.harvestapp.com/v2/ + path: /reports/time/projects + http_method: GET + request_parameters: + per_page: "50" + request_headers: + Harvest-Account-Id: "{{ config['account_id'] }}" + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + $ref: "#/definitions/composite_error_handler" + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - results + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + page_size: 1 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: [] + transformations: + - type: AddFields + fields: + - path: + - from + value: '"{{ stream_partition.start_time }}"' + - type: AddFields + fields: + - path: + - to + value: '"{{ stream_partition.end_time }}"' + incremental_sync: + type: DatetimeBasedCursor + cursor_field: to + cursor_datetime_formats: + - "%Y%m%d" + datetime_format: "%Y%m%d" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['replication_start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: from + inject_into: request_parameter + end_time_option: + type: RequestOption + field_name: to + inject_into: request_parameter + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + step: P365D + cursor_granularity: P1D + - type: DeclarativeStream + name: time_tasks + primary_key: [] + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.harvestapp.com/v2/ + path: /reports/time/tasks + http_method: GET + request_parameters: + per_page: "50" + request_headers: + Harvest-Account-Id: "{{ config['account_id'] }}" + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + $ref: "#/definitions/composite_error_handler" + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - results + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + page_size: 1 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: [] + transformations: + - type: AddFields + fields: + - path: + - from + value: '"{{ stream_partition.start_time }}"' + - type: AddFields + fields: + - path: + - to + value: '"{{ stream_partition.end_time }}"' + incremental_sync: + type: DatetimeBasedCursor + cursor_field: to + cursor_datetime_formats: + - "%Y%m%d" + datetime_format: "%Y%m%d" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['replication_start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: from + inject_into: request_parameter + end_time_option: + type: RequestOption + field_name: to + inject_into: request_parameter + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + step: P365D + cursor_granularity: P1D + - type: DeclarativeStream + name: time_team + primary_key: [] + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.harvestapp.com/v2/ + path: /reports/time/team + http_method: GET + request_parameters: + per_page: "50" + request_headers: + Harvest-Account-Id: "{{ config['account_id'] }}" + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + $ref: "#/definitions/composite_error_handler" + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - results + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + page_size: 1 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: [] + transformations: + - type: AddFields + fields: + - path: + - from + value: '"{{ stream_partition.start_time }}"' + - type: AddFields + fields: + - path: + - to + value: '"{{ stream_partition.end_time }}"' + incremental_sync: + type: DatetimeBasedCursor + cursor_field: to + cursor_datetime_formats: + - "%Y%m%d" + datetime_format: "%Y%m%d" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['replication_start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: from + inject_into: request_parameter + end_time_option: + type: RequestOption + field_name: to + inject_into: request_parameter + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + step: P365D + cursor_granularity: P1D + - type: DeclarativeStream + name: uninvoiced + primary_key: [] + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.harvestapp.com/v2/ + path: /reports/uninvoiced + http_method: GET + request_parameters: + per_page: "50" + request_headers: + Harvest-Account-Id: "{{ config['account_id'] }}" + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + $ref: "#/definitions/composite_error_handler" + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - results + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + page_size: 1 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: [] + transformations: + - type: AddFields + fields: + - path: + - from + value: '"{{ stream_partition.start_time }}"' + - type: AddFields + fields: + - path: + - to + value: '"{{ stream_partition.end_time }}"' + incremental_sync: + type: DatetimeBasedCursor + cursor_field: to + cursor_datetime_formats: + - "%Y%m%d" + datetime_format: "%Y%m%d" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['replication_start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: from + inject_into: request_parameter + end_time_option: + type: RequestOption + field_name: to + inject_into: request_parameter + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + step: P365D + cursor_granularity: P1D + - type: DeclarativeStream + name: estimate_messages + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.harvestapp.com/v2/ + path: /estimates/{{stream_partition.id}}/messages + http_method: GET + request_parameters: + per_page: "50" + request_headers: + Harvest-Account-Id: "{{ config['account_id'] }}" + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + $ref: "#/definitions/composite_error_handler" + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - estimate_messages + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + page_size: 1 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: + - type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + partition_field: id + stream: + type: DeclarativeStream + name: estimates + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.harvestapp.com/v2/ + path: /estimates + http_method: GET + request_parameters: + per_page: "50" + request_headers: + Harvest-Account-Id: "{{ config['account_id'] }}" + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + type: CompositeErrorHandler + error_handlers: + - type: DefaultErrorHandler + backoff_strategies: + - type: WaitTimeFromHeader + header: Retry-After + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - estimates + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + page_size: 1 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: [] + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updated_at + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%SZ" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['replication_start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: updated_since + inject_into: request_parameter + transformations: + - type: AddFields + fields: + - path: + - parent_id + value: "{{stream_partition.id}}" + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updated_at + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%SZ" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['replication_start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: updated_since + inject_into: request_parameter + - type: DeclarativeStream + name: invoice_messages + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.harvestapp.com/v2/ + path: /invoices/{{stream_partition.id}}/messages + http_method: GET + request_parameters: + per_page: "50" + request_headers: + Harvest-Account-Id: "{{ config['account_id'] }}" + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + $ref: "#/definitions/composite_error_handler" + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - invoice_messages + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + page_size: 1 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: + - type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + partition_field: id + stream: + type: DeclarativeStream + name: invoices + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.harvestapp.com/v2/ + path: /invoices + http_method: GET + request_parameters: + per_page: "50" + request_headers: + Harvest-Account-Id: "{{ config['account_id'] }}" + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + type: CompositeErrorHandler + error_handlers: + - type: DefaultErrorHandler + backoff_strategies: + - type: WaitTimeFromHeader + header: Retry-After + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - invoices + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + page_size: 1 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: [] + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updated_at + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%SZ" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['replication_start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: updated_since + inject_into: request_parameter + transformations: + - type: AddFields + fields: + - path: + - parent_id + value: "{{stream_partition.id}}" + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updated_at + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%SZ" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['replication_start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: updated_since + inject_into: request_parameter + - type: DeclarativeStream + name: invoice_payments + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.harvestapp.com/v2/ + path: /invoices/{{stream_partition.id}}/payments + http_method: GET + request_parameters: + per_page: "50" + request_headers: + Harvest-Account-Id: "{{ config['account_id'] }}" + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + $ref: "#/definitions/composite_error_handler" + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - invoice_payments + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + page_size: 1 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: + - type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + partition_field: id + stream: + type: DeclarativeStream + name: invoices + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.harvestapp.com/v2/ + path: /invoices + http_method: GET + request_parameters: + per_page: "50" + request_headers: + Harvest-Account-Id: "{{ config['account_id'] }}" + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + type: CompositeErrorHandler + error_handlers: + - type: DefaultErrorHandler + backoff_strategies: + - type: WaitTimeFromHeader + header: Retry-After + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - invoices + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + page_size: 1 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: [] + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updated_at + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%SZ" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['replication_start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: updated_since + inject_into: request_parameter + transformations: + - type: AddFields + fields: + - path: + - parent_id + value: "{{stream_partition.id}}" + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updated_at + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%SZ" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['replication_start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: updated_since + inject_into: request_parameter + - type: DeclarativeStream + name: project_assignments + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.harvestapp.com/v2/ + path: /users/{{stream_partition.id}}/project_assignments + http_method: GET + request_parameters: + per_page: "50" + request_headers: + Harvest-Account-Id: "{{ config['account_id'] }}" + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + $ref: "#/definitions/composite_error_handler" + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - project_assignments + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + page_size: 1 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: + - type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + partition_field: id + stream: + type: DeclarativeStream + name: users + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.harvestapp.com/v2/ + path: /users + http_method: GET + request_parameters: + per_page: "50" + request_headers: + Harvest-Account-Id: "{{ config['account_id'] }}" + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + type: CompositeErrorHandler + error_handlers: + - type: DefaultErrorHandler + backoff_strategies: + - type: WaitTimeFromHeader + header: Retry-After + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - users + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + page_size: 1 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: [] + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updated_at + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%SZ" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['replication_start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: updated_since + inject_into: request_parameter + transformations: + - type: AddFields + fields: + - path: + - parent_id + value: "{{stream_partition.id}}" + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updated_at + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%SZ" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['replication_start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: updated_since + inject_into: request_parameter + - type: DeclarativeStream + name: billable_rates + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.harvestapp.com/v2/ + path: /users/{{stream_partition['id']}}/billable_rates + http_method: GET + request_parameters: + per_page: "50" + request_headers: + Harvest-Account-Id: "{{ config['account_id'] }}" + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + $ref: "#/definitions/composite_error_handler" + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - billable_rates + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + page_size: 1 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: + - type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + partition_field: id + stream: + type: DeclarativeStream + name: users + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.harvestapp.com/v2/ + path: /users + http_method: GET + request_parameters: + per_page: "50" + request_headers: + Harvest-Account-Id: "{{ config['account_id'] }}" + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + type: CompositeErrorHandler + error_handlers: + - type: DefaultErrorHandler + backoff_strategies: + - type: WaitTimeFromHeader + header: Retry-After + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - users + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + page_size: 1 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: [] + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updated_at + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%SZ" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['replication_start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: updated_since + inject_into: request_parameter + transformations: + - type: AddFields + fields: + - path: + - parent_id + value: "{{stream_partition.id}}" + - type: DeclarativeStream + name: cost_rates + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.harvestapp.com/v2/ + path: /users/{{stream_partition.id}}/cost_rates + http_method: GET + request_parameters: + per_page: "50" + request_headers: + Harvest-Account-Id: "{{ config['account_id'] }}" + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + $ref: "#/definitions/composite_error_handler" + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - cost_rates + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + page_size: 1 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: + - type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + partition_field: id + stream: + type: DeclarativeStream + name: users + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.harvestapp.com/v2/ + path: /users + http_method: GET + request_parameters: + per_page: "50" + request_headers: + Harvest-Account-Id: "{{ config['account_id'] }}" + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + type: CompositeErrorHandler + error_handlers: + - type: DefaultErrorHandler + backoff_strategies: + - type: WaitTimeFromHeader + header: Retry-After + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - users + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + page_size: 1 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: [] + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updated_at + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%SZ" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['replication_start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: updated_since + inject_into: request_parameter + transformations: + - type: AddFields + fields: + - path: + - parent_id + value: "{{stream_partition.id}}" +metadata: + autoImportSchema: + clients: true + company: true + contacts: true + estimate_item_categories: true + estimates: true + expense_categories: true + expenses: true + invoice_item_categories: true + invoices: true + projects: true + roles: true + task_assignments: true + tasks: true + time_entries: true + user_assignments: true + users: true + expenses_categories: true + expenses_clients: true + expenses_projects: true + expenses_team: true + project_budget: true + time_clients: true + time_projects: true + time_tasks: true + time_team: true + uninvoiced: true + estimate_messages: true + invoice_messages: true + invoice_payments: true + project_assignments: true + billable_rates: true + cost_rates: true diff --git a/airbyte-integrations/connectors/source-harvest/source_harvest/run.py b/airbyte-integrations/connectors/source-harvest/source_harvest/run.py index 53406b411f86e..9f4222013e936 100644 --- a/airbyte-integrations/connectors/source-harvest/source_harvest/run.py +++ b/airbyte-integrations/connectors/source-harvest/source_harvest/run.py @@ -8,7 +8,10 @@ from airbyte_cdk.entrypoint import launch from source_harvest import SourceHarvest +from .config_migrations import MigrateAuthType + def run(): source = SourceHarvest() + MigrateAuthType.migrate(sys.argv[1:], source) launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-harvest/source_harvest/source.py b/airbyte-integrations/connectors/source-harvest/source_harvest/source.py index 3fbc74c04ab4f..8bf4afedbc0cb 100644 --- a/airbyte-integrations/connectors/source-harvest/source_harvest/source.py +++ b/airbyte-integrations/connectors/source-harvest/source_harvest/source.py @@ -1,131 +1,18 @@ # # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # -import logging -from typing import Any, List, Mapping, Optional, Tuple -import pendulum -from airbyte_cdk.logger import AirbyteLogger -from airbyte_cdk.sources import AbstractSource -from airbyte_cdk.sources.streams import Stream -from airbyte_cdk.utils import AirbyteTracedException -from airbyte_protocol.models import FailureType -from source_harvest.availability_strategy import HarvestAvailabilityStrategy -from source_harvest.streams import ( - BillableRates, - Clients, - Company, - Contacts, - CostRates, - EstimateItemCategories, - EstimateMessages, - Estimates, - ExpenseCategories, - Expenses, - ExpensesCategories, - ExpensesClients, - ExpensesProjects, - ExpensesTeam, - InvoiceItemCategories, - InvoiceMessages, - InvoicePayments, - Invoices, - ProjectAssignments, - ProjectBudget, - Projects, - Roles, - TaskAssignments, - Tasks, - TimeClients, - TimeEntries, - TimeProjects, - TimeTasks, - TimeTeam, - Uninvoiced, - UserAssignments, - Users, -) +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource -from .auth import HarvestOauth2Authenticator, HarvestTokenAuthenticator +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. +WARNING: Do not modify this file. +""" -class SourceHarvest(AbstractSource): - @staticmethod - def get_authenticator(config): - credentials = config.get("credentials", {}) - if credentials and "client_id" in credentials: - if "account_id" not in config: - raise AirbyteTracedException( - "Config validation error: 'account_id' is a required property", - failure_type=FailureType.config_error, - ) - return HarvestOauth2Authenticator( - token_refresh_endpoint="https://id.getharvest.com/api/v2/oauth2/token", - client_id=credentials.get("client_id"), - client_secret=credentials.get("client_secret"), - refresh_token=credentials.get("refresh_token"), - account_id=config["account_id"], - ) - api_token = credentials.get("api_token", config.get("api_token")) - if not api_token: - raise AirbyteTracedException( - "Config validation error: 'api_token' is a required property", - failure_type=FailureType.config_error, - ) - return HarvestTokenAuthenticator(token=api_token, account_id=config["account_id"]) - - def check_connection(self, logger: logging.Logger, config: Mapping[str, Any]) -> Tuple[bool, Optional[str]]: - auth = self.get_authenticator(config) - replication_start_date = pendulum.parse(config["replication_start_date"]) - users_stream = Users(authenticator=auth, replication_start_date=replication_start_date) - return HarvestAvailabilityStrategy().check_availability(users_stream, logger, self) - - def streams(self, config: Mapping[str, Any]) -> List[Stream]: - """ - :param config: A Mapping of the user input configuration as defined in the connector spec. - """ - auth = self.get_authenticator(config) - replication_start_date = pendulum.parse(config["replication_start_date"]) - from_date = replication_start_date.date() - replication_end_date = config.get("replication_end_date") - replication_end_date = replication_end_date and pendulum.parse(replication_end_date) - to_date = replication_end_date and replication_end_date.date() - date_range = {"from_date": from_date, "to_date": to_date} - - streams = [ - Clients(authenticator=auth, replication_start_date=replication_start_date), - Contacts(authenticator=auth, replication_start_date=replication_start_date), - Company(authenticator=auth), - Invoices(authenticator=auth, replication_start_date=replication_start_date), - InvoiceMessages(authenticator=auth, replication_start_date=replication_start_date), - InvoicePayments(authenticator=auth, replication_start_date=replication_start_date), - InvoiceItemCategories(authenticator=auth, replication_start_date=replication_start_date), - Estimates(authenticator=auth, replication_start_date=replication_start_date), - EstimateMessages(authenticator=auth, replication_start_date=replication_start_date), - EstimateItemCategories(authenticator=auth, replication_start_date=replication_start_date), - Expenses(authenticator=auth, replication_start_date=replication_start_date), - ExpenseCategories(authenticator=auth, replication_start_date=replication_start_date), - Tasks(authenticator=auth, replication_start_date=replication_start_date), - TimeEntries(authenticator=auth, replication_start_date=replication_start_date), - UserAssignments(authenticator=auth, replication_start_date=replication_start_date), - TaskAssignments(authenticator=auth, replication_start_date=replication_start_date), - Projects(authenticator=auth, replication_start_date=replication_start_date), - Roles(authenticator=auth, replication_start_date=replication_start_date), - Users(authenticator=auth, replication_start_date=replication_start_date), - BillableRates(authenticator=auth), - CostRates(authenticator=auth), - ProjectAssignments(authenticator=auth, replication_start_date=replication_start_date), - ExpensesClients(authenticator=auth, **date_range), - ExpensesProjects(authenticator=auth, **date_range), - ExpensesCategories(authenticator=auth, **date_range), - ExpensesTeam(authenticator=auth, **date_range), - Uninvoiced(authenticator=auth, **date_range), - TimeClients(authenticator=auth, **date_range), - TimeProjects(authenticator=auth, **date_range), - TimeTasks(authenticator=auth, **date_range), - TimeTeam(authenticator=auth, **date_range), - ProjectBudget(authenticator=auth, **date_range), - ] - - return streams +# Declarative Source +class SourceHarvest(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "manifest.yaml"}) diff --git a/airbyte-integrations/connectors/source-harvest/source_harvest/spec.json b/airbyte-integrations/connectors/source-harvest/source_harvest/spec.json deleted file mode 100644 index 55b5ba2abef27..0000000000000 --- a/airbyte-integrations/connectors/source-harvest/source_harvest/spec.json +++ /dev/null @@ -1,140 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.com/integrations/sources/harvest", - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Harvest Spec", - "type": "object", - "required": ["account_id", "replication_start_date"], - "additionalProperties": true, - "properties": { - "account_id": { - "title": "Account ID", - "description": "Harvest account ID. Required for all Harvest requests in pair with Personal Access Token", - "airbyte_secret": true, - "type": "string", - "order": 0 - }, - "replication_start_date": { - "title": "Start Date", - "description": "UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.", - "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$", - "examples": ["2017-01-25T00:00:00Z"], - "type": "string", - "order": 1, - "format": "date-time" - }, - "replication_end_date": { - "title": "End Date", - "description": "UTC date and time in the format 2017-01-25T00:00:00Z. Any data after this date will not be replicated.", - "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$", - "examples": ["2017-01-25T00:00:00Z"], - "type": "string", - "airbyte_hidden": true, - "order": 2, - "format": "date-time" - }, - "credentials": { - "title": "Authentication mechanism", - "description": "Choose how to authenticate to Harvest.", - "type": "object", - "order": 3, - "oneOf": [ - { - "type": "object", - "title": "Authenticate via Harvest (OAuth)", - "required": ["client_id", "client_secret", "refresh_token"], - "additionalProperties": true, - "properties": { - "auth_type": { - "type": "string", - "const": "Client", - "order": 0 - }, - "client_id": { - "title": "Client ID", - "type": "string", - "description": "The Client ID of your Harvest developer application." - }, - "client_secret": { - "title": "Client Secret", - "type": "string", - "description": "The Client Secret of your Harvest developer application.", - "airbyte_secret": true - }, - "refresh_token": { - "title": "Refresh Token", - "type": "string", - "description": "Refresh Token to renew the expired Access Token.", - "airbyte_secret": true - } - } - }, - { - "type": "object", - "title": "Authenticate with Personal Access Token", - "required": ["api_token"], - "additionalProperties": true, - "properties": { - "auth_type": { - "type": "string", - "const": "Token", - "order": 0 - }, - "api_token": { - "title": "Personal Access Token", - "description": "Log into Harvest and then create new personal access token.", - "type": "string", - "airbyte_secret": true - } - } - } - ] - } - } - }, - "supportsIncremental": true, - "supported_destination_sync_modes": ["append"], - "advanced_auth": { - "auth_flow_type": "oauth2.0", - "predicate_key": ["credentials", "auth_type"], - "predicate_value": "Client", - "oauth_config_specification": { - "complete_oauth_output_specification": { - "type": "object", - "additionalProperties": true, - "properties": { - "refresh_token": { - "type": "string", - "path_in_connector_config": ["credentials", "refresh_token"] - } - } - }, - "complete_oauth_server_input_specification": { - "type": "object", - "additionalProperties": true, - "properties": { - "client_id": { - "type": "string" - }, - "client_secret": { - "type": "string" - } - } - }, - "complete_oauth_server_output_specification": { - "type": "object", - "additionalProperties": true, - "properties": { - "client_id": { - "type": "string", - "path_in_connector_config": ["credentials", "client_id"] - }, - "client_secret": { - "type": "string", - "path_in_connector_config": ["credentials", "client_secret"] - } - } - } - } - } -} diff --git a/airbyte-integrations/connectors/source-harvest/source_harvest/streams.py b/airbyte-integrations/connectors/source-harvest/source_harvest/streams.py deleted file mode 100644 index a910073440775..0000000000000 --- a/airbyte-integrations/connectors/source-harvest/source_harvest/streams.py +++ /dev/null @@ -1,448 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from abc import ABC, abstractmethod -from typing import Any, Iterable, Mapping, MutableMapping, Optional -from urllib.parse import parse_qsl, urlparse - -import pendulum -import requests -from airbyte_cdk.models import SyncMode -from airbyte_cdk.sources.streams.http import HttpStream - - -class HarvestStream(HttpStream, ABC): - url_base = "https://api.harvestapp.com/v2/" - per_page = 50 - primary_key = "id" - - @property - def data_field(self) -> str: - """ - :return: Default field name to get data from response - """ - return self.name - - def backoff_time(self, response: requests.Response): - if "Retry-After" in response.headers: - return int(response.headers["Retry-After"]) - else: - self.logger.info("Retry-after header not found. Using default backoff value") - return super().backoff_time(response) - - def path(self, **kwargs) -> str: - return self.name - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - stream_data = response.json() - if stream_data.get("next_page"): - return { - "page": stream_data["next_page"], - } - - def request_params( - self, - stream_state: Mapping[str, Any], - stream_slice: Mapping[str, Any] = None, - next_page_token: Mapping[str, Any] = None, - ) -> MutableMapping[str, Any]: - params = super().request_params(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token) - params["per_page"] = self.per_page - if next_page_token: - params.update(**next_page_token) - return params - - def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - """ - :return an iterable containing each record in the response - """ - stream_data = response.json() - - # depending on stream type we may get either: - # * nested records iterable in response object; - # * not nested records iterable; - # * single object to yield. - if self.data_field: - stream_data = response.json().get(self.data_field, []) - - if isinstance(stream_data, list): - yield from stream_data - else: - yield stream_data - - -class IncrementalHarvestStream(HarvestStream, ABC): - cursor_field = "updated_at" - - def __init__(self, replication_start_date: Optional[pendulum.DateTime] = None, **kwargs) -> None: - super().__init__(**kwargs) - self._replication_start_date = replication_start_date - - def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: - """ - Return the latest state by comparing the cursor value in the latest record with the stream's most recent state object - and returning an updated state object. - """ - latest_benchmark = latest_record[self.cursor_field] - if current_stream_state.get(self.cursor_field): - return {self.cursor_field: max(latest_benchmark, current_stream_state[self.cursor_field])} - return {self.cursor_field: latest_benchmark} - - def request_params( - self, - stream_state: Mapping[str, Any], - stream_slice: Mapping[str, Any] = None, - next_page_token: Mapping[str, Any] = None, - ) -> MutableMapping[str, Any]: - params = super().request_params(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token) - - replication_start_date = None - if stream_state.get(self.cursor_field): - replication_start_date = stream_state.get(self.cursor_field) - elif self._replication_start_date: - replication_start_date = self._replication_start_date.format("YYYY-MM-DDTHH:mm:ssZ") - params.update({"updated_since": replication_start_date}) - return params - - -class HarvestSubStream(HarvestStream, ABC): - @property - @abstractmethod - def path_template(self) -> str: - """ - :return: sub stream path template - """ - - @property - @abstractmethod - def parent_stream(self) -> IncrementalHarvestStream: - """ - :return: parent stream class - """ - - def stream_slices(self, **kwargs) -> Iterable[Optional[Mapping[str, any]]]: - items = self.parent_stream(authenticator=self.authenticator) - for item in items.read_records(sync_mode=SyncMode.full_refresh): - yield {"parent_id": item["id"]} - - def path(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwargs) -> str: - return self.path_template.format(parent_id=stream_slice["parent_id"]) - - def parse_response(self, response: requests.Response, stream_slice: Mapping[str, Any] = None, **kwargs) -> Iterable[Mapping]: - for record in super().parse_response(response, stream_slice=stream_slice, **kwargs): - record["parent_id"] = stream_slice["parent_id"] - yield record - - -class Contacts(IncrementalHarvestStream): - """ - Docs: https://help.getharvest.com/api-v2/clients-api/clients/contacts/ - """ - - -class Clients(IncrementalHarvestStream): - """ - Docs: https://help.getharvest.com/api-v2/clients-api/clients/clients/ - """ - - -class Company(HarvestStream): - """ - Docs: https://help.getharvest.com/api-v2/company-api/company/company/ - """ - - primary_key = None - data_field = None - - -class Invoices(IncrementalHarvestStream): - """ - Docs: https://help.getharvest.com/api-v2/invoices-api/invoices/invoices/ - """ - - -class InvoiceMessages(HarvestSubStream, IncrementalHarvestStream): - """ - Docs: https://help.getharvest.com/api-v2/invoices-api/invoices/invoice-messages/ - """ - - parent_stream = Invoices - path_template = "invoices/{parent_id}/messages" - - -class InvoicePayments(HarvestSubStream, IncrementalHarvestStream): - """ - Docs: https://help.getharvest.com/api-v2/invoices-api/invoices/invoice-payments/ - """ - - parent_stream = Invoices - path_template = "invoices/{parent_id}/payments" - - -class InvoiceItemCategories(IncrementalHarvestStream): - """ - Docs: https://help.getharvest.com/api-v2/invoices-api/invoices/invoice-item-categories/ - """ - - -class Estimates(IncrementalHarvestStream): - """ - Docs: https://help.getharvest.com/api-v2/estimates-api/estimates/estimates/ - """ - - -class EstimateMessages(HarvestSubStream, IncrementalHarvestStream): - """ - Docs: https://help.getharvest.com/api-v2/estimates-api/estimates/estimate-messages/ - """ - - parent_stream = Estimates - path_template = "estimates/{parent_id}/messages" - - -class EstimateItemCategories(IncrementalHarvestStream): - """ - Docs: https://help.getharvest.com/api-v2/estimates-api/estimates/estimate-item-categories/ - """ - - -class Expenses(IncrementalHarvestStream): - """ - Docs: https://help.getharvest.com/api-v2/expenses-api/expenses/expenses/ - """ - - -class ExpenseCategories(IncrementalHarvestStream): - """ - Docs: https://help.getharvest.com/api-v2/expenses-api/expenses/expense-categories/ - """ - - -class Tasks(IncrementalHarvestStream): - """ - Docs: https://help.getharvest.com/api-v2/tasks-api/tasks/tasks/ - """ - - -class TimeEntries(IncrementalHarvestStream): - """ - Docs: https://help.getharvest.com/api-v2/timesheets-api/timesheets/time-entries/ - """ - - -class UserAssignments(IncrementalHarvestStream): - """ - Docs: https://help.getharvest.com/api-v2/projects-api/projects/user-assignments/ - """ - - -class TaskAssignments(IncrementalHarvestStream): - """ - Docs: https://help.getharvest.com/api-v2/projects-api/projects/task-assignments/ - """ - - -class Projects(IncrementalHarvestStream): - """ - Docs: https://help.getharvest.com/api-v2/projects-api/projects/projects/ - """ - - -class Roles(IncrementalHarvestStream): - """ - Docs: https://help.getharvest.com/api-v2/roles-api/roles/roles/ - """ - - -class Users(IncrementalHarvestStream): - """ - Docs: https://help.getharvest.com/api-v2/users-api/users/users/ - """ - - -class BillableRates(HarvestSubStream): - """ - Docs: https://help.getharvest.com/api-v2/users-api/users/billable-rates/ - """ - - parent_stream = Users - path_template = "users/{parent_id}/billable_rates" - - -class CostRates(HarvestSubStream): - """ - Docs: https://help.getharvest.com/api-v2/users-api/users/cost-rates/ - """ - - parent_stream = Users - path_template = "users/{parent_id}/cost_rates" - - -class ProjectAssignments(HarvestSubStream, IncrementalHarvestStream): - """ - Docs: https://help.getharvest.com/api-v2/users-api/users/project-assignments/ - """ - - parent_stream = Users - path_template = "users/{parent_id}/project_assignments" - - -class ReportsBase(HarvestStream, ABC): - data_field = "results" - date_param_template = "%Y%m%d" - primary_key = None - - @property - @abstractmethod - def report_path(self): - """ - :return: report path suffix - """ - - def __init__(self, from_date: Optional[pendulum.date] = None, to_date: Optional[pendulum.date] = None, **kwargs): - super().__init__(**kwargs) - - current_date = pendulum.now().date() - self._from_date = from_date or current_date.subtract(days=365) - self._to_date = to_date or current_date - # `to` date greater than `from` date causes an exception on Harvest - if self._from_date > current_date: - self._to_date = from_date - - def path(self, **kwargs) -> str: - return f"reports/{self.report_path}" - - -class IncrementalReportsBase(ReportsBase, ABC): - cursor_field = "to" - - def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - parsed_url = urlparse(response.url) - params = dict(parse_qsl(parsed_url.query)) - - records = response.json().get(self.data_field, []) - for record in records: - record.update( - { - "from": params.get("from", self._from_date.strftime(self.date_param_template)), - "to": params.get("to", self._to_date.strftime(self.date_param_template)), - } - ) - yield record - - def request_params(self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, **kwargs) -> MutableMapping[str, Any]: - params = super().request_params(stream_state, **kwargs) - params = {**params, **stream_slice} if stream_slice else params - return params - - def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]): - """ - Return the latest state by comparing the cursor value in the latest record with the stream's most recent state object - and returning an updated state object. - """ - latest_benchmark = latest_record[self.cursor_field] - if current_stream_state.get(self.cursor_field): - return {self.cursor_field: max(latest_benchmark, current_stream_state[self.cursor_field])} - return {self.cursor_field: latest_benchmark} - - def stream_slices(self, sync_mode, stream_state: Mapping[str, Any] = None, **kwargs) -> Iterable[Optional[MutableMapping[str, any]]]: - """ - Override default stream_slices CDK method to provide date_slices as page chunks for data fetch. - """ - start_date = self._from_date - end_date = self._to_date - - # determine stream_state, if no stream_state we use start_date - if stream_state: - start_date = pendulum.parse(stream_state.get(self.cursor_field)).date() - - while start_date < end_date: - # Max size of date chunks is 365 days - # Docs: https://help.getharvest.com/api-v2/reports-api/reports/time-reports/ - end_date_slice = end_date if start_date >= end_date.subtract(days=365) else start_date.add(days=365) - date_slice = {"from": start_date.strftime(self.date_param_template), "to": end_date_slice.strftime(self.date_param_template)} - - start_date = end_date_slice - - yield date_slice - - -class ExpensesClients(IncrementalReportsBase): - """ - Docs: https://help.getharvest.com/api-v2/reports-api/reports/expense-reports/#clients-report - """ - - report_path = "expenses/clients" - - -class ExpensesProjects(IncrementalReportsBase): - """ - Docs: https://help.getharvest.com/api-v2/reports-api/reports/expense-reports/#projects-report - """ - - report_path = "expenses/projects" - - -class ExpensesCategories(IncrementalReportsBase): - """ - Docs: https://help.getharvest.com/api-v2/reports-api/reports/expense-reports/#expense-categories-report - """ - - report_path = "expenses/categories" - - -class ExpensesTeam(IncrementalReportsBase): - """ - Docs: https://help.getharvest.com/api-v2/reports-api/reports/expense-reports/#team-report - """ - - report_path = "expenses/team" - - -class Uninvoiced(IncrementalReportsBase): - """ - Docs: https://help.getharvest.com/api-v2/reports-api/reports/uninvoiced-report/ - """ - - report_path = "uninvoiced" - - -class TimeClients(IncrementalReportsBase): - """ - Docs: https://help.getharvest.com/api-v2/reports-api/reports/time-reports/#clients-report - """ - - report_path = "time/clients" - - -class TimeProjects(IncrementalReportsBase): - """ - Docs: https://help.getharvest.com/api-v2/reports-api/reports/time-reports/#projects-report - """ - - report_path = "time/projects" - - -class TimeTasks(IncrementalReportsBase): - """ - Docs: https://help.getharvest.com/api-v2/reports-api/reports/time-reports/#tasks-report - """ - - report_path = "time/tasks" - - -class TimeTeam(IncrementalReportsBase): - """ - Docs: https://help.getharvest.com/api-v2/reports-api/reports/time-reports/ - """ - - report_path = "time/team" - - -class ProjectBudget(ReportsBase): - """ - Docs: https://help.getharvest.com/api-v2/reports-api/reports/project-budget-report/#project-budget-report - """ - - report_path = "project_budget" diff --git a/airbyte-integrations/connectors/source-harvest/unit_tests/config.py b/airbyte-integrations/connectors/source-harvest/unit_tests/config.py index 8f806344d9b19..1156237c441ea 100644 --- a/airbyte-integrations/connectors/source-harvest/unit_tests/config.py +++ b/airbyte-integrations/connectors/source-harvest/unit_tests/config.py @@ -10,7 +10,8 @@ def __init__(self) -> None: "account_id": "an account id", "replication_start_date": "2021-01-01T00:00:00Z", "credentials": { - "api_token": "an api key" + "api_token": "an api key", + "auth_type": "Token" } } @@ -19,7 +20,7 @@ def with_account_id(self, account_id: str) -> "ConfigBuilder": return self def with_replication_start_date(self, replication_start_date: datetime) -> "ConfigBuilder": - self._config["start_date"] = replication_start_date.isoformat()[:-13]+"Z" + self._config["replication_start_date"] = replication_start_date.strftime('%Y-%m-%dT%H:%M:%SZ') return self def with_api_token(self, api_token: str) -> "ConfigBuilder": @@ -30,5 +31,9 @@ def with_client_id(self, client_id: str) -> "ConfigBuilder": self._config["credentials"]["client_id"] = client_id return self + def with_auth_type(self, auth_type: str) -> "ConfigBuilder": + self._config["credentials"]["auth_type"] = auth_type + return self + def build(self) -> Dict[str, Any]: return self._config diff --git a/airbyte-integrations/connectors/source-harvest/unit_tests/conftest.py b/airbyte-integrations/connectors/source-harvest/unit_tests/conftest.py index 188f75b627d61..259dbdb56c628 100644 --- a/airbyte-integrations/connectors/source-harvest/unit_tests/conftest.py +++ b/airbyte-integrations/connectors/source-harvest/unit_tests/conftest.py @@ -10,8 +10,7 @@ def config_fixture(requests_mock): url = "https://id.getharvest.com/api/v2/oauth2/token" requests_mock.get(url, json={}) - config = {"account_id": "ID", "replication_start_date": "2021-01-01T21:20:07Z", "credentials": {"api_token": "TOKEN"}} - + config = {"account_id": "ID", "replication_start_date": "2021-01-01T21:20:07Z", "credentials": {"auth_type": "Token", "api_token": "TOKEN"}} return config diff --git a/airbyte-integrations/connectors/source-harvest/unit_tests/integration/test_invoice_messages.py b/airbyte-integrations/connectors/source-harvest/unit_tests/integration/test_invoice_messages.py index ed1ddd9f1bc32..6e0fa98cc0b78 100644 --- a/airbyte-integrations/connectors/source-harvest/unit_tests/integration/test_invoice_messages.py +++ b/airbyte-integrations/connectors/source-harvest/unit_tests/integration/test_invoice_messages.py @@ -19,7 +19,7 @@ from config import ConfigBuilder from source_harvest import SourceHarvest -_A_REPLICATION_START_DATE = "2021-01-01T00:00:00+00:00" +_A_START_DATE = "2021-01-01T00:00:00+00:00" _AN_ACCOUNT_ID = "1209384" _AN_API_KEY = "harvestapikey" _AN_INVOICE_ID = "an-invoice-id" @@ -74,13 +74,19 @@ def _read( class InvoicesTest(TestCase): + + def setUp(self) -> None: + self._datetime_start_date = datetime.fromisoformat(_A_START_DATE) + self._string_formatted_start_date = self._datetime_start_date.strftime('%Y-%m-%dT%H:%M:%SZ') + @HttpMocker() - def test_given_replication_start_date_when_read_then_request_is_created_properly(self, http_mocker: HttpMocker): + def test_given_start_date_when_read_then_request_is_created_properly(self, http_mocker: HttpMocker): http_mocker.get( HttpRequest( url="https://api.harvestapp.com/v2/invoices", query_params={ "per_page": "50", + "updated_since": self._string_formatted_start_date, }, ), _invoices_response().with_record(_an_invoice().with_id(_AN_INVOICE_ID)).build() @@ -90,12 +96,12 @@ def test_given_replication_start_date_when_read_then_request_is_created_properly url=f"https://api.harvestapp.com/v2/invoices/{_AN_INVOICE_ID}/messages", query_params={ "per_page": "50", - "updated_since": _A_REPLICATION_START_DATE, + "updated_since": self._string_formatted_start_date, }, ), _invoices_response().with_record(_a_message()).build() ) - _read(ConfigBuilder().with_account_id(_AN_ACCOUNT_ID).with_api_token(_AN_API_KEY).with_replication_start_date(datetime.fromisoformat(_A_REPLICATION_START_DATE))) + _read(ConfigBuilder().with_account_id(_AN_ACCOUNT_ID).with_api_token(_AN_API_KEY).with_replication_start_date(self._datetime_start_date)) # endpoint is called diff --git a/airbyte-integrations/connectors/source-harvest/unit_tests/integration/test_invoices.py b/airbyte-integrations/connectors/source-harvest/unit_tests/integration/test_invoices.py index 791f3fd785e5e..ff1c1678fd4cd 100644 --- a/airbyte-integrations/connectors/source-harvest/unit_tests/integration/test_invoices.py +++ b/airbyte-integrations/connectors/source-harvest/unit_tests/integration/test_invoices.py @@ -19,7 +19,7 @@ from config import ConfigBuilder from source_harvest import SourceHarvest -_A_REPLICATION_START_DATE = "2021-01-01T00:00:00+00:00" +_A_START_DATE = "2021-01-01T00:00:00+00:00" _AN_ACCOUNT_ID = "1209384" _AN_API_KEY = "harvestapikey" _STREAM_NAME = "invoices" @@ -57,13 +57,17 @@ def _read( class InvoicesTest(TestCase): @HttpMocker() - def test_given_replication_start_date_when_read_then_request_is_created_properly(self, http_mocker: HttpMocker): + def test_given_start_date_when_read_then_request_is_created_properly(self, http_mocker: HttpMocker): + + datetime_start_date = datetime.fromisoformat(_A_START_DATE) + string_formatted_start_date = datetime_start_date.strftime('%Y-%m-%dT%H:%M:%SZ') + http_mocker.get( HttpRequest( url="https://api.harvestapp.com/v2/invoices", query_params={ "per_page": "50", - "updated_since": _A_REPLICATION_START_DATE, + "updated_since": string_formatted_start_date, }, headers={ "Authorization": f"Bearer {_AN_API_KEY}", @@ -73,6 +77,6 @@ def test_given_replication_start_date_when_read_then_request_is_created_properly _invoices_response().build() ) - _read(ConfigBuilder().with_account_id(_AN_ACCOUNT_ID).with_api_token(_AN_API_KEY).with_replication_start_date(datetime.fromisoformat(_A_REPLICATION_START_DATE))) + _read(ConfigBuilder().with_account_id(_AN_ACCOUNT_ID).with_api_token(_AN_API_KEY).with_replication_start_date(datetime_start_date)) # endpoint is called diff --git a/airbyte-integrations/connectors/source-harvest/unit_tests/pagination.py b/airbyte-integrations/connectors/source-harvest/unit_tests/pagination.py new file mode 100644 index 0000000000000..351c3c576a3e2 --- /dev/null +++ b/airbyte-integrations/connectors/source-harvest/unit_tests/pagination.py @@ -0,0 +1,11 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from typing import Any, Dict + +from airbyte_cdk.test.mock_http.response_builder import PaginationStrategy + + +class HarvestPaginationStrategy(PaginationStrategy): + @staticmethod + def update(response: Dict[str, Any]) -> None: + response["links"]["next"] = "https://api.harvestapp.com/v2/invoices?page=2&per_page=50" \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-harvest/unit_tests/request_builder.py b/airbyte-integrations/connectors/source-harvest/unit_tests/request_builder.py new file mode 100644 index 0000000000000..bd38dd0a4701a --- /dev/null +++ b/airbyte-integrations/connectors/source-harvest/unit_tests/request_builder.py @@ -0,0 +1,82 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +import base64 +from datetime import datetime +from typing import List, Optional + +from airbyte_cdk.test.mock_http import HttpRequest +from airbyte_cdk.test.mock_http.request import ANY_QUERY_PARAMS + + +class HarvestRequestBuilder: + + @classmethod + def invoices_endpoint(cls, account_id: str) -> "HarvestRequestBuilder": + return cls("invoices", account_id) + + @classmethod + def invoice_messages_endpoint(cls, account_id: str, invoice_id: str) -> "HarvestRequestBuilder": + return cls(f"invoices/{invoice_id}/messages", account_id) + + @classmethod + def expenses_clients_endpoint(cls, account_id: str) -> "HarvestRequestBuilder": + return cls("reports/expenses/clients", account_id) + + def __init__(self, resource: str, account_id: str) -> "HarvestRequestBuilder": + self._resource: str = resource + self._account_id: str = account_id + self._per_page: Optional[int] = None + self._page: Optional[int] = None + self._updated_since: Optional[str] = None + self._from: Optional[str] = None + self._to: Optional[str] = None + self._any_query_params: bool = False + + + def with_any_query_params(self) -> "HarvestRequestBuilder": + self._any_query_params = True + return self + + def with_per_page(self, per_page: int) -> "HarvestRequestBuilder": + self._per_page = per_page + return self + + def with_page(self, page: int) -> "HarvestRequestBuilder": + self._page = page + return self + + def with_updated_since(self, updated_since: str) -> "HarvestRequestBuilder": + self._updated_since = updated_since + return self + + def with_from(self, _from: datetime) -> "HarvestRequestBuilder": + self._from = datetime.strftime(_from, "%Y%m%d") + return self + + def with_to(self, to: datetime) -> "HarvestRequestBuilder": + self._to = datetime.strftime(to, "%Y%m%d") + return self + + def build(self) -> HttpRequest: + query_params = {} + if self._page: + query_params["page"] = self._page + if self._per_page: + query_params["per_page"] = self._per_page + if self._updated_since: + query_params["updated_since"] = self._updated_since + if self._from: + query_params["from"] = self._from + if self._to: + query_params["to"] = self._to + + if self._any_query_params: + if query_params: + raise ValueError(f"Both `any_query_params` and {list(query_params.keys())} were configured. Provide only one of none but not both.") + query_params = ANY_QUERY_PARAMS + + return HttpRequest( + url=f"https://api.harvestapp.com/v2/{self._resource}", + query_params=query_params, + headers={} + ) \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-harvest/unit_tests/resource/http/response/400.json b/airbyte-integrations/connectors/source-harvest/unit_tests/resource/http/response/400.json new file mode 100644 index 0000000000000..4ef4f111d37c0 --- /dev/null +++ b/airbyte-integrations/connectors/source-harvest/unit_tests/resource/http/response/400.json @@ -0,0 +1,4 @@ +{ + "code": 400, + "explanation": "Cannot determine what actual 400 response is so this is a placeholder explanation." +} diff --git a/airbyte-integrations/connectors/source-harvest/unit_tests/resource/http/response/401.json b/airbyte-integrations/connectors/source-harvest/unit_tests/resource/http/response/401.json new file mode 100644 index 0000000000000..97870c0f131f5 --- /dev/null +++ b/airbyte-integrations/connectors/source-harvest/unit_tests/resource/http/response/401.json @@ -0,0 +1,4 @@ +{ + "code": 401, + "explanation": "Cannot determine what actual 401 response is so this is a placeholder explanation." +} diff --git a/airbyte-integrations/connectors/source-harvest/unit_tests/resource/http/response/403.json b/airbyte-integrations/connectors/source-harvest/unit_tests/resource/http/response/403.json new file mode 100644 index 0000000000000..ff2e4117e6b8c --- /dev/null +++ b/airbyte-integrations/connectors/source-harvest/unit_tests/resource/http/response/403.json @@ -0,0 +1,4 @@ +{ + "code": 403, + "explanation": "The object you requested was found but you don’t have authorization to perform your request." +} diff --git a/airbyte-integrations/connectors/source-harvest/unit_tests/resource/http/response/404.json b/airbyte-integrations/connectors/source-harvest/unit_tests/resource/http/response/404.json new file mode 100644 index 0000000000000..2d99e509a1285 --- /dev/null +++ b/airbyte-integrations/connectors/source-harvest/unit_tests/resource/http/response/404.json @@ -0,0 +1,4 @@ +{ + "code": 404, + "explanation": "The object you requested can’t be found." +} diff --git a/airbyte-integrations/connectors/source-harvest/unit_tests/resource/http/response/expenses_clients.json b/airbyte-integrations/connectors/source-harvest/unit_tests/resource/http/response/expenses_clients.json new file mode 100644 index 0000000000000..5ccf60e2c4c4a --- /dev/null +++ b/airbyte-integrations/connectors/source-harvest/unit_tests/resource/http/response/expenses_clients.json @@ -0,0 +1,30 @@ +{ + "results": [ + { + "client_id": 5735776, + "client_name": "123 Industries", + "total_amount": 100, + "billable_amount": 100, + "currency": "EUR" + }, + { + "client_id": 5735774, + "client_name": "ABC Corp", + "total_amount": 133.35, + "billable_amount": 133.35, + "currency": "USD" + } + ], + "per_page": 50, + "total_pages": 1, + "total_entries": 2, + "next_page": null, + "previous_page": null, + "page": 1, + "links": { + "first": null, + "next": null, + "previous": null, + "last": null + } +} diff --git a/airbyte-integrations/connectors/source-harvest/unit_tests/resource/http/response/invoice_messages.json b/airbyte-integrations/connectors/source-harvest/unit_tests/resource/http/response/invoice_messages.json index fb4cedccb1779..9bc7cbd4f71a6 100644 --- a/airbyte-integrations/connectors/source-harvest/unit_tests/resource/http/response/invoice_messages.json +++ b/airbyte-integrations/connectors/source-harvest/unit_tests/resource/http/response/invoice_messages.json @@ -53,16 +53,16 @@ "body": "---------------------------------------------\r\nInvoice Summary\r\n---------------------------------------------\r\nInvoice ID: 1001\r\nIssue Date: 04/01/2017\r\nClient: 123 Industries\r\nP.O. Number: \r\nAmount: €288.90\r\nDue: 04/01/2017 (upon receipt)\r\n\r\nThe detailed invoice is attached as a PDF.\r\n\r\nThank you!\r\n---------------------------------------------" } ], - "per_page": 2000, + "per_page": 50, "total_pages": 1, "total_entries": 2, "next_page": null, "previous_page": null, "page": 1, "links": { - "first": "https://api.harvestapp.com/api/v2/invoices/13150403/messages?page=1&per_page=2000", + "first": null, "next": null, "previous": null, - "last": "https://api.harvestapp.com/v2/invoices/13150403/messages?page=1&per_page=2000" + "last": null } } diff --git a/airbyte-integrations/connectors/source-harvest/unit_tests/resource/http/response/invoices.json b/airbyte-integrations/connectors/source-harvest/unit_tests/resource/http/response/invoices.json index 3be9cee64a0c9..da211208d90f4 100644 --- a/airbyte-integrations/connectors/source-harvest/unit_tests/resource/http/response/invoices.json +++ b/airbyte-integrations/connectors/source-harvest/unit_tests/resource/http/response/invoices.json @@ -133,16 +133,16 @@ ] } ], - "per_page": 2000, + "per_page": 50, "total_pages": 1, "total_entries": 2, "next_page": null, "previous_page": null, "page": 1, "links": { - "first": "https://api.harvestapp.com/v2/invoices?page=1&per_page=2000", + "first": null, "next": null, "previous": null, - "last": "https://api.harvestapp.com/v2/invoices?page=1&per_page=2000" + "last": null } } diff --git a/airbyte-integrations/connectors/source-harvest/unit_tests/test_migrations/test_config.json b/airbyte-integrations/connectors/source-harvest/unit_tests/test_migrations/test_config.json new file mode 100644 index 0000000000000..f0cbd4f67ac69 --- /dev/null +++ b/airbyte-integrations/connectors/source-harvest/unit_tests/test_migrations/test_config.json @@ -0,0 +1,5 @@ +{ + "account_id": "", + "start_date": "2021-01-01T00:00:00Z", + "credentials": { "api_token": "" } +} diff --git a/airbyte-integrations/connectors/source-harvest/unit_tests/test_migrations/test_config_migrations.py b/airbyte-integrations/connectors/source-harvest/unit_tests/test_migrations/test_config_migrations.py new file mode 100644 index 0000000000000..d665f3162f5e3 --- /dev/null +++ b/airbyte-integrations/connectors/source-harvest/unit_tests/test_migrations/test_config_migrations.py @@ -0,0 +1,80 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +import json +import os +from typing import Any, Mapping + +import pytest +from airbyte_cdk.models import OrchestratorType, Type +from airbyte_cdk.sources import Source +from source_harvest.config_migrations import MigrateAuthType +from source_harvest.source import SourceHarvest + +# BASE ARGS +CMD = "check" +TEST_TOKEN_CONFIG_PATH = f"{os.path.dirname(__file__)}/test_config.json" +NEW_TEST_TOKEN_CONFIG_PATH = f"{os.path.dirname(__file__)}/test_new_config.json" +TEST_OAUTH_CONFIG_PATH = f"{os.path.dirname(__file__)}/test_config_oauth.json" +NEW_TEST_OAUTH_CONFIG_PATH = f"{os.path.dirname(__file__)}/test_new_config_oauth.json" +SOURCE: Source = SourceHarvest() + +# HELPERS +def load_config(config_path: str) -> Mapping[str, Any]: + with open(config_path, "r") as config: + return json.load(config) + + +def revert_migration(config_path: str) -> None: + with open(config_path, "r") as test_config: + config = json.load(test_config) + config["credentials"].pop("auth_type") + with open(config_path, "w") as updated_config: + config = json.dumps(config) + updated_config.write(config) + + +@pytest.mark.parametrize( + "config_path, expected_auth_type", + [ + (TEST_TOKEN_CONFIG_PATH, "Token"), + (TEST_OAUTH_CONFIG_PATH, "Client"), + ], +) +def test_migrate_config(config_path, expected_auth_type): + + source_input_args = [CMD, "--config", config_path] + + migration_instance = MigrateAuthType + + migration_instance.migrate(source_input_args, SOURCE) + + test_migrated_config = load_config(config_path) + + # Verify migrated property + assert "auth_type" in test_migrated_config["credentials"] + assert test_migrated_config["credentials"]["auth_type"] == expected_auth_type + + # Test CONTROL MESSAGE was emitted) + control_message = migration_instance.message_repository._message_queue[0] + assert control_message.type == Type.CONTROL + assert control_message.control.type == OrchestratorType.CONNECTOR_CONFIG + + revert_migration(config_path) + + +@pytest.mark.parametrize( + "config_path", + [ + NEW_TEST_TOKEN_CONFIG_PATH, + NEW_TEST_OAUTH_CONFIG_PATH, + ], +) +def test_should_not_migrate_new(config_path): + new_config = load_config(config_path) + instance = MigrateAuthType + assert not instance.should_migrate(new_config) + + diff --git a/airbyte-integrations/connectors/source-harvest/unit_tests/test_migrations/test_config_oauth.json b/airbyte-integrations/connectors/source-harvest/unit_tests/test_migrations/test_config_oauth.json new file mode 100644 index 0000000000000..dc68e718ac23a --- /dev/null +++ b/airbyte-integrations/connectors/source-harvest/unit_tests/test_migrations/test_config_oauth.json @@ -0,0 +1,9 @@ +{ + "account_id": "", + "replication_start_date": "2021-01-01T00:00:00Z", + "credentials": { + "client_id": "", + "client_secret": "", + "client_refresh_token": "" + } +} diff --git a/airbyte-integrations/connectors/source-harvest/unit_tests/test_migrations/test_new_config.json b/airbyte-integrations/connectors/source-harvest/unit_tests/test_migrations/test_new_config.json new file mode 100644 index 0000000000000..9f8b462a425d1 --- /dev/null +++ b/airbyte-integrations/connectors/source-harvest/unit_tests/test_migrations/test_new_config.json @@ -0,0 +1,8 @@ +{ + "account_id": "", + "start_date": "2021-01-01T00:00:00Z", + "credentials": { + "auth_type": "bearer", + "api_token": "" + } +} diff --git a/airbyte-integrations/connectors/source-harvest/unit_tests/test_migrations/test_new_config_oauth.json b/airbyte-integrations/connectors/source-harvest/unit_tests/test_migrations/test_new_config_oauth.json new file mode 100644 index 0000000000000..4638e73e93217 --- /dev/null +++ b/airbyte-integrations/connectors/source-harvest/unit_tests/test_migrations/test_new_config_oauth.json @@ -0,0 +1,10 @@ +{ + "account_id": "", + "replication_start_date": "2021-01-01T00:00:00Z", + "credentials": { + "auth_type": "Client", + "client_id": "", + "client_secret": "", + "client_refresh_token": "" + } +} diff --git a/airbyte-integrations/connectors/source-harvest/unit_tests/test_source.py b/airbyte-integrations/connectors/source-harvest/unit_tests/test_source.py index d88b352500620..113ff6d5f42b0 100644 --- a/airbyte-integrations/connectors/source-harvest/unit_tests/test_source.py +++ b/airbyte-integrations/connectors/source-harvest/unit_tests/test_source.py @@ -1,25 +1,34 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. +import json import unittest from unittest.mock import Mock, patch import pytest import requests from airbyte_cdk import AirbyteLogger -from airbyte_cdk.models import FailureType +from airbyte_cdk.models import ConfiguredAirbyteCatalog, FailureType, Status, SyncMode +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, read +from airbyte_cdk.test.mock_http import HttpMocker, HttpRequest, HttpResponse +from airbyte_cdk.test.mock_http.response_builder import find_template from airbyte_cdk.utils import AirbyteTracedException from config import ConfigBuilder from requests import HTTPError -from source_harvest.source import SourceHarvest +from source_harvest import SourceHarvest -def _a_response(status_code: int) -> requests.Response: - response = Mock(spec=requests.Response) - response.status_code = status_code - response.url = "any url" - response.reason = "any reason" - return response +def _a_response_with_error_code(status_code: int) -> HttpResponse: + return HttpResponse(json.dumps(find_template(str(status_code), __file__)), status_code) +def _a_request() -> HttpMocker: + return HttpRequest( + url="https://api.harvestapp.com/v2/company", + query_params="any query_parameters" + ) + +def _catalog() -> ConfiguredAirbyteCatalog: + return CatalogBuilder().with_stream("company", SyncMode.full_refresh).build() class SourceTest(unittest.TestCase): @@ -28,48 +37,61 @@ def setUp(self) -> None: self._logger = Mock(spec=AirbyteLogger) self._config = ConfigBuilder().build() - def test_given_config_with_client_id_without_account_id_when_check_connection_then_raise_config_error(self) -> None: + def test_given_config_with_client_id_without_account_id_when_check_connection_then_not_available(self) -> None: config = ConfigBuilder().with_client_id("a client id").build() config.pop("account_id") - with pytest.raises(AirbyteTracedException) as exception_trace: - self._source.check_connection(self._logger, config) - assert exception_trace.value.failure_type == FailureType.config_error + is_available, error = self._source.check_connection(self._logger, config) + assert not is_available + assert error == "Unable to connect to stream company - Request to https://api.harvestapp.com/v2/company failed with status code 401 and error message invalid_token" - def test_given_config_no_authentication_in_config_when_check_connection_then_raise_config_error(self) -> None: + def test_given_config_no_authentication_in_config_when_check_connection_then_not_available(self) -> None: config = ConfigBuilder().build() config["credentials"].pop("api_token", None) config["credentials"].pop("client_id", None) - with pytest.raises(AirbyteTracedException) as exception_trace: - self._source.check_connection(self._logger, config) - assert exception_trace.value.failure_type == FailureType.config_error - - @patch("source_harvest.source.Users.read_records") - def test_given_400_http_error_when_check_connection_then_raise_non_config_error(self, mocked_user_read_records) -> None: - """ - Following https://github.com/airbytehq/airbyte/pull/35305 where no page alerts were emitted - """ - mocked_user_read_records.side_effect = HTTPError(response=_a_response(400)) - - with pytest.raises(Exception) as exception: - self._source.check_connection(self._logger, self._config) - assert not isinstance(exception, AirbyteTracedException) or exception.failure_type != FailureType.config_error - - @patch("source_harvest.source.Users.read_records") - def test_given_401_http_error_when_check_connection_then_is_not_available(self, mocked_user_read_records) -> None: - mocked_user_read_records.side_effect = HTTPError(response=_a_response(401)) - is_available, _ = self._source.check_connection(self._logger, self._config) + is_available, error = self._source.check_connection(self._logger, config) assert not is_available - @patch("source_harvest.source.Users.read_records") - def test_given_403_http_error_when_check_connection_then_is_not_available(self, mocked_user_read_records) -> None: - mocked_user_read_records.side_effect = HTTPError(response=_a_response(403)) - is_available, _ = self._source.check_connection(self._logger, self._config) - assert not is_available + @HttpMocker() + def test_given_400_http_error_read_then_raises_config_error(self, http_mocker: HttpMocker) -> None: - @patch("source_harvest.source.Users.read_records") - def test_given_404_http_error_when_check_connection_then_is_not_available(self, mocked_user_read_records) -> None: - mocked_user_read_records.side_effect = HTTPError(response=_a_response(404)) - is_available, _ = self._source.check_connection(self._logger, self._config) - assert not is_available + http_mocker.get( + _a_request(), + _a_response_with_error_code(400) + ) + + output = read(self._source, self._config, _catalog(), state=None, expecting_exception=True) + assert output.errors[-1].trace.error.failure_type== FailureType.config_error + + @HttpMocker() + def test_given_401_http_error_when_read_then_raises_config_error(self, http_mocker: HttpMocker) -> None: + + http_mocker.get( + _a_request(), + _a_response_with_error_code(401) + ) + output = read(self._source, self._config, _catalog(), state=None, expecting_exception=True) + print(output) + + assert output.errors[-1].trace.error.failure_type== FailureType.config_error + + @HttpMocker() + def test_given_403_http_error_when_read_then_raises_config_error(self, http_mocker: HttpMocker) -> None: + http_mocker.get( + _a_request(), + _a_response_with_error_code(403) + ) + + output = read(self._source, self._config, _catalog(), state=None, expecting_exception=True) + assert output.errors[-1].trace.error.failure_type== FailureType.config_error + + @HttpMocker() + def test_given_404_http_error_when_read_then_raises_config_error(self, http_mocker: HttpMocker) -> None: + http_mocker.get( + _a_request(), + _a_response_with_error_code(404) + ) + + output = read(self._source, self._config, _catalog(), state=None, expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error diff --git a/airbyte-integrations/connectors/source-harvest/unit_tests/unit_test.py b/airbyte-integrations/connectors/source-harvest/unit_tests/unit_test.py index 23f8051bc6194..f5737eea78bcc 100644 --- a/airbyte-integrations/connectors/source-harvest/unit_tests/unit_test.py +++ b/airbyte-integrations/connectors/source-harvest/unit_tests/unit_test.py @@ -2,50 +2,151 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # -import requests -from airbyte_cdk.logger import AirbyteLogger -from airbyte_cdk.sources.streams.http.auth import NoAuth +import json +from datetime import datetime, timedelta +from unittest import TestCase + +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import read +from airbyte_cdk.test.mock_http import HttpMocker +from airbyte_cdk.test.mock_http.response_builder import ( + FieldPath, + HttpResponseBuilder, + RecordBuilder, + create_record_builder, + create_response_builder, + find_template, +) +from airbyte_protocol.models import SyncMode +from config import ConfigBuilder +from freezegun import freeze_time +from pagination import HarvestPaginationStrategy +from request_builder import HarvestRequestBuilder from source_harvest.source import SourceHarvest -from source_harvest.streams import ExpensesClients, HarvestStream, InvoicePayments -logger = AirbyteLogger() +def _a_record(stream_name: str, data_path: str, primary_key: str) -> RecordBuilder: + return create_record_builder( + find_template(stream_name, __file__), + records_path=FieldPath(data_path), + record_id_path=FieldPath(primary_key), + record_cursor_path=None + ) -def test_streams(config): - streams = SourceHarvest().streams(config) +def _a_response(stream_name: str, data_path: str) -> HttpResponseBuilder: + return create_response_builder( + find_template(stream_name, __file__), + records_path=FieldPath(data_path), + pagination_strategy=HarvestPaginationStrategy() + ) - assert len(streams) == 32 +@freeze_time("2024-03-24") +class UnitTest(TestCase): + def setUp(self) -> None: + self._config = ConfigBuilder().build() -def test_next_page_token(config, mocker): - next_page = 2 - expected = {"page": next_page} + def test_streams(self): + streams = SourceHarvest().streams(self._config) + assert len(streams) == 32 - instance = HarvestStream(authenticator=NoAuth()) + @HttpMocker() + def test_next_page_token(self, http_mocker: HttpMocker): - response = mocker.Mock(spec=requests.Response, request=mocker.Mock(spec=requests.Request)) - response.json.return_value = {"next_page": next_page} + catalog = CatalogBuilder().with_stream("invoices", SyncMode.full_refresh).build() - assert instance.next_page_token(response) == expected + stream_name = "invoices" + stream_pk = "id" + http_mocker.get( + HarvestRequestBuilder.invoices_endpoint("account_id").with_per_page(50).with_updated_since("2021-01-01T00:00:00Z").build(), + _a_response(stream_name="invoices", data_path="invoices").with_record(_a_record(stream_name=stream_name, data_path=stream_name, primary_key=stream_pk)).with_pagination().build() + ) -def test_child_stream_slices(config, replication_start_date, mock_stream): - object_id = 1 - mock_stream("invoices", response={"invoices": [{"id": object_id}]}) - mock_stream(f"invoices/{object_id}/payments", {"invoice_payments": [{"id": object_id}]}) + http_mocker.get( + HarvestRequestBuilder.invoices_endpoint("account_id").with_page(2).with_per_page(50).with_updated_since("2021-01-01T00:00:00Z").build(), + _a_response(stream_name="invoices", data_path="invoices").with_record(_a_record(stream_name=stream_name, data_path=stream_name, primary_key=stream_pk)).build() + ) - invoice_payments_instance = InvoicePayments(authenticator=NoAuth(), replication_start_date=replication_start_date) - stream_slice = next(invoice_payments_instance.stream_slices(sync_mode=None)) - invoice_payments = invoice_payments_instance.read_records(sync_mode=None, stream_slice=stream_slice) + output = read(SourceHarvest(), config=self._config, catalog=catalog) + len(output.records) == 2 - assert next(invoice_payments) + @HttpMocker() + def test_child_stream_partitions(self, http_mocker: HttpMocker): + stream_name = "invoices" + stream_pk = "id" -def test_report_base_stream(config, from_date, mock_stream): - mock_stream("reports/expenses/clients", response={"results": [{"client_id": 1}]}) + http_mocker.get( + HarvestRequestBuilder.invoices_endpoint("account_id").with_any_query_params().build(), + [_a_response(stream_name=stream_name, data_path=stream_name).with_record(_a_record(stream_name=stream_name, data_path=stream_name, primary_key=stream_pk).with_field(FieldPath(stream_pk), 1)).with_record(_a_record(stream_name=stream_name, data_path=stream_name, primary_key=stream_pk).with_field(FieldPath(stream_pk), 2)).build()] + ) - invoice_payments_instance = ExpensesClients(authenticator=NoAuth(), from_date=from_date) - stream_slice = next(invoice_payments_instance.stream_slices(sync_mode=None)) - invoice_payments = invoice_payments_instance.read_records(sync_mode=None, stream_slice=stream_slice) + output_1 = read(SourceHarvest(), config=self._config, catalog=CatalogBuilder().with_stream("invoices", SyncMode.full_refresh).build()) - assert next(invoice_payments) + invoice_1_id = json.loads(output_1.records[0].json())["record"]["data"]["id"] + invoice_2_id = json.loads(output_1.records[1].json())["record"]["data"]["id"] + + stream_name = "invoice_messages" + + http_mocker.get( + HarvestRequestBuilder.invoice_messages_endpoint("account_id", invoice_1_id).with_any_query_params().build(), + _a_response(stream_name=stream_name, data_path=stream_name).with_record(_a_record(stream_name=stream_name, data_path=stream_name, primary_key=stream_pk)).build() + ) + + http_mocker.get( + HarvestRequestBuilder.invoice_messages_endpoint("account_id", invoice_2_id).with_any_query_params().build(), + _a_response(stream_name=stream_name, data_path=stream_name).with_record(_a_record(stream_name=stream_name, data_path=stream_name, primary_key=stream_pk)).build() + ) + + read(SourceHarvest(), config=self._config, catalog=CatalogBuilder().with_stream("invoice_messages", SyncMode.full_refresh).build()) + # Http Matcher test + + @HttpMocker() + def test_report_based_stream(self, http_mocker: HttpMocker): + + stream_name = "expenses_clients" + stream_pk = "client_id" + data_path = "results" + + http_mocker.get( + HarvestRequestBuilder.expenses_clients_endpoint("account_id").with_any_query_params().build(), + _a_response(stream_name=stream_name, data_path=data_path).with_record(_a_record(stream_name, data_path, stream_pk)).build() + ) + + output = read(SourceHarvest(), config=self._config, catalog=CatalogBuilder().with_stream(stream_name, SyncMode.full_refresh).build()) + + len(output.records) == 1 + + @HttpMocker() + def test_report_based_stream_slices(self, http_mocker: HttpMocker): + + stream_name = "expenses_clients" + stream_pk = "client_id" + data_path = "results" + + replication_start_date = "2021-01-01T00:00:00Z" + replication_start_datetime = datetime.strptime(replication_start_date, "%Y-%m-%dT%H:%M:%SZ") + + config = ConfigBuilder().with_replication_start_date(replication_start_datetime).build() + + while replication_start_datetime < datetime.now(): + + # Adds 364 days to create a 365-day-long duration, which is max for Harvest API + if replication_start_datetime + timedelta(days=364) < datetime.now(): + end_datetime = replication_start_datetime + timedelta(days=364) + else: + end_datetime = datetime.now() + + end_datetime = replication_start_datetime + timedelta(days=364) if replication_start_datetime + timedelta(days=364) < datetime.now() else datetime.now() + + http_mocker.get( + HarvestRequestBuilder.expenses_clients_endpoint("account_id").with_per_page(50).with_from(replication_start_datetime).with_to(end_datetime).build(), + _a_response(stream_name=stream_name, data_path=data_path).with_record(_a_record(stream_name, data_path, stream_pk)).build() + ) + + replication_start_datetime = end_datetime + timedelta(days=1) + + output = read(SourceHarvest(), config=config, catalog=CatalogBuilder().with_stream(stream_name, SyncMode.full_refresh).build()) + + assert len(output.records) == 4 \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-hubspot/acceptance-test-config.yml b/airbyte-integrations/connectors/source-hubspot/acceptance-test-config.yml index ec50dfc82a725..b8cd948c97e66 100644 --- a/airbyte-integrations/connectors/source-hubspot/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-hubspot/acceptance-test-config.yml @@ -29,35 +29,35 @@ acceptance_tests: timeout_seconds: 3600 empty_streams: - name: engagements_calls - bypass_reason: Unable to populate cost $20/month + bypass_reason: Unable to populate (cost $20/month) - covered by integration tests - name: owners_archived - bypass_reason: unable to populate + bypass_reason: Unable to populate - covered by integration tests - name: tickets_web_analytics - bypass_reason: Unable to populate + bypass_reason: Unable to populate - covered by integration tests - name: deals_web_analytics - bypass_reason: Unable to populate + bypass_reason: Unable to populate - covered by integration tests - name: companies_web_analytics - bypass_reason: Unable to populate + bypass_reason: Unable to populate - covered by integration tests - name: engagements_calls_web_analytics - bypass_reason: Unable to populate + bypass_reason: Unable to populate - covered by integration tests - name: engagements_emails_web_analytics - bypass_reason: Unable to populate + bypass_reason: Unable to populate - covered by integration tests - name: engagements_meetings_web_analytics - bypass_reason: Unable to populate + bypass_reason: Unable to populate - covered by integration tests - name: engagements_notes_web_analytics - bypass_reason: Unable to populate + bypass_reason: Unable to populate - covered by integration tests - name: engagements_tasks_web_analytics - bypass_reason: Unable to populate + bypass_reason: Unable to populate - covered by integration tests - name: goals_web_analytics - bypass_reason: Unable to populate + bypass_reason: Unable to populate - covered by integration tests - name: line_items_web_analytics - bypass_reason: Unable to populate + bypass_reason: Unable to populate - covered by integration tests - name: products_web_analytics - bypass_reason: Unable to populate + bypass_reason: Unable to populate - covered by integration tests - name: pets_web_analytics - bypass_reason: Unable to populate + bypass_reason: Unable to populate - covered by integration tests - name: cars_web_analytics - bypass_reason: Unable to populate + bypass_reason: Unable to populate - covered by integration tests full_refresh: tests: - config_path: secrets/config.json @@ -131,14 +131,16 @@ acceptance_tests: - name: properties/hs_time_* bypass_reason: Hubspot time depend on current time incremental: - tests: - - config_path: secrets/config_oauth.json - configured_catalog_path: sample_files/incremental_catalog.json - future_state: - future_state_path: integration_tests/abnormal_state.json - timeout_seconds: 7200 - - config_path: secrets/config_oauth_no_start_date.json - configured_catalog_path: sample_files/incremental_catalog.json - future_state: - future_state_path: integration_tests/abnormal_state.json - timeout_seconds: 7200 + # Waiting for CAT incremental fix: https://github.com/airbytehq/airbyte/pull/36814 + bypass_reason: It takes incredible amount of time to complete + # tests: + # - config_path: secrets/config_oauth.json + # configured_catalog_path: sample_files/incremental_catalog.json + # future_state: + # future_state_path: integration_tests/abnormal_state.json + # timeout_seconds: 7200 + # - config_path: secrets/config_oauth_no_start_date.json + # configured_catalog_path: sample_files/incremental_catalog.json + # future_state: + # future_state_path: integration_tests/abnormal_state.json + # timeout_seconds: 7200 diff --git a/airbyte-integrations/connectors/source-hubspot/metadata.yaml b/airbyte-integrations/connectors/source-hubspot/metadata.yaml index a42e4b8190f48..4250438a3b055 100644 --- a/airbyte-integrations/connectors/source-hubspot/metadata.yaml +++ b/airbyte-integrations/connectors/source-hubspot/metadata.yaml @@ -10,12 +10,13 @@ data: connectorSubtype: api connectorType: source definitionId: 36c891d9-4bd9-43ac-bad2-10e12756272c - dockerImageTag: 4.0.0 + dockerImageTag: 4.1.1 dockerRepository: airbyte/source-hubspot documentationUrl: https://docs.airbyte.com/integrations/sources/hubspot githubIssueLabel: source-hubspot icon: hubspot.svg license: ELv2 + maxSecondsBetweenMessages: 86400 name: HubSpot remoteRegistries: pypi: diff --git a/airbyte-integrations/connectors/source-hubspot/poetry.lock b/airbyte-integrations/connectors/source-hubspot/poetry.lock index 10e4cc8d33a89..841fcdc13383a 100644 --- a/airbyte-integrations/connectors/source-hubspot/poetry.lock +++ b/airbyte-integrations/connectors/source-hubspot/poetry.lock @@ -1,40 +1,39 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.61.2" +version = "0.78.6" description = "A framework for writing Airbyte Connectors." optional = false -python-versions = ">=3.8" +python-versions = "<4.0,>=3.9" files = [ - {file = "airbyte-cdk-0.61.2.tar.gz", hash = "sha256:494192d4e52bc30b88ae45d0161ce9ad351e7c4090fd5ec44649adb30323ad74"}, - {file = "airbyte_cdk-0.61.2-py3-none-any.whl", hash = "sha256:345382749991d628fd45d05c6045bde57f7ce493ace672ad6a93c0d1296fb735"}, + {file = "airbyte_cdk-0.78.6-py3-none-any.whl", hash = "sha256:e5f44c6da6d5b5d6f3f6a7f41a3f4a5e2dfc6fefb4c6823af6302c34c6fb4a87"}, + {file = "airbyte_cdk-0.78.6.tar.gz", hash = "sha256:0178f3cefa705f600d51f09e1313024a89cd1c99f2f1f796e8e0181d8e02ad2f"}, ] [package.dependencies] airbyte-protocol-models = "0.5.1" backoff = "*" cachetools = "*" -Deprecated = ">=1.2,<2.0" +Deprecated = ">=1.2,<1.3" dpath = ">=2.0.1,<2.1.0" genson = "1.2.2" isodate = ">=0.6.1,<0.7.0" Jinja2 = ">=3.1.2,<3.2.0" -jsonref = ">=0.2,<1.0" +jsonref = ">=0.2,<0.3" jsonschema = ">=3.2.0,<3.3.0" pendulum = "<3.0.0" pydantic = ">=1.10.8,<2.0.0" pyrate-limiter = ">=3.1.0,<3.2.0" python-dateutil = "*" -PyYAML = ">=6.0.1" +PyYAML = ">=6.0.1,<7.0.0" requests = "*" -requests-cache = "*" +requests_cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] [[package]] @@ -104,13 +103,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -301,6 +300,21 @@ files = [ [package.extras] test = ["pytest (>=6)"] +[[package]] +name = "freezegun" +version = "0.3.4" +description = "Let your Python tests travel through time" +optional = false +python-versions = "*" +files = [ + {file = "freezegun-0.3.4-py2.py3-none-any.whl", hash = "sha256:d15d5daa22260891955d436899f94c8b80525daa895aec74c0afa5a25ac0230e"}, + {file = "freezegun-0.3.4.tar.gz", hash = "sha256:8d5eb5656c324125cce80e2e9ae572af6da997b7065b3bb6599c20f1b28dcf46"}, +] + +[package.dependencies] +python-dateutil = ">=1.0,<2.0 || >2.0" +six = "*" + [[package]] name = "genson" version = "1.2.2" @@ -366,13 +380,13 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jsonref" -version = "0.3.0" -description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." +version = "0.2" +description = "An implementation of JSON Reference for Python" optional = false -python-versions = ">=3.3,<4.0" +python-versions = "*" files = [ - {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, - {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, ] [[package]] @@ -483,13 +497,13 @@ test = ["pytest", "pytest-cov"] [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -569,47 +583,47 @@ files = [ [[package]] name = "pydantic" -version = "1.10.14" +version = "1.10.15" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, - {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, - {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, - {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, - {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, - {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, - {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, - {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, - {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, - {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, - {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, - {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, - {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, - {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, - {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, - {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, - {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, - {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, - {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, - {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, - {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, - {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, - {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, - {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, - {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, - {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, - {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, - {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, - {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, - {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, - {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, - {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, - {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, - {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, - {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, - {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50f1666a9940d3d68683c9d96e39640f709d7a72ff8702987dab1761036206bb"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82790d4753ee5d00739d6cb5cf56bceb186d9d6ce134aca3ba7befb1eedbc2c8"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d207d5b87f6cbefbdb1198154292faee8017d7495a54ae58db06762004500d00"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e49db944fad339b2ccb80128ffd3f8af076f9f287197a480bf1e4ca053a866f0"}, + {file = "pydantic-1.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:d3b5c4cbd0c9cb61bbbb19ce335e1f8ab87a811f6d589ed52b0254cf585d709c"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3d5731a120752248844676bf92f25a12f6e45425e63ce22e0849297a093b5b0"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c365ad9c394f9eeffcb30a82f4246c0006417f03a7c0f8315d6211f25f7cb654"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3287e1614393119c67bd4404f46e33ae3be3ed4cd10360b48d0a4459f420c6a3"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be51dd2c8596b25fe43c0a4a59c2bee4f18d88efb8031188f9e7ddc6b469cf44"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6a51a1dd4aa7b3f1317f65493a182d3cff708385327c1c82c81e4a9d6d65b2e4"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4e316e54b5775d1eb59187f9290aeb38acf620e10f7fd2f776d97bb788199e53"}, + {file = "pydantic-1.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:0d142fa1b8f2f0ae11ddd5e3e317dcac060b951d605fda26ca9b234b92214986"}, + {file = "pydantic-1.10.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7ea210336b891f5ea334f8fc9f8f862b87acd5d4a0cbc9e3e208e7aa1775dabf"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3453685ccd7140715e05f2193d64030101eaad26076fad4e246c1cc97e1bb30d"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bea1f03b8d4e8e86702c918ccfd5d947ac268f0f0cc6ed71782e4b09353b26f"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:af9850d98fc21e5bc24ea9e35dd80a29faf6462c608728a110c0a30b595e58b7"}, + {file = "pydantic-1.10.15-cp37-cp37m-win_amd64.whl", hash = "sha256:d31ee5b14a82c9afe2bd26aaa405293d4237d0591527d9129ce36e58f19f95c1"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5e09c19df304b8123938dc3c53d3d3be6ec74b9d7d0d80f4f4b5432ae16c2022"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7ac9237cd62947db00a0d16acf2f3e00d1ae9d3bd602b9c415f93e7a9fc10528"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:584f2d4c98ffec420e02305cf675857bae03c9d617fcfdc34946b1160213a948"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbc6989fad0c030bd70a0b6f626f98a862224bc2b1e36bfc531ea2facc0a340c"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d573082c6ef99336f2cb5b667b781d2f776d4af311574fb53d908517ba523c22"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6bd7030c9abc80134087d8b6e7aa957e43d35714daa116aced57269a445b8f7b"}, + {file = "pydantic-1.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:3350f527bb04138f8aff932dc828f154847fbdc7a1a44c240fbfff1b57f49a12"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51d405b42f1b86703555797270e4970a9f9bd7953f3990142e69d1037f9d9e51"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a980a77c52723b0dc56640ced396b73a024d4b74f02bcb2d21dbbac1debbe9d0"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f1a1fb467d3f49e1708a3f632b11c69fccb4e748a325d5a491ddc7b5d22383"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:676ed48f2c5bbad835f1a8ed8a6d44c1cd5a21121116d2ac40bd1cd3619746ed"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92229f73400b80c13afcd050687f4d7e88de9234d74b27e6728aa689abcf58cc"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2746189100c646682eff0bce95efa7d2e203420d8e1c613dc0c6b4c1d9c1fde4"}, + {file = "pydantic-1.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:394f08750bd8eaad714718812e7fab615f873b3cdd0b9d84e76e51ef3b50b6b7"}, + {file = "pydantic-1.10.15-py3-none-any.whl", hash = "sha256:28e552a060ba2740d0d2aabe35162652c1459a0b9069fe0db7f4ee0e18e74d58"}, + {file = "pydantic-1.10.15.tar.gz", hash = "sha256:ca832e124eda231a60a041da4f013e3ff24949d94a01154b137fc2f2a43c3ffb"}, ] [package.dependencies] @@ -701,35 +715,46 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xm [[package]] name = "pytest-mock" -version = "3.12.0" +version = "3.14.0" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, - {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, ] [package.dependencies] -pytest = ">=5.0" +pytest = ">=6.2.5" [package.extras] dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] six = ">=1.5" +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + [[package]] name = "pytzdata" version = "2020.1" @@ -824,13 +849,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -842,50 +867,48 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" -version = "1.11.0" +version = "1.12.1" description = "Mock out responses from the requests package" optional = false -python-versions = "*" +python-versions = ">=3.5" files = [ - {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"}, - {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"}, + {file = "requests-mock-1.12.1.tar.gz", hash = "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401"}, + {file = "requests_mock-1.12.1-py2.py3-none-any.whl", hash = "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563"}, ] [package.dependencies] -requests = ">=2.3,<3" -six = "*" +requests = ">=2.22,<3" [package.extras] fixture = ["fixtures"] -test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] [[package]] name = "setuptools" -version = "69.1.0" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -911,13 +934,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -936,13 +959,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1047,4 +1070,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "c2ede1134d353ed454678bde83d4114935f614a63f4e086bb3df790798d9fb4e" +content-hash = "e39f4f8e565e1e1df86e7377c1281dfb3a763bb3411f9998deeb0b76dd7c7910" diff --git a/airbyte-integrations/connectors/source-hubspot/pyproject.toml b/airbyte-integrations/connectors/source-hubspot/pyproject.toml index 6d37d0ae1b2c6..400bec2a8c2b9 100644 --- a/airbyte-integrations/connectors/source-hubspot/pyproject.toml +++ b/airbyte-integrations/connectors/source-hubspot/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "4.0.0" +version = "4.1.1" name = "source-hubspot" description = "Source implementation for HubSpot." authors = [ "Airbyte ",] @@ -17,7 +17,7 @@ include = "source_hubspot" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "==0.61.2" +airbyte-cdk = "^0" [tool.poetry.scripts] source-hubspot = "source_hubspot.run:run" @@ -27,3 +27,5 @@ requests-mock = "^1.9.3" mock = "^5.1.0" pytest-mock = "^3.6" pytest = "^6.2" +pytz = "2024.1" +freezegun = "0.3.4" diff --git a/airbyte-integrations/connectors/source-hubspot/source_hubspot/source.py b/airbyte-integrations/connectors/source-hubspot/source_hubspot/source.py index baa562164e875..fb705de82aa34 100644 --- a/airbyte-integrations/connectors/source-hubspot/source_hubspot/source.py +++ b/airbyte-integrations/connectors/source-hubspot/source_hubspot/source.py @@ -5,7 +5,7 @@ import logging from http import HTTPStatus from itertools import chain -from typing import Any, List, Mapping, Optional, Tuple +from typing import Any, Generator, List, Mapping, Optional, Tuple import requests from airbyte_cdk.logger import AirbyteLogger @@ -115,7 +115,9 @@ def get_common_params(self, config) -> Mapping[str, Any]: start_date = config.get("start_date", DEFAULT_START_DATE) credentials = config["credentials"] api = self.get_api(config=config) - return dict(api=api, start_date=start_date, credentials=credentials) + # Additional configuration is necessary for testing certain streams due to their specific restrictions. + acceptance_test_config = config.get("acceptance_test_config", {}) + return dict(api=api, start_date=start_date, credentials=credentials, acceptance_test_config=acceptance_test_config) def streams(self, config: Mapping[str, Any]) -> List[Stream]: credentials = config.get("credentials", {}) @@ -195,11 +197,12 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: self.logger.info("No scopes to grant when authenticating with API key.") available_streams = streams - available_streams.extend(self.get_custom_object_streams(api=api, common_params=common_params)) + custom_object_streams = list(self.get_custom_object_streams(api=api, common_params=common_params)) + available_streams.extend(custom_object_streams) if enable_experimental_streams: custom_objects_web_analytics_streams = self.get_web_analytics_custom_objects_stream( - custom_object_stream_instances=self.get_custom_object_streams(api=api, common_params=common_params), + custom_object_stream_instances=custom_object_streams, common_params=common_params, ) available_streams.extend(custom_objects_web_analytics_streams) @@ -218,7 +221,7 @@ def get_custom_object_streams(self, api: API, common_params: Mapping[str, Any]): def get_web_analytics_custom_objects_stream( self, custom_object_stream_instances: List[CustomObject], common_params: Any - ) -> WebAnalyticsStream: + ) -> Generator[WebAnalyticsStream, None, None]: for custom_object_stream_instance in custom_object_stream_instances: def __init__(self, **kwargs: Any): diff --git a/airbyte-integrations/connectors/source-hubspot/source_hubspot/streams.py b/airbyte-integrations/connectors/source-hubspot/source_hubspot/streams.py index 7445af401cd19..4296b864d574b 100644 --- a/airbyte-integrations/connectors/source-hubspot/source_hubspot/streams.py +++ b/airbyte-integrations/connectors/source-hubspot/source_hubspot/streams.py @@ -24,6 +24,7 @@ from airbyte_cdk.sources.streams.http import HttpStream, HttpSubStream from airbyte_cdk.sources.streams.http.availability_strategy import HttpAvailabilityStrategy from airbyte_cdk.sources.streams.http.requests_native_auth import Oauth2Authenticator, TokenAuthenticator +from airbyte_cdk.sources.utils import casing from airbyte_cdk.sources.utils.schema_helpers import ResourceSchemaLoader from airbyte_cdk.sources.utils.transform import TransformConfig, TypeTransformer from airbyte_cdk.utils import AirbyteTracedException @@ -377,7 +378,14 @@ def _property_wrapper(self) -> IURLPropertyRepresentation: return APIv2Property(properties) return APIv3Property(properties) - def __init__(self, api: API, start_date: Union[str, pendulum.datetime], credentials: Mapping[str, Any] = None, **kwargs): + def __init__( + self, + api: API, + start_date: Union[str, pendulum.datetime], + credentials: Mapping[str, Any] = None, + acceptance_test_config: Mapping[str, Any] = None, + **kwargs, + ): super().__init__(**kwargs) self._api: API = api self._credentials = credentials @@ -392,6 +400,12 @@ def __init__(self, api: API, start_date: Union[str, pendulum.datetime], credenti if creds_title in (OAUTH_CREDENTIALS, PRIVATE_APP_CREDENTIALS): self._authenticator = api.get_authenticator() + # Additional configuration is necessary for testing certain streams due to their specific restrictions. + if acceptance_test_config is None: + acceptance_test_config = {} + self._is_test = self.name in acceptance_test_config + self._acceptance_test_config = acceptance_test_config.get(self.name, {}) + def should_retry(self, response: requests.Response) -> bool: if response.status_code == HTTPStatus.UNAUTHORIZED: message = response.json().get("message") @@ -780,7 +794,7 @@ def _get_field_props(field_type: str) -> Mapping[str, List[str]]: @property @lru_cache() def properties(self) -> Mapping[str, Any]: - """Some entities has dynamic set of properties, so we trying to resolve those at runtime""" + """Some entities have dynamic set of properties, so we're trying to resolve those at runtime""" props = {} if not self.entity: return props @@ -1641,6 +1655,7 @@ def read_records( "api": self._api, "start_date": since_date, "credentials": self._credentials, + "acceptance_test_config": {casing.camel_to_snake(EngagementsRecent.__name__): self._acceptance_test_config}, } try: @@ -2344,6 +2359,12 @@ def stream_slices( object_id = parent_slice["parent"][self.object_id_field] + # We require this workaround to shorten the duration of the acceptance test run. + # The web analytics stream alone takes over 3 hours to complete. + # Consequently, we aim to run the test against a limited number of object IDs. + if self._is_test and object_id not in self._acceptance_test_config.get("object_ids", []): + continue + # Take the initial datetime either form config or from state depending whichever value is higher # In case when state is detected add a 1 millisecond to avoid duplicates from previous sync from_datetime = ( diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/conftest.py b/airbyte-integrations/connectors/source-hubspot/unit_tests/conftest.py index ea8070af9d879..c64d689bde9ef 100644 --- a/airbyte-integrations/connectors/source-hubspot/unit_tests/conftest.py +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/conftest.py @@ -85,3 +85,8 @@ def fake_properties_list(): @pytest.fixture(name="api") def api(some_credentials): return API(some_credentials) + + +@pytest.fixture +def http_mocker(): + return None diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/__init__.py b/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/__init__.py new file mode 100644 index 0000000000000..f8ebf27a08c25 --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/__init__.py @@ -0,0 +1,133 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +import copy +from datetime import datetime, timedelta +from typing import Any, Dict, List, Optional + +import freezegun +import pytz +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, read +from airbyte_cdk.test.mock_http import HttpMocker +from airbyte_cdk.test.mock_http.response_builder import FieldPath, HttpResponseBuilder, RecordBuilder, create_record_builder, find_template +from airbyte_protocol.models import AirbyteStateMessage, SyncMode +from source_hubspot import SourceHubspot + +from .config_builder import ConfigBuilder +from .request_builders.api import CustomObjectsRequestBuilder, OAuthRequestBuilder, PropertiesRequestBuilder, ScopesRequestBuilder +from .request_builders.streams import CRMStreamRequestBuilder, IncrementalCRMStreamRequestBuilder, WebAnalyticsRequestBuilder +from .response_builder.helpers import RootHttpResponseBuilder +from .response_builder.api import ScopesResponseBuilder +from .response_builder.streams import GenericResponseBuilder, HubspotStreamResponseBuilder + + +@freezegun.freeze_time("2024-03-03T14:42:00Z") +class HubspotTestCase: + DT_FORMAT = '%Y-%m-%dT%H:%M:%SZ' + OBJECT_ID = "testID" + ACCESS_TOKEN = "new_access_token" + CURSOR_FIELD = "occurredAt" + PROPERTIES = { + "closed_date": "datetime", + "createdate": "datetime", + } + + @classmethod + def now(cls): + return datetime.now(pytz.utc) + + @classmethod + def start_date(cls): + return cls.now() - timedelta(days=30) + + @classmethod + def updated_at(cls): + return cls.now() - timedelta(days=1) + + @classmethod + def dt_str(cls, dt: datetime.date) -> str: + return dt.strftime(cls.DT_FORMAT) + + @classmethod + def oauth_config(cls, start_date: Optional[str] = None) -> Dict[str, Any]: + start_date = start_date or cls.dt_str(cls.start_date()) + return ConfigBuilder().with_start_date(start_date).with_auth( + { + "credentials_title": "OAuth Credentials", + "redirect_uri": "https://airbyte.io", + "client_id": "client_id", + "client_secret": "client_secret", + "refresh_token": "refresh_token", + } + ).build() + + @classmethod + def private_token_config(cls, token: str, start_date: Optional[str] = None) -> Dict[str, Any]: + start_date = start_date or cls.dt_str(cls.start_date()) + return ConfigBuilder().with_start_date(start_date).with_auth( + { + "credentials_title": "Private App Credentials", + "access_token": token, + } + ).build() + + @classmethod + def mock_oauth(cls, http_mocker: HttpMocker, token: str): + creds = cls.oauth_config()["credentials"] + req = OAuthRequestBuilder().with_client_id( + creds["client_id"] + ).with_client_secret( + creds["client_secret"] + ).with_refresh_token( + creds["refresh_token"] + ).build() + response = GenericResponseBuilder().with_value("access_token", token).with_value("expires_in", 7200).build() + http_mocker.post(req, response) + + @classmethod + def mock_scopes(cls, http_mocker: HttpMocker, token: str, scopes: List[str]): + http_mocker.get(ScopesRequestBuilder().with_access_token(token).build(), ScopesResponseBuilder(scopes).build()) + + @classmethod + def mock_custom_objects(cls, http_mocker: HttpMocker): + http_mocker.get( + CustomObjectsRequestBuilder().build(), + HttpResponseBuilder({}, records_path=FieldPath("results"), pagination_strategy=None).build() + ) + + @classmethod + def mock_properties(cls, http_mocker: HttpMocker, object_type: str, properties: Dict[str, str]): + templates = find_template("properties", __file__) + record_builder = lambda: RecordBuilder(copy.deepcopy(templates[0]), id_path=None, cursor_path=None) + + response_builder = RootHttpResponseBuilder(templates) + for name, type in properties.items(): + record = record_builder().with_field(FieldPath("name"), name).with_field(FieldPath("type"), type) + response_builder = response_builder.with_record(record) + + http_mocker.get( + PropertiesRequestBuilder().for_entity(object_type).build(), + response_builder.build() + ) + + @classmethod + def mock_response(cls, http_mocker: HttpMocker, request, responses, method: str = "get"): + if not isinstance(responses, (list, tuple)): + responses = [responses] + getattr(http_mocker, method)(request, responses) + + @classmethod + def record_builder(cls, stream: str, record_cursor_path): + return create_record_builder( + find_template(stream, __file__), records_path=FieldPath("results"), record_id_path=None, record_cursor_path=record_cursor_path + ) + + @classmethod + def catalog(cls, stream: str, sync_mode: SyncMode): + return CatalogBuilder().with_stream(stream, sync_mode).build() + + @classmethod + def read_from_stream( + cls, cfg, stream: str, sync_mode: SyncMode, state: Optional[List[AirbyteStateMessage]] = None, expecting_exception: bool = False + ) -> EntrypointOutput: + return read(SourceHubspot(), cfg, cls.catalog(stream, sync_mode), state, expecting_exception) diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/config_builder.py b/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/config_builder.py new file mode 100644 index 0000000000000..048142759ca2a --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/config_builder.py @@ -0,0 +1,21 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from typing import Any, Mapping + + +class ConfigBuilder: + def __init__(self): + self._config = { + "enable_experimental_streams": True + } + + def with_start_date(self, start_date: str): + self._config["start_date"] = start_date + return self + + def with_auth(self, credentials: Mapping[str, str]): + self._config["credentials"] = credentials + return self + + def build(self) -> Mapping[str, Any]: + return self._config diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/request_builders/__init__.py b/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/request_builders/__init__.py new file mode 100644 index 0000000000000..5fd9458a7a05a --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/request_builders/__init__.py @@ -0,0 +1,9 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +import abc + + +class AbstractRequestBuilder: + @abc.abstractmethod + def build(self): + pass diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/request_builders/api.py b/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/request_builders/api.py new file mode 100644 index 0000000000000..17ba71bebf3c1 --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/request_builders/api.py @@ -0,0 +1,66 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from airbyte_cdk.test.mock_http import HttpRequest + +from . import AbstractRequestBuilder + + +class OAuthRequestBuilder(AbstractRequestBuilder): + URL = "https://api.hubapi.com/oauth/v1/token" + + def __init__(self): + self._params = {} + + def with_client_id(self, client_id: str): + self._params["client_id"] = client_id + return self + + def with_client_secret(self, client_secret: str): + self._params["client_secret"] = client_secret + return self + + def with_refresh_token(self, refresh_token: str): + self._params["refresh_token"] = refresh_token + return self + + def build(self) -> HttpRequest: + client_id, client_secret, refresh_token = self._params["client_id"], self._params["client_secret"], self._params["refresh_token"] + return HttpRequest( + url=self.URL, + body=f"grant_type=refresh_token&client_id={client_id}&client_secret={client_secret}&refresh_token={refresh_token}" + ) + + +class ScopesRequestBuilder(AbstractRequestBuilder): + URL = "https://api.hubapi.com/oauth/v1/access-tokens/{token}" + + def __init__(self): + self._token = None + + def with_access_token(self, token: str): + self._token = token + return self + + def build(self) -> HttpRequest: + return HttpRequest(url=self.URL.format(token=self._token)) + + +class CustomObjectsRequestBuilder(AbstractRequestBuilder): + URL = "https://api.hubapi.com/crm/v3/schemas" + + def build(self) -> HttpRequest: + return HttpRequest(url=self.URL) + + +class PropertiesRequestBuilder(AbstractRequestBuilder): + URL = "https://api.hubapi.com/properties/v2/{resource}/properties" + + def __init__(self): + self._resource = None + + def for_entity(self, entity): + self._resource = entity + return self + + def build(self) -> HttpRequest: + return HttpRequest(url=self.URL.format(resource=self._resource)) diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/request_builders/streams.py b/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/request_builders/streams.py new file mode 100644 index 0000000000000..10c2519080f7c --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/request_builders/streams.py @@ -0,0 +1,137 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from typing import Any, Dict, Iterable, Tuple + +from airbyte_cdk.test.mock_http import HttpRequest + +from . import AbstractRequestBuilder + + +class WebAnalyticsRequestBuilder(AbstractRequestBuilder): + URL = "https://api.hubapi.com/events/v3/events" + + def __init__(self): + self._token = None + self._query_params = {} + self._request_body = None + self._headers = {} + + def with_token(self, token: str): + self._token = token + return self + + @property + def headers(self) -> Dict[str, Any]: + return {"Authorization": f"Bearer {self._token}"} + + def with_query(self, qp): + self._query_params = qp + return self + + def build(self) -> HttpRequest: + return HttpRequest( + url=self.URL, + query_params=self._query_params, + headers=self.headers, + body=self._request_body + ) + + +class CRMStreamRequestBuilder(AbstractRequestBuilder): + URL = "https://api.hubapi.com/crm/v3/objects/{resource}" + + def __init__(self): + self._resource = None + self._associations = "" + self._dt_range = "" + self._properties = "" + self._after = None + self._search = False + + def for_entity(self, entity): + self._resource = entity + return self + + def with_dt_range(self, start_date: Tuple, end_date: Tuple): + self._dt_range = "&".join(["{}={}".format(*start_date), "{}={}".format(*end_date)]) + return self + + def with_page_token(self, next_page_token: Dict): + self._after = "&".join([f"{str(key)}={str(val)}" for key, val in next_page_token.items()]) + return self + + def with_associations(self, associations: Iterable[str]): + self._associations = "&".join([f"associations={a}" for a in associations]) + return self + + def with_properties(self, properties: Iterable[str]): + self._properties = "properties=" + ",".join(properties) + return self + + @property + def _limit(self): + return "limit=100" + + @property + def _archived(self): + return "archived=false" + + @property + def _query_params(self): + return [ + self._archived, + self._associations, + self._limit, + self._after, + self._dt_range, + self._properties + ] + + def build(self): + q = "&".join(filter(None, self._query_params)) + url = self.URL.format(resource=self._resource) + return HttpRequest(url, query_params=q) + + +class IncrementalCRMStreamRequestBuilder(CRMStreamRequestBuilder): + @property + def _query_params(self): + return [ + self._limit, + self._after, + self._dt_range, + self._archived, + self._associations, + self._properties + ] + + +class OwnersArchivedStreamRequestBuilder(AbstractRequestBuilder): + URL = "https://api.hubapi.com/crm/v3/owners" + + def __init__(self): + self._after = None + + @property + def _limit(self): + return "limit=100" + + @property + def _archived(self): + return "archived=true" + + @property + def _query_params(self): + return filter(None, [ + self._limit, + self._after, + self._archived, + ]) + + def with_page_token(self, next_page_token: Dict): + self._after = "&".join([f"{str(key)}={str(val)}" for key, val in next_page_token.items()]) + return self + + def build(self): + q = "&".join(filter(None, self._query_params)) + return HttpRequest(self.URL, query_params=q) diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/response_builder/__init__.py b/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/response_builder/__init__.py new file mode 100644 index 0000000000000..e20b564abdc51 --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/response_builder/__init__.py @@ -0,0 +1,11 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +import abc + +from airbyte_cdk.test.mock_http import HttpResponse + + +class AbstractResponseBuilder: + @abc.abstractmethod + def build(self) -> HttpResponse: + pass diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/response_builder/api.py b/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/response_builder/api.py new file mode 100644 index 0000000000000..77ee027d612e8 --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/response_builder/api.py @@ -0,0 +1,17 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +import json +from typing import List + +from airbyte_cdk.test.mock_http import HttpResponse + +from . import AbstractResponseBuilder + + +class ScopesResponseBuilder(AbstractResponseBuilder): + def __init__(self, scopes: List[str]): + self._scopes = scopes + + def build(self): + body = json.dumps({"scopes": self._scopes}) + return HttpResponse(body=body, status_code=200) diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/response_builder/helpers.py b/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/response_builder/helpers.py new file mode 100644 index 0000000000000..595a02232d439 --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/response_builder/helpers.py @@ -0,0 +1,25 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +import json +from typing import Any, List, Optional, Union + +from airbyte_cdk.test.mock_http import HttpResponse +from airbyte_cdk.test.mock_http.response_builder import FieldPath, HttpResponseBuilder, NestedPath, PaginationStrategy, RecordBuilder + + +class RootHttpResponseBuilder(HttpResponseBuilder): + def __init__( + self, + template: List[Any], + records_path: Optional[Union[FieldPath, NestedPath]] = None, + pagination_strategy: Optional[PaginationStrategy] = None + ): + self._response = template + self._records: List[RecordBuilder] = [] + self._records_path = records_path + self._pagination_strategy = pagination_strategy + self._status_code = 200 + + def build(self) -> HttpResponse: + self._response.extend([record.build() for record in self._records]) + return HttpResponse(json.dumps(self._response), self._status_code) diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/response_builder/pagination.py b/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/response_builder/pagination.py new file mode 100644 index 0000000000000..ded25153204f4 --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/response_builder/pagination.py @@ -0,0 +1,21 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from typing import Any, Dict + +from airbyte_cdk.test.mock_http.response_builder import PaginationStrategy + + +class HubspotPaginationStrategy(PaginationStrategy): + NEXT_PAGE_TOKEN = {"after": "256"} + + def update(self, response: Dict[str, Any]) -> None: + response["paging"] = { + "next": { + "link": "link_to_the_next_page", + **self.NEXT_PAGE_TOKEN + }, + "prev": { + "before": None, + "link": None + } + } diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/response_builder/streams.py b/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/response_builder/streams.py new file mode 100644 index 0000000000000..6b02d952e14c0 --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/response_builder/streams.py @@ -0,0 +1,32 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +import json + +from airbyte_cdk.test.mock_http import HttpResponse +from airbyte_cdk.test.mock_http.response_builder import FieldPath, HttpResponseBuilder, find_template + +from . import AbstractResponseBuilder +from .pagination import HubspotPaginationStrategy + + +class HubspotStreamResponseBuilder(HttpResponseBuilder): + @property + def pagination_strategy(self): + return self._pagination_strategy + + @classmethod + def for_stream(cls, stream: str): + return cls(find_template(stream, __file__), FieldPath("results"), HubspotPaginationStrategy()) + + +class GenericResponseBuilder(AbstractResponseBuilder): + def __init__(self): + self._body = {} + + def with_value(self, key: str, value: str): + self._body[key] = value + return self + + def build(self): + body = json.dumps(self._body) + return HttpResponse(body, status_code=200) diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/test_engagements_calls.py b/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/test_engagements_calls.py new file mode 100644 index 0000000000000..395be501e4882 --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/test_engagements_calls.py @@ -0,0 +1,154 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +import http +from typing import Dict, Optional + +import freezegun +import mock +from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse +from airbyte_cdk.test.mock_http.response_builder import FieldPath +from airbyte_protocol.models import SyncMode + +from . import HubspotTestCase +from .request_builders.streams import CRMStreamRequestBuilder +from .response_builder.streams import HubspotStreamResponseBuilder + + +@freezegun.freeze_time("2024-03-03T14:42:00Z") +class TestEngagementCallsStream(HubspotTestCase): + SCOPES = ["crm.objects.contacts.read"] + CURSOR_FIELD = "updatedAt" + STREAM_NAME = "engagements_calls" + OBJECT_TYPE = "calls" + ASSOCIATIONS = ["contacts", "deal", "company", "tickets"] + + @property + def response_builder(self): + return HubspotStreamResponseBuilder.for_stream(self.STREAM_NAME) + + def request(self, page_token: Optional[Dict[str, str]] = None): + request_builder = CRMStreamRequestBuilder().for_entity( + self.OBJECT_TYPE + ).with_associations( + self.ASSOCIATIONS + ).with_properties( + list(self.PROPERTIES.keys()) + ) + if page_token: + request_builder = request_builder.with_page_token(page_token) + return request_builder.build() + + def response(self, with_pagination: bool = False): + record = self.record_builder(self.STREAM_NAME, FieldPath(self.CURSOR_FIELD)).with_field( + FieldPath(self.CURSOR_FIELD), self.dt_str(self.updated_at()) + ).with_field( + FieldPath("id"), self.OBJECT_ID + ) + response = self.response_builder.with_record(record) + if with_pagination: + response = response.with_pagination() + return response.build() + + def _set_up_oauth(self, http_mocker: HttpMocker): + self.mock_oauth(http_mocker, self.ACCESS_TOKEN) + self.mock_scopes(http_mocker, self.ACCESS_TOKEN, self.SCOPES) + + def _set_up_requests(self, http_mocker: HttpMocker, with_oauth: bool = False): + if with_oauth: + self._set_up_oauth(http_mocker) + self.mock_custom_objects(http_mocker) + self.mock_properties(http_mocker, self.OBJECT_TYPE, self.PROPERTIES) + + @HttpMocker() + def test_given_oauth_authentication_when_read_then_perform_authenticated_queries(self, http_mocker: HttpMocker): + self._set_up_requests(http_mocker, with_oauth=True) + self.read_from_stream(self.oauth_config(), self.STREAM_NAME, SyncMode.full_refresh) + + @HttpMocker() + def test_given_records_when_read_extract_desired_records(self, http_mocker: HttpMocker): + self._set_up_requests(http_mocker, with_oauth=True) + self.mock_response(http_mocker, self.request(), self.response()) + output = self.read_from_stream(self.oauth_config(), self.STREAM_NAME, SyncMode.full_refresh) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_one_page_when_read_stream_private_token_then_return_records(self, http_mocker: HttpMocker): + self._set_up_requests(http_mocker) + self.mock_response(http_mocker, self.request(), self.response()) + output = self.read_from_stream(self.private_token_config(self.ACCESS_TOKEN), self.STREAM_NAME, SyncMode.full_refresh) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_two_pages_when_read_then_return_records(self, http_mocker: HttpMocker): + self._set_up_requests(http_mocker) + self.mock_response(http_mocker, self.request(), self.response(with_pagination=True)) + self.mock_response( + http_mocker, + self.request(page_token=self.response_builder.pagination_strategy.NEXT_PAGE_TOKEN), + self.response() + ) + output = self.read_from_stream(self.private_token_config(self.ACCESS_TOKEN), self.STREAM_NAME, SyncMode.full_refresh) + assert len(output.records) == 2 + + @HttpMocker() + def test_given_error_response_when_read_analytics_then_get_trace_message(self, http_mocker: HttpMocker): + self._set_up_requests(http_mocker) + self.mock_response(http_mocker, self.request(), HttpResponse(status_code=500, body="{}")) + with mock.patch("time.sleep"): + output = self.read_from_stream(self.private_token_config(self.ACCESS_TOKEN), self.STREAM_NAME, SyncMode.full_refresh) + assert len(output.records) == 0 + assert len(output.trace_messages) > 0 + assert len(output.errors) > 0 + + @HttpMocker() + def test_given_500_then_200_when_read_then_return_records(self, http_mocker: HttpMocker): + self._set_up_requests(http_mocker) + self.mock_response( + http_mocker, + self.request(), + [ + HttpResponse(status_code=500, body="{}"), + self.response() + ] + ) + with mock.patch("time.sleep"): + output = self.read_from_stream(self.private_token_config(self.ACCESS_TOKEN), self.STREAM_NAME, SyncMode.full_refresh) + assert len(output.records) == 1 + assert len(output.trace_messages) > 0 + assert len(output.errors) == 0 + + @HttpMocker() + def test_given_missing_scopes_error_when_read_then_stop_sync(self, http_mocker: HttpMocker): + self.mock_oauth(http_mocker, self.ACCESS_TOKEN) + self.mock_scopes(http_mocker, self.ACCESS_TOKEN, []) + self.read_from_stream(self.oauth_config(), self.STREAM_NAME, SyncMode.full_refresh, expecting_exception=True) + + @HttpMocker() + def test_given_unauthorized_error_when_read_then_stop_sync(self, http_mocker: HttpMocker): + self._set_up_requests(http_mocker) + self.mock_response(http_mocker, self.request(), HttpResponse(status_code=http.HTTPStatus.UNAUTHORIZED, body="{}")) + with mock.patch("time.sleep"): + output = self.read_from_stream(self.private_token_config(self.ACCESS_TOKEN), self.STREAM_NAME, SyncMode.full_refresh) + assert len(output.records) == 0 + assert len(output.trace_messages) > 0 + assert len(output.errors) > 0 + + @HttpMocker() + def test_given_one_page_when_read_then_get_records_with_flattened_properties(self, http_mocker: HttpMocker): + self._set_up_requests(http_mocker) + self.mock_response(http_mocker, self.request(), self.response()) + output = self.read_from_stream(self.private_token_config(self.ACCESS_TOKEN), self.STREAM_NAME, SyncMode.full_refresh) + record = output.records[0].record.data + assert "properties" in record # legacy struct remains to not introduce breaking changes + prop_fields = len([f for f in record if f.startswith("properties_")]) + assert prop_fields > 0 + + @HttpMocker() + def test_given_incremental_sync_when_read_then_state_message_produced_and_state_match_latest_record(self, http_mocker: HttpMocker): + self._set_up_requests(http_mocker) + self.mock_response(http_mocker, self.request(), self.response()) + output = self.read_from_stream( + self.private_token_config(self.ACCESS_TOKEN), self.STREAM_NAME, SyncMode.incremental + ) + assert len(output.state_messages) == 1 + diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/test_owners_archived.py b/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/test_owners_archived.py new file mode 100644 index 0000000000000..d54a94ca6a9d8 --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/test_owners_archived.py @@ -0,0 +1,60 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +import freezegun +from airbyte_cdk.test.mock_http import HttpMocker +from airbyte_cdk.test.mock_http.response_builder import FieldPath +from airbyte_protocol.models import SyncMode + +from . import HubspotTestCase +from .request_builders.streams import OwnersArchivedStreamRequestBuilder +from .response_builder.streams import HubspotStreamResponseBuilder + + +@freezegun.freeze_time("2024-03-03T14:42:00Z") +class TestOwnersArchivedStream(HubspotTestCase): + """ + The test case contains a single test - this is just a sanity check, as the tested + stream is identical to the `Owners` stream (which is covered by acceptance tests), except for a single url param. + """ + SCOPES = ["crm.objects.owners.read"] + CURSOR_FIELD = "updatedAt" + STREAM_NAME = "owners_archived" + + def request(self): + return OwnersArchivedStreamRequestBuilder() + + @property + def response_builder(self): + return HubspotStreamResponseBuilder.for_stream(self.STREAM_NAME) + + def response(self, with_pagination: bool = False): + record = self.record_builder(self.STREAM_NAME, FieldPath(self.CURSOR_FIELD)).with_field( + FieldPath(self.CURSOR_FIELD), self.dt_str(self.updated_at()) + ).with_field( + FieldPath("id"), self.OBJECT_ID + ) + response = self.response_builder.with_record(record) + if with_pagination: + response = response.with_pagination() + return response + + @HttpMocker() + def test_given_one_page_when_read_stream_oauth_then_return_records(self, http_mocker: HttpMocker): + self.mock_oauth(http_mocker, self.ACCESS_TOKEN) + self.mock_scopes(http_mocker, self.ACCESS_TOKEN, self.SCOPES) + self.mock_custom_objects(http_mocker) + self.mock_response(http_mocker, self.request().build(), self.response().build()) + output = self.read_from_stream(self.oauth_config(), self.STREAM_NAME, SyncMode.full_refresh) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_two_pages_when_read_stream_private_token_then_return_records(self, http_mocker: HttpMocker): + self.mock_custom_objects(http_mocker) + self.mock_response(http_mocker, self.request().build(), self.response(with_pagination=True).build()) + self.mock_response( + http_mocker, + self.request().with_page_token(self.response_builder.pagination_strategy.NEXT_PAGE_TOKEN).build(), + self.response().build() + ) + output = self.read_from_stream(self.private_token_config(self.ACCESS_TOKEN), self.STREAM_NAME, SyncMode.full_refresh) + assert len(output.records) == 2 diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/test_web_analytics_streams.py b/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/test_web_analytics_streams.py new file mode 100644 index 0000000000000..3f1ceff5b3988 --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/integrations/test_web_analytics_streams.py @@ -0,0 +1,629 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +import http +from datetime import datetime, timedelta +from typing import List, Optional, Tuple + +import freezegun +import mock +import pytest +import pytz +from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse +from airbyte_cdk.test.mock_http.response_builder import FieldPath +from airbyte_protocol.models import AirbyteStateBlob, AirbyteStateMessage, AirbyteStateType, AirbyteStreamState, StreamDescriptor, SyncMode + +from . import HubspotTestCase +from .request_builders.streams import CRMStreamRequestBuilder, IncrementalCRMStreamRequestBuilder, WebAnalyticsRequestBuilder +from .response_builder.streams import HubspotStreamResponseBuilder + +CRM_STREAMS = ( + ("tickets_web_analytics", "tickets", "ticket", ["contacts", "deals", "companies"]), + ("deals_web_analytics", "deals", "deal", ["contacts", "companies", "line_items"]), + ("companies_web_analytics", "companies", "company", ["contacts"]), + ("contacts_web_analytics", "contacts", "contact", ["contacts", "companies"]), + ("engagements_calls_web_analytics", "engagements_calls", "calls", ["contacts", "deal", "company", "tickets"]), + ("engagements_emails_web_analytics", "engagements_emails", "emails", ["contacts", "deal", "company", "tickets"]), + ("engagements_meetings_web_analytics", "engagements_meetings", "meetings", ["contacts", "deal", "company", "tickets"]), + ("engagements_notes_web_analytics", "engagements_notes", "notes", ["contacts", "deal", "company", "tickets"]), + ("engagements_tasks_web_analytics", "engagements_tasks", "tasks", ["contacts", "deal", "company", "tickets"]), +) + +CRM_INCREMENTAL_STREAMS = ( + ("goals_web_analytics", "goals", "goal_targets", []), + ("line_items_web_analytics", "line_items", "line_item", []), + ("products_web_analytics", "products", "product", []), +) + + +class WebAnalyticsTestCase(HubspotTestCase): + PARENT_CURSOR_FIELD = "updatedAt" + + @classmethod + def response_builder(cls, stream): + return HubspotStreamResponseBuilder.for_stream(stream) + + @classmethod + def web_analytics_request( + cls, + stream: str, + token: str, + object_id: str, + object_type: str, + start_date: Optional[str] = None, + end_date: Optional[str] = None, + first_page: bool = True + ): + start_date = start_date or cls.dt_str(cls.start_date()) + end_date = end_date or cls.dt_str(cls.now()) + query = { + "limit": 100, + "occurredAfter": start_date, + "occurredBefore": end_date, + "objectId": object_id, + "objectType": object_type + } + + if not first_page: + query.update(cls.response_builder(stream).pagination_strategy.NEXT_PAGE_TOKEN) + return WebAnalyticsRequestBuilder().with_token(token).with_query(query).build() + + @classmethod + def web_analytics_response( + cls, stream: str, with_pagination: bool = False, updated_on: Optional[str] = None, id: Optional[str] = None + ) -> HttpResponse: + updated_on = updated_on or cls.dt_str(cls.updated_at()) + record = cls.record_builder(stream, FieldPath(cls.CURSOR_FIELD)).with_field(FieldPath(cls.CURSOR_FIELD), updated_on) + if id: + record = record.with_field(FieldPath("objectId"), id) + response = cls.response_builder(stream).with_record(record) + if with_pagination: + response = response.with_pagination() + return response.build() + + @classmethod + def mock_parent_object( + cls, + http_mocker: HttpMocker, + object_ids: List[str], + object_type: str, + stream_name: str, + associations: List[str], + properties: List[str], + first_page: bool = True, + with_pagination: bool = False, + date_range: Optional[Tuple[str, ...]] = None, + ): + response_builder = cls.response_builder(stream_name) + for object_id in object_ids: + record = cls.record_builder(stream_name, FieldPath(cls.PARENT_CURSOR_FIELD)).with_field( + FieldPath(cls.PARENT_CURSOR_FIELD), cls.dt_str(cls.updated_at()) + ).with_field( + FieldPath("id"), object_id + ) + response_builder = response_builder.with_record(record) + if with_pagination: + response_builder = response_builder.with_pagination() + + request_builder = CRMStreamRequestBuilder().for_entity(object_type).with_associations(associations).with_properties(properties) + if not first_page: + request_builder = request_builder.with_page_token(response_builder.pagination_strategy.NEXT_PAGE_TOKEN) + http_mocker.get(request_builder.build(), response_builder.build()) + + +@freezegun.freeze_time("2024-03-03T14:42:00Z") +class TestCRMWebAnalyticsStream(WebAnalyticsTestCase): + SCOPES = ["tickets", "crm.objects.contacts.read", "crm.objects.companies.read", "contacts", "crm.objects.deals.read", "oauth"] + + @classmethod + def extended_dt_ranges(cls) -> Tuple[Tuple[str, ...], ...]: + return ( + (cls.dt_str(cls.now() - timedelta(days=60)), cls.dt_str(cls.now() - timedelta(days=30))), + (cls.dt_str(cls.now() - timedelta(days=30)), cls.dt_str(cls.now())), + ) + + @pytest.mark.parametrize(("stream_name", "parent_stream_name", "object_type", "parent_stream_associations"), CRM_STREAMS) + @HttpMocker() + def test_given_one_page_when_read_stream_oauth_then_return_records( + self, stream_name, parent_stream_name, object_type, parent_stream_associations, http_mocker: HttpMocker + ): + self.mock_oauth(http_mocker, self.ACCESS_TOKEN) + self.mock_scopes(http_mocker, self.ACCESS_TOKEN, self.SCOPES) + self.mock_custom_objects(http_mocker) + self.mock_properties(http_mocker, object_type, self.PROPERTIES) + self.mock_parent_object( + http_mocker, [self.OBJECT_ID], object_type, parent_stream_name, parent_stream_associations, list(self.PROPERTIES.keys()) + ) + self.mock_response( + http_mocker, + self.web_analytics_request(stream_name, self.ACCESS_TOKEN, self.OBJECT_ID, object_type), + self.web_analytics_response(stream_name) + ) + output = self.read_from_stream(self.oauth_config(), stream_name, SyncMode.full_refresh) + assert len(output.records) == 1 + + @pytest.mark.parametrize(("stream_name", "parent_stream_name", "object_type", "parent_stream_associations"), CRM_STREAMS) + @HttpMocker() + def test_given_one_page_when_read_stream_private_token_then_return_records( + self, stream_name, parent_stream_name, object_type, parent_stream_associations, http_mocker: HttpMocker + ): + self.mock_custom_objects(http_mocker) + self.mock_properties(http_mocker, object_type, self.PROPERTIES) + self.mock_parent_object( + http_mocker, [self.OBJECT_ID], object_type, parent_stream_name, parent_stream_associations, list(self.PROPERTIES.keys()) + ) + self.mock_response( + http_mocker, + self.web_analytics_request(stream_name, self.ACCESS_TOKEN, self.OBJECT_ID, object_type), + self.web_analytics_response(stream_name) + ) + output = self.read_from_stream(self.private_token_config(self.ACCESS_TOKEN), stream_name, SyncMode.full_refresh) + assert len(output.records) == 1 + + @pytest.mark.parametrize(("stream_name", "parent_stream_name", "object_type", "parent_stream_associations"), CRM_STREAMS) + @HttpMocker() + def test_given_two_pages_when_read_then_return_records( + self, stream_name, parent_stream_name, object_type, parent_stream_associations, http_mocker: HttpMocker + ): + self.mock_custom_objects(http_mocker) + self.mock_properties(http_mocker, object_type, self.PROPERTIES) + self.mock_parent_object( + http_mocker, [self.OBJECT_ID], object_type, parent_stream_name, parent_stream_associations, list(self.PROPERTIES.keys()) + ) + self.mock_response( + http_mocker, + self.web_analytics_request(stream_name, self.ACCESS_TOKEN, self.OBJECT_ID, object_type), + self.web_analytics_response(stream_name, with_pagination=True) + ) + self.mock_response( + http_mocker, + self.web_analytics_request(stream_name, self.ACCESS_TOKEN, self.OBJECT_ID, object_type, first_page=False), + self.web_analytics_response(stream_name) + ) + output = self.read_from_stream(self.private_token_config(self.ACCESS_TOKEN), stream_name, SyncMode.full_refresh) + assert len(output.records) == 2 + + @pytest.mark.parametrize(("stream_name", "parent_stream_name", "object_type", "parent_stream_associations"), CRM_STREAMS) + @HttpMocker() + def test_given_two_parent_pages_when_read_then_return_records( + self, stream_name, parent_stream_name, object_type, parent_stream_associations, http_mocker: HttpMocker + ): + self.mock_custom_objects(http_mocker) + self.mock_properties(http_mocker, object_type, self.PROPERTIES) + self.mock_parent_object( + http_mocker, + [self.OBJECT_ID], + object_type, + parent_stream_name, + parent_stream_associations, + with_pagination=True, + properties=list(self.PROPERTIES.keys()) + ) + self.mock_parent_object( + http_mocker, + ["another_object_id"], + object_type, + parent_stream_name, + parent_stream_associations, + first_page=False, + properties=list(self.PROPERTIES.keys()) + ) + self.mock_response( + http_mocker, + self.web_analytics_request(stream_name, self.ACCESS_TOKEN, self.OBJECT_ID, object_type), + self.web_analytics_response(stream_name) + ) + self.mock_response( + http_mocker, + self.web_analytics_request(stream_name, self.ACCESS_TOKEN, "another_object_id", object_type), + self.web_analytics_response(stream_name) + ) + output = self.read_from_stream(self.private_token_config(self.ACCESS_TOKEN), stream_name, SyncMode.full_refresh) + assert len(output.records) == 2 + + @pytest.mark.parametrize(("stream_name", "parent_stream_name", "object_type", "parent_stream_associations"), CRM_STREAMS) + @HttpMocker() + def test_given_wide_date_range_and_multiple_parent_records_when_read_then_return_records( + self, stream_name, parent_stream_name, object_type, parent_stream_associations, http_mocker: HttpMocker + ): + date_ranges = self.extended_dt_ranges() + self.mock_custom_objects(http_mocker) + self.mock_properties(http_mocker, object_type, self.PROPERTIES) + start_to_end = (date_ranges[0][0], date_ranges[-1][-1]) + self.mock_parent_object( + http_mocker, + [self.OBJECT_ID, "another_object_id"], + object_type, + parent_stream_name, + parent_stream_associations, + list(self.PROPERTIES.keys()), + date_range=start_to_end + ) + for dt_range in date_ranges: + for _id in (self.OBJECT_ID, "another_object_id"): + start, end = dt_range + web_analytics_response = self.web_analytics_response(stream_name) + self.mock_response( + http_mocker, + self.web_analytics_request(stream_name, self.ACCESS_TOKEN, _id, object_type, start, end), + web_analytics_response + ) + config_start_dt = date_ranges[0][0] + output = self.read_from_stream(self.private_token_config(self.ACCESS_TOKEN, config_start_dt), stream_name, SyncMode.full_refresh) + assert len(output.records) == 4 # 2 parent objects * 2 datetime slices + + @pytest.mark.parametrize(("stream_name", "parent_stream_name", "object_type", "parent_stream_associations"), CRM_STREAMS) + @HttpMocker() + def test_given_error_response_when_read_analytics_then_get_trace_message( + self, stream_name, parent_stream_name, object_type, parent_stream_associations, http_mocker: HttpMocker + ): + self.mock_custom_objects(http_mocker) + self.mock_properties(http_mocker, object_type, self.PROPERTIES) + self.mock_parent_object( + http_mocker, [self.OBJECT_ID], object_type, parent_stream_name, parent_stream_associations, list(self.PROPERTIES.keys()) + ) + self.mock_response( + http_mocker, + self.web_analytics_request(stream_name, self.ACCESS_TOKEN, self.OBJECT_ID, object_type), + HttpResponse(status_code=500, body="{}") + ) + with mock.patch("time.sleep"): + output = self.read_from_stream(self.private_token_config(self.ACCESS_TOKEN), stream_name, SyncMode.full_refresh) + assert len(output.records) == 0 + assert len(output.trace_messages) > 0 + assert len(output.errors) > 0 + + @pytest.mark.parametrize(("stream_name", "parent_stream_name", "object_type", "parent_stream_associations"), CRM_STREAMS) + @HttpMocker() + def test_given_500_then_200_when_read_then_return_records( + self, stream_name, parent_stream_name, object_type, parent_stream_associations, http_mocker: HttpMocker + ): + self.mock_custom_objects(http_mocker) + self.mock_properties(http_mocker, object_type, self.PROPERTIES) + self.mock_parent_object( + http_mocker, [self.OBJECT_ID], object_type, parent_stream_name, parent_stream_associations, list(self.PROPERTIES.keys()) + ) + self.mock_response( + http_mocker, + self.web_analytics_request(stream_name, self.ACCESS_TOKEN, self.OBJECT_ID, object_type), + [ + HttpResponse(status_code=500, body="{}"), + self.web_analytics_response(stream_name) + ] + ) + with mock.patch("time.sleep"): + output = self.read_from_stream(self.private_token_config(self.ACCESS_TOKEN), stream_name, SyncMode.full_refresh) + assert len(output.records) == 1 + assert len(output.trace_messages) > 0 + assert len(output.errors) == 0 + + @pytest.mark.parametrize(("stream_name", "parent_stream_name", "object_type", "parent_stream_associations"), CRM_STREAMS) + @HttpMocker() + def test_given_missing_scopes_error_when_read_then_hault( + self, + stream_name, + parent_stream_name, + object_type, + parent_stream_associations, + http_mocker: HttpMocker + ): + self.mock_oauth(http_mocker, self.ACCESS_TOKEN) + self.mock_scopes(http_mocker, self.ACCESS_TOKEN, []) + self.read_from_stream(self.oauth_config(), stream_name, SyncMode.full_refresh, expecting_exception=True) + + @pytest.mark.parametrize(("stream_name", "parent_stream_name", "object_type", "parent_stream_associations"), CRM_STREAMS) + @HttpMocker() + def test_given_unauthorized_error_when_read_then_hault( + self, + stream_name, + parent_stream_name, + object_type, + parent_stream_associations, + http_mocker: HttpMocker + ): + self.mock_custom_objects(http_mocker) + self.mock_properties(http_mocker, object_type, self.PROPERTIES) + self.mock_parent_object( + http_mocker, [self.OBJECT_ID], object_type, parent_stream_name, parent_stream_associations, list(self.PROPERTIES.keys()) + ) + self.mock_response( + http_mocker, + self.web_analytics_request(stream_name, self.ACCESS_TOKEN, self.OBJECT_ID, object_type), + HttpResponse(status_code=http.HTTPStatus.UNAUTHORIZED, body="{}") + ) + with mock.patch("time.sleep"): + output = self.read_from_stream(self.private_token_config(self.ACCESS_TOKEN), stream_name, SyncMode.full_refresh) + assert len(output.records) == 0 + assert len(output.trace_messages) > 0 + assert len(output.errors) > 0 + + @pytest.mark.parametrize(("stream_name", "parent_stream_name", "object_type", "parent_stream_associations"), CRM_STREAMS) + @HttpMocker() + def test_given_one_page_when_read_then_get_transformed_records( + self, + stream_name, + parent_stream_name, + object_type, + parent_stream_associations, + http_mocker: HttpMocker + ): + self.mock_custom_objects(http_mocker) + self.mock_properties(http_mocker, object_type, self.PROPERTIES) + self.mock_parent_object( + http_mocker, [self.OBJECT_ID], object_type, parent_stream_name, parent_stream_associations, list(self.PROPERTIES.keys()) + ) + self.mock_response( + http_mocker, + self.web_analytics_request(stream_name, self.ACCESS_TOKEN, self.OBJECT_ID, object_type), + self.web_analytics_response(stream_name) + ) + output = self.read_from_stream(self.private_token_config(self.ACCESS_TOKEN), stream_name, SyncMode.full_refresh) + record = output.records[0].record.data + assert "properties" not in record + prop_fields = len([f for f in record if f.startswith("properties_")]) + assert prop_fields > 0 + + @pytest.mark.parametrize(("stream_name", "parent_stream_name", "object_type", "parent_stream_associations"), CRM_STREAMS) + @HttpMocker() + def test_given_one_page_when_read_then_get_no_records_filtered( + self, + stream_name, + parent_stream_name, + object_type, + parent_stream_associations, + http_mocker: HttpMocker + ): + # validate that no filter is applied on the record set received from the API response + self.mock_custom_objects(http_mocker) + self.mock_properties(http_mocker, object_type, self.PROPERTIES) + self.mock_parent_object( + http_mocker, [self.OBJECT_ID], object_type, parent_stream_name, parent_stream_associations, list(self.PROPERTIES.keys()) + ) + self.mock_response( + http_mocker, + self.web_analytics_request(stream_name, self.ACCESS_TOKEN, self.OBJECT_ID, object_type), + self.web_analytics_response(stream_name, updated_on=self.dt_str(self.now() - timedelta(days=365))) + ) + output = self.read_from_stream(self.private_token_config(self.ACCESS_TOKEN), stream_name, SyncMode.full_refresh) + assert len(output.records) == 1 + + @pytest.mark.parametrize(("stream_name", "parent_stream_name", "object_type", "parent_stream_associations"), CRM_STREAMS) + @HttpMocker() + def test_given_incremental_sync_when_read_then_state_message_produced_and_state_match_latest_record( + self, stream_name, parent_stream_name, object_type, parent_stream_associations, http_mocker: HttpMocker + ): + self.mock_custom_objects(http_mocker) + self.mock_properties(http_mocker, object_type, self.PROPERTIES) + self.mock_parent_object( + http_mocker, [self.OBJECT_ID], object_type, parent_stream_name, parent_stream_associations, list(self.PROPERTIES.keys()) + ) + self.mock_response( + http_mocker, + self.web_analytics_request(stream_name, self.ACCESS_TOKEN, self.OBJECT_ID, object_type), + self.web_analytics_response(stream_name, id=self.OBJECT_ID) + ) + output = self.read_from_stream( + self.private_token_config(self.ACCESS_TOKEN), stream_name, SyncMode.incremental + ) + assert len(output.state_messages) == 1 + + cursor_value_from_state_message = output.most_recent_state.stream_state.dict().get(self.OBJECT_ID, {}).get(self.CURSOR_FIELD) + cursor_value_from_latest_record = output.records[-1].record.data.get(self.CURSOR_FIELD) + assert cursor_value_from_state_message == cursor_value_from_latest_record + + @pytest.mark.parametrize(("stream_name", "parent_stream_name", "object_type", "parent_stream_associations"), CRM_STREAMS) + @HttpMocker() + def test_given_state_with_no_current_slice_when_read_then_current_slice_in_state( + self, stream_name, parent_stream_name, object_type, parent_stream_associations, http_mocker: HttpMocker + ): + self.mock_custom_objects(http_mocker) + self.mock_properties(http_mocker, object_type, self.PROPERTIES) + self.mock_parent_object( + http_mocker, [self.OBJECT_ID], object_type, parent_stream_name, parent_stream_associations, list(self.PROPERTIES.keys()) + ) + self.mock_response( + http_mocker, + self.web_analytics_request(stream_name, self.ACCESS_TOKEN, self.OBJECT_ID, object_type), + self.web_analytics_response(stream_name, id=self.OBJECT_ID) + ) + another_object_id = "another_object_id" + current_state = AirbyteStateMessage( + type=AirbyteStateType.STREAM, + stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name=stream_name), + stream_state=AirbyteStateBlob(**{another_object_id: {self.CURSOR_FIELD: self.dt_str(self.now())}}) + ) + ) + output = self.read_from_stream( + self.private_token_config(self.ACCESS_TOKEN), stream_name, SyncMode.incremental, state=[current_state] + ) + assert len(output.state_messages) == 1 + assert output.most_recent_state.stream_state.dict().get(self.OBJECT_ID, {}).get(self.CURSOR_FIELD) + assert output.most_recent_state.stream_state.dict().get(another_object_id, {}).get(self.CURSOR_FIELD) + + @pytest.mark.parametrize(("stream_name", "parent_stream_name", "object_type", "parent_stream_associations"), CRM_STREAMS) + @HttpMocker() + def test_given_state_with_current_slice_when_read_then_state_is_updated( + self, stream_name, parent_stream_name, object_type, parent_stream_associations, http_mocker: HttpMocker + ): + self.mock_custom_objects(http_mocker) + self.mock_properties(http_mocker, object_type, self.PROPERTIES) + self.mock_parent_object( + http_mocker, [self.OBJECT_ID], object_type, parent_stream_name, parent_stream_associations, list(self.PROPERTIES.keys()) + ) + self.mock_response( + http_mocker, + self.web_analytics_request(stream_name, self.ACCESS_TOKEN, self.OBJECT_ID, object_type), + self.web_analytics_response(stream_name, id=self.OBJECT_ID) + ) + current_state = AirbyteStateMessage( + type=AirbyteStateType.STREAM, + stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name=stream_name), + stream_state=AirbyteStateBlob(**{self.OBJECT_ID: {self.CURSOR_FIELD: self.dt_str(self.start_date() - timedelta(days=30))}}) + ) + ) + output = self.read_from_stream( + self.private_token_config(self.ACCESS_TOKEN), stream_name, SyncMode.incremental, state=[current_state] + ) + assert len(output.state_messages) == 1 + assert output.most_recent_state.stream_state.dict().get(self.OBJECT_ID, {}).get(self.CURSOR_FIELD) == self.dt_str(self.updated_at()) + + +@freezegun.freeze_time("2024-03-03T14:42:00Z") +class TestIncrementalCRMWebAnalyticsStreamFullRefresh(TestCRMWebAnalyticsStream): + SCOPES = ["e-commerce", "oauth", "crm.objects.feedback_submissions.read", "crm.objects.goals.read"] + + @classmethod + def dt_conversion(cls, dt: str) -> str: + return str(int(datetime.strptime(dt, cls.DT_FORMAT).replace(tzinfo=pytz.utc).timestamp()) * 1000) + + @classmethod + def mock_parent_object( + cls, + http_mocker: HttpMocker, + object_ids: List[str], + object_type: str, + stream_name: str, + associations: List[str], + properties: List[str], + first_page: bool = True, + with_pagination: bool = False, + date_range: Optional[Tuple[str]] = None, + ): + date_range = date_range or (cls.dt_str(cls.start_date()), cls.dt_str(cls.now())) + response_builder = cls.response_builder(stream_name) + for object_id in object_ids: + record = cls.record_builder(stream_name, FieldPath(cls.PARENT_CURSOR_FIELD)).with_field( + FieldPath(cls.PARENT_CURSOR_FIELD), cls.dt_str(cls.updated_at()) + ).with_field( + FieldPath("id"), object_id + ) + response_builder = response_builder.with_record(record) + if with_pagination: + response_builder = response_builder.with_pagination() + + start, end = date_range + request_builder = IncrementalCRMStreamRequestBuilder().for_entity( + object_type + ).with_associations( + associations + ).with_dt_range( + ("startTimestamp", cls.dt_conversion(start)), + ("endTimestamp", cls.dt_conversion(end)) + ).with_properties(properties) + if not first_page: + request_builder = request_builder.with_page_token(response_builder.pagination_strategy.NEXT_PAGE_TOKEN) + + http_mocker.get(request_builder.build(), response_builder.build()) + + @pytest.mark.parametrize(("stream_name", "parent_stream_name", "object_type", "parent_stream_associations"), CRM_INCREMENTAL_STREAMS) + def test_given_one_page_when_read_stream_oauth_then_return_records( + self, stream_name, parent_stream_name, object_type, parent_stream_associations + ): + super().test_given_one_page_when_read_stream_oauth_then_return_records( + stream_name, parent_stream_name, object_type, parent_stream_associations + ) + + @pytest.mark.parametrize(("stream_name", "parent_stream_name", "object_type", "parent_stream_associations"), CRM_INCREMENTAL_STREAMS) + def test_given_one_page_when_read_stream_private_token_then_return_records( + self, stream_name, parent_stream_name, object_type, parent_stream_associations + ): + super().test_given_one_page_when_read_stream_private_token_then_return_records( + stream_name, parent_stream_name, object_type, parent_stream_associations + ) + + @pytest.mark.parametrize(("stream_name", "parent_stream_name", "object_type", "parent_stream_associations"), CRM_INCREMENTAL_STREAMS) + def test_given_two_pages_when_read_then_return_records( + self, stream_name, parent_stream_name, object_type, parent_stream_associations + ): + super().test_given_two_pages_when_read_then_return_records( + stream_name, parent_stream_name, object_type, parent_stream_associations + ) + + @pytest.mark.parametrize(("stream_name", "parent_stream_name", "object_type", "parent_stream_associations"), CRM_INCREMENTAL_STREAMS) + def test_given_wide_date_range_and_multiple_parent_records_when_read_then_return_records( + self, stream_name, parent_stream_name, object_type, parent_stream_associations + ): + super().test_given_wide_date_range_and_multiple_parent_records_when_read_then_return_records( + stream_name, parent_stream_name, object_type, parent_stream_associations + ) + + @pytest.mark.parametrize(("stream_name", "parent_stream_name", "object_type", "parent_stream_associations"), CRM_INCREMENTAL_STREAMS) + def test_given_error_response_when_read_analytics_then_get_trace_message( + self, stream_name, parent_stream_name, object_type, parent_stream_associations + ): + super().test_given_error_response_when_read_analytics_then_get_trace_message( + stream_name, parent_stream_name, object_type, parent_stream_associations + ) + + @pytest.mark.parametrize(("stream_name", "parent_stream_name", "object_type", "parent_stream_associations"), CRM_INCREMENTAL_STREAMS) + def test_given_500_then_200_when_read_then_return_records( + self, stream_name, parent_stream_name, object_type, parent_stream_associations + ): + super().test_given_500_then_200_when_read_then_return_records( + stream_name, parent_stream_name, object_type, parent_stream_associations + ) + + @pytest.mark.parametrize(("stream_name", "parent_stream_name", "object_type", "parent_stream_associations"), CRM_INCREMENTAL_STREAMS) + def test_given_missing_scopes_error_when_read_then_hault( + self, stream_name, parent_stream_name, object_type, parent_stream_associations + ): + super().test_given_missing_scopes_error_when_read_then_hault( + stream_name, parent_stream_name, object_type, parent_stream_associations + ) + + @pytest.mark.parametrize(("stream_name", "parent_stream_name", "object_type", "parent_stream_associations"), CRM_INCREMENTAL_STREAMS) + def test_given_unauthorized_error_when_read_then_hault( + self, stream_name, parent_stream_name, object_type, parent_stream_associations + ): + super().test_given_unauthorized_error_when_read_then_hault( + stream_name, parent_stream_name, object_type, parent_stream_associations + ) + + @pytest.mark.parametrize(("stream_name", "parent_stream_name", "object_type", "parent_stream_associations"), CRM_INCREMENTAL_STREAMS) + def test_given_one_page_when_read_then_get_transformed_records( + self, stream_name, parent_stream_name, object_type, parent_stream_associations + ): + super().test_given_one_page_when_read_then_get_transformed_records( + stream_name, parent_stream_name, object_type, parent_stream_associations + ) + + @pytest.mark.parametrize(("stream_name", "parent_stream_name", "object_type", "parent_stream_associations"), CRM_INCREMENTAL_STREAMS) + def test_given_one_page_when_read_then_get_no_records_filtered( + self, stream_name, parent_stream_name, object_type, parent_stream_associations + ): + super().test_given_one_page_when_read_then_get_no_records_filtered( + stream_name, parent_stream_name, object_type, parent_stream_associations + ) + + @pytest.mark.parametrize(("stream_name", "parent_stream_name", "object_type", "parent_stream_associations"), CRM_INCREMENTAL_STREAMS) + def test_given_incremental_sync_when_read_then_state_message_produced_and_state_match_latest_record( + self, stream_name, parent_stream_name, object_type, parent_stream_associations + ): + super().test_given_incremental_sync_when_read_then_state_message_produced_and_state_match_latest_record( + stream_name, parent_stream_name, object_type, parent_stream_associations + ) + + @pytest.mark.parametrize(("stream_name", "parent_stream_name", "object_type", "parent_stream_associations"), CRM_INCREMENTAL_STREAMS) + def test_given_state_with_no_current_slice_when_read_then_current_slice_in_state( + self, stream_name, parent_stream_name, object_type, parent_stream_associations + ): + super().test_given_state_with_no_current_slice_when_read_then_current_slice_in_state( + stream_name, parent_stream_name, object_type, parent_stream_associations + ) + + @pytest.mark.parametrize(("stream_name", "parent_stream_name", "object_type", "parent_stream_associations"), CRM_INCREMENTAL_STREAMS) + def test_given_state_with_current_slice_when_read_then_state_is_updated( + self, stream_name, parent_stream_name, object_type, parent_stream_associations + ): + super().test_given_state_with_current_slice_when_read_then_state_is_updated( + stream_name, parent_stream_name, object_type, parent_stream_associations + ) + + @pytest.mark.parametrize(("stream_name", "parent_stream_name", "object_type", "parent_stream_associations"), CRM_INCREMENTAL_STREAMS) + def test_given_two_parent_pages_when_read_then_return_records( + self, stream_name, parent_stream_name, object_type, parent_stream_associations + ): + super().test_given_two_parent_pages_when_read_then_return_records( + stream_name, parent_stream_name, object_type, parent_stream_associations + ) diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/__init__.py b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/__init__.py new file mode 100644 index 0000000000000..f70ecfc3a89e7 --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/__init__.py @@ -0,0 +1 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/__init__.py b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/__init__.py new file mode 100644 index 0000000000000..f70ecfc3a89e7 --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/__init__.py @@ -0,0 +1 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/__init__.py b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/__init__.py new file mode 100644 index 0000000000000..f70ecfc3a89e7 --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/__init__.py @@ -0,0 +1 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/companies.json b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/companies.json new file mode 100644 index 0000000000000..c27f8edf74dc5 --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/companies.json @@ -0,0 +1,15 @@ +{ + "results": [ + { + "id": "312929580", + "createdAt": "2021-02-23T20:08:49.603Z", + "updatedAt": "2021-02-23T20:08:53.371Z", + "archived": false, + "contacts": ["contact A"], + "properties": { + "closed_date": "2021-02-23T20:08:49.603000+00:00", + "createdate": "2021-02-23T20:08:49.603000+00:00" + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/companies_web_analytics.json b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/companies_web_analytics.json new file mode 100644 index 0000000000000..9f66d27f168d1 --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/companies_web_analytics.json @@ -0,0 +1,57 @@ +{ + "results": [ + { + "objectType": "COMPANY", + "objectId": "153", + "eventType": "pe8727216_airbyte_company_custom_event", + "occurredAt": "2023-12-01T21:50:11.799Z", + "id": "b850d903-254c-4df6-b159-9263b2b7eed21", + "properties": { + "hs_asset_description": "dolor architecto", + "hs_asset_type": "Donec scelerisque est sed libero tempor venenatis", + "hs_browser": "Donec lobortis nibh sit amet dictum cursus", + "hs_campaign_id": "illum quas dolor modi exercitationem", + "hs_city": "reiciendis placeat dolor placeat architecto dolor", + "hs_country": "Mauris sagittis sem eu neque pellentesque gravida", + "hs_hs_device_name": "elit. molestias, elit. amet", + "hs_device_type": "accusantium dolor sit elit. veniam reprehenderit", + "hs_title": "In egestas nisl commodo pellentesque facilisis", + "hs_form_correlation_id": "Pellentesque tincidunt purus id rhoncus blandit", + "hs_element_class": "Nulla luctus tortor at fermentum aliquet", + "hs_element_id": "culpa! sit ipsum sit Lorem consectetur quas odit", + "hs_element_text": "Hic molestias, Lorem ipsum, possimus adipisicing", + "hs_language": "officiis adipisicing", + "hs_document_id": "Proin sed lacus eget sapien viverra vestibulum vel sit amet massa", + "hs_presentation_id": "Mauris et nisl a metus porttitor molestie id viverra ex", + "hs_user_id": "Quisque consequat nunc at eleifend tempus", + "hs_link_href": "officiis exercitationem adipisicing odit dolor", + "hs_operating_system": "magnam, molestias,", + "hs_operating_version": "Lorem ipsum culpa! illum elit. esse esse officiis", + "hs_page_content_type": "elit. libero Lorem", + "hs_page_id": "magnam, magnam,", + "hs_page_title": "Praesent auctor sem et purus facilisis, at volutpat lorem tristique", + "hs_page_url": "accusantium quas architecto ipsum ipsum possimus", + "hs_parent_module_id": "Aliquam eleifend ex in ligula gravida mollis", + "hs_referrer": "Sed nec eros quis sem euismod tempor", + "hs_region": "consectetur ipsum, architecto ipsum Lorem nobis", + "hs_url": "Aenean feugiat quam in urna iaculis, vitae ultrices metus scelerisque", + "hs_screen_height": "Donec laoreet est ut lorem viverra tempus", + "hs_screen_width": "Curabitur sit amet augue luctus, congue erat congue, vestibulum lectus", + "hs_touchpoint_source": "dolor", + "hs_tracking_name": "quas ipsum amet illum molestias,", + "hs_user_agent": "Aliquam eget libero consectetur, consectetur sem ac, vehicula orci", + "hs_utm_campaign": "magnam, Lorem modi culpa!", + "hs_utm_content": "Proin pulvinar nulla sed lacus venenatis blandit", + "hs_utm_medium": "placeat dolor dolor consectetur elit.", + "hs_utm_source": "Duis non erat at justo euismod lobortis", + "hs_utm_term": "Proin sit amet leo tincidunt, ultrices dolor ac, hendrerit mi", + "hs_base_url": "Maecenas ut risus eget ligula pharetra pellentesque sed et mi", + "hs_form_id": "Quisque cursus sem sit amet libero feugiat rhoncus", + "hs_form_type": "Pellentesque tristique velit at velit lacinia vulputate", + "hs_url_domain": "Quisque rhoncus diam at ex eleifend aliquet", + "hs_url_path": "Suspendisse blandit sem in consectetur imperdiet", + "hs_visitor_type": "Nunc porttitor metus a mauris commodo, id dictum lectus feugiat" + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/contacts.json b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/contacts.json new file mode 100644 index 0000000000000..bb273173fc34d --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/contacts.json @@ -0,0 +1,15 @@ +{ + "results": [ + { + "id": "312929580", + "createdAt": "2021-02-23T20:08:49.603Z", + "updatedAt": "2021-02-23T20:08:53.371Z", + "archived": false, + "companies": ["5000526215", "5000526215"], + "properties": { + "closed_date": "2021-02-23T20:08:49.603000+00:00", + "createdate": "2021-02-23T20:08:49.603000+00:00" + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/contacts_web_analytics.json b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/contacts_web_analytics.json new file mode 100644 index 0000000000000..4d77dd8f20995 --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/contacts_web_analytics.json @@ -0,0 +1,57 @@ +{ + "results": [ + { + "objectType": "CONTACT", + "objectId": "153", + "eventType": "pe8727216_airbyte_contact_custom_event", + "occurredAt": "2023-12-01T21:50:11.801Z", + "id": "b850d903-254c-4df6-b159-9263b2b7eed8", + "properties": { + "hs_asset_description": "dolor architecto", + "hs_asset_type": "Donec scelerisque est sed libero tempor venenatis", + "hs_browser": "Donec lobortis nibh sit amet dictum cursus", + "hs_campaign_id": "illum quas dolor modi exercitationem", + "hs_city": "reiciendis placeat dolor placeat architecto dolor", + "hs_country": "Mauris sagittis sem eu neque pellentesque gravida", + "hs_hs_device_name": "elit. molestias, elit. amet", + "hs_device_type": "accusantium dolor sit elit. veniam reprehenderit", + "hs_title": "In egestas nisl commodo pellentesque facilisis", + "hs_form_correlation_id": "Pellentesque tincidunt purus id rhoncus blandit", + "hs_element_class": "Nulla luctus tortor at fermentum aliquet", + "hs_element_id": "culpa! sit ipsum sit Lorem consectetur quas odit", + "hs_element_text": "Hic molestias, Lorem ipsum, possimus adipisicing", + "hs_language": "officiis adipisicing", + "hs_document_id": "Proin sed lacus eget sapien viverra vestibulum vel sit amet massa", + "hs_presentation_id": "Mauris et nisl a metus porttitor molestie id viverra ex", + "hs_user_id": "Quisque consequat nunc at eleifend tempus", + "hs_link_href": "officiis exercitationem adipisicing odit dolor", + "hs_operating_system": "magnam, molestias,", + "hs_operating_version": "Lorem ipsum culpa! illum elit. esse esse officiis", + "hs_page_content_type": "elit. libero Lorem", + "hs_page_id": "magnam, magnam,", + "hs_page_title": "Praesent auctor sem et purus facilisis, at volutpat lorem tristique", + "hs_page_url": "accusantium quas architecto ipsum ipsum possimus", + "hs_parent_module_id": "Aliquam eleifend ex in ligula gravida mollis", + "hs_referrer": "Sed nec eros quis sem euismod tempor", + "hs_region": "consectetur ipsum, architecto ipsum Lorem nobis", + "hs_url": "Aenean feugiat quam in urna iaculis, vitae ultrices metus scelerisque", + "hs_screen_height": "Donec laoreet est ut lorem viverra tempus", + "hs_screen_width": "Curabitur sit amet augue luctus, congue erat congue, vestibulum lectus", + "hs_touchpoint_source": "dolor", + "hs_tracking_name": "quas ipsum amet illum molestias,", + "hs_user_agent": "Aliquam eget libero consectetur, consectetur sem ac, vehicula orci", + "hs_utm_campaign": "magnam, Lorem modi culpa!", + "hs_utm_content": "Proin pulvinar nulla sed lacus venenatis blandit", + "hs_utm_medium": "placeat dolor dolor consectetur elit.", + "hs_utm_source": "Duis non erat at justo euismod lobortis", + "hs_utm_term": "Proin sit amet leo tincidunt, ultrices dolor ac, hendrerit mi", + "hs_base_url": "Maecenas ut risus eget ligula pharetra pellentesque sed et mi", + "hs_form_id": "Quisque cursus sem sit amet libero feugiat rhoncus", + "hs_form_type": "Pellentesque tristique velit at velit lacinia vulputate", + "hs_url_domain": "Quisque rhoncus diam at ex eleifend aliquet", + "hs_url_path": "Suspendisse blandit sem in consectetur imperdiet", + "hs_visitor_type": "Nunc porttitor metus a mauris commodo, id dictum lectus feugiat" + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/deals.json b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/deals.json new file mode 100644 index 0000000000000..fd6283097b536 --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/deals.json @@ -0,0 +1,17 @@ +{ + "results": [ + { + "id": "312929580", + "createdAt": "2021-02-23T20:08:49.603Z", + "updatedAt": "2021-02-23T20:08:53.371Z", + "archived": false, + "companies": ["company A"], + "contacts": ["contact A"], + "line_items": ["line item A"], + "properties": { + "closed_date": "2021-02-23T20:08:49.603000+00:00", + "createdate": "2021-02-23T20:08:49.603000+00:00" + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/deals_web_analytics.json b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/deals_web_analytics.json new file mode 100644 index 0000000000000..d1d871ea34af9 --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/deals_web_analytics.json @@ -0,0 +1,57 @@ +{ + "results": [ + { + "objectType": "DEAL", + "objectId": "152", + "eventType": "pe8727216_airbyte_deal_custom_event", + "occurredAt": "2023-12-01T21:50:11.798Z", + "id": "b850d903-254c-4df6-b159-9263b2b7eed1", + "properties": { + "hs_asset_description": "dolor architecto", + "hs_asset_type": "Donec scelerisque est sed libero tempor venenatis", + "hs_browser": "Donec lobortis nibh sit amet dictum cursus", + "hs_campaign_id": "illum quas dolor modi exercitationem", + "hs_city": "reiciendis placeat dolor placeat architecto dolor", + "hs_country": "Mauris sagittis sem eu neque pellentesque gravida", + "hs_hs_device_name": "elit. molestias, elit. amet", + "hs_device_type": "accusantium dolor sit elit. veniam reprehenderit", + "hs_title": "In egestas nisl commodo pellentesque facilisis", + "hs_form_correlation_id": "Pellentesque tincidunt purus id rhoncus blandit", + "hs_element_class": "Nulla luctus tortor at fermentum aliquet", + "hs_element_id": "culpa! sit ipsum sit Lorem consectetur quas odit", + "hs_element_text": "Hic molestias, Lorem ipsum, possimus adipisicing", + "hs_language": "officiis adipisicing", + "hs_document_id": "Proin sed lacus eget sapien viverra vestibulum vel sit amet massa", + "hs_presentation_id": "Mauris et nisl a metus porttitor molestie id viverra ex", + "hs_user_id": "Quisque consequat nunc at eleifend tempus", + "hs_link_href": "officiis exercitationem adipisicing odit dolor", + "hs_operating_system": "magnam, molestias,", + "hs_operating_version": "Lorem ipsum culpa! illum elit. esse esse officiis", + "hs_page_content_type": "elit. libero Lorem", + "hs_page_id": "magnam, magnam,", + "hs_page_title": "Praesent auctor sem et purus facilisis, at volutpat lorem tristique", + "hs_page_url": "accusantium quas architecto ipsum ipsum possimus", + "hs_parent_module_id": "Aliquam eleifend ex in ligula gravida mollis", + "hs_referrer": "Sed nec eros quis sem euismod tempor", + "hs_region": "consectetur ipsum, architecto ipsum Lorem nobis", + "hs_url": "Aenean feugiat quam in urna iaculis, vitae ultrices metus scelerisque", + "hs_screen_height": "Donec laoreet est ut lorem viverra tempus", + "hs_screen_width": "Curabitur sit amet augue luctus, congue erat congue, vestibulum lectus", + "hs_touchpoint_source": "dolor", + "hs_tracking_name": "quas ipsum amet illum molestias,", + "hs_user_agent": "Aliquam eget libero consectetur, consectetur sem ac, vehicula orci", + "hs_utm_campaign": "magnam, Lorem modi culpa!", + "hs_utm_content": "Proin pulvinar nulla sed lacus venenatis blandit", + "hs_utm_medium": "placeat dolor dolor consectetur elit.", + "hs_utm_source": "Duis non erat at justo euismod lobortis", + "hs_utm_term": "Proin sit amet leo tincidunt, ultrices dolor ac, hendrerit mi", + "hs_base_url": "Maecenas ut risus eget ligula pharetra pellentesque sed et mi", + "hs_form_id": "Quisque cursus sem sit amet libero feugiat rhoncus", + "hs_form_type": "Pellentesque tristique velit at velit lacinia vulputate", + "hs_url_domain": "Quisque rhoncus diam at ex eleifend aliquet", + "hs_url_path": "Suspendisse blandit sem in consectetur imperdiet", + "hs_visitor_type": "Nunc porttitor metus a mauris commodo, id dictum lectus feugiat" + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/engagements_calls.json b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/engagements_calls.json new file mode 100644 index 0000000000000..3824c7e06ef15 --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/engagements_calls.json @@ -0,0 +1,18 @@ +{ + "results": [ + { + "id": "312929680", + "createdAt": "2021-02-23T20:08:49.603Z", + "updatedAt": "2021-02-23T20:08:53.371Z", + "archived": false, + "companies": ["company A"], + "contacts": ["contact A"], + "deals": ["deal A"], + "tickets": ["ticket A"], + "properties": { + "closed_date": "2021-02-23T20:08:49.603000+00:00", + "createdate": "2021-02-23T20:08:49.603000+00:00" + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/engagements_calls_web_analytics.json b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/engagements_calls_web_analytics.json new file mode 100644 index 0000000000000..57ca2f6c1f1f0 --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/engagements_calls_web_analytics.json @@ -0,0 +1,57 @@ +{ + "results": [ + { + "objectType": "ENGAGEMENT_CALL", + "objectId": "133", + "eventType": "pe8727216_airbyte_engagement_call_custom_event", + "occurredAt": "2023-12-01T21:50:11.001Z", + "id": "b850d903-254c-4df6-b159-9263b2b6eed8", + "properties": { + "hs_asset_description": "dolor architecto", + "hs_asset_type": "Donec scelerisque est sed libero tempor venenatis", + "hs_browser": "Donec lobortis nibh sit amet dictum cursus", + "hs_campaign_id": "illum quas dolor modi exercitationem", + "hs_city": "reiciendis placeat dolor placeat architecto dolor", + "hs_country": "Mauris sagittis sem eu neque pellentesque gravida", + "hs_hs_device_name": "elit. molestias, elit. amet", + "hs_device_type": "accusantium dolor sit elit. veniam reprehenderit", + "hs_title": "In egestas nisl commodo pellentesque facilisis", + "hs_form_correlation_id": "Pellentesque tincidunt purus id rhoncus blandit", + "hs_element_class": "Nulla luctus tortor at fermentum aliquet", + "hs_element_id": "culpa! sit ipsum sit Lorem consectetur quas odit", + "hs_element_text": "Hic molestias, Lorem ipsum, possimus adipisicing", + "hs_language": "officiis adipisicing", + "hs_document_id": "Proin sed lacus eget sapien viverra vestibulum vel sit amet massa", + "hs_presentation_id": "Mauris et nisl a metus porttitor molestie id viverra ex", + "hs_user_id": "Quisque consequat nunc at eleifend tempus", + "hs_link_href": "officiis exercitationem adipisicing odit dolor", + "hs_operating_system": "magnam, molestias,", + "hs_operating_version": "Lorem ipsum culpa! illum elit. esse esse officiis", + "hs_page_content_type": "elit. libero Lorem", + "hs_page_id": "magnam, magnam,", + "hs_page_title": "Praesent auctor sem et purus facilisis, at volutpat lorem tristique", + "hs_page_url": "accusantium quas architecto ipsum ipsum possimus", + "hs_parent_module_id": "Aliquam eleifend ex in ligula gravida mollis", + "hs_referrer": "Sed nec eros quis sem euismod tempor", + "hs_region": "consectetur ipsum, architecto ipsum Lorem nobis", + "hs_url": "Aenean feugiat quam in urna iaculis, vitae ultrices metus scelerisque", + "hs_screen_height": "Donec laoreet est ut lorem viverra tempus", + "hs_screen_width": "Curabitur sit amet augue luctus, congue erat congue, vestibulum lectus", + "hs_touchpoint_source": "dolor", + "hs_tracking_name": "quas ipsum amet illum molestias,", + "hs_user_agent": "Aliquam eget libero consectetur, consectetur sem ac, vehicula orci", + "hs_utm_campaign": "magnam, Lorem modi culpa!", + "hs_utm_content": "Proin pulvinar nulla sed lacus venenatis blandit", + "hs_utm_medium": "placeat dolor dolor consectetur elit.", + "hs_utm_source": "Duis non erat at justo euismod lobortis", + "hs_utm_term": "Proin sit amet leo tincidunt, ultrices dolor ac, hendrerit mi", + "hs_base_url": "Maecenas ut risus eget ligula pharetra pellentesque sed et mi", + "hs_form_id": "Quisque cursus sem sit amet libero feugiat rhoncus", + "hs_form_type": "Pellentesque tristique velit at velit lacinia vulputate", + "hs_url_domain": "Quisque rhoncus diam at ex eleifend aliquet", + "hs_url_path": "Suspendisse blandit sem in consectetur imperdiet", + "hs_visitor_type": "Nunc porttitor metus a mauris commodo, id dictum lectus feugiat" + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/engagements_emails.json b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/engagements_emails.json new file mode 100644 index 0000000000000..f62dd4f3ea303 --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/engagements_emails.json @@ -0,0 +1,18 @@ +{ + "results": [ + { + "id": "312929681", + "createdAt": "2021-02-23T20:08:49.603Z", + "updatedAt": "2021-02-23T20:08:53.371Z", + "archived": false, + "companies": ["company A"], + "contacts": ["contact A"], + "deals": ["deal A"], + "tickets": ["ticket A"], + "properties": { + "closed_date": "2021-02-23T20:08:49.603000+00:00", + "createdate": "2021-02-23T20:08:49.603000+00:00" + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/engagements_emails_web_analytics.json b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/engagements_emails_web_analytics.json new file mode 100644 index 0000000000000..ce1fa57616f4c --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/engagements_emails_web_analytics.json @@ -0,0 +1,57 @@ +{ + "results": [ + { + "objectType": "ENGAGEMENT_EMAIL", + "objectId": "134", + "eventType": "pe8727216_airbyte_engagement_email_custom_event", + "occurredAt": "2023-12-01T21:50:11.002Z", + "id": "b850d903-254c-4df6-b159-9263b2b6eed9", + "properties": { + "hs_asset_description": "dolor architecto", + "hs_asset_type": "Donec scelerisque est sed libero tempor venenatis", + "hs_browser": "Donec lobortis nibh sit amet dictum cursus", + "hs_campaign_id": "illum quas dolor modi exercitationem", + "hs_city": "reiciendis placeat dolor placeat architecto dolor", + "hs_country": "Mauris sagittis sem eu neque pellentesque gravida", + "hs_hs_device_name": "elit. molestias, elit. amet", + "hs_device_type": "accusantium dolor sit elit. veniam reprehenderit", + "hs_title": "In egestas nisl commodo pellentesque facilisis", + "hs_form_correlation_id": "Pellentesque tincidunt purus id rhoncus blandit", + "hs_element_class": "Nulla luctus tortor at fermentum aliquet", + "hs_element_id": "culpa! sit ipsum sit Lorem consectetur quas odit", + "hs_element_text": "Hic molestias, Lorem ipsum, possimus adipisicing", + "hs_language": "officiis adipisicing", + "hs_document_id": "Proin sed lacus eget sapien viverra vestibulum vel sit amet massa", + "hs_presentation_id": "Mauris et nisl a metus porttitor molestie id viverra ex", + "hs_user_id": "Quisque consequat nunc at eleifend tempus", + "hs_link_href": "officiis exercitationem adipisicing odit dolor", + "hs_operating_system": "magnam, molestias,", + "hs_operating_version": "Lorem ipsum culpa! illum elit. esse esse officiis", + "hs_page_content_type": "elit. libero Lorem", + "hs_page_id": "magnam, magnam,", + "hs_page_title": "Praesent auctor sem et purus facilisis, at volutpat lorem tristique", + "hs_page_url": "accusantium quas architecto ipsum ipsum possimus", + "hs_parent_module_id": "Aliquam eleifend ex in ligula gravida mollis", + "hs_referrer": "Sed nec eros quis sem euismod tempor", + "hs_region": "consectetur ipsum, architecto ipsum Lorem nobis", + "hs_url": "Aenean feugiat quam in urna iaculis, vitae ultrices metus scelerisque", + "hs_screen_height": "Donec laoreet est ut lorem viverra tempus", + "hs_screen_width": "Curabitur sit amet augue luctus, congue erat congue, vestibulum lectus", + "hs_touchpoint_source": "dolor", + "hs_tracking_name": "quas ipsum amet illum molestias,", + "hs_user_agent": "Aliquam eget libero consectetur, consectetur sem ac, vehicula orci", + "hs_utm_campaign": "magnam, Lorem modi culpa!", + "hs_utm_content": "Proin pulvinar nulla sed lacus venenatis blandit", + "hs_utm_medium": "placeat dolor dolor consectetur elit.", + "hs_utm_source": "Duis non erat at justo euismod lobortis", + "hs_utm_term": "Proin sit amet leo tincidunt, ultrices dolor ac, hendrerit mi", + "hs_base_url": "Maecenas ut risus eget ligula pharetra pellentesque sed et mi", + "hs_form_id": "Quisque cursus sem sit amet libero feugiat rhoncus", + "hs_form_type": "Pellentesque tristique velit at velit lacinia vulputate", + "hs_url_domain": "Quisque rhoncus diam at ex eleifend aliquet", + "hs_url_path": "Suspendisse blandit sem in consectetur imperdiet", + "hs_visitor_type": "Nunc porttitor metus a mauris commodo, id dictum lectus feugiat" + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/engagements_meetings.json b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/engagements_meetings.json new file mode 100644 index 0000000000000..94f86f1759efd --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/engagements_meetings.json @@ -0,0 +1,18 @@ +{ + "results": [ + { + "id": "312929682", + "createdAt": "2021-02-23T20:08:49.603Z", + "updatedAt": "2021-02-23T20:08:53.371Z", + "archived": false, + "companies": ["company A"], + "contacts": ["contact A"], + "deals": ["deal A"], + "tickets": ["ticket A"], + "properties": { + "closed_date": "2021-02-23T20:08:49.603000+00:00", + "createdate": "2021-02-23T20:08:49.603000+00:00" + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/engagements_meetings_web_analytics.json b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/engagements_meetings_web_analytics.json new file mode 100644 index 0000000000000..299d706c6b284 --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/engagements_meetings_web_analytics.json @@ -0,0 +1,57 @@ +{ + "results": [ + { + "objectType": "ENGAGEMENT_MEETING", + "objectId": "135", + "eventType": "pe8727216_airbyte_engagement_meeting_custom_event", + "occurredAt": "2023-12-01T21:50:12.001Z", + "id": "b850d903-254c-4df6-b159-9263c2b6eed8", + "properties": { + "hs_asset_description": "dolor architecto", + "hs_asset_type": "Donec scelerisque est sed libero tempor venenatis", + "hs_browser": "Donec lobortis nibh sit amet dictum cursus", + "hs_campaign_id": "illum quas dolor modi exercitationem", + "hs_city": "reiciendis placeat dolor placeat architecto dolor", + "hs_country": "Mauris sagittis sem eu neque pellentesque gravida", + "hs_hs_device_name": "elit. molestias, elit. amet", + "hs_device_type": "accusantium dolor sit elit. veniam reprehenderit", + "hs_title": "In egestas nisl commodo pellentesque facilisis", + "hs_form_correlation_id": "Pellentesque tincidunt purus id rhoncus blandit", + "hs_element_class": "Nulla luctus tortor at fermentum aliquet", + "hs_element_id": "culpa! sit ipsum sit Lorem consectetur quas odit", + "hs_element_text": "Hic molestias, Lorem ipsum, possimus adipisicing", + "hs_language": "officiis adipisicing", + "hs_document_id": "Proin sed lacus eget sapien viverra vestibulum vel sit amet massa", + "hs_presentation_id": "Mauris et nisl a metus porttitor molestie id viverra ex", + "hs_user_id": "Quisque consequat nunc at eleifend tempus", + "hs_link_href": "officiis exercitationem adipisicing odit dolor", + "hs_operating_system": "magnam, molestias,", + "hs_operating_version": "Lorem ipsum culpa! illum elit. esse esse officiis", + "hs_page_content_type": "elit. libero Lorem", + "hs_page_id": "magnam, magnam,", + "hs_page_title": "Praesent auctor sem et purus facilisis, at volutpat lorem tristique", + "hs_page_url": "accusantium quas architecto ipsum ipsum possimus", + "hs_parent_module_id": "Aliquam eleifend ex in ligula gravida mollis", + "hs_referrer": "Sed nec eros quis sem euismod tempor", + "hs_region": "consectetur ipsum, architecto ipsum Lorem nobis", + "hs_url": "Aenean feugiat quam in urna iaculis, vitae ultrices metus scelerisque", + "hs_screen_height": "Donec laoreet est ut lorem viverra tempus", + "hs_screen_width": "Curabitur sit amet augue luctus, congue erat congue, vestibulum lectus", + "hs_touchpoint_source": "dolor", + "hs_tracking_name": "quas ipsum amet illum molestias,", + "hs_user_agent": "Aliquam eget libero consectetur, consectetur sem ac, vehicula orci", + "hs_utm_campaign": "magnam, Lorem modi culpa!", + "hs_utm_content": "Proin pulvinar nulla sed lacus venenatis blandit", + "hs_utm_medium": "placeat dolor dolor consectetur elit.", + "hs_utm_source": "Duis non erat at justo euismod lobortis", + "hs_utm_term": "Proin sit amet leo tincidunt, ultrices dolor ac, hendrerit mi", + "hs_base_url": "Maecenas ut risus eget ligula pharetra pellentesque sed et mi", + "hs_form_id": "Quisque cursus sem sit amet libero feugiat rhoncus", + "hs_form_type": "Pellentesque tristique velit at velit lacinia vulputate", + "hs_url_domain": "Quisque rhoncus diam at ex eleifend aliquet", + "hs_url_path": "Suspendisse blandit sem in consectetur imperdiet", + "hs_visitor_type": "Nunc porttitor metus a mauris commodo, id dictum lectus feugiat" + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/engagements_notes.json b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/engagements_notes.json new file mode 100644 index 0000000000000..e18a492f26104 --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/engagements_notes.json @@ -0,0 +1,18 @@ +{ + "results": [ + { + "id": "312929690", + "createdAt": "2021-02-23T20:08:49.603Z", + "updatedAt": "2021-02-23T20:08:53.371Z", + "archived": false, + "companies": ["company A"], + "contacts": ["contact A"], + "deals": ["deal A"], + "tickets": ["ticket A"], + "properties": { + "closed_date": "2021-02-23T20:08:49.603000+00:00", + "createdate": "2021-02-23T20:08:49.603000+00:00" + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/engagements_notes_web_analytics.json b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/engagements_notes_web_analytics.json new file mode 100644 index 0000000000000..7bcf0d63137ac --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/engagements_notes_web_analytics.json @@ -0,0 +1,57 @@ +{ + "results": [ + { + "objectType": "ENGAGEMENT_NOTE", + "objectId": "146", + "eventType": "pe8727216_airbyte_engagement_note_custom_event", + "occurredAt": "2023-12-01T21:50:14.001Z", + "id": "b850d903-254c-4df6-b159-9233c2b6eed8", + "properties": { + "hs_asset_description": "dolor architecto", + "hs_asset_type": "Donec scelerisque est sed libero tempor venenatis", + "hs_browser": "Donec lobortis nibh sit amet dictum cursus", + "hs_campaign_id": "illum quas dolor modi exercitationem", + "hs_city": "reiciendis placeat dolor placeat architecto dolor", + "hs_country": "Mauris sagittis sem eu neque pellentesque gravida", + "hs_hs_device_name": "elit. molestias, elit. amet", + "hs_device_type": "accusantium dolor sit elit. veniam reprehenderit", + "hs_title": "In egestas nisl commodo pellentesque facilisis", + "hs_form_correlation_id": "Pellentesque tincidunt purus id rhoncus blandit", + "hs_element_class": "Nulla luctus tortor at fermentum aliquet", + "hs_element_id": "culpa! sit ipsum sit Lorem consectetur quas odit", + "hs_element_text": "Hic molestias, Lorem ipsum, possimus adipisicing", + "hs_language": "officiis adipisicing", + "hs_document_id": "Proin sed lacus eget sapien viverra vestibulum vel sit amet massa", + "hs_presentation_id": "Mauris et nisl a metus porttitor molestie id viverra ex", + "hs_user_id": "Quisque consequat nunc at eleifend tempus", + "hs_link_href": "officiis exercitationem adipisicing odit dolor", + "hs_operating_system": "magnam, molestias,", + "hs_operating_version": "Lorem ipsum culpa! illum elit. esse esse officiis", + "hs_page_content_type": "elit. libero Lorem", + "hs_page_id": "magnam, magnam,", + "hs_page_title": "Praesent auctor sem et purus facilisis, at volutpat lorem tristique", + "hs_page_url": "accusantium quas architecto ipsum ipsum possimus", + "hs_parent_module_id": "Aliquam eleifend ex in ligula gravida mollis", + "hs_referrer": "Sed nec eros quis sem euismod tempor", + "hs_region": "consectetur ipsum, architecto ipsum Lorem nobis", + "hs_url": "Aenean feugiat quam in urna iaculis, vitae ultrices metus scelerisque", + "hs_screen_height": "Donec laoreet est ut lorem viverra tempus", + "hs_screen_width": "Curabitur sit amet augue luctus, congue erat congue, vestibulum lectus", + "hs_touchpoint_source": "dolor", + "hs_tracking_name": "quas ipsum amet illum molestias,", + "hs_user_agent": "Aliquam eget libero consectetur, consectetur sem ac, vehicula orci", + "hs_utm_campaign": "magnam, Lorem modi culpa!", + "hs_utm_content": "Proin pulvinar nulla sed lacus venenatis blandit", + "hs_utm_medium": "placeat dolor dolor consectetur elit.", + "hs_utm_source": "Duis non erat at justo euismod lobortis", + "hs_utm_term": "Proin sit amet leo tincidunt, ultrices dolor ac, hendrerit mi", + "hs_base_url": "Maecenas ut risus eget ligula pharetra pellentesque sed et mi", + "hs_form_id": "Quisque cursus sem sit amet libero feugiat rhoncus", + "hs_form_type": "Pellentesque tristique velit at velit lacinia vulputate", + "hs_url_domain": "Quisque rhoncus diam at ex eleifend aliquet", + "hs_url_path": "Suspendisse blandit sem in consectetur imperdiet", + "hs_visitor_type": "Nunc porttitor metus a mauris commodo, id dictum lectus feugiat" + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/engagements_tasks.json b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/engagements_tasks.json new file mode 100644 index 0000000000000..8730e598fc4bd --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/engagements_tasks.json @@ -0,0 +1,18 @@ +{ + "results": [ + { + "id": "312929661", + "createdAt": "2021-02-23T20:08:49.603Z", + "updatedAt": "2021-02-23T20:08:53.371Z", + "archived": false, + "companies": ["company A"], + "contacts": ["contact A"], + "deals": ["deal A"], + "tickets": ["ticket A"], + "properties": { + "closed_date": "2021-02-23T20:08:49.603000+00:00", + "createdate": "2021-02-23T20:08:49.603000+00:00" + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/engagements_tasks_web_analytics.json b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/engagements_tasks_web_analytics.json new file mode 100644 index 0000000000000..462586dae566b --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/engagements_tasks_web_analytics.json @@ -0,0 +1,57 @@ +{ + "results": [ + { + "objectType": "ENGAGEMENT_TASK", + "objectId": "196", + "eventType": "pe8727216_airbyte_engagement_task_custom_event", + "occurredAt": "2023-12-01T21:50:14.301Z", + "id": "b850d903-254c-4df6-b159-9233d2b6eed8", + "properties": { + "hs_asset_description": "dolor architecto", + "hs_asset_type": "Donec scelerisque est sed libero tempor venenatis", + "hs_browser": "Donec lobortis nibh sit amet dictum cursus", + "hs_campaign_id": "illum quas dolor modi exercitationem", + "hs_city": "reiciendis placeat dolor placeat architecto dolor", + "hs_country": "Mauris sagittis sem eu neque pellentesque gravida", + "hs_hs_device_name": "elit. molestias, elit. amet", + "hs_device_type": "accusantium dolor sit elit. veniam reprehenderit", + "hs_title": "In egestas nisl commodo pellentesque facilisis", + "hs_form_correlation_id": "Pellentesque tincidunt purus id rhoncus blandit", + "hs_element_class": "Nulla luctus tortor at fermentum aliquet", + "hs_element_id": "culpa! sit ipsum sit Lorem consectetur quas odit", + "hs_element_text": "Hic molestias, Lorem ipsum, possimus adipisicing", + "hs_language": "officiis adipisicing", + "hs_document_id": "Proin sed lacus eget sapien viverra vestibulum vel sit amet massa", + "hs_presentation_id": "Mauris et nisl a metus porttitor molestie id viverra ex", + "hs_user_id": "Quisque consequat nunc at eleifend tempus", + "hs_link_href": "officiis exercitationem adipisicing odit dolor", + "hs_operating_system": "magnam, molestias,", + "hs_operating_version": "Lorem ipsum culpa! illum elit. esse esse officiis", + "hs_page_content_type": "elit. libero Lorem", + "hs_page_id": "magnam, magnam,", + "hs_page_title": "Praesent auctor sem et purus facilisis, at volutpat lorem tristique", + "hs_page_url": "accusantium quas architecto ipsum ipsum possimus", + "hs_parent_module_id": "Aliquam eleifend ex in ligula gravida mollis", + "hs_referrer": "Sed nec eros quis sem euismod tempor", + "hs_region": "consectetur ipsum, architecto ipsum Lorem nobis", + "hs_url": "Aenean feugiat quam in urna iaculis, vitae ultrices metus scelerisque", + "hs_screen_height": "Donec laoreet est ut lorem viverra tempus", + "hs_screen_width": "Curabitur sit amet augue luctus, congue erat congue, vestibulum lectus", + "hs_touchpoint_source": "dolor", + "hs_tracking_name": "quas ipsum amet illum molestias,", + "hs_user_agent": "Aliquam eget libero consectetur, consectetur sem ac, vehicula orci", + "hs_utm_campaign": "magnam, Lorem modi culpa!", + "hs_utm_content": "Proin pulvinar nulla sed lacus venenatis blandit", + "hs_utm_medium": "placeat dolor dolor consectetur elit.", + "hs_utm_source": "Duis non erat at justo euismod lobortis", + "hs_utm_term": "Proin sit amet leo tincidunt, ultrices dolor ac, hendrerit mi", + "hs_base_url": "Maecenas ut risus eget ligula pharetra pellentesque sed et mi", + "hs_form_id": "Quisque cursus sem sit amet libero feugiat rhoncus", + "hs_form_type": "Pellentesque tristique velit at velit lacinia vulputate", + "hs_url_domain": "Quisque rhoncus diam at ex eleifend aliquet", + "hs_url_path": "Suspendisse blandit sem in consectetur imperdiet", + "hs_visitor_type": "Nunc porttitor metus a mauris commodo, id dictum lectus feugiat" + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/goals.json b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/goals.json new file mode 100644 index 0000000000000..19a5235e314e8 --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/goals.json @@ -0,0 +1,14 @@ +{ + "results": [ + { + "id": "312929590", + "createdAt": "2021-02-23T20:08:49.603Z", + "updatedAt": "2021-02-23T20:08:53.371Z", + "archived": false, + "properties": { + "closed_date": "2021-02-23T20:08:49.603000+00:00", + "createdate": "2021-02-23T20:08:49.603000+00:00" + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/goals_web_analytics.json b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/goals_web_analytics.json new file mode 100644 index 0000000000000..c7097e531d7a8 --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/goals_web_analytics.json @@ -0,0 +1,57 @@ +{ + "results": [ + { + "objectType": "GOAL", + "objectId": "155", + "eventType": "pe8727216_airbyte_goal_custom_event", + "occurredAt": "2023-12-01T21:50:11.809Z", + "id": "b850d903-254c-4df6-b159-9263b2b7eee2", + "properties": { + "hs_asset_description": "dolor architecto", + "hs_asset_type": "Donec scelerisque est sed libero tempor venenatis", + "hs_browser": "Donec lobortis nibh sit amet dictum cursus", + "hs_campaign_id": "illum quas dolor modi exercitationem", + "hs_city": "reiciendis placeat dolor placeat architecto dolor", + "hs_country": "Mauris sagittis sem eu neque pellentesque gravida", + "hs_hs_device_name": "elit. molestias, elit. amet", + "hs_device_type": "accusantium dolor sit elit. veniam reprehenderit", + "hs_title": "In egestas nisl commodo pellentesque facilisis", + "hs_form_correlation_id": "Pellentesque tincidunt purus id rhoncus blandit", + "hs_element_class": "Nulla luctus tortor at fermentum aliquet", + "hs_element_id": "culpa! sit ipsum sit Lorem consectetur quas odit", + "hs_element_text": "Hic molestias, Lorem ipsum, possimus adipisicing", + "hs_language": "officiis adipisicing", + "hs_document_id": "Proin sed lacus eget sapien viverra vestibulum vel sit amet massa", + "hs_presentation_id": "Mauris et nisl a metus porttitor molestie id viverra ex", + "hs_user_id": "Quisque consequat nunc at eleifend tempus", + "hs_link_href": "officiis exercitationem adipisicing odit dolor", + "hs_operating_system": "magnam, molestias,", + "hs_operating_version": "Lorem ipsum culpa! illum elit. esse esse officiis", + "hs_page_content_type": "elit. libero Lorem", + "hs_page_id": "magnam, magnam,", + "hs_page_title": "Praesent auctor sem et purus facilisis, at volutpat lorem tristique", + "hs_page_url": "accusantium quas architecto ipsum ipsum possimus", + "hs_parent_module_id": "Aliquam eleifend ex in ligula gravida mollis", + "hs_referrer": "Sed nec eros quis sem euismod tempor", + "hs_region": "consectetur ipsum, architecto ipsum Lorem nobis", + "hs_url": "Aenean feugiat quam in urna iaculis, vitae ultrices metus scelerisque", + "hs_screen_height": "Donec laoreet est ut lorem viverra tempus", + "hs_screen_width": "Curabitur sit amet augue luctus, congue erat congue, vestibulum lectus", + "hs_touchpoint_source": "dolor", + "hs_tracking_name": "quas ipsum amet illum molestias,", + "hs_user_agent": "Aliquam eget libero consectetur, consectetur sem ac, vehicula orci", + "hs_utm_campaign": "magnam, Lorem modi culpa!", + "hs_utm_content": "Proin pulvinar nulla sed lacus venenatis blandit", + "hs_utm_medium": "placeat dolor dolor consectetur elit.", + "hs_utm_source": "Duis non erat at justo euismod lobortis", + "hs_utm_term": "Proin sit amet leo tincidunt, ultrices dolor ac, hendrerit mi", + "hs_base_url": "Maecenas ut risus eget ligula pharetra pellentesque sed et mi", + "hs_form_id": "Quisque cursus sem sit amet libero feugiat rhoncus", + "hs_form_type": "Pellentesque tristique velit at velit lacinia vulputate", + "hs_url_domain": "Quisque rhoncus diam at ex eleifend aliquet", + "hs_url_path": "Suspendisse blandit sem in consectetur imperdiet", + "hs_visitor_type": "Nunc porttitor metus a mauris commodo, id dictum lectus feugiat" + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/line_items.json b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/line_items.json new file mode 100644 index 0000000000000..e5090b5a28b14 --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/line_items.json @@ -0,0 +1,14 @@ +{ + "results": [ + { + "id": "312929581", + "createdAt": "2021-02-23T20:08:49.603Z", + "updatedAt": "2021-02-23T20:08:53.371Z", + "archived": false, + "properties": { + "closed_date": "2021-02-23T20:08:49.603000+00:00", + "createdate": "2021-02-23T20:08:49.603000+00:00" + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/line_items_web_analytics.json b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/line_items_web_analytics.json new file mode 100644 index 0000000000000..751b909f5a5bd --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/line_items_web_analytics.json @@ -0,0 +1,57 @@ +{ + "results": [ + { + "objectType": "LINE_ITEM", + "objectId": "154", + "eventType": "pe8727216_airbyte_line_item_custom_event", + "occurredAt": "2023-12-01T21:50:11.808Z", + "id": "b850d903-254c-4df6-b159-9263b2b7eee1", + "properties": { + "hs_asset_description": "dolor architecto", + "hs_asset_type": "Donec scelerisque est sed libero tempor venenatis", + "hs_browser": "Donec lobortis nibh sit amet dictum cursus", + "hs_campaign_id": "illum quas dolor modi exercitationem", + "hs_city": "reiciendis placeat dolor placeat architecto dolor", + "hs_country": "Mauris sagittis sem eu neque pellentesque gravida", + "hs_hs_device_name": "elit. molestias, elit. amet", + "hs_device_type": "accusantium dolor sit elit. veniam reprehenderit", + "hs_title": "In egestas nisl commodo pellentesque facilisis", + "hs_form_correlation_id": "Pellentesque tincidunt purus id rhoncus blandit", + "hs_element_class": "Nulla luctus tortor at fermentum aliquet", + "hs_element_id": "culpa! sit ipsum sit Lorem consectetur quas odit", + "hs_element_text": "Hic molestias, Lorem ipsum, possimus adipisicing", + "hs_language": "officiis adipisicing", + "hs_document_id": "Proin sed lacus eget sapien viverra vestibulum vel sit amet massa", + "hs_presentation_id": "Mauris et nisl a metus porttitor molestie id viverra ex", + "hs_user_id": "Quisque consequat nunc at eleifend tempus", + "hs_link_href": "officiis exercitationem adipisicing odit dolor", + "hs_operating_system": "magnam, molestias,", + "hs_operating_version": "Lorem ipsum culpa! illum elit. esse esse officiis", + "hs_page_content_type": "elit. libero Lorem", + "hs_page_id": "magnam, magnam,", + "hs_page_title": "Praesent auctor sem et purus facilisis, at volutpat lorem tristique", + "hs_page_url": "accusantium quas architecto ipsum ipsum possimus", + "hs_parent_module_id": "Aliquam eleifend ex in ligula gravida mollis", + "hs_referrer": "Sed nec eros quis sem euismod tempor", + "hs_region": "consectetur ipsum, architecto ipsum Lorem nobis", + "hs_url": "Aenean feugiat quam in urna iaculis, vitae ultrices metus scelerisque", + "hs_screen_height": "Donec laoreet est ut lorem viverra tempus", + "hs_screen_width": "Curabitur sit amet augue luctus, congue erat congue, vestibulum lectus", + "hs_touchpoint_source": "dolor", + "hs_tracking_name": "quas ipsum amet illum molestias,", + "hs_user_agent": "Aliquam eget libero consectetur, consectetur sem ac, vehicula orci", + "hs_utm_campaign": "magnam, Lorem modi culpa!", + "hs_utm_content": "Proin pulvinar nulla sed lacus venenatis blandit", + "hs_utm_medium": "placeat dolor dolor consectetur elit.", + "hs_utm_source": "Duis non erat at justo euismod lobortis", + "hs_utm_term": "Proin sit amet leo tincidunt, ultrices dolor ac, hendrerit mi", + "hs_base_url": "Maecenas ut risus eget ligula pharetra pellentesque sed et mi", + "hs_form_id": "Quisque cursus sem sit amet libero feugiat rhoncus", + "hs_form_type": "Pellentesque tristique velit at velit lacinia vulputate", + "hs_url_domain": "Quisque rhoncus diam at ex eleifend aliquet", + "hs_url_path": "Suspendisse blandit sem in consectetur imperdiet", + "hs_visitor_type": "Nunc porttitor metus a mauris commodo, id dictum lectus feugiat" + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/owners_archived.json b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/owners_archived.json new file mode 100644 index 0000000000000..28851eafbb2d6 --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/owners_archived.json @@ -0,0 +1,14 @@ +{ + "results": [ + { + "id": "52550153", + "email": "integration-test@airbyte.io", + "firstName": "Team-1", + "lastName": "Airbyte", + "userId": 12282590, + "createdAt": "2020-10-28T21:17:56.082Z", + "updatedAt": "2023-01-31T00:25:34.448Z", + "archived": true + } + ] +} diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/products.json b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/products.json new file mode 100644 index 0000000000000..70144a3d6c36d --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/products.json @@ -0,0 +1,14 @@ +{ + "results": [ + { + "id": "312929580", + "createdAt": "2021-02-23T20:08:49.603Z", + "updatedAt": "2021-02-23T20:08:53.371Z", + "archived": false, + "properties": { + "closed_date": "2021-02-23T20:08:49.603000+00:00", + "createdate": "2021-02-23T20:08:49.603000+00:00" + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/products_web_analytics.json b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/products_web_analytics.json new file mode 100644 index 0000000000000..c958901eea56e --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/products_web_analytics.json @@ -0,0 +1,57 @@ +{ + "results": [ + { + "objectType": "PRODUCT", + "objectId": "153", + "eventType": "pe8727216_airbyte_product_custom_event", + "occurredAt": "2023-12-01T21:50:11.799Z", + "id": "b850d903-254c-4df6-b159-9263b2b7eed3", + "properties": { + "hs_asset_description": "dolor architecto", + "hs_asset_type": "Donec scelerisque est sed libero tempor venenatis", + "hs_browser": "Donec lobortis nibh sit amet dictum cursus", + "hs_campaign_id": "illum quas dolor modi exercitationem", + "hs_city": "reiciendis placeat dolor placeat architecto dolor", + "hs_country": "Mauris sagittis sem eu neque pellentesque gravida", + "hs_hs_device_name": "elit. molestias, elit. amet", + "hs_device_type": "accusantium dolor sit elit. veniam reprehenderit", + "hs_title": "In egestas nisl commodo pellentesque facilisis", + "hs_form_correlation_id": "Pellentesque tincidunt purus id rhoncus blandit", + "hs_element_class": "Nulla luctus tortor at fermentum aliquet", + "hs_element_id": "culpa! sit ipsum sit Lorem consectetur quas odit", + "hs_element_text": "Hic molestias, Lorem ipsum, possimus adipisicing", + "hs_language": "officiis adipisicing", + "hs_document_id": "Proin sed lacus eget sapien viverra vestibulum vel sit amet massa", + "hs_presentation_id": "Mauris et nisl a metus porttitor molestie id viverra ex", + "hs_user_id": "Quisque consequat nunc at eleifend tempus", + "hs_link_href": "officiis exercitationem adipisicing odit dolor", + "hs_operating_system": "magnam, molestias,", + "hs_operating_version": "Lorem ipsum culpa! illum elit. esse esse officiis", + "hs_page_content_type": "elit. libero Lorem", + "hs_page_id": "magnam, magnam,", + "hs_page_title": "Praesent auctor sem et purus facilisis, at volutpat lorem tristique", + "hs_page_url": "accusantium quas architecto ipsum ipsum possimus", + "hs_parent_module_id": "Aliquam eleifend ex in ligula gravida mollis", + "hs_referrer": "Sed nec eros quis sem euismod tempor", + "hs_region": "consectetur ipsum, architecto ipsum Lorem nobis", + "hs_url": "Aenean feugiat quam in urna iaculis, vitae ultrices metus scelerisque", + "hs_screen_height": "Donec laoreet est ut lorem viverra tempus", + "hs_screen_width": "Curabitur sit amet augue luctus, congue erat congue, vestibulum lectus", + "hs_touchpoint_source": "dolor", + "hs_tracking_name": "quas ipsum amet illum molestias,", + "hs_user_agent": "Aliquam eget libero consectetur, consectetur sem ac, vehicula orci", + "hs_utm_campaign": "magnam, Lorem modi culpa!", + "hs_utm_content": "Proin pulvinar nulla sed lacus venenatis blandit", + "hs_utm_medium": "placeat dolor dolor consectetur elit.", + "hs_utm_source": "Duis non erat at justo euismod lobortis", + "hs_utm_term": "Proin sit amet leo tincidunt, ultrices dolor ac, hendrerit mi", + "hs_base_url": "Maecenas ut risus eget ligula pharetra pellentesque sed et mi", + "hs_form_id": "Quisque cursus sem sit amet libero feugiat rhoncus", + "hs_form_type": "Pellentesque tristique velit at velit lacinia vulputate", + "hs_url_domain": "Quisque rhoncus diam at ex eleifend aliquet", + "hs_url_path": "Suspendisse blandit sem in consectetur imperdiet", + "hs_visitor_type": "Nunc porttitor metus a mauris commodo, id dictum lectus feugiat" + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/properties.json b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/properties.json new file mode 100644 index 0000000000000..fb9f0a76f63e3 --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/properties.json @@ -0,0 +1,34 @@ +[ + { + "name": "closed_date", + "label": "Close date", + "description": "The date the ticket was closed", + "groupName": "ticketinformation", + "type": "datetime", + "fieldType": "text", + "hidden": false, + "options": [], + "calculated": false, + "externalOptions": false, + "isCustomizedDefault": false, + "deleted": null, + "createdAt": null, + "updatedAt": null, + "displayOrder": -1, + "readOnlyValue": false, + "readOnlyDefinition": true, + "mutableDefinitionNotDeletable": false, + "favorited": false, + "favoritedOrder": -1, + "displayMode": "current_value", + "showCurrencySymbol": null, + "createdUserId": null, + "textDisplayHint": null, + "numberDisplayHint": null, + "optionsAreMutable": null, + "referencedObjectType": null, + "formField": false, + "hubspotDefined": true, + "updatedUserId": null + } +] diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/tickets.json b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/tickets.json new file mode 100644 index 0000000000000..db24185aa477e --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/tickets.json @@ -0,0 +1,14 @@ +{ + "results": [ + { + "id": "312929579", + "createdAt": "2021-02-23T20:08:49.603Z", + "updatedAt": "2021-02-23T20:08:53.371Z", + "archived": false, + "properties": { + "closed_date": "2021-02-23T20:08:49.603000+00:00", + "createdate": "2021-02-23T20:08:49.603000+00:00" + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/tickets_web_analytics.json b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/tickets_web_analytics.json new file mode 100644 index 0000000000000..655fb7107647f --- /dev/null +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/resource/http/response/tickets_web_analytics.json @@ -0,0 +1,57 @@ +{ + "results": [ + { + "objectType": "TICKET", + "objectId": "151", + "eventType": "pe8727216_airbyte_ticket_custom_event", + "occurredAt": "2023-12-01T21:50:11.797Z", + "id": "b850d903-254c-4df6-b159-9263b2b7eed0", + "properties": { + "hs_asset_description": "dolor architecto", + "hs_asset_type": "Donec scelerisque est sed libero tempor venenatis", + "hs_browser": "Donec lobortis nibh sit amet dictum cursus", + "hs_campaign_id": "illum quas dolor modi exercitationem", + "hs_city": "reiciendis placeat dolor placeat architecto dolor", + "hs_country": "Mauris sagittis sem eu neque pellentesque gravida", + "hs_hs_device_name": "elit. molestias, elit. amet", + "hs_device_type": "accusantium dolor sit elit. veniam reprehenderit", + "hs_title": "In egestas nisl commodo pellentesque facilisis", + "hs_form_correlation_id": "Pellentesque tincidunt purus id rhoncus blandit", + "hs_element_class": "Nulla luctus tortor at fermentum aliquet", + "hs_element_id": "culpa! sit ipsum sit Lorem consectetur quas odit", + "hs_element_text": "Hic molestias, Lorem ipsum, possimus adipisicing", + "hs_language": "officiis adipisicing", + "hs_document_id": "Proin sed lacus eget sapien viverra vestibulum vel sit amet massa", + "hs_presentation_id": "Mauris et nisl a metus porttitor molestie id viverra ex", + "hs_user_id": "Quisque consequat nunc at eleifend tempus", + "hs_link_href": "officiis exercitationem adipisicing odit dolor", + "hs_operating_system": "magnam, molestias,", + "hs_operating_version": "Lorem ipsum culpa! illum elit. esse esse officiis", + "hs_page_content_type": "elit. libero Lorem", + "hs_page_id": "magnam, magnam,", + "hs_page_title": "Praesent auctor sem et purus facilisis, at volutpat lorem tristique", + "hs_page_url": "accusantium quas architecto ipsum ipsum possimus", + "hs_parent_module_id": "Aliquam eleifend ex in ligula gravida mollis", + "hs_referrer": "Sed nec eros quis sem euismod tempor", + "hs_region": "consectetur ipsum, architecto ipsum Lorem nobis", + "hs_url": "Aenean feugiat quam in urna iaculis, vitae ultrices metus scelerisque", + "hs_screen_height": "Donec laoreet est ut lorem viverra tempus", + "hs_screen_width": "Curabitur sit amet augue luctus, congue erat congue, vestibulum lectus", + "hs_touchpoint_source": "dolor", + "hs_tracking_name": "quas ipsum amet illum molestias,", + "hs_user_agent": "Aliquam eget libero consectetur, consectetur sem ac, vehicula orci", + "hs_utm_campaign": "magnam, Lorem modi culpa!", + "hs_utm_content": "Proin pulvinar nulla sed lacus venenatis blandit", + "hs_utm_medium": "placeat dolor dolor consectetur elit.", + "hs_utm_source": "Duis non erat at justo euismod lobortis", + "hs_utm_term": "Proin sit amet leo tincidunt, ultrices dolor ac, hendrerit mi", + "hs_base_url": "Maecenas ut risus eget ligula pharetra pellentesque sed et mi", + "hs_form_id": "Quisque cursus sem sit amet libero feugiat rhoncus", + "hs_form_type": "Pellentesque tristique velit at velit lacinia vulputate", + "hs_url_domain": "Quisque rhoncus diam at ex eleifend aliquet", + "hs_url_path": "Suspendisse blandit sem in consectetur imperdiet", + "hs_visitor_type": "Nunc porttitor metus a mauris commodo, id dictum lectus feugiat" + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-instagram/integration_tests/test_streams.py b/airbyte-integrations/connectors/source-instagram/integration_tests/test_streams.py index 89649d7662ea0..6687d9e16f0c4 100644 --- a/airbyte-integrations/connectors/source-instagram/integration_tests/test_streams.py +++ b/airbyte-integrations/connectors/source-instagram/integration_tests/test_streams.py @@ -7,7 +7,7 @@ import pendulum import pytest -from airbyte_cdk.models import AirbyteMessage, ConfiguredAirbyteCatalog, Type +from airbyte_cdk.models import AirbyteMessage, AirbyteStateBlob, ConfiguredAirbyteCatalog, Type from source_instagram.source import SourceInstagram @@ -34,10 +34,14 @@ def test_incremental_streams(self, configured_catalog, config, state): assert len(records) <= 60 - 10 - 5, "UserInsights should have less records returned when non empty STATE provided" assert states, "insights should produce states" - for state in states: - assert "user_insights" in state.state.data - assert isinstance(state.state.data["user_insights"], dict) - assert len(state.state.data["user_insights"].keys()) == 2 + for state_msg in states: + stream_name, stream_state, state_keys_count = (state_msg.state.stream.stream_descriptor.name, + state_msg.state.stream.stream_state, + len(state_msg.state.stream.stream_state.dict())) + + assert stream_name == "user_insights", f"each state message should reference 'user_insights' stream, got {stream_name} instead" + assert isinstance(stream_state, AirbyteStateBlob), f"Stream state should be type AirbyteStateBlob, got {type(stream_state)} instead" + assert state_keys_count == 2, f"Stream state should contain 2 partition keys, got {state_keys_count} instead" @staticmethod def slice_catalog(catalog: ConfiguredAirbyteCatalog, predicate: Callable[[str], bool]) -> ConfiguredAirbyteCatalog: diff --git a/airbyte-integrations/connectors/source-instagram/metadata.yaml b/airbyte-integrations/connectors/source-instagram/metadata.yaml index 15025dd3066ea..37fe352855624 100644 --- a/airbyte-integrations/connectors/source-instagram/metadata.yaml +++ b/airbyte-integrations/connectors/source-instagram/metadata.yaml @@ -7,11 +7,12 @@ data: connectorSubtype: api connectorType: source definitionId: 6acf6b55-4f1e-4fca-944e-1a3caef8aba8 - dockerImageTag: 3.0.4 + dockerImageTag: 3.0.5 dockerRepository: airbyte/source-instagram githubIssueLabel: source-instagram icon: instagram.svg license: MIT + maxSecondsBetweenMessages: 86400 name: Instagram remoteRegistries: pypi: diff --git a/airbyte-integrations/connectors/source-instagram/poetry.lock b/airbyte-integrations/connectors/source-instagram/poetry.lock index 82e6349aba768..8c619dfe31aa6 100644 --- a/airbyte-integrations/connectors/source-instagram/poetry.lock +++ b/airbyte-integrations/connectors/source-instagram/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "aiohttp" @@ -112,13 +112,13 @@ frozenlist = ">=1.1.0" [[package]] name = "airbyte-cdk" -version = "0.58.8" +version = "0.72.2" description = "A framework for writing Airbyte Connectors." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte-cdk-0.58.8.tar.gz", hash = "sha256:80cfad673302802e0f5d485879f1bd2f3679a4e3b12b2af42bd7bb37a3991a71"}, - {file = "airbyte_cdk-0.58.8-py3-none-any.whl", hash = "sha256:5b0b19745e96ba3f20683c48530d58a00be48361dfa34ec3c38cef8da03ba330"}, + {file = "airbyte-cdk-0.72.2.tar.gz", hash = "sha256:3c06ed9c1436967ffde77b51814772dbbd79745d610bc2fe400dff9c4d7a9877"}, + {file = "airbyte_cdk-0.72.2-py3-none-any.whl", hash = "sha256:8d50773fe9ffffe9be8d6c2d2fcb10c50153833053b3ef4283fcb39c544dc4b9"}, ] [package.dependencies] @@ -142,8 +142,8 @@ requests-cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] @@ -236,13 +236,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -815,13 +815,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -1061,13 +1061,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -1167,13 +1167,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -1185,15 +1185,15 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" @@ -1216,19 +1216,19 @@ test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "tes [[package]] name = "setuptools" -version = "69.1.0" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -1254,13 +1254,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -1279,13 +1279,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1493,4 +1493,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "0ca461e187737a477daf3b3609b25d12853d6e7bed738f2104d0ed027bc49ca8" +content-hash = "6ff2a5e27d9a445768a9ed72cdd449e2904a155430ab5bb36edf2fdfedcab774" diff --git a/airbyte-integrations/connectors/source-instagram/pyproject.toml b/airbyte-integrations/connectors/source-instagram/pyproject.toml index 65f3991a4b504..d95a6135ceb2f 100644 --- a/airbyte-integrations/connectors/source-instagram/pyproject.toml +++ b/airbyte-integrations/connectors/source-instagram/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "3.0.4" +version = "3.0.5" name = "source-instagram" description = "Source implementation for Instagram." authors = [ "Airbyte ",] @@ -18,7 +18,7 @@ include = "source_instagram" [tool.poetry.dependencies] python = "^3.9,<3.12" facebook-business = "==18.0.5" -airbyte-cdk = "==0.58.8" +airbyte-cdk = "^0" cached-property = "==1.5.2" [tool.poetry.scripts] diff --git a/airbyte-integrations/connectors/source-intercom/metadata.yaml b/airbyte-integrations/connectors/source-intercom/metadata.yaml index 6c5bc29357415..3bf7ee093b022 100644 --- a/airbyte-integrations/connectors/source-intercom/metadata.yaml +++ b/airbyte-integrations/connectors/source-intercom/metadata.yaml @@ -10,12 +10,13 @@ data: connectorSubtype: api connectorType: source definitionId: d8313939-3782-41b0-be29-b3ca20d8dd3a - dockerImageTag: 0.6.0 + dockerImageTag: 0.6.3 dockerRepository: airbyte/source-intercom documentationUrl: https://docs.airbyte.com/integrations/sources/intercom githubIssueLabel: source-intercom icon: intercom.svg license: MIT + maxSecondsBetweenMessages: 60 name: Intercom remoteRegistries: pypi: diff --git a/airbyte-integrations/connectors/source-intercom/poetry.lock b/airbyte-integrations/connectors/source-intercom/poetry.lock index 3afe43237c13b..57a3a3020baab 100644 --- a/airbyte-integrations/connectors/source-intercom/poetry.lock +++ b/airbyte-integrations/connectors/source-intercom/poetry.lock @@ -1,14 +1,14 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.62.0" +version = "0.74.0" description = "A framework for writing Airbyte Connectors." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "airbyte-cdk-0.62.0.tar.gz", hash = "sha256:622f56bd7101493a74f11c33a45a31c251032333989996f137cac8370873c614"}, - {file = "airbyte_cdk-0.62.0-py3-none-any.whl", hash = "sha256:b21330a566b33dbdddde33243eb9855f086ad4272e3585ca626be1225451a3b8"}, + {file = "airbyte-cdk-0.74.0.tar.gz", hash = "sha256:74241a055c205403a951383f43801067b7f451370e14d553d13d0cc476cbfff7"}, + {file = "airbyte_cdk-0.74.0-py3-none-any.whl", hash = "sha256:7e5b201d69ec0e7daab7e627dbc6add4dbba4a2f779132e86aaf6713650ff4d5"}, ] [package.dependencies] @@ -32,8 +32,8 @@ requests-cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] @@ -94,13 +94,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -457,13 +457,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -640,13 +640,13 @@ files = [ [[package]] name = "pytest" -version = "8.0.1" +version = "8.1.1" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.0.1-py3-none-any.whl", hash = "sha256:3e4f16fe1c0a9dc9d9389161c127c3edc5d810c38d6793042fb81d9f48a59fca"}, - {file = "pytest-8.0.1.tar.gz", hash = "sha256:267f6563751877d772019b13aacbe4e860d73fe8f651f28112e9ac37de7513ae"}, + {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"}, + {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"}, ] [package.dependencies] @@ -654,38 +654,38 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" -pluggy = ">=1.3.0,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} +pluggy = ">=1.4,<2.0" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-mock" -version = "3.12.0" +version = "3.14.0" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, - {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, ] [package.dependencies] -pytest = ">=5.0" +pytest = ">=6.2.5" [package.extras] dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -834,18 +834,18 @@ test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "tes [[package]] name = "setuptools" -version = "69.1.1" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.1-py3-none-any.whl", hash = "sha256:02fa291a0471b3a18b2b2481ed902af520c69e8ae0919c13da936542754b4c56"}, - {file = "setuptools-69.1.1.tar.gz", hash = "sha256:5c0806c7d9af348e6dd3777b4f4dbb42c7ad85b190104837488eab9a7c945cf8"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] @@ -872,13 +872,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -1008,4 +1008,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "892fad6a9e1ef67e204e4009a08694c4f73ee2891e8895165e0f646d72eecfec" +content-hash = "4032921e0d35ea77958d6097cbb31454596ed5d949304617dc314b1594e4012e" diff --git a/airbyte-integrations/connectors/source-intercom/pyproject.toml b/airbyte-integrations/connectors/source-intercom/pyproject.toml index 24942ecbbb9cc..256a04560e859 100644 --- a/airbyte-integrations/connectors/source-intercom/pyproject.toml +++ b/airbyte-integrations/connectors/source-intercom/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "0.6.0" +version = "0.6.3" name = "source-intercom" description = "Source implementation for Intercom Yaml." authors = [ "Airbyte ",] @@ -17,7 +17,7 @@ include = "source_intercom" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "==0.62.0" +airbyte-cdk = ">=0.62.0" [tool.poetry.scripts] source-intercom = "source_intercom.run:run" diff --git a/airbyte-integrations/connectors/source-intercom/source_intercom/components.py b/airbyte-integrations/connectors/source-intercom/source_intercom/components.py index 600ba64945b16..75eb81e807bb1 100644 --- a/airbyte-integrations/connectors/source-intercom/source_intercom/components.py +++ b/airbyte-integrations/connectors/source-intercom/source_intercom/components.py @@ -33,6 +33,7 @@ class IncrementalSingleSliceCursor(Cursor): def __post_init__(self, parameters: Mapping[str, Any]): self._state = {} + self._cursor = None self.cursor_field = InterpolatedString.create(self.cursor_field, parameters=parameters) def get_request_params( @@ -87,15 +88,29 @@ def set_initial_state(self, stream_state: StreamState): if cursor_value: self._state[cursor_field] = cursor_value self._state["prior_state"] = self._state.copy() + self._cursor = cursor_value - def close_slice(self, stream_slice: StreamSlice, most_recent_record: Optional[Record]) -> None: - latest_record = self._state if self.is_greater_than_or_equal(self._state, most_recent_record) else most_recent_record - if latest_record: - cursor_field = self.cursor_field.eval(self.config) - self._state[cursor_field] = latest_record[cursor_field] + def observe(self, stream_slice: StreamSlice, record: Record) -> None: + """ + Register a record with the cursor; the cursor instance can then use it to manage the state of the in-progress stream read. + + :param stream_slice: The current slice, which may or may not contain the most recently observed record + :param record: the most recently-read record, which the cursor can use to update the stream state. Outwardly-visible changes to the + stream state may need to be deferred depending on whether the source reliably orders records by the cursor field. + """ + record_cursor_value = record.get(self.cursor_field.eval(self.config)) + if not record_cursor_value: + return + + if self.is_greater_than_or_equal(record, self._state): + self._cursor = record_cursor_value + + def close_slice(self, stream_slice: StreamSlice) -> None: + cursor_field = self.cursor_field.eval(self.config) + self._state[cursor_field] = self._cursor def stream_slices(self) -> Iterable[Mapping[str, Any]]: - yield {} + yield StreamSlice(partition={}, cursor_slice={}) def should_be_synced(self, record: Record) -> bool: """ @@ -138,50 +153,89 @@ def __post_init__(self, parameters: Mapping[str, Any]): self.parent_sync_mode: SyncMode = SyncMode.incremental if self.parent_stream.supports_incremental is True else SyncMode.full_refresh self.substream_slice_field: str = self.parent_stream_configs[0].partition_field.eval(self.config) self.parent_field: str = self.parent_stream_configs[0].parent_key.eval(self.config) + self._parent_cursor: Optional[str] = None def set_initial_state(self, stream_state: StreamState): super().set_initial_state(stream_state=stream_state) if self.parent_stream_name in stream_state and stream_state.get(self.parent_stream_name, {}).get(self.parent_cursor_field): - parent_stream_state = {self.parent_cursor_field: stream_state[self.parent_stream_name][self.parent_cursor_field]} + parent_stream_state = { + self.parent_cursor_field: stream_state[self.parent_stream_name][self.parent_cursor_field], + } self._state[self.parent_stream_name] = parent_stream_state if "prior_state" in self._state: self._state["prior_state"][self.parent_stream_name] = parent_stream_state - def close_slice(self, stream_slice: StreamSlice, most_recent_record: Optional[Record]) -> None: - super().close_slice(stream_slice=stream_slice, most_recent_record=most_recent_record) - if self.parent_stream: - self._state[self.parent_stream_name] = self.parent_stream.state + def observe(self, stream_slice: StreamSlice, record: Record) -> None: + """ + Extended the default method to be able to track the parent STATE. + """ + + # save parent cursor value (STATE) from slice + parent_cursor = stream_slice.get(self.parent_stream_name) + if parent_cursor: + self._parent_cursor = parent_cursor.get(self.parent_cursor_field) + + # observe the substream + super().observe(stream_slice, record) + + def close_slice(self, stream_slice: StreamSlice) -> None: + super().close_slice(stream_slice=stream_slice) def stream_slices(self) -> Iterable[Mapping[str, Any]]: parent_state = (self._state or {}).get(self.parent_stream_name, {}) - slices_generator = self.read_parent_stream(self.parent_sync_mode, self.parent_cursor_field, parent_state) + slices_generator: Iterable[StreamSlice] = self.read_parent_stream(self.parent_sync_mode, self.parent_cursor_field, parent_state) yield from [slice for slice in slices_generator] if self.parent_complete_fetch else slices_generator + def track_parent_cursor(self, parent_record: dict) -> None: + """ + Tracks the Parent Stream Cursor, using `parent_cursor_field`. + """ + self._parent_cursor = parent_record.get(self.parent_cursor_field) + if self._parent_cursor: + self._state[self.parent_stream_name] = {self.parent_cursor_field: self._parent_cursor} + def read_parent_stream( - self, sync_mode: SyncMode, cursor_field: Optional[str], stream_state: Mapping[str, Any] + self, + sync_mode: SyncMode, + cursor_field: Optional[str], + stream_state: Mapping[str, Any], ) -> Iterable[Mapping[str, Any]]: + self.parent_stream.state = stream_state parent_stream_slices_gen = self.parent_stream.stream_slices( - sync_mode=sync_mode, cursor_field=cursor_field, stream_state=stream_state + sync_mode=sync_mode, + cursor_field=cursor_field, + stream_state=stream_state, ) for parent_slice in parent_stream_slices_gen: parent_records_gen = self.parent_stream.read_records( - sync_mode=sync_mode, cursor_field=cursor_field, stream_slice=parent_slice, stream_state=stream_state + sync_mode=sync_mode, + cursor_field=cursor_field, + stream_slice=parent_slice, + stream_state=stream_state, ) for parent_record in parent_records_gen: + # update parent cursor + self.track_parent_cursor(parent_record) substream_slice_value = parent_record.get(self.parent_field) if substream_slice_value: cursor_field = self.cursor_field.eval(self.config) - yield { - self.substream_slice_field: substream_slice_value, - cursor_field: self._state.get(cursor_field), - self.parent_stream_name: { - self.parent_cursor_field: self._state.get(self.parent_stream_name, {}).get(self.parent_cursor_field) + substream_cursor_value = self._state.get(cursor_field) + parent_cursor_value = self._state.get(self.parent_stream_name, {}).get(self.parent_cursor_field) + yield StreamSlice( + partition={ + self.substream_slice_field: substream_slice_value, }, - } + cursor_slice={ + cursor_field: substream_cursor_value, + self.parent_stream_name: { + self.parent_cursor_field: parent_cursor_value, + }, + }, + ) @dataclass diff --git a/airbyte-integrations/connectors/source-intercom/source_intercom/manifest.yaml b/airbyte-integrations/connectors/source-intercom/source_intercom/manifest.yaml index efb838f7ce006..5649cca6b5070 100644 --- a/airbyte-integrations/connectors/source-intercom/source_intercom/manifest.yaml +++ b/airbyte-integrations/connectors/source-intercom/source_intercom/manifest.yaml @@ -1,4 +1,4 @@ -version: "0.50.2" +version: 0.72.1 definitions: ## bases @@ -20,8 +20,9 @@ definitions: type: BearerAuthenticator api_token: "{{ config['access_token'] }}" request_headers: - # API version header - Intercom-Version: "2.10" + # There is a bug in interpolation, causing the `2.10` string to be evaluated to `2.1`, cutting off the `0`. + # the workaround is to put the `string` inside the `string`, then it's evaluated properly to `2.10` + Intercom-Version: "'2.10'" Accept: "application/json" error_handler: type: "DefaultErrorHandler" @@ -292,6 +293,16 @@ definitions: page_size: 150 conversations: $ref: "#/definitions/stream_incremental_search" + retriever: + $ref: "#/definitions/stream_incremental_search/retriever" + requester: + $ref: "#/definitions/requester_incremental_search" + request_headers: + # API version header + # There are 404 - User Not Found issue, when `2.10` is used, for certain users: + # https://github.com/airbytehq/oncall/issues/4514 + Intercom-Version: "2.9" + Accept: "application/json" $parameters: name: "conversations" path: "conversations/search" diff --git a/airbyte-integrations/connectors/source-intercom/unit_tests/test_components.py b/airbyte-integrations/connectors/source-intercom/unit_tests/test_components.py index 2da6517110ece..1a1681aabdb73 100644 --- a/airbyte-integrations/connectors/source-intercom/unit_tests/test_components.py +++ b/airbyte-integrations/connectors/source-intercom/unit_tests/test_components.py @@ -53,7 +53,8 @@ def get_requester(): def test_slicer(): date_time_dict = {"updated_at": 1662459010} slicer = IncrementalSingleSliceCursor(config={}, parameters={}, cursor_field="updated_at") - slicer.close_slice(date_time_dict, date_time_dict) + slicer.observe(date_time_dict, date_time_dict) + slicer.close_slice(date_time_dict) assert slicer.get_stream_state() == date_time_dict assert slicer.get_request_headers() == {} assert slicer.get_request_body_data() == {} @@ -95,7 +96,8 @@ def test_sub_slicer(last_record, expected, records): ) slicer.set_initial_state(expected) stream_slice = next(slicer.stream_slices()) if records else {} - slicer.close_slice(stream_slice, last_record) + slicer.observe(stream_slice, last_record) + slicer.close_slice(stream_slice) assert slicer.get_stream_state() == expected diff --git a/airbyte-integrations/connectors/source-iterable/metadata.yaml b/airbyte-integrations/connectors/source-iterable/metadata.yaml index 0c2549cf3e441..d4507f02667b8 100644 --- a/airbyte-integrations/connectors/source-iterable/metadata.yaml +++ b/airbyte-integrations/connectors/source-iterable/metadata.yaml @@ -10,12 +10,13 @@ data: connectorSubtype: api connectorType: source definitionId: 2e875208-0c0b-4ee4-9e92-1cb3156ea799 - dockerImageTag: 0.3.0 + dockerImageTag: 0.5.0 dockerRepository: airbyte/source-iterable documentationUrl: https://docs.airbyte.com/integrations/sources/iterable githubIssueLabel: source-iterable icon: iterable.svg license: MIT + maxSecondsBetweenMessages: 60 name: Iterable remoteRegistries: pypi: @@ -30,5 +31,5 @@ data: supportLevel: certified tags: - language:python - - cdk:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-iterable/poetry.lock b/airbyte-integrations/connectors/source-iterable/poetry.lock index 03b1a7d4555cc..fa3071c8281f4 100644 --- a/airbyte-integrations/connectors/source-iterable/poetry.lock +++ b/airbyte-integrations/connectors/source-iterable/poetry.lock @@ -2,39 +2,38 @@ [[package]] name = "airbyte-cdk" -version = "0.63.2" +version = "0.78.3" description = "A framework for writing Airbyte Connectors." optional = false -python-versions = ">=3.8" +python-versions = "<4.0,>=3.9" files = [ - {file = "airbyte-cdk-0.63.2.tar.gz", hash = "sha256:b2edc160f560352a816f3a266b5dfa6dfe37868add1e3a0a2628eb19ba771ed1"}, - {file = "airbyte_cdk-0.63.2-py3-none-any.whl", hash = "sha256:8698cb94514f35577123520954503cb2da407423af109dffd03644ba8b0093cd"}, + {file = "airbyte_cdk-0.78.3-py3-none-any.whl", hash = "sha256:699d61ace9f8ca4477e06af3ff1bc56856e955a444081a1701c41d94629dcd74"}, + {file = "airbyte_cdk-0.78.3.tar.gz", hash = "sha256:192c2594d0e93140a7ec635fea3d4644318faada6aa986805752adf4caf9b126"}, ] [package.dependencies] airbyte-protocol-models = "0.5.1" backoff = "*" cachetools = "*" -Deprecated = ">=1.2,<2.0" +Deprecated = ">=1.2,<1.3" dpath = ">=2.0.1,<2.1.0" genson = "1.2.2" isodate = ">=0.6.1,<0.7.0" Jinja2 = ">=3.1.2,<3.2.0" -jsonref = ">=0.2,<1.0" +jsonref = ">=0.2,<0.3" jsonschema = ">=3.2.0,<3.3.0" pendulum = "<3.0.0" pydantic = ">=1.10.8,<2.0.0" pyrate-limiter = ">=3.1.0,<3.2.0" python-dateutil = "*" -PyYAML = ">=6.0.1" +PyYAML = ">=6.0.1,<7.0.0" requests = "*" -requests-cache = "*" +requests_cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] [[package]] @@ -104,13 +103,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -380,13 +379,13 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jsonref" -version = "0.3.0" -description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." +version = "0.2" +description = "An implementation of JSON Reference for Python" optional = false -python-versions = ">=3.3,<4.0" +python-versions = "*" files = [ - {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, - {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, ] [[package]] @@ -481,13 +480,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -699,17 +698,17 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xm [[package]] name = "pytest-mock" -version = "3.12.0" +version = "3.14.0" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, - {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, ] [package.dependencies] -pytest = ">=5.0" +pytest = ">=6.2.5" [package.extras] dev = ["pre-commit", "pytest-asyncio", "tox"] @@ -852,22 +851,20 @@ yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" -version = "1.11.0" +version = "1.12.1" description = "Mock out responses from the requests package" optional = false -python-versions = "*" +python-versions = ">=3.5" files = [ - {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"}, - {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"}, + {file = "requests-mock-1.12.1.tar.gz", hash = "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401"}, + {file = "requests_mock-1.12.1-py2.py3-none-any.whl", hash = "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563"}, ] [package.dependencies] -requests = ">=2.3,<3" -six = "*" +requests = ">=2.22,<3" [package.extras] fixture = ["fixtures"] -test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] [[package]] name = "responses" @@ -891,19 +888,19 @@ tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asy [[package]] name = "setuptools" -version = "69.1.0" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -929,24 +926,24 @@ files = [ [[package]] name = "types-pyyaml" -version = "6.0.12.12" +version = "6.0.12.20240311" description = "Typing stubs for PyYAML" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "types-PyYAML-6.0.12.12.tar.gz", hash = "sha256:334373d392fde0fdf95af5c3f1661885fa10c52167b14593eb856289e1855062"}, - {file = "types_PyYAML-6.0.12.12-py3-none-any.whl", hash = "sha256:c05bc6c158facb0676674b7f11fe3960db4f389718e19e62bd2b84d6205cfd24"}, + {file = "types-PyYAML-6.0.12.20240311.tar.gz", hash = "sha256:a9e0f0f88dc835739b0c1ca51ee90d04ca2a897a71af79de9aec5f38cb0a5342"}, + {file = "types_PyYAML-6.0.12.20240311-py3-none-any.whl", hash = "sha256:b845b06a1c7e54b8e5b4c683043de0d9caf205e7434b3edc678ff2411979b8f6"}, ] [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -1076,4 +1073,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "21991266470e2efceb259f4d42e1cdc62654126c7ed975ac9e0cbaf43f588347" +content-hash = "404417cf9484ff88cd17961f867f011e7ddbb6c4e909099b3452423e381749c4" diff --git a/airbyte-integrations/connectors/source-iterable/pyproject.toml b/airbyte-integrations/connectors/source-iterable/pyproject.toml index 9c92df9262176..233a959cef37d 100644 --- a/airbyte-integrations/connectors/source-iterable/pyproject.toml +++ b/airbyte-integrations/connectors/source-iterable/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "0.3.0" +version = "0.5.0" name = "source-iterable" description = "Source implementation for Iterable." authors = [ "Airbyte ",] @@ -18,7 +18,7 @@ include = "source_iterable" [tool.poetry.dependencies] python = "^3.9,<3.12" pendulum = "==2.1.2" -airbyte-cdk = "==0.63.2" +airbyte-cdk = "^0" requests = "==2.31.0" python-dateutil = "==2.8.2" diff --git a/airbyte-integrations/connectors/source-iterable/source_iterable/components.py b/airbyte-integrations/connectors/source-iterable/source_iterable/components.py new file mode 100644 index 0000000000000..323fc1450b347 --- /dev/null +++ b/airbyte-integrations/connectors/source-iterable/source_iterable/components.py @@ -0,0 +1,41 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +import json +from dataclasses import dataclass +from io import StringIO + +import requests +from airbyte_cdk.sources.declarative.extractors.dpath_extractor import DpathExtractor +from airbyte_cdk.sources.declarative.types import Config, Record, StreamSlice, StreamState + + +@dataclass +class XJsonRecordExtractor(DpathExtractor): + def extract_records(self, response: requests.Response) -> list[Record]: + return [json.loads(record) for record in response.iter_lines()] + + +@dataclass +class ListUsersRecordExtractor(DpathExtractor): + def extract_records(self, response: requests.Response) -> list[Record]: + return [{"email": record.decode()} for record in response.iter_lines()] + + +@dataclass +class EventsRecordExtractor(DpathExtractor): + common_fields = ("itblInternal", "_type", "createdAt", "email") + + def extract_records(self, response: requests.Response) -> list[Record]: + jsonl_records = StringIO(response.text) + records = [] + for record in jsonl_records: + record_dict = json.loads(record) + record_dict_common_fields = {} + for field in self.common_fields: + record_dict_common_fields[field] = record_dict.pop(field, None) + + records.append({**record_dict_common_fields, "data": record_dict}) + + return records diff --git a/airbyte-integrations/connectors/source-iterable/source_iterable/manifest.yaml b/airbyte-integrations/connectors/source-iterable/source_iterable/manifest.yaml new file mode 100644 index 0000000000000..ff8f3411d23e8 --- /dev/null +++ b/airbyte-integrations/connectors/source-iterable/source_iterable/manifest.yaml @@ -0,0 +1,440 @@ +spec: + type: Spec + connection_specification: + $schema: http://json-schema.org/draft-07/schema# + title: Iterable Spec + type: object + required: + - start_date + - api_key + additionalProperties: true + properties: + api_key: + type: "string" + title: "API Key" + description: >- + Iterable API Key. See the docs + for more information on how to obtain this key. + airbyte_secret: true + order: 0 + start_date: + type: "string" + title: "Start Date" + description: >- + The date from which you'd like to replicate data for Iterable, in the format YYYY-MM-DDT00:00:00Z. + All data generated after this date will be replicated. + examples: ["2021-04-01T00:00:00Z"] + pattern: ^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$ + order: 1 + format: "date-time" +type: DeclarativeSource +check: + type: CheckStream + stream_names: + - lists +streams: + - name: lists + type: DeclarativeStream + retriever: + type: SimpleRetriever + paginator: + type: NoPagination + requester: + path: lists + type: HttpRequester + url_base: https://api.iterable.com/api/ + http_method: GET + authenticator: + type: ApiKeyAuthenticator + api_token: "{{ config['api_key'] }}" + inject_into: + type: RequestOption + field_name: Api-Key + inject_into: header + request_headers: {} + request_body_json: {} + request_parameters: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - lists + partition_router: [] + primary_key: + - id + - name: list_users + type: DeclarativeStream + retriever: + type: SimpleRetriever + paginator: + type: NoPagination + requester: + path: lists/getUsers + type: HttpRequester + url_base: https://api.iterable.com/api/ + http_method: GET + authenticator: + type: ApiKeyAuthenticator + api_token: "{{ config['api_key'] }}" + inject_into: + type: RequestOption + field_name: Api-Key + inject_into: header + request_headers: {} + request_body_json: {} + request_parameters: {} + record_selector: + type: RecordSelector + extractor: + class_name: source_iterable.components.ListUsersRecordExtractor + field_path: + - getUsers + partition_router: + - type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + request_option: + inject_into: request_parameter + type: RequestOption + field_name: listId + partition_field: list_id + stream: + type: DeclarativeStream + name: lists + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.iterable.com/api/ + path: lists + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: ApiKeyAuthenticator + api_token: "{{ config['api_key'] }}" + inject_into: + type: RequestOption + field_name: Api-Key + inject_into: header + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - lists + paginator: + type: NoPagination + partition_router: [] + primary_key: + - listId + transformations: + - type: AddFields + fields: + - path: + - listId + value: "{{ stream_slice.list_id }}" + - name: campaigns + type: DeclarativeStream + retriever: + type: SimpleRetriever + paginator: + type: NoPagination + requester: + path: campaigns + type: HttpRequester + url_base: https://api.iterable.com/api/ + http_method: GET + authenticator: + type: ApiKeyAuthenticator + api_token: "{{ config['api_key'] }}" + inject_into: + type: RequestOption + field_name: Api-Key + inject_into: header + request_headers: {} + request_body_json: {} + request_parameters: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - campaigns + partition_router: [] + primary_key: + - id + - name: channels + type: DeclarativeStream + retriever: + type: SimpleRetriever + paginator: + type: NoPagination + requester: + path: channels + type: HttpRequester + url_base: https://api.iterable.com/api/ + http_method: GET + authenticator: + type: ApiKeyAuthenticator + api_token: "{{ config['api_key'] }}" + inject_into: + type: RequestOption + field_name: Api-Key + inject_into: header + request_headers: {} + request_body_json: {} + request_parameters: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - channels + partition_router: [] + primary_key: + - id + - name: message_types + type: DeclarativeStream + retriever: + type: SimpleRetriever + paginator: + type: NoPagination + requester: + path: messageTypes + type: HttpRequester + url_base: https://api.iterable.com/api/ + http_method: GET + authenticator: + type: ApiKeyAuthenticator + api_token: "{{ config['api_key'] }}" + inject_into: + type: RequestOption + field_name: Api-Key + inject_into: header + request_headers: {} + request_body_json: {} + request_parameters: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - messageTypes + partition_router: [] + primary_key: + - id + - name: metadata + type: DeclarativeStream + retriever: + type: SimpleRetriever + paginator: + type: NoPagination + requester: + path: metadata + type: HttpRequester + url_base: https://api.iterable.com/api/ + http_method: GET + authenticator: + type: ApiKeyAuthenticator + api_token: "{{ config['api_key'] }}" + inject_into: + type: RequestOption + field_name: Api-Key + inject_into: header + request_headers: {} + request_body_json: {} + request_parameters: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - results + partition_router: [] + primary_key: [] + - name: users + type: DeclarativeStream + retriever: + type: SimpleRetriever + paginator: + type: NoPagination + requester: + path: export/data.json + type: HttpRequester + url_base: https://api.iterable.com/api/ + http_method: GET + authenticator: + type: ApiKeyAuthenticator + api_token: "{{ config['api_key'] }}" + inject_into: + type: RequestOption + field_name: Api-Key + inject_into: header + request_headers: {} + request_body_json: {} + request_parameters: + stream: "True" + dataTypeName: user + record_selector: + type: RecordSelector + extractor: + class_name: source_iterable.components.XJsonRecordExtractor + field_path: + - users + partition_router: [] + primary_key: [] + incremental_sync: + step: P90D + type: DatetimeBasedCursor + cursor_field: profileUpdatedAt + end_datetime: + type: MinMaxDatetime + datetime: "{{ config['end_date'] if config['end_date'] else now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + datetime_format: "%Y-%m-%d %H:%M:%S" + end_time_option: + type: RequestOption + field_name: endDateTime + inject_into: request_parameter + start_time_option: + type: RequestOption + field_name: startDateTime + inject_into: request_parameter + cursor_granularity: PT1S + cursor_datetime_formats: + - "%Y-%m-%d %H:%M:%S %z" + - "%Y-%m-%dT%H:%M:%S%z" + - name: events + primary_key: [] + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.iterable.com/api/ + path: export/userEvents + http_method: GET + request_parameters: + includeCustomEvents: "true" + request_headers: {} + authenticator: + type: ApiKeyAuthenticator + api_token: "{{ config['api_key'] }}" + inject_into: + type: RequestOption + field_name: Api-Key + inject_into: header + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + class_name: source_iterable.components.EventsRecordExtractor + field_path: + - events + paginator: + type: NoPagination + partition_router: + - type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: email + request_option: + inject_into: request_parameter + type: RequestOption + field_name: email + partition_field: email + stream: + name: list_users + type: DeclarativeStream + retriever: + type: SimpleRetriever + paginator: + type: NoPagination + requester: + path: lists/getUsers + type: HttpRequester + url_base: https://api.iterable.com/api/ + http_method: GET + authenticator: + type: ApiKeyAuthenticator + api_token: "{{ config['api_key'] }}" + inject_into: + type: RequestOption + field_name: Api-Key + inject_into: header + request_headers: {} + request_body_json: {} + request_parameters: {} + record_selector: + type: RecordSelector + extractor: + class_name: source_iterable.components.ListUsersRecordExtractor + field_path: + - getUsers + partition_router: + - type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + request_option: + inject_into: request_parameter + type: RequestOption + field_name: listId + partition_field: list_id + stream: + type: DeclarativeStream + name: lists + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.iterable.com/api/ + path: lists + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: ApiKeyAuthenticator + api_token: "{{ config['api_key'] }}" + inject_into: + type: RequestOption + field_name: Api-Key + inject_into: header + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - lists + paginator: + type: NoPagination + partition_router: [] + primary_key: + - id + transformations: + - type: AddFields + fields: + - path: + - list_id + value: "{{ stream_slice.list_id }}" +version: 0.65.0 +metadata: + autoImportSchema: + users: false + lists: false + channels: false + metadata: false + campaigns: false + message_types: false diff --git a/airbyte-integrations/connectors/source-iterable/source_iterable/source.py b/airbyte-integrations/connectors/source-iterable/source_iterable/source.py index b269b1a8e4b8a..83fa25aa174b9 100644 --- a/airbyte-integrations/connectors/source-iterable/source_iterable/source.py +++ b/airbyte-integrations/connectors/source-iterable/source_iterable/source.py @@ -2,20 +2,14 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # -from typing import Any, List, Mapping, Tuple +from typing import Any, List, Mapping -import requests.exceptions -from airbyte_cdk.models import SyncMode -from airbyte_cdk.sources import AbstractSource +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource from airbyte_cdk.sources.streams import Stream from airbyte_cdk.sources.streams.http.requests_native_auth import TokenAuthenticator -from source_iterable.utils import read_full_refresh from .streams import ( - AccessCheck, - Campaigns, CampaignsMetrics, - Channels, CustomEvent, EmailBounce, EmailClick, @@ -25,7 +19,6 @@ EmailSendSkip, EmailSubscribe, EmailUnsubscribe, - Events, HostedUnsubscribeClick, InAppClick, InAppClose, @@ -36,10 +29,6 @@ InAppSendSkip, InboxMessageImpression, InboxSession, - Lists, - ListUsers, - MessageTypes, - Metadata, Purchase, PushBounce, PushOpen, @@ -53,101 +42,70 @@ SmsSendSkip, SmsUsageInfo, Templates, - Users, WebPushClick, WebPushSend, WebPushSendSkip, ) +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. -class SourceIterable(AbstractSource): - """ - Note: there are some redundant endpoints - (e.g. [`export/userEvents`](https://api.iterable.com/api/docs#export_exportUserEvents) - and [`events/{email}`](https://api.iterable.com/api/docs#events_User_events)). - In this case it's better to use the one which takes params as a query param rather than as part of the url param. - """ +WARNING: Do not modify this file. +""" - def check_connection(self, logger, config) -> Tuple[bool, any]: - try: - authenticator = TokenAuthenticator(token=config["api_key"], auth_header="Api-Key", auth_method="") - list_gen = Lists(authenticator=authenticator).read_records(sync_mode=SyncMode.full_refresh) - next(list_gen) - return True, None - except Exception as e: - return False, f"Unable to connect to Iterable API with the provided credentials - {e}" +# Declarative Source +class SourceIterable(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "manifest.yaml"}) def streams(self, config: Mapping[str, Any]) -> List[Stream]: - def all_streams_accessible(): - access_check_stream = AccessCheck(authenticator=authenticator) - try: - next(read_full_refresh(access_check_stream), None) - except requests.exceptions.RequestException as e: - if e.response.status_code == requests.codes.UNAUTHORIZED: - return False - raise - return True + streams = super().streams(config=config) authenticator = TokenAuthenticator(token=config["api_key"], auth_header="Api-Key", auth_method="") # end date is provided for integration tests only start_date, end_date = config["start_date"], config.get("end_date") date_range = {"start_date": start_date, "end_date": end_date} - streams = [ - Campaigns(authenticator=authenticator), - CampaignsMetrics(authenticator=authenticator, **date_range), - Channels(authenticator=authenticator), - Lists(authenticator=authenticator), - MessageTypes(authenticator=authenticator), - Metadata(authenticator=authenticator), - Templates(authenticator=authenticator, **date_range), - ] - # Iterable supports two types of Server-side api keys: - # - read only - # - server side - # The first one has a limited set of supported APIs, so others are filtered out here. - # A simple check is done - a read operation on a stream that can be accessed only via a Server side API key. - # If read is successful - other streams should be supported as well. - # More on this - https://support.iterable.com/hc/en-us/articles/360043464871-API-Keys- - if all_streams_accessible(): - streams.extend( - [ - Users(authenticator=authenticator, **date_range), - ListUsers(authenticator=authenticator), - EmailBounce(authenticator=authenticator, **date_range), - EmailClick(authenticator=authenticator, **date_range), - EmailComplaint(authenticator=authenticator, **date_range), - EmailOpen(authenticator=authenticator, **date_range), - EmailSend(authenticator=authenticator, **date_range), - EmailSendSkip(authenticator=authenticator, **date_range), - EmailSubscribe(authenticator=authenticator, **date_range), - EmailUnsubscribe(authenticator=authenticator, **date_range), - PushSend(authenticator=authenticator, **date_range), - PushSendSkip(authenticator=authenticator, **date_range), - PushOpen(authenticator=authenticator, **date_range), - PushUninstall(authenticator=authenticator, **date_range), - PushBounce(authenticator=authenticator, **date_range), - WebPushSend(authenticator=authenticator, **date_range), - WebPushClick(authenticator=authenticator, **date_range), - WebPushSendSkip(authenticator=authenticator, **date_range), - InAppSend(authenticator=authenticator, **date_range), - InAppOpen(authenticator=authenticator, **date_range), - InAppClick(authenticator=authenticator, **date_range), - InAppClose(authenticator=authenticator, **date_range), - InAppDelete(authenticator=authenticator, **date_range), - InAppDelivery(authenticator=authenticator, **date_range), - InAppSendSkip(authenticator=authenticator, **date_range), - InboxSession(authenticator=authenticator, **date_range), - InboxMessageImpression(authenticator=authenticator, **date_range), - SmsSend(authenticator=authenticator, **date_range), - SmsBounce(authenticator=authenticator, **date_range), - SmsClick(authenticator=authenticator, **date_range), - SmsReceived(authenticator=authenticator, **date_range), - SmsSendSkip(authenticator=authenticator, **date_range), - SmsUsageInfo(authenticator=authenticator, **date_range), - Purchase(authenticator=authenticator, **date_range), - CustomEvent(authenticator=authenticator, **date_range), - HostedUnsubscribeClick(authenticator=authenticator, **date_range), - Events(authenticator=authenticator), - ] - ) + + # TODO: migrate streams below to low code as slicer logic will be migrated to generator based + streams.extend( + [ + CampaignsMetrics(authenticator=authenticator, **date_range), + Templates(authenticator=authenticator, **date_range), + EmailBounce(authenticator=authenticator, **date_range), + EmailClick(authenticator=authenticator, **date_range), + EmailComplaint(authenticator=authenticator, **date_range), + EmailOpen(authenticator=authenticator, **date_range), + EmailSend(authenticator=authenticator, **date_range), + EmailSendSkip(authenticator=authenticator, **date_range), + EmailSubscribe(authenticator=authenticator, **date_range), + EmailUnsubscribe(authenticator=authenticator, **date_range), + PushSend(authenticator=authenticator, **date_range), + PushSendSkip(authenticator=authenticator, **date_range), + PushOpen(authenticator=authenticator, **date_range), + PushUninstall(authenticator=authenticator, **date_range), + PushBounce(authenticator=authenticator, **date_range), + WebPushSend(authenticator=authenticator, **date_range), + WebPushClick(authenticator=authenticator, **date_range), + WebPushSendSkip(authenticator=authenticator, **date_range), + InAppSend(authenticator=authenticator, **date_range), + InAppOpen(authenticator=authenticator, **date_range), + InAppClick(authenticator=authenticator, **date_range), + InAppClose(authenticator=authenticator, **date_range), + InAppDelete(authenticator=authenticator, **date_range), + InAppDelivery(authenticator=authenticator, **date_range), + InAppSendSkip(authenticator=authenticator, **date_range), + InboxSession(authenticator=authenticator, **date_range), + InboxMessageImpression(authenticator=authenticator, **date_range), + SmsSend(authenticator=authenticator, **date_range), + SmsBounce(authenticator=authenticator, **date_range), + SmsClick(authenticator=authenticator, **date_range), + SmsReceived(authenticator=authenticator, **date_range), + SmsSendSkip(authenticator=authenticator, **date_range), + SmsUsageInfo(authenticator=authenticator, **date_range), + Purchase(authenticator=authenticator, **date_range), + CustomEvent(authenticator=authenticator, **date_range), + HostedUnsubscribeClick(authenticator=authenticator, **date_range), + ] + ) return streams diff --git a/airbyte-integrations/connectors/source-iterable/source_iterable/spec.json b/airbyte-integrations/connectors/source-iterable/source_iterable/spec.json index c44f475617a7f..f867b348695ba 100644 --- a/airbyte-integrations/connectors/source-iterable/source_iterable/spec.json +++ b/airbyte-integrations/connectors/source-iterable/source_iterable/spec.json @@ -1,5 +1,4 @@ { - "documentationUrl": "https://docs.airbyte.com/integrations/sources/iterable", "connectionSpecification": { "$schema": "http://json-schema.org/draft-07/schema#", "title": "Iterable Spec", @@ -10,14 +9,14 @@ "api_key": { "type": "string", "title": "API Key", - "description": "Iterable API Key. See the docs for more information on how to obtain this key.", + "description": "Iterable API Key. See the docs for more information on how to obtain this key.", "airbyte_secret": true, "order": 0 }, "start_date": { "type": "string", "title": "Start Date", - "description": "The date from which you'd like to replicate data for Iterable, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.", + "description": "The date from which you'd like to replicate data for Iterable, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.", "examples": ["2021-04-01T00:00:00Z"], "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$", "order": 1, diff --git a/airbyte-integrations/connectors/source-iterable/source_iterable/streams.py b/airbyte-integrations/connectors/source-iterable/source_iterable/streams.py index 177c7987dd38d..b24b90518c3cd 100644 --- a/airbyte-integrations/connectors/source-iterable/source_iterable/streams.py +++ b/airbyte-integrations/connectors/source-iterable/source_iterable/streams.py @@ -4,7 +4,6 @@ import csv import json -import urllib.parse as urlparse from abc import ABC, abstractmethod from io import StringIO from typing import Any, Dict, Iterable, List, Mapping, MutableMapping, Optional, Union @@ -66,11 +65,6 @@ def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, """ return None - def check_unauthorized_key(self, response: requests.Response) -> bool: - if response.status_code == codes.UNAUTHORIZED: - self.logger.warning(f"Provided API Key has not sufficient permissions to read from stream: {self.data_field}") - return True - def check_generic_error(self, response: requests.Response) -> bool: """ https://github.com/airbytehq/oncall/issues/1592#issuecomment-1499109251 @@ -129,9 +123,6 @@ def read_records( yield from super().read_records(sync_mode, cursor_field=cursor_field, stream_slice=stream_slice, stream_state=stream_state) except (HTTPError, UserDefinedBackoffException, DefaultBackoffException) as e: response = e.response - if self.check_unauthorized_key(response): - self.ignore_further_slices = True - return if self.check_generic_error(response): return raise e @@ -342,42 +333,6 @@ def get_json_schema(self) -> Mapping[str, Any]: return ResourceSchemaLoader(package_name_from_class(self.__class__)).get_schema("events") -class Lists(IterableStream): - data_field = "lists" - - def path(self, **kwargs) -> str: - return "lists" - - -class ListUsers(IterableStream): - primary_key = "listId" - data_field = "getUsers" - name = "list_users" - # enable caching, because this stream used by other ones - use_cache = True - - def path(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwargs) -> str: - return f"lists/{self.data_field}?listId={stream_slice['list_id']}" - - def stream_slices(self, **kwargs) -> Iterable[Optional[Mapping[str, any]]]: - lists = Lists(authenticator=self._cred) - for list_record in lists.read_records(sync_mode=kwargs.get("sync_mode", SyncMode.full_refresh)): - yield {"list_id": list_record["id"]} - - def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - list_id = self._get_list_id(response.url) - for user in response.iter_lines(): - yield {"email": user.decode(), "listId": list_id} - - @staticmethod - def _get_list_id(url: str) -> int: - parsed_url = urlparse.urlparse(url) - for q in parsed_url.query.split("&"): - key, value = q.split("=") - if key == "listId": - return int(value) - - class Campaigns(IterableStream): data_field = "campaigns" @@ -465,71 +420,6 @@ def _parse_csv_string_to_dict(csv_string: str) -> List[Dict[str, Any]]: return result -class Channels(IterableStream): - data_field = "channels" - - def path(self, **kwargs) -> str: - return "channels" - - -class MessageTypes(IterableStream): - data_field = "messageTypes" - name = "message_types" - - def path(self, **kwargs) -> str: - return "messageTypes" - - -class Metadata(IterableStream): - primary_key = None - data_field = "results" - - def path(self, **kwargs) -> str: - return "metadata" - - -class Events(IterableStream): - """ - https://api.iterable.com/api/docs#export_exportUserEvents - """ - - primary_key = None - data_field = "events" - common_fields = ("itblInternal", "_type", "createdAt", "email") - - def path(self, **kwargs) -> str: - return "export/userEvents" - - def request_params(self, stream_slice: Optional[Mapping[str, Any]], **kwargs) -> MutableMapping[str, Any]: - params = super().request_params(**kwargs) - params.update({"email": stream_slice["email"], "includeCustomEvents": "true"}) - - return params - - def stream_slices(self, **kwargs) -> Iterable[Optional[Mapping[str, any]]]: - lists = ListUsers(authenticator=self._cred) - stream_slices = lists.stream_slices() - - for stream_slice in stream_slices: - for list_record in lists.read_records(sync_mode=kwargs.get("sync_mode", SyncMode.full_refresh), stream_slice=stream_slice): - yield {"email": list_record["email"]} - - def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - """ - Parse jsonl response body. - Put common event fields at the top level. - Put the rest of the fields in the `data` subobject. - """ - jsonl_records = StringIO(response.text) - for record in jsonl_records: - record_dict = json.loads(record) - record_dict_common_fields = {} - for field in self.common_fields: - record_dict_common_fields[field] = record_dict.pop(field, None) - - yield {**record_dict_common_fields, "data": record_dict} - - class EmailBounce(IterableExportStreamAdjustableRange): data_field = "emailBounce" @@ -687,16 +577,3 @@ def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapp for record in records: record[self.cursor_field] = self._field_to_datetime(record[self.cursor_field]) yield record - - -class Users(IterableExportStreamRanged): - data_field = "user" - cursor_field = "profileUpdatedAt" - - -class AccessCheck(ListUsers): - # since 401 error is failed silently in all the streams, - # we need another class to distinguish an empty stream from 401 response - def check_unauthorized_key(self, response: requests.Response) -> bool: - # this allows not retrying 401 and raising the error upstream - return response.status_code != codes.UNAUTHORIZED diff --git a/airbyte-integrations/connectors/source-iterable/source_iterable/utils.py b/airbyte-integrations/connectors/source-iterable/source_iterable/utils.py index 044a215db8870..97232f4316107 100644 --- a/airbyte-integrations/connectors/source-iterable/source_iterable/utils.py +++ b/airbyte-integrations/connectors/source-iterable/source_iterable/utils.py @@ -24,10 +24,3 @@ def dateutil_parse(text): dt.microsecond, tz=dt.tzinfo or pendulum.tz.UTC, ) - - -def read_full_refresh(stream_instance: Stream): - slices = stream_instance.stream_slices(sync_mode=SyncMode.full_refresh) - for _slice in slices: - for record in stream_instance.read_records(stream_slice=_slice, sync_mode=SyncMode.full_refresh): - yield record diff --git a/airbyte-integrations/connectors/source-iterable/unit_tests/conftest.py b/airbyte-integrations/connectors/source-iterable/unit_tests/conftest.py index 15eb9a6418758..73deefe0d4026 100644 --- a/airbyte-integrations/connectors/source-iterable/unit_tests/conftest.py +++ b/airbyte-integrations/connectors/source-iterable/unit_tests/conftest.py @@ -3,14 +3,10 @@ # import pytest +import responses from airbyte_cdk.models import AirbyteStream, ConfiguredAirbyteCatalog, ConfiguredAirbyteStream -@pytest.fixture(autouse=True) -def disable_cache(mocker): - mocker.patch("source_iterable.streams.ListUsers.use_cache", False) - - @pytest.fixture def catalog(request): return ConfiguredAirbyteCatalog( @@ -31,7 +27,7 @@ def config_fixture(): @pytest.fixture() def mock_lists_resp(mocker): - mocker.patch("source_iterable.streams.Lists.read_records", return_value=iter([{"id": 1}, {"id": 2}])) + responses.get("https://api.iterable.com/api/lists", json={"lists": [{"id": 1}, {"id": 2}]}) @pytest.fixture(name="lists_stream") diff --git a/airbyte-integrations/connectors/source-iterable/unit_tests/test_export_adjustable_range.py b/airbyte-integrations/connectors/source-iterable/unit_tests/test_export_adjustable_range.py index 2fd48039348a7..90bd5b6ab2860 100644 --- a/airbyte-integrations/connectors/source-iterable/unit_tests/test_export_adjustable_range.py +++ b/airbyte-integrations/connectors/source-iterable/unit_tests/test_export_adjustable_range.py @@ -67,7 +67,7 @@ def response_cb(req): assert records assert sum(ranges) == DAYS_DURATION # since read is called on source instance, under the hood .streams() is called which triggers one more http call - assert len(responses.calls) == len(ranges) + 1 + assert len(responses.calls) == len(ranges) assert ranges == [ AdjustableSliceGenerator.INITIAL_RANGE_DAYS, *([int(DAYS_PER_MINUTE_RATE / AdjustableSliceGenerator.REQUEST_PER_MINUTE_LIMIT)] * 35), @@ -114,4 +114,4 @@ def response_cb(req): assert sum(ranges) == days_duration assert len(ranges) == len(records) # since read is called on source instance, under the hood .streams() is called which triggers one more http call - assert len(responses.calls) == 3 * len(ranges) + 1 + assert len(responses.calls) == 3 * len(ranges) diff --git a/airbyte-integrations/connectors/source-iterable/unit_tests/test_exports_stream.py b/airbyte-integrations/connectors/source-iterable/unit_tests/test_exports_stream.py index 2592b27d5fe0e..0e0663cd34650 100644 --- a/airbyte-integrations/connectors/source-iterable/unit_tests/test_exports_stream.py +++ b/airbyte-integrations/connectors/source-iterable/unit_tests/test_exports_stream.py @@ -9,8 +9,8 @@ import pytest import responses from airbyte_cdk.models import SyncMode -from source_iterable.slice_generators import StreamSlice -from source_iterable.streams import Users +from airbyte_cdk.sources.declarative.types import StreamSlice +from source_iterable.source import SourceIterable @pytest.fixture @@ -23,13 +23,21 @@ def session_mock(): response_mock.status_code = 200 yield session_mock + @responses.activate -def test_stream_correct(): - stream_slice = StreamSlice(start_date=pendulum.parse("2020"), end_date=pendulum.parse("2021")) - record_js = {"profileUpdatedAt": "2020"} - NUMBER_OF_RECORDS = 10**2 - resp_body = "\n".join([json.dumps(record_js)] * NUMBER_OF_RECORDS) +def test_stream_correct(config): + start_date = pendulum.parse("2020-01-01 00:00:00+00:00") + end_date = pendulum.parse("2021-01-01 00:00:00+00:00") + stream_slice = StreamSlice(partition={}, cursor_slice={"start_time": start_date, "end_time": end_date}) + record_js = {"profileUpdatedAt": "2020-01-01 00:00:00 +00:00"} + number_of_records = 10 ** 2 + resp_body = "\n".join([json.dumps(record_js)] * number_of_records) + responses.add("GET", "https://api.iterable.com/api/export/data.json", body=resp_body) - stream = Users(start_date="2020", authenticator=None) + + stream_name = "users" + source_iterable = SourceIterable() + stream = next(filter(lambda x: x.name == stream_name, source_iterable.streams(config=config))) records = list(stream.read_records(sync_mode=SyncMode.full_refresh, cursor_field=None, stream_slice=stream_slice, stream_state={})) - assert len(records) == NUMBER_OF_RECORDS + + assert len(records) == number_of_records diff --git a/airbyte-integrations/connectors/source-iterable/unit_tests/test_extractors.py b/airbyte-integrations/connectors/source-iterable/unit_tests/test_extractors.py new file mode 100644 index 0000000000000..87bdefadff9da --- /dev/null +++ b/airbyte-integrations/connectors/source-iterable/unit_tests/test_extractors.py @@ -0,0 +1,65 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +import json +from io import StringIO +from unittest.mock import Mock + +import pytest +from source_iterable.components import EventsRecordExtractor, ListUsersRecordExtractor, XJsonRecordExtractor + + +@pytest.fixture +def mock_response(): + mock_response = Mock() + return mock_response + + +def test_list_users_extraction(mock_response): + mock_response.iter_lines.return_value = [b'user1@example.com', b'user2@example.com'] + + extractor = ListUsersRecordExtractor( + field_path=["getUsers"], + config={}, + parameters={}, + ) + records = extractor.extract_records(mock_response) + + assert len(records) == 2 + assert records[0]["email"] == "user1@example.com" + assert records[1]["email"] == "user2@example.com" + + +def test_xjson_extraction(mock_response): + mock_response.iter_lines.return_value = [ + b'{"id": 1, "name": "Alice"}', + b'{"id": 2, "name": "Bob"}' + ] + + extractor = XJsonRecordExtractor( + field_path=["users"], + config={}, + parameters={}, + ) + records = extractor.extract_records(mock_response) + + assert len(records) == 2 + assert records[0] == {"id": 1, "name": "Alice"} + assert records[1] == {"id": 2, "name": "Bob"} + + +def test_events_extraction(mock_response): + mock_response.text = '{"itblInternal": 1, "_type": "event", "createdAt": "2024-03-21", "email": "user@example.com", "data": {"event_type": "click"}}\n' \ + '{"_type": "event", "createdAt": "2024-03-22", "data": {"event_type": "purchase"}}' + + extractor = EventsRecordExtractor( + field_path=["events"], + config={}, + parameters={}, + ) + records = extractor.extract_records(mock_response) + + assert len(records) == 2 + assert records[0] == {'_type': 'event', 'createdAt': '2024-03-21', 'data': {'data': {'event_type': 'click'}}, 'email': 'user@example.com', 'itblInternal': 1} + assert records[1] == {'_type': 'event', 'createdAt': '2024-03-22', 'data': {'data': {'event_type': 'purchase'}}, 'email': None, 'itblInternal': None} diff --git a/airbyte-integrations/connectors/source-iterable/unit_tests/test_slice_generator.py b/airbyte-integrations/connectors/source-iterable/unit_tests/test_slice_generator.py index 9907849bc11e6..b4b171505b30d 100644 --- a/airbyte-integrations/connectors/source-iterable/unit_tests/test_slice_generator.py +++ b/airbyte-integrations/connectors/source-iterable/unit_tests/test_slice_generator.py @@ -2,6 +2,8 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # +from datetime import datetime, timedelta + import freezegun import pendulum import pytest @@ -39,42 +41,42 @@ def test_slice_gen_no_range_adjust(): "start_day,end_day,days,range", [ ( - "2020-01-01", - "2020-01-10", - 5, - [ - (pendulum.parse("2020-01-01"), pendulum.parse("2020-01-06")), - (pendulum.parse("2020-01-06"), pendulum.parse("2020-01-10")), - ], + "2020-01-01", + "2020-01-10", + 5, + [ + (pendulum.parse("2020-01-01"), pendulum.parse("2020-01-06")), + (pendulum.parse("2020-01-06"), pendulum.parse("2020-01-10")), + ], ), ( - "2020-01-01", - "2020-01-10 20:00:12", - 5, - [ - (pendulum.parse("2020-01-01"), pendulum.parse("2020-01-06")), - (pendulum.parse("2020-01-06"), pendulum.parse("2020-01-10 20:00:12")), - ], + "2020-01-01", + "2020-01-10 20:00:12", + 5, + [ + (pendulum.parse("2020-01-01"), pendulum.parse("2020-01-06")), + (pendulum.parse("2020-01-06"), pendulum.parse("2020-01-10 20:00:12")), + ], ), ( - "2020-01-01", - "2020-01-01 20:00:12", - 5, - [ - (pendulum.parse("2020-01-01"), pendulum.parse("2020-01-01 20:00:12")), - ], + "2020-01-01", + "2020-01-01 20:00:12", + 5, + [ + (pendulum.parse("2020-01-01"), pendulum.parse("2020-01-01 20:00:12")), + ], ), ( - "2020-01-01", - "2020-01-10", - 50, - [(pendulum.parse("2020-01-01"), pendulum.parse("2020-01-10"))], + "2020-01-01", + "2020-01-10", + 50, + [(pendulum.parse("2020-01-01"), pendulum.parse("2020-01-10"))], ), ( - "2020-01-01", - "2020-01-01", - 50, - [], + "2020-01-01", + "2020-01-01", + 50, + [], ), ], ) @@ -92,3 +94,11 @@ def test_datetime_wrong_range(): end_day = pendulum.parse("2000") with pytest.raises(StopIteration): next(RangeSliceGenerator.make_datetime_ranges(start_day, end_day, 1)) + + +def test_reduce_range(): + slice_generator = AdjustableSliceGenerator(start_date=datetime(2022, 1, 1), end_date=datetime(2022, 1, 31)) + next(slice_generator) + reduced_slice = slice_generator.reduce_range() + assert reduced_slice.start_date == datetime(2022, 1, 1) + assert reduced_slice.end_date == datetime(2022, 1, 31) diff --git a/airbyte-integrations/connectors/source-iterable/unit_tests/test_source.py b/airbyte-integrations/connectors/source-iterable/unit_tests/test_source.py index 14269c41b72ae..5e73da4093c18 100644 --- a/airbyte-integrations/connectors/source-iterable/unit_tests/test_source.py +++ b/airbyte-integrations/connectors/source-iterable/unit_tests/test_source.py @@ -2,27 +2,33 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # -from unittest.mock import MagicMock, patch +from unittest.mock import MagicMock import pytest import responses from source_iterable.source import SourceIterable -from source_iterable.streams import Lists @responses.activate -@pytest.mark.parametrize("body, status, expected_streams", ((b"", 401, 7), (b"alpha@gmail.com\nbeta@gmail.com", 200, 44))) -def test_source_streams(mock_lists_resp, config, body, status, expected_streams): +@pytest.mark.parametrize("body, status, expected_streams", [(b"alpha@gmail.com\nbeta@gmail.com", 200, 44)]) +def test_source_streams(config, body, status, expected_streams): + responses.get("https://api.iterable.com/api/lists", json={"lists": [{"id": 1}]}) responses.add(responses.GET, "https://api.iterable.com/api/lists/getUsers?listId=1", body=body, status=status) + streams = SourceIterable().streams(config=config) + assert len(streams) == expected_streams -def test_source_check_connection_ok(config): - with patch.object(Lists, "read_records", return_value=iter([{"id": 1}])): - assert SourceIterable().check_connection(MagicMock(), config=config) == (True, None) +@responses.activate +def test_source_check_connection_failed(config): + responses.get("https://api.iterable.com/api/lists", json={}, status=401) + assert SourceIterable().check_connection(MagicMock(), config=config)[0] is False -def test_source_check_connection_failed(config): - with patch.object(Lists, "read_records", return_value=iter([])): - assert SourceIterable().check_connection(MagicMock(), config=config)[0] is False + +@responses.activate +def test_source_check_connection_ok(config): + responses.get("https://api.iterable.com/api/lists", json={"lists": [{"id": 1}]}) + + assert SourceIterable().check_connection(MagicMock(), config=config) == (True, None) diff --git a/airbyte-integrations/connectors/source-iterable/unit_tests/test_stream_events.py b/airbyte-integrations/connectors/source-iterable/unit_tests/test_stream_events.py index a92343c6acdf3..163225f682ad2 100644 --- a/airbyte-integrations/connectors/source-iterable/unit_tests/test_stream_events.py +++ b/airbyte-integrations/connectors/source-iterable/unit_tests/test_stream_events.py @@ -4,10 +4,13 @@ import json +import pendulum import pytest import requests import responses -from source_iterable.streams import Events +from airbyte_cdk.models import SyncMode +from airbyte_cdk.sources.declarative.types import StreamSlice +from source_iterable.source import SourceIterable @responses.activate @@ -185,18 +188,26 @@ ), ], ) -def test_events_parse_response(response_objects, expected_objects, jsonl_body): +def test_events_parse_response(response_objects, expected_objects, jsonl_body, config): if jsonl_body: response_body = "\n".join([json.dumps(obj) for obj in response_objects]) else: response_body = json.dumps(response_objects) - responses.add(responses.GET, "https://example.com", body=response_body) - response = requests.get("https://example.com") - stream = Events(authenticator=None) + + responses.add( + responses.GET, + "https://api.iterable.com/api/export/userEvents?includeCustomEvents=true&email=user1", + body=response_body + ) + + response = requests.get("https://api.iterable.com/api/export/userEvents?includeCustomEvents=true&email=user1") + + stream = next(filter(lambda x: x.name == "events", SourceIterable().streams(config=config))) if jsonl_body: - records = [record for record in stream.parse_response(response)] + stream_slice = StreamSlice(partition={'email': 'user1', 'parent_slice': {'list_id': 111111, 'parent_slice': {}}}, cursor_slice={}) + records = list(map(lambda record: record.data, stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice=stream_slice))) assert records == expected_objects else: with pytest.raises(TypeError): - [record for record in stream.parse_response(response)] + [record for record in stream.retriever._parse_response(response)] diff --git a/airbyte-integrations/connectors/source-iterable/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-iterable/unit_tests/test_streams.py index 68634a9a7ac29..b39ddb2b8f0e3 100644 --- a/airbyte-integrations/connectors/source-iterable/unit_tests/test_streams.py +++ b/airbyte-integrations/connectors/source-iterable/unit_tests/test_streams.py @@ -3,198 +3,54 @@ # import json -from unittest.mock import MagicMock -import pendulum import pytest import requests import responses from airbyte_cdk.models import SyncMode +from airbyte_cdk.sources.declarative.exceptions import ReadException +from airbyte_cdk.sources.declarative.types import StreamSlice +from source_iterable.source import SourceIterable from source_iterable.streams import ( Campaigns, CampaignsMetrics, - Channels, - Events, - Lists, - ListUsers, - MessageTypes, - Metadata, + IterableExportEventsStreamAdjustableRange, + IterableExportStreamAdjustableRange, + IterableExportStreamRanged, + IterableStream, Templates, - Users, ) -from source_iterable.utils import dateutil_parse, read_full_refresh - - -@pytest.mark.parametrize( - "stream,date,slice,expected_path", - [ - (Lists, False, {}, "lists"), - (Campaigns, False, {}, "campaigns"), - (Channels, False, {}, "channels"), - (Events, False, {}, "export/userEvents"), - (MessageTypes, False, {}, "messageTypes"), - (Metadata, False, {}, "metadata"), - (ListUsers, False, {"list_id": 1}, "lists/getUsers?listId=1"), - (CampaignsMetrics, True, {}, "campaigns/metrics"), - (Templates, True, {}, "templates"), - ], -) -def test_path(config, stream, date, slice, expected_path): - args = {"authenticator": None} - if date: - args["start_date"] = "2019-10-10T00:00:00" - - assert stream(**args).path(stream_slice=slice) == expected_path +from source_iterable.utils import dateutil_parse def test_campaigns_metrics_csv(): csv_string = "a,b,c,d\n1, 2,,3\n6,,1, 2\n" output = [{"a": 1, "b": 2, "d": 3}, {"a": 6, "c": 1, "d": 2}] - assert CampaignsMetrics._parse_csv_string_to_dict(csv_string) == output -@pytest.mark.parametrize( - "url,id", - [ - ("http://google.com?listId=1&another=another", 1), - ("http://google.com?another=another", None), - ], -) -def test_list_users_get_list_id(url, id): - assert ListUsers._get_list_id(url) == id - - def test_campaigns_metrics_request_params(): stream = CampaignsMetrics(authenticator=None, start_date="2019-10-10T00:00:00") params = stream.request_params(stream_slice={"campaign_ids": "c101"}, stream_state=None) assert params == {"campaignId": "c101", "startDateTime": "2019-10-10T00:00:00"} -def test_events_request_params(): - stream = Events(authenticator=None) - params = stream.request_params(stream_slice={"email": "a@a.a"}, stream_state=None) - assert params == {"email": "a@a.a", "includeCustomEvents": "true"} - - -def test_templates_parse_response(): - stream = Templates(authenticator=None, start_date="2019-10-10T00:00:00") - with responses.RequestsMock() as rsps: - rsps.add( - responses.GET, - "https://api.iterable.com/api/1/foobar", - json={"templates": [{"createdAt": "2022-01-01", "id": 1}]}, - status=200, - content_type="application/json", - ) - resp = requests.get("https://api.iterable.com/api/1/foobar") - - records = stream.parse_response(response=resp) - - assert list(records) == [{"id": 1, "createdAt": dateutil_parse("2022-01-01")}] - - -def test_list_users_parse_response(): - stream = ListUsers(authenticator=None) - with responses.RequestsMock() as rsps: - rsps.add( - responses.GET, - "https://api.iterable.com/lists/getUsers?listId=100", - body="user100", - status=200, - content_type="application/json", - ) - resp = requests.get("https://api.iterable.com/lists/getUsers?listId=100") - - records = stream.parse_response(response=resp) - - assert list(records) == [{"email": "user100", "listId": 100}] - - -def test_campaigns_metrics_parse_response(): - - stream = CampaignsMetrics(authenticator=None, start_date="2019-10-10T00:00:00") - with responses.RequestsMock() as rsps: - rsps.add( - responses.GET, - "https://api.iterable.com/lists/getUsers?listId=100", - body="""a,b,c,d -1, 2,, 3 -6,, 1, 2 -""", - status=200, - content_type="application/json", - ) - resp = requests.get("https://api.iterable.com/lists/getUsers?listId=100") - - records = stream.parse_response(response=resp) - - assert list(records) == [ - {"data": {"a": 1, "b": 2, "d": 3}}, - {"data": {"a": 6, "c": 1, "d": 2}}, - ] - - -def test_iterable_stream_parse_response(): - stream = Lists(authenticator=None) - with responses.RequestsMock() as rsps: - rsps.add( - responses.GET, - "https://api.iterable.com/lists/getUsers?listId=100", - json={"lists": [{"id": 1}, {"id": 2}]}, - status=200, - content_type="application/json", - ) - resp = requests.get("https://api.iterable.com/lists/getUsers?listId=100") - - records = stream.parse_response(response=resp) - - assert list(records) == [{"id": 1}, {"id": 2}] - - -def test_iterable_stream_backoff_time(): - stream = Lists(authenticator=None) - assert stream.backoff_time(response=None) is None - - -def test_iterable_export_stream_backoff_time(): - stream = Users(authenticator=None, start_date="2019-10-10T00:00:00") - assert stream.backoff_time(response=None) is None - - -@pytest.mark.parametrize( - "current_state,record_date,expected_state", - [ - ({}, "2022", {"profileUpdatedAt": "2022-01-01T00:00:00+00:00"}), - ({"profileUpdatedAt": "2020-01-01T00:00:00+00:00"}, "2022", {"profileUpdatedAt": "2022-01-01T00:00:00+00:00"}), - ({"profileUpdatedAt": "2022-01-01T00:00:00+00:00"}, "2020", {"profileUpdatedAt": "2022-01-01T00:00:00+00:00"}), - ], -) -def test_get_updated_state(current_state, record_date, expected_state): - stream = Users(authenticator=None, start_date="2019-10-10T00:00:00") - state = stream.get_updated_state( - current_stream_state=current_state, - latest_record={"profileUpdatedAt": pendulum.parse(record_date)}, - ) - assert state == expected_state - - @responses.activate -def test_stream_stops_on_401(mock_lists_resp): - # no requests should be made after getting 401 error despite the multiple slices - users_stream = ListUsers(authenticator=None) +def test_stream_stops_on_401(config): + responses.get("https://api.iterable.com/api/lists", json={"lists": [{"id": 1}, {"id": 2}]}) + users_stream = next(filter(lambda x: x.name == "list_users", SourceIterable().streams(config=config))) responses.add(responses.GET, "https://api.iterable.com/api/lists/getUsers?listId=1", json={}, status=401) + responses.add(responses.GET, "https://api.iterable.com/api/lists/getUsers?listId=2", json={}, status=401) slices = 0 - for slice_ in users_stream.stream_slices(sync_mode=SyncMode.full_refresh): - slices += 1 - _ = list(users_stream.read_records(stream_slice=slice_, sync_mode=SyncMode.full_refresh)) - assert len(responses.calls) == 1 - assert slices > 1 + with pytest.raises(ReadException): + for slice_ in users_stream.stream_slices(sync_mode=SyncMode.full_refresh): + slices += 1 + _ = list(users_stream.read_records(stream_slice=slice_, sync_mode=SyncMode.full_refresh)) @responses.activate -def test_listuser_stream_keep_working_on_500(): - users_stream = ListUsers(authenticator=None) +def test_listuser_stream_keep_working_on_500(config): + stream = next(filter(lambda x: x.name == "list_users", SourceIterable().streams(config=config))) msg_error = "An error occurred. Please try again later. If problem persists, please contact your CSM" generic_error1 = {"msg": msg_error, "code": "GenericError"} @@ -202,11 +58,15 @@ def test_listuser_stream_keep_working_on_500(): responses.get("https://api.iterable.com/api/lists", json={"lists": [{"id": 1000}, {"id": 2000}, {"id": 3000}]}) responses.get("https://api.iterable.com/api/lists/getUsers?listId=1000", json=generic_error1, status=500) + responses.get("https://api.iterable.com/api/lists/getUsers?listId=1000", body="one@d1.com\ntwo@d1.com\nthree@d1.com") responses.get("https://api.iterable.com/api/lists/getUsers?listId=2000", body="one@d1.com\ntwo@d1.com\nthree@d1.com") responses.get("https://api.iterable.com/api/lists/getUsers?listId=3000", json=generic_error2, status=500) responses.get("https://api.iterable.com/api/lists/getUsers?listId=3000", body="one@d2.com\ntwo@d2.com\nthree@d2.com") expected_records = [ + {"email": "one@d1.com", "listId": 1000}, + {"email": "two@d1.com", "listId": 1000}, + {"email": "three@d1.com", "listId": 1000}, {"email": "one@d1.com", "listId": 2000}, {"email": "two@d1.com", "listId": 2000}, {"email": "three@d1.com", "listId": 2000}, @@ -214,57 +74,111 @@ def test_listuser_stream_keep_working_on_500(): {"email": "two@d2.com", "listId": 3000}, {"email": "three@d2.com", "listId": 3000}, ] - - records = list(read_full_refresh(users_stream)) + stream_slices = [ + StreamSlice(partition={"list_id": 1000}, cursor_slice={}), + StreamSlice(partition={"list_id": 2000}, cursor_slice={}), + StreamSlice(partition={"list_id": 3000}, cursor_slice={}), + ] + records = [] + for stream_slice in stream_slices: + slice_records = list(map(lambda record: record.data, stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice=stream_slice))) + records.extend(slice_records) assert records == expected_records @responses.activate -def test_events_read_full_refresh(): - stream = Events(authenticator=None) +def test_events_read_full_refresh(config): + stream = next(filter(lambda x: x.name == "events", SourceIterable().streams(config=config))) + responses.get("https://api.iterable.com/api/lists", json={"lists": [{"id": 1}]}) - responses.get("https://api.iterable.com/api/lists/getUsers?listId=1", body="user1\nuser2\nuser3\nuser4\nuser5\nuser6") + responses.get("https://api.iterable.com/api/lists/getUsers?listId=1", body="user1\nuser2\nuser3\nuser4") def get_body(emails): return "\n".join([json.dumps({"email": email}) for email in emails]) + "\n" - msg_error = "An error occurred. Please try again later. If problem persists, please contact your CSM" - generic_error1 = {"msg": msg_error, "code": "GenericError"} - generic_error2 = {"msg": msg_error, "code": "Generic Error"} - responses.get("https://api.iterable.com/api/export/userEvents?email=user1&includeCustomEvents=true", body=get_body(["user1"])) - - responses.get("https://api.iterable.com/api/export/userEvents?email=user2&includeCustomEvents=true", json=generic_error1, status=500) responses.get("https://api.iterable.com/api/export/userEvents?email=user2&includeCustomEvents=true", body=get_body(["user2"])) - responses.get("https://api.iterable.com/api/export/userEvents?email=user3&includeCustomEvents=true", body=get_body(["user3"])) + responses.get("https://api.iterable.com/api/export/userEvents?email=user4&includeCustomEvents=true", body=get_body(["user4"])) - responses.get("https://api.iterable.com/api/export/userEvents?email=user4&includeCustomEvents=true", json=generic_error1, status=500) + stream_slices = [ + StreamSlice(partition={'email': 'user1', 'parent_slice': {'list_id': 111111, 'parent_slice': {}}}, cursor_slice={}), + StreamSlice(partition={'email': 'user2', 'parent_slice': {'list_id': 111111, 'parent_slice': {}}}, cursor_slice={}), + StreamSlice(partition={'email': 'user3', 'parent_slice': {'list_id': 111111, 'parent_slice': {}}}, cursor_slice={}), + StreamSlice(partition={'email': 'user4', 'parent_slice': {'list_id': 111111, 'parent_slice': {}}}, cursor_slice={}), + ] - responses.get("https://api.iterable.com/api/export/userEvents?email=user5&includeCustomEvents=true", json=generic_error2, status=500) - responses.get("https://api.iterable.com/api/export/userEvents?email=user5&includeCustomEvents=true", json=generic_error2, status=500) - responses.get("https://api.iterable.com/api/export/userEvents?email=user5&includeCustomEvents=true", body=get_body(["user5"])) + records = [] + for stream_slice in stream_slices: + slice_records = list(map(lambda record: record.data, stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice=stream_slice))) + records.extend(slice_records) - m = responses.get( - "https://api.iterable.com/api/export/userEvents?email=user6&includeCustomEvents=true", json=generic_error2, status=500 - ) + assert [r["email"] for r in records] == ["user1", "user2", "user3", "user4"] - records = list(read_full_refresh(stream)) - assert [r["email"] for r in records] == ["user1", "user2", "user3", "user5"] - assert m.call_count == 3 +@responses.activate +def test_campaigns_metric_slicer(config): + responses.get("https://api.iterable.com/api/campaigns", json={"campaigns": [{"id": 1}]}) + responses.get("https://api.iterable.com/api/campaigns/metrics?campaignId=1&startDateTime=2019-10-10T00%3A00%3A00", json={"id": 1, "Total Email Sends": 1}) -def test_retry_read_timeout(): - stream = Lists(authenticator=None) - stream._session.send = MagicMock(side_effect=requests.exceptions.ReadTimeout) - with pytest.raises(requests.exceptions.ReadTimeout): - list(read_full_refresh(stream)) - stream._session.send.call_args[1] == {"timeout": (60, 300)} - assert stream._session.send.call_count == stream.max_retries + 1 + stream = CampaignsMetrics(authenticator=None, start_date="2019-10-10T00:00:00") + expected = [{'campaign_ids': [1]}] - stream = Campaigns(authenticator=None) - stream._session.send = MagicMock(side_effect=requests.exceptions.ConnectionError) - with pytest.raises(requests.exceptions.ConnectionError): - list(read_full_refresh(stream)) - stream._session.send.call_args[1] == {"timeout": (60, 300)} - assert stream._session.send.call_count == stream.max_retries + 1 + assert list(stream.stream_slices(sync_mode=SyncMode.full_refresh)) == expected + + +def test_templates_parse_response(): + stream = Templates(authenticator=None, start_date="2019-10-10T00:00:00") + with responses.RequestsMock() as rsps: + rsps.add( + responses.GET, + "https://api.iterable.com/api/1/foobar", + json={"templates": [{"createdAt": "2022-01-01", "id": 1}]}, + status=200, + content_type="application/json", + ) + resp = requests.get("https://api.iterable.com/api/1/foobar") + + records = stream.parse_response(response=resp) + + assert list(records) == [{"id": 1, "createdAt": dateutil_parse("2022-01-01")}] + + +@pytest.mark.parametrize( + "stream,date,slice,expected_path", + [ + (Campaigns, False, {}, "campaigns"), + (CampaignsMetrics, True, {}, "campaigns/metrics"), + (Templates, True, {}, "templates"), + ], +) +def test_path(config, stream, date, slice, expected_path): + args = {"authenticator": None} + if date: + args["start_date"] = "2019-10-10T00:00:00" + + assert stream(**args).path(stream_slice=slice) == expected_path + + +def test_campaigns_metrics_parse_response(): + + stream = CampaignsMetrics(authenticator=None, start_date="2019-10-10T00:00:00") + with responses.RequestsMock() as rsps: + rsps.add( + responses.GET, + "https://api.iterable.com/lists/getUsers?listId=100", + body="""a,b,c,d +1, 2,, 3 +6,, 1, 2 +""", + status=200, + content_type="application/json", + ) + resp = requests.get("https://api.iterable.com/lists/getUsers?listId=100") + + records = stream.parse_response(response=resp) + + assert list(records) == [ + {"data": {"a": 1, "b": 2, "d": 3}}, + {"data": {"a": 6, "c": 1, "d": 2}}, + ] diff --git a/airbyte-integrations/connectors/source-jira/metadata.yaml b/airbyte-integrations/connectors/source-jira/metadata.yaml index efe78ce01bf08..5cb6c743a3c20 100644 --- a/airbyte-integrations/connectors/source-jira/metadata.yaml +++ b/airbyte-integrations/connectors/source-jira/metadata.yaml @@ -10,13 +10,13 @@ data: connectorSubtype: api connectorType: source definitionId: 68e63de2-bb83-4c7e-93fa-a8a9051e3993 - dockerImageTag: 1.1.0 + dockerImageTag: 1.2.0 dockerRepository: airbyte/source-jira documentationUrl: https://docs.airbyte.com/integrations/sources/jira githubIssueLabel: source-jira icon: jira.svg license: MIT - maxSecondsBetweenMessages: 21600 + maxSecondsBetweenMessages: 5400 name: Jira remoteRegistries: pypi: diff --git a/airbyte-integrations/connectors/source-jira/poetry.lock b/airbyte-integrations/connectors/source-jira/poetry.lock index 41a088f72c9bb..049e52da98660 100644 --- a/airbyte-integrations/connectors/source-jira/poetry.lock +++ b/airbyte-integrations/connectors/source-jira/poetry.lock @@ -1,14 +1,14 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.59.0" +version = "0.72.1" description = "A framework for writing Airbyte Connectors." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte-cdk-0.59.0.tar.gz", hash = "sha256:2f7bc07556cc7f42f0daf41d09be08fd22102864d087a27c8999f6f13fe67aad"}, - {file = "airbyte_cdk-0.59.0-py3-none-any.whl", hash = "sha256:94c561c053b8be3a66bfefe420812ced9237403441249408e2af5445214a6f7b"}, + {file = "airbyte-cdk-0.72.1.tar.gz", hash = "sha256:1dbd0a11f3784cfdd5afa9f40315c9a6123e803be91f9f861642a78e7ee14cd9"}, + {file = "airbyte_cdk-0.72.1-py3-none-any.whl", hash = "sha256:849077805442286de99f589ecba4be82491a3d9d3f516ce1a8b0cbaf303db9a4"}, ] [package.dependencies] @@ -32,8 +32,8 @@ requests-cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] @@ -104,13 +104,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -467,13 +467,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -702,13 +702,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -750,7 +750,6 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -808,13 +807,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -826,15 +825,15 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" @@ -877,19 +876,19 @@ tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asy [[package]] name = "setuptools" -version = "69.1.0" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -915,24 +914,24 @@ files = [ [[package]] name = "types-toml" -version = "0.10.8.7" +version = "0.10.8.20240310" description = "Typing stubs for toml" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "types-toml-0.10.8.7.tar.gz", hash = "sha256:58b0781c681e671ff0b5c0319309910689f4ab40e8a2431e205d70c94bb6efb1"}, - {file = "types_toml-0.10.8.7-py3-none-any.whl", hash = "sha256:61951da6ad410794c97bec035d59376ce1cbf4453dc9b6f90477e81e4442d631"}, + {file = "types-toml-0.10.8.20240310.tar.gz", hash = "sha256:3d41501302972436a6b8b239c850b26689657e25281b48ff0ec06345b8830331"}, + {file = "types_toml-0.10.8.20240310-py3-none-any.whl", hash = "sha256:627b47775d25fa29977d9c70dc0cbab3f314f32c8d8d0c012f2ef5de7aaec05d"}, ] [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -951,13 +950,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1062,4 +1061,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "044624d19c85475b787665da59cf1f03c423991291bd66013076f5cfb84d4e22" +content-hash = "3f8e072ee654503bd0291f7d9ecf2085ea1f5ec74ada1aa41235c395210ccd8d" diff --git a/airbyte-integrations/connectors/source-jira/pyproject.toml b/airbyte-integrations/connectors/source-jira/pyproject.toml index a8b5cd007acab..8739efae8b4e3 100644 --- a/airbyte-integrations/connectors/source-jira/pyproject.toml +++ b/airbyte-integrations/connectors/source-jira/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "1.1.0" +version = "1.2.0" name = "source-jira" description = "Source implementation for Jira." authors = [ "Airbyte ",] @@ -17,7 +17,7 @@ include = "source_jira" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "==0.59.0" +airbyte-cdk = "^0" [tool.poetry.scripts] source-jira = "source_jira.run:run" diff --git a/airbyte-integrations/connectors/source-klaviyo/.coveragerc b/airbyte-integrations/connectors/source-klaviyo/.coveragerc new file mode 100644 index 0000000000000..f75d1e84fd28f --- /dev/null +++ b/airbyte-integrations/connectors/source-klaviyo/.coveragerc @@ -0,0 +1,3 @@ +[run] +omit = + source_klaviyo/run.py diff --git a/airbyte-integrations/connectors/source-klaviyo/README.md b/airbyte-integrations/connectors/source-klaviyo/README.md index fa1d8bad18916..76b9e4d8d6e2d 100644 --- a/airbyte-integrations/connectors/source-klaviyo/README.md +++ b/airbyte-integrations/connectors/source-klaviyo/README.md @@ -30,7 +30,7 @@ See `sample_files/sample_config.json` for a sample config file. poetry run source-klaviyo spec poetry run source-klaviyo check --config secrets/config.json poetry run source-klaviyo discover --config secrets/config.json -poetry run source-klaviyo read --config secrets/config.json --catalog sample_files/configured_catalog.json +poetry run source-klaviyo read --config secrets/config.json --catalog integration_tests/configured_catalog.json ``` ### Running unit tests diff --git a/airbyte-integrations/connectors/source-klaviyo/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-klaviyo/integration_tests/abnormal_state.json index 5c4fe027f477f..d03d5ef58ed87 100644 --- a/airbyte-integrations/connectors/source-klaviyo/integration_tests/abnormal_state.json +++ b/airbyte-integrations/connectors/source-klaviyo/integration_tests/abnormal_state.json @@ -2,67 +2,95 @@ { "type": "STREAM", "stream": { - "stream_state": { "datetime": "2120-10-10T00:00:00Z" }, - "stream_descriptor": { "name": "events" } + "stream_state": { + "datetime": "2120-10-10 00:00:00+00:00" + }, + "stream_descriptor": { + "name": "events" + } } }, { "type": "STREAM", "stream": { - "stream_state": { "updated": "2120-10-10T00:00:00Z" }, - "stream_descriptor": { "name": "global_exclusions" } + "stream_state": { + "updated": "2120-10-10T00:00:00+00:00" + }, + "stream_descriptor": { + "name": "global_exclusions" + } } }, { "type": "STREAM", "stream": { "stream_state": { - "updated": "2120-10-10 00:00:00", + "updated": "2120-10-10T00:00:00+00:00", "archived": { - "updated": "2120-10-10 00:00:00" + "updated": "2120-10-10T00:00:00+00:00" } }, - "stream_descriptor": { "name": "flows" } + "stream_descriptor": { + "name": "flows" + } } }, { "type": "STREAM", "stream": { - "stream_state": { "updated": "2120-10-10 00:00:00" }, - "stream_descriptor": { "name": "metrics" } + "stream_state": { + "updated": "2120-10-10T00:00:00+00:00" + }, + "stream_descriptor": { + "name": "metrics" + } } }, { "type": "STREAM", "stream": { - "stream_state": { "updated": "2120-10-10 00:00:00" }, - "stream_descriptor": { "name": "lists" } + "stream_state": { + "updated": "2120-10-10T00:00:00+00:00" + }, + "stream_descriptor": { + "name": "lists" + } } }, { "type": "STREAM", "stream": { "stream_state": { - "updated_at": "2120-10-10 00:00:00", + "updated_at": "2120-10-10T00:00:00+00:00", "archived": { - "updated_at": "2120-10-10 00:00:00" + "updated_at": "2120-10-10T00:00:00+00:00" } }, - "stream_descriptor": { "name": "campaigns" } + "stream_descriptor": { + "name": "campaigns" + } } }, { "type": "STREAM", "stream": { - "stream_state": { "updated": "2120-10-10 00:00:00" }, - "stream_descriptor": { "name": "profiles" } + "stream_state": { + "updated": "2120-10-10T00:00:00+00:00" + }, + "stream_descriptor": { + "name": "profiles" + } } }, { "type": "STREAM", "stream": { - "stream_state": { "updated": "2120-10-10 00:00:00" }, - "stream_descriptor": { "name": "email_templates" } + "stream_state": { + "updated": "2120-10-10T00:00:00+00:00" + }, + "stream_descriptor": { + "name": "email_templates" + } } } ] diff --git a/airbyte-integrations/connectors/source-klaviyo/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-klaviyo/integration_tests/configured_catalog.json index 7203e0bc0a6eb..0fdd48d9cc24e 100644 --- a/airbyte-integrations/connectors/source-klaviyo/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-klaviyo/integration_tests/configured_catalog.json @@ -4,14 +4,14 @@ "stream": { "name": "campaigns", "json_schema": {}, - "supported_sync_modes": ["full_refresh"], - "source_defined_cursor": null, - "default_cursor_field": null, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updated_at"], "source_defined_primary_key": [["id"]], "namespace": null }, - "sync_mode": "full_refresh", - "cursor_field": null, + "sync_mode": "incremental", + "cursor_field": ["updated_at"], "destination_sync_mode": "append", "primary_key": [["id"]] }, @@ -25,7 +25,7 @@ "source_defined_primary_key": [["id"]], "namespace": null }, - "sync_mode": "full_refresh", + "sync_mode": "incremental", "cursor_field": ["updated"], "destination_sync_mode": "append", "primary_key": [["id"]] @@ -36,11 +36,11 @@ "json_schema": {}, "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, - "default_cursor_field": ["timestamp"], + "default_cursor_field": ["datetime"], "source_defined_primary_key": [["id"]], "namespace": null }, - "sync_mode": "full_refresh", + "sync_mode": "incremental", "cursor_field": ["datetime"], "destination_sync_mode": "append", "primary_key": [["id"]] @@ -49,14 +49,14 @@ "stream": { "name": "global_exclusions", "json_schema": {}, - "supported_sync_modes": ["full_refresh"], - "source_defined_cursor": null, - "default_cursor_field": null, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updated"], "source_defined_primary_key": [["id"]], "namespace": null }, - "sync_mode": "full_refresh", - "cursor_field": null, + "sync_mode": "incremental", + "cursor_field": ["updated"], "destination_sync_mode": "append", "primary_key": [["id"]] }, @@ -64,14 +64,14 @@ "stream": { "name": "lists", "json_schema": {}, - "supported_sync_modes": ["full_refresh"], - "source_defined_cursor": null, - "default_cursor_field": null, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updated"], "source_defined_primary_key": [["id"]], "namespace": null }, - "sync_mode": "full_refresh", - "cursor_field": null, + "sync_mode": "incremental", + "cursor_field": ["updated"], "destination_sync_mode": "append", "primary_key": [["id"]] }, @@ -80,13 +80,13 @@ "name": "flows", "json_schema": {}, "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": null, - "default_cursor_field": null, + "source_defined_cursor": true, + "default_cursor_field": ["updated"], "source_defined_primary_key": [["id"]], "namespace": null }, - "sync_mode": "full_refresh", - "cursor_field": null, + "sync_mode": "incremental", + "cursor_field": ["updated"], "destination_sync_mode": "append", "primary_key": [["id"]] }, @@ -94,14 +94,14 @@ "stream": { "name": "metrics", "json_schema": {}, - "supported_sync_modes": ["full_refresh"], - "source_defined_cursor": null, - "default_cursor_field": null, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updated"], "source_defined_primary_key": [["id"]], "namespace": null }, - "sync_mode": "full_refresh", - "cursor_field": null, + "sync_mode": "incremental", + "cursor_field": ["updated"], "destination_sync_mode": "append", "primary_key": [["id"]] }, @@ -109,14 +109,14 @@ "stream": { "name": "email_templates", "json_schema": {}, - "supported_sync_modes": ["full_refresh"], - "source_defined_cursor": null, - "default_cursor_field": null, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updated"], "source_defined_primary_key": [["id"]], "namespace": null }, - "sync_mode": "full_refresh", - "cursor_field": null, + "sync_mode": "incremental", + "cursor_field": ["updated"], "destination_sync_mode": "append", "primary_key": [["id"]] } diff --git a/airbyte-integrations/connectors/source-klaviyo/metadata.yaml b/airbyte-integrations/connectors/source-klaviyo/metadata.yaml index 1c1d865c735fa..3eb246580232c 100644 --- a/airbyte-integrations/connectors/source-klaviyo/metadata.yaml +++ b/airbyte-integrations/connectors/source-klaviyo/metadata.yaml @@ -7,12 +7,13 @@ data: connectorType: source definitionId: 95e8cffd-b8c4-4039-968e-d32fb4a69bde connectorBuildOptions: - baseImage: docker.io/airbyte/python-connector-base:1.1.0@sha256:bd98f6505c6764b1b5f99d3aedc23dfc9e9af631a62533f60eb32b1d3dbab20c - dockerImageTag: 2.2.0 + baseImage: docker.io/airbyte/python-connector-base:1.2.0@sha256:c22a9d97464b69d6ef01898edf3f8612dc11614f05a84984451dde195f337db9 + dockerImageTag: 2.5.0 dockerRepository: airbyte/source-klaviyo githubIssueLabel: source-klaviyo icon: klaviyo.svg license: MIT + maxSecondsBetweenMessages: 60 name: Klaviyo remoteRegistries: pypi: @@ -33,16 +34,16 @@ data: - flows releases: breakingChanges: - 1.0.0: - message: In this release, for 'events' stream changed type of 'event_properties/items/quantity' field from integer to number. Users will need to refresh the source schema and reset events streams after upgrading. - upgradeDeadline: "2023-11-30" 2.0.0: message: In this release, streams 'campaigns', 'email_templates', 'events', 'flows', 'global_exclusions', 'lists', and 'metrics' are now pulling data using latest API which has a different schema. Users will need to refresh the source schemas and reset these streams after upgrading. upgradeDeadline: "2023-11-30" + 1.0.0: + message: In this release, for 'events' stream changed type of 'event_properties/items/quantity' field from integer to number. Users will need to refresh the source schema and reset events streams after upgrading. + upgradeDeadline: "2023-11-30" documentationUrl: https://docs.airbyte.com/integrations/sources/klaviyo tags: - language:python - - cdk:python + - cdk:low-code ab_internal: sl: 200 ql: 400 diff --git a/airbyte-integrations/connectors/source-klaviyo/poetry.lock b/airbyte-integrations/connectors/source-klaviyo/poetry.lock index 40442646b18a7..82f44a31082cf 100644 --- a/airbyte-integrations/connectors/source-klaviyo/poetry.lock +++ b/airbyte-integrations/connectors/source-klaviyo/poetry.lock @@ -1,51 +1,50 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.62.0" +version = "0.81.3" description = "A framework for writing Airbyte Connectors." optional = false -python-versions = ">=3.8" +python-versions = "<4.0,>=3.9" files = [ - {file = "airbyte-cdk-0.62.0.tar.gz", hash = "sha256:622f56bd7101493a74f11c33a45a31c251032333989996f137cac8370873c614"}, - {file = "airbyte_cdk-0.62.0-py3-none-any.whl", hash = "sha256:b21330a566b33dbdddde33243eb9855f086ad4272e3585ca626be1225451a3b8"}, + {file = "airbyte_cdk-0.81.3-py3-none-any.whl", hash = "sha256:c168acef484120f5b392cbf0c43bb8180d8596a0c87cfe416ac2e8e7fe1ab93a"}, + {file = "airbyte_cdk-0.81.3.tar.gz", hash = "sha256:e91e7ca66b3f4d5714b44304ff3cb1bb9b703933cf6b38d32e7f06384e9e1108"}, ] [package.dependencies] -airbyte-protocol-models = "0.5.1" +airbyte-protocol-models = "*" backoff = "*" cachetools = "*" -Deprecated = ">=1.2,<2.0" +Deprecated = ">=1.2,<1.3" dpath = ">=2.0.1,<2.1.0" genson = "1.2.2" isodate = ">=0.6.1,<0.7.0" Jinja2 = ">=3.1.2,<3.2.0" -jsonref = ">=0.2,<1.0" +jsonref = ">=0.2,<0.3" jsonschema = ">=3.2.0,<3.3.0" pendulum = "<3.0.0" pydantic = ">=1.10.8,<2.0.0" pyrate-limiter = ">=3.1.0,<3.2.0" python-dateutil = "*" -PyYAML = ">=6.0.1" +PyYAML = ">=6.0.1,<7.0.0" requests = "*" -requests-cache = "*" +requests_cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] [[package]] name = "airbyte-protocol-models" -version = "0.5.1" +version = "0.9.0" description = "Declares the Airbyte Protocol." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, - {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, + {file = "airbyte_protocol_models-0.9.0-py3-none-any.whl", hash = "sha256:e972e140b5efd1edad5a338bcae8fdee9fc12545caf2c321e0f61b151c163a9b"}, + {file = "airbyte_protocol_models-0.9.0.tar.gz", hash = "sha256:40b69c33df23fe82d7078e84beb123bd604480e4d73cb277a890fcc92aedc8d2"}, ] [package.dependencies] @@ -104,13 +103,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -313,13 +312,13 @@ files = [ [[package]] name = "idna" -version = "3.6" +version = "3.7" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.5" files = [ - {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, - {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, ] [[package]] @@ -366,13 +365,13 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jsonref" -version = "0.3.0" -description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." +version = "0.2" +description = "An implementation of JSON Reference for Python" optional = false -python-versions = ">=3.3,<4.0" +python-versions = "*" files = [ - {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, - {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, ] [[package]] @@ -467,13 +466,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -553,47 +552,47 @@ files = [ [[package]] name = "pydantic" -version = "1.10.14" +version = "1.10.15" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, - {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, - {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, - {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, - {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, - {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, - {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, - {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, - {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, - {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, - {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, - {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, - {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, - {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, - {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, - {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, - {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, - {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, - {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, - {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, - {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, - {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, - {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, - {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, - {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, - {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, - {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, - {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, - {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, - {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, - {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, - {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, - {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, - {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, - {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, - {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50f1666a9940d3d68683c9d96e39640f709d7a72ff8702987dab1761036206bb"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82790d4753ee5d00739d6cb5cf56bceb186d9d6ce134aca3ba7befb1eedbc2c8"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d207d5b87f6cbefbdb1198154292faee8017d7495a54ae58db06762004500d00"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e49db944fad339b2ccb80128ffd3f8af076f9f287197a480bf1e4ca053a866f0"}, + {file = "pydantic-1.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:d3b5c4cbd0c9cb61bbbb19ce335e1f8ab87a811f6d589ed52b0254cf585d709c"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3d5731a120752248844676bf92f25a12f6e45425e63ce22e0849297a093b5b0"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c365ad9c394f9eeffcb30a82f4246c0006417f03a7c0f8315d6211f25f7cb654"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3287e1614393119c67bd4404f46e33ae3be3ed4cd10360b48d0a4459f420c6a3"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be51dd2c8596b25fe43c0a4a59c2bee4f18d88efb8031188f9e7ddc6b469cf44"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6a51a1dd4aa7b3f1317f65493a182d3cff708385327c1c82c81e4a9d6d65b2e4"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4e316e54b5775d1eb59187f9290aeb38acf620e10f7fd2f776d97bb788199e53"}, + {file = "pydantic-1.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:0d142fa1b8f2f0ae11ddd5e3e317dcac060b951d605fda26ca9b234b92214986"}, + {file = "pydantic-1.10.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7ea210336b891f5ea334f8fc9f8f862b87acd5d4a0cbc9e3e208e7aa1775dabf"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3453685ccd7140715e05f2193d64030101eaad26076fad4e246c1cc97e1bb30d"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bea1f03b8d4e8e86702c918ccfd5d947ac268f0f0cc6ed71782e4b09353b26f"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:af9850d98fc21e5bc24ea9e35dd80a29faf6462c608728a110c0a30b595e58b7"}, + {file = "pydantic-1.10.15-cp37-cp37m-win_amd64.whl", hash = "sha256:d31ee5b14a82c9afe2bd26aaa405293d4237d0591527d9129ce36e58f19f95c1"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5e09c19df304b8123938dc3c53d3d3be6ec74b9d7d0d80f4f4b5432ae16c2022"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7ac9237cd62947db00a0d16acf2f3e00d1ae9d3bd602b9c415f93e7a9fc10528"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:584f2d4c98ffec420e02305cf675857bae03c9d617fcfdc34946b1160213a948"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbc6989fad0c030bd70a0b6f626f98a862224bc2b1e36bfc531ea2facc0a340c"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d573082c6ef99336f2cb5b667b781d2f776d4af311574fb53d908517ba523c22"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6bd7030c9abc80134087d8b6e7aa957e43d35714daa116aced57269a445b8f7b"}, + {file = "pydantic-1.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:3350f527bb04138f8aff932dc828f154847fbdc7a1a44c240fbfff1b57f49a12"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51d405b42f1b86703555797270e4970a9f9bd7953f3990142e69d1037f9d9e51"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a980a77c52723b0dc56640ced396b73a024d4b74f02bcb2d21dbbac1debbe9d0"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f1a1fb467d3f49e1708a3f632b11c69fccb4e748a325d5a491ddc7b5d22383"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:676ed48f2c5bbad835f1a8ed8a6d44c1cd5a21121116d2ac40bd1cd3619746ed"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92229f73400b80c13afcd050687f4d7e88de9234d74b27e6728aa689abcf58cc"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2746189100c646682eff0bce95efa7d2e203420d8e1c613dc0c6b4c1d9c1fde4"}, + {file = "pydantic-1.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:394f08750bd8eaad714718812e7fab615f873b3cdd0b9d84e76e51ef3b50b6b7"}, + {file = "pydantic-1.10.15-py3-none-any.whl", hash = "sha256:28e552a060ba2740d0d2aabe35162652c1459a0b9069fe0db7f4ee0e18e74d58"}, + {file = "pydantic-1.10.15.tar.gz", hash = "sha256:ca832e124eda231a60a041da4f013e3ff24949d94a01154b137fc2f2a43c3ffb"}, ] [package.dependencies] @@ -685,30 +684,30 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xm [[package]] name = "pytest-mock" -version = "3.12.0" +version = "3.14.0" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, - {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, ] [package.dependencies] -pytest = ">=5.0" +pytest = ">=6.2.5" [package.extras] dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -808,13 +807,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -826,50 +825,48 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" -version = "1.11.0" +version = "1.12.1" description = "Mock out responses from the requests package" optional = false -python-versions = "*" +python-versions = ">=3.5" files = [ - {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"}, - {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"}, + {file = "requests-mock-1.12.1.tar.gz", hash = "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401"}, + {file = "requests_mock-1.12.1-py2.py3-none-any.whl", hash = "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563"}, ] [package.dependencies] -requests = ">=2.3,<3" -six = "*" +requests = ">=2.22,<3" [package.extras] fixture = ["fixtures"] -test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] [[package]] name = "setuptools" -version = "69.0.3" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, - {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -895,13 +892,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.11.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, + {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, ] [[package]] @@ -920,13 +917,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1031,4 +1028,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "d6958c9aa0a930adeb8df2c2a407dd6c5ea9495429478ef15b6fa9ff99a17471" +content-hash = "4bc76f477e3530839ebebee8e7f83b8310a760100ea5f0797a1a018ef829bfc7" diff --git a/airbyte-integrations/connectors/source-klaviyo/pyproject.toml b/airbyte-integrations/connectors/source-klaviyo/pyproject.toml index e99b8dd08e068..ba510cf77cb7d 100644 --- a/airbyte-integrations/connectors/source-klaviyo/pyproject.toml +++ b/airbyte-integrations/connectors/source-klaviyo/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "2.2.0" +version = "2.5.0" name = "source-klaviyo" description = "Source implementation for Klaviyo." authors = [ "Airbyte ",] @@ -17,7 +17,7 @@ include = "source_klaviyo" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "==0.62.0" +airbyte-cdk = "^0" [tool.poetry.scripts] source-klaviyo = "source_klaviyo.run:run" diff --git a/airbyte-integrations/connectors/source-klaviyo/source_klaviyo/components/datetime_based_cursor.py b/airbyte-integrations/connectors/source-klaviyo/source_klaviyo/components/datetime_based_cursor.py new file mode 100644 index 0000000000000..498337b61fd53 --- /dev/null +++ b/airbyte-integrations/connectors/source-klaviyo/source_klaviyo/components/datetime_based_cursor.py @@ -0,0 +1,27 @@ +# +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. +# + + +from dataclasses import dataclass +from typing import Any, Mapping, Optional + +from airbyte_cdk.sources.declarative.incremental import DatetimeBasedCursor +from airbyte_cdk.sources.declarative.types import StreamSlice, StreamState + + +@dataclass +class KlaviyoDatetimeBasedCursor(DatetimeBasedCursor): + def get_request_params( + self, + *, + stream_state: Optional[StreamState] = None, + stream_slice: Optional[StreamSlice] = None, + next_page_token: Optional[Mapping[str, Any]] = None, + ) -> Mapping[str, Any]: + if not stream_slice: + return {} + + field = self._cursor_field.eval(self.config) + value = stream_slice.get(self._partition_field_start.eval(self.config)) + return {"filter": f"greater-than({field},{value})", "sort": field} diff --git a/airbyte-integrations/connectors/source-klaviyo/source_klaviyo/manifest.yaml b/airbyte-integrations/connectors/source-klaviyo/source_klaviyo/manifest.yaml new file mode 100644 index 0000000000000..c02da2dc905ad --- /dev/null +++ b/airbyte-integrations/connectors/source-klaviyo/source_klaviyo/manifest.yaml @@ -0,0 +1,194 @@ +version: 0.81.3 +type: DeclarativeSource + +definitions: + # Authenticator + authenticator: + type: ApiKeyAuthenticator + api_token: "Klaviyo-API-Key {{ config['api_key'] }}" + inject_into: + type: RequestOption + field_name: "Authorization" + inject_into: header + + # Requester + requester: + type: HttpRequester + url_base: "https://a.klaviyo.com/api/" + authenticator: "#/definitions/authenticator" + http_method: GET + error_handler: + type: DefaultErrorHandler + response_filters: + - type: HttpResponseFilter + action: FAIL + http_codes: [401, 403] + error_message: Please provide a valid API key and make sure it has permissions to read specified streams. + request_headers: + Accept: "application/json" + Revision: "2023-10-15" + + # Selector + selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: ["data"] + + # Paginator + cursor_pagination_strategy: + type: CursorPagination + cursor_value: "{{ response.get('links', {}).get('next') }}" + + paginator: + type: DefaultPaginator + pagination_strategy: "#/definitions/cursor_pagination_strategy" + page_token_option: + type: RequestPath + + # Retrievers + base_retriever: + type: SimpleRetriever + record_selector: "#/definitions/selector" + requester: "#/definitions/requester" + paginator: "#/definitions/paginator" + + semi_incremental_retriever: + $ref: "#/definitions/base_retriever" + record_selector: + $ref: "#/definitions/selector" + record_filter: + type: RecordFilter + condition: | + {% set starting_point = stream_state.get('updated', config.get('start_date')) %} + {{ starting_point and record.get('attributes', {}).get('updated') > starting_point or not starting_point }} + + profiles_retriever: + $ref: "#/definitions/base_retriever" + paginator: + $ref: "#/definitions/paginator" + pagination_strategy: + $ref: "#/definitions/cursor_pagination_strategy" + page_size: 100 + page_size_option: + type: RequestOption + field_name: "page[size]" + inject_into: request_parameter + requester: + $ref: "#/definitions/requester" + request_headers: + Accept: "application/json" + Revision: "2023-02-22" + request_parameters: + "additional-fields[profile]": "predictive_analytics" + + # Base streams + base_stream: + type: DeclarativeStream + primary_key: "id" + transformations: + - type: AddFields + fields: + - type: AddedFieldDefinition + path: ["updated"] + value: "{{ record.get('attributes', {}).get('updated') }}" + + base_incremental_stream: + $ref: "#/definitions/base_stream" + retriever: "#/definitions/base_retriever" + incremental_sync: + type: CustomIncrementalSync + class_name: source_klaviyo.components.datetime_based_cursor.KlaviyoDatetimeBasedCursor + cursor_field: "{{ parameters.get('cursor_field', 'updated') }}" + start_datetime: "{{ config.get('start_date', '2012-01-01T00:00:00Z') }}" + datetime_format: "%Y-%m-%dT%H:%M:%S%z" + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%S.%f%z" + - "%Y-%m-%dT%H:%M:%S%z" + - "%Y-%m-%d %H:%M:%S%z" + start_time_option: + type: RequestOption + field_name: "{{ parameters.get('cursor_field', 'updated') }}" + inject_into: request_parameter + + base_semi_incremental_stream: + $ref: "#/definitions/base_stream" + retriever: "#/definitions/semi_incremental_retriever" + incremental_sync: + type: DatetimeBasedCursor + cursor_field: "updated" + datetime_format: "%Y-%m-%dT%H:%M:%S%z" + start_datetime: "{{ config.get('start_date', '2012-01-01T00:00:00Z') }}" + + # Incremental streams + profiles_stream: + # Docs: https://developers.klaviyo.com/en/v2023-02-22/reference/get_profiles + name: "profiles" + $ref: "#/definitions/base_incremental_stream" + retriever: "#/definitions/profiles_retriever" + $parameters: + path: "profiles" + + global_exclusions_stream: + # Docs: https://developers.klaviyo.com/en/v2023-02-22/reference/get_profiles + name: "global_exclusions" + $ref: "#/definitions/profiles_stream" + retriever: + $ref: "#/definitions/profiles_retriever" + record_selector: + $ref: "#/definitions/selector" + record_filter: + type: RecordFilter + condition: "{{ record['attributes']['subscriptions']['email']['marketing']['suppressions'] }}" + + events_stream: + # Docs: https://developers.klaviyo.com/en/reference/get_events + name: "events" + $ref: "#/definitions/base_incremental_stream" + transformations: + - type: AddFields + fields: + - type: AddedFieldDefinition + path: ["datetime"] + value: "{{ record.get('attributes', {}).get('datetime') }}" + $parameters: + path: "events" + cursor_field: "datetime" + + email_templates_stream: + # Docs: https://developers.klaviyo.com/en/reference/get_templates + name: "email_templates" + $ref: "#/definitions/base_incremental_stream" + $parameters: + path: "templates" + + # Semi-Incremental streams + metrics_stream: + # Docs: https://developers.klaviyo.com/en/reference/get_metrics + name: "metrics" + $ref: "#/definitions/base_semi_incremental_stream" + $parameters: + path: "metrics" + + lists_stream: + # Docs: https://developers.klaviyo.com/en/reference/get_lists + name: "lists" + $ref: "#/definitions/base_semi_incremental_stream" + $parameters: + path: "lists" + +streams: + # Incremental streams + - "#/definitions/profiles_stream" + - "#/definitions/global_exclusions_stream" + - "#/definitions/events_stream" + - "#/definitions/email_templates_stream" + + # Semi-Incremental streams + - "#/definitions/metrics_stream" + - "#/definitions/lists_stream" + +check: + type: CheckStream + stream_names: + - metrics diff --git a/airbyte-integrations/connectors/source-klaviyo/source_klaviyo/schemas/campaigns.json b/airbyte-integrations/connectors/source-klaviyo/source_klaviyo/schemas/campaigns.json index d4ac3b074b1a3..6e142c87f8c05 100644 --- a/airbyte-integrations/connectors/source-klaviyo/source_klaviyo/schemas/campaigns.json +++ b/airbyte-integrations/connectors/source-klaviyo/source_klaviyo/schemas/campaigns.json @@ -17,16 +17,18 @@ "audiences": { "type": ["null", "object"], "additionalProperties": true, - "included": { - "type": ["null", "array"], - "items": { - "type": ["null", "string"] - } - }, - "excluded": { - "type": ["null", "array"], - "items": { - "type": ["null", "string"] + "properties": { + "included": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"] + } + }, + "excluded": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"] + } } } }, diff --git a/airbyte-integrations/connectors/source-klaviyo/source_klaviyo/source.py b/airbyte-integrations/connectors/source-klaviyo/source_klaviyo/source.py index 5647ca33ea676..a405efea32544 100644 --- a/airbyte-integrations/connectors/source-klaviyo/source_klaviyo/source.py +++ b/airbyte-integrations/connectors/source-klaviyo/source_klaviyo/source.py @@ -2,62 +2,29 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # -import re -from http import HTTPStatus -from typing import Any, List, Mapping, Tuple -from airbyte_cdk.models import SyncMode -from airbyte_cdk.sources import AbstractSource +from typing import Any, List, Mapping + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource from airbyte_cdk.sources.streams import Stream -from requests.exceptions import HTTPError -from source_klaviyo.streams import Campaigns, EmailTemplates, Events, Flows, GlobalExclusions, Lists, Metrics, Profiles +from source_klaviyo.streams import Campaigns, Flows -class SourceKlaviyo(AbstractSource): - def check_connection(self, logger, config: Mapping[str, Any]) -> Tuple[bool, Any]: - """Connection check to validate that the user-provided config can be used to connect to the underlying API - :param config: the user-input config object conforming to the connector's spec.json - :param logger: logger object - :return Tuple[bool, Any]: (True, None) if the input config can be used to connect to the API successfully, (False, error) otherwise. - """ - try: - # we use metrics endpoint because it never returns an error - _ = list(Metrics(api_key=config["api_key"]).read_records(sync_mode=SyncMode.full_refresh)) - except HTTPError as e: - if e.response.status_code in (HTTPStatus.FORBIDDEN, HTTPStatus.UNAUTHORIZED): - message = "Please provide a valid API key and make sure it has permissions to read specified streams." - else: - message = "Unable to connect to Klaviyo API with provided credentials." - return False, message - except Exception as e: - original_error_message = repr(e) - - # Regular expression pattern to match the API key - pattern = r"api_key=\b\w+\b" - - # Remove the API key from the error message - error_message = re.sub(pattern, "api_key=***", original_error_message) - - return False, error_message - return True, None +class SourceKlaviyo(YamlDeclarativeSource): + def __init__(self) -> None: + super().__init__(**{"path_to_yaml": "manifest.yaml"}) def streams(self, config: Mapping[str, Any]) -> List[Stream]: """ Discovery method, returns available streams :param config: A Mapping of the user input configuration as defined in the connector spec. """ + api_key = config["api_key"] start_date = config.get("start_date") - return [ - Campaigns(api_key=api_key, start_date=start_date), - Events(api_key=api_key, start_date=start_date), - GlobalExclusions(api_key=api_key, start_date=start_date), - Lists(api_key=api_key, start_date=start_date), - Metrics(api_key=api_key, start_date=start_date), - Flows(api_key=api_key, start_date=start_date), - EmailTemplates(api_key=api_key, start_date=start_date), - Profiles(api_key=api_key, start_date=start_date), - ] + streams = super().streams(config) + streams.extend([Campaigns(api_key=api_key, start_date=start_date), Flows(api_key=api_key, start_date=start_date)]) + return streams def continue_sync_on_stream_failure(self) -> bool: return True diff --git a/airbyte-integrations/connectors/source-klaviyo/source_klaviyo/streams.py b/airbyte-integrations/connectors/source-klaviyo/source_klaviyo/streams.py index 9f1830ddfe956..348e179e3bbb2 100644 --- a/airbyte-integrations/connectors/source-klaviyo/source_klaviyo/streams.py +++ b/airbyte-integrations/connectors/source-klaviyo/source_klaviyo/streams.py @@ -2,17 +2,17 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # + import urllib.parse from abc import ABC, abstractmethod from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Union import pendulum -import requests from airbyte_cdk.models import SyncMode from airbyte_cdk.sources.streams.availability_strategy import AvailabilityStrategy from airbyte_cdk.sources.streams.core import StreamData from airbyte_cdk.sources.streams.http import HttpStream -from airbyte_cdk.sources.utils.transform import TransformConfig, TypeTransformer +from requests import Response from .availability_strategy import KlaviyoAvailabilityStrategy from .exceptions import KlaviyoBackoffError @@ -26,7 +26,7 @@ class KlaviyoStream(HttpStream, ABC): page_size = None api_revision = "2023-10-15" - def __init__(self, api_key: str, start_date: Optional[str] = None, **kwargs): + def __init__(self, api_key: str, start_date: Optional[str] = None, **kwargs: Any) -> None: super().__init__(**kwargs) self._api_key = api_key self._start_ts = start_date @@ -42,8 +42,10 @@ def request_headers(self, **kwargs) -> Mapping[str, Any]: "Authorization": f"Klaviyo-API-Key {self._api_key}", } - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - """This method should return a Mapping (e.g: dict) containing whatever information required to make paginated requests. + def next_page_token(self, response: Response) -> Optional[Mapping[str, Any]]: + """ + This method should return a Mapping (e.g: dict) containing whatever information + required to make paginated requests. Klaviyo uses cursor-based pagination https://developers.klaviyo.com/en/reference/api_overview#pagination This method returns the params in the pre-constructed url nested in links[next] @@ -51,12 +53,11 @@ def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, decoded_response = response.json() - links = decoded_response.get("links", {}) - next = links.get("next") - if not next: + next_page_link = decoded_response.get("links", {}).get("next") + if not next_page_link: return None - next_url = urllib.parse.urlparse(next) + next_url = urllib.parse.urlparse(next_page_link) return {str(k): str(v) for (k, v) in urllib.parse.parse_qsl(next_url.query)} def request_params( @@ -71,8 +72,8 @@ def request_params( else: return {"page[size]": self.page_size} if self.page_size else {} - def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - """:return an iterable containing each record in the response""" + def parse_response(self, response: Response, **kwargs) -> Iterable[Mapping]: + """Return an iterable containing each record in the response""" response_json = response.json() for record in response_json.get("data", []): # API returns records in a container array "data" @@ -87,8 +88,9 @@ def map_record(self, record: MutableMapping[str, Any]) -> MutableMapping[str, An def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: """ - Override to determine the latest state after reading the latest record. This typically compared the cursor_field from the latest - record and the current state and picks the 'most' recent cursor. This is how a stream's state is determined. + Override to determine the latest state after reading the latest record. + This typically compared the cursor_field from the latest record and the current state and picks + the 'most' recent cursor. This is how a stream's state is determined. Required for incremental. """ @@ -99,7 +101,7 @@ def get_updated_state(self, current_stream_state: MutableMapping[str, Any], late current_stream_state[self.cursor_field] = latest_cursor.isoformat() return current_stream_state - def backoff_time(self, response: requests.Response) -> Optional[float]: + def backoff_time(self, response: Response) -> Optional[float]: if response.status_code == 429: retry_after = response.headers.get("Retry-After") retry_after = float(retry_after) if retry_after else None @@ -129,8 +131,9 @@ class IncrementalKlaviyoStream(KlaviyoStream, ABC): @abstractmethod def cursor_field(self) -> Union[str, List[str]]: """ - Override to return the cursor field used by this stream e.g: an API entity might always use created_at as the cursor field. This is - usually id or date based. This field's presence tells the framework this in an incremental stream. Required for incremental. + Override to return the cursor field used by this stream e.g: an API entity might always use + created_at as the cursor field. This is usually id or date based. This field's presence tells the framework + this in an incremental stream. Required for incremental. :return str: The name of the cursor field. """ @@ -161,36 +164,15 @@ def request_params( return params -class SemiIncrementalKlaviyoStream(KlaviyoStream, ABC): - """Base class for all streams that have a cursor field, but underlying API does not support either sorting or filtering""" - - @property - @abstractmethod - def cursor_field(self) -> Union[str, List[str]]: - """ - Override to return the cursor field used by this stream e.g: an API entity might always use created_at as the cursor field. This is - usually id or date based. This field's presence tells the framework this in an incremental stream. Required for incremental. - :return str: The name of the cursor field. - """ - - def read_records( - self, - sync_mode: SyncMode, - cursor_field: Optional[List[str]] = None, - stream_slice: Optional[Mapping[str, Any]] = None, - stream_state: Optional[Mapping[str, Any]] = None, - ) -> Iterable[StreamData]: - stream_state = stream_state or {} - starting_point = stream_state.get(self.cursor_field, self._start_ts) - for record in super().read_records( - sync_mode=sync_mode, cursor_field=cursor_field, stream_slice=stream_slice, stream_state=stream_state - ): - if starting_point and record[self.cursor_field] > starting_point or not starting_point: - yield record - - class ArchivedRecordsStream(IncrementalKlaviyoStream): - def __init__(self, path: str, cursor_field: str, start_date: Optional[str] = None, api_revision: Optional[str] = None, **kwargs): + def __init__( + self, + path: str, + cursor_field: str, + start_date: Optional[str] = None, + api_revision: Optional[str] = None, + **kwargs: Any, + ) -> None: super().__init__(start_date=start_date, **kwargs) self._path = path self._cursor_field = cursor_field @@ -224,7 +206,7 @@ class ArchivedRecordsMixin(IncrementalKlaviyoStream, ABC): """A mixin class which should be used when archived records need to be read""" @property - def archived_campaigns(self) -> ArchivedRecordsStream: + def archived_stream(self) -> ArchivedRecordsStream: return ArchivedRecordsStream(self.path(), self.cursor_field, self._start_ts, self.api_revision, api_key=self._api_key) def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: @@ -250,31 +232,7 @@ def read_records( stream_state: Optional[Mapping[str, Any]] = None, ) -> Iterable[StreamData]: yield from super().read_records(sync_mode, cursor_field, stream_slice, stream_state) - yield from self.archived_campaigns.read_records(sync_mode, cursor_field, stream_slice, stream_state) - - -class Profiles(IncrementalKlaviyoStream): - """Docs: https://developers.klaviyo.com/en/v2023-02-22/reference/get_profiles""" - - transformer: TypeTransformer = TypeTransformer(TransformConfig.DefaultSchemaNormalization) - - cursor_field = "updated" - api_revision = "2023-02-22" - page_size = 100 - state_checkpoint_interval = 100 # API can return maximum 100 records per page - - def path(self, *args, next_page_token: Optional[Mapping[str, Any]] = None, **kwargs) -> str: - return "profiles" - - def request_params( - self, - stream_state: Optional[Mapping[str, Any]], - stream_slice: Optional[Mapping[str, Any]] = None, - next_page_token: Optional[Mapping[str, Any]] = None, - ) -> MutableMapping[str, Any]: - params = super().request_params(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token) - params.update({"additional-fields[profile]": "predictive_analytics"}) - return params + yield from self.archived_stream.read_records(sync_mode, cursor_field, stream_slice, stream_state) class Campaigns(ArchivedRecordsMixin, IncrementalKlaviyoStream): @@ -287,48 +245,6 @@ def path(self, **kwargs) -> str: return "campaigns" -class Lists(SemiIncrementalKlaviyoStream): - """Docs: https://developers.klaviyo.com/en/reference/get_lists""" - - max_retries = 10 - cursor_field = "updated" - - def path(self, **kwargs) -> str: - return "lists" - - -class GlobalExclusions(Profiles): - """ - Docs: https://developers.klaviyo.com/en/v2023-02-22/reference/get_profiles - This stream takes data from 'profiles' endpoint, but suppressed records only - """ - - def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - for record in super().parse_response(response, **kwargs): - if not record["attributes"].get("subscriptions", {}).get("email", {}).get("marketing", {}).get("suppressions"): - continue - yield record - - -class Metrics(SemiIncrementalKlaviyoStream): - """Docs: https://developers.klaviyo.com/en/reference/get_metrics""" - - cursor_field = "updated" - - def path(self, **kwargs) -> str: - return "metrics" - - -class Events(IncrementalKlaviyoStream): - """Docs: https://developers.klaviyo.com/en/reference/get_events""" - - cursor_field = "datetime" - state_checkpoint_interval = 200 # API can return maximum 200 records per page - - def path(self, **kwargs) -> str: - return "events" - - class Flows(ArchivedRecordsMixin, IncrementalKlaviyoStream): """Docs: https://developers.klaviyo.com/en/reference/get_flows""" @@ -337,13 +253,3 @@ class Flows(ArchivedRecordsMixin, IncrementalKlaviyoStream): def path(self, **kwargs) -> str: return "flows" - - -class EmailTemplates(IncrementalKlaviyoStream): - """Docs: https://developers.klaviyo.com/en/reference/get_templates""" - - cursor_field = "updated" - state_checkpoint_interval = 10 # API can return maximum 10 records per page - - def path(self, **kwargs) -> str: - return "templates" diff --git a/airbyte-integrations/connectors/source-klaviyo/unit_tests/test_source.py b/airbyte-integrations/connectors/source-klaviyo/unit_tests/test_source.py index f8dfa207ad9d6..ded7d1ebb34d3 100644 --- a/airbyte-integrations/connectors/source-klaviyo/unit_tests/test_source.py +++ b/airbyte-integrations/connectors/source-klaviyo/unit_tests/test_source.py @@ -2,30 +2,39 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # + +import logging + import pendulum import pytest from source_klaviyo.source import SourceKlaviyo +logger = logging.getLogger("airbyte") + @pytest.mark.parametrize( - "status_code, response, is_connection_successful, error_msg", + ("status_code", "is_connection_successful", "error_msg"), ( - (200, "", True, None), + (200, True, None), ( 400, - "Bad request", False, - "Unable to connect to Klaviyo API with provided credentials.", + ( + "Unable to connect to stream metrics - " + "Request to https://a.klaviyo.com/api/metrics failed with status code 400 and error message None" + ), ), ( 403, - "Forbidden", False, - "Please provide a valid API key and make sure it has permissions to read specified streams.", + ( + "Unable to connect to stream metrics - Please provide a valid API key and " + "make sure it has permissions to read specified streams." + ), ), ), ) -def test_check_connection(requests_mock, status_code, response, is_connection_successful, error_msg): +def test_check_connection(requests_mock, status_code, is_connection_successful, error_msg): requests_mock.register_uri( "GET", "https://a.klaviyo.com/api/metrics", @@ -33,21 +42,18 @@ def test_check_connection(requests_mock, status_code, response, is_connection_su json={"end": 1, "total": 1} if 200 >= status_code < 300 else {}, ) source = SourceKlaviyo() - success, error = source.check_connection(logger=None, config={"api_key": "api_key"}) + success, error = source.check_connection(logger=logger, config={"api_key": "api_key"}) assert success is is_connection_successful assert error == error_msg def test_check_connection_unexpected_error(requests_mock): - requests_mock.register_uri( - "GET", - "https://a.klaviyo.com/api/metrics", - exc=Exception("Something went wrong, api_key=some_api_key"), - ) + exception_info = "Something went wrong" + requests_mock.register_uri("GET", "https://a.klaviyo.com/api/metrics", exc=Exception(exception_info)) source = SourceKlaviyo() - success, error = source.check_connection(logger=None, config={"api_key": "api_key"}) + success, error = source.check_connection(logger=logger, config={"api_key": "api_key"}) assert success is False - assert error == "Exception('Something went wrong, api_key=***')" + assert error == f"Unable to connect to stream metrics - {exception_info}" def test_streams(): diff --git a/airbyte-integrations/connectors/source-klaviyo/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-klaviyo/unit_tests/test_streams.py index 6da099bba11b2..9ca5c6abd3a4f 100644 --- a/airbyte-integrations/connectors/source-klaviyo/unit_tests/test_streams.py +++ b/airbyte-integrations/connectors/source-klaviyo/unit_tests/test_streams.py @@ -1,47 +1,57 @@ # # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # -from typing import Optional + + +from typing import Any, List, Mapping, Optional from unittest import mock import pendulum import pytest import requests from airbyte_cdk.models import SyncMode +from airbyte_cdk.sources.streams import Stream from pydantic import BaseModel from source_klaviyo.availability_strategy import KlaviyoAvailabilityStrategy from source_klaviyo.exceptions import KlaviyoBackoffError -from source_klaviyo.streams import ( - ArchivedRecordsStream, - Campaigns, - GlobalExclusions, - IncrementalKlaviyoStream, - KlaviyoStream, - Profiles, - SemiIncrementalKlaviyoStream, -) +from source_klaviyo.source import SourceKlaviyo +from source_klaviyo.streams import ArchivedRecordsStream, Campaigns, IncrementalKlaviyoStream, KlaviyoStream API_KEY = "some_key" START_DATE = pendulum.datetime(2020, 10, 10) +CONFIG = {"api_key": API_KEY, "start_date": START_DATE} -class SomeStream(KlaviyoStream): - schema = mock.Mock(spec=BaseModel) - max_time = 60 * 10 +def get_stream_by_name(stream_name: str, config: Mapping[str, Any]) -> Stream: + source = SourceKlaviyo() + matches_by_name = [stream_config for stream_config in source.streams(config) if stream_config.name == stream_name] + if not matches_by_name: + raise ValueError("Please provide a valid stream name.") + return matches_by_name[0] - def path(self, **kwargs) -> str: - return "sub_path" +def get_records(stream: Stream, sync_mode: Optional[SyncMode] = SyncMode.full_refresh) -> List[Mapping[str, Any]]: + records = [] + for stream_slice in stream.stream_slices(sync_mode=sync_mode): + for record in stream.read_records(sync_mode=sync_mode, stream_slice=stream_slice): + records.append(dict(record)) + return records -class SomeIncrementalStream(IncrementalKlaviyoStream): + +@pytest.fixture(name="response") +def response_fixture(mocker): + return mocker.Mock(spec=requests.Response) + + +class SomeStream(KlaviyoStream): schema = mock.Mock(spec=BaseModel) - cursor_field = "updated" + max_time = 60 * 10 def path(self, **kwargs) -> str: return "sub_path" -class SomeSemiIncrementalStream(SemiIncrementalKlaviyoStream): +class SomeIncrementalStream(IncrementalKlaviyoStream): schema = mock.Mock(spec=BaseModel) cursor_field = "updated" @@ -49,21 +59,13 @@ def path(self, **kwargs) -> str: return "sub_path" -@pytest.fixture(name="response") -def response_fixture(mocker): - return mocker.Mock(spec=requests.Response) - - class TestKlaviyoStream: def test_request_headers(self): stream = SomeStream(api_key=API_KEY) - inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} expected_headers = { - "Accept": "application/json", - "Revision": stream.api_revision, - "Authorization": f"Klaviyo-API-Key {API_KEY}", + "Accept": "application/json", "Revision": stream.api_revision, "Authorization": f"Klaviyo-API-Key {API_KEY}" } - assert stream.request_headers(**inputs) == expected_headers + assert stream.request_headers() == expected_headers @pytest.mark.parametrize( ("next_page_token", "page_size", "expected_params"), @@ -77,8 +79,7 @@ def test_request_headers(self): def test_request_params(self, next_page_token, page_size, expected_params): stream = SomeStream(api_key=API_KEY) stream.page_size = page_size - inputs = {"stream_slice": None, "stream_state": None, "next_page_token": next_page_token} - assert stream.request_params(**inputs) == expected_params + assert stream.request_params(stream_state=None, next_page_token=next_page_token) == expected_params @pytest.mark.parametrize( ("response_json", "next_page_token"), @@ -90,11 +91,11 @@ def test_request_params(self, next_page_token, page_size, expected_params): ], "links": { "self": "https://a.klaviyo.com/api/profiles/", - "next": "https://a.klaviyo.com/api/profiles/?page%5Bcursor%5D=aaA0aAo0aAA0AaAaAaa0AaaAAAaaA00AAAa0AA00A0AAAaAa", + "next": "https://a.klaviyo.com/api/profiles/?page%5Bcursor%5D=aaA0aAo0aAA0AaAaAaa0AaaAAAaa", "prev": "null", }, }, - {"page[cursor]": "aaA0aAo0aAA0AaAaAaa0AaaAAAaaA00AAAa0AA00A0AAAaAa"}, + {"page[cursor]": "aaA0aAo0aAA0AaAaAaa0AaaAAAaa"}, ), ( { @@ -114,7 +115,6 @@ def test_next_page_token(self, response, response_json, next_page_token): response.json.return_value = response_json stream = SomeStream(api_key=API_KEY) result = stream.next_page_token(response) - assert result == next_page_token def test_availability_strategy(self): @@ -126,7 +126,9 @@ def test_availability_strategy(self): "This is most likely due to insufficient permissions on the credentials in use. " "Try to create and use an API key with read permission for the 'some_stream' stream granted" ) - reasons_for_unavailable_status_codes = stream.availability_strategy.reasons_for_unavailable_status_codes(stream, None, None, None) + reasons_for_unavailable_status_codes = stream.availability_strategy.reasons_for_unavailable_status_codes( + stream, None, None, None + ) assert expected_status_code in reasons_for_unavailable_status_codes assert reasons_for_unavailable_status_codes[expected_status_code] == expected_message @@ -149,14 +151,18 @@ def test_backoff_time_large_retry_after(self): response_mock.headers = {"Retry-After": retry_after} with pytest.raises(KlaviyoBackoffError) as e: stream.backoff_time(response_mock) - error_message = f"Stream some_stream has reached rate limit with 'Retry-After' of {float(retry_after)} seconds, exit from stream." + error_message = ( + f"Stream some_stream has reached rate limit with 'Retry-After' of {float(retry_after)} seconds, " + "exit from stream." + ) assert str(e.value) == error_message class TestIncrementalKlaviyoStream: def test_cursor_field_is_required(self): with pytest.raises( - TypeError, match="Can't instantiate abstract class IncrementalKlaviyoStream with abstract methods cursor_field, path" + expected_exception=TypeError, + match="Can't instantiate abstract class IncrementalKlaviyoStream with abstract methods cursor_field, path", ): IncrementalKlaviyoStream(api_key=API_KEY, start_date=START_DATE.isoformat()) @@ -212,8 +218,7 @@ def test_cursor_field_is_required(self): ) def test_request_params(self, config_start_date, stream_state_date, next_page_token, expected_params): stream = SomeIncrementalStream(api_key=API_KEY, start_date=config_start_date) - inputs = {"stream_state": stream_state_date, "next_page_token": next_page_token} - assert stream.request_params(**inputs) == expected_params + assert stream.request_params(stream_state=stream_state_date, next_page_token=next_page_token) == expected_params @pytest.mark.parametrize( ("config_start_date", "current_cursor", "latest_cursor", "expected_cursor"), @@ -227,190 +232,129 @@ def test_request_params(self, config_start_date, stream_state_date, next_page_to ) def test_get_updated_state(self, config_start_date, current_cursor, latest_cursor, expected_cursor): stream = SomeIncrementalStream(api_key=API_KEY, start_date=config_start_date) - inputs = { - "current_stream_state": {stream.cursor_field: current_cursor} if current_cursor else {}, - "latest_record": {stream.cursor_field: latest_cursor}, - } - assert stream.get_updated_state(**inputs) == {stream.cursor_field: expected_cursor} + assert stream.get_updated_state( + current_stream_state={stream.cursor_field: current_cursor} if current_cursor else {}, + latest_record={stream.cursor_field: latest_cursor}, + ) == {stream.cursor_field: expected_cursor} class TestSemiIncrementalKlaviyoStream: - def test_cursor_field_is_required(self): - with pytest.raises( - TypeError, match="Can't instantiate abstract class SemiIncrementalKlaviyoStream with abstract methods cursor_field, path" - ): - SemiIncrementalKlaviyoStream(api_key=API_KEY, start_date=START_DATE.isoformat()) - @pytest.mark.parametrize( ("start_date", "stream_state", "input_records", "expected_records"), ( ( - "2021-11-08T00:00:00", - "2022-11-07T00:00:00", - [ - {"attributes": {"updated": "2022-11-08T00:00:00"}}, - {"attributes": {"updated": "2023-11-08T00:00:00"}}, - {"attributes": {"updated": "2021-11-08T00:00:00"}}, - ], - [ - {"attributes": {"updated": "2022-11-08T00:00:00"}, "updated": "2022-11-08T00:00:00"}, - {"attributes": {"updated": "2023-11-08T00:00:00"}, "updated": "2023-11-08T00:00:00"}, - ], - ), - ( - "2021-11-08T00:00:00", - None, + "2021-11-08T00:00:00+00:00", + "2022-11-07T00:00:00+00:00", [ - {"attributes": {"updated": "2022-11-08T00:00:00"}}, - {"attributes": {"updated": "2023-11-08T00:00:00"}}, - {"attributes": {"updated": "2021-11-08T00:00:00"}}, + {"attributes": {"updated": "2022-11-08T00:00:00+00:00"}}, + {"attributes": {"updated": "2023-11-08T00:00:00+00:00"}}, + {"attributes": {"updated": "2021-11-08T00:00:00+00:00"}}, ], [ - {"attributes": {"updated": "2022-11-08T00:00:00"}, "updated": "2022-11-08T00:00:00"}, - {"attributes": {"updated": "2023-11-08T00:00:00"}, "updated": "2023-11-08T00:00:00"}, + {"attributes": {"updated": "2022-11-08T00:00:00+00:00"}, "updated": "2022-11-08T00:00:00+00:00"}, + {"attributes": {"updated": "2023-11-08T00:00:00+00:00"}, "updated": "2023-11-08T00:00:00+00:00"}, ], ), ( - None, + "2021-11-08T00:00:00+00:00", None, [ - {"attributes": {"updated": "2022-11-08T00:00:00"}}, - {"attributes": {"updated": "2023-11-08T00:00:00"}}, - {"attributes": {"updated": "2021-11-08T00:00:00"}}, + {"attributes": {"updated": "2022-11-08T00:00:00+00:00"}}, + {"attributes": {"updated": "2023-11-08T00:00:00+00:00"}}, + {"attributes": {"updated": "2021-11-08T00:00:00+00:00"}}, ], [ - {"attributes": {"updated": "2022-11-08T00:00:00"}, "updated": "2022-11-08T00:00:00"}, - {"attributes": {"updated": "2023-11-08T00:00:00"}, "updated": "2023-11-08T00:00:00"}, - {"attributes": {"updated": "2021-11-08T00:00:00"}, "updated": "2021-11-08T00:00:00"}, + {"attributes": {"updated": "2022-11-08T00:00:00+00:00"}, "updated": "2022-11-08T00:00:00+00:00"}, + {"attributes": {"updated": "2023-11-08T00:00:00+00:00"}, "updated": "2023-11-08T00:00:00+00:00"}, ], ), - ( - "2021-11-08T00:00:00", - "2022-11-07T00:00:00", - [], - [], - ), + ("2021-11-08T00:00:00+00:00", "2022-11-07T00:00:00+00:00", [], []), ), ) def test_read_records(self, start_date, stream_state, input_records, expected_records, requests_mock): - stream = SomeSemiIncrementalStream(api_key=API_KEY, start_date=start_date) - requests_mock.register_uri("GET", f"https://a.klaviyo.com/api/{stream.path()}", status_code=200, json={"data": input_records}) - inputs = { - "sync_mode": SyncMode.incremental, - "cursor_field": stream.cursor_field, - "stream_slice": None, - "stream_state": {stream.cursor_field: stream_state} if stream_state else None, - } - assert list(stream.read_records(**inputs)) == expected_records + stream = get_stream_by_name("metrics", CONFIG | {"start_date": start_date}) + requests_mock.register_uri( + "GET", f"https://a.klaviyo.com/api/metrics", status_code=200, json={"data": input_records} + ) + stream.stream_state = {stream.cursor_field: stream_state if stream_state else start_date} + records = get_records(stream=stream, sync_mode=SyncMode.incremental) + assert records == expected_records class TestProfilesStream: - @pytest.mark.parametrize( - ("next_page_token", "page_size", "expected_params"), - ( - ( - {"page[cursor]": "aaA0aAo0aAA0A"}, - None, - {"page[cursor]": "aaA0aAo0aAA0A", "additional-fields[profile]": "predictive_analytics", "sort": "updated"}, - ), - ( - {"page[cursor]": "aaA0aAo0aAA0A"}, - 100, - {"page[cursor]": "aaA0aAo0aAA0A", "additional-fields[profile]": "predictive_analytics", "sort": "updated"}, - ), - (None, None, {"additional-fields[profile]": "predictive_analytics", "sort": "updated"}), - (None, 100, {"page[size]": 100, "additional-fields[profile]": "predictive_analytics", "sort": "updated"}), - ), - ) - def test_request_params(self, next_page_token: Optional[dict], page_size: Optional[int], expected_params: dict): - stream = Profiles(api_key=API_KEY) - stream.page_size = page_size - inputs = {"stream_slice": None, "stream_state": None, "next_page_token": next_page_token} - assert stream.request_params(**inputs) == expected_params + def test_request_params(self): + stream = get_stream_by_name("profiles", CONFIG) + assert stream.retriever.requester.get_request_params() == {"additional-fields[profile]": "predictive_analytics"} - def test_parse_response(self, mocker): - stream = Profiles(api_key=API_KEY, start_date=START_DATE.isoformat()) + def test_read_records(self, requests_mock): + stream = get_stream_by_name("profiles", CONFIG) json = { "data": [ { "type": "profile", "id": "00AA0A0AA0AA000AAAAAAA0AA0", - "attributes": {"email": "name@airbyte.io", "phone_number": "+11111111111", "updated": "2023-03-10T20:36:36+00:00"}, + "attributes": {"email": "name@airbyte.io", "updated": "2023-03-10T20:36:36+00:00"}, "properties": {"Status": "onboarding_complete"}, }, { "type": "profile", "id": "AAAA1A1AA1AA111AAAAAAA1AA1", - "attributes": {"email": "name2@airbyte.io", "phone_number": "+2222222222", "updated": "2023-02-10T20:36:36+00:00"}, + "attributes": {"email": "name2@airbyte.io", "updated": "2023-02-10T20:36:36+00:00"}, "properties": {"Status": "onboarding_started"}, }, ], - "links": { - "self": "https://a.klaviyo.com/api/profiles/", - "next": "https://a.klaviyo.com/api/profiles/?page%5Bcursor%5D=aaA0aAo0aAA0AaAaAaa0AaaAAAaaA00AAAa0AA00A0AAAaAa", - "prev": "null", - }, } - records = list(stream.parse_response(mocker.Mock(json=mocker.Mock(return_value=json)))) + requests_mock.register_uri("GET", f"https://a.klaviyo.com/api/profiles", status_code=200, json=json) + + records = get_records(stream=stream) assert records == [ { "type": "profile", "id": "00AA0A0AA0AA000AAAAAAA0AA0", "updated": "2023-03-10T20:36:36+00:00", - "attributes": {"email": "name@airbyte.io", "phone_number": "+11111111111", "updated": "2023-03-10T20:36:36+00:00"}, + "attributes": {"email": "name@airbyte.io", "updated": "2023-03-10T20:36:36+00:00"}, "properties": {"Status": "onboarding_complete"}, }, { "type": "profile", "id": "AAAA1A1AA1AA111AAAAAAA1AA1", "updated": "2023-02-10T20:36:36+00:00", - "attributes": {"email": "name2@airbyte.io", "phone_number": "+2222222222", "updated": "2023-02-10T20:36:36+00:00"}, + "attributes": {"email": "name2@airbyte.io", "updated": "2023-02-10T20:36:36+00:00"}, "properties": {"Status": "onboarding_started"}, }, ] class TestGlobalExclusionsStream: - def test_parse_response(self, mocker): - stream = GlobalExclusions(api_key=API_KEY, start_date=START_DATE.isoformat()) + def test_read_records(self, requests_mock): + stream = get_stream_by_name("global_exclusions", CONFIG) json = { "data": [ { "type": "profile", "id": "00AA0A0AA0AA000AAAAAAA0AA0", "attributes": { - "email": "name@airbyte.io", - "phone_number": "+11111111111", "updated": "2023-03-10T20:36:36+00:00", - "subscriptions": { - "email": {"marketing": {"suppressions": [{"reason": "SUPPRESSED", "timestamp": "2021-05-18T01:29:51+00:00"}]}}, - }, + "subscriptions": {"email": {"marketing": {"suppressions": [{"reason": "SUPPRESSED"}]}}}, }, }, { "type": "profile", "id": "AAAA1A1AA1AA111AAAAAAA1AA1", - "attributes": {"email": "name2@airbyte.io", "phone_number": "+2222222222", "updated": "2023-02-10T20:36:36+00:00"}, + "attributes": {"updated": "2023-02-10T20:36:36+00:00"}, }, ], - "links": { - "self": "https://a.klaviyo.com/api/profiles/", - "next": "https://a.klaviyo.com/api/profiles/?page%5Bcursor%5D=aaA0aAo0aAA0AaAaAaa0AaaAAAaaA00AAAa0AA00A0AAAaAa", - "prev": "null", - }, } - records = list(stream.parse_response(mocker.Mock(json=mocker.Mock(return_value=json)))) + requests_mock.register_uri("GET", f"https://a.klaviyo.com/api/profiles", status_code=200, json=json) + + records = get_records(stream=stream) assert records == [ { "type": "profile", "id": "00AA0A0AA0AA000AAAAAAA0AA0", "attributes": { - "email": "name@airbyte.io", - "phone_number": "+11111111111", "updated": "2023-03-10T20:36:36+00:00", - "subscriptions": { - "email": {"marketing": {"suppressions": [{"reason": "SUPPRESSED", "timestamp": "2021-05-18T01:29:51+00:00"}]}}, - }, + "subscriptions": {"email": {"marketing": {"suppressions": [{"reason": "SUPPRESSED"}]}}}, }, "updated": "2023-03-10T20:36:36+00:00", } @@ -429,7 +373,11 @@ def test_read_records(self, requests_mock): stream = Campaigns(api_key=API_KEY) requests_mock.register_uri( - "GET", "https://a.klaviyo.com/api/campaigns?sort=updated_at", status_code=200, json={"data": input_records}, complete_qs=True + "GET", + "https://a.klaviyo.com/api/campaigns?sort=updated_at", + status_code=200, + json={"data": input_records}, + complete_qs=True, ) requests_mock.register_uri( "GET", @@ -439,7 +387,6 @@ def test_read_records(self, requests_mock): complete_qs=True, ) - inputs = {"sync_mode": SyncMode.full_refresh, "cursor_field": stream.cursor_field, "stream_slice": None, "stream_state": None} expected_records = [ { "attributes": {"name": "Some name 1", "archived": False, "updated_at": "2021-05-12T20:45:47+00:00"}, @@ -454,7 +401,7 @@ def test_read_records(self, requests_mock): "updated_at": "2021-05-12T20:45:47+00:00", }, ] - assert list(stream.read_records(**inputs)) == expected_records + assert list(stream.read_records(sync_mode=SyncMode.full_refresh)) == expected_records @pytest.mark.parametrize( ("latest_record", "current_stream_state", "expected_state"), @@ -536,4 +483,6 @@ class TestArchivedRecordsStream: ) def test_request_params(self, stream_state, next_page_token, expected_params): archived_stream = ArchivedRecordsStream(api_key="API_KEY", cursor_field="updated_at", path="path") - assert archived_stream.request_params(stream_state=stream_state, next_page_token=next_page_token) == expected_params + assert archived_stream.request_params( + stream_state=stream_state, next_page_token=next_page_token + ) == expected_params diff --git a/airbyte-integrations/connectors/source-linkedin-ads/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-linkedin-ads/integration_tests/expected_records.jsonl index 2ccd9635d7f7c..e5b8614b184b4 100644 --- a/airbyte-integrations/connectors/source-linkedin-ads/integration_tests/expected_records.jsonl +++ b/airbyte-integrations/connectors/source-linkedin-ads/integration_tests/expected_records.jsonl @@ -4,20 +4,20 @@ {"stream": "account_users", "data": {"role": "ACCOUNT_BILLING_ADMIN", "user": "urn:li:person:HRnXB4kIO7", "account": "urn:li:sponsoredAccount:508720451", "created": "2021-06-14T10:09:22+00:00", "lastModified": "2021-06-14T10:09:22+00:00"}, "emitted_at": 1697196559364} {"stream": "account_users", "data": {"role": "ACCOUNT_BILLING_ADMIN", "user": "urn:li:person:HRnXB4kIO7", "account": "urn:li:sponsoredAccount:508774356", "created": "2021-08-21T21:28:19+00:00", "lastModified": "2021-08-21T21:28:19+00:00"}, "emitted_at": 1697196559760} {"stream": "account_users", "data": {"role": "ACCOUNT_BILLING_ADMIN", "user": "urn:li:person:HRnXB4kIO7", "account": "urn:li:sponsoredAccount:508777244", "created": "2021-08-21T21:27:55+00:00", "lastModified": "2021-08-21T21:27:55+00:00"}, "emitted_at": 1697196560036} -{"stream":"ad_campaign_analytics","data":{"documentFirstQuartileCompletions":0.0,"actionClicks":0.0,"comments":0.0,"costInUsd":-2E-18,"commentLikes":0.0,"adUnitClicks":0.0,"companyPageClicks":0.0,"costInLocalCurrency":-2E-18,"documentThirdQuartileCompletions":0.0,"externalWebsiteConversions":0.0,"cardImpressions":0.0,"documentCompletions":0.0,"clicks":0.0,"cardClicks":0.0,"approximateUniqueImpressions":0.0,"documentMidpointCompletions":0.0,"downloadClicks":0.0,"start_date":"2023-08-26","end_date":"2023-08-26","pivotValue":"urn:li:sponsoredCampaign:252074216","oneClickLeads":0.0,"landingPageClicks":0.0,"fullScreenPlays":0.0,"oneClickLeadFormOpens":0.0,"follows":0.0,"impressions":1.0,"otherEngagements":0.0,"leadGenerationMailContactInfoShares":0.0,"opens":0.0,"leadGenerationMailInterestedClicks":0.0,"pivotValues":["urn:li:sponsoredCampaign:252074216"],"likes":0.0},"emitted_at":1702655286996} -{"stream":"ad_campaign_analytics","data":{"documentFirstQuartileCompletions":0.0,"actionClicks":0.0,"comments":0.0,"costInUsd":100.00000000000004,"commentLikes":0.0,"adUnitClicks":0.0,"companyPageClicks":0.0,"costInLocalCurrency":100.00000000000004,"documentThirdQuartileCompletions":0.0,"externalWebsiteConversions":0.0,"cardImpressions":0.0,"documentCompletions":0.0,"clicks":106.0,"cardClicks":0.0,"approximateUniqueImpressions":17392.0,"documentMidpointCompletions":0.0,"downloadClicks":0.0,"start_date":"2023-08-25","end_date":"2023-08-25","pivotValue":"urn:li:sponsoredCampaign:252074216","oneClickLeads":0.0,"landingPageClicks":106.0,"fullScreenPlays":0.0,"oneClickLeadFormOpens":0.0,"follows":0.0,"impressions":19464.0,"otherEngagements":0.0,"leadGenerationMailContactInfoShares":0.0,"opens":0.0,"leadGenerationMailInterestedClicks":0.0,"pivotValues":["urn:li:sponsoredCampaign:252074216"],"likes":0.0,"videoCompletions":0.0,"viralCardImpressions":0.0,"videoFirstQuartileCompletions":0.0,"textUrlClicks":0.0,"videoStarts":0.0,"sends":0.0,"shares":0.0,"videoMidpointCompletions":0.0,"validWorkEmailLeads":0.0,"viralCardClicks":0.0,"videoThirdQuartileCompletions":0.0,"totalEngagements":106.0,"reactions":0.0,"videoViews":0.0},"emitted_at":1702655287003} -{"stream":"ad_creative_analytics","data":{"documentFirstQuartileCompletions":0.0,"actionClicks":0.0,"comments":0.0,"costInUsd":-2E-18,"commentLikes":0.0,"adUnitClicks":0.0,"companyPageClicks":0.0,"costInLocalCurrency":-2E-18,"documentThirdQuartileCompletions":0.0,"externalWebsiteConversions":0.0,"cardImpressions":0.0,"documentCompletions":0.0,"clicks":0.0,"cardClicks":0.0,"approximateUniqueImpressions":0.0,"documentMidpointCompletions":0.0,"downloadClicks":0.0,"start_date":"2023-08-26","end_date":"2023-08-26","pivotValue":"urn:li:sponsoredCreative:287513206","oneClickLeads":0.0,"landingPageClicks":0.0,"fullScreenPlays":0.0,"oneClickLeadFormOpens":0.0,"follows":0.0,"impressions":1.0,"otherEngagements":0.0,"leadGenerationMailContactInfoShares":0.0,"opens":0.0,"leadGenerationMailInterestedClicks":0.0,"pivotValues":["urn:li:sponsoredCreative:287513206"],"likes":0.0},"emitted_at":1702656821471} -{"stream":"ad_creative_analytics","data":{"documentFirstQuartileCompletions":0.0,"actionClicks":0.0,"comments":0.0,"costInUsd":100.00000000000004,"commentLikes":0.0,"adUnitClicks":0.0,"companyPageClicks":0.0,"costInLocalCurrency":100.00000000000004,"documentThirdQuartileCompletions":0.0,"externalWebsiteConversions":0.0,"cardImpressions":0.0,"documentCompletions":0.0,"clicks":106.0,"cardClicks":0.0,"approximateUniqueImpressions":17392.0,"documentMidpointCompletions":0.0,"downloadClicks":0.0,"start_date":"2023-08-25","end_date":"2023-08-25","pivotValue":"urn:li:sponsoredCreative:287513206","oneClickLeads":0.0,"landingPageClicks":106.0,"fullScreenPlays":0.0,"oneClickLeadFormOpens":0.0,"follows":0.0,"impressions":19464.0,"otherEngagements":0.0,"leadGenerationMailContactInfoShares":0.0,"opens":0.0,"leadGenerationMailInterestedClicks":0.0,"pivotValues":["urn:li:sponsoredCreative:287513206"],"likes":0.0,"videoCompletions":0.0,"viralCardImpressions":0.0,"videoFirstQuartileCompletions":0.0,"textUrlClicks":0.0,"videoStarts":0.0,"sends":0.0,"shares":0.0,"videoMidpointCompletions":0.0,"validWorkEmailLeads":0.0,"viralCardClicks":0.0,"videoThirdQuartileCompletions":0.0,"totalEngagements":106.0,"reactions":0.0,"videoViews":0.0},"emitted_at":1702656821475} -{"stream": "ad_impression_device_analytics", "data": {"documentFirstQuartileCompletions": 0.0, "actionClicks": 0.0, "comments": 0.0, "costInUsd": 2.29, "commentLikes": 0.0, "adUnitClicks": 0.0, "companyPageClicks": 0.0, "costInLocalCurrency": 2.29, "documentThirdQuartileCompletions": 0.0, "externalWebsiteConversions": 0.0, "cardImpressions": 0.0, "documentCompletions": 0.0, "clicks": 0.0, "cardClicks": 0.0, "documentMidpointCompletions": 0.0, "downloadClicks": 0.0, "start_date": "2023-08-25", "end_date": "2023-08-25", "pivotValue": "urn:li:sponsoredCampaign:252074216", "oneClickLeads": 0.0, "landingPageClicks": 0.0, "fullScreenPlays": 0.0, "oneClickLeadFormOpens": 0.0, "follows": 0.0, "impressions": 498.0, "otherEngagements": 0.0, "leadGenerationMailContactInfoShares": 0.0, "opens": 0.0, "leadGenerationMailInterestedClicks": 0.0, "pivotValues": ["UNDETECTED"], "likes": 0.0, "videoCompletions": 0.0, "viralCardImpressions": 0.0, "videoFirstQuartileCompletions": 0.0, "textUrlClicks": 0.0, "videoStarts": 0.0, "sends": 0.0, "shares": 0.0, "videoMidpointCompletions": 0.0, "validWorkEmailLeads": 0.0, "viralCardClicks": 0.0, "videoThirdQuartileCompletions": 0.0, "totalEngagements": 20.0, "reactions": 0.0, "videoViews": 0.0}, "emitted_at": 1697196622374} -{"stream": "ad_impression_device_analytics", "data": {"documentFirstQuartileCompletions": 0.0, "actionClicks": 0.0, "comments": 0.0, "costInUsd": -2e-18, "commentLikes": 0.0, "adUnitClicks": 0.0, "companyPageClicks": 0.0, "costInLocalCurrency": -2e-18, "documentThirdQuartileCompletions": 0.0, "externalWebsiteConversions": 0.0, "cardImpressions": 0.0, "documentCompletions": 0.0, "clicks": 0.0, "cardClicks": 0.0, "documentMidpointCompletions": 0.0, "downloadClicks": 0.0, "start_date": "2023-08-26", "end_date": "2023-08-26", "pivotValue": "urn:li:sponsoredCampaign:252074216", "oneClickLeads": 0.0, "landingPageClicks": 0.0, "fullScreenPlays": 0.0, "oneClickLeadFormOpens": 0.0, "follows": 0.0, "impressions": 1.0, "otherEngagements": 0.0, "leadGenerationMailContactInfoShares": 0.0, "opens": 0.0, "leadGenerationMailInterestedClicks": 0.0, "pivotValues": ["MOBILE_WEB"], "likes": 0.0}, "emitted_at": 1697196622395} -{"stream": "ad_member_company_size_analytics", "data": {"documentFirstQuartileCompletions": 0.0, "actionClicks": 0.0, "comments": 0.0, "costInUsd": 24.457317520310493, "commentLikes": 0.0, "adUnitClicks": 0.0, "companyPageClicks": 0.0, "costInLocalCurrency": 24.457317520310493, "documentThirdQuartileCompletions": 0.0, "externalWebsiteConversions": 0.0, "documentCompletions": 0.0, "clicks": 9.0, "documentMidpointCompletions": 0.0, "downloadClicks": 0.0, "start_date": "2023-08-25", "end_date": "2023-08-25", "pivotValue": "urn:li:sponsoredCampaign:252074216", "externalWebsitePostClickConversions": 0.0, "externalWebsitePostViewConversions": 0.0, "oneClickLeads": 0.0, "landingPageClicks": 9.0, "fullScreenPlays": 0.0, "follows": 0.0, "oneClickLeadFormOpens": 0.0, "impressions": 1480.0, "otherEngagements": 0.0, "leadGenerationMailContactInfoShares": 0.0, "opens": 0.0, "leadGenerationMailInterestedClicks": 0.0, "pivotValues": ["SIZE_2_TO_10"], "likes": 0.0, "videoCompletions": 0.0, "talentLeads": 0.0, "videoFirstQuartileCompletions": 0.0, "textUrlClicks": 0.0, "videoStarts": 0.0, "sends": 0.0, "shares": 0.0, "videoMidpointCompletions": 0.0, "validWorkEmailLeads": 0.0, "videoThirdQuartileCompletions": 0.0, "totalEngagements": 8.0, "reactions": 0.0, "videoViews": 0.0}, "emitted_at": 1697196644434} -{"stream": "ad_member_country_analytics", "data": {"documentFirstQuartileCompletions": 0.0, "actionClicks": 0.0, "comments": 0.0, "costInUsd": 317.93414846943944, "commentLikes": 0.0, "adUnitClicks": 0.0, "companyPageClicks": 0.0, "costInLocalCurrency": 318.93414846943944, "documentThirdQuartileCompletions": 0.0, "externalWebsiteConversions": 0.0, "documentCompletions": 0.0, "clicks": 110.0, "documentMidpointCompletions": 0.0, "downloadClicks": 0.0, "start_date": "2023-08-25", "end_date": "2023-08-25", "pivotValue": "urn:li:sponsoredCampaign:252074216", "externalWebsitePostClickConversions": 0.0, "externalWebsitePostViewConversions": 0.0, "oneClickLeads": 0.0, "landingPageClicks": 107.0, "fullScreenPlays": 0.0, "follows": 0.0, "oneClickLeadFormOpens": 0.0, "impressions": 19464.0, "otherEngagements": 0.0, "leadGenerationMailContactInfoShares": 0.0, "opens": 0.0, "leadGenerationMailInterestedClicks": 0.0, "pivotValues": ["urn:li:geo:103644278"], "likes": 0.0, "videoCompletions": 0.0, "talentLeads": 0.0, "videoFirstQuartileCompletions": 0.0, "textUrlClicks": 0.0, "videoStarts": 0.0, "sends": 0.0, "shares": 0.0, "videoMidpointCompletions": 0.0, "validWorkEmailLeads": 0.0, "videoThirdQuartileCompletions": 0.0, "totalEngagements": 109.0, "reactions": 0.0, "videoViews": 0.0}, "emitted_at": 1697196666347} -{"stream": "ad_member_job_function_analytics", "data": {"documentFirstQuartileCompletions": 0.0, "actionClicks": 0.0, "comments": 0.0, "costInUsd": 16.428626738541787, "commentLikes": 0.0, "adUnitClicks": 0.0, "companyPageClicks": 0.0, "costInLocalCurrency": 16.428626738541787, "documentThirdQuartileCompletions": 0.0, "externalWebsiteConversions": 0.0, "documentCompletions": 0.0, "clicks": 7.0, "documentMidpointCompletions": 0.0, "downloadClicks": 0.0, "start_date": "2023-08-25", "end_date": "2023-08-25", "pivotValue": "urn:li:sponsoredCampaign:252074216", "externalWebsitePostClickConversions": 0.0, "externalWebsitePostViewConversions": 0.0, "oneClickLeads": 0.0, "landingPageClicks": 4.0, "fullScreenPlays": 0.0, "follows": 0.0, "oneClickLeadFormOpens": 0.0, "impressions": 1064.0, "otherEngagements": 0.0, "leadGenerationMailContactInfoShares": 0.0, "opens": 0.0, "leadGenerationMailInterestedClicks": 0.0, "pivotValues": ["urn:li:function:7"], "likes": 0.0, "videoCompletions": 0.0, "talentLeads": 0.0, "videoFirstQuartileCompletions": 0.0, "textUrlClicks": 0.0, "videoStarts": 0.0, "sends": 0.0, "shares": 0.0, "videoMidpointCompletions": 0.0, "validWorkEmailLeads": 0.0, "videoThirdQuartileCompletions": 0.0, "totalEngagements": 4.0, "reactions": 0.0, "videoViews": 0.0}, "emitted_at": 1697196688970} -{"stream": "ad_member_job_title_analytics", "data": {"documentFirstQuartileCompletions": 0.0, "actionClicks": 0.0, "comments": 0.0, "costInUsd": 13.656450854809513, "commentLikes": 0.0, "adUnitClicks": 0.0, "companyPageClicks": 0.0, "costInLocalCurrency": 11.656450854809513, "documentThirdQuartileCompletions": 0.0, "externalWebsiteConversions": 0.0, "documentCompletions": 0.0, "clicks": 6.0, "documentMidpointCompletions": 0.0, "downloadClicks": 0.0, "start_date": "2023-08-25", "end_date": "2023-08-25", "pivotValue": "urn:li:sponsoredCampaign:252074216", "externalWebsitePostClickConversions": 0.0, "externalWebsitePostViewConversions": 0.0, "oneClickLeads": 0.0, "landingPageClicks": 0.0, "fullScreenPlays": 0.0, "follows": 0.0, "oneClickLeadFormOpens": 0.0, "impressions": 39.0, "otherEngagements": 0.0, "leadGenerationMailContactInfoShares": 0.0, "opens": 0.0, "leadGenerationMailInterestedClicks": 0.0, "pivotValues": ["urn:li:title:68"], "likes": 0.0, "videoCompletions": 0.0, "talentLeads": 0.0, "videoFirstQuartileCompletions": 0.0, "textUrlClicks": 0.0, "videoStarts": 0.0, "sends": 0.0, "shares": 0.0, "videoMidpointCompletions": 0.0, "validWorkEmailLeads": 0.0, "videoThirdQuartileCompletions": 0.0, "totalEngagements": 0.0, "reactions": 0.0, "videoViews": 0.0}, "emitted_at": 1697196712131} -{"stream": "ad_member_industry_analytics", "data": {"documentFirstQuartileCompletions": 0.0, "actionClicks": 0.0, "comments": 0.0, "costInUsd": 7.4596902377485215, "commentLikes": 0.0, "adUnitClicks": 0.0, "companyPageClicks": 0.0, "costInLocalCurrency": 7.4596902377485215, "documentThirdQuartileCompletions": 0.0, "externalWebsiteConversions": 0.0, "documentCompletions": 0.0, "clicks": 3.0, "documentMidpointCompletions": 0.0, "downloadClicks": 0.0, "start_date": "2023-08-25", "end_date": "2023-08-25", "pivotValue": "urn:li:sponsoredCampaign:252074216", "externalWebsitePostClickConversions": 0.0, "externalWebsitePostViewConversions": 0.0, "oneClickLeads": 0.0, "landingPageClicks": 0.0, "fullScreenPlays": 0.0, "follows": 0.0, "oneClickLeadFormOpens": 0.0, "impressions": 165.0, "otherEngagements": 0.0, "leadGenerationMailContactInfoShares": 0.0, "opens": 0.0, "leadGenerationMailInterestedClicks": 0.0, "pivotValues": ["urn:li:industry:99"], "likes": 0.0, "videoCompletions": 0.0, "talentLeads": 0.0, "videoFirstQuartileCompletions": 0.0, "textUrlClicks": 0.0, "videoStarts": 0.0, "sends": 0.0, "shares": 0.0, "videoMidpointCompletions": 0.0, "validWorkEmailLeads": 0.0, "videoThirdQuartileCompletions": 0.0, "totalEngagements": 0.0, "reactions": 0.0, "videoViews": 0.0}, "emitted_at": 1697196734580} -{"stream": "ad_member_seniority_analytics", "data": {"documentFirstQuartileCompletions": 0.0, "actionClicks": 0.0, "comments": 0.0, "costInUsd": 61.54563022857992, "commentLikes": 0.0, "adUnitClicks": 0.0, "companyPageClicks": 0.0, "costInLocalCurrency": 61.54563022857992, "documentThirdQuartileCompletions": 0.0, "externalWebsiteConversions": 0.0, "documentCompletions": 0.0, "clicks": 25.0, "documentMidpointCompletions": 0.0, "downloadClicks": 0.0, "start_date": "2023-08-25", "end_date": "2023-08-25", "pivotValue": "urn:li:sponsoredCampaign:252074216", "externalWebsitePostClickConversions": 0.0, "externalWebsitePostViewConversions": 0.0, "oneClickLeads": 0.0, "landingPageClicks": 28.0, "fullScreenPlays": 0.0, "follows": 0.0, "oneClickLeadFormOpens": 0.0, "impressions": 3762.0, "otherEngagements": 0.0, "leadGenerationMailContactInfoShares": 0.0, "opens": 0.0, "leadGenerationMailInterestedClicks": 0.0, "pivotValues": ["urn:li:seniority:4"], "likes": 0.0, "videoCompletions": 0.0, "talentLeads": 0.0, "videoFirstQuartileCompletions": 0.0, "textUrlClicks": 0.0, "videoStarts": 0.0, "sends": 0.0, "shares": 0.0, "videoMidpointCompletions": 0.0, "validWorkEmailLeads": 0.0, "videoThirdQuartileCompletions": 0.0, "totalEngagements": 28.0, "reactions": 0.0, "videoViews": 0.0}, "emitted_at": 1697196756108} -{"stream": "ad_member_region_analytics", "data": {"documentFirstQuartileCompletions": 0.0, "actionClicks": 0.0, "comments": 0.0, "costInUsd": 12.261068694077421, "commentLikes": 0.0, "adUnitClicks": 0.0, "companyPageClicks": 0.0, "costInLocalCurrency": 11.261068694077421, "documentThirdQuartileCompletions": 0.0, "externalWebsiteConversions": 0.0, "documentCompletions": 0.0, "clicks": 8.0, "documentMidpointCompletions": 0.0, "downloadClicks": 0.0, "start_date": "2023-08-25", "end_date": "2023-08-25", "pivotValue": "urn:li:sponsoredCampaign:252074216", "externalWebsitePostClickConversions": 0.0, "externalWebsitePostViewConversions": 0.0, "oneClickLeads": 0.0, "landingPageClicks": 0.0, "fullScreenPlays": 0.0, "follows": 0.0, "oneClickLeadFormOpens": 0.0, "impressions": 90.0, "otherEngagements": 0.0, "leadGenerationMailContactInfoShares": 0.0, "opens": 0.0, "leadGenerationMailInterestedClicks": 0.0, "pivotValues": ["urn:li:geo:90009446"], "likes": 0.0, "videoCompletions": 0.0, "talentLeads": 0.0, "videoFirstQuartileCompletions": 0.0, "textUrlClicks": 0.0, "videoStarts": 0.0, "sends": 0.0, "shares": 0.0, "videoMidpointCompletions": 0.0, "validWorkEmailLeads": 0.0, "videoThirdQuartileCompletions": 0.0, "totalEngagements": 0.0, "reactions": 0.0, "videoViews": 0.0}, "emitted_at": 1697196779059} -{"stream": "ad_member_company_analytics", "data": {"externalWebsitePostClickConversions": 0.0, "externalWebsitePostViewConversions": 0.0, "oneClickLeads": 0.0, "landingPageClicks": 0.0, "fullScreenPlays": 0.0, "follows": 0.0, "oneClickLeadFormOpens": 0.0, "impressions": 34.0, "otherEngagements": 0.0, "leadGenerationMailContactInfoShares": 0.0, "opens": 0.0, "leadGenerationMailInterestedClicks": 0.0, "pivotValues": ["urn:li:organization:33200573"], "likes": 0.0, "start_date": "2023-08-25", "end_date": "2023-08-25", "pivotValue": "urn:li:sponsoredCampaign:252074216", "videoCompletions": 0.0, "talentLeads": 0.0, "videoFirstQuartileCompletions": 0.0, "textUrlClicks": 0.0, "videoStarts": 0.0, "sends": 0.0, "shares": 0.0, "videoMidpointCompletions": 0.0, "validWorkEmailLeads": 0.0, "videoThirdQuartileCompletions": 0.0, "totalEngagements": 0.0, "reactions": 0.0, "videoViews": 0.0}, "emitted_at": 1697196801205} +{"stream":"ad_campaign_analytics","data":{"documentFirstQuartileCompletions":0.0,"actionClicks":0.0,"comments":0.0,"costInUsd":-2E-18,"commentLikes":0.0,"adUnitClicks":0.0,"companyPageClicks":0.0,"costInLocalCurrency":-2E-18,"documentThirdQuartileCompletions":0.0,"cardImpressions":0.0,"documentCompletions":0.0,"clicks":0.0,"cardClicks":0.0,"pivotValues":["urn:li:sponsoredCampaign:252074216"],"approximateUniqueImpressions":0.0,"documentMidpointCompletions":0.0,"downloadClicks":0.0,"start_date":"2023-08-26","end_date":"2023-08-26","sponsoredCampaign":"252074216","pivot":"CAMPAIGN","oneClickLeads":0.0,"landingPageClicks":0.0,"fullScreenPlays":0.0,"oneClickLeadFormOpens":0.0,"follows":0.0,"impressions":1.0,"otherEngagements":0.0,"externalWebsiteConversions":0.0,"leadGenerationMailContactInfoShares":0.0,"opens":0.0,"leadGenerationMailInterestedClicks":0.0,"likes":0.0},"emitted_at":1712752647161} +{"stream":"ad_campaign_analytics","data":{"documentFirstQuartileCompletions":0.0,"actionClicks":0.0,"comments":0.0,"costInUsd":100.0,"commentLikes":0.0,"adUnitClicks":0.0,"companyPageClicks":0.0,"costInLocalCurrency":100.0,"documentThirdQuartileCompletions":0.0,"cardImpressions":0.0,"documentCompletions":0.0,"clicks":106.0,"cardClicks":0.0,"pivotValues":["urn:li:sponsoredCampaign:252074216"],"approximateUniqueImpressions":17392.0,"documentMidpointCompletions":0.0,"downloadClicks":0.0,"start_date":"2023-08-25","end_date":"2023-08-25","sponsoredCampaign":"252074216","pivot":"CAMPAIGN","oneClickLeads":0.0,"landingPageClicks":106.0,"fullScreenPlays":0.0,"oneClickLeadFormOpens":0.0,"follows":0.0,"impressions":19464.0,"otherEngagements":0.0,"externalWebsiteConversions":0.0,"leadGenerationMailContactInfoShares":0.0,"opens":0.0,"leadGenerationMailInterestedClicks":0.0,"likes":0.0,"videoCompletions":0.0,"videoFirstQuartileCompletions":0.0,"textUrlClicks":0.0,"videoStarts":0.0,"sends":0.0,"shares":0.0,"videoMidpointCompletions":0.0,"validWorkEmailLeads":0.0,"videoThirdQuartileCompletions":0.0,"totalEngagements":106.0,"reactions":0.0},"emitted_at":1712752647168} +{"stream":"ad_creative_analytics","data":{"documentFirstQuartileCompletions":0.0,"actionClicks":0.0,"comments":0.0,"costInUsd":-2E-18,"commentLikes":0.0,"adUnitClicks":0.0,"companyPageClicks":0.0,"costInLocalCurrency":-2E-18,"documentThirdQuartileCompletions":0.0,"cardImpressions":0.0,"documentCompletions":0.0,"clicks":0.0,"cardClicks":0.0,"pivotValues":["urn:li:sponsoredCreative:287513206"],"approximateUniqueImpressions":0.0,"documentMidpointCompletions":0.0,"downloadClicks":0.0,"start_date":"2023-08-26","end_date":"2023-08-26","sponsoredCreative":"287513206","pivot":"CREATIVE","oneClickLeads":0.0,"landingPageClicks":0.0,"fullScreenPlays":0.0,"oneClickLeadFormOpens":0.0,"follows":0.0,"impressions":1.0,"otherEngagements":0.0,"externalWebsiteConversions":0.0,"leadGenerationMailContactInfoShares":0.0,"opens":0.0,"leadGenerationMailInterestedClicks":0.0,"likes":0.0},"emitted_at":1712752665838} +{"stream":"ad_creative_analytics","data":{"documentFirstQuartileCompletions":0.0,"actionClicks":0.0,"comments":0.0,"costInUsd":100.0,"commentLikes":0.0,"adUnitClicks":0.0,"companyPageClicks":0.0,"costInLocalCurrency":100.0,"documentThirdQuartileCompletions":0.0,"cardImpressions":0.0,"documentCompletions":0.0,"clicks":106.0,"cardClicks":0.0,"pivotValues":["urn:li:sponsoredCreative:287513206"],"approximateUniqueImpressions":17392.0,"documentMidpointCompletions":0.0,"downloadClicks":0.0,"start_date":"2023-08-25","end_date":"2023-08-25","sponsoredCreative":"287513206","pivot":"CREATIVE","oneClickLeads":0.0,"landingPageClicks":106.0,"fullScreenPlays":0.0,"oneClickLeadFormOpens":0.0,"follows":0.0,"impressions":19464.0,"otherEngagements":0.0,"externalWebsiteConversions":0.0,"leadGenerationMailContactInfoShares":0.0,"opens":0.0,"leadGenerationMailInterestedClicks":0.0,"likes":0.0,"videoCompletions":0.0,"videoFirstQuartileCompletions":0.0,"textUrlClicks":0.0,"videoStarts":0.0,"sends":0.0,"shares":0.0,"videoMidpointCompletions":0.0,"validWorkEmailLeads":0.0,"videoThirdQuartileCompletions":0.0,"totalEngagements":106.0,"reactions":0.0},"emitted_at":1712752665841} +{"stream":"ad_impression_device_analytics","data":{"documentFirstQuartileCompletions":0.0,"actionClicks":0.0,"comments":0.0,"costInUsd":60.06999999999999,"commentLikes":0.0,"adUnitClicks":0.0,"companyPageClicks":0.0,"costInLocalCurrency":60.06999999999999,"documentThirdQuartileCompletions":0.0,"cardImpressions":0.0,"documentCompletions":0.0,"clicks":54.0,"cardClicks":0.0,"pivotValues":["MOBILE_APP"],"documentMidpointCompletions":0.0,"downloadClicks":0.0,"start_date":"2023-08-25","end_date":"2023-08-25","sponsoredCampaign":"252074216","pivot":"IMPRESSION_DEVICE_TYPE","oneClickLeads":0.0,"landingPageClicks":54.0,"fullScreenPlays":0.0,"oneClickLeadFormOpens":0.0,"follows":0.0,"impressions":11707.0,"otherEngagements":0.0,"externalWebsiteConversions":0.0,"leadGenerationMailContactInfoShares":0.0,"opens":0.0,"leadGenerationMailInterestedClicks":0.0,"likes":0.0,"videoCompletions":0.0,"videoFirstQuartileCompletions":0.0,"textUrlClicks":0.0,"videoStarts":0.0,"sends":0.0,"shares":0.0,"videoMidpointCompletions":0.0,"validWorkEmailLeads":0.0,"videoThirdQuartileCompletions":0.0,"totalEngagements":54.0,"reactions":0.0},"emitted_at":1712752683361} +{"stream":"ad_impression_device_analytics","data":{"documentFirstQuartileCompletions":0.0,"actionClicks":0.0,"comments":0.0,"costInUsd":18.13,"commentLikes":0.0,"adUnitClicks":0.0,"companyPageClicks":0.0,"costInLocalCurrency":18.13,"documentThirdQuartileCompletions":0.0,"cardImpressions":0.0,"documentCompletions":0.0,"clicks":32.0,"cardClicks":0.0,"pivotValues":["DESKTOP_WEB"],"documentMidpointCompletions":0.0,"downloadClicks":0.0,"start_date":"2023-08-25","end_date":"2023-08-25","sponsoredCampaign":"252074216","pivot":"IMPRESSION_DEVICE_TYPE","oneClickLeads":0.0,"landingPageClicks":32.0,"fullScreenPlays":0.0,"oneClickLeadFormOpens":0.0,"follows":0.0,"impressions":3581.0,"otherEngagements":0.0,"externalWebsiteConversions":0.0,"leadGenerationMailContactInfoShares":0.0,"opens":0.0,"leadGenerationMailInterestedClicks":0.0,"likes":0.0,"videoCompletions":0.0,"videoFirstQuartileCompletions":0.0,"textUrlClicks":0.0,"videoStarts":0.0,"sends":0.0,"shares":0.0,"videoMidpointCompletions":0.0,"validWorkEmailLeads":0.0,"videoThirdQuartileCompletions":0.0,"totalEngagements":32.0,"reactions":0.0},"emitted_at":1712752683365} +{"stream":"ad_member_company_size_analytics","data":{"documentFirstQuartileCompletions":0.0,"actionClicks":0.0,"comments":0.0,"costInUsd":17.680254254819,"commentLikes":0.0,"adUnitClicks":0.0,"companyPageClicks":0.0,"costInLocalCurrency":14.680254254819,"documentThirdQuartileCompletions":0.0,"documentCompletions":0.0,"clicks":7.0,"pivotValues":["SIZE_201_TO_500"],"documentMidpointCompletions":0.0,"downloadClicks":0.0,"start_date":"2023-08-25","end_date":"2023-08-25","sponsoredCampaign":"252074216","pivot":"MEMBER_COMPANY_SIZE","externalWebsitePostClickConversions":0.0,"externalWebsitePostViewConversions":0.0,"oneClickLeads":0.0,"landingPageClicks":6.0,"fullScreenPlays":0.0,"follows":0.0,"oneClickLeadFormOpens":0.0,"impressions":1018.0,"otherEngagements":0.0,"externalWebsiteConversions":0.0,"leadGenerationMailContactInfoShares":0.0,"opens":0.0,"leadGenerationMailInterestedClicks":0.0,"likes":0.0,"videoCompletions":0.0,"talentLeads":0.0,"videoFirstQuartileCompletions":0.0,"textUrlClicks":0.0,"videoStarts":0.0,"sends":0.0,"shares":0.0,"videoMidpointCompletions":0.0,"validWorkEmailLeads":0.0,"videoThirdQuartileCompletions":0.0,"totalEngagements":5.0,"reactions":0.0,"viralComments":0.0,"viralImpressions":0.0,"viralDocumentCompletions":0.0,"viralFollows":0.0,"viralCommentLikes":0.0,"viralExternalWebsiteConversions":0.0,"viralExternalWebsitePostClickConversions":0.0,"viralDownloadClicks":0.0,"viralFullScreenPlays":0.0,"viralExternalWebsitePostViewConversions":0.0,"viralDocumentFirstQuartileCompletions":0.0,"viralClicks":0.0,"viralCompanyPageClicks":0.0,"viralDocumentMidpointCompletions":0.0,"videoViews":0.0,"viralDocumentThirdQuartileCompletions":0.0},"emitted_at":1712752699771} +{"stream":"ad_member_country_analytics","data":{"documentFirstQuartileCompletions":0.0,"actionClicks":0.0,"comments":0.0,"costInUsd":317.93414846943944,"commentLikes":0.0,"adUnitClicks":0.0,"companyPageClicks":0.0,"costInLocalCurrency":318.93414846943944,"documentThirdQuartileCompletions":0.0,"documentCompletions":0.0,"clicks":110.0,"pivotValues":["urn:li:geo:103644278"],"documentMidpointCompletions":0.0,"downloadClicks":0.0,"start_date":"2023-08-25","end_date":"2023-08-25","sponsoredCampaign":"252074216","pivot":"MEMBER_COUNTRY_V2","externalWebsitePostClickConversions":0.0,"externalWebsitePostViewConversions":0.0,"oneClickLeads":0.0,"landingPageClicks":107.0,"fullScreenPlays":0.0,"follows":0.0,"oneClickLeadFormOpens":0.0,"impressions":19464.0,"otherEngagements":0.0,"externalWebsiteConversions":0.0,"leadGenerationMailContactInfoShares":0.0,"opens":0.0,"leadGenerationMailInterestedClicks":0.0,"likes":0.0,"videoCompletions":0.0,"talentLeads":0.0,"videoFirstQuartileCompletions":0.0,"textUrlClicks":0.0,"videoStarts":0.0,"sends":0.0,"shares":0.0,"videoMidpointCompletions":0.0,"validWorkEmailLeads":0.0,"videoThirdQuartileCompletions":0.0,"totalEngagements":109.0,"reactions":0.0,"viralComments":0.0,"viralImpressions":0.0,"viralDocumentCompletions":0.0,"viralFollows":0.0,"viralCommentLikes":0.0,"viralExternalWebsiteConversions":0.0,"viralExternalWebsitePostClickConversions":0.0,"viralDownloadClicks":0.0,"viralFullScreenPlays":0.0,"viralExternalWebsitePostViewConversions":0.0,"viralDocumentFirstQuartileCompletions":0.0,"viralClicks":0.0,"viralCompanyPageClicks":0.0,"viralDocumentMidpointCompletions":0.0,"videoViews":0.0,"viralDocumentThirdQuartileCompletions":0.0},"emitted_at":1712752716776} +{"stream":"ad_member_job_function_analytics","data":{"documentFirstQuartileCompletions":0.0,"actionClicks":0.0,"comments":0.0,"costInUsd":6.3118985810929855,"commentLikes":0.0,"adUnitClicks":0.0,"companyPageClicks":0.0,"costInLocalCurrency":9.311898581092985,"documentThirdQuartileCompletions":0.0,"documentCompletions":0.0,"clicks":7.0,"pivotValues":["urn:li:function:1"],"documentMidpointCompletions":0.0,"downloadClicks":0.0,"start_date":"2023-08-25","end_date":"2023-08-25","sponsoredCampaign":"252074216","pivot":"MEMBER_JOB_FUNCTION","externalWebsitePostClickConversions":0.0,"externalWebsitePostViewConversions":0.0,"oneClickLeads":0.0,"landingPageClicks":9.0,"fullScreenPlays":0.0,"follows":0.0,"oneClickLeadFormOpens":0.0,"impressions":392.0,"otherEngagements":0.0,"externalWebsiteConversions":0.0,"leadGenerationMailContactInfoShares":0.0,"opens":0.0,"leadGenerationMailInterestedClicks":0.0,"likes":0.0,"videoCompletions":0.0,"talentLeads":0.0,"videoFirstQuartileCompletions":0.0,"textUrlClicks":0.0,"videoStarts":0.0,"sends":0.0,"shares":0.0,"videoMidpointCompletions":0.0,"validWorkEmailLeads":0.0,"videoThirdQuartileCompletions":0.0,"totalEngagements":6.0,"reactions":0.0,"viralComments":0.0,"viralImpressions":0.0,"viralDocumentCompletions":0.0,"viralFollows":0.0,"viralCommentLikes":0.0,"viralExternalWebsiteConversions":0.0,"viralExternalWebsitePostClickConversions":0.0,"viralDownloadClicks":0.0,"viralFullScreenPlays":0.0,"viralExternalWebsitePostViewConversions":0.0,"viralDocumentFirstQuartileCompletions":0.0,"viralClicks":0.0,"viralCompanyPageClicks":0.0,"viralDocumentMidpointCompletions":0.0,"videoViews":0.0,"viralDocumentThirdQuartileCompletions":0.0},"emitted_at":1712752733925} +{"stream":"ad_member_job_title_analytics","data":{"documentFirstQuartileCompletions":0.0,"actionClicks":0.0,"comments":0.0,"costInUsd":13.656450854809513,"commentLikes":0.0,"adUnitClicks":0.0,"companyPageClicks":0.0,"costInLocalCurrency":11.656450854809513,"documentThirdQuartileCompletions":0.0,"documentCompletions":0.0,"clicks":6.0,"pivotValues":["urn:li:title:1"],"documentMidpointCompletions":0.0,"downloadClicks":0.0,"start_date":"2023-08-25","end_date":"2023-08-25","sponsoredCampaign":"252074216","pivot":"MEMBER_JOB_TITLE","externalWebsitePostClickConversions":0.0,"externalWebsitePostViewConversions":0.0,"oneClickLeads":0.0,"landingPageClicks":5.0,"fullScreenPlays":0.0,"follows":0.0,"oneClickLeadFormOpens":0.0,"impressions":828.0,"otherEngagements":0.0,"externalWebsiteConversions":0.0,"leadGenerationMailContactInfoShares":0.0,"opens":0.0,"leadGenerationMailInterestedClicks":0.0,"likes":0.0,"videoCompletions":0.0,"talentLeads":0.0,"videoFirstQuartileCompletions":0.0,"textUrlClicks":0.0,"videoStarts":0.0,"sends":0.0,"shares":0.0,"videoMidpointCompletions":0.0,"validWorkEmailLeads":0.0,"videoThirdQuartileCompletions":0.0,"totalEngagements":4.0,"reactions":0.0,"viralComments":0.0,"viralImpressions":0.0,"viralDocumentCompletions":0.0,"viralFollows":0.0,"viralCommentLikes":0.0,"viralExternalWebsiteConversions":0.0,"viralExternalWebsitePostClickConversions":0.0,"viralDownloadClicks":0.0,"viralFullScreenPlays":0.0,"viralExternalWebsitePostViewConversions":0.0,"viralDocumentFirstQuartileCompletions":0.0,"viralClicks":0.0,"viralCompanyPageClicks":0.0,"viralDocumentMidpointCompletions":0.0,"videoViews":0.0,"viralDocumentThirdQuartileCompletions":0.0},"emitted_at":1712752751546} +{"stream":"ad_member_industry_analytics","data":{"documentFirstQuartileCompletions":0.0,"actionClicks":0.0,"comments":0.0,"costInUsd":9.040292565562618,"commentLikes":0.0,"adUnitClicks":0.0,"companyPageClicks":0.0,"costInLocalCurrency":11.040292565562618,"documentThirdQuartileCompletions":0.0,"documentCompletions":0.0,"clicks":3.0,"pivotValues":["urn:li:industry:11"],"documentMidpointCompletions":0.0,"downloadClicks":0.0,"start_date":"2023-08-25","end_date":"2023-08-25","sponsoredCampaign":"252074216","pivot":"MEMBER_INDUSTRY","externalWebsitePostClickConversions":0.0,"externalWebsitePostViewConversions":0.0,"oneClickLeads":0.0,"landingPageClicks":4.0,"fullScreenPlays":0.0,"follows":0.0,"oneClickLeadFormOpens":0.0,"impressions":665.0,"otherEngagements":0.0,"externalWebsiteConversions":0.0,"leadGenerationMailContactInfoShares":0.0,"opens":0.0,"leadGenerationMailInterestedClicks":0.0,"likes":0.0,"videoCompletions":0.0,"talentLeads":0.0,"videoFirstQuartileCompletions":0.0,"textUrlClicks":0.0,"videoStarts":0.0,"sends":0.0,"shares":0.0,"videoMidpointCompletions":0.0,"validWorkEmailLeads":0.0,"videoThirdQuartileCompletions":0.0,"totalEngagements":5.0,"reactions":0.0,"viralComments":0.0,"viralImpressions":0.0,"viralDocumentCompletions":0.0,"viralFollows":0.0,"viralCommentLikes":0.0,"viralExternalWebsiteConversions":0.0,"viralExternalWebsitePostClickConversions":0.0,"viralDownloadClicks":0.0,"viralFullScreenPlays":0.0,"viralExternalWebsitePostViewConversions":0.0,"viralDocumentFirstQuartileCompletions":0.0,"viralClicks":0.0,"viralCompanyPageClicks":0.0,"viralDocumentMidpointCompletions":0.0,"videoViews":0.0,"viralDocumentThirdQuartileCompletions":0.0},"emitted_at":1712752768755} +{"stream":"ad_member_seniority_analytics","data":{"documentFirstQuartileCompletions":0.0,"actionClicks":0.0,"comments":0.0,"costInUsd":18.04101662569067,"commentLikes":0.0,"adUnitClicks":0.0,"companyPageClicks":0.0,"costInLocalCurrency":16.04101662569067,"documentThirdQuartileCompletions":0.0,"documentCompletions":0.0,"clicks":6.0,"pivotValues":["urn:li:seniority:6"],"documentMidpointCompletions":0.0,"downloadClicks":0.0,"start_date":"2023-08-25","end_date":"2023-08-25","sponsoredCampaign":"252074216","pivot":"MEMBER_SENIORITY","externalWebsitePostClickConversions":0.0,"externalWebsitePostViewConversions":0.0,"oneClickLeads":0.0,"landingPageClicks":7.0,"fullScreenPlays":0.0,"follows":0.0,"oneClickLeadFormOpens":0.0,"impressions":1166.0,"otherEngagements":0.0,"externalWebsiteConversions":0.0,"leadGenerationMailContactInfoShares":0.0,"opens":0.0,"leadGenerationMailInterestedClicks":0.0,"likes":0.0,"videoCompletions":0.0,"talentLeads":0.0,"videoFirstQuartileCompletions":0.0,"textUrlClicks":0.0,"videoStarts":0.0,"sends":0.0,"shares":0.0,"videoMidpointCompletions":0.0,"validWorkEmailLeads":0.0,"videoThirdQuartileCompletions":0.0,"totalEngagements":10.0,"reactions":0.0,"viralComments":0.0,"viralImpressions":0.0,"viralDocumentCompletions":0.0,"viralFollows":0.0,"viralCommentLikes":0.0,"viralExternalWebsiteConversions":0.0,"viralExternalWebsitePostClickConversions":0.0,"viralDownloadClicks":0.0,"viralFullScreenPlays":0.0,"viralExternalWebsitePostViewConversions":0.0,"viralDocumentFirstQuartileCompletions":0.0,"viralClicks":0.0,"viralCompanyPageClicks":0.0,"viralDocumentMidpointCompletions":0.0,"videoViews":0.0,"viralDocumentThirdQuartileCompletions":0.0},"emitted_at":1712752785616} +{"stream":"ad_member_region_analytics","data":{"documentFirstQuartileCompletions":0.0,"actionClicks":0.0,"comments":0.0,"costInUsd":21.786568843265876,"commentLikes":0.0,"adUnitClicks":0.0,"companyPageClicks":0.0,"costInLocalCurrency":23.786568843265876,"documentThirdQuartileCompletions":0.0,"documentCompletions":0.0,"clicks":9.0,"pivotValues":["urn:li:geo:90000070"],"documentMidpointCompletions":0.0,"downloadClicks":0.0,"start_date":"2023-08-25","end_date":"2023-08-25","sponsoredCampaign":"252074216","pivot":"MEMBER_REGION_V2","externalWebsitePostClickConversions":0.0,"externalWebsitePostViewConversions":0.0,"oneClickLeads":0.0,"landingPageClicks":12.0,"fullScreenPlays":0.0,"follows":0.0,"oneClickLeadFormOpens":0.0,"impressions":1342.0,"otherEngagements":0.0,"externalWebsiteConversions":0.0,"leadGenerationMailContactInfoShares":0.0,"opens":0.0,"leadGenerationMailInterestedClicks":0.0,"likes":0.0,"videoCompletions":0.0,"talentLeads":0.0,"videoFirstQuartileCompletions":0.0,"textUrlClicks":0.0,"videoStarts":0.0,"sends":0.0,"shares":0.0,"videoMidpointCompletions":0.0,"validWorkEmailLeads":0.0,"videoThirdQuartileCompletions":0.0,"totalEngagements":11.0,"reactions":0.0,"viralComments":0.0,"viralImpressions":0.0,"viralDocumentCompletions":0.0,"viralFollows":0.0,"viralCommentLikes":0.0,"viralExternalWebsiteConversions":0.0,"viralExternalWebsitePostClickConversions":0.0,"viralDownloadClicks":0.0,"viralFullScreenPlays":0.0,"viralExternalWebsitePostViewConversions":0.0,"viralDocumentFirstQuartileCompletions":0.0,"viralClicks":0.0,"viralCompanyPageClicks":0.0,"viralDocumentMidpointCompletions":0.0,"videoViews":0.0,"viralDocumentThirdQuartileCompletions":0.0},"emitted_at":1712752802713} +{"stream":"ad_member_company_analytics","data":{"externalWebsitePostClickConversions":0.0,"externalWebsitePostViewConversions":0.0,"oneClickLeads":0.0,"landingPageClicks":0.0,"fullScreenPlays":0.0,"follows":0.0,"oneClickLeadFormOpens":0.0,"impressions":6.0,"otherEngagements":0.0,"externalWebsiteConversions":0.0,"leadGenerationMailContactInfoShares":0.0,"opens":0.0,"leadGenerationMailInterestedClicks":0.0,"pivotValues":["urn:li:organization:3677"],"likes":0.0,"start_date":"2023-08-25","end_date":"2023-08-25","sponsoredCampaign":"252074216","pivot":"MEMBER_COMPANY","viralComments":0.0,"viralImpressions":0.0,"viralDocumentCompletions":0.0,"viralFollows":0.0,"viralCommentLikes":0.0,"viralExternalWebsiteConversions":0.0,"viralExternalWebsitePostClickConversions":0.0,"viralDownloadClicks":0.0,"viralFullScreenPlays":0.0,"viralExternalWebsitePostViewConversions":0.0,"viralDocumentFirstQuartileCompletions":0.0,"viralClicks":0.0,"viralCompanyPageClicks":0.0,"viralDocumentMidpointCompletions":0.0,"videoViews":0.0,"viralDocumentThirdQuartileCompletions":0.0},"emitted_at":1712752820269} {"stream": "campaign_groups", "data": {"runSchedule": {"start": 1623665362312}, "test": false, "name": "Default Campaign Group", "servingStatuses": ["RUNNABLE"], "backfilled": true, "id": 615492066, "account": "urn:li:sponsoredAccount:508720451", "status": "ACTIVE", "created": "2021-06-14T10:09:22+00:00", "lastModified": "2021-06-14T10:09:22+00:00"}, "emitted_at": 1697196810514} {"stream": "campaign_groups", "data": {"runSchedule": {"start": 1628229693058, "end": 1695253500000}, "test": false, "totalBudget": {"currencyCode": "USD", "amount": "200"}, "name": "Airbyte Test", "servingStatuses": ["CAMPAIGN_GROUP_END_DATE_HOLD", "CAMPAIGN_GROUP_TOTAL_BUDGET_HOLD"], "backfilled": false, "id": 616471656, "account": "urn:li:sponsoredAccount:508720451", "status": "ACTIVE", "created": "2021-08-06T06:01:33+00:00", "lastModified": "2023-09-20T23:33:45+00:00"}, "emitted_at": 1697196810515} {"stream": "campaign_groups", "data": {"runSchedule": {"start": 1629581299760}, "test": false, "name": "Test Campaign Group 2", "servingStatuses": ["STOPPED", "BILLING_HOLD"], "backfilled": false, "id": 616749096, "account": "urn:li:sponsoredAccount:508774356", "status": "PAUSED", "created": "2021-08-21T21:28:19+00:00", "lastModified": "2021-08-21T21:29:27+00:00"}, "emitted_at": 1697196810793} diff --git a/airbyte-integrations/connectors/source-linkedin-ads/metadata.yaml b/airbyte-integrations/connectors/source-linkedin-ads/metadata.yaml index 0c00c634873c9..52e098a5d7d9b 100644 --- a/airbyte-integrations/connectors/source-linkedin-ads/metadata.yaml +++ b/airbyte-integrations/connectors/source-linkedin-ads/metadata.yaml @@ -11,7 +11,7 @@ data: connectorSubtype: api connectorType: source definitionId: 137ece28-5434-455c-8f34-69dc3782f451 - dockerImageTag: 0.7.0 + dockerImageTag: 1.0.0 dockerRepository: airbyte/source-linkedin-ads documentationUrl: https://docs.airbyte.com/integrations/sources/linkedin-ads githubIssueLabel: source-linkedin-ads @@ -29,6 +29,25 @@ data: oss: enabled: true releaseStage: generally_available + releases: + breakingChanges: + 1.0.0: + message: This upgrade brings changes in primary key to *-analytics streams. + upgradeDeadline: "2024-04-30" + scopedImpact: + - scopeType: stream + impactedScopes: + - "ad_campaign_analytics" + - "ad_creative_analytics" + - "ad_impression_device_analytics" + - "ad_member_company_size_analytics" + - "ad_member_country_analytics" + - "ad_member_job_function_analytics" + - "ad_member_job_title_analytics" + - "ad_member_industry_analytics" + - "ad_member_seniority_analytics" + - "ad_member_region_analytics" + - "ad_member_company_analytics" suggestedStreams: streams: - accounts diff --git a/airbyte-integrations/connectors/source-linkedin-ads/poetry.lock b/airbyte-integrations/connectors/source-linkedin-ads/poetry.lock index 4c1cbe822ae26..a07d3ad41dbee 100644 --- a/airbyte-integrations/connectors/source-linkedin-ads/poetry.lock +++ b/airbyte-integrations/connectors/source-linkedin-ads/poetry.lock @@ -1,14 +1,14 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.63.2" +version = "0.72.1" description = "A framework for writing Airbyte Connectors." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte-cdk-0.63.2.tar.gz", hash = "sha256:b2edc160f560352a816f3a266b5dfa6dfe37868add1e3a0a2628eb19ba771ed1"}, - {file = "airbyte_cdk-0.63.2-py3-none-any.whl", hash = "sha256:8698cb94514f35577123520954503cb2da407423af109dffd03644ba8b0093cd"}, + {file = "airbyte-cdk-0.72.1.tar.gz", hash = "sha256:1dbd0a11f3784cfdd5afa9f40315c9a6123e803be91f9f861642a78e7ee14cd9"}, + {file = "airbyte_cdk-0.72.1-py3-none-any.whl", hash = "sha256:849077805442286de99f589ecba4be82491a3d9d3f516ce1a8b0cbaf303db9a4"}, ] [package.dependencies] @@ -32,8 +32,8 @@ requests-cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] @@ -104,13 +104,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -467,13 +467,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -702,13 +702,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -750,7 +750,6 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -857,19 +856,19 @@ test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "tes [[package]] name = "setuptools" -version = "69.1.0" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -895,13 +894,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -1031,4 +1030,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "861d01a7b883a61e01367a1c883fbc1699ef39984c364c6bd9984703c1747375" +content-hash = "4624f76d4dc767d9b8cf0fe2a56e8b4b407596942e16b39cd0d2940baa2ec59b" diff --git a/airbyte-integrations/connectors/source-linkedin-ads/pyproject.toml b/airbyte-integrations/connectors/source-linkedin-ads/pyproject.toml index bcbb55b925737..83d635ed38ebc 100644 --- a/airbyte-integrations/connectors/source-linkedin-ads/pyproject.toml +++ b/airbyte-integrations/connectors/source-linkedin-ads/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "0.7.0" +version = "1.0.0" name = "source-linkedin-ads" description = "Source implementation for Linkedin Ads." authors = [ "Airbyte ",] @@ -17,7 +17,7 @@ include = "source_linkedin_ads" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "==0.63.2" +airbyte-cdk = "^0" [tool.poetry.scripts] source-linkedin-ads = "source_linkedin_ads.run:run" diff --git a/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/analytics_streams.py b/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/analytics_streams.py index f58da0e25c8b5..e0835840630e7 100644 --- a/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/analytics_streams.py +++ b/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/analytics_streams.py @@ -127,13 +127,15 @@ class LinkedInAdsAnalyticsStream(IncrementalLinkedinAdsStream, ABC): endpoint = "adAnalytics" # For Analytics streams, the primary_key is the entity of the pivot [Campaign URN, Creative URN, etc.] + `end_date` - primary_key = ["pivotValue", "end_date"] + primary_key = ["pivotValues", "end_date"] cursor_field = "end_date" records_limit = 15000 - FIELDS_CHUNK_SIZE = 19 + FIELDS_CHUNK_SIZE = 18 def get_json_schema(self) -> Mapping[str, Any]: - return ResourceSchemaLoader(package_name_from_class(self.__class__)).get_schema("ad_analytics") + schema = ResourceSchemaLoader(package_name_from_class(self.__class__)).get_schema("ad_analytics") + schema["properties"].update({self.search_param_value: {"type": ["null", "string"]}}) + return schema def __init__(self, name: str = None, pivot_by: str = None, time_granularity: str = None, **kwargs): self.user_stream_name = name @@ -286,6 +288,8 @@ def chunk_analytics_fields( for chunk in chunks: if "dateRange" not in chunk: chunk.append("dateRange") + if "pivotValues" not in chunk: + chunk.append("pivotValues") yield from chunks def read_records( @@ -294,7 +298,7 @@ def read_records( merged_records = defaultdict(dict) for field_slice in stream_slice: for rec in super().read_records(stream_slice=field_slice, **kwargs): - merged_records[rec[self.cursor_field]].update(rec) + merged_records[f"{rec[self.cursor_field]}-{rec['pivotValues']}"].update(rec) yield from merged_records.values() def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: @@ -302,7 +306,7 @@ def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapp We need to get out the nested complex data structures for further normalization, so the transform_data method is applied. """ for rec in transform_data(response.json().get("elements")): - yield rec | {"pivotValue": f"urn:li:{self.search_param_value}:{self.get_primary_key_from_slice(kwargs.get('stream_slice'))}"} + yield rec | {self.search_param_value: self.get_primary_key_from_slice(kwargs.get("stream_slice")), "pivot": self.pivot_by} class AdCampaignAnalytics(LinkedInAdsAnalyticsStream): diff --git a/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/schemas/ad_analytics.json b/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/schemas/ad_analytics.json index 100daeaebb493..e81dc02e9a6f6 100644 --- a/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/schemas/ad_analytics.json +++ b/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/schemas/ad_analytics.json @@ -107,9 +107,6 @@ "otherEngagements": { "type": ["null", "number"] }, - "pivotValue": { - "type": ["null", "string"] - }, "pivotValues": { "type": ["null", "array"], "items": { @@ -299,6 +296,9 @@ }, "viralVideoViews": { "type": ["null", "number"] + }, + "pivot": { + "type": ["null", "string"] } } } diff --git a/airbyte-integrations/connectors/source-linkedin-ads/unit_tests/output_slices.json b/airbyte-integrations/connectors/source-linkedin-ads/unit_tests/output_slices.json index edab652341716..2df6f3200175b 100644 --- a/airbyte-integrations/connectors/source-linkedin-ads/unit_tests/output_slices.json +++ b/airbyte-integrations/connectors/source-linkedin-ads/unit_tests/output_slices.json @@ -10,117 +10,141 @@ "start.month": 1, "start.year": 2021 }, - "fields": "actionClicks,adUnitClicks,approximateUniqueImpressions,cardClicks,cardImpressions,clicks,commentLikes,comments,companyPageClicks,conversionValueInLocalCurrency,costInLocalCurrency,costInUsd,dateRange,documentCompletions,documentFirstQuartileCompletions,documentMidpointCompletions,documentThirdQuartileCompletions,downloadClicks,externalWebsiteConversions" + "fields": "actionClicks,adUnitClicks,approximateUniqueImpressions,cardClicks,cardImpressions,clicks,commentLikes,comments,companyPageClicks,conversionValueInLocalCurrency,costInLocalCurrency,costInUsd,dateRange,documentCompletions,documentFirstQuartileCompletions,documentMidpointCompletions,documentThirdQuartileCompletions,downloadClicks,pivotValues" }, { "campaign_id": 123, + "fields": "externalWebsiteConversions,externalWebsitePostClickConversions,externalWebsitePostViewConversions,follows,fullScreenPlays,impressions,jobApplications,jobApplyClicks,landingPageClicks,leadGenerationMailContactInfoShares,leadGenerationMailInterestedClicks,likes,oneClickLeadFormOpens,oneClickLeads,opens,otherEngagements,pivotValues,postClickJobApplications,dateRange", "dateRange": { - "end.day": 31, - "end.month": 1, - "end.year": 2021, "start.day": 1, "start.month": 1, - "start.year": 2021 - }, - "fields": "externalWebsitePostClickConversions,externalWebsitePostViewConversions,follows,fullScreenPlays,impressions,jobApplications,jobApplyClicks,landingPageClicks,leadGenerationMailContactInfoShares,leadGenerationMailInterestedClicks,likes,oneClickLeadFormOpens,oneClickLeads,opens,otherEngagements,pivotValues,postClickJobApplications,postClickJobApplyClicks,postClickRegistrations,dateRange" + "start.year": 2021, + "end.day": 31, + "end.month": 1, + "end.year": 2021 + } }, { "campaign_id": 123, + "fields": "postClickJobApplyClicks,postClickRegistrations,postViewJobApplications,postViewJobApplyClicks,postViewRegistrations,reactions,registrations,sends,shares,talentLeads,textUrlClicks,totalEngagements,validWorkEmailLeads,videoCompletions,videoFirstQuartileCompletions,videoMidpointCompletions,videoStarts,videoThirdQuartileCompletions,dateRange,pivotValues", "dateRange": { - "end.day": 31, - "end.month": 1, - "end.year": 2021, "start.day": 1, "start.month": 1, - "start.year": 2021 - }, - "fields": "postViewJobApplications,postViewJobApplyClicks,postViewRegistrations,reactions,registrations,sends,shares,talentLeads,textUrlClicks,totalEngagements,validWorkEmailLeads,videoCompletions,videoFirstQuartileCompletions,videoMidpointCompletions,videoStarts,videoThirdQuartileCompletions,videoViews,viralCardClicks,viralCardImpressions,dateRange" + "start.year": 2021, + "end.day": 31, + "end.month": 1, + "end.year": 2021 + } }, { "campaign_id": 123, + "fields": "videoViews,viralCardClicks,viralCardImpressions,viralClicks,viralCommentLikes,viralComments,viralCompanyPageClicks,viralDocumentCompletions,viralDocumentFirstQuartileCompletions,viralDocumentMidpointCompletions,viralDocumentThirdQuartileCompletions,viralDownloadClicks,viralExternalWebsiteConversions,viralExternalWebsitePostClickConversions,viralExternalWebsitePostViewConversions,viralFollows,viralFullScreenPlays,viralImpressions,dateRange,pivotValues", "dateRange": { - "end.day": 31, - "end.month": 1, - "end.year": 2021, "start.day": 1, "start.month": 1, - "start.year": 2021 - }, - "fields": "viralClicks,viralCommentLikes,viralComments,viralCompanyPageClicks,viralDocumentCompletions,viralDocumentFirstQuartileCompletions,viralDocumentMidpointCompletions,viralDocumentThirdQuartileCompletions,viralDownloadClicks,viralExternalWebsiteConversions,viralExternalWebsitePostClickConversions,viralExternalWebsitePostViewConversions,viralFollows,viralFullScreenPlays,viralImpressions,viralJobApplications,viralJobApplyClicks,viralLandingPageClicks,viralLikes,dateRange" + "start.year": 2021, + "end.day": 31, + "end.month": 1, + "end.year": 2021 + } }, { "campaign_id": 123, + "fields": "viralJobApplications,viralJobApplyClicks,viralLandingPageClicks,viralLikes,viralOneClickLeadFormOpens,viralOneClickLeads,viralOtherEngagements,viralPostClickJobApplications,viralPostClickJobApplyClicks,viralPostClickRegistrations,viralPostViewJobApplications,viralPostViewJobApplyClicks,viralPostViewRegistrations,viralReactions,viralRegistrations,viralShares,viralTotalEngagements,viralVideoCompletions,dateRange,pivotValues", "dateRange": { + "start.day": 1, + "start.month": 1, + "start.year": 2021, "end.day": 31, "end.month": 1, - "end.year": 2021, + "end.year": 2021 + } + }, + { + "campaign_id": 123, + "fields": "viralVideoFirstQuartileCompletions,viralVideoMidpointCompletions,viralVideoStarts,viralVideoThirdQuartileCompletions,viralVideoViews,dateRange,pivotValues", + "dateRange": { "start.day": 1, "start.month": 1, - "start.year": 2021 - }, - "fields": "viralOneClickLeadFormOpens,viralOneClickLeads,viralOtherEngagements,viralPostClickJobApplications,viralPostClickJobApplyClicks,viralPostClickRegistrations,viralPostViewJobApplications,viralPostViewJobApplyClicks,viralPostViewRegistrations,viralReactions,viralRegistrations,viralShares,viralTotalEngagements,viralVideoCompletions,viralVideoFirstQuartileCompletions,viralVideoMidpointCompletions,viralVideoStarts,viralVideoThirdQuartileCompletions,viralVideoViews,dateRange" + "start.year": 2021, + "end.day": 31, + "end.month": 1, + "end.year": 2021 + } } ], [ { "campaign_id": 123, + "fields": "actionClicks,adUnitClicks,approximateUniqueImpressions,cardClicks,cardImpressions,clicks,commentLikes,comments,companyPageClicks,conversionValueInLocalCurrency,costInLocalCurrency,costInUsd,dateRange,documentCompletions,documentFirstQuartileCompletions,documentMidpointCompletions,documentThirdQuartileCompletions,downloadClicks,pivotValues", "dateRange": { - "end.day": 2, - "end.month": 3, - "end.year": 2021, "start.day": 31, "start.month": 1, - "start.year": 2021 - }, - "fields": "actionClicks,adUnitClicks,approximateUniqueImpressions,cardClicks,cardImpressions,clicks,commentLikes,comments,companyPageClicks,conversionValueInLocalCurrency,costInLocalCurrency,costInUsd,dateRange,documentCompletions,documentFirstQuartileCompletions,documentMidpointCompletions,documentThirdQuartileCompletions,downloadClicks,externalWebsiteConversions" + "start.year": 2021, + "end.day": 2, + "end.month": 3, + "end.year": 2021 + } }, { "campaign_id": 123, + "fields": "externalWebsiteConversions,externalWebsitePostClickConversions,externalWebsitePostViewConversions,follows,fullScreenPlays,impressions,jobApplications,jobApplyClicks,landingPageClicks,leadGenerationMailContactInfoShares,leadGenerationMailInterestedClicks,likes,oneClickLeadFormOpens,oneClickLeads,opens,otherEngagements,pivotValues,postClickJobApplications,dateRange", "dateRange": { - "end.day": 2, - "end.month": 3, - "end.year": 2021, "start.day": 31, "start.month": 1, - "start.year": 2021 - }, - "fields": "externalWebsitePostClickConversions,externalWebsitePostViewConversions,follows,fullScreenPlays,impressions,jobApplications,jobApplyClicks,landingPageClicks,leadGenerationMailContactInfoShares,leadGenerationMailInterestedClicks,likes,oneClickLeadFormOpens,oneClickLeads,opens,otherEngagements,pivotValues,postClickJobApplications,postClickJobApplyClicks,postClickRegistrations,dateRange" + "start.year": 2021, + "end.day": 2, + "end.month": 3, + "end.year": 2021 + } }, { "campaign_id": 123, + "fields": "postClickJobApplyClicks,postClickRegistrations,postViewJobApplications,postViewJobApplyClicks,postViewRegistrations,reactions,registrations,sends,shares,talentLeads,textUrlClicks,totalEngagements,validWorkEmailLeads,videoCompletions,videoFirstQuartileCompletions,videoMidpointCompletions,videoStarts,videoThirdQuartileCompletions,dateRange,pivotValues", "dateRange": { - "end.day": 2, - "end.month": 3, - "end.year": 2021, "start.day": 31, "start.month": 1, - "start.year": 2021 - }, - "fields": "postViewJobApplications,postViewJobApplyClicks,postViewRegistrations,reactions,registrations,sends,shares,talentLeads,textUrlClicks,totalEngagements,validWorkEmailLeads,videoCompletions,videoFirstQuartileCompletions,videoMidpointCompletions,videoStarts,videoThirdQuartileCompletions,videoViews,viralCardClicks,viralCardImpressions,dateRange" + "start.year": 2021, + "end.day": 2, + "end.month": 3, + "end.year": 2021 + } }, { "campaign_id": 123, + "fields": "videoViews,viralCardClicks,viralCardImpressions,viralClicks,viralCommentLikes,viralComments,viralCompanyPageClicks,viralDocumentCompletions,viralDocumentFirstQuartileCompletions,viralDocumentMidpointCompletions,viralDocumentThirdQuartileCompletions,viralDownloadClicks,viralExternalWebsiteConversions,viralExternalWebsitePostClickConversions,viralExternalWebsitePostViewConversions,viralFollows,viralFullScreenPlays,viralImpressions,dateRange,pivotValues", "dateRange": { - "end.day": 2, - "end.month": 3, - "end.year": 2021, "start.day": 31, "start.month": 1, - "start.year": 2021 - }, - "fields": "viralClicks,viralCommentLikes,viralComments,viralCompanyPageClicks,viralDocumentCompletions,viralDocumentFirstQuartileCompletions,viralDocumentMidpointCompletions,viralDocumentThirdQuartileCompletions,viralDownloadClicks,viralExternalWebsiteConversions,viralExternalWebsitePostClickConversions,viralExternalWebsitePostViewConversions,viralFollows,viralFullScreenPlays,viralImpressions,viralJobApplications,viralJobApplyClicks,viralLandingPageClicks,viralLikes,dateRange" + "start.year": 2021, + "end.day": 2, + "end.month": 3, + "end.year": 2021 + } }, { "campaign_id": 123, + "fields": "viralJobApplications,viralJobApplyClicks,viralLandingPageClicks,viralLikes,viralOneClickLeadFormOpens,viralOneClickLeads,viralOtherEngagements,viralPostClickJobApplications,viralPostClickJobApplyClicks,viralPostClickRegistrations,viralPostViewJobApplications,viralPostViewJobApplyClicks,viralPostViewRegistrations,viralReactions,viralRegistrations,viralShares,viralTotalEngagements,viralVideoCompletions,dateRange,pivotValues", "dateRange": { + "start.day": 31, + "start.month": 1, + "start.year": 2021, "end.day": 2, "end.month": 3, - "end.year": 2021, + "end.year": 2021 + } + }, + { + "campaign_id": 123, + "fields": "viralVideoFirstQuartileCompletions,viralVideoMidpointCompletions,viralVideoStarts,viralVideoThirdQuartileCompletions,viralVideoViews,dateRange,pivotValues", + "dateRange": { "start.day": 31, "start.month": 1, - "start.year": 2021 - }, - "fields": "viralOneClickLeadFormOpens,viralOneClickLeads,viralOtherEngagements,viralPostClickJobApplications,viralPostClickJobApplyClicks,viralPostClickRegistrations,viralPostViewJobApplications,viralPostViewJobApplyClicks,viralPostViewRegistrations,viralReactions,viralRegistrations,viralShares,viralTotalEngagements,viralVideoCompletions,viralVideoFirstQuartileCompletions,viralVideoMidpointCompletions,viralVideoStarts,viralVideoThirdQuartileCompletions,viralVideoViews,dateRange" + "start.year": 2021, + "end.day": 2, + "end.month": 3, + "end.year": 2021 + } } ] ] diff --git a/airbyte-integrations/connectors/source-linkedin-ads/unit_tests/responses/ad_member_country_analytics/response_1.json b/airbyte-integrations/connectors/source-linkedin-ads/unit_tests/responses/ad_member_country_analytics/response_1.json index a68474329ce06..7e1bcbb080058 100644 --- a/airbyte-integrations/connectors/source-linkedin-ads/unit_tests/responses/ad_member_country_analytics/response_1.json +++ b/airbyte-integrations/connectors/source-linkedin-ads/unit_tests/responses/ad_member_country_analytics/response_1.json @@ -22,6 +22,7 @@ "year": 2023 } }, + "pivotValues": ["urn:li:sponsoredCreative:1"], "commentLikes": 0, "adUnitClicks": 0, "companyPageClicks": 0, @@ -53,6 +54,7 @@ "year": 2023 } }, + "pivotValues": ["urn:li:sponsoredCreative:1"], "commentLikes": 0, "adUnitClicks": 0, "companyPageClicks": 0, diff --git a/airbyte-integrations/connectors/source-linkedin-ads/unit_tests/responses/ad_member_country_analytics/response_3.json b/airbyte-integrations/connectors/source-linkedin-ads/unit_tests/responses/ad_member_country_analytics/response_3.json index 5e128840ccb48..f9cf67fd44ece 100644 --- a/airbyte-integrations/connectors/source-linkedin-ads/unit_tests/responses/ad_member_country_analytics/response_3.json +++ b/airbyte-integrations/connectors/source-linkedin-ads/unit_tests/responses/ad_member_country_analytics/response_3.json @@ -19,6 +19,7 @@ "year": 2023 } }, + "pivotValues": ["urn:li:sponsoredCreative:1"], "viralCardImpressions": 0, "videoFirstQuartileCompletions": 0, "textUrlClicks": 0, diff --git a/airbyte-integrations/connectors/source-linkedin-ads/unit_tests/test_analytics_streams.py b/airbyte-integrations/connectors/source-linkedin-ads/unit_tests/test_analytics_streams.py index 3936c7fad7e6d..3485e12b8bb8c 100644 --- a/airbyte-integrations/connectors/source-linkedin-ads/unit_tests/test_analytics_streams.py +++ b/airbyte-integrations/connectors/source-linkedin-ads/unit_tests/test_analytics_streams.py @@ -81,10 +81,10 @@ def test_chunk_analytics_fields(): with "dateRange" field presented in each chunk. """ expected_output = [ - ["field_1", "base_field_1", "field_2", "dateRange"], - ["base_field_2", "field_3", "field_4", "dateRange"], - ["field_5", "field_6", "field_7", "dateRange"], - ["field_8", "dateRange"], + ["field_1", "base_field_1", "field_2", "dateRange", "pivotValues"], + ["base_field_2", "field_3", "field_4", "dateRange", "pivotValues"], + ["field_5", "field_6", "field_7", "dateRange", "pivotValues"], + ["field_8", "dateRange", "pivotValues"], ] assert list(LinkedInAdsAnalyticsStream.chunk_analytics_fields(TEST_ANALYTICS_FIELDS, TEST_FIELDS_CHUNK_SIZE)) == expected_output diff --git a/airbyte-integrations/connectors/source-mailchimp/.coveragerc b/airbyte-integrations/connectors/source-mailchimp/.coveragerc new file mode 100644 index 0000000000000..e8793cf04be38 --- /dev/null +++ b/airbyte-integrations/connectors/source-mailchimp/.coveragerc @@ -0,0 +1,3 @@ +[run] +omit = + source_mailchimp/run.py \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-mailchimp/acceptance-test-config.yml b/airbyte-integrations/connectors/source-mailchimp/acceptance-test-config.yml index c5aeffc15774c..8ded4d12f3e70 100644 --- a/airbyte-integrations/connectors/source-mailchimp/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-mailchimp/acceptance-test-config.yml @@ -12,12 +12,9 @@ acceptance_tests: # for auth with oauth2 token - config_path: "secrets/config_oauth.json" status: "succeed" - - config_path: "integration_tests/invalid_config.json" - status: "failed" - config_path: "integration_tests/invalid_config_apikey.json" status: "failed" - - config_path: "integration_tests/invalid_config_oauth.json" - status: "failed" + timeout_seconds: 300 discovery: tests: # for auth with API token @@ -33,24 +30,15 @@ acceptance_tests: empty_streams: - name: "automations" bypass_reason: "Cannot seed in free sandbox account, need to upgrade to paid account." - - config_path: "secrets/config_oauth.json" - expect_records: - path: "integration_tests/expected_records.jsonl" - fail_on_extra_columns: false - empty_streams: - - name: "automations" - bypass_reason: "Cannot seed in free sandbox account, need to upgrade to paid account." incremental: tests: - config_path: "secrets/config.json" configured_catalog_path: "integration_tests/configured_catalog.json" future_state: - future_state_path: "integration_tests/state.json" + future_state_path: "integration_tests/abnormal_state.json" # Email activities stream has working campaigns with email newsletters. # Due to this sequential_reads test could be failed. full_refresh: tests: - config_path: "secrets/config.json" configured_catalog_path: "integration_tests/configured_catalog_without_email_activities.json" - - config_path: "secrets/config_oauth.json" - configured_catalog_path: "integration_tests/configured_catalog_without_email_activities.json" diff --git a/airbyte-integrations/connectors/source-mailchimp/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-mailchimp/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..80f92d1400fa3 --- /dev/null +++ b/airbyte-integrations/connectors/source-mailchimp/integration_tests/abnormal_state.json @@ -0,0 +1,370 @@ +[ + { + "type": "STREAM", + "stream": { + "stream_state": { + "create_time": "2220-11-23T05:42:11+00:00" + }, + "stream_descriptor": { + "name": "campaigns" + } + } + }, + { + "type": "STREAM", + "stream": { + "stream_state": { + "date_created": "2220-09-25T04:47:31+00:00" + }, + "stream_descriptor": { + "name": "lists" + } + } + }, + { + "type": "STREAM", + "stream": { + "stream_state": { + "create_time": "2220-11-23T05:42:11+00:00" + }, + "stream_descriptor": { + "name": "automations" + } + } + }, + { + "type": "STREAM", + "stream": { + "stream_state": { + "states": [ + { + "partition": { + "id": "324b8a398e", + "parent_slice": {} + }, + "cursor": { + "timestamp": "2230-11-23T05:42:10+0000" + } + }, + { + "partition": { + "id": "3cbed9a0fc", + "parent_slice": {} + }, + "cursor": { + "timestamp": "2230-11-23T05:42:10+0000" + } + }, + { + "partition": { + "id": "2aa901afd0", + "parent_slice": {} + }, + "cursor": { + "timestamp": "2230-11-23T05:42:10+0000" + } + }, + { + "partition": { + "id": "e974db8443", + "parent_slice": {} + }, + "cursor": { + "timestamp": "2230-11-23T05:42:10+0000" + } + }, + { + "partition": { + "id": "a79651273b", + "parent_slice": {} + }, + "cursor": { + "timestamp": "2230-11-23T05:42:10+0000" + } + }, + { + "partition": { + "id": "d983b83b95", + "parent_slice": {} + }, + "cursor": { + "timestamp": "2230-11-23T05:42:10+0000" + } + }, + { + "partition": { + "id": "7847cdaeff", + "parent_slice": {} + }, + "cursor": { + "timestamp": "2230-11-23T05:42:10+0000" + } + } + ] + }, + "stream_descriptor": { + "name": "email_activity" + } + } + }, + { + "type": "STREAM", + "stream": { + "stream_state": { + "states": [ + { + "partition": { + "id": "16d6ec4ffc", + "parent_slice": {} + }, + "cursor": { + "last_changed": "2230-02-26T05:42:10.000000Z" + } + } + ] + }, + "stream_descriptor": { + "name": "list_members" + } + } + }, + { + "type": "STREAM", + "stream": { + "stream_state": { + "send_time": "2230-02-26T05:42:10+00:00" + }, + "stream_descriptor": { + "name": "reports" + } + } + }, + { + "type": "STREAM", + "stream": { + "stream_state": { + "states": [ + { + "partition": { + "id": 13506120, + "parent_slice": { + "id": "16d6ec4ffc", + "parent_slice": {} + } + }, + "cursor": { + "last_changed": "2222-12-27T08:34:39+0000" + } + }, + { + "partition": { + "id": 13506124, + "parent_slice": { + "id": "16d6ec4ffc", + "parent_slice": {} + } + }, + "cursor": { + "last_changed": "2222-12-27T08:34:39+0000" + } + }, + { + "partition": { + "id": 13506128, + "parent_slice": { + "id": "16d6ec4ffc", + "parent_slice": {} + } + }, + "cursor": { + "last_changed": "2222-12-27T08:34:39+0000" + } + }, + { + "partition": { + "id": 13506132, + "parent_slice": { + "id": "16d6ec4ffc", + "parent_slice": {} + } + }, + "cursor": { + "last_changed": "2222-12-27T08:34:39+0000" + } + }, + { + "partition": { + "id": 13506136, + "parent_slice": { + "id": "16d6ec4ffc", + "parent_slice": {} + } + }, + "cursor": { + "last_changed": "2222-12-27T08:34:39+0000" + } + }, + { + "partition": { + "id": 14351124, + "parent_slice": { + "id": "16d6ec4ffc", + "parent_slice": {} + } + }, + "cursor": { + "last_changed": "2222-12-27T08:34:39+0000" + } + }, + { + "partition": { + "id": 14351128, + "parent_slice": { + "id": "16d6ec4ffc", + "parent_slice": {} + } + }, + "cursor": { + "last_changed": "2222-12-27T08:34:39+0000" + } + }, + { + "partition": { + "id": 14351488, + "parent_slice": { + "id": "16d6ec4ffc", + "parent_slice": {} + } + }, + "cursor": { + "last_changed": "2222-12-27T08:34:39+0000" + } + }, + { + "partition": { + "id": 14351504, + "parent_slice": { + "id": "16d6ec4ffc", + "parent_slice": {} + } + }, + "cursor": { + "last_changed": "2222-12-27T08:34:39+0000" + } + }, + { + "partition": { + "id": 14351532, + "parent_slice": { + "id": "16d6ec4ffc", + "parent_slice": {} + } + }, + "cursor": { + "last_changed": "2222-12-27T08:34:39+0000" + } + } + ] + }, + "stream_descriptor": { + "name": "segment_members" + } + } + }, + { + "type": "STREAM", + "stream": { + "stream_state": { + "states": [ + { + "partition": { + "id": "16d6ec4ffc", + "parent_slice": {} + }, + "cursor": { + "updated_at": "2230-02-26T05:42:10Z" + } + } + ] + }, + "stream_descriptor": { + "name": "segments" + } + } + }, + { + "type": "STREAM", + "stream": { + "stream_state": { + "states": [ + { + "partition": { + "id": "324b8a398e", + "parent_slice": {} + }, + "cursor": { + "timestamp": "2231-09-26T05:42:10+0000" + } + }, + { + "partition": { + "id": "3cbed9a0fc", + "parent_slice": {} + }, + "cursor": { + "timestamp": "2231-09-26T05:42:10+0000" + } + }, + { + "partition": { + "id": "2aa901afd0", + "parent_slice": {} + }, + "cursor": { + "timestamp": "2231-09-26T05:42:10+0000" + } + }, + { + "partition": { + "id": "e974db8443", + "parent_slice": {} + }, + "cursor": { + "timestamp": "2231-09-26T05:42:10+0000" + } + }, + { + "partition": { + "id": "a79651273b", + "parent_slice": {} + }, + "cursor": { + "timestamp": "2231-09-26T05:42:10+0000" + } + }, + { + "partition": { + "id": "d983b83b95", + "parent_slice": {} + }, + "cursor": { + "timestamp": "2231-09-26T05:42:10+0000" + } + }, + { + "partition": { + "id": "7847cdaeff", + "parent_slice": {} + }, + "cursor": { + "timestamp": "2231-09-26T05:42:10+0000" + } + } + ] + }, + "stream_descriptor": { + "name": "unsubscribes" + } + } + } +] diff --git a/airbyte-integrations/connectors/source-mailchimp/integration_tests/state.json b/airbyte-integrations/connectors/source-mailchimp/integration_tests/state.json deleted file mode 100644 index 26b656926fd56..0000000000000 --- a/airbyte-integrations/connectors/source-mailchimp/integration_tests/state.json +++ /dev/null @@ -1,80 +0,0 @@ -[ - { - "type": "STREAM", - "stream": { - "stream_state": { "create_time": "2220-11-23T05:42:11+00:00" }, - "stream_descriptor": { "name": "campaigns" } - } - }, - { - "type": "STREAM", - "stream": { - "stream_state": { "date_created": "2220-09-25T04:47:31+00:00" }, - "stream_descriptor": { "name": "lists" } - } - }, - { - "type": "STREAM", - "stream": { - "stream_state": { "create_time": "2220-11-23T05:42:11+00:00" }, - "stream_descriptor": { "name": "automations" } - } - }, - { - "type": "STREAM", - "stream": { - "stream_state": { - "7847cdaeff": { "timestamp": "2230-11-23T05:42:10+00:00" } - }, - "stream_descriptor": { "name": "email_activity" } - } - }, - { - "type": "STREAM", - "stream": { - "stream_state": { - "16d6ec4ffc": { "last_changed": "2230-02-26T05:42:10+00:00" } - }, - "stream_descriptor": { "name": "list_members" } - } - }, - { - "type": "STREAM", - "stream": { - "stream_state": { "send_time": "2230-02-26T05:42:10+00:00" }, - "stream_descriptor": { "name": "reports" } - } - }, - { - "type": "STREAM", - "stream": { - "stream_state": { - "13506120": { "last_changed": "2222-12-27T08:34:39+00:00" }, - "13506136": { "last_changed": "2222-12-27T08:34:39+00:00" }, - "14351124": { "last_changed": "2222-12-27T08:34:39+00:00" }, - "14351504": { "last_changed": "2222-12-27T07:56:47+00:00" }, - "14351128": { "last_changed": "2222-12-27T08:34:39+00:00" }, - "13506132": { "last_changed": "2222-12-27T08:34:39+00:00" } - }, - "stream_descriptor": { "name": "segment_members" } - } - }, - { - "type": "STREAM", - "stream": { - "stream_state": { - "16d6ec4ffc": { "updated_at": "2230-02-26T05:42:10+00:00" } - }, - "stream_descriptor": { "name": "segments" } - } - }, - { - "type": "STREAM", - "stream": { - "stream_state": { - "7847cdaeff": { "timestamp": "2231-09-26T05:42:10+00:00" } - }, - "stream_descriptor": { "name": "unsubscribes" } - } - } -] diff --git a/airbyte-integrations/connectors/source-mailchimp/metadata.yaml b/airbyte-integrations/connectors/source-mailchimp/metadata.yaml index 670f7387202e1..0c074f855cb1b 100644 --- a/airbyte-integrations/connectors/source-mailchimp/metadata.yaml +++ b/airbyte-integrations/connectors/source-mailchimp/metadata.yaml @@ -5,17 +5,19 @@ data: allowedHosts: hosts: - "*.api.mailchimp.com" + - "login.mailchimp.com" connectorBuildOptions: - baseImage: docker.io/airbyte/python-connector-base:1.1.0@sha256:bd98f6505c6764b1b5f99d3aedc23dfc9e9af631a62533f60eb32b1d3dbab20c + baseImage: docker.io/airbyte/python-connector-base:1.2.0@sha256:c22a9d97464b69d6ef01898edf3f8612dc11614f05a84984451dde195f337db9 connectorSubtype: api connectorType: source definitionId: b03a9f3e-22a5-11eb-adc1-0242ac120002 - dockerImageTag: 1.1.2 + dockerImageTag: 2.0.0 dockerRepository: airbyte/source-mailchimp documentationUrl: https://docs.airbyte.com/integrations/sources/mailchimp githubIssueLabel: source-mailchimp icon: mailchimp.svg license: MIT + maxSecondsBetweenMessages: 120 name: Mailchimp remoteRegistries: pypi: @@ -28,6 +30,12 @@ data: enabled: true releases: breakingChanges: + 2.0.0: + message: The source Mailchimp connector is being migrated from the Python CDK to our declarative low-code CDK. Due to changes in primary key for streams `Segment Members` and `List Members`, this migration constitutes a breaking change. After updating, please reset your source before resuming syncs. For more information, see our migration documentation for source Mailchimp. + upgradeDeadline: "2024-04-10" + scopedImpact: + - scopeType: stream + impactedScopes: ["segment_members", "list_members"] 1.0.0: message: Version 1.0.0 introduces schema changes to all incremental streams. @@ -44,5 +52,5 @@ data: supportLevel: certified tags: - language:python - - cdk:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-mailchimp/poetry.lock b/airbyte-integrations/connectors/source-mailchimp/poetry.lock index a247c0d1074e5..58ab3dcc25b53 100644 --- a/airbyte-integrations/connectors/source-mailchimp/poetry.lock +++ b/airbyte-integrations/connectors/source-mailchimp/poetry.lock @@ -1,40 +1,39 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.58.8" +version = "0.78.1" description = "A framework for writing Airbyte Connectors." optional = false -python-versions = ">=3.8" +python-versions = "<4.0,>=3.9" files = [ - {file = "airbyte-cdk-0.58.8.tar.gz", hash = "sha256:80cfad673302802e0f5d485879f1bd2f3679a4e3b12b2af42bd7bb37a3991a71"}, - {file = "airbyte_cdk-0.58.8-py3-none-any.whl", hash = "sha256:5b0b19745e96ba3f20683c48530d58a00be48361dfa34ec3c38cef8da03ba330"}, + {file = "airbyte_cdk-0.78.1-py3-none-any.whl", hash = "sha256:73dfc03e55a7107bf28b5bbc4e43572d448c60e9b34368d22cf48b6536aa2263"}, + {file = "airbyte_cdk-0.78.1.tar.gz", hash = "sha256:700e5526ae29db1e453b3def8682726f7d8aa653ee2f3056488d0a484f055133"}, ] [package.dependencies] airbyte-protocol-models = "0.5.1" backoff = "*" cachetools = "*" -Deprecated = ">=1.2,<2.0" +Deprecated = ">=1.2,<1.3" dpath = ">=2.0.1,<2.1.0" genson = "1.2.2" isodate = ">=0.6.1,<0.7.0" Jinja2 = ">=3.1.2,<3.2.0" -jsonref = ">=0.2,<1.0" +jsonref = ">=0.2,<0.3" jsonschema = ">=3.2.0,<3.3.0" pendulum = "<3.0.0" pydantic = ">=1.10.8,<2.0.0" pyrate-limiter = ">=3.1.0,<3.2.0" python-dateutil = "*" -PyYAML = ">=6.0.1" +PyYAML = ">=6.0.1,<7.0.0" requests = "*" -requests-cache = "*" +requests_cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] [[package]] @@ -104,13 +103,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -301,6 +300,20 @@ files = [ [package.extras] test = ["pytest (>=6)"] +[[package]] +name = "freezegun" +version = "1.4.0" +description = "Let your Python tests travel through time" +optional = false +python-versions = ">=3.7" +files = [ + {file = "freezegun-1.4.0-py3-none-any.whl", hash = "sha256:55e0fc3c84ebf0a96a5aa23ff8b53d70246479e9a68863f1fcac5a3e52f19dd6"}, + {file = "freezegun-1.4.0.tar.gz", hash = "sha256:10939b0ba0ff5adaecf3b06a5c2f73071d9678e507c5eaedb23c761d56ac774b"}, +] + +[package.dependencies] +python-dateutil = ">=2.7" + [[package]] name = "genson" version = "1.2.2" @@ -366,13 +379,13 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jsonref" -version = "0.3.0" -description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." +version = "0.2" +description = "An implementation of JSON Reference for Python" optional = false -python-versions = ">=3.3,<4.0" +python-versions = "*" files = [ - {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, - {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, ] [[package]] @@ -467,13 +480,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -685,30 +698,30 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xm [[package]] name = "pytest-mock" -version = "3.12.0" +version = "3.14.0" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, - {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, ] [package.dependencies] -pytest = ">=5.0" +pytest = ">=6.2.5" [package.extras] dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -808,13 +821,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -826,68 +839,48 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" -version = "1.11.0" +version = "1.12.1" description = "Mock out responses from the requests package" optional = false -python-versions = "*" +python-versions = ">=3.5" files = [ - {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"}, - {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"}, + {file = "requests-mock-1.12.1.tar.gz", hash = "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401"}, + {file = "requests_mock-1.12.1-py2.py3-none-any.whl", hash = "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563"}, ] [package.dependencies] -requests = ">=2.3,<3" -six = "*" +requests = ">=2.22,<3" [package.extras] fixture = ["fixtures"] -test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] - -[[package]] -name = "responses" -version = "0.19.0" -description = "A utility library for mocking out the `requests` Python library." -optional = false -python-versions = ">=3.7" -files = [ - {file = "responses-0.19.0-py3-none-any.whl", hash = "sha256:53354b5de163aa2074312c71d8ebccb8bd1ab336cff7053abb75e84dc5637abe"}, - {file = "responses-0.19.0.tar.gz", hash = "sha256:3fc29c3117e14136b833a0a6d4e7f1217c6301bf08b6086db468e12f1e3290e2"}, -] - -[package.dependencies] -requests = ">=2.0,<3.0" -urllib3 = ">=1.25.10" - -[package.extras] -tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-localserver", "types-mock", "types-requests"] [[package]] name = "setuptools" -version = "69.0.3" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, - {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -913,13 +906,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -938,13 +931,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1049,4 +1042,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "77ffe28d193ca607c7abf9ea05aee9498d1a9e17ee0f93adea80bf7537e58e5d" +content-hash = "987a9fd3716b6001482423ffd138cfe7a77609236390f1a48a686daebf28ac68" diff --git a/airbyte-integrations/connectors/source-mailchimp/pyproject.toml b/airbyte-integrations/connectors/source-mailchimp/pyproject.toml index fa7b587b60081..f5d046778126d 100644 --- a/airbyte-integrations/connectors/source-mailchimp/pyproject.toml +++ b/airbyte-integrations/connectors/source-mailchimp/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "1.1.2" +version = "2.0.0" name = "source-mailchimp" description = "Source implementation for Mailchimp." authors = [ "Airbyte ",] @@ -17,7 +17,7 @@ include = "source_mailchimp" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "==0.58.8" +airbyte-cdk = "^0" pytest = "==6.2.5" [tool.poetry.scripts] @@ -25,5 +25,5 @@ source-mailchimp = "source_mailchimp.run:run" [tool.poetry.group.dev.dependencies] pytest-mock = "^3.6.1" -responses = "^0.19.0" requests-mock = "^1.9.3" +freezegun = "^1.4.0" diff --git a/airbyte-integrations/connectors/source-mailchimp/source_mailchimp/components.py b/airbyte-integrations/connectors/source-mailchimp/source_mailchimp/components.py new file mode 100644 index 0000000000000..b1f1c2733159c --- /dev/null +++ b/airbyte-integrations/connectors/source-mailchimp/source_mailchimp/components.py @@ -0,0 +1,55 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from dataclasses import InitVar +from typing import Any, List, Mapping, Optional + +import pendulum +import requests +from airbyte_cdk.sources.declarative.extractors import DpathExtractor +from airbyte_cdk.sources.declarative.extractors.record_filter import RecordFilter +from airbyte_cdk.sources.declarative.types import StreamSlice, StreamState + + +class MailChimpRecordFilter(RecordFilter): + """ + Filter applied on a list of Records. + """ + + parameters: InitVar[Mapping[str, Any]] + + def __post_init__(self, parameters: Mapping[str, Any]) -> None: + self.parameters = parameters + + def filter_records( + self, + records: List[Mapping[str, Any]], + stream_state: StreamState, + stream_slice: Optional[StreamSlice] = None, + next_page_token: Optional[Mapping[str, Any]] = None, + ) -> List[Mapping[str, Any]]: + current_state = [x for x in stream_state.get("states", []) if x["partition"]["id"] == stream_slice.partition["id"]] + cursor_value = self.get_filter_date(self.config.get("start_date"), current_state) + return [record for record in records if record[self.parameters["cursor_field"]] > cursor_value] if cursor_value else records + + def get_filter_date(self, start_date: str, state_value: list) -> str: + """ + Calculate the filter date to pass in the request parameters by comparing the start_date + with the value of state obtained from the stream_slice. + If only one value exists, use it by default. Otherwise, return None. + If no filter_date is provided, the API will fetch all available records. + """ + + start_date_parsed = pendulum.parse(start_date).to_iso8601_string() if start_date else None + state_date_parsed = ( + pendulum.parse(state_value[0]["cursor"][self.parameters["cursor_field"]]).to_iso8601_string() if state_value else None + ) + + # Return the max of the two dates if both are present. Otherwise return whichever is present, or None. + if start_date_parsed or state_date_parsed: + return max(filter(None, [start_date_parsed, state_date_parsed]), default=None) + + +class MailChimpRecordExtractorEmailActivity(DpathExtractor): + def extract_records(self, response: requests.Response) -> List[Mapping[str, Any]]: + records = super().extract_records(response=response) + return [{**record, **activity_item} for record in records for activity_item in record.pop("activity", [])] diff --git a/airbyte-integrations/connectors/source-mailchimp/source_mailchimp/config_migrations.py b/airbyte-integrations/connectors/source-mailchimp/source_mailchimp/config_migrations.py new file mode 100644 index 0000000000000..621edd5763f11 --- /dev/null +++ b/airbyte-integrations/connectors/source-mailchimp/source_mailchimp/config_migrations.py @@ -0,0 +1,96 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +import logging +from typing import Any, List, Mapping + +import requests +from airbyte_cdk.config_observation import create_connector_config_control_message +from airbyte_cdk.entrypoint import AirbyteEntrypoint +from airbyte_cdk.sources import Source +from airbyte_cdk.utils import AirbyteTracedException +from airbyte_protocol.models import FailureType + +logger = logging.getLogger("airbyte_logger") + + +class MigrateDataCenter: + """ + This class stands for migrating the config at runtime, + Set data_center property in config based on credential type. + """ + + @classmethod + def get_data_center_location(cls, config: Mapping[str, Any]) -> Mapping[str, Any]: + if config.get("credentials", {}).get("auth_type") == "apikey": + data_center = config["credentials"]["apikey"].split("-").pop() + else: + data_center = cls.get_oauth_data_center(config["credentials"]["access_token"]) + config["data_center"] = data_center + return config + + @staticmethod + def get_oauth_data_center(access_token: str) -> str: + """ + Every Mailchimp API request must be sent to a specific data center. + The data center is already embedded in API keys, but not OAuth access tokens. + This method retrieves the data center for OAuth credentials. + """ + response = requests.get("https://login.mailchimp.com/oauth2/metadata", headers={"Authorization": "OAuth {}".format(access_token)}) + + # Requests to this endpoint will return a 200 status code even if the access token is invalid. + error = response.json().get("error") + if error == "invalid_token": + raise AirbyteTracedException( + failure_type=FailureType.config_error, + internal_message=error, + message="The access token you provided was invalid. Please check your credentials and try again.", + ) + return response.json()["dc"] + + @classmethod + def modify_and_save(cls, config_path: str, source: Source, config: Mapping[str, Any]) -> Mapping[str, Any]: + """ + Modifies the configuration and then saves it back to the source. + + Args: + - config_path (str): The path where the configuration is stored. + - source (Source): The data source. + - config (Mapping[str, Any]): The current configuration. + + Returns: + - Mapping[str, Any]: The updated configuration. + """ + migrated_config = cls.get_data_center_location(config) + source.write_config(migrated_config, config_path) + return migrated_config + + @classmethod + def emit_control_message(cls, migrated_config: Mapping[str, Any]) -> None: + """ + Emits the control messages related to configuration migration. + + Args: + - migrated_config (Mapping[str, Any]): The migrated configuration. + """ + print(create_connector_config_control_message(migrated_config).json(exclude_unset=True)) + + @classmethod + def migrate(cls, args: List[str], source: Source) -> None: + """ + Orchestrates the configuration migration process. + + It first checks if the `--config` argument is provided, and if so, + determines whether migration is needed, and then performs the migration + if required. + + Args: + - args (List[str]): List of command-line arguments. + - source (Source): The data source. + """ + config_path = AirbyteEntrypoint(source).extract_config(args) + if config_path: + config = source.read_config(config_path) + cls.emit_control_message(cls.modify_and_save(config_path, source, config)) diff --git a/airbyte-integrations/connectors/source-mailchimp/source_mailchimp/manifest.yaml b/airbyte-integrations/connectors/source-mailchimp/source_mailchimp/manifest.yaml new file mode 100644 index 0000000000000..43c331439f71e --- /dev/null +++ b/airbyte-integrations/connectors/source-mailchimp/source_mailchimp/manifest.yaml @@ -0,0 +1,367 @@ +version: 0.52.0 +type: DeclarativeSource + +check: + type: CheckStream + stream_names: + - "campaigns" + +definitions: + bearer_authenticator: + type: BearerAuthenticator + api_token: "{{ config['credentials']['access_token'] }}" + basic_authenticator: + type: BasicHttpAuthenticator + username: "anystring" + password: "{{ config.get('apikey') or config['credentials']['apikey'] }}" + + transformer_remove_empty_fields: + type: RemoveFields + field_pointers: + - ["**"] + condition: "{{ property|string == '' }}" + + retriever: + type: SimpleRetriever + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: ["{{ parameters.get('data_field') }}"] + paginator: + type: "DefaultPaginator" + pagination_strategy: + type: "OffsetIncrement" + page_size: 1000 + page_size_option: + type: "RequestOption" + inject_into: "request_parameter" + field_name: "count" + page_token_option: + type: "RequestOption" + inject_into: "request_parameter" + field_name: "offset" + requester: + url_base: https://{{ config['data_center'] }}.api.mailchimp.com/3.0/ + http_method: GET + authenticator: + type: SelectiveAuthenticator + authenticator_selection_path: ["credentials", "auth_type"] + authenticators: + oauth2.0: "#/definitions/bearer_authenticator" + apikey: "#/definitions/basic_authenticator" + request_parameters: + exclude_fields: "{{ parameters.get('data_field') }}._links" + + base_stream: + retriever: + $ref: "#/definitions/retriever" + + base_incremental_stream: + retriever: + $ref: "#/definitions/retriever" + requester: + $ref: "#/definitions/retriever/requester" + request_parameters: + sort_field: "{{ parameters['cursor_field'] }}" + sort_dir: "ASC" + exclude_fields: "{{ parameters.get('data_field') }}._links" + transformations: + - "#/definitions/transformer_remove_empty_fields" + incremental_sync: + type: DatetimeBasedCursor + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%S%z" + datetime_format: "%Y-%m-%dT%H:%M:%S.%fZ" + cursor_field: "{{ parameters['cursor_field'] }}" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config.get('start_date', '1970-01-01T00:00:00.0Z') }}" + datetime_format: "%Y-%m-%dT%H:%M:%S.%fZ" + lookback_window: PT0.1S + start_time_option: + inject_into: request_parameter + field_name: "since_{{ parameters['cursor_field'] }}" + type: RequestOption + end_time_option: + inject_into: request_parameter + field_name: "before_{{ parameters['cursor_field'] }}" + type: RequestOption + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%S.%fZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%S.%fZ" + lookback_window: PT1S + + automations_stream: + type: DeclarativeStream + $ref: "#/definitions/base_incremental_stream" + $parameters: + name: "automations" + primary_key: "id" + path: "automations" + data_field: "automations" + cursor_field: "create_time" + + campaigns_stream: + type: DeclarativeStream + $ref: "#/definitions/base_incremental_stream" + $parameters: + name: "campaigns" + primary_key: "id" + path: "/campaigns" + data_field: "campaigns" + cursor_field: "create_time" + + list_members_stream: + type: DeclarativeStream + $ref: "#/definitions/base_incremental_stream" + retriever: + $ref: "#/definitions/retriever" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - stream: "#/definitions/lists_stream" + parent_key: id + partition_field: id + state_migrations: + - type: LegacyToPerPartitionStateMigration + $parameters: + name: "list_members" + primary_key: ["id", "list_id"] + path: "/lists/{{ stream_slice.id }}/members" + data_field: "members" + cursor_field: "last_changed" + + lists_stream: + type: DeclarativeStream + $ref: "#/definitions/base_incremental_stream" + $parameters: + name: "lists" + primary_key: "id" + path: "lists" + data_field: "lists" + cursor_field: "date_created" + + tags_stream: + type: DeclarativeStream + $ref: "#/definitions/base_stream" + retriever: + $ref: "#/definitions/retriever" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - stream: "#/definitions/lists_stream" + parent_key: id + partition_field: id + transformations: + - type: AddFields + fields: + - path: ["list_id"] + value: "{{ stream_slice.id }}" + $parameters: + name: "tags" + primary_key: "id" + path: "lists/{{ stream_slice.id }}/tag-search" + data_field: "tags" + + interest_categories_stream: + type: DeclarativeStream + $ref: "#/definitions/base_stream" + transformations: + - type: AddFields + fields: + - path: ["list_id"] + value: "{{ stream_slice.id }}" + retriever: + $ref: "#/definitions/retriever" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - stream: "#/definitions/lists_stream" + parent_key: id + partition_field: id + $parameters: + name: "interest_categories" + primary_key: "id" + path: "lists/{{ stream_slice.id }}/interest-categories" + data_field: "categories" + + interests_stream: + type: DeclarativeStream + $ref: "#/definitions/base_stream" + retriever: + $ref: "#/definitions/retriever" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - stream: "#/definitions/interest_categories_stream" + parent_key: id + partition_field: id + $parameters: + name: "interests" + primary_key: "id" + path: "lists/{{ stream_slice.parent_slice.id }}/interest-categories/{{ stream_slice.id }}/interests" + data_field: "interests" + + reports_stream: + type: DeclarativeStream + $ref: "#/definitions/base_incremental_stream" + $parameters: + name: "reports" + primary_key: "id" + path: "reports" + data_field: "reports" + cursor_field: "send_time" + + segments_stream: + type: DeclarativeStream + $ref: "#/definitions/base_incremental_stream" + retriever: + $ref: "#/definitions/retriever" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - stream: "#/definitions/lists_stream" + parent_key: id + partition_field: id + state_migrations: + - type: LegacyToPerPartitionStateMigration + $parameters: + name: "segments" + primary_key: "id" + path: "/lists/{{ stream_slice.id }}/segments" + data_field: "segments" + cursor_field: "updated_at" + + segment_members_stream: + type: DeclarativeStream + $ref: "#/definitions/base_stream" + retriever: + $ref: "#/definitions/retriever" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - stream: "#/definitions/segments_stream" + parent_key: id + partition_field: id + record_selector: + $ref: "#/definitions/retriever/record_selector" + record_filter: + type: CustomRecordFilter + class_name: source_mailchimp.components.MailChimpRecordFilter + incremental_sync: + type: DatetimeBasedCursor + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%S%z" + datetime_format: "%Y-%m-%dT%H:%M:%S%z" + cursor_field: "{{ parameters['cursor_field'] }}" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config.get('start_date', '1970-01-01T00:00:00.0Z') }}" + datetime_format: "%Y-%m-%dT%H:%M:%S.%fZ" + lookback_window: PT0.1S + transformations: + - type: AddFields + fields: + - path: ["segment_id"] + value: "{{ stream_slice.id }}" + - "#/definitions/transformer_remove_empty_fields" + state_migrations: + - type: LegacyToPerPartitionStateMigration + $parameters: + name: "segment_members" + primary_key: ["id", "segment_id"] + path: "/lists/{{ stream_slice.parent_slice.id }}/segments/{{ stream_slice.id }}/members" + data_field: "members" + cursor_field: "last_changed" + + unsubscribes_stream: + type: DeclarativeStream + $ref: "#/definitions/base_stream" + retriever: + $ref: "#/definitions/retriever" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - stream: "#/definitions/campaigns_stream" + parent_key: id + partition_field: id + record_selector: + $ref: "#/definitions/retriever/record_selector" + record_filter: + type: CustomRecordFilter + class_name: source_mailchimp.components.MailChimpRecordFilter + incremental_sync: + type: DatetimeBasedCursor + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%S%z" + datetime_format: "%Y-%m-%dT%H:%M:%S%z" + cursor_field: "{{ parameters['cursor_field'] }}" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config.get('start_date', '1970-01-01T00:00:00.0Z') }}" + datetime_format: "%Y-%m-%dT%H:%M:%S.%fZ" + lookback_window: PT0.1S + state_migrations: + - type: LegacyToPerPartitionStateMigration + $parameters: + name: "unsubscribes" + primary_key: ["campaign_id", "email_id", "timestamp"] + path: "/reports/{{ stream_slice.id }}/unsubscribed" + data_field: "unsubscribes" + cursor_field: "timestamp" + + email_activity_stream: + type: DeclarativeStream + $ref: "#/definitions/base_stream" + retriever: + $ref: "#/definitions/retriever" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - stream: "#/definitions/campaigns_stream" + parent_key: id + partition_field: id + record_selector: + type: RecordSelector + extractor: + type: CustomRecordExtractor + class_name: source_mailchimp.components.MailChimpRecordExtractorEmailActivity + field_path: ["{{ parameters.get('data_field') }}"] + incremental_sync: + type: DatetimeBasedCursor + datetime_format: "%Y-%m-%dT%H:%M:%S%z" + cursor_field: "{{ parameters['cursor_field'] }}" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config.get('start_date', '1970-01-01T00:00:00Z') }}" + datetime_format: "%Y-%m-%dT%H:%M:%S%z" + lookback_window: PT1S + start_time_option: + inject_into: request_parameter + field_name: "since" + type: RequestOption + lookback_window: PT0.1S + state_migrations: + - type: LegacyToPerPartitionStateMigration + $parameters: + name: "email_activity" + primary_key: ["timestamp", "email_id", "action"] + path: "/reports/{{ stream_slice.id }}/email-activity" + data_field: "emails" + cursor_field: "timestamp" + +streams: + - "#/definitions/automations_stream" + - "#/definitions/campaigns_stream" + - "#/definitions/email_activity_stream" + - "#/definitions/lists_stream" + - "#/definitions/list_members_stream" + - "#/definitions/tags_stream" + - "#/definitions/interest_categories_stream" + - "#/definitions/interests_stream" + - "#/definitions/reports_stream" + - "#/definitions/segments_stream" + - "#/definitions/segment_members_stream" + - "#/definitions/unsubscribes_stream" diff --git a/airbyte-integrations/connectors/source-mailchimp/source_mailchimp/run.py b/airbyte-integrations/connectors/source-mailchimp/source_mailchimp/run.py index 15226fdfeebd0..c4f1b04c5c4fb 100644 --- a/airbyte-integrations/connectors/source-mailchimp/source_mailchimp/run.py +++ b/airbyte-integrations/connectors/source-mailchimp/source_mailchimp/run.py @@ -7,8 +7,10 @@ from airbyte_cdk.entrypoint import launch from source_mailchimp import SourceMailchimp +from source_mailchimp.config_migrations import MigrateDataCenter def run(): source = SourceMailchimp() + MigrateDataCenter.migrate(sys.argv[1:], source) launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-mailchimp/source_mailchimp/source.py b/airbyte-integrations/connectors/source-mailchimp/source_mailchimp/source.py index 0edf00993e5fb..ba650f4cc6529 100644 --- a/airbyte-integrations/connectors/source-mailchimp/source_mailchimp/source.py +++ b/airbyte-integrations/connectors/source-mailchimp/source_mailchimp/source.py @@ -3,147 +3,9 @@ # -import base64 -import re -from typing import Any, List, Mapping, Tuple +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource -import pendulum -import requests -from airbyte_cdk import AirbyteLogger -from airbyte_cdk.sources import AbstractSource -from airbyte_cdk.sources.streams import Stream -from airbyte_cdk.sources.streams.http.auth import TokenAuthenticator -from pendulum.parsing.exceptions import ParserError -from requests.auth import AuthBase -from .streams import ( - Automations, - Campaigns, - EmailActivity, - InterestCategories, - Interests, - ListMembers, - Lists, - Reports, - SegmentMembers, - Segments, - Tags, - Unsubscribes, -) - - -class MailChimpAuthenticator: - @staticmethod - def get_oauth_data_center(access_token: str) -> str: - """ - Every Mailchimp API request must be sent to a specific data center. - The data center is already embedded in API keys, but not OAuth access tokens. - This method retrieves the data center for OAuth credentials. - """ - try: - response = requests.get( - "https://login.mailchimp.com/oauth2/metadata", headers={"Authorization": "OAuth {}".format(access_token)} - ) - - # Requests to this endpoint will return a 200 status code even if the access token is invalid. - error = response.json().get("error") - if error == "invalid_token": - raise ValueError("The access token you provided was invalid. Please check your credentials and try again.") - return response.json()["dc"] - - # Handle any other exceptions that may occur. - except Exception as e: - raise Exception(f"An error occured while retrieving the data center for your account. \n {repr(e)}") - - def get_auth(self, config: Mapping[str, Any]) -> AuthBase: - authorization = config.get("credentials", {}) - auth_type = authorization.get("auth_type") - if auth_type == "apikey" or not authorization: - # API keys have the format -. - # See https://mailchimp.com/developer/marketing/docs/fundamentals/#api-structure - apikey = authorization.get("apikey") or config.get("apikey") - if not apikey: - raise Exception("Please provide a valid API key for authentication.") - auth_string = f"anystring:{apikey}".encode("utf8") - b64_encoded = base64.b64encode(auth_string).decode("utf8") - auth = TokenAuthenticator(token=b64_encoded, auth_method="Basic") - auth.data_center = apikey.split("-").pop() - - elif auth_type == "oauth2.0": - access_token = authorization["access_token"] - auth = TokenAuthenticator(token=access_token, auth_method="Bearer") - auth.data_center = self.get_oauth_data_center(access_token) - - else: - raise Exception(f"Invalid auth type: {auth_type}") - - return auth - - -class SourceMailchimp(AbstractSource): - def _validate_start_date(self, config: Mapping[str, Any]): - start_date = config.get("start_date") - - if start_date: - pattern = re.compile(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z") - if not pattern.match(start_date): # Compare against the pattern descriptor. - return "Please check the format of the start date against the pattern descriptor." - - try: # Handle invalid dates. - parsed_start_date = pendulum.parse(start_date) - except ParserError: - return "The provided start date is not a valid date. Please check the date you input and try again." - - if parsed_start_date > pendulum.now("UTC"): # Handle future start date. - return "The start date cannot be greater than the current date." - - return None - - def check_connection(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> Tuple[bool, Any]: - # First, check for a valid start date if it is provided - start_date_validation_error = self._validate_start_date(config) - if start_date_validation_error: - return False, start_date_validation_error - - try: - authenticator = MailChimpAuthenticator().get_auth(config) - response = requests.get( - f"https://{authenticator.data_center}.api.mailchimp.com/3.0/ping", headers=authenticator.get_auth_header() - ) - - # A successful response will return a simple JSON object with a single key: health_status. - # Otherwise, errors are returned as a JSON object with keys: - # {type, title, status, detail, instance} - - if not response.json().get("health_status"): - error_title = response.json().get("title", "Unknown Error") - error_details = response.json().get("details", "An unknown error occurred. Please verify your credentials and try again.") - return False, f"Encountered an error while connecting to Mailchimp. Type: {error_title}. Details: {error_details}" - return True, None - - # Handle any other exceptions that may occur. - except Exception as e: - return False, repr(e) - - def streams(self, config: Mapping[str, Any]) -> List[Stream]: - authenticator = MailChimpAuthenticator().get_auth(config) - campaign_id = config.get("campaign_id") - start_date = config.get("start_date") - - lists = Lists(authenticator=authenticator, start_date=start_date) - interest_categories = InterestCategories(authenticator=authenticator, parent=lists) - - return [ - Automations(authenticator=authenticator, start_date=start_date), - Campaigns(authenticator=authenticator, start_date=start_date), - EmailActivity(authenticator=authenticator, start_date=start_date, campaign_id=campaign_id), - interest_categories, - Interests(authenticator=authenticator, parent=interest_categories), - lists, - ListMembers(authenticator=authenticator, start_date=start_date), - Reports(authenticator=authenticator, start_date=start_date), - SegmentMembers(authenticator=authenticator, start_date=start_date), - Segments(authenticator=authenticator, start_date=start_date), - Tags(authenticator=authenticator, parent=lists), - Unsubscribes(authenticator=authenticator, start_date=start_date, campaign_id=campaign_id), - ] +class SourceMailchimp(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "manifest.yaml"}) diff --git a/airbyte-integrations/connectors/source-mailchimp/source_mailchimp/spec.json b/airbyte-integrations/connectors/source-mailchimp/source_mailchimp/spec.json index f88649faa1533..11fb4936ae92a 100644 --- a/airbyte-integrations/connectors/source-mailchimp/source_mailchimp/spec.json +++ b/airbyte-integrations/connectors/source-mailchimp/source_mailchimp/spec.json @@ -70,9 +70,10 @@ "pattern_descriptor": "YYYY-MM-DDTHH:MM:SS.000Z", "examples": ["2020-01-01T00:00:00.000Z"] }, - "campaign_id": { + "data_center": { + "title": "DataCenter", + "description": "Technical fields used to identify datacenter to send request to", "type": "string", - "title": "ID of a campaign to sync email activities", "airbyte_hidden": true } } diff --git a/airbyte-integrations/connectors/source-mailchimp/source_mailchimp/streams.py b/airbyte-integrations/connectors/source-mailchimp/source_mailchimp/streams.py deleted file mode 100644 index 158eaf1e8b47d..0000000000000 --- a/airbyte-integrations/connectors/source-mailchimp/source_mailchimp/streams.py +++ /dev/null @@ -1,518 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import logging -import math -from abc import ABC, abstractmethod -from typing import Any, Iterable, List, Mapping, MutableMapping, Optional - -import pendulum -import requests -from airbyte_cdk.models import SyncMode -from airbyte_cdk.sources.streams.core import StreamData -from airbyte_cdk.sources.streams.http import HttpStream, HttpSubStream - -logger = logging.getLogger("airbyte") - - -class MailChimpStream(HttpStream, ABC): - primary_key = "id" - page_size = 1000 - - def __init__(self, **kwargs): - super().__init__(**kwargs) - self.current_offset = 0 - self.data_center = kwargs["authenticator"].data_center - - @property - def url_base(self) -> str: - return f"https://{self.data_center}.api.mailchimp.com/3.0/" - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - decoded_response = response.json() - api_data = decoded_response[self.data_field] - if len(api_data) < self.page_size: - self.current_offset = 0 - return None - else: - self.current_offset += self.page_size - return {"offset": self.current_offset} - - def request_params( - self, - stream_state: Mapping[str, Any], - stream_slice: Mapping[str, Any] = None, - next_page_token: Mapping[str, Any] = None, - ) -> MutableMapping[str, Any]: - - # The ._links field is returned by most Mailchimp endpoints and contains non-relevant schema metadata. - params = {"count": self.page_size, "exclude_fields": f"{self.data_field}._links"} - - # Handle pagination by inserting the next page's token in the request parameters - if next_page_token: - params.update(next_page_token) - return params - - def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - response_json = response.json() - yield from response_json[self.data_field] - - @property - @abstractmethod - def data_field(self) -> str: - """The response entry that contains useful data""" - pass - - def read_records( - self, - sync_mode: SyncMode, - cursor_field: List[str] = None, - stream_slice: Mapping[str, Any] = None, - stream_state: Mapping[str, Any] = None, - ) -> Iterable[StreamData]: - try: - yield from super().read_records( - sync_mode=sync_mode, cursor_field=cursor_field, stream_slice=stream_slice, stream_state=stream_state - ) - except requests.exceptions.JSONDecodeError: - logger.error(f"Unknown error while reading stream {self.name}. Response cannot be read properly. ") - - -class IncrementalMailChimpStream(MailChimpStream, ABC): - state_checkpoint_interval = math.inf - - def __init__(self, **kwargs): - self.start_date = kwargs.pop("start_date", None) - super().__init__(**kwargs) - - @property - @abstractmethod - def cursor_field(self) -> str: - """ - Defining a cursor field indicates that a stream is incremental, so any incremental stream must extend this class - and define a cursor field. - """ - pass - - @property - def filter_field(self): - return f"since_{self.cursor_field}" - - @property - def sort_field(self): - return self.cursor_field - - def filter_empty_fields(self, element: Mapping[str, Any]) -> Mapping[str, Any]: - """ - Many Mailchimp endpoints return empty strings instead of null values. - This causes validation errors on datetime columns, so for safety, we need to check for empty strings and set their value to None/null. - This method recursively traverses each element in a record and replaces any "" values with None, based on three conditions: - - 1. If the element is a dictionary, apply the method recursively to each value in the dictionary. - 2. If the element is a list, apply the method recursively to each item in the list. - 3. If the element is a string, check if it is an empty string. If so, replace it with None. - """ - - if isinstance(element, dict): - element = {k: self.filter_empty_fields(v) if v != "" else None for k, v in element.items()} - elif isinstance(element, list): - element = [self.filter_empty_fields(v) for v in element] - return element - - def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: - """ - Return the latest state by comparing the cursor value in the latest record with the stream's most recent state object - and returning an updated state object. - """ - latest_state = latest_record.get(self.cursor_field) - current_state = current_stream_state.get(self.cursor_field) or latest_state - return {self.cursor_field: max(latest_state, current_state)} - - def stream_slices( - self, *, sync_mode: SyncMode, cursor_field: List[str] = None, stream_state: Mapping[str, Any] = None - ) -> Iterable[Optional[Mapping[str, Any]]]: - slice_ = {} - stream_state = stream_state or {} - cursor_value = self.get_filter_date(self.start_date, stream_state.get(self.cursor_field)) - if cursor_value: - slice_[self.filter_field] = cursor_value - yield slice_ - - @staticmethod - def get_filter_date(start_date: str, state_date: str) -> str: - """ - Calculate the filter date to pass in the request parameters by comparing the start_date - with the value of state obtained from the stream_slice. - If only one value exists, use it by default. Otherwise, return None. - If no filter_date is provided, the API will fetch all available records. - """ - - start_date_parsed = pendulum.parse(start_date).to_iso8601_string() if start_date else None - state_date_parsed = pendulum.parse(state_date).to_iso8601_string() if state_date else None - - # Return the max of the two dates if both are present. Otherwise return whichever is present, or None. - if start_date_parsed or state_date_parsed: - return max(filter(None, [start_date_parsed, state_date_parsed]), default=None) - - def filter_old_records(self, records: Iterable, filter_date) -> Iterable: - """ - Filters out records with older cursor_values than the filter_date. - This can be used to enforce the filter for incremental streams that do not support sorting/filtering via query params. - """ - for record in records: - record_cursor_value = record.get(self.cursor_field) - if not filter_date or record_cursor_value >= filter_date: - yield record - - def request_params(self, stream_state=None, stream_slice=None, **kwargs): - stream_state = stream_state or {} - stream_slice = stream_slice or {} - params = super().request_params(stream_state=stream_state, stream_slice=stream_slice, **kwargs) - default_params = {"sort_field": self.sort_field, "sort_dir": "ASC", **stream_slice} - params.update(default_params) - return params - - def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - response = super().parse_response(response, **kwargs) - for record in response: - yield self.filter_empty_fields(record) - - -class MailChimpListSubStream(IncrementalMailChimpStream): - """ - Base class for incremental Mailchimp streams that are children of the Lists stream. - """ - - def stream_slices(self, stream_state: Mapping[str, Any] = None, **kwargs) -> Iterable[Optional[Mapping[str, Any]]]: - stream_state = stream_state or {} - parent = Lists(authenticator=self.authenticator).read_records(sync_mode=SyncMode.full_refresh) - for parent_record in parent: - slice = {"list_id": parent_record["id"]} - cursor_value = self.get_filter_date(self.start_date, stream_state.get(parent_record["id"], {}).get(self.cursor_field)) - if cursor_value: - slice[self.filter_field] = cursor_value - yield slice - - def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: - list_id = stream_slice.get("list_id") - return f"lists/{list_id}/{self.data_field}" - - def request_params(self, stream_state=None, stream_slice=None, **kwargs) -> MutableMapping[str, Any]: - params = super().request_params(stream_state=stream_state, stream_slice=stream_slice, **kwargs) - - # Get the current state value for this list_id, if it exists - # Then, use the value in state to filter the request - current_slice = stream_slice.get("list_id") - filter_date = stream_state.get(current_slice) - if filter_date: - params[self.filter_field] = filter_date.get(self.cursor_field) - return params - - def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: - current_stream_state = current_stream_state or {} - list_id = latest_record.get("list_id") - latest_cursor_value = latest_record.get(self.cursor_field) - - # Get the current state value for this list, if it exists - list_state = current_stream_state.get(list_id, {}) - current_cursor_value = list_state.get(self.cursor_field, latest_cursor_value) - - # Update the cursor value and set it in state - updated_cursor_value = max(current_cursor_value, latest_cursor_value) - current_stream_state[list_id] = {self.cursor_field: updated_cursor_value} - - return current_stream_state - - -class Lists(IncrementalMailChimpStream): - cursor_field = "date_created" - data_field = "lists" - - def path(self, **kwargs) -> str: - return "lists" - - -class Campaigns(IncrementalMailChimpStream): - cursor_field = "create_time" - data_field = "campaigns" - - def path(self, **kwargs) -> str: - return "campaigns" - - -class Automations(IncrementalMailChimpStream): - """Doc Link: https://mailchimp.com/developer/marketing/api/automation/get-automation-info/""" - - cursor_field = "create_time" - data_field = "automations" - - def path(self, **kwargs) -> str: - return "automations" - - -class EmailActivity(IncrementalMailChimpStream): - cursor_field = "timestamp" - filter_field = "since" - sort_field = "create_time" - data_field = "emails" - primary_key = ["timestamp", "email_id", "action"] - - def __init__(self, campaign_id: Optional[str] = None, **kwargs): - super().__init__(**kwargs) - self.campaign_id = campaign_id - - def stream_slices( - self, *, sync_mode: SyncMode, cursor_field: List[str] = None, stream_state: Mapping[str, Any] = None - ) -> Iterable[Optional[Mapping[str, Any]]]: - stream_state = stream_state or {} - if self.campaign_id: - # this is a workaround to speed up SATs and enable incremental tests - campaigns = [{"id": self.campaign_id}] - else: - campaigns = Campaigns(authenticator=self.authenticator).read_records(sync_mode=SyncMode.full_refresh) - for campaign in campaigns: - slice_ = {"campaign_id": campaign["id"]} - state_value = stream_state.get(campaign["id"], {}).get(self.cursor_field) - cursor_value = self.get_filter_date(self.start_date, state_value) - if cursor_value: - slice_[self.filter_field] = cursor_value - yield slice_ - - def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: - campaign_id = stream_slice["campaign_id"] - return f"reports/{campaign_id}/email-activity" - - def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: - """ - Return the latest state by comparing the campaign_id and cursor value in the latest record with the stream's most recent state object - and returning an updated state object. - """ - campaign_id = latest_record.get("campaign_id") - latest_cursor_value = latest_record.get(self.cursor_field) - current_stream_state = current_stream_state or {} - current_state = current_stream_state.get(campaign_id) if current_stream_state else None - if current_state: - current_state = current_state.get(self.cursor_field) - current_state_value = current_state or latest_cursor_value - max_value = max(current_state_value, latest_cursor_value) - new_value = {self.cursor_field: max_value} - - current_stream_state[campaign_id] = new_value - return current_stream_state - - def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - try: - response_json = response.json() - except requests.exceptions.JSONDecodeError: - logger.error(f"Response returned with {response.status_code=}, {response.content=}") - response_json = {} - # transform before save - # [{'campaign_id', 'list_id', 'list_is_active', 'email_id', 'email_address', 'activity[array[object]]', '_links'}] -> - # -> [[{'campaign_id', 'list_id', 'list_is_active', 'email_id', 'email_address', '**activity[i]', '_links'}, ...]] - data = response_json.get(self.data_field, []) - for item in data: - for activity_item in item.pop("activity", []): - yield {**item, **activity_item} - - -class InterestCategories(MailChimpStream, HttpSubStream): - """ - Get information about interest categories for a specific list. - Docs link: https://mailchimp.com/developer/marketing/api/interest-categories/list-interest-categories/ - """ - - data_field = "categories" - - def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: - """ - Get the list_id from the parent stream slice and use it to construct the path. - """ - list_id = stream_slice.get("parent").get("id") - return f"lists/{list_id}/interest-categories" - - -class Interests(MailChimpStream, HttpSubStream): - """ - Get a list of interests for a specific interest category. - Docs link: https://mailchimp.com/developer/marketing/api/interests/list-interests-in-category/ - """ - - data_field = "interests" - - def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: - """ - Get the list_id from the parent stream slice and use it to construct the path. - """ - list_id = stream_slice.get("parent").get("list_id") - category_id = stream_slice.get("parent").get("id") - return f"lists/{list_id}/interest-categories/{category_id}/interests" - - -class ListMembers(MailChimpListSubStream): - """ - Get information about members in a specific Mailchimp list. - Docs link: https://mailchimp.com/developer/marketing/api/list-members/list-members-info/ - """ - - cursor_field = "last_changed" - data_field = "members" - - -class Reports(IncrementalMailChimpStream): - cursor_field = "send_time" - data_field = "reports" - - def path(self, **kwargs) -> str: - return "reports" - - -class SegmentMembers(MailChimpListSubStream): - """ - Get information about members in a specific segment. - Docs link: https://mailchimp.com/developer/marketing/api/list-segment-members/list-members-in-segment/ - """ - - cursor_field = "last_changed" - data_field = "members" - - def stream_slices(self, **kwargs) -> Iterable[Optional[Mapping[str, Any]]]: - """ - Each slice consists of a list_id and segment_id pair - """ - segments_slices = Segments(authenticator=self.authenticator).stream_slices(sync_mode=SyncMode.full_refresh) - - for slice in segments_slices: - segment_records = Segments(authenticator=self.authenticator).read_records(sync_mode=SyncMode.full_refresh, stream_slice=slice) - - for segment in segment_records: - yield {"list_id": segment["list_id"], "segment_id": segment["id"]} - - def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: - list_id = stream_slice.get("list_id") - segment_id = stream_slice.get("segment_id") - return f"lists/{list_id}/segments/{segment_id}/members" - - def parse_response(self, response: requests.Response, stream_state: Mapping[str, Any], stream_slice, **kwargs) -> Iterable[Mapping]: - """ - The SegmentMembers endpoint does not support sorting or filtering, - so we need to apply our own filtering logic before reading. - The foreign key "segment_id" is also added to each record before being read. - """ - response = super().parse_response(response, **kwargs) - - # Calculate the filter date to compare all records against in this slice - slice_cursor_value = stream_state.get(str(stream_slice.get("segment_id")), {}).get(self.cursor_field) - filter_date = self.get_filter_date(self.start_date, slice_cursor_value) - - for record in self.filter_old_records(response, filter_date): - # Add the segment_id foreign_key to each record - record["segment_id"] = stream_slice.get("segment_id") - yield record - - def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: - current_stream_state = current_stream_state or {} - segment_id = str(latest_record.get("segment_id")) - latest_cursor_value = latest_record.get(self.cursor_field) - - # Get the current state value for this list, if it exists - segment_state = current_stream_state.get(segment_id, {}) - current_cursor_value = segment_state.get(self.cursor_field, latest_cursor_value) - - # Update the cursor value and set it in state - updated_cursor_value = max(current_cursor_value, latest_cursor_value) - current_stream_state[segment_id] = {self.cursor_field: updated_cursor_value} - return current_stream_state - - -class Segments(MailChimpListSubStream): - """ - Get information about all available segments for a specific list. - Docs link: https://mailchimp.com/developer/marketing/api/list-segments/list-segments/ - """ - - cursor_field = "updated_at" - data_field = "segments" - - -class Tags(MailChimpStream, HttpSubStream): - """ - Get information about tags for a specific list. - Docs link: https://mailchimp.com/developer/marketing/api/list-tags/list-tags-for-list/ - """ - - data_field = "tags" - - def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: - list_id = stream_slice.get("parent").get("id") - return f"lists/{list_id}/tag-search" - - def parse_response(self, response: requests.Response, stream_slice, **kwargs) -> Iterable[Mapping]: - """ - Tags do not reference parent_ids, so we need to add the list_id to each record. - """ - response = super().parse_response(response, **kwargs) - - for record in response: - record["list_id"] = stream_slice.get("parent").get("id") - yield record - - -class Unsubscribes(IncrementalMailChimpStream): - """ - List of members who have unsubscribed from a specific campaign. - Docs link: https://mailchimp.com/developer/marketing/api/unsub-reports/list-unsubscribed-members/ - """ - - cursor_field = "timestamp" - data_field = "unsubscribes" - # There is no unique identifier for unsubscribes, so we use a composite key - # consisting of the campaign_id, email_id, and timestamp. - primary_key = ["campaign_id", "email_id", "timestamp"] - - def __init__(self, campaign_id: Optional[str] = None, **kwargs): - super().__init__(**kwargs) - self.campaign_id = campaign_id - - def stream_slices( - self, *, sync_mode: SyncMode, cursor_field: List[str] = None, stream_state: Mapping[str, Any] = None - ) -> Iterable[Optional[Mapping[str, Any]]]: - - if self.campaign_id: - # Similar to EmailActivity stream, this is a workaround to speed up SATs - # and enable incremental tests by reading from a single campaign - campaigns = [{"id": self.campaign_id}] - else: - campaigns = Campaigns(authenticator=self.authenticator).read_records(sync_mode=SyncMode.full_refresh) - for campaign in campaigns: - yield {"campaign_id": campaign["id"]} - - def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: - campaign_id = stream_slice.get("campaign_id") - return f"reports/{campaign_id}/unsubscribed" - - def parse_response(self, response: requests.Response, stream_state: Mapping[str, Any], stream_slice, **kwargs) -> Iterable[Mapping]: - """ - The Unsubscribes endpoint does not support sorting or filtering, - so we need to apply our own filtering logic before reading. - """ - - response = super().parse_response(response, **kwargs) - - slice_cursor_value = stream_state.get(stream_slice.get("campaign_id", {}), {}).get(self.cursor_field) - filter_date = self.get_filter_date(self.start_date, slice_cursor_value) - yield from self.filter_old_records(response, filter_date) - - def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: - current_stream_state = current_stream_state or {} - campaign_id = latest_record.get("campaign_id") - latest_cursor_value = latest_record.get(self.cursor_field) - - # Get the current state value for this campaign, if it exists - campaign_state = current_stream_state.get(campaign_id, {}) - current_cursor_value = campaign_state.get(self.cursor_field, latest_cursor_value) - - # Update the cursor value and set it in state - updated_cursor_value = max(current_cursor_value, latest_cursor_value) - current_stream_state[campaign_id] = {self.cursor_field: updated_cursor_value} - return current_stream_state diff --git a/airbyte-integrations/connectors/source-mailchimp/unit_tests/conftest.py b/airbyte-integrations/connectors/source-mailchimp/unit_tests/conftest.py index 5305f0dadab45..c387004a5110b 100644 --- a/airbyte-integrations/connectors/source-mailchimp/unit_tests/conftest.py +++ b/airbyte-integrations/connectors/source-mailchimp/unit_tests/conftest.py @@ -3,8 +3,6 @@ # from pytest import fixture -from source_mailchimp.source import MailChimpAuthenticator -from source_mailchimp.streams import Campaigns, Unsubscribes @fixture(name="data_center") @@ -42,36 +40,3 @@ def apikey_config_fixture(data_center): @fixture(name="wrong_config") def wrong_config_fixture(): return {"credentials": {"auth_type": "not auth_type"}} - - -@fixture(name="auth") -def authenticator_fixture(apikey_config): - return MailChimpAuthenticator().get_auth(apikey_config) - - -@fixture(name="campaigns_stream") -def campaigns_stream_fixture(auth): - return Campaigns(authenticator=auth) - - -@fixture(name="unsubscribes_stream") -def unsubscribes_stream_fixture(auth): - return Unsubscribes(authenticator=auth) - - -@fixture(name="mock_campaigns_response") -def mock_campaigns_response_fixture(): - return [ - {"id": "campaign_1", "web_id": 1, "type": "regular", "create_time": "2022-01-01T00:00:00Z"}, - {"id": "campaign_2", "web_id": 2, "type": "plaintext", "create_time": "2022-01-02T00:00:00Z"}, - {"id": "campaign_3", "web_id": 3, "type": "variate", "create_time": "2022-01-03T00:00:00Z"}, - ] - - -@fixture(name="mock_unsubscribes_state") -def mock_unsubscribes_state_fixture(): - return { - "campaign_1": {"timestamp": "2022-01-01T00:00:00Z"}, - "campaign_2": {"timestamp": "2022-01-02T00:00:00Z"}, - "campaign_3": {"timestamp": "2022-01-03T00:00:00Z"}, - } diff --git a/airbyte-integrations/connectors/source-mailchimp/unit_tests/integration/__init__.py b/airbyte-integrations/connectors/source-mailchimp/unit_tests/integration/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/airbyte-integrations/connectors/source-mailchimp/unit_tests/integration/config.py b/airbyte-integrations/connectors/source-mailchimp/unit_tests/integration/config.py new file mode 100644 index 0000000000000..7e363fea96775 --- /dev/null +++ b/airbyte-integrations/connectors/source-mailchimp/unit_tests/integration/config.py @@ -0,0 +1,16 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from datetime import datetime +from typing import Any, Dict + + +class ConfigBuilder: + def __init__(self) -> None: + self._config: Dict[str, Any] = {"credentials": {"auth_type": "apikey", "apikey": "Mailchimp_token-us10"}, "data_center": "us10"} + + def with_start_date(self, start_datetime: datetime) -> "ConfigBuilder": + self._config["start_date"] = start_datetime.isoformat()[:-3] + "Z" + return self + + def build(self) -> Dict[str, Any]: + return self._config diff --git a/airbyte-integrations/connectors/source-mailchimp/unit_tests/integration/test_automations.py b/airbyte-integrations/connectors/source-mailchimp/unit_tests/integration/test_automations.py new file mode 100644 index 0000000000000..602562aa2db46 --- /dev/null +++ b/airbyte-integrations/connectors/source-mailchimp/unit_tests/integration/test_automations.py @@ -0,0 +1,119 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +import datetime +import json +from unittest import TestCase + +import freezegun +from airbyte_cdk.models import SyncMode +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import read +from airbyte_cdk.test.mock_http import HttpMocker, HttpRequest, HttpResponse +from airbyte_cdk.test.mock_http.response_builder import find_template +from airbyte_cdk.test.state_builder import StateBuilder +from source_mailchimp import SourceMailchimp + +from .config import ConfigBuilder + +_CONFIG = ConfigBuilder().with_start_date(datetime.datetime(2023, 1, 1, 0, 0, 0, 1000)).build() + + +def _create_catalog(sync_mode: SyncMode = SyncMode.full_refresh): + return CatalogBuilder().with_stream(name="automations", sync_mode=sync_mode).build() + + +@freezegun.freeze_time("2023-01-31T23:59:59.001000Z") +class AutomationsTest(TestCase): + def setUp(self) -> None: + """Base setup for all tests. Enter test mocker.""" + + self.r_mock = HttpMocker() + self.r_mock.__enter__() + + def teardown(self): + """Stops and resets HttpMocker instance.""" + self.r_mock.__exit__() + + def test_read_full_refresh_no_pagination(self): + """Ensure http integration and record extraction""" + self.r_mock.get( + HttpRequest( + url="https://us10.api.mailchimp.com/3.0/automations", + query_params={ + "sort_field": "create_time", + "sort_dir": "ASC", + "exclude_fields": "automations._links", + "count": 1000, + "since_create_time": "2022-12-31T23:59:59.001000Z", + "before_create_time": "2023-01-31T23:59:59.001000Z", + }, + ), + HttpResponse(json.dumps(find_template("automations", __file__)), 200), + ) + + source = SourceMailchimp() + actual_messages = read(source, config=_CONFIG, catalog=_create_catalog()) + + assert len(actual_messages.records) == 1 + + def test_full_refresh_with_pagination(self): + """Ensure pagination""" + self.r_mock.get( + HttpRequest( + url="https://us10.api.mailchimp.com/3.0/automations", + query_params={ + "sort_field": "create_time", + "sort_dir": "ASC", + "exclude_fields": "automations._links", + "count": 1000, + "since_create_time": "2022-12-31T23:59:59.001000Z", + "before_create_time": "2023-01-31T23:59:59.001000Z", + }, + ), + HttpResponse(json.dumps({"automations": find_template("automations", __file__)["automations"] * 1002}), 200), + ) + self.r_mock.get( + HttpRequest( + url="https://us10.api.mailchimp.com/3.0/automations", + query_params={ + "sort_field": "create_time", + "sort_dir": "ASC", + "exclude_fields": "automations._links", + "count": 1000, + "offset": 1002, + "since_create_time": "2022-12-31T23:59:59.001000Z", + "before_create_time": "2023-01-31T23:59:59.001000Z", + }, + ), + HttpResponse(json.dumps(find_template("automations", __file__)), 200), + ) + source = SourceMailchimp() + actual_messages = read(source, config=_CONFIG, catalog=_create_catalog()) + + assert len(actual_messages.records) == 1003 + + def test_when_read_incrementally_then_emit_state_message(self): + """Ensure incremental sync emits correct stream state message""" + + self.r_mock.get( + HttpRequest( + url="https://us10.api.mailchimp.com/3.0/automations", + query_params={ + "sort_field": "create_time", + "sort_dir": "ASC", + "exclude_fields": "automations._links", + "count": 1000, + "since_create_time": "2022-12-31T23:59:59.001000Z", + "before_create_time": "2023-01-31T23:59:59.001000Z", + }, + ), + HttpResponse(json.dumps(find_template("automations", __file__)), 200), + ) + + source = SourceMailchimp() + actual_messages = read( + source, + config=_CONFIG, + catalog=_create_catalog(sync_mode=SyncMode.incremental), + state=StateBuilder().with_stream_state("automations", {"create_time": "2220-11-23T05:42:11+00:00"}).build(), + ) + actual_messages.state_messages[0].state.stream.stream_state == {"create_time": "2220-11-23T05:42:11+00:00"} diff --git a/airbyte-integrations/connectors/source-mailchimp/unit_tests/resource/http/response/automations.json b/airbyte-integrations/connectors/source-mailchimp/unit_tests/resource/http/response/automations.json new file mode 100644 index 0000000000000..3be7ec7ee9726 --- /dev/null +++ b/airbyte-integrations/connectors/source-mailchimp/unit_tests/resource/http/response/automations.json @@ -0,0 +1,68 @@ +{ + "automations": [ + { + "id": "string", + "create_time": "2019-08-24T14:15:22Z", + "start_time": "2019-08-24T14:15:22Z", + "status": "save", + "emails_sent": 0, + "recipients": { + "list_id": "string", + "list_is_active": true, + "list_name": "string", + "segment_opts": { + "saved_segment_id": 0, + "match": "any", + "conditions": [null] + }, + "store_id": "1a2df69xxx" + }, + "settings": { + "title": "string", + "from_name": "string", + "reply_to": "string", + "use_conversation": true, + "to_name": "string", + "authenticate": true, + "auto_footer": true, + "inline_css": true + }, + "tracking": { + "opens": true, + "html_clicks": true, + "text_clicks": true, + "goal_tracking": true, + "ecomm360": true, + "google_analytics": "string", + "clicktale": "string", + "salesforce": { + "campaign": true, + "notes": true + }, + "capsule": { + "notes": true + } + }, + "trigger_settings": { + "workflow_type": "abandonedBrowse", + "workflow_title": "string", + "runtime": { + "days": ["sunday"], + "hours": { + "type": "send_asap" + } + }, + "workflow_emails_count": 0 + }, + "report_summary": { + "opens": 0, + "unique_opens": 0, + "open_rate": 0, + "clicks": 0, + "subscriber_clicks": 0, + "click_rate": 0 + } + } + ], + "total_items": 2 +} diff --git a/airbyte-integrations/connectors/source-mailchimp/unit_tests/test_component_custom_email_activity_extractor.py b/airbyte-integrations/connectors/source-mailchimp/unit_tests/test_component_custom_email_activity_extractor.py new file mode 100644 index 0000000000000..20332dd740813 --- /dev/null +++ b/airbyte-integrations/connectors/source-mailchimp/unit_tests/test_component_custom_email_activity_extractor.py @@ -0,0 +1,66 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +import json + +import requests +from airbyte_cdk.sources.declarative.decoders import JsonDecoder +from source_mailchimp.components import MailChimpRecordExtractorEmailActivity + + +def test_email_activity_extractor(): + decoder = JsonDecoder(parameters={}) + field_path = ["emails"] + config = {"response_override": "stop_if_you_see_me"} + extractor = MailChimpRecordExtractorEmailActivity(field_path=field_path, decoder=decoder, config=config, parameters={}) + + body = { + "emails": [ + { + "campaign_id": "string", + "list_id": "string", + "list_is_active": True, + "email_id": "string", + "email_address": "AirbyteMailchimpUser@gmail.com", + "activity": [ + {"action": "close", "type": "string", "timestamp": "2019-08-24T14:15:22Z", "url": "string", "ip": "string"}, + {"action": "open", "type": "string", "timestamp": "2019-08-24T14:15:22Z", "url": "string", "ip": "string"}, + ], + } + ], + "campaign_id": "string", + "total_items": 0, + } + response = requests.Response() + response._content = json.dumps(body).encode("utf-8") + + expected_records = [ + { + "action": "close", + "campaign_id": "string", + "email_address": "AirbyteMailchimpUser@gmail.com", + "email_id": "string", + "ip": "string", + "list_id": "string", + "list_is_active": True, + "timestamp": "2019-08-24T14:15:22Z", + "type": "string", + "url": "string", + }, + { + "action": "open", + "campaign_id": "string", + "email_address": "AirbyteMailchimpUser@gmail.com", + "email_id": "string", + "ip": "string", + "list_id": "string", + "list_is_active": True, + "timestamp": "2019-08-24T14:15:22Z", + "type": "string", + "url": "string", + }, + ] + + assert extractor.extract_records(response=response) == expected_records diff --git a/airbyte-integrations/connectors/source-mailchimp/unit_tests/test_component_custom_filter.py b/airbyte-integrations/connectors/source-mailchimp/unit_tests/test_component_custom_filter.py new file mode 100644 index 0000000000000..e4454a92f8772 --- /dev/null +++ b/airbyte-integrations/connectors/source-mailchimp/unit_tests/test_component_custom_filter.py @@ -0,0 +1,80 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +import pytest +from airbyte_cdk.sources.declarative.types import StreamSlice +from source_mailchimp.components import MailChimpRecordFilter + + +@pytest.mark.parametrize( + ["config", "stream_state", "len_expected_records"], + [ + [ + {"start_date": "2020-02-16T17:30:00.000Z"}, + { + "states": [ + { + "partition": { + "id": "7847cdaeff", + "parent_slice": {"end_time": "2023-01-07T12:50:16.411612Z", "start_time": "2022-12-07T12:50:17.411612Z"}, + }, + "cursor": {"timestamp": "2024-02-19T12:50:18+0000"}, + } + ] + }, + 0, + ], + [{"start_date": "2020-02-16T17:30:00.000Z"}, {}, 2], + [{}, {}, 2], + [ + {}, + { + "states": [ + { + "partition": { + "id": "7847cdaeff", + "parent_slice": {"end_time": "2023-01-07T12:50:16.411612Z", "start_time": "2022-12-07T12:50:17.411612Z"}, + }, + "cursor": {"timestamp": "2021-02-19T12:50:18+0000"}, + } + ] + }, + 1, + ], + ], + ids=[ + "start_date_and_stream_state", + "start_date_and_NO_stream_state", + "NO_start_date_and_NO_stream_state", + "NO_start_date_and_stream_state", + ], +) +def test_mailchimp_custom_filter(config: dict, stream_state: dict, len_expected_records: int): + stream_slice = StreamSlice( + partition={"id": "7847cdaeff"}, cursor_slice={"end_time": "2024-02-19T13:33:56+0000", "start_time": "2022-10-07T13:33:56+0000"} + ) + parameters = { + "name": "segment_members", + "cursor_field": "timestamp", + } + record_filter = MailChimpRecordFilter(config=config, condition="", parameters=parameters) + + records = [ + { + "id": "1dd067951f91190b65b43305b9166bc7", + "timestamp": "2020-12-27T08:34:39+00:00", + "campaign_id": "7847cdaeff", + "segment_id": 13506120, + }, + { + "id": "1dd067951f91190b65b43305b9166bc7", + "timestamp": "2022-12-27T08:34:39+00:00", + "campaign_id": "7847cdaeff", + "segment_id": 13506120, + }, + ] + + actual_records = record_filter.filter_records(records, stream_state=stream_state, stream_slice=stream_slice) + assert len(actual_records) == len_expected_records diff --git a/airbyte-integrations/connectors/source-mailchimp/unit_tests/test_config_datacenter_migration.py b/airbyte-integrations/connectors/source-mailchimp/unit_tests/test_config_datacenter_migration.py new file mode 100644 index 0000000000000..20fe8312352e2 --- /dev/null +++ b/airbyte-integrations/connectors/source-mailchimp/unit_tests/test_config_datacenter_migration.py @@ -0,0 +1,37 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# +import json +import os +from typing import Any, Mapping + +import pytest +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource +from source_mailchimp import SourceMailchimp +from source_mailchimp.config_migrations import MigrateDataCenter + +# BASE ARGS +SOURCE: YamlDeclarativeSource = SourceMailchimp() + + +# HELPERS +def load_config(config_path: str) -> Mapping[str, Any]: + with open(config_path, "r") as config: + return json.load(config) + + +@pytest.mark.parametrize( + "config_path", + [ + (f"{os.path.dirname(__file__)}/test_configs/test_config_api_key.json"), + (f"{os.path.dirname(__file__)}/test_configs/test_config_oauth.json"), + ], + ids=["test_requester_datacenter_with_api_key", "test_requester_datacenter_with_oauth_flow"], +) +def test_mailchimp_config_migration(config_path: str, requests_mock): + requests_mock.get("https://login.mailchimp.com/oauth2/metadata", json={"dc": "us10"}) + + migration_instance = MigrateDataCenter + migration_instance.migrate(["check", "--config", config_path], SOURCE) + test_migrated_config = load_config(config_path) + assert test_migrated_config.get("data_center") == "us10" diff --git a/airbyte-integrations/connectors/source-mailchimp/unit_tests/test_configs/test_config_api_key.json b/airbyte-integrations/connectors/source-mailchimp/unit_tests/test_configs/test_config_api_key.json new file mode 100644 index 0000000000000..20866d9bfcf7b --- /dev/null +++ b/airbyte-integrations/connectors/source-mailchimp/unit_tests/test_configs/test_config_api_key.json @@ -0,0 +1,4 @@ +{ + "credentials": { "auth_type": "apikey", "apikey": "random_api_key-us10" }, + "data_center": "us10" +} diff --git a/airbyte-integrations/connectors/source-mailchimp/unit_tests/test_configs/test_config_oauth.json b/airbyte-integrations/connectors/source-mailchimp/unit_tests/test_configs/test_config_oauth.json new file mode 100644 index 0000000000000..ef7ef97ee2413 --- /dev/null +++ b/airbyte-integrations/connectors/source-mailchimp/unit_tests/test_configs/test_config_oauth.json @@ -0,0 +1,8 @@ +{ + "credentials": { + "auth_type": "oauth2.0", + "client_id": "client_id", + "client_secret": "client_secret", + "access_token": "access_token" + } +} diff --git a/airbyte-integrations/connectors/source-mailchimp/unit_tests/test_source.py b/airbyte-integrations/connectors/source-mailchimp/unit_tests/test_source.py deleted file mode 100644 index b1ccfcddac6ad..0000000000000 --- a/airbyte-integrations/connectors/source-mailchimp/unit_tests/test_source.py +++ /dev/null @@ -1,116 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import logging - -import pytest -from source_mailchimp.source import MailChimpAuthenticator, SourceMailchimp - -logger = logging.getLogger("airbyte") - - -def test_check_connection_ok(requests_mock, config, data_center): - responses = [ - {"json": {"health_status": "Everything's Chimpy!"}}, - ] - requests_mock.register_uri("GET", f"https://{data_center}.api.mailchimp.com/3.0/ping", responses) - ok, error_msg = SourceMailchimp().check_connection(logger, config=config) - - assert ok - assert not error_msg - - -@pytest.mark.parametrize( - "response, expected_message", - [ - ( - { - "json": { - "title": "API Key Invalid", - "details": "Your API key may be invalid, or you've attempted to access the wrong datacenter.", - } - }, - "Encountered an error while connecting to Mailchimp. Type: API Key Invalid. Details: Your API key may be invalid, or you've attempted to access the wrong datacenter.", - ), - ( - {"json": {"title": "Forbidden", "details": "You don't have permission to access this resource."}}, - "Encountered an error while connecting to Mailchimp. Type: Forbidden. Details: You don't have permission to access this resource.", - ), - ( - {"json": {}}, - "Encountered an error while connecting to Mailchimp. Type: Unknown Error. Details: An unknown error occurred. Please verify your credentials and try again.", - ), - ], - ids=["API Key Invalid", "Forbidden", "Unknown Error"], -) -def test_check_connection_error(requests_mock, config, data_center, response, expected_message): - requests_mock.register_uri("GET", f"https://{data_center}.api.mailchimp.com/3.0/ping", json=response["json"]) - ok, error_msg = SourceMailchimp().check_connection(logger, config=config) - - assert not ok - assert error_msg == expected_message - - -def test_get_oauth_data_center_ok(requests_mock, access_token, data_center): - responses = [ - {"json": {"dc": data_center}, "status_code": 200}, - ] - requests_mock.register_uri("GET", "https://login.mailchimp.com/oauth2/metadata", responses) - assert MailChimpAuthenticator().get_oauth_data_center(access_token) == data_center - - -def test_get_oauth_data_center_exception(requests_mock, access_token): - responses = [ - {"json": {}, "status_code": 200}, - {"json": {"error": "invalid_token"}, "status_code": 200}, - {"status_code": 403}, - ] - requests_mock.register_uri("GET", "https://login.mailchimp.com/oauth2/metadata", responses) - with pytest.raises(Exception): - MailChimpAuthenticator().get_oauth_data_center(access_token) - - -def test_oauth_config(requests_mock, oauth_config, data_center): - responses = [ - {"json": {"dc": data_center}, "status_code": 200}, - ] - requests_mock.register_uri("GET", "https://login.mailchimp.com/oauth2/metadata", responses) - assert MailChimpAuthenticator().get_auth(oauth_config) - - -def test_apikey_config(apikey_config): - assert MailChimpAuthenticator().get_auth(apikey_config) - - -def test_wrong_config(wrong_config): - with pytest.raises(Exception): - MailChimpAuthenticator().get_auth(wrong_config) - - -@pytest.mark.parametrize( - "config, expected_return", - [ - ({}, None), - ({"start_date": "2021-01-01T00:00:00.000Z"}, None), - ({"start_date": "2021-99-99T79:89:99.123Z"}, "The provided start date is not a valid date. Please check the date you input and try again."), - ({"start_date": "2021-01-01T00:00:00.000"}, "Please check the format of the start date against the pattern descriptor."), - ({"start_date": "2025-01-25T00:00:00.000Z"}, "The start date cannot be greater than the current date."), - ], - ids=[ - "No start date", - "Valid start date", - "Invalid start date", - "Invalid format", - "Future start date", - ] -) -def test_validate_start_date(config, expected_return): - source = SourceMailchimp() - result = source._validate_start_date(config) - assert result == expected_return - - -def test_streams_count(config): - streams = SourceMailchimp().streams(config) - assert len(streams) == 12 diff --git a/airbyte-integrations/connectors/source-mailchimp/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-mailchimp/unit_tests/test_streams.py deleted file mode 100644 index b441fe26f7b3e..0000000000000 --- a/airbyte-integrations/connectors/source-mailchimp/unit_tests/test_streams.py +++ /dev/null @@ -1,696 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import logging -from unittest.mock import MagicMock - -import pytest -import requests -import responses -from airbyte_cdk.models import SyncMode -from requests.exceptions import HTTPError -from source_mailchimp.streams import ( - Automations, - Campaigns, - EmailActivity, - InterestCategories, - Interests, - ListMembers, - Lists, - Reports, - SegmentMembers, - Segments, - Tags, - Unsubscribes, -) -from utils import read_full_refresh, read_incremental - - -@pytest.mark.parametrize( - "stream, endpoint", - [ - (Lists, "lists"), - (Campaigns, "campaigns"), - (Segments, "lists/123/segments"), - ], -) -def test_stream_read(requests_mock, auth, stream, endpoint): - args = {"authenticator": auth} - stream = stream(**args) - stream_responses = [ - { - "json": { - stream.data_field: [{"id": "test_id"}], - } - } - ] - stream_url = stream.url_base + endpoint - requests_mock.register_uri("GET", stream_url, stream_responses) - - # Mock the 'lists' endpoint as Segments stream_slice - lists_url = stream.url_base + "lists" - lists_response = {"json": {"lists": [{"id": "123"}]}} - requests_mock.register_uri("GET", lists_url, [lists_response]) - records = read_full_refresh(stream) - - assert records - - -def test_next_page_token(auth): - args = {"authenticator": auth} - stream = Lists(**args) - inputs = {"response": MagicMock()} - expected_token = None - assert stream.next_page_token(**inputs) == expected_token - - resp = {"lists": [{"id": i} for i in range(1001)]} - inputs = {"response": MagicMock(json=MagicMock(return_value=resp))} - expected_token = {"offset": 1000} - assert stream.next_page_token(**inputs) == expected_token - - -@pytest.mark.parametrize( - "stream, inputs, expected_params", - [ - ( - Lists, - {"stream_slice": None, "stream_state": None, "next_page_token": None}, - {"count": 1000, "sort_dir": "ASC", "sort_field": "date_created", "exclude_fields": "lists._links"}, - ), - ( - Lists, - {"stream_slice": None, "stream_state": None, "next_page_token": {"offset": 1000}}, - {"count": 1000, "sort_dir": "ASC", "sort_field": "date_created", "offset": 1000, "exclude_fields": "lists._links"}, - ), - ( - InterestCategories, - {"stream_slice": {"parent": {"id": "123"}}, "stream_state": None, "next_page_token": None}, - {"count": 1000, "exclude_fields": "categories._links"}, - ), - ( - Interests, - {"stream_slice": {"parent": {"id": "123"}}, "stream_state": None, "next_page_token": {"offset": 2000}}, - {"count": 1000, "exclude_fields": "interests._links", "offset": 2000}, - ), - ], - ids=[ - "Lists: no next_page_token or state to add to request params", - "Lists: next_page_token added to request params", - "InterestCategories: no next_page_token to add to request params", - "Interests: next_page_token added to request params", - ], -) -def test_request_params(auth, stream, inputs, expected_params): - args = {"authenticator": auth} - if stream == InterestCategories: - args["parent"] = Lists(**args) - elif stream == Interests: - args["parent"] = InterestCategories(authenticator=auth, parent=Lists(authenticator=auth)) - stream = stream(**args) - assert stream.request_params(**inputs) == expected_params - - -@pytest.mark.parametrize( - "current_state_stream, latest_record, expected_state", - [ - ({}, {"date_created": "2020-01-01"}, {"date_created": "2020-01-01"}), - ({"date_created": "2020-01-01"}, {"date_created": "2021-01-01"}, {"date_created": "2021-01-01"}), - ({"date_created": "2021-01-01"}, {"date_created": "2022-01-01"}, {"date_created": "2022-01-01"}), - ], -) -def test_get_updated_state(auth, current_state_stream, latest_record, expected_state): - args = {"authenticator": auth} - stream = Lists(**args) - - new_stream_state = stream.get_updated_state(current_state_stream, latest_record) - assert new_stream_state == expected_state - - -@responses.activate -def test_stream_teams_read(auth): - args = {"authenticator": auth} - stream = EmailActivity(**args) - stream_url = stream.url_base + "reports/123/email-activity" - campaigns_stream_url = stream.url_base + "campaigns" - responses.add("GET", campaigns_stream_url, json={"campaigns": [{"id": 123}]}) - - response = {"emails": [{"campaign_id": 123, "activity": [{"action": "q", "timestamp": "2021-08-24T14:15:22Z"}]}]} - responses.add("GET", stream_url, json=response) - records = read_incremental(stream, {}) - - assert records - assert records == [{"campaign_id": 123, "action": "q", "timestamp": "2021-08-24T14:15:22Z"}] - assert len(responses.calls) == 2 - - -@responses.activate -def test_stream_parse_json_error(auth, caplog): - args = {"authenticator": auth} - stream = EmailActivity(**args) - stream_url = stream.url_base + "reports/123/email-activity" - campaigns_stream_url = stream.url_base + "campaigns" - responses.add("GET", campaigns_stream_url, json={"campaigns": [{"id": 123}]}) - responses.add("GET", stream_url, body="not_valid_json") - read_incremental(stream, {}) - assert "response.content=b'not_valid_json'" in caplog.text - - -@pytest.mark.parametrize( - "stream_class, stream_slice, stream_state, next_page_token, expected_params", - [ - # Test case 1: no state, no next_page_token - ( - Segments, - {"list_id": "123"}, - {}, - None, - {"count": 1000, "sort_dir": "ASC", "sort_field": "updated_at", "list_id": "123", "exclude_fields": "segments._links"}, - ), - # Test case 2: state and next_page_token - ( - ListMembers, - {"list_id": "123", "since_last_changed": "2023-10-15T00:00:00Z"}, - {"123": {"last_changed": "2023-10-15T00:00:00Z"}}, - {"offset": 1000}, - { - "count": 1000, - "sort_dir": "ASC", - "sort_field": "last_changed", - "list_id": "123", - "offset": 1000, - "exclude_fields": "members._links", - "since_last_changed": "2023-10-15T00:00:00Z", - }, - ), - ], - ids=[ - "Segments: no next_page_token or state to add to request params", - "ListMembers: next_page_token and state filter added to request params", - ], -) -def test_list_child_request_params(auth, stream_class, stream_slice, stream_state, next_page_token, expected_params): - """ - Tests the request_params method for the shared MailChimpListSubStream class. - """ - stream = stream_class(authenticator=auth) - params = stream.request_params(stream_slice=stream_slice, stream_state=stream_state, next_page_token=next_page_token) - assert params == expected_params - - -@pytest.mark.parametrize( - "stream_class, current_stream_state,latest_record,expected_state", - [ - # Test case 1: current_stream_state is empty - (Segments, {}, {"list_id": "list_1", "updated_at": "2023-10-15T00:00:00Z"}, {"list_1": {"updated_at": "2023-10-15T00:00:00Z"}}), - # Test case 2: latest_record's cursor is higher than current_stream_state for list_1 and updates it - ( - Segments, - {"list_1": {"updated_at": "2023-10-14T00:00:00Z"}, "list_2": {"updated_at": "2023-10-15T00:00:00Z"}}, - {"list_id": "list_1", "updated_at": "2023-10-15T00:00:00Z"}, - {"list_1": {"updated_at": "2023-10-15T00:00:00Z"}, "list_2": {"updated_at": "2023-10-15T00:00:00Z"}}, - ), - # Test case 3: latest_record's cursor is lower than current_stream_state for list_2, no state update - ( - ListMembers, - {"list_1": {"last_changed": "2023-10-15T00:00:00Z"}, "list_2": {"last_changed": "2023-10-15T00:00:00Z"}}, - {"list_id": "list_2", "last_changed": "2023-10-14T00:00:00Z"}, - {"list_1": {"last_changed": "2023-10-15T00:00:00Z"}, "list_2": {"last_changed": "2023-10-15T00:00:00Z"}}, - ), - ( - SegmentMembers, - {"segment_1": {"last_changed": "2023-10-15T00:00:00Z"}, "segment_2": {"last_changed": "2023-10-15T00:00:00Z"}}, - {"segment_id": "segment_1", "last_changed": "2023-10-16T00:00:00Z"}, - {"segment_1": {"last_changed": "2023-10-16T00:00:00Z"}, "segment_2": {"last_changed": "2023-10-15T00:00:00Z"}}, - ), - ( - SegmentMembers, - {"segment_1": {"last_changed": "2023-10-15T00:00:00Z"}}, - {"segment_id": "segment_2", "last_changed": "2023-10-16T00:00:00Z"}, - {"segment_1": {"last_changed": "2023-10-15T00:00:00Z"}, "segment_2": {"last_changed": "2023-10-16T00:00:00Z"}}, - ) - ], - ids=[ - "Segments: no current_stream_state", - "Segments: latest_record's cursor > than current_stream_state for list_1", - "ListMembers: latest_record's cursor < current_stream_state for list_2", - "SegmentMembers: latest_record's cursor > current_stream_state for segment_1", - "SegmentMembers: no stream_state for current slice, new slice added to state" - ], -) -def test_list_child_get_updated_state(auth, stream_class, current_stream_state, latest_record, expected_state): - """ - Tests that the get_updated_state method for the shared MailChimpListSubStream class - correctly updates state only for its slice. - """ - segments_stream = stream_class(authenticator=auth) - updated_state = segments_stream.get_updated_state(current_stream_state, latest_record) - assert updated_state == expected_state - - -@pytest.mark.parametrize( - "stream_state, records, expected", - [ - # Test case 1: No stream state, all records should be yielded - ( - {}, - {"members": [ - {"id": 1, "segment_id": "segment_1", "last_changed": "2021-01-01T00:00:00Z"}, - {"id": 2, "segment_id": "segment_1", "last_changed": "2021-01-02T00:00:00Z"} - ]}, - [ - {"id": 1, "segment_id": "segment_1", "last_changed": "2021-01-01T00:00:00Z"}, - {"id": 2, "segment_id": "segment_1", "last_changed": "2021-01-02T00:00:00Z"} - ] - ), - - # Test case 2: Records older than stream state should be filtered out - ( - {"segment_1": {"last_changed": "2021-02-01T00:00:00Z"}}, - {"members": [ - {"id": 1, "segment_id": "segment_1", "last_changed": "2021-01-01T00:00:00Z"}, - {"id": 2, "segment_id": "segment_1", "last_changed": "2021-03-01T00:00:00Z"} - ]}, - [{"id": 2, "segment_id": "segment_1", "last_changed": "2021-03-01T00:00:00Z"}] - ), - - # Test case 3: Two lists in stream state, only state for segment_id_1 determines filtering - ( - {"segment_1": {"last_changed": "2021-01-02T00:00:00Z"}, "segment_2": {"last_changed": "2022-01-01T00:00:00Z"}}, - {"members": [ - {"id": 1, "segment_id": "segment_1", "last_changed": "2021-01-01T00:00:00Z"}, - {"id": 2, "segment_id": "segment_1", "last_changed": "2021-03-01T00:00:00Z"} - ]}, - [{"id": 2, "segment_id": "segment_1", "last_changed": "2021-03-01T00:00:00Z"}] - ), - ], - ids=[ - "No stream state, all records should be yielded", - "Record < stream state, should be filtered out", - "Record >= stream state, should be yielded", - ] -) -def test_segment_members_parse_response(auth, stream_state, records, expected): - segment_members_stream = SegmentMembers(authenticator=auth) - response = MagicMock() - response.json.return_value = records - parsed_records = list(segment_members_stream.parse_response(response, stream_state, stream_slice={"segment_id": "segment_1"})) - assert parsed_records == expected, f"Expected: {expected}, Actual: {parsed_records}" - - -@pytest.mark.parametrize( - "stream, record, expected_record", - [ - ( - SegmentMembers, - {"id": 1, "email_address": "a@gmail.com", "email_type": "html", "opt_timestamp": ""}, - {"id": 1, "email_address": "a@gmail.com", "email_type": "html", "opt_timestamp": None} - ), - ( - SegmentMembers, - {"id": 1, "email_address": "a@gmail.com", "email_type": "html", "opt_timestamp": "2022-01-01T00:00:00.000Z", "merge_fields": {"FNAME": "Bob", "LNAME": "", "ADDRESS": "", "PHONE": ""}}, - {"id": 1, "email_address": "a@gmail.com", "email_type": "html", "opt_timestamp": "2022-01-01T00:00:00.000Z", "merge_fields": {"FNAME": "Bob", "LNAME": None, "ADDRESS": None, "PHONE": None}} - ), - ( - Campaigns, - {"id": "1", "web_id": 2, "email_type": "html", "create_time": "2022-01-01T00:00:00.000Z", "send_time": ""}, - {"id": "1", "web_id": 2, "email_type": "html", "create_time": "2022-01-01T00:00:00.000Z", "send_time": None} - ), - ( - Reports, - {"id": "1", "type": "rss", "clicks": {"clicks_total": 1, "last_click": "2022-01-01T00:00:00Z"}, "opens": {"opens_total": 0, "last_open": ""}}, - {"id": "1", "type": "rss", "clicks": {"clicks_total": 1, "last_click": "2022-01-01T00:00:00Z"}, "opens": {"opens_total": 0, "last_open": None}} - ), - ( - Lists, - {"id": "1", "name": "Santa's List", "stats": {"last_sub_date": "2022-01-01T00:00:00Z", "last_unsub_date": ""}}, - {"id": "1", "name": "Santa's List", "stats": {"last_sub_date": "2022-01-01T00:00:00Z", "last_unsub_date": None}} - ) - ], - ids=[ - "segment_members: opt_timestamp nullified", - "segment_members: nested merge_fields nullified", - "campaigns: send_time nullified", - "reports: nested opens.last_open nullified", - "lists: stats.last_unsub_date nullified" - ] -) -def test_filter_empty_fields(auth, stream, record, expected_record): - """ - Tests that empty string values are converted to None. - """ - stream = stream(authenticator=auth) - assert stream.filter_empty_fields(record) == expected_record - - -def test_unsubscribes_stream_slices(requests_mock, unsubscribes_stream, campaigns_stream, mock_campaigns_response): - campaigns_url = campaigns_stream.url_base + campaigns_stream.path() - requests_mock.register_uri("GET", campaigns_url, json={"campaigns": mock_campaigns_response}) - - expected_slices = [{"campaign_id": "campaign_1"}, {"campaign_id": "campaign_2"}, {"campaign_id": "campaign_3"}] - slices = list(unsubscribes_stream.stream_slices(sync_mode=SyncMode.incremental)) - assert slices == expected_slices - - -@pytest.mark.parametrize( - "stream_state, expected_records", - [ - ( # Test case 1: all records >= state - {"campaign_1": {"timestamp": "2022-01-01T00:00:00Z"}}, - [ - {"campaign_id": "campaign_1", "email_id": "email_1", "timestamp": "2022-01-02T00:00:00Z"}, - {"campaign_id": "campaign_1", "email_id": "email_2", "timestamp": "2022-01-02T00:00:00Z"}, - {"campaign_id": "campaign_1", "email_id": "email_3", "timestamp": "2022-01-01T00:00:00Z"}, - {"campaign_id": "campaign_1", "email_id": "email_4", "timestamp": "2022-01-03T00:00:00Z"}, - ], - ), - ( # Test case 2: one record < state - {"campaign_1": {"timestamp": "2022-01-02T00:00:00Z"}}, - [ - {"campaign_id": "campaign_1", "email_id": "email_1", "timestamp": "2022-01-02T00:00:00Z"}, - {"campaign_id": "campaign_1", "email_id": "email_2", "timestamp": "2022-01-02T00:00:00Z"}, - {"campaign_id": "campaign_1", "email_id": "email_4", "timestamp": "2022-01-03T00:00:00Z"}, - ], - ), - ( # Test case 3: one record >= state - {"campaign_1": {"timestamp": "2022-01-03T00:00:00Z"}}, - [ - {"campaign_id": "campaign_1", "email_id": "email_4", "timestamp": "2022-01-03T00:00:00Z"}, - ], - ), - ( # Test case 4: no state, all records returned - {}, - [ - {"campaign_id": "campaign_1", "email_id": "email_1", "timestamp": "2022-01-02T00:00:00Z"}, - {"campaign_id": "campaign_1", "email_id": "email_2", "timestamp": "2022-01-02T00:00:00Z"}, - {"campaign_id": "campaign_1", "email_id": "email_3", "timestamp": "2022-01-01T00:00:00Z"}, - {"campaign_id": "campaign_1", "email_id": "email_4", "timestamp": "2022-01-03T00:00:00Z"}, - ], - ), - ], - ids=[ - "all records >= state", - "one record < state", - "one record >= state", - "no state, all records returned", - ], -) -def test_parse_response(stream_state, expected_records, unsubscribes_stream): - mock_response = MagicMock(spec=requests.Response) - mock_response.json.return_value = { - "unsubscribes": [ - {"campaign_id": "campaign_1", "email_id": "email_1", "timestamp": "2022-01-02T00:00:00Z"}, - {"campaign_id": "campaign_1", "email_id": "email_2", "timestamp": "2022-01-02T00:00:00Z"}, - {"campaign_id": "campaign_1", "email_id": "email_3", "timestamp": "2022-01-01T00:00:00Z"}, - {"campaign_id": "campaign_1", "email_id": "email_4", "timestamp": "2022-01-03T00:00:00Z"}, - ] - } - stream_slice = {"campaign_id": "campaign_1"} - records = list(unsubscribes_stream.parse_response(response=mock_response, stream_slice=stream_slice, stream_state=stream_state)) - assert records == expected_records - - -@pytest.mark.parametrize( - "latest_record, expected_updated_state", - [ - # Test case 1: latest_record > and updates the state of campaign_1 - ( - { - "email_id": "email_1", - "email_address": "address1@email.io", - "reason": "None given", - "timestamp": "2022-01-05T00:00:00Z", - "campaign_id": "campaign_1", - }, - { - "campaign_1": {"timestamp": "2022-01-05T00:00:00Z"}, - "campaign_2": {"timestamp": "2022-01-02T00:00:00Z"}, - "campaign_3": {"timestamp": "2022-01-03T00:00:00Z"}, - }, - ), - # Test case 2: latest_record > and updates the state of campaign_2 - ( - { - "email_id": "email_2", - "email_address": "address2@email.io", - "reason": "Inappropriate content", - "timestamp": "2022-01-05T00:00:00Z", - "campaign_id": "campaign_2", - }, - { - "campaign_1": {"timestamp": "2022-01-01T00:00:00Z"}, - "campaign_2": {"timestamp": "2022-01-05T00:00:00Z"}, - "campaign_3": {"timestamp": "2022-01-03T00:00:00Z"}, - }, - ), - # Test case 3: latest_record < and does not update the state of campaign_3 - ( - { - "email_id": "email_3", - "email_address": "address3@email.io", - "reason": "No longer interested", - "timestamp": "2021-01-01T00:00:00Z", - "campaign_id": "campaign_3", - }, - { - "campaign_1": {"timestamp": "2022-01-01T00:00:00Z"}, - "campaign_2": {"timestamp": "2022-01-02T00:00:00Z"}, - "campaign_3": {"timestamp": "2022-01-03T00:00:00Z"}, - }, - ), - # Test case 4: latest_record sets state campaign_4 - ( - { - "email_id": "email_4", - "email_address": "address4@email.io", - "reason": "No longer interested", - "timestamp": "2022-01-04T00:00:00Z", - "campaign_id": "campaign_4", - }, - { - "campaign_1": {"timestamp": "2022-01-01T00:00:00Z"}, - "campaign_2": {"timestamp": "2022-01-02T00:00:00Z"}, - "campaign_3": {"timestamp": "2022-01-03T00:00:00Z"}, - "campaign_4": {"timestamp": "2022-01-04T00:00:00Z"}, - }, - ), - ], - ids=[ - "latest_record > and updates the state of campaign_1", - "latest_record > and updates the state of campaign_2", - "latest_record < and does not update the state of campaign_3", - "latest_record sets state of campaign_4", - ], -) -def test_unsubscribes_get_updated_state(unsubscribes_stream, mock_unsubscribes_state, latest_record, expected_updated_state): - updated_state = unsubscribes_stream.get_updated_state(mock_unsubscribes_state, latest_record) - assert updated_state == expected_updated_state - - -@pytest.mark.parametrize( - "stream,url,status_code,response_content,expected_availability,expected_reason_substring", - [ - ( - Campaigns, - "https://some_dc.api.mailchimp.com/3.0/campaigns", - 403, - b'{"object": "error", "status": 403, "code": "restricted_resource"}', - False, - "Unable to read campaigns stream", - ), - ( - EmailActivity, - "https://some_dc.api.mailchimp.com/3.0/reports/123/email-activity", - 403, - b'{"object": "error", "status": 403, "code": "restricted_resource"}', - False, - "Unable to read email_activity stream", - ), - ( - Lists, - "https://some_dc.api.mailchimp.com/3.0/lists", - 200, - b'{ "lists": [{"id": "123", "date_created": "2022-01-01T00:00:00+000"}]}', - True, - None, - ), - ( - Lists, - "https://some_dc.api.mailchimp.com/3.0/lists", - 400, - b'{ "object": "error", "status": 404, "code": "invalid_action"}', - False, - None, - ), - ], - ids=[ - "Campaigns 403 error", - "EmailActivity 403 error", - "Lists 200 success", - "Lists 400 error", - ], -) -def test_403_error_handling( - auth, requests_mock, stream, url, status_code, response_content, expected_availability, expected_reason_substring -): - """ - Test that availability strategy flags streams with 403 error as unavailable - and returns appropriate message. - """ - - requests_mock.get(url=url, status_code=status_code, content=response_content) - - stream = stream(authenticator=auth) - - if stream.__class__.__name__ == "EmailActivity": - stream.stream_slices = MagicMock(return_value=[{"campaign_id": "123"}]) - - try: - is_available, reason = stream.check_availability(logger=logging.Logger, source=MagicMock()) - - assert is_available is expected_availability - - if expected_reason_substring: - assert expected_reason_substring in reason - else: - assert reason is None - - # Handle non-403 error - except HTTPError as e: - assert e.response.status_code == status_code - - -@pytest.mark.parametrize( - "stream, stream_slice, expected_endpoint", - [ - (Automations, {}, "automations"), - (Lists, {}, "lists"), - (Campaigns, {}, "campaigns"), - (EmailActivity, {"campaign_id": "123"}, "reports/123/email-activity"), - (InterestCategories, {"parent": {"id": "123"}}, "lists/123/interest-categories"), - (Interests, {"parent": {"list_id": "123", "id": "456"}}, "lists/123/interest-categories/456/interests"), - (ListMembers, {"list_id": "123"}, "lists/123/members"), - (Reports, {}, "reports"), - (SegmentMembers, {"list_id": "123", "segment_id": "456"}, "lists/123/segments/456/members"), - (Segments, {"list_id": "123"}, "lists/123/segments"), - (Tags, {"parent": {"id": "123"}}, "lists/123/tag-search"), - (Unsubscribes, {"campaign_id": "123"}, "reports/123/unsubscribed"), - ], - ids=[ - "Automations", - "Lists", - "Campaigns", - "EmailActivity", - "InterestCategories", - "Interests", - "ListMembers", - "Reports", - "SegmentMembers", - "Segments", - "Tags", - "Unsubscribes", - ], -) -def test_path(auth, stream, stream_slice, expected_endpoint): - """ - Test the path method for each stream. - """ - - # Add parent stream where necessary - if stream is InterestCategories or stream is Tags: - stream = stream(authenticator=auth, parent=Lists(authenticator=auth)) - elif stream is Interests: - stream = stream(authenticator=auth, parent=InterestCategories(authenticator=auth, parent=Lists(authenticator=auth))) - else: - stream = stream(authenticator=auth) - - endpoint = stream.path(stream_slice=stream_slice) - - assert endpoint == expected_endpoint, f"Stream {stream}: expected path '{expected_endpoint}', got '{endpoint}'" - - -@pytest.mark.parametrize( - "start_date, state_date, expected_return_value", - [ - ( - "2021-01-01T00:00:00.000Z", - "2020-01-01T00:00:00+00:00", - "2021-01-01T00:00:00Z" - ), - ( - "2021-01-01T00:00:00.000Z", - "2023-10-05T00:00:00+00:00", - "2023-10-05T00:00:00+00:00" - ), - ( - None, - "2022-01-01T00:00:00+00:00", - "2022-01-01T00:00:00+00:00" - ), - ( - "2020-01-01T00:00:00.000Z", - None, - "2020-01-01T00:00:00Z" - ), - ( - None, - None, - None - ) - ] -) -def test_get_filter_date(auth, start_date, state_date, expected_return_value): - """ - Tests that the get_filter_date method returns the correct date string - """ - stream = Campaigns(authenticator=auth, start_date=start_date) - result = stream.get_filter_date(start_date, state_date) - assert result == expected_return_value, f"Expected: {expected_return_value}, Actual: {result}" - - -@pytest.mark.parametrize( - "stream_class, records, filter_date, expected_return_value", - [ - ( - Unsubscribes, - [ - {"campaign_id": "campaign_1", "email_id": "email_1", "timestamp": "2022-01-02T00:00:00Z"}, - {"campaign_id": "campaign_1", "email_id": "email_2", "timestamp": "2022-01-04T00:00:00Z"}, - {"campaign_id": "campaign_1", "email_id": "email_3", "timestamp": "2022-01-03T00:00:00Z"}, - {"campaign_id": "campaign_1", "email_id": "email_4", "timestamp": "2022-01-01T00:00:00Z"}, - ], - "2022-01-02T12:00:00+00:00", - [ - {"campaign_id": "campaign_1", "email_id": "email_2", "timestamp": "2022-01-04T00:00:00Z"}, - {"campaign_id": "campaign_1", "email_id": "email_3", "timestamp": "2022-01-03T00:00:00Z"}, - ], - ), - ( - SegmentMembers, - [ - {"id": 1, "segment_id": "segment_1", "last_changed": "2021-01-04T00:00:00Z"}, - {"id": 2, "segment_id": "segment_1", "last_changed": "2021-01-01T00:00:00Z"}, - {"id": 3, "segment_id": "segment_1", "last_changed": "2021-01-03T00:00:00Z"}, - {"id": 4, "segment_id": "segment_1", "last_changed": "2021-01-02T00:00:00Z"}, - ], - None, - [ - {"id": 1, "segment_id": "segment_1", "last_changed": "2021-01-04T00:00:00Z"}, - {"id": 2, "segment_id": "segment_1", "last_changed": "2021-01-01T00:00:00Z"}, - {"id": 3, "segment_id": "segment_1", "last_changed": "2021-01-03T00:00:00Z"}, - {"id": 4, "segment_id": "segment_1", "last_changed": "2021-01-02T00:00:00Z"}, - ], - ) - ], - ids=[ - "Unsubscribes: filter_date is set, records filtered", - "SegmentMembers: filter_date is None, all records returned" - ] -) -def test_filter_old_records(auth, stream_class, records, filter_date, expected_return_value): - """ - Tests the logic for filtering old records in streams that do not support query_param filtering. - """ - stream = stream_class(authenticator=auth) - filtered_records = list(stream.filter_old_records(records, filter_date)) - assert filtered_records == expected_return_value diff --git a/airbyte-integrations/connectors/source-mailchimp/unit_tests/unit_test.py b/airbyte-integrations/connectors/source-mailchimp/unit_tests/unit_test.py deleted file mode 100644 index 0371987d8762a..0000000000000 --- a/airbyte-integrations/connectors/source-mailchimp/unit_tests/unit_test.py +++ /dev/null @@ -1,13 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from airbyte_cdk.logger import AirbyteLogger -from source_mailchimp import SourceMailchimp - - -def test_client_wrong_credentials(): - source = SourceMailchimp() - status, error = source.check_connection(logger=AirbyteLogger, config={"username": "Jonny", "apikey": "blah-blah"}) - assert not status diff --git a/airbyte-integrations/connectors/source-marketo/metadata.yaml b/airbyte-integrations/connectors/source-marketo/metadata.yaml index 534b879809997..e40a5401a6cb8 100644 --- a/airbyte-integrations/connectors/source-marketo/metadata.yaml +++ b/airbyte-integrations/connectors/source-marketo/metadata.yaml @@ -10,12 +10,13 @@ data: connectorSubtype: api connectorType: source definitionId: 9e0556f4-69df-4522-a3fb-03264d36b348 - dockerImageTag: 1.2.6 + dockerImageTag: 1.3.0 dockerRepository: airbyte/source-marketo documentationUrl: https://docs.airbyte.com/integrations/sources/marketo githubIssueLabel: source-marketo icon: marketo.svg license: ELv2 + maxSecondsBetweenMessages: 86400 name: Marketo remoteRegistries: pypi: diff --git a/airbyte-integrations/connectors/source-marketo/poetry.lock b/airbyte-integrations/connectors/source-marketo/poetry.lock index d478bc7103947..f460a88417c22 100644 --- a/airbyte-integrations/connectors/source-marketo/poetry.lock +++ b/airbyte-integrations/connectors/source-marketo/poetry.lock @@ -1,14 +1,14 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.58.8" +version = "0.72.1" description = "A framework for writing Airbyte Connectors." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte-cdk-0.58.8.tar.gz", hash = "sha256:80cfad673302802e0f5d485879f1bd2f3679a4e3b12b2af42bd7bb37a3991a71"}, - {file = "airbyte_cdk-0.58.8-py3-none-any.whl", hash = "sha256:5b0b19745e96ba3f20683c48530d58a00be48361dfa34ec3c38cef8da03ba330"}, + {file = "airbyte-cdk-0.72.1.tar.gz", hash = "sha256:1dbd0a11f3784cfdd5afa9f40315c9a6123e803be91f9f861642a78e7ee14cd9"}, + {file = "airbyte_cdk-0.72.1-py3-none-any.whl", hash = "sha256:849077805442286de99f589ecba4be82491a3d9d3f516ce1a8b0cbaf303db9a4"}, ] [package.dependencies] @@ -32,8 +32,8 @@ requests-cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] @@ -104,13 +104,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -303,13 +303,13 @@ test = ["pytest (>=6)"] [[package]] name = "faker" -version = "23.1.0" +version = "24.3.0" description = "Faker is a Python package that generates fake data for you." optional = false python-versions = ">=3.8" files = [ - {file = "Faker-23.1.0-py3-none-any.whl", hash = "sha256:60e89e5c0b584e285a7db05eceba35011a241954afdab2853cb246c8a56700a2"}, - {file = "Faker-23.1.0.tar.gz", hash = "sha256:b7f76bb1b2ac4cdc54442d955e36e477c387000f31ce46887fb9722a041be60b"}, + {file = "Faker-24.3.0-py3-none-any.whl", hash = "sha256:9978025e765ba79f8bf6154c9630a9c2b7f9c9b0f175d4ad5e04b19a82a8d8d6"}, + {file = "Faker-24.3.0.tar.gz", hash = "sha256:5fb5aa9749d09971e04a41281ae3ceda9414f683d4810a694f8a8eebb8f9edec"}, ] [package.dependencies] @@ -481,13 +481,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -729,13 +729,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -777,7 +777,6 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -835,13 +834,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -853,15 +852,15 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" @@ -884,19 +883,19 @@ test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "tes [[package]] name = "setuptools" -version = "69.0.3" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, - {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -922,13 +921,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -947,13 +946,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1058,4 +1057,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "9345aade7d55768682023c361cdcf0a5fd1c24460ac815588361db515dbbcb29" +content-hash = "77cf2713ed363005f2b939541221a97ad0658efdc820068bf7a5da721c3925fa" diff --git a/airbyte-integrations/connectors/source-marketo/pyproject.toml b/airbyte-integrations/connectors/source-marketo/pyproject.toml index c530ce9cb4ac9..9217485a60b86 100644 --- a/airbyte-integrations/connectors/source-marketo/pyproject.toml +++ b/airbyte-integrations/connectors/source-marketo/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "1.2.6" +version = "1.3.0" name = "source-marketo" description = "Source implementation for Marketo." authors = [ "Airbyte ",] @@ -17,7 +17,7 @@ include = "source_marketo" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "==0.58.8" +airbyte-cdk = "^0" [tool.poetry.scripts] source-marketo = "source_marketo.run:run" diff --git a/airbyte-integrations/connectors/source-microsoft-onedrive/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-microsoft-onedrive/integration_tests/abnormal_state.json index 1c86c5e27bee3..def1060e067de 100644 --- a/airbyte-integrations/connectors/source-microsoft-onedrive/integration_tests/abnormal_state.json +++ b/airbyte-integrations/connectors/source-microsoft-onedrive/integration_tests/abnormal_state.json @@ -6,10 +6,13 @@ "name": "test_csv" }, "stream_state": { - "_ab_source_file_last_modified": "2023-12-23T06:49:25.000000Z_Test_folder_2/TestFileOneDrive.csv", "history": { - "Test_folder_2/TestFileOneDrive.csv": "2023-12-23T06:49:25.000000Z" - } + "https://airbyte179.sharepoint.com/Shared%20Documents/Test_folder/TestFile.csv": "2023-11-17T13:52:35.000000Z", + "Test_folder_2/TestFileOneDrive.csv": "2023-11-23T06:49:25.000000Z", + "https://airbyte179.sharepoint.com/Shared%20Documents/Test_folder/Test_folder_2/TestFileSharePoint.csv": "2023-12-15T17:34:08.000000Z", + "https://airbyte179.sharepoint.com/Shared%20Documents/Test_folder/simple_test.csv": "2024-01-16T12:45:20.000000Z" + }, + "_ab_source_file_last_modified": "2024-01-16T12:45:20.000000Z_https://airbyte179.sharepoint.com/Shared%20Documents/Test_folder/simple_test.csv" } } }, @@ -20,10 +23,11 @@ "name": "test_unstructured" }, "stream_state": { - "_ab_source_file_last_modified": "2023-12-23T06:49:25.000000Z_simple_pdf_file.pdf", "history": { - "simple_pdf_file.pdf": "2023-12-23T06:49:25.000000Z" - } + "simple_pdf_file.pdf": "2023-12-13T11:24:38.000000Z", + "https://airbyte179.sharepoint.com/Shared%20Documents/Test_folder/Test_foler_2_1/simple_pdf_file.pdf": "2023-12-15T16:47:21.000000Z" + }, + "_ab_source_file_last_modified": "2023-12-15T16:47:21.000000Z_https://airbyte179.sharepoint.com/Shared%20Documents/Test_folder/Test_foler_2_1/simple_pdf_file.pdf" } } } diff --git a/airbyte-integrations/connectors/source-microsoft-onedrive/integration_tests/spec.json b/airbyte-integrations/connectors/source-microsoft-onedrive/integration_tests/spec.json index 24ee82201f5a1..e94d919d94931 100644 --- a/airbyte-integrations/connectors/source-microsoft-onedrive/integration_tests/spec.json +++ b/airbyte-integrations/connectors/source-microsoft-onedrive/integration_tests/spec.json @@ -52,7 +52,8 @@ }, "primary_key": { "title": "Primary Key", - "description": "The column or columns (for a composite key) that serves as the unique identifier of a record.", + "description": "The column or columns (for a composite key) that serves as the unique identifier of a record. If empty, the primary key will default to the parser's default primary key.", + "airbyte_hidden": true, "type": "string" }, "days_to_sync_if_history_is_full": { @@ -274,12 +275,46 @@ "const": "unstructured", "type": "string" }, - "skip_unprocessable_file_types": { - "title": "Skip Unprocessable File Types", - "description": "If true, skip files that cannot be parsed because of their file type and log a warning. If false, fail the sync. Corrupted files with valid file types will still result in a failed sync.", + "skip_unprocessable_files": { + "title": "Skip Unprocessable Files", + "description": "If true, skip files that cannot be parsed and pass the error message along as the _ab_source_file_parse_error field. If false, fail the sync.", "default": true, "always_show": true, "type": "boolean" + }, + "strategy": { + "title": "Parsing Strategy", + "description": "The strategy used to parse documents. `fast` extracts text directly from the document which doesn't work for all files. `ocr_only` is more reliable, but slower. `hi_res` is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf", + "default": "auto", + "always_show": true, + "order": 0, + "enum": ["auto", "fast", "ocr_only", "hi_res"], + "type": "string" + }, + "processing": { + "title": "Processing", + "description": "Processing configuration", + "default": { + "mode": "local" + }, + "type": "object", + "oneOf": [ + { + "title": "Local", + "type": "object", + "properties": { + "mode": { + "title": "Mode", + "default": "local", + "const": "local", + "enum": ["local"], + "type": "string" + } + }, + "description": "Process files locally, supporting `fast` and `ocr` modes. This is the default option.", + "required": ["mode"] + } + ] } }, "description": "Extract text from document formats (.pdf, .docx, .md, .pptx) and emit as one record per file.", @@ -400,20 +435,39 @@ "order": 2, "type": "string" }, + "search_scope": { + "title": "Search Scope", + "description": "Specifies the location(s) to search for files. Valid options are 'ACCESSIBLE_DRIVES' to search in the selected OneDrive drive, 'SHARED_ITEMS' for shared items the user has access to, and 'ALL' to search both.", + "default": "ALL", + "enum": ["ACCESSIBLE_DRIVES", "SHARED_ITEMS", "ALL"], + "order": 3, + "type": "string" + }, "folder_path": { "title": "Folder Path", - "description": "Path to folder of the Microsoft OneDrive drive where the file(s) exist.", - "order": 3, + "description": "Path to a specific folder within the drives to search for files. Leave empty to search all folders of the drives. This does not apply to shared items.", + "default": ".", + "order": 4, "type": "string" } }, - "required": ["streams", "credentials", "folder_path"] + "required": ["streams", "credentials"] }, "advanced_auth": { "auth_flow_type": "oauth2.0", "predicate_key": ["credentials", "auth_type"], "predicate_value": "Client", "oauth_config_specification": { + "oauth_user_input_from_connector_config_specification": { + "type": "object", + "additionalProperties": false, + "properties": { + "tenant_id": { + "type": "string", + "path_in_connector_config": ["credentials", "tenant_id"] + } + } + }, "complete_oauth_output_specification": { "type": "object", "additionalProperties": false, diff --git a/airbyte-integrations/connectors/source-microsoft-onedrive/metadata.yaml b/airbyte-integrations/connectors/source-microsoft-onedrive/metadata.yaml index dae4db06bebd6..c84600ea455e9 100644 --- a/airbyte-integrations/connectors/source-microsoft-onedrive/metadata.yaml +++ b/airbyte-integrations/connectors/source-microsoft-onedrive/metadata.yaml @@ -20,7 +20,7 @@ data: connectorSubtype: file connectorType: source definitionId: 01d1c685-fd4a-4837-8f4c-93fe5a0d2188 - dockerImageTag: 0.1.8 + dockerImageTag: 0.2.0 dockerRepository: airbyte/source-microsoft-onedrive githubIssueLabel: source-microsoft-onedrive icon: microsoft-onedrive.svg diff --git a/airbyte-integrations/connectors/source-microsoft-onedrive/poetry.lock b/airbyte-integrations/connectors/source-microsoft-onedrive/poetry.lock index f9e32cc272b6e..8709d07711bbe 100644 --- a/airbyte-integrations/connectors/source-microsoft-onedrive/poetry.lock +++ b/airbyte-integrations/connectors/source-microsoft-onedrive/poetry.lock @@ -1,14 +1,14 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.61.0" +version = "0.71.0" description = "A framework for writing Airbyte Connectors." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte-cdk-0.61.0.tar.gz", hash = "sha256:8beda008c5a177041ac02860a431ce7b1ecd00062a4a8f31fe6ac446cbed3e70"}, - {file = "airbyte_cdk-0.61.0-py3-none-any.whl", hash = "sha256:3f989bfe692c9519d61f9120ddb744ab82c432c2caf25374d4d6f5cdc374a1e9"}, + {file = "airbyte-cdk-0.71.0.tar.gz", hash = "sha256:110959840681b770e9378f9bcbca7a4b50c75b11de74e9fb809112407c4f50fa"}, + {file = "airbyte_cdk-0.71.0-py3-none-any.whl", hash = "sha256:730365365e826311d88dc0a8a5ebbd6227cc41b3dc342ef1525061b6d93f889c"}, ] [package.dependencies] @@ -28,7 +28,7 @@ markdown = {version = "*", optional = true, markers = "extra == \"file-based\""} pdf2image = {version = "1.16.3", optional = true, markers = "extra == \"file-based\""} "pdfminer.six" = {version = "20221105", optional = true, markers = "extra == \"file-based\""} pendulum = "<3.0.0" -pyarrow = {version = "12.0.1", optional = true, markers = "extra == \"file-based\""} +pyarrow = {version = ">=15.0.0,<15.1.0", optional = true, markers = "extra == \"file-based\""} pydantic = ">=1.10.8,<2.0.0" pyrate-limiter = ">=3.1.0,<3.2.0" pytesseract = {version = "0.3.10", optional = true, markers = "extra == \"file-based\""} @@ -44,8 +44,8 @@ unstructured = [ wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] @@ -566,6 +566,22 @@ lz4 = ["lz4"] snappy = ["python-snappy"] zstandard = ["zstandard"] +[[package]] +name = "filelock" +version = "3.13.1" +description = "A platform independent file lock." +optional = false +python-versions = ">=3.8" +files = [ + {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, + {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +typing = ["typing-extensions (>=4.8)"] + [[package]] name = "filetype" version = "1.2.0" @@ -577,6 +593,41 @@ files = [ {file = "filetype-1.2.0.tar.gz", hash = "sha256:66b56cd6474bf41d8c54660347d37afcc3f7d1970648de365c102ef77548aadb"}, ] +[[package]] +name = "fsspec" +version = "2024.3.0" +description = "File-system specification" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fsspec-2024.3.0-py3-none-any.whl", hash = "sha256:779001bd0122c9c4975cf03827d5e86c3afb914a3ae27040f15d341ab506a693"}, + {file = "fsspec-2024.3.0.tar.gz", hash = "sha256:f13a130c0ed07e15c4e1aeb0472a823e9c426b0b5792a1f40d902b0a71972d43"}, +] + +[package.extras] +abfs = ["adlfs"] +adl = ["adlfs"] +arrow = ["pyarrow (>=1)"] +dask = ["dask", "distributed"] +devel = ["pytest", "pytest-cov"] +dropbox = ["dropbox", "dropboxdrivefs", "requests"] +full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"] +fuse = ["fusepy"] +gcs = ["gcsfs"] +git = ["pygit2"] +github = ["requests"] +gs = ["gcsfs"] +gui = ["panel"] +hdfs = ["pyarrow (>=1)"] +http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)"] +libarchive = ["libarchive-c"] +oci = ["ocifs"] +s3 = ["s3fs"] +sftp = ["paramiko"] +smb = ["smbprotocol"] +ssh = ["paramiko"] +tqdm = ["tqdm"] + [[package]] name = "genson" version = "1.2.2" @@ -587,6 +638,39 @@ files = [ {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, ] +[[package]] +name = "huggingface-hub" +version = "0.21.4" +description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "huggingface_hub-0.21.4-py3-none-any.whl", hash = "sha256:df37c2c37fc6c82163cdd8a67ede261687d80d1e262526d6c0ce73b6b3630a7b"}, + {file = "huggingface_hub-0.21.4.tar.gz", hash = "sha256:e1f4968c93726565a80edf6dc309763c7b546d0cfe79aa221206034d50155531"}, +] + +[package.dependencies] +filelock = "*" +fsspec = ">=2023.5.0" +packaging = ">=20.9" +pyyaml = ">=5.1" +requests = "*" +tqdm = ">=4.42.1" +typing-extensions = ">=3.7.4.3" + +[package.extras] +all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "mypy (==1.5.1)", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.1.3)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +cli = ["InquirerPy (==0.3.4)"] +dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "mypy (==1.5.1)", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.1.3)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] +hf-transfer = ["hf-transfer (>=0.1.4)"] +inference = ["aiohttp", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)"] +quality = ["mypy (==1.5.1)", "ruff (>=0.1.3)"] +tensorflow = ["graphviz", "pydot", "tensorflow"] +testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] +torch = ["safetensors", "torch"] +typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"] + [[package]] name = "idna" version = "3.6" @@ -600,22 +684,22 @@ files = [ [[package]] name = "importlib-metadata" -version = "7.0.1" +version = "7.0.2" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"}, - {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"}, + {file = "importlib_metadata-7.0.2-py3-none-any.whl", hash = "sha256:f4bc4c0c070c490abf4ce96d715f68e95923320370efb66143df00199bb6c100"}, + {file = "importlib_metadata-7.0.2.tar.gz", hash = "sha256:198f568f3230878cb1b44fbd7975f87906c22336dba2e4a7f05278c281fbd792"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] [[package]] name = "iniconfig" @@ -811,13 +895,13 @@ source = ["Cython (>=3.0.7)"] [[package]] name = "markdown" -version = "3.5.2" +version = "3.6" description = "Python implementation of John Gruber's Markdown." optional = false python-versions = ">=3.8" files = [ - {file = "Markdown-3.5.2-py3-none-any.whl", hash = "sha256:d43323865d89fc0cb9b20c75fc8ad313af307cc087e84b657d9eec768eddeadd"}, - {file = "Markdown-3.5.2.tar.gz", hash = "sha256:e1ac7b3dc550ee80e602e71c1d168002f062e49f1b11e26a36264dafd4df2ef8"}, + {file = "Markdown-3.6-py3-none-any.whl", hash = "sha256:48f276f4d8cfb8ce6527c8f79e2ee29708508bf4d40aa410fbc3b4ee832c850f"}, + {file = "Markdown-3.6.tar.gz", hash = "sha256:ed4f41f6daecbeeb96e576ce414c41d2d876daa9a16cb35fa8ed8c2ddfad0224"}, ] [package.dependencies] @@ -1037,13 +1121,13 @@ ntlmprovider = ["requests-ntlm"] [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -1242,40 +1326,51 @@ files = [ [[package]] name = "pyarrow" -version = "12.0.1" +version = "15.0.1" description = "Python library for Apache Arrow" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pyarrow-12.0.1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:6d288029a94a9bb5407ceebdd7110ba398a00412c5b0155ee9813a40d246c5df"}, - {file = "pyarrow-12.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:345e1828efdbd9aa4d4de7d5676778aba384a2c3add896d995b23d368e60e5af"}, - {file = "pyarrow-12.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d6009fdf8986332b2169314da482baed47ac053311c8934ac6651e614deacd6"}, - {file = "pyarrow-12.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d3c4cbbf81e6dd23fe921bc91dc4619ea3b79bc58ef10bce0f49bdafb103daf"}, - {file = "pyarrow-12.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:cdacf515ec276709ac8042c7d9bd5be83b4f5f39c6c037a17a60d7ebfd92c890"}, - {file = "pyarrow-12.0.1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:749be7fd2ff260683f9cc739cb862fb11be376de965a2a8ccbf2693b098db6c7"}, - {file = "pyarrow-12.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6895b5fb74289d055c43db3af0de6e16b07586c45763cb5e558d38b86a91e3a7"}, - {file = "pyarrow-12.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1887bdae17ec3b4c046fcf19951e71b6a619f39fa674f9881216173566c8f718"}, - {file = "pyarrow-12.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2c9cb8eeabbadf5fcfc3d1ddea616c7ce893db2ce4dcef0ac13b099ad7ca082"}, - {file = "pyarrow-12.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:ce4aebdf412bd0eeb800d8e47db854f9f9f7e2f5a0220440acf219ddfddd4f63"}, - {file = "pyarrow-12.0.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:e0d8730c7f6e893f6db5d5b86eda42c0a130842d101992b581e2138e4d5663d3"}, - {file = "pyarrow-12.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:43364daec02f69fec89d2315f7fbfbeec956e0d991cbbef471681bd77875c40f"}, - {file = "pyarrow-12.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:051f9f5ccf585f12d7de836e50965b3c235542cc896959320d9776ab93f3b33d"}, - {file = "pyarrow-12.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:be2757e9275875d2a9c6e6052ac7957fbbfc7bc7370e4a036a9b893e96fedaba"}, - {file = "pyarrow-12.0.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:cf812306d66f40f69e684300f7af5111c11f6e0d89d6b733e05a3de44961529d"}, - {file = "pyarrow-12.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:459a1c0ed2d68671188b2118c63bac91eaef6fc150c77ddd8a583e3c795737bf"}, - {file = "pyarrow-12.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85e705e33eaf666bbe508a16fd5ba27ca061e177916b7a317ba5a51bee43384c"}, - {file = "pyarrow-12.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9120c3eb2b1f6f516a3b7a9714ed860882d9ef98c4b17edcdc91d95b7528db60"}, - {file = "pyarrow-12.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:c780f4dc40460015d80fcd6a6140de80b615349ed68ef9adb653fe351778c9b3"}, - {file = "pyarrow-12.0.1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:a3c63124fc26bf5f95f508f5d04e1ece8cc23a8b0af2a1e6ab2b1ec3fdc91b24"}, - {file = "pyarrow-12.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b13329f79fa4472324f8d32dc1b1216616d09bd1e77cfb13104dec5463632c36"}, - {file = "pyarrow-12.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb656150d3d12ec1396f6dde542db1675a95c0cc8366d507347b0beed96e87ca"}, - {file = "pyarrow-12.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6251e38470da97a5b2e00de5c6a049149f7b2bd62f12fa5dbb9ac674119ba71a"}, - {file = "pyarrow-12.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:3de26da901216149ce086920547dfff5cd22818c9eab67ebc41e863a5883bac7"}, - {file = "pyarrow-12.0.1.tar.gz", hash = "sha256:cce317fc96e5b71107bf1f9f184d5e54e2bd14bbf3f9a3d62819961f0af86fec"}, + {file = "pyarrow-15.0.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:c2ddb3be5ea938c329a84171694fc230b241ce1b6b0ff1a0280509af51c375fa"}, + {file = "pyarrow-15.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7543ea88a0ff72f8e6baaf9bfdbec2c62aeabdbede9e4a571c71cc3bc43b6302"}, + {file = "pyarrow-15.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1519e218a6941fc074e4501088d891afcb2adf77c236e03c34babcf3d6a0d1c7"}, + {file = "pyarrow-15.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28cafa86e1944761970d3b3fc0411b14ff9b5c2b73cd22aaf470d7a3976335f5"}, + {file = "pyarrow-15.0.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:be5c3d463e33d03eab496e1af7916b1d44001c08f0f458ad27dc16093a020638"}, + {file = "pyarrow-15.0.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:47b1eda15d3aa3f49a07b1808648e1397e5dc6a80a30bf87faa8e2d02dad7ac3"}, + {file = "pyarrow-15.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e524a31be7db22deebbbcf242b189063ab9a7652c62471d296b31bc6e3cae77b"}, + {file = "pyarrow-15.0.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:a476fefe8bdd56122fb0d4881b785413e025858803cc1302d0d788d3522b374d"}, + {file = "pyarrow-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:309e6191be385f2e220586bfdb643f9bb21d7e1bc6dd0a6963dc538e347b2431"}, + {file = "pyarrow-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83bc586903dbeb4365cbc72b602f99f70b96c5882e5dfac5278813c7d624ca3c"}, + {file = "pyarrow-15.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07e652daac6d8b05280cd2af31c0fb61a4490ec6a53dc01588014d9fa3fdbee9"}, + {file = "pyarrow-15.0.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:abad2e08652df153a72177ce20c897d083b0c4ebeec051239e2654ddf4d3c996"}, + {file = "pyarrow-15.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:cde663352bc83ad75ba7b3206e049ca1a69809223942362a8649e37bd22f9e3b"}, + {file = "pyarrow-15.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:1b6e237dd7a08482a8b8f3f6512d258d2460f182931832a8c6ef3953203d31e1"}, + {file = "pyarrow-15.0.1-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:7bd167536ee23192760b8c731d39b7cfd37914c27fd4582335ffd08450ff799d"}, + {file = "pyarrow-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7c08bb31eb2984ba5c3747d375bb522e7e536b8b25b149c9cb5e1c49b0ccb736"}, + {file = "pyarrow-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0f9c1d630ed2524bd1ddf28ec92780a7b599fd54704cd653519f7ff5aec177a"}, + {file = "pyarrow-15.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5186048493395220550bca7b524420471aac2d77af831f584ce132680f55c3df"}, + {file = "pyarrow-15.0.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:31dc30c7ec8958da3a3d9f31d6c3630429b2091ede0ecd0d989fd6bec129f0e4"}, + {file = "pyarrow-15.0.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:3f111a014fb8ac2297b43a74bf4495cc479a332908f7ee49cb7cbd50714cb0c1"}, + {file = "pyarrow-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:a6d1f7c15d7f68f08490d0cb34611497c74285b8a6bbeab4ef3fc20117310983"}, + {file = "pyarrow-15.0.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:9ad931b996f51c2f978ed517b55cb3c6078272fb4ec579e3da5a8c14873b698d"}, + {file = "pyarrow-15.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:738f6b53ab1c2f66b2bde8a1d77e186aeaab702d849e0dfa1158c9e2c030add3"}, + {file = "pyarrow-15.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c1c3fc16bc74e33bf8f1e5a212938ed8d88e902f372c4dac6b5bad328567d2f"}, + {file = "pyarrow-15.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1fa92512128f6c1b8dde0468c1454dd70f3bff623970e370d52efd4d24fd0be"}, + {file = "pyarrow-15.0.1-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:b4157f307c202cbbdac147d9b07447a281fa8e63494f7fc85081da351ec6ace9"}, + {file = "pyarrow-15.0.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:b75e7da26f383787f80ad76143b44844ffa28648fcc7099a83df1538c078d2f2"}, + {file = "pyarrow-15.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:3a99eac76ae14096c209850935057b9e8ce97a78397c5cde8724674774f34e5d"}, + {file = "pyarrow-15.0.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:dd532d3177e031e9b2d2df19fd003d0cc0520d1747659fcabbd4d9bb87de508c"}, + {file = "pyarrow-15.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ce8c89848fd37e5313fc2ce601483038ee5566db96ba0808d5883b2e2e55dc53"}, + {file = "pyarrow-15.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:862eac5e5f3b6477f7a92b2f27e560e1f4e5e9edfca9ea9da8a7478bb4abd5ce"}, + {file = "pyarrow-15.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f0ea3a29cd5cb99bf14c1c4533eceaa00ea8fb580950fb5a89a5c771a994a4e"}, + {file = "pyarrow-15.0.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:bb902f780cfd624b2e8fd8501fadab17618fdb548532620ef3d91312aaf0888a"}, + {file = "pyarrow-15.0.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:4f87757f02735a6bb4ad2e1b98279ac45d53b748d5baf52401516413007c6999"}, + {file = "pyarrow-15.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:efd3816c7fbfcbd406ac0f69873cebb052effd7cdc153ae5836d1b00845845d7"}, + {file = "pyarrow-15.0.1.tar.gz", hash = "sha256:21d812548d39d490e0c6928a7c663f37b96bf764034123d4b4ab4530ecc757a9"}, ] [package.dependencies] -numpy = ">=1.16.6" +numpy = ">=1.16.6,<2" [[package]] name = "pycparser" @@ -1588,7 +1683,6 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -1897,20 +1991,152 @@ six = "*" fixture = ["fixtures"] test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] +[[package]] +name = "safetensors" +version = "0.4.2" +description = "" +optional = false +python-versions = ">=3.7" +files = [ + {file = "safetensors-0.4.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:69d8bb8384dc2cb5b72c36c4d6980771b293d1a1377b378763f5e37b6bb8d133"}, + {file = "safetensors-0.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3d420e19fcef96d0067f4de4699682b4bbd85fc8fea0bd45fcd961fdf3e8c82c"}, + {file = "safetensors-0.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ca54742122fa3c4821754adb67318e1cd25c3a22bbf0c5520d5176e77a099ac"}, + {file = "safetensors-0.4.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b47aa643afdfd66cf7ce4c184092ae734e15d10aba2c2948f24270211801c3c"}, + {file = "safetensors-0.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d88a16bbc330f27e7f2d4caaf6fb061ad0b8a756ecc4033260b0378e128ce8a2"}, + {file = "safetensors-0.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9223b8ac21085db614a510eb3445e7083cae915a9202357555fa939695d4f57"}, + {file = "safetensors-0.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce6cb86133dc8930a7ab5e7438545a7f205f7a1cdd5aaf108c1d0da6bdcfbc2b"}, + {file = "safetensors-0.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b8a628e0ae2bbc334b62952c384aa5f41621d01850f8d67b04a96b9c39dd7326"}, + {file = "safetensors-0.4.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:88d6beb7f811a081e0e5f1d9669fdac816c45340c04b1eaf7ebfda0ce93ea403"}, + {file = "safetensors-0.4.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b57fc5b1b54cb12d8690a58a4cf4b7144730d4bde9d98aa0e1dab6295a1cd579"}, + {file = "safetensors-0.4.2-cp310-none-win32.whl", hash = "sha256:9d87a1c98803c16cf113b9ba03f07b2dce5e8eabfd1811a7f7323fcaa2a1bf47"}, + {file = "safetensors-0.4.2-cp310-none-win_amd64.whl", hash = "sha256:18930ec1d1ecb526d3d9835abc2489b8f1530877518f0c541e77ef0b7abcbd99"}, + {file = "safetensors-0.4.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:c5dd2ed788730ed56b415d1a11c62026b8cc8c573f55a2092afb3ab383e94fff"}, + {file = "safetensors-0.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cc41791b33efb9c83a59b731619f3d15f543dfe71f3a793cb8fbf9bd5d0d5d71"}, + {file = "safetensors-0.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c888bf71d5ca12a720f1ed87d407c4918afa022fb247a6546d8fac15b1f112b"}, + {file = "safetensors-0.4.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e6b2feb4b47226a16a792e6fac3f49442714884a3d4c1008569d5068a3941be9"}, + {file = "safetensors-0.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f41cc0ee4b838ae8f4d8364a1b162067693d11a3893f0863be8c228d40e4d0ee"}, + {file = "safetensors-0.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:51b7228e46c0a483c40ba4b9470dea00fb1ff8685026bb4766799000f6328ac2"}, + {file = "safetensors-0.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02697f8f2be8ca3c37a4958702dbdb1864447ef765e18b5328a1617022dcf164"}, + {file = "safetensors-0.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:27fd8f65cf7c80e4280cae1ee6bcd85c483882f6580821abe71ee1a0d3dcfca7"}, + {file = "safetensors-0.4.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c487b5f113b0924c9534a07dc034830fb4ef05ce9bb6d78cfe016a7dedfe281f"}, + {file = "safetensors-0.4.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:da7f6483f3fe67ff39b3a55552552c67930ea10a36e9f2539d36fc205273d767"}, + {file = "safetensors-0.4.2-cp311-none-win32.whl", hash = "sha256:52a7012f6cb9cb4a132760b6308daede18a9f5f8952ce08adc7c67a7d865c2d8"}, + {file = "safetensors-0.4.2-cp311-none-win_amd64.whl", hash = "sha256:4d1361a097ac430b310ce9eed8ed4746edee33ddafdfbb965debc8966fc34dc2"}, + {file = "safetensors-0.4.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:77af8aa0edcc2863760fd6febbfdb82e88fd75d0e60c1ce4ba57208ba5e4a89b"}, + {file = "safetensors-0.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846666c1c5a8c8888d2dfda8d3921cb9cb8e2c5f78365be756c11021e75a0a2a"}, + {file = "safetensors-0.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f4bfc7ea19b446bfad41510d4b4c76101698c00caaa8a332c8edd8090a412ef"}, + {file = "safetensors-0.4.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:233436fd30f27ffeb3c3780d0b84f496518868445c7a8db003639a649cc98453"}, + {file = "safetensors-0.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7a09237a795d11cd11f9dae505d170a29b5616151db1e10c14f892b11caadc7d"}, + {file = "safetensors-0.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de01c9a3a3b7b69627d624ff69d9f11d28ce9908eea2fb6245adafa4b1d43df6"}, + {file = "safetensors-0.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c1f25c5069ee42a5bcffdc66c300a407941edd73f3239e9fdefd26216407391"}, + {file = "safetensors-0.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7a73b3649456d09ca8506140d44484b63154a7378434cc1e8719f8056550b224"}, + {file = "safetensors-0.4.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e1625a8d07d046e968bd5c4961810aba1225984e4fb9243626f9d04a06ed3fee"}, + {file = "safetensors-0.4.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f74c86b25615cb24ad4cff765a2eefc09d71bf0fed97588cf585aad9c38fbb4"}, + {file = "safetensors-0.4.2-cp312-none-win32.whl", hash = "sha256:8523b9c5777d771bcde5c2389c03f1cdf7ebe8797432a1bd5e345efe25c55987"}, + {file = "safetensors-0.4.2-cp312-none-win_amd64.whl", hash = "sha256:dcff0243e1737a21f83d664c63fed89d1f532c23fc6830d0427279fabd789ccb"}, + {file = "safetensors-0.4.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:96ad3d7d472612e26cbe413922b4fb13933310f0511d346ea5cc9a1e856e52eb"}, + {file = "safetensors-0.4.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:88250922401b5ae4e37de929178caf46be47ed16c817b2237b81679bec07c120"}, + {file = "safetensors-0.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d40443554142fc0ab30652d5cc8554c4b7a613513bde00373e18afd5de8cbe4b"}, + {file = "safetensors-0.4.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:27f53f70106224d32d874aacecbeb4a6e4c5b16a1d2006d0e876d97229086d71"}, + {file = "safetensors-0.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cc068afe23734dfb26ce19db0a7877499ddf73b1d55ceb762417e8da4a1b05fb"}, + {file = "safetensors-0.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9be1918eb8d43a11a6f8806759fccfa0eeb0542b12924caba66af8a7800ad01a"}, + {file = "safetensors-0.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41911087d20a7bbd78cb4ad4f98aab0c431533107584df6635d8b54b99945573"}, + {file = "safetensors-0.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:50771c662aab909f31e94d048e76861fd027d66076ea773eef2e66c717766e24"}, + {file = "safetensors-0.4.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:13f2e57be007b7ea9329133d2399e6bdfcf1910f655440a4da17df3a45afcd30"}, + {file = "safetensors-0.4.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c772147e6395bc829842e0a98e1b30c67fe25d816299c28196488511d5a5e951"}, + {file = "safetensors-0.4.2-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:36239a0060b537a3e8c473df78cffee14c3ec4f51d5f1a853af99371a2fb2a35"}, + {file = "safetensors-0.4.2-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:d0cbb7664fad2c307f95195f951b7059e95dc23e0e1822e5978c8b500098543c"}, + {file = "safetensors-0.4.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b3e55adb6bd9dc1c2a341e72f48f075953fa35d173dd8e29a95b3b02d0d1462"}, + {file = "safetensors-0.4.2-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42f743b3cca863fba53ca57a193f510e5ec359b97f38c282437716b6768e4a25"}, + {file = "safetensors-0.4.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04e6af4a6dbeb06c4e6e7d46cf9c716cbc4cc5ef62584fd8a7c0fe558562df45"}, + {file = "safetensors-0.4.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a492ba21b5c8f14ee5ec9b20f42ba969e53ca1f909a4d04aad736b66a341dcc2"}, + {file = "safetensors-0.4.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b25b8233a1a85dc67e39838951cfb01595d792f3b7b644add63edb652992e030"}, + {file = "safetensors-0.4.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fd27e063fbdafe776f7b1714da59110e88f270e86db00788a8fd65f4eacfeba7"}, + {file = "safetensors-0.4.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1b6fa399f251bbeb52029bf5a0ac2878d7705dd3612a2f8895b48e9c11f0367d"}, + {file = "safetensors-0.4.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:de642d46b459e4afd5c2020b26c0d6d869a171ea00411897d5776c127cac74f0"}, + {file = "safetensors-0.4.2-cp37-none-win32.whl", hash = "sha256:77b72d17754c93bb68f3598182f14d78776e0b9b31682ca5bb2c7c5bd9a75267"}, + {file = "safetensors-0.4.2-cp37-none-win_amd64.whl", hash = "sha256:d36ee3244d461cd655aeef493792c3bccf4875282f8407fd9af99e9a41cf2530"}, + {file = "safetensors-0.4.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:16b6b3884f7876c6b3b23a742428223a7170a5a9dac819d8c12a1569422c4b5a"}, + {file = "safetensors-0.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ee25d311493fbbe0be9d395faee46e9d79e8948f461e388ff39e59875ed9a350"}, + {file = "safetensors-0.4.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eed8097968585cd752a1171f86fce9aa1d89a29033e5cd8bec5a502e29f6b7af"}, + {file = "safetensors-0.4.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:880e6865cf72cb67f9ab8d04a3c4b49dd95ae92fb1583929ce65aed94e1f685f"}, + {file = "safetensors-0.4.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91290f83daf80ce6d1a7f629b244443c200060a80f908b29d879021409e5ea94"}, + {file = "safetensors-0.4.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3517d568486ab3508a7acc360b82d7a4a3e26b86efdf210a9ecd9d233c40708a"}, + {file = "safetensors-0.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1f43a77eb38540f782999e5dc5645164fe9027d3f0194f6c9a5126168017efa"}, + {file = "safetensors-0.4.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b684d9818aa5d63fddc65f7d0151968037d255d91adf74eba82125b41c680aaa"}, + {file = "safetensors-0.4.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ab1f5d84185f9fefaf21413efb764e4908057b8a9a0b987ede890c353490fd70"}, + {file = "safetensors-0.4.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2bd979642e6c3a517ef4b84ff36c2fee4015664fea05a61154fc565978347553"}, + {file = "safetensors-0.4.2-cp38-none-win32.whl", hash = "sha256:11be6e7afed29e5a5628f0aa6214e34bc194da73f558dc69fc7d56e07037422a"}, + {file = "safetensors-0.4.2-cp38-none-win_amd64.whl", hash = "sha256:2f7a6e5d29bd2cc340cffaa391fa437b1be9d21a2bd8b8724d2875d13a6ef2a9"}, + {file = "safetensors-0.4.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a5a921b4fe6925f9942adff3ebae8c16e0487908c54586a5a42f35b59fd69794"}, + {file = "safetensors-0.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b691727228c28f2d82d8a92b2bc26e7a1f129ee40b2f2a3185b5974e038ed47c"}, + {file = "safetensors-0.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91ca1056decc4e981248786e87b2a202d4841ee5f99d433f1adf3d44d4bcfa0e"}, + {file = "safetensors-0.4.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:55969fd2e6fdb38dc221b0ab380668c21b0efa12a7562db9924759faa3c51757"}, + {file = "safetensors-0.4.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6ae429bfaecc10ab5fe78c93009b3d1656c1581da560041e700eadb497dbe7a4"}, + {file = "safetensors-0.4.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ff88f194fe4ac50b463a4a6f0c03af9ad72eb5d24ec6d6730af59522e37fedb"}, + {file = "safetensors-0.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a80cb48d0a447f8dd18e61813efa7d3f8f8d52edf0f05806abc0c59b83431f57"}, + {file = "safetensors-0.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b286fb7adfee70a4189898ac2342b8a67d5f493e6b21b0af89ca8eac1b967cbf"}, + {file = "safetensors-0.4.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ceeff9ddbab4f78738489eb6682867ae946178776f33699737b2129b5394dc1"}, + {file = "safetensors-0.4.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a26fae748a7488cb3aac381eddfa818c42052c87b5e689fb4c6e82ed58cec209"}, + {file = "safetensors-0.4.2-cp39-none-win32.whl", hash = "sha256:039a42ab33c9d68b39706fd38f1922ace26866eff246bf20271edb619f5f848b"}, + {file = "safetensors-0.4.2-cp39-none-win_amd64.whl", hash = "sha256:b3a3e1f5b85859e398773f064943b62a4059f225008a2a8ee6add1edcf77cacf"}, + {file = "safetensors-0.4.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:4e70d442ad17e8b153ef9095bf48ea64f15a66bf26dc2b6ca94660c154edbc24"}, + {file = "safetensors-0.4.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b90f1d9809caf4ff395951b4703295a68d12907f6945bbc3129e934ff8ae46f6"}, + {file = "safetensors-0.4.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c7ac9ad3728838006598e296b3ae9f27d80b489effd4685b92d97b3fc4c98f6"}, + {file = "safetensors-0.4.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5730d77e6ff7f4c7039e20913661ad0ea2f86c09e71c039e73dfdd1f394f08"}, + {file = "safetensors-0.4.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:44feb8cb156d6803dcd19fc6b81b27235f29b877660605a6ac35e1da7d64f0e4"}, + {file = "safetensors-0.4.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:523a241c33e7c827ab9a3a23760d75c7d062f43dfe55b6b019409f89b0fb52d1"}, + {file = "safetensors-0.4.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fb18300e8eb74291225214f26c9a8ae2110fd61a6c9b5a2ff4c4e0eb1bb9a998"}, + {file = "safetensors-0.4.2-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:fe5437ff9fb116e44f2ab558981249ae63f978392b4576e62fcfe167d353edbc"}, + {file = "safetensors-0.4.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9304a0934ced5a5d272f39de36291dc141dfc152d277f03fb4d65f2fb2ffa7c"}, + {file = "safetensors-0.4.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:160ba1b1e11cf874602c233ab80a14f588571d09556cbc3586900121d622b5ed"}, + {file = "safetensors-0.4.2-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04fcd6fcf7d9c13c7e5dc7e08de5e492ee4daa8f4ad74b4d8299d3eb0224292f"}, + {file = "safetensors-0.4.2-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:906d14c4a677d35834fb0f3a5455ef8305e1bba10a5e0f2e0f357b3d1ad989f2"}, + {file = "safetensors-0.4.2-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:df3fcdec0cd543084610d1f09c65cdb10fb3079f79bceddc092b0d187c6a265b"}, + {file = "safetensors-0.4.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5ca76f13fb1cef242ea3ad2cb37388e7d005994f42af8b44bee56ba48b2d45ce"}, + {file = "safetensors-0.4.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:278a1a3414c020785decdcd741c578725721274d2f9f787fcc930882e83b89cc"}, + {file = "safetensors-0.4.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05b5a461cc68ecd42d9d546e5e1268a39d8ede7934a68d1ce17c3c659cb829d6"}, + {file = "safetensors-0.4.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2341411412a41671d25e26bed59ec121e46bf4fadb8132895e610411c4b9681"}, + {file = "safetensors-0.4.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3497ac3895acf17c5f98197f1fa4769f09c5e7ede07fcb102f1c201e663e052c"}, + {file = "safetensors-0.4.2-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:01b5e71d3754d2201294f1eb7a6d59cce3a5702ff96d83d226571b2ca2183837"}, + {file = "safetensors-0.4.2-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3627dbd1ea488dd8046a0491de5087f3c0d641e7acc80c0189a33c69398f1cd1"}, + {file = "safetensors-0.4.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9d56f0ef53afad26ec54ceede78a43e9a23a076dadbbda7b44d304c591abf4c1"}, + {file = "safetensors-0.4.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b259ca73d42daf658a1bda463f1f83885ae4d93a60869be80d7f7dfcc9d8bbb5"}, + {file = "safetensors-0.4.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ebc3cd401e4eb54e7c0a70346be565e81942d9a41fafd5f4bf7ab3a55d10378"}, + {file = "safetensors-0.4.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5bc384a0309b706aa0425c93abb0390508a61bf029ce99c7d9df4220f25871a5"}, + {file = "safetensors-0.4.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:af2d8f7235d8a08fbccfb8394387890e7fa38942b349a94e6eff13c52ac98087"}, + {file = "safetensors-0.4.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0911315bbcc5289087d063c2c2c7ccd711ea97a7e557a7bce005ac2cf80146aa"}, + {file = "safetensors-0.4.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1efe31673be91832d73439a2af426743e1395fc9ef7b081914e9e1d567bd7b5f"}, + {file = "safetensors-0.4.2.tar.gz", hash = "sha256:acc85dcb09ec5e8aa787f588d7ad4d55c103f31e4ff060e17d92cc0e8b8cac73"}, +] + +[package.extras] +all = ["safetensors[jax]", "safetensors[numpy]", "safetensors[paddlepaddle]", "safetensors[pinned-tf]", "safetensors[quality]", "safetensors[testing]", "safetensors[torch]"] +dev = ["safetensors[all]"] +jax = ["flax (>=0.6.3)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)", "safetensors[numpy]"] +mlx = ["mlx (>=0.0.9)"] +numpy = ["numpy (>=1.21.6)"] +paddlepaddle = ["paddlepaddle (>=2.4.1)", "safetensors[numpy]"] +pinned-tf = ["safetensors[numpy]", "tensorflow (==2.11.0)"] +quality = ["black (==22.3)", "click (==8.0.4)", "flake8 (>=3.8.3)", "isort (>=5.5.4)"] +tensorflow = ["safetensors[numpy]", "tensorflow (>=2.11.0)"] +testing = ["h5py (>=3.7.0)", "huggingface_hub (>=0.12.1)", "hypothesis (>=6.70.2)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "safetensors[numpy]", "setuptools_rust (>=1.5.2)"] +torch = ["safetensors[numpy]", "torch (>=1.10)"] + [[package]] name = "setuptools" -version = "69.1.1" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.1-py3-none-any.whl", hash = "sha256:02fa291a0471b3a18b2b2481ed902af520c69e8ae0919c13da936542754b4c56"}, - {file = "setuptools-69.1.1.tar.gz", hash = "sha256:5c0806c7d9af348e6dd3777b4f4dbb42c7ad85b190104837488eab9a7c945cf8"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] @@ -1970,6 +2196,133 @@ files = [ [package.extras] widechars = ["wcwidth"] +[[package]] +name = "tokenizers" +version = "0.15.2" +description = "" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tokenizers-0.15.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:52f6130c9cbf70544287575a985bf44ae1bda2da7e8c24e97716080593638012"}, + {file = "tokenizers-0.15.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:054c1cc9c6d68f7ffa4e810b3d5131e0ba511b6e4be34157aa08ee54c2f8d9ee"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a9b9b070fdad06e347563b88c278995735292ded1132f8657084989a4c84a6d5"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea621a7eef4b70e1f7a4e84dd989ae3f0eeb50fc8690254eacc08acb623e82f1"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cf7fd9a5141634fa3aa8d6b7be362e6ae1b4cda60da81388fa533e0b552c98fd"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44f2a832cd0825295f7179eaf173381dc45230f9227ec4b44378322d900447c9"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8b9ec69247a23747669ec4b0ca10f8e3dfb3545d550258129bd62291aabe8605"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40b6a4c78da863ff26dbd5ad9a8ecc33d8a8d97b535172601cf00aee9d7ce9ce"}, + {file = "tokenizers-0.15.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5ab2a4d21dcf76af60e05af8063138849eb1d6553a0d059f6534357bce8ba364"}, + {file = "tokenizers-0.15.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a47acfac7e511f6bbfcf2d3fb8c26979c780a91e06fb5b9a43831b2c0153d024"}, + {file = "tokenizers-0.15.2-cp310-none-win32.whl", hash = "sha256:064ff87bb6acdbd693666de9a4b692add41308a2c0ec0770d6385737117215f2"}, + {file = "tokenizers-0.15.2-cp310-none-win_amd64.whl", hash = "sha256:3b919afe4df7eb6ac7cafd2bd14fb507d3f408db7a68c43117f579c984a73843"}, + {file = "tokenizers-0.15.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:89cd1cb93e4b12ff39bb2d626ad77e35209de9309a71e4d3d4672667b4b256e7"}, + {file = "tokenizers-0.15.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cfed5c64e5be23d7ee0f0e98081a25c2a46b0b77ce99a4f0605b1ec43dd481fa"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a907d76dcfda37023ba203ab4ceeb21bc5683436ebefbd895a0841fd52f6f6f2"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20ea60479de6fc7b8ae756b4b097572372d7e4032e2521c1bbf3d90c90a99ff0"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:48e2b9335be2bc0171df9281385c2ed06a15f5cf121c44094338306ab7b33f2c"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:112a1dd436d2cc06e6ffdc0b06d55ac019a35a63afd26475205cb4b1bf0bfbff"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4620cca5c2817177ee8706f860364cc3a8845bc1e291aaf661fb899e5d1c45b0"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccd73a82751c523b3fc31ff8194702e4af4db21dc20e55b30ecc2079c5d43cb7"}, + {file = "tokenizers-0.15.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:107089f135b4ae7817affe6264f8c7a5c5b4fd9a90f9439ed495f54fcea56fb4"}, + {file = "tokenizers-0.15.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0ff110ecc57b7aa4a594396525a3451ad70988e517237fe91c540997c4e50e29"}, + {file = "tokenizers-0.15.2-cp311-none-win32.whl", hash = "sha256:6d76f00f5c32da36c61f41c58346a4fa7f0a61be02f4301fd30ad59834977cc3"}, + {file = "tokenizers-0.15.2-cp311-none-win_amd64.whl", hash = "sha256:cc90102ed17271cf0a1262babe5939e0134b3890345d11a19c3145184b706055"}, + {file = "tokenizers-0.15.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f86593c18d2e6248e72fb91c77d413a815153b8ea4e31f7cd443bdf28e467670"}, + {file = "tokenizers-0.15.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0774bccc6608eca23eb9d620196687c8b2360624619623cf4ba9dc9bd53e8b51"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d0222c5b7c9b26c0b4822a82f6a7011de0a9d3060e1da176f66274b70f846b98"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3835738be1de66624fff2f4f6f6684775da4e9c00bde053be7564cbf3545cc66"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0143e7d9dcd811855c1ce1ab9bf5d96d29bf5e528fd6c7824d0465741e8c10fd"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db35825f6d54215f6b6009a7ff3eedee0848c99a6271c870d2826fbbedf31a38"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f5e64b0389a2be47091d8cc53c87859783b837ea1a06edd9d8e04004df55a5c"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e0480c452217edd35eca56fafe2029fb4d368b7c0475f8dfa3c5c9c400a7456"}, + {file = "tokenizers-0.15.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a33ab881c8fe70474980577e033d0bc9a27b7ab8272896e500708b212995d834"}, + {file = "tokenizers-0.15.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a308a607ca9de2c64c1b9ba79ec9a403969715a1b8ba5f998a676826f1a7039d"}, + {file = "tokenizers-0.15.2-cp312-none-win32.whl", hash = "sha256:b8fcfa81bcb9447df582c5bc96a031e6df4da2a774b8080d4f02c0c16b42be0b"}, + {file = "tokenizers-0.15.2-cp312-none-win_amd64.whl", hash = "sha256:38d7ab43c6825abfc0b661d95f39c7f8af2449364f01d331f3b51c94dcff7221"}, + {file = "tokenizers-0.15.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:38bfb0204ff3246ca4d5e726e8cc8403bfc931090151e6eede54d0e0cf162ef0"}, + {file = "tokenizers-0.15.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9c861d35e8286a53e06e9e28d030b5a05bcbf5ac9d7229e561e53c352a85b1fc"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:936bf3842db5b2048eaa53dade907b1160f318e7c90c74bfab86f1e47720bdd6"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:620beacc3373277700d0e27718aa8b25f7b383eb8001fba94ee00aeea1459d89"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2735ecbbf37e52db4ea970e539fd2d450d213517b77745114f92867f3fc246eb"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:473c83c5e2359bb81b0b6fde870b41b2764fcdd36d997485e07e72cc3a62264a"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:968fa1fb3c27398b28a4eca1cbd1e19355c4d3a6007f7398d48826bbe3a0f728"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:865c60ae6eaebdde7da66191ee9b7db52e542ed8ee9d2c653b6d190a9351b980"}, + {file = "tokenizers-0.15.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7c0d8b52664ab2d4a8d6686eb5effc68b78608a9008f086a122a7b2996befbab"}, + {file = "tokenizers-0.15.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:f33dfbdec3784093a9aebb3680d1f91336c56d86cc70ddf88708251da1fe9064"}, + {file = "tokenizers-0.15.2-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:d44ba80988ff9424e33e0a49445072ac7029d8c0e1601ad25a0ca5f41ed0c1d6"}, + {file = "tokenizers-0.15.2-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:dce74266919b892f82b1b86025a613956ea0ea62a4843d4c4237be2c5498ed3a"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0ef06b9707baeb98b316577acb04f4852239d856b93e9ec3a299622f6084e4be"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c73e2e74bbb07910da0d37c326869f34113137b23eadad3fc00856e6b3d9930c"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4eeb12daf02a59e29f578a865f55d87cd103ce62bd8a3a5874f8fdeaa82e336b"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ba9f6895af58487ca4f54e8a664a322f16c26bbb442effd01087eba391a719e"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ccec77aa7150e38eec6878a493bf8c263ff1fa8a62404e16c6203c64c1f16a26"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3f40604f5042ff210ba82743dda2b6aa3e55aa12df4e9f2378ee01a17e2855e"}, + {file = "tokenizers-0.15.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5645938a42d78c4885086767c70923abad047163d809c16da75d6b290cb30bbe"}, + {file = "tokenizers-0.15.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:05a77cbfebe28a61ab5c3891f9939cc24798b63fa236d84e5f29f3a85a200c00"}, + {file = "tokenizers-0.15.2-cp37-none-win32.whl", hash = "sha256:361abdc068e8afe9c5b818769a48624687fb6aaed49636ee39bec4e95e1a215b"}, + {file = "tokenizers-0.15.2-cp37-none-win_amd64.whl", hash = "sha256:7ef789f83eb0f9baeb4d09a86cd639c0a5518528f9992f38b28e819df397eb06"}, + {file = "tokenizers-0.15.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4fe1f74a902bee74a3b25aff180fbfbf4f8b444ab37c4d496af7afd13a784ed2"}, + {file = "tokenizers-0.15.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c4b89038a684f40a6b15d6b09f49650ac64d951ad0f2a3ea9169687bbf2a8ba"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d05a1b06f986d41aed5f2de464c003004b2df8aaf66f2b7628254bcbfb72a438"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:508711a108684111ec8af89d3a9e9e08755247eda27d0ba5e3c50e9da1600f6d"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:daa348f02d15160cb35439098ac96e3a53bacf35885072611cd9e5be7d333daa"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:494fdbe5932d3416de2a85fc2470b797e6f3226c12845cadf054dd906afd0442"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2d60f5246f4da9373f75ff18d64c69cbf60c3bca597290cea01059c336d2470"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93268e788825f52de4c7bdcb6ebc1fcd4a5442c02e730faa9b6b08f23ead0e24"}, + {file = "tokenizers-0.15.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6fc7083ab404019fc9acafe78662c192673c1e696bd598d16dc005bd663a5cf9"}, + {file = "tokenizers-0.15.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:41e39b41e5531d6b2122a77532dbea60e171ef87a3820b5a3888daa847df4153"}, + {file = "tokenizers-0.15.2-cp38-none-win32.whl", hash = "sha256:06cd0487b1cbfabefb2cc52fbd6b1f8d4c37799bd6c6e1641281adaa6b2504a7"}, + {file = "tokenizers-0.15.2-cp38-none-win_amd64.whl", hash = "sha256:5179c271aa5de9c71712e31cb5a79e436ecd0d7532a408fa42a8dbfa4bc23fd9"}, + {file = "tokenizers-0.15.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:82f8652a74cc107052328b87ea8b34291c0f55b96d8fb261b3880216a9f9e48e"}, + {file = "tokenizers-0.15.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:02458bee6f5f3139f1ebbb6d042b283af712c0981f5bc50edf771d6b762d5e4f"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c9a09cd26cca2e1c349f91aa665309ddb48d71636370749414fbf67bc83c5343"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:158be8ea8554e5ed69acc1ce3fbb23a06060bd4bbb09029431ad6b9a466a7121"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ddba9a2b0c8c81633eca0bb2e1aa5b3a15362b1277f1ae64176d0f6eba78ab1"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ef5dd1d39797044642dbe53eb2bc56435308432e9c7907728da74c69ee2adca"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:454c203164e07a860dbeb3b1f4a733be52b0edbb4dd2e5bd75023ffa8b49403a"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cf6b7f1d4dc59af960e6ffdc4faffe6460bbfa8dce27a58bf75755ffdb2526d"}, + {file = "tokenizers-0.15.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2ef09bbc16519f6c25d0c7fc0c6a33a6f62923e263c9d7cca4e58b8c61572afb"}, + {file = "tokenizers-0.15.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c9a2ebdd2ad4ec7a68e7615086e633857c85e2f18025bd05d2a4399e6c5f7169"}, + {file = "tokenizers-0.15.2-cp39-none-win32.whl", hash = "sha256:918fbb0eab96fe08e72a8c2b5461e9cce95585d82a58688e7f01c2bd546c79d0"}, + {file = "tokenizers-0.15.2-cp39-none-win_amd64.whl", hash = "sha256:524e60da0135e106b254bd71f0659be9f89d83f006ea9093ce4d1fab498c6d0d"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6a9b648a58281c4672212fab04e60648fde574877d0139cd4b4f93fe28ca8944"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:7c7d18b733be6bbca8a55084027f7be428c947ddf871c500ee603e375013ffba"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:13ca3611de8d9ddfbc4dc39ef54ab1d2d4aaa114ac8727dfdc6a6ec4be017378"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:237d1bf3361cf2e6463e6c140628e6406766e8b27274f5fcc62c747ae3c6f094"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67a0fe1e49e60c664915e9fb6b0cb19bac082ab1f309188230e4b2920230edb3"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4e022fe65e99230b8fd89ebdfea138c24421f91c1a4f4781a8f5016fd5cdfb4d"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d857be2df69763362ac699f8b251a8cd3fac9d21893de129bc788f8baaef2693"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:708bb3e4283177236309e698da5fcd0879ce8fd37457d7c266d16b550bcbbd18"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:64c35e09e9899b72a76e762f9854e8750213f67567787d45f37ce06daf57ca78"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1257f4394be0d3b00de8c9e840ca5601d0a4a8438361ce9c2b05c7d25f6057b"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02272fe48280e0293a04245ca5d919b2c94a48b408b55e858feae9618138aeda"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dc3ad9ebc76eabe8b1d7c04d38be884b8f9d60c0cdc09b0aa4e3bcf746de0388"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:32e16bdeffa7c4f46bf2152172ca511808b952701d13e7c18833c0b73cb5c23f"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:fb16ba563d59003028b678d2361a27f7e4ae0ab29c7a80690efa20d829c81fdb"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:2277c36d2d6cdb7876c274547921a42425b6810d38354327dd65a8009acf870c"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1cf75d32e8d250781940d07f7eece253f2fe9ecdb1dc7ba6e3833fa17b82fcbc"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1b3b31884dc8e9b21508bb76da80ebf7308fdb947a17affce815665d5c4d028"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b10122d8d8e30afb43bb1fe21a3619f62c3e2574bff2699cf8af8b0b6c5dc4a3"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d88b96ff0fe8e91f6ef01ba50b0d71db5017fa4e3b1d99681cec89a85faf7bf7"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:37aaec5a52e959892870a7c47cef80c53797c0db9149d458460f4f31e2fb250e"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e2ea752f2b0fe96eb6e2f3adbbf4d72aaa1272079b0dfa1145507bd6a5d537e6"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:4b19a808d8799fda23504a5cd31d2f58e6f52f140380082b352f877017d6342b"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:64c86e5e068ac8b19204419ed8ca90f9d25db20578f5881e337d203b314f4104"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de19c4dc503c612847edf833c82e9f73cd79926a384af9d801dcf93f110cea4e"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea09acd2fe3324174063d61ad620dec3bcf042b495515f27f638270a7d466e8b"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:cf27fd43472e07b57cf420eee1e814549203d56de00b5af8659cb99885472f1f"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:7ca22bd897537a0080521445d91a58886c8c04084a6a19e6c78c586e0cfa92a5"}, + {file = "tokenizers-0.15.2.tar.gz", hash = "sha256:e6e9c6e019dd5484be5beafc775ae6c925f4c69a3487040ed09b45e13df2cb91"}, +] + +[package.dependencies] +huggingface_hub = ">=0.16.4,<1.0" + +[package.extras] +dev = ["tokenizers[testing]"] +docs = ["setuptools_rust", "sphinx", "sphinx_rtd_theme"] +testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"] + [[package]] name = "toml" version = "0.10.2" @@ -2001,6 +2354,74 @@ notebook = ["ipywidgets (>=6)"] slack = ["slack-sdk"] telegram = ["requests"] +[[package]] +name = "transformers" +version = "4.38.2" +description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "transformers-4.38.2-py3-none-any.whl", hash = "sha256:c4029cb9f01b3dd335e52f364c52d2b37c65b4c78e02e6a08b1919c5c928573e"}, + {file = "transformers-4.38.2.tar.gz", hash = "sha256:c5fc7ad682b8a50a48b2a4c05d4ea2de5567adb1bdd00053619dbe5960857dd5"}, +] + +[package.dependencies] +filelock = "*" +huggingface-hub = ">=0.19.3,<1.0" +numpy = ">=1.17" +packaging = ">=20.0" +pyyaml = ">=5.1" +regex = "!=2019.12.17" +requests = "*" +safetensors = ">=0.4.1" +tokenizers = ">=0.14,<0.19" +tqdm = ">=4.27" + +[package.extras] +accelerate = ["accelerate (>=0.21.0)"] +agents = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "datasets (!=2.5.0)", "diffusers", "opencv-python", "sentencepiece (>=0.1.91,!=0.1.92)", "torch"] +all = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune] (>=2.7.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timm", "tokenizers (>=0.14,<0.19)", "torch", "torchaudio", "torchvision"] +audio = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] +codecarbon = ["codecarbon (==1.2.0)"] +deepspeed = ["accelerate (>=0.21.0)", "deepspeed (>=0.9.3)"] +deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.21.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "deepspeed (>=0.9.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "optuna", "parameterized", "protobuf", "psutil", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] +dev = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "decord (==0.6.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flax (>=0.4.1,<=0.7.0)", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "timm", "tokenizers (>=0.14,<0.19)", "torch", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] +dev-tensorflow = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "tokenizers (>=0.14,<0.19)", "urllib3 (<2.0.0)"] +dev-torch = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "timeout-decorator", "timm", "tokenizers (>=0.14,<0.19)", "torch", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] +docs = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "hf-doc-builder", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune] (>=2.7.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timm", "tokenizers (>=0.14,<0.19)", "torch", "torchaudio", "torchvision"] +docs-specific = ["hf-doc-builder"] +flax = ["flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "optax (>=0.0.8,<=0.1.4)"] +flax-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] +ftfy = ["ftfy"] +integrations = ["optuna", "ray[tune] (>=2.7.0)", "sigopt"] +ja = ["fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "rhoknp (>=1.1.0,<1.3.1)", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] +modelcreation = ["cookiecutter (==1.7.3)"] +natten = ["natten (>=0.14.6,<0.15.0)"] +onnx = ["onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "tf2onnx"] +onnxruntime = ["onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)"] +optuna = ["optuna"] +quality = ["GitPython (<3.1.19)", "datasets (!=2.5.0)", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "ruff (==0.1.5)", "urllib3 (<2.0.0)"] +ray = ["ray[tune] (>=2.7.0)"] +retrieval = ["datasets (!=2.5.0)", "faiss-cpu"] +sagemaker = ["sagemaker (>=2.31.0)"] +sentencepiece = ["protobuf", "sentencepiece (>=0.1.91,!=0.1.92)"] +serving = ["fastapi", "pydantic", "starlette", "uvicorn"] +sigopt = ["sigopt"] +sklearn = ["scikit-learn"] +speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] +testing = ["GitPython (<3.1.19)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "parameterized", "protobuf", "psutil", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "tensorboard", "timeout-decorator"] +tf = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx"] +tf-cpu = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow-cpu (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx"] +tf-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] +timm = ["timm"] +tokenizers = ["tokenizers (>=0.14,<0.19)"] +torch = ["accelerate (>=0.21.0)", "torch"] +torch-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] +torch-vision = ["Pillow (>=10.0.1,<=15.0)", "torchvision"] +torchhub = ["filelock", "huggingface-hub (>=0.19.3,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.14,<0.19)", "torch", "tqdm (>=4.27)"] +video = ["av (==9.2.0)", "decord (==0.6.0)"] +vision = ["Pillow (>=10.0.1,<=15.0)"] + [[package]] name = "typing-extensions" version = "4.10.0" @@ -2259,20 +2680,20 @@ files = [ [[package]] name = "zipp" -version = "3.17.0" +version = "3.18.1" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, - {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, + {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, + {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "eee8676fc20f015e8f2496c3cb4c46ef8e8d81d828f49466448868efadb0b53a" +content-hash = "a681f27b61c03298e227dda475b28a81ba0d6a98cec9ea3b028dace04b992bcc" diff --git a/airbyte-integrations/connectors/source-microsoft-onedrive/pyproject.toml b/airbyte-integrations/connectors/source-microsoft-onedrive/pyproject.toml index 234d789c1ec4e..4f40e46627be5 100644 --- a/airbyte-integrations/connectors/source-microsoft-onedrive/pyproject.toml +++ b/airbyte-integrations/connectors/source-microsoft-onedrive/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "0.1.8" +version = "0.2.0" name = "source-microsoft-onedrive" description = "Source implementation for Microsoft OneDrive." authors = [ "Airbyte ",] @@ -20,13 +20,15 @@ python = "^3.9,<3.12" Office365-REST-Python-Client = "==2.5.5" smart-open = "==6.4.0" msal = "==1.25.0" +# override transitive dependency that had a vulnerability https://nvd.nist.gov/vuln/detail/CVE-2023-6730 +transformers = "4.38.2" [tool.poetry.scripts] source-microsoft-onedrive = "source_microsoft_onedrive.run:run" [tool.poetry.dependencies.airbyte-cdk] extras = [ "file-based",] -version = "==0.61.0" +version = "^0" [tool.poetry.group.dev.dependencies] requests-mock = "^1.11.0" diff --git a/airbyte-integrations/connectors/source-microsoft-onedrive/source_microsoft_onedrive/spec.py b/airbyte-integrations/connectors/source-microsoft-onedrive/source_microsoft_onedrive/spec.py index 180993a685c7f..b5bc8890dacce 100644 --- a/airbyte-integrations/connectors/source-microsoft-onedrive/source_microsoft_onedrive/spec.py +++ b/airbyte-integrations/connectors/source-microsoft-onedrive/source_microsoft_onedrive/spec.py @@ -89,8 +89,20 @@ class Config: drive_name: Optional[str] = Field( title="Drive Name", description="Name of the Microsoft OneDrive drive where the file(s) exist.", default="OneDrive", order=2 ) + + search_scope: str = Field( + title="Search Scope", + description="Specifies the location(s) to search for files. Valid options are 'ACCESSIBLE_DRIVES' to search in the selected OneDrive drive, 'SHARED_ITEMS' for shared items the user has access to, and 'ALL' to search both.", + default="ALL", + enum=["ACCESSIBLE_DRIVES", "SHARED_ITEMS", "ALL"], + order=3, + ) + folder_path: str = Field( - title="Folder Path", description="Path to folder of the Microsoft OneDrive drive where the file(s) exist.", order=3 + title="Folder Path", + description="Path to a specific folder within the drives to search for files. Leave empty to search all folders of the drives. This does not apply to shared items.", + order=4, + default=".", ) @classmethod diff --git a/airbyte-integrations/connectors/source-microsoft-onedrive/source_microsoft_onedrive/stream_reader.py b/airbyte-integrations/connectors/source-microsoft-onedrive/source_microsoft_onedrive/stream_reader.py index 1fbd5d665bb99..d60985c838ed4 100644 --- a/airbyte-integrations/connectors/source-microsoft-onedrive/source_microsoft_onedrive/stream_reader.py +++ b/airbyte-integrations/connectors/source-microsoft-onedrive/source_microsoft_onedrive/stream_reader.py @@ -3,10 +3,12 @@ # import logging +from datetime import datetime from functools import lru_cache from io import IOBase -from typing import Iterable, List, Optional +from typing import Iterable, List, Optional, Tuple +import requests import smart_open from airbyte_cdk.sources.file_based.file_based_stream_reader import AbstractFileBasedStreamReader, FileReadMode from airbyte_cdk.sources.file_based.remote_file import RemoteFile @@ -75,14 +77,30 @@ class SourceMicrosoftOneDriveStreamReader(AbstractFileBasedStreamReader): def __init__(self): super().__init__() + self._auth_client = None + self._one_drive_client = None @property def config(self) -> SourceMicrosoftOneDriveSpec: return self._config @property - def one_drive_client(self) -> SourceMicrosoftOneDriveSpec: - return SourceMicrosoftOneDriveClient(self._config).client + def auth_client(self): + # Lazy initialization of the auth_client + if self._auth_client is None: + self._auth_client = SourceMicrosoftOneDriveClient(self._config) + return self._auth_client + + @property + def one_drive_client(self): + # Lazy initialization of the one_drive_client + if self._one_drive_client is None: + self._one_drive_client = self.auth_client.client + return self._one_drive_client + + def get_access_token(self): + # Directly fetch a new access token from the auth_client each time it's called + return self.auth_client._get_access_token()["access_token"] @config.setter def config(self, value: SourceMicrosoftOneDriveSpec): @@ -95,6 +113,81 @@ def config(self, value: SourceMicrosoftOneDriveSpec): assert isinstance(value, SourceMicrosoftOneDriveSpec) self._config = value + @property + @lru_cache(maxsize=None) + def drives(self): + """ + Retrieves and caches OneDrive drives, including the user's drive based on authentication type. + """ + drives = self.one_drive_client.drives.get().execute_query() + + if self.config.credentials.auth_type == "Client": + my_drive = self.one_drive_client.me.drive.get().execute_query() + else: + my_drive = ( + self.one_drive_client.users.get_by_principal_name(self.config.credentials.user_principal_name).drive.get().execute_query() + ) + + drives.add_child(my_drive) + + # filter only onedrive drives + drives = list(filter(lambda drive: drive.drive_type in ["personal", "business"], drives)) + + return drives + + def _get_shared_drive_object(self, drive_id: str, object_id: str, path: str) -> List[Tuple[str, str, datetime]]: + """ + Retrieves a list of all nested files under the specified object. + Args: + drive_id: The ID of the drive containing the object. + object_id: The ID of the object to start the search from. + Returns: + A list of tuples containing file information (name, download URL, and last modified datetime). + Raises: + RuntimeError: If an error occurs during the request. + """ + + access_token = self.get_access_token() + headers = {"Authorization": f"Bearer {access_token}"} + base_url = f"https://graph.microsoft.com/v1.0/drives/{drive_id}" + + def get_files(url: str, path: str) -> List[Tuple[str, str, datetime]]: + response = requests.get(url, headers=headers) + if response.status_code != 200: + error_info = response.json().get("error", {}).get("message", "No additional error information provided.") + raise RuntimeError(f"Failed to retrieve files from URL '{url}'. HTTP status: {response.status_code}. Error: {error_info}") + + data = response.json() + for child in data.get("value", []): + new_path = path + "/" + child["name"] + if child.get("file"): # Object is a file + last_modified = datetime.strptime(child["lastModifiedDateTime"], "%Y-%m-%dT%H:%M:%SZ") + yield (new_path, child["@microsoft.graph.downloadUrl"], last_modified) + else: # Object is a folder, retrieve children + child_url = f"{base_url}/items/{child['id']}/children" # Use item endpoint for nested objects + yield from get_files(child_url, new_path) + yield from [] + + # Initial request to item endpoint + item_url = f"{base_url}/items/{object_id}" + item_response = requests.get(item_url, headers=headers) + if item_response.status_code != 200: + error_info = item_response.json().get("error", {}).get("message", "No additional error information provided.") + raise RuntimeError( + f"Failed to retrieve the initial shared object with ID '{object_id}' from drive '{drive_id}'. " + f"HTTP status: {item_response.status_code}. Error: {error_info}" + ) + + # Check if the object is a file or a folder + item_data = item_response.json() + if item_data.get("file"): # Initial object is a file + new_path = path + "/" + item_data["name"] + last_modified = datetime.strptime(item_data["lastModifiedDateTime"], "%Y-%m-%dT%H:%M:%SZ") + yield (new_path, item_data["@microsoft.graph.downloadUrl"], last_modified) + else: + # Initial object is a folder, start file retrieval + yield from get_files(f"{item_url}/children", path) + def list_directories_and_files(self, root_folder, path=None): """Enumerates folders and files starting from a root folder.""" drive_items = root_folder.children.get().execute_query() @@ -102,48 +195,60 @@ def list_directories_and_files(self, root_folder, path=None): for item in drive_items: item_path = path + "/" + item.name if path else item.name if item.is_file: - found_items.append((item, item_path)) + found_items.append((item_path, item.properties["@microsoft.graph.downloadUrl"], item.properties["lastModifiedDateTime"])) else: found_items.extend(self.list_directories_and_files(item, item_path)) return found_items - def get_files_by_drive_name(self, drives, drive_name, folder_path): + def get_files_by_drive_name(self, drive_name, folder_path): """Yields files from the specified drive.""" path_levels = [level for level in folder_path.split("/") if level] folder_path = "/".join(path_levels) - for drive in drives: - is_onedrive = drive.drive_type in ["personal", "business"] - if drive.name == drive_name and is_onedrive: + for drive in self.drives: + if drive.name == drive_name: folder = drive.root if folder_path in self.ROOT_PATH else drive.root.get_by_path(folder_path).get().execute_query() yield from self.list_directories_and_files(folder) + def _get_shared_files_from_all_drives(self, parsed_drive_id: str): + shared_drive_items = self.one_drive_client.me.drive.shared_with_me().execute_query() + for drive_item in shared_drive_items: + parent_reference = drive_item.remote_item.parentReference + + # check if drive is already parsed + if parent_reference and parent_reference["driveId"] != parsed_drive_id: + yield from self._get_shared_drive_object(parent_reference["driveId"], drive_item.id, drive_item.web_url) + + def get_all_files(self): + if self.config.search_scope in ("ACCESSIBLE_DRIVES", "ALL"): + # Get files from accessible drives + yield from self.get_files_by_drive_name(self.config.drive_name, self.config.folder_path) + + if self.config.search_scope in ("SHARED_ITEMS", "ALL"): + selected_drive = list(filter(lambda drive: drive.name == self.config.drive_name, self.drives)) + selected_drive_id = selected_drive[0].id if selected_drive else None + + if self.config.search_scope == "SHARED_ITEMS": + selected_drive_id = None + + # Get files from shared items + yield from self._get_shared_files_from_all_drives(selected_drive_id) + def get_matching_files(self, globs: List[str], prefix: Optional[str], logger: logging.Logger) -> Iterable[RemoteFile]: """ Retrieve all files matching the specified glob patterns in OneDrive. """ - drives = self.one_drive_client.drives.get().execute_query() - - if self.config.credentials.auth_type == "Client": - my_drive = self.one_drive_client.me.drive.get().execute_query() - else: - my_drive = ( - self.one_drive_client.users.get_by_principal_name(self.config.credentials.user_principal_name).drive.get().execute_query() - ) - - drives.add_child(my_drive) - - files = self.get_files_by_drive_name(drives, self.config.drive_name, self.config.folder_path) + files = self.get_all_files() try: - first_file, path = next(files) + path, download_url, last_modified = next(files) yield from self.filter_files_by_globs_and_start_date( [ MicrosoftOneDriveRemoteFile( uri=path, - download_url=first_file.properties["@microsoft.graph.downloadUrl"], - last_modified=first_file.properties["lastModifiedDateTime"], + download_url=download_url, + last_modified=last_modified, ) ], globs, @@ -161,10 +266,10 @@ def get_matching_files(self, globs: List[str], prefix: Optional[str], logger: lo [ MicrosoftOneDriveRemoteFile( uri=path, - download_url=file.properties["@microsoft.graph.downloadUrl"], - last_modified=file.properties["lastModifiedDateTime"], + download_url=download_url, + last_modified=last_modified, ) - for file, path in files + for path, download_url, last_modified in files ], globs, ) diff --git a/airbyte-integrations/connectors/source-microsoft-onedrive/unit_tests/unit_tests.py b/airbyte-integrations/connectors/source-microsoft-onedrive/unit_tests/unit_tests.py index f610ad67a646a..f89fdd2870817 100644 --- a/airbyte-integrations/connectors/source-microsoft-onedrive/unit_tests/unit_tests.py +++ b/airbyte-integrations/connectors/source-microsoft-onedrive/unit_tests/unit_tests.py @@ -2,8 +2,10 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # -from unittest.mock import Mock, patch +from datetime import datetime +from unittest.mock import MagicMock, Mock, PropertyMock, call, patch +import pytest from source_microsoft_onedrive.spec import SourceMicrosoftOneDriveSpec from source_microsoft_onedrive.stream_reader import FileReadMode, SourceMicrosoftOneDriveClient, SourceMicrosoftOneDriveStreamReader @@ -28,7 +30,7 @@ def test_open_file(mock_smart_open): with stream_reader.open_file(mock_file, FileReadMode.READ, "utf-8", mock_logger) as result: pass - mock_smart_open.assert_called_once_with(mock_file.download_url, mode='r', encoding='utf-8') + mock_smart_open.assert_called_once_with(mock_file.download_url, mode="r", encoding="utf-8") assert result is not None @@ -40,15 +42,18 @@ def test_microsoft_onedrive_client_initialization(requests_mock): "client_id": "client_id", "tenant_id": "tenant_id", "client_secret": "client_secret", - "refresh_token": "refresh_token" + "refresh_token": "refresh_token", }, "drive_name": "drive_name", "folder_path": "folder_path", - "streams": [{"name": "test_stream", "globs": ["*.csv"], "validation_policy": "Emit Record", "format": {"filetype": "csv"}}] + "streams": [{"name": "test_stream", "globs": ["*.csv"], "validation_policy": "Emit Record", "format": {"filetype": "csv"}}], } - authority_url = 'https://login.microsoftonline.com/tenant_id/v2.0/.well-known/openid-configuration' - mock_response = {'authorization_endpoint': 'https://login.microsoftonline.com/tenant_id/oauth2/v2.0/authorize', 'token_endpoint': 'https://login.microsoftonline.com/tenant_id/oauth2/v2.0/token'} + authority_url = "https://login.microsoftonline.com/tenant_id/v2.0/.well-known/openid-configuration" + mock_response = { + "authorization_endpoint": "https://login.microsoftonline.com/tenant_id/oauth2/v2.0/authorize", + "token_endpoint": "https://login.microsoftonline.com/tenant_id/oauth2/v2.0/token", + } requests_mock.get(authority_url, json=mock_response, status_code=200) client = SourceMicrosoftOneDriveClient(SourceMicrosoftOneDriveSpec(**config)) @@ -92,9 +97,199 @@ def test_get_files_by_drive_name(mock_list_directories_and_files): stream_reader = SourceMicrosoftOneDriveStreamReader() stream_reader._config = Mock() - # Call the method - files = list(stream_reader.get_files_by_drive_name([mock_drive], "testDrive", "/test/path")) + with patch.object(SourceMicrosoftOneDriveStreamReader, "drives", new_callable=PropertyMock) as mock_drives: + mock_drives.return_value = [mock_drive] + + # Call the method + files = list(stream_reader.get_files_by_drive_name("testDrive", "/test/path")) # Assertions assert len(files) == 1 assert files[0].name == "testFile.txt" + + +@pytest.mark.parametrize( + "selected_drive_id, drive_ids, shared_drive_item_dicts, expected_result, expected_calls", + [ + (None, [1, 2, 3], [], [], []), + (1, [1, 2, 3], [{"drive_id": 1, "id": 4, "web_url": "test_url4"}], [], []), + (1, [1, 2, 3], [{"drive_id": 4, "id": 4, "web_url": "test_url4"}], [4], [call(4, 4, "test_url4")]), + ( + 2, + [1, 2, 3], + [{"drive_id": 4, "id": 4, "web_url": "test_url4"}, {"drive_id": 5, "id": 5, "web_url": "test_url5"}], + [4, 5], + [call(4, 4, "test_url4"), call(5, 5, "test_url5")], + ), + ( + 3, + [1, 2, 3], + [ + {"drive_id": 4, "id": 4, "web_url": "test_url4"}, + {"drive_id": 5, "id": 5, "web_url": "test_url5"}, + {"drive_id": 6, "id": 6, "web_url": "test_url6"}, + ], + [4, 5, 6], + [call(4, 4, "test_url4"), call(5, 5, "test_url5"), call(6, 6, "test_url6")], + ), + ], +) +def test_get_shared_files_from_all_drives(selected_drive_id, drive_ids, shared_drive_item_dicts, expected_result, expected_calls): + stream_reader = SourceMicrosoftOneDriveStreamReader() + stream_reader._config = Mock() + + # Mock _get_shared_drive_object method + with patch.object( + SourceMicrosoftOneDriveStreamReader, "_get_shared_drive_object", return_value=expected_result + ) as mock_get_shared_drive_object: + # Setup shared_drive_items mock objects + shared_drive_items = [ + MagicMock(remote_item=MagicMock(parentReference={"driveId": item["drive_id"]}), id=item["id"], web_url=item["web_url"]) + for item in shared_drive_item_dicts + ] + + with patch.object(SourceMicrosoftOneDriveStreamReader, "one_drive_client", new_callable=PropertyMock) as mock_one_drive_client: + mock_one_drive_client.return_value.me.drive.shared_with_me.return_value.execute_query.return_value = shared_drive_items + + with patch.object(SourceMicrosoftOneDriveStreamReader, "drives", new_callable=PropertyMock) as mock_drives: + mock_drives.return_value = [Mock(id=drive_id) for drive_id in drive_ids] + + # Execute the method under test + list(stream_reader._get_shared_files_from_all_drives(selected_drive_id)) + + # Assert _get_shared_drive_object was called correctly + mock_get_shared_drive_object.assert_has_calls(expected_calls, any_order=True) + + +# Sample data for mocking responses +file_response = { + "file": True, + "name": "TestFile.txt", + "@microsoft.graph.downloadUrl": "http://example.com/download", + "lastModifiedDateTime": "2021-01-01T00:00:00Z", +} + +empty_folder_response = {"folder": True, "value": []} + +# Adjusting the folder_with_nested_files to represent the initial folder response +folder_with_nested_files_initial = { + "folder": True, + "value": [ + {"id": "subfolder1", "folder": True, "name": "subfolder1"}, + {"id": "subfolder2", "folder": True, "name": "subfolder2"}, + ], # Empty subfolder # Subfolder with a file +} + +# Response for the empty subfolder (subfolder1) +empty_subfolder_response = {"value": [], "name": "subfolder1"} # No files or folders inside subfolder1 + +# Response for the subfolder with a file (subfolder2) +not_empty_subfolder_response = { + "value": [ + { + "file": True, + "name": "NestedFile.txt", + "@microsoft.graph.downloadUrl": "http://example.com/nested", + "lastModifiedDateTime": "2021-01-02T00:00:00Z", + } + ], + "name": "subfolder2", +} + + +@pytest.mark.parametrize( + "initial_response, subsequent_responses, expected_result, raises_error, expected_error_message, initial_path", + [ + # Object ID is a file + ( + file_response, + [], + [ + ( + "http://example.com/TestFile.txt", + "http://example.com/download", + datetime.strptime("2021-01-01T00:00:00Z", "%Y-%m-%dT%H:%M:%SZ"), + ) + ], + False, + None, + "http://example.com", + ), + # Object ID is an empty folder + (empty_folder_response, [empty_subfolder_response], [], False, None, "http://example.com"), + # Object ID is a folder with empty subfolders and files + ( + {"folder": True, "name": "root"}, # Initial folder response + [ + folder_with_nested_files_initial, + empty_subfolder_response, + not_empty_subfolder_response, + ], + [ + ( + "http://example.com/subfolder2/NestedFile.txt", + "http://example.com/nested", + datetime.strptime("2021-01-02T00:00:00Z", "%Y-%m-%dT%H:%M:%SZ"), + ) + ], + False, + None, + "http://example.com", + ), + # Error response on initial request + ( + MagicMock(status_code=400, json=MagicMock(return_value={"error": {"message": "Bad Request"}})), + [], + [], + True, + "Failed to retrieve the initial shared object with ID 'dummy_object_id' from drive 'dummy_drive_id'. HTTP status: 400. Error: Bad Request", + "http://example.com", + ), + # Error response while iterating over nested + ( + {"folder": True, "name": "root"}, + [MagicMock(status_code=400, json=MagicMock(return_value={"error": {"message": "Bad Request"}}))], + [], + True, + ( + "Failed to retrieve files from URL " + "'https://graph.microsoft.com/v1.0/drives/dummy_drive_id/items/dummy_object_id/children'. " + "HTTP status: 400. Error: Bad Request" + ), + "http://example.com", + ), + ], +) +@patch("source_microsoft_onedrive.stream_reader.requests.get") +@patch("source_microsoft_onedrive.stream_reader.SourceMicrosoftOneDriveStreamReader.get_access_token") +def test_get_shared_drive_object( + mock_get_access_token, + mock_requests_get, + initial_response, + subsequent_responses, + expected_result, + raises_error, + expected_error_message, + initial_path, +): + mock_get_access_token.return_value = "dummy_access_token" + mock_responses = [ + initial_response + if isinstance(initial_response, MagicMock) + else MagicMock(status_code=200, json=MagicMock(return_value=initial_response)) + ] + for response in subsequent_responses: + mock_responses.append( + response if isinstance(response, MagicMock) else MagicMock(status_code=200, json=MagicMock(return_value=response)) + ) + mock_requests_get.side_effect = mock_responses + + reader = SourceMicrosoftOneDriveStreamReader() + + if raises_error: + with pytest.raises(RuntimeError) as exc_info: + list(reader._get_shared_drive_object("dummy_drive_id", "dummy_object_id", initial_path)) + assert str(exc_info.value) == expected_error_message + else: + result = list(reader._get_shared_drive_object("dummy_drive_id", "dummy_object_id", initial_path)) + assert result == expected_result diff --git a/airbyte-integrations/connectors/source-microsoft-sharepoint/.coveragerc b/airbyte-integrations/connectors/source-microsoft-sharepoint/.coveragerc index 6ab3ced7f624b..b6fc574be1138 100644 --- a/airbyte-integrations/connectors/source-microsoft-sharepoint/.coveragerc +++ b/airbyte-integrations/connectors/source-microsoft-sharepoint/.coveragerc @@ -1,4 +1,5 @@ [run] omit = source_microsoft_sharepoint/spec.py - source_microsoft_sharepoint/source.py \ No newline at end of file + source_microsoft_sharepoint/source.py + source_microsoft_sharepoint/run.py \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-microsoft-sharepoint/README.md b/airbyte-integrations/connectors/source-microsoft-sharepoint/README.md index fa4d35e78654b..5ff95071916a9 100644 --- a/airbyte-integrations/connectors/source-microsoft-sharepoint/README.md +++ b/airbyte-integrations/connectors/source-microsoft-sharepoint/README.md @@ -1,4 +1,5 @@ -# Microsoft SharePoint Source +# Microsoft SharePoint source connector + This is the repository for the Microsoft SharePoint source connector, written in Python. For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.com/integrations/sources/microsoft-sharepoint). @@ -6,113 +7,49 @@ For information about how to use this connector within Airbyte, see [the documen ## Local development ### Prerequisites -**To iterate on this connector, make sure to complete this prerequisites section.** +* Python (~=3.9) +* Poetry (~=1.7) - installation instructions [here](https://python-poetry.org/docs/#installation) -#### Minimum Python version required `= 3.9.0` -#### Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python -m venv .venv -``` - -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt -pip install '.[tests]' +### Installing the connector +From this connector directory, run: +```bash +poetry install --with dev ``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. -#### Create credentials +### Create credentials **If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/sources/microsoft-sharepoint) to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_microsoft_sharepoint/spec.yaml` file. Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. -See `integration_tests/sample_config.json` for a sample config file. +See `sample_files/sample_config.json` for a sample config file. -**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source microsoft-sharepoint test creds` -and place them into `secrets/config.json`. ### Locally running the connector ``` -python main.py spec -python main.py check --config secrets/config.json -python main.py discover --config secrets/config.json -python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +poetry run source-microsoft-sharepoint spec +poetry run source-microsoft-sharepoint check --config secrets/config.json +poetry run source-microsoft-sharepoint discover --config secrets/config.json +poetry run source-microsoft-sharepoint read --config secrets/config.json --catalog sample_files/configured_catalog.json ``` -### Locally running the connector docker image - -#### Use `airbyte-ci` to build your connector -The Airbyte way of building this connector is to use our `airbyte-ci` tool. -You can follow install instructions [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md#L1). -Then running the following command will build your connector: - -```bash -airbyte-ci connectors --name source-microsoft-sharepoint build +### Running unit tests +To run unit tests locally, from the connector directory run: ``` -Once the command is done, you will find your connector image in your local docker registry: `airbyte/source-microsoft-sharepoint:dev`. - -##### Customizing our build process -When contributing on our connector you might need to customize the build process to add a system dependency or set an env var. -You can customize our build process by adding a `build_customization.py` module to your connector. -This module should contain a `pre_connector_install` and `post_connector_install` async function that will mutate the base image and the connector container respectively. -It will be imported at runtime by our build process and the functions will be called if they exist. - -Here is an example of a `build_customization.py` module: -```python -from __future__ import annotations - -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - # Feel free to check the dagger documentation for more information on the Container object and its methods. - # https://dagger-io.readthedocs.io/en/sdk-python-v0.6.4/ - from dagger import Container - - -async def pre_connector_install(base_image_container: Container) -> Container: - return await base_image_container.with_env_variable("MY_PRE_BUILD_ENV_VAR", "my_pre_build_env_var_value") - -async def post_connector_install(connector_container: Container) -> Container: - return await connector_container.with_env_variable("MY_POST_BUILD_ENV_VAR", "my_post_build_env_var_value") +poetry run pytest unit_tests ``` -#### Build your own connector image -This connector is built using our dynamic built process in `airbyte-ci`. -The base image used to build it is defined within the metadata.yaml file under the `connectorBuildOptions`. -The build logic is defined using [Dagger](https://dagger.io/) [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/pipelines/builds/python_connectors.py). -It does not rely on a Dockerfile. - -If you would like to patch our connector and build your own a simple approach would be to: - -1. Create your own Dockerfile based on the latest version of the connector image. -```Dockerfile -FROM airbyte/source-microsoft-sharepoint:latest - -COPY . ./airbyte/integration_code -RUN pip install ./airbyte/integration_code - -# The entrypoint and default env vars are already set in the base image -# ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -# ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] +### Building the docker image +1. Install [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) +2. Run the following command to build the docker image: +```bash +airbyte-ci connectors --name=source-microsoft-sharepoint build ``` -Please use this as an example. This is not optimized. -2. Build your image: -```bash -docker build -t airbyte/source-microsoft-sharepoint:dev . -# Running the spec command against your patched connector -docker run airbyte/source-microsoft-sharepoint:dev spec -```` +An image will be available on your host with the tag `airbyte/source-microsoft-sharepoint:dev`. -#### Run + +### Running as a docker container Then run any of the connector commands as follows: ``` docker run --rm airbyte/source-microsoft-sharepoint:dev spec @@ -120,47 +57,35 @@ docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-microsoft-sharepoint:d docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-microsoft-sharepoint:dev discover --config /secrets/config.json docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-microsoft-sharepoint:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json ``` -## Testing -Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. -First install test dependencies into your virtual environment: -``` -pip install .[tests] -``` -### Unit Tests -To run unit tests locally, from the connector directory run: -``` -python -m pytest unit_tests -``` -### Integration Tests -There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all source connectors) and custom integration tests (which are specific to this connector). -#### Custom Integration tests -Place custom tests inside `integration_tests/` folder, then, from the connector root, run -``` -python -m pytest integration_tests +### Running our CI test suite +You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): +```bash +airbyte-ci connectors --name=source-microsoft-sharepoint test ``` -### Acceptance Tests -Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. +### Customizing acceptance Tests +Customize `acceptance-test-config.yml` file to configure acceptance tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. -Please run acceptance tests via [airbyte-ci](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md#connectors-test-command): + +### Dependency Management +All of your dependencies should be managed via Poetry. +To add a new dependency, run: ```bash -airbyte-ci connectors --name source-microsoft-sharepoint test +poetry add ``` -## Dependency Management -All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. -We split dependencies between two groups, dependencies that are: -* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. -* required for the testing need to go to `TEST_REQUIREMENTS` list +Please commit the changes to `pyproject.toml` and `poetry.lock` files. -### Publishing a new version of the connector +## Publishing a new version of the connector You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? 1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-microsoft-sharepoint test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). +2. Bump the connector version (please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors)): + - bump the `dockerImageTag` value in in `metadata.yaml` + - bump the `version` value in `pyproject.toml` 3. Make sure the `metadata.yaml` content is up to date. -4. Make the connector documentation and its changelog is up to date (`docs/integrations/sources/microsoft-sharepoint.md`). +4. Make sure the connector documentation and its changelog is up to date (`docs/integrations/sources/microsoft-sharepoint.md`). 5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). 6. Pat yourself on the back for being an awesome contributor. 7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. - +8. Once your PR is merged, the new version of the connector will be automatically published to Docker Hub and our connector registry. \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-microsoft-sharepoint/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-microsoft-sharepoint/integration_tests/abnormal_state.json index ae378266f2630..90f2f7011c244 100644 --- a/airbyte-integrations/connectors/source-microsoft-sharepoint/integration_tests/abnormal_state.json +++ b/airbyte-integrations/connectors/source-microsoft-sharepoint/integration_tests/abnormal_state.json @@ -7,11 +7,11 @@ }, "stream_state": { "history": { - "Test_folder/TestFile.csv": "2130-11-17T13:52:35.000000Z", - "Test_folder/Test_folder_2/TestFileSharePoint.csv": "2130-12-15T17:34:08.000000Z", - "Test_folder/simple_test.csv": "2130-01-16T12:45:20.000000Z" + "https://airbyte179.sharepoint.com/Shared%20Documents/Test_folder/TestFile.csv": "2130-11-17T13:52:35.000000Z", + "https://airbyte179.sharepoint.com/Shared%20Documents/Test_folder/Test_folder_2/TestFileSharePoint.csv": "2130-12-15T17:34:08.000000Z", + "https://airbyte179.sharepoint.com/Shared%20Documents/Test_folder/simple_test.csv": "2130-01-16T12:45:20.000000Z" }, - "_ab_source_file_last_modified": "2130-01-16T12:45:20.000000Z_Test_folder/simple_test.csv" + "_ab_source_file_last_modified": "2130-01-16T12:45:20.000000Z_https://airbyte179.sharepoint.com/Shared%20Documents/Test_folder/simple_test.csv" } } }, @@ -22,10 +22,10 @@ "name": "test_unstructured" }, "stream_state": { - "_ab_source_file_last_modified": "2130-12-23T16:47:21.000000Z_Test_folder/Test_foler_2_1/simple_pdf_file.pdf", "history": { - "Test_folder/Test_foler_2_1/simple_pdf_file.pdf": "2130-12-23T16:47:21.000000Z" - } + "https://airbyte179.sharepoint.com/Shared%20Documents/Test_folder/Test_foler_2_1/simple_pdf_file.pdf": "2130-12-15T16:47:21.000000Z" + }, + "_ab_source_file_last_modified": "2130-12-15T16:47:21.000000Z_https://airbyte179.sharepoint.com/Shared%20Documents/Test_folder/Test_foler_2_1/simple_pdf_file.pdf" } } }, @@ -37,9 +37,9 @@ }, "stream_state": { "history": { - "Test_folder/simple_test.csv.gz": "2130-01-18T11:24:38.000000Z" + "https://airbyte179.sharepoint.com/Shared%20Documents/Test_folder/simple_test.csv.gz": "2130-01-18T11:24:38.000000Z" }, - "_ab_source_file_last_modified": "2130-01-18T11:24:38.000000Z_Test_folder/simple_test.csv.gz" + "_ab_source_file_last_modified": "2130-01-18T11:24:38.000000Z_https://airbyte179.sharepoint.com/Shared%20Documents/Test_folder/simple_test.csv.gz" } } }, @@ -51,9 +51,9 @@ }, "stream_state": { "history": { - "Test_folder/test_parquet/simple_test.parquet": "2130-01-17T11:47:39.000000Z" + "https://airbyte179.sharepoint.com/Shared%20Documents/Test_folder/test_parquet/simple_test.parquet": "2130-01-17T11:47:39.000000Z" }, - "_ab_source_file_last_modified": "2130-01-17T11:47:39.000000Z_Test_folder/test_parquet/simple_test.parquet" + "_ab_source_file_last_modified": "2130-01-17T11:47:39.000000Z_https://airbyte179.sharepoint.com/Shared%20Documents/Test_folder/test_parquet/simple_test.parquet" } } }, @@ -65,9 +65,23 @@ }, "stream_state": { "history": { - "Test_folder/test_avro/test_sample.avro": "2130-01-17T11:46:55.000000Z" + "https://airbyte179.sharepoint.com/Shared%20Documents/Test_folder/test_avro/test_sample.avro": "2130-01-17T11:46:55.000000Z" + }, + "_ab_source_file_last_modified": "2130-01-17T11:46:55.000000Z_https://airbyte179.sharepoint.com/Shared%20Documents/Test_folder/test_avro/test_sample.avro" + } + } + }, + { + "type": "STREAM", + "stream": { + "stream_descriptor": { + "name": "test_jsonl" + }, + "stream_state": { + "history": { + "https://airbyte179.sharepoint.com/Shared%20Documents/Test_folder/simple_test.jsonl": "2130-01-17T11:46:55.000000Z" }, - "_ab_source_file_last_modified": "2130-01-17T11:46:55.000000Z_Test_folder/test_avro/test_sample.avro" + "_ab_source_file_last_modified": "2130-01-17T11:46:55.000000Z_https://airbyte179.sharepoint.com/Shared%20Documents/Test_folder/simple_test.jsonl" } } } diff --git a/airbyte-integrations/connectors/source-microsoft-sharepoint/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-microsoft-sharepoint/integration_tests/configured_catalog.json index 09e4d159a2db1..ffbb035e198ee 100644 --- a/airbyte-integrations/connectors/source-microsoft-sharepoint/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-microsoft-sharepoint/integration_tests/configured_catalog.json @@ -54,6 +54,17 @@ }, "sync_mode": "incremental", "destination_sync_mode": "append" + }, + { + "stream": { + "name": "test_jsonl", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["_ab_source_file_last_modified"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" } ] } diff --git a/airbyte-integrations/connectors/source-microsoft-sharepoint/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-microsoft-sharepoint/integration_tests/expected_records.jsonl index b6d5ccdbd7c46..5601765708983 100644 --- a/airbyte-integrations/connectors/source-microsoft-sharepoint/integration_tests/expected_records.jsonl +++ b/airbyte-integrations/connectors/source-microsoft-sharepoint/integration_tests/expected_records.jsonl @@ -1,13 +1,15 @@ -{"stream": "test_avro", "data": {"id": 0, "fullname_and_valid": {"fullname": "cfjwIzCRTL", "valid": false}, "_ab_source_file_last_modified": "2024-01-17T11:46:55.000000Z", "_ab_source_file_url": "Test_folder/test_avro/test_sample.avro"}, "emitted_at": 1706099187061} -{"stream": "test_avro", "data": {"id": 1, "fullname_and_valid": {"fullname": "LYOnPyuTWw", "valid": true}, "_ab_source_file_last_modified": "2024-01-17T11:46:55.000000Z", "_ab_source_file_url": "Test_folder/test_avro/test_sample.avro"}, "emitted_at": 1706099187063} -{"stream": "test_avro", "data": {"id": 2, "fullname_and_valid": {"fullname": "hyTFbsxlRB", "valid": false}, "_ab_source_file_last_modified": "2024-01-17T11:46:55.000000Z", "_ab_source_file_url": "Test_folder/test_avro/test_sample.avro"}, "emitted_at": 1706099187063} -{"stream": "test_csv", "data": {"\ufeff\"{\"\"type\"\": \"\"RECORD\"\"\";\" \"\"record\"\": {\"\"stream\"\": \"\"sponsored_display_report_stream\"\"\";\" \"\"data\"\": {\"\"profileId\"\": 1560175474359565\";\" \"\"recordType\"\": \"\"campaigns\"\"\";\" \"\"reportDate\"\": \"\"20230701\"\"\";\" \"\"recordId\"\": \"\"200818142097572\"\"\";\" \"\"metric\"\": {\"\"campaignId\"\": \"\"200818142097572\"\"\";\" \"\"impressions\"\": \"\"85\"\"\";\" \"\"campaignName\"\": \"\"**LP - SD - PROD - [Cyclone Dust Collector] - Competitors - Video\"\"}}\";\" \"\"emitted_at\"\": 1691500775298}}\";;;": "{\"type\": \"RECORD\";\" \"\"record\"\": {\"\"stream\"\": \"\"sponsored_display_report_stream\"\"\";\" \"\"data\"\": {\"\"profileId\"\": 1560175474359565\";\" \"\"recordType\"\": \"\"campaigns\"\"\";\" \"\"reportDate\"\": \"\"20230701\"\"\";\" \"\"recordId\"\": \"\"160532120248153\"\"\";\" \"\"metric\"\": {\"\"campaignId\"\": \"\"160532120248153\"\"\";\" \"\"impressions\"\": \"\"4695\"\"\";\" \"\"campaignName\"\": \"\"**LP - SD - ATV - Remarketing - [Cyclone Dust Collector] - 7\";14;30;\" 60 Days\"\"}}\";\" \"\"emitted_at\"\": 1691500775316}}\"", "_ab_source_file_last_modified": "2023-11-17T13:52:35.000000Z", "_ab_source_file_url": "Test_folder/TestFile.csv"}, "emitted_at": 1706099256586} -{"stream": "test_csv", "data": {"\ufeff\"{\"\"type\"\": \"\"RECORD\"\"\";\" \"\"record\"\": {\"\"stream\"\": \"\"sponsored_display_report_stream\"\"\";\" \"\"data\"\": {\"\"profileId\"\": 1560175474359565\";\" \"\"recordType\"\": \"\"campaigns\"\"\";\" \"\"reportDate\"\": \"\"20230701\"\"\";\" \"\"recordId\"\": \"\"200818142097572\"\"\";\" \"\"metric\"\": {\"\"campaignId\"\": \"\"200818142097572\"\"\";\" \"\"impressions\"\": \"\"85\"\"\";\" \"\"campaignName\"\": \"\"**LP - SD - PROD - [Cyclone Dust Collector] - Competitors - Video\"\"}}\";\" \"\"emitted_at\"\": 1691500775298}}\";;;": "{\"type\": \"RECORD\";\" \"\"record\"\": {\"\"stream\"\": \"\"sponsored_display_report_stream\"\"\";\" \"\"data\"\": {\"\"profileId\"\": 1560175474359565\";\" \"\"recordType\"\": \"\"campaigns\"\"\";\" \"\"reportDate\"\": \"\"20230701\"\"\";\" \"\"recordId\"\": \"\"196774177063693\"\"\";\" \"\"metric\"\": {\"\"campaignId\"\": \"\"196774177063693\"\"\";\" \"\"impressions\"\": \"\"0\"\"\";\" \"\"campaignName\"\": \"\"**LP - SD - ATV - Remarketing - [Cyclone Dust Collector] - 30 Days - Video\"\"}}\";\" \"\"emitted_at\"\": 1691500775316}}\";;;", "_ab_source_file_last_modified": "2023-11-17T13:52:35.000000Z", "_ab_source_file_url": "Test_folder/TestFile.csv"}, "emitted_at": 1706099256588} -{"stream": "test_csv", "data": {"\ufeff\"{\"\"type\"\": \"\"RECORD\"\"\";\" \"\"record\"\": {\"\"stream\"\": \"\"sponsored_display_report_stream\"\"\";\" \"\"data\"\": {\"\"profileId\"\": 1560175474359565\";\" \"\"recordType\"\": \"\"campaigns\"\"\";\" \"\"reportDate\"\": \"\"20230701\"\"\";\" \"\"recordId\"\": \"\"200818142097572\"\"\";\" \"\"metric\"\": {\"\"campaignId\"\": \"\"200818142097572\"\"\";\" \"\"impressions\"\": \"\"85\"\"\";\" \"\"campaignName\"\": \"\"**LP - SD - PROD - [Cyclone Dust Collector] - Competitors - Video\"\"}}\";\" \"\"emitted_at\"\": 1691500775298}}\";;;": "{\"type\": \"RECORD\";\" \"\"record\"\": {\"\"stream\"\": \"\"sponsored_display_report_stream\"\"\";\" \"\"data\"\": {\"\"profileId\"\": 1560175474359565\";\" \"\"recordType\"\": \"\"campaigns\"\"\";\" \"\"reportDate\"\": \"\"20230701\"\"\";\" \"\"recordId\"\": \"\"57815899765268\"\"\";\" \"\"metric\"\": {\"\"campaignId\"\": \"\"57815899765268\"\"\";\" \"\"impressions\"\": \"\"0\"\"\";\" \"\"campaignName\"\": \"\"**LP - SD - ATV - Similar To - [Cyclone Dust Collector] - 30 Days - Video\"\"}}\";\" \"\"emitted_at\"\": 1691500775317}}\";;;", "_ab_source_file_last_modified": "2023-11-17T13:52:35.000000Z", "_ab_source_file_url": "Test_folder/TestFile.csv"}, "emitted_at": 1706099256588} -{"stream": "test_csv_gzip", "data": {"id": 1, "name": "PVdhmjb1", "valid": false, "_ab_source_file_last_modified": "2024-01-18T11:24:38.000000Z", "_ab_source_file_url": "Test_folder/simple_test.csv.gz"}, "emitted_at": 1706099153362} -{"stream": "test_csv_gzip", "data": {"id": 2, "name": "j4DyXTS7", "valid": true, "_ab_source_file_last_modified": "2024-01-18T11:24:38.000000Z", "_ab_source_file_url": "Test_folder/simple_test.csv.gz"}, "emitted_at": 1706099153363} -{"stream": "test_csv_gzip", "data": {"id": 3, "name": "v0w8fTME", "valid": false, "_ab_source_file_last_modified": "2024-01-18T11:24:38.000000Z", "_ab_source_file_url": "Test_folder/simple_test.csv.gz"}, "emitted_at": 1706099153363} -{"stream": "test_parquet", "data": {"number": 1.0, "name": "foo", "flag": true, "delta": -1.0, "_ab_source_file_last_modified": "2024-01-17T11:47:39.000000Z", "_ab_source_file_url": "Test_folder/test_parquet/simple_test.parquet"}, "emitted_at": 1706099224674} -{"stream": "test_parquet", "data": {"number": 2.0, "name": null, "flag": false, "delta": 2.5, "_ab_source_file_last_modified": "2024-01-17T11:47:39.000000Z", "_ab_source_file_url": "Test_folder/test_parquet/simple_test.parquet"}, "emitted_at": 1706099224675} -{"stream": "test_parquet", "data": {"number": 3.0, "name": "bar", "flag": null, "delta": 0.1, "_ab_source_file_last_modified": "2024-01-17T11:47:39.000000Z", "_ab_source_file_url": "Test_folder/test_parquet/simple_test.parquet"}, "emitted_at": 1706099224675} -{"stream": "test_unstructured", "data": {"content": "# A Simple PDF File\n\nThis is a small demonstration .pdf file -\n\njust for use in the Virtual Mechanics tutorials. More text. And more text. And more text. And more text. And more text.\n\nAnd more text. And more text. And more text. And more text. And more text. And more text. Boring, zzzzz. And more text. And more text. And more text. And more text. And more text. And more text. And more text. And more text. And more text.\n\nAnd more text. And more text. And more text. And more text. And more text. And more text. And more text. Even more. Continued on page 2 ...\n\n# Simple PDF File 2\n\n...continued from page 1. Yet more text. And more text. And more text. And more text. And more text. And more text. And more text. And more text. Oh, how boring typing this stuff. But not as boring as watching paint dry. And more text. And more text. And more text. And more text. Boring. More, a little more text. The end, and just as well.", "document_key": "Test_folder/Test_foler_2_1/simple_pdf_file.pdf", "_ab_source_file_parse_error": null, "_ab_source_file_last_modified": "2023-12-15T16:47:21.000000Z", "_ab_source_file_url": "Test_folder/Test_foler_2_1/simple_pdf_file.pdf"}, "emitted_at": 1706099290771} +{"stream": "test_avro", "data": {"id": 0, "fullname_and_valid": {"fullname": "cfjwIzCRTL", "valid": false}, "_ab_source_file_last_modified": "2024-01-17T11:46:55.000000Z", "_ab_source_file_url": "https://airbyte179.sharepoint.com/Shared%20Documents/Test_folder/test_avro/test_sample.avro"}, "emitted_at": 1711637241976} +{"stream": "test_avro", "data": {"id": 1, "fullname_and_valid": {"fullname": "LYOnPyuTWw", "valid": true}, "_ab_source_file_last_modified": "2024-01-17T11:46:55.000000Z", "_ab_source_file_url": "https://airbyte179.sharepoint.com/Shared%20Documents/Test_folder/test_avro/test_sample.avro"}, "emitted_at": 1711637241991} +{"stream": "test_avro", "data": {"id": 2, "fullname_and_valid": {"fullname": "hyTFbsxlRB", "valid": false}, "_ab_source_file_last_modified": "2024-01-17T11:46:55.000000Z", "_ab_source_file_url": "https://airbyte179.sharepoint.com/Shared%20Documents/Test_folder/test_avro/test_sample.avro"}, "emitted_at": 1711637241992} +{"stream": "test_csv", "data": {"\ufeff\"{\"\"type\"\": \"\"RECORD\"\"\";\" \"\"record\"\": {\"\"stream\"\": \"\"sponsored_display_report_stream\"\"\";\" \"\"data\"\": {\"\"profileId\"\": 1560175474359565\";\" \"\"recordType\"\": \"\"campaigns\"\"\";\" \"\"reportDate\"\": \"\"20230701\"\"\";\" \"\"recordId\"\": \"\"200818142097572\"\"\";\" \"\"metric\"\": {\"\"campaignId\"\": \"\"200818142097572\"\"\";\" \"\"impressions\"\": \"\"85\"\"\";\" \"\"campaignName\"\": \"\"**LP - SD - PROD - [Cyclone Dust Collector] - Competitors - Video\"\"}}\";\" \"\"emitted_at\"\": 1691500775298}}\";;;": "{\"type\": \"RECORD\";\" \"\"record\"\": {\"\"stream\"\": \"\"sponsored_display_report_stream\"\"\";\" \"\"data\"\": {\"\"profileId\"\": 1560175474359565\";\" \"\"recordType\"\": \"\"campaigns\"\"\";\" \"\"reportDate\"\": \"\"20230701\"\"\";\" \"\"recordId\"\": \"\"160532120248153\"\"\";\" \"\"metric\"\": {\"\"campaignId\"\": \"\"160532120248153\"\"\";\" \"\"impressions\"\": \"\"4695\"\"\";\" \"\"campaignName\"\": \"\"**LP - SD - ATV - Remarketing - [Cyclone Dust Collector] - 7\";14;30;\" 60 Days\"\"}}\";\" \"\"emitted_at\"\": 1691500775316}}\"", "_ab_source_file_last_modified": "2023-11-17T13:52:35.000000Z", "_ab_source_file_url": "https://airbyte179.sharepoint.com/Shared%20Documents/Test_folder/TestFile.csv"}, "emitted_at": 1711637306294} +{"stream": "test_csv", "data": {"\ufeff\"{\"\"type\"\": \"\"RECORD\"\"\";\" \"\"record\"\": {\"\"stream\"\": \"\"sponsored_display_report_stream\"\"\";\" \"\"data\"\": {\"\"profileId\"\": 1560175474359565\";\" \"\"recordType\"\": \"\"campaigns\"\"\";\" \"\"reportDate\"\": \"\"20230701\"\"\";\" \"\"recordId\"\": \"\"200818142097572\"\"\";\" \"\"metric\"\": {\"\"campaignId\"\": \"\"200818142097572\"\"\";\" \"\"impressions\"\": \"\"85\"\"\";\" \"\"campaignName\"\": \"\"**LP - SD - PROD - [Cyclone Dust Collector] - Competitors - Video\"\"}}\";\" \"\"emitted_at\"\": 1691500775298}}\";;;": "{\"type\": \"RECORD\";\" \"\"record\"\": {\"\"stream\"\": \"\"sponsored_display_report_stream\"\"\";\" \"\"data\"\": {\"\"profileId\"\": 1560175474359565\";\" \"\"recordType\"\": \"\"campaigns\"\"\";\" \"\"reportDate\"\": \"\"20230701\"\"\";\" \"\"recordId\"\": \"\"196774177063693\"\"\";\" \"\"metric\"\": {\"\"campaignId\"\": \"\"196774177063693\"\"\";\" \"\"impressions\"\": \"\"0\"\"\";\" \"\"campaignName\"\": \"\"**LP - SD - ATV - Remarketing - [Cyclone Dust Collector] - 30 Days - Video\"\"}}\";\" \"\"emitted_at\"\": 1691500775316}}\";;;", "_ab_source_file_last_modified": "2023-11-17T13:52:35.000000Z", "_ab_source_file_url": "https://airbyte179.sharepoint.com/Shared%20Documents/Test_folder/TestFile.csv"}, "emitted_at": 1711637306313} +{"stream": "test_csv", "data": {"\ufeff\"{\"\"type\"\": \"\"RECORD\"\"\";\" \"\"record\"\": {\"\"stream\"\": \"\"sponsored_display_report_stream\"\"\";\" \"\"data\"\": {\"\"profileId\"\": 1560175474359565\";\" \"\"recordType\"\": \"\"campaigns\"\"\";\" \"\"reportDate\"\": \"\"20230701\"\"\";\" \"\"recordId\"\": \"\"200818142097572\"\"\";\" \"\"metric\"\": {\"\"campaignId\"\": \"\"200818142097572\"\"\";\" \"\"impressions\"\": \"\"85\"\"\";\" \"\"campaignName\"\": \"\"**LP - SD - PROD - [Cyclone Dust Collector] - Competitors - Video\"\"}}\";\" \"\"emitted_at\"\": 1691500775298}}\";;;": "{\"type\": \"RECORD\";\" \"\"record\"\": {\"\"stream\"\": \"\"sponsored_display_report_stream\"\"\";\" \"\"data\"\": {\"\"profileId\"\": 1560175474359565\";\" \"\"recordType\"\": \"\"campaigns\"\"\";\" \"\"reportDate\"\": \"\"20230701\"\"\";\" \"\"recordId\"\": \"\"57815899765268\"\"\";\" \"\"metric\"\": {\"\"campaignId\"\": \"\"57815899765268\"\"\";\" \"\"impressions\"\": \"\"0\"\"\";\" \"\"campaignName\"\": \"\"**LP - SD - ATV - Similar To - [Cyclone Dust Collector] - 30 Days - Video\"\"}}\";\" \"\"emitted_at\"\": 1691500775317}}\";;;", "_ab_source_file_last_modified": "2023-11-17T13:52:35.000000Z", "_ab_source_file_url": "https://airbyte179.sharepoint.com/Shared%20Documents/Test_folder/TestFile.csv"}, "emitted_at": 1711637306313} +{"stream": "test_csv_gzip", "data": {"id": "1", "name": "PVdhmjb1", "valid": "False", "_ab_source_file_last_modified": "2024-01-18T11:24:38.000000Z", "_ab_source_file_url": "https://airbyte179.sharepoint.com/Shared%20Documents/Test_folder/simple_test.csv.gz"}, "emitted_at": 1711637219737} +{"stream": "test_csv_gzip", "data": {"id": "2", "name": "j4DyXTS7", "valid": "True", "_ab_source_file_last_modified": "2024-01-18T11:24:38.000000Z", "_ab_source_file_url": "https://airbyte179.sharepoint.com/Shared%20Documents/Test_folder/simple_test.csv.gz"}, "emitted_at": 1711637219755} +{"stream": "test_csv_gzip", "data": {"id": "3", "name": "v0w8fTME", "valid": "False", "_ab_source_file_last_modified": "2024-01-18T11:24:38.000000Z", "_ab_source_file_url": "https://airbyte179.sharepoint.com/Shared%20Documents/Test_folder/simple_test.csv.gz"}, "emitted_at": 1711637219755} +{"stream": "test_jsonl", "data": {"id": 1, "name": "PVdhmjb1", "valid": false, "value": 1.2, "event_date": "2022-01-01T00:00:00Z", "_ab_source_file_last_modified": "2024-03-28T14:26:52.000000Z", "_ab_source_file_url": "https://airbyte179.sharepoint.com/Shared%20Documents/Test_folder/simple_test.jsonl"}, "emitted_at": 1711638750209} +{"stream": "test_jsonl", "data": {"id": 2, "name": "ABCDEF", "valid": true, "value": 1, "event_date": "2023-01-01T00:00:00Z", "_ab_source_file_last_modified": "2024-03-28T14:26:52.000000Z", "_ab_source_file_url": "https://airbyte179.sharepoint.com/Shared%20Documents/Test_folder/simple_test.jsonl"}, "emitted_at": 1711638750222} +{"stream": "test_parquet", "data": {"number": 1.0, "name": "foo", "flag": true, "delta": -1.0, "_ab_source_file_last_modified": "2024-01-17T11:47:39.000000Z", "_ab_source_file_url": "https://airbyte179.sharepoint.com/Shared%20Documents/Test_folder/test_parquet/simple_test.parquet"}, "emitted_at": 1711637274920} +{"stream": "test_parquet", "data": {"number": 2.0, "name": null, "flag": false, "delta": 2.5, "_ab_source_file_last_modified": "2024-01-17T11:47:39.000000Z", "_ab_source_file_url": "https://airbyte179.sharepoint.com/Shared%20Documents/Test_folder/test_parquet/simple_test.parquet"}, "emitted_at": 1711637274943} +{"stream": "test_parquet", "data": {"number": 3.0, "name": "bar", "flag": null, "delta": 0.1, "_ab_source_file_last_modified": "2024-01-17T11:47:39.000000Z", "_ab_source_file_url": "https://airbyte179.sharepoint.com/Shared%20Documents/Test_folder/test_parquet/simple_test.parquet"}, "emitted_at": 1711637274943} +{"stream": "test_unstructured", "data": {"content": "# A Simple PDF File\n\nThis is a small demonstration .pdf file -\n\njust for use in the Virtual Mechanics tutorials. More text. And more text. And more text. And more text. And more text.\n\nAnd more text. And more text. And more text. And more text. And more text. And more text. Boring, zzzzz. And more text. And more text. And more text. And more text. And more text. And more text. And more text. And more text. And more text.\n\nAnd more text. And more text. And more text. And more text. And more text. And more text. And more text. Even more. Continued on page 2 ...\n\n# Simple PDF File 2\n\n...continued from page 1. Yet more text. And more text. And more text. And more text. And more text. And more text. And more text. And more text. Oh, how boring typing this stuff. But not as boring as watching paint dry. And more text. And more text. And more text. And more text. Boring. More, a little more text. The end, and just as well.", "document_key": "https://airbyte179.sharepoint.com/Shared%20Documents/Test_folder/Test_foler_2_1/simple_pdf_file.pdf", "_ab_source_file_parse_error": null, "_ab_source_file_last_modified": "2023-12-15T16:47:21.000000Z", "_ab_source_file_url": "https://airbyte179.sharepoint.com/Shared%20Documents/Test_folder/Test_foler_2_1/simple_pdf_file.pdf"}, "emitted_at": 1710365942797} diff --git a/airbyte-integrations/connectors/source-microsoft-sharepoint/integration_tests/spec.json b/airbyte-integrations/connectors/source-microsoft-sharepoint/integration_tests/spec.json index 6018f9bfbdd47..b5843ec9cea08 100644 --- a/airbyte-integrations/connectors/source-microsoft-sharepoint/integration_tests/spec.json +++ b/airbyte-integrations/connectors/source-microsoft-sharepoint/integration_tests/spec.json @@ -1,480 +1,505 @@ { - "documentationUrl": "https://docs.airbyte.com/integrations/sources/one-drive", - "connectionSpecification": { - "title": "Microsoft SharePoint Source Spec", - "description": "SourceMicrosoftSharePointSpec class for Microsoft SharePoint Source Specification.\nThis class combines the authentication details with additional configuration for the SharePoint API.", - "type": "object", - "properties": { - "start_date": { - "title": "Start Date", - "description": "UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.", - "examples": ["2021-01-01T00:00:00.000000Z"], - "format": "date-time", - "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$", - "pattern_descriptor": "YYYY-MM-DDTHH:mm:ss.SSSSSSZ", - "order": 1, - "type": "string" - }, - "streams": { - "title": "The list of streams to sync", - "description": "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to warehouse destination such as Snowflake or BigQuery, each stream is a separate table.", - "order": 10, - "type": "array", - "items": { - "title": "FileBasedStreamConfig", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "The name of the stream.", - "type": "string" - }, - "globs": { - "title": "Globs", - "description": "The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here.", - "default": ["**"], - "order": 1, - "type": "array", - "items": { + "type": "SPEC", + "spec": { + "documentationUrl": "https://docs.airbyte.com/integrations/sources/microsoft-sharepoint", + "connectionSpecification": { + "title": "Microsoft SharePoint Source Spec", + "description": "SourceMicrosoftSharePointSpec class for Microsoft SharePoint Source Specification.\nThis class combines the authentication details with additional configuration for the SharePoint API.", + "type": "object", + "properties": { + "start_date": { + "title": "Start Date", + "description": "UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.", + "examples": ["2021-01-01T00:00:00.000000Z"], + "format": "date-time", + "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$", + "pattern_descriptor": "YYYY-MM-DDTHH:mm:ss.SSSSSSZ", + "order": 1, + "type": "string" + }, + "streams": { + "title": "The list of streams to sync", + "description": "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to warehouse destination such as Snowflake or BigQuery, each stream is a separate table.", + "order": 10, + "type": "array", + "items": { + "title": "FileBasedStreamConfig", + "type": "object", + "properties": { + "name": { + "title": "Name", + "description": "The name of the stream.", "type": "string" - } - }, - "validation_policy": { - "title": "Validation Policy", - "description": "The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.", - "default": "Emit Record", - "enum": ["Emit Record", "Skip Record", "Wait for Discover"] - }, - "input_schema": { - "title": "Input Schema", - "description": "The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.", - "type": "string" - }, - "primary_key": { - "title": "Primary Key", - "description": "The column or columns (for a composite key) that serves as the unique identifier of a record. If empty, the primary key will default to the parser's default primary key.", - "airbyte_hidden": true, - "type": "string" - }, - "days_to_sync_if_history_is_full": { - "title": "Days To Sync If History Is Full", - "description": "When the state history of the file store is full, syncs will only read files that were last modified in the provided day range.", - "default": 3, - "type": "integer" - }, - "format": { - "title": "Format", - "description": "The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.", - "type": "object", - "oneOf": [ - { - "title": "Avro Format", - "type": "object", - "properties": { - "filetype": { - "title": "Filetype", - "default": "avro", - "const": "avro", - "type": "string" + }, + "globs": { + "title": "Globs", + "description": "The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here.", + "default": ["**"], + "order": 1, + "type": "array", + "items": { + "type": "string" + } + }, + "validation_policy": { + "title": "Validation Policy", + "description": "The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.", + "default": "Emit Record", + "enum": ["Emit Record", "Skip Record", "Wait for Discover"] + }, + "input_schema": { + "title": "Input Schema", + "description": "The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.", + "type": "string" + }, + "primary_key": { + "title": "Primary Key", + "description": "The column or columns (for a composite key) that serves as the unique identifier of a record. If empty, the primary key will default to the parser's default primary key.", + "airbyte_hidden": true, + "type": "string" + }, + "days_to_sync_if_history_is_full": { + "title": "Days To Sync If History Is Full", + "description": "When the state history of the file store is full, syncs will only read files that were last modified in the provided day range.", + "default": 3, + "type": "integer" + }, + "format": { + "title": "Format", + "description": "The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.", + "type": "object", + "oneOf": [ + { + "title": "Avro Format", + "type": "object", + "properties": { + "filetype": { + "title": "Filetype", + "default": "avro", + "const": "avro", + "type": "string" + }, + "double_as_string": { + "title": "Convert Double Fields to Strings", + "description": "Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss precision when handling floating point numbers.", + "default": false, + "type": "boolean" + } }, - "double_as_string": { - "title": "Convert Double Fields to Strings", - "description": "Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss precision when handling floating point numbers.", - "default": false, - "type": "boolean" - } + "required": ["filetype"] }, - "required": ["filetype"] - }, - { - "title": "CSV Format", - "type": "object", - "properties": { - "filetype": { - "title": "Filetype", - "default": "csv", - "const": "csv", - "type": "string" - }, - "delimiter": { - "title": "Delimiter", - "description": "The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\\t'.", - "default": ",", - "type": "string" - }, - "quote_char": { - "title": "Quote Character", - "description": "The character used for quoting CSV values. To disallow quoting, make this field blank.", - "default": "\"", - "type": "string" - }, - "escape_char": { - "title": "Escape Character", - "description": "The character used for escaping special characters. To disallow escaping, leave this field blank.", - "type": "string" - }, - "encoding": { - "title": "Encoding", - "description": "The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.", - "default": "utf8", - "type": "string" - }, - "double_quote": { - "title": "Double Quote", - "description": "Whether two quotes in a quoted CSV value denote a single quote in the data.", - "default": true, - "type": "boolean" - }, - "null_values": { - "title": "Null Values", - "description": "A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.", - "default": [], - "type": "array", - "items": { + { + "title": "CSV Format", + "type": "object", + "properties": { + "filetype": { + "title": "Filetype", + "default": "csv", + "const": "csv", "type": "string" }, - "uniqueItems": true - }, - "strings_can_be_null": { - "title": "Strings Can Be Null", - "description": "Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself.", - "default": true, - "type": "boolean" - }, - "skip_rows_before_header": { - "title": "Skip Rows Before Header", - "description": "The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field.", - "default": 0, - "type": "integer" - }, - "skip_rows_after_header": { - "title": "Skip Rows After Header", - "description": "The number of rows to skip after the header row.", - "default": 0, - "type": "integer" - }, - "header_definition": { - "title": "CSV Header Definition", - "description": "How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.", - "default": { - "header_definition_type": "From CSV" + "delimiter": { + "title": "Delimiter", + "description": "The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\\t'.", + "default": ",", + "type": "string" }, - "oneOf": [ - { - "title": "From CSV", - "type": "object", - "properties": { - "header_definition_type": { - "title": "Header Definition Type", - "default": "From CSV", - "const": "From CSV", - "type": "string" - } - }, - "required": ["header_definition_type"] + "quote_char": { + "title": "Quote Character", + "description": "The character used for quoting CSV values. To disallow quoting, make this field blank.", + "default": "\"", + "type": "string" + }, + "escape_char": { + "title": "Escape Character", + "description": "The character used for escaping special characters. To disallow escaping, leave this field blank.", + "type": "string" + }, + "encoding": { + "title": "Encoding", + "description": "The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.", + "default": "utf8", + "type": "string" + }, + "double_quote": { + "title": "Double Quote", + "description": "Whether two quotes in a quoted CSV value denote a single quote in the data.", + "default": true, + "type": "boolean" + }, + "null_values": { + "title": "Null Values", + "description": "A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.", + "default": [], + "type": "array", + "items": { + "type": "string" }, - { - "title": "Autogenerated", - "type": "object", - "properties": { - "header_definition_type": { - "title": "Header Definition Type", - "default": "Autogenerated", - "const": "Autogenerated", - "type": "string" - } - }, - "required": ["header_definition_type"] + "uniqueItems": true + }, + "strings_can_be_null": { + "title": "Strings Can Be Null", + "description": "Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself.", + "default": true, + "type": "boolean" + }, + "skip_rows_before_header": { + "title": "Skip Rows Before Header", + "description": "The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field.", + "default": 0, + "type": "integer" + }, + "skip_rows_after_header": { + "title": "Skip Rows After Header", + "description": "The number of rows to skip after the header row.", + "default": 0, + "type": "integer" + }, + "header_definition": { + "title": "CSV Header Definition", + "description": "How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.", + "default": { + "header_definition_type": "From CSV" }, - { - "title": "User Provided", - "type": "object", - "properties": { - "header_definition_type": { - "title": "Header Definition Type", - "default": "User Provided", - "const": "User Provided", - "type": "string" + "oneOf": [ + { + "title": "From CSV", + "type": "object", + "properties": { + "header_definition_type": { + "title": "Header Definition Type", + "default": "From CSV", + "const": "From CSV", + "type": "string" + } }, - "column_names": { - "title": "Column Names", - "description": "The column names that will be used while emitting the CSV records", - "type": "array", - "items": { + "required": ["header_definition_type"] + }, + { + "title": "Autogenerated", + "type": "object", + "properties": { + "header_definition_type": { + "title": "Header Definition Type", + "default": "Autogenerated", + "const": "Autogenerated", "type": "string" } - } + }, + "required": ["header_definition_type"] }, - "required": ["column_names", "header_definition_type"] - } - ], - "type": "object" + { + "title": "User Provided", + "type": "object", + "properties": { + "header_definition_type": { + "title": "Header Definition Type", + "default": "User Provided", + "const": "User Provided", + "type": "string" + }, + "column_names": { + "title": "Column Names", + "description": "The column names that will be used while emitting the CSV records", + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "column_names", + "header_definition_type" + ] + } + ], + "type": "object" + }, + "true_values": { + "title": "True Values", + "description": "A set of case-sensitive strings that should be interpreted as true values.", + "default": ["y", "yes", "t", "true", "on", "1"], + "type": "array", + "items": { + "type": "string" + }, + "uniqueItems": true + }, + "false_values": { + "title": "False Values", + "description": "A set of case-sensitive strings that should be interpreted as false values.", + "default": ["n", "no", "f", "false", "off", "0"], + "type": "array", + "items": { + "type": "string" + }, + "uniqueItems": true + } }, - "true_values": { - "title": "True Values", - "description": "A set of case-sensitive strings that should be interpreted as true values.", - "default": ["y", "yes", "t", "true", "on", "1"], - "type": "array", - "items": { + "required": ["filetype"] + }, + { + "title": "Jsonl Format", + "type": "object", + "properties": { + "filetype": { + "title": "Filetype", + "default": "jsonl", + "const": "jsonl", "type": "string" - }, - "uniqueItems": true + } }, - "false_values": { - "title": "False Values", - "description": "A set of case-sensitive strings that should be interpreted as false values.", - "default": ["n", "no", "f", "false", "off", "0"], - "type": "array", - "items": { + "required": ["filetype"] + }, + { + "title": "Parquet Format", + "type": "object", + "properties": { + "filetype": { + "title": "Filetype", + "default": "parquet", + "const": "parquet", "type": "string" }, - "uniqueItems": true - } + "decimal_as_float": { + "title": "Convert Decimal Fields to Floats", + "description": "Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended.", + "default": false, + "type": "boolean" + } + }, + "required": ["filetype"] }, - "required": ["filetype"] + { + "title": "Document File Type Format (Experimental)", + "type": "object", + "properties": { + "filetype": { + "title": "Filetype", + "default": "unstructured", + "const": "unstructured", + "type": "string" + }, + "skip_unprocessable_files": { + "title": "Skip Unprocessable Files", + "description": "If true, skip files that cannot be parsed and pass the error message along as the _ab_source_file_parse_error field. If false, fail the sync.", + "default": true, + "always_show": true, + "type": "boolean" + }, + "strategy": { + "title": "Parsing Strategy", + "description": "The strategy used to parse documents. `fast` extracts text directly from the document which doesn't work for all files. `ocr_only` is more reliable, but slower. `hi_res` is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf", + "default": "auto", + "always_show": true, + "order": 0, + "enum": ["auto", "fast", "ocr_only", "hi_res"], + "type": "string" + }, + "processing": { + "title": "Processing", + "description": "Processing configuration", + "default": { + "mode": "local" + }, + "type": "object", + "oneOf": [ + { + "title": "Local", + "type": "object", + "properties": { + "mode": { + "title": "Mode", + "default": "local", + "const": "local", + "enum": ["local"], + "type": "string" + } + }, + "description": "Process files locally, supporting `fast` and `ocr` modes. This is the default option.", + "required": ["mode"] + } + ] + } + }, + "description": "Extract text from document formats (.pdf, .docx, .md, .pptx) and emit as one record per file.", + "required": ["filetype"] + } + ] + }, + "schemaless": { + "title": "Schemaless", + "description": "When enabled, syncs will not validate or structure records against the stream's schema.", + "default": false, + "type": "boolean" + } + }, + "required": ["name", "format"] + } + }, + "credentials": { + "title": "Authentication", + "description": "Credentials for connecting to the One Drive API", + "type": "object", + "order": 0, + "oneOf": [ + { + "title": "Authenticate via Microsoft (OAuth)", + "description": "OAuthCredentials class to hold authentication details for Microsoft OAuth authentication.\nThis class uses pydantic for data validation and settings management.", + "type": "object", + "properties": { + "auth_type": { + "title": "Auth Type", + "default": "Client", + "const": "Client", + "enum": ["Client"], + "type": "string" }, - { - "title": "Jsonl Format", - "type": "object", - "properties": { - "filetype": { - "title": "Filetype", - "default": "jsonl", - "const": "jsonl", - "type": "string" - } - }, - "required": ["filetype"] + "tenant_id": { + "title": "Tenant ID", + "description": "Tenant ID of the Microsoft SharePoint user", + "airbyte_secret": true, + "type": "string" }, - { - "title": "Parquet Format", - "type": "object", - "properties": { - "filetype": { - "title": "Filetype", - "default": "parquet", - "const": "parquet", - "type": "string" - }, - "decimal_as_float": { - "title": "Convert Decimal Fields to Floats", - "description": "Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended.", - "default": false, - "type": "boolean" - } - }, - "required": ["filetype"] + "client_id": { + "title": "Client ID", + "description": "Client ID of your Microsoft developer application", + "airbyte_secret": true, + "type": "string" }, - { - "title": "Document File Type Format (Experimental)", - "type": "object", - "properties": { - "filetype": { - "title": "Filetype", - "default": "unstructured", - "const": "unstructured", - "type": "string" - }, - "skip_unprocessable_files": { - "title": "Skip Unprocessable Files", - "description": "If true, skip files that cannot be parsed and pass the error message along as the _ab_source_file_parse_error field. If false, fail the sync.", - "default": true, - "always_show": true, - "type": "boolean" - }, - "strategy": { - "title": "Parsing Strategy", - "description": "The strategy used to parse documents. `fast` extracts text directly from the document which doesn't work for all files. `ocr_only` is more reliable, but slower. `hi_res` is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf", - "default": "auto", - "always_show": true, - "order": 0, - "enum": ["auto", "fast", "ocr_only", "hi_res"], - "type": "string" - }, - "processing": { - "title": "Processing", - "description": "Processing configuration", - "default": { - "mode": "local" - }, - "type": "object", - "oneOf": [ - { - "title": "Local", - "type": "object", - "properties": { - "mode": { - "title": "Mode", - "default": "local", - "const": "local", - "enum": ["local"], - "type": "string" - } - }, - "description": "Process files locally, supporting `fast` and `ocr` modes. This is the default option.", - "required": ["mode"] - } - ] - } - }, - "description": "Extract text from document formats (.pdf, .docx, .md, .pptx) and emit as one record per file.", - "required": ["filetype"] + "client_secret": { + "title": "Client Secret", + "description": "Client Secret of your Microsoft developer application", + "airbyte_secret": true, + "type": "string" + }, + "refresh_token": { + "title": "Refresh Token", + "description": "Refresh Token of your Microsoft developer application", + "airbyte_secret": true, + "type": "string" } + }, + "required": [ + "tenant_id", + "client_id", + "client_secret", + "refresh_token" ] }, - "schemaless": { - "title": "Schemaless", - "description": "When enabled, syncs will not validate or structure records against the stream's schema.", - "default": false, - "type": "boolean" + { + "title": "Service Key Authentication", + "description": "ServiceCredentials class for service key authentication.\nThis class is structured similarly to OAuthCredentials but for a different authentication method.", + "type": "object", + "properties": { + "auth_type": { + "title": "Auth Type", + "default": "Service", + "const": "Service", + "enum": ["Service"], + "type": "string" + }, + "tenant_id": { + "title": "Tenant ID", + "description": "Tenant ID of the Microsoft SharePoint user", + "airbyte_secret": true, + "type": "string" + }, + "user_principal_name": { + "title": "User Principal Name", + "description": "Special characters such as a period, comma, space, and the at sign (@) are converted to underscores (_). More details: https://learn.microsoft.com/en-us/sharepoint/list-onedrive-urls", + "airbyte_secret": true, + "type": "string" + }, + "client_id": { + "title": "Client ID", + "description": "Client ID of your Microsoft developer application", + "airbyte_secret": true, + "type": "string" + }, + "client_secret": { + "title": "Client Secret", + "description": "Client Secret of your Microsoft developer application", + "airbyte_secret": true, + "type": "string" + } + }, + "required": [ + "tenant_id", + "user_principal_name", + "client_id", + "client_secret" + ] } - }, - "required": ["name", "format"] + ] + }, + "search_scope": { + "title": "Search Scope", + "description": "Specifies the location(s) to search for files. Valid options are 'ACCESSIBLE_DRIVES' for all SharePoint drives the user can access, 'SHARED_ITEMS' for shared items the user has access to, and 'ALL' to search both.", + "default": "ALL", + "enum": ["ACCESSIBLE_DRIVES", "SHARED_ITEMS", "ALL"], + "order": 3, + "type": "string" + }, + "folder_path": { + "title": "Folder Path", + "description": "Path to a specific folder within the drives to search for files. Leave empty to search all folders of the drives. This does not apply to shared items.", + "default": ".", + "order": 4, + "type": "string" } }, - "credentials": { - "title": "Authentication", - "description": "Credentials for connecting to the One Drive API", - "type": "object", - "order": 0, - "oneOf": [ - { - "title": "Authenticate via Microsoft (OAuth)", - "description": "OAuthCredentials class to hold authentication details for Microsoft OAuth authentication.\nThis class uses pydantic for data validation and settings management.", - "type": "object", - "properties": { - "auth_type": { - "title": "Auth Type", - "default": "Client", - "const": "Client", - "enum": ["Client"], - "type": "string" - }, - "tenant_id": { - "title": "Tenant ID", - "description": "Tenant ID of the Microsoft SharePoint user", - "airbyte_secret": true, - "type": "string" - }, - "client_id": { - "title": "Client ID", - "description": "Client ID of your Microsoft developer application", - "airbyte_secret": true, - "type": "string" - }, - "client_secret": { - "title": "Client Secret", - "description": "Client Secret of your Microsoft developer application", - "airbyte_secret": true, - "type": "string" - }, - "refresh_token": { - "title": "Refresh Token", - "description": "Refresh Token of your Microsoft developer application", - "airbyte_secret": true, - "type": "string" - } - }, - "required": [ - "tenant_id", - "client_id", - "client_secret", - "refresh_token" - ] - }, - { - "title": "Service Key Authentication", - "description": "ServiceCredentials class for service key authentication.\nThis class is structured similarly to OAuthCredentials but for a different authentication method.", - "type": "object", - "properties": { - "auth_type": { - "title": "Auth Type", - "default": "Service", - "const": "Service", - "enum": ["Service"], - "type": "string" - }, - "tenant_id": { - "title": "Tenant ID", - "description": "Tenant ID of the Microsoft SharePoint user", - "airbyte_secret": true, - "type": "string" - }, - "user_principal_name": { - "title": "User Principal Name", - "description": "Special characters such as a period, comma, space, and the at sign (@) are converted to underscores (_). More details: https://learn.microsoft.com/en-us/sharepoint/list-onedrive-urls", - "airbyte_secret": true, - "type": "string" - }, - "client_id": { - "title": "Client ID", - "description": "Client ID of your Microsoft developer application", - "airbyte_secret": true, - "type": "string" - }, - "client_secret": { - "title": "Client Secret", - "description": "Client Secret of your Microsoft developer application", - "airbyte_secret": true, - "type": "string" - } - }, - "required": [ - "tenant_id", - "user_principal_name", - "client_id", - "client_secret" - ] - } - ] - }, - "folder_path": { - "title": "Folder Path", - "description": "Path to folder of the Microsoft SharePoint drive where the file(s) exist.", - "order": 3, - "type": "string" - } + "required": ["streams", "credentials"] }, - "required": ["streams", "credentials", "folder_path"] - }, - "advanced_auth": { - "auth_flow_type": "oauth2.0", - "predicate_key": ["credentials", "auth_type"], - "predicate_value": "Client", - "oauth_config_specification": { - "complete_oauth_output_specification": { - "type": "object", - "additionalProperties": false, - "properties": { - "refresh_token": { - "type": "string", - "path_in_connector_config": ["credentials", "refresh_token"] + "advanced_auth": { + "auth_flow_type": "oauth2.0", + "predicate_key": ["credentials", "auth_type"], + "predicate_value": "Client", + "oauth_config_specification": { + "oauth_user_input_from_connector_config_specification": { + "type": "object", + "additionalProperties": false, + "properties": { + "tenant_id": { + "type": "string", + "path_in_connector_config": ["credentials", "tenant_id"] + } } - } - }, - "complete_oauth_server_input_specification": { - "type": "object", - "additionalProperties": false, - "properties": { - "client_id": { - "type": "string" - }, - "client_secret": { - "type": "string" + }, + "complete_oauth_output_specification": { + "type": "object", + "additionalProperties": false, + "properties": { + "refresh_token": { + "type": "string", + "path_in_connector_config": ["credentials", "refresh_token"] + } } - } - }, - "complete_oauth_server_output_specification": { - "type": "object", - "additionalProperties": false, - "properties": { - "client_id": { - "type": "string", - "path_in_connector_config": ["credentials", "client_id"] - }, - "client_secret": { - "type": "string", - "path_in_connector_config": ["credentials", "client_secret"] + }, + "complete_oauth_server_input_specification": { + "type": "object", + "additionalProperties": false, + "properties": { + "client_id": { + "type": "string" + }, + "client_secret": { + "type": "string" + } + } + }, + "complete_oauth_server_output_specification": { + "type": "object", + "additionalProperties": false, + "properties": { + "client_id": { + "type": "string", + "path_in_connector_config": ["credentials", "client_id"] + }, + "client_secret": { + "type": "string", + "path_in_connector_config": ["credentials", "client_secret"] + } } } } diff --git a/airbyte-integrations/connectors/source-microsoft-sharepoint/main.py b/airbyte-integrations/connectors/source-microsoft-sharepoint/main.py index d9b4eee715dcb..4823f26520493 100644 --- a/airbyte-integrations/connectors/source-microsoft-sharepoint/main.py +++ b/airbyte-integrations/connectors/source-microsoft-sharepoint/main.py @@ -2,15 +2,7 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # - -import sys - -from airbyte_cdk import AirbyteEntrypoint -from airbyte_cdk.entrypoint import launch -from source_microsoft_sharepoint import SourceMicrosoftSharePoint +from source_microsoft_sharepoint.run import run if __name__ == "__main__": - args = sys.argv[1:] - catalog_path = AirbyteEntrypoint.extract_catalog(args) - source = SourceMicrosoftSharePoint(catalog_path) - launch(source, args) + run() diff --git a/airbyte-integrations/connectors/source-microsoft-sharepoint/metadata.yaml b/airbyte-integrations/connectors/source-microsoft-sharepoint/metadata.yaml index a96fa802e95f6..f893fa125da0a 100644 --- a/airbyte-integrations/connectors/source-microsoft-sharepoint/metadata.yaml +++ b/airbyte-integrations/connectors/source-microsoft-sharepoint/metadata.yaml @@ -1,11 +1,15 @@ data: ab_internal: - ql: 300 + ql: 400 sl: 200 allowedHosts: hosts: - graph.microsoft.com - login.microsoftonline.com + remoteRegistries: + pypi: + enabled: true + packageName: airbyte-source-microsoft-sharepoint registries: oss: enabled: true @@ -16,13 +20,13 @@ data: connectorSubtype: file connectorType: source definitionId: 59353119-f0f2-4e5a-a8ba-15d887bc34f6 - dockerImageTag: 0.1.0 + dockerImageTag: 0.2.2 dockerRepository: airbyte/source-microsoft-sharepoint githubIssueLabel: source-microsoft-sharepoint icon: microsoft-sharepoint.svg license: MIT name: Microsoft SharePoint - supportLevel: community + supportLevel: certified releaseStage: alpha releaseDate: 2024-02-02 documentationUrl: https://docs.airbyte.com/integrations/sources/microsoft-sharepoint diff --git a/airbyte-integrations/connectors/source-microsoft-sharepoint/poetry.lock b/airbyte-integrations/connectors/source-microsoft-sharepoint/poetry.lock new file mode 100644 index 0000000000000..738a50b0cb4dd --- /dev/null +++ b/airbyte-integrations/connectors/source-microsoft-sharepoint/poetry.lock @@ -0,0 +1,2288 @@ +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. + +[[package]] +name = "airbyte-cdk" +version = "0.74.0" +description = "A framework for writing Airbyte Connectors." +optional = false +python-versions = ">=3.9" +files = [ + {file = "airbyte-cdk-0.74.0.tar.gz", hash = "sha256:74241a055c205403a951383f43801067b7f451370e14d553d13d0cc476cbfff7"}, + {file = "airbyte_cdk-0.74.0-py3-none-any.whl", hash = "sha256:7e5b201d69ec0e7daab7e627dbc6add4dbba4a2f779132e86aaf6713650ff4d5"}, +] + +[package.dependencies] +airbyte-protocol-models = "0.5.1" +avro = {version = ">=1.11.2,<1.12.0", optional = true, markers = "extra == \"file-based\""} +backoff = "*" +cachetools = "*" +Deprecated = ">=1.2,<2.0" +dpath = ">=2.0.1,<2.1.0" +fastavro = {version = ">=1.8.0,<1.9.0", optional = true, markers = "extra == \"file-based\""} +genson = "1.2.2" +isodate = ">=0.6.1,<0.7.0" +Jinja2 = ">=3.1.2,<3.2.0" +jsonref = ">=0.2,<1.0" +jsonschema = ">=3.2.0,<3.3.0" +markdown = {version = "*", optional = true, markers = "extra == \"file-based\""} +pdf2image = {version = "1.16.3", optional = true, markers = "extra == \"file-based\""} +"pdfminer.six" = {version = "20221105", optional = true, markers = "extra == \"file-based\""} +pendulum = "<3.0.0" +pyarrow = {version = ">=15.0.0,<15.1.0", optional = true, markers = "extra == \"file-based\""} +pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" +pytesseract = {version = "0.3.10", optional = true, markers = "extra == \"file-based\""} +python-dateutil = "*" +PyYAML = ">=6.0.1" +requests = "*" +requests-cache = "*" +unstructured = [ + {version = "0.10.27", optional = true, markers = "extra == \"file-based\""}, + {version = "0.10.27", extras = ["docx", "pptx"], optional = true, markers = "extra == \"file-based\""}, +] +"unstructured.pytesseract" = {version = ">=0.3.12", optional = true, markers = "extra == \"file-based\""} +wcmatch = "8.4" + +[package.extras] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] +vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] + +[[package]] +name = "airbyte-protocol-models" +version = "0.5.1" +description = "Declares the Airbyte Protocol." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, + {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, +] + +[package.dependencies] +pydantic = ">=1.9.2,<2.0.0" + +[[package]] +name = "atomicwrites" +version = "1.4.1" +description = "Atomic file writes." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, +] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "avro" +version = "1.11.3" +description = "Avro is a serialization and RPC framework." +optional = false +python-versions = ">=3.6" +files = [ + {file = "avro-1.11.3.tar.gz", hash = "sha256:3393bb5139f9cf0791d205756ce1e39a5b58586af5b153d6a3b5a199610e9d17"}, +] + +[package.extras] +snappy = ["python-snappy"] +zstandard = ["zstandard"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "beautifulsoup4" +version = "4.12.3" +description = "Screen-scraping library" +optional = false +python-versions = ">=3.6.0" +files = [ + {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, + {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, +] + +[package.dependencies] +soupsieve = ">1.2" + +[package.extras] +cchardet = ["cchardet"] +chardet = ["chardet"] +charset-normalizer = ["charset-normalizer"] +html5lib = ["html5lib"] +lxml = ["lxml"] + +[[package]] +name = "bracex" +version = "2.4" +description = "Bash style brace expander." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + +[[package]] +name = "cachetools" +version = "5.3.3" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "cffi" +version = "1.16.0" +description = "Foreign Function Interface for Python calling C code." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, + {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, + {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, + {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, + {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "chardet" +version = "5.2.0" +description = "Universal encoding detector for Python 3" +optional = false +python-versions = ">=3.7" +files = [ + {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"}, + {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "cryptography" +version = "42.0.5" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, + {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, + {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, + {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, + {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, + {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = "sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, + {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, +] + +[package.dependencies] +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] +nox = ["nox"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "dataclasses-json" +version = "0.6.4" +description = "Easily serialize dataclasses to and from JSON." +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "dataclasses_json-0.6.4-py3-none-any.whl", hash = "sha256:f90578b8a3177f7552f4e1a6e535e84293cd5da421fcce0642d49c0d7bdf8df2"}, + {file = "dataclasses_json-0.6.4.tar.gz", hash = "sha256:73696ebf24936560cca79a2430cbc4f3dd23ac7bf46ed17f38e5e5e7657a6377"}, +] + +[package.dependencies] +marshmallow = ">=3.18.0,<4.0.0" +typing-inspect = ">=0.4.0,<1" + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "dpath" +version = "2.0.8" +description = "Filesystem-like pathing and searching for dictionaries" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, + {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, +] + +[[package]] +name = "emoji" +version = "2.10.1" +description = "Emoji for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "emoji-2.10.1-py2.py3-none-any.whl", hash = "sha256:11fb369ea79d20c14efa4362c732d67126df294a7959a2c98bfd7447c12a218e"}, + {file = "emoji-2.10.1.tar.gz", hash = "sha256:16287283518fb7141bde00198f9ffff4e1c1cb570efb68b2f1ec50975c3a581d"}, +] + +[package.extras] +dev = ["coverage", "coveralls", "pytest"] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "fastavro" +version = "1.8.4" +description = "Fast read/write of AVRO files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fastavro-1.8.4-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:7afe1475e8a967c04e2b0ef4d33bc10bffa66b4fa6e08bd2ee9d91b6768cba2a"}, + {file = "fastavro-1.8.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5fd73609f3c1ac0d90ae3179d2fb9d788f842245db2656ff9225fce871fc5b7"}, + {file = "fastavro-1.8.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78fdf1ba47e43146af72ac48d7b2247a06c4f2d95dfdaad6129c481014b07a6b"}, + {file = "fastavro-1.8.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d950542b3263653f00b695cbc728b5c60ab9ea6df32a7017ad9a6a67235386e7"}, + {file = "fastavro-1.8.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ce2ccfa9aff8df6da683c48542b7b2a216dde6d3a4d1c505c5e1b8ca2ec0abbb"}, + {file = "fastavro-1.8.4-cp310-cp310-win_amd64.whl", hash = "sha256:f12f9914d6196695d3208ea348145a80d0defefe16b8a226373fe8ce68f66139"}, + {file = "fastavro-1.8.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d353aec9c000b96c33ad285651a2cba0f87fe50fcdecc6120689996af427194d"}, + {file = "fastavro-1.8.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1eaed91d6e1fb06c172e0aaf4b1ca1fd019c3f4a481e314bf783a4c74f6b7015"}, + {file = "fastavro-1.8.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9293b303955acd34a6f69dd4ef3465bd575dbde0cd3e3f00227a0ba5012430b4"}, + {file = "fastavro-1.8.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b79baefd61554d9f03c4beaebbe638ef175d0efc1fb01f25e88ee6ae97985ab3"}, + {file = "fastavro-1.8.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:14d7cd3df019d41c66733b8bf5d983af9e1f601d4cb162853a49238a4087d6b0"}, + {file = "fastavro-1.8.4-cp311-cp311-win_amd64.whl", hash = "sha256:c8fb27001b7023910969f15bee2c9205c4e9f40713929d6c1dca8f470fc8fc80"}, + {file = "fastavro-1.8.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e331229acef15f858d9863ced7b629ebef4bd5f80766d367255e51cbf44f8dab"}, + {file = "fastavro-1.8.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04e26b3ba288bd423f25630a3b9bd70cc61b46c6f6161de35e398a6fc8f260f0"}, + {file = "fastavro-1.8.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6281f4555659ed658b195d1618a637504013e57b680d6cbad7c726e9a4e2cf0b"}, + {file = "fastavro-1.8.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3201880149e1fb807d616ab46b338a26788173a9f4e8a3396ae145e86af878a1"}, + {file = "fastavro-1.8.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:39771719fa04b8321eeebfb0813eaa2723c20e5bf570bcca3f53f1169099a0d7"}, + {file = "fastavro-1.8.4-cp312-cp312-win_amd64.whl", hash = "sha256:7095ae37a5c46dacb7ef430092e5f94650f576be281487b72050c1cf12e4ee20"}, + {file = "fastavro-1.8.4-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:eb76f5bfcde91cde240c93594dae47670cdf1a95d7e5d0dc3ccdef57c6c1c183"}, + {file = "fastavro-1.8.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71ebe1cf090f800ca7d4c64d50c81c2a88c56e6ef6aa5eb61ec425e7ae723617"}, + {file = "fastavro-1.8.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9f0ef601943ea11cd02a59c57f5588cea3e300ac67608f53c904ec7aeddd232"}, + {file = "fastavro-1.8.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1060318f3ab31bcc4b2184cea3763305b773163381601e304000da81a2f7e11f"}, + {file = "fastavro-1.8.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:01c8c7f22172174f2c2c0922801b552fbca75758f84b0ad3cd6f3e505a76ed05"}, + {file = "fastavro-1.8.4-cp38-cp38-win_amd64.whl", hash = "sha256:bc8a1af80b8face4a41d8526a34b6474a874f7367a900d0b14752eacebb7a2b8"}, + {file = "fastavro-1.8.4-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:687a2f8fa83a76906c4ec35c9d0500e13a567fc631845f0e47646c48233c7725"}, + {file = "fastavro-1.8.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b921c63fcfb9149a32c40a9cd27b0e900fcda602455cbce4d773300019b9ce2"}, + {file = "fastavro-1.8.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2610a8683b10be7aaa532ddddbcb719883ee2d6f09dafd4a4a7b46d5d719fc07"}, + {file = "fastavro-1.8.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:94448dc749d098f846f6a6d82d59f85483bd6fcdecfb6234daac5f4494ae4156"}, + {file = "fastavro-1.8.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2d39c6b5db7014a3722a7d206310874430486f4895161911b6b6574cb1a6c48f"}, + {file = "fastavro-1.8.4-cp39-cp39-win_amd64.whl", hash = "sha256:3b73472e8da33bcbf08ec989996637aea04eaca71058bb6d45def6fa4168f541"}, + {file = "fastavro-1.8.4.tar.gz", hash = "sha256:dae6118da27e81abf5957dc79a6d778888fc1bbf67645f52959cb2faba95beff"}, +] + +[package.extras] +codecs = ["lz4", "python-snappy", "zstandard"] +lz4 = ["lz4"] +snappy = ["python-snappy"] +zstandard = ["zstandard"] + +[[package]] +name = "filetype" +version = "1.2.0" +description = "Infer file type and MIME type of any file/buffer. No external dependencies." +optional = false +python-versions = "*" +files = [ + {file = "filetype-1.2.0-py2.py3-none-any.whl", hash = "sha256:7ce71b6880181241cf7ac8697a2f1eb6a8bd9b429f7ad6d27b8db9ba5f1c2d25"}, + {file = "filetype-1.2.0.tar.gz", hash = "sha256:66b56cd6474bf41d8c54660347d37afcc3f7d1970648de365c102ef77548aadb"}, +] + +[[package]] +name = "genson" +version = "1.2.2" +description = "GenSON is a powerful, user-friendly JSON Schema generator." +optional = false +python-versions = "*" +files = [ + {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, +] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "importlib-metadata" +version = "7.1.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"}, + {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "joblib" +version = "1.3.2" +description = "Lightweight pipelining with Python functions" +optional = false +python-versions = ">=3.7" +files = [ + {file = "joblib-1.3.2-py3-none-any.whl", hash = "sha256:ef4331c65f239985f3f2220ecc87db222f08fd22097a3dd5698f693875f8cbb9"}, + {file = "joblib-1.3.2.tar.gz", hash = "sha256:92f865e621e17784e7955080b6d042489e3b8e294949cc44c6eac304f59772b1"}, +] + +[[package]] +name = "jsonref" +version = "0.3.0" +description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." +optional = false +python-versions = ">=3.3,<4.0" +files = [ + {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, + {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, +] + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0" +setuptools = "*" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] + +[[package]] +name = "langdetect" +version = "1.0.9" +description = "Language detection library ported from Google's language-detection." +optional = false +python-versions = "*" +files = [ + {file = "langdetect-1.0.9-py2-none-any.whl", hash = "sha256:7cbc0746252f19e76f77c0b1690aadf01963be835ef0cd4b56dddf2a8f1dfc2a"}, + {file = "langdetect-1.0.9.tar.gz", hash = "sha256:cbc1fef89f8d062739774bd51eda3da3274006b3661d199c2655f6b3f6d605a0"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "lxml" +version = "5.1.0" +description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." +optional = false +python-versions = ">=3.6" +files = [ + {file = "lxml-5.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:704f5572ff473a5f897745abebc6df40f22d4133c1e0a1f124e4f2bd3330ff7e"}, + {file = "lxml-5.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9d3c0f8567ffe7502d969c2c1b809892dc793b5d0665f602aad19895f8d508da"}, + {file = "lxml-5.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5fcfbebdb0c5d8d18b84118842f31965d59ee3e66996ac842e21f957eb76138c"}, + {file = "lxml-5.1.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f37c6d7106a9d6f0708d4e164b707037b7380fcd0b04c5bd9cae1fb46a856fb"}, + {file = "lxml-5.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2befa20a13f1a75c751f47e00929fb3433d67eb9923c2c0b364de449121f447c"}, + {file = "lxml-5.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22b7ee4c35f374e2c20337a95502057964d7e35b996b1c667b5c65c567d2252a"}, + {file = "lxml-5.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bf8443781533b8d37b295016a4b53c1494fa9a03573c09ca5104550c138d5c05"}, + {file = "lxml-5.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:82bddf0e72cb2af3cbba7cec1d2fd11fda0de6be8f4492223d4a268713ef2147"}, + {file = "lxml-5.1.0-cp310-cp310-win32.whl", hash = "sha256:b66aa6357b265670bb574f050ffceefb98549c721cf28351b748be1ef9577d93"}, + {file = "lxml-5.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:4946e7f59b7b6a9e27bef34422f645e9a368cb2be11bf1ef3cafc39a1f6ba68d"}, + {file = "lxml-5.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:14deca1460b4b0f6b01f1ddc9557704e8b365f55c63070463f6c18619ebf964f"}, + {file = "lxml-5.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ed8c3d2cd329bf779b7ed38db176738f3f8be637bb395ce9629fc76f78afe3d4"}, + {file = "lxml-5.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:436a943c2900bb98123b06437cdd30580a61340fbdb7b28aaf345a459c19046a"}, + {file = "lxml-5.1.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:acb6b2f96f60f70e7f34efe0c3ea34ca63f19ca63ce90019c6cbca6b676e81fa"}, + {file = "lxml-5.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af8920ce4a55ff41167ddbc20077f5698c2e710ad3353d32a07d3264f3a2021e"}, + {file = "lxml-5.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cfced4a069003d8913408e10ca8ed092c49a7f6cefee9bb74b6b3e860683b45"}, + {file = "lxml-5.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9e5ac3437746189a9b4121db2a7b86056ac8786b12e88838696899328fc44bb2"}, + {file = "lxml-5.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f4c9bda132ad108b387c33fabfea47866af87f4ea6ffb79418004f0521e63204"}, + {file = "lxml-5.1.0-cp311-cp311-win32.whl", hash = "sha256:bc64d1b1dab08f679fb89c368f4c05693f58a9faf744c4d390d7ed1d8223869b"}, + {file = "lxml-5.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:a5ab722ae5a873d8dcee1f5f45ddd93c34210aed44ff2dc643b5025981908cda"}, + {file = "lxml-5.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9aa543980ab1fbf1720969af1d99095a548ea42e00361e727c58a40832439114"}, + {file = "lxml-5.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6f11b77ec0979f7e4dc5ae081325a2946f1fe424148d3945f943ceaede98adb8"}, + {file = "lxml-5.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a36c506e5f8aeb40680491d39ed94670487ce6614b9d27cabe45d94cd5d63e1e"}, + {file = "lxml-5.1.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f643ffd2669ffd4b5a3e9b41c909b72b2a1d5e4915da90a77e119b8d48ce867a"}, + {file = "lxml-5.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16dd953fb719f0ffc5bc067428fc9e88f599e15723a85618c45847c96f11f431"}, + {file = "lxml-5.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16018f7099245157564d7148165132c70adb272fb5a17c048ba70d9cc542a1a1"}, + {file = "lxml-5.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:82cd34f1081ae4ea2ede3d52f71b7be313756e99b4b5f829f89b12da552d3aa3"}, + {file = "lxml-5.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:19a1bc898ae9f06bccb7c3e1dfd73897ecbbd2c96afe9095a6026016e5ca97b8"}, + {file = "lxml-5.1.0-cp312-cp312-win32.whl", hash = "sha256:13521a321a25c641b9ea127ef478b580b5ec82aa2e9fc076c86169d161798b01"}, + {file = "lxml-5.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:1ad17c20e3666c035db502c78b86e58ff6b5991906e55bdbef94977700c72623"}, + {file = "lxml-5.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:24ef5a4631c0b6cceaf2dbca21687e29725b7c4e171f33a8f8ce23c12558ded1"}, + {file = "lxml-5.1.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d2900b7f5318bc7ad8631d3d40190b95ef2aa8cc59473b73b294e4a55e9f30f"}, + {file = "lxml-5.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:601f4a75797d7a770daed8b42b97cd1bb1ba18bd51a9382077a6a247a12aa38d"}, + {file = "lxml-5.1.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4b68c961b5cc402cbd99cca5eb2547e46ce77260eb705f4d117fd9c3f932b95"}, + {file = "lxml-5.1.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:afd825e30f8d1f521713a5669b63657bcfe5980a916c95855060048b88e1adb7"}, + {file = "lxml-5.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:262bc5f512a66b527d026518507e78c2f9c2bd9eb5c8aeeb9f0eb43fcb69dc67"}, + {file = "lxml-5.1.0-cp36-cp36m-win32.whl", hash = "sha256:e856c1c7255c739434489ec9c8aa9cdf5179785d10ff20add308b5d673bed5cd"}, + {file = "lxml-5.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:c7257171bb8d4432fe9d6fdde4d55fdbe663a63636a17f7f9aaba9bcb3153ad7"}, + {file = "lxml-5.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b9e240ae0ba96477682aa87899d94ddec1cc7926f9df29b1dd57b39e797d5ab5"}, + {file = "lxml-5.1.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a96f02ba1bcd330807fc060ed91d1f7a20853da6dd449e5da4b09bfcc08fdcf5"}, + {file = "lxml-5.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3898ae2b58eeafedfe99e542a17859017d72d7f6a63de0f04f99c2cb125936"}, + {file = "lxml-5.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61c5a7edbd7c695e54fca029ceb351fc45cd8860119a0f83e48be44e1c464862"}, + {file = "lxml-5.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3aeca824b38ca78d9ee2ab82bd9883083d0492d9d17df065ba3b94e88e4d7ee6"}, + {file = "lxml-5.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8f52fe6859b9db71ee609b0c0a70fea5f1e71c3462ecf144ca800d3f434f0764"}, + {file = "lxml-5.1.0-cp37-cp37m-win32.whl", hash = "sha256:d42e3a3fc18acc88b838efded0e6ec3edf3e328a58c68fbd36a7263a874906c8"}, + {file = "lxml-5.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:eac68f96539b32fce2c9b47eb7c25bb2582bdaf1bbb360d25f564ee9e04c542b"}, + {file = "lxml-5.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ae15347a88cf8af0949a9872b57a320d2605ae069bcdf047677318bc0bba45b1"}, + {file = "lxml-5.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c26aab6ea9c54d3bed716b8851c8bfc40cb249b8e9880e250d1eddde9f709bf5"}, + {file = "lxml-5.1.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:342e95bddec3a698ac24378d61996b3ee5ba9acfeb253986002ac53c9a5f6f84"}, + {file = "lxml-5.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:725e171e0b99a66ec8605ac77fa12239dbe061482ac854d25720e2294652eeaa"}, + {file = "lxml-5.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d184e0d5c918cff04cdde9dbdf9600e960161d773666958c9d7b565ccc60c45"}, + {file = "lxml-5.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:98f3f020a2b736566c707c8e034945c02aa94e124c24f77ca097c446f81b01f1"}, + {file = "lxml-5.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d48fc57e7c1e3df57be5ae8614bab6d4e7b60f65c5457915c26892c41afc59e"}, + {file = "lxml-5.1.0-cp38-cp38-win32.whl", hash = "sha256:7ec465e6549ed97e9f1e5ed51c657c9ede767bc1c11552f7f4d022c4df4a977a"}, + {file = "lxml-5.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:b21b4031b53d25b0858d4e124f2f9131ffc1530431c6d1321805c90da78388d1"}, + {file = "lxml-5.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:52427a7eadc98f9e62cb1368a5079ae826f94f05755d2d567d93ee1bc3ceb354"}, + {file = "lxml-5.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6a2a2c724d97c1eb8cf966b16ca2915566a4904b9aad2ed9a09c748ffe14f969"}, + {file = "lxml-5.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:843b9c835580d52828d8f69ea4302537337a21e6b4f1ec711a52241ba4a824f3"}, + {file = "lxml-5.1.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b99f564659cfa704a2dd82d0684207b1aadf7d02d33e54845f9fc78e06b7581"}, + {file = "lxml-5.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f8b0c78e7aac24979ef09b7f50da871c2de2def043d468c4b41f512d831e912"}, + {file = "lxml-5.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9bcf86dfc8ff3e992fed847c077bd875d9e0ba2fa25d859c3a0f0f76f07f0c8d"}, + {file = "lxml-5.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:49a9b4af45e8b925e1cd6f3b15bbba2c81e7dba6dce170c677c9cda547411e14"}, + {file = "lxml-5.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:280f3edf15c2a967d923bcfb1f8f15337ad36f93525828b40a0f9d6c2ad24890"}, + {file = "lxml-5.1.0-cp39-cp39-win32.whl", hash = "sha256:ed7326563024b6e91fef6b6c7a1a2ff0a71b97793ac33dbbcf38f6005e51ff6e"}, + {file = "lxml-5.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:8d7b4beebb178e9183138f552238f7e6613162a42164233e2bda00cb3afac58f"}, + {file = "lxml-5.1.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9bd0ae7cc2b85320abd5e0abad5ccee5564ed5f0cc90245d2f9a8ef330a8deae"}, + {file = "lxml-5.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8c1d679df4361408b628f42b26a5d62bd3e9ba7f0c0e7969f925021554755aa"}, + {file = "lxml-5.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2ad3a8ce9e8a767131061a22cd28fdffa3cd2dc193f399ff7b81777f3520e372"}, + {file = "lxml-5.1.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:304128394c9c22b6569eba2a6d98392b56fbdfbad58f83ea702530be80d0f9df"}, + {file = "lxml-5.1.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d74fcaf87132ffc0447b3c685a9f862ffb5b43e70ea6beec2fb8057d5d2a1fea"}, + {file = "lxml-5.1.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:8cf5877f7ed384dabfdcc37922c3191bf27e55b498fecece9fd5c2c7aaa34c33"}, + {file = "lxml-5.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:877efb968c3d7eb2dad540b6cabf2f1d3c0fbf4b2d309a3c141f79c7e0061324"}, + {file = "lxml-5.1.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f14a4fb1c1c402a22e6a341a24c1341b4a3def81b41cd354386dcb795f83897"}, + {file = "lxml-5.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:25663d6e99659544ee8fe1b89b1a8c0aaa5e34b103fab124b17fa958c4a324a6"}, + {file = "lxml-5.1.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8b9f19df998761babaa7f09e6bc169294eefafd6149aaa272081cbddc7ba4ca3"}, + {file = "lxml-5.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e53d7e6a98b64fe54775d23a7c669763451340c3d44ad5e3a3b48a1efbdc96f"}, + {file = "lxml-5.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c3cd1fc1dc7c376c54440aeaaa0dcc803d2126732ff5c6b68ccd619f2e64be4f"}, + {file = "lxml-5.1.0.tar.gz", hash = "sha256:3eea6ed6e6c918e468e693c41ef07f3c3acc310b70ddd9cc72d9ef84bc9564ca"}, +] + +[package.extras] +cssselect = ["cssselect (>=0.7)"] +html5 = ["html5lib"] +htmlsoup = ["BeautifulSoup4"] +source = ["Cython (>=3.0.7)"] + +[[package]] +name = "markdown" +version = "3.6" +description = "Python implementation of John Gruber's Markdown." +optional = false +python-versions = ">=3.8" +files = [ + {file = "Markdown-3.6-py3-none-any.whl", hash = "sha256:48f276f4d8cfb8ce6527c8f79e2ee29708508bf4d40aa410fbc3b4ee832c850f"}, + {file = "Markdown-3.6.tar.gz", hash = "sha256:ed4f41f6daecbeeb96e576ce414c41d2d876daa9a16cb35fa8ed8c2ddfad0224"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] +testing = ["coverage", "pyyaml"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "marshmallow" +version = "3.21.1" +description = "A lightweight library for converting complex datatypes to and from native Python datatypes." +optional = false +python-versions = ">=3.8" +files = [ + {file = "marshmallow-3.21.1-py3-none-any.whl", hash = "sha256:f085493f79efb0644f270a9bf2892843142d80d7174bbbd2f3713f2a589dc633"}, + {file = "marshmallow-3.21.1.tar.gz", hash = "sha256:4e65e9e0d80fc9e609574b9983cf32579f305c718afb30d7233ab818571768c3"}, +] + +[package.dependencies] +packaging = ">=17.0" + +[package.extras] +dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] +docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.2.6)", "sphinx-issues (==4.0.0)", "sphinx-version-warning (==1.1.2)"] +tests = ["pytest", "pytz", "simplejson"] + +[[package]] +name = "msal" +version = "1.25.0" +description = "The Microsoft Authentication Library (MSAL) for Python library" +optional = false +python-versions = ">=2.7" +files = [ + {file = "msal-1.25.0-py2.py3-none-any.whl", hash = "sha256:386df621becb506bc315a713ec3d4d5b5d6163116955c7dde23622f156b81af6"}, + {file = "msal-1.25.0.tar.gz", hash = "sha256:f44329fdb59f4f044c779164a34474b8a44ad9e4940afbc4c3a3a2bbe90324d9"}, +] + +[package.dependencies] +cryptography = ">=0.6,<44" +PyJWT = {version = ">=1.0.0,<3", extras = ["crypto"]} +requests = ">=2.0.0,<3" + +[package.extras] +broker = ["pymsalruntime (>=0.13.2,<0.14)"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "nltk" +version = "3.8.1" +description = "Natural Language Toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "nltk-3.8.1-py3-none-any.whl", hash = "sha256:fd5c9109f976fa86bcadba8f91e47f5e9293bd034474752e92a520f81c93dda5"}, + {file = "nltk-3.8.1.zip", hash = "sha256:1834da3d0682cba4f2cede2f9aad6b0fafb6461ba451db0efb6f9c39798d64d3"}, +] + +[package.dependencies] +click = "*" +joblib = "*" +regex = ">=2021.8.3" +tqdm = "*" + +[package.extras] +all = ["matplotlib", "numpy", "pyparsing", "python-crfsuite", "requests", "scikit-learn", "scipy", "twython"] +corenlp = ["requests"] +machine-learning = ["numpy", "python-crfsuite", "scikit-learn", "scipy"] +plot = ["matplotlib"] +tgrep = ["pyparsing"] +twitter = ["twython"] + +[[package]] +name = "numpy" +version = "1.26.4" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, +] + +[[package]] +name = "office365-rest-python-client" +version = "2.5.5" +description = "Microsoft 365 & Microsoft Graph Library for Python" +optional = false +python-versions = "*" +files = [ + {file = "Office365-REST-Python-Client-2.5.5.tar.gz", hash = "sha256:2396f3ac1bc544646abff3db9e45f0e43a28d20668ed9a4736554c5262e70a86"}, + {file = "Office365_REST_Python_Client-2.5.5-py3-none-any.whl", hash = "sha256:d64dcb9b3fe76859f8d570136c0e448a36ae26a8d71b52b4c5127eb9ae2290ca"}, +] + +[package.dependencies] +msal = "*" +pytz = "*" +requests = "*" +typing-extensions = ">=4.0.0" + +[package.extras] +ntlmprovider = ["requests-ntlm"] + +[[package]] +name = "packaging" +version = "24.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, +] + +[[package]] +name = "pdf2image" +version = "1.16.3" +description = "A wrapper around the pdftoppm and pdftocairo command line tools to convert PDF to a PIL Image list." +optional = false +python-versions = "*" +files = [ + {file = "pdf2image-1.16.3-py3-none-any.whl", hash = "sha256:b6154164af3677211c22cbb38b2bd778b43aca02758e962fe1e231f6d3b0e380"}, + {file = "pdf2image-1.16.3.tar.gz", hash = "sha256:74208810c2cef4d9e347769b8e62a52303982ddb4f2dfd744c7ab4b940ae287e"}, +] + +[package.dependencies] +pillow = "*" + +[[package]] +name = "pdfminer-six" +version = "20221105" +description = "PDF parser and analyzer" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pdfminer.six-20221105-py3-none-any.whl", hash = "sha256:1eaddd712d5b2732f8ac8486824533514f8ba12a0787b3d5fe1e686cd826532d"}, + {file = "pdfminer.six-20221105.tar.gz", hash = "sha256:8448ab7b939d18b64820478ecac5394f482d7a79f5f7eaa7703c6c959c175e1d"}, +] + +[package.dependencies] +charset-normalizer = ">=2.0.0" +cryptography = ">=36.0.0" + +[package.extras] +dev = ["black", "mypy (==0.931)", "nox", "pytest"] +docs = ["sphinx", "sphinx-argparse"] +image = ["Pillow"] + +[[package]] +name = "pendulum" +version = "2.1.2" +description = "Python datetimes made easy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, +] + +[package.dependencies] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" + +[[package]] +name = "pillow" +version = "10.2.0" +description = "Python Imaging Library (Fork)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pillow-10.2.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:7823bdd049099efa16e4246bdf15e5a13dbb18a51b68fa06d6c1d4d8b99a796e"}, + {file = "pillow-10.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:83b2021f2ade7d1ed556bc50a399127d7fb245e725aa0113ebd05cfe88aaf588"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fad5ff2f13d69b7e74ce5b4ecd12cc0ec530fcee76356cac6742785ff71c452"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da2b52b37dad6d9ec64e653637a096905b258d2fc2b984c41ae7d08b938a67e4"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:47c0995fc4e7f79b5cfcab1fc437ff2890b770440f7696a3ba065ee0fd496563"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:322bdf3c9b556e9ffb18f93462e5f749d3444ce081290352c6070d014c93feb2"}, + {file = "pillow-10.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:51f1a1bffc50e2e9492e87d8e09a17c5eea8409cda8d3f277eb6edc82813c17c"}, + {file = "pillow-10.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:69ffdd6120a4737710a9eee73e1d2e37db89b620f702754b8f6e62594471dee0"}, + {file = "pillow-10.2.0-cp310-cp310-win32.whl", hash = "sha256:c6dafac9e0f2b3c78df97e79af707cdc5ef8e88208d686a4847bab8266870023"}, + {file = "pillow-10.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:aebb6044806f2e16ecc07b2a2637ee1ef67a11840a66752751714a0d924adf72"}, + {file = "pillow-10.2.0-cp310-cp310-win_arm64.whl", hash = "sha256:7049e301399273a0136ff39b84c3678e314f2158f50f517bc50285fb5ec847ad"}, + {file = "pillow-10.2.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:35bb52c37f256f662abdfa49d2dfa6ce5d93281d323a9af377a120e89a9eafb5"}, + {file = "pillow-10.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c23f307202661071d94b5e384e1e1dc7dfb972a28a2310e4ee16103e66ddb67"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:773efe0603db30c281521a7c0214cad7836c03b8ccff897beae9b47c0b657d61"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11fa2e5984b949b0dd6d7a94d967743d87c577ff0b83392f17cb3990d0d2fd6e"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:716d30ed977be8b37d3ef185fecb9e5a1d62d110dfbdcd1e2a122ab46fddb03f"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a086c2af425c5f62a65e12fbf385f7c9fcb8f107d0849dba5839461a129cf311"}, + {file = "pillow-10.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c8de2789052ed501dd829e9cae8d3dcce7acb4777ea4a479c14521c942d395b1"}, + {file = "pillow-10.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:609448742444d9290fd687940ac0b57fb35e6fd92bdb65386e08e99af60bf757"}, + {file = "pillow-10.2.0-cp311-cp311-win32.whl", hash = "sha256:823ef7a27cf86df6597fa0671066c1b596f69eba53efa3d1e1cb8b30f3533068"}, + {file = "pillow-10.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:1da3b2703afd040cf65ec97efea81cfba59cdbed9c11d8efc5ab09df9509fc56"}, + {file = "pillow-10.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:edca80cbfb2b68d7b56930b84a0e45ae1694aeba0541f798e908a49d66b837f1"}, + {file = "pillow-10.2.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:1b5e1b74d1bd1b78bc3477528919414874748dd363e6272efd5abf7654e68bef"}, + {file = "pillow-10.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0eae2073305f451d8ecacb5474997c08569fb4eb4ac231ffa4ad7d342fdc25ac"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7c2286c23cd350b80d2fc9d424fc797575fb16f854b831d16fd47ceec078f2c"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e23412b5c41e58cec602f1135c57dfcf15482013ce6e5f093a86db69646a5aa"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:52a50aa3fb3acb9cf7213573ef55d31d6eca37f5709c69e6858fe3bc04a5c2a2"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:127cee571038f252a552760076407f9cff79761c3d436a12af6000cd182a9d04"}, + {file = "pillow-10.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8d12251f02d69d8310b046e82572ed486685c38f02176bd08baf216746eb947f"}, + {file = "pillow-10.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:54f1852cd531aa981bc0965b7d609f5f6cc8ce8c41b1139f6ed6b3c54ab82bfb"}, + {file = "pillow-10.2.0-cp312-cp312-win32.whl", hash = "sha256:257d8788df5ca62c980314053197f4d46eefedf4e6175bc9412f14412ec4ea2f"}, + {file = "pillow-10.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:154e939c5f0053a383de4fd3d3da48d9427a7e985f58af8e94d0b3c9fcfcf4f9"}, + {file = "pillow-10.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:f379abd2f1e3dddb2b61bc67977a6b5a0a3f7485538bcc6f39ec76163891ee48"}, + {file = "pillow-10.2.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8373c6c251f7ef8bda6675dd6d2b3a0fcc31edf1201266b5cf608b62a37407f9"}, + {file = "pillow-10.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:870ea1ada0899fd0b79643990809323b389d4d1d46c192f97342eeb6ee0b8483"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4b6b1e20608493548b1f32bce8cca185bf0480983890403d3b8753e44077129"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3031709084b6e7852d00479fd1d310b07d0ba82765f973b543c8af5061cf990e"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:3ff074fc97dd4e80543a3e91f69d58889baf2002b6be64347ea8cf5533188213"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:cb4c38abeef13c61d6916f264d4845fab99d7b711be96c326b84df9e3e0ff62d"}, + {file = "pillow-10.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b1b3020d90c2d8e1dae29cf3ce54f8094f7938460fb5ce8bc5c01450b01fbaf6"}, + {file = "pillow-10.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:170aeb00224ab3dc54230c797f8404507240dd868cf52066f66a41b33169bdbe"}, + {file = "pillow-10.2.0-cp38-cp38-win32.whl", hash = "sha256:c4225f5220f46b2fde568c74fca27ae9771536c2e29d7c04f4fb62c83275ac4e"}, + {file = "pillow-10.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:0689b5a8c5288bc0504d9fcee48f61a6a586b9b98514d7d29b840143d6734f39"}, + {file = "pillow-10.2.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:b792a349405fbc0163190fde0dc7b3fef3c9268292586cf5645598b48e63dc67"}, + {file = "pillow-10.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c570f24be1e468e3f0ce7ef56a89a60f0e05b30a3669a459e419c6eac2c35364"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8ecd059fdaf60c1963c58ceb8997b32e9dc1b911f5da5307aab614f1ce5c2fb"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c365fd1703040de1ec284b176d6af5abe21b427cb3a5ff68e0759e1e313a5e7e"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:70c61d4c475835a19b3a5aa42492409878bbca7438554a1f89d20d58a7c75c01"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b6f491cdf80ae540738859d9766783e3b3c8e5bd37f5dfa0b76abdecc5081f13"}, + {file = "pillow-10.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d189550615b4948f45252d7f005e53c2040cea1af5b60d6f79491a6e147eef7"}, + {file = "pillow-10.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:49d9ba1ed0ef3e061088cd1e7538a0759aab559e2e0a80a36f9fd9d8c0c21591"}, + {file = "pillow-10.2.0-cp39-cp39-win32.whl", hash = "sha256:babf5acfede515f176833ed6028754cbcd0d206f7f614ea3447d67c33be12516"}, + {file = "pillow-10.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:0304004f8067386b477d20a518b50f3fa658a28d44e4116970abfcd94fac34a8"}, + {file = "pillow-10.2.0-cp39-cp39-win_arm64.whl", hash = "sha256:0fb3e7fc88a14eacd303e90481ad983fd5b69c761e9e6ef94c983f91025da869"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:322209c642aabdd6207517e9739c704dc9f9db943015535783239022002f054a"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3eedd52442c0a5ff4f887fab0c1c0bb164d8635b32c894bc1faf4c618dd89df2"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb28c753fd5eb3dd859b4ee95de66cc62af91bcff5db5f2571d32a520baf1f04"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:33870dc4653c5017bf4c8873e5488d8f8d5f8935e2f1fb9a2208c47cdd66efd2"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3c31822339516fb3c82d03f30e22b1d038da87ef27b6a78c9549888f8ceda39a"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a2b56ba36e05f973d450582fb015594aaa78834fefe8dfb8fcd79b93e64ba4c6"}, + {file = "pillow-10.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d8e6aeb9201e655354b3ad049cb77d19813ad4ece0df1249d3c793de3774f8c7"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:2247178effb34a77c11c0e8ac355c7a741ceca0a732b27bf11e747bbc950722f"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15587643b9e5eb26c48e49a7b33659790d28f190fc514a322d55da2fb5c2950e"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753cd8f2086b2b80180d9b3010dd4ed147efc167c90d3bf593fe2af21265e5a5"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7c8f97e8e7a9009bcacbe3766a36175056c12f9a44e6e6f2d5caad06dcfbf03b"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d1b35bcd6c5543b9cb547dee3150c93008f8dd0f1fef78fc0cd2b141c5baf58a"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fe4c15f6c9285dc54ce6553a3ce908ed37c8f3825b5a51a15c91442bb955b868"}, + {file = "pillow-10.2.0.tar.gz", hash = "sha256:e87f0b2c78157e12d7686b27d63c070fd65d994e8ddae6f328e0dcf4a0cd007e"}, +] + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] +fpx = ["olefile"] +mic = ["olefile"] +tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] +typing = ["typing-extensions"] +xmp = ["defusedxml"] + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] + +[[package]] +name = "pyarrow" +version = "15.0.2" +description = "Python library for Apache Arrow" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyarrow-15.0.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:88b340f0a1d05b5ccc3d2d986279045655b1fe8e41aba6ca44ea28da0d1455d8"}, + {file = "pyarrow-15.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eaa8f96cecf32da508e6c7f69bb8401f03745c050c1dd42ec2596f2e98deecac"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23c6753ed4f6adb8461e7c383e418391b8d8453c5d67e17f416c3a5d5709afbd"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f639c059035011db8c0497e541a8a45d98a58dbe34dc8fadd0ef128f2cee46e5"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:290e36a59a0993e9a5224ed2fb3e53375770f07379a0ea03ee2fce2e6d30b423"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:06c2bb2a98bc792f040bef31ad3e9be6a63d0cb39189227c08a7d955db96816e"}, + {file = "pyarrow-15.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:f7a197f3670606a960ddc12adbe8075cea5f707ad7bf0dffa09637fdbb89f76c"}, + {file = "pyarrow-15.0.2-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:5f8bc839ea36b1f99984c78e06e7a06054693dc2af8920f6fb416b5bca9944e4"}, + {file = "pyarrow-15.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f5e81dfb4e519baa6b4c80410421528c214427e77ca0ea9461eb4097c328fa33"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a4f240852b302a7af4646c8bfe9950c4691a419847001178662a98915fd7ee7"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e7d9cfb5a1e648e172428c7a42b744610956f3b70f524aa3a6c02a448ba853e"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2d4f905209de70c0eb5b2de6763104d5a9a37430f137678edfb9a675bac9cd98"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:90adb99e8ce5f36fbecbbc422e7dcbcbed07d985eed6062e459e23f9e71fd197"}, + {file = "pyarrow-15.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:b116e7fd7889294cbd24eb90cd9bdd3850be3738d61297855a71ac3b8124ee38"}, + {file = "pyarrow-15.0.2-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:25335e6f1f07fdaa026a61c758ee7d19ce824a866b27bba744348fa73bb5a440"}, + {file = "pyarrow-15.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:90f19e976d9c3d8e73c80be84ddbe2f830b6304e4c576349d9360e335cd627fc"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a22366249bf5fd40ddacc4f03cd3160f2d7c247692945afb1899bab8a140ddfb"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2a335198f886b07e4b5ea16d08ee06557e07db54a8400cc0d03c7f6a22f785f"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:3e6d459c0c22f0b9c810a3917a1de3ee704b021a5fb8b3bacf968eece6df098f"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:033b7cad32198754d93465dcfb71d0ba7cb7cd5c9afd7052cab7214676eec38b"}, + {file = "pyarrow-15.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:29850d050379d6e8b5a693098f4de7fd6a2bea4365bfd073d7c57c57b95041ee"}, + {file = "pyarrow-15.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:7167107d7fb6dcadb375b4b691b7e316f4368f39f6f45405a05535d7ad5e5058"}, + {file = "pyarrow-15.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e85241b44cc3d365ef950432a1b3bd44ac54626f37b2e3a0cc89c20e45dfd8bf"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:248723e4ed3255fcd73edcecc209744d58a9ca852e4cf3d2577811b6d4b59818"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ff3bdfe6f1b81ca5b73b70a8d482d37a766433823e0c21e22d1d7dde76ca33f"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:f3d77463dee7e9f284ef42d341689b459a63ff2e75cee2b9302058d0d98fe142"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:8c1faf2482fb89766e79745670cbca04e7018497d85be9242d5350cba21357e1"}, + {file = "pyarrow-15.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:28f3016958a8e45a1069303a4a4f6a7d4910643fc08adb1e2e4a7ff056272ad3"}, + {file = "pyarrow-15.0.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:89722cb64286ab3d4daf168386f6968c126057b8c7ec3ef96302e81d8cdb8ae4"}, + {file = "pyarrow-15.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cd0ba387705044b3ac77b1b317165c0498299b08261d8122c96051024f953cd5"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad2459bf1f22b6a5cdcc27ebfd99307d5526b62d217b984b9f5c974651398832"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58922e4bfece8b02abf7159f1f53a8f4d9f8e08f2d988109126c17c3bb261f22"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:adccc81d3dc0478ea0b498807b39a8d41628fa9210729b2f718b78cb997c7c91"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:8bd2baa5fe531571847983f36a30ddbf65261ef23e496862ece83bdceb70420d"}, + {file = "pyarrow-15.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6669799a1d4ca9da9c7e06ef48368320f5856f36f9a4dd31a11839dda3f6cc8c"}, + {file = "pyarrow-15.0.2.tar.gz", hash = "sha256:9c9bc803cb3b7bfacc1e96ffbfd923601065d9d3f911179d81e72d99fd74a3d9"}, +] + +[package.dependencies] +numpy = ">=1.16.6,<2" + +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] + +[[package]] +name = "pydantic" +version = "1.10.14" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, + {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, + {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, + {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, + {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, + {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, + {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, + {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, + {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, + {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pyjwt" +version = "2.8.0" +description = "JSON Web Token implementation in Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, + {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, +] + +[package.dependencies] +cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"crypto\""} + +[package.extras] +crypto = ["cryptography (>=3.4.0)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] + +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + +[[package]] +name = "pyrsistent" +version = "0.20.0" +description = "Persistent/Functional/Immutable data structures" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, +] + +[[package]] +name = "pytesseract" +version = "0.3.10" +description = "Python-tesseract is a python wrapper for Google's Tesseract-OCR" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytesseract-0.3.10-py3-none-any.whl", hash = "sha256:8f22cc98f765bf13517ead0c70effedb46c153540d25783e04014f28b55a5fc6"}, + {file = "pytesseract-0.3.10.tar.gz", hash = "sha256:f1c3a8b0f07fd01a1085d451f5b8315be6eec1d5577a6796d46dc7a62bd4120f"}, +] + +[package.dependencies] +packaging = ">=21.3" +Pillow = ">=8.0.0" + +[[package]] +name = "pytest" +version = "6.2.5" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, + {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, +] + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +toml = "*" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] + +[[package]] +name = "pytest-mock" +version = "3.14.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, +] + +[package.dependencies] +pytest = ">=6.2.5" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-docx" +version = "1.1.0" +description = "Create, read, and update Microsoft Word .docx files." +optional = false +python-versions = ">=3.7" +files = [ + {file = "python-docx-1.1.0.tar.gz", hash = "sha256:5829b722141cf1ab79aedf0c34d9fe9924b29764584c0f2164eb2b02dcdf17c9"}, + {file = "python_docx-1.1.0-py3-none-any.whl", hash = "sha256:bac9773278098a1ddc43a52d84e22f5909c4a3080a624530b3ecb3771b07c6cd"}, +] + +[package.dependencies] +lxml = ">=3.1.0" +typing-extensions = "*" + +[[package]] +name = "python-iso639" +version = "2024.2.7" +description = "Look-up utilities for ISO 639 language codes and names" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-iso639-2024.2.7.tar.gz", hash = "sha256:c323233348c34d57c601e3e6d824088e492896bcb97a61a87f7d93401a305377"}, + {file = "python_iso639-2024.2.7-py3-none-any.whl", hash = "sha256:7b149623ff74230f4ee3061fb01d18e57a8d07c5fee2aa72907f39b7f6d16cbc"}, +] + +[package.extras] +dev = ["black (==24.1.1)", "build (==1.0.3)", "flake8 (==7.0.0)", "pytest (==8.0.0)", "twine (==4.0.2)"] + +[[package]] +name = "python-magic" +version = "0.4.27" +description = "File type identification using libmagic" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "python-magic-0.4.27.tar.gz", hash = "sha256:c1ba14b08e4a5f5c31a302b7721239695b2f0f058d125bd5ce1ee36b9d9d3c3b"}, + {file = "python_magic-0.4.27-py2.py3-none-any.whl", hash = "sha256:c212960ad306f700aa0d01e5d7a325d20548ff97eb9920dcd29513174f0294d3"}, +] + +[[package]] +name = "python-pptx" +version = "0.6.21" +description = "Generate and manipulate Open XML PowerPoint (.pptx) files" +optional = false +python-versions = "*" +files = [ + {file = "python-pptx-0.6.21.tar.gz", hash = "sha256:7798a2aaf89563565b3c7120c0acfe9aff775db0db3580544e3bf4840c2e378f"}, +] + +[package.dependencies] +lxml = ">=3.1.0" +Pillow = ">=3.3.2" +XlsxWriter = ">=0.5.7" + +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "rapidfuzz" +version = "3.6.2" +description = "rapid fuzzy string matching" +optional = false +python-versions = ">=3.8" +files = [ + {file = "rapidfuzz-3.6.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a5637e6bf11b15b5aff6ee818c76bdec99ad208511b78985e6209ba648a6e3ee"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:380586664f2f63807050ddb95e7702888b4f0b425abf17655940c411f39287ad"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3168ff565d4b8c239cf11fb604dd2507d30e9bcaac76a4077c0ac23cf2c866ed"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be69f7fd46b5c6467fe5e2fd4cff3816b0c03048eed8a4becb9a73e6000960e7"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cbd5894f23fdf5697499cf759523639838ac822bd1600e343fdce7313baa02ae"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85a5b6e026393fe39fb61146b9c17c5af66fffbe1410e992c4bb06d9ec327bd3"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ab269adfc64480f209e99f253391a10735edd5c09046e04899adab5fb132f20e"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35aeac852bca06023d6bbd50c1fc504ca5a9a3613d5e75a140f0be7601fa34ef"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e706f302c6a3ae0d74edd0d6ace46aee1ae07c563b436ccf5ff04db2b3571e60"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bec353f022011e6e5cd28ccb8700fbd2a33918197af0d4e0abb3c3f4845cc864"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ef3925daaa93eed20401012e219f569ff0c039ed5bf4ce2d3737b4f75d441622"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:6ee98d88ae9ccc77ff61992ed33b2496478def5dc0da55c9a9aa06fcb725a352"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:423c7c588b09d618601097b7a0017dfcb91132a2076bef29023c5f3cd2dc3de1"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-win32.whl", hash = "sha256:c17c5efee347a40a6f4c1eec59e3d7d1e22f7613a97f8b8a07733ef723483a04"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-win_amd64.whl", hash = "sha256:4209816626d8d6ff8ae7dc248061c6059e618b70c6e6f6e4d7444ae3740b2b85"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-win_arm64.whl", hash = "sha256:1c54d3c85e522d3ac9ee39415f183c8fa184c4f87e7e5a37938f15a6d50e853a"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e06f6d270112f5db001f1cba5a97e1a48aee3d3dbdcbea3ec027c230462dbf9b"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:080cb71b50cb6aff11d1c6aeb157f273e2da0b2bdb3f9d7b01257e49e69a8576"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a7895e04a22d6515bc91a850e0831f2405547605aa311d1ffec51e4818abc3c1"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd82f9838519136b7083dd1e3149ee80344521f3dc37f744f227505ff0883efb"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a945567c2b0b6e069454c9782d5234b0b6795718adf7a9f868bd3144afa6a023"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:673ba2c343644805acdae1cb949c6a4de71aa2f62a998978551ebea59603af3f"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9d457c89bac1471442002e70551e8268e639b3870b4a4521eae363c07253be87"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:495c0d8e14e6f12520eb7fc71b9ba9fcaafb47fc23a654e6e89b6c7985ec0020"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6d67b649bf3e1b1722d04eca44d37919aef88305ce7ad05564502d013cf550fd"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e48dde8ca83d11daa00900cf6a5d281a1297aef9b7bfa73801af6e8822be5019"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:824cc381cf81cbf8d158f6935664ec2a69e6ac3b1d39fa201988bf81a257f775"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:1dfe4c24957474ce0ac75d886387e30e292b4be39228a6d71f76de414dc187db"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d57b98013b802621bbc8b12a46bfc9d36ac552ab51ca207f7ce167ad46adabeb"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-win32.whl", hash = "sha256:9a07dffac439223b4f1025dbfc68f4445a3460a859309c9858c2a3fa29617cdc"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-win_amd64.whl", hash = "sha256:95a49c6b8bf1229743ae585dd5b7d57f0d15a7eb6e826866d5c9965ba958503c"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-win_arm64.whl", hash = "sha256:af7c19ec86e11488539380d3db1755be5d561a3c0e7b04ff9d07abd7f9a8e9d8"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:de8adc12161bf282c60f12dc9233bb31632f71d446a010fe7469a69b8153427f"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:337e357f693130c4c6be740652542b260e36f622c59e01fa33d58f1d2750c930"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6468f8bc8c3c50604f43631550ef9cfec873515dba5023ca34d461be94669fc8"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74c6773b11445b5e5cf93ca383171cd0ac0cdeafea11a7b2a5688f8bf8d813e6"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1507fc5769aa109dda4de3a15f822a0f6a03e18d627bd0ba3ddbb253cf70e07"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:617949a70150e6fffdaed19253dd49f7a53528411dc8bf7663d499ba21e0f61e"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f8b77779174b1b40aa70827692571ab457061897846255ad7d5d559e2edb1932"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80e51b22a7da83f9c87a97e92df07ed0612c74c35496590255f4b5d5b4212dfe"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3ae7c86914cb6673e97e187ba431b9c4cf4177d9ae77f8a1e5b2ba9a5628839e"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ddc380ffaa90f204cc9ddcb779114b9ab6f015246d549de9d47871a97ef9f18a"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:3c1dc078ef371fce09f9f3eec2ca4eaa2a8cd412ec53941015b4f39f14d34407"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:9a74102fc5a2534fe91f7507838623e1f3a149d8e05648389c42bb42e14b1c3f"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:48e1eaea8fcd522fca7f04f0480663f0f0cfb77957092cce60a93f4462864996"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-win32.whl", hash = "sha256:66b008bf2972740cd2dda5d382eb8bdb87265cd88198e71c7797bdc0d1f79d20"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-win_amd64.whl", hash = "sha256:87ac3a87f2251ae2e95fc9478ca5c759de6d141d04c84d3fec9f9cdcfc167b33"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-win_arm64.whl", hash = "sha256:b593cc51aed887e93b78c2f94dfae9008be2b23d17afd3b1f1d3eb3913b58f26"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:7d830bc7a9b586a374147ec60b08b1f9ae5996b43f75cc514f37faef3866b519"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:dbee7f5ff11872b76505cbd87c814abc823e8757f11c69062eb3b25130a283da"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:28c011fb31f2c3f82f503aedd6097d3d3854e574e327a119a3b7eb2cf90b79ca"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cda81d0e0ce0c13abfa46b24e10c1e85f9c6acb628f0a9a948f5779f9c2076a2"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c279928651ce0e9e5220dcb25a00cc53b65e592a0861336a38299bcdca3a596"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:35bd4bc9c40e6994c5d6edea4b9319388b4d9711c13c66d543bb4c37624b4184"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d07899506a5a8760448d9df036d528b55a554bf571714173635c79eef4a86e58"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb2e51d01b9c6d6954a3e055c57a80d4685b4fc82719db5519fc153566bcd6bb"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:153d065e353371cc0aeff32b99999a5758266a64e958d1364189367c1c9f6814"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4edcceebb85ebfa49a3ddcde20ad891d36c08dc0fd592efdab0e7d313a4e36af"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3549123fca5bb817341025f98e8e49ca99f84596c7c4f92b658f8e5836040d4a"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:84c1032ae42628465b7a5cc35249906061e18a8193c9c27cbd2db54e9823a9a6"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9bcc91ebd8fc69a6bd3b5711c8250f5f4e70606b4da75ef415f57ad209978205"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-win32.whl", hash = "sha256:f3a70f341c4c111bad910d2df69c78577a98af140319a996af24c9385939335d"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-win_amd64.whl", hash = "sha256:354ad5fe655beb7b279390cb58334903931c5452ecbad1b1666ffb06786498e2"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1b86b93d93020c2b3edc1665d75c8855784845fc0a739b312c26c3a4bf0c80d5"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:28243086ed0e50808bb56632e5442c457241646aeafafd501ac87901f40a3237"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ed52461ae5a9ea4c400d38e2649c74a413f1a6d8fb8308b66f1fbd122514732f"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a46220f86a5f9cb016af31525e0d0865cad437d02239aa0d8aed2ab8bff1f1c"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:81a630ed2fc3ec5fc7400eb66bab1f87e282b4d47f0abe3e48c6634dfa13b5e4"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d8e5a437b9089df6242a718d9c31ab1742989e9400a0977af012ef483b63b4c2"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16270b5529de83b7bae7457e952e4d9cf3fbf029a837dd32d415bb9e0eb8e599"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5378c04102c7f084cde30a100154fa6d7e2baf0d51a6bdd2f912545559c1fb35"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7f18397c8d6a65fc0b288d2fc29bc7baeea6ba91eeb95163a3cd98f23cd3bc85"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2acd2514defce81e6ff4bbff50252d5e7df8e85a731442c4b83e44c86cf1c916"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:1df2faf80201952e252413b6fac6f3e146080dcebb87bb1bb722508e67558ed8"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6440ed0b3007c1c9286b0b88fe2ab2d9e83edd60cd62293b3dfabb732b4e8a30"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4fcfa23b5553b27f4016df77c53172ea743454cf12c28cfa7c35a309a2be93b3"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-win32.whl", hash = "sha256:2d580d937146e803c8e5e1b87916cab8d6f84013b6392713e201efcda335c7d8"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-win_amd64.whl", hash = "sha256:fe2a68be734e8e88af23385c68d6467e15818b6b1df1cbfebf7bff577226c957"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-win_arm64.whl", hash = "sha256:6478f7803efebf5f644d0b758439c5b25728550fdfbb19783d150004c46a75a9"}, + {file = "rapidfuzz-3.6.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:36ce7b68a7b90b787cdd73480a68d2f1ca63c31a3a9d5a79a8736f978e1e9344"}, + {file = "rapidfuzz-3.6.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53597fd72a9340bcdd80d3620f4957c2b92f9b569313b969a3abdaffd193aae6"}, + {file = "rapidfuzz-3.6.2-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4f6de745fe6ce46a422d353ee10599013631d7d714a36d025f164b2d4e8c000"}, + {file = "rapidfuzz-3.6.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62df2136068e2515ed8beb01756381ff62c29384d785e3bf46e3111d4ea3ba1e"}, + {file = "rapidfuzz-3.6.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7382c90170f60c846c81a07ddd80bb2e8c43c8383754486fa37f67391a571897"}, + {file = "rapidfuzz-3.6.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f31314fd2e2f3dc3e519e6f93669462ce7953df2def1c344aa8f5345976d0eb2"}, + {file = "rapidfuzz-3.6.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:012221629d54d3bee954148247f711eb86d4d390b589ebfe03172ea0b37a7531"}, + {file = "rapidfuzz-3.6.2-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d41dd59a70decfce6595315367a2fea2af660d92a9d144acc6479030501014d7"}, + {file = "rapidfuzz-3.6.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f9fa14136a5b0cba1ec42531f7c3e0b0d3edb7fd6bc5e5ae7b498541f3855ab"}, + {file = "rapidfuzz-3.6.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:259364199cbfeca33b1af369fc7951f71717aa285184a3fa5a7b1772da1b89db"}, + {file = "rapidfuzz-3.6.2.tar.gz", hash = "sha256:cf911e792ab0c431694c9bf2648afabfd92099103f2e31492893e078ddca5e1a"}, +] + +[package.extras] +full = ["numpy"] + +[[package]] +name = "regex" +version = "2023.12.25" +description = "Alternative regular expression module, to replace re." +optional = false +python-versions = ">=3.7" +files = [ + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"}, + {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"}, + {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"}, + {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"}, + {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"}, + {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"}, + {file = "regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"}, + {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"}, + {file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = "sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"}, + {file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"}, + {file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"}, + {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"}, + {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"}, + {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"}, + {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-cache" +version = "1.2.0" +description = "A persistent cache for python requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, +] + +[package.dependencies] +attrs = ">=21.2" +cattrs = ">=22.2" +platformdirs = ">=2.5" +requests = ">=2.22" +url-normalize = ">=1.4" +urllib3 = ">=1.25.5" + +[package.extras] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] +bson = ["bson (>=0.5)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] +dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] +json = ["ujson (>=5.4)"] +mongodb = ["pymongo (>=3)"] +redis = ["redis (>=3)"] +security = ["itsdangerous (>=2.0)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "requests-mock" +version = "1.11.0" +description = "Mock out responses from the requests package" +optional = false +python-versions = "*" +files = [ + {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"}, + {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"}, +] + +[package.dependencies] +requests = ">=2.3,<3" +six = "*" + +[package.extras] +fixture = ["fixtures"] +test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] + +[[package]] +name = "setuptools" +version = "69.2.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "smart-open" +version = "6.4.0" +description = "Utils for streaming large files (S3, HDFS, GCS, Azure Blob Storage, gzip, bz2...)" +optional = false +python-versions = ">=3.6,<4.0" +files = [ + {file = "smart_open-6.4.0-py3-none-any.whl", hash = "sha256:8d3ef7e6997e8e42dd55c74166ed21e6ac70664caa32dd940b26d54a8f6b4142"}, + {file = "smart_open-6.4.0.tar.gz", hash = "sha256:be3c92c246fbe80ebce8fbacb180494a481a77fcdcb7c1aadb2ea5b9c2bee8b9"}, +] + +[package.extras] +all = ["azure-common", "azure-core", "azure-storage-blob", "boto3", "google-cloud-storage (>=2.6.0)", "paramiko", "requests"] +azure = ["azure-common", "azure-core", "azure-storage-blob"] +gcs = ["google-cloud-storage (>=2.6.0)"] +http = ["requests"] +s3 = ["boto3"] +ssh = ["paramiko"] +test = ["azure-common", "azure-core", "azure-storage-blob", "boto3", "google-cloud-storage (>=2.6.0)", "moto[server]", "paramiko", "pytest", "pytest-rerunfailures", "requests", "responses"] +webhdfs = ["requests"] + +[[package]] +name = "soupsieve" +version = "2.5" +description = "A modern CSS selector implementation for Beautiful Soup." +optional = false +python-versions = ">=3.8" +files = [ + {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, + {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, +] + +[[package]] +name = "tabulate" +version = "0.9.0" +description = "Pretty-print tabular data" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, + {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, +] + +[package.extras] +widechars = ["wcwidth"] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "tqdm" +version = "4.66.2" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tqdm-4.66.2-py3-none-any.whl", hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"}, + {file = "tqdm-4.66.2.tar.gz", hash = "sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + +[[package]] +name = "typing-extensions" +version = "4.10.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, +] + +[[package]] +name = "typing-inspect" +version = "0.9.0" +description = "Runtime inspection utilities for typing module." +optional = false +python-versions = "*" +files = [ + {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, + {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, +] + +[package.dependencies] +mypy-extensions = ">=0.3.0" +typing-extensions = ">=3.7.4" + +[[package]] +name = "unstructured" +version = "0.10.27" +description = "A library that prepares raw documents for downstream ML tasks." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "unstructured-0.10.27-py3-none-any.whl", hash = "sha256:3a8a8e44302388ddc39c184059e8b4458f1cdc58032540b9af7d85f6c3eca3be"}, + {file = "unstructured-0.10.27.tar.gz", hash = "sha256:f567b5c4385993a9ab48db5563dd7b413aac4f2002bb22e6250496ea8f440f5e"}, +] + +[package.dependencies] +backoff = "*" +beautifulsoup4 = "*" +chardet = "*" +dataclasses-json = "*" +emoji = "*" +filetype = "*" +langdetect = "*" +lxml = "*" +nltk = "*" +numpy = "*" +python-docx = {version = ">=1.0.1", optional = true, markers = "extra == \"docx\""} +python-iso639 = "*" +python-magic = "*" +python-pptx = {version = "<=0.6.21", optional = true, markers = "extra == \"pptx\""} +rapidfuzz = "*" +requests = "*" +tabulate = "*" +typing-extensions = "*" + +[package.extras] +airtable = ["pyairtable"] +all-docs = ["markdown", "msg-parser", "networkx", "onnx", "openpyxl", "pandas", "pdf2image", "pdfminer.six", "pypandoc", "python-docx (>=1.0.1)", "python-pptx (<=0.6.21)", "unstructured-inference (==0.7.10)", "unstructured.pytesseract (>=0.3.12)", "xlrd"] +azure = ["adlfs", "fsspec (==2023.9.1)"] +azure-cognitive-search = ["azure-search-documents"] +bedrock = ["boto3", "langchain"] +biomed = ["bs4"] +box = ["boxfs", "fsspec (==2023.9.1)"] +confluence = ["atlassian-python-api"] +csv = ["pandas"] +delta-table = ["deltalake", "fsspec (==2023.9.1)"] +discord = ["discord-py"] +doc = ["python-docx (>=1.0.1)"] +docx = ["python-docx (>=1.0.1)"] +dropbox = ["dropboxdrivefs", "fsspec (==2023.9.1)"] +elasticsearch = ["elasticsearch", "jq"] +embed-huggingface = ["huggingface", "langchain", "sentence-transformers"] +epub = ["pypandoc"] +gcs = ["bs4", "fsspec (==2023.9.1)", "gcsfs"] +github = ["pygithub (>1.58.0)"] +gitlab = ["python-gitlab"] +google-drive = ["google-api-python-client"] +huggingface = ["langdetect", "sacremoses", "sentencepiece", "torch", "transformers"] +image = ["onnx", "pdf2image", "pdfminer.six", "unstructured-inference (==0.7.10)", "unstructured.pytesseract (>=0.3.12)"] +jira = ["atlassian-python-api"] +local-inference = ["markdown", "msg-parser", "networkx", "onnx", "openpyxl", "pandas", "pdf2image", "pdfminer.six", "pypandoc", "python-docx (>=1.0.1)", "python-pptx (<=0.6.21)", "unstructured-inference (==0.7.10)", "unstructured.pytesseract (>=0.3.12)", "xlrd"] +md = ["markdown"] +msg = ["msg-parser"] +notion = ["htmlBuilder", "notion-client"] +odt = ["pypandoc", "python-docx (>=1.0.1)"] +onedrive = ["Office365-REST-Python-Client (<2.4.3)", "bs4", "msal"] +openai = ["langchain", "openai", "tiktoken"] +org = ["pypandoc"] +outlook = ["Office365-REST-Python-Client (<2.4.3)", "msal"] +paddleocr = ["unstructured.paddleocr (==2.6.1.3)"] +pdf = ["onnx", "pdf2image", "pdfminer.six", "unstructured-inference (==0.7.10)", "unstructured.pytesseract (>=0.3.12)"] +ppt = ["python-pptx (<=0.6.21)"] +pptx = ["python-pptx (<=0.6.21)"] +reddit = ["praw"] +rst = ["pypandoc"] +rtf = ["pypandoc"] +s3 = ["fsspec (==2023.9.1)", "s3fs"] +salesforce = ["simple-salesforce"] +sharepoint = ["Office365-REST-Python-Client (<2.4.3)", "msal"] +slack = ["slack-sdk"] +tsv = ["pandas"] +wikipedia = ["wikipedia"] +xlsx = ["networkx", "openpyxl", "pandas", "xlrd"] + +[[package]] +name = "unstructured-pytesseract" +version = "0.3.12" +description = "Python-tesseract is a python wrapper for Google's Tesseract-OCR" +optional = false +python-versions = ">=3.8" +files = [ + {file = "unstructured.pytesseract-0.3.12-py3-none-any.whl", hash = "sha256:6ed42530fc697bb08d1ae4884cc517ee808620c1c1414efe8d5d90334da068d3"}, + {file = "unstructured.pytesseract-0.3.12.tar.gz", hash = "sha256:751a21d67b1f109036bf4daf796d3e04631697a355efd650f3373412b249de2e"}, +] + +[package.dependencies] +packaging = ">=21.3" +Pillow = ">=8.0.0" + +[[package]] +name = "url-normalize" +version = "1.4.3" +description = "URL normalization for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, + {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wcmatch" +version = "8.4" +description = "Wildcard/glob file name matcher." +optional = false +python-versions = ">=3.7" +files = [ + {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, + {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, +] + +[package.dependencies] +bracex = ">=2.1.1" + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[[package]] +name = "xlsxwriter" +version = "3.2.0" +description = "A Python module for creating Excel XLSX files." +optional = false +python-versions = ">=3.6" +files = [ + {file = "XlsxWriter-3.2.0-py3-none-any.whl", hash = "sha256:ecfd5405b3e0e228219bcaf24c2ca0915e012ca9464a14048021d21a995d490e"}, + {file = "XlsxWriter-3.2.0.tar.gz", hash = "sha256:9977d0c661a72866a61f9f7a809e25ebbb0fb7036baa3b9fe74afcfca6b3cb8c"}, +] + +[[package]] +name = "zipp" +version = "3.18.1" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, + {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9,<3.12" +content-hash = "8a242704adb0eb220fab0c27e0335b4fcdd954cc4e1a3420f551666978167b00" diff --git a/airbyte-integrations/connectors/source-microsoft-sharepoint/pyproject.toml b/airbyte-integrations/connectors/source-microsoft-sharepoint/pyproject.toml new file mode 100644 index 0000000000000..cfac4817ddb2b --- /dev/null +++ b/airbyte-integrations/connectors/source-microsoft-sharepoint/pyproject.toml @@ -0,0 +1,34 @@ +[build-system] +requires = [ "poetry-core>=1.0.0",] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +version = "0.2.2" +name = "source-microsoft-sharepoint" +description = "Source implementation for Microsoft SharePoint." +authors = [ "Airbyte ",] +license = "MIT" +readme = "README.md" +documentation = "https://docs.airbyte.com/integrations/sources/microsoft-sharepoint" +homepage = "https://airbyte.com" +repository = "https://github.com/airbytehq/airbyte" +[[tool.poetry.packages]] +include = "source_microsoft_sharepoint" + +[tool.poetry.dependencies] +python = "^3.9,<3.12" +msal = "==1.25.0" +Office365-REST-Python-Client = "==2.5.5" +smart-open = "==6.4.0" + +[tool.poetry.scripts] +source-microsoft-sharepoint = "source_microsoft_sharepoint.run:run" + +[tool.poetry.dependencies.airbyte-cdk] +extras = [ "file-based",] +version = "^0" + +[tool.poetry.group.dev.dependencies] +pytest-mock = "^3.6.1" +requests-mock = "^1.11.0" +pytest = "^6.1" diff --git a/airbyte-integrations/connectors/source-microsoft-sharepoint/setup.py b/airbyte-integrations/connectors/source-microsoft-sharepoint/setup.py deleted file mode 100644 index ce83f37b59409..0000000000000 --- a/airbyte-integrations/connectors/source-microsoft-sharepoint/setup.py +++ /dev/null @@ -1,32 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = [ - "airbyte-cdk[file-based]==0.59.2", - "msal~=1.25.0", - "Office365-REST-Python-Client~=2.5.2", - "smart-open~=6.4.0", -] - -TEST_REQUIREMENTS = [ - "pytest-mock~=3.6.1", - "pytest~=6.1", - "requests-mock~=1.11.0", -] - -setup( - name="source_microsoft_sharepoint", - description="Source implementation for Microsoft SharePoint.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/source-microsoft-sharepoint/source_microsoft_sharepoint/run.py b/airbyte-integrations/connectors/source-microsoft-sharepoint/source_microsoft_sharepoint/run.py new file mode 100644 index 0000000000000..dc64645c0196e --- /dev/null +++ b/airbyte-integrations/connectors/source-microsoft-sharepoint/source_microsoft_sharepoint/run.py @@ -0,0 +1,23 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk import AirbyteEntrypoint +from airbyte_cdk.entrypoint import launch +from source_microsoft_sharepoint.source import SourceMicrosoftSharePoint + + +def run(): + args = sys.argv[1:] + catalog_path = AirbyteEntrypoint.extract_catalog(args) + config_path = AirbyteEntrypoint.extract_config(args) + state_path = AirbyteEntrypoint.extract_state(args) + source = SourceMicrosoftSharePoint( + SourceMicrosoftSharePoint.read_catalog(catalog_path) if catalog_path else None, + SourceMicrosoftSharePoint.read_config(config_path) if config_path else None, + SourceMicrosoftSharePoint.read_state(state_path) if state_path else None, + ) + launch(source, args) diff --git a/airbyte-integrations/connectors/source-microsoft-sharepoint/source_microsoft_sharepoint/source.py b/airbyte-integrations/connectors/source-microsoft-sharepoint/source_microsoft_sharepoint/source.py index 5515c22e2398f..c3f517434132a 100644 --- a/airbyte-integrations/connectors/source-microsoft-sharepoint/source_microsoft_sharepoint/source.py +++ b/airbyte-integrations/connectors/source-microsoft-sharepoint/source_microsoft_sharepoint/source.py @@ -2,21 +2,25 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # -from typing import Any -from airbyte_cdk.models import AdvancedAuth, ConnectorSpecification, OAuthConfigSpecification +from typing import Any, Mapping, Optional + +from airbyte_cdk.models import AdvancedAuth, ConfiguredAirbyteCatalog, ConnectorSpecification, OAuthConfigSpecification from airbyte_cdk.sources.file_based.file_based_source import FileBasedSource from airbyte_cdk.sources.file_based.stream.cursor.default_file_based_cursor import DefaultFileBasedCursor +from airbyte_cdk.sources.source import TState from source_microsoft_sharepoint.spec import SourceMicrosoftSharePointSpec from source_microsoft_sharepoint.stream_reader import SourceMicrosoftSharePointStreamReader class SourceMicrosoftSharePoint(FileBasedSource): - def __init__(self, catalog_path: str): + def __init__(self, catalog: Optional[ConfiguredAirbyteCatalog], config: Optional[Mapping[str, Any]], state: Optional[TState]): super().__init__( stream_reader=SourceMicrosoftSharePointStreamReader(), spec_class=SourceMicrosoftSharePointSpec, - catalog_path=catalog_path, + catalog=catalog, + config=config, + state=state, cursor_cls=DefaultFileBasedCursor, ) diff --git a/airbyte-integrations/connectors/source-microsoft-sharepoint/source_microsoft_sharepoint/spec.py b/airbyte-integrations/connectors/source-microsoft-sharepoint/source_microsoft_sharepoint/spec.py index 45434d78b155a..89f4f0a9b971c 100644 --- a/airbyte-integrations/connectors/source-microsoft-sharepoint/source_microsoft_sharepoint/spec.py +++ b/airbyte-integrations/connectors/source-microsoft-sharepoint/source_microsoft_sharepoint/spec.py @@ -86,8 +86,19 @@ class Config: order=0, ) + search_scope: str = Field( + title="Search Scope", + description="Specifies the location(s) to search for files. Valid options are 'ACCESSIBLE_DRIVES' for all SharePoint drives the user can access, 'SHARED_ITEMS' for shared items the user has access to, and 'ALL' to search both.", + default="ALL", + enum=["ACCESSIBLE_DRIVES", "SHARED_ITEMS", "ALL"], + order=3, + ) + folder_path: str = Field( - title="Folder Path", description="Path to folder of the Microsoft SharePoint drive where the file(s) exist.", order=3 + title="Folder Path", + description="Path to a specific folder within the drives to search for files. Leave empty to search all folders of the drives. This does not apply to shared items.", + order=4, + default=".", ) @classmethod diff --git a/airbyte-integrations/connectors/source-microsoft-sharepoint/source_microsoft_sharepoint/stream_reader.py b/airbyte-integrations/connectors/source-microsoft-sharepoint/source_microsoft_sharepoint/stream_reader.py index 97804668cd30a..0c27a28e73c6c 100644 --- a/airbyte-integrations/connectors/source-microsoft-sharepoint/source_microsoft_sharepoint/stream_reader.py +++ b/airbyte-integrations/connectors/source-microsoft-sharepoint/source_microsoft_sharepoint/stream_reader.py @@ -3,16 +3,17 @@ # import logging +from datetime import datetime from functools import lru_cache from io import IOBase -from typing import Iterable, List, Optional +from typing import Iterable, List, Optional, Tuple +import requests import smart_open from airbyte_cdk.sources.file_based.file_based_stream_reader import AbstractFileBasedStreamReader, FileReadMode from airbyte_cdk.sources.file_based.remote_file import RemoteFile from airbyte_cdk.utils.traced_exception import AirbyteTracedException, FailureType from msal import ConfidentialClientApplication -from msal.exceptions import MsalServiceError from office365.graph_client import GraphClient from source_microsoft_sharepoint.spec import SourceMicrosoftSharePointSpec @@ -69,6 +70,7 @@ class SourceMicrosoftSharePointStreamReader(AbstractFileBasedStreamReader): def __init__(self): super().__init__() + self._auth_client = None self._one_drive_client = None @property @@ -76,11 +78,23 @@ def config(self) -> SourceMicrosoftSharePointSpec: return self._config @property - def one_drive_client(self) -> SourceMicrosoftSharePointSpec: + def auth_client(self): + # Lazy initialization of the auth_client + if self._auth_client is None: + self._auth_client = SourceMicrosoftSharePointClient(self._config) + return self._auth_client + + @property + def one_drive_client(self): + # Lazy initialization of the one_drive_client if self._one_drive_client is None: - self._one_drive_client = SourceMicrosoftSharePointClient(self._config).client + self._one_drive_client = self.auth_client.client return self._one_drive_client + def get_access_token(self): + # Directly fetch a new access token from the auth_client each time it's called + return self.auth_client._get_access_token()["access_token"] + @config.setter def config(self, value: SourceMicrosoftSharePointSpec): """ @@ -92,17 +106,72 @@ def config(self, value: SourceMicrosoftSharePointSpec): assert isinstance(value, SourceMicrosoftSharePointSpec) self._config = value - def _list_directories_and_files(self, root_folder, path=None): + def _get_shared_drive_object(self, drive_id: str, object_id: str, path: str) -> List[Tuple[str, str, datetime]]: + """ + Retrieves a list of all nested files under the specified object. + + Args: + drive_id: The ID of the drive containing the object. + object_id: The ID of the object to start the search from. + + Returns: + A list of tuples containing file information (name, download URL, and last modified datetime). + + Raises: + RuntimeError: If an error occurs during the request. + """ + + access_token = self.get_access_token() + headers = {"Authorization": f"Bearer {access_token}"} + base_url = f"https://graph.microsoft.com/v1.0/drives/{drive_id}" + + def get_files(url: str, path: str) -> List[Tuple[str, str, datetime]]: + response = requests.get(url, headers=headers) + if response.status_code != 200: + error_info = response.json().get("error", {}).get("message", "No additional error information provided.") + raise RuntimeError(f"Failed to retrieve files from URL '{url}'. HTTP status: {response.status_code}. Error: {error_info}") + + data = response.json() + for child in data.get("value", []): + new_path = path + "/" + child["name"] + if child.get("file"): # Object is a file + last_modified = datetime.strptime(child["lastModifiedDateTime"], "%Y-%m-%dT%H:%M:%SZ") + yield (new_path, child["@microsoft.graph.downloadUrl"], last_modified) + else: # Object is a folder, retrieve children + child_url = f"{base_url}/items/{child['id']}/children" # Use item endpoint for nested objects + yield from get_files(child_url, new_path) + yield from [] + + # Initial request to item endpoint + item_url = f"{base_url}/items/{object_id}" + item_response = requests.get(item_url, headers=headers) + if item_response.status_code != 200: + error_info = item_response.json().get("error", {}).get("message", "No additional error information provided.") + raise RuntimeError( + f"Failed to retrieve the initial shared object with ID '{object_id}' from drive '{drive_id}'. " + f"HTTP status: {item_response.status_code}. Error: {error_info}" + ) + + # Check if the object is a file or a folder + item_data = item_response.json() + if item_data.get("file"): # Initial object is a file + new_path = path + "/" + item_data["name"] + last_modified = datetime.strptime(item_data["lastModifiedDateTime"], "%Y-%m-%dT%H:%M:%SZ") + yield (new_path, item_data["@microsoft.graph.downloadUrl"], last_modified) + else: + # Initial object is a folder, start file retrieval + yield from get_files(f"{item_url}/children", path) + + def _list_directories_and_files(self, root_folder, path): """Enumerates folders and files starting from a root folder.""" drive_items = execute_query_with_retry(root_folder.children.get()) - found_items = [] for item in drive_items: item_path = path + "/" + item.name if path else item.name if item.is_file: - found_items.append((item, item_path)) + yield (item_path, item.properties["@microsoft.graph.downloadUrl"], item.properties["lastModifiedDateTime"]) else: - found_items.extend(self._list_directories_and_files(item, item_path)) - return found_items + yield from self._list_directories_and_files(item, item_path) + yield from [] def _get_files_by_drive_name(self, drives, folder_path): """Yields files from the specified drive.""" @@ -112,10 +181,15 @@ def _get_files_by_drive_name(self, drives, folder_path): for drive in drives: is_sharepoint = drive.drive_type == "documentLibrary" if is_sharepoint: - folder = ( - drive.root if folder_path in self.ROOT_PATH else execute_query_with_retry(drive.root.get_by_path(folder_path).get()) - ) - yield from self._list_directories_and_files(folder) + # Define base path for drive files to differentiate files between drives + if folder_path in self.ROOT_PATH: + folder = drive.root + folder_path_url = drive.web_url + else: + folder = execute_query_with_retry(drive.root.get_by_path(folder_path).get()) + folder_path_url = drive.web_url + "/" + folder_path + + yield from self._list_directories_and_files(folder, folder_path_url) @property @lru_cache(maxsize=None) @@ -136,21 +210,43 @@ def drives(self): return drives + def _get_shared_files_from_all_drives(self, parsed_drives): + drive_ids = [drive.id for drive in parsed_drives] + + shared_drive_items = execute_query_with_retry(self.one_drive_client.me.drive.shared_with_me()) + for drive_item in shared_drive_items: + parent_reference = drive_item.remote_item.parentReference + + # check if drive is already parsed + if parent_reference and parent_reference["driveId"] not in drive_ids: + yield from self._get_shared_drive_object(parent_reference["driveId"], drive_item.id, drive_item.web_url) + + def get_all_files(self): + if self.config.search_scope in ("ACCESSIBLE_DRIVES", "ALL"): + # Get files from accessible drives + yield from self._get_files_by_drive_name(self.drives, self.config.folder_path) + + if self.config.search_scope in ("SHARED_ITEMS", "ALL"): + parsed_drives = [] if self.config.search_scope == "SHARED_ITEMS" else self.drives + + # Get files from shared items + yield from self._get_shared_files_from_all_drives(parsed_drives) + def get_matching_files(self, globs: List[str], prefix: Optional[str], logger: logging.Logger) -> Iterable[RemoteFile]: """ Retrieve all files matching the specified glob patterns in SharePoint. """ - files = self._get_files_by_drive_name(self.drives, self.config.folder_path) + files = self.get_all_files() files_generator = filter_http_urls( self.filter_files_by_globs_and_start_date( [ MicrosoftSharePointRemoteFile( uri=path, - download_url=file.properties["@microsoft.graph.downloadUrl"], - last_modified=file.properties["lastModifiedDateTime"], + download_url=download_url, + last_modified=last_modified, ) - for file, path in files + for path, download_url, last_modified in files ], globs, ), diff --git a/airbyte-integrations/connectors/source-microsoft-sharepoint/source_microsoft_sharepoint/utils.py b/airbyte-integrations/connectors/source-microsoft-sharepoint/source_microsoft_sharepoint/utils.py index c77cd9badbde6..f2c33afacceab 100644 --- a/airbyte-integrations/connectors/source-microsoft-sharepoint/source_microsoft_sharepoint/utils.py +++ b/airbyte-integrations/connectors/source-microsoft-sharepoint/source_microsoft_sharepoint/utils.py @@ -2,6 +2,7 @@ import time from datetime import datetime, timedelta +from enum import Enum from http import HTTPStatus from airbyte_cdk.models import FailureType @@ -9,6 +10,12 @@ from airbyte_cdk.utils.traced_exception import AirbyteTracedException +class SearchScope(Enum): + OWN_DRIVES = "OWN_DRIVES" + SHARED_ITEMS = "SHARED_ITEMS" + BOTH = "BOTH" + + class MicrosoftSharePointRemoteFile(RemoteFile): download_url: str diff --git a/airbyte-integrations/connectors/source-microsoft-sharepoint/unit_tests/test_stream_reader.py b/airbyte-integrations/connectors/source-microsoft-sharepoint/unit_tests/test_stream_reader.py index 4a0d5d7116e50..caa0db37706a4 100644 --- a/airbyte-integrations/connectors/source-microsoft-sharepoint/unit_tests/test_stream_reader.py +++ b/airbyte-integrations/connectors/source-microsoft-sharepoint/unit_tests/test_stream_reader.py @@ -1,12 +1,11 @@ # # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # -import datetime -from unittest.mock import Mock, patch +from datetime import datetime +from unittest.mock import MagicMock, Mock, PropertyMock, call, patch import pytest from airbyte_cdk.utils.traced_exception import AirbyteTracedException -from msal.exceptions import MsalServiceError from source_microsoft_sharepoint.spec import SourceMicrosoftSharePointSpec from source_microsoft_sharepoint.stream_reader import ( FileReadMode, @@ -19,7 +18,7 @@ def create_mock_drive_item(is_file, name, children=None): """Helper function to create a mock drive item.""" - mock_item = Mock() + mock_item = MagicMock(properties={"@microsoft.graph.downloadUrl": "test_url", "lastModifiedDateTime": "1991-08-24"}) mock_item.is_file = is_file mock_item.name = name mock_item.children.get.return_value.execute_query = Mock(return_value=children or []) @@ -34,6 +33,7 @@ def setup_reader_class(): config.credentials = Mock() config.folder_path = "." config.credentials.auth_type = "Client" + config.search_scope = "ALL" reader.config = config # Set up the necessary configuration # Mock the client creation @@ -43,22 +43,25 @@ def setup_reader_class(): yield reader -@pytest.fixture -def mock_drive_files(): - # Mock files returned by SharePoint +@pytest.fixture(name="mock_drive_files") +def create_mock_drive_files(): + """ + Provides mock data for SharePoint drive files (personal drive). + """ return [ - Mock( - properties={ - "@microsoft.graph.downloadUrl": "https://example.com/file1.csv", - "lastModifiedDateTime": datetime.datetime(2021, 1, 1), - } - ), - Mock( - properties={ - "@microsoft.graph.downloadUrl": "https://example.com/file2.txt", - "lastModifiedDateTime": datetime.datetime(2021, 1, 1), - } - ), + ("file1.csv", "https://example.com/file1.csv", datetime(2021, 1, 1)), + ("file2.txt", "https://example.com/file2.txt", datetime(2021, 1, 1)), + ] + + +@pytest.fixture(name="mock_shared_drive_files") +def create_mock_shared_drive_files(): + """ + Provides mock data for SharePoint drive files (shared drives). + """ + return [ + ("file3.csv", "https://example.com/file3.csv", datetime(2021, 3, 1)), + ("file4.txt", "https://example.com/file4.txt", datetime(2021, 4, 1)), ] @@ -112,9 +115,10 @@ def test_get_access_token(setup_client_class, has_refresh_token, token_response, @patch("source_microsoft_sharepoint.stream_reader.execute_query_with_retry") @patch("source_microsoft_sharepoint.stream_reader.SourceMicrosoftSharePointStreamReader.filter_files_by_globs_and_start_date") -def test_get_matching_files(mock_filter_files, mock_execute_query, setup_reader_class, mock_drive_files): +def test_get_matching_files(mock_filter_files, mock_execute_query, setup_reader_class, mock_drive_files, mock_shared_drive_files): instance = setup_reader_class - instance._get_files_by_drive_name = Mock(return_value=iter([(mock_drive_files[0], "file1.csv"), (mock_drive_files[1], "file2.txt")])) + instance._get_files_by_drive_name = Mock(return_value=mock_drive_files) + instance._get_shared_files_from_all_drives = Mock(return_value=mock_shared_drive_files) # Set up mocks mock_drive = Mock() @@ -131,15 +135,21 @@ def test_get_matching_files(mock_filter_files, mock_execute_query, setup_reader_ files = list(instance.get_matching_files(globs, prefix, logger)) # Assertions - assert len(files) == 1 + assert len(files) == 2 + assert isinstance(files[0], MicrosoftSharePointRemoteFile) assert files[0].uri == "file1.csv" assert "https://example.com/file1.csv" in files[0].download_url + assert isinstance(files[1], MicrosoftSharePointRemoteFile) + assert files[1].uri == "file3.csv" + assert "https://example.com/file3.csv" in files[1].download_url + def test_get_matching_files_empty_drive(setup_reader_class): instance = setup_reader_class instance._get_files_by_drive_name = Mock(return_value=iter([])) + instance._get_shared_files_from_all_drives = Mock(return_value=iter([])) # Define test parameters globs = ["*.csv"] @@ -213,11 +223,13 @@ def test_list_directories_and_files(): stream_reader = SourceMicrosoftSharePointStreamReader() - result = stream_reader._list_directories_and_files(mock_root_folder) + result = list(stream_reader._list_directories_and_files(mock_root_folder, "https://example.com/root")) assert len(result) == 2 - assert result[0][1] == "folder1/file1.txt" - assert result[1][1] == "file2.txt" + assert result == [ + ("https://example.com/root/folder1/file1.txt", "test_url", "1991-08-24"), + ("https://example.com/root/file2.txt", "test_url", "1991-08-24"), + ] @pytest.mark.parametrize( @@ -232,6 +244,7 @@ def test_get_files_by_drive_name(mock_list_directories_and_files, drive_type, fi # Helper function usage mock_drive = Mock() mock_drive.name = "testDrive" + mock_drive.web_url = "https://example.com/testDrive" mock_drive.drive_type = drive_type mock_drive.root.get_by_path.return_value.get().execute_query_with_incremental_retry.return_value = create_mock_drive_item( is_file=False, name="root" @@ -252,3 +265,230 @@ def test_get_files_by_drive_name(mock_list_directories_and_files, drive_type, fi assert len(files) == files_number if files_number: assert files[0].name == "testFile.txt" + + +@pytest.mark.parametrize( + "drive_ids, shared_drive_item_dicts, expected_result, expected_calls", + [ + ([1, 2, 3], [], [], []), + ([1, 2, 3], [{"drive_id": 1, "id": 4, "web_url": "test_url4"}], [], []), + ([1, 2, 3], [{"drive_id": 4, "id": 4, "web_url": "test_url4"}], [4], [call(4, 4, "test_url4")]), + ( + [1, 2, 3], + [{"drive_id": 4, "id": 4, "web_url": "test_url4"}, {"drive_id": 5, "id": 5, "web_url": "test_url5"}], + [4, 5], + [call(4, 4, "test_url4"), call(5, 5, "test_url5")], + ), + ( + [1, 2, 3], + [ + {"drive_id": 4, "id": 4, "web_url": "test_url4"}, + {"drive_id": 5, "id": 5, "web_url": "test_url5"}, + {"drive_id": 6, "id": 6, "web_url": "test_url6"}, + ], + [4, 5, 6], + [call(4, 4, "test_url4"), call(5, 5, "test_url5"), call(6, 6, "test_url6")], + ), + ], +) +@patch("source_microsoft_sharepoint.stream_reader.execute_query_with_retry", side_effect=lambda x: x) +def test_get_shared_files_from_all_drives( + mock_execute_query_with_retry, drive_ids, shared_drive_item_dicts, expected_result, expected_calls +): + stream_reader = SourceMicrosoftSharePointStreamReader() + stream_reader._config = Mock() + + # Mock _get_shared_drive_object method + with patch.object( + SourceMicrosoftSharePointStreamReader, "_get_shared_drive_object", return_value=expected_result + ) as mock_get_shared_drive_object: + # Setup shared_drive_items mock objects + shared_drive_items = [ + MagicMock(remote_item=MagicMock(parentReference={"driveId": item["drive_id"]}), id=item["id"], web_url=item["web_url"]) + for item in shared_drive_item_dicts + ] + + with patch.object(SourceMicrosoftSharePointStreamReader, "one_drive_client", new_callable=PropertyMock) as mock_one_drive_client: + mock_one_drive_client.return_value.me.drive.shared_with_me.return_value = shared_drive_items + + mock_drives = [Mock(id=drive_id) for drive_id in drive_ids] + + # Execute the method under test + list(stream_reader._get_shared_files_from_all_drives(mock_drives)) + + # Assert _get_shared_drive_object was called correctly + mock_get_shared_drive_object.assert_has_calls(expected_calls, any_order=True) + + +# Sample data for mocking responses +file_response = { + "file": True, + "name": "TestFile.txt", + "@microsoft.graph.downloadUrl": "http://example.com/download", + "lastModifiedDateTime": "2021-01-01T00:00:00Z", +} + +empty_folder_response = {"folder": True, "value": []} + +# Adjusting the folder_with_nested_files to represent the initial folder response +folder_with_nested_files_initial = { + "folder": True, + "value": [ + {"id": "subfolder1", "folder": True, "name": "subfolder1"}, + {"id": "subfolder2", "folder": True, "name": "subfolder2"}, + ], # Empty subfolder # Subfolder with a file +} + +# Response for the empty subfolder (subfolder1) +empty_subfolder_response = {"value": [], "name": "subfolder1"} # No files or folders inside subfolder1 + +# Response for the subfolder with a file (subfolder2) +not_empty_subfolder_response = { + "value": [ + { + "file": True, + "name": "NestedFile.txt", + "@microsoft.graph.downloadUrl": "http://example.com/nested", + "lastModifiedDateTime": "2021-01-02T00:00:00Z", + } + ], + "name": "subfolder2", +} + + +@pytest.mark.parametrize( + "initial_response, subsequent_responses, expected_result, raises_error, expected_error_message, initial_path", + [ + # Object ID is a file + ( + file_response, + [], + [ + ( + "http://example.com/TestFile.txt", + "http://example.com/download", + datetime.strptime("2021-01-01T00:00:00Z", "%Y-%m-%dT%H:%M:%SZ"), + ) + ], + False, + None, + "http://example.com", + ), + # Object ID is an empty folder + (empty_folder_response, [empty_subfolder_response], [], False, None, "http://example.com"), + # Object ID is a folder with empty subfolders and files + ( + {"folder": True, "name": "root"}, # Initial folder response + [ + folder_with_nested_files_initial, + empty_subfolder_response, + not_empty_subfolder_response, + ], + [ + ( + "http://example.com/subfolder2/NestedFile.txt", + "http://example.com/nested", + datetime.strptime("2021-01-02T00:00:00Z", "%Y-%m-%dT%H:%M:%SZ"), + ) + ], + False, + None, + "http://example.com", + ), + # Error response on initial request + ( + MagicMock(status_code=400, json=MagicMock(return_value={"error": {"message": "Bad Request"}})), + [], + [], + True, + "Failed to retrieve the initial shared object with ID 'dummy_object_id' from drive 'dummy_drive_id'. HTTP status: 400. Error: Bad Request", + "http://example.com", + ), + # Error response while iterating over nested + ( + {"folder": True, "name": "root"}, + [MagicMock(status_code=400, json=MagicMock(return_value={"error": {"message": "Bad Request"}}))], + [], + True, + ( + "Failed to retrieve files from URL " + "'https://graph.microsoft.com/v1.0/drives/dummy_drive_id/items/dummy_object_id/children'. " + "HTTP status: 400. Error: Bad Request" + ), + "http://example.com", + ), + ], +) +@patch("source_microsoft_sharepoint.stream_reader.requests.get") +@patch("source_microsoft_sharepoint.stream_reader.SourceMicrosoftSharePointStreamReader.get_access_token") +def test_get_shared_drive_object( + mock_get_access_token, + mock_requests_get, + initial_response, + subsequent_responses, + expected_result, + raises_error, + expected_error_message, + initial_path, +): + mock_get_access_token.return_value = "dummy_access_token" + mock_responses = [ + initial_response + if isinstance(initial_response, MagicMock) + else MagicMock(status_code=200, json=MagicMock(return_value=initial_response)) + ] + for response in subsequent_responses: + mock_responses.append( + response if isinstance(response, MagicMock) else MagicMock(status_code=200, json=MagicMock(return_value=response)) + ) + mock_requests_get.side_effect = mock_responses + + reader = SourceMicrosoftSharePointStreamReader() + + if raises_error: + with pytest.raises(RuntimeError) as exc_info: + list(reader._get_shared_drive_object("dummy_drive_id", "dummy_object_id", initial_path)) + assert str(exc_info.value) == expected_error_message + else: + result = list(reader._get_shared_drive_object("dummy_drive_id", "dummy_object_id", initial_path)) + assert result == expected_result + + +@pytest.mark.parametrize( + "auth_type, user_principal_name", + [ + ("Client", None), + ("User", "user@example.com"), + ], +) +def test_drives_property(auth_type, user_principal_name): + with patch("source_microsoft_sharepoint.stream_reader.execute_query_with_retry") as mock_execute_query, patch( + "source_microsoft_sharepoint.stream_reader.SourceMicrosoftSharePointStreamReader.one_drive_client" + ) as mock_one_drive_client: + + # Setup for different authentication types + config_mock = MagicMock(credentials=MagicMock(auth_type=auth_type, user_principal_name=user_principal_name)) + + # Mock responses for the drives list and a single drive (my_drive) + drives_response = MagicMock() + my_drive = MagicMock() + drives_response.add_child = MagicMock() + + # Set up mock responses for the two different calls within the property based on auth_type + if auth_type == "Client": + mock_execute_query.side_effect = [drives_response, my_drive] + else: + # For User auth_type, assume a call to get user's principal name drive + mock_execute_query.side_effect = [drives_response, my_drive] + + # Create an instance of the reader and set its config mock + reader = SourceMicrosoftSharePointStreamReader() + reader._config = config_mock + + # Access the drives property to trigger the retrieval and caching logic + drives = reader.drives + + # Assertions + assert drives is not None + mock_execute_query.assert_called() + drives_response.add_child.assert_called_once_with(my_drive) diff --git a/airbyte-integrations/connectors/source-microsoft-teams/.dockerignore b/airbyte-integrations/connectors/source-microsoft-teams/.dockerignore deleted file mode 100644 index e474c376ce683..0000000000000 --- a/airbyte-integrations/connectors/source-microsoft-teams/.dockerignore +++ /dev/null @@ -1,6 +0,0 @@ -* -!Dockerfile -!main.py -!source_microsoft_teams -!setup.py -!secrets diff --git a/airbyte-integrations/connectors/source-microsoft-teams/.gitignore b/airbyte-integrations/connectors/source-microsoft-teams/.gitignore deleted file mode 100644 index 29fffc6a50cc9..0000000000000 --- a/airbyte-integrations/connectors/source-microsoft-teams/.gitignore +++ /dev/null @@ -1 +0,0 @@ -NEW_SOURCE_CHECKLIST.md diff --git a/airbyte-integrations/connectors/source-microsoft-teams/Dockerfile b/airbyte-integrations/connectors/source-microsoft-teams/Dockerfile deleted file mode 100644 index 4b206258d0b3e..0000000000000 --- a/airbyte-integrations/connectors/source-microsoft-teams/Dockerfile +++ /dev/null @@ -1,38 +0,0 @@ -FROM python:3.9.11-alpine3.15 as base - -# build and load all requirements -FROM base as builder -WORKDIR /airbyte/integration_code - -# upgrade pip to the latest version -RUN apk --no-cache upgrade \ - && pip install --upgrade pip \ - && apk --no-cache add tzdata build-base libffi-dev - - -COPY setup.py ./ -# install necessary packages to a temporary folder -RUN pip install --prefix=/install . - -# build a clean environment -FROM base -WORKDIR /airbyte/integration_code - -# copy all loaded and built libraries to a pure basic image -COPY --from=builder /install /usr/local -# add default timezone settings -COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime -RUN echo "Etc/UTC" > /etc/timezone - -# bash is installed for more convenient debugging. -RUN apk --no-cache add bash - -# copy payload code only -COPY main.py ./ -COPY source_microsoft_teams ./source_microsoft_teams - -ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] - -LABEL io.airbyte.version=1.0.0 -LABEL io.airbyte.name=airbyte/source-microsoft-teams diff --git a/airbyte-integrations/connectors/source-microsoft-teams/README.md b/airbyte-integrations/connectors/source-microsoft-teams/README.md index de8db4cc28b2b..54da8169d26f5 100644 --- a/airbyte-integrations/connectors/source-microsoft-teams/README.md +++ b/airbyte-integrations/connectors/source-microsoft-teams/README.md @@ -1,99 +1,103 @@ -# Rabbitmq Destination +# Microsoft Teams Source -This is the repository for the Rabbitmq destination connector, written in Python. -For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/destinations/rabbitmq). +This is the repository for the Microsoft Teams configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.com/integrations/sources/microsoft-teams). ## Local development ### Prerequisites -**To iterate on this connector, make sure to complete this prerequisites section.** -#### Minimum Python version required `= 3.7.0` +* Python (`^3.9`) +* Poetry (`^1.7`) - installation instructions [here](https://python-poetry.org/docs/#installation) -#### Build & Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python -m venv .venv -``` -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt + +### Installing the connector + +From this connector directory, run: +```bash +poetry install --with dev ``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. -#### Create credentials -**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/destinations/rabbitmq) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `destination_rabbitmq/spec.json` file. -Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. -See `integration_tests/sample_config.json` for a sample config file. +### Create credentials + +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/sources/microsoft-teams) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `src/source_microsoft_teams/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `sample_files/sample_config.json` for a sample config file. -**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `destination rabbitmq test creds` -and place them into `secrets/config.json`. ### Locally running the connector + ``` -python main.py spec -python main.py check --config secrets/config.json -python main.py discover --config secrets/config.json -python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +poetry run source-microsoft-teams spec +poetry run source-microsoft-teams check --config secrets/config.json +poetry run source-microsoft-teams discover --config secrets/config.json +poetry run source-microsoft-teams read --config secrets/config.json --catalog sample_files/configured_catalog.json ``` -### Locally running the connector docker image +### Running tests +To run tests locally, from the connector directory run: -#### Build -**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):** -```bash -airbyte-ci connectors --name destination-rabbitmq build +``` +poetry run pytest tests ``` -An image will be built with the tag `airbyte/destination-rabbitmq:dev`. +### Building the docker image -**Via `docker build`:** +1. Install [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) +2. Run the following command to build the docker image: ```bash -docker build -t airbyte/destination-rabbitmq:dev . +airbyte-ci connectors --name=source-microsoft-teams build ``` -#### Run +An image will be available on your host with the tag `airbyte/source-microsoft-teams:dev`. + + +### Running as a docker container + Then run any of the connector commands as follows: ``` -docker run --rm airbyte/destination-rabbitmq:dev spec -docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-rabbitmq:dev check --config /secrets/config.json -# messages.jsonl is a file containing line-separated JSON representing AirbyteMessages -cat messages.jsonl | docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-rabbitmq:dev write --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +docker run --rm airbyte/source-microsoft-teams:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-microsoft-teams:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-microsoft-teams:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-microsoft-teams:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json ``` -## Testing +### Running our CI test suite + You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): ```bash airbyte-ci connectors --name=source-microsoft-teams test ``` ### Customizing acceptance Tests -Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. + +Customize `acceptance-test-config.yml` file to configure acceptance tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. -## Dependency Management -All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. -We split dependencies between two groups, dependencies that are: -* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. -* required for the testing need to go to `TEST_REQUIREMENTS` list +### Dependency Management + +All of your dependencies should be managed via Poetry. +To add a new dependency, run: +```bash +poetry add +``` + +Please commit the changes to `pyproject.toml` and `poetry.lock` files. + +## Publishing a new version of the connector -### Publishing a new version of the connector You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? 1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-microsoft-teams test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). +2. Bump the connector version (please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors)): + - bump the `dockerImageTag` value in in `metadata.yaml` + - bump the `version` value in `pyproject.toml` 3. Make sure the `metadata.yaml` content is up to date. -4. Make the connector documentation and its changelog is up to date (`docs/integrations/sources/microsoft-teams.md`). +4. Make sure the connector documentation and its changelog is up to date (`docs/integrations/sources/microsoft-teams.md`). 5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). 6. Pat yourself on the back for being an awesome contributor. 7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. - +8. Once your PR is merged, the new version of the connector will be automatically published to Docker Hub and our connector registry. \ No newline at end of file diff --git a/airbyte-integrations/connector-templates/source-singer/integration_tests/__init__.py b/airbyte-integrations/connectors/source-microsoft-teams/__init__.py similarity index 100% rename from airbyte-integrations/connector-templates/source-singer/integration_tests/__init__.py rename to airbyte-integrations/connectors/source-microsoft-teams/__init__.py diff --git a/airbyte-integrations/connectors/source-microsoft-teams/acceptance-test-config.yml b/airbyte-integrations/connectors/source-microsoft-teams/acceptance-test-config.yml index b87584405b266..123609ca78c36 100644 --- a/airbyte-integrations/connectors/source-microsoft-teams/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-microsoft-teams/acceptance-test-config.yml @@ -1,22 +1,35 @@ # See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) # for more information about how to configure these tests connector_image: airbyte/source-microsoft-teams:dev -tests: +acceptance_tests: spec: - - spec_path: "source_microsoft_teams/spec.json" + tests: + - spec_path: "source_microsoft_teams/spec.yaml" connection: - - config_path: "secrets/config.json" - status: "succeed" - - config_path: "secrets/old_config.json" - status: "succeed" - - config_path: "secrets/config_oauth.json" - status: "succeed" - - config_path: "integration_tests/invalid_config.json" - status: "failed" + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" discovery: - - config_path: "secrets/config.json" + tests: + - config_path: "secrets/config.json" basic_read: - - config_path: "secrets/config.json" - empty_streams: [] + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: + - name: team_device_usage_report + bypass_reason: "Could not seed this stream with sandbox credentails" + incremental: + bypass_reason: "This connector does not implement incremental sync" + # TODO uncomment this block this block if your connector implements incremental sync: + # tests: + # - config_path: "secrets/config.json" + # configured_catalog_path: "integration_tests/configured_catalog.json" + # future_state: + # future_state_path: "integration_tests/abnormal_state.json" full_refresh: - - config_path: "secrets/config.json" + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-microsoft-teams/integration_tests/__init__.py b/airbyte-integrations/connectors/source-microsoft-teams/integration_tests/__init__.py index 46b7376756ec6..c941b30457953 100644 --- a/airbyte-integrations/connectors/source-microsoft-teams/integration_tests/__init__.py +++ b/airbyte-integrations/connectors/source-microsoft-teams/integration_tests/__init__.py @@ -1,3 +1,3 @@ # -# Copyright (c) 2021 Airbyte, Inc., all rights reserved. +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. # diff --git a/airbyte-integrations/connector-templates/source-python-http-api/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-microsoft-teams/integration_tests/abnormal_state.json similarity index 100% rename from airbyte-integrations/connector-templates/source-python-http-api/integration_tests/abnormal_state.json rename to airbyte-integrations/connectors/source-microsoft-teams/integration_tests/abnormal_state.json diff --git a/airbyte-integrations/connectors/source-microsoft-teams/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-microsoft-teams/integration_tests/acceptance.py index 82823254d2666..9e6409236281f 100644 --- a/airbyte-integrations/connectors/source-microsoft-teams/integration_tests/acceptance.py +++ b/airbyte-integrations/connectors/source-microsoft-teams/integration_tests/acceptance.py @@ -11,4 +11,6 @@ @pytest.fixture(scope="session", autouse=True) def connector_setup(): """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-microsoft-teams/sample_files/configured_catalog.json b/airbyte-integrations/connectors/source-microsoft-teams/integration_tests/configured_catalog.json similarity index 100% rename from airbyte-integrations/connectors/source-microsoft-teams/sample_files/configured_catalog.json rename to airbyte-integrations/connectors/source-microsoft-teams/integration_tests/configured_catalog.json diff --git a/airbyte-integrations/connectors/source-microsoft-teams/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-microsoft-teams/integration_tests/invalid_config.json index 3eaef7a0ea205..f4538c693ee40 100644 --- a/airbyte-integrations/connectors/source-microsoft-teams/integration_tests/invalid_config.json +++ b/airbyte-integrations/connectors/source-microsoft-teams/integration_tests/invalid_config.json @@ -1,6 +1,9 @@ { - "client_id": "wrong_client_id", - "tenant_id": "wrong_tenant_id", - "client_secret": "wrong_client_secret", + "credentials": { + "auth_type": "Token", + "client_id": "wrong_client_id", + "tenant_id": "wrong_tenant_id", + "client_secret": "wrong_client_secret" + }, "period": "D180" } diff --git a/airbyte-integrations/connectors/source-microsoft-teams/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-microsoft-teams/integration_tests/sample_config.json index d187c3d46affb..7e2bed5642b29 100644 --- a/airbyte-integrations/connectors/source-microsoft-teams/integration_tests/sample_config.json +++ b/airbyte-integrations/connectors/source-microsoft-teams/integration_tests/sample_config.json @@ -1,6 +1,9 @@ { - "tenant_id": "", - "client_id": "", - "client_secret": "", - "period": "D7" + "credentials": { + "auth_type": "Token", + "client_id": "client_id", + "tenant_id": "tenant_id", + "client_secret": "client_secret" + }, + "period": "D180" } diff --git a/airbyte-integrations/connector-templates/source-python-http-api/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-microsoft-teams/integration_tests/sample_state.json similarity index 100% rename from airbyte-integrations/connector-templates/source-python-http-api/integration_tests/sample_state.json rename to airbyte-integrations/connectors/source-microsoft-teams/integration_tests/sample_state.json diff --git a/airbyte-integrations/connectors/source-microsoft-teams/metadata.yaml b/airbyte-integrations/connectors/source-microsoft-teams/metadata.yaml index ce68d172788c2..ae6127b7a62ae 100644 --- a/airbyte-integrations/connectors/source-microsoft-teams/metadata.yaml +++ b/airbyte-integrations/connectors/source-microsoft-teams/metadata.yaml @@ -1,22 +1,32 @@ data: + allowedHosts: + hosts: + - graph.microsoft.com + registries: + oss: + enabled: true + cloud: + enabled: true + remoteRegistries: + pypi: + enabled: true + packageName: airbyte-source-microsoft-teams + connectorBuildOptions: + # Please update to the latest version of the connector base image. + # https://hub.docker.com/r/airbyte/python-connector-base + # Please use the full address with sha256 hash to guarantee build reproducibility. + baseImage: docker.io/airbyte/python-connector-base:1.2.0@sha256:c22a9d97464b69d6ef01898edf3f8612dc11614f05a84984451dde195f337db9 connectorSubtype: api connectorType: source definitionId: eaf50f04-21dd-4620-913b-2a83f5635227 - dockerImageTag: 1.0.0 + dockerImageTag: 1.1.0 dockerRepository: airbyte/source-microsoft-teams githubIssueLabel: source-microsoft-teams icon: microsoft-teams.svg license: MIT - name: Microsoft teams - remoteRegistries: - pypi: - enabled: true - packageName: airbyte-source-microsoft-teams - registries: - cloud: - enabled: true - oss: - enabled: true + name: Microsoft Teams + releaseDate: 2020-12-21 + releaseStage: alpha releases: breakingChanges: 1.0.0: @@ -25,13 +35,9 @@ data: A full schema refresh is required to upgrade to this version. For more details, see our migration guide. upgradeDeadline: "2024-01-24" - releaseStage: alpha + supportLevel: community documentationUrl: https://docs.airbyte.com/integrations/sources/microsoft-teams tags: - language:python - - cdk:python - ab_internal: - sl: 100 - ql: 100 - supportLevel: community + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-microsoft-teams/poetry.lock b/airbyte-integrations/connectors/source-microsoft-teams/poetry.lock new file mode 100644 index 0000000000000..b2ceb43cf9540 --- /dev/null +++ b/airbyte-integrations/connectors/source-microsoft-teams/poetry.lock @@ -0,0 +1,1008 @@ +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. + +[[package]] +name = "airbyte-cdk" +version = "0.81.0" +description = "A framework for writing Airbyte Connectors." +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "airbyte_cdk-0.81.0-py3-none-any.whl", hash = "sha256:d2b1998776a299271b5e055065f14f199bb157b25b333cc7f1504574edbd7012"}, + {file = "airbyte_cdk-0.81.0.tar.gz", hash = "sha256:031f9b5485f4b61b42e325b26a30c12ef990c18135e2beb581e98969d715ab82"}, +] + +[package.dependencies] +airbyte-protocol-models = "*" +backoff = "*" +cachetools = "*" +Deprecated = ">=1.2,<1.3" +dpath = ">=2.0.1,<2.1.0" +genson = "1.2.2" +isodate = ">=0.6.1,<0.7.0" +Jinja2 = ">=3.1.2,<3.2.0" +jsonref = ">=0.2,<0.3" +jsonschema = ">=3.2.0,<3.3.0" +pendulum = "<3.0.0" +pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" +python-dateutil = "*" +PyYAML = ">=6.0.1,<7.0.0" +requests = "*" +requests_cache = "*" +wcmatch = "8.4" + +[package.extras] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] +vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] + +[[package]] +name = "airbyte-protocol-models" +version = "0.9.0" +description = "Declares the Airbyte Protocol." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte_protocol_models-0.9.0-py3-none-any.whl", hash = "sha256:e972e140b5efd1edad5a338bcae8fdee9fc12545caf2c321e0f61b151c163a9b"}, + {file = "airbyte_protocol_models-0.9.0.tar.gz", hash = "sha256:40b69c33df23fe82d7078e84beb123bd604480e4d73cb277a890fcc92aedc8d2"}, +] + +[package.dependencies] +pydantic = ">=1.9.2,<2.0.0" + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "bracex" +version = "2.4" +description = "Bash style brace expander." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + +[[package]] +name = "cachetools" +version = "5.3.3" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "dpath" +version = "2.0.8" +description = "Filesystem-like pathing and searching for dictionaries" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, + {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "genson" +version = "1.2.2" +description = "GenSON is a powerful, user-friendly JSON Schema generator." +optional = false +python-versions = "*" +files = [ + {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, +] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jsonref" +version = "0.2" +description = "An implementation of JSON Reference for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, +] + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0" +setuptools = "*" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "packaging" +version = "24.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, +] + +[[package]] +name = "pendulum" +version = "2.1.2" +description = "Python datetimes made easy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, +] + +[package.dependencies] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pydantic" +version = "1.10.15" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50f1666a9940d3d68683c9d96e39640f709d7a72ff8702987dab1761036206bb"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82790d4753ee5d00739d6cb5cf56bceb186d9d6ce134aca3ba7befb1eedbc2c8"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d207d5b87f6cbefbdb1198154292faee8017d7495a54ae58db06762004500d00"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e49db944fad339b2ccb80128ffd3f8af076f9f287197a480bf1e4ca053a866f0"}, + {file = "pydantic-1.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:d3b5c4cbd0c9cb61bbbb19ce335e1f8ab87a811f6d589ed52b0254cf585d709c"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3d5731a120752248844676bf92f25a12f6e45425e63ce22e0849297a093b5b0"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c365ad9c394f9eeffcb30a82f4246c0006417f03a7c0f8315d6211f25f7cb654"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3287e1614393119c67bd4404f46e33ae3be3ed4cd10360b48d0a4459f420c6a3"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be51dd2c8596b25fe43c0a4a59c2bee4f18d88efb8031188f9e7ddc6b469cf44"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6a51a1dd4aa7b3f1317f65493a182d3cff708385327c1c82c81e4a9d6d65b2e4"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4e316e54b5775d1eb59187f9290aeb38acf620e10f7fd2f776d97bb788199e53"}, + {file = "pydantic-1.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:0d142fa1b8f2f0ae11ddd5e3e317dcac060b951d605fda26ca9b234b92214986"}, + {file = "pydantic-1.10.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7ea210336b891f5ea334f8fc9f8f862b87acd5d4a0cbc9e3e208e7aa1775dabf"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3453685ccd7140715e05f2193d64030101eaad26076fad4e246c1cc97e1bb30d"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bea1f03b8d4e8e86702c918ccfd5d947ac268f0f0cc6ed71782e4b09353b26f"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:af9850d98fc21e5bc24ea9e35dd80a29faf6462c608728a110c0a30b595e58b7"}, + {file = "pydantic-1.10.15-cp37-cp37m-win_amd64.whl", hash = "sha256:d31ee5b14a82c9afe2bd26aaa405293d4237d0591527d9129ce36e58f19f95c1"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5e09c19df304b8123938dc3c53d3d3be6ec74b9d7d0d80f4f4b5432ae16c2022"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7ac9237cd62947db00a0d16acf2f3e00d1ae9d3bd602b9c415f93e7a9fc10528"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:584f2d4c98ffec420e02305cf675857bae03c9d617fcfdc34946b1160213a948"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbc6989fad0c030bd70a0b6f626f98a862224bc2b1e36bfc531ea2facc0a340c"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d573082c6ef99336f2cb5b667b781d2f776d4af311574fb53d908517ba523c22"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6bd7030c9abc80134087d8b6e7aa957e43d35714daa116aced57269a445b8f7b"}, + {file = "pydantic-1.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:3350f527bb04138f8aff932dc828f154847fbdc7a1a44c240fbfff1b57f49a12"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51d405b42f1b86703555797270e4970a9f9bd7953f3990142e69d1037f9d9e51"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a980a77c52723b0dc56640ced396b73a024d4b74f02bcb2d21dbbac1debbe9d0"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f1a1fb467d3f49e1708a3f632b11c69fccb4e748a325d5a491ddc7b5d22383"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:676ed48f2c5bbad835f1a8ed8a6d44c1cd5a21121116d2ac40bd1cd3619746ed"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92229f73400b80c13afcd050687f4d7e88de9234d74b27e6728aa689abcf58cc"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2746189100c646682eff0bce95efa7d2e203420d8e1c613dc0c6b4c1d9c1fde4"}, + {file = "pydantic-1.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:394f08750bd8eaad714718812e7fab615f873b3cdd0b9d84e76e51ef3b50b6b7"}, + {file = "pydantic-1.10.15-py3-none-any.whl", hash = "sha256:28e552a060ba2740d0d2aabe35162652c1459a0b9069fe0db7f4ee0e18e74d58"}, + {file = "pydantic-1.10.15.tar.gz", hash = "sha256:ca832e124eda231a60a041da4f013e3ff24949d94a01154b137fc2f2a43c3ffb"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + +[[package]] +name = "pyrsistent" +version = "0.20.0" +description = "Persistent/Functional/Immutable data structures" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, +] + +[[package]] +name = "pytest" +version = "8.1.1" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"}, + {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.4,<2.0" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-mock" +version = "3.14.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, +] + +[package.dependencies] +pytest = ">=6.2.5" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-cache" +version = "1.2.0" +description = "A persistent cache for python requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, +] + +[package.dependencies] +attrs = ">=21.2" +cattrs = ">=22.2" +platformdirs = ">=2.5" +requests = ">=2.22" +url-normalize = ">=1.4" +urllib3 = ">=1.25.5" + +[package.extras] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] +bson = ["bson (>=0.5)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] +dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] +json = ["ujson (>=5.4)"] +mongodb = ["pymongo (>=3)"] +redis = ["redis (>=3)"] +security = ["itsdangerous (>=2.0)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "requests-mock" +version = "1.12.1" +description = "Mock out responses from the requests package" +optional = false +python-versions = ">=3.5" +files = [ + {file = "requests-mock-1.12.1.tar.gz", hash = "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401"}, + {file = "requests_mock-1.12.1-py2.py3-none-any.whl", hash = "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563"}, +] + +[package.dependencies] +requests = ">=2.22,<3" + +[package.extras] +fixture = ["fixtures"] + +[[package]] +name = "setuptools" +version = "69.2.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "typing-extensions" +version = "4.11.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, + {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, +] + +[[package]] +name = "url-normalize" +version = "1.4.3" +description = "URL normalization for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, + {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wcmatch" +version = "8.4" +description = "Wildcard/glob file name matcher." +optional = false +python-versions = ">=3.7" +files = [ + {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, + {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, +] + +[package.dependencies] +bracex = ">=2.1.1" + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9,<3.12" +content-hash = "acd5908c82765b55ec5859799db1bcbb616d044db689a3ba94346d8b1d2f9b5c" diff --git a/airbyte-integrations/connectors/source-microsoft-teams/pyproject.toml b/airbyte-integrations/connectors/source-microsoft-teams/pyproject.toml new file mode 100644 index 0000000000000..11649fe3788e3 --- /dev/null +++ b/airbyte-integrations/connectors/source-microsoft-teams/pyproject.toml @@ -0,0 +1,27 @@ +[build-system] +requires = [ "poetry-core>=1.0.0",] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +version = "1.1.0" +name = "source-microsoft-teams" +description = "Source implementation for microsoft-teams." +authors = [ "Airbyte ",] +license = "MIT" +readme = "README.md" +documentation = "https://docs.airbyte.com/integrations/sources/microsoft-teams" +homepage = "https://airbyte.com" +repository = "https://github.com/airbytehq/airbyte" +packages = [ { include = "source_microsoft_teams" }, {include = "main.py" } ] + +[tool.poetry.dependencies] +python = "^3.9,<3.12" +airbyte-cdk = "^0" + +[tool.poetry.scripts] +source-microsoft-teams = "source_microsoft_teams.run:run" + +[tool.poetry.group.dev.dependencies] +requests-mock = "*" +pytest-mock = "*" +pytest = "*" diff --git a/airbyte-integrations/connectors/source-microsoft-teams/requirements.txt b/airbyte-integrations/connectors/source-microsoft-teams/requirements.txt deleted file mode 100644 index 7b9114ed5867e..0000000000000 --- a/airbyte-integrations/connectors/source-microsoft-teams/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -# This file is autogenerated -- only edit if you know what you are doing. Use setup.py for declaring dependencies. --e . diff --git a/airbyte-integrations/connectors/source-microsoft-teams/sample_files/catalog.json b/airbyte-integrations/connectors/source-microsoft-teams/sample_files/catalog.json deleted file mode 100644 index b0b9df76c74d1..0000000000000 --- a/airbyte-integrations/connectors/source-microsoft-teams/sample_files/catalog.json +++ /dev/null @@ -1,64 +0,0 @@ -{ - "streams": [ - { - "stream": { - "name": "group_owners", - "supported_sync_modes": ["full_refresh"], - "source_defined_cursor": false, - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "id": { - "type": ["null", "string"] - }, - "group_id": { - "type": ["null", "string"] - }, - "business_phones": { - "anyOf": [ - { - "type": "array", - "items": { - "type": "string" - } - }, - { - "type": "null" - } - ] - }, - "display_name": { - "type": ["null", "string"] - }, - "given_name": { - "type": ["null", "string"] - }, - "job_title": { - "type": ["null", "string"] - }, - "mail": { - "type": ["null", "string"] - }, - "mobile_phone": { - "type": ["null", "string"] - }, - "office_location": { - "type": ["null", "string"] - }, - "preferred_language": { - "type": ["null", "string"] - }, - "surname": { - "type": ["null", "string"] - }, - "user_principal_name": { - "type": ["null", "string"] - } - } - } - }, - "sync_mode": "full_refresh" - } - ] -} diff --git a/airbyte-integrations/connectors/source-microsoft-teams/sample_files/sample_config.json b/airbyte-integrations/connectors/source-microsoft-teams/sample_files/sample_config.json deleted file mode 100644 index d187c3d46affb..0000000000000 --- a/airbyte-integrations/connectors/source-microsoft-teams/sample_files/sample_config.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "tenant_id": "", - "client_id": "", - "client_secret": "", - "period": "D7" -} diff --git a/airbyte-integrations/connectors/source-microsoft-teams/setup.py b/airbyte-integrations/connectors/source-microsoft-teams/setup.py deleted file mode 100644 index 07b984cad43d8..0000000000000 --- a/airbyte-integrations/connectors/source-microsoft-teams/setup.py +++ /dev/null @@ -1,49 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = [ - "airbyte-cdk", - "requests", - "msal==1.7.0", - "backoff", -] - -TEST_REQUIREMENTS = [ - "requests-mock~=1.9.3", - "pytest~=6.1", - "pytest-mock~=3.6.1", -] - -setup( - entry_points={ - "console_scripts": [ - "source-microsoft-teams=source_microsoft_teams.run:run", - ], - }, - name="source_microsoft_teams", - description="Source implementation for Microsoft Teams.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={ - "": [ - # Include yaml files in the package (if any) - "*.yml", - "*.yaml", - # Include all json files in the package, up to 4 levels deep - "*.json", - "*/*.json", - "*/*/*.json", - "*/*/*/*.json", - "*/*/*/*/*.json", - ] - }, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/__init__.py b/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/__init__.py index 4f8ee32f96042..e45356f1e410c 100644 --- a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/__init__.py +++ b/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/__init__.py @@ -1,3 +1,8 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + from .source import SourceMicrosoftTeams __all__ = ["SourceMicrosoftTeams"] diff --git a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/client.py b/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/client.py deleted file mode 100644 index c16459025f986..0000000000000 --- a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/client.py +++ /dev/null @@ -1,268 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -import csv -import io -import json -import pkgutil -import sys -import time -from functools import lru_cache -from typing import Dict, List, Optional, Tuple, Union - -import backoff -import msal -import requests -from airbyte_cdk.logger import AirbyteLogger -from airbyte_cdk.models.airbyte_protocol import AirbyteStream -from msal.exceptions import MsalServiceError - -LOGGER = AirbyteLogger() - - -def log_backoff_attempt(details): - LOGGER.info(f"Encountered exception when querying the Microsoft API: {str(sys.exc_info()[1])}. Backing off: {details.get('tries')} try") - - -class Client: - """ - Microsoft Teams API Reference: https://docs.microsoft.com/en-us/graph/api/resources/teams-api-overview?view=graph-rest-1.0 - """ - - MICROSOFT_GRAPH_BASE_API_URL: str = "https://graph.microsoft.com/" - MICROSOFT_GRAPH_API_VERSION: str = "v1.0" - PAGINATION_COUNT: Optional[int] = 20 - - def __init__(self, config: json): - self.ENTITY_MAP = { - "users": self.get_users, - "groups": self.get_groups, - "group_members": self.get_group_members, - "group_owners": self.get_group_owners, - "channels": self.get_channels, - "channel_members": self.get_channel_members, - "channel_tabs": self.get_channel_tabs, - "conversations": self.get_conversations, - "conversation_threads": self.get_conversation_threads, - "conversation_posts": self.get_conversation_posts, - "team_drives": self.get_team_drives, - "team_device_usage_report": self.get_team_device_usage_report, - } - self.configs = config - self.credentials = config.get("credentials") - if not self.credentials: - self.credentials = { - "tenant_id": config["tenant_id"], - "client_id": config["client_id"], - "client_secret": config["client_secret"], - } - self._group_ids = None - - @property - @lru_cache(maxsize=None) - def msal_app(self): - return msal.ConfidentialClientApplication( - self.credentials["client_id"], - authority=f"https://login.microsoftonline.com/" f"{self.credentials['tenant_id']}", - client_credential=self.credentials["client_secret"], - ) - - def _get_api_url(self, endpoint: str) -> str: - api_url = f"{self.MICROSOFT_GRAPH_BASE_API_URL}{self.MICROSOFT_GRAPH_API_VERSION}/{endpoint}/" - return api_url - - def _get_access_token(self) -> str: - scope = ["https://graph.microsoft.com/.default"] - refresh_token = self.credentials.get("refresh_token") - if refresh_token: - result = self.msal_app.acquire_token_by_refresh_token(refresh_token, scopes=scope) - else: - result = self.msal_app.acquire_token_for_client(scopes=scope) - if "access_token" in result: - return result["access_token"] - else: - raise MsalServiceError(error=result.get("error"), error_description=result.get("error_description")) - - @backoff.on_exception( - backoff.expo, - (requests.exceptions.ConnectionError, MsalServiceError, requests.exceptions.RequestException), - max_tries=7, - on_backoff=log_backoff_attempt, - ) - def _make_request(self, api_url: str, params: Optional[Dict] = None) -> Union[Dict, object]: - access_token = self._get_access_token() - headers = {"Authorization": f"Bearer {access_token}"} - response = requests.get(api_url, headers=headers, params=params) - if response.status_code == 429: - if "Retry-After" in response.headers: - pause_time = float(response.headers["Retry-After"]) - time.sleep(pause_time) - response = requests.get(api_url, headers=headers, params=params) - if response.status_code != 200: - raise requests.exceptions.RequestException(response.text) - if response.headers["Content-Type"] == "application/octet-stream": - raw_response = response.content - else: - raw_response = response.json() - return raw_response - - @staticmethod - def _get_response_value_unsafe(raw_response: Dict) -> List: - value = raw_response["value"] - return value - - def _get_request_params(self, params: Optional[Dict] = None, pagination: bool = True) -> Dict: - if self.PAGINATION_COUNT and pagination: - params = params if params else {} - if "$top" not in params: - params["$top"] = self.PAGINATION_COUNT - return params - - def _fetch_data(self, endpoint: str, params: Optional[Dict] = None, pagination: bool = True): - api_url = self._get_api_url(endpoint) - params = self._get_request_params(params, pagination) - while api_url: - raw_response = self._make_request(api_url, params) - value = self._get_response_value_unsafe(raw_response) - params = None - api_url = raw_response.get("@odata.nextLink", "") - yield value - - def health_check(self) -> Tuple[bool, object]: - try: - self._get_access_token() - return True, None - except MsalServiceError as err: - return False, err.args[0] - except Exception as e: - return False, str(e) - - def get_streams(self): - streams = [] - for schema, method in self.ENTITY_MAP.items(): - raw_schema = json.loads(pkgutil.get_data(self.__class__.__module__.split(".")[0], f"schemas/{schema}.json")) - streams.append(AirbyteStream(name=schema, json_schema=raw_schema, supported_sync_modes=["full_refresh"])) - return streams - - def get_users(self): - for users in self._fetch_data("users"): - yield users - - def get_groups(self): - for groups in self._fetch_data("groups"): - yield filter(lambda item: "Team" in item["resourceProvisioningOptions"], groups) - - def _get_group_ids(self): - if not self._group_ids: - api_url = self._get_api_url("groups") - params = {"$select": "id,resourceProvisioningOptions"} - groups = self._get_response_value_unsafe(self._make_request(api_url, params=params)) - self._group_ids = [item["id"] for item in groups if "Team" in item["resourceProvisioningOptions"]] - return self._group_ids - - def get_group_members(self): - for group_id in self._get_group_ids(): - for members in self._fetch_data(f"groups/{group_id}/members"): - yield members - - def get_group_owners(self): - for group_id in self._get_group_ids(): - for owners in self._fetch_data(f"groups/{group_id}/owners"): - yield owners - - def get_channels(self): - for group_id in self._get_group_ids(): - for channels in self._fetch_data(f"teams/{group_id}/channels", pagination=False): - yield channels - - def _get_channel_ids(self, group_id: str): - api_url = self._get_api_url(f"teams/{group_id}/channels") - # TODO: pass params={"$select": "id"} to make_request once the related bug in the MSFT API - # is fixed: microsoftgraph/microsoft-graph-docs#11494 - channels_ids = self._get_response_value_unsafe(self._make_request(api_url)) - return channels_ids - - def get_channel_members(self): - for group_id in self._get_group_ids(): - channels = self._get_channel_ids(group_id=group_id) - for channel in channels: - for members in self._fetch_data(f'teams/{group_id}/channels/{channel["id"]}/members'): - yield members - - def get_channel_tabs(self): - for group_id in self._get_group_ids(): - channels = self._get_channel_ids(group_id=group_id) - for channel in channels: - for tabs in self._fetch_data(f'teams/{group_id}/channels/{channel["id"]}/tabs', pagination=False): - yield tabs - - def get_conversations(self): - for group_id in self._get_group_ids(): - for conversations in self._fetch_data(f"groups/{group_id}/conversations"): - yield conversations - - def _get_conversation_ids(self, group_id: str): - api_url = self._get_api_url(f"groups/{group_id}/conversations") - params = {"$select": "id"} - conversation_ids = self._get_response_value_unsafe(self._make_request(api_url, params=params)) - return conversation_ids - - def get_conversation_threads(self): - for group_id in self._get_group_ids(): - conversations = self._get_conversation_ids(group_id=group_id) - for conversation in conversations: - for threads in self._fetch_data(f'groups/{group_id}/conversations/{conversation["id"]}/threads'): - yield threads - - def _get_thread_ids(self, group_id: str, conversation_id: str): - api_url = self._get_api_url(f"groups/{group_id}/conversations/{conversation_id}/threads") - params = {"$select": "id"} - thread_ids = self._get_response_value_unsafe(self._make_request(api_url, params=params)) - return thread_ids - - def get_conversation_posts(self): - for group_id in self._get_group_ids(): - conversations = self._get_conversation_ids(group_id=group_id) - for conversation in conversations: - threads = self._get_thread_ids(group_id, conversation["id"]) - for thread in threads: - for posts in self._fetch_data(f'groups/{group_id}/conversations/{conversation["id"]}/threads/{thread["id"]}/posts'): - yield posts - - def get_team_drives(self): - for group_id in self._get_group_ids(): - for drives in self._fetch_data(f"groups/{group_id}/drives"): - yield drives - - def get_team_device_usage_report(self): - period = self.configs["period"] - api_url = self._get_api_url(f"reports/getTeamsDeviceUsageUserDetail(period='{period}')") - csv_response = io.BytesIO(self._make_request(api_url)) - csv_response.readline() - with io.TextIOWrapper(csv_response, encoding="utf-8-sig") as text_file: - field_names = [ - "reportRefreshDate", - "userId", - "userPrincipalName", - "lastActivityDate", - "isDeleted", - "deletedDate", - "usedWeb", - "usedWindowsPhone", - "usedIOs", - "usedMac", - "usedAndroidPhone", - "usedWindows", - "usedChromeOS", - "usedLinux", - "isLisenced", - "reportPeriod", - ] - - reader = csv.DictReader(text_file, fieldnames=field_names) - for row in reader: - yield [ - row, - ] diff --git a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/manifest.yaml b/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/manifest.yaml new file mode 100644 index 0000000000000..7db956b0e32be --- /dev/null +++ b/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/manifest.yaml @@ -0,0 +1,5887 @@ +version: 0.44.0 +type: DeclarativeSource +check: + type: CheckStream + stream_names: + - users +streams: + - type: DeclarativeStream + name: users + primary_key: [] + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + businessPhones: + items: + type: + - "null" + - string + type: + - "null" + - array + displayName: + type: + - "null" + - string + givenName: + type: + - "null" + - string + id: + type: + - "null" + - string + jobTitle: + type: + - "null" + - string + mail: + type: + - "null" + - string + mobilePhone: + type: + - "null" + - string + officeLocation: + type: + - "null" + - string + preferredLanguage: + type: + - "null" + - string + surname: + type: + - "null" + - string + userPrincipalName: + type: + - "null" + - string + type: + - "null" + - object + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://graph.microsoft.com/v1.0/ + path: users?$top={{ config.get('page_size', 20) }} + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: OAuthAuthenticator + client_id: "{{ config['credentials']['client_id'] }}" + client_secret: "{{ config['credentials']['client_secret'] }}" + refresh_request_body: + scope: https://graph.microsoft.com/.default + token_refresh_endpoint: >- + https://login.microsoftonline.com/{{ config['credentials']['tenant_id'] + }}/oauth2/v2.0/token + grant_type: client_credentials + scopes: [] + access_token_name: access_token + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - value + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("@odata.nextLink", {}) }}' + stop_condition: '{{ not response.get("@odata.nextLink", {}) }}' + - type: DeclarativeStream + name: groups + primary_key: [] + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + classification: + type: + - "null" + - string + createdDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + creationOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + deletedDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + description: + type: + - "null" + - string + displayName: + type: + - "null" + - string + expirationDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + groupTypes: + items: + type: + - "null" + - string + type: + - "null" + - array + id: + type: + - "null" + - string + isAssignableToRole: + type: + - "null" + - boolean + mail: + type: + - "null" + - string + mailEnabled: + type: + - "null" + - boolean + mailNickname: + type: + - "null" + - string + membershipRule: + type: + - "null" + - string + membershipRuleProcessingState: + type: + - "null" + - string + onPremisesDomainName: + type: + - "null" + - string + onPremisesLastSyncDateTime: + format: date-time + type: + - "null" + - string + onPremisesNetBiosName: + type: + - "null" + - string + onPremisesProvisioningErrors: + items: + type: + - "null" + - string + type: + - "null" + - array + onPremisesSamAccountName: + type: + - "null" + - string + onPremisesSecurityIdentifier: + type: + - "null" + - string + onPremisesSyncEnabled: + type: + - "null" + - boolean + preferredDataLocation: + type: + - "null" + - string + preferredLanguage: + type: + - "null" + - string + proxyAddresses: + items: + type: + - "null" + - string + type: + - "null" + - array + renewedDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + resourceBehaviorOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + resourceProvisioningOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + securityEnabled: + type: + - "null" + - boolean + securityIdentifier: + type: + - "null" + - string + serviceProvisioningErrors: + items: + properties: + createdDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + isResolved: + type: + - "null" + - boolean + serviceInstance: + type: + - "null" + - string + type: + - "null" + - object + type: + - "null" + - array + theme: + type: + - "null" + - string + visibility: + type: + - "null" + - string + type: + - "null" + - object + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://graph.microsoft.com/v1.0/ + path: groups?$top={{ config.get('page_size', 20) }} + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: OAuthAuthenticator + client_id: "{{ config['credentials']['client_id'] }}" + client_secret: "{{ config['credentials']['client_secret'] }}" + refresh_request_body: + scope: https://graph.microsoft.com/.default + token_refresh_endpoint: >- + https://login.microsoftonline.com/{{ config['credentials']['tenant_id'] + }}/oauth2/v2.0/token + grant_type: client_credentials + scopes: [] + access_token_name: access_token + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - value + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("@odata.nextLink", {}) }}' + stop_condition: '{{ not response.get("@odata.nextLink", {}) }}' + - type: DeclarativeStream + name: group_members + primary_key: [] + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + "@odata.type": + type: + - "null" + - string + businessPhones: + items: + type: + - "null" + - string + type: + - "null" + - array + displayName: + type: + - "null" + - string + givenName: + type: + - "null" + - string + id: + type: + - "null" + - string + jobTitle: + type: + - "null" + - string + mail: + type: + - "null" + - string + mobilePhone: + type: + - "null" + - string + officeLocation: + type: + - "null" + - string + preferredLanguage: + type: + - "null" + - string + surname: + type: + - "null" + - string + userPrincipalName: + type: + - "null" + - string + type: + - "null" + - object + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://graph.microsoft.com/v1.0/ + path: >- + groups/{{ stream_partition.group_id }}/members?$top={{ config.get('page_size', 20) }} + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: OAuthAuthenticator + client_id: "{{ config['credentials']['client_id'] }}" + client_secret: "{{ config['credentials']['client_secret'] }}" + refresh_request_body: + scope: https://graph.microsoft.com/.default + token_refresh_endpoint: >- + https://login.microsoftonline.com/{{ config['credentials']['tenant_id'] + }}/oauth2/v2.0/token + grant_type: client_credentials + scopes: [] + access_token_name: access_token + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - value + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("@odata.nextLink", {}) }}' + stop_condition: '{{ not response.get("@odata.nextLink", {}) }}' + partition_router: + - type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + partition_field: group_id + stream: + type: DeclarativeStream + name: groups + primary_key: [] + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + classification: + type: + - "null" + - string + createdDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + creationOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + deletedDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + description: + type: + - "null" + - string + displayName: + type: + - "null" + - string + expirationDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + groupTypes: + items: + type: + - "null" + - string + type: + - "null" + - array + id: + type: + - "null" + - string + isAssignableToRole: + type: + - "null" + - boolean + mail: + type: + - "null" + - string + mailEnabled: + type: + - "null" + - boolean + mailNickname: + type: + - "null" + - string + membershipRule: + type: + - "null" + - string + membershipRuleProcessingState: + type: + - "null" + - string + onPremisesDomainName: + type: + - "null" + - string + onPremisesLastSyncDateTime: + format: date-time + type: + - "null" + - string + onPremisesNetBiosName: + type: + - "null" + - string + onPremisesProvisioningErrors: + items: + type: + - "null" + - string + type: + - "null" + - array + onPremisesSamAccountName: + type: + - "null" + - string + onPremisesSecurityIdentifier: + type: + - "null" + - string + onPremisesSyncEnabled: + type: + - "null" + - boolean + preferredDataLocation: + type: + - "null" + - string + preferredLanguage: + type: + - "null" + - string + proxyAddresses: + items: + type: + - "null" + - string + type: + - "null" + - array + renewedDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + resourceBehaviorOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + resourceProvisioningOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + securityEnabled: + type: + - "null" + - boolean + securityIdentifier: + type: + - "null" + - string + serviceProvisioningErrors: + items: + properties: + createdDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + isResolved: + type: + - "null" + - boolean + serviceInstance: + type: + - "null" + - string + type: + - "null" + - object + type: + - "null" + - array + theme: + type: + - "null" + - string + visibility: + type: + - "null" + - string + type: + - "null" + - object + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://graph.microsoft.com/v1.0/ + path: groups?$top={{ config.get('page_size', 20) }} + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: OAuthAuthenticator + client_id: "{{ config['credentials']['client_id'] }}" + client_secret: "{{ config['credentials']['client_secret'] }}" + refresh_request_body: + scope: https://graph.microsoft.com/.default + token_refresh_endpoint: >- + https://login.microsoftonline.com/{{ config['credentials']['tenant_id'] + }}/oauth2/v2.0/token + grant_type: client_credentials + scopes: [] + access_token_name: access_token + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - value + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("@odata.nextLink", {}) }}' + stop_condition: '{{ not response.get("@odata.nextLink", {}) }}' + - type: DeclarativeStream + name: group_owners + primary_key: [] + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + "@odata.type": + type: + - "null" + - string + businessPhones: + items: + type: + - "null" + - string + type: + - "null" + - array + displayName: + type: + - "null" + - string + givenName: + type: + - "null" + - string + groupId: + type: + - "null" + - string + id: + type: + - "null" + - string + jobTitle: + type: + - "null" + - string + mail: + type: + - "null" + - string + mobilePhone: + type: + - "null" + - string + officeLocation: + type: + - "null" + - string + preferredLanguage: + type: + - "null" + - string + surname: + type: + - "null" + - string + userPrincipalName: + type: + - "null" + - string + type: + - "null" + - object + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://graph.microsoft.com/v1.0/ + path: >- + groups/{{ stream_partition.group_id }}/owners?$top={{ config.get('page_size', 20) }} + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: OAuthAuthenticator + client_id: "{{ config['credentials']['client_id'] }}" + client_secret: "{{ config['credentials']['client_secret'] }}" + refresh_request_body: + scope: https://graph.microsoft.com/.default + token_refresh_endpoint: >- + https://login.microsoftonline.com/{{ config['credentials']['tenant_id'] + }}/oauth2/v2.0/token + grant_type: client_credentials + scopes: [] + access_token_name: access_token + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - value + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("@odata.nextLink", {}) }}' + stop_condition: '{{ not response.get("@odata.nextLink", {}) }}' + partition_router: + - type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + partition_field: group_id + stream: + type: DeclarativeStream + name: groups + primary_key: [] + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + classification: + type: + - "null" + - string + createdDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + creationOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + deletedDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + description: + type: + - "null" + - string + displayName: + type: + - "null" + - string + expirationDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + groupTypes: + items: + type: + - "null" + - string + type: + - "null" + - array + id: + type: + - "null" + - string + isAssignableToRole: + type: + - "null" + - boolean + mail: + type: + - "null" + - string + mailEnabled: + type: + - "null" + - boolean + mailNickname: + type: + - "null" + - string + membershipRule: + type: + - "null" + - string + membershipRuleProcessingState: + type: + - "null" + - string + onPremisesDomainName: + type: + - "null" + - string + onPremisesLastSyncDateTime: + format: date-time + type: + - "null" + - string + onPremisesNetBiosName: + type: + - "null" + - string + onPremisesProvisioningErrors: + items: + type: + - "null" + - string + type: + - "null" + - array + onPremisesSamAccountName: + type: + - "null" + - string + onPremisesSecurityIdentifier: + type: + - "null" + - string + onPremisesSyncEnabled: + type: + - "null" + - boolean + preferredDataLocation: + type: + - "null" + - string + preferredLanguage: + type: + - "null" + - string + proxyAddresses: + items: + type: + - "null" + - string + type: + - "null" + - array + renewedDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + resourceBehaviorOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + resourceProvisioningOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + securityEnabled: + type: + - "null" + - boolean + securityIdentifier: + type: + - "null" + - string + serviceProvisioningErrors: + items: + properties: + createdDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + isResolved: + type: + - "null" + - boolean + serviceInstance: + type: + - "null" + - string + type: + - "null" + - object + type: + - "null" + - array + theme: + type: + - "null" + - string + visibility: + type: + - "null" + - string + type: + - "null" + - object + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://graph.microsoft.com/v1.0/ + path: groups?$top={{ config.get('page_size', 20) }} + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: OAuthAuthenticator + client_id: "{{ config['credentials']['client_id'] }}" + client_secret: "{{ config['credentials']['client_secret'] }}" + refresh_request_body: + scope: https://graph.microsoft.com/.default + token_refresh_endpoint: >- + https://login.microsoftonline.com/{{ config['credentials']['tenant_id'] + }}/oauth2/v2.0/token + grant_type: client_credentials + scopes: [] + access_token_name: access_token + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - value + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("@odata.nextLink", {}) }}' + stop_condition: '{{ not response.get("@odata.nextLink", {}) }}' + - type: DeclarativeStream + name: conversations + primary_key: [] + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + groupId: + type: + - "null" + - string + hasAttachments: + type: + - "null" + - boolean + id: + type: + - "null" + - string + lastDeliveredDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + preview: + type: + - "null" + - string + topic: + type: + - "null" + - string + uniqueSenders: + items: + type: + - "null" + - string + type: + - "null" + - array + type: + - "null" + - object + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://graph.microsoft.com/v1.0/ + path: >- + groups/{{ stream_partition.group_id }}/conversations?$top={{ config.get('page_size', 20) }} + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: OAuthAuthenticator + client_id: "{{ config['credentials']['client_id'] }}" + client_secret: "{{ config['credentials']['client_secret'] }}" + refresh_request_body: + scope: https://graph.microsoft.com/.default + token_refresh_endpoint: >- + https://login.microsoftonline.com/{{ config['credentials']['tenant_id'] + }}/oauth2/v2.0/token + grant_type: client_credentials + scopes: [] + access_token_name: access_token + error_handler: + type: CompositeErrorHandler + error_handlers: + - type: DefaultErrorHandler + response_filters: + - type: HttpResponseFilter + action: IGNORE + http_codes: + - 403 + error_message: App only access is restricting some resources + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - value + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("@odata.nextLink", {}) }}' + stop_condition: '{{ not response.get("@odata.nextLink", {}) }}' + partition_router: + - type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + partition_field: group_id + stream: + type: DeclarativeStream + name: groups + primary_key: [] + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + classification: + type: + - "null" + - string + createdDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + creationOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + deletedDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + description: + type: + - "null" + - string + displayName: + type: + - "null" + - string + expirationDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + groupTypes: + items: + type: + - "null" + - string + type: + - "null" + - array + id: + type: + - "null" + - string + isAssignableToRole: + type: + - "null" + - boolean + mail: + type: + - "null" + - string + mailEnabled: + type: + - "null" + - boolean + mailNickname: + type: + - "null" + - string + membershipRule: + type: + - "null" + - string + membershipRuleProcessingState: + type: + - "null" + - string + onPremisesDomainName: + type: + - "null" + - string + onPremisesLastSyncDateTime: + format: date-time + type: + - "null" + - string + onPremisesNetBiosName: + type: + - "null" + - string + onPremisesProvisioningErrors: + items: + type: + - "null" + - string + type: + - "null" + - array + onPremisesSamAccountName: + type: + - "null" + - string + onPremisesSecurityIdentifier: + type: + - "null" + - string + onPremisesSyncEnabled: + type: + - "null" + - boolean + preferredDataLocation: + type: + - "null" + - string + preferredLanguage: + type: + - "null" + - string + proxyAddresses: + items: + type: + - "null" + - string + type: + - "null" + - array + renewedDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + resourceBehaviorOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + resourceProvisioningOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + securityEnabled: + type: + - "null" + - boolean + securityIdentifier: + type: + - "null" + - string + serviceProvisioningErrors: + items: + properties: + createdDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + isResolved: + type: + - "null" + - boolean + serviceInstance: + type: + - "null" + - string + type: + - "null" + - object + type: + - "null" + - array + theme: + type: + - "null" + - string + visibility: + type: + - "null" + - string + type: + - "null" + - object + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://graph.microsoft.com/v1.0/ + path: groups?$top={{ config.get('page_size', 20) }} + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: OAuthAuthenticator + client_id: "{{ config['credentials']['client_id'] }}" + client_secret: "{{ config['credentials']['client_secret'] }}" + refresh_request_body: + scope: https://graph.microsoft.com/.default + token_refresh_endpoint: >- + https://login.microsoftonline.com/{{ config['credentials']['tenant_id'] + }}/oauth2/v2.0/token + grant_type: client_credentials + scopes: [] + access_token_name: access_token + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - value + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("@odata.nextLink", {}) }}' + stop_condition: '{{ not response.get("@odata.nextLink", {}) }}' + - type: DeclarativeStream + name: channels + primary_key: [] + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + createdDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: string + description: + type: + - "null" + - string + displayName: + type: + - "null" + - string + email: + type: + - "null" + - string + id: + type: + - "null" + - string + isFavoriteByDefault: + type: + - "null" + - boolean + membershipType: + type: + - "null" + - string + tenantId: + type: + - "null" + - string + webUrl: + type: + - "null" + - string + type: + - "null" + - object + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://graph.microsoft.com/v1.0/ + path: teams/{{ stream_partition.group_id }}/channels + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: OAuthAuthenticator + client_id: "{{ config['credentials']['client_id'] }}" + client_secret: "{{ config['credentials']['client_secret'] }}" + refresh_request_body: + scope: https://graph.microsoft.com/.default + token_refresh_endpoint: >- + https://login.microsoftonline.com/{{ config['credentials']['tenant_id'] + }}/oauth2/v2.0/token + grant_type: client_credentials + scopes: [] + access_token_name: access_token + error_handler: + type: CompositeErrorHandler + error_handlers: + - type: DefaultErrorHandler + response_filters: + - type: HttpResponseFilter + action: IGNORE + predicate: "{{ 'No threadId found for TeamId' in response }}" + http_codes: + - 404 + error_message: Resource not found for team Ids + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - value + paginator: + type: NoPagination + partition_router: + - type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + partition_field: group_id + stream: + type: DeclarativeStream + name: groups + primary_key: [] + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + classification: + type: + - "null" + - string + createdDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + creationOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + deletedDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + description: + type: + - "null" + - string + displayName: + type: + - "null" + - string + expirationDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + groupTypes: + items: + type: + - "null" + - string + type: + - "null" + - array + id: + type: + - "null" + - string + isAssignableToRole: + type: + - "null" + - boolean + mail: + type: + - "null" + - string + mailEnabled: + type: + - "null" + - boolean + mailNickname: + type: + - "null" + - string + membershipRule: + type: + - "null" + - string + membershipRuleProcessingState: + type: + - "null" + - string + onPremisesDomainName: + type: + - "null" + - string + onPremisesLastSyncDateTime: + format: date-time + type: + - "null" + - string + onPremisesNetBiosName: + type: + - "null" + - string + onPremisesProvisioningErrors: + items: + type: + - "null" + - string + type: + - "null" + - array + onPremisesSamAccountName: + type: + - "null" + - string + onPremisesSecurityIdentifier: + type: + - "null" + - string + onPremisesSyncEnabled: + type: + - "null" + - boolean + preferredDataLocation: + type: + - "null" + - string + preferredLanguage: + type: + - "null" + - string + proxyAddresses: + items: + type: + - "null" + - string + type: + - "null" + - array + renewedDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + resourceBehaviorOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + resourceProvisioningOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + securityEnabled: + type: + - "null" + - boolean + securityIdentifier: + type: + - "null" + - string + serviceProvisioningErrors: + items: + properties: + createdDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + isResolved: + type: + - "null" + - boolean + serviceInstance: + type: + - "null" + - string + type: + - "null" + - object + type: + - "null" + - array + theme: + type: + - "null" + - string + visibility: + type: + - "null" + - string + type: + - "null" + - object + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://graph.microsoft.com/v1.0/ + path: groups?$top={{ config.get('page_size', 20) }} + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: OAuthAuthenticator + client_id: "{{ config['credentials']['client_id'] }}" + client_secret: "{{ config['credentials']['client_secret'] }}" + refresh_request_body: + scope: https://graph.microsoft.com/.default + token_refresh_endpoint: >- + https://login.microsoftonline.com/{{ config['credentials']['tenant_id'] + }}/oauth2/v2.0/token + grant_type: client_credentials + scopes: [] + access_token_name: access_token + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - value + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("@odata.nextLink", {}) }}' + stop_condition: '{{ not response.get("@odata.nextLink", {}) }}' + - type: DeclarativeStream + name: channel_members + primary_key: [] + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + "@odata.type": + type: + - "null" + - string + channelId: + type: + - "null" + - string + displayName: + type: + - "null" + - string + email: + type: + - "null" + - string + id: + type: + - "null" + - string + roles: + items: + type: + - "null" + - string + type: + - "null" + - array + tenantId: + type: + - "null" + - string + userId: + type: + - "null" + - string + visibleHistoryStartDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + type: + - "null" + - object + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://graph.microsoft.com/v1.0/ + path: >- + teams/{{ stream_partition.group_id }}/channels/{{ + stream_partition.channel_id }}/members?$top={{ config.get('page_size', 20) }} + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: OAuthAuthenticator + client_id: "{{ config['credentials']['client_id'] }}" + client_secret: "{{ config['credentials']['client_secret'] }}" + refresh_request_body: + scope: https://graph.microsoft.com/.default + token_refresh_endpoint: >- + https://login.microsoftonline.com/{{ config['credentials']['tenant_id'] + }}/oauth2/v2.0/token + grant_type: client_credentials + scopes: [] + access_token_name: access_token + error_handler: + type: CompositeErrorHandler + error_handlers: + - type: DefaultErrorHandler + response_filters: + - type: HttpResponseFilter + action: IGNORE + predicate: "{{ 'No Team found with Group id' in response }}" + http_codes: + - 404 + error_message: Resource not found for group Ids + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - value + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("@odata.nextLink", {}) }}' + stop_condition: '{{ not response.get("@odata.nextLink", {}) }}' + partition_router: + - type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + partition_field: group_id + stream: + type: DeclarativeStream + name: groups + primary_key: [] + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + classification: + type: + - "null" + - string + createdDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + creationOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + deletedDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + description: + type: + - "null" + - string + displayName: + type: + - "null" + - string + expirationDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + groupTypes: + items: + type: + - "null" + - string + type: + - "null" + - array + id: + type: + - "null" + - string + isAssignableToRole: + type: + - "null" + - boolean + mail: + type: + - "null" + - string + mailEnabled: + type: + - "null" + - boolean + mailNickname: + type: + - "null" + - string + membershipRule: + type: + - "null" + - string + membershipRuleProcessingState: + type: + - "null" + - string + onPremisesDomainName: + type: + - "null" + - string + onPremisesLastSyncDateTime: + format: date-time + type: + - "null" + - string + onPremisesNetBiosName: + type: + - "null" + - string + onPremisesProvisioningErrors: + items: + type: + - "null" + - string + type: + - "null" + - array + onPremisesSamAccountName: + type: + - "null" + - string + onPremisesSecurityIdentifier: + type: + - "null" + - string + onPremisesSyncEnabled: + type: + - "null" + - boolean + preferredDataLocation: + type: + - "null" + - string + preferredLanguage: + type: + - "null" + - string + proxyAddresses: + items: + type: + - "null" + - string + type: + - "null" + - array + renewedDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + resourceBehaviorOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + resourceProvisioningOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + securityEnabled: + type: + - "null" + - boolean + securityIdentifier: + type: + - "null" + - string + serviceProvisioningErrors: + items: + properties: + createdDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + isResolved: + type: + - "null" + - boolean + serviceInstance: + type: + - "null" + - string + type: + - "null" + - object + type: + - "null" + - array + theme: + type: + - "null" + - string + visibility: + type: + - "null" + - string + type: + - "null" + - object + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://graph.microsoft.com/v1.0/ + path: groups?$top={{ config.get('page_size', 20) }} + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: OAuthAuthenticator + client_id: "{{ config['credentials']['client_id'] }}" + client_secret: "{{ config['credentials']['client_secret'] }}" + refresh_request_body: + scope: https://graph.microsoft.com/.default + token_refresh_endpoint: >- + https://login.microsoftonline.com/{{ config['credentials']['tenant_id'] + }}/oauth2/v2.0/token + grant_type: client_credentials + scopes: [] + access_token_name: access_token + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - value + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("@odata.nextLink", {}) }}' + stop_condition: '{{ not response.get("@odata.nextLink", {}) }}' + - type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + partition_field: channel_id + stream: + type: DeclarativeStream + name: channels + primary_key: [] + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + createdDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: string + description: + type: + - "null" + - string + displayName: + type: + - "null" + - string + email: + type: + - "null" + - string + id: + type: + - "null" + - string + isFavoriteByDefault: + type: + - "null" + - boolean + membershipType: + type: + - "null" + - string + tenantId: + type: + - "null" + - string + webUrl: + type: + - "null" + - string + type: + - "null" + - object + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://graph.microsoft.com/v1.0/ + path: teams/{{ stream_partition.group_id }}/channels + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: OAuthAuthenticator + client_id: "{{ config['credentials']['client_id'] }}" + client_secret: "{{ config['credentials']['client_secret'] }}" + refresh_request_body: + scope: https://graph.microsoft.com/.default + token_refresh_endpoint: >- + https://login.microsoftonline.com/{{ config['credentials']['tenant_id'] + }}/oauth2/v2.0/token + grant_type: client_credentials + scopes: [] + access_token_name: access_token + error_handler: + type: CompositeErrorHandler + error_handlers: + - type: DefaultErrorHandler + response_filters: + - type: HttpResponseFilter + action: IGNORE + predicate: "{{ 'No threadId found for TeamId' in response }}" + http_codes: + - 404 + error_message: Resource not found for team Ids + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - value + paginator: + type: NoPagination + partition_router: + - type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + partition_field: group_id + stream: + type: DeclarativeStream + name: groups + primary_key: [] + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + classification: + type: + - "null" + - string + createdDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + creationOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + deletedDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + description: + type: + - "null" + - string + displayName: + type: + - "null" + - string + expirationDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + groupTypes: + items: + type: + - "null" + - string + type: + - "null" + - array + id: + type: + - "null" + - string + isAssignableToRole: + type: + - "null" + - boolean + mail: + type: + - "null" + - string + mailEnabled: + type: + - "null" + - boolean + mailNickname: + type: + - "null" + - string + membershipRule: + type: + - "null" + - string + membershipRuleProcessingState: + type: + - "null" + - string + onPremisesDomainName: + type: + - "null" + - string + onPremisesLastSyncDateTime: + format: date-time + type: + - "null" + - string + onPremisesNetBiosName: + type: + - "null" + - string + onPremisesProvisioningErrors: + items: + type: + - "null" + - string + type: + - "null" + - array + onPremisesSamAccountName: + type: + - "null" + - string + onPremisesSecurityIdentifier: + type: + - "null" + - string + onPremisesSyncEnabled: + type: + - "null" + - boolean + preferredDataLocation: + type: + - "null" + - string + preferredLanguage: + type: + - "null" + - string + proxyAddresses: + items: + type: + - "null" + - string + type: + - "null" + - array + renewedDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + resourceBehaviorOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + resourceProvisioningOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + securityEnabled: + type: + - "null" + - boolean + securityIdentifier: + type: + - "null" + - string + serviceProvisioningErrors: + items: + properties: + createdDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + isResolved: + type: + - "null" + - boolean + serviceInstance: + type: + - "null" + - string + type: + - "null" + - object + type: + - "null" + - array + theme: + type: + - "null" + - string + visibility: + type: + - "null" + - string + type: + - "null" + - object + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://graph.microsoft.com/v1.0/ + path: groups?$top={{ config.get('page_size', 20) }} + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: OAuthAuthenticator + client_id: "{{ config['credentials']['client_id'] }}" + client_secret: "{{ config['credentials']['client_secret'] }}" + refresh_request_body: + scope: https://graph.microsoft.com/.default + token_refresh_endpoint: >- + https://login.microsoftonline.com/{{ + config['credentials']['tenant_id'] }}/oauth2/v2.0/token + grant_type: client_credentials + scopes: [] + access_token_name: access_token + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - value + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("@odata.nextLink", {}) }}' + stop_condition: >- + {{ not response.get("@odata.nextLink", {}) + }} + - type: DeclarativeStream + name: channel_tabs + primary_key: [] + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + channelId: + type: + - "null" + - string + configuration: + additionalProperties: true + properties: + contentUrl: + type: + - "null" + - string + entityId: + type: + - "null" + - string + hasContent: + type: + - "null" + - boolean + removeUrl: + type: + - "null" + - string + websiteUrl: + type: + - "null" + - string + wikiDefaultTab: + type: + - "null" + - boolean + wikiTabId: + type: + - "null" + - integer + type: + - "null" + - object + displayName: + type: + - "null" + - string + groupId: + type: + - "null" + - string + id: + type: + - "null" + - string + sortOrderIndex: + type: + - "null" + - string + teamsApp: + additionalProperties: true + properties: + displayName: + type: + - "null" + - string + distributionMethod: + type: + - "null" + - string + id: + type: + - "null" + - string + type: + - "null" + - object + webUrl: + type: + - "null" + - string + type: + - "null" + - object + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://graph.microsoft.com/v1.0/ + path: >- + teams/{{ stream_partition.group_id }}/channels/{{ + stream_partition.channel_id }}/tabs + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: OAuthAuthenticator + client_id: "{{ config['credentials']['client_id'] }}" + client_secret: "{{ config['credentials']['client_secret'] }}" + refresh_request_body: + scope: https://graph.microsoft.com/.default + token_refresh_endpoint: >- + https://login.microsoftonline.com/{{ config['credentials']['tenant_id'] + }}/oauth2/v2.0/token + grant_type: client_credentials + scopes: [] + access_token_name: access_token + error_handler: + type: CompositeErrorHandler + error_handlers: + - type: DefaultErrorHandler + response_filters: + - type: HttpResponseFilter + action: IGNORE + predicate: "{{ 'No Team found with Group id' in response }}" + http_codes: + - 404 + error_message: Resource not found for group Ids + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - value + paginator: + type: NoPagination + partition_router: + - type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + partition_field: group_id + stream: + type: DeclarativeStream + name: groups + primary_key: [] + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + classification: + type: + - "null" + - string + createdDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + creationOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + deletedDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + description: + type: + - "null" + - string + displayName: + type: + - "null" + - string + expirationDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + groupTypes: + items: + type: + - "null" + - string + type: + - "null" + - array + id: + type: + - "null" + - string + isAssignableToRole: + type: + - "null" + - boolean + mail: + type: + - "null" + - string + mailEnabled: + type: + - "null" + - boolean + mailNickname: + type: + - "null" + - string + membershipRule: + type: + - "null" + - string + membershipRuleProcessingState: + type: + - "null" + - string + onPremisesDomainName: + type: + - "null" + - string + onPremisesLastSyncDateTime: + format: date-time + type: + - "null" + - string + onPremisesNetBiosName: + type: + - "null" + - string + onPremisesProvisioningErrors: + items: + type: + - "null" + - string + type: + - "null" + - array + onPremisesSamAccountName: + type: + - "null" + - string + onPremisesSecurityIdentifier: + type: + - "null" + - string + onPremisesSyncEnabled: + type: + - "null" + - boolean + preferredDataLocation: + type: + - "null" + - string + preferredLanguage: + type: + - "null" + - string + proxyAddresses: + items: + type: + - "null" + - string + type: + - "null" + - array + renewedDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + resourceBehaviorOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + resourceProvisioningOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + securityEnabled: + type: + - "null" + - boolean + securityIdentifier: + type: + - "null" + - string + serviceProvisioningErrors: + items: + properties: + createdDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + isResolved: + type: + - "null" + - boolean + serviceInstance: + type: + - "null" + - string + type: + - "null" + - object + type: + - "null" + - array + theme: + type: + - "null" + - string + visibility: + type: + - "null" + - string + type: + - "null" + - object + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://graph.microsoft.com/v1.0/ + path: groups?$top={{ config.get('page_size', 20) }} + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: OAuthAuthenticator + client_id: "{{ config['credentials']['client_id'] }}" + client_secret: "{{ config['credentials']['client_secret'] }}" + refresh_request_body: + scope: https://graph.microsoft.com/.default + token_refresh_endpoint: >- + https://login.microsoftonline.com/{{ config['credentials']['tenant_id'] + }}/oauth2/v2.0/token + grant_type: client_credentials + scopes: [] + access_token_name: access_token + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - value + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("@odata.nextLink", {}) }}' + stop_condition: '{{ not response.get("@odata.nextLink", {}) }}' + - type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + partition_field: channel_id + stream: + type: DeclarativeStream + name: channels + primary_key: [] + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + createdDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: string + description: + type: + - "null" + - string + displayName: + type: + - "null" + - string + email: + type: + - "null" + - string + id: + type: + - "null" + - string + isFavoriteByDefault: + type: + - "null" + - boolean + membershipType: + type: + - "null" + - string + tenantId: + type: + - "null" + - string + webUrl: + type: + - "null" + - string + type: + - "null" + - object + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://graph.microsoft.com/v1.0/ + path: teams/{{ stream_partition.group_id }}/channels + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: OAuthAuthenticator + client_id: "{{ config['credentials']['client_id'] }}" + client_secret: "{{ config['credentials']['client_secret'] }}" + refresh_request_body: + scope: https://graph.microsoft.com/.default + token_refresh_endpoint: >- + https://login.microsoftonline.com/{{ config['credentials']['tenant_id'] + }}/oauth2/v2.0/token + grant_type: client_credentials + scopes: [] + access_token_name: access_token + error_handler: + type: CompositeErrorHandler + error_handlers: + - type: DefaultErrorHandler + response_filters: + - type: HttpResponseFilter + action: IGNORE + predicate: "{{ 'No threadId found for TeamId' in response }}" + http_codes: + - 404 + error_message: Resource not found for team Ids + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - value + paginator: + type: NoPagination + partition_router: + - type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + partition_field: group_id + stream: + type: DeclarativeStream + name: groups + primary_key: [] + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + classification: + type: + - "null" + - string + createdDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + creationOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + deletedDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + description: + type: + - "null" + - string + displayName: + type: + - "null" + - string + expirationDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + groupTypes: + items: + type: + - "null" + - string + type: + - "null" + - array + id: + type: + - "null" + - string + isAssignableToRole: + type: + - "null" + - boolean + mail: + type: + - "null" + - string + mailEnabled: + type: + - "null" + - boolean + mailNickname: + type: + - "null" + - string + membershipRule: + type: + - "null" + - string + membershipRuleProcessingState: + type: + - "null" + - string + onPremisesDomainName: + type: + - "null" + - string + onPremisesLastSyncDateTime: + format: date-time + type: + - "null" + - string + onPremisesNetBiosName: + type: + - "null" + - string + onPremisesProvisioningErrors: + items: + type: + - "null" + - string + type: + - "null" + - array + onPremisesSamAccountName: + type: + - "null" + - string + onPremisesSecurityIdentifier: + type: + - "null" + - string + onPremisesSyncEnabled: + type: + - "null" + - boolean + preferredDataLocation: + type: + - "null" + - string + preferredLanguage: + type: + - "null" + - string + proxyAddresses: + items: + type: + - "null" + - string + type: + - "null" + - array + renewedDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + resourceBehaviorOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + resourceProvisioningOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + securityEnabled: + type: + - "null" + - boolean + securityIdentifier: + type: + - "null" + - string + serviceProvisioningErrors: + items: + properties: + createdDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + isResolved: + type: + - "null" + - boolean + serviceInstance: + type: + - "null" + - string + type: + - "null" + - object + type: + - "null" + - array + theme: + type: + - "null" + - string + visibility: + type: + - "null" + - string + type: + - "null" + - object + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://graph.microsoft.com/v1.0/ + path: groups?$top={{ config.get('page_size', 20) }} + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: OAuthAuthenticator + client_id: "{{ config['credentials']['client_id'] }}" + client_secret: "{{ config['credentials']['client_secret'] }}" + refresh_request_body: + scope: https://graph.microsoft.com/.default + token_refresh_endpoint: >- + https://login.microsoftonline.com/{{ + config['credentials']['tenant_id'] }}/oauth2/v2.0/token + grant_type: client_credentials + scopes: [] + access_token_name: access_token + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - value + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("@odata.nextLink", {}) }}' + stop_condition: >- + {{ not response.get("@odata.nextLink", {}) + }} + - type: DeclarativeStream + name: conversation_threads + primary_key: [] + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + conversationId: + type: + - "null" + - string + groupId: + type: + - "null" + - string + hasAttachments: + type: + - "null" + - boolean + id: + type: + - "null" + - string + isLocked: + type: + - "null" + - boolean + lastDeliveredDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + preview: + type: + - "null" + - string + topic: + type: + - "null" + - string + uniqueSenders: + items: + type: + - "null" + - string + type: + - "null" + - array + type: + - "null" + - object + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://graph.microsoft.com/v1.0/ + path: >- + groups/{{ stream_partition.group_id }}/conversations/{{ + stream_partition.conversation_id }}/threads?$top={{ config.get('page_size', 20) }} + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: OAuthAuthenticator + client_id: "{{ config['credentials']['client_id'] }}" + client_secret: "{{ config['credentials']['client_secret'] }}" + refresh_request_body: + scope: https://graph.microsoft.com/.default + token_refresh_endpoint: >- + https://login.microsoftonline.com/{{ config['credentials']['tenant_id'] + }}/oauth2/v2.0/token + grant_type: client_credentials + scopes: [] + access_token_name: access_token + error_handler: + type: CompositeErrorHandler + error_handlers: + - type: DefaultErrorHandler + response_filters: + - type: HttpResponseFilter + action: IGNORE + predicate: "{{ 'App Only access is not allowed' in response }}" + http_codes: + - 404 + - 403 + error_message: Some resources not found for parent resources + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - value + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("@odata.nextLink", {}) }}' + stop_condition: '{{ not response.get("@odata.nextLink", {}) }}' + partition_router: + - type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + partition_field: group_id + stream: + type: DeclarativeStream + name: groups + primary_key: [] + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + classification: + type: + - "null" + - string + createdDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + creationOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + deletedDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + description: + type: + - "null" + - string + displayName: + type: + - "null" + - string + expirationDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + groupTypes: + items: + type: + - "null" + - string + type: + - "null" + - array + id: + type: + - "null" + - string + isAssignableToRole: + type: + - "null" + - boolean + mail: + type: + - "null" + - string + mailEnabled: + type: + - "null" + - boolean + mailNickname: + type: + - "null" + - string + membershipRule: + type: + - "null" + - string + membershipRuleProcessingState: + type: + - "null" + - string + onPremisesDomainName: + type: + - "null" + - string + onPremisesLastSyncDateTime: + format: date-time + type: + - "null" + - string + onPremisesNetBiosName: + type: + - "null" + - string + onPremisesProvisioningErrors: + items: + type: + - "null" + - string + type: + - "null" + - array + onPremisesSamAccountName: + type: + - "null" + - string + onPremisesSecurityIdentifier: + type: + - "null" + - string + onPremisesSyncEnabled: + type: + - "null" + - boolean + preferredDataLocation: + type: + - "null" + - string + preferredLanguage: + type: + - "null" + - string + proxyAddresses: + items: + type: + - "null" + - string + type: + - "null" + - array + renewedDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + resourceBehaviorOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + resourceProvisioningOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + securityEnabled: + type: + - "null" + - boolean + securityIdentifier: + type: + - "null" + - string + serviceProvisioningErrors: + items: + properties: + createdDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + isResolved: + type: + - "null" + - boolean + serviceInstance: + type: + - "null" + - string + type: + - "null" + - object + type: + - "null" + - array + theme: + type: + - "null" + - string + visibility: + type: + - "null" + - string + type: + - "null" + - object + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://graph.microsoft.com/v1.0/ + path: groups?$top={{ config.get('page_size', 20) }} + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: OAuthAuthenticator + client_id: "{{ config['credentials']['client_id'] }}" + client_secret: "{{ config['credentials']['client_secret'] }}" + refresh_request_body: + scope: https://graph.microsoft.com/.default + token_refresh_endpoint: >- + https://login.microsoftonline.com/{{ config['credentials']['tenant_id'] + }}/oauth2/v2.0/token + grant_type: client_credentials + scopes: [] + access_token_name: access_token + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - value + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("@odata.nextLink", {}) }}' + stop_condition: '{{ not response.get("@odata.nextLink", {}) }}' + - type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + partition_field: conversation_id + stream: + type: DeclarativeStream + name: conversations + primary_key: [] + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + groupId: + type: + - "null" + - string + hasAttachments: + type: + - "null" + - boolean + id: + type: + - "null" + - string + lastDeliveredDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + preview: + type: + - "null" + - string + topic: + type: + - "null" + - string + uniqueSenders: + items: + type: + - "null" + - string + type: + - "null" + - array + type: + - "null" + - object + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://graph.microsoft.com/v1.0/ + path: >- + groups/{{ stream_partition.group_id + }}/conversations?$top={{ config.get('page_size', 20) }} + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: OAuthAuthenticator + client_id: "{{ config['credentials']['client_id'] }}" + client_secret: "{{ config['credentials']['client_secret'] }}" + refresh_request_body: + scope: https://graph.microsoft.com/.default + token_refresh_endpoint: >- + https://login.microsoftonline.com/{{ config['credentials']['tenant_id'] + }}/oauth2/v2.0/token + grant_type: client_credentials + scopes: [] + access_token_name: access_token + error_handler: + type: CompositeErrorHandler + error_handlers: + - type: DefaultErrorHandler + response_filters: + - type: HttpResponseFilter + action: IGNORE + http_codes: + - 403 + error_message: App only access is restricting some resources + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - value + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("@odata.nextLink", {}) }}' + stop_condition: '{{ not response.get("@odata.nextLink", {}) }}' + partition_router: + - type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + partition_field: group_id + stream: + type: DeclarativeStream + name: groups + primary_key: [] + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + classification: + type: + - "null" + - string + createdDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + creationOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + deletedDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + description: + type: + - "null" + - string + displayName: + type: + - "null" + - string + expirationDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + groupTypes: + items: + type: + - "null" + - string + type: + - "null" + - array + id: + type: + - "null" + - string + isAssignableToRole: + type: + - "null" + - boolean + mail: + type: + - "null" + - string + mailEnabled: + type: + - "null" + - boolean + mailNickname: + type: + - "null" + - string + membershipRule: + type: + - "null" + - string + membershipRuleProcessingState: + type: + - "null" + - string + onPremisesDomainName: + type: + - "null" + - string + onPremisesLastSyncDateTime: + format: date-time + type: + - "null" + - string + onPremisesNetBiosName: + type: + - "null" + - string + onPremisesProvisioningErrors: + items: + type: + - "null" + - string + type: + - "null" + - array + onPremisesSamAccountName: + type: + - "null" + - string + onPremisesSecurityIdentifier: + type: + - "null" + - string + onPremisesSyncEnabled: + type: + - "null" + - boolean + preferredDataLocation: + type: + - "null" + - string + preferredLanguage: + type: + - "null" + - string + proxyAddresses: + items: + type: + - "null" + - string + type: + - "null" + - array + renewedDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + resourceBehaviorOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + resourceProvisioningOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + securityEnabled: + type: + - "null" + - boolean + securityIdentifier: + type: + - "null" + - string + serviceProvisioningErrors: + items: + properties: + createdDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + isResolved: + type: + - "null" + - boolean + serviceInstance: + type: + - "null" + - string + type: + - "null" + - object + type: + - "null" + - array + theme: + type: + - "null" + - string + visibility: + type: + - "null" + - string + type: + - "null" + - object + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://graph.microsoft.com/v1.0/ + path: groups?$top={{ config.get('page_size', 20) }} + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: OAuthAuthenticator + client_id: "{{ config['credentials']['client_id'] }}" + client_secret: "{{ config['credentials']['client_secret'] }}" + refresh_request_body: + scope: https://graph.microsoft.com/.default + token_refresh_endpoint: >- + https://login.microsoftonline.com/{{ + config['credentials']['tenant_id'] }}/oauth2/v2.0/token + grant_type: client_credentials + scopes: [] + access_token_name: access_token + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - value + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("@odata.nextLink", {}) }}' + stop_condition: >- + {{ not response.get("@odata.nextLink", {}) + }} + - type: DeclarativeStream + name: conversation_posts + primary_key: [] + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + "@odata.etag": + type: + - "null" + - string + body: + additionalProperties: true + properties: + content: + type: + - "null" + - string + contentType: + type: + - "null" + - string + type: + - "null" + - object + categories: + items: + type: + - "null" + - string + type: + - "null" + - array + changeKey: + type: + - "null" + - string + conversationId: + type: + - "null" + - string + createdDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + from: + additionalProperties: true + properties: + emailAddress: + properties: + address: + type: + - "null" + - string + name: + type: + - "null" + - string + type: + - "null" + - object + type: + - "null" + - object + hasAttachments: + type: + - "null" + - boolean + id: + type: + - "null" + - string + lastModifiedDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + receivedDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + sender: + additionalProperties: true + properties: + emailAddress: + additionalProperties: true + properties: + address: + type: + - "null" + - string + name: + type: + - "null" + - string + type: + - "null" + - object + type: + - "null" + - object + threadId: + type: + - "null" + - string + type: + - "null" + - object + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://graph.microsoft.com/v1.0/ + path: >- + groups/{{ stream_partition.group_id }}/conversations/{{ + stream_partition.conversation_id }}/threads/{{ + stream_partition.thread_id }}/posts?$top={{ config.get('page_size', 20) }} + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: OAuthAuthenticator + client_id: "{{ config['credentials']['client_id'] }}" + client_secret: "{{ config['credentials']['client_secret'] }}" + refresh_request_body: + scope: https://graph.microsoft.com/.default + token_refresh_endpoint: >- + https://login.microsoftonline.com/{{ config['credentials']['tenant_id'] + }}/oauth2/v2.0/token + grant_type: client_credentials + scopes: [] + access_token_name: access_token + error_handler: + type: CompositeErrorHandler + error_handlers: + - type: DefaultErrorHandler + response_filters: + - type: HttpResponseFilter + action: IGNORE + predicate: "{{ 'App Only access is not allowed' in response }}" + http_codes: + - 404 + - 403 + - 503 + error_message: Some resources are not allowed to extract + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - value + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("@odata.nextLink", {}) }}' + stop_condition: '{{ not response.get("@odata.nextLink", {}) }}' + partition_router: + - type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + partition_field: group_id + stream: + type: DeclarativeStream + name: groups + primary_key: [] + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + classification: + type: + - "null" + - string + createdDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + creationOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + deletedDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + description: + type: + - "null" + - string + displayName: + type: + - "null" + - string + expirationDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + groupTypes: + items: + type: + - "null" + - string + type: + - "null" + - array + id: + type: + - "null" + - string + isAssignableToRole: + type: + - "null" + - boolean + mail: + type: + - "null" + - string + mailEnabled: + type: + - "null" + - boolean + mailNickname: + type: + - "null" + - string + membershipRule: + type: + - "null" + - string + membershipRuleProcessingState: + type: + - "null" + - string + onPremisesDomainName: + type: + - "null" + - string + onPremisesLastSyncDateTime: + format: date-time + type: + - "null" + - string + onPremisesNetBiosName: + type: + - "null" + - string + onPremisesProvisioningErrors: + items: + type: + - "null" + - string + type: + - "null" + - array + onPremisesSamAccountName: + type: + - "null" + - string + onPremisesSecurityIdentifier: + type: + - "null" + - string + onPremisesSyncEnabled: + type: + - "null" + - boolean + preferredDataLocation: + type: + - "null" + - string + preferredLanguage: + type: + - "null" + - string + proxyAddresses: + items: + type: + - "null" + - string + type: + - "null" + - array + renewedDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + resourceBehaviorOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + resourceProvisioningOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + securityEnabled: + type: + - "null" + - boolean + securityIdentifier: + type: + - "null" + - string + serviceProvisioningErrors: + items: + properties: + createdDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + isResolved: + type: + - "null" + - boolean + serviceInstance: + type: + - "null" + - string + type: + - "null" + - object + type: + - "null" + - array + theme: + type: + - "null" + - string + visibility: + type: + - "null" + - string + type: + - "null" + - object + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://graph.microsoft.com/v1.0/ + path: groups?$top={{ config.get('page_size', 20) }} + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: OAuthAuthenticator + client_id: "{{ config['credentials']['client_id'] }}" + client_secret: "{{ config['credentials']['client_secret'] }}" + refresh_request_body: + scope: https://graph.microsoft.com/.default + token_refresh_endpoint: >- + https://login.microsoftonline.com/{{ config['credentials']['tenant_id'] + }}/oauth2/v2.0/token + grant_type: client_credentials + scopes: [] + access_token_name: access_token + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - value + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("@odata.nextLink", {}) }}' + stop_condition: '{{ not response.get("@odata.nextLink", {}) }}' + - type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + partition_field: conversation_id + stream: + type: DeclarativeStream + name: conversations + primary_key: [] + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + groupId: + type: + - "null" + - string + hasAttachments: + type: + - "null" + - boolean + id: + type: + - "null" + - string + lastDeliveredDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + preview: + type: + - "null" + - string + topic: + type: + - "null" + - string + uniqueSenders: + items: + type: + - "null" + - string + type: + - "null" + - array + type: + - "null" + - object + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://graph.microsoft.com/v1.0/ + path: >- + groups/{{ stream_partition.group_id + }}/conversations?$top={{ config.get('page_size', 20) }} + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: OAuthAuthenticator + client_id: "{{ config['credentials']['client_id'] }}" + client_secret: "{{ config['credentials']['client_secret'] }}" + refresh_request_body: + scope: https://graph.microsoft.com/.default + token_refresh_endpoint: >- + https://login.microsoftonline.com/{{ config['credentials']['tenant_id'] + }}/oauth2/v2.0/token + grant_type: client_credentials + scopes: [] + access_token_name: access_token + error_handler: + type: CompositeErrorHandler + error_handlers: + - type: DefaultErrorHandler + response_filters: + - type: HttpResponseFilter + action: IGNORE + http_codes: + - 403 + error_message: App only access is restricting some resources + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - value + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("@odata.nextLink", {}) }}' + stop_condition: '{{ not response.get("@odata.nextLink", {}) }}' + partition_router: + - type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + partition_field: group_id + stream: + type: DeclarativeStream + name: groups + primary_key: [] + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + classification: + type: + - "null" + - string + createdDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + creationOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + deletedDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + description: + type: + - "null" + - string + displayName: + type: + - "null" + - string + expirationDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + groupTypes: + items: + type: + - "null" + - string + type: + - "null" + - array + id: + type: + - "null" + - string + isAssignableToRole: + type: + - "null" + - boolean + mail: + type: + - "null" + - string + mailEnabled: + type: + - "null" + - boolean + mailNickname: + type: + - "null" + - string + membershipRule: + type: + - "null" + - string + membershipRuleProcessingState: + type: + - "null" + - string + onPremisesDomainName: + type: + - "null" + - string + onPremisesLastSyncDateTime: + format: date-time + type: + - "null" + - string + onPremisesNetBiosName: + type: + - "null" + - string + onPremisesProvisioningErrors: + items: + type: + - "null" + - string + type: + - "null" + - array + onPremisesSamAccountName: + type: + - "null" + - string + onPremisesSecurityIdentifier: + type: + - "null" + - string + onPremisesSyncEnabled: + type: + - "null" + - boolean + preferredDataLocation: + type: + - "null" + - string + preferredLanguage: + type: + - "null" + - string + proxyAddresses: + items: + type: + - "null" + - string + type: + - "null" + - array + renewedDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + resourceBehaviorOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + resourceProvisioningOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + securityEnabled: + type: + - "null" + - boolean + securityIdentifier: + type: + - "null" + - string + serviceProvisioningErrors: + items: + properties: + createdDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + isResolved: + type: + - "null" + - boolean + serviceInstance: + type: + - "null" + - string + type: + - "null" + - object + type: + - "null" + - array + theme: + type: + - "null" + - string + visibility: + type: + - "null" + - string + type: + - "null" + - object + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://graph.microsoft.com/v1.0/ + path: groups?$top={{ config.get('page_size', 20) }} + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: OAuthAuthenticator + client_id: "{{ config['credentials']['client_id'] }}" + client_secret: "{{ config['credentials']['client_secret'] }}" + refresh_request_body: + scope: https://graph.microsoft.com/.default + token_refresh_endpoint: >- + https://login.microsoftonline.com/{{ + config['credentials']['tenant_id'] }}/oauth2/v2.0/token + grant_type: client_credentials + scopes: [] + access_token_name: access_token + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - value + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("@odata.nextLink", {}) }}' + stop_condition: >- + {{ not response.get("@odata.nextLink", {}) + }} + - type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + partition_field: thread_id + stream: + type: DeclarativeStream + name: conversation_threads + primary_key: [] + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + conversationId: + type: + - "null" + - string + groupId: + type: + - "null" + - string + hasAttachments: + type: + - "null" + - boolean + id: + type: + - "null" + - string + isLocked: + type: + - "null" + - boolean + lastDeliveredDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + preview: + type: + - "null" + - string + topic: + type: + - "null" + - string + uniqueSenders: + items: + type: + - "null" + - string + type: + - "null" + - array + type: + - "null" + - object + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://graph.microsoft.com/v1.0/ + path: >- + groups/{{ stream_partition.group_id }}/conversations/{{ + stream_partition.conversation_id }}/threads?$top={{ config.get('page_size', 20) }} + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: OAuthAuthenticator + client_id: "{{ config['credentials']['client_id'] }}" + client_secret: "{{ config['credentials']['client_secret'] }}" + refresh_request_body: + scope: https://graph.microsoft.com/.default + token_refresh_endpoint: >- + https://login.microsoftonline.com/{{ config['credentials']['tenant_id'] + }}/oauth2/v2.0/token + grant_type: client_credentials + scopes: [] + access_token_name: access_token + error_handler: + type: CompositeErrorHandler + error_handlers: + - type: DefaultErrorHandler + response_filters: + - type: HttpResponseFilter + action: IGNORE + predicate: >- + {{ 'App Only access is not allowed' in response + }} + http_codes: + - 404 + - 403 + error_message: Some resources not found for parent resources + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - value + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("@odata.nextLink", {}) }}' + stop_condition: '{{ not response.get("@odata.nextLink", {}) }}' + partition_router: + - type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + partition_field: group_id + stream: + type: DeclarativeStream + name: groups + primary_key: [] + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + classification: + type: + - "null" + - string + createdDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + creationOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + deletedDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + description: + type: + - "null" + - string + displayName: + type: + - "null" + - string + expirationDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + groupTypes: + items: + type: + - "null" + - string + type: + - "null" + - array + id: + type: + - "null" + - string + isAssignableToRole: + type: + - "null" + - boolean + mail: + type: + - "null" + - string + mailEnabled: + type: + - "null" + - boolean + mailNickname: + type: + - "null" + - string + membershipRule: + type: + - "null" + - string + membershipRuleProcessingState: + type: + - "null" + - string + onPremisesDomainName: + type: + - "null" + - string + onPremisesLastSyncDateTime: + format: date-time + type: + - "null" + - string + onPremisesNetBiosName: + type: + - "null" + - string + onPremisesProvisioningErrors: + items: + type: + - "null" + - string + type: + - "null" + - array + onPremisesSamAccountName: + type: + - "null" + - string + onPremisesSecurityIdentifier: + type: + - "null" + - string + onPremisesSyncEnabled: + type: + - "null" + - boolean + preferredDataLocation: + type: + - "null" + - string + preferredLanguage: + type: + - "null" + - string + proxyAddresses: + items: + type: + - "null" + - string + type: + - "null" + - array + renewedDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + resourceBehaviorOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + resourceProvisioningOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + securityEnabled: + type: + - "null" + - boolean + securityIdentifier: + type: + - "null" + - string + serviceProvisioningErrors: + items: + properties: + createdDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + isResolved: + type: + - "null" + - boolean + serviceInstance: + type: + - "null" + - string + type: + - "null" + - object + type: + - "null" + - array + theme: + type: + - "null" + - string + visibility: + type: + - "null" + - string + type: + - "null" + - object + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://graph.microsoft.com/v1.0/ + path: groups?$top={{ config.get('page_size', 20) }} + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: OAuthAuthenticator + client_id: "{{ config['credentials']['client_id'] }}" + client_secret: "{{ config['credentials']['client_secret'] }}" + refresh_request_body: + scope: https://graph.microsoft.com/.default + token_refresh_endpoint: >- + https://login.microsoftonline.com/{{ + config['credentials']['tenant_id'] }}/oauth2/v2.0/token + grant_type: client_credentials + scopes: [] + access_token_name: access_token + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - value + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("@odata.nextLink", {}) }}' + stop_condition: >- + {{ not response.get("@odata.nextLink", {}) + }} + - type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + partition_field: conversation_id + stream: + type: DeclarativeStream + name: conversations + primary_key: [] + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + groupId: + type: + - "null" + - string + hasAttachments: + type: + - "null" + - boolean + id: + type: + - "null" + - string + lastDeliveredDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + preview: + type: + - "null" + - string + topic: + type: + - "null" + - string + uniqueSenders: + items: + type: + - "null" + - string + type: + - "null" + - array + type: + - "null" + - object + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://graph.microsoft.com/v1.0/ + path: >- + groups/{{ stream_partition.group_id + }}/conversations?$top={{ config.get('page_size', 20) }} + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: OAuthAuthenticator + client_id: "{{ config['credentials']['client_id'] }}" + client_secret: "{{ config['credentials']['client_secret'] }}" + refresh_request_body: + scope: https://graph.microsoft.com/.default + token_refresh_endpoint: >- + https://login.microsoftonline.com/{{ + config['credentials']['tenant_id'] }}/oauth2/v2.0/token + grant_type: client_credentials + scopes: [] + access_token_name: access_token + error_handler: + type: CompositeErrorHandler + error_handlers: + - type: DefaultErrorHandler + response_filters: + - type: HttpResponseFilter + action: IGNORE + http_codes: + - 403 + error_message: >- + App only access is restricting some + resources + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - value + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("@odata.nextLink", {}) }}' + stop_condition: >- + {{ not response.get("@odata.nextLink", {}) + }} + partition_router: + - type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + partition_field: group_id + stream: + type: DeclarativeStream + name: groups + primary_key: [] + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + classification: + type: + - "null" + - string + createdDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + creationOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + deletedDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + description: + type: + - "null" + - string + displayName: + type: + - "null" + - string + expirationDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + groupTypes: + items: + type: + - "null" + - string + type: + - "null" + - array + id: + type: + - "null" + - string + isAssignableToRole: + type: + - "null" + - boolean + mail: + type: + - "null" + - string + mailEnabled: + type: + - "null" + - boolean + mailNickname: + type: + - "null" + - string + membershipRule: + type: + - "null" + - string + membershipRuleProcessingState: + type: + - "null" + - string + onPremisesDomainName: + type: + - "null" + - string + onPremisesLastSyncDateTime: + format: date-time + type: + - "null" + - string + onPremisesNetBiosName: + type: + - "null" + - string + onPremisesProvisioningErrors: + items: + type: + - "null" + - string + type: + - "null" + - array + onPremisesSamAccountName: + type: + - "null" + - string + onPremisesSecurityIdentifier: + type: + - "null" + - string + onPremisesSyncEnabled: + type: + - "null" + - boolean + preferredDataLocation: + type: + - "null" + - string + preferredLanguage: + type: + - "null" + - string + proxyAddresses: + items: + type: + - "null" + - string + type: + - "null" + - array + renewedDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + resourceBehaviorOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + resourceProvisioningOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + securityEnabled: + type: + - "null" + - boolean + securityIdentifier: + type: + - "null" + - string + serviceProvisioningErrors: + items: + properties: + createdDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + isResolved: + type: + - "null" + - boolean + serviceInstance: + type: + - "null" + - string + type: + - "null" + - object + type: + - "null" + - array + theme: + type: + - "null" + - string + visibility: + type: + - "null" + - string + type: + - "null" + - object + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://graph.microsoft.com/v1.0/ + path: groups?$top={{ config.get('page_size', 20) }} + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: OAuthAuthenticator + client_id: "{{ config['credentials']['client_id'] }}" + client_secret: "{{ config['credentials']['client_secret'] }}" + refresh_request_body: + scope: https://graph.microsoft.com/.default + token_refresh_endpoint: >- + https://login.microsoftonline.com/{{ + config['credentials']['tenant_id'] }}/oauth2/v2.0/token + grant_type: client_credentials + scopes: [] + access_token_name: access_token + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - value + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: >- + {{ response.get("@odata.nextLink", {}) + }} + stop_condition: >- + {{ not response.get("@odata.nextLink", + {}) }} + - type: DeclarativeStream + name: team_drives + primary_key: [] + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + createdBy: + additionalProperties: true + properties: + user: + additionalProperties: true + properties: + displayName: + type: + - "null" + - string + type: + - "null" + - object + type: + - "null" + - object + createdDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + description: + type: + - "null" + - string + driveType: + type: + - "null" + - string + id: + type: + - "null" + - string + lastModifiedBy: + additionalProperties: true + properties: + displayName: + type: + - "null" + - string + id: + type: + - "null" + - string + type: + - "null" + - object + lastModifiedDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + name: + type: + - "null" + - string + owner: + additionalProperties: true + properties: + group: + additionalProperties: true + properties: + displayName: + type: + - "null" + - string + email: + type: + - "null" + - string + id: + type: + - "null" + - string + type: + - "null" + - object + type: + - "null" + - object + quota: + additionalProperties: true + properties: + deleted: + type: + - "null" + - integer + remaining: + type: + - "null" + - number + state: + type: + - "null" + - string + total: + type: + - "null" + - number + used: + type: + - "null" + - integer + type: + - "null" + - object + webUrl: + type: + - "null" + - string + type: + - "null" + - object + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://graph.microsoft.com/v1.0/ + path: >- + groups/{{ stream_partition.group_id }}/drives?$top={{ config.get('page_size', 20) }} + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: OAuthAuthenticator + client_id: "{{ config['credentials']['client_id'] }}" + client_secret: "{{ config['credentials']['client_secret'] }}" + refresh_request_body: + scope: https://graph.microsoft.com/.default + token_refresh_endpoint: >- + https://login.microsoftonline.com/{{ config['credentials']['tenant_id'] + }}/oauth2/v2.0/token + grant_type: client_credentials + scopes: [] + access_token_name: access_token + error_handler: + type: CompositeErrorHandler + error_handlers: + - type: DefaultErrorHandler + response_filters: + - type: HttpResponseFilter + action: IGNORE + http_codes: + - 404 + - 403 + error_message: Some resources are not found or not allowed to access + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - value + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("@odata.nextLink", {}) }}' + stop_condition: '{{ not response.get("@odata.nextLink", {}) }}' + partition_router: + - type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + partition_field: group_id + stream: + type: DeclarativeStream + name: groups + primary_key: [] + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + classification: + type: + - "null" + - string + createdDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + creationOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + deletedDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + description: + type: + - "null" + - string + displayName: + type: + - "null" + - string + expirationDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + groupTypes: + items: + type: + - "null" + - string + type: + - "null" + - array + id: + type: + - "null" + - string + isAssignableToRole: + type: + - "null" + - boolean + mail: + type: + - "null" + - string + mailEnabled: + type: + - "null" + - boolean + mailNickname: + type: + - "null" + - string + membershipRule: + type: + - "null" + - string + membershipRuleProcessingState: + type: + - "null" + - string + onPremisesDomainName: + type: + - "null" + - string + onPremisesLastSyncDateTime: + format: date-time + type: + - "null" + - string + onPremisesNetBiosName: + type: + - "null" + - string + onPremisesProvisioningErrors: + items: + type: + - "null" + - string + type: + - "null" + - array + onPremisesSamAccountName: + type: + - "null" + - string + onPremisesSecurityIdentifier: + type: + - "null" + - string + onPremisesSyncEnabled: + type: + - "null" + - boolean + preferredDataLocation: + type: + - "null" + - string + preferredLanguage: + type: + - "null" + - string + proxyAddresses: + items: + type: + - "null" + - string + type: + - "null" + - array + renewedDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + resourceBehaviorOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + resourceProvisioningOptions: + items: + type: + - "null" + - string + type: + - "null" + - array + securityEnabled: + type: + - "null" + - boolean + securityIdentifier: + type: + - "null" + - string + serviceProvisioningErrors: + items: + properties: + createdDateTime: + airbyte_type: timestamp_without_timezone + format: date-time + type: + - "null" + - string + isResolved: + type: + - "null" + - boolean + serviceInstance: + type: + - "null" + - string + type: + - "null" + - object + type: + - "null" + - array + theme: + type: + - "null" + - string + visibility: + type: + - "null" + - string + type: + - "null" + - object + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://graph.microsoft.com/v1.0/ + path: groups?$top={{ config.get('page_size', 20) }} + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: OAuthAuthenticator + client_id: "{{ config['credentials']['client_id'] }}" + client_secret: "{{ config['credentials']['client_secret'] }}" + refresh_request_body: + scope: https://graph.microsoft.com/.default + token_refresh_endpoint: >- + https://login.microsoftonline.com/{{ config['credentials']['tenant_id'] + }}/oauth2/v2.0/token + grant_type: client_credentials + scopes: [] + access_token_name: access_token + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - value + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("@odata.nextLink", {}) }}' + stop_condition: '{{ not response.get("@odata.nextLink", {}) }}' + - type: DeclarativeStream + name: team_device_usage_report + primary_key: [] + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + deletedDate: + type: + - "null" + - string + isDeleted: + type: + - "null" + - string + isLisenced: + type: + - "null" + - string + lastActivityDate: + type: + - "null" + - string + reportPeriod: + type: + - "null" + - string + reportRefreshDate: + format: date + type: + - "null" + - string + usedAndroidPhone: + type: + - "null" + - string + usedChromeOS: + type: + - "null" + - string + usedIOs: + type: + - "null" + - string + usedLinux: + type: + - "null" + - string + usedMac: + type: + - "null" + - string + usedWeb: + type: + - "null" + - string + usedWindows: + type: + - "null" + - string + usedWindowsPhone: + type: + - "null" + - string + userId: + type: + - "null" + - string + userPrincipalName: + type: + - "null" + - string + type: + - "null" + - object + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://graph.microsoft.com/v1.0/ + path: >- + reports/getTeamsDeviceUsageUserDetail(period='{{ config['period'] + }}')?$top={{ config.get('page_size', 20) }} + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: OAuthAuthenticator + client_id: "{{ config['credentials']['client_id'] }}" + client_secret: "{{ config['credentials']['client_secret'] }}" + refresh_request_body: + scope: https://graph.microsoft.com/.default + token_refresh_endpoint: >- + https://login.microsoftonline.com/{{ config['credentials']['tenant_id'] + }}/oauth2/v2.0/token + grant_type: client_credentials + scopes: [] + access_token_name: access_token + error_handler: + type: CompositeErrorHandler + error_handlers: + - type: DefaultErrorHandler + response_filters: + - type: HttpResponseFilter + action: IGNORE + http_codes: + - 400 + error_message: Personal account not supported + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - value + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("@odata.nextLink", {}) }}' + stop_condition: '{{ not response.get("@odata.nextLink", {}) }}' +spec: + type: Spec + documentation_url: https://docs.airbyte.com/integrations/sources/microsoft-teams + connection_specification: + $schema: http://json-schema.org/draft-07/schema# + title: Microsoft Teams Spec + type: object + required: + - period + additionalProperties: true + properties: + period: + type: string + title: Period + description: + "Specifies the length of time over which the Team Device Report + stream is aggregated. The supported values are: D7, D30, D90, and D180." + examples: + - D7 + credentials: + title: Authentication mechanism + description: Choose how to authenticate to Microsoft + type: object + oneOf: + - type: object + title: Authenticate via Microsoft (OAuth 2.0) + required: + - tenant_id + - client_id + - client_secret + - refresh_token + properties: + auth_type: + type: string + const: Client + enum: + - Client + default: Client + order: 0 + tenant_id: + title: Directory (tenant) ID + type: string + description: + "A globally unique identifier (GUID) that is different than + your organization name or domain. Follow these steps to obtain: open + one of the Teams where you belong inside the Teams Application -> Click + on the … next to the Team title -> Click on Get link to team -> Copy + the link to the team and grab the tenant ID form the URL" + airbyte_secret: true + client_id: + title: Client ID + type: string + description: The Client ID of your Microsoft Teams developer application. + client_secret: + title: Client Secret + type: string + description: The Client Secret of your Microsoft Teams developer application. + airbyte_secret: true + refresh_token: + title: Refresh Token + type: string + description: A Refresh Token to renew the expired Access Token. + airbyte_secret: true + - type: object + title: Authenticate via Microsoft + required: + - tenant_id + - client_id + - client_secret + properties: + auth_type: + type: string + const: Token + enum: + - Token + default: Token + order: 0 + tenant_id: + title: Directory (tenant) ID + type: string + description: + "A globally unique identifier (GUID) that is different than + your organization name or domain. Follow these steps to obtain: open + one of the Teams where you belong inside the Teams Application -> Click + on the … next to the Team title -> Click on Get link to team -> Copy + the link to the team and grab the tenant ID form the URL" + airbyte_secret: true + client_id: + title: Client ID + type: string + description: The Client ID of your Microsoft Teams developer application. + client_secret: + title: Client Secret + type: string + description: The Client Secret of your Microsoft Teams developer application. + airbyte_secret: true + advanced_auth: + auth_flow_type: oauth2.0 + predicate_key: + - credentials + - auth_type + predicate_value: Client + oauth_config_specification: + complete_oauth_output_specification: + type: object + additionalProperties: false + properties: + refresh_token: + type: string + path_in_connector_config: + - credentials + - refresh_token + complete_oauth_server_input_specification: + type: object + additionalProperties: false + properties: + client_id: + type: string + client_secret: + type: string + complete_oauth_server_output_specification: + type: object + additionalProperties: false + properties: + client_id: + type: string + path_in_connector_config: + - credentials + - client_id + client_secret: + type: string + path_in_connector_config: + - credentials + - client_secret + oauth_user_input_from_connector_config_specification: + type: object + additionalProperties: false + properties: + tenant_id: + type: string + path_in_connector_config: + - credentials + - tenant_id +metadata: + autoImportSchema: + users: false + groups: false + group_members: false + group_owners: false + conversations: false + channels: false + channel_members: false + channel_tabs: false + conversation_threads: false + conversation_posts: false + team_drives: false + team_device_usage_report: false diff --git a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/run.py b/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/run.py index f22e12386ac78..4cb08cf9e11c3 100644 --- a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/run.py +++ b/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/run.py @@ -6,7 +6,8 @@ import sys from airbyte_cdk.entrypoint import launch -from source_microsoft_teams import SourceMicrosoftTeams + +from .source import SourceMicrosoftTeams def run(): diff --git a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/channel.json b/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/channel.json deleted file mode 100644 index b99d57b039407..0000000000000 --- a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/channel.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "id": { - "type": ["null", "string"] - }, - "displayName": { - "type": ["null", "string"] - }, - "roles": { - "type": ["null", "array"], - "items": { - "type": ["null", "string"] - } - }, - "description": { - "type": ["null", "string"] - }, - "email": { - "type": ["null", "string"] - }, - "webUrl": { - "type": ["null", "string"] - } - } -} diff --git a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/channel_members.json b/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/channel_members.json deleted file mode 100644 index 3c236063c1c61..0000000000000 --- a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/channel_members.json +++ /dev/null @@ -1,39 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "id": { - "type": ["null", "string"] - }, - "@odata.type": { - "type": ["null", "string"] - }, - "displayName": { - "type": ["null", "string"] - }, - "roles": { - "type": ["null", "array"], - "items": { - "type": ["null", "string"] - } - }, - "userId": { - "type": ["null", "string"] - }, - "email": { - "type": ["null", "string"] - }, - "channelId": { - "type": ["null", "string"] - }, - "tenantId": { - "type": ["null", "string"] - }, - "visibleHistoryStartDateTime": { - "type": ["null", "string"], - "format": "date-time", - "airbyte_type": "timestamp_without_timezone" - } - } -} diff --git a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/channel_message_replies.json b/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/channel_message_replies.json deleted file mode 100644 index ac7cbb06d0023..0000000000000 --- a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/channel_message_replies.json +++ /dev/null @@ -1,236 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "id": { - "type": ["null", "string"] - }, - "replyToId": { - "type": ["null", "string"] - }, - "etag": { - "type": ["null", "string"] - }, - "messageType": { - "type": ["null", "string"] - }, - "createdDateTime": { - "type": ["null", "string"], - "format": "date-time", - "airbyte_type": "timestamp_without_timezone" - }, - "lastModifiedDateTime": { - "type": ["null", "string"], - "format": "date-time", - "airbyte_type": "timestamp_without_timezone" - }, - "lastEditedDateTime": { - "type": ["null", "string"], - "format": "date-time", - "airbyte_type": "timestamp_without_timezone" - }, - "deletedDateTime": { - "type": ["null", "string"], - "format": "date-time", - "airbyte_type": "timestamp_without_timezone" - }, - "subject": { - "type": ["null", "string"] - }, - "summary": { - "type": ["null", "string"] - }, - "chatId": { - "type": ["null", "string"] - }, - "importance": { - "type": ["null", "string"] - }, - "locale": { - "type": ["null", "string"] - }, - "webUrl": { - "type": ["null", "string"] - }, - "policyViolation": { - "type": ["null", "string"] - }, - "from": { - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "application": { - "type": ["null", "string"] - }, - "device": { - "type": ["null", "string"] - }, - "conversation": { - "type": ["null", "string"] - }, - "user": { - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "id": { - "type": ["null", "string"] - }, - "displayName": { - "type": ["null", "string"] - }, - "userIdentityType": { - "type": ["null", "string"] - }, - "tenantId": { - "type": ["null", "string"] - } - } - } - } - }, - "body": { - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "contentType": { - "type": ["null", "string"] - }, - "content": { - "type": ["null", "string"] - } - } - }, - "channelIdentity": { - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "teamId": { - "type": ["null", "string"] - }, - "channelId": { - "type": ["null", "string"] - } - } - }, - "attachments": { - "type": ["null", "array"], - "items": { - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "id": { - "type": ["null", "string"] - }, - "contentType": { - "type": ["null", "string"] - }, - "contentUrl": { - "type": ["null", "string"] - }, - "content": { - "type": ["null", "string"] - }, - "name": { - "type": ["null", "string"] - }, - "thumbnailUrl": { - "type": ["null", "string"] - } - } - } - }, - "mentions": { - "type": ["null", "array"], - "items": { - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "id": { - "type": ["null", "integer"] - }, - "mentionText": { - "type": ["null", "string"] - }, - "mentioned": { - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "application": { - "type": ["null", "string"] - }, - "device": { - "type": ["null", "string"] - }, - "conversation": { - "type": ["null", "string"] - }, - "user": { - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "id": { - "type": ["null", "string"] - }, - "displayName": { - "type": ["null", "string"] - }, - "userIdentityType": { - "type": ["null", "string"] - } - } - } - } - } - } - } - }, - "reactions": { - "type": ["null", "array"], - "items": { - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "reactionType": { - "type": ["null", "string"] - }, - "createdDateTime": { - "type": ["null", "string"], - "format": "date-time", - "airbyte_type": "timestamp_without_timezone" - }, - "user": { - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "application": { - "type": ["null", "string"] - }, - "device": { - "type": ["null", "string"] - }, - "conversation": { - "type": ["null", "string"] - }, - "user": { - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "id": { - "type": ["null", "string"] - }, - "displayName": { - "type": ["null", "string"] - }, - "userIdentityType": { - "type": ["null", "string"] - } - } - } - } - } - } - } - } - } -} diff --git a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/channel_messages.json b/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/channel_messages.json deleted file mode 100644 index ced93bf8121bb..0000000000000 --- a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/channel_messages.json +++ /dev/null @@ -1,248 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "id": { - "type": ["null", "string"] - }, - "replyToId": { - "type": ["null", "string"] - }, - "etag": { - "type": ["null", "string"] - }, - "messageType": { - "type": ["null", "string"] - }, - "createdDateTime": { - "type": ["null", "string"], - "format": "date-time", - "airbyte_type": "timestamp_without_timezone" - }, - "lastModifiedDateTime": { - "type": ["null", "string"], - "format": "date-time", - "airbyte_type": "timestamp_without_timezone" - }, - "deletedDateTime": { - "type": ["null", "string"], - "format": "date-time", - "airbyte_type": "timestamp_without_timezone" - }, - "subject": { - "type": ["null", "string"] - }, - "summary": { - "type": ["null", "string"] - }, - "chatId": { - "type": ["null", "string"] - }, - "importance": { - "type": ["null", "string"] - }, - "locale": { - "type": ["null", "string"] - }, - "webUrl": { - "type": ["null", "string"] - }, - "policyViolation": { - "type": ["null", "string"] - }, - "from": { - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "application": { - "type": ["null", "string"] - }, - "device": { - "type": ["null", "string"] - }, - "conversation": { - "type": ["null", "string"] - }, - "user": { - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "id": { - "type": ["null", "string"] - }, - "displayName": { - "type": ["null", "string"] - }, - "userIdentityType": { - "type": ["null", "string"] - } - } - } - } - }, - "body": { - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "contentType": { - "type": ["null", "string"] - }, - "content": { - "type": ["null", "string"] - } - } - }, - "channelIdentity": { - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "teamId": { - "type": ["null", "string"] - }, - "channelId": { - "type": ["null", "string"] - } - } - }, - "attachments": { - "anyOf": [ - { - "type": "array", - "items": { - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "id": { - "type": ["null", "string"] - }, - "contentType": { - "type": ["null", "string"] - }, - "contentUrl": { - "type": ["null", "string"] - }, - "content": { - "type": ["null", "string"] - }, - "name": { - "type": ["null", "string"] - }, - "thumbnailUrl": { - "type": ["null", "string"] - } - } - } - }, - { - "type": "null" - } - ] - }, - "mentions": { - "anyOf": [ - { - "type": "array", - "items": { - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "id": { - "type": ["null", "integer"] - }, - "mentionText": { - "type": ["null", "string"] - }, - "mentioned": { - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "application": { - "type": ["null", "string"] - }, - "device": { - "type": ["null", "string"] - }, - "conversation": { - "type": ["null", "string"] - }, - "user": { - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "id": { - "type": ["null", "string"] - }, - "displayName": { - "type": ["null", "string"] - }, - "userIdentityType": { - "type": ["null", "string"] - } - } - } - } - } - } - } - }, - { - "type": "null" - } - ] - }, - "reactions": { - "anyOf": [ - { - "type": "array", - "items": { - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "reactionType": { - "type": ["null", "string"] - }, - "createdDateTime": { - "type": ["null", "string"], - "format": "date-time" - }, - "user": { - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "application": { - "type": ["null", "string"] - }, - "device": { - "type": ["null", "string"] - }, - "conversation": { - "type": ["null", "string"] - }, - "user": { - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "id": { - "type": ["null", "string"] - }, - "displayName": { - "type": ["null", "string"] - }, - "userIdentityType": { - "type": ["null", "string"] - } - } - } - } - } - } - } - }, - { - "type": "null" - } - ] - } - } -} diff --git a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/channel_tabs.json b/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/channel_tabs.json deleted file mode 100644 index ed0867b1e4c77..0000000000000 --- a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/channel_tabs.json +++ /dev/null @@ -1,67 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "id": { - "type": ["null", "string"] - }, - "groupId": { - "type": ["null", "string"] - }, - "channelId": { - "type": ["null", "string"] - }, - "displayName": { - "type": ["null", "string"] - }, - "webUrl": { - "type": ["null", "string"] - }, - "sortOrderIndex": { - "type": ["null", "string"] - }, - "teamsApp": { - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "id": { - "type": ["null", "string"] - }, - "displayName": { - "type": ["null", "string"] - }, - "distributionMethod": { - "type": ["null", "string"] - } - } - }, - "configuration": { - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "entityId": { - "type": ["null", "string"] - }, - "contentUrl": { - "type": ["null", "string"] - }, - "removeUrl": { - "type": ["null", "string"] - }, - "websiteUrl": { - "type": ["null", "string"] - }, - "wikiTabId": { - "type": ["null", "integer"] - }, - "wikiDefaultTab": { - "type": ["null", "boolean"] - }, - "hasContent": { - "type": ["null", "boolean"] - } - } - } - } -} diff --git a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/channels.json b/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/channels.json deleted file mode 100644 index 999eae607c23a..0000000000000 --- a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/channels.json +++ /dev/null @@ -1,36 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "id": { - "type": ["null", "string"] - }, - "createdDateTime": { - "type": "string", - "format": "date-time", - "airbyte_type": "timestamp_without_timezone" - }, - "displayName": { - "type": ["null", "string"] - }, - "description": { - "type": ["null", "string"] - }, - "email": { - "type": ["null", "string"] - }, - "isFavoriteByDefault": { - "type": ["null", "boolean"] - }, - "membershipType": { - "type": ["null", "string"] - }, - "tenantId": { - "type": ["null", "string"] - }, - "webUrl": { - "type": ["null", "string"] - } - } -} diff --git a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/conversation_posts.json b/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/conversation_posts.json deleted file mode 100644 index 4389b581962aa..0000000000000 --- a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/conversation_posts.json +++ /dev/null @@ -1,93 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "id": { - "type": ["null", "string"] - }, - "threadId": { - "type": ["null", "string"] - }, - "conversationId": { - "type": ["null", "string"] - }, - "createdDateTime": { - "type": ["null", "string"], - "format": "date-time", - "airbyte_type": "timestamp_without_timezone" - }, - "lastModifiedDateTime": { - "type": ["null", "string"], - "format": "date-time", - "airbyte_type": "timestamp_without_timezone" - }, - "@odata.etag": { - "type": ["null", "string"] - }, - "changeKey": { - "type": ["null", "string"] - }, - "categories": { - "type": ["null", "array"], - "items": { - "type": ["null", "string"] - } - }, - "receivedDateTime": { - "type": ["null", "string"], - "format": "date-time", - "airbyte_type": "timestamp_without_timezone" - }, - "hasAttachments": { - "type": ["null", "boolean"] - }, - "body": { - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "contentType": { - "type": ["null", "string"] - }, - "content": { - "type": ["null", "string"] - } - } - }, - "from": { - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "emailAddress": { - "type": ["null", "object"], - "properties": { - "name": { - "type": ["null", "string"] - }, - "address": { - "type": ["null", "string"] - } - } - } - } - }, - "sender": { - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "emailAddress": { - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "name": { - "type": ["null", "string"] - }, - "address": { - "type": ["null", "string"] - } - } - } - } - } - } -} diff --git a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/conversation_threads.json b/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/conversation_threads.json deleted file mode 100644 index 54c27157062e4..0000000000000 --- a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/conversation_threads.json +++ /dev/null @@ -1,39 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "id": { - "type": ["null", "string"] - }, - "groupId": { - "type": ["null", "string"] - }, - "conversationId": { - "type": ["null", "string"] - }, - "topic": { - "type": ["null", "string"] - }, - "hasAttachments": { - "type": ["null", "boolean"] - }, - "lastDeliveredDateTime": { - "type": ["null", "string"], - "format": "date-time", - "airbyte_type": "timestamp_without_timezone" - }, - "uniqueSenders": { - "type": ["null", "array"], - "items": { - "type": ["null", "string"] - } - }, - "preview": { - "type": ["null", "string"] - }, - "isLocked": { - "type": ["null", "boolean"] - } - } -} diff --git a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/conversations.json b/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/conversations.json deleted file mode 100644 index e9045284dd5cd..0000000000000 --- a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/conversations.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "id": { - "type": ["null", "string"] - }, - "groupId": { - "type": ["null", "string"] - }, - "topic": { - "type": ["null", "string"] - }, - "hasAttachments": { - "type": ["null", "boolean"] - }, - "lastDeliveredDateTime": { - "type": ["null", "string"], - "format": "date-time", - "airbyte_type": "timestamp_without_timezone" - }, - "uniqueSenders": { - "type": ["null", "array"], - "items": { - "type": ["null", "string"] - } - }, - "preview": { - "type": ["null", "string"] - } - } -} diff --git a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/group_members.json b/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/group_members.json deleted file mode 100644 index 2bf02fd72977b..0000000000000 --- a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/group_members.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "id": { - "type": ["null", "string"] - }, - "@odata.type": { - "type": ["null", "string"] - }, - "businessPhones": { - "type": ["null", "array"], - "items": { - "type": ["null", "string"] - } - }, - "displayName": { - "type": ["null", "string"] - }, - "givenName": { - "type": ["null", "string"] - }, - "jobTitle": { - "type": ["null", "string"] - }, - "mail": { - "type": ["null", "string"] - }, - "mobilePhone": { - "type": ["null", "string"] - }, - "officeLocation": { - "type": ["null", "string"] - }, - "preferredLanguage": { - "type": ["null", "string"] - }, - "surname": { - "type": ["null", "string"] - }, - "userPrincipalName": { - "type": ["null", "string"] - } - } -} diff --git a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/group_owners.json b/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/group_owners.json deleted file mode 100644 index aa1b8915682d9..0000000000000 --- a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/group_owners.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "id": { - "type": ["null", "string"] - }, - "groupId": { - "type": ["null", "string"] - }, - "@odata.type": { - "type": ["null", "string"] - }, - "businessPhones": { - "type": ["null", "array"], - "items": { - "type": ["null", "string"] - } - }, - "displayName": { - "type": ["null", "string"] - }, - "givenName": { - "type": ["null", "string"] - }, - "jobTitle": { - "type": ["null", "string"] - }, - "mail": { - "type": ["null", "string"] - }, - "mobilePhone": { - "type": ["null", "string"] - }, - "officeLocation": { - "type": ["null", "string"] - }, - "preferredLanguage": { - "type": ["null", "string"] - }, - "surname": { - "type": ["null", "string"] - }, - "userPrincipalName": { - "type": ["null", "string"] - } - } -} diff --git a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/groups.json b/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/groups.json deleted file mode 100644 index 2876585b6fad0..0000000000000 --- a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/groups.json +++ /dev/null @@ -1,149 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "id": { - "type": ["null", "string"] - }, - "deletedDateTime": { - "type": ["null", "string"], - "format": "date-time", - "airbyte_type": "timestamp_without_timezone" - }, - "classification": { - "type": ["null", "string"] - }, - "createdDateTime": { - "type": ["null", "string"], - "format": "date-time", - "airbyte_type": "timestamp_without_timezone" - }, - "creationOptions": { - "type": ["null", "array"], - "items": { - "type": ["null", "string"] - } - }, - "description": { - "type": ["null", "string"] - }, - "displayName": { - "type": ["null", "string"] - }, - "expirationDateTime": { - "type": ["null", "string"], - "format": "date-time", - "airbyte_type": "timestamp_without_timezone" - }, - "groupTypes": { - "type": ["null", "array"], - "items": { - "type": ["null", "string"] - } - }, - "isAssignableToRole": { - "type": ["null", "boolean"] - }, - "mail": { - "type": ["null", "string"] - }, - "mailEnabled": { - "type": ["null", "boolean"] - }, - "mailNickname": { - "type": ["null", "string"] - }, - "membershipRule": { - "type": ["null", "string"] - }, - "membershipRuleProcessingState": { - "type": ["null", "string"] - }, - "onPremisesDomainName": { - "type": ["null", "string"] - }, - "onPremisesLastSyncDateTime": { - "type": ["null", "string"], - "format": "date-time" - }, - "onPremisesNetBiosName": { - "type": ["null", "string"] - }, - "onPremisesSamAccountName": { - "type": ["null", "string"] - }, - "onPremisesSecurityIdentifier": { - "type": ["null", "string"] - }, - "onPremisesSyncEnabled": { - "type": ["null", "boolean"] - }, - "preferredDataLocation": { - "type": ["null", "string"] - }, - "preferredLanguage": { - "type": ["null", "string"] - }, - "proxyAddresses": { - "type": ["null", "array"], - "items": { - "type": ["null", "string"] - } - }, - "renewedDateTime": { - "type": ["null", "string"], - "format": "date-time", - "airbyte_type": "timestamp_without_timezone" - }, - "resourceBehaviorOptions": { - "type": ["null", "array"], - "items": { - "type": ["null", "string"] - } - }, - "resourceProvisioningOptions": { - "type": ["null", "array"], - "items": { - "type": ["null", "string"] - } - }, - "securityEnabled": { - "type": ["null", "boolean"] - }, - "securityIdentifier": { - "type": ["null", "string"] - }, - "serviceProvisioningErrors": { - "type": ["null", "array"], - "items": { - "type": ["null", "object"], - "properties": { - "createdDateTime": { - "type": ["null", "string"], - "format": "date-time", - "airbyte_type": "timestamp_without_timezone" - }, - "isResolved": { - "type": ["null", "boolean"] - }, - "serviceInstance": { - "type": ["null", "string"] - } - } - } - }, - "theme": { - "type": ["null", "string"] - }, - "visibility": { - "type": ["null", "string"] - }, - "onPremisesProvisioningErrors": { - "type": ["null", "array"], - "items": { - "type": ["null", "string"] - } - } - } -} diff --git a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/team_device_usage_report.json b/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/team_device_usage_report.json deleted file mode 100644 index 8ae6a571f5d3b..0000000000000 --- a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/team_device_usage_report.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "reportRefreshDate": { - "type": ["null", "string"], - "format": "date" - }, - "userId": { - "type": ["null", "string"] - }, - "userPrincipalName": { - "type": ["null", "string"] - }, - "lastActivityDate": { - "type": ["null", "string"] - }, - "isDeleted": { - "type": ["null", "string"] - }, - "deletedDate": { - "type": ["null", "string"] - }, - "usedWeb": { - "type": ["null", "string"] - }, - "usedWindowsPhone": { - "type": ["null", "string"] - }, - "usedIOs": { - "type": ["null", "string"] - }, - "usedMac": { - "type": ["null", "string"] - }, - "usedAndroidPhone": { - "type": ["null", "string"] - }, - "usedWindows": { - "type": ["null", "string"] - }, - "usedChromeOS": { - "type": ["null", "string"] - }, - "usedLinux": { - "type": ["null", "string"] - }, - "isLisenced": { - "type": ["null", "string"] - }, - "reportPeriod": { - "type": ["null", "string"] - } - } -} diff --git a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/team_drives.json b/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/team_drives.json deleted file mode 100644 index fbb40f7dcf972..0000000000000 --- a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/team_drives.json +++ /dev/null @@ -1,101 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "id": { - "type": ["null", "string"] - }, - "createdDateTime": { - "type": ["null", "string"], - "format": "date-time", - "airbyte_type": "timestamp_without_timezone" - }, - "description": { - "type": ["null", "string"] - }, - "lastModifiedBy": { - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "displayName": { - "type": ["null", "string"] - }, - "id": { - "type": ["null", "string"] - } - } - }, - "lastModifiedDateTime": { - "type": ["null", "string"], - "format": "date-time", - "airbyte_type": "timestamp_without_timezone" - }, - "name": { - "type": ["null", "string"] - }, - "webUrl": { - "type": ["null", "string"] - }, - "driveType": { - "type": ["null", "string"] - }, - "createdBy": { - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "user": { - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "displayName": { - "type": ["null", "string"] - } - } - } - } - }, - "owner": { - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "group": { - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "email": { - "type": ["null", "string"] - }, - "id": { - "type": ["null", "string"] - }, - "displayName": { - "type": ["null", "string"] - } - } - } - } - }, - "quota": { - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "deleted": { - "type": ["null", "integer"] - }, - "remaining": { - "type": ["null", "number"] - }, - "state": { - "type": ["null", "string"] - }, - "total": { - "type": ["null", "number"] - }, - "used": { - "type": ["null", "integer"] - } - } - } - } -} diff --git a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/users.json b/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/users.json deleted file mode 100644 index e02d86a531062..0000000000000 --- a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/schemas/users.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": ["null", "object"], - "additionalProperties": true, - "properties": { - "businessPhones": { - "type": ["null", "array"], - "items": { - "type": ["null", "string"] - } - }, - "displayName": { - "type": ["null", "string"] - }, - "givenName": { - "type": ["null", "string"] - }, - "jobTitle": { - "type": ["null", "string"] - }, - "mail": { - "type": ["null", "string"] - }, - "mobilePhone": { - "type": ["null", "string"] - }, - "officeLocation": { - "type": ["null", "string"] - }, - "preferredLanguage": { - "type": ["null", "string"] - }, - "surname": { - "type": ["null", "string"] - }, - "userPrincipalName": { - "type": ["null", "string"] - }, - "id": { - "type": ["null", "string"] - } - } -} diff --git a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/source.py b/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/source.py index c087b4f0210c2..2a5e4f383d32f 100644 --- a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/source.py +++ b/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/source.py @@ -2,50 +2,17 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource -import json -from datetime import datetime -from typing import Dict, Generator +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. -from airbyte_cdk.logger import AirbyteLogger -from airbyte_cdk.models.airbyte_protocol import AirbyteCatalog, AirbyteMessage, AirbyteRecordMessage, ConfiguredAirbyteCatalog, Type -from airbyte_cdk.sources.deprecated.base_source import BaseSource +WARNING: Do not modify this file. +""" -from .client import Client - - -class SourceMicrosoftTeams(BaseSource): - client_class = Client +# Declarative Source +class SourceMicrosoftTeams(YamlDeclarativeSource): def __init__(self): - super().__init__() - - def _get_client(self, config: json): - """Construct client""" - client = self.client_class(config=config) - return client - - def discover(self, logger: AirbyteLogger, config: json) -> AirbyteCatalog: - client = self._get_client(config) - return AirbyteCatalog(streams=client.get_streams()) - - def read( - self, logger: AirbyteLogger, config: json, catalog: ConfiguredAirbyteCatalog, state: Dict[str, any] - ) -> Generator[AirbyteMessage, None, None]: - client = self._get_client(config) - - logger.info(f"Starting syncing {self.__class__.__name__}") - for configured_stream in catalog.streams: - stream = configured_stream.stream - if stream.name not in client.ENTITY_MAP.keys(): - continue - logger.info(f"Syncing {stream.name} stream") - for record in self._read_record(client=client, stream=stream.name): - yield AirbyteMessage(type=Type.RECORD, record=record) - logger.info(f"Finished syncing {self.__class__.__name__}") - - def _read_record(self, client: Client, stream: str): - for record in client.ENTITY_MAP[stream](): - for item in record: - now = int(datetime.now().timestamp()) * 1000 - yield AirbyteRecordMessage(stream=stream, data=item, emitted_at=now) + super().__init__(**{"path_to_yaml": "manifest.yaml"}) diff --git a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/spec.json b/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/spec.json deleted file mode 100644 index 39de5a8b8a96d..0000000000000 --- a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/spec.json +++ /dev/null @@ -1,151 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.com/integrations/sources/microsoft-teams", - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Microsoft Teams Spec", - "type": "object", - "required": ["period"], - "additionalProperties": true, - "properties": { - "period": { - "type": "string", - "title": "Period", - "description": "Specifies the length of time over which the Team Device Report stream is aggregated. The supported values are: D7, D30, D90, and D180.", - "examples": ["D7"] - }, - "credentials": { - "title": "Authentication mechanism", - "description": "Choose how to authenticate to Microsoft", - "type": "object", - "oneOf": [ - { - "type": "object", - "title": "Authenticate via Microsoft (OAuth 2.0)", - "required": [ - "tenant_id", - "client_id", - "client_secret", - "refresh_token" - ], - "properties": { - "auth_type": { - "type": "string", - "const": "Client", - "enum": ["Client"], - "default": "Client", - "order": 0 - }, - "tenant_id": { - "title": "Directory (tenant) ID", - "type": "string", - "description": "A globally unique identifier (GUID) that is different than your organization name or domain. Follow these steps to obtain: open one of the Teams where you belong inside the Teams Application -> Click on the … next to the Team title -> Click on Get link to team -> Copy the link to the team and grab the tenant ID form the URL", - "airbyte_secret": true - }, - "client_id": { - "title": "Client ID", - "type": "string", - "description": "The Client ID of your Microsoft Teams developer application." - }, - "client_secret": { - "title": "Client Secret", - "type": "string", - "description": "The Client Secret of your Microsoft Teams developer application.", - "airbyte_secret": true - }, - "refresh_token": { - "title": "Refresh Token", - "type": "string", - "description": "A Refresh Token to renew the expired Access Token.", - "airbyte_secret": true - } - } - }, - { - "type": "object", - "title": "Authenticate via Microsoft", - "required": ["tenant_id", "client_id", "client_secret"], - "properties": { - "auth_type": { - "type": "string", - "const": "Token", - "enum": ["Token"], - "default": "Token", - "order": 0 - }, - "tenant_id": { - "title": "Directory (tenant) ID", - "type": "string", - "description": "A globally unique identifier (GUID) that is different than your organization name or domain. Follow these steps to obtain: open one of the Teams where you belong inside the Teams Application -> Click on the … next to the Team title -> Click on Get link to team -> Copy the link to the team and grab the tenant ID form the URL", - "airbyte_secret": true - }, - "client_id": { - "title": "Client ID", - "type": "string", - "description": "The Client ID of your Microsoft Teams developer application." - }, - "client_secret": { - "title": "Client Secret", - "type": "string", - "description": "The Client Secret of your Microsoft Teams developer application.", - "airbyte_secret": true - } - } - } - ] - } - } - }, - "advanced_auth": { - "auth_flow_type": "oauth2.0", - "predicate_key": ["credentials", "auth_type"], - "predicate_value": "Client", - "oauth_config_specification": { - "complete_oauth_output_specification": { - "type": "object", - "additionalProperties": false, - "properties": { - "refresh_token": { - "type": "string", - "path_in_connector_config": ["credentials", "refresh_token"] - } - } - }, - "complete_oauth_server_input_specification": { - "type": "object", - "additionalProperties": false, - "properties": { - "client_id": { - "type": "string" - }, - "client_secret": { - "type": "string" - } - } - }, - "complete_oauth_server_output_specification": { - "type": "object", - "additionalProperties": false, - "properties": { - "client_id": { - "type": "string", - "path_in_connector_config": ["credentials", "client_id"] - }, - "client_secret": { - "type": "string", - "path_in_connector_config": ["credentials", "client_secret"] - } - } - }, - "oauth_user_input_from_connector_config_specification": { - "type": "object", - "additionalProperties": false, - "properties": { - "tenant_id": { - "type": "string", - "path_in_connector_config": ["credentials", "tenant_id"] - } - } - } - } - } -} diff --git a/airbyte-integrations/connectors/source-microsoft-teams/unit_tests/unit_test.py b/airbyte-integrations/connectors/source-microsoft-teams/unit_tests/unit_test.py deleted file mode 100644 index 219ae0142c724..0000000000000 --- a/airbyte-integrations/connectors/source-microsoft-teams/unit_tests/unit_test.py +++ /dev/null @@ -1,7 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -def test_example_method(): - assert True diff --git a/airbyte-integrations/connectors/source-mixpanel/README.md b/airbyte-integrations/connectors/source-mixpanel/README.md index 14431eab38bab..98574c7aaf0ca 100644 --- a/airbyte-integrations/connectors/source-mixpanel/README.md +++ b/airbyte-integrations/connectors/source-mixpanel/README.md @@ -30,7 +30,7 @@ See `sample_files/sample_config.json` for a sample config file. poetry run source-mixpanel spec poetry run source-mixpanel check --config secrets/config.json poetry run source-mixpanel discover --config secrets/config.json -poetry run source-mixpanel read --config secrets/config.json --catalog sample_files/configured_catalog.json +poetry run source-mixpanel read --config secrets/config.json --catalog integration_tests/configured_catalog.json ``` ### Running unit tests diff --git a/airbyte-integrations/connectors/source-mixpanel/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-mixpanel/integration_tests/expected_records.jsonl index 3b576d1a580f4..4b6b4f8787d8d 100644 --- a/airbyte-integrations/connectors/source-mixpanel/integration_tests/expected_records.jsonl +++ b/airbyte-integrations/connectors/source-mixpanel/integration_tests/expected_records.jsonl @@ -2,11 +2,11 @@ {"stream": "engage", "data": {"distinct_id": "123@gmail.com", "email": "123@gmail.com", "name": "123", "123": "123456", "last_seen": "2023-01-01T00:00:00", "how are you": "just fine"}, "emitted_at": 1695642956746} {"stream": "engage", "data": {"distinct_id": "integration-test@airbyte.io", "name": "Integration Test1", "test": "test", "email": "integration-test@airbyte.io", "last_seen": "2023-01-01T00:00:00"}, "emitted_at": 1695642956748} {"stream": "engage", "data": {"distinct_id": "integration-test.db4415.mp-service-account", "name": "test", "test": "test", "last_seen": "2023-01-01T00:00:00"}, "emitted_at": 1695642956749} -{"stream": "funnels", "data": {"funnel_id": 36152117, "name": "test", "date": "2024-02-26", "steps": [{"count": 0, "avg_time": null, "avg_time_from_start": null, "event": "Purchase", "goal": "Purchase", "step_label": "Purchase", "overall_conv_ratio": 1, "step_conv_ratio": 1}, {"count": 0, "avg_time": null, "avg_time_from_start": null, "event": "$custom_event:1305068", "goal": "$custom_event:1305068", "step_label": "111", "custom_event": true, "custom_event_id": 1305068, "overall_conv_ratio": 0, "step_conv_ratio": 0}], "analysis": {"completion": 0, "starting_amount": 0, "steps": 2, "worst": 1}}, "emitted_at": 1709117161812} -{"stream": "funnels", "data": {"funnel_id": 36152117, "name": "test", "date": "2024-02-27", "steps": [{"count": 0, "avg_time": null, "avg_time_from_start": null, "event": "Purchase", "goal": "Purchase", "step_label": "Purchase", "overall_conv_ratio": 1, "step_conv_ratio": 1}, {"count": 0, "avg_time": null, "avg_time_from_start": null, "event": "$custom_event:1305068", "goal": "$custom_event:1305068", "step_label": "111", "custom_event": true, "custom_event_id": 1305068, "overall_conv_ratio": 0, "step_conv_ratio": 0}], "analysis": {"completion": 0, "starting_amount": 0, "steps": 2, "worst": 1}}, "emitted_at": 1709117161812} -{"stream": "funnels", "data": {"funnel_id": 36152117, "name": "test", "date": "2024-02-28", "steps": [{"count": 0, "avg_time": null, "avg_time_from_start": null, "event": "Purchase", "goal": "Purchase", "step_label": "Purchase", "overall_conv_ratio": 1, "step_conv_ratio": 1}, {"count": 0, "avg_time": null, "avg_time_from_start": null, "event": "$custom_event:1305068", "goal": "$custom_event:1305068", "step_label": "111", "custom_event": true, "custom_event_id": 1305068, "overall_conv_ratio": 0, "step_conv_ratio": 0}], "analysis": {"completion": 0, "starting_amount": 0, "steps": 2, "worst": 1}}, "emitted_at": 1709117161812} -{"stream": "revenue", "data": {"date": "2024-02-26", "amount": 0.0, "count": 3, "paid_count": 0}, "emitted_at": 1695644343316} -{"stream": "revenue", "data": {"date": "2024-02-27", "amount": 0.0, "count": 3, "paid_count": 0}, "emitted_at": 1695644343317} -{"stream": "revenue", "data": {"date": "2024-02-28", "amount": 0.0, "count": 3, "paid_count": 0}, "emitted_at": 1695644343317} +{"stream": "funnels", "data": {"funnel_id": 36152117, "name": "test", "date": "2023-01-21", "steps": [{"count": 0, "avg_time": null, "avg_time_from_start": null, "event": "Purchase", "goal": "Purchase", "step_label": "Purchase", "overall_conv_ratio": 1, "step_conv_ratio": 1}, {"count": 0, "avg_time": null, "avg_time_from_start": null, "event": "$custom_event:1305068", "goal": "$custom_event:1305068", "step_label": "111", "custom_event": true, "custom_event_id": 1305068, "overall_conv_ratio": 0, "step_conv_ratio": 0}], "analysis": {"completion": 0, "starting_amount": 0, "steps": 2, "worst": 1}}, "emitted_at": 1712914793332} +{"stream": "funnels", "data": {"funnel_id": 36152117, "name": "test", "date": "2023-01-22", "steps": [{"count": 0, "avg_time": null, "avg_time_from_start": null, "event": "Purchase", "goal": "Purchase", "step_label": "Purchase", "overall_conv_ratio": 1, "step_conv_ratio": 1}, {"count": 0, "avg_time": null, "avg_time_from_start": null, "event": "$custom_event:1305068", "goal": "$custom_event:1305068", "step_label": "111", "custom_event": true, "custom_event_id": 1305068, "overall_conv_ratio": 0, "step_conv_ratio": 0}], "analysis": {"completion": 0, "starting_amount": 0, "steps": 2, "worst": 1}}, "emitted_at": 1712914793334} +{"stream": "funnels", "data": {"funnel_id": 36152117, "name": "test", "date": "2023-01-23", "steps": [{"count": 0, "avg_time": null, "avg_time_from_start": null, "event": "Purchase", "goal": "Purchase", "step_label": "Purchase", "overall_conv_ratio": 1, "step_conv_ratio": 1}, {"count": 0, "avg_time": null, "avg_time_from_start": null, "event": "$custom_event:1305068", "goal": "$custom_event:1305068", "step_label": "111", "custom_event": true, "custom_event_id": 1305068, "overall_conv_ratio": 0, "step_conv_ratio": 0}], "analysis": {"completion": 0, "starting_amount": 0, "steps": 2, "worst": 1}}, "emitted_at": 1712914793335} +{"stream": "revenue", "data": {"date": "2023-01-21", "amount": 0.0, "count": 3, "paid_count": 0}, "emitted_at": 1712914801991} +{"stream": "revenue", "data": {"date": "2023-01-22", "amount": 0.0, "count": 3, "paid_count": 0}, "emitted_at": 1712914801992} +{"stream": "revenue", "data": {"date": "2023-01-23", "amount": 0.0, "count": 3, "paid_count": 0}, "emitted_at": 1712914801992} {"stream": "cohort_members", "data": {"distinct_id": "integration-test@airbyte.io", "name": "Integration Test1", "test": "test", "email": "integration-test@airbyte.io", "last_seen": "2023-01-01T00:00:00", "cohort_id": 1478097}, "emitted_at": 1695644214153} {"stream": "cohort_members", "data": {"distinct_id": "integration-test.db4415.mp-service-account", "name": "test", "test": "test", "last_seen": "2023-01-01T00:00:00", "cohort_id": 1478097}, "emitted_at": 1695644214154} diff --git a/airbyte-integrations/connectors/source-mixpanel/metadata.yaml b/airbyte-integrations/connectors/source-mixpanel/metadata.yaml index cf59460ef035c..d4714ba5eafe1 100644 --- a/airbyte-integrations/connectors/source-mixpanel/metadata.yaml +++ b/airbyte-integrations/connectors/source-mixpanel/metadata.yaml @@ -11,12 +11,13 @@ data: connectorSubtype: api connectorType: source definitionId: 12928b32-bf0a-4f1e-964f-07e12e37153a - dockerImageTag: 2.1.0 + dockerImageTag: 2.2.0 dockerRepository: airbyte/source-mixpanel documentationUrl: https://docs.airbyte.com/integrations/sources/mixpanel githubIssueLabel: source-mixpanel icon: mixpanel.svg license: MIT + maxSecondsBetweenMessages: 3600 name: Mixpanel remoteRegistries: pypi: diff --git a/airbyte-integrations/connectors/source-mixpanel/poetry.lock b/airbyte-integrations/connectors/source-mixpanel/poetry.lock index 7941e3a9b3df5..f2da87fe78351 100644 --- a/airbyte-integrations/connectors/source-mixpanel/poetry.lock +++ b/airbyte-integrations/connectors/source-mixpanel/poetry.lock @@ -1,14 +1,14 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.58.8" +version = "0.72.1" description = "A framework for writing Airbyte Connectors." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte-cdk-0.58.8.tar.gz", hash = "sha256:80cfad673302802e0f5d485879f1bd2f3679a4e3b12b2af42bd7bb37a3991a71"}, - {file = "airbyte_cdk-0.58.8-py3-none-any.whl", hash = "sha256:5b0b19745e96ba3f20683c48530d58a00be48361dfa34ec3c38cef8da03ba330"}, + {file = "airbyte-cdk-0.72.1.tar.gz", hash = "sha256:1dbd0a11f3784cfdd5afa9f40315c9a6123e803be91f9f861642a78e7ee14cd9"}, + {file = "airbyte_cdk-0.72.1-py3-none-any.whl", hash = "sha256:849077805442286de99f589ecba4be82491a3d9d3f516ce1a8b0cbaf303db9a4"}, ] [package.dependencies] @@ -32,8 +32,8 @@ requests-cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] @@ -104,13 +104,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -467,13 +467,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -702,13 +702,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -750,7 +750,6 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -808,13 +807,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -826,15 +825,15 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" @@ -857,19 +856,19 @@ test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "tes [[package]] name = "setuptools" -version = "69.1.0" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -895,13 +894,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -920,13 +919,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1031,4 +1030,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "b9f1312ff855d2ea6c2f6c7a329923044ad6cd1b88c9c3de3b49736510b45be6" +content-hash = "dedee3fe65d06e7ceb8403980b7cb1fadb463183c7c25b2cda747e60bcd7be03" diff --git a/airbyte-integrations/connectors/source-mixpanel/pyproject.toml b/airbyte-integrations/connectors/source-mixpanel/pyproject.toml index bb4f7f27ce3f7..4734e8bcbadbe 100644 --- a/airbyte-integrations/connectors/source-mixpanel/pyproject.toml +++ b/airbyte-integrations/connectors/source-mixpanel/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "2.1.0" +version = "2.2.0" name = "source-mixpanel" description = "Source implementation for Mixpanel." authors = [ "Airbyte ",] @@ -17,7 +17,7 @@ include = "source_mixpanel" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "==0.58.8" +airbyte-cdk = "^0" [tool.poetry.scripts] source-mixpanel = "source_mixpanel.run:run" diff --git a/airbyte-integrations/connectors/source-monday/acceptance-test-config.yml b/airbyte-integrations/connectors/source-monday/acceptance-test-config.yml index 12b53f7ebca0d..cf9f0f482c6d6 100644 --- a/airbyte-integrations/connectors/source-monday/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-monday/acceptance-test-config.yml @@ -35,6 +35,7 @@ acceptance_tests: expect_records: path: "integration_tests/expected_records.jsonl" exact_order: no + validate_state_messages: False empty_streams: - name: teams bypass_reason: "The stream has no test data and tested with integration tests" @@ -42,6 +43,7 @@ acceptance_tests: expect_records: path: "integration_tests/expected_records.jsonl" exact_order: no + validate_state_messages: False empty_streams: - name: teams bypass_reason: "The stream has no test data and tested with integration tests" diff --git a/airbyte-integrations/connectors/source-monday/metadata.yaml b/airbyte-integrations/connectors/source-monday/metadata.yaml index 1586899f7deaa..24e32375ba0ce 100644 --- a/airbyte-integrations/connectors/source-monday/metadata.yaml +++ b/airbyte-integrations/connectors/source-monday/metadata.yaml @@ -10,7 +10,7 @@ data: connectorSubtype: api connectorType: source definitionId: 80a54ea2-9959-4040-aac1-eee42423ec9b - dockerImageTag: 2.0.4 + dockerImageTag: 2.1.1 releases: breakingChanges: 2.0.0: @@ -33,6 +33,7 @@ data: githubIssueLabel: source-monday icon: monday.svg license: MIT + maxSecondsBetweenMessages: 60 name: Monday remoteRegistries: pypi: diff --git a/airbyte-integrations/connectors/source-monday/poetry.lock b/airbyte-integrations/connectors/source-monday/poetry.lock index b6f4aa5d14b00..0d28dc06353e8 100644 --- a/airbyte-integrations/connectors/source-monday/poetry.lock +++ b/airbyte-integrations/connectors/source-monday/poetry.lock @@ -2,39 +2,38 @@ [[package]] name = "airbyte-cdk" -version = "0.62.0" +version = "0.78.6" description = "A framework for writing Airbyte Connectors." optional = false -python-versions = ">=3.8" +python-versions = "<4.0,>=3.9" files = [ - {file = "airbyte-cdk-0.62.0.tar.gz", hash = "sha256:622f56bd7101493a74f11c33a45a31c251032333989996f137cac8370873c614"}, - {file = "airbyte_cdk-0.62.0-py3-none-any.whl", hash = "sha256:b21330a566b33dbdddde33243eb9855f086ad4272e3585ca626be1225451a3b8"}, + {file = "airbyte_cdk-0.78.6-py3-none-any.whl", hash = "sha256:e5f44c6da6d5b5d6f3f6a7f41a3f4a5e2dfc6fefb4c6823af6302c34c6fb4a87"}, + {file = "airbyte_cdk-0.78.6.tar.gz", hash = "sha256:0178f3cefa705f600d51f09e1313024a89cd1c99f2f1f796e8e0181d8e02ad2f"}, ] [package.dependencies] airbyte-protocol-models = "0.5.1" backoff = "*" cachetools = "*" -Deprecated = ">=1.2,<2.0" +Deprecated = ">=1.2,<1.3" dpath = ">=2.0.1,<2.1.0" genson = "1.2.2" isodate = ">=0.6.1,<0.7.0" Jinja2 = ">=3.1.2,<3.2.0" -jsonref = ">=0.2,<1.0" +jsonref = ">=0.2,<0.3" jsonschema = ">=3.2.0,<3.3.0" pendulum = "<3.0.0" pydantic = ">=1.10.8,<2.0.0" pyrate-limiter = ">=3.1.0,<3.2.0" python-dateutil = "*" -PyYAML = ">=6.0.1" +PyYAML = ">=6.0.1,<7.0.0" requests = "*" -requests-cache = "*" +requests_cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] [[package]] @@ -104,13 +103,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -366,13 +365,13 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jsonref" -version = "0.3.0" -description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." +version = "0.2" +description = "An implementation of JSON Reference for Python" optional = false -python-versions = ">=3.3,<4.0" +python-versions = "*" files = [ - {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, - {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, ] [[package]] @@ -467,13 +466,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -685,30 +684,30 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xm [[package]] name = "pytest-mock" -version = "3.12.0" +version = "3.14.0" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, - {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, ] [package.dependencies] -pytest = ">=5.0" +pytest = ">=6.2.5" [package.extras] dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -808,13 +807,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -826,50 +825,48 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" -version = "1.11.0" +version = "1.12.1" description = "Mock out responses from the requests package" optional = false -python-versions = "*" +python-versions = ">=3.5" files = [ - {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"}, - {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"}, + {file = "requests-mock-1.12.1.tar.gz", hash = "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401"}, + {file = "requests_mock-1.12.1-py2.py3-none-any.whl", hash = "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563"}, ] [package.dependencies] -requests = ">=2.3,<3" -six = "*" +requests = ">=2.22,<3" [package.extras] fixture = ["fixtures"] -test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] [[package]] name = "setuptools" -version = "69.1.0" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -895,13 +892,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -920,13 +917,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1031,4 +1028,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "1ba7362086cf24723e7be8cf2bc3ea3414137e3e56c9721b6593a0b8c89d269e" +content-hash = "25d79195c052c9654e64e6cd73809188b3aa16bd228841f214ff871a895c9c6c" diff --git a/airbyte-integrations/connectors/source-monday/pyproject.toml b/airbyte-integrations/connectors/source-monday/pyproject.toml index 7a6e9d54a01f9..fa6a0ee1d759a 100644 --- a/airbyte-integrations/connectors/source-monday/pyproject.toml +++ b/airbyte-integrations/connectors/source-monday/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "2.0.4" +version = "2.1.1" name = "source-monday" description = "Source implementation for Monday." authors = [ "Airbyte ",] @@ -17,7 +17,7 @@ include = "source_monday" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "==0.62.0" +airbyte-cdk = "^0" [tool.poetry.scripts] source-monday = "source_monday.run:run" diff --git a/airbyte-integrations/connectors/source-monday/source_monday/components.py b/airbyte-integrations/connectors/source-monday/source_monday/components.py index fde4f2f3b9584..802d23aacc811 100644 --- a/airbyte-integrations/connectors/source-monday/source_monday/components.py +++ b/airbyte-integrations/connectors/source-monday/source_monday/components.py @@ -82,7 +82,7 @@ def close_slice(self, stream_slice: StreamSlice, most_recent_record: Optional[Re self._state[self.cursor_field.eval(self.config)] = latest_record[self.cursor_field.eval(self.config)] def stream_slices(self) -> Iterable[Mapping[str, Any]]: - yield {} + yield StreamSlice(partition={}, cursor_slice={}) def should_be_synced(self, record: Record) -> bool: """ @@ -170,7 +170,7 @@ def read_parent_stream( # check if state is empty -> if not stream_state.get(self.parent_cursor_field): # yield empty slice for complete fetch of items stream - yield {} + yield StreamSlice(partition={}, cursor_slice={}) return all_ids = set() @@ -200,11 +200,11 @@ def read_parent_stream( # yield slice with desired number of ids if self.nested_items_per_page == len(slice_ids): - yield {self.substream_slice_field: slice_ids} + yield StreamSlice(partition={self.substream_slice_field: slice_ids}, cursor_slice={}) slice_ids = list() # yield leftover ids if any left if slice_ids: - yield {self.substream_slice_field: slice_ids} + yield StreamSlice(partition={self.substream_slice_field: slice_ids}, cursor_slice={}) # If the parent slice contains no records if empty_parent_slice: diff --git a/airbyte-integrations/connectors/source-monday/source_monday/manifest.yaml b/airbyte-integrations/connectors/source-monday/source_monday/manifest.yaml index 658c635cf2061..b41e94e89032c 100644 --- a/airbyte-integrations/connectors/source-monday/source_monday/manifest.yaml +++ b/airbyte-integrations/connectors/source-monday/source_monday/manifest.yaml @@ -28,6 +28,8 @@ definitions: response_filters: - predicate: "{{ 'error_code' in response and response['error_code'] == 'ComplexityException' }}" action: RETRY + - predicate: "{{ 'error_code' in response and response['error_code'] == 'complexityBudgetExhausted' }}" + action: RETRY backoff_strategies: - type: ConstantBackoffStrategy backoff_time_in_seconds: 60 diff --git a/airbyte-integrations/connectors/source-monday/unit_tests/integrations/monday_responses/error_response_builder.py b/airbyte-integrations/connectors/source-monday/unit_tests/integrations/monday_responses/error_response_builder.py index a746a8d68a059..779d64d80af7c 100644 --- a/airbyte-integrations/connectors/source-monday/unit_tests/integrations/monday_responses/error_response_builder.py +++ b/airbyte-integrations/connectors/source-monday/unit_tests/integrations/monday_responses/error_response_builder.py @@ -1,6 +1,7 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. import json +from typing import Optional from airbyte_cdk.test.mock_http import HttpResponse from airbyte_cdk.test.mock_http.response_builder import find_template @@ -14,5 +15,8 @@ def __init__(self, status_code: int): def response_with_status(cls, status_code) -> "ErrorResponseBuilder": return cls(status_code) - def build(self) -> HttpResponse: - return HttpResponse(json.dumps(find_template(str(self._status_code), __file__)), self._status_code) + def build(self, file_path: Optional[str] = None) -> HttpResponse: + if not file_path: + return HttpResponse(json.dumps(find_template(str(self._status_code), __file__)), self._status_code) + return HttpResponse(json.dumps(find_template(str(file_path), __file__)), self._status_code) + diff --git a/airbyte-integrations/connectors/source-monday/unit_tests/integrations/test_teams_stream.py b/airbyte-integrations/connectors/source-monday/unit_tests/integrations/test_teams_stream.py index 4e91ea4dca9f5..fab99700ad3d0 100644 --- a/airbyte-integrations/connectors/source-monday/unit_tests/integrations/test_teams_stream.py +++ b/airbyte-integrations/connectors/source-monday/unit_tests/integrations/test_teams_stream.py @@ -44,27 +44,33 @@ def test_given_retryable_error_and_one_page_when_read_teams_then_return_records( """ A full refresh sync without pagination completes successfully after one retry """ - api_token_authenticator = self.get_authenticator(self._config) - - http_mocker.get( - TeamsRequestBuilder.teams_endpoint(api_token_authenticator).build(), - [ - ErrorResponseBuilder.response_with_status(200).build(), - TeamsResponseBuilder.teams_response().with_record(TeamsRecordBuilder.teams_record()).build(), - ], - ) - - with patch("time.sleep", return_value=None): - output = read_stream("teams", SyncMode.full_refresh, self._config) - - assert len(output.records) == 1 - - error_logs = [ - error - for error in get_log_messages_by_log_level(output.logs, LogLevel.INFO) - if f'Response Code: 200, Response Text: {json.dumps({"error_code": "ComplexityException", "status_code": 200})}' in error + test_cases = [ + ("200_ComplexityException", "ComplexityException"), + ("200_complexityBudgetExhausted", "complexityBudgetExhausted"), ] - assert len(error_logs) == 1 + for test_values in test_cases: + response, error_code = test_values[0], test_values[1] + api_token_authenticator = self.get_authenticator(self._config) + + http_mocker.get( + TeamsRequestBuilder.teams_endpoint(api_token_authenticator).build(), + [ + ErrorResponseBuilder.response_with_status(200).build(response), + TeamsResponseBuilder.teams_response().with_record(TeamsRecordBuilder.teams_record()).build(), + ], + ) + + with patch("time.sleep", return_value=None): + output = read_stream("teams", SyncMode.full_refresh, self._config) + + assert len(output.records) == 1 + + error_logs = [ + error + for error in get_log_messages_by_log_level(output.logs, LogLevel.INFO) + if f'Response Code: 200, Response Text: {json.dumps({"error_code": error_code, "status_code": 200})}' in error + ] + assert len(error_logs) == 1 @HttpMocker() def test_given_retryable_error_when_read_teams_then_stop_syncing(self, http_mocker): @@ -87,7 +93,7 @@ def test_given_retryable_error_when_read_teams_then_stop_syncing(self, http_mock for error in get_log_messages_by_log_level(output.logs, LogLevel.INFO) if f'Response Code: 200, Response Text: {json.dumps({"error_code": "ComplexityException", "status_code": 200})}' in error ] - assert len(error_logs) == 5 + assert len(error_logs) == 6 @HttpMocker() def test_given_retryable_500_error_when_read_teams_then_stop_syncing(self, http_mocker): @@ -110,7 +116,7 @@ def test_given_retryable_500_error_when_read_teams_then_stop_syncing(self, http_ for error in get_log_messages_by_log_level(output.logs, LogLevel.INFO) if f'Response Code: 500, Response Text: {json.dumps({"error_message": "Internal server error", "status_code": 500})}' in error ] - assert len(error_logs) == 5 + assert len(error_logs) == 6 @HttpMocker() def test_given_403_error_when_read_teams_then_ignore_the_stream(self, http_mocker): diff --git a/airbyte-integrations/connectors/source-monday/unit_tests/resource/http/response/200_ComplexityException.json b/airbyte-integrations/connectors/source-monday/unit_tests/resource/http/response/200_ComplexityException.json new file mode 100644 index 0000000000000..825533274645c --- /dev/null +++ b/airbyte-integrations/connectors/source-monday/unit_tests/resource/http/response/200_ComplexityException.json @@ -0,0 +1,4 @@ +{ + "error_code": "ComplexityException", + "status_code": 200 +} diff --git a/airbyte-integrations/connectors/source-monday/unit_tests/resource/http/response/200_complexityBudgetExhausted.json b/airbyte-integrations/connectors/source-monday/unit_tests/resource/http/response/200_complexityBudgetExhausted.json new file mode 100644 index 0000000000000..6fa16a3f40eee --- /dev/null +++ b/airbyte-integrations/connectors/source-monday/unit_tests/resource/http/response/200_complexityBudgetExhausted.json @@ -0,0 +1,4 @@ +{ + "error_code": "complexityBudgetExhausted", + "status_code": 200 +} diff --git a/airbyte-integrations/connectors/source-monday/unit_tests/test_item_pagination_strategy.py b/airbyte-integrations/connectors/source-monday/unit_tests/test_item_pagination_strategy.py index fc2fcfc48221a..b7cefbef6aea4 100644 --- a/airbyte-integrations/connectors/source-monday/unit_tests/test_item_pagination_strategy.py +++ b/airbyte-integrations/connectors/source-monday/unit_tests/test_item_pagination_strategy.py @@ -33,6 +33,7 @@ ) def test_item_pagination_strategy(response_json, last_records, expected): strategy = ItemPaginationStrategy( + config={}, page_size=1, parameters={"items_per_page": 1}, ) @@ -73,6 +74,7 @@ def test_item_pagination_strategy(response_json, last_records, expected): ) def test_item_cursor_pagination_strategy(response_json, last_records, expected): strategy = ItemCursorPaginationStrategy( + config={}, page_size=1, parameters={"items_per_page": 1}, ) diff --git a/airbyte-integrations/connectors/source-mongodb-v2/build.gradle b/airbyte-integrations/connectors/source-mongodb-v2/build.gradle index a04fc19cbecd1..8b27a61300b87 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/build.gradle +++ b/airbyte-integrations/connectors/source-mongodb-v2/build.gradle @@ -1,10 +1,9 @@ plugins { id 'airbyte-java-connector' - id 'org.jetbrains.kotlin.jvm' version '1.9.22' } airbyteJavaConnector { - cdkVersionRequired = '0.23.8' + cdkVersionRequired = '0.30.4' features = ['db-sources', 'datastore-mongo'] useLocalCdk = false } diff --git a/airbyte-integrations/connectors/source-mongodb-v2/gradle.properties b/airbyte-integrations/connectors/source-mongodb-v2/gradle.properties new file mode 100644 index 0000000000000..ddf001a304717 --- /dev/null +++ b/airbyte-integrations/connectors/source-mongodb-v2/gradle.properties @@ -0,0 +1 @@ +JunitMethodExecutionTimeout=2 m \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-mongodb-v2/metadata.yaml b/airbyte-integrations/connectors/source-mongodb-v2/metadata.yaml index 18a4f31c21529..2f23d76c9574d 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/metadata.yaml +++ b/airbyte-integrations/connectors/source-mongodb-v2/metadata.yaml @@ -2,15 +2,19 @@ data: ab_internal: ql: 200 sl: 200 + allowedHosts: + hosts: + - ${connection_string} connectorSubtype: database connectorType: source definitionId: b2e713cd-cc36-4c0a-b5bd-b47cb8a0561e - dockerImageTag: 1.2.16 + dockerImageTag: 1.3.4 dockerRepository: airbyte/source-mongodb-v2 documentationUrl: https://docs.airbyte.com/integrations/sources/mongodb-v2 githubIssueLabel: source-mongodb-v2 icon: mongodb.svg license: ELv2 + maxSecondsBetweenMessages: 7200 name: MongoDb registries: cloud: diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/InitialSnapshotHandler.java b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/InitialSnapshotHandler.java index 08321ded68538..4930fb43b352d 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/InitialSnapshotHandler.java +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/InitialSnapshotHandler.java @@ -6,11 +6,7 @@ import com.mongodb.client.MongoCollection; import com.mongodb.client.MongoDatabase; -import com.mongodb.client.model.Accumulators; -import com.mongodb.client.model.Aggregates; -import com.mongodb.client.model.Filters; -import com.mongodb.client.model.Projections; -import com.mongodb.client.model.Sorts; +import com.mongodb.client.model.*; import io.airbyte.cdk.integrations.source.relationaldb.state.SourceStateIterator; import io.airbyte.cdk.integrations.source.relationaldb.state.StateEmitFrequency; import io.airbyte.commons.exceptions.ConfigErrorException; @@ -22,16 +18,10 @@ import io.airbyte.protocol.models.v0.AirbyteMessage; import io.airbyte.protocol.models.v0.CatalogHelpers; import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.v0.SyncMode; import java.util.ArrayList; import java.util.List; import java.util.Optional; -import org.bson.BsonDocument; -import org.bson.BsonInt32; -import org.bson.BsonInt64; -import org.bson.BsonObjectId; -import org.bson.BsonString; -import org.bson.Document; +import org.bson.*; import org.bson.conversions.Bson; import org.bson.types.ObjectId; import org.slf4j.Logger; @@ -53,15 +43,11 @@ public List> getIterators( final List streams, final MongoDbStateManager stateManager, final MongoDatabase database, - final int checkpointInterval, - final boolean isEnforceSchema) { + final MongoDbSourceConfig config) { + final boolean isEnforceSchema = config.getEnforceSchema(); + final var checkpointInterval = config.getCheckpointInterval(); return streams .stream() - .peek(airbyteStream -> { - if (!airbyteStream.getSyncMode().equals(SyncMode.INCREMENTAL)) - LOGGER.warn("Stream {} configured with unsupported sync mode: {}", airbyteStream.getStream().getName(), airbyteStream.getSyncMode()); - }) - .filter(airbyteStream -> airbyteStream.getSyncMode().equals(SyncMode.INCREMENTAL)) .map(airbyteStream -> { final var collectionName = airbyteStream.getStream().getName(); final var collection = database.getCollection(collectionName); @@ -88,6 +74,9 @@ public List> getIterators( // "where _id > [last saved state] order by _id ASC". // If no state exists, it will create a query akin to "where 1=1 order by _id ASC" final Bson filter = existingState + // Full refresh streams that finished set their id to null + // This tells us to start over + .filter(state -> state.id() != null) .map(state -> Filters.gt(MongoConstants.ID_FIELD, switch (state.idType()) { case STRING -> new BsonString(state.id()); diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/MongoCatalogHelper.java b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/MongoCatalogHelper.java index 37f0c51dd1baa..bb395870efcd3 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/MongoCatalogHelper.java +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/MongoCatalogHelper.java @@ -35,7 +35,7 @@ public class MongoCatalogHelper { /** * The list of supported sync modes for a given stream. */ - public static final List SUPPORTED_SYNC_MODES = List.of(SyncMode.INCREMENTAL); + public static final List SUPPORTED_SYNC_MODES = List.of(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL); /** * Name of the property in the JSON representation of an Airbyte stream that contains the discovered diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/MongoDbSource.java b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/MongoDbSource.java index 39974e73cec7b..487252e7e3e14 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/MongoDbSource.java +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/MongoDbSource.java @@ -4,6 +4,8 @@ package io.airbyte.integrations.source.mongodb; +import static io.airbyte.integrations.source.mongodb.cdc.MongoDbCdcInitialSnapshotUtils.validateStateSyncMode; + import com.fasterxml.jackson.databind.JsonNode; import com.google.common.annotations.VisibleForTesting; import com.mongodb.MongoCommandException; @@ -19,9 +21,11 @@ import io.airbyte.commons.util.AutoCloseableIterators; import io.airbyte.integrations.source.mongodb.cdc.MongoDbCdcConnectorMetadataInjector; import io.airbyte.integrations.source.mongodb.cdc.MongoDbCdcInitializer; +import io.airbyte.integrations.source.mongodb.cdc.MongoDbCdcState; import io.airbyte.integrations.source.mongodb.state.MongoDbStateManager; import io.airbyte.protocol.models.v0.*; import java.time.Instant; +import java.util.ArrayList; import java.util.List; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -126,6 +130,7 @@ public AutoCloseableIterator read(final JsonNode config, final var stateManager = MongoDbStateManager.createStateManager(state, sourceConfig); if (catalog != null) { + validateStateSyncMode(stateManager, catalog.getStreams()); MongoUtil.checkSchemaModeMismatch(sourceConfig.getEnforceSchema(), stateManager.getCdcState() != null ? stateManager.getCdcState().schema_enforced() : sourceConfig.getEnforceSchema(), catalog); } @@ -133,12 +138,23 @@ public AutoCloseableIterator read(final JsonNode config, try { // WARNING: do not close the client here since it needs to be used by the iterator final MongoClient mongoClient = createMongoClient(sourceConfig); - try { - final var iteratorList = - cdcInitializer.createCdcIterators(mongoClient, cdcMetadataInjector, catalog, - stateManager, emittedAt, sourceConfig); - return AutoCloseableIterators.concatWithEagerClose(iteratorList, AirbyteTraceMessageUtility::emitStreamStatusTrace); + final List fullRefreshStreams = + catalog.getStreams().stream().filter(s -> s.getSyncMode() == SyncMode.FULL_REFRESH).toList(); + final List incrementalStreams = catalog.getStreams().stream().filter(s -> !fullRefreshStreams.contains(s)).toList(); + + List> iterators = new ArrayList<>(); + if (!fullRefreshStreams.isEmpty()) { + LOGGER.info("There are {} Full refresh streams", fullRefreshStreams.size()); + iterators.addAll(createFullRefreshIterators(sourceConfig, mongoClient, fullRefreshStreams, stateManager, emittedAt)); + } + + if (!incrementalStreams.isEmpty()) { + LOGGER.info("There are {} Incremental streams", incrementalStreams.size()); + iterators + .addAll(cdcInitializer.createCdcIterators(mongoClient, cdcMetadataInjector, incrementalStreams, stateManager, emittedAt, sourceConfig)); + } + return AutoCloseableIterators.concatWithEagerClose(iterators, AirbyteTraceMessageUtility::emitStreamStatusTrace); } catch (final Exception e) { mongoClient.close(); throw e; @@ -153,4 +169,22 @@ protected MongoClient createMongoClient(final MongoDbSourceConfig config) { return MongoConnectionUtils.createMongoClient(config); } + List> createFullRefreshIterators(final MongoDbSourceConfig sourceConfig, + final MongoClient mongoClient, + final List streams, + final MongoDbStateManager stateManager, + final Instant emmitedAt) { + final InitialSnapshotHandler initialSnapshotHandler = new InitialSnapshotHandler(); + if (stateManager.getCdcState() == null) { + stateManager.updateCdcState(new MongoDbCdcState(null, sourceConfig.getEnforceSchema())); + } + final List> fullRefreshIterators = initialSnapshotHandler.getIterators( + streams, + stateManager, + mongoClient.getDatabase(sourceConfig.getDatabaseName()), + sourceConfig); + + return fullRefreshIterators; + } + } diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcEventUtils.java b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcEventUtils.java index 1e9e296a51e5c..55245baf0a207 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcEventUtils.java +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcEventUtils.java @@ -248,6 +248,7 @@ private static ObjectNode readField(final BsonReader reader, case JAVASCRIPT -> o.put(fieldName, reader.readJavaScript()); case JAVASCRIPT_WITH_SCOPE -> readJavaScriptWithScope(o, reader, fieldName); case REGULAR_EXPRESSION -> o.put(fieldName, readRegularExpression(reader.readRegularExpression())); + case NULL -> readNull(o, reader, fieldName); default -> reader.skipValue(); } @@ -289,6 +290,11 @@ private static byte[] toByteArray(final BsonBinary value) { return value == null ? null : value.getData(); } + private static void readNull(final ObjectNode o, final BsonReader reader, final String fieldName) { + o.putNull(fieldName); + reader.readNull(); + } + private static void readJavaScriptWithScope(final ObjectNode o, final BsonReader reader, final String fieldName) { final var code = reader.readJavaScriptWithScope(); final var scope = readDocument(reader, (ObjectNode) Jsons.jsonNode(Collections.emptyMap()), Set.of("scope"), false); diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcInitialSnapshotUtils.java b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcInitialSnapshotUtils.java index 1e844f4949ca7..4387326396ce7 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcInitialSnapshotUtils.java +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcInitialSnapshotUtils.java @@ -9,10 +9,12 @@ import com.google.common.collect.Sets; import com.mongodb.client.MongoClient; import io.airbyte.cdk.integrations.base.AirbyteTraceMessageUtility; +import io.airbyte.commons.exceptions.ConfigErrorException; import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.source.mongodb.MongoUtil; import io.airbyte.integrations.source.mongodb.state.InitialSnapshotStatus; import io.airbyte.integrations.source.mongodb.state.MongoDbStateManager; +import io.airbyte.integrations.source.mongodb.state.MongoDbStreamState; import io.airbyte.protocol.models.v0.AirbyteEstimateTraceMessage; import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; @@ -38,6 +40,9 @@ public class MongoDbCdcInitialSnapshotUtils { private static final Logger LOGGER = LoggerFactory.getLogger(MongoDbCdcInitialSnapshotUtils.class); private static final Predicate SYNC_MODE_FILTER = c -> SyncMode.INCREMENTAL.equals(c.getSyncMode()); + private static final Map> syncModeToStatusValidationMap = Map.of( + SyncMode.INCREMENTAL, List.of(InitialSnapshotStatus.IN_PROGRESS, InitialSnapshotStatus.COMPLETE), + SyncMode.FULL_REFRESH, List.of(InitialSnapshotStatus.FULL_REFRESH)); /** * Returns the list of configured Airbyte streams that need to perform the initial snapshot portion @@ -130,4 +135,18 @@ private static void estimateInitialSnapshotSyncSize(final MongoClient mongoClien }); } + private static boolean isValidInitialSnapshotStatus(final SyncMode syncMode, final MongoDbStreamState state) { + return syncModeToStatusValidationMap.get(syncMode).contains(state.status()); + } + + public static void validateStateSyncMode(final MongoDbStateManager stateManager, final List streams) { + streams.forEach(stream -> { + final var existingState = stateManager.getStreamState(stream.getStream().getName(), stream.getStream().getNamespace()); + if (existingState.isPresent() && !isValidInitialSnapshotStatus(stream.getSyncMode(), existingState.get())) { + throw new ConfigErrorException("Stream " + stream.getStream().getName() + " is " + stream.getSyncMode() + " but the saved status " + + existingState.get().status() + " doesn't match. Please reset this stream"); + } + }); + } + } diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcInitializer.java b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcInitializer.java index 9760b0d4cacc5..0c7661bc4b945 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcInitializer.java +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcInitializer.java @@ -78,14 +78,15 @@ public MongoDbCdcInitializer() { public List> createCdcIterators( final MongoClient mongoClient, final MongoDbCdcConnectorMetadataInjector cdcMetadataInjector, - final ConfiguredAirbyteCatalog catalog, + final List streams, final MongoDbStateManager stateManager, final Instant emittedAt, final MongoDbSourceConfig config) { + ConfiguredAirbyteCatalog incrementalOnlyStreamsCatalog = new ConfiguredAirbyteCatalog().withStreams(streams); final Duration firstRecordWaitTime = Duration.ofSeconds(config.getInitialWaitingTimeSeconds()); // #35059: debezium heartbeats are not sent on the expected interval. this is - // a worksaround to allow making subsequent wait time configurable. + // a workaround to allow making subsequent wait time configurable. final Duration subsequentRecordWaitTime = firstRecordWaitTime; LOGGER.info("Subsequent cdc record wait time: {} seconds", subsequentRecordWaitTime); final int queueSize = MongoUtil.getDebeziumEventQueueSize(config); @@ -93,15 +94,17 @@ public List> createCdcIterators( final boolean isEnforceSchema = config.getEnforceSchema(); final Properties defaultDebeziumProperties = MongoDbCdcProperties.getDebeziumProperties(); logOplogInfo(mongoClient); - final BsonDocument initialResumeToken = MongoDbResumeTokenHelper.getMostRecentResumeToken(mongoClient, databaseName, catalog); + final BsonDocument initialResumeToken = + MongoDbResumeTokenHelper.getMostRecentResumeToken(mongoClient, databaseName, incrementalOnlyStreamsCatalog); final JsonNode initialDebeziumState = mongoDbDebeziumStateUtil.constructInitialDebeziumState(initialResumeToken, mongoClient, databaseName); - final MongoDbCdcState cdcState = (stateManager.getCdcState() == null || stateManager.getCdcState().state() == null) - ? new MongoDbCdcState(initialDebeziumState, isEnforceSchema) - : new MongoDbCdcState(Jsons.clone(stateManager.getCdcState().state()), stateManager.getCdcState().schema_enforced()); + final MongoDbCdcState cdcState = + (stateManager.getCdcState() == null || stateManager.getCdcState().state() == null || stateManager.getCdcState().state().isNull()) + ? new MongoDbCdcState(initialDebeziumState, isEnforceSchema) + : new MongoDbCdcState(Jsons.clone(stateManager.getCdcState().state()), stateManager.getCdcState().schema_enforced()); final Optional optSavedOffset = mongoDbDebeziumStateUtil.savedOffset( Jsons.clone(defaultDebeziumProperties), - catalog, + incrementalOnlyStreamsCatalog, cdcState.state(), config.getDatabaseConfig(), mongoClient); @@ -131,23 +134,26 @@ public List> createCdcIterators( } final MongoDbCdcState stateToBeUsed = - (!savedOffsetIsValid || stateManager.getCdcState() == null || stateManager.getCdcState().state() == null) - ? new MongoDbCdcState(initialDebeziumState, config.getEnforceSchema()) - : stateManager.getCdcState(); + (!savedOffsetIsValid || stateManager.getCdcState() == null || stateManager.getCdcState().state() == null + || stateManager.getCdcState().state().isNull()) + ? new MongoDbCdcState(initialDebeziumState, config.getEnforceSchema()) + : stateManager.getCdcState(); final List initialSnapshotStreams = - MongoDbCdcInitialSnapshotUtils.getStreamsForInitialSnapshot(mongoClient, stateManager, catalog, savedOffsetIsValid); + MongoDbCdcInitialSnapshotUtils.getStreamsForInitialSnapshot(mongoClient, stateManager, incrementalOnlyStreamsCatalog, savedOffsetIsValid); final InitialSnapshotHandler initialSnapshotHandler = new InitialSnapshotHandler(); final List> initialSnapshotIterators = initialSnapshotHandler.getIterators(initialSnapshotStreams, stateManager, mongoClient.getDatabase(databaseName), - config.getCheckpointInterval(), isEnforceSchema); + config); final AirbyteDebeziumHandler handler = new AirbyteDebeziumHandler<>(config.getDatabaseConfig(), new MongoDbCdcTargetPosition(initialResumeToken), false, firstRecordWaitTime, subsequentRecordWaitTime, queueSize, false); final MongoDbCdcStateHandler mongoDbCdcStateHandler = new MongoDbCdcStateHandler(stateManager); final MongoDbCdcSavedInfoFetcher cdcSavedInfoFetcher = new MongoDbCdcSavedInfoFetcher(stateToBeUsed); - final var propertiesManager = new MongoDbDebeziumPropertiesManager(defaultDebeziumProperties, config.getDatabaseConfig(), catalog); - final var eventConverter = new MongoDbDebeziumEventConverter(cdcMetadataInjector, catalog, emittedAt, config.getDatabaseConfig()); + final var propertiesManager = + new MongoDbDebeziumPropertiesManager(defaultDebeziumProperties, config.getDatabaseConfig(), incrementalOnlyStreamsCatalog); + final var eventConverter = + new MongoDbDebeziumEventConverter(cdcMetadataInjector, incrementalOnlyStreamsCatalog, emittedAt, config.getDatabaseConfig()); final Supplier> incrementalIteratorSupplier = () -> handler.getIncrementalIterators( propertiesManager, eventConverter, cdcSavedInfoFetcher, mongoDbCdcStateHandler); diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcTargetPosition.java b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcTargetPosition.java index c2d20f9bc4e98..bd114760c4281 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcTargetPosition.java +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcTargetPosition.java @@ -13,6 +13,7 @@ import io.debezium.connector.mongodb.ResumeTokens; import java.util.Map; import java.util.Objects; +import javax.annotation.Nullable; import org.bson.BsonDocument; import org.bson.BsonTimestamp; import org.slf4j.Logger; @@ -88,7 +89,7 @@ public boolean isEventAheadOffset(final Map offset, final Change } @Override - public boolean isSameOffset(final Map offsetA, final Map offsetB) { + public boolean isSameOffset(@Nullable final Map offsetA, @Nullable final Map offsetB) { if (offsetA == null || offsetA.size() != 1) { return false; } diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/state/InitialSnapshotStatus.java b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/state/InitialSnapshotStatus.java index 192ef6607e81a..44c4b8bc7adf5 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/state/InitialSnapshotStatus.java +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/state/InitialSnapshotStatus.java @@ -12,5 +12,9 @@ public enum InitialSnapshotStatus { IN_PROGRESS, - COMPLETE + COMPLETE, + // A Full Refresh stream state behaves like In Progress, + // but its value set to null when collection is fully read + // Rather than turning into Complete + FULL_REFRESH } diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/state/MongoDbStateManager.java b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/state/MongoDbStateManager.java index 47f7cd87b6578..2a23fc2e97102 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/state/MongoDbStateManager.java +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/state/MongoDbStateManager.java @@ -4,6 +4,10 @@ package io.airbyte.integrations.source.mongodb.state; +import static io.airbyte.integrations.source.mongodb.state.InitialSnapshotStatus.FULL_REFRESH; +import static io.airbyte.integrations.source.mongodb.state.InitialSnapshotStatus.IN_PROGRESS; +import static io.airbyte.protocol.models.v0.SyncMode.INCREMENTAL; + import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; @@ -16,16 +20,8 @@ import io.airbyte.integrations.source.mongodb.cdc.MongoDbCdcConnectorMetadataInjector; import io.airbyte.integrations.source.mongodb.cdc.MongoDbCdcEventUtils; import io.airbyte.integrations.source.mongodb.cdc.MongoDbCdcState; -import io.airbyte.protocol.models.v0.AirbyteGlobalState; -import io.airbyte.protocol.models.v0.AirbyteMessage; +import io.airbyte.protocol.models.v0.*; import io.airbyte.protocol.models.v0.AirbyteMessage.Type; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.AirbyteStreamState; -import io.airbyte.protocol.models.v0.CatalogHelpers; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.v0.StreamDescriptor; import java.time.Instant; import java.util.HashMap; import java.util.List; @@ -160,6 +156,12 @@ public void updateStreamState(final String streamName, final String streamNamesp pairToStreamState.put(airbyteStreamNameNamespacePair, streamState); } + public void deleteStreamState(final String streamName, final String streamNamespace) { + final AirbyteStreamNameNamespacePair airbyteStreamNameNamespacePair = new AirbyteStreamNameNamespacePair(streamName, streamNamespace); + LOGGER.debug("Deleting stream state for stream {}:{} ...", streamNamespace, streamName); + pairToStreamState.remove(airbyteStreamNameNamespacePair); + } + /** * Resets the state stored in this manager by overwriting the CDC state and clearing the stream * state. @@ -238,14 +240,16 @@ private boolean isValidStreamDescriptor(final StreamDescriptor streamDescriptor) @Override public AirbyteStateMessage generateStateMessageAtCheckpoint(final ConfiguredAirbyteStream stream) { final AirbyteStreamNameNamespacePair pair = new AirbyteStreamNameNamespacePair(stream.getStream().getName(), stream.getStream().getNamespace()); - + final var syncMode = stream.getSyncMode(); // Assuming we will always process at least 1 record message before sending out the state message. // shouldEmitStateMessage should guard this. var lastId = streamPairToLastIdMap.get(pair); if (lastId != null) { final var idType = IdType.findByJavaType(lastId.getClass().getSimpleName()) .orElseThrow(() -> new ConfigErrorException("Unsupported _id type " + lastId.getClass().getSimpleName())); - final var state = new MongoDbStreamState(lastId.toString(), InitialSnapshotStatus.IN_PROGRESS, idType); + final var state = new MongoDbStreamState(lastId.toString(), + syncMode == INCREMENTAL ? IN_PROGRESS : FULL_REFRESH, + idType); updateStreamState(stream.getStream().getName(), stream.getStream().getNamespace(), state); } return toState(); @@ -260,7 +264,7 @@ public AirbyteMessage processRecordMessage(final ConfiguredAirbyteStream stream, final var jsonNode = isEnforceSchema ? MongoDbCdcEventUtils.toJsonNode(document, fields) : MongoDbCdcEventUtils.toJsonNodeNoSchema(document); - var lastId = document.get(MongoConstants.ID_FIELD); + final var lastId = document.get(MongoConstants.ID_FIELD); final AirbyteStreamNameNamespacePair pair = new AirbyteStreamNameNamespacePair(stream.getStream().getName(), stream.getStream().getNamespace()); streamPairToLastIdMap.put(pair, lastId); @@ -270,7 +274,7 @@ public AirbyteMessage processRecordMessage(final ConfiguredAirbyteStream stream, .withStream(stream.getStream().getName()) .withNamespace(stream.getStream().getNamespace()) .withEmittedAt(emittedAt.toEpochMilli()) - .withData(injectMetadata(jsonNode))); + .withData((stream.getSyncMode() == INCREMENTAL) ? injectMetadata(jsonNode) : jsonNode)); } private JsonNode injectMetadata(final JsonNode jsonNode) { @@ -286,20 +290,32 @@ private JsonNode injectMetadata(final JsonNode jsonNode) { */ @Override public AirbyteStateMessage createFinalStateMessage(final ConfiguredAirbyteStream stream) { - final AirbyteStreamNameNamespacePair pair = new AirbyteStreamNameNamespacePair(stream.getStream().getName(), stream.getStream().getNamespace()); - if (!streamPairToLastIdMap.containsKey(pair)) { - var initialLastId = getStreamState(stream.getStream().getName(), stream.getStream().getNamespace()).map(MongoDbStreamState::id).orElse(null); - streamPairToLastIdMap.put(pair, initialLastId); - } - var lastId = streamPairToLastIdMap.get(pair); - if (lastId != null) { - LOGGER.debug("Emitting final state status for stream {}:{}...", stream.getStream().getNamespace(), stream.getStream().getName()); - final var finalStateStatus = InitialSnapshotStatus.COMPLETE; - final var idType = IdType.findByJavaType(lastId.getClass().getSimpleName()) - .orElseThrow(() -> new ConfigErrorException("Unsupported _id type " + lastId.getClass().getSimpleName())); - final var state = new MongoDbStreamState(lastId.toString(), finalStateStatus, idType); - - updateStreamState(stream.getStream().getName(), stream.getStream().getNamespace(), state); + if (stream.getSyncMode() == INCREMENTAL) { + final AirbyteStreamNameNamespacePair pair = new AirbyteStreamNameNamespacePair(stream.getStream().getName(), stream.getStream().getNamespace()); + if (!streamPairToLastIdMap.containsKey(pair)) { + var initialLastId = getStreamState(stream.getStream().getName(), stream.getStream().getNamespace()).map(MongoDbStreamState::id).orElse(null); + streamPairToLastIdMap.put(pair, initialLastId); + } + var lastId = streamPairToLastIdMap.get(pair); + if (lastId != null) { + LOGGER.debug("Emitting final state status for stream {}:{}...", stream.getStream().getNamespace(), stream.getStream().getName()); + final var finalStateStatus = InitialSnapshotStatus.COMPLETE; + final var idType = IdType.findByJavaType(lastId.getClass().getSimpleName()) + .orElseThrow(() -> new ConfigErrorException("Unsupported _id type " + lastId.getClass().getSimpleName())); + final var state = new MongoDbStreamState(lastId.toString(), finalStateStatus, idType); + + updateStreamState(stream.getStream().getName(), stream.getStream().getNamespace(), state); + } + } else { + // deleteStreamState(stream.getStream().getName(), stream.getStream().getNamespace()); + final AirbyteStreamNameNamespacePair pair = new AirbyteStreamNameNamespacePair(stream.getStream().getName(), stream.getStream().getNamespace()); + var lastId = streamPairToLastIdMap.get(pair); + if (lastId != null) { + final var idType = IdType.findByJavaType(lastId.getClass().getSimpleName()) + .orElseThrow(() -> new ConfigErrorException("Unsupported _id type " + lastId.getClass().getSimpleName())); + updateStreamState(stream.getStream().getName(), stream.getStream().getNamespace(), + new MongoDbStreamState(null, FULL_REFRESH, idType)); + } } return toState(); } diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/test-integration/java/io/airbyte/integrations/source/mongodb/MongoDbSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-mongodb-v2/src/test-integration/java/io/airbyte/integrations/source/mongodb/MongoDbSourceAcceptanceTest.java index c66d8c97c6c63..cca740b60b46d 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/test-integration/java/io/airbyte/integrations/source/mongodb/MongoDbSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/test-integration/java/io/airbyte/integrations/source/mongodb/MongoDbSourceAcceptanceTest.java @@ -9,7 +9,6 @@ import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -38,10 +37,12 @@ import io.airbyte.protocol.models.JsonSchemaType; import io.airbyte.protocol.models.v0.AirbyteGlobalState; import io.airbyte.protocol.models.v0.AirbyteMessage; +import io.airbyte.protocol.models.v0.AirbyteMessage.Type; import io.airbyte.protocol.models.v0.AirbyteRecordMessage; import io.airbyte.protocol.models.v0.AirbyteStateMessage; import io.airbyte.protocol.models.v0.AirbyteStream; import io.airbyte.protocol.models.v0.AirbyteStreamState; +import io.airbyte.protocol.models.v0.AirbyteTraceMessage; import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; import io.airbyte.protocol.models.v0.ConnectorSpecification; @@ -511,7 +512,13 @@ void testSyncShouldHandlePurgedLogsGracefully() throws Exception { final JsonNode state = Jsons.jsonNode(List.of(stateMessage)); // Re-run the sync to prove that a config error is thrown due to invalid resume token - assertThrows(Exception.class, () -> runRead(configuredCatalog, state)); + List messages1 = runRead(configuredCatalog, state); + List records = messages1.stream().filter(r -> r.getType() == Type.RECORD).toList(); + // In this sync, there should be no records expected - only error trace messages indicating that the + // offset is not valid. + assertEquals(0, records.size()); + List traceMessages = messages1.stream().filter(r -> r.getType() == Type.TRACE).toList(); + assertOplogErrorTracePresent(traceMessages); } @Test @@ -654,4 +661,12 @@ private void validateCdcEventRecordData(final AirbyteRecordMessage airbyteRecord } } + private void assertOplogErrorTracePresent(List traceMessages) { + final boolean oplogTracePresent = traceMessages + .stream() + .anyMatch(trace -> trace.getTrace().getType().equals(AirbyteTraceMessage.Type.ERROR) + && trace.getTrace().getError().getMessage().contains("Saved offset is not valid")); + assertTrue(oplogTracePresent); + } + } diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/InitialSnapshotHandlerTest.java b/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/InitialSnapshotHandlerTest.java index 3acc708372a06..d750f188c74a2 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/InitialSnapshotHandlerTest.java +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/InitialSnapshotHandlerTest.java @@ -26,6 +26,7 @@ import io.airbyte.commons.util.AutoCloseableIterator; import io.airbyte.integrations.source.mongodb.cdc.MongoDbDebeziumConstants; import io.airbyte.integrations.source.mongodb.state.IdType; +import io.airbyte.integrations.source.mongodb.state.InitialSnapshotStatus; import io.airbyte.integrations.source.mongodb.state.MongoDbStateManager; import io.airbyte.integrations.source.mongodb.state.MongoDbStreamState; import io.airbyte.protocol.models.Field; @@ -64,12 +65,15 @@ class InitialSnapshotHandlerTest { private static final String OBJECT_ID1_STRING = "64c0029d95ad260d69ef28a1"; private static final String OBJECT_ID2_STRING = "64c0029d95ad260d69ef28a2"; private static final String OBJECT_ID3_STRING = "64c0029d95ad260d69ef28a3"; + private static final String OBJECT_ID4_STRING = "64c0029d95ad260d69ef28a4"; + private static final String OBJECT_ID5_STRING = "64c0029d95ad260d69ef28a5"; + private static final String OBJECT_ID6_STRING = "64c0029d95ad260d69ef28a6"; private static final ObjectId OBJECT_ID1 = new ObjectId(OBJECT_ID1_STRING); private static final ObjectId OBJECT_ID2 = new ObjectId(OBJECT_ID2_STRING); private static final ObjectId OBJECT_ID3 = new ObjectId(OBJECT_ID3_STRING); - private static final ObjectId OBJECT_ID4 = new ObjectId("64c0029d95ad260d69ef28a4"); - private static final ObjectId OBJECT_ID5 = new ObjectId("64c0029d95ad260d69ef28a5"); - private static final ObjectId OBJECT_ID6 = new ObjectId("64c0029d95ad260d69ef28a6"); + private static final ObjectId OBJECT_ID4 = new ObjectId(OBJECT_ID4_STRING); + private static final ObjectId OBJECT_ID5 = new ObjectId(OBJECT_ID5_STRING); + private static final ObjectId OBJECT_ID6 = new ObjectId(OBJECT_ID6_STRING); private static final String NAME1 = "name1"; private static final String NAME2 = "name2"; @@ -160,13 +164,13 @@ void testGetIteratorsEmptyInitialState() { final MongoDbStateManager ogStateManager = MongoDbStateManager.createStateManager(null, CONFIG); final MongoDbStateManager stateManager = spy(ogStateManager); final List> iterators = - initialSnapshotHandler.getIterators(STREAMS, stateManager, mongoClient.getDatabase(DB_NAME), - MongoConstants.CHECKPOINT_INTERVAL, true); + initialSnapshotHandler.getIterators(STREAMS, stateManager, mongoClient.getDatabase(DB_NAME), CONFIG); - assertEquals(iterators.size(), 2, "Only two streams are configured as incremental, full refresh streams should be ignored"); + assertEquals(iterators.size(), 3); final AutoCloseableIterator collection1 = iterators.get(0); final AutoCloseableIterator collection2 = iterators.get(1); + final AutoCloseableIterator collection3 = iterators.get(2); // collection1 final AirbyteMessage collection1StreamMessage1 = collection1.next(); @@ -217,6 +221,20 @@ void testGetIteratorsEmptyInitialState() { assertEquals(Type.STATE, collection2SateMessage.getType(), "State message is expected after all records in a stream are emitted"); assertFalse(collection2.hasNext()); + + final AirbyteMessage collection3StreamMessage1 = collection3.next(); + assertEquals(Type.RECORD, collection3StreamMessage1.getType()); + assertEquals(COLLECTION3, collection3StreamMessage1.getRecord().getStream()); + assertEquals(OBJECT_ID6.toString(), collection3StreamMessage1.getRecord().getData().get(CURSOR_FIELD).asText()); + // Full refresh record have no cdc fields + assertTrue(collection3StreamMessage1.getRecord().getData().has(CURSOR_FIELD)); + assertFalse(collection3StreamMessage1.getRecord().getData().has(CDC_UPDATED_AT)); + assertFalse(collection3StreamMessage1.getRecord().getData().has(CDC_DELETED_AT)); + assertFalse(collection3StreamMessage1.getRecord().getData().has(CDC_DEFAULT_CURSOR)); + + final AirbyteMessage collection3SateMessage = collection3.next(); + assertEquals(Type.STATE, collection3SateMessage.getType(), "State message is expected after all records in a stream are emitted"); + } @Test @@ -234,19 +252,29 @@ void testGetIteratorsNonEmptyInitialState() { CURSOR_FIELD, OBJECT_ID3, NAME_FIELD, NAME3)))); + insertDocuments(COLLECTION3, List.of( + new Document(Map.of( + CURSOR_FIELD, OBJECT_ID4, + NAME_FIELD, NAME4)), + new Document(Map.of( + CURSOR_FIELD, OBJECT_ID5, + NAME_FIELD, NAME5)))); + final InitialSnapshotHandler initialSnapshotHandler = new InitialSnapshotHandler(); final MongoDbStateManager ogStateManager = MongoDbStateManager.createStateManager(null, CONFIG); final MongoDbStateManager stateManager = spy(ogStateManager); when(stateManager.getStreamState(COLLECTION1, NAMESPACE)) .thenReturn(Optional.of(new MongoDbStreamState(OBJECT_ID1_STRING, null, IdType.OBJECT_ID))); + when(stateManager.getStreamState(COLLECTION3, NAMESPACE)) + .thenReturn(Optional.of(new MongoDbStreamState(OBJECT_ID4_STRING, InitialSnapshotStatus.FULL_REFRESH, IdType.OBJECT_ID))); final List> iterators = - initialSnapshotHandler.getIterators(STREAMS, stateManager, mongoClient.getDatabase(DB_NAME), - MongoConstants.CHECKPOINT_INTERVAL, true); + initialSnapshotHandler.getIterators(STREAMS, stateManager, mongoClient.getDatabase(DB_NAME), CONFIG); - assertEquals(iterators.size(), 2, "Only two streams are configured as incremental, full refresh streams should be ignored"); + assertEquals(iterators.size(), 3); final AutoCloseableIterator collection1 = iterators.get(0); final AutoCloseableIterator collection2 = iterators.get(1); + final AutoCloseableIterator collection3 = iterators.get(2); // collection1, first document should be skipped final AirbyteMessage collection1StreamMessage1 = collection1.next(); @@ -274,6 +302,17 @@ void testGetIteratorsNonEmptyInitialState() { assertEquals(Type.STATE, collection2SateMessage.getType(), "State message is expected after all records in a stream are emitted"); assertFalse(collection2.hasNext()); + + // collection3 will skip the first document + final AirbyteMessage collection3StreamMessage1 = collection3.next(); + assertEquals(Type.RECORD, collection3StreamMessage1.getType()); + assertEquals(COLLECTION3, collection3StreamMessage1.getRecord().getStream()); + assertEquals(OBJECT_ID5.toString(), collection3StreamMessage1.getRecord().getData().get(CURSOR_FIELD).asText()); + assertEquals(NAME5, collection3StreamMessage1.getRecord().getData().get(NAME_FIELD).asText()); + + final AirbyteMessage collection3StateMessage = collection3.next(); + assertEquals(Type.STATE, collection3StateMessage.getType(), "State message is expected after all records in a stream are emitted"); + assertFalse(collection3.hasNext()); } @Test @@ -291,7 +330,7 @@ void testGetIteratorsThrowsExceptionWhenThereAreDifferentIdTypes() { final var thrown = assertThrows(ConfigErrorException.class, () -> initialSnapshotHandler.getIterators(STREAMS, stateManager, mongoClient.getDatabase(DB_NAME), - MongoConstants.CHECKPOINT_INTERVAL, true)); + /* MongoConstants.CHECKPOINT_INTERVAL, true */ CONFIG)); assertTrue(thrown.getMessage().contains("must be consistently typed")); } @@ -307,7 +346,7 @@ void testGetIteratorsThrowsExceptionWhenThereAreUnsupportedIdTypes() { final var thrown = assertThrows(ConfigErrorException.class, () -> initialSnapshotHandler.getIterators(STREAMS, stateManager, mongoClient.getDatabase(DB_NAME), - MongoConstants.CHECKPOINT_INTERVAL, true)); + /* MongoConstants.CHECKPOINT_INTERVAL, true */ CONFIG)); assertTrue(thrown.getMessage().contains("_id fields with the following types are currently supported")); } @@ -333,13 +372,13 @@ void testGetIteratorsWithOneEmptyCollection() { final MongoDbStateManager ogStateManager = MongoDbStateManager.createStateManager(null, CONFIG); final MongoDbStateManager stateManager = spy(ogStateManager); final List> iterators = - initialSnapshotHandler.getIterators(STREAMS, stateManager, mongoClient.getDatabase(DB_NAME), - MongoConstants.CHECKPOINT_INTERVAL, true); + initialSnapshotHandler.getIterators(STREAMS, stateManager, mongoClient.getDatabase(DB_NAME), CONFIG); - assertEquals(iterators.size(), 2, "Only two streams are configured as incremental, full refresh streams should be ignored"); + assertEquals(iterators.size(), 3); final AutoCloseableIterator collection1 = iterators.get(0); final AutoCloseableIterator collection2 = iterators.get(1); + final AutoCloseableIterator collection3 = iterators.get(2); // collection1 final AirbyteMessage collection1StreamMessage1 = collection1.next(); @@ -360,6 +399,12 @@ void testGetIteratorsWithOneEmptyCollection() { final AirbyteMessage collection2StateMessage = collection2.next(); assertEquals(Type.STATE, collection2StateMessage.getType(), "State message is expected after all records in a stream are emitted"); assertFalse(collection2.hasNext()); + + // collection3 will generate a final state. + + final AirbyteMessage collection3StateMessage = collection3.next(); + assertEquals(Type.STATE, collection3StateMessage.getType(), "State message is expected after all records in a stream are emitted"); + assertFalse(collection3.hasNext()); } @Test @@ -377,25 +422,31 @@ void testGetIteratorsWithInitialStateNonDefaultIdType() { CURSOR_FIELD, OBJECT_ID3_STRING, NAME_FIELD, NAME3)))); + insertDocuments(COLLECTION3, List.of( + new Document(Map.of( + CURSOR_FIELD, OBJECT_ID4_STRING, + NAME_FIELD, NAME4)))); + final InitialSnapshotHandler initialSnapshotHandler = new InitialSnapshotHandler(); final MongoDbStateManager ogStateManager = MongoDbStateManager.createStateManager(null, CONFIG); final MongoDbStateManager stateManager = spy(ogStateManager); when(stateManager.getStreamState(COLLECTION1, NAMESPACE)) .thenReturn(Optional.of(new MongoDbStreamState(OBJECT_ID1_STRING, null, IdType.STRING))); final List> iterators = - initialSnapshotHandler.getIterators(STREAMS, stateManager, mongoClient.getDatabase(DB_NAME), - MongoConstants.CHECKPOINT_INTERVAL, true); + initialSnapshotHandler.getIterators(STREAMS, stateManager, mongoClient.getDatabase(DB_NAME), CONFIG); - assertEquals(iterators.size(), 2, "Only two streams are configured as incremental, full refresh streams should be ignored"); + assertEquals(iterators.size(), 3); final AutoCloseableIterator collection1 = iterators.get(0); final AutoCloseableIterator collection2 = iterators.get(1); + final AutoCloseableIterator collection3 = iterators.get(2); // collection1, first document should be skipped final AirbyteMessage collection1StreamMessage1 = collection1.next(); System.out.println("message 1: " + collection1StreamMessage1); final AirbyteMessage collection2StreamMessage1 = collection2.next(); System.out.println("message 2: " + collection2StreamMessage1); + final AirbyteMessage collection3StreamMessage1 = collection3.next(); assertEquals(Type.RECORD, collection1StreamMessage1.getType()); assertEquals(COLLECTION1, collection1StreamMessage1.getRecord().getStream()); @@ -420,6 +471,16 @@ void testGetIteratorsWithInitialStateNonDefaultIdType() { assertEquals(Type.STATE, collection2SateMessage.getType(), "State message is expected after all records in a stream are emitted"); assertFalse(collection2.hasNext()); + + // collection3, no documents should be skipped + assertEquals(Type.RECORD, collection3StreamMessage1.getType()); + assertEquals(COLLECTION3, collection3StreamMessage1.getRecord().getStream()); + assertEquals(OBJECT_ID4.toString(), collection3StreamMessage1.getRecord().getData().get(CURSOR_FIELD).asText()); + + final AirbyteMessage collection3SateMessage = collection3.next(); + assertEquals(Type.STATE, collection3SateMessage.getType(), "State message is expected after all records in a stream are emitted"); + + assertFalse(collection3.hasNext()); } } diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/MongoDbSourceTest.java b/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/MongoDbSourceTest.java index 6b6f661ebb78b..76cf614d04d99 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/MongoDbSourceTest.java +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/MongoDbSourceTest.java @@ -5,34 +5,17 @@ package io.airbyte.integrations.source.mongodb; import static io.airbyte.integrations.source.mongodb.MongoCatalogHelper.DEFAULT_CURSOR_FIELD; -import static io.airbyte.integrations.source.mongodb.MongoConstants.DATABASE_CONFIG_CONFIGURATION_KEY; -import static io.airbyte.integrations.source.mongodb.MongoConstants.DEFAULT_DISCOVER_SAMPLE_SIZE; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; +import static io.airbyte.integrations.source.mongodb.MongoConstants.*; +import static org.junit.jupiter.api.Assertions.*; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyBoolean; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; +import static org.mockito.Mockito.*; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.JsonNode; import com.mongodb.MongoCredential; import com.mongodb.MongoSecurityException; -import com.mongodb.client.AggregateIterable; -import com.mongodb.client.ChangeStreamIterable; -import com.mongodb.client.MongoChangeStreamCursor; -import com.mongodb.client.MongoClient; -import com.mongodb.client.MongoCollection; -import com.mongodb.client.MongoCursor; -import com.mongodb.client.MongoDatabase; -import com.mongodb.client.MongoIterable; +import com.mongodb.client.*; import com.mongodb.connection.ClusterDescription; import com.mongodb.connection.ClusterType; import io.airbyte.cdk.integrations.debezium.internals.DebeziumEventConverter; @@ -45,11 +28,7 @@ import io.airbyte.protocol.models.v0.AirbyteStream; import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; import java.io.IOException; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Optional; +import java.util.*; import org.bson.BsonDocument; import org.bson.Document; import org.junit.jupiter.api.BeforeEach; @@ -60,6 +39,7 @@ class MongoDbSourceTest { private static final String DB_NAME = "airbyte_test"; private JsonNode airbyteSourceConfig; + private JsonNode airbyteSourceConfigWithoutSchema; private MongoDbSourceConfig sourceConfig; private MongoClient mongoClient; private MongoDbCdcInitializer cdcInitializer; @@ -68,6 +48,8 @@ class MongoDbSourceTest { @BeforeEach void setup() { airbyteSourceConfig = createConfiguration(Optional.empty(), Optional.empty(), true); + airbyteSourceConfigWithoutSchema = createConfiguration(Optional.empty(), Optional.empty(), false); + sourceConfig = new MongoDbSourceConfig(airbyteSourceConfig); mongoClient = mock(MongoClient.class); cdcInitializer = mock(MongoDbCdcInitializer.class); @@ -302,7 +284,7 @@ void testReadKeepsMongoClientOpen() { when(changeStreamIterable.cursor()).thenReturn(mongoChangeStreamCursor); when(mongoClient.watch(BsonDocument.class)).thenReturn(changeStreamIterable); when(cdcInitializer.createCdcIterators(any(), any(), any(), any(), any(), any())).thenReturn(Collections.emptyList()); - source.read(airbyteSourceConfig, null, null); + source.read(airbyteSourceConfigWithoutSchema, new ConfiguredAirbyteCatalog(), null); verify(mongoClient, never()).close(); } @@ -312,7 +294,7 @@ private static JsonNode createConfiguration(final Optional username, fin MongoConstants.CONNECTION_STRING_CONFIGURATION_KEY, "mongodb://localhost:27017/", MongoConstants.AUTH_SOURCE_CONFIGURATION_KEY, "admin", MongoConstants.DISCOVER_SAMPLE_SIZE_CONFIGURATION_KEY, DEFAULT_DISCOVER_SAMPLE_SIZE, - MongoConstants.SCHEMA_ENFORCED_CONFIGURATION_KEY, isSchemaEnforced); + SCHEMA_ENFORCED_CONFIGURATION_KEY, isSchemaEnforced); final Map config = new HashMap<>(baseConfig); username.ifPresent(u -> config.put(MongoConstants.USERNAME_CONFIGURATION_KEY, u)); diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/MongoDbStateManagerTest.java b/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/MongoDbStateManagerTest.java index d830fd53acab1..bf9919a3d4c59 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/MongoDbStateManagerTest.java +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/MongoDbStateManagerTest.java @@ -32,12 +32,15 @@ import java.time.Duration; import java.util.List; import java.util.Map; +import java.util.stream.Stream; import org.bson.Document; import org.bson.types.ObjectId; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; import org.mockito.Mock; import org.mockito.MockitoAnnotations; import org.mockito.invocation.InvocationOnMock; @@ -155,8 +158,9 @@ public Document answer(final InvocationOnMock invocation) { assertFalse(iter.hasNext(), "should have no more records"); } - @Test - void treatHasNextExceptionAsFalse() { + @ParameterizedTest + @MethodSource("provideCatalogArguments") + void treatHasNextExceptionAsFalse(final ConfiguredAirbyteCatalog catalog) { final var docs = docs(); // on the second hasNext call, throw an exception @@ -166,7 +170,7 @@ void treatHasNextExceptionAsFalse() { when(mongoCursor.next()).thenReturn(docs.get(0)); - final var stream = catalog().getStreams().stream().findFirst().orElseThrow(); + final var stream = catalog.getStreams().stream().findFirst().orElseThrow(); final var iter = new SourceStateIterator(mongoCursor, stream, stateManager, new StateEmitFrequency(CHECKPOINT_INTERVAL, MongoConstants.CHECKPOINT_DURATION)); @@ -207,7 +211,8 @@ void anInvalidIdFieldThrowsAnException() { assertThrows(ConfigErrorException.class, iter::hasNext); } - @Test + @ParameterizedTest + @MethodSource("provideCatalogArguments") void initialStateIsReturnedIfUnderlyingIteratorIsEmpty() { // underlying cursor is empty. when(mongoCursor.hasNext()).thenReturn(false); @@ -241,7 +246,8 @@ void initialStateIsReturnedIfUnderlyingIteratorIsEmpty() { assertFalse(iter.hasNext(), "should have no more records"); } - @Test + @ParameterizedTest + @MethodSource("provideCatalogArguments") void stateEmittedAfterDuration() throws InterruptedException { // force a 1.5s wait between messages when(mongoCursor.hasNext()) @@ -322,7 +328,8 @@ void stateEmittedAfterDuration() throws InterruptedException { assertFalse(iter.hasNext(), "should have no more records"); } - @Test + @ParameterizedTest + @MethodSource("provideCatalogArguments") void hasNextNoInitialStateAndNoMoreRecordsInCursor() { when(mongoCursor.hasNext()).thenReturn(false); final var stream = catalog().getStreams().stream().findFirst().orElseThrow(); @@ -335,7 +342,7 @@ void hasNextNoInitialStateAndNoMoreRecordsInCursor() { assertFalse(iter.hasNext()); } - private ConfiguredAirbyteCatalog catalog() { + private static ConfiguredAirbyteCatalog catalog() { return new ConfiguredAirbyteCatalog().withStreams(List.of( new ConfiguredAirbyteStream() .withSyncMode(SyncMode.INCREMENTAL) @@ -351,6 +358,92 @@ private ConfiguredAirbyteCatalog catalog() { .withDefaultCursorField(List.of("_id"))))); } + @Test + void happyPathFullRefresh() { + final var docs = docs(); + + when(mongoCursor.hasNext()).thenAnswer(new Answer() { + + private int count = 0; + + @Override + public Boolean answer(final InvocationOnMock invocation) { + count++; + // hasNext will be called for each doc plus for each state message + return count <= (docs.size() + (docs.size() % CHECKPOINT_INTERVAL)); + } + + }); + + when(mongoCursor.next()).thenAnswer(new Answer() { + + private int offset = 0; + + @Override + public Document answer(final InvocationOnMock invocation) { + final var doc = docs.get(offset); + offset++; + return doc; + } + + }); + + final var stream = catalogFullRefresh().getStreams().stream().findFirst().orElseThrow(); + + final var iter = new SourceStateIterator(mongoCursor, stream, stateManager, new StateEmitFrequency(CHECKPOINT_INTERVAL, + MongoConstants.CHECKPOINT_DURATION)); + + // with a batch size of 2, the MongoDbStateIterator should return the following after each + // `hasNext`/`next` call: + // true, record Air Force Blue + // true, record Alice Blue + // true, state (with Alice Blue as the state) + // true, record Alizarin Crimson + // true, state (with Alizarin Crimson) + // false + AirbyteMessage message; + assertTrue(iter.hasNext(), "air force blue should be next"); + message = iter.next(); + assertEquals(Type.RECORD, message.getType()); + assertEquals(docs.get(0).get("_id").toString(), message.getRecord().getData().get("_id").asText()); + + assertTrue(iter.hasNext(), "alice blue should be next"); + message = iter.next(); + assertEquals(Type.RECORD, message.getType()); + assertEquals(docs.get(1).get("_id").toString(), message.getRecord().getData().get("_id").asText()); + + assertTrue(iter.hasNext(), "state should be next"); + message = iter.next(); + assertEquals(Type.STATE, message.getType()); + assertEquals( + docs.get(1).get("_id").toString(), + message.getState().getGlobal().getStreamStates().get(0).getStreamState().get("id").asText(), + "state id should match last record id"); + Assertions.assertEquals( + InitialSnapshotStatus.FULL_REFRESH.toString(), + message.getState().getGlobal().getStreamStates().get(0).getStreamState().get("status").asText(), + "state status should remain full_refresh"); + + assertTrue(iter.hasNext(), "alizarin crimson should be next"); + message = iter.next(); + assertEquals(Type.RECORD, message.getType()); + assertEquals(docs.get(2).get("_id").toString(), message.getRecord().getData().get("_id").asText()); + + assertTrue(iter.hasNext(), "state should be next"); + message = iter.next(); + assertEquals(Type.STATE, message.getType()); + assertEquals( + "null", + message.getState().getGlobal().getStreamStates().get(0).getStreamState().get("id").asText(), + "state id should be null upon completion"); + assertEquals( + InitialSnapshotStatus.FULL_REFRESH.toString(), + message.getState().getGlobal().getStreamStates().get(0).getStreamState().get("status").asText(), + "state status should remain full_refresh upon completion"); + + assertFalse(iter.hasNext(), "should have no more records"); + } + private List docs() { return List.of( new Document("_id", new ObjectId("64c0029d95ad260d69ef28a0")) @@ -361,4 +454,24 @@ private List docs() { .append("name", "Alizarin Crimson").append("hex", "#e32636")); } + private static ConfiguredAirbyteCatalog catalogFullRefresh() { + return new ConfiguredAirbyteCatalog().withStreams(List.of( + new ConfiguredAirbyteStream() + .withSyncMode(SyncMode.FULL_REFRESH) + .withCursorField(List.of("_id")) + .withDestinationSyncMode(DestinationSyncMode.APPEND) + .withCursorField(List.of("_id")) + .withStream(CatalogHelpers.createAirbyteStream( + "test.unit", + Field.of("_id", JsonSchemaType.STRING), + Field.of("name", JsonSchemaType.STRING), + Field.of("hex", JsonSchemaType.STRING)) + .withSupportedSyncModes(List.of(SyncMode.INCREMENTAL)) + .withDefaultCursorField(List.of("_id"))))); + } + + private static Stream provideCatalogArguments() { + return Stream.of(catalog(), catalogFullRefresh()); + } + } diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcEventUtilsTest.java b/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcEventUtilsTest.java index 3068668bb9720..5a5b26ce4a8ba 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcEventUtilsTest.java +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcEventUtilsTest.java @@ -17,6 +17,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.JsonNodeType; import com.fasterxml.jackson.databind.node.ObjectNode; import io.airbyte.cdk.db.DataTypeUtils; import io.airbyte.commons.json.Jsons; @@ -150,7 +151,8 @@ void testTransformDataTypes() { assertEquals("code2", transformed.get("field13").get("code").asText()); assertEquals("scope", transformed.get("field13").get("scope").get("scope").asText()); assertEquals("pattern", transformed.get("field14").asText()); - assertFalse(transformed.has("field15")); + assertTrue(transformed.has("field15")); + assertEquals(JsonNodeType.NULL, transformed.get("field15").getNodeType()); assertEquals("value", transformed.get("field16").get("key").asText()); // Assert that UUIDs can be serialized. Currently, they will be represented as base 64 encoded // strings. Since the original mongo source @@ -247,7 +249,8 @@ void testTransformDataTypesNoSchema() { assertTrue(abDataNode.has("field12")); assertTrue(abDataNode.has("field13")); assertTrue(abDataNode.has("field14")); - assertFalse(abDataNode.has("field15")); + assertTrue(abDataNode.has("field15")); + assertEquals(JsonNodeType.NULL, abDataNode.get("field15").getNodeType()); assertTrue(abDataNode.has("field16")); } diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcInitializerTest.java b/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcInitializerTest.java index f7b48c8bcb955..0417f119fe92c 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcInitializerTest.java +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcInitializerTest.java @@ -95,7 +95,7 @@ class MongoDbCdcInitializerTest { .withSupportedSyncModes(List.of(SyncMode.INCREMENTAL)) .withSourceDefinedPrimaryKey(List.of(List.of("_id"))))); protected static final ConfiguredAirbyteCatalog CONFIGURED_CATALOG = toConfiguredCatalog(CATALOG); - + protected static final List CONFIGURED_CATALOG_STREAMS = CONFIGURED_CATALOG.getStreams(); final MongoDbSourceConfig CONFIG = new MongoDbSourceConfig(Jsons.jsonNode( Map.of(DATABASE_CONFIG_CONFIGURATION_KEY, Map.of( @@ -167,7 +167,7 @@ void setUp() { void testCreateCdcIteratorsEmptyInitialState() { final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(null, CONFIG); final List> iterators = cdcInitializer - .createCdcIterators(mongoClient, cdcConnectorMetadataInjector, CONFIGURED_CATALOG, stateManager, EMITTED_AT, CONFIG); + .createCdcIterators(mongoClient, cdcConnectorMetadataInjector, CONFIGURED_CATALOG_STREAMS, stateManager, EMITTED_AT, CONFIG); assertNotNull(iterators); assertEquals(2, iterators.size(), "Should always have 2 iterators: 1 for the initial snapshot and 1 for the cdc stream"); assertTrue(iterators.get(0).hasNext(), @@ -179,7 +179,7 @@ void testCreateCdcIteratorsEmptyInitialStateEmptyCollections() { when(findCursor.hasNext()).thenReturn(false); final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(null, CONFIG); final List> iterators = cdcInitializer - .createCdcIterators(mongoClient, cdcConnectorMetadataInjector, CONFIGURED_CATALOG, stateManager, EMITTED_AT, CONFIG); + .createCdcIterators(mongoClient, cdcConnectorMetadataInjector, CONFIGURED_CATALOG_STREAMS, stateManager, EMITTED_AT, CONFIG); assertNotNull(iterators); assertEquals(2, iterators.size(), "Should always have 2 iterators: 1 for the initial snapshot and 1 for the cdc stream"); } @@ -189,7 +189,7 @@ void testCreateCdcIteratorsFromInitialStateWithInProgressInitialSnapshot() { final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(createInitialDebeziumState(InitialSnapshotStatus.IN_PROGRESS), CONFIG); final List> iterators = cdcInitializer - .createCdcIterators(mongoClient, cdcConnectorMetadataInjector, CONFIGURED_CATALOG, stateManager, EMITTED_AT, CONFIG); + .createCdcIterators(mongoClient, cdcConnectorMetadataInjector, CONFIGURED_CATALOG_STREAMS, stateManager, EMITTED_AT, CONFIG); assertNotNull(iterators); assertEquals(2, iterators.size(), "Should always have 2 iterators: 1 for the initial snapshot and 1 for the cdc stream"); assertTrue(iterators.get(0).hasNext(), @@ -201,7 +201,7 @@ void testCreateCdcIteratorsFromInitialStateWithCompletedInitialSnapshot() { final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(createInitialDebeziumState(InitialSnapshotStatus.COMPLETE), CONFIG); final List> iterators = cdcInitializer - .createCdcIterators(mongoClient, cdcConnectorMetadataInjector, CONFIGURED_CATALOG, stateManager, EMITTED_AT, CONFIG); + .createCdcIterators(mongoClient, cdcConnectorMetadataInjector, CONFIGURED_CATALOG_STREAMS, stateManager, EMITTED_AT, CONFIG); assertNotNull(iterators); assertEquals(2, iterators.size(), "Should always have 2 iterators: 1 for the initial snapshot and 1 for the cdc stream"); assertFalse(iterators.get(0).hasNext(), "Initial snapshot iterator should have no messages if its snapshot state is set as complete"); @@ -215,8 +215,9 @@ void testCreateCdcIteratorsWithCompletedInitialSnapshotSavedOffsetInvalidDefault .thenReturn(mongoChangeStreamCursor); final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(createInitialDebeziumState(InitialSnapshotStatus.COMPLETE), CONFIG); - assertThrows(ConfigErrorException.class, () -> cdcInitializer.createCdcIterators(mongoClient, cdcConnectorMetadataInjector, CONFIGURED_CATALOG, - stateManager, EMITTED_AT, CONFIG)); + assertThrows(ConfigErrorException.class, + () -> cdcInitializer.createCdcIterators(mongoClient, cdcConnectorMetadataInjector, CONFIGURED_CATALOG_STREAMS, + stateManager, EMITTED_AT, CONFIG)); } @Test @@ -227,8 +228,9 @@ void testCreateCdcIteratorsWithCompletedInitialSnapshotSavedOffsetFailOption() { .thenReturn(mongoChangeStreamCursor); final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(createInitialDebeziumState(InitialSnapshotStatus.COMPLETE), CONFIG); - assertThrows(ConfigErrorException.class, () -> cdcInitializer.createCdcIterators(mongoClient, cdcConnectorMetadataInjector, CONFIGURED_CATALOG, - stateManager, EMITTED_AT, CONFIG)); + assertThrows(ConfigErrorException.class, + () -> cdcInitializer.createCdcIterators(mongoClient, cdcConnectorMetadataInjector, CONFIGURED_CATALOG_STREAMS, + stateManager, EMITTED_AT, CONFIG)); } @Test @@ -241,7 +243,7 @@ void testCreateCdcIteratorsWithCompletedInitialSnapshotSavedOffsetInvalidResyncO final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(createInitialDebeziumState(InitialSnapshotStatus.COMPLETE), CONFIG); final List> iterators = cdcInitializer - .createCdcIterators(mongoClient, cdcConnectorMetadataInjector, CONFIGURED_CATALOG, stateManager, EMITTED_AT, resyncConfig); + .createCdcIterators(mongoClient, cdcConnectorMetadataInjector, CONFIGURED_CATALOG_STREAMS, stateManager, EMITTED_AT, resyncConfig); assertNotNull(iterators); assertEquals(2, iterators.size(), "Should always have 2 iterators: 1 for the initial snapshot and 1 for the cdc stream"); assertTrue(iterators.get(0).hasNext(), @@ -264,7 +266,8 @@ void testUnableToExtractOffsetFromStateException() { MongoDbStateManager.createStateManager(createInitialDebeziumState(InitialSnapshotStatus.COMPLETE), CONFIG); doReturn(Optional.empty()).when(mongoDbDebeziumStateUtil).savedOffset(any(), any(), any(), any(), any()); assertThrows(RuntimeException.class, - () -> cdcInitializer.createCdcIterators(mongoClient, cdcConnectorMetadataInjector, CONFIGURED_CATALOG, stateManager, EMITTED_AT, CONFIG)); + () -> cdcInitializer.createCdcIterators(mongoClient, cdcConnectorMetadataInjector, CONFIGURED_CATALOG_STREAMS, stateManager, EMITTED_AT, + CONFIG)); } @Test @@ -280,7 +283,7 @@ void testMultipleIdTypesThrowsException() { MongoDbStateManager.createStateManager(createInitialDebeziumState(InitialSnapshotStatus.IN_PROGRESS), CONFIG); final var thrown = assertThrows(ConfigErrorException.class, () -> cdcInitializer - .createCdcIterators(mongoClient, cdcConnectorMetadataInjector, CONFIGURED_CATALOG, stateManager, EMITTED_AT, CONFIG)); + .createCdcIterators(mongoClient, cdcConnectorMetadataInjector, CONFIGURED_CATALOG_STREAMS, stateManager, EMITTED_AT, CONFIG)); assertTrue(thrown.getMessage().contains("must be consistently typed")); } @@ -295,7 +298,7 @@ void testUnsupportedIdTypeThrowsException() { final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(null, CONFIG); final var thrown = assertThrows(ConfigErrorException.class, () -> cdcInitializer - .createCdcIterators(mongoClient, cdcConnectorMetadataInjector, CONFIGURED_CATALOG, stateManager, EMITTED_AT, CONFIG)); + .createCdcIterators(mongoClient, cdcConnectorMetadataInjector, CONFIGURED_CATALOG_STREAMS, stateManager, EMITTED_AT, CONFIG)); assertTrue(thrown.getMessage().contains("_id fields with the following types are currently supported")); } diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbDebeziumPropertiesManagerTest.java b/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbDebeziumPropertiesManagerTest.java index f095f676d0ee7..89241e63f3715 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbDebeziumPropertiesManagerTest.java +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbDebeziumPropertiesManagerTest.java @@ -67,7 +67,7 @@ void testDebeziumProperties() { final var debeziumPropertiesManager = new MongoDbDebeziumPropertiesManager(cdcProperties, config, catalog); final Properties debeziumProperties = debeziumPropertiesManager.getDebeziumProperties(offsetManager); - assertEquals(20 + cdcProperties.size(), debeziumProperties.size()); + assertEquals(21 + cdcProperties.size(), debeziumProperties.size()); assertEquals(MongoDbDebeziumPropertiesManager.normalizeName(DATABASE_NAME), debeziumProperties.get(NAME_KEY)); assertEquals(MongoDbDebeziumPropertiesManager.normalizeName(DATABASE_NAME), debeziumProperties.get(TOPIC_PREFIX_KEY)); assertEquals(EXPECTED_CONNECTION_STRING, debeziumProperties.get(MONGODB_CONNECTION_STRING_KEY)); @@ -97,7 +97,7 @@ void testDebeziumPropertiesConnectionStringCredentialsPlaceholder() { final var debeziumPropertiesManager = new MongoDbDebeziumPropertiesManager(cdcProperties, config, catalog); final Properties debeziumProperties = debeziumPropertiesManager.getDebeziumProperties(offsetManager); - assertEquals(20 + cdcProperties.size(), debeziumProperties.size()); + assertEquals(21 + cdcProperties.size(), debeziumProperties.size()); assertEquals(MongoDbDebeziumPropertiesManager.normalizeName(DATABASE_NAME), debeziumProperties.get(NAME_KEY)); assertEquals(MongoDbDebeziumPropertiesManager.normalizeName(DATABASE_NAME), debeziumProperties.get(TOPIC_PREFIX_KEY)); assertEquals(EXPECTED_CONNECTION_STRING, debeziumProperties.get(MONGODB_CONNECTION_STRING_KEY)); @@ -126,7 +126,7 @@ void testDebeziumPropertiesQuotedConnectionString() { final var debeziumPropertiesManager = new MongoDbDebeziumPropertiesManager(cdcProperties, config, catalog); final Properties debeziumProperties = debeziumPropertiesManager.getDebeziumProperties(offsetManager); - assertEquals(20 + cdcProperties.size(), debeziumProperties.size()); + assertEquals(21 + cdcProperties.size(), debeziumProperties.size()); assertEquals(MongoDbDebeziumPropertiesManager.normalizeName(DATABASE_NAME), debeziumProperties.get(NAME_KEY)); assertEquals(MongoDbDebeziumPropertiesManager.normalizeName(DATABASE_NAME), debeziumProperties.get(TOPIC_PREFIX_KEY)); assertEquals(EXPECTED_CONNECTION_STRING, debeziumProperties.get(MONGODB_CONNECTION_STRING_KEY)); @@ -155,7 +155,7 @@ void testDebeziumPropertiesNoCredentials() { final var debeziumPropertiesManager = new MongoDbDebeziumPropertiesManager(cdcProperties, config, catalog); final Properties debeziumProperties = debeziumPropertiesManager.getDebeziumProperties(offsetManager); - assertEquals(17 + cdcProperties.size(), debeziumProperties.size()); + assertEquals(18 + cdcProperties.size(), debeziumProperties.size()); assertEquals(MongoDbDebeziumPropertiesManager.normalizeName(DATABASE_NAME), debeziumProperties.get(NAME_KEY)); assertEquals(MongoDbDebeziumPropertiesManager.normalizeName(DATABASE_NAME), debeziumProperties.get(TOPIC_PREFIX_KEY)); assertEquals(EXPECTED_CONNECTION_STRING, debeziumProperties.get(MONGODB_CONNECTION_STRING_KEY)); diff --git a/airbyte-integrations/connectors/source-mongodb/.gitignore b/airbyte-integrations/connectors/source-mongodb/.gitignore deleted file mode 100644 index 10bd420c0524a..0000000000000 --- a/airbyte-integrations/connectors/source-mongodb/.gitignore +++ /dev/null @@ -1,5 +0,0 @@ -NEW_SOURCE_CHECKLIST.md -.ruby-gemset -.byebug_history - -tmp diff --git a/airbyte-integrations/connectors/source-mongodb/.ruby-version b/airbyte-integrations/connectors/source-mongodb/.ruby-version deleted file mode 100644 index 4a36342fcab70..0000000000000 --- a/airbyte-integrations/connectors/source-mongodb/.ruby-version +++ /dev/null @@ -1 +0,0 @@ -3.0.0 diff --git a/airbyte-integrations/connectors/source-mongodb/Dockerfile b/airbyte-integrations/connectors/source-mongodb/Dockerfile deleted file mode 100644 index 7d38acfba3ce4..0000000000000 --- a/airbyte-integrations/connectors/source-mongodb/Dockerfile +++ /dev/null @@ -1,17 +0,0 @@ -FROM ruby:3.0-alpine - -RUN apk update -RUN apk add --update build-base libffi-dev - -WORKDIR /airbyte - -COPY . ./ - -RUN gem install bundler -RUN bundle install - -ENV AIRBYTE_ENTRYPOINT "ruby /airbyte/source.rb" -ENTRYPOINT ["ruby", "/airbyte/source.rb"] - -LABEL io.airbyte.name=airbyte/source-mongodb -LABEL io.airbyte.version=0.3.3 diff --git a/airbyte-integrations/connectors/source-mongodb/Dockerfile.test b/airbyte-integrations/connectors/source-mongodb/Dockerfile.test deleted file mode 100644 index 080bd7062f8ff..0000000000000 --- a/airbyte-integrations/connectors/source-mongodb/Dockerfile.test +++ /dev/null @@ -1,8 +0,0 @@ -FROM mongo:4.0.23 - -COPY ./integration_tests /integration_tests - -RUN echo "mongorestore --archive=integration_tests/dump/analytics.archive" > /docker-entrypoint-initdb.d/init.sh - -LABEL io.airbyte.version=0.1.0 -LABEL io.airbyte.name=airbyte/mongodb-integration-test-seed diff --git a/airbyte-integrations/connectors/source-mongodb/Gemfile b/airbyte-integrations/connectors/source-mongodb/Gemfile deleted file mode 100644 index 411ef252d7dc9..0000000000000 --- a/airbyte-integrations/connectors/source-mongodb/Gemfile +++ /dev/null @@ -1,8 +0,0 @@ -source 'https://rubygems.org' - -gem 'mongo' -gem 'slop' -gem 'dry-types' -gem 'dry-struct' - -# gem 'byebug' diff --git a/airbyte-integrations/connectors/source-mongodb/Gemfile.lock b/airbyte-integrations/connectors/source-mongodb/Gemfile.lock deleted file mode 100644 index 1c26567c4ab03..0000000000000 --- a/airbyte-integrations/connectors/source-mongodb/Gemfile.lock +++ /dev/null @@ -1,44 +0,0 @@ -GEM - remote: https://rubygems.org/ - specs: - bson (4.12.0) - concurrent-ruby (1.1.8) - dry-configurable (0.12.1) - concurrent-ruby (~> 1.0) - dry-core (~> 0.5, >= 0.5.0) - dry-container (0.7.2) - concurrent-ruby (~> 1.0) - dry-configurable (~> 0.1, >= 0.1.3) - dry-core (0.5.0) - concurrent-ruby (~> 1.0) - dry-inflector (0.2.0) - dry-logic (1.1.0) - concurrent-ruby (~> 1.0) - dry-core (~> 0.5, >= 0.5) - dry-struct (1.4.0) - dry-core (~> 0.5, >= 0.5) - dry-types (~> 1.5) - ice_nine (~> 0.11) - dry-types (1.5.1) - concurrent-ruby (~> 1.0) - dry-container (~> 0.3) - dry-core (~> 0.5, >= 0.5) - dry-inflector (~> 0.1, >= 0.1.2) - dry-logic (~> 1.0, >= 1.0.2) - ice_nine (0.11.2) - mongo (2.14.0) - bson (>= 4.8.2, < 5.0.0) - slop (4.8.2) - -PLATFORMS - x86_64-darwin-19 - x86_64-linux - -DEPENDENCIES - dry-struct - dry-types - mongo - slop - -BUNDLED WITH - 2.2.3 diff --git a/airbyte-integrations/connectors/source-mongodb/README.md b/airbyte-integrations/connectors/source-mongodb/README.md deleted file mode 100644 index 7f108fb5b5919..0000000000000 --- a/airbyte-integrations/connectors/source-mongodb/README.md +++ /dev/null @@ -1,61 +0,0 @@ -# Mongodb Source - -This is the repository for the Mongodb source connector, written in Ruby. - -## Local development -### Requirements - -#### Ruby Version -This module uses `rbenv` to manage its Ruby version. If you have `rbenv` installed, you should be running the correct Ruby version. - -While it is _highly_ recommended to use `rbenv`, if you don't want to, just make sure your system is running whatever ruby version is present in the file `.ruby-version`. - -#### Install dependencies -1. Install the correct `bundle` version (found at the bottom of `Gemfile`). Currently this is `gem install bundle:2.2.3`. -2. `bundle install` - -### Local iteration -1. Change code -2. `ruby source.rb ` - -For example, to verify if your provided credentials are valid and can be used to connect to a mongo DB, run: -``` -ruby source.rb check --config -``` - -The full list of commands are: - -1. `ruby source.rb spec` -2. `ruby source.rb check --config ` -3. `ruby source.rb discover --config ` -4. `ruby source.rb read --config --catalog [--state ]` - -These commands correspond to the ones in the [Airbyte Protocol](). - -### Build connector Docker image -First, build the module by running the following from the airbyte project root directory: -``` -cd airbyte-integrations/connectors/source-mongodb/ -docker build . -t airbyte/source-mongodb:dev -``` - -### Integration Tests -From the airbyte project root, run: -``` -./gradlew clean :airbyte-integrations:connectors:source-mongodb:integrationTest -``` - -## Configure credentials -Create a `secrets` folder (which is gitignored by default) and place your credentials as a JSON file in it. An example of the needed credentials is available in `integration_tests/valid_config.json`. - -## Discover phase -MongoDB does not have anything like table definition, thus we have to define column types from actual attributes and their values. Discover phase have two steps: - -### Step 1. Find all unique properties -Connector runs the map-reduce command which returns all unique document props in the collection. Map-reduce approach should be sufficient even for large clusters. - -### Step 2. Determine property types -For each property found, connector selects 10k documents from the collection where this property is not empty. If all the selected values have the same type - connector will set appropriate type to the property. In all other cases connector will fallback to `string` type. - -## Author -This connector was authored by [Yury Koleda](https://github.com/FUT). diff --git a/airbyte-integrations/connectors/source-mongodb/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-mongodb/integration_tests/configured_catalog.json deleted file mode 100644 index a32da7f833e3c..0000000000000 --- a/airbyte-integrations/connectors/source-mongodb/integration_tests/configured_catalog.json +++ /dev/null @@ -1,71 +0,0 @@ -{ - "streams": [ - { - "sync_mode": "incremental", - "destination_sync_mode": "append", - "cursor_field": ["bucket_end_date"], - "stream": { - "name": "transactions", - "supported_sync_modes": ["full_refresh", "incremental"], - "json_schema": { - "properties": { - "_id": { - "type": "string" - }, - "account_id": { - "type": "integer" - }, - "transaction_count": { - "type": "integer" - }, - "bucket_start_date": { - "type": "string" - }, - "bucket_end_date": { - "type": "string" - }, - "transactions": { - "type": "array" - } - } - } - } - }, - { - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite", - "stream": { - "name": "customers", - "supported_sync_modes": ["full_refresh", "incremental"], - "json_schema": { - "properties": { - "_id": { - "type": "string" - }, - "username": { - "type": "string" - }, - "name": { - "type": "string" - }, - "address": { - "type": "string" - }, - "birthdate": { - "type": "string" - }, - "active": { - "type": "boolean" - }, - "accounts": { - "type": "array" - }, - "tier_and_details": { - "type": "string" - } - } - } - } - } - ] -} diff --git a/airbyte-integrations/connectors/source-mongodb/integration_tests/dump/analytics.archive b/airbyte-integrations/connectors/source-mongodb/integration_tests/dump/analytics.archive deleted file mode 100644 index c491368bcf472..0000000000000 Binary files a/airbyte-integrations/connectors/source-mongodb/integration_tests/dump/analytics.archive and /dev/null differ diff --git a/airbyte-integrations/connectors/source-mongodb/integration_tests/valid_config.json b/airbyte-integrations/connectors/source-mongodb/integration_tests/valid_config.json deleted file mode 100644 index 0bff6cfb1fb2d..0000000000000 --- a/airbyte-integrations/connectors/source-mongodb/integration_tests/valid_config.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "host": "127.0.0.1", - "port": "27888", - "database": "sample_analytics", - "user": "user", - "password": "password", - "auth_source": "admin", - "ssl": false -} diff --git a/airbyte-integrations/connectors/source-mongodb/lib/airbyte_logger.rb b/airbyte-integrations/connectors/source-mongodb/lib/airbyte_logger.rb deleted file mode 100644 index a519e42cb58bb..0000000000000 --- a/airbyte-integrations/connectors/source-mongodb/lib/airbyte_logger.rb +++ /dev/null @@ -1,27 +0,0 @@ -require_relative './airbyte_protocol.rb' - -class AirbyteLogger - def self.format_log(text, log_level=Level::Info) - alm = AirbyteLogMessage.from_dynamic!({ - 'level' => log_level, - 'message' => text - }) - - AirbyteMessage.from_dynamic!({ - 'type' => Type::Log, - 'log' => alm.to_dynamic - }).to_json - end - - def self.logger_formatter - proc { |severity, datetime, progname, msg| - format_log("[#{datetime}] #{severity} : #{progname} | #{msg.dump}\n\n") - } - end - - def self.log(text, log_level=Level::Info) - message = format_log(text, log_level=Level::Info) - - puts message - end -end diff --git a/airbyte-integrations/connectors/source-mongodb/lib/airbyte_protocol.rb b/airbyte-integrations/connectors/source-mongodb/lib/airbyte_protocol.rb deleted file mode 100644 index 61c4024059701..0000000000000 --- a/airbyte-integrations/connectors/source-mongodb/lib/airbyte_protocol.rb +++ /dev/null @@ -1,462 +0,0 @@ -# The file was generated in several steps. -# -# 1. Convert airbyte_protocol.yaml to JSON -# 2. Use https://app.quicktype.io/?l=ruby to generate code. Parameters: -# Type strictness: Coercible -# Plain types only: Disabled -# 3. Fix module `Types` according to https://dry-rb.org/gems/dry-types/master/built-in-types/ -# 4. Replace all instance variable calls to just method calls (remove all characters) -# 4. Add `.compact` call to resulting object in every `to_dynamic` method -# -# -# -# This code may look unusually verbose for Ruby (and it is), but -# it performs some subtle and complex validation of JSON data. -# -# To parse this JSON, add 'dry-struct' and 'dry-types' gems, then do: -# -# airbyte = Airbyte.from_json! "{…}" -# puts airbyte.configured_airbyte_catalog&.streams.first.stream.supported_sync_modes&.first -# -# If from_json! succeeds, the value returned matches the schema. - -require 'json' -require 'dry-types' -require 'dry-struct' - -module Types - include Dry::Types() - - Int = Coercible::Integer - Bool = Strict::Bool - Hash = Coercible::Hash - String = Coercible::String - Type = Coercible::String.enum("CATALOG", "CONNECTION_STATUS", "LOG", "RECORD", "SPEC", "STATE") - SyncMode = Coercible::String.enum("full_refresh", "incremental") - Status = Coercible::String.enum("FAILED", "SUCCEEDED") - Level = Coercible::String.enum("DEBUG", "ERROR", "FATAL", "INFO", "TRACE", "WARN") -end - -# Message type -module Type - Catalog = "CATALOG" - ConnectionStatus = "CONNECTION_STATUS" - Log = "LOG" - Record = "RECORD" - Spec = "SPEC" - State = "STATE" -end - -module SyncMode - FullRefresh = "full_refresh" - Incremental = "incremental" -end - -class AirbyteStream < Dry::Struct - - # Path to the field that will be used to determine if a record is new or modified since the - # last sync. If not provided by the source, the end user will have to specify the - # comparable themselves. - attribute :default_cursor_field, Types.Array(Types::String).optional - - # Stream schema using Json Schema specs. - attribute :json_schema, Types::Hash.meta(of: Types::Any) - - # Stream's name. - attribute :airbyte_stream_name, Types::String - - # If the source defines the cursor field, then it does any other cursor field inputs will - # be ignored. If it does not either the user_provided one is used or as a backup the - # default one is used. - attribute :source_defined_cursor, Types::Bool.optional - - attribute :supported_sync_modes, Types.Array(Types::SyncMode).optional - - def self.from_dynamic!(d) - d = Types::Hash[d] - new( - default_cursor_field: d["default_cursor_field"], - json_schema: Types::Hash[d.fetch("json_schema")].map { |k, v| [k, Types::Any[v]] }.to_h, - airbyte_stream_name: d.fetch("name"), - source_defined_cursor: d["source_defined_cursor"], - supported_sync_modes: d["supported_sync_modes"], - ) - end - - def self.from_json!(json) - from_dynamic!(JSON.parse(json)) - end - - def to_dynamic - { - "default_cursor_field" => default_cursor_field, - "json_schema" => json_schema, - "name" => airbyte_stream_name, - "source_defined_cursor" => source_defined_cursor, - "supported_sync_modes" => supported_sync_modes, - }.compact - end - - def to_json(options = nil) - JSON.generate(to_dynamic, options) - end -end - -# log message: any kind of logging you want the platform to know about. -# -# Airbyte stream schema catalog -class AirbyteCatalog < Dry::Struct - attribute :streams, Types.Array(AirbyteStream) - - def self.from_dynamic!(d) - d = Types::Hash[d] - new( - streams: d.fetch("streams").map { |x| AirbyteStream.from_dynamic!(x) }, - ) - end - - def self.from_json!(json) - from_dynamic!(JSON.parse(json)) - end - - def to_dynamic - { - "streams" => streams.map { |x| x.to_dynamic }, - }.compact - end - - def to_json(options = nil) - JSON.generate(to_dynamic, options) - end -end - -module Status - Failed = "FAILED" - Succeeded = "SUCCEEDED" -end - -# Airbyte connection status -class AirbyteConnectionStatus < Dry::Struct - attribute :message, Types::String.optional - attribute :status, Types::Status - - def self.from_dynamic!(d) - d = Types::Hash[d] - new( - message: d["message"], - status: d.fetch("status"), - ) - end - - def self.from_json!(json) - from_dynamic!(JSON.parse(json)) - end - - def to_dynamic - { - "message" => message, - "status" => status, - }.compact - end - - def to_json(options = nil) - JSON.generate(to_dynamic, options) - end -end - -# the type of logging -module Level - Debug = "DEBUG" - Error = "ERROR" - Fatal = "FATAL" - Info = "INFO" - Trace = "TRACE" - Warn = "WARN" -end - -# log message: any kind of logging you want the platform to know about. -class AirbyteLogMessage < Dry::Struct - - # the type of logging - attribute :level, Types::Level - - # the log message - attribute :message, Types::String - - def self.from_dynamic!(d) - d = Types::Hash[d] - new( - level: d.fetch("level"), - message: d.fetch("message"), - ) - end - - def self.from_json!(json) - from_dynamic!(JSON.parse(json)) - end - - def to_dynamic - { - "level" => level, - "message" => message, - }.compact - end - - def to_json(options = nil) - JSON.generate(to_dynamic, options) - end -end - -# record message: the record -class AirbyteRecordMessage < Dry::Struct - - # the record data - attribute :data, Types::Hash.meta(of: Types::Any) - - # when the data was emitted from the source. epoch in millisecond. - attribute :emitted_at, Types::Int - - # the name of the stream for this record - attribute :stream, Types::String - - def self.from_dynamic!(d) - d = Types::Hash[d] - new( - data: Types::Hash[d.fetch("data")].map { |k, v| [k, Types::Any[v]] }.to_h, - emitted_at: d.fetch("emitted_at"), - stream: d.fetch("stream"), - ) - end - - def self.from_json!(json) - from_dynamic!(JSON.parse(json)) - end - - def to_dynamic - { - "data" => data, - "emitted_at" => emitted_at, - "stream" => stream, - }.compact - end - - def to_json(options = nil) - JSON.generate(to_dynamic, options) - end -end - -# Specification of a connector (source/destination) -class ConnectorSpecification < Dry::Struct - attribute :changelog_url, Types::String.optional - - # ConnectorDefinition specific blob. Must be a valid JSON string. - attribute :connection_specification, Types::Hash.meta(of: Types::Any) - - attribute :documentation_url, Types::String.optional - - # If the connector supports incremental mode or not. - attribute :supports_incremental, Types::Bool.optional - - def self.from_dynamic!(d) - d = Types::Hash[d] - new( - changelog_url: d["changelogUrl"], - connection_specification: Types::Hash[d.fetch("connectionSpecification")].map { |k, v| [k, Types::Any[v]] }.to_h, - documentation_url: d["documentationUrl"], - supports_incremental: d["supportsIncremental"], - ) - end - - def self.from_json!(json) - from_dynamic!(JSON.parse(json)) - end - - def to_dynamic - { - "changelogUrl" => changelog_url, - "connectionSpecification" => connection_specification, - "documentationUrl" => documentation_url, - "supportsIncremental" => supports_incremental, - }.compact - end - - def to_json(options = nil) - JSON.generate(to_dynamic, options) - end -end - -# schema message: the state. Must be the last message produced. The platform uses this -# information -class AirbyteStateMessage < Dry::Struct - - # the state data - attribute :data, Types::Hash.meta(of: Types::Any) - - def self.from_dynamic!(d) - d = Types::Hash[d] - new( - data: Types::Hash[d.fetch("data")].map { |k, v| [k, Types::Any[v]] }.to_h, - ) - end - - def self.from_json!(json) - from_dynamic!(JSON.parse(json)) - end - - def to_dynamic - { - "data" => data, - }.compact - end - - def to_json(options = nil) - JSON.generate(to_dynamic, options) - end -end - -class AirbyteMessage < Dry::Struct - - # log message: any kind of logging you want the platform to know about. - attribute :catalog, AirbyteCatalog.optional - - attribute :connection_status, AirbyteConnectionStatus.optional - - # log message: any kind of logging you want the platform to know about. - attribute :log, AirbyteLogMessage.optional - - # record message: the record - attribute :record, AirbyteRecordMessage.optional - - attribute :spec, ConnectorSpecification.optional - - # schema message: the state. Must be the last message produced. The platform uses this - # information - attribute :state, AirbyteStateMessage.optional - - # Message type - attribute :airbyte_message_type, Types::Type - - def self.from_dynamic!(d) - d = Types::Hash[d] - new( - catalog: d["catalog"] ? AirbyteCatalog.from_dynamic!(d["catalog"]) : nil, - connection_status: d["connectionStatus"] ? AirbyteConnectionStatus.from_dynamic!(d["connectionStatus"]) : nil, - log: d["log"] ? AirbyteLogMessage.from_dynamic!(d["log"]) : nil, - record: d["record"] ? AirbyteRecordMessage.from_dynamic!(d["record"]) : nil, - spec: d["spec"] ? ConnectorSpecification.from_dynamic!(d["spec"]) : nil, - state: d["state"] ? AirbyteStateMessage.from_dynamic!(d["state"]) : nil, - airbyte_message_type: d.fetch("type"), - ) - end - - def self.from_json!(json) - from_dynamic!(JSON.parse(json)) - end - - def to_dynamic - { - "catalog" => catalog&.to_dynamic, - "connectionStatus" => connection_status&.to_dynamic, - "log" => log&.to_dynamic, - "record" => record&.to_dynamic, - "spec" => spec&.to_dynamic, - "state" => state&.to_dynamic, - "type" => airbyte_message_type, - }.compact - end - - def to_json(options = nil) - JSON.generate(to_dynamic, options) - end -end - -class ConfiguredAirbyteStream < Dry::Struct - - # Path to the field that will be used to determine if a record is new or modified since the - # last sync. This field is REQUIRED if `sync_mode` is `incremental`. Otherwise it is - # ignored. - attribute :cursor_field, Types.Array(Types::String).optional - - attribute :stream, AirbyteStream - attribute :sync_mode, Types::SyncMode.optional - - def self.from_dynamic!(d) - d = Types::Hash[d] - new( - cursor_field: d["cursor_field"], - stream: AirbyteStream.from_dynamic!(d.fetch("stream")), - sync_mode: d["sync_mode"], - ) - end - - def self.from_json!(json) - from_dynamic!(JSON.parse(json)) - end - - def to_dynamic - { - "cursor_field" => cursor_field, - "stream" => stream.to_dynamic, - "sync_mode" => sync_mode, - }.compact - end - - def to_json(options = nil) - JSON.generate(to_dynamic, options) - end -end - -# Airbyte stream schema catalog -class ConfiguredAirbyteCatalog < Dry::Struct - attribute :streams, Types.Array(ConfiguredAirbyteStream) - - def self.from_dynamic!(d) - d = Types::Hash[d] - new( - streams: d.fetch("streams").map { |x| ConfiguredAirbyteStream.from_dynamic!(x) }, - ) - end - - def self.from_json!(json) - from_dynamic!(JSON.parse(json)) - end - - def to_dynamic - { - "streams" => streams.map { |x| x.to_dynamic }, - }.compact - end - - def to_json(options = nil) - JSON.generate(to_dynamic, options) - end -end - -# AirbyteProtocol structs -class Airbyte < Dry::Struct - attribute :airbyte_message, AirbyteMessage.optional - attribute :configured_airbyte_catalog, ConfiguredAirbyteCatalog.optional - - def self.from_dynamic!(d) - d = Types::Hash[d] - new( - airbyte_message: d["airbyte_message"] ? AirbyteMessage.from_dynamic!(d["airbyte_message"]) : nil, - configured_airbyte_catalog: d["configured_airbyte_catalog"] ? ConfiguredAirbyteCatalog.from_dynamic!(d["configured_airbyte_catalog"]) : nil, - ) - end - - def self.from_json!(json) - from_dynamic!(JSON.parse(json)) - end - - def to_dynamic - { - "airbyte_message" => airbyte_message&.to_dynamic, - "configured_airbyte_catalog" => configured_airbyte_catalog&.to_dynamic, - }.compact - end - - def to_json(options = nil) - JSON.generate(to_dynamic, options) - end -end diff --git a/airbyte-integrations/connectors/source-mongodb/lib/mongodb_configured_stream/base.rb b/airbyte-integrations/connectors/source-mongodb/lib/mongodb_configured_stream/base.rb deleted file mode 100644 index a06ee2fd6f3e0..0000000000000 --- a/airbyte-integrations/connectors/source-mongodb/lib/mongodb_configured_stream/base.rb +++ /dev/null @@ -1,43 +0,0 @@ -require_relative '../airbyte_logger.rb' - -module MongodbConfiguredStream - class Base - attr_reader :processed_count, :configured_stream - - def initialize(configured_stream:, state:, client:) - @configured_stream = configured_stream - @state = state - @client = client - - @processed_count = 0 - end - - def stream - @stream ||= configured_stream['stream'] - end - - def stream_name - @stream_name ||= configured_stream['stream']['name'] - end - - def sync_mode - @sync_mode ||= configured_stream['sync_mode'] - end - - def compose_query - {} - end - - def valid? - true - end - - def after_item_processed(item) - @processed_count += 1 - end - - def after_stream_processed - AirbyteLogger.log("Stream #{stream_name} successfully processed!") - end - end -end diff --git a/airbyte-integrations/connectors/source-mongodb/lib/mongodb_configured_stream/factory.rb b/airbyte-integrations/connectors/source-mongodb/lib/mongodb_configured_stream/factory.rb deleted file mode 100644 index 189f4b195ff61..0000000000000 --- a/airbyte-integrations/connectors/source-mongodb/lib/mongodb_configured_stream/factory.rb +++ /dev/null @@ -1,17 +0,0 @@ -require_relative '../airbyte_logger.rb' - -require_relative './full_refresh.rb' -require_relative './incremental.rb' - -class MongodbConfiguredStream::Factory - def self.build(configured_stream:, state:, client:) - case configured_stream['sync_mode'] - when SyncMode::FullRefresh - MongodbConfiguredStream::FullRefresh.new(configured_stream: configured_stream, state: state, client: client) - when SyncMode::Incremental - MongodbConfiguredStream::Incremental.new(configured_stream: configured_stream, state: state, client: client) - else - AirbyteLogger.log("Sync mode #{configured_stream['sync_mode']} is not supported!", Level::Fatal) - end - end -end diff --git a/airbyte-integrations/connectors/source-mongodb/lib/mongodb_configured_stream/full_refresh.rb b/airbyte-integrations/connectors/source-mongodb/lib/mongodb_configured_stream/full_refresh.rb deleted file mode 100644 index 0ad730cfd9ad8..0000000000000 --- a/airbyte-integrations/connectors/source-mongodb/lib/mongodb_configured_stream/full_refresh.rb +++ /dev/null @@ -1,6 +0,0 @@ -require_relative '../airbyte_logger.rb' - -require_relative './base.rb' - -class MongodbConfiguredStream::FullRefresh < MongodbConfiguredStream::Base -end diff --git a/airbyte-integrations/connectors/source-mongodb/lib/mongodb_configured_stream/incremental.rb b/airbyte-integrations/connectors/source-mongodb/lib/mongodb_configured_stream/incremental.rb deleted file mode 100644 index 2fba51fc6ee52..0000000000000 --- a/airbyte-integrations/connectors/source-mongodb/lib/mongodb_configured_stream/incremental.rb +++ /dev/null @@ -1,93 +0,0 @@ -require_relative '../airbyte_logger.rb' - -require_relative './base.rb' -require_relative '../mongodb_types_converter.rb' -require_relative '../mongodb_types_explorer.rb' - -class MongodbConfiguredStream::Incremental < MongodbConfiguredStream::Base - DATETIME_TYPES = [Date, Time, DateTime] - CURSOR_TYPES = { - datetime: 'DATETIME', - integer: 'integer', - } - - attr_reader :cursor_field_type - - def initialize(configured_stream:, state:, client:) - super - - @cursor_field_type = determine_cursor_field_type - AirbyteLogger.log("Cursor type was determined as: #{@cursor_field_type}") - - value = @state.get(stream_name: stream_name, cursor_field: cursor_field) - @cursor = value && convert_cursor(value) - end - - def cursor_field - @cursor_field ||= configured_stream['cursor_field']&.first - end - - def compose_query - if @cursor - { - cursor_field => { - "$gt": @cursor - } - } - else - {} - end - end - - def valid? - if configured_stream['cursor_field'].count != 1 - AirbyteLogger.log("Stream #{stream_name} has invalid configuration. Cursor field #{wrapper['cursor_field']} configuration is invalid. Should contain exactly one document property name.", Level::Fatal) - return false - end - - true - end - - def after_item_processed(item) - super - - if item[cursor_field] - converted_cursor = convert_cursor(item[cursor_field]) - if !@cursor || converted_cursor && converted_cursor > @cursor - @cursor = converted_cursor - end - else - AirbyteLogger.log("Cursor is empty! Incremental sync results might be unpredictable! Item: #{item}", Level::Fatal) - end - end - - def after_stream_processed - super - - @state.set(stream_name: stream_name, cursor_field: cursor_field, cursor: @cursor) - @state.dump_state! - end - - private - - def determine_cursor_field_type - MongodbTypesExplorer.run(collection: @client[stream_name], field: cursor_field) do |type| - if DATETIME_TYPES.include?(type) - CURSOR_TYPES[:datetime] - else - CURSOR_TYPES[:integer] - end - end - end - - def convert_cursor(value) - if cursor_field_type == CURSOR_TYPES[:datetime] - Time.parse(value) - elsif cursor_field_type == CURSOR_TYPES[:integer] - value.to_i - else - AirbyteLogger.log("Cursor type #{cursor_field_type} is not supported!", Level::Fatal) - end - end - -end diff --git a/airbyte-integrations/connectors/source-mongodb/lib/mongodb_reader.rb b/airbyte-integrations/connectors/source-mongodb/lib/mongodb_reader.rb deleted file mode 100644 index b0bb8a4776559..0000000000000 --- a/airbyte-integrations/connectors/source-mongodb/lib/mongodb_reader.rb +++ /dev/null @@ -1,68 +0,0 @@ -require_relative './airbyte_protocol.rb' -require_relative './airbyte_logger.rb' - -require_relative './mongodb_stream.rb' -require_relative './mongodb_types_converter.rb' -require_relative './mongodb_configured_stream/factory.rb' - -class MongodbReader - BATCH_SIZE = 10_000 - LOG_BATCH_SIZE = 10_000 - - def initialize(client:, catalog:, state:) - @client = client - @catalog = catalog - @state = state - end - - def read - @catalog['streams'].each do |configured_stream| - wrapper = MongodbConfiguredStream::Factory.build(configured_stream: configured_stream, state: @state, client: @client) - - AirbyteLogger.log("Reading stream #{wrapper.stream_name} in #{wrapper.sync_mode} mode") - - if wrapper.valid? - read_configured_stream(wrapper) - end - end - end - - private - - def read_configured_stream(wrapper) - collection = @client[wrapper.stream_name] - - projection_config = wrapper.stream['json_schema']['properties'].keys.each_with_object({}) do |key, obj| - obj[key] = 1 - end - - full_count = collection.count - - collection.find(wrapper.compose_query).projection(projection_config).batch_size(BATCH_SIZE).each do |item| - item.each_pair do |key, value| - item[key] = MongodbTypesConverter.convert_value_to_type(value, wrapper.stream['json_schema']['properties'][key]['type']) - end - - record = AirbyteRecordMessage.from_dynamic!({ - "data" => item, - "emitted_at" => Time.now.to_i * 1000, - "stream" => wrapper.stream_name, - }) - - message = AirbyteMessage.from_dynamic!({ - 'type' => Type::Record, - 'record' => record.to_dynamic, - }) - - puts message.to_json - - wrapper.after_item_processed(item) - - if wrapper.processed_count % LOG_BATCH_SIZE == 0 - AirbyteLogger.log("[#{wrapper.processed_count}/#{full_count}}] Reading stream #{wrapper.stream_name} is in progress") - end - end - - wrapper.after_stream_processed - end -end diff --git a/airbyte-integrations/connectors/source-mongodb/lib/mongodb_source.rb b/airbyte-integrations/connectors/source-mongodb/lib/mongodb_source.rb deleted file mode 100644 index ad30fcd839160..0000000000000 --- a/airbyte-integrations/connectors/source-mongodb/lib/mongodb_source.rb +++ /dev/null @@ -1,84 +0,0 @@ -require_relative './airbyte_protocol.rb' -require_relative './airbyte_logger.rb' - -require_relative './mongodb_stream.rb' -require_relative './mongodb_reader.rb' -require_relative './mongodb_state.rb' - -class MongodbSource - def spec - spec = JSON.parse(File.read(__dir__ + '/spec.json')) - - message = AirbyteMessage.from_dynamic!({ - 'type' => Type::Spec, - 'spec' => spec, - }) - - puts message.to_json - end - - def check(config:) - @config = JSON.parse(File.read(config)) - - result = begin - client.collections.first.find.limit(1).first - {'status' => Status::Succeeded} - rescue Exception => e - AirbyteLogger.log(e.backtrace.join("\n"), Level::Fatal) - {'status' => Status::Failed, 'message' => 'Authentication failed.'} - end - - message = AirbyteMessage.from_dynamic!({ - 'type' => Type::ConnectionStatus, - 'connectionStatus' => result, - }) - - puts message.to_json - end - - def discover(config:) - @config = JSON.parse(File.read(config)) - - streams = client.collections.map do |collection| - AirbyteLogger.log("Discovering stream #{collection.name}") - MongodbStream.new(collection: collection).discover - end - - catalog = AirbyteCatalog.from_dynamic!({ - 'streams' => streams, - }) - - puts AirbyteMessage.from_dynamic!({ - 'type' => Type::Catalog, - 'catalog' => catalog.to_dynamic - }).to_json - end - - def read(config:, catalog:, state: nil) - @config = JSON.parse(File.read(config)) - @catalog = JSON.parse(File.read(catalog)) - @state = MongodbState.new(state_file: state) - - MongodbReader.new(client: client, catalog: @catalog, state: @state).read - end - - def method_missing(m, *args, &block) - AirbyteLogger.log("There's no method called #{m}", Level::Fatal) - end - - private - - def client - @client ||= begin - uri = "mongodb://#{@config['user']}:#{@config['password']}@#{@config['host']}:#{@config['port']}/#{@config['database']}?authSource=#{@config['auth_source']}" - if !@config.fetch(:"replica_set", "").strip.empty? - uri += "&replicaSet=#{@config['replica_set']}&ssl=true" - elsif ["true", true].include?(@config.fetch("ssl", false)) - uri += "&ssl=true" - end - @client = Mongo::Client.new(uri) - @client.logger.formatter = AirbyteLogger.logger_formatter - @client - end - end -end diff --git a/airbyte-integrations/connectors/source-mongodb/lib/mongodb_state.rb b/airbyte-integrations/connectors/source-mongodb/lib/mongodb_state.rb deleted file mode 100644 index 5c95985ee721d..0000000000000 --- a/airbyte-integrations/connectors/source-mongodb/lib/mongodb_state.rb +++ /dev/null @@ -1,43 +0,0 @@ -require_relative './airbyte_protocol.rb' -require_relative './airbyte_logger.rb' - -require 'json' - -class MongodbState - - def initialize(state_file:) - @state = if state_file - JSON.parse(File.read(state_file)) - else - {} - end - - AirbyteLogger.log("Initialized with state:\n#{JSON.pretty_generate(@state)}") - end - - def get(stream_name:, cursor_field:) - @state.dig(stream_name, cursor_field) - end - - def set(stream_name:, cursor_field:, cursor:) - @state[stream_name] ||= {} - @state[stream_name][cursor_field] = cursor - end - - def dump_state! - json = @state.to_json - - AirbyteLogger.log("Saving state:\n#{JSON.pretty_generate(@state)}") - - asm = AirbyteStateMessage.from_dynamic!({ - 'data' => @state, - }) - - message = AirbyteMessage.from_dynamic!({ - 'type' => Type::State, - 'state' => asm.to_dynamic, - }) - - puts message.to_json - end -end diff --git a/airbyte-integrations/connectors/source-mongodb/lib/mongodb_stream.rb b/airbyte-integrations/connectors/source-mongodb/lib/mongodb_stream.rb deleted file mode 100644 index 4d16e4a2f48a4..0000000000000 --- a/airbyte-integrations/connectors/source-mongodb/lib/mongodb_stream.rb +++ /dev/null @@ -1,76 +0,0 @@ -require_relative './airbyte_protocol.rb' -require_relative './airbyte_logger.rb' - -require_relative './mongodb_types_explorer.rb' - -class MongodbStream - DISCOVER_LIMIT = 10_000 - - AIRBYTE_TYPES = { - boolean: 'boolean', - number: 'number', - integer: 'integer', - string: 'string', - object: 'object', - array: 'array', - } - - TYPES_MAPPING = { - Float => AIRBYTE_TYPES[:number], - Integer => AIRBYTE_TYPES[:integer], - String => AIRBYTE_TYPES[:string], - DateTime => AIRBYTE_TYPES[:string], - TrueClass => AIRBYTE_TYPES[:boolean], - FalseClass => AIRBYTE_TYPES[:boolean], - Array => AIRBYTE_TYPES[:array], - Hash => AIRBYTE_TYPES[:object], - } - FALLBACK_TYPE = AIRBYTE_TYPES[:string] - - def initialize(collection:) - @collection = collection - @properties = {} - end - - def discover - discover_properties - - AirbyteStream.from_dynamic!({ - "name" => @collection.name, - "supported_sync_modes" => [SyncMode::FullRefresh, SyncMode::Incremental], - "source_defined_cursor" => false, - "json_schema" => { - "properties": @properties - } - }).to_dynamic - end - - private - - - def discover_property_type(property) - MongodbTypesExplorer.run(collection: @collection, field: property) do |type| - TYPES_MAPPING[type] || FALLBACK_TYPE - end || FALLBACK_TYPE - end - - def discover_properties - map = "function() { for (var key in this) { emit(key, null); } }" - reduce = "function(key, stuff) { return null; }" - - opts = { - out: {inline: 1}, - raw: true, - } - - view = Mongo::Collection::View.new(@collection, {}, limit: DISCOVER_LIMIT) - props = view.map_reduce(map, reduce, opts).map do |obj| - obj['_id'] - end - - props.each do |prop| - @properties[prop] = { 'type' => discover_property_type(prop) } - AirbyteLogger.log(" #{@collection.name}.#{prop} TYPE IS #{@properties[prop]['type']}") - end - end -end diff --git a/airbyte-integrations/connectors/source-mongodb/lib/mongodb_types_converter.rb b/airbyte-integrations/connectors/source-mongodb/lib/mongodb_types_converter.rb deleted file mode 100644 index b36bd4372913a..0000000000000 --- a/airbyte-integrations/connectors/source-mongodb/lib/mongodb_types_converter.rb +++ /dev/null @@ -1,24 +0,0 @@ -require_relative './airbyte_protocol.rb' - -require_relative './mongodb_stream.rb' - -class MongodbTypesConverter - def self.convert_value_to_type(value, type) - case type - when MongodbStream::AIRBYTE_TYPES[:boolean] - !!value - when MongodbStream::AIRBYTE_TYPES[:number] - value.to_f - when MongodbStream::AIRBYTE_TYPES[:integer] - value.to_i - when MongodbStream::AIRBYTE_TYPES[:string] - value.to_s - when MongodbStream::AIRBYTE_TYPES[:object] - value.is_a?(Hash) ? value : { 'value' => value.to_s } - when MongodbStream::AIRBYTE_TYPES[:array] - value.is_a?(Array) ? value : [ value.to_s ] - else - value.to_s - end - end -end diff --git a/airbyte-integrations/connectors/source-mongodb/lib/mongodb_types_explorer.rb b/airbyte-integrations/connectors/source-mongodb/lib/mongodb_types_explorer.rb deleted file mode 100644 index 44668f5b34e1e..0000000000000 --- a/airbyte-integrations/connectors/source-mongodb/lib/mongodb_types_explorer.rb +++ /dev/null @@ -1,40 +0,0 @@ -require_relative './airbyte_protocol.rb' - -require_relative './mongodb_stream.rb' - -class MongodbTypesExplorer - EXPLORE_LIMIT = 1_000 - - @@cache = {} - - def self.run(collection:, field:, limit: EXPLORE_LIMIT, &type_mapping_block) - determine_field_types_for_collection(collection: collection, limit: limit, &type_mapping_block) - - @@cache[collection.name][field] - end - - private - - def self.determine_field_types_for_collection(collection:, limit:, &type_mapping_block) - return if @@cache[collection.name] - - airbyte_types = {} - - collection.find.limit(limit).each do |item| - item.each_pair do |key, value| - mapped_value = type_mapping_block[value.class] - - airbyte_types[key] ||= Set[] - airbyte_types[key].add(mapped_value) - end - end - - @@cache[collection.name] = {} - airbyte_types.each_pair do |field, types| - # Has one specific type - if types.count == 1 - @@cache[collection.name][field] = types.first - end - end - end -end diff --git a/airbyte-integrations/connectors/source-mongodb/lib/spec.json b/airbyte-integrations/connectors/source-mongodb/lib/spec.json deleted file mode 100644 index 390f196ccdfe4..0000000000000 --- a/airbyte-integrations/connectors/source-mongodb/lib/spec.json +++ /dev/null @@ -1,70 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.com/integrations/sources/mongodb", - "changelogUrl": "https://docs.airbyte.com/integrations/sources/mongodb", - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Mongodb Source Spec", - "type": "object", - "required": ["host", "port", "database", "user", "password", "auth_source"], - "additionalProperties": false, - "properties": { - "host": { - "title": "Host", - "type": "string", - "description": "Host of a Mongo database to be replicated.", - "order": 0 - }, - "port": { - "title": "Port", - "type": "integer", - "description": "Port of a Mongo database to be replicated.", - "minimum": 0, - "maximum": 65536, - "default": 27017, - "examples": ["27017"], - "order": 1 - }, - "database": { - "title": "Database name", - "type": "string", - "description": "Database to be replicated.", - "order": 2 - }, - "user": { - "title": "User", - "type": "string", - "description": "User", - "order": 3 - }, - "password": { - "title": "Password", - "type": "string", - "description": "Password", - "airbyte_secret": true, - "order": 4 - }, - "auth_source": { - "title": "Authentication source", - "type": "string", - "description": "Authentication source where user information is stored. See the Mongo docs for more info.", - "default": "admin", - "examples": ["admin"], - "order": 5 - }, - "replica_set": { - "title": "Replica Set", - "type": "string", - "description": "The name of the set to filter servers by, when connecting to a replica set (Under this condition, the 'TLS connection' value automatically becomes 'true'). See the Mongo docs for more info.", - "default": "", - "order": 6 - }, - "ssl": { - "title": "TLS connection", - "type": "boolean", - "description": "If this switch is enabled, TLS connections will be used to connect to MongoDB.", - "default": false, - "order": 7 - } - } - } -} diff --git a/airbyte-integrations/connectors/source-mongodb/source.rb b/airbyte-integrations/connectors/source-mongodb/source.rb deleted file mode 100644 index 5aac0c5b1ae4b..0000000000000 --- a/airbyte-integrations/connectors/source-mongodb/source.rb +++ /dev/null @@ -1,16 +0,0 @@ -require 'optparse' -require 'mongo' -require 'slop' -require_relative './lib/mongodb_source' - -# require 'byebug' - -parsed = Slop.parse do |o| - o.string '--config', 'Config file path' - o.string '--catalog', 'Catalog file path' - o.string '--state', 'State file path' -end - -opts = parsed.to_hash.select { |_, value| value } - -MongodbSource.new.public_send(parsed.arguments.first, **opts) diff --git a/airbyte-integrations/connectors/source-mssql/build.gradle b/airbyte-integrations/connectors/source-mssql/build.gradle index 627f938cba086..98c2e01f8878a 100644 --- a/airbyte-integrations/connectors/source-mssql/build.gradle +++ b/airbyte-integrations/connectors/source-mssql/build.gradle @@ -3,7 +3,7 @@ plugins { } airbyteJavaConnector { - cdkVersionRequired = '0.23.15' + cdkVersionRequired = '0.30.5' features = ['db-sources'] useLocalCdk = false } diff --git a/airbyte-integrations/connectors/source-mssql/metadata.yaml b/airbyte-integrations/connectors/source-mssql/metadata.yaml index a9a11767436a5..b0b22bf89bb6d 100644 --- a/airbyte-integrations/connectors/source-mssql/metadata.yaml +++ b/airbyte-integrations/connectors/source-mssql/metadata.yaml @@ -9,12 +9,13 @@ data: connectorSubtype: database connectorType: source definitionId: b5ea17b1-f170-46dc-bc31-cc744ca984c1 - dockerImageTag: 4.0.0 + dockerImageTag: 4.0.13 dockerRepository: airbyte/source-mssql documentationUrl: https://docs.airbyte.com/integrations/sources/mssql githubIssueLabel: source-mssql icon: mssql.svg license: ELv2 + maxSecondsBetweenMessages: 7200 name: Microsoft SQL Server (MSSQL) registries: cloud: @@ -22,7 +23,7 @@ data: oss: enabled: true releaseStage: generally_available - supportLevel: community + supportLevel: certified tags: - language:java releases: diff --git a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlCdcStateHandler.java b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlCdcStateHandler.java index 0af1ea2873d7f..3ef45dfb19695 100644 --- a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlCdcStateHandler.java +++ b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlCdcStateHandler.java @@ -41,7 +41,7 @@ public boolean isCdcCheckpointEnabled() { public AirbyteMessage saveState(final Map offset, final SchemaHistory dbHistory) { final Map state = new HashMap<>(); state.put(MSSQL_CDC_OFFSET, offset); - state.put(MSSQL_DB_HISTORY, dbHistory.schema()); + state.put(MSSQL_DB_HISTORY, dbHistory.getSchema()); state.put(IS_COMPRESSED, dbHistory.isCompressed()); final JsonNode asJson = Jsons.jsonNode(state); diff --git a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlDebeziumConverter.java b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlDebeziumConverter.java index afa721f609dd6..3625e0d8b8d3d 100644 --- a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlDebeziumConverter.java +++ b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlDebeziumConverter.java @@ -14,13 +14,9 @@ import io.debezium.spi.converter.RelationalColumn; import java.math.BigDecimal; import java.sql.Timestamp; -import java.time.LocalDateTime; -import java.time.OffsetDateTime; +import java.time.*; import java.time.format.DateTimeFormatter; -import java.util.Base64; -import java.util.Objects; -import java.util.Properties; -import java.util.Set; +import java.util.*; import microsoft.sql.DateTimeOffset; import org.apache.kafka.connect.data.SchemaBuilder; import org.slf4j.Logger; @@ -43,7 +39,7 @@ public class MssqlDebeziumConverter implements CustomConverter queryTableFullRefresh(final JdbcDatabase database, - final List columnNames, - final String schemaName, - final String tableName, - final SyncMode syncMode, - final Optional cursorField) { - LOGGER.info("Queueing query for table: {}", tableName); - // This corresponds to the initial sync for in INCREMENTAL_MODE. The ordering of the records matters - // as intermediate state messages are emitted. - if (syncMode.equals(SyncMode.INCREMENTAL)) { - final String quotedCursorField = enquoteIdentifier(cursorField.get(), getQuoteString()); - final String newIdentifiers = getWrappedColumnNames(database, null, columnNames, schemaName, tableName); - final String preparedSqlQuery = - String.format("SELECT %s FROM %s ORDER BY %s ASC", newIdentifiers, - getFullyQualifiedTableNameWithQuoting(schemaName, tableName, getQuoteString()), quotedCursorField); - LOGGER.info("Prepared SQL query for TableFullRefresh is: " + preparedSqlQuery); - return queryTable(database, preparedSqlQuery, tableName, schemaName); - } else { - // If we are in FULL_REFRESH mode, state messages are never emitted, so we don't care about ordering - // of the records. - final String newIdentifiers = getWrappedColumnNames(database, null, columnNames, schemaName, tableName); - final String preparedSqlQuery = - String.format("SELECT %s FROM %s", newIdentifiers, getFullyQualifiedTableNameWithQuoting(schemaName, tableName, getQuoteString())); - - LOGGER.info("Prepared SQL query for TableFullRefresh is: " + preparedSqlQuery); - return queryTable(database, preparedSqlQuery, tableName, schemaName); - } - } - /** * See {@link MssqlQueryUtils#getWrappedColumnNames} */ @@ -202,7 +144,7 @@ protected String getWrappedColumnNames(final JdbcDatabase database, final List columnNames, final String schemaName, final String tableName) { - return MssqlQueryUtils.getWrappedColumnNames(database, quoteString, columnNames, schemaName, tableName); + return MssqlQueryUtils.getWrappedColumnNames(database, getQuoteString(), columnNames, schemaName, tableName); } @Override @@ -468,7 +410,7 @@ public List> getIncrementalIterators(final final MssqlCursorBasedStateManager cursorBasedStateManager = new MssqlCursorBasedStateManager(stateManager.getRawStateMessages(), catalog); final InitialLoadStreams initialLoadStreams = streamsForInitialOrderedColumnLoad(cursorBasedStateManager, catalog); final Map pairToCursorBasedStatus = - getCursorBasedSyncStatusForStreams(database, initialLoadStreams.streamsForInitialLoad(), stateManager, quoteString); + getCursorBasedSyncStatusForStreams(database, initialLoadStreams.streamsForInitialLoad(), stateManager, getQuoteString()); final CursorBasedStreams cursorBasedStreams = new CursorBasedStreams(identifyStreamsForCursorBased(catalog, initialLoadStreams.streamsForInitialLoad()), pairToCursorBasedStatus); @@ -476,11 +418,11 @@ public List> getIncrementalIterators(final logStreamSyncStatus(cursorBasedStreams.streamsForCursorBased(), "Cursor"); final MssqlInitialLoadStreamStateManager mssqlInitialLoadStreamStateManager = new MssqlInitialLoadStreamStateManager(catalog, - initialLoadStreams, initPairToOrderedColumnInfoMap(database, initialLoadStreams, tableNameToTable, quoteString)); + initialLoadStreams, initPairToOrderedColumnInfoMap(database, initialLoadStreams, tableNameToTable, getQuoteString()), + namespacePair -> Jsons.jsonNode(pairToCursorBasedStatus.get(convertNameNamespacePairFromV0(namespacePair)))); final MssqlInitialLoadHandler initialLoadHandler = - new MssqlInitialLoadHandler(sourceConfig, database, new MssqlSourceOperations(), quoteString, mssqlInitialLoadStreamStateManager, - namespacePair -> Jsons.jsonNode(pairToCursorBasedStatus.get(convertNameNamespacePairFromV0(namespacePair))), - getTableSizeInfoForStreams(database, initialLoadStreams.streamsForInitialLoad(), quoteString)); + new MssqlInitialLoadHandler(sourceConfig, database, new MssqlSourceOperations(), getQuoteString(), mssqlInitialLoadStreamStateManager, + getTableSizeInfoForStreams(database, initialLoadStreams.streamsForInitialLoad(), getQuoteString())); final List> initialLoadIterator = new ArrayList<>(initialLoadHandler.getIncrementalIterators( new ConfiguredAirbyteCatalog().withStreams(initialLoadStreams.streamsForInitialLoad()), @@ -643,7 +585,7 @@ public Collection> readStreams(JsonNode co } private boolean cloudDeploymentMode() { - return AdaptiveSourceRunner.CLOUD_MODE.equalsIgnoreCase(featureFlags.deploymentMode()); + return AdaptiveSourceRunner.CLOUD_MODE.equalsIgnoreCase(getFeatureFlags().deploymentMode()); } public Duration getConnectionTimeoutMssql(final Map connectionProperties) { diff --git a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlSourceOperations.java b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlSourceOperations.java index 1e891a16a16d7..e301bbb95c46a 100644 --- a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlSourceOperations.java +++ b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlSourceOperations.java @@ -16,7 +16,9 @@ import com.microsoft.sqlserver.jdbc.Geography; import com.microsoft.sqlserver.jdbc.Geometry; import com.microsoft.sqlserver.jdbc.SQLServerResultSetMetaData; +import io.airbyte.cdk.db.jdbc.AirbyteRecordData; import io.airbyte.cdk.db.jdbc.JdbcSourceOperations; +import io.airbyte.integrations.source.mssql.initialsync.CdcMetadataInjector; import io.airbyte.protocol.models.JsonSchemaType; import java.sql.JDBCType; import java.sql.PreparedStatement; @@ -28,6 +30,7 @@ import java.time.format.DateTimeFormatter; import java.time.format.DateTimeParseException; import java.util.Base64; +import java.util.Optional; import microsoft.sql.DateTimeOffset; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -36,6 +39,29 @@ public class MssqlSourceOperations extends JdbcSourceOperations { private static final Logger LOGGER = LoggerFactory.getLogger(MssqlSourceOperations.class); + private final Optional metadataInjector; + + public MssqlSourceOperations() { + super(); + this.metadataInjector = Optional.empty(); + } + + public MssqlSourceOperations(final Optional metadataInjector) { + super(); + this.metadataInjector = metadataInjector; + } + + @Override + public AirbyteRecordData convertDatabaseRowToAirbyteRecordData(final ResultSet queryContext) throws SQLException { + final AirbyteRecordData recordData = super.convertDatabaseRowToAirbyteRecordData(queryContext); + final ObjectNode jsonNode = (ObjectNode) recordData.rawRowData(); + if (!metadataInjector.isPresent()) { + return recordData; + } + metadataInjector.get().inject(jsonNode); + return new AirbyteRecordData(jsonNode, recordData.meta()); + } + /** * The method is used to set json value by type. Need to be overridden as MSSQL has some its own * specific types (ex. Geometry, Geography, Hierarchyid, etc) @@ -45,14 +71,20 @@ public class MssqlSourceOperations extends JdbcSourceOperations { @Override public void copyToJsonField(final ResultSet resultSet, final int colIndex, final ObjectNode json) throws SQLException { - final SQLServerResultSetMetaData metadata = (SQLServerResultSetMetaData) resultSet .getMetaData(); final String columnName = metadata.getColumnName(colIndex); final String columnTypeName = metadata.getColumnTypeName(colIndex); final JDBCType columnType = safeGetJdbcType(metadata.getColumnType(colIndex)); - if (columnTypeName.equalsIgnoreCase("time")) { + // Attempt to access the column. this allows us to know if it is null before we do + // type-specific parsing. If the column is null, we will populate the null value and skip attempting + // to + // parse the column value. + resultSet.getObject(colIndex); + if (resultSet.wasNull()) { + json.putNull(columnName); + } else if (columnTypeName.equalsIgnoreCase("time")) { putTime(json, columnName, resultSet, colIndex); } else if (columnTypeName.equalsIgnoreCase("geometry")) { putGeometry(json, columnName, resultSet, colIndex); diff --git a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/cdc/MssqlDebeziumStateUtil.java b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/cdc/MssqlDebeziumStateUtil.java index e2a4064d72107..c61a3293b4af6 100644 --- a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/cdc/MssqlDebeziumStateUtil.java +++ b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/cdc/MssqlDebeziumStateUtil.java @@ -92,7 +92,7 @@ public JsonNode constructInitialDebeziumState(final Properties properties, // If no event such as an empty table, generating schema history may take a few cycles // depending on the size of history. schemaHistory = schemaHistoryStorage.read(); - schemaHistoryRead = Objects.nonNull(schemaHistory) && StringUtils.isNotBlank(schemaHistory.schema()); + schemaHistoryRead = Objects.nonNull(schemaHistory) && StringUtils.isNotBlank(schemaHistory.getSchema()); if (event != null || schemaHistoryRead) { publisher.close(); @@ -122,7 +122,7 @@ public JsonNode constructInitialDebeziumState(final Properties properties, assert !offset.isEmpty(); assert Objects.nonNull(schemaHistory); - assert Objects.nonNull(schemaHistory.schema()); + assert Objects.nonNull(schemaHistory.getSchema()); final JsonNode asJson = serialize(offset, schemaHistory); LOGGER.info("Initial Debezium state constructed. offset={}", Jsons.jsonNode(offset)); @@ -137,7 +137,7 @@ public JsonNode constructInitialDebeziumState(final Properties properties, private static JsonNode serialize(final Map offset, final SchemaHistory dbHistory) { final Map state = new HashMap<>(); state.put(MssqlCdcStateConstants.MSSQL_CDC_OFFSET, offset); - state.put(MssqlCdcStateConstants.MSSQL_DB_HISTORY, dbHistory.schema()); + state.put(MssqlCdcStateConstants.MSSQL_DB_HISTORY, dbHistory.getSchema()); state.put(MssqlCdcStateConstants.IS_COMPRESSED, dbHistory.isCompressed()); return Jsons.jsonNode(state); diff --git a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/CdcMetadataInjector.java b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/CdcMetadataInjector.java new file mode 100644 index 0000000000000..419da08fb0ee2 --- /dev/null +++ b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/CdcMetadataInjector.java @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.source.mssql.initialsync; + +import com.fasterxml.jackson.databind.node.ObjectNode; +import io.airbyte.integrations.source.mssql.MssqlCdcConnectorMetadataInjector; +import io.airbyte.integrations.source.mssql.cdc.MssqlDebeziumStateUtil.MssqlDebeziumStateAttributes; + +public class CdcMetadataInjector { + + private final String transactionTimestamp; + private final MssqlDebeziumStateAttributes stateAttributes; + private final MssqlCdcConnectorMetadataInjector metadataInjector; + + public CdcMetadataInjector(final String transactionTimestamp, + final MssqlDebeziumStateAttributes stateAttributes, + final MssqlCdcConnectorMetadataInjector metadataInjector) { + this.transactionTimestamp = transactionTimestamp; + this.stateAttributes = stateAttributes; + this.metadataInjector = metadataInjector; + } + + public void inject(final ObjectNode record) { + metadataInjector.addMetaDataToRowsFetchedOutsideDebezium(record, transactionTimestamp, stateAttributes); + } + +} diff --git a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialLoadGlobalStateManager.java b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialLoadGlobalStateManager.java index e947e4052f41d..fadda2fdd6559 100644 --- a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialLoadGlobalStateManager.java +++ b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialLoadGlobalStateManager.java @@ -24,11 +24,11 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.Function; import java.util.stream.Collectors; -public class MssqlInitialLoadGlobalStateManager implements MssqlInitialLoadStateManager { +public class MssqlInitialLoadGlobalStateManager extends MssqlInitialLoadStateManager { - private final Map pairToOrderedColLoadStatus; private final Map pairToOrderedColInfo; private final CdcState cdcState; @@ -39,11 +39,13 @@ public class MssqlInitialLoadGlobalStateManager implements MssqlInitialLoadState public MssqlInitialLoadGlobalStateManager(final InitialLoadStreams initialLoadStreams, final Map pairToOrderedColInfo, final CdcState cdcState, - final ConfiguredAirbyteCatalog catalog) { + final ConfiguredAirbyteCatalog catalog, + final Function streamStateForIncrementalRunSupplier) { this.cdcState = cdcState; this.pairToOrderedColLoadStatus = MssqlInitialLoadStateManager.initPairToOrderedColumnLoadStatusMap(initialLoadStreams.pairToInitialLoadStatus()); this.pairToOrderedColInfo = pairToOrderedColInfo; this.streamsThatHaveCompletedSnapshot = initStreamsCompletedSnapshot(initialLoadStreams, catalog); + this.streamStateForIncrementalRunSupplier = streamStateForIncrementalRunSupplier; } private static Set initStreamsCompletedSnapshot(final InitialLoadStreams initialLoadStreams, @@ -83,11 +85,6 @@ private AirbyteStreamState getAirbyteStreamState(final AirbyteStreamNameNamespac .withStreamState(stateData); } - @Override - public void updateOrderedColumnLoadState(final AirbyteStreamNameNamespacePair pair, final OrderedColumnLoadStatus ocLoadStatus) { - pairToOrderedColLoadStatus.put(pair, ocLoadStatus); - } - @Override public AirbyteStateMessage createFinalStateMessage(final AirbyteStreamNameNamespacePair pair, final JsonNode streamStateForIncrementalRun) { streamsThatHaveCompletedSnapshot.add(pair); @@ -105,11 +102,6 @@ public AirbyteStateMessage createFinalStateMessage(final AirbyteStreamNameNamesp .withGlobal(globalState); } - @Override - public OrderedColumnLoadStatus getOrderedColumnLoadStatus(final AirbyteStreamNameNamespacePair pair) { - return pairToOrderedColLoadStatus.get(pair); - } - @Override public OrderedColumnInfo getOrderedColumnInfo(final AirbyteStreamNameNamespacePair pair) { return pairToOrderedColInfo.get(pair); diff --git a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialLoadHandler.java b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialLoadHandler.java index 2b7b01db0f9bb..97928ed19a0e5 100644 --- a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialLoadHandler.java +++ b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialLoadHandler.java @@ -15,11 +15,14 @@ import com.fasterxml.jackson.databind.JsonNode; import com.google.common.annotations.VisibleForTesting; import io.airbyte.cdk.db.SqlDatabase; +import io.airbyte.cdk.db.jdbc.AirbyteRecordData; import io.airbyte.cdk.db.jdbc.JdbcDatabase; import io.airbyte.cdk.db.jdbc.JdbcUtils; +import io.airbyte.cdk.integrations.debezium.DebeziumIteratorConstants; import io.airbyte.cdk.integrations.source.relationaldb.DbSourceDiscoverUtil; import io.airbyte.cdk.integrations.source.relationaldb.TableInfo; -import io.airbyte.cdk.integrations.source.relationaldb.models.OrderedColumnLoadStatus; +import io.airbyte.cdk.integrations.source.relationaldb.state.SourceStateIterator; +import io.airbyte.cdk.integrations.source.relationaldb.state.StateEmitFrequency; import io.airbyte.commons.stream.AirbyteStreamUtils; import io.airbyte.commons.util.AutoCloseableIterator; import io.airbyte.commons.util.AutoCloseableIterators; @@ -30,6 +33,7 @@ import io.airbyte.protocol.models.v0.AirbyteMessage; import io.airbyte.protocol.models.v0.AirbyteMessage.Type; import io.airbyte.protocol.models.v0.AirbyteRecordMessage; +import io.airbyte.protocol.models.v0.AirbyteRecordMessageMeta; import io.airbyte.protocol.models.v0.AirbyteStream; import io.airbyte.protocol.models.v0.CatalogHelpers; import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; @@ -42,7 +46,6 @@ import java.time.Instant; import java.util.*; import java.util.concurrent.atomic.AtomicLong; -import java.util.function.Function; import java.util.stream.Stream; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -58,7 +61,6 @@ public class MssqlInitialLoadHandler { private final MssqlInitialLoadStateManager initialLoadStateManager; private static final long QUERY_TARGET_SIZE_GB = 1_073_741_824; private static final long DEFAULT_CHUNK_SIZE = 1_000_000; - private final Function streamStateForIncrementalRunSupplier; final Map tableSizeInfoMap; public MssqlInitialLoadHandler( @@ -67,14 +69,12 @@ public MssqlInitialLoadHandler( final MssqlSourceOperations sourceOperations, final String quoteString, final MssqlInitialLoadStateManager initialLoadStateManager, - final Function streamStateForIncrementalRunSupplier, final Map tableSizeInfoMap) { this.config = config; this.database = database; this.sourceOperations = sourceOperations; this.quoteString = quoteString; this.initialLoadStateManager = initialLoadStateManager; - this.streamStateForIncrementalRunSupplier = streamStateForIncrementalRunSupplier; this.tableSizeInfoMap = tableSizeInfoMap; } @@ -168,12 +168,12 @@ public List> getIncrementalIterators( } }); - final AutoCloseableIterator queryStream = + final AutoCloseableIterator queryStream = new MssqlInitialLoadRecordIterator(database, sourceOperations, quoteString, initialLoadStateManager, selectedDatabaseFields, pair, calculateChunkSize(tableSizeInfoMap.get(pair), pair), isCompositePrimaryKey(airbyteStream)); final AutoCloseableIterator recordIterator = getRecordIterator(queryStream, streamName, namespace, emittedAt.toEpochMilli()); - final AutoCloseableIterator recordAndMessageIterator = augmentWithState(recordIterator, pair); + final AutoCloseableIterator recordAndMessageIterator = augmentWithState(recordIterator, airbyteStream); iteratorList.add(augmentWithLogs(recordAndMessageIterator, pair, streamName)); } } @@ -182,7 +182,7 @@ public List> getIncrementalIterators( // Transforms the given iterator to create an {@link AirbyteRecordMessage} private AutoCloseableIterator getRecordIterator( - final AutoCloseableIterator recordIterator, + final AutoCloseableIterator recordIterator, final String streamName, final String namespace, final long emittedAt) { @@ -192,7 +192,12 @@ private AutoCloseableIterator getRecordIterator( .withStream(streamName) .withNamespace(namespace) .withEmittedAt(emittedAt) - .withData(r))); + .withData(r.rawRowData()) + .withMeta(isMetaChangesEmptyOrNull(r.meta()) ? null : r.meta()))); + } + + private boolean isMetaChangesEmptyOrNull(AirbyteRecordMessageMeta meta) { + return meta == null || meta.getChanges() == null || meta.getChanges().isEmpty(); } // Augments the given iterator with record count logs. @@ -212,22 +217,18 @@ private AutoCloseableIterator augmentWithLogs(final AutoCloseabl } private AutoCloseableIterator augmentWithState(final AutoCloseableIterator recordIterator, - final AirbyteStreamNameNamespacePair pair) { - final OrderedColumnLoadStatus currentOcLoadStatus = initialLoadStateManager.getOrderedColumnLoadStatus(pair); - final JsonNode incrementalState = - (currentOcLoadStatus == null || currentOcLoadStatus.getIncrementalState() == null) - ? streamStateForIncrementalRunSupplier.apply(pair) - : currentOcLoadStatus.getIncrementalState(); + final ConfiguredAirbyteStream stream) { + final AirbyteStreamNameNamespacePair pair = new AirbyteStreamNameNamespacePair(stream.getStream().getName(), stream.getStream().getNamespace()); final Duration syncCheckpointDuration = config.get(SYNC_CHECKPOINT_DURATION_PROPERTY) != null ? Duration.ofSeconds(config.get(SYNC_CHECKPOINT_DURATION_PROPERTY).asLong()) - : MssqlInitialSyncStateIterator.SYNC_CHECKPOINT_DURATION; + : DebeziumIteratorConstants.SYNC_CHECKPOINT_DURATION; final Long syncCheckpointRecords = config.get(SYNC_CHECKPOINT_RECORDS_PROPERTY) != null ? config.get(SYNC_CHECKPOINT_RECORDS_PROPERTY).asLong() - : MssqlInitialSyncStateIterator.SYNC_CHECKPOINT_RECORDS; + : DebeziumIteratorConstants.SYNC_CHECKPOINT_RECORDS; return AutoCloseableIterators.transformIterator( - r -> new MssqlInitialSyncStateIterator(r, pair, initialLoadStateManager, incrementalState, syncCheckpointDuration, syncCheckpointRecords), + r -> new SourceStateIterator<>(r, stream, initialLoadStateManager, new StateEmitFrequency(syncCheckpointRecords, syncCheckpointDuration)), recordIterator, pair); } diff --git a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialLoadRecordIterator.java b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialLoadRecordIterator.java index 54c94aff8707b..676cf497eb3b3 100644 --- a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialLoadRecordIterator.java +++ b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialLoadRecordIterator.java @@ -7,9 +7,9 @@ import static io.airbyte.cdk.integrations.source.relationaldb.RelationalDbQueryUtils.enquoteIdentifier; import static io.airbyte.cdk.integrations.source.relationaldb.RelationalDbQueryUtils.getFullyQualifiedTableNameWithQuoting; -import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.AbstractIterator; import io.airbyte.cdk.db.JdbcCompatibleSourceOperations; +import io.airbyte.cdk.db.jdbc.AirbyteRecordData; import io.airbyte.cdk.db.jdbc.JdbcDatabase; import io.airbyte.cdk.integrations.source.relationaldb.models.OrderedColumnLoadStatus; import io.airbyte.commons.util.AutoCloseableIterator; @@ -28,12 +28,12 @@ import org.slf4j.LoggerFactory; @SuppressWarnings("try") -public class MssqlInitialLoadRecordIterator extends AbstractIterator - implements AutoCloseableIterator { +public class MssqlInitialLoadRecordIterator extends AbstractIterator + implements AutoCloseableIterator { private static final Logger LOGGER = LoggerFactory.getLogger(MssqlInitialLoadRecordIterator.class); - private AutoCloseableIterator currentIterator; + private AutoCloseableIterator currentIterator; private final JdbcDatabase database; private int numSubqueries = 0; private final String quoteString; @@ -67,7 +67,7 @@ public class MssqlInitialLoadRecordIterator extends AbstractIterator @CheckForNull @Override - protected JsonNode computeNext() { + protected AirbyteRecordData computeNext() { if (shouldBuildNextSubquery()) { try { // We will only issue one query for a composite key load. If we have already processed all the data @@ -82,8 +82,8 @@ protected JsonNode computeNext() { } LOGGER.info("Subquery number : {}", numSubqueries); - final Stream stream = database.unsafeQuery( - this::getOcPreparedStatement, sourceOperations::rowToJson); + final Stream stream = database.unsafeQuery( + this::getOcPreparedStatement, sourceOperations::convertDatabaseRowToAirbyteRecordData); currentIterator = AutoCloseableIterators.fromStream(stream, pair); numSubqueries++; // If the current subquery has no records associated with it, the entire stream has been read. diff --git a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialLoadSourceOperations.java b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialLoadSourceOperations.java deleted file mode 100644 index 506f1c26c257d..0000000000000 --- a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialLoadSourceOperations.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.source.mssql.initialsync; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import io.airbyte.commons.json.Jsons; -import io.airbyte.integrations.source.mssql.MssqlCdcConnectorMetadataInjector; -import io.airbyte.integrations.source.mssql.MssqlSourceOperations; -import io.airbyte.integrations.source.mssql.cdc.MssqlDebeziumStateUtil.MssqlDebeziumStateAttributes; -import java.sql.ResultSet; -import java.sql.ResultSetMetaData; -import java.sql.SQLException; -import java.util.Collections; -import java.util.Optional; - -public class MssqlInitialLoadSourceOperations extends MssqlSourceOperations { - - private final Optional metadataInjector; - - public MssqlInitialLoadSourceOperations(final Optional metadataInjector) { - super(); - this.metadataInjector = metadataInjector; - } - - @Override - public JsonNode rowToJson(final ResultSet queryContext) throws SQLException { - if (metadataInjector.isPresent()) { - // the first call communicates with the database. after that the result is cached. - final ResultSetMetaData metadata = queryContext.getMetaData(); - final int columnCount = metadata.getColumnCount(); - final ObjectNode jsonNode = (ObjectNode) Jsons.jsonNode(Collections.emptyMap()); - for (int i = 1; i <= columnCount; i++) { - // attempt to access the column. this allows us to know if it is null before we do type-specific - // parsing. if it is null, we can move on. while awkward, this seems to be the agreed upon way of - // checking for null values with jdbc. - queryContext.getObject(i); - if (queryContext.wasNull()) { - continue; - } - - // convert to java types that will convert into reasonable json. - copyToJsonField(queryContext, i, jsonNode); - } - - metadataInjector.get().inject(jsonNode); - return jsonNode; - } else { - return super.rowToJson(queryContext); - } - } - - public static class CdcMetadataInjector { - - private final String transactionTimestamp; - private final MssqlDebeziumStateAttributes stateAttributes; - private final MssqlCdcConnectorMetadataInjector metadataInjector; - - public CdcMetadataInjector(final String transactionTimestamp, - final MssqlDebeziumStateAttributes stateAttributes, - final MssqlCdcConnectorMetadataInjector metadataInjector) { - this.transactionTimestamp = transactionTimestamp; - this.stateAttributes = stateAttributes; - this.metadataInjector = metadataInjector; - } - - private void inject(final ObjectNode record) { - metadataInjector.addMetaDataToRowsFetchedOutsideDebezium(record, transactionTimestamp, stateAttributes); - } - - } - -} diff --git a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialLoadStateManager.java b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialLoadStateManager.java index c253ba765dfe8..836d2b5c5b794 100644 --- a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialLoadStateManager.java +++ b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialLoadStateManager.java @@ -5,19 +5,30 @@ package io.airbyte.integrations.source.mssql.initialsync; import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.cdk.integrations.source.relationaldb.models.InternalModels.StateType; import io.airbyte.cdk.integrations.source.relationaldb.models.OrderedColumnLoadStatus; +import io.airbyte.cdk.integrations.source.relationaldb.state.SourceStateMessageProducer; import io.airbyte.integrations.source.mssql.initialsync.MssqlInitialReadUtil.OrderedColumnInfo; import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; +import io.airbyte.protocol.models.v0.AirbyteMessage; import io.airbyte.protocol.models.v0.AirbyteStateMessage; +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; import java.util.Map; import java.util.Map.Entry; +import java.util.Objects; +import java.util.function.Function; import java.util.stream.Collectors; -public interface MssqlInitialLoadStateManager { +public abstract class MssqlInitialLoadStateManager implements SourceStateMessageProducer { - public static long MSSQL_STATE_VERSION = 2; - String STATE_TYPE_KEY = "state_type"; - String ORDERED_COL_STATE_TYPE = "ordered_column"; + public static final long MSSQL_STATE_VERSION = 2; + public static final String STATE_TYPE_KEY = "state_type"; + public static final String ORDERED_COL_STATE_TYPE = "ordered_column"; + protected Map pairToOrderedColLoadStatus; + + private OrderedColumnLoadStatus ocStatus; + + protected Function streamStateForIncrementalRunSupplier; /** * Returns an intermediate state message for the initial sync. @@ -26,7 +37,8 @@ public interface MssqlInitialLoadStateManager { * @param ocLoadStatus ordered column load status * @return state message */ - AirbyteStateMessage createIntermediateStateMessage(final AirbyteStreamNameNamespacePair pair, final OrderedColumnLoadStatus ocLoadStatus); + public abstract AirbyteStateMessage createIntermediateStateMessage(final AirbyteStreamNameNamespacePair pair, + final OrderedColumnLoadStatus ocLoadStatus); /** * Updates the {@link OrderedColumnLoadStatus} for the state associated with the given pair. @@ -34,7 +46,9 @@ public interface MssqlInitialLoadStateManager { * @param pair pair * @param ocLoadStatus updated status */ - void updateOrderedColumnLoadState(final AirbyteStreamNameNamespacePair pair, final OrderedColumnLoadStatus ocLoadStatus); + public void updateOrderedColumnLoadState(final AirbyteStreamNameNamespacePair pair, final OrderedColumnLoadStatus ocLoadStatus) { + pairToOrderedColLoadStatus.put(pair, ocLoadStatus); + } /** * Returns the final state message for the initial sync.. @@ -43,7 +57,7 @@ public interface MssqlInitialLoadStateManager { * @param streamStateForIncrementalRun incremental status * @return state message */ - AirbyteStateMessage createFinalStateMessage(final AirbyteStreamNameNamespacePair pair, final JsonNode streamStateForIncrementalRun); + public abstract AirbyteStateMessage createFinalStateMessage(final AirbyteStreamNameNamespacePair pair, final JsonNode streamStateForIncrementalRun); /** * Returns the previous state emitted. Represented as a {@link OrderedColumnLoadStatus} associated @@ -52,7 +66,9 @@ public interface MssqlInitialLoadStateManager { * @param pair pair * @return load status */ - OrderedColumnLoadStatus getOrderedColumnLoadStatus(final AirbyteStreamNameNamespacePair pair); + public OrderedColumnLoadStatus getOrderedColumnLoadStatus(final AirbyteStreamNameNamespacePair pair) { + return pairToOrderedColLoadStatus.get(pair); + } /** * Returns the current {@OrderedColumnInfo}, associated with the stream. This includes the data type @@ -61,7 +77,7 @@ public interface MssqlInitialLoadStateManager { * @param pair pair * @return load status */ - OrderedColumnInfo getOrderedColumnInfo(final AirbyteStreamNameNamespacePair pair); + public abstract OrderedColumnInfo getOrderedColumnInfo(final AirbyteStreamNameNamespacePair pair); static Map initPairToOrderedColumnLoadStatusMap( final Map pairToOcStatus) { @@ -71,4 +87,45 @@ static Map initPairToOr Entry::getValue)); } + @Override + public AirbyteStateMessage generateStateMessageAtCheckpoint(final ConfiguredAirbyteStream stream) { + final AirbyteStreamNameNamespacePair pair = new AirbyteStreamNameNamespacePair(stream.getStream().getName(), stream.getStream().getNamespace()); + return createIntermediateStateMessage(pair, ocStatus); + } + + @Override + public AirbyteMessage processRecordMessage(final ConfiguredAirbyteStream stream, final AirbyteMessage message) { + final AirbyteStreamNameNamespacePair pair = new AirbyteStreamNameNamespacePair(stream.getStream().getName(), stream.getStream().getNamespace()); + final String ocFieldName = getOrderedColumnInfo(pair).ocFieldName(); + final String lastOcVal = message.getRecord().getData().get(ocFieldName).asText(); + ocStatus = new OrderedColumnLoadStatus() + .withVersion(MSSQL_STATE_VERSION) + .withStateType(StateType.ORDERED_COLUMN) + .withOrderedCol(ocFieldName) + .withOrderedColVal(lastOcVal) + .withIncrementalState(getIncrementalState(stream)); + updateOrderedColumnLoadState(pair, ocStatus); + return message; + } + + @Override + public AirbyteStateMessage createFinalStateMessage(final ConfiguredAirbyteStream stream) { + final AirbyteStreamNameNamespacePair pair = new AirbyteStreamNameNamespacePair(stream.getStream().getName(), stream.getStream().getNamespace()); + return createFinalStateMessage(pair, getIncrementalState(stream)); + } + + @Override + public boolean shouldEmitStateMessage(final ConfiguredAirbyteStream stream) { + return Objects.nonNull(getOrderedColumnInfo(new AirbyteStreamNameNamespacePair(stream.getStream().getName(), stream.getStream().getNamespace()))); + } + + private JsonNode getIncrementalState(final ConfiguredAirbyteStream stream) { + final AirbyteStreamNameNamespacePair pair = new AirbyteStreamNameNamespacePair(stream.getStream().getName(), stream.getStream().getNamespace()); + final OrderedColumnLoadStatus currentOcLoadStatus = getOrderedColumnLoadStatus(pair); + + return (currentOcLoadStatus == null || currentOcLoadStatus.getIncrementalState() == null) + ? streamStateForIncrementalRunSupplier.apply(pair) + : currentOcLoadStatus.getIncrementalState(); + } + } diff --git a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialLoadStreamStateManager.java b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialLoadStreamStateManager.java index 4348e75e0348d..9596a34547f67 100644 --- a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialLoadStreamStateManager.java +++ b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialLoadStreamStateManager.java @@ -17,6 +17,7 @@ import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.v0.StreamDescriptor; import java.util.Map; +import java.util.function.Function; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -25,23 +26,18 @@ * keys to the stream state when they're going through the iterator Once we have verified that * expanding StreamStateManager itself to include this functionality, this class will be removed */ -public class MssqlInitialLoadStreamStateManager implements MssqlInitialLoadStateManager { +public class MssqlInitialLoadStreamStateManager extends MssqlInitialLoadStateManager { private static final Logger LOGGER = LoggerFactory.getLogger(MssqlInitialLoadStateManager.class); - private final Map pairToOrderedColLoadStatus; - private final Map pairToOrderedColInfo; public MssqlInitialLoadStreamStateManager(final ConfiguredAirbyteCatalog catalog, final InitialLoadStreams initialLoadStreams, - final Map pairToOrderedColInfo) { + final Map pairToOrderedColInfo, + final Function streamStateForIncrementalRunSupplier) { this.pairToOrderedColInfo = pairToOrderedColInfo; this.pairToOrderedColLoadStatus = MssqlInitialLoadStateManager.initPairToOrderedColumnLoadStatusMap(initialLoadStreams.pairToInitialLoadStatus()); - } - - @Override - public void updateOrderedColumnLoadState(final AirbyteStreamNameNamespacePair pair, final OrderedColumnLoadStatus ocLoadStatus) { - pairToOrderedColLoadStatus.put(pair, ocLoadStatus); + this.streamStateForIncrementalRunSupplier = streamStateForIncrementalRunSupplier; } @Override @@ -51,11 +47,6 @@ public AirbyteStateMessage createFinalStateMessage(final AirbyteStreamNameNamesp .withStream(getAirbyteStreamState(pair, streamStateForIncrementalRun)); } - @Override - public OrderedColumnLoadStatus getOrderedColumnLoadStatus(final AirbyteStreamNameNamespacePair pair) { - return pairToOrderedColLoadStatus.get(pair); - } - @Override public OrderedColumnInfo getOrderedColumnInfo(final AirbyteStreamNameNamespacePair pair) { return pairToOrderedColInfo.get(pair); diff --git a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialReadUtil.java b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialReadUtil.java index 7aa60d045d682..9d656ddc557ad 100644 --- a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialReadUtil.java +++ b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialReadUtil.java @@ -40,9 +40,9 @@ import io.airbyte.integrations.source.mssql.MssqlCdcStateHandler; import io.airbyte.integrations.source.mssql.MssqlCdcTargetPosition; import io.airbyte.integrations.source.mssql.MssqlQueryUtils; +import io.airbyte.integrations.source.mssql.MssqlSourceOperations; import io.airbyte.integrations.source.mssql.cdc.MssqlDebeziumStateUtil; import io.airbyte.integrations.source.mssql.cdc.MssqlDebeziumStateUtil.MssqlDebeziumStateAttributes; -import io.airbyte.integrations.source.mssql.initialsync.MssqlInitialLoadSourceOperations.CdcMetadataInjector; import io.airbyte.protocol.models.CommonField; import io.airbyte.protocol.models.v0.AirbyteMessage; import io.airbyte.protocol.models.v0.AirbyteStateMessage; @@ -136,15 +136,14 @@ public static List> getCdcReadIterators(fi final MssqlInitialLoadStateManager initialLoadStateManager = new MssqlInitialLoadGlobalStateManager(initialLoadStreams, initPairToOrderedColumnInfoMap(database, initialLoadStreams, tableNameToTable, quoteString), - stateToBeUsed, catalog); + stateToBeUsed, catalog, namespacePair -> Jsons.emptyObject()); final MssqlDebeziumStateAttributes stateAttributes = MssqlDebeziumStateUtil.getStateAttributesFromDB(database); - final MssqlInitialLoadSourceOperations sourceOperations = - new MssqlInitialLoadSourceOperations(Optional.of(new CdcMetadataInjector(emittedAt.toString(), stateAttributes, metadataInjector))); + final MssqlSourceOperations sourceOperations = + new MssqlSourceOperations(Optional.of(new CdcMetadataInjector(emittedAt.toString(), stateAttributes, metadataInjector))); final MssqlInitialLoadHandler initialLoadHandler = new MssqlInitialLoadHandler(sourceConfig, database, sourceOperations, quoteString, initialLoadStateManager, - namespacePair -> Jsons.emptyObject(), getTableSizeInfoForStreams(database, initialLoadStreams.streamsForInitialLoad(), quoteString)); initialLoadIterator.addAll(initialLoadHandler.getIncrementalIterators( diff --git a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialSyncStateIterator.java b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialSyncStateIterator.java deleted file mode 100644 index b96b8a705deef..0000000000000 --- a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialSyncStateIterator.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.source.mssql.initialsync; - -import static io.airbyte.integrations.source.mssql.initialsync.MssqlInitialLoadStateManager.MSSQL_STATE_VERSION; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.AbstractIterator; -import io.airbyte.cdk.integrations.debezium.DebeziumIteratorConstants; -import io.airbyte.cdk.integrations.source.relationaldb.models.InternalModels.StateType; -import io.airbyte.cdk.integrations.source.relationaldb.models.OrderedColumnLoadStatus; -import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteMessage.Type; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import java.time.Duration; -import java.time.Instant; -import java.time.OffsetDateTime; -import java.util.Iterator; -import java.util.Objects; -import javax.annotation.CheckForNull; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class MssqlInitialSyncStateIterator extends AbstractIterator implements Iterator { - - private static final Logger LOGGER = LoggerFactory.getLogger(MssqlInitialSyncStateIterator.class); - public static final Duration SYNC_CHECKPOINT_DURATION = DebeziumIteratorConstants.SYNC_CHECKPOINT_DURATION; - public static final Integer SYNC_CHECKPOINT_RECORDS = DebeziumIteratorConstants.SYNC_CHECKPOINT_RECORDS; - - private final Iterator messageIterator; - private final AirbyteStreamNameNamespacePair pair; - private boolean hasEmittedFinalState = false; - private OrderedColumnLoadStatus ocStatus; - private final JsonNode streamStateForIncrementalRun; - private final MssqlInitialLoadStateManager stateManager; - private long recordCount = 0L; - private Instant lastCheckpoint = Instant.now(); - private final Duration syncCheckpointDuration; - private final Long syncCheckpointRecords; - private final String ocFieldName; - - public MssqlInitialSyncStateIterator(final Iterator messageIterator, - final AirbyteStreamNameNamespacePair pair, - final MssqlInitialLoadStateManager stateManager, - final JsonNode streamStateForIncrementalRun, - final Duration checkpointDuration, - final Long checkpointRecords) { - this.messageIterator = messageIterator; - this.pair = pair; - this.stateManager = stateManager; - this.streamStateForIncrementalRun = streamStateForIncrementalRun; - this.syncCheckpointDuration = checkpointDuration; - this.syncCheckpointRecords = checkpointRecords; - this.ocFieldName = stateManager.getOrderedColumnInfo(pair).ocFieldName(); - this.ocStatus = stateManager.getOrderedColumnLoadStatus(pair); - } - - @CheckForNull - @Override - protected AirbyteMessage computeNext() { - if (messageIterator.hasNext()) { - if ((recordCount >= syncCheckpointRecords || Duration.between(lastCheckpoint, OffsetDateTime.now()).compareTo(syncCheckpointDuration) > 0) - && Objects.nonNull(ocStatus)) { - LOGGER.info("Emitting initial sync ordered col state for stream {}, state is {}", pair, ocStatus); - recordCount = 0L; - lastCheckpoint = Instant.now(); - return new AirbyteMessage() - .withType(Type.STATE) - .withState(stateManager.createIntermediateStateMessage(pair, ocStatus)); - } - // Use try-catch to catch Exception that could occur when connection to the database fails - try { - final AirbyteMessage message = messageIterator.next(); - if (Objects.nonNull(message)) { - final String lastOcVal = message.getRecord().getData().get(ocFieldName).asText(); - ocStatus = new OrderedColumnLoadStatus() - .withVersion(MSSQL_STATE_VERSION) - .withStateType(StateType.ORDERED_COLUMN) - .withOrderedCol(ocFieldName) - .withOrderedColVal(lastOcVal) - .withIncrementalState(streamStateForIncrementalRun); - stateManager.updateOrderedColumnLoadState(pair, ocStatus); - } - recordCount++; - return message; - } catch (final Exception e) { - throw new RuntimeException(e); - } - } else if (!hasEmittedFinalState) { - hasEmittedFinalState = true; - final AirbyteStateMessage finalStateMessage = stateManager.createFinalStateMessage(pair, streamStateForIncrementalRun); - LOGGER.info("Finished initial sync of stream {}, Emitting final state.", pair); - return new AirbyteMessage() - .withType(Type.STATE) - .withState(finalStateMessage); - } else { - return endOfData(); - } - } - -} diff --git a/airbyte-integrations/connectors/source-mssql/src/main/resources/spec.json b/airbyte-integrations/connectors/source-mssql/src/main/resources/spec.json index a984211957ccf..082967b199bc0 100644 --- a/airbyte-integrations/connectors/source-mssql/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/source-mssql/src/main/resources/spec.json @@ -135,10 +135,10 @@ "initial_waiting_seconds": { "type": "integer", "title": "Initial Waiting Time in Seconds (Advanced)", - "description": "The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.", + "description": "The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 3600 seconds. Read about initial waiting time.", "default": 300, "min": 120, - "max": 1200, + "max": 3600, "order": 3 }, "invalid_cdc_cursor_position_behavior": { diff --git a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/AbstractMssqlSourceDatatypeTest.java b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/AbstractMssqlSourceDatatypeTest.java index c8b22931a464f..4a85ce730a6f3 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/AbstractMssqlSourceDatatypeTest.java +++ b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/AbstractMssqlSourceDatatypeTest.java @@ -344,6 +344,14 @@ protected void initTests() { } + addDataTypeTestData( + TestDataHolder.builder() + .sourceType("int") + .airbyteType(JsonSchemaType.INTEGER) + .addInsertValues("null", "1234", "7878") + .addExpectedValues(null, "1234", "7878") + .createTablePatternSql("CREATE TABLE %1$s(%2$s INTEGER NULL DEFAULT ((7878)), %3$s %4$s)") + .build()); } } diff --git a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/AbstractSshMssqlSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/AbstractSshMssqlSourceAcceptanceTest.java index a1b5deafcc8e4..41242ca839753 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/AbstractSshMssqlSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/AbstractSshMssqlSourceAcceptanceTest.java @@ -38,8 +38,9 @@ public abstract class AbstractSshMssqlSourceAcceptanceTest extends SourceAccepta static private final Logger LOGGER = LoggerFactory.getLogger(AbstractSshMssqlSourceAcceptanceTest.class); - private static final String STREAM_NAME = "dbo.id_and_name"; - private static final String STREAM_NAME2 = "dbo.starships"; + private static final String SCHEMA_NAME = "dbo"; + private static final String STREAM_NAME = "id_and_name"; + private static final String STREAM_NAME2 = "starships"; public abstract SshTunnel.TunnelMethod getTunnelMethod(); @@ -126,7 +127,7 @@ protected ConfiguredAirbyteCatalog getConfiguredCatalog() { .withCursorField(Lists.newArrayList("id")) .withDestinationSyncMode(DestinationSyncMode.APPEND) .withStream(CatalogHelpers.createAirbyteStream( - STREAM_NAME, + STREAM_NAME, SCHEMA_NAME, Field.of("id", JsonSchemaType.NUMBER), Field.of("name", JsonSchemaType.STRING)) .withSupportedSyncModes( @@ -136,7 +137,7 @@ protected ConfiguredAirbyteCatalog getConfiguredCatalog() { .withCursorField(Lists.newArrayList("id")) .withDestinationSyncMode(DestinationSyncMode.APPEND) .withStream(CatalogHelpers.createAirbyteStream( - STREAM_NAME2, + STREAM_NAME2, SCHEMA_NAME, Field.of("id", JsonSchemaType.NUMBER), Field.of("name", JsonSchemaType.STRING)) .withSupportedSyncModes( diff --git a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceAcceptanceTest.java index 0db3f1eb31cb2..e9179d2b8b951 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceAcceptanceTest.java @@ -4,9 +4,13 @@ package io.airbyte.integrations.source.mssql; -import static org.junit.jupiter.api.Assertions.assertEquals; +import static io.airbyte.protocol.models.v0.SyncMode.FULL_REFRESH; +import static io.airbyte.protocol.models.v0.SyncMode.INCREMENTAL; +import static org.junit.jupiter.api.Assertions.*; import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import io.airbyte.cdk.integrations.base.ssh.SshHelpers; @@ -28,7 +32,9 @@ import io.airbyte.protocol.models.v0.DestinationSyncMode; import io.airbyte.protocol.models.v0.SyncMode; import java.util.List; +import java.util.Objects; import java.util.stream.Collectors; +import org.junit.Assert; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.TestInstance; import org.junit.jupiter.api.TestInstance.Lifecycle; @@ -43,6 +49,7 @@ public class CdcMssqlSourceAcceptanceTest extends SourceAcceptanceTest { private static final String STREAM_NAME = "id_and_name"; private static final String STREAM_NAME2 = "starships"; private static final String CDC_ROLE_NAME = "cdc_selector"; + private static final String STREAM_NAME3 = "stream3"; private MsSQLTestDatabase testdb; @@ -75,8 +82,7 @@ protected List getConfiguredAirbyteStreams() { .withSyncMode(SyncMode.INCREMENTAL) .withDestinationSyncMode(DestinationSyncMode.APPEND) .withStream(CatalogHelpers.createAirbyteStream( - String.format("%s", STREAM_NAME), - String.format("%s", SCHEMA_NAME), + STREAM_NAME, SCHEMA_NAME, Field.of("id", JsonSchemaType.NUMBER), Field.of("name", JsonSchemaType.STRING)) .withSourceDefinedCursor(true) @@ -87,8 +93,7 @@ protected List getConfiguredAirbyteStreams() { .withSyncMode(SyncMode.INCREMENTAL) .withDestinationSyncMode(DestinationSyncMode.APPEND) .withStream(CatalogHelpers.createAirbyteStream( - String.format("%s", STREAM_NAME2), - String.format("%s", SCHEMA_NAME), + STREAM_NAME2, SCHEMA_NAME, Field.of("id", JsonSchemaType.NUMBER), Field.of("name", JsonSchemaType.STRING)) .withSourceDefinedCursor(true) @@ -111,12 +116,15 @@ protected void setupEnvironment(final TestDestinationEnv environment) { // create tables .with("CREATE TABLE %s.%s(id INTEGER PRIMARY KEY, name VARCHAR(200));", SCHEMA_NAME, STREAM_NAME) .with("CREATE TABLE %s.%s(id INTEGER PRIMARY KEY, name VARCHAR(200));", SCHEMA_NAME, STREAM_NAME2) + .with("CREATE TABLE %s.%s (id INTEGER PRIMARY KEY, name VARCHAR(200), userid INTEGER DEFAULT NULL);", SCHEMA_NAME, STREAM_NAME3) // populate tables .with("INSERT INTO %s.%s (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');", SCHEMA_NAME, STREAM_NAME) .with("INSERT INTO %s.%s (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');", SCHEMA_NAME, STREAM_NAME2) + .with("INSERT INTO %s.%s (id, name) VALUES (4,'voyager');", SCHEMA_NAME, STREAM_NAME3) // enable cdc on tables for designated role .withCdcForTable(SCHEMA_NAME, STREAM_NAME, CDC_ROLE_NAME) .withCdcForTable(SCHEMA_NAME, STREAM_NAME2, CDC_ROLE_NAME) + .withCdcForTable(SCHEMA_NAME, STREAM_NAME3, CDC_ROLE_NAME) // revoke user permissions .with("REVOKE ALL FROM %s CASCADE;", testdb.getUserName()) .with("EXEC sp_msforeachtable \"REVOKE ALL ON '?' TO %s;\"", testdb.getUserName()) @@ -177,4 +185,63 @@ private List filterStateMessages(final List .collect(Collectors.toList()); } + @Test + protected void testNullValueConversion() throws Exception { + final List configuredAirbyteStreams = + Lists.newArrayList(new ConfiguredAirbyteStream() + .withSyncMode(INCREMENTAL) + .withDestinationSyncMode(DestinationSyncMode.APPEND) + .withStream(CatalogHelpers.createAirbyteStream(STREAM_NAME3, + SCHEMA_NAME, + Field.of("id", JsonSchemaType.NUMBER), + Field.of("name", JsonSchemaType.STRING), + Field.of("userid", JsonSchemaType.NUMBER)) + .withSourceDefinedCursor(true) + .withSourceDefinedPrimaryKey(List.of(List.of("id"))) + .withSupportedSyncModes(Lists.newArrayList(FULL_REFRESH, INCREMENTAL)))); + + final ConfiguredAirbyteCatalog configuredCatalogWithOneStream = + new ConfiguredAirbyteCatalog().withStreams(List.of(configuredAirbyteStreams.get(0))); + + final List airbyteMessages = runRead(configuredCatalogWithOneStream, getState()); + final List recordMessages = filterRecords(airbyteMessages); + final List stateMessages = airbyteMessages + .stream() + .filter(m -> m.getType() == AirbyteMessage.Type.STATE) + .map(AirbyteMessage::getState) + .collect(Collectors.toList()); + Assert.assertEquals(recordMessages.size(), 1); + assertFalse(stateMessages.isEmpty(), "Reason"); + ObjectMapper mapper = new ObjectMapper(); + + assertTrue(cdcFieldsOmitted(recordMessages.get(0).getData()).equals( + mapper.readTree("{\"id\":4, \"name\":\"voyager\", \"userid\":null}"))); + + // when we run incremental sync again there should be no new records. Run a sync with the latest + // state message and assert no records were emitted. + JsonNode latestState = extractLatestState(stateMessages); + + testdb.getDatabase().query(c -> c.query("INSERT INTO %s.%s (id, name) VALUES (5,'deep space nine')".formatted(SCHEMA_NAME, STREAM_NAME3))) + .execute(); + + assert Objects.nonNull(latestState); + final List secondSyncRecords = filterRecords(runRead(configuredCatalogWithOneStream, latestState)); + assertFalse( + secondSyncRecords.isEmpty(), + "Expected the second incremental sync to produce records."); + assertEquals(cdcFieldsOmitted(secondSyncRecords.get(0).getData()), + mapper.readTree("{\"id\":5, \"name\":\"deep space nine\", \"userid\":null}")); + } + + private JsonNode cdcFieldsOmitted(final JsonNode node) { + ObjectMapper mapper = new ObjectMapper(); + ObjectNode object = mapper.createObjectNode(); + node.fieldNames().forEachRemaining(name -> { + if (!name.toLowerCase().startsWith("_ab_cdc_")) { + object.put(name, node.get(name)); + } + }); + return object; + } + } diff --git a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/CloudDeploymentSslEnabledMssqlSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/CloudDeploymentSslEnabledMssqlSourceAcceptanceTest.java index f2a311d6b4556..608971be54ade 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/CloudDeploymentSslEnabledMssqlSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/CloudDeploymentSslEnabledMssqlSourceAcceptanceTest.java @@ -26,7 +26,10 @@ protected void setupEnvironment(final TestDestinationEnv environment) { "(1,'picard', '2124-03-04T01:01:01Z'), " + "(2, 'crusher', '2124-03-04T01:01:01Z'), " + "(3, 'vash', '2124-03-04T01:01:01Z');") - .with("INSERT INTO %s.%s (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato'), (4, 'Argo');", SCHEMA_NAME, STREAM_NAME2); + .with("INSERT INTO %s.%s (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato'), (4, 'Argo');", SCHEMA_NAME, STREAM_NAME2) + .with("CREATE TABLE %s.%s (id INTEGER PRIMARY KEY, name VARCHAR(200), userid INTEGER DEFAULT NULL);", SCHEMA_NAME, STREAM_NAME3) + .with("INSERT INTO %s.%s (id, name) VALUES (4,'voyager');", SCHEMA_NAME, STREAM_NAME3); + } @Override diff --git a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/MssqlSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/MssqlSourceAcceptanceTest.java index 4bdc5cecf61ab..bb48874fdb10b 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/MssqlSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/MssqlSourceAcceptanceTest.java @@ -4,9 +4,13 @@ package io.airbyte.integrations.source.mssql; +import static io.airbyte.protocol.models.v0.SyncMode.INCREMENTAL; import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import io.airbyte.cdk.integrations.base.ssh.SshHelpers; @@ -29,6 +33,7 @@ import java.sql.SQLException; import java.util.HashMap; import java.util.List; +import java.util.Objects; import java.util.stream.Collectors; import org.junit.jupiter.api.Test; @@ -37,19 +42,23 @@ public class MssqlSourceAcceptanceTest extends SourceAcceptanceTest { protected static final String SCHEMA_NAME = "dbo"; protected static final String STREAM_NAME = "id_and_name"; protected static final String STREAM_NAME2 = "starships"; + protected static final String STREAM_NAME3 = "stream3"; protected MsSQLTestDatabase testdb; @Override protected void setupEnvironment(final TestDestinationEnv environment) throws SQLException { testdb = MsSQLTestDatabase.in(BaseImage.MSSQL_2022) - .with("CREATE TABLE id_and_name (id INTEGER, name VARCHAR(200), born DATETIMEOFFSET(7));") + .with("CREATE TABLE %s.%s (id INTEGER, name VARCHAR(200), born DATETIMEOFFSET(7));", SCHEMA_NAME, STREAM_NAME) .with("CREATE TABLE %s.%s(id INTEGER PRIMARY KEY, name VARCHAR(200));", SCHEMA_NAME, STREAM_NAME2) .with("INSERT INTO id_and_name (id, name, born) VALUES " + "(1, 'picard', '2124-03-04T01:01:01Z'), " + "(2, 'crusher', '2124-03-04T01:01:01Z'), " + "(3, 'vash', '2124-03-04T01:01:01Z');") - .with("INSERT INTO %s.%s (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato'), (4, 'Argo');", SCHEMA_NAME, STREAM_NAME2); + .with("INSERT INTO %s.%s (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato'), (4, 'Argo');", SCHEMA_NAME, STREAM_NAME2) + .with("CREATE TABLE %s.%s (id INTEGER PRIMARY KEY, name VARCHAR(200), userid INTEGER DEFAULT NULL);", SCHEMA_NAME, STREAM_NAME3) + .with("INSERT INTO %s.%s (id, name) VALUES (4,'voyager');", SCHEMA_NAME, STREAM_NAME3); + } @Override @@ -76,12 +85,25 @@ protected JsonNode getConfig() { @Override protected ConfiguredAirbyteCatalog getConfiguredCatalog() { - return CatalogHelpers.createConfiguredAirbyteCatalog( - STREAM_NAME, - SCHEMA_NAME, - Field.of("id", JsonSchemaType.NUMBER), - Field.of("name", JsonSchemaType.STRING), - Field.of("born", JsonSchemaType.STRING)); + return new ConfiguredAirbyteCatalog().withStreams(Lists.newArrayList( + new ConfiguredAirbyteStream() + .withSyncMode(INCREMENTAL) + .withCursorField(Lists.newArrayList("id")) + .withDestinationSyncMode(DestinationSyncMode.APPEND) + .withStream(CatalogHelpers.createAirbyteStream( + STREAM_NAME, SCHEMA_NAME, + Field.of("id", JsonSchemaType.NUMBER), + Field.of("name", JsonSchemaType.STRING)) + .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, INCREMENTAL))), + new ConfiguredAirbyteStream() + .withSyncMode(INCREMENTAL) + .withCursorField(Lists.newArrayList("id")) + .withDestinationSyncMode(DestinationSyncMode.APPEND) + .withStream(CatalogHelpers.createAirbyteStream( + STREAM_NAME2, SCHEMA_NAME, + Field.of("id", JsonSchemaType.NUMBER), + Field.of("name", JsonSchemaType.STRING)) + .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, INCREMENTAL))))); } @Override @@ -97,14 +119,14 @@ protected void testAddNewStreamToExistingSync() throws Exception { Field.of("id", JsonSchemaType.NUMBER), Field.of("name", JsonSchemaType.STRING)) .withDestinationSyncMode(DestinationSyncMode.APPEND) - .withSyncMode(SyncMode.INCREMENTAL) + .withSyncMode(INCREMENTAL) .withCursorField(List.of("id")), CatalogHelpers.createConfiguredAirbyteStream(STREAM_NAME2, SCHEMA_NAME, Field.of("id", JsonSchemaType.NUMBER), Field.of("name", JsonSchemaType.STRING)) .withDestinationSyncMode(DestinationSyncMode.APPEND) - .withSyncMode(SyncMode.INCREMENTAL) + .withSyncMode(INCREMENTAL) .withCursorField(List.of("id"))); final ConfiguredAirbyteCatalog configuredCatalogWithOneStream = new ConfiguredAirbyteCatalog().withStreams(List.of(configuredAirbyteStreams.get(0))); @@ -139,6 +161,52 @@ protected void testAddNewStreamToExistingSync() throws Exception { assertEquals(SCHEMA_NAME, stateMessages2.get(1).getStream().getStreamDescriptor().getNamespace()); } + @Test + protected void testNullValueConversion() throws Exception { + final List configuredAirbyteStreams = + Lists.newArrayList(CatalogHelpers.createConfiguredAirbyteStream(STREAM_NAME3, + SCHEMA_NAME, + Field.of("id", JsonSchemaType.NUMBER), + Field.of("name", JsonSchemaType.STRING), + Field.of("userid", JsonSchemaType.NUMBER)) + .withDestinationSyncMode(DestinationSyncMode.APPEND) + .withSyncMode(INCREMENTAL) + .withCursorField(List.of("id"))); + final ConfiguredAirbyteCatalog configuredCatalogWithOneStream = + new ConfiguredAirbyteCatalog().withStreams(List.of(configuredAirbyteStreams.get(0))); + + final List airbyteMessages = runRead(configuredCatalogWithOneStream, getState()); + final List recordMessages = filterRecords(airbyteMessages); + final List stateMessages = airbyteMessages + .stream() + .filter(m -> m.getType() == AirbyteMessage.Type.STATE) + .map(AirbyteMessage::getState) + .collect(Collectors.toList()); + assertEquals(recordMessages.size(), 1); + assertFalse(stateMessages.isEmpty(), "Reason"); + ObjectMapper mapper = new ObjectMapper(); + + assertTrue(recordMessages.get(0).getData().equals( + mapper.readTree("{\"id\":4, \"name\":\"voyager\", \"userid\":null}}"))); + + // when we run incremental sync again there should be no new records. Run a sync with the latest + // state message and assert no records were emitted. + JsonNode latestState = extractLatestState(stateMessages); + + testdb.getDatabase().query(c -> { + return c.query("INSERT INTO %s.%s (id, name) VALUES (5,'deep space nine');".formatted(SCHEMA_NAME, STREAM_NAME3)); + }).execute(); + + assert Objects.nonNull(latestState); + final List secondSyncRecords = filterRecords(runRead(configuredCatalogWithOneStream, latestState)); + assertFalse( + secondSyncRecords.isEmpty(), + "Expected the second incremental sync to produce records."); + assertTrue(secondSyncRecords.get(0).getData().equals( + mapper.readTree("{\"id\":5, \"name\":\"deep space nine\", \"userid\":null}}"))); + + } + private List filterStateMessages(final List messages) { return messages.stream().filter(r -> r.getType() == AirbyteMessage.Type.STATE).map(AirbyteMessage::getState) .collect(Collectors.toList()); diff --git a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/SslEnabledMssqlSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/SslEnabledMssqlSourceAcceptanceTest.java index ccd887c9a4b90..ac68865487bd6 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/SslEnabledMssqlSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/SslEnabledMssqlSourceAcceptanceTest.java @@ -31,7 +31,10 @@ protected void setupEnvironment(final TestDestinationEnv environment) { "(1, 'picard', '2124-03-04T01:01:01Z'), " + "(2, 'crusher', '2124-03-04T01:01:01Z'), " + "(3, 'vash', '2124-03-04T01:01:01Z');") - .with("INSERT INTO %s.%s (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato'), (4, 'Argo');", SCHEMA_NAME, STREAM_NAME2);; + .with("INSERT INTO %s.%s (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato'), (4, 'Argo');", SCHEMA_NAME, STREAM_NAME2) + .with("CREATE TABLE %s.%s (id INTEGER PRIMARY KEY, name VARCHAR(200), userid INTEGER DEFAULT NULL);", SCHEMA_NAME, STREAM_NAME3) + .with("INSERT INTO %s.%s (id, name) VALUES (4,'voyager');", SCHEMA_NAME, STREAM_NAME3); + } } diff --git a/airbyte-integrations/connectors/source-mssql/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-mssql/src/test-integration/resources/expected_spec.json index 1289b14f70791..e0f1fc92d0ae4 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test-integration/resources/expected_spec.json +++ b/airbyte-integrations/connectors/source-mssql/src/test-integration/resources/expected_spec.json @@ -135,10 +135,10 @@ "initial_waiting_seconds": { "type": "integer", "title": "Initial Waiting Time in Seconds (Advanced)", - "description": "The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.", + "description": "The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 3600 seconds. Read about initial waiting time.", "default": 300, "min": 120, - "max": 1200, + "max": 3600, "order": 3 }, "invalid_cdc_cursor_position_behavior": { diff --git a/airbyte-integrations/connectors/source-mssql/src/test-performance/java/io/airbyte/integrations/source/mssql/MssqlSourcePerformanceTest.java b/airbyte-integrations/connectors/source-mssql/src/test-performance/java/io/airbyte/integrations/source/mssql/MssqlSourcePerformanceTest.java index 0ba7e248d14b3..62876374b1661 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test-performance/java/io/airbyte/integrations/source/mssql/MssqlSourcePerformanceTest.java +++ b/airbyte-integrations/connectors/source-mssql/src/test-performance/java/io/airbyte/integrations/source/mssql/MssqlSourcePerformanceTest.java @@ -27,13 +27,13 @@ protected String getImageName() { protected void setupDatabase(final String dbName) { final JsonNode plainConfig = Jsons.deserialize(IOs.readFile(Path.of(PERFORMANCE_SECRET_CREDS))); - config = Jsons.jsonNode(ImmutableMap.builder() + setConfig(Jsons.jsonNode(ImmutableMap.builder() .put(JdbcUtils.HOST_KEY, plainConfig.get(JdbcUtils.HOST_KEY)) .put(JdbcUtils.PORT_KEY, plainConfig.get(JdbcUtils.PORT_KEY)) .put(JdbcUtils.DATABASE_KEY, dbName) .put(JdbcUtils.USERNAME_KEY, plainConfig.get(JdbcUtils.USERNAME_KEY)) .put(JdbcUtils.PASSWORD_KEY, plainConfig.get(JdbcUtils.PASSWORD_KEY)) - .build()); + .build())); } /** diff --git a/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceTest.java b/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceTest.java index 222f9e1c4fa5d..c12cbec7d0c21 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceTest.java +++ b/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceTest.java @@ -381,13 +381,13 @@ protected void addCdcDefaultCursorField(final AirbyteStream stream) { } @Override - protected void assertExpectedStateMessages(final List stateMessages) { + protected void assertExpectedStateMessages(final List stateMessages) { assertEquals(7, stateMessages.size()); assertStateTypes(stateMessages, 4); } @Override - protected void assertExpectedStateMessagesFromIncrementalSync(final List stateMessages) { + protected void assertExpectedStateMessagesFromIncrementalSync(final List stateMessages) { assertEquals(1, stateMessages.size()); assertNotNull(stateMessages.get(0).getData()); for (final AirbyteStateMessage stateMessage : stateMessages) { @@ -397,17 +397,17 @@ protected void assertExpectedStateMessagesFromIncrementalSync(final List stateMessages) { + protected void assertExpectedStateMessagesForNoData(final List stateMessages) { assertEquals(2, stateMessages.size()); } @Override - protected void assertExpectedStateMessagesForRecordsProducedDuringAndAfterSync(final List stateAfterFirstBatch) { + protected void assertExpectedStateMessagesForRecordsProducedDuringAndAfterSync(final List stateAfterFirstBatch) { assertEquals(27, stateAfterFirstBatch.size()); assertStateTypes(stateAfterFirstBatch, 24); } - private void assertStateTypes(final List stateMessages, final int indexTillWhichExpectOcState) { + private void assertStateTypes(final List stateMessages, final int indexTillWhichExpectOcState) { JsonNode sharedState = null; for (int i = 0; i < stateMessages.size(); i++) { final AirbyteStateMessage stateMessage = stateMessages.get(i); diff --git a/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/MssqlJdbcSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/MssqlJdbcSourceAcceptanceTest.java index 605737a09c367..7c91ca0ef2c4c 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/MssqlJdbcSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/MssqlJdbcSourceAcceptanceTest.java @@ -418,7 +418,7 @@ protected DbStreamState buildStreamState(final ConfiguredAirbyteStream configure // Override from parent class as we're no longer including the legacy Data field. @Override - protected List createExpectedTestMessages(final List states, final long numRecords) { + protected List createExpectedTestMessages(final List states, final long numRecords) { return states.stream() .map(s -> new AirbyteMessage().withType(Type.STATE) .withState( diff --git a/airbyte-integrations/connectors/source-my-hours/.dockerignore b/airbyte-integrations/connectors/source-my-hours/.dockerignore deleted file mode 100644 index f241c407ccc26..0000000000000 --- a/airbyte-integrations/connectors/source-my-hours/.dockerignore +++ /dev/null @@ -1,6 +0,0 @@ -* -!Dockerfile -!main.py -!source_my_hours -!setup.py -!secrets diff --git a/airbyte-integrations/connectors/source-my-hours/Dockerfile b/airbyte-integrations/connectors/source-my-hours/Dockerfile deleted file mode 100644 index 8e2bd95d7e2d0..0000000000000 --- a/airbyte-integrations/connectors/source-my-hours/Dockerfile +++ /dev/null @@ -1,38 +0,0 @@ -FROM python:3.9.11-alpine3.15 as base - -# build and load all requirements -FROM base as builder -WORKDIR /airbyte/integration_code - -# upgrade pip to the latest version -RUN apk --no-cache upgrade \ - && pip install --upgrade pip \ - && apk --no-cache add tzdata build-base - - -COPY setup.py ./ -# install necessary packages to a temporary folder -RUN pip install --prefix=/install . - -# build a clean environment -FROM base -WORKDIR /airbyte/integration_code - -# copy all loaded and built libraries to a pure basic image -COPY --from=builder /install /usr/local -# add default timezone settings -COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime -RUN echo "Etc/UTC" > /etc/timezone - -# bash is installed for more convenient debugging. -RUN apk --no-cache add bash - -# copy payload code only -COPY main.py ./ -COPY source_my_hours ./source_my_hours - -ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] - -LABEL io.airbyte.version=0.1.2 -LABEL io.airbyte.name=airbyte/source-my-hours diff --git a/airbyte-integrations/connectors/source-my-hours/README.md b/airbyte-integrations/connectors/source-my-hours/README.md index a63c9d3147c32..5353a0bdad093 100644 --- a/airbyte-integrations/connectors/source-my-hours/README.md +++ b/airbyte-integrations/connectors/source-my-hours/README.md @@ -1,69 +1,63 @@ # My Hours Source -This is the repository for the My Hours source connector, written in Python. -For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/my-hours). +This is the repository for the My Hours configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.com/integrations/sources/my-hours). ## Local development ### Prerequisites -**To iterate on this connector, make sure to complete this prerequisites section.** -#### Minimum Python version required `= 3.7.0` +* Python (`^3.9`) +* Poetry (`^1.7`) - installation instructions [here](https://python-poetry.org/docs/#installation) -#### Build & Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python -m venv .venv -``` -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt -pip install '.[tests]' + +### Installing the connector + +From this connector directory, run: +```bash +poetry install --with dev ``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. -#### Create credentials -**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/my-hours) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_my_hours/spec.json` file. +### Create credentials + +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/sources/my-hours) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `spec` inside `manifest/yaml` file. Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. -See `integration_tests/sample_config.json` for a sample config file. +See `sample_files/sample_config.json` for a sample config file. -**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source my-hours test creds` -and place them into `secrets/config.json`. ### Locally running the connector + ``` -python main.py spec -python main.py check --config secrets/config.json -python main.py discover --config secrets/config.json -python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +poetry run source-my-hours spec +poetry run source-my-hours check --config secrets/config.json +poetry run source-my-hours discover --config secrets/config.json +poetry run source-my-hours read --config secrets/config.json --catalog sample_files/configured_catalog.json ``` -### Locally running the connector docker image +### Running tests +To run tests locally, from the connector directory run: -#### Build -**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):** -```bash -airbyte-ci connectors --name=source-my-hours build +``` +poetry run pytest tests ``` -An image will be built with the tag `airbyte/source-my-hours:dev`. +### Building the docker image -**Via `docker build`:** +1. Install [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) +2. Run the following command to build the docker image: ```bash -docker build -t airbyte/source-my-hours:dev . +airbyte-ci connectors --name=source-my-hours build ``` -#### Run +An image will be available on your host with the tag `airbyte/source-my-hours:dev`. + + +### Running as a docker container + Then run any of the connector commands as follows: ``` docker run --rm airbyte/source-my-hours:dev spec @@ -72,29 +66,38 @@ docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-my-hours:dev discover docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-my-hours:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json ``` -## Testing +### Running our CI test suite + You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): ```bash airbyte-ci connectors --name=source-my-hours test ``` ### Customizing acceptance Tests -Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. + +Customize `acceptance-test-config.yml` file to configure acceptance tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. -## Dependency Management -All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. -We split dependencies between two groups, dependencies that are: -* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. -* required for the testing need to go to `TEST_REQUIREMENTS` list +### Dependency Management + +All of your dependencies should be managed via Poetry. +To add a new dependency, run: +```bash +poetry add +``` + +Please commit the changes to `pyproject.toml` and `poetry.lock` files. + +## Publishing a new version of the connector -### Publishing a new version of the connector You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? 1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-my-hours test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). +2. Bump the connector version (please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors)): + - bump the `dockerImageTag` value in in `metadata.yaml` + - bump the `version` value in `pyproject.toml` 3. Make sure the `metadata.yaml` content is up to date. -4. Make the connector documentation and its changelog is up to date (`docs/integrations/sources/my-hours.md`). +4. Make sure the connector documentation and its changelog is up to date (`docs/integrations/sources/my-hours.md`). 5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). 6. Pat yourself on the back for being an awesome contributor. 7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. - +8. Once your PR is merged, the new version of the connector will be automatically published to Docker Hub and our connector registry. \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-rss/unit_tests/__init__.py b/airbyte-integrations/connectors/source-my-hours/__init__.py similarity index 100% rename from airbyte-integrations/connectors/source-rss/unit_tests/__init__.py rename to airbyte-integrations/connectors/source-my-hours/__init__.py diff --git a/airbyte-integrations/connectors/source-my-hours/acceptance-test-config.yml b/airbyte-integrations/connectors/source-my-hours/acceptance-test-config.yml index 61597984a9184..5d04f8ef83373 100644 --- a/airbyte-integrations/connectors/source-my-hours/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-my-hours/acceptance-test-config.yml @@ -1,10 +1,11 @@ # See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) # for more information about how to configure these tests connector_image: airbyte/source-my-hours:dev +test_strictness_level: low acceptance_tests: spec: tests: - - spec_path: "source_my_hours/spec.json" + - spec_path: "source_my_hours/spec.yaml" connection: tests: - config_path: "secrets/config.json" @@ -18,10 +19,19 @@ acceptance_tests: tests: - config_path: "secrets/config.json" configured_catalog_path: "integration_tests/configured_catalog.json" - # time_logs stream contains a number of empty fields that are not - # documented in the API. Until we can verify the types on these fields, - # we need to disable this check. - fail_on_extra_columns: false + empty_streams: [] + # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file + # expect_records: + # path: "integration_tests/expected_records.jsonl" + # exact_order: no + incremental: + bypass_reason: "This connector does not implement incremental sync" + # TODO uncomment this block this block if your connector implements incremental sync: + # tests: + # - config_path: "secrets/config.json" + # configured_catalog_path: "integration_tests/configured_catalog.json" + # future_state: + # future_state_path: "integration_tests/abnormal_state.json" full_refresh: tests: - config_path: "secrets/config.json" diff --git a/airbyte-integrations/connectors/source-my-hours/integration_tests/__init__.py b/airbyte-integrations/connectors/source-my-hours/integration_tests/__init__.py index 46b7376756ec6..c941b30457953 100644 --- a/airbyte-integrations/connectors/source-my-hours/integration_tests/__init__.py +++ b/airbyte-integrations/connectors/source-my-hours/integration_tests/__init__.py @@ -1,3 +1,3 @@ # -# Copyright (c) 2021 Airbyte, Inc., all rights reserved. +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. # diff --git a/airbyte-integrations/connector-templates/source-singer/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-my-hours/integration_tests/abnormal_state.json similarity index 100% rename from airbyte-integrations/connector-templates/source-singer/integration_tests/abnormal_state.json rename to airbyte-integrations/connectors/source-my-hours/integration_tests/abnormal_state.json diff --git a/airbyte-integrations/connectors/source-my-hours/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-my-hours/integration_tests/acceptance.py index 82823254d2666..9e6409236281f 100644 --- a/airbyte-integrations/connectors/source-my-hours/integration_tests/acceptance.py +++ b/airbyte-integrations/connectors/source-my-hours/integration_tests/acceptance.py @@ -11,4 +11,6 @@ @pytest.fixture(scope="session", autouse=True) def connector_setup(): """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-my-hours/integration_tests/catalog.json b/airbyte-integrations/connectors/source-my-hours/integration_tests/catalog.json deleted file mode 100644 index 9f35a8d070240..0000000000000 --- a/airbyte-integrations/connectors/source-my-hours/integration_tests/catalog.json +++ /dev/null @@ -1,332 +0,0 @@ -{ - "streams": [ - { - "name": "clients", - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "id": { - "type": ["number"] - }, - "contactName": { - "type": ["null", "string"] - }, - "contactEmail": { - "type": ["null", "string"] - }, - "name": { - "type": ["null", "string"] - }, - "archived": { - "type": ["null", "boolean"] - }, - "dateArchived": { - "type": ["null", "string"] - } - } - }, - "supported_sync_modes": ["full_refresh"], - "source_defined_primary_key": [["id"]] - }, - { - "name": "projects", - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "id": { - "type": ["number"] - }, - "name": { - "type": ["null", "string"] - }, - "archived": { - "type": ["null", "boolean"] - }, - "dateArchived": { - "type": ["null", "string"] - }, - "dateCreated": { - "type": ["null", "string"] - }, - "clientName": { - "type": ["null", "string"] - }, - "budgetAlertPercent": { - "type": ["null", "number"] - }, - "budgetType": { - "type": ["null", "number"] - }, - "totalTimeLogged": { - "type": ["null", "number"] - }, - "budgetValue": { - "type": ["null", "number"] - }, - "totalAmount": { - "type": ["null", "number"] - }, - "totalExpense": { - "type": ["null", "number"] - }, - "totalCost": { - "type": ["null", "number"] - }, - "billableTimeLogged": { - "type": ["null", "number"] - }, - "totalBillableAmount": { - "type": ["null", "number"] - }, - "billable": { - "type": ["null", "boolean"] - }, - "roundType": { - "type": ["null", "number"] - }, - "roundInterval": { - "type": ["null", "number"] - }, - "budgetSpentPercentage": { - "type": ["null", "number"] - }, - "budgetTarget": { - "type": ["null", "number"] - }, - "budgetPeriodType": { - "type": ["null", "number"] - }, - "budgetSpent": { - "type": ["null", "number"] - } - } - }, - "supported_sync_modes": ["full_refresh"], - "source_defined_primary_key": [["id"]] - }, - { - "name": "tags", - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "id": { - "type": ["number"] - }, - "name": { - "type": ["null", "string"] - }, - "hexColor": { - "type": ["null", "string"] - }, - "archived": { - "type": ["null", "boolean"] - }, - "dateArchived": { - "type": ["null", "string"] - } - } - }, - "supported_sync_modes": ["full_refresh"], - "source_defined_primary_key": [["id"]] - }, - { - "name": "time_logs", - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "logId": { - "type": ["number"] - }, - "userId": { - "type": ["null", "number"] - }, - "date": { - "type": ["null", "string"] - }, - "userName": { - "type": ["null", "string"] - }, - "clientId": { - "type": ["null", "number"] - }, - "clientName": { - "type": ["null", "string"] - }, - "projectId": { - "type": ["null", "number"] - }, - "projectName": { - "type": ["null", "string"] - }, - "projectInvoiceMethod": { - "type": ["null", "number"] - }, - "taskId": { - "type": ["null", "number"] - }, - "taskName": { - "type": ["null", "string"] - }, - "tags": { - "type": ["null", "string"] - }, - "rate": { - "type": ["null", "number"] - }, - "billable": { - "type": ["null", "boolean"] - }, - "locked": { - "type": ["null", "boolean"] - }, - "billableAmount": { - "type": ["null", "number"] - }, - "amount": { - "type": ["null", "number"] - }, - "laborCost": { - "type": ["null", "number"] - }, - "laborRate": { - "type": ["null", "number"] - }, - "laborDuration": { - "type": ["null", "number"] - }, - "logduration": { - "type": ["null", "number"] - }, - "expense": { - "type": ["null", "number"] - }, - "cost": { - "type": ["null", "number"] - }, - "note": { - "type": ["null", "string"] - }, - "status": { - "type": ["null", "number"] - }, - "invoiceId": { - "type": ["null", "number"] - }, - "invoiced": { - "type": ["null", "boolean"] - }, - "billableHours": { - "type": ["null", "number"] - }, - "laborHours": { - "type": ["null", "number"] - }, - "customField1": { - "type": ["null", "number"] - }, - "customField2": { - "type": ["null", "string"] - }, - "customField3": { - "type": ["null", "string"] - }, - "monthOfYear": { - "type": ["null", "string"] - }, - "weekOfYear": { - "type": ["null", "string"] - }, - "times": { - "type": ["null", "object"], - "additionalProperties": false, - "properties": { - "id": { - "type": ["null", "number"] - }, - "duration": { - "type": ["null", "number"] - }, - "startTime": { - "type": ["null", "string"] - }, - "endTime": { - "type": ["null", "string"] - }, - "running": { - "type": ["null", "boolean"] - }, - "deleted": { - "type": ["null", "boolean"] - } - } - }, - "tagsData": { - "type": ["null", "array"], - "items": { - "type": ["null", "string"] - } - }, - "attachments": { - "type": ["null", "array"], - "items": { - "type": ["null", "string"] - } - }, - "roundtype": { - "type": ["null", "number"] - } - } - }, - "supported_sync_modes": ["full_refresh"], - "source_defined_primary_key": [["logId"]] - }, - { - "name": "users", - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "id": { - "type": ["number"] - }, - "name": { - "type": ["null", "string"] - }, - "archived": { - "type": ["null", "boolean"] - }, - "dateArchived": { - "type": ["null", "string"] - }, - "active": { - "type": ["null", "boolean"] - }, - "accountOwner": { - "type": ["null", "boolean"] - }, - "email": { - "type": ["null", "string"] - }, - "rate": { - "type": ["null", "number"] - }, - "billableRate": { - "type": ["null", "number"] - }, - "admin": { - "type": ["null", "boolean"] - }, - "isProjectManager": { - "type": ["null", "boolean"] - } - } - }, - "supported_sync_modes": ["full_refresh"], - "source_defined_primary_key": [["id"]] - } - ] -} diff --git a/airbyte-integrations/connectors/source-my-hours/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-my-hours/integration_tests/invalid_config.json index ecaef4a83980a..db7b668e8bd6a 100644 --- a/airbyte-integrations/connectors/source-my-hours/integration_tests/invalid_config.json +++ b/airbyte-integrations/connectors/source-my-hours/integration_tests/invalid_config.json @@ -1,6 +1,6 @@ { "email": "john@doe.com", "password": "pw1234", - "start_date": "2016-01-01", - "logs_batch_size": 30 + "logs_batch_size": 30, + "start_date": "2021-12-01" } diff --git a/airbyte-integrations/connectors/source-my-hours/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-my-hours/integration_tests/sample_config.json index 2eff8e358c9b1..0eea2c8016363 100644 --- a/airbyte-integrations/connectors/source-my-hours/integration_tests/sample_config.json +++ b/airbyte-integrations/connectors/source-my-hours/integration_tests/sample_config.json @@ -1,6 +1,6 @@ { "email": "email", "password": "password", - "start_date": "2016-01-01", - "logs_batch_size": 30 + "logs_batch_size": 30, + "start_date": "2021-12-01" } diff --git a/airbyte-integrations/connector-templates/source-singer/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-my-hours/integration_tests/sample_state.json similarity index 100% rename from airbyte-integrations/connector-templates/source-singer/integration_tests/sample_state.json rename to airbyte-integrations/connectors/source-my-hours/integration_tests/sample_state.json diff --git a/airbyte-integrations/connectors/source-my-hours/metadata.yaml b/airbyte-integrations/connectors/source-my-hours/metadata.yaml index 577a4c1c4360e..b7929bde74a0e 100644 --- a/airbyte-integrations/connectors/source-my-hours/metadata.yaml +++ b/airbyte-integrations/connectors/source-my-hours/metadata.yaml @@ -1,13 +1,7 @@ data: - connectorSubtype: api - connectorType: source - definitionId: 722ba4bf-06ec-45a4-8dd5-72e4a5cf3903 - dockerImageTag: 0.1.2 - dockerRepository: airbyte/source-my-hours - githubIssueLabel: source-my-hours - icon: my-hours.svg - license: MIT - name: My Hours + allowedHosts: + hosts: + - api2.myhours.com remoteRegistries: pypi: enabled: true @@ -17,13 +11,28 @@ data: enabled: true oss: enabled: true + connectorBuildOptions: + # Please update to the latest version of the connector base image. + # https://hub.docker.com/r/airbyte/python-connector-base + # Please use the full address with sha256 hash to guarantee build reproducibility. + baseImage: docker.io/airbyte/python-connector-base:1.2.0@sha256:c22a9d97464b69d6ef01898edf3f8612dc11614f05a84984451dde195f337db9 + connectorSubtype: api + connectorType: source + definitionId: 722ba4bf-06ec-45a4-8dd5-72e4a5cf3903 + dockerImageTag: 0.2.0 + dockerRepository: airbyte/source-my-hours + githubIssueLabel: source-my-hours + icon: my-hours.svg + license: MIT + name: My Hours + releaseDate: 2021-12-21 releaseStage: alpha + supportLevel: community documentationUrl: https://docs.airbyte.com/integrations/sources/my-hours tags: - language:python - - cdk:python + - cdk:low-code ab_internal: sl: 100 ql: 100 - supportLevel: community metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-my-hours/poetry.lock b/airbyte-integrations/connectors/source-my-hours/poetry.lock new file mode 100644 index 0000000000000..aff4fee11d57a --- /dev/null +++ b/airbyte-integrations/connectors/source-my-hours/poetry.lock @@ -0,0 +1,1008 @@ +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. + +[[package]] +name = "airbyte-cdk" +version = "0.79.1" +description = "A framework for writing Airbyte Connectors." +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "airbyte_cdk-0.79.1-py3-none-any.whl", hash = "sha256:36c4b1fe98448b7d116f16c612982af8e22cbff28ea37da918c851d7feb1093c"}, + {file = "airbyte_cdk-0.79.1.tar.gz", hash = "sha256:a49d10b3c87770ab1e7b7ebf9a1e945d49274c18548756f93a841ebd4c195146"}, +] + +[package.dependencies] +airbyte-protocol-models = "0.5.1" +backoff = "*" +cachetools = "*" +Deprecated = ">=1.2,<1.3" +dpath = ">=2.0.1,<2.1.0" +genson = "1.2.2" +isodate = ">=0.6.1,<0.7.0" +Jinja2 = ">=3.1.2,<3.2.0" +jsonref = ">=0.2,<0.3" +jsonschema = ">=3.2.0,<3.3.0" +pendulum = "<3.0.0" +pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" +python-dateutil = "*" +PyYAML = ">=6.0.1,<7.0.0" +requests = "*" +requests_cache = "*" +wcmatch = "8.4" + +[package.extras] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] +vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] + +[[package]] +name = "airbyte-protocol-models" +version = "0.5.1" +description = "Declares the Airbyte Protocol." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, + {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, +] + +[package.dependencies] +pydantic = ">=1.9.2,<2.0.0" + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "bracex" +version = "2.4" +description = "Bash style brace expander." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + +[[package]] +name = "cachetools" +version = "5.3.3" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "dpath" +version = "2.0.8" +description = "Filesystem-like pathing and searching for dictionaries" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, + {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "genson" +version = "1.2.2" +description = "GenSON is a powerful, user-friendly JSON Schema generator." +optional = false +python-versions = "*" +files = [ + {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, +] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jsonref" +version = "0.2" +description = "An implementation of JSON Reference for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, +] + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0" +setuptools = "*" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "packaging" +version = "24.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, +] + +[[package]] +name = "pendulum" +version = "2.1.2" +description = "Python datetimes made easy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, +] + +[package.dependencies] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pydantic" +version = "1.10.15" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50f1666a9940d3d68683c9d96e39640f709d7a72ff8702987dab1761036206bb"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82790d4753ee5d00739d6cb5cf56bceb186d9d6ce134aca3ba7befb1eedbc2c8"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d207d5b87f6cbefbdb1198154292faee8017d7495a54ae58db06762004500d00"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e49db944fad339b2ccb80128ffd3f8af076f9f287197a480bf1e4ca053a866f0"}, + {file = "pydantic-1.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:d3b5c4cbd0c9cb61bbbb19ce335e1f8ab87a811f6d589ed52b0254cf585d709c"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3d5731a120752248844676bf92f25a12f6e45425e63ce22e0849297a093b5b0"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c365ad9c394f9eeffcb30a82f4246c0006417f03a7c0f8315d6211f25f7cb654"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3287e1614393119c67bd4404f46e33ae3be3ed4cd10360b48d0a4459f420c6a3"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be51dd2c8596b25fe43c0a4a59c2bee4f18d88efb8031188f9e7ddc6b469cf44"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6a51a1dd4aa7b3f1317f65493a182d3cff708385327c1c82c81e4a9d6d65b2e4"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4e316e54b5775d1eb59187f9290aeb38acf620e10f7fd2f776d97bb788199e53"}, + {file = "pydantic-1.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:0d142fa1b8f2f0ae11ddd5e3e317dcac060b951d605fda26ca9b234b92214986"}, + {file = "pydantic-1.10.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7ea210336b891f5ea334f8fc9f8f862b87acd5d4a0cbc9e3e208e7aa1775dabf"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3453685ccd7140715e05f2193d64030101eaad26076fad4e246c1cc97e1bb30d"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bea1f03b8d4e8e86702c918ccfd5d947ac268f0f0cc6ed71782e4b09353b26f"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:af9850d98fc21e5bc24ea9e35dd80a29faf6462c608728a110c0a30b595e58b7"}, + {file = "pydantic-1.10.15-cp37-cp37m-win_amd64.whl", hash = "sha256:d31ee5b14a82c9afe2bd26aaa405293d4237d0591527d9129ce36e58f19f95c1"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5e09c19df304b8123938dc3c53d3d3be6ec74b9d7d0d80f4f4b5432ae16c2022"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7ac9237cd62947db00a0d16acf2f3e00d1ae9d3bd602b9c415f93e7a9fc10528"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:584f2d4c98ffec420e02305cf675857bae03c9d617fcfdc34946b1160213a948"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbc6989fad0c030bd70a0b6f626f98a862224bc2b1e36bfc531ea2facc0a340c"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d573082c6ef99336f2cb5b667b781d2f776d4af311574fb53d908517ba523c22"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6bd7030c9abc80134087d8b6e7aa957e43d35714daa116aced57269a445b8f7b"}, + {file = "pydantic-1.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:3350f527bb04138f8aff932dc828f154847fbdc7a1a44c240fbfff1b57f49a12"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51d405b42f1b86703555797270e4970a9f9bd7953f3990142e69d1037f9d9e51"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a980a77c52723b0dc56640ced396b73a024d4b74f02bcb2d21dbbac1debbe9d0"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f1a1fb467d3f49e1708a3f632b11c69fccb4e748a325d5a491ddc7b5d22383"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:676ed48f2c5bbad835f1a8ed8a6d44c1cd5a21121116d2ac40bd1cd3619746ed"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92229f73400b80c13afcd050687f4d7e88de9234d74b27e6728aa689abcf58cc"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2746189100c646682eff0bce95efa7d2e203420d8e1c613dc0c6b4c1d9c1fde4"}, + {file = "pydantic-1.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:394f08750bd8eaad714718812e7fab615f873b3cdd0b9d84e76e51ef3b50b6b7"}, + {file = "pydantic-1.10.15-py3-none-any.whl", hash = "sha256:28e552a060ba2740d0d2aabe35162652c1459a0b9069fe0db7f4ee0e18e74d58"}, + {file = "pydantic-1.10.15.tar.gz", hash = "sha256:ca832e124eda231a60a041da4f013e3ff24949d94a01154b137fc2f2a43c3ffb"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + +[[package]] +name = "pyrsistent" +version = "0.20.0" +description = "Persistent/Functional/Immutable data structures" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, +] + +[[package]] +name = "pytest" +version = "8.1.1" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"}, + {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.4,<2.0" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-mock" +version = "3.14.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, +] + +[package.dependencies] +pytest = ">=6.2.5" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-cache" +version = "1.2.0" +description = "A persistent cache for python requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, +] + +[package.dependencies] +attrs = ">=21.2" +cattrs = ">=22.2" +platformdirs = ">=2.5" +requests = ">=2.22" +url-normalize = ">=1.4" +urllib3 = ">=1.25.5" + +[package.extras] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] +bson = ["bson (>=0.5)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] +dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] +json = ["ujson (>=5.4)"] +mongodb = ["pymongo (>=3)"] +redis = ["redis (>=3)"] +security = ["itsdangerous (>=2.0)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "requests-mock" +version = "1.12.1" +description = "Mock out responses from the requests package" +optional = false +python-versions = ">=3.5" +files = [ + {file = "requests-mock-1.12.1.tar.gz", hash = "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401"}, + {file = "requests_mock-1.12.1-py2.py3-none-any.whl", hash = "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563"}, +] + +[package.dependencies] +requests = ">=2.22,<3" + +[package.extras] +fixture = ["fixtures"] + +[[package]] +name = "setuptools" +version = "69.2.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "typing-extensions" +version = "4.11.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, + {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, +] + +[[package]] +name = "url-normalize" +version = "1.4.3" +description = "URL normalization for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, + {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wcmatch" +version = "8.4" +description = "Wildcard/glob file name matcher." +optional = false +python-versions = ">=3.7" +files = [ + {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, + {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, +] + +[package.dependencies] +bracex = ">=2.1.1" + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9,<3.12" +content-hash = "acd5908c82765b55ec5859799db1bcbb616d044db689a3ba94346d8b1d2f9b5c" diff --git a/airbyte-integrations/connectors/source-my-hours/pyproject.toml b/airbyte-integrations/connectors/source-my-hours/pyproject.toml new file mode 100644 index 0000000000000..4dddeb20869d0 --- /dev/null +++ b/airbyte-integrations/connectors/source-my-hours/pyproject.toml @@ -0,0 +1,27 @@ +[build-system] +requires = [ "poetry-core>=1.0.0",] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +version = "0.2.0" +name = "source-my-hours" +description = "Source implementation for my-hours." +authors = [ "Airbyte ",] +license = "MIT" +readme = "README.md" +documentation = "https://docs.airbyte.com/integrations/sources/my-hours" +homepage = "https://airbyte.com" +repository = "https://github.com/airbytehq/airbyte" +packages = [ { include = "source_my_hours" }, {include = "main.py" } ] + +[tool.poetry.dependencies] +python = "^3.9,<3.12" +airbyte-cdk = "^0" + +[tool.poetry.scripts] +source-my-hours = "source_my_hours.run:run" + +[tool.poetry.group.dev.dependencies] +requests-mock = "*" +pytest-mock = "*" +pytest = "*" diff --git a/airbyte-integrations/connectors/source-my-hours/requirements.txt b/airbyte-integrations/connectors/source-my-hours/requirements.txt deleted file mode 100644 index d6e1198b1ab1f..0000000000000 --- a/airbyte-integrations/connectors/source-my-hours/requirements.txt +++ /dev/null @@ -1 +0,0 @@ --e . diff --git a/airbyte-integrations/connectors/source-my-hours/setup.py b/airbyte-integrations/connectors/source-my-hours/setup.py deleted file mode 100644 index 0f1e5c67af1ac..0000000000000 --- a/airbyte-integrations/connectors/source-my-hours/setup.py +++ /dev/null @@ -1,47 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = [ - "airbyte-cdk", -] - -TEST_REQUIREMENTS = [ - "requests-mock~=1.9.3", - "pytest~=6.1", - "pytest-mock~=3.6.1", - "responses~=0.16.0", -] - -setup( - entry_points={ - "console_scripts": [ - "source-my-hours=source_my_hours.run:run", - ], - }, - name="source_my_hours", - description="Source implementation for My Hours.", - author="Wisse Jelgersma", - author_email="wisse@vrowl.nl", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={ - "": [ - # Include yaml files in the package (if any) - "*.yml", - "*.yaml", - # Include all json files in the package, up to 4 levels deep - "*.json", - "*/*.json", - "*/*/*.json", - "*/*/*/*.json", - "*/*/*/*/*.json", - ] - }, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/source-my-hours/source_my_hours/__init__.py b/airbyte-integrations/connectors/source-my-hours/source_my_hours/__init__.py index 8363b2357057e..bfddad074e1d4 100644 --- a/airbyte-integrations/connectors/source-my-hours/source_my_hours/__init__.py +++ b/airbyte-integrations/connectors/source-my-hours/source_my_hours/__init__.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2021 Airbyte, Inc., all rights reserved. +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. # diff --git a/airbyte-integrations/connectors/source-my-hours/source_my_hours/auth.py b/airbyte-integrations/connectors/source-my-hours/source_my_hours/auth.py deleted file mode 100644 index f16e7222c59e8..0000000000000 --- a/airbyte-integrations/connectors/source-my-hours/source_my_hours/auth.py +++ /dev/null @@ -1,55 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import json -from typing import Any, Mapping, MutableMapping, Tuple - -import pendulum -import requests -from airbyte_cdk.sources.streams.http.requests_native_auth.oauth import Oauth2Authenticator - -from .constants import REQUEST_HEADERS, URL_BASE - - -class MyHoursAuthenticator(Oauth2Authenticator): - def __init__(self, email: str, password: str): - super().__init__( - token_refresh_endpoint=f"{URL_BASE}/tokens/refresh", - client_id=None, - client_secret=None, - refresh_token=None, - access_token_name="accessToken", - expires_in_name="expiresIn", - ) - - self.retrieve_refresh_token(email, password) - - def retrieve_refresh_token(self, email: str, password: str): - t0 = pendulum.now() - payload = json.dumps({"grantType": "password", "email": email, "password": password, "clientId": "api"}) - response = requests.post(f"{URL_BASE}/tokens/login", headers=REQUEST_HEADERS, data=payload) - response.raise_for_status() - json_response = response.json() - - self.refresh_token = json_response["refreshToken"] - self._access_token = json_response[self._access_token_name] - self._token_expiry_date = t0.add(seconds=json_response[self._expires_in_name]) - - def get_refresh_request_body(self) -> Mapping[str, Any]: - payload: MutableMapping[str, Any] = { - "grantType": "refresh_token", - "refreshToken": self.refresh_token, - } - - return payload - - def refresh_access_token(self) -> Tuple[str, int]: - try: - response = requests.request(method="POST", url=self._token_refresh_endpoint, data=self.get_refresh_request_body()) - response.raise_for_status() - response_json = response.json() - self.refresh_token = response_json["refreshToken"] - return response_json[self._access_token_name], response_json[self._expires_in_name] - except Exception as e: - raise Exception(f"Error while refreshing access token: {e}") from e diff --git a/airbyte-integrations/connectors/source-my-hours/source_my_hours/components.py b/airbyte-integrations/connectors/source-my-hours/source_my_hours/components.py new file mode 100644 index 0000000000000..4377ee24ff37c --- /dev/null +++ b/airbyte-integrations/connectors/source-my-hours/source_my_hours/components.py @@ -0,0 +1,85 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from dataclasses import dataclass +from http import HTTPStatus +from typing import Any, Mapping, Union + +import requests +from airbyte_cdk.sources.declarative.auth.declarative_authenticator import NoAuth +from airbyte_cdk.sources.declarative.interpolation import InterpolatedString +from airbyte_cdk.sources.declarative.types import Config +from requests import HTTPError + +# https://docs.airbyte.com/integrations/sources/my-hours +# The Bearer token generated will expire in five days + + +@dataclass +class CustomAuthenticator(NoAuth): + config: Config + email: Union[InterpolatedString, str] + password: Union[InterpolatedString, str] + + _access_token = None + _refreshToken = None + + def __post_init__(self, parameters: Mapping[str, Any]): + self._email = InterpolatedString.create(self.email, parameters=parameters).eval(self.config) + self._password = InterpolatedString.create(self.password, parameters=parameters).eval(self.config) + + def __call__(self, request: requests.PreparedRequest) -> requests.PreparedRequest: + """Attach the page access token to params to authenticate on the HTTP request""" + if self._access_token is None or self._refreshToken is None: + self._access_token, self._refreshToken = self.generate_access_token() + headers = {self.auth_header: f"Bearer {self._access_token}", "Accept": "application/json", "api-version": "1.0"} + request.headers.update(headers) + return request + + @property + def auth_header(self) -> str: + return "Authorization" + + @property + def token(self) -> str: + return self._access_token + + def _get_refresh_access_token_response(self): + url = f"https://api2.myhours.com/api/tokens/refresh" + headers = {"Content-Type": "application/json", "api-version": "1.0", self.auth_header: f"Bearer {self._access_token}"} + + data = { + "refreshToken": self._refreshToken, + "grantType": "refresh_token", + } + try: + response = requests.post(url, headers=headers, json=data) + response.raise_for_status() + modified_response = { + "access_token": response.json().get("accessToken"), + "refresh_token": response.json().get("refreshToken"), + "expires_in": response.json().get("expiresIn"), + } + return modified_response + except Exception as e: + raise Exception(f"Error while refreshing access token: {e}") from e + + def generate_access_token(self) -> tuple[str, str]: + try: + headers = {"Content-Type": "application/json", "api-version": "1.0"} + + data = { + "email": self._email, + "password": self._password, + "grantType": "password", + "clientId": "api", + } + + url = "https://api2.myhours.com/api/tokens/login" + rest = requests.post(url, headers=headers, json=data) + if rest.status_code != HTTPStatus.OK: + raise HTTPError(rest.text) + return (rest.json().get("accessToken"), rest.json().get("refreshToken")) + except Exception as e: + raise Exception(f"Error while generating access token: {e}") from e diff --git a/airbyte-integrations/connectors/source-my-hours/source_my_hours/constants.py b/airbyte-integrations/connectors/source-my-hours/source_my_hours/constants.py deleted file mode 100644 index 494186deccd1e..0000000000000 --- a/airbyte-integrations/connectors/source-my-hours/source_my_hours/constants.py +++ /dev/null @@ -1,6 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -URL_BASE = "https://api2.myhours.com/api" -REQUEST_HEADERS = {"accept": "application/json", "api-version": "1.0", "Content-Type": "application/json"} diff --git a/airbyte-integrations/connectors/source-my-hours/source_my_hours/manifest.yaml b/airbyte-integrations/connectors/source-my-hours/source_my_hours/manifest.yaml new file mode 100644 index 0000000000000..6800af83e89f2 --- /dev/null +++ b/airbyte-integrations/connectors/source-my-hours/source_my_hours/manifest.yaml @@ -0,0 +1,666 @@ +version: 0.44.0 +type: DeclarativeSource + +check: + type: CheckStream + stream_names: + - users + +streams: + - type: DeclarativeStream + name: users + primary_key: + - id + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/definitions/users_schema" + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api2.myhours.com/api + path: Users/getAll + http_method: GET + authenticator: + $ref: "#/definitions/custom_authenticator" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: [] + paginator: + type: NoPagination + - type: DeclarativeStream + name: time_logs + primary_key: + - logId + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/definitions/time_logs_schema" + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api2.myhours.com/api + path: Reports/activity + http_method: GET + request_parameters: + DateFrom: "{{ config['start_date'] }}" + DateTo: "{{ today_utc() }}" + authenticator: + $ref: "#/definitions/custom_authenticator" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: [] + paginator: + type: NoPagination + - type: DeclarativeStream + name: tags + primary_key: + - id + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/definitions/tags_schema" + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api2.myhours.com/api + path: Tags + http_method: GET + authenticator: + $ref: "#/definitions/custom_authenticator" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: [] + paginator: + type: NoPagination + - type: DeclarativeStream + name: projects + primary_key: + - id + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/definitions/projects_schema" + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api2.myhours.com/api + path: Projects/getAll + http_method: GET + authenticator: + $ref: "#/definitions/custom_authenticator" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: [] + paginator: + type: NoPagination + - type: DeclarativeStream + name: clients + primary_key: + - id + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/definitions/clients_schema" + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api2.myhours.com/api + path: Clients + http_method: GET + authenticator: + $ref: "#/definitions/custom_authenticator" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: [] + paginator: + type: NoPagination + +definitions: + custom_authenticator: + type: CustomAuthenticator + class_name: source_my_hours.components.CustomAuthenticator + email: "{{ config['email'] }}" + password: "{{ config['password'] }}" + + users_schema: + $schema: http://json-schema.org/draft-07/schema# + type: object + additionalProperties: true + properties: + id: + type: + - "null" + - number + name: + type: + - "null" + - string + archived: + type: + - "null" + - boolean + dateArchived: + type: + - "null" + - string + active: + type: + - "null" + - boolean + accountOwner: + type: + - "null" + - boolean + email: + type: + - "null" + - string + rate: + type: + - "null" + - number + billableRate: + type: + - "null" + - number + admin: + type: + - "null" + - boolean + isProjectManager: + type: + - "null" + - boolean + roleType: + type: + - "null" + - integer + customId: + type: + - "null" + - string + + clients_schema: + $schema: http://json-schema.org/draft-07/schema# + type: object + additionalProperties: true + properties: + id: + type: + - "null" + - number + customId: + type: + - "null" + - string + contactName: + type: + - "null" + - string + contactEmail: + type: + - "null" + - string + name: + type: + - "null" + - string + archived: + type: + - "null" + - boolean + dateArchived: + type: + - "null" + - string + + projects_schema: + $schema: http://json-schema.org/draft-07/schema# + type: object + additionalProperties: true + properties: + id: + type: + - "null" + - number + name: + type: + - "null" + - string + archived: + type: + - "null" + - boolean + dateArchived: + type: + - "null" + - string + dateCreated: + type: + - "null" + - string + clientName: + type: + - "null" + - string + clientId: + type: + - "null" + - integer + customId: + type: + - "null" + - string + budgetAlertPercent: + type: + - "null" + - number + budgetType: + type: + - "null" + - number + laborCost: + type: + - "null" + - number + totalTimeLogged: + type: + - "null" + - number + budgetValue: + type: + - "null" + - number + totalAmount: + type: + - "null" + - number + totalExpense: + type: + - "null" + - number + totalCost: + type: + - "null" + - number + billableTimeLogged: + type: + - "null" + - number + totalBillableAmount: + type: + - "null" + - number + billable: + type: + - "null" + - boolean + roundType: + type: + - "null" + - number + roundInterval: + type: + - "null" + - number + budgetSpentPercentage: + type: + - "null" + - number + budgetTarget: + type: + - "null" + - number + budgetPeriodType: + type: + - "null" + - number + budgetSpent: + type: + - "null" + - number + + tags_schema: + $schema: http://json-schema.org/draft-07/schema# + type: object + additionalProperties: true + properties: + id: + type: + - "null" + - number + name: + type: + - "null" + - string + hexColor: + type: + - "null" + - string + archived: + type: + - "null" + - boolean + dateArchived: + type: + - "null" + - string + + time_logs_schema: + $schema: http://json-schema.org/draft-07/schema# + type: object + additionalProperties: true + properties: + logId: + type: + - "null" + - number + userId: + type: + - "null" + - number + date: + type: + - "null" + - string + userName: + type: + - "null" + - string + clientId: + type: + - "null" + - number + clientName: + type: + - "null" + - string + projectId: + type: + - "null" + - number + projectName: + type: + - "null" + - string + projectInvoiceMethod: + type: + - "null" + - number + taskId: + type: + - "null" + - number + taskName: + type: + - "null" + - string + tags: + type: + - "null" + - string + rate: + type: + - "null" + - number + billable: + type: + - "null" + - boolean + inLockedPeriod: + type: + - "null" + - boolean + billableAmount: + type: + - "null" + - number + amount: + type: + - "null" + - number + laborCost: + type: + - "null" + - number + laborRate: + type: + - "null" + - number + laborDuration: + type: + - "null" + - number + logDuration: + type: + - "null" + - number + expense: + type: + - "null" + - number + cost: + type: + - "null" + - number + note: + type: + - "null" + - string + status: + type: + - "null" + - number + invoiceId: + type: + - "null" + - number + invoiced: + type: + - "null" + - boolean + billableHours: + type: + - "null" + - number + laborHours: + type: + - "null" + - number + customField1: + type: + - "null" + - number + customField2: + type: + - "null" + - number + customField3: + type: + - "null" + - number + monthOfYear: + type: + - "null" + - string + weekNo: + type: + - "null" + - number + weekOfYear: + type: + - "null" + - string + times: + type: + - "null" + - array + items: + type: + - object + properties: + id: + type: + - "null" + - number + duration: + type: + - "null" + - number + startTime: + type: + - "null" + - string + endTime: + type: + - "null" + - string + running: + type: + - "null" + - boolean + deleted: + type: + - "null" + - boolean + roundtype: + type: + - "null" + - number + attachments: + type: + - "null" + - array + balance: + type: + - "null" + - number + billableExpense: + type: + - "null" + - number + billableHoursLogBillable: + type: + - "null" + - number + clientCustomId: + type: + - "null" + - string + endTime: + type: + - "null" + - string + invoicedAmount: + type: + - "null" + - number + logDurationBillable: + type: + - "null" + - number + startTime: + type: + - "null" + - string + format: date-time + startEndTime: + type: + - "null" + - string + teams: + type: + - "null" + - array + items: + type: + - "null" + - string + teamsNames: + type: + - "null" + - string + taskListName: + type: + - "null" + - string + taskDueDate: + type: + - "null" + - string + taskStartDate: + type: + - "null" + - string + tagsData: + type: + - "null" + - array + uninvoicedAmount: + type: + - "null" + - number + +spec: + documentation_url: https://docs.airbyte.com/integrations/sources/my-hours + connection_specification: + $schema: http://json-schema.org/draft-07/schema# + title: My Hours Spec + type: object + required: + - email + - password + - start_date + additionalProperties: true + properties: + email: + title: Email + type: string + description: Your My Hours username + example: john@doe.com + password: + title: Password + type: string + description: The password associated to the username + airbyte_secret: true + start_date: + title: Start Date + description: Start date for collecting time logs + examples: + - "%Y-%m-%d" + - "2016-01-01" + type: string + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + logs_batch_size: + title: Time logs batch size + description: Pagination size used for retrieving logs in days + examples: + - 30 + type: integer + minimum: 1 + maximum: 365 + default: 30 + type: Spec +metadata: + autoImportSchema: + users: false + time_logs: false + tags: false + projects: false + clients: false diff --git a/airbyte-integrations/connectors/source-my-hours/source_my_hours/run.py b/airbyte-integrations/connectors/source-my-hours/source_my_hours/run.py index adad265e0bac6..b25bcb39ec360 100644 --- a/airbyte-integrations/connectors/source-my-hours/source_my_hours/run.py +++ b/airbyte-integrations/connectors/source-my-hours/source_my_hours/run.py @@ -6,7 +6,8 @@ import sys from airbyte_cdk.entrypoint import launch -from source_my_hours import SourceMyHours + +from .source import SourceMyHours def run(): diff --git a/airbyte-integrations/connectors/source-my-hours/source_my_hours/schemas/clients.json b/airbyte-integrations/connectors/source-my-hours/source_my_hours/schemas/clients.json deleted file mode 100644 index 20153f54d6ec6..0000000000000 --- a/airbyte-integrations/connectors/source-my-hours/source_my_hours/schemas/clients.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "id": { - "type": ["number"] - }, - "contactName": { - "type": ["null", "string"] - }, - "contactEmail": { - "type": ["null", "string"] - }, - "customId": { - "type": ["null", "string"] - }, - "name": { - "type": ["null", "string"] - }, - "archived": { - "type": ["null", "boolean"] - }, - "dateArchived": { - "type": ["null", "string"] - } - } -} diff --git a/airbyte-integrations/connectors/source-my-hours/source_my_hours/schemas/projects.json b/airbyte-integrations/connectors/source-my-hours/source_my_hours/schemas/projects.json deleted file mode 100644 index 692e72c151e28..0000000000000 --- a/airbyte-integrations/connectors/source-my-hours/source_my_hours/schemas/projects.json +++ /dev/null @@ -1,81 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "id": { - "type": ["number"] - }, - "name": { - "type": ["null", "string"] - }, - "archived": { - "type": ["null", "boolean"] - }, - "dateArchived": { - "type": ["null", "string"] - }, - "dateCreated": { - "type": ["null", "string"] - }, - "clientName": { - "type": ["null", "string"] - }, - "clientId": { - "type": ["null", "integer"] - }, - "customId": { - "type": ["null", "string"] - }, - "budgetAlertPercent": { - "type": ["null", "number"] - }, - "budgetType": { - "type": ["null", "number"] - }, - "laborCost": { - "type": ["null", "number"] - }, - "totalTimeLogged": { - "type": ["null", "number"] - }, - "budgetValue": { - "type": ["null", "number"] - }, - "totalAmount": { - "type": ["null", "number"] - }, - "totalExpense": { - "type": ["null", "number"] - }, - "totalCost": { - "type": ["null", "number"] - }, - "billableTimeLogged": { - "type": ["null", "number"] - }, - "totalBillableAmount": { - "type": ["null", "number"] - }, - "billable": { - "type": ["null", "boolean"] - }, - "roundType": { - "type": ["null", "number"] - }, - "roundInterval": { - "type": ["null", "number"] - }, - "budgetSpentPercentage": { - "type": ["null", "number"] - }, - "budgetTarget": { - "type": ["null", "number"] - }, - "budgetPeriodType": { - "type": ["null", "number"] - }, - "budgetSpent": { - "type": ["null", "number"] - } - } -} diff --git a/airbyte-integrations/connectors/source-my-hours/source_my_hours/schemas/tags.json b/airbyte-integrations/connectors/source-my-hours/source_my_hours/schemas/tags.json deleted file mode 100644 index 38f354a72d071..0000000000000 --- a/airbyte-integrations/connectors/source-my-hours/source_my_hours/schemas/tags.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "id": { - "type": ["number"] - }, - "name": { - "type": ["null", "string"] - }, - "hexColor": { - "type": ["null", "string"] - }, - "archived": { - "type": ["null", "boolean"] - }, - "dateArchived": { - "type": ["null", "string"] - } - } -} diff --git a/airbyte-integrations/connectors/source-my-hours/source_my_hours/schemas/time_logs.json b/airbyte-integrations/connectors/source-my-hours/source_my_hours/schemas/time_logs.json deleted file mode 100644 index ffb58a1712df0..0000000000000 --- a/airbyte-integrations/connectors/source-my-hours/source_my_hours/schemas/time_logs.json +++ /dev/null @@ -1,196 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "additionalProperties": true, - "properties": { - "logId": { - "type": ["number"] - }, - "userId": { - "type": ["null", "number"] - }, - "date": { - "type": ["null", "string"] - }, - "userName": { - "type": ["null", "string"] - }, - "clientId": { - "type": ["null", "number"] - }, - "clientName": { - "type": ["null", "string"] - }, - "projectId": { - "type": ["null", "number"] - }, - "projectName": { - "type": ["null", "string"] - }, - "projectInvoiceMethod": { - "type": ["null", "number"] - }, - "taskId": { - "type": ["null", "number"] - }, - "taskName": { - "type": ["null", "string"] - }, - "tags": { - "type": ["null", "string"] - }, - "rate": { - "type": ["null", "number"] - }, - "billable": { - "type": ["null", "boolean"] - }, - "inLockedPeriod": { - "type": ["null", "boolean"] - }, - "billableAmount": { - "type": ["null", "number"] - }, - "amount": { - "type": ["null", "number"] - }, - "laborCost": { - "type": ["null", "number"] - }, - "laborRate": { - "type": ["null", "number"] - }, - "laborDuration": { - "type": ["null", "number"] - }, - "logDuration": { - "type": ["null", "number"] - }, - "expense": { - "type": ["null", "number"] - }, - "cost": { - "type": ["null", "number"] - }, - "note": { - "type": ["null", "string"] - }, - "status": { - "type": ["null", "number"] - }, - "invoiceId": { - "type": ["null", "number"] - }, - "invoiced": { - "type": ["null", "boolean"] - }, - "billableHours": { - "type": ["null", "number"] - }, - "laborHours": { - "type": ["null", "number"] - }, - "customField1": { - "type": ["null", "number"] - }, - "customField2": { - "type": ["null", "number"] - }, - "customField3": { - "type": ["null", "number"] - }, - "monthOfYear": { - "type": ["null", "string"] - }, - "weekNo": { - "type": ["null", "number"] - }, - "weekOfYear": { - "type": ["null", "string"] - }, - "times": { - "type": ["null", "array"], - "items": { - "type": ["object"], - "properties": { - "id": { - "type": ["null", "number"] - }, - "duration": { - "type": ["null", "number"] - }, - "startTime": { - "type": ["null", "string"] - }, - "endTime": { - "type": ["null", "string"] - }, - "running": { - "type": ["null", "boolean"] - }, - "deleted": { - "type": ["null", "boolean"] - } - } - } - }, - "roundtype": { - "type": ["null", "number"] - }, - "attachments": { - "type": ["null", "array"] - }, - "balance": { - "type": ["null", "number"] - }, - "billableExpense": { - "type": ["null", "number"] - }, - "billableHoursLogBillable": { - "type": ["null", "number"] - }, - "clientCustomId": { - "type": ["null", "string"] - }, - "endTime": { - "type": ["null", "string"] - }, - "invoicedAmount": { - "type": ["null", "number"] - }, - "logDurationBillable": { - "type": ["null", "number"] - }, - "startTime": { - "type": ["null", "string"], - "format": "date-time" - }, - "startEndTime": { - "type": ["null", "string"] - }, - "teams": { - "type": ["null", "array"], - "items": { - "type": ["null", "string"] - } - }, - "teamsNames": { - "type": ["null", "string"] - }, - "taskListName": { - "type": ["null", "string"] - }, - "taskDueDate": { - "type": ["null", "string"] - }, - "taskStartDate": { - "type": ["null", "string"] - }, - "tagsData": { - "type": ["null", "array"] - }, - "uninvoicedAmount": { - "type": ["null", "number"] - } - } -} diff --git a/airbyte-integrations/connectors/source-my-hours/source_my_hours/schemas/users.json b/airbyte-integrations/connectors/source-my-hours/source_my_hours/schemas/users.json deleted file mode 100644 index 6c5d22739d6e1..0000000000000 --- a/airbyte-integrations/connectors/source-my-hours/source_my_hours/schemas/users.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "id": { - "type": ["number"] - }, - "name": { - "type": ["null", "string"] - }, - "archived": { - "type": ["null", "boolean"] - }, - "dateArchived": { - "type": ["null", "string"] - }, - "active": { - "type": ["null", "boolean"] - }, - "accountOwner": { - "type": ["null", "boolean"] - }, - "email": { - "type": ["null", "string"] - }, - "rate": { - "type": ["null", "number"] - }, - "billableRate": { - "type": ["null", "number"] - }, - "admin": { - "type": ["null", "boolean"] - }, - "isProjectManager": { - "type": ["null", "boolean"] - }, - "roleType": { - "type": ["null", "integer"] - }, - "customId": { - "type": ["null", "string"] - } - } -} diff --git a/airbyte-integrations/connectors/source-my-hours/source_my_hours/source.py b/airbyte-integrations/connectors/source-my-hours/source_my_hours/source.py index c9a6747bb4992..93039a00f4b72 100644 --- a/airbyte-integrations/connectors/source-my-hours/source_my_hours/source.py +++ b/airbyte-integrations/connectors/source-my-hours/source_my_hours/source.py @@ -2,133 +2,17 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # -from typing import Any, List, Mapping, MutableMapping, Optional, Tuple -from urllib.parse import parse_qs, urlparse +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource -import pendulum -import requests -from airbyte_cdk.logger import AirbyteLogger -from airbyte_cdk.sources import AbstractSource -from airbyte_cdk.sources.streams import Stream -from airbyte_cdk.sources.streams.http.requests_native_auth import TokenAuthenticator -from source_my_hours.auth import MyHoursAuthenticator -from source_my_hours.stream import MyHoursStream +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. -from .constants import REQUEST_HEADERS, URL_BASE +WARNING: Do not modify this file. +""" -class Clients(MyHoursStream): - primary_key = "id" - - def path( - self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None - ) -> str: - return "Clients" - - -class Projects(MyHoursStream): - primary_key = "id" - - def path( - self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None - ) -> str: - return "Projects/getAll" - - -class Tags(MyHoursStream): - primary_key = "id" - - def path( - self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None - ) -> str: - return "Tags" - - -class TimeLogs(MyHoursStream): - primary_key = "logId" - - def __init__( - self, - authenticator: TokenAuthenticator, - start_date: str, - batch_size: int, - **kwargs, - ): - super().__init__(authenticator=authenticator) - - self.start_date = pendulum.parse(start_date) - self.batch_size = batch_size - - if self.start_date > pendulum.now(): - self.logger.warn(f'Stream {self.name}: start_date "{start_date.isoformat()}" should be before today.') - - def path( - self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None - ) -> str: - return "Reports/activity" - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - previous_query = parse_qs(urlparse(response.request.url).query) - previous_end = pendulum.parse(previous_query["DateTo"][0]) - - new_from = previous_end.add(days=1) - new_to = new_from.add(days=self.batch_size - 1) - - if new_from > pendulum.now(): - return None - - return { - "DateFrom": new_from.to_date_string(), - "DateTo": new_to.to_date_string(), - } - - def request_params( - self, - stream_state: Mapping[str, Any], - stream_slice: Mapping[str, Any] = None, - next_page_token: Mapping[str, Any] = None, - ) -> MutableMapping[str, Any]: - - if next_page_token is None: - return {"DateFrom": self.start_date.to_date_string(), "DateTo": self.start_date.add(days=self.batch_size - 1).to_date_string()} - return next_page_token - - -class Users(MyHoursStream): - primary_key = "id" - - def path( - self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None - ) -> str: - return "Users/getAll" - - -# Source -class SourceMyHours(AbstractSource): - def check_connection(self, logger: AirbyteLogger, config) -> Tuple[bool, any]: - url = f"{URL_BASE}/Clients" - - try: - authenticator = self._make_authenticator(config) - headers = authenticator.get_auth_header() - headers.update(REQUEST_HEADERS) - - response = requests.get(url, headers=headers) - response.raise_for_status() - return True, None - except Exception as e: - return False, e - - def streams(self, config: Mapping[str, Any]) -> List[Stream]: - auth = self._make_authenticator(config) - return [ - Clients(authenticator=auth), - Projects(authenticator=auth), - Tags(authenticator=auth), - TimeLogs(authenticator=auth, start_date=config["start_date"], batch_size=config["logs_batch_size"]), - Users(authenticator=auth), - ] - - @staticmethod - def _make_authenticator(config) -> MyHoursAuthenticator: - return MyHoursAuthenticator(config["email"], config["password"]) +# Declarative Source +class SourceMyHours(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "manifest.yaml"}) diff --git a/airbyte-integrations/connectors/source-my-hours/source_my_hours/spec.json b/airbyte-integrations/connectors/source-my-hours/source_my_hours/spec.json deleted file mode 100644 index eef075c9ad3a3..0000000000000 --- a/airbyte-integrations/connectors/source-my-hours/source_my_hours/spec.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.com/integrations/sources/my-hours", - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "My Hours Spec", - "type": "object", - "required": ["email", "password", "start_date"], - "additionalProperties": true, - "properties": { - "email": { - "title": "Email", - "type": "string", - "description": "Your My Hours username", - "example": "john@doe.com" - }, - "password": { - "title": "Password", - "type": "string", - "description": "The password associated to the username", - "airbyte_secret": true - }, - "start_date": { - "title": "Start Date", - "description": "Start date for collecting time logs", - "examples": ["%Y-%m-%d", "2016-01-01"], - "type": "string", - "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" - }, - "logs_batch_size": { - "title": "Time logs batch size", - "description": "Pagination size used for retrieving logs in days", - "examples": [30], - "type": "integer", - "minimum": 1, - "maximum": 365, - "default": 30 - } - } - } -} diff --git a/airbyte-integrations/connectors/source-my-hours/source_my_hours/stream.py b/airbyte-integrations/connectors/source-my-hours/source_my_hours/stream.py deleted file mode 100644 index ebde5fadb9988..0000000000000 --- a/airbyte-integrations/connectors/source-my-hours/source_my_hours/stream.py +++ /dev/null @@ -1,27 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from abc import ABC -from typing import Any, Iterable, Mapping, Optional - -import requests -from airbyte_cdk.sources.streams.http import HttpStream - -from .constants import REQUEST_HEADERS, URL_BASE - - -class MyHoursStream(HttpStream, ABC): - url_base = URL_BASE + "/" - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - return None - - def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - for record in response.json(): - yield record - - def request_headers( - self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None - ) -> Mapping[str, Any]: - return REQUEST_HEADERS diff --git a/airbyte-integrations/connectors/source-my-hours/unit_tests/__init__.py b/airbyte-integrations/connectors/source-my-hours/unit_tests/__init__.py deleted file mode 100644 index 46b7376756ec6..0000000000000 --- a/airbyte-integrations/connectors/source-my-hours/unit_tests/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# -# Copyright (c) 2021 Airbyte, Inc., all rights reserved. -# diff --git a/airbyte-integrations/connectors/source-my-hours/unit_tests/test_auth.py b/airbyte-integrations/connectors/source-my-hours/unit_tests/test_auth.py deleted file mode 100644 index e36c8c7edaff8..0000000000000 --- a/airbyte-integrations/connectors/source-my-hours/unit_tests/test_auth.py +++ /dev/null @@ -1,30 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import responses -from source_my_hours.auth import MyHoursAuthenticator -from source_my_hours.constants import URL_BASE - -DEFAULT_CONFIG = {"email": "john@doe.com", "password": "pwd"} - - -@responses.activate -def test_init(mocker): - responses.add(responses.POST, f"{URL_BASE}/tokens/login", json={"accessToken": "at", "refreshToken": "rt", "expiresIn": 100}) - - authenticator = MyHoursAuthenticator(email="email", password="password") - authenticator._access_token - assert authenticator._access_token == "at" - - -@responses.activate -def test_refresh(mocker): - responses.add(responses.POST, f"{URL_BASE}/tokens/login", json={"accessToken": "at", "refreshToken": "rt", "expiresIn": 0}) - responses.add(responses.POST, f"{URL_BASE}/tokens/refresh", json={"accessToken": "at2", "refreshToken": "rt2", "expiresIn": 100}) - - authenticator = MyHoursAuthenticator(email="email", password="password") - access_token = authenticator.get_access_token() - - assert access_token == "at2" - assert authenticator.refresh_token == "rt2" diff --git a/airbyte-integrations/connectors/source-my-hours/unit_tests/test_source.py b/airbyte-integrations/connectors/source-my-hours/unit_tests/test_source.py deleted file mode 100644 index 2d3ee06218ac5..0000000000000 --- a/airbyte-integrations/connectors/source-my-hours/unit_tests/test_source.py +++ /dev/null @@ -1,71 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from unittest.mock import MagicMock - -import responses -from source_my_hours.constants import URL_BASE -from source_my_hours.source import SourceMyHours, TimeLogs - -DEFAULT_CONFIG = {"email": "john@doe.com", "password": "pwd"} - - -@responses.activate -def test_check_connection_success(mocker): - source = SourceMyHours() - logger_mock = MagicMock() - - responses.add(responses.POST, f"{URL_BASE}/tokens/login", json={"accessToken": "at", "refreshToken": "rt", "expiresIn": 100}) - responses.add( - responses.GET, - f"{URL_BASE}/Clients", - ) - - assert source.check_connection(logger_mock, DEFAULT_CONFIG) == (True, None) - - -@responses.activate -def test_check_connection_authentication_failure(mocker): - source = SourceMyHours() - logger_mock = MagicMock() - - responses.add(responses.POST, f"{URL_BASE}/tokens/login", status=403, json={"message": "Incorrect email or password"}) - - success, exception = source.check_connection(logger_mock, DEFAULT_CONFIG) - - assert success is False - assert exception is not None - - -@responses.activate -def test_check_connection_connection_failure(mocker): - source = SourceMyHours() - logger_mock = MagicMock() - - responses.add(responses.POST, f"{URL_BASE}/tokens/login", json={"accessToken": "at", "refreshToken": "rt", "expiresIn": 100}) - responses.add(responses.GET, f"{URL_BASE}/Clients", status=403) - - success, exception = source.check_connection(logger_mock, DEFAULT_CONFIG) - assert success is False - assert exception is not None - - -@responses.activate -def test_streams(mocker): - source = SourceMyHours() - responses.add(responses.POST, f"{URL_BASE}/tokens/login", json={"accessToken": "at", "refreshToken": "rt", "expiresIn": 100}) - config = {"email": "john@doe.com", "password": "pwd", "logs_batch_size": 30, "start_date": "2021-01-01"} - - streams = source.streams(config) - expected_streams_number = 5 - assert len(streams) == expected_streams_number - - -def test_time_logs_next_page_token(mocker): - stream = TimeLogs(authenticator=MagicMock(), start_date="2021-01-01", batch_size=10) - reponse_mock = MagicMock() - reponse_mock.request.url = "https://myhours.com/test?DateTo=2021-01-01" - inputs = {"response": reponse_mock} - expected_token = {"DateFrom": "2021-01-02", "DateTo": "2021-01-11"} - assert stream.next_page_token(**inputs) == expected_token diff --git a/airbyte-integrations/connectors/source-my-hours/unit_tests/test_stream.py b/airbyte-integrations/connectors/source-my-hours/unit_tests/test_stream.py deleted file mode 100644 index 27b2fe575c7fd..0000000000000 --- a/airbyte-integrations/connectors/source-my-hours/unit_tests/test_stream.py +++ /dev/null @@ -1,41 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from unittest.mock import MagicMock - -import pytest -import requests -from source_my_hours.constants import REQUEST_HEADERS -from source_my_hours.stream import MyHoursStream - - -@pytest.fixture -def patch_base_class(mocker): - # Mock abstract methods to enable instantiating abstract class - mocker.patch.object(MyHoursStream, "path", "v0/example_endpoint") - mocker.patch.object(MyHoursStream, "primary_key", "test_primary_key") - mocker.patch.object(MyHoursStream, "__abstractmethods__", set()) - - -def test_next_page_token(patch_base_class): - stream = MyHoursStream() - inputs = {"response": MagicMock()} - expected_token = None - assert stream.next_page_token(**inputs) == expected_token - - -def test_parse_response(patch_base_class, requests_mock): - stream = MyHoursStream() - requests_mock.get("https://dummy", json=[{"name": "test"}]) - resp = requests.get("https://dummy") - inputs = {"response": resp} - expected_parsed_object = {"name": "test"} - assert next(stream.parse_response(**inputs)) == expected_parsed_object - - -def test_request_headers(patch_base_class): - stream = MyHoursStream() - inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} - expected_headers = REQUEST_HEADERS - assert stream.request_headers(**inputs) == expected_headers diff --git a/airbyte-integrations/connectors/source-mysql/build.gradle b/airbyte-integrations/connectors/source-mysql/build.gradle index 88b501de22046..b491ec5d8350b 100644 --- a/airbyte-integrations/connectors/source-mysql/build.gradle +++ b/airbyte-integrations/connectors/source-mysql/build.gradle @@ -6,7 +6,7 @@ plugins { } airbyteJavaConnector { - cdkVersionRequired = '0.23.8' + cdkVersionRequired = '0.30.5' features = ['db-sources'] useLocalCdk = false } diff --git a/airbyte-integrations/connectors/source-mysql/metadata.yaml b/airbyte-integrations/connectors/source-mysql/metadata.yaml index 044fd28e7d8eb..e7376ed33951a 100644 --- a/airbyte-integrations/connectors/source-mysql/metadata.yaml +++ b/airbyte-integrations/connectors/source-mysql/metadata.yaml @@ -9,12 +9,13 @@ data: connectorSubtype: database connectorType: source definitionId: 435bb9a5-7887-4809-aa58-28c27df0d7ad - dockerImageTag: 3.3.13 + dockerImageTag: 3.3.20 dockerRepository: airbyte/source-mysql documentationUrl: https://docs.airbyte.com/integrations/sources/mysql githubIssueLabel: source-mysql icon: mysql.svg license: ELv2 + maxSecondsBetweenMessages: 7200 name: MySQL registries: cloud: diff --git a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/MySqlQueryUtils.java b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/MySqlQueryUtils.java index 2651e13f4d562..871e837437ba9 100644 --- a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/MySqlQueryUtils.java +++ b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/MySqlQueryUtils.java @@ -212,6 +212,7 @@ private static List getTableEstimate(final JdbcDatabase database, fina // Construct the table estimate query. final String tableEstimateQuery = String.format(TABLE_ESTIMATE_QUERY, TABLE_SIZE_BYTES_COL, AVG_ROW_LENGTH, namespace, name); + LOGGER.info("Querying for table size estimate: {}", tableEstimateQuery); final List jsonNodes = database.bufferedResultSetQuery(conn -> conn.createStatement().executeQuery(tableEstimateQuery), resultSet -> JdbcUtils.getDefaultSourceOperations().rowToJson(resultSet)); Preconditions.checkState(jsonNodes.size() == 1); diff --git a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/MySqlSource.java b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/MySqlSource.java index d6117aec6e5be..631cfd5d3cee6 100644 --- a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/MySqlSource.java +++ b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/MySqlSource.java @@ -401,7 +401,7 @@ public List> getIncrementalIterators(final final MySqlCursorBasedStateManager cursorBasedStateManager = new MySqlCursorBasedStateManager(stateManager.getRawStateMessages(), catalog); final InitialLoadStreams initialLoadStreams = streamsForInitialPrimaryKeyLoad(cursorBasedStateManager, catalog); final Map pairToCursorBasedStatus = - getCursorBasedSyncStatusForStreams(database, initialLoadStreams.streamsForInitialLoad(), stateManager, quoteString); + getCursorBasedSyncStatusForStreams(database, initialLoadStreams.streamsForInitialLoad(), stateManager, getQuoteString()); final CursorBasedStreams cursorBasedStreams = new CursorBasedStreams(MySqlInitialReadUtil.identifyStreamsForCursorBased(catalog, initialLoadStreams.streamsForInitialLoad()), pairToCursorBasedStatus); @@ -411,7 +411,7 @@ public List> getIncrementalIterators(final final MySqlInitialLoadStreamStateManager mySqlInitialLoadStreamStateManager = new MySqlInitialLoadStreamStateManager(catalog, initialLoadStreams, - initPairToPrimaryKeyInfoMap(database, initialLoadStreams, tableNameToTable, quoteString)); + initPairToPrimaryKeyInfoMap(database, initialLoadStreams, tableNameToTable, getQuoteString())); final MySqlInitialLoadHandler initialLoadHandler = new MySqlInitialLoadHandler(sourceConfig, database, new MySqlSourceOperations(), getQuoteString(), mySqlInitialLoadStreamStateManager, namespacePair -> Jsons.jsonNode(pairToCursorBasedStatus.get(convertNameNamespacePairFromV0(namespacePair))), @@ -502,7 +502,7 @@ private boolean convertToBoolean(final String value) { } private boolean cloudDeploymentMode() { - return AdaptiveSourceRunner.CLOUD_MODE.equalsIgnoreCase(featureFlags.deploymentMode()); + return AdaptiveSourceRunner.CLOUD_MODE.equalsIgnoreCase(getFeatureFlags().deploymentMode()); } @Override @@ -539,7 +539,7 @@ public JdbcDatabase createDatabase(final JsonNode sourceConfig) throws SQLExcept sourceOperations, streamingQueryConfigProvider); - quoteString = (quoteString == null ? database.getMetaData().getIdentifierQuoteString() : quoteString); + setQuoteString((getQuoteString() == null ? database.getMetaData().getIdentifierQuoteString() : getQuoteString())); database.setSourceConfig(sourceConfig); database.setDatabaseConfig(jdbcConfig); return database; diff --git a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/MySqlSourceOperations.java b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/MySqlSourceOperations.java index 53bdf69e4fe59..c322ebb2ed60d 100644 --- a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/MySqlSourceOperations.java +++ b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/MySqlSourceOperations.java @@ -46,6 +46,8 @@ import com.mysql.cj.result.Field; import io.airbyte.cdk.db.SourceOperations; import io.airbyte.cdk.db.jdbc.AbstractJdbcCompatibleSourceOperations; +import io.airbyte.cdk.db.jdbc.AirbyteRecordData; +import io.airbyte.integrations.source.mysql.initialsync.CdcMetadataInjector; import io.airbyte.protocol.models.JsonSchemaType; import java.sql.PreparedStatement; import java.sql.ResultSet; @@ -54,6 +56,7 @@ import java.time.LocalDateTime; import java.time.OffsetDateTime; import java.time.format.DateTimeParseException; +import java.util.Optional; import java.util.Set; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -66,6 +69,29 @@ public class MySqlSourceOperations extends AbstractJdbcCompatibleSourceOperation FLOAT, FLOAT_UNSIGNED, DOUBLE, DOUBLE_UNSIGNED, DECIMAL, DECIMAL_UNSIGNED, DATE, DATETIME, TIMESTAMP, TIME, YEAR, VARCHAR, TINYTEXT, TEXT, MEDIUMTEXT, LONGTEXT); + private final Optional metadataInjector; + + public MySqlSourceOperations() { + super(); + this.metadataInjector = Optional.empty(); + } + + public MySqlSourceOperations(final Optional metadataInjector) { + super(); + this.metadataInjector = metadataInjector; + } + + @Override + public AirbyteRecordData convertDatabaseRowToAirbyteRecordData(final ResultSet queryContext) throws SQLException { + final AirbyteRecordData recordData = super.convertDatabaseRowToAirbyteRecordData(queryContext); + final ObjectNode jsonNode = (ObjectNode) recordData.rawRowData(); + if (!metadataInjector.isPresent()) { + return recordData; + } + metadataInjector.get().inject(jsonNode); + return new AirbyteRecordData(jsonNode, recordData.meta()); + } + /** * @param colIndex 1-based column index. */ @@ -76,53 +102,62 @@ public void copyToJsonField(final ResultSet resultSet, final int colIndex, final final String columnName = field.getName(); final MysqlType columnType = field.getMysqlType(); - // https://dev.mysql.com/doc/connector-j/8.0/en/connector-j-reference-type-conversions.html - switch (columnType) { - case BIT -> { - if (field.getLength() == 1L) { - // BIT(1) is boolean - putBoolean(json, columnName, resultSet, colIndex); - } else { - putBinary(json, columnName, resultSet, colIndex); + // Attempt to access the column. this allows us to know if it is null before we do + // type-specific parsing. If the column is null, we will populate the null value and skip attempting + // to + // parse the column value. + resultSet.getObject(colIndex); + if (resultSet.wasNull()) { + json.putNull(columnName); + } else { + // https://dev.mysql.com/doc/connector-j/8.0/en/connector-j-reference-type-conversions.html + switch (columnType) { + case BIT -> { + if (field.getLength() == 1L) { + // BIT(1) is boolean + putBoolean(json, columnName, resultSet, colIndex); + } else { + putBinary(json, columnName, resultSet, colIndex); + } } - } - case BOOLEAN -> putBoolean(json, columnName, resultSet, colIndex); - case TINYINT -> { - if (field.getLength() == 1L) { - // TINYINT(1) is boolean - putBoolean(json, columnName, resultSet, colIndex); - } else { - putShortInt(json, columnName, resultSet, colIndex); + case BOOLEAN -> putBoolean(json, columnName, resultSet, colIndex); + case TINYINT -> { + if (field.getLength() == 1L) { + // TINYINT(1) is boolean + putBoolean(json, columnName, resultSet, colIndex); + } else { + putShortInt(json, columnName, resultSet, colIndex); + } } - } - case TINYINT_UNSIGNED, YEAR -> putShortInt(json, columnName, resultSet, colIndex); - case SMALLINT, SMALLINT_UNSIGNED, MEDIUMINT, MEDIUMINT_UNSIGNED -> putInteger(json, columnName, resultSet, colIndex); - case INT, INT_UNSIGNED -> { - if (field.isUnsigned()) { - putBigInt(json, columnName, resultSet, colIndex); - } else { - putInteger(json, columnName, resultSet, colIndex); + case TINYINT_UNSIGNED, YEAR -> putShortInt(json, columnName, resultSet, colIndex); + case SMALLINT, SMALLINT_UNSIGNED, MEDIUMINT, MEDIUMINT_UNSIGNED -> putInteger(json, columnName, resultSet, colIndex); + case INT, INT_UNSIGNED -> { + if (field.isUnsigned()) { + putBigInt(json, columnName, resultSet, colIndex); + } else { + putInteger(json, columnName, resultSet, colIndex); + } } - } - case BIGINT, BIGINT_UNSIGNED -> putBigInt(json, columnName, resultSet, colIndex); - case FLOAT, FLOAT_UNSIGNED -> putFloat(json, columnName, resultSet, colIndex); - case DOUBLE, DOUBLE_UNSIGNED -> putDouble(json, columnName, resultSet, colIndex); - case DECIMAL, DECIMAL_UNSIGNED -> { - if (field.getDecimals() == 0) { - putBigInt(json, columnName, resultSet, colIndex); - } else { - putBigDecimal(json, columnName, resultSet, colIndex); + case BIGINT, BIGINT_UNSIGNED -> putBigInt(json, columnName, resultSet, colIndex); + case FLOAT, FLOAT_UNSIGNED -> putFloat(json, columnName, resultSet, colIndex); + case DOUBLE, DOUBLE_UNSIGNED -> putDouble(json, columnName, resultSet, colIndex); + case DECIMAL, DECIMAL_UNSIGNED -> { + if (field.getDecimals() == 0) { + putBigInt(json, columnName, resultSet, colIndex); + } else { + putBigDecimal(json, columnName, resultSet, colIndex); + } } + case DATE -> putDate(json, columnName, resultSet, colIndex); + case DATETIME -> putTimestamp(json, columnName, resultSet, colIndex); + case TIMESTAMP -> putTimestampWithTimezone(json, columnName, resultSet, colIndex); + case TIME -> putTime(json, columnName, resultSet, colIndex); + case CHAR, VARCHAR -> putString(json, columnName, resultSet, colIndex); + case TINYBLOB, BLOB, MEDIUMBLOB, LONGBLOB, BINARY, VARBINARY, GEOMETRY -> putBinary(json, columnName, resultSet, colIndex); + case TINYTEXT, TEXT, MEDIUMTEXT, LONGTEXT, JSON, ENUM, SET -> putString(json, columnName, resultSet, colIndex); + case NULL -> json.set(columnName, NullNode.instance); + default -> putDefault(json, columnName, resultSet, colIndex); } - case DATE -> putDate(json, columnName, resultSet, colIndex); - case DATETIME -> putTimestamp(json, columnName, resultSet, colIndex); - case TIMESTAMP -> putTimestampWithTimezone(json, columnName, resultSet, colIndex); - case TIME -> putTime(json, columnName, resultSet, colIndex); - case CHAR, VARCHAR -> putString(json, columnName, resultSet, colIndex); - case TINYBLOB, BLOB, MEDIUMBLOB, LONGBLOB, BINARY, VARBINARY, GEOMETRY -> putBinary(json, columnName, resultSet, colIndex); - case TINYTEXT, TEXT, MEDIUMTEXT, LONGTEXT, JSON, ENUM, SET -> putString(json, columnName, resultSet, colIndex); - case NULL -> json.set(columnName, NullNode.instance); - default -> putDefault(json, columnName, resultSet, colIndex); } } diff --git a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/cdc/MySqlDebeziumStateUtil.java b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/cdc/MySqlDebeziumStateUtil.java index dda584d1de03e..97aa8dc96b4dd 100644 --- a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/cdc/MySqlDebeziumStateUtil.java +++ b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/cdc/MySqlDebeziumStateUtil.java @@ -282,7 +282,7 @@ public JsonNode constructInitialDebeziumState(final Properties properties, assert !offset.isEmpty(); assert Objects.nonNull(schemaHistory); - assert Objects.nonNull(schemaHistory.schema()); + assert Objects.nonNull(schemaHistory.getSchema()); final JsonNode asJson = serialize(offset, schemaHistory); LOGGER.info("Initial Debezium state constructed: {}", asJson); @@ -296,7 +296,7 @@ public JsonNode constructInitialDebeziumState(final Properties properties, public static JsonNode serialize(final Map offset, final SchemaHistory dbHistory) { final Map state = new HashMap<>(); state.put(MysqlCdcStateConstants.MYSQL_CDC_OFFSET, offset); - state.put(MysqlCdcStateConstants.MYSQL_DB_HISTORY, dbHistory.schema()); + state.put(MysqlCdcStateConstants.MYSQL_DB_HISTORY, dbHistory.getSchema()); state.put(MysqlCdcStateConstants.IS_COMPRESSED, dbHistory.isCompressed()); return Jsons.jsonNode(state); diff --git a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/CdcMetadataInjector.java b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/CdcMetadataInjector.java new file mode 100644 index 0000000000000..cde1f645a60fe --- /dev/null +++ b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/CdcMetadataInjector.java @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.source.mysql.initialsync; + +import com.fasterxml.jackson.databind.node.ObjectNode; +import io.airbyte.integrations.source.mysql.cdc.MySqlCdcConnectorMetadataInjector; +import io.airbyte.integrations.source.mysql.cdc.MySqlDebeziumStateUtil.MysqlDebeziumStateAttributes; + +public class CdcMetadataInjector { + + private final String transactionTimestamp; + private final MysqlDebeziumStateAttributes stateAttributes; + private final MySqlCdcConnectorMetadataInjector metadataInjector; + + public CdcMetadataInjector(final String transactionTimestamp, + final MysqlDebeziumStateAttributes stateAttributes, + final MySqlCdcConnectorMetadataInjector metadataInjector) { + this.transactionTimestamp = transactionTimestamp; + this.stateAttributes = stateAttributes; + this.metadataInjector = metadataInjector; + } + + public void inject(final ObjectNode record) { + metadataInjector.addMetaDataToRowsFetchedOutsideDebezium(record, transactionTimestamp, stateAttributes); + } + +} diff --git a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialLoadHandler.java b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialLoadHandler.java index 6b03ff28128f0..2457bc5924fe5 100644 --- a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialLoadHandler.java +++ b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialLoadHandler.java @@ -10,6 +10,7 @@ import com.fasterxml.jackson.databind.JsonNode; import com.google.common.annotations.VisibleForTesting; import com.mysql.cj.MysqlType; +import io.airbyte.cdk.db.jdbc.AirbyteRecordData; import io.airbyte.cdk.db.jdbc.JdbcDatabase; import io.airbyte.cdk.integrations.debezium.DebeziumIteratorConstants; import io.airbyte.cdk.integrations.source.relationaldb.DbSourceDiscoverUtil; @@ -27,6 +28,7 @@ import io.airbyte.protocol.models.v0.AirbyteMessage; import io.airbyte.protocol.models.v0.AirbyteMessage.Type; import io.airbyte.protocol.models.v0.AirbyteRecordMessage; +import io.airbyte.protocol.models.v0.AirbyteRecordMessageMeta; import io.airbyte.protocol.models.v0.AirbyteStream; import io.airbyte.protocol.models.v0.CatalogHelpers; import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; @@ -110,7 +112,7 @@ public List> getIncrementalIterators( } }); - final AutoCloseableIterator queryStream = + final AutoCloseableIterator queryStream = new MySqlInitialLoadRecordIterator(database, sourceOperations, quoteString, initialLoadStateManager, selectedDatabaseFields, pair, calculateChunkSize(tableSizeInfoMap.get(pair), pair), isCompositePrimaryKey(airbyteStream)); final AutoCloseableIterator recordIterator = @@ -144,7 +146,7 @@ public static long calculateChunkSize(final TableSizeInfo tableSizeInfo, final A // Transforms the given iterator to create an {@link AirbyteRecordMessage} private AutoCloseableIterator getRecordIterator( - final AutoCloseableIterator recordIterator, + final AutoCloseableIterator recordIterator, final String streamName, final String namespace, final long emittedAt) { @@ -154,7 +156,12 @@ private AutoCloseableIterator getRecordIterator( .withStream(streamName) .withNamespace(namespace) .withEmittedAt(emittedAt) - .withData(r))); + .withData(r.rawRowData()) + .withMeta(isMetaChangesEmptyOrNull(r.meta()) ? null : r.meta()))); + } + + private boolean isMetaChangesEmptyOrNull(AirbyteRecordMessageMeta meta) { + return meta == null || meta.getChanges() == null || meta.getChanges().isEmpty(); } // Augments the given iterator with record count logs. diff --git a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialLoadRecordIterator.java b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialLoadRecordIterator.java index a0803cea4f061..7c1c600766a8a 100644 --- a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialLoadRecordIterator.java +++ b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialLoadRecordIterator.java @@ -4,13 +4,10 @@ package io.airbyte.integrations.source.mysql.initialsync; -import static io.airbyte.cdk.integrations.source.relationaldb.RelationalDbQueryUtils.enquoteIdentifier; -import static io.airbyte.cdk.integrations.source.relationaldb.RelationalDbQueryUtils.getFullyQualifiedTableNameWithQuoting; - -import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.AbstractIterator; import com.mysql.cj.MysqlType; import io.airbyte.cdk.db.JdbcCompatibleSourceOperations; +import io.airbyte.cdk.db.jdbc.AirbyteRecordData; import io.airbyte.cdk.db.jdbc.JdbcDatabase; import io.airbyte.cdk.integrations.source.relationaldb.RelationalDbQueryUtils; import io.airbyte.commons.util.AutoCloseableIterator; @@ -40,8 +37,8 @@ * records processed here. */ @SuppressWarnings("try") -public class MySqlInitialLoadRecordIterator extends AbstractIterator - implements AutoCloseableIterator { +public class MySqlInitialLoadRecordIterator extends AbstractIterator + implements AutoCloseableIterator { private static final Logger LOGGER = LoggerFactory.getLogger(MySqlInitialLoadRecordIterator.class); @@ -57,7 +54,7 @@ public class MySqlInitialLoadRecordIterator extends AbstractIterator private final PrimaryKeyInfo pkInfo; private final boolean isCompositeKeyLoad; private int numSubqueries = 0; - private AutoCloseableIterator currentIterator; + private AutoCloseableIterator currentIterator; MySqlInitialLoadRecordIterator( final JdbcDatabase database, @@ -81,7 +78,7 @@ public class MySqlInitialLoadRecordIterator extends AbstractIterator @CheckForNull @Override - protected JsonNode computeNext() { + protected AirbyteRecordData computeNext() { if (shouldBuildNextSubquery()) { try { // We will only issue one query for a composite key load. If we have already processed all the data @@ -96,8 +93,8 @@ protected JsonNode computeNext() { } LOGGER.info("Subquery number : {}", numSubqueries); - final Stream stream = database.unsafeQuery( - this::getPkPreparedStatement, sourceOperations::rowToJson); + final Stream stream = database.unsafeQuery( + this::getPkPreparedStatement, sourceOperations::convertDatabaseRowToAirbyteRecordData); currentIterator = AutoCloseableIterators.fromStream(stream, pair); numSubqueries++; @@ -123,7 +120,7 @@ private PreparedStatement getPkPreparedStatement(final Connection connection) { final String tableName = pair.getName(); final String schemaName = pair.getNamespace(); LOGGER.info("Preparing query for table: {}", tableName); - final String fullTableName = getFullyQualifiedTableNameWithQuoting(schemaName, tableName, + final String fullTableName = RelationalDbQueryUtils.getFullyQualifiedTableNameWithQuoting(schemaName, tableName, quoteString); final String wrappedColumnNames = RelationalDbQueryUtils.enquoteIdentifierList(columnNames, quoteString); @@ -132,7 +129,7 @@ private PreparedStatement getPkPreparedStatement(final Connection connection) { if (pkLoadStatus == null) { LOGGER.info("pkLoadStatus is null"); - final String quotedCursorField = enquoteIdentifier(pkInfo.pkFieldName(), quoteString); + final String quotedCursorField = RelationalDbQueryUtils.enquoteIdentifier(pkInfo.pkFieldName(), quoteString); final String sql; // We cannot load in chunks for a composite key load, since each field might not have distinct // values. @@ -148,7 +145,7 @@ private PreparedStatement getPkPreparedStatement(final Connection connection) { return preparedStatement; } else { LOGGER.info("pkLoadStatus value is : {}", pkLoadStatus.getPkVal()); - final String quotedCursorField = enquoteIdentifier(pkLoadStatus.getPkName(), quoteString); + final String quotedCursorField = RelationalDbQueryUtils.enquoteIdentifier(pkLoadStatus.getPkName(), quoteString); final String sql; // We cannot load in chunks for a composite key load, since each field might not have distinct // values. Furthermore, we have to issue a >= diff --git a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialLoadSourceOperations.java b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialLoadSourceOperations.java deleted file mode 100644 index 74d9f985e3acf..0000000000000 --- a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialLoadSourceOperations.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.source.mysql.initialsync; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import io.airbyte.commons.json.Jsons; -import io.airbyte.integrations.source.mysql.MySqlSourceOperations; -import io.airbyte.integrations.source.mysql.cdc.MySqlCdcConnectorMetadataInjector; -import io.airbyte.integrations.source.mysql.cdc.MySqlDebeziumStateUtil.MysqlDebeziumStateAttributes; -import java.sql.ResultSet; -import java.sql.ResultSetMetaData; -import java.sql.SQLException; -import java.util.Collections; -import java.util.Optional; - -public class MySqlInitialLoadSourceOperations extends MySqlSourceOperations { - - private final Optional metadataInjector; - - public MySqlInitialLoadSourceOperations(final Optional metadataInjector) { - super(); - this.metadataInjector = metadataInjector; - } - - @Override - public JsonNode rowToJson(final ResultSet queryContext) throws SQLException { - if (metadataInjector.isPresent()) { - // the first call communicates with the database. after that the result is cached. - final ResultSetMetaData metadata = queryContext.getMetaData(); - final int columnCount = metadata.getColumnCount(); - final ObjectNode jsonNode = (ObjectNode) Jsons.jsonNode(Collections.emptyMap()); - for (int i = 1; i <= columnCount; i++) { - // attempt to access the column. this allows us to know if it is null before we do type-specific - // parsing. if it is null, we can move on. while awkward, this seems to be the agreed upon way of - // checking for null values with jdbc. - queryContext.getObject(i); - if (queryContext.wasNull()) { - continue; - } - - // convert to java types that will convert into reasonable json. - copyToJsonField(queryContext, i, jsonNode); - } - - metadataInjector.get().inject(jsonNode); - return jsonNode; - } else { - return super.rowToJson(queryContext); - } - } - - public static class CdcMetadataInjector { - - private final String transactionTimestamp; - private final MysqlDebeziumStateAttributes stateAttributes; - private final MySqlCdcConnectorMetadataInjector metadataInjector; - - public CdcMetadataInjector(final String transactionTimestamp, - final MysqlDebeziumStateAttributes stateAttributes, - final MySqlCdcConnectorMetadataInjector metadataInjector) { - this.transactionTimestamp = transactionTimestamp; - this.stateAttributes = stateAttributes; - this.metadataInjector = metadataInjector; - } - - private void inject(final ObjectNode record) { - metadataInjector.addMetaDataToRowsFetchedOutsideDebezium(record, transactionTimestamp, stateAttributes); - } - - } - -} diff --git a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialReadUtil.java b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialReadUtil.java index 58c2e0780eaf2..e38a6973c1155 100644 --- a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialReadUtil.java +++ b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialReadUtil.java @@ -32,6 +32,7 @@ import io.airbyte.commons.util.AutoCloseableIterator; import io.airbyte.commons.util.AutoCloseableIterators; import io.airbyte.integrations.source.mysql.MySqlQueryUtils; +import io.airbyte.integrations.source.mysql.MySqlSourceOperations; import io.airbyte.integrations.source.mysql.cdc.MySqlCdcConnectorMetadataInjector; import io.airbyte.integrations.source.mysql.cdc.MySqlCdcPosition; import io.airbyte.integrations.source.mysql.cdc.MySqlCdcProperties; @@ -40,7 +41,6 @@ import io.airbyte.integrations.source.mysql.cdc.MySqlCdcTargetPosition; import io.airbyte.integrations.source.mysql.cdc.MySqlDebeziumStateUtil; import io.airbyte.integrations.source.mysql.cdc.MySqlDebeziumStateUtil.MysqlDebeziumStateAttributes; -import io.airbyte.integrations.source.mysql.initialsync.MySqlInitialLoadSourceOperations.CdcMetadataInjector; import io.airbyte.integrations.source.mysql.internal.models.CursorBasedStatus; import io.airbyte.integrations.source.mysql.internal.models.PrimaryKeyLoadStatus; import io.airbyte.protocol.models.CommonField; @@ -141,8 +141,8 @@ public static List> getCdcReadIterators(fi stateToBeUsed, catalog); final MysqlDebeziumStateAttributes stateAttributes = MySqlDebeziumStateUtil.getStateAttributesFromDB(database); - final MySqlInitialLoadSourceOperations sourceOperations = - new MySqlInitialLoadSourceOperations( + final MySqlSourceOperations sourceOperations = + new MySqlSourceOperations( Optional.of(new CdcMetadataInjector(emittedAt.toString(), stateAttributes, metadataInjector))); final MySqlInitialLoadHandler initialLoadHandler = new MySqlInitialLoadHandler(sourceConfig, database, sourceOperations, diff --git a/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractSshMySqlSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractSshMySqlSourceAcceptanceTest.java index 61b79b8d48df7..765495b854697 100644 --- a/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractSshMySqlSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractSshMySqlSourceAcceptanceTest.java @@ -63,7 +63,7 @@ protected ConfiguredAirbyteCatalog getConfiguredCatalog() { .withCursorField(Lists.newArrayList("id")) .withDestinationSyncMode(DestinationSyncMode.APPEND) .withStream(CatalogHelpers.createAirbyteStream( - String.format("%s.%s", config.get(JdbcUtils.DATABASE_KEY).asText(), STREAM_NAME), + String.format("%s", STREAM_NAME), Field.of("id", JsonSchemaType.NUMBER), Field.of("name", JsonSchemaType.STRING)) .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL))), @@ -72,7 +72,8 @@ protected ConfiguredAirbyteCatalog getConfiguredCatalog() { .withCursorField(Lists.newArrayList("id")) .withDestinationSyncMode(DestinationSyncMode.APPEND) .withStream(CatalogHelpers.createAirbyteStream( - String.format("%s.%s", config.get(JdbcUtils.DATABASE_KEY).asText(), STREAM_NAME2), + String.format("%s", STREAM_NAME2), + config.get(JdbcUtils.DATABASE_KEY).asText(), Field.of("id", JsonSchemaType.NUMBER), Field.of("name", JsonSchemaType.STRING)) .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL))))); diff --git a/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcMySqlSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcMySqlSourceAcceptanceTest.java index 9e12122460b7d..8286c30879918 100644 --- a/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcMySqlSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcMySqlSourceAcceptanceTest.java @@ -4,12 +4,15 @@ package io.airbyte.integrations.io.airbyte.integration_tests.sources; +import static io.airbyte.protocol.models.v0.SyncMode.FULL_REFRESH; import static io.airbyte.protocol.models.v0.SyncMode.INCREMENTAL; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import io.airbyte.cdk.integrations.base.ssh.SshHelpers; @@ -31,14 +34,17 @@ import io.airbyte.protocol.models.v0.DestinationSyncMode; import io.airbyte.protocol.models.v0.SyncMode; import java.util.List; +import java.util.Objects; import java.util.stream.Collectors; import org.apache.commons.lang3.ArrayUtils; +import org.junit.Assert; import org.junit.jupiter.api.Test; public class CdcMySqlSourceAcceptanceTest extends SourceAcceptanceTest { protected static final String STREAM_NAME = "id_and_name"; protected static final String STREAM_NAME2 = "starships"; + protected static final String STREAM_NAME3 = "stream3"; protected MySQLTestDatabase testdb; @@ -67,8 +73,7 @@ protected ConfiguredAirbyteCatalog getConfiguredCatalog() { .withSyncMode(INCREMENTAL) .withDestinationSyncMode(DestinationSyncMode.APPEND) .withStream(CatalogHelpers.createAirbyteStream( - String.format("%s", STREAM_NAME), - testdb.getDatabaseName(), + STREAM_NAME, testdb.getDatabaseName(), Field.of("id", JsonSchemaType.NUMBER), Field.of("name", JsonSchemaType.STRING)) .withSourceDefinedCursor(true) @@ -79,8 +84,7 @@ protected ConfiguredAirbyteCatalog getConfiguredCatalog() { .withSyncMode(INCREMENTAL) .withDestinationSyncMode(DestinationSyncMode.APPEND) .withStream(CatalogHelpers.createAirbyteStream( - String.format("%s", STREAM_NAME2), - testdb.getDatabaseName(), + STREAM_NAME2, testdb.getDatabaseName(), Field.of("id", JsonSchemaType.NUMBER), Field.of("name", JsonSchemaType.STRING)) .withSourceDefinedCursor(true) @@ -101,7 +105,9 @@ protected void setupEnvironment(final TestDestinationEnv environment) { .with("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));") .with("INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');") .with("CREATE TABLE starships(id INTEGER, name VARCHAR(200));") - .with("INSERT INTO starships (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');"); + .with("INSERT INTO starships (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');") + .with("CREATE TABLE %s (id INTEGER PRIMARY KEY, name VARCHAR(200), userid INTEGER DEFAULT NULL);", STREAM_NAME3) + .with("INSERT INTO %s (id, name) VALUES (4,'voyager');", STREAM_NAME3); } protected ContainerModifier[] getContainerModifiers() { @@ -158,8 +164,7 @@ private ConfiguredAirbyteCatalog getConfiguredCatalogWithPartialColumns() { .withSyncMode(INCREMENTAL) .withDestinationSyncMode(DestinationSyncMode.APPEND) .withStream(CatalogHelpers.createAirbyteStream( - String.format("%s", STREAM_NAME), - testdb.getDatabaseName(), + STREAM_NAME, testdb.getDatabaseName(), Field.of("id", JsonSchemaType.NUMBER) /* no name field */) .withSourceDefinedCursor(true) @@ -170,8 +175,7 @@ private ConfiguredAirbyteCatalog getConfiguredCatalogWithPartialColumns() { .withSyncMode(INCREMENTAL) .withDestinationSyncMode(DestinationSyncMode.APPEND) .withStream(CatalogHelpers.createAirbyteStream( - String.format("%s", STREAM_NAME2), - testdb.getDatabaseName(), + STREAM_NAME2, testdb.getDatabaseName(), /* no name field */ Field.of("id", JsonSchemaType.NUMBER)) .withSourceDefinedCursor(true) @@ -185,4 +189,66 @@ private void verifyFieldNotExist(final List records, final "Records contain unselected columns [%s:%s]".formatted(stream, field)); } + @Test + protected void testNullValueConversion() throws Exception { + final List configuredAirbyteStreams = + Lists.newArrayList(new ConfiguredAirbyteStream() + .withSyncMode(INCREMENTAL) + .withDestinationSyncMode(DestinationSyncMode.APPEND) + .withStream(CatalogHelpers.createAirbyteStream(STREAM_NAME3, + testdb.getDatabaseName(), + Field.of("id", JsonSchemaType.NUMBER), + Field.of("name", JsonSchemaType.STRING), + Field.of("userid", JsonSchemaType.NUMBER)) + .withSourceDefinedCursor(true) + .withSourceDefinedPrimaryKey(List.of(List.of("id"))) + .withSupportedSyncModes(Lists.newArrayList(FULL_REFRESH, INCREMENTAL)))); + + final ConfiguredAirbyteCatalog configuredCatalogWithOneStream = + new ConfiguredAirbyteCatalog().withStreams(List.of(configuredAirbyteStreams.get(0))); + + final List airbyteMessages = runRead(configuredCatalogWithOneStream, getState()); + final List recordMessages = filterRecords(airbyteMessages); + final List stateMessages = airbyteMessages + .stream() + .filter(m -> m.getType() == AirbyteMessage.Type.STATE) + .map(AirbyteMessage::getState) + .collect(Collectors.toList()); + Assert.assertEquals(recordMessages.size(), 1); + assertFalse(stateMessages.isEmpty(), "Reason"); + ObjectMapper mapper = new ObjectMapper(); + + assertEquals(cdcFieldsOmitted(recordMessages.get(0).getData()), + mapper.readTree("{\"id\":4, \"name\":\"voyager\", \"userid\":null}")); + + // when we run incremental sync again there should be no new records. Run a sync with the latest + // state message and assert no records were emitted. + JsonNode latestState = extractLatestState(stateMessages); + + testdb.getDatabase().query(c -> { + return c.query("INSERT INTO %s.%s (id, name) VALUES (5,'deep space nine');".formatted(testdb.getDatabaseName(), STREAM_NAME3)); + }).execute(); + + assert Objects.nonNull(latestState); + final List secondSyncRecords = filterRecords(runRead(configuredCatalogWithOneStream, latestState)); + assertFalse( + secondSyncRecords.isEmpty(), + "Expected the second incremental sync to produce records."); + assertEquals(cdcFieldsOmitted(secondSyncRecords.get(0).getData()), + mapper.readTree("{\"id\":5, \"name\":\"deep space nine\", \"userid\":null}")); + + } + + private JsonNode cdcFieldsOmitted(final JsonNode node) { + ObjectMapper mapper = new ObjectMapper(); + ObjectNode object = mapper.createObjectNode(); + node.fieldNames().forEachRemaining(name -> { + if (!name.toLowerCase().startsWith("_ab_cdc_")) { + object.put(name, node.get(name)); + } + + }); + return object; + } + } diff --git a/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/MySqlSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/MySqlSourceAcceptanceTest.java index d6a2adffe2c94..6044c66cf9cb5 100644 --- a/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/MySqlSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/MySqlSourceAcceptanceTest.java @@ -4,7 +4,12 @@ package io.airbyte.integrations.io.airbyte.integration_tests.sources; +import static io.airbyte.protocol.models.v0.SyncMode.INCREMENTAL; +import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; + import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Lists; import io.airbyte.cdk.integrations.base.ssh.SshHelpers; import io.airbyte.cdk.integrations.standardtest.source.SourceAcceptanceTest; @@ -15,14 +20,13 @@ import io.airbyte.integrations.source.mysql.MySQLTestDatabase.ContainerModifier; import io.airbyte.protocol.models.Field; import io.airbyte.protocol.models.JsonSchemaType; -import io.airbyte.protocol.models.v0.CatalogHelpers; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.v0.ConnectorSpecification; -import io.airbyte.protocol.models.v0.DestinationSyncMode; -import io.airbyte.protocol.models.v0.SyncMode; +import io.airbyte.protocol.models.v0.*; import java.util.HashMap; +import java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; import org.apache.commons.lang3.ArrayUtils; +import org.junit.jupiter.api.Test; public class MySqlSourceAcceptanceTest extends SourceAcceptanceTest { @@ -75,7 +79,7 @@ protected ConfiguredAirbyteCatalog getConfiguredCatalog() { .withCursorField(Lists.newArrayList("id")) .withDestinationSyncMode(DestinationSyncMode.APPEND) .withStream(CatalogHelpers.createAirbyteStream( - String.format("%s.%s", testdb.getDatabaseName(), STREAM_NAME), + STREAM_NAME, testdb.getDatabaseName(), Field.of("id", JsonSchemaType.NUMBER), Field.of("name", JsonSchemaType.STRING)) .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL))), @@ -84,7 +88,7 @@ protected ConfiguredAirbyteCatalog getConfiguredCatalog() { .withCursorField(Lists.newArrayList("id")) .withDestinationSyncMode(DestinationSyncMode.APPEND) .withStream(CatalogHelpers.createAirbyteStream( - String.format("%s.%s", testdb.getDatabaseName(), STREAM_NAME2), + STREAM_NAME2, testdb.getDatabaseName(), Field.of("id", JsonSchemaType.NUMBER), Field.of("name", JsonSchemaType.STRING)) .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL))))); @@ -95,4 +99,63 @@ protected JsonNode getState() { return Jsons.jsonNode(new HashMap<>()); } + @Test + protected void testNullValueConversion() throws Exception { + final String STREAM_NAME3 = "stream3"; + testdb.getDatabase().query(c -> { + return c.query(""" + CREATE TABLE %s.%s (id INTEGER PRIMARY KEY, name VARCHAR(200), userid INTEGER DEFAULT NULL); + """.formatted(testdb.getDatabaseName(), STREAM_NAME3)); + }).execute(); + + testdb.getDatabase().query(c -> { + return c.query(""" + INSERT INTO %s.%s (id, name) VALUES (4,'voyager'); + """.formatted(testdb.getDatabaseName(), STREAM_NAME3)); + }).execute(); + + final List configuredAirbyteStreams = + Lists.newArrayList(CatalogHelpers.createConfiguredAirbyteStream(STREAM_NAME3, + testdb.getDatabaseName(), + Field.of("id", JsonSchemaType.NUMBER), + Field.of("name", JsonSchemaType.STRING), + Field.of("userid", JsonSchemaType.NUMBER)) + .withDestinationSyncMode(DestinationSyncMode.APPEND) + .withSyncMode(INCREMENTAL) + .withCursorField(List.of("id"))); + final ConfiguredAirbyteCatalog configuredCatalogWithOneStream = + new ConfiguredAirbyteCatalog().withStreams(List.of(configuredAirbyteStreams.get(0))); + + final List airbyteMessages = runRead(configuredCatalogWithOneStream, getState()); + final List recordMessages = filterRecords(airbyteMessages); + final List stateMessages = airbyteMessages + .stream() + .filter(m -> m.getType() == AirbyteMessage.Type.STATE) + .map(AirbyteMessage::getState) + .collect(Collectors.toList()); + assertEquals(recordMessages.size(), 1); + assertFalse(stateMessages.isEmpty(), "Reason"); + ObjectMapper mapper = new ObjectMapper(); + + assertEquals(recordMessages.get(0).getData(), + mapper.readTree("{\"id\":4, \"name\":\"voyager\", \"userid\":null}")); + + // when we run incremental sync again there should be no new records. Run a sync with the latest + // state message and assert no records were emitted. + JsonNode latestState = extractLatestState(stateMessages); + + testdb.getDatabase().query(c -> { + return c.query("INSERT INTO %s.%s (id, name) VALUES (5,'deep space nine');".formatted(testdb.getDatabaseName(), STREAM_NAME3)); + }).execute(); + + assert Objects.nonNull(latestState); + final List secondSyncRecords = filterRecords(runRead(configuredCatalogWithOneStream, latestState)); + assertFalse( + secondSyncRecords.isEmpty(), + "Expected the second incremental sync to produce records."); + assertEquals(secondSyncRecords.get(0).getData(), + mapper.readTree("{\"id\":5, \"name\":\"deep space nine\", \"userid\":null}")); + + } + } diff --git a/airbyte-integrations/connectors/source-mysql/src/test-performance/java/io/airbyte/integrations/source/mysql/FillMySqlTestDbScriptTest.java b/airbyte-integrations/connectors/source-mysql/src/test-performance/java/io/airbyte/integrations/source/mysql/FillMySqlTestDbScriptTest.java deleted file mode 100644 index d9ff7f3620565..0000000000000 --- a/airbyte-integrations/connectors/source-mysql/src/test-performance/java/io/airbyte/integrations/source/mysql/FillMySqlTestDbScriptTest.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.source.mysql; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.cdk.db.Database; -import io.airbyte.cdk.db.factory.DSLContextFactory; -import io.airbyte.cdk.db.factory.DatabaseDriver; -import io.airbyte.cdk.db.jdbc.JdbcUtils; -import io.airbyte.cdk.integrations.JdbcConnector; -import io.airbyte.cdk.integrations.standardtest.source.TestDestinationEnv; -import io.airbyte.cdk.integrations.standardtest.source.performancetest.AbstractSourceFillDbWithTestData; -import io.airbyte.commons.json.Jsons; -import java.util.Map; -import java.util.stream.Stream; -import org.jooq.SQLDialect; -import org.junit.jupiter.params.provider.Arguments; - -public class FillMySqlTestDbScriptTest extends AbstractSourceFillDbWithTestData { - - private JsonNode config; - - @Override - protected JsonNode getConfig() { - return config; - } - - @Override - protected void tearDown(final TestDestinationEnv testEnv) {} - - @Override - protected String getImageName() { - return "airbyte/source-mysql:dev"; - } - - @Override - protected Database setupDatabase(final String dbName) throws Exception { - - final JsonNode replicationMethod = Jsons.jsonNode(ImmutableMap.builder() - .put("method", "STANDARD") - .build()); - config = Jsons.jsonNode(ImmutableMap.builder() - .put(JdbcUtils.HOST_KEY, "your_host") - .put(JdbcUtils.PORT_KEY, 3306) - .put(JdbcUtils.DATABASE_KEY, dbName) // set your db name - .put(JdbcUtils.USERNAME_KEY, "your_username") - .put(JdbcUtils.PASSWORD_KEY, "your_pass") - .put("replication_method", replicationMethod) - .build()); - - final Database database = new Database( - DSLContextFactory.create( - config.get(JdbcUtils.USERNAME_KEY).asText(), - config.get(JdbcUtils.PASSWORD_KEY).asText(), - DatabaseDriver.MYSQL.getDriverClassName(), - String.format(DatabaseDriver.MYSQL.getUrlFormatString(), - config.get(JdbcUtils.HOST_KEY).asText(), - config.get(JdbcUtils.PORT_KEY).asInt(), - config.get(JdbcUtils.DATABASE_KEY).asText()), - SQLDialect.MYSQL, - Map.of("zeroDateTimeBehavior", "convertToNull"), - JdbcConnector.CONNECT_TIMEOUT_DEFAULT)); - - // It disable strict mode in the DB and allows to insert specific values. - // For example, it's possible to insert date with zero values "2021-00-00" - database.query(ctx -> ctx.execute("SET @@sql_mode=''")); - - return database; - } - - /** - * This is a data provider for fill DB script,, Each argument's group would be ran as a separate - * test. 1st arg - a name of DB that will be used in jdbc connection string. 2nd arg - a schemaName - * that will be ised as a NameSpace in Configured Airbyte Catalog. 3rd arg - a number of expected - * records retrieved in each stream. 4th arg - a number of messages batches - * (numberOfMessages*numberOfBatches, ex. 100*2=200 messages in total in each stream) 5th arg - a - * number of columns in each stream\table that will be use for Airbyte Cataloq configuration 6th arg - * - a number of streams to read in configured airbyte Catalog. Each stream\table in DB should be - * names like "test_0", "test_1",..., test_n. - */ - @Override - protected Stream provideParameters() { - // for MySQL DB name ans schema name would be the same - return Stream.of(Arguments.of("your_db_name", "your_schema_name", 100, 2, 240, 1000)); - } - -} diff --git a/airbyte-integrations/connectors/source-mysql/src/test-performance/java/io/airbyte/integrations/source/mysql/MySqlRdsSourcePerformanceSecretTest.java b/airbyte-integrations/connectors/source-mysql/src/test-performance/java/io/airbyte/integrations/source/mysql/MySqlRdsSourcePerformanceSecretTest.java deleted file mode 100644 index d65977312b043..0000000000000 --- a/airbyte-integrations/connectors/source-mysql/src/test-performance/java/io/airbyte/integrations/source/mysql/MySqlRdsSourcePerformanceSecretTest.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.source.mysql; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.cdk.db.Database; -import io.airbyte.cdk.db.factory.DSLContextFactory; -import io.airbyte.cdk.db.factory.DatabaseDriver; -import io.airbyte.cdk.db.jdbc.JdbcUtils; -import io.airbyte.cdk.integrations.JdbcConnector; -import io.airbyte.cdk.integrations.standardtest.source.performancetest.AbstractSourcePerformanceTest; -import io.airbyte.commons.io.IOs; -import io.airbyte.commons.json.Jsons; -import java.nio.file.Path; -import java.util.Map; -import java.util.stream.Stream; -import org.jooq.DSLContext; -import org.jooq.SQLDialect; -import org.junit.jupiter.params.provider.Arguments; - -public class MySqlRdsSourcePerformanceSecretTest extends AbstractSourcePerformanceTest { - - private static final String PERFORMANCE_SECRET_CREDS = "secrets/performance-config.json"; - - @Override - protected String getImageName() { - return "airbyte/source-mysql:dev"; - } - - @Override - protected void setupDatabase(final String dbName) throws Exception { - final JsonNode plainConfig = Jsons.deserialize(IOs.readFile(Path.of(PERFORMANCE_SECRET_CREDS))); - - config = Jsons.jsonNode(ImmutableMap.builder() - .put(JdbcUtils.HOST_KEY, plainConfig.get(JdbcUtils.HOST_KEY)) - .put(JdbcUtils.PORT_KEY, plainConfig.get(JdbcUtils.PORT_KEY)) - .put(JdbcUtils.DATABASE_KEY, dbName) - .put(JdbcUtils.USERNAME_KEY, plainConfig.get(JdbcUtils.USERNAME_KEY)) - .put(JdbcUtils.PASSWORD_KEY, plainConfig.get(JdbcUtils.PASSWORD_KEY)) - .put("replication_method", plainConfig.get("replication_method")) - .build()); - - final DSLContext dslContext = DSLContextFactory.create( - config.get(JdbcUtils.USERNAME_KEY).asText(), - config.get(JdbcUtils.PASSWORD_KEY).asText(), - DatabaseDriver.MYSQL.getDriverClassName(), - String.format(DatabaseDriver.MYSQL.getUrlFormatString(), - config.get(JdbcUtils.HOST_KEY).asText(), - config.get(JdbcUtils.PORT_KEY).asInt(), - config.get(JdbcUtils.DATABASE_KEY).asText()), - SQLDialect.MYSQL, - Map.of("zeroDateTimeBehavior", "convertToNull"), - JdbcConnector.CONNECT_TIMEOUT_DEFAULT); - - final Database database = new Database(dslContext); - - // It disable strict mode in the DB and allows to insert specific values. - // For example, it's possible to insert date with zero values "2021-00-00" - database.query(ctx -> ctx.execute("SET @@sql_mode=''")); - } - - /** - * This is a data provider for performance tests, Each argument's group would be ran as a separate - * test. 1st arg - a name of DB that will be used in jdbc connection string. 2nd arg - a schemaName - * that will be used as a NameSpace in Configured Airbyte Catalog. 3rd arg - a number of expected - * records retrieved in each stream. 4th arg - a number of columns in each stream\table that will be - * use for Airbyte Cataloq configuration 5th arg - a number of streams to read in configured airbyte - * Catalog. Each stream\table in DB should be names like "test_0", "test_1",..., test_n. - */ - @Override - protected Stream provideParameters() { - return Stream.of( - Arguments.of("t1000_c240_r200", "t1000_c240_r200", 200, 240, 1000), - Arguments.of("t25_c8_r50k_s10kb", "t25_c8_r50k_s10kb", 50000, 8, 25), - Arguments.of("t1000_c8_r10k_s500b", "t1000_c8_r10k_s500b", 10000, 8, 1000)); - } - -} diff --git a/airbyte-integrations/connectors/source-mysql/src/test-performance/sql/create_mysql_benchmarks.sql b/airbyte-integrations/connectors/source-mysql/src/test-performance/sql/create_mysql_benchmarks.sql deleted file mode 100644 index b24773e849771..0000000000000 --- a/airbyte-integrations/connectors/source-mysql/src/test-performance/sql/create_mysql_benchmarks.sql +++ /dev/null @@ -1,280 +0,0 @@ -delimiter # CREATE - PROCEDURE table_copy( - IN tablecount INT - ) BEGIN -SET - @v_max_table = tablecount; -SET -@v_counter_table = 1; - -while @v_counter_table < @v_max_table do -SET -@tnamee = concat( - 'create table IF NOT EXISTS test_', - @v_counter_table, - ' SELECT * FROM test;' -); - -PREPARE stmt -FROM -@tnamee; - -EXECUTE stmt; - -DEALLOCATE PREPARE stmt; -SET -@v_counter_table = @v_counter_table + 1; -END while; - -COMMIT; -END # delimiter; - -delimiter # CREATE - PROCEDURE insert_rows( - IN allrows INT, - IN insertcount INT, - IN value longblob - ) BEGIN -SET - @dummyIpsum = '\' dummy_ipsum\''; -SET -@fieldText = value; -SET -@vmax = allrows; -SET -@vmaxx = allrows; -SET -@vmaxoneinsert = insertcount; -SET -@counter = 1; -SET -@lastinsertcounter = 1; -SET -@lastinsert = 0; -SET -@fullloop = 0; -SET -@fullloopcounter = 0; - -while @vmaxx <= @vmaxoneinsert do -SET -@vmaxoneinsert = @vmaxx; -SET -@fullloop = @fullloop + 1; -SET -@vmaxx = @vmaxx + 1; -END while; - -COMMIT; - -while @vmax > @vmaxoneinsert do -SET -@fullloop = @fullloop + 1; -SET -@vmax = @vmax - @vmaxoneinsert; -SET -@lastinsert = @vmax; -END while; - -COMMIT; -SET -@insertTable = concat('insert into test (varchar1, varchar2, varchar3, varchar4, varchar5, longtextfield, timestampfield) values ('); - -while @counter < @vmaxoneinsert do -SET -@insertTable = concat( - @insertTable, - @dummyIpsum, - ', ', - @dummyIpsum, - ', ', - @dummyIpsum, - ', ', - @dummyIpsum, - ', ', - @dummyIpsum, - ', ', - @fieldText, - ', CURRENT_TIMESTAMP), (' -); -SET -@counter = @counter + 1; -END while; - -COMMIT; -SET -@insertTable = concat( - @insertTable, - @dummyIpsum, - ', ', - @dummyIpsum, - ', ', - @dummyIpsum, - ', ', - @dummyIpsum, - ', ', - @dummyIpsum, - ', ', - @fieldText, - ', CURRENT_TIMESTAMP);' -); - -while @vmax < 1 do -SET -@fullloop = 0; -SET -@vmax = 1; -END while; - -COMMIT; - -while @fullloopcounter < @fullloop do PREPARE runinsert -FROM -@insertTable; - -EXECUTE runinsert; - -DEALLOCATE PREPARE runinsert; -SET -@fullloopcounter = @fullloopcounter + 1; -END while; - -COMMIT; -SET -@insertTableLasted = concat('insert into test (varchar1, varchar2, varchar3, varchar4, varchar5, longtextfield, timestampfield) values ('); - -while @lastinsertcounter < @lastinsert do -SET -@insertTableLasted = concat( - @insertTableLasted, - @dummyIpsum, - ', ', - @dummyIpsum, - ', ', - @dummyIpsum, - ', ', - @dummyIpsum, - ', ', - @dummyIpsum, - ', ', - @fieldText, - ', CURRENT_TIMESTAMP), (' -); -SET -@lastinsertcounter = @lastinsertcounter + 1; -END while; - -COMMIT; -SET -@insertTableLasted = concat( - @insertTableLasted, - @dummyIpsum, - ', ', - @dummyIpsum, - ', ', - @dummyIpsum, - ', ', - @dummyIpsum, - ', ', - @dummyIpsum, - ', ', - @fieldText, - ', CURRENT_TIMESTAMP);' -); - -while @lastinsert > 0 do PREPARE runinsert -FROM -@insertTableLasted; - -EXECUTE runinsert; - -DEALLOCATE PREPARE runinsert; -SET -@lastinsert = 0; -END while; - -COMMIT; -END # delimiter; - -delimiter # CREATE - PROCEDURE table_create() BEGIN CREATE - TABLE - test( - id INT unsigned NOT NULL auto_increment PRIMARY KEY, - varchar1 VARCHAR(255), - varchar2 VARCHAR(255), - varchar3 VARCHAR(255), - varchar4 VARCHAR(255), - varchar5 VARCHAR(255), - longtextfield longtext, - timestampfield TIMESTAMP - ) engine = innodb; -SET -@extraSmallText = '\' test weight 50 b - SOME text, -SOME text, -SOME text\''; -SET -@smallText = CONCAT( - '\' test weight 500 b - ', REPEAT(' SOME text, - SOME text, - ', 20), ' \'' -); -SET -@regularText = CONCAT( - '\' test weight 10 kb - ', REPEAT(' SOME text, - SOME text, - ', 590), ' \'' -); -SET -@largeText = CONCAT( - '\' test weight 100 kb - ', REPEAT(' SOME text, - SOME text, - ', 4450), ' \'' -); - --- TODO: change the following @allrows to control the number of records with different sizes --- number of 50B records -CALL insert_rows( - 0, - 5000000, - @extraSmallText -); - --- number of 500B records -CALL insert_rows( - 0, - 50000, - @smallText -); - --- number of 10KB records -CALL insert_rows( - 0, - 5000, - @regularText -); - --- number of 100KB records -CALL insert_rows( - 0, - 50, - @largeText -); -END # delimiter; - -CALL table_create(); - -DROP - PROCEDURE IF EXISTS table_create; - -DROP - PROCEDURE IF EXISTS insert_rows; - --- TODO: change the value to control the number of tables -CALL table_copy(1); - -DROP - PROCEDURE IF EXISTS table_copy; - -ALTER TABLE - test RENAME test_0; \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-mysql/src/test/java/io/airbyte/integrations/source/mysql/CdcMysqlSourceTest.java b/airbyte-integrations/connectors/source-mysql/src/test/java/io/airbyte/integrations/source/mysql/CdcMysqlSourceTest.java index b88b5baa64207..99104ed17cc19 100644 --- a/airbyte-integrations/connectors/source-mysql/src/test/java/io/airbyte/integrations/source/mysql/CdcMysqlSourceTest.java +++ b/airbyte-integrations/connectors/source-mysql/src/test/java/io/airbyte/integrations/source/mysql/CdcMysqlSourceTest.java @@ -25,6 +25,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -50,6 +51,10 @@ import io.airbyte.protocol.models.v0.AirbyteGlobalState; import io.airbyte.protocol.models.v0.AirbyteMessage; import io.airbyte.protocol.models.v0.AirbyteRecordMessage; +import io.airbyte.protocol.models.v0.AirbyteRecordMessageMeta; +import io.airbyte.protocol.models.v0.AirbyteRecordMessageMetaChange; +import io.airbyte.protocol.models.v0.AirbyteRecordMessageMetaChange.Change; +import io.airbyte.protocol.models.v0.AirbyteRecordMessageMetaChange.Reason; import io.airbyte.protocol.models.v0.AirbyteStateMessage; import io.airbyte.protocol.models.v0.AirbyteStateMessage.AirbyteStateType; import io.airbyte.protocol.models.v0.AirbyteStream; @@ -59,6 +64,7 @@ import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; import io.airbyte.protocol.models.v0.StreamDescriptor; import io.airbyte.protocol.models.v0.SyncMode; +import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; @@ -67,6 +73,7 @@ import java.util.Properties; import java.util.Random; import java.util.Set; +import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; @@ -79,6 +86,11 @@ public class CdcMysqlSourceTest extends CdcSourceTest DATE_TIME_RECORDS = ImmutableList.of( + Jsons.jsonNode(ImmutableMap.of(COL_ID, 120, COL_DATE_TIME, "'2023-00-00 20:37:47'"))); + @Override protected MySQLTestDatabase createTestDatabase() { return MySQLTestDatabase.in(BaseImage.MYSQL_8, ContainerModifier.INVALID_TIMEZONE_CEST).withCdcPermissions(); @@ -113,11 +125,15 @@ protected String createTableSqlFmt() { @Override protected String modelsSchema() { - return testdb.getDatabaseName(); + return getDatabaseName(); } @Override protected String randomSchema() { + return getDatabaseName(); + } + + protected String getDatabaseName() { return testdb.getDatabaseName(); } @@ -296,6 +312,8 @@ protected void syncShouldHandlePurgedLogsGracefully() throws Exception { * @throws Exception Exception happening in the test. */ @Test + @Timeout(value = 5, + unit = TimeUnit.MINUTES) protected void verifyCheckpointStatesByRecords() throws Exception { // We require a huge amount of records, otherwise Debezium will notify directly the last offset. final int recordsToCreate = 20_000; @@ -328,7 +346,7 @@ protected void verifyCheckpointStatesByRecords() throws Exception { } @Override - protected void assertExpectedStateMessages(final List stateMessages) { + protected void assertExpectedStateMessages(final List stateMessages) { assertEquals(7, stateMessages.size()); assertStateTypes(stateMessages, 4); } @@ -342,7 +360,7 @@ protected void assertExpectedStateMessagesWithTotalCount(final List stateMessages) { + protected void assertExpectedStateMessagesFromIncrementalSync(final List stateMessages) { assertEquals(1, stateMessages.size()); assertNotNull(stateMessages.get(0).getData()); for (final AirbyteStateMessage stateMessage : stateMessages) { @@ -367,17 +385,17 @@ private void assertStateForSyncShouldHandlePurgedLogsGracefully(final List stateAfterFirstBatch) { + protected void assertExpectedStateMessagesForRecordsProducedDuringAndAfterSync(final List stateAfterFirstBatch) { assertEquals(27, stateAfterFirstBatch.size()); assertStateTypes(stateAfterFirstBatch, 24); } @Override - protected void assertExpectedStateMessagesForNoData(final List stateMessages) { + protected void assertExpectedStateMessagesForNoData(final List stateMessages) { assertEquals(2, stateMessages.size()); } - private void assertStateTypes(final List stateMessages, final int indexTillWhichExpectPkState) { + private void assertStateTypes(final List stateMessages, final int indexTillWhichExpectPkState) { JsonNode sharedState = null; for (int i = 0; i < stateMessages.size(); i++) { final AirbyteStateMessage stateMessage = stateMessages.get(i); @@ -401,7 +419,7 @@ private void assertStateTypes(final List stateMessages, fin } @Override - protected void assertStateMessagesForNewTableSnapshotTest(final List stateMessages, + protected void assertStateMessagesForNewTableSnapshotTest(final List stateMessages, final AirbyteStateMessage stateMessageEmittedAfterFirstSyncCompletion) { assertEquals(7, stateMessages.size()); for (int i = 0; i <= 4; i++) { @@ -416,13 +434,13 @@ protected void assertStateMessagesForNewTableSnapshotTest(final List { final JsonNode streamState = s.getStreamState(); if (s.getStreamDescriptor().equals(new StreamDescriptor().withName(MODELS_STREAM_NAME + "_random").withNamespace(randomSchema()))) { assertEquals(PRIMARY_KEY_STATE_TYPE, streamState.get(STATE_TYPE_KEY).asText()); - } else if (s.getStreamDescriptor().equals(new StreamDescriptor().withName(MODELS_STREAM_NAME).withNamespace(testdb.getDatabaseName()))) { + } else if (s.getStreamDescriptor().equals(new StreamDescriptor().withName(MODELS_STREAM_NAME).withNamespace(getDatabaseName()))) { assertFalse(streamState.has(STATE_TYPE_KEY)); } else { throw new RuntimeException("Unknown stream"); @@ -441,7 +459,7 @@ protected void assertStateMessagesForNewTableSnapshotTest(final List { final JsonNode streamState = s.getStreamState(); assertFalse(streamState.has(STATE_TYPE_KEY)); @@ -459,7 +477,8 @@ protected void assertStateMessagesForNewTableSnapshotTest(final List read2 = source() @@ -672,7 +691,7 @@ public void testTwoStreamSync() throws Exception { recordMessages2, names, names, - testdb.getDatabaseName()); + getDatabaseName()); } /** @@ -726,6 +745,70 @@ public void testCompressedSchemaHistory() throws Exception { assertEquals(recordsToCreate, extractRecordMessages(dataFromSecondBatch).size()); } + private void writeDateRecords( + final JsonNode recordJson, + final String dbName, + final String streamName, + final String idCol, + final String dateCol) { + testdb.with("INSERT INTO `%s` .`%s` (%s, %s) VALUES (%s, %s);", dbName, streamName, + idCol, dateCol, + recordJson.get(idCol).asInt(), recordJson.get(dateCol).asText()); + } + + @Test + public void testInvalidDatetime_metaChangesPopulated() throws Exception { + final ConfiguredAirbyteCatalog configuredCatalog = Jsons.clone(getConfiguredCatalog()); + + // Add a datetime stream to the catalog + testdb + .withoutStrictMode() + .with(createTableSqlFmt(), getDatabaseName(), TEST_DATE_STREAM_NAME, + columnClause(ImmutableMap.of(COL_ID, "INTEGER", COL_DATE_TIME, "DATETIME"), Optional.of(COL_ID))); + + for (final JsonNode recordJson : DATE_TIME_RECORDS) { + writeDateRecords(recordJson, getDatabaseName(), TEST_DATE_STREAM_NAME, COL_ID, COL_DATE_TIME); + } + + final ConfiguredAirbyteStream airbyteStream = new ConfiguredAirbyteStream() + .withStream(CatalogHelpers.createAirbyteStream( + TEST_DATE_STREAM_NAME, + getDatabaseName(), + Field.of(COL_ID, JsonSchemaType.INTEGER), + Field.of(COL_DATE_TIME, JsonSchemaType.STRING_TIMESTAMP_WITHOUT_TIMEZONE)) + .withSupportedSyncModes( + Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) + .withSourceDefinedPrimaryKey(List.of(List.of(COL_ID)))); + airbyteStream.setSyncMode(SyncMode.INCREMENTAL); + + final List streams = new ArrayList<>(); + streams.add(airbyteStream); + configuredCatalog.withStreams(streams); + + final AutoCloseableIterator read1 = source() + .read(config(), configuredCatalog, null); + final List actualRecords = AutoCloseableIterators.toListAndClose(read1); + + // Sync is expected to succeed with one record. However, the meta changes column should be populated + // for this record + // as it is an invalid date. As a result, this field will be omitted as Airbyte is unable to + // serialize the source value. + final Set recordMessages = extractRecordMessages(actualRecords); + assertEquals(recordMessages.size(), 1); + final AirbyteRecordMessage invalidDateRecord = recordMessages.stream().findFirst().get(); + + final AirbyteRecordMessageMetaChange expectedChange = + new AirbyteRecordMessageMetaChange().withReason(Reason.SOURCE_SERIALIZATION_ERROR).withChange( + Change.NULLED).withField(COL_DATE_TIME); + final AirbyteRecordMessageMeta expectedMessageMeta = new AirbyteRecordMessageMeta().withChanges(List.of(expectedChange)); + assertEquals(expectedMessageMeta, invalidDateRecord.getMeta()); + + ObjectMapper mapper = new ObjectMapper(); + final JsonNode expectedDataWithoutCdcFields = mapper.readTree("{\"id\":120}"); + removeCDCColumns((ObjectNode) invalidDateRecord.getData()); + assertEquals(expectedDataWithoutCdcFields, invalidDateRecord.getData()); + } + private void createTablesToIncreaseSchemaHistorySize() { for (int i = 0; i <= 200; i++) { final String tableName = generateRandomStringOf32Characters(); diff --git a/airbyte-integrations/connectors/source-mysql/src/test/java/io/airbyte/integrations/source/mysql/CdcMysqlSourceWithSpecialDbNameTest.java b/airbyte-integrations/connectors/source-mysql/src/test/java/io/airbyte/integrations/source/mysql/CdcMysqlSourceWithSpecialDbNameTest.java index 9dd4170500eca..74dec8a9ce779 100644 --- a/airbyte-integrations/connectors/source-mysql/src/test/java/io/airbyte/integrations/source/mysql/CdcMysqlSourceWithSpecialDbNameTest.java +++ b/airbyte-integrations/connectors/source-mysql/src/test/java/io/airbyte/integrations/source/mysql/CdcMysqlSourceWithSpecialDbNameTest.java @@ -31,7 +31,7 @@ public TestDatabaseWithInvalidDatabaseName(MySQLContainer container) { @Override public String getDatabaseName() { - return INVALID_DB_NAME; + return withNamespace(INVALID_DB_NAME); } } diff --git a/airbyte-integrations/connectors/source-mysql/src/test/java/io/airbyte/integrations/source/mysql/CloudDeploymentMySqlSslTest.java b/airbyte-integrations/connectors/source-mysql/src/test/java/io/airbyte/integrations/source/mysql/CloudDeploymentMySqlSslTest.java index 92fded997c4d7..9cc966b6b5df7 100644 --- a/airbyte-integrations/connectors/source-mysql/src/test/java/io/airbyte/integrations/source/mysql/CloudDeploymentMySqlSslTest.java +++ b/airbyte-integrations/connectors/source-mysql/src/test/java/io/airbyte/integrations/source/mysql/CloudDeploymentMySqlSslTest.java @@ -19,11 +19,10 @@ import io.airbyte.commons.resources.MoreResources; import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; import io.airbyte.protocol.models.v0.ConnectorSpecification; +import java.util.concurrent.TimeUnit; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.parallel.Execution; -import org.junit.jupiter.api.parallel.ExecutionMode; +import org.junit.jupiter.api.Timeout; -@Execution(ExecutionMode.CONCURRENT) public class CloudDeploymentMySqlSslTest { private MySQLTestDatabase createTestDatabase(String... containerFactoryMethods) { @@ -131,6 +130,8 @@ void testStrictSSLUnsecuredWithTunnel() throws Exception { } @Test + @Timeout(value = 5, + unit = TimeUnit.MINUTES) void testCheckWithSslModeDisabled() throws Exception { try (final var testdb = createTestDatabase("withNetwork")) { try (final SshBastionContainer bastion = new SshBastionContainer()) { diff --git a/airbyte-integrations/connectors/source-mysql/src/test/java/io/airbyte/integrations/source/mysql/MySqlJdbcSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-mysql/src/test/java/io/airbyte/integrations/source/mysql/MySqlJdbcSourceAcceptanceTest.java index 79a27260480ee..3f9f8b80282e0 100644 --- a/airbyte-integrations/connectors/source-mysql/src/test/java/io/airbyte/integrations/source/mysql/MySqlJdbcSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-mysql/src/test/java/io/airbyte/integrations/source/mysql/MySqlJdbcSourceAcceptanceTest.java @@ -288,7 +288,7 @@ protected void testReadMultipleTablesIncrementally() throws Exception { } @Test - void testSpec() throws Exception { + public void testSpec() throws Exception { final ConnectorSpecification actual = source().spec(); final ConnectorSpecification expected = Jsons.deserialize(MoreResources.readResource("spec.json"), ConnectorSpecification.class); @@ -479,7 +479,7 @@ protected AirbyteCatalog getCatalog(final String defaultNamespace) { // Override from parent class as we're no longer including the legacy Data field. @Override - protected List createExpectedTestMessages(final List states, final long numRecords) { + protected List createExpectedTestMessages(final List states, final long numRecords) { return states.stream() .map(s -> new AirbyteMessage().withType(Type.STATE) .withState( @@ -493,7 +493,7 @@ protected List createExpectedTestMessages(final List createState(final List states) { + protected List createState(final List states) { return states.stream() .map(s -> new AirbyteStateMessage().withType(AirbyteStateType.STREAM) .withStream(new AirbyteStreamState() diff --git a/airbyte-integrations/connectors/source-notion/.coveragerc b/airbyte-integrations/connectors/source-notion/.coveragerc new file mode 100644 index 0000000000000..d598bdbac7ab1 --- /dev/null +++ b/airbyte-integrations/connectors/source-notion/.coveragerc @@ -0,0 +1,3 @@ +[run] +omit = + source_notion/run.py diff --git a/airbyte-integrations/connectors/source-notion/README.md b/airbyte-integrations/connectors/source-notion/README.md index 13b0ba9f2b6b9..ef00488941265 100644 --- a/airbyte-integrations/connectors/source-notion/README.md +++ b/airbyte-integrations/connectors/source-notion/README.md @@ -30,7 +30,7 @@ See `sample_files/sample_config.json` for a sample config file. poetry run source-notion spec poetry run source-notion check --config secrets/config.json poetry run source-notion discover --config secrets/config.json -poetry run source-notion read --config secrets/config.json --catalog sample_files/configured_catalog.json +poetry run source-notion read --config secrets/config.json --catalog integration_tests/configured_catalog.json ``` ### Running unit tests diff --git a/airbyte-integrations/connectors/source-notion/acceptance-test-config.yml b/airbyte-integrations/connectors/source-notion/acceptance-test-config.yml index ab14b89dc5c8e..fd8331acf8806 100644 --- a/airbyte-integrations/connectors/source-notion/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-notion/acceptance-test-config.yml @@ -33,6 +33,8 @@ acceptance_tests: configured_catalog_path: "integration_tests/incremental_catalog.json" future_state: future_state_path: "integration_tests/abnormal_state.json" + # Skipping this test since the correct stream state is only emitted after all records have been read + skip_comprehensive_incremental_tests: true full_refresh: tests: - config_path: "secrets/config.json" diff --git a/airbyte-integrations/connectors/source-notion/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-notion/integration_tests/abnormal_state.json index b791f731fbb36..b80e0db6db90a 100644 --- a/airbyte-integrations/connectors/source-notion/integration_tests/abnormal_state.json +++ b/airbyte-integrations/connectors/source-notion/integration_tests/abnormal_state.json @@ -31,5 +31,57 @@ "name": "blocks" } } + }, + { + "type": "STREAM", + "stream": { + "stream_descriptor": { "name": "comments", "namespace": null }, + "stream_state": { + "states": [ + { + "partition": { + "id": "9cca331a-1edb-4120-9a86-cf40214fac37", + "parent_slice": {} + }, + "cursor": { "last_edited_time": "2099-10-10T13:51:00.000Z" } + }, + { + "partition": { + "id": "8fbb79d0-f858-4fe7-9e88-6b4019c72365", + "parent_slice": {} + }, + "cursor": { "last_edited_time": "2099-10-10T13:53:00.000Z" } + }, + { + "partition": { + "id": "18fe94d2-60ec-4a7e-be86-b43e5c9198af", + "parent_slice": {} + }, + "cursor": { "last_edited_time": "2099-10-10T13:47:00.000Z" } + }, + { + "partition": { + "id": "249f3796-7e81-47b0-9075-00ed2d06439d", + "parent_slice": {} + }, + "cursor": { "last_edited_time": "2099-09-12T20:55:00.000Z" } + }, + { + "partition": { + "id": "29299296-ef3f-4aff-aef5-02d651a59be3", + "parent_slice": {} + }, + "cursor": { "last_edited_time": "2099-09-12T20:56:00.000Z" } + }, + { + "partition": { + "id": "a55d276e-4bc2-4fcc-9fb3-e60b867c86e7", + "parent_slice": {} + }, + "cursor": { "last_edited_time": "2099-10-10T13:52:00.000Z" } + } + ] + } + } } ] diff --git a/airbyte-integrations/connectors/source-notion/integration_tests/catalog.json b/airbyte-integrations/connectors/source-notion/integration_tests/catalog.json deleted file mode 100644 index 3b76e23aa3c90..0000000000000 --- a/airbyte-integrations/connectors/source-notion/integration_tests/catalog.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "streams": [ - { - "name": "users", - "supported_sync_modes": ["full_refresh"], - "source_defined_cursor": true, - "json_schema": {} - }, - { - "name": "databases", - "supported_sync_modes": ["incremental"], - "source_defined_cursor": true, - "default_cursor_field": "last_edited_time", - "json_schema": {} - }, - { - "name": "pages", - "supported_sync_modes": ["incremental"], - "source_defined_cursor": true, - "default_cursor_field": "last_edited_time", - "json_schema": {} - }, - { - "name": "blocks", - "supported_sync_modes": ["incremental"], - "source_defined_cursor": true, - "default_cursor_field": "last_edited_time", - "json_schema": {} - }, - { - "name": "comments", - "supported_sync_modes": ["incremental"], - "source_defined_cursor": true, - "default_cursor_field": "page_last_edited_time", - "json_schema": {} - } - ] -} diff --git a/airbyte-integrations/connectors/source-notion/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-notion/integration_tests/configured_catalog.json index d0fa294445a5b..99ba1e2ef3a75 100644 --- a/airbyte-integrations/connectors/source-notion/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-notion/integration_tests/configured_catalog.json @@ -8,7 +8,7 @@ "default_primary_key": [["id"]] }, "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite", + "destination_sync_mode": "append", "primary_key": [["id"]] }, { @@ -22,7 +22,7 @@ }, "primary_key": [["id"]], "cursor_field": ["last_edited_time"], - "sync_mode": "incremental", + "sync_mode": "full_refresh", "destination_sync_mode": "append" }, { @@ -36,7 +36,7 @@ }, "primary_key": [["id"]], "cursor_field": ["last_edited_time"], - "sync_mode": "incremental", + "sync_mode": "full_refresh", "destination_sync_mode": "append" }, { @@ -50,7 +50,7 @@ }, "primary_key": [["id"]], "cursor_field": ["last_edited_time"], - "sync_mode": "incremental", + "sync_mode": "full_refresh", "destination_sync_mode": "append" }, { @@ -60,11 +60,11 @@ "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, "default_primary_key": [["id"]], - "default_cursor_field": ["page_last_edited_time"] + "default_cursor_field": ["last_edited_time"] }, "primary_key": [["id"]], - "cursor_field": ["page_last_edited_time"], - "sync_mode": "incremental", + "cursor_field": ["last_edited_time"], + "sync_mode": "full_refresh", "destination_sync_mode": "append" } ] diff --git a/airbyte-integrations/connectors/source-notion/integration_tests/incremental_catalog.json b/airbyte-integrations/connectors/source-notion/integration_tests/incremental_catalog.json index 478309ad76412..00bb2e67cd792 100644 --- a/airbyte-integrations/connectors/source-notion/integration_tests/incremental_catalog.json +++ b/airbyte-integrations/connectors/source-notion/integration_tests/incremental_catalog.json @@ -41,6 +41,20 @@ "cursor_field": ["last_edited_time"], "sync_mode": "incremental", "destination_sync_mode": "append" + }, + { + "stream": { + "name": "comments", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_primary_key": [["id"]], + "default_cursor_field": ["last_edited_time"] + }, + "primary_key": [["id"]], + "cursor_field": ["last_edited_time"], + "sync_mode": "incremental", + "destination_sync_mode": "append" } ] } diff --git a/airbyte-integrations/connectors/source-notion/metadata.yaml b/airbyte-integrations/connectors/source-notion/metadata.yaml index 3c6c34dd66389..736a732e1c6dc 100644 --- a/airbyte-integrations/connectors/source-notion/metadata.yaml +++ b/airbyte-integrations/connectors/source-notion/metadata.yaml @@ -10,12 +10,13 @@ data: connectorSubtype: api connectorType: source definitionId: 6e00b415-b02e-4160-bf02-58176a0ae687 - dockerImageTag: 2.1.0 + dockerImageTag: 3.0.0 dockerRepository: airbyte/source-notion documentationUrl: https://docs.airbyte.com/integrations/sources/notion githubIssueLabel: source-notion icon: notion.svg license: MIT + maxSecondsBetweenMessages: 1 name: Notion remoteRegistries: pypi: @@ -29,6 +30,18 @@ data: releaseStage: generally_available releases: breakingChanges: + 3.0.0: + message: + The source Notion connector is being migrated from the Python CDK to our declarative low-code CDK. + Due to changes in the handling of state format between these CDKs, this migration constitutes a breaking change + for users syncing the `Comments` stream. + To ensure a smooth migration, please reset your data for this stream upon updating. This will facilitate a fresh first sync. + If you are not syncing the `Comments` stream, you can upgrade without any further action. + For more information, see our migration documentation for source Notion. + upgradeDeadline: "2024-04-29" + scopedImpact: + - scopeType: stream + impactedScopes: ["comments"] 2.0.0: message: Version 2.0.0 introduces schema changes to multiple properties shared @@ -45,5 +58,5 @@ data: supportLevel: certified tags: - language:python - - cdk:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-notion/poetry.lock b/airbyte-integrations/connectors/source-notion/poetry.lock index deaacddf791ae..9b2d8da024b36 100644 --- a/airbyte-integrations/connectors/source-notion/poetry.lock +++ b/airbyte-integrations/connectors/source-notion/poetry.lock @@ -1,50 +1,50 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.52.7" +version = "0.78.3" description = "A framework for writing Airbyte Connectors." optional = false -python-versions = ">=3.8" +python-versions = "<4.0,>=3.9" files = [ - {file = "airbyte-cdk-0.52.7.tar.gz", hash = "sha256:73c55464ed57e030681fb4407613a5c0f07c519a4ba63aa9a4dd43d05cdf100b"}, - {file = "airbyte_cdk-0.52.7-py3-none-any.whl", hash = "sha256:f11665fc8f8dd2632d94e57f53991f7aaba8e9643a8ee7557f3040e40ea677ff"}, + {file = "airbyte_cdk-0.78.3-py3-none-any.whl", hash = "sha256:699d61ace9f8ca4477e06af3ff1bc56856e955a444081a1701c41d94629dcd74"}, + {file = "airbyte_cdk-0.78.3.tar.gz", hash = "sha256:192c2594d0e93140a7ec635fea3d4644318faada6aa986805752adf4caf9b126"}, ] [package.dependencies] -airbyte-protocol-models = "0.4.2" +airbyte-protocol-models = "0.5.1" backoff = "*" cachetools = "*" -Deprecated = ">=1.2,<2.0" +Deprecated = ">=1.2,<1.3" dpath = ">=2.0.1,<2.1.0" genson = "1.2.2" isodate = ">=0.6.1,<0.7.0" Jinja2 = ">=3.1.2,<3.2.0" -jsonref = ">=0.2,<1.0" +jsonref = ">=0.2,<0.3" jsonschema = ">=3.2.0,<3.3.0" -pendulum = "*" +pendulum = "<3.0.0" pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" python-dateutil = "*" -PyYAML = ">=6.0.1" +PyYAML = ">=6.0.1,<7.0.0" requests = "*" -requests-cache = "*" +requests_cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.19)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.19)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.19)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.19)"] -sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] [[package]] name = "airbyte-protocol-models" -version = "0.4.2" +version = "0.5.1" description = "Declares the Airbyte Protocol." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte_protocol_models-0.4.2-py3-none-any.whl", hash = "sha256:d3bbb14d4af9483bd7b08f5eb06f87e7113553bf4baed3998af95be873a0d821"}, - {file = "airbyte_protocol_models-0.4.2.tar.gz", hash = "sha256:67b149d4812f8fdb88396b161274aa73cf0e16f22e35ce44f2bfc4d47e51915c"}, + {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, + {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, ] [package.dependencies] @@ -103,13 +103,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -379,13 +379,13 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jsonref" -version = "0.3.0" -description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." +version = "0.2" +description = "An implementation of JSON Reference for Python" optional = false -python-versions = ">=3.3,<4.0" +python-versions = "*" files = [ - {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, - {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, ] [[package]] @@ -480,13 +480,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -616,6 +616,21 @@ typing-extensions = ">=4.2.0" dotenv = ["python-dotenv (>=0.10.4)"] email = ["email-validator (>=1.0.3)"] +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + [[package]] name = "pyrsistent" version = "0.20.0" @@ -683,30 +698,30 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xm [[package]] name = "pytest-mock" -version = "3.12.0" +version = "3.14.0" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, - {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, ] [package.dependencies] -pytest = ">=5.0" +pytest = ">=6.2.5" [package.extras] dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -806,13 +821,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -824,50 +839,48 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" -version = "1.11.0" +version = "1.12.1" description = "Mock out responses from the requests package" optional = false -python-versions = "*" +python-versions = ">=3.5" files = [ - {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"}, - {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"}, + {file = "requests-mock-1.12.1.tar.gz", hash = "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401"}, + {file = "requests_mock-1.12.1-py2.py3-none-any.whl", hash = "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563"}, ] [package.dependencies] -requests = ">=2.3,<3" -six = "*" +requests = ">=2.22,<3" [package.extras] fixture = ["fixtures"] -test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] [[package]] name = "setuptools" -version = "69.1.0" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -893,13 +906,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -918,13 +931,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1029,4 +1042,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "fde07907def42fe31b6513c3d374b49cd501667cb0368ed468012b58391fb29f" +content-hash = "34258a7e220323a05f6aadce404d78c226095a9fd5e0d7fe4db8f0ea9662b490" diff --git a/airbyte-integrations/connectors/source-notion/pyproject.toml b/airbyte-integrations/connectors/source-notion/pyproject.toml index 3c4250763099d..2e14459f9ddd6 100644 --- a/airbyte-integrations/connectors/source-notion/pyproject.toml +++ b/airbyte-integrations/connectors/source-notion/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "2.1.0" +version = "3.0.0" name = "source-notion" description = "Source implementation for Notion." authors = [ "Airbyte ",] @@ -17,8 +17,7 @@ include = "source_notion" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "==0.52.7" -pendulum = "==2.1.2" +airbyte-cdk = "^0" [tool.poetry.scripts] source-notion = "source_notion.run:run" diff --git a/airbyte-integrations/connectors/source-notion/source_notion/components.py b/airbyte-integrations/connectors/source-notion/source_notion/components.py new file mode 100644 index 0000000000000..77edccca1950f --- /dev/null +++ b/airbyte-integrations/connectors/source-notion/source_notion/components.py @@ -0,0 +1,124 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + +from dataclasses import dataclass, field +from typing import Any, List, Mapping, MutableMapping, Optional + +import pendulum +from airbyte_cdk.sources.declarative.extractors.record_filter import RecordFilter +from airbyte_cdk.sources.declarative.transformations import RecordTransformation +from airbyte_cdk.sources.declarative.types import StreamSlice, StreamState + + +@dataclass +class NotionUserTransformation(RecordTransformation): + """ + Custom transformation that conditionally transforms Notion User records of type "bot", + only when the record contains additional nested "owner" info. + This transformation moves the data in the `owner.{owner_type}` field into a new `owner.info` field for clarity. + """ + + def transform(self, record: MutableMapping[str, Any], **kwargs) -> MutableMapping[str, Any]: + owner = record.get("bot", {}).get("owner") + if owner: + owner_type = owner.get("type") + owner_info = owner.get(owner_type) + if owner_type and owner_info: + record["bot"]["owner"]["info"] = owner_info + del record["bot"]["owner"][owner_type] + return record + + +@dataclass +class NotionPropertiesTransformation(RecordTransformation): + """ + Transforms the nested 'properties' object within a Notion Page/Database record into a more + normalized form. In Notion's API response, 'properties' is a dictionary where each key + represents the name of a property and its value contains various metadata and the property's + actual value. + + The transformed 'properties' will consist of an array where each element is a dictionary + with two keys: 'name', holding the original property name, and 'value', containing the + property's content. + """ + + def transform(self, record: MutableMapping[str, Any], **kwargs) -> MutableMapping[str, Any]: + properties = record.get("properties", {}) + transformed_properties = [{"name": name, "value": value} for name, value in properties.items()] + record["properties"] = transformed_properties + return record + + +@dataclass +class NotionDataFeedFilter(RecordFilter): + """ + Custom filter to implement functioning incremental sync for Data Feed endpoints. + The Data Feed incremental logic doesn't seem to play nice with Notion's cursor-based pagination, + and if the current state is not far enough in the future, at least one page will be queried, + causing any records in that page to be read despite not passing the state threshold. Setting the + page_size to a lower value can help mitigate this issue, but it's not a perfect solution, and the more + granular the page size, the greater the traffic. By using this filter, we can ensure the value of state is respected, + while still using the max page_size in requests. + """ + + def filter_records( + self, records: List[Mapping[str, Any]], stream_state: StreamState, stream_slice: Optional[StreamSlice] = None, **kwargs + ) -> List[Mapping[str, Any]]: + """ + Filters a list of records, returning only those with a cursor_value greater than the current value in state. + """ + current_state = stream_state.get("last_edited_time", {}) + cursor_value = self._get_filter_date(self.config.get("start_date"), current_state) + if cursor_value: + return [record for record in records if record["last_edited_time"] >= cursor_value] + return records + + def _get_filter_date(self, start_date: str, state_value: list) -> str: + """ + Calculates the filter date to pass in the request parameters by comparing the start_date with the value of state obtained from the stream_slice. + If only the start_date exists, use it by default. + """ + + start_date_timestamp = start_date or None + state_value_timestamp = state_value or None + + if state_value_timestamp: + return max(filter(None, [start_date_timestamp, state_value_timestamp]), default=start_date_timestamp) + return start_date_timestamp + + +@dataclass +class NotionSemiIncrementalFilter(RecordFilter): + """ + Custom filter to implement semi-incremental syncing for the Comments endpoints, which does not support sorting or filtering. + This filter emulates incremental behavior by filtering out records based on the comparison of the cursor value with current value in state, + ensuring only records updated after the cutoff timestamp are synced. + """ + + def filter_records( + self, records: List[Mapping[str, Any]], stream_state: StreamState, stream_slice: Optional[StreamSlice] = None, **kwargs + ) -> List[Mapping[str, Any]]: + """ + Filters a list of records, returning only those with a cursor_value greater than the current value in state. + """ + current_state = [ + state_value + for state_value in stream_state.get("states", []) + if state_value.get("partition", {}).get("id") == stream_slice.get("id") + ] + cursor_value = self._get_filter_date(self.config.get("start_date"), current_state) + if cursor_value: + return [record for record in records if record["last_edited_time"] >= cursor_value] + return records + + def _get_filter_date(self, start_date: str, state_value: list) -> str: + """ + Calculates the filter date to pass in the request parameters by comparing the start_date with the value of state obtained from the stream_slice. + If only the start_date exists, use it by default. + """ + + start_date_timestamp = start_date or None + state_value_timestamp = state_value[0]["cursor"]["last_edited_time"] if state_value else None + + if state_value_timestamp: + return max(filter(None, [start_date_timestamp, state_value_timestamp]), default=start_date_timestamp) + return start_date_timestamp diff --git a/airbyte-integrations/connectors/source-notion/source_notion/manifest.yaml b/airbyte-integrations/connectors/source-notion/source_notion/manifest.yaml new file mode 100644 index 0000000000000..8a69b063eda6e --- /dev/null +++ b/airbyte-integrations/connectors/source-notion/source_notion/manifest.yaml @@ -0,0 +1,388 @@ +version: 0.78.3 +type: DeclarativeSource + +check: + type: CheckStream + stream_names: + - pages + +streams: + - type: DeclarativeStream + name: users + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.notion.com/v1/ + authenticator: + type: BearerAuthenticator + api_token: "{{ config.get('credentials', {}).get('token') if config.get('credentials', {}).get('auth_type') == 'token' else config.get('credentials', {}).get('access_token') if config.get('credentials', {}).get('auth_type') == 'OAuth2.0' else config.get('access_token', '') }}" + path: users + http_method: GET + request_parameters: {} + request_headers: + Notion-Version: "2022-06-28" + error_handler: + type: CompositeErrorHandler + error_handlers: + - type: DefaultErrorHandler + backoff_strategies: + - type: WaitTimeFromHeader + header: retry-after + response_filters: + - type: HttpResponseFilter + action: RETRY + http_codes: + - 429 + - type: DefaultErrorHandler + response_filters: + - type: HttpResponseFilter + action: IGNORE + http_codes: + - 400 + error_message_contains: "The start_cursor provided is invalid:" + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - results + paginator: + type: DefaultPaginator + page_token_option: + type: RequestOption + inject_into: request_parameter + field_name: start_cursor + page_size_option: + type: RequestOption + field_name: page_size + inject_into: request_parameter + pagination_strategy: + type: CursorPagination + page_size: 100 + cursor_value: '{{ response.get("next_cursor") }}' + stop_condition: '{{ not response.get("has_more") }}' + partition_router: [] + transformations: + - type: CustomTransformation + class_name: source_notion.components.NotionUserTransformation + + - type: DeclarativeStream + name: databases + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.notion.com/v1/ + path: search + http_method: POST + request_parameters: {} + request_headers: + Notion-Version: "2022-06-28" + authenticator: + authenticator: + type: BearerAuthenticator + api_token: "{{ config.get('credentials', {}).get('token') if config.get('credentials', {}).get('auth_type') == 'token' else config.get('credentials', {}).get('access_token') if config.get('credentials', {}).get('auth_type') == 'OAuth2.0' else config.get('access_token', '') }}" + error_handler: + type: CompositeErrorHandler + error_handlers: + - type: DefaultErrorHandler + backoff_strategies: + - type: WaitTimeFromHeader + header: retry-after + response_filters: + - type: HttpResponseFilter + action: RETRY + http_codes: + - 429 + - type: DefaultErrorHandler + response_filters: + - type: HttpResponseFilter + action: IGNORE + http_codes: + - 400 + error_message_contains: "The start_cursor provided is invalid:" + request_body_json: + sort: '{ "direction": "descending", "timestamp": "last_edited_time" }' + filter: '{ "property": "object", "value": "database" }' + record_selector: + type: RecordSelector + record_filter: + type: CustomRecordFilter + class_name: source_notion.components.NotionDataFeedFilter + extractor: + type: DpathExtractor + field_path: + - results + paginator: + type: DefaultPaginator + page_token_option: + type: RequestOption + inject_into: body_json + field_name: start_cursor + page_size_option: + type: RequestOption + field_name: page_size + inject_into: body_json + pagination_strategy: + type: CursorPagination + page_size: 100 + cursor_value: '{{ response.get("next_cursor") }}' + stop_condition: '{{ not response.get("has_more") }}' + partition_router: [] + incremental_sync: + type: DatetimeBasedCursor + cursor_field: last_edited_time + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%S.%fZ" + datetime_format: "%Y-%m-%dT%H:%M:%S.%fZ" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%S.%fZ" + is_data_feed: true + transformations: + - type: CustomTransformation + class_name: source_notion.components.NotionPropertiesTransformation + + - type: DeclarativeStream + name: pages + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.notion.com/v1/ + path: search + http_method: POST + request_parameters: {} + request_headers: + Notion-Version: "2022-06-28" + authenticator: + authenticator: + type: BearerAuthenticator + api_token: "{{ config.get('credentials', {}).get('token') if config.get('credentials', {}).get('auth_type') == 'token' else config.get('credentials', {}).get('access_token') if config.get('credentials', {}).get('auth_type') == 'OAuth2.0' else config.get('access_token', '') }}" + error_handler: + type: CompositeErrorHandler + error_handlers: + - type: DefaultErrorHandler + backoff_strategies: + - type: WaitTimeFromHeader + header: retry-after + response_filters: + - type: HttpResponseFilter + action: RETRY + http_codes: + - 429 + - type: DefaultErrorHandler + response_filters: + - type: HttpResponseFilter + action: IGNORE + http_codes: + - 400 + error_message_contains: "The start_cursor provided is invalid:" + request_body_json: + sort: '{ "direction": "descending", "timestamp": "last_edited_time" }' + filter: '{ "property": "object", "value": "page" }' + record_selector: + type: RecordSelector + record_filter: + type: CustomRecordFilter + class_name: source_notion.components.NotionDataFeedFilter + extractor: + type: DpathExtractor + field_path: + - results + paginator: + type: DefaultPaginator + page_token_option: + type: RequestOption + inject_into: body_json + field_name: start_cursor + page_size_option: + type: RequestOption + field_name: page_size + inject_into: body_json + pagination_strategy: + type: CursorPagination + page_size: 100 + cursor_value: '{{ response.get("next_cursor") }}' + stop_condition: '{{ not response.get("has_more") }}' + partition_router: [] + incremental_sync: + type: DatetimeBasedCursor + cursor_field: last_edited_time + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%S.%fZ" + datetime_format: "%Y-%m-%dT%H:%M:%S.%fZ" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%S.%fZ" + is_data_feed: true + transformations: + - type: CustomTransformation + class_name: source_notion.components.NotionPropertiesTransformation + + - type: DeclarativeStream + name: comments + primary_key: + - id + incremental_sync: + type: DatetimeBasedCursor + cursor_field: "last_edited_time" + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%S.%fZ" + datetime_format: "%Y-%m-%dT%H:%M:%S.%fZ" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config.get('start_date', day_delta(-730, '%Y-%m-%dT%H:%M:%S.%fZ'))}}" + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.notion.com/v1/ + path: comments + http_method: GET + request_parameters: {} + request_headers: + Notion-Version: "2022-06-28" + authenticator: + authenticator: + type: BearerAuthenticator + api_token: "{{ config.get('credentials', {}).get('token') if config.get('credentials', {}).get('auth_type') == 'token' else config.get('credentials', {}).get('access_token') if config.get('credentials', {}).get('auth_type') == 'OAuth2.0' else config.get('access_token', '') }}" + error_handler: + type: CompositeErrorHandler + error_handlers: + - type: DefaultErrorHandler + backoff_strategies: + - type: WaitTimeFromHeader + header: retry-after + response_filters: + - type: HttpResponseFilter + action: RETRY + http_codes: + - 429 + - type: DefaultErrorHandler + response_filters: + - type: HttpResponseFilter + action: IGNORE + http_codes: + - 400 + error_message_contains: "The start_cursor provided is invalid:" + request_body_json: {} + record_selector: + type: RecordSelector + record_filter: + type: CustomRecordFilter + class_name: source_notion.components.NotionSemiIncrementalFilter + extractor: + type: DpathExtractor + field_path: + - results + paginator: + type: DefaultPaginator + page_token_option: + type: RequestOption + inject_into: request_parameter + field_name: start_cursor + page_size_option: + type: RequestOption + field_name: page_size + inject_into: request_parameter + pagination_strategy: + type: CursorPagination + page_size: 100 + cursor_value: '{{ response.get("next_cursor") }}' + stop_condition: '{{ not response.get("has_more") }}' + partition_router: + - type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + request_option: + type: RequestOption + field_name: block_id + inject_into: request_parameter + partition_field: id + stream: + type: DeclarativeStream + name: pages + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.notion.com/v1/ + path: search + http_method: POST + request_parameters: {} + request_headers: + Notion-Version: "2022-06-28" + authenticator: + type: BearerAuthenticator + api_token: "{{ config.get('credentials', {}).get('token') if config.get('credentials', {}).get('auth_type') == 'token' else config.get('credentials', {}).get('access_token') if config.get('credentials', {}).get('auth_type') == 'OAuth2.0' else config.get('access_token', '') }}" + error_handler: + type: CompositeErrorHandler + error_handlers: + - type: DefaultErrorHandler + backoff_strategies: + - type: WaitTimeFromHeader + header: retry-after + response_filters: + - type: HttpResponseFilter + action: RETRY + http_codes: + - 429 + - type: DefaultErrorHandler + response_filters: + - type: HttpResponseFilter + action: IGNORE + http_codes: + - 400 + error_message_contains: "The start_cursor provided is invalid:" + request_body_json: + sort: >- + { "direction": "descending", "timestamp": + "last_edited_time" } + filter: '{ "property": "object", "value": "page" }' + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - results + paginator: + type: DefaultPaginator + page_token_option: + type: RequestOption + inject_into: body_json + field_name: start_cursor + page_size_option: + type: RequestOption + field_name: page_size + inject_into: body_json + pagination_strategy: + type: CursorPagination + page_size: 100 + cursor_value: '{{ response.get("next_cursor") }}' + stop_condition: '{{ not response.get("has_more") }}' + partition_router: [] + incremental_sync: + type: DatetimeBasedCursor + cursor_field: last_edited_time + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%S.%fZ" + datetime_format: "%Y-%m-%dT%H:%M:%S.%fZ" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%S.%fZ" + is_data_feed: true diff --git a/airbyte-integrations/connectors/source-notion/source_notion/source.py b/airbyte-integrations/connectors/source-notion/source_notion/source.py index 08262edfeee4c..10191839f60da 100644 --- a/airbyte-integrations/connectors/source-notion/source_notion/source.py +++ b/airbyte-integrations/connectors/source-notion/source_notion/source.py @@ -2,23 +2,23 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # +from typing import Any, List, Mapping -import logging -import re -from typing import Any, List, Mapping, Tuple - -import pendulum -import requests -from airbyte_cdk.sources import AbstractSource +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource from airbyte_cdk.sources.streams import Stream from airbyte_cdk.sources.streams.http.requests_native_auth import TokenAuthenticator -from pendulum.parsing.exceptions import ParserError +from source_notion.streams import Blocks, Pages -from .streams import Blocks, Comments, Databases, Pages, Users +class SourceNotion(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "manifest.yaml"}) -class SourceNotion(AbstractSource): def _get_authenticator(self, config: Mapping[str, Any]) -> TokenAuthenticator: + """ + Creates and returns the appropriate authenticator for the Blocks stream. + Supports legacy auth format as well as current token/oauth implementations. + """ credentials = config.get("credentials", {}) auth_type = credentials.get("auth_type") token = credentials.get("access_token") if auth_type == "OAuth2.0" else credentials.get("token") @@ -26,73 +26,23 @@ def _get_authenticator(self, config: Mapping[str, Any]) -> TokenAuthenticator: if credentials and token: return TokenAuthenticator(token) - # The original implementation did not support OAuth, and therefore had no "credentials" key. - # We can maintain backwards compatibility for OG connections by checking for the deprecated "access_token" key, just in case. + # The original implementation did not support multiple auth methods, and therefore had no "credentials" key. if config.get("access_token"): return TokenAuthenticator(config["access_token"]) - def _validate_start_date(self, config: Mapping[str, Any]): - start_date = config.get("start_date") - - if start_date: - pattern = re.compile(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z") - if not pattern.match(start_date): # Compare against the pattern descriptor. - return "Please check the format of the start date against the pattern descriptor." - - try: # Handle invalid dates. - parsed_start_date = pendulum.parse(start_date) - except ParserError: - return "The provided start date is not a valid date. Please check the format and try again." - - if parsed_start_date > pendulum.now("UTC"): # Handle future start date. - return "The start date cannot be greater than the current date." - - return None - - def _extract_error_message(self, response: requests.Response) -> str: + def streams(self, config: Mapping[str, Any]) -> List[Stream]: """ - Return a human-readable error message from a Notion API response, for use in connection check. + Overrides the declarative streams method to instantiate and append the Python Blocks stream + to the list of declarative streams. """ - error_json = response.json() - error_code = error_json.get("code", "unknown_error") - error_message = error_json.get( - "message", "An unspecified error occurred while connecting to Notion. Please check your credentials and try again." - ) - - if error_code == "unauthorized": - return "The provided API access token is invalid. Please double-check that you input the correct token and have granted the necessary permissions to your Notion integration." - if error_code == "restricted_resource": - return "The provided API access token does not have the correct permissions configured. Please double-check that you have granted all the necessary permissions to your Notion integration." - return f"Error: {error_message} (Error code: {error_code})" - - def check_connection(self, logger: logging.Logger, config: Mapping[str, Any]) -> Tuple[bool, any]: - # First confirm that if start_date is set by user, it is valid. - validation_error = self._validate_start_date(config) - if validation_error: - return False, validation_error - try: - authenticator = self._get_authenticator(config) - # Notion doesn't have a dedicated ping endpoint, so we can use the users/me endpoint instead. - # Endpoint docs: https://developers.notion.com/reference/get-self - ping_endpoint = "https://api.notion.com/v1/users/me" - notion_version = {"Notion-Version": "2022-06-28"} - response = requests.get(ping_endpoint, auth=authenticator, headers=notion_version) - - if response.status_code == 200: - return True, None - else: - error_message = self._extract_error_message(response) - return False, error_message - - except requests.exceptions.RequestException as e: - return False, str(e) - - def streams(self, config: Mapping[str, Any]) -> List[Stream]: + streams = super().streams(config) authenticator = self._get_authenticator(config) args = {"authenticator": authenticator, "config": config} - pages = Pages(**args) - blocks = Blocks(parent=pages, **args) - comments = Comments(parent=pages, **args) - return [Users(**args), Databases(**args), pages, blocks, comments] + # Blocks stream is a substream of Pages, so we also need to instantiate the parent stream. + blocks_parent = Pages(**args) + blocks_stream = Blocks(parent=blocks_parent, **args) + + streams.append(blocks_stream) + return streams diff --git a/airbyte-integrations/connectors/source-notion/source_notion/spec.json b/airbyte-integrations/connectors/source-notion/source_notion/spec.json index 4b833a5674540..fb67d98b6dece 100644 --- a/airbyte-integrations/connectors/source-notion/source_notion/spec.json +++ b/airbyte-integrations/connectors/source-notion/source_notion/spec.json @@ -4,7 +4,6 @@ "$schema": "http://json-schema.org/draft-07/schema#", "title": "Notion Source Spec", "type": "object", - "required": ["credentials"], "properties": { "start_date": { "title": "Start Date", diff --git a/airbyte-integrations/connectors/source-notion/source_notion/streams.py b/airbyte-integrations/connectors/source-notion/source_notion/streams.py index f7cb9e456e250..0a0acb310a826 100644 --- a/airbyte-integrations/connectors/source-notion/source_notion/streams.py +++ b/airbyte-integrations/connectors/source-notion/source_notion/streams.py @@ -17,8 +17,6 @@ from airbyte_cdk.sources.streams.http.exceptions import UserDefinedBackoffException from requests import HTTPError -from .utils import transform_properties - # maximum block hierarchy recursive request depth MAX_BLOCK_DEPTH = 30 @@ -221,7 +219,7 @@ def parse_response(self, response: requests.Response, stream_state: Mapping[str, if isinstance(state_lmd, StateValueWrapper): state_lmd = state_lmd.value if (not stream_state or record_lmd >= state_lmd) and record_lmd >= self.start_date: - yield from transform_properties(record) + yield record def get_updated_state( self, @@ -238,48 +236,6 @@ def get_updated_state( return {self.cursor_field: state_value} -class Users(NotionStream): - """ - Docs: https://developers.notion.com/reference/get-users - """ - - def path(self, **kwargs) -> str: - return "users" - - def request_params(self, next_page_token: Mapping[str, Any] = None, **kwargs) -> MutableMapping[str, Any]: - params = {"page_size": self.page_size} - if next_page_token: - params["start_cursor"] = next_page_token["next_cursor"] - return params - - def transform(self, record: MutableMapping[str, Any]) -> MutableMapping[str, Any]: - owner = record.get("bot", {}).get("owner") - if owner: - owner_type = owner.get("type") - owner_info = owner.get(owner_type) - if owner_type and owner_info: - record["bot"]["owner"]["info"] = owner_info - del record["bot"]["owner"][owner_type] - return record - - def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - # sometimes notion api returns response without results object - data = response.json().get("results", []) - for record in data: - yield self.transform(record) - - -class Databases(IncrementalNotionStream): - """ - Docs: https://developers.notion.com/reference/post-search - """ - - state_checkpoint_interval = 100 - - def __init__(self, **kwargs): - super().__init__(obj_type="database", **kwargs) - - class Pages(IncrementalNotionStream): """ Docs: https://developers.notion.com/reference/post-search @@ -390,64 +346,3 @@ def should_retry(self, response: requests.Response) -> bool: else: return super().should_retry(response) return super().should_retry(response) - - -class Comments(HttpSubStream, IncrementalNotionStream): - """ - Comments Object Docs: https://developers.notion.com/reference/comment-object - Comments Endpoint Docs: https://developers.notion.com/reference/retrieve-a-comment - """ - - http_method = "GET" - # We can use the "last edited time" of the parent Page as the cursor field, - # since we cannot guarantee the order of comments between pages. - cursor_field = "page_last_edited_time" - - def path(self, **kwargs) -> str: - return "comments" - - def request_params( - self, next_page_token: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, **kwargs - ) -> MutableMapping[str, Any]: - block_id = stream_slice.get("block_id") - params = {"block_id": block_id, "page_size": self.page_size} - - if next_page_token: - params["start_cursor"] = next_page_token["next_cursor"] - - return params - - def parse_response( - self, response: requests.Response, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, **kwargs - ) -> Iterable[Mapping]: - - # Get the parent's "last edited time" to compare against state - page_last_edited_time = stream_slice.get("page_last_edited_time", "") - records = response.json().get("results", []) - - for record in records: - record["page_last_edited_time"] = page_last_edited_time - state_last_edited_time = stream_state.get(self.cursor_field, "") - - if isinstance(state_last_edited_time, StateValueWrapper): - state_last_edited_time = state_last_edited_time.value - - if not stream_state or page_last_edited_time >= state_last_edited_time: - yield from transform_properties(record) - - def read_records(self, **kwargs) -> Iterable[Mapping[str, Any]]: - - yield from IncrementalNotionStream.read_records(self, **kwargs) - - def stream_slices( - self, sync_mode: SyncMode, cursor_field: Optional[List[str]] = None, **kwargs - ) -> Iterable[Optional[Mapping[str, Any]]]: - - # Gather parent stream records in full - parent_records = self.parent.read_records(sync_mode=SyncMode.full_refresh, cursor_field=self.parent.cursor_field) - - # The parent stream is the Pages stream, but we have to pass its id to the request_params as "block_id" - # because pages are also blocks in the Notion API. - # We also grab the last_edited_time from the parent record to use as the cursor field. - for record in parent_records: - yield {"block_id": record["id"], "page_last_edited_time": record["last_edited_time"]} diff --git a/airbyte-integrations/connectors/source-notion/source_notion/utils.py b/airbyte-integrations/connectors/source-notion/source_notion/utils.py deleted file mode 100644 index 6103f00fb6b97..0000000000000 --- a/airbyte-integrations/connectors/source-notion/source_notion/utils.py +++ /dev/null @@ -1,63 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from typing import Any, Mapping - - -def transform_properties(record: Mapping[str, Any], dict_key: str = "properties") -> Mapping[str, Any]: - """ - Transform nested `properties` object. - Move unique named entities into `name`, `value` to handle normalization. - EXAMPLE INPUT: - { - {...}, - "properties": { - "some_unique_name1": { - "id": "some_id", - "type": "relation", - "relation": [] - }, - "some_unique_name2": { - "id": "some_id", - "type": "date", - "date": null - }, - ... - }, - {...} - } - - EXAMPLE OUTPUT: - { - {...}, - "properties": [ - { - "name": "some_unique_name1", - "value": { - "id": "some_id", - "type": "relation", - "relation": [] - } - }, - { - "name": "some_unique_name2", - "value": { - "id": "some_id", - "type": "date", - "date": null - } - }, - ], - {...} - } - - """ - properties = record.get(dict_key) - if properties: - new_properties = [] - for k, v in properties.items(): - new_properties.append({"name": k, "value": v}) - record[dict_key] = new_properties - yield record diff --git a/airbyte-integrations/connectors/source-notion/unit_tests/test_components.py b/airbyte-integrations/connectors/source-notion/unit_tests/test_components.py new file mode 100644 index 0000000000000..1036b31c7ea8f --- /dev/null +++ b/airbyte-integrations/connectors/source-notion/unit_tests/test_components.py @@ -0,0 +1,172 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +import pytest +from source_notion.components import * + + +def test_users_stream_transformation(): + input_record = { + "object": "user", "id": "123", "name": "Airbyte", "avatar_url": "some url", "type": "bot", + "bot": {"owner": {"type": "user", "user": {"object": "user", "id": "id", "name": "Test User", "avatar_url": None, "type": "person", + "person": {"email": "email"}}}, "workspace_name": "test"} + } + output_record = { + "object": "user", "id": "123", "name": "Airbyte", "avatar_url": "some url", "type": "bot", + "bot": {"owner": {"type": "user", "info": {"object": "user", "id": "id", "name": "Test User", "avatar_url": None, "type": "person", + "person": {"email": "email"}}}, "workspace_name": "test"} + } + assert NotionUserTransformation().transform(input_record) == output_record + + +def test_notion_properties_transformation(): + input_record = { + "id": "123", "properties": { + "Due date": { + "id": "M%3BBw", "type": "date", "date": { + "start": "2023-02-23", "end": None, "time_zone": None + } + }, + "Status": { + "id": "Z%3ClH", "type": "status", "status": { + "id": "86ddb6ec-0627-47f8-800d-b65afd28be13", "name": "Not started", "color": "default" + } + } + } + } + + output_record = { + "id": "123", "properties": [ + { + "name": "Due date", "value": { + "id": "M%3BBw", "type": "date", "date": { + "start": "2023-02-23", "end": None, "time_zone": None + } + } + }, + { + "name": "Status", + "value": { + "id": "Z%3ClH", "type": "status", "status": { + "id": "86ddb6ec-0627-47f8-800d-b65afd28be13", "name": "Not started", "color": "default" + } + } + } + ] + } + assert NotionPropertiesTransformation().transform(input_record) == output_record + + +state_test_records = [ + {"id": "1", "last_edited_time": "2022-01-02T00:00:00.000Z"}, + {"id": "2", "last_edited_time": "2022-01-03T00:00:00.000Z"}, + {"id": "3", "last_edited_time": "2022-01-04T00:00:00.000Z"}, +] + +@pytest.fixture +def data_feed_config(): + return NotionDataFeedFilter(parameters={}, config={"start_date": "2021-01-01T00:00:00.000Z"}) + +@pytest.mark.parametrize( + "state_value, expected_return", + [ + ( + "2021-02-01T00:00:00.000Z", "2021-02-01T00:00:00.000Z" + ), + ( + "2020-01-01T00:00:00.000Z", "2021-01-01T00:00:00.000Z" + ), + ( + {}, "2021-01-01T00:00:00.000Z" + ) + ], + ids=["State value is greater than start_date", "State value is less than start_date", "Empty state, default to start_date"] +) +def test_data_feed_get_filter_date(data_feed_config, state_value, expected_return): + start_date = data_feed_config.config["start_date"] + + result = data_feed_config._get_filter_date(start_date, state_value) + assert result == expected_return, f"Expected {expected_return}, but got {result}." + + +@pytest.mark.parametrize("stream_state,stream_slice,expected_records", [ + ( + {"last_edited_time": "2022-01-01T00:00:00.000Z"}, + {"id": "some_id"}, + state_test_records + ), + ( + {"last_edited_time": "2022-01-03T00:00:00.000Z"}, + {"id": "some_id"}, + [state_test_records[-2], state_test_records[-1]] + ), + ( + {"last_edited_time": "2022-01-05T00:00:00.000Z"}, + {"id": "some_id"}, + [] + ), + ( + {}, + {"id": "some_id"}, + state_test_records + ) +], +ids=["No records filtered", "Some records filtered", "All records filtered", "Empty state: no records filtered"]) +def test_data_feed_filter_records(data_feed_config, stream_state, stream_slice, expected_records): + filtered_records = data_feed_config.filter_records(state_test_records, stream_state, stream_slice) + assert filtered_records == expected_records, "Filtered records do not match the expected records." + + +@pytest.fixture +def semi_incremental_config_start_date(): + return NotionSemiIncrementalFilter(parameters={}, config={"start_date": "2021-01-01T00:00:00.000Z"}) + +@pytest.mark.parametrize( + "state_value, expected_return", + [ + ( + [{"cursor": {"last_edited_time": "2021-02-01T00:00:00.000Z"}}], "2021-02-01T00:00:00.000Z" + ), + ( + [{"cursor": {"last_edited_time": "2020-01-01T00:00:00.000Z"}}], "2021-01-01T00:00:00.000Z" + ), + ( + [], "2021-01-01T00:00:00.000Z" + ) + ], + ids=["State value is greater than start_date", "State value is less than start_date", "Empty state, default to start_date"] +) +def test_semi_incremental_get_filter_date(semi_incremental_config_start_date, state_value, expected_return): + start_date = semi_incremental_config_start_date.config["start_date"] + + result = semi_incremental_config_start_date._get_filter_date(start_date, state_value) + assert result == expected_return, f"Expected {expected_return}, but got {result}." + + +@pytest.mark.parametrize("stream_state,stream_slice,expected_records", [ + ( + {"states": [{"partition": {"id": "some_id"}, "cursor": {"last_edited_time": "2022-01-01T00:00:00.000Z"}}]}, + {"id": "some_id"}, + state_test_records + ), + ( + {"states": [{"partition": {"id": "some_id"}, "cursor": {"last_edited_time": "2022-01-03T00:00:00.000Z"}}]}, + {"id": "some_id"}, + [state_test_records[-2], state_test_records[-1]] + ), + ( + {"states": [{"partition": {"id": "some_id"}, "cursor": {"last_edited_time": "2022-01-05T00:00:00.000Z"}}]}, + {"id": "some_id"}, + [] + ), + ( + {"states": []}, + {"id": "some_id"}, + state_test_records + ) +], +ids=["No records filtered", "Some records filtered", "All records filtered", "Empty state: no records filtered"]) +def test_semi_incremental_filter_records(semi_incremental_config_start_date, stream_state, stream_slice, expected_records): + filtered_records = semi_incremental_config_start_date.filter_records(state_test_records, stream_state, stream_slice) + assert filtered_records == expected_records, "Filtered records do not match the expected records." diff --git a/airbyte-integrations/connectors/source-notion/unit_tests/test_incremental_streams.py b/airbyte-integrations/connectors/source-notion/unit_tests/test_incremental_streams.py deleted file mode 100644 index d34afd131fc00..0000000000000 --- a/airbyte-integrations/connectors/source-notion/unit_tests/test_incremental_streams.py +++ /dev/null @@ -1,403 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import re -import time -from unittest.mock import MagicMock, patch - -from airbyte_cdk.models import SyncMode -from airbyte_cdk.sources.streams.http.exceptions import DefaultBackoffException, UserDefinedBackoffException -from pytest import fixture, mark -from source_notion.streams import Blocks, Comments, IncrementalNotionStream, Pages - - -@fixture -def patch_incremental_base_class(mocker): - # Mock abstract methods to enable instantiating abstract class - mocker.patch.object(IncrementalNotionStream, "path", "v0/example_endpoint") - mocker.patch.object(IncrementalNotionStream, "primary_key", "test_primary_key") - mocker.patch.object(IncrementalNotionStream, "__abstractmethods__", set()) - - -@fixture -def args(): - return {"authenticator": None, "config": {"access_token": "", "start_date": "2021-01-01T00:00:00.000Z"}} - - -@fixture -def parent(args): - return Pages(**args) - - -@fixture -def stream(patch_incremental_base_class, args): - return IncrementalNotionStream(**args) - - -@fixture -def blocks(parent, args): - return Blocks(parent=parent, **args) - - -@fixture -def comments(parent, args): - return Comments(parent=parent, **args) - - -def test_cursor_field(stream): - expected_cursor_field = "last_edited_time" - assert stream.cursor_field == expected_cursor_field - - -def test_get_updated_state(stream): - stream.is_finished = False - - inputs = { - "current_stream_state": {"last_edited_time": "2021-10-10T00:00:00.000Z"}, - "latest_record": {"last_edited_time": "2021-10-20T00:00:00.000Z"}, - } - expected_state = "2021-10-10T00:00:00.000Z" - state = stream.get_updated_state(**inputs) - assert state["last_edited_time"].value == expected_state - - inputs = {"current_stream_state": state, "latest_record": {"last_edited_time": "2021-10-30T00:00:00.000Z"}} - state = stream.get_updated_state(**inputs) - assert state["last_edited_time"].value == expected_state - - # after stream sync is finished, state should output the max cursor time - stream.is_finished = True - inputs = {"current_stream_state": state, "latest_record": {"last_edited_time": "2021-10-10T00:00:00.000Z"}} - expected_state = "2021-10-30T00:00:00.000Z" - state = stream.get_updated_state(**inputs) - assert state["last_edited_time"].value == expected_state - - -def test_stream_slices(blocks, requests_mock): - stream = blocks - requests_mock.post( - "https://api.notion.com/v1/search", - json={ - "results": [ - {"id": "aaa", "last_edited_time": "2022-10-10T00:00:00.000Z"}, - {"id": "bbb", "last_edited_time": "2022-10-10T00:00:00.000Z"}, - ], - "next_cursor": None, - }, - ) - inputs = {"sync_mode": SyncMode.incremental, "cursor_field": [], "stream_state": {}} - expected_stream_slice = [{"page_id": "aaa"}, {"page_id": "bbb"}] - assert list(stream.stream_slices(**inputs)) == expected_stream_slice - - -def test_end_of_stream_state(blocks, requests_mock): - stream = blocks - requests_mock.post( - "https://api.notion.com/v1/search", json={"results": [{"id": "aaa"}, {"id": "bbb"}, {"id": "ccc"}], "next_cursor": None} - ) - requests_mock.get( - "https://api.notion.com/v1/blocks/aaa/children", - json={ - "results": [{"id": "block 1", "type": "heading_1", "has_children": False, "last_edited_time": "2021-10-30T00:00:00.000Z"}], - "next_cursor": None, - }, - ) - requests_mock.get( - "https://api.notion.com/v1/blocks/bbb/children", - json={ - "results": [{"id": "block 2", "type": "heading_1", "has_children": False, "last_edited_time": "2021-10-20T00:00:00.000Z"}], - "next_cursor": None, - }, - ) - requests_mock.get( - "https://api.notion.com/v1/blocks/ccc/children", - json={ - "results": [{"id": "block 3", "type": "heading_1", "has_children": False, "last_edited_time": "2021-10-10T00:00:00.000Z"}], - "next_cursor": None, - }, - ) - - state = {"last_edited_time": "2021-10-01T00:00:00.000Z"} - sync_mode = SyncMode.incremental - - for idx, app_slice in enumerate(stream.stream_slices(sync_mode, **MagicMock())): - for record in stream.read_records(sync_mode=sync_mode, stream_slice=app_slice): - state = stream.get_updated_state(state, record) - state_value = state["last_edited_time"].value - if idx == 2: # the last slice - assert state_value == "2021-10-30T00:00:00.000Z" - else: - assert state_value == "2021-10-01T00:00:00.000Z" - - -def test_supports_incremental(stream, mocker): - mocker.patch.object(IncrementalNotionStream, "cursor_field", "dummy_field") - assert stream.supports_incremental - - -def test_source_defined_cursor(stream): - assert stream.source_defined_cursor - - -def test_stream_checkpoint_interval(stream): - expected_checkpoint_interval = None - assert stream.state_checkpoint_interval == expected_checkpoint_interval - - -def test_request_params(blocks): - stream = blocks - inputs = {"stream_state": {}, "next_page_token": {"next_cursor": "aaa"}} - expected_request_params = {"page_size": 100, "start_cursor": "aaa"} - assert stream.request_params(**inputs) == expected_request_params - - -def test_record_filter(blocks, requests_mock): - stream = blocks - sync_mode = SyncMode.incremental - - root = "aaa" - record = {"id": "id1", "type": "heading_1", "has_children": False, "last_edited_time": "2021-10-20T00:00:00.000Z"} - requests_mock.get(f"https://api.notion.com/v1/blocks/{root}/children", json={"results": [record], "next_cursor": None}) - - inputs = { - "sync_mode": sync_mode, - "stream_state": {"last_edited_time": "2021-10-10T00:00:00.000Z"}, - } - stream.block_id_stack = [root] - assert next(stream.read_records(**inputs)) == record - - inputs = { - "sync_mode": sync_mode, - "stream_state": {"last_edited_time": "2021-10-30T00:00:00.000Z"}, - } - stream.block_id_stack = [root] - assert list(stream.read_records(**inputs)) == [] - - # 'child_page' and 'child_database' should not be included - record["type"] = "child_page" - inputs = { - "sync_mode": sync_mode, - "stream_state": {"last_edited_time": "2021-10-10T00:00:00.000Z"}, - } - stream.block_id_stack = [root] - assert list(stream.read_records(**inputs)) == [] - record["type"] = "child_database" - stream.block_id_stack = [root] - assert list(stream.read_records(**inputs)) == [] - - -def test_recursive_read(blocks, requests_mock): - stream = blocks - - # block records tree: - # - # root |-> record1 -> record2 -> record3 - # |-> record4 - - root = "aaa" - record1 = {"id": "id1", "type": "heading_1", "has_children": True, "last_edited_time": "2022-10-10T00:00:00.000Z"} - record2 = {"id": "id2", "type": "heading_1", "has_children": True, "last_edited_time": "2022-10-10T00:00:00.000Z"} - record3 = {"id": "id3", "type": "heading_1", "has_children": False, "last_edited_time": "2022-10-10T00:00:00.000Z"} - record4 = {"id": "id4", "type": "heading_1", "has_children": False, "last_edited_time": "2022-10-10T00:00:00.000Z"} - requests_mock.get(f"https://api.notion.com/v1/blocks/{root}/children", json={"results": [record1, record4], "next_cursor": None}) - requests_mock.get(f"https://api.notion.com/v1/blocks/{record1['id']}/children", json={"results": [record2], "next_cursor": None}) - requests_mock.get(f"https://api.notion.com/v1/blocks/{record2['id']}/children", json={"results": [record3], "next_cursor": None}) - - inputs = {"sync_mode": SyncMode.incremental} - stream.block_id_stack = [root] - assert list(stream.read_records(**inputs)) == [record3, record2, record1, record4] - - -def test_invalid_start_cursor(parent, requests_mock, caplog): - stream = parent - error_message = "The start_cursor provided is invalid: wrong_start_cursor" - search_endpoint = requests_mock.post( - "https://api.notion.com/v1/search", - status_code=400, - json={"object": "error", "status": 400, "code": "validation_error", "message": error_message}, - ) - - inputs = {"sync_mode": SyncMode.incremental, "cursor_field": [], "stream_state": {}} - with patch.object(stream, "backoff_time", return_value=0.1): - list(stream.read_records(**inputs)) - assert search_endpoint.call_count == 8 - assert f"Skipping stream pages, error message: {error_message}" in caplog.messages - - -@mark.parametrize( - "status_code,error_code,error_message, expected_backoff_time", - [ - (400, "validation_error", "The start_cursor provided is invalid: wrong_start_cursor", [10, 10, 10, 10, 10, 10, 10]), - (429, "rate_limited", "Rate Limited", [5, 5, 5, 5, 5, 5, 5]), # Retry-header is set to 5 seconds for test - (500, "internal_server_error", "Internal server error", [5, 10, 20, 40, 80, 5, 10]), - ], -) -def test_retry_logic(status_code, error_code, error_message, expected_backoff_time, parent, requests_mock, caplog): - stream = parent - - # Set up a generator that alternates between error and success responses, to check the reset of backoff time between failures - mock_responses = ( - [ - { - "status_code": status_code, - "response": {"object": "error", "status": status_code, "code": error_code, "message": error_message}, - } - for _ in range(5) - ] - + [{"status_code": 200, "response": {"object": "list", "results": [], "has_more": True, "next_cursor": "dummy_cursor"}}] - + [ - { - "status_code": status_code, - "response": {"object": "error", "status": status_code, "code": error_code, "message": error_message}, - } - for _ in range(2) - ] - + [{"status_code": 200, "response": {"object": "list", "results": [], "has_more": False, "next_cursor": None}}] - ) - - def response_callback(request, context): - # Get the next response from the mock_responses list - response = mock_responses.pop(0) - context.status_code = response["status_code"] - return response["response"] - - # Mock the time.sleep function to avoid waiting during tests - with patch.object(time, "sleep", return_value=None): - search_endpoint = requests_mock.post( - "https://api.notion.com/v1/search", - json=response_callback, - headers={"retry-after": "5"}, - ) - - inputs = {"sync_mode": SyncMode.full_refresh, "cursor_field": [], "stream_state": {}} - try: - list(stream.read_records(**inputs)) - except (UserDefinedBackoffException, DefaultBackoffException) as e: - return e - - # Check that the endpoint was called the expected number of times - assert search_endpoint.call_count == 9 - - # Additional assertions to check reset of backoff time - # Find the backoff times from the message logs to compare against expected backoff times - log_messages = [record.message for record in caplog.records] - backoff_times = [ - round(float(re.search(r"(\d+(\.\d+)?) seconds", msg).group(1))) - for msg in log_messages - if any(word in msg for word in ["Sleeping", "Waiting"]) - ] - - assert backoff_times == expected_backoff_time, f"Unexpected backoff times: {backoff_times}" - - -# Tests for Comments stream -def test_comments_path(comments): - assert comments.path() == "comments" - - -def test_comments_request_params(comments): - """ - Test that the request_params function returns the correct parameters for the Comments endpoint - """ - params = comments.request_params( - next_page_token=None, stream_slice={"block_id": "block1", "page_last_edited_time": "2021-01-01T00:00:00.000Z"} - ) - - assert params == {"block_id": "block1", "page_size": comments.page_size} - - -def test_comments_stream_slices(comments, requests_mock): - """ - Test that the stream_slices function returns the parent page ids as "block_id" and the last edited time as "page_last_edited_time" - """ - - inputs = {"sync_mode": SyncMode.incremental, "cursor_field": comments.cursor_field, "stream_state": {}} - - requests_mock.post( - "https://api.notion.com/v1/search", - json={ - "results": [ - {"name": "page_1", "id": "id_1", "last_edited_time": "2021-01-01T00:00:00.000Z"}, - {"name": "page_2", "id": "id_2", "last_edited_time": "2021-20-01T00:00:00.000Z"}, - ], - "next_cursor": None, - }, - ) - - expected_stream_slice = [ - {"block_id": "id_1", "page_last_edited_time": "2021-01-01T00:00:00.000Z"}, - {"block_id": "id_2", "page_last_edited_time": "2021-20-01T00:00:00.000Z"}, - ] - - actual_stream_slices_list = list(comments.stream_slices(**inputs)) - assert actual_stream_slices_list == expected_stream_slice - - -@mark.parametrize( - "stream_slice, stream_state, mock_data, expected_records", - [ - # Test that comments with page_last_edited_time >= stream_state are replicated, regardless of each record's LMD - ( - {"block_id": "block_id_1", "page_last_edited_time": "2023-10-10T00:00:00.000Z"}, - {"page_last_edited_time": "2021-10-10T00:00:00.000Z"}, - [ - { - "id": "comment_id_1", - "rich_text": [{"type": "text", "text": {"content": "I am the Alpha comment"}}], - "last_edited_time": "2021-01-01T00:00:00.000Z", - }, - { - "id": "comment_id_2", - "rich_text": [{"type": "text", "text": {"content": "I am the Omega comment"}}], - "last_edited_time": "2022-12-31T00:00:00.000Z", - }, - ], - [ - { - "id": "comment_id_1", - "rich_text": [{"type": "text", "text": {"content": "I am the Alpha comment"}}], - "last_edited_time": "2021-01-01T00:00:00.000Z", - "page_last_edited_time": "2023-10-10T00:00:00.000Z", - }, - { - "id": "comment_id_2", - "rich_text": [{"type": "text", "text": {"content": "I am the Omega comment"}}], - "last_edited_time": "2022-12-31T00:00:00.000Z", - "page_last_edited_time": "2023-10-10T00:00:00.000Z", - }, - ], - ), - # Test that comments with page_last_edited_time < stream_state are not replicated, regardless of each record's LMD - ( - {"block_id": "block_id_2", "page_last_edited_time": "2021-01-01T00:00:00.000Z"}, - {"page_last_edited_time": "2022-20-20T00:00:00.000Z"}, - [ - { - "id": "comment_id_1", - "rich_text": [{"type": "text", "text": {"content": "I will not be replicated"}}], - "last_edited_time": "2021-10-30T00:00:00.000Z", - }, - { - "id": "comment_id_2", - "rich_text": [{"type": "text", "text": {"content": "I will also not be replicated"}}], - "last_edited_time": "2023-01-01T00:00:00.000Z", - }, - ], - [], - ), - ], -) -def test_comments_read_records(comments, requests_mock, stream_slice, stream_state, mock_data, expected_records): - inputs = { - "sync_mode": SyncMode.incremental, - "cursor_field": comments.cursor_field, - "stream_state": stream_state, - "stream_slice": stream_slice, - } - - requests_mock.get( - f"https://api.notion.com/v1/comments?block_id={stream_slice['block_id']}", json={"results": mock_data, "next_cursor": None} - ) - - response = list(comments.read_records(**inputs)) - assert response == expected_records diff --git a/airbyte-integrations/connectors/source-notion/unit_tests/test_python_streams.py b/airbyte-integrations/connectors/source-notion/unit_tests/test_python_streams.py new file mode 100644 index 0000000000000..cf972a5d74aa3 --- /dev/null +++ b/airbyte-integrations/connectors/source-notion/unit_tests/test_python_streams.py @@ -0,0 +1,474 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +import re +import time +from http import HTTPStatus +from unittest.mock import MagicMock, patch + +import freezegun +import pytest +import requests +from airbyte_cdk.models import SyncMode +from airbyte_cdk.sources.streams.http.exceptions import DefaultBackoffException, UserDefinedBackoffException +from pytest import fixture, mark +from source_notion.streams import Blocks, IncrementalNotionStream, NotionStream, Pages + + +@pytest.fixture +def patch_base_class(mocker): + # Mock abstract methods to enable instantiating abstract class + mocker.patch.object(NotionStream, "path", "v0/example_endpoint") + mocker.patch.object(NotionStream, "primary_key", "test_primary_key") + mocker.patch.object(NotionStream, "__abstractmethods__", set()) + + +@fixture +def patch_incremental_base_class(mocker): + # Mock abstract methods to enable instantiating abstract class + mocker.patch.object(IncrementalNotionStream, "path", "v0/example_endpoint") + mocker.patch.object(IncrementalNotionStream, "primary_key", "test_primary_key") + mocker.patch.object(IncrementalNotionStream, "__abstractmethods__", set()) + + +@fixture +def args(): + return {"authenticator": None, "config": {"access_token": "", "start_date": "2021-01-01T00:00:00.000Z"}} + + +@fixture +def parent(args): + return Pages(**args) + + +@fixture +def stream(args): + return IncrementalNotionStream(**args) + + +@fixture +def blocks(parent, args): + return Blocks(parent=parent, **args) + + +def test_cursor_field(stream): + expected_cursor_field = "last_edited_time" + assert stream.cursor_field == expected_cursor_field + + +def test_supports_incremental(stream, mocker): + mocker.patch.object(IncrementalNotionStream, "cursor_field", "dummy_field") + assert stream.supports_incremental + + +def test_source_defined_cursor(stream): + assert stream.source_defined_cursor + + +def test_stream_checkpoint_interval(stream): + expected_checkpoint_interval = None + assert stream.state_checkpoint_interval == expected_checkpoint_interval + + +def test_http_method(patch_base_class): + stream = NotionStream(config=MagicMock()) + expected_method = "GET" + assert stream.http_method == expected_method + + +@pytest.mark.parametrize( + "response_json, expected_output", + [ + ({"next_cursor": "some_cursor", "has_more": True}, {"next_cursor": "some_cursor"}), + ({"has_more": False}, None), + ({}, None) + ], + ids=["Next_page_token exists with cursor", "No next_page_token", "No next_page_token"], +) +def test_next_page_token(patch_base_class, response_json, expected_output): + stream = NotionStream(config=MagicMock()) + mock_response = MagicMock() + mock_response.json.return_value = response_json + result = stream.next_page_token(mock_response) + assert result == expected_output + + +@pytest.mark.parametrize( + "config, expected_start_date, current_time", + [ + ( + {"authenticator": "secret_token", "start_date": "2021-09-01T00:00:00.000Z"}, + "2021-09-01T00:00:00.000Z", + "2022-09-22T00:00:00.000Z", + ), + ({"authenticator": "super_secret_token", "start_date": None}, "2020-09-22T00:00:00.000Z", "2022-09-22T00:00:00.000Z"), + ({"authenticator": "even_more_secret_token"}, "2021-01-01T12:30:00.000Z", "2023-01-01T12:30:00.000Z"), + ], +) +def test_set_start_date(patch_base_class, config, expected_start_date, current_time): + """ + Test that start_date in config is either: + 1. set to the value provided by the user + 2. defaults to two years from the present date set by the test environment. + """ + with freezegun.freeze_time(current_time): + stream = NotionStream(config=config) + assert stream.start_date == expected_start_date + + +def test_request_params(blocks): + stream = blocks + inputs = {"stream_state": {}, "next_page_token": {"next_cursor": "aaa"}} + expected_request_params = {"page_size": 100, "start_cursor": "aaa"} + assert stream.request_params(**inputs) == expected_request_params + + +def test_stream_slices(blocks, requests_mock): + stream = blocks + requests_mock.post( + "https://api.notion.com/v1/search", + json={ + "results": [ + {"id": "aaa", "last_edited_time": "2022-10-10T00:00:00.000Z"}, + {"id": "bbb", "last_edited_time": "2022-10-10T00:00:00.000Z"}, + ], + "next_cursor": None, + }, + ) + inputs = {"sync_mode": SyncMode.incremental, "cursor_field": [], "stream_state": {}} + expected_stream_slice = [{"page_id": "aaa"}, {"page_id": "bbb"}] + assert list(stream.stream_slices(**inputs)) == expected_stream_slice + + +def test_end_of_stream_state(blocks, requests_mock): + stream = blocks + requests_mock.post( + "https://api.notion.com/v1/search", json={"results": [{"id": "aaa"}, {"id": "bbb"}, {"id": "ccc"}], "next_cursor": None} + ) + requests_mock.get( + "https://api.notion.com/v1/blocks/aaa/children", + json={ + "results": [{"id": "block 1", "type": "heading_1", "has_children": False, "last_edited_time": "2021-10-30T00:00:00.000Z"}], + "next_cursor": None, + }, + ) + requests_mock.get( + "https://api.notion.com/v1/blocks/bbb/children", + json={ + "results": [{"id": "block 2", "type": "heading_1", "has_children": False, "last_edited_time": "2021-10-20T00:00:00.000Z"}], + "next_cursor": None, + }, + ) + requests_mock.get( + "https://api.notion.com/v1/blocks/ccc/children", + json={ + "results": [{"id": "block 3", "type": "heading_1", "has_children": False, "last_edited_time": "2021-10-10T00:00:00.000Z"}], + "next_cursor": None, + }, + ) + + state = {"last_edited_time": "2021-10-01T00:00:00.000Z"} + sync_mode = SyncMode.incremental + + for idx, app_slice in enumerate(stream.stream_slices(sync_mode, **MagicMock())): + for record in stream.read_records(sync_mode=sync_mode, stream_slice=app_slice): + state = stream.get_updated_state(state, record) + state_value = state["last_edited_time"].value + if idx == 2: # the last slice + assert state_value == "2021-10-30T00:00:00.000Z" + else: + assert state_value == "2021-10-01T00:00:00.000Z" + + +def test_get_updated_state(stream): + stream.is_finished = False + + inputs = { + "current_stream_state": {"last_edited_time": "2021-10-10T00:00:00.000Z"}, + "latest_record": {"last_edited_time": "2021-10-20T00:00:00.000Z"}, + } + expected_state = "2021-10-10T00:00:00.000Z" + state = stream.get_updated_state(**inputs) + assert state["last_edited_time"].value == expected_state + + inputs = {"current_stream_state": state, "latest_record": {"last_edited_time": "2021-10-30T00:00:00.000Z"}} + state = stream.get_updated_state(**inputs) + assert state["last_edited_time"].value == expected_state + + # after stream sync is finished, state should output the max cursor time + stream.is_finished = True + inputs = {"current_stream_state": state, "latest_record": {"last_edited_time": "2021-10-10T00:00:00.000Z"}} + expected_state = "2021-10-30T00:00:00.000Z" + state = stream.get_updated_state(**inputs) + assert state["last_edited_time"].value == expected_state + + +def test_record_filter(blocks, requests_mock): + stream = blocks + sync_mode = SyncMode.incremental + + root = "aaa" + record = {"id": "id1", "type": "heading_1", "has_children": False, "last_edited_time": "2021-10-20T00:00:00.000Z"} + requests_mock.get(f"https://api.notion.com/v1/blocks/{root}/children", json={"results": [record], "next_cursor": None}) + + inputs = { + "sync_mode": sync_mode, + "stream_state": {"last_edited_time": "2021-10-10T00:00:00.000Z"}, + } + stream.block_id_stack = [root] + assert next(stream.read_records(**inputs)) == record + + inputs = { + "sync_mode": sync_mode, + "stream_state": {"last_edited_time": "2021-10-30T00:00:00.000Z"}, + } + stream.block_id_stack = [root] + assert list(stream.read_records(**inputs)) == [] + + # 'child_page' and 'child_database' should not be included + record["type"] = "child_page" + inputs = { + "sync_mode": sync_mode, + "stream_state": {"last_edited_time": "2021-10-10T00:00:00.000Z"}, + } + stream.block_id_stack = [root] + assert list(stream.read_records(**inputs)) == [] + record["type"] = "child_database" + stream.block_id_stack = [root] + assert list(stream.read_records(**inputs)) == [] + + +def test_recursive_read(blocks, requests_mock): + stream = blocks + + # block records tree: + # + # root |-> record1 -> record2 -> record3 + # |-> record4 + + root = "aaa" + record1 = {"id": "id1", "type": "heading_1", "has_children": True, "last_edited_time": "2022-10-10T00:00:00.000Z"} + record2 = {"id": "id2", "type": "heading_1", "has_children": True, "last_edited_time": "2022-10-10T00:00:00.000Z"} + record3 = {"id": "id3", "type": "heading_1", "has_children": False, "last_edited_time": "2022-10-10T00:00:00.000Z"} + record4 = {"id": "id4", "type": "heading_1", "has_children": False, "last_edited_time": "2022-10-10T00:00:00.000Z"} + requests_mock.get(f"https://api.notion.com/v1/blocks/{root}/children", json={"results": [record1, record4], "next_cursor": None}) + requests_mock.get(f"https://api.notion.com/v1/blocks/{record1['id']}/children", json={"results": [record2], "next_cursor": None}) + requests_mock.get(f"https://api.notion.com/v1/blocks/{record2['id']}/children", json={"results": [record3], "next_cursor": None}) + + inputs = {"sync_mode": SyncMode.incremental} + stream.block_id_stack = [root] + assert list(stream.read_records(**inputs)) == [record3, record2, record1, record4] + + +def test_invalid_start_cursor(parent, requests_mock, caplog): + stream = parent + error_message = "The start_cursor provided is invalid: wrong_start_cursor" + search_endpoint = requests_mock.post( + "https://api.notion.com/v1/search", + status_code=400, + json={"object": "error", "status": 400, "code": "validation_error", "message": error_message}, + ) + + inputs = {"sync_mode": SyncMode.incremental, "cursor_field": [], "stream_state": {}} + with patch.object(stream, "backoff_time", return_value=0.1): + list(stream.read_records(**inputs)) + assert search_endpoint.call_count == 8 + assert f"Skipping stream pages, error message: {error_message}" in caplog.messages + + +@mark.parametrize( + "status_code,error_code,error_message, expected_backoff_time", + [ + (400, "validation_error", "The start_cursor provided is invalid: wrong_start_cursor", [10, 10, 10, 10, 10, 10, 10]), + (429, "rate_limited", "Rate Limited", [5, 5, 5, 5, 5, 5, 5]), # Retry-header is set to 5 seconds for test + (500, "internal_server_error", "Internal server error", [5, 10, 20, 40, 80, 5, 10]), + ], +) +def test_retry_logic(status_code, error_code, error_message, expected_backoff_time, parent, requests_mock, caplog): + stream = parent + + # Set up a generator that alternates between error and success responses, to check the reset of backoff time between failures + mock_responses = ( + [ + { + "status_code": status_code, + "response": {"object": "error", "status": status_code, "code": error_code, "message": error_message}, + } + for _ in range(5) + ] + + [{"status_code": 200, "response": {"object": "list", "results": [], "has_more": True, "next_cursor": "dummy_cursor"}}] + + [ + { + "status_code": status_code, + "response": {"object": "error", "status": status_code, "code": error_code, "message": error_message}, + } + for _ in range(2) + ] + + [{"status_code": 200, "response": {"object": "list", "results": [], "has_more": False, "next_cursor": None}}] + ) + + def response_callback(request, context): + # Get the next response from the mock_responses list + response = mock_responses.pop(0) + context.status_code = response["status_code"] + return response["response"] + + # Mock the time.sleep function to avoid waiting during tests + with patch.object(time, "sleep", return_value=None): + search_endpoint = requests_mock.post( + "https://api.notion.com/v1/search", + json=response_callback, + headers={"retry-after": "5"}, + ) + + inputs = {"sync_mode": SyncMode.full_refresh, "cursor_field": [], "stream_state": {}} + try: + list(stream.read_records(**inputs)) + except (UserDefinedBackoffException, DefaultBackoffException) as e: + return e + + # Check that the endpoint was called the expected number of times + assert search_endpoint.call_count == 9 + + # Additional assertions to check reset of backoff time + # Find the backoff times from the message logs to compare against expected backoff times + log_messages = [record.message for record in caplog.records] + backoff_times = [ + round(float(re.search(r"(\d+(\.\d+)?) seconds", msg).group(1))) + for msg in log_messages + if any(word in msg for word in ["Sleeping", "Waiting"]) + ] + + assert backoff_times == expected_backoff_time, f"Unexpected backoff times: {backoff_times}" + + +@pytest.mark.parametrize( + ("http_status", "should_retry"), + [ + (HTTPStatus.OK, False), + (HTTPStatus.BAD_REQUEST, True), + (HTTPStatus.TOO_MANY_REQUESTS, True), + (HTTPStatus.INTERNAL_SERVER_ERROR, True), + (HTTPStatus.BAD_GATEWAY, True), + (HTTPStatus.FORBIDDEN, False), + ], +) +def test_should_retry(patch_base_class, http_status, should_retry): + response_mock = MagicMock() + response_mock.status_code = http_status + stream = NotionStream(config=MagicMock()) + assert stream.should_retry(response_mock) == should_retry + + +def test_should_not_retry_with_ai_block(requests_mock): + stream = Blocks(parent=None, config=MagicMock()) + json_response = { + "object": "error", + "status": 400, + "code": "validation_error", + "message": "Block type ai_block is not supported via the API.", + } + requests_mock.get("https://api.notion.com/v1/blocks/123", json=json_response, status_code=400) + test_response = requests.get("https://api.notion.com/v1/blocks/123") + assert not stream.should_retry(test_response) + + +def test_should_not_retry_with_not_found_block(requests_mock): + stream = Blocks(parent=None, config=MagicMock()) + json_response = { + "object": "error", + "status": 404, + "message": "Not Found for url: https://api.notion.com/v1/blocks/123/children?page_size=100", + } + requests_mock.get("https://api.notion.com/v1/blocks/123", json=json_response, status_code=404) + test_response = requests.get("https://api.notion.com/v1/blocks/123") + assert not stream.should_retry(test_response) + + +def test_empty_blocks_results(requests_mock): + stream = Blocks(parent=None, config=MagicMock()) + requests_mock.get( + "https://api.notion.com/v1/blocks/aaa/children", + json={ + "next_cursor": None, + }, + ) + stream.block_id_stack = ["aaa"] + assert list(stream.read_records(sync_mode=SyncMode.incremental, stream_slice=[])) == [] + + +@pytest.mark.parametrize( + "status_code,retry_after_header,expected_backoff", + [ + (429, "10", 10.0), # Case for 429 error with retry-after header + (429, None, 5.0), # Case for 429 error without retry-after header, should default to 5.0 + (504, None, None), # Case for 500-level error, should default to None and use CDK exponential backoff + (400, None, 10.0), # Case for specific 400-level error handled by check_invalid_start_cursor + ], +) +def test_backoff_time(status_code, retry_after_header, expected_backoff, patch_base_class): + response_mock = MagicMock(spec=requests.Response) + response_mock.status_code = status_code + response_mock.headers = {"retry-after": retry_after_header} if retry_after_header else {} + stream = NotionStream(config=MagicMock()) + + assert stream.backoff_time(response_mock) == expected_backoff + + +@pytest.mark.parametrize( + "initial_page_size, expected_page_size, mock_response", + [ + (100, 50, {"status_code": 504, "json": {}, "headers": {"retry-after": "1"}}), + (50, 25, {"status_code": 504, "json": {}, "headers": {"retry-after": "1"}}), + (100, 100, {"status_code": 429, "json": {}, "headers": {"retry-after": "1"}}), + (50, 100, {"status_code": 200, "json": {"data": "success"}, "headers": {}}), + ], + ids=[ + "504 error, page_size 100 -> 50", + "504 error, page_size 50 -> 25", + "429 error, page_size 100 -> 100", + "200 success, page_size 50 -> 100", + ], +) +def test_request_throttle(initial_page_size, expected_page_size, mock_response, requests_mock): + """ + Tests that the request page_size is halved when a 504 error is encountered. + Once a 200 success is encountered, the page_size is reset to 100, for use in the next call. + """ + requests_mock.register_uri( + "GET", + "https://api.notion.com/v1/users", + [{"status_code": mock_response["status_code"], "json": mock_response["json"], "headers": mock_response["headers"]}], + ) + + stream = Pages(config={"authenticator": "auth"}) + stream.page_size = initial_page_size + response = requests.get("https://api.notion.com/v1/users") + + stream.should_retry(response=response) + + assert stream.page_size == expected_page_size + + +def test_block_record_transformation(): + stream = Blocks(parent=None, config=MagicMock()) + response_record = { + "object": "block", "id": "id", "parent": {"type": "page_id", "page_id": "id"}, "created_time": "2021-10-19T13:33:00.000Z", "last_edited_time": "2021-10-19T13:33:00.000Z", + "created_by": {"object": "user", "id": "id"}, "last_edited_by": {"object": "user", "id": "id"}, "has_children": False, "archived": False, "type": "paragraph", + "paragraph": {"rich_text": [{"type": "text", "text": {"content": "test", "link": None}, "annotations": {"bold": False, "italic": False, "strikethrough": False, "underline": False, "code": False, "color": "default"}, "plain_text": "test", "href": None}, + {"type": "text", "text": {"content": "@", "link": None}, "annotations": {"bold": False, "italic": False, "strikethrough": False, "underline": False, "code": True, "color": "default"}, "plain_text": "@", "href": None}, + {"type": "text", "text": {"content": "test", "link": None}, "annotations": {"bold": False, "italic": False, "strikethrough": False, "underline": False, "code": False, "color": "default"}, "plain_text": "test", "href": None}, + {"type": "mention", "mention": {"type": "page", "page": {"id": "id"}}, "annotations": {"bold": False, "italic": False, "strikethrough": False, "underline": False, "code": False, "color": "default"}, + "plain_text": "test", "href": "https://www.notion.so/id"}], "color": "default"} + } + expected_record = { + "object": "block", "id": "id", "parent": {"type": "page_id", "page_id": "id"}, "created_time": "2021-10-19T13:33:00.000Z", "last_edited_time": "2021-10-19T13:33:00.000Z", + "created_by": {"object": "user", "id": "id"}, "last_edited_by": {"object": "user", "id": "id"}, "has_children": False, "archived": False, "type": "paragraph", + "paragraph": {"rich_text": [{"type": "text", "text": {"content": "test", "link": None}, "annotations":{"bold": False, "italic": False, "strikethrough": False, "underline": False, "code": False, "color": "default"}, "plain_text":"test", "href": None}, + {"type": "text", "text": {"content": "@", "link": None}, "annotations": {"bold": False, "italic": False, "strikethrough": False, "underline": False, "code": True, "color": "default"}, "plain_text": "@", "href": None}, + {"type": "text", "text": {"content": "test", "link": None}, "annotations": {"bold": False, "italic": False, "strikethrough": False, "underline": False, "code": False, "color": "default"}, "plain_text": "test", "href": None}, + {"type": "mention", "mention": {"type": "page", "info": {"id": "id"}}, "annotations": {"bold": False, "italic": False, "strikethrough": False, "underline": False, "code": False, "color": "default"}, "plain_text": "test", "href": "https://www.notion.so/id"}], + "color": "default"} + } + assert stream.transform(response_record) == expected_record diff --git a/airbyte-integrations/connectors/source-notion/unit_tests/test_source.py b/airbyte-integrations/connectors/source-notion/unit_tests/test_source.py index 2831b1b0f8ed7..c270f0894e5e4 100644 --- a/airbyte-integrations/connectors/source-notion/unit_tests/test_source.py +++ b/airbyte-integrations/connectors/source-notion/unit_tests/test_source.py @@ -2,85 +2,35 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # -from unittest.mock import MagicMock - import pytest from airbyte_cdk.sources.streams.http.requests_native_auth import TokenAuthenticator from source_notion.source import SourceNotion -UNAUTHORIZED_ERROR_MESSAGE = "The provided API access token is invalid. Please double-check that you input the correct token and have granted the necessary permissions to your Notion integration." -RESTRICTED_RESOURCE_ERROR_MESSAGE = "The provided API access token does not have the correct permissions configured. Please double-check that you have granted all the necessary permissions to your Notion integration." -GENERIC_ERROR_MESSAGE = "Conflict occurred while saving. Please try again." -DEFAULT_ERROR_MESSAGE = "An unspecified error occurred while connecting to Notion. Please check your credentials and try again." - - -def test_check_connection(mocker, requests_mock): - source = SourceNotion() - logger_mock, config_mock = MagicMock(), {"access_token": "test_token", "start_date": "2021-01-01T00:00:00.000Z"} - requests_mock.get( - "https://api.notion.com/v1/users/me", - json={"results": [{"id": "aaa", "last_edited_time": "2022-01-01T00:00:00.000Z"}], "next_cursor": None}, - ) - assert source.check_connection(logger_mock, config_mock) == (True, None) - - -@pytest.mark.parametrize( - "status_code,json_response,expected_message", - [ - (401, {"code": "unauthorized"}, UNAUTHORIZED_ERROR_MESSAGE), - (403, {"code": "restricted_resource"}, RESTRICTED_RESOURCE_ERROR_MESSAGE), - (409, {"code": "conflict_error", "message": GENERIC_ERROR_MESSAGE}, f"Error: {GENERIC_ERROR_MESSAGE} (Error code: conflict_error)"), - (400, {}, f"Error: {DEFAULT_ERROR_MESSAGE} (Error code: unknown_error)"), - ], -) -def test_check_connection_errors(mocker, requests_mock, status_code, json_response, expected_message): - source = SourceNotion() - logger_mock, config_mock = MagicMock(), {"access_token": "test_token", "start_date": "2021-01-01T00:00:00.000Z"} - requests_mock.get("https://api.notion.com/v1/users/me", status_code=status_code, json=json_response) - result, message = source.check_connection(logger_mock, config_mock) - - assert result is False - assert message == expected_message - - -def test_streams(mocker): - source = SourceNotion() - config_mock = MagicMock() - streams = source.streams(config_mock) - expected_streams_number = 5 - assert len(streams) == expected_streams_number - @pytest.mark.parametrize( "config, expected_token", [ - ({"credentials": {"auth_type": "OAuth2.0", "access_token": "oauth_token"}}, "Bearer oauth_token"), - ({"credentials": {"auth_type": "token", "token": "other_token"}}, "Bearer other_token"), + ({"credentials": {"auth_type": "OAuth2.0", "access_token": "oauth_token_123"}}, "Bearer oauth_token_123"), + ({"credentials": {"auth_type": "token", "token": "api_token_456"}}, "Bearer api_token_456"), + ({"access_token": "legacy_token_789"}, "Bearer legacy_token_789"), ({}, None), ], ) def test_get_authenticator(config, expected_token): source = SourceNotion() - authenticator = source._get_authenticator(config) # Fixed line + authenticator = source._get_authenticator(config) if expected_token: assert isinstance(authenticator, TokenAuthenticator) - assert authenticator.token == expected_token # Replace with the actual way to access the token from the authenticator + assert authenticator.token == expected_token else: assert authenticator is None -@pytest.mark.parametrize( - "config, expected_return", - [ - ({}, None), - ({"start_date": "2021-01-01T00:00:00.000Z"}, None), - ({"start_date": "2021-99-99T79:89:99.123Z"}, "The provided start date is not a valid date. Please check the format and try again."), - ({"start_date": "2021-01-01T00:00:00.000"}, "Please check the format of the start date against the pattern descriptor."), - ({"start_date": "2025-01-25T00:00:00.000Z"}, "The start date cannot be greater than the current date."), - ], -) -def test_validate_start_date(config, expected_return): +def test_streams(): source = SourceNotion() - result = source._validate_start_date(config) - assert result == expected_return + config_mock = {"start_date": "2020-01-01T00:00:00.000Z", + "credentials": {"auth_type": "token", "token": "abcd"}} + streams = source.streams(config_mock) + expected_streams_number = 5 + assert len(streams) == expected_streams_number diff --git a/airbyte-integrations/connectors/source-notion/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-notion/unit_tests/test_streams.py deleted file mode 100644 index d369b201beccc..0000000000000 --- a/airbyte-integrations/connectors/source-notion/unit_tests/test_streams.py +++ /dev/null @@ -1,352 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import logging -import random -from http import HTTPStatus -from unittest.mock import MagicMock - -import freezegun -import pytest -import requests -from airbyte_cdk.models import SyncMode -from source_notion.streams import Blocks, NotionStream, Pages, Users - - -@pytest.fixture -def patch_base_class(mocker): - # Mock abstract methods to enable instantiating abstract class - mocker.patch.object(NotionStream, "path", "v0/example_endpoint") - mocker.patch.object(NotionStream, "primary_key", "test_primary_key") - mocker.patch.object(NotionStream, "__abstractmethods__", set()) - - -def test_request_params(patch_base_class): - stream = NotionStream(config=MagicMock()) - inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} - expected_params = {} - assert stream.request_params(**inputs) == expected_params - - -def test_next_page_token(patch_base_class, requests_mock): - stream = NotionStream(config=MagicMock()) - requests_mock.get("https://dummy", json={"next_cursor": "aaa"}) - inputs = {"response": requests.get("https://dummy")} - expected_token = {"next_cursor": "aaa"} - assert stream.next_page_token(**inputs) == expected_token - - -@pytest.mark.parametrize( - "response_json, expected_output", - [({"next_cursor": "some_cursor", "has_more": True}, {"next_cursor": "some_cursor"}), ({"has_more": False}, None), ({}, None)], -) -def test_next_page_token_with_no_cursor(patch_base_class, response_json, expected_output): - stream = NotionStream(config=MagicMock()) - mock_response = MagicMock() - mock_response.json.return_value = response_json - result = stream.next_page_token(mock_response) - assert result == expected_output - - -def test_parse_response(patch_base_class, requests_mock): - stream = NotionStream(config=MagicMock()) - requests_mock.get("https://dummy", json={"results": [{"a": 123}, {"b": "xx"}]}) - resp = requests.get("https://dummy") - inputs = {"response": resp, "stream_state": MagicMock()} - expected_parsed_object = [{"a": 123}, {"b": "xx"}] - assert list(stream.parse_response(**inputs)) == expected_parsed_object - - -def test_request_headers(patch_base_class): - stream = NotionStream(config=MagicMock()) - inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} - expected_headers = {"Notion-Version": "2022-06-28"} - assert stream.request_headers(**inputs) == expected_headers - - -def test_http_method(patch_base_class): - stream = NotionStream(config=MagicMock()) - expected_method = "GET" - assert stream.http_method == expected_method - - -@pytest.mark.parametrize( - ("http_status", "should_retry"), - [ - (HTTPStatus.OK, False), - (HTTPStatus.BAD_REQUEST, True), - (HTTPStatus.TOO_MANY_REQUESTS, True), - (HTTPStatus.INTERNAL_SERVER_ERROR, True), - (HTTPStatus.BAD_GATEWAY, True), - (HTTPStatus.FORBIDDEN, False), - ], -) -def test_should_retry(patch_base_class, http_status, should_retry): - response_mock = MagicMock() - response_mock.status_code = http_status - stream = NotionStream(config=MagicMock()) - assert stream.should_retry(response_mock) == should_retry - - -def test_should_not_retry_with_ai_block(requests_mock): - stream = Blocks(parent=None, config=MagicMock()) - json_response = { - "object": "error", - "status": 400, - "code": "validation_error", - "message": "Block type ai_block is not supported via the API.", - } - requests_mock.get("https://api.notion.com/v1/blocks/123", json=json_response, status_code=400) - test_response = requests.get("https://api.notion.com/v1/blocks/123") - assert not stream.should_retry(test_response) - - -def test_should_not_retry_with_not_found_block(requests_mock): - stream = Blocks(parent=None, config=MagicMock()) - json_response = { - "object": "error", - "status": 404, - "message": "Not Found for url: https://api.notion.com/v1/blocks/123/children?page_size=100", - } - requests_mock.get("https://api.notion.com/v1/blocks/123", json=json_response, status_code=404) - test_response = requests.get("https://api.notion.com/v1/blocks/123") - assert not stream.should_retry(test_response) - - -def test_empty_blocks_results(requests_mock): - stream = Blocks(parent=None, config=MagicMock()) - requests_mock.get( - "https://api.notion.com/v1/blocks/aaa/children", - json={ - "next_cursor": None, - }, - ) - stream.block_id_stack = ["aaa"] - assert list(stream.read_records(sync_mode=SyncMode.incremental, stream_slice=[])) == [] - - -@pytest.mark.parametrize( - "status_code,retry_after_header,expected_backoff", - [ - (429, "10", 10.0), # Case for 429 error with retry-after header - (429, None, 5.0), # Case for 429 error without retry-after header, should default to 5.0 - (504, None, None), # Case for 500-level error, should default to None and use CDK exponential backoff - (400, None, 10.0), # Case for specific 400-level error handled by check_invalid_start_cursor - ], -) -def test_backoff_time(status_code, retry_after_header, expected_backoff, patch_base_class): - response_mock = MagicMock(spec=requests.Response) - response_mock.status_code = status_code - response_mock.headers = {"retry-after": retry_after_header} if retry_after_header else {} - stream = NotionStream(config=MagicMock()) - - assert stream.backoff_time(response_mock) == expected_backoff - - -def test_users_request_params(patch_base_class): - stream = Users(config=MagicMock()) - - # No next_page_token. First pull - inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} - expected_params = {"page_size": 100} - assert stream.request_params(**inputs) == expected_params - - # When getting pages after the first pull. - inputs = {"stream_slice": None, "stream_state": None, "next_page_token": {"next_cursor": "123"}} - expected_params = {"start_cursor": "123", "page_size": 100} - assert stream.request_params(**inputs) == expected_params - - -def test_user_stream_handles_pagination_correctly(requests_mock): - """ - Test shows that Users stream uses pagination as per Notion API docs. - """ - - response_body = { - "object": "list", - "results": [{"id": f"{x}", "object": "user", "type": ["person", "bot"][random.randint(0, 1)]} for x in range(100)], - "next_cursor": "bc48234b-77b2-41a6-95a3-6a8abb7887d5", - "has_more": True, - "type": "user", - } - requests_mock.get("https://api.notion.com/v1/users?page_size=100", json=response_body) - - response_body = { - "object": "list", - "results": [{"id": f"{x}", "object": "user", "type": ["person", "bot"][random.randint(0, 1)]} for x in range(100, 200)], - "next_cursor": "67030467-b97b-4729-8fd6-2fb33d012da4", - "has_more": True, - "type": "user", - } - requests_mock.get("https://api.notion.com/v1/users?page_size=100&start_cursor=bc48234b-77b2-41a6-95a3-6a8abb7887d5", json=response_body) - - response_body = { - "object": "list", - "results": [{"id": f"{x}", "object": "user", "type": ["person", "bot"][random.randint(0, 1)]} for x in range(200, 220)], - "next_cursor": None, - "has_more": False, - "type": "user", - } - requests_mock.get("https://api.notion.com/v1/users?page_size=100&start_cursor=67030467-b97b-4729-8fd6-2fb33d012da4", json=response_body) - - stream = Users(config=MagicMock()) - - records = stream.read_records(sync_mode=SyncMode.full_refresh) - records_length = sum(1 for _ in records) - assert records_length == 220 - - -@pytest.mark.parametrize( - "config, expected_start_date, current_time", - [ - ( - {"authenticator": "secret_token", "start_date": "2021-09-01T00:00:00.000Z"}, - "2021-09-01T00:00:00.000Z", - "2022-09-22T00:00:00.000Z", - ), - ({"authenticator": "super_secret_token", "start_date": None}, "2020-09-22T00:00:00.000Z", "2022-09-22T00:00:00.000Z"), - ({"authenticator": "even_more_secret_token"}, "2021-01-01T12:30:00.000Z", "2023-01-01T12:30:00.000Z"), - ], -) -def test_set_start_date(patch_base_class, config, expected_start_date, current_time): - """ - Test that start_date in config is either: - 1. set to the value provided by the user - 2. defaults to two years from the present date set by the test environment. - """ - with freezegun.freeze_time(current_time): - stream = NotionStream(config=config) - assert stream.start_date == expected_start_date - - -@pytest.mark.parametrize( - "stream,parent,url,status_code,response_content,expected_availability,expected_reason_substring", - [ - ( - Users, - None, - "https://api.notion.com/v1/users", - 403, - b'{"object": "error", "status": 403, "code": "restricted_resource"}', - False, - "This is likely due to insufficient permissions for your Notion integration.", - ), - ( - Blocks, - Pages, - "https://api.notion.com/v1/blocks/123/children", - 403, - b'{"object": "error", "status": 403, "code": "restricted_resource"}', - False, - "This is likely due to insufficient permissions for your Notion integration.", - ), - ( - Users, - None, - "https://api.notion.com/v1/users", - 200, - b'{"object": "list", "results": [{"id": "123", "object": "user", "type": "person"}]}', - True, - None, - ), - ], -) -def test_403_error_handling( - requests_mock, stream, parent, url, status_code, response_content, expected_availability, expected_reason_substring -): - """ - Test that availability strategy flags streams with 403 error as unavailable - and returns custom Notion integration message. - """ - - requests_mock.get(url=url, status_code=status_code, content=response_content) - - if parent: - stream = stream(parent=parent, config=MagicMock()) - stream.parent.stream_slices = MagicMock(return_value=[{"id": "123"}]) - stream.parent.read_records = MagicMock(return_value=[{"id": "123", "object": "page"}]) - else: - stream = stream(config=MagicMock()) - - is_available, reason = stream.check_availability(logger=logging.Logger, source=MagicMock()) - - assert is_available is expected_availability - - if expected_reason_substring: - assert expected_reason_substring in reason - else: - assert reason is None - - -@pytest.mark.parametrize( - "initial_page_size, expected_page_size, mock_response", - [ - (100, 50, {"status_code": 504, "json": {}, "headers": {"retry-after": "1"}}), - (50, 25, {"status_code": 504, "json": {}, "headers": {"retry-after": "1"}}), - (100, 100, {"status_code": 429, "json": {}, "headers": {"retry-after": "1"}}), - (50, 100, {"status_code": 200, "json": {"data": "success"}, "headers": {}}), - ], - ids=[ - "504 error, page_size 100 -> 50", - "504 error, page_size 50 -> 25", - "429 error, page_size 100 -> 100", - "200 success, page_size 50 -> 100", - ], -) -def test_request_throttle(initial_page_size, expected_page_size, mock_response, requests_mock): - """ - Tests that the request page_size is halved when a 504 error is encountered. - Once a 200 success is encountered, the page_size is reset to 100, for use in the next call. - """ - requests_mock.register_uri( - "GET", - "https://api.notion.com/v1/users", - [{"status_code": mock_response["status_code"], "json": mock_response["json"], "headers": mock_response["headers"]}], - ) - - stream = Users(config={"authenticator": "auth"}) - stream.page_size = initial_page_size - response = requests.get("https://api.notion.com/v1/users") - - stream.should_retry(response=response) - - assert stream.page_size == expected_page_size - - -def test_users_record_transformer(): - stream = Users(config=MagicMock()) - response_record = { - "object": "user", "id": "id", "name": "Airbyte", "avatar_url": "some url", "type": "bot", - "bot": {"owner": {"type": "user", "user": {"object": "user", "id": "id", "name": "Test User", "avatar_url": None, "type": "person", - "person": {"email": "email"}}}, "workspace_name": "test"} - } - expected_record = { - "object": "user", "id": "id", "name": "Airbyte", "avatar_url": "some url", "type": "bot", - "bot": {"owner": {"type": "user", "info": {"object": "user", "id": "id", "name": "Test User", "avatar_url": None, "type": "person", - "person": {"email": "email"}}}, "workspace_name": "test"} - } - assert stream.transform(response_record) == expected_record - - -def test_block_record_transformer(): - stream = Blocks(parent=None, config=MagicMock()) - response_record = { - "object": "block", "id": "id", "parent": {"type": "page_id", "page_id": "id"}, "created_time": "2021-10-19T13:33:00.000Z", "last_edited_time": "2021-10-19T13:33:00.000Z", - "created_by": {"object": "user", "id": "id"}, "last_edited_by": {"object": "user", "id": "id"}, "has_children": False, "archived": False, "type": "paragraph", - "paragraph": {"rich_text": [{"type": "text", "text": {"content": "test", "link": None}, "annotations": {"bold": False, "italic": False, "strikethrough": False, "underline": False, "code": False, "color": "default"}, "plain_text": "test", "href": None}, - {"type": "text", "text": {"content": "@", "link": None}, "annotations": {"bold": False, "italic": False, "strikethrough": False, "underline": False, "code": True, "color": "default"}, "plain_text": "@", "href": None}, - {"type": "text", "text": {"content": "test", "link": None}, "annotations": {"bold": False, "italic": False, "strikethrough": False, "underline": False, "code": False, "color": "default"}, "plain_text": "test", "href": None}, - {"type": "mention", "mention": {"type": "page", "page": {"id": "id"}}, "annotations": {"bold": False, "italic": False, "strikethrough": False, "underline": False, "code": False, "color": "default"}, - "plain_text": "test", "href": "https://www.notion.so/id"}], "color": "default"} - } - expected_record = { - "object": "block", "id": "id", "parent": {"type": "page_id", "page_id": "id"}, "created_time": "2021-10-19T13:33:00.000Z", "last_edited_time": "2021-10-19T13:33:00.000Z", - "created_by": {"object": "user", "id": "id"}, "last_edited_by": {"object": "user", "id": "id"}, "has_children": False, "archived": False, "type": "paragraph", - "paragraph": {"rich_text": [{"type": "text", "text": {"content": "test", "link": None}, "annotations":{"bold": False, "italic": False, "strikethrough": False, "underline": False, "code": False, "color": "default"}, "plain_text":"test", "href": None}, - {"type": "text", "text": {"content": "@", "link": None}, "annotations": {"bold": False, "italic": False, "strikethrough": False, "underline": False, "code": True, "color": "default"}, "plain_text": "@", "href": None}, - {"type": "text", "text": {"content": "test", "link": None}, "annotations": {"bold": False, "italic": False, "strikethrough": False, "underline": False, "code": False, "color": "default"}, "plain_text": "test", "href": None}, - {"type": "mention", "mention": {"type": "page", "info": {"id": "id"}}, "annotations": {"bold": False, "italic": False, "strikethrough": False, "underline": False, "code": False, "color": "default"}, "plain_text": "test", "href": "https://www.notion.so/id"}], - "color": "default"} - } - assert stream.transform(response_record) == expected_record diff --git a/airbyte-integrations/connectors/source-openweather/.dockerignore b/airbyte-integrations/connectors/source-openweather/.dockerignore deleted file mode 100644 index 37cf7cb2b27c4..0000000000000 --- a/airbyte-integrations/connectors/source-openweather/.dockerignore +++ /dev/null @@ -1,6 +0,0 @@ -* -!Dockerfile -!main.py -!source_openweather -!setup.py -!secrets diff --git a/airbyte-integrations/connectors/source-openweather/Dockerfile b/airbyte-integrations/connectors/source-openweather/Dockerfile deleted file mode 100644 index 6c1b15ff1af72..0000000000000 --- a/airbyte-integrations/connectors/source-openweather/Dockerfile +++ /dev/null @@ -1,38 +0,0 @@ -FROM python:3.9.11-alpine3.15 as base - -# build and load all requirements -FROM base as builder -WORKDIR /airbyte/integration_code - -# upgrade pip to the latest version -RUN apk --no-cache upgrade \ - && pip install --upgrade pip \ - && apk --no-cache add tzdata build-base - - -COPY setup.py ./ -# install necessary packages to a temporary folder -RUN pip install --prefix=/install . - -# build a clean environment -FROM base -WORKDIR /airbyte/integration_code - -# copy all loaded and built libraries to a pure basic image -COPY --from=builder /install /usr/local -# add default timezone settings -COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime -RUN echo "Etc/UTC" > /etc/timezone - -# bash is installed for more convenient debugging. -RUN apk --no-cache add bash - -# copy payload code only -COPY main.py ./ -COPY source_openweather ./source_openweather - -ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] - -LABEL io.airbyte.version=0.2.0 -LABEL io.airbyte.name=airbyte/source-openweather diff --git a/airbyte-integrations/connectors/source-openweather/metadata.yaml b/airbyte-integrations/connectors/source-openweather/metadata.yaml index ff3d57350d290..0a1fa0f3dbe13 100644 --- a/airbyte-integrations/connectors/source-openweather/metadata.yaml +++ b/airbyte-integrations/connectors/source-openweather/metadata.yaml @@ -13,8 +13,10 @@ data: enabled: false connectorSubtype: api connectorType: source + connectorBuildOptions: + baseImage: docker.io/airbyte/python-connector-base:1.2.0@sha256:c22a9d97464b69d6ef01898edf3f8612dc11614f05a84984451dde195f337db9 definitionId: 561d7787-b45e-4f3b-af58-0163c3ba9d5a - dockerImageTag: 0.2.0 + dockerImageTag: 0.2.1 dockerRepository: airbyte/source-openweather githubIssueLabel: source-openweather icon: openweather.svg diff --git a/airbyte-integrations/connectors/source-openweather/poetry.lock b/airbyte-integrations/connectors/source-openweather/poetry.lock new file mode 100644 index 0000000000000..00866a3a07a8c --- /dev/null +++ b/airbyte-integrations/connectors/source-openweather/poetry.lock @@ -0,0 +1,1032 @@ +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. + +[[package]] +name = "airbyte-cdk" +version = "0.74.0" +description = "A framework for writing Airbyte Connectors." +optional = false +python-versions = ">=3.9" +files = [ + {file = "airbyte-cdk-0.74.0.tar.gz", hash = "sha256:74241a055c205403a951383f43801067b7f451370e14d553d13d0cc476cbfff7"}, + {file = "airbyte_cdk-0.74.0-py3-none-any.whl", hash = "sha256:7e5b201d69ec0e7daab7e627dbc6add4dbba4a2f779132e86aaf6713650ff4d5"}, +] + +[package.dependencies] +airbyte-protocol-models = "0.5.1" +backoff = "*" +cachetools = "*" +Deprecated = ">=1.2,<2.0" +dpath = ">=2.0.1,<2.1.0" +genson = "1.2.2" +isodate = ">=0.6.1,<0.7.0" +Jinja2 = ">=3.1.2,<3.2.0" +jsonref = ">=0.2,<1.0" +jsonschema = ">=3.2.0,<3.3.0" +pendulum = "<3.0.0" +pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" +python-dateutil = "*" +PyYAML = ">=6.0.1" +requests = "*" +requests-cache = "*" +wcmatch = "8.4" + +[package.extras] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] +vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] + +[[package]] +name = "airbyte-protocol-models" +version = "0.5.1" +description = "Declares the Airbyte Protocol." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, + {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, +] + +[package.dependencies] +pydantic = ">=1.9.2,<2.0.0" + +[[package]] +name = "atomicwrites" +version = "1.4.1" +description = "Atomic file writes." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, +] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "bracex" +version = "2.4" +description = "Bash style brace expander." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + +[[package]] +name = "cachetools" +version = "5.3.3" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "dpath" +version = "2.0.8" +description = "Filesystem-like pathing and searching for dictionaries" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, + {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "genson" +version = "1.2.2" +description = "GenSON is a powerful, user-friendly JSON Schema generator." +optional = false +python-versions = "*" +files = [ + {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, +] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jsonref" +version = "0.3.0" +description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." +optional = false +python-versions = ">=3.3,<4.0" +files = [ + {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, + {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, +] + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0" +setuptools = "*" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "packaging" +version = "24.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, +] + +[[package]] +name = "pendulum" +version = "2.1.2" +description = "Python datetimes made easy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, +] + +[package.dependencies] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] + +[[package]] +name = "pydantic" +version = "1.10.15" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50f1666a9940d3d68683c9d96e39640f709d7a72ff8702987dab1761036206bb"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82790d4753ee5d00739d6cb5cf56bceb186d9d6ce134aca3ba7befb1eedbc2c8"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d207d5b87f6cbefbdb1198154292faee8017d7495a54ae58db06762004500d00"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e49db944fad339b2ccb80128ffd3f8af076f9f287197a480bf1e4ca053a866f0"}, + {file = "pydantic-1.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:d3b5c4cbd0c9cb61bbbb19ce335e1f8ab87a811f6d589ed52b0254cf585d709c"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3d5731a120752248844676bf92f25a12f6e45425e63ce22e0849297a093b5b0"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c365ad9c394f9eeffcb30a82f4246c0006417f03a7c0f8315d6211f25f7cb654"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3287e1614393119c67bd4404f46e33ae3be3ed4cd10360b48d0a4459f420c6a3"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be51dd2c8596b25fe43c0a4a59c2bee4f18d88efb8031188f9e7ddc6b469cf44"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6a51a1dd4aa7b3f1317f65493a182d3cff708385327c1c82c81e4a9d6d65b2e4"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4e316e54b5775d1eb59187f9290aeb38acf620e10f7fd2f776d97bb788199e53"}, + {file = "pydantic-1.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:0d142fa1b8f2f0ae11ddd5e3e317dcac060b951d605fda26ca9b234b92214986"}, + {file = "pydantic-1.10.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7ea210336b891f5ea334f8fc9f8f862b87acd5d4a0cbc9e3e208e7aa1775dabf"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3453685ccd7140715e05f2193d64030101eaad26076fad4e246c1cc97e1bb30d"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bea1f03b8d4e8e86702c918ccfd5d947ac268f0f0cc6ed71782e4b09353b26f"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:af9850d98fc21e5bc24ea9e35dd80a29faf6462c608728a110c0a30b595e58b7"}, + {file = "pydantic-1.10.15-cp37-cp37m-win_amd64.whl", hash = "sha256:d31ee5b14a82c9afe2bd26aaa405293d4237d0591527d9129ce36e58f19f95c1"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5e09c19df304b8123938dc3c53d3d3be6ec74b9d7d0d80f4f4b5432ae16c2022"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7ac9237cd62947db00a0d16acf2f3e00d1ae9d3bd602b9c415f93e7a9fc10528"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:584f2d4c98ffec420e02305cf675857bae03c9d617fcfdc34946b1160213a948"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbc6989fad0c030bd70a0b6f626f98a862224bc2b1e36bfc531ea2facc0a340c"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d573082c6ef99336f2cb5b667b781d2f776d4af311574fb53d908517ba523c22"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6bd7030c9abc80134087d8b6e7aa957e43d35714daa116aced57269a445b8f7b"}, + {file = "pydantic-1.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:3350f527bb04138f8aff932dc828f154847fbdc7a1a44c240fbfff1b57f49a12"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51d405b42f1b86703555797270e4970a9f9bd7953f3990142e69d1037f9d9e51"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a980a77c52723b0dc56640ced396b73a024d4b74f02bcb2d21dbbac1debbe9d0"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f1a1fb467d3f49e1708a3f632b11c69fccb4e748a325d5a491ddc7b5d22383"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:676ed48f2c5bbad835f1a8ed8a6d44c1cd5a21121116d2ac40bd1cd3619746ed"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92229f73400b80c13afcd050687f4d7e88de9234d74b27e6728aa689abcf58cc"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2746189100c646682eff0bce95efa7d2e203420d8e1c613dc0c6b4c1d9c1fde4"}, + {file = "pydantic-1.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:394f08750bd8eaad714718812e7fab615f873b3cdd0b9d84e76e51ef3b50b6b7"}, + {file = "pydantic-1.10.15-py3-none-any.whl", hash = "sha256:28e552a060ba2740d0d2aabe35162652c1459a0b9069fe0db7f4ee0e18e74d58"}, + {file = "pydantic-1.10.15.tar.gz", hash = "sha256:ca832e124eda231a60a041da4f013e3ff24949d94a01154b137fc2f2a43c3ffb"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + +[[package]] +name = "pyrsistent" +version = "0.20.0" +description = "Persistent/Functional/Immutable data structures" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, +] + +[[package]] +name = "pytest" +version = "6.2.5" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, + {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, +] + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +toml = "*" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] + +[[package]] +name = "pytest-mock" +version = "3.14.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, +] + +[package.dependencies] +pytest = ">=6.2.5" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-cache" +version = "1.2.0" +description = "A persistent cache for python requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, +] + +[package.dependencies] +attrs = ">=21.2" +cattrs = ">=22.2" +platformdirs = ">=2.5" +requests = ">=2.22" +url-normalize = ">=1.4" +urllib3 = ">=1.25.5" + +[package.extras] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] +bson = ["bson (>=0.5)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] +dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] +json = ["ujson (>=5.4)"] +mongodb = ["pymongo (>=3)"] +redis = ["redis (>=3)"] +security = ["itsdangerous (>=2.0)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "requests-mock" +version = "1.12.1" +description = "Mock out responses from the requests package" +optional = false +python-versions = ">=3.5" +files = [ + {file = "requests-mock-1.12.1.tar.gz", hash = "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401"}, + {file = "requests_mock-1.12.1-py2.py3-none-any.whl", hash = "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563"}, +] + +[package.dependencies] +requests = ">=2.22,<3" + +[package.extras] +fixture = ["fixtures"] + +[[package]] +name = "setuptools" +version = "69.2.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "typing-extensions" +version = "4.11.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, + {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, +] + +[[package]] +name = "url-normalize" +version = "1.4.3" +description = "URL normalization for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, + {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wcmatch" +version = "8.4" +description = "Wildcard/glob file name matcher." +optional = false +python-versions = ">=3.7" +files = [ + {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, + {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, +] + +[package.dependencies] +bracex = ">=2.1.1" + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9,<3.12" +content-hash = "f2e54e7857735a9e46d63807a60b76373823881c9d6ff352105d75b82119e297" diff --git a/airbyte-integrations/connectors/source-openweather/pyproject.toml b/airbyte-integrations/connectors/source-openweather/pyproject.toml new file mode 100644 index 0000000000000..f8945611bc0fb --- /dev/null +++ b/airbyte-integrations/connectors/source-openweather/pyproject.toml @@ -0,0 +1,29 @@ +[build-system] +requires = [ "poetry-core>=1.0.0",] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +version = "0.2.1" +name = "source-openweather" +description = "Source implementation for Open Weather." +authors = [ "Airbyte ",] +license = "MIT" +readme = "README.md" +documentation = "https://docs.airbyte.com/integrations/sources/orb" +homepage = "https://airbyte.com" +repository = "https://github.com/airbytehq/airbyte" +[[tool.poetry.packages]] +include = "source_openweather" + +[tool.poetry.dependencies] +python = "^3.9,<3.12" +airbyte-cdk = "^0.74.0" + +[tool.poetry.scripts] +source-openweather = "source_openweather.run:run" + +[tool.poetry.group.dev.dependencies] +pytest = "^6.2" +requests-mock = "^1.11.0" +pytest-mock = "^3.6.1" + diff --git a/airbyte-integrations/connectors/source-openweather/setup.py b/airbyte-integrations/connectors/source-openweather/setup.py deleted file mode 100644 index 2f978664b0578..0000000000000 --- a/airbyte-integrations/connectors/source-openweather/setup.py +++ /dev/null @@ -1,47 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = [ - "airbyte-cdk~=0.1", -] - -TEST_REQUIREMENTS = [ - "requests-mock~=1.9.3", - "pytest~=6.2", - "pytest-mock~=3.6.1", - "connector-acceptance-test", -] - -setup( - entry_points={ - "console_scripts": [ - "source-openweather=source_openweather.run:run", - ], - }, - name="source_openweather", - description="Source implementation for Openweather.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={ - "": [ - # Include yaml files in the package (if any) - "*.yml", - "*.yaml", - # Include all json files in the package, up to 4 levels deep - "*.json", - "*/*.json", - "*/*/*.json", - "*/*/*/*.json", - "*/*/*/*/*.json", - ] - }, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/source-openweather/source_openweather/manifest.yaml b/airbyte-integrations/connectors/source-openweather/source_openweather/manifest.yaml index 7fd4496d18524..b74f3ee6732e9 100644 --- a/airbyte-integrations/connectors/source-openweather/source_openweather/manifest.yaml +++ b/airbyte-integrations/connectors/source-openweather/source_openweather/manifest.yaml @@ -87,18 +87,19 @@ spec: properties: lat: type: string - pattern: "^[-]?\\d{1,2}(\\.\\d+)?$" + pattern: "^(\\+|-)?(?:90(?:(?:\\.0+)?)|(?:[0-9]|[1-8][0-9])(?:(?:\\.[0-9]+)?))$" description: "Latitude, decimal (-90; 90). If you need the geocoder to automatic convert city names and zip-codes to geo coordinates and the other way around, please use our Geocoding API" examples: - "45.7603" - "-21.249107858038816" lon: type: string - pattern: "^[-]?\\d{1,2}(\\.\\d+)?$" + pattern: "^(\\+|-)?(?:180(?:(?:\\.0+)?)|(?:[0-9]|[1-9][0-9]|1[0-7][0-9])(?:(?:\\.[0-9]+)?))$" description: "Longitude, decimal (-180; 180). If you need the geocoder to automatic convert city names and zip-codes to geo coordinates and the other way around, please use our Geocoding API" examples: - "4.835659" - "-70.39482074115321" + - "180.000" appid: type: string description: "API KEY" diff --git a/airbyte-integrations/connectors/source-orb/metadata.yaml b/airbyte-integrations/connectors/source-orb/metadata.yaml index 1e701b726ebf8..914a6f74ccf7f 100644 --- a/airbyte-integrations/connectors/source-orb/metadata.yaml +++ b/airbyte-integrations/connectors/source-orb/metadata.yaml @@ -4,7 +4,7 @@ data: connectorSubtype: api connectorType: source definitionId: 7f0455fb-4518-4ec0-b7a3-d808bf8081cc - dockerImageTag: 1.1.1 + dockerImageTag: 1.2.0 dockerRepository: airbyte/source-orb githubIssueLabel: source-orb icon: orb.svg diff --git a/airbyte-integrations/connectors/source-orb/poetry.lock b/airbyte-integrations/connectors/source-orb/poetry.lock index 26a785d5b49c6..52c6abe7dcab0 100644 --- a/airbyte-integrations/connectors/source-orb/poetry.lock +++ b/airbyte-integrations/connectors/source-orb/poetry.lock @@ -2,13 +2,13 @@ [[package]] name = "airbyte-cdk" -version = "0.68.4" +version = "0.72.1" description = "A framework for writing Airbyte Connectors." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte-cdk-0.68.4.tar.gz", hash = "sha256:da4d923d9dac9f13fbd2e89a0094c58d440dac85552e8084d19cbb0a73efd9d7"}, - {file = "airbyte_cdk-0.68.4-py3-none-any.whl", hash = "sha256:3b6a9b6adf81a1d9c2d40acecfe9016e73197dd95f1e6027423aeee85d3a7ee1"}, + {file = "airbyte-cdk-0.72.1.tar.gz", hash = "sha256:1dbd0a11f3784cfdd5afa9f40315c9a6123e803be91f9f861642a78e7ee14cd9"}, + {file = "airbyte_cdk-0.72.1-py3-none-any.whl", hash = "sha256:849077805442286de99f589ecba4be82491a3d9d3f516ce1a8b0cbaf303db9a4"}, ] [package.dependencies] @@ -303,13 +303,13 @@ test = ["pytest (>=6)"] [[package]] name = "faker" -version = "24.0.0" +version = "24.3.0" description = "Faker is a Python package that generates fake data for you." optional = false python-versions = ">=3.8" files = [ - {file = "Faker-24.0.0-py3-none-any.whl", hash = "sha256:2456d674f40bd51eb3acbf85221277027822e529a90cc826453d9a25dff932b1"}, - {file = "Faker-24.0.0.tar.gz", hash = "sha256:ea6f784c40730de0f77067e49e78cdd590efb00bec3d33f577492262206c17fc"}, + {file = "Faker-24.3.0-py3-none-any.whl", hash = "sha256:9978025e765ba79f8bf6154c9630a9c2b7f9c9b0f175d4ad5e04b19a82a8d8d6"}, + {file = "Faker-24.3.0.tar.gz", hash = "sha256:5fb5aa9749d09971e04a41281ae3ceda9414f683d4810a694f8a8eebb8f9edec"}, ] [package.dependencies] @@ -481,13 +481,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -903,18 +903,18 @@ tests = ["coverage (>=3.7.1,<6.0.0)", "flake8", "mypy", "pytest (>=4.6)", "pytes [[package]] name = "setuptools" -version = "69.1.1" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.1-py3-none-any.whl", hash = "sha256:02fa291a0471b3a18b2b2481ed902af520c69e8ae0919c13da936542754b4c56"}, - {file = "setuptools-69.1.1.tar.gz", hash = "sha256:5c0806c7d9af348e6dd3777b4f4dbb42c7ad85b190104837488eab9a7c945cf8"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] @@ -1077,4 +1077,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "098ea5d9961a0222868250b2f49e4800c4166c74038591d896b564764a7b0703" +content-hash = "cd9333e494617c61915a157183e5601205ddf3b7e8782c1b1b80f49716499bc4" diff --git a/airbyte-integrations/connectors/source-orb/pyproject.toml b/airbyte-integrations/connectors/source-orb/pyproject.toml index 12519c1662f69..ae680fdf96c90 100644 --- a/airbyte-integrations/connectors/source-orb/pyproject.toml +++ b/airbyte-integrations/connectors/source-orb/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "1.1.1" +version = "1.2.0" name = "source-orb" description = "Source implementation for Orb." authors = [ "Airbyte ",] @@ -17,7 +17,7 @@ include = "source_orb" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "==0.68.4" +airbyte-cdk = "==0.72.1" pendulum = "==2.1.2" [tool.poetry.scripts] diff --git a/airbyte-integrations/connectors/source-orb/source_orb/source.py b/airbyte-integrations/connectors/source-orb/source_orb/source.py index a33620be7c4a5..e16aca7d7fde9 100644 --- a/airbyte-integrations/connectors/source-orb/source_orb/source.py +++ b/airbyte-integrations/connectors/source-orb/source_orb/source.py @@ -27,9 +27,10 @@ class OrbStream(HttpStream, ABC): page_size = 50 url_base = ORB_API_BASE_URL - def __init__(self, start_date: Optional[pendulum.DateTime] = None, **kwargs): + def __init__(self, start_date: Optional[pendulum.DateTime] = None, end_date: Optional[pendulum.DateTime] = None, **kwargs): super().__init__(**kwargs) self.start_date = start_date + self.end_date = end_date def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: """ @@ -143,6 +144,10 @@ def request_params(self, stream_state: Mapping[str, Any], **kwargs) -> MutableMa # This may (reasonably) override the existing `created_at[gte]` set based on the start_date # of the stream, as configured. params[f"{self.cursor_field}[gte]"] = state_based_start_timestamp + + if self.end_date: + params[f"{self.cursor_field}[lte]"] = self.end_date + return params @@ -151,6 +156,8 @@ class Customers(IncrementalOrbStream): API Docs: https://docs.withorb.com/reference/list-customers """ + use_cache = True + def path(self, **kwargs) -> str: return "customers" @@ -590,14 +597,11 @@ def enrich_ledger_entries_with_event_data(self, ledger_entries): # Build up a list of the subset of ledger entries we are expected # to enrich with event metadata. event_id_to_ledger_entries = {} - min_created_at_timestamp = pendulum.now() - max_created_at_timestamp = pendulum.now() for entry in ledger_entries: maybe_event_id: Optional[str] = entry.get("event_id") if maybe_event_id: - min_created_at_timestamp = min(min_created_at_timestamp, pendulum.parse(entry["created_at"])) - max_created_at_timestamp = max(max_created_at_timestamp, pendulum.parse(entry["created_at"])) + created_at_timestamp = pendulum.parse(entry.get("created_at", pendulum.now())) # There can be multiple entries with the same event ID event_id_to_ledger_entries[maybe_event_id] = event_id_to_ledger_entries.get(maybe_event_id, []) + [entry] @@ -628,8 +632,8 @@ def modify_ledger_entry_schema(ledger_entry): # event_ids to filter on request_filter_json = { "event_ids": list(event_id_to_ledger_entries), - "timeframe_start": min_created_at_timestamp.to_iso8601_string(), - "timeframe_end": max_created_at_timestamp.add(minutes=1).to_iso8601_string(), + "timeframe_start": created_at_timestamp.to_iso8601_string(), + "timeframe_end": created_at_timestamp.add(days=30).to_iso8601_string(), } # Prepare request with self._session, which should @@ -638,7 +642,11 @@ def modify_ledger_entry_schema(ledger_entry): prepared_request = self._session.prepare_request(requests.Request(**args)) events_response: requests.Response = self._session.send(prepared_request) # Error for invalid responses - events_response.raise_for_status() + if events_response.status_code != 200: + self.logger.info(request_filter_json) + self.logger.error(events_response.text) + events_response.raise_for_status() + paginated_events_response_body = events_response.json() if paginated_events_response_body["pagination_metadata"]["has_more"]: @@ -730,14 +738,13 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: subscription_usage_grouping_key = config.get("subscription_usage_grouping_key") plan_id = config.get("plan_id") start_date = to_datetime(config.get("start_date")) - # this field is not exposed to spec, used only for testing purposes end_date = to_datetime(config.get("end_date")) if not self.input_keys_mutually_exclusive(string_event_properties_keys, numeric_event_properties_keys): raise ValueError("Supplied property keys for string and numeric valued property values must be mutually exclusive.") return [ - Customers(authenticator=authenticator, lookback_window_days=lookback_window, start_date=start_date), + Customers(authenticator=authenticator, lookback_window_days=lookback_window, start_date=start_date, end_date=end_date), Subscriptions(authenticator=authenticator, lookback_window_days=lookback_window, start_date=start_date), Plans(authenticator=authenticator, lookback_window_days=lookback_window, start_date=start_date), Invoices(authenticator=authenticator, lookback_window_days=lookback_window), diff --git a/airbyte-integrations/connectors/source-orb/source_orb/spec.json b/airbyte-integrations/connectors/source-orb/source_orb/spec.json index 423d85cf64849..14a413fd4675b 100644 --- a/airbyte-integrations/connectors/source-orb/source_orb/spec.json +++ b/airbyte-integrations/connectors/source-orb/source_orb/spec.json @@ -22,13 +22,21 @@ "examples": ["2022-03-01T00:00:00Z"], "order": 2 }, + "end_date": { + "type": "string", + "title": "End Date", + "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$", + "description": "UTC date and time in the format 2022-03-01T00:00:00Z. Any data with created_at after this data will not be synced. For Subscription Usage, this becomes the `timeframe_start` API parameter.", + "examples": ["2024-03-01T00:00:00Z"], + "order": 3 + }, "lookback_window_days": { "type": "integer", "title": "Lookback Window (in days)", "default": 0, "minimum": 0, "description": "When set to N, the connector will always refresh resources created within the past N days. By default, updated objects that are not newly created are not incrementally synced.", - "order": 3 + "order": 4 }, "string_event_properties_keys": { "type": "array", @@ -37,7 +45,7 @@ }, "title": "Event properties keys (string values)", "description": "Property key names to extract from all events, in order to enrich ledger entries corresponding to an event deduction.", - "order": 4 + "order": 5 }, "numeric_event_properties_keys": { "type": "array", @@ -46,7 +54,7 @@ }, "title": "Event properties keys (numeric values)", "description": "Property key names to extract from all events, in order to enrich ledger entries corresponding to an event deduction.", - "order": 5 + "order": 6 }, "subscription_usage_grouping_key": { "type": "string", diff --git a/airbyte-integrations/connectors/source-paypal-transaction/metadata.yaml b/airbyte-integrations/connectors/source-paypal-transaction/metadata.yaml index da379fa294754..a34076dafe518 100644 --- a/airbyte-integrations/connectors/source-paypal-transaction/metadata.yaml +++ b/airbyte-integrations/connectors/source-paypal-transaction/metadata.yaml @@ -11,12 +11,13 @@ data: connectorSubtype: api connectorType: source definitionId: d913b0f2-cc51-4e55-a44c-8ba1697b9239 - dockerImageTag: 2.4.0 + dockerImageTag: 2.5.1 dockerRepository: airbyte/source-paypal-transaction documentationUrl: https://docs.airbyte.com/integrations/sources/paypal-transaction githubIssueLabel: source-paypal-transaction icon: paypal.svg license: MIT + maxSecondsBetweenMessages: 5400 name: Paypal Transaction remoteRegistries: pypi: diff --git a/airbyte-integrations/connectors/source-paypal-transaction/poetry.lock b/airbyte-integrations/connectors/source-paypal-transaction/poetry.lock index 4200fc8953a19..4db804fca46be 100644 --- a/airbyte-integrations/connectors/source-paypal-transaction/poetry.lock +++ b/airbyte-integrations/connectors/source-paypal-transaction/poetry.lock @@ -1,14 +1,14 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.63.2" +version = "0.70.1" description = "A framework for writing Airbyte Connectors." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte-cdk-0.63.2.tar.gz", hash = "sha256:b2edc160f560352a816f3a266b5dfa6dfe37868add1e3a0a2628eb19ba771ed1"}, - {file = "airbyte_cdk-0.63.2-py3-none-any.whl", hash = "sha256:8698cb94514f35577123520954503cb2da407423af109dffd03644ba8b0093cd"}, + {file = "airbyte-cdk-0.70.1.tar.gz", hash = "sha256:fd27815350b8155fc42afd43d005a8d321c9f309c1adaedabbb0b74e9788648f"}, + {file = "airbyte_cdk-0.70.1-py3-none-any.whl", hash = "sha256:856b51c988c8e348f53df2806d8bf929919f220f5784696cf9a9578d7eb16e72"}, ] [package.dependencies] @@ -32,8 +32,8 @@ requests-cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] @@ -94,13 +94,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -546,13 +546,13 @@ attrs = ">=19.2.0" [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -752,13 +752,13 @@ files = [ [[package]] name = "pytest" -version = "8.0.1" +version = "8.1.1" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.0.1-py3-none-any.whl", hash = "sha256:3e4f16fe1c0a9dc9d9389161c127c3edc5d810c38d6793042fb81d9f48a59fca"}, - {file = "pytest-8.0.1.tar.gz", hash = "sha256:267f6563751877d772019b13aacbe4e860d73fe8f651f28112e9ac37de7513ae"}, + {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"}, + {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"}, ] [package.dependencies] @@ -766,11 +766,11 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" -pluggy = ">=1.3.0,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} +pluggy = ">=1.4,<2.0" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-mock" @@ -791,13 +791,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -964,19 +964,19 @@ urllib3 = {version = ">=1.26,<3", extras = ["socks"]} [[package]] name = "setuptools" -version = "69.1.0" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -991,13 +991,13 @@ files = [ [[package]] name = "sniffio" -version = "1.3.0" +version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" files = [ - {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, - {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] [[package]] @@ -1060,13 +1060,13 @@ wsproto = ">=0.14" [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -1213,4 +1213,4 @@ h11 = ">=0.9.0,<1" [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "3e201a0e17ac40faf9a73f3e2b9e924e0d3993ee8982646436ea6517f8f6cd7b" +content-hash = "5086cb2d8b18a1081b5e2f26f9389a94cc39edb1b81169b6e69ba9658f7c3003" diff --git a/airbyte-integrations/connectors/source-paypal-transaction/pyproject.toml b/airbyte-integrations/connectors/source-paypal-transaction/pyproject.toml index 36ea7e8161e93..89e1fee8171b4 100644 --- a/airbyte-integrations/connectors/source-paypal-transaction/pyproject.toml +++ b/airbyte-integrations/connectors/source-paypal-transaction/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "2.4.0" +version = "2.5.1" name = "source-paypal-transaction" description = "Source implementation for Paypal Transaction." authors = [ "Airbyte ",] @@ -17,7 +17,7 @@ include = "source_paypal_transaction" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "==0.63.2" +airbyte-cdk = "^0" [tool.poetry.scripts] diff --git a/airbyte-integrations/connectors/source-paypal-transaction/source_paypal_transaction/schemas/list_disputes.json b/airbyte-integrations/connectors/source-paypal-transaction/source_paypal_transaction/schemas/list_disputes.json index 8d4c9ff57a974..01fa01acdda66 100644 --- a/airbyte-integrations/connectors/source-paypal-transaction/source_paypal_transaction/schemas/list_disputes.json +++ b/airbyte-integrations/connectors/source-paypal-transaction/source_paypal_transaction/schemas/list_disputes.json @@ -27,6 +27,36 @@ "method": { "type": ["null", "string"] } } } + }, + "disputed_transactions": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "buyer_transaction_id": { + "type": ["null", "string"] + }, + "seller": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "merchant_id": { + "type": ["null", "string"] + } + } + } + } + } + }, + "outcome": { + "type": ["null", "string"] + }, + "dispute_life_cycle_stage": { + "type": ["null", "string"] + }, + "dispute_channel": { + "type": ["null", "string"] } } } diff --git a/airbyte-integrations/connectors/source-pinterest/icon.svg b/airbyte-integrations/connectors/source-pinterest/icon.svg index aa9ac6080415b..310a8bf095e2b 100644 --- a/airbyte-integrations/connectors/source-pinterest/icon.svg +++ b/airbyte-integrations/connectors/source-pinterest/icon.svg @@ -1 +1,5 @@ - \ No newline at end of file + + + + + diff --git a/airbyte-integrations/connectors/source-pinterest/metadata.yaml b/airbyte-integrations/connectors/source-pinterest/metadata.yaml index 0ba688fc8f1bd..ae3beb7c8ffba 100644 --- a/airbyte-integrations/connectors/source-pinterest/metadata.yaml +++ b/airbyte-integrations/connectors/source-pinterest/metadata.yaml @@ -5,13 +5,14 @@ data: connectorSubtype: api connectorType: source definitionId: 5cb7e5fe-38c2-11ec-8d3d-0242ac130003 - dockerImageTag: 1.2.0 + dockerImageTag: 1.3.2 dockerRepository: airbyte/source-pinterest connectorBuildOptions: baseImage: docker.io/airbyte/python-connector-base:1.1.0@sha256:bd98f6505c6764b1b5f99d3aedc23dfc9e9af631a62533f60eb32b1d3dbab20c githubIssueLabel: source-pinterest icon: pinterest.svg license: MIT + maxSecondsBetweenMessages: 86400 name: Pinterest remoteRegistries: pypi: diff --git a/airbyte-integrations/connectors/source-pinterest/poetry.lock b/airbyte-integrations/connectors/source-pinterest/poetry.lock index f3f3fee593a6a..083f5e8fde3f9 100644 --- a/airbyte-integrations/connectors/source-pinterest/poetry.lock +++ b/airbyte-integrations/connectors/source-pinterest/poetry.lock @@ -1,40 +1,39 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.63.2" +version = "0.78.6" description = "A framework for writing Airbyte Connectors." optional = false -python-versions = ">=3.8" +python-versions = "<4.0,>=3.9" files = [ - {file = "airbyte-cdk-0.63.2.tar.gz", hash = "sha256:b2edc160f560352a816f3a266b5dfa6dfe37868add1e3a0a2628eb19ba771ed1"}, - {file = "airbyte_cdk-0.63.2-py3-none-any.whl", hash = "sha256:8698cb94514f35577123520954503cb2da407423af109dffd03644ba8b0093cd"}, + {file = "airbyte_cdk-0.78.6-py3-none-any.whl", hash = "sha256:e5f44c6da6d5b5d6f3f6a7f41a3f4a5e2dfc6fefb4c6823af6302c34c6fb4a87"}, + {file = "airbyte_cdk-0.78.6.tar.gz", hash = "sha256:0178f3cefa705f600d51f09e1313024a89cd1c99f2f1f796e8e0181d8e02ad2f"}, ] [package.dependencies] airbyte-protocol-models = "0.5.1" backoff = "*" cachetools = "*" -Deprecated = ">=1.2,<2.0" +Deprecated = ">=1.2,<1.3" dpath = ">=2.0.1,<2.1.0" genson = "1.2.2" isodate = ">=0.6.1,<0.7.0" Jinja2 = ">=3.1.2,<3.2.0" -jsonref = ">=0.2,<1.0" +jsonref = ">=0.2,<0.3" jsonschema = ">=3.2.0,<3.3.0" pendulum = "<3.0.0" pydantic = ">=1.10.8,<2.0.0" pyrate-limiter = ">=3.1.0,<3.2.0" python-dateutil = "*" -PyYAML = ">=6.0.1" +PyYAML = ">=6.0.1,<7.0.0" requests = "*" -requests-cache = "*" +requests_cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] [[package]] @@ -104,13 +103,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -366,13 +365,13 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jsonref" -version = "0.3.0" -description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." +version = "0.2" +description = "An implementation of JSON Reference for Python" optional = false -python-versions = ">=3.3,<4.0" +python-versions = "*" files = [ - {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, - {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, ] [[package]] @@ -467,13 +466,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -553,47 +552,47 @@ files = [ [[package]] name = "pydantic" -version = "1.10.14" +version = "1.10.15" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, - {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, - {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, - {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, - {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, - {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, - {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, - {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, - {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, - {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, - {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, - {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, - {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, - {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, - {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, - {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, - {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, - {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, - {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, - {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, - {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, - {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, - {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, - {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, - {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, - {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, - {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, - {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, - {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, - {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, - {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, - {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, - {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, - {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, - {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, - {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50f1666a9940d3d68683c9d96e39640f709d7a72ff8702987dab1761036206bb"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82790d4753ee5d00739d6cb5cf56bceb186d9d6ce134aca3ba7befb1eedbc2c8"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d207d5b87f6cbefbdb1198154292faee8017d7495a54ae58db06762004500d00"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e49db944fad339b2ccb80128ffd3f8af076f9f287197a480bf1e4ca053a866f0"}, + {file = "pydantic-1.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:d3b5c4cbd0c9cb61bbbb19ce335e1f8ab87a811f6d589ed52b0254cf585d709c"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3d5731a120752248844676bf92f25a12f6e45425e63ce22e0849297a093b5b0"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c365ad9c394f9eeffcb30a82f4246c0006417f03a7c0f8315d6211f25f7cb654"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3287e1614393119c67bd4404f46e33ae3be3ed4cd10360b48d0a4459f420c6a3"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be51dd2c8596b25fe43c0a4a59c2bee4f18d88efb8031188f9e7ddc6b469cf44"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6a51a1dd4aa7b3f1317f65493a182d3cff708385327c1c82c81e4a9d6d65b2e4"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4e316e54b5775d1eb59187f9290aeb38acf620e10f7fd2f776d97bb788199e53"}, + {file = "pydantic-1.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:0d142fa1b8f2f0ae11ddd5e3e317dcac060b951d605fda26ca9b234b92214986"}, + {file = "pydantic-1.10.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7ea210336b891f5ea334f8fc9f8f862b87acd5d4a0cbc9e3e208e7aa1775dabf"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3453685ccd7140715e05f2193d64030101eaad26076fad4e246c1cc97e1bb30d"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bea1f03b8d4e8e86702c918ccfd5d947ac268f0f0cc6ed71782e4b09353b26f"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:af9850d98fc21e5bc24ea9e35dd80a29faf6462c608728a110c0a30b595e58b7"}, + {file = "pydantic-1.10.15-cp37-cp37m-win_amd64.whl", hash = "sha256:d31ee5b14a82c9afe2bd26aaa405293d4237d0591527d9129ce36e58f19f95c1"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5e09c19df304b8123938dc3c53d3d3be6ec74b9d7d0d80f4f4b5432ae16c2022"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7ac9237cd62947db00a0d16acf2f3e00d1ae9d3bd602b9c415f93e7a9fc10528"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:584f2d4c98ffec420e02305cf675857bae03c9d617fcfdc34946b1160213a948"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbc6989fad0c030bd70a0b6f626f98a862224bc2b1e36bfc531ea2facc0a340c"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d573082c6ef99336f2cb5b667b781d2f776d4af311574fb53d908517ba523c22"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6bd7030c9abc80134087d8b6e7aa957e43d35714daa116aced57269a445b8f7b"}, + {file = "pydantic-1.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:3350f527bb04138f8aff932dc828f154847fbdc7a1a44c240fbfff1b57f49a12"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51d405b42f1b86703555797270e4970a9f9bd7953f3990142e69d1037f9d9e51"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a980a77c52723b0dc56640ced396b73a024d4b74f02bcb2d21dbbac1debbe9d0"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f1a1fb467d3f49e1708a3f632b11c69fccb4e748a325d5a491ddc7b5d22383"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:676ed48f2c5bbad835f1a8ed8a6d44c1cd5a21121116d2ac40bd1cd3619746ed"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92229f73400b80c13afcd050687f4d7e88de9234d74b27e6728aa689abcf58cc"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2746189100c646682eff0bce95efa7d2e203420d8e1c613dc0c6b4c1d9c1fde4"}, + {file = "pydantic-1.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:394f08750bd8eaad714718812e7fab615f873b3cdd0b9d84e76e51ef3b50b6b7"}, + {file = "pydantic-1.10.15-py3-none-any.whl", hash = "sha256:28e552a060ba2740d0d2aabe35162652c1459a0b9069fe0db7f4ee0e18e74d58"}, + {file = "pydantic-1.10.15.tar.gz", hash = "sha256:ca832e124eda231a60a041da4f013e3ff24949d94a01154b137fc2f2a43c3ffb"}, ] [package.dependencies] @@ -685,30 +684,30 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xm [[package]] name = "pytest-mock" -version = "3.12.0" +version = "3.14.0" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, - {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, ] [package.dependencies] -pytest = ">=5.0" +pytest = ">=6.2.5" [package.extras] dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -838,22 +837,20 @@ yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" -version = "1.11.0" +version = "1.12.1" description = "Mock out responses from the requests package" optional = false -python-versions = "*" +python-versions = ">=3.5" files = [ - {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"}, - {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"}, + {file = "requests-mock-1.12.1.tar.gz", hash = "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401"}, + {file = "requests_mock-1.12.1-py2.py3-none-any.whl", hash = "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563"}, ] [package.dependencies] -requests = ">=2.3,<3" -six = "*" +requests = ">=2.22,<3" [package.extras] fixture = ["fixtures"] -test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] [[package]] name = "responses" @@ -876,19 +873,19 @@ tests = ["coverage (>=3.7.1,<6.0.0)", "flake8", "mypy", "pytest (>=4.6)", "pytes [[package]] name = "setuptools" -version = "69.1.0" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -914,13 +911,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -1050,4 +1047,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "07657dff3674cca2f376658bfa717048c9caff7fa3b6f23144133f1d6dbac33a" +content-hash = "c687cc1569212e11c2b56bd6920299690c161ee833b33e367a68488092a08b06" diff --git a/airbyte-integrations/connectors/source-pinterest/pyproject.toml b/airbyte-integrations/connectors/source-pinterest/pyproject.toml index 486999671d097..edb7211ef17ff 100644 --- a/airbyte-integrations/connectors/source-pinterest/pyproject.toml +++ b/airbyte-integrations/connectors/source-pinterest/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "1.2.0" +version = "1.3.2" name = "source-pinterest" description = "Source implementation for Pinterest." authors = [ "Airbyte ",] @@ -18,7 +18,7 @@ include = "source_pinterest" [tool.poetry.dependencies] python = "^3.9,<3.12" pendulum = "==2.1.2" -airbyte-cdk = "==0.63.2" +airbyte-cdk = "^0" [tool.poetry.scripts] source-pinterest = "source_pinterest.run:run" diff --git a/airbyte-integrations/connectors/source-postgres/acceptance-test-config.yml b/airbyte-integrations/connectors/source-postgres/acceptance-test-config.yml index 0cb56b20af67c..035cef238ff04 100644 --- a/airbyte-integrations/connectors/source-postgres/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-postgres/acceptance-test-config.yml @@ -37,9 +37,11 @@ acceptance_tests: - config_path: "secrets/config.json" expect_records: path: "integration_tests/expected_records.txt" + validate_state_messages: false - config_path: "secrets/config_cdc.json" expect_records: path: "integration_tests/expected_records.txt" + validate_state_messages: false full_refresh: tests: - config_path: "secrets/config.json" diff --git a/airbyte-integrations/connectors/source-postgres/build.gradle b/airbyte-integrations/connectors/source-postgres/build.gradle index df72ad0baa1b9..e9eda04cbaca6 100644 --- a/airbyte-integrations/connectors/source-postgres/build.gradle +++ b/airbyte-integrations/connectors/source-postgres/build.gradle @@ -12,7 +12,7 @@ java { } airbyteJavaConnector { - cdkVersionRequired = '0.23.17' + cdkVersionRequired = '0.29.13' features = ['db-sources', 'datastore-postgres'] useLocalCdk = false } diff --git a/airbyte-integrations/connectors/source-postgres/gradle.properties b/airbyte-integrations/connectors/source-postgres/gradle.properties index bc88ea85ebd89..45e99e438d748 100644 --- a/airbyte-integrations/connectors/source-postgres/gradle.properties +++ b/airbyte-integrations/connectors/source-postgres/gradle.properties @@ -1,3 +1,3 @@ testExecutionConcurrency=-1 -JunitMethodExecutionTimeout=5 m \ No newline at end of file +JunitMethodExecutionTimeout=2 m \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-postgres/metadata.yaml b/airbyte-integrations/connectors/source-postgres/metadata.yaml index f5bc3ca80c87c..5bbdee1cecc08 100644 --- a/airbyte-integrations/connectors/source-postgres/metadata.yaml +++ b/airbyte-integrations/connectors/source-postgres/metadata.yaml @@ -9,7 +9,7 @@ data: connectorSubtype: database connectorType: source definitionId: decd338e-5647-4c0b-adf4-da0e75f5a750 - dockerImageTag: 3.3.15 + dockerImageTag: 3.3.26 dockerRepository: airbyte/source-postgres documentationUrl: https://docs.airbyte.com/integrations/sources/postgres githubIssueLabel: source-postgres diff --git a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSource.java b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSource.java index 98519ba3f651d..ebc43eed54b6d 100644 --- a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSource.java +++ b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSource.java @@ -327,7 +327,7 @@ public JdbcDatabase createDatabase(final JsonNode sourceConfig) throws SQLExcept sourceOperations, streamingQueryConfigProvider); - quoteString = (quoteString == null ? database.getMetaData().getIdentifierQuoteString() : quoteString); + setQuoteString((getQuoteString() == null ? database.getMetaData().getIdentifierQuoteString() : getQuoteString())); database.setSourceConfig(sourceConfig); database.setDatabaseConfig(jdbcConfig); @@ -843,7 +843,7 @@ private List getFullTableEstimate(final JdbcDatabase database, } private boolean cloudDeploymentMode() { - return AdaptiveSourceRunner.CLOUD_MODE.equalsIgnoreCase(featureFlags.deploymentMode()); + return AdaptiveSourceRunner.CLOUD_MODE.equalsIgnoreCase(getFeatureFlags().deploymentMode()); } } diff --git a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSourceOperations.java b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSourceOperations.java index e0ea271195d79..d5a4405b6b6d1 100644 --- a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSourceOperations.java +++ b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSourceOperations.java @@ -33,13 +33,11 @@ import java.sql.Date; import java.sql.PreparedStatement; import java.sql.ResultSet; -import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Timestamp; import java.time.*; import java.time.format.DateTimeParseException; import java.util.Arrays; -import java.util.Collections; import java.util.HashMap; import java.util.Map; import org.postgresql.PGStatement; @@ -78,21 +76,6 @@ public class PostgresSourceOperations extends AbstractJdbcCompatibleSourceOperat Arrays.stream(PostgresType.class.getEnumConstants()).forEach(c -> POSTGRES_TYPE_DICT.put(c.type, c)); } - @Override - public JsonNode rowToJson(final ResultSet queryContext) throws SQLException { - // the first call communicates with the database, after that the result is cached. - final ResultSetMetaData metadata = queryContext.getMetaData(); - final int columnCount = metadata.getColumnCount(); - final ObjectNode jsonNode = (ObjectNode) Jsons.jsonNode(Collections.emptyMap()); - - for (int i = 1; i <= columnCount; i++) { - // convert to java types that will convert into reasonable json. - copyToJsonField(queryContext, i, jsonNode); - } - - return jsonNode; - } - @Override public void setCursorField(final PreparedStatement preparedStatement, final int parameterIndex, diff --git a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/cdc/PostgresCdcConnectorMetadataInjector.java b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/cdc/PostgresCdcConnectorMetadataInjector.java index c337542bf6aab..13439a522c637 100644 --- a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/cdc/PostgresCdcConnectorMetadataInjector.java +++ b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/cdc/PostgresCdcConnectorMetadataInjector.java @@ -14,6 +14,20 @@ public class PostgresCdcConnectorMetadataInjector implements CdcMetadataInjector { + private final String transactionTimestamp; + + private final Long lsn; + + PostgresCdcConnectorMetadataInjector() { + this.transactionTimestamp = null; + this.lsn = null; + } + + PostgresCdcConnectorMetadataInjector(final String transactionTimestamp, final Long lsn) { + this.transactionTimestamp = transactionTimestamp; + this.lsn = lsn; + } + @Override public void addMetaData(final ObjectNode event, final JsonNode source) { final long lsn = source.get("lsn").asLong(); @@ -21,7 +35,7 @@ public void addMetaData(final ObjectNode event, final JsonNode source) { } @Override - public void addMetaDataToRowsFetchedOutsideDebezium(final ObjectNode record, final String transactionTimestamp, final Long lsn) { + public void addMetaDataToRowsFetchedOutsideDebezium(final ObjectNode record) { record.put(CDC_UPDATED_AT, transactionTimestamp); record.put(CDC_LSN, lsn); record.put(CDC_DELETED_AT, (String) null); diff --git a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/cdc/PostgresCdcCtidInitializer.java b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/cdc/PostgresCdcCtidInitializer.java index 5d6baced6c9ec..9a9774ce1f99d 100644 --- a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/cdc/PostgresCdcCtidInitializer.java +++ b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/cdc/PostgresCdcCtidInitializer.java @@ -32,7 +32,6 @@ import io.airbyte.integrations.source.postgres.cdc.PostgresCdcCtidUtils.CtidStreams; import io.airbyte.integrations.source.postgres.ctid.CtidGlobalStateManager; import io.airbyte.integrations.source.postgres.ctid.CtidPostgresSourceOperations; -import io.airbyte.integrations.source.postgres.ctid.CtidPostgresSourceOperations.CdcMetadataInjector; import io.airbyte.integrations.source.postgres.ctid.CtidStateManager; import io.airbyte.integrations.source.postgres.ctid.CtidUtils; import io.airbyte.integrations.source.postgres.ctid.FileNodeHandler; @@ -151,8 +150,7 @@ public static List> cdcCtidIteratorsCombin quoteString); final CtidStateManager ctidStateManager = new CtidGlobalStateManager(ctidStreams, fileNodeHandler, stateToBeUsed, catalog); final CtidPostgresSourceOperations ctidPostgresSourceOperations = new CtidPostgresSourceOperations( - Optional.of(new CdcMetadataInjector( - emittedAt.toString(), io.airbyte.cdk.db.PostgresUtils.getLsn(database).asLong(), new PostgresCdcConnectorMetadataInjector()))); + Optional.of(new PostgresCdcConnectorMetadataInjector(emittedAt.toString(), io.airbyte.cdk.db.PostgresUtils.getLsn(database).asLong()))); final Map tableBlockSizes = PostgresQueryUtils.getTableBlockSizeForStreams( database, diff --git a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/cdc/PostgresCdcTargetPosition.java b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/cdc/PostgresCdcTargetPosition.java index d669db72755cc..dd12c75a07ef0 100644 --- a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/cdc/PostgresCdcTargetPosition.java +++ b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/cdc/PostgresCdcTargetPosition.java @@ -4,7 +4,9 @@ package io.airbyte.integrations.source.postgres.cdc; +import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.annotations.VisibleForTesting; import io.airbyte.cdk.db.PgLsn; import io.airbyte.cdk.db.PostgresUtils; @@ -14,6 +16,7 @@ import io.airbyte.cdk.integrations.debezium.internals.SnapshotMetadata; import io.airbyte.commons.json.Jsons; import java.sql.SQLException; +import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; @@ -105,10 +108,47 @@ public boolean isEventAheadOffset(final Map offset, final Change final JsonNode offsetJson = Jsons.deserialize((String) offset.values().toArray()[0]); - final String offset_lsn = - offsetJson.get("lsn_commit") != null ? String.valueOf(offsetJson.get("lsn_commit")) : String.valueOf(offsetJson.get("lsn")); - final String event_lsn = String.valueOf(event.eventValueAsJson().get("source").get("lsn")); - return Long.parseLong(event_lsn) > Long.parseLong(offset_lsn); + if (offsetJson.get("lsn_commit") == null) { + return false; + } + final String stateOffsetLsnCommit = String.valueOf(offsetJson.get("lsn_commit")); + + try { + ObjectMapper objectMapper = new ObjectMapper(); + TypeReference> listType = new TypeReference<>() {}; + /* @formatter:off + The event source structure is : + { + "version":"2.4.0.Final", + "connector":"postgresql", + "name":"db_pkgzzfnybb", + "ts_ms":1710283178042, + "snapshot":"false", + "db":"db_pkgzzfnybb", + "sequence":"[\"30660608\",\"30660608\"]", + "schema":"models_schema", + "table":"models", + "txId":777, + "lsn":30660608, + "xmin":null + } + See https://debezium.io/documentation/reference/2.4/connectors/postgresql.html#postgresql-create-events for the full event structure. + @formatter:on + */ + final JsonNode lsnSequenceNode = event.eventValueAsJson().get("source").get("sequence"); + List lsnSequence = objectMapper.readValue(lsnSequenceNode.asText(), listType); + // The sequence field is a pair of [lsn_commit, lsn_processed]. We want to make sure + // lsn_commit(event) is compared against lsn_commit(state_offset). For the event, either of the lsn + // values can be null. + String eventLsnCommit = lsnSequence.get(0); + if (eventLsnCommit == null) { + return false; + } + return Long.parseLong(eventLsnCommit) > Long.parseLong(stateOffsetLsnCommit); + } catch (Exception e) { + LOGGER.info("Encountered an error while attempting to parse event's LSN sequence {}", e.getCause()); + return false; + } } @Override diff --git a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/cdc/PostgresDebeziumStateUtil.java b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/cdc/PostgresDebeziumStateUtil.java index 4bff1cd798290..ef86c3f4b48c9 100644 --- a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/cdc/PostgresDebeziumStateUtil.java +++ b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/cdc/PostgresDebeziumStateUtil.java @@ -100,6 +100,7 @@ public void commitLSNToPostgresDatabase(final JsonNode jdbcConfig, final LogSequenceNumber logSequenceNumber = LogSequenceNumber.valueOf(savedOffset.getAsLong()); + LOGGER.info("Committing upto LSN: {}", savedOffset.getAsLong()); try (final BaseConnection pgConnection = (BaseConnection) PostgresReplicationConnection.createConnection(jdbcConfig)) { ChainedLogicalStreamBuilder streamBuilder = pgConnection .getReplicationAPI() diff --git a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/ctid/CtidPostgresSourceOperations.java b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/ctid/CtidPostgresSourceOperations.java index 3a032319295eb..a9d43c2464df5 100644 --- a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/ctid/CtidPostgresSourceOperations.java +++ b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/ctid/CtidPostgresSourceOperations.java @@ -4,23 +4,35 @@ package io.airbyte.integrations.source.postgres.ctid; -import com.fasterxml.jackson.databind.JsonNode; +import static io.airbyte.cdk.db.DbAnalyticsUtils.dataTypesSerializationErrorMessage; + import com.fasterxml.jackson.databind.node.ObjectNode; +import io.airbyte.cdk.db.jdbc.AirbyteRecordData; +import io.airbyte.cdk.integrations.base.AirbyteTraceMessageUtility; import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.source.postgres.PostgresSourceOperations; import io.airbyte.integrations.source.postgres.cdc.PostgresCdcConnectorMetadataInjector; +import io.airbyte.protocol.models.v0.AirbyteRecordMessageMeta; +import io.airbyte.protocol.models.v0.AirbyteRecordMessageMetaChange; +import io.airbyte.protocol.models.v0.AirbyteRecordMessageMetaChange.Change; +import io.airbyte.protocol.models.v0.AirbyteRecordMessageMetaChange.Reason; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; +import java.util.ArrayList; import java.util.Collections; +import java.util.List; import java.util.Objects; import java.util.Optional; +import org.slf4j.LoggerFactory; public class CtidPostgresSourceOperations extends PostgresSourceOperations { - private final Optional cdcMetadataInjector; + private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(CtidPostgresSourceOperations.class); + + private final Optional cdcMetadataInjector; - public CtidPostgresSourceOperations(final Optional cdcMetadataInjector) { + public CtidPostgresSourceOperations(final Optional cdcMetadataInjector) { super(); this.cdcMetadataInjector = cdcMetadataInjector; } @@ -33,47 +45,37 @@ public RowDataWithCtid recordWithCtid(final ResultSet queryContext) throws SQLEx final int columnCount = metadata.getColumnCount(); final ObjectNode jsonNode = (ObjectNode) Jsons.jsonNode(Collections.emptyMap()); String ctid = null; + final List metaChanges = new ArrayList<>(); for (int i = 1; i <= columnCount; i++) { final String columnName = metadata.getColumnName(i); - if (columnName.equalsIgnoreCase(CTID)) { - ctid = queryContext.getString(i); - continue; + final String columnTypeName = metadata.getColumnTypeName(i); + try { + if (columnName.equalsIgnoreCase(CTID)) { + ctid = queryContext.getString(i); + continue; + } + + // convert to java types that will convert into reasonable json. + copyToJsonField(queryContext, i, jsonNode); + } catch (Exception e) { + LOGGER.info("Failed to serialize column: {}, of type {}, with error {}", columnName, columnTypeName, e.getMessage()); + AirbyteTraceMessageUtility.emitAnalyticsTrace(dataTypesSerializationErrorMessage()); + metaChanges.add( + new AirbyteRecordMessageMetaChange() + .withField(columnName) + .withChange(Change.NULLED) + .withReason(Reason.SOURCE_SERIALIZATION_ERROR)); } - - // convert to java types that will convert into reasonable json. - copyToJsonField(queryContext, i, jsonNode); } if (Objects.nonNull(cdcMetadataInjector) && cdcMetadataInjector.isPresent()) { - cdcMetadataInjector.get().inject(jsonNode); + cdcMetadataInjector.get().addMetaDataToRowsFetchedOutsideDebezium(jsonNode); } assert Objects.nonNull(ctid); - return new RowDataWithCtid(jsonNode, ctid); + return new RowDataWithCtid(new AirbyteRecordData(jsonNode, new AirbyteRecordMessageMeta().withChanges(metaChanges)), ctid); } - public record RowDataWithCtid(JsonNode data, String ctid) { - - } - - public static class CdcMetadataInjector { - - private final String transactionTimestamp; - private final long lsn; - private final PostgresCdcConnectorMetadataInjector metadataInjector; - - public CdcMetadataInjector(final String transactionTimestamp, - final long lsn, - final PostgresCdcConnectorMetadataInjector metadataInjector) { - this.transactionTimestamp = transactionTimestamp; - this.lsn = lsn; - this.metadataInjector = metadataInjector; - } - - private void inject(final ObjectNode record) { - metadataInjector.addMetaDataToRowsFetchedOutsideDebezium(record, transactionTimestamp, lsn); - } - - } + public record RowDataWithCtid(AirbyteRecordData recordData, String ctid) {} } diff --git a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/ctid/PostgresCtidHandler.java b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/ctid/PostgresCtidHandler.java index 8c18d1937706e..4454d7c4c8d77 100644 --- a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/ctid/PostgresCtidHandler.java +++ b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/ctid/PostgresCtidHandler.java @@ -26,6 +26,7 @@ import io.airbyte.protocol.models.v0.AirbyteMessage; import io.airbyte.protocol.models.v0.AirbyteMessage.Type; import io.airbyte.protocol.models.v0.AirbyteRecordMessage; +import io.airbyte.protocol.models.v0.AirbyteRecordMessageMeta; import io.airbyte.protocol.models.v0.AirbyteStream; import io.airbyte.protocol.models.v0.CatalogHelpers; import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; @@ -146,10 +147,15 @@ private AutoCloseableIterator getRecordIterator( .withStream(streamName) .withNamespace(namespace) .withEmittedAt(emittedAt) - .withData(r.data())), + .withData(r.recordData().rawRowData()) + .withMeta(isMetaChangesEmptyOrNull(r.recordData().meta()) ? null : r.recordData().meta())), r.ctid())); } + private boolean isMetaChangesEmptyOrNull(AirbyteRecordMessageMeta meta) { + return meta == null || meta.getChanges() == null || meta.getChanges().isEmpty(); + } + // Augments the given iterator with record count logs. private AutoCloseableIterator augmentWithLogs(final AutoCloseableIterator iterator, final io.airbyte.protocol.models.AirbyteStreamNameNamespacePair pair, diff --git a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/xmin/PostgresXminHandler.java b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/xmin/PostgresXminHandler.java index 6d93443aa4e6d..7bc10a84e0e7e 100644 --- a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/xmin/PostgresXminHandler.java +++ b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/xmin/PostgresXminHandler.java @@ -9,6 +9,7 @@ import com.fasterxml.jackson.databind.JsonNode; import com.google.common.annotations.VisibleForTesting; import io.airbyte.cdk.db.JdbcCompatibleSourceOperations; +import io.airbyte.cdk.db.jdbc.AirbyteRecordData; import io.airbyte.cdk.db.jdbc.JdbcDatabase; import io.airbyte.cdk.integrations.source.relationaldb.DbSourceDiscoverUtil; import io.airbyte.cdk.integrations.source.relationaldb.RelationalDbQueryUtils; @@ -25,6 +26,7 @@ import io.airbyte.protocol.models.v0.AirbyteMessage; import io.airbyte.protocol.models.v0.AirbyteMessage.Type; import io.airbyte.protocol.models.v0.AirbyteRecordMessage; +import io.airbyte.protocol.models.v0.AirbyteRecordMessageMeta; import io.airbyte.protocol.models.v0.AirbyteStream; import io.airbyte.protocol.models.v0.CatalogHelpers; import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; @@ -99,7 +101,7 @@ public List> getIncrementalIterators( .filter(CatalogHelpers.getTopLevelFieldNames(airbyteStream)::contains) .collect(Collectors.toList()); - final AutoCloseableIterator queryStream = queryTableXmin(selectedDatabaseFields, table.getNameSpace(), table.getName()); + final AutoCloseableIterator queryStream = queryTableXmin(selectedDatabaseFields, table.getNameSpace(), table.getName()); final AutoCloseableIterator recordIterator = getRecordIterator(queryStream, streamName, namespace, emittedAt.toEpochMilli()); final AutoCloseableIterator recordAndMessageIterator = augmentWithState(recordIterator, airbyteStream, pair); @@ -111,18 +113,18 @@ public List> getIncrementalIterators( return iteratorList; } - private AutoCloseableIterator queryTableXmin( - final List columnNames, - final String schemaName, - final String tableName) { + private AutoCloseableIterator queryTableXmin( + final List columnNames, + final String schemaName, + final String tableName) { LOGGER.info("Queueing query for table: {}", tableName); final AirbyteStreamNameNamespacePair airbyteStream = AirbyteStreamUtils.convertFromNameAndNamespace(tableName, schemaName); return AutoCloseableIterators.lazyIterator(() -> { try { - final Stream stream = database.unsafeQuery( + final Stream stream = database.unsafeQuery( connection -> createXminQueryStatement(connection, columnNames, schemaName, tableName, airbyteStream), - sourceOperations::rowToJson); + sourceOperations::convertDatabaseRowToAirbyteRecordData); return AutoCloseableIterators.fromStream(stream, airbyteStream); } catch (final SQLException e) { throw new RuntimeException(e); @@ -206,17 +208,22 @@ public static boolean shouldPerformFullSync(final XminStatus currentXminStatus, // Transforms the given iterator to create an {@link AirbyteRecordMessage} private static AutoCloseableIterator getRecordIterator( - final AutoCloseableIterator recordIterator, + final AutoCloseableIterator recordIterator, final String streamName, final String namespace, final long emittedAt) { - return AutoCloseableIterators.transform(recordIterator, r -> new AirbyteMessage() + return AutoCloseableIterators.transform(recordIterator, airbyteRecordData -> new AirbyteMessage() .withType(Type.RECORD) .withRecord(new AirbyteRecordMessage() .withStream(streamName) .withNamespace(namespace) .withEmittedAt(emittedAt) - .withData(r))); + .withData(airbyteRecordData.rawRowData()) + .withMeta(isMetaChangesEmptyOrNull(airbyteRecordData.meta()) ? null : airbyteRecordData.meta()))); + } + + private static boolean isMetaChangesEmptyOrNull(AirbyteRecordMessageMeta meta) { + return meta == null || meta.getChanges() == null || meta.getChanges().isEmpty(); } // Augments the given iterator with record count logs. diff --git a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractSshPostgresSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractSshPostgresSourceAcceptanceTest.java index db8552a90e096..df37dd58f477e 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractSshPostgresSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractSshPostgresSourceAcceptanceTest.java @@ -13,7 +13,7 @@ import io.airbyte.cdk.integrations.base.ssh.SshBastionContainer; import io.airbyte.cdk.integrations.base.ssh.SshTunnel; import io.airbyte.cdk.integrations.standardtest.source.TestDestinationEnv; -import io.airbyte.commons.functional.CheckedFunction; +import io.airbyte.commons.functional.CheckedConsumer; import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.source.postgres.PostgresTestDatabase; import io.airbyte.integrations.source.postgres.PostgresTestDatabase.BaseImage; @@ -50,7 +50,7 @@ private void populateDatabaseTestData() throws Exception { outerConfig, JdbcUtils.HOST_LIST_KEY, JdbcUtils.PORT_LIST_KEY, - (CheckedFunction, Exception>) mangledConfig -> getDatabaseFromConfig(mangledConfig) + (CheckedConsumer) mangledConfig -> getDatabaseFromConfig(mangledConfig) .query(ctx -> { ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); ctx.fetch("INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); diff --git a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcWalLogsPostgresSourceDatatypeTest.java b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcWalLogsPostgresSourceDatatypeTest.java index c099d9bce9300..4222f6f7e3195 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcWalLogsPostgresSourceDatatypeTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcWalLogsPostgresSourceDatatypeTest.java @@ -36,7 +36,7 @@ protected List runRead(final ConfiguredAirbyteCatalog configured @Override protected void postSetup() throws Exception { final Database database = setupDatabase(); - for (final TestDataHolder test : testDataHolders) { + for (final TestDataHolder test : getTestDataHolders()) { database.query(ctx -> { ctx.fetch(test.getCreateSqlQuery()); return null; @@ -56,7 +56,7 @@ protected void postSetup() throws Exception { if (stateAfterFirstSync == null) { throw new RuntimeException("stateAfterFirstSync should not be null"); } - for (final TestDataHolder test : testDataHolders) { + for (final TestDataHolder test : getTestDataHolders()) { database.query(ctx -> { test.getInsertSqlQueries().forEach(ctx::fetch); return null; diff --git a/airbyte-integrations/connectors/source-postgres/src/test-performance/java/io/airbyte/integrations/source/postgres/FillPostgresTestDbScriptTest.java b/airbyte-integrations/connectors/source-postgres/src/test-performance/java/io/airbyte/integrations/source/postgres/FillPostgresTestDbScriptTest.java deleted file mode 100644 index 64a21f76a100a..0000000000000 --- a/airbyte-integrations/connectors/source-postgres/src/test-performance/java/io/airbyte/integrations/source/postgres/FillPostgresTestDbScriptTest.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.source.postgres; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.cdk.db.Database; -import io.airbyte.cdk.db.factory.DSLContextFactory; -import io.airbyte.cdk.db.factory.DatabaseDriver; -import io.airbyte.cdk.db.jdbc.JdbcUtils; -import io.airbyte.cdk.integrations.standardtest.source.TestDestinationEnv; -import io.airbyte.cdk.integrations.standardtest.source.performancetest.AbstractSourceFillDbWithTestData; -import io.airbyte.commons.json.Jsons; -import java.util.stream.Stream; -import org.jooq.DSLContext; -import org.jooq.SQLDialect; -import org.junit.jupiter.params.provider.Arguments; - -public class FillPostgresTestDbScriptTest extends AbstractSourceFillDbWithTestData { - - private JsonNode config; - private DSLContext dslContext; - - @Override - protected JsonNode getConfig() { - return config; - } - - @Override - protected void tearDown(final TestDestinationEnv testEnv) {} - - @Override - protected String getImageName() { - return "airbyte/source-postgres:dev"; - } - - @Override - protected Database setupDatabase(final String dbName) throws Exception { - final JsonNode replicationMethod = Jsons.jsonNode(ImmutableMap.builder() - .put("method", "Standard") - .build()); - - config = Jsons.jsonNode(ImmutableMap.builder() - .put(JdbcUtils.HOST_KEY, "your_host") - .put(JdbcUtils.PORT_KEY, 5432) - .put(JdbcUtils.DATABASE_KEY, dbName) - .put(JdbcUtils.USERNAME_KEY, "your_username") - .put(JdbcUtils.PASSWORD_KEY, "your_pass") - .put("replication_method", replicationMethod) - .build()); - - dslContext = DSLContextFactory.create( - config.get(JdbcUtils.USERNAME_KEY).asText(), - config.get(JdbcUtils.PASSWORD_KEY).asText(), - DatabaseDriver.POSTGRESQL.getDriverClassName(), - String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), - config.get(JdbcUtils.HOST_KEY).asText(), - config.get(JdbcUtils.PORT_KEY).asInt(), - config.get(JdbcUtils.DATABASE_KEY).asText()), - SQLDialect.POSTGRES); - final Database database = new Database(dslContext); - - return database; - } - - /** - * This is a data provider for fill DB script, Each argument's group would be ran as a separate - * test. 1st arg - a name of DB that will be used in jdbc connection string. 2nd arg - a schemaName - * that will be ised as a NameSpace in Configured Airbyte Catalog. 3rd arg - a number of expected - * records retrieved in each stream. 4th arg - a number of messages batches - * (numberOfMessages*numberOfBatches, ex. 100*2=200 messages in total in each stream) 5th arg - a - * number of columns in each stream\table that will be use for Airbyte Cataloq configuration 6th arg - * - a number of streams to read in configured airbyte Catalog. Each stream\table in DB should be - * names like "test_0", "test_1",..., test_n. - */ - @Override - protected Stream provideParameters() { - return Stream.of(Arguments.of("postgres", "\"your_schema_name\"", 100, 2, 240, 1000)); - } - -} diff --git a/airbyte-integrations/connectors/source-postgres/src/test-performance/java/io/airbyte/integrations/source/postgres/PostgresRdsSourcePerformanceTest.java b/airbyte-integrations/connectors/source-postgres/src/test-performance/java/io/airbyte/integrations/source/postgres/PostgresRdsSourcePerformanceTest.java deleted file mode 100644 index e0d551e25c4df..0000000000000 --- a/airbyte-integrations/connectors/source-postgres/src/test-performance/java/io/airbyte/integrations/source/postgres/PostgresRdsSourcePerformanceTest.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.source.postgres; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.cdk.db.jdbc.JdbcUtils; -import io.airbyte.cdk.integrations.standardtest.source.performancetest.AbstractSourcePerformanceTest; -import io.airbyte.commons.io.IOs; -import io.airbyte.commons.json.Jsons; -import java.nio.file.Path; -import java.util.List; -import java.util.stream.Stream; -import org.junit.jupiter.params.provider.Arguments; - -public class PostgresRdsSourcePerformanceTest extends AbstractSourcePerformanceTest { - - private static final String PERFORMANCE_SECRET_CREDS = "secrets/performance-config.json"; - private static final List SCHEMAS = List.of("t1000_c240_r200", - "t25_c8_r50k_s10kb", "t1000_c8_r10k_s500b"); - - @Override - protected String getImageName() { - return "airbyte/source-postgres:dev"; - } - - @Override - protected void setupDatabase(final String dbName) { - final JsonNode plainConfig = Jsons.deserialize(IOs.readFile(Path.of(PERFORMANCE_SECRET_CREDS))); - - final JsonNode replicationMethod = Jsons.jsonNode(ImmutableMap.builder() - .put("method", "Standard") - .build()); - - config = Jsons.jsonNode(ImmutableMap.builder() - .put(JdbcUtils.HOST_KEY, plainConfig.get(JdbcUtils.HOST_KEY)) - .put(JdbcUtils.PORT_KEY, plainConfig.get(JdbcUtils.PORT_KEY)) - .put(JdbcUtils.DATABASE_KEY, dbName) - .put(JdbcUtils.SCHEMAS_KEY, List.of(dbName)) - .put(JdbcUtils.USERNAME_KEY, plainConfig.get(JdbcUtils.USERNAME_KEY)) - .put(JdbcUtils.PASSWORD_KEY, plainConfig.get(JdbcUtils.PASSWORD_KEY)) - .put(JdbcUtils.SSL_KEY, true) - .put("replication_method", replicationMethod) - .build()); - } - - /** - * This is a data provider for performance tests, Each argument's group would be ran as a separate - * test. 1st arg - a name of DB that will be used in jdbc connection string. 2nd arg - a schemaName - * that will be used as a NameSpace in Configured Airbyte Catalog. 3rd arg - a number of expected - * records retrieved in each stream. 4th arg - a number of columns in each stream\table that will be - * use for Airbyte Cataloq configuration 5th arg - a number of streams to read in configured airbyte - * Catalog. Each stream\table in DB should be names like "test_0", "test_1",..., test_n. - */ - @Override - protected Stream provideParameters() { - return Stream.of( - Arguments.of(SCHEMAS.get(0), SCHEMAS.get(0), 200, 240, 1000), - Arguments.of(SCHEMAS.get(1), SCHEMAS.get(1), 50000, 8, 25), - Arguments.of(SCHEMAS.get(2), SCHEMAS.get(2), 10000, 8, 1000)); - } - -} diff --git a/airbyte-integrations/connectors/source-postgres/src/test-performance/sql/1-create-copy-tables-procedure.sql b/airbyte-integrations/connectors/source-postgres/src/test-performance/sql/1-create-copy-tables-procedure.sql deleted file mode 100644 index 861714f0b9fc7..0000000000000 --- a/airbyte-integrations/connectors/source-postgres/src/test-performance/sql/1-create-copy-tables-procedure.sql +++ /dev/null @@ -1,25 +0,0 @@ -CREATE - OR replace PROCEDURE copy_table( - tablecount INT - ) LANGUAGE plpgsql AS $$ DECLARE v_max_table INT; - -v_counter_table INT; - -v_tnamee VARCHAR(255); - -BEGIN v_max_table := tablecount; - -v_counter_table := 1; - -while v_counter_table < v_max_table loop EXECUTE format( - 'create table test_%s as (select * from test t)', - v_counter_table -); - -v_counter_table := v_counter_table + 1; -END loop; - -COMMIT; -END; - -$$ diff --git a/airbyte-integrations/connectors/source-postgres/src/test-performance/sql/2-create-insert-rows-to-table-procedure.sql b/airbyte-integrations/connectors/source-postgres/src/test-performance/sql/2-create-insert-rows-to-table-procedure.sql deleted file mode 100644 index 6f8b07fc6b123..0000000000000 --- a/airbyte-integrations/connectors/source-postgres/src/test-performance/sql/2-create-insert-rows-to-table-procedure.sql +++ /dev/null @@ -1,168 +0,0 @@ -CREATE - OR replace PROCEDURE insert_rows( - allrows INT, - insertcount INT, - value text - ) LANGUAGE plpgsql AS $$ DECLARE dummyIpsum VARCHAR(255); - -fieldText text; - -vmax INT; - -vmaxx INT; - -vmaxoneinsert INT; - -counter INT; - -DECLARE lastinsertcounter INT; - -lastinsert INT; - -fullloop INT; - -fullloopcounter INT; - -insertTable text; - -insertTableLasted text; - -BEGIN fieldText := value; - -dummyIpsum = '''dummy_ipsum'''; - -vmax = allrows; - -vmaxx = allrows; - -vmaxoneinsert = insertcount; - -counter = 1; - -lastinsertcounter = 1; - -lastinsert = 0; - -fullloop = 0; - -fullloopcounter = 0; - -while vmaxx <= vmaxoneinsert loop vmaxoneinsert := vmaxx; - -fullloop := fullloop + 1; - -vmaxx := vmaxx + 1; -END loop; - -COMMIT; - -while vmax > vmaxoneinsert loop fullloop := fullloop + 1; - -vmax := vmax - vmaxoneinsert; - -lastinsert := vmax; -END loop; - -COMMIT; - -insertTable := 'insert into test (varchar1, varchar2, varchar3, varchar4, varchar5, longblobfield, timestampfield) values ('; - -while counter < vmaxoneinsert loop insertTable := concat( - insertTable, - dummyIpsum, - ', ', - dummyIpsum, - ', ', - dummyIpsum, - ', ', - dummyIpsum, - ', ', - dummyIpsum, - ', ', - fieldText, - ', CURRENT_TIMESTAMP), (' -); - -counter := counter + 1; -END loop; - -COMMIT; - -insertTable := concat( - insertTable, - dummyIpsum, - ', ', - dummyIpsum, - ', ', - dummyIpsum, - ', ', - dummyIpsum, - ', ', - dummyIpsum, - ', ', - fieldText, - ', CURRENT_TIMESTAMP);' -); - -while vmax < 1 loop fullloop := 0; - -vmax := 1; -END loop; - -COMMIT; - -while fullloopcounter < fullloop loop EXECUTE insertTable; - -fullloopcounter := fullloopcounter + 1; -END loop; - -COMMIT; - -insertTableLasted := 'insert into test (varchar1, varchar2, varchar3, varchar4, varchar5, longblobfield, timestampfield) values ('; - -while lastinsertcounter < lastinsert loop insertTableLasted := concat( - insertTableLasted, - dummyIpsum, - ', ', - dummyIpsum, - ', ', - dummyIpsum, - ', ', - dummyIpsum, - ', ', - dummyIpsum, - ', ', - fieldText, - ', CURRENT_TIMESTAMP), (' -); - -lastinsertcounter := lastinsertcounter + 1; -END loop; - -COMMIT; - -insertTableLasted := concat( - insertTableLasted, - dummyIpsum, - ', ', - dummyIpsum, - ', ', - dummyIpsum, - ', ', - dummyIpsum, - ', ', - dummyIpsum, - ', ', - fieldText, - ', CURRENT_TIMESTAMP);' -); - -while lastinsert > 0 loop EXECUTE insertTableLasted; - -lastinsert := 0; -END loop; - -COMMIT; -END; - -$$ diff --git a/airbyte-integrations/connectors/source-postgres/src/test-performance/sql/3-run-script.sql b/airbyte-integrations/connectors/source-postgres/src/test-performance/sql/3-run-script.sql deleted file mode 100644 index 5eb1655c3f2e3..0000000000000 --- a/airbyte-integrations/connectors/source-postgres/src/test-performance/sql/3-run-script.sql +++ /dev/null @@ -1,73 +0,0 @@ -CREATE - SEQUENCE test_seq; - -CREATE - TABLE - test( - id INT CHECK( - id > 0 - ) NOT NULL DEFAULT nextval('test_seq') PRIMARY KEY, - varchar1 VARCHAR(255), - varchar2 VARCHAR(255), - varchar3 VARCHAR(255), - varchar4 VARCHAR(255), - varchar5 VARCHAR(255), - longblobfield bytea, - timestampfield TIMESTAMP(0) - ); - --- TODO: change the following @allrows to control the number of records with different sizes --- number of 50B records -CALL insert_rows( - 0, - 500000, - '''test weight 50b - some text, some text, some text''' -); - --- number of 500B records -CALL insert_rows( - 0, - 50000, - CONCAT( - '''test weight 500b - ', - repeat( - 'some text, some text, ', - 20 - ), - 'some text''' - ) -); - --- number of 10KB records -CALL insert_rows( - 0, - 5000, - CONCAT( - '''test weight 10kb - ', - repeat( - 'some text, some text, some text, some text, ', - 295 - ), - 'some text''' - ) -); - --- number of 100KB records -CALL insert_rows( - 0, - 50, - CONCAT( - '''test weight 100kb - ', - repeat( - 'some text, some text, ', - 4450 - ), - 'some text''' - ) -); - --- TODO: change the value to control the number of tables -CALL copy_table(0); - -ALTER TABLE - test RENAME TO test_0; diff --git a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/CdcPostgresSourceTest.java b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/CdcPostgresSourceTest.java index 10e3ccec65411..30d11722452db 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/CdcPostgresSourceTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/CdcPostgresSourceTest.java @@ -83,7 +83,7 @@ protected void setBaseImage() { } @Override - protected void assertExpectedStateMessageCountMatches(final List stateMessages, long totalCount) { + protected void assertExpectedStateMessageCountMatches(final List stateMessages, long totalCount) { AtomicLong count = new AtomicLong(0L); stateMessages.stream().forEach(stateMessage -> count.addAndGet(stateMessage.getSourceStats().getRecordCount().longValue())); assertEquals(totalCount, count.get()); @@ -201,18 +201,18 @@ void testCheckWithoutReplicationSlot() throws Exception { } @Override - protected void assertExpectedStateMessages(final List stateMessages) { + protected void assertExpectedStateMessages(final List stateMessages) { assertEquals(7, stateMessages.size()); assertStateTypes(stateMessages, 4); } @Override - protected void assertExpectedStateMessagesForRecordsProducedDuringAndAfterSync(final List stateAfterFirstBatch) { + protected void assertExpectedStateMessagesForRecordsProducedDuringAndAfterSync(final List stateAfterFirstBatch) { assertEquals(27, stateAfterFirstBatch.size()); assertStateTypes(stateAfterFirstBatch, 24); } - private void assertStateTypes(final List stateMessages, final int indexTillWhichExpectCtidState) { + private void assertStateTypes(final List stateMessages, final int indexTillWhichExpectCtidState) { JsonNode sharedState = null; for (int i = 0; i < stateMessages.size(); i++) { final AirbyteStateMessage stateMessage = stateMessages.get(i); @@ -241,7 +241,7 @@ private void assertStateTypes(final List stateMessages, fin } @Override - protected void assertStateMessagesForNewTableSnapshotTest(final List stateMessages, + protected void assertStateMessagesForNewTableSnapshotTest(final List stateMessages, final AirbyteStateMessage stateMessageEmittedAfterFirstSyncCompletion) { assertEquals(7, stateMessages.size(), stateMessages.toString()); for (int i = 0; i <= 4; i++) { @@ -451,12 +451,12 @@ public void testTwoStreamSync() throws Exception { } @Override - protected void assertExpectedStateMessagesForNoData(final List stateMessages) { + protected void assertExpectedStateMessagesForNoData(final List stateMessages) { assertEquals(2, stateMessages.size()); } @Override - protected void assertExpectedStateMessagesFromIncrementalSync(final List stateMessages) { + protected void assertExpectedStateMessagesFromIncrementalSync(final List stateMessages) { assertEquals(1, stateMessages.size()); assertNotNull(stateMessages.get(0).getData()); } @@ -871,9 +871,10 @@ protected void verifyCheckpointStatesByRecords() throws Exception { .toListAndClose(secondBatchIterator); assertEquals(recordsToCreate, extractRecordMessages(dataFromSecondBatch).size()); final List stateMessagesCDC = extractStateMessages(dataFromSecondBatch); - if (!isOnLegacyPostgres()) { - assertTrue(stateMessagesCDC.size() > 1, "Generated only the final state."); - } + // We expect only one cdc state message, as all the records are inserted in a single transaction. + // Since + // lsn_commit only increases with a new transaction, we expect only one state message. + assertTrue(stateMessagesCDC.size() == 1, "Generated only the final state."); assertEquals(stateMessagesCDC.size(), stateMessagesCDC.stream().distinct().count(), "There are duplicated states."); } @@ -912,9 +913,10 @@ protected void verifyCheckpointStatesBySeconds() throws Exception { assertEquals(recordsToCreate, extractRecordMessages(dataFromSecondBatch).size()); final List stateMessagesCDC = extractStateMessages(dataFromSecondBatch); - if (!isOnLegacyPostgres()) { - assertTrue(stateMessagesCDC.size() > 1, "Generated only the final state."); - } + // We expect only one cdc state message, as all the records are inserted in a single transaction. + // Since + // lsn_commit only increases with a new transaction, we expect only one state message. + assertTrue(stateMessagesCDC.size() == 1, "Generated only the final state."); assertEquals(stateMessagesCDC.size(), stateMessagesCDC.stream().distinct().count(), "There are duplicated states."); } diff --git a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresJdbcSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresJdbcSourceAcceptanceTest.java index 705cf416fdc4d..d257eef9358bb 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresJdbcSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresJdbcSourceAcceptanceTest.java @@ -21,7 +21,6 @@ import io.airbyte.cdk.integrations.source.jdbc.test.JdbcSourceAcceptanceTest; import io.airbyte.cdk.integrations.source.relationaldb.models.DbStreamState; import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.resources.MoreResources; import io.airbyte.commons.util.MoreIterators; import io.airbyte.integrations.source.postgres.PostgresTestDatabase.BaseImage; import io.airbyte.integrations.source.postgres.internal.models.CursorBasedStatus; @@ -37,7 +36,6 @@ import io.airbyte.protocol.models.v0.CatalogHelpers; import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.v0.ConnectorSpecification; import io.airbyte.protocol.models.v0.DestinationSyncMode; import io.airbyte.protocol.models.v0.SyncMode; import java.util.ArrayList; @@ -234,14 +232,6 @@ public boolean supportsSchemas() { return true; } - @Test - void testSpec() throws Exception { - final ConnectorSpecification actual = source().spec(); - final ConnectorSpecification expected = Jsons.deserialize(MoreResources.readResource("spec.json"), ConnectorSpecification.class); - - assertEquals(expected, actual); - } - @Override protected List getTestMessages() { return getTestMessages(streamName()); diff --git a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/xmin/XminSourceStateIteratorTest.java b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/xmin/XminSourceStateIteratorTest.java index 87df3553e7dac..b5ac69f6393be 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/xmin/XminSourceStateIteratorTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/xmin/XminSourceStateIteratorTest.java @@ -9,8 +9,8 @@ import static io.airbyte.integrations.source.postgres.xmin.XminTestConstants.RECORD_MESSAGE_2; import static io.airbyte.integrations.source.postgres.xmin.XminTestConstants.RECORD_MESSAGE_3; import static io.airbyte.integrations.source.postgres.xmin.XminTestConstants.STREAM_NAME1; -import static io.airbyte.integrations.source.postgres.xmin.XminTestConstants.XMIN_STATE_MESSAGE_1; import static io.airbyte.integrations.source.postgres.xmin.XminTestConstants.XMIN_STATUS1; +import static io.airbyte.integrations.source.postgres.xmin.XminTestConstants.createStateMessage1WithCount; import static org.junit.Assert.assertThrows; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; @@ -19,7 +19,6 @@ import io.airbyte.cdk.integrations.source.relationaldb.state.StateEmitFrequency; import io.airbyte.commons.util.MoreIterators; import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteStateStats; import io.airbyte.protocol.models.v0.AirbyteStream; import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; import java.sql.SQLException; @@ -70,12 +69,9 @@ void testSuccessfulSync() { manager, new StateEmitFrequency(0L, Duration.ofSeconds(1L))); - var expectedStateMessage = - XMIN_STATE_MESSAGE_1.withState(XMIN_STATE_MESSAGE_1.getState().withSourceStats(new AirbyteStateStats().withRecordCount(2.0))); - assertEquals(RECORD_MESSAGE_1, iterator.next()); assertEquals(RECORD_MESSAGE_2, iterator.next()); - assertEquals(expectedStateMessage, iterator.next()); + assertEquals(createStateMessage1WithCount(2.0), iterator.next()); assertFalse(iterator.hasNext()); } diff --git a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/xmin/XminTestConstants.java b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/xmin/XminTestConstants.java index 35c5d881cb052..a7ba5aef2d75c 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/xmin/XminTestConstants.java +++ b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/xmin/XminTestConstants.java @@ -13,6 +13,7 @@ import io.airbyte.protocol.models.v0.AirbyteRecordMessage; import io.airbyte.protocol.models.v0.AirbyteStateMessage; import io.airbyte.protocol.models.v0.AirbyteStateMessage.AirbyteStateType; +import io.airbyte.protocol.models.v0.AirbyteStateStats; import io.airbyte.protocol.models.v0.AirbyteStreamState; import io.airbyte.protocol.models.v0.StreamDescriptor; import org.testcontainers.shaded.com.google.common.collect.ImmutableMap; @@ -72,4 +73,18 @@ public static AirbyteMessage createRecordMessage(final String recordValue) { .withData(Jsons.jsonNode(ImmutableMap.of(UUID_FIELD_NAME, recordValue)))); } + public static AirbyteMessage createStateMessage1WithCount(final double count) { + return new AirbyteMessage() + .withType(Type.STATE) + .withState(new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState() + .withStreamDescriptor( + new StreamDescriptor() + .withName(PAIR1.getName()) + .withNamespace(PAIR1.getNamespace())) + .withStreamState(new ObjectMapper().valueToTree(XMIN_STATUS1))) + .withSourceStats(new AirbyteStateStats().withRecordCount(count))); + } + } diff --git a/airbyte-integrations/connectors/source-postgres/src/testFixtures/java/io/airbyte/integrations/source/postgres/PostgresTestDatabase.java b/airbyte-integrations/connectors/source-postgres/src/testFixtures/java/io/airbyte/integrations/source/postgres/PostgresTestDatabase.java index 374ed7dceede1..8f82aa2a14c73 100644 --- a/airbyte-integrations/connectors/source-postgres/src/testFixtures/java/io/airbyte/integrations/source/postgres/PostgresTestDatabase.java +++ b/airbyte-integrations/connectors/source-postgres/src/testFixtures/java/io/airbyte/integrations/source/postgres/PostgresTestDatabase.java @@ -186,8 +186,8 @@ public PostgresConfigBuilder withCdcReplication(String LsnCommitBehaviour, Strin .with("is_test", true) .with("replication_method", Jsons.jsonNode(ImmutableMap.builder() .put("method", "CDC") - .put("replication_slot", testDatabase.getReplicationSlotName()) - .put("publication", testDatabase.getPublicationName()) + .put("replication_slot", getTestDatabase().getReplicationSlotName()) + .put("publication", getTestDatabase().getPublicationName()) .put("initial_waiting_seconds", DEFAULT_CDC_REPLICATION_INITIAL_WAIT.getSeconds()) .put("lsn_commit_behaviour", LsnCommitBehaviour) .put(INVALID_CDC_CURSOR_POSITION_PROPERTY, cdcCursorFailBehaviour) diff --git a/airbyte-integrations/connectors/source-quickbooks/acceptance-test-config.yml b/airbyte-integrations/connectors/source-quickbooks/acceptance-test-config.yml index c773184a55404..a3e6455722e6f 100644 --- a/airbyte-integrations/connectors/source-quickbooks/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-quickbooks/acceptance-test-config.yml @@ -19,25 +19,25 @@ acceptance_tests: - config_path: "secrets/config.json" backward_compatibility_tests_config: disable_for_version: "2.0.0" #change type of a field in payments schema - basic_read: - tests: - - config_path: "secrets/config.json" - empty_streams: - - name: vendor_credits - bypass_reason: "unable to populate" - - name: vendors - bypass_reason: "unable to populate" - - name: classes - bypass_reason: "unable to populate" - - name: budgets - bypass_reason: "unable to populate" - - name: transfers - bypass_reason: "unable to populate" - - name: departments - bypass_reason: "unable to populate" - # expect_records: - # path: "integration_tests/expected_records.jsonl" - # exact_order: no + # basic_read: + # tests: + # - config_path: "secrets/config.json" + # empty_streams: + # - name: vendor_credits + # bypass_reason: "unable to populate" + # - name: vendors + # bypass_reason: "unable to populate" + # - name: classes + # bypass_reason: "unable to populate" + # - name: budgets + # bypass_reason: "unable to populate" + # - name: transfers + # bypass_reason: "unable to populate" + # - name: departments + # bypass_reason: "unable to populate" + # expect_records: + # path: "integration_tests/expected_records.jsonl" + # exact_order: no incremental: tests: - config_path: "secrets/config.json" diff --git a/airbyte-integrations/connectors/source-quickbooks/metadata.yaml b/airbyte-integrations/connectors/source-quickbooks/metadata.yaml index fa9cdf172a2cb..b474c434573fb 100644 --- a/airbyte-integrations/connectors/source-quickbooks/metadata.yaml +++ b/airbyte-integrations/connectors/source-quickbooks/metadata.yaml @@ -9,7 +9,7 @@ data: baseImage: docker.io/airbyte/python-connector-base:1.1.0@sha256:bd98f6505c6764b1b5f99d3aedc23dfc9e9af631a62533f60eb32b1d3dbab20c connectorType: source definitionId: cf9c4355-b171-4477-8f2d-6c5cc5fc8b7e - dockerImageTag: 3.0.2 + dockerImageTag: 3.0.3 dockerRepository: airbyte/source-quickbooks githubIssueLabel: source-quickbooks icon: quickbooks.svg @@ -21,7 +21,7 @@ data: packageName: airbyte-source-quickbooks registries: cloud: - enabled: true + enabled: false oss: enabled: true releaseStage: alpha diff --git a/airbyte-integrations/connectors/source-quickbooks/pyproject.toml b/airbyte-integrations/connectors/source-quickbooks/pyproject.toml index 33b9c403e84e8..b1201d94f6f1b 100644 --- a/airbyte-integrations/connectors/source-quickbooks/pyproject.toml +++ b/airbyte-integrations/connectors/source-quickbooks/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "3.0.2" +version = "3.0.3" name = "source-quickbooks" description = "Source implementation for quickbooks." authors = [ "Airbyte ",] diff --git a/airbyte-integrations/connectors/source-quickbooks/source_quickbooks/manifest.yaml b/airbyte-integrations/connectors/source-quickbooks/source_quickbooks/manifest.yaml index 267f04d857d08..ac70de7bbc6a4 100644 --- a/airbyte-integrations/connectors/source-quickbooks/source_quickbooks/manifest.yaml +++ b/airbyte-integrations/connectors/source-quickbooks/source_quickbooks/manifest.yaml @@ -26,6 +26,7 @@ definitions: client_id: "{{ config['credentials']['client_id'] }}" client_secret: "{{ config['credentials']['client_secret'] }}" refresh_token: "{{ config['credentials']['refresh_token'] }}" + refresh_token_updater: {} retriever: type: SimpleRetriever record_selector: diff --git a/airbyte-integrations/connectors/source-quickbooks/source_quickbooks/schemas/bills.json b/airbyte-integrations/connectors/source-quickbooks/source_quickbooks/schemas/bills.json index 166120688ebc0..84e22ed37dca0 100644 --- a/airbyte-integrations/connectors/source-quickbooks/source_quickbooks/schemas/bills.json +++ b/airbyte-integrations/connectors/source-quickbooks/source_quickbooks/schemas/bills.json @@ -134,6 +134,18 @@ "BillableStatus": { "type": ["null", "string"] }, + "ClassRef": { + "additionalProperties": true, + "properties": { + "name": { + "type": ["null", "string"] + }, + "value": { + "type": ["null", "string"] + } + }, + "type": ["null", "object"] + }, "AccountRef": { "properties": { "name": { diff --git a/airbyte-integrations/connectors/source-quickbooks/source_quickbooks/schemas/deposits.json b/airbyte-integrations/connectors/source-quickbooks/source_quickbooks/schemas/deposits.json index e89b73f293bfe..cd24d629bd43d 100644 --- a/airbyte-integrations/connectors/source-quickbooks/source_quickbooks/schemas/deposits.json +++ b/airbyte-integrations/connectors/source-quickbooks/source_quickbooks/schemas/deposits.json @@ -65,7 +65,22 @@ "type": ["null", "number"] }, "DepositLineDetail": { + "additionalProperties": true, "properties": { + "Entity": { + "properties": { + "name": { + "type": ["null", "string"] + }, + "value": { + "type": ["null", "string"] + }, + "type": { + "type": ["null", "string"] + } + }, + "type": ["null", "object"] + }, "PaymentMethodRef": { "properties": { "value": { diff --git a/airbyte-integrations/connectors/source-quickbooks/source_quickbooks/schemas/journal_entries.json b/airbyte-integrations/connectors/source-quickbooks/source_quickbooks/schemas/journal_entries.json index 82bbe402c26a7..9c1ae4199ee9e 100644 --- a/airbyte-integrations/connectors/source-quickbooks/source_quickbooks/schemas/journal_entries.json +++ b/airbyte-integrations/connectors/source-quickbooks/source_quickbooks/schemas/journal_entries.json @@ -58,8 +58,40 @@ }, "type": ["null", "object"] }, + "ClassRef": { + "additionalProperties": true, + "properties": { + "name": { + "type": ["null", "string"] + }, + "value": { + "type": ["null", "string"] + } + }, + "type": ["null", "object"] + }, "PostingType": { "type": ["null", "string"] + }, + "Entity": { + "additionalProperties": true, + "properties": { + "Type": { + "type": ["null", "string"] + }, + "EntityRef": { + "properties": { + "name": { + "type": ["null", "string"] + }, + "value": { + "type": ["null", "string"] + } + }, + "type": ["null", "object"] + } + }, + "type": ["null", "object"] } }, "type": ["null", "object"] diff --git a/airbyte-integrations/connectors/source-quickbooks/source_quickbooks/schemas/purchases.json b/airbyte-integrations/connectors/source-quickbooks/source_quickbooks/schemas/purchases.json index 4e05402c1f2d0..337de55db12aa 100644 --- a/airbyte-integrations/connectors/source-quickbooks/source_quickbooks/schemas/purchases.json +++ b/airbyte-integrations/connectors/source-quickbooks/source_quickbooks/schemas/purchases.json @@ -160,6 +160,18 @@ } } }, + "ClassRef": { + "additionalProperties": true, + "properties": { + "name": { + "type": ["null", "string"] + }, + "value": { + "type": ["null", "string"] + } + }, + "type": ["null", "object"] + }, "CustomerRef": { "type": ["null", "object"], "properties": { diff --git a/airbyte-integrations/connectors/source-recharge/README.md b/airbyte-integrations/connectors/source-recharge/README.md index 03ee5b5b276ed..c09b9cde6f48f 100644 --- a/airbyte-integrations/connectors/source-recharge/README.md +++ b/airbyte-integrations/connectors/source-recharge/README.md @@ -30,7 +30,7 @@ See `sample_files/sample_config.json` for a sample config file. poetry run source-recharge spec poetry run source-recharge check --config secrets/config.json poetry run source-recharge discover --config secrets/config.json -poetry run source-recharge read --config secrets/config.json --catalog sample_files/configured_catalog.json +poetry run source-recharge read --config secrets/config.json --catalog integration_tests/configured_catalog.json ``` ### Running unit tests diff --git a/airbyte-integrations/connectors/source-recharge/acceptance-test-config.yml b/airbyte-integrations/connectors/source-recharge/acceptance-test-config.yml index 6055da4f21f18..7f1eee56def28 100644 --- a/airbyte-integrations/connectors/source-recharge/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-recharge/acceptance-test-config.yml @@ -4,29 +4,23 @@ acceptance_tests: - config_path: secrets/config.json empty_streams: - name: collections - bypass_reason: "volatile data" + bypass_reason: "The stream is tested with `Integration Tests`, since no data is available" - name: discounts - bypass_reason: "volatile data" + bypass_reason: "The stream is tested with `Integration Tests`, since no data is available" - name: onetimes - bypass_reason: "no data from stream" - timeout_seconds: 7200 + bypass_reason: "The stream is tested with `Integration Tests`, since no data is available" expect_records: path: "integration_tests/expected_records.jsonl" - exact_order: no - fail_on_extra_columns: false - config_path: secrets/config_order_modern_api.json empty_streams: - name: collections - bypass_reason: "volatile data" + bypass_reason: "The stream is tested with `Integration Tests`, since no data is available" - name: discounts - bypass_reason: "volatile data" + bypass_reason: "The stream is tested with `Integration Tests`, since no data is available" - name: onetimes - bypass_reason: "no data from stream" - timeout_seconds: 7200 + bypass_reason: "The stream is tested with `Integration Tests`, since no data is available" expect_records: path: "integration_tests/expected_records_orders_modern_api.jsonl" - exact_order: no - fail_on_extra_columns: false connection: tests: - config_path: secrets/config.json @@ -36,7 +30,7 @@ acceptance_tests: discovery: tests: - backward_compatibility_tests_config: - disable_for_version: 1.1.2 + disable_for_version: 1.1.5 config_path: secrets/config.json full_refresh: tests: diff --git a/airbyte-integrations/connectors/source-recharge/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-recharge/integration_tests/abnormal_state.json index 0e0a42363b5d8..9def67c7e481e 100644 --- a/airbyte-integrations/connectors/source-recharge/integration_tests/abnormal_state.json +++ b/airbyte-integrations/connectors/source-recharge/integration_tests/abnormal_state.json @@ -2,49 +2,49 @@ { "type": "STREAM", "stream": { - "stream_state": { "updated_at": "2050-05-18T00:00:00" }, + "stream_state": { "updated_at": "2050-05-18T00:00:00Z" }, "stream_descriptor": { "name": "addresses" } } }, { "type": "STREAM", "stream": { - "stream_state": { "updated_at": "2050-05-18T00:00:00" }, + "stream_state": { "updated_at": "2050-05-18T00:00:00Z" }, "stream_descriptor": { "name": "charges" } } }, { "type": "STREAM", "stream": { - "stream_state": { "updated_at": "2050-05-18T00:00:00" }, + "stream_state": { "updated_at": "2050-05-18T00:00:00Z" }, "stream_descriptor": { "name": "customers" } } }, { "type": "STREAM", "stream": { - "stream_state": { "updated_at": "2050-05-18T00:00:00" }, + "stream_state": { "updated_at": "2050-05-18T00:00:00Z" }, "stream_descriptor": { "name": "discounts" } } }, { "type": "STREAM", "stream": { - "stream_state": { "updated_at": "2050-05-18T00:00:00" }, + "stream_state": { "updated_at": "2050-05-18T00:00:00Z" }, "stream_descriptor": { "name": "onetimes" } } }, { "type": "STREAM", "stream": { - "stream_state": { "updated_at": "2050-05-18T00:00:00" }, + "stream_state": { "updated_at": "2050-05-18T00:00:00Z" }, "stream_descriptor": { "name": "orders" } } }, { "type": "STREAM", "stream": { - "stream_state": { "updated_at": "2050-05-18T00:00:00" }, + "stream_state": { "updated_at": "2050-05-18T00:00:00Z" }, "stream_descriptor": { "name": "subscriptions" } } } diff --git a/airbyte-integrations/connectors/source-recharge/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-recharge/integration_tests/configured_catalog.json index 279ceb19c7e3c..6a716c1f8f36c 100644 --- a/airbyte-integrations/connectors/source-recharge/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-recharge/integration_tests/configured_catalog.json @@ -62,6 +62,19 @@ "destination_sync_mode": "append", "cursor_field": ["updated_at"] }, + { + "stream": { + "name": "onetimes", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": [["id"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append", + "cursor_field": ["updated_at"] + }, { "stream": { "name": "discounts", @@ -108,19 +121,6 @@ "sync_mode": "full_refresh", "destination_sync_mode": "overwrite" }, - { - "stream": { - "name": "subscriptions", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updated_at"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "destination_sync_mode": "append", - "cursor_field": ["updated_at"] - }, { "stream": { "name": "metafields", diff --git a/airbyte-integrations/connectors/source-recharge/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-recharge/integration_tests/expected_records.jsonl index 0187e149bfd89..fde8d4ba0dd25 100644 --- a/airbyte-integrations/connectors/source-recharge/integration_tests/expected_records.jsonl +++ b/airbyte-integrations/connectors/source-recharge/integration_tests/expected_records.jsonl @@ -1,14 +1,14 @@ -{"stream": "addresses", "data": {"id": 69105381, "customer_id": 64817252, "payment_method_id": 12482012, "address1": "1 9th Ave", "address2": "1", "city": "San Francisco", "company": null, "country_code": "US", "created_at": "2021-05-12T12:04:06+00:00", "discounts": [], "first_name": "Jane", "last_name": "Doe", "order_attributes": [], "order_note": null, "phone": "1234567890", "presentment_currency": "USD", "province": "California", "shipping_lines_conserved": [], "shipping_lines_override": [], "updated_at": "2023-01-16T09:59:09+00:00", "zip": "94118"}, "emitted_at": 1706644129288} -{"stream": "addresses", "data": {"id": 69282975, "customer_id": 64962974, "payment_method_id": 12482030, "address1": "1921 W Wilson St", "address2": null, "city": "Batavia", "company": null, "country_code": "US", "created_at": "2021-05-13T09:46:46+00:00", "discounts": [], "first_name": "Kelly", "last_name": "Kozakevich", "order_attributes": [], "order_note": null, "phone": "+16145550188", "presentment_currency": "USD", "province": "Illinois", "shipping_lines_conserved": [], "shipping_lines_override": [], "updated_at": "2023-05-13T04:07:34+00:00", "zip": "60510"}, "emitted_at": 1706644130026} -{"stream": "charges", "data": {"id": 386976088, "address_id": 69105381, "analytics_data": {"utm_params": []}, "billing_address": {"address1": "1 9th Ave", "address2": "1", "city": "San Francisco", "company": null, "country_code": "US", "first_name": "Karina", "last_name": "Kuznetsova", "phone": null, "province": "California", "zip": "94118"}, "charge_attempts": 6, "client_details": {"browser_ip": null, "user_agent": null}, "created_at": "2021-05-12T12:04:07+00:00", "currency": "USD", "customer": {"id": 64817252, "email": "nikolaevaka@yahoo.com", "external_customer_id": {"ecommerce": "5212085977259"}, "hash": "23dee52d73734a81"}, "discounts": [], "error": "None\r\n [May 12, 12:06AM] ['Inventory unavailable S / Black T1 6642695864491 requested qty. 1, inventory was: -1']\r\n [May 13, 4:10PM] ['Inventory unavailable S / Black T1 6642695864491 requested qty. 1, inventory was: -1']\r\n [May 19, 4:10PM] ['Inventory unavailable S / Black T1 6642695864491 requested qty. 1, inventory was: -1']\r\n [May 25, 4:10PM] ['Inventory unavailable S / Black T1 6642695864491 requested qty. 1, inventory was: -1']\r\n [May 31, 4:09PM] ['Inventory unavailable S / Black T1 6642695864491 requested qty. 1, inventory was: -1']\r\n [Jun 06, 4:10PM] ['Inventory unavailable S / Black T1 6642695864491 requested qty. 1, inventory was: -1']", "error_type": "CLOSED_MAX_RETRIES_REACHED", "external_order_id": {"ecommerce": null}, "external_transaction_id": {"payment_processor": null}, "external_variant_not_found": null, "has_uncommitted_changes": false, "last_charge_attempt": "2022-06-06T20:10:19+00:00", "line_items": [{"purchase_item_id": 153224593, "external_product_id": {"ecommerce": "6642695864491"}, "external_variant_id": {"ecommerce": "39684722131115"}, "grams": 0, "handle": null, "images": {"large": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_large.jpg", "medium": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_medium.jpg", "original": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581.jpg", "small": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_small.jpg"}, "original_price": "24.30", "properties": [], "purchase_item_type": "subscription", "quantity": 1, "sku": "T1", "tax_due": "0.00", "tax_lines": [], "taxable": true, "taxable_amount": "24.30", "title": "Airbit Box Corner Short sleeve t-shirt", "total_price": "24.30", "unit_price": "24.30", "unit_price_includes_tax": false, "variant_title": "S / Black"}], "merged_at": null, "note": null, "order_attributes": [], "orders_count": 0, "payment_processor": "shopify_payments", "processed_at": null, "retry_date": "2022-06-12T04:00:00+00:00", "scheduled_at": "2022-05-12", "shipping_address": {"address1": "1 9th Ave", "address2": "1", "city": "San Francisco", "company": null, "country_code": "US", "first_name": "Jane", "last_name": "Doe", "phone": "1234567890", "province": "California", "zip": "94118"}, "shipping_lines": [{"code": "Economy", "price": "4.90", "retrieved_at": null, "source": "shopify", "status": "active", "tax_lines": [], "taxable": false, "title": "Economy"}], "status": "error", "subtotal_price": "24.30", "tags": "Subscription, Subscription Recurring Order", "tax_lines": "[]", "taxable": true, "taxes_included": false, "total_discounts": "0.00", "total_duties": "0.00", "total_line_items_price": "24.30", "total_price": "29.20", "total_refunds": "0.00", "total_tax": "0.00", "total_weight_grams": 0, "type": "recurring", "updated_at": "2023-01-16T18:08:54+00:00"}, "emitted_at": 1706644132446} -{"stream": "charges", "data": {"id": 817715206, "address_id": 69282975, "analytics_data": {"utm_params": []}, "billing_address": {"address1": "1921 W Wilson St", "address2": null, "city": "Batavia", "company": null, "country_code": "US", "first_name": "Kelly", "last_name": "Kozakevich", "phone": "+16145550188", "province": "Illinois", "zip": "60510"}, "client_details": {"browser_ip": null, "user_agent": null}, "created_at": "2023-05-13T04:07:34+00:00", "currency": "USD", "customer": {"id": 64962974, "email": "kozakevich_k@example.com", "external_customer_id": {"ecommerce": "5213433266347"}, "hash": "f99bd4a6877257af"}, "discounts": [], "error": null, "error_type": null, "external_order_id": {"ecommerce": null}, "external_transaction_id": {"payment_processor": null}, "has_uncommitted_changes": false, "line_items": [{"purchase_item_id": 153601366, "external_product_id": {"ecommerce": "6642695864491"}, "external_variant_id": {"ecommerce": "39684723835051"}, "grams": 0, "handle": null, "images": {"large": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_large.jpg", "medium": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_medium.jpg", "original": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581.jpg", "small": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_small.jpg"}, "original_price": "24.30", "properties": [], "purchase_item_type": "subscription", "quantity": 1, "sku": "T3", "tax_due": "0.00", "tax_lines": [], "taxable": true, "taxable_amount": "24.30", "title": "Airbit Box Corner Short sleeve t-shirt", "total_price": "24.30", "unit_price": "24.30", "unit_price_includes_tax": false, "variant_title": "L / City Green"}], "merged_at": null, "note": null, "order_attributes": [], "orders_count": 0, "payment_processor": "shopify_payments", "processed_at": null, "retry_date": null, "scheduled_at": "2024-05-12", "shipping_address": {"address1": "1921 W Wilson St", "address2": null, "city": "Batavia", "company": null, "country_code": "US", "first_name": "Kelly", "last_name": "Kozakevich", "phone": "+16145550188", "province": "Illinois", "zip": "60510"}, "shipping_lines": [{"code": "Economy", "price": "4.90", "retrieved_at": null, "source": "shopify", "status": "active", "tax_lines": [], "taxable": false, "title": "Economy"}], "status": "queued", "subtotal_price": "24.30", "tags": "Subscription, Subscription Recurring Order", "tax_lines": "[]", "taxable": true, "taxes_included": false, "total_discounts": "0.00", "total_duties": "0.00", "total_line_items_price": "24.30", "total_price": "29.20", "total_refunds": "0.00", "total_tax": "0.00", "total_weight_grams": 0, "type": "recurring", "updated_at": "2023-05-13T04:07:47+00:00"}, "emitted_at": 1706644133275} -{"stream": "charges", "data": {"id": 580825303, "address_id": 69282975, "analytics_data": {"utm_params": []}, "billing_address": {"address1": "1921 W Wilson St", "address2": null, "city": "Batavia", "company": null, "country_code": "US", "first_name": "Kelly", "last_name": "Kozakevich", "phone": "+16145550188", "province": "Illinois", "zip": "60510"}, "client_details": {"browser_ip": null, "user_agent": null}, "created_at": "2022-05-13T04:07:39+00:00", "currency": "USD", "customer": {"id": 64962974, "email": "kozakevich_k@example.com", "external_customer_id": {"ecommerce": "5213433266347"}, "hash": "f99bd4a6877257af"}, "discounts": [], "error": null, "error_type": null, "external_order_id": {"ecommerce": "5006149877931"}, "external_transaction_id": {"payment_processor": "43114102955"}, "has_uncommitted_changes": false, "line_items": [{"purchase_item_id": 153601366, "external_product_id": {"ecommerce": "6642695864491"}, "external_variant_id": {"ecommerce": "39684723835051"}, "grams": null, "handle": null, "images": {"large": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_large.jpg", "medium": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_medium.jpg", "original": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581.jpg", "small": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_small.jpg"}, "original_price": "24.30", "properties": [], "purchase_item_type": "subscription", "quantity": 1, "sku": "T3", "tax_due": "0.00", "tax_lines": [], "taxable": false, "taxable_amount": "0.00", "title": "Airbit Box Corner Short sleeve t-shirt", "total_price": "24.30", "unit_price": "24.30", "unit_price_includes_tax": false, "variant_title": "L / City Green"}], "merged_at": null, "note": null, "order_attributes": [], "orders_count": 1, "payment_processor": "shopify_payments", "processed_at": "2023-05-13T04:07:33+00:00", "retry_date": null, "scheduled_at": "2023-05-13", "shipping_address": {"address1": "1921 W Wilson St", "address2": null, "city": "Batavia", "company": null, "country_code": "US", "first_name": "Kelly", "last_name": "Kozakevich", "phone": "+16145550188", "province": "Illinois", "zip": "60510"}, "shipping_lines": [{"code": "Economy", "price": "4.90", "retrieved_at": null, "source": "shopify", "status": "active", "tax_lines": [], "taxable": false, "title": "Economy"}], "status": "success", "subtotal_price": "24.30", "tags": "Subscription, Subscription Recurring Order", "tax_lines": "[]", "taxable": false, "taxes_included": false, "total_discounts": "0.00", "total_duties": "0.00", "total_line_items_price": "24.30", "total_price": "29.20", "total_refunds": "0.00", "total_tax": "0.00", "total_weight_grams": 0, "type": "recurring", "updated_at": "2023-05-13T04:16:51+00:00"}, "emitted_at": 1706644133278} -{"stream": "customers", "data": {"id": 64817252, "analytics_data": {"utm_params": []}, "apply_credit_to_next_checkout_charge": false, "apply_credit_to_next_recurring_charge": false, "created_at": "2021-05-12T12:04:06+00:00", "email": "nikolaevaka@yahoo.com", "external_customer_id": {"ecommerce": "5212085977259"}, "first_charge_processed_at": "2021-05-12T16:03:59+00:00", "first_name": "Karina", "has_payment_method_in_dunning": false, "has_valid_payment_method": true, "hash": "23dee52d73734a81", "last_name": "Kuznetsova", "phone": null, "subscriptions_active_count": 0, "subscriptions_total_count": 1, "tax_exempt": false, "updated_at": "2023-01-16T18:08:45+00:00"}, "emitted_at": 1706644139386} -{"stream": "customers", "data": {"id": 64962974, "analytics_data": {"utm_params": []}, "apply_credit_to_next_checkout_charge": false, "apply_credit_to_next_recurring_charge": false, "created_at": "2021-05-13T09:46:44+00:00", "email": "kozakevich_k@example.com", "external_customer_id": {"ecommerce": "5213433266347"}, "first_charge_processed_at": "2021-05-13T13:46:39+00:00", "first_name": "Kelly", "has_payment_method_in_dunning": false, "has_valid_payment_method": true, "hash": "f99bd4a6877257af", "last_name": "Kozakevich", "phone": "+16145550188", "subscriptions_active_count": 1, "subscriptions_total_count": 1, "tax_exempt": false, "updated_at": "2023-05-13T04:16:36+00:00"}, "emitted_at": 1706644140190} -{"stream": "metafields", "data": {"id": 3627108, "owner_id": "64962974", "created_at": "2023-04-10T07:10:45", "description": "customer_phone_number", "key": "phone_number", "namespace": "personal_info", "owner_resource": "customer", "updated_at": "2023-04-10T07:10:45", "value": "3103103101", "value_type": "integer"}, "emitted_at": 1706644151126} -{"stream": "orders", "data": {"address_id": 69282975, "address_is_active": 1, "billing_address": {"address1": "1921 W Wilson St", "address2": null, "city": "Batavia", "company": null, "country": "United States", "first_name": "Kelly", "last_name": "Kozakevich", "phone": "+16145550188", "province": "Illinois", "zip": "60510"}, "browser_ip": null, "charge_id": 580825303, "charge_status": "SUCCESS", "created_at": "2023-05-13T00:07:28", "currency": "USD", "customer": {"accepts_marketing": true, "email": "kozakevich_k@example.com", "first_name": "Kelly", "last_name": "Kozakevich", "phone": null, "send_email_welcome": false, "verified_email": true}, "customer_id": 64962974, "discount_codes": null, "email": "kozakevich_k@example.com", "error": null, "first_name": "Kelly", "hash": "f99bd4a6877257af", "id": 534919106, "is_prepaid": 0, "last_name": "Kozakevich", "line_items": [{"external_inventory_policy": "decrement_obeying_policy", "images": {"large": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_large.jpg", "medium": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_medium.jpg", "original": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581.jpg", "small": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_small.jpg"}, "original_price": 24.3, "price": 24.3, "product_title": "Airbit Box Corner Short sleeve t-shirt", "properties": [], "quantity": 1, "shopify_product_id": "6642695864491", "shopify_variant_id": "39684723835051", "sku": "T3", "subscription_id": 153601366, "tax_lines": [], "title": "Airbit Box Corner Short sleeve t-shirt", "variant_title": "L / City Green"}], "note": null, "note_attributes": [], "payment_processor": "shopify_payments", "processed_at": "2023-05-13T00:07:33", "scheduled_at": "2023-05-13T00:00:00", "shipped_date": "2023-05-13T00:07:33", "shipping_address": {"address1": "1921 W Wilson St", "address2": "", "city": "Batavia", "company": null, "country": "United States", "first_name": "Kelly", "last_name": "Kozakevich", "phone": "+16145550188", "province": "Illinois", "zip": "60510"}, "shipping_date": "2023-05-13T00:00:00", "shipping_lines": [{"code": "Economy", "price": "4.90", "source": "shopify", "title": "Economy"}], "shopify_cart_token": null, "shopify_customer_id": "5213433266347", "shopify_id": "5006149877931", "shopify_order_id": "5006149877931", "shopify_order_number": 1016, "status": "SUCCESS", "subtotal_price": 24.3, "tags": "Subscription, Subscription Recurring Order", "tax_lines": [], "total_discounts": 0.0, "total_duties": "0.0", "total_line_items_price": 24.3, "total_price": 29.2, "total_refunds": null, "total_tax": "0.0", "total_weight": 0, "transaction_id": "43114102955", "type": "RECURRING", "updated_at": "2023-05-13T00:16:51"}, "emitted_at": 1706644162075} -{"stream": "products", "data": {"collection_id": null, "created_at": "2021-05-13T08:08:28", "discount_amount": 5.0, "discount_type": "percentage", "handle": "airbit-box-corner-short-sleeve-t-shirt", "id": 1853649, "images": {"large": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_large.jpg", "medium": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_medium.jpg", "original": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581.jpg", "small": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_small.jpg"}, "product_id": 6642695864491, "shopify_product_id": 6642695864491, "subscription_defaults": {"apply_cutoff_date_to_checkout": false, "charge_interval_frequency": 30, "cutoff_day_of_month": null, "cutoff_day_of_week": null, "expire_after_specific_number_of_charges": null, "modifiable_properties": [], "number_charges_until_expiration": null, "order_day_of_month": null, "order_day_of_week": null, "order_interval_frequency_options": ["30"], "order_interval_unit": "day", "storefront_purchase_options": "subscription_and_onetime"}, "title": "Airbit Box Corner Short sleeve t-shirt", "updated_at": "2021-05-13T08:08:28"}, "emitted_at": 1706644170248} -{"stream": "products", "data": {"collection_id": null, "created_at": "2021-05-13T07:27:34", "discount_amount": 5.0, "discount_type": "percentage", "handle": "i-make-beats-wool-blend-snapback", "id": 1853639, "images": {"large": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/c_black1_large.jpg", "medium": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/c_black1_medium.jpg", "original": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/c_black1.jpg", "small": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/c_black1_small.jpg"}, "product_id": 6644278001835, "shopify_product_id": 6644278001835, "subscription_defaults": {"apply_cutoff_date_to_checkout": false, "charge_interval_frequency": 30, "cutoff_day_of_month": null, "cutoff_day_of_week": null, "expire_after_specific_number_of_charges": null, "modifiable_properties": [], "number_charges_until_expiration": null, "order_day_of_month": null, "order_day_of_week": null, "order_interval_frequency_options": ["30"], "order_interval_unit": "day", "storefront_purchase_options": "subscription_and_onetime"}, "title": "I Make Beats Wool Blend Snapback", "updated_at": "2021-05-13T07:27:34"}, "emitted_at": 1706644170251} -{"stream": "products", "data": {"collection_id": null, "created_at": "2021-05-13T08:20:10", "discount_amount": 0.0, "discount_type": "percentage", "handle": "new-mug", "id": 1853655, "images": {"large": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/m_black_red_large.jpg", "medium": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/m_black_red_medium.jpg", "original": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/m_black_red.jpg", "small": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/m_black_red_small.jpg"}, "product_id": 6688261701803, "shopify_product_id": 6688261701803, "subscription_defaults": {"apply_cutoff_date_to_checkout": false, "charge_interval_frequency": 30, "cutoff_day_of_month": null, "cutoff_day_of_week": null, "expire_after_specific_number_of_charges": null, "modifiable_properties": [], "number_charges_until_expiration": null, "order_day_of_month": null, "order_day_of_week": null, "order_interval_frequency_options": ["30"], "order_interval_unit": "day", "storefront_purchase_options": "subscription_and_onetime"}, "title": "NEW!!! MUG", "updated_at": "2021-05-13T08:20:10"}, "emitted_at": 1706644170252} -{"stream": "shop", "data": {"shop": {"allow_customers_to_skip_delivery": 1, "checkout_logo_url": null, "created_at": "Wed, 21 Apr 2021 11:44:38 GMT", "currency": "USD", "customer_portal_domain": "", "disabled_currencies_historical": [], "domain": "airbyte.myshopify.com", "email": "integration-test@airbyte.io", "enabled_presentment_currencies": ["USD"], "enabled_presentment_currencies_symbols": [{"currency": "USD", "location": "before", "suffix": " USD", "symbol": "$"}], "external_platform": "shopify", "iana_timezone": "Europe/Zaporozhye", "id": 126593, "my_shopify_domain": "airbyte.myshopify.com", "name": "airbyte", "payment_processor": "shopify_payments", "platform_domain": "airbyte.myshopify.com", "shop_email": "integration-test@airbyte.io", "shop_phone": "1111111111", "subscriptions_enabled": 1, "test_mode": false, "timezone": "(GMT+02:00) Europe/Zaporozhye", "updated_at": "Tue, 30 Jan 2024 18:56:54 GMT"}, "store": {"checkout_logo_url": null, "checkout_platform": "shopify", "created_at": "Wed, 21 Apr 2021 11:44:38 GMT", "currency": "USD", "customer_portal_domain": "", "disabled_currencies_historical": [], "domain": "airbyte.myshopify.com", "email": "integration-test@airbyte.io", "enabled_presentment_currencies": ["USD"], "enabled_presentment_currencies_symbols": [{"currency": "USD", "location": "before", "suffix": " USD", "symbol": "$"}], "external_platform": "shopify", "iana_timezone": "Europe/Zaporozhye", "id": 126593, "my_shopify_domain": "airbyte.myshopify.com", "name": "airbyte", "payment_processor": "shopify_payments", "platform_domain": "airbyte.myshopify.com", "shop_email": "integration-test@airbyte.io", "shop_phone": "1111111111", "subscriptions_enabled": 1, "test_mode": false, "timezone": "(GMT+02:00) Europe/Zaporozhye", "updated_at": "Tue, 30 Jan 2024 18:56:54 GMT"}}, "emitted_at": 1709114164153} -{"stream": "subscriptions", "data": {"id": 153601366, "address_id": 69282975, "customer_id": 64962974, "analytics_data": {"utm_params": []}, "cancellation_reason": null, "cancellation_reason_comments": null, "cancelled_at": null, "charge_interval_frequency": "365", "created_at": "2021-05-13T09:46:47+00:00", "expire_after_specific_number_of_charges": null, "external_product_id": {"ecommerce": "6642695864491"}, "external_variant_id": {"ecommerce": "39684723835051"}, "has_queued_charges": 1, "is_prepaid": false, "is_skippable": true, "is_swappable": false, "max_retries_reached": 0, "next_charge_scheduled_at": "2024-05-12", "order_day_of_month": null, "order_day_of_week": null, "order_interval_frequency": "365", "order_interval_unit": "day", "presentment_currency": "USD", "price": 24.3, "product_title": "Airbit Box Corner Short sleeve t-shirt", "properties": [], "quantity": 1, "sku": null, "sku_override": false, "status": "active", "updated_at": "2023-05-13T04:07:32+00:00", "variant_title": "L / City Green"}, "emitted_at": 1706644181724} +{"stream": "addresses", "data": {"id": 69282975, "customer_id": 64962974, "payment_method_id": 12482030, "address1": "1921 W Wilson St", "address2": null, "city": "Batavia", "company": null, "country_code": "US", "created_at": "2021-05-13T09:46:46+00:00", "discounts": [], "first_name": "Kelly", "last_name": "Kozakevich", "order_attributes": [], "order_note": null, "phone": "+16145550188", "presentment_currency": "USD", "province": "Illinois", "shipping_lines_conserved": [], "shipping_lines_override": [], "updated_at": "2023-05-13T04:07:34+00:00", "zip": "60510"}, "emitted_at": 1709035723343} +{"stream": "addresses", "data": {"id": 69105381, "customer_id": 64817252, "payment_method_id": 12482012, "address1": "1 9th Ave", "address2": "1", "city": "San Francisco", "company": null, "country_code": "US", "created_at": "2021-05-12T12:04:06+00:00", "discounts": [], "first_name": "Jane", "last_name": "Doe", "order_attributes": [], "order_note": null, "phone": "1234567890", "presentment_currency": "USD", "province": "California", "shipping_lines_conserved": [], "shipping_lines_override": [], "updated_at": "2023-01-16T09:59:09+00:00", "zip": "94118"}, "emitted_at": 1709035723348} +{"stream": "charges", "data": {"id": 817715206, "address_id": 69282975, "analytics_data": {"utm_params": []}, "billing_address": {"address1": "1921 W Wilson St", "address2": null, "city": "Batavia", "company": null, "country_code": "US", "first_name": "Kelly", "last_name": "Kozakevich", "phone": "+16145550188", "province": "Illinois", "zip": "60510"}, "client_details": {"browser_ip": null, "user_agent": null}, "created_at": "2023-05-13T04:07:34+00:00", "currency": "USD", "customer": {"id": 64962974, "email": "kozakevich_k@example.com", "external_customer_id": {"ecommerce": "5213433266347"}, "hash": "f99bd4a6877257af"}, "discounts": [], "error": null, "error_type": null, "external_order_id": {"ecommerce": null}, "external_transaction_id": {"payment_processor": null}, "has_uncommitted_changes": false, "line_items": [{"purchase_item_id": 153601366, "external_product_id": {"ecommerce": "6642695864491"}, "external_variant_id": {"ecommerce": "39684723835051"}, "grams": 0, "handle": null, "images": {"large": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_large.jpg", "medium": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_medium.jpg", "original": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581.jpg", "small": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_small.jpg"}, "original_price": "24.30", "properties": [], "purchase_item_type": "subscription", "quantity": 1, "sku": "T3", "tax_due": "0.00", "tax_lines": [], "taxable": true, "taxable_amount": "24.30", "title": "Airbit Box Corner Short sleeve t-shirt", "total_price": "24.30", "unit_price": "24.30", "unit_price_includes_tax": false, "variant_title": "L / City Green"}], "merged_at": null, "note": null, "order_attributes": [], "orders_count": 0, "payment_processor": "shopify_payments", "processed_at": null, "retry_date": null, "scheduled_at": "2024-05-12", "shipping_address": {"address1": "1921 W Wilson St", "address2": null, "city": "Batavia", "company": null, "country_code": "US", "first_name": "Kelly", "last_name": "Kozakevich", "phone": "+16145550188", "province": "Illinois", "zip": "60510"}, "shipping_lines": [{"code": "Economy", "price": "4.90", "retrieved_at": null, "source": "shopify", "status": "active", "tax_lines": [], "taxable": false, "title": "Economy"}], "status": "queued", "subtotal_price": "24.30", "tags": "Subscription, Subscription Recurring Order", "tax_lines": "[]", "taxable": true, "taxes_included": false, "total_discounts": "0.00", "total_duties": "0.00", "total_line_items_price": "24.30", "total_price": "29.20", "total_refunds": "0.00", "total_tax": "0.00", "total_weight_grams": 0, "type": "recurring", "updated_at": "2023-05-13T04:07:47+00:00"}, "emitted_at": 1709035724071} +{"stream": "charges", "data": {"id": 580825303, "address_id": 69282975, "analytics_data": {"utm_params": []}, "billing_address": {"address1": "1921 W Wilson St", "address2": null, "city": "Batavia", "company": null, "country_code": "US", "first_name": "Kelly", "last_name": "Kozakevich", "phone": "+16145550188", "province": "Illinois", "zip": "60510"}, "client_details": {"browser_ip": null, "user_agent": null}, "created_at": "2022-05-13T04:07:39+00:00", "currency": "USD", "customer": {"id": 64962974, "email": "kozakevich_k@example.com", "external_customer_id": {"ecommerce": "5213433266347"}, "hash": "f99bd4a6877257af"}, "discounts": [], "error": null, "error_type": null, "external_order_id": {"ecommerce": "5006149877931"}, "external_transaction_id": {"payment_processor": "43114102955"}, "has_uncommitted_changes": false, "line_items": [{"purchase_item_id": 153601366, "external_product_id": {"ecommerce": "6642695864491"}, "external_variant_id": {"ecommerce": "39684723835051"}, "grams": null, "handle": null, "images": {"large": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_large.jpg", "medium": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_medium.jpg", "original": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581.jpg", "small": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_small.jpg"}, "original_price": "24.30", "properties": [], "purchase_item_type": "subscription", "quantity": 1, "sku": "T3", "tax_due": "0.00", "tax_lines": [], "taxable": false, "taxable_amount": "0.00", "title": "Airbit Box Corner Short sleeve t-shirt", "total_price": "24.30", "unit_price": "24.30", "unit_price_includes_tax": false, "variant_title": "L / City Green"}], "merged_at": null, "note": null, "order_attributes": [], "orders_count": 1, "payment_processor": "shopify_payments", "processed_at": "2023-05-13T04:07:33+00:00", "retry_date": null, "scheduled_at": "2023-05-13", "shipping_address": {"address1": "1921 W Wilson St", "address2": null, "city": "Batavia", "company": null, "country_code": "US", "first_name": "Kelly", "last_name": "Kozakevich", "phone": "+16145550188", "province": "Illinois", "zip": "60510"}, "shipping_lines": [{"code": "Economy", "price": "4.90", "retrieved_at": null, "source": "shopify", "status": "active", "tax_lines": [], "taxable": false, "title": "Economy"}], "status": "success", "subtotal_price": "24.30", "tags": "Subscription, Subscription Recurring Order", "tax_lines": "[]", "taxable": false, "taxes_included": false, "total_discounts": "0.00", "total_duties": "0.00", "total_line_items_price": "24.30", "total_price": "29.20", "total_refunds": "0.00", "total_tax": "0.00", "total_weight_grams": 0, "type": "recurring", "updated_at": "2023-05-13T04:16:51+00:00"}, "emitted_at": 1709035724078} +{"stream": "charges", "data": {"id": 386976088, "address_id": 69105381, "analytics_data": {"utm_params": []}, "billing_address": {"address1": "1 9th Ave", "address2": "1", "city": "San Francisco", "company": null, "country_code": "US", "first_name": "Karina", "last_name": "Kuznetsova", "phone": null, "province": "California", "zip": "94118"}, "charge_attempts": 6, "client_details": {"browser_ip": null, "user_agent": null}, "created_at": "2021-05-12T12:04:07+00:00", "currency": "USD", "customer": {"id": 64817252, "email": "nikolaevaka@yahoo.com", "external_customer_id": {"ecommerce": "5212085977259"}, "hash": "23dee52d73734a81"}, "discounts": [], "error": "None\r\n [May 12, 12:06AM] ['Inventory unavailable S / Black T1 6642695864491 requested qty. 1, inventory was: -1']\r\n [May 13, 4:10PM] ['Inventory unavailable S / Black T1 6642695864491 requested qty. 1, inventory was: -1']\r\n [May 19, 4:10PM] ['Inventory unavailable S / Black T1 6642695864491 requested qty. 1, inventory was: -1']\r\n [May 25, 4:10PM] ['Inventory unavailable S / Black T1 6642695864491 requested qty. 1, inventory was: -1']\r\n [May 31, 4:09PM] ['Inventory unavailable S / Black T1 6642695864491 requested qty. 1, inventory was: -1']\r\n [Jun 06, 4:10PM] ['Inventory unavailable S / Black T1 6642695864491 requested qty. 1, inventory was: -1']", "error_type": "CLOSED_MAX_RETRIES_REACHED", "external_order_id": {"ecommerce": null}, "external_transaction_id": {"payment_processor": null}, "external_variant_not_found": null, "has_uncommitted_changes": false, "last_charge_attempt": "2022-06-06T20:10:19+00:00", "line_items": [{"purchase_item_id": 153224593, "external_product_id": {"ecommerce": "6642695864491"}, "external_variant_id": {"ecommerce": "39684722131115"}, "grams": 0, "handle": null, "images": {"large": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_large.jpg", "medium": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_medium.jpg", "original": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581.jpg", "small": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_small.jpg"}, "original_price": "24.30", "properties": [], "purchase_item_type": "subscription", "quantity": 1, "sku": "T1", "tax_due": "0.00", "tax_lines": [], "taxable": true, "taxable_amount": "24.30", "title": "Airbit Box Corner Short sleeve t-shirt", "total_price": "24.30", "unit_price": "24.30", "unit_price_includes_tax": false, "variant_title": "S / Black"}], "merged_at": null, "note": null, "order_attributes": [], "orders_count": 0, "payment_processor": "shopify_payments", "processed_at": null, "retry_date": "2022-06-12T04:00:00+00:00", "scheduled_at": "2022-05-12", "shipping_address": {"address1": "1 9th Ave", "address2": "1", "city": "San Francisco", "company": null, "country_code": "US", "first_name": "Jane", "last_name": "Doe", "phone": "1234567890", "province": "California", "zip": "94118"}, "shipping_lines": [{"code": "Economy", "price": "4.90", "retrieved_at": null, "source": "shopify", "status": "active", "tax_lines": [], "taxable": false, "title": "Economy"}], "status": "error", "subtotal_price": "24.30", "tags": "Subscription, Subscription Recurring Order", "tax_lines": "[]", "taxable": true, "taxes_included": false, "total_discounts": "0.00", "total_duties": "0.00", "total_line_items_price": "24.30", "total_price": "29.20", "total_refunds": "0.00", "total_tax": "0.00", "total_weight_grams": 0, "type": "recurring", "updated_at": "2023-01-16T18:08:54+00:00"}, "emitted_at": 1709035724083} +{"stream": "customers", "data": {"id": 64962974, "analytics_data": {"utm_params": []}, "apply_credit_to_next_checkout_charge": false, "apply_credit_to_next_recurring_charge": false, "created_at": "2021-05-13T09:46:44+00:00", "email": "kozakevich_k@example.com", "external_customer_id": {"ecommerce": "5213433266347"}, "first_charge_processed_at": "2021-05-13T13:46:39+00:00", "first_name": "Kelly", "has_payment_method_in_dunning": false, "has_valid_payment_method": true, "hash": "f99bd4a6877257af", "last_name": "Kozakevich", "phone": "+16145550188", "subscriptions_active_count": 1, "subscriptions_total_count": 1, "tax_exempt": false, "updated_at": "2023-05-13T04:16:36+00:00"}, "emitted_at": 1709035725565} +{"stream": "customers", "data": {"id": 64817252, "analytics_data": {"utm_params": []}, "apply_credit_to_next_checkout_charge": false, "apply_credit_to_next_recurring_charge": false, "created_at": "2021-05-12T12:04:06+00:00", "email": "nikolaevaka@yahoo.com", "external_customer_id": {"ecommerce": "5212085977259"}, "first_charge_processed_at": "2021-05-12T16:03:59+00:00", "first_name": "Karina", "has_payment_method_in_dunning": false, "has_valid_payment_method": true, "hash": "23dee52d73734a81", "last_name": "Kuznetsova", "phone": null, "subscriptions_active_count": 0, "subscriptions_total_count": 1, "tax_exempt": false, "updated_at": "2023-01-16T18:08:45+00:00"}, "emitted_at": 1709035725569} +{"stream": "metafields", "data": {"id": 3627108, "owner_id": "64962974", "created_at": "2023-04-10T07:10:45", "description": "customer_phone_number", "key": "phone_number", "namespace": "personal_info", "owner_resource": "customer", "updated_at": "2023-04-10T07:10:45", "value": "3103103101", "value_type": "integer"}, "emitted_at": 1709035727500} +{"stream": "products", "data": {"collection_id": null, "created_at": "2021-05-13T08:08:28", "discount_amount": 5.0, "discount_type": "percentage", "handle": "airbit-box-corner-short-sleeve-t-shirt", "id": 1853649, "images": {"large": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_large.jpg", "medium": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_medium.jpg", "original": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581.jpg", "small": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_small.jpg"}, "product_id": 6642695864491, "shopify_product_id": 6642695864491, "subscription_defaults": {"apply_cutoff_date_to_checkout": false, "charge_interval_frequency": 30, "cutoff_day_of_month": null, "cutoff_day_of_week": null, "expire_after_specific_number_of_charges": null, "modifiable_properties": [], "number_charges_until_expiration": null, "order_day_of_month": null, "order_day_of_week": null, "order_interval_frequency_options": ["30"], "order_interval_unit": "day", "storefront_purchase_options": "subscription_and_onetime"}, "title": "Airbit Box Corner Short sleeve t-shirt", "updated_at": "2021-05-13T08:08:28"}, "emitted_at": 1709035729322} +{"stream": "products", "data": {"collection_id": null, "created_at": "2021-05-13T07:27:34", "discount_amount": 5.0, "discount_type": "percentage", "handle": "i-make-beats-wool-blend-snapback", "id": 1853639, "images": {"large": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/c_black1_large.jpg", "medium": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/c_black1_medium.jpg", "original": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/c_black1.jpg", "small": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/c_black1_small.jpg"}, "product_id": 6644278001835, "shopify_product_id": 6644278001835, "subscription_defaults": {"apply_cutoff_date_to_checkout": false, "charge_interval_frequency": 30, "cutoff_day_of_month": null, "cutoff_day_of_week": null, "expire_after_specific_number_of_charges": null, "modifiable_properties": [], "number_charges_until_expiration": null, "order_day_of_month": null, "order_day_of_week": null, "order_interval_frequency_options": ["30"], "order_interval_unit": "day", "storefront_purchase_options": "subscription_and_onetime"}, "title": "I Make Beats Wool Blend Snapback", "updated_at": "2021-05-13T07:27:34"}, "emitted_at": 1709035729325} +{"stream": "products", "data": {"collection_id": null, "created_at": "2021-05-13T08:20:10", "discount_amount": 0.0, "discount_type": "percentage", "handle": "new-mug", "id": 1853655, "images": {"large": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/m_black_red_large.jpg", "medium": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/m_black_red_medium.jpg", "original": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/m_black_red.jpg", "small": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/m_black_red_small.jpg"}, "product_id": 6688261701803, "shopify_product_id": 6688261701803, "subscription_defaults": {"apply_cutoff_date_to_checkout": false, "charge_interval_frequency": 30, "cutoff_day_of_month": null, "cutoff_day_of_week": null, "expire_after_specific_number_of_charges": null, "modifiable_properties": [], "number_charges_until_expiration": null, "order_day_of_month": null, "order_day_of_week": null, "order_interval_frequency_options": ["30"], "order_interval_unit": "day", "storefront_purchase_options": "subscription_and_onetime"}, "title": "NEW!!! MUG", "updated_at": "2021-05-13T08:20:10"}, "emitted_at": 1709035729328} +{"stream": "shop", "data": {"shop": {"allow_customers_to_skip_delivery": 1, "checkout_logo_url": null, "created_at": "Wed, 21 Apr 2021 11:44:38 GMT", "currency": "USD", "customer_portal_domain": "", "disabled_currencies_historical": [], "domain": "airbyte.myshopify.com", "email": "integration-test@airbyte.io", "enabled_presentment_currencies": ["USD"], "enabled_presentment_currencies_symbols": [{"currency": "USD", "location": "before", "suffix": " USD", "symbol": "$"}], "external_platform": "shopify", "iana_timezone": "Europe/Zaporozhye", "id": 126593, "my_shopify_domain": "airbyte.myshopify.com", "name": "airbyte", "payment_processor": "shopify_payments", "platform_domain": "airbyte.myshopify.com", "shop_email": "integration-test@airbyte.io", "shop_phone": "1111111111", "subscriptions_enabled": 1, "test_mode": false, "timezone": "(GMT+02:00) Europe/Zaporozhye", "updated_at": "Tue, 30 Jan 2024 18:56:54 GMT"}, "store": {"checkout_logo_url": null, "checkout_platform": "shopify", "created_at": "Wed, 21 Apr 2021 11:44:38 GMT", "currency": "USD", "customer_portal_domain": "", "disabled_currencies_historical": [], "domain": "airbyte.myshopify.com", "email": "integration-test@airbyte.io", "enabled_presentment_currencies": ["USD"], "enabled_presentment_currencies_symbols": [{"currency": "USD", "location": "before", "suffix": " USD", "symbol": "$"}], "external_platform": "shopify", "iana_timezone": "Europe/Zaporozhye", "id": 126593, "my_shopify_domain": "airbyte.myshopify.com", "name": "airbyte", "payment_processor": "shopify_payments", "platform_domain": "airbyte.myshopify.com", "shop_email": "integration-test@airbyte.io", "shop_phone": "1111111111", "subscriptions_enabled": 1, "test_mode": false, "timezone": "(GMT+02:00) Europe/Zaporozhye", "updated_at": "Tue, 30 Jan 2024 18:56:54 GMT"}}, "emitted_at": 1709035729971} +{"stream": "subscriptions", "data": {"id": 153601366, "address_id": 69282975, "customer_id": 64962974, "analytics_data": {"utm_params": []}, "cancellation_reason": null, "cancellation_reason_comments": null, "cancelled_at": null, "charge_interval_frequency": "365", "created_at": "2021-05-13T09:46:47+00:00", "expire_after_specific_number_of_charges": null, "external_product_id": {"ecommerce": "6642695864491"}, "external_variant_id": {"ecommerce": "39684723835051"}, "has_queued_charges": 1, "is_prepaid": false, "is_skippable": true, "is_swappable": false, "max_retries_reached": 0, "next_charge_scheduled_at": "2024-05-12", "order_day_of_month": null, "order_day_of_week": null, "order_interval_frequency": "365", "order_interval_unit": "day", "presentment_currency": "USD", "price": 24.3, "product_title": "Airbit Box Corner Short sleeve t-shirt", "properties": [], "quantity": 1, "sku": null, "sku_override": false, "status": "active", "updated_at": "2023-05-13T04:07:32+00:00", "variant_title": "L / City Green"}, "emitted_at": 1709035730656} +{"stream": "orders", "data": {"address_id": 69282975, "address_is_active": 1, "billing_address": {"address1": "1921 W Wilson St", "address2": null, "city": "Batavia", "company": null, "country": "United States", "first_name": "Kelly", "last_name": "Kozakevich", "phone": "+16145550188", "province": "Illinois", "zip": "60510"}, "browser_ip": null, "charge_id": 580825303, "charge_status": "SUCCESS", "created_at": "2023-05-13T00:07:28", "currency": "USD", "customer": {"accepts_marketing": true, "email": "kozakevich_k@example.com", "first_name": "Kelly", "last_name": "Kozakevich", "phone": null, "send_email_welcome": false, "verified_email": true}, "customer_id": 64962974, "discount_codes": null, "email": "kozakevich_k@example.com", "error": null, "first_name": "Kelly", "hash": "f99bd4a6877257af", "id": 534919106, "is_prepaid": 0, "last_name": "Kozakevich", "line_items": [{"external_inventory_policy": "decrement_obeying_policy", "images": {"large": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_large.jpg", "medium": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_medium.jpg", "original": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581.jpg", "small": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_small.jpg"}, "original_price": 24.3, "price": 24.3, "product_title": "Airbit Box Corner Short sleeve t-shirt", "properties": [], "quantity": 1, "shopify_product_id": "6642695864491", "shopify_variant_id": "39684723835051", "sku": "T3", "subscription_id": 153601366, "tax_lines": [], "title": "Airbit Box Corner Short sleeve t-shirt", "variant_title": "L / City Green"}], "note": null, "note_attributes": [], "payment_processor": "shopify_payments", "processed_at": "2023-05-13T00:07:33", "scheduled_at": "2023-05-13T00:00:00", "shipped_date": "2023-05-13T00:07:33", "shipping_address": {"address1": "1921 W Wilson St", "address2": "", "city": "Batavia", "company": null, "country": "United States", "first_name": "Kelly", "last_name": "Kozakevich", "phone": "+16145550188", "province": "Illinois", "zip": "60510"}, "shipping_date": "2023-05-13T00:00:00", "shipping_lines": [{"code": "Economy", "price": "4.90", "source": "shopify", "title": "Economy"}], "shopify_cart_token": null, "shopify_customer_id": "5213433266347", "shopify_id": "5006149877931", "shopify_order_id": "5006149877931", "shopify_order_number": 1016, "status": "SUCCESS", "subtotal_price": 24.3, "tags": "Subscription, Subscription Recurring Order", "tax_lines": [], "total_discounts": 0.0, "total_duties": "0.0", "total_line_items_price": 24.3, "total_price": 29.2, "total_refunds": null, "total_tax": "0.0", "total_weight": 0, "transaction_id": "43114102955", "type": "RECURRING", "updated_at": "2023-05-13T00:16:51"}, "emitted_at": 1709035732348} diff --git a/airbyte-integrations/connectors/source-recharge/integration_tests/expected_records_orders_modern_api.jsonl b/airbyte-integrations/connectors/source-recharge/integration_tests/expected_records_orders_modern_api.jsonl index e4230976f8bc8..93fc0f46961a0 100644 --- a/airbyte-integrations/connectors/source-recharge/integration_tests/expected_records_orders_modern_api.jsonl +++ b/airbyte-integrations/connectors/source-recharge/integration_tests/expected_records_orders_modern_api.jsonl @@ -1,14 +1,14 @@ -{"stream": "addresses", "data": {"id": 69105381, "customer_id": 64817252, "payment_method_id": 12482012, "address1": "1 9th Ave", "address2": "1", "city": "San Francisco", "company": null, "country_code": "US", "created_at": "2021-05-12T12:04:06+00:00", "discounts": [], "first_name": "Jane", "last_name": "Doe", "order_attributes": [], "order_note": null, "phone": "1234567890", "presentment_currency": "USD", "province": "California", "shipping_lines_conserved": [], "shipping_lines_override": [], "updated_at": "2023-01-16T09:59:09+00:00", "zip": "94118"}, "emitted_at": 1706644270838} -{"stream": "addresses", "data": {"id": 69282975, "customer_id": 64962974, "payment_method_id": 12482030, "address1": "1921 W Wilson St", "address2": null, "city": "Batavia", "company": null, "country_code": "US", "created_at": "2021-05-13T09:46:46+00:00", "discounts": [], "first_name": "Kelly", "last_name": "Kozakevich", "order_attributes": [], "order_note": null, "phone": "+16145550188", "presentment_currency": "USD", "province": "Illinois", "shipping_lines_conserved": [], "shipping_lines_override": [], "updated_at": "2023-05-13T04:07:34+00:00", "zip": "60510"}, "emitted_at": 1706644271610} -{"stream": "charges", "data": {"id": 386976088, "address_id": 69105381, "analytics_data": {"utm_params": []}, "billing_address": {"address1": "1 9th Ave", "address2": "1", "city": "San Francisco", "company": null, "country_code": "US", "first_name": "Karina", "last_name": "Kuznetsova", "phone": null, "province": "California", "zip": "94118"}, "charge_attempts": 6, "client_details": {"browser_ip": null, "user_agent": null}, "created_at": "2021-05-12T12:04:07+00:00", "currency": "USD", "customer": {"id": 64817252, "email": "nikolaevaka@yahoo.com", "external_customer_id": {"ecommerce": "5212085977259"}, "hash": "23dee52d73734a81"}, "discounts": [], "error": "None\r\n [May 12, 12:06AM] ['Inventory unavailable S / Black T1 6642695864491 requested qty. 1, inventory was: -1']\r\n [May 13, 4:10PM] ['Inventory unavailable S / Black T1 6642695864491 requested qty. 1, inventory was: -1']\r\n [May 19, 4:10PM] ['Inventory unavailable S / Black T1 6642695864491 requested qty. 1, inventory was: -1']\r\n [May 25, 4:10PM] ['Inventory unavailable S / Black T1 6642695864491 requested qty. 1, inventory was: -1']\r\n [May 31, 4:09PM] ['Inventory unavailable S / Black T1 6642695864491 requested qty. 1, inventory was: -1']\r\n [Jun 06, 4:10PM] ['Inventory unavailable S / Black T1 6642695864491 requested qty. 1, inventory was: -1']", "error_type": "CLOSED_MAX_RETRIES_REACHED", "external_order_id": {"ecommerce": null}, "external_transaction_id": {"payment_processor": null}, "external_variant_not_found": null, "has_uncommitted_changes": false, "last_charge_attempt": "2022-06-06T20:10:19+00:00", "line_items": [{"purchase_item_id": 153224593, "external_product_id": {"ecommerce": "6642695864491"}, "external_variant_id": {"ecommerce": "39684722131115"}, "grams": 0, "handle": null, "images": {"large": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_large.jpg", "medium": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_medium.jpg", "original": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581.jpg", "small": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_small.jpg"}, "original_price": "24.30", "properties": [], "purchase_item_type": "subscription", "quantity": 1, "sku": "T1", "tax_due": "0.00", "tax_lines": [], "taxable": true, "taxable_amount": "24.30", "title": "Airbit Box Corner Short sleeve t-shirt", "total_price": "24.30", "unit_price": "24.30", "unit_price_includes_tax": false, "variant_title": "S / Black"}], "merged_at": null, "note": null, "order_attributes": [], "orders_count": 0, "payment_processor": "shopify_payments", "processed_at": null, "retry_date": "2022-06-12T04:00:00+00:00", "scheduled_at": "2022-05-12", "shipping_address": {"address1": "1 9th Ave", "address2": "1", "city": "San Francisco", "company": null, "country_code": "US", "first_name": "Jane", "last_name": "Doe", "phone": "1234567890", "province": "California", "zip": "94118"}, "shipping_lines": [{"code": "Economy", "price": "4.90", "retrieved_at": null, "source": "shopify", "status": "active", "tax_lines": [], "taxable": false, "title": "Economy"}], "status": "error", "subtotal_price": "24.30", "tags": "Subscription, Subscription Recurring Order", "tax_lines": "[]", "taxable": true, "taxes_included": false, "total_discounts": "0.00", "total_duties": "0.00", "total_line_items_price": "24.30", "total_price": "29.20", "total_refunds": "0.00", "total_tax": "0.00", "total_weight_grams": 0, "type": "recurring", "updated_at": "2023-01-16T18:08:54+00:00"}, "emitted_at": 1706644274123} -{"stream": "charges", "data": {"id": 817715206, "address_id": 69282975, "analytics_data": {"utm_params": []}, "billing_address": {"address1": "1921 W Wilson St", "address2": null, "city": "Batavia", "company": null, "country_code": "US", "first_name": "Kelly", "last_name": "Kozakevich", "phone": "+16145550188", "province": "Illinois", "zip": "60510"}, "client_details": {"browser_ip": null, "user_agent": null}, "created_at": "2023-05-13T04:07:34+00:00", "currency": "USD", "customer": {"id": 64962974, "email": "kozakevich_k@example.com", "external_customer_id": {"ecommerce": "5213433266347"}, "hash": "f99bd4a6877257af"}, "discounts": [], "error": null, "error_type": null, "external_order_id": {"ecommerce": null}, "external_transaction_id": {"payment_processor": null}, "has_uncommitted_changes": false, "line_items": [{"purchase_item_id": 153601366, "external_product_id": {"ecommerce": "6642695864491"}, "external_variant_id": {"ecommerce": "39684723835051"}, "grams": 0, "handle": null, "images": {"large": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_large.jpg", "medium": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_medium.jpg", "original": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581.jpg", "small": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_small.jpg"}, "original_price": "24.30", "properties": [], "purchase_item_type": "subscription", "quantity": 1, "sku": "T3", "tax_due": "0.00", "tax_lines": [], "taxable": true, "taxable_amount": "24.30", "title": "Airbit Box Corner Short sleeve t-shirt", "total_price": "24.30", "unit_price": "24.30", "unit_price_includes_tax": false, "variant_title": "L / City Green"}], "merged_at": null, "note": null, "order_attributes": [], "orders_count": 0, "payment_processor": "shopify_payments", "processed_at": null, "retry_date": null, "scheduled_at": "2024-05-12", "shipping_address": {"address1": "1921 W Wilson St", "address2": null, "city": "Batavia", "company": null, "country_code": "US", "first_name": "Kelly", "last_name": "Kozakevich", "phone": "+16145550188", "province": "Illinois", "zip": "60510"}, "shipping_lines": [{"code": "Economy", "price": "4.90", "retrieved_at": null, "source": "shopify", "status": "active", "tax_lines": [], "taxable": false, "title": "Economy"}], "status": "queued", "subtotal_price": "24.30", "tags": "Subscription, Subscription Recurring Order", "tax_lines": "[]", "taxable": true, "taxes_included": false, "total_discounts": "0.00", "total_duties": "0.00", "total_line_items_price": "24.30", "total_price": "29.20", "total_refunds": "0.00", "total_tax": "0.00", "total_weight_grams": 0, "type": "recurring", "updated_at": "2023-05-13T04:07:47+00:00"}, "emitted_at": 1706644274939} -{"stream": "charges", "data": {"id": 580825303, "address_id": 69282975, "analytics_data": {"utm_params": []}, "billing_address": {"address1": "1921 W Wilson St", "address2": null, "city": "Batavia", "company": null, "country_code": "US", "first_name": "Kelly", "last_name": "Kozakevich", "phone": "+16145550188", "province": "Illinois", "zip": "60510"}, "client_details": {"browser_ip": null, "user_agent": null}, "created_at": "2022-05-13T04:07:39+00:00", "currency": "USD", "customer": {"id": 64962974, "email": "kozakevich_k@example.com", "external_customer_id": {"ecommerce": "5213433266347"}, "hash": "f99bd4a6877257af"}, "discounts": [], "error": null, "error_type": null, "external_order_id": {"ecommerce": "5006149877931"}, "external_transaction_id": {"payment_processor": "43114102955"}, "has_uncommitted_changes": false, "line_items": [{"purchase_item_id": 153601366, "external_product_id": {"ecommerce": "6642695864491"}, "external_variant_id": {"ecommerce": "39684723835051"}, "grams": null, "handle": null, "images": {"large": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_large.jpg", "medium": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_medium.jpg", "original": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581.jpg", "small": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_small.jpg"}, "original_price": "24.30", "properties": [], "purchase_item_type": "subscription", "quantity": 1, "sku": "T3", "tax_due": "0.00", "tax_lines": [], "taxable": false, "taxable_amount": "0.00", "title": "Airbit Box Corner Short sleeve t-shirt", "total_price": "24.30", "unit_price": "24.30", "unit_price_includes_tax": false, "variant_title": "L / City Green"}], "merged_at": null, "note": null, "order_attributes": [], "orders_count": 1, "payment_processor": "shopify_payments", "processed_at": "2023-05-13T04:07:33+00:00", "retry_date": null, "scheduled_at": "2023-05-13", "shipping_address": {"address1": "1921 W Wilson St", "address2": null, "city": "Batavia", "company": null, "country_code": "US", "first_name": "Kelly", "last_name": "Kozakevich", "phone": "+16145550188", "province": "Illinois", "zip": "60510"}, "shipping_lines": [{"code": "Economy", "price": "4.90", "retrieved_at": null, "source": "shopify", "status": "active", "tax_lines": [], "taxable": false, "title": "Economy"}], "status": "success", "subtotal_price": "24.30", "tags": "Subscription, Subscription Recurring Order", "tax_lines": "[]", "taxable": false, "taxes_included": false, "total_discounts": "0.00", "total_duties": "0.00", "total_line_items_price": "24.30", "total_price": "29.20", "total_refunds": "0.00", "total_tax": "0.00", "total_weight_grams": 0, "type": "recurring", "updated_at": "2023-05-13T04:16:51+00:00"}, "emitted_at": 1706644274942} -{"stream": "customers", "data": {"id": 64817252, "analytics_data": {"utm_params": []}, "apply_credit_to_next_checkout_charge": false, "apply_credit_to_next_recurring_charge": false, "created_at": "2021-05-12T12:04:06+00:00", "email": "nikolaevaka@yahoo.com", "external_customer_id": {"ecommerce": "5212085977259"}, "first_charge_processed_at": "2021-05-12T16:03:59+00:00", "first_name": "Karina", "has_payment_method_in_dunning": false, "has_valid_payment_method": true, "hash": "23dee52d73734a81", "last_name": "Kuznetsova", "phone": null, "subscriptions_active_count": 0, "subscriptions_total_count": 1, "tax_exempt": false, "updated_at": "2023-01-16T18:08:45+00:00"}, "emitted_at": 1706644280530} -{"stream": "customers", "data": {"id": 64962974, "analytics_data": {"utm_params": []}, "apply_credit_to_next_checkout_charge": false, "apply_credit_to_next_recurring_charge": false, "created_at": "2021-05-13T09:46:44+00:00", "email": "kozakevich_k@example.com", "external_customer_id": {"ecommerce": "5213433266347"}, "first_charge_processed_at": "2021-05-13T13:46:39+00:00", "first_name": "Kelly", "has_payment_method_in_dunning": false, "has_valid_payment_method": true, "hash": "f99bd4a6877257af", "last_name": "Kozakevich", "phone": "+16145550188", "subscriptions_active_count": 1, "subscriptions_total_count": 1, "tax_exempt": false, "updated_at": "2023-05-13T04:16:36+00:00"}, "emitted_at": 1706644281267} -{"stream": "metafields", "data": {"id": 3627108, "owner_id": "64962974", "created_at": "2023-04-10T07:10:45", "description": "customer_phone_number", "key": "phone_number", "namespace": "personal_info", "owner_resource": "customer", "updated_at": "2023-04-10T07:10:45", "value": "3103103101", "value_type": "integer"}, "emitted_at": 1706644292270} -{"stream": "orders", "data": {"id": 534919106, "address_id": 69282975, "billing_address": {"address1": "1921 W Wilson St", "address2": null, "city": "Batavia", "company": null, "country_code": "US", "first_name": "Kelly", "last_name": "Kozakevich", "phone": "+16145550188", "province": "Illinois", "zip": "60510"}, "charge": {"id": 580825303, "external_transaction_id": {"payment_processor": "43114102955"}, "payment_processor_name": "shopify_payments", "status": "success"}, "client_details": {"browser_ip": null, "user_agent": null}, "created_at": "2023-05-13T04:07:28+00:00", "currency": "USD", "customer": {"id": 64962974, "email": "kozakevich_k@example.com", "external_customer_id": {"ecommerce": "5213433266347"}, "hash": "f99bd4a6877257af"}, "discounts": [], "error": null, "external_cart_token": null, "external_order_id": {"ecommerce": "5006149877931"}, "external_order_name": {"ecommerce": "#1016"}, "external_order_number": {"ecommerce": "1016"}, "is_prepaid": 0, "line_items": [{"purchase_item_id": 153601366, "external_inventory_policy": "decrement_obeying_policy", "external_product_id": {"ecommerce": "6642695864491"}, "external_variant_id": {"ecommerce": "39684723835051"}, "grams": null, "handle": null, "images": {"large": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_large.jpg", "medium": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_medium.jpg", "original": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581.jpg", "small": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_small.jpg"}, "original_price": "24.30", "properties": [], "purchase_item_type": "subscription", "quantity": 1, "sku": "T3", "tax_due": "0.00", "tax_lines": [], "taxable": false, "taxable_amount": "0.00", "title": "Airbit Box Corner Short sleeve t-shirt", "total_price": "24.30", "unit_price": "24.30", "unit_price_includes_tax": false, "variant_title": "L / City Green"}], "note": null, "order_attributes": [], "processed_at": "2023-05-13T04:07:33+00:00", "scheduled_at": "2023-05-13T04:00:00+00:00", "shipping_address": {"address1": "1921 W Wilson St", "address2": null, "city": "Batavia", "company": null, "country_code": "US", "first_name": "Kelly", "last_name": "Kozakevich", "phone": "+16145550188", "province": "Illinois", "zip": "60510"}, "shipping_lines": [{"code": "Economy", "price": "4.90", "source": "shopify", "tax_lines": [], "taxable": false, "title": "Economy"}], "status": "success", "subtotal_price": 24.3, "tags": "Subscription, Subscription Recurring Order", "tax_lines": [], "taxable": false, "total_discounts": 0.0, "total_duties": "0.00", "total_line_items_price": 24.3, "total_price": 29.2, "total_refunds": "0.00", "total_tax": "0.00", "total_weight_grams": 0, "type": "recurring", "updated_at": "2023-05-13T04:16:51+00:00"}, "emitted_at": 1706644303256} -{"stream": "products", "data": {"collection_id": null, "created_at": "2021-05-13T08:08:28", "discount_amount": 5.0, "discount_type": "percentage", "handle": "airbit-box-corner-short-sleeve-t-shirt", "id": 1853649, "images": {"large": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_large.jpg", "medium": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_medium.jpg", "original": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581.jpg", "small": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_small.jpg"}, "product_id": 6642695864491, "shopify_product_id": 6642695864491, "subscription_defaults": {"apply_cutoff_date_to_checkout": false, "charge_interval_frequency": 30, "cutoff_day_of_month": null, "cutoff_day_of_week": null, "expire_after_specific_number_of_charges": null, "modifiable_properties": [], "number_charges_until_expiration": null, "order_day_of_month": null, "order_day_of_week": null, "order_interval_frequency_options": ["30"], "order_interval_unit": "day", "storefront_purchase_options": "subscription_and_onetime"}, "title": "Airbit Box Corner Short sleeve t-shirt", "updated_at": "2021-05-13T08:08:28"}, "emitted_at": 1706644311039} -{"stream": "products", "data": {"collection_id": null, "created_at": "2021-05-13T07:27:34", "discount_amount": 5.0, "discount_type": "percentage", "handle": "i-make-beats-wool-blend-snapback", "id": 1853639, "images": {"large": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/c_black1_large.jpg", "medium": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/c_black1_medium.jpg", "original": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/c_black1.jpg", "small": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/c_black1_small.jpg"}, "product_id": 6644278001835, "shopify_product_id": 6644278001835, "subscription_defaults": {"apply_cutoff_date_to_checkout": false, "charge_interval_frequency": 30, "cutoff_day_of_month": null, "cutoff_day_of_week": null, "expire_after_specific_number_of_charges": null, "modifiable_properties": [], "number_charges_until_expiration": null, "order_day_of_month": null, "order_day_of_week": null, "order_interval_frequency_options": ["30"], "order_interval_unit": "day", "storefront_purchase_options": "subscription_and_onetime"}, "title": "I Make Beats Wool Blend Snapback", "updated_at": "2021-05-13T07:27:34"}, "emitted_at": 1706644311045} -{"stream": "products", "data": {"collection_id": null, "created_at": "2021-05-13T08:20:10", "discount_amount": 0.0, "discount_type": "percentage", "handle": "new-mug", "id": 1853655, "images": {"large": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/m_black_red_large.jpg", "medium": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/m_black_red_medium.jpg", "original": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/m_black_red.jpg", "small": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/m_black_red_small.jpg"}, "product_id": 6688261701803, "shopify_product_id": 6688261701803, "subscription_defaults": {"apply_cutoff_date_to_checkout": false, "charge_interval_frequency": 30, "cutoff_day_of_month": null, "cutoff_day_of_week": null, "expire_after_specific_number_of_charges": null, "modifiable_properties": [], "number_charges_until_expiration": null, "order_day_of_month": null, "order_day_of_week": null, "order_interval_frequency_options": ["30"], "order_interval_unit": "day", "storefront_purchase_options": "subscription_and_onetime"}, "title": "NEW!!! MUG", "updated_at": "2021-05-13T08:20:10"}, "emitted_at": 1706644311046} -{"stream": "shop", "data": {"shop": {"allow_customers_to_skip_delivery": 1, "checkout_logo_url": null, "created_at": "Wed, 21 Apr 2021 11:44:38 GMT", "currency": "USD", "customer_portal_domain": "", "disabled_currencies_historical": [], "domain": "airbyte.myshopify.com", "email": "integration-test@airbyte.io", "enabled_presentment_currencies": ["USD"], "enabled_presentment_currencies_symbols": [{"currency": "USD", "location": "before", "suffix": " USD", "symbol": "$"}], "external_platform": "shopify", "iana_timezone": "Europe/Zaporozhye", "id": 126593, "my_shopify_domain": "airbyte.myshopify.com", "name": "airbyte", "payment_processor": "shopify_payments", "platform_domain": "airbyte.myshopify.com", "shop_email": "integration-test@airbyte.io", "shop_phone": "1111111111", "subscriptions_enabled": 1, "test_mode": false, "timezone": "(GMT+02:00) Europe/Zaporozhye", "updated_at": "Tue, 30 Jan 2024 18:56:54 GMT"}, "store": {"checkout_logo_url": null, "checkout_platform": "shopify", "created_at": "Wed, 21 Apr 2021 11:44:38 GMT", "currency": "USD", "customer_portal_domain": "", "disabled_currencies_historical": [], "domain": "airbyte.myshopify.com", "email": "integration-test@airbyte.io", "enabled_presentment_currencies": ["USD"], "enabled_presentment_currencies_symbols": [{"currency": "USD", "location": "before", "suffix": " USD", "symbol": "$"}], "external_platform": "shopify", "iana_timezone": "Europe/Zaporozhye", "id": 126593, "my_shopify_domain": "airbyte.myshopify.com", "name": "airbyte", "payment_processor": "shopify_payments", "platform_domain": "airbyte.myshopify.com", "shop_email": "integration-test@airbyte.io", "shop_phone": "1111111111", "subscriptions_enabled": 1, "test_mode": false, "timezone": "(GMT+02:00) Europe/Zaporozhye", "updated_at": "Tue, 30 Jan 2024 18:56:54 GMT"}}, "emitted_at": 1709114209054} -{"stream": "subscriptions", "data": {"id": 153601366, "address_id": 69282975, "customer_id": 64962974, "analytics_data": {"utm_params": []}, "cancellation_reason": null, "cancellation_reason_comments": null, "cancelled_at": null, "charge_interval_frequency": "365", "created_at": "2021-05-13T09:46:47+00:00", "expire_after_specific_number_of_charges": null, "external_product_id": {"ecommerce": "6642695864491"}, "external_variant_id": {"ecommerce": "39684723835051"}, "has_queued_charges": 1, "is_prepaid": false, "is_skippable": true, "is_swappable": false, "max_retries_reached": 0, "next_charge_scheduled_at": "2024-05-12", "order_day_of_month": null, "order_day_of_week": null, "order_interval_frequency": "365", "order_interval_unit": "day", "presentment_currency": "USD", "price": 24.3, "product_title": "Airbit Box Corner Short sleeve t-shirt", "properties": [], "quantity": 1, "sku": null, "sku_override": false, "status": "active", "updated_at": "2023-05-13T04:07:32+00:00", "variant_title": "L / City Green"}, "emitted_at": 1706644322400} +{"stream": "addresses", "data": {"id": 69282975, "customer_id": 64962974, "payment_method_id": 12482030, "address1": "1921 W Wilson St", "address2": null, "city": "Batavia", "company": null, "country_code": "US", "created_at": "2021-05-13T09:46:46+00:00", "discounts": [], "first_name": "Kelly", "last_name": "Kozakevich", "order_attributes": [], "order_note": null, "phone": "+16145550188", "presentment_currency": "USD", "province": "Illinois", "shipping_lines_conserved": [], "shipping_lines_override": [], "updated_at": "2023-05-13T04:07:34+00:00", "zip": "60510"}, "emitted_at": 1709035647334} +{"stream": "addresses", "data": {"id": 69105381, "customer_id": 64817252, "payment_method_id": 12482012, "address1": "1 9th Ave", "address2": "1", "city": "San Francisco", "company": null, "country_code": "US", "created_at": "2021-05-12T12:04:06+00:00", "discounts": [], "first_name": "Jane", "last_name": "Doe", "order_attributes": [], "order_note": null, "phone": "1234567890", "presentment_currency": "USD", "province": "California", "shipping_lines_conserved": [], "shipping_lines_override": [], "updated_at": "2023-01-16T09:59:09+00:00", "zip": "94118"}, "emitted_at": 1709035647340} +{"stream": "charges", "data": {"id": 817715206, "address_id": 69282975, "analytics_data": {"utm_params": []}, "billing_address": {"address1": "1921 W Wilson St", "address2": null, "city": "Batavia", "company": null, "country_code": "US", "first_name": "Kelly", "last_name": "Kozakevich", "phone": "+16145550188", "province": "Illinois", "zip": "60510"}, "client_details": {"browser_ip": null, "user_agent": null}, "created_at": "2023-05-13T04:07:34+00:00", "currency": "USD", "customer": {"id": 64962974, "email": "kozakevich_k@example.com", "external_customer_id": {"ecommerce": "5213433266347"}, "hash": "f99bd4a6877257af"}, "discounts": [], "error": null, "error_type": null, "external_order_id": {"ecommerce": null}, "external_transaction_id": {"payment_processor": null}, "has_uncommitted_changes": false, "line_items": [{"purchase_item_id": 153601366, "external_product_id": {"ecommerce": "6642695864491"}, "external_variant_id": {"ecommerce": "39684723835051"}, "grams": 0, "handle": null, "images": {"large": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_large.jpg", "medium": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_medium.jpg", "original": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581.jpg", "small": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_small.jpg"}, "original_price": "24.30", "properties": [], "purchase_item_type": "subscription", "quantity": 1, "sku": "T3", "tax_due": "0.00", "tax_lines": [], "taxable": true, "taxable_amount": "24.30", "title": "Airbit Box Corner Short sleeve t-shirt", "total_price": "24.30", "unit_price": "24.30", "unit_price_includes_tax": false, "variant_title": "L / City Green"}], "merged_at": null, "note": null, "order_attributes": [], "orders_count": 0, "payment_processor": "shopify_payments", "processed_at": null, "retry_date": null, "scheduled_at": "2024-05-12", "shipping_address": {"address1": "1921 W Wilson St", "address2": null, "city": "Batavia", "company": null, "country_code": "US", "first_name": "Kelly", "last_name": "Kozakevich", "phone": "+16145550188", "province": "Illinois", "zip": "60510"}, "shipping_lines": [{"code": "Economy", "price": "4.90", "retrieved_at": null, "source": "shopify", "status": "active", "tax_lines": [], "taxable": false, "title": "Economy"}], "status": "queued", "subtotal_price": "24.30", "tags": "Subscription, Subscription Recurring Order", "tax_lines": "[]", "taxable": true, "taxes_included": false, "total_discounts": "0.00", "total_duties": "0.00", "total_line_items_price": "24.30", "total_price": "29.20", "total_refunds": "0.00", "total_tax": "0.00", "total_weight_grams": 0, "type": "recurring", "updated_at": "2023-05-13T04:07:47+00:00"}, "emitted_at": 1709035648060} +{"stream": "charges", "data": {"id": 580825303, "address_id": 69282975, "analytics_data": {"utm_params": []}, "billing_address": {"address1": "1921 W Wilson St", "address2": null, "city": "Batavia", "company": null, "country_code": "US", "first_name": "Kelly", "last_name": "Kozakevich", "phone": "+16145550188", "province": "Illinois", "zip": "60510"}, "client_details": {"browser_ip": null, "user_agent": null}, "created_at": "2022-05-13T04:07:39+00:00", "currency": "USD", "customer": {"id": 64962974, "email": "kozakevich_k@example.com", "external_customer_id": {"ecommerce": "5213433266347"}, "hash": "f99bd4a6877257af"}, "discounts": [], "error": null, "error_type": null, "external_order_id": {"ecommerce": "5006149877931"}, "external_transaction_id": {"payment_processor": "43114102955"}, "has_uncommitted_changes": false, "line_items": [{"purchase_item_id": 153601366, "external_product_id": {"ecommerce": "6642695864491"}, "external_variant_id": {"ecommerce": "39684723835051"}, "grams": null, "handle": null, "images": {"large": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_large.jpg", "medium": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_medium.jpg", "original": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581.jpg", "small": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_small.jpg"}, "original_price": "24.30", "properties": [], "purchase_item_type": "subscription", "quantity": 1, "sku": "T3", "tax_due": "0.00", "tax_lines": [], "taxable": false, "taxable_amount": "0.00", "title": "Airbit Box Corner Short sleeve t-shirt", "total_price": "24.30", "unit_price": "24.30", "unit_price_includes_tax": false, "variant_title": "L / City Green"}], "merged_at": null, "note": null, "order_attributes": [], "orders_count": 1, "payment_processor": "shopify_payments", "processed_at": "2023-05-13T04:07:33+00:00", "retry_date": null, "scheduled_at": "2023-05-13", "shipping_address": {"address1": "1921 W Wilson St", "address2": null, "city": "Batavia", "company": null, "country_code": "US", "first_name": "Kelly", "last_name": "Kozakevich", "phone": "+16145550188", "province": "Illinois", "zip": "60510"}, "shipping_lines": [{"code": "Economy", "price": "4.90", "retrieved_at": null, "source": "shopify", "status": "active", "tax_lines": [], "taxable": false, "title": "Economy"}], "status": "success", "subtotal_price": "24.30", "tags": "Subscription, Subscription Recurring Order", "tax_lines": "[]", "taxable": false, "taxes_included": false, "total_discounts": "0.00", "total_duties": "0.00", "total_line_items_price": "24.30", "total_price": "29.20", "total_refunds": "0.00", "total_tax": "0.00", "total_weight_grams": 0, "type": "recurring", "updated_at": "2023-05-13T04:16:51+00:00"}, "emitted_at": 1709035648066} +{"stream": "charges", "data": {"id": 386976088, "address_id": 69105381, "analytics_data": {"utm_params": []}, "billing_address": {"address1": "1 9th Ave", "address2": "1", "city": "San Francisco", "company": null, "country_code": "US", "first_name": "Karina", "last_name": "Kuznetsova", "phone": null, "province": "California", "zip": "94118"}, "charge_attempts": 6, "client_details": {"browser_ip": null, "user_agent": null}, "created_at": "2021-05-12T12:04:07+00:00", "currency": "USD", "customer": {"id": 64817252, "email": "nikolaevaka@yahoo.com", "external_customer_id": {"ecommerce": "5212085977259"}, "hash": "23dee52d73734a81"}, "discounts": [], "error": "None\r\n [May 12, 12:06AM] ['Inventory unavailable S / Black T1 6642695864491 requested qty. 1, inventory was: -1']\r\n [May 13, 4:10PM] ['Inventory unavailable S / Black T1 6642695864491 requested qty. 1, inventory was: -1']\r\n [May 19, 4:10PM] ['Inventory unavailable S / Black T1 6642695864491 requested qty. 1, inventory was: -1']\r\n [May 25, 4:10PM] ['Inventory unavailable S / Black T1 6642695864491 requested qty. 1, inventory was: -1']\r\n [May 31, 4:09PM] ['Inventory unavailable S / Black T1 6642695864491 requested qty. 1, inventory was: -1']\r\n [Jun 06, 4:10PM] ['Inventory unavailable S / Black T1 6642695864491 requested qty. 1, inventory was: -1']", "error_type": "CLOSED_MAX_RETRIES_REACHED", "external_order_id": {"ecommerce": null}, "external_transaction_id": {"payment_processor": null}, "external_variant_not_found": null, "has_uncommitted_changes": false, "last_charge_attempt": "2022-06-06T20:10:19+00:00", "line_items": [{"purchase_item_id": 153224593, "external_product_id": {"ecommerce": "6642695864491"}, "external_variant_id": {"ecommerce": "39684722131115"}, "grams": 0, "handle": null, "images": {"large": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_large.jpg", "medium": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_medium.jpg", "original": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581.jpg", "small": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_small.jpg"}, "original_price": "24.30", "properties": [], "purchase_item_type": "subscription", "quantity": 1, "sku": "T1", "tax_due": "0.00", "tax_lines": [], "taxable": true, "taxable_amount": "24.30", "title": "Airbit Box Corner Short sleeve t-shirt", "total_price": "24.30", "unit_price": "24.30", "unit_price_includes_tax": false, "variant_title": "S / Black"}], "merged_at": null, "note": null, "order_attributes": [], "orders_count": 0, "payment_processor": "shopify_payments", "processed_at": null, "retry_date": "2022-06-12T04:00:00+00:00", "scheduled_at": "2022-05-12", "shipping_address": {"address1": "1 9th Ave", "address2": "1", "city": "San Francisco", "company": null, "country_code": "US", "first_name": "Jane", "last_name": "Doe", "phone": "1234567890", "province": "California", "zip": "94118"}, "shipping_lines": [{"code": "Economy", "price": "4.90", "retrieved_at": null, "source": "shopify", "status": "active", "tax_lines": [], "taxable": false, "title": "Economy"}], "status": "error", "subtotal_price": "24.30", "tags": "Subscription, Subscription Recurring Order", "tax_lines": "[]", "taxable": true, "taxes_included": false, "total_discounts": "0.00", "total_duties": "0.00", "total_line_items_price": "24.30", "total_price": "29.20", "total_refunds": "0.00", "total_tax": "0.00", "total_weight_grams": 0, "type": "recurring", "updated_at": "2023-01-16T18:08:54+00:00"}, "emitted_at": 1709035648071} +{"stream": "customers", "data": {"id": 64962974, "analytics_data": {"utm_params": []}, "apply_credit_to_next_checkout_charge": false, "apply_credit_to_next_recurring_charge": false, "created_at": "2021-05-13T09:46:44+00:00", "email": "kozakevich_k@example.com", "external_customer_id": {"ecommerce": "5213433266347"}, "first_charge_processed_at": "2021-05-13T13:46:39+00:00", "first_name": "Kelly", "has_payment_method_in_dunning": false, "has_valid_payment_method": true, "hash": "f99bd4a6877257af", "last_name": "Kozakevich", "phone": "+16145550188", "subscriptions_active_count": 1, "subscriptions_total_count": 1, "tax_exempt": false, "updated_at": "2023-05-13T04:16:36+00:00"}, "emitted_at": 1709035649348} +{"stream": "customers", "data": {"id": 64817252, "analytics_data": {"utm_params": []}, "apply_credit_to_next_checkout_charge": false, "apply_credit_to_next_recurring_charge": false, "created_at": "2021-05-12T12:04:06+00:00", "email": "nikolaevaka@yahoo.com", "external_customer_id": {"ecommerce": "5212085977259"}, "first_charge_processed_at": "2021-05-12T16:03:59+00:00", "first_name": "Karina", "has_payment_method_in_dunning": false, "has_valid_payment_method": true, "hash": "23dee52d73734a81", "last_name": "Kuznetsova", "phone": null, "subscriptions_active_count": 0, "subscriptions_total_count": 1, "tax_exempt": false, "updated_at": "2023-01-16T18:08:45+00:00"}, "emitted_at": 1709035649352} +{"stream": "metafields", "data": {"id": 3627108, "owner_id": "64962974", "created_at": "2023-04-10T07:10:45", "description": "customer_phone_number", "key": "phone_number", "namespace": "personal_info", "owner_resource": "customer", "updated_at": "2023-04-10T07:10:45", "value": "3103103101", "value_type": "integer"}, "emitted_at": 1709035651342} +{"stream": "products", "data": {"collection_id": null, "created_at": "2021-05-13T08:08:28", "discount_amount": 5.0, "discount_type": "percentage", "handle": "airbit-box-corner-short-sleeve-t-shirt", "id": 1853649, "images": {"large": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_large.jpg", "medium": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_medium.jpg", "original": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581.jpg", "small": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_small.jpg"}, "product_id": 6642695864491, "shopify_product_id": 6642695864491, "subscription_defaults": {"apply_cutoff_date_to_checkout": false, "charge_interval_frequency": 30, "cutoff_day_of_month": null, "cutoff_day_of_week": null, "expire_after_specific_number_of_charges": null, "modifiable_properties": [], "number_charges_until_expiration": null, "order_day_of_month": null, "order_day_of_week": null, "order_interval_frequency_options": ["30"], "order_interval_unit": "day", "storefront_purchase_options": "subscription_and_onetime"}, "title": "Airbit Box Corner Short sleeve t-shirt", "updated_at": "2021-05-13T08:08:28"}, "emitted_at": 1709035653155} +{"stream": "products", "data": {"collection_id": null, "created_at": "2021-05-13T07:27:34", "discount_amount": 5.0, "discount_type": "percentage", "handle": "i-make-beats-wool-blend-snapback", "id": 1853639, "images": {"large": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/c_black1_large.jpg", "medium": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/c_black1_medium.jpg", "original": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/c_black1.jpg", "small": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/c_black1_small.jpg"}, "product_id": 6644278001835, "shopify_product_id": 6644278001835, "subscription_defaults": {"apply_cutoff_date_to_checkout": false, "charge_interval_frequency": 30, "cutoff_day_of_month": null, "cutoff_day_of_week": null, "expire_after_specific_number_of_charges": null, "modifiable_properties": [], "number_charges_until_expiration": null, "order_day_of_month": null, "order_day_of_week": null, "order_interval_frequency_options": ["30"], "order_interval_unit": "day", "storefront_purchase_options": "subscription_and_onetime"}, "title": "I Make Beats Wool Blend Snapback", "updated_at": "2021-05-13T07:27:34"}, "emitted_at": 1709035653159} +{"stream": "products", "data": {"collection_id": null, "created_at": "2021-05-13T08:20:10", "discount_amount": 0.0, "discount_type": "percentage", "handle": "new-mug", "id": 1853655, "images": {"large": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/m_black_red_large.jpg", "medium": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/m_black_red_medium.jpg", "original": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/m_black_red.jpg", "small": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/m_black_red_small.jpg"}, "product_id": 6688261701803, "shopify_product_id": 6688261701803, "subscription_defaults": {"apply_cutoff_date_to_checkout": false, "charge_interval_frequency": 30, "cutoff_day_of_month": null, "cutoff_day_of_week": null, "expire_after_specific_number_of_charges": null, "modifiable_properties": [], "number_charges_until_expiration": null, "order_day_of_month": null, "order_day_of_week": null, "order_interval_frequency_options": ["30"], "order_interval_unit": "day", "storefront_purchase_options": "subscription_and_onetime"}, "title": "NEW!!! MUG", "updated_at": "2021-05-13T08:20:10"}, "emitted_at": 1709035653161} +{"stream": "shop", "data": {"shop": {"allow_customers_to_skip_delivery": 1, "checkout_logo_url": null, "created_at": "Wed, 21 Apr 2021 11:44:38 GMT", "currency": "USD", "customer_portal_domain": "", "disabled_currencies_historical": [], "domain": "airbyte.myshopify.com", "email": "integration-test@airbyte.io", "enabled_presentment_currencies": ["USD"], "enabled_presentment_currencies_symbols": [{"currency": "USD", "location": "before", "suffix": " USD", "symbol": "$"}], "external_platform": "shopify", "iana_timezone": "Europe/Zaporozhye", "id": 126593, "my_shopify_domain": "airbyte.myshopify.com", "name": "airbyte", "payment_processor": "shopify_payments", "platform_domain": "airbyte.myshopify.com", "shop_email": "integration-test@airbyte.io", "shop_phone": "1111111111", "subscriptions_enabled": 1, "test_mode": false, "timezone": "(GMT+02:00) Europe/Zaporozhye", "updated_at": "Tue, 30 Jan 2024 18:56:54 GMT"}, "store": {"checkout_logo_url": null, "checkout_platform": "shopify", "created_at": "Wed, 21 Apr 2021 11:44:38 GMT", "currency": "USD", "customer_portal_domain": "", "disabled_currencies_historical": [], "domain": "airbyte.myshopify.com", "email": "integration-test@airbyte.io", "enabled_presentment_currencies": ["USD"], "enabled_presentment_currencies_symbols": [{"currency": "USD", "location": "before", "suffix": " USD", "symbol": "$"}], "external_platform": "shopify", "iana_timezone": "Europe/Zaporozhye", "id": 126593, "my_shopify_domain": "airbyte.myshopify.com", "name": "airbyte", "payment_processor": "shopify_payments", "platform_domain": "airbyte.myshopify.com", "shop_email": "integration-test@airbyte.io", "shop_phone": "1111111111", "subscriptions_enabled": 1, "test_mode": false, "timezone": "(GMT+02:00) Europe/Zaporozhye", "updated_at": "Tue, 30 Jan 2024 18:56:54 GMT"}}, "emitted_at": 1709035654067} +{"stream": "subscriptions", "data": {"id": 153601366, "address_id": 69282975, "customer_id": 64962974, "analytics_data": {"utm_params": []}, "cancellation_reason": null, "cancellation_reason_comments": null, "cancelled_at": null, "charge_interval_frequency": "365", "created_at": "2021-05-13T09:46:47+00:00", "expire_after_specific_number_of_charges": null, "external_product_id": {"ecommerce": "6642695864491"}, "external_variant_id": {"ecommerce": "39684723835051"}, "has_queued_charges": 1, "is_prepaid": false, "is_skippable": true, "is_swappable": false, "max_retries_reached": 0, "next_charge_scheduled_at": "2024-05-12", "order_day_of_month": null, "order_day_of_week": null, "order_interval_frequency": "365", "order_interval_unit": "day", "presentment_currency": "USD", "price": 24.3, "product_title": "Airbit Box Corner Short sleeve t-shirt", "properties": [], "quantity": 1, "sku": null, "sku_override": false, "status": "active", "updated_at": "2023-05-13T04:07:32+00:00", "variant_title": "L / City Green"}, "emitted_at": 1709035655558} +{"stream": "orders", "data": {"id": 534919106, "address_id": 69282975, "billing_address": {"address1": "1921 W Wilson St", "address2": null, "city": "Batavia", "company": null, "country_code": "US", "first_name": "Kelly", "last_name": "Kozakevich", "phone": "+16145550188", "province": "Illinois", "zip": "60510"}, "charge": {"id": 580825303, "external_transaction_id": {"payment_processor": "43114102955"}, "payment_processor_name": "shopify_payments", "status": "success"}, "client_details": {"browser_ip": null, "user_agent": null}, "created_at": "2023-05-13T04:07:28+00:00", "currency": "USD", "customer": {"id": 64962974, "email": "kozakevich_k@example.com", "external_customer_id": {"ecommerce": "5213433266347"}, "hash": "f99bd4a6877257af"}, "discounts": [], "error": null, "external_cart_token": null, "external_order_id": {"ecommerce": "5006149877931"}, "external_order_name": {"ecommerce": "#1016"}, "external_order_number": {"ecommerce": "1016"}, "is_prepaid": 0, "line_items": [{"purchase_item_id": 153601366, "external_inventory_policy": "decrement_obeying_policy", "external_product_id": {"ecommerce": "6642695864491"}, "external_variant_id": {"ecommerce": "39684723835051"}, "grams": null, "handle": null, "images": {"large": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_large.jpg", "medium": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_medium.jpg", "original": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581.jpg", "small": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_small.jpg"}, "original_price": 24.3, "properties": [], "purchase_item_type": "subscription", "quantity": 1, "sku": "T3", "tax_due": "0.00", "tax_lines": [], "taxable": false, "taxable_amount": "0.00", "title": "Airbit Box Corner Short sleeve t-shirt", "total_price": "24.30", "unit_price": "24.30", "unit_price_includes_tax": false, "variant_title": "L / City Green"}], "note": null, "order_attributes": [], "processed_at": "2023-05-13T04:07:33+00:00", "scheduled_at": "2023-05-13T04:00:00+00:00", "shipping_address": {"address1": "1921 W Wilson St", "address2": null, "city": "Batavia", "company": null, "country_code": "US", "first_name": "Kelly", "last_name": "Kozakevich", "phone": "+16145550188", "province": "Illinois", "zip": "60510"}, "shipping_lines": [{"code": "Economy", "price": "4.90", "source": "shopify", "tax_lines": [], "taxable": false, "title": "Economy"}], "status": "success", "subtotal_price": 24.3, "tags": "Subscription, Subscription Recurring Order", "tax_lines": [], "taxable": false, "total_discounts": 0.0, "total_duties": "0.00", "total_line_items_price": 24.3, "total_price": 29.2, "total_refunds": "0.00", "total_tax": "0.00", "total_weight_grams": 0, "type": "recurring", "updated_at": "2023-05-13T04:16:51+00:00"}, "emitted_at": 1709035661452} diff --git a/airbyte-integrations/connectors/source-recharge/metadata.yaml b/airbyte-integrations/connectors/source-recharge/metadata.yaml index eddf63305e544..d6e1a4a6a4344 100644 --- a/airbyte-integrations/connectors/source-recharge/metadata.yaml +++ b/airbyte-integrations/connectors/source-recharge/metadata.yaml @@ -5,13 +5,14 @@ data: connectorSubtype: api connectorType: source connectorBuildOptions: - baseImage: docker.io/airbyte/python-connector-base:1.1.0@sha256:bd98f6505c6764b1b5f99d3aedc23dfc9e9af631a62533f60eb32b1d3dbab20c + baseImage: docker.io/airbyte/python-connector-base:1.2.0@sha256:c22a9d97464b69d6ef01898edf3f8612dc11614f05a84984451dde195f337db9 definitionId: 45d2e135-2ede-49e1-939f-3e3ec357a65e - dockerImageTag: 1.1.5 + dockerImageTag: 1.2.0 dockerRepository: airbyte/source-recharge githubIssueLabel: source-recharge icon: recharge.svg license: MIT + maxSecondsBetweenMessages: 1 name: Recharge remoteRegistries: pypi: @@ -26,7 +27,7 @@ data: documentationUrl: https://docs.airbyte.com/integrations/sources/recharge tags: - language:python - - cdk:python + - cdk:low-code ab_internal: sl: 200 ql: 400 diff --git a/airbyte-integrations/connectors/source-recharge/poetry.lock b/airbyte-integrations/connectors/source-recharge/poetry.lock index 4135398b392f1..a997948dfbcac 100644 --- a/airbyte-integrations/connectors/source-recharge/poetry.lock +++ b/airbyte-integrations/connectors/source-recharge/poetry.lock @@ -1,14 +1,14 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.60.1" +version = "0.72.2" description = "A framework for writing Airbyte Connectors." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte-cdk-0.60.1.tar.gz", hash = "sha256:fc5212b2962c1dc6aca9cc6f1c2000d7636b7509915846c126420c2b0c814317"}, - {file = "airbyte_cdk-0.60.1-py3-none-any.whl", hash = "sha256:94b33c0f6851d1e2546eac3cec54c67489239595d9e0a496ef57c3fc808e89e3"}, + {file = "airbyte-cdk-0.72.2.tar.gz", hash = "sha256:3c06ed9c1436967ffde77b51814772dbbd79745d610bc2fe400dff9c4d7a9877"}, + {file = "airbyte_cdk-0.72.2-py3-none-any.whl", hash = "sha256:8d50773fe9ffffe9be8d6c2d2fcb10c50153833053b3ef4283fcb39c544dc4b9"}, ] [package.dependencies] @@ -32,8 +32,8 @@ requests-cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] @@ -104,13 +104,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -301,6 +301,20 @@ files = [ [package.extras] test = ["pytest (>=6)"] +[[package]] +name = "freezegun" +version = "1.4.0" +description = "Let your Python tests travel through time" +optional = false +python-versions = ">=3.7" +files = [ + {file = "freezegun-1.4.0-py3-none-any.whl", hash = "sha256:55e0fc3c84ebf0a96a5aa23ff8b53d70246479e9a68863f1fcac5a3e52f19dd6"}, + {file = "freezegun-1.4.0.tar.gz", hash = "sha256:10939b0ba0ff5adaecf3b06a5c2f73071d9678e507c5eaedb23c761d56ac774b"}, +] + +[package.dependencies] +python-dateutil = ">=2.7" + [[package]] name = "genson" version = "1.2.2" @@ -467,13 +481,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -702,13 +716,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -808,13 +822,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -826,15 +840,15 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" @@ -857,19 +871,19 @@ test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "tes [[package]] name = "setuptools" -version = "69.1.0" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -895,13 +909,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -920,13 +934,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1031,4 +1045,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "da1dbc89f0a40d0a16baa47814c1e57b38c5afec44baa89789bc069fe9a7a7af" +content-hash = "6c8e1b56b7d37fab639950309e88d5e32cf513f66bceb08ab8d7950c535db192" diff --git a/airbyte-integrations/connectors/source-recharge/pyproject.toml b/airbyte-integrations/connectors/source-recharge/pyproject.toml index 15ca7a7471c8c..5bc47b80b6eaa 100644 --- a/airbyte-integrations/connectors/source-recharge/pyproject.toml +++ b/airbyte-integrations/connectors/source-recharge/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "1.1.5" +version = "1.2.0" name = "source-recharge" description = "Source implementation for Recharge." authors = [ "Airbyte ",] @@ -17,7 +17,8 @@ include = "source_recharge" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "==0.60.1" +airbyte-cdk = "^0" +freezegun = "^1.4.0" [tool.poetry.scripts] source-recharge = "source_recharge.run:run" diff --git a/airbyte-integrations/connectors/source-recharge/source_recharge/api.py b/airbyte-integrations/connectors/source-recharge/source_recharge/api.py deleted file mode 100644 index aaf2cb91cf2e2..0000000000000 --- a/airbyte-integrations/connectors/source-recharge/source_recharge/api.py +++ /dev/null @@ -1,268 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from abc import ABC, abstractmethod -from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Union - -import pendulum -import requests -from airbyte_cdk.models import SyncMode -from airbyte_cdk.sources.streams.http import HttpStream -from airbyte_cdk.sources.utils.transform import TransformConfig, TypeTransformer - - -class RechargeStream(HttpStream, ABC): - primary_key = "id" - url_base = "https://api.rechargeapps.com/" - - limit = 250 - page_num = 1 - period_in_days = 30 # Slice data request for 1 month - raise_on_http_errors = True - - # registering the default schema transformation - transformer: TypeTransformer = TypeTransformer(TransformConfig.DefaultSchemaNormalization) - - def __init__(self, config, **kwargs): - super().__init__(**kwargs) - self._start_date = config["start_date"] - - @property - def data_path(self): - return self.name - - @property - @abstractmethod - def api_version(self) -> str: - pass - - def request_headers( - self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None - ) -> Mapping[str, Any]: - return {"x-recharge-version": self.api_version} - - def path( - self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None - ) -> str: - return self.name - - @abstractmethod - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - pass - - @abstractmethod - def request_params( - self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None, **kwargs - ) -> MutableMapping[str, Any]: - pass - - def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - response_data = response.json() - stream_data = self.get_stream_data(response_data) - - yield from stream_data - - def get_stream_data(self, response_data: Any) -> List[dict]: - if self.data_path: - return response_data.get(self.data_path, []) - else: - return [response_data] - - def should_retry(self, response: requests.Response) -> bool: - content_length = int(response.headers.get("Content-Length", 0)) - incomplete_data_response = response.status_code == 200 and content_length > len(response.content) - - if incomplete_data_response: - return True - - return super().should_retry(response) - - def stream_slices( - self, sync_mode: SyncMode, cursor_field: List[str] = None, stream_state: Mapping[str, Any] = None - ) -> Iterable[Optional[Mapping[str, Any]]]: - start_date = (stream_state or {}).get(self.cursor_field, self._start_date) if self.cursor_field else self._start_date - - now = pendulum.now() - - # dates are inclusive, so we add 1 second so that time periods do not overlap - start_date = pendulum.parse(start_date).add(seconds=1) - - while start_date <= now: - end_date = start_date.add(days=self.period_in_days) - yield {"start_date": start_date.strftime("%Y-%m-%d %H:%M:%S"), "end_date": end_date.strftime("%Y-%m-%d %H:%M:%S")} - start_date = end_date.add(seconds=1) - - -class RechargeStreamModernAPI(RechargeStream): - api_version = "2021-11" - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - cursor = response.json().get("next_cursor") - if cursor: - return {"cursor": cursor} - - def request_params( - self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None, **kwargs - ) -> MutableMapping[str, Any]: - params = {"limit": self.limit} - - # if a cursor value is passed, only limit can be passed with it! - if next_page_token: - params.update(next_page_token) - else: - params.update( - { - "updated_at_min": (stream_slice or {}).get("start_date", self._start_date), - "updated_at_max": (stream_slice or {}).get("end_date", self._start_date), - } - ) - return params - - -class RechargeStreamDeprecatedAPI(RechargeStream): - api_version = "2021-01" - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - stream_data = self.get_stream_data(response.json()) - if len(stream_data) == self.limit: - self.page_num += 1 - return {"page": self.page_num} - - def request_params( - self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None, **kwargs - ) -> MutableMapping[str, Any]: - params = { - "limit": self.limit, - "updated_at_min": (stream_slice or {}).get("start_date", self._start_date), - "updated_at_max": (stream_slice or {}).get("end_date", self._start_date), - } - - if next_page_token: - params.update(next_page_token) - - return params - - -class IncrementalRechargeStream(RechargeStream, ABC): - cursor_field = "updated_at" - state_checkpoint_interval = 250 - - def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: - latest_benchmark = latest_record[self.cursor_field] - if current_stream_state.get(self.cursor_field): - return {self.cursor_field: max(latest_benchmark, current_stream_state[self.cursor_field])} - return {self.cursor_field: latest_benchmark} - - -class Addresses(RechargeStreamModernAPI, IncrementalRechargeStream): - """ - Addresses Stream: https://developer.rechargepayments.com/v1-shopify?python#list-addresses - """ - - -class Charges(RechargeStreamModernAPI, IncrementalRechargeStream): - """ - Charges Stream: https://developer.rechargepayments.com/v1-shopify?python#list-charges - """ - - -class Collections(RechargeStreamModernAPI): - """ - Collections Stream - """ - - -class Customers(RechargeStreamModernAPI, IncrementalRechargeStream): - """ - Customers Stream: https://developer.rechargepayments.com/v1-shopify?python#list-customers - """ - - -class Discounts(RechargeStreamModernAPI, IncrementalRechargeStream): - """ - Discounts Stream: https://developer.rechargepayments.com/v1-shopify?python#list-discounts - """ - - -class Metafields(RechargeStreamModernAPI): - """ - Metafields Stream: https://developer.rechargepayments.com/v1-shopify?python#list-metafields - """ - - def request_params( - self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None, **kwargs - ) -> MutableMapping[str, Any]: - params = {"limit": self.limit, "owner_resource": (stream_slice or {}).get("owner_resource")} - if next_page_token: - params.update(next_page_token) - - return params - - def stream_slices( - self, sync_mode: SyncMode, cursor_field: List[str] = None, stream_state: Mapping[str, Any] = None - ) -> Iterable[Optional[Mapping[str, Any]]]: - owner_resources = ["customer", "store", "subscription"] - yield from [{"owner_resource": owner} for owner in owner_resources] - - -class Onetimes(RechargeStreamModernAPI, IncrementalRechargeStream): - """ - Onetimes Stream: https://developer.rechargepayments.com/v1-shopify?python#list-onetimes - """ - - -class OrdersDeprecatedApi(RechargeStreamDeprecatedAPI, IncrementalRechargeStream): - """ - Orders Stream: https://developer.rechargepayments.com/v1-shopify?python#list-orders - Using old API version to avoid schema changes and loosing email, first_name, last_name columns, because in new version it not present - """ - - name = "orders" - - -class OrdersModernApi(RechargeStreamModernAPI, IncrementalRechargeStream): - """ - Orders Stream: https://developer.rechargepayments.com/v1-shopify?python#list-orders - Using newer API version to fetch all the data, based on the Customer's UI toggle `use_deprecated_api: FALSE`. - """ - - name = "orders" - - -class Products(RechargeStreamDeprecatedAPI): - """ - Products Stream: https://developer.rechargepayments.com/v1-shopify?python#list-products - Products endpoint has 422 error with 2021-11 API version - """ - - -class Shop(RechargeStreamDeprecatedAPI): - """ - Shop Stream: https://developer.rechargepayments.com/v1-shopify?python#shop - Shop endpoint is not available in 2021-11 API version - """ - - primary_key = ["shop", "store"] - data_path = None - - def stream_slices( - self, sync_mode: SyncMode, cursor_field: List[str] = None, stream_state: Mapping[str, Any] = None - ) -> Iterable[Optional[Mapping[str, Any]]]: - return [{}] - - def request_params( - self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None, **kwargs - ) -> MutableMapping[str, Any]: - return {} - - -class Subscriptions(RechargeStreamModernAPI, IncrementalRechargeStream): - """ - Subscriptions Stream: https://developer.rechargepayments.com/v1-shopify?python#list-subscriptions - """ - - # reduce the slice date range to avoid 504 - Gateway Timeout on the Server side, - # since this stream could contain lots of data, causing the server to timeout. - # related issue: https://github.com/airbytehq/oncall/issues/3424 - period_in_days = 14 diff --git a/airbyte-integrations/connectors/source-recharge/source_recharge/components/datetime_based_cursor.py b/airbyte-integrations/connectors/source-recharge/source_recharge/components/datetime_based_cursor.py new file mode 100644 index 0000000000000..7957a3c0d9068 --- /dev/null +++ b/airbyte-integrations/connectors/source-recharge/source_recharge/components/datetime_based_cursor.py @@ -0,0 +1,70 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +from dataclasses import dataclass +from datetime import datetime +from typing import Any, List, Mapping, Optional, Union + +from airbyte_cdk.sources.declarative.incremental import DatetimeBasedCursor +from airbyte_cdk.sources.declarative.interpolation.interpolated_string import InterpolatedString +from airbyte_cdk.sources.declarative.types import Record, StreamSlice, StreamState + + +@dataclass +class RechargeDateTimeBasedCursor(DatetimeBasedCursor): + """ + Override for the default `DatetimeBasedCursor`. + + `get_request_params()` - to guarantee the records are returned in `ASC` order. + + Currently the `HttpRequester` couldn't handle the case when, + we need to omit all other `request_params` but `next_page_token` param, + typically when the `CursorPagination` straregy is applied. + + We should have the `request_parameters` structure like this, or similar to either keep or omit the parameter, + based on the paginated result: + ``` + HttpRequester: + ... + request_parameters: + # The `sort_by` param, will be omitted intentionaly on the paginated result + - sort_by: "updated_at-asc" + ignore_on_pagination: true + # the `some_other_param` param, will be kept on the paginated result + - some_other_param: "string_value" + ignore_on_pagination: false + ``` + + Because there is a `ignore_stream_slicer_parameters_on_paginated_requests` set to True for the `SimpleRetriever`, + we are able to omit everthing what we pass from the `DatetimeBasedCursor.get_request_params()` having the initial request as expected, + all subsequent requests are made based on Paginated Results. + """ + + def __post_init__(self, parameters: Mapping[str, Any]) -> None: + super().__post_init__(parameters=parameters) + + def get_request_params( + self, + *, + stream_state: Optional[StreamState] = None, + stream_slice: Optional[StreamSlice] = None, + next_page_token: Optional[Mapping[str, Any]] = None, + ) -> Mapping[str, Any]: + """ + The override to add additional param to the api request to guarantee the `ASC` records order. + + Background: + There is no possability to pass multiple request params from the YAML for the incremental streams, + in addition to the `start_time_option` or similar, having them ignored those additional params, + when we have `next_page_token`, which must be the single param to be passed to satisfy the API requirements. + """ + + params = super().get_request_params( + stream_state=stream_state, + stream_slice=stream_slice, + next_page_token=next_page_token, + ) + params["sort_by"] = "updated_at-asc" + return params diff --git a/airbyte-integrations/connectors/source-recharge/source_recharge/manifest.yaml b/airbyte-integrations/connectors/source-recharge/source_recharge/manifest.yaml new file mode 100644 index 0000000000000..45f3b06f50027 --- /dev/null +++ b/airbyte-integrations/connectors/source-recharge/source_recharge/manifest.yaml @@ -0,0 +1,272 @@ +version: 0.72.2 + +definitions: + # COMMON PARTS + schema_loader: + type: JsonFileSchemaLoader + file_path: "./source_recharge/schemas/{{ parameters['name'] }}.json" + selector: + description: >- + Base records selector for Full Refresh streams + type: RecordSelector + extractor: + type: DpathExtractor + field_path: ["{{ parameters.get('data_path')}}"] + # apply default schema normalization + schema_normalization: Default + authenticator: + type: ApiKeyAuthenticator + api_token: "{{ config['access_token'] }}" + inject_into: + type: RequestOption + inject_into: header + field_name: X-Recharge-Access-Token + + # PAGINATORS + paginator_deprecated_api: + type: DefaultPaginator + page_token_option: + type: RequestOption + inject_into: request_parameter + field_name: page + page_size_option: + inject_into: request_parameter + field_name: limit + type: RequestOption + pagination_strategy: + type: PageIncrement + start_from_page: 1 + page_size: 250 + inject_on_first_request: false + paginator_modern_api: + type: DefaultPaginator + page_token_option: + type: RequestOption + inject_into: request_parameter + field_name: cursor + page_size_option: + inject_into: request_parameter + type: RequestOption + field_name: limit + pagination_strategy: + type: CursorPagination + page_size: 250 + cursor_value: '{{ response.get("next_cursor", {}) }}' + stop_condition: '{{ not response.get("next_cursor", {}) }}' + + # REQUESTERS + requester_base: + description: >- + Default Base Requester for Full Refresh streams + type: HttpRequester + url_base: https://api.rechargeapps.com/ + path: "{{ parameters['name'] }}" + http_method: GET + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + type: DefaultErrorHandler + description: >- + The default error handler + requester_deprecated_api: + $ref: "#/definitions/requester_base" + # for deprecated retriever we should use `2021-01` api version + request_headers: + x-recharge-version: "2021-01" + requester_modern_api: + $ref: "#/definitions/requester_base" + # for modern retriever we should use >= `2021-11` api version + request_headers: + x-recharge-version: "2021-11" + + # RETRIEVER FOR `DEPRECATED API` + retriever_api_deprecated: + description: >- + Default Retriever for Deprecated API `2021-01` Full Refresh streams. + record_selector: + $ref: "#/definitions/selector" + requester: + $ref: "#/definitions/requester_deprecated_api" + paginator: + $ref: "#/definitions/paginator_deprecated_api" + # RETRIEVER FOR `MODERN API` + retriever_api_modern: + description: >- + Default Retriever for Modern API `2021-11` Full Refresh streams. + record_selector: + $ref: "#/definitions/selector" + requester: + $ref: "#/definitions/requester_modern_api" + paginator: + $ref: "#/definitions/paginator_modern_api" + # we should ignore all other req.params once we have the `next_page_token` in response + # for pagination in `2021-11` - modern api. + ignore_stream_slicer_parameters_on_paginated_requests: true + # RETRIEVER FOR `METAFIELDS` STREAM + retriever_metafields: + $ref: "#/definitions/retriever_api_modern" + partition_router: + type: ListPartitionRouter + cursor_field: owner_resource + values: + - address + - order + - charge + - customer + - store + - subscription + request_option: + inject_into: request_parameter + type: RequestOption + field_name: owner_resource + + # BASE STREAMS + # FULL-REFRESH + base_stream: + primary_key: "id" + schema_loader: + $ref: "#/definitions/schema_loader" + base_deprecated_api_stream: + $ref: "#/definitions/base_stream" + retriever: + $ref: "#/definitions/retriever_api_deprecated" + base_modern_api_stream: + $ref: "#/definitions/base_stream" + retriever: + $ref: "#/definitions/retriever_api_modern" + # INCREMENTAL + base_incremental_stream: + $ref: "#/definitions/base_modern_api_stream" + incremental_sync: + type: CustomIncrementalSync + class_name: source_recharge.components.datetime_based_cursor.RechargeDateTimeBasedCursor + cursor_field: "updated_at" + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%S%z" + - "%Y-%m-%dT%H:%M:%S" + datetime_format: "%Y-%m-%dT%H:%M:%S%z" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: "updated_at_min" + inject_into: request_parameter + + # FULL-REFRESH STREAMS + # COLLECTIONS + collections_stream: + description: >- + Collections Stream: https://developer.rechargepayments.com/2021-11/collections/collections_list + $ref: "#/definitions/base_modern_api_stream" + $parameters: + name: "collections" + data_path: "collections" + # METAFIELDS + metafields_stream: + description: >- + Metafields Stream: https://developer.rechargepayments.com/2021-11/metafields + $ref: "#/definitions/base_modern_api_stream" + retriever: + $ref: "#/definitions/retriever_metafields" + $parameters: + name: "metafields" + data_path: "metafields" + # PRODUCTS + products_stream: + description: >- + Products Stream: https://developer.rechargepayments.com/2021-11/products/products_list + Products endpoint has 422 error with 2021-11 API version + $ref: "#/definitions/base_deprecated_api_stream" + $parameters: + name: "products" + data_path: "products" + # SHOP + shop_stream: + description: >- + Shop Stream: https://developer.rechargepayments.com/v1-shopify?python#shop + Shop endpoint is not available in 2021-11 API version + $ref: "#/definitions/base_deprecated_api_stream" + retriever: + $ref: "#/definitions/retriever_api_deprecated" + paginator: + type: NoPagination + record_selector: + $ref: "#/definitions/selector" + extractor: + type: DpathExtractor + field_path: [] + primary_key: ["shop", "store"] + $parameters: + name: "shop" + + # INCREMENTAL STREAMS + # ADDRESSES + addresses_stream: + description: >- + Addresses Stream: https://developer.rechargepayments.com/2021-11/addresses/list_addresses + $ref: "#/definitions/base_incremental_stream" + $parameters: + name: "addresses" + data_path: "addresses" + # CHARGES + charges_stream: + description: >- + Charges Stream: https://developer.rechargepayments.com/2021-11/charges/charge_list + $ref: "#/definitions/base_incremental_stream" + $parameters: + name: "charges" + data_path: "charges" + # CUSTOMERS + customers_stream: + description: >- + Customers Stream: https://developer.rechargepayments.com/2021-11/customers/customers_list + $ref: "#/definitions/base_incremental_stream" + $parameters: + name: "customers" + data_path: "customers" + # DISCOUNTS + discounts_stream: + description: >- + Discounts Stream: https://developer.rechargepayments.com/2021-11/discounts/discounts_list + $ref: "#/definitions/base_incremental_stream" + $parameters: + name: "discounts" + data_path: "discounts" + # ONETIMES + onetimes_stream: + description: >- + Onetimes Stream: https://developer.rechargepayments.com/2021-11/onetimes/onetimes_list + $ref: "#/definitions/base_incremental_stream" + $parameters: + name: "onetimes" + data_path: "onetimes" + # SUBSCRIPTIONS + subscriptions_stream: + # description: >- + # Subscriptions Stream: https://developer.rechargepayments.com/2021-11/subscriptions/subscriptions_list + $ref: "#/definitions/base_incremental_stream" + $parameters: + name: "subscriptions" + data_path: "subscriptions" + +streams: + - "#/definitions/addresses_stream" + - "#/definitions/charges_stream" + - "#/definitions/collections_stream" + - "#/definitions/customers_stream" + - "#/definitions/discounts_stream" + - "#/definitions/metafields_stream" + - "#/definitions/onetimes_stream" + - "#/definitions/products_stream" + - "#/definitions/shop_stream" + - "#/definitions/subscriptions_stream" + # The `orders` stream remains implemented in `streams.py` due to: + # 1. Inability to resolve `$ref` conditionally + # 2. Inability to dynamically switch between paginators (diff api versions, require diff pagination approach) (or create the CustomPaginator component) + +check: + type: CheckStream + stream_names: + - shop diff --git a/airbyte-integrations/connectors/source-recharge/source_recharge/schemas/addresses.json b/airbyte-integrations/connectors/source-recharge/source_recharge/schemas/addresses.json index a8ec355493cdd..489e7b128f4fa 100644 --- a/airbyte-integrations/connectors/source-recharge/source_recharge/schemas/addresses.json +++ b/airbyte-integrations/connectors/source-recharge/source_recharge/schemas/addresses.json @@ -49,7 +49,40 @@ "type": ["null", "string"] }, "shipping_lines_override": { - "type": ["null", "array"] + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "code": { + "type": ["null", "string"] + }, + "price": { + "type": ["null", "string"] + }, + "title": { + "type": ["null", "string"] + } + } + } + }, + "shipping_lines_conserved": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "code": { + "type": ["null", "string"] + }, + "price": { + "type": ["null", "string"] + }, + "title": { + "type": ["null", "string"] + } + } + } }, "updated_at": { "type": ["null", "string"], @@ -57,6 +90,42 @@ }, "zip": { "type": ["null", "string"] + }, + "country_code": { + "type": ["null", "string"] + }, + "discounts": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "id": { + "type": ["null", "integer"] + } + } + } + }, + "order_attributes": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "id": { + "type": ["null", "string"] + } + } + } + }, + "order_note": { + "type": ["null", "string"] + }, + "payment_method_id": { + "type": ["null", "integer"] + }, + "presentment_currency": { + "type": ["null", "string"] } } } diff --git a/airbyte-integrations/connectors/source-recharge/source_recharge/schemas/charges.json b/airbyte-integrations/connectors/source-recharge/source_recharge/schemas/charges.json index 40faaeb50fe61..4e70968fd8116 100644 --- a/airbyte-integrations/connectors/source-recharge/source_recharge/schemas/charges.json +++ b/airbyte-integrations/connectors/source-recharge/source_recharge/schemas/charges.json @@ -35,6 +35,9 @@ "country": { "type": ["null", "string"] }, + "country_code": { + "type": ["null", "string"] + }, "customer_id": { "type": ["null", "integer"] }, @@ -72,6 +75,125 @@ } } }, + "charge_attempts": { + "type": ["null", "integer"] + }, + "currency": { + "type": ["null", "string"] + }, + "customer": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "email": { + "type": ["null", "string"] + }, + "external_customer_id": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "ecommerce": { + "type": ["null", "string"] + } + } + }, + "hash": { + "type": ["null", "string"] + }, + "id": { + "type": ["null", "integer"] + } + } + }, + "discounts": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "id": { + "type": ["null", "integer"] + }, + "code": { + "type": ["null", "string"] + }, + "value": { + "type": ["null", "integer"] + }, + "value_type": { + "type": ["null", "string"] + } + } + } + }, + "external_order_id": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "ecommerce": { + "type": ["null", "string"] + } + } + }, + "external_transaction_id": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "payment_processor": { + "type": ["null", "string"] + } + } + }, + "external_variant_id_not_found": { + "type": ["null", "boolean"] + }, + "external_variant_not_found": { + "type": ["null", "boolean"] + }, + "has_uncommitted_changes": { + "type": ["null", "boolean"] + }, + "last_charge_attempt": { + "type": ["null", "string"], + "format": "date-time" + }, + "merged_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "order_attributes": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "name": { + "type": ["null", "string"] + }, + "value": { + "type": ["null", "string"] + } + } + } + }, + "orders_count": { + "type": ["null", "integer"] + }, + "payment_processor": { + "type": ["null", "string"] + }, + "total_duties": { + "type": ["null", "string"] + }, + "total_weight_grams": { + "type": ["null", "integer"] + }, + "taxable": { + "type": ["null", "boolean"] + }, + "taxes_included": { + "type": ["null", "boolean"] + }, "client_details": { "type": ["null", "object"] }, @@ -166,6 +288,9 @@ "country": { "type": ["null", "string"] }, + "country_code": { + "type": ["null", "string"] + }, "customer_id": { "type": ["null", "integer"] }, diff --git a/airbyte-integrations/connectors/source-recharge/source_recharge/schemas/collections.json b/airbyte-integrations/connectors/source-recharge/source_recharge/schemas/collections.json index 0c6b5ce3dd736..a50682e7f3887 100644 --- a/airbyte-integrations/connectors/source-recharge/source_recharge/schemas/collections.json +++ b/airbyte-integrations/connectors/source-recharge/source_recharge/schemas/collections.json @@ -8,6 +8,15 @@ "name": { "type": ["null", "string"] }, + "description": { + "type": ["null", "string"] + }, + "title": { + "type": ["null", "string"] + }, + "type": { + "type": ["null", "string"] + }, "created_at": { "type": ["null", "string"], "format": "date-time" @@ -15,6 +24,9 @@ "updated_at": { "type": ["null", "string"], "format": "date-time" + }, + "sort_order": { + "type": ["null", "string"] } } } diff --git a/airbyte-integrations/connectors/source-recharge/source_recharge/schemas/customers.json b/airbyte-integrations/connectors/source-recharge/source_recharge/schemas/customers.json index 9771a8c7cdb62..bb1a56d26b81e 100644 --- a/airbyte-integrations/connectors/source-recharge/source_recharge/schemas/customers.json +++ b/airbyte-integrations/connectors/source-recharge/source_recharge/schemas/customers.json @@ -94,6 +94,33 @@ }, "apply_credit_to_next_recurring_charge": { "type": ["null", "boolean"] + }, + "apply_credit_to_next_checkout_charge": { + "type": ["null", "boolean"] + }, + "external_customer_id": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "ecommerce": { + "type": ["null", "string"] + } + } + }, + "has_payment_method_in_dunning": { + "type": ["null", "boolean"] + }, + "phone": { + "type": ["null", "string"] + }, + "subscriptions_active_count": { + "type": ["null", "integer"] + }, + "subscriptions_total_count": { + "type": ["null", "integer"] + }, + "tax_exempt": { + "type": ["null", "boolean"] } } } diff --git a/airbyte-integrations/connectors/source-recharge/source_recharge/schemas/orders.json b/airbyte-integrations/connectors/source-recharge/source_recharge/schemas/orders.json index 0112494af378c..0bdec0fb8047a 100644 --- a/airbyte-integrations/connectors/source-recharge/source_recharge/schemas/orders.json +++ b/airbyte-integrations/connectors/source-recharge/source_recharge/schemas/orders.json @@ -5,6 +5,18 @@ "id": { "type": ["null", "integer"] }, + "accepts_marketing": { + "type": ["null", "boolean"] + }, + "send_email_welcome": { + "type": ["null", "boolean"] + }, + "verified_email": { + "type": ["null", "boolean"] + }, + "phone": { + "type": ["null", "string"] + }, "address_id": { "type": ["null", "integer"] }, @@ -35,6 +47,9 @@ "country": { "type": ["null", "string"] }, + "country_code": { + "type": ["null", "string"] + }, "customer_id": { "type": ["null", "integer"] }, @@ -184,6 +199,27 @@ "id": { "type": ["null", "integer"] }, + "accepts_marketing": { + "type": ["null", "boolean"] + }, + "send_email_welcome": { + "type": ["null", "boolean"] + }, + "verified_email": { + "type": ["null", "boolean"] + }, + "phone": { + "type": ["null", "string"] + }, + "external_customer_id": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "ecommerce": { + "type": ["null", "string"] + } + } + }, "billing_address1": { "type": ["null", "string"] }, @@ -293,9 +329,78 @@ "external_inventory_policy": { "type": ["null", "string"] }, + "external_product_id": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "ecommerce": { + "type": ["null", "string"] + } + } + }, + "external_variant_id": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "ecommerce": { + "type": ["null", "string"] + } + } + }, "grams": { "type": ["null", "number"] }, + "handle": { + "type": ["null", "string"] + }, + "purchase_item_id": { + "type": ["null", "integer"] + }, + "purchase_item_type": { + "type": ["null", "string"] + }, + "tax_due": { + "type": ["null", "string"] + }, + "taxable_amount": { + "type": ["null", "string"] + }, + "taxable": { + "type": ["null", "boolean"] + }, + "total_price": { + "type": ["null", "string"] + }, + "unit_price": { + "type": ["null", "string"] + }, + "unit_price_includes_tax": { + "type": ["null", "boolean"] + }, + "original_price": { + "type": ["null", "number"] + }, + "product_title": { + "type": ["null", "string"] + }, + "tax_lines": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "price": { + "type": ["null", "string"] + }, + "rate": { + "type": ["null", "string"] + }, + "title": { + "type": ["null", "string"] + } + } + } + }, "images": { "type": ["null", "object"] }, @@ -377,6 +482,9 @@ "country": { "type": ["null", "string"] }, + "country_code": { + "type": ["null", "string"] + }, "customer_id": { "type": ["null", "integer"] }, @@ -468,6 +576,13 @@ "updated_at": { "type": ["null", "string"], "format": "date-time" + }, + "shipping_date": { + "type": ["null", "string"], + "format": "date-time" + }, + "shopify_id": { + "type": ["null", "string"] } } } diff --git a/airbyte-integrations/connectors/source-recharge/source_recharge/schemas/products.json b/airbyte-integrations/connectors/source-recharge/source_recharge/schemas/products.json index d0e10f87794c1..3d76001d76ee2 100644 --- a/airbyte-integrations/connectors/source-recharge/source_recharge/schemas/products.json +++ b/airbyte-integrations/connectors/source-recharge/source_recharge/schemas/products.json @@ -5,6 +5,9 @@ "id": { "type": ["null", "integer"] }, + "product_id": { + "type": ["null", "integer"] + }, "charge_interval_frequency": { "type": ["null", "integer"] }, diff --git a/airbyte-integrations/connectors/source-recharge/source_recharge/schemas/subscriptions.json b/airbyte-integrations/connectors/source-recharge/source_recharge/schemas/subscriptions.json index 7556bb211549d..40fca1f49e630 100644 --- a/airbyte-integrations/connectors/source-recharge/source_recharge/schemas/subscriptions.json +++ b/airbyte-integrations/connectors/source-recharge/source_recharge/schemas/subscriptions.json @@ -40,6 +40,27 @@ "expire_after_specific_number_of_charges": { "type": ["null", "integer"] }, + "external_product_id": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "ecommerce": { + "type": ["null", "string"] + } + } + }, + "external_variant_id": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "ecommerce": { + "type": ["null", "string"] + } + } + }, + "presentment_currency": { + "type": ["null", "string"] + }, "has_queued_charges": { "type": ["null", "integer"] }, diff --git a/airbyte-integrations/connectors/source-recharge/source_recharge/source.py b/airbyte-integrations/connectors/source-recharge/source_recharge/source.py index 1d1ea875f3e35..be0b9d43509d9 100644 --- a/airbyte-integrations/connectors/source-recharge/source_recharge/source.py +++ b/airbyte-integrations/connectors/source-recharge/source_recharge/source.py @@ -3,65 +3,26 @@ # -from typing import Any, List, Mapping, Tuple, Union +from typing import Any, List, Mapping -from airbyte_cdk import AirbyteLogger -from airbyte_cdk.models import SyncMode -from airbyte_cdk.sources import AbstractSource +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource from airbyte_cdk.sources.streams import Stream -from airbyte_cdk.sources.streams.http.auth import TokenAuthenticator +from source_recharge.streams import Orders, RechargeTokenAuthenticator -from .api import ( - Addresses, - Charges, - Collections, - Customers, - Discounts, - Metafields, - Onetimes, - OrdersDeprecatedApi, - OrdersModernApi, - Products, - Shop, - Subscriptions, -) +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. +WARNING: Do not modify this file. +""" -class RechargeTokenAuthenticator(TokenAuthenticator): - def get_auth_header(self) -> Mapping[str, Any]: - return {"X-Recharge-Access-Token": self._token} - - -class SourceRecharge(AbstractSource): - def check_connection(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> Tuple[bool, any]: - auth = RechargeTokenAuthenticator(token=config["access_token"]) - stream = Shop(config, authenticator=auth) - try: - result = list(stream.read_records(SyncMode.full_refresh))[0] - if stream.name in result.keys(): - return True, None - except Exception as error: - return False, f"Unable to connect to Recharge API with the provided credentials - {repr(error)}" - - def select_orders_stream(self, config: Mapping[str, Any], **kwargs) -> Union[OrdersDeprecatedApi, OrdersModernApi]: - if config.get("use_orders_deprecated_api"): - return OrdersDeprecatedApi(config, **kwargs) - else: - return OrdersModernApi(config, **kwargs) +# Declarative Source +class SourceRecharge(YamlDeclarativeSource): + def __init__(self) -> None: + super().__init__(**{"path_to_yaml": "manifest.yaml"}) def streams(self, config: Mapping[str, Any]) -> List[Stream]: auth = RechargeTokenAuthenticator(token=config["access_token"]) - return [ - Addresses(config, authenticator=auth), - Charges(config, authenticator=auth), - Collections(config, authenticator=auth), - Customers(config, authenticator=auth), - Discounts(config, authenticator=auth), - Metafields(config, authenticator=auth), - Onetimes(config, authenticator=auth), - # select the Orders stream class, based on the UI toggle "Use `Orders` Deprecated API" - self.select_orders_stream(config, authenticator=auth), - Products(config, authenticator=auth), - Shop(config, authenticator=auth), - Subscriptions(config, authenticator=auth), - ] + streams = super().streams(config=config) + streams.append(Orders(config, authenticator=auth)) + return streams diff --git a/airbyte-integrations/connectors/source-recharge/source_recharge/streams.py b/airbyte-integrations/connectors/source-recharge/source_recharge/streams.py new file mode 100644 index 0000000000000..dfdae52526eb5 --- /dev/null +++ b/airbyte-integrations/connectors/source-recharge/source_recharge/streams.py @@ -0,0 +1,137 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from abc import ABC +from enum import Enum +from typing import Any, Iterable, List, Mapping, MutableMapping, Optional + +import pendulum +import requests +from airbyte_cdk.sources.streams.http import HttpStream +from airbyte_cdk.sources.streams.http.requests_native_auth import TokenAuthenticator +from airbyte_cdk.sources.utils.transform import TransformConfig, TypeTransformer + + +class ApiVersion(Enum): + DEPRECATED = "2021-01" + MODERN = "2021-11" + + +class RechargeTokenAuthenticator(TokenAuthenticator): + def get_auth_header(self) -> Mapping[str, Any]: + return {"X-Recharge-Access-Token": self._token} + + +class Orders(HttpStream, ABC): + """ + Orders Stream: https://developer.rechargepayments.com/v1-shopify?python#list-orders + Notes: + Using `2021-01` the: `email`, `first_name`, `last_name` columns are not available, + because these are not present in `2021-11` as DEPRECATED fields. + """ + + primary_key: str = "id" + url_base: str = "https://api.rechargeapps.com/" + cursor_field: str = "updated_at" + page_size: int = 250 + page_num: int = 1 + period_in_days: int = 30 # Slice data request for 1 month + raise_on_http_errors: bool = True + state_checkpoint_interval: int = 250 + + # registering the default schema transformation + transformer: TypeTransformer = TypeTransformer(TransformConfig.DefaultSchemaNormalization) + + def __init__(self, config: Mapping[str, Any], **kwargs) -> None: + super().__init__(**kwargs) + self._start_date = config["start_date"] + self.api_version = ApiVersion.DEPRECATED if config.get("use_orders_deprecated_api") else ApiVersion.MODERN + + @property + def data_path(self) -> str: + return self.name + + def request_headers(self, **kwargs) -> Mapping[str, Any]: + return {"x-recharge-version": self.api_version.value} + + def path(self, **kwargs) -> str: + return self.name + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + next_page_token = None + if self.api_version == ApiVersion.MODERN: + cursor = response.json().get("next_cursor") + if cursor: + next_page_token = {"cursor": cursor} + else: + stream_data = self.get_stream_data(response.json()) + if len(stream_data) == self.page_size: + self.page_num += 1 + next_page_token = {"page": self.page_num} + return next_page_token + + def _update_params_with_min_max_date_range( + self, + params: MutableMapping[str, Any], + stream_slice: Optional[Mapping[str, Any]] = None, + ) -> MutableMapping[str, Any]: + params.update( + { + "sort_by": "updated_at-asc", + "updated_at_min": (stream_slice or {}).get("start_date"), + "updated_at_max": (stream_slice or {}).get("end_date"), + } + ) + return params + + def request_params( + self, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None, **kwargs + ) -> MutableMapping[str, Any]: + params = {"limit": self.page_size} + if self.api_version == ApiVersion.MODERN: + # if a cursor value is passed, only limit can be passed with it! + if next_page_token: + params.update(next_page_token) + else: + params = self._update_params_with_min_max_date_range(params, stream_slice) + return params + else: + params = self._update_params_with_min_max_date_range(params, stream_slice) + if next_page_token: + params.update(next_page_token) + return params + + def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: + response_data = response.json() + stream_data = self.get_stream_data(response_data) + yield from stream_data + + def get_stream_data(self, response_data: Any) -> List[dict]: + if self.data_path: + return response_data.get(self.data_path, []) + else: + return [response_data] + + def should_retry(self, response: requests.Response) -> bool: + content_length = int(response.headers.get("Content-Length", 0)) + incomplete_data_response = response.status_code == 200 and content_length > len(response.content) + if incomplete_data_response: + return True + return super().should_retry(response) + + def stream_slices(self, stream_state: Mapping[str, Any] = None, **kwargs) -> Iterable[Optional[Mapping[str, Any]]]: + start_date_value = (stream_state or {}).get(self.cursor_field, self._start_date) if self.cursor_field else self._start_date + now = pendulum.now() + # dates are inclusive, so we add 1 second so that time periods do not overlap + start_date = pendulum.parse(start_date_value).add(seconds=1) + while start_date <= now: + end_date = start_date.add(days=self.period_in_days) + yield {"start_date": start_date.strftime("%Y-%m-%d %H:%M:%S"), "end_date": end_date.strftime("%Y-%m-%d %H:%M:%S")} + start_date = end_date.add(seconds=1) + + def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: + latest_benchmark = latest_record[self.cursor_field] + if current_stream_state.get(self.cursor_field): + return {self.cursor_field: max(latest_benchmark, current_stream_state[self.cursor_field])} + return {self.cursor_field: latest_benchmark} diff --git a/airbyte-integrations/connectors/source-recharge/unit_tests/conftest.py b/airbyte-integrations/connectors/source-recharge/unit_tests/conftest.py new file mode 100644 index 0000000000000..51d407d2dd18d --- /dev/null +++ b/airbyte-integrations/connectors/source-recharge/unit_tests/conftest.py @@ -0,0 +1,22 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from typing import Any, Mapping +from unittest.mock import patch + +import pytest + + +@pytest.fixture(name="config") +def config() -> Mapping[str, Any]: + return { + "authenticator": None, + "access_token": "access_token", + "start_date": "2021-08-15T00:00:00Z", + } + + +@pytest.fixture(name="logger_mock") +def logger_mock_fixture() -> None: + return patch("source_recharge.source.AirbyteLogger") + + diff --git a/airbyte-integrations/connectors/source-recharge/unit_tests/integration/__init__.py b/airbyte-integrations/connectors/source-recharge/unit_tests/integration/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/airbyte-integrations/connectors/source-recharge/unit_tests/integration/config.py b/airbyte-integrations/connectors/source-recharge/unit_tests/integration/config.py new file mode 100644 index 0000000000000..6776c88e59f1e --- /dev/null +++ b/airbyte-integrations/connectors/source-recharge/unit_tests/integration/config.py @@ -0,0 +1,31 @@ +# +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. +# + + +from __future__ import annotations + +import datetime as dt +from typing import Any, MutableMapping + +import pendulum + +START_DATE = "2023-01-01T00:00:00Z" +ACCESS_TOKEN = "test_access_token" +DATE_TIME_FORMAT = "%Y-%m-%dT%H:%M:%S%z" +NOW = pendulum.now(tz="utc") + + +class ConfigBuilder: + def __init__(self) -> None: + self._config: MutableMapping[str, Any] = { + "access_token": ACCESS_TOKEN, + "start_date": START_DATE, + } + + def with_start_date(self, start_date: str) -> ConfigBuilder: + self._config["start_date"] = dt.datetime.strptime(start_date, DATE_TIME_FORMAT).strftime(DATE_TIME_FORMAT) + return self + + def build(self) -> MutableMapping[str, Any]: + return self._config diff --git a/airbyte-integrations/connectors/source-recharge/unit_tests/integration/pagination.py b/airbyte-integrations/connectors/source-recharge/unit_tests/integration/pagination.py new file mode 100644 index 0000000000000..4522eec9675e8 --- /dev/null +++ b/airbyte-integrations/connectors/source-recharge/unit_tests/integration/pagination.py @@ -0,0 +1,19 @@ +# +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. +# + + +from typing import Any, Dict + +from airbyte_cdk.test.mock_http.request import HttpRequest +from airbyte_cdk.test.mock_http.response_builder import PaginationStrategy + +NEXT_PAGE_TOKEN = "New_Next_Page_Token" + + +class RechargePaginationStrategy(PaginationStrategy): + def __init__(self, request: HttpRequest, next_page_token: str) -> None: + self._next_page_token = next_page_token + + def update(self, response: Dict[str, Any]) -> None: + response["next_cursor"] = self._next_page_token diff --git a/airbyte-integrations/connectors/source-recharge/unit_tests/integration/request_builder.py b/airbyte-integrations/connectors/source-recharge/unit_tests/integration/request_builder.py new file mode 100644 index 0000000000000..e54bed651559a --- /dev/null +++ b/airbyte-integrations/connectors/source-recharge/unit_tests/integration/request_builder.py @@ -0,0 +1,52 @@ +# +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. +# + + +from __future__ import annotations + +import datetime as dt +from typing import Any, MutableMapping + +from airbyte_cdk.test.mock_http.request import HttpRequest + +from .config import ACCESS_TOKEN, DATE_TIME_FORMAT + + +def get_stream_request(stream_name: str) -> RequestBuilder: + return RequestBuilder.get_endpoint(stream_name).with_limit(250) + + +class RequestBuilder: + + @classmethod + def get_endpoint(cls, endpoint: str) -> RequestBuilder: + return cls(endpoint=endpoint) + + def __init__(self, endpoint: str) -> None: + self._endpoint: str = endpoint + self._api_version: str = "2021-11" + self._query_params: MutableMapping[str, Any] = {} + + def with_limit(self, limit: int) -> RequestBuilder: + self._query_params["limit"] = limit + return self + + def with_updated_at_min(self, value: str) -> RequestBuilder: + self._query_params["updated_at_min"] = dt.datetime.strptime(value, DATE_TIME_FORMAT).strftime(DATE_TIME_FORMAT) + self._query_params["sort_by"] = "updated_at-asc" + return self + + def with_next_page_token(self, next_page_token: str) -> RequestBuilder: + self._query_params["cursor"] = next_page_token + return self + + def build(self) -> HttpRequest: + return HttpRequest( + url=f"https://api.rechargeapps.com/{self._endpoint}", + query_params=self._query_params, + headers={ + "X-Recharge-Version": self._api_version, + "X-Recharge-Access-Token": ACCESS_TOKEN, + }, + ) diff --git a/airbyte-integrations/connectors/source-recharge/unit_tests/integration/response_builder.py b/airbyte-integrations/connectors/source-recharge/unit_tests/integration/response_builder.py new file mode 100644 index 0000000000000..bd2872d3db673 --- /dev/null +++ b/airbyte-integrations/connectors/source-recharge/unit_tests/integration/response_builder.py @@ -0,0 +1,58 @@ +# +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. +# + + +import json +from http import HTTPStatus +from typing import Any, List, Mapping, Optional, Union + +from airbyte_cdk.test.mock_http import HttpResponse +from airbyte_cdk.test.mock_http.response_builder import ( + FieldPath, + HttpResponseBuilder, + RecordBuilder, + create_record_builder, + create_response_builder, + find_template, +) + +from .pagination import NEXT_PAGE_TOKEN, RechargePaginationStrategy +from .request_builder import get_stream_request + + +def build_response( + body: Union[Mapping[str, Any], List[Mapping[str, Any]]], + status_code: HTTPStatus, + headers: Optional[Mapping[str, str]] = None, +) -> HttpResponse: + headers = headers or {} + return HttpResponse( + body=json.dumps(body), + status_code=status_code.value, + headers=headers, + ) + + +def get_stream_response(stream_name: str) -> HttpResponseBuilder: + return create_response_builder( + response_template=find_template(stream_name, __file__), + records_path=FieldPath(stream_name), + pagination_strategy=RechargePaginationStrategy( + request=get_stream_request(stream_name).build(), + next_page_token=NEXT_PAGE_TOKEN, + ), + ) + + +def get_stream_record( + stream_name: str, + record_id_path: str, + cursor_field: Optional[str] = None, +) -> RecordBuilder: + return create_record_builder( + response_template=find_template(stream_name, __file__), + records_path=FieldPath(stream_name), + record_id_path=FieldPath(record_id_path), + record_cursor_path=FieldPath(cursor_field) if cursor_field else None, + ) diff --git a/airbyte-integrations/connectors/source-recharge/unit_tests/integration/streams/__init__.py b/airbyte-integrations/connectors/source-recharge/unit_tests/integration/streams/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/airbyte-integrations/connectors/source-recharge/unit_tests/integration/streams/test_collections.py b/airbyte-integrations/connectors/source-recharge/unit_tests/integration/streams/test_collections.py new file mode 100644 index 0000000000000..8d33879f39002 --- /dev/null +++ b/airbyte-integrations/connectors/source-recharge/unit_tests/integration/streams/test_collections.py @@ -0,0 +1,41 @@ +# +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. +# + +from unittest import TestCase + +import freezegun +from airbyte_cdk.test.mock_http import HttpMocker + +from ..config import NOW +from ..request_builder import get_stream_request +from ..response_builder import NEXT_PAGE_TOKEN, get_stream_record, get_stream_response +from ..utils import config, read_full_refresh + +_STREAM_NAME = "collections" + + +@freezegun.freeze_time(NOW.isoformat()) +class TestFullRefresh(TestCase): + @HttpMocker() + def test_given_one_page_when_read_then_return_records(self, http_mocker: HttpMocker) -> None: + http_mocker.get( + get_stream_request(_STREAM_NAME).build(), + get_stream_response(_STREAM_NAME).with_record(get_stream_record(_STREAM_NAME, "id")).build(), + ) + output = read_full_refresh(config(), _STREAM_NAME) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_multiple_pages_when_read_then_return_records(self, http_mocker: HttpMocker) -> None: + http_mocker.get( + get_stream_request(_STREAM_NAME).with_next_page_token(NEXT_PAGE_TOKEN).build(), + get_stream_response(_STREAM_NAME).with_record(get_stream_record(_STREAM_NAME, "id")).build(), + ) + http_mocker.get( + get_stream_request(_STREAM_NAME).build(), + get_stream_response(_STREAM_NAME).with_pagination().with_record(get_stream_record(_STREAM_NAME, "id")).build(), + ) + + output = read_full_refresh(config(), _STREAM_NAME) + assert len(output.records) == 2 diff --git a/airbyte-integrations/connectors/source-recharge/unit_tests/integration/streams/test_discounts.py b/airbyte-integrations/connectors/source-recharge/unit_tests/integration/streams/test_discounts.py new file mode 100644 index 0000000000000..903456d1ddb55 --- /dev/null +++ b/airbyte-integrations/connectors/source-recharge/unit_tests/integration/streams/test_discounts.py @@ -0,0 +1,84 @@ +# +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. +# + + +from unittest import TestCase + +import freezegun +from airbyte_cdk.test.mock_http import HttpMocker + +from ..config import NOW, START_DATE +from ..request_builder import get_stream_request +from ..response_builder import NEXT_PAGE_TOKEN, get_stream_record, get_stream_response +from ..utils import config, get_cursor_value_from_state_message, read_full_refresh, read_incremental + +_STREAM_NAME = "discounts" +_CURSOR_FIELD = "updated_at" + + +@freezegun.freeze_time(NOW.isoformat()) +class TestFullRefresh(TestCase): + @HttpMocker() + def test_given_one_page_when_read_then_return_records(self, http_mocker: HttpMocker) -> None: + http_mocker.get( + get_stream_request(_STREAM_NAME).with_updated_at_min(START_DATE).build(), + get_stream_response(_STREAM_NAME).with_record(get_stream_record(_STREAM_NAME, "id", _CURSOR_FIELD)).build(), + ) + output = read_full_refresh(config(), _STREAM_NAME) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_multiple_pages_when_read_then_return_records(self, http_mocker: HttpMocker) -> None: + + http_mocker.get( + get_stream_request(_STREAM_NAME).with_next_page_token(NEXT_PAGE_TOKEN).build(), + get_stream_response(_STREAM_NAME).with_record(get_stream_record(_STREAM_NAME, "id", _CURSOR_FIELD)).build(), + ) + http_mocker.get( + get_stream_request(_STREAM_NAME).with_updated_at_min(START_DATE).build(), + get_stream_response(_STREAM_NAME).with_pagination().with_record(get_stream_record(_STREAM_NAME, "id", _CURSOR_FIELD)).build(), + ) + + output = read_full_refresh(config(), _STREAM_NAME) + assert len(output.records) == 2 + + +@freezegun.freeze_time(NOW.isoformat()) +class TestIncremental(TestCase): + @HttpMocker() + def test_state_message_produced_while_read_and_state_match_latest_record(self, http_mocker: HttpMocker) -> None: + min_cursor_value = "2024-01-01T00:00:00+00:00" + max_cursor_value = "2024-02-01T00:00:00+00:00" + + http_mocker.get( + get_stream_request(_STREAM_NAME).with_updated_at_min(START_DATE).build(), + get_stream_response(_STREAM_NAME) + .with_record(get_stream_record(_STREAM_NAME, "id", _CURSOR_FIELD).with_cursor(min_cursor_value)) + .with_record(get_stream_record(_STREAM_NAME, "id", _CURSOR_FIELD).with_cursor(max_cursor_value)) + .build(), + ) + + output = read_incremental(config(), _STREAM_NAME) + test_cursor_value = get_cursor_value_from_state_message(output, _CURSOR_FIELD) + assert test_cursor_value == max_cursor_value + + @HttpMocker() + def test_given_multiple_pages_when_read_then_return_records_with_state(self, http_mocker: HttpMocker) -> None: + min_cursor_value = "2024-01-01T00:00:00+00:00" + max_cursor_value = "2024-02-01T00:00:00+00:00" + http_mocker.get( + get_stream_request(_STREAM_NAME).with_next_page_token(NEXT_PAGE_TOKEN).build(), + get_stream_response(_STREAM_NAME).with_record(get_stream_record(_STREAM_NAME, "id", _CURSOR_FIELD)).build(), + ) + http_mocker.get( + get_stream_request(_STREAM_NAME).with_updated_at_min(START_DATE).build(), + get_stream_response(_STREAM_NAME) + .with_pagination() + .with_record(get_stream_record(_STREAM_NAME, "id", _CURSOR_FIELD).with_cursor(min_cursor_value)) + .with_record(get_stream_record(_STREAM_NAME, "id", _CURSOR_FIELD).with_cursor(max_cursor_value)) + .build(), + ) + + output = read_incremental(config(), _STREAM_NAME) + assert len(output.records) == 3 diff --git a/airbyte-integrations/connectors/source-recharge/unit_tests/integration/streams/test_onetimes.py b/airbyte-integrations/connectors/source-recharge/unit_tests/integration/streams/test_onetimes.py new file mode 100644 index 0000000000000..6f0fbd1b63697 --- /dev/null +++ b/airbyte-integrations/connectors/source-recharge/unit_tests/integration/streams/test_onetimes.py @@ -0,0 +1,84 @@ +# +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. +# + + +from unittest import TestCase + +import freezegun +from airbyte_cdk.test.mock_http import HttpMocker + +from ..config import NOW, START_DATE +from ..request_builder import get_stream_request +from ..response_builder import NEXT_PAGE_TOKEN, get_stream_record, get_stream_response +from ..utils import config, get_cursor_value_from_state_message, read_full_refresh, read_incremental + +_STREAM_NAME = "onetimes" +_CURSOR_FIELD = "updated_at" + + +@freezegun.freeze_time(NOW.isoformat()) +class TestFullRefresh(TestCase): + @HttpMocker() + def test_given_one_page_when_read_then_return_records(self, http_mocker: HttpMocker) -> None: + http_mocker.get( + get_stream_request(_STREAM_NAME).with_updated_at_min(START_DATE).build(), + get_stream_response(_STREAM_NAME).with_record(get_stream_record(_STREAM_NAME, "id", _CURSOR_FIELD)).build(), + ) + output = read_full_refresh(config(), _STREAM_NAME) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_multiple_pages_when_read_then_return_records(self, http_mocker: HttpMocker) -> None: + + http_mocker.get( + get_stream_request(_STREAM_NAME).with_next_page_token(NEXT_PAGE_TOKEN).build(), + get_stream_response(_STREAM_NAME).with_record(get_stream_record(_STREAM_NAME, "id", _CURSOR_FIELD)).build(), + ) + http_mocker.get( + get_stream_request(_STREAM_NAME).with_updated_at_min(START_DATE).build(), + get_stream_response(_STREAM_NAME).with_pagination().with_record(get_stream_record(_STREAM_NAME, "id", _CURSOR_FIELD)).build(), + ) + + output = read_full_refresh(config(), _STREAM_NAME) + assert len(output.records) == 2 + + +@freezegun.freeze_time(NOW.isoformat()) +class TestIncremental(TestCase): + @HttpMocker() + def test_state_message_produced_while_read_and_state_match_latest_record(self, http_mocker: HttpMocker) -> None: + min_cursor_value = "2024-01-01T00:00:00+00:00" + max_cursor_value = "2024-02-01T00:00:00+00:00" + + http_mocker.get( + get_stream_request(_STREAM_NAME).with_updated_at_min(START_DATE).build(), + get_stream_response(_STREAM_NAME) + .with_record(get_stream_record(_STREAM_NAME, "id", _CURSOR_FIELD).with_cursor(min_cursor_value)) + .with_record(get_stream_record(_STREAM_NAME, "id", _CURSOR_FIELD).with_cursor(max_cursor_value)) + .build(), + ) + + output = read_incremental(config(), _STREAM_NAME) + test_cursor_value = get_cursor_value_from_state_message(output, _CURSOR_FIELD) + assert test_cursor_value == max_cursor_value + + @HttpMocker() + def test_given_multiple_pages_when_read_then_return_records_with_state(self, http_mocker: HttpMocker) -> None: + min_cursor_value = "2024-01-01T00:00:00+00:00" + max_cursor_value = "2024-02-01T00:00:00+00:00" + http_mocker.get( + get_stream_request(_STREAM_NAME).with_next_page_token(NEXT_PAGE_TOKEN).build(), + get_stream_response(_STREAM_NAME).with_record(get_stream_record(_STREAM_NAME, "id", _CURSOR_FIELD)).build(), + ) + http_mocker.get( + get_stream_request(_STREAM_NAME).with_updated_at_min(START_DATE).build(), + get_stream_response(_STREAM_NAME) + .with_pagination() + .with_record(get_stream_record(_STREAM_NAME, "id", _CURSOR_FIELD).with_cursor(min_cursor_value)) + .with_record(get_stream_record(_STREAM_NAME, "id", _CURSOR_FIELD).with_cursor(max_cursor_value)) + .build(), + ) + + output = read_incremental(config(), _STREAM_NAME) + assert len(output.records) == 3 diff --git a/airbyte-integrations/connectors/source-recharge/unit_tests/integration/utils.py b/airbyte-integrations/connectors/source-recharge/unit_tests/integration/utils.py new file mode 100644 index 0000000000000..3c4cf5430e68d --- /dev/null +++ b/airbyte-integrations/connectors/source-recharge/unit_tests/integration/utils.py @@ -0,0 +1,73 @@ +# +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. +# + + +from lib2to3.pgen2.literals import test +from typing import Any, List, Mapping, Optional + +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, read +from airbyte_protocol.models import AirbyteStateMessage, ConfiguredAirbyteCatalog, SyncMode +from source_recharge import SourceRecharge + +from .config import ConfigBuilder + + +def config() -> ConfigBuilder: + return ConfigBuilder() + + +def catalog(stream_name: str, sync_mode: SyncMode) -> ConfiguredAirbyteCatalog: + return CatalogBuilder().with_stream(stream_name, sync_mode).build() + + +def source() -> SourceRecharge: + return SourceRecharge() + + +def read_output( + config_builder: ConfigBuilder, + stream_name: str, + sync_mode: SyncMode, + state: Optional[List[AirbyteStateMessage]] = None, + expected_exception: Optional[bool] = False, +) -> EntrypointOutput: + _catalog = catalog(stream_name, sync_mode) + _config = config_builder.build() + return read(source(), _config, _catalog, state, expected_exception) + + +def read_full_refresh( + config_: ConfigBuilder, + stream_name: str, + expected_exception: bool = False, +) -> EntrypointOutput: + return read_output( + config_builder=config_, + stream_name=stream_name, + sync_mode=SyncMode.full_refresh, + expected_exception=expected_exception, + ) + + +def read_incremental( + config_: ConfigBuilder, + stream_name: str, + state: Optional[List[AirbyteStateMessage]] = None, + expected_exception: bool = False, +) -> EntrypointOutput: + return read_output( + config_builder=config_, + stream_name=stream_name, + sync_mode=SyncMode.incremental, + state=state, + expected_exception=expected_exception, + ) + + +def get_cursor_value_from_state_message( + test_output: Mapping[str, Any], + cursor_field: Optional[str] = None, +) -> str: + return dict(test_output.most_recent_state.stream_state).get(cursor_field) diff --git a/airbyte-integrations/connectors/source-recharge/unit_tests/resource/http/response/collections.json b/airbyte-integrations/connectors/source-recharge/unit_tests/resource/http/response/collections.json new file mode 100644 index 0000000000000..61c2acfefdd93 --- /dev/null +++ b/airbyte-integrations/connectors/source-recharge/unit_tests/resource/http/response/collections.json @@ -0,0 +1,26 @@ +{ + "next_cursor": null, + "previous_cursor": null, + "collections": [ + { + "id": 134129, + "created_at": "2022-03-28T12:27:03+00:00", + "name": "test_collection_134129", + "description": "kitten accessories soft.", + "sort_order": "id-asc", + "title": "Soft Kitty", + "type": "manual", + "updated_at": "2022-03-28T12:27:03+00:00" + }, + { + "id": 134136, + "created_at": "2022-03-28T15:38:27+00:00", + "name": "test_collection_134136", + "description": "cat products august 2022", + "sort_order": "title-asc", + "title": "Cats", + "type": "manual", + "updated_at": "2022-03-28T15:38:27+00:00" + } + ] +} diff --git a/airbyte-integrations/connectors/source-recharge/unit_tests/resource/http/response/discounts.json b/airbyte-integrations/connectors/source-recharge/unit_tests/resource/http/response/discounts.json new file mode 100644 index 0000000000000..469a3f67d2268 --- /dev/null +++ b/airbyte-integrations/connectors/source-recharge/unit_tests/resource/http/response/discounts.json @@ -0,0 +1,46 @@ +{ + "next_cursor": null, + "previous_cursor": null, + "discounts": [ + { + "id": 59568555, + "applies_to": { + "ids": [], + "purchase_item_type": "ALL", + "resource": null + }, + "channel_settings": { + "api": { + "can_apply": true + }, + "checkout_page": { + "can_apply": true + }, + "customer_portal": { + "can_apply": true + }, + "merchant_portal": { + "can_apply": true + } + }, + "code": "Discount1", + "created_at": "2021-07-26T19:16:17+00:00", + "ends_at": null, + "external_discount_id": { + "ecommerce": null + }, + "external_discount_source": null, + "prerequisite_subtotal_min": null, + "starts_at": null, + "status": "enabled", + "updated_at": "2024-02-01T00:00:00+00:00", + "usage_limits": { + "first_time_customer_restriction": false, + "max_subsequent_redemptions": null, + "one_application_per_customer": false + }, + "value": "100.00", + "value_type": "percentage" + } + ] +} diff --git a/airbyte-integrations/connectors/source-recharge/unit_tests/resource/http/response/onetimes.json b/airbyte-integrations/connectors/source-recharge/unit_tests/resource/http/response/onetimes.json new file mode 100644 index 0000000000000..68932c41d8848 --- /dev/null +++ b/airbyte-integrations/connectors/source-recharge/unit_tests/resource/http/response/onetimes.json @@ -0,0 +1,33 @@ +{ + "next": null, + "previous": null, + "onetimes": [ + { + "id": 16909886, + "address_id": 45154492, + "created_at": "2024-01-01T00:00:00+00:00", + "customer_id": 40565990, + "external_product_id": { + "ecommerce": "4950280863846" + }, + "external_variant_id": { + "ecommerce": "32139793137766" + }, + "is_cancelled": false, + "next_charge_scheduled_at": "2025-02-01T00:00:00+00:00", + "price": "6.00", + "product_title": "ABC Shirt", + "properties": [ + { + "name": "Color", + "value": "Blue" + } + ], + "quantity": 1, + "sku": "TOM0001", + "sku_override": false, + "updated_at": "2024-02-01T00:00:00+00:00", + "variant_title": "Blue star" + } + ] +} diff --git a/airbyte-integrations/connectors/source-recharge/unit_tests/test_api.py b/airbyte-integrations/connectors/source-recharge/unit_tests/test_api.py deleted file mode 100644 index 3981d725c0479..0000000000000 --- a/airbyte-integrations/connectors/source-recharge/unit_tests/test_api.py +++ /dev/null @@ -1,364 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from http import HTTPStatus -from unittest.mock import patch - -import pytest -import requests -from source_recharge.api import ( - Addresses, - Charges, - Collections, - Customers, - Discounts, - Metafields, - Onetimes, - OrdersDeprecatedApi, - OrdersModernApi, - Products, - RechargeStreamDeprecatedAPI, - RechargeStreamModernAPI, - Shop, - Subscriptions, -) - - -# config -@pytest.fixture(name="config") -def config(): - return { - "authenticator": None, - "access_token": "access_token", - "start_date": "2021-08-15T00:00:00Z", - } - - -class TestCommon: - @pytest.mark.parametrize( - "stream_cls, expected", - [ - (Addresses, "id"), - (Charges, "id"), - (Collections, "id"), - (Customers, "id"), - (Discounts, "id"), - (Metafields, "id"), - (Onetimes, "id"), - (OrdersDeprecatedApi, "id"), - (OrdersModernApi, "id"), - (Products, "id"), - (Shop, ["shop", "store"]), - (Subscriptions, "id"), - ], - ) - def test_primary_key(self, stream_cls, expected): - assert expected == stream_cls.primary_key - - @pytest.mark.parametrize( - "stream_cls, stream_type, expected", - [ - (Addresses, "incremental", "addresses"), - (Charges, "incremental", "charges"), - (Collections, "full-refresh", "collections"), - (Customers, "incremental", "customers"), - (Discounts, "incremental", "discounts"), - (Metafields, "full-refresh", "metafields"), - (Onetimes, "incremental", "onetimes"), - (OrdersDeprecatedApi, "incremental", "orders"), - (OrdersModernApi, "incremental", "orders"), - (Products, "full-refresh", "products"), - (Shop, "full-refresh", None), - (Subscriptions, "incremental", "subscriptions"), - ], - ) - def test_data_path(self, config, stream_cls, stream_type, expected): - if stream_type == "incremental": - result = stream_cls(config, authenticator=None).data_path - else: - result = stream_cls(config, authenticator=None).data_path - assert expected == result - - @pytest.mark.parametrize( - "stream_cls, stream_type, expected", - [ - (Addresses, "incremental", "addresses"), - (Charges, "incremental", "charges"), - (Collections, "full-refresh", "collections"), - (Customers, "incremental", "customers"), - (Discounts, "incremental", "discounts"), - (Metafields, "full-refresh", "metafields"), - (Onetimes, "incremental", "onetimes"), - (OrdersDeprecatedApi, "incremental", "orders"), - (OrdersModernApi, "incremental", "orders"), - (Products, "full-refresh", "products"), - (Shop, "full-refresh", "shop"), - (Subscriptions, "incremental", "subscriptions"), - ], - ) - def test_path(self, config, stream_cls, stream_type, expected): - if stream_type == "incremental": - result = stream_cls(config, authenticator=None).path() - else: - result = stream_cls(config, authenticator=None).path() - assert expected == result - - @pytest.mark.parametrize( - ("http_status", "headers", "should_retry"), - [ - (HTTPStatus.OK, {"Content-Length": 256}, True), - (HTTPStatus.BAD_REQUEST, {}, False), - (HTTPStatus.TOO_MANY_REQUESTS, {}, True), - (HTTPStatus.INTERNAL_SERVER_ERROR, {}, True), - (HTTPStatus.FORBIDDEN, {}, False), - ], - ) - @pytest.mark.parametrize("stream_cls", (RechargeStreamDeprecatedAPI, RechargeStreamModernAPI)) - def test_should_retry(self, config, http_status, headers, should_retry, stream_cls): - response = requests.Response() - response.status_code = http_status - response._content = b"" - response.headers = headers - stream = stream_cls(config, authenticator=None) - assert stream.should_retry(response) == should_retry - - -class TestFullRefreshStreams: - def generate_records(self, stream_name, count): - if not stream_name: - return {f"record_{1}": f"test_{1}"} - result = [] - for i in range(0, count): - result.append({f"record_{i}": f"test_{i}"}) - return {stream_name: result} - - @pytest.mark.parametrize( - "stream_cls, cursor_response, expected", - [ - (Collections, {"next_cursor": "some next cursor"}, {"cursor": "some next cursor"}), - (Metafields, {"next_cursor": "some next cursor"}, {"cursor": "some next cursor"}), - (OrdersModernApi, {"next_cursor": "some next cursor"}, {"cursor": "some next cursor"}), - (Products, {}, {"page": 2}), - (Shop, {}, None), - (OrdersDeprecatedApi, {}, {"page": 2}), - ], - ) - def test_next_page_token(self, config, stream_cls, cursor_response, requests_mock, expected): - stream = stream_cls(config, authenticator=None) - stream.limit = 2 - url = f"{stream.url_base}{stream.path()}" - response = {**cursor_response, **self.generate_records(stream.data_path, 2)} - requests_mock.get(url, json=response) - response = requests.get(url) - assert stream.next_page_token(response) == expected - - @pytest.mark.parametrize( - "stream_cls, next_page_token, stream_state, stream_slice, expected", - [ - ( - Collections, - None, - {}, - {"start_date": "2020-01-01T00:00:00Z", "end_date": "2020-02-01T00:00:00Z"}, - {"limit": 250, "updated_at_min": "2020-01-01T00:00:00Z", "updated_at_max": "2020-02-01T00:00:00Z"}, - ), - (Metafields, {"cursor": "12353"}, {"updated_at": "2030-01-01"}, {}, {"limit": 250, "owner_resource": None, "cursor": "12353"}), - ( - Products, - None, - {}, - {"start_date": "2020-01-01T00:00:00Z", "end_date": "2020-02-01T00:00:00Z"}, - {"limit": 250, "updated_at_min": "2020-01-01T00:00:00Z", "updated_at_max": "2020-02-01T00:00:00Z"}, - ), - (Shop, None, {}, {}, {}), - ], - ) - def test_request_params(self, config, stream_cls, next_page_token, stream_state, stream_slice, expected): - stream = stream_cls(config, authenticator=None) - result = stream.request_params(stream_state, stream_slice, next_page_token) - assert result == expected - - @pytest.mark.parametrize( - "stream_cls, data, expected", - [ - (Collections, [{"test": 123}], [{"test": 123}]), - (Metafields, [{"test2": 234}], [{"test2": 234}]), - (Products, [{"test3": 345}], [{"test3": 345}]), - (Shop, {"test4": 456}, [{"test4": 456}]), - ], - ) - def test_parse_response(self, config, stream_cls, data, requests_mock, expected): - stream = stream_cls(config, authenticator=None) - url = f"{stream.url_base}{stream.path()}" - data = {stream.data_path: data} if stream.data_path else data - requests_mock.get(url, json=data) - response = requests.get(url) - assert list(stream.parse_response(response)) == expected - - @pytest.mark.parametrize( - "stream_cls, data, expected", - [ - (Collections, [{"test": 123}], [{"test": 123}]), - (Metafields, [{"test2": 234}], [{"test2": 234}]), - (Products, [{"test3": 345}], [{"test3": 345}]), - (Shop, {"test4": 456}, [{"test4": 456}]), - ], - ) - def get_stream_data(self, config, stream_cls, data, requests_mock, expected): - stream = stream_cls(config, authenticator=None) - url = f"{stream.url_base}{stream.path()}" - data = {stream.data_path: data} if stream.data_path else data - requests_mock.get(url, json=data) - response = requests.get(url) - assert list(stream.parse_response(response)) == expected - - @pytest.mark.parametrize("owner_resource, expected", [({"customer": {"id": 123}}, {"customer": {"id": 123}})]) - def test_metafields_read_records(self, config, owner_resource, expected): - with patch.object(Metafields, "read_records", return_value=owner_resource): - result = Metafields(config).read_records(stream_slice={"owner_resource": owner_resource}) - assert result == expected - - -class TestIncrementalStreams: - def generate_records(self, stream_name, count): - result = [] - for i in range(0, count): - result.append({f"record_{i}": f"test_{i}"}) - return {stream_name: result} - - @pytest.mark.parametrize( - "stream_cls, expected", - [ - (Addresses, "updated_at"), - (Charges, "updated_at"), - (Customers, "updated_at"), - (Discounts, "updated_at"), - (Onetimes, "updated_at"), - (OrdersDeprecatedApi, "updated_at"), - (OrdersModernApi, "updated_at"), - (Subscriptions, "updated_at"), - ], - ) - def test_cursor_field(self, config, stream_cls, expected): - stream = stream_cls(config, authenticator=None) - result = stream.cursor_field - assert result == expected - - @pytest.mark.parametrize( - "stream_cls, cursor_response, expected", - [ - (Addresses, {"next_cursor": "some next cursor"}, {"cursor": "some next cursor"}), - (Charges, {"next_cursor": "some next cursor"}, {"cursor": "some next cursor"}), - (Customers, {"next_cursor": "some next cursor"}, {"cursor": "some next cursor"}), - (Discounts, {"next_cursor": "some next cursor"}, {"cursor": "some next cursor"}), - (Onetimes, {"next_cursor": "some next cursor"}, {"cursor": "some next cursor"}), - (OrdersDeprecatedApi, {}, {"page": 2}), - (OrdersModernApi, {"next_cursor": "some next cursor"}, {"cursor": "some next cursor"}), - (Subscriptions, {"next_cursor": "some next cursor"}, {"cursor": "some next cursor"}), - ], - ) - def test_next_page_token(self, config, stream_cls, cursor_response, requests_mock, expected): - stream = stream_cls(config, authenticator=None) - stream.limit = 2 - url = f"{stream.url_base}{stream.path()}" - response = {**cursor_response, **self.generate_records(stream.data_path, 2)} - requests_mock.get(url, json=response) - response = requests.get(url) - assert stream.next_page_token(response) == expected - - @pytest.mark.parametrize( - "stream_cls, next_page_token, stream_state, stream_slice, expected", - [ - ( - Addresses, - None, - {}, - {"start_date": "2020-01-01T00:00:00Z", "end_date": "2020-02-01T00:00:00Z"}, - {"limit": 250, "updated_at_min": "2020-01-01T00:00:00Z", "updated_at_max": "2020-02-01T00:00:00Z"}, - ), - ( - Charges, - {"cursor": "123"}, - {"updated_at": "2030-01-01"}, - {"start_date": "2020-01-01T00:00:00Z", "end_date": "2020-02-01T00:00:00Z"}, - {"limit": 250, "cursor": "123"}, - ), - ( - Customers, - None, - {}, - {"start_date": "2020-01-01T00:00:00Z", "end_date": "2020-02-01T00:00:00Z"}, - {"limit": 250, "updated_at_min": "2020-01-01T00:00:00Z", "updated_at_max": "2020-02-01T00:00:00Z"}, - ), - ( - Discounts, - None, - {}, - {"start_date": "2020-01-01T00:00:00Z", "end_date": "2020-02-01T00:00:00Z"}, - {"limit": 250, "updated_at_min": "2020-01-01T00:00:00Z", "updated_at_max": "2020-02-01T00:00:00Z"}, - ), - ( - Onetimes, - {"cursor": "123"}, - {"updated_at": "2030-01-01"}, - {"start_date": "2020-01-01T00:00:00Z", "end_date": "2020-02-01T00:00:00Z"}, - {"limit": 250, "cursor": "123"}, - ), - ( - OrdersDeprecatedApi, - None, - {}, - {"start_date": "2020-01-01T00:00:00Z", "end_date": "2020-02-01T00:00:00Z"}, - {"limit": 250, "updated_at_min": "2020-01-01T00:00:00Z", "updated_at_max": "2020-02-01T00:00:00Z"}, - ), - ( - OrdersModernApi, - None, - {}, - {"start_date": "2020-01-01T00:00:00Z", "end_date": "2020-02-01T00:00:00Z"}, - {"limit": 250, "updated_at_min": "2020-01-01T00:00:00Z", "updated_at_max": "2020-02-01T00:00:00Z"}, - ), - ( - Subscriptions, - None, - {}, - {"start_date": "2020-01-01T00:00:00Z", "end_date": "2020-02-01T00:00:00Z"}, - {"limit": 250, "updated_at_min": "2020-01-01T00:00:00Z", "updated_at_max": "2020-02-01T00:00:00Z"}, - ), - ], - ) - def test_request_params(self, config, stream_cls, next_page_token, stream_state, stream_slice, expected): - stream = stream_cls(config, authenticator=None) - result = stream.request_params(stream_state, stream_slice, next_page_token) - assert result == expected - - @pytest.mark.parametrize( - "stream_cls, current_state, latest_record, expected", - [ - (Addresses, {}, {"updated_at": 2}, {"updated_at": 2}), - (Charges, {"updated_at": 2}, {"updated_at": 3}, {"updated_at": 3}), - (Customers, {"updated_at": 3}, {"updated_at": 4}, {"updated_at": 4}), - (Discounts, {}, {"updated_at": 2}, {"updated_at": 2}), - (Onetimes, {}, {"updated_at": 2}, {"updated_at": 2}), - (OrdersDeprecatedApi, {"updated_at": 5}, {"updated_at": 5}, {"updated_at": 5}), - (OrdersModernApi, {"updated_at": 5}, {"updated_at": 5}, {"updated_at": 5}), - (Subscriptions, {"updated_at": 6}, {"updated_at": 7}, {"updated_at": 7}), - ], - ) - def test_get_updated_state(self, config, stream_cls, current_state, latest_record, expected): - stream = stream_cls(config, authenticator=None) - result = stream.get_updated_state(current_state, latest_record) - assert result == expected - - - @pytest.mark.parametrize( - "stream_cls, expected", - [ - (Addresses, {'start_date': '2021-08-15 00:00:01', 'end_date': '2021-09-14 00:00:01'}), - ], - ) - def test_stream_slices(self, config, stream_cls, expected): - stream = stream_cls(config, authenticator=None) - result = list(stream.stream_slices(sync_mode=None, cursor_field=stream.cursor_field, stream_state=None)) - assert result[0] == expected diff --git a/airbyte-integrations/connectors/source-recharge/unit_tests/test_source.py b/airbyte-integrations/connectors/source-recharge/unit_tests/test_source.py deleted file mode 100644 index 17b8e92123f06..0000000000000 --- a/airbyte-integrations/connectors/source-recharge/unit_tests/test_source.py +++ /dev/null @@ -1,58 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from unittest.mock import patch - -import pytest -from requests.exceptions import HTTPError -from source_recharge.api import Shop -from source_recharge.source import RechargeTokenAuthenticator, SourceRecharge - - -# config -@pytest.fixture(name="config") -def config(): - return { - "authenticator": None, - "access_token": "access_token", - "start_date": "2021-08-15T00:00:00Z", - } - - -# logger -@pytest.fixture(name="logger_mock") -def logger_mock_fixture(): - return patch("source_recharge.source.AirbyteLogger") - - -def test_get_auth_header(config): - expected = {"X-Recharge-Access-Token": config.get("access_token")} - actual = RechargeTokenAuthenticator(token=config["access_token"]).get_auth_header() - assert actual == expected - - -@pytest.mark.parametrize( - "patch, expected", - [ - ( - patch.object(Shop, "read_records", return_value=[{"shop": {"id": 123}}]), - (True, None), - ), - ( - patch.object(Shop, "read_records", side_effect=HTTPError(403)), - (False, "Unable to connect to Recharge API with the provided credentials - HTTPError(403)"), - ), - ], - ids=["success", "fail"], -) -def test_check_connection(logger_mock, config, patch, expected): - with patch: - result = SourceRecharge().check_connection(logger_mock, config=config) - assert result == expected - - -def test_streams(config): - streams = SourceRecharge().streams(config) - assert len(streams) == 11 diff --git a/airbyte-integrations/connectors/source-recharge/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-recharge/unit_tests/test_streams.py new file mode 100644 index 0000000000000..636f109ff66d5 --- /dev/null +++ b/airbyte-integrations/connectors/source-recharge/unit_tests/test_streams.py @@ -0,0 +1,258 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +from http import HTTPStatus +from typing import Any, List, Mapping, MutableMapping, Union + +import pytest +import requests +from source_recharge.source import Orders, RechargeTokenAuthenticator, SourceRecharge + + +def use_orders_deprecated_api_config( + config: Mapping[str, Any] = None, + use_deprecated_api: bool = False, +) -> MutableMapping[str, Any]: + test_config = config + if use_deprecated_api: + test_config["use_orders_deprecated_api"] = use_deprecated_api + return test_config + + +def test_get_auth_header(config) -> None: + expected = {"X-Recharge-Access-Token": config.get("access_token")} + actual = RechargeTokenAuthenticator(token=config["access_token"]).get_auth_header() + assert actual == expected + + +def test_streams(config) -> None: + streams = SourceRecharge().streams(config) + assert len(streams) == 11 + + +class TestCommon: + @pytest.mark.parametrize( + "stream_cls, expected", + [ + (Orders, "id"), + ], + ) + def test_primary_key(self, stream_cls, expected) -> None: + assert expected == stream_cls.primary_key + + @pytest.mark.parametrize( + "stream_cls, stream_type, expected", + [ + (Orders, "incremental", "orders"), + ], + ) + def test_data_path(self, config, stream_cls, stream_type, expected) -> None: + if stream_type == "incremental": + result = stream_cls(config, authenticator=None).data_path + else: + result = stream_cls(config, authenticator=None).data_path + assert expected == result + + @pytest.mark.parametrize( + "stream_cls, stream_type, expected", + [ + (Orders, "incremental", "orders"), + ], + ) + def test_path(self, config, stream_cls, stream_type, expected) -> None: + if stream_type == "incremental": + result = stream_cls(config, authenticator=None).path() + else: + result = stream_cls(config, authenticator=None).path() + assert expected == result + + @pytest.mark.parametrize( + ("http_status", "headers", "should_retry"), + [ + (HTTPStatus.OK, {"Content-Length": 256}, True), + (HTTPStatus.BAD_REQUEST, {}, False), + (HTTPStatus.TOO_MANY_REQUESTS, {}, True), + (HTTPStatus.INTERNAL_SERVER_ERROR, {}, True), + (HTTPStatus.FORBIDDEN, {}, False), + ], + ) + def test_should_retry(self, config, http_status, headers, should_retry) -> None: + response = requests.Response() + response.status_code = http_status + response._content = b"" + response.headers = headers + stream = Orders(config, authenticator=None) + assert stream.should_retry(response) == should_retry + + +class TestFullRefreshStreams: + def generate_records(self, stream_name, count) -> Union[Mapping[str, List[Mapping[str, Any]]], Mapping[str, Any]]: + if not stream_name: + return {f"record_{1}": f"test_{1}"} + result = [] + for i in range(0, count): + result.append({f"record_{i}": f"test_{i}"}) + return {stream_name: result} + + @pytest.mark.parametrize( + "stream_cls, use_deprecated_api, cursor_response, expected", + [ + (Orders, True, {}, {"page": 2}), + (Orders, False, {"next_cursor": "some next cursor"}, {"cursor": "some next cursor"}), + ], + ) + def test_next_page_token(self, config, use_deprecated_api, stream_cls, cursor_response, requests_mock, expected) -> None: + test_config = use_orders_deprecated_api_config(config, use_deprecated_api) + stream = stream_cls(test_config, authenticator=None) + stream.page_size = 2 + url = f"{stream.url_base}{stream.path()}" + response = {**cursor_response, **self.generate_records(stream.data_path, 2)} + requests_mock.get(url, json=response) + response = requests.get(url) + assert stream.next_page_token(response) == expected + + @pytest.mark.parametrize( + "stream_cls, use_deprecated_api, next_page_token, stream_slice, expected", + [ + ( + Orders, + True, + None, + {"start_date": "2023-01-01 00:00:01", "end_date": "2023-01-31 00:00:01"}, + {"limit": 250, "sort_by": "updated_at-asc", "updated_at_min": "2023-01-01 00:00:01", "updated_at_max": "2023-01-31 00:00:01"}, + ), + ( + Orders, + False, + None, + {"start_date": "2023-01-01 00:00:01", "end_date": "2023-01-31 00:00:01"}, + {"limit": 250, "sort_by": "updated_at-asc", "updated_at_min": "2023-01-01 00:00:01", "updated_at_max": "2023-01-31 00:00:01"}, + ), + ], + ) + def test_request_params(self, config, stream_cls, use_deprecated_api, next_page_token, stream_slice, expected) -> None: + test_config = use_orders_deprecated_api_config(config, use_deprecated_api) + stream = stream_cls(test_config, authenticator=None) + result = stream.request_params(stream_slice, next_page_token) + assert result == expected + + @pytest.mark.parametrize( + "stream_cls, use_deprecated_api, data, expected", + [ + (Orders, True, [{"test": 123}], [{"test": 123}]), + (Orders, False, [{"test": 123}], [{"test": 123}]), + ], + ) + def test_parse_response(self, config, stream_cls, use_deprecated_api, data, requests_mock, expected) -> None: + test_config = use_orders_deprecated_api_config(config, use_deprecated_api) + stream = stream_cls(test_config, authenticator=None) + url = f"{stream.url_base}{stream.path()}" + data = {stream.data_path: data} if stream.data_path else data + requests_mock.get(url, json=data) + response = requests.get(url) + assert list(stream.parse_response(response)) == expected + + @pytest.mark.parametrize( + "stream_cls, use_deprecated_api, data, expected", + [ + (Orders, True, [{"test": 123}], [{"test": 123}]), + (Orders, False, [{"test": 123}], [{"test": 123}]), + ], + ) + def get_stream_data(self, config, stream_cls, use_deprecated_api, data, requests_mock, expected) -> None: + test_config = use_orders_deprecated_api_config(config, use_deprecated_api) + stream = stream_cls(test_config, authenticator=None) + url = f"{stream.url_base}{stream.path()}" + data = {stream.data_path: data} if stream.data_path else data + requests_mock.get(url, json=data) + response = requests.get(url) + assert list(stream.parse_response(response)) == expected + + +class TestIncrementalStreams: + def generate_records(self, stream_name, count) -> Mapping[str, List[Mapping[str, Any]]]: + result = [] + for i in range(0, count): + result.append({f"record_{i}": f"test_{i}"}) + return {stream_name: result} + + @pytest.mark.parametrize( + "stream_cls, use_deprecated_api, expected", + [ + (Orders, True, "updated_at"), + (Orders, False, "updated_at"), + ], + ) + def test_cursor_field(self, config, stream_cls, use_deprecated_api, expected) -> None: + test_config = use_orders_deprecated_api_config(config, use_deprecated_api) + stream = stream_cls(test_config, authenticator=None) + result = stream.cursor_field + assert result == expected + + @pytest.mark.parametrize( + "stream_cls, use_deprecated_api, cursor_response, expected", + [ + (Orders, True, {}, {"page": 2}), + (Orders, False, {"next_cursor": "some next cursor"}, {"cursor": "some next cursor"}), + ], + ) + def test_next_page_token(self, config, stream_cls, use_deprecated_api, cursor_response, requests_mock, expected) -> None: + test_config = use_orders_deprecated_api_config(config, use_deprecated_api) + stream = stream_cls(test_config, authenticator=None) + stream.page_size = 2 + url = f"{stream.url_base}{stream.path()}" + response = {**cursor_response, **self.generate_records(stream.data_path, 2)} + requests_mock.get(url, json=response) + response = requests.get(url) + assert stream.next_page_token(response) == expected + + @pytest.mark.parametrize( + "stream_cls, use_deprecated_api, next_page_token, stream_slice, expected", + [ + ( + Orders, + True, + None, + {"start_date": "2020-01-01T00:00:00Z", "end_date": "2020-02-01T00:00:00Z"}, + {"limit": 250, "sort_by": "updated_at-asc", "updated_at_min": "2020-01-01T00:00:00Z", "updated_at_max": "2020-02-01T00:00:00Z"}, + ), + ( + Orders, + False, + None, + {"start_date": "2020-01-01T00:00:00Z", "end_date": "2020-02-01T00:00:00Z"}, + {"limit": 250, "sort_by": "updated_at-asc", "updated_at_min": "2020-01-01T00:00:00Z", "updated_at_max": "2020-02-01T00:00:00Z"}, + ), + ], + ) + def test_request_params(self, config, stream_cls, use_deprecated_api, next_page_token, stream_slice, expected) -> None: + test_config = use_orders_deprecated_api_config(config, use_deprecated_api) + stream = stream_cls(test_config, authenticator=None) + result = stream.request_params(stream_slice, next_page_token) + assert result == expected + + @pytest.mark.parametrize( + "stream_cls, use_deprecated_api, current_state, latest_record, expected", + [ + (Orders, True, {"updated_at": 5}, {"updated_at": 5}, {"updated_at": 5}), + (Orders, False, {"updated_at": 5}, {"updated_at": 5}, {"updated_at": 5}), + ], + ) + def test_get_updated_state(self, config, stream_cls, use_deprecated_api, current_state, latest_record, expected) -> None: + test_config = use_orders_deprecated_api_config(config, use_deprecated_api) + stream = stream_cls(test_config, authenticator=None) + result = stream.get_updated_state(current_state, latest_record) + assert result == expected + + @pytest.mark.parametrize( + "stream_cls, expected", + [ + (Orders, {"start_date": "2021-08-15 00:00:01", "end_date": "2021-09-14 00:00:01"}), + ], + ) + def test_stream_slices(self, config, stream_cls, expected) -> None: + stream = stream_cls(config, authenticator=None) + result = list(stream.stream_slices(sync_mode=None, cursor_field=stream.cursor_field, stream_state=None)) + assert result[0] == expected diff --git a/airbyte-integrations/connectors/source-rss/.dockerignore b/airbyte-integrations/connectors/source-rss/.dockerignore deleted file mode 100644 index 0e472e73c29b1..0000000000000 --- a/airbyte-integrations/connectors/source-rss/.dockerignore +++ /dev/null @@ -1,6 +0,0 @@ -* -!Dockerfile -!main.py -!source_rss -!setup.py -!secrets diff --git a/airbyte-integrations/connectors/source-rss/Dockerfile b/airbyte-integrations/connectors/source-rss/Dockerfile deleted file mode 100644 index afeb0625ec217..0000000000000 --- a/airbyte-integrations/connectors/source-rss/Dockerfile +++ /dev/null @@ -1,38 +0,0 @@ -FROM python:3.9.13-alpine3.15 as base - -# build and load all requirements -FROM base as builder -WORKDIR /airbyte/integration_code - -# upgrade pip to the latest version -RUN apk --no-cache upgrade \ - && pip install --upgrade pip \ - && apk --no-cache add tzdata build-base - - -COPY setup.py ./ -# install necessary packages to a temporary folder -RUN pip install --prefix=/install . - -# build a clean environment -FROM base -WORKDIR /airbyte/integration_code - -# copy all loaded and built libraries to a pure basic image -COPY --from=builder /install /usr/local -# add default timezone settings -COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime -RUN echo "Etc/UTC" > /etc/timezone - -# bash is installed for more convenient debugging. -RUN apk --no-cache add bash - -# copy payload code only -COPY main.py ./ -COPY source_rss ./source_rss - -ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] - -LABEL io.airbyte.version=0.1.0 -LABEL io.airbyte.name=airbyte/source-rss diff --git a/airbyte-integrations/connectors/source-rss/README.md b/airbyte-integrations/connectors/source-rss/README.md index 85bf860c36333..9e8d2019abec4 100644 --- a/airbyte-integrations/connectors/source-rss/README.md +++ b/airbyte-integrations/connectors/source-rss/README.md @@ -1,99 +1,103 @@ -# Rabbitmq Destination +# Rss Source -This is the repository for the Rabbitmq destination connector, written in Python. -For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/destinations/rabbitmq). +This is the repository for the Rss configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.com/integrations/sources/rss). ## Local development ### Prerequisites -**To iterate on this connector, make sure to complete this prerequisites section.** -#### Minimum Python version required `= 3.7.0` +* Python (`^3.9`) +* Poetry (`^1.7`) - installation instructions [here](https://python-poetry.org/docs/#installation) -#### Build & Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python -m venv .venv -``` -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt + +### Installing the connector + +From this connector directory, run: +```bash +poetry install --with dev ``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. -#### Create credentials -**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/destinations/rabbitmq) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `destination_rabbitmq/spec.json` file. -Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. -See `integration_tests/sample_config.json` for a sample config file. +### Create credentials + +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/sources/rss) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `src/source_rss/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `sample_files/sample_config.json` for a sample config file. -**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `destination rabbitmq test creds` -and place them into `secrets/config.json`. ### Locally running the connector + ``` -python main.py spec -python main.py check --config secrets/config.json -python main.py discover --config secrets/config.json -python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +poetry run source-rss spec +poetry run source-rss check --config secrets/config.json +poetry run source-rss discover --config secrets/config.json +poetry run source-rss read --config secrets/config.json --catalog sample_files/configured_catalog.json ``` -### Locally running the connector docker image +### Running tests +To run tests locally, from the connector directory run: -#### Build -**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):** -```bash -airbyte-ci connectors --name destination-rabbitmq build +``` +poetry run pytest tests ``` -An image will be built with the tag `airbyte/destination-rabbitmq:dev`. +### Building the docker image -**Via `docker build`:** +1. Install [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) +2. Run the following command to build the docker image: ```bash -docker build -t airbyte/destination-rabbitmq:dev . +airbyte-ci connectors --name=source-rss build ``` -#### Run +An image will be available on your host with the tag `airbyte/source-rss:dev`. + + +### Running as a docker container + Then run any of the connector commands as follows: ``` -docker run --rm airbyte/destination-rabbitmq:dev spec -docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-rabbitmq:dev check --config /secrets/config.json -# messages.jsonl is a file containing line-separated JSON representing AirbyteMessages -cat messages.jsonl | docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-rabbitmq:dev write --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +docker run --rm airbyte/source-rss:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-rss:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-rss:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-rss:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json ``` -## Testing +### Running our CI test suite + You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): ```bash airbyte-ci connectors --name=source-rss test ``` ### Customizing acceptance Tests -Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. + +Customize `acceptance-test-config.yml` file to configure acceptance tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. -## Dependency Management -All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. -We split dependencies between two groups, dependencies that are: -* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. -* required for the testing need to go to `TEST_REQUIREMENTS` list +### Dependency Management + +All of your dependencies should be managed via Poetry. +To add a new dependency, run: +```bash +poetry add +``` + +Please commit the changes to `pyproject.toml` and `poetry.lock` files. + +## Publishing a new version of the connector -### Publishing a new version of the connector You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? 1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-rss test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). +2. Bump the connector version (please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors)): + - bump the `dockerImageTag` value in in `metadata.yaml` + - bump the `version` value in `pyproject.toml` 3. Make sure the `metadata.yaml` content is up to date. -4. Make the connector documentation and its changelog is up to date (`docs/integrations/sources/rss.md`). +4. Make sure the connector documentation and its changelog is up to date (`docs/integrations/sources/rss.md`). 5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). 6. Pat yourself on the back for being an awesome contributor. 7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. - +8. Once your PR is merged, the new version of the connector will be automatically published to Docker Hub and our connector registry. \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-rss/__init__.py b/airbyte-integrations/connectors/source-rss/__init__.py new file mode 100644 index 0000000000000..c941b30457953 --- /dev/null +++ b/airbyte-integrations/connectors/source-rss/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-rss/acceptance-test-config.yml b/airbyte-integrations/connectors/source-rss/acceptance-test-config.yml index f0b01cea43e88..9d939e4bc91bc 100644 --- a/airbyte-integrations/connectors/source-rss/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-rss/acceptance-test-config.yml @@ -4,29 +4,29 @@ connector_image: airbyte/source-rss:dev acceptance_tests: spec: tests: - - config_path: "integration_tests/sample_config.json" - spec_path: "source_rss/spec.yaml" + - spec_path: "source_rss/spec.yaml" connection: tests: - - config_path: "integration_tests/sample_config.json" + - config_path: "secrets/config.json" status: "succeed" - config_path: "integration_tests/invalid_config.json" status: "failed" discovery: tests: - - config_path: "integration_tests/sample_config.json" + - config_path: "secrets/config.json" basic_read: tests: - - config_path: "integration_tests/sample_config.json" + - config_path: "secrets/config.json" configured_catalog_path: "integration_tests/configured_catalog.json" empty_streams: [] incremental: - tests: - - config_path: "integration_tests/sample_config.json" - configured_catalog_path: "integration_tests/incremental_configured_catalog.json" - future_state: - future_state_path: "integration_tests/abnormal_state.json" + bypass_reason: "This connector does not implement incremental sync" + # tests: + # - config_path: "secrets/config.json" + # configured_catalog_path: "integration_tests/configured_catalog.json" + # future_state: + # future_state_path: "integration_tests/abnormal_state.json" full_refresh: tests: - - config_path: "integration_tests/sample_config.json" + - config_path: "secrets/config.json" configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-rss/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-rss/integration_tests/abnormal_state.json index 68ac9906773a4..3587e579822d0 100644 --- a/airbyte-integrations/connectors/source-rss/integration_tests/abnormal_state.json +++ b/airbyte-integrations/connectors/source-rss/integration_tests/abnormal_state.json @@ -1,5 +1,5 @@ { - "items": { - "published": "3333-10-24T16:16:00+00:00" + "todo-stream-name": { + "todo-field-name": "value" } } diff --git a/airbyte-integrations/connectors/source-rss/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-rss/integration_tests/acceptance.py index 82823254d2666..9e6409236281f 100644 --- a/airbyte-integrations/connectors/source-rss/integration_tests/acceptance.py +++ b/airbyte-integrations/connectors/source-rss/integration_tests/acceptance.py @@ -11,4 +11,6 @@ @pytest.fixture(scope="session", autouse=True) def connector_setup(): """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-rss/integration_tests/catalog.json b/airbyte-integrations/connectors/source-rss/integration_tests/catalog.json deleted file mode 100644 index 6d00beed5497f..0000000000000 --- a/airbyte-integrations/connectors/source-rss/integration_tests/catalog.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "streams": [ - { - "name": "items", - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "required": ["published"], - "properties": { - "title": { - "type": ["null", "string"] - }, - "link": { - "type": ["null", "string"] - }, - "description": { - "type": ["null", "string"] - }, - "author": { - "type": ["null", "string"] - }, - "category": { - "type": ["null", "string"] - }, - "comments": { - "type": ["null", "string"] - }, - "enclosure": { - "type": ["null", "string"] - }, - "guid": { - "type": ["null", "string"] - }, - "published": { - "type": ["string"], - "format": "date-time" - } - } - }, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["published"] - } - ] -} diff --git a/airbyte-integrations/connectors/source-rss/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-rss/integration_tests/configured_catalog.json index 7ab22cc8dbba9..92185d4254e4c 100644 --- a/airbyte-integrations/connectors/source-rss/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-rss/integration_tests/configured_catalog.json @@ -3,42 +3,8 @@ { "stream": { "name": "items", - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "required": ["published"], - "properties": { - "title": { - "type": ["null", "string"] - }, - "link": { - "type": ["null", "string"] - }, - "description": { - "type": ["null", "string"] - }, - "author": { - "type": ["null", "string"] - }, - "category": { - "type": ["null", "string"] - }, - "comments": { - "type": ["null", "string"] - }, - "enclosure": { - "type": ["null", "string"] - }, - "guid": { - "type": ["null", "string"] - }, - "published": { - "type": ["string"], - "format": "date-time" - } - } - }, - "supported_sync_modes": ["full_refresh", "incremental"] + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] }, "sync_mode": "full_refresh", "destination_sync_mode": "overwrite" diff --git a/airbyte-integrations/connectors/source-rss/integration_tests/incremental_configured_catalog.json b/airbyte-integrations/connectors/source-rss/integration_tests/incremental_configured_catalog.json deleted file mode 100644 index d4ff280c9201e..0000000000000 --- a/airbyte-integrations/connectors/source-rss/integration_tests/incremental_configured_catalog.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "streams": [ - { - "stream": { - "name": "items", - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "required": ["published"], - "properties": { - "title": { - "type": ["null", "string"] - }, - "link": { - "type": ["null", "string"] - }, - "description": { - "type": ["null", "string"] - }, - "author": { - "type": ["null", "string"] - }, - "category": { - "type": ["null", "string"] - }, - "comments": { - "type": ["null", "string"] - }, - "enclosure": { - "type": ["null", "string"] - }, - "guid": { - "type": ["null", "string"] - }, - "published": { - "type": ["string"], - "format": "date-time" - } - } - }, - "supported_sync_modes": ["full_refresh", "incremental"] - }, - "sync_mode": "incremental", - "destination_sync_mode": "append" - } - ] -} diff --git a/airbyte-integrations/connectors/source-rss/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-rss/integration_tests/invalid_config.json index 0029691134c3e..596cca2318eb3 100644 --- a/airbyte-integrations/connectors/source-rss/integration_tests/invalid_config.json +++ b/airbyte-integrations/connectors/source-rss/integration_tests/invalid_config.json @@ -1,3 +1,3 @@ { - "url": "http://somewebsitethatdoesnotexistatall.com/something.rss" + "url": "https://jsonplaceholder.typicode.com/nonexistent" } diff --git a/airbyte-integrations/connectors/source-rss/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-rss/integration_tests/sample_state.json index e9493cafc5575..3587e579822d0 100644 --- a/airbyte-integrations/connectors/source-rss/integration_tests/sample_state.json +++ b/airbyte-integrations/connectors/source-rss/integration_tests/sample_state.json @@ -1,5 +1,5 @@ { - "items": { - "published": "2022-10-24T16:16:00+00:00" + "todo-stream-name": { + "todo-field-name": "value" } } diff --git a/airbyte-integrations/connectors/source-rss/metadata.yaml b/airbyte-integrations/connectors/source-rss/metadata.yaml index 35ff213a67c07..b84931a59df96 100644 --- a/airbyte-integrations/connectors/source-rss/metadata.yaml +++ b/airbyte-integrations/connectors/source-rss/metadata.yaml @@ -1,29 +1,43 @@ data: + allowedHosts: + hosts: + - "*" + registries: + oss: + enabled: true + cloud: + enabled: true + releases: + breakingChanges: + 1.0.0: + upgradeDeadline: "2024-04-30" + message: "The verison migrates the Rss connector to the low-code framework for greater maintainability." + remoteRegistries: + pypi: + enabled: true + packageName: airbyte-source-rss + connectorBuildOptions: + # Please update to the latest version of the connector base image. + # https://hub.docker.com/r/airbyte/python-connector-base + # Please use the full address with sha256 hash to guarantee build reproducibility. + baseImage: docker.io/airbyte/python-connector-base:1.2.0@sha256:c22a9d97464b69d6ef01898edf3f8612dc11614f05a84984451dde195f337db9 connectorSubtype: api connectorType: source definitionId: 0efee448-6948-49e2-b786-17db50647908 - dockerImageTag: 0.1.0 + dockerImageTag: 1.0.0 dockerRepository: airbyte/source-rss githubIssueLabel: source-rss icon: rss.svg license: MIT - name: RSS - remoteRegistries: - pypi: - enabled: true - packageName: airbyte-source-rss - registries: - cloud: - enabled: true - oss: - enabled: true + name: Rss + releaseDate: 2022-10-12 releaseStage: alpha + supportLevel: community documentationUrl: https://docs.airbyte.com/integrations/sources/rss tags: - language:python - - cdk:python + - cdk:low-code ab_internal: sl: 100 ql: 100 - supportLevel: community metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-rss/poetry.lock b/airbyte-integrations/connectors/source-rss/poetry.lock new file mode 100644 index 0000000000000..9aa0f7f134ddd --- /dev/null +++ b/airbyte-integrations/connectors/source-rss/poetry.lock @@ -0,0 +1,1043 @@ +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. + +[[package]] +name = "airbyte-cdk" +version = "0.79.1" +description = "A framework for writing Airbyte Connectors." +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "airbyte_cdk-0.79.1-py3-none-any.whl", hash = "sha256:36c4b1fe98448b7d116f16c612982af8e22cbff28ea37da918c851d7feb1093c"}, + {file = "airbyte_cdk-0.79.1.tar.gz", hash = "sha256:a49d10b3c87770ab1e7b7ebf9a1e945d49274c18548756f93a841ebd4c195146"}, +] + +[package.dependencies] +airbyte-protocol-models = "0.5.1" +backoff = "*" +cachetools = "*" +Deprecated = ">=1.2,<1.3" +dpath = ">=2.0.1,<2.1.0" +genson = "1.2.2" +isodate = ">=0.6.1,<0.7.0" +Jinja2 = ">=3.1.2,<3.2.0" +jsonref = ">=0.2,<0.3" +jsonschema = ">=3.2.0,<3.3.0" +pendulum = "<3.0.0" +pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" +python-dateutil = "*" +PyYAML = ">=6.0.1,<7.0.0" +requests = "*" +requests_cache = "*" +wcmatch = "8.4" + +[package.extras] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] +vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] + +[[package]] +name = "airbyte-protocol-models" +version = "0.5.1" +description = "Declares the Airbyte Protocol." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, + {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, +] + +[package.dependencies] +pydantic = ">=1.9.2,<2.0.0" + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "bracex" +version = "2.4" +description = "Bash style brace expander." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + +[[package]] +name = "cachetools" +version = "5.3.3" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "dpath" +version = "2.0.8" +description = "Filesystem-like pathing and searching for dictionaries" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, + {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "feedparser" +version = "6.0.10" +description = "Universal feed parser, handles RSS 0.9x, RSS 1.0, RSS 2.0, CDF, Atom 0.3, and Atom 1.0 feeds" +optional = false +python-versions = ">=3.6" +files = [ + {file = "feedparser-6.0.10-py3-none-any.whl", hash = "sha256:79c257d526d13b944e965f6095700587f27388e50ea16fd245babe4dfae7024f"}, + {file = "feedparser-6.0.10.tar.gz", hash = "sha256:27da485f4637ce7163cdeab13a80312b93b7d0c1b775bef4a47629a3110bca51"}, +] + +[package.dependencies] +sgmllib3k = "*" + +[[package]] +name = "genson" +version = "1.2.2" +description = "GenSON is a powerful, user-friendly JSON Schema generator." +optional = false +python-versions = "*" +files = [ + {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, +] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jsonref" +version = "0.2" +description = "An implementation of JSON Reference for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, +] + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0" +setuptools = "*" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "packaging" +version = "24.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, +] + +[[package]] +name = "pendulum" +version = "2.1.2" +description = "Python datetimes made easy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, +] + +[package.dependencies] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pydantic" +version = "1.10.15" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50f1666a9940d3d68683c9d96e39640f709d7a72ff8702987dab1761036206bb"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82790d4753ee5d00739d6cb5cf56bceb186d9d6ce134aca3ba7befb1eedbc2c8"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d207d5b87f6cbefbdb1198154292faee8017d7495a54ae58db06762004500d00"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e49db944fad339b2ccb80128ffd3f8af076f9f287197a480bf1e4ca053a866f0"}, + {file = "pydantic-1.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:d3b5c4cbd0c9cb61bbbb19ce335e1f8ab87a811f6d589ed52b0254cf585d709c"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3d5731a120752248844676bf92f25a12f6e45425e63ce22e0849297a093b5b0"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c365ad9c394f9eeffcb30a82f4246c0006417f03a7c0f8315d6211f25f7cb654"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3287e1614393119c67bd4404f46e33ae3be3ed4cd10360b48d0a4459f420c6a3"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be51dd2c8596b25fe43c0a4a59c2bee4f18d88efb8031188f9e7ddc6b469cf44"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6a51a1dd4aa7b3f1317f65493a182d3cff708385327c1c82c81e4a9d6d65b2e4"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4e316e54b5775d1eb59187f9290aeb38acf620e10f7fd2f776d97bb788199e53"}, + {file = "pydantic-1.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:0d142fa1b8f2f0ae11ddd5e3e317dcac060b951d605fda26ca9b234b92214986"}, + {file = "pydantic-1.10.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7ea210336b891f5ea334f8fc9f8f862b87acd5d4a0cbc9e3e208e7aa1775dabf"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3453685ccd7140715e05f2193d64030101eaad26076fad4e246c1cc97e1bb30d"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bea1f03b8d4e8e86702c918ccfd5d947ac268f0f0cc6ed71782e4b09353b26f"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:af9850d98fc21e5bc24ea9e35dd80a29faf6462c608728a110c0a30b595e58b7"}, + {file = "pydantic-1.10.15-cp37-cp37m-win_amd64.whl", hash = "sha256:d31ee5b14a82c9afe2bd26aaa405293d4237d0591527d9129ce36e58f19f95c1"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5e09c19df304b8123938dc3c53d3d3be6ec74b9d7d0d80f4f4b5432ae16c2022"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7ac9237cd62947db00a0d16acf2f3e00d1ae9d3bd602b9c415f93e7a9fc10528"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:584f2d4c98ffec420e02305cf675857bae03c9d617fcfdc34946b1160213a948"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbc6989fad0c030bd70a0b6f626f98a862224bc2b1e36bfc531ea2facc0a340c"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d573082c6ef99336f2cb5b667b781d2f776d4af311574fb53d908517ba523c22"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6bd7030c9abc80134087d8b6e7aa957e43d35714daa116aced57269a445b8f7b"}, + {file = "pydantic-1.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:3350f527bb04138f8aff932dc828f154847fbdc7a1a44c240fbfff1b57f49a12"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51d405b42f1b86703555797270e4970a9f9bd7953f3990142e69d1037f9d9e51"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a980a77c52723b0dc56640ced396b73a024d4b74f02bcb2d21dbbac1debbe9d0"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f1a1fb467d3f49e1708a3f632b11c69fccb4e748a325d5a491ddc7b5d22383"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:676ed48f2c5bbad835f1a8ed8a6d44c1cd5a21121116d2ac40bd1cd3619746ed"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92229f73400b80c13afcd050687f4d7e88de9234d74b27e6728aa689abcf58cc"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2746189100c646682eff0bce95efa7d2e203420d8e1c613dc0c6b4c1d9c1fde4"}, + {file = "pydantic-1.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:394f08750bd8eaad714718812e7fab615f873b3cdd0b9d84e76e51ef3b50b6b7"}, + {file = "pydantic-1.10.15-py3-none-any.whl", hash = "sha256:28e552a060ba2740d0d2aabe35162652c1459a0b9069fe0db7f4ee0e18e74d58"}, + {file = "pydantic-1.10.15.tar.gz", hash = "sha256:ca832e124eda231a60a041da4f013e3ff24949d94a01154b137fc2f2a43c3ffb"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + +[[package]] +name = "pyrsistent" +version = "0.20.0" +description = "Persistent/Functional/Immutable data structures" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, +] + +[[package]] +name = "pytest" +version = "8.1.1" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"}, + {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.4,<2.0" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-mock" +version = "3.14.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, +] + +[package.dependencies] +pytest = ">=6.2.5" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytz" +version = "2022.6" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2022.6-py2.py3-none-any.whl", hash = "sha256:222439474e9c98fced559f1709d89e6c9cbf8d79c794ff3eb9f8800064291427"}, + {file = "pytz-2022.6.tar.gz", hash = "sha256:e89512406b793ca39f5971bc999cc538ce125c0e51c27941bef4568b460095e2"}, +] + +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-cache" +version = "1.2.0" +description = "A persistent cache for python requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, +] + +[package.dependencies] +attrs = ">=21.2" +cattrs = ">=22.2" +platformdirs = ">=2.5" +requests = ">=2.22" +url-normalize = ">=1.4" +urllib3 = ">=1.25.5" + +[package.extras] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] +bson = ["bson (>=0.5)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] +dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] +json = ["ujson (>=5.4)"] +mongodb = ["pymongo (>=3)"] +redis = ["redis (>=3)"] +security = ["itsdangerous (>=2.0)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "requests-mock" +version = "1.12.1" +description = "Mock out responses from the requests package" +optional = false +python-versions = ">=3.5" +files = [ + {file = "requests-mock-1.12.1.tar.gz", hash = "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401"}, + {file = "requests_mock-1.12.1-py2.py3-none-any.whl", hash = "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563"}, +] + +[package.dependencies] +requests = ">=2.22,<3" + +[package.extras] +fixture = ["fixtures"] + +[[package]] +name = "setuptools" +version = "69.2.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "sgmllib3k" +version = "1.0.0" +description = "Py3k port of sgmllib." +optional = false +python-versions = "*" +files = [ + {file = "sgmllib3k-1.0.0.tar.gz", hash = "sha256:7868fb1c8bfa764c1ac563d3cf369c381d1325d36124933a726f29fcdaa812e9"}, +] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "typing-extensions" +version = "4.11.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, + {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, +] + +[[package]] +name = "url-normalize" +version = "1.4.3" +description = "URL normalization for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, + {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wcmatch" +version = "8.4" +description = "Wildcard/glob file name matcher." +optional = false +python-versions = ">=3.7" +files = [ + {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, + {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, +] + +[package.dependencies] +bracex = ">=2.1.1" + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9,<3.12" +content-hash = "d29e5990fc6b1b9506119e4916406107234cc90b796b7558a59231a795c60cfe" diff --git a/airbyte-integrations/connectors/source-rss/pyproject.toml b/airbyte-integrations/connectors/source-rss/pyproject.toml new file mode 100644 index 0000000000000..b826c16d8883b --- /dev/null +++ b/airbyte-integrations/connectors/source-rss/pyproject.toml @@ -0,0 +1,29 @@ +[build-system] +requires = [ "poetry-core>=1.0.0",] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +version = "1.0.0" +name = "source-rss" +description = "Source implementation for rss." +authors = [ "Airbyte ",] +license = "MIT" +readme = "README.md" +documentation = "https://docs.airbyte.com/integrations/sources/rss" +homepage = "https://airbyte.com" +repository = "https://github.com/airbytehq/airbyte" +packages = [ { include = "source_rss" }, { include = "main.py" } ] + +[tool.poetry.dependencies] +python = "^3.9,<3.12" +airbyte-cdk = "^0" +pytz = "2022.6" +feedparser = "6.0.10" + +[tool.poetry.scripts] +source-rss = "source_rss.run:run" + +[tool.poetry.group.dev.dependencies] +requests-mock = "*" +pytest-mock = "*" +pytest = "*" diff --git a/airbyte-integrations/connectors/source-rss/requirements.txt b/airbyte-integrations/connectors/source-rss/requirements.txt deleted file mode 100644 index d6e1198b1ab1f..0000000000000 --- a/airbyte-integrations/connectors/source-rss/requirements.txt +++ /dev/null @@ -1 +0,0 @@ --e . diff --git a/airbyte-integrations/connectors/source-rss/setup.py b/airbyte-integrations/connectors/source-rss/setup.py deleted file mode 100644 index f49ef214cb6ab..0000000000000 --- a/airbyte-integrations/connectors/source-rss/setup.py +++ /dev/null @@ -1,44 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = ["airbyte-cdk~=0.2", "feedparser~=6.0.10", "pytz~=2022.6"] - -TEST_REQUIREMENTS = [ - "requests-mock~=1.9.3", - "pytest~=6.1", - "pytest-mock~=3.6.1", -] - -setup( - entry_points={ - "console_scripts": [ - "source-rss=source_rss.run:run", - ], - }, - name="source_rss", - description="Source implementation for Rss.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={ - "": [ - # Include yaml files in the package (if any) - "*.yml", - "*.yaml", - # Include all json files in the package, up to 4 levels deep - "*.json", - "*/*.json", - "*/*/*.json", - "*/*/*/*.json", - "*/*/*/*/*.json", - ] - }, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/source-rss/source_rss/components.py b/airbyte-integrations/connectors/source-rss/source_rss/components.py new file mode 100644 index 0000000000000..571d1a0a2e0ca --- /dev/null +++ b/airbyte-integrations/connectors/source-rss/source_rss/components.py @@ -0,0 +1,73 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +import datetime +import logging +from calendar import timegm +from dataclasses import dataclass +from datetime import datetime +from typing import Any, Iterable, List, Mapping, Optional + +import feedparser +import pytz +import requests +from airbyte_cdk.models import SyncMode +from airbyte_cdk.sources.declarative.extractors.record_extractor import RecordExtractor +from airbyte_cdk.sources.declarative.incremental import DatetimeBasedCursor +from airbyte_cdk.sources.declarative.types import StreamSlice +from airbyte_cdk.sources.streams.core import Stream +from dateutil.parser import parse + + +class CustomExtractor(RecordExtractor): + def extract_records(self, response: requests.Response, **kwargs) -> List[Mapping[str, Any]]: + item_keys = [ + "title", + "link", + "description", + "author", + "category", + "comments", + "enclosure", + "guid", + ] + + def convert_item_to_mapping(item) -> Mapping: + mapping = {} + for item_key in item_keys: + try: + mapping[item_key] = item[item_key] + except (AttributeError, KeyError): + pass + + try: + dt = datetime.utcfromtimestamp(timegm(item.published_parsed)) + dt_tz = dt.replace(tzinfo=pytz.UTC) + mapping["published"] = dt_tz.isoformat() + except (AttributeError, KeyError): + pass + + return mapping + + def is_newer(item, initial_state_date) -> bool: + try: + current_record_date = parse(item["published"]) + except Exception: + current_record_date = None + if initial_state_date is None: + return True + elif current_record_date is None: + return True + else: + return current_record_date > initial_state_date + + feed = feedparser.parse(response.text) + try: + initial_state_date = parse(feed["published"]) + except Exception: + initial_state_date = None + + all_item_mappings = [convert_item_to_mapping(item) for item in feed.entries[::-1]] + new_items = [item for item in all_item_mappings if is_newer(item, initial_state_date)] + return new_items diff --git a/airbyte-integrations/connectors/source-rss/source_rss/manifest.yaml b/airbyte-integrations/connectors/source-rss/source_rss/manifest.yaml new file mode 100644 index 0000000000000..188df495b9222 --- /dev/null +++ b/airbyte-integrations/connectors/source-rss/source_rss/manifest.yaml @@ -0,0 +1,104 @@ +version: "0.44.0" + +definitions: + selector: + type: RecordSelector + extractor: + class_name: source_rss.components.CustomExtractor + + requester: + type: HttpRequester + url_base: "{{ config['url'] }}" + http_method: "GET" + authenticator: + type: NoAuth + + retriever: + type: SimpleRetriever + record_selector: + $ref: "#/definitions/selector" + paginator: + type: NoPagination + requester: + $ref: "#/definitions/requester" + + base_stream: + type: DeclarativeStream + retriever: + $ref: "#/definitions/retriever" + + items_stream: + $ref: "#/definitions/base_stream" + name: "items" + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/definitions/items_schema" + $parameters: + path: "/" + + items_schema: + $schema: http://json-schema.org/draft-07/schema# + type: object + additionalProperties: true + required: + - published + properties: + title: + type: + - "null" + - string + link: + type: + - "null" + - string + description: + type: + - "null" + - string + author: + type: + - "null" + - string + category: + type: + - "null" + - string + comments: + type: + - "null" + - string + enclosure: + type: + - "null" + - string + guid: + type: + - "null" + - string + published: + type: + - string + format: date-time + +streams: + - "#/definitions/items_stream" + +check: + type: CheckStream + stream_names: + - "items" + +spec: + type: Spec + documentation_url: https://docs.airbyte.com/integrations/sources/rss + connection_specification: + $schema: http://json-schema.org/draft-07/schema# + title: RSS Spec + type: object + required: + - url + properties: + url: + type: string + description: RSS Feed URL diff --git a/airbyte-integrations/connectors/source-rss/source_rss/schemas/items.json b/airbyte-integrations/connectors/source-rss/source_rss/schemas/items.json deleted file mode 100644 index 88a12b9904f37..0000000000000 --- a/airbyte-integrations/connectors/source-rss/source_rss/schemas/items.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "required": ["published"], - "properties": { - "title": { - "type": ["null", "string"] - }, - "link": { - "type": ["null", "string"] - }, - "description": { - "type": ["null", "string"] - }, - "author": { - "type": ["null", "string"] - }, - "category": { - "type": ["null", "string"] - }, - "comments": { - "type": ["null", "string"] - }, - "enclosure": { - "type": ["null", "string"] - }, - "guid": { - "type": ["null", "string"] - }, - "published": { - "type": ["string"], - "format": "date-time" - } - } -} diff --git a/airbyte-integrations/connectors/source-rss/source_rss/source.py b/airbyte-integrations/connectors/source-rss/source_rss/source.py index 7a4056c82f5f9..297b6a38c9ef8 100644 --- a/airbyte-integrations/connectors/source-rss/source_rss/source.py +++ b/airbyte-integrations/connectors/source-rss/source_rss/source.py @@ -2,154 +2,17 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource -from abc import ABC -from calendar import timegm -from datetime import datetime -from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. -import feedparser -import pytz -import requests -from airbyte_cdk.sources import AbstractSource -from airbyte_cdk.sources.streams import Stream -from airbyte_cdk.sources.streams.http import HttpStream -from dateutil.parser import parse +WARNING: Do not modify this file. +""" -item_keys = [ - "title", - "link", - "description", - "author", - "category", - "comments", - "enclosure", - "guid", -] - -def convert_item_to_mapping(item) -> Mapping: - mapping = {} - - for item_key in item_keys: - try: - mapping[item_key] = item[item_key] - except (AttributeError, KeyError): - pass - - try: - # get datetime in UTC - dt = datetime.utcfromtimestamp(timegm(item.published_parsed)) - # make sure that the output string is labeled as UTC - dt_tz = dt.replace(tzinfo=pytz.UTC) - mapping["published"] = dt_tz.isoformat() - except (AttributeError, KeyError): - pass - - return mapping - - -def is_newer(item, initial_state_date) -> bool: - try: - current_record_date = parse(item["published"]) - except Exception: - current_record_date = None - - if initial_state_date is None: - # if we don't have initial state they are all new - return True - elif current_record_date is None: - # if we can't parse the item timestamp, we should return it - return True - else: - return current_record_date > initial_state_date - - -# Basic stream -class RssStream(HttpStream, ABC): - # empty URL base since the stream can have its own full URL - url_base = "" - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - # no pagination enabled - return None - - # since we only have one response for the stream, we should only return records newer than the initial state object if incremental - def parse_response(self, response: requests.Response, stream_state: MutableMapping[str, Any], **kwargs) -> Iterable[Mapping]: - feed = feedparser.parse(response.text) - - try: - initial_state_date = parse(stream_state["published"]) - except Exception: - initial_state_date = None - - # go through in reverse order which helps the state comparisons - all_item_mappings = [convert_item_to_mapping(item) for item in feed.entries[::-1]] - - # will only filter if we have a state object, so it's incremental - yield from [item for item in all_item_mappings if is_newer(item, initial_state_date)] - - -# Basic incremental stream -class IncrementalRssStream(RssStream, ABC): - # no reason to checkpoint if it's reading individual files without pagination - state_checkpoint_interval = None - - @property - def cursor_field(self) -> str: - return "published" - - # this will fail if the dates aren't parseable, but that means incremental isn't possible anyway for that feed - def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: - try: - latest_record_date = parse(latest_record["published"]) - latest_record_state = {"published": latest_record["published"]} - except Exception: - latest_record_date = None - - try: - current_record_date = parse(current_stream_state["published"]) - except Exception: - current_record_date = None - - if latest_record_date and current_record_date: - if latest_record_date > current_record_date: - return latest_record_state - else: - return current_stream_state - if latest_record_date: - return latest_record_state - if current_record_date: - return current_stream_state - else: - return {} - - -class Items(IncrementalRssStream): - def __init__(self, url: str): - super().__init__() - self.url = url - - primary_key = None - - def path( - self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None - ) -> str: - return self.url - - -# Source -class SourceRss(AbstractSource): - def check_connection(self, logger, config) -> Tuple[bool, any]: - try: - resp = requests.get(config.get("url")) - status = resp.status_code - if status == 200: - return True, None - else: - return False, f"Unable to connect to RSS Feed (received status code: {status})" - except Exception as e: - return False, e - - def streams(self, config: Mapping[str, Any]) -> List[Stream]: - return [Items(config.get("url"))] +# Declarative Source +class SourceRss(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "manifest.yaml"}) diff --git a/airbyte-integrations/connectors/source-rss/source_rss/spec.yaml b/airbyte-integrations/connectors/source-rss/source_rss/spec.yaml deleted file mode 100644 index c1128cf4c0d01..0000000000000 --- a/airbyte-integrations/connectors/source-rss/source_rss/spec.yaml +++ /dev/null @@ -1,11 +0,0 @@ -documentationUrl: https://docs.airbyte.com/integrations/sources/rss -connectionSpecification: - $schema: http://json-schema.org/draft-07/schema# - title: RSS Spec - type: object - required: - - url - properties: - url: - type: string - description: RSS Feed URL diff --git a/airbyte-integrations/connectors/source-rss/unit_tests/test_incremental_streams.py b/airbyte-integrations/connectors/source-rss/unit_tests/test_incremental_streams.py deleted file mode 100644 index 2ed6eb321d811..0000000000000 --- a/airbyte-integrations/connectors/source-rss/unit_tests/test_incremental_streams.py +++ /dev/null @@ -1,60 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from airbyte_cdk.models import SyncMode -from pytest import fixture -from source_rss.source import IncrementalRssStream - - -@fixture -def patch_incremental_base_class(mocker): - # Mock abstract methods to enable instantiating abstract class - mocker.patch.object(IncrementalRssStream, "path", "v0/example_endpoint") - mocker.patch.object(IncrementalRssStream, "primary_key", "test_primary_key") - mocker.patch.object(IncrementalRssStream, "__abstractmethods__", set()) - - -def test_cursor_field(patch_incremental_base_class): - stream = IncrementalRssStream() - expected_cursor_field = "published" - assert stream.cursor_field == expected_cursor_field - - -def test_get_updated_state(patch_incremental_base_class): - stream = IncrementalRssStream() - - inputs = { - "current_stream_state": {"published": "2022-10-24T16:16:00+00:00"}, - "latest_record": {"published": "2022-10-30T16:16:00+00:00"}, - } - - expected_state = {"published": "2022-10-30T16:16:00+00:00"} - assert stream.get_updated_state(**inputs) == expected_state - - -def test_stream_slices(patch_incremental_base_class): - stream = IncrementalRssStream() - # TODO: replace this with your input parameters - inputs = {"sync_mode": SyncMode.incremental, "cursor_field": ["published"], "stream_state": {}} - # TODO: replace this with your expected stream slices list - expected_stream_slice = [None] - assert stream.stream_slices(**inputs) == expected_stream_slice - - -def test_supports_incremental(patch_incremental_base_class, mocker): - mocker.patch.object(IncrementalRssStream, "cursor_field", "dummy_field") - stream = IncrementalRssStream() - assert stream.supports_incremental - - -def test_source_defined_cursor(patch_incremental_base_class): - stream = IncrementalRssStream() - assert stream.source_defined_cursor - - -def test_stream_checkpoint_interval(patch_incremental_base_class): - stream = IncrementalRssStream() - expected_checkpoint_interval = None - assert stream.state_checkpoint_interval == expected_checkpoint_interval diff --git a/airbyte-integrations/connectors/source-rss/unit_tests/test_source.py b/airbyte-integrations/connectors/source-rss/unit_tests/test_source.py deleted file mode 100644 index 758f46ed9b3cf..0000000000000 --- a/airbyte-integrations/connectors/source-rss/unit_tests/test_source.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from unittest.mock import MagicMock - -from source_rss.source import SourceRss - - -def test_streams(mocker): - source = SourceRss() - config_mock = MagicMock() - streams = source.streams(config_mock) - expected_streams_number = 1 - assert len(streams) == expected_streams_number diff --git a/airbyte-integrations/connectors/source-rss/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-rss/unit_tests/test_streams.py deleted file mode 100644 index 2569efcaeb5c5..0000000000000 --- a/airbyte-integrations/connectors/source-rss/unit_tests/test_streams.py +++ /dev/null @@ -1,107 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import os -import time -from http import HTTPStatus -from unittest.mock import MagicMock - -import pytest -from source_rss.source import RssStream - - -@pytest.fixture -def patch_base_class(mocker): - # Mock abstract methods to enable instantiating abstract class - mocker.patch.object(RssStream, "path", "v0/example_endpoint") - mocker.patch.object(RssStream, "primary_key", "test_primary_key") - mocker.patch.object(RssStream, "__abstractmethods__", set()) - - -def test_request_params(patch_base_class): - stream = RssStream() - # TODO: replace this with your input parameters - inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} - # TODO: replace this with your expected request parameters - expected_params = {} - assert stream.request_params(**inputs) == expected_params - - -def test_next_page_token(patch_base_class): - stream = RssStream() - inputs = {"response": MagicMock()} - expected_token = None - assert stream.next_page_token(**inputs) == expected_token - - -def test_parse_response(patch_base_class): - stream = RssStream() - - class SampleResponse: - text = """"" - - - - - Test Title - http://testlink - Test Description - Fri, 28 Oct 2022 11:16 EDT - - - - """ - - expected_parsed_object = { - "title": "Test Title", - "link": "http://testlink", - "description": "Test Description", - "published": "2022-10-28T15:16:00+00:00", - } - - assert next(stream.parse_response(response=SampleResponse(), stream_state={})) == expected_parsed_object - - # test that the local timezone doesn't impact how this is computed - os.environ["TZ"] = "Africa/Accra" - time.tzset() - assert next(stream.parse_response(response=SampleResponse(), stream_state={})) == expected_parsed_object - os.environ["TZ"] = "Asia/Tokyo" - time.tzset() - assert next(stream.parse_response(response=SampleResponse(), stream_state={})) == expected_parsed_object - - -def test_request_headers(patch_base_class): - stream = RssStream() - inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} - expected_headers = {} - assert stream.request_headers(**inputs) == expected_headers - - -def test_http_method(patch_base_class): - stream = RssStream() - expected_method = "GET" - assert stream.http_method == expected_method - - -@pytest.mark.parametrize( - ("http_status", "should_retry"), - [ - (HTTPStatus.OK, False), - (HTTPStatus.BAD_REQUEST, False), - (HTTPStatus.TOO_MANY_REQUESTS, True), - (HTTPStatus.INTERNAL_SERVER_ERROR, True), - ], -) -def test_should_retry(patch_base_class, http_status, should_retry): - response_mock = MagicMock() - response_mock.status_code = http_status - stream = RssStream() - assert stream.should_retry(response_mock) == should_retry - - -def test_backoff_time(patch_base_class): - response_mock = MagicMock() - stream = RssStream() - expected_backoff_time = None - assert stream.backoff_time(response_mock) == expected_backoff_time diff --git a/airbyte-integrations/connectors/source-s3/integration_tests/cloud_spec.json b/airbyte-integrations/connectors/source-s3/integration_tests/cloud_spec.json index 3d0f3a14fc07e..b8b8e15f08468 100644 --- a/airbyte-integrations/connectors/source-s3/integration_tests/cloud_spec.json +++ b/airbyte-integrations/connectors/source-s3/integration_tests/cloud_spec.json @@ -242,6 +242,12 @@ "default": "None", "airbyte_hidden": true, "enum": ["None", "Primitive Types Only"] + }, + "ignore_errors_on_fields_mismatch": { + "title": "Ignore errors on field mismatch", + "description": "Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema.", + "default": false, + "type": "boolean" } }, "required": ["filetype"] diff --git a/airbyte-integrations/connectors/source-s3/integration_tests/spec.json b/airbyte-integrations/connectors/source-s3/integration_tests/spec.json index 8592fc5684ef4..1a99c91d59337 100644 --- a/airbyte-integrations/connectors/source-s3/integration_tests/spec.json +++ b/airbyte-integrations/connectors/source-s3/integration_tests/spec.json @@ -242,6 +242,12 @@ "default": "None", "airbyte_hidden": true, "enum": ["None", "Primitive Types Only"] + }, + "ignore_errors_on_fields_mismatch": { + "title": "Ignore errors on field mismatch", + "description": "Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema.", + "default": false, + "type": "boolean" } }, "required": ["filetype"] diff --git a/airbyte-integrations/connectors/source-s3/metadata.yaml b/airbyte-integrations/connectors/source-s3/metadata.yaml index a9378beb99b47..800ce97883b7e 100644 --- a/airbyte-integrations/connectors/source-s3/metadata.yaml +++ b/airbyte-integrations/connectors/source-s3/metadata.yaml @@ -10,12 +10,13 @@ data: connectorSubtype: file connectorType: source definitionId: 69589781-7828-43c5-9f63-8925b1c1ccc2 - dockerImageTag: 4.5.9 + dockerImageTag: 4.5.12 dockerRepository: airbyte/source-s3 documentationUrl: https://docs.airbyte.com/integrations/sources/s3 githubIssueLabel: source-s3 icon: s3.svg license: ELv2 + maxSecondsBetweenMessages: 1 name: S3 remoteRegistries: pypi: diff --git a/airbyte-integrations/connectors/source-s3/poetry.lock b/airbyte-integrations/connectors/source-s3/poetry.lock index d2c5a06995b76..be6b6b26fc937 100644 --- a/airbyte-integrations/connectors/source-s3/poetry.lock +++ b/airbyte-integrations/connectors/source-s3/poetry.lock @@ -1,28 +1,28 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.68.2" +version = "0.81.3" description = "A framework for writing Airbyte Connectors." optional = false -python-versions = ">=3.8" +python-versions = "<4.0,>=3.9" files = [ - {file = "airbyte-cdk-0.68.2.tar.gz", hash = "sha256:04c7557e72a2b2da6ffc8abc5196f16f2c5764738284931856c9210dd2d11998"}, - {file = "airbyte_cdk-0.68.2-py3-none-any.whl", hash = "sha256:bad36c9d9a6755fe5ec2d130fa779bdf7a9248abbc8736fa4da1f35d4a97cc8e"}, + {file = "airbyte_cdk-0.81.3-py3-none-any.whl", hash = "sha256:c168acef484120f5b392cbf0c43bb8180d8596a0c87cfe416ac2e8e7fe1ab93a"}, + {file = "airbyte_cdk-0.81.3.tar.gz", hash = "sha256:e91e7ca66b3f4d5714b44304ff3cb1bb9b703933cf6b38d32e7f06384e9e1108"}, ] [package.dependencies] -airbyte-protocol-models = "0.5.1" +airbyte-protocol-models = "*" avro = {version = ">=1.11.2,<1.12.0", optional = true, markers = "extra == \"file-based\""} backoff = "*" cachetools = "*" -Deprecated = ">=1.2,<2.0" +Deprecated = ">=1.2,<1.3" dpath = ">=2.0.1,<2.1.0" fastavro = {version = ">=1.8.0,<1.9.0", optional = true, markers = "extra == \"file-based\""} genson = "1.2.2" isodate = ">=0.6.1,<0.7.0" Jinja2 = ">=3.1.2,<3.2.0" -jsonref = ">=0.2,<1.0" +jsonref = ">=0.2,<0.3" jsonschema = ">=3.2.0,<3.3.0" markdown = {version = "*", optional = true, markers = "extra == \"file-based\""} pdf2image = {version = "1.16.3", optional = true, markers = "extra == \"file-based\""} @@ -33,31 +33,27 @@ pydantic = ">=1.10.8,<2.0.0" pyrate-limiter = ">=3.1.0,<3.2.0" pytesseract = {version = "0.3.10", optional = true, markers = "extra == \"file-based\""} python-dateutil = "*" -PyYAML = ">=6.0.1" +PyYAML = ">=6.0.1,<7.0.0" requests = "*" -requests-cache = "*" -unstructured = [ - {version = "0.10.27", optional = true, markers = "extra == \"file-based\""}, - {version = "0.10.27", extras = ["docx", "pptx"], optional = true, markers = "extra == \"file-based\""}, -] +requests_cache = "*" +unstructured = {version = "0.10.27", extras = ["docx", "pptx"], optional = true, markers = "extra == \"file-based\""} "unstructured.pytesseract" = {version = ">=0.3.12", optional = true, markers = "extra == \"file-based\""} wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] [[package]] name = "airbyte-protocol-models" -version = "0.5.1" +version = "0.9.0" description = "Declares the Airbyte Protocol." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, - {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, + {file = "airbyte_protocol_models-0.9.0-py3-none-any.whl", hash = "sha256:e972e140b5efd1edad5a338bcae8fdee9fc12545caf2c321e0f61b151c163a9b"}, + {file = "airbyte_protocol_models-0.9.0.tar.gz", hash = "sha256:40b69c33df23fe82d7078e84beb123bd604480e4d73cb277a890fcc92aedc8d2"}, ] [package.dependencies] @@ -140,17 +136,17 @@ lxml = ["lxml"] [[package]] name = "boto3" -version = "1.34.56" +version = "1.34.83" description = "The AWS SDK for Python" optional = false -python-versions = ">= 3.8" +python-versions = ">=3.8" files = [ - {file = "boto3-1.34.56-py3-none-any.whl", hash = "sha256:300888f0c1b6f32f27f85a9aa876f50f46514ec619647af7e4d20db74d339714"}, - {file = "boto3-1.34.56.tar.gz", hash = "sha256:b26928f9a21cf3649cea20a59061340f3294c6e7785ceb6e1a953eb8010dc3ba"}, + {file = "boto3-1.34.83-py3-none-any.whl", hash = "sha256:33cf93f6de5176f1188c923f4de1ae149ed723b89ed12e434f2b2f628491769e"}, + {file = "boto3-1.34.83.tar.gz", hash = "sha256:9733ce811bd82feab506ad9309e375a79cabe8c6149061971c17754ce8997551"}, ] [package.dependencies] -botocore = ">=1.34.56,<1.35.0" +botocore = ">=1.34.83,<1.35.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -159,13 +155,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.34.56" +version = "1.34.83" description = "Low-level, data-driven core of boto 3." optional = false -python-versions = ">= 3.8" +python-versions = ">=3.8" files = [ - {file = "botocore-1.34.56-py3-none-any.whl", hash = "sha256:fff66e22a5589c2d58fba57d1d95c334ce771895e831f80365f6cff6453285ec"}, - {file = "botocore-1.34.56.tar.gz", hash = "sha256:bffeb71ab21d47d4ecf947d9bdb2fbd1b0bbd0c27742cea7cf0b77b701c41d9f"}, + {file = "botocore-1.34.83-py3-none-any.whl", hash = "sha256:0a3fbbe018416aeefa8978454fb0b8129adbaf556647b72269bf02e4bf1f4161"}, + {file = "botocore-1.34.83.tar.gz", hash = "sha256:0f302aa76283d4df62b4fbb6d3d20115c1a8957fc02171257fc93904d69d5636"}, ] [package.dependencies] @@ -173,7 +169,7 @@ jmespath = ">=0.7.1,<2.0.0" python-dateutil = ">=2.1,<3.0.0" urllib3 = [ {version = ">=1.25.4,<1.27", markers = "python_version < \"3.10\""}, - {version = ">=1.25.4,<2.1", markers = "python_version >= \"3.10\""}, + {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""}, ] [package.extras] @@ -570,13 +566,13 @@ files = [ [[package]] name = "emoji" -version = "2.10.1" +version = "2.11.0" description = "Emoji for Python" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" files = [ - {file = "emoji-2.10.1-py2.py3-none-any.whl", hash = "sha256:11fb369ea79d20c14efa4362c732d67126df294a7959a2c98bfd7447c12a218e"}, - {file = "emoji-2.10.1.tar.gz", hash = "sha256:16287283518fb7141bde00198f9ffff4e1c1cb570efb68b2f1ec50975c3a581d"}, + {file = "emoji-2.11.0-py2.py3-none-any.whl", hash = "sha256:63fc9107f06c6c2e48e5078ce9575cef98518f5ac09474f6148a43e989989582"}, + {file = "emoji-2.11.0.tar.gz", hash = "sha256:772eaa30f4e0b1ce95148a092df4c7dc97644532c03225326b0fd05e8a9f72a3"}, ] [package.extras] @@ -642,6 +638,22 @@ lz4 = ["lz4"] snappy = ["python-snappy"] zstandard = ["zstandard"] +[[package]] +name = "filelock" +version = "3.13.4" +description = "A platform independent file lock." +optional = false +python-versions = ">=3.8" +files = [ + {file = "filelock-3.13.4-py3-none-any.whl", hash = "sha256:404e5e9253aa60ad457cae1be07c0f0ca90a63931200a47d9b6a6af84fd7b45f"}, + {file = "filelock-3.13.4.tar.gz", hash = "sha256:d13f466618bfde72bd2c18255e269f72542c6e70e7bac83a0232d6b1cc5c8cf4"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +typing = ["typing-extensions (>=4.8)"] + [[package]] name = "filetype" version = "1.2.0" @@ -653,6 +665,41 @@ files = [ {file = "filetype-1.2.0.tar.gz", hash = "sha256:66b56cd6474bf41d8c54660347d37afcc3f7d1970648de365c102ef77548aadb"}, ] +[[package]] +name = "fsspec" +version = "2024.3.1" +description = "File-system specification" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fsspec-2024.3.1-py3-none-any.whl", hash = "sha256:918d18d41bf73f0e2b261824baeb1b124bcf771767e3a26425cd7dec3332f512"}, + {file = "fsspec-2024.3.1.tar.gz", hash = "sha256:f39780e282d7d117ffb42bb96992f8a90795e4d0fb0f661a70ca39fe9c43ded9"}, +] + +[package.extras] +abfs = ["adlfs"] +adl = ["adlfs"] +arrow = ["pyarrow (>=1)"] +dask = ["dask", "distributed"] +devel = ["pytest", "pytest-cov"] +dropbox = ["dropbox", "dropboxdrivefs", "requests"] +full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"] +fuse = ["fusepy"] +gcs = ["gcsfs"] +git = ["pygit2"] +github = ["requests"] +gs = ["gcsfs"] +gui = ["panel"] +hdfs = ["pyarrow (>=1)"] +http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)"] +libarchive = ["libarchive-c"] +oci = ["ocifs"] +s3 = ["s3fs"] +sftp = ["paramiko"] +smb = ["smbprotocol"] +ssh = ["paramiko"] +tqdm = ["tqdm"] + [[package]] name = "genson" version = "1.2.2" @@ -663,35 +710,69 @@ files = [ {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, ] +[[package]] +name = "huggingface-hub" +version = "0.22.2" +description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "huggingface_hub-0.22.2-py3-none-any.whl", hash = "sha256:3429e25f38ccb834d310804a3b711e7e4953db5a9e420cc147a5e194ca90fd17"}, + {file = "huggingface_hub-0.22.2.tar.gz", hash = "sha256:32e9a9a6843c92f253ff9ca16b9985def4d80a93fb357af5353f770ef74a81be"}, +] + +[package.dependencies] +filelock = "*" +fsspec = ">=2023.5.0" +packaging = ">=20.9" +pyyaml = ">=5.1" +requests = "*" +tqdm = ">=4.42.1" +typing-extensions = ">=3.7.4.3" + +[package.extras] +all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.3.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +cli = ["InquirerPy (==0.3.4)"] +dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.3.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] +hf-transfer = ["hf-transfer (>=0.1.4)"] +inference = ["aiohttp", "minijinja (>=1.0)"] +quality = ["mypy (==1.5.1)", "ruff (>=0.3.0)"] +tensorflow = ["graphviz", "pydot", "tensorflow"] +tensorflow-testing = ["keras (<3.0)", "tensorflow"] +testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "minijinja (>=1.0)", "numpy", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] +torch = ["safetensors", "torch"] +typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"] + [[package]] name = "idna" -version = "3.6" +version = "3.7" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.5" files = [ - {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, - {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, ] [[package]] name = "importlib-metadata" -version = "7.0.1" +version = "7.1.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"}, - {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"}, + {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"}, + {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] [[package]] name = "iniconfig" @@ -748,24 +829,24 @@ files = [ [[package]] name = "joblib" -version = "1.3.2" +version = "1.4.0" description = "Lightweight pipelining with Python functions" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "joblib-1.3.2-py3-none-any.whl", hash = "sha256:ef4331c65f239985f3f2220ecc87db222f08fd22097a3dd5698f693875f8cbb9"}, - {file = "joblib-1.3.2.tar.gz", hash = "sha256:92f865e621e17784e7955080b6d042489e3b8e294949cc44c6eac304f59772b1"}, + {file = "joblib-1.4.0-py3-none-any.whl", hash = "sha256:42942470d4062537be4d54c83511186da1fc14ba354961a2114da91efa9a4ed7"}, + {file = "joblib-1.4.0.tar.gz", hash = "sha256:1eb0dc091919cd384490de890cb5dfd538410a6d4b3b54eef09fb8c50b409b1c"}, ] [[package]] name = "jsonref" -version = "0.3.0" -description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." +version = "0.2" +description = "An implementation of JSON Reference for Python" optional = false -python-versions = ">=3.3,<4.0" +python-versions = "*" files = [ - {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, - {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, ] [[package]] @@ -805,106 +886,184 @@ six = "*" [[package]] name = "lxml" -version = "5.1.0" +version = "5.2.1" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." optional = false python-versions = ">=3.6" files = [ - {file = "lxml-5.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:704f5572ff473a5f897745abebc6df40f22d4133c1e0a1f124e4f2bd3330ff7e"}, - {file = "lxml-5.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9d3c0f8567ffe7502d969c2c1b809892dc793b5d0665f602aad19895f8d508da"}, - {file = "lxml-5.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5fcfbebdb0c5d8d18b84118842f31965d59ee3e66996ac842e21f957eb76138c"}, - {file = "lxml-5.1.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f37c6d7106a9d6f0708d4e164b707037b7380fcd0b04c5bd9cae1fb46a856fb"}, - {file = "lxml-5.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2befa20a13f1a75c751f47e00929fb3433d67eb9923c2c0b364de449121f447c"}, - {file = "lxml-5.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22b7ee4c35f374e2c20337a95502057964d7e35b996b1c667b5c65c567d2252a"}, - {file = "lxml-5.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bf8443781533b8d37b295016a4b53c1494fa9a03573c09ca5104550c138d5c05"}, - {file = "lxml-5.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:82bddf0e72cb2af3cbba7cec1d2fd11fda0de6be8f4492223d4a268713ef2147"}, - {file = "lxml-5.1.0-cp310-cp310-win32.whl", hash = "sha256:b66aa6357b265670bb574f050ffceefb98549c721cf28351b748be1ef9577d93"}, - {file = "lxml-5.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:4946e7f59b7b6a9e27bef34422f645e9a368cb2be11bf1ef3cafc39a1f6ba68d"}, - {file = "lxml-5.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:14deca1460b4b0f6b01f1ddc9557704e8b365f55c63070463f6c18619ebf964f"}, - {file = "lxml-5.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ed8c3d2cd329bf779b7ed38db176738f3f8be637bb395ce9629fc76f78afe3d4"}, - {file = "lxml-5.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:436a943c2900bb98123b06437cdd30580a61340fbdb7b28aaf345a459c19046a"}, - {file = "lxml-5.1.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:acb6b2f96f60f70e7f34efe0c3ea34ca63f19ca63ce90019c6cbca6b676e81fa"}, - {file = "lxml-5.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af8920ce4a55ff41167ddbc20077f5698c2e710ad3353d32a07d3264f3a2021e"}, - {file = "lxml-5.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cfced4a069003d8913408e10ca8ed092c49a7f6cefee9bb74b6b3e860683b45"}, - {file = "lxml-5.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9e5ac3437746189a9b4121db2a7b86056ac8786b12e88838696899328fc44bb2"}, - {file = "lxml-5.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f4c9bda132ad108b387c33fabfea47866af87f4ea6ffb79418004f0521e63204"}, - {file = "lxml-5.1.0-cp311-cp311-win32.whl", hash = "sha256:bc64d1b1dab08f679fb89c368f4c05693f58a9faf744c4d390d7ed1d8223869b"}, - {file = "lxml-5.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:a5ab722ae5a873d8dcee1f5f45ddd93c34210aed44ff2dc643b5025981908cda"}, - {file = "lxml-5.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9aa543980ab1fbf1720969af1d99095a548ea42e00361e727c58a40832439114"}, - {file = "lxml-5.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6f11b77ec0979f7e4dc5ae081325a2946f1fe424148d3945f943ceaede98adb8"}, - {file = "lxml-5.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a36c506e5f8aeb40680491d39ed94670487ce6614b9d27cabe45d94cd5d63e1e"}, - {file = "lxml-5.1.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f643ffd2669ffd4b5a3e9b41c909b72b2a1d5e4915da90a77e119b8d48ce867a"}, - {file = "lxml-5.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16dd953fb719f0ffc5bc067428fc9e88f599e15723a85618c45847c96f11f431"}, - {file = "lxml-5.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16018f7099245157564d7148165132c70adb272fb5a17c048ba70d9cc542a1a1"}, - {file = "lxml-5.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:82cd34f1081ae4ea2ede3d52f71b7be313756e99b4b5f829f89b12da552d3aa3"}, - {file = "lxml-5.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:19a1bc898ae9f06bccb7c3e1dfd73897ecbbd2c96afe9095a6026016e5ca97b8"}, - {file = "lxml-5.1.0-cp312-cp312-win32.whl", hash = "sha256:13521a321a25c641b9ea127ef478b580b5ec82aa2e9fc076c86169d161798b01"}, - {file = "lxml-5.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:1ad17c20e3666c035db502c78b86e58ff6b5991906e55bdbef94977700c72623"}, - {file = "lxml-5.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:24ef5a4631c0b6cceaf2dbca21687e29725b7c4e171f33a8f8ce23c12558ded1"}, - {file = "lxml-5.1.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d2900b7f5318bc7ad8631d3d40190b95ef2aa8cc59473b73b294e4a55e9f30f"}, - {file = "lxml-5.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:601f4a75797d7a770daed8b42b97cd1bb1ba18bd51a9382077a6a247a12aa38d"}, - {file = "lxml-5.1.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4b68c961b5cc402cbd99cca5eb2547e46ce77260eb705f4d117fd9c3f932b95"}, - {file = "lxml-5.1.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:afd825e30f8d1f521713a5669b63657bcfe5980a916c95855060048b88e1adb7"}, - {file = "lxml-5.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:262bc5f512a66b527d026518507e78c2f9c2bd9eb5c8aeeb9f0eb43fcb69dc67"}, - {file = "lxml-5.1.0-cp36-cp36m-win32.whl", hash = "sha256:e856c1c7255c739434489ec9c8aa9cdf5179785d10ff20add308b5d673bed5cd"}, - {file = "lxml-5.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:c7257171bb8d4432fe9d6fdde4d55fdbe663a63636a17f7f9aaba9bcb3153ad7"}, - {file = "lxml-5.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b9e240ae0ba96477682aa87899d94ddec1cc7926f9df29b1dd57b39e797d5ab5"}, - {file = "lxml-5.1.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a96f02ba1bcd330807fc060ed91d1f7a20853da6dd449e5da4b09bfcc08fdcf5"}, - {file = "lxml-5.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3898ae2b58eeafedfe99e542a17859017d72d7f6a63de0f04f99c2cb125936"}, - {file = "lxml-5.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61c5a7edbd7c695e54fca029ceb351fc45cd8860119a0f83e48be44e1c464862"}, - {file = "lxml-5.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3aeca824b38ca78d9ee2ab82bd9883083d0492d9d17df065ba3b94e88e4d7ee6"}, - {file = "lxml-5.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8f52fe6859b9db71ee609b0c0a70fea5f1e71c3462ecf144ca800d3f434f0764"}, - {file = "lxml-5.1.0-cp37-cp37m-win32.whl", hash = "sha256:d42e3a3fc18acc88b838efded0e6ec3edf3e328a58c68fbd36a7263a874906c8"}, - {file = "lxml-5.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:eac68f96539b32fce2c9b47eb7c25bb2582bdaf1bbb360d25f564ee9e04c542b"}, - {file = "lxml-5.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ae15347a88cf8af0949a9872b57a320d2605ae069bcdf047677318bc0bba45b1"}, - {file = "lxml-5.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c26aab6ea9c54d3bed716b8851c8bfc40cb249b8e9880e250d1eddde9f709bf5"}, - {file = "lxml-5.1.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:342e95bddec3a698ac24378d61996b3ee5ba9acfeb253986002ac53c9a5f6f84"}, - {file = "lxml-5.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:725e171e0b99a66ec8605ac77fa12239dbe061482ac854d25720e2294652eeaa"}, - {file = "lxml-5.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d184e0d5c918cff04cdde9dbdf9600e960161d773666958c9d7b565ccc60c45"}, - {file = "lxml-5.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:98f3f020a2b736566c707c8e034945c02aa94e124c24f77ca097c446f81b01f1"}, - {file = "lxml-5.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d48fc57e7c1e3df57be5ae8614bab6d4e7b60f65c5457915c26892c41afc59e"}, - {file = "lxml-5.1.0-cp38-cp38-win32.whl", hash = "sha256:7ec465e6549ed97e9f1e5ed51c657c9ede767bc1c11552f7f4d022c4df4a977a"}, - {file = "lxml-5.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:b21b4031b53d25b0858d4e124f2f9131ffc1530431c6d1321805c90da78388d1"}, - {file = "lxml-5.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:52427a7eadc98f9e62cb1368a5079ae826f94f05755d2d567d93ee1bc3ceb354"}, - {file = "lxml-5.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6a2a2c724d97c1eb8cf966b16ca2915566a4904b9aad2ed9a09c748ffe14f969"}, - {file = "lxml-5.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:843b9c835580d52828d8f69ea4302537337a21e6b4f1ec711a52241ba4a824f3"}, - {file = "lxml-5.1.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b99f564659cfa704a2dd82d0684207b1aadf7d02d33e54845f9fc78e06b7581"}, - {file = "lxml-5.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f8b0c78e7aac24979ef09b7f50da871c2de2def043d468c4b41f512d831e912"}, - {file = "lxml-5.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9bcf86dfc8ff3e992fed847c077bd875d9e0ba2fa25d859c3a0f0f76f07f0c8d"}, - {file = "lxml-5.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:49a9b4af45e8b925e1cd6f3b15bbba2c81e7dba6dce170c677c9cda547411e14"}, - {file = "lxml-5.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:280f3edf15c2a967d923bcfb1f8f15337ad36f93525828b40a0f9d6c2ad24890"}, - {file = "lxml-5.1.0-cp39-cp39-win32.whl", hash = "sha256:ed7326563024b6e91fef6b6c7a1a2ff0a71b97793ac33dbbcf38f6005e51ff6e"}, - {file = "lxml-5.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:8d7b4beebb178e9183138f552238f7e6613162a42164233e2bda00cb3afac58f"}, - {file = "lxml-5.1.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9bd0ae7cc2b85320abd5e0abad5ccee5564ed5f0cc90245d2f9a8ef330a8deae"}, - {file = "lxml-5.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8c1d679df4361408b628f42b26a5d62bd3e9ba7f0c0e7969f925021554755aa"}, - {file = "lxml-5.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2ad3a8ce9e8a767131061a22cd28fdffa3cd2dc193f399ff7b81777f3520e372"}, - {file = "lxml-5.1.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:304128394c9c22b6569eba2a6d98392b56fbdfbad58f83ea702530be80d0f9df"}, - {file = "lxml-5.1.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d74fcaf87132ffc0447b3c685a9f862ffb5b43e70ea6beec2fb8057d5d2a1fea"}, - {file = "lxml-5.1.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:8cf5877f7ed384dabfdcc37922c3191bf27e55b498fecece9fd5c2c7aaa34c33"}, - {file = "lxml-5.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:877efb968c3d7eb2dad540b6cabf2f1d3c0fbf4b2d309a3c141f79c7e0061324"}, - {file = "lxml-5.1.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f14a4fb1c1c402a22e6a341a24c1341b4a3def81b41cd354386dcb795f83897"}, - {file = "lxml-5.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:25663d6e99659544ee8fe1b89b1a8c0aaa5e34b103fab124b17fa958c4a324a6"}, - {file = "lxml-5.1.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8b9f19df998761babaa7f09e6bc169294eefafd6149aaa272081cbddc7ba4ca3"}, - {file = "lxml-5.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e53d7e6a98b64fe54775d23a7c669763451340c3d44ad5e3a3b48a1efbdc96f"}, - {file = "lxml-5.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c3cd1fc1dc7c376c54440aeaaa0dcc803d2126732ff5c6b68ccd619f2e64be4f"}, - {file = "lxml-5.1.0.tar.gz", hash = "sha256:3eea6ed6e6c918e468e693c41ef07f3c3acc310b70ddd9cc72d9ef84bc9564ca"}, + {file = "lxml-5.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1f7785f4f789fdb522729ae465adcaa099e2a3441519df750ebdccc481d961a1"}, + {file = "lxml-5.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6cc6ee342fb7fa2471bd9b6d6fdfc78925a697bf5c2bcd0a302e98b0d35bfad3"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:794f04eec78f1d0e35d9e0c36cbbb22e42d370dda1609fb03bcd7aeb458c6377"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817d420c60a5183953c783b0547d9eb43b7b344a2c46f69513d5952a78cddf3"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2213afee476546a7f37c7a9b4ad4d74b1e112a6fafffc9185d6d21f043128c81"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b070bbe8d3f0f6147689bed981d19bbb33070225373338df755a46893528104a"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e02c5175f63effbd7c5e590399c118d5db6183bbfe8e0d118bdb5c2d1b48d937"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:3dc773b2861b37b41a6136e0b72a1a44689a9c4c101e0cddb6b854016acc0aa8"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:d7520db34088c96cc0e0a3ad51a4fd5b401f279ee112aa2b7f8f976d8582606d"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:bcbf4af004f98793a95355980764b3d80d47117678118a44a80b721c9913436a"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a2b44bec7adf3e9305ce6cbfa47a4395667e744097faed97abb4728748ba7d47"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:1c5bb205e9212d0ebddf946bc07e73fa245c864a5f90f341d11ce7b0b854475d"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2c9d147f754b1b0e723e6afb7ba1566ecb162fe4ea657f53d2139bbf894d050a"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:3545039fa4779be2df51d6395e91a810f57122290864918b172d5dc7ca5bb433"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a91481dbcddf1736c98a80b122afa0f7296eeb80b72344d7f45dc9f781551f56"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2ddfe41ddc81f29a4c44c8ce239eda5ade4e7fc305fb7311759dd6229a080052"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:a7baf9ffc238e4bf401299f50e971a45bfcc10a785522541a6e3179c83eabf0a"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:31e9a882013c2f6bd2f2c974241bf4ba68c85eba943648ce88936d23209a2e01"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0a15438253b34e6362b2dc41475e7f80de76320f335e70c5528b7148cac253a1"}, + {file = "lxml-5.2.1-cp310-cp310-win32.whl", hash = "sha256:6992030d43b916407c9aa52e9673612ff39a575523c5f4cf72cdef75365709a5"}, + {file = "lxml-5.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:da052e7962ea2d5e5ef5bc0355d55007407087392cf465b7ad84ce5f3e25fe0f"}, + {file = "lxml-5.2.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:70ac664a48aa64e5e635ae5566f5227f2ab7f66a3990d67566d9907edcbbf867"}, + {file = "lxml-5.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1ae67b4e737cddc96c99461d2f75d218bdf7a0c3d3ad5604d1f5e7464a2f9ffe"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f18a5a84e16886898e51ab4b1d43acb3083c39b14c8caeb3589aabff0ee0b270"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6f2c8372b98208ce609c9e1d707f6918cc118fea4e2c754c9f0812c04ca116d"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:394ed3924d7a01b5bd9a0d9d946136e1c2f7b3dc337196d99e61740ed4bc6fe1"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d077bc40a1fe984e1a9931e801e42959a1e6598edc8a3223b061d30fbd26bbc"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:764b521b75701f60683500d8621841bec41a65eb739b8466000c6fdbc256c240"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:3a6b45da02336895da82b9d472cd274b22dc27a5cea1d4b793874eead23dd14f"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:5ea7b6766ac2dfe4bcac8b8595107665a18ef01f8c8343f00710b85096d1b53a"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:e196a4ff48310ba62e53a8e0f97ca2bca83cdd2fe2934d8b5cb0df0a841b193a"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:200e63525948e325d6a13a76ba2911f927ad399ef64f57898cf7c74e69b71095"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:dae0ed02f6b075426accbf6b2863c3d0a7eacc1b41fb40f2251d931e50188dad"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:ab31a88a651039a07a3ae327d68ebdd8bc589b16938c09ef3f32a4b809dc96ef"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:df2e6f546c4df14bc81f9498bbc007fbb87669f1bb707c6138878c46b06f6510"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5dd1537e7cc06efd81371f5d1a992bd5ab156b2b4f88834ca852de4a8ea523fa"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9b9ec9c9978b708d488bec36b9e4c94d88fd12ccac3e62134a9d17ddba910ea9"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:8e77c69d5892cb5ba71703c4057091e31ccf534bd7f129307a4d084d90d014b8"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:a8d5c70e04aac1eda5c829a26d1f75c6e5286c74743133d9f742cda8e53b9c2f"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c94e75445b00319c1fad60f3c98b09cd63fe1134a8a953dcd48989ef42318534"}, + {file = "lxml-5.2.1-cp311-cp311-win32.whl", hash = "sha256:4951e4f7a5680a2db62f7f4ab2f84617674d36d2d76a729b9a8be4b59b3659be"}, + {file = "lxml-5.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:5c670c0406bdc845b474b680b9a5456c561c65cf366f8db5a60154088c92d102"}, + {file = "lxml-5.2.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:abc25c3cab9ec7fcd299b9bcb3b8d4a1231877e425c650fa1c7576c5107ab851"}, + {file = "lxml-5.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6935bbf153f9a965f1e07c2649c0849d29832487c52bb4a5c5066031d8b44fd5"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d793bebb202a6000390a5390078e945bbb49855c29c7e4d56a85901326c3b5d9"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afd5562927cdef7c4f5550374acbc117fd4ecc05b5007bdfa57cc5355864e0a4"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0e7259016bc4345a31af861fdce942b77c99049d6c2107ca07dc2bba2435c1d9"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:530e7c04f72002d2f334d5257c8a51bf409db0316feee7c87e4385043be136af"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59689a75ba8d7ffca577aefd017d08d659d86ad4585ccc73e43edbfc7476781a"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f9737bf36262046213a28e789cc82d82c6ef19c85a0cf05e75c670a33342ac2c"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:3a74c4f27167cb95c1d4af1c0b59e88b7f3e0182138db2501c353555f7ec57f4"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:68a2610dbe138fa8c5826b3f6d98a7cfc29707b850ddcc3e21910a6fe51f6ca0"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:f0a1bc63a465b6d72569a9bba9f2ef0334c4e03958e043da1920299100bc7c08"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c2d35a1d047efd68027817b32ab1586c1169e60ca02c65d428ae815b593e65d4"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:79bd05260359170f78b181b59ce871673ed01ba048deef4bf49a36ab3e72e80b"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:865bad62df277c04beed9478fe665b9ef63eb28fe026d5dedcb89b537d2e2ea6"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:44f6c7caff88d988db017b9b0e4ab04934f11e3e72d478031efc7edcac6c622f"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:71e97313406ccf55d32cc98a533ee05c61e15d11b99215b237346171c179c0b0"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:057cdc6b86ab732cf361f8b4d8af87cf195a1f6dc5b0ff3de2dced242c2015e0"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:f3bbbc998d42f8e561f347e798b85513ba4da324c2b3f9b7969e9c45b10f6169"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:491755202eb21a5e350dae00c6d9a17247769c64dcf62d8c788b5c135e179dc4"}, + {file = "lxml-5.2.1-cp312-cp312-win32.whl", hash = "sha256:8de8f9d6caa7f25b204fc861718815d41cbcf27ee8f028c89c882a0cf4ae4134"}, + {file = "lxml-5.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:f2a9efc53d5b714b8df2b4b3e992accf8ce5bbdfe544d74d5c6766c9e1146a3a"}, + {file = "lxml-5.2.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:70a9768e1b9d79edca17890175ba915654ee1725975d69ab64813dd785a2bd5c"}, + {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c38d7b9a690b090de999835f0443d8aa93ce5f2064035dfc48f27f02b4afc3d0"}, + {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5670fb70a828663cc37552a2a85bf2ac38475572b0e9b91283dc09efb52c41d1"}, + {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:958244ad566c3ffc385f47dddde4145088a0ab893504b54b52c041987a8c1863"}, + {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b6241d4eee5f89453307c2f2bfa03b50362052ca0af1efecf9fef9a41a22bb4f"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:2a66bf12fbd4666dd023b6f51223aed3d9f3b40fef06ce404cb75bafd3d89536"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:9123716666e25b7b71c4e1789ec829ed18663152008b58544d95b008ed9e21e9"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:0c3f67e2aeda739d1cc0b1102c9a9129f7dc83901226cc24dd72ba275ced4218"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:5d5792e9b3fb8d16a19f46aa8208987cfeafe082363ee2745ea8b643d9cc5b45"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:88e22fc0a6684337d25c994381ed8a1580a6f5ebebd5ad41f89f663ff4ec2885"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_ppc64le.whl", hash = "sha256:21c2e6b09565ba5b45ae161b438e033a86ad1736b8c838c766146eff8ceffff9"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_s390x.whl", hash = "sha256:afbbdb120d1e78d2ba8064a68058001b871154cc57787031b645c9142b937a62"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:627402ad8dea044dde2eccde4370560a2b750ef894c9578e1d4f8ffd54000461"}, + {file = "lxml-5.2.1-cp36-cp36m-win32.whl", hash = "sha256:e89580a581bf478d8dcb97d9cd011d567768e8bc4095f8557b21c4d4c5fea7d0"}, + {file = "lxml-5.2.1-cp36-cp36m-win_amd64.whl", hash = "sha256:59565f10607c244bc4c05c0c5fa0c190c990996e0c719d05deec7030c2aa8289"}, + {file = "lxml-5.2.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:857500f88b17a6479202ff5fe5f580fc3404922cd02ab3716197adf1ef628029"}, + {file = "lxml-5.2.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56c22432809085b3f3ae04e6e7bdd36883d7258fcd90e53ba7b2e463efc7a6af"}, + {file = "lxml-5.2.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a55ee573116ba208932e2d1a037cc4b10d2c1cb264ced2184d00b18ce585b2c0"}, + {file = "lxml-5.2.1-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:6cf58416653c5901e12624e4013708b6e11142956e7f35e7a83f1ab02f3fe456"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:64c2baa7774bc22dd4474248ba16fe1a7f611c13ac6123408694d4cc93d66dbd"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:74b28c6334cca4dd704e8004cba1955af0b778cf449142e581e404bd211fb619"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:7221d49259aa1e5a8f00d3d28b1e0b76031655ca74bb287123ef56c3db92f213"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3dbe858ee582cbb2c6294dc85f55b5f19c918c2597855e950f34b660f1a5ede6"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:04ab5415bf6c86e0518d57240a96c4d1fcfc3cb370bb2ac2a732b67f579e5a04"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:6ab833e4735a7e5533711a6ea2df26459b96f9eec36d23f74cafe03631647c41"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f443cdef978430887ed55112b491f670bba6462cea7a7742ff8f14b7abb98d75"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:9e2addd2d1866fe112bc6f80117bcc6bc25191c5ed1bfbcf9f1386a884252ae8"}, + {file = "lxml-5.2.1-cp37-cp37m-win32.whl", hash = "sha256:f51969bac61441fd31f028d7b3b45962f3ecebf691a510495e5d2cd8c8092dbd"}, + {file = "lxml-5.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:b0b58fbfa1bf7367dde8a557994e3b1637294be6cf2169810375caf8571a085c"}, + {file = "lxml-5.2.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3e183c6e3298a2ed5af9d7a356ea823bccaab4ec2349dc9ed83999fd289d14d5"}, + {file = "lxml-5.2.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:804f74efe22b6a227306dd890eecc4f8c59ff25ca35f1f14e7482bbce96ef10b"}, + {file = "lxml-5.2.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:08802f0c56ed150cc6885ae0788a321b73505d2263ee56dad84d200cab11c07a"}, + {file = "lxml-5.2.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f8c09ed18ecb4ebf23e02b8e7a22a05d6411911e6fabef3a36e4f371f4f2585"}, + {file = "lxml-5.2.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3d30321949861404323c50aebeb1943461a67cd51d4200ab02babc58bd06a86"}, + {file = "lxml-5.2.1-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:b560e3aa4b1d49e0e6c847d72665384db35b2f5d45f8e6a5c0072e0283430533"}, + {file = "lxml-5.2.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:058a1308914f20784c9f4674036527e7c04f7be6fb60f5d61353545aa7fcb739"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:adfb84ca6b87e06bc6b146dc7da7623395db1e31621c4785ad0658c5028b37d7"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:417d14450f06d51f363e41cace6488519038f940676ce9664b34ebf5653433a5"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a2dfe7e2473f9b59496247aad6e23b405ddf2e12ef0765677b0081c02d6c2c0b"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bf2e2458345d9bffb0d9ec16557d8858c9c88d2d11fed53998512504cd9df49b"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:58278b29cb89f3e43ff3e0c756abbd1518f3ee6adad9e35b51fb101c1c1daaec"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:64641a6068a16201366476731301441ce93457eb8452056f570133a6ceb15fca"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:78bfa756eab503673991bdcf464917ef7845a964903d3302c5f68417ecdc948c"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:11a04306fcba10cd9637e669fd73aa274c1c09ca64af79c041aa820ea992b637"}, + {file = "lxml-5.2.1-cp38-cp38-win32.whl", hash = "sha256:66bc5eb8a323ed9894f8fa0ee6cb3e3fb2403d99aee635078fd19a8bc7a5a5da"}, + {file = "lxml-5.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:9676bfc686fa6a3fa10cd4ae6b76cae8be26eb5ec6811d2a325636c460da1806"}, + {file = "lxml-5.2.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cf22b41fdae514ee2f1691b6c3cdeae666d8b7fa9434de445f12bbeee0cf48dd"}, + {file = "lxml-5.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ec42088248c596dbd61d4ae8a5b004f97a4d91a9fd286f632e42e60b706718d7"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd53553ddad4a9c2f1f022756ae64abe16da1feb497edf4d9f87f99ec7cf86bd"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feaa45c0eae424d3e90d78823f3828e7dc42a42f21ed420db98da2c4ecf0a2cb"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddc678fb4c7e30cf830a2b5a8d869538bc55b28d6c68544d09c7d0d8f17694dc"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:853e074d4931dbcba7480d4dcab23d5c56bd9607f92825ab80ee2bd916edea53"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc4691d60512798304acb9207987e7b2b7c44627ea88b9d77489bbe3e6cc3bd4"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:beb72935a941965c52990f3a32d7f07ce869fe21c6af8b34bf6a277b33a345d3"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:6588c459c5627fefa30139be4d2e28a2c2a1d0d1c265aad2ba1935a7863a4913"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:588008b8497667f1ddca7c99f2f85ce8511f8f7871b4a06ceede68ab62dff64b"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b6787b643356111dfd4032b5bffe26d2f8331556ecb79e15dacb9275da02866e"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7c17b64b0a6ef4e5affae6a3724010a7a66bda48a62cfe0674dabd46642e8b54"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:27aa20d45c2e0b8cd05da6d4759649170e8dfc4f4e5ef33a34d06f2d79075d57"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:d4f2cc7060dc3646632d7f15fe68e2fa98f58e35dd5666cd525f3b35d3fed7f8"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff46d772d5f6f73564979cd77a4fffe55c916a05f3cb70e7c9c0590059fb29ef"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:96323338e6c14e958d775700ec8a88346014a85e5de73ac7967db0367582049b"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:52421b41ac99e9d91934e4d0d0fe7da9f02bfa7536bb4431b4c05c906c8c6919"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:7a7efd5b6d3e30d81ec68ab8a88252d7c7c6f13aaa875009fe3097eb4e30b84c"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0ed777c1e8c99b63037b91f9d73a6aad20fd035d77ac84afcc205225f8f41188"}, + {file = "lxml-5.2.1-cp39-cp39-win32.whl", hash = "sha256:644df54d729ef810dcd0f7732e50e5ad1bd0a135278ed8d6bcb06f33b6b6f708"}, + {file = "lxml-5.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:9ca66b8e90daca431b7ca1408cae085d025326570e57749695d6a01454790e95"}, + {file = "lxml-5.2.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9b0ff53900566bc6325ecde9181d89afadc59c5ffa39bddf084aaedfe3b06a11"}, + {file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd6037392f2d57793ab98d9e26798f44b8b4da2f2464388588f48ac52c489ea1"}, + {file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b9c07e7a45bb64e21df4b6aa623cb8ba214dfb47d2027d90eac197329bb5e94"}, + {file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3249cc2989d9090eeac5467e50e9ec2d40704fea9ab72f36b034ea34ee65ca98"}, + {file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f42038016852ae51b4088b2862126535cc4fc85802bfe30dea3500fdfaf1864e"}, + {file = "lxml-5.2.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:533658f8fbf056b70e434dff7e7aa611bcacb33e01f75de7f821810e48d1bb66"}, + {file = "lxml-5.2.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:622020d4521e22fb371e15f580d153134bfb68d6a429d1342a25f051ec72df1c"}, + {file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efa7b51824aa0ee957ccd5a741c73e6851de55f40d807f08069eb4c5a26b2baa"}, + {file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c6ad0fbf105f6bcc9300c00010a2ffa44ea6f555df1a2ad95c88f5656104817"}, + {file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e233db59c8f76630c512ab4a4daf5a5986da5c3d5b44b8e9fc742f2a24dbd460"}, + {file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6a014510830df1475176466b6087fc0c08b47a36714823e58d8b8d7709132a96"}, + {file = "lxml-5.2.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:d38c8f50ecf57f0463399569aa388b232cf1a2ffb8f0a9a5412d0db57e054860"}, + {file = "lxml-5.2.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5aea8212fb823e006b995c4dda533edcf98a893d941f173f6c9506126188860d"}, + {file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff097ae562e637409b429a7ac958a20aab237a0378c42dabaa1e3abf2f896e5f"}, + {file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f5d65c39f16717a47c36c756af0fb36144069c4718824b7533f803ecdf91138"}, + {file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3d0c3dd24bb4605439bf91068598d00c6370684f8de4a67c2992683f6c309d6b"}, + {file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e32be23d538753a8adb6c85bd539f5fd3b15cb987404327c569dfc5fd8366e85"}, + {file = "lxml-5.2.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:cc518cea79fd1e2f6c90baafa28906d4309d24f3a63e801d855e7424c5b34144"}, + {file = "lxml-5.2.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a0af35bd8ebf84888373630f73f24e86bf016642fb8576fba49d3d6b560b7cbc"}, + {file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8aca2e3a72f37bfc7b14ba96d4056244001ddcc18382bd0daa087fd2e68a354"}, + {file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ca1e8188b26a819387b29c3895c47a5e618708fe6f787f3b1a471de2c4a94d9"}, + {file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c8ba129e6d3b0136a0f50345b2cb3db53f6bda5dd8c7f5d83fbccba97fb5dcb5"}, + {file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e998e304036198b4f6914e6a1e2b6f925208a20e2042563d9734881150c6c246"}, + {file = "lxml-5.2.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d3be9b2076112e51b323bdf6d5a7f8a798de55fb8d95fcb64bd179460cdc0704"}, + {file = "lxml-5.2.1.tar.gz", hash = "sha256:3f7765e69bbce0906a7c74d5fe46d2c7a7596147318dbc08e4a2431f3060e306"}, ] [package.extras] cssselect = ["cssselect (>=0.7)"] +html-clean = ["lxml-html-clean"] html5 = ["html5lib"] htmlsoup = ["BeautifulSoup4"] -source = ["Cython (>=3.0.7)"] +source = ["Cython (>=3.0.10)"] [[package]] name = "markdown" -version = "3.5.2" +version = "3.6" description = "Python implementation of John Gruber's Markdown." optional = false python-versions = ">=3.8" files = [ - {file = "Markdown-3.5.2-py3-none-any.whl", hash = "sha256:d43323865d89fc0cb9b20c75fc8ad313af307cc087e84b657d9eec768eddeadd"}, - {file = "Markdown-3.5.2.tar.gz", hash = "sha256:e1ac7b3dc550ee80e602e71c1d168002f062e49f1b11e26a36264dafd4df2ef8"}, + {file = "Markdown-3.6-py3-none-any.whl", hash = "sha256:48f276f4d8cfb8ce6527c8f79e2ee29708508bf4d40aa410fbc3b4ee832c850f"}, + {file = "Markdown-3.6.tar.gz", hash = "sha256:ed4f41f6daecbeeb96e576ce414c41d2d876daa9a16cb35fa8ed8c2ddfad0224"}, ] [package.dependencies] @@ -1129,13 +1288,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -1275,79 +1434,80 @@ pytzdata = ">=2020.1" [[package]] name = "pillow" -version = "10.2.0" +version = "10.3.0" description = "Python Imaging Library (Fork)" optional = false python-versions = ">=3.8" files = [ - {file = "pillow-10.2.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:7823bdd049099efa16e4246bdf15e5a13dbb18a51b68fa06d6c1d4d8b99a796e"}, - {file = "pillow-10.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:83b2021f2ade7d1ed556bc50a399127d7fb245e725aa0113ebd05cfe88aaf588"}, - {file = "pillow-10.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fad5ff2f13d69b7e74ce5b4ecd12cc0ec530fcee76356cac6742785ff71c452"}, - {file = "pillow-10.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da2b52b37dad6d9ec64e653637a096905b258d2fc2b984c41ae7d08b938a67e4"}, - {file = "pillow-10.2.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:47c0995fc4e7f79b5cfcab1fc437ff2890b770440f7696a3ba065ee0fd496563"}, - {file = "pillow-10.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:322bdf3c9b556e9ffb18f93462e5f749d3444ce081290352c6070d014c93feb2"}, - {file = "pillow-10.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:51f1a1bffc50e2e9492e87d8e09a17c5eea8409cda8d3f277eb6edc82813c17c"}, - {file = "pillow-10.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:69ffdd6120a4737710a9eee73e1d2e37db89b620f702754b8f6e62594471dee0"}, - {file = "pillow-10.2.0-cp310-cp310-win32.whl", hash = "sha256:c6dafac9e0f2b3c78df97e79af707cdc5ef8e88208d686a4847bab8266870023"}, - {file = "pillow-10.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:aebb6044806f2e16ecc07b2a2637ee1ef67a11840a66752751714a0d924adf72"}, - {file = "pillow-10.2.0-cp310-cp310-win_arm64.whl", hash = "sha256:7049e301399273a0136ff39b84c3678e314f2158f50f517bc50285fb5ec847ad"}, - {file = "pillow-10.2.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:35bb52c37f256f662abdfa49d2dfa6ce5d93281d323a9af377a120e89a9eafb5"}, - {file = "pillow-10.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c23f307202661071d94b5e384e1e1dc7dfb972a28a2310e4ee16103e66ddb67"}, - {file = "pillow-10.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:773efe0603db30c281521a7c0214cad7836c03b8ccff897beae9b47c0b657d61"}, - {file = "pillow-10.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11fa2e5984b949b0dd6d7a94d967743d87c577ff0b83392f17cb3990d0d2fd6e"}, - {file = "pillow-10.2.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:716d30ed977be8b37d3ef185fecb9e5a1d62d110dfbdcd1e2a122ab46fddb03f"}, - {file = "pillow-10.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a086c2af425c5f62a65e12fbf385f7c9fcb8f107d0849dba5839461a129cf311"}, - {file = "pillow-10.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c8de2789052ed501dd829e9cae8d3dcce7acb4777ea4a479c14521c942d395b1"}, - {file = "pillow-10.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:609448742444d9290fd687940ac0b57fb35e6fd92bdb65386e08e99af60bf757"}, - {file = "pillow-10.2.0-cp311-cp311-win32.whl", hash = "sha256:823ef7a27cf86df6597fa0671066c1b596f69eba53efa3d1e1cb8b30f3533068"}, - {file = "pillow-10.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:1da3b2703afd040cf65ec97efea81cfba59cdbed9c11d8efc5ab09df9509fc56"}, - {file = "pillow-10.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:edca80cbfb2b68d7b56930b84a0e45ae1694aeba0541f798e908a49d66b837f1"}, - {file = "pillow-10.2.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:1b5e1b74d1bd1b78bc3477528919414874748dd363e6272efd5abf7654e68bef"}, - {file = "pillow-10.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0eae2073305f451d8ecacb5474997c08569fb4eb4ac231ffa4ad7d342fdc25ac"}, - {file = "pillow-10.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7c2286c23cd350b80d2fc9d424fc797575fb16f854b831d16fd47ceec078f2c"}, - {file = "pillow-10.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e23412b5c41e58cec602f1135c57dfcf15482013ce6e5f093a86db69646a5aa"}, - {file = "pillow-10.2.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:52a50aa3fb3acb9cf7213573ef55d31d6eca37f5709c69e6858fe3bc04a5c2a2"}, - {file = "pillow-10.2.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:127cee571038f252a552760076407f9cff79761c3d436a12af6000cd182a9d04"}, - {file = "pillow-10.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8d12251f02d69d8310b046e82572ed486685c38f02176bd08baf216746eb947f"}, - {file = "pillow-10.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:54f1852cd531aa981bc0965b7d609f5f6cc8ce8c41b1139f6ed6b3c54ab82bfb"}, - {file = "pillow-10.2.0-cp312-cp312-win32.whl", hash = "sha256:257d8788df5ca62c980314053197f4d46eefedf4e6175bc9412f14412ec4ea2f"}, - {file = "pillow-10.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:154e939c5f0053a383de4fd3d3da48d9427a7e985f58af8e94d0b3c9fcfcf4f9"}, - {file = "pillow-10.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:f379abd2f1e3dddb2b61bc67977a6b5a0a3f7485538bcc6f39ec76163891ee48"}, - {file = "pillow-10.2.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8373c6c251f7ef8bda6675dd6d2b3a0fcc31edf1201266b5cf608b62a37407f9"}, - {file = "pillow-10.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:870ea1ada0899fd0b79643990809323b389d4d1d46c192f97342eeb6ee0b8483"}, - {file = "pillow-10.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4b6b1e20608493548b1f32bce8cca185bf0480983890403d3b8753e44077129"}, - {file = "pillow-10.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3031709084b6e7852d00479fd1d310b07d0ba82765f973b543c8af5061cf990e"}, - {file = "pillow-10.2.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:3ff074fc97dd4e80543a3e91f69d58889baf2002b6be64347ea8cf5533188213"}, - {file = "pillow-10.2.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:cb4c38abeef13c61d6916f264d4845fab99d7b711be96c326b84df9e3e0ff62d"}, - {file = "pillow-10.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b1b3020d90c2d8e1dae29cf3ce54f8094f7938460fb5ce8bc5c01450b01fbaf6"}, - {file = "pillow-10.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:170aeb00224ab3dc54230c797f8404507240dd868cf52066f66a41b33169bdbe"}, - {file = "pillow-10.2.0-cp38-cp38-win32.whl", hash = "sha256:c4225f5220f46b2fde568c74fca27ae9771536c2e29d7c04f4fb62c83275ac4e"}, - {file = "pillow-10.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:0689b5a8c5288bc0504d9fcee48f61a6a586b9b98514d7d29b840143d6734f39"}, - {file = "pillow-10.2.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:b792a349405fbc0163190fde0dc7b3fef3c9268292586cf5645598b48e63dc67"}, - {file = "pillow-10.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c570f24be1e468e3f0ce7ef56a89a60f0e05b30a3669a459e419c6eac2c35364"}, - {file = "pillow-10.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8ecd059fdaf60c1963c58ceb8997b32e9dc1b911f5da5307aab614f1ce5c2fb"}, - {file = "pillow-10.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c365fd1703040de1ec284b176d6af5abe21b427cb3a5ff68e0759e1e313a5e7e"}, - {file = "pillow-10.2.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:70c61d4c475835a19b3a5aa42492409878bbca7438554a1f89d20d58a7c75c01"}, - {file = "pillow-10.2.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b6f491cdf80ae540738859d9766783e3b3c8e5bd37f5dfa0b76abdecc5081f13"}, - {file = "pillow-10.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d189550615b4948f45252d7f005e53c2040cea1af5b60d6f79491a6e147eef7"}, - {file = "pillow-10.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:49d9ba1ed0ef3e061088cd1e7538a0759aab559e2e0a80a36f9fd9d8c0c21591"}, - {file = "pillow-10.2.0-cp39-cp39-win32.whl", hash = "sha256:babf5acfede515f176833ed6028754cbcd0d206f7f614ea3447d67c33be12516"}, - {file = "pillow-10.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:0304004f8067386b477d20a518b50f3fa658a28d44e4116970abfcd94fac34a8"}, - {file = "pillow-10.2.0-cp39-cp39-win_arm64.whl", hash = "sha256:0fb3e7fc88a14eacd303e90481ad983fd5b69c761e9e6ef94c983f91025da869"}, - {file = "pillow-10.2.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:322209c642aabdd6207517e9739c704dc9f9db943015535783239022002f054a"}, - {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3eedd52442c0a5ff4f887fab0c1c0bb164d8635b32c894bc1faf4c618dd89df2"}, - {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb28c753fd5eb3dd859b4ee95de66cc62af91bcff5db5f2571d32a520baf1f04"}, - {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:33870dc4653c5017bf4c8873e5488d8f8d5f8935e2f1fb9a2208c47cdd66efd2"}, - {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3c31822339516fb3c82d03f30e22b1d038da87ef27b6a78c9549888f8ceda39a"}, - {file = "pillow-10.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a2b56ba36e05f973d450582fb015594aaa78834fefe8dfb8fcd79b93e64ba4c6"}, - {file = "pillow-10.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d8e6aeb9201e655354b3ad049cb77d19813ad4ece0df1249d3c793de3774f8c7"}, - {file = "pillow-10.2.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:2247178effb34a77c11c0e8ac355c7a741ceca0a732b27bf11e747bbc950722f"}, - {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15587643b9e5eb26c48e49a7b33659790d28f190fc514a322d55da2fb5c2950e"}, - {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753cd8f2086b2b80180d9b3010dd4ed147efc167c90d3bf593fe2af21265e5a5"}, - {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7c8f97e8e7a9009bcacbe3766a36175056c12f9a44e6e6f2d5caad06dcfbf03b"}, - {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d1b35bcd6c5543b9cb547dee3150c93008f8dd0f1fef78fc0cd2b141c5baf58a"}, - {file = "pillow-10.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fe4c15f6c9285dc54ce6553a3ce908ed37c8f3825b5a51a15c91442bb955b868"}, - {file = "pillow-10.2.0.tar.gz", hash = "sha256:e87f0b2c78157e12d7686b27d63c070fd65d994e8ddae6f328e0dcf4a0cd007e"}, + {file = "pillow-10.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:90b9e29824800e90c84e4022dd5cc16eb2d9605ee13f05d47641eb183cd73d45"}, + {file = "pillow-10.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a2c405445c79c3f5a124573a051062300936b0281fee57637e706453e452746c"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78618cdbccaa74d3f88d0ad6cb8ac3007f1a6fa5c6f19af64b55ca170bfa1edf"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261ddb7ca91fcf71757979534fb4c128448b5b4c55cb6152d280312062f69599"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ce49c67f4ea0609933d01c0731b34b8695a7a748d6c8d186f95e7d085d2fe475"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b14f16f94cbc61215115b9b1236f9c18403c15dd3c52cf629072afa9d54c1cbf"}, + {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d33891be6df59d93df4d846640f0e46f1a807339f09e79a8040bc887bdcd7ed3"}, + {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b50811d664d392f02f7761621303eba9d1b056fb1868c8cdf4231279645c25f5"}, + {file = "pillow-10.3.0-cp310-cp310-win32.whl", hash = "sha256:ca2870d5d10d8726a27396d3ca4cf7976cec0f3cb706debe88e3a5bd4610f7d2"}, + {file = "pillow-10.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:f0d0591a0aeaefdaf9a5e545e7485f89910c977087e7de2b6c388aec32011e9f"}, + {file = "pillow-10.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:ccce24b7ad89adb5a1e34a6ba96ac2530046763912806ad4c247356a8f33a67b"}, + {file = "pillow-10.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:5f77cf66e96ae734717d341c145c5949c63180842a545c47a0ce7ae52ca83795"}, + {file = "pillow-10.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4b878386c4bf293578b48fc570b84ecfe477d3b77ba39a6e87150af77f40c57"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdcbb4068117dfd9ce0138d068ac512843c52295ed996ae6dd1faf537b6dbc27"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9797a6c8fe16f25749b371c02e2ade0efb51155e767a971c61734b1bf6293994"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:9e91179a242bbc99be65e139e30690e081fe6cb91a8e77faf4c409653de39451"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:1b87bd9d81d179bd8ab871603bd80d8645729939f90b71e62914e816a76fc6bd"}, + {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:81d09caa7b27ef4e61cb7d8fbf1714f5aec1c6b6c5270ee53504981e6e9121ad"}, + {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:048ad577748b9fa4a99a0548c64f2cb8d672d5bf2e643a739ac8faff1164238c"}, + {file = "pillow-10.3.0-cp311-cp311-win32.whl", hash = "sha256:7161ec49ef0800947dc5570f86568a7bb36fa97dd09e9827dc02b718c5643f09"}, + {file = "pillow-10.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:8eb0908e954d093b02a543dc963984d6e99ad2b5e36503d8a0aaf040505f747d"}, + {file = "pillow-10.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:4e6f7d1c414191c1199f8996d3f2282b9ebea0945693fb67392c75a3a320941f"}, + {file = "pillow-10.3.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:e46f38133e5a060d46bd630faa4d9fa0202377495df1f068a8299fd78c84de84"}, + {file = "pillow-10.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:50b8eae8f7334ec826d6eeffaeeb00e36b5e24aa0b9df322c247539714c6df19"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d3bea1c75f8c53ee4d505c3e67d8c158ad4df0d83170605b50b64025917f338"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19aeb96d43902f0a783946a0a87dbdad5c84c936025b8419da0a0cd7724356b1"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:74d28c17412d9caa1066f7a31df8403ec23d5268ba46cd0ad2c50fb82ae40462"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:ff61bfd9253c3915e6d41c651d5f962da23eda633cf02262990094a18a55371a"}, + {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d886f5d353333b4771d21267c7ecc75b710f1a73d72d03ca06df49b09015a9ef"}, + {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b5ec25d8b17217d635f8935dbc1b9aa5907962fae29dff220f2659487891cd3"}, + {file = "pillow-10.3.0-cp312-cp312-win32.whl", hash = "sha256:51243f1ed5161b9945011a7360e997729776f6e5d7005ba0c6879267d4c5139d"}, + {file = "pillow-10.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:412444afb8c4c7a6cc11a47dade32982439925537e483be7c0ae0cf96c4f6a0b"}, + {file = "pillow-10.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:798232c92e7665fe82ac085f9d8e8ca98826f8e27859d9a96b41d519ecd2e49a"}, + {file = "pillow-10.3.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:4eaa22f0d22b1a7e93ff0a596d57fdede2e550aecffb5a1ef1106aaece48e96b"}, + {file = "pillow-10.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cd5e14fbf22a87321b24c88669aad3a51ec052eb145315b3da3b7e3cc105b9a2"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1530e8f3a4b965eb6a7785cf17a426c779333eb62c9a7d1bbcf3ffd5bf77a4aa"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d512aafa1d32efa014fa041d38868fda85028e3f930a96f85d49c7d8ddc0383"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:339894035d0ede518b16073bdc2feef4c991ee991a29774b33e515f1d308e08d"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:aa7e402ce11f0885305bfb6afb3434b3cd8f53b563ac065452d9d5654c7b86fd"}, + {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0ea2a783a2bdf2a561808fe4a7a12e9aa3799b701ba305de596bc48b8bdfce9d"}, + {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c78e1b00a87ce43bb37642c0812315b411e856a905d58d597750eb79802aaaa3"}, + {file = "pillow-10.3.0-cp38-cp38-win32.whl", hash = "sha256:72d622d262e463dfb7595202d229f5f3ab4b852289a1cd09650362db23b9eb0b"}, + {file = "pillow-10.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:2034f6759a722da3a3dbd91a81148cf884e91d1b747992ca288ab88c1de15999"}, + {file = "pillow-10.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2ed854e716a89b1afcedea551cd85f2eb2a807613752ab997b9974aaa0d56936"}, + {file = "pillow-10.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dc1a390a82755a8c26c9964d457d4c9cbec5405896cba94cf51f36ea0d855002"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4203efca580f0dd6f882ca211f923168548f7ba334c189e9eab1178ab840bf60"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3102045a10945173d38336f6e71a8dc71bcaeed55c3123ad4af82c52807b9375"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6fb1b30043271ec92dc65f6d9f0b7a830c210b8a96423074b15c7bc999975f57"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:1dfc94946bc60ea375cc39cff0b8da6c7e5f8fcdc1d946beb8da5c216156ddd8"}, + {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b09b86b27a064c9624d0a6c54da01c1beaf5b6cadfa609cf63789b1d08a797b9"}, + {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d3b2348a78bc939b4fed6552abfd2e7988e0f81443ef3911a4b8498ca084f6eb"}, + {file = "pillow-10.3.0-cp39-cp39-win32.whl", hash = "sha256:45ebc7b45406febf07fef35d856f0293a92e7417ae7933207e90bf9090b70572"}, + {file = "pillow-10.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:0ba26351b137ca4e0db0342d5d00d2e355eb29372c05afd544ebf47c0956ffeb"}, + {file = "pillow-10.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:50fd3f6b26e3441ae07b7c979309638b72abc1a25da31a81a7fbd9495713ef4f"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6b02471b72526ab8a18c39cb7967b72d194ec53c1fd0a70b050565a0f366d355"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8ab74c06ffdab957d7670c2a5a6e1a70181cd10b727cd788c4dd9005b6a8acd9"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:048eeade4c33fdf7e08da40ef402e748df113fd0b4584e32c4af74fe78baaeb2"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e2ec1e921fd07c7cda7962bad283acc2f2a9ccc1b971ee4b216b75fad6f0463"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c8e73e99da7db1b4cad7f8d682cf6abad7844da39834c288fbfa394a47bbced"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:16563993329b79513f59142a6b02055e10514c1a8e86dca8b48a893e33cf91e3"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:dd78700f5788ae180b5ee8902c6aea5a5726bac7c364b202b4b3e3ba2d293170"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:aff76a55a8aa8364d25400a210a65ff59d0168e0b4285ba6bf2bd83cf675ba32"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b7bc2176354defba3edc2b9a777744462da2f8e921fbaf61e52acb95bafa9828"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:793b4e24db2e8742ca6423d3fde8396db336698c55cd34b660663ee9e45ed37f"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d93480005693d247f8346bc8ee28c72a2191bdf1f6b5db469c096c0c867ac015"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c83341b89884e2b2e55886e8fbbf37c3fa5efd6c8907124aeb72f285ae5696e5"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1a1d1915db1a4fdb2754b9de292642a39a7fb28f1736699527bb649484fb966a"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a0eaa93d054751ee9964afa21c06247779b90440ca41d184aeb5d410f20ff591"}, + {file = "pillow-10.3.0.tar.gz", hash = "sha256:9d2455fbf44c914840c793e89aa82d0e1763a14253a000743719ae5946814b2d"}, ] [package.extras] @@ -1401,47 +1561,47 @@ files = [ [[package]] name = "pyarrow" -version = "15.0.0" +version = "15.0.2" description = "Python library for Apache Arrow" optional = false python-versions = ">=3.8" files = [ - {file = "pyarrow-15.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:0a524532fd6dd482edaa563b686d754c70417c2f72742a8c990b322d4c03a15d"}, - {file = "pyarrow-15.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:60a6bdb314affa9c2e0d5dddf3d9cbb9ef4a8dddaa68669975287d47ece67642"}, - {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:66958fd1771a4d4b754cd385835e66a3ef6b12611e001d4e5edfcef5f30391e2"}, - {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f500956a49aadd907eaa21d4fff75f73954605eaa41f61cb94fb008cf2e00c6"}, - {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6f87d9c4f09e049c2cade559643424da84c43a35068f2a1c4653dc5b1408a929"}, - {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:85239b9f93278e130d86c0e6bb455dcb66fc3fd891398b9d45ace8799a871a1e"}, - {file = "pyarrow-15.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b8d43e31ca16aa6e12402fcb1e14352d0d809de70edd185c7650fe80e0769e3"}, - {file = "pyarrow-15.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:fa7cd198280dbd0c988df525e50e35b5d16873e2cdae2aaaa6363cdb64e3eec5"}, - {file = "pyarrow-15.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8780b1a29d3c8b21ba6b191305a2a607de2e30dab399776ff0aa09131e266340"}, - {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe0ec198ccc680f6c92723fadcb97b74f07c45ff3fdec9dd765deb04955ccf19"}, - {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:036a7209c235588c2f07477fe75c07e6caced9b7b61bb897c8d4e52c4b5f9555"}, - {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2bd8a0e5296797faf9a3294e9fa2dc67aa7f10ae2207920dbebb785c77e9dbe5"}, - {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e8ebed6053dbe76883a822d4e8da36860f479d55a762bd9e70d8494aed87113e"}, - {file = "pyarrow-15.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:17d53a9d1b2b5bd7d5e4cd84d018e2a45bc9baaa68f7e6e3ebed45649900ba99"}, - {file = "pyarrow-15.0.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:9950a9c9df24090d3d558b43b97753b8f5867fb8e521f29876aa021c52fda351"}, - {file = "pyarrow-15.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:003d680b5e422d0204e7287bb3fa775b332b3fce2996aa69e9adea23f5c8f970"}, - {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f75fce89dad10c95f4bf590b765e3ae98bcc5ba9f6ce75adb828a334e26a3d40"}, - {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca9cb0039923bec49b4fe23803807e4ef39576a2bec59c32b11296464623dc2"}, - {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:9ed5a78ed29d171d0acc26a305a4b7f83c122d54ff5270810ac23c75813585e4"}, - {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:6eda9e117f0402dfcd3cd6ec9bfee89ac5071c48fc83a84f3075b60efa96747f"}, - {file = "pyarrow-15.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:9a3a6180c0e8f2727e6f1b1c87c72d3254cac909e609f35f22532e4115461177"}, - {file = "pyarrow-15.0.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:19a8918045993349b207de72d4576af0191beef03ea655d8bdb13762f0cd6eac"}, - {file = "pyarrow-15.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d0ec076b32bacb6666e8813a22e6e5a7ef1314c8069d4ff345efa6246bc38593"}, - {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5db1769e5d0a77eb92344c7382d6543bea1164cca3704f84aa44e26c67e320fb"}, - {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2617e3bf9df2a00020dd1c1c6dce5cc343d979efe10bc401c0632b0eef6ef5b"}, - {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:d31c1d45060180131caf10f0f698e3a782db333a422038bf7fe01dace18b3a31"}, - {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:c8c287d1d479de8269398b34282e206844abb3208224dbdd7166d580804674b7"}, - {file = "pyarrow-15.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:07eb7f07dc9ecbb8dace0f58f009d3a29ee58682fcdc91337dfeb51ea618a75b"}, - {file = "pyarrow-15.0.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:47af7036f64fce990bb8a5948c04722e4e3ea3e13b1007ef52dfe0aa8f23cf7f"}, - {file = "pyarrow-15.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93768ccfff85cf044c418bfeeafce9a8bb0cee091bd8fd19011aff91e58de540"}, - {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6ee87fd6892700960d90abb7b17a72a5abb3b64ee0fe8db6c782bcc2d0dc0b4"}, - {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:001fca027738c5f6be0b7a3159cc7ba16a5c52486db18160909a0831b063c4e4"}, - {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:d1c48648f64aec09accf44140dccb92f4f94394b8d79976c426a5b79b11d4fa7"}, - {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:972a0141be402bb18e3201448c8ae62958c9c7923dfaa3b3d4530c835ac81aed"}, - {file = "pyarrow-15.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:f01fc5cf49081426429127aa2d427d9d98e1cb94a32cb961d583a70b7c4504e6"}, - {file = "pyarrow-15.0.0.tar.gz", hash = "sha256:876858f549d540898f927eba4ef77cd549ad8d24baa3207cf1b72e5788b50e83"}, + {file = "pyarrow-15.0.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:88b340f0a1d05b5ccc3d2d986279045655b1fe8e41aba6ca44ea28da0d1455d8"}, + {file = "pyarrow-15.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eaa8f96cecf32da508e6c7f69bb8401f03745c050c1dd42ec2596f2e98deecac"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23c6753ed4f6adb8461e7c383e418391b8d8453c5d67e17f416c3a5d5709afbd"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f639c059035011db8c0497e541a8a45d98a58dbe34dc8fadd0ef128f2cee46e5"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:290e36a59a0993e9a5224ed2fb3e53375770f07379a0ea03ee2fce2e6d30b423"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:06c2bb2a98bc792f040bef31ad3e9be6a63d0cb39189227c08a7d955db96816e"}, + {file = "pyarrow-15.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:f7a197f3670606a960ddc12adbe8075cea5f707ad7bf0dffa09637fdbb89f76c"}, + {file = "pyarrow-15.0.2-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:5f8bc839ea36b1f99984c78e06e7a06054693dc2af8920f6fb416b5bca9944e4"}, + {file = "pyarrow-15.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f5e81dfb4e519baa6b4c80410421528c214427e77ca0ea9461eb4097c328fa33"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a4f240852b302a7af4646c8bfe9950c4691a419847001178662a98915fd7ee7"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e7d9cfb5a1e648e172428c7a42b744610956f3b70f524aa3a6c02a448ba853e"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2d4f905209de70c0eb5b2de6763104d5a9a37430f137678edfb9a675bac9cd98"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:90adb99e8ce5f36fbecbbc422e7dcbcbed07d985eed6062e459e23f9e71fd197"}, + {file = "pyarrow-15.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:b116e7fd7889294cbd24eb90cd9bdd3850be3738d61297855a71ac3b8124ee38"}, + {file = "pyarrow-15.0.2-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:25335e6f1f07fdaa026a61c758ee7d19ce824a866b27bba744348fa73bb5a440"}, + {file = "pyarrow-15.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:90f19e976d9c3d8e73c80be84ddbe2f830b6304e4c576349d9360e335cd627fc"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a22366249bf5fd40ddacc4f03cd3160f2d7c247692945afb1899bab8a140ddfb"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2a335198f886b07e4b5ea16d08ee06557e07db54a8400cc0d03c7f6a22f785f"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:3e6d459c0c22f0b9c810a3917a1de3ee704b021a5fb8b3bacf968eece6df098f"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:033b7cad32198754d93465dcfb71d0ba7cb7cd5c9afd7052cab7214676eec38b"}, + {file = "pyarrow-15.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:29850d050379d6e8b5a693098f4de7fd6a2bea4365bfd073d7c57c57b95041ee"}, + {file = "pyarrow-15.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:7167107d7fb6dcadb375b4b691b7e316f4368f39f6f45405a05535d7ad5e5058"}, + {file = "pyarrow-15.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e85241b44cc3d365ef950432a1b3bd44ac54626f37b2e3a0cc89c20e45dfd8bf"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:248723e4ed3255fcd73edcecc209744d58a9ca852e4cf3d2577811b6d4b59818"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ff3bdfe6f1b81ca5b73b70a8d482d37a766433823e0c21e22d1d7dde76ca33f"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:f3d77463dee7e9f284ef42d341689b459a63ff2e75cee2b9302058d0d98fe142"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:8c1faf2482fb89766e79745670cbca04e7018497d85be9242d5350cba21357e1"}, + {file = "pyarrow-15.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:28f3016958a8e45a1069303a4a4f6a7d4910643fc08adb1e2e4a7ff056272ad3"}, + {file = "pyarrow-15.0.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:89722cb64286ab3d4daf168386f6968c126057b8c7ec3ef96302e81d8cdb8ae4"}, + {file = "pyarrow-15.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cd0ba387705044b3ac77b1b317165c0498299b08261d8122c96051024f953cd5"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad2459bf1f22b6a5cdcc27ebfd99307d5526b62d217b984b9f5c974651398832"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58922e4bfece8b02abf7159f1f53a8f4d9f8e08f2d988109126c17c3bb261f22"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:adccc81d3dc0478ea0b498807b39a8d41628fa9210729b2f718b78cb997c7c91"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:8bd2baa5fe531571847983f36a30ddbf65261ef23e496862ece83bdceb70420d"}, + {file = "pyarrow-15.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6669799a1d4ca9da9c7e06ef48368320f5856f36f9a4dd31a11839dda3f6cc8c"}, + {file = "pyarrow-15.0.2.tar.gz", hash = "sha256:9c9bc803cb3b7bfacc1e96ffbfd923601065d9d3f911179d81e72d99fd74a3d9"}, ] [package.dependencies] @@ -1449,58 +1609,58 @@ numpy = ">=1.16.6,<2" [[package]] name = "pycparser" -version = "2.21" +version = "2.22" description = "C parser in Python" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.8" files = [ - {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, - {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] [[package]] name = "pydantic" -version = "1.10.14" +version = "1.10.15" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, - {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, - {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, - {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, - {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, - {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, - {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, - {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, - {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, - {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, - {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, - {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, - {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, - {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, - {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, - {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, - {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, - {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, - {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, - {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, - {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, - {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, - {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, - {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, - {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, - {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, - {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, - {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, - {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, - {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, - {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, - {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, - {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, - {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, - {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, - {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50f1666a9940d3d68683c9d96e39640f709d7a72ff8702987dab1761036206bb"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82790d4753ee5d00739d6cb5cf56bceb186d9d6ce134aca3ba7befb1eedbc2c8"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d207d5b87f6cbefbdb1198154292faee8017d7495a54ae58db06762004500d00"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e49db944fad339b2ccb80128ffd3f8af076f9f287197a480bf1e4ca053a866f0"}, + {file = "pydantic-1.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:d3b5c4cbd0c9cb61bbbb19ce335e1f8ab87a811f6d589ed52b0254cf585d709c"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3d5731a120752248844676bf92f25a12f6e45425e63ce22e0849297a093b5b0"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c365ad9c394f9eeffcb30a82f4246c0006417f03a7c0f8315d6211f25f7cb654"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3287e1614393119c67bd4404f46e33ae3be3ed4cd10360b48d0a4459f420c6a3"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be51dd2c8596b25fe43c0a4a59c2bee4f18d88efb8031188f9e7ddc6b469cf44"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6a51a1dd4aa7b3f1317f65493a182d3cff708385327c1c82c81e4a9d6d65b2e4"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4e316e54b5775d1eb59187f9290aeb38acf620e10f7fd2f776d97bb788199e53"}, + {file = "pydantic-1.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:0d142fa1b8f2f0ae11ddd5e3e317dcac060b951d605fda26ca9b234b92214986"}, + {file = "pydantic-1.10.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7ea210336b891f5ea334f8fc9f8f862b87acd5d4a0cbc9e3e208e7aa1775dabf"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3453685ccd7140715e05f2193d64030101eaad26076fad4e246c1cc97e1bb30d"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bea1f03b8d4e8e86702c918ccfd5d947ac268f0f0cc6ed71782e4b09353b26f"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:af9850d98fc21e5bc24ea9e35dd80a29faf6462c608728a110c0a30b595e58b7"}, + {file = "pydantic-1.10.15-cp37-cp37m-win_amd64.whl", hash = "sha256:d31ee5b14a82c9afe2bd26aaa405293d4237d0591527d9129ce36e58f19f95c1"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5e09c19df304b8123938dc3c53d3d3be6ec74b9d7d0d80f4f4b5432ae16c2022"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7ac9237cd62947db00a0d16acf2f3e00d1ae9d3bd602b9c415f93e7a9fc10528"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:584f2d4c98ffec420e02305cf675857bae03c9d617fcfdc34946b1160213a948"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbc6989fad0c030bd70a0b6f626f98a862224bc2b1e36bfc531ea2facc0a340c"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d573082c6ef99336f2cb5b667b781d2f776d4af311574fb53d908517ba523c22"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6bd7030c9abc80134087d8b6e7aa957e43d35714daa116aced57269a445b8f7b"}, + {file = "pydantic-1.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:3350f527bb04138f8aff932dc828f154847fbdc7a1a44c240fbfff1b57f49a12"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51d405b42f1b86703555797270e4970a9f9bd7953f3990142e69d1037f9d9e51"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a980a77c52723b0dc56640ced396b73a024d4b74f02bcb2d21dbbac1debbe9d0"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f1a1fb467d3f49e1708a3f632b11c69fccb4e748a325d5a491ddc7b5d22383"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:676ed48f2c5bbad835f1a8ed8a6d44c1cd5a21121116d2ac40bd1cd3619746ed"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92229f73400b80c13afcd050687f4d7e88de9234d74b27e6728aa689abcf58cc"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2746189100c646682eff0bce95efa7d2e203420d8e1c613dc0c6b4c1d9c1fde4"}, + {file = "pydantic-1.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:394f08750bd8eaad714718812e7fab615f873b3cdd0b9d84e76e51ef3b50b6b7"}, + {file = "pydantic-1.10.15-py3-none-any.whl", hash = "sha256:28e552a060ba2740d0d2aabe35162652c1459a0b9069fe0db7f4ee0e18e74d58"}, + {file = "pydantic-1.10.15.tar.gz", hash = "sha256:ca832e124eda231a60a041da4f013e3ff24949d94a01154b137fc2f2a43c3ffb"}, ] [package.dependencies] @@ -1607,17 +1767,17 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xm [[package]] name = "pytest-mock" -version = "3.12.0" +version = "3.14.0" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, - {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, ] [package.dependencies] -pytest = ">=5.0" +pytest = ">=6.2.5" [package.extras] dev = ["pre-commit", "pytest-asyncio", "tox"] @@ -1855,101 +2015,101 @@ files = [ [[package]] name = "rapidfuzz" -version = "3.6.2" +version = "3.8.1" description = "rapid fuzzy string matching" optional = false python-versions = ">=3.8" files = [ - {file = "rapidfuzz-3.6.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a5637e6bf11b15b5aff6ee818c76bdec99ad208511b78985e6209ba648a6e3ee"}, - {file = "rapidfuzz-3.6.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:380586664f2f63807050ddb95e7702888b4f0b425abf17655940c411f39287ad"}, - {file = "rapidfuzz-3.6.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3168ff565d4b8c239cf11fb604dd2507d30e9bcaac76a4077c0ac23cf2c866ed"}, - {file = "rapidfuzz-3.6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be69f7fd46b5c6467fe5e2fd4cff3816b0c03048eed8a4becb9a73e6000960e7"}, - {file = "rapidfuzz-3.6.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cbd5894f23fdf5697499cf759523639838ac822bd1600e343fdce7313baa02ae"}, - {file = "rapidfuzz-3.6.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85a5b6e026393fe39fb61146b9c17c5af66fffbe1410e992c4bb06d9ec327bd3"}, - {file = "rapidfuzz-3.6.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ab269adfc64480f209e99f253391a10735edd5c09046e04899adab5fb132f20e"}, - {file = "rapidfuzz-3.6.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35aeac852bca06023d6bbd50c1fc504ca5a9a3613d5e75a140f0be7601fa34ef"}, - {file = "rapidfuzz-3.6.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e706f302c6a3ae0d74edd0d6ace46aee1ae07c563b436ccf5ff04db2b3571e60"}, - {file = "rapidfuzz-3.6.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bec353f022011e6e5cd28ccb8700fbd2a33918197af0d4e0abb3c3f4845cc864"}, - {file = "rapidfuzz-3.6.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ef3925daaa93eed20401012e219f569ff0c039ed5bf4ce2d3737b4f75d441622"}, - {file = "rapidfuzz-3.6.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:6ee98d88ae9ccc77ff61992ed33b2496478def5dc0da55c9a9aa06fcb725a352"}, - {file = "rapidfuzz-3.6.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:423c7c588b09d618601097b7a0017dfcb91132a2076bef29023c5f3cd2dc3de1"}, - {file = "rapidfuzz-3.6.2-cp310-cp310-win32.whl", hash = "sha256:c17c5efee347a40a6f4c1eec59e3d7d1e22f7613a97f8b8a07733ef723483a04"}, - {file = "rapidfuzz-3.6.2-cp310-cp310-win_amd64.whl", hash = "sha256:4209816626d8d6ff8ae7dc248061c6059e618b70c6e6f6e4d7444ae3740b2b85"}, - {file = "rapidfuzz-3.6.2-cp310-cp310-win_arm64.whl", hash = "sha256:1c54d3c85e522d3ac9ee39415f183c8fa184c4f87e7e5a37938f15a6d50e853a"}, - {file = "rapidfuzz-3.6.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e06f6d270112f5db001f1cba5a97e1a48aee3d3dbdcbea3ec027c230462dbf9b"}, - {file = "rapidfuzz-3.6.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:080cb71b50cb6aff11d1c6aeb157f273e2da0b2bdb3f9d7b01257e49e69a8576"}, - {file = "rapidfuzz-3.6.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a7895e04a22d6515bc91a850e0831f2405547605aa311d1ffec51e4818abc3c1"}, - {file = "rapidfuzz-3.6.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd82f9838519136b7083dd1e3149ee80344521f3dc37f744f227505ff0883efb"}, - {file = "rapidfuzz-3.6.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a945567c2b0b6e069454c9782d5234b0b6795718adf7a9f868bd3144afa6a023"}, - {file = "rapidfuzz-3.6.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:673ba2c343644805acdae1cb949c6a4de71aa2f62a998978551ebea59603af3f"}, - {file = "rapidfuzz-3.6.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9d457c89bac1471442002e70551e8268e639b3870b4a4521eae363c07253be87"}, - {file = "rapidfuzz-3.6.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:495c0d8e14e6f12520eb7fc71b9ba9fcaafb47fc23a654e6e89b6c7985ec0020"}, - {file = "rapidfuzz-3.6.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6d67b649bf3e1b1722d04eca44d37919aef88305ce7ad05564502d013cf550fd"}, - {file = "rapidfuzz-3.6.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e48dde8ca83d11daa00900cf6a5d281a1297aef9b7bfa73801af6e8822be5019"}, - {file = "rapidfuzz-3.6.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:824cc381cf81cbf8d158f6935664ec2a69e6ac3b1d39fa201988bf81a257f775"}, - {file = "rapidfuzz-3.6.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:1dfe4c24957474ce0ac75d886387e30e292b4be39228a6d71f76de414dc187db"}, - {file = "rapidfuzz-3.6.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d57b98013b802621bbc8b12a46bfc9d36ac552ab51ca207f7ce167ad46adabeb"}, - {file = "rapidfuzz-3.6.2-cp311-cp311-win32.whl", hash = "sha256:9a07dffac439223b4f1025dbfc68f4445a3460a859309c9858c2a3fa29617cdc"}, - {file = "rapidfuzz-3.6.2-cp311-cp311-win_amd64.whl", hash = "sha256:95a49c6b8bf1229743ae585dd5b7d57f0d15a7eb6e826866d5c9965ba958503c"}, - {file = "rapidfuzz-3.6.2-cp311-cp311-win_arm64.whl", hash = "sha256:af7c19ec86e11488539380d3db1755be5d561a3c0e7b04ff9d07abd7f9a8e9d8"}, - {file = "rapidfuzz-3.6.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:de8adc12161bf282c60f12dc9233bb31632f71d446a010fe7469a69b8153427f"}, - {file = "rapidfuzz-3.6.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:337e357f693130c4c6be740652542b260e36f622c59e01fa33d58f1d2750c930"}, - {file = "rapidfuzz-3.6.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6468f8bc8c3c50604f43631550ef9cfec873515dba5023ca34d461be94669fc8"}, - {file = "rapidfuzz-3.6.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74c6773b11445b5e5cf93ca383171cd0ac0cdeafea11a7b2a5688f8bf8d813e6"}, - {file = "rapidfuzz-3.6.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1507fc5769aa109dda4de3a15f822a0f6a03e18d627bd0ba3ddbb253cf70e07"}, - {file = "rapidfuzz-3.6.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:617949a70150e6fffdaed19253dd49f7a53528411dc8bf7663d499ba21e0f61e"}, - {file = "rapidfuzz-3.6.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f8b77779174b1b40aa70827692571ab457061897846255ad7d5d559e2edb1932"}, - {file = "rapidfuzz-3.6.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80e51b22a7da83f9c87a97e92df07ed0612c74c35496590255f4b5d5b4212dfe"}, - {file = "rapidfuzz-3.6.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3ae7c86914cb6673e97e187ba431b9c4cf4177d9ae77f8a1e5b2ba9a5628839e"}, - {file = "rapidfuzz-3.6.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ddc380ffaa90f204cc9ddcb779114b9ab6f015246d549de9d47871a97ef9f18a"}, - {file = "rapidfuzz-3.6.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:3c1dc078ef371fce09f9f3eec2ca4eaa2a8cd412ec53941015b4f39f14d34407"}, - {file = "rapidfuzz-3.6.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:9a74102fc5a2534fe91f7507838623e1f3a149d8e05648389c42bb42e14b1c3f"}, - {file = "rapidfuzz-3.6.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:48e1eaea8fcd522fca7f04f0480663f0f0cfb77957092cce60a93f4462864996"}, - {file = "rapidfuzz-3.6.2-cp312-cp312-win32.whl", hash = "sha256:66b008bf2972740cd2dda5d382eb8bdb87265cd88198e71c7797bdc0d1f79d20"}, - {file = "rapidfuzz-3.6.2-cp312-cp312-win_amd64.whl", hash = "sha256:87ac3a87f2251ae2e95fc9478ca5c759de6d141d04c84d3fec9f9cdcfc167b33"}, - {file = "rapidfuzz-3.6.2-cp312-cp312-win_arm64.whl", hash = "sha256:b593cc51aed887e93b78c2f94dfae9008be2b23d17afd3b1f1d3eb3913b58f26"}, - {file = "rapidfuzz-3.6.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:7d830bc7a9b586a374147ec60b08b1f9ae5996b43f75cc514f37faef3866b519"}, - {file = "rapidfuzz-3.6.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:dbee7f5ff11872b76505cbd87c814abc823e8757f11c69062eb3b25130a283da"}, - {file = "rapidfuzz-3.6.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:28c011fb31f2c3f82f503aedd6097d3d3854e574e327a119a3b7eb2cf90b79ca"}, - {file = "rapidfuzz-3.6.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cda81d0e0ce0c13abfa46b24e10c1e85f9c6acb628f0a9a948f5779f9c2076a2"}, - {file = "rapidfuzz-3.6.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c279928651ce0e9e5220dcb25a00cc53b65e592a0861336a38299bcdca3a596"}, - {file = "rapidfuzz-3.6.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:35bd4bc9c40e6994c5d6edea4b9319388b4d9711c13c66d543bb4c37624b4184"}, - {file = "rapidfuzz-3.6.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d07899506a5a8760448d9df036d528b55a554bf571714173635c79eef4a86e58"}, - {file = "rapidfuzz-3.6.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb2e51d01b9c6d6954a3e055c57a80d4685b4fc82719db5519fc153566bcd6bb"}, - {file = "rapidfuzz-3.6.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:153d065e353371cc0aeff32b99999a5758266a64e958d1364189367c1c9f6814"}, - {file = "rapidfuzz-3.6.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4edcceebb85ebfa49a3ddcde20ad891d36c08dc0fd592efdab0e7d313a4e36af"}, - {file = "rapidfuzz-3.6.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3549123fca5bb817341025f98e8e49ca99f84596c7c4f92b658f8e5836040d4a"}, - {file = "rapidfuzz-3.6.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:84c1032ae42628465b7a5cc35249906061e18a8193c9c27cbd2db54e9823a9a6"}, - {file = "rapidfuzz-3.6.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9bcc91ebd8fc69a6bd3b5711c8250f5f4e70606b4da75ef415f57ad209978205"}, - {file = "rapidfuzz-3.6.2-cp38-cp38-win32.whl", hash = "sha256:f3a70f341c4c111bad910d2df69c78577a98af140319a996af24c9385939335d"}, - {file = "rapidfuzz-3.6.2-cp38-cp38-win_amd64.whl", hash = "sha256:354ad5fe655beb7b279390cb58334903931c5452ecbad1b1666ffb06786498e2"}, - {file = "rapidfuzz-3.6.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1b86b93d93020c2b3edc1665d75c8855784845fc0a739b312c26c3a4bf0c80d5"}, - {file = "rapidfuzz-3.6.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:28243086ed0e50808bb56632e5442c457241646aeafafd501ac87901f40a3237"}, - {file = "rapidfuzz-3.6.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ed52461ae5a9ea4c400d38e2649c74a413f1a6d8fb8308b66f1fbd122514732f"}, - {file = "rapidfuzz-3.6.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a46220f86a5f9cb016af31525e0d0865cad437d02239aa0d8aed2ab8bff1f1c"}, - {file = "rapidfuzz-3.6.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:81a630ed2fc3ec5fc7400eb66bab1f87e282b4d47f0abe3e48c6634dfa13b5e4"}, - {file = "rapidfuzz-3.6.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d8e5a437b9089df6242a718d9c31ab1742989e9400a0977af012ef483b63b4c2"}, - {file = "rapidfuzz-3.6.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16270b5529de83b7bae7457e952e4d9cf3fbf029a837dd32d415bb9e0eb8e599"}, - {file = "rapidfuzz-3.6.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5378c04102c7f084cde30a100154fa6d7e2baf0d51a6bdd2f912545559c1fb35"}, - {file = "rapidfuzz-3.6.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7f18397c8d6a65fc0b288d2fc29bc7baeea6ba91eeb95163a3cd98f23cd3bc85"}, - {file = "rapidfuzz-3.6.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2acd2514defce81e6ff4bbff50252d5e7df8e85a731442c4b83e44c86cf1c916"}, - {file = "rapidfuzz-3.6.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:1df2faf80201952e252413b6fac6f3e146080dcebb87bb1bb722508e67558ed8"}, - {file = "rapidfuzz-3.6.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6440ed0b3007c1c9286b0b88fe2ab2d9e83edd60cd62293b3dfabb732b4e8a30"}, - {file = "rapidfuzz-3.6.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4fcfa23b5553b27f4016df77c53172ea743454cf12c28cfa7c35a309a2be93b3"}, - {file = "rapidfuzz-3.6.2-cp39-cp39-win32.whl", hash = "sha256:2d580d937146e803c8e5e1b87916cab8d6f84013b6392713e201efcda335c7d8"}, - {file = "rapidfuzz-3.6.2-cp39-cp39-win_amd64.whl", hash = "sha256:fe2a68be734e8e88af23385c68d6467e15818b6b1df1cbfebf7bff577226c957"}, - {file = "rapidfuzz-3.6.2-cp39-cp39-win_arm64.whl", hash = "sha256:6478f7803efebf5f644d0b758439c5b25728550fdfbb19783d150004c46a75a9"}, - {file = "rapidfuzz-3.6.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:36ce7b68a7b90b787cdd73480a68d2f1ca63c31a3a9d5a79a8736f978e1e9344"}, - {file = "rapidfuzz-3.6.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53597fd72a9340bcdd80d3620f4957c2b92f9b569313b969a3abdaffd193aae6"}, - {file = "rapidfuzz-3.6.2-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4f6de745fe6ce46a422d353ee10599013631d7d714a36d025f164b2d4e8c000"}, - {file = "rapidfuzz-3.6.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62df2136068e2515ed8beb01756381ff62c29384d785e3bf46e3111d4ea3ba1e"}, - {file = "rapidfuzz-3.6.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7382c90170f60c846c81a07ddd80bb2e8c43c8383754486fa37f67391a571897"}, - {file = "rapidfuzz-3.6.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f31314fd2e2f3dc3e519e6f93669462ce7953df2def1c344aa8f5345976d0eb2"}, - {file = "rapidfuzz-3.6.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:012221629d54d3bee954148247f711eb86d4d390b589ebfe03172ea0b37a7531"}, - {file = "rapidfuzz-3.6.2-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d41dd59a70decfce6595315367a2fea2af660d92a9d144acc6479030501014d7"}, - {file = "rapidfuzz-3.6.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f9fa14136a5b0cba1ec42531f7c3e0b0d3edb7fd6bc5e5ae7b498541f3855ab"}, - {file = "rapidfuzz-3.6.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:259364199cbfeca33b1af369fc7951f71717aa285184a3fa5a7b1772da1b89db"}, - {file = "rapidfuzz-3.6.2.tar.gz", hash = "sha256:cf911e792ab0c431694c9bf2648afabfd92099103f2e31492893e078ddca5e1a"}, + {file = "rapidfuzz-3.8.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1b176f01490b48337183da5b4223005bc0c2354a4faee5118917d2fba0bedc1c"}, + {file = "rapidfuzz-3.8.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0798e32304b8009d215026bf7e1c448f1831da0a03987b7de30059a41bee92f3"}, + {file = "rapidfuzz-3.8.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ad4dbd06c1f579eb043b2dcfc635bc6c9fb858240a70f0abd3bed84d8ac79994"}, + {file = "rapidfuzz-3.8.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6ec696a268e8d730b42711537e500f7397afc06125c0e8fa9c8211386d315a5"}, + {file = "rapidfuzz-3.8.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a8a007fdc5cf646e48e361a39eabe725b93af7673c5ab90294e551cae72ff58"}, + {file = "rapidfuzz-3.8.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:68b185a0397aebe78bcc5d0e1efd96509d4e2f3c4a05996e5c843732f547e9ef"}, + {file = "rapidfuzz-3.8.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:267ff42370e031195e3020fff075420c136b69dc918ecb5542ec75c1e36af81f"}, + {file = "rapidfuzz-3.8.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:987cd277d27d14301019fdf61c17524f6127f5d364be5482228726049d8e0d10"}, + {file = "rapidfuzz-3.8.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bc5a1ec3bd05b55d3070d557c0cdd4412272d51b4966c79aa3e9da207bd33d65"}, + {file = "rapidfuzz-3.8.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa223c73c59cc45c12eaa9c439318084003beced0447ff92b578a890288e19eb"}, + {file = "rapidfuzz-3.8.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:d4276c7ee061db0bac54846933b40339f60085523675f917f37de24a4b3ce0ee"}, + {file = "rapidfuzz-3.8.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:2ba0e43e9a94d256a704a674c7010e6f8ef9225edf7287cf3e7f66c9894b06cd"}, + {file = "rapidfuzz-3.8.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c22b32a57ab47afb207e8fe4bd7bb58c90f9291a63723cafd4e704742166e368"}, + {file = "rapidfuzz-3.8.1-cp310-cp310-win32.whl", hash = "sha256:50db3867864422bf6a6435ea65b9ac9de71ef52ed1e05d62f498cd430189eece"}, + {file = "rapidfuzz-3.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:bca5acf77508d1822023a85118c2dd8d3c16abdd56d2762359a46deb14daa5e0"}, + {file = "rapidfuzz-3.8.1-cp310-cp310-win_arm64.whl", hash = "sha256:c763d99cf087e7b2c5be0cf34ae9a0e1b031f5057d2341a0a0ed782458645b7e"}, + {file = "rapidfuzz-3.8.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:30c282612b7ebf2d7646ebebfd98dd308c582246a94d576734e4b0162f57baf4"}, + {file = "rapidfuzz-3.8.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c6a43446f0cd8ff347b1fbb918dc0d657bebf484ddfa960ee069e422a477428"}, + {file = "rapidfuzz-3.8.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4969fe0eb179aedacee53ca8f8f1be3c655964a6d62db30f247fee444b9c52b4"}, + {file = "rapidfuzz-3.8.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:799f5f221d639d1c2ed8a2348d1edf5e22aa489b58b2cc99f5bf0c1917e2d0f2"}, + {file = "rapidfuzz-3.8.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e62bde7d5df3312acc528786ee801c472cae5078b1f1e42761c853ba7fe1072a"}, + {file = "rapidfuzz-3.8.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ea3d2e41d8fac71cb63ee72f75bee0ed1e9c50709d4c58587f15437761c1858"}, + {file = "rapidfuzz-3.8.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f34a541895627c2bc9ef7757f16f02428a08d960d33208adfb96b33338d0945"}, + {file = "rapidfuzz-3.8.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0643a25937fafe8d117f2907606e9940cd1cc905c66f16ece9ab93128299994"}, + {file = "rapidfuzz-3.8.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:63044a7b6791a2e945dce9d812a6886e93159deb0464984eb403617ded257f08"}, + {file = "rapidfuzz-3.8.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bbc15985c5658691f637a6b97651771147744edfad2a4be56b8a06755e3932fa"}, + {file = "rapidfuzz-3.8.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:48b6e5a337a814aec7c6dda5d6460f947c9330860615301f35b519e16dde3c77"}, + {file = "rapidfuzz-3.8.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:8c40da44ca20235cda05751d6e828b6b348e7a7c5de2922fa0f9c63f564fd675"}, + {file = "rapidfuzz-3.8.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c21d5c7cfa6078c79897e5e482a7e84ff927143d2f3fb020dd6edd27f5469574"}, + {file = "rapidfuzz-3.8.1-cp311-cp311-win32.whl", hash = "sha256:209bb712c448cdec4def6260b9f059bd4681ec61a01568f5e70e37bfe9efe830"}, + {file = "rapidfuzz-3.8.1-cp311-cp311-win_amd64.whl", hash = "sha256:6f7641992de44ec2ca54102422be44a8e3fb75b9690ccd74fff72b9ac7fc00ee"}, + {file = "rapidfuzz-3.8.1-cp311-cp311-win_arm64.whl", hash = "sha256:c458085e067c766112f089f78ce39eab2b69ba027d7bbb11d067a0b085774367"}, + {file = "rapidfuzz-3.8.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1905d9319a97bed29f21584ca641190dbc9218a556202b77876f1e37618d2e03"}, + {file = "rapidfuzz-3.8.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f176867f438ff2a43e6a837930153ca78fddb3ca94e378603a1e7b860d7869bf"}, + {file = "rapidfuzz-3.8.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25498650e30122f4a5ad6b27c7614b4af8628c1d32b19d406410d33f77a86c80"}, + {file = "rapidfuzz-3.8.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16153a97efacadbd693ccc612a3285df2f072fd07c121f30c2c135a709537075"}, + {file = "rapidfuzz-3.8.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c0264d03dcee1bb975975b77c2fe041820fb4d4a25a99e3cb74ddd083d671ca"}, + {file = "rapidfuzz-3.8.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:17d79398849c1244f646425cf31d856eab9ebd67b7d6571273e53df724ca817e"}, + {file = "rapidfuzz-3.8.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e08b01dc9369941a24d7e512b0d81bf514e7d6add1b93d8aeec3c8fa08a824e"}, + {file = "rapidfuzz-3.8.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97c13f156f14f10667e1cfc4257069b775440ce005e896c09ce3aff21c9ae665"}, + {file = "rapidfuzz-3.8.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8b76abfec195bf1ee6f9ec56c33ba5e9615ff2d0a9530a54001ed87e5a6ced3b"}, + {file = "rapidfuzz-3.8.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:b0ba20be465566264fa5580d874ccf5eabba6975dba45857e2c76e2df3359c6d"}, + {file = "rapidfuzz-3.8.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:4d5cd86aca3f12e73bfc70015db7e8fc44122da03aa3761138b95112e83f66e4"}, + {file = "rapidfuzz-3.8.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:9a16ef3702cecf16056c5fd66398b7ea8622ff4e3afeb00a8db3e74427e850af"}, + {file = "rapidfuzz-3.8.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:392582aa784737d95255ca122ebe7dca3c774da900d100c07b53d32cd221a60e"}, + {file = "rapidfuzz-3.8.1-cp312-cp312-win32.whl", hash = "sha256:ceb10039e7346927cec47eaa490b34abb602b537e738ee9914bb41b8de029fbc"}, + {file = "rapidfuzz-3.8.1-cp312-cp312-win_amd64.whl", hash = "sha256:cc4af7090a626c902c48db9b5d786c1faa0d8e141571e8a63a5350419ea575bd"}, + {file = "rapidfuzz-3.8.1-cp312-cp312-win_arm64.whl", hash = "sha256:3aff3b829b0b04bdf78bd780ec9faf5f26eac3591df98c35a0ae216c925ae436"}, + {file = "rapidfuzz-3.8.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:78a0d2a11bb3936463609777c6d6d4984a27ebb2360b58339c699899d85db036"}, + {file = "rapidfuzz-3.8.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f8af980695b866255447703bf634551e67e1a4e1c2d2d26501858d9233d886d7"}, + {file = "rapidfuzz-3.8.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d1a15fef1938b43468002f2d81012dbc9e7b50eb8533af202b0559c2dc7865d9"}, + {file = "rapidfuzz-3.8.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4dbb1ebc9a811f38da33f32ed2bb5f58b149289b89eb11e384519e9ba7ca881"}, + {file = "rapidfuzz-3.8.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:41219536634bd6f85419f38450ef080cfb519638125d805cf8626443e677dc61"}, + {file = "rapidfuzz-3.8.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e3f882110f2f4894942e314451773c47e8b1b4920b5ea2b6dd2e2d4079dd3135"}, + {file = "rapidfuzz-3.8.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c754ce1fab41b731259f100d5d46529a38aa2c9b683c92aeb7e96ef5b2898cd8"}, + {file = "rapidfuzz-3.8.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:718ea99f84b16c4bdbf6a93e53552cdccefa18e12ff9a02c5041e621460e2e61"}, + {file = "rapidfuzz-3.8.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9441aca94b21f7349cdb231cd0ce9ca251b2355836e8a02bf6ccbea5b442d7a9"}, + {file = "rapidfuzz-3.8.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:90167a48de3ed7f062058826608a80242b8561d0fb0cce2c610d741624811a61"}, + {file = "rapidfuzz-3.8.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:8e02425bfc7ebed617323a674974b70eaecd8f07b64a7d16e0bf3e766b93e3c9"}, + {file = "rapidfuzz-3.8.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:d48657a404fab82b2754faa813a10c5ad6aa594cb1829dca168a49438b61b4ec"}, + {file = "rapidfuzz-3.8.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f8b62fdccc429e6643cefffd5df9c7bca65588d06e8925b78014ad9ad983bf5"}, + {file = "rapidfuzz-3.8.1-cp38-cp38-win32.whl", hash = "sha256:63db612bb6da1bb9f6aa7412739f0e714b1910ec07bc675943044fe683ef192c"}, + {file = "rapidfuzz-3.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:bb571dbd4cc93342be0ba632f0b8d7de4cbd9d959d76371d33716d2216090d41"}, + {file = "rapidfuzz-3.8.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b27cea618601ca5032ea98ee116ca6e0fe67be7b286bcb0b9f956d64db697472"}, + {file = "rapidfuzz-3.8.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d5592b08e3cadc9e06ef3af6a9d66b6ef1bf871ed5acd7f9b1e162d78806a65"}, + {file = "rapidfuzz-3.8.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:58999b21d01dd353f49511a61937eac20c7a5b22eab87612063947081855d85f"}, + {file = "rapidfuzz-3.8.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2ee3909f611cc5860cc8d9f92d039fd84241ce7360b49ea88e657181d2b45f6"}, + {file = "rapidfuzz-3.8.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00b5ee47b387fa3805f4038362a085ec58149135dc5bc640ca315a9893a16f9e"}, + {file = "rapidfuzz-3.8.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4c647795c5b901091a68e210c76b769af70a33a8624ac496ac3e34d33366c0d"}, + {file = "rapidfuzz-3.8.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:77ea62879932b32aba77ab23a9296390a67d024bf2f048dee99143be80a4ce26"}, + {file = "rapidfuzz-3.8.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3fee62ae76e3b8b9fff8aa2ca4061575ee358927ffbdb2919a8c84a98da59f78"}, + {file = "rapidfuzz-3.8.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:231dc1cb63b1c8dd78c0597aa3ad3749a86a2b7e76af295dd81609522699a558"}, + {file = "rapidfuzz-3.8.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:827ddf2d5d157ac3d1001b52e84c9e20366237a742946599ffc435af7fdd26d0"}, + {file = "rapidfuzz-3.8.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c04ef83c9ca3162d200df36e933b3ea0327a2626cee2e01bbe55acbc004ce261"}, + {file = "rapidfuzz-3.8.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:747265f39978bbaad356f5c6b6c808f0e8f5e8994875af0119b82b4700c55387"}, + {file = "rapidfuzz-3.8.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:14791324f0c753f5a0918df1249b91515f5ddc16281fbaa5ec48bff8fa659229"}, + {file = "rapidfuzz-3.8.1-cp39-cp39-win32.whl", hash = "sha256:b7b9cbc60e3eb08da6d18636c62c6eb6206cd9d0c7ad73996f7a1df3fc415b27"}, + {file = "rapidfuzz-3.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:2084193fd8fd346db496a2220363437eb9370a06d1d5a7a9dba00a64390c6a28"}, + {file = "rapidfuzz-3.8.1-cp39-cp39-win_arm64.whl", hash = "sha256:c9597a05d08e8103ad59ebdf29e3fbffb0d0dbf3b641f102cfbeadc3a77bde51"}, + {file = "rapidfuzz-3.8.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5f4174079dfe8ed1f13ece9bde7660f19f98ab17e0c0d002d90cc845c3a7e238"}, + {file = "rapidfuzz-3.8.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07d7d4a3c49a15146d65f06e44d7545628ca0437c929684e32ef122852f44d95"}, + {file = "rapidfuzz-3.8.1-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1ef119fc127c982053fb9ec638dcc3277f83b034b5972eb05941984b9ec4a290"}, + {file = "rapidfuzz-3.8.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8e57f9c2367706a320b78e91f8bf9a3b03bf9069464eb7b54455fa340d03e4c"}, + {file = "rapidfuzz-3.8.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:6d4f1956fe1fc618e34ac79a6ed84fff5a6f23e41a8a476dd3e8570f0b12f02b"}, + {file = "rapidfuzz-3.8.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:313bdcd16e9cd5e5568b4a31d18a631f0b04cc10a3fd916e4ef75b713e6f177e"}, + {file = "rapidfuzz-3.8.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a02def2eb526cc934d2125533cf2f15aa71c72ed4397afca38427ab047901e88"}, + {file = "rapidfuzz-3.8.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9d5d924970b07128c61c08eebee718686f4bd9838ef712a50468169520c953f"}, + {file = "rapidfuzz-3.8.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1edafc0a2737df277d3ddf401f3a73f76e246b7502762c94a3916453ae67e9b1"}, + {file = "rapidfuzz-3.8.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:81fd28389bedab28251f0535b3c034b0e63a618efc3ff1d338c81a3da723adb3"}, + {file = "rapidfuzz-3.8.1.tar.gz", hash = "sha256:a357aae6791118011ad3ab4f2a4aa7bd7a487e5f9981b390e9f3c2c5137ecadf"}, ] [package.extras] @@ -2110,22 +2270,20 @@ yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" -version = "1.11.0" +version = "1.12.1" description = "Mock out responses from the requests package" optional = false -python-versions = "*" +python-versions = ">=3.5" files = [ - {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"}, - {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"}, + {file = "requests-mock-1.12.1.tar.gz", hash = "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401"}, + {file = "requests_mock-1.12.1-py2.py3-none-any.whl", hash = "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563"}, ] [package.dependencies] -requests = ">=2.3,<3" -six = "*" +requests = ">=2.22,<3" [package.extras] fixture = ["fixtures"] -test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] [[package]] name = "responses" @@ -2148,13 +2306,13 @@ tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asy [[package]] name = "s3transfer" -version = "0.10.0" +version = "0.10.1" description = "An Amazon S3 Transfer Manager" optional = false python-versions = ">= 3.8" files = [ - {file = "s3transfer-0.10.0-py3-none-any.whl", hash = "sha256:3cdb40f5cfa6966e812209d0994f2a4709b561c88e90cf00c2696d2df4e56b2e"}, - {file = "s3transfer-0.10.0.tar.gz", hash = "sha256:d0c8bbf672d5eebbe4e57945e23b972d963f07d82f661cabf678a5c88831595b"}, + {file = "s3transfer-0.10.1-py3-none-any.whl", hash = "sha256:ceb252b11bcf87080fb7850a224fb6e05c8a776bab8f2b64b7f25b969464839d"}, + {file = "s3transfer-0.10.1.tar.gz", hash = "sha256:5683916b4c724f799e600f41dd9e10a9ff19871bf87623cc8f491cb4f5fa0a19"}, ] [package.dependencies] @@ -2163,20 +2321,152 @@ botocore = ">=1.33.2,<2.0a.0" [package.extras] crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] +[[package]] +name = "safetensors" +version = "0.4.2" +description = "" +optional = false +python-versions = ">=3.7" +files = [ + {file = "safetensors-0.4.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:69d8bb8384dc2cb5b72c36c4d6980771b293d1a1377b378763f5e37b6bb8d133"}, + {file = "safetensors-0.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3d420e19fcef96d0067f4de4699682b4bbd85fc8fea0bd45fcd961fdf3e8c82c"}, + {file = "safetensors-0.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ca54742122fa3c4821754adb67318e1cd25c3a22bbf0c5520d5176e77a099ac"}, + {file = "safetensors-0.4.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b47aa643afdfd66cf7ce4c184092ae734e15d10aba2c2948f24270211801c3c"}, + {file = "safetensors-0.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d88a16bbc330f27e7f2d4caaf6fb061ad0b8a756ecc4033260b0378e128ce8a2"}, + {file = "safetensors-0.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9223b8ac21085db614a510eb3445e7083cae915a9202357555fa939695d4f57"}, + {file = "safetensors-0.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce6cb86133dc8930a7ab5e7438545a7f205f7a1cdd5aaf108c1d0da6bdcfbc2b"}, + {file = "safetensors-0.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b8a628e0ae2bbc334b62952c384aa5f41621d01850f8d67b04a96b9c39dd7326"}, + {file = "safetensors-0.4.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:88d6beb7f811a081e0e5f1d9669fdac816c45340c04b1eaf7ebfda0ce93ea403"}, + {file = "safetensors-0.4.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b57fc5b1b54cb12d8690a58a4cf4b7144730d4bde9d98aa0e1dab6295a1cd579"}, + {file = "safetensors-0.4.2-cp310-none-win32.whl", hash = "sha256:9d87a1c98803c16cf113b9ba03f07b2dce5e8eabfd1811a7f7323fcaa2a1bf47"}, + {file = "safetensors-0.4.2-cp310-none-win_amd64.whl", hash = "sha256:18930ec1d1ecb526d3d9835abc2489b8f1530877518f0c541e77ef0b7abcbd99"}, + {file = "safetensors-0.4.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:c5dd2ed788730ed56b415d1a11c62026b8cc8c573f55a2092afb3ab383e94fff"}, + {file = "safetensors-0.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cc41791b33efb9c83a59b731619f3d15f543dfe71f3a793cb8fbf9bd5d0d5d71"}, + {file = "safetensors-0.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c888bf71d5ca12a720f1ed87d407c4918afa022fb247a6546d8fac15b1f112b"}, + {file = "safetensors-0.4.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e6b2feb4b47226a16a792e6fac3f49442714884a3d4c1008569d5068a3941be9"}, + {file = "safetensors-0.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f41cc0ee4b838ae8f4d8364a1b162067693d11a3893f0863be8c228d40e4d0ee"}, + {file = "safetensors-0.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:51b7228e46c0a483c40ba4b9470dea00fb1ff8685026bb4766799000f6328ac2"}, + {file = "safetensors-0.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02697f8f2be8ca3c37a4958702dbdb1864447ef765e18b5328a1617022dcf164"}, + {file = "safetensors-0.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:27fd8f65cf7c80e4280cae1ee6bcd85c483882f6580821abe71ee1a0d3dcfca7"}, + {file = "safetensors-0.4.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c487b5f113b0924c9534a07dc034830fb4ef05ce9bb6d78cfe016a7dedfe281f"}, + {file = "safetensors-0.4.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:da7f6483f3fe67ff39b3a55552552c67930ea10a36e9f2539d36fc205273d767"}, + {file = "safetensors-0.4.2-cp311-none-win32.whl", hash = "sha256:52a7012f6cb9cb4a132760b6308daede18a9f5f8952ce08adc7c67a7d865c2d8"}, + {file = "safetensors-0.4.2-cp311-none-win_amd64.whl", hash = "sha256:4d1361a097ac430b310ce9eed8ed4746edee33ddafdfbb965debc8966fc34dc2"}, + {file = "safetensors-0.4.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:77af8aa0edcc2863760fd6febbfdb82e88fd75d0e60c1ce4ba57208ba5e4a89b"}, + {file = "safetensors-0.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846666c1c5a8c8888d2dfda8d3921cb9cb8e2c5f78365be756c11021e75a0a2a"}, + {file = "safetensors-0.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f4bfc7ea19b446bfad41510d4b4c76101698c00caaa8a332c8edd8090a412ef"}, + {file = "safetensors-0.4.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:233436fd30f27ffeb3c3780d0b84f496518868445c7a8db003639a649cc98453"}, + {file = "safetensors-0.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7a09237a795d11cd11f9dae505d170a29b5616151db1e10c14f892b11caadc7d"}, + {file = "safetensors-0.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de01c9a3a3b7b69627d624ff69d9f11d28ce9908eea2fb6245adafa4b1d43df6"}, + {file = "safetensors-0.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c1f25c5069ee42a5bcffdc66c300a407941edd73f3239e9fdefd26216407391"}, + {file = "safetensors-0.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7a73b3649456d09ca8506140d44484b63154a7378434cc1e8719f8056550b224"}, + {file = "safetensors-0.4.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e1625a8d07d046e968bd5c4961810aba1225984e4fb9243626f9d04a06ed3fee"}, + {file = "safetensors-0.4.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f74c86b25615cb24ad4cff765a2eefc09d71bf0fed97588cf585aad9c38fbb4"}, + {file = "safetensors-0.4.2-cp312-none-win32.whl", hash = "sha256:8523b9c5777d771bcde5c2389c03f1cdf7ebe8797432a1bd5e345efe25c55987"}, + {file = "safetensors-0.4.2-cp312-none-win_amd64.whl", hash = "sha256:dcff0243e1737a21f83d664c63fed89d1f532c23fc6830d0427279fabd789ccb"}, + {file = "safetensors-0.4.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:96ad3d7d472612e26cbe413922b4fb13933310f0511d346ea5cc9a1e856e52eb"}, + {file = "safetensors-0.4.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:88250922401b5ae4e37de929178caf46be47ed16c817b2237b81679bec07c120"}, + {file = "safetensors-0.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d40443554142fc0ab30652d5cc8554c4b7a613513bde00373e18afd5de8cbe4b"}, + {file = "safetensors-0.4.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:27f53f70106224d32d874aacecbeb4a6e4c5b16a1d2006d0e876d97229086d71"}, + {file = "safetensors-0.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cc068afe23734dfb26ce19db0a7877499ddf73b1d55ceb762417e8da4a1b05fb"}, + {file = "safetensors-0.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9be1918eb8d43a11a6f8806759fccfa0eeb0542b12924caba66af8a7800ad01a"}, + {file = "safetensors-0.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41911087d20a7bbd78cb4ad4f98aab0c431533107584df6635d8b54b99945573"}, + {file = "safetensors-0.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:50771c662aab909f31e94d048e76861fd027d66076ea773eef2e66c717766e24"}, + {file = "safetensors-0.4.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:13f2e57be007b7ea9329133d2399e6bdfcf1910f655440a4da17df3a45afcd30"}, + {file = "safetensors-0.4.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c772147e6395bc829842e0a98e1b30c67fe25d816299c28196488511d5a5e951"}, + {file = "safetensors-0.4.2-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:36239a0060b537a3e8c473df78cffee14c3ec4f51d5f1a853af99371a2fb2a35"}, + {file = "safetensors-0.4.2-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:d0cbb7664fad2c307f95195f951b7059e95dc23e0e1822e5978c8b500098543c"}, + {file = "safetensors-0.4.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b3e55adb6bd9dc1c2a341e72f48f075953fa35d173dd8e29a95b3b02d0d1462"}, + {file = "safetensors-0.4.2-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42f743b3cca863fba53ca57a193f510e5ec359b97f38c282437716b6768e4a25"}, + {file = "safetensors-0.4.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04e6af4a6dbeb06c4e6e7d46cf9c716cbc4cc5ef62584fd8a7c0fe558562df45"}, + {file = "safetensors-0.4.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a492ba21b5c8f14ee5ec9b20f42ba969e53ca1f909a4d04aad736b66a341dcc2"}, + {file = "safetensors-0.4.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b25b8233a1a85dc67e39838951cfb01595d792f3b7b644add63edb652992e030"}, + {file = "safetensors-0.4.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fd27e063fbdafe776f7b1714da59110e88f270e86db00788a8fd65f4eacfeba7"}, + {file = "safetensors-0.4.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1b6fa399f251bbeb52029bf5a0ac2878d7705dd3612a2f8895b48e9c11f0367d"}, + {file = "safetensors-0.4.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:de642d46b459e4afd5c2020b26c0d6d869a171ea00411897d5776c127cac74f0"}, + {file = "safetensors-0.4.2-cp37-none-win32.whl", hash = "sha256:77b72d17754c93bb68f3598182f14d78776e0b9b31682ca5bb2c7c5bd9a75267"}, + {file = "safetensors-0.4.2-cp37-none-win_amd64.whl", hash = "sha256:d36ee3244d461cd655aeef493792c3bccf4875282f8407fd9af99e9a41cf2530"}, + {file = "safetensors-0.4.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:16b6b3884f7876c6b3b23a742428223a7170a5a9dac819d8c12a1569422c4b5a"}, + {file = "safetensors-0.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ee25d311493fbbe0be9d395faee46e9d79e8948f461e388ff39e59875ed9a350"}, + {file = "safetensors-0.4.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eed8097968585cd752a1171f86fce9aa1d89a29033e5cd8bec5a502e29f6b7af"}, + {file = "safetensors-0.4.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:880e6865cf72cb67f9ab8d04a3c4b49dd95ae92fb1583929ce65aed94e1f685f"}, + {file = "safetensors-0.4.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91290f83daf80ce6d1a7f629b244443c200060a80f908b29d879021409e5ea94"}, + {file = "safetensors-0.4.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3517d568486ab3508a7acc360b82d7a4a3e26b86efdf210a9ecd9d233c40708a"}, + {file = "safetensors-0.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1f43a77eb38540f782999e5dc5645164fe9027d3f0194f6c9a5126168017efa"}, + {file = "safetensors-0.4.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b684d9818aa5d63fddc65f7d0151968037d255d91adf74eba82125b41c680aaa"}, + {file = "safetensors-0.4.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ab1f5d84185f9fefaf21413efb764e4908057b8a9a0b987ede890c353490fd70"}, + {file = "safetensors-0.4.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2bd979642e6c3a517ef4b84ff36c2fee4015664fea05a61154fc565978347553"}, + {file = "safetensors-0.4.2-cp38-none-win32.whl", hash = "sha256:11be6e7afed29e5a5628f0aa6214e34bc194da73f558dc69fc7d56e07037422a"}, + {file = "safetensors-0.4.2-cp38-none-win_amd64.whl", hash = "sha256:2f7a6e5d29bd2cc340cffaa391fa437b1be9d21a2bd8b8724d2875d13a6ef2a9"}, + {file = "safetensors-0.4.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a5a921b4fe6925f9942adff3ebae8c16e0487908c54586a5a42f35b59fd69794"}, + {file = "safetensors-0.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b691727228c28f2d82d8a92b2bc26e7a1f129ee40b2f2a3185b5974e038ed47c"}, + {file = "safetensors-0.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91ca1056decc4e981248786e87b2a202d4841ee5f99d433f1adf3d44d4bcfa0e"}, + {file = "safetensors-0.4.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:55969fd2e6fdb38dc221b0ab380668c21b0efa12a7562db9924759faa3c51757"}, + {file = "safetensors-0.4.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6ae429bfaecc10ab5fe78c93009b3d1656c1581da560041e700eadb497dbe7a4"}, + {file = "safetensors-0.4.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ff88f194fe4ac50b463a4a6f0c03af9ad72eb5d24ec6d6730af59522e37fedb"}, + {file = "safetensors-0.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a80cb48d0a447f8dd18e61813efa7d3f8f8d52edf0f05806abc0c59b83431f57"}, + {file = "safetensors-0.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b286fb7adfee70a4189898ac2342b8a67d5f493e6b21b0af89ca8eac1b967cbf"}, + {file = "safetensors-0.4.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ceeff9ddbab4f78738489eb6682867ae946178776f33699737b2129b5394dc1"}, + {file = "safetensors-0.4.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a26fae748a7488cb3aac381eddfa818c42052c87b5e689fb4c6e82ed58cec209"}, + {file = "safetensors-0.4.2-cp39-none-win32.whl", hash = "sha256:039a42ab33c9d68b39706fd38f1922ace26866eff246bf20271edb619f5f848b"}, + {file = "safetensors-0.4.2-cp39-none-win_amd64.whl", hash = "sha256:b3a3e1f5b85859e398773f064943b62a4059f225008a2a8ee6add1edcf77cacf"}, + {file = "safetensors-0.4.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:4e70d442ad17e8b153ef9095bf48ea64f15a66bf26dc2b6ca94660c154edbc24"}, + {file = "safetensors-0.4.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b90f1d9809caf4ff395951b4703295a68d12907f6945bbc3129e934ff8ae46f6"}, + {file = "safetensors-0.4.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c7ac9ad3728838006598e296b3ae9f27d80b489effd4685b92d97b3fc4c98f6"}, + {file = "safetensors-0.4.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5730d77e6ff7f4c7039e20913661ad0ea2f86c09e71c039e73dfdd1f394f08"}, + {file = "safetensors-0.4.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:44feb8cb156d6803dcd19fc6b81b27235f29b877660605a6ac35e1da7d64f0e4"}, + {file = "safetensors-0.4.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:523a241c33e7c827ab9a3a23760d75c7d062f43dfe55b6b019409f89b0fb52d1"}, + {file = "safetensors-0.4.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fb18300e8eb74291225214f26c9a8ae2110fd61a6c9b5a2ff4c4e0eb1bb9a998"}, + {file = "safetensors-0.4.2-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:fe5437ff9fb116e44f2ab558981249ae63f978392b4576e62fcfe167d353edbc"}, + {file = "safetensors-0.4.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9304a0934ced5a5d272f39de36291dc141dfc152d277f03fb4d65f2fb2ffa7c"}, + {file = "safetensors-0.4.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:160ba1b1e11cf874602c233ab80a14f588571d09556cbc3586900121d622b5ed"}, + {file = "safetensors-0.4.2-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04fcd6fcf7d9c13c7e5dc7e08de5e492ee4daa8f4ad74b4d8299d3eb0224292f"}, + {file = "safetensors-0.4.2-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:906d14c4a677d35834fb0f3a5455ef8305e1bba10a5e0f2e0f357b3d1ad989f2"}, + {file = "safetensors-0.4.2-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:df3fcdec0cd543084610d1f09c65cdb10fb3079f79bceddc092b0d187c6a265b"}, + {file = "safetensors-0.4.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5ca76f13fb1cef242ea3ad2cb37388e7d005994f42af8b44bee56ba48b2d45ce"}, + {file = "safetensors-0.4.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:278a1a3414c020785decdcd741c578725721274d2f9f787fcc930882e83b89cc"}, + {file = "safetensors-0.4.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05b5a461cc68ecd42d9d546e5e1268a39d8ede7934a68d1ce17c3c659cb829d6"}, + {file = "safetensors-0.4.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2341411412a41671d25e26bed59ec121e46bf4fadb8132895e610411c4b9681"}, + {file = "safetensors-0.4.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3497ac3895acf17c5f98197f1fa4769f09c5e7ede07fcb102f1c201e663e052c"}, + {file = "safetensors-0.4.2-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:01b5e71d3754d2201294f1eb7a6d59cce3a5702ff96d83d226571b2ca2183837"}, + {file = "safetensors-0.4.2-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3627dbd1ea488dd8046a0491de5087f3c0d641e7acc80c0189a33c69398f1cd1"}, + {file = "safetensors-0.4.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9d56f0ef53afad26ec54ceede78a43e9a23a076dadbbda7b44d304c591abf4c1"}, + {file = "safetensors-0.4.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b259ca73d42daf658a1bda463f1f83885ae4d93a60869be80d7f7dfcc9d8bbb5"}, + {file = "safetensors-0.4.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ebc3cd401e4eb54e7c0a70346be565e81942d9a41fafd5f4bf7ab3a55d10378"}, + {file = "safetensors-0.4.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5bc384a0309b706aa0425c93abb0390508a61bf029ce99c7d9df4220f25871a5"}, + {file = "safetensors-0.4.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:af2d8f7235d8a08fbccfb8394387890e7fa38942b349a94e6eff13c52ac98087"}, + {file = "safetensors-0.4.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0911315bbcc5289087d063c2c2c7ccd711ea97a7e557a7bce005ac2cf80146aa"}, + {file = "safetensors-0.4.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1efe31673be91832d73439a2af426743e1395fc9ef7b081914e9e1d567bd7b5f"}, + {file = "safetensors-0.4.2.tar.gz", hash = "sha256:acc85dcb09ec5e8aa787f588d7ad4d55c103f31e4ff060e17d92cc0e8b8cac73"}, +] + +[package.extras] +all = ["safetensors[jax]", "safetensors[numpy]", "safetensors[paddlepaddle]", "safetensors[pinned-tf]", "safetensors[quality]", "safetensors[testing]", "safetensors[torch]"] +dev = ["safetensors[all]"] +jax = ["flax (>=0.6.3)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)", "safetensors[numpy]"] +mlx = ["mlx (>=0.0.9)"] +numpy = ["numpy (>=1.21.6)"] +paddlepaddle = ["paddlepaddle (>=2.4.1)", "safetensors[numpy]"] +pinned-tf = ["safetensors[numpy]", "tensorflow (==2.11.0)"] +quality = ["black (==22.3)", "click (==8.0.4)", "flake8 (>=3.8.3)", "isort (>=5.5.4)"] +tensorflow = ["safetensors[numpy]", "tensorflow (>=2.11.0)"] +testing = ["h5py (>=3.7.0)", "huggingface_hub (>=0.12.1)", "hypothesis (>=6.70.2)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "safetensors[numpy]", "setuptools_rust (>=1.5.2)"] +torch = ["safetensors[numpy]", "torch (>=1.10)"] + [[package]] name = "setuptools" -version = "69.1.1" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.1-py3-none-any.whl", hash = "sha256:02fa291a0471b3a18b2b2481ed902af520c69e8ae0919c13da936542754b4c56"}, - {file = "setuptools-69.1.1.tar.gz", hash = "sha256:5c0806c7d9af348e6dd3777b4f4dbb42c7ad85b190104837488eab9a7c945cf8"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] @@ -2238,6 +2528,133 @@ files = [ [package.extras] widechars = ["wcwidth"] +[[package]] +name = "tokenizers" +version = "0.15.2" +description = "" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tokenizers-0.15.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:52f6130c9cbf70544287575a985bf44ae1bda2da7e8c24e97716080593638012"}, + {file = "tokenizers-0.15.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:054c1cc9c6d68f7ffa4e810b3d5131e0ba511b6e4be34157aa08ee54c2f8d9ee"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a9b9b070fdad06e347563b88c278995735292ded1132f8657084989a4c84a6d5"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea621a7eef4b70e1f7a4e84dd989ae3f0eeb50fc8690254eacc08acb623e82f1"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cf7fd9a5141634fa3aa8d6b7be362e6ae1b4cda60da81388fa533e0b552c98fd"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44f2a832cd0825295f7179eaf173381dc45230f9227ec4b44378322d900447c9"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8b9ec69247a23747669ec4b0ca10f8e3dfb3545d550258129bd62291aabe8605"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40b6a4c78da863ff26dbd5ad9a8ecc33d8a8d97b535172601cf00aee9d7ce9ce"}, + {file = "tokenizers-0.15.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5ab2a4d21dcf76af60e05af8063138849eb1d6553a0d059f6534357bce8ba364"}, + {file = "tokenizers-0.15.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a47acfac7e511f6bbfcf2d3fb8c26979c780a91e06fb5b9a43831b2c0153d024"}, + {file = "tokenizers-0.15.2-cp310-none-win32.whl", hash = "sha256:064ff87bb6acdbd693666de9a4b692add41308a2c0ec0770d6385737117215f2"}, + {file = "tokenizers-0.15.2-cp310-none-win_amd64.whl", hash = "sha256:3b919afe4df7eb6ac7cafd2bd14fb507d3f408db7a68c43117f579c984a73843"}, + {file = "tokenizers-0.15.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:89cd1cb93e4b12ff39bb2d626ad77e35209de9309a71e4d3d4672667b4b256e7"}, + {file = "tokenizers-0.15.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cfed5c64e5be23d7ee0f0e98081a25c2a46b0b77ce99a4f0605b1ec43dd481fa"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a907d76dcfda37023ba203ab4ceeb21bc5683436ebefbd895a0841fd52f6f6f2"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20ea60479de6fc7b8ae756b4b097572372d7e4032e2521c1bbf3d90c90a99ff0"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:48e2b9335be2bc0171df9281385c2ed06a15f5cf121c44094338306ab7b33f2c"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:112a1dd436d2cc06e6ffdc0b06d55ac019a35a63afd26475205cb4b1bf0bfbff"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4620cca5c2817177ee8706f860364cc3a8845bc1e291aaf661fb899e5d1c45b0"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccd73a82751c523b3fc31ff8194702e4af4db21dc20e55b30ecc2079c5d43cb7"}, + {file = "tokenizers-0.15.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:107089f135b4ae7817affe6264f8c7a5c5b4fd9a90f9439ed495f54fcea56fb4"}, + {file = "tokenizers-0.15.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0ff110ecc57b7aa4a594396525a3451ad70988e517237fe91c540997c4e50e29"}, + {file = "tokenizers-0.15.2-cp311-none-win32.whl", hash = "sha256:6d76f00f5c32da36c61f41c58346a4fa7f0a61be02f4301fd30ad59834977cc3"}, + {file = "tokenizers-0.15.2-cp311-none-win_amd64.whl", hash = "sha256:cc90102ed17271cf0a1262babe5939e0134b3890345d11a19c3145184b706055"}, + {file = "tokenizers-0.15.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f86593c18d2e6248e72fb91c77d413a815153b8ea4e31f7cd443bdf28e467670"}, + {file = "tokenizers-0.15.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0774bccc6608eca23eb9d620196687c8b2360624619623cf4ba9dc9bd53e8b51"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d0222c5b7c9b26c0b4822a82f6a7011de0a9d3060e1da176f66274b70f846b98"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3835738be1de66624fff2f4f6f6684775da4e9c00bde053be7564cbf3545cc66"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0143e7d9dcd811855c1ce1ab9bf5d96d29bf5e528fd6c7824d0465741e8c10fd"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db35825f6d54215f6b6009a7ff3eedee0848c99a6271c870d2826fbbedf31a38"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f5e64b0389a2be47091d8cc53c87859783b837ea1a06edd9d8e04004df55a5c"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e0480c452217edd35eca56fafe2029fb4d368b7c0475f8dfa3c5c9c400a7456"}, + {file = "tokenizers-0.15.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a33ab881c8fe70474980577e033d0bc9a27b7ab8272896e500708b212995d834"}, + {file = "tokenizers-0.15.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a308a607ca9de2c64c1b9ba79ec9a403969715a1b8ba5f998a676826f1a7039d"}, + {file = "tokenizers-0.15.2-cp312-none-win32.whl", hash = "sha256:b8fcfa81bcb9447df582c5bc96a031e6df4da2a774b8080d4f02c0c16b42be0b"}, + {file = "tokenizers-0.15.2-cp312-none-win_amd64.whl", hash = "sha256:38d7ab43c6825abfc0b661d95f39c7f8af2449364f01d331f3b51c94dcff7221"}, + {file = "tokenizers-0.15.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:38bfb0204ff3246ca4d5e726e8cc8403bfc931090151e6eede54d0e0cf162ef0"}, + {file = "tokenizers-0.15.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9c861d35e8286a53e06e9e28d030b5a05bcbf5ac9d7229e561e53c352a85b1fc"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:936bf3842db5b2048eaa53dade907b1160f318e7c90c74bfab86f1e47720bdd6"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:620beacc3373277700d0e27718aa8b25f7b383eb8001fba94ee00aeea1459d89"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2735ecbbf37e52db4ea970e539fd2d450d213517b77745114f92867f3fc246eb"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:473c83c5e2359bb81b0b6fde870b41b2764fcdd36d997485e07e72cc3a62264a"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:968fa1fb3c27398b28a4eca1cbd1e19355c4d3a6007f7398d48826bbe3a0f728"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:865c60ae6eaebdde7da66191ee9b7db52e542ed8ee9d2c653b6d190a9351b980"}, + {file = "tokenizers-0.15.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7c0d8b52664ab2d4a8d6686eb5effc68b78608a9008f086a122a7b2996befbab"}, + {file = "tokenizers-0.15.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:f33dfbdec3784093a9aebb3680d1f91336c56d86cc70ddf88708251da1fe9064"}, + {file = "tokenizers-0.15.2-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:d44ba80988ff9424e33e0a49445072ac7029d8c0e1601ad25a0ca5f41ed0c1d6"}, + {file = "tokenizers-0.15.2-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:dce74266919b892f82b1b86025a613956ea0ea62a4843d4c4237be2c5498ed3a"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0ef06b9707baeb98b316577acb04f4852239d856b93e9ec3a299622f6084e4be"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c73e2e74bbb07910da0d37c326869f34113137b23eadad3fc00856e6b3d9930c"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4eeb12daf02a59e29f578a865f55d87cd103ce62bd8a3a5874f8fdeaa82e336b"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ba9f6895af58487ca4f54e8a664a322f16c26bbb442effd01087eba391a719e"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ccec77aa7150e38eec6878a493bf8c263ff1fa8a62404e16c6203c64c1f16a26"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3f40604f5042ff210ba82743dda2b6aa3e55aa12df4e9f2378ee01a17e2855e"}, + {file = "tokenizers-0.15.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5645938a42d78c4885086767c70923abad047163d809c16da75d6b290cb30bbe"}, + {file = "tokenizers-0.15.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:05a77cbfebe28a61ab5c3891f9939cc24798b63fa236d84e5f29f3a85a200c00"}, + {file = "tokenizers-0.15.2-cp37-none-win32.whl", hash = "sha256:361abdc068e8afe9c5b818769a48624687fb6aaed49636ee39bec4e95e1a215b"}, + {file = "tokenizers-0.15.2-cp37-none-win_amd64.whl", hash = "sha256:7ef789f83eb0f9baeb4d09a86cd639c0a5518528f9992f38b28e819df397eb06"}, + {file = "tokenizers-0.15.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4fe1f74a902bee74a3b25aff180fbfbf4f8b444ab37c4d496af7afd13a784ed2"}, + {file = "tokenizers-0.15.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c4b89038a684f40a6b15d6b09f49650ac64d951ad0f2a3ea9169687bbf2a8ba"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d05a1b06f986d41aed5f2de464c003004b2df8aaf66f2b7628254bcbfb72a438"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:508711a108684111ec8af89d3a9e9e08755247eda27d0ba5e3c50e9da1600f6d"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:daa348f02d15160cb35439098ac96e3a53bacf35885072611cd9e5be7d333daa"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:494fdbe5932d3416de2a85fc2470b797e6f3226c12845cadf054dd906afd0442"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2d60f5246f4da9373f75ff18d64c69cbf60c3bca597290cea01059c336d2470"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93268e788825f52de4c7bdcb6ebc1fcd4a5442c02e730faa9b6b08f23ead0e24"}, + {file = "tokenizers-0.15.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6fc7083ab404019fc9acafe78662c192673c1e696bd598d16dc005bd663a5cf9"}, + {file = "tokenizers-0.15.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:41e39b41e5531d6b2122a77532dbea60e171ef87a3820b5a3888daa847df4153"}, + {file = "tokenizers-0.15.2-cp38-none-win32.whl", hash = "sha256:06cd0487b1cbfabefb2cc52fbd6b1f8d4c37799bd6c6e1641281adaa6b2504a7"}, + {file = "tokenizers-0.15.2-cp38-none-win_amd64.whl", hash = "sha256:5179c271aa5de9c71712e31cb5a79e436ecd0d7532a408fa42a8dbfa4bc23fd9"}, + {file = "tokenizers-0.15.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:82f8652a74cc107052328b87ea8b34291c0f55b96d8fb261b3880216a9f9e48e"}, + {file = "tokenizers-0.15.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:02458bee6f5f3139f1ebbb6d042b283af712c0981f5bc50edf771d6b762d5e4f"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c9a09cd26cca2e1c349f91aa665309ddb48d71636370749414fbf67bc83c5343"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:158be8ea8554e5ed69acc1ce3fbb23a06060bd4bbb09029431ad6b9a466a7121"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ddba9a2b0c8c81633eca0bb2e1aa5b3a15362b1277f1ae64176d0f6eba78ab1"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ef5dd1d39797044642dbe53eb2bc56435308432e9c7907728da74c69ee2adca"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:454c203164e07a860dbeb3b1f4a733be52b0edbb4dd2e5bd75023ffa8b49403a"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cf6b7f1d4dc59af960e6ffdc4faffe6460bbfa8dce27a58bf75755ffdb2526d"}, + {file = "tokenizers-0.15.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2ef09bbc16519f6c25d0c7fc0c6a33a6f62923e263c9d7cca4e58b8c61572afb"}, + {file = "tokenizers-0.15.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c9a2ebdd2ad4ec7a68e7615086e633857c85e2f18025bd05d2a4399e6c5f7169"}, + {file = "tokenizers-0.15.2-cp39-none-win32.whl", hash = "sha256:918fbb0eab96fe08e72a8c2b5461e9cce95585d82a58688e7f01c2bd546c79d0"}, + {file = "tokenizers-0.15.2-cp39-none-win_amd64.whl", hash = "sha256:524e60da0135e106b254bd71f0659be9f89d83f006ea9093ce4d1fab498c6d0d"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6a9b648a58281c4672212fab04e60648fde574877d0139cd4b4f93fe28ca8944"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:7c7d18b733be6bbca8a55084027f7be428c947ddf871c500ee603e375013ffba"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:13ca3611de8d9ddfbc4dc39ef54ab1d2d4aaa114ac8727dfdc6a6ec4be017378"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:237d1bf3361cf2e6463e6c140628e6406766e8b27274f5fcc62c747ae3c6f094"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67a0fe1e49e60c664915e9fb6b0cb19bac082ab1f309188230e4b2920230edb3"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4e022fe65e99230b8fd89ebdfea138c24421f91c1a4f4781a8f5016fd5cdfb4d"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d857be2df69763362ac699f8b251a8cd3fac9d21893de129bc788f8baaef2693"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:708bb3e4283177236309e698da5fcd0879ce8fd37457d7c266d16b550bcbbd18"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:64c35e09e9899b72a76e762f9854e8750213f67567787d45f37ce06daf57ca78"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1257f4394be0d3b00de8c9e840ca5601d0a4a8438361ce9c2b05c7d25f6057b"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02272fe48280e0293a04245ca5d919b2c94a48b408b55e858feae9618138aeda"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dc3ad9ebc76eabe8b1d7c04d38be884b8f9d60c0cdc09b0aa4e3bcf746de0388"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:32e16bdeffa7c4f46bf2152172ca511808b952701d13e7c18833c0b73cb5c23f"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:fb16ba563d59003028b678d2361a27f7e4ae0ab29c7a80690efa20d829c81fdb"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:2277c36d2d6cdb7876c274547921a42425b6810d38354327dd65a8009acf870c"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1cf75d32e8d250781940d07f7eece253f2fe9ecdb1dc7ba6e3833fa17b82fcbc"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1b3b31884dc8e9b21508bb76da80ebf7308fdb947a17affce815665d5c4d028"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b10122d8d8e30afb43bb1fe21a3619f62c3e2574bff2699cf8af8b0b6c5dc4a3"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d88b96ff0fe8e91f6ef01ba50b0d71db5017fa4e3b1d99681cec89a85faf7bf7"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:37aaec5a52e959892870a7c47cef80c53797c0db9149d458460f4f31e2fb250e"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e2ea752f2b0fe96eb6e2f3adbbf4d72aaa1272079b0dfa1145507bd6a5d537e6"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:4b19a808d8799fda23504a5cd31d2f58e6f52f140380082b352f877017d6342b"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:64c86e5e068ac8b19204419ed8ca90f9d25db20578f5881e337d203b314f4104"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de19c4dc503c612847edf833c82e9f73cd79926a384af9d801dcf93f110cea4e"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea09acd2fe3324174063d61ad620dec3bcf042b495515f27f638270a7d466e8b"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:cf27fd43472e07b57cf420eee1e814549203d56de00b5af8659cb99885472f1f"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:7ca22bd897537a0080521445d91a58886c8c04084a6a19e6c78c586e0cfa92a5"}, + {file = "tokenizers-0.15.2.tar.gz", hash = "sha256:e6e9c6e019dd5484be5beafc775ae6c925f4c69a3487040ed09b45e13df2cb91"}, +] + +[package.dependencies] +huggingface_hub = ">=0.16.4,<1.0" + +[package.extras] +dev = ["tokenizers[testing]"] +docs = ["setuptools_rust", "sphinx", "sphinx_rtd_theme"] +testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"] + [[package]] name = "toml" version = "0.10.2" @@ -2269,15 +2686,83 @@ notebook = ["ipywidgets (>=6)"] slack = ["slack-sdk"] telegram = ["requests"] +[[package]] +name = "transformers" +version = "4.38.2" +description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "transformers-4.38.2-py3-none-any.whl", hash = "sha256:c4029cb9f01b3dd335e52f364c52d2b37c65b4c78e02e6a08b1919c5c928573e"}, + {file = "transformers-4.38.2.tar.gz", hash = "sha256:c5fc7ad682b8a50a48b2a4c05d4ea2de5567adb1bdd00053619dbe5960857dd5"}, +] + +[package.dependencies] +filelock = "*" +huggingface-hub = ">=0.19.3,<1.0" +numpy = ">=1.17" +packaging = ">=20.0" +pyyaml = ">=5.1" +regex = "!=2019.12.17" +requests = "*" +safetensors = ">=0.4.1" +tokenizers = ">=0.14,<0.19" +tqdm = ">=4.27" + +[package.extras] +accelerate = ["accelerate (>=0.21.0)"] +agents = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "datasets (!=2.5.0)", "diffusers", "opencv-python", "sentencepiece (>=0.1.91,!=0.1.92)", "torch"] +all = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune] (>=2.7.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timm", "tokenizers (>=0.14,<0.19)", "torch", "torchaudio", "torchvision"] +audio = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] +codecarbon = ["codecarbon (==1.2.0)"] +deepspeed = ["accelerate (>=0.21.0)", "deepspeed (>=0.9.3)"] +deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.21.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "deepspeed (>=0.9.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "optuna", "parameterized", "protobuf", "psutil", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] +dev = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "decord (==0.6.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flax (>=0.4.1,<=0.7.0)", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "timm", "tokenizers (>=0.14,<0.19)", "torch", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] +dev-tensorflow = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "tokenizers (>=0.14,<0.19)", "urllib3 (<2.0.0)"] +dev-torch = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "timeout-decorator", "timm", "tokenizers (>=0.14,<0.19)", "torch", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] +docs = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "hf-doc-builder", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune] (>=2.7.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timm", "tokenizers (>=0.14,<0.19)", "torch", "torchaudio", "torchvision"] +docs-specific = ["hf-doc-builder"] +flax = ["flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "optax (>=0.0.8,<=0.1.4)"] +flax-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] +ftfy = ["ftfy"] +integrations = ["optuna", "ray[tune] (>=2.7.0)", "sigopt"] +ja = ["fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "rhoknp (>=1.1.0,<1.3.1)", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] +modelcreation = ["cookiecutter (==1.7.3)"] +natten = ["natten (>=0.14.6,<0.15.0)"] +onnx = ["onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "tf2onnx"] +onnxruntime = ["onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)"] +optuna = ["optuna"] +quality = ["GitPython (<3.1.19)", "datasets (!=2.5.0)", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "ruff (==0.1.5)", "urllib3 (<2.0.0)"] +ray = ["ray[tune] (>=2.7.0)"] +retrieval = ["datasets (!=2.5.0)", "faiss-cpu"] +sagemaker = ["sagemaker (>=2.31.0)"] +sentencepiece = ["protobuf", "sentencepiece (>=0.1.91,!=0.1.92)"] +serving = ["fastapi", "pydantic", "starlette", "uvicorn"] +sigopt = ["sigopt"] +sklearn = ["scikit-learn"] +speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] +testing = ["GitPython (<3.1.19)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "parameterized", "protobuf", "psutil", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "tensorboard", "timeout-decorator"] +tf = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx"] +tf-cpu = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow-cpu (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx"] +tf-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] +timm = ["timm"] +tokenizers = ["tokenizers (>=0.14,<0.19)"] +torch = ["accelerate (>=0.21.0)", "torch"] +torch-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] +torch-vision = ["Pillow (>=10.0.1,<=15.0)", "torchvision"] +torchhub = ["filelock", "huggingface-hub (>=0.19.3,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.14,<0.19)", "torch", "tqdm (>=4.27)"] +video = ["av (==9.2.0)", "decord (==0.6.0)"] +vision = ["Pillow (>=10.0.1,<=15.0)"] + [[package]] name = "typing-extensions" -version = "4.10.0" +version = "4.11.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, - {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, + {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, + {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, ] [[package]] @@ -2433,18 +2918,18 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "urllib3" -version = "2.0.7" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "urllib3-2.0.7-py3-none-any.whl", hash = "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e"}, - {file = "urllib3-2.0.7.tar.gz", hash = "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] +h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] @@ -2464,13 +2949,13 @@ bracex = ">=2.1.1" [[package]] name = "werkzeug" -version = "3.0.1" +version = "3.0.2" description = "The comprehensive WSGI web application library." optional = false python-versions = ">=3.8" files = [ - {file = "werkzeug-3.0.1-py3-none-any.whl", hash = "sha256:90a285dc0e42ad56b34e696398b8122ee4c681833fb35b8334a095d82c56da10"}, - {file = "werkzeug-3.0.1.tar.gz", hash = "sha256:507e811ecea72b18a404947aded4b3390e1db8f826b494d76550ef45bb3b1dcc"}, + {file = "werkzeug-3.0.2-py3-none-any.whl", hash = "sha256:3aac3f5da756f93030740bc235d3e09449efcf65f2f55e3602e1d851b8f48795"}, + {file = "werkzeug-3.0.2.tar.gz", hash = "sha256:e39b645a6ac92822588e7b39a692e7828724ceae0b0d702ef96701f90e70128d"}, ] [package.dependencies] @@ -2582,20 +3067,20 @@ files = [ [[package]] name = "zipp" -version = "3.17.0" +version = "3.18.1" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, - {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, + {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, + {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "e1d8b49218f24ab7ceb86c74a9374287891f17e6481215e1049163a61df28f85" +content-hash = "b6c92390488dfc5f6510aec79f69af60a6dc15305761deb37d1a51a4aecec96b" diff --git a/airbyte-integrations/connectors/source-s3/pyproject.toml b/airbyte-integrations/connectors/source-s3/pyproject.toml index 44c319fbf268f..449c830d5334c 100644 --- a/airbyte-integrations/connectors/source-s3/pyproject.toml +++ b/airbyte-integrations/connectors/source-s3/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "4.5.9" +version = "4.5.12" name = "source-s3" description = "Source implementation for S3." authors = [ "Airbyte ",] @@ -21,6 +21,8 @@ pytz = "==2024.1" wcmatch = "==8.4" python-snappy = "==0.6.1" dill = "==0.3.4" +# override transitive dependency that had a vulnerability https://nvd.nist.gov/vuln/detail/CVE-2023-6730 +transformers = "4.38.2" [tool.poetry.scripts] source-s3 = "source_s3.run:run" diff --git a/airbyte-integrations/connectors/source-salesforce/.coveragerc b/airbyte-integrations/connectors/source-salesforce/.coveragerc new file mode 100644 index 0000000000000..2e888e4e4334e --- /dev/null +++ b/airbyte-integrations/connectors/source-salesforce/.coveragerc @@ -0,0 +1,3 @@ +[run] +omit = + source_salesforce/run.py \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-salesforce/integration_tests/integration_test.py b/airbyte-integrations/connectors/source-salesforce/integration_tests/integration_test.py index 0cb90aa8b52a9..1d843fdfe5c5d 100644 --- a/airbyte-integrations/connectors/source-salesforce/integration_tests/integration_test.py +++ b/airbyte-integrations/connectors/source-salesforce/integration_tests/integration_test.py @@ -12,6 +12,7 @@ import pytest import requests from airbyte_cdk.models import SyncMode +from airbyte_protocol.models import ConfiguredAirbyteCatalog from source_salesforce.api import Salesforce from source_salesforce.source import SourceSalesforce @@ -20,6 +21,10 @@ NOTE_CONTENT = "It's the note for integration test" UPDATED_NOTE_CONTENT = "It's the updated note for integration test" +_ANY_CATALOG = ConfiguredAirbyteCatalog.parse_obj({"streams": []}) +_ANY_CONFIG = {} +_ANY_STATE = {} + @pytest.fixture(scope="module") def input_sandbox_config(): @@ -41,7 +46,7 @@ def stream_name(): @pytest.fixture(scope="module") def stream(input_sandbox_config, stream_name, sf): - return SourceSalesforce.generate_streams(input_sandbox_config, {stream_name: None}, sf)[0] + return SourceSalesforce(_ANY_CATALOG, _ANY_CONFIG, _ANY_STATE).generate_streams(input_sandbox_config, {stream_name: None}, sf)[0]._legacy_stream def _encode_content(text): diff --git a/airbyte-integrations/connectors/source-salesforce/metadata.yaml b/airbyte-integrations/connectors/source-salesforce/metadata.yaml index ecb9c09e72788..7714035af61d5 100644 --- a/airbyte-integrations/connectors/source-salesforce/metadata.yaml +++ b/airbyte-integrations/connectors/source-salesforce/metadata.yaml @@ -10,12 +10,13 @@ data: connectorSubtype: api connectorType: source definitionId: b117307c-14b6-41aa-9422-947e34922962 - dockerImageTag: 2.3.3 + dockerImageTag: 2.5.2 dockerRepository: airbyte/source-salesforce documentationUrl: https://docs.airbyte.com/integrations/sources/salesforce githubIssueLabel: source-salesforce icon: salesforce.svg license: ELv2 + maxSecondsBetweenMessages: 86400 name: Salesforce remoteRegistries: pypi: diff --git a/airbyte-integrations/connectors/source-salesforce/poetry.lock b/airbyte-integrations/connectors/source-salesforce/poetry.lock index 1b8149e9cedb9..bc74e5841c076 100644 --- a/airbyte-integrations/connectors/source-salesforce/poetry.lock +++ b/airbyte-integrations/connectors/source-salesforce/poetry.lock @@ -1,51 +1,50 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.63.2" +version = "0.81.4" description = "A framework for writing Airbyte Connectors." optional = false -python-versions = ">=3.8" +python-versions = "<4.0,>=3.9" files = [ - {file = "airbyte-cdk-0.63.2.tar.gz", hash = "sha256:b2edc160f560352a816f3a266b5dfa6dfe37868add1e3a0a2628eb19ba771ed1"}, - {file = "airbyte_cdk-0.63.2-py3-none-any.whl", hash = "sha256:8698cb94514f35577123520954503cb2da407423af109dffd03644ba8b0093cd"}, + {file = "airbyte_cdk-0.81.4-py3-none-any.whl", hash = "sha256:4ed193da4e8be4867e1d8983172d10afb3c3b10f3e10ec618431deec1f2af4cb"}, + {file = "airbyte_cdk-0.81.4.tar.gz", hash = "sha256:5c63d8c792edf5f24d0ad804b34b3ebcc056ecede6cb4f87ebf9ac07aa987f24"}, ] [package.dependencies] -airbyte-protocol-models = "0.5.1" +airbyte-protocol-models = "*" backoff = "*" cachetools = "*" -Deprecated = ">=1.2,<2.0" +Deprecated = ">=1.2,<1.3" dpath = ">=2.0.1,<2.1.0" genson = "1.2.2" isodate = ">=0.6.1,<0.7.0" Jinja2 = ">=3.1.2,<3.2.0" -jsonref = ">=0.2,<1.0" +jsonref = ">=0.2,<0.3" jsonschema = ">=3.2.0,<3.3.0" pendulum = "<3.0.0" pydantic = ">=1.10.8,<2.0.0" pyrate-limiter = ">=3.1.0,<3.2.0" python-dateutil = "*" -PyYAML = ">=6.0.1" +PyYAML = ">=6.0.1,<7.0.0" requests = "*" -requests-cache = "*" +requests_cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] [[package]] name = "airbyte-protocol-models" -version = "0.5.1" +version = "0.9.0" description = "Declares the Airbyte Protocol." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, - {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, + {file = "airbyte_protocol_models-0.9.0-py3-none-any.whl", hash = "sha256:e972e140b5efd1edad5a338bcae8fdee9fc12545caf2c321e0f61b151c163a9b"}, + {file = "airbyte_protocol_models-0.9.0.tar.gz", hash = "sha256:40b69c33df23fe82d7078e84beb123bd604480e4d73cb277a890fcc92aedc8d2"}, ] [package.dependencies] @@ -104,13 +103,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -327,13 +326,13 @@ files = [ [[package]] name = "idna" -version = "3.6" +version = "3.7" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.5" files = [ - {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, - {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, ] [[package]] @@ -380,13 +379,13 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jsonref" -version = "0.3.0" -description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." +version = "0.2" +description = "An implementation of JSON Reference for Python" optional = false -python-versions = ">=3.3,<4.0" +python-versions = "*" files = [ - {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, - {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, ] [[package]] @@ -526,13 +525,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -684,47 +683,47 @@ files = [ [[package]] name = "pydantic" -version = "1.10.14" +version = "1.10.15" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, - {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, - {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, - {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, - {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, - {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, - {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, - {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, - {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, - {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, - {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, - {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, - {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, - {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, - {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, - {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, - {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, - {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, - {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, - {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, - {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, - {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, - {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, - {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, - {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, - {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, - {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, - {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, - {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, - {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, - {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, - {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, - {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, - {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, - {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, - {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50f1666a9940d3d68683c9d96e39640f709d7a72ff8702987dab1761036206bb"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82790d4753ee5d00739d6cb5cf56bceb186d9d6ce134aca3ba7befb1eedbc2c8"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d207d5b87f6cbefbdb1198154292faee8017d7495a54ae58db06762004500d00"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e49db944fad339b2ccb80128ffd3f8af076f9f287197a480bf1e4ca053a866f0"}, + {file = "pydantic-1.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:d3b5c4cbd0c9cb61bbbb19ce335e1f8ab87a811f6d589ed52b0254cf585d709c"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3d5731a120752248844676bf92f25a12f6e45425e63ce22e0849297a093b5b0"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c365ad9c394f9eeffcb30a82f4246c0006417f03a7c0f8315d6211f25f7cb654"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3287e1614393119c67bd4404f46e33ae3be3ed4cd10360b48d0a4459f420c6a3"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be51dd2c8596b25fe43c0a4a59c2bee4f18d88efb8031188f9e7ddc6b469cf44"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6a51a1dd4aa7b3f1317f65493a182d3cff708385327c1c82c81e4a9d6d65b2e4"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4e316e54b5775d1eb59187f9290aeb38acf620e10f7fd2f776d97bb788199e53"}, + {file = "pydantic-1.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:0d142fa1b8f2f0ae11ddd5e3e317dcac060b951d605fda26ca9b234b92214986"}, + {file = "pydantic-1.10.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7ea210336b891f5ea334f8fc9f8f862b87acd5d4a0cbc9e3e208e7aa1775dabf"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3453685ccd7140715e05f2193d64030101eaad26076fad4e246c1cc97e1bb30d"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bea1f03b8d4e8e86702c918ccfd5d947ac268f0f0cc6ed71782e4b09353b26f"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:af9850d98fc21e5bc24ea9e35dd80a29faf6462c608728a110c0a30b595e58b7"}, + {file = "pydantic-1.10.15-cp37-cp37m-win_amd64.whl", hash = "sha256:d31ee5b14a82c9afe2bd26aaa405293d4237d0591527d9129ce36e58f19f95c1"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5e09c19df304b8123938dc3c53d3d3be6ec74b9d7d0d80f4f4b5432ae16c2022"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7ac9237cd62947db00a0d16acf2f3e00d1ae9d3bd602b9c415f93e7a9fc10528"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:584f2d4c98ffec420e02305cf675857bae03c9d617fcfdc34946b1160213a948"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbc6989fad0c030bd70a0b6f626f98a862224bc2b1e36bfc531ea2facc0a340c"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d573082c6ef99336f2cb5b667b781d2f776d4af311574fb53d908517ba523c22"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6bd7030c9abc80134087d8b6e7aa957e43d35714daa116aced57269a445b8f7b"}, + {file = "pydantic-1.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:3350f527bb04138f8aff932dc828f154847fbdc7a1a44c240fbfff1b57f49a12"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51d405b42f1b86703555797270e4970a9f9bd7953f3990142e69d1037f9d9e51"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a980a77c52723b0dc56640ced396b73a024d4b74f02bcb2d21dbbac1debbe9d0"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f1a1fb467d3f49e1708a3f632b11c69fccb4e748a325d5a491ddc7b5d22383"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:676ed48f2c5bbad835f1a8ed8a6d44c1cd5a21121116d2ac40bd1cd3619746ed"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92229f73400b80c13afcd050687f4d7e88de9234d74b27e6728aa689abcf58cc"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2746189100c646682eff0bce95efa7d2e203420d8e1c613dc0c6b4c1d9c1fde4"}, + {file = "pydantic-1.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:394f08750bd8eaad714718812e7fab615f873b3cdd0b9d84e76e51ef3b50b6b7"}, + {file = "pydantic-1.10.15-py3-none-any.whl", hash = "sha256:28e552a060ba2740d0d2aabe35162652c1459a0b9069fe0db7f4ee0e18e74d58"}, + {file = "pydantic-1.10.15.tar.gz", hash = "sha256:ca832e124eda231a60a041da4f013e3ff24949d94a01154b137fc2f2a43c3ffb"}, ] [package.dependencies] @@ -816,17 +815,17 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xm [[package]] name = "pytest-mock" -version = "3.12.0" +version = "3.14.0" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, - {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, ] [package.dependencies] -pytest = ">=5.0" +pytest = ">=6.2.5" [package.extras] dev = ["pre-commit", "pytest-asyncio", "tox"] @@ -847,13 +846,13 @@ pytest = ">=5.0.0" [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -964,13 +963,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -982,50 +981,48 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" -version = "1.11.0" +version = "1.12.1" description = "Mock out responses from the requests package" optional = false -python-versions = "*" +python-versions = ">=3.5" files = [ - {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"}, - {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"}, + {file = "requests-mock-1.12.1.tar.gz", hash = "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401"}, + {file = "requests_mock-1.12.1-py2.py3-none-any.whl", hash = "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563"}, ] [package.dependencies] -requests = ">=2.3,<3" -six = "*" +requests = ">=2.22,<3" [package.extras] fixture = ["fixtures"] -test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] [[package]] name = "setuptools" -version = "69.1.0" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -1051,13 +1048,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.11.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, + {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, ] [[package]] @@ -1087,13 +1084,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1198,4 +1195,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "bcf80ea642eccd247e3804a1445935df891810324d047a52571f7c1b28e3450e" +content-hash = "57e78e8581409d12785bf79c2ab5b4d5251afe90782616babc10756528848deb" diff --git a/airbyte-integrations/connectors/source-salesforce/pyproject.toml b/airbyte-integrations/connectors/source-salesforce/pyproject.toml index aabb267b93a55..30b6154390fe5 100644 --- a/airbyte-integrations/connectors/source-salesforce/pyproject.toml +++ b/airbyte-integrations/connectors/source-salesforce/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "2.3.3" +version = "2.5.2" name = "source-salesforce" description = "Source implementation for Salesforce." authors = [ "Airbyte ",] @@ -18,7 +18,7 @@ include = "source_salesforce" [tool.poetry.dependencies] python = "^3.9,<3.12" pandas = "2.2.1" -airbyte-cdk = "^0.63.2" +airbyte-cdk = "^0" [tool.poetry.scripts] source-salesforce = "source_salesforce.run:run" diff --git a/airbyte-integrations/connectors/source-salesforce/source_salesforce/api.py b/airbyte-integrations/connectors/source-salesforce/source_salesforce/api.py index eb0eed9ef70d6..7a414907dd2d0 100644 --- a/airbyte-integrations/connectors/source-salesforce/source_salesforce/api.py +++ b/airbyte-integrations/connectors/source-salesforce/source_salesforce/api.py @@ -6,10 +6,11 @@ import logging from typing import Any, List, Mapping, Optional, Tuple +import backoff import requests # type: ignore[import] from airbyte_cdk.models import ConfiguredAirbyteCatalog from airbyte_cdk.utils import AirbyteTracedException -from airbyte_protocol.models import FailureType +from airbyte_protocol.models import FailureType, StreamDescriptor from requests import adapters as request_adapters from requests.exceptions import HTTPError, RequestException # type: ignore[import] @@ -300,7 +301,7 @@ def get_validated_streams(self, config: Mapping[str, Any], catalog: ConfiguredAi validated_streams = [stream_name for stream_name in stream_names if self.filter_streams(stream_name)] return {stream_name: sobject_options for stream_name, sobject_options in stream_objects.items() if stream_name in validated_streams} - @default_backoff_handler(max_tries=5, factor=5) + @default_backoff_handler(max_tries=5, backoff_method=backoff.expo, backoff_params={"factor": 5}) def _make_request( self, http_method: str, url: str, headers: dict = None, body: dict = None, stream: bool = False, params: dict = None ) -> requests.models.Response: @@ -373,7 +374,14 @@ def load_schema(name: str, stream_options: Mapping[str, Any]) -> Tuple[str, Opti ): if err: self.logger.error(f"Loading error of the {stream_name} schema: {err}") - continue + # Without schema information, the source can't determine the type of stream to instantiate and there might be issues + # related to property chunking + raise AirbyteTracedException( + message=f"Schema could not be extracted for stream {stream_name}. Please retry later.", + internal_message=str(err), + failure_type=FailureType.system_error, + stream_descriptor=StreamDescriptor(name=stream_name), + ) stream_schemas[stream_name] = schema return stream_schemas diff --git a/airbyte-integrations/connectors/source-salesforce/source_salesforce/availability_strategy.py b/airbyte-integrations/connectors/source-salesforce/source_salesforce/availability_strategy.py index 514727089c2a4..a4fcca0c1e7a3 100644 --- a/airbyte-integrations/connectors/source-salesforce/source_salesforce/availability_strategy.py +++ b/airbyte-integrations/connectors/source-salesforce/source_salesforce/availability_strategy.py @@ -29,7 +29,7 @@ def handle_http_error( if error.response.status_code in [codes.FORBIDDEN, codes.BAD_REQUEST]: error_data = error.response.json()[0] error_code = error_data.get("errorCode", "") - if error_code != "REQUEST_LIMIT_EXCEEDED" or error_code == "INVALID_TYPE_FOR_OPERATION": + if error_code != "REQUEST_LIMIT_EXCEEDED": return False, f"Cannot receive data for stream '{stream.name}', error message: '{error_data.get('message')}'" return True, None raise error diff --git a/airbyte-integrations/connectors/source-salesforce/source_salesforce/rate_limiting.py b/airbyte-integrations/connectors/source-salesforce/source_salesforce/rate_limiting.py index 344a6412e0245..879d933ef4860 100644 --- a/airbyte-integrations/connectors/source-salesforce/source_salesforce/rate_limiting.py +++ b/airbyte-integrations/connectors/source-salesforce/source_salesforce/rate_limiting.py @@ -15,19 +15,50 @@ exceptions.ReadTimeout, exceptions.ConnectionError, exceptions.HTTPError, + # We've had a couple of customers with ProtocolErrors, namely: + # * A self-managed instance during `BulkSalesforceStream.download_data`. This customer had an abnormally high number of ConnectionError + # which seems to indicate problems with his network infrastructure in general. The exact error was: `urllib3.exceptions.ProtocolError: ('Connection broken: IncompleteRead(905 bytes read, 119 more expected)', IncompleteRead(905 bytes read, 119 more expected))` + # * A cloud customer with very long syncs. All those syncs would end up with the following error: `urllib3.exceptions.ProtocolError: ("Connection broken: InvalidChunkLength(got length b'', 0 bytes read)", InvalidChunkLength(got length b'', 0 bytes read))` + # Without much more information, we will make it retryable hoping that performing the same request will work. + exceptions.ChunkedEncodingError, + # We've had examples where the response from Salesforce was not a JSON response. Those cases where error cases though. For example: + # https://github.com/airbytehq/airbyte-internal-issues/issues/6855. We will assume that this is an edge issue and that retry should help + exceptions.JSONDecodeError, ) +_RETRYABLE_400_STATUS_CODES = { + # Using debug mode and breakpointing on the issue, we were able to validate that there issues are retryable. We've also opened a case + # with Salesforce to try to understand what is causing that as the response does not have a body. + 406, + # Most of the time, they don't have a body but there was one from the Salesforce Edge mentioning "We are setting things up. This process + # can take a few minutes. This page will auto-refresh when ready. If it takes too long, please contact support or visit our status + # page for more information." We therefore assume this is a transient error and will retry on it. + 420, + codes.too_many_requests, +} + + logger = logging.getLogger("airbyte") -def default_backoff_handler(max_tries: int, factor: int, **kwargs): +def default_backoff_handler(max_tries: int, backoff_method=None, backoff_params=None): + if backoff_method is None or backoff_params is None: + if not (backoff_method is None and backoff_params is None): + raise ValueError("Both `backoff_method` and `backoff_params` need to be provided if one is provided") + backoff_method = backoff.expo + backoff_params = {"factor": 15} + def log_retry_attempt(details): _, exc, _ = sys.exc_info() logger.info(str(exc)) logger.info(f"Caught retryable error after {details['tries']} tries. Waiting {details['wait']} seconds then retrying...") def should_give_up(exc): - give_up = exc.response is not None and exc.response.status_code != codes.too_many_requests and 400 <= exc.response.status_code < 500 + give_up = ( + exc.response is not None + and exc.response.status_code not in _RETRYABLE_400_STATUS_CODES + and 400 <= exc.response.status_code < 500 + ) # Salesforce can return an error with a limit using a 403 code error. if exc.response is not None and exc.response.status_code == codes.forbidden: @@ -40,12 +71,11 @@ def should_give_up(exc): return give_up return backoff.on_exception( - backoff.expo, + backoff_method, TRANSIENT_EXCEPTIONS, jitter=None, on_backoff=log_retry_attempt, giveup=should_give_up, max_tries=max_tries, - factor=factor, - **kwargs, + **backoff_params, ) diff --git a/airbyte-integrations/connectors/source-salesforce/source_salesforce/source.py b/airbyte-integrations/connectors/source-salesforce/source_salesforce/source.py index cb2293731d019..ce24d9bb14456 100644 --- a/airbyte-integrations/connectors/source-salesforce/source_salesforce/source.py +++ b/airbyte-integrations/connectors/source-salesforce/source_salesforce/source.py @@ -3,9 +3,10 @@ # import logging -from datetime import datetime +from datetime import datetime, timedelta, timezone from typing import Any, Iterator, List, Mapping, MutableMapping, Optional, Tuple, Union +import isodate import pendulum import requests from airbyte_cdk import AirbyteLogger @@ -18,7 +19,7 @@ from airbyte_cdk.sources.source import TState from airbyte_cdk.sources.streams import Stream from airbyte_cdk.sources.streams.concurrent.adapters import StreamFacade -from airbyte_cdk.sources.streams.concurrent.cursor import ConcurrentCursor, CursorField, NoopCursor +from airbyte_cdk.sources.streams.concurrent.cursor import ConcurrentCursor, CursorField, FinalStateCursor from airbyte_cdk.sources.streams.http.auth import TokenAuthenticator from airbyte_cdk.sources.utils.schema_helpers import InternalConfig from airbyte_cdk.utils.traced_exception import AirbyteTracedException @@ -29,6 +30,7 @@ from .api import PARENT_SALESFORCE_OBJECTS, UNSUPPORTED_BULK_API_SALESFORCE_OBJECTS, UNSUPPORTED_FILTERING_STREAMS, Salesforce from .streams import ( + LOOKBACK_SECONDS, BulkIncrementalSalesforceStream, BulkSalesforceStream, BulkSalesforceSubStream, @@ -172,9 +174,8 @@ def prepare_stream(cls, stream_name: str, json_schema, sobject_options, sf_objec return stream_class, stream_kwargs - @classmethod def generate_streams( - cls, + self, config: Mapping[str, Any], stream_objects: Mapping[str, Any], sf_object: Salesforce, @@ -184,67 +185,86 @@ def generate_streams( schemas = sf_object.generate_schemas(stream_objects) default_args = [sf_object, authenticator, config] streams = [] + state_manager = ConnectorStateManager(stream_instance_map={s.name: s for s in streams}, state=self.state) for stream_name, sobject_options in stream_objects.items(): json_schema = schemas.get(stream_name, {}) - stream_class, kwargs = cls.prepare_stream(stream_name, json_schema, sobject_options, *default_args) + stream_class, kwargs = self.prepare_stream(stream_name, json_schema, sobject_options, *default_args) parent_name = PARENT_SALESFORCE_OBJECTS.get(stream_name, {}).get("parent_name") if parent_name: # get minimal schema required for getting proper class name full_refresh/incremental, rest/bulk parent_schema = PARENT_SALESFORCE_OBJECTS.get(stream_name, {}).get("schema_minimal") - parent_class, parent_kwargs = cls.prepare_stream(parent_name, parent_schema, sobject_options, *default_args) + parent_class, parent_kwargs = self.prepare_stream(parent_name, parent_schema, sobject_options, *default_args) kwargs["parent"] = parent_class(**parent_kwargs) stream = stream_class(**kwargs) - api_type = cls._get_api_type(stream_name, json_schema, config.get("force_use_bulk_api", False)) + api_type = self._get_api_type(stream_name, json_schema, config.get("force_use_bulk_api", False)) if api_type == "rest" and not stream.primary_key and stream.too_many_properties: logger.warning( f"Can not instantiate stream {stream_name}. It is not supported by the BULK API and can not be " "implemented via REST because the number of its properties exceeds the limit and it lacks a primary key." ) continue - streams.append(stream) + + streams.append(self._wrap_for_concurrency(config, stream, state_manager)) + streams.append(self._wrap_for_concurrency(config, Describe(sf_api=sf_object, catalog=self.catalog), state_manager)) return streams + def _wrap_for_concurrency(self, config, stream, state_manager): + stream_slicer_cursor = None + if stream.cursor_field: + stream_slicer_cursor = self._create_stream_slicer_cursor(config, state_manager, stream) + if hasattr(stream, "set_cursor"): + stream.set_cursor(stream_slicer_cursor) + if hasattr(stream, "parent") and hasattr(stream.parent, "set_cursor"): + stream_slicer_cursor = self._create_stream_slicer_cursor(config, state_manager, stream) + stream.parent.set_cursor(stream_slicer_cursor) + + if not stream_slicer_cursor or self._get_sync_mode_from_catalog(stream) == SyncMode.full_refresh: + cursor = FinalStateCursor( + stream_name=stream.name, stream_namespace=stream.namespace, message_repository=self.message_repository + ) + state = None + else: + cursor = stream_slicer_cursor + state = cursor.state + return StreamFacade.create_from_stream(stream, self, logger, state, cursor) + def streams(self, config: Mapping[str, Any]) -> List[Stream]: if not config.get("start_date"): config["start_date"] = (datetime.now() - relativedelta(years=self.START_DATE_OFFSET_IN_YEARS)).strftime(self.DATETIME_FORMAT) sf = self._get_sf_object(config) stream_objects = sf.get_validated_streams(config=config, catalog=self.catalog) streams = self.generate_streams(config, stream_objects, sf) - streams.append(Describe(sf_api=sf, catalog=self.catalog)) - state_manager = ConnectorStateManager(stream_instance_map={s.name: s for s in streams}, state=self.state) - - configured_streams = [] - - for stream in streams: - sync_mode = self._get_sync_mode_from_catalog(stream) - if sync_mode == SyncMode.full_refresh: - cursor = NoopCursor() - state = None - else: - cursor_field_key = stream.cursor_field or "" - if not isinstance(cursor_field_key, str): - raise AssertionError(f"A string cursor field key is required, but got {cursor_field_key}.") - cursor_field = CursorField(cursor_field_key) - legacy_state = state_manager.get_stream_state(stream.name, stream.namespace) - cursor = ConcurrentCursor( - stream.name, - stream.namespace, - legacy_state, - self.message_repository, - state_manager, - stream.state_converter, - cursor_field, - self._get_slice_boundary_fields(stream, state_manager), - config["start_date"], - ) - state = cursor.state + return streams - configured_streams.append(StreamFacade.create_from_stream(stream, self, logger, state, cursor)) - return configured_streams + def _create_stream_slicer_cursor( + self, config: Mapping[str, Any], state_manager: ConnectorStateManager, stream: Stream + ) -> ConcurrentCursor: + """ + We have moved the generation of stream slices to the concurrent CDK cursor + """ + cursor_field_key = stream.cursor_field or "" + if not isinstance(cursor_field_key, str): + raise AssertionError(f"Nested cursor field are not supported hence type str is expected but got {cursor_field_key}.") + cursor_field = CursorField(cursor_field_key) + stream_state = state_manager.get_stream_state(stream.name, stream.namespace) + return ConcurrentCursor( + stream.name, + stream.namespace, + stream_state, + self.message_repository, + state_manager, + stream.state_converter, + cursor_field, + self._get_slice_boundary_fields(stream, state_manager), + datetime.fromtimestamp(pendulum.parse(config["start_date"]).timestamp(), timezone.utc), + stream.state_converter.get_end_provider(), + timedelta(seconds=LOOKBACK_SECONDS), + isodate.parse_duration(config["stream_slice_step"]) if "stream_slice_step" in config else timedelta(days=30), + ) def _get_slice_boundary_fields(self, stream: Stream, state_manager: ConnectorStateManager) -> Optional[Tuple[str, str]]: return ("start_date", "end_date") diff --git a/airbyte-integrations/connectors/source-salesforce/source_salesforce/streams.py b/airbyte-integrations/connectors/source-salesforce/source_salesforce/streams.py index da5064f8b1488..f1daa59db5348 100644 --- a/airbyte-integrations/connectors/source-salesforce/source_salesforce/streams.py +++ b/airbyte-integrations/connectors/source-salesforce/source_salesforce/streams.py @@ -11,13 +11,16 @@ import uuid from abc import ABC from contextlib import closing +from datetime import timedelta from typing import Any, Callable, Iterable, List, Mapping, MutableMapping, Optional, Tuple, Type, Union +import backoff import pandas as pd import pendulum import requests # type: ignore[import] from airbyte_cdk.models import ConfiguredAirbyteCatalog, FailureType, SyncMode from airbyte_cdk.sources.streams.availability_strategy import AvailabilityStrategy +from airbyte_cdk.sources.streams.concurrent.cursor import Cursor from airbyte_cdk.sources.streams.concurrent.state_converters.datetime_stream_state_converter import IsoMillisConcurrentStreamStateConverter from airbyte_cdk.sources.streams.core import Stream, StreamData from airbyte_cdk.sources.streams.http import HttpStream, HttpSubStream @@ -31,7 +34,7 @@ from .api import PARENT_SALESFORCE_OBJECTS, UNSUPPORTED_FILTERING_STREAMS, Salesforce from .availability_strategy import SalesforceAvailabilityStrategy from .exceptions import SalesforceException, TmpFileIOError -from .rate_limiting import default_backoff_handler +from .rate_limiting import TRANSIENT_EXCEPTIONS, default_backoff_handler # https://stackoverflow.com/a/54517228 CSV_FIELD_SIZE_LIMIT = int(ctypes.c_ulong(-1).value // 2) @@ -39,10 +42,11 @@ DEFAULT_ENCODING = "utf-8" LOOKBACK_SECONDS = 600 # based on https://trailhead.salesforce.com/trailblazer-community/feed/0D54V00007T48TASAZ +_JOB_TRANSIENT_ERRORS_MAX_RETRY = 1 class SalesforceStream(HttpStream, ABC): - state_converter = IsoMillisConcurrentStreamStateConverter() + state_converter = IsoMillisConcurrentStreamStateConverter(is_sequential_state=False) page_size = 2000 transformer = TypeTransformer(TransformConfig.DefaultSchemaNormalization) encoding = DEFAULT_ENCODING @@ -140,7 +144,7 @@ def __init__(self, properties: Mapping[str, Any]): class RestSalesforceStream(SalesforceStream): - state_converter = IsoMillisConcurrentStreamStateConverter() + state_converter = IsoMillisConcurrentStreamStateConverter(is_sequential_state=False) def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) @@ -294,6 +298,7 @@ def _read_pages( # Always return an empty generator just in case no records were ever yielded yield from [] + @default_backoff_handler(max_tries=5, backoff_method=backoff.constant, backoff_params={"interval": 5}) def _fetch_next_page_for_chunk( self, stream_slice: Mapping[str, Any] = None, @@ -317,7 +322,7 @@ def _fetch_next_page_for_chunk( class BatchedSubStream(HttpSubStream): - state_converter = IsoMillisConcurrentStreamStateConverter() + state_converter = IsoMillisConcurrentStreamStateConverter(is_sequential_state=False) SLICE_BATCH_SIZE = 200 def stream_slices( @@ -351,8 +356,15 @@ def path(self, next_page_token: Mapping[str, Any] = None, **kwargs: Any) -> str: transformer = TypeTransformer(TransformConfig.CustomSchemaNormalization | TransformConfig.DefaultSchemaNormalization) - @default_backoff_handler(max_tries=5, factor=15) + @default_backoff_handler(max_tries=5, backoff_method=backoff.expo, backoff_params={"factor": 15}) def _send_http_request(self, method: str, url: str, json: dict = None, headers: dict = None, stream: bool = False): + """ + This method should be used when you don't have to read data from the HTTP body. Else, you will have to retry when you actually read + the response buffer (which is either by calling `json` or `iter_content`) + """ + return self._non_retryable_send_http_request(method, url, json, headers, stream) + + def _non_retryable_send_http_request(self, method: str, url: str, json: dict = None, headers: dict = None, stream: bool = False): headers = self.authenticator.get_auth_header() if not headers else headers | self.authenticator.get_auth_header() response = self._session.request(method, url=url, headers=headers, json=json, stream=stream) if response.status_code not in [200, 204]: @@ -360,15 +372,22 @@ def _send_http_request(self, method: str, url: str, json: dict = None, headers: response.raise_for_status() return response + @default_backoff_handler(max_tries=5, backoff_method=backoff.expo, backoff_params={"factor": 15}) + def _create_stream_job(self, query: str, url: str) -> Optional[str]: + json = {"operation": "queryAll", "query": query, "contentType": "CSV", "columnDelimiter": "COMMA", "lineEnding": "LF"} + response = self._non_retryable_send_http_request("POST", url, json=json) + job_id: str = response.json()["id"] + return job_id + def create_stream_job(self, query: str, url: str) -> Optional[str]: """ docs: https://developer.salesforce.com/docs/atlas.en-us.api_asynch.meta/api_asynch/create_job.html + + Note that we want to retry during connection issues as well. Those can occur when calling `.json()`. Even in the case of a + connection error during a HTTPError, we will retry as else, we won't be able to take the right action. """ - json = {"operation": "queryAll", "query": query, "contentType": "CSV", "columnDelimiter": "COMMA", "lineEnding": "LF"} try: - response = self._send_http_request("POST", url, json=json) - job_id: str = response.json()["id"] - return job_id + return self._create_stream_job(query, url) except exceptions.HTTPError as error: if error.response.status_code in [codes.FORBIDDEN, codes.BAD_REQUEST]: # A part of streams can't be used by BULK API. Every API version can have a custom list of @@ -383,9 +402,7 @@ def create_stream_job(self, query: str, url: str) -> Optional[str]: # updated query: "Select Name, (Select Subject,ActivityType from ActivityHistories) from Contact" # The second variant forces customisation for every case (ActivityHistory, ActivityHistories etc). # And the main problem is these subqueries doesn't support CSV response format. - error_data = error.response.json()[0] - error_code = error_data.get("errorCode") - error_message = error_data.get("message", "") + error_code, error_message = self._extract_error_code_and_message(error.response) if error_message == "Selecting compound data not supported in Bulk Query" or ( error_code == "INVALIDENTITY" and "is not supported by the Bulk API" in error_message ): @@ -401,7 +418,7 @@ def create_stream_job(self, query: str, url: str) -> Optional[str]: elif error.response.status_code == codes.FORBIDDEN and error_code == "REQUEST_LIMIT_EXCEEDED": self.logger.error( f"Cannot receive data for stream '{self.name}' ," - f"sobject options: {self.sobject_options}, Error message: '{error_data.get('message')}'" + f"sobject options: {self.sobject_options}, Error message: '{error_message}'" ) elif error.response.status_code == codes.BAD_REQUEST and error_message.endswith("does not support query"): self.logger.error( @@ -437,9 +454,7 @@ def wait_for_job(self, url: str) -> str: try: job_info = self._send_http_request("GET", url=url).json() except exceptions.HTTPError as error: - error_data = error.response.json()[0] - error_code = error_data.get("errorCode") - error_message = error_data.get("message", "") + error_code, error_message = self._extract_error_code_and_message(error.response) if ( "We can't complete the action because enabled transaction security policies took too long to complete." in error_message and error_code == "TXN_SECURITY_METERING_ERROR" @@ -473,6 +488,19 @@ def wait_for_job(self, url: str) -> str: self.logger.warning(f"Not wait the {self.name} data for {self.DEFAULT_WAIT_TIMEOUT_SECONDS} seconds, data: {job_info}!!") return job_status + def _extract_error_code_and_message(self, response: requests.Response) -> tuple[Optional[str], str]: + try: + error_data = response.json()[0] + return error_data.get("errorCode"), error_data.get("message", "") + except exceptions.JSONDecodeError: + self.logger.warning(f"The response for `{response.request.url}` is not a JSON but was `{response.content}`") + except IndexError: + self.logger.warning( + f"The response for `{response.request.url}` was expected to be a list with at least one element but was `{response.content}`" + ) + + return None, "" + def execute_job(self, query: str, url: str) -> Tuple[Optional[str], Optional[str]]: job_status = "Failed" for i in range(0, self.MAX_RETRY_NUMBER): @@ -520,6 +548,7 @@ def get_response_encoding(self, headers) -> str: return self.encoding + @default_backoff_handler(max_tries=5, backoff_method=backoff.constant, backoff_params={"interval": 5}) def download_data(self, url: str, chunk_size: int = 1024) -> tuple[str, str, dict]: """ Retrieves binary data result from successfully `executed_job`, using chunks, to avoid local memory limitations. @@ -529,7 +558,7 @@ def download_data(self, url: str, chunk_size: int = 1024) -> tuple[str, str, dic """ # set filepath for binary data from response tmp_file = str(uuid.uuid4()) - with closing(self._send_http_request("GET", url, headers={"Accept-Encoding": "gzip"}, stream=True)) as response, open( + with closing(self._non_retryable_send_http_request("GET", url, headers={"Accept-Encoding": "gzip"}, stream=True)) as response, open( tmp_file, "wb" ) as data_file: response_headers = response.headers @@ -615,6 +644,7 @@ def read_records( cursor_field: List[str] = None, stream_slice: Mapping[str, Any] = None, stream_state: Mapping[str, Any] = None, + call_count: int = 0, ) -> Iterable[Mapping[str, Any]]: stream_state = stream_state or {} next_page_token = None @@ -643,7 +673,17 @@ def read_records( while True: req = PreparedRequest() req.prepare_url(f"{job_full_url}/results", {"locator": salesforce_bulk_api_locator}) - tmp_file, response_encoding, response_headers = self.download_data(url=req.url) + try: + tmp_file, response_encoding, response_headers = self.download_data(url=req.url) + except TRANSIENT_EXCEPTIONS as exception: + if call_count >= _JOB_TRANSIENT_ERRORS_MAX_RETRY: + self.logger.error(f"Downloading data failed even after {call_count} retries. Stopping retry and raising exception") + raise exception + self.logger.warning(f"Downloading data failed after {call_count} retries. Retrying the whole job...") + call_count += 1 + yield from self.read_records(sync_mode, cursor_field, stream_slice, stream_state, call_count=call_count) + return + for record in self.read_with_chunks(tmp_file, response_encoding): yield record @@ -667,7 +707,10 @@ def get_standard_instance(self) -> SalesforceStream: stream_kwargs.update({"replication_key": self.replication_key, "start_date": self.start_date}) new_cls = IncrementalRestSalesforceStream - return new_cls(**stream_kwargs) + standard_instance = new_cls(**stream_kwargs) + if hasattr(standard_instance, "set_cursor"): + standard_instance.set_cursor(self._stream_slicer_cursor) + return standard_instance class BulkSalesforceSubStream(BatchedSubStream, BulkSalesforceStream): @@ -694,24 +737,22 @@ def __init__(self, replication_key: str, stream_slice_step: str = "P30D", **kwar super().__init__(**kwargs) self.replication_key = replication_key self._stream_slice_step = stream_slice_step + self._stream_slicer_cursor = None + + def set_cursor(self, cursor: Cursor) -> None: + self._stream_slicer_cursor = cursor def stream_slices( self, *, sync_mode: SyncMode, cursor_field: List[str] = None, stream_state: Mapping[str, Any] = None ) -> Iterable[Optional[Mapping[str, Any]]]: - now = pendulum.now(tz="UTC") - assert LOOKBACK_SECONDS is not None and LOOKBACK_SECONDS >= 0 - - initial_date = self.get_start_date_from_state(stream_state) - pendulum.Duration(seconds=LOOKBACK_SECONDS) - slice_start = initial_date - while slice_start < now: - slice_end = slice_start + self.stream_slice_step - self._slice = { + if not self._stream_slicer_cursor: + raise ValueError("Cursor should be set at this point") + + for slice_start, slice_end in self._stream_slicer_cursor.generate_slices(): + yield { "start_date": slice_start.isoformat(timespec="milliseconds"), - "end_date": min(slice_end, now).isoformat(timespec="milliseconds"), + "end_date": slice_end.isoformat(timespec="milliseconds"), } - yield self._slice - - slice_start += self.stream_slice_step @property def stream_slice_step(self) -> pendulum.Duration: @@ -791,7 +832,7 @@ def request_params( class Describe(Stream): - state_converter = IsoMillisConcurrentStreamStateConverter() + state_converter = IsoMillisConcurrentStreamStateConverter(is_sequential_state=False) """ Stream of sObjects' (Salesforce Objects) describe: https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/resources_sobject_describe.htm diff --git a/airbyte-integrations/connectors/source-salesforce/unit_tests/api_test.py b/airbyte-integrations/connectors/source-salesforce/unit_tests/api_test.py index 6b31ad9d7d9da..ec266bda90751 100644 --- a/airbyte-integrations/connectors/source-salesforce/unit_tests/api_test.py +++ b/airbyte-integrations/connectors/source-salesforce/unit_tests/api_test.py @@ -14,13 +14,24 @@ import pendulum import pytest import requests_mock -from airbyte_cdk.models import AirbyteStream, ConfiguredAirbyteCatalog, ConfiguredAirbyteStream, DestinationSyncMode, SyncMode, Type +from airbyte_cdk.models import ( + AirbyteStateBlob, + AirbyteStream, + ConfiguredAirbyteCatalog, + ConfiguredAirbyteStream, + DestinationSyncMode, + StreamDescriptor, + SyncMode, + Type, +) from airbyte_cdk.sources.streams import Stream from airbyte_cdk.sources.streams.concurrent.adapters import StreamFacade +from airbyte_cdk.test.catalog_builder import CatalogBuilder from airbyte_cdk.test.entrypoint_wrapper import read +from airbyte_cdk.test.state_builder import StateBuilder from airbyte_cdk.utils import AirbyteTracedException from conftest import encoding_symbols_parameters, generate_stream -from requests.exceptions import HTTPError +from requests.exceptions import ChunkedEncodingError, HTTPError from source_salesforce.api import Salesforce from source_salesforce.exceptions import AUTHENTICATION_ERROR_MESSAGE_MAPPING from source_salesforce.source import SourceSalesforce @@ -29,12 +40,19 @@ BulkIncrementalSalesforceStream, BulkSalesforceStream, BulkSalesforceSubStream, - Describe, IncrementalRestSalesforceStream, RestSalesforceStream, - SalesforceStream, ) +_A_CHUNKED_RESPONSE = [b"first chunk", b"second chunk"] +_A_JSON_RESPONSE = {"id": "any id"} +_A_SUCCESSFUL_JOB_CREATION_RESPONSE = {"state": "JobComplete"} +_A_PK = "a_pk" +_A_STREAM_NAME = "a_stream_name" + +_NUMBER_OF_DOWNLOAD_TRIES = 5 +_FIRST_CALL_FROM_JOB_CREATION = 1 + _ANY_CATALOG = ConfiguredAirbyteCatalog.parse_obj({"streams": []}) _ANY_CONFIG = {} _ANY_STATE = None @@ -388,129 +406,6 @@ def configure_request_params_mock(stream_1, stream_2): stream_2.request_params.return_value = {"q": "query"} -def test_rate_limit_bulk(stream_config, stream_api, bulk_catalog, state): - """ - Connector should stop the sync if one stream reached rate limit - stream_1, stream_2, stream_3, ... - While reading `stream_1` if 403 (Rate Limit) is received, it should finish that stream with success and stop the sync process. - Next streams should not be executed. - """ - stream_config.update({"start_date": "2021-10-01"}) - stream_1: BulkIncrementalSalesforceStream = generate_stream("Account", stream_config, stream_api) - stream_2: BulkIncrementalSalesforceStream = generate_stream("Asset", stream_config, stream_api) - streams = [stream_1, stream_2] - configure_request_params_mock(stream_1, stream_2) - - stream_1.page_size = 6 - stream_1.state_checkpoint_interval = 5 - - source = SourceSalesforce(_ANY_CATALOG, _ANY_CONFIG, _ANY_STATE) - source.streams = Mock() - source.streams.return_value = streams - - json_response = [{"errorCode": "REQUEST_LIMIT_EXCEEDED", "message": "TotalRequests Limit exceeded."}] - with requests_mock.Mocker() as m: - for stream in streams: - creation_responses = [] - for page in [1, 2]: - job_id = f"fake_job_{page}_{stream.name}" - creation_responses.append({"json": {"id": job_id}}) - - m.register_uri("GET", stream.path() + f"/{job_id}", json={"state": "JobComplete"}) - - resp = ["Field1,LastModifiedDate,Id"] + [f"test,2021-10-0{i},{i}" for i in range(1, 7)] # 6 records per page - - if page == 1: - # Read the first page successfully - m.register_uri("GET", stream.path() + f"/{job_id}/results", text="\n".join(resp)) - else: - # Requesting for results when reading second page should fail with 403 (Rate Limit error) - m.register_uri("GET", stream.path() + f"/{job_id}/results", status_code=403, json=json_response) - - m.register_uri("DELETE", stream.path() + f"/{job_id}") - - m.register_uri("POST", stream.path(), creation_responses) - result = read(source=source, config=stream_config, catalog=bulk_catalog, state=state) - assert stream_1.request_params.called - assert ( - not stream_2.request_params.called - ), "The second stream should not be executed, because the first stream finished with Rate Limit." - - records = result.records - assert len(records) == 6 # stream page size: 6 - - state_record = result.state_messages[0] - assert state_record.state.data["Account"]["LastModifiedDate"] == "2021-10-05T00:00:00+00:00" # state checkpoint interval is 5. - - -def test_rate_limit_rest(stream_config, stream_api, rest_catalog, state): - """ - Connector should stop the sync if one stream reached rate limit - stream_1, stream_2, stream_3, ... - While reading `stream_1` if 403 (Rate Limit) is received, it should finish that stream with success and stop the sync process. - Next streams should not be executed. - """ - stream_config.update({"start_date": "2021-11-01"}) - - stream_1: IncrementalRestSalesforceStream = generate_stream("KnowledgeArticle", stream_config, stream_api) - stream_2: IncrementalRestSalesforceStream = generate_stream("AcceptedEventRelation", stream_config, stream_api) - - stream_1.state_checkpoint_interval = 3 - streams = [stream_1, stream_2] - configure_request_params_mock(stream_1, stream_2) - - source = SourceSalesforce(_ANY_CATALOG, _ANY_CONFIG, _ANY_STATE) - source.streams = Mock() - source.streams.return_value = streams - - next_page_url = "/services/data/v57.0/query/012345" - response_1 = { - "done": False, - "totalSize": 10, - "nextRecordsUrl": next_page_url, - "records": [ - { - "ID": 1, - "LastModifiedDate": "2021-11-15", - }, - { - "ID": 2, - "LastModifiedDate": "2021-11-16", - }, - { - "ID": 3, - "LastModifiedDate": "2021-11-17", # check point interval - }, - { - "ID": 4, - "LastModifiedDate": "2021-11-18", - }, - { - "ID": 5, - "LastModifiedDate": "2021-11-19", - }, - ], - } - response_2 = [{"errorCode": "REQUEST_LIMIT_EXCEEDED", "message": "TotalRequests Limit exceeded."}] - - with requests_mock.Mocker() as m: - m.register_uri("GET", stream_1.path(), json=response_1, status_code=200) - m.register_uri("GET", next_page_url, json=response_2, status_code=403) - - result = read(source=source, config=stream_config, catalog=rest_catalog, state=state) - - assert stream_1.request_params.called - assert ( - not stream_2.request_params.called - ), "The second stream should not be executed, because the first stream finished with Rate Limit." - - records = result.records - assert len(records) == 5 - - state_record = result.state_messages[0] - assert state_record.state.data["KnowledgeArticle"]["LastModifiedDate"] == "2021-11-17T00:00:00+00:00" - - def test_pagination_rest(stream_config, stream_api): stream_name = "AcceptedEventRelation" stream: RestSalesforceStream = generate_stream(stream_name, stream_config, stream_api) @@ -578,6 +473,52 @@ def test_csv_reader_dialect_unix(): assert result == data +@patch("source_salesforce.source.BulkSalesforceStream._non_retryable_send_http_request") +def test_given_retryable_error_when_download_data_then_retry(send_http_request_patch): + send_http_request_patch.return_value.iter_content.side_effect = [HTTPError(), _A_CHUNKED_RESPONSE] + BulkSalesforceStream(stream_name=_A_STREAM_NAME, sf_api=Mock(), pk=_A_PK).download_data(url="any url") + assert send_http_request_patch.call_count == 2 + + +@patch("source_salesforce.source.BulkSalesforceStream._non_retryable_send_http_request") +def test_given_first_download_fail_when_download_data_then_retry_job_only_once(send_http_request_patch): + sf_api = Mock() + sf_api.generate_schema.return_value = {} + sf_api.instance_url = "http://test_given_first_download_fail_when_download_data_then_retry_job.com" + job_creation_return_values = [_A_JSON_RESPONSE, _A_SUCCESSFUL_JOB_CREATION_RESPONSE] + send_http_request_patch.return_value.json.side_effect = job_creation_return_values * 2 + send_http_request_patch.return_value.iter_content.side_effect = HTTPError() + + with pytest.raises(Exception): + list(BulkSalesforceStream(stream_name=_A_STREAM_NAME, sf_api=sf_api, pk=_A_PK).read_records(SyncMode.full_refresh)) + + assert send_http_request_patch.call_count == (len(job_creation_return_values) + _NUMBER_OF_DOWNLOAD_TRIES) * 2 + + +@patch("source_salesforce.source.BulkSalesforceStream._non_retryable_send_http_request") +def test_given_http_errors_when_create_stream_job_then_retry(send_http_request_patch): + send_http_request_patch.return_value.json.side_effect = [HTTPError(), _A_JSON_RESPONSE] + BulkSalesforceStream(stream_name=_A_STREAM_NAME, sf_api=Mock(), pk=_A_PK).create_stream_job(query="any query", url="any url") + assert send_http_request_patch.call_count == 2 + + +@patch("source_salesforce.source.BulkSalesforceStream._non_retryable_send_http_request") +def test_given_fail_with_http_errors_when_create_stream_job_then_handle_error(send_http_request_patch): + mocked_response = Mock() + mocked_response.status_code = 666 + send_http_request_patch.return_value.json.side_effect = HTTPError(response=mocked_response) + + with pytest.raises(HTTPError): + BulkSalesforceStream(stream_name=_A_STREAM_NAME, sf_api=Mock(), pk=_A_PK).create_stream_job(query="any query", url="any url") + + +@patch("source_salesforce.source.BulkSalesforceStream._non_retryable_send_http_request") +def test_given_retryable_error_that_are_not_http_errors_when_create_stream_job_then_retry(send_http_request_patch): + send_http_request_patch.return_value.json.side_effect = [ChunkedEncodingError(), _A_JSON_RESPONSE] + BulkSalesforceStream(stream_name=_A_STREAM_NAME, sf_api=Mock(), pk=_A_PK).create_stream_job(query="any query", url="any url") + assert send_http_request_patch.call_count == 2 + + @pytest.mark.parametrize( "stream_names,catalog_stream_names,", ( @@ -885,15 +826,15 @@ def test_bulk_stream_error_on_wait_for_job(requests_mock, stream_config, stream_ @freezegun.freeze_time("2023-01-01") @pytest.mark.parametrize( "lookback, stream_slice_step, expected_len_stream_slices, expect_error", - [(None, "P30D", 0, True), (0, "P30D", 158, False), (10, "P1D", 4732, False), (10, "PT12H", 9463, False), (-1, "P30D", 0, True)], - ids=["lookback-is-none", "lookback-is-0-step-30D", "lookback-is-valid-step-1D", "lookback-is-valid-step-12H", "lookback-is-negative"], + [(0, "P30D", 158, False)], + ids=["lookback-is-0-step-30D"], ) def test_bulk_stream_slices( stream_config_date_format, stream_api, lookback, expect_error, stream_slice_step: str, expected_len_stream_slices: int ): stream_config_date_format["stream_slice_step"] = stream_slice_step - stream: BulkIncrementalSalesforceStream = generate_stream("FakeBulkStream", stream_config_date_format, stream_api) - with patch("source_salesforce.streams.LOOKBACK_SECONDS", lookback): + with patch("source_salesforce.source.LOOKBACK_SECONDS", lookback): + stream: BulkIncrementalSalesforceStream = generate_stream("FakeBulkStream", stream_config_date_format, stream_api) if expect_error: with pytest.raises(AssertionError): list(stream.stream_slices(sync_mode=SyncMode.full_refresh)) @@ -920,11 +861,14 @@ def test_bulk_stream_slices( def test_bulk_stream_request_params_states(stream_config_date_format, stream_api, bulk_catalog, requests_mock): """Check that request params ignore records cursor and use start date from slice ONLY""" stream_config_date_format.update({"start_date": "2023-01-01"}) - stream: BulkIncrementalSalesforceStream = generate_stream("Account", stream_config_date_format, stream_api) + state = StateBuilder().with_stream_state("Account", {"LastModifiedDate": "2023-01-01T10:20:10.000Z"}).build() - source = SourceSalesforce(_ANY_CATALOG, _ANY_CONFIG, _ANY_STATE) + source = SourceSalesforce(CatalogBuilder().with_stream("Account", SyncMode.full_refresh).build(), _ANY_CONFIG, _ANY_STATE) source.streams = Mock() - source.streams.return_value = [stream] + source.streams.return_value = [generate_stream("Account", stream_config_date_format, stream_api, state=state, legacy=False)] + + # using legacy state to configure HTTP requests + stream: BulkIncrementalSalesforceStream = generate_stream("Account", stream_config_date_format, stream_api, state=state, legacy=True) job_id_1 = "fake_job_1" requests_mock.register_uri("GET", stream.path() + f"/{job_id_1}", [{"json": {"state": "JobComplete"}}]) @@ -950,30 +894,29 @@ def test_bulk_stream_request_params_states(stream_config_date_format, stream_api requests_mock.register_uri("PATCH", stream.path() + f"/{job_id_3}") logger = logging.getLogger("airbyte") - state = {"Account": {"LastModifiedDate": "2023-01-01T10:10:10.000Z"}} bulk_catalog.streams.pop(1) - with patch("source_salesforce.streams.LOOKBACK_SECONDS", 0): - result = [i for i in source.read(logger=logger, config=stream_config_date_format, catalog=bulk_catalog, state=state)] - actual_state_values = [item.state.data.get("Account").get(stream.cursor_field) for item in result if item.type == Type.STATE] - # assert request params - assert ( + result = [i for i in source.read(logger=logger, config=stream_config_date_format, catalog=bulk_catalog, state=state)] + + # assert request params: has requests might not be performed in a specific order because of concurrent CDK, we match on any request + all_requests = {request.text for request in queries_history.request_history} + assert any([ "LastModifiedDate >= 2023-01-01T10:10:10.000+00:00 AND LastModifiedDate < 2023-01-31T10:10:10.000+00:00" - in queries_history.request_history[0].text - ) - assert ( + in request for request in all_requests + ]) + assert any([ "LastModifiedDate >= 2023-01-31T10:10:10.000+00:00 AND LastModifiedDate < 2023-03-02T10:10:10.000+00:00" - in queries_history.request_history[1].text - ) - assert ( + in request for request in all_requests + ]) + assert any([ "LastModifiedDate >= 2023-03-02T10:10:10.000+00:00 AND LastModifiedDate < 2023-04-01T00:00:00.000+00:00" - in queries_history.request_history[2].text - ) + in request for request in all_requests + ]) - # assert states - # if connector meets record with cursor `2023-04-01` out of current slice range 2023-01-31 <> 2023-03-02, we ignore all other values and set state to slice end_date - expected_state_values = ["2023-01-15T00:00:00+00:00", "2023-03-02T10:10:10+00:00", "2023-04-01T00:00:00+00:00"] - assert actual_state_values == expected_state_values + # as the execution is concurrent, we can only assert the last state message here + last_actual_state = [item.state.stream.stream_state.dict() for item in result if item.type == Type.STATE][-1] + last_expected_state = {"slices": [{"start": "2023-01-01T00:00:00.000Z", "end": "2023-04-01T00:00:00.000Z"}], "state_type": "date-range"} + assert last_actual_state == last_expected_state def test_request_params_incremental(stream_config_date_format, stream_api): @@ -993,10 +936,8 @@ def test_request_params_substream(stream_config_date_format, stream_api): @freezegun.freeze_time("2023-03-20") def test_stream_slices_for_substream(stream_config, stream_api, requests_mock): """Test BulkSalesforceSubStream for ContentDocumentLink (+ parent ContentDocument) - ContentDocument return 1 record for each slice request. Given start/end date leads to 3 date slice for ContentDocument, thus 3 total records - ContentDocumentLink It means that ContentDocumentLink should have 2 slices, with 2 and 1 records in each """ diff --git a/airbyte-integrations/connectors/source-salesforce/unit_tests/config_builder.py b/airbyte-integrations/connectors/source-salesforce/unit_tests/config_builder.py new file mode 100644 index 0000000000000..7807f80744af6 --- /dev/null +++ b/airbyte-integrations/connectors/source-salesforce/unit_tests/config_builder.py @@ -0,0 +1,39 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + +from datetime import datetime +from typing import Any, Mapping + + +class ConfigBuilder: + def __init__(self) -> None: + self._config = { + "client_id": "fake_client_id", + "client_secret": "fake_client_secret", + "refresh_token": "fake_refresh_token", + "start_date": "2010-01-18T21:18:20Z", + "is_sandbox": False, + "wait_timeout": 15, + } + + def start_date(self, start_date: datetime) -> "ConfigBuilder": + self._config["start_date"] = start_date.strftime("%Y-%m-%dT%H:%M:%SZ") + return self + + def stream_slice_step(self, stream_slice_step: str) -> "ConfigBuilder": + self._config["stream_slice_step"] = stream_slice_step + return self + + def client_id(self, client_id: str) -> "ConfigBuilder": + self._config["client_id"] = client_id + return self + + def client_secret(self, client_secret: str) -> "ConfigBuilder": + self._config["client_secret"] = client_secret + return self + + def refresh_token(self, refresh_token: str) -> "ConfigBuilder": + self._config["refresh_token"] = refresh_token + return self + + def build(self) -> Mapping[str, Any]: + return self._config diff --git a/airbyte-integrations/connectors/source-salesforce/unit_tests/conftest.py b/airbyte-integrations/connectors/source-salesforce/unit_tests/conftest.py index 48defd5e5ff2a..f2e40adc81a8b 100644 --- a/airbyte-integrations/connectors/source-salesforce/unit_tests/conftest.py +++ b/airbyte-integrations/connectors/source-salesforce/unit_tests/conftest.py @@ -1,18 +1,23 @@ # # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # - import json -from typing import List +import pathlib +from typing import Any, List, Mapping from unittest.mock import Mock import pytest from airbyte_cdk.models import ConfiguredAirbyteCatalog from airbyte_cdk.test.state_builder import StateBuilder from airbyte_protocol.models import AirbyteStateMessage +from config_builder import ConfigBuilder from source_salesforce.api import Salesforce from source_salesforce.source import SourceSalesforce +_ANY_CATALOG = ConfiguredAirbyteCatalog.parse_obj({"streams": []}) +_ANY_CONFIG = ConfigBuilder().build() +_ANY_STATE = StateBuilder().build() + @pytest.fixture(autouse=True) def time_sleep_mock(mocker): @@ -22,14 +27,14 @@ def time_sleep_mock(mocker): @pytest.fixture(scope="module") def bulk_catalog(): - with open("unit_tests/bulk_catalog.json") as f: + with (pathlib.Path(__file__).parent / "bulk_catalog.json").open() as f: data = json.loads(f.read()) return ConfiguredAirbyteCatalog.parse_obj(data) @pytest.fixture(scope="module") def rest_catalog(): - with open("unit_tests/rest_catalog.json") as f: + with (pathlib.Path(__file__).parent / "rest_catalog.json").open() as f: data = json.loads(f.read()) return ConfiguredAirbyteCatalog.parse_obj(data) @@ -47,32 +52,22 @@ def state() -> List[AirbyteStateMessage]: @pytest.fixture(scope="module") def stream_config(): """Generates streams settings for BULK logic""" - return { - "client_id": "fake_client_id", - "client_secret": "fake_client_secret", - "refresh_token": "fake_refresh_token", - "start_date": "2010-01-18T21:18:20Z", - "is_sandbox": False, - "wait_timeout": 15, - } + return ConfigBuilder().build() -@pytest.fixture(scope="module") +@pytest.fixture(scope="function") def stream_config_date_format(): """Generates streams settings with `start_date` in format YYYY-MM-DD""" - return { - "client_id": "fake_client_id", - "client_secret": "fake_client_secret", - "refresh_token": "fake_refresh_token", - "start_date": "2010-01-18", - "is_sandbox": False, - "wait_timeout": 15, - } + config = ConfigBuilder().build() + config["start_date"] = "2010-01-18" + return config @pytest.fixture(scope="module") def stream_config_without_start_date(): """Generates streams settings for REST logic without start_date""" + config = ConfigBuilder().build() + config.pop("start_date") return { "client_id": "fake_client_id", "client_secret": "fake_client_secret", @@ -82,7 +77,7 @@ def stream_config_without_start_date(): } -def _stream_api(stream_config, describe_response_data=None): +def mock_stream_api(stream_config: Mapping[str, Any], describe_response_data=None): sf_object = Salesforce(**stream_config) sf_object.login = Mock() sf_object.access_token = Mock() @@ -97,37 +92,45 @@ def _stream_api(stream_config, describe_response_data=None): @pytest.fixture(scope="module") def stream_api(stream_config): - return _stream_api(stream_config) + return mock_stream_api(stream_config) @pytest.fixture(scope="module") def stream_api_v2(stream_config): describe_response_data = {"fields": [{"name": "LastModifiedDate", "type": "string"}, {"name": "BillingAddress", "type": "address"}]} - return _stream_api(stream_config, describe_response_data=describe_response_data) + return mock_stream_api(stream_config, describe_response_data=describe_response_data) @pytest.fixture(scope="module") def stream_api_pk(stream_config): describe_response_data = {"fields": [{"name": "LastModifiedDate", "type": "string"}, {"name": "Id", "type": "string"}]} - return _stream_api(stream_config, describe_response_data=describe_response_data) + return mock_stream_api(stream_config, describe_response_data=describe_response_data) @pytest.fixture(scope="module") def stream_api_v2_too_many_properties(stream_config): describe_response_data = {"fields": [{"name": f"Property{str(i)}", "type": "string"} for i in range(Salesforce.REQUEST_SIZE_LIMITS)]} describe_response_data["fields"].extend([{"name": "BillingAddress", "type": "address"}]) - return _stream_api(stream_config, describe_response_data=describe_response_data) + return mock_stream_api(stream_config, describe_response_data=describe_response_data) @pytest.fixture(scope="module") def stream_api_v2_pk_too_many_properties(stream_config): describe_response_data = {"fields": [{"name": f"Property{str(i)}", "type": "string"} for i in range(Salesforce.REQUEST_SIZE_LIMITS)]} describe_response_data["fields"].extend([{"name": "BillingAddress", "type": "address"}, {"name": "Id", "type": "string"}]) - return _stream_api(stream_config, describe_response_data=describe_response_data) + return mock_stream_api(stream_config, describe_response_data=describe_response_data) + +def generate_stream(stream_name, stream_config, stream_api, state=None, legacy=True): + if state is None: + state = _ANY_STATE -def generate_stream(stream_name, stream_config, stream_api): - return SourceSalesforce.generate_streams(stream_config, {stream_name: None}, stream_api)[0] + stream = SourceSalesforce(_ANY_CATALOG, _ANY_CONFIG, state).generate_streams(stream_config, {stream_name: None}, stream_api)[0] + if legacy and hasattr(stream, "_legacy_stream"): + # Many tests are going through `generate_streams` to test things that are part of the legacy interface. To smooth the transition, + # we will access the legacy stream through the StreamFacade private field + return stream._legacy_stream + return stream def encoding_symbols_parameters(): diff --git a/airbyte-integrations/connectors/source-salesforce/unit_tests/integration/__init__.py b/airbyte-integrations/connectors/source-salesforce/unit_tests/integration/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/airbyte-integrations/connectors/source-salesforce/unit_tests/integration/test_bulk_stream.py b/airbyte-integrations/connectors/source-salesforce/unit_tests/integration/test_bulk_stream.py new file mode 100644 index 0000000000000..bf80347129abe --- /dev/null +++ b/airbyte-integrations/connectors/source-salesforce/unit_tests/integration/test_bulk_stream.py @@ -0,0 +1,75 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + +import json +import urllib.parse +from datetime import datetime, timedelta, timezone +from typing import Any, Dict, Optional +from unittest import TestCase + +import freezegun +from airbyte_cdk.test.mock_http import HttpMocker, HttpRequest, HttpResponse +from airbyte_protocol.models import SyncMode +from config_builder import ConfigBuilder +from integration.utils import create_base_url, given_authentication, given_stream, read +from salesforce_describe_response_builder import SalesforceDescribeResponseBuilder +from source_salesforce.streams import LOOKBACK_SECONDS + +_A_FIELD_NAME = "a_field" +_ACCESS_TOKEN = "an_access_token" +_CLIENT_ID = "a_client_id" +_CLIENT_SECRET = "a_client_secret" +_CURSOR_FIELD = "SystemModstamp" +_INSTANCE_URL = "https://instance.salesforce.com" +_JOB_ID = "a-job-id" +_LOOKBACK_WINDOW = timedelta(seconds=LOOKBACK_SECONDS) +_NOW = datetime.now(timezone.utc) +_REFRESH_TOKEN = "a_refresh_token" +_STREAM_NAME = "a_stream_name" + +_BASE_URL = create_base_url(_INSTANCE_URL) + + +def _create_field(name: str, _type: Optional[str] = None) -> Dict[str, Any]: + return {"name": name, "type": _type if _type else "string"} + + +def _to_url(to_convert: datetime) -> str: + to_format = to_convert.isoformat(timespec="milliseconds") + return urllib.parse.quote_plus(to_format) + + +def _to_partitioned_datetime(to_convert: datetime) -> str: + return to_convert.strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] + "Z" + + +def _calculate_start_time(start_time: datetime) -> datetime: + # the start is granular to the second hence why we have `0` in terms of milliseconds + return start_time.replace(microsecond=0) + + +@freezegun.freeze_time(_NOW.isoformat()) +class FullRefreshTest(TestCase): + + def setUp(self) -> None: + self._config = ConfigBuilder().client_id(_CLIENT_ID).client_secret(_CLIENT_SECRET).refresh_token(_REFRESH_TOKEN) + + @HttpMocker() + def test_when_read_then_create_job_and_extract_records_from_result(self, http_mocker: HttpMocker) -> None: + given_authentication(http_mocker, _CLIENT_ID, _CLIENT_SECRET, _REFRESH_TOKEN, _INSTANCE_URL) + given_stream(http_mocker, _BASE_URL, _STREAM_NAME, SalesforceDescribeResponseBuilder().field(_A_FIELD_NAME)) + http_mocker.post( + HttpRequest(f"{_BASE_URL}/jobs/query", body=json.dumps({"operation": "queryAll", "query": "SELECT a_field FROM a_stream_name", "contentType": "CSV", "columnDelimiter": "COMMA", "lineEnding": "LF"})), + HttpResponse(json.dumps({"id": _JOB_ID})), + ) + http_mocker.get( + HttpRequest(f"{_BASE_URL}/jobs/query/{_JOB_ID}"), + HttpResponse(json.dumps({"state": "JobComplete"})), + ) + http_mocker.get( + HttpRequest(f"{_BASE_URL}/jobs/query/{_JOB_ID}/results"), + HttpResponse(f"{_A_FIELD_NAME}\nfield_value"), + ) + + output = read(_STREAM_NAME, SyncMode.full_refresh, self._config) + + assert len(output.records) == 1 diff --git a/airbyte-integrations/connectors/source-salesforce/unit_tests/integration/test_rest_stream.py b/airbyte-integrations/connectors/source-salesforce/unit_tests/integration/test_rest_stream.py new file mode 100644 index 0000000000000..a95e02d508a3a --- /dev/null +++ b/airbyte-integrations/connectors/source-salesforce/unit_tests/integration/test_rest_stream.py @@ -0,0 +1,142 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + +import json +import urllib.parse +from datetime import datetime, timedelta, timezone +from typing import Any, Dict, Optional +from unittest import TestCase + +import freezegun +from airbyte_cdk.test.mock_http import HttpMocker, HttpRequest, HttpResponse +from airbyte_cdk.test.state_builder import StateBuilder +from airbyte_protocol.models import SyncMode +from config_builder import ConfigBuilder +from integration.utils import create_base_url, given_authentication, given_stream, read +from salesforce_describe_response_builder import SalesforceDescribeResponseBuilder +from source_salesforce.api import UNSUPPORTED_BULK_API_SALESFORCE_OBJECTS +from source_salesforce.streams import LOOKBACK_SECONDS + +_A_FIELD_NAME = "a_field" +_CLIENT_ID = "a_client_id" +_CLIENT_SECRET = "a_client_secret" +_CURSOR_FIELD = "SystemModstamp" +_INSTANCE_URL = "https://instance.salesforce.com" +_BASE_URL = create_base_url(_INSTANCE_URL) +_LOOKBACK_WINDOW = timedelta(seconds=LOOKBACK_SECONDS) +_NOW = datetime.now(timezone.utc) +_REFRESH_TOKEN = "a_refresh_token" +_STREAM_NAME = UNSUPPORTED_BULK_API_SALESFORCE_OBJECTS[0] + + +def _create_field(name: str, _type: Optional[str] = None) -> Dict[str, Any]: + return {"name": name, "type": _type if _type else "string"} + + +def _to_url(to_convert: datetime) -> str: + to_format = to_convert.isoformat(timespec="milliseconds") + return urllib.parse.quote_plus(to_format) + + +def _to_partitioned_datetime(to_convert: datetime) -> str: + return to_convert.strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] + "Z" + + +def _calculate_start_time(start_time: datetime) -> datetime: + # the start is granular to the second hence why we have `0` in terms of milliseconds + return start_time.replace(microsecond=0) + + +@freezegun.freeze_time(_NOW.isoformat()) +class FullRefreshTest(TestCase): + + def setUp(self) -> None: + self._config = ConfigBuilder().client_id(_CLIENT_ID).client_secret(_CLIENT_SECRET).refresh_token(_REFRESH_TOKEN) + + @HttpMocker() + def test_given_error_on_fetch_chunk_when_read_then_retry(self, http_mocker: HttpMocker) -> None: + given_authentication(http_mocker, _CLIENT_ID, _CLIENT_SECRET, _REFRESH_TOKEN, _INSTANCE_URL) + given_stream(http_mocker, _BASE_URL, _STREAM_NAME, SalesforceDescribeResponseBuilder().field(_A_FIELD_NAME)) + http_mocker.get( + HttpRequest(f"{_BASE_URL}/queryAll?q=SELECT+{_A_FIELD_NAME}+FROM+{_STREAM_NAME}+"), + [ + HttpResponse("", status_code=406), + HttpResponse(json.dumps({"records": [{"a_field": "a_value"}]})), + ] + ) + + output = read(_STREAM_NAME, SyncMode.full_refresh, self._config) + + assert len(output.records) == 1 + + +@freezegun.freeze_time(_NOW.isoformat()) +class IncrementalTest(TestCase): + def setUp(self) -> None: + self._config = ConfigBuilder().client_id(_CLIENT_ID).client_secret(_CLIENT_SECRET).refresh_token(_REFRESH_TOKEN) + + self._http_mocker = HttpMocker() + self._http_mocker.__enter__() + + given_authentication(self._http_mocker, _CLIENT_ID, _CLIENT_SECRET, _REFRESH_TOKEN, _INSTANCE_URL) + given_stream(self._http_mocker, _BASE_URL, _STREAM_NAME, SalesforceDescribeResponseBuilder().field(_A_FIELD_NAME).field(_CURSOR_FIELD, "datetime")) + + def tearDown(self) -> None: + self._http_mocker.__exit__(None, None, None) + + def test_given_no_state_when_read_then_start_sync_from_start(self) -> None: + start = _calculate_start_time(_NOW - timedelta(days=5)) + # as the start comes from the config, we can't use the same format as `_to_url` + start_format_url = urllib.parse.quote_plus(start.strftime('%Y-%m-%dT%H:%M:%SZ')) + self._config.stream_slice_step("P30D").start_date(start) + + self._http_mocker.get( + HttpRequest(f"{_BASE_URL}/queryAll?q=SELECT+{_A_FIELD_NAME},{_CURSOR_FIELD}+FROM+{_STREAM_NAME}+WHERE+SystemModstamp+%3E%3D+{start_format_url}+AND+SystemModstamp+%3C+{_to_url(_NOW)}"), + HttpResponse(json.dumps({"records": [{"a_field": "a_value"}]})), + ) + + read(_STREAM_NAME, SyncMode.incremental, self._config, StateBuilder().with_stream_state(_STREAM_NAME, {})) + + # then HTTP requests are performed + + def test_given_sequential_state_when_read_then_migrate_to_partitioned_state(self) -> None: + cursor_value = _NOW - timedelta(days=5) + start = _calculate_start_time(_NOW - timedelta(days=10)) + self._config.stream_slice_step("P30D").start_date(start) + self._http_mocker.get( + HttpRequest(f"{_BASE_URL}/queryAll?q=SELECT+{_A_FIELD_NAME},{_CURSOR_FIELD}+FROM+{_STREAM_NAME}+WHERE+SystemModstamp+%3E%3D+{_to_url(cursor_value - _LOOKBACK_WINDOW)}+AND+SystemModstamp+%3C+{_to_url(_NOW)}"), + HttpResponse(json.dumps({"records": [{"a_field": "a_value"}]})), + ) + + output = read(_STREAM_NAME, SyncMode.incremental, self._config, StateBuilder().with_stream_state(_STREAM_NAME, {_CURSOR_FIELD: cursor_value.isoformat(timespec="milliseconds")})) + + assert output.most_recent_state.stream_state.dict() == {"state_type": "date-range", "slices": [{"start": _to_partitioned_datetime(start), "end": _to_partitioned_datetime(_NOW)}]} + + def test_given_partitioned_state_when_read_then_sync_missing_partitions_and_update_state(self) -> None: + missing_chunk = (_NOW - timedelta(days=5), _NOW - timedelta(days=3)) + most_recent_state_value = _NOW - timedelta(days=1) + start = _calculate_start_time(_NOW - timedelta(days=10)) + state = StateBuilder().with_stream_state( + _STREAM_NAME, + { + "state_type": "date-range", + "slices": [ + {"start": start.strftime("%Y-%m-%dT%H:%M:%S.000") + "Z", "end": _to_partitioned_datetime(missing_chunk[0])}, + {"start": _to_partitioned_datetime(missing_chunk[1]), "end": _to_partitioned_datetime(most_recent_state_value)}, + ] + } + ) + self._config.stream_slice_step("P30D").start_date(start) + + self._http_mocker.get( + HttpRequest(f"{_BASE_URL}/queryAll?q=SELECT+{_A_FIELD_NAME},{_CURSOR_FIELD}+FROM+{_STREAM_NAME}+WHERE+SystemModstamp+%3E%3D+{_to_url(missing_chunk[0])}+AND+SystemModstamp+%3C+{_to_url(missing_chunk[1])}"), + HttpResponse(json.dumps({"records": [{"a_field": "a_value"}]})), + ) + self._http_mocker.get( + HttpRequest(f"{_BASE_URL}/queryAll?q=SELECT+{_A_FIELD_NAME},{_CURSOR_FIELD}+FROM+{_STREAM_NAME}+WHERE+SystemModstamp+%3E%3D+{_to_url(most_recent_state_value - _LOOKBACK_WINDOW)}+AND+SystemModstamp+%3C+{_to_url(_NOW)}"), + HttpResponse(json.dumps({"records": [{"a_field": "a_value"}]})), + ) + + output = read(_STREAM_NAME, SyncMode.incremental, self._config, state) + + # the start is granular to the second hence why we have `000` in terms of milliseconds + assert output.most_recent_state.stream_state.dict() == {"state_type": "date-range", "slices": [{"start": _to_partitioned_datetime(start), "end": _to_partitioned_datetime(_NOW)}]} diff --git a/airbyte-integrations/connectors/source-salesforce/unit_tests/integration/test_source.py b/airbyte-integrations/connectors/source-salesforce/unit_tests/integration/test_source.py new file mode 100644 index 0000000000000..31a6442fabc15 --- /dev/null +++ b/airbyte-integrations/connectors/source-salesforce/unit_tests/integration/test_source.py @@ -0,0 +1,76 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + +import json +from unittest import TestCase + +import pytest +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.mock_http import HttpMocker, HttpRequest, HttpResponse +from airbyte_cdk.test.state_builder import StateBuilder +from airbyte_cdk.utils.traced_exception import AirbyteTracedException +from airbyte_protocol.models import FailureType, SyncMode +from config_builder import ConfigBuilder +from integration.utils import create_base_url, given_authentication, given_stream +from salesforce_describe_response_builder import SalesforceDescribeResponseBuilder +from source_salesforce import SourceSalesforce + +_CLIENT_ID = "a_client_id" +_CLIENT_SECRET = "a_client_secret" +_FIELD_NAME = "a_field_name" +_INSTANCE_URL = "https://instance.salesforce.com" +_REFRESH_TOKEN = "a_refresh_token" +_STREAM_NAME = "StreamName" + +_BASE_URL = create_base_url(_INSTANCE_URL) + + +class StreamGenerationTest(TestCase): + + def setUp(self) -> None: + self._config = ConfigBuilder().client_id(_CLIENT_ID).client_secret(_CLIENT_SECRET).refresh_token(_REFRESH_TOKEN).build() + self._source = SourceSalesforce( + CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build(), + self._config, + StateBuilder().build() + ) + + self._http_mocker = HttpMocker() + self._http_mocker.__enter__() + + def tearDown(self) -> None: + self._http_mocker.__exit__(None, None, None) + + def test_given_transient_error_fetching_schema_when_streams_then_retry(self) -> None: + given_authentication(self._http_mocker, _CLIENT_ID, _CLIENT_SECRET, _REFRESH_TOKEN, _INSTANCE_URL) + self._http_mocker.get( + HttpRequest(f"{_BASE_URL}/sobjects"), + HttpResponse(json.dumps({"sobjects": [{"name": _STREAM_NAME, "queryable": True}]})), + ) + self._http_mocker.get( + HttpRequest(f"{_BASE_URL}/sobjects/{_STREAM_NAME}/describe"), + [ + HttpResponse("", status_code=406), + SalesforceDescribeResponseBuilder().field("a_field_name").build() + ] + ) + + streams = self._source.streams(self._config) + + assert len(streams) == 2 # _STREAM_NAME and Describe which is always added + assert _FIELD_NAME in next(filter(lambda stream: stream.name == _STREAM_NAME, streams)).get_json_schema()["properties"] + + def test_given_errors_fetching_schema_when_streams_then_raise_exception(self) -> None: + given_authentication(self._http_mocker, _CLIENT_ID, _CLIENT_SECRET, _REFRESH_TOKEN, _INSTANCE_URL) + self._http_mocker.get( + HttpRequest(f"{_BASE_URL}/sobjects"), + HttpResponse(json.dumps({"sobjects": [{"name": _STREAM_NAME, "queryable": True}]})), + ) + self._http_mocker.get( + HttpRequest(f"{_BASE_URL}/sobjects/{_STREAM_NAME}/describe"), + HttpResponse("", status_code=406), + ) + + with pytest.raises(AirbyteTracedException) as exception: + self._source.streams(self._config) + + assert exception.value.failure_type == FailureType.system_error diff --git a/airbyte-integrations/connectors/source-salesforce/unit_tests/integration/utils.py b/airbyte-integrations/connectors/source-salesforce/unit_tests/integration/utils.py new file mode 100644 index 0000000000000..5337ad01ea404 --- /dev/null +++ b/airbyte-integrations/connectors/source-salesforce/unit_tests/integration/utils.py @@ -0,0 +1,65 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + +import json +from typing import Any, Dict, Optional + +from airbyte_cdk.sources.source import TState +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput +from airbyte_cdk.test.entrypoint_wrapper import read as entrypoint_read +from airbyte_cdk.test.mock_http import HttpMocker, HttpRequest, HttpResponse +from airbyte_cdk.test.mock_http.request import ANY_QUERY_PARAMS +from airbyte_cdk.test.state_builder import StateBuilder +from airbyte_protocol.models import ConfiguredAirbyteCatalog, SyncMode +from config_builder import ConfigBuilder +from salesforce_describe_response_builder import SalesforceDescribeResponseBuilder +from source_salesforce import SourceSalesforce + +_API_VERSION = "v57.0" + + +def _catalog(stream_name: str, sync_mode: SyncMode) -> ConfiguredAirbyteCatalog: + return CatalogBuilder().with_stream(stream_name, sync_mode).build() + + +def _source(catalog: ConfiguredAirbyteCatalog, config: Dict[str, Any], state: Optional[TState]) -> SourceSalesforce: + return SourceSalesforce(catalog, config, state) + + +def create_base_url(instance_url: str) -> str: + return f"{instance_url}/services/data/{_API_VERSION}" + + +def read( + stream_name: str, + sync_mode: SyncMode, + config_builder: Optional[ConfigBuilder] = None, + state_builder: Optional[StateBuilder] = None, + expecting_exception: bool = False +) -> EntrypointOutput: + catalog = _catalog(stream_name, sync_mode) + config = config_builder.build() if config_builder else ConfigBuilder().build() + state = state_builder.build() if state_builder else StateBuilder().build() + return entrypoint_read(_source(catalog, config, state), config, catalog, state, expecting_exception) + + +def given_authentication(http_mocker: HttpMocker, client_id: str, client_secret: str, refresh_token: str, instance_url: str) -> None: + http_mocker.post( + HttpRequest( + "https://login.salesforce.com/services/oauth2/token", + query_params=ANY_QUERY_PARAMS, + body=f"grant_type=refresh_token&client_id={client_id}&client_secret={client_secret}&refresh_token={refresh_token}" + ), + HttpResponse(json.dumps({"access_token": "any_access_token", "instance_url": instance_url})), + ) + + +def given_stream(http_mocker: HttpMocker, base_url: str, stream_name: str, schema_builder: SalesforceDescribeResponseBuilder) -> None: + http_mocker.get( + HttpRequest(f"{base_url}/sobjects"), + HttpResponse(json.dumps({"sobjects": [{"name": stream_name, "queryable": True}]})), + ) + http_mocker.get( + HttpRequest(f"{base_url}/sobjects/{stream_name}/describe"), + schema_builder.build(), + ) diff --git a/airbyte-integrations/connectors/source-salesforce/unit_tests/salesforce_describe_response_builder.py b/airbyte-integrations/connectors/source-salesforce/unit_tests/salesforce_describe_response_builder.py new file mode 100644 index 0000000000000..043899d75cba6 --- /dev/null +++ b/airbyte-integrations/connectors/source-salesforce/unit_tests/salesforce_describe_response_builder.py @@ -0,0 +1,18 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + +import json +from typing import Optional + +from airbyte_cdk.test.mock_http import HttpResponse + + +class SalesforceDescribeResponseBuilder: + def __init__(self) -> None: + self._fields = [] + + def field(self, name: str, _type: Optional[str] = None) -> "SalesforceDescribeResponseBuilder": + self._fields.append({"name": name, "type": _type if _type else "string"}) + return self + + def build(self) -> HttpResponse: + return HttpResponse(json.dumps({"fields": self._fields})) diff --git a/airbyte-integrations/connectors/source-salesforce/unit_tests/test_availability_strategy.py b/airbyte-integrations/connectors/source-salesforce/unit_tests/test_availability_strategy.py new file mode 100644 index 0000000000000..2c8abd44f859d --- /dev/null +++ b/airbyte-integrations/connectors/source-salesforce/unit_tests/test_availability_strategy.py @@ -0,0 +1,55 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + +from unittest import TestCase +from unittest.mock import Mock + +import pytest +from airbyte_cdk.sources.streams import Stream +from requests import HTTPError, Response +from source_salesforce.availability_strategy import SalesforceAvailabilityStrategy + +_NO_SOURCE = None + + +class SalesforceAvailabilityStrategyTest(TestCase): + def setUp(self) -> None: + self._stream = Mock(spec=Stream) + self._logger = Mock() + self._error = HTTPError(response=Mock(spec=Response)) + + def test_given_status_code_is_not_forbidden_or_bad_request_when_handle_http_error_then_raise_error(self) -> None: + availability_strategy = SalesforceAvailabilityStrategy() + self._error.response.status_code = 401 + + with pytest.raises(HTTPError): + availability_strategy.handle_http_error(self._stream, self._logger, _NO_SOURCE, self._error) + + def test_given_status_code_is_forbidden_when_handle_http_error_then_is_not_available_with_reason(self) -> None: + availability_strategy = SalesforceAvailabilityStrategy() + self._error.response.status_code = 403 + self._error.response.json.return_value = [{}] + + is_available, reason = availability_strategy.handle_http_error(self._stream, self._logger, _NO_SOURCE, self._error) + + assert not is_available + assert reason + + def test_given_status_code_is_bad_request_when_handle_http_error_then_is_not_available_with_reason(self) -> None: + availability_strategy = SalesforceAvailabilityStrategy() + self._error.response.status_code = 400 + self._error.response.json.return_value = [{}] + + is_available, reason = availability_strategy.handle_http_error(self._stream, self._logger, _NO_SOURCE, self._error) + + assert not is_available + assert reason + + def test_given_rate_limited_when_handle_http_error_then_is_available(self) -> None: + availability_strategy = SalesforceAvailabilityStrategy() + self._error.response.status_code = 400 + self._error.response.json.return_value = [{"errorCode": "REQUEST_LIMIT_EXCEEDED"}] + + is_available, reason = availability_strategy.handle_http_error(self._stream, self._logger, _NO_SOURCE, self._error) + + assert is_available + assert reason is None diff --git a/airbyte-integrations/connectors/source-salesforce/unit_tests/test_slice_generation.py b/airbyte-integrations/connectors/source-salesforce/unit_tests/test_slice_generation.py new file mode 100644 index 0000000000000..3522ff0cd744c --- /dev/null +++ b/airbyte-integrations/connectors/source-salesforce/unit_tests/test_slice_generation.py @@ -0,0 +1,33 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + +from datetime import datetime, timedelta, timezone +from unittest import TestCase + +import freezegun +from airbyte_protocol.models import SyncMode +from config_builder import ConfigBuilder +from conftest import generate_stream, mock_stream_api + +_NOW = datetime.fromisoformat("2020-01-01T00:00:00+00:00") + + +@freezegun.freeze_time(time_to_freeze=_NOW) +class IncrementalSliceGenerationTest(TestCase): + def test_given_start_within_slice_range_when_stream_slices_then_return_one_slice_considering_10_minutes_lookback(self) -> None: + config = ConfigBuilder().start_date(_NOW - timedelta(days=15)).stream_slice_step("P30D").build() + stream = generate_stream("Account", config, mock_stream_api(config)) + + slices = list(stream.stream_slices(sync_mode=SyncMode.full_refresh)) + + assert slices == [{"start_date": "2019-12-17T00:00:00.000+00:00", "end_date": "2020-01-01T00:00:00.000+00:00"}] + + def test_given_slice_range_smaller_than_now_minus_start_date_when_stream_slices_then_return_many_slices(self) -> None: + config = ConfigBuilder().start_date(_NOW - timedelta(days=40)).stream_slice_step("P30D").build() + stream = generate_stream("Account", config, mock_stream_api(config)) + + slices = list(stream.stream_slices(sync_mode=SyncMode.full_refresh)) + + assert slices == [ + {"start_date": "2019-11-22T00:00:00.000+00:00", "end_date": "2019-12-22T00:00:00.000+00:00"}, + {"start_date": "2019-12-22T00:00:00.000+00:00", "end_date": "2020-01-01T00:00:00.000+00:00"} + ] diff --git a/airbyte-integrations/connectors/source-sendgrid/.coveragerc b/airbyte-integrations/connectors/source-sendgrid/.coveragerc index 5703402afaeb6..a76c003fd8c94 100644 --- a/airbyte-integrations/connectors/source-sendgrid/.coveragerc +++ b/airbyte-integrations/connectors/source-sendgrid/.coveragerc @@ -1,3 +1,4 @@ [run] omit = source_sendgrid/run.py + source_sendgrid/__init__.py diff --git a/airbyte-integrations/connectors/source-sendgrid/README.md b/airbyte-integrations/connectors/source-sendgrid/README.md index 599bdb7eeddf3..5745cb704aaf0 100644 --- a/airbyte-integrations/connectors/source-sendgrid/README.md +++ b/airbyte-integrations/connectors/source-sendgrid/README.md @@ -30,7 +30,7 @@ See `sample_files/sample_config.json` for a sample config file. poetry run source-sendgrid spec poetry run source-sendgrid check --config secrets/config.json poetry run source-sendgrid discover --config secrets/config.json -poetry run source-sendgrid read --config secrets/config.json --catalog sample_files/configured_catalog.json +poetry run source-sendgrid read --config secrets/config.json --catalog integration_tests/configured_catalog.json ``` ### Running unit tests diff --git a/airbyte-integrations/connectors/source-sendgrid/acceptance-test-config.yml b/airbyte-integrations/connectors/source-sendgrid/acceptance-test-config.yml index 1c5ad5c0a2730..da37ad620d84b 100644 --- a/airbyte-integrations/connectors/source-sendgrid/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-sendgrid/acceptance-test-config.yml @@ -4,26 +4,35 @@ acceptance_tests: spec: tests: - spec_path: "source_sendgrid/spec.json" + config_path: "secrets/lowcode_config.json" backward_compatibility_tests_config: - disable_for_version: "0.3.0" + disable_for_version: "0.5.0" connection: tests: - - config_path: "secrets/config.json" + - config_path: "secrets/lowcode_config.json" status: "succeed" - config_path: "secrets/old_config.json" status: "succeed" + - config_path: "secrets/python_config.json" + status: "succeed" - config_path: "integration_tests/invalid_time.json" status: "failed" - config_path: "integration_tests/invalid_api_key.json" status: "failed" discovery: tests: - - config_path: "secrets/old_config.json" + - config_path: "secrets/lowcode_config.json" + backward_compatibility_tests_config: + disable_for_version: "1.0.0" + - config_path: "secrets/config.json" + backward_compatibility_tests_config: + disable_for_version: "0.3.0" + - config_path: "secrets/python_config.json" backward_compatibility_tests_config: disable_for_version: "0.3.0" basic_read: tests: - - config_path: "secrets/config.json" + - config_path: "secrets/lowcode_config.json" expect_records: path: "integration_tests/expected_records.jsonl" exact_order: no @@ -37,11 +46,11 @@ acceptance_tests: fail_on_extra_columns: false incremental: tests: - - config_path: "secrets/config.json" + - config_path: "secrets/lowcode_config.json" configured_catalog_path: "integration_tests/no_spam_reports_configured_catalog.json" future_state: future_state_path: "integration_tests/abnormal_state.json" full_refresh: tests: - - config_path: "secrets/config.json" + - config_path: "secrets/lowcode_config.json" configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-sendgrid/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-sendgrid/integration_tests/abnormal_state.json index aca4cd802cf88..540a37318c753 100644 --- a/airbyte-integrations/connectors/source-sendgrid/integration_tests/abnormal_state.json +++ b/airbyte-integrations/connectors/source-sendgrid/integration_tests/abnormal_state.json @@ -42,5 +42,16 @@ "name": "invalid_emails" } } + }, + { + "type": "STREAM", + "stream": { + "stream_state": { + "created_at": "7270247822" + }, + "stream_descriptor": { + "name": "suppression_group_members" + } + } } ] diff --git a/airbyte-integrations/connectors/source-sendgrid/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-sendgrid/integration_tests/configured_catalog.json index 2e5c801025c40..511e1383e50b3 100644 --- a/airbyte-integrations/connectors/source-sendgrid/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-sendgrid/integration_tests/configured_catalog.json @@ -47,7 +47,16 @@ }, { "stream": { - "name": "single_sends", + "name": "singlesends", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "singlesend_stats", "json_schema": {}, "supported_sync_modes": ["full_refresh"] }, @@ -140,15 +149,6 @@ "sync_mode": "incremental", "cursor_field": ["created"], "destination_sync_mode": "append" - }, - { - "stream": { - "name": "unsubscribe_groups", - "json_schema": {}, - "supported_sync_modes": ["full_refresh"] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" } ] } diff --git a/airbyte-integrations/connectors/source-sendgrid/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-sendgrid/integration_tests/expected_records.jsonl index acf7830c5b8e8..933c7e4a911ef 100644 --- a/airbyte-integrations/connectors/source-sendgrid/integration_tests/expected_records.jsonl +++ b/airbyte-integrations/connectors/source-sendgrid/integration_tests/expected_records.jsonl @@ -104,8 +104,8 @@ {"stream":"contacts","data":{"email":"fake_email_15@lmail.c","first_name":"Fake contact","last_name":"Lastname","address_line_1":null,"address_line_2":null,"city":null,"state_province_region":null,"postal_code":"22341","country":null,"alternate_emails":null,"phone_number":null,"whatsapp":null,"line":null,"facebook":null,"unique_name":null,"created_at":"2021-02-01T12:35:27Z","updated_at":"2021-02-01T12:35:56Z","contact_id":"50b36a31-daf8-45c4-bc48-13e150f6746e"},"emitted_at":1674825613540} {"stream":"contacts","data":{"email":"fake_email_3@lmail.c","first_name":"Fake contact","last_name":"Lastname","address_line_1":null,"address_line_2":null,"city":null,"state_province_region":null,"postal_code":"22341","country":null,"alternate_emails":null,"phone_number":null,"whatsapp":null,"line":null,"facebook":null,"unique_name":null,"created_at":"2021-02-01T12:35:14Z","updated_at":"2021-02-01T12:35:43Z","contact_id":"d1211b88-e116-4a0b-a823-0361bf059a06"},"emitted_at":1674825613540} {"stream":"contacts","data":{"email":"fake_email_18@lmail.c","first_name":"Fake contact","last_name":"Lastname","address_line_1":null,"address_line_2":null,"city":null,"state_province_region":null,"postal_code":"22341","country":null,"alternate_emails":null,"phone_number":null,"whatsapp":null,"line":null,"facebook":null,"unique_name":null,"created_at":"2021-02-01T12:35:30Z","updated_at":"2021-02-01T12:36:00Z","contact_id":"19163421-bb29-495d-950f-edede6218081"},"emitted_at":1674825613540} -{"stream": "single_sends", "data": {"ab_phase": "all", "ab_variation": "all", "aggregation": "total", "stats": {"bounce_drops": 0, "bounces": 0, "clicks": 0, "unique_clicks": 0, "delivered": 1, "invalid_emails": 0, "opens": 3, "unique_opens": 1, "requests": 1, "spam_report_drops": 0, "spam_reports": 0, "unsubscribes": 1}, "id": "3c5a9fa6-1084-11ec-ac32-4228d699bad5"}, "emitted_at": 1631093373000} -{"stream": "single_sends", "data": {"ab_phase": "all", "ab_variation": "all", "aggregation": "total", "stats": {"bounce_drops": 0, "bounces": 0, "clicks": 1, "unique_clicks": 1, "delivered": 1, "invalid_emails": 0, "opens": 1, "unique_opens": 1, "requests": 1, "spam_report_drops": 0, "spam_reports": 0, "unsubscribes": 0}, "id": "c9f286fb-1083-11ec-ae03-ca0fc7f28419"}, "emitted_at": 1631093373000} +{"stream": "singlesend_stats", "data": {"ab_phase": "all", "ab_variation": "all", "aggregation": "total", "stats": {"bounce_drops": 0, "bounces": 0, "clicks": 1, "unique_clicks": 1, "delivered": 1, "invalid_emails": 0, "opens": 1, "unique_opens": 1, "requests": 1, "spam_report_drops": 0, "spam_reports": 0, "unsubscribes": 0}, "id": "c9f286fb-1083-11ec-ae03-ca0fc7f28419"}, "emitted_at": 1710090955628} +{"stream": "singlesend_stats", "data": {"ab_phase": "all", "ab_variation": "all", "aggregation": "total", "stats": {"bounce_drops": 0, "bounces": 0, "clicks": 0, "unique_clicks": 0, "delivered": 1, "invalid_emails": 0, "opens": 3, "unique_opens": 1, "requests": 1, "spam_report_drops": 0, "spam_reports": 0, "unsubscribes": 1}, "id": "3c5a9fa6-1084-11ec-ac32-4228d699bad5"}, "emitted_at": 1710090955627} {"stream": "templates", "data": {"id": "d-9e5be59949e043e69f3df003e715b99b", "name": "test_dynamic_template", "generation": "dynamic", "updated_at": "2021-02-03 14:56:56", "versions": []}, "emitted_at": 1631093374000} {"stream": "templates", "data": {"id": "93702a5d-4cb2-4616-8d30-c2766e2b8e18", "name": "Template number 20", "generation": "legacy", "updated_at": "2021-02-03 13:31:29", "versions": []}, "emitted_at": 1631093374000} {"stream": "templates", "data": {"id": "0fd7bcc7-7459-49e9-b9f3-4ace4f9a9ae2", "name": "Template number 19", "generation": "legacy", "updated_at": "2021-02-03 13:31:28", "versions": []}, "emitted_at": 1631093374000} @@ -212,12 +212,12 @@ {"stream": "campaigns", "data": {"created_at": "2021-02-01T12:12:25Z", "id": "be9d147f-6486-11eb-8b51-8aa6caa37fdd", "name": "Third Campaign", "status": "draft", "updated_at": "2021-02-01T12:12:25Z", "is_abtest": false, "channels": ["email"]}, "emitted_at": 1678791750592} {"stream": "campaigns", "data": {"created_at": "2021-02-01T12:12:18Z", "id": "ba43f256-6486-11eb-bb4f-823d082c01b8", "name": "Second Campaign", "status": "draft", "updated_at": "2021-02-01T12:12:18Z", "is_abtest": false, "channels": ["email"]}, "emitted_at": 1678791750593} {"stream": "campaigns", "data": {"created_at": "2021-02-01T12:10:59Z", "id": "8b17a7b7-6486-11eb-bd77-2a301ccc59da", "name": "First Campaign", "status": "draft", "updated_at": "2021-02-01T12:10:59Z", "is_abtest": false, "channels": ["email"]}, "emitted_at": 1678791750593} -{"stream": "segments", "data": {"id": "37d93ee7-268e-4f6f-a103-384af7324b35", "name": "Everyone named Pol, POL or pol", "contacts_count": 0, "sample_updated_at": "2023-03-13T17:30:26Z", "next_sample_update": "2023-03-14T17:30:26Z", "created_at": "2021-02-03T13:23:13Z", "updated_at": "2021-02-03T13:23:13Z", "parent_list_id": null, "status": {"query_validation": "VALID"}}, "emitted_at": 1678791752069} -{"stream": "segments", "data": {"id": "bb3309ca-c002-4085-8d3e-de142dfa317e", "name": "Everyone named Jim, JIM or jim", "contacts_count": 0, "sample_updated_at": "2023-03-14T08:31:18Z", "next_sample_update": "2023-03-15T08:31:18Z", "created_at": "2021-02-03T13:19:50Z", "updated_at": "2021-02-03T13:19:50Z", "parent_list_id": "0236d6d2-75d2-42c5-962d-603e0deaf8d1", "status": {"query_validation": "VALID"}}, "emitted_at": 1678791752069} -{"stream": "segments", "data": {"id": "414d2eed-b2ba-4260-b3b4-e66369fafa43", "name": "Everyone named Bob, BOB or bob", "contacts_count": 0, "sample_updated_at": "2023-03-14T03:31:01Z", "next_sample_update": "2023-03-15T03:31:01Z", "created_at": "2021-02-03T13:18:50Z", "updated_at": "2021-02-03T13:18:50Z", "parent_list_id": "0236d6d2-75d2-42c5-962d-603e0deaf8d1", "status": {"query_validation": "VALID"}}, "emitted_at": 1678791752070} +{"stream": "segments", "data": {"id": "37d93ee7-268e-4f6f-a103-384af7324b35", "name": "Everyone named Pol, POL or pol", "query_version": "1", "contacts_count": 0, "sample_updated_at": "2024-03-09T17:30:18Z", "next_sample_update": "2024-03-10T17:30:18Z", "created_at": "2021-02-03T13:23:13Z", "updated_at": "2021-02-03T13:23:13Z", "parent_list_ids": null, "status": {"query_validation": "VALID"}}, "emitted_at": 1710091076235} +{"stream": "segments", "data": {"id": "bb3309ca-c002-4085-8d3e-de142dfa317e", "name": "Everyone named Jim, JIM or jim", "query_version": "1", "contacts_count": 0, "sample_updated_at": "2024-03-10T08:30:38Z", "next_sample_update": "2024-03-11T08:30:38Z", "created_at": "2021-02-03T13:19:50Z", "updated_at": "2021-02-03T13:19:50Z", "parent_list_ids": ["0236d6d2-75d2-42c5-962d-603e0deaf8d1"], "status": {"query_validation": "VALID"}}, "emitted_at": 1710091076236} +{"stream": "segments", "data": {"id": "414d2eed-b2ba-4260-b3b4-e66369fafa43", "name": "Everyone named Bob, BOB or bob", "query_version": "1", "contacts_count": 0, "sample_updated_at": "2024-03-10T03:30:41Z", "next_sample_update": "2024-03-11T03:30:41Z", "created_at": "2021-02-03T13:18:50Z", "updated_at": "2021-02-03T13:18:50Z", "parent_list_ids": ["0236d6d2-75d2-42c5-962d-603e0deaf8d1"], "status": {"query_validation": "VALID"}}, "emitted_at": 1710091076236} {"stream": "stats_automations", "data": {"id": "85e3874b-6654-11eb-8a29-0e707ab27408", "step_id": "all", "aggregation": "total", "stats": {"bounce_drops": 0, "bounces": 6, "clicks": 0, "unique_clicks": 0, "delivered": 6, "invalid_emails": 0, "opens": 7, "unique_opens": 3, "requests": 12, "spam_report_drops": 0, "spam_reports": 0, "unsubscribes": 0}}, "emitted_at": 1678791758456} -{"stream": "unsubscribe_groups", "data": {"name": "Test Suggestions Group 0", "id": 14760, "description": "Suggestions for testing new stream.", "is_default": false, "unsubscribes": 0}, "emitted_at": 1684520573043} -{"stream": "unsubscribe_groups", "data": {"name": "Test Suggestions Group 1", "id": 14761, "description": "Suggestions for testing new stream.", "is_default": false, "unsubscribes": 0}, "emitted_at": 1684520573043} -{"stream": "unsubscribe_groups", "data": {"name": "Test Suggestions Group 2", "id": 14762, "description": "Suggestions for testing new stream.", "is_default": false, "unsubscribes": 0}, "emitted_at": 1684520573044} -{"stream": "unsubscribe_groups", "data": {"name": "Test Suggestions Group 3", "id": 14763, "description": "Suggestions for testing new stream.", "is_default": false, "unsubscribes": 0}, "emitted_at": 1684520573044} -{"stream": "unsubscribe_groups", "data": {"name": "Test Suggestions Group 4", "id": 14764, "description": "Suggestions for testing new stream.", "is_default": false, "unsubscribes": 0}, "emitted_at": 1684520573044} +{"stream": "singlesends", "data": {"id": "d497b877-6486-11eb-be53-b2a243c7228c", "name": "Campaign 18", "status": "draft", "categories": [], "send_at": null, "created_at": "2021-02-01T12:13:02Z", "updated_at": "2021-02-01T12:13:02Z", "is_abtest": false, "abtest": null}, "emitted_at": 1710090864093} +{"stream": "singlesends", "data": {"id": "c9f286fb-1083-11ec-ae03-ca0fc7f28419", "name": "Copy of Untitled Single Send", "status": "triggered", "categories": ["Promotional", "Travel & Hospitality"], "send_at": "2021-09-08T09:08:32Z", "created_at": "2021-09-08T09:04:36Z", "updated_at": "2021-09-08T09:09:08Z", "is_abtest": false, "abtest": null}, "emitted_at": 1710090864093} +{"stream": "singlesends", "data": {"id": "4e5be6a3-1082-11ec-8512-9afd40c324e6", "name": "Untitled Single Send", "status": "triggered", "categories": ["Promotional", "Travel & Hospitality"], "send_at": "2021-09-08T08:57:05Z", "created_at": "2021-09-08T08:53:59Z", "updated_at": "2021-09-08T08:57:08Z", "is_abtest": false, "abtest": null}, "emitted_at": 1710090864093} +{"stream": "singlesends", "data": {"id": "06ee105f-1082-11ec-8245-86a627812e3d", "name": "Untitled Single Send", "status": "triggered", "categories": [], "send_at": "2021-09-08T08:54:40Z", "created_at": "2021-09-08T08:51:59Z", "updated_at": "2021-09-08T08:55:08Z", "is_abtest": false, "abtest": null}, "emitted_at": 1710090864093} +{"stream": "singlesends", "data": {"id": "3c5a9fa6-1084-11ec-ac32-4228d699bad5", "name": "Untitled Single Send", "status": "triggered", "categories": ["Travel & Hospitality", "Loyalty"], "send_at": "2021-09-08T09:10:22Z", "created_at": "2021-09-08T09:07:48Z", "updated_at": "2021-09-08T09:11:08Z", "is_abtest": false, "abtest": null}, "emitted_at": 1710090864093} diff --git a/airbyte-integrations/connectors/source-sendgrid/integration_tests/invalid_api_key.json b/airbyte-integrations/connectors/source-sendgrid/integration_tests/invalid_api_key.json index 6c2779543eef0..4ed43ae7ef81b 100644 --- a/airbyte-integrations/connectors/source-sendgrid/integration_tests/invalid_api_key.json +++ b/airbyte-integrations/connectors/source-sendgrid/integration_tests/invalid_api_key.json @@ -1,3 +1,3 @@ { - "apikey": "wrong.api.key123" + "api_key": "wrong.api.key123" } diff --git a/airbyte-integrations/connectors/source-sendgrid/integration_tests/invalid_time.json b/airbyte-integrations/connectors/source-sendgrid/integration_tests/invalid_time.json index 7f45d297544f7..c227e132b2a6a 100644 --- a/airbyte-integrations/connectors/source-sendgrid/integration_tests/invalid_time.json +++ b/airbyte-integrations/connectors/source-sendgrid/integration_tests/invalid_time.json @@ -1,4 +1,4 @@ { - "apikey": "apikey", - "start_time": "some erroneous input" + "api_key": "apikey", + "start_date": "some erroneous input" } diff --git a/airbyte-integrations/connectors/source-sendgrid/integration_tests/no_spam_reports_configured_catalog.json b/airbyte-integrations/connectors/source-sendgrid/integration_tests/no_spam_reports_configured_catalog.json index 69d7ca4b2d645..054398c4bfbcd 100644 --- a/airbyte-integrations/connectors/source-sendgrid/integration_tests/no_spam_reports_configured_catalog.json +++ b/airbyte-integrations/connectors/source-sendgrid/integration_tests/no_spam_reports_configured_catalog.json @@ -47,7 +47,16 @@ }, { "stream": { - "name": "single_sends", + "name": "singlesends", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "singlesend_stats", "json_schema": {}, "supported_sync_modes": ["full_refresh"] }, diff --git a/airbyte-integrations/connectors/source-sendgrid/metadata.yaml b/airbyte-integrations/connectors/source-sendgrid/metadata.yaml index 1902d258ce440..2d28645d3bbf1 100644 --- a/airbyte-integrations/connectors/source-sendgrid/metadata.yaml +++ b/airbyte-integrations/connectors/source-sendgrid/metadata.yaml @@ -10,12 +10,24 @@ data: connectorSubtype: api connectorType: source definitionId: fbb5fbe2-16ad-4cf4-af7d-ff9d9c316c87 - dockerImageTag: 0.4.3 + dockerImageTag: 1.0.0 + releases: + breakingChanges: + 1.0.0: + message: + This release makes several changes that upgrade the Sendgrid connector. + The configuration options have been renamed to `api_key` and `start_date`. `start_date` is now required. + The `unsubscribe_groups` stream has been removed. It was the same as `suppression_groups`. You can use that and get the same data. + The `single_sends` stream has been renamed `singlesend_stats`. This is closer to the data and API. + The `segments` stream has been upgraded to use the Sendgrid 2.0 API because the older one has been deprecated. The schema has changed as a result. + To ensure a smooth upgrade, please refresh your schemas and reset your data before resuming syncs. + upgradeDeadline: "2024-04-29" dockerRepository: airbyte/source-sendgrid documentationUrl: https://docs.airbyte.com/integrations/sources/sendgrid githubIssueLabel: source-sendgrid icon: sendgrid.svg license: MIT + maxSecondsBetweenMessages: 5400 name: Sendgrid remoteRegistries: pypi: diff --git a/airbyte-integrations/connectors/source-sendgrid/poetry.lock b/airbyte-integrations/connectors/source-sendgrid/poetry.lock index be3562417885d..ee3b4633c5815 100644 --- a/airbyte-integrations/connectors/source-sendgrid/poetry.lock +++ b/airbyte-integrations/connectors/source-sendgrid/poetry.lock @@ -1,50 +1,50 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.51.39" +version = "0.81.1" description = "A framework for writing Airbyte Connectors." optional = false -python-versions = ">=3.8" +python-versions = "<4.0,>=3.9" files = [ - {file = "airbyte-cdk-0.51.39.tar.gz", hash = "sha256:93e50c6586a41592e3debdcaa051e0c02d8b45cb59a3b411f62d427aa29bbaeb"}, - {file = "airbyte_cdk-0.51.39-py3-none-any.whl", hash = "sha256:ce2436f9c07a2631c5b5c00f7324e728cfb3f81817b58ca0015892f7f94365ac"}, + {file = "airbyte_cdk-0.81.1-py3-none-any.whl", hash = "sha256:01346dc621859a665ef28e391706b95c9adf2c442e1062862a340f2c3578a23e"}, + {file = "airbyte_cdk-0.81.1.tar.gz", hash = "sha256:d9731faf241787142b1c1cff054ff11a9f9139de3efd7dc021dcfb8bc37e682e"}, ] [package.dependencies] -airbyte-protocol-models = "0.4.0" +airbyte-protocol-models = "*" backoff = "*" cachetools = "*" -Deprecated = ">=1.2,<2.0" +Deprecated = ">=1.2,<1.3" dpath = ">=2.0.1,<2.1.0" genson = "1.2.2" isodate = ">=0.6.1,<0.7.0" Jinja2 = ">=3.1.2,<3.2.0" -jsonref = ">=0.2,<1.0" +jsonref = ">=0.2,<0.3" jsonschema = ">=3.2.0,<3.3.0" -pendulum = "*" +pendulum = "<3.0.0" pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" python-dateutil = "*" -PyYAML = ">=6.0.1" +PyYAML = ">=6.0.1,<7.0.0" requests = "*" -requests-cache = "*" +requests_cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pyarrow (==12.0.1)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "pyarrow (==12.0.1)"] -sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] [[package]] name = "airbyte-protocol-models" -version = "0.4.0" +version = "0.9.0" description = "Declares the Airbyte Protocol." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte_protocol_models-0.4.0-py3-none-any.whl", hash = "sha256:e6a31fcd237504198a678d02c0040a8798f281c39203da61a5abce67842c5360"}, - {file = "airbyte_protocol_models-0.4.0.tar.gz", hash = "sha256:518736015c29ac60b6b8964a1b0d9b52e40020bcbd89e2545cc781f0b37d0f2b"}, + {file = "airbyte_protocol_models-0.9.0-py3-none-any.whl", hash = "sha256:e972e140b5efd1edad5a338bcae8fdee9fc12545caf2c321e0f61b151c163a9b"}, + {file = "airbyte_protocol_models-0.9.0.tar.gz", hash = "sha256:40b69c33df23fe82d7078e84beb123bd604480e4d73cb277a890fcc92aedc8d2"}, ] [package.dependencies] @@ -103,13 +103,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -312,13 +312,13 @@ files = [ [[package]] name = "idna" -version = "3.6" +version = "3.7" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.5" files = [ - {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, - {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, ] [[package]] @@ -365,13 +365,13 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jsonref" -version = "0.3.0" -description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." +version = "0.2" +description = "An implementation of JSON Reference for Python" optional = false -python-versions = ">=3.3,<4.0" +python-versions = "*" files = [ - {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, - {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, ] [[package]] @@ -511,47 +511,48 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] name = "pandas" -version = "2.1.1" +version = "2.2.2" description = "Powerful data structures for data analysis, time series, and statistics" optional = false python-versions = ">=3.9" files = [ - {file = "pandas-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:58d997dbee0d4b64f3cb881a24f918b5f25dd64ddf31f467bb9b67ae4c63a1e4"}, - {file = "pandas-2.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02304e11582c5d090e5a52aec726f31fe3f42895d6bfc1f28738f9b64b6f0614"}, - {file = "pandas-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffa8f0966de2c22de408d0e322db2faed6f6e74265aa0856f3824813cf124363"}, - {file = "pandas-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1f84c144dee086fe4f04a472b5cd51e680f061adf75c1ae4fc3a9275560f8f4"}, - {file = "pandas-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:75ce97667d06d69396d72be074f0556698c7f662029322027c226fd7a26965cb"}, - {file = "pandas-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:4c3f32fd7c4dccd035f71734df39231ac1a6ff95e8bdab8d891167197b7018d2"}, - {file = "pandas-2.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9e2959720b70e106bb1d8b6eadd8ecd7c8e99ccdbe03ee03260877184bb2877d"}, - {file = "pandas-2.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:25e8474a8eb258e391e30c288eecec565bfed3e026f312b0cbd709a63906b6f8"}, - {file = "pandas-2.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8bd1685556f3374520466998929bade3076aeae77c3e67ada5ed2b90b4de7f0"}, - {file = "pandas-2.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc3657869c7902810f32bd072f0740487f9e030c1a3ab03e0af093db35a9d14e"}, - {file = "pandas-2.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:05674536bd477af36aa2effd4ec8f71b92234ce0cc174de34fd21e2ee99adbc2"}, - {file = "pandas-2.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:b407381258a667df49d58a1b637be33e514b07f9285feb27769cedb3ab3d0b3a"}, - {file = "pandas-2.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c747793c4e9dcece7bb20156179529898abf505fe32cb40c4052107a3c620b49"}, - {file = "pandas-2.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3bcad1e6fb34b727b016775bea407311f7721db87e5b409e6542f4546a4951ea"}, - {file = "pandas-2.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5ec7740f9ccb90aec64edd71434711f58ee0ea7f5ed4ac48be11cfa9abf7317"}, - {file = "pandas-2.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29deb61de5a8a93bdd033df328441a79fcf8dd3c12d5ed0b41a395eef9cd76f0"}, - {file = "pandas-2.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4f99bebf19b7e03cf80a4e770a3e65eee9dd4e2679039f542d7c1ace7b7b1daa"}, - {file = "pandas-2.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:84e7e910096416adec68075dc87b986ff202920fb8704e6d9c8c9897fe7332d6"}, - {file = "pandas-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:366da7b0e540d1b908886d4feb3d951f2f1e572e655c1160f5fde28ad4abb750"}, - {file = "pandas-2.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9e50e72b667415a816ac27dfcfe686dc5a0b02202e06196b943d54c4f9c7693e"}, - {file = "pandas-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc1ab6a25da197f03ebe6d8fa17273126120874386b4ac11c1d687df288542dd"}, - {file = "pandas-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0dbfea0dd3901ad4ce2306575c54348d98499c95be01b8d885a2737fe4d7a98"}, - {file = "pandas-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0489b0e6aa3d907e909aef92975edae89b1ee1654db5eafb9be633b0124abe97"}, - {file = "pandas-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:4cdb0fab0400c2cb46dafcf1a0fe084c8bb2480a1fa8d81e19d15e12e6d4ded2"}, - {file = "pandas-2.1.1.tar.gz", hash = "sha256:fecb198dc389429be557cde50a2d46da8434a17fe37d7d41ff102e3987fd947b"}, + {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, + {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, + {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, + {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, + {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"}, + {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"}, + {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, ] [package.dependencies] @@ -561,130 +562,66 @@ numpy = [ ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" -tzdata = ">=2022.1" +tzdata = ">=2022.7" [package.extras] -all = ["PyQt5 (>=5.15.6)", "SQLAlchemy (>=1.4.36)", "beautifulsoup4 (>=4.11.1)", "bottleneck (>=1.3.4)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=0.8.1)", "fsspec (>=2022.05.0)", "gcsfs (>=2022.05.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.8.0)", "matplotlib (>=3.6.1)", "numba (>=0.55.2)", "numexpr (>=2.8.0)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pandas-gbq (>=0.17.5)", "psycopg2 (>=2.9.3)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.5)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)", "pyxlsb (>=1.0.9)", "qtpy (>=2.2.0)", "s3fs (>=2022.05.0)", "scipy (>=1.8.1)", "tables (>=3.7.0)", "tabulate (>=0.8.10)", "xarray (>=2022.03.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)", "zstandard (>=0.17.0)"] -aws = ["s3fs (>=2022.05.0)"] -clipboard = ["PyQt5 (>=5.15.6)", "qtpy (>=2.2.0)"] -compression = ["zstandard (>=0.17.0)"] -computation = ["scipy (>=1.8.1)", "xarray (>=2022.03.0)"] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] consortium-standard = ["dataframe-api-compat (>=0.1.7)"] -excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pyxlsb (>=1.0.9)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)"] -feather = ["pyarrow (>=7.0.0)"] -fss = ["fsspec (>=2022.05.0)"] -gcp = ["gcsfs (>=2022.05.0)", "pandas-gbq (>=0.17.5)"] -hdf5 = ["tables (>=3.7.0)"] -html = ["beautifulsoup4 (>=4.11.1)", "html5lib (>=1.1)", "lxml (>=4.8.0)"] -mysql = ["SQLAlchemy (>=1.4.36)", "pymysql (>=1.0.2)"] -output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.8.10)"] -parquet = ["pyarrow (>=7.0.0)"] -performance = ["bottleneck (>=1.3.4)", "numba (>=0.55.2)", "numexpr (>=2.8.0)"] -plot = ["matplotlib (>=3.6.1)"] -postgresql = ["SQLAlchemy (>=1.4.36)", "psycopg2 (>=2.9.3)"] -spss = ["pyreadstat (>=1.1.5)"] -sql-other = ["SQLAlchemy (>=1.4.36)"] -test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)"] -xml = ["lxml (>=4.8.0)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] [[package]] name = "pendulum" -version = "3.0.0" +version = "2.1.2" description = "Python datetimes made easy" optional = false -python-versions = ">=3.8" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ - {file = "pendulum-3.0.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2cf9e53ef11668e07f73190c805dbdf07a1939c3298b78d5a9203a86775d1bfd"}, - {file = "pendulum-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fb551b9b5e6059377889d2d878d940fd0bbb80ae4810543db18e6f77b02c5ef6"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c58227ac260d5b01fc1025176d7b31858c9f62595737f350d22124a9a3ad82d"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60fb6f415fea93a11c52578eaa10594568a6716602be8430b167eb0d730f3332"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b69f6b4dbcb86f2c2fe696ba991e67347bcf87fe601362a1aba6431454b46bde"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:138afa9c373ee450ede206db5a5e9004fd3011b3c6bbe1e57015395cd076a09f"}, - {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:83d9031f39c6da9677164241fd0d37fbfc9dc8ade7043b5d6d62f56e81af8ad2"}, - {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0c2308af4033fa534f089595bcd40a95a39988ce4059ccd3dc6acb9ef14ca44a"}, - {file = "pendulum-3.0.0-cp310-none-win_amd64.whl", hash = "sha256:9a59637cdb8462bdf2dbcb9d389518c0263799189d773ad5c11db6b13064fa79"}, - {file = "pendulum-3.0.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3725245c0352c95d6ca297193192020d1b0c0f83d5ee6bb09964edc2b5a2d508"}, - {file = "pendulum-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6c035f03a3e565ed132927e2c1b691de0dbf4eb53b02a5a3c5a97e1a64e17bec"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:597e66e63cbd68dd6d58ac46cb7a92363d2088d37ccde2dae4332ef23e95cd00"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99a0f8172e19f3f0c0e4ace0ad1595134d5243cf75985dc2233e8f9e8de263ca"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:77d8839e20f54706aed425bec82a83b4aec74db07f26acd039905d1237a5e1d4"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afde30e8146292b059020fbc8b6f8fd4a60ae7c5e6f0afef937bbb24880bdf01"}, - {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:660434a6fcf6303c4efd36713ca9212c753140107ee169a3fc6c49c4711c2a05"}, - {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dee9e5a48c6999dc1106eb7eea3e3a50e98a50651b72c08a87ee2154e544b33e"}, - {file = "pendulum-3.0.0-cp311-none-win_amd64.whl", hash = "sha256:d4cdecde90aec2d67cebe4042fd2a87a4441cc02152ed7ed8fb3ebb110b94ec4"}, - {file = "pendulum-3.0.0-cp311-none-win_arm64.whl", hash = "sha256:773c3bc4ddda2dda9f1b9d51fe06762f9200f3293d75c4660c19b2614b991d83"}, - {file = "pendulum-3.0.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:409e64e41418c49f973d43a28afe5df1df4f1dd87c41c7c90f1a63f61ae0f1f7"}, - {file = "pendulum-3.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a38ad2121c5ec7c4c190c7334e789c3b4624798859156b138fcc4d92295835dc"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fde4d0b2024b9785f66b7f30ed59281bd60d63d9213cda0eb0910ead777f6d37"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b2c5675769fb6d4c11238132962939b960fcb365436b6d623c5864287faa319"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8af95e03e066826f0f4c65811cbee1b3123d4a45a1c3a2b4fc23c4b0dff893b5"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2165a8f33cb15e06c67070b8afc87a62b85c5a273e3aaa6bc9d15c93a4920d6f"}, - {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ad5e65b874b5e56bd942546ea7ba9dd1d6a25121db1c517700f1c9de91b28518"}, - {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17fe4b2c844bbf5f0ece69cfd959fa02957c61317b2161763950d88fed8e13b9"}, - {file = "pendulum-3.0.0-cp312-none-win_amd64.whl", hash = "sha256:78f8f4e7efe5066aca24a7a57511b9c2119f5c2b5eb81c46ff9222ce11e0a7a5"}, - {file = "pendulum-3.0.0-cp312-none-win_arm64.whl", hash = "sha256:28f49d8d1e32aae9c284a90b6bb3873eee15ec6e1d9042edd611b22a94ac462f"}, - {file = "pendulum-3.0.0-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:d4e2512f4e1a4670284a153b214db9719eb5d14ac55ada5b76cbdb8c5c00399d"}, - {file = "pendulum-3.0.0-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:3d897eb50883cc58d9b92f6405245f84b9286cd2de6e8694cb9ea5cb15195a32"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e169cc2ca419517f397811bbe4589cf3cd13fca6dc38bb352ba15ea90739ebb"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f17c3084a4524ebefd9255513692f7e7360e23c8853dc6f10c64cc184e1217ab"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:826d6e258052715f64d05ae0fc9040c0151e6a87aae7c109ba9a0ed930ce4000"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2aae97087872ef152a0c40e06100b3665d8cb86b59bc8471ca7c26132fccd0f"}, - {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ac65eeec2250d03106b5e81284ad47f0d417ca299a45e89ccc69e36130ca8bc7"}, - {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a5346d08f3f4a6e9e672187faa179c7bf9227897081d7121866358af369f44f9"}, - {file = "pendulum-3.0.0-cp37-none-win_amd64.whl", hash = "sha256:235d64e87946d8f95c796af34818c76e0f88c94d624c268693c85b723b698aa9"}, - {file = "pendulum-3.0.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:6a881d9c2a7f85bc9adafcfe671df5207f51f5715ae61f5d838b77a1356e8b7b"}, - {file = "pendulum-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d7762d2076b9b1cb718a6631ad6c16c23fc3fac76cbb8c454e81e80be98daa34"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e8e36a8130819d97a479a0e7bf379b66b3b1b520e5dc46bd7eb14634338df8c"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7dc843253ac373358ffc0711960e2dd5b94ab67530a3e204d85c6e8cb2c5fa10"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a78ad3635d609ceb1e97d6aedef6a6a6f93433ddb2312888e668365908c7120"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b30a137e9e0d1f751e60e67d11fc67781a572db76b2296f7b4d44554761049d6"}, - {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c95984037987f4a457bb760455d9ca80467be792236b69d0084f228a8ada0162"}, - {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d29c6e578fe0f893766c0d286adbf0b3c726a4e2341eba0917ec79c50274ec16"}, - {file = "pendulum-3.0.0-cp38-none-win_amd64.whl", hash = "sha256:deaba8e16dbfcb3d7a6b5fabdd5a38b7c982809567479987b9c89572df62e027"}, - {file = "pendulum-3.0.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b11aceea5b20b4b5382962b321dbc354af0defe35daa84e9ff3aae3c230df694"}, - {file = "pendulum-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a90d4d504e82ad236afac9adca4d6a19e4865f717034fc69bafb112c320dcc8f"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:825799c6b66e3734227756fa746cc34b3549c48693325b8b9f823cb7d21b19ac"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad769e98dc07972e24afe0cff8d365cb6f0ebc7e65620aa1976fcfbcadc4c6f3"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6fc26907eb5fb8cc6188cc620bc2075a6c534d981a2f045daa5f79dfe50d512"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c717eab1b6d898c00a3e0fa7781d615b5c5136bbd40abe82be100bb06df7a56"}, - {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3ddd1d66d1a714ce43acfe337190be055cdc221d911fc886d5a3aae28e14b76d"}, - {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:822172853d7a9cf6da95d7b66a16c7160cb99ae6df55d44373888181d7a06edc"}, - {file = "pendulum-3.0.0-cp39-none-win_amd64.whl", hash = "sha256:840de1b49cf1ec54c225a2a6f4f0784d50bd47f68e41dc005b7f67c7d5b5f3ae"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3b1f74d1e6ffe5d01d6023870e2ce5c2191486928823196f8575dcc786e107b1"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:729e9f93756a2cdfa77d0fc82068346e9731c7e884097160603872686e570f07"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e586acc0b450cd21cbf0db6bae386237011b75260a3adceddc4be15334689a9a"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22e7944ffc1f0099a79ff468ee9630c73f8c7835cd76fdb57ef7320e6a409df4"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fa30af36bd8e50686846bdace37cf6707bdd044e5cb6e1109acbad3277232e04"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:440215347b11914ae707981b9a57ab9c7b6983ab0babde07063c6ee75c0dc6e7"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:314c4038dc5e6a52991570f50edb2f08c339debdf8cea68ac355b32c4174e820"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5acb1d386337415f74f4d1955c4ce8d0201978c162927d07df8eb0692b2d8533"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a789e12fbdefaffb7b8ac67f9d8f22ba17a3050ceaaa635cd1cc4645773a4b1e"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:860aa9b8a888e5913bd70d819306749e5eb488e6b99cd6c47beb701b22bdecf5"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:5ebc65ea033ef0281368217fbf59f5cb05b338ac4dd23d60959c7afcd79a60a0"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d9fef18ab0386ef6a9ac7bad7e43ded42c83ff7ad412f950633854f90d59afa8"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1c134ba2f0571d0b68b83f6972e2307a55a5a849e7dac8505c715c531d2a8795"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:385680812e7e18af200bb9b4a49777418c32422d05ad5a8eb85144c4a285907b"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eec91cd87c59fb32ec49eb722f375bd58f4be790cae11c1b70fac3ee4f00da0"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4386bffeca23c4b69ad50a36211f75b35a4deb6210bdca112ac3043deb7e494a"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dfbcf1661d7146d7698da4b86e7f04814221081e9fe154183e34f4c5f5fa3bf8"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:04a1094a5aa1daa34a6b57c865b25f691848c61583fb22722a4df5699f6bf74c"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5b0ec85b9045bd49dd3a3493a5e7ddfd31c36a2a60da387c419fa04abcaecb23"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0a15b90129765b705eb2039062a6daf4d22c4e28d1a54fa260892e8c3ae6e157"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:bb8f6d7acd67a67d6fedd361ad2958ff0539445ef51cbe8cd288db4306503cd0"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd69b15374bef7e4b4440612915315cc42e8575fcda2a3d7586a0d88192d0c88"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc00f8110db6898360c53c812872662e077eaf9c75515d53ecc65d886eec209a"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:83a44e8b40655d0ba565a5c3d1365d27e3e6778ae2a05b69124db9e471255c4a"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1a3604e9fbc06b788041b2a8b78f75c243021e0f512447806a6d37ee5214905d"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:92c307ae7accebd06cbae4729f0ba9fa724df5f7d91a0964b1b972a22baa482b"}, - {file = "pendulum-3.0.0.tar.gz", hash = "sha256:5d034998dea404ec31fae27af6b22cff1708f830a1ed7353be4d1019bb9f584e"}, + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, ] [package.dependencies] -python-dateutil = ">=2.6" -tzdata = ">=2020.1" - -[package.extras] -test = ["time-machine (>=2.6.0)"] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" [[package]] name = "platformdirs" @@ -729,47 +666,47 @@ files = [ [[package]] name = "pydantic" -version = "1.10.14" +version = "1.10.15" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, - {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, - {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, - {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, - {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, - {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, - {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, - {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, - {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, - {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, - {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, - {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, - {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, - {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, - {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, - {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, - {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, - {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, - {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, - {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, - {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, - {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, - {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, - {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, - {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, - {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, - {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, - {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, - {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, - {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, - {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, - {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, - {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, - {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, - {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, - {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50f1666a9940d3d68683c9d96e39640f709d7a72ff8702987dab1761036206bb"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82790d4753ee5d00739d6cb5cf56bceb186d9d6ce134aca3ba7befb1eedbc2c8"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d207d5b87f6cbefbdb1198154292faee8017d7495a54ae58db06762004500d00"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e49db944fad339b2ccb80128ffd3f8af076f9f287197a480bf1e4ca053a866f0"}, + {file = "pydantic-1.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:d3b5c4cbd0c9cb61bbbb19ce335e1f8ab87a811f6d589ed52b0254cf585d709c"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3d5731a120752248844676bf92f25a12f6e45425e63ce22e0849297a093b5b0"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c365ad9c394f9eeffcb30a82f4246c0006417f03a7c0f8315d6211f25f7cb654"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3287e1614393119c67bd4404f46e33ae3be3ed4cd10360b48d0a4459f420c6a3"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be51dd2c8596b25fe43c0a4a59c2bee4f18d88efb8031188f9e7ddc6b469cf44"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6a51a1dd4aa7b3f1317f65493a182d3cff708385327c1c82c81e4a9d6d65b2e4"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4e316e54b5775d1eb59187f9290aeb38acf620e10f7fd2f776d97bb788199e53"}, + {file = "pydantic-1.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:0d142fa1b8f2f0ae11ddd5e3e317dcac060b951d605fda26ca9b234b92214986"}, + {file = "pydantic-1.10.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7ea210336b891f5ea334f8fc9f8f862b87acd5d4a0cbc9e3e208e7aa1775dabf"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3453685ccd7140715e05f2193d64030101eaad26076fad4e246c1cc97e1bb30d"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bea1f03b8d4e8e86702c918ccfd5d947ac268f0f0cc6ed71782e4b09353b26f"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:af9850d98fc21e5bc24ea9e35dd80a29faf6462c608728a110c0a30b595e58b7"}, + {file = "pydantic-1.10.15-cp37-cp37m-win_amd64.whl", hash = "sha256:d31ee5b14a82c9afe2bd26aaa405293d4237d0591527d9129ce36e58f19f95c1"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5e09c19df304b8123938dc3c53d3d3be6ec74b9d7d0d80f4f4b5432ae16c2022"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7ac9237cd62947db00a0d16acf2f3e00d1ae9d3bd602b9c415f93e7a9fc10528"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:584f2d4c98ffec420e02305cf675857bae03c9d617fcfdc34946b1160213a948"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbc6989fad0c030bd70a0b6f626f98a862224bc2b1e36bfc531ea2facc0a340c"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d573082c6ef99336f2cb5b667b781d2f776d4af311574fb53d908517ba523c22"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6bd7030c9abc80134087d8b6e7aa957e43d35714daa116aced57269a445b8f7b"}, + {file = "pydantic-1.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:3350f527bb04138f8aff932dc828f154847fbdc7a1a44c240fbfff1b57f49a12"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51d405b42f1b86703555797270e4970a9f9bd7953f3990142e69d1037f9d9e51"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a980a77c52723b0dc56640ced396b73a024d4b74f02bcb2d21dbbac1debbe9d0"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f1a1fb467d3f49e1708a3f632b11c69fccb4e748a325d5a491ddc7b5d22383"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:676ed48f2c5bbad835f1a8ed8a6d44c1cd5a21121116d2ac40bd1cd3619746ed"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92229f73400b80c13afcd050687f4d7e88de9234d74b27e6728aa689abcf58cc"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2746189100c646682eff0bce95efa7d2e203420d8e1c613dc0c6b4c1d9c1fde4"}, + {file = "pydantic-1.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:394f08750bd8eaad714718812e7fab615f873b3cdd0b9d84e76e51ef3b50b6b7"}, + {file = "pydantic-1.10.15-py3-none-any.whl", hash = "sha256:28e552a060ba2740d0d2aabe35162652c1459a0b9069fe0db7f4ee0e18e74d58"}, + {file = "pydantic-1.10.15.tar.gz", hash = "sha256:ca832e124eda231a60a041da4f013e3ff24949d94a01154b137fc2f2a43c3ffb"}, ] [package.dependencies] @@ -779,6 +716,21 @@ typing-extensions = ">=4.2.0" dotenv = ["python-dotenv (>=0.10.4)"] email = ["email-validator (>=1.0.3)"] +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + [[package]] name = "pyrsistent" version = "0.20.0" @@ -846,30 +798,30 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xm [[package]] name = "pytest-mock" -version = "3.12.0" +version = "3.14.0" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, - {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, ] [package.dependencies] -pytest = ">=5.0" +pytest = ">=6.2.5" [package.extras] dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -886,6 +838,17 @@ files = [ {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, ] +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + [[package]] name = "pyyaml" version = "6.0.1" @@ -969,13 +932,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -987,50 +950,48 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" -version = "1.11.0" +version = "1.12.1" description = "Mock out responses from the requests package" optional = false -python-versions = "*" +python-versions = ">=3.5" files = [ - {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"}, - {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"}, + {file = "requests-mock-1.12.1.tar.gz", hash = "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401"}, + {file = "requests_mock-1.12.1-py2.py3-none-any.whl", hash = "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563"}, ] [package.dependencies] -requests = ">=2.3,<3" -six = "*" +requests = ">=2.22,<3" [package.extras] fixture = ["fixtures"] -test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] [[package]] name = "setuptools" -version = "69.1.0" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -1056,13 +1017,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.11.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, + {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, ] [[package]] @@ -1092,13 +1053,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1203,4 +1164,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "8a246317365f1708c1268c85f6a66e59fbc624bd1d3449ff8533756564b443b0" +content-hash = "b6e517853295db78c861e5ce27360f284dd28a95ff3e4a858f9c80ecfb2aa92b" diff --git a/airbyte-integrations/connectors/source-sendgrid/pyproject.toml b/airbyte-integrations/connectors/source-sendgrid/pyproject.toml index 35ad4315bcc91..091b17d5289bb 100644 --- a/airbyte-integrations/connectors/source-sendgrid/pyproject.toml +++ b/airbyte-integrations/connectors/source-sendgrid/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "0.4.3" +version = "1.0.0" name = "source-sendgrid" description = "Source implementation for Sendgrid." authors = [ "Airbyte ",] @@ -17,10 +17,8 @@ include = "source_sendgrid" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "==0.51.39" -requests = "==2.31.0" -pandas = "==2.1.1" -backoff = "==2.2.1" +airbyte-cdk = "^0" +pandas = "^2.1.1" [tool.poetry.scripts] source-sendgrid = "source_sendgrid.run:run" diff --git a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/config_migrations.py b/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/config_migrations.py new file mode 100644 index 0000000000000..222a4136aa876 --- /dev/null +++ b/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/config_migrations.py @@ -0,0 +1,98 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +import logging +from typing import Any, List, Mapping, Optional + +import pendulum +from airbyte_cdk.config_observation import create_connector_config_control_message +from airbyte_cdk.entrypoint import AirbyteEntrypoint +from airbyte_cdk.sources import Source +from airbyte_cdk.sources.message import InMemoryMessageRepository, MessageRepository + +logger = logging.getLogger("airbyte_logger") + + +class MigrateToLowcodeConfig: + """ + This class stands for migrating the config at runtime. + Specifically, starting from `0.5.0`, apikey moved to api_key and start_time moved to start_date. + start_time was not required and now start_date is. + """ + + message_repository: MessageRepository = InMemoryMessageRepository() + + @classmethod + def should_migrate(cls, config: Mapping[str, Any]) -> bool: + """ + This method determines if the config should be migrated. + Returns: + > True, if the transformation is necessary + > False, otherwise. + """ + has_api = "apikey" in config + has_time = "start_time" in config + has_new_api = "api_key" in config + has_new_date = "start_date" in config + if has_new_api and has_new_date: + return False + return has_api or has_time + + @staticmethod + def transform_config(config: Mapping[str, Any]) -> Mapping[str, Any]: + # move api_secret inside credentials block + if "apikey" in config: + config["api_key"] = config["apikey"] + + if "start_time" in config: + epoch_time = parse_config_int(config["start_time"]) + if epoch_time: + config["start_date"] = pendulum.from_timestamp(epoch_time, tz="UTC").to_iso8601_string() + else: + config["start_date"] = pendulum.parse(config["start_time"]).to_iso8601_string() + + if "start_date" not in config: + config["start_date"] = "2009-08-01T00:00:00Z" + + return config + + @classmethod + def modify_and_save(cls, config_path: str, source: Source, config: Mapping[str, Any]) -> Mapping[str, Any]: + # modify the config + migrated_config = cls.transform_config(config) + # save the config + source.write_config(migrated_config, config_path) + # return modified config + return migrated_config + + @classmethod + def emit_control_message(cls, migrated_config: Mapping[str, Any]) -> None: + print(create_connector_config_control_message(migrated_config).json(exclude_unset=True)) + + @classmethod + def migrate(cls, args: List[str], source: Source) -> None: + """ + This method checks the input args, should the config be migrated, + transform if necessary and emit the CONTROL message. + """ + # get config path + config_path = AirbyteEntrypoint(source).extract_config(args) + # proceed only if `--config` arg is provided + if config_path: + # read the existing config + config = source.read_config(config_path) + # migration check + if cls.should_migrate(config): + cls.emit_control_message(cls.modify_and_save(config_path, source, config)) + + +def parse_config_int(value) -> Optional[int]: + if isinstance(value, int): + return value + try: + if float(value) == int(float(value)): + return int(value) + except (ValueError, TypeError): + return None + return None diff --git a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/manifest.yaml b/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/manifest.yaml index f2932c8ea8507..675dbe5878511 100644 --- a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/manifest.yaml +++ b/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/manifest.yaml @@ -1,257 +1,920 @@ -version: "0.29.0" -definitions: - page_size: 50 - step: "P30D" - - schema_loader: - type: JsonFileSchemaLoader - file_path: "./source_sendgrid/schemas/{{ parameters.name }}.json" - - requester: - type: HttpRequester - url_base: "https://api.sendgrid.com" - http_method: "GET" - authenticator: - type: "BearerAuthenticator" - api_token: "{{ config.apikey }}" - cursor_paginator: - type: DefaultPaginator - page_size: "#/definitions/page_size" - limit_option: - inject_into: "request_parameter" - field_name: "page_size" - page_token_option: - type: RequestPath - pagination_strategy: - type: "CursorPagination" - cursor_value: "{{ response._metadata.next }}" - offset_paginator: - type: DefaultPaginator - $parameters: - page_size: "#/definitions/page_size" - limit_option: - inject_into: "request_parameter" - field_name: "limit" - page_token_option: - type: RequestOption - inject_into: "request_parameter" - field_name: "offset" - pagination_strategy: - type: "OffsetIncrement" - retriever: - type: SimpleRetriever - incremental_sync: - type: "DatetimeBasedCursor" - start_datetime: - datetime: "{{ config['start_time'] }}" - datetime_format: "%s" - end_datetime: - datetime: "{{ now_utc() }}" - datetime_format: "%Y-%m-%d %H:%M:%S.%f%z" - step: "#/definitions/step" - start_time_option: - field_name: "start_time" - inject_into: "request_parameter" - end_time_option: - field_name: "end_time" - inject_into: "request_parameter" - datetime_format: "%s" - cursor_granularity: "PT1S" - messages_incremental_sync: - type: "DatetimeBasedCursor" - start_datetime: - datetime: "{{ config['start_time'] }}" - datetime_format: "%s" - end_datetime: - datetime: "{{ now_utc() }}" - datetime_format: "%Y-%m-%d %H:%M:%S.%f%z" - step: "#/definitions/step" - datetime_format: "%Y-%m-%dT%H:%M:%S.%f%z" - cursor_granularity: "PT0.000001S" - - base_stream: - type: DeclarativeStream +version: 0.81.1 +type: DeclarativeSource +check: + type: CheckStream + stream_names: + - bounces +streams: + - type: DeclarativeStream + name: bounces + primary_key: [] schema_loader: - $ref: "#/definitions/schema_loader" + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/schema# + properties: + created: + type: integer + email: + type: string + reason: + type: string + status: + type: string + type: object retriever: - $ref: "#/definitions/retriever" + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.sendgrid.com + path: /v3/suppression/bounces + http_method: GET + authenticator: + type: BearerAuthenticator + api_token: "{{ config['api_key'] }}" record_selector: + type: RecordSelector extractor: + type: DpathExtractor field_path: [] - requester: - $ref: "#/definitions/requester" paginator: - type: NoPagination -streams: - - $ref: "#/definitions/base_stream" - $parameters: - name: "lists" - primary_key: "id" - path: "/v3/marketing/lists" - field_path: ["result"] + type: DefaultPaginator + page_token_option: + type: RequestOption + inject_into: request_parameter + field_name: offset + page_size_option: + type: RequestOption + field_name: limit + inject_into: request_parameter + pagination_strategy: + type: OffsetIncrement + page_size: 500 + inject_on_first_request: true + incremental_sync: + type: DatetimeBasedCursor + cursor_field: created + cursor_datetime_formats: + - "%s" + datetime_format: "%s" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: start_time + inject_into: request_parameter + end_time_option: + type: RequestOption + field_name: end_time + inject_into: request_parameter + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + - type: DeclarativeStream + name: spam_reports + primary_key: [] + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/draft-07/schema# + properties: + created: + type: integer + email: + type: string + ip: + type: string + type: object retriever: - $ref: "#/definitions/base_stream/retriever" + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.sendgrid.com + path: /v3/suppression/spam_reports + http_method: GET + authenticator: + type: BearerAuthenticator + api_token: "{{ config['api_key'] }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: [] paginator: - $ref: "#/definitions/cursor_paginator" - - $ref: "#/definitions/base_stream" - $parameters: - name: "campaigns" - primary_key: "id" - path: "/v3/marketing/campaigns" - field_path: ["result"] + type: DefaultPaginator + page_token_option: + type: RequestOption + inject_into: request_parameter + field_name: offset + page_size_option: + type: RequestOption + field_name: limit + inject_into: request_parameter + pagination_strategy: + type: OffsetIncrement + page_size: 500 + inject_on_first_request: true + incremental_sync: + type: DatetimeBasedCursor + cursor_field: created + cursor_datetime_formats: + - "%s" + datetime_format: "%s" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: start_time + inject_into: request_parameter + end_time_option: + type: RequestOption + field_name: end_time + inject_into: request_parameter + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + - type: DeclarativeStream + name: global_suppressions + primary_key: + - email + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/schema# + properties: + created: + type: integer + email: + type: string + type: object retriever: - $ref: "#/definitions/base_stream/retriever" + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.sendgrid.com + path: /v3/suppression/unsubscribes + http_method: GET + authenticator: + type: BearerAuthenticator + api_token: "{{ config['api_key'] }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: [] paginator: - $ref: "#/definitions/cursor_paginator" - - $ref: "#/definitions/base_stream" - $parameters: - name: "contacts" - primary_key: "id" - path: "/v3/marketing/contacts" - field_path: ["result"] - - $ref: "#/definitions/base_stream" - $parameters: - name: "stats_automations" - primary_key: "id" - path: "/v3/marketing/stats/automations" - field_path: ["results"] + type: DefaultPaginator + page_token_option: + type: RequestOption + inject_into: request_parameter + field_name: offset + page_size_option: + type: RequestOption + field_name: limit + inject_into: request_parameter + pagination_strategy: + type: OffsetIncrement + page_size: 500 + inject_on_first_request: true + incremental_sync: + type: DatetimeBasedCursor + cursor_field: created + cursor_datetime_formats: + - "%s" + datetime_format: "%s" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: start_time + inject_into: request_parameter + end_time_option: + type: RequestOption + field_name: end_time + inject_into: request_parameter + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + - type: DeclarativeStream + name: blocks + primary_key: + - email + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/draft-07/schema# + properties: + created: + type: integer + email: + type: string + reason: + type: string + status: + type: string + type: object retriever: - $ref: "#/definitions/base_stream/retriever" + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.sendgrid.com + path: /v3/suppression/blocks + http_method: GET + authenticator: + type: BearerAuthenticator + api_token: "{{ config['api_key'] }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: [] paginator: - $ref: "#/definitions/cursor_paginator" - - $ref: "#/definitions/base_stream" - $parameters: - name: "segments" - primary_key: "id" - path: "/v3/marketing/segments" - field_path: ["results"] - - $ref: "#/definitions/base_stream" - $parameters: - name: "single_sends" - primary_key: "id" - path: "/v3/marketing/stats/singlesends" - field_path: ["results"] + type: DefaultPaginator + page_token_option: + type: RequestOption + inject_into: request_parameter + field_name: offset + page_size_option: + type: RequestOption + field_name: limit + inject_into: request_parameter + pagination_strategy: + type: OffsetIncrement + page_size: 500 + inject_on_first_request: true + incremental_sync: + type: DatetimeBasedCursor + cursor_field: created + cursor_datetime_formats: + - "%s" + datetime_format: "%s" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: start_time + inject_into: request_parameter + end_time_option: + type: RequestOption + field_name: end_time + inject_into: request_parameter + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + - type: DeclarativeStream + name: invalid_emails + primary_key: + - email + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/draft-07/schema# + properties: + created: + type: integer + email: + type: string + reason: + type: string + type: object retriever: - $ref: "#/definitions/base_stream/retriever" + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.sendgrid.com + path: /v3/suppression/invalid_emails + http_method: GET + authenticator: + type: BearerAuthenticator + api_token: "{{ config['api_key'] }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: [] paginator: - $ref: "#/definitions/cursor_paginator" - - $ref: "#/definitions/base_stream" - $parameters: - name: "templates" - primary_key: "id" - path: "/v3/templates" - field_path: ["result"] + type: DefaultPaginator + page_token_option: + type: RequestOption + inject_into: request_parameter + field_name: offset + page_size_option: + type: RequestOption + field_name: limit + inject_into: request_parameter + pagination_strategy: + type: OffsetIncrement + page_size: 500 + inject_on_first_request: true + incremental_sync: + type: DatetimeBasedCursor + cursor_field: created + cursor_datetime_formats: + - "%s" + datetime_format: "%s" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: start_time + inject_into: request_parameter + end_time_option: + type: RequestOption + field_name: end_time + inject_into: request_parameter + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + - type: DeclarativeStream + name: suppression_groups + primary_key: + - id + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/schema# + properties: + description: + type: string + id: + type: integer + is_default: + type: boolean + name: + type: string + unsubscribes: + type: integer + type: object retriever: - $ref: "#/definitions/base_stream/retriever" + type: SimpleRetriever requester: - $ref: "#/definitions/base_stream/retriever/requester" - request_parameters: - generations: "legacy,dynamic" + type: HttpRequester + url_base: https://api.sendgrid.com + path: /v3/asm/groups + http_method: GET + authenticator: + type: BearerAuthenticator + api_token: "{{ config['api_key'] }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: [] paginator: - $ref: "#/definitions/cursor_paginator" - - $ref: "#/definitions/base_stream" - $parameters: - name: "bounces" - primary_key: "email" - cursor_field: "created" - path: "/v3/suppression/bounces" - field_path: [] - incremental_sync: - $ref: "#/definitions/incremental_sync" + type: NoPagination + - type: DeclarativeStream + name: suppression_group_members + primary_key: + - group_id + - email + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/schema# + properties: + created_at: + type: integer + email: + type: string + group_id: + type: integer + group_name: + type: string + type: object retriever: - $ref: "#/definitions/base_stream/retriever" + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.sendgrid.com + path: /v3/asm/suppressions + http_method: GET + authenticator: + type: BearerAuthenticator + api_token: "{{ config['api_key'] }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: [] paginator: - $ref: "#/definitions/offset_paginator" - - $ref: "#/definitions/base_stream" - $parameters: - name: "global_suppressions" - primary_key: "email" - cursor_field: "created" - path: "/v3/suppression/unsubscribes" - field_path: [] + type: DefaultPaginator + page_token_option: + type: RequestOption + inject_into: request_parameter + field_name: offset + page_size_option: + type: RequestOption + field_name: limit + inject_into: request_parameter + pagination_strategy: + type: OffsetIncrement + page_size: 500 incremental_sync: - $ref: "#/definitions/incremental_sync" + type: DatetimeBasedCursor + cursor_field: created_at + cursor_datetime_formats: + - "%s" + datetime_format: "%s" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: start_time + inject_into: request_parameter + end_time_option: + type: RequestOption + field_name: end_time + inject_into: request_parameter + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + - type: DeclarativeStream + name: lists + primary_key: + - id + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/schema# + properties: + _metadata: + properties: + self: + type: string + type: object + contact_count: + type: integer + id: + type: string + name: + type: string + type: object retriever: - $ref: "#/definitions/base_stream/retriever" + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.sendgrid.com + path: /v3/marketing/lists + http_method: GET + authenticator: + type: BearerAuthenticator + api_token: "{{ config['api_key'] }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - result paginator: - $ref: "#/definitions/offset_paginator" - - $ref: "#/definitions/base_stream" - $parameters: - name: "blocks" - primary_key: "email" - cursor_field: "created" - path: "/v3/suppression/blocks" - field_path: [] - incremental_sync: - $ref: "#/definitions/incremental_sync" + type: DefaultPaginator + page_token_option: + type: RequestPath + page_size_option: + inject_into: request_parameter + type: RequestOption + field_name: page_size + pagination_strategy: + type: CursorPagination + page_size: 1000 + cursor_value: '{{ response.get("_metadata", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("_metadata", {}).get("next", {}) }}' + - type: DeclarativeStream + name: segments + primary_key: + - id + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/schema# + properties: + contacts_count: + type: integer + created_at: + format: date-time + type: ["null", string] + id: + type: string + name: + type: string + next_sample_update: + format: date-time + type: ["null", string] + parent_list_ids: + items: + type: ["null", string] + type: + - array + - "null" + query_version: + type: string + sample_updated_at: + format: date-time + type: ["null", string] + status: + properties: + query_validation: + type: string + type: object + updated_at: + format: date-time + type: ["null", string] + type: object retriever: - $ref: "#/definitions/base_stream/retriever" + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.sendgrid.com + path: /v3/marketing/segments/2.0 + http_method: GET + authenticator: + type: BearerAuthenticator + api_token: "{{ config['api_key'] }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - results paginator: - $ref: "#/definitions/offset_paginator" - - $ref: "#/definitions/base_stream" - $parameters: - name: "suppression_groups" - primary_key: "id" - path: "/v3/asm/groups" - field_path: [] - - $ref: "#/definitions/base_stream" - $parameters: - name: "suppression_group_members" - primary_key: "group_id" - path: "/v3/asm/suppressions" - field_path: [] + type: NoPagination + - type: DeclarativeStream + name: singlesend_stats + primary_key: + - id + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/schema# + properties: + ab_phase: + type: string + ab_variation: + type: string + aggregation: + type: string + id: + type: string + format: uuid + stats: + properties: + bounce_drops: + type: integer + bounces: + type: integer + clicks: + type: integer + delivered: + type: integer + invalid_emails: + type: integer + opens: + type: integer + requests: + type: integer + spam_report_drops: + type: integer + spam_reports: + type: integer + unique_clicks: + type: integer + unique_opens: + type: integer + unsubscribes: + type: integer + type: object + type: object retriever: - $ref: "#/definitions/base_stream/retriever" + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.sendgrid.com + path: /v3/marketing/stats/singlesends + http_method: GET + authenticator: + type: BearerAuthenticator + api_token: "{{ config['api_key'] }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - results paginator: - $ref: "#/definitions/offset_paginator" - - $ref: "#/definitions/base_stream" - $parameters: - name: "invalid_emails" - primary_key: "email" - cursor_field: "created" - path: "/v3/suppression/invalid_emails" - field_path: [] - incremental_sync: - $ref: "#/definitions/incremental_sync" + type: DefaultPaginator + page_token_option: + type: RequestPath + page_size_option: + type: RequestOption + field_name: page_size + inject_into: request_parameter + pagination_strategy: + type: CursorPagination + page_size: 50 + cursor_value: '{{ response.get("_metadata", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("_metadata", {}).get("next", {}) }}' + - type: DeclarativeStream + name: stats_automations + primary_key: + - id + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/schema# + properties: + aggregation: + type: string + id: + type: string + stats: + properties: + bounce_drops: + type: integer + bounces: + type: integer + clicks: + type: integer + delivered: + type: integer + invalid_emails: + type: integer + opens: + type: integer + requests: + type: integer + spam_report_drops: + type: integer + spam_reports: + type: integer + unique_clicks: + type: integer + unique_opens: + type: integer + unsubscribes: + type: integer + type: object + step_id: + type: string + type: object retriever: - $ref: "#/definitions/base_stream/retriever" + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.sendgrid.com + path: /v3/marketing/stats/automations + http_method: GET + authenticator: + type: BearerAuthenticator + api_token: "{{ config['api_key'] }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - results paginator: - $ref: "#/definitions/offset_paginator" - - $ref: "#/definitions/base_stream" - $parameters: - name: "spam_reports" - primary_key: "email" - cursor_field: "created" - path: "/v3/suppression/spam_reports" - field_path: [] - incremental_sync: - $ref: "#/definitions/incremental_sync" + type: DefaultPaginator + page_token_option: + type: RequestPath + page_size_option: + type: RequestOption + field_name: page_size + inject_into: request_parameter + pagination_strategy: + type: CursorPagination + page_size: 50 + cursor_value: '{{ response.get("_metadata", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("_metadata", {}).get("next", {}) }}' + - type: DeclarativeStream + name: singlesends + primary_key: + - id + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/schema# + properties: + categories: + items: + type: string + type: array + created_at: + format: date-time + type: string + id: + type: string + format: uuid + is_abtest: + type: boolean + name: + type: string + send_at: + format: date-time + type: + - "null" + - string + status: + type: string + updated_at: + format: date-time + type: string + type: object retriever: - $ref: "#/definitions/base_stream/retriever" + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.sendgrid.com + path: /v3/marketing/singlesends + http_method: GET + authenticator: + type: BearerAuthenticator + api_token: "{{ config['api_key'] }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - result paginator: - $ref: "#/definitions/offset_paginator" - - $ref: "#/definitions/base_stream" - $parameters: - name: "messages" - primary_key: "msg_id" - cursor_field: "last_event_time" - path: "/v3/messages" - field_path: [] - incremental_sync: - $ref: "#/definitions/incremental_sync" + type: DefaultPaginator + page_token_option: + type: RequestPath + page_size_option: + type: RequestOption + field_name: page_size + inject_into: request_parameter + pagination_strategy: + type: CursorPagination + page_size: 100 + cursor_value: '{{ response.get("_metadata", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("_metadata", {}).get("next", {}) }}' + - type: DeclarativeStream + name: templates + primary_key: + - id + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/schema# + properties: + generation: + type: string + id: + type: string + name: + type: string + updated_at: + format: date-time + type: ["null", string] + versions: + type: array + type: object retriever: - $ref: "#/definitions/base_stream/retriever" + type: SimpleRetriever requester: - $ref: "#/definitions/requester" + type: HttpRequester + url_base: https://api.sendgrid.com + path: /v3/templates + http_method: GET request_parameters: - limit: "1000" - query: 'last_event_time BETWEEN TIMESTAMP "{{stream_slice.start_time}}" AND TIMESTAMP "{{stream_slice.end_time}}"' - - $ref: "#/definitions/base_stream" - $parameters: - name: "unsubscribe_groups" - primary_key: "id" - path: "/v3/asm/groups" - field_path: [] -check: - type: CheckStream - stream_names: ["lists"] + generations: legacy,dynamic + authenticator: + type: BearerAuthenticator + api_token: "{{ config['api_key'] }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - result + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + page_size_option: + type: RequestOption + field_name: page_size + inject_into: request_parameter + pagination_strategy: + type: CursorPagination + page_size: 200 + cursor_value: '{{ response.get("_metadata", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("_metadata", {}).get("next", {}) }}' + - type: DeclarativeStream + name: campaigns + primary_key: + - id + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/schema# + properties: + id: + type: string + name: + type: string + created_at: + type: ["null", string] + format: date-time + status: + type: string + updated_at: + type: ["null", string] + format: date-time + is_abtest: + type: boolean + channels: + items: + type: string + type: array + type: object + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.sendgrid.com + path: /v3/marketing/campaigns + http_method: GET + authenticator: + type: BearerAuthenticator + api_token: "{{ config['api_key'] }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - result + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + page_size_option: + type: RequestOption + field_name: page_size + inject_into: request_parameter + pagination_strategy: + type: CursorPagination + page_size: 100 + cursor_value: '{{ response.get("_metadata", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("_metadata", {}).get("next", {}) }}' +spec: + connection_specification: + $schema: http://json-schema.org/draft-07/schema# + type: object + required: + - start_date + - api_key + properties: + start_date: + type: string + title: Start date + format: date-time + pattern: ^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$ + order: 0 + description: >- + UTC date and time in the format 2017-01-25T00:00:00Z. Any data before + this date will not be replicated. + api_key: + type: string + title: API Key + airbyte_secret: true + order: 1 + description: >- + Sendgrid API Key, use admin to + generate this key. + additionalProperties: true + type: Spec +metadata: + autoImportSchema: + bounces: false + spam_reports: false + global_suppressions: false + blocks: false + invalid_emails: false + suppression_groups: false + suppression_group_members: false + lists: false + segments: false + singlesend_stats: false + stats_automations: false + singlesends: false + templates: false + campaigns: false diff --git a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/run.py b/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/run.py index d2c1b798edb94..4ebf7a34e96b4 100644 --- a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/run.py +++ b/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/run.py @@ -7,8 +7,10 @@ from airbyte_cdk.entrypoint import launch from source_sendgrid import SourceSendgrid +from source_sendgrid.config_migrations import MigrateToLowcodeConfig def run(): source = SourceSendgrid() + MigrateToLowcodeConfig.migrate(sys.argv[1:], source) launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/blocks.json b/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/blocks.json deleted file mode 100644 index 7627436235cf4..0000000000000 --- a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/blocks.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "created": { - "type": "integer" - }, - "email": { - "type": "string" - }, - "reason": { - "type": "string" - }, - "status": { - "type": "string" - } - } -} diff --git a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/bounces.json b/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/bounces.json deleted file mode 100644 index 7627436235cf4..0000000000000 --- a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/bounces.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "created": { - "type": "integer" - }, - "email": { - "type": "string" - }, - "reason": { - "type": "string" - }, - "status": { - "type": "string" - } - } -} diff --git a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/campaigns.json b/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/campaigns.json deleted file mode 100644 index e74183532d9ab..0000000000000 --- a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/campaigns.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "id": { - "type": "string" - }, - "name": { - "type": "string" - }, - "created_at": { - "type": ["string", "null"], - "format": "date-time" - }, - "status": { - "type": "string" - }, - "updated_at": { - "type": ["string", "null"], - "format": "date-time" - }, - "is_abtest": { - "type": "boolean" - } - } -} diff --git a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/global_suppressions.json b/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/global_suppressions.json deleted file mode 100644 index 46864c307b9c5..0000000000000 --- a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/global_suppressions.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "created": { - "type": "integer" - }, - "email": { - "type": "string" - } - } -} diff --git a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/invalid_emails.json b/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/invalid_emails.json deleted file mode 100644 index 86c152f60fcb2..0000000000000 --- a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/invalid_emails.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "created": { - "type": "integer" - }, - "email": { - "type": "string" - }, - "reason": { - "type": "string" - } - } -} diff --git a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/lists.json b/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/lists.json deleted file mode 100644 index 5ab1094475581..0000000000000 --- a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/lists.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "id": { - "type": "string" - }, - "name": { - "type": "string" - }, - "contact_count": { - "type": "integer" - }, - "_metadata": { - "type": "object", - "properties": { - "self": { - "type": "string" - } - } - } - } -} diff --git a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/segments.json b/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/segments.json deleted file mode 100644 index 9deab8c9be557..0000000000000 --- a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/segments.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "id": { - "type": "string" - }, - "name": { - "type": "string" - }, - "contact_count": { - "type": "integer" - }, - "sample_updated_at": { - "type": ["string", "null"], - "format": "date-time" - }, - "next_sample_update": { - "type": ["string", "null"], - "format": "date-time" - }, - "created_at": { - "type": ["string", "null"], - "format": "date-time" - }, - "updated_at": { - "type": ["string", "null"], - "format": "date-time" - }, - "parent_list_id": { - "type": ["string", "null"] - } - } -} diff --git a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/single_sends.json b/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/single_sends.json deleted file mode 100644 index 7e7ef8dcbb4e6..0000000000000 --- a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/single_sends.json +++ /dev/null @@ -1,60 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "id": { - "type": "string", - "format": "uuid" - }, - "ab_phase": { - "type": "string" - }, - "ab_variation": { - "type": "string" - }, - "aggregation": { - "type": "string" - }, - "stats": { - "type": "object", - "properties": { - "bounce_drops": { - "type": "integer" - }, - "bounces": { - "type": "integer" - }, - "clicks": { - "type": "integer" - }, - "unique_clicks": { - "type": "integer" - }, - "delivered": { - "type": "integer" - }, - "invalid_emails": { - "type": "integer" - }, - "opens": { - "type": "integer" - }, - "unique_opens": { - "type": "integer" - }, - "requests": { - "type": "integer" - }, - "spam_report_drops": { - "type": "integer" - }, - "spam_reports": { - "type": "integer" - }, - "unsubscribes": { - "type": "integer" - } - } - } - } -} diff --git a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/spam_reports.json b/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/spam_reports.json deleted file mode 100644 index 3b5dc769810ed..0000000000000 --- a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/spam_reports.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "created": { - "type": "integer" - }, - "email": { - "type": "string" - }, - "ip": { - "type": "string" - } - } -} diff --git a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/stats_automations.json b/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/stats_automations.json deleted file mode 100644 index 4a4315b3f3495..0000000000000 --- a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/stats_automations.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "id": { - "type": "string" - }, - "stats": { - "type": "object", - "properties": { - "bounce_drops": { - "type": "integer" - }, - "bounces": { - "type": "integer" - }, - "clicks": { - "type": "integer" - }, - "delivered": { - "type": "integer" - }, - "invalid_emails": { - "type": "integer" - }, - "opens": { - "type": "integer" - }, - "requests": { - "type": "integer" - }, - "spam_report_drops": { - "type": "integer" - }, - "spam_reports": { - "type": "integer" - }, - "unique_clicks": { - "type": "integer" - }, - "unique_opens": { - "type": "integer" - }, - "unsubscribes": { - "type": "integer" - } - } - }, - "aggregation": { - "type": "string" - }, - "step_id": { - "type": "string" - } - } -} diff --git a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/suppression_group_members.json b/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/suppression_group_members.json deleted file mode 100644 index 8a70be42bd993..0000000000000 --- a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/suppression_group_members.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "email": { - "type": "string" - }, - "group_id": { - "type": "integer" - }, - "group_name": { - "type": "string" - }, - "created_at": { - "type": "integer" - } - } -} diff --git a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/suppression_groups.json b/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/suppression_groups.json deleted file mode 100644 index f9ebb4113fda0..0000000000000 --- a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/suppression_groups.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "id": { - "type": "integer" - }, - "name": { - "type": "string" - }, - "description": { - "type": "string" - }, - "is_default": { - "type": "boolean" - }, - "unsubscribes": { - "type": "integer" - } - } -} diff --git a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/templates.json b/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/templates.json deleted file mode 100644 index eccb7849eb6a2..0000000000000 --- a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/templates.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "id": { - "type": ["string"] - }, - "name": { - "type": ["null", "string"] - }, - "generation": { - "type": ["null", "string"] - }, - "updated_at": { - "type": ["null", "string"], - "format": "date-time" - }, - "versions": { - "type": ["array"] - } - } -} diff --git a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/unsubscribe_groups.json b/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/unsubscribe_groups.json deleted file mode 100644 index 47926609d1b1d..0000000000000 --- a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/schemas/unsubscribe_groups.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": true, - "type": "object", - "properties": { - "id": { - "type": ["null", "integer"] - }, - "name": { - "type": ["null", "string"] - }, - "description": { - "type": ["null", "string"] - }, - "last_email_sent_at": { - "type": ["null", "integer"] - }, - "is_default": { - "type": ["null", "boolean"] - }, - "unsubscribes": { - "type": ["null", "integer"] - } - } -} diff --git a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/source.py b/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/source.py index 9fea37eb9eff0..e4a7b5926a019 100644 --- a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/source.py +++ b/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/source.py @@ -2,70 +2,26 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # +from typing import Any, List, Mapping -from typing import Any, List, Mapping, Tuple - -import pendulum -from airbyte_cdk.models import SyncMode -from airbyte_cdk.sources import AbstractSource +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource from airbyte_cdk.sources.streams import Stream from airbyte_cdk.sources.streams.http.auth import TokenAuthenticator -from .streams import ( - Blocks, - Bounces, - Campaigns, - Contacts, - GlobalSuppressions, - InvalidEmails, - Lists, - Scopes, - Segments, - SingleSends, - SpamReports, - StatsAutomations, - SuppressionGroupMembers, - SuppressionGroups, - Templates, - UnsubscribeGroups, -) +from .streams import Contacts -class SourceSendgrid(AbstractSource): - def check_connection(self, logger, config) -> Tuple[bool, any]: - try: - start_time = config.get("start_time") - if start_time and isinstance(start_time, str): - pendulum.parse(start_time) - authenticator = TokenAuthenticator(config["apikey"]) - scopes_gen = Scopes(authenticator=authenticator).read_records(sync_mode=SyncMode.full_refresh) - next(scopes_gen) - return True, None - except pendulum.parsing.exceptions.ParserError: - return False, "Please, provide a valid Start Time parameter" - except Exception as error: - return False, f"Unable to connect to Sendgrid API with the provided credentials - {error}" +# Hybrid Declarative Source +class SourceSendgrid(YamlDeclarativeSource): + def __init__(self): + # this takes care of check and other methods + super().__init__(**{"path_to_yaml": "manifest.yaml"}) def streams(self, config: Mapping[str, Any]) -> List[Stream]: - authenticator = TokenAuthenticator(config["apikey"]) - start_time = config.get("start_time") - - streams = [ - Lists(authenticator=authenticator), - Campaigns(authenticator=authenticator), - Contacts(authenticator=authenticator), - StatsAutomations(authenticator=authenticator), - Segments(authenticator=authenticator), - SingleSends(authenticator=authenticator), - Templates(authenticator=authenticator), - GlobalSuppressions(authenticator=authenticator, start_time=start_time), - SuppressionGroups(authenticator=authenticator), - SuppressionGroupMembers(authenticator=authenticator), - Blocks(authenticator=authenticator, start_time=start_time), - Bounces(authenticator=authenticator, start_time=start_time), - InvalidEmails(authenticator=authenticator, start_time=start_time), - SpamReports(authenticator=authenticator, start_time=start_time), - UnsubscribeGroups(authenticator=authenticator), - ] - + # get all the lowcode streams + streams = super().streams(config) + authenticator = TokenAuthenticator(config["api_key"]) + # this stream download a csv file from sendgrid and emits the records + # it's not currently easy to do in lowcode, so we do it in python + streams.append(Contacts(authenticator=authenticator)) return streams diff --git a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/spec.json b/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/spec.json deleted file mode 100644 index ba932cf1f2f46..0000000000000 --- a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/spec.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.com/integrations/sources/sendgrid", - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Sendgrid Spec", - "type": "object", - "required": ["apikey"], - "additionalProperties": true, - "properties": { - "apikey": { - "title": "Sendgrid API key", - "airbyte_secret": true, - "type": "string", - "description": "API Key, use admin to generate this key.", - "order": 0 - }, - "start_time": { - "title": "Start time", - "type": "string", - "format": "date-time", - "pattern": "^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}(.\\d+)?Z$", - "description": "Start time in ISO8601 format. Any data before this time point will not be replicated.", - "examples": ["2020-01-01T01:01:01Z", "2020-01-01T01:01:01.000001Z"], - "order": 1 - } - } - } -} diff --git a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/streams.py b/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/streams.py index 68f1932665cde..87d9a06104eb1 100644 --- a/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/streams.py +++ b/airbyte-integrations/connectors/source-sendgrid/source_sendgrid/streams.py @@ -6,9 +6,9 @@ import os import time import zlib -from abc import ABC, abstractmethod +from abc import ABC from contextlib import closing -from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple +from typing import Any, Iterable, List, Mapping, Optional, Tuple from urllib.parse import urlparse import pandas as pd @@ -23,7 +23,7 @@ class SendgridStream(HttpStream, ABC): - url_base = "https://api.sendgrid.com/v3/" + url_base = "https://api.sendgrid.com/" primary_key = "id" limit = 50 data_field = None @@ -43,143 +43,7 @@ def parse_response( stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None, ) -> Iterable[Mapping]: - json_response = response.json() - records = json_response.get(self.data_field, []) if self.data_field is not None else json_response - - if records is not None: - for record in records: - yield record - else: - # TODO sendgrid's API is sending null responses at times. This seems like a bug on the API side, so we're adding - # log statements to help reproduce and prevent the connector from failing. - err_msg = ( - f"Response contained no valid JSON data. Response body: {response.text}\n" - f"Response status: {response.status_code}\n" - f"Response body: {response.text}\n" - f"Response headers: {response.headers}\n" - f"Request URL: {response.request.url}\n" - f"Request body: {response.request.body}\n" - ) - # do NOT print request headers as it contains auth token - self.logger.info(err_msg) - - def should_retry(self, response: requests.Response) -> bool: - """Override to provide skip the stream possibility""" - - status = response.status_code - if status in self.permission_error_codes.keys(): - for message in response.json().get("errors", []): - if message.get("message") == self.permission_error_codes.get(status): - self.logger.error( - f"Stream `{self.name}` is not available, due to subscription plan limitations or perrmission issues. Skipping." - ) - setattr(self, "raise_on_http_errors", False) - return False - return 500 <= response.status_code < 600 - - -class SendgridStreamOffsetPagination(SendgridStream): - offset = 0 - - def request_params(self, next_page_token: Mapping[str, Any] = None, **kwargs) -> MutableMapping[str, Any]: - params = super().request_params(next_page_token=next_page_token, **kwargs) - params["limit"] = self.limit - if next_page_token: - params.update(**next_page_token) - return params - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - stream_data = response.json() - if self.data_field: - stream_data = stream_data[self.data_field] - if len(stream_data) < self.limit: - return - self.offset += self.limit - return {"offset": self.offset} - - -class SendgridStreamIncrementalMixin(HttpStream, ABC): - cursor_field = "created" - - def __init__(self, start_time: Optional[str], **kwargs): - super().__init__(**kwargs) - self._start_time = start_time or 0 - if isinstance(self._start_time, str): - self._start_time = int(pendulum.parse(self._start_time).timestamp()) - - def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: - """ - Return the latest state by comparing the cursor value in the latest record with the stream's most recent state object - and returning an updated state object. - """ - latest_benchmark = latest_record[self.cursor_field] - if current_stream_state.get(self.cursor_field): - return {self.cursor_field: max(latest_benchmark, current_stream_state[self.cursor_field])} - return {self.cursor_field: latest_benchmark} - - def request_params(self, stream_state: Mapping[str, Any], **kwargs) -> MutableMapping[str, Any]: - params = super().request_params(stream_state=stream_state) - start_time = self._start_time - if stream_state.get(self.cursor_field): - start_time = stream_state[self.cursor_field] - params.update({"start_time": start_time, "end_time": pendulum.now().int_timestamp}) - return params - - -class SendgridStreamMetadataPagination(SendgridStream): - def request_params( - self, - stream_state: Mapping[str, Any], - stream_slice: Mapping[str, Any] = None, - next_page_token: Mapping[str, Any] = None, - ) -> MutableMapping[str, Any]: - params = {} - if not next_page_token: - params = {"page_size": self.limit} - return params - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - next_page_url = response.json()["_metadata"].get("next", False) - if next_page_url: - return {"next_page_url": next_page_url.replace(self.url_base, "")} - - @staticmethod - @abstractmethod - def initial_path() -> str: - """ - :return: initial path for the API endpoint if no next metadata url found - """ - - def path( - self, - stream_state: Mapping[str, Any] = None, - stream_slice: Mapping[str, Any] = None, - next_page_token: Mapping[str, Any] = None, - ) -> str: - if next_page_token: - return next_page_token["next_page_url"] - return self.initial_path() - - -class Scopes(SendgridStream): - def path(self, **kwargs) -> str: - return "scopes" - - -class Lists(SendgridStreamMetadataPagination): - data_field = "result" - - @staticmethod - def initial_path() -> str: - return "marketing/lists" - - -class Campaigns(SendgridStreamMetadataPagination): - data_field = "result" - - @staticmethod - def initial_path() -> str: - return "marketing/campaigns" + pass # not actually used because Contacts does read_records class Contacts(SendgridStream): @@ -190,7 +54,7 @@ class Contacts(SendgridStream): encoding = "utf-8" def path(self, **kwargs) -> str: - return "marketing/contacts/exports" + return "v3/marketing/contacts/exports" @default_backoff_handler(max_tries=5, factor=15) def _send_http_request(self, method: str, url: str, stream: bool = False, enable_auth: bool = True): @@ -334,95 +198,3 @@ def read_with_chunks(self, path: str, file_encoding: str, chunk_size: int = 100) finally: # remove binary tmp file, after data is read os.remove(path) - - -class StatsAutomations(SendgridStreamMetadataPagination): - data_field = "results" - - @staticmethod - def initial_path() -> str: - return "marketing/stats/automations" - - -class Segments(SendgridStream): - data_field = "results" - - def path(self, **kwargs) -> str: - return "marketing/segments" - - -class SingleSends(SendgridStreamMetadataPagination): - """ - https://docs.sendgrid.com/api-reference/marketing-campaign-stats/get-all-single-sends-stats - """ - - data_field = "results" - - @staticmethod - def initial_path() -> str: - return "marketing/stats/singlesends" - - -class Templates(SendgridStreamMetadataPagination): - data_field = "result" - - def request_params(self, next_page_token: Mapping[str, Any] = None, **kwargs) -> MutableMapping[str, Any]: - params = super().request_params(next_page_token=next_page_token, **kwargs) - params["generations"] = "legacy,dynamic" - return params - - @staticmethod - def initial_path() -> str: - return "templates" - - -class GlobalSuppressions(SendgridStreamOffsetPagination, SendgridStreamIncrementalMixin): - primary_key = "email" - - def path(self, **kwargs) -> str: - return "suppression/unsubscribes" - - -class SuppressionGroups(SendgridStream): - def path(self, **kwargs) -> str: - return "asm/groups" - - -class SuppressionGroupMembers(SendgridStreamOffsetPagination): - primary_key = "group_id" - - def path(self, **kwargs) -> str: - return "asm/suppressions" - - -class Blocks(SendgridStreamOffsetPagination, SendgridStreamIncrementalMixin): - primary_key = "email" - - def path(self, **kwargs) -> str: - return "suppression/blocks" - - -class Bounces(SendgridStream, SendgridStreamIncrementalMixin): - primary_key = "email" - - def path(self, **kwargs) -> str: - return "suppression/bounces" - - -class InvalidEmails(SendgridStreamOffsetPagination, SendgridStreamIncrementalMixin): - primary_key = "email" - - def path(self, **kwargs) -> str: - return "suppression/invalid_emails" - - -class SpamReports(SendgridStreamOffsetPagination, SendgridStreamIncrementalMixin): - primary_key = "email" - - def path(self, **kwargs) -> str: - return "suppression/spam_reports" - - -class UnsubscribeGroups(SendgridStream): - def path(self, **kwargs) -> str: - return "asm/groups" diff --git a/airbyte-integrations/connectors/source-sendgrid/unit_tests/test_migration.py b/airbyte-integrations/connectors/source-sendgrid/unit_tests/test_migration.py new file mode 100644 index 0000000000000..78850ed327004 --- /dev/null +++ b/airbyte-integrations/connectors/source-sendgrid/unit_tests/test_migration.py @@ -0,0 +1,67 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from unittest.mock import patch + +import pytest +from airbyte_cdk.entrypoint import AirbyteEntrypoint +from source_sendgrid.config_migrations import MigrateToLowcodeConfig +from source_sendgrid.source import SourceSendgrid + +# Test data for parametrized test +test_data = [ + ({"apikey": "secret_value1"}, {"apikey": "secret_value1", "api_key": "secret_value1", "start_date": "2009-08-01T00:00:00Z"}), + # Test when only apikey is present + ({"apikey": "secret_value1"}, {"apikey": "secret_value1", "api_key": "secret_value1", "start_date": "2009-08-01T00:00:00Z"}), + # Test having a time + ( + {"apikey": "secret_value2", "start_time": "2019-05-20T13:43:57Z"}, + {"apikey": "secret_value2", "start_time": "2019-05-20T13:43:57Z", "api_key": "secret_value2", "start_date": "2019-05-20T13:43:57Z"}, + ), + # Really old format + ( + {"apikey": "secret_value2", "start_time": "1558359837"}, + {"apikey": "secret_value2", "start_time": "1558359837", "api_key": "secret_value2", "start_date": "2019-05-20T13:43:57Z"}, + ), + # Test when the time has milliseconds + ( + {"apikey": "secret_value2", "start_time": "2019-05-20T13:43:57.000Z"}, + { + "apikey": "secret_value2", + "start_time": "2019-05-20T13:43:57.000Z", + "api_key": "secret_value2", + "start_date": "2019-05-20T13:43:57Z", + }, + ), + # Test when neither api_secret nor project_id are present + ({"other_key": "value"}, {"other_key": "value"}), + # test when other stuff is around + ( + {"other_key": "value", "apikey": "secret_value3"}, + {"other_key": "value", "apikey": "secret_value3", "api_key": "secret_value3", "start_date": "2009-08-01T00:00:00Z"}, + ), + # Test when it's already right + ( + {"api_key": "secret_value2", "start_date": "2019-05-20T13:43:57Z"}, + {"api_key": "secret_value2", "start_date": "2019-05-20T13:43:57Z"}, + ), +] + + +@pytest.mark.parametrize("test_config, expected", test_data) +@patch.object(AirbyteEntrypoint, "extract_config") +@patch.object(SourceSendgrid, "write_config") +@patch.object(SourceSendgrid, "read_config") +def test_transform_config(source_read_config_mock, source_write_config_mock, ab_entrypoint_extract_config_mock, test_config, expected): + source = SourceSendgrid() + + source_read_config_mock.return_value = test_config + ab_entrypoint_extract_config_mock.return_value = "/path/to/config.json" + + def check_migrated_value(new_config, path): + assert path == "/path/to/config.json" + assert new_config == expected + + source_write_config_mock.side_effect = check_migrated_value + MigrateToLowcodeConfig.migrate(["--config", "/path/to/config.json"], source) diff --git a/airbyte-integrations/connectors/source-sendgrid/unit_tests/unit_test.py b/airbyte-integrations/connectors/source-sendgrid/unit_tests/unit_test.py index eb663f30825b0..b85e0f38fa441 100644 --- a/airbyte-integrations/connectors/source-sendgrid/unit_tests/unit_test.py +++ b/airbyte-integrations/connectors/source-sendgrid/unit_tests/unit_test.py @@ -2,6 +2,7 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # +import json import os import unittest from unittest.mock import MagicMock, Mock, patch @@ -15,29 +16,21 @@ from numpy import nan from requests import codes from source_sendgrid.source import SourceSendgrid -from source_sendgrid.streams import ( - Blocks, - Bounces, - Campaigns, - Contacts, - GlobalSuppressions, - InvalidEmails, - Lists, - Segments, - SendgridStream, - SendgridStreamIncrementalMixin, - SendgridStreamOffsetPagination, - SpamReports, - SuppressionGroupMembers, - SuppressionGroups, - Templates, - UnsubscribeGroups, -) +from source_sendgrid.streams import Contacts, SendgridStream FAKE_NOW = pendulum.DateTime(2022, 1, 1, tzinfo=pendulum.timezone("utc")) FAKE_NOW_ISO_STRING = FAKE_NOW.to_iso8601_string() +def find_stream(stream_name): + streams = SourceSendgrid().streams(config={"api_key": "wrong.api.key123"}) + # find by name + for stream in streams: + if stream.name == stream_name: + return stream + raise ValueError(f"Stream {stream_name} not found") + + @pytest.fixture(name="sendgrid_stream") def sendgrid_stream_fixture(mocker) -> SendgridStream: # Wipe the internal list of abstract methods to allow instantiating @@ -54,110 +47,88 @@ def mock_pendulum_now(monkeypatch): monkeypatch.setattr(pendulum, "now", pendulum_mock) -def test_parse_response_gracefully_handles_nulls(mocker, sendgrid_stream: SendgridStream): - response = requests.Response() - mocker.patch.object(response, "json", return_value=None) - mocker.patch.object(response, "request", return_value=MagicMock()) - assert [] == list(sendgrid_stream.parse_response(response)) - - def test_source_wrong_credentials(): source = SourceSendgrid() - status, error = source.check_connection(logger=AirbyteLogger(), config={"apikey": "wrong.api.key123"}) + status, error = source.check_connection(logger=AirbyteLogger(), config={"api_key": "wrong.api.key123"}) assert not status def test_streams(): - streams = SourceSendgrid().streams(config={"apikey": "wrong.api.key123", "start_time": FAKE_NOW_ISO_STRING}) + streams = SourceSendgrid().streams(config={"api_key": "wrong.api.key123", "start_date": FAKE_NOW_ISO_STRING}) assert len(streams) == 15 -@patch.multiple(SendgridStreamOffsetPagination, __abstractmethods__=set(), data_field="result") -def test_pagination(mocker): - stream = SendgridStreamOffsetPagination() - state = {} - response = requests.Response() - mocker.patch.object(response, "json", return_value={"result": range(100)}) - mocker.patch.object(response, "request", return_value=MagicMock()) - next_page_token = stream.next_page_token(response) - request_params = stream.request_params(stream_state=state, next_page_token=next_page_token) - assert request_params == {"limit": 50, "offset": 50} - - -@patch.multiple(SendgridStreamIncrementalMixin, __abstractmethods__=set()) -def test_stream_state(): - stream = SendgridStreamIncrementalMixin(start_time=FAKE_NOW_ISO_STRING) - state = {} - request_params = stream.request_params(stream_state=state) - assert request_params == {"end_time": pendulum.now().int_timestamp, "start_time": int(FAKE_NOW.timestamp())} - - @pytest.mark.parametrize( - "stream_class, url , expected", + "stream_name, url , expected", ( - [Templates, "https://api.sendgrid.com/v3/templates", []], - [Lists, "https://api.sendgrid.com/v3/marketing/lists", []], - [Campaigns, "https://api.sendgrid.com/v3/marketing/campaigns", []], - [Segments, "https://api.sendgrid.com/v3/marketing/segments", []], - [Blocks, "https://api.sendgrid.com/v3/suppression/blocks", ["name", "id", "contact_count", "_metadata"]], - [SuppressionGroupMembers, "https://api.sendgrid.com/v3/asm/suppressions", ["name", "id", "contact_count", "_metadata"]], - [SuppressionGroups, "https://api.sendgrid.com/v3/asm/groups", ["name", "id", "contact_count", "_metadata"]], - [GlobalSuppressions, "https://api.sendgrid.com/v3/suppression/unsubscribes", ["name", "id", "contact_count", "_metadata"]], + ["templates", "https://api.sendgrid.com/v3/templates", []], + ["lists", "https://api.sendgrid.com/v3/marketing/lists", []], + ["campaigns", "https://api.sendgrid.com/v3/marketing/campaigns", []], + ["segments", "https://api.sendgrid.com/v3/marketing/segments/2.0", []], + ["blocks", "https://api.sendgrid.com/v3/suppression/blocks", ["name", "id", "contact_count", "_metadata"]], + ["suppression_group_members", "https://api.sendgrid.com/v3/asm/suppressions", ["name", "id", "contact_count", "_metadata"]], + ["suppression_groups", "https://api.sendgrid.com/v3/asm/groups", ["name", "id", "contact_count", "_metadata"]], + ["global_suppressions", "https://api.sendgrid.com/v3/suppression/unsubscribes", ["name", "id", "contact_count", "_metadata"]], ), ) def test_read_records( - stream_class, + stream_name, url, expected, requests_mock, ): - try: - stream = stream_class(start_time=FAKE_NOW) - except TypeError: - stream = stream_class() + stream = find_stream(stream_name) requests_mock.get("https://api.sendgrid.com/v3/marketing", json={}) requests_mock.get(url, json={"name": "test", "id": "id", "contact_count": 20, "_metadata": {"self": "self"}}) records = list(stream.read_records(sync_mode=SyncMode)) - - assert records == expected + if len(records) == 0: + assert [] == expected + else: + keys = list(records[0].keys()) + assert keys == expected @pytest.mark.parametrize( - "stream_class, expected", + "stream_name, expected", ( - [Templates, "templates"], - [Lists, "marketing/lists"], - [Campaigns, "marketing/campaigns"], - [Contacts, "marketing/contacts/exports"], - [Segments, "marketing/segments"], - [Blocks, "suppression/blocks"], - [SuppressionGroupMembers, "asm/suppressions"], - [SuppressionGroups, "asm/groups"], - [GlobalSuppressions, "suppression/unsubscribes"], - [Bounces, "suppression/bounces"], - [InvalidEmails, "suppression/invalid_emails"], - [SpamReports, "suppression/spam_reports"], - [UnsubscribeGroups, "asm/groups"], + ["templates", "v3/templates"], + ["lists", "v3/marketing/lists"], + ["campaigns", "v3/marketing/campaigns"], + ["contacts", "v3/marketing/contacts/exports"], + ["segments", "v3/marketing/segments/2.0"], + ["blocks", "v3/suppression/blocks"], + ["suppression_group_members", "v3/asm/suppressions"], + ["suppression_groups", "v3/asm/groups"], + ["global_suppressions", "v3/suppression/unsubscribes"], + ["bounces", "v3/suppression/bounces"], + ["invalid_emails", "v3/suppression/invalid_emails"], + ["spam_reports", "v3/suppression/spam_reports"], ), ) -def test_path(stream_class, expected): - stream = stream_class(Mock()) - assert stream.path() == expected +def test_path(stream_name, expected): + stream = find_stream(stream_name) + if hasattr(stream, "path"): + path = stream.path() # Contacts for example + else: + path = stream.retriever.requester.get_path(stream_state=None, stream_slice=None, next_page_token=None) + + assert path == expected @pytest.mark.parametrize( - "stream_class, status, expected", + "stream_name, status, expected", ( - (Blocks, 400, False), - (SuppressionGroupMembers, 401, False), + ("blocks", 400, False), + ("blocks", 429, True), + ("suppression_group_members", 401, False), ), ) -def test_should_retry_on_permission_error(stream_class, status, expected): - stream = stream_class(Mock()) +def test_should_retry_on_permission_error(stream_name, status, expected): + stream = find_stream(stream_name) response_mock = MagicMock() response_mock.status_code = status - assert stream.should_retry(response_mock) == expected + assert stream.retriever.requester._should_retry(response_mock) == expected def test_compressed_contact_response(requests_mock): @@ -220,17 +191,3 @@ def test_read_chunks_pd(): list(stream.read_with_chunks(path="file_not_exist.csv", file_encoding="utf-8")) with pytest.raises(FileNotFoundError): list(stream.read_with_chunks(path="file_not_exist.csv", file_encoding="utf-8")) - - -@pytest.mark.parametrize( - "current_stream_state, latest_record, expected_state", - ( - ({}, {"created": "7270247822"}, {"created": "7270247822"}), - ({"created": "7270247899"}, {"created": "7270247822"}, {"created": "7270247899"}), - ({"created": "7270247822"}, {"created": "7270247899"}, {"created": "7270247899"}), - ), -) -def test_get_updated_state(current_stream_state, latest_record, expected_state): - stream = Blocks(Mock()) - assert stream.get_updated_state(current_stream_state, latest_record) == expected_state - diff --git a/airbyte-integrations/connectors/source-sentry/.coveragerc b/airbyte-integrations/connectors/source-sentry/.coveragerc index 7489972784997..58f2b7bbbf836 100644 --- a/airbyte-integrations/connectors/source-sentry/.coveragerc +++ b/airbyte-integrations/connectors/source-sentry/.coveragerc @@ -1,3 +1,3 @@ [run] -omit = +omit = source_sentry/run.py diff --git a/airbyte-integrations/connectors/source-sentry/README.md b/airbyte-integrations/connectors/source-sentry/README.md index 5646c9f4be76f..a5651e8ee8e63 100644 --- a/airbyte-integrations/connectors/source-sentry/README.md +++ b/airbyte-integrations/connectors/source-sentry/README.md @@ -30,7 +30,7 @@ See `sample_files/sample_config.json` for a sample config file. poetry run source-sentry spec poetry run source-sentry check --config secrets/config.json poetry run source-sentry discover --config secrets/config.json -poetry run source-sentry read --config secrets/config.json --catalog sample_files/configured_catalog.json +poetry run source-sentry read --config secrets/config.json --catalog integration_tests/configured_catalog.json ``` ### Running unit tests diff --git a/airbyte-integrations/connectors/source-sentry/acceptance-test-config.yml b/airbyte-integrations/connectors/source-sentry/acceptance-test-config.yml index e694c493e7c59..aa5b998b91cbc 100644 --- a/airbyte-integrations/connectors/source-sentry/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-sentry/acceptance-test-config.yml @@ -4,7 +4,7 @@ acceptance_tests: - config_path: secrets/config.json empty_streams: - name: issues - bypass_reason: "Project sssues are not being returned by the Sentry API." + bypass_reason: "Project issues are not being returned by the Sentry API." - name: events bypass_reason: "No event records exist for the test project." timeout_seconds: 1200 @@ -22,10 +22,10 @@ acceptance_tests: full_refresh: tests: - config_path: secrets/config.json - configured_catalog_path: integration_tests/configured_catalog.json + configured_catalog_path: integration_tests/configured_catalog_full_refresh.json # test 403 exception is not breaking the sync - config_path: secrets/config_limited_scopes.json - configured_catalog_path: integration_tests/configured_catalog.json + configured_catalog_path: integration_tests/configured_catalog_full_refresh.json incremental: tests: - config_path: secrets/config.json diff --git a/airbyte-integrations/connectors/source-sentry/integration_tests/configured_catalog_full_refresh.json b/airbyte-integrations/connectors/source-sentry/integration_tests/configured_catalog_full_refresh.json new file mode 100644 index 0000000000000..e319b768ed979 --- /dev/null +++ b/airbyte-integrations/connectors/source-sentry/integration_tests/configured_catalog_full_refresh.json @@ -0,0 +1,31 @@ +{ + "streams": [ + { + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite", + "stream": { + "name": "project_detail", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + } + }, + { + "sync_mode": "incremental", + "destination_sync_mode": "append_dedup", + "stream": { + "name": "projects", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"] + } + }, + { + "sync_mode": "incremental", + "destination_sync_mode": "append_dedup", + "stream": { + "name": "releases", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"] + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-sentry/metadata.yaml b/airbyte-integrations/connectors/source-sentry/metadata.yaml index 4f69e04d253ac..3efe1b5e099d0 100644 --- a/airbyte-integrations/connectors/source-sentry/metadata.yaml +++ b/airbyte-integrations/connectors/source-sentry/metadata.yaml @@ -10,13 +10,17 @@ data: connectorSubtype: api connectorType: source definitionId: cdaf146a-9b75-49fd-9dd2-9d64a0bb4781 - dockerImageTag: 0.4.1 + dockerImageTag: 0.5.1 dockerRepository: airbyte/source-sentry documentationUrl: https://docs.airbyte.com/integrations/sources/sentry githubIssueLabel: source-sentry icon: sentry.svg license: MIT - maxSecondsBetweenMessages: 64800 + # Rate limit windows are returned in the response headers of the API requests. + # Senty docs (https://docs.sentry.io/api/ratelimits/) state a per-second rate limit, + # but the `x-rate-limit-reset` header has been observed to return a value of up to the next full minute from a request + # (e.g. request sent: 12:00:15, `x-rate-limit-reset` header value: 12:01:00). + maxSecondsBetweenMessages: 60 name: Sentry remoteRegistries: pypi: diff --git a/airbyte-integrations/connectors/source-sentry/poetry.lock b/airbyte-integrations/connectors/source-sentry/poetry.lock index bbca06bb4b3a4..9dfda269267a1 100644 --- a/airbyte-integrations/connectors/source-sentry/poetry.lock +++ b/airbyte-integrations/connectors/source-sentry/poetry.lock @@ -1,40 +1,39 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.58.2" +version = "0.78.3" description = "A framework for writing Airbyte Connectors." optional = false -python-versions = ">=3.8" +python-versions = "<4.0,>=3.9" files = [ - {file = "airbyte-cdk-0.58.2.tar.gz", hash = "sha256:5fe6b8b6f97059c812b50cdb37f193a3075aea15cf6c43dda2dab2b2fd059dd3"}, - {file = "airbyte_cdk-0.58.2-py3-none-any.whl", hash = "sha256:56cccc4275bbae65e3f26d90f10b5834600e911942a3624d91b341a7c57c2ec9"}, + {file = "airbyte_cdk-0.78.3-py3-none-any.whl", hash = "sha256:699d61ace9f8ca4477e06af3ff1bc56856e955a444081a1701c41d94629dcd74"}, + {file = "airbyte_cdk-0.78.3.tar.gz", hash = "sha256:192c2594d0e93140a7ec635fea3d4644318faada6aa986805752adf4caf9b126"}, ] [package.dependencies] airbyte-protocol-models = "0.5.1" backoff = "*" cachetools = "*" -Deprecated = ">=1.2,<2.0" +Deprecated = ">=1.2,<1.3" dpath = ">=2.0.1,<2.1.0" genson = "1.2.2" isodate = ">=0.6.1,<0.7.0" Jinja2 = ">=3.1.2,<3.2.0" -jsonref = ">=0.2,<1.0" +jsonref = ">=0.2,<0.3" jsonschema = ">=3.2.0,<3.3.0" pendulum = "<3.0.0" pydantic = ">=1.10.8,<2.0.0" pyrate-limiter = ">=3.1.0,<3.2.0" python-dateutil = "*" -PyYAML = ">=6.0.1" +PyYAML = ">=6.0.1,<7.0.0" requests = "*" -requests-cache = "*" +requests_cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] [[package]] @@ -104,13 +103,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -366,13 +365,13 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jsonref" -version = "0.3.0" -description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." +version = "0.2" +description = "An implementation of JSON Reference for Python" optional = false -python-versions = ">=3.3,<4.0" +python-versions = "*" files = [ - {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, - {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, ] [[package]] @@ -467,13 +466,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -685,30 +684,30 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xm [[package]] name = "pytest-mock" -version = "3.12.0" +version = "3.14.0" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, - {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, ] [package.dependencies] -pytest = ">=5.0" +pytest = ">=6.2.5" [package.extras] dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -808,13 +807,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -826,50 +825,48 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" -version = "1.11.0" +version = "1.12.1" description = "Mock out responses from the requests package" optional = false -python-versions = "*" +python-versions = ">=3.5" files = [ - {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"}, - {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"}, + {file = "requests-mock-1.12.1.tar.gz", hash = "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401"}, + {file = "requests_mock-1.12.1-py2.py3-none-any.whl", hash = "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563"}, ] [package.dependencies] -requests = ">=2.3,<3" -six = "*" +requests = ">=2.22,<3" [package.extras] fixture = ["fixtures"] -test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] [[package]] name = "setuptools" -version = "69.1.0" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -895,13 +892,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -920,13 +917,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1031,4 +1028,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "467cb54cd0e2b4a40734c43b04a3b90e2996e5b07aebe20307f83edea3e06973" +content-hash = "25d79195c052c9654e64e6cd73809188b3aa16bd228841f214ff871a895c9c6c" diff --git a/airbyte-integrations/connectors/source-sentry/pyproject.toml b/airbyte-integrations/connectors/source-sentry/pyproject.toml index ba550dd78fae6..25aaa871207ec 100644 --- a/airbyte-integrations/connectors/source-sentry/pyproject.toml +++ b/airbyte-integrations/connectors/source-sentry/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "0.4.1" +version = "0.5.1" name = "source-sentry" description = "Source implementation for Sentry." authors = [ "Airbyte ",] @@ -17,7 +17,7 @@ include = "source_sentry" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "==0.58.2" +airbyte-cdk = "^0" [tool.poetry.scripts] source-sentry = "source_sentry.run:run" diff --git a/airbyte-integrations/connectors/source-sentry/source_sentry/manifest.yaml b/airbyte-integrations/connectors/source-sentry/source_sentry/manifest.yaml index 4cd5508a78ebc..bf50ecd2b6072 100644 --- a/airbyte-integrations/connectors/source-sentry/source_sentry/manifest.yaml +++ b/airbyte-integrations/connectors/source-sentry/source_sentry/manifest.yaml @@ -1,105 +1,175 @@ -version: "0.29.0" +version: 0.57.0 +type: DeclarativeSource + definitions: - page_size: 50 schema_loader: type: JsonFileSchemaLoader - file_path: "./source_sentry/schemas/{{ parameters.name }}.json" - selector: + file_path: "./source_sentry/schemas/{{ parameters['name'] }}.json" + authenticator: + type: BearerAuthenticator + api_token: "{{ config['auth_token'] }}" + requester: + type: HttpRequester + url_base: "https://{{ config['hostname'] }}/api/0/" + http_method: GET + request_headers: {} + authenticator: + $ref: "#/definitions/authenticator" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: [] + filter_record_selector: type: RecordSelector extractor: type: DpathExtractor field_path: [] - requester: - type: HttpRequester - url_base: "https://{{ config.hostname }}/api/0/" - http_method: "GET" - authenticator: - type: "BearerAuthenticator" - api_token: "{{ config.auth_token }}" paginator: type: DefaultPaginator - page_size: "#/definitions/page_size" - limit_option: - inject_into: "request_parameter" - field_name: "" page_token_option: type: RequestOption - inject_into: "request_parameter" - field_name: "cursor" + inject_into: request_parameter + field_name: cursor pagination_strategy: - type: "CursorPagination" - cursor_value: "{{ headers.link.next.cursor }}" - stop_condition: "{{ headers.link.next.results != 'true' }}" + type: CursorPagination + cursor_value: '{{ headers["link"].get("next", {}).get("cursor", {}) }}' + stop_condition: '{{ headers["link"]["next"]["results"] == "false" }}' retriever: type: SimpleRetriever - -streams: - - type: DeclarativeStream - $parameters: - # https://docs.sentry.io/api/events/list-a-projects-events/ - name: "events" - primary_key: "id" + requester: + $ref: "#/definitions/requester" + record_selector: + $ref: "#/definitions/record_selector" + paginator: + $ref: "#/definitions/paginator" + partition_router: [] + retriever_with_filter: + type: SimpleRetriever + requester: + $ref: "#/definitions/requester" + record_selector: + $ref: "#/definitions/record_selector" + record_filter: + condition: "{{ record[parameters['cursor_field']] > stream_state.get(parameters['cursor_field'], '') }}" + paginator: + $ref: "#/definitions/paginator" + partition_router: [] + incremental_sync: + type: DatetimeBasedCursor + cursor_field: "{{ parameters['cursor_field'] }}" + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%SZ" + - "%Y-%m-%dT%H:%M:%S.%f%z" + - "%Y-%m-%dT%H:%M:%S%z" + datetime_format: "%Y-%m-%dT%H:%M:%S.%f%z" + start_datetime: + type: MinMaxDatetime + datetime: "1900-01-01T00:00:00.0Z" + datetime_format: "%Y-%m-%dT%H:%M:%S.%fZ" + is_data_feed: true + base_stream_full_refresh: schema_loader: $ref: "#/definitions/schema_loader" retriever: $ref: "#/definitions/retriever" - record_selector: - $ref: "#/definitions/selector" + base_stream_incremental: + schema_loader: + $ref: "#/definitions/schema_loader" + retriever: + $ref: "#/definitions/retriever_with_filter" + incremental_sync: + $ref: "#/definitions/incremental_sync" + + # Stream Events https://docs.sentry.io/api/events/list-a-projects-error-events/ + events: + type: DeclarativeStream + $parameters: + name: "events" + primary_key: "id" + path: "projects/{{ config['organization'] }}/{{ config['project'] }}/events/" + cursor_field: "dateCreated" + retriever: + type: SimpleRetriever requester: $ref: "#/definitions/requester" - path: "projects/{{config.organization}}/{{config.project}}/events/" request_parameters: full: "true" + record_selector: + $ref: "#/definitions/record_selector" + record_filter: + condition: "{{ record[parameters['cursor_field']] > stream_state.get(parameters['cursor_field'], '') }}" paginator: $ref: "#/definitions/paginator" - - type: DeclarativeStream + incremental_sync: + $ref: "#/definitions/incremental_sync" + + # Stream Issues https://docs.sentry.io/api/events/list-a-projects-issues/ + issues: + type: DeclarativeStream $parameters: name: "issues" - primary_key: "id" - schema_loader: - $ref: "#/definitions/schema_loader" + primary_key: "id" + path: "projects/{{ config['organization'] }}/{{ config['project'] }}/issues/" + cursor_field: "lastSeen" retriever: - $ref: "#/definitions/retriever" - record_selector: - $ref: "#/definitions/selector" + type: SimpleRetriever requester: $ref: "#/definitions/requester" - path: "projects/{{config.organization}}/{{config.project}}/issues/" request_parameters: - statsPeriod: "" - query: "" + query: "lastSeen:>{{ stream_state.get(parameters['cursor_field']) or '1900-01-01T00:00:00.0Z' if stream_state else '1900-01-01T00:00:00.0Z' }}" + record_selector: + $ref: "#/definitions/record_selector" paginator: $ref: "#/definitions/paginator" - - type: DeclarativeStream + incremental_sync: + $ref: "#/definitions/incremental_sync" + + # Stream Projects https://docs.sentry.io/api/projects/list-your-projects/ + projects: + type: DeclarativeStream + $ref: "#/definitions/base_stream_incremental" $parameters: name: "projects" - primary_key: "id" - schema_loader: - $ref: "#/definitions/schema_loader" - retriever: - $ref: "#/definitions/retriever" - record_selector: - $ref: "#/definitions/selector" - requester: - $ref: "#/definitions/requester" - path: "projects/" - paginator: - $ref: "#/definitions/paginator" - - type: DeclarativeStream + primary_key: "id" + path: "projects/" + cursor_field: "dateCreated" + + # Stream Project Detail https://docs.sentry.io/api/projects/retrieve-a-project/ + project_detail: + type: DeclarativeStream + $ref: "#/definitions/base_stream_full_refresh" $parameters: name: "project_detail" - primary_key: "id" - schema_loader: - $ref: "#/definitions/schema_loader" - retriever: - $ref: "#/definitions/retriever" - record_selector: - $ref: "#/definitions/selector" - requester: - $ref: "#/definitions/requester" - path: "projects/{{config.organization}}/{{config.project}}/" - paginator: - type: NoPagination + primary_key: "id" + path: "projects/{{ config['organization'] }}/{{ config['project'] }}/" + + # Stream Releases https://docs.sentry.io/api/projects/retrieve-a-project/ + releases: + type: DeclarativeStream + $ref: "#/definitions/base_stream_incremental" + $parameters: + name: "releases" + primary_key: "id" + path: "organizations/{{ config['organization'] }}/releases/" + cursor_field: "dateCreated" + +streams: + - $ref: "#/definitions/events" + - $ref: "#/definitions/issues" + - $ref: "#/definitions/projects" + - $ref: "#/definitions/project_detail" + - $ref: "#/definitions/releases" + check: type: CheckStream - stream_names: ["project_detail"] + stream_names: + - project_detail + +metadata: + autoImportSchema: + events: true + issues: true + projects: true + project_detail: true + releases: true diff --git a/airbyte-integrations/connectors/source-sentry/source_sentry/schemas/project_detail.json b/airbyte-integrations/connectors/source-sentry/source_sentry/schemas/project_detail.json index 16e132f4bfff1..2e413aa8dab21 100644 --- a/airbyte-integrations/connectors/source-sentry/source_sentry/schemas/project_detail.json +++ b/airbyte-integrations/connectors/source-sentry/source_sentry/schemas/project_detail.json @@ -174,6 +174,21 @@ }, "sentry:reprocessing_active": { "type": ["boolean", "null"] + }, + "filters:chunk-load-error": { + "type": ["boolean", "null"] + }, + "filters:react-hydration-errors": { + "type": ["boolean", "null"] + }, + "quotas:spike-protection-disabled": { + "type": ["boolean", "null"] + }, + "sentry:feedback_user_report_notification": { + "type": ["boolean", "null"] + }, + "sentry:replay_rage_click_issues": { + "type": ["integer", "null"] } } }, @@ -223,6 +238,29 @@ "type": ["string", "null"] } } + }, + "hasAuthProvider": { + "type": ["null", "boolean"] + }, + "features": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"] + } + }, + "links": { + "type": ["null", "object"], + "properties": { + "organizationUrl": { + "type": ["null", "string"] + }, + "regionUrl": { + "type": ["null", "string"] + } + } + }, + "requireEmailVerification": { + "type": ["null", "boolean"] } } }, diff --git a/airbyte-integrations/connectors/source-sentry/source_sentry/schemas/releases.json b/airbyte-integrations/connectors/source-sentry/source_sentry/schemas/releases.json index 5ecc0f17f0ef4..d657acba2ebbf 100644 --- a/airbyte-integrations/connectors/source-sentry/source_sentry/schemas/releases.json +++ b/airbyte-integrations/connectors/source-sentry/source_sentry/schemas/releases.json @@ -67,6 +67,24 @@ }, "slug": { "type": ["null", "string"] + }, + "hasHealthData": { + "type": ["null", "boolean"] + }, + "id": { + "type": ["null", "integer"] + }, + "newGroups": { + "type": ["null", "integer"] + }, + "platform": { + "type": ["null", "string"] + }, + "platforms": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"] + } } } } diff --git a/airbyte-integrations/connectors/source-sentry/source_sentry/source.py b/airbyte-integrations/connectors/source-sentry/source_sentry/source.py index 464f6188496a1..5f86fd62dd088 100644 --- a/airbyte-integrations/connectors/source-sentry/source_sentry/source.py +++ b/airbyte-integrations/connectors/source-sentry/source_sentry/source.py @@ -2,46 +2,9 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource -from typing import Any, List, Mapping, Tuple -from airbyte_cdk.models import SyncMode -from airbyte_cdk.sources import AbstractSource -from airbyte_cdk.sources.streams import Stream -from airbyte_cdk.sources.streams.http.auth import TokenAuthenticator - -from .streams import Events, Issues, ProjectDetail, Projects, Releases - - -# Source -class SourceSentry(AbstractSource): - def check_connection(self, logger, config) -> Tuple[bool, Any]: - try: - stream = ProjectDetail( - authenticator=TokenAuthenticator(token=config["auth_token"]), - hostname=config.get("hostname"), - organization=config.get("organization"), - project=config.get("project"), - ) - next(stream.read_records(sync_mode=SyncMode.full_refresh)) - return True, None - except Exception as e: - return False, e - - def streams(self, config: Mapping[str, Any]) -> List[Stream]: - stream_args = { - "authenticator": TokenAuthenticator(token=config["auth_token"]), - "hostname": config.get("hostname"), - } - project_stream_args = { - **stream_args, - "organization": config["organization"], - "project": config["project"], - } - return [ - Events(**project_stream_args), - Issues(**project_stream_args), - ProjectDetail(**project_stream_args), - Projects(**stream_args), - Releases(**project_stream_args), - ] +class SourceSentry(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "manifest.yaml"}) diff --git a/airbyte-integrations/connectors/source-sentry/source_sentry/streams.py b/airbyte-integrations/connectors/source-sentry/source_sentry/streams.py deleted file mode 100644 index 1482228b362ab..0000000000000 --- a/airbyte-integrations/connectors/source-sentry/source_sentry/streams.py +++ /dev/null @@ -1,265 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from abc import ABC -from typing import Any, Dict, Iterable, Mapping, MutableMapping, Optional - -import pendulum -import requests -from airbyte_cdk.sources.streams import IncrementalMixin -from airbyte_cdk.sources.streams.http import HttpStream - - -class SentryStream(HttpStream, ABC): - API_VERSION = "0" - URL_TEMPLATE = "https://{hostname}/api/{api_version}/" - primary_key = "id" - - def __init__(self, hostname: str, **kwargs): - super().__init__(**kwargs) - self._url_base = self.URL_TEMPLATE.format(hostname=hostname, api_version=self.API_VERSION) - # hardcode the start_date default value, since it's not present in spec. - self.start_date = "1900-01-01T00:00:00.0Z" - - @property - def url_base(self) -> str: - return self._url_base - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - return None - - def request_params( - self, - stream_state: Mapping[str, Any], - stream_slice: Optional[Mapping[str, Any]] = None, - next_page_token: Optional[Mapping[str, Any]] = None, - ) -> MutableMapping[str, Any]: - return {} - - -class SentryStreamPagination(SentryStream): - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - """ - Expect the link header field to always contain the values ​​for `rel`, `results`, and `cursor`. - If there is actually the next page, rel="next"; results="true"; cursor="". - """ - if response.links["next"]["results"] == "true": - return {"cursor": response.links["next"]["cursor"]} - else: - return None - - def request_params( - self, - stream_state: Mapping[str, Any], - stream_slice: Optional[Mapping[str, Any]] = None, - next_page_token: Optional[Mapping[str, Any]] = None, - ) -> MutableMapping[str, Any]: - params = super().request_params(stream_state, stream_slice, next_page_token) - if next_page_token: - params.update(next_page_token) - - return params - - def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - yield from response.json() - - -class SentryIncremental(SentryStreamPagination, IncrementalMixin): - def __init__(self, *args, **kwargs): - super(SentryIncremental, self).__init__(*args, **kwargs) - self._cursor_value = None - - def validate_state_value(self, state_value: str = None) -> str: - none_or_empty = state_value == "None" if state_value else True - return self.start_date if none_or_empty else state_value - - def get_state_value(self, stream_state: Mapping[str, Any] = None) -> str: - state_value = self.validate_state_value(stream_state.get(self.cursor_field, self.start_date) if stream_state else self.start_date) - return pendulum.parse(state_value) - - def filter_by_state(self, stream_state: Mapping[str, Any] = None, record: Mapping[str, Any] = None) -> Iterable: - """ - Endpoint does not provide query filtering params, but they provide us - cursor field in most cases, so we used that as incremental filtering - during the parsing. - """ - if pendulum.parse(record[self.cursor_field]) > self.get_state_value(stream_state): - # Persist state. - # There is a bug in state setter: because of self._cursor_value is not defined it raises Attribute error - # which is ignored in airbyte_cdk/sources/abstract_source.py:320 and we have an empty state in return - # See: https://github.com/airbytehq/oncall/issues/1317 - self.state = record - yield record - - def parse_response(self, response: requests.Response, stream_state: Mapping[str, Any], **kwargs) -> Iterable[MutableMapping]: - json_response = response.json() or [] - - for record in json_response: - yield from self.filter_by_state(stream_state=stream_state, record=record) - - @property - def state(self) -> Mapping[str, Any]: - return {self.cursor_field: self._cursor_value} - - @state.setter - def state(self, value: Mapping[str, Any]): - """ - Define state as a max between given value and current state - """ - if not self._cursor_value: - self._cursor_value = value.get(self.cursor_field) - else: - current_value = value.get(self.cursor_field) or self.start_date - current_state = str(self.get_state_value(self.state)) - self._cursor_value = max(current_value, current_state) - - -class Events(SentryIncremental): - """ - Docs: https://docs.sentry.io/api/events/list-a-projects-error-events/ - """ - - primary_key = "id" - cursor_field = "dateCreated" - - def __init__(self, organization: str, project: str, **kwargs): - super().__init__(**kwargs) - self._organization = organization - self._project = project - - def path( - self, - stream_state: Optional[Mapping[str, Any]] = None, - stream_slice: Optional[Mapping[str, Any]] = None, - next_page_token: Optional[Mapping[str, Any]] = None, - ) -> str: - return f"projects/{self._organization}/{self._project}/events/" - - def request_params( - self, - stream_state: Mapping[str, Any], - stream_slice: Optional[Mapping[str, Any]] = None, - next_page_token: Optional[Mapping[str, Any]] = None, - ) -> MutableMapping[str, Any]: - params = super().request_params(stream_state, stream_slice, next_page_token) - params.update({"full": "true"}) - - return params - - -class Issues(SentryIncremental): - """ - Docs: https://docs.sentry.io/api/events/list-a-projects-issues/ - """ - - primary_key = "id" - cursor_field = "lastSeen" - - def __init__(self, organization: str, project: str, **kwargs): - super().__init__(**kwargs) - self._organization = organization - self._project = project - - def path( - self, - stream_state: Optional[Mapping[str, Any]] = None, - stream_slice: Optional[Mapping[str, Any]] = None, - next_page_token: Optional[Mapping[str, Any]] = None, - ) -> str: - return f"projects/{self._organization}/{self._project}/issues/" - - def request_params( - self, - stream_state: Mapping[str, Any], - stream_slice: Optional[Mapping[str, Any]] = None, - next_page_token: Optional[Mapping[str, Any]] = None, - ) -> MutableMapping[str, Any]: - params = super().request_params(stream_state, stream_slice, next_page_token) - filter_date = self._get_filter_date(stream_state) - params.update(self._build_query_params(filter_date)) - return params - - def _get_filter_date(self, stream_state: Optional[Mapping[str, Any]]) -> str: - """Retrieve the filter date from the stream state or use the start_date.""" - return stream_state.get(self.cursor_field) or self.start_date if stream_state else self.start_date - - def _build_query_params(self, filter_date: str) -> Dict[str, str]: - """Generate query parameters for the request.""" - filter_date_iso = pendulum.parse(filter_date).to_iso8601_string() - return {"statsPeriod": "", "query": f"lastSeen:>{filter_date_iso}"} - - def parse_response(self, response: requests.Response, stream_state: Mapping[str, Any], **kwargs) -> Iterable[MutableMapping]: - json_response = response.json() or [] - - for record in json_response: - cursor_value = self._get_cursor_value(record, stream_state) - self.state = {self.cursor_field: cursor_value} - yield record - - def _get_cursor_value(self, record: Dict[str, Any], stream_state: Mapping[str, Any]) -> pendulum.datetime: - """Compute the maximum cursor value based on the record and stream state.""" - record_time = record[self.cursor_field] - state_time = str(self.get_state_value(stream_state)) - return max(record_time, state_time) - - -class Projects(SentryIncremental): - """ - Docs: https://docs.sentry.io/api/projects/list-your-projects/ - """ - - primary_key = "id" - cursor_field = "dateCreated" - - def path( - self, - stream_state: Optional[Mapping[str, Any]] = None, - stream_slice: Optional[Mapping[str, Any]] = None, - next_page_token: Optional[Mapping[str, Any]] = None, - ) -> str: - return "projects/" - - -class ProjectDetail(SentryStream): - """ - Docs: https://docs.sentry.io/api/projects/retrieve-a-project/ - """ - - def __init__(self, organization: str, project: str, **kwargs): - super().__init__(**kwargs) - self._organization = organization - self._project = project - - def path( - self, - stream_state: Optional[Mapping[str, Any]] = None, - stream_slice: Optional[Mapping[str, Any]] = None, - next_page_token: Optional[Mapping[str, Any]] = None, - ) -> str: - return f"projects/{self._organization}/{self._project}/" - - def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - yield response.json() - - -class Releases(SentryIncremental): - """ - Docs: https://docs.sentry.io/api/releases/list-an-organizations-releases/ - """ - - primary_key = "id" - cursor_field = "dateCreated" - - def __init__(self, organization: str, project: str, **kwargs): - super().__init__(**kwargs) - self._organization = organization - - def path( - self, - stream_state: Optional[Mapping[str, Any]] = None, - stream_slice: Optional[Mapping[str, Any]] = None, - next_page_token: Optional[Mapping[str, Any]] = None, - ) -> str: - return f"organizations/{self._organization}/releases/" diff --git a/airbyte-integrations/connectors/source-sentry/unit_tests/integration/config_builder.py b/airbyte-integrations/connectors/source-sentry/unit_tests/integration/config_builder.py new file mode 100644 index 0000000000000..0c5af692c5af2 --- /dev/null +++ b/airbyte-integrations/connectors/source-sentry/unit_tests/integration/config_builder.py @@ -0,0 +1,17 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + + +from typing import Any, Dict + + +class ConfigBuilder: + def __init__(self) -> None: + self._config: Dict[str, Any] = { + "auth_token": "test token", + "organization": "test organization", + "project": "test project", + "hostname": "sentry.io" + } + + def build(self) -> Dict[str, Any]: + return self._config diff --git a/airbyte-integrations/connectors/source-sentry/unit_tests/integration/test_events_stream.py b/airbyte-integrations/connectors/source-sentry/unit_tests/integration/test_events_stream.py new file mode 100644 index 0000000000000..7e13aabbab22c --- /dev/null +++ b/airbyte-integrations/connectors/source-sentry/unit_tests/integration/test_events_stream.py @@ -0,0 +1,53 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +import json +from unittest import TestCase + +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import read +from airbyte_cdk.test.mock_http import HttpMocker, HttpRequest, HttpResponse +from airbyte_cdk.test.mock_http.response_builder import find_template +from airbyte_cdk.test.state_builder import StateBuilder +from airbyte_protocol.models import SyncMode +from config_builder import ConfigBuilder +from source_sentry.source import SourceSentry + + +class TestEvents(TestCase): + fr_read_file = "events_full_refresh" + inc_read_file = "events_incremental" + + def catalog(self, sync_mode: SyncMode = SyncMode.full_refresh): + return CatalogBuilder().with_stream(name="events", sync_mode=sync_mode).build() + + def config(self): + return ConfigBuilder().build() + + def state(self): + return StateBuilder().with_stream_state(stream_name="events", state={"dateCreated": "2023-01-01T00:00:00.0Z"}).build() + + @HttpMocker() + def test_read(self, http_mocker: HttpMocker): + http_mocker.get( + HttpRequest( + url="https://sentry.io/api/0/projects/test%20organization/test%20project/events/", + query_params={"full": "true"} + ), + HttpResponse(body=json.dumps(find_template(self.fr_read_file, __file__)), status_code=200) + + ) + output = read(SourceSentry(), self.config(), self.catalog()) + assert len(output.records) == 1 + + @HttpMocker() + def test_read_incremental(self, http_mocker: HttpMocker): + http_mocker.get( + HttpRequest( + url="https://sentry.io/api/0/projects/test%20organization/test%20project/events/", + query_params={"full": "true"} + ), + HttpResponse(body=json.dumps(find_template(self.inc_read_file, __file__)), status_code=200) + + ) + output = read(SourceSentry(), self.config(), self.catalog(SyncMode.incremental), self.state()) + assert len(output.records) == 2 diff --git a/airbyte-integrations/connectors/source-sentry/unit_tests/integration/test_issues_stream.py b/airbyte-integrations/connectors/source-sentry/unit_tests/integration/test_issues_stream.py new file mode 100644 index 0000000000000..e9665a7854bba --- /dev/null +++ b/airbyte-integrations/connectors/source-sentry/unit_tests/integration/test_issues_stream.py @@ -0,0 +1,54 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +import json +from unittest import TestCase + +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import read +from airbyte_cdk.test.mock_http import HttpMocker, HttpRequest, HttpResponse +from airbyte_cdk.test.mock_http.response_builder import find_template +from airbyte_cdk.test.state_builder import StateBuilder +from airbyte_protocol.models import SyncMode +from config_builder import ConfigBuilder +from source_sentry.source import SourceSentry + + +class TestEvents(TestCase): + fr_read_file = "issues_full_refresh" + inc_read_file = "issues_incremental" + + def catalog(self, sync_mode: SyncMode = SyncMode.full_refresh): + return CatalogBuilder().with_stream(name="issues", sync_mode=sync_mode).build() + + def config(self): + return ConfigBuilder().build() + + def state(self): + return StateBuilder().with_stream_state(stream_name="issues", state={"lastSeen": "2023-01-01T00:00:00.0Z"}).build() + + @HttpMocker() + def test_read(self, http_mocker: HttpMocker): + http_mocker.get( + HttpRequest( + url="https://sentry.io/api/0/projects/test%20organization/test%20project/issues/", + query_params={"query": "lastSeen:>1900-01-01T00:00:00.0Z"} + ), + HttpResponse(body=json.dumps(find_template(self.fr_read_file, __file__)), status_code=200) + + ) + # https://sentry.io/api/1/projects/airbyte-09/airbyte-09/issues/?query=lastSeen%3A%3E2022-01-01T00%3A00%3A00.0Z + output = read(SourceSentry(), self.config(), self.catalog()) + assert len(output.records) == 1 + + @HttpMocker() + def test_read_incremental(self, http_mocker: HttpMocker): + http_mocker.get( + HttpRequest( + url="https://sentry.io/api/0/projects/test%20organization/test%20project/issues/", + query_params={"query": "lastSeen:>2023-01-01T00:00:00.0Z"} + ), + HttpResponse(body=json.dumps(find_template(self.inc_read_file, __file__)), status_code=200) + + ) + output = read(SourceSentry(), self.config(), self.catalog(SyncMode.incremental), self.state()) + assert len(output.records) == 2 diff --git a/airbyte-integrations/connectors/source-sentry/unit_tests/resource/http/response/events_full_refresh.json b/airbyte-integrations/connectors/source-sentry/unit_tests/resource/http/response/events_full_refresh.json new file mode 100644 index 0000000000000..fe17f7bb7bb1e --- /dev/null +++ b/airbyte-integrations/connectors/source-sentry/unit_tests/resource/http/response/events_full_refresh.json @@ -0,0 +1,47 @@ +[ + { + "eventID": "9fac2ceed9344f2bbfdd1fdacb0ed9b1", + "tags": [ + { + "key": "browser", + "value": "Chrome 60.0" + }, + { + "key": "device", + "value": "Other" + }, + { + "key": "environment", + "value": "production" + }, + { + "value": "fatal", + "key": "level" + }, + { + "key": "os", + "value": "Mac OS X 10.12.6" + }, + { + "value": "CPython 2.7.16", + "key": "runtime" + }, + { + "key": "release", + "value": "17642328ead24b51867165985996d04b29310337" + }, + { + "key": "server_name", + "value": "web1.example.com" + } + ], + "dateCreated": "2022-09-02T15:01:28.946777Z", + "user": null, + "message": "", + "title": "This is an example Python exception", + "id": "dfb1a2d057194e76a4186cc8a5271553", + "platform": "python", + "event.type": "error", + "groupID": "1889724436" + } +] diff --git a/airbyte-integrations/connectors/source-sentry/unit_tests/resource/http/response/events_incremental.json b/airbyte-integrations/connectors/source-sentry/unit_tests/resource/http/response/events_incremental.json new file mode 100644 index 0000000000000..deaa38ca831f5 --- /dev/null +++ b/airbyte-integrations/connectors/source-sentry/unit_tests/resource/http/response/events_incremental.json @@ -0,0 +1,50 @@ +[ + { + "eventID": "9fac2ceed9344f2bbfdd1fdacb0ed9b1", + "tags": [ + { "key": "browser", "value": "Chrome 60.0" }, + { + "key": "device", + "value": "Other" + }, + { "key": "environment", "value": "production" }, + { "value": "fatal", "key": "level" }, + { "key": "os", "value": "Mac OS X 10.12.6" }, + { "value": "CPython 2.7.16", "key": "runtime" }, + { "key": "release", "value": "17642328ead24b51867165985996d04b29310337" }, + { "key": "server_name", "value": "web1.example.com" } + ], + "dateCreated": "2023-02-01T00:00:00.0Z", + "user": null, + "message": "", + "title": "This is an example Python exception", + "id": "dfb1a2d057194e76a4186cc8a5271553", + "platform": "python", + "event.type": "error", + "groupID": "1889724436" + }, + { + "eventID": "9fac2ceed9344f2bbfdd1fdacb0ed9b1", + "tags": [ + { "key": "browser", "value": "Chrome 60.0" }, + { + "key": "device", + "value": "Other" + }, + { "key": "environment", "value": "production" }, + { "value": "fatal", "key": "level" }, + { "key": "os", "value": "Mac OS X 10.12.6" }, + { "value": "CPython 2.7.16", "key": "runtime" }, + { "key": "release", "value": "17642328ead24b51867165985996d04b29310337" }, + { "key": "server_name", "value": "web1.example.com" } + ], + "dateCreated": "2024-03-31T15:03:36+00:00", + "user": null, + "message": "", + "title": "This is an example Python exception", + "id": "dfb1a2d057194e76a4186cc8a5271553", + "platform": "python", + "event.type": "error", + "groupID": "1889724436" + } +] diff --git a/airbyte-integrations/connectors/source-sentry/unit_tests/resource/http/response/issues_full_refresh.json b/airbyte-integrations/connectors/source-sentry/unit_tests/resource/http/response/issues_full_refresh.json new file mode 100644 index 0000000000000..248792c45c5d1 --- /dev/null +++ b/airbyte-integrations/connectors/source-sentry/unit_tests/resource/http/response/issues_full_refresh.json @@ -0,0 +1,42 @@ +[ + { + "annotations": [], + "assignedTo": null, + "count": "1", + "culprit": "raven.scripts.runner in main", + "firstSeen": "2018-11-06T21:19:55Z", + "hasSeen": false, + "id": "1", + "isBookmarked": false, + "isPublic": false, + "isSubscribed": true, + "lastSeen": "2018-11-06T21:19:55Z", + "level": "error", + "logger": null, + "metadata": { + "title": "This is an example Python exception" + }, + "numComments": 0, + "permalink": "https://sentry.io/the-interstellar-jurisdiction/pump-station/issues/1/", + "project": { + "id": "2", + "name": "Pump Station", + "slug": "pump-station" + }, + "shareId": null, + "shortId": "PUMP-STATION-1", + "stats": { + "24h": [ + [1541455200, 473], + [1541458800, 914], + [1541462400, 991] + ] + }, + "status": "unresolved", + "statusDetails": {}, + "subscriptionDetails": null, + "title": "This is an example Python exception", + "type": "default", + "userCount": 0 + } +] diff --git a/airbyte-integrations/connectors/source-sentry/unit_tests/resource/http/response/issues_incremental.json b/airbyte-integrations/connectors/source-sentry/unit_tests/resource/http/response/issues_incremental.json new file mode 100644 index 0000000000000..8a61b03d35723 --- /dev/null +++ b/airbyte-integrations/connectors/source-sentry/unit_tests/resource/http/response/issues_incremental.json @@ -0,0 +1,82 @@ +[ + { + "annotations": [], + "assignedTo": null, + "count": "1", + "culprit": "raven.scripts.runner in main", + "firstSeen": "2018-11-06T21:19:55Z", + "hasSeen": false, + "id": "1", + "isBookmarked": false, + "isPublic": false, + "isSubscribed": true, + "lastSeen": "2023-02-02T00:00:00.0Z", + "level": "error", + "logger": null, + "metadata": { + "title": "This is an example Python exception" + }, + "numComments": 0, + "permalink": "https://sentry.io/the-interstellar-jurisdiction/pump-station/issues/1/", + "project": { + "id": "2", + "name": "Pump Station", + "slug": "pump-station" + }, + "shareId": null, + "shortId": "PUMP-STATION-1", + "stats": { + "24h": [ + [1541455200, 473], + [1541458800, 914], + [1541462400, 991] + ] + }, + "status": "unresolved", + "statusDetails": {}, + "subscriptionDetails": null, + "title": "This is an example Python exception", + "type": "default", + "userCount": 0 + }, + { + "annotations": [], + "assignedTo": null, + "count": "1", + "culprit": "raven.scripts.runner in main", + "firstSeen": "2018-11-06T21:19:55Z", + "hasSeen": false, + "id": "1", + "isBookmarked": false, + "isPublic": false, + "isSubscribed": true, + "lastSeen": "2023-01-02T00:00:00.0Z", + "level": "error", + "logger": null, + "metadata": { + "title": "This is an example Python exception" + }, + "numComments": 0, + "permalink": "https://sentry.io/the-interstellar-jurisdiction/pump-station/issues/1/", + "project": { + "id": "2", + "name": "Pump Station", + "slug": "pump-station" + }, + "shareId": null, + "shortId": "PUMP-STATION-1", + "stats": { + "24h": [ + [1541455200, 473], + [1541458800, 914], + [1541462400, 991] + ] + }, + "status": "unresolved", + "statusDetails": {}, + "subscriptionDetails": null, + "title": "This is an example Python exception", + "type": "default", + "userCount": 0 + } +] diff --git a/airbyte-integrations/connectors/source-sentry/unit_tests/test_source.py b/airbyte-integrations/connectors/source-sentry/unit_tests/test_source.py index 385f2625e45e2..c28cd169d0fa8 100644 --- a/airbyte-integrations/connectors/source-sentry/unit_tests/test_source.py +++ b/airbyte-integrations/connectors/source-sentry/unit_tests/test_source.py @@ -6,7 +6,6 @@ from airbyte_cdk.logger import AirbyteLogger from source_sentry.source import SourceSentry -from source_sentry.streams import ProjectDetail def test_source_wrong_credentials(requests_mock): @@ -15,11 +14,12 @@ def test_source_wrong_credentials(requests_mock): assert not status -def test_check_connection(mocker): +def test_check_connection(requests_mock): source = SourceSentry() - logger_mock, config_mock = MagicMock(), MagicMock() - mocker.patch.object(ProjectDetail, "read_records", return_value=iter([{"id": "1", "name": "test"}])) - assert source.check_connection(logger_mock, config_mock) == (True, None) + logger_mock = MagicMock() + requests_mock.get(url="https://sentry.io/api/0/projects/test-org/test-project/", json={"id": "id", "name": "test-project"}) + config = {"auth_token": "token", "organization": "test-org", "project": "test-project", "hostname": "sentry.io"} + assert source.check_connection(logger_mock, config) == (True, None) def test_streams(mocker): diff --git a/airbyte-integrations/connectors/source-sentry/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-sentry/unit_tests/test_streams.py index 87376d158902a..4f41688c2990f 100644 --- a/airbyte-integrations/connectors/source-sentry/unit_tests/test_streams.py +++ b/airbyte-integrations/connectors/source-sentry/unit_tests/test_streams.py @@ -2,186 +2,139 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # -from unittest.mock import MagicMock, Mock, patch +from unittest.mock import MagicMock -import pendulum as pdm import pytest -import requests -from source_sentry.streams import Events, Issues, ProjectDetail, Projects, SentryIncremental, SentryStreamPagination +from airbyte_protocol.models import SyncMode +from source_sentry import SourceSentry INIT_ARGS = {"hostname": "sentry.io", "organization": "test-org", "project": "test-project"} -@pytest.fixture -def patch_base_class(mocker): - # Mock abstract methods to enable instantiating abstract class - mocker.patch.object(SentryStreamPagination, "path", "test_endpoint") - mocker.patch.object(SentryStreamPagination, "__abstractmethods__", set()) +def get_stream_by_name(stream_name): + streams = SourceSentry().streams(config=INIT_ARGS) + for stream in streams: + if stream.name == stream_name: + return stream + raise ValueError(f"Stream {stream_name} not found") -def test_next_page_token(patch_base_class): - stream = SentryStreamPagination(hostname="sentry.io") - resp = MagicMock() - cursor = "next_page_num" - resp.links = {"next": {"results": "true", "cursor": cursor}} - inputs = {"response": resp} - expected_token = {"cursor": cursor} - assert stream.next_page_token(**inputs) == expected_token +def test_next_page_token(): + stream = get_stream_by_name("events") + response_mock = MagicMock() + response_mock.headers = {} + response_mock.links = {"next": {"cursor": "next-page"}} + assert stream.retriever.paginator.pagination_strategy.next_page_token(response=response_mock, last_records=[]) == "next-page" -def test_next_page_token_is_none(patch_base_class): - stream = SentryStreamPagination(hostname="sentry.io") - resp = MagicMock() - resp.links = {"next": {"results": "false", "cursor": "no_next"}} - inputs = {"response": resp} - expected_token = None - assert stream.next_page_token(**inputs) == expected_token - - -def next_page_token_inputs(): - links_headers = [ - {}, - {"next": {}}, - ] - responses = [MagicMock() for _ in links_headers] - for mock, header in zip(responses, links_headers): - mock.links = header - - return responses - - -@pytest.mark.parametrize("response", next_page_token_inputs()) -def test_next_page_token_raises(patch_base_class, response): - stream = SentryStreamPagination(hostname="sentry.io") - inputs = {"response": response} - with pytest.raises(KeyError): - stream.next_page_token(**inputs) +def test_next_page_token_is_none(): + stream = get_stream_by_name("events") + response_mock = MagicMock() + response_mock.headers = {} + # stop condition: "results": "false" + response_mock.links = {"next": {"cursor": "", "results": "false"}} + assert stream.retriever.paginator.pagination_strategy.next_page_token(response=response_mock, last_records=[]) is None def test_events_path(): - stream = Events(**INIT_ARGS) + stream = get_stream_by_name("events") expected = "projects/test-org/test-project/events/" - assert stream.path() == expected + assert stream.retriever.requester.get_path(stream_state=None, stream_slice=None, next_page_token=None) == expected def test_issues_path(): - stream = Issues(**INIT_ARGS) + stream = get_stream_by_name("issues") expected = "projects/test-org/test-project/issues/" - assert stream.path() == expected + assert stream.retriever.requester.get_path(stream_state=None, stream_slice=None, next_page_token=None) == expected def test_projects_path(): - stream = Projects(hostname="sentry.io") + stream = get_stream_by_name("projects") expected = "projects/" - assert stream.path() == expected + assert stream.retriever.requester.get_path(stream_state=None, stream_slice=None, next_page_token=None) == expected def test_project_detail_path(): - stream = ProjectDetail(**INIT_ARGS) + stream = get_stream_by_name("project_detail") expected = "projects/test-org/test-project/" - assert stream.path() == expected - - -def test_sentry_stream_pagination_request_params(patch_base_class): - stream = SentryStreamPagination(hostname="sentry.io") - expected = {"cursor": "next-page"} - assert stream.request_params(stream_state=None, next_page_token={"cursor": "next-page"}) == expected + assert stream.retriever.requester.get_path(stream_state=None, stream_slice=None, next_page_token=None) == expected def test_events_request_params(): - stream = Events(**INIT_ARGS) - expected = {"cursor": "next-page", "full": "true"} - assert stream.request_params(stream_state=None, next_page_token={"cursor": "next-page"}) == expected + stream = get_stream_by_name("events") + assert stream.retriever.requester.get_request_params(stream_state=None, stream_slice=None, next_page_token=None) == {"full": "true"} def test_issues_request_params(): - stream = Issues(**INIT_ARGS) - expected = {"cursor": "next-page", "statsPeriod": "", "query": "lastSeen:>1900-01-01T00:00:00Z"} - assert stream.request_params(stream_state=None, next_page_token={"cursor": "next-page"}) == expected + stream = get_stream_by_name("issues") + expected = {"query": "lastSeen:>1900-01-01T00:00:00.0Z"} + assert stream.retriever.requester.get_request_params(stream_state=None, stream_slice=None, next_page_token=None) == expected def test_projects_request_params(): - stream = Projects(hostname="sentry.io") - expected = {"cursor": "next-page"} - assert stream.request_params(stream_state=None, next_page_token={"cursor": "next-page"}) == expected + stream = get_stream_by_name("projects") + expected = "next-page" + response_mock = MagicMock() + response_mock.headers = {} + response_mock.links = {"next": {"cursor": expected}} + assert stream.retriever.paginator.pagination_strategy.next_page_token(response=response_mock, last_records=[]) == expected def test_project_detail_request_params(): - stream = ProjectDetail(**INIT_ARGS) + stream = get_stream_by_name("project_detail") expected = {} - assert stream.request_params(stream_state=None, next_page_token=None) == expected - -def test_issues_parse_response(mocker): - with patch('source_sentry.streams.Issues._get_cursor_value') as mock_get_cursor_value: - stream = Issues(**INIT_ARGS) - mock_get_cursor_value.return_value = "time" - state = {} - response = requests.Response() - mocker.patch.object(response, "json", return_value=[{"id": "1"}]) - result = list(stream.parse_response(response, state)) - assert result[0] == {"id": "1"} - -def test_project_detail_parse_response(mocker): - stream = ProjectDetail(organization="test_org", project="test_proj", hostname="sentry.io") - response = requests.Response() - response.json = Mock(return_value={"id": "1"}) - result = list(stream.parse_response(response)) - assert result[0] == {"id": "1"} - -class MockSentryIncremental(SentryIncremental): - def path(): - return '/test/path' - -def test_sentry_incremental_parse_response(mocker): - with patch('source_sentry.streams.SentryIncremental.filter_by_state') as mock_filter_by_state: - stream = MockSentryIncremental(hostname="sentry.io") - mock_filter_by_state.return_value = True - state = None - response = requests.Response() - mocker.patch.object(response, "json", return_value=[{"id": "1"}]) - mock_filter_by_state.return_value = iter(response.json()) - result = list(stream.parse_response(response, state)) - print(result) - assert result[0] == {"id": "1"} + assert stream.retriever.requester.get_request_params(stream_state=None, stream_slice=None, next_page_token=None) == expected + + +def test_project_detail_parse_response(requests_mock): + expected = {"id": "1", "name": "test project"} + stream = get_stream_by_name("project_detail") + requests_mock.get( + "https://sentry.io/api/0/projects/test-org/test-project/", + json=expected + ) + result = list(stream.read_records(sync_mode=SyncMode.full_refresh))[0] + assert expected == result.data @pytest.mark.parametrize( "state, expected", [ - ({}, "1900-01-01T00:00:00.0Z"), - ({"dateCreated": ""}, "1900-01-01T00:00:00.0Z"), - ({"dateCreated": "None"}, "1900-01-01T00:00:00.0Z"), + ({}, None), + ({"dateCreated": ""}, None), ({"dateCreated": "2023-01-01T00:00:00.0Z"}, "2023-01-01T00:00:00.0Z"), ], ids=[ "No State", "State is Empty String", - "State is 'None'", "State is present", ], ) -def test_validate_state_value(state, expected): - stream = Events(**INIT_ARGS) - state_value = state.get(stream.cursor_field) - assert stream.validate_state_value(state_value) == expected +def test_events_validate_state_value(state, expected): + # low code cdk sets state to none if it does not exist, py version used 1900-01-01 as state in this case. + # Instead, record condition will pass all records that were fetched and state will be updated after. + stream = get_stream_by_name("events") + stream.retriever.state = state + assert stream.state.get(stream.cursor_field) == expected @pytest.mark.parametrize( "state, expected", [ - ({}, "1900-01-01T00:00:00.0Z"), - ({"dateCreated": ""}, "1900-01-01T00:00:00.0Z"), - ({"dateCreated": "None"}, "1900-01-01T00:00:00.0Z"), - ({"dateCreated": "2023-01-01T00:00:00.0Z"}, "2023-01-01T00:00:00.0Z"), + ({}, None), + ({"lastSeen": ""}, None), + ({"lastSeen": "2023-01-01T00:00:00.0Z"}, "2023-01-01T00:00:00.0Z"), ], ids=[ "No State", "State is Empty String", - "State is 'None'", "State is present", ], ) -def test_get_state_value(state, expected): - stream = Events(**INIT_ARGS) - # we expect the datetime object out of get_state_value method. - assert stream.get_state_value(state) == pdm.parse(expected) +def test_issues_validate_state_value(state, expected): + # low code cdk sets state to none if it does not exist, py version used 1900-01-01 as state in this case. + # Instead, record condition will pass all records that were fetched and state will be updated after. + stream = get_stream_by_name("issues") + stream.retriever.state = state + assert stream.state.get(stream.cursor_field) == expected + diff --git a/airbyte-integrations/connectors/source-sftp-bulk/.coveragerc b/airbyte-integrations/connectors/source-sftp-bulk/.coveragerc new file mode 100644 index 0000000000000..ca742005beca5 --- /dev/null +++ b/airbyte-integrations/connectors/source-sftp-bulk/.coveragerc @@ -0,0 +1,3 @@ +[run] +omit = + source_sftp_bulk/run.py diff --git a/airbyte-integrations/connectors/source-sftp-bulk/Dockerfile b/airbyte-integrations/connectors/source-sftp-bulk/Dockerfile deleted file mode 100644 index acf70324f9156..0000000000000 --- a/airbyte-integrations/connectors/source-sftp-bulk/Dockerfile +++ /dev/null @@ -1,17 +0,0 @@ -FROM python:3.9-slim -# FROM python:3.9.11-alpine3.15 - -# Bash is installed for more convenient debugging. -# RUN apt-get update && apt-get install -y bash && rm -rf /var/lib/apt/lists/* - -WORKDIR /airbyte/integration_code -COPY source_sftp_bulk ./source_sftp_bulk -COPY main.py ./ -COPY setup.py ./ -RUN pip install . - -ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] - -LABEL io.airbyte.version=0.1.2 -LABEL io.airbyte.name=airbyte/source-sftp-bulk diff --git a/airbyte-integrations/connectors/source-sftp-bulk/README.md b/airbyte-integrations/connectors/source-sftp-bulk/README.md index 3aa14ff408082..e5f94c665b717 100644 --- a/airbyte-integrations/connectors/source-sftp-bulk/README.md +++ b/airbyte-integrations/connectors/source-sftp-bulk/README.md @@ -1,68 +1,55 @@ -# SFTP Bulk Source +# Sftp-Bulk source connector -This is the repository for the FTP source connector, written in Python, that helps you bulk ingest files with the same data format from an FTP server into a single stream. -For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/sftp-bulk). + +This is the repository for the Sftp-Bulk source connector, written in Python. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.com/integrations/sources/sftp-bulk). ## Local development ### Prerequisites -**To iterate on this connector, make sure to complete this prerequisites section.** - -#### Minimum Python version required `= 3.9.0` +* Python (~=3.9) +* Poetry (~=1.7) - installation instructions [here](https://python-poetry.org/docs/#installation) -#### Build & Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python -m venv .venv -``` -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt +### Installing the connector +From this connector directory, run: +```bash +poetry install --with dev ``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. -#### Create credentials -**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/sftp-bulk) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_sftp_bulk/spec.json` file. -Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. -See `integration_tests/sample_config.json` for a sample config file. +### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/sources/sftp-bulk) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_sftp_bulk/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `sample_files/sample_config.json` for a sample config file. -**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source ftp test creds` -and place them into `secrets/config.json`. ### Locally running the connector ``` -python main.py spec -python main.py check --config secrets/config.json -python main.py discover --config secrets/config.json -python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +poetry run source-sftp-bulk spec +poetry run source-sftp-bulk check --config secrets/config.json +poetry run source-sftp-bulk discover --config secrets/config.json +poetry run source-sftp-bulk read --config secrets/config.json --catalog sample_files/configured_catalog.json ``` -### Locally running the connector docker image - +### Running unit tests +To run unit tests locally, from the connector directory run: +``` +poetry run pytest unit_tests +``` -#### Build -**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):** +### Building the docker image +1. Install [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) +2. Run the following command to build the docker image: ```bash airbyte-ci connectors --name=source-sftp-bulk build ``` -An image will be built with the tag `airbyte/source-sftp-bulk:dev`. +An image will be available on your host with the tag `airbyte/source-sftp-bulk:dev`. -**Via `docker build`:** -```bash -docker build -t airbyte/source-sftp-bulk:dev . -``` -#### Run +### Running as a docker container Then run any of the connector commands as follows: ``` docker run --rm airbyte/source-sftp-bulk:dev spec @@ -71,29 +58,34 @@ docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-sftp-bulk:dev discover docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-sftp-bulk:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json ``` -## Testing +### Running our CI test suite You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): ```bash airbyte-ci connectors --name=source-sftp-bulk test ``` ### Customizing acceptance Tests -Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. +Customize `acceptance-test-config.yml` file to configure acceptance tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. -## Dependency Management -All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. -We split dependencies between two groups, dependencies that are: -* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. -* required for the testing need to go to `TEST_REQUIREMENTS` list +### Dependency Management +All of your dependencies should be managed via Poetry. +To add a new dependency, run: +```bash +poetry add +``` + +Please commit the changes to `pyproject.toml` and `poetry.lock` files. -### Publishing a new version of the connector +## Publishing a new version of the connector You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? 1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-sftp-bulk test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). +2. Bump the connector version (please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors)): + - bump the `dockerImageTag` value in in `metadata.yaml` + - bump the `version` value in `pyproject.toml` 3. Make sure the `metadata.yaml` content is up to date. -4. Make the connector documentation and its changelog is up to date (`docs/integrations/sources/sftp-bulk.md`). +4. Make sure the connector documentation and its changelog is up to date (`docs/integrations/sources/sftp-bulk.md`). 5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). 6. Pat yourself on the back for being an awesome contributor. 7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. - +8. Once your PR is merged, the new version of the connector will be automatically published to Docker Hub and our connector registry. \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-sftp-bulk/acceptance-test-config.yml b/airbyte-integrations/connectors/source-sftp-bulk/acceptance-test-config.yml index 744fcd04eaed2..4581d5c879fb2 100644 --- a/airbyte-integrations/connectors/source-sftp-bulk/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-sftp-bulk/acceptance-test-config.yml @@ -1,27 +1,20 @@ # See [Connector Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/connector-acceptance-tests-reference) # for more information about how to configure these tests connector_image: airbyte/source-sftp-bulk:dev -tests: +acceptance_tests: spec: - - spec_path: "source_sftp_bulk/spec.json" - timeout_seconds: 60 + tests: + - spec_path: "integration_tests/spec.json" + timeout_seconds: 60 + backward_compatibility_tests_config: + disable_for_version: 0.3.2 # `start_date` format changed to format: date-time connection: - - config_path: "integration_tests/valid_config.json" - status: "succeed" - timeout_seconds: 60 - - config_path: "integration_tests/invalid_config.json" - status: "failed" - timeout_seconds: 60 + bypass_reason: "This connector uses integration tests" discovery: - - config_path: "integration_tests/valid_config.json" + bypass_reason: "This connector uses integration tests" basic_read: - - config_path: "integration_tests/valid_config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" - empty_streams: [] - incremental: - - config_path: "integration_tests/valid_config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" - future_state_path: "integration_tests/abnormal_state.json" + bypass_reason: "This connector uses integration tests" full_refresh: - - config_path: "integration_tests/valid_config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" + bypass_reason: "This connector uses integration tests" + incremental: + bypass_reason: "This connector uses integration tests" diff --git a/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/acceptance.py index 6b89ce9a495d5..3835062857f3e 100644 --- a/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/acceptance.py +++ b/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/acceptance.py @@ -1,7 +1,6 @@ # # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # - import os import shutil import time @@ -17,25 +16,20 @@ @pytest.fixture(scope="session", autouse=True) def connector_setup(): - dir_path = os.getcwd() + "/integration_tests/files" - if os.path.exists(TMP_FOLDER): shutil.rmtree(TMP_FOLDER) - shutil.copytree(dir_path, TMP_FOLDER) - + shutil.copytree(f"{os.path.dirname(__file__)}/files", TMP_FOLDER) docker_client = docker.from_env() - container = docker_client.containers.run( "atmoz/sftp", "foo:pass", - name=f"mysftpacceptance_{uuid.uuid4().hex}", - ports={22: 1122}, + name=f"mysftp_acceptance_{uuid.uuid4().hex}", + ports={22: ("0.0.0.0", 2222)}, volumes={ f"{TMP_FOLDER}": {"bind": "/home/foo/files", "mode": "rw"}, }, detach=True, ) - time.sleep(5) yield diff --git a/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/configs/config_password.json b/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/configs/config_password.json new file mode 100644 index 0000000000000..dec8cff7be5ab --- /dev/null +++ b/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/configs/config_password.json @@ -0,0 +1,54 @@ +{ + "host": "localhost", + "port": 2222, + "username": "foo", + "credentials": { + "auth_type": "password", + "password": "pass" + }, + "file_type": "json", + "start_date": "2021-01-01T00:00:00.000000Z", + "folder_path": "/files", + "streams": [ + { + "name": "test_stream", + "file_type": "csv", + "globs": ["**/test_1.csv"], + "legacy_prefix": "", + "validation_policy": "Emit Record", + "format": { + "filetype": "csv", + "delimiter": ",", + "quote_char": "\"", + "double_quote": true, + "null_values": [ + "", + "#N/A", + "#N/A N/A", + "#NA", + "-1.#IND", + "-1.#QNAN", + "-NaN", + "-nan", + "1.#IND", + "1.#QNAN", + "N/A", + "NA", + "NULL", + "NaN", + "n/a", + "nan", + "null" + ], + "true_values": ["1", "True", "TRUE", "true"], + "false_values": ["0", "False", "FALSE", "false"], + "inference_type": "Primitive Types Only", + "strings_can_be_null": false, + "encoding": "utf8", + "header_definition": { + "header_definition_type": "From CSV" + } + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/configs/config_private_key.json b/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/configs/config_private_key.json new file mode 100644 index 0000000000000..eaac38174eba0 --- /dev/null +++ b/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/configs/config_private_key.json @@ -0,0 +1,54 @@ +{ + "host": "localhost", + "port": 2222, + "username": "foo", + "credentials": { + "auth_type": "private_key", + "private_key": "key" + }, + "file_type": "json", + "start_date": "2021-01-01T00:00:00.000000Z", + "folder_path": "/files", + "streams": [ + { + "name": "test_stream", + "file_type": "csv", + "globs": ["**/test_1.csv"], + "legacy_prefix": "", + "validation_policy": "Emit Record", + "format": { + "filetype": "csv", + "delimiter": ",", + "quote_char": "\"", + "double_quote": true, + "null_values": [ + "", + "#N/A", + "#N/A N/A", + "#NA", + "-1.#IND", + "-1.#QNAN", + "-NaN", + "-nan", + "1.#IND", + "1.#QNAN", + "N/A", + "NA", + "NULL", + "NaN", + "n/a", + "nan", + "null" + ], + "true_values": ["1", "True", "TRUE", "true"], + "false_values": ["0", "False", "FALSE", "false"], + "inference_type": "Primitive Types Only", + "strings_can_be_null": false, + "encoding": "utf8", + "header_definition": { + "header_definition_type": "From CSV" + } + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/configs/stream_csv.json b/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/configs/stream_csv.json new file mode 100644 index 0000000000000..a023079b9b49f --- /dev/null +++ b/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/configs/stream_csv.json @@ -0,0 +1,44 @@ +{ + "streams": [ + { + "name": "test_stream", + "file_type": "csv", + "globs": ["**/test_1.csv", "**/test_3.csv"], + "legacy_prefix": "", + "validation_policy": "Emit Record", + "format": { + "filetype": "csv", + "delimiter": ",", + "quote_char": "\"", + "double_quote": true, + "null_values": [ + "", + "#N/A", + "#N/A N/A", + "#NA", + "-1.#IND", + "-1.#QNAN", + "-NaN", + "-nan", + "1.#IND", + "1.#QNAN", + "N/A", + "NA", + "NULL", + "NaN", + "n/a", + "nan", + "null" + ], + "true_values": ["1", "True", "TRUE", "true"], + "false_values": ["0", "False", "FALSE", "false"], + "inference_type": "Primitive Types Only", + "strings_can_be_null": false, + "encoding": "utf8", + "header_definition": { + "header_definition_type": "From CSV" + } + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/configs/stream_jsonl.json b/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/configs/stream_jsonl.json new file mode 100644 index 0000000000000..e969c517488ca --- /dev/null +++ b/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/configs/stream_jsonl.json @@ -0,0 +1,13 @@ +{ + "streams": [ + { + "name": "test_stream", + "file_type": "jsonl", + "globs": ["**/*.jsonl"], + "validation_policy": "Emit Record", + "format": { + "filetype": "jsonl" + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/configured_catalog.json index c16d82b80a128..13d265ea9d4b9 100644 --- a/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/configured_catalog.json @@ -2,19 +2,28 @@ "streams": [ { "stream": { - "name": "test_stream", + "name": "test", + + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["_ab_source_file_last_modified"], "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", "type": "object", "properties": { - "string_col": { "type": "str" }, - "int_col": { "type": "integer" }, - "last_modified": { "type": "string", "format": "date-time" } + "_ab_source_file_last_modified": { + "type": "string" + }, + "_ab_source_file_url": { + "type": "string" + }, + "string_col": { + "type": ["null", "string"] + }, + "int_col": { + "type": ["null", "integer"] + } } - }, - "supported_sync_modes": ["incremental"], - "default_cursor_field": ["last_modified"], - "source_defined_cursor": true + } }, "sync_mode": "incremental", "cursor_field": ["last_modified"], diff --git a/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/conftest.py b/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/conftest.py new file mode 100644 index 0000000000000..2c5ead6159ce2 --- /dev/null +++ b/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/conftest.py @@ -0,0 +1,125 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + + +import logging +import os +import shutil +import time +import uuid +from io import StringIO +from typing import Any, Mapping, Tuple + +import docker +import paramiko +import pytest +from airbyte_cdk.models import AirbyteStream, ConfiguredAirbyteCatalog, ConfiguredAirbyteStream, DestinationSyncMode, SyncMode + +from .utils import get_docker_ip, load_config + +logger = logging.getLogger("airbyte") + +PRIVATE_KEY = str() +TMP_FOLDER = "/tmp/test_sftp_source" + + +# HELPERS +def generate_ssh_keys() -> Tuple[str, str]: + key = paramiko.RSAKey.generate(2048) + privateString = StringIO() + key.write_private_key(privateString) + + return privateString.getvalue(), "ssh-rsa " + key.get_base64() + + +@pytest.fixture(scope="session") +def docker_client() -> docker.client.DockerClient: + return docker.from_env() + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup_fixture(docker_client) -> None: + ssh_path = TMP_FOLDER + "/ssh" + dir_path = os.path.dirname(__file__) + if os.path.exists(TMP_FOLDER): + shutil.rmtree(TMP_FOLDER) + + shutil.copytree(f"{dir_path}/files", TMP_FOLDER) + os.makedirs(ssh_path) + private_key, public_key = generate_ssh_keys() + global PRIVATE_KEY + PRIVATE_KEY = private_key + pub_key_path = ssh_path + "/id_rsa.pub" + with open(pub_key_path, "w") as f: + f.write(public_key) + config = load_config("config_password.json") + container = docker_client.containers.run( + "atmoz/sftp", + f"{config['username']}:{config['credentials']['password']}", + name=f"mysftp_integration_{uuid.uuid4().hex}", + ports={22: ("0.0.0.0", config["port"])}, + volumes={ + f"{TMP_FOLDER}": {"bind": "/home/foo/files", "mode": "rw"}, + f"{pub_key_path}": {"bind": "/home/foo/.ssh/keys/id_rsa.pub", "mode": "ro"}, + }, + detach=True, + ) + time.sleep(10) + + yield + + container.kill() + container.remove() + + +@pytest.fixture(name="config", scope="session") +def config_fixture(docker_client) -> Mapping[str, Any]: + config = load_config("config_password.json") + config["host"] = get_docker_ip() + yield config + + +@pytest.fixture(name="config_private_key", scope="session") +def config_fixture_private_key(docker_client) -> Mapping[str, Any]: + config = load_config("config_private_key.json") | { + "credentials": {"auth_type": "private_key", "private_key": PRIVATE_KEY}, + } + config["host"] = get_docker_ip() + yield config + + +@pytest.fixture(name="config_private_key_csv", scope="session") +def config_fixture_private_key_csv(config_private_key) -> Mapping[str, Any]: + yield config_private_key + + +@pytest.fixture(name="config_password_all_csv", scope="session") +def config_fixture_password_all_csv(config) -> Mapping[str, Any]: + yield config | load_config("stream_csv.json") + + +@pytest.fixture(name="config_password_all_jsonl", scope="session") +def config_fixture_password_all_jsonl(config) -> Mapping[str, Any]: + yield config | load_config("stream_jsonl.json") + + +@pytest.fixture(name="configured_catalog") +def configured_catalog_fixture() -> ConfiguredAirbyteCatalog: + stream_schema = { + "type": "object", + "properties": { + "_ab_source_file_last_modified": {"type": "string"}, + "_ab_source_file_url": {"type": "string"}, + "string_col": {"type": ["null", "string"]}, + "int_col": {"type": ["null", "integer"]}, + }, + } + + overwrite_stream = ConfiguredAirbyteStream( + stream=AirbyteStream( + name="test_stream", json_schema=stream_schema, supported_sync_modes=[SyncMode.full_refresh, SyncMode.incremental] + ), + sync_mode=SyncMode.full_refresh, + destination_sync_mode=DestinationSyncMode.overwrite, + ) + + return ConfiguredAirbyteCatalog(streams=[overwrite_stream]) diff --git a/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/docker-compose.yaml b/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/docker-compose.yaml new file mode 100644 index 0000000000000..071d181a03913 --- /dev/null +++ b/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/docker-compose.yaml @@ -0,0 +1,9 @@ +version: "3" +services: + atmoz-sftp: + image: atmoz/sftp + volumes: + - ./files:/home/foo/files + ports: + - "2222:22" + command: foo:pass:1001 diff --git a/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/files/csv/test_1.csv b/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/files/csv/test_1.csv index a88d0791f99fa..ac56269260665 100644 --- a/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/files/csv/test_1.csv +++ b/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/files/csv/test_1.csv @@ -1,3 +1,3 @@ string_col,int_col "hello",1 -"foo",2 \ No newline at end of file +"foo",2 diff --git a/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/files/csv/test_3.csv b/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/files/csv/test_3.csv new file mode 100644 index 0000000000000..ac56269260665 --- /dev/null +++ b/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/files/csv/test_3.csv @@ -0,0 +1,3 @@ +string_col,int_col +"hello",1 +"foo",2 diff --git a/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/files/empty/empty.jsonl b/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/files/empty/empty.jsonl new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/files/test_1.json b/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/files/jsonl/test_1.jsonl similarity index 100% rename from airbyte-integrations/connectors/source-sftp-bulk/integration_tests/files/test_1.json rename to airbyte-integrations/connectors/source-sftp-bulk/integration_tests/files/jsonl/test_1.jsonl diff --git a/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/files/jsonl/test_2.jsonl b/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/files/jsonl/test_2.jsonl new file mode 100644 index 0000000000000..2460d05da873d --- /dev/null +++ b/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/files/jsonl/test_2.jsonl @@ -0,0 +1,2 @@ +{ "string_col": "hello", "int_col": 1 } +{ "string_col": "hello2", "int_col": 2 } diff --git a/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/files/test_2.json b/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/files/test_2.json deleted file mode 100644 index 38e9836a6f02d..0000000000000 --- a/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/files/test_2.json +++ /dev/null @@ -1 +0,0 @@ -{ "string_col": "hello", "int_col": 1 } diff --git a/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/integration_test.py b/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/integration_test.py index 8901c1dd16610..336541718fe8f 100644 --- a/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/integration_test.py +++ b/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/integration_test.py @@ -3,286 +3,72 @@ # import logging -import os -import shutil -import time -from io import StringIO -from socket import socket -from typing import Mapping +from copy import deepcopy +from typing import Any, Mapping -import docker -import paramiko -import pytest -from airbyte_cdk.models import AirbyteStream, ConfiguredAirbyteCatalog, ConfiguredAirbyteStream, DestinationSyncMode, Status, SyncMode, Type -from source_sftp_bulk import SourceFtp - -pytest_plugins = ("connector_acceptance_test.plugin",) +from airbyte_cdk.models import ConfiguredAirbyteCatalog, Status +from airbyte_cdk.test.entrypoint_wrapper import read +from source_sftp_bulk import SourceSFTPBulk logger = logging.getLogger("airbyte") -TMP_FOLDER = "/tmp/test_sftp_source" - - -def generate_ssh_keys(): - key = paramiko.RSAKey.generate(2048) - privateString = StringIO() - key.write_private_key(privateString) - - return privateString.getvalue(), "ssh-rsa " + key.get_base64() - - -@pytest.fixture(scope="session") -def docker_client(): - return docker.from_env() - - -@pytest.fixture(name="config", scope="session") -def config_fixture(docker_client): - with socket() as s: - s.bind(("", 0)) - available_port = s.getsockname()[1] - - dir_path = os.getcwd() + "/integration_tests" - - config = { - "host": "localhost", - "port": available_port, - "username": "foo", - "password": "pass", - "file_type": "json", - "start_date": "2021-01-01T00:00:00Z", - "folder_path": "/files", - "stream_name": "overwrite_stream", - } - - container = docker_client.containers.run( - "atmoz/sftp", - f"{config['username']}:{config['password']}", - name="mysftp", - ports={22: config["port"]}, - volumes={ - f"{dir_path}/files": {"bind": "/home/foo/files", "mode": "rw"}, - }, - detach=True, - ) - time.sleep(20) - yield config - - container.kill() - container.remove() - - -@pytest.fixture(name="config_pk", scope="session") -def config_fixture_pk(docker_client): - with socket() as s: - s.bind(("", 0)) - available_port = s.getsockname()[1] - - ssh_path = TMP_FOLDER + "/ssh" - dir_path = os.getcwd() + "/integration_tests" - - if os.path.exists(ssh_path): - shutil.rmtree(ssh_path) - - os.makedirs(ssh_path) - - pk, pubk = generate_ssh_keys() - - pub_key_path = ssh_path + "/id_rsa.pub" - with open(pub_key_path, "w") as f: - f.write(pubk) - - config = { - "host": "localhost", - "port": available_port, - "username": "foo", - "password": "pass", - "file_type": "json", - "private_key": pk, - "start_date": "2021-01-01T00:00:00Z", - "folder_path": "/files", - "stream_name": "overwrite_stream", +def test_check_invalid_private_key_config(configured_catalog: ConfiguredAirbyteCatalog, config_private_key_csv: Mapping[str, Any]): + invalid_config = config_private_key_csv | { + "credentials": { + "auth_type": "private_key", + "private_key": "-----BEGIN OPENSSH PRIVATE KEY-----\nbaddata\n-----END OPENSSH PRIVATE KEY-----", + } } - - container = docker_client.containers.run( - "atmoz/sftp", - f"{config['username']}:{config['password']}:1001", - name="mysftpssh", - ports={22: config["port"]}, - volumes={ - f"{dir_path}/files": {"bind": "/home/foo/files", "mode": "rw"}, - f"{pub_key_path}": {"bind": "/home/foo/.ssh/keys/id_rsa.pub", "mode": "ro"}, - }, - detach=True, - ) - - time.sleep(20) - yield config - - shutil.rmtree(ssh_path) - container.kill() - container.remove() - - -@pytest.fixture(name="configured_catalog") -def configured_catalog_fixture() -> ConfiguredAirbyteCatalog: - stream_schema = { - "type": "object", - "properties": {"string_col": {"type": "str"}, "int_col": {"type": "integer"}}, - } - - overwrite_stream = ConfiguredAirbyteStream( - stream=AirbyteStream( - name="overwrite_stream", json_schema=stream_schema, supported_sync_modes=[SyncMode.full_refresh, SyncMode.incremental] - ), - sync_mode=SyncMode.full_refresh, - destination_sync_mode=DestinationSyncMode.overwrite, - ) - - return ConfiguredAirbyteCatalog(streams=[overwrite_stream]) - - -def test_check_valid_config_pk(config_pk: Mapping): - outcome = SourceFtp().check(logger, config_pk) - assert outcome.status == Status.SUCCEEDED - - -def test_check_valid_config_pk_bad_pk(config_pk: Mapping): - outcome = SourceFtp().check( - logger, {**config_pk, "private_key": "-----BEGIN OPENSSH PRIVATE KEY-----\nbaddata\n-----END OPENSSH PRIVATE KEY-----"} - ) + outcome = SourceSFTPBulk(catalog=configured_catalog, config=invalid_config, state=None).check(logger, invalid_config) assert outcome.status == Status.FAILED -def test_check_invalid_config(config: Mapping): - outcome = SourceFtp().check(logger, {**config, "password": "wrongpass"}) +def test_check_invalid_config(configured_catalog: ConfiguredAirbyteCatalog, config: Mapping[str, Any]): + invalid_config = config | {"credentials": {"auth_type": "password", "password": "wrongpass"}} + outcome = SourceSFTPBulk(catalog=configured_catalog, config=invalid_config, state=None).check(logger, invalid_config) assert outcome.status == Status.FAILED -def test_check_valid_config(config: Mapping): - outcome = SourceFtp().check(logger, config) +def test_check_valid_config_private_key(configured_catalog: ConfiguredAirbyteCatalog, config_private_key: Mapping[str, Any]): + outcome = SourceSFTPBulk(catalog=configured_catalog, config=config_private_key, state=None).check(logger, config_private_key) assert outcome.status == Status.SUCCEEDED -def test_get_files_no_pattern_json(config: Mapping, configured_catalog: ConfiguredAirbyteCatalog): - source = SourceFtp() - result_iter = source.read(logger, config, configured_catalog, None) - result = list(result_iter) - assert len(result) == 2 - for res in result: - assert res.type == Type.RECORD - assert res.record.data["string_col"] in ["foo", "hello"] - assert res.record.data["int_col"] in [1, 2] - - -def test_get_files_pattern_json(config: Mapping, configured_catalog: ConfiguredAirbyteCatalog): - source = SourceFtp() - result_iter = source.read(logger, {**config, "file_pattern": "test_1.+"}, configured_catalog, None) - result = list(result_iter) - assert len(result) == 1 - for res in result: - assert res.type == Type.RECORD - assert res.record.data["string_col"] == "foo" - assert res.record.data["int_col"] == 2 - - -def test_get_files_pattern_json_new_separator(config: Mapping, configured_catalog: ConfiguredAirbyteCatalog): - source = SourceFtp() - result_iter = source.read(logger, {**config, "file_pattern": "test_2.+"}, configured_catalog, None) - result = list(result_iter) - assert len(result) == 1 - for res in result: - assert res.type == Type.RECORD - assert res.record.data["string_col"] == "hello" - assert res.record.data["int_col"] == 1 - - -def test_get_files_pattern_no_match_json(config: Mapping, configured_catalog: ConfiguredAirbyteCatalog): - source = SourceFtp() - result = source.read(logger, {**config, "file_pattern": "bad_pattern.+"}, configured_catalog, None) - assert len(list(result)) == 0 - - -def test_get_files_no_pattern_csv(config: Mapping, configured_catalog: ConfiguredAirbyteCatalog): - source = SourceFtp() - result_iter = source.read(logger, {**config, "file_type": "csv", "folder_path": "files/csv"}, configured_catalog, None) - result = list(result_iter) - assert len(result) == 4 - for res in result: - assert res.type == Type.RECORD - assert res.record.data["string_col"] in ["foo", "hello"] - assert res.record.data["int_col"] in [1, 2] - - -def test_get_files_pattern_csv(config: Mapping, configured_catalog: ConfiguredAirbyteCatalog): - source = SourceFtp() - result_iter = source.read( - logger, {**config, "file_type": "csv", "folder_path": "files/csv", "file_pattern": "test_1.+"}, configured_catalog, None - ) - result = list(result_iter) - assert len(result) == 2 - for res in result: - assert res.type == Type.RECORD - assert res.record.data["string_col"] in ["foo", "hello"] - assert res.record.data["int_col"] in [1, 2] - - -def test_get_files_pattern_csv_new_separator(config: Mapping, configured_catalog: ConfiguredAirbyteCatalog): - source = SourceFtp() - result_iter = source.read( - logger, {**config, "file_type": "csv", "folder_path": "files/csv", "file_pattern": "test_2.+"}, configured_catalog, None - ) - result = list(result_iter) - assert len(result) == 2 - for res in result: - assert res.type == Type.RECORD - assert res.record.data["string_col"] in ["foo", "hello"] - assert res.record.data["int_col"] in [1, 2] +def test_check_valid_config(configured_catalog: ConfiguredAirbyteCatalog, config: Mapping[str, Any]): + outcome = SourceSFTPBulk(catalog=configured_catalog, config=config, state=None).check(logger, config) + assert outcome.status == Status.SUCCEEDED -def test_get_files_pattern_csv_new_separator_with_config(config: Mapping, configured_catalog: ConfiguredAirbyteCatalog): - source = SourceFtp() - result_iter = source.read( - logger, - {**config, "file_type": "csv", "folder_path": "files/csv", "separator": ";", "file_pattern": "test_2.+"}, - configured_catalog, - None, - ) - result = list(result_iter) - assert len(result) == 2 - for res in result: - assert res.type == Type.RECORD - assert res.record.data["string_col"] in ["foo", "hello"] - assert res.record.data["int_col"] in [1, 2] +def test_get_one_file_csv(configured_catalog: ConfiguredAirbyteCatalog, config: Mapping[str, Any]): + source = SourceSFTPBulk(catalog=configured_catalog, config=config, state=None) + output = read(source=source, config=config, catalog=configured_catalog) + assert len(output.records) == 2 -def test_get_files_pattern_no_match_csv(config: Mapping, configured_catalog: ConfiguredAirbyteCatalog): - source = SourceFtp() - result = source.read( - logger, {**config, "file_type": "csv", "folder_path": "files/csv", "file_pattern": "badpattern.+"}, configured_catalog, None - ) - assert len(list(result)) == 0 +def test_get_all_files_csv(configured_catalog: ConfiguredAirbyteCatalog, config_password_all_csv: Mapping[str, Any]): + source = SourceSFTPBulk(catalog=configured_catalog, config=config_password_all_csv, state=None) + output = read(source=source, config=config_password_all_csv, catalog=configured_catalog) + assert len(output.records) == 4 -def test_get_files_empty_files(config: Mapping, configured_catalog: ConfiguredAirbyteCatalog): - source = SourceFtp() - result = source.read(logger, {**config, "folder_path": "files/empty"}, configured_catalog, None) - assert len(list(result)) == 0 +def test_get_files_pattern_json_new_separator(configured_catalog: ConfiguredAirbyteCatalog, config_password_all_jsonl: Mapping[str, Any]): + source = SourceSFTPBulk(catalog=configured_catalog, config=config_password_all_jsonl, state=None) + output = read(source=source, config=config_password_all_jsonl, catalog=configured_catalog) + assert len(output.records) == 3 -def test_get_files_handle_null_values(config: Mapping, configured_catalog: ConfiguredAirbyteCatalog): - source = SourceFtp() - result_iter = source.read(logger, {**config, "folder_path": "files/null_values", "file_type": "csv"}, configured_catalog, None) - result = list(result_iter) - assert len(result) == 5 +def test_get_files_pattern_no_match_json(configured_catalog: ConfiguredAirbyteCatalog, config_password_all_jsonl: Mapping[str, Any]): + config_with_wrong_glob_pattern = deepcopy(config_password_all_jsonl) + config_with_wrong_glob_pattern["streams"][0]["globs"] = ["**/not_existed_file.jsonl"] + source = SourceSFTPBulk(catalog=configured_catalog, config=config_with_wrong_glob_pattern, state=None) + output = read(source=source, config=config_with_wrong_glob_pattern, catalog=configured_catalog) + assert len(output.records) == 0 - res = result[2] - assert res.type == Type.RECORD - assert res.record.data["string_col"] == "bar" - assert res.record.data["int_col"] is None - res = result[4] - assert res.type == Type.RECORD - assert res.record.data["string_col"] is None - assert res.record.data["int_col"] == 4 +def test_get_files_empty_files(configured_catalog: ConfiguredAirbyteCatalog, config_password_all_jsonl: Mapping[str, Any]): + config_with_wrong_glob_pattern = deepcopy(config_password_all_jsonl) + config_with_wrong_glob_pattern["streams"][0]["globs"] = ["**/files/empty/*.jsonl"] + source = SourceSFTPBulk(catalog=configured_catalog, config=config_with_wrong_glob_pattern, state=None) + output = read(source=source, config=config_with_wrong_glob_pattern, catalog=configured_catalog) + assert len(output.records) == 0 diff --git a/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/spec.json b/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/spec.json new file mode 100644 index 0000000000000..81218b23d6107 --- /dev/null +++ b/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/spec.json @@ -0,0 +1,497 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/sources/sftp-bulk", + "connectionSpecification": { + "title": "SFTP Bulk Source Spec", + "description": "Used during spec; allows the developer to configure the cloud provider specific options\nthat are needed when users configure a file-based source.", + "type": "object", + "properties": { + "start_date": { + "title": "Start Date", + "description": "UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.", + "examples": ["2021-01-01T00:00:00.000000Z"], + "format": "date-time", + "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$", + "pattern_descriptor": "YYYY-MM-DDTHH:mm:ss.SSSSSSZ", + "order": 1, + "type": "string" + }, + "streams": { + "title": "The list of streams to sync", + "description": "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to warehouse destination such as Snowflake or BigQuery, each stream is a separate table.", + "order": 10, + "type": "array", + "items": { + "title": "FileBasedStreamConfig", + "type": "object", + "properties": { + "name": { + "title": "Name", + "description": "The name of the stream.", + "type": "string" + }, + "globs": { + "title": "Globs", + "description": "The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here.", + "default": ["**"], + "order": 1, + "type": "array", + "items": { + "type": "string" + } + }, + "legacy_prefix": { + "title": "Legacy Prefix", + "description": "The path prefix configured in v3 versions of the S3 connector. This option is deprecated in favor of a single glob.", + "airbyte_hidden": true, + "type": "string" + }, + "validation_policy": { + "title": "Validation Policy", + "description": "The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.", + "default": "Emit Record", + "enum": ["Emit Record", "Skip Record", "Wait for Discover"] + }, + "input_schema": { + "title": "Input Schema", + "description": "The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.", + "type": "string" + }, + "primary_key": { + "title": "Primary Key", + "description": "The column or columns (for a composite key) that serves as the unique identifier of a record. If empty, the primary key will default to the parser's default primary key.", + "airbyte_hidden": true, + "type": "string" + }, + "days_to_sync_if_history_is_full": { + "title": "Days To Sync If History Is Full", + "description": "When the state history of the file store is full, syncs will only read files that were last modified in the provided day range.", + "default": 3, + "type": "integer" + }, + "format": { + "title": "Format", + "description": "The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.", + "type": "object", + "oneOf": [ + { + "title": "Avro Format", + "type": "object", + "properties": { + "filetype": { + "title": "Filetype", + "default": "avro", + "const": "avro", + "type": "string" + }, + "double_as_string": { + "title": "Convert Double Fields to Strings", + "description": "Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss precision when handling floating point numbers.", + "default": false, + "type": "boolean" + } + }, + "required": ["filetype"] + }, + { + "title": "CSV Format", + "type": "object", + "properties": { + "filetype": { + "title": "Filetype", + "default": "csv", + "const": "csv", + "type": "string" + }, + "delimiter": { + "title": "Delimiter", + "description": "The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\\t'.", + "default": ",", + "type": "string" + }, + "quote_char": { + "title": "Quote Character", + "description": "The character used for quoting CSV values. To disallow quoting, make this field blank.", + "default": "\"", + "type": "string" + }, + "escape_char": { + "title": "Escape Character", + "description": "The character used for escaping special characters. To disallow escaping, leave this field blank.", + "type": "string" + }, + "encoding": { + "title": "Encoding", + "description": "The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.", + "default": "utf8", + "type": "string" + }, + "double_quote": { + "title": "Double Quote", + "description": "Whether two quotes in a quoted CSV value denote a single quote in the data.", + "default": true, + "type": "boolean" + }, + "null_values": { + "title": "Null Values", + "description": "A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.", + "default": [], + "type": "array", + "items": { + "type": "string" + }, + "uniqueItems": true + }, + "strings_can_be_null": { + "title": "Strings Can Be Null", + "description": "Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself.", + "default": true, + "type": "boolean" + }, + "skip_rows_before_header": { + "title": "Skip Rows Before Header", + "description": "The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field.", + "default": 0, + "type": "integer" + }, + "skip_rows_after_header": { + "title": "Skip Rows After Header", + "description": "The number of rows to skip after the header row.", + "default": 0, + "type": "integer" + }, + "header_definition": { + "title": "CSV Header Definition", + "description": "How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.", + "default": { + "header_definition_type": "From CSV" + }, + "oneOf": [ + { + "title": "From CSV", + "type": "object", + "properties": { + "header_definition_type": { + "title": "Header Definition Type", + "default": "From CSV", + "const": "From CSV", + "type": "string" + } + }, + "required": ["header_definition_type"] + }, + { + "title": "Autogenerated", + "type": "object", + "properties": { + "header_definition_type": { + "title": "Header Definition Type", + "default": "Autogenerated", + "const": "Autogenerated", + "type": "string" + } + }, + "required": ["header_definition_type"] + }, + { + "title": "User Provided", + "type": "object", + "properties": { + "header_definition_type": { + "title": "Header Definition Type", + "default": "User Provided", + "const": "User Provided", + "type": "string" + }, + "column_names": { + "title": "Column Names", + "description": "The column names that will be used while emitting the CSV records", + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": ["column_names", "header_definition_type"] + } + ], + "type": "object" + }, + "true_values": { + "title": "True Values", + "description": "A set of case-sensitive strings that should be interpreted as true values.", + "default": ["y", "yes", "t", "true", "on", "1"], + "type": "array", + "items": { + "type": "string" + }, + "uniqueItems": true + }, + "false_values": { + "title": "False Values", + "description": "A set of case-sensitive strings that should be interpreted as false values.", + "default": ["n", "no", "f", "false", "off", "0"], + "type": "array", + "items": { + "type": "string" + }, + "uniqueItems": true + }, + "inference_type": { + "title": "Inference Type", + "description": "How to infer the types of the columns. If none, inference default to strings.", + "default": "None", + "airbyte_hidden": true, + "enum": ["None", "Primitive Types Only"] + }, + "ignore_errors_on_fields_mismatch": { + "title": "Ignore errors on field mismatch", + "description": "Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema.", + "default": false, + "type": "boolean" + } + }, + "required": ["filetype"] + }, + { + "title": "Jsonl Format", + "type": "object", + "properties": { + "filetype": { + "title": "Filetype", + "default": "jsonl", + "const": "jsonl", + "type": "string" + } + }, + "required": ["filetype"] + }, + { + "title": "Parquet Format", + "type": "object", + "properties": { + "filetype": { + "title": "Filetype", + "default": "parquet", + "const": "parquet", + "type": "string" + }, + "decimal_as_float": { + "title": "Convert Decimal Fields to Floats", + "description": "Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended.", + "default": false, + "type": "boolean" + } + }, + "required": ["filetype"] + }, + { + "title": "Document File Type Format (Experimental)", + "type": "object", + "properties": { + "filetype": { + "title": "Filetype", + "default": "unstructured", + "const": "unstructured", + "type": "string" + }, + "skip_unprocessable_files": { + "title": "Skip Unprocessable Files", + "description": "If true, skip files that cannot be parsed and pass the error message along as the _ab_source_file_parse_error field. If false, fail the sync.", + "default": true, + "always_show": true, + "type": "boolean" + }, + "strategy": { + "title": "Parsing Strategy", + "description": "The strategy used to parse documents. `fast` extracts text directly from the document which doesn't work for all files. `ocr_only` is more reliable, but slower. `hi_res` is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf", + "default": "auto", + "always_show": true, + "order": 0, + "enum": ["auto", "fast", "ocr_only", "hi_res"], + "type": "string" + }, + "processing": { + "title": "Processing", + "description": "Processing configuration", + "default": { + "mode": "local" + }, + "type": "object", + "oneOf": [ + { + "title": "Local", + "type": "object", + "properties": { + "mode": { + "title": "Mode", + "default": "local", + "const": "local", + "enum": ["local"], + "type": "string" + } + }, + "description": "Process files locally, supporting `fast` and `ocr` modes. This is the default option.", + "required": ["mode"] + }, + { + "title": "via API", + "type": "object", + "properties": { + "mode": { + "title": "Mode", + "default": "api", + "const": "api", + "enum": ["api"], + "type": "string" + }, + "api_key": { + "title": "API Key", + "description": "The API key to use matching the environment", + "default": "", + "always_show": true, + "airbyte_secret": true, + "type": "string" + }, + "api_url": { + "title": "API URL", + "description": "The URL of the unstructured API to use", + "default": "https://api.unstructured.io", + "always_show": true, + "examples": ["https://api.unstructured.com"], + "type": "string" + }, + "parameters": { + "title": "Additional URL Parameters", + "description": "List of parameters send to the API", + "default": [], + "always_show": true, + "type": "array", + "items": { + "title": "APIParameterConfigModel", + "type": "object", + "properties": { + "name": { + "title": "Parameter name", + "description": "The name of the unstructured API parameter to use", + "examples": [ + "combine_under_n_chars", + "languages" + ], + "type": "string" + }, + "value": { + "title": "Value", + "description": "The value of the parameter", + "examples": ["true", "hi_res"], + "type": "string" + } + }, + "required": ["name", "value"] + } + } + }, + "description": "Process files via an API, using the `hi_res` mode. This option is useful for increased performance and accuracy, but requires an API key and a hosted instance of unstructured.", + "required": ["mode"] + } + ] + } + }, + "description": "Extract text from document formats (.pdf, .docx, .md, .pptx) and emit as one record per file.", + "required": ["filetype"] + } + ] + }, + "schemaless": { + "title": "Schemaless", + "description": "When enabled, syncs will not validate or structure records against the stream's schema.", + "default": false, + "type": "boolean" + } + }, + "required": ["name", "format"] + } + }, + "host": { + "title": "Host Address", + "description": "The server host address", + "examples": ["www.host.com", "192.0.2.1"], + "order": 2, + "type": "string" + }, + "username": { + "title": "User Name", + "description": "The server user", + "order": 3, + "type": "string" + }, + "credentials": { + "title": "Authentication", + "description": "Credentials for connecting to the SFTP Server", + "type": "object", + "order": 4, + "oneOf": [ + { + "title": "Authenticate via Password", + "type": "object", + "properties": { + "auth_type": { + "title": "Auth Type", + "default": "password", + "const": "password", + "enum": ["password"], + "type": "string" + }, + "password": { + "title": "Password", + "description": "Password", + "airbyte_secret": true, + "order": 3, + "type": "string" + } + }, + "required": ["password", "auth_type"] + }, + { + "title": "Authenticate via Private Key", + "type": "object", + "properties": { + "auth_type": { + "title": "Auth Type", + "default": "private_key", + "const": "private_key", + "enum": ["private_key"], + "type": "string" + }, + "private_key": { + "title": "Private key", + "description": "The Private key", + "multiline": true, + "order": 4, + "type": "string" + } + }, + "required": ["private_key", "auth_type"] + } + ] + }, + "port": { + "title": "Host Address", + "description": "The server port", + "default": 22, + "examples": ["22"], + "order": 5, + "type": "integer" + }, + "folder_path": { + "title": "Folder Path", + "description": "The directory to search files for sync", + "default": "/", + "examples": ["/logs/2022"], + "order": 6, + "pattern_descriptor": "/folder_to_sync", + "type": "string" + } + }, + "required": ["streams", "host", "username", "credentials"] + } +} diff --git a/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/utils.py b/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/utils.py new file mode 100644 index 0000000000000..db92623c29984 --- /dev/null +++ b/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/utils.py @@ -0,0 +1,36 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + + +import json +import logging +import os +import re +from typing import Any, Mapping, Union + +logger = logging.getLogger("airbyte") + +TMP_FOLDER = "/tmp/test_sftp_source" + + +def load_config(config_path: str) -> Mapping[str, Any]: + with open(f"{os.path.dirname(__file__)}/configs/{config_path}", "r") as config: + return json.load(config) + + +def write_config(config_path: str, config: Mapping[str, Any]): + with open(f"{os.path.dirname(__file__)}/configs/{config_path}", "w") as config_file: + config = json.dumps(config) + config_file.write(config) + + +def get_docker_ip() -> Union[str, Any]: + # When talking to the Docker daemon via a UNIX socket, route all TCP + # traffic to docker containers via the TCP loopback interface. + docker_host = os.environ.get("DOCKER_HOST", "").strip() + if not docker_host or docker_host.startswith("unix://"): + return "127.0.0.1" + + match = re.match(r"^tcp://(.+?):\d+$", docker_host) + if not match: + raise ValueError('Invalid value for DOCKER_HOST: "%s".' % (docker_host,)) + return match.group(1) diff --git a/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/valid_config.json b/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/valid_config.json deleted file mode 100644 index f8093d1e610ea..0000000000000 --- a/airbyte-integrations/connectors/source-sftp-bulk/integration_tests/valid_config.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "host": "localhost", - "port": 1122, - "username": "foo", - "password": "pass", - "file_type": "json", - "start_date": "2021-01-01T00:00:00Z", - "folder_path": "/files", - "stream_name": "test_stream", - "file_most_recent": false -} diff --git a/airbyte-integrations/connectors/source-sftp-bulk/metadata.yaml b/airbyte-integrations/connectors/source-sftp-bulk/metadata.yaml index 3339cd083f9b5..0e918273a5ffb 100644 --- a/airbyte-integrations/connectors/source-sftp-bulk/metadata.yaml +++ b/airbyte-integrations/connectors/source-sftp-bulk/metadata.yaml @@ -2,10 +2,12 @@ data: ab_internal: ql: 200 sl: 100 + connectorBuildOptions: + baseImage: docker.io/airbyte/python-connector-base:1.2.0@sha256:c22a9d97464b69d6ef01898edf3f8612dc11614f05a84984451dde195f337db9 connectorSubtype: file connectorType: source definitionId: 31e3242f-dee7-4cdc-a4b8-8e06c5458517 - dockerImageTag: 0.1.2 + dockerImageTag: 1.0.0 dockerRepository: airbyte/source-sftp-bulk documentationUrl: https://docs.airbyte.com/integrations/sources/sftp-bulk githubIssueLabel: source-sftp-bulk @@ -22,8 +24,13 @@ data: oss: enabled: true releaseStage: alpha + releases: + breakingChanges: + 1.0.0: + message: "This upgrade migrates the SFTP Bulk source to the Airbyte file-based CDK. This is the first necessary step of transitioning a file connector from community to Airbyte maintained." + upgradeDeadline: "2024-04-30" supportLevel: community tags: - language:python - - cdk:python + - cdk:python-file-based metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-sftp-bulk/poetry.lock b/airbyte-integrations/connectors/source-sftp-bulk/poetry.lock new file mode 100644 index 0000000000000..38e1017da3e9b --- /dev/null +++ b/airbyte-integrations/connectors/source-sftp-bulk/poetry.lock @@ -0,0 +1,2280 @@ +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. + +[[package]] +name = "airbyte-cdk" +version = "0.78.1" +description = "A framework for writing Airbyte Connectors." +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "airbyte_cdk-0.78.1-py3-none-any.whl", hash = "sha256:73dfc03e55a7107bf28b5bbc4e43572d448c60e9b34368d22cf48b6536aa2263"}, + {file = "airbyte_cdk-0.78.1.tar.gz", hash = "sha256:700e5526ae29db1e453b3def8682726f7d8aa653ee2f3056488d0a484f055133"}, +] + +[package.dependencies] +airbyte-protocol-models = "0.5.1" +avro = {version = ">=1.11.2,<1.12.0", optional = true, markers = "extra == \"file-based\""} +backoff = "*" +cachetools = "*" +Deprecated = ">=1.2,<1.3" +dpath = ">=2.0.1,<2.1.0" +fastavro = {version = ">=1.8.0,<1.9.0", optional = true, markers = "extra == \"file-based\""} +genson = "1.2.2" +isodate = ">=0.6.1,<0.7.0" +Jinja2 = ">=3.1.2,<3.2.0" +jsonref = ">=0.2,<0.3" +jsonschema = ">=3.2.0,<3.3.0" +markdown = {version = "*", optional = true, markers = "extra == \"file-based\""} +pendulum = "<3.0.0" +pyarrow = {version = ">=15.0.0,<15.1.0", optional = true, markers = "extra == \"file-based\""} +pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" +pytesseract = {version = "0.3.10", optional = true, markers = "extra == \"file-based\""} +python-dateutil = "*" +PyYAML = ">=6.0.1,<7.0.0" +requests = "*" +requests_cache = "*" +unstructured = {version = "0.10.27", extras = ["docx", "pptx"], optional = true, markers = "extra == \"file-based\""} +"unstructured.pytesseract" = {version = ">=0.3.12", optional = true, markers = "extra == \"file-based\""} +wcmatch = "8.4" + +[package.extras] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] +vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] + +[[package]] +name = "airbyte-protocol-models" +version = "0.5.1" +description = "Declares the Airbyte Protocol." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, + {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, +] + +[package.dependencies] +pydantic = ">=1.9.2,<2.0.0" + +[[package]] +name = "atomicwrites" +version = "1.4.1" +description = "Atomic file writes." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, +] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "avro" +version = "1.11.3" +description = "Avro is a serialization and RPC framework." +optional = false +python-versions = ">=3.6" +files = [ + {file = "avro-1.11.3.tar.gz", hash = "sha256:3393bb5139f9cf0791d205756ce1e39a5b58586af5b153d6a3b5a199610e9d17"}, +] + +[package.extras] +snappy = ["python-snappy"] +zstandard = ["zstandard"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "bcrypt" +version = "4.1.2" +description = "Modern password hashing for your software and your servers" +optional = false +python-versions = ">=3.7" +files = [ + {file = "bcrypt-4.1.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:ac621c093edb28200728a9cca214d7e838529e557027ef0581685909acd28b5e"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea505c97a5c465ab8c3ba75c0805a102ce526695cd6818c6de3b1a38f6f60da1"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57fa9442758da926ed33a91644649d3e340a71e2d0a5a8de064fb621fd5a3326"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:eb3bd3321517916696233b5e0c67fd7d6281f0ef48e66812db35fc963a422a1c"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6cad43d8c63f34b26aef462b6f5e44fdcf9860b723d2453b5d391258c4c8e966"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:44290ccc827d3a24604f2c8bcd00d0da349e336e6503656cb8192133e27335e2"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:732b3920a08eacf12f93e6b04ea276c489f1c8fb49344f564cca2adb663b3e4c"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1c28973decf4e0e69cee78c68e30a523be441972c826703bb93099868a8ff5b5"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b8df79979c5bae07f1db22dcc49cc5bccf08a0380ca5c6f391cbb5790355c0b0"}, + {file = "bcrypt-4.1.2-cp37-abi3-win32.whl", hash = "sha256:fbe188b878313d01b7718390f31528be4010fed1faa798c5a1d0469c9c48c369"}, + {file = "bcrypt-4.1.2-cp37-abi3-win_amd64.whl", hash = "sha256:9800ae5bd5077b13725e2e3934aa3c9c37e49d3ea3d06318010aa40f54c63551"}, + {file = "bcrypt-4.1.2-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:71b8be82bc46cedd61a9f4ccb6c1a493211d031415a34adde3669ee1b0afbb63"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e3c6642077b0c8092580c819c1684161262b2e30c4f45deb000c38947bf483"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:387e7e1af9a4dd636b9505a465032f2f5cb8e61ba1120e79a0e1cd0b512f3dfc"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f70d9c61f9c4ca7d57f3bfe88a5ccf62546ffbadf3681bb1e268d9d2e41c91a7"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2a298db2a8ab20056120b45e86c00a0a5eb50ec4075b6142db35f593b97cb3fb"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ba55e40de38a24e2d78d34c2d36d6e864f93e0d79d0b6ce915e4335aa81d01b1"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:3566a88234e8de2ccae31968127b0ecccbb4cddb629da744165db72b58d88ca4"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b90e216dc36864ae7132cb151ffe95155a37a14e0de3a8f64b49655dd959ff9c"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:69057b9fc5093ea1ab00dd24ede891f3e5e65bee040395fb1e66ee196f9c9b4a"}, + {file = "bcrypt-4.1.2-cp39-abi3-win32.whl", hash = "sha256:02d9ef8915f72dd6daaef40e0baeef8a017ce624369f09754baf32bb32dba25f"}, + {file = "bcrypt-4.1.2-cp39-abi3-win_amd64.whl", hash = "sha256:be3ab1071662f6065899fe08428e45c16aa36e28bc42921c4901a191fda6ee42"}, + {file = "bcrypt-4.1.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d75fc8cd0ba23f97bae88a6ec04e9e5351ff3c6ad06f38fe32ba50cbd0d11946"}, + {file = "bcrypt-4.1.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:a97e07e83e3262599434816f631cc4c7ca2aa8e9c072c1b1a7fec2ae809a1d2d"}, + {file = "bcrypt-4.1.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e51c42750b7585cee7892c2614be0d14107fad9581d1738d954a262556dd1aab"}, + {file = "bcrypt-4.1.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba4e4cc26610581a6329b3937e02d319f5ad4b85b074846bf4fef8a8cf51e7bb"}, + {file = "bcrypt-4.1.2.tar.gz", hash = "sha256:33313a1200a3ae90b75587ceac502b048b840fc69e7f7a0905b5f87fac7a1258"}, +] + +[package.extras] +tests = ["pytest (>=3.2.1,!=3.3.0)"] +typecheck = ["mypy"] + +[[package]] +name = "beautifulsoup4" +version = "4.12.3" +description = "Screen-scraping library" +optional = false +python-versions = ">=3.6.0" +files = [ + {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, + {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, +] + +[package.dependencies] +soupsieve = ">1.2" + +[package.extras] +cchardet = ["cchardet"] +chardet = ["chardet"] +charset-normalizer = ["charset-normalizer"] +html5lib = ["html5lib"] +lxml = ["lxml"] + +[[package]] +name = "bracex" +version = "2.4" +description = "Bash style brace expander." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + +[[package]] +name = "cachetools" +version = "5.3.3" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "cffi" +version = "1.16.0" +description = "Foreign Function Interface for Python calling C code." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, + {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, + {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, + {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, + {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "chardet" +version = "5.2.0" +description = "Universal encoding detector for Python 3" +optional = false +python-versions = ">=3.7" +files = [ + {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"}, + {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "cryptography" +version = "42.0.5" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, + {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, + {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, + {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, + {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, + {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = "sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, + {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, +] + +[package.dependencies] +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] +nox = ["nox"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "dataclasses-json" +version = "0.6.4" +description = "Easily serialize dataclasses to and from JSON." +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "dataclasses_json-0.6.4-py3-none-any.whl", hash = "sha256:f90578b8a3177f7552f4e1a6e535e84293cd5da421fcce0642d49c0d7bdf8df2"}, + {file = "dataclasses_json-0.6.4.tar.gz", hash = "sha256:73696ebf24936560cca79a2430cbc4f3dd23ac7bf46ed17f38e5e5e7657a6377"}, +] + +[package.dependencies] +marshmallow = ">=3.18.0,<4.0.0" +typing-inspect = ">=0.4.0,<1" + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "docker" +version = "7.0.0" +description = "A Python library for the Docker Engine API." +optional = false +python-versions = ">=3.8" +files = [ + {file = "docker-7.0.0-py3-none-any.whl", hash = "sha256:12ba681f2777a0ad28ffbcc846a69c31b4dfd9752b47eb425a274ee269c5e14b"}, + {file = "docker-7.0.0.tar.gz", hash = "sha256:323736fb92cd9418fc5e7133bc953e11a9da04f4483f828b527db553f1e7e5a3"}, +] + +[package.dependencies] +packaging = ">=14.0" +pywin32 = {version = ">=304", markers = "sys_platform == \"win32\""} +requests = ">=2.26.0" +urllib3 = ">=1.26.0" + +[package.extras] +ssh = ["paramiko (>=2.4.3)"] +websockets = ["websocket-client (>=1.3.0)"] + +[[package]] +name = "dpath" +version = "2.0.8" +description = "Filesystem-like pathing and searching for dictionaries" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, + {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, +] + +[[package]] +name = "emoji" +version = "2.11.0" +description = "Emoji for Python" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +files = [ + {file = "emoji-2.11.0-py2.py3-none-any.whl", hash = "sha256:63fc9107f06c6c2e48e5078ce9575cef98518f5ac09474f6148a43e989989582"}, + {file = "emoji-2.11.0.tar.gz", hash = "sha256:772eaa30f4e0b1ce95148a092df4c7dc97644532c03225326b0fd05e8a9f72a3"}, +] + +[package.extras] +dev = ["coverage", "coveralls", "pytest"] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "fastavro" +version = "1.8.4" +description = "Fast read/write of AVRO files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fastavro-1.8.4-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:7afe1475e8a967c04e2b0ef4d33bc10bffa66b4fa6e08bd2ee9d91b6768cba2a"}, + {file = "fastavro-1.8.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5fd73609f3c1ac0d90ae3179d2fb9d788f842245db2656ff9225fce871fc5b7"}, + {file = "fastavro-1.8.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78fdf1ba47e43146af72ac48d7b2247a06c4f2d95dfdaad6129c481014b07a6b"}, + {file = "fastavro-1.8.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d950542b3263653f00b695cbc728b5c60ab9ea6df32a7017ad9a6a67235386e7"}, + {file = "fastavro-1.8.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ce2ccfa9aff8df6da683c48542b7b2a216dde6d3a4d1c505c5e1b8ca2ec0abbb"}, + {file = "fastavro-1.8.4-cp310-cp310-win_amd64.whl", hash = "sha256:f12f9914d6196695d3208ea348145a80d0defefe16b8a226373fe8ce68f66139"}, + {file = "fastavro-1.8.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d353aec9c000b96c33ad285651a2cba0f87fe50fcdecc6120689996af427194d"}, + {file = "fastavro-1.8.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1eaed91d6e1fb06c172e0aaf4b1ca1fd019c3f4a481e314bf783a4c74f6b7015"}, + {file = "fastavro-1.8.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9293b303955acd34a6f69dd4ef3465bd575dbde0cd3e3f00227a0ba5012430b4"}, + {file = "fastavro-1.8.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b79baefd61554d9f03c4beaebbe638ef175d0efc1fb01f25e88ee6ae97985ab3"}, + {file = "fastavro-1.8.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:14d7cd3df019d41c66733b8bf5d983af9e1f601d4cb162853a49238a4087d6b0"}, + {file = "fastavro-1.8.4-cp311-cp311-win_amd64.whl", hash = "sha256:c8fb27001b7023910969f15bee2c9205c4e9f40713929d6c1dca8f470fc8fc80"}, + {file = "fastavro-1.8.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e331229acef15f858d9863ced7b629ebef4bd5f80766d367255e51cbf44f8dab"}, + {file = "fastavro-1.8.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04e26b3ba288bd423f25630a3b9bd70cc61b46c6f6161de35e398a6fc8f260f0"}, + {file = "fastavro-1.8.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6281f4555659ed658b195d1618a637504013e57b680d6cbad7c726e9a4e2cf0b"}, + {file = "fastavro-1.8.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3201880149e1fb807d616ab46b338a26788173a9f4e8a3396ae145e86af878a1"}, + {file = "fastavro-1.8.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:39771719fa04b8321eeebfb0813eaa2723c20e5bf570bcca3f53f1169099a0d7"}, + {file = "fastavro-1.8.4-cp312-cp312-win_amd64.whl", hash = "sha256:7095ae37a5c46dacb7ef430092e5f94650f576be281487b72050c1cf12e4ee20"}, + {file = "fastavro-1.8.4-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:eb76f5bfcde91cde240c93594dae47670cdf1a95d7e5d0dc3ccdef57c6c1c183"}, + {file = "fastavro-1.8.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71ebe1cf090f800ca7d4c64d50c81c2a88c56e6ef6aa5eb61ec425e7ae723617"}, + {file = "fastavro-1.8.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9f0ef601943ea11cd02a59c57f5588cea3e300ac67608f53c904ec7aeddd232"}, + {file = "fastavro-1.8.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1060318f3ab31bcc4b2184cea3763305b773163381601e304000da81a2f7e11f"}, + {file = "fastavro-1.8.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:01c8c7f22172174f2c2c0922801b552fbca75758f84b0ad3cd6f3e505a76ed05"}, + {file = "fastavro-1.8.4-cp38-cp38-win_amd64.whl", hash = "sha256:bc8a1af80b8face4a41d8526a34b6474a874f7367a900d0b14752eacebb7a2b8"}, + {file = "fastavro-1.8.4-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:687a2f8fa83a76906c4ec35c9d0500e13a567fc631845f0e47646c48233c7725"}, + {file = "fastavro-1.8.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b921c63fcfb9149a32c40a9cd27b0e900fcda602455cbce4d773300019b9ce2"}, + {file = "fastavro-1.8.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2610a8683b10be7aaa532ddddbcb719883ee2d6f09dafd4a4a7b46d5d719fc07"}, + {file = "fastavro-1.8.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:94448dc749d098f846f6a6d82d59f85483bd6fcdecfb6234daac5f4494ae4156"}, + {file = "fastavro-1.8.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2d39c6b5db7014a3722a7d206310874430486f4895161911b6b6574cb1a6c48f"}, + {file = "fastavro-1.8.4-cp39-cp39-win_amd64.whl", hash = "sha256:3b73472e8da33bcbf08ec989996637aea04eaca71058bb6d45def6fa4168f541"}, + {file = "fastavro-1.8.4.tar.gz", hash = "sha256:dae6118da27e81abf5957dc79a6d778888fc1bbf67645f52959cb2faba95beff"}, +] + +[package.extras] +codecs = ["lz4", "python-snappy", "zstandard"] +lz4 = ["lz4"] +snappy = ["python-snappy"] +zstandard = ["zstandard"] + +[[package]] +name = "filetype" +version = "1.2.0" +description = "Infer file type and MIME type of any file/buffer. No external dependencies." +optional = false +python-versions = "*" +files = [ + {file = "filetype-1.2.0-py2.py3-none-any.whl", hash = "sha256:7ce71b6880181241cf7ac8697a2f1eb6a8bd9b429f7ad6d27b8db9ba5f1c2d25"}, + {file = "filetype-1.2.0.tar.gz", hash = "sha256:66b56cd6474bf41d8c54660347d37afcc3f7d1970648de365c102ef77548aadb"}, +] + +[[package]] +name = "freezegun" +version = "1.4.0" +description = "Let your Python tests travel through time" +optional = false +python-versions = ">=3.7" +files = [ + {file = "freezegun-1.4.0-py3-none-any.whl", hash = "sha256:55e0fc3c84ebf0a96a5aa23ff8b53d70246479e9a68863f1fcac5a3e52f19dd6"}, + {file = "freezegun-1.4.0.tar.gz", hash = "sha256:10939b0ba0ff5adaecf3b06a5c2f73071d9678e507c5eaedb23c761d56ac774b"}, +] + +[package.dependencies] +python-dateutil = ">=2.7" + +[[package]] +name = "genson" +version = "1.2.2" +description = "GenSON is a powerful, user-friendly JSON Schema generator." +optional = false +python-versions = "*" +files = [ + {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, +] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "importlib-metadata" +version = "7.1.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"}, + {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "joblib" +version = "1.3.2" +description = "Lightweight pipelining with Python functions" +optional = false +python-versions = ">=3.7" +files = [ + {file = "joblib-1.3.2-py3-none-any.whl", hash = "sha256:ef4331c65f239985f3f2220ecc87db222f08fd22097a3dd5698f693875f8cbb9"}, + {file = "joblib-1.3.2.tar.gz", hash = "sha256:92f865e621e17784e7955080b6d042489e3b8e294949cc44c6eac304f59772b1"}, +] + +[[package]] +name = "jsonref" +version = "0.2" +description = "An implementation of JSON Reference for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, +] + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0" +setuptools = "*" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] + +[[package]] +name = "langdetect" +version = "1.0.9" +description = "Language detection library ported from Google's language-detection." +optional = false +python-versions = "*" +files = [ + {file = "langdetect-1.0.9-py2-none-any.whl", hash = "sha256:7cbc0746252f19e76f77c0b1690aadf01963be835ef0cd4b56dddf2a8f1dfc2a"}, + {file = "langdetect-1.0.9.tar.gz", hash = "sha256:cbc1fef89f8d062739774bd51eda3da3274006b3661d199c2655f6b3f6d605a0"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "lxml" +version = "5.1.1" +description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." +optional = false +python-versions = ">=3.6" +files = [ + {file = "lxml-5.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9cc30dc3c49ea914fa62ea73b57198b541cf2cd522fcf2b9559f99a24df769bb"}, + {file = "lxml-5.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f1d0824e3ddb969fe1337b1bc45cf0bec8095b342f36903f41a74b7769cc8c73"}, + {file = "lxml-5.1.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4958c378d9387c45ef8c4859495cf6be76f863e4e3b31494f6ec7f2c48d3b8e3"}, + {file = "lxml-5.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aff34295a6c87638a1f1905355cf3a97e4026c45c0cf3bb6ed6bc35b885b4a33"}, + {file = "lxml-5.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b174885fd2cabd1ad48585296f495e25d607f02db99668c08b2afaceb668e21b"}, + {file = "lxml-5.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:1b0611bba10d6f5467b86673e8f6bba4de0d00f7d111eea843bc872abfe11b5c"}, + {file = "lxml-5.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:adff469b7dbfe9f3babc9e4479449ee97085ba70ac492fbe5f0f7217940c6731"}, + {file = "lxml-5.1.1-cp310-cp310-win32.whl", hash = "sha256:99bcdf665576a26b44c7ce767d76b769a4418b0a13cda8300b26fb7b2647bd5b"}, + {file = "lxml-5.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:3da8db291568c27b2bb248dcfc8838ca3149f373a24e204bcd1c2c89e2813d14"}, + {file = "lxml-5.1.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:906966babd374fdfe46e130fc656488003f0d0d63b7cba612aa5a796c8804283"}, + {file = "lxml-5.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9c03f3715c68fc707d9383d56e482d95d198ba07cb3dad4aee9e5a5ca06b2536"}, + {file = "lxml-5.1.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d26243d994d4077a50056e9008848e5b421be0c6f0fd4e932a9463e1d89fc42b"}, + {file = "lxml-5.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de00750318ae6869b9dfa6429a4f82b8ecad043049414547474d09db549c2ee"}, + {file = "lxml-5.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29b2771b4eec4e85063f10294facdd9829d010e6cc9668040d0cf936dc56733a"}, + {file = "lxml-5.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d9358f7268c161dc0a1c3216018f26c04954b5dd47ba6dead79da6598f4725d4"}, + {file = "lxml-5.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8a943826e7a9254eed661a7134fcde3c832a9fecd989d0f47c6e08c7b769cb2c"}, + {file = "lxml-5.1.1-cp311-cp311-win32.whl", hash = "sha256:74d0967c6f91eec6fe91159f9e8ccb3720fa0fbf9f462109c7bef62550df397c"}, + {file = "lxml-5.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:26974096654241df08a30dc2eb0e139c1ad5653660aa4b2ced66000230e96c14"}, + {file = "lxml-5.1.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:55e13a19829dcdbf0c5233062977aeb6daf72e65124909128045976f659164e8"}, + {file = "lxml-5.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:adedfb61be862f48907218e3a24bf051fd2ecca53358f3958b0bdb17d7881c20"}, + {file = "lxml-5.1.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:77425482e4311d1cff119a2b5ab26c52ec209d2a3d728a54db3223ab91995e20"}, + {file = "lxml-5.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d380f183bd03ab827899753ea96dabe27d2025eb0bfd4f2ac0eee4afa0f351d"}, + {file = "lxml-5.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8682af96b5ad5093aab9eee5e4ff24cb7a9796c78699d914dd456ebfe7484a6"}, + {file = "lxml-5.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:68eed33377a9925aed7ba56c8611d50aaa1e45638c07a92b4b4b0a0436cc2dd2"}, + {file = "lxml-5.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c7c1d2f6e9c7a1c4478146ee38d16dbe0eb3be998424bc0f01346c671c38b86d"}, + {file = "lxml-5.1.1-cp312-cp312-win32.whl", hash = "sha256:81107c8de3e463052ae8fd05fd31b97c371c7a9ce4a189b8bb5f45b0b3545fb9"}, + {file = "lxml-5.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:0e46181d15fae102c53621bed9356b7a599a1e837b978c934a350dd00842b1d9"}, + {file = "lxml-5.1.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:48dd28b9f410329de709a4bb6169c58f2cd8bff25f5a48d647678ec9b8a40c65"}, + {file = "lxml-5.1.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bf7e57dbe7b3c605e63849d9c8dae246a6ab9002223c57cd3f3dec7c3a0a8e6d"}, + {file = "lxml-5.1.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5020b3081030b5cfc8149eee231167aea4ff68df73a610e1d542809e1f11fde7"}, + {file = "lxml-5.1.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77842b79b63c83c04dcfe2f045c78e15e4d97c86838eabd2e6518c1ed97e3900"}, + {file = "lxml-5.1.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:666432274881cb2535e71dbe745e08ef10fe25c81fbb1a6b1e3c973177823b0c"}, + {file = "lxml-5.1.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:a103426e809640a2d985062d2f4b28db2f0fe4469ff72a67cb31fa70613158f1"}, + {file = "lxml-5.1.1-cp36-cp36m-win32.whl", hash = "sha256:95a51324a55000c55f4ab79e1f7f1e0bc42b7a24e39633f79542753023a9d4b7"}, + {file = "lxml-5.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:bd46b5b19ac969de8e87fb3d04414641d12ee489e2ea6cc75344087829b31c63"}, + {file = "lxml-5.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:59ca75cfcf646ff64aa19ca4e7fd2a0fde77268d5a87856525d9e0b69b77d0c4"}, + {file = "lxml-5.1.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d55ddc73dec971277b181a6d1a6abdd34f50e4511e1e60f6b4ebe22cbaad05bb"}, + {file = "lxml-5.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56f1e813ff660d031c77edba90a068d57e47ae93a9e811330fc88946fa68af9a"}, + {file = "lxml-5.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43f21b5929185fa4560836942020bb00a0fcdec9f67be98cac1a4b99501757c1"}, + {file = "lxml-5.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1528b37e83c3aeecb438e76e5be6279b353275560125a9c3f4d74642c5f110f9"}, + {file = "lxml-5.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2992480a25434d2df31413136ef87effab14d43b07f1f54c5012c4f6c7530144"}, + {file = "lxml-5.1.1-cp37-cp37m-win32.whl", hash = "sha256:1d0270d33fbde6e1c6758ff58e2e284144f5331aa05dfe7f44ceafdf4e9d31aa"}, + {file = "lxml-5.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:dec3491aa69a91ed07f5e6bc033e2b1a9424447ad5312ee69ac973e94d79083a"}, + {file = "lxml-5.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:87b67d8620c2725d666e5d88ddba56bcdb1f52211a2e7d22f951b67c35f7f627"}, + {file = "lxml-5.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5bd2595ebe95214446e00a1ab94571f778b126e17736ea222c07505c4e092289"}, + {file = "lxml-5.1.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfbdadc3cfe552331ecb0bbdcabf148d1697c73aa4321151e0e6c1704eeb76a7"}, + {file = "lxml-5.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52358249292bc155af681a9240ec3d944c1195f0124aa10ec4e3635adc1e10a1"}, + {file = "lxml-5.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:036b36c48cd775e4fd2084b34ae62ffeefa7a01f955f5a5b816f9257c308cfc0"}, + {file = "lxml-5.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f05ab8cea65363d0cc7ce818f42407504b6d94ca885b4cde0270f021e2f4ef61"}, + {file = "lxml-5.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d94a28c16cc430b68c374b37b8bb536ba5f0a4a080be0e1daa8310c44a00a75c"}, + {file = "lxml-5.1.1-cp38-cp38-win32.whl", hash = "sha256:9113fe65a62f834b8e994c8f48e7b2179bf81878c0ec80ad7feba51ab9417663"}, + {file = "lxml-5.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:acff17e0cd5344677757a152631d8411efac6a84e4476d60123a9b33f5d6c511"}, + {file = "lxml-5.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a94a97380ad689d751eb0a1e1ccd2a0622c5141771a31abe9a16075f80027e95"}, + {file = "lxml-5.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1f4d37b3f8d2d44493edce3d65ac987127bababd8ae208a6f0d7d260852346e"}, + {file = "lxml-5.1.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c5940f188189956ccb3d1adb413001ada79f2d2b81087d2612a0cc4a1197eed"}, + {file = "lxml-5.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50007f4e94dc4e38030487a8b6c4af87a2d51ed059c7b74b29e3dd937cb1dfe1"}, + {file = "lxml-5.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a305d0469177fd78a0a9aa2231c60218266bb85d4b7955f9b67dab628c9267fd"}, + {file = "lxml-5.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:13b73d78a8023203722cf98e9ea0b222da83110d1d5ef437ef8782a7755b4586"}, + {file = "lxml-5.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bc6904519dd1f92eb82f7d49814a33bbc444d0b66b1438e76daf3f79ef4aa38f"}, + {file = "lxml-5.1.1-cp39-cp39-win32.whl", hash = "sha256:04ef231dde88294a5499f61a74cdc42af97d8d5ecec1b0a645d1c7d436942789"}, + {file = "lxml-5.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:071e5123d1eca861708c4be5b54e4d88923fa33fab3aa02722e907518b07071c"}, + {file = "lxml-5.1.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:19c6bc7476eeac4598ff925ae98597610109e21af4cd7ab1e060efcfc4b1c6e2"}, + {file = "lxml-5.1.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20cd17eb21f5ae54da96791c49e1fbd3327bf66b2c00556cdf8d0552c2270f92"}, + {file = "lxml-5.1.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a02ed1ebc469734dbfed5b688f709334de19e7a333cba7ae187b17d2b2c1d4ff"}, + {file = "lxml-5.1.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:161838cb95c97e8d76d01e544a3570b52ab6b863f4897a90e1f073bb110a75ba"}, + {file = "lxml-5.1.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1abbf2249467a37da45fb2d7ff37e578dfc9813f142800e58db9da761cb7899"}, + {file = "lxml-5.1.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:6c49eb5deaed1990fde5b5d80d6800aec1b5fd6113346b5f11068d988f68f2c4"}, + {file = "lxml-5.1.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:60ceffdca5d637fe8ee95c7f06733a6c9646e07da80997efe3af2d4b4f366e36"}, + {file = "lxml-5.1.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a76a7b35e7660c74eb3f943c19f5f78c882dceab890cf8017027b6100b79ad8e"}, + {file = "lxml-5.1.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5dcb373720b70aa05419e508265dd86f06886ca0388967f6f024fbc4d551379f"}, + {file = "lxml-5.1.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3641bc124b037921de4220538a5ebb52354fd2799fc2bbfb335d28096063c7d6"}, + {file = "lxml-5.1.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a6e9b34f59c9755aa279c652e1c48c333c665d05a88afcd8e5ff0bde86f3b14"}, + {file = "lxml-5.1.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:318847c165063549c8fda6b162a0d068689b10deb825cb3859caef69fddaaaff"}, + {file = "lxml-5.1.1.tar.gz", hash = "sha256:42a8aa957e98bd8b884a8142175ec24ce4ef0a57760e8879f193bfe64b757ca9"}, +] + +[package.extras] +cssselect = ["cssselect (>=0.7)"] +html5 = ["html5lib"] +htmlsoup = ["BeautifulSoup4"] +source = ["Cython (>=3.0.9)"] + +[[package]] +name = "markdown" +version = "3.6" +description = "Python implementation of John Gruber's Markdown." +optional = false +python-versions = ">=3.8" +files = [ + {file = "Markdown-3.6-py3-none-any.whl", hash = "sha256:48f276f4d8cfb8ce6527c8f79e2ee29708508bf4d40aa410fbc3b4ee832c850f"}, + {file = "Markdown-3.6.tar.gz", hash = "sha256:ed4f41f6daecbeeb96e576ce414c41d2d876daa9a16cb35fa8ed8c2ddfad0224"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] +testing = ["coverage", "pyyaml"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "marshmallow" +version = "3.21.1" +description = "A lightweight library for converting complex datatypes to and from native Python datatypes." +optional = false +python-versions = ">=3.8" +files = [ + {file = "marshmallow-3.21.1-py3-none-any.whl", hash = "sha256:f085493f79efb0644f270a9bf2892843142d80d7174bbbd2f3713f2a589dc633"}, + {file = "marshmallow-3.21.1.tar.gz", hash = "sha256:4e65e9e0d80fc9e609574b9983cf32579f305c718afb30d7233ab818571768c3"}, +] + +[package.dependencies] +packaging = ">=17.0" + +[package.extras] +dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] +docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.2.6)", "sphinx-issues (==4.0.0)", "sphinx-version-warning (==1.1.2)"] +tests = ["pytest", "pytz", "simplejson"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "nltk" +version = "3.8.1" +description = "Natural Language Toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "nltk-3.8.1-py3-none-any.whl", hash = "sha256:fd5c9109f976fa86bcadba8f91e47f5e9293bd034474752e92a520f81c93dda5"}, + {file = "nltk-3.8.1.zip", hash = "sha256:1834da3d0682cba4f2cede2f9aad6b0fafb6461ba451db0efb6f9c39798d64d3"}, +] + +[package.dependencies] +click = "*" +joblib = "*" +regex = ">=2021.8.3" +tqdm = "*" + +[package.extras] +all = ["matplotlib", "numpy", "pyparsing", "python-crfsuite", "requests", "scikit-learn", "scipy", "twython"] +corenlp = ["requests"] +machine-learning = ["numpy", "python-crfsuite", "scikit-learn", "scipy"] +plot = ["matplotlib"] +tgrep = ["pyparsing"] +twitter = ["twython"] + +[[package]] +name = "numpy" +version = "1.26.4" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, +] + +[[package]] +name = "packaging" +version = "24.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, +] + +[[package]] +name = "paramiko" +version = "3.4.0" +description = "SSH2 protocol library" +optional = false +python-versions = ">=3.6" +files = [ + {file = "paramiko-3.4.0-py3-none-any.whl", hash = "sha256:43f0b51115a896f9c00f59618023484cb3a14b98bbceab43394a39c6739b7ee7"}, + {file = "paramiko-3.4.0.tar.gz", hash = "sha256:aac08f26a31dc4dffd92821527d1682d99d52f9ef6851968114a8728f3c274d3"}, +] + +[package.dependencies] +bcrypt = ">=3.2" +cryptography = ">=3.3" +pynacl = ">=1.5" + +[package.extras] +all = ["gssapi (>=1.4.1)", "invoke (>=2.0)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8)"] +gssapi = ["gssapi (>=1.4.1)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8)"] +invoke = ["invoke (>=2.0)"] + +[[package]] +name = "pendulum" +version = "2.1.2" +description = "Python datetimes made easy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, +] + +[package.dependencies] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" + +[[package]] +name = "pillow" +version = "10.2.0" +description = "Python Imaging Library (Fork)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pillow-10.2.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:7823bdd049099efa16e4246bdf15e5a13dbb18a51b68fa06d6c1d4d8b99a796e"}, + {file = "pillow-10.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:83b2021f2ade7d1ed556bc50a399127d7fb245e725aa0113ebd05cfe88aaf588"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fad5ff2f13d69b7e74ce5b4ecd12cc0ec530fcee76356cac6742785ff71c452"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da2b52b37dad6d9ec64e653637a096905b258d2fc2b984c41ae7d08b938a67e4"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:47c0995fc4e7f79b5cfcab1fc437ff2890b770440f7696a3ba065ee0fd496563"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:322bdf3c9b556e9ffb18f93462e5f749d3444ce081290352c6070d014c93feb2"}, + {file = "pillow-10.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:51f1a1bffc50e2e9492e87d8e09a17c5eea8409cda8d3f277eb6edc82813c17c"}, + {file = "pillow-10.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:69ffdd6120a4737710a9eee73e1d2e37db89b620f702754b8f6e62594471dee0"}, + {file = "pillow-10.2.0-cp310-cp310-win32.whl", hash = "sha256:c6dafac9e0f2b3c78df97e79af707cdc5ef8e88208d686a4847bab8266870023"}, + {file = "pillow-10.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:aebb6044806f2e16ecc07b2a2637ee1ef67a11840a66752751714a0d924adf72"}, + {file = "pillow-10.2.0-cp310-cp310-win_arm64.whl", hash = "sha256:7049e301399273a0136ff39b84c3678e314f2158f50f517bc50285fb5ec847ad"}, + {file = "pillow-10.2.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:35bb52c37f256f662abdfa49d2dfa6ce5d93281d323a9af377a120e89a9eafb5"}, + {file = "pillow-10.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c23f307202661071d94b5e384e1e1dc7dfb972a28a2310e4ee16103e66ddb67"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:773efe0603db30c281521a7c0214cad7836c03b8ccff897beae9b47c0b657d61"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11fa2e5984b949b0dd6d7a94d967743d87c577ff0b83392f17cb3990d0d2fd6e"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:716d30ed977be8b37d3ef185fecb9e5a1d62d110dfbdcd1e2a122ab46fddb03f"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a086c2af425c5f62a65e12fbf385f7c9fcb8f107d0849dba5839461a129cf311"}, + {file = "pillow-10.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c8de2789052ed501dd829e9cae8d3dcce7acb4777ea4a479c14521c942d395b1"}, + {file = "pillow-10.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:609448742444d9290fd687940ac0b57fb35e6fd92bdb65386e08e99af60bf757"}, + {file = "pillow-10.2.0-cp311-cp311-win32.whl", hash = "sha256:823ef7a27cf86df6597fa0671066c1b596f69eba53efa3d1e1cb8b30f3533068"}, + {file = "pillow-10.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:1da3b2703afd040cf65ec97efea81cfba59cdbed9c11d8efc5ab09df9509fc56"}, + {file = "pillow-10.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:edca80cbfb2b68d7b56930b84a0e45ae1694aeba0541f798e908a49d66b837f1"}, + {file = "pillow-10.2.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:1b5e1b74d1bd1b78bc3477528919414874748dd363e6272efd5abf7654e68bef"}, + {file = "pillow-10.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0eae2073305f451d8ecacb5474997c08569fb4eb4ac231ffa4ad7d342fdc25ac"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7c2286c23cd350b80d2fc9d424fc797575fb16f854b831d16fd47ceec078f2c"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e23412b5c41e58cec602f1135c57dfcf15482013ce6e5f093a86db69646a5aa"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:52a50aa3fb3acb9cf7213573ef55d31d6eca37f5709c69e6858fe3bc04a5c2a2"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:127cee571038f252a552760076407f9cff79761c3d436a12af6000cd182a9d04"}, + {file = "pillow-10.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8d12251f02d69d8310b046e82572ed486685c38f02176bd08baf216746eb947f"}, + {file = "pillow-10.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:54f1852cd531aa981bc0965b7d609f5f6cc8ce8c41b1139f6ed6b3c54ab82bfb"}, + {file = "pillow-10.2.0-cp312-cp312-win32.whl", hash = "sha256:257d8788df5ca62c980314053197f4d46eefedf4e6175bc9412f14412ec4ea2f"}, + {file = "pillow-10.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:154e939c5f0053a383de4fd3d3da48d9427a7e985f58af8e94d0b3c9fcfcf4f9"}, + {file = "pillow-10.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:f379abd2f1e3dddb2b61bc67977a6b5a0a3f7485538bcc6f39ec76163891ee48"}, + {file = "pillow-10.2.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8373c6c251f7ef8bda6675dd6d2b3a0fcc31edf1201266b5cf608b62a37407f9"}, + {file = "pillow-10.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:870ea1ada0899fd0b79643990809323b389d4d1d46c192f97342eeb6ee0b8483"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4b6b1e20608493548b1f32bce8cca185bf0480983890403d3b8753e44077129"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3031709084b6e7852d00479fd1d310b07d0ba82765f973b543c8af5061cf990e"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:3ff074fc97dd4e80543a3e91f69d58889baf2002b6be64347ea8cf5533188213"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:cb4c38abeef13c61d6916f264d4845fab99d7b711be96c326b84df9e3e0ff62d"}, + {file = "pillow-10.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b1b3020d90c2d8e1dae29cf3ce54f8094f7938460fb5ce8bc5c01450b01fbaf6"}, + {file = "pillow-10.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:170aeb00224ab3dc54230c797f8404507240dd868cf52066f66a41b33169bdbe"}, + {file = "pillow-10.2.0-cp38-cp38-win32.whl", hash = "sha256:c4225f5220f46b2fde568c74fca27ae9771536c2e29d7c04f4fb62c83275ac4e"}, + {file = "pillow-10.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:0689b5a8c5288bc0504d9fcee48f61a6a586b9b98514d7d29b840143d6734f39"}, + {file = "pillow-10.2.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:b792a349405fbc0163190fde0dc7b3fef3c9268292586cf5645598b48e63dc67"}, + {file = "pillow-10.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c570f24be1e468e3f0ce7ef56a89a60f0e05b30a3669a459e419c6eac2c35364"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8ecd059fdaf60c1963c58ceb8997b32e9dc1b911f5da5307aab614f1ce5c2fb"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c365fd1703040de1ec284b176d6af5abe21b427cb3a5ff68e0759e1e313a5e7e"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:70c61d4c475835a19b3a5aa42492409878bbca7438554a1f89d20d58a7c75c01"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b6f491cdf80ae540738859d9766783e3b3c8e5bd37f5dfa0b76abdecc5081f13"}, + {file = "pillow-10.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d189550615b4948f45252d7f005e53c2040cea1af5b60d6f79491a6e147eef7"}, + {file = "pillow-10.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:49d9ba1ed0ef3e061088cd1e7538a0759aab559e2e0a80a36f9fd9d8c0c21591"}, + {file = "pillow-10.2.0-cp39-cp39-win32.whl", hash = "sha256:babf5acfede515f176833ed6028754cbcd0d206f7f614ea3447d67c33be12516"}, + {file = "pillow-10.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:0304004f8067386b477d20a518b50f3fa658a28d44e4116970abfcd94fac34a8"}, + {file = "pillow-10.2.0-cp39-cp39-win_arm64.whl", hash = "sha256:0fb3e7fc88a14eacd303e90481ad983fd5b69c761e9e6ef94c983f91025da869"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:322209c642aabdd6207517e9739c704dc9f9db943015535783239022002f054a"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3eedd52442c0a5ff4f887fab0c1c0bb164d8635b32c894bc1faf4c618dd89df2"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb28c753fd5eb3dd859b4ee95de66cc62af91bcff5db5f2571d32a520baf1f04"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:33870dc4653c5017bf4c8873e5488d8f8d5f8935e2f1fb9a2208c47cdd66efd2"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3c31822339516fb3c82d03f30e22b1d038da87ef27b6a78c9549888f8ceda39a"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a2b56ba36e05f973d450582fb015594aaa78834fefe8dfb8fcd79b93e64ba4c6"}, + {file = "pillow-10.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d8e6aeb9201e655354b3ad049cb77d19813ad4ece0df1249d3c793de3774f8c7"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:2247178effb34a77c11c0e8ac355c7a741ceca0a732b27bf11e747bbc950722f"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15587643b9e5eb26c48e49a7b33659790d28f190fc514a322d55da2fb5c2950e"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753cd8f2086b2b80180d9b3010dd4ed147efc167c90d3bf593fe2af21265e5a5"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7c8f97e8e7a9009bcacbe3766a36175056c12f9a44e6e6f2d5caad06dcfbf03b"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d1b35bcd6c5543b9cb547dee3150c93008f8dd0f1fef78fc0cd2b141c5baf58a"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fe4c15f6c9285dc54ce6553a3ce908ed37c8f3825b5a51a15c91442bb955b868"}, + {file = "pillow-10.2.0.tar.gz", hash = "sha256:e87f0b2c78157e12d7686b27d63c070fd65d994e8ddae6f328e0dcf4a0cd007e"}, +] + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] +fpx = ["olefile"] +mic = ["olefile"] +tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] +typing = ["typing-extensions"] +xmp = ["defusedxml"] + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] + +[[package]] +name = "pyarrow" +version = "15.0.2" +description = "Python library for Apache Arrow" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyarrow-15.0.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:88b340f0a1d05b5ccc3d2d986279045655b1fe8e41aba6ca44ea28da0d1455d8"}, + {file = "pyarrow-15.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eaa8f96cecf32da508e6c7f69bb8401f03745c050c1dd42ec2596f2e98deecac"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23c6753ed4f6adb8461e7c383e418391b8d8453c5d67e17f416c3a5d5709afbd"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f639c059035011db8c0497e541a8a45d98a58dbe34dc8fadd0ef128f2cee46e5"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:290e36a59a0993e9a5224ed2fb3e53375770f07379a0ea03ee2fce2e6d30b423"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:06c2bb2a98bc792f040bef31ad3e9be6a63d0cb39189227c08a7d955db96816e"}, + {file = "pyarrow-15.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:f7a197f3670606a960ddc12adbe8075cea5f707ad7bf0dffa09637fdbb89f76c"}, + {file = "pyarrow-15.0.2-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:5f8bc839ea36b1f99984c78e06e7a06054693dc2af8920f6fb416b5bca9944e4"}, + {file = "pyarrow-15.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f5e81dfb4e519baa6b4c80410421528c214427e77ca0ea9461eb4097c328fa33"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a4f240852b302a7af4646c8bfe9950c4691a419847001178662a98915fd7ee7"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e7d9cfb5a1e648e172428c7a42b744610956f3b70f524aa3a6c02a448ba853e"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2d4f905209de70c0eb5b2de6763104d5a9a37430f137678edfb9a675bac9cd98"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:90adb99e8ce5f36fbecbbc422e7dcbcbed07d985eed6062e459e23f9e71fd197"}, + {file = "pyarrow-15.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:b116e7fd7889294cbd24eb90cd9bdd3850be3738d61297855a71ac3b8124ee38"}, + {file = "pyarrow-15.0.2-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:25335e6f1f07fdaa026a61c758ee7d19ce824a866b27bba744348fa73bb5a440"}, + {file = "pyarrow-15.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:90f19e976d9c3d8e73c80be84ddbe2f830b6304e4c576349d9360e335cd627fc"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a22366249bf5fd40ddacc4f03cd3160f2d7c247692945afb1899bab8a140ddfb"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2a335198f886b07e4b5ea16d08ee06557e07db54a8400cc0d03c7f6a22f785f"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:3e6d459c0c22f0b9c810a3917a1de3ee704b021a5fb8b3bacf968eece6df098f"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:033b7cad32198754d93465dcfb71d0ba7cb7cd5c9afd7052cab7214676eec38b"}, + {file = "pyarrow-15.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:29850d050379d6e8b5a693098f4de7fd6a2bea4365bfd073d7c57c57b95041ee"}, + {file = "pyarrow-15.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:7167107d7fb6dcadb375b4b691b7e316f4368f39f6f45405a05535d7ad5e5058"}, + {file = "pyarrow-15.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e85241b44cc3d365ef950432a1b3bd44ac54626f37b2e3a0cc89c20e45dfd8bf"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:248723e4ed3255fcd73edcecc209744d58a9ca852e4cf3d2577811b6d4b59818"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ff3bdfe6f1b81ca5b73b70a8d482d37a766433823e0c21e22d1d7dde76ca33f"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:f3d77463dee7e9f284ef42d341689b459a63ff2e75cee2b9302058d0d98fe142"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:8c1faf2482fb89766e79745670cbca04e7018497d85be9242d5350cba21357e1"}, + {file = "pyarrow-15.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:28f3016958a8e45a1069303a4a4f6a7d4910643fc08adb1e2e4a7ff056272ad3"}, + {file = "pyarrow-15.0.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:89722cb64286ab3d4daf168386f6968c126057b8c7ec3ef96302e81d8cdb8ae4"}, + {file = "pyarrow-15.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cd0ba387705044b3ac77b1b317165c0498299b08261d8122c96051024f953cd5"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad2459bf1f22b6a5cdcc27ebfd99307d5526b62d217b984b9f5c974651398832"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58922e4bfece8b02abf7159f1f53a8f4d9f8e08f2d988109126c17c3bb261f22"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:adccc81d3dc0478ea0b498807b39a8d41628fa9210729b2f718b78cb997c7c91"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:8bd2baa5fe531571847983f36a30ddbf65261ef23e496862ece83bdceb70420d"}, + {file = "pyarrow-15.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6669799a1d4ca9da9c7e06ef48368320f5856f36f9a4dd31a11839dda3f6cc8c"}, + {file = "pyarrow-15.0.2.tar.gz", hash = "sha256:9c9bc803cb3b7bfacc1e96ffbfd923601065d9d3f911179d81e72d99fd74a3d9"}, +] + +[package.dependencies] +numpy = ">=1.16.6,<2" + +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] + +[[package]] +name = "pydantic" +version = "1.10.14" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, + {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, + {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, + {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, + {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, + {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, + {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, + {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, + {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, + {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pynacl" +version = "1.5.0" +description = "Python binding to the Networking and Cryptography (NaCl) library" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858"}, + {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b"}, + {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff"}, + {file = "PyNaCl-1.5.0-cp36-abi3-win32.whl", hash = "sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543"}, + {file = "PyNaCl-1.5.0-cp36-abi3-win_amd64.whl", hash = "sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93"}, + {file = "PyNaCl-1.5.0.tar.gz", hash = "sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba"}, +] + +[package.dependencies] +cffi = ">=1.4.1" + +[package.extras] +docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"] +tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] + +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + +[[package]] +name = "pyrsistent" +version = "0.20.0" +description = "Persistent/Functional/Immutable data structures" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, +] + +[[package]] +name = "pytesseract" +version = "0.3.10" +description = "Python-tesseract is a python wrapper for Google's Tesseract-OCR" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytesseract-0.3.10-py3-none-any.whl", hash = "sha256:8f22cc98f765bf13517ead0c70effedb46c153540d25783e04014f28b55a5fc6"}, + {file = "pytesseract-0.3.10.tar.gz", hash = "sha256:f1c3a8b0f07fd01a1085d451f5b8315be6eec1d5577a6796d46dc7a62bd4120f"}, +] + +[package.dependencies] +packaging = ">=21.3" +Pillow = ">=8.0.0" + +[[package]] +name = "pytest" +version = "6.2.5" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, + {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, +] + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +toml = "*" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] + +[[package]] +name = "pytest-mock" +version = "3.14.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, +] + +[package.dependencies] +pytest = ">=6.2.5" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-docx" +version = "1.1.0" +description = "Create, read, and update Microsoft Word .docx files." +optional = false +python-versions = ">=3.7" +files = [ + {file = "python-docx-1.1.0.tar.gz", hash = "sha256:5829b722141cf1ab79aedf0c34d9fe9924b29764584c0f2164eb2b02dcdf17c9"}, + {file = "python_docx-1.1.0-py3-none-any.whl", hash = "sha256:bac9773278098a1ddc43a52d84e22f5909c4a3080a624530b3ecb3771b07c6cd"}, +] + +[package.dependencies] +lxml = ">=3.1.0" +typing-extensions = "*" + +[[package]] +name = "python-iso639" +version = "2024.2.7" +description = "Look-up utilities for ISO 639 language codes and names" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-iso639-2024.2.7.tar.gz", hash = "sha256:c323233348c34d57c601e3e6d824088e492896bcb97a61a87f7d93401a305377"}, + {file = "python_iso639-2024.2.7-py3-none-any.whl", hash = "sha256:7b149623ff74230f4ee3061fb01d18e57a8d07c5fee2aa72907f39b7f6d16cbc"}, +] + +[package.extras] +dev = ["black (==24.1.1)", "build (==1.0.3)", "flake8 (==7.0.0)", "pytest (==8.0.0)", "twine (==4.0.2)"] + +[[package]] +name = "python-magic" +version = "0.4.27" +description = "File type identification using libmagic" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "python-magic-0.4.27.tar.gz", hash = "sha256:c1ba14b08e4a5f5c31a302b7721239695b2f0f058d125bd5ce1ee36b9d9d3c3b"}, + {file = "python_magic-0.4.27-py2.py3-none-any.whl", hash = "sha256:c212960ad306f700aa0d01e5d7a325d20548ff97eb9920dcd29513174f0294d3"}, +] + +[[package]] +name = "python-pptx" +version = "0.6.21" +description = "Generate and manipulate Open XML PowerPoint (.pptx) files" +optional = false +python-versions = "*" +files = [ + {file = "python-pptx-0.6.21.tar.gz", hash = "sha256:7798a2aaf89563565b3c7120c0acfe9aff775db0db3580544e3bf4840c2e378f"}, +] + +[package.dependencies] +lxml = ">=3.1.0" +Pillow = ">=3.3.2" +XlsxWriter = ">=0.5.7" + +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + +[[package]] +name = "pywin32" +version = "306" +description = "Python for Window Extensions" +optional = false +python-versions = "*" +files = [ + {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, + {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, + {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, + {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, + {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, + {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, + {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, + {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, + {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, + {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, + {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, + {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, + {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, + {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "rapidfuzz" +version = "3.7.0" +description = "rapid fuzzy string matching" +optional = false +python-versions = ">=3.8" +files = [ + {file = "rapidfuzz-3.7.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:860f438238f1807532aa5c5c25e74c284232ccc115fe84697b78e25d48f364f7"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4bb9285abeb0477cdb2f8ea0cf7fd4b5f72ed5a9a7d3f0c0bb4a5239db2fc1ed"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:08671280e0c04d2bb3f39511f13cae5914e6690036fd1eefc3d47a47f9fae634"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04bae4d9c16ce1bab6447d196fb8258d98139ed8f9b288a38b84887985e4227b"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1efa2268b51b68156fb84d18ca1720311698a58051c4a19c40d670057ce60519"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:600b4d4315f33ec0356c0dab3991a5d5761102420bcff29e0773706aa48936e8"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18bc2f13c73d5d34499ff6ada55b052c445d3aa64d22c2639e5ab45472568046"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e11c5e6593be41a555475c9c20320342c1f5585d635a064924956944c465ad4"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d7878025248b99ccca3285891899373f98548f2ca13835d83619ffc42241c626"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b4a7e37fe136022d944374fcd8a2f72b8a19f7b648d2cdfb946667e9ede97f9f"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b5881856f830351aaabd869151124f64a80bf61560546d9588a630a4e933a5de"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:c788b11565cc176fab8fab6dfcd469031e906927db94bf7e422afd8ef8f88a5a"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9e17a3092e74025d896ef1d67ac236c83494da37a78ef84c712e4e2273c115f1"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-win32.whl", hash = "sha256:e499c823206c9ffd9d89aa11f813a4babdb9219417d4efe4c8a6f8272da00e98"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:91f798cc00cd94a0def43e9befc6e867c9bd8fa8f882d1eaa40042f528b7e2c7"}, + {file = "rapidfuzz-3.7.0-cp310-cp310-win_arm64.whl", hash = "sha256:d5a3872f35bec89f07b993fa1c5401d11b9e68bcdc1b9737494e279308a38a5f"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ef6b6ab64c4c91c57a6b58e1d690b59453bfa1f1e9757a7e52e59b4079e36631"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2f9070b42c0ba030b045bba16a35bdb498a0d6acb0bdb3ff4e325960e685e290"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:63044c63565f50818d885bfcd40ac369947da4197de56b4d6c26408989d48edf"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49b0c47860c733a3d73a4b70b97b35c8cbf24ef24f8743732f0d1c412a8c85de"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1b14489b038f007f425a06fcf28ac6313c02cb603b54e3a28d9cfae82198cc0"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be08f39e397a618aab907887465d7fabc2d1a4d15d1a67cb8b526a7fb5202a3e"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16895dc62a7b92028f9c8b6d22830f1cbc77306ee794f461afc6028e1a8d7539"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:579cce49dfa57ffd8c8227b3fb53cced54b4df70cec502e63e9799b4d1f44004"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:40998c8dc35fdd221790b8b5134a8d7499adbfab9a5dd9ec626c7e92e17a43ed"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:dc3fdb4738a6b83ae27f1d8923b00d3a9c2b5c50da75b9f8b81841839c6e3e1f"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:92b8146fbfb37ac358ef7e0f6b79619e4f793fbbe894b99ea87920f9c0a9d77d"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:1dfceaa7c2914585bb8a043265c39ec09078f13fbf53b5525722fc074306b6fa"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f332d61f51b0b9c8b55a0fb052b4764b6ad599ea8ce948ac47a4388e9083c35e"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-win32.whl", hash = "sha256:dfd1e4819f1f3c47141f86159b44b7360ecb19bf675080b3b40437bf97273ab9"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:594b9c33fc1a86784962043ee3fbaaed875fbaadff72e467c2f7a83cd6c5d69d"}, + {file = "rapidfuzz-3.7.0-cp311-cp311-win_arm64.whl", hash = "sha256:0b13a6823a1b83ae43f8bf35955df35032bee7bec0daf9b5ab836e0286067434"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:075a419a0ec29be44b3d7f4bcfa5cb7e91e419379a85fc05eb33de68315bd96f"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:51a5b96d2081c3afbef1842a61d63e55d0a5a201473e6975a80190ff2d6f22ca"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a9460d8fddac7ea46dff9298eee9aa950dbfe79f2eb509a9f18fbaefcd10894c"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f39eb1513ee139ba6b5c01fe47ddf2d87e9560dd7fdee1068f7f6efbae70de34"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eace9fdde58a425d4c9a93021b24a0cac830df167a5b2fc73299e2acf9f41493"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0cc77237242303733de47829028a0a8b6ab9188b23ec9d9ff0a674fdcd3c8e7f"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:74e692357dd324dff691d379ef2c094c9ec526c0ce83ed43a066e4e68fe70bf6"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f2075ac9ee5c15d33d24a1efc8368d095602b5fd9634c5b5f24d83e41903528"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5a8ba64d72329a940ff6c74b721268c2004eecc48558f648a38e96915b5d1c1b"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a1f268a2a37cd22573b4a06eccd481c04504b246d3cadc2d8e8dfa64b575636d"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:42c2e8a2341363c7caf276efdbe1a673fc5267a02568c47c8e980f12e9bc8727"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:a9acca34b34fb895ee6a84c436bb919f3b9cd8f43e7003d43e9573a1d990ff74"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9bad6a0fe3bc1753dacaa6229a8ba7d9844eb7ae24d44d17c5f4c51c91a8a95e"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-win32.whl", hash = "sha256:c86bc4b1d2380739e6485396195e30021df509b4923f3f757914e171587bce7c"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:d7361608c8e73a1dc0203a87d151cddebdade0098a047c46da43c469c07df964"}, + {file = "rapidfuzz-3.7.0-cp312-cp312-win_arm64.whl", hash = "sha256:8fdc26e7863e0f63c2185d53bb61f5173ad4451c1c8287b535b30ea25a419a5a"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9b6167468f76779a14b9af66210f68741af94d32d086f19118de4e919f00585c"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5bd394e28ff221557ea4d8152fcec3e66d9f620557feca5f2bedc4c21f8cf2f9"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8e70f876ca89a6df344f8157ac60384e8c05a0dfb442da2490c3f1c45238ccf5"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c837f89d86a5affe9ee6574dad6b195475676a6ab171a67920fc99966f2ab2c"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cda4550a98658f9a8bcdc03d0498ed1565c1563880e3564603a9eaae28d51b2a"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecd70212fd9f1f8b1d3bdd8bcb05acc143defebd41148bdab43e573b043bb241"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:187db4cc8fb54f8c49c67b7f38ef3a122ce23be273032fa2ff34112a2694c3d8"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4604dfc1098920c4eb6d0c6b5cc7bdd4bf95b48633e790c1d3f100a25870691d"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01581b688c5f4f6665b779135e32db0edab1d78028abf914bb91469928efa383"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0828b55ec8ad084febdf4ab0c942eb1f81c97c0935f1cb0be0b4ea84ce755988"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:150c98b65faff17b917b9d36bff8a4d37b6173579c6bc2e38ff2044e209d37a4"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7e4eea225d2bff1aff4c85fcc44716596d3699374d99eb5906b7a7560297460e"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7bc944d7e830cfce0f8b4813875f05904207017b66e25ab7ee757507001310a9"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-win32.whl", hash = "sha256:3e55f02105c451ab6ff0edaaba57cab1b6c0a0241cfb2b306d4e8e1503adba50"}, + {file = "rapidfuzz-3.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:41851620d2900791d66d9b6092fc163441d7dd91a460c73b07957ff1c517bc30"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e8041c6b2d339766efe6298fa272f79d6dd799965df364ef4e50f488c101c899"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4e09d81008e212fc824ea23603ff5270d75886e72372fa6c7c41c1880bcb57ed"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:419c8961e861fb5fc5590056c66a279623d1ea27809baea17e00cdc313f1217a"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1522eaab91b9400b3ef16eebe445940a19e70035b5bc5d98aef23d66e9ac1df0"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:611278ce3136f4544d596af18ab8849827d64372e1d8888d9a8d071bf4a3f44d"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4efa9bfc5b955b6474ee077eee154e240441842fa304f280b06e6b6aa58a1d1e"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0cc9d3c8261457af3f8756b1f71a9fdc4892978a9e8b967976d2803e08bf972"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce728e2b582fd396bc2559160ee2e391e6a4b5d2e455624044699d96abe8a396"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3a6a36c9299e059e0bee3409218bc5235a46570c20fc980cdee5ed21ea6110ad"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9ea720db8def684c1eb71dadad1f61c9b52f4d979263eb5d443f2b22b0d5430a"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:358692f1df3f8aebcd48e69c77c948c9283b44c0efbaf1eeea01739efe3cd9a6"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:faded69ffe79adcefa8da08f414a0fd52375e2b47f57be79471691dad9656b5a"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7f9f3dc14fadbd553975f824ac48c381f42192cec9d7e5711b528357662a8d8e"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-win32.whl", hash = "sha256:7be5f460ff42d7d27729115bfe8a02e83fa0284536d8630ee900d17b75c29e65"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:dd5ad2c12dab2b98340c4b7b9592c8f349730bda9a2e49675ea592bbcbc1360b"}, + {file = "rapidfuzz-3.7.0-cp39-cp39-win_arm64.whl", hash = "sha256:aa163257a0ac4e70f9009d25e5030bdd83a8541dfa3ba78dc86b35c9e16a80b4"}, + {file = "rapidfuzz-3.7.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4e50840a8a8e0229563eeaf22e21a203359859557db8829f4d0285c17126c5fb"}, + {file = "rapidfuzz-3.7.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:632f09e19365ace5ff2670008adc8bf23d03d668b03a30230e5b60ff9317ee93"}, + {file = "rapidfuzz-3.7.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:209dda6ae66b702f74a78cef555397cdc2a83d7f48771774a20d2fc30808b28c"}, + {file = "rapidfuzz-3.7.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bc0b78572626af6ab134895e4dbfe4f4d615d18dcc43b8d902d8e45471aabba"}, + {file = "rapidfuzz-3.7.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7ba14850cc8258b3764ea16b8a4409ac2ba16d229bde7a5f495dd479cd9ccd56"}, + {file = "rapidfuzz-3.7.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b917764fd2b267addc9d03a96d26f751f6117a95f617428c44a069057653b528"}, + {file = "rapidfuzz-3.7.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1252ca156e1b053e84e5ae1c8e9e062ee80468faf23aa5c543708212a42795fd"}, + {file = "rapidfuzz-3.7.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:86c7676a32d7524e40bc73546e511a408bc831ae5b163029d325ea3a2027d089"}, + {file = "rapidfuzz-3.7.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20e7d729af2e5abb29caa070ec048aba042f134091923d9ca2ac662b5604577e"}, + {file = "rapidfuzz-3.7.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:86eea3e6c314a9238de568254a9c591ec73c2985f125675ed5f171d869c47773"}, + {file = "rapidfuzz-3.7.0.tar.gz", hash = "sha256:620df112c39c6d27316dc1e22046dc0382d6d91fd60d7c51bd41ca0333d867e9"}, +] + +[package.extras] +full = ["numpy"] + +[[package]] +name = "regex" +version = "2023.12.25" +description = "Alternative regular expression module, to replace re." +optional = false +python-versions = ">=3.7" +files = [ + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"}, + {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"}, + {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"}, + {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"}, + {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"}, + {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"}, + {file = "regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"}, + {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"}, + {file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = "sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"}, + {file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"}, + {file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"}, + {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"}, + {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"}, + {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"}, + {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-cache" +version = "1.2.0" +description = "A persistent cache for python requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, +] + +[package.dependencies] +attrs = ">=21.2" +cattrs = ">=22.2" +platformdirs = ">=2.5" +requests = ">=2.22" +url-normalize = ">=1.4" +urllib3 = ">=1.25.5" + +[package.extras] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] +bson = ["bson (>=0.5)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] +dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] +json = ["ujson (>=5.4)"] +mongodb = ["pymongo (>=3)"] +redis = ["redis (>=3)"] +security = ["itsdangerous (>=2.0)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "setuptools" +version = "69.2.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "soupsieve" +version = "2.5" +description = "A modern CSS selector implementation for Beautiful Soup." +optional = false +python-versions = ">=3.8" +files = [ + {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, + {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, +] + +[[package]] +name = "tabulate" +version = "0.9.0" +description = "Pretty-print tabular data" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, + {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, +] + +[package.extras] +widechars = ["wcwidth"] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "tqdm" +version = "4.66.2" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tqdm-4.66.2-py3-none-any.whl", hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"}, + {file = "tqdm-4.66.2.tar.gz", hash = "sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + +[[package]] +name = "typing-extensions" +version = "4.10.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, +] + +[[package]] +name = "typing-inspect" +version = "0.9.0" +description = "Runtime inspection utilities for typing module." +optional = false +python-versions = "*" +files = [ + {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, + {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, +] + +[package.dependencies] +mypy-extensions = ">=0.3.0" +typing-extensions = ">=3.7.4" + +[[package]] +name = "unstructured" +version = "0.10.27" +description = "A library that prepares raw documents for downstream ML tasks." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "unstructured-0.10.27-py3-none-any.whl", hash = "sha256:3a8a8e44302388ddc39c184059e8b4458f1cdc58032540b9af7d85f6c3eca3be"}, + {file = "unstructured-0.10.27.tar.gz", hash = "sha256:f567b5c4385993a9ab48db5563dd7b413aac4f2002bb22e6250496ea8f440f5e"}, +] + +[package.dependencies] +backoff = "*" +beautifulsoup4 = "*" +chardet = "*" +dataclasses-json = "*" +emoji = "*" +filetype = "*" +langdetect = "*" +lxml = "*" +nltk = "*" +numpy = "*" +python-docx = {version = ">=1.0.1", optional = true, markers = "extra == \"docx\""} +python-iso639 = "*" +python-magic = "*" +python-pptx = {version = "<=0.6.21", optional = true, markers = "extra == \"pptx\""} +rapidfuzz = "*" +requests = "*" +tabulate = "*" +typing-extensions = "*" + +[package.extras] +airtable = ["pyairtable"] +all-docs = ["markdown", "msg-parser", "networkx", "onnx", "openpyxl", "pandas", "pdf2image", "pdfminer.six", "pypandoc", "python-docx (>=1.0.1)", "python-pptx (<=0.6.21)", "unstructured-inference (==0.7.10)", "unstructured.pytesseract (>=0.3.12)", "xlrd"] +azure = ["adlfs", "fsspec (==2023.9.1)"] +azure-cognitive-search = ["azure-search-documents"] +bedrock = ["boto3", "langchain"] +biomed = ["bs4"] +box = ["boxfs", "fsspec (==2023.9.1)"] +confluence = ["atlassian-python-api"] +csv = ["pandas"] +delta-table = ["deltalake", "fsspec (==2023.9.1)"] +discord = ["discord-py"] +doc = ["python-docx (>=1.0.1)"] +docx = ["python-docx (>=1.0.1)"] +dropbox = ["dropboxdrivefs", "fsspec (==2023.9.1)"] +elasticsearch = ["elasticsearch", "jq"] +embed-huggingface = ["huggingface", "langchain", "sentence-transformers"] +epub = ["pypandoc"] +gcs = ["bs4", "fsspec (==2023.9.1)", "gcsfs"] +github = ["pygithub (>1.58.0)"] +gitlab = ["python-gitlab"] +google-drive = ["google-api-python-client"] +huggingface = ["langdetect", "sacremoses", "sentencepiece", "torch", "transformers"] +image = ["onnx", "pdf2image", "pdfminer.six", "unstructured-inference (==0.7.10)", "unstructured.pytesseract (>=0.3.12)"] +jira = ["atlassian-python-api"] +local-inference = ["markdown", "msg-parser", "networkx", "onnx", "openpyxl", "pandas", "pdf2image", "pdfminer.six", "pypandoc", "python-docx (>=1.0.1)", "python-pptx (<=0.6.21)", "unstructured-inference (==0.7.10)", "unstructured.pytesseract (>=0.3.12)", "xlrd"] +md = ["markdown"] +msg = ["msg-parser"] +notion = ["htmlBuilder", "notion-client"] +odt = ["pypandoc", "python-docx (>=1.0.1)"] +onedrive = ["Office365-REST-Python-Client (<2.4.3)", "bs4", "msal"] +openai = ["langchain", "openai", "tiktoken"] +org = ["pypandoc"] +outlook = ["Office365-REST-Python-Client (<2.4.3)", "msal"] +paddleocr = ["unstructured.paddleocr (==2.6.1.3)"] +pdf = ["onnx", "pdf2image", "pdfminer.six", "unstructured-inference (==0.7.10)", "unstructured.pytesseract (>=0.3.12)"] +ppt = ["python-pptx (<=0.6.21)"] +pptx = ["python-pptx (<=0.6.21)"] +reddit = ["praw"] +rst = ["pypandoc"] +rtf = ["pypandoc"] +s3 = ["fsspec (==2023.9.1)", "s3fs"] +salesforce = ["simple-salesforce"] +sharepoint = ["Office365-REST-Python-Client (<2.4.3)", "msal"] +slack = ["slack-sdk"] +tsv = ["pandas"] +wikipedia = ["wikipedia"] +xlsx = ["networkx", "openpyxl", "pandas", "xlrd"] + +[[package]] +name = "unstructured-pytesseract" +version = "0.3.12" +description = "Python-tesseract is a python wrapper for Google's Tesseract-OCR" +optional = false +python-versions = ">=3.8" +files = [ + {file = "unstructured.pytesseract-0.3.12-py3-none-any.whl", hash = "sha256:6ed42530fc697bb08d1ae4884cc517ee808620c1c1414efe8d5d90334da068d3"}, + {file = "unstructured.pytesseract-0.3.12.tar.gz", hash = "sha256:751a21d67b1f109036bf4daf796d3e04631697a355efd650f3373412b249de2e"}, +] + +[package.dependencies] +packaging = ">=21.3" +Pillow = ">=8.0.0" + +[[package]] +name = "url-normalize" +version = "1.4.3" +description = "URL normalization for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, + {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wcmatch" +version = "8.4" +description = "Wildcard/glob file name matcher." +optional = false +python-versions = ">=3.7" +files = [ + {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, + {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, +] + +[package.dependencies] +bracex = ">=2.1.1" + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[[package]] +name = "xlsxwriter" +version = "3.2.0" +description = "A Python module for creating Excel XLSX files." +optional = false +python-versions = ">=3.6" +files = [ + {file = "XlsxWriter-3.2.0-py3-none-any.whl", hash = "sha256:ecfd5405b3e0e228219bcaf24c2ca0915e012ca9464a14048021d21a995d490e"}, + {file = "XlsxWriter-3.2.0.tar.gz", hash = "sha256:9977d0c661a72866a61f9f7a809e25ebbb0fb7036baa3b9fe74afcfca6b3cb8c"}, +] + +[[package]] +name = "zipp" +version = "3.18.1" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, + {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9,<3.12" +content-hash = "641bfa5180e15acdc7432d72a0aeaefb242a95f759681d27282402dd93a2194c" diff --git a/airbyte-integrations/connectors/source-sftp-bulk/pyproject.toml b/airbyte-integrations/connectors/source-sftp-bulk/pyproject.toml new file mode 100644 index 0000000000000..bbbcb675ac2d5 --- /dev/null +++ b/airbyte-integrations/connectors/source-sftp-bulk/pyproject.toml @@ -0,0 +1,30 @@ +[build-system] +requires = [ "poetry-core>=1.0.0",] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +version = "1.0.0" +name = "source-sftp-bulk" +description = "Source implementation for SFTP Bulk." +authors = [ "Airbyte ",] +license = "MIT" +readme = "README.md" +documentation = "https://docs.airbyte.com/integrations/sources/sftp-bulk" +homepage = "https://airbyte.com" +repository = "https://github.com/airbytehq/airbyte" +[[tool.poetry.packages]] +include = "source_sftp_bulk" + +[tool.poetry.dependencies] +python = "^3.9,<3.12" +airbyte-cdk = {version = "^0", extras = ["file-based"]} +paramiko = "3.4.0" + +[tool.poetry.scripts] +source-sftp-bulk = "source_sftp_bulk.run:run" + +[tool.poetry.group.dev.dependencies] +docker = "^7.0.0" +freezegun = "^1.4.0" +pytest-mock = "^3.6.1" +pytest = "^6.1" diff --git a/airbyte-integrations/connectors/source-sftp-bulk/setup.py b/airbyte-integrations/connectors/source-sftp-bulk/setup.py deleted file mode 100644 index 282a57ca1fac2..0000000000000 --- a/airbyte-integrations/connectors/source-sftp-bulk/setup.py +++ /dev/null @@ -1,46 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = [ - "airbyte-cdk~=0.2", - "paramiko==2.11.0", - "backoff==1.8.0", - "terminaltables==3.1.0", - "pandas==1.5.0", -] - -TEST_REQUIREMENTS = ["requests-mock~=1.9.3", "pytest-mock~=3.6.1", "pytest~=6.1", "docker==5.0.3"] - -setup( - entry_points={ - "console_scripts": [ - "source-sftp-bulk=source_sftp_bulk.run:run", - ], - }, - name="source_sftp_bulk", - description="Source implementation for SFTP Bulk.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={ - "": [ - # Include yaml files in the package (if any) - "*.yml", - "*.yaml", - # Include all json files in the package, up to 4 levels deep - "*.json", - "*/*.json", - "*/*/*.json", - "*/*/*/*.json", - "*/*/*/*/*.json", - ] - }, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/source-sftp-bulk/source_sftp_bulk/__init__.py b/airbyte-integrations/connectors/source-sftp-bulk/source_sftp_bulk/__init__.py index 8cd850fb68381..6b09a6b644c03 100644 --- a/airbyte-integrations/connectors/source-sftp-bulk/source_sftp_bulk/__init__.py +++ b/airbyte-integrations/connectors/source-sftp-bulk/source_sftp_bulk/__init__.py @@ -3,6 +3,6 @@ # -from .source import SourceFtp +from .source import SourceSFTPBulk -__all__ = ["SourceFtp"] +__all__ = ["SourceSFTPBulk"] diff --git a/airbyte-integrations/connectors/source-sftp-bulk/source_sftp_bulk/client.py b/airbyte-integrations/connectors/source-sftp-bulk/source_sftp_bulk/client.py index 873f00ce7dd87..3084303d98b68 100644 --- a/airbyte-integrations/connectors/source-sftp-bulk/source_sftp_bulk/client.py +++ b/airbyte-integrations/connectors/source-sftp-bulk/source_sftp_bulk/client.py @@ -2,20 +2,14 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # -import csv import io import logging -import os -import re -import socket -import stat -from datetime import datetime -from typing import Any, Dict, List, Mapping, Tuple +from typing import Optional import backoff -import numpy as np -import pandas as pd import paramiko +from airbyte_cdk.utils import AirbyteTracedException +from airbyte_protocol.models import FailureType from paramiko.ssh_exception import AuthenticationException # set default timeout to 300 seconds @@ -23,13 +17,23 @@ logger = logging.getLogger("airbyte") -File = Dict[str, Any] +def handle_backoff(details): + logger.warning("SSH Connection closed unexpectedly. Waiting {wait} seconds and retrying...".format(**details)) -class SFTPClient: - _connection = None - def __init__(self, host, username, password=None, private_key=None, port=None, timeout=REQUEST_TIMEOUT): +class SFTPClient: + _connection: paramiko.SFTPClient = None + + def __init__( + self, + host: str, + username: str, + password: str = None, + private_key: Optional[str] = None, + port: Optional[int] = None, + timeout: Optional[int] = REQUEST_TIMEOUT, + ): self.host = host self.username = username self.password = password @@ -38,17 +42,11 @@ def __init__(self, host, username, password=None, private_key=None, port=None, t self.key = paramiko.RSAKey.from_private_key(io.StringIO(private_key)) if private_key else None self.timeout = float(timeout) if timeout else REQUEST_TIMEOUT - if not self.password and not self.key: - raise Exception("Either password or private key must be provided") - self._connect() - def handle_backoff(details): - logger.warning("SSH Connection closed unexpectedly. Waiting {wait} seconds and retrying...".format(**details)) - # If connection is snapped during connect flow, retry up to a # minute for SSH connection to succeed. 2^6 + 2^5 + ... - @backoff.on_exception(backoff.expo, (EOFError), max_tries=6, on_backoff=handle_backoff, jitter=None, factor=2) + @backoff.on_exception(backoff.expo, EOFError, max_tries=6, on_backoff=handle_backoff, jitter=None, factor=2) def _connect(self): if self._connection is not None: return @@ -64,20 +62,14 @@ def _connect(self): # set request timeout socket.settimeout(self.timeout) - except (AuthenticationException) as ex: - raise Exception("Authentication failed: %s" % ex) - except Exception as ex: - raise Exception("SSH Connection failed: %s" % ex) - - def __enter__(self): - self._connect() - return self - - def __exit__(self): - """Clean up the socket when this class gets garbage collected.""" - self.close() + except AuthenticationException as ex: + raise AirbyteTracedException( + failure_type=FailureType.config_error, + message="SSH Authentication failed, please check username, password or private key and try again", + internal_message="Authentication failed: %s" % ex, + ) - def close(self): + def __del__(self): if self._connection is not None: try: self._connection.close() @@ -88,129 +80,6 @@ def close(self): if str(e) != "'NoneType' object has no attribute 'time'": raise - @staticmethod - def get_files_matching_pattern(files, pattern) -> List[File]: - """Takes a file dict {"filepath": "...", "last_modified": "..."} and a regex pattern string, and returns files matching that pattern.""" - matcher = re.compile(pattern) - return [f for f in files if matcher.search(f["filepath"])] - - # backoff for 60 seconds as there is possibility the request will backoff again in 'discover.get_schema' - @backoff.on_exception(backoff.constant, (socket.timeout), max_time=60, interval=10, jitter=None) - def get_files_by_prefix(self, prefix: str) -> List[File]: - def is_empty(a): - return a.st_size == 0 - - def is_directory(a): - return stat.S_ISDIR(a.st_mode) - - files = [] - - if prefix is None or prefix == "": - prefix = "." - - try: - result = self._connection.listdir_attr(prefix) - except FileNotFoundError as e: - raise Exception("Directory '{}' does not exist".format(prefix)) from e - - for file_attr in result: - # NB: This only looks at the immediate level beneath the prefix directory - if is_directory(file_attr): - logger.info("Skipping directory: %s", file_attr.filename) - else: - if is_empty(file_attr): - logger.info("Skipping empty file: %s", file_attr.filename) - continue - - last_modified = file_attr.st_mtime - if last_modified is None: - logger.warning( - "Cannot read m_time for file %s, defaulting to current epoch time", os.path.join(prefix, file_attr.filename) - ) - last_modified = datetime.utcnow().timestamp() - - files.append( - { - "filepath": prefix + "/" + file_attr.filename, - "last_modified": datetime.utcfromtimestamp(last_modified).replace(tzinfo=None), - } - ) - - return files - - def get_files(self, prefix, search_pattern=None, modified_since=None, most_recent_only=False) -> List[File]: - files = self.get_files_by_prefix(prefix) - - if files: - logger.info('Found %s files in "%s"', len(files), prefix) - else: - logger.warning('Found no files on specified SFTP server at "%s"', prefix) - - matching_files = files - - if search_pattern is not None: - matching_files = self.get_files_matching_pattern(files, search_pattern) - - if matching_files and search_pattern: - logger.info('Found %s files in "%s" matching "%s"', len(matching_files), prefix, search_pattern) - - if not matching_files and search_pattern: - logger.warning('Found no files on specified SFTP server at "%s" matching "%s"', prefix, search_pattern) - - if modified_since is not None: - matching_files = [f for f in matching_files if f["last_modified"] > modified_since] - - # sort files in increasing order of "last_modified" - sorted_files = sorted(matching_files, key=lambda x: (x["last_modified"]).timestamp()) - - if most_recent_only: - logger.info(f"Returning only the most recently modified file: {sorted_files[-1]}.") - sorted_files = sorted_files[-1:] - - return sorted_files - - def peek_line(self, f): - pos = f.tell() - line = f.readline() - f.seek(pos) - return line - - @backoff.on_exception(backoff.expo, (socket.timeout), max_tries=5, factor=2) - def fetch_file(self, fn: Mapping[str, Any], separator, file_type="csv") -> pd.DataFrame: - try: - with self._connection.open(fn["filepath"], "r") as f: - df: pd.DataFrame = None - - if not separator: - dialect = csv.Sniffer().sniff(self.peek_line(f=f)) - separator = dialect.delimiter - - # Using pandas to make reading files in different formats easier - if file_type == "csv": - df = pd.read_csv(f, engine="python", sep=separator) - elif file_type == "json": - df = pd.read_json(f, lines=True) - else: - raise Exception("Unsupported file type: %s" % file_type) - - # Replace nan with None for correct - # json serialization when emitting records - df = df.replace({np.nan: None}) - df["last_modified"] = fn["last_modified"] - return df - - except OSError as e: - if "Permission denied" in str(e): - logger.warning("Skipping %s file because you do not have enough permissions.", f["filepath"]) - else: - logger.warning("Skipping %s file because it is unable to be read.", f["filepath"]) - - raise Exception("Unable to read file: %s" % e) from e - - def fetch_files(self, files, separator, file_type="csv") -> Tuple[datetime, Dict[str, Any]]: - logger.info("Fetching %s files", len(files)) - for fn in files: - records = self.fetch_file(fn=fn, separator=separator, file_type=file_type) - yield (fn["last_modified"], records.to_dict("records")) - - self.close() + @property + def sftp_connection(self) -> paramiko.SFTPClient: + return self._connection diff --git a/airbyte-integrations/connectors/source-sftp-bulk/source_sftp_bulk/run.py b/airbyte-integrations/connectors/source-sftp-bulk/source_sftp_bulk/run.py index c3e00b8100cd0..657e496e7812e 100644 --- a/airbyte-integrations/connectors/source-sftp-bulk/source_sftp_bulk/run.py +++ b/airbyte-integrations/connectors/source-sftp-bulk/source_sftp_bulk/run.py @@ -5,10 +5,18 @@ import sys -from airbyte_cdk.entrypoint import launch -from source_sftp_bulk import SourceFtp +from airbyte_cdk.entrypoint import AirbyteEntrypoint, launch +from source_sftp_bulk import SourceSFTPBulk def run(): - source = SourceFtp() - launch(source, sys.argv[1:]) + args = sys.argv[1:] + catalog_path = AirbyteEntrypoint.extract_catalog(args) + config_path = AirbyteEntrypoint.extract_config(args) + state_path = AirbyteEntrypoint.extract_state(args) + source = SourceSFTPBulk( + SourceSFTPBulk.read_catalog(catalog_path) if catalog_path else None, + SourceSFTPBulk.read_config(config_path) if config_path else None, + SourceSFTPBulk.read_state(state_path) if state_path else None, + ) + launch(source, args) diff --git a/airbyte-integrations/connectors/source-sftp-bulk/source_sftp_bulk/source.py b/airbyte-integrations/connectors/source-sftp-bulk/source_sftp_bulk/source.py index 0daf05882e0e2..6e90d9a1ff136 100644 --- a/airbyte-integrations/connectors/source-sftp-bulk/source_sftp_bulk/source.py +++ b/airbyte-integrations/connectors/source-sftp-bulk/source_sftp_bulk/source.py @@ -1,135 +1,26 @@ # # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # - -import json import logging -from datetime import datetime -from typing import Any, Dict, List, Mapping, Tuple - -from airbyte_cdk.logger import AirbyteLogger -from airbyte_cdk.models import AirbyteCatalog, AirbyteConnectionStatus, AirbyteStream, Status, SyncMode -from airbyte_cdk.sources import AbstractSource +from typing import Any, Mapping, Optional -from .client import SFTPClient -from .streams import FTPStream +from airbyte_cdk.models import ConfiguredAirbyteCatalog +from airbyte_cdk.sources.file_based.file_based_source import FileBasedSource +from airbyte_cdk.sources.file_based.stream.cursor.default_file_based_cursor import DefaultFileBasedCursor +from airbyte_cdk.sources.source import TState +from source_sftp_bulk.spec import SourceSFTPBulkSpec +from source_sftp_bulk.stream_reader import SourceSFTPBulkStreamReader logger = logging.getLogger("airbyte") -class SourceFtp(AbstractSource): - @property - def _default_json_schema(self): - return { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": {}, - } - - def _generate_json_schema(self, dtypes: Dict[str, Any]) -> Dict[str, Any]: - json_schema = self._default_json_schema - - for key, val in dtypes.items(): - if val == "int64": - json_schema["properties"][key] = {"type": ["null", "integer"]} - elif val == "float64": - json_schema["properties"][key] = {"type": ["null", "number"]} - elif val == "bool": - json_schema["properties"][key] = {"type": ["null", "boolean"]} - # Special case for last_modified timestamp - elif key == "last_modified": - json_schema["properties"][key] = {"type": ["null", "string"], "format": "date-time"} - # Default to string - else: - json_schema["properties"][key] = {"type": ["null", "string"]} - - return json_schema - - def _infer_json_schema(self, config: Mapping[str, Any], connection: SFTPClient) -> Dict[str, Any]: - file_pattern = config.get("file_pattern") - files = connection.get_files(config["folder_path"], file_pattern) - - if len(files) == 0: - logger.warning(f"No files found in folder {config['folder_path']} with pattern {file_pattern}") - return self._default_json_schema - - # Get last file to infer schema - # Use pandas `infer_objects` to infer dtypes - df = connection.fetch_file(fn=files[-1], file_type=config["file_type"], separator=config.get("separator")) - df = df.infer_objects() - - # Default column used for incremental sync - # Contains the date when a file was last modified or added - df["last_modified"] = files[-1]["last_modified"] - - if len(df) < 1: - logger.warning(f"No records found in file {files[0]}, can't infer json schema") - return self._default_json_schema - - return self._generate_json_schema(df.dtypes.to_dict()) - - def _get_connection(self, config: Mapping[str, Any]) -> SFTPClient: - return SFTPClient( - host=config["host"], - username=config["username"], - password=config["password"], - private_key=config.get("private_key", None), - port=config["port"], +class SourceSFTPBulk(FileBasedSource): + def __init__(self, catalog: Optional[ConfiguredAirbyteCatalog], config: Optional[Mapping[str, Any]], state: Optional[TState]): + super().__init__( + stream_reader=SourceSFTPBulkStreamReader(), + spec_class=SourceSFTPBulkSpec, + catalog=catalog, + config=config, + state=state, + cursor_cls=DefaultFileBasedCursor, ) - - def check_connection(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> Tuple[bool, AirbyteConnectionStatus]: - try: - conn = self._get_connection(config) - conn._connect() - conn.close() - return (True, AirbyteConnectionStatus(status=Status.SUCCEEDED)) - except Exception as ex: - logger.error( - f"Failed to connect to FTP server: {ex}", - ) - return (False, AirbyteConnectionStatus(status=Status.FAILED, message=f"An exception occurred: {str(ex)}")) - - def check(self, logger: AirbyteLogger, config: json) -> AirbyteConnectionStatus: - _, status = self.check_connection(logger, config) - return status - - def discover(self, logger: AirbyteLogger, config: json) -> AirbyteCatalog: - conn = self._get_connection(config) - json_schema = self._infer_json_schema(config, conn) - - stream_name = config["stream_name"] - streams = [] - - sync_modes = [SyncMode.full_refresh] - - file_most_recent = config.get("file_most_recent", False) - if not file_most_recent: - logger.debug("File most recent is false, enabling incremental sync mode") - sync_modes.append(SyncMode.incremental) - - streams.append( - AirbyteStream( - name=stream_name, - json_schema=json_schema, - supported_sync_modes=sync_modes, - source_defined_cursor=True, - default_cursor_field=[] if file_most_recent else ["last_modified"], - ) - ) - - conn.close() - return AirbyteCatalog(streams=streams) - - def streams(self, config: json) -> List[AirbyteStream]: - conn = SFTPClient( - host=config["host"], - username=config["username"], - password=config["password"], - private_key=config.get("private_key", None), - port=config["port"], - ) - - start_date = datetime.strptime(config["start_date"], "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=None) - json_schema = self._infer_json_schema(config, conn) - - return [FTPStream(config=config, start_date=start_date, connection=conn, json_schema=json_schema)] diff --git a/airbyte-integrations/connectors/source-sftp-bulk/source_sftp_bulk/spec.json b/airbyte-integrations/connectors/source-sftp-bulk/source_sftp_bulk/spec.json deleted file mode 100644 index c869dbad5fa8b..0000000000000 --- a/airbyte-integrations/connectors/source-sftp-bulk/source_sftp_bulk/spec.json +++ /dev/null @@ -1,112 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.io/integrations/source/ftp", - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "FTP Source Spec", - "type": "object", - "required": [ - "username", - "host", - "port", - "stream_name", - "start_date", - "folder_path" - ], - "additionalProperties": true, - "properties": { - "username": { - "title": "User Name", - "description": "The server user", - "type": "string", - "order": 0 - }, - "password": { - "title": "Password", - "description": "OS-level password for logging into the jump server host", - "type": "string", - "airbyte_secret": true, - "order": 1 - }, - "private_key": { - "title": "Private key", - "description": "The private key", - "type": "string", - "multiline": true, - "order": 2 - }, - "host": { - "title": "Host Address", - "description": "The server host address", - "type": "string", - "examples": ["www.host.com", "192.0.2.1"], - "order": 3 - }, - "port": { - "title": "Port", - "description": "The server port", - "type": "integer", - "default": 22, - "examples": ["22"], - "order": 4 - }, - "stream_name": { - "title": "Stream name", - "description": "The name of the stream or table you want to create", - "type": "string", - "examples": ["ftp_contacts"], - "order": 5 - }, - "file_type": { - "title": "File type", - "description": "The file type you want to sync. Currently only 'csv' and 'json' files are supported.", - "type": "string", - "default": "csv", - "enum": ["csv", "json"], - "order": 6, - "examples": ["csv", "json"] - }, - "separator": { - "title": "CSV Separator (Optional)", - "description": "The separator used in the CSV files. Define None if you want to use the Sniffer functionality", - "type": "string", - "default": ",", - "examples": [","], - "order": 7 - }, - "folder_path": { - "title": "Folder Path (Optional)", - "description": "The directory to search files for sync", - "type": "string", - "default": "", - "examples": ["/logs/2022"], - "order": 8 - }, - "file_pattern": { - "title": "File Pattern (Optional)", - "description": "The regular expression to specify files for sync in a chosen Folder Path", - "type": "string", - "default": "", - "examples": [ - "log-([0-9]{4})([0-9]{2})([0-9]{2}) - This will filter files which `log-yearmmdd`" - ], - "order": 9 - }, - "file_most_recent": { - "title": "Most recent file (Optional)", - "description": "Sync only the most recent file for the configured folder path and file pattern", - "type": "boolean", - "default": false, - "order": 10 - }, - "start_date": { - "type": "string", - "title": "Start Date", - "format": "date-time", - "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$", - "examples": ["2017-01-25T00:00:00Z"], - "description": "The date from which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.", - "order": 11 - } - } - } -} diff --git a/airbyte-integrations/connectors/source-sftp-bulk/source_sftp_bulk/spec.py b/airbyte-integrations/connectors/source-sftp-bulk/source_sftp_bulk/spec.py new file mode 100644 index 0000000000000..35b457b45cb57 --- /dev/null +++ b/airbyte-integrations/connectors/source-sftp-bulk/source_sftp_bulk/spec.py @@ -0,0 +1,53 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + +from typing import Literal, Optional, Union + +from airbyte_cdk.sources.file_based.config.abstract_file_based_spec import AbstractFileBasedSpec +from airbyte_cdk.utils.oneof_option_config import OneOfOptionConfig +from pydantic import BaseModel, Field + + +class PasswordCredentials(BaseModel): + class Config(OneOfOptionConfig): + title = "Authenticate via Password" + discriminator = "auth_type" + + auth_type: Literal["password"] = Field("password", const=True) + password: str = Field(title="Password", description="Password", airbyte_secret=True, order=3) + + +class PrivateKeyCredentials(BaseModel): + class Config(OneOfOptionConfig): + title = "Authenticate via Private Key" + discriminator = "auth_type" + + auth_type: Literal["private_key"] = Field("private_key", const=True) + private_key: str = Field(title="Private key", description="The Private key", multiline=True, order=4) + + +class SourceSFTPBulkSpec(AbstractFileBasedSpec): + class Config: + title = "SFTP Bulk Source Spec" + + host: str = Field(title="Host Address", description="The server host address", examples=["www.host.com", "192.0.2.1"], order=2) + username: str = Field(title="User Name", description="The server user", order=3) + credentials: Union[PasswordCredentials, PrivateKeyCredentials] = Field( + title="Authentication", + description="Credentials for connecting to the SFTP Server", + discriminator="auth_type", + type="object", + order=4, + ) + port: int = Field(title="Host Address", description="The server port", default=22, examples=["22"], order=5) + folder_path: Optional[str] = Field( + title="Folder Path", + description="The directory to search files for sync", + examples=["/logs/2022"], + order=6, + default="/", + pattern_descriptor="/folder_to_sync", + ) + + @classmethod + def documentation_url(cls) -> str: + return "https://docs.airbyte.com/integrations/sources/sftp-bulk" diff --git a/airbyte-integrations/connectors/source-sftp-bulk/source_sftp_bulk/stream_reader.py b/airbyte-integrations/connectors/source-sftp-bulk/source_sftp_bulk/stream_reader.py new file mode 100644 index 0000000000000..1d275dd6e42ba --- /dev/null +++ b/airbyte-integrations/connectors/source-sftp-bulk/source_sftp_bulk/stream_reader.py @@ -0,0 +1,82 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + +import datetime +import logging +import stat +from io import IOBase +from typing import Iterable, List, Optional + +from airbyte_cdk.sources.file_based.file_based_stream_reader import AbstractFileBasedStreamReader, FileReadMode +from airbyte_cdk.sources.file_based.remote_file import RemoteFile +from source_sftp_bulk.client import SFTPClient +from source_sftp_bulk.spec import SourceSFTPBulkSpec + + +class SourceSFTPBulkStreamReader(AbstractFileBasedStreamReader): + def __init__(self): + super().__init__() + self._sftp_client = None + + @property + def config(self) -> SourceSFTPBulkSpec: + return self._config + + @config.setter + def config(self, value: SourceSFTPBulkSpec): + """ + FileBasedSource reads the config from disk and parses it, and once parsed, the source sets the config on its StreamReader. + + Note: FileBasedSource only requires the keys defined in the abstract config, whereas concrete implementations of StreamReader + will require keys that (for example) allow it to authenticate with the 3rd party. + + Therefore, concrete implementations of AbstractFileBasedStreamReader's config setter should assert that `value` is of the correct + config type for that type of StreamReader. + """ + assert isinstance(value, SourceSFTPBulkSpec) + self._config = value + + @property + def sftp_client(self) -> SFTPClient: + if self._sftp_client is None: + authentication = ( + {"password": self.config.credentials.password} + if self.config.credentials.auth_type == "password" + else {"private_key": self.config.credentials.private_key} + ) + self._sftp_client = SFTPClient( + host=self.config.host, + username=self.config.username, + **authentication, + port=self.config.port, + ) + return self._sftp_client + + def get_matching_files( + self, + globs: List[str], + prefix: Optional[str], + logger: logging.Logger, + ) -> Iterable[RemoteFile]: + directories = [self._config.folder_path or "/"] + + # Iterate through directories and subdirectories + while directories: + current_dir = directories.pop() + try: + items = self.sftp_client.sftp_connection.listdir_attr(current_dir) + except Exception as e: + logger.warning(f"Failed to list files in directory: {e}") + continue + + for item in items: + if item.st_mode and stat.S_ISDIR(item.st_mode): + directories.append(f"{current_dir}/{item.filename}") + else: + yield from self.filter_files_by_globs_and_start_date( + [RemoteFile(uri=f"{current_dir}/{item.filename}", last_modified=datetime.datetime.fromtimestamp(item.st_mtime))], + globs, + ) + + def open_file(self, file: RemoteFile, mode: FileReadMode, encoding: Optional[str], logger: logging.Logger) -> IOBase: + remote_file = self.sftp_client.sftp_connection.open(file.uri, mode=mode.value) + return remote_file diff --git a/airbyte-integrations/connectors/source-sftp-bulk/source_sftp_bulk/streams.py b/airbyte-integrations/connectors/source-sftp-bulk/source_sftp_bulk/streams.py deleted file mode 100644 index d969067bd02f2..0000000000000 --- a/airbyte-integrations/connectors/source-sftp-bulk/source_sftp_bulk/streams.py +++ /dev/null @@ -1,79 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from datetime import datetime -from typing import Any, Iterable, List, Mapping - -from airbyte_cdk.models import SyncMode -from airbyte_cdk.sources.streams import IncrementalMixin, Stream - -from .client import SFTPClient - - -class FTPStream(Stream, IncrementalMixin): - primary_key = None - cursor_field = "last_modified" - - def __init__(self, config: Mapping[str, Any], start_date: datetime, connection: SFTPClient, json_schema: Mapping[str, Any], **kwargs): - super(Stream, self).__init__(**kwargs) - - self.config = config - self.start_date = start_date - self.connection = connection - - self._cursor_value: float = None - self._name = config["stream_name"] - self._only_most_recent_file: bool = config.get("file_most_recent", False) - self._json_schema = json_schema - - if self._only_most_recent_file: - self.cursor_field = None - - @property - def name(self) -> str: - """Source name""" - return self._name - - @property - def state(self) -> Mapping[str, Any]: - if self._cursor_value: - return {self.cursor_field: self._cursor_value.isoformat()} - - return {self.cursor_field: self.start_date.isoformat()} - - @state.setter - def state(self, value: Mapping[str, Any]): - self._cursor_value = datetime.fromisoformat(value[self.cursor_field]) - - def get_json_schema(self) -> Mapping[str, Any]: - return self._json_schema - - def read_records( - self, - sync_mode: SyncMode, - cursor_field: List[str] = None, - stream_slice: Mapping[str, Any] = None, - stream_state: Mapping[str, Any] = None, - ) -> Iterable[Mapping[str, Any]]: - if stream_state and sync_mode == SyncMode.incremental: - self._cursor_value = datetime.fromisoformat(stream_state[self.cursor_field]) - - if not stream_state and sync_mode == SyncMode.incremental: - self._cursor_value = self.start_date - - files = self.connection.get_files( - self.config.get("folder_path"), - self.config.get("file_pattern"), - modified_since=self._cursor_value, - most_recent_only=self._only_most_recent_file, - ) - - for cursor, records in self.connection.fetch_files( - files=files, file_type=self.config["file_type"], separator=self.config.get("separator") - ): - if cursor and sync_mode == SyncMode.incremental: - if self._cursor_value and cursor > self._cursor_value: - self._cursor_value = cursor - - yield from records diff --git a/airbyte-integrations/connectors/source-sftp-bulk/unit_tests/client_test.py b/airbyte-integrations/connectors/source-sftp-bulk/unit_tests/client_test.py index a2bac92dc116a..baa7fd220836e 100644 --- a/airbyte-integrations/connectors/source-sftp-bulk/unit_tests/client_test.py +++ b/airbyte-integrations/connectors/source-sftp-bulk/unit_tests/client_test.py @@ -1,67 +1,29 @@ -# # Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# -from source_sftp_bulk.client import SFTPClient - - -def test_get_files_matching_pattern_match(): - files = [ - { - "filepath": "test.csv", - "last_modified": "2021-01-01 00:00:00", - }, - { - "filepath": "test2.csv", - "last_modified": "2021-01-01 00:00:00", - }, - ] - - result = SFTPClient.get_files_matching_pattern(files, "test.csv") - assert result == [ - { - "filepath": "test.csv", - "last_modified": "2021-01-01 00:00:00", - } - ] - - -def test_get_files_matching_pattern_no_match(): - files = [ - { - "filepath": "test.csv", - "last_modified": "2021-01-01 00:00:00", - }, - { - "filepath": "test2.csv", - "last_modified": "2021-01-01 00:00:00", - }, - ] - - result = SFTPClient.get_files_matching_pattern(files, "test3.csv") - assert result == [] +from unittest.mock import MagicMock, patch +import paramiko +import pytest +from paramiko.ssh_exception import SSHException +from source_sftp_bulk.client import SFTPClient -def test_get_files_matching_pattern_regex_match(): - files = [ - { - "filepath": "test.csv", - "last_modified": "2021-01-01 00:00:00", - }, - { - "filepath": "test2.csv", - "last_modified": "2021-01-01 00:00:00", - }, - ] - result = SFTPClient.get_files_matching_pattern(files, "test.*") - assert result == [ - { - "filepath": "test.csv", - "last_modified": "2021-01-01 00:00:00", - }, - { - "filepath": "test2.csv", - "last_modified": "2021-01-01 00:00:00", - }, - ] +def test_client_exception(): + with pytest.raises(SSHException): + SFTPClient( + host="localhost", + username="username", + password="password", + port=123, + ) + + +def test_client_connection(): + with patch.object(paramiko, "Transport", MagicMock()), patch.object(paramiko, "SFTPClient", MagicMock()): + SFTPClient( + host="localhost", + username="username", + password="password", + port=123, + ) + assert SFTPClient diff --git a/airbyte-integrations/connectors/source-sftp-bulk/unit_tests/source_test.py b/airbyte-integrations/connectors/source-sftp-bulk/unit_tests/source_test.py deleted file mode 100644 index e2cbecac5873c..0000000000000 --- a/airbyte-integrations/connectors/source-sftp-bulk/unit_tests/source_test.py +++ /dev/null @@ -1,32 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from source_sftp_bulk import SourceFtp - -source = SourceFtp() - - -def test_generate_json_schema(): - dtypes = { - "col1": "int64", - "col2": "float64", - "col3": "bool", - "col4": "object", - "col5": "string", - "last_modified": "datetime64[ns]", - } - - result = source._generate_json_schema(dtypes) - assert result == { - "$schema": "http://json-schema.org/draft-07/schema#", - "properties": { - "col1": {"type": ["null", "integer"]}, - "col2": {"type": ["null", "number"]}, - "col3": {"type": ["null", "boolean"]}, - "col4": {"type": ["null", "string"]}, - "col5": {"type": ["null", "string"]}, - "last_modified": {"format": "date-time", "type": ["null", "string"]}, - }, - "type": "object", - } diff --git a/airbyte-integrations/connectors/source-sftp-bulk/unit_tests/stream_reader_test.py b/airbyte-integrations/connectors/source-sftp-bulk/unit_tests/stream_reader_test.py new file mode 100644 index 0000000000000..6b88d5acb5ea2 --- /dev/null +++ b/airbyte-integrations/connectors/source-sftp-bulk/unit_tests/stream_reader_test.py @@ -0,0 +1,39 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. +import datetime +import logging +from unittest.mock import MagicMock, patch + +import freezegun +import paramiko +from source_sftp_bulk.spec import SourceSFTPBulkSpec +from source_sftp_bulk.stream_reader import SourceSFTPBulkStreamReader + +logger = logging.Logger("") + + +@freezegun.freeze_time("2024-01-02T00:00:00") +def test_stream_reader_files_read_and_filter_by_date(): + fake_client = MagicMock() + fake_client.from_transport = MagicMock(return_value=fake_client) + files_on_server = [ + [ + MagicMock(filename="sample_file_1.csv", st_mode=180, st_mtime=1704067200), + MagicMock(filename="sample_file_2.csv", st_mode=180, st_mtime=1704060200), + ] + ] + fake_client.listdir_attr = MagicMock(side_effect=files_on_server) + with patch.object(paramiko, "Transport", MagicMock()), patch.object(paramiko, "SFTPClient", fake_client): + reader = SourceSFTPBulkStreamReader() + config = SourceSFTPBulkSpec( + host="localhost", + username="username", + credentials={"auth_type": "password", "password": "password"}, + port=123, + streams=[], + start_date="2024-01-01T00:00:00.000000Z", + ) + reader.config = config + files = list(reader.get_matching_files(globs=["**"], prefix=None, logger=logger)) + assert len(files) == 1 + assert files[0].uri == "//sample_file_1.csv" + assert files[0].last_modified == datetime.datetime(2024, 1, 1, 0, 0) diff --git a/airbyte-integrations/connectors/source-shopify/integration_tests/expected_records_transactions_with_user_id.jsonl b/airbyte-integrations/connectors/source-shopify/integration_tests/expected_records_transactions_with_user_id.jsonl index 37be336a3ed61..95af9f2580ce3 100644 --- a/airbyte-integrations/connectors/source-shopify/integration_tests/expected_records_transactions_with_user_id.jsonl +++ b/airbyte-integrations/connectors/source-shopify/integration_tests/expected_records_transactions_with_user_id.jsonl @@ -89,7 +89,7 @@ {"stream": "tender_transactions", "data": {"id": 4448992690365, "order_id": 5010584895677, "amount": "102.00", "currency": "USD", "user_id": null, "test": false, "processed_at": "2023-04-13T05:09:44-07:00", "remote_reference": null, "payment_details": null, "payment_method": "other", "shop_url": "airbyte-integration-test"}, "emitted_at": 1708953685995} {"stream": "transactions", "data": {"id": 6281692217533, "order_id": 5010584895677, "kind": "sale", "gateway": "manual", "status": "success", "message": "Marked the manual payment as received", "created_at": "2023-04-13T05:09:44-07:00", "test": false, "authorization": null, "location_id": null, "user_id": null, "parent_id": null, "processed_at": "2023-04-13T05:09:44-07:00", "device_id": null, "error_code": null, "source_name": "checkout_one", "receipt": {}, "amount": 102.0, "currency": "USD", "payment_id": "r9BerEaVJ5OzQNmPGZsK2V7zq", "total_unsettled_set": {"presentment_money": {"amount": 0.0, "currency": "USD"}, "shop_money": {"amount": 0.0, "currency": "USD"}}, "admin_graphql_api_id": "gid://shopify/OrderTransaction/6281692217533", "shop_url": "airbyte-integration-test"}, "emitted_at": 1708953688151} {"stream": "transactions", "data": {"id": 6281693561021, "order_id": 5010585911485, "kind": "sale", "gateway": "manual", "status": "success", "message": "Marked the manual payment as received", "created_at": "2023-04-13T05:11:15-07:00", "test": false, "authorization": null, "location_id": null, "user_id": null, "parent_id": null, "processed_at": "2023-04-13T05:11:15-07:00", "device_id": null, "error_code": null, "source_name": "checkout_one", "receipt": {}, "amount": 19.0, "currency": "USD", "payment_id": "rguGpKMnZqzpEzPvDfnSS8x4B", "total_unsettled_set": {"presentment_money": {"amount": 0.0, "currency": "USD"}, "shop_money": {"amount": 0.0, "currency": "USD"}}, "admin_graphql_api_id": "gid://shopify/OrderTransaction/6281693561021", "shop_url": "airbyte-integration-test"}, "emitted_at": 1708953688613} -{"stream": "transactions", "data": {"id": 5721110872253, "order_id": 4554821468349, "kind": "sale", "gateway": "bogus", "status": "success", "message": "Bogus Gateway: Forced success", "created_at": "2022-06-15T05:16:52-07:00", "test": true, "authorization": "53433", "location_id": null, "user_id": null, "parent_id": null, "processed_at": "2022-06-15T05:16:52-07:00", "device_id": null, "error_code": null, "source_name": "580111", "payment_details": {"credit_card_bin": "1", "avs_result_code": null, "cvv_result_code": null, "credit_card_number": "\u2022\u2022\u2022\u2022 \u2022\u2022\u2022\u2022 \u2022\u2022\u2022\u2022 1", "credit_card_company": "Bogus", "buyer_action_info": null, "credit_card_name": "Bogus Gateway", "credit_card_wallet": null, "credit_card_expiration_month": 2, "credit_card_expiration_year": 2025}, "receipt": {"paid_amount": "57.23"}, "amount": 57.23, "currency": "USD", "payment_id": "c25048437719229.1", "total_unsettled_set": {"presentment_money": {"amount": 0.0, "currency": "USD"}, "shop_money": {"amount": 0.0, "currency": "USD"}}, "admin_graphql_api_id": "gid://shopify/OrderTransaction/5721110872253", "shop_url": "airbyte-integration-test"}, "emitted_at": 1708953689123} +{"stream": "transactions", "data": {"id": 6302086037693, "order_id": 5033391718589, "kind": "sale", "gateway": "manual", "status": "success", "message": "Marked the manual payment as received", "created_at": "2023-04-24T11:00:08-07:00", "test": false, "authorization": null, "location_id": null, "user_id": null, "parent_id": null, "processed_at": "2023-04-24T11:00:08-07:00", "device_id": null, "error_code": null, "source_name": "checkout_one", "receipt": {}, "amount": 19.0, "currency": "USD", "payment_id": "ru7Najsh1HavL8RRkZHavCzGe", "total_unsettled_set": {"presentment_money": {"amount": 0.0, "currency": "USD"}, "shop_money": {"amount": 0.0, "currency": "USD"}}, "admin_graphql_api_id": "gid://shopify/OrderTransaction/6302086037693", "shop_url": "airbyte-integration-test"}, "emitted_at": 1710501533693} {"stream": "customer_address", "data": {"address1": "My Best Accent", "address2": "", "city": "Fair Lawn", "country": "United States", "country_code": "US", "company": "Test Company", "first_name": "New Test", "id": 8092523135165, "last_name": "Customer", "name": "New Test Customer", "phone": "", "province": "New Jersey", "province_code": "NJ", "zip": "07410", "customer_id": 6569096478909, "country_name": "United States", "default": true, "updated_at": "2023-04-24T13:53:48+00:00", "shop_url": "airbyte-integration-test"}, "emitted_at": 1708953700866} {"stream": "customer_address", "data": {"address1": null, "address2": null, "city": null, "country": null, "country_code": null, "company": null, "first_name": "MArcos", "id": 8212915650749, "last_name": "Millnitz", "name": "MArcos Millnitz", "phone": null, "province": null, "province_code": null, "zip": null, "customer_id": 6676027932861, "country_name": null, "default": true, "updated_at": "2023-07-11T20:07:45+00:00", "shop_url": "airbyte-integration-test"}, "emitted_at": 1708953700867} {"stream": "countries", "data": {"id": 417014841533, "name": "Rest of World", "code": "*", "tax_name": "Tax", "tax": 0.0, "provinces": [], "shop_url": "airbyte-integration-test"}, "emitted_at": 1708953701837} diff --git a/airbyte-integrations/connectors/source-shopify/metadata.yaml b/airbyte-integrations/connectors/source-shopify/metadata.yaml index 195c399079287..6454a99074152 100644 --- a/airbyte-integrations/connectors/source-shopify/metadata.yaml +++ b/airbyte-integrations/connectors/source-shopify/metadata.yaml @@ -11,12 +11,13 @@ data: connectorSubtype: api connectorType: source definitionId: 9da77001-af33-4bcd-be46-6252bf9342b9 - dockerImageTag: 2.0.0 + dockerImageTag: 2.0.4 dockerRepository: airbyte/source-shopify documentationUrl: https://docs.airbyte.com/integrations/sources/shopify githubIssueLabel: source-shopify icon: shopify.svg license: ELv2 + maxSecondsBetweenMessages: 7200 name: Shopify remoteRegistries: pypi: diff --git a/airbyte-integrations/connectors/source-shopify/poetry.lock b/airbyte-integrations/connectors/source-shopify/poetry.lock index ea5d5de6d8573..03ee3e88209ba 100644 --- a/airbyte-integrations/connectors/source-shopify/poetry.lock +++ b/airbyte-integrations/connectors/source-shopify/poetry.lock @@ -1,14 +1,14 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.59.0" +version = "0.73.0" description = "A framework for writing Airbyte Connectors." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "airbyte-cdk-0.59.0.tar.gz", hash = "sha256:2f7bc07556cc7f42f0daf41d09be08fd22102864d087a27c8999f6f13fe67aad"}, - {file = "airbyte_cdk-0.59.0-py3-none-any.whl", hash = "sha256:94c561c053b8be3a66bfefe420812ced9237403441249408e2af5445214a6f7b"}, + {file = "airbyte-cdk-0.73.0.tar.gz", hash = "sha256:a03e0265a8a4afb1378d285993624659d9f481404aaf69cf7c0a5ddad3568ea2"}, + {file = "airbyte_cdk-0.73.0-py3-none-any.whl", hash = "sha256:339e42a7602461073a69bf0c4e11be26a7eea3157def43ffecdf9d0d73f32c6f"}, ] [package.dependencies] @@ -32,8 +32,8 @@ requests-cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] @@ -94,13 +94,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -488,13 +488,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -671,13 +671,13 @@ files = [ [[package]] name = "pytest" -version = "8.0.0" +version = "8.1.1" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.0.0-py3-none-any.whl", hash = "sha256:50fb9cbe836c3f20f0dfa99c565201fb75dc54c8d76373cd1bde06b06657bdb6"}, - {file = "pytest-8.0.0.tar.gz", hash = "sha256:249b1b0864530ba251b7438274c4d251c58d868edaaec8762893ad4a0d71c36c"}, + {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"}, + {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"}, ] [package.dependencies] @@ -685,11 +685,11 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" -pluggy = ">=1.3.0,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} +pluggy = ">=1.4,<2.0" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-mock" @@ -710,13 +710,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -816,13 +816,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -834,15 +834,15 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" @@ -865,19 +865,19 @@ test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "tes [[package]] name = "setuptools" -version = "69.1.0" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "sgqlc" @@ -921,13 +921,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -946,13 +946,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1057,4 +1057,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "d5eb24b58b8ae8d10cc9f421ea1d125ef3833c69a1007e13624990a30bf2070a" +content-hash = "56ff1b977f317ed15053fe03a5cb1dfa65d17266ed66830a15892c4008a184d4" diff --git a/airbyte-integrations/connectors/source-shopify/pyproject.toml b/airbyte-integrations/connectors/source-shopify/pyproject.toml index e26cb727a7e72..70adf237cbb6f 100644 --- a/airbyte-integrations/connectors/source-shopify/pyproject.toml +++ b/airbyte-integrations/connectors/source-shopify/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "2.0.0" +version = "2.0.4" name = "source-shopify" description = "Source CDK implementation for Shopify." authors = [ "Airbyte ",] @@ -17,7 +17,7 @@ include = "source_shopify" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "==0.59.0" +airbyte-cdk = ">=0.73.0" sgqlc = "==16.3" graphql-query = "^1.1.1" diff --git a/airbyte-integrations/connectors/source-shopify/source_shopify/config_migrations.py b/airbyte-integrations/connectors/source-shopify/source_shopify/config_migrations.py new file mode 100644 index 0000000000000..f1bd53270d70a --- /dev/null +++ b/airbyte-integrations/connectors/source-shopify/source_shopify/config_migrations.py @@ -0,0 +1,93 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +from typing import Any, List, Mapping + +from airbyte_cdk.config_observation import create_connector_config_control_message +from airbyte_cdk.entrypoint import AirbyteEntrypoint +from airbyte_cdk.sources import Source +from airbyte_cdk.sources.message import InMemoryMessageRepository, MessageRepository + + +class MigrateConfig: + """ + This class stands for migrating the config at runtime, + while providing the backward compatibility when falling back to the previous source version. + + Specifically, starting from `2.0.1`, the `start_date` property should be not (None or `None`): + > "start_date": "2020-01-01" + instead of, in `2.0.0` for some older configs, when the `start_date` was not required: + > {...} + """ + + message_repository: MessageRepository = InMemoryMessageRepository() + migrate_key: str = "start_date" + # default spec value for the `start_date` is `2020-01-01` + default_start_date_value: str = "2020-01-01" + + @classmethod + def should_migrate(cls, config: Mapping[str, Any]) -> bool: + """ + This method determines whether the config should be migrated to have the new structure with `start_date`, + based on the source spec. + + Returns: + > True, if the transformation is necessary + > False, otherwise. + """ + # If the config was already migrated, there is no need to do this again. + # but if the customer has already switched to the new version, + # corrected the old config and switches back to the new version, + # we should try to migrate the modified old custom reports. + none_values: List[str] = [None, "None"] + key_not_present_in_config = cls.migrate_key not in config + key_present_in_config_but_invalid = cls.migrate_key in config and config.get(cls.migrate_key) in none_values + + if key_not_present_in_config: + return True + elif key_present_in_config_but_invalid: + return True + else: + return False + + @classmethod + def modify_config(cls, config: Mapping[str, Any], source: Source = None) -> Mapping[str, Any]: + config[cls.migrate_key] = cls.default_start_date_value + return config + + @classmethod + def modify_and_save(cls, config_path: str, source: Source, config: Mapping[str, Any]) -> Mapping[str, Any]: + # modify the config + migrated_config = cls.modify_config(config, source) + # save the config + source.write_config(migrated_config, config_path) + # return modified config + return migrated_config + + @classmethod + def emit_control_message(cls, migrated_config: Mapping[str, Any]) -> None: + # add the Airbyte Control Message to message repo + cls.message_repository.emit_message(create_connector_config_control_message(migrated_config)) + # emit the Airbyte Control Message from message queue to stdout + for message in cls.message_repository._message_queue: + print(message.json(exclude_unset=True)) + + @classmethod + def migrate(cls, args: List[str], source: Source) -> None: + """ + This method checks the input args, should the config be migrated, + transform if neccessary and emit the CONTROL message. + """ + # get config path + config_path = AirbyteEntrypoint(source).extract_config(args) + # proceed only if `--config` arg is provided + if config_path: + # read the existing config + config = source.read_config(config_path) + # migration check + if cls.should_migrate(config): + cls.emit_control_message( + cls.modify_and_save(config_path, source, config), + ) diff --git a/airbyte-integrations/connectors/source-shopify/source_shopify/run.py b/airbyte-integrations/connectors/source-shopify/source_shopify/run.py index 9c13e936ca719..c20aff249a575 100644 --- a/airbyte-integrations/connectors/source-shopify/source_shopify/run.py +++ b/airbyte-integrations/connectors/source-shopify/source_shopify/run.py @@ -6,10 +6,14 @@ import sys from airbyte_cdk.entrypoint import launch +from source_shopify.config_migrations import MigrateConfig from .source import SourceShopify -def run(): +def run() -> None: source = SourceShopify() + # migrate config at runtime + MigrateConfig.migrate(sys.argv[1:], source) + # run the connector launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-shopify/source_shopify/source.py b/airbyte-integrations/connectors/source-shopify/source_shopify/source.py index 7c584865f1c08..e7dffbe3f1e0c 100644 --- a/airbyte-integrations/connectors/source-shopify/source_shopify/source.py +++ b/airbyte-integrations/connectors/source-shopify/source_shopify/source.py @@ -6,9 +6,10 @@ from typing import Any, List, Mapping, Tuple from airbyte_cdk import AirbyteLogger -from airbyte_cdk.models import SyncMode +from airbyte_cdk.models import FailureType, SyncMode from airbyte_cdk.sources import AbstractSource from airbyte_cdk.sources.streams import Stream +from airbyte_cdk.utils import AirbyteTracedException from requests.exceptions import ConnectionError, RequestException, SSLError from .auth import MissingAccessTokenError, ShopifyAuthenticator @@ -111,6 +112,11 @@ def get_shop_id(self) -> str: By the time this method is tiggered, we are sure we've passed the `Connection Checks` and have the `shop_id` value. """ response = list(self.test_stream.read_records(sync_mode=SyncMode.full_refresh)) + if len(response) == 0: + raise AirbyteTracedException( + message=f"Could not find a Shopify shop with the name {self.config.get('shop', '')}. Make sure it's valid.", + failure_type=FailureType.config_error, + ) shop_id = response[0].get("id") if shop_id: return shop_id diff --git a/airbyte-integrations/connectors/source-shopify/source_shopify/streams/base_streams.py b/airbyte-integrations/connectors/source-shopify/source_shopify/streams/base_streams.py index 6f423a54ec318..bff54825c7421 100644 --- a/airbyte-integrations/connectors/source-shopify/source_shopify/streams/base_streams.py +++ b/airbyte-integrations/connectors/source-shopify/source_shopify/streams/base_streams.py @@ -89,7 +89,9 @@ def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapp self.logger.warning(f"Unexpected error in `parse_ersponse`: {e}, the actual response data: {response.text}") yield {} - def produce_records(self, records: Optional[Union[Iterable[Mapping[str, Any]], Mapping[str, Any]]] = None) -> Mapping[str, Any]: + def produce_records( + self, records: Optional[Union[Iterable[Mapping[str, Any]], Mapping[str, Any]]] = None + ) -> Iterable[Mapping[str, Any]]: # transform method was implemented according to issue 4841 # Shopify API returns price fields as a string and it should be converted to number # this solution designed to convert string into number, but in future can be modified for general purpose @@ -139,7 +141,7 @@ def get_json_schema(self) -> None: """ return {} - def produce_deleted_records_from_events(self, delete_events: Iterable[Mapping[str, Any]] = []) -> Mapping[str, Any]: + def produce_deleted_records_from_events(self, delete_events: Iterable[Mapping[str, Any]] = []) -> Iterable[Mapping[str, Any]]: for event in delete_events: yield { "id": event["subject_id"], @@ -177,9 +179,7 @@ def request_params( class IncrementalShopifyStream(ShopifyStream, ABC): # Setting the check point interval to the limit of the records output - @property - def state_checkpoint_interval(self) -> int: - return super().limit + state_checkpoint_interval = 250 # Setting the default cursor field for all streams cursor_field = "updated_at" @@ -218,7 +218,7 @@ def filter_records_newer_than_state( ) -> Iterable: # Getting records >= state if stream_state: - state_value = stream_state.get(self.cursor_field) + state_value = stream_state.get(self.cursor_field, self.default_state_comparison_value) for record in records_slice: if self.cursor_field in record: record_value = record.get(self.cursor_field, self.default_state_comparison_value) @@ -489,6 +489,11 @@ def add_parent_id(self, record: Optional[Mapping[str, Any]] = None) -> Mapping[s @stream_state_cache.cache_stream_state def stream_slices(self, stream_state: Optional[Mapping[str, Any]] = None, **kwargs) -> Iterable[Optional[Mapping[str, Any]]]: parent_stream_state = stream_state.get(self.parent_stream.name) if stream_state else {} + # `sub record buffer` tunes the STATE frequency, to `checkpoint_interval` + # for the `nested streams` with List[object], but doesn't handle List[{}] (list of one) case, + # thus sometimes, we've got duplicated STATE with 0 records, + # since we emit the STATE for every slice. + sub_records_buffer = [] for record in self.parent_stream.read_records(stream_state=parent_stream_state, **kwargs): # updating the `stream_state` with the state of it's parent stream # to have the child stream sync independently from the parent stream @@ -499,8 +504,20 @@ def stream_slices(self, stream_state: Optional[Mapping[str, Any]] = None, **kwar if self.nested_entity in record.keys(): # add parent_id key, value from mutation_map, if passed. self.add_parent_id(record) - # yield nested sub-rcords - yield from [{self.nested_entity: sub_record} for sub_record in record.get(self.nested_entity, [])] + # unpack the nested list to the sub_set buffer + nested_records = [sub_record for sub_record in record.get(self.nested_entity, [])] + # add nested_records to the buffer, with no summarization. + sub_records_buffer += nested_records + # emit slice when there is a resonable amount of data collected, + # to reduce the amount of STATE messages after each slice. + if len(sub_records_buffer) >= self.state_checkpoint_interval: + yield {self.nested_entity: sub_records_buffer} + # clean the buffer for the next records batch + sub_records_buffer.clear() + + # emit leftovers + if len(sub_records_buffer) > 0: + yield {self.nested_entity: sub_records_buffer} def read_records(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwargs) -> Iterable[Mapping[str, Any]]: # get the cached substream state, to avoid state collisions for Incremental Syncs @@ -669,16 +686,19 @@ def get_updated_state( updated_state[self.parent_stream.name] = {self.parent_stream.cursor_field: latest_record.get(self.parent_stream.cursor_field)} return updated_state + def get_stream_state_value(self, stream_state: Optional[Mapping[str, Any]]) -> str: + if self.parent_stream_class: + # get parent stream state from the stream_state object. + parent_state = stream_state.get(self.parent_stream.name, {}) + if parent_state: + return parent_state.get(self.parent_stream.cursor_field, self.default_state_comparison_value) + else: + # get the stream state, if no `parent_stream_class` was assigned. + return stream_state.get(self.cursor_field, self.default_state_comparison_value) + def get_state_value(self, stream_state: Mapping[str, Any] = None) -> Optional[Union[str, int]]: if stream_state: - if self.parent_stream_class: - # get parent stream state from the stream_state object. - parent_state = stream_state.get(self.parent_stream.name, {}) - if parent_state: - return parent_state.get(self.parent_stream.cursor_field, self.default_state_comparison_value) - else: - # get the stream state, if no `parent_stream_class` was assigned. - return stream_state.get(self.cursor_field, self.default_state_comparison_value) + return self.get_stream_state_value(stream_state) else: # for majority of cases we fallback to start_date, otherwise. return self.config.get("start_date") diff --git a/airbyte-integrations/connectors/source-shopify/unit_tests/conftest.py b/airbyte-integrations/connectors/source-shopify/unit_tests/conftest.py index 6b7d9e7cb0e42..b235659f65d98 100644 --- a/airbyte-integrations/connectors/source-shopify/unit_tests/conftest.py +++ b/airbyte-integrations/connectors/source-shopify/unit_tests/conftest.py @@ -4,7 +4,7 @@ import os from json import dumps -from typing import Any +from typing import Any, List, Mapping import pytest import requests @@ -13,6 +13,15 @@ os.environ["REQUEST_CACHE_PATH"] = "REQUEST_CACHE_PATH" +def records_per_slice(parent_records: List[Mapping[str, Any]], state_checkpoint_interval) -> List[int]: + num_batches = len(parent_records) // state_checkpoint_interval + if len(parent_records) % state_checkpoint_interval != 0: + num_batches += 1 + records_per_slice = len(parent_records) // num_batches + remaining_elements = len(parent_records) % num_batches + result = [records_per_slice] * (num_batches - remaining_elements) + [records_per_slice + 1] * remaining_elements + result.reverse() + return result @pytest.fixture def logger(): diff --git a/airbyte-integrations/connectors/source-shopify/unit_tests/graphql_bulk/test_job.py b/airbyte-integrations/connectors/source-shopify/unit_tests/graphql_bulk/test_job.py index fa5d50cd0bee6..f0e80e1c5944a 100644 --- a/airbyte-integrations/connectors/source-shopify/unit_tests/graphql_bulk/test_job.py +++ b/airbyte-integrations/connectors/source-shopify/unit_tests/graphql_bulk/test_job.py @@ -7,6 +7,7 @@ import requests from source_shopify.shopify_graphql.bulk.exceptions import ShopifyBulkExceptions from source_shopify.shopify_graphql.bulk.job import ShopifyBulkStatus +from source_shopify.streams.base_streams import IncrementalShopifyGraphQlBulkStream from source_shopify.streams.streams import ( Collections, CustomerAddress, @@ -263,3 +264,36 @@ def test_bulk_stream_parse_response( assert test_records == [expected_result] elif isinstance(expected_result, list): assert test_records == expected_result + + +@pytest.mark.parametrize( + "stream, stream_state, with_start_date, expected", + [ + (DiscountCodes, {}, True, "updated_at:>='2023-01-01T00:00:00+00:00'"), + # here the config migration is applied and the value should be "2020-01-01" + (DiscountCodes, {}, False, "updated_at:>='2020-01-01T00:00:00+00:00'"), + (DiscountCodes, {"updated_at": "2022-01-01T00:00:00Z"}, True, "updated_at:>='2022-01-01T00:00:00+00:00'"), + (DiscountCodes, {"updated_at": "2021-01-01T00:00:00Z"}, False, "updated_at:>='2021-01-01T00:00:00+00:00'"), + ], + ids=[ + "No State, but Start Date", + "No State, No Start Date - should fallback to 2018", + "With State, Start Date", + "With State, No Start Date", + ], +) +def test_stream_slices( + auth_config, + stream, + stream_state, + with_start_date, + expected, +) -> None: + # simulating `None` for `start_date` and `config migration` + if not with_start_date: + auth_config["start_date"] = "2020-01-01" + + stream = stream(auth_config) + test_result = list(stream.stream_slices(stream_state=stream_state)) + test_query_from_slice = test_result[0].get("query") + assert expected in test_query_from_slice diff --git a/airbyte-integrations/connectors/source-shopify/unit_tests/test_migrations/test_config.json b/airbyte-integrations/connectors/source-shopify/unit_tests/test_migrations/test_config.json new file mode 100644 index 0000000000000..327bb81c3b216 --- /dev/null +++ b/airbyte-integrations/connectors/source-shopify/unit_tests/test_migrations/test_config.json @@ -0,0 +1,8 @@ +{ + "shop": "airbyte-integration-test", + "credentials": { + "auth_method": "api_password", + "api_password": "__api_password__" + }, + "bulk_window_in_days": 1000 +} diff --git a/airbyte-integrations/connectors/source-shopify/unit_tests/test_migrations/test_config_migrations.py b/airbyte-integrations/connectors/source-shopify/unit_tests/test_migrations/test_config_migrations.py new file mode 100644 index 0000000000000..de54e242294a4 --- /dev/null +++ b/airbyte-integrations/connectors/source-shopify/unit_tests/test_migrations/test_config_migrations.py @@ -0,0 +1,70 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +import json +from typing import Any, Mapping + +from airbyte_cdk.models import OrchestratorType, Type +from airbyte_cdk.sources import Source +from source_shopify.config_migrations import MigrateConfig +from source_shopify.source import SourceShopify + +# BASE ARGS +CMD = "check" +TEST_CONFIG_PATH = "unit_tests/test_migrations/test_config.json" +NEW_TEST_CONFIG_PATH = "unit_tests/test_migrations/test_new_config.json" +SOURCE_INPUT_ARGS = [CMD, "--config", TEST_CONFIG_PATH] +SOURCE: Source = SourceShopify() + + +# HELPERS +def load_config(config_path: str = TEST_CONFIG_PATH) -> Mapping[str, Any]: + with open(config_path, "r") as config: + return json.load(config) + + +def revert_migration(config_path: str = TEST_CONFIG_PATH) -> None: + with open(config_path, "r") as test_config: + config = json.load(test_config) + config.pop("start_date") + with open(config_path, "w") as updated_config: + config = json.dumps(config) + updated_config.write(config) + + +def test_migrate_config() -> None: + migration_instance = MigrateConfig() + # original_config = load_config() + # migrate the test_config + migration_instance.migrate(SOURCE_INPUT_ARGS, SOURCE) + # load the updated config + test_migrated_config = load_config() + # check migrated property + assert "start_date" in test_migrated_config + # check the data type + assert isinstance(test_migrated_config["start_date"], str) + # check the migration should be skipped, once already done + assert not migration_instance.should_migrate(test_migrated_config) + # test CONTROL MESSAGE was emitted + control_msg = migration_instance.message_repository._message_queue[0] + assert control_msg.type == Type.CONTROL + assert control_msg.control.type == OrchestratorType.CONNECTOR_CONFIG + # check the migrated values + assert control_msg.control.connectorConfig.config["start_date"] == "2020-01-01" + # revert the test_config to the starting point + revert_migration() + + +def test_config_is_reverted(): + # check the test_config state, it has to be the same as before tests + test_config = load_config() + # check the config no longer has the migarted property + assert "start_date" not in test_config + + +def test_should_not_migrate_new_config(): + new_config = load_config(NEW_TEST_CONFIG_PATH) + migration_instance = MigrateConfig() + assert not migration_instance.should_migrate(new_config) diff --git a/airbyte-integrations/connectors/source-shopify/unit_tests/test_migrations/test_new_config.json b/airbyte-integrations/connectors/source-shopify/unit_tests/test_migrations/test_new_config.json new file mode 100644 index 0000000000000..1a68f9d86857f --- /dev/null +++ b/airbyte-integrations/connectors/source-shopify/unit_tests/test_migrations/test_new_config.json @@ -0,0 +1,9 @@ +{ + "start_date": "2020-01-01", + "shop": "airbyte-integration-test", + "credentials": { + "auth_method": "api_password", + "api_password": "__api_password__" + }, + "bulk_window_in_days": 1000 +} diff --git a/airbyte-integrations/connectors/source-shopify/unit_tests/test_source.py b/airbyte-integrations/connectors/source-shopify/unit_tests/test_source.py index e4f07f314761b..277c969517dee 100644 --- a/airbyte-integrations/connectors/source-shopify/unit_tests/test_source.py +++ b/airbyte-integrations/connectors/source-shopify/unit_tests/test_source.py @@ -3,11 +3,14 @@ # -from unittest.mock import MagicMock +import math +from unittest.mock import MagicMock, patch import pytest +from airbyte_cdk.utils import AirbyteTracedException +from conftest import records_per_slice from source_shopify.auth import ShopifyAuthenticator -from source_shopify.source import SourceShopify +from source_shopify.source import ConnectionCheckTest, SourceShopify from source_shopify.streams.streams import ( AbandonedCheckouts, Articles, @@ -122,6 +125,54 @@ def test_path_with_stream_slice_param(stream, stream_slice, expected_path, confi else: result = stream.path() assert result == expected_path + + +@pytest.mark.parametrize( + "stream, parent_records, state_checkpoint_interval", + [ + ( + ProductImages, + [ + {"id": 1, "images": [{"updated_at": "2021-01-01T00:00:00+00:00"}]}, + {"id": 2, "images": [{"updated_at": "2021-02-01T00:00:00+00:00"}]}, + {"id": 3, "images": [{"updated_at": "2021-03-01T00:00:00+00:00"}]}, + {"id": 4, "images": [{"updated_at": "2021-04-01T00:00:00+00:00"}]}, + {"id": 5, "images": [{"updated_at": "2021-05-01T00:00:00+00:00"}]}, + ], + 2, + ), + ], +) +def test_stream_slice_nested_substream_buffering( + mocker, + config, + stream, + parent_records, + state_checkpoint_interval, +) -> None: + # making the stream instance + stream = stream(config) + stream.state_checkpoint_interval = state_checkpoint_interval + # simulating `read_records` for the `parent_stream` + mocker.patch( + "source_shopify.streams.base_streams.IncrementalShopifyStreamWithDeletedEvents.read_records", + return_value=parent_records, + ) + # count how many slices we expect, based on the number of parent_records + total_slices_expected = math.ceil(len(parent_records) / state_checkpoint_interval) + # define the how many records each individual slice should have, based on the number of parent_records + expected_records_per_slice = records_per_slice(parent_records, state_checkpoint_interval) + # slices counter + total_slices: int = 0 + for slice in enumerate(stream.stream_slices()): + slice_index = slice[0] + nested_records = slice[1].get(stream.nested_entity) + # check the number of records / slice + assert len(nested_records) == expected_records_per_slice[slice_index] + # count total slices + total_slices += 1 + # check we have emitted complete number of slices + assert total_slices == total_slices_expected def test_check_connection(config, mocker) -> None: @@ -226,3 +277,21 @@ def test_select_transactions_stream(config, expected_stream_class): actual = source.select_transactions_stream(config) assert type(actual) == expected_stream_class + +@pytest.mark.parametrize( + "read_records, expected_shop_id, expected_error", + [ + pytest.param([{"id": "12345"}], "12345", None, id="test_shop_name_exists"), + pytest.param([], None, AirbyteTracedException, id="test_shop_name_does_not_exist"), + ], +) +def test_get_shop_id(config, read_records, expected_shop_id, expected_error): + check_test = ConnectionCheckTest(config) + + with patch.object(Shop, "read_records", return_value=read_records): + if expected_error: + with pytest.raises(expected_error): + check_test.get_shop_id() + else: + actual_shop_id = check_test.get_shop_id() + assert actual_shop_id == expected_shop_id diff --git a/airbyte-integrations/connectors/source-slack/.coveragerc b/airbyte-integrations/connectors/source-slack/.coveragerc new file mode 100644 index 0000000000000..ce32301144712 --- /dev/null +++ b/airbyte-integrations/connectors/source-slack/.coveragerc @@ -0,0 +1,3 @@ +[run] +omit = + source_slack/run.py diff --git a/airbyte-integrations/connectors/source-slack/acceptance-test-config.yml b/airbyte-integrations/connectors/source-slack/acceptance-test-config.yml index 2fd1ba9af04bd..a9ee3dea68d49 100644 --- a/airbyte-integrations/connectors/source-slack/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-slack/acceptance-test-config.yml @@ -6,7 +6,9 @@ acceptance_tests: - spec_path: "source_slack/spec.json" backward_compatibility_tests_config: # edited `min`/`max` > `minimum`/`maximum` for `lookback_window` field - disable_for_version: "0.1.26" + #disable_for_version: "0.1.26" + # slight changes: removed doc url, added new null oauth param + disable_for_version: "0.3.10" connection: tests: - config_path: "secrets/config.json" diff --git a/airbyte-integrations/connectors/source-slack/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-slack/integration_tests/abnormal_state.json index d55652e4e69a1..104b5856e0748 100644 --- a/airbyte-integrations/connectors/source-slack/integration_tests/abnormal_state.json +++ b/airbyte-integrations/connectors/source-slack/integration_tests/abnormal_state.json @@ -9,8 +9,49 @@ { "type": "STREAM", "stream": { - "stream_state": { "float_ts": 7270247822 }, - "stream_descriptor": { "name": "channel_messages" } + "stream_descriptor": { + "name": "channel_messages" + }, + "stream_state": { + "states": [ + { + "partition": { + "channel_id": "C04LTCM2Y56", + "parent_slice": {} + }, + "cursor": { + "float_ts": "2534945416" + } + }, + { + "partition": { + "channel": "C04KX3KEZ54", + "parent_slice": {} + }, + "cursor": { + "float_ts": "2534945416" + } + }, + { + "partition": { + "channel": "C04L3M4PTJ6", + "parent_slice": {} + }, + "cursor": { + "float_ts": "2534945416" + } + }, + { + "partition": { + "channel": "C04LTCM2Y56", + "parent_slice": {} + }, + "cursor": { + "float_ts": "2534945416" + } + } + ] + } } } ] diff --git a/airbyte-integrations/connectors/source-slack/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-slack/integration_tests/expected_records.jsonl index e5b6c79113767..6ed8e6208f04e 100644 --- a/airbyte-integrations/connectors/source-slack/integration_tests/expected_records.jsonl +++ b/airbyte-integrations/connectors/source-slack/integration_tests/expected_records.jsonl @@ -4,12 +4,14 @@ {"stream": "channel_members", "data": {"member_id": "U04L65GPMKN", "channel_id": "C04KX3KEZ54"}, "emitted_at": 1707568736171} {"stream": "channel_members", "data": {"member_id": "U04LY6NARHU", "channel_id": "C04KX3KEZ54"}, "emitted_at": 1707568736172} {"stream": "channel_members", "data": {"member_id": "U04M23SBJGM", "channel_id": "C04KX3KEZ54"}, "emitted_at": 1707568736172} -{"stream": "channel_messages", "data": {"client_msg_id": "3ae60d35-58b8-441c-923a-75de35a4ed8a", "type": "message", "text": "Test Thread 2", "user": "U04L65GPMKN", "ts": "1683104542.931169", "blocks": [{"type": "rich_text", "block_id": "WLB", "elements": [{"type": "rich_text_section", "elements": [{"type": "text", "text": "Test Thread 2"}]}]}], "team": "T04KX3KDDU6", "thread_ts": "1683104542.931169", "reply_count": 2, "reply_users_count": 1, "latest_reply": "1683104568.059569", "reply_users": ["U04L65GPMKN"], "is_locked": false, "subscribed": true, "last_read": "1683104568.059569", "channel_id": "C04KX3KEZ54", "float_ts": 1683104542.931169}, "emitted_at": 1707568738170} -{"stream": "channel_messages", "data": {"client_msg_id": "e27672c0-451e-42a6-8eff-a14d2db8ac1e", "type": "message", "text": "Test Thread 1", "user": "U04L65GPMKN", "ts": "1683104499.808709", "blocks": [{"type": "rich_text", "block_id": "0j7", "elements": [{"type": "rich_text_section", "elements": [{"type": "text", "text": "Test Thread 1"}]}]}], "team": "T04KX3KDDU6", "thread_ts": "1683104499.808709", "reply_count": 2, "reply_users_count": 1, "latest_reply": "1683104528.084359", "reply_users": ["U04L65GPMKN"], "is_locked": false, "subscribed": true, "last_read": "1683104528.084359", "channel_id": "C04LTCM2Y56", "float_ts": 1683104499.808709}, "emitted_at": 1707569060525} -{"stream": "channel_messages", "data": {"type": "message", "subtype": "reminder_add", "text": " set up a reminder \u201ctest reminder\u201d in this channel at 9AM tomorrow, Eastern European Summer Time.", "user": "U04L65GPMKN", "ts": "1695814864.744249", "channel_id": "C04LTCM2Y56", "float_ts": 1695814864.744249}, "emitted_at": 1707569208689} -{"stream": "threads", "data": {"client_msg_id": "3ae60d35-58b8-441c-923a-75de35a4ed8a", "type": "message", "text": "Test Thread 2", "user": "U04L65GPMKN", "ts": "1683104542.931169", "blocks": [{"type": "rich_text", "block_id": "WLB", "elements": [{"type": "rich_text_section", "elements": [{"type": "text", "text": "Test Thread 2"}]}]}], "team": "T04KX3KDDU6", "thread_ts": "1683104542.931169", "reply_count": 2, "reply_users_count": 1, "latest_reply": "1683104568.059569", "reply_users": ["U04L65GPMKN"], "is_locked": false, "subscribed": true, "last_read": "1683104568.059569", "channel_id": "C04KX3KEZ54", "float_ts": 1683104542.931169}, "emitted_at": 1707569354932} -{"stream": "threads", "data": {"client_msg_id": "3e96d351-270c-493f-a1a0-fdc3c4c0e11f", "type": "message", "text": "<@U04M23SBJGM> test test test", "user": "U04L65GPMKN", "ts": "1683104559.922849", "blocks": [{"type": "rich_text", "block_id": "tX6vr", "elements": [{"type": "rich_text_section", "elements": [{"type": "user", "user_id": "U04M23SBJGM"}, {"type": "text", "text": " test test test"}]}]}], "team": "T04KX3KDDU6", "thread_ts": "1683104542.931169", "parent_user_id": "U04L65GPMKN", "channel_id": "C04KX3KEZ54", "float_ts": 1683104559.922849}, "emitted_at": 1707569354933} -{"stream": "threads", "data": {"client_msg_id": "08023e44-9d18-41ed-81dd-5f04ed699656", "type": "message", "text": "<@U04LY6NARHU> test test", "user": "U04L65GPMKN", "ts": "1683104568.059569", "blocks": [{"type": "rich_text", "block_id": "IyUF", "elements": [{"type": "rich_text_section", "elements": [{"type": "user", "user_id": "U04LY6NARHU"}, {"type": "text", "text": " test test"}]}]}], "team": "T04KX3KDDU6", "thread_ts": "1683104542.931169", "parent_user_id": "U04L65GPMKN", "channel_id": "C04KX3KEZ54", "float_ts": 1683104568.059569}, "emitted_at": 1707569354933} -{"stream": "users", "data": {"id": "USLACKBOT", "team_id": "T04KX3KDDU6", "name": "slackbot", "deleted": false, "color": "757575", "real_name": "Slackbot", "tz": "America/Los_Angeles", "tz_label": "Pacific Standard Time", "tz_offset": -28800, "profile": {"title": "", "phone": "", "skype": "", "real_name": "Slackbot", "real_name_normalized": "Slackbot", "display_name": "Slackbot", "display_name_normalized": "Slackbot", "fields": {}, "status_text": "", "status_emoji": "", "status_emoji_display_info": [], "status_expiration": 0, "avatar_hash": "sv41d8cd98f0", "always_active": true, "first_name": "slackbot", "last_name": "", "image_24": "https://a.slack-edge.com/80588/img/slackbot_24.png", "image_32": "https://a.slack-edge.com/80588/img/slackbot_32.png", "image_48": "https://a.slack-edge.com/80588/img/slackbot_48.png", "image_72": "https://a.slack-edge.com/80588/img/slackbot_72.png", "image_192": "https://a.slack-edge.com/80588/marketing/img/avatars/slackbot/avatar-slackbot.png", "image_512": "https://a.slack-edge.com/80588/img/slackbot_512.png", "status_text_canonical": "", "team": "T04KX3KDDU6"}, "is_admin": false, "is_owner": false, "is_primary_owner": false, "is_restricted": false, "is_ultra_restricted": false, "is_bot": false, "is_app_user": false, "updated": 0, "is_email_confirmed": false, "who_can_share_contact_card": "EVERYONE"}, "emitted_at": 1707569357949} -{"stream": "users", "data": {"id": "U04KUMXNYMV", "team_id": "T04KX3KDDU6", "name": "deactivateduser693438", "deleted": true, "profile": {"title": "", "phone": "", "skype": "", "real_name": "Deactivated User", "real_name_normalized": "Deactivated User", "display_name": "deactivateduser", "display_name_normalized": "deactivateduser", "fields": null, "status_text": "", "status_emoji": "", "status_emoji_display_info": [], "status_expiration": 0, "avatar_hash": "g849cc56ed76", "huddle_state": "default_unset", "first_name": "Deactivated", "last_name": "User", "image_24": "https://secure.gravatar.com/avatar/d5320ceddda202563fd9e6222c07c00a.jpg?s=24&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0011-24.png", "image_32": "https://secure.gravatar.com/avatar/d5320ceddda202563fd9e6222c07c00a.jpg?s=32&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0011-32.png", "image_48": "https://secure.gravatar.com/avatar/d5320ceddda202563fd9e6222c07c00a.jpg?s=48&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0011-48.png", "image_72": "https://secure.gravatar.com/avatar/d5320ceddda202563fd9e6222c07c00a.jpg?s=72&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0011-72.png", "image_192": "https://secure.gravatar.com/avatar/d5320ceddda202563fd9e6222c07c00a.jpg?s=192&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0011-192.png", "image_512": "https://secure.gravatar.com/avatar/d5320ceddda202563fd9e6222c07c00a.jpg?s=512&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0011-512.png", "status_text_canonical": "", "team": "T04KX3KDDU6"}, "is_bot": false, "is_app_user": false, "updated": 1675090804, "is_forgotten": true, "is_invited_user": true}, "emitted_at": 1707569357951} -{"stream": "users", "data": {"id": "U04L2KY5CES", "team_id": "T04KX3KDDU6", "name": "deactivateduser686066", "deleted": true, "profile": {"title": "", "phone": "", "skype": "", "real_name": "Deactivated User", "real_name_normalized": "Deactivated User", "display_name": "deactivateduser", "display_name_normalized": "deactivateduser", "fields": null, "status_text": "", "status_emoji": "", "status_emoji_display_info": [], "status_expiration": 0, "avatar_hash": "g849cc56ed76", "huddle_state": "default_unset", "first_name": "Deactivated", "last_name": "User", "image_24": "https://secure.gravatar.com/avatar/cacb225265b3b19c4e72029a62cf1ef1.jpg?s=24&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0009-24.png", "image_32": "https://secure.gravatar.com/avatar/cacb225265b3b19c4e72029a62cf1ef1.jpg?s=32&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0009-32.png", "image_48": "https://secure.gravatar.com/avatar/cacb225265b3b19c4e72029a62cf1ef1.jpg?s=48&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0009-48.png", "image_72": "https://secure.gravatar.com/avatar/cacb225265b3b19c4e72029a62cf1ef1.jpg?s=72&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0009-72.png", "image_192": "https://secure.gravatar.com/avatar/cacb225265b3b19c4e72029a62cf1ef1.jpg?s=192&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0009-192.png", "image_512": "https://secure.gravatar.com/avatar/cacb225265b3b19c4e72029a62cf1ef1.jpg?s=512&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0009-512.png", "status_text_canonical": "", "team": "T04KX3KDDU6"}, "is_bot": false, "is_app_user": false, "updated": 1675090785, "is_forgotten": true, "is_invited_user": true}, "emitted_at": 1707569357951} +{"stream": "channel_messages", "data": {"user": "U04L65GPMKN", "type": "message", "ts": "1683104542.931169", "client_msg_id": "3ae60d35-58b8-441c-923a-75de35a4ed8a", "text": "Test Thread 2", "team": "T04KX3KDDU6", "thread_ts": "1683104542.931169", "reply_count": 2, "reply_users_count": 1, "latest_reply": "1683104568.059569", "reply_users": ["U04L65GPMKN"], "is_locked": false, "subscribed": true, "last_read": "1683104568.059569", "blocks": [{"type": "rich_text", "block_id": "WLB", "elements": [{"type": "rich_text_section", "elements": [{"type": "text", "text": "Test Thread 2"}]}]}], "float_ts": 1683104542, "channel_id": "C04KX3KEZ54"}, "emitted_at": 1711041520506} +{"stream": "channel_messages", "data": {"user": "U04L65GPMKN", "type": "message", "ts": "1683104499.808709", "client_msg_id": "e27672c0-451e-42a6-8eff-a14d2db8ac1e", "text": "Test Thread 1", "team": "T04KX3KDDU6", "thread_ts": "1683104499.808709", "reply_count": 2, "reply_users_count": 1, "latest_reply": "1683104528.084359", "reply_users": ["U04L65GPMKN"], "is_locked": false, "subscribed": true, "last_read": "1683104528.084359", "blocks": [{"type": "rich_text", "block_id": "0j7", "elements": [{"type": "rich_text_section", "elements": [{"type": "text", "text": "Test Thread 1"}]}]}], "float_ts": 1683104499, "channel_id": "C04LTCM2Y56"}, "emitted_at": 1711041522765} +{"stream": "channel_messages", "data": {"user": "USLACKBOT", "type": "message", "ts": "1695880827.186049", "bot_id": "B01", "text": "Reminder: test reminder.", "team": "T04KX3KDDU6", "blocks": [{"type": "rich_text", "block_id": "BGzX", "elements": [{"type": "rich_text_section", "elements": [{"type": "text", "text": "Reminder: test reminder."}]}]}], "float_ts": 1695880827, "channel_id": "C04LTCM2Y56"}, "emitted_at": 1711041523073} +{"stream": "channel_messages", "data": {"subtype": "reminder_add", "user": "U04L65GPMKN", "type": "message", "ts": "1695814864.744249", "text": " set up a reminder \u201ctest reminder\u201d in this channel at 9AM tomorrow, Eastern European Summer Time.", "float_ts": 1695814864, "channel_id": "C04LTCM2Y56"}, "emitted_at": 1711041523080} +{"stream": "threads", "data": {"user": "U04L65GPMKN", "type": "message", "ts": "1683104542.931169", "client_msg_id": "3ae60d35-58b8-441c-923a-75de35a4ed8a", "text": "Test Thread 2", "team": "T04KX3KDDU6", "thread_ts": "1683104542.931169", "reply_count": 2, "reply_users_count": 1, "latest_reply": "1683104568.059569", "reply_users": ["U04L65GPMKN"], "is_locked": false, "subscribed": true, "last_read": "1683104568.059569", "blocks": [{"type": "rich_text", "block_id": "WLB", "elements": [{"type": "rich_text_section", "elements": [{"type": "text", "text": "Test Thread 2"}]}]}], "channel_id": "C04KX3KEZ54", "float_ts": 1683104542.931169}, "emitted_at": 1712056304168} +{"stream": "threads", "data": {"user": "U04L65GPMKN", "type": "message", "ts": "1683104559.922849", "client_msg_id": "3e96d351-270c-493f-a1a0-fdc3c4c0e11f", "text": "<@U04M23SBJGM> test test test", "team": "T04KX3KDDU6", "thread_ts": "1683104542.931169", "parent_user_id": "U04L65GPMKN", "blocks": [{"type": "rich_text", "block_id": "tX6vr", "elements": [{"type": "rich_text_section", "elements": [{"type": "user", "user_id": "U04M23SBJGM"}, {"type": "text", "text": " test test test"}]}]}], "channel_id": "C04KX3KEZ54", "float_ts": 1683104559.922849}, "emitted_at": 1712056304169} +{"stream": "threads", "data": {"user": "U04L65GPMKN", "type": "message", "ts": "1683104568.059569", "client_msg_id": "08023e44-9d18-41ed-81dd-5f04ed699656", "text": "<@U04LY6NARHU> test test", "team": "T04KX3KDDU6", "thread_ts": "1683104542.931169", "parent_user_id": "U04L65GPMKN", "blocks": [{"type": "rich_text", "block_id": "IyUF", "elements": [{"type": "rich_text_section", "elements": [{"type": "user", "user_id": "U04LY6NARHU"}, {"type": "text", "text": " test test"}]}]}], "channel_id": "C04KX3KEZ54", "float_ts": 1683104568.059569}, "emitted_at": 1712056304169} +{"stream": "threads", "data": {"user": "USLACKBOT", "type": "message", "ts": "1695880827.186049", "bot_id": "B01", "text": "Reminder: test reminder.", "team": "T04KX3KDDU6", "blocks": [{"type": "rich_text", "block_id": "BGzX", "elements": [{"type": "rich_text_section", "elements": [{"type": "text", "text": "Reminder: test reminder."}]}]}], "channel_id": "C04LTCM2Y56", "float_ts": 1695880827.186049}, "emitted_at": 1712056304703} +{"stream": "users", "data": {"id": "USLACKBOT", "team_id": "T04KX3KDDU6", "name": "slackbot", "deleted": false, "color": "757575", "real_name": "Slackbot", "tz": "America/Los_Angeles", "tz_label": "Pacific Daylight Time", "tz_offset": -25200, "profile": {"title": "", "phone": "", "skype": "", "real_name": "Slackbot", "real_name_normalized": "Slackbot", "display_name": "Slackbot", "display_name_normalized": "Slackbot", "fields": {}, "status_text": "", "status_emoji": "", "status_emoji_display_info": [], "status_expiration": 0, "avatar_hash": "sv41d8cd98f0", "always_active": true, "first_name": "slackbot", "last_name": "", "image_24": "https://a.slack-edge.com/80588/img/slackbot_24.png", "image_32": "https://a.slack-edge.com/80588/img/slackbot_32.png", "image_48": "https://a.slack-edge.com/80588/img/slackbot_48.png", "image_72": "https://a.slack-edge.com/80588/img/slackbot_72.png", "image_192": "https://a.slack-edge.com/80588/marketing/img/avatars/slackbot/avatar-slackbot.png", "image_512": "https://a.slack-edge.com/80588/img/slackbot_512.png", "status_text_canonical": "", "team": "T04KX3KDDU6"}, "is_admin": false, "is_owner": false, "is_primary_owner": false, "is_restricted": false, "is_ultra_restricted": false, "is_bot": false, "is_app_user": false, "updated": 0, "is_email_confirmed": false, "who_can_share_contact_card": "EVERYONE"}, "emitted_at": 1710501138877} +{"stream": "users", "data": {"id": "U04KUMXNYMV", "team_id": "T04KX3KDDU6", "name": "deactivateduser693438", "deleted": true, "profile": {"title": "", "phone": "", "skype": "", "real_name": "Deactivated User", "real_name_normalized": "Deactivated User", "display_name": "deactivateduser", "display_name_normalized": "deactivateduser", "fields": null, "status_text": "", "status_emoji": "", "status_emoji_display_info": [], "status_expiration": 0, "avatar_hash": "g849cc56ed76", "huddle_state": "default_unset", "first_name": "Deactivated", "last_name": "User", "image_24": "https://secure.gravatar.com/avatar/d5320ceddda202563fd9e6222c07c00a.jpg?s=24&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0011-24.png", "image_32": "https://secure.gravatar.com/avatar/d5320ceddda202563fd9e6222c07c00a.jpg?s=32&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0011-32.png", "image_48": "https://secure.gravatar.com/avatar/d5320ceddda202563fd9e6222c07c00a.jpg?s=48&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0011-48.png", "image_72": "https://secure.gravatar.com/avatar/d5320ceddda202563fd9e6222c07c00a.jpg?s=72&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0011-72.png", "image_192": "https://secure.gravatar.com/avatar/d5320ceddda202563fd9e6222c07c00a.jpg?s=192&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0011-192.png", "image_512": "https://secure.gravatar.com/avatar/d5320ceddda202563fd9e6222c07c00a.jpg?s=512&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0011-512.png", "status_text_canonical": "", "team": "T04KX3KDDU6"}, "is_bot": false, "is_app_user": false, "updated": 1675090804, "is_forgotten": true, "is_invited_user": true}, "emitted_at": 1710501138879} +{"stream": "users", "data": {"id": "U04L2KY5CES", "team_id": "T04KX3KDDU6", "name": "deactivateduser686066", "deleted": true, "profile": {"title": "", "phone": "", "skype": "", "real_name": "Deactivated User", "real_name_normalized": "Deactivated User", "display_name": "deactivateduser", "display_name_normalized": "deactivateduser", "fields": null, "status_text": "", "status_emoji": "", "status_emoji_display_info": [], "status_expiration": 0, "avatar_hash": "g849cc56ed76", "huddle_state": "default_unset", "first_name": "Deactivated", "last_name": "User", "image_24": "https://secure.gravatar.com/avatar/cacb225265b3b19c4e72029a62cf1ef1.jpg?s=24&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0009-24.png", "image_32": "https://secure.gravatar.com/avatar/cacb225265b3b19c4e72029a62cf1ef1.jpg?s=32&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0009-32.png", "image_48": "https://secure.gravatar.com/avatar/cacb225265b3b19c4e72029a62cf1ef1.jpg?s=48&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0009-48.png", "image_72": "https://secure.gravatar.com/avatar/cacb225265b3b19c4e72029a62cf1ef1.jpg?s=72&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0009-72.png", "image_192": "https://secure.gravatar.com/avatar/cacb225265b3b19c4e72029a62cf1ef1.jpg?s=192&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0009-192.png", "image_512": "https://secure.gravatar.com/avatar/cacb225265b3b19c4e72029a62cf1ef1.jpg?s=512&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0009-512.png", "status_text_canonical": "", "team": "T04KX3KDDU6"}, "is_bot": false, "is_app_user": false, "updated": 1675090785, "is_forgotten": true, "is_invited_user": true}, "emitted_at": 1710501138881} diff --git a/airbyte-integrations/connectors/source-slack/metadata.yaml b/airbyte-integrations/connectors/source-slack/metadata.yaml index 3090e418f2901..de1b2b840aaef 100644 --- a/airbyte-integrations/connectors/source-slack/metadata.yaml +++ b/airbyte-integrations/connectors/source-slack/metadata.yaml @@ -10,12 +10,13 @@ data: connectorSubtype: api connectorType: source definitionId: c2281cee-86f9-4a86-bb48-d23286b4c7bd - dockerImageTag: 0.3.9 + dockerImageTag: 1.0.0 dockerRepository: airbyte/source-slack documentationUrl: https://docs.airbyte.com/integrations/sources/slack githubIssueLabel: source-slack icon: slack.svg license: MIT + maxSecondsBetweenMessages: 60 name: Slack remoteRegistries: pypi: @@ -27,6 +28,19 @@ data: oss: enabled: true releaseStage: generally_available + releases: + breakingChanges: + 1.0.0: + message: + The source Slack connector is being migrated from the Python CDK to our declarative low-code CDK. + Due to changes in the handling of state format for incremental substreams, this migration constitutes a breaking change for the channel_messages stream. + Users will need to reset source configuration, refresh the source schema and reset the channel_messages stream after upgrading. + For more information, see our migration documentation for source Slack. + upgradeDeadline: "2024-04-29" + scopedImpact: + - scopeType: stream + impactedScopes: + - "channel_messages" suggestedStreams: streams: - users @@ -37,5 +51,5 @@ data: supportLevel: certified tags: - language:python - - cdk:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-slack/poetry.lock b/airbyte-integrations/connectors/source-slack/poetry.lock index 608c63e12b1be..6cf0fe1c2aab4 100644 --- a/airbyte-integrations/connectors/source-slack/poetry.lock +++ b/airbyte-integrations/connectors/source-slack/poetry.lock @@ -1,51 +1,50 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.58.7" +version = "0.81.4" description = "A framework for writing Airbyte Connectors." optional = false -python-versions = ">=3.8" +python-versions = "<4.0,>=3.9" files = [ - {file = "airbyte-cdk-0.58.7.tar.gz", hash = "sha256:00e379e2379b38683992027114a2190f49befec8cbac67d0a2c907786111e77b"}, - {file = "airbyte_cdk-0.58.7-py3-none-any.whl", hash = "sha256:09b31d32899cc6dc91e39716e8d1601503a7884d837752e683d1e3ef7dfe73be"}, + {file = "airbyte_cdk-0.81.4-py3-none-any.whl", hash = "sha256:4ed193da4e8be4867e1d8983172d10afb3c3b10f3e10ec618431deec1f2af4cb"}, + {file = "airbyte_cdk-0.81.4.tar.gz", hash = "sha256:5c63d8c792edf5f24d0ad804b34b3ebcc056ecede6cb4f87ebf9ac07aa987f24"}, ] [package.dependencies] -airbyte-protocol-models = "0.5.1" +airbyte-protocol-models = "*" backoff = "*" cachetools = "*" -Deprecated = ">=1.2,<2.0" +Deprecated = ">=1.2,<1.3" dpath = ">=2.0.1,<2.1.0" genson = "1.2.2" isodate = ">=0.6.1,<0.7.0" Jinja2 = ">=3.1.2,<3.2.0" -jsonref = ">=0.2,<1.0" +jsonref = ">=0.2,<0.3" jsonschema = ">=3.2.0,<3.3.0" pendulum = "<3.0.0" pydantic = ">=1.10.8,<2.0.0" pyrate-limiter = ">=3.1.0,<3.2.0" python-dateutil = "*" -PyYAML = ">=6.0.1" +PyYAML = ">=6.0.1,<7.0.0" requests = "*" -requests-cache = "*" +requests_cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] [[package]] name = "airbyte-protocol-models" -version = "0.5.1" +version = "0.9.0" description = "Declares the Airbyte Protocol." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, - {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, + {file = "airbyte_protocol_models-0.9.0-py3-none-any.whl", hash = "sha256:e972e140b5efd1edad5a338bcae8fdee9fc12545caf2c321e0f61b151c163a9b"}, + {file = "airbyte_protocol_models-0.9.0.tar.gz", hash = "sha256:40b69c33df23fe82d7078e84beb123bd604480e4d73cb277a890fcc92aedc8d2"}, ] [package.dependencies] @@ -104,13 +103,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -301,6 +300,20 @@ files = [ [package.extras] test = ["pytest (>=6)"] +[[package]] +name = "freezegun" +version = "1.4.0" +description = "Let your Python tests travel through time" +optional = false +python-versions = ">=3.7" +files = [ + {file = "freezegun-1.4.0-py3-none-any.whl", hash = "sha256:55e0fc3c84ebf0a96a5aa23ff8b53d70246479e9a68863f1fcac5a3e52f19dd6"}, + {file = "freezegun-1.4.0.tar.gz", hash = "sha256:10939b0ba0ff5adaecf3b06a5c2f73071d9678e507c5eaedb23c761d56ac774b"}, +] + +[package.dependencies] +python-dateutil = ">=2.7" + [[package]] name = "genson" version = "1.2.2" @@ -313,13 +326,13 @@ files = [ [[package]] name = "idna" -version = "3.6" +version = "3.7" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.5" files = [ - {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, - {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, ] [[package]] @@ -366,13 +379,13 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jsonref" -version = "0.3.0" -description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." +version = "0.2" +description = "An implementation of JSON Reference for Python" optional = false -python-versions = ">=3.3,<4.0" +python-versions = "*" files = [ - {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, - {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, ] [[package]] @@ -467,13 +480,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -553,47 +566,47 @@ files = [ [[package]] name = "pydantic" -version = "1.10.14" +version = "1.10.15" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, - {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, - {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, - {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, - {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, - {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, - {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, - {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, - {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, - {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, - {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, - {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, - {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, - {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, - {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, - {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, - {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, - {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, - {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, - {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, - {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, - {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, - {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, - {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, - {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, - {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, - {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, - {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, - {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, - {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, - {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, - {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, - {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, - {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, - {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, - {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50f1666a9940d3d68683c9d96e39640f709d7a72ff8702987dab1761036206bb"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82790d4753ee5d00739d6cb5cf56bceb186d9d6ce134aca3ba7befb1eedbc2c8"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d207d5b87f6cbefbdb1198154292faee8017d7495a54ae58db06762004500d00"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e49db944fad339b2ccb80128ffd3f8af076f9f287197a480bf1e4ca053a866f0"}, + {file = "pydantic-1.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:d3b5c4cbd0c9cb61bbbb19ce335e1f8ab87a811f6d589ed52b0254cf585d709c"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3d5731a120752248844676bf92f25a12f6e45425e63ce22e0849297a093b5b0"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c365ad9c394f9eeffcb30a82f4246c0006417f03a7c0f8315d6211f25f7cb654"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3287e1614393119c67bd4404f46e33ae3be3ed4cd10360b48d0a4459f420c6a3"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be51dd2c8596b25fe43c0a4a59c2bee4f18d88efb8031188f9e7ddc6b469cf44"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6a51a1dd4aa7b3f1317f65493a182d3cff708385327c1c82c81e4a9d6d65b2e4"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4e316e54b5775d1eb59187f9290aeb38acf620e10f7fd2f776d97bb788199e53"}, + {file = "pydantic-1.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:0d142fa1b8f2f0ae11ddd5e3e317dcac060b951d605fda26ca9b234b92214986"}, + {file = "pydantic-1.10.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7ea210336b891f5ea334f8fc9f8f862b87acd5d4a0cbc9e3e208e7aa1775dabf"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3453685ccd7140715e05f2193d64030101eaad26076fad4e246c1cc97e1bb30d"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bea1f03b8d4e8e86702c918ccfd5d947ac268f0f0cc6ed71782e4b09353b26f"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:af9850d98fc21e5bc24ea9e35dd80a29faf6462c608728a110c0a30b595e58b7"}, + {file = "pydantic-1.10.15-cp37-cp37m-win_amd64.whl", hash = "sha256:d31ee5b14a82c9afe2bd26aaa405293d4237d0591527d9129ce36e58f19f95c1"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5e09c19df304b8123938dc3c53d3d3be6ec74b9d7d0d80f4f4b5432ae16c2022"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7ac9237cd62947db00a0d16acf2f3e00d1ae9d3bd602b9c415f93e7a9fc10528"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:584f2d4c98ffec420e02305cf675857bae03c9d617fcfdc34946b1160213a948"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbc6989fad0c030bd70a0b6f626f98a862224bc2b1e36bfc531ea2facc0a340c"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d573082c6ef99336f2cb5b667b781d2f776d4af311574fb53d908517ba523c22"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6bd7030c9abc80134087d8b6e7aa957e43d35714daa116aced57269a445b8f7b"}, + {file = "pydantic-1.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:3350f527bb04138f8aff932dc828f154847fbdc7a1a44c240fbfff1b57f49a12"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51d405b42f1b86703555797270e4970a9f9bd7953f3990142e69d1037f9d9e51"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a980a77c52723b0dc56640ced396b73a024d4b74f02bcb2d21dbbac1debbe9d0"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f1a1fb467d3f49e1708a3f632b11c69fccb4e748a325d5a491ddc7b5d22383"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:676ed48f2c5bbad835f1a8ed8a6d44c1cd5a21121116d2ac40bd1cd3619746ed"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92229f73400b80c13afcd050687f4d7e88de9234d74b27e6728aa689abcf58cc"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2746189100c646682eff0bce95efa7d2e203420d8e1c613dc0c6b4c1d9c1fde4"}, + {file = "pydantic-1.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:394f08750bd8eaad714718812e7fab615f873b3cdd0b9d84e76e51ef3b50b6b7"}, + {file = "pydantic-1.10.15-py3-none-any.whl", hash = "sha256:28e552a060ba2740d0d2aabe35162652c1459a0b9069fe0db7f4ee0e18e74d58"}, + {file = "pydantic-1.10.15.tar.gz", hash = "sha256:ca832e124eda231a60a041da4f013e3ff24949d94a01154b137fc2f2a43c3ffb"}, ] [package.dependencies] @@ -685,30 +698,30 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xm [[package]] name = "pytest-mock" -version = "3.12.0" +version = "3.14.0" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, - {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, ] [package.dependencies] -pytest = ">=5.0" +pytest = ">=6.2.5" [package.extras] dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -808,13 +821,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -826,50 +839,48 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" -version = "1.11.0" +version = "1.12.1" description = "Mock out responses from the requests package" optional = false -python-versions = "*" +python-versions = ">=3.5" files = [ - {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"}, - {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"}, + {file = "requests-mock-1.12.1.tar.gz", hash = "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401"}, + {file = "requests_mock-1.12.1-py2.py3-none-any.whl", hash = "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563"}, ] [package.dependencies] -requests = ">=2.3,<3" -six = "*" +requests = ">=2.22,<3" [package.extras] fixture = ["fixtures"] -test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] [[package]] name = "setuptools" -version = "69.1.0" +version = "69.5.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"}, + {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -895,13 +906,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.11.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, + {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, ] [[package]] @@ -920,13 +931,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1031,4 +1042,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "6d6c74c129dad767e4712df44cb1a80c195a3a27c8b39861a86d80acc72a19ed" +content-hash = "59138844bec5f4f46b8a260d963d206e9881f8580ecdbeb4329d266ec0071a75" diff --git a/airbyte-integrations/connectors/source-slack/pyproject.toml b/airbyte-integrations/connectors/source-slack/pyproject.toml index b7b6103bf52d8..aca63d06159f2 100644 --- a/airbyte-integrations/connectors/source-slack/pyproject.toml +++ b/airbyte-integrations/connectors/source-slack/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "0.3.9" +version = "1.0.0" name = "source-slack" description = "Source implementation for Slack." authors = [ "Airbyte ",] @@ -18,7 +18,8 @@ include = "source_slack" [tool.poetry.dependencies] python = "^3.9,<3.12" pendulum = "==2.1.2" -airbyte-cdk = "==0.58.7" +airbyte-cdk = "^0" +freezegun = "^1.4.0" [tool.poetry.scripts] source-slack = "source_slack.run:run" diff --git a/airbyte-integrations/connectors/source-slack/source_slack/components/__init__.py b/airbyte-integrations/connectors/source-slack/source_slack/components/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/airbyte-integrations/connectors/source-slack/source_slack/components/channel_members_extractor.py b/airbyte-integrations/connectors/source-slack/source_slack/components/channel_members_extractor.py new file mode 100644 index 0000000000000..9dbb401a07e9d --- /dev/null +++ b/airbyte-integrations/connectors/source-slack/source_slack/components/channel_members_extractor.py @@ -0,0 +1,21 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from dataclasses import dataclass +from typing import List + +import requests +from airbyte_cdk.sources.declarative.extractors import DpathExtractor +from airbyte_cdk.sources.declarative.types import Record + + +@dataclass +class ChannelMembersExtractor(DpathExtractor): + """ + Transform response from list of strings to list dicts: + from: ['aa', 'bb'] + to: [{'member_id': 'aa'}, {{'member_id': 'bb'}] + """ + + def extract_records(self, response: requests.Response) -> List[Record]: + records = super().extract_records(response) + return [{"member_id": record} for record in records] diff --git a/airbyte-integrations/connectors/source-slack/source_slack/components/join_channels.py b/airbyte-integrations/connectors/source-slack/source_slack/components/join_channels.py new file mode 100644 index 0000000000000..e7f33851784c2 --- /dev/null +++ b/airbyte-integrations/connectors/source-slack/source_slack/components/join_channels.py @@ -0,0 +1,123 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +import logging +from functools import partial +from typing import Any, Iterable, List, Mapping, Optional + +import requests +from airbyte_cdk.models import SyncMode +from airbyte_cdk.sources.declarative.partition_routers import SinglePartitionRouter +from airbyte_cdk.sources.declarative.retrievers import SimpleRetriever +from airbyte_cdk.sources.declarative.types import Record, StreamSlice +from airbyte_cdk.sources.streams.core import StreamData +from airbyte_cdk.sources.streams.http import HttpStream +from airbyte_cdk.sources.streams.http.auth import TokenAuthenticator + +LOGGER = logging.getLogger("airbyte_logger") + + +class JoinChannelsStream(HttpStream): + """ + This class is a special stream which joins channels because the Slack API only returns messages from channels this bot is in. + Its responses should only be logged for debugging reasons, not read as records. + """ + + url_base = "https://slack.com/api/" + http_method = "POST" + primary_key = "id" + + def __init__(self, channel_filter: List[str] = None, **kwargs): + self.channel_filter = channel_filter or [] + super().__init__(**kwargs) + + def path(self, **kwargs) -> str: + return "conversations.join" + + def parse_response(self, response: requests.Response, stream_slice: Mapping[str, Any] = None, **kwargs) -> Iterable: + """ + Override to simply indicate that the specific channel was joined successfully. + This method should not return any data, but should return an empty iterable. + """ + is_ok = response.json().get("ok", False) + if is_ok: + self.logger.info(f"Successfully joined channel: {stream_slice['channel_name']}") + else: + self.logger.info(f"Unable to joined channel: {stream_slice['channel_name']}. Reason: {response.json()}") + return [] + + def request_body_json(self, stream_slice: Mapping = None, **kwargs) -> Optional[Mapping]: + if stream_slice: + return {"channel": stream_slice.get("channel")} + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + """ + The pagination is not applicable to this Service Stream. + """ + return None + + +class ChannelsRetriever(SimpleRetriever): + def __post_init__(self, parameters: Mapping[str, Any]): + super().__post_init__(parameters) + self.stream_slicer = SinglePartitionRouter(parameters={}) + self.record_selector.transformations = [] + + def should_join_to_channel(self, config: Mapping[str, Any], record: Record) -> bool: + """ + The `is_member` property indicates whether the API Bot is already assigned / joined to the channel. + https://api.slack.com/types/conversation#booleans + """ + return config["join_channels"] and not record.get("is_member") + + def make_join_channel_slice(self, channel: Mapping[str, Any]) -> Mapping[str, Any]: + channel_id: str = channel.get("id") + channel_name: str = channel.get("name") + LOGGER.info(f"Joining Slack Channel: `{channel_name}`") + return {"channel": channel_id, "channel_name": channel_name} + + def join_channels_stream(self, config) -> JoinChannelsStream: + token = config["credentials"].get("api_token") or config["credentials"].get("access_token") + authenticator = TokenAuthenticator(token) + channel_filter = config["channel_filter"] + return JoinChannelsStream(authenticator=authenticator, channel_filter=channel_filter) + + def join_channel(self, config: Mapping[str, Any], record: Mapping[str, Any]): + list( + self.join_channels_stream(config).read_records( + sync_mode=SyncMode.full_refresh, + stream_slice=self.make_join_channel_slice(record), + ) + ) + + def read_records( + self, + records_schema: Mapping[str, Any], + stream_slice: Optional[StreamSlice] = None, + ) -> Iterable[StreamData]: + _slice = stream_slice or StreamSlice(partition={}, cursor_slice={}) # None-check + + self._paginator.reset() + + most_recent_record_from_slice = None + record_generator = partial( + self._parse_records, + stream_state=self.state or {}, + stream_slice=_slice, + records_schema=records_schema, + ) + + for stream_data in self._read_pages(record_generator, self.state, _slice): + # joining channel logic + if self.should_join_to_channel(self.config, stream_data): + self.join_channel(self.config, stream_data) + + current_record = self._extract_record(stream_data, _slice) + if self.cursor and current_record: + self.cursor.observe(_slice, current_record) + + most_recent_record_from_slice = self._get_most_recent_record(most_recent_record_from_slice, current_record, _slice) + yield stream_data + + if self.cursor: + self.cursor.observe(_slice, most_recent_record_from_slice) + return diff --git a/airbyte-integrations/connectors/source-slack/source_slack/config_migrations.py b/airbyte-integrations/connectors/source-slack/source_slack/config_migrations.py new file mode 100644 index 0000000000000..cc6d9cd036070 --- /dev/null +++ b/airbyte-integrations/connectors/source-slack/source_slack/config_migrations.py @@ -0,0 +1,73 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +import logging +from typing import Any, List, Mapping + +from airbyte_cdk import AirbyteEntrypoint +from airbyte_cdk.config_observation import create_connector_config_control_message +from airbyte_cdk.sources.message import InMemoryMessageRepository, MessageRepository +from source_slack import SourceSlack + +logger = logging.getLogger("airbyte_logger") + + +class MigrateLegacyConfig: + message_repository: MessageRepository = InMemoryMessageRepository() + + @classmethod + def _should_migrate(cls, config: Mapping[str, Any]) -> bool: + """ + legacy config: + { + "start_date": "2021-07-22T20:00:00Z", + "end_date": "2021-07-23T20:00:00Z", + "lookback_window": 1, + "join_channels": True, + "channel_filter": ["airbyte-for-beginners", "good-reads"], + "api_token": "api-token" + } + api token should be in the credentials object + """ + if config.get("api_token") and not config.get("credentials"): + return True + return False + + @classmethod + def _move_token_to_credentials(cls, config: Mapping[str, Any]) -> Mapping[str, Any]: + api_token = config["api_token"] + config.update({"credentials": {"api_token": api_token, "option_title": "API Token Credentials"}}) + config.pop("api_token") + return config + + @classmethod + def _modify_and_save(cls, config_path: str, source: SourceSlack, config: Mapping[str, Any]) -> Mapping[str, Any]: + migrated_config = cls._move_token_to_credentials(config) + # save the config + source.write_config(migrated_config, config_path) + return migrated_config + + @classmethod + def _emit_control_message(cls, migrated_config: Mapping[str, Any]) -> None: + # add the Airbyte Control Message to message repo + cls.message_repository.emit_message(create_connector_config_control_message(migrated_config)) + # emit the Airbyte Control Message from message queue to stdout + for message in cls.message_repository._message_queue: + print(message.json(exclude_unset=True)) + + @classmethod + def migrate(cls, args: List[str], source: SourceSlack) -> None: + """ + This method checks the input args, should the config be migrated, + transform if necessary and emit the CONTROL message. + """ + # get config path + config_path = AirbyteEntrypoint(source).extract_config(args) + # proceed only if `--config` arg is provided + if config_path: + # read the existing config + config = source.read_config(config_path) + # migration check + if cls._should_migrate(config): + cls._emit_control_message( + cls._modify_and_save(config_path, source, config), + ) diff --git a/airbyte-integrations/connectors/source-slack/source_slack/manifest.yaml b/airbyte-integrations/connectors/source-slack/source_slack/manifest.yaml new file mode 100644 index 0000000000000..5a00f9a41ea1f --- /dev/null +++ b/airbyte-integrations/connectors/source-slack/source_slack/manifest.yaml @@ -0,0 +1,260 @@ +version: 0.73.0 +type: DeclarativeSource + +definitions: + schema_loader: + type: JsonFileSchemaLoader + file_path: "./source_slack/schemas/{{ parameters['name'] }}.json" + + default_paginator: + type: DefaultPaginator + page_token_option: + type: RequestOption + inject_into: request_parameter + field_name: cursor + page_size_option: + type: RequestOption + field_name: limit + inject_into: request_parameter + pagination_strategy: + type: CursorPagination + page_size: 1000 + cursor_value: '{{ response.get("response_metadata", {}).get("next_cursor", {}) }}' + stop_condition: >- + {{ not response.get("response_metadata", {}).get("next_cursor", {}) + }} + + api_token_auth: + type: BearerAuthenticator + api_token: "{{ config['credentials']['api_token'] }}" + access_token_auth: + type: BearerAuthenticator + api_token: "{{ config['credentials']['access_token'] }}" + + requester: + type: HttpRequester + url_base: https://slack.com/api/ + path: "{{ parameters['path'] }}" + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: SelectiveAuthenticator + authenticator_selection_path: ["credentials", "option_title"] + authenticators: + Default OAuth2.0 authorization: "#/definitions/access_token_auth" + API Token Credentials: "#/definitions/api_token_auth" + request_body_json: {} + error_handler: + type: DefaultErrorHandler + response_filters: + - error_message_contains: "invalid_auth" + action: FAIL + error_message: Authentication has failed, please update your credentials. + - http_codes: [429] + action: RETRY + error_message: Failed to perform a request due to rate limits. + - http_codes: [500, 503] + action: RETRY + error_message: Failed to perform a request due to internal server error. + + selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - "{{ parameters['field_path'] }}" + + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/requester" + record_selector: + $ref: "#/definitions/selector" + paginator: + $ref: "#/definitions/default_paginator" + partition_router: [] + + stream_base: + primary_key: "id" + retriever: + $ref: "#/definitions/retriever" + schema_loader: + $ref: "#/definitions/schema_loader" + + users_stream: + primary_key: "id" + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/requester" + error_handler: + type: DefaultErrorHandler + response_filters: + - error_message_contains: "invalid_auth" + action: FAIL + error_message: Authentication has failed, please update your credentials. + - http_codes: [429] + action: RETRY + error_message: Failed to perform a request due to rate limits. + - http_codes: [403, 400] + action: FAIL + error_message: Got an exception while trying to set up the connection. Most probably, there are no users in the given Slack instance or your token is incorrect. + - http_codes: [500, 503] + action: RETRY + error_message: Failed to perform a request due to internal server error. + record_selector: + $ref: "#/definitions/selector" + paginator: + $ref: "#/definitions/default_paginator" + partition_router: [] + schema_loader: + $ref: "#/definitions/schema_loader" + $parameters: + name: users + path: users.list + field_path: members + + channels_stream: + primary_key: "id" + $parameters: + name: channels + path: conversations.list + field_path: channels + schema_loader: + $ref: "#/definitions/schema_loader" + retriever: + class_name: "source_slack.components.join_channels.ChannelsRetriever" + requester: + $ref: "#/definitions/requester" + request_parameters: + types: "public_channel" + record_selector: + $ref: "#/definitions/selector" + record_filter: + type: RecordFilter + condition: "{{ record.name in config.channel_filter or not config.channel_filter }}" + $parameters: + transformations: [[]] + paginator: + $ref: "#/definitions/default_paginator" + $parameters: + url_base: https://slack.com/api/ + partition_router: [] + + channels_partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + request_option: + type: RequestOption + field_name: channel + inject_into: request_parameter + partition_field: channel_id + stream: "#/definitions/channels_stream" + + channel_members_stream: + $ref: "#/definitions/stream_base" + $parameters: + name: channel_members + path: conversations.members + field_path: members + primary_key: + - member_id + - channel_id + retriever: + $ref: "#/definitions/retriever" + partition_router: + $ref: "#/definitions/channels_partition_router" + record_selector: + type: RecordSelector + extractor: + class_name: "source_slack.components.channel_members_extractor.ChannelMembersExtractor" + field_path: ["members"] + transformations: + - type: AddFields + fields: + - path: + - channel_id + value: "{{ stream_partition.get('channel_id') }}" + + channel_messages_stream: + $ref: "#/definitions/stream_base" + $parameters: + name: channel_messages + path: conversations.history + field_path: messages + primary_key: + - channel_id + - ts + retriever: + $ref: "#/definitions/retriever" + requester: + $ref: "#/definitions/requester" + request_parameters: + inclusive: "True" + record_selector: + $ref: "#/definitions/selector" + paginator: + $ref: "#/definitions/default_paginator" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + stream: + $ref: "#/definitions/channels_stream" + $parameters: + name: channels + path: conversations.list + field_path: channels + parent_key: id + partition_field: channel + request_option: + field_name: "channel" + inject_into: "request_parameter" + incremental_sync: + type: DatetimeBasedCursor + cursor_field: float_ts + cursor_datetime_formats: + - "%s" + step: P100D + cursor_granularity: P10D + lookback_window: "P{{ config.get('lookback_window', 0) }}D" + datetime_format: "%s" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + inject_into: request_parameter + field_name: oldest + type: RequestOption + end_time_option: + inject_into: request_parameter + field_name: latest + type: RequestOption + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + transformations: + - type: AddFields + fields: + - path: + - float_ts + value: "{{ record.ts|float }}" + - path: + - channel_id + value: "{{ stream_partition.get('channel') }}" + +streams: + - "#/definitions/users_stream" + - "#/definitions/channels_stream" + - "#/definitions/channel_members_stream" + - "#/definitions/channel_messages_stream" + +check: + type: CheckStream + stream_names: + - users diff --git a/airbyte-integrations/connectors/source-slack/source_slack/run.py b/airbyte-integrations/connectors/source-slack/source_slack/run.py index 14caa9ab08e1e..fd5e385857b95 100644 --- a/airbyte-integrations/connectors/source-slack/source_slack/run.py +++ b/airbyte-integrations/connectors/source-slack/source_slack/run.py @@ -7,8 +7,10 @@ from airbyte_cdk.entrypoint import launch from source_slack import SourceSlack +from source_slack.config_migrations import MigrateLegacyConfig def run(): source = SourceSlack() + MigrateLegacyConfig.migrate(sys.argv[1:], source) launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-slack/source_slack/source.py b/airbyte-integrations/connectors/source-slack/source_slack/source.py index e785114f865f4..3925e4bd44a67 100644 --- a/airbyte-integrations/connectors/source-slack/source_slack/source.py +++ b/airbyte-integrations/connectors/source-slack/source_slack/source.py @@ -2,354 +2,21 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # - -from abc import ABC, abstractmethod -from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple +from typing import Any, List, Mapping import pendulum -import requests -from airbyte_cdk import AirbyteLogger -from airbyte_cdk.models import SyncMode -from airbyte_cdk.sources import AbstractSource +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource from airbyte_cdk.sources.streams import Stream -from airbyte_cdk.sources.streams.http import HttpStream, HttpSubStream +from airbyte_cdk.sources.streams.http import HttpStream from airbyte_cdk.sources.streams.http.requests_native_auth import TokenAuthenticator -from pendulum import DateTime - -from .utils import chunk_date_range - - -class SlackStream(HttpStream, ABC): - url_base = "https://slack.com/api/" - primary_key = "id" - page_size = 1000 - - @property - def max_retries(self) -> int: - # Slack's rate limiting can be unpredictable so we increase the max number of retries by a lot before failing - return 20 - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - """Slack uses a cursor-based pagination strategy. - Extract the cursor from the response if it exists and return it in a format - that can be used to update request parameters""" - - json_response = response.json() - next_cursor = json_response.get("response_metadata", {}).get("next_cursor") - if next_cursor: - return {"cursor": next_cursor} - - def request_params( - self, - stream_state: Mapping[str, Any], - stream_slice: Mapping[str, Any] = None, - next_page_token: Mapping[str, Any] = None, - ) -> MutableMapping[str, Any]: - params = {"limit": self.page_size} - if next_page_token: - params.update(**next_page_token) - return params - - def parse_response( - self, - response: requests.Response, - stream_state: Mapping[str, Any] = None, - stream_slice: Mapping[str, Any] = None, - next_page_token: Mapping[str, Any] = None, - ) -> Iterable[MutableMapping]: - json_response = response.json() - yield from json_response.get(self.data_field, []) - - def backoff_time(self, response: requests.Response) -> Optional[float]: - """This method is called if we run into the rate limit. - Slack puts the retry time in the `Retry-After` response header so we - we return that value. If the response is anything other than a 429 (e.g: 5XX) - fall back on default retry behavior. - Rate Limits Docs: https://api.slack.com/docs/rate-limits#web""" - - if "Retry-After" in response.headers: - return int(response.headers["Retry-After"]) - else: - self.logger.info("Retry-after header not found. Using default backoff value") - return 5 - - @property - @abstractmethod - def data_field(self) -> str: - """The name of the field in the response which contains the data""" - - def should_retry(self, response: requests.Response) -> bool: - return response.status_code == requests.codes.REQUEST_TIMEOUT or super().should_retry(response) - - -class JoinChannelsStream(HttpStream): - """ - This class is a special stream which joins channels because the Slack API only returns messages from channels this bot is in. - Its responses should only be logged for debugging reasons, not read as records. - """ - - url_base = "https://slack.com/api/" - http_method = "POST" - primary_key = "id" - - def __init__(self, channel_filter: List[str] = None, **kwargs): - self.channel_filter = channel_filter or [] - super().__init__(**kwargs) - - def parse_response(self, response: requests.Response, stream_slice: Mapping[str, Any] = None, **kwargs) -> Iterable: - """ - Override to simply indicate that the specific channel was joined successfully. - This method should not return any data, but should return an empty iterable. - """ - self.logger.info(f"Successfully joined channel: {stream_slice['channel_name']}") - return [] - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - """ - The pagination is not applicable to this Service Stream. - """ - return None - - def path(self, **kwargs) -> str: - return "conversations.join" - - def request_body_json(self, stream_slice: Mapping = None, **kwargs) -> Optional[Mapping]: - return {"channel": stream_slice["channel"]} - - -class ChanneledStream(SlackStream, ABC): - """Slack stream with channel filter""" - - def __init__(self, channel_filter: List[str] = [], join_channels: bool = False, **kwargs): - self.channel_filter = channel_filter - self.join_channels = join_channels - self.kwargs = kwargs - super().__init__(**kwargs) - - @property - def join_channels_stream(self) -> JoinChannelsStream: - return JoinChannelsStream(authenticator=self.kwargs.get("authenticator"), channel_filter=self.channel_filter) - - def should_join_to_channel(self, channel: Mapping[str, Any]) -> bool: - """ - The `is_member` property indicates whether or not the API Bot is already assigned / joined to the channel. - https://api.slack.com/types/conversation#booleans - """ - return self.join_channels and not channel.get("is_member") - - def make_join_channel_slice(self, channel: Mapping[str, Any]) -> Mapping[str, Any]: - channel_id: str = channel.get("id") - channel_name: str = channel.get("name") - self.logger.info(f"Joining Slack Channel: `{channel_name}`") - return {"channel": channel_id, "channel_name": channel_name} - - -class Channels(ChanneledStream): - data_field = "channels" - - @property - def use_cache(self) -> bool: - return True - - def path(self, **kwargs) -> str: - return "conversations.list" - - def request_params(self, **kwargs) -> MutableMapping[str, Any]: - params = super().request_params(**kwargs) - params["types"] = "public_channel" - return params - - def parse_response(self, response: requests.Response, **kwargs) -> Iterable[MutableMapping]: - json_response = response.json() - channels = json_response.get(self.data_field, []) - if self.channel_filter: - channels = [channel for channel in channels if channel["name"] in self.channel_filter] - yield from channels - - def read_records(self, sync_mode: SyncMode, **kwargs) -> Iterable[Mapping[str, Any]]: - """ - Override the default `read_records` method to provide the `JoinChannelsStream` functionality, - and be able to read all the channels, not just the ones that already has the API Bot joined. - """ - for channel in super().read_records(sync_mode=sync_mode): - # check the channel should be joined before reading - if self.should_join_to_channel(channel): - # join the channel before reading it - yield from self.join_channels_stream.read_records( - sync_mode=sync_mode, - stream_slice=self.make_join_channel_slice(channel), - ) - # reading the channel data - self.logger.info(f"Reading the channel: `{channel.get('name')}`") - yield channel - - -class ChannelMembers(ChanneledStream): - data_field = "members" - primary_key = ["member_id", "channel_id"] - - def path(self, **kwargs) -> str: - return "conversations.members" - - def request_params(self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, **kwargs) -> MutableMapping[str, Any]: - params = super().request_params(stream_state=stream_state, stream_slice=stream_slice, **kwargs) - params["channel"] = stream_slice["channel_id"] - return params +from source_slack.streams import Threads - def parse_response(self, response: requests.Response, stream_slice: Mapping[str, Any] = None, **kwargs) -> Iterable[Mapping]: - for member_id in super().parse_response(response, **kwargs): - # Slack just returns raw IDs as a string, so we want to put them in a "join table" format - yield {"member_id": member_id, "channel_id": stream_slice["channel_id"]} - def stream_slices(self, **kwargs) -> Iterable[Optional[Mapping[str, Any]]]: - channels_stream = Channels(authenticator=self._session.auth, channel_filter=self.channel_filter) - for channel_record in channels_stream.read_records(sync_mode=SyncMode.full_refresh): - yield {"channel_id": channel_record["id"]} +class SourceSlack(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "manifest.yaml"}) - -class Users(SlackStream): - data_field = "members" - - def path(self, **kwargs) -> str: - return "users.list" - - -# Incremental Streams -class IncrementalMessageStream(ChanneledStream, ABC): - data_field = "messages" - cursor_field = "float_ts" - primary_key = ["channel_id", "ts"] - - def __init__(self, default_start_date: DateTime, end_date: Optional[DateTime] = None, **kwargs): - self._start_ts = default_start_date.timestamp() - self._end_ts = end_date and end_date.timestamp() - self.set_sub_primary_key() - super().__init__(**kwargs) - - def set_sub_primary_key(self): - if isinstance(self.primary_key, list): - for index, value in enumerate(self.primary_key): - setattr(self, f"sub_primary_key_{index + 1}", value) - else: - self.logger.error("Failed during setting sub primary keys. Primary key should be list.") - - def request_params(self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, **kwargs) -> MutableMapping[str, Any]: - params = super().request_params(stream_state=stream_state, stream_slice=stream_slice, **kwargs) - params.update(**stream_slice) - return params - - def parse_response(self, response: requests.Response, stream_slice: Mapping[str, Any] = None, **kwargs) -> Iterable[Mapping]: - for record in super().parse_response(response, **kwargs): - record[self.sub_primary_key_1] = stream_slice.get("channel", "") - record[self.cursor_field] = float(record[self.sub_primary_key_2]) - yield record - - def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: - current_stream_state = current_stream_state or {} - current_stream_state[self.cursor_field] = max( - latest_record[self.cursor_field], current_stream_state.get(self.cursor_field, self._start_ts) - ) - - return current_stream_state - - def read_records( - self, - sync_mode: SyncMode, - cursor_field: List[str] = None, - stream_slice: Mapping[str, Any] = None, - stream_state: Mapping[str, Any] = None, - ) -> Iterable[Mapping[str, Any]]: - if not stream_slice: - # return an empty iterator - # this is done to emit at least one state message when no slices are generated - return iter([]) - return super().read_records(sync_mode, cursor_field=cursor_field, stream_slice=stream_slice, stream_state=stream_state) - - -class ChannelMessages(HttpSubStream, IncrementalMessageStream): - def path(self, **kwargs) -> str: - return "conversations.history" - - @property - def use_cache(self) -> bool: - return True - - def stream_slices(self, stream_state: Mapping[str, Any] = None, **kwargs) -> Iterable[Optional[Mapping[str, Any]]]: - stream_state = stream_state or {} - start_date = pendulum.from_timestamp(stream_state.get(self.cursor_field, self._start_ts)) - end_date = self._end_ts and pendulum.from_timestamp(self._end_ts) - slice_yielded = False - for parent_slice in super().stream_slices(sync_mode=SyncMode.full_refresh): - channel = parent_slice["parent"] - for period in chunk_date_range(start_date=start_date, end_date=end_date): - yield {"channel": channel["id"], "oldest": period.start.timestamp(), "latest": period.end.timestamp()} - slice_yielded = True - if not slice_yielded: - # yield an empty slice to checkpoint state later - yield {} - - -class Threads(IncrementalMessageStream): - def __init__(self, lookback_window: Mapping[str, int], **kwargs): - self.messages_lookback_window = lookback_window - super().__init__(**kwargs) - - def path(self, **kwargs) -> str: - return "conversations.replies" - - def stream_slices(self, stream_state: Mapping[str, Any] = None, **kwargs) -> Iterable[Optional[Mapping[str, Any]]]: - """ - The logic for incrementally syncing threads is not very obvious, so buckle up. - - To get all messages in a thread, one must specify the channel and timestamp of the parent (first) message of that thread, - basically its ID. - - One complication is that threads can be updated at Any time in the future. Therefore, if we wanted to comprehensively sync data - i.e: get every single response in a thread, we'd have to read every message in the slack instance every time we ran a sync, - because otherwise there is no way to guarantee that a thread deep in the past didn't receive a new message. - - A pragmatic workaround is to say we want threads to be at least N days fresh i.e: look back N days into the past, - get every message since, and read all of the thread responses. This is essentially the approach we're taking here via slicing: - create slices from N days into the past and read all messages in threads since then. We could optionally filter out records we have - already read, but that's omitted to keep the logic simple to reason about. - - Good luck. - """ - - stream_state = stream_state or {} - channels_stream = Channels(authenticator=self._session.auth, channel_filter=self.channel_filter) - - if self.cursor_field in stream_state: - # Since new messages can be posted to threads continuously after the parent message has been posted, - # we get messages from the latest date - # found in the state minus X days to pick up any new messages in threads. - # If there is state always use lookback - messages_start_date = pendulum.from_timestamp(stream_state[self.cursor_field]) - self.messages_lookback_window - else: - # If there is no state i.e: this is the first sync then there is no use for lookback, just get messages - # from the default start date - messages_start_date = pendulum.from_timestamp(self._start_ts) - - messages_stream = ChannelMessages( - parent=channels_stream, - authenticator=self._session.auth, - default_start_date=messages_start_date, - end_date=self._end_ts and pendulum.from_timestamp(self._end_ts), - ) - - slice_yielded = False - for message_chunk in messages_stream.stream_slices(stream_state={self.cursor_field: messages_start_date.timestamp()}): - self.logger.info(f"Syncing replies {message_chunk}") - for message in messages_stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice=message_chunk): - yield {"channel": message_chunk["channel"], self.sub_primary_key_2: message[self.sub_primary_key_2]} - slice_yielded = True - if not slice_yielded: - # yield an empty slice to checkpoint state later - yield {} - - -class SourceSlack(AbstractSource): - def _get_authenticator(self, config: Mapping[str, Any]): + def _threads_authenticator(self, config: Mapping[str, Any]): # Added to maintain backward compatibility with previous versions if "api_token" in config: return TokenAuthenticator(config["api_token"]) @@ -363,48 +30,27 @@ def _get_authenticator(self, config: Mapping[str, Any]): else: raise Exception(f"No supported option_title: {credentials_title} specified. See spec.json for references") - def check_connection(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> Tuple[bool, Optional[Any]]: - try: - authenticator = self._get_authenticator(config) - users_stream = Users(authenticator=authenticator) - next(users_stream.read_records(SyncMode.full_refresh)) - return True, None - except Exception as e: - return ( - False, - f"Got an exception while trying to set up the connection: {e}. " - f"Most probably, there are no users in the given Slack instance or your token is incorrect", - ) - - def streams(self, config: Mapping[str, Any]) -> List[Stream]: - authenticator = self._get_authenticator(config) + def get_threads_stream(self, config: Mapping[str, Any]) -> HttpStream: + authenticator = self._threads_authenticator(config) default_start_date = pendulum.parse(config["start_date"]) # this field is not exposed to spec, used only for testing purposes end_date = config.get("end_date") end_date = end_date and pendulum.parse(end_date) threads_lookback_window = pendulum.Duration(days=config["lookback_window"]) channel_filter = config.get("channel_filter", []) - should_join_to_channels = config.get("join_channels") + threads = Threads( + authenticator=authenticator, + default_start_date=default_start_date, + end_date=end_date, + lookback_window=threads_lookback_window, + channel_filter=channel_filter, + ) + return threads + + def streams(self, config: Mapping[str, Any]) -> List[Stream]: + declarative_streams = super().streams(config) - channels = Channels(authenticator=authenticator, join_channels=should_join_to_channels, channel_filter=channel_filter) - streams = [ - channels, - ChannelMembers(authenticator=authenticator, channel_filter=channel_filter), - ChannelMessages( - parent=channels, - authenticator=authenticator, - default_start_date=default_start_date, - end_date=end_date, - channel_filter=channel_filter, - ), - Threads( - authenticator=authenticator, - default_start_date=default_start_date, - end_date=end_date, - lookback_window=threads_lookback_window, - channel_filter=channel_filter, - ), - Users(authenticator=authenticator), - ] + threads_stream = self.get_threads_stream(config) + declarative_streams.append(threads_stream) - return streams + return declarative_streams diff --git a/airbyte-integrations/connectors/source-slack/source_slack/spec.json b/airbyte-integrations/connectors/source-slack/source_slack/spec.json index 2ed0ba91abbdf..e59f508e41abe 100644 --- a/airbyte-integrations/connectors/source-slack/source_slack/spec.json +++ b/airbyte-integrations/connectors/source-slack/source_slack/spec.json @@ -1,5 +1,4 @@ { - "documentationUrl": "https://docs.airbyte.com/integrations/sources/slack", "connectionSpecification": { "$schema": "http://json-schema.org/draft-07/schema#", "title": "Slack Spec", @@ -107,6 +106,7 @@ "predicate_key": ["credentials", "option_title"], "predicate_value": "Default OAuth2.0 authorization", "oauth_config_specification": { + "oauth_user_input_from_connector_config_specification": null, "complete_oauth_output_specification": { "type": "object", "additionalProperties": false, diff --git a/airbyte-integrations/connectors/source-slack/source_slack/streams.py b/airbyte-integrations/connectors/source-slack/source_slack/streams.py new file mode 100644 index 0000000000000..b565d9670e25b --- /dev/null +++ b/airbyte-integrations/connectors/source-slack/source_slack/streams.py @@ -0,0 +1,280 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +from abc import ABC, abstractmethod +from typing import Any, Iterable, List, Mapping, MutableMapping, Optional + +import pendulum +import requests +from airbyte_cdk.models import SyncMode +from airbyte_cdk.sources.streams.http import HttpStream, HttpSubStream +from pendulum import DateTime + +from .components.join_channels import JoinChannelsStream +from .utils import chunk_date_range + + +class SlackStream(HttpStream, ABC): + url_base = "https://slack.com/api/" + primary_key = "id" + page_size = 1000 + + @property + def max_retries(self) -> int: + # Slack's rate limiting can be unpredictable so we increase the max number of retries by a lot before failing + return 20 + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + """Slack uses a cursor-based pagination strategy. + Extract the cursor from the response if it exists and return it in a format + that can be used to update request parameters""" + + json_response = response.json() + next_cursor = json_response.get("response_metadata", {}).get("next_cursor") + if next_cursor: + return {"cursor": next_cursor} + + def request_params( + self, + stream_state: Mapping[str, Any], + stream_slice: Mapping[str, Any] = None, + next_page_token: Mapping[str, Any] = None, + ) -> MutableMapping[str, Any]: + params = {"limit": self.page_size} + if next_page_token: + params.update(**next_page_token) + return params + + def parse_response( + self, + response: requests.Response, + stream_state: Mapping[str, Any] = None, + stream_slice: Mapping[str, Any] = None, + next_page_token: Mapping[str, Any] = None, + ) -> Iterable[MutableMapping]: + json_response = response.json() + yield from json_response.get(self.data_field, []) + + def backoff_time(self, response: requests.Response) -> Optional[float]: + """This method is called if we run into the rate limit. + Slack puts the retry time in the `Retry-After` response header so we + we return that value. If the response is anything other than a 429 (e.g: 5XX) + fall back on default retry behavior. + Rate Limits Docs: https://api.slack.com/docs/rate-limits#web""" + + if "Retry-After" in response.headers: + return int(response.headers["Retry-After"]) + else: + self.logger.info("Retry-after header not found. Using default backoff value") + return 5 + + @property + @abstractmethod + def data_field(self) -> str: + """The name of the field in the response which contains the data""" + + def should_retry(self, response: requests.Response) -> bool: + return response.status_code == requests.codes.REQUEST_TIMEOUT or super().should_retry(response) + + +class ChanneledStream(SlackStream, ABC): + """Slack stream with channel filter""" + + def __init__(self, channel_filter: List[str] = [], join_channels: bool = False, **kwargs): + self.channel_filter = channel_filter + self.join_channels = join_channels + self.kwargs = kwargs + super().__init__(**kwargs) + + @property + def join_channels_stream(self) -> JoinChannelsStream: + return JoinChannelsStream(authenticator=self.kwargs.get("authenticator"), channel_filter=self.channel_filter) + + def should_join_to_channel(self, channel: Mapping[str, Any]) -> bool: + """ + The `is_member` property indicates whether or not the API Bot is already assigned / joined to the channel. + https://api.slack.com/types/conversation#booleans + """ + return self.join_channels and not channel.get("is_member") + + def make_join_channel_slice(self, channel: Mapping[str, Any]) -> Mapping[str, Any]: + channel_id: str = channel.get("id") + channel_name: str = channel.get("name") + self.logger.info(f"Joining Slack Channel: `{channel_name}`") + return {"channel": channel_id, "channel_name": channel_name} + + +class Channels(ChanneledStream): + data_field = "channels" + + @property + def use_cache(self) -> bool: + return True + + def path(self, **kwargs) -> str: + return "conversations.list" + + def request_params(self, **kwargs) -> MutableMapping[str, Any]: + params = super().request_params(**kwargs) + params["types"] = "public_channel" + return params + + def parse_response(self, response: requests.Response, **kwargs) -> Iterable[MutableMapping]: + json_response = response.json() + channels = json_response.get(self.data_field, []) + if self.channel_filter: + channels = [channel for channel in channels if channel["name"] in self.channel_filter] + yield from channels + + def read_records(self, sync_mode: SyncMode, **kwargs) -> Iterable[Mapping[str, Any]]: + """ + Override the default `read_records` method to provide the `JoinChannelsStream` functionality, + and be able to read all the channels, not just the ones that already has the API Bot joined. + """ + for channel in super().read_records(sync_mode=sync_mode): + # check the channel should be joined before reading + if self.should_join_to_channel(channel): + # join the channel before reading it + yield from self.join_channels_stream.read_records( + sync_mode=sync_mode, + stream_slice=self.make_join_channel_slice(channel), + ) + # reading the channel data + self.logger.info(f"Reading the channel: `{channel.get('name')}`") + yield channel + + +# Incremental Streams +class IncrementalMessageStream(ChanneledStream, ABC): + data_field = "messages" + cursor_field = "float_ts" + primary_key = ["channel_id", "ts"] + + def __init__(self, default_start_date: DateTime, end_date: Optional[DateTime] = None, **kwargs): + self._start_ts = default_start_date.timestamp() + self._end_ts = end_date and end_date.timestamp() + self.set_sub_primary_key() + super().__init__(**kwargs) + + def set_sub_primary_key(self): + if isinstance(self.primary_key, list): + for index, value in enumerate(self.primary_key): + setattr(self, f"sub_primary_key_{index + 1}", value) + else: + self.logger.error("Failed during setting sub primary keys. Primary key should be list.") + + def request_params(self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, **kwargs) -> MutableMapping[str, Any]: + params = super().request_params(stream_state=stream_state, stream_slice=stream_slice, **kwargs) + params.update(**stream_slice) + return params + + def parse_response(self, response: requests.Response, stream_slice: Mapping[str, Any] = None, **kwargs) -> Iterable[Mapping]: + for record in super().parse_response(response, **kwargs): + record[self.sub_primary_key_1] = stream_slice.get("channel", "") + record[self.cursor_field] = float(record[self.sub_primary_key_2]) + yield record + + def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: + current_stream_state = current_stream_state or {} + current_stream_state[self.cursor_field] = max( + latest_record[self.cursor_field], current_stream_state.get(self.cursor_field, self._start_ts) + ) + + return current_stream_state + + def read_records( + self, + sync_mode: SyncMode, + cursor_field: List[str] = None, + stream_slice: Mapping[str, Any] = None, + stream_state: Mapping[str, Any] = None, + ) -> Iterable[Mapping[str, Any]]: + if not stream_slice: + # return an empty iterator + # this is done to emit at least one state message when no slices are generated + return iter([]) + return super().read_records(sync_mode, cursor_field=cursor_field, stream_slice=stream_slice, stream_state=stream_state) + + +class ChannelMessages(HttpSubStream, IncrementalMessageStream): + def path(self, **kwargs) -> str: + return "conversations.history" + + @property + def use_cache(self) -> bool: + return True + + def stream_slices(self, stream_state: Mapping[str, Any] = None, **kwargs) -> Iterable[Optional[Mapping[str, Any]]]: + stream_state = stream_state or {} + start_date = pendulum.from_timestamp(stream_state.get(self.cursor_field, self._start_ts)) + end_date = self._end_ts and pendulum.from_timestamp(self._end_ts) + slice_yielded = False + for parent_slice in super().stream_slices(sync_mode=SyncMode.full_refresh): + channel = parent_slice["parent"] + for period in chunk_date_range(start_date=start_date, end_date=end_date): + yield {"channel": channel["id"], "oldest": period.start.timestamp(), "latest": period.end.timestamp()} + slice_yielded = True + if not slice_yielded: + # yield an empty slice to checkpoint state later + yield {} + + +class Threads(IncrementalMessageStream): + def __init__(self, lookback_window: Mapping[str, int], **kwargs): + self.messages_lookback_window = lookback_window + super().__init__(**kwargs) + + def path(self, **kwargs) -> str: + return "conversations.replies" + + def stream_slices(self, stream_state: Mapping[str, Any] = None, **kwargs) -> Iterable[Optional[Mapping[str, Any]]]: + """ + The logic for incrementally syncing threads is not very obvious, so buckle up. + + To get all messages in a thread, one must specify the channel and timestamp of the parent (first) message of that thread, + basically its ID. + + One complication is that threads can be updated at Any time in the future. Therefore, if we wanted to comprehensively sync data + i.e: get every single response in a thread, we'd have to read every message in the slack instance every time we ran a sync, + because otherwise there is no way to guarantee that a thread deep in the past didn't receive a new message. + + A pragmatic workaround is to say we want threads to be at least N days fresh i.e: look back N days into the past, + get every message since, and read all of the thread responses. This is essentially the approach we're taking here via slicing: + create slices from N days into the past and read all messages in threads since then. We could optionally filter out records we have + already read, but that's omitted to keep the logic simple to reason about. + + Good luck. + """ + + stream_state = stream_state or {} + channels_stream = Channels(authenticator=self._session.auth, channel_filter=self.channel_filter) + + if self.cursor_field in stream_state: + # Since new messages can be posted to threads continuously after the parent message has been posted, + # we get messages from the latest date + # found in the state minus X days to pick up any new messages in threads. + # If there is state always use lookback + messages_start_date = pendulum.from_timestamp(stream_state[self.cursor_field]) - self.messages_lookback_window + else: + # If there is no state i.e: this is the first sync then there is no use for lookback, just get messages + # from the default start date + messages_start_date = pendulum.from_timestamp(self._start_ts) + + messages_stream = ChannelMessages( + parent=channels_stream, + authenticator=self._session.auth, + default_start_date=messages_start_date, + end_date=self._end_ts and pendulum.from_timestamp(self._end_ts), + ) + + slice_yielded = False + for message_chunk in messages_stream.stream_slices(stream_state={self.cursor_field: messages_start_date.timestamp()}): + self.logger.info(f"Syncing replies {message_chunk}") + for message in messages_stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice=message_chunk): + yield {"channel": message_chunk["channel"], self.sub_primary_key_2: message[self.sub_primary_key_2]} + slice_yielded = True + if not slice_yielded: + # yield an empty slice to checkpoint state later + yield {} diff --git a/airbyte-integrations/connectors/source-slack/source_slack/utils.py b/airbyte-integrations/connectors/source-slack/source_slack/utils.py index 7507dbab35657..febfb788442ac 100644 --- a/airbyte-integrations/connectors/source-slack/source_slack/utils.py +++ b/airbyte-integrations/connectors/source-slack/source_slack/utils.py @@ -9,7 +9,7 @@ from pendulum import DateTime, Period -def chunk_date_range(start_date: DateTime, interval=pendulum.duration(days=1), end_date: Optional[DateTime] = None) -> Iterable[Period]: +def chunk_date_range(start_date: DateTime, interval=pendulum.duration(days=100), end_date: Optional[DateTime] = None) -> Iterable[Period]: """ Yields a list of the beginning and ending timestamps of each day between the start date and now. The return value is a pendulum.period diff --git a/airbyte-integrations/connectors/source-slack/unit_tests/configs/actual_config.json b/airbyte-integrations/connectors/source-slack/unit_tests/configs/actual_config.json new file mode 100644 index 0000000000000..065ffde78394f --- /dev/null +++ b/airbyte-integrations/connectors/source-slack/unit_tests/configs/actual_config.json @@ -0,0 +1,11 @@ +{ + "start_date": "2021-07-22T20:00:00Z", + "end_date": "2021-07-23T20:00:00Z", + "lookback_window": 1, + "join_channels": true, + "channel_filter": ["airbyte-for-beginners", "good-reads"], + "credentials": { + "api_token": "api-token", + "option_title": "API Token Credentials" + } +} diff --git a/airbyte-integrations/connectors/source-slack/unit_tests/configs/legacy_config.json b/airbyte-integrations/connectors/source-slack/unit_tests/configs/legacy_config.json new file mode 100644 index 0000000000000..99eda1d750b38 --- /dev/null +++ b/airbyte-integrations/connectors/source-slack/unit_tests/configs/legacy_config.json @@ -0,0 +1,8 @@ +{ + "start_date": "2021-07-22T20:00:00Z", + "end_date": "2021-07-23T20:00:00Z", + "lookback_window": 1, + "join_channels": true, + "channel_filter": ["airbyte-for-beginners", "good-reads"], + "api_token": "api-token" +} diff --git a/airbyte-integrations/connectors/source-slack/unit_tests/conftest.py b/airbyte-integrations/connectors/source-slack/unit_tests/conftest.py index 6d9254730d5f6..002a9ec96779d 100644 --- a/airbyte-integrations/connectors/source-slack/unit_tests/conftest.py +++ b/airbyte-integrations/connectors/source-slack/unit_tests/conftest.py @@ -18,20 +18,11 @@ def conversations_list(requests_mock): "https://slack.com/api/conversations.list?limit=1000&types=public_channel", json={ "channels": [ - {"name": "advice-data-architecture", "id": 1, "is_member": False}, - {"name": "advice-data-orchestration", "id": 2, "is_member": True}, - {"name": "airbyte-for-beginners", "id": 3, "is_member": False}, - {"name": "good-reads", "id": 4, "is_member": True}, - ] + {"id": "airbyte-for-beginners", "is_member": True}, + {"id": "good-reads", "is_member": True}] }, ) - -@pytest.fixture(autouse=True) -def join_channels(requests_mock): - return requests_mock.register_uri("POST", "https://slack.com/api/conversations.join") - - def base_config() -> MutableMapping: return copy.deepcopy( { @@ -100,7 +91,18 @@ def invalid_config() -> MutableMapping: ( (_token_config(), True), (_oauth_config(), True), - (_legacy_token_config(), True), (_invalid_config(), False), ), ) + + +@pytest.fixture +def joined_channel(): + return {"id": "C061EG9SL", "name": "general", "is_channel": True, "is_group": False, "is_im": False, + "created": 1449252889, + "creator": "U061F7AUR", "is_archived": False, "is_general": True, "unlinked": 0, "name_normalized": "general", + "is_shared": False, + "is_ext_shared": False, "is_org_shared": False, "pending_shared": [], "is_pending_ext_shared": False, + "is_member": True, "is_private": False, "is_mpim": False, + "topic": {"value": "Which widget do you worry about?", "creator": "", "last_set": 0}, + "purpose": {"value": "For widget discussion", "creator": "", "last_set": 0}, "previous_names": []} diff --git a/airbyte-integrations/connectors/source-slack/unit_tests/test_components.py b/airbyte-integrations/connectors/source-slack/unit_tests/test_components.py new file mode 100644 index 0000000000000..e40e700fe97b8 --- /dev/null +++ b/airbyte-integrations/connectors/source-slack/unit_tests/test_components.py @@ -0,0 +1,99 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from unittest.mock import MagicMock + +import pendulum +import pytest +from airbyte_cdk.sources.declarative.extractors import DpathExtractor, RecordSelector +from airbyte_cdk.sources.declarative.requesters import HttpRequester +from airbyte_cdk.sources.streams.http.auth import TokenAuthenticator +from airbyte_protocol.models import SyncMode +from source_slack import SourceSlack +from source_slack.components.channel_members_extractor import ChannelMembersExtractor +from source_slack.components.join_channels import ChannelsRetriever, JoinChannelsStream + + +def get_stream_by_name(stream_name, config): + streams = SourceSlack().streams(config=config) + for stream in streams: + if stream.name == stream_name: + return stream + raise ValueError(f"Stream {stream_name} not found") + + +def test_channel_members_extractor(token_config): + response_mock = MagicMock() + response_mock.json.return_value = {"members": [ + "U023BECGF", + "U061F7AUR", + "W012A3CDE" + ]} + records = ChannelMembersExtractor(config=token_config, parameters={}, field_path=["members"]).extract_records(response=response_mock) + assert records == [{"member_id": "U023BECGF"}, + {"member_id": "U061F7AUR"}, + {"member_id": "W012A3CDE"}] + + +def test_join_channels(token_config, requests_mock, joined_channel): + mocked_request = requests_mock.post( + url="https://slack.com/api/conversations.join", + json={"ok": True, "channel": joined_channel} + ) + token = token_config["credentials"]["api_token"] + authenticator = TokenAuthenticator(token) + channel_filter = token_config["channel_filter"] + stream = JoinChannelsStream(authenticator=authenticator, channel_filter=channel_filter) + records = stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice={"channel": "C061EG9SL", "channel_name": "general"}) + assert not list(records) + assert mocked_request.called + + +def get_channels_retriever_instance(token_config): + return ChannelsRetriever( + config=token_config, + requester=HttpRequester(name="channels", path="conversations.list", url_base="https://slack.com/api/", config=token_config, + parameters={}), + record_selector=RecordSelector( + extractor=DpathExtractor(field_path=["channels"], config=token_config, parameters={}), + config=token_config, parameters={}, + schema_normalization=None), + parameters={} + ) + + +def test_join_channels_should_join_to_channel(token_config): + retriever = get_channels_retriever_instance(token_config) + assert retriever.should_join_to_channel(token_config, {"is_member": False}) is True + assert retriever.should_join_to_channel(token_config, {"is_member": True}) is False + + +def test_join_channels_make_join_channel_slice(token_config): + retriever = get_channels_retriever_instance(token_config) + expected_slice = {"channel": "C061EG9SL", "channel_name": "general"} + assert retriever.make_join_channel_slice({"id": "C061EG9SL", "name": "general"}) == expected_slice + + +@pytest.mark.parametrize( + "join_response, log_message", + ( + ({"ok": True, "channel": {"is_member": True, "id": "channel 2", "name": "test channel"}}, "Successfully joined channel: test channel"), + ({"ok": False, "error": "missing_scope", "needed": "channels:write"}, + "Unable to joined channel: test channel. Reason: {'ok': False, 'error': " "'missing_scope', 'needed': 'channels:write'}"), + ), + ids=["successful_join_to_channel", "failed_join_to_channel"] +) +def test_join_channel_read(requests_mock, token_config, joined_channel, caplog, join_response, log_message): + mocked_request = requests_mock.post( + url="https://slack.com/api/conversations.join", + json=join_response + ) + requests_mock.get( + url="https://slack.com/api/conversations.list", + json={"channels": [{"is_member": True, "id": "channel 1"}, {"is_member": False, "id": "channel 2", "name": "test channel"}]} + ) + + retriever = get_channels_retriever_instance(token_config) + assert len(list(retriever.read_records(records_schema={}))) == 2 + assert mocked_request.called + assert mocked_request.last_request._request.body == b'{"channel": "channel 2"}' + assert log_message in caplog.text diff --git a/airbyte-integrations/connectors/source-slack/unit_tests/test_config_migrations.py b/airbyte-integrations/connectors/source-slack/unit_tests/test_config_migrations.py new file mode 100644 index 0000000000000..761597a66fc22 --- /dev/null +++ b/airbyte-integrations/connectors/source-slack/unit_tests/test_config_migrations.py @@ -0,0 +1,47 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +import json +import os +from typing import Any, Mapping + +from source_slack import SourceSlack +from source_slack.config_migrations import MigrateLegacyConfig + +CMD = "check" +TEST_CONFIG_LEGACY_PATH = f"{os.path.dirname(__file__)}/configs/legacy_config.json" +TEST_CONFIG_ACTUAL_PATH = f"{os.path.dirname(__file__)}/configs/actual_config.json" + +SOURCE_INPUT_ARGS_LEGACY = [CMD, "--config", TEST_CONFIG_LEGACY_PATH] +SOURCE_INPUT_ARGS_ACTUAL = [CMD, "--config", TEST_CONFIG_ACTUAL_PATH] + + +def revert_config(): + with open(TEST_CONFIG_LEGACY_PATH, "r") as test_config: + config = json.load(test_config) + config.pop("credentials") + config.update({"api_token": "api-token"}) + with open(TEST_CONFIG_LEGACY_PATH, "w") as updated_config: + config = json.dumps(config) + updated_config.write(config) + + +def load_config(config_path: str = TEST_CONFIG_LEGACY_PATH) -> Mapping[str, Any]: + with open(config_path, "r") as config: + return json.load(config) + + +def test_config_migration(): + migration = MigrateLegacyConfig() + migration.migrate(SOURCE_INPUT_ARGS_LEGACY, SourceSlack()) + test_migrated_config = load_config() + assert test_migrated_config["credentials"]["api_token"] == "api-token" + assert test_migrated_config["credentials"]["option_title"] == "API Token Credentials" + revert_config() + + +def test_config_not_migrated(): + config_before_migration = load_config(TEST_CONFIG_ACTUAL_PATH) + migration = MigrateLegacyConfig() + migration.migrate(SOURCE_INPUT_ARGS_ACTUAL, SourceSlack()) + test_migrated_config = load_config(TEST_CONFIG_ACTUAL_PATH) + assert config_before_migration == test_migrated_config diff --git a/airbyte-integrations/connectors/source-slack/unit_tests/test_source.py b/airbyte-integrations/connectors/source-slack/unit_tests/test_source.py index bef3bf26651f9..ae1a589227970 100644 --- a/airbyte-integrations/connectors/source-slack/unit_tests/test_source.py +++ b/airbyte-integrations/connectors/source-slack/unit_tests/test_source.py @@ -10,6 +10,13 @@ from .conftest import parametrized_configs +def get_stream_by_name(stream_name, config): + streams = SourceSlack().streams(config=config) + for stream in streams: + if stream.name == stream_name: + return stream + raise ValueError(f"Stream {stream_name} not found") + @parametrized_configs def test_streams(conversations_list, config, is_valid): source = SourceSlack() @@ -19,28 +26,25 @@ def test_streams(conversations_list, config, is_valid): else: with pytest.raises(Exception) as exc_info: _ = source.streams(config) - assert "No supported option_title: None specified. See spec.json for references" in repr(exc_info.value) + assert "The path from `authenticator_selection_path` is not found in the config." in repr(exc_info.value) @pytest.mark.parametrize( "status_code, response, is_connection_successful, error_msg", ( (200, {"members": [{"id": 1, "name": "Abraham"}]}, True, None), + (200, {"ok": False, "error": "invalid_auth"}, False, "Authentication has failed, please update your credentials."), ( 400, "Bad request", False, - "Got an exception while trying to set up the connection: 400 Client Error: " - "None for url: https://slack.com/api/users.list?limit=1000. Most probably, there are no users in the given Slack instance or " - "your token is incorrect", + "Got an exception while trying to set up the connection. Most probably, there are no users in the given Slack instance or your token is incorrect.", ), ( 403, "Forbidden", False, - "Got an exception while trying to set up the connection: 403 Client Error: " - "None for url: https://slack.com/api/users.list?limit=1000. Most probably, there are no users in the given Slack instance or " - "your token is incorrect", + "Got an exception while trying to set up the connection. Most probably, there are no users in the given Slack instance or your token is incorrect.", ), ), ) @@ -49,4 +53,20 @@ def test_check_connection(token_config, requests_mock, status_code, response, is source = SourceSlack() success, error = source.check_connection(logger=logging.getLogger("airbyte"), config=token_config) assert success is is_connection_successful - assert error == error_msg + if not success: + assert error_msg in error + + +def test_threads_auth(token_config, oauth_config): + source = SourceSlack() + auth = source._threads_authenticator(token_config) + assert auth.token == "Bearer api-token" + source = SourceSlack() + auth = source._threads_authenticator(oauth_config) + assert auth.token == "Bearer access-token" + + +def test_get_threads_stream(token_config): + source = SourceSlack() + threads_stream = source.get_threads_stream(token_config) + assert threads_stream diff --git a/airbyte-integrations/connectors/source-slack/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-slack/unit_tests/test_streams.py index d0327093318f8..9a3cd092d90b4 100644 --- a/airbyte-integrations/connectors/source-slack/unit_tests/test_streams.py +++ b/airbyte-integrations/connectors/source-slack/unit_tests/test_streams.py @@ -7,12 +7,21 @@ import pendulum import pytest from airbyte_cdk.sources.streams.http.requests_native_auth import TokenAuthenticator -from source_slack.source import Channels, Threads, Users +from source_slack import SourceSlack +from source_slack.streams import Channels, JoinChannelsStream, Threads @pytest.fixture -def authenticator(legacy_token_config): - return TokenAuthenticator(legacy_token_config["api_token"]) +def authenticator(token_config): + return TokenAuthenticator(token_config["credentials"]["api_token"]) + + +def get_stream_by_name(stream_name, config): + streams = SourceSlack().streams(config=config) + for stream in streams: + if stream.name == stream_name: + return stream + raise ValueError(f"Stream {stream_name} not found") @pytest.mark.parametrize( @@ -25,10 +34,10 @@ def authenticator(legacy_token_config): {}, [ # two messages per each channel - {"channel": 3, "ts": 1577866844}, - {"channel": 3, "ts": 1577877406}, - {"channel": 4, "ts": 1577866844}, - {"channel": 4, "ts": 1577877406}, + {'channel': 'airbyte-for-beginners', 'ts': 1577866844}, + {'channel': 'airbyte-for-beginners', 'ts': 1577877406}, + {'channel': 'good-reads', 'ts': 1577866844}, + {'channel': 'good-reads', 'ts': 1577877406}, ], ), ("2020-01-02T00:00:00Z", "2020-01-01T00:00:00Z", [], {}, [{}]), @@ -36,35 +45,36 @@ def authenticator(legacy_token_config): "2020-01-01T00:00:00Z", "2020-01-02T00:00:00Z", [{"ts": 1577866844}, {"ts": 1577877406}], - {"float_ts": 1577915266}, + {"float_ts": 2577866844}, [ - # two messages per each channel per datetime slice - {"channel": 3, "ts": 1577866844}, - {"channel": 3, "ts": 1577877406}, - {"channel": 3, "ts": 1577866844}, - {"channel": 3, "ts": 1577877406}, - {"channel": 4, "ts": 1577866844}, - {"channel": 4, "ts": 1577877406}, - {"channel": 4, "ts": 1577866844}, - {"channel": 4, "ts": 1577877406}, + # no slice when state greater than ts + {}, ], ), ), ) def test_threads_stream_slices( - requests_mock, authenticator, legacy_token_config, start_date, end_date, messages, stream_state, expected_result + requests_mock, authenticator, token_config, start_date, end_date, messages, stream_state, expected_result ): + token_config["channel_filter"] = [] + + requests_mock.register_uri( + "GET", "https://slack.com/api/conversations.history?limit=1000&channel=airbyte-for-beginners", + [{"json": {"messages": messages}}, {"json": {"messages": []}}] + ) requests_mock.register_uri( - "GET", "https://slack.com/api/conversations.history", [{"json": {"messages": messages}}, {"json": {"messages": messages}}] + "GET", "https://slack.com/api/conversations.history?limit=1000&channel=good-reads", + [{"json": {"messages": messages}}, {"json": {"messages": []}}] ) + start_date = pendulum.parse(start_date) end_date = end_date and pendulum.parse(end_date) + stream = Threads( authenticator=authenticator, default_start_date=start_date, end_date=end_date, - lookback_window=pendulum.Duration(days=legacy_token_config["lookback_window"]), - channel_filter=legacy_token_config["channel_filter"], + lookback_window=pendulum.Duration(days=token_config["lookback_window"]) ) slices = list(stream.stream_slices(stream_state=stream_state)) assert slices == expected_result @@ -79,19 +89,64 @@ def test_threads_stream_slices( ({"float_ts": 1577800844}, {"float_ts": 1577866844}, {"float_ts": 1577866844}), ), ) -def test_get_updated_state(authenticator, legacy_token_config, current_state, latest_record, expected_state): +def test_get_updated_state(authenticator, token_config, current_state, latest_record, expected_state): + stream = Threads( authenticator=authenticator, - default_start_date=pendulum.parse(legacy_token_config["start_date"]), - lookback_window=legacy_token_config["lookback_window"], - channel_filter=legacy_token_config["channel_filter"], + default_start_date=pendulum.parse(token_config["start_date"]), + lookback_window=token_config["lookback_window"] ) assert stream.get_updated_state(current_stream_state=current_state, latest_record=latest_record) == expected_state +def test_threads_request_params(authenticator, token_config): + stream = Threads( + authenticator=authenticator, + default_start_date=pendulum.parse(token_config["start_date"]), + lookback_window=token_config["lookback_window"] + ) + threads_slice = {'channel': 'airbyte-for-beginners', 'ts': 1577866844} + expected = {'channel': 'airbyte-for-beginners', 'limit': 1000, 'ts': 1577866844} + assert stream.request_params(stream_slice=threads_slice, stream_state={}) == expected + + +def test_threads_parse_response(mocker, authenticator, token_config): + stream = Threads( + authenticator=authenticator, + default_start_date=pendulum.parse(token_config["start_date"]), + lookback_window=token_config["lookback_window"] + ) + resp = { + "messages": [ + { + "type": "message", + "user": "U061F7AUR", + "text": "island", + "thread_ts": "1482960137.003543", + "reply_count": 3, + "subscribed": True, + "last_read": "1484678597.521003", + "unread_count": 0, + "ts": "1482960137.003543" + } + ] + } + resp_mock = mocker.Mock() + resp_mock.json.return_value = resp + threads_slice = {'channel': 'airbyte-for-beginners', 'ts': 1577866844} + actual_response = list(stream.parse_response(response=resp_mock,stream_slice=threads_slice)) + assert len(actual_response) == 1 + assert actual_response[0]["float_ts"] == 1482960137.003543 + assert actual_response[0]["channel_id"] == "airbyte-for-beginners" + + @pytest.mark.parametrize("headers, expected_result", (({}, 5), ({"Retry-After": 15}, 15))) -def test_backoff(authenticator, headers, expected_result): - stream = Users(authenticator=authenticator) +def test_backoff(token_config, authenticator, headers, expected_result): + stream = Threads( + authenticator=authenticator, + default_start_date=pendulum.parse(token_config["start_date"]), + lookback_window=token_config["lookback_window"] + ) assert stream.backoff_time(Mock(headers=headers)) == expected_result @@ -100,11 +155,38 @@ def test_channels_stream_with_autojoin(authenticator) -> None: The test uses the `conversations_list` fixture(autouse=true) as API mocker. """ expected = [ - {'name': 'advice-data-architecture', 'id': 1, 'is_member': False}, - {'name': 'advice-data-orchestration', 'id': 2, 'is_member': True}, - {'name': 'airbyte-for-beginners', 'id': 3, 'is_member': False}, - {'name': 'good-reads', 'id': 4, 'is_member': True}, + {'id': 'airbyte-for-beginners', 'is_member': True}, + {'id': 'good-reads', 'is_member': True} ] stream = Channels(channel_filter=[], join_channels=True, authenticator=authenticator) assert list(stream.read_records(None)) == expected - \ No newline at end of file + + +def test_next_page_token(authenticator, token_config): + stream = Threads( + authenticator=authenticator, + default_start_date=pendulum.parse(token_config["start_date"]), + lookback_window=token_config["lookback_window"] + ) + mocked_response = Mock() + mocked_response.json.return_value = {"response_metadata": {"next_cursor": "next page"}} + assert stream.next_page_token(mocked_response) == {"cursor": "next page"} + + +@pytest.mark.parametrize( + "status_code, expected", + ( + (200, False), + (403, False), + (429, True), + (500, True), + ), +) +def test_should_retry(authenticator, token_config, status_code, expected): + stream = Threads( + authenticator=authenticator, + default_start_date=pendulum.parse(token_config["start_date"]), + lookback_window=token_config["lookback_window"] + ) + mocked_response = Mock(status_code=status_code) + assert stream.should_retry(mocked_response) == expected diff --git a/airbyte-integrations/connectors/source-snapchat-marketing/metadata.yaml b/airbyte-integrations/connectors/source-snapchat-marketing/metadata.yaml index 82c281aeefcee..22680308aca05 100644 --- a/airbyte-integrations/connectors/source-snapchat-marketing/metadata.yaml +++ b/airbyte-integrations/connectors/source-snapchat-marketing/metadata.yaml @@ -8,11 +8,12 @@ data: connectorSubtype: api connectorType: source definitionId: 200330b2-ea62-4d11-ac6d-cfe3e3f8ab2b - dockerImageTag: 0.4.0 + dockerImageTag: 0.6.0 dockerRepository: airbyte/source-snapchat-marketing githubIssueLabel: source-snapchat-marketing icon: snapchat.svg license: MIT + maxSecondsBetweenMessages: 1 name: Snapchat Marketing remoteRegistries: pypi: diff --git a/airbyte-integrations/connectors/source-snapchat-marketing/poetry.lock b/airbyte-integrations/connectors/source-snapchat-marketing/poetry.lock index 3d45c572b9514..7cfdd7f18c76a 100644 --- a/airbyte-integrations/connectors/source-snapchat-marketing/poetry.lock +++ b/airbyte-integrations/connectors/source-snapchat-marketing/poetry.lock @@ -1,14 +1,14 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.62.1" +version = "0.72.1" description = "A framework for writing Airbyte Connectors." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte-cdk-0.62.1.tar.gz", hash = "sha256:3c934dd8b045079a9c807f699ca2012eaa5df755606e3f5b8b16247cbbd7e8c6"}, - {file = "airbyte_cdk-0.62.1-py3-none-any.whl", hash = "sha256:792399a602b7f5c3cd4ed2a5fce5910cfe3676b9b9199b9208f2d5236f5f42d3"}, + {file = "airbyte-cdk-0.72.1.tar.gz", hash = "sha256:1dbd0a11f3784cfdd5afa9f40315c9a6123e803be91f9f861642a78e7ee14cd9"}, + {file = "airbyte_cdk-0.72.1-py3-none-any.whl", hash = "sha256:849077805442286de99f589ecba4be82491a3d9d3f516ce1a8b0cbaf303db9a4"}, ] [package.dependencies] @@ -32,8 +32,8 @@ requests-cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] @@ -104,13 +104,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -467,13 +467,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -702,13 +702,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -807,13 +807,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -825,15 +825,15 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" @@ -856,19 +856,19 @@ test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "tes [[package]] name = "setuptools" -version = "69.1.0" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -894,13 +894,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -919,13 +919,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1030,4 +1030,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "f0ea85ffbecdae30ea01bfe99f00ec3b7c36bdc9d04c5066a06aa19cb1ec9353" +content-hash = "25d79195c052c9654e64e6cd73809188b3aa16bd228841f214ff871a895c9c6c" diff --git a/airbyte-integrations/connectors/source-snapchat-marketing/pyproject.toml b/airbyte-integrations/connectors/source-snapchat-marketing/pyproject.toml index 3efcb8143a359..a857aaff82068 100644 --- a/airbyte-integrations/connectors/source-snapchat-marketing/pyproject.toml +++ b/airbyte-integrations/connectors/source-snapchat-marketing/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "0.4.0" +version = "0.6.0" name = "source-snapchat-marketing" description = "Source implementation for Snapchat Marketing." authors = [ "Airbyte ",] @@ -17,7 +17,7 @@ include = "source_snapchat_marketing" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "==0.62.1" +airbyte-cdk = "^0" [tool.poetry.scripts] source-snapchat-marketing = "source_snapchat_marketing.run:run" diff --git a/airbyte-integrations/connectors/source-snapchat-marketing/source_snapchat_marketing/source.py b/airbyte-integrations/connectors/source-snapchat-marketing/source_snapchat_marketing/source.py index 87644a6347433..2d5d3368e65a4 100644 --- a/airbyte-integrations/connectors/source-snapchat-marketing/source_snapchat_marketing/source.py +++ b/airbyte-integrations/connectors/source-snapchat-marketing/source_snapchat_marketing/source.py @@ -165,10 +165,13 @@ class SnapchatMarketingStream(HttpStream, ABC): primary_key = "id" raise_on_http_errors = True - def __init__(self, start_date, end_date, **kwargs): + def __init__(self, start_date, end_date, action_report_time, swipe_up_attribution_window, view_attribution_window, **kwargs): super().__init__(**kwargs) self.start_date = start_date self.end_date = end_date + self.action_report_time = action_report_time + self.swipe_up_attribution_window = swipe_up_attribution_window + self.view_attribution_window = view_attribution_window def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: next_page_cursor = response.json().get("paging", False) @@ -246,7 +249,14 @@ def stream_slices(self, **kwargs) -> Iterable[Optional[Mapping[str, Any]]]: self.initial_state = stream_state.get(self.cursor_field) if stream_state else self.start_date self.max_state = self.initial_state - parent_stream = self.parent(authenticator=self.authenticator, start_date=self.start_date, end_date=self.end_date) + parent_stream = self.parent( + authenticator=self.authenticator, + start_date=self.start_date, + end_date=self.end_date, + action_report_time=self.action_report_time, + swipe_up_attribution_window=self.swipe_up_attribution_window, + view_attribution_window=self.view_attribution_window, + ) stream_slices = get_parent_ids(parent_stream) if stream_slices: @@ -368,7 +378,14 @@ def parent(self) -> SnapchatMarketingStream: def stream_slices(self, **kwargs) -> Iterable[Optional[Mapping[str, Any]]]: """Each stream slice represents each entity id from parent stream""" - parent_stream = self.parent(authenticator=self.authenticator, start_date=self.start_date, end_date=self.end_date) + parent_stream = self.parent( + authenticator=self.authenticator, + start_date=self.start_date, + end_date=self.end_date, + action_report_time=self.action_report_time, + swipe_up_attribution_window=self.swipe_up_attribution_window, + view_attribution_window=self.view_attribution_window, + ) self.parent_name = parent_stream.name stream_slices = get_parent_ids(parent_stream) @@ -388,6 +405,9 @@ def request_params( params = super().request_params(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token) params["granularity"] = self.granularity.value + params["action_report_time"] = self.action_report_time + params["swipe_up_attribution_window"] = self.swipe_up_attribution_window + params["view_attribution_window"] = self.view_attribution_window if self.metrics: params["fields"] = ",".join(self.metrics) @@ -808,6 +828,9 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: ), "start_date": config["start_date"], "end_date": config.get("end_date", default_end_date), + "action_report_time": config.get("action_report_time", "conversion"), + "swipe_up_attribution_window": config.get("swipe_up_attribution_window", "28_DAY"), + "view_attribution_window": config.get("view_attribution_window", "1_DAY"), } return [ diff --git a/airbyte-integrations/connectors/source-snapchat-marketing/source_snapchat_marketing/spec.json b/airbyte-integrations/connectors/source-snapchat-marketing/source_snapchat_marketing/spec.json index 6d26cdd036b71..28389e0a3fd02 100644 --- a/airbyte-integrations/connectors/source-snapchat-marketing/source_snapchat_marketing/spec.json +++ b/airbyte-integrations/connectors/source-snapchat-marketing/source_snapchat_marketing/spec.json @@ -45,6 +45,30 @@ "examples": ["2022-01-30"], "order": 4, "format": "date" + }, + "action_report_time": { + "type": "string", + "enum": ["conversion", "impression"], + "title": "Action Report Time", + "description": "Specifies the principle for conversion reporting.", + "default": "conversion", + "order": 5 + }, + "swipe_up_attribution_window": { + "type": "string", + "title": "Swipe Up Attribution Window", + "description": "Attribution window for swipe ups.", + "enum": ["1_DAY", "7_DAY", "28_DAY"], + "default": "28_DAY", + "order": 6 + }, + "view_attribution_window": { + "type": "string", + "title": "View Attribution Window", + "description": "Attribution window for views.", + "enum": ["1_HOUR", "3_HOUR", "6_HOUR", "1_DAY", "7_DAY"], + "default": "1_DAY", + "order": 7 } } }, diff --git a/airbyte-integrations/connectors/source-snapchat-marketing/unit_tests/unit_test.py b/airbyte-integrations/connectors/source-snapchat-marketing/unit_tests/unit_test.py index 04ac41e345155..18842aee9301c 100644 --- a/airbyte-integrations/connectors/source-snapchat-marketing/unit_tests/unit_test.py +++ b/airbyte-integrations/connectors/source-snapchat-marketing/unit_tests/unit_test.py @@ -23,6 +23,9 @@ "authenticator": NoAuth(), "start_date": "2000-01-01", "end_date": "2000-02-10", + "action_report_time": "impression", + "swipe_up_attribution_window": "7_DAY", + "view_attribution_window": "1_DAY", } stats_stream = AdaccountsStatsDaily(**config_mock) diff --git a/airbyte-integrations/connectors/source-stripe/metadata.yaml b/airbyte-integrations/connectors/source-stripe/metadata.yaml index cb75f6f68a4c9..d6e8b4e494695 100644 --- a/airbyte-integrations/connectors/source-stripe/metadata.yaml +++ b/airbyte-integrations/connectors/source-stripe/metadata.yaml @@ -10,12 +10,13 @@ data: connectorSubtype: api connectorType: source definitionId: e094cb9a-26de-4645-8761-65c0c425d1de - dockerImageTag: 5.2.4 + dockerImageTag: 5.3.3 dockerRepository: airbyte/source-stripe documentationUrl: https://docs.airbyte.com/integrations/sources/stripe githubIssueLabel: source-stripe icon: stripe.svg license: ELv2 + maxSecondsBetweenMessages: 1 name: Stripe remoteRegistries: pypi: diff --git a/airbyte-integrations/connectors/source-stripe/poetry.lock b/airbyte-integrations/connectors/source-stripe/poetry.lock index 250155d9a76c7..d5ff023c64980 100644 --- a/airbyte-integrations/connectors/source-stripe/poetry.lock +++ b/airbyte-integrations/connectors/source-stripe/poetry.lock @@ -1,51 +1,50 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.60.1" +version = "0.81.3" description = "A framework for writing Airbyte Connectors." optional = false -python-versions = ">=3.8" +python-versions = "<4.0,>=3.9" files = [ - {file = "airbyte-cdk-0.60.1.tar.gz", hash = "sha256:fc5212b2962c1dc6aca9cc6f1c2000d7636b7509915846c126420c2b0c814317"}, - {file = "airbyte_cdk-0.60.1-py3-none-any.whl", hash = "sha256:94b33c0f6851d1e2546eac3cec54c67489239595d9e0a496ef57c3fc808e89e3"}, + {file = "airbyte_cdk-0.81.3-py3-none-any.whl", hash = "sha256:c168acef484120f5b392cbf0c43bb8180d8596a0c87cfe416ac2e8e7fe1ab93a"}, + {file = "airbyte_cdk-0.81.3.tar.gz", hash = "sha256:e91e7ca66b3f4d5714b44304ff3cb1bb9b703933cf6b38d32e7f06384e9e1108"}, ] [package.dependencies] -airbyte-protocol-models = "0.5.1" +airbyte-protocol-models = "*" backoff = "*" cachetools = "*" -Deprecated = ">=1.2,<2.0" +Deprecated = ">=1.2,<1.3" dpath = ">=2.0.1,<2.1.0" genson = "1.2.2" isodate = ">=0.6.1,<0.7.0" Jinja2 = ">=3.1.2,<3.2.0" -jsonref = ">=0.2,<1.0" +jsonref = ">=0.2,<0.3" jsonschema = ">=3.2.0,<3.3.0" pendulum = "<3.0.0" pydantic = ">=1.10.8,<2.0.0" pyrate-limiter = ">=3.1.0,<3.2.0" python-dateutil = "*" -PyYAML = ">=6.0.1" +PyYAML = ">=6.0.1,<7.0.0" requests = "*" -requests-cache = "*" +requests_cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] [[package]] name = "airbyte-protocol-models" -version = "0.5.1" +version = "0.9.0" description = "Declares the Airbyte Protocol." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, - {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, + {file = "airbyte_protocol_models-0.9.0-py3-none-any.whl", hash = "sha256:e972e140b5efd1edad5a338bcae8fdee9fc12545caf2c321e0f61b151c163a9b"}, + {file = "airbyte_protocol_models-0.9.0.tar.gz", hash = "sha256:40b69c33df23fe82d7078e84beb123bd604480e4d73cb277a890fcc92aedc8d2"}, ] [package.dependencies] @@ -104,13 +103,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -327,13 +326,13 @@ files = [ [[package]] name = "idna" -version = "3.6" +version = "3.7" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.5" files = [ - {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, - {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, ] [[package]] @@ -380,13 +379,13 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jsonref" -version = "0.3.0" -description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." +version = "0.2" +description = "An implementation of JSON Reference for Python" optional = false -python-versions = ">=3.3,<4.0" +python-versions = "*" files = [ - {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, - {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, ] [[package]] @@ -481,13 +480,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -567,47 +566,47 @@ files = [ [[package]] name = "pydantic" -version = "1.10.14" +version = "1.10.15" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, - {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, - {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, - {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, - {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, - {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, - {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, - {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, - {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, - {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, - {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, - {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, - {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, - {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, - {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, - {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, - {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, - {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, - {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, - {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, - {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, - {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, - {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, - {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, - {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, - {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, - {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, - {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, - {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, - {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, - {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, - {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, - {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, - {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, - {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, - {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50f1666a9940d3d68683c9d96e39640f709d7a72ff8702987dab1761036206bb"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82790d4753ee5d00739d6cb5cf56bceb186d9d6ce134aca3ba7befb1eedbc2c8"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d207d5b87f6cbefbdb1198154292faee8017d7495a54ae58db06762004500d00"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e49db944fad339b2ccb80128ffd3f8af076f9f287197a480bf1e4ca053a866f0"}, + {file = "pydantic-1.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:d3b5c4cbd0c9cb61bbbb19ce335e1f8ab87a811f6d589ed52b0254cf585d709c"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3d5731a120752248844676bf92f25a12f6e45425e63ce22e0849297a093b5b0"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c365ad9c394f9eeffcb30a82f4246c0006417f03a7c0f8315d6211f25f7cb654"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3287e1614393119c67bd4404f46e33ae3be3ed4cd10360b48d0a4459f420c6a3"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be51dd2c8596b25fe43c0a4a59c2bee4f18d88efb8031188f9e7ddc6b469cf44"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6a51a1dd4aa7b3f1317f65493a182d3cff708385327c1c82c81e4a9d6d65b2e4"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4e316e54b5775d1eb59187f9290aeb38acf620e10f7fd2f776d97bb788199e53"}, + {file = "pydantic-1.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:0d142fa1b8f2f0ae11ddd5e3e317dcac060b951d605fda26ca9b234b92214986"}, + {file = "pydantic-1.10.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7ea210336b891f5ea334f8fc9f8f862b87acd5d4a0cbc9e3e208e7aa1775dabf"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3453685ccd7140715e05f2193d64030101eaad26076fad4e246c1cc97e1bb30d"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bea1f03b8d4e8e86702c918ccfd5d947ac268f0f0cc6ed71782e4b09353b26f"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:af9850d98fc21e5bc24ea9e35dd80a29faf6462c608728a110c0a30b595e58b7"}, + {file = "pydantic-1.10.15-cp37-cp37m-win_amd64.whl", hash = "sha256:d31ee5b14a82c9afe2bd26aaa405293d4237d0591527d9129ce36e58f19f95c1"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5e09c19df304b8123938dc3c53d3d3be6ec74b9d7d0d80f4f4b5432ae16c2022"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7ac9237cd62947db00a0d16acf2f3e00d1ae9d3bd602b9c415f93e7a9fc10528"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:584f2d4c98ffec420e02305cf675857bae03c9d617fcfdc34946b1160213a948"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbc6989fad0c030bd70a0b6f626f98a862224bc2b1e36bfc531ea2facc0a340c"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d573082c6ef99336f2cb5b667b781d2f776d4af311574fb53d908517ba523c22"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6bd7030c9abc80134087d8b6e7aa957e43d35714daa116aced57269a445b8f7b"}, + {file = "pydantic-1.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:3350f527bb04138f8aff932dc828f154847fbdc7a1a44c240fbfff1b57f49a12"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51d405b42f1b86703555797270e4970a9f9bd7953f3990142e69d1037f9d9e51"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a980a77c52723b0dc56640ced396b73a024d4b74f02bcb2d21dbbac1debbe9d0"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f1a1fb467d3f49e1708a3f632b11c69fccb4e748a325d5a491ddc7b5d22383"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:676ed48f2c5bbad835f1a8ed8a6d44c1cd5a21121116d2ac40bd1cd3619746ed"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92229f73400b80c13afcd050687f4d7e88de9234d74b27e6728aa689abcf58cc"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2746189100c646682eff0bce95efa7d2e203420d8e1c613dc0c6b4c1d9c1fde4"}, + {file = "pydantic-1.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:394f08750bd8eaad714718812e7fab615f873b3cdd0b9d84e76e51ef3b50b6b7"}, + {file = "pydantic-1.10.15-py3-none-any.whl", hash = "sha256:28e552a060ba2740d0d2aabe35162652c1459a0b9069fe0db7f4ee0e18e74d58"}, + {file = "pydantic-1.10.15.tar.gz", hash = "sha256:ca832e124eda231a60a041da4f013e3ff24949d94a01154b137fc2f2a43c3ffb"}, ] [package.dependencies] @@ -699,30 +698,30 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xm [[package]] name = "pytest-mock" -version = "3.12.0" +version = "3.14.0" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, - {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, ] [package.dependencies] -pytest = ">=5.0" +pytest = ">=6.2.5" [package.extras] dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -822,13 +821,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -840,50 +839,48 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" -version = "1.11.0" +version = "1.12.1" description = "Mock out responses from the requests package" optional = false -python-versions = "*" +python-versions = ">=3.5" files = [ - {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"}, - {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"}, + {file = "requests-mock-1.12.1.tar.gz", hash = "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401"}, + {file = "requests_mock-1.12.1-py2.py3-none-any.whl", hash = "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563"}, ] [package.dependencies] -requests = ">=2.3,<3" -six = "*" +requests = ">=2.22,<3" [package.extras] fixture = ["fixtures"] -test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] [[package]] name = "setuptools" -version = "69.0.3" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, - {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -923,13 +920,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.11.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, + {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, ] [[package]] @@ -948,13 +945,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1059,4 +1056,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "e1b6a4bb5a2d863623daeb1a4194106b45024cdba1d06cfbfe85a91949cad482" +content-hash = "7fd6fcaf6dc4aa713030d1f160fa62a546fba7046d20e5a31d27bd8b49e36ae4" diff --git a/airbyte-integrations/connectors/source-stripe/pyproject.toml b/airbyte-integrations/connectors/source-stripe/pyproject.toml index fc915f21ffaa8..fa9eb8c634990 100644 --- a/airbyte-integrations/connectors/source-stripe/pyproject.toml +++ b/airbyte-integrations/connectors/source-stripe/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "5.2.4" +version = "5.3.3" name = "source-stripe" description = "Source implementation for Stripe." authors = [ "Airbyte ",] @@ -19,7 +19,7 @@ include = "source_stripe" python = "^3.9,<3.12" stripe = "==2.56.0" pendulum = "==2.1.2" -airbyte-cdk = "==0.60.1" +airbyte-cdk = "^0" [tool.poetry.scripts] source-stripe = "source_stripe.run:run" diff --git a/airbyte-integrations/connectors/source-stripe/source_stripe/source.py b/airbyte-integrations/connectors/source-stripe/source_stripe/source.py index 49479c5cc78ec..c2698eaaf8a1b 100644 --- a/airbyte-integrations/connectors/source-stripe/source_stripe/source.py +++ b/airbyte-integrations/connectors/source-stripe/source_stripe/source.py @@ -4,7 +4,7 @@ import logging import os -from datetime import timedelta +from datetime import datetime, timedelta, timezone from typing import Any, List, Mapping, MutableMapping, Optional, Tuple import pendulum @@ -20,7 +20,7 @@ from airbyte_cdk.sources.streams import Stream from airbyte_cdk.sources.streams.call_rate import AbstractAPIBudget, HttpAPIBudget, HttpRequestMatcher, MovingWindowCallRatePolicy, Rate from airbyte_cdk.sources.streams.concurrent.adapters import StreamFacade -from airbyte_cdk.sources.streams.concurrent.cursor import Comparable, ConcurrentCursor, CursorField, NoopCursor +from airbyte_cdk.sources.streams.concurrent.cursor import ConcurrentCursor, CursorField, FinalStateCursor from airbyte_cdk.sources.streams.concurrent.state_converters.datetime_stream_state_converter import EpochValueConcurrentStreamStateConverter from airbyte_cdk.sources.streams.http.auth import TokenAuthenticator from airbyte_cdk.utils.traced_exception import AirbyteTracedException @@ -520,11 +520,27 @@ def streams(self, config: MutableMapping[str, Any]) -> List[Stream]: ] state_manager = ConnectorStateManager(stream_instance_map={s.name: s for s in streams}, state=self._state) - return [self._to_concurrent(stream, self._start_date_to_timestamp(config), state_manager) for stream in streams] + return [ + self._to_concurrent( + stream, + datetime.fromtimestamp(self._start_date_to_timestamp(config), timezone.utc), + timedelta(days=config["slice_range"]), + state_manager, + ) + for stream in streams + ] - def _to_concurrent(self, stream: Stream, fallback_start, state_manager: ConnectorStateManager) -> Stream: + def _to_concurrent( + self, stream: Stream, fallback_start: datetime, slice_range: timedelta, state_manager: ConnectorStateManager + ) -> Stream: if stream.name in self._streams_configured_as_full_refresh: - return StreamFacade.create_from_stream(stream, self, entrypoint_logger, self._create_empty_state(), NoopCursor()) + return StreamFacade.create_from_stream( + stream, + self, + entrypoint_logger, + self._create_empty_state(), + FinalStateCursor(stream_name=stream.name, stream_namespace=stream.namespace, message_repository=self.message_repository), + ) state = state_manager.get_stream_state(stream.name, stream.namespace) slice_boundary_fields = self._SLICE_BOUNDARY_FIELDS_BY_IMPLEMENTATION.get(type(stream)) @@ -541,6 +557,9 @@ def _to_concurrent(self, stream: Stream, fallback_start, state_manager: Connecto cursor_field, slice_boundary_fields, fallback_start, + converter.get_end_provider(), + timedelta(seconds=0), + slice_range, ) return StreamFacade.create_from_stream(stream, self, entrypoint_logger, state, cursor) diff --git a/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_application_fees.py b/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_application_fees.py index b7028590f4464..84fb1b7115d28 100644 --- a/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_application_fees.py +++ b/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_application_fees.py @@ -19,7 +19,15 @@ find_template, ) from airbyte_cdk.test.state_builder import StateBuilder -from airbyte_protocol.models import AirbyteStateMessage, ConfiguredAirbyteCatalog, FailureType, SyncMode +from airbyte_protocol.models import ( + AirbyteStateBlob, + AirbyteStateMessage, + AirbyteStreamState, + ConfiguredAirbyteCatalog, + FailureType, + StreamDescriptor, + SyncMode, +) from integration.config import ConfigBuilder from integration.pagination import StripePaginationStrategy from integration.request_builder import StripeRequestBuilder @@ -276,7 +284,9 @@ def test_given_no_state_when_read_then_use_application_fees_endpoint(self, http_ _application_fees_response().with_record(_an_application_fee().with_cursor(cursor_value)).build(), ) output = self._read(_config().with_start_date(_A_START_DATE), _NO_STATE) - assert output.most_recent_state == {_STREAM_NAME: {"updated": cursor_value}} + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated=cursor_value) @HttpMocker() def test_given_state_when_read_then_query_events_using_types_and_state_value_plus_1(self, http_mocker: HttpMocker) -> None: @@ -298,7 +308,9 @@ def test_given_state_when_read_then_query_events_using_types_and_state_value_plu StateBuilder().with_stream_state(_STREAM_NAME, {"updated": int(state_datetime.timestamp())}).build(), ) - assert output.most_recent_state == {_STREAM_NAME: {"updated": cursor_value}} + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated=cursor_value) @HttpMocker() def test_given_state_and_pagination_when_read_then_return_records(self, http_mocker: HttpMocker) -> None: diff --git a/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_application_fees_refunds.py b/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_application_fees_refunds.py index bfde5e409d11a..0394d74ef5d3d 100644 --- a/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_application_fees_refunds.py +++ b/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_application_fees_refunds.py @@ -21,7 +21,7 @@ find_template, ) from airbyte_cdk.test.state_builder import StateBuilder -from airbyte_protocol.models import ConfiguredAirbyteCatalog, FailureType, SyncMode +from airbyte_protocol.models import AirbyteStateBlob, AirbyteStreamState, ConfiguredAirbyteCatalog, FailureType, StreamDescriptor, SyncMode from integration.config import ConfigBuilder from integration.pagination import StripePaginationStrategy from integration.request_builder import StripeRequestBuilder @@ -421,7 +421,9 @@ def test_given_no_state_when_read_then_use_application_fees_endpoint(self, http_ output = self._read(_config().with_start_date(_A_START_DATE), _NO_STATE) - assert output.most_recent_state == {_STREAM_NAME: {"updated": cursor_value}} + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated=cursor_value) @HttpMocker() def test_given_state_when_read_then_query_events_using_types_and_state_value_plus_1(self, http_mocker: HttpMocker) -> None: @@ -443,7 +445,9 @@ def test_given_state_when_read_then_query_events_using_types_and_state_value_plu StateBuilder().with_stream_state(_STREAM_NAME, {"updated": int(state_datetime.timestamp())}).build(), ) - assert output.most_recent_state == {_STREAM_NAME: {"updated": cursor_value}} + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated=cursor_value) @HttpMocker() def test_given_state_and_pagination_when_read_then_return_records(self, http_mocker: HttpMocker) -> None: diff --git a/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_authorizations.py b/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_authorizations.py index 90e61aad3166a..177e293f41b48 100644 --- a/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_authorizations.py +++ b/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_authorizations.py @@ -19,7 +19,7 @@ find_template, ) from airbyte_cdk.test.state_builder import StateBuilder -from airbyte_protocol.models import ConfiguredAirbyteCatalog, FailureType, SyncMode +from airbyte_protocol.models import AirbyteStateBlob, AirbyteStreamState, ConfiguredAirbyteCatalog, FailureType, StreamDescriptor, SyncMode from integration.config import ConfigBuilder from integration.pagination import StripePaginationStrategy from integration.request_builder import StripeRequestBuilder @@ -276,7 +276,9 @@ def test_given_no_state_when_read_then_use_authorizations_endpoint(self, http_mo _authorizations_response().with_record(_an_authorization().with_cursor(cursor_value)).build(), ) output = self._read(_config().with_start_date(_A_START_DATE), _NO_STATE) - assert output.most_recent_state == {_STREAM_NAME: {"updated": cursor_value}} + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated=cursor_value) @HttpMocker() def test_given_state_when_read_then_query_events_using_types_and_state_value_plus_1(self, http_mocker: HttpMocker) -> None: @@ -298,7 +300,9 @@ def test_given_state_when_read_then_query_events_using_types_and_state_value_plu StateBuilder().with_stream_state(_STREAM_NAME, {"updated": int(state_datetime.timestamp())}).build(), ) - assert output.most_recent_state == {_STREAM_NAME: {"updated": cursor_value}} + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated=cursor_value) @HttpMocker() def test_given_state_and_pagination_when_read_then_return_records(self, http_mocker: HttpMocker) -> None: diff --git a/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_bank_accounts.py b/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_bank_accounts.py index db5c32d5d9a3a..596fb3655f98c 100644 --- a/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_bank_accounts.py +++ b/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_bank_accounts.py @@ -21,7 +21,7 @@ find_template, ) from airbyte_cdk.test.state_builder import StateBuilder -from airbyte_protocol.models import ConfiguredAirbyteCatalog, FailureType, SyncMode +from airbyte_protocol.models import AirbyteStateBlob, AirbyteStreamState, ConfiguredAirbyteCatalog, FailureType, StreamDescriptor, SyncMode from integration.config import ConfigBuilder from integration.pagination import StripePaginationStrategy from integration.request_builder import StripeRequestBuilder @@ -447,7 +447,9 @@ def test_given_no_state_and_successful_sync_when_read_then_set_state_to_now(self output = self._read(_config().with_start_date(_A_START_DATE), _NO_STATE) - assert output.most_recent_state == {_STREAM_NAME: {"updated": int(_NOW.timestamp())}} + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated=int(_NOW.timestamp())) @HttpMocker() def test_given_state_when_read_then_query_events_using_types_and_state_value_plus_1(self, http_mocker: HttpMocker) -> None: @@ -469,7 +471,9 @@ def test_given_state_when_read_then_query_events_using_types_and_state_value_plu StateBuilder().with_stream_state(_STREAM_NAME, {"updated": int(state_datetime.timestamp())}).build(), ) - assert output.most_recent_state == {_STREAM_NAME: {"updated": cursor_value}} + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated=cursor_value) @HttpMocker() def test_given_state_and_pagination_when_read_then_return_records(self, http_mocker: HttpMocker) -> None: diff --git a/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_cards.py b/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_cards.py index 413c1e15d2a95..383fb4808e5f8 100644 --- a/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_cards.py +++ b/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_cards.py @@ -19,7 +19,7 @@ find_template, ) from airbyte_cdk.test.state_builder import StateBuilder -from airbyte_protocol.models import ConfiguredAirbyteCatalog, FailureType, SyncMode +from airbyte_protocol.models import AirbyteStateBlob, AirbyteStreamState, ConfiguredAirbyteCatalog, FailureType, StreamDescriptor, SyncMode from integration.config import ConfigBuilder from integration.pagination import StripePaginationStrategy from integration.request_builder import StripeRequestBuilder @@ -276,7 +276,9 @@ def test_given_no_state_when_read_then_use_cards_endpoint(self, http_mocker: Htt _cards_response().with_record(_a_card().with_cursor(cursor_value)).build(), ) output = self._read(_config().with_start_date(_A_START_DATE), _NO_STATE) - assert output.most_recent_state == {_STREAM_NAME: {"updated": cursor_value}} + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated=cursor_value) @HttpMocker() def test_given_state_when_read_then_query_events_using_types_and_state_value_plus_1(self, http_mocker: HttpMocker) -> None: @@ -298,7 +300,9 @@ def test_given_state_when_read_then_query_events_using_types_and_state_value_plu StateBuilder().with_stream_state(_STREAM_NAME, {"updated": int(state_datetime.timestamp())}).build(), ) - assert output.most_recent_state == {_STREAM_NAME: {"updated": cursor_value}} + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated=cursor_value) @HttpMocker() def test_given_state_and_pagination_when_read_then_return_records(self, http_mocker: HttpMocker) -> None: diff --git a/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_early_fraud_warnings.py b/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_early_fraud_warnings.py index 7f8a0800b97e8..bb339fd751f0f 100644 --- a/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_early_fraud_warnings.py +++ b/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_early_fraud_warnings.py @@ -19,7 +19,7 @@ find_template, ) from airbyte_cdk.test.state_builder import StateBuilder -from airbyte_protocol.models import ConfiguredAirbyteCatalog, FailureType, SyncMode +from airbyte_protocol.models import AirbyteStateBlob, AirbyteStreamState, ConfiguredAirbyteCatalog, FailureType, StreamDescriptor, SyncMode from integration.config import ConfigBuilder from integration.pagination import StripePaginationStrategy from integration.request_builder import StripeRequestBuilder @@ -244,7 +244,9 @@ def test_given_no_state_when_read_then_use_early_fraud_warnings_endpoint(self, h _early_fraud_warnings_response().with_record(_an_early_fraud_warning().with_cursor(cursor_value)).build(), ) output = self._read(_config().with_start_date(_A_START_DATE), _NO_STATE) - assert output.most_recent_state == {_STREAM_NAME: {"updated": cursor_value}} + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated=cursor_value) @HttpMocker() def test_given_state_when_read_then_query_events_using_types_and_state_value_plus_1(self, http_mocker: HttpMocker) -> None: @@ -266,7 +268,9 @@ def test_given_state_when_read_then_query_events_using_types_and_state_value_plu StateBuilder().with_stream_state(_STREAM_NAME, {"updated": int(state_datetime.timestamp())}).build(), ) - assert output.most_recent_state == {_STREAM_NAME: {"updated": cursor_value}} + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated=cursor_value) @HttpMocker() def test_given_state_and_pagination_when_read_then_return_records(self, http_mocker: HttpMocker) -> None: diff --git a/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_events.py b/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_events.py index 14942b03c54c2..27077e8ec8e10 100644 --- a/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_events.py +++ b/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_events.py @@ -18,7 +18,7 @@ find_template, ) from airbyte_cdk.test.state_builder import StateBuilder -from airbyte_protocol.models import ConfiguredAirbyteCatalog, FailureType, SyncMode +from airbyte_protocol.models import AirbyteStateBlob, AirbyteStreamState, ConfiguredAirbyteCatalog, FailureType, StreamDescriptor, SyncMode from integration.config import ConfigBuilder from integration.pagination import StripePaginationStrategy from integration.request_builder import StripeRequestBuilder @@ -230,7 +230,9 @@ def test_given_no_initial_state_when_read_then_return_state_based_on_cursor_fiel _a_response().with_record(_a_record().with_cursor(cursor_value)).build(), ) output = self._read(_config().with_start_date(_A_START_DATE), _NO_STATE) - assert output.most_recent_state == {"events": {"created": int(_NOW.timestamp())}} + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(created=int(_NOW.timestamp())) @HttpMocker() def test_given_state_when_read_then_use_state_for_query_params(self, http_mocker: HttpMocker) -> None: @@ -270,7 +272,9 @@ def test_given_state_more_recent_than_cursor_when_read_then_return_state_based_o StateBuilder().with_stream_state("events", {"created": more_recent_than_record_cursor}).build() ) - assert output.most_recent_state == {"events": {"created": more_recent_than_record_cursor}} + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(created=more_recent_than_record_cursor) def _read(self, config: ConfigBuilder, state: Optional[Dict[str, Any]], expecting_exception: bool = False) -> EntrypointOutput: return _read(config, SyncMode.incremental, state, expecting_exception) diff --git a/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_external_account_bank_accounts.py b/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_external_account_bank_accounts.py index cbd08bce1a5cb..0aed970b586a0 100644 --- a/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_external_account_bank_accounts.py +++ b/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_external_account_bank_accounts.py @@ -19,7 +19,7 @@ find_template, ) from airbyte_cdk.test.state_builder import StateBuilder -from airbyte_protocol.models import ConfiguredAirbyteCatalog, FailureType, SyncMode +from airbyte_protocol.models import AirbyteStateBlob, AirbyteStreamState, ConfiguredAirbyteCatalog, FailureType, StreamDescriptor, SyncMode from integration.config import ConfigBuilder from integration.pagination import StripePaginationStrategy from integration.request_builder import StripeRequestBuilder @@ -243,7 +243,9 @@ def test_given_no_state_when_read_then_use_external_accounts_endpoint(self, http _external_bank_accounts_response().with_record(_an_external_bank_account()).build(), ) output = self._read(_config().with_start_date(_A_START_DATE), _NO_STATE) - assert output.most_recent_state == {_STREAM_NAME: {"updated": int(_NOW.timestamp())}} + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated=int(_NOW.timestamp())) @HttpMocker() def test_given_state_when_read_then_query_events_using_types_and_state_value_plus_1(self, http_mocker: HttpMocker) -> None: @@ -265,7 +267,9 @@ def test_given_state_when_read_then_query_events_using_types_and_state_value_plu StateBuilder().with_stream_state(_STREAM_NAME, {"updated": int(state_datetime.timestamp())}).build(), ) - assert output.most_recent_state == {_STREAM_NAME: {"updated": cursor_value}} + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated=cursor_value) @HttpMocker() def test_given_object_is_not_back_account_when_read_then_filter_out(self, http_mocker: HttpMocker) -> None: diff --git a/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_external_account_cards.py b/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_external_account_cards.py index 3635f7bd6d6d8..8cfed13b5f914 100644 --- a/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_external_account_cards.py +++ b/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_external_account_cards.py @@ -19,7 +19,7 @@ find_template, ) from airbyte_cdk.test.state_builder import StateBuilder -from airbyte_protocol.models import ConfiguredAirbyteCatalog, FailureType, SyncMode +from airbyte_protocol.models import AirbyteStateBlob, AirbyteStreamState, ConfiguredAirbyteCatalog, FailureType, StreamDescriptor, SyncMode from integration.config import ConfigBuilder from integration.pagination import StripePaginationStrategy from integration.request_builder import StripeRequestBuilder @@ -248,7 +248,9 @@ def test_given_no_state_when_read_then_use_external_accounts_endpoint(self, http _external_accounts_card_response().with_record(_an_external_account_card()).build(), ) output = self._read(_config().with_start_date(_A_START_DATE), _NO_STATE) - assert output.most_recent_state == {_STREAM_NAME: {"updated": int(_NOW.timestamp())}} + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated=int(_NOW.timestamp())) @HttpMocker() def test_given_state_when_read_then_query_events_using_types_and_state_value_plus_1(self, http_mocker: HttpMocker) -> None: @@ -270,7 +272,9 @@ def test_given_state_when_read_then_query_events_using_types_and_state_value_plu StateBuilder().with_stream_state(_STREAM_NAME, {"updated": int(state_datetime.timestamp())}).build(), ) - assert output.most_recent_state == {_STREAM_NAME: {"updated": cursor_value}} + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated=cursor_value) @HttpMocker() def test_given_object_is_not_back_account_when_read_then_filter_out(self, http_mocker: HttpMocker) -> None: diff --git a/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_payment_methods.py b/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_payment_methods.py index 10785b6c47770..a40b9e9153d42 100644 --- a/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_payment_methods.py +++ b/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_payment_methods.py @@ -19,7 +19,7 @@ find_template, ) from airbyte_cdk.test.state_builder import StateBuilder -from airbyte_protocol.models import ConfiguredAirbyteCatalog, FailureType, SyncMode +from airbyte_protocol.models import AirbyteStateBlob, AirbyteStreamState, ConfiguredAirbyteCatalog, FailureType, StreamDescriptor, SyncMode from integration.config import ConfigBuilder from integration.pagination import StripePaginationStrategy from integration.request_builder import StripeRequestBuilder @@ -249,7 +249,9 @@ def test_given_no_state_when_read_then_use_payment_methods_endpoint(self, http_m _payment_methods_response().with_record(_a_payment_method().with_cursor(cursor_value)).build(), ) output = self._read(_config().with_start_date(_A_START_DATE), _NO_STATE) - assert output.most_recent_state == {_STREAM_NAME: {"updated": cursor_value}} + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated=cursor_value) @HttpMocker() def test_given_state_when_read_then_query_events_using_types_and_state_value_plus_1(self, http_mocker: HttpMocker) -> None: @@ -271,7 +273,9 @@ def test_given_state_when_read_then_query_events_using_types_and_state_value_plu StateBuilder().with_stream_state(_STREAM_NAME, {"updated": int(state_datetime.timestamp())}).build(), ) - assert output.most_recent_state == {_STREAM_NAME: {"updated": cursor_value}} + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated=cursor_value) @HttpMocker() def test_given_state_and_pagination_when_read_then_return_records(self, http_mocker: HttpMocker) -> None: diff --git a/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_persons.py b/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_persons.py index 7df5857b0000b..df420a1b97900 100644 --- a/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_persons.py +++ b/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_persons.py @@ -19,7 +19,16 @@ find_template, ) from airbyte_cdk.test.state_builder import StateBuilder -from airbyte_protocol.models import AirbyteStreamStatus, Level +from airbyte_protocol.models import ( + AirbyteStateBlob, + AirbyteStreamState, + AirbyteStreamStatus, + ConfiguredAirbyteCatalog, + FailureType, + Level, + StreamDescriptor, + SyncMode, +) from integration.config import ConfigBuilder from integration.pagination import StripePaginationStrategy from integration.request_builder import StripeRequestBuilder @@ -316,7 +325,9 @@ def test_incremental_with_recent_state(self, http_mocker: HttpMocker): ) assert emits_successful_sync_status_messages(actual_messages.get_stream_statuses(_STREAM_NAME)) - assert actual_messages.most_recent_state == {"persons": {"updated": int(state_datetime.timestamp())}} + most_recent_state = actual_messages.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated=int(state_datetime.timestamp())) assert len(actual_messages.records) == 1 @HttpMocker() @@ -355,7 +366,9 @@ def test_incremental_with_deleted_event(self, http_mocker: HttpMocker): ) assert emits_successful_sync_status_messages(actual_messages.get_stream_statuses(_STREAM_NAME)) - assert actual_messages.most_recent_state == {"persons": {"updated": int(state_datetime.timestamp())}} + most_recent_state = actual_messages.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated=int(state_datetime.timestamp())) assert len(actual_messages.records) == 1 assert actual_messages.records[0].record.data.get("is_deleted") @@ -391,7 +404,9 @@ def test_incremental_with_newer_start_date(self, http_mocker): ) assert emits_successful_sync_status_messages(actual_messages.get_stream_statuses(_STREAM_NAME)) - assert actual_messages.most_recent_state == {"persons": {"updated": int(state_datetime.timestamp())}} + most_recent_state = actual_messages.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated=int(state_datetime.timestamp())) assert len(actual_messages.records) == 1 @HttpMocker() @@ -490,7 +505,9 @@ def test_rate_limited_incremental_events(self, http_mocker: HttpMocker) -> None: ) assert emits_successful_sync_status_messages(actual_messages.get_stream_statuses(_STREAM_NAME)) - assert actual_messages.most_recent_state == {"persons": {"updated": int(state_datetime.timestamp())}} + most_recent_state = actual_messages.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name="persons") + assert most_recent_state.stream_state == AirbyteStateBlob(updated=int(state_datetime.timestamp())) assert len(actual_messages.records) == 1 @HttpMocker() @@ -558,7 +575,7 @@ def test_incremental_rate_limit_max_attempts_exceeded(self, http_mocker: HttpMoc state=state, ) - assert len(actual_messages.errors) == 1 + assert len(actual_messages.errors) == 2 @HttpMocker() def test_server_error_parent_stream_accounts(self, http_mocker: HttpMocker) -> None: diff --git a/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_reviews.py b/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_reviews.py index d454faec79e8a..c5de0bec903c3 100644 --- a/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_reviews.py +++ b/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_reviews.py @@ -19,7 +19,7 @@ find_template, ) from airbyte_cdk.test.state_builder import StateBuilder -from airbyte_protocol.models import ConfiguredAirbyteCatalog, FailureType, SyncMode +from airbyte_protocol.models import AirbyteStateBlob, AirbyteStreamState, ConfiguredAirbyteCatalog, FailureType, StreamDescriptor, SyncMode from integration.config import ConfigBuilder from integration.pagination import StripePaginationStrategy from integration.request_builder import StripeRequestBuilder @@ -276,7 +276,9 @@ def test_given_no_state_when_read_then_use_reviews_endpoint(self, http_mocker: H _reviews_response().with_record(_a_review().with_cursor(cursor_value)).build(), ) output = self._read(_config().with_start_date(_A_START_DATE), _NO_STATE) - assert output.most_recent_state == {_STREAM_NAME: {"updated": cursor_value}} + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated=cursor_value) @HttpMocker() def test_given_state_when_read_then_query_events_using_types_and_state_value_plus_1(self, http_mocker: HttpMocker) -> None: @@ -298,7 +300,9 @@ def test_given_state_when_read_then_query_events_using_types_and_state_value_plu StateBuilder().with_stream_state(_STREAM_NAME, {"updated": int(state_datetime.timestamp())}).build(), ) - assert output.most_recent_state == {_STREAM_NAME: {"updated": cursor_value}} + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated=cursor_value) @HttpMocker() def test_given_state_and_pagination_when_read_then_return_records(self, http_mocker: HttpMocker) -> None: diff --git a/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_transactions.py b/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_transactions.py index f0a04e093760e..bf46587e21e36 100644 --- a/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_transactions.py +++ b/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_transactions.py @@ -19,7 +19,7 @@ find_template, ) from airbyte_cdk.test.state_builder import StateBuilder -from airbyte_protocol.models import ConfiguredAirbyteCatalog, FailureType, SyncMode +from airbyte_protocol.models import AirbyteStateBlob, AirbyteStreamState, ConfiguredAirbyteCatalog, FailureType, StreamDescriptor, SyncMode from integration.config import ConfigBuilder from integration.pagination import StripePaginationStrategy from integration.request_builder import StripeRequestBuilder @@ -276,7 +276,9 @@ def test_given_no_state_when_read_then_use_transactions_endpoint(self, http_mock _transactions_response().with_record(_a_transaction().with_cursor(cursor_value)).build(), ) output = self._read(_config().with_start_date(_A_START_DATE), _NO_STATE) - assert output.most_recent_state == {_STREAM_NAME: {"updated": cursor_value}} + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated=cursor_value) @HttpMocker() def test_given_state_when_read_then_query_events_using_types_and_state_value_plus_1(self, http_mocker: HttpMocker) -> None: @@ -298,7 +300,9 @@ def test_given_state_when_read_then_query_events_using_types_and_state_value_plu StateBuilder().with_stream_state(_STREAM_NAME, {"updated": int(state_datetime.timestamp())}).build(), ) - assert output.most_recent_state == {_STREAM_NAME: {"updated": cursor_value}} + most_recent_state = output.most_recent_state + assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME) + assert most_recent_state.stream_state == AirbyteStateBlob(updated=cursor_value) @HttpMocker() def test_given_state_and_pagination_when_read_then_return_records(self, http_mocker: HttpMocker) -> None: diff --git a/airbyte-integrations/connectors/source-surveymonkey/.coveragerc b/airbyte-integrations/connectors/source-surveymonkey/.coveragerc new file mode 100644 index 0000000000000..ade4238d8efc4 --- /dev/null +++ b/airbyte-integrations/connectors/source-surveymonkey/.coveragerc @@ -0,0 +1,3 @@ +[run] +omit = + source_surveymonkey/run.py diff --git a/airbyte-integrations/connectors/source-surveymonkey/README.md b/airbyte-integrations/connectors/source-surveymonkey/README.md index 37e81d149ab05..f7b91bd3d6d3f 100644 --- a/airbyte-integrations/connectors/source-surveymonkey/README.md +++ b/airbyte-integrations/connectors/source-surveymonkey/README.md @@ -30,7 +30,7 @@ See `sample_files/sample_config.json` for a sample config file. poetry run source-surveymonkey spec poetry run source-surveymonkey check --config secrets/config.json poetry run source-surveymonkey discover --config secrets/config.json -poetry run source-surveymonkey read --config secrets/config.json --catalog sample_files/configured_catalog.json +poetry run source-surveymonkey read --config secrets/config.json --catalog integration_tests/configured_catalog.json ``` ### Running unit tests diff --git a/airbyte-integrations/connectors/source-surveymonkey/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-surveymonkey/integration_tests/abnormal_state.json index 30c1a1bd3ea9c..b8482a6b2f98e 100644 --- a/airbyte-integrations/connectors/source-surveymonkey/integration_tests/abnormal_state.json +++ b/airbyte-integrations/connectors/source-surveymonkey/integration_tests/abnormal_state.json @@ -9,22 +9,243 @@ { "type": "STREAM", "stream": { - "stream_state": { - "306079584": { "date_modified": "2023-01-19T10:17:18+00:00" }, - "307785429": { "date_modified": "2023-01-19T10:59:43+00:00" }, - "307785444": { "date_modified": "2023-01-19T10:00:19+00:00" }, - "307785394": { "date_modified": "2023-01-19T10:00:59+00:00" }, - "307785402": { "date_modified": "2023-01-19T10:01:31+00:00" }, - "307785408": { "date_modified": "2023-01-19T10:02:08+00:00" }, - "307785448": { "date_modified": "2023-01-19T10:02:49+00:00" }, - "307784834": { "date_modified": "2023-01-19T10:03:45+00:00" }, - "307784863": { "date_modified": "2023-01-19T10:04:29+00:00" }, - "307784846": { "date_modified": "2023-01-19T10:05:05+00:00" }, - "307784856": { "date_modified": "2023-01-19T10:05:44+00:00" }, - "307785388": { "date_modified": "2023-01-19T10:06:20+00:00" }, - "307785415": { "date_modified": "2023-01-19T10:06:43+00:00" } + "stream_descriptor": { + "name": "survey_responses" }, - "stream_descriptor": { "name": "survey_responses" } + "stream_state": { + "states": [ + { + "partition": { + "survey_id": "306079584" + }, + "cursor": { + "date_modified": "2025-02-27T12:39:31" + } + }, + { + "partition": { + "survey_id": "307785429" + }, + "cursor": { + "date_modified": "2025-02-27T12:39:32" + } + }, + { + "partition": { + "survey_id": "307785444" + }, + "cursor": { + "date_modified": "2025-02-27T12:39:33" + } + }, + { + "partition": { + "survey_id": "307785394" + }, + "cursor": { + "date_modified": "2025-02-27T12:39:34" + } + }, + { + "partition": { + "survey_id": "307785402" + }, + "cursor": { + "date_modified": "2025-02-27T12:39:36" + } + }, + { + "partition": { + "survey_id": "307785408" + }, + "cursor": { + "date_modified": "2025-02-27T12:39:37" + } + }, + { + "partition": { + "survey_id": "307784834" + }, + "cursor": { + "date_modified": "2025-02-27T12:39:38" + } + }, + { + "partition": { + "survey_id": "307785448" + }, + "cursor": { + "date_modified": "2025-02-27T12:39:40" + } + }, + { + "partition": { + "survey_id": "307784863" + }, + "cursor": { + "date_modified": "2025-02-27T12:39:41" + } + }, + { + "partition": { + "survey_id": "307784846" + }, + "cursor": { + "date_modified": "2025-02-27T12:39:42" + } + }, + { + "partition": { + "survey_id": "307784856" + }, + "cursor": { + "date_modified": "2025-02-27T12:39:43" + } + }, + { + "partition": { + "survey_id": "307785388" + }, + "cursor": { + "date_modified": "2025-02-27T12:39:45" + } + }, + { + "partition": { + "survey_id": "307785415" + }, + "cursor": { + "date_modified": "2025-02-27T12:39:46" + } + }, + { + "partition": { + "survey_id": "510388524" + }, + "cursor": { + "date_modified": "2025-02-27T12:39:48" + } + } + ] + } + }, + "data": { + "survey_responses": { + "states": [ + { + "partition": { + "survey_id": "306079584" + }, + "cursor": { + "date_modified": "2025-02-27T12:39:31" + } + }, + { + "partition": { + "survey_id": "307785429" + }, + "cursor": { + "date_modified": "2025-02-27T12:39:32" + } + }, + { + "partition": { + "survey_id": "307785444" + }, + "cursor": { + "date_modified": "2025-02-27T12:39:33" + } + }, + { + "partition": { + "survey_id": "307785394" + }, + "cursor": { + "date_modified": "2025-02-27T12:39:34" + } + }, + { + "partition": { + "survey_id": "307785402" + }, + "cursor": { + "date_modified": "2025-02-27T12:39:36" + } + }, + { + "partition": { + "survey_id": "307785408" + }, + "cursor": { + "date_modified": "2025-02-27T12:39:37" + } + }, + { + "partition": { + "survey_id": "307784834" + }, + "cursor": { + "date_modified": "2025-02-27T12:39:38" + } + }, + { + "partition": { + "survey_id": "307785448" + }, + "cursor": { + "date_modified": "2025-02-27T12:39:40" + } + }, + { + "partition": { + "survey_id": "307784863" + }, + "cursor": { + "date_modified": "2025-02-27T12:39:41" + } + }, + { + "partition": { + "survey_id": "307784846" + }, + "cursor": { + "date_modified": "2025-02-27T12:39:42" + } + }, + { + "partition": { + "survey_id": "307784856" + }, + "cursor": { + "date_modified": "2025-02-27T12:39:43" + } + }, + { + "partition": { + "survey_id": "307785388" + }, + "cursor": { + "date_modified": "2025-02-27T12:39:45" + } + }, + { + "partition": { + "survey_id": "307785415" + }, + "cursor": { + "date_modified": "2025-02-27T12:39:46" + } + }, + { + "partition": { + "survey_id": "510388524" + }, + "cursor": { + "date_modified": "2025-02-27T12:39:48" + } + } + ] + } } } ] diff --git a/airbyte-integrations/connectors/source-surveymonkey/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-surveymonkey/integration_tests/expected_records.jsonl index 4a7c296bd3410..129c6b34bd5f3 100644 --- a/airbyte-integrations/connectors/source-surveymonkey/integration_tests/expected_records.jsonl +++ b/airbyte-integrations/connectors/source-surveymonkey/integration_tests/expected_records.jsonl @@ -1,453 +1,15 @@ -{"stream": "surveys", "data": {"title": "Market Research - Product Testing Template", "nickname": "", "language": "en", "folder_id": "0", "category": "market_research", "question_count": 13, "page_count": 1, "response_count": 13, "date_created": "2021-05-07T06:18:00", "date_modified": "2021-06-08T18:09:00", "id": "306079584", "buttons_text": {"next_button": "Next >>", "prev_button": "<< Prev", "done_button": "Done", "exit_button": ""}, "is_owner": true, "footer": true, "theme_id": "4510354", "custom_variables": {}, "href": "https://api.surveymonkey.com/v3/surveys/306079584", "analyze_url": "https://www.surveymonkey.com/analyze/5jPPwKLnlqevaUQom_2BgYAWJWlrKNA2ZFTOYrMBrqW2c_3D", "edit_url": "https://www.surveymonkey.com/create/?sm=5jPPwKLnlqevaUQom_2BgYAWJWlrKNA2ZFTOYrMBrqW2c_3D", "collect_url": "https://www.surveymonkey.com/collect/list?sm=5jPPwKLnlqevaUQom_2BgYAWJWlrKNA2ZFTOYrMBrqW2c_3D", "summary_url": "https://www.surveymonkey.com/summary/5jPPwKLnlqevaUQom_2BgYAWJWlrKNA2ZFTOYrMBrqW2c_3D", "preview": "https://www.surveymonkey.com/r/Preview/?sm=8T1PwDGoJHE1lbkxjUnaGitKu8jxWzyoclw9fNsShflPlk6MYIzwJ2NgjlBw_2B7iV"}, "emitted_at": 1681752753912} -{"stream": "surveys", "data": {"title": "yswa8kobijei1mkwaqxgy", "nickname": "7b4p9vssf810mslcd0eqpcg9s7p0h", "language": "it", "folder_id": "0", "category": "", "question_count": 10, "page_count": 3, "response_count": 18, "date_created": "2021-06-09T21:20:00", "date_modified": "2021-06-10T10:59:00", "id": "307785429", "buttons_text": {"next_button": "Nex >>>>>", "prev_button": "Nix <<<<<", "done_button": "Nax_Don_Gon!", "exit_button": ""}, "is_owner": true, "footer": true, "theme_id": "4510354", "custom_variables": {}, "href": "https://api.surveymonkey.com/v3/surveys/307785429", "analyze_url": "https://www.surveymonkey.com/analyze/BPAkhAawaMN8C17tmmNFxoENnL6uEj_2FlAo5YSBwashU_3D", "edit_url": "https://www.surveymonkey.com/create/?sm=BPAkhAawaMN8C17tmmNFxoENnL6uEj_2FlAo5YSBwashU_3D", "collect_url": "https://www.surveymonkey.com/collect/list?sm=BPAkhAawaMN8C17tmmNFxoENnL6uEj_2FlAo5YSBwashU_3D", "summary_url": "https://www.surveymonkey.com/summary/BPAkhAawaMN8C17tmmNFxoENnL6uEj_2FlAo5YSBwashU_3D", "preview": "https://www.surveymonkey.com/r/Preview/?sm=YRpP3kxXMi2aJkgYoeyZrvuErii13mQ5DRN67Vm4WJ5avIMZ6YvzI_2Bc3FpERJDqx"}, "emitted_at": 1681752754883} -{"stream": "surveys", "data": {"title": "wsfqk1di34d", "nickname": "3pax9qjasev22ofir5dm4x45s", "language": "ru", "folder_id": "0", "category": "", "question_count": 10, "page_count": 3, "response_count": 18, "date_created": "2021-06-09T21:20:00", "date_modified": "2021-06-10T11:00:00", "id": "307785444", "buttons_text": {"next_button": "Nex >>>>>", "prev_button": "Nix <<<<<", "done_button": "Nax_Don_Gon!", "exit_button": ""}, "is_owner": true, "footer": true, "theme_id": "4510354", "custom_variables": {}, "href": "https://api.surveymonkey.com/v3/surveys/307785444", "analyze_url": "https://www.surveymonkey.com/analyze/BPAkhAawaMN8C17tmmNFxrQYdYnKpzayiBZVWsmE3jE_3D", "edit_url": "https://www.surveymonkey.com/create/?sm=BPAkhAawaMN8C17tmmNFxrQYdYnKpzayiBZVWsmE3jE_3D", "collect_url": "https://www.surveymonkey.com/collect/list?sm=BPAkhAawaMN8C17tmmNFxrQYdYnKpzayiBZVWsmE3jE_3D", "summary_url": "https://www.surveymonkey.com/summary/BPAkhAawaMN8C17tmmNFxrQYdYnKpzayiBZVWsmE3jE_3D", "preview": "https://www.surveymonkey.com/r/Preview/?sm=8fhTDITxRdeFQ_2BLWzVWxsJSU0nfgGgIbcvuAJ6C8LCdfivLn4FPj6xZP2o8_2FINMc"}, "emitted_at": 1681752755938} -{"stream": "surveys", "data": {"title": "vpoha5euc66vp", "nickname": "etv0tds1e45", "language": "en", "folder_id": "0", "category": "", "question_count": 10, "page_count": 3, "response_count": 18, "date_created": "2021-06-09T21:19:00", "date_modified": "2021-06-10T11:01:00", "id": "307785394", "buttons_text": {"next_button": "Nex >>>>>", "prev_button": "Nix <<<<<", "done_button": "Nax_Don_Gon!", "exit_button": ""}, "is_owner": true, "footer": true, "theme_id": "4510354", "custom_variables": {}, "href": "https://api.surveymonkey.com/v3/surveys/307785394", "analyze_url": "https://www.surveymonkey.com/analyze/5QHdVgvFd_2Bn4fvmj_2F1aNt3d6AR0J1aeeTU4KsCULsx8_3D", "edit_url": "https://www.surveymonkey.com/create/?sm=5QHdVgvFd_2Bn4fvmj_2F1aNt3d6AR0J1aeeTU4KsCULsx8_3D", "collect_url": "https://www.surveymonkey.com/collect/list?sm=5QHdVgvFd_2Bn4fvmj_2F1aNt3d6AR0J1aeeTU4KsCULsx8_3D", "summary_url": "https://www.surveymonkey.com/summary/5QHdVgvFd_2Bn4fvmj_2F1aNt3d6AR0J1aeeTU4KsCULsx8_3D", "preview": "https://www.surveymonkey.com/r/Preview/?sm=RPtFlMc_2B10dLjP_2BMbJ9eseMZkg_2FE5of5WUErPhwmC57Ij1xz0JOW7uC8i49BGEl8"}, "emitted_at": 1681752757038} -{"stream": "surveys", "data": {"title": "s2d9px7cdril0v7789ab4f", "nickname": "wnhin1ctnss8ebdgjef", "language": "ru", "folder_id": "0", "category": "", "question_count": 10, "page_count": 3, "response_count": 18, "date_created": "2021-06-09T21:20:00", "date_modified": "2021-06-10T11:02:00", "id": "307785402", "buttons_text": {"next_button": "Nex >>>>>", "prev_button": "Nix <<<<<", "done_button": "Nax_Don_Gon!", "exit_button": ""}, "is_owner": true, "footer": true, "theme_id": "4510354", "custom_variables": {}, "href": "https://api.surveymonkey.com/v3/surveys/307785402", "analyze_url": "https://www.surveymonkey.com/analyze/BPAkhAawaMN8C17tmmNFxuFqiaqWbXDPfS4KhzQnJ4c_3D", "edit_url": "https://www.surveymonkey.com/create/?sm=BPAkhAawaMN8C17tmmNFxuFqiaqWbXDPfS4KhzQnJ4c_3D", "collect_url": "https://www.surveymonkey.com/collect/list?sm=BPAkhAawaMN8C17tmmNFxuFqiaqWbXDPfS4KhzQnJ4c_3D", "summary_url": "https://www.surveymonkey.com/summary/BPAkhAawaMN8C17tmmNFxuFqiaqWbXDPfS4KhzQnJ4c_3D", "preview": "https://www.surveymonkey.com/r/Preview/?sm=RId_2BH4A8CUUX6zNvnFnfsb3wKFGuv8kLhz_2BApiG6Mbvu_2BLypJpz_2BM9EfoUuRXBcL"}, "emitted_at": 1681752757741} -{"stream": "surveys", "data": {"title": "muxx41av9mp", "nickname": "hfs5uo9cw1ce3j7rn7n8ncu88myc", "language": "de", "folder_id": "0", "category": "", "question_count": 10, "page_count": 3, "response_count": 18, "date_created": "2021-06-09T21:20:00", "date_modified": "2021-06-10T11:02:00", "id": "307785408", "buttons_text": {"next_button": "Nex >>>>>", "prev_button": "Nix <<<<<", "done_button": "Nax_Don_Gon!", "exit_button": ""}, "is_owner": true, "footer": true, "theme_id": "4510354", "custom_variables": {}, "href": "https://api.surveymonkey.com/v3/surveys/307785408", "analyze_url": "https://www.surveymonkey.com/analyze/BPAkhAawaMN8C17tmmNFxvvr7YEM4YZ5j7dMDUtBHEw_3D", "edit_url": "https://www.surveymonkey.com/create/?sm=BPAkhAawaMN8C17tmmNFxvvr7YEM4YZ5j7dMDUtBHEw_3D", "collect_url": "https://www.surveymonkey.com/collect/list?sm=BPAkhAawaMN8C17tmmNFxvvr7YEM4YZ5j7dMDUtBHEw_3D", "summary_url": "https://www.surveymonkey.com/summary/BPAkhAawaMN8C17tmmNFxvvr7YEM4YZ5j7dMDUtBHEw_3D", "preview": "https://www.surveymonkey.com/r/Preview/?sm=aPPwgAP8stOxnud8WXi8VHKMrUtKIqvd7JhVnp1f0Ucqjb7cbMATzGizMgcLO_2BzA"}, "emitted_at": 1681752758782} -{"stream": "surveys", "data": {"title": "2iokp4jvp9ru5", "nickname": "j2a0kxhq8lmawfqjkg0hx", "language": "ru", "folder_id": "0", "category": "", "question_count": 10, "page_count": 3, "response_count": 21, "date_created": "2021-06-09T21:07:00", "date_modified": "2021-06-10T11:03:00", "id": "307784834", "buttons_text": {"next_button": "Nex >>>>>", "prev_button": "Nix <<<<<", "done_button": "Nax_Don_Gon!", "exit_button": ""}, "is_owner": true, "footer": true, "theme_id": "4510354", "custom_variables": {}, "href": "https://api.surveymonkey.com/v3/surveys/307784834", "analyze_url": "https://www.surveymonkey.com/analyze/moGgts_2Bl1LYlJ1mbVw6XGwf91XTSWLx4XNlQvyUVl4Y_3D", "edit_url": "https://www.surveymonkey.com/create/?sm=moGgts_2Bl1LYlJ1mbVw6XGwf91XTSWLx4XNlQvyUVl4Y_3D", "collect_url": "https://www.surveymonkey.com/collect/list?sm=moGgts_2Bl1LYlJ1mbVw6XGwf91XTSWLx4XNlQvyUVl4Y_3D", "summary_url": "https://www.surveymonkey.com/summary/moGgts_2Bl1LYlJ1mbVw6XGwf91XTSWLx4XNlQvyUVl4Y_3D", "preview": "https://www.surveymonkey.com/r/Preview/?sm=5ovW9LrIPsdAlqAloFxtr_2BhNVquSJyqeGOsrEnkZn56chkdLSKQISgfvIUejUonU"}, "emitted_at": 1681752759856} -{"stream": "surveys", "data": {"title": "9cnwcmdn39ox", "nickname": "vih7eeixclb", "language": "ru", "folder_id": "0", "category": "", "question_count": 10, "page_count": 3, "response_count": 18, "date_created": "2021-06-09T21:20:00", "date_modified": "2021-06-10T11:03:00", "id": "307785448", "buttons_text": {"next_button": "Nex >>>>>", "prev_button": "Nix <<<<<", "done_button": "Nax_Don_Gon!", "exit_button": ""}, "is_owner": true, "footer": true, "theme_id": "4510354", "custom_variables": {}, "href": "https://api.surveymonkey.com/v3/surveys/307785448", "analyze_url": "https://www.surveymonkey.com/analyze/BPAkhAawaMN8C17tmmNFxi9bVow7JzoPHlGgDld6S4o_3D", "edit_url": "https://www.surveymonkey.com/create/?sm=BPAkhAawaMN8C17tmmNFxi9bVow7JzoPHlGgDld6S4o_3D", "collect_url": "https://www.surveymonkey.com/collect/list?sm=BPAkhAawaMN8C17tmmNFxi9bVow7JzoPHlGgDld6S4o_3D", "summary_url": "https://www.surveymonkey.com/summary/BPAkhAawaMN8C17tmmNFxi9bVow7JzoPHlGgDld6S4o_3D", "preview": "https://www.surveymonkey.com/r/Preview/?sm=CJF1TcWP7MthjVWkHhiX8ggWVFe484BBhGYkoi2XqDCTB9FcR1nlBSJ_2FeL47hNDV"}, "emitted_at": 1681752760573} -{"stream": "surveys", "data": {"title": "i2bm4lqt5hxv614n4jcl0guxt5ehgf", "nickname": "ti241ke4qo1i8iyqgpo0u6b2", "language": "de", "folder_id": "0", "category": "", "question_count": 2, "page_count": 3, "response_count": 20, "date_created": "2021-06-09T21:07:00", "date_modified": "2021-06-10T11:04:00", "id": "307784863", "buttons_text": {"next_button": "Nex >>>>>", "prev_button": "Nix <<<<<", "done_button": "Nax_Don_Gon!", "exit_button": ""}, "is_owner": true, "footer": true, "theme_id": "4510354", "custom_variables": {}, "href": "https://api.surveymonkey.com/v3/surveys/307784863", "analyze_url": "https://www.surveymonkey.com/analyze/moGgts_2Bl1LYlJ1mbVw6XGwAs0GsZ1qsocisThgUFPk0_3D", "edit_url": "https://www.surveymonkey.com/create/?sm=moGgts_2Bl1LYlJ1mbVw6XGwAs0GsZ1qsocisThgUFPk0_3D", "collect_url": "https://www.surveymonkey.com/collect/list?sm=moGgts_2Bl1LYlJ1mbVw6XGwAs0GsZ1qsocisThgUFPk0_3D", "summary_url": "https://www.surveymonkey.com/summary/moGgts_2Bl1LYlJ1mbVw6XGwAs0GsZ1qsocisThgUFPk0_3D", "preview": "https://www.surveymonkey.com/r/Preview/?sm=kSWrVa29zWxvAB20ibAMkgPUDRBw_2B_2BTV0eX3oRPIEDNdxc2vxtOGJkQUGITIEart"}, "emitted_at": 1681752761722} -{"stream": "surveys", "data": {"title": "j057iyqgxlotswo070", "nickname": "wxuyqq4cgmfo69ik778r", "language": "ru", "folder_id": "0", "category": "", "question_count": 6, "page_count": 3, "response_count": 20, "date_created": "2021-06-09T21:07:00", "date_modified": "2021-06-10T11:05:00", "id": "307784846", "buttons_text": {"next_button": "Nex >>>>>", "prev_button": "Nix <<<<<", "done_button": "Nax_Don_Gon!", "exit_button": ""}, "is_owner": true, "footer": true, "theme_id": "4510354", "custom_variables": {}, "href": "https://api.surveymonkey.com/v3/surveys/307784846", "analyze_url": "https://www.surveymonkey.com/analyze/moGgts_2Bl1LYlJ1mbVw6XG0D0cevwMQIvVNwWzg_2Bmm4o_3D", "edit_url": "https://www.surveymonkey.com/create/?sm=moGgts_2Bl1LYlJ1mbVw6XG0D0cevwMQIvVNwWzg_2Bmm4o_3D", "collect_url": "https://www.surveymonkey.com/collect/list?sm=moGgts_2Bl1LYlJ1mbVw6XG0D0cevwMQIvVNwWzg_2Bmm4o_3D", "summary_url": "https://www.surveymonkey.com/summary/moGgts_2Bl1LYlJ1mbVw6XG0D0cevwMQIvVNwWzg_2Bmm4o_3D", "preview": "https://www.surveymonkey.com/r/Preview/?sm=c5YOr2YH8sSXNlr7K0Tsuhs54aXml2seWFuXS8MIqk7n5MinBfQ7OjzW_2BtjWV1Cv"}, "emitted_at": 1681752762326} -{"stream": "surveys", "data": {"title": "u7r02s47jr", "nickname": "ye7fubxhua91ce0fxm", "language": "ru", "folder_id": "0", "category": "", "question_count": 3, "page_count": 3, "response_count": 20, "date_created": "2021-06-09T21:07:00", "date_modified": "2021-06-10T11:05:00", "id": "307784856", "buttons_text": {"next_button": "Nex >>>>>", "prev_button": "Nix <<<<<", "done_button": "Nax_Don_Gon!", "exit_button": ""}, "is_owner": true, "footer": true, "theme_id": "4510354", "custom_variables": {}, "href": "https://api.surveymonkey.com/v3/surveys/307784856", "analyze_url": "https://www.surveymonkey.com/analyze/moGgts_2Bl1LYlJ1mbVw6XG6cfdBSOknoT0fzVp7iTUO4_3D", "edit_url": "https://www.surveymonkey.com/create/?sm=moGgts_2Bl1LYlJ1mbVw6XG6cfdBSOknoT0fzVp7iTUO4_3D", "collect_url": "https://www.surveymonkey.com/collect/list?sm=moGgts_2Bl1LYlJ1mbVw6XG6cfdBSOknoT0fzVp7iTUO4_3D", "summary_url": "https://www.surveymonkey.com/summary/moGgts_2Bl1LYlJ1mbVw6XG6cfdBSOknoT0fzVp7iTUO4_3D", "preview": "https://www.surveymonkey.com/r/Preview/?sm=hW7YBNPL4euOVIMWVdvchE0xWtfXGoQrT7wUyFAtDel65HbgmAwoV7JJRkkkFmhn"}, "emitted_at": 1681752763451} -{"stream": "surveys", "data": {"title": "igpfp2yfsw90df6nxbsb49v", "nickname": "h23gl22ulmfsyt4q7xt", "language": "ru", "folder_id": "0", "category": "", "question_count": 10, "page_count": 3, "response_count": 20, "date_created": "2021-06-09T21:19:00", "date_modified": "2021-06-10T11:06:00", "id": "307785388", "buttons_text": {"next_button": "Nex >>>>>", "prev_button": "Nix <<<<<", "done_button": "Nax_Don_Gon!", "exit_button": ""}, "is_owner": true, "footer": true, "theme_id": "4510354", "custom_variables": {}, "href": "https://api.surveymonkey.com/v3/surveys/307785388", "analyze_url": "https://www.surveymonkey.com/analyze/5QHdVgvFd_2Bn4fvmj_2F1aNtwM9q4oP_2B3VqXy_2BeJTiumoQ_3D", "edit_url": "https://www.surveymonkey.com/create/?sm=5QHdVgvFd_2Bn4fvmj_2F1aNtwM9q4oP_2B3VqXy_2BeJTiumoQ_3D", "collect_url": "https://www.surveymonkey.com/collect/list?sm=5QHdVgvFd_2Bn4fvmj_2F1aNtwM9q4oP_2B3VqXy_2BeJTiumoQ_3D", "summary_url": "https://www.surveymonkey.com/summary/5QHdVgvFd_2Bn4fvmj_2F1aNtwM9q4oP_2B3VqXy_2BeJTiumoQ_3D", "preview": "https://www.surveymonkey.com/r/Preview/?sm=khUJQv9z4_2FXXzGUox57WEUPwppIr8YqRqVru77WpakX1HW8hHMmGXZiDGslFZym6"}, "emitted_at": 1681752764067} -{"stream": "surveys", "data": {"title": "b9jo5h23l7pa", "nickname": "qhs5vg2qi0o4arsjiwy2ay00n82n", "language": "ru", "folder_id": "0", "category": "", "question_count": 10, "page_count": 3, "response_count": 20, "date_created": "2021-06-09T21:20:00", "date_modified": "2021-06-10T11:07:00", "id": "307785415", "buttons_text": {"next_button": "Nex >>>>>", "prev_button": "Nix <<<<<", "done_button": "Nax_Don_Gon!", "exit_button": ""}, "is_owner": true, "footer": true, "theme_id": "4510354", "custom_variables": {}, "href": "https://api.surveymonkey.com/v3/surveys/307785415", "analyze_url": "https://www.surveymonkey.com/analyze/BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D", "edit_url": "https://www.surveymonkey.com/create/?sm=BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D", "collect_url": "https://www.surveymonkey.com/collect/list?sm=BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D", "summary_url": "https://www.surveymonkey.com/summary/BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D", "preview": "https://www.surveymonkey.com/r/Preview/?sm=YVdtL_2BP5oiGTrfksyofvENkBr7v87Xfh8hbcJr8rbqgesWvwJjz5N1F7pCSRcDoy"}, "emitted_at": 1681752765140} -{"stream": "surveys", "data": {"title": "jjj", "nickname": "", "language": "en", "folder_id": "0", "category": "", "question_count": 0, "page_count": 1, "response_count": 0, "date_created": "2023-01-17T09:17:00", "date_modified": "2023-01-17T09:17:00", "id": "510388524", "buttons_text": {"next_button": "Next", "prev_button": "Prev", "done_button": "Done", "exit_button": ""}, "is_owner": true, "footer": true, "theme_id": "10292568", "custom_variables": {}, "href": "https://api.surveymonkey.com/v3/surveys/510388524", "analyze_url": "https://www.surveymonkey.com/analyze/VXMmVNBbmOp9KTSvXdhjIr3FHnqleAX32lr9MLBIOE0_3D", "edit_url": "https://www.surveymonkey.com/create/?sm=VXMmVNBbmOp9KTSvXdhjIr3FHnqleAX32lr9MLBIOE0_3D", "collect_url": "https://www.surveymonkey.com/collect/list?sm=VXMmVNBbmOp9KTSvXdhjIr3FHnqleAX32lr9MLBIOE0_3D", "summary_url": "https://www.surveymonkey.com/summary/VXMmVNBbmOp9KTSvXdhjIr3FHnqleAX32lr9MLBIOE0_3D", "preview": "https://www.surveymonkey.com/r/Preview/?sm=TPlNncbuCs17cvxwjuS74VC03_2FOcqpP_2F03m2gerTSI_2FQvLWoY2yn_2FWxLDmxYOp5L"}, "emitted_at": 1681752766116} -{"stream": "survey_responses", "data": {"id": "12706126725", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "124.123.178.184", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405437100", "survey_id": "306079584", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=hNu3QJYf07WiUPOwxCYcARURFGB3ruOrs9slcTHVhmhgDhoNJ0k7w3jCvo0nLM40", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5jPPwKLnlqevaUQom_2BgYAWJWlrKNA2ZFTOYrMBrqW2c_3D?respondent_id=12706126725", "total_time": 62, "date_modified": "2021-06-01T17:40:54+00:00", "date_created": "2021-06-01T17:39:51+00:00", "href": "https://api.surveymonkey.com/v3/surveys/306079584/responses/12706126725", "pages": [{"id": "165250506", "questions": [{"id": "652286715", "answers": [{"choice_id": "4285525064"}]}, {"id": "652286721", "answers": [{"choice_id": "4285525084"}]}, {"id": "652286716", "answers": [{"choice_id": "4285525070"}]}, {"id": "652286718", "answers": [{"choice_id": "4285525079"}]}, {"id": "652286722", "answers": [{"choice_id": "4285525089"}]}, {"id": "652286717", "answers": [{"choice_id": "4285525074"}]}, {"id": "652286723", "answers": [{"choice_id": "4285525095"}]}, {"id": "652286714", "answers": [{"choice_id": "4285525058", "row_id": "4285525061", "choice_metadata": {"weight": "0"}}]}]}]}, "emitted_at": 1690184765464} -{"stream": "survey_responses", "data": {"id": "12706152767", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "37.229.17.15", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405437100", "survey_id": "306079584", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=YIZz5DiXEDES47ARxTbRPzAA9ZOwCjcN_2FDSFTYGWgCVPQCo_2B3EeLirGlON5_2BjrX5", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5jPPwKLnlqevaUQom_2BgYAWJWlrKNA2ZFTOYrMBrqW2c_3D?respondent_id=12706152767", "total_time": 55, "date_modified": "2021-06-01T17:50:03+00:00", "date_created": "2021-06-01T17:49:08+00:00", "href": "https://api.surveymonkey.com/v3/surveys/306079584/responses/12706152767", "pages": [{"id": "165250506", "questions": [{"id": "652286726", "answers": [{"tag_data": [], "text": "fuck this"}]}, {"id": "652286715", "answers": [{"choice_id": "4285525067"}]}, {"id": "652286721", "answers": [{"choice_id": "4285525087"}]}, {"id": "652286716", "answers": [{"choice_id": "4285525072"}]}, {"id": "652286718", "answers": [{"choice_id": "4285525081"}]}, {"id": "652286722", "answers": [{"choice_id": "4285525091"}]}, {"id": "652286717", "answers": [{"choice_id": "4285525077"}]}, {"id": "652286723", "answers": [{"choice_id": "4285525097"}]}, {"id": "652286714", "answers": [{"choice_id": "4285525052", "row_id": "4285525061", "choice_metadata": {"weight": "-100"}}]}, {"id": "652286719", "answers": [{"tag_data": [], "text": "waste of time"}]}]}]}, "emitted_at": 1690184765465} -{"stream": "survey_responses", "data": {"id": "12706159691", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "157.48.231.67", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405437100", "survey_id": "306079584", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=P4z1eeLex6p2OQEYYXRJKBxPyHk6ljkOskXPds2olEToYrU_2FwTZWAyllEtgJRyQL", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5jPPwKLnlqevaUQom_2BgYAWJWlrKNA2ZFTOYrMBrqW2c_3D?respondent_id=12706159691", "total_time": 104, "date_modified": "2021-06-01T17:52:27+00:00", "date_created": "2021-06-01T17:50:43+00:00", "href": "https://api.surveymonkey.com/v3/surveys/306079584/responses/12706159691", "pages": [{"id": "165250506", "questions": [{"id": "652286724", "answers": [{"row_id": "4285525098", "x": "78.63", "y": "13.19"}]}, {"id": "652286725", "answers": [{"row_id": "4285525102", "x": "41.94", "y": "50.16"}]}, {"id": "652286715", "answers": [{"choice_id": "4285525065"}]}, {"id": "652286721", "answers": [{"choice_id": "4285525086"}]}, {"id": "652286716", "answers": [{"choice_id": "4285525071"}]}, {"id": "652286718", "answers": [{"choice_id": "4285525079"}]}, {"id": "652286722", "answers": [{"choice_id": "4285525090"}]}, {"id": "652286717", "answers": [{"choice_id": "4285525076"}]}, {"id": "652286723", "answers": [{"choice_id": "4285525095"}]}, {"id": "652286714", "answers": [{"choice_id": "4285525055", "row_id": "4285525061", "choice_metadata": {"weight": "-100"}}]}]}]}, "emitted_at": 1690184765465} -{"stream": "survey_responses", "data": {"id": "12706182356", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "76.14.176.236", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405437100", "survey_id": "306079584", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=oRBufZrmuNVZW5ou_2B4ZICuFqW6p3uYPmpjTb5IJ5Zf_2BH4FPoHKfnz_2BSC_2FR_2FpxWNq", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5jPPwKLnlqevaUQom_2BgYAWJWlrKNA2ZFTOYrMBrqW2c_3D?respondent_id=12706182356", "total_time": 36, "date_modified": "2021-06-01T18:00:12+00:00", "date_created": "2021-06-01T17:59:35+00:00", "href": "https://api.surveymonkey.com/v3/surveys/306079584/responses/12706182356", "pages": [{"id": "165250506", "questions": [{"id": "652286724", "answers": [{"row_id": "4285525098", "x": "58.66", "y": "54.49"}]}, {"id": "652286725", "answers": [{"row_id": "4285525102", "x": "56.64", "y": "71.09"}]}, {"id": "652286715", "answers": [{"choice_id": "4285525063"}]}, {"id": "652286721", "answers": [{"choice_id": "4285525085"}]}, {"id": "652286716", "answers": [{"choice_id": "4285525072"}]}, {"id": "652286718", "answers": [{"choice_id": "4285525082"}]}, {"id": "652286722", "answers": [{"choice_id": "4285525092"}]}, {"id": "652286717", "answers": [{"choice_id": "4285525077"}]}, {"id": "652286723", "answers": [{"choice_id": "4285525097"}]}, {"id": "652286714", "answers": [{"choice_id": "4285525060", "row_id": "4285525061", "choice_metadata": {"weight": "100"}}]}, {"id": "652286719", "answers": [{"tag_data": [], "text": "gekkiadsuigasdf;oij sefhello \ud83c\udf50\ud83c\udf50\ud83c\udf50\ud83c\udf50\ud83c\udf50"}]}, {"id": "652286720", "answers": [{"tag_data": [], "text": "good"}]}]}]}, "emitted_at": 1690184765465} -{"stream": "survey_responses", "data": {"id": "12706201784", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "49.37.158.6", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405437100", "survey_id": "306079584", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=MD0lXMX2bJm93XLiSKWuW2p52_2BwlWpayf88naadbuO5wITz0TijA3kwSis907xu1", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5jPPwKLnlqevaUQom_2BgYAWJWlrKNA2ZFTOYrMBrqW2c_3D?respondent_id=12706201784", "total_time": 183, "date_modified": "2021-06-01T18:06:11+00:00", "date_created": "2021-06-01T18:03:07+00:00", "href": "https://api.surveymonkey.com/v3/surveys/306079584/responses/12706201784", "pages": [{"id": "165250506", "questions": [{"id": "652286724", "answers": [{"row_id": "4285525098", "x": "87.90", "y": "67.29"}]}, {"id": "652286725", "answers": [{"row_id": "4285525102", "x": "7.66", "y": "92.21"}]}, {"id": "652286726", "answers": [{"tag_data": [], "text": "Colour"}]}, {"id": "652286715", "answers": [{"choice_id": "4285525064"}]}, {"id": "652286721", "answers": [{"choice_id": "4285525085"}]}, {"id": "652286716", "answers": [{"choice_id": "4285525070"}]}, {"id": "652286718", "answers": [{"choice_id": "4285525079"}]}, {"id": "652286722", "answers": [{"choice_id": "4285525090"}]}, {"id": "652286717", "answers": [{"choice_id": "4285525076"}]}, {"id": "652286723", "answers": [{"choice_id": "4285525097"}]}, {"id": "652286714", "answers": [{"choice_id": "4285525055", "row_id": "4285525061", "choice_metadata": {"weight": "-100"}}]}, {"id": "652286719", "answers": [{"tag_data": [], "text": "Nothing"}]}, {"id": "652286720", "answers": [{"tag_data": [], "text": "I don't know"}]}]}]}, "emitted_at": 1690184765466} -{"stream": "survey_responses", "data": {"id": "12706203862", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "49.205.239.133", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405437100", "survey_id": "306079584", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=sNkTdEY_2FEibixIxcUhotq6hQ3muFVlLkg0cE531VDB5Ya2U21pwazZRwwSXFqqtK", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5jPPwKLnlqevaUQom_2BgYAWJWlrKNA2ZFTOYrMBrqW2c_3D?respondent_id=12706203862", "total_time": 114, "date_modified": "2021-06-01T18:06:49+00:00", "date_created": "2021-06-01T18:04:54+00:00", "href": "https://api.surveymonkey.com/v3/surveys/306079584/responses/12706203862", "pages": [{"id": "165250506", "questions": [{"id": "652286724", "answers": [{"row_id": "4285525098", "x": "40.32", "y": "93.35"}]}, {"id": "652286725", "answers": [{"row_id": "4285525102", "x": "37.10", "y": "66.67"}]}, {"id": "652286726", "answers": [{"tag_data": [], "text": "because pets should not be tied,they should have their own freedom to move"}]}, {"id": "652286715", "answers": [{"choice_id": "4285525065"}]}, {"id": "652286721", "answers": [{"choice_id": "4285525085"}]}, {"id": "652286716", "answers": [{"choice_id": "4285525071"}]}, {"id": "652286718", "answers": [{"choice_id": "4285525080"}]}, {"id": "652286722", "answers": [{"choice_id": "4285525090"}]}, {"id": "652286717", "answers": [{"choice_id": "4285525076"}]}, {"id": "652286723", "answers": [{"choice_id": "4285525096"}]}, {"id": "652286714", "answers": [{"choice_id": "4285525055", "row_id": "4285525061", "choice_metadata": {"weight": "-100"}}]}]}]}, "emitted_at": 1690184765466} -{"stream": "survey_responses", "data": {"id": "12706264166", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "27.6.69.132", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405437100", "survey_id": "306079584", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=OboVhBA1nCZ4ejfYCmJ6WDu5SxhIUnnNn2dCz_2BTqxksFnjOSpy88MtS4B5Wbpk_2BW", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5jPPwKLnlqevaUQom_2BgYAWJWlrKNA2ZFTOYrMBrqW2c_3D?respondent_id=12706264166", "total_time": 309, "date_modified": "2021-06-01T18:27:03+00:00", "date_created": "2021-06-01T18:21:54+00:00", "href": "https://api.surveymonkey.com/v3/surveys/306079584/responses/12706264166", "pages": [{"id": "165250506", "questions": [{"id": "652286724", "answers": [{"row_id": "4285525098", "x": "43.59", "y": "28.51"}]}, {"id": "652286725", "answers": [{"row_id": "4285525102", "x": "41.31", "y": "91.08"}]}, {"id": "652286726", "answers": [{"tag_data": [], "text": "\ud83e\udd14"}]}, {"id": "652286715", "answers": [{"choice_id": "4285525065"}]}, {"id": "652286721", "answers": [{"choice_id": "4285525085"}]}, {"id": "652286716", "answers": [{"choice_id": "4285525070"}]}, {"id": "652286718", "answers": [{"choice_id": "4285525081"}]}, {"id": "652286722", "answers": [{"choice_id": "4285525091"}]}, {"id": "652286717", "answers": [{"choice_id": "4285525076"}]}, {"id": "652286723", "answers": [{"choice_id": "4285525096"}]}, {"id": "652286714", "answers": [{"choice_id": "4285525058", "row_id": "4285525061", "choice_metadata": {"weight": "0"}}]}, {"id": "652286719", "answers": [{"tag_data": [], "text": "\ud83e\uddb4"}]}, {"id": "652286720", "answers": [{"tag_data": [], "text": "\ud83d\udcaa"}]}]}]}, "emitted_at": 1690184765466} -{"stream": "survey_responses", "data": {"id": "12706274940", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "49.205.116.166", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405437100", "survey_id": "306079584", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=D1IcNXq6BKOBmkySTfHcUfHuH3_2Fa0aniMPQEG23UrQ16iSsyx8ye2hPRQt3C61Jd", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5jPPwKLnlqevaUQom_2BgYAWJWlrKNA2ZFTOYrMBrqW2c_3D?respondent_id=12706274940", "total_time": 105, "date_modified": "2021-06-01T18:30:56+00:00", "date_created": "2021-06-01T18:29:10+00:00", "href": "https://api.surveymonkey.com/v3/surveys/306079584/responses/12706274940", "pages": [{"id": "165250506", "questions": [{"id": "652286724", "answers": [{"row_id": "4285525098", "x": "7.12", "y": "89.86"}]}, {"id": "652286725", "answers": [{"row_id": "4285525102", "x": "87.90", "y": "9.02"}]}, {"id": "652286726", "answers": [{"tag_data": [], "text": "Didn't like the product"}]}, {"id": "652286715", "answers": [{"choice_id": "4285525067"}]}, {"id": "652286721", "answers": [{"choice_id": "4285525087"}]}, {"id": "652286716", "answers": [{"choice_id": "4285525072"}]}, {"id": "652286718", "answers": [{"choice_id": "4285525081"}]}, {"id": "652286722", "answers": [{"choice_id": "4285525092"}]}, {"id": "652286717", "answers": [{"choice_id": "4285525077"}]}, {"id": "652286723", "answers": [{"choice_id": "4285525097"}]}, {"id": "652286714", "answers": [{"choice_id": "4285525050", "row_id": "4285525061", "choice_metadata": {"weight": "-100"}}]}, {"id": "652286719", "answers": [{"tag_data": [], "text": "Nothing"}]}, {"id": "652286720", "answers": [{"tag_data": [], "text": "Better don't try to work on it"}]}]}]}, "emitted_at": 1690184765467} -{"stream": "survey_responses", "data": {"id": "12706353147", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "176.37.67.33", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405437100", "survey_id": "306079584", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=qwY5AKZSqBd7DfoZDGr4x_2FJr28RhtoeaQ7_2F6VBS1G3yK_2FPH86sPCcFs1zACVlbMO", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5jPPwKLnlqevaUQom_2BgYAWJWlrKNA2ZFTOYrMBrqW2c_3D?respondent_id=12706353147", "total_time": 162, "date_modified": "2021-06-01T18:58:44+00:00", "date_created": "2021-06-01T18:56:02+00:00", "href": "https://api.surveymonkey.com/v3/surveys/306079584/responses/12706353147", "pages": [{"id": "165250506", "questions": [{"id": "652286724", "answers": [{"row_id": "4285525098", "x": "78.00", "y": "6.84"}]}, {"id": "652286725", "answers": [{"row_id": "4285525102", "x": "77.62", "y": "8.50"}]}, {"id": "652286726", "answers": [{"tag_data": [], "text": "I like logo."}]}, {"id": "652286715", "answers": [{"choice_id": "4285525065"}]}, {"id": "652286721", "answers": [{"choice_id": "4285525085"}]}, {"id": "652286716", "answers": [{"choice_id": "4285525071"}]}, {"id": "652286718", "answers": [{"choice_id": "4285525081"}]}, {"id": "652286722", "answers": [{"choice_id": "4285525090"}]}, {"id": "652286717", "answers": [{"choice_id": "4285525076"}]}, {"id": "652286723", "answers": [{"choice_id": "4285525096"}]}, {"id": "652286714", "answers": [{"choice_id": "4285525056", "row_id": "4285525061", "choice_metadata": {"weight": "-100"}}]}, {"id": "652286719", "answers": [{"tag_data": [], "text": "Logo."}]}, {"id": "652286720", "answers": [{"tag_data": [], "text": "Nothing."}]}]}]}, "emitted_at": 1690184765467} -{"stream": "survey_responses", "data": {"id": "12707255568", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "157.48.145.117", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405437100", "survey_id": "306079584", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=3XYG_2F55lVJgeQ0_2FWG3xyRxOF5gCKFR0p1HPkdv1iiMZ1h5MIYxNR12enFBgK9TCS", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5jPPwKLnlqevaUQom_2BgYAWJWlrKNA2ZFTOYrMBrqW2c_3D?respondent_id=12707255568", "total_time": 263, "date_modified": "2021-06-02T01:13:33+00:00", "date_created": "2021-06-02T01:09:10+00:00", "href": "https://api.surveymonkey.com/v3/surveys/306079584/responses/12707255568", "pages": [{"id": "165250506", "questions": [{"id": "652286724", "answers": [{"row_id": "4285525098", "x": "38.17", "y": "40.98"}]}, {"id": "652286725", "answers": [{"row_id": "4285525102", "x": "42.90", "y": "93.75"}]}, {"id": "652286726", "answers": [{"tag_data": [], "text": "I love puppies but hate heart symbol"}]}, {"id": "652286715", "answers": [{"choice_id": "4285525063"}]}, {"id": "652286721", "answers": [{"choice_id": "4285525084"}]}, {"id": "652286716", "answers": [{"choice_id": "4285525070"}]}, {"id": "652286718", "answers": [{"choice_id": "4285525080"}]}, {"id": "652286722", "answers": [{"choice_id": "4285525089"}]}, {"id": "652286717", "answers": [{"choice_id": "4285525075"}]}, {"id": "652286723", "answers": [{"choice_id": "4285525095"}]}, {"id": "652286714", "answers": [{"choice_id": "4285525056", "row_id": "4285525061", "choice_metadata": {"weight": "-100"}}]}, {"id": "652286719", "answers": [{"tag_data": [], "text": "It's for animals"}]}, {"id": "652286720", "answers": [{"tag_data": [], "text": "Approach to customer"}]}]}]}, "emitted_at": 1690184765467} -{"stream": "survey_responses", "data": {"id": "12707566461", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "106.195.73.137", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405437100", "survey_id": "306079584", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=FskCSEofrdcabC7MVfRNtzeiZ4C4kiwx_2FpBdpRfIsd5SgVGi4N9znXMS9exRXf27", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5jPPwKLnlqevaUQom_2BgYAWJWlrKNA2ZFTOYrMBrqW2c_3D?respondent_id=12707566461", "total_time": 233, "date_modified": "2021-06-02T04:06:48+00:00", "date_created": "2021-06-02T04:02:54+00:00", "href": "https://api.surveymonkey.com/v3/surveys/306079584/responses/12707566461", "pages": [{"id": "165250506", "questions": [{"id": "652286724", "answers": [{"row_id": "4285525098", "x": "32.67", "y": "32.51"}]}, {"id": "652286725", "answers": [{"row_id": "4285525102", "x": "89.00", "y": "55.34"}]}, {"id": "652286715", "answers": [{"choice_id": "4285525065"}]}, {"id": "652286721", "answers": [{"choice_id": "4285525084"}]}, {"id": "652286716", "answers": [{"choice_id": "4285525069"}]}, {"id": "652286718", "answers": [{"choice_id": "4285525080"}]}, {"id": "652286722", "answers": [{"choice_id": "4285525090"}]}, {"id": "652286717", "answers": [{"choice_id": "4285525075"}]}, {"id": "652286723", "answers": [{"choice_id": "4285525096"}]}, {"id": "652286714", "answers": [{"choice_id": "4285525058", "row_id": "4285525061", "choice_metadata": {"weight": "0"}}]}]}]}, "emitted_at": 1690184765468} -{"stream": "survey_responses", "data": {"id": "12709748835", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.196.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405437100", "survey_id": "306079584", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=x5BdjfYWF_2B2dRs9qPRcHJ_2BqMN8Dpfn_2FfhKhtD7G8W4_2BX9ECDWh9wAXjYd4mxXk_2F_2B", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5jPPwKLnlqevaUQom_2BgYAWJWlrKNA2ZFTOYrMBrqW2c_3D?respondent_id=12709748835", "total_time": 127, "date_modified": "2021-06-02T18:14:19+00:00", "date_created": "2021-06-02T18:12:12+00:00", "href": "https://api.surveymonkey.com/v3/surveys/306079584/responses/12709748835", "pages": [{"id": "165250506", "questions": [{"id": "652286724", "answers": [{"row_id": "4285525098", "x": "41.85", "y": "55.96"}]}, {"id": "652286725", "answers": [{"row_id": "4285525102", "x": "31.35", "y": "35.64"}]}, {"id": "652286726", "answers": [{"tag_data": [], "text": "because"}]}, {"id": "652286715", "answers": [{"choice_id": "4285525063"}]}, {"id": "652286721", "answers": [{"choice_id": "4285525084"}]}, {"id": "652286716", "answers": [{"choice_id": "4285525069"}]}, {"id": "652286718", "answers": [{"choice_id": "4285525079"}]}, {"id": "652286722", "answers": [{"choice_id": "4285525089"}]}, {"id": "652286717", "answers": [{"choice_id": "4285525074"}]}, {"id": "652286723", "answers": [{"choice_id": "4285525095"}]}, {"id": "652286714", "answers": [{"choice_id": "4285525060", "row_id": "4285525061", "choice_metadata": {"weight": "100"}}]}, {"id": "652286719", "answers": [{"tag_data": [], "text": "u"}]}, {"id": "652286720", "answers": [{"tag_data": [], "text": "f"}]}]}]}, "emitted_at": 1690184765468} -{"stream": "survey_responses", "data": {"id": "12706107193", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "49.37.150.53", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405437100", "survey_id": "306079584", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=7vMzRy38ln3J_2FJiU8YD1uve9yI6cAQIQ_2FuVxRipS_2FyB57w9vo9xpgShOuFWVcoI2", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5jPPwKLnlqevaUQom_2BgYAWJWlrKNA2ZFTOYrMBrqW2c_3D?respondent_id=12706107193", "total_time": 607550, "date_modified": "2021-06-08T18:17:17+00:00", "date_created": "2021-06-01T17:31:26+00:00", "href": "https://api.surveymonkey.com/v3/surveys/306079584/responses/12706107193", "pages": [{"id": "165250506", "questions": [{"id": "652286724", "answers": [{"row_id": "4285525098", "x": "77.56", "y": "6.01"}]}, {"id": "652286725", "answers": [{"row_id": "4285525102", "x": "15.71", "y": "8.23"}]}, {"id": "652286726", "answers": [{"tag_data": [], "text": "It seems in that way"}]}, {"id": "652286715", "answers": [{"choice_id": "4285525065"}]}, {"id": "652286721", "answers": [{"choice_id": "4285525085"}]}, {"id": "652286716", "answers": [{"choice_id": "4285525071"}]}, {"id": "652286718", "answers": [{"choice_id": "4285525081"}]}, {"id": "652286722", "answers": [{"choice_id": "4285525090"}]}, {"id": "652286717", "answers": [{"choice_id": "4285525075"}]}, {"id": "652286723", "answers": [{"choice_id": "4285525096"}]}, {"id": "652286714", "answers": [{"choice_id": "4285525050", "row_id": "4285525061", "choice_metadata": {"weight": "-100"}}]}, {"id": "652286719", "answers": [{"tag_data": [], "text": "Nothing much"}]}, {"id": "652286720", "answers": [{"tag_data": [], "text": "Will include Tagline"}]}]}]}, "emitted_at": 1690184765469} -{"stream": "survey_responses", "data": {"id": "12731040927", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843657", "survey_id": "307785429", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=YORvkBiLvNm2647vGYxs1GGGoUjDrz_2FRfIWw1i07UtykH_2BBJHDTB3ujkOPfyAxqP", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxoENnL6uEj_2FlAo5YSBwashU_3D?respondent_id=12731040927", "total_time": 31, "date_modified": "2021-06-10T08:46:53+00:00", "date_created": "2021-06-10T08:46:22+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785429/responses/12731040927", "pages": [{"id": "168831413", "questions": []}, {"id": "168831415", "questions": [{"id": "667461858", "answers": [{"choice_id": "4385175368"}]}, {"id": "667461861", "answers": [{"choice_id": "4385175382"}]}, {"id": "667461876", "answers": [{"choice_id": "4385175448"}]}, {"id": "667461897", "answers": [{"choice_id": "4385175533"}]}, {"id": "667461902", "answers": [{"choice_id": "4385175563"}]}]}, {"id": "168831437", "questions": [{"id": "667461933", "answers": [{"choice_id": "4385175735"}]}, {"id": "667461934", "answers": [{"choice_id": "4385175737"}]}, {"id": "667461936", "answers": [{"choice_id": "4385175742"}]}, {"id": "667461937", "answers": [{"choice_id": "4385175749"}]}, {"id": "667461986", "answers": [{"choice_id": "4385175881"}]}]}]}, "emitted_at": 1690184766569} -{"stream": "survey_responses", "data": {"id": "12731055204", "recipient_id": "", "collection_mode": "default", "response_status": "partial", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": ["168831413", "168831415", "168831437"], "collector_id": "405843657", "survey_id": "307785429", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=tcbSSptg67E5MmPiY_2BCTC0GEk5rcm_2FHHcASKwxBGLOX_2BBByesO_2Fh848B_2FqaaVF8d", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxoENnL6uEj_2FlAo5YSBwashU_3D?respondent_id=12731055204", "total_time": 15, "date_modified": "2021-06-10T08:54:22+00:00", "date_created": "2021-06-10T08:54:07+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785429/responses/12731055204", "pages": [{"id": "168831413", "questions": []}, {"id": "168831415", "questions": [{"id": "667461858", "answers": [{"choice_id": "4385175369"}]}, {"id": "667461861", "answers": [{"choice_id": "4385175382"}]}, {"id": "667461876", "answers": [{"choice_id": "4385175447"}]}, {"id": "667461897", "answers": [{"choice_id": "4385175534"}]}, {"id": "667461902", "answers": [{"choice_id": "4385175563"}]}]}, {"id": "168831437", "questions": []}]}, "emitted_at": 1690184766571} -{"stream": "survey_responses", "data": {"id": "12731069666", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843657", "survey_id": "307785429", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=tHWbA6E0Q6UEylVBreS0XaGKh3GcjDQu_2FBytp_2F_2FcCSkYTgRKkVt3jyyhUFoh6T_2Bs", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxoENnL6uEj_2FlAo5YSBwashU_3D?respondent_id=12731069666", "total_time": 33, "date_modified": "2021-06-10T09:02:19+00:00", "date_created": "2021-06-10T09:01:46+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785429/responses/12731069666", "pages": [{"id": "168831413", "questions": []}, {"id": "168831415", "questions": [{"id": "667461858", "answers": [{"choice_id": "4385175366"}]}, {"id": "667461861", "answers": [{"choice_id": "4385175380"}]}, {"id": "667461876", "answers": [{"choice_id": "4385175448"}]}, {"id": "667461897", "answers": [{"choice_id": "4385175534"}]}, {"id": "667461902", "answers": [{"choice_id": "4385175559"}]}]}, {"id": "168831437", "questions": [{"id": "667461933", "answers": [{"choice_id": "4385175733"}]}, {"id": "667461934", "answers": [{"choice_id": "4385175736"}]}, {"id": "667461936", "answers": [{"choice_id": "4385175741"}]}, {"id": "667461937", "answers": [{"choice_id": "4385175752"}]}, {"id": "667461986", "answers": [{"choice_id": "4385175878"}]}]}]}, "emitted_at": 1690184766572} -{"stream": "survey_responses", "data": {"id": "12731085951", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843657", "survey_id": "307785429", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=c_2B3Xk1rn3Lhz9GqaPNSRmjd03YiGx88BQu_2BtAvHiMmp5a0BR68kWLCOfALzBwKH4", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxoENnL6uEj_2FlAo5YSBwashU_3D?respondent_id=12731085951", "total_time": 31, "date_modified": "2021-06-10T09:10:05+00:00", "date_created": "2021-06-10T09:09:34+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785429/responses/12731085951", "pages": [{"id": "168831413", "questions": []}, {"id": "168831415", "questions": [{"id": "667461858", "answers": [{"choice_id": "4385175368"}]}, {"id": "667461861", "answers": [{"choice_id": "4385175382"}]}, {"id": "667461876", "answers": [{"choice_id": "4385175448"}]}, {"id": "667461897", "answers": [{"choice_id": "4385175533"}]}, {"id": "667461902", "answers": [{"choice_id": "4385175564"}]}]}, {"id": "168831437", "questions": [{"id": "667461933", "answers": [{"choice_id": "4385175732"}]}, {"id": "667461934", "answers": [{"choice_id": "4385175737"}]}, {"id": "667461936", "answers": [{"choice_id": "4385175741"}]}, {"id": "667461937", "answers": [{"choice_id": "4385175745"}]}, {"id": "667461986", "answers": [{"choice_id": "4385175880"}]}]}]}, "emitted_at": 1690184766574} -{"stream": "survey_responses", "data": {"id": "12731102076", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843657", "survey_id": "307785429", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=s_2BQLUH6Nb049vqmxYdKTQlIV_2FCXzxmRR5F_2B_2Fe9FaqXRh3H_2FZAFF51mqyI3e8s666", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxoENnL6uEj_2FlAo5YSBwashU_3D?respondent_id=12731102076", "total_time": 32, "date_modified": "2021-06-10T09:17:44+00:00", "date_created": "2021-06-10T09:17:12+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785429/responses/12731102076", "pages": [{"id": "168831413", "questions": []}, {"id": "168831415", "questions": [{"id": "667461858", "answers": [{"choice_id": "4385175369"}]}, {"id": "667461861", "answers": [{"choice_id": "4385175381"}]}, {"id": "667461876", "answers": [{"choice_id": "4385175447"}]}, {"id": "667461897", "answers": [{"choice_id": "4385175535"}]}, {"id": "667461902", "answers": [{"choice_id": "4385175561"}]}]}, {"id": "168831437", "questions": [{"id": "667461933", "answers": [{"choice_id": "4385175735"}]}, {"id": "667461934", "answers": [{"choice_id": "4385175739"}]}, {"id": "667461936", "answers": [{"choice_id": "4385175740"}]}, {"id": "667461937", "answers": [{"choice_id": "4385175745"}]}, {"id": "667461986", "answers": [{"choice_id": "4385175880"}]}]}]}, "emitted_at": 1690184766576} -{"stream": "survey_responses", "data": {"id": "12731118899", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843657", "survey_id": "307785429", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=hRALwRgDjAOTNk8Hpt0wWWn9X3xurqWW9vynXjPkvIvI4Xofu_2FJSgEVwtWK23vLd", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxoENnL6uEj_2FlAo5YSBwashU_3D?respondent_id=12731118899", "total_time": 31, "date_modified": "2021-06-10T09:25:25+00:00", "date_created": "2021-06-10T09:24:53+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785429/responses/12731118899", "pages": [{"id": "168831413", "questions": []}, {"id": "168831415", "questions": [{"id": "667461858", "answers": [{"choice_id": "4385175369"}]}, {"id": "667461861", "answers": [{"choice_id": "4385175382"}]}, {"id": "667461876", "answers": [{"choice_id": "4385175447"}]}, {"id": "667461897", "answers": [{"choice_id": "4385175533"}]}, {"id": "667461902", "answers": [{"choice_id": "4385175559"}]}]}, {"id": "168831437", "questions": [{"id": "667461933", "answers": [{"choice_id": "4385175734"}]}, {"id": "667461934", "answers": [{"choice_id": "4385175737"}]}, {"id": "667461936", "answers": [{"choice_id": "4385175742"}]}, {"id": "667461937", "answers": [{"choice_id": "4385175745"}]}, {"id": "667461986", "answers": [{"choice_id": "4385175878"}]}]}]}, "emitted_at": 1690184766577} -{"stream": "survey_responses", "data": {"id": "12731135865", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843657", "survey_id": "307785429", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=OEPuy4AY_2B47LLaYrOMe90r_2BJdHiKxv12FpoQr0N94GGA0TFN7l7tk5QH14HD_2FaBy", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxoENnL6uEj_2FlAo5YSBwashU_3D?respondent_id=12731135865", "total_time": 31, "date_modified": "2021-06-10T09:33:08+00:00", "date_created": "2021-06-10T09:32:37+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785429/responses/12731135865", "pages": [{"id": "168831413", "questions": []}, {"id": "168831415", "questions": [{"id": "667461858", "answers": [{"choice_id": "4385175368"}]}, {"id": "667461861", "answers": [{"choice_id": "4385175381"}]}, {"id": "667461876", "answers": [{"choice_id": "4385175449"}]}, {"id": "667461897", "answers": [{"choice_id": "4385175535"}]}, {"id": "667461902", "answers": [{"choice_id": "4385175563"}]}]}, {"id": "168831437", "questions": [{"id": "667461933", "answers": [{"choice_id": "4385175735"}]}, {"id": "667461934", "answers": [{"choice_id": "4385175737"}]}, {"id": "667461936", "answers": [{"choice_id": "4385175742"}]}, {"id": "667461937", "answers": [{"choice_id": "4385175748"}]}, {"id": "667461986", "answers": [{"choice_id": "4385175880"}]}]}]}, "emitted_at": 1690184766578} -{"stream": "survey_responses", "data": {"id": "12731153599", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843657", "survey_id": "307785429", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=KPgqZwkF7GLrzwtQWhwpZBCuPbq_2F_2BuXfuUXpCBXX1y_2BwKBvvXi7s1ob9AMVJymCk", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxoENnL6uEj_2FlAo5YSBwashU_3D?respondent_id=12731153599", "total_time": 31, "date_modified": "2021-06-10T09:40:49+00:00", "date_created": "2021-06-10T09:40:18+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785429/responses/12731153599", "pages": [{"id": "168831413", "questions": []}, {"id": "168831415", "questions": [{"id": "667461858", "answers": [{"choice_id": "4385175366"}]}, {"id": "667461861", "answers": [{"choice_id": "4385175382"}]}, {"id": "667461876", "answers": [{"choice_id": "4385175448"}]}, {"id": "667461897", "answers": [{"choice_id": "4385175534"}]}, {"id": "667461902", "answers": [{"choice_id": "4385175561"}]}]}, {"id": "168831437", "questions": [{"id": "667461933", "answers": [{"choice_id": "4385175732"}]}, {"id": "667461934", "answers": [{"choice_id": "4385175737"}]}, {"id": "667461936", "answers": [{"choice_id": "4385175740"}]}, {"id": "667461937", "answers": [{"choice_id": "4385175751"}]}, {"id": "667461986", "answers": [{"choice_id": "4385175880"}]}]}]}, "emitted_at": 1690184766580} -{"stream": "survey_responses", "data": {"id": "12731170943", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843657", "survey_id": "307785429", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=e6QtiM7XZfbHB3Ln1ifDr6Ct8PD0j6Nikh_2BTvBikLVfpeCSzS5WYkg2D_2BDVygAee", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxoENnL6uEj_2FlAo5YSBwashU_3D?respondent_id=12731170943", "total_time": 31, "date_modified": "2021-06-10T09:48:36+00:00", "date_created": "2021-06-10T09:48:04+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785429/responses/12731170943", "pages": [{"id": "168831413", "questions": []}, {"id": "168831415", "questions": [{"id": "667461858", "answers": [{"choice_id": "4385175366"}]}, {"id": "667461861", "answers": [{"choice_id": "4385175382"}]}, {"id": "667461876", "answers": [{"choice_id": "4385175447"}]}, {"id": "667461897", "answers": [{"choice_id": "4385175534"}]}, {"id": "667461902", "answers": [{"choice_id": "4385175563"}]}]}, {"id": "168831437", "questions": [{"id": "667461933", "answers": [{"choice_id": "4385175733"}]}, {"id": "667461934", "answers": [{"choice_id": "4385175736"}]}, {"id": "667461936", "answers": [{"choice_id": "4385175740"}]}, {"id": "667461937", "answers": [{"choice_id": "4385175748"}]}, {"id": "667461986", "answers": [{"choice_id": "4385175879"}]}]}]}, "emitted_at": 1690184766581} -{"stream": "survey_responses", "data": {"id": "12731188992", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843657", "survey_id": "307785429", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=VATG0VEAVLLh8CTsnDoMZ_2FR3ida2C_2FIqVwt3FHGuc0GRu_2BA6Qa9E4Ewd3iEHt5YQ", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxoENnL6uEj_2FlAo5YSBwashU_3D?respondent_id=12731188992", "total_time": 35, "date_modified": "2021-06-10T09:56:51+00:00", "date_created": "2021-06-10T09:56:15+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785429/responses/12731188992", "pages": [{"id": "168831413", "questions": []}, {"id": "168831415", "questions": [{"id": "667461858", "answers": [{"choice_id": "4385175366"}]}, {"id": "667461861", "answers": [{"choice_id": "4385175381"}]}, {"id": "667461876", "answers": [{"choice_id": "4385175447"}]}, {"id": "667461897", "answers": [{"choice_id": "4385175533"}]}, {"id": "667461902", "answers": [{"choice_id": "4385175559"}]}]}, {"id": "168831437", "questions": [{"id": "667461933", "answers": [{"choice_id": "4385175735"}]}, {"id": "667461934", "answers": [{"choice_id": "4385175738"}]}, {"id": "667461936", "answers": [{"choice_id": "4385175741"}]}, {"id": "667461937", "answers": [{"choice_id": "4385175749"}]}, {"id": "667461986", "answers": [{"choice_id": "4385175881"}]}]}]}, "emitted_at": 1690184766583} -{"stream": "survey_responses", "data": {"id": "12731208790", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843657", "survey_id": "307785429", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=KH_2Bhsc8oIpTdaX60RIwFQ1SDyudOf2_2B61W7cPCZGGXEKsY5_2FN_2BA_2B_2FXh0DeOOdA7F", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxoENnL6uEj_2FlAo5YSBwashU_3D?respondent_id=12731208790", "total_time": 32, "date_modified": "2021-06-10T10:05:01+00:00", "date_created": "2021-06-10T10:04:28+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785429/responses/12731208790", "pages": [{"id": "168831413", "questions": []}, {"id": "168831415", "questions": [{"id": "667461858", "answers": [{"choice_id": "4385175368"}]}, {"id": "667461861", "answers": [{"choice_id": "4385175380"}]}, {"id": "667461876", "answers": [{"choice_id": "4385175449"}]}, {"id": "667461897", "answers": [{"choice_id": "4385175534"}]}, {"id": "667461902", "answers": [{"choice_id": "4385175559"}]}]}, {"id": "168831437", "questions": [{"id": "667461933", "answers": [{"choice_id": "4385175735"}]}, {"id": "667461934", "answers": [{"choice_id": "4385175737"}]}, {"id": "667461936", "answers": [{"choice_id": "4385175741"}]}, {"id": "667461937", "answers": [{"choice_id": "4385175752"}]}, {"id": "667461986", "answers": [{"choice_id": "4385175880"}]}]}]}, "emitted_at": 1690184766584} -{"stream": "survey_responses", "data": {"id": "12731228560", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843657", "survey_id": "307785429", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=vbsxUKDxxVbnJW7Gng6Y5VblTyjW_2F29grieRYQImUaIVF77GuhY3KDlqPsNfIlx6", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxoENnL6uEj_2FlAo5YSBwashU_3D?respondent_id=12731228560", "total_time": 31, "date_modified": "2021-06-10T10:12:51+00:00", "date_created": "2021-06-10T10:12:19+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785429/responses/12731228560", "pages": [{"id": "168831413", "questions": []}, {"id": "168831415", "questions": [{"id": "667461858", "answers": [{"choice_id": "4385175369"}]}, {"id": "667461861", "answers": [{"choice_id": "4385175380"}]}, {"id": "667461876", "answers": [{"choice_id": "4385175449"}]}, {"id": "667461897", "answers": [{"choice_id": "4385175534"}]}, {"id": "667461902", "answers": [{"choice_id": "4385175561"}]}]}, {"id": "168831437", "questions": [{"id": "667461933", "answers": [{"choice_id": "4385175735"}]}, {"id": "667461934", "answers": [{"choice_id": "4385175737"}]}, {"id": "667461936", "answers": [{"choice_id": "4385175741"}]}, {"id": "667461937", "answers": [{"choice_id": "4385175749"}]}, {"id": "667461986", "answers": [{"choice_id": "4385175882"}]}]}]}, "emitted_at": 1690184766585} -{"stream": "survey_responses", "data": {"id": "12731247619", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843657", "survey_id": "307785429", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=8lyrgf9sVRbhXAHse5glKYEJoYMvqlR2TNQIhI8Ycw716W_2FHlQeX6Ru4SKObInXR", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxoENnL6uEj_2FlAo5YSBwashU_3D?respondent_id=12731247619", "total_time": 31, "date_modified": "2021-06-10T10:20:41+00:00", "date_created": "2021-06-10T10:20:09+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785429/responses/12731247619", "pages": [{"id": "168831413", "questions": []}, {"id": "168831415", "questions": [{"id": "667461858", "answers": [{"choice_id": "4385175366"}]}, {"id": "667461861", "answers": [{"choice_id": "4385175382"}]}, {"id": "667461876", "answers": [{"choice_id": "4385175448"}]}, {"id": "667461897", "answers": [{"choice_id": "4385175535"}]}, {"id": "667461902", "answers": [{"choice_id": "4385175563"}]}]}, {"id": "168831437", "questions": [{"id": "667461933", "answers": [{"choice_id": "4385175734"}]}, {"id": "667461934", "answers": [{"choice_id": "4385175736"}]}, {"id": "667461936", "answers": [{"choice_id": "4385175740"}]}, {"id": "667461937", "answers": [{"choice_id": "4385175747"}]}, {"id": "667461986", "answers": [{"choice_id": "4385175881"}]}]}]}, "emitted_at": 1690184766587} -{"stream": "survey_responses", "data": {"id": "12731266056", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843657", "survey_id": "307785429", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=tkqgGP2ApNTakGiA0JrfYbKgIZElTj6_2FPaJ_2FDj0QNjpJDKCexid0Z_2Bq8vZoam1vK", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxoENnL6uEj_2FlAo5YSBwashU_3D?respondent_id=12731266056", "total_time": 31, "date_modified": "2021-06-10T10:28:20+00:00", "date_created": "2021-06-10T10:27:49+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785429/responses/12731266056", "pages": [{"id": "168831413", "questions": []}, {"id": "168831415", "questions": [{"id": "667461858", "answers": [{"choice_id": "4385175369"}]}, {"id": "667461861", "answers": [{"choice_id": "4385175381"}]}, {"id": "667461876", "answers": [{"choice_id": "4385175449"}]}, {"id": "667461897", "answers": [{"choice_id": "4385175534"}]}, {"id": "667461902", "answers": [{"choice_id": "4385175563"}]}]}, {"id": "168831437", "questions": [{"id": "667461933", "answers": [{"choice_id": "4385175732"}]}, {"id": "667461934", "answers": [{"choice_id": "4385175738"}]}, {"id": "667461936", "answers": [{"choice_id": "4385175740"}]}, {"id": "667461937", "answers": [{"choice_id": "4385175747"}]}, {"id": "667461986", "answers": [{"choice_id": "4385175882"}]}]}]}, "emitted_at": 1690184766588} -{"stream": "survey_responses", "data": {"id": "12731286200", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843657", "survey_id": "307785429", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=nYsBr3IsP19O_2BhTeV_2BWGnAdXc_2FFsnqMT6maJ0BS3QD9FqjkWLrNYzzEqKsT7c191", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxoENnL6uEj_2FlAo5YSBwashU_3D?respondent_id=12731286200", "total_time": 35, "date_modified": "2021-06-10T10:36:27+00:00", "date_created": "2021-06-10T10:35:52+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785429/responses/12731286200", "pages": [{"id": "168831413", "questions": []}, {"id": "168831415", "questions": [{"id": "667461858", "answers": [{"choice_id": "4385175367"}]}, {"id": "667461861", "answers": [{"choice_id": "4385175382"}]}, {"id": "667461876", "answers": [{"choice_id": "4385175447"}]}, {"id": "667461897", "answers": [{"choice_id": "4385175534"}]}, {"id": "667461902", "answers": [{"choice_id": "4385175564"}]}]}, {"id": "168831437", "questions": [{"id": "667461933", "answers": [{"choice_id": "4385175735"}]}, {"id": "667461934", "answers": [{"choice_id": "4385175738"}]}, {"id": "667461936", "answers": [{"choice_id": "4385175741"}]}, {"id": "667461937", "answers": [{"choice_id": "4385175752"}]}, {"id": "667461986", "answers": [{"choice_id": "4385175878"}]}]}]}, "emitted_at": 1690184766589} -{"stream": "survey_responses", "data": {"id": "12731305366", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843657", "survey_id": "307785429", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=iIxE7kshwG8vR6T6c2D0swVsNTfDLqcBVkfrnf_2FGZBTuoHjMm9ksd3LbOIXuF6lp", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxoENnL6uEj_2FlAo5YSBwashU_3D?respondent_id=12731305366", "total_time": 34, "date_modified": "2021-06-10T10:44:10+00:00", "date_created": "2021-06-10T10:43:36+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785429/responses/12731305366", "pages": [{"id": "168831413", "questions": []}, {"id": "168831415", "questions": [{"id": "667461858", "answers": [{"choice_id": "4385175366"}]}, {"id": "667461861", "answers": [{"choice_id": "4385175380"}]}, {"id": "667461876", "answers": [{"choice_id": "4385175447"}]}, {"id": "667461897", "answers": [{"choice_id": "4385175533"}]}, {"id": "667461902", "answers": [{"choice_id": "4385175559"}]}]}, {"id": "168831437", "questions": [{"id": "667461933", "answers": [{"choice_id": "4385175732"}]}, {"id": "667461934", "answers": [{"choice_id": "4385175736"}]}, {"id": "667461936", "answers": [{"choice_id": "4385175741"}]}, {"id": "667461937", "answers": [{"choice_id": "4385175749"}]}, {"id": "667461986", "answers": [{"choice_id": "4385175879"}]}]}]}, "emitted_at": 1690184766591} -{"stream": "survey_responses", "data": {"id": "12731325134", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843657", "survey_id": "307785429", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=cUq2_2BSagT6pk152_2BsGjjMAAQ2Qq0R0cxleIxEdHEEVKwV5oPCvktmmnTV82pa5S_2F", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxoENnL6uEj_2FlAo5YSBwashU_3D?respondent_id=12731325134", "total_time": 31, "date_modified": "2021-06-10T10:52:02+00:00", "date_created": "2021-06-10T10:51:30+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785429/responses/12731325134", "pages": [{"id": "168831413", "questions": []}, {"id": "168831415", "questions": [{"id": "667461858", "answers": [{"choice_id": "4385175369"}]}, {"id": "667461861", "answers": [{"choice_id": "4385175382"}]}, {"id": "667461876", "answers": [{"choice_id": "4385175447"}]}, {"id": "667461897", "answers": [{"choice_id": "4385175535"}]}, {"id": "667461902", "answers": [{"choice_id": "4385175559"}]}]}, {"id": "168831437", "questions": [{"id": "667461933", "answers": [{"choice_id": "4385175735"}]}, {"id": "667461934", "answers": [{"choice_id": "4385175739"}]}, {"id": "667461936", "answers": [{"choice_id": "4385175742"}]}, {"id": "667461937", "answers": [{"choice_id": "4385175751"}]}, {"id": "667461986", "answers": [{"choice_id": "4385175878"}]}]}]}, "emitted_at": 1690184766592} -{"stream": "survey_responses", "data": {"id": "12731344038", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843657", "survey_id": "307785429", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=F_2BuWfRunQKdf1g0xuu4mTzXMCAlXia21_2BnF47p_2FbjU4QII1hLaE3Xo6tWdfbzKqr", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxoENnL6uEj_2FlAo5YSBwashU_3D?respondent_id=12731344038", "total_time": 31, "date_modified": "2021-06-10T10:59:42+00:00", "date_created": "2021-06-10T10:59:10+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785429/responses/12731344038", "pages": [{"id": "168831413", "questions": []}, {"id": "168831415", "questions": [{"id": "667461858", "answers": [{"choice_id": "4385175368"}]}, {"id": "667461861", "answers": [{"choice_id": "4385175381"}]}, {"id": "667461876", "answers": [{"choice_id": "4385175448"}]}, {"id": "667461897", "answers": [{"choice_id": "4385175534"}]}, {"id": "667461902", "answers": [{"choice_id": "4385175563"}]}]}, {"id": "168831437", "questions": [{"id": "667461933", "answers": [{"choice_id": "4385175733"}]}, {"id": "667461934", "answers": [{"choice_id": "4385175739"}]}, {"id": "667461936", "answers": [{"choice_id": "4385175740"}]}, {"id": "667461937", "answers": [{"choice_id": "4385175750"}]}, {"id": "667461986", "answers": [{"choice_id": "4385175880"}]}]}]}, "emitted_at": 1690184766593} -{"stream": "survey_responses", "data": {"id": "12731042086", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843665", "survey_id": "307785444", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=SvCebsqkF1mO3kCZX7XsmQ4ABz0LFCf_2FyW5N2JOLuGh5ixjzbj2i04SarOUiUgPa", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxrQYdYnKpzayiBZVWsmE3jE_3D?respondent_id=12731042086", "total_time": 31, "date_modified": "2021-06-10T08:47:30+00:00", "date_created": "2021-06-10T08:46:58+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785444/responses/12731042086", "pages": [{"id": "168831459", "questions": []}, {"id": "168831461", "questions": [{"id": "667462078", "answers": [{"choice_id": "4385176724"}]}, {"id": "667462079", "answers": [{"choice_id": "4385176728"}]}, {"id": "667462082", "answers": [{"choice_id": "4385176739"}]}, {"id": "667462084", "answers": [{"choice_id": "4385176764"}]}, {"id": "667462086", "answers": [{"choice_id": "4385176919"}]}]}, {"id": "168831467", "questions": [{"id": "667462094", "answers": [{"choice_id": "4385176960"}]}, {"id": "667462096", "answers": [{"choice_id": "4385176970"}]}, {"id": "667462099", "answers": [{"choice_id": "4385176977"}]}, {"id": "667462100", "answers": [{"choice_id": "4385176987"}]}, {"id": "667462102", "answers": [{"choice_id": "4385177005"}]}]}]}, "emitted_at": 1690184767383} -{"stream": "survey_responses", "data": {"id": "12731056238", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843665", "survey_id": "307785444", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=r7v44AE_2Bx_2Fam5dgCBnlUkUkL0aSWyzhJkIPWmmYa5VnqMtd4X2DBzf9U9erpnCvI", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxrQYdYnKpzayiBZVWsmE3jE_3D?respondent_id=12731056238", "total_time": 31, "date_modified": "2021-06-10T08:55:13+00:00", "date_created": "2021-06-10T08:54:41+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785444/responses/12731056238", "pages": [{"id": "168831459", "questions": []}, {"id": "168831461", "questions": [{"id": "667462078", "answers": [{"choice_id": "4385176723"}]}, {"id": "667462079", "answers": [{"choice_id": "4385176729"}]}, {"id": "667462082", "answers": [{"choice_id": "4385176738"}]}, {"id": "667462084", "answers": [{"choice_id": "4385176763"}]}, {"id": "667462086", "answers": [{"choice_id": "4385176919"}]}]}, {"id": "168831467", "questions": [{"id": "667462094", "answers": [{"choice_id": "4385176962"}]}, {"id": "667462096", "answers": [{"choice_id": "4385176970"}]}, {"id": "667462099", "answers": [{"choice_id": "4385176979"}]}, {"id": "667462100", "answers": [{"choice_id": "4385176986"}]}, {"id": "667462102", "answers": [{"choice_id": "4385177005"}]}]}]}, "emitted_at": 1690184767383} -{"stream": "survey_responses", "data": {"id": "12731070937", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843665", "survey_id": "307785444", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=_2FuS4ZECIFCnV4LLPEe6gryzUPrf_2FMB8MB51Cv5cU6IQGsWb_2FRIb0BOaRXRodLTAq", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxrQYdYnKpzayiBZVWsmE3jE_3D?respondent_id=12731070937", "total_time": 32, "date_modified": "2021-06-10T09:02:57+00:00", "date_created": "2021-06-10T09:02:25+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785444/responses/12731070937", "pages": [{"id": "168831459", "questions": []}, {"id": "168831461", "questions": [{"id": "667462078", "answers": [{"choice_id": "4385176724"}]}, {"id": "667462079", "answers": [{"choice_id": "4385176728"}]}, {"id": "667462082", "answers": [{"choice_id": "4385176735"}]}, {"id": "667462084", "answers": [{"choice_id": "4385176767"}]}, {"id": "667462086", "answers": [{"choice_id": "4385176920"}]}]}, {"id": "168831467", "questions": [{"id": "667462094", "answers": [{"choice_id": "4385176961"}]}, {"id": "667462096", "answers": [{"choice_id": "4385176975"}]}, {"id": "667462099", "answers": [{"choice_id": "4385176978"}]}, {"id": "667462100", "answers": [{"choice_id": "4385176990"}]}, {"id": "667462102", "answers": [{"choice_id": "4385177007"}]}]}]}, "emitted_at": 1690184767383} -{"stream": "survey_responses", "data": {"id": "12731087215", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843665", "survey_id": "307785444", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=eSF0qBZZ3aR1SHBtkTZz83mgEBvJuBfou_2BJGBoFPbxv5bluXZuIMKDaGn5x5xba5", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxrQYdYnKpzayiBZVWsmE3jE_3D?respondent_id=12731087215", "total_time": 31, "date_modified": "2021-06-10T09:10:42+00:00", "date_created": "2021-06-10T09:10:10+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785444/responses/12731087215", "pages": [{"id": "168831459", "questions": []}, {"id": "168831461", "questions": [{"id": "667462078", "answers": [{"choice_id": "4385176721"}]}, {"id": "667462079", "answers": [{"choice_id": "4385176727"}]}, {"id": "667462082", "answers": [{"choice_id": "4385176736"}]}, {"id": "667462084", "answers": [{"choice_id": "4385176763"}]}, {"id": "667462086", "answers": [{"choice_id": "4385176918"}]}]}, {"id": "168831467", "questions": [{"id": "667462094", "answers": [{"choice_id": "4385176962"}]}, {"id": "667462096", "answers": [{"choice_id": "4385176972"}]}, {"id": "667462099", "answers": [{"choice_id": "4385176983"}]}, {"id": "667462100", "answers": [{"choice_id": "4385176988"}]}, {"id": "667462102", "answers": [{"choice_id": "4385177007"}]}]}]}, "emitted_at": 1690184767384} -{"stream": "survey_responses", "data": {"id": "12731103402", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843665", "survey_id": "307785444", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=QOikOvuCCRFkDsJoRozKO9_2BNPZmp7mliotl5xt4QnStPgBKgrvz6GZ7vdHXf3eMG", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxrQYdYnKpzayiBZVWsmE3jE_3D?respondent_id=12731103402", "total_time": 31, "date_modified": "2021-06-10T09:18:22+00:00", "date_created": "2021-06-10T09:17:50+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785444/responses/12731103402", "pages": [{"id": "168831459", "questions": []}, {"id": "168831461", "questions": [{"id": "667462078", "answers": [{"choice_id": "4385176721"}]}, {"id": "667462079", "answers": [{"choice_id": "4385176729"}]}, {"id": "667462082", "answers": [{"choice_id": "4385176736"}]}, {"id": "667462084", "answers": [{"choice_id": "4385176764"}]}, {"id": "667462086", "answers": [{"choice_id": "4385176921"}]}]}, {"id": "168831467", "questions": [{"id": "667462094", "answers": [{"choice_id": "4385176962"}]}, {"id": "667462096", "answers": [{"choice_id": "4385176974"}]}, {"id": "667462099", "answers": [{"choice_id": "4385176978"}]}, {"id": "667462100", "answers": [{"choice_id": "4385176987"}]}, {"id": "667462102", "answers": [{"choice_id": "4385177002"}]}]}]}, "emitted_at": 1690184767384} -{"stream": "survey_responses", "data": {"id": "12731120214", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843665", "survey_id": "307785444", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=sV4Oq2KXvMuxWQpflbd5L_2FyoB6ImaDnm9BFwJGk9W_2BwE7YWlvW6MsxaqaeWWY_2Fzh", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxrQYdYnKpzayiBZVWsmE3jE_3D?respondent_id=12731120214", "total_time": 31, "date_modified": "2021-06-10T09:26:01+00:00", "date_created": "2021-06-10T09:25:29+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785444/responses/12731120214", "pages": [{"id": "168831459", "questions": []}, {"id": "168831461", "questions": [{"id": "667462078", "answers": [{"choice_id": "4385176723"}]}, {"id": "667462079", "answers": [{"choice_id": "4385176728"}]}, {"id": "667462082", "answers": [{"choice_id": "4385176734"}]}, {"id": "667462084", "answers": [{"choice_id": "4385176765"}]}, {"id": "667462086", "answers": [{"choice_id": "4385176920"}]}]}, {"id": "168831467", "questions": [{"id": "667462094", "answers": [{"choice_id": "4385176960"}]}, {"id": "667462096", "answers": [{"choice_id": "4385176968"}]}, {"id": "667462099", "answers": [{"choice_id": "4385176976"}]}, {"id": "667462100", "answers": [{"choice_id": "4385176987"}]}, {"id": "667462102", "answers": [{"choice_id": "4385177009"}]}]}]}, "emitted_at": 1690184767384} -{"stream": "survey_responses", "data": {"id": "12731137499", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843665", "survey_id": "307785444", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=Hy3W8rkVoiYFKw_2BdJM6cChYRJWAXaGTQTol4ykCh_2FxYDhPmBpC753rbLshVSZjNE", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxrQYdYnKpzayiBZVWsmE3jE_3D?respondent_id=12731137499", "total_time": 31, "date_modified": "2021-06-10T09:33:45+00:00", "date_created": "2021-06-10T09:33:13+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785444/responses/12731137499", "pages": [{"id": "168831459", "questions": []}, {"id": "168831461", "questions": [{"id": "667462078", "answers": [{"choice_id": "4385176721"}]}, {"id": "667462079", "answers": [{"choice_id": "4385176729"}]}, {"id": "667462082", "answers": [{"choice_id": "4385176738"}]}, {"id": "667462084", "answers": [{"choice_id": "4385176766"}]}, {"id": "667462086", "answers": [{"choice_id": "4385176920"}]}]}, {"id": "168831467", "questions": [{"id": "667462094", "answers": [{"choice_id": "4385176961"}]}, {"id": "667462096", "answers": [{"choice_id": "4385176970"}]}, {"id": "667462099", "answers": [{"choice_id": "4385176982"}]}, {"id": "667462100", "answers": [{"choice_id": "4385176988"}]}, {"id": "667462102", "answers": [{"choice_id": "4385177008"}]}]}]}, "emitted_at": 1690184767385} -{"stream": "survey_responses", "data": {"id": "12731154912", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843665", "survey_id": "307785444", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=BLjiuDSDI39GvYDURd_2FmEfCd9jRrZ5nLwAgWG65zJj2dA1DskdMkMBHVZjzLHkk6", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxrQYdYnKpzayiBZVWsmE3jE_3D?respondent_id=12731154912", "total_time": 31, "date_modified": "2021-06-10T09:41:24+00:00", "date_created": "2021-06-10T09:40:52+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785444/responses/12731154912", "pages": [{"id": "168831459", "questions": []}, {"id": "168831461", "questions": [{"id": "667462078", "answers": [{"choice_id": "4385176723"}]}, {"id": "667462079", "answers": [{"choice_id": "4385176728"}]}, {"id": "667462082", "answers": [{"choice_id": "4385176738"}]}, {"id": "667462084", "answers": [{"choice_id": "4385176764"}]}, {"id": "667462086", "answers": [{"choice_id": "4385176921"}]}]}, {"id": "168831467", "questions": [{"id": "667462094", "answers": [{"choice_id": "4385176963"}]}, {"id": "667462096", "answers": [{"choice_id": "4385176970"}]}, {"id": "667462099", "answers": [{"choice_id": "4385176977"}]}, {"id": "667462100", "answers": [{"choice_id": "4385176988"}]}, {"id": "667462102", "answers": [{"choice_id": "4385177006"}]}]}]}, "emitted_at": 1690184767385} -{"stream": "survey_responses", "data": {"id": "12731172230", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843665", "survey_id": "307785444", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=jiMm_2FJVQj8fPN6i2HlASqxeTd4rx_2FcPoCLMAznqiCWgY_2B98x39SfA1kKHTo8CgEC", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxrQYdYnKpzayiBZVWsmE3jE_3D?respondent_id=12731172230", "total_time": 31, "date_modified": "2021-06-10T09:49:12+00:00", "date_created": "2021-06-10T09:48:40+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785444/responses/12731172230", "pages": [{"id": "168831459", "questions": []}, {"id": "168831461", "questions": [{"id": "667462078", "answers": [{"choice_id": "4385176723"}]}, {"id": "667462079", "answers": [{"choice_id": "4385176728"}]}, {"id": "667462082", "answers": [{"choice_id": "4385176737"}]}, {"id": "667462084", "answers": [{"choice_id": "4385176767"}]}, {"id": "667462086", "answers": [{"choice_id": "4385176921"}]}]}, {"id": "168831467", "questions": [{"id": "667462094", "answers": [{"choice_id": "4385176963"}]}, {"id": "667462096", "answers": [{"choice_id": "4385176972"}]}, {"id": "667462099", "answers": [{"choice_id": "4385176977"}]}, {"id": "667462100", "answers": [{"choice_id": "4385176990"}]}, {"id": "667462102", "answers": [{"choice_id": "4385177006"}]}]}]}, "emitted_at": 1690184767385} -{"stream": "survey_responses", "data": {"id": "12731190528", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843665", "survey_id": "307785444", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=VrEu2qgFZYPclcWCzyV5DO0WUUCjOIMFC77k1XLOofwOqYD5vHAej03viembeuF8", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxrQYdYnKpzayiBZVWsmE3jE_3D?respondent_id=12731190528", "total_time": 35, "date_modified": "2021-06-10T09:57:32+00:00", "date_created": "2021-06-10T09:56:57+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785444/responses/12731190528", "pages": [{"id": "168831459", "questions": []}, {"id": "168831461", "questions": [{"id": "667462078", "answers": [{"choice_id": "4385176725"}]}, {"id": "667462079", "answers": [{"choice_id": "4385176729"}]}, {"id": "667462082", "answers": [{"choice_id": "4385176739"}]}, {"id": "667462084", "answers": [{"choice_id": "4385176763"}]}, {"id": "667462086", "answers": [{"choice_id": "4385176920"}]}]}, {"id": "168831467", "questions": [{"id": "667462094", "answers": [{"choice_id": "4385176959"}]}, {"id": "667462096", "answers": [{"choice_id": "4385176972"}]}, {"id": "667462099", "answers": [{"choice_id": "4385176978"}]}, {"id": "667462100", "answers": [{"choice_id": "4385176987"}]}, {"id": "667462102", "answers": [{"choice_id": "4385177004"}]}]}]}, "emitted_at": 1690184767386} -{"stream": "survey_responses", "data": {"id": "12731210366", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843665", "survey_id": "307785444", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=xOrc7mbcYE0NFiqzEpenJPpJJ1OHiCt_2FUDSohMb9nmqttp0v1il2MNjb_2BRGE177T", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxrQYdYnKpzayiBZVWsmE3jE_3D?respondent_id=12731210366", "total_time": 33, "date_modified": "2021-06-10T10:05:40+00:00", "date_created": "2021-06-10T10:05:06+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785444/responses/12731210366", "pages": [{"id": "168831459", "questions": []}, {"id": "168831461", "questions": [{"id": "667462078", "answers": [{"choice_id": "4385176722"}]}, {"id": "667462079", "answers": [{"choice_id": "4385176728"}]}, {"id": "667462082", "answers": [{"choice_id": "4385176739"}]}, {"id": "667462084", "answers": [{"choice_id": "4385176763"}]}, {"id": "667462086", "answers": [{"choice_id": "4385176919"}]}]}, {"id": "168831467", "questions": [{"id": "667462094", "answers": [{"choice_id": "4385176961"}]}, {"id": "667462096", "answers": [{"choice_id": "4385176971"}]}, {"id": "667462099", "answers": [{"choice_id": "4385176977"}]}, {"id": "667462100", "answers": [{"choice_id": "4385176986"}]}, {"id": "667462102", "answers": [{"choice_id": "4385177003"}]}]}]}, "emitted_at": 1690184767386} -{"stream": "survey_responses", "data": {"id": "12731230116", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843665", "survey_id": "307785444", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=xmmEsbDmCA6Nvi6PKgkHAZR5hqRDTYPELB_2BIyMCUzF63brYH0R2ZrSJb9f_2BXtWRa", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxrQYdYnKpzayiBZVWsmE3jE_3D?respondent_id=12731230116", "total_time": 32, "date_modified": "2021-06-10T10:13:28+00:00", "date_created": "2021-06-10T10:12:55+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785444/responses/12731230116", "pages": [{"id": "168831459", "questions": []}, {"id": "168831461", "questions": [{"id": "667462078", "answers": [{"choice_id": "4385176724"}]}, {"id": "667462079", "answers": [{"choice_id": "4385176727"}]}, {"id": "667462082", "answers": [{"choice_id": "4385176735"}]}, {"id": "667462084", "answers": [{"choice_id": "4385176765"}]}, {"id": "667462086", "answers": [{"choice_id": "4385176920"}]}]}, {"id": "168831467", "questions": [{"id": "667462094", "answers": [{"choice_id": "4385176959"}]}, {"id": "667462096", "answers": [{"choice_id": "4385176973"}]}, {"id": "667462099", "answers": [{"choice_id": "4385176977"}]}, {"id": "667462100", "answers": [{"choice_id": "4385176988"}]}, {"id": "667462102", "answers": [{"choice_id": "4385177007"}]}]}]}, "emitted_at": 1690184767386} -{"stream": "survey_responses", "data": {"id": "12731249077", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843665", "survey_id": "307785444", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=1cQksQ2IA1zuU8mpR9JawSnSqr2zaPV1juVQ7ZEPrEqxr231kL_2F2eIW48UokyJBm", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxrQYdYnKpzayiBZVWsmE3jE_3D?respondent_id=12731249077", "total_time": 31, "date_modified": "2021-06-10T10:21:17+00:00", "date_created": "2021-06-10T10:20:46+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785444/responses/12731249077", "pages": [{"id": "168831459", "questions": []}, {"id": "168831461", "questions": [{"id": "667462078", "answers": [{"choice_id": "4385176724"}]}, {"id": "667462079", "answers": [{"choice_id": "4385176729"}]}, {"id": "667462082", "answers": [{"choice_id": "4385176735"}]}, {"id": "667462084", "answers": [{"choice_id": "4385176764"}]}, {"id": "667462086", "answers": [{"choice_id": "4385176921"}]}]}, {"id": "168831467", "questions": [{"id": "667462094", "answers": [{"choice_id": "4385176963"}]}, {"id": "667462096", "answers": [{"choice_id": "4385176968"}]}, {"id": "667462099", "answers": [{"choice_id": "4385176982"}]}, {"id": "667462100", "answers": [{"choice_id": "4385176989"}]}, {"id": "667462102", "answers": [{"choice_id": "4385177008"}]}]}]}, "emitted_at": 1690184767387} -{"stream": "survey_responses", "data": {"id": "12731267503", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843665", "survey_id": "307785444", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=zOrptpJoo7QbXn1DoS0HIo4GcoAK8cIZ83MYidVo_2FuK_2FDvnnrsXK2SDzhyjssMdI", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxrQYdYnKpzayiBZVWsmE3jE_3D?respondent_id=12731267503", "total_time": 31, "date_modified": "2021-06-10T10:28:56+00:00", "date_created": "2021-06-10T10:28:25+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785444/responses/12731267503", "pages": [{"id": "168831459", "questions": []}, {"id": "168831461", "questions": [{"id": "667462078", "answers": [{"choice_id": "4385176721"}]}, {"id": "667462079", "answers": [{"choice_id": "4385176727"}]}, {"id": "667462082", "answers": [{"choice_id": "4385176736"}]}, {"id": "667462084", "answers": [{"choice_id": "4385176765"}]}, {"id": "667462086", "answers": [{"choice_id": "4385176921"}]}]}, {"id": "168831467", "questions": [{"id": "667462094", "answers": [{"choice_id": "4385176961"}]}, {"id": "667462096", "answers": [{"choice_id": "4385176970"}]}, {"id": "667462099", "answers": [{"choice_id": "4385176983"}]}, {"id": "667462100", "answers": [{"choice_id": "4385176986"}]}, {"id": "667462102", "answers": [{"choice_id": "4385177003"}]}]}]}, "emitted_at": 1690184767387} -{"stream": "survey_responses", "data": {"id": "12731287789", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843665", "survey_id": "307785444", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=zXARfqNZgptvNWp9PboSVlElugTboBiAj_2FSG_2BkbD8e8OUJGsJ8FMRpMFap0qzcYY", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxrQYdYnKpzayiBZVWsmE3jE_3D?respondent_id=12731287789", "total_time": 31, "date_modified": "2021-06-10T10:37:03+00:00", "date_created": "2021-06-10T10:36:32+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785444/responses/12731287789", "pages": [{"id": "168831459", "questions": []}, {"id": "168831461", "questions": [{"id": "667462078", "answers": [{"choice_id": "4385176726"}]}, {"id": "667462079", "answers": [{"choice_id": "4385176728"}]}, {"id": "667462082", "answers": [{"choice_id": "4385176735"}]}, {"id": "667462084", "answers": [{"choice_id": "4385176763"}]}, {"id": "667462086", "answers": [{"choice_id": "4385176918"}]}]}, {"id": "168831467", "questions": [{"id": "667462094", "answers": [{"choice_id": "4385176965"}]}, {"id": "667462096", "answers": [{"choice_id": "4385176971"}]}, {"id": "667462099", "answers": [{"choice_id": "4385176981"}]}, {"id": "667462100", "answers": [{"choice_id": "4385176989"}]}, {"id": "667462102", "answers": [{"choice_id": "4385177004"}]}]}]}, "emitted_at": 1690184767387} -{"stream": "survey_responses", "data": {"id": "12731307187", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843665", "survey_id": "307785444", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=d07mxUJIp3BjkeFrp8gTQvz67vJrBUQWNX2tzrHPvkrZ0piZOPTQDKl_2BrNNjOvJO", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxrQYdYnKpzayiBZVWsmE3jE_3D?respondent_id=12731307187", "total_time": 35, "date_modified": "2021-06-10T10:44:49+00:00", "date_created": "2021-06-10T10:44:14+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785444/responses/12731307187", "pages": [{"id": "168831459", "questions": []}, {"id": "168831461", "questions": [{"id": "667462078", "answers": [{"choice_id": "4385176724"}]}, {"id": "667462079", "answers": [{"choice_id": "4385176727"}]}, {"id": "667462082", "answers": [{"choice_id": "4385176734"}]}, {"id": "667462084", "answers": [{"choice_id": "4385176765"}]}, {"id": "667462086", "answers": [{"choice_id": "4385176921"}]}]}, {"id": "168831467", "questions": [{"id": "667462094", "answers": [{"choice_id": "4385176964"}]}, {"id": "667462096", "answers": [{"choice_id": "4385176975"}]}, {"id": "667462099", "answers": [{"choice_id": "4385176982"}]}, {"id": "667462100", "answers": [{"choice_id": "4385176986"}]}, {"id": "667462102", "answers": [{"choice_id": "4385177004"}]}]}]}, "emitted_at": 1690184767388} -{"stream": "survey_responses", "data": {"id": "12731326595", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843665", "survey_id": "307785444", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=BCy4vPYrkiObJMS9B6GjZD2jwLTDr1luSZYtqGxH8zxGvmteSPixhqGNTTdp_2BRwb", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxrQYdYnKpzayiBZVWsmE3jE_3D?respondent_id=12731326595", "total_time": 32, "date_modified": "2021-06-10T10:52:38+00:00", "date_created": "2021-06-10T10:52:05+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785444/responses/12731326595", "pages": [{"id": "168831459", "questions": []}, {"id": "168831461", "questions": [{"id": "667462078", "answers": [{"choice_id": "4385176722"}]}, {"id": "667462079", "answers": [{"choice_id": "4385176728"}]}, {"id": "667462082", "answers": [{"choice_id": "4385176739"}]}, {"id": "667462084", "answers": [{"choice_id": "4385176767"}]}, {"id": "667462086", "answers": [{"choice_id": "4385176918"}]}]}, {"id": "168831467", "questions": [{"id": "667462094", "answers": [{"choice_id": "4385176959"}]}, {"id": "667462096", "answers": [{"choice_id": "4385176974"}]}, {"id": "667462099", "answers": [{"choice_id": "4385176981"}]}, {"id": "667462100", "answers": [{"choice_id": "4385176990"}]}, {"id": "667462102", "answers": [{"choice_id": "4385177003"}]}]}]}, "emitted_at": 1690184767388} -{"stream": "survey_responses", "data": {"id": "12731345509", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843665", "survey_id": "307785444", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=KmG9rTOuf2GcFWGtNhaBIfC5S80jehYrziix2nmuPzJw9_2FzLpaENGb_2F8j4NkPfTJ", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxrQYdYnKpzayiBZVWsmE3jE_3D?respondent_id=12731345509", "total_time": 31, "date_modified": "2021-06-10T11:00:18+00:00", "date_created": "2021-06-10T10:59:46+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785444/responses/12731345509", "pages": [{"id": "168831459", "questions": []}, {"id": "168831461", "questions": [{"id": "667462078", "answers": [{"choice_id": "4385176725"}]}, {"id": "667462079", "answers": [{"choice_id": "4385176729"}]}, {"id": "667462082", "answers": [{"choice_id": "4385176734"}]}, {"id": "667462084", "answers": [{"choice_id": "4385176766"}]}, {"id": "667462086", "answers": [{"choice_id": "4385176919"}]}]}, {"id": "168831467", "questions": [{"id": "667462094", "answers": [{"choice_id": "4385176964"}]}, {"id": "667462096", "answers": [{"choice_id": "4385176968"}]}, {"id": "667462099", "answers": [{"choice_id": "4385176983"}]}, {"id": "667462100", "answers": [{"choice_id": "4385176991"}]}, {"id": "667462102", "answers": [{"choice_id": "4385177005"}]}]}]}, "emitted_at": 1690184767388} -{"stream": "survey_responses", "data": {"id": "12731043150", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843672", "survey_id": "307785394", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=LKTTnPMSZ1aoM4WL0f1Ja7_2FdIZyTQ0DexNwU_2FFgEeZeT6B9T2m2HJuQjxPHn2OpZ", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5QHdVgvFd_2Bn4fvmj_2F1aNt3d6AR0J1aeeTU4KsCULsx8_3D?respondent_id=12731043150", "total_time": 36, "date_modified": "2021-06-10T08:48:12+00:00", "date_created": "2021-06-10T08:47:35+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785394/responses/12731043150", "pages": [{"id": "168831344", "questions": []}, {"id": "168831345", "questions": [{"id": "667461468", "answers": [{"choice_id": "4385173227"}]}, {"id": "667461471", "answers": [{"choice_id": "4385173238"}]}, {"id": "667461473", "answers": [{"choice_id": "4385173258"}]}, {"id": "667461476", "answers": [{"choice_id": "4385173269"}]}, {"id": "667461498", "answers": [{"choice_id": "4385173412"}]}]}, {"id": "168831352", "questions": [{"id": "667461513", "answers": [{"choice_id": "4385173457"}]}, {"id": "667461516", "answers": [{"choice_id": "4385173477"}]}, {"id": "667461517", "answers": [{"choice_id": "4385173482"}]}, {"id": "667461521", "answers": [{"choice_id": "4385173493"}]}, {"id": "667461526", "answers": [{"choice_id": "4385173525"}]}]}]}, "emitted_at": 1690184768429} -{"stream": "survey_responses", "data": {"id": "12731057303", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843672", "survey_id": "307785394", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=Tj5vPzxUmWLsaqGk6A3N4tXy8F1lrXDh3cgduiFC1tcjk3fKqd5Jc_2FYl9kqbrtXl", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5QHdVgvFd_2Bn4fvmj_2F1aNt3d6AR0J1aeeTU4KsCULsx8_3D?respondent_id=12731057303", "total_time": 35, "date_modified": "2021-06-10T08:55:53+00:00", "date_created": "2021-06-10T08:55:17+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785394/responses/12731057303", "pages": [{"id": "168831344", "questions": []}, {"id": "168831345", "questions": [{"id": "667461468", "answers": [{"choice_id": "4385173227"}]}, {"id": "667461471", "answers": [{"choice_id": "4385173237"}]}, {"id": "667461473", "answers": [{"choice_id": "4385173256"}]}, {"id": "667461476", "answers": [{"choice_id": "4385173270"}]}, {"id": "667461498", "answers": [{"choice_id": "4385173414"}]}]}, {"id": "168831352", "questions": [{"id": "667461513", "answers": [{"choice_id": "4385173457"}]}, {"id": "667461516", "answers": [{"choice_id": "4385173478"}]}, {"id": "667461517", "answers": [{"choice_id": "4385173481"}]}, {"id": "667461521", "answers": [{"choice_id": "4385173494"}]}, {"id": "667461526", "answers": [{"choice_id": "4385173523"}]}]}]}, "emitted_at": 1690184768430} -{"stream": "survey_responses", "data": {"id": "12731072147", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843672", "survey_id": "307785394", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=bAFsDUmEPpCdobkl_2BIl_2F6rdICCatPVmCxnojDaOR9ZJwGzYj7h29WMvC195fRe31", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5QHdVgvFd_2Bn4fvmj_2F1aNt3d6AR0J1aeeTU4KsCULsx8_3D?respondent_id=12731072147", "total_time": 35, "date_modified": "2021-06-10T09:03:38+00:00", "date_created": "2021-06-10T09:03:02+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785394/responses/12731072147", "pages": [{"id": "168831344", "questions": []}, {"id": "168831345", "questions": [{"id": "667461468", "answers": [{"choice_id": "4385173229"}]}, {"id": "667461471", "answers": [{"choice_id": "4385173240"}]}, {"id": "667461473", "answers": [{"choice_id": "4385173257"}]}, {"id": "667461476", "answers": [{"choice_id": "4385173268"}]}, {"id": "667461498", "answers": [{"choice_id": "4385173413"}]}]}, {"id": "168831352", "questions": [{"id": "667461513", "answers": [{"choice_id": "4385173452"}]}, {"id": "667461516", "answers": [{"choice_id": "4385173476"}]}, {"id": "667461517", "answers": [{"choice_id": "4385173483"}]}, {"id": "667461521", "answers": [{"choice_id": "4385173492"}]}, {"id": "667461526", "answers": [{"choice_id": "4385173523"}]}]}]}, "emitted_at": 1690184768430} -{"stream": "survey_responses", "data": {"id": "12731088506", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843672", "survey_id": "307785394", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=MrquTcpA82ItgrSQWG_2BswTK4ClFm9rTuG5GWb85hiLDvaK2dr6F7vrYvOlwqkkC6", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5QHdVgvFd_2Bn4fvmj_2F1aNt3d6AR0J1aeeTU4KsCULsx8_3D?respondent_id=12731088506", "total_time": 35, "date_modified": "2021-06-10T09:11:22+00:00", "date_created": "2021-06-10T09:10:46+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785394/responses/12731088506", "pages": [{"id": "168831344", "questions": []}, {"id": "168831345", "questions": [{"id": "667461468", "answers": [{"choice_id": "4385173227"}]}, {"id": "667461471", "answers": [{"choice_id": "4385173236"}]}, {"id": "667461473", "answers": [{"choice_id": "4385173256"}]}, {"id": "667461476", "answers": [{"choice_id": "4385173269"}]}, {"id": "667461498", "answers": [{"choice_id": "4385173413"}]}]}, {"id": "168831352", "questions": [{"id": "667461513", "answers": [{"choice_id": "4385173458"}]}, {"id": "667461516", "answers": [{"choice_id": "4385173475"}]}, {"id": "667461517", "answers": [{"choice_id": "4385173484"}]}, {"id": "667461521", "answers": [{"choice_id": "4385173492"}]}, {"id": "667461526", "answers": [{"choice_id": "4385173525"}]}]}]}, "emitted_at": 1690184768431} -{"stream": "survey_responses", "data": {"id": "12731104696", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843672", "survey_id": "307785394", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=RO8uULIefm7fQ9VJXKzxtKBfRj4g0rYOE0LRuSA8OT2GVPEFDFQFmSOII7UlshDk", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5QHdVgvFd_2Bn4fvmj_2F1aNt3d6AR0J1aeeTU4KsCULsx8_3D?respondent_id=12731104696", "total_time": 36, "date_modified": "2021-06-10T09:19:03+00:00", "date_created": "2021-06-10T09:18:26+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785394/responses/12731104696", "pages": [{"id": "168831344", "questions": []}, {"id": "168831345", "questions": [{"id": "667461468", "answers": [{"choice_id": "4385173229"}]}, {"id": "667461471", "answers": [{"choice_id": "4385173240"}]}, {"id": "667461473", "answers": [{"choice_id": "4385173256"}]}, {"id": "667461476", "answers": [{"choice_id": "4385173271"}]}, {"id": "667461498", "answers": [{"choice_id": "4385173414"}]}]}, {"id": "168831352", "questions": [{"id": "667461513", "answers": [{"choice_id": "4385173457"}]}, {"id": "667461516", "answers": [{"choice_id": "4385173474"}]}, {"id": "667461517", "answers": [{"choice_id": "4385173481"}]}, {"id": "667461521", "answers": [{"choice_id": "4385173493"}]}, {"id": "667461526", "answers": [{"choice_id": "4385173526"}]}]}]}, "emitted_at": 1690184768431} -{"stream": "survey_responses", "data": {"id": "12731121617", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843672", "survey_id": "307785394", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=Z_2BP_2BwLxa1Cmlcetqz6B1oaPvJSUU_2F5d018eOau5jIs8q4IlOyw7MT9YtxrlZnZHx", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5QHdVgvFd_2Bn4fvmj_2F1aNt3d6AR0J1aeeTU4KsCULsx8_3D?respondent_id=12731121617", "total_time": 37, "date_modified": "2021-06-10T09:26:44+00:00", "date_created": "2021-06-10T09:26:07+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785394/responses/12731121617", "pages": [{"id": "168831344", "questions": []}, {"id": "168831345", "questions": [{"id": "667461468", "answers": [{"choice_id": "4385173228"}]}, {"id": "667461471", "answers": [{"choice_id": "4385173237"}]}, {"id": "667461473", "answers": [{"choice_id": "4385173258"}]}, {"id": "667461476", "answers": [{"choice_id": "4385173268"}]}, {"id": "667461498", "answers": [{"choice_id": "4385173414"}]}]}, {"id": "168831352", "questions": [{"id": "667461513", "answers": [{"choice_id": "4385173458"}]}, {"id": "667461516", "answers": [{"choice_id": "4385173477"}]}, {"id": "667461517", "answers": [{"choice_id": "4385173481"}]}, {"id": "667461521", "answers": [{"choice_id": "4385173493"}]}, {"id": "667461526", "answers": [{"choice_id": "4385173523"}]}]}]}, "emitted_at": 1690184768431} -{"stream": "survey_responses", "data": {"id": "12731139029", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843672", "survey_id": "307785394", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=R0ZtH7VCYs_2FMuOE8hZBLqJdLMcu2RDqvrV04C34ivpIzGYhRzaVoAG0LDF83Ln_2B2", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5QHdVgvFd_2Bn4fvmj_2F1aNt3d6AR0J1aeeTU4KsCULsx8_3D?respondent_id=12731139029", "total_time": 37, "date_modified": "2021-06-10T09:34:27+00:00", "date_created": "2021-06-10T09:33:50+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785394/responses/12731139029", "pages": [{"id": "168831344", "questions": []}, {"id": "168831345", "questions": [{"id": "667461468", "answers": [{"choice_id": "4385173229"}]}, {"id": "667461471", "answers": [{"choice_id": "4385173239"}]}, {"id": "667461473", "answers": [{"choice_id": "4385173256"}]}, {"id": "667461476", "answers": [{"choice_id": "4385173269"}]}, {"id": "667461498", "answers": [{"choice_id": "4385173415"}]}]}, {"id": "168831352", "questions": [{"id": "667461513", "answers": [{"choice_id": "4385173459"}]}, {"id": "667461516", "answers": [{"choice_id": "4385173477"}]}, {"id": "667461517", "answers": [{"choice_id": "4385173482"}]}, {"id": "667461521", "answers": [{"choice_id": "4385173492"}]}, {"id": "667461526", "answers": [{"choice_id": "4385173523"}]}]}]}, "emitted_at": 1690184768432} -{"stream": "survey_responses", "data": {"id": "12731156353", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843672", "survey_id": "307785394", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=t_2BeAHBUtkTES9TALR_2F4aDfYgtWlbqJv08b_2B6EYbplYKefvn8GrYTkoGugKOevrCn", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5QHdVgvFd_2Bn4fvmj_2F1aNt3d6AR0J1aeeTU4KsCULsx8_3D?respondent_id=12731156353", "total_time": 35, "date_modified": "2021-06-10T09:42:06+00:00", "date_created": "2021-06-10T09:41:30+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785394/responses/12731156353", "pages": [{"id": "168831344", "questions": []}, {"id": "168831345", "questions": [{"id": "667461468", "answers": [{"choice_id": "4385173229"}]}, {"id": "667461471", "answers": [{"choice_id": "4385173237"}]}, {"id": "667461473", "answers": [{"choice_id": "4385173256"}]}, {"id": "667461476", "answers": [{"choice_id": "4385173271"}]}, {"id": "667461498", "answers": [{"choice_id": "4385173416"}]}]}, {"id": "168831352", "questions": [{"id": "667461513", "answers": [{"choice_id": "4385173455"}]}, {"id": "667461516", "answers": [{"choice_id": "4385173474"}]}, {"id": "667461517", "answers": [{"choice_id": "4385173481"}]}, {"id": "667461521", "answers": [{"choice_id": "4385173493"}]}, {"id": "667461526", "answers": [{"choice_id": "4385173523"}]}]}]}, "emitted_at": 1690184768432} -{"stream": "survey_responses", "data": {"id": "12731173574", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843672", "survey_id": "307785394", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=mVe08d8mokL6AWRdTdpFz7jaz6dJhmAV4ZlfpEH6B8OQwQXlC2RxAxy2Z_2BtSrLXG", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5QHdVgvFd_2Bn4fvmj_2F1aNt3d6AR0J1aeeTU4KsCULsx8_3D?respondent_id=12731173574", "total_time": 36, "date_modified": "2021-06-10T09:49:53+00:00", "date_created": "2021-06-10T09:49:16+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785394/responses/12731173574", "pages": [{"id": "168831344", "questions": []}, {"id": "168831345", "questions": [{"id": "667461468", "answers": [{"choice_id": "4385173227"}]}, {"id": "667461471", "answers": [{"choice_id": "4385173241"}]}, {"id": "667461473", "answers": [{"choice_id": "4385173258"}]}, {"id": "667461476", "answers": [{"choice_id": "4385173270"}]}, {"id": "667461498", "answers": [{"choice_id": "4385173414"}]}]}, {"id": "168831352", "questions": [{"id": "667461513", "answers": [{"choice_id": "4385173458"}]}, {"id": "667461516", "answers": [{"choice_id": "4385173475"}]}, {"id": "667461517", "answers": [{"choice_id": "4385173486"}]}, {"id": "667461521", "answers": [{"choice_id": "4385173492"}]}, {"id": "667461526", "answers": [{"choice_id": "4385173526"}]}]}]}, "emitted_at": 1690184768432} -{"stream": "survey_responses", "data": {"id": "12731192021", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843672", "survey_id": "307785394", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=XFNknH0sj9IIdSvzYR75kcMldl4TuTz7kma3E7SGri2Rq_2FvcCRLj4ug5HOCnqMQz", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5QHdVgvFd_2Bn4fvmj_2F1aNt3d6AR0J1aeeTU4KsCULsx8_3D?respondent_id=12731192021", "total_time": 39, "date_modified": "2021-06-10T09:58:16+00:00", "date_created": "2021-06-10T09:57:36+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785394/responses/12731192021", "pages": [{"id": "168831344", "questions": []}, {"id": "168831345", "questions": [{"id": "667461468", "answers": [{"choice_id": "4385173227"}]}, {"id": "667461471", "answers": [{"choice_id": "4385173239"}]}, {"id": "667461473", "answers": [{"choice_id": "4385173258"}]}, {"id": "667461476", "answers": [{"choice_id": "4385173270"}]}, {"id": "667461498", "answers": [{"choice_id": "4385173416"}]}]}, {"id": "168831352", "questions": [{"id": "667461513", "answers": [{"choice_id": "4385173456"}]}, {"id": "667461516", "answers": [{"choice_id": "4385173475"}]}, {"id": "667461517", "answers": [{"choice_id": "4385173481"}]}, {"id": "667461521", "answers": [{"choice_id": "4385173494"}]}, {"id": "667461526", "answers": [{"choice_id": "4385173523"}]}]}]}, "emitted_at": 1690184768433} -{"stream": "survey_responses", "data": {"id": "12731211903", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843672", "survey_id": "307785394", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=V3KUOw0NUFH27PiVeUNuTKv_2BkMDveRdHYnln_2FOZcYY0I7L0VIMlksx3CdNizHX_2BP", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5QHdVgvFd_2Bn4fvmj_2F1aNt3d6AR0J1aeeTU4KsCULsx8_3D?respondent_id=12731211903", "total_time": 36, "date_modified": "2021-06-10T10:06:21+00:00", "date_created": "2021-06-10T10:05:44+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785394/responses/12731211903", "pages": [{"id": "168831344", "questions": []}, {"id": "168831345", "questions": [{"id": "667461468", "answers": [{"choice_id": "4385173229"}]}, {"id": "667461471", "answers": [{"choice_id": "4385173239"}]}, {"id": "667461473", "answers": [{"choice_id": "4385173256"}]}, {"id": "667461476", "answers": [{"choice_id": "4385173271"}]}, {"id": "667461498", "answers": [{"choice_id": "4385173416"}]}]}, {"id": "168831352", "questions": [{"id": "667461513", "answers": [{"choice_id": "4385173456"}]}, {"id": "667461516", "answers": [{"choice_id": "4385173477"}]}, {"id": "667461517", "answers": [{"choice_id": "4385173484"}]}, {"id": "667461521", "answers": [{"choice_id": "4385173494"}]}, {"id": "667461526", "answers": [{"choice_id": "4385173526"}]}]}]}, "emitted_at": 1690184768433} -{"stream": "survey_responses", "data": {"id": "12731231636", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843672", "survey_id": "307785394", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=xv8ZLGu_2FUg9VAJ8hvx10oji6MvB1PbiAyA98qWpExvZvSaPKJ7cIFH8nZktkDKH0", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5QHdVgvFd_2Bn4fvmj_2F1aNt3d6AR0J1aeeTU4KsCULsx8_3D?respondent_id=12731231636", "total_time": 36, "date_modified": "2021-06-10T10:14:08+00:00", "date_created": "2021-06-10T10:13:32+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785394/responses/12731231636", "pages": [{"id": "168831344", "questions": []}, {"id": "168831345", "questions": [{"id": "667461468", "answers": [{"choice_id": "4385173226"}]}, {"id": "667461471", "answers": [{"choice_id": "4385173237"}]}, {"id": "667461473", "answers": [{"choice_id": "4385173258"}]}, {"id": "667461476", "answers": [{"choice_id": "4385173270"}]}, {"id": "667461498", "answers": [{"choice_id": "4385173413"}]}]}, {"id": "168831352", "questions": [{"id": "667461513", "answers": [{"choice_id": "4385173454"}]}, {"id": "667461516", "answers": [{"choice_id": "4385173475"}]}, {"id": "667461517", "answers": [{"choice_id": "4385173486"}]}, {"id": "667461521", "answers": [{"choice_id": "4385173492"}]}, {"id": "667461526", "answers": [{"choice_id": "4385173527"}]}]}]}, "emitted_at": 1690184768433} -{"stream": "survey_responses", "data": {"id": "12731250495", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843672", "survey_id": "307785394", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=juR_2BEnc_2BsJLlCc6Sg7p2Ql_2B4icSx_2BsZwo_2FU1RVgYxqI2OzngMKnAUlrdJUS39ewn", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5QHdVgvFd_2Bn4fvmj_2F1aNt3d6AR0J1aeeTU4KsCULsx8_3D?respondent_id=12731250495", "total_time": 36, "date_modified": "2021-06-10T10:21:58+00:00", "date_created": "2021-06-10T10:21:21+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785394/responses/12731250495", "pages": [{"id": "168831344", "questions": []}, {"id": "168831345", "questions": [{"id": "667461468", "answers": [{"choice_id": "4385173229"}]}, {"id": "667461471", "answers": [{"choice_id": "4385173237"}]}, {"id": "667461473", "answers": [{"choice_id": "4385173257"}]}, {"id": "667461476", "answers": [{"choice_id": "4385173268"}]}, {"id": "667461498", "answers": [{"choice_id": "4385173415"}]}]}, {"id": "168831352", "questions": [{"id": "667461513", "answers": [{"choice_id": "4385173459"}]}, {"id": "667461516", "answers": [{"choice_id": "4385173474"}]}, {"id": "667461517", "answers": [{"choice_id": "4385173486"}]}, {"id": "667461521", "answers": [{"choice_id": "4385173492"}]}, {"id": "667461526", "answers": [{"choice_id": "4385173526"}]}]}]}, "emitted_at": 1690184768434} -{"stream": "survey_responses", "data": {"id": "12731268932", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843672", "survey_id": "307785394", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=vXLZxpfaNUwy1JRrQLJCNL0kcVa5U5M5FtCofSN4MCcTJt9om9nRi2D4C4xJeXUg", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5QHdVgvFd_2Bn4fvmj_2F1aNt3d6AR0J1aeeTU4KsCULsx8_3D?respondent_id=12731268932", "total_time": 35, "date_modified": "2021-06-10T10:29:36+00:00", "date_created": "2021-06-10T10:29:00+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785394/responses/12731268932", "pages": [{"id": "168831344", "questions": []}, {"id": "168831345", "questions": [{"id": "667461468", "answers": [{"choice_id": "4385173228"}]}, {"id": "667461471", "answers": [{"choice_id": "4385173242"}]}, {"id": "667461473", "answers": [{"choice_id": "4385173257"}]}, {"id": "667461476", "answers": [{"choice_id": "4385173271"}]}, {"id": "667461498", "answers": [{"choice_id": "4385173412"}]}]}, {"id": "168831352", "questions": [{"id": "667461513", "answers": [{"choice_id": "4385173454"}]}, {"id": "667461516", "answers": [{"choice_id": "4385173475"}]}, {"id": "667461517", "answers": [{"choice_id": "4385173483"}]}, {"id": "667461521", "answers": [{"choice_id": "4385173493"}]}, {"id": "667461526", "answers": [{"choice_id": "4385173524"}]}]}]}, "emitted_at": 1690184768434} -{"stream": "survey_responses", "data": {"id": "12731289280", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843672", "survey_id": "307785394", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=M4Lzmv89J2ZSnoBKgwxsZqDP3em9icPdsBglzN7NQB_2BM5AQ1bWpjH53fB0IsVZi6", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5QHdVgvFd_2Bn4fvmj_2F1aNt3d6AR0J1aeeTU4KsCULsx8_3D?respondent_id=12731289280", "total_time": 36, "date_modified": "2021-06-10T10:37:44+00:00", "date_created": "2021-06-10T10:37:08+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785394/responses/12731289280", "pages": [{"id": "168831344", "questions": []}, {"id": "168831345", "questions": [{"id": "667461468", "answers": [{"choice_id": "4385173227"}]}, {"id": "667461471", "answers": [{"choice_id": "4385173242"}]}, {"id": "667461473", "answers": [{"choice_id": "4385173258"}]}, {"id": "667461476", "answers": [{"choice_id": "4385173270"}]}, {"id": "667461498", "answers": [{"choice_id": "4385173416"}]}]}, {"id": "168831352", "questions": [{"id": "667461513", "answers": [{"choice_id": "4385173454"}]}, {"id": "667461516", "answers": [{"choice_id": "4385173477"}]}, {"id": "667461517", "answers": [{"choice_id": "4385173485"}]}, {"id": "667461521", "answers": [{"choice_id": "4385173492"}]}, {"id": "667461526", "answers": [{"choice_id": "4385173526"}]}]}]}, "emitted_at": 1690184768434} -{"stream": "survey_responses", "data": {"id": "12731308800", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843672", "survey_id": "307785394", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=GQAEVhmAlwb5r_2FWfXNC_2Fqzqv5ULovU_2BGovk8oDp5nCvL2982DEXHbEhjSg4A3FdA", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5QHdVgvFd_2Bn4fvmj_2F1aNt3d6AR0J1aeeTU4KsCULsx8_3D?respondent_id=12731308800", "total_time": 39, "date_modified": "2021-06-10T10:45:33+00:00", "date_created": "2021-06-10T10:44:54+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785394/responses/12731308800", "pages": [{"id": "168831344", "questions": []}, {"id": "168831345", "questions": [{"id": "667461468", "answers": [{"choice_id": "4385173227"}]}, {"id": "667461471", "answers": [{"choice_id": "4385173239"}]}, {"id": "667461473", "answers": [{"choice_id": "4385173256"}]}, {"id": "667461476", "answers": [{"choice_id": "4385173271"}]}, {"id": "667461498", "answers": [{"choice_id": "4385173412"}]}]}, {"id": "168831352", "questions": [{"id": "667461513", "answers": [{"choice_id": "4385173459"}]}, {"id": "667461516", "answers": [{"choice_id": "4385173473"}]}, {"id": "667461517", "answers": [{"choice_id": "4385173482"}]}, {"id": "667461521", "answers": [{"choice_id": "4385173494"}]}, {"id": "667461526", "answers": [{"choice_id": "4385173526"}]}]}]}, "emitted_at": 1690184768435} -{"stream": "survey_responses", "data": {"id": "12731328082", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843672", "survey_id": "307785394", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=MwpXysDo7Lhe8X4oi_2BfwDcfgG4XMlnjXbj783SvdsQcVCLn0Y0mvT87cyq6wLtri", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5QHdVgvFd_2Bn4fvmj_2F1aNt3d6AR0J1aeeTU4KsCULsx8_3D?respondent_id=12731328082", "total_time": 36, "date_modified": "2021-06-10T10:53:17+00:00", "date_created": "2021-06-10T10:52:41+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785394/responses/12731328082", "pages": [{"id": "168831344", "questions": []}, {"id": "168831345", "questions": [{"id": "667461468", "answers": [{"choice_id": "4385173227"}]}, {"id": "667461471", "answers": [{"choice_id": "4385173241"}]}, {"id": "667461473", "answers": [{"choice_id": "4385173256"}]}, {"id": "667461476", "answers": [{"choice_id": "4385173271"}]}, {"id": "667461498", "answers": [{"choice_id": "4385173413"}]}]}, {"id": "168831352", "questions": [{"id": "667461513", "answers": [{"choice_id": "4385173456"}]}, {"id": "667461516", "answers": [{"choice_id": "4385173477"}]}, {"id": "667461517", "answers": [{"choice_id": "4385173486"}]}, {"id": "667461521", "answers": [{"choice_id": "4385173492"}]}, {"id": "667461526", "answers": [{"choice_id": "4385173524"}]}]}]}, "emitted_at": 1690184768435} -{"stream": "survey_responses", "data": {"id": "12731346960", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843672", "survey_id": "307785394", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=i9GzbJPTq_2BCfXqXCYc91kWSkkbD40J_2FGL8hESaU68w7nuZXOyFZ7xNpNth1mmf72", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5QHdVgvFd_2Bn4fvmj_2F1aNt3d6AR0J1aeeTU4KsCULsx8_3D?respondent_id=12731346960", "total_time": 35, "date_modified": "2021-06-10T11:00:58+00:00", "date_created": "2021-06-10T11:00:22+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785394/responses/12731346960", "pages": [{"id": "168831344", "questions": []}, {"id": "168831345", "questions": [{"id": "667461468", "answers": [{"choice_id": "4385173229"}]}, {"id": "667461471", "answers": [{"choice_id": "4385173241"}]}, {"id": "667461473", "answers": [{"choice_id": "4385173256"}]}, {"id": "667461476", "answers": [{"choice_id": "4385173270"}]}, {"id": "667461498", "answers": [{"choice_id": "4385173412"}]}]}, {"id": "168831352", "questions": [{"id": "667461513", "answers": [{"choice_id": "4385173457"}]}, {"id": "667461516", "answers": [{"choice_id": "4385173476"}]}, {"id": "667461517", "answers": [{"choice_id": "4385173483"}]}, {"id": "667461521", "answers": [{"choice_id": "4385173493"}]}, {"id": "667461526", "answers": [{"choice_id": "4385173527"}]}]}]}, "emitted_at": 1690184768435} -{"stream": "survey_responses", "data": {"id": "12731044345", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843682", "survey_id": "307785402", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=RiSWqWbbXVsX_2BRIyudfJwY_2BODcC0aHC0L77gSNMTF5T_2BsPWUy2vlIWpXf01Hqy8k", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxuFqiaqWbXDPfS4KhzQnJ4c_3D?respondent_id=12731044345", "total_time": 34, "date_modified": "2021-06-10T08:48:50+00:00", "date_created": "2021-06-10T08:48:16+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785402/responses/12731044345", "pages": [{"id": "168831357", "questions": []}, {"id": "168831358", "questions": [{"id": "667461529", "answers": [{"choice_id": "4385173538"}]}, {"id": "667461530", "answers": [{"choice_id": "4385173549"}]}, {"id": "667461549", "answers": [{"choice_id": "4385173661"}]}, {"id": "667461551", "answers": [{"choice_id": "4385173676"}]}, {"id": "667461553", "answers": [{"choice_id": "4385173681"}]}]}, {"id": "168831365", "questions": [{"id": "667461555", "answers": [{"choice_id": "4385173713"}]}, {"id": "667461558", "answers": [{"choice_id": "4385173732"}]}, {"id": "667461561", "answers": [{"choice_id": "4385173756"}]}, {"id": "667461580", "answers": [{"choice_id": "4385174059"}]}, {"id": "667461598", "answers": [{"choice_id": "4385174182"}]}]}]}, "emitted_at": 1690184769552} -{"stream": "survey_responses", "data": {"id": "12731058644", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843682", "survey_id": "307785402", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=8fjqeuyiG6EvuMWbYy64yqPXBnCSWaEpW8DInIUYXRbFyqerWPCENf5izNrPGoNw", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxuFqiaqWbXDPfS4KhzQnJ4c_3D?respondent_id=12731058644", "total_time": 33, "date_modified": "2021-06-10T08:56:31+00:00", "date_created": "2021-06-10T08:55:58+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785402/responses/12731058644", "pages": [{"id": "168831357", "questions": []}, {"id": "168831358", "questions": [{"id": "667461529", "answers": [{"choice_id": "4385173537"}]}, {"id": "667461530", "answers": [{"choice_id": "4385173550"}]}, {"id": "667461549", "answers": [{"choice_id": "4385173662"}]}, {"id": "667461551", "answers": [{"choice_id": "4385173677"}]}, {"id": "667461553", "answers": [{"choice_id": "4385173678"}]}]}, {"id": "168831365", "questions": [{"id": "667461555", "answers": [{"choice_id": "4385173713"}]}, {"id": "667461558", "answers": [{"choice_id": "4385173729"}]}, {"id": "667461561", "answers": [{"choice_id": "4385173755"}]}, {"id": "667461580", "answers": [{"choice_id": "4385174063"}]}, {"id": "667461598", "answers": [{"choice_id": "4385174183"}]}]}]}, "emitted_at": 1690184769552} -{"stream": "survey_responses", "data": {"id": "12731073567", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843682", "survey_id": "307785402", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=ILVcHPPYp1qBeboZrnUw9zU8OfPDI3pBaSHE_2FPA1mcsZYgDgyEvD2DNJEXG50BwI", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxuFqiaqWbXDPfS4KhzQnJ4c_3D?respondent_id=12731073567", "total_time": 34, "date_modified": "2021-06-10T09:04:16+00:00", "date_created": "2021-06-10T09:03:42+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785402/responses/12731073567", "pages": [{"id": "168831357", "questions": []}, {"id": "168831358", "questions": [{"id": "667461529", "answers": [{"choice_id": "4385173537"}]}, {"id": "667461530", "answers": [{"choice_id": "4385173552"}]}, {"id": "667461549", "answers": [{"choice_id": "4385173663"}]}, {"id": "667461551", "answers": [{"choice_id": "4385173675"}]}, {"id": "667461553", "answers": [{"choice_id": "4385173681"}]}]}, {"id": "168831365", "questions": [{"id": "667461555", "answers": [{"choice_id": "4385173711"}]}, {"id": "667461558", "answers": [{"choice_id": "4385173734"}]}, {"id": "667461561", "answers": [{"choice_id": "4385173756"}]}, {"id": "667461580", "answers": [{"choice_id": "4385174066"}]}, {"id": "667461598", "answers": [{"choice_id": "4385174183"}]}]}]}, "emitted_at": 1690184769553} -{"stream": "survey_responses", "data": {"id": "12731089919", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843682", "survey_id": "307785402", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=WJVGjucUzW8fBD4StuC_2FFZoPB0pCtugYpTzgDrCOMnJyyzUp5Q2v55jJ8xqcAaJv", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxuFqiaqWbXDPfS4KhzQnJ4c_3D?respondent_id=12731089919", "total_time": 33, "date_modified": "2021-06-10T09:12:00+00:00", "date_created": "2021-06-10T09:11:26+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785402/responses/12731089919", "pages": [{"id": "168831357", "questions": []}, {"id": "168831358", "questions": [{"id": "667461529", "answers": [{"choice_id": "4385173536"}]}, {"id": "667461530", "answers": [{"choice_id": "4385173551"}]}, {"id": "667461549", "answers": [{"choice_id": "4385173658"}]}, {"id": "667461551", "answers": [{"choice_id": "4385173675"}]}, {"id": "667461553", "answers": [{"choice_id": "4385173681"}]}]}, {"id": "168831365", "questions": [{"id": "667461555", "answers": [{"choice_id": "4385173714"}]}, {"id": "667461558", "answers": [{"choice_id": "4385173728"}]}, {"id": "667461561", "answers": [{"choice_id": "4385173755"}]}, {"id": "667461580", "answers": [{"choice_id": "4385174060"}]}, {"id": "667461598", "answers": [{"choice_id": "4385174184"}]}]}]}, "emitted_at": 1690184769553} -{"stream": "survey_responses", "data": {"id": "12731106311", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843682", "survey_id": "307785402", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=pYr2BTIwVP0O3EzGC27aOOaCA0hnWvJ3FzJOiB_2Fhgw0UUSDh2QpQTcjrfXltZ8dv", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxuFqiaqWbXDPfS4KhzQnJ4c_3D?respondent_id=12731106311", "total_time": 33, "date_modified": "2021-06-10T09:19:41+00:00", "date_created": "2021-06-10T09:19:07+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785402/responses/12731106311", "pages": [{"id": "168831357", "questions": []}, {"id": "168831358", "questions": [{"id": "667461529", "answers": [{"choice_id": "4385173537"}]}, {"id": "667461530", "answers": [{"choice_id": "4385173549"}]}, {"id": "667461549", "answers": [{"choice_id": "4385173663"}]}, {"id": "667461551", "answers": [{"choice_id": "4385173675"}]}, {"id": "667461553", "answers": [{"choice_id": "4385173680"}]}]}, {"id": "168831365", "questions": [{"id": "667461555", "answers": [{"choice_id": "4385173712"}]}, {"id": "667461558", "answers": [{"choice_id": "4385173727"}]}, {"id": "667461561", "answers": [{"choice_id": "4385173757"}]}, {"id": "667461580", "answers": [{"choice_id": "4385174061"}]}, {"id": "667461598", "answers": [{"choice_id": "4385174186"}]}]}]}, "emitted_at": 1690184769553} -{"stream": "survey_responses", "data": {"id": "12731123001", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843682", "survey_id": "307785402", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=iMOq3PmDu_2Fpfu6Lqltp5VPoRV2azTczcfDSAY7AjMIzECAtQhUwU_2FNMCCwq_2BSvsN", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxuFqiaqWbXDPfS4KhzQnJ4c_3D?respondent_id=12731123001", "total_time": 34, "date_modified": "2021-06-10T09:27:23+00:00", "date_created": "2021-06-10T09:26:49+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785402/responses/12731123001", "pages": [{"id": "168831357", "questions": []}, {"id": "168831358", "questions": [{"id": "667461529", "answers": [{"choice_id": "4385173540"}]}, {"id": "667461530", "answers": [{"choice_id": "4385173550"}]}, {"id": "667461549", "answers": [{"choice_id": "4385173658"}]}, {"id": "667461551", "answers": [{"choice_id": "4385173676"}]}, {"id": "667461553", "answers": [{"choice_id": "4385173682"}]}]}, {"id": "168831365", "questions": [{"id": "667461555", "answers": [{"choice_id": "4385173713"}]}, {"id": "667461558", "answers": [{"choice_id": "4385173727"}]}, {"id": "667461561", "answers": [{"choice_id": "4385173757"}]}, {"id": "667461580", "answers": [{"choice_id": "4385174059"}]}, {"id": "667461598", "answers": [{"choice_id": "4385174186"}]}]}]}, "emitted_at": 1690184769554} -{"stream": "survey_responses", "data": {"id": "12731140625", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843682", "survey_id": "307785402", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=QTesi81CB_2BwJ62wJ8op06oDEuscTPvq1ke99azBrWELfhmlgdyrrpw0NVegVV_2BE7", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxuFqiaqWbXDPfS4KhzQnJ4c_3D?respondent_id=12731140625", "total_time": 33, "date_modified": "2021-06-10T09:35:05+00:00", "date_created": "2021-06-10T09:34:32+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785402/responses/12731140625", "pages": [{"id": "168831357", "questions": []}, {"id": "168831358", "questions": [{"id": "667461529", "answers": [{"choice_id": "4385173538"}]}, {"id": "667461530", "answers": [{"choice_id": "4385173553"}]}, {"id": "667461549", "answers": [{"choice_id": "4385173661"}]}, {"id": "667461551", "answers": [{"choice_id": "4385173675"}]}, {"id": "667461553", "answers": [{"choice_id": "4385173678"}]}]}, {"id": "168831365", "questions": [{"id": "667461555", "answers": [{"choice_id": "4385173713"}]}, {"id": "667461558", "answers": [{"choice_id": "4385173729"}]}, {"id": "667461561", "answers": [{"choice_id": "4385173755"}]}, {"id": "667461580", "answers": [{"choice_id": "4385174062"}]}, {"id": "667461598", "answers": [{"choice_id": "4385174185"}]}]}]}, "emitted_at": 1690184769554} -{"stream": "survey_responses", "data": {"id": "12731157855", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843682", "survey_id": "307785402", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=Y_2BseyWnN45hqrdC63g5th2srbJCqZZuaaHgJaVUVwfuBPwNI2IdB2Dc1yEhLsRcy", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxuFqiaqWbXDPfS4KhzQnJ4c_3D?respondent_id=12731157855", "total_time": 33, "date_modified": "2021-06-10T09:42:46+00:00", "date_created": "2021-06-10T09:42:12+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785402/responses/12731157855", "pages": [{"id": "168831357", "questions": []}, {"id": "168831358", "questions": [{"id": "667461529", "answers": [{"choice_id": "4385173536"}]}, {"id": "667461530", "answers": [{"choice_id": "4385173553"}]}, {"id": "667461549", "answers": [{"choice_id": "4385173661"}]}, {"id": "667461551", "answers": [{"choice_id": "4385173676"}]}, {"id": "667461553", "answers": [{"choice_id": "4385173681"}]}]}, {"id": "168831365", "questions": [{"id": "667461555", "answers": [{"choice_id": "4385173713"}]}, {"id": "667461558", "answers": [{"choice_id": "4385173727"}]}, {"id": "667461561", "answers": [{"choice_id": "4385173756"}]}, {"id": "667461580", "answers": [{"choice_id": "4385174060"}]}, {"id": "667461598", "answers": [{"choice_id": "4385174183"}]}]}]}, "emitted_at": 1690184769554} -{"stream": "survey_responses", "data": {"id": "12731175182", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843682", "survey_id": "307785402", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=wvlpuaxYEnle4EG2hvBzqRUelWi_2BwXRQmZSU2ru959wJ7Ly3p7I9sg1wjhKu7Bm_2F", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxuFqiaqWbXDPfS4KhzQnJ4c_3D?respondent_id=12731175182", "total_time": 27, "date_modified": "2021-06-10T09:50:26+00:00", "date_created": "2021-06-10T09:49:58+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785402/responses/12731175182", "pages": [{"id": "168831357", "questions": []}, {"id": "168831358", "questions": []}, {"id": "168831365", "questions": [{"id": "667461555", "answers": [{"choice_id": "4385173713"}]}, {"id": "667461558", "answers": [{"choice_id": "4385173731"}]}, {"id": "667461561", "answers": [{"choice_id": "4385173756"}]}, {"id": "667461580", "answers": [{"choice_id": "4385174061"}]}, {"id": "667461598", "answers": [{"choice_id": "4385174186"}]}]}]}, "emitted_at": 1690184769554} -{"stream": "survey_responses", "data": {"id": "12731193598", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843682", "survey_id": "307785402", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=mqmf_2BkbhXVJbx4WQXC0_2F72pU_2FFf6FkOVA8_2F7REJy4j9HAEPKg_2BCPmL8F2wfc6SKi", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxuFqiaqWbXDPfS4KhzQnJ4c_3D?respondent_id=12731193598", "total_time": 37, "date_modified": "2021-06-10T09:58:58+00:00", "date_created": "2021-06-10T09:58:20+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785402/responses/12731193598", "pages": [{"id": "168831357", "questions": []}, {"id": "168831358", "questions": [{"id": "667461529", "answers": [{"choice_id": "4385173539"}]}, {"id": "667461530", "answers": [{"choice_id": "4385173553"}]}, {"id": "667461549", "answers": [{"choice_id": "4385173657"}]}, {"id": "667461551", "answers": [{"choice_id": "4385173676"}]}, {"id": "667461553", "answers": [{"choice_id": "4385173681"}]}]}, {"id": "168831365", "questions": [{"id": "667461555", "answers": [{"choice_id": "4385173714"}]}, {"id": "667461558", "answers": [{"choice_id": "4385173733"}]}, {"id": "667461561", "answers": [{"choice_id": "4385173757"}]}, {"id": "667461580", "answers": [{"choice_id": "4385174065"}]}, {"id": "667461598", "answers": [{"choice_id": "4385174185"}]}]}]}, "emitted_at": 1690184769555} -{"stream": "survey_responses", "data": {"id": "12731213708", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843682", "survey_id": "307785402", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=FmbWXWfmfbsDK8MEuKSsHPviZSUYgchniK99dQeGSFpf_2BWnh6cTWlg0o5YRIRtn7", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxuFqiaqWbXDPfS4KhzQnJ4c_3D?respondent_id=12731213708", "total_time": 33, "date_modified": "2021-06-10T10:06:59+00:00", "date_created": "2021-06-10T10:06:25+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785402/responses/12731213708", "pages": [{"id": "168831357", "questions": []}, {"id": "168831358", "questions": [{"id": "667461529", "answers": [{"choice_id": "4385173537"}]}, {"id": "667461530", "answers": [{"choice_id": "4385173550"}]}, {"id": "667461549", "answers": [{"choice_id": "4385173663"}]}, {"id": "667461551", "answers": [{"choice_id": "4385173677"}]}, {"id": "667461553", "answers": [{"choice_id": "4385173683"}]}]}, {"id": "168831365", "questions": [{"id": "667461555", "answers": [{"choice_id": "4385173712"}]}, {"id": "667461558", "answers": [{"choice_id": "4385173729"}]}, {"id": "667461561", "answers": [{"choice_id": "4385173755"}]}, {"id": "667461580", "answers": [{"choice_id": "4385174061"}]}, {"id": "667461598", "answers": [{"choice_id": "4385174182"}]}]}]}, "emitted_at": 1690184769555} -{"stream": "survey_responses", "data": {"id": "12731233283", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843682", "survey_id": "307785402", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=VRo6oIX2X5OOWhsAVRYKuhrXr5zszr6duv29Vq3RdwAJMtFNcQqAJk71Bi4Oq3Bo", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxuFqiaqWbXDPfS4KhzQnJ4c_3D?respondent_id=12731233283", "total_time": 33, "date_modified": "2021-06-10T10:14:47+00:00", "date_created": "2021-06-10T10:14:13+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785402/responses/12731233283", "pages": [{"id": "168831357", "questions": []}, {"id": "168831358", "questions": [{"id": "667461529", "answers": [{"choice_id": "4385173540"}]}, {"id": "667461530", "answers": [{"choice_id": "4385173554"}]}, {"id": "667461549", "answers": [{"choice_id": "4385173656"}]}, {"id": "667461551", "answers": [{"choice_id": "4385173676"}]}, {"id": "667461553", "answers": [{"choice_id": "4385173679"}]}]}, {"id": "168831365", "questions": [{"id": "667461555", "answers": [{"choice_id": "4385173715"}]}, {"id": "667461558", "answers": [{"choice_id": "4385173727"}]}, {"id": "667461561", "answers": [{"choice_id": "4385173756"}]}, {"id": "667461580", "answers": [{"choice_id": "4385174063"}]}, {"id": "667461598", "answers": [{"choice_id": "4385174185"}]}]}]}, "emitted_at": 1690184769555} -{"stream": "survey_responses", "data": {"id": "12731252105", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843682", "survey_id": "307785402", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=TQiDe8sSx4_2FuWzT1Lr7RKGOBO2iwOmTuPPOzcvcHL45tRjbdZSW9UHv6_2B2nLtY6n", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxuFqiaqWbXDPfS4KhzQnJ4c_3D?respondent_id=12731252105", "total_time": 34, "date_modified": "2021-06-10T10:22:37+00:00", "date_created": "2021-06-10T10:22:03+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785402/responses/12731252105", "pages": [{"id": "168831357", "questions": []}, {"id": "168831358", "questions": [{"id": "667461529", "answers": [{"choice_id": "4385173537"}]}, {"id": "667461530", "answers": [{"choice_id": "4385173552"}]}, {"id": "667461549", "answers": [{"choice_id": "4385173659"}]}, {"id": "667461551", "answers": [{"choice_id": "4385173675"}]}, {"id": "667461553", "answers": [{"choice_id": "4385173682"}]}]}, {"id": "168831365", "questions": [{"id": "667461555", "answers": [{"choice_id": "4385173712"}]}, {"id": "667461558", "answers": [{"choice_id": "4385173731"}]}, {"id": "667461561", "answers": [{"choice_id": "4385173755"}]}, {"id": "667461580", "answers": [{"choice_id": "4385174062"}]}, {"id": "667461598", "answers": [{"choice_id": "4385174182"}]}]}]}, "emitted_at": 1690184769556} -{"stream": "survey_responses", "data": {"id": "12731270690", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843682", "survey_id": "307785402", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=ipONp_2F6FrM7HwOh1_2BpJXXqL65aZR6EN_2BmCdmmlM4Etuv_2BqfU5P8wsYtpW2_2BvsbR2", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxuFqiaqWbXDPfS4KhzQnJ4c_3D?respondent_id=12731270690", "total_time": 33, "date_modified": "2021-06-10T10:30:16+00:00", "date_created": "2021-06-10T10:29:42+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785402/responses/12731270690", "pages": [{"id": "168831357", "questions": []}, {"id": "168831358", "questions": [{"id": "667461529", "answers": [{"choice_id": "4385173536"}]}, {"id": "667461530", "answers": [{"choice_id": "4385173550"}]}, {"id": "667461549", "answers": [{"choice_id": "4385173656"}]}, {"id": "667461551", "answers": [{"choice_id": "4385173677"}]}, {"id": "667461553", "answers": [{"choice_id": "4385173681"}]}]}, {"id": "168831365", "questions": [{"id": "667461555", "answers": [{"choice_id": "4385173715"}]}, {"id": "667461558", "answers": [{"choice_id": "4385173734"}]}, {"id": "667461561", "answers": [{"choice_id": "4385173757"}]}, {"id": "667461580", "answers": [{"choice_id": "4385174064"}]}, {"id": "667461598", "answers": [{"choice_id": "4385174186"}]}]}]}, "emitted_at": 1690184769556} -{"stream": "survey_responses", "data": {"id": "12731290962", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843682", "survey_id": "307785402", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=eWOUaV5xBiUO9JblKm5rYeLfk2_2FPRiygYknRKYIPlBC3Vl9805OBGP8f2kjnm0r2", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxuFqiaqWbXDPfS4KhzQnJ4c_3D?respondent_id=12731290962", "total_time": 33, "date_modified": "2021-06-10T10:38:22+00:00", "date_created": "2021-06-10T10:37:48+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785402/responses/12731290962", "pages": [{"id": "168831357", "questions": []}, {"id": "168831358", "questions": [{"id": "667461529", "answers": [{"choice_id": "4385173538"}]}, {"id": "667461530", "answers": [{"choice_id": "4385173553"}]}, {"id": "667461549", "answers": [{"choice_id": "4385173658"}]}, {"id": "667461551", "answers": [{"choice_id": "4385173675"}]}, {"id": "667461553", "answers": [{"choice_id": "4385173678"}]}]}, {"id": "168831365", "questions": [{"id": "667461555", "answers": [{"choice_id": "4385173715"}]}, {"id": "667461558", "answers": [{"choice_id": "4385173729"}]}, {"id": "667461561", "answers": [{"choice_id": "4385173757"}]}, {"id": "667461580", "answers": [{"choice_id": "4385174066"}]}, {"id": "667461598", "answers": [{"choice_id": "4385174182"}]}]}]}, "emitted_at": 1690184769556} -{"stream": "survey_responses", "data": {"id": "12731310541", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843682", "survey_id": "307785402", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=wFNF4x4oPfyHaT5uvGstRuK1DUDfrHJyxBXB95bRgrB_2FdJojFuoGIhxA0BIH0XSO", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxuFqiaqWbXDPfS4KhzQnJ4c_3D?respondent_id=12731310541", "total_time": 37, "date_modified": "2021-06-10T10:46:14+00:00", "date_created": "2021-06-10T10:45:37+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785402/responses/12731310541", "pages": [{"id": "168831357", "questions": []}, {"id": "168831358", "questions": [{"id": "667461529", "answers": [{"choice_id": "4385173540"}]}, {"id": "667461530", "answers": [{"choice_id": "4385173551"}]}, {"id": "667461549", "answers": [{"choice_id": "4385173661"}]}, {"id": "667461551", "answers": [{"choice_id": "4385173677"}]}, {"id": "667461553", "answers": [{"choice_id": "4385173682"}]}]}, {"id": "168831365", "questions": [{"id": "667461555", "answers": [{"choice_id": "4385173712"}]}, {"id": "667461558", "answers": [{"choice_id": "4385173732"}]}, {"id": "667461561", "answers": [{"choice_id": "4385173756"}]}, {"id": "667461580", "answers": [{"choice_id": "4385174059"}]}, {"id": "667461598", "answers": [{"choice_id": "4385174184"}]}]}]}, "emitted_at": 1690184769557} -{"stream": "survey_responses", "data": {"id": "12731329746", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843682", "survey_id": "307785402", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=vGkpaT5QedSZDkH1LkdPEMwKHIlyZ2mKjKyt8tgek5uzfoU30oxZnac34WvjYm1h", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxuFqiaqWbXDPfS4KhzQnJ4c_3D?respondent_id=12731329746", "total_time": 33, "date_modified": "2021-06-10T10:53:55+00:00", "date_created": "2021-06-10T10:53:21+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785402/responses/12731329746", "pages": [{"id": "168831357", "questions": []}, {"id": "168831358", "questions": [{"id": "667461529", "answers": [{"choice_id": "4385173540"}]}, {"id": "667461530", "answers": [{"choice_id": "4385173551"}]}, {"id": "667461549", "answers": [{"choice_id": "4385173661"}]}, {"id": "667461551", "answers": [{"choice_id": "4385173677"}]}, {"id": "667461553", "answers": [{"choice_id": "4385173682"}]}]}, {"id": "168831365", "questions": [{"id": "667461555", "answers": [{"choice_id": "4385173711"}]}, {"id": "667461558", "answers": [{"choice_id": "4385173732"}]}, {"id": "667461561", "answers": [{"choice_id": "4385173755"}]}, {"id": "667461580", "answers": [{"choice_id": "4385174059"}]}, {"id": "667461598", "answers": [{"choice_id": "4385174182"}]}]}]}, "emitted_at": 1690184769557} -{"stream": "survey_responses", "data": {"id": "12731348717", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843682", "survey_id": "307785402", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=vuWK6q7Hz9dNybRoH2rP8Ux0qNc_2B0E3lMUmtDlPUbslKbHc7FranQhTJxg_2BIo3Pw", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxuFqiaqWbXDPfS4KhzQnJ4c_3D?respondent_id=12731348717", "total_time": 26, "date_modified": "2021-06-10T11:01:30+00:00", "date_created": "2021-06-10T11:01:03+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785402/responses/12731348717", "pages": [{"id": "168831357", "questions": []}, {"id": "168831358", "questions": []}, {"id": "168831365", "questions": [{"id": "667461555", "answers": [{"choice_id": "4385173714"}]}, {"id": "667461558", "answers": [{"choice_id": "4385173729"}]}, {"id": "667461561", "answers": [{"choice_id": "4385173755"}]}, {"id": "667461580", "answers": [{"choice_id": "4385174059"}]}, {"id": "667461598", "answers": [{"choice_id": "4385174186"}]}]}]}, "emitted_at": 1690184769557} -{"stream": "survey_responses", "data": {"id": "12731045521", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843688", "survey_id": "307785408", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=0SWEpDfTb_2FAVFz7Ddryxk5koNgVExk8Oke4TJ8tz6QD5Eqc7uoqHQWRcLUi7yOAb", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxvvr7YEM4YZ5j7dMDUtBHEw_3D?respondent_id=12731045521", "total_time": 32, "date_modified": "2021-06-10T08:49:26+00:00", "date_created": "2021-06-10T08:48:54+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785408/responses/12731045521", "pages": [{"id": "168831381", "questions": []}, {"id": "168831382", "questions": [{"id": "667461606", "answers": [{"choice_id": "4385174255"}]}, {"id": "667461628", "answers": [{"choice_id": "4385174361"}]}, {"id": "667461630", "answers": [{"choice_id": "4385174372"}]}, {"id": "667461651", "answers": [{"choice_id": "4385174482"}]}, {"id": "667461652", "answers": [{"choice_id": "4385174496"}]}]}, {"id": "168831388", "questions": [{"id": "667461666", "answers": [{"choice_id": "4385174598"}]}, {"id": "667461670", "answers": [{"choice_id": "4385174621"}]}, {"id": "667461674", "answers": [{"choice_id": "4385174634"}]}, {"id": "667461676", "answers": [{"choice_id": "4385174649"}]}, {"id": "667461686", "answers": [{"choice_id": "4385174678"}]}]}]}, "emitted_at": 1690184770269} -{"stream": "survey_responses", "data": {"id": "12731059832", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843688", "survey_id": "307785408", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=vhiMNZdaQtV6_2FPkJM_2BP8UtinkM87qYXz4VjmfwvWnjK1NFrLnN1P_2FH3w9GU7JUxX", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxvvr7YEM4YZ5j7dMDUtBHEw_3D?respondent_id=12731059832", "total_time": 32, "date_modified": "2021-06-10T08:57:08+00:00", "date_created": "2021-06-10T08:56:36+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785408/responses/12731059832", "pages": [{"id": "168831381", "questions": []}, {"id": "168831382", "questions": [{"id": "667461606", "answers": [{"choice_id": "4385174252"}]}, {"id": "667461628", "answers": [{"choice_id": "4385174361"}]}, {"id": "667461630", "answers": [{"choice_id": "4385174370"}]}, {"id": "667461651", "answers": [{"choice_id": "4385174485"}]}, {"id": "667461652", "answers": [{"choice_id": "4385174497"}]}]}, {"id": "168831388", "questions": [{"id": "667461666", "answers": [{"choice_id": "4385174600"}]}, {"id": "667461670", "answers": [{"choice_id": "4385174621"}]}, {"id": "667461674", "answers": [{"choice_id": "4385174635"}]}, {"id": "667461676", "answers": [{"choice_id": "4385174644"}]}, {"id": "667461686", "answers": [{"choice_id": "4385174677"}]}]}]}, "emitted_at": 1690184770270} -{"stream": "survey_responses", "data": {"id": "12731074829", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843688", "survey_id": "307785408", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=ZUZi7r7io9fCwXP7l4K5oirdqpAhNEdbesFS73p617lSOzxuK5oKGDsqiK8zdA8E", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxvvr7YEM4YZ5j7dMDUtBHEw_3D?respondent_id=12731074829", "total_time": 31, "date_modified": "2021-06-10T09:04:52+00:00", "date_created": "2021-06-10T09:04:20+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785408/responses/12731074829", "pages": [{"id": "168831381", "questions": []}, {"id": "168831382", "questions": [{"id": "667461606", "answers": [{"choice_id": "4385174255"}]}, {"id": "667461628", "answers": [{"choice_id": "4385174360"}]}, {"id": "667461630", "answers": [{"choice_id": "4385174372"}]}, {"id": "667461651", "answers": [{"choice_id": "4385174485"}]}, {"id": "667461652", "answers": [{"choice_id": "4385174498"}]}]}, {"id": "168831388", "questions": [{"id": "667461666", "answers": [{"choice_id": "4385174599"}]}, {"id": "667461670", "answers": [{"choice_id": "4385174622"}]}, {"id": "667461674", "answers": [{"choice_id": "4385174634"}]}, {"id": "667461676", "answers": [{"choice_id": "4385174645"}]}, {"id": "667461686", "answers": [{"choice_id": "4385174678"}]}]}]}, "emitted_at": 1690184770270} -{"stream": "survey_responses", "data": {"id": "12731091270", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843688", "survey_id": "307785408", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=eL4vDkGrDZfi5BiJC001E_2FoL_2Bo1mAtZxB97VuOw0Ue7x2Y_2Fvj9cWXWEFAG1_2For6z", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxvvr7YEM4YZ5j7dMDUtBHEw_3D?respondent_id=12731091270", "total_time": 33, "date_modified": "2021-06-10T09:12:38+00:00", "date_created": "2021-06-10T09:12:05+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785408/responses/12731091270", "pages": [{"id": "168831381", "questions": []}, {"id": "168831382", "questions": [{"id": "667461606", "answers": [{"choice_id": "4385174252"}]}, {"id": "667461628", "answers": [{"choice_id": "4385174360"}]}, {"id": "667461630", "answers": [{"choice_id": "4385174372"}]}, {"id": "667461651", "answers": [{"choice_id": "4385174483"}]}, {"id": "667461652", "answers": [{"choice_id": "4385174493"}]}]}, {"id": "168831388", "questions": [{"id": "667461666", "answers": [{"choice_id": "4385174600"}]}, {"id": "667461670", "answers": [{"choice_id": "4385174623"}]}, {"id": "667461674", "answers": [{"choice_id": "4385174634"}]}, {"id": "667461676", "answers": [{"choice_id": "4385174646"}]}, {"id": "667461686", "answers": [{"choice_id": "4385174678"}]}]}]}, "emitted_at": 1690184770270} -{"stream": "survey_responses", "data": {"id": "12731107632", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843688", "survey_id": "307785408", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=VzELV_2Bc1GPJfJsXsRIabrQVAi9_2BSDOS6F_2B68zR_2BP4u98MGtxvy_2BeSOjCh5s1HKa5", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxvvr7YEM4YZ5j7dMDUtBHEw_3D?respondent_id=12731107632", "total_time": 32, "date_modified": "2021-06-10T09:20:17+00:00", "date_created": "2021-06-10T09:19:44+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785408/responses/12731107632", "pages": [{"id": "168831381", "questions": []}, {"id": "168831382", "questions": [{"id": "667461606", "answers": [{"choice_id": "4385174254"}]}, {"id": "667461628", "answers": [{"choice_id": "4385174360"}]}, {"id": "667461630", "answers": [{"choice_id": "4385174370"}]}, {"id": "667461651", "answers": [{"choice_id": "4385174483"}]}, {"id": "667461652", "answers": [{"choice_id": "4385174496"}]}]}, {"id": "168831388", "questions": [{"id": "667461666", "answers": [{"choice_id": "4385174598"}]}, {"id": "667461670", "answers": [{"choice_id": "4385174621"}]}, {"id": "667461674", "answers": [{"choice_id": "4385174636"}]}, {"id": "667461676", "answers": [{"choice_id": "4385174649"}]}, {"id": "667461686", "answers": [{"choice_id": "4385174678"}]}]}]}, "emitted_at": 1690184770270} -{"stream": "survey_responses", "data": {"id": "12731124423", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843688", "survey_id": "307785408", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=QIT1tkBlcTVTBkkHm7dpbWllNNXyBriN0RSobCplWUm5LoJpA_2FC93B5N8fFbc0Zs", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxvvr7YEM4YZ5j7dMDUtBHEw_3D?respondent_id=12731124423", "total_time": 31, "date_modified": "2021-06-10T09:28:00+00:00", "date_created": "2021-06-10T09:27:28+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785408/responses/12731124423", "pages": [{"id": "168831381", "questions": []}, {"id": "168831382", "questions": [{"id": "667461606", "answers": [{"choice_id": "4385174252"}]}, {"id": "667461628", "answers": [{"choice_id": "4385174362"}]}, {"id": "667461630", "answers": [{"choice_id": "4385174371"}]}, {"id": "667461651", "answers": [{"choice_id": "4385174484"}]}, {"id": "667461652", "answers": [{"choice_id": "4385174498"}]}]}, {"id": "168831388", "questions": [{"id": "667461666", "answers": [{"choice_id": "4385174598"}]}, {"id": "667461670", "answers": [{"choice_id": "4385174620"}]}, {"id": "667461674", "answers": [{"choice_id": "4385174634"}]}, {"id": "667461676", "answers": [{"choice_id": "4385174645"}]}, {"id": "667461686", "answers": [{"choice_id": "4385174677"}]}]}]}, "emitted_at": 1690184770271} -{"stream": "survey_responses", "data": {"id": "12731142107", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843688", "survey_id": "307785408", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=URkLmYAmILJkZgOzrwRncRyLAGrxL_2FGHdeqmjSqrSYjS_2F7cjqy4e3nBWqRsOOv0r", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxvvr7YEM4YZ5j7dMDUtBHEw_3D?respondent_id=12731142107", "total_time": 31, "date_modified": "2021-06-10T09:35:42+00:00", "date_created": "2021-06-10T09:35:10+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785408/responses/12731142107", "pages": [{"id": "168831381", "questions": []}, {"id": "168831382", "questions": [{"id": "667461606", "answers": [{"choice_id": "4385174252"}]}, {"id": "667461628", "answers": [{"choice_id": "4385174361"}]}, {"id": "667461630", "answers": [{"choice_id": "4385174370"}]}, {"id": "667461651", "answers": [{"choice_id": "4385174482"}]}, {"id": "667461652", "answers": [{"choice_id": "4385174498"}]}]}, {"id": "168831388", "questions": [{"id": "667461666", "answers": [{"choice_id": "4385174601"}]}, {"id": "667461670", "answers": [{"choice_id": "4385174621"}]}, {"id": "667461674", "answers": [{"choice_id": "4385174636"}]}, {"id": "667461676", "answers": [{"choice_id": "4385174643"}]}, {"id": "667461686", "answers": [{"choice_id": "4385174677"}]}]}]}, "emitted_at": 1690184770271} -{"stream": "survey_responses", "data": {"id": "12731159230", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843688", "survey_id": "307785408", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=O0O262_2ByzCPZXGbz3NLSBdAJfB9rNAyLSz4YX4ubxd_2BIrVIo2jJ_2Bzk8KQNxCcjXb", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxvvr7YEM4YZ5j7dMDUtBHEw_3D?respondent_id=12731159230", "total_time": 31, "date_modified": "2021-06-10T09:43:20+00:00", "date_created": "2021-06-10T09:42:49+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785408/responses/12731159230", "pages": [{"id": "168831381", "questions": []}, {"id": "168831382", "questions": [{"id": "667461606", "answers": [{"choice_id": "4385174252"}]}, {"id": "667461628", "answers": [{"choice_id": "4385174361"}]}, {"id": "667461630", "answers": [{"choice_id": "4385174371"}]}, {"id": "667461651", "answers": [{"choice_id": "4385174482"}]}, {"id": "667461652", "answers": [{"choice_id": "4385174497"}]}]}, {"id": "168831388", "questions": [{"id": "667461666", "answers": [{"choice_id": "4385174600"}]}, {"id": "667461670", "answers": [{"choice_id": "4385174623"}]}, {"id": "667461674", "answers": [{"choice_id": "4385174636"}]}, {"id": "667461676", "answers": [{"choice_id": "4385174643"}]}, {"id": "667461686", "answers": [{"choice_id": "4385174677"}]}]}]}, "emitted_at": 1690184770271} -{"stream": "survey_responses", "data": {"id": "12731176347", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843688", "survey_id": "307785408", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=LVM3eoF2xyKmckO7bkvBEw0i_2FoajMZMA_2Fc_2FVcI95ReDdVUuQQ_2Babxm1nILbPUISC", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxvvr7YEM4YZ5j7dMDUtBHEw_3D?respondent_id=12731176347", "total_time": 33, "date_modified": "2021-06-10T09:51:04+00:00", "date_created": "2021-06-10T09:50:30+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785408/responses/12731176347", "pages": [{"id": "168831381", "questions": []}, {"id": "168831382", "questions": [{"id": "667461606", "answers": [{"choice_id": "4385174253"}]}, {"id": "667461628", "answers": [{"choice_id": "4385174361"}]}, {"id": "667461630", "answers": [{"choice_id": "4385174370"}]}, {"id": "667461651", "answers": [{"choice_id": "4385174486"}]}, {"id": "667461652", "answers": [{"choice_id": "4385174497"}]}]}, {"id": "168831388", "questions": [{"id": "667461666", "answers": [{"choice_id": "4385174600"}]}, {"id": "667461670", "answers": [{"choice_id": "4385174621"}]}, {"id": "667461674", "answers": [{"choice_id": "4385174634"}]}, {"id": "667461676", "answers": [{"choice_id": "4385174649"}]}, {"id": "667461686", "answers": [{"choice_id": "4385174676"}]}]}]}, "emitted_at": 1690184770272} -{"stream": "survey_responses", "data": {"id": "12731195152", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843688", "survey_id": "307785408", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=SDlGK5Xxc7lpBehXWM_2B47eY24yPtwGB4NBBehIidUXtChXAXOgjA_2F8CxAHW97nUA", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxvvr7YEM4YZ5j7dMDUtBHEw_3D?respondent_id=12731195152", "total_time": 35, "date_modified": "2021-06-10T09:59:39+00:00", "date_created": "2021-06-10T09:59:03+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785408/responses/12731195152", "pages": [{"id": "168831381", "questions": []}, {"id": "168831382", "questions": [{"id": "667461606", "answers": [{"choice_id": "4385174253"}]}, {"id": "667461628", "answers": [{"choice_id": "4385174360"}]}, {"id": "667461630", "answers": [{"choice_id": "4385174372"}]}, {"id": "667461651", "answers": [{"choice_id": "4385174485"}]}, {"id": "667461652", "answers": [{"choice_id": "4385174497"}]}]}, {"id": "168831388", "questions": [{"id": "667461666", "answers": [{"choice_id": "4385174598"}]}, {"id": "667461670", "answers": [{"choice_id": "4385174621"}]}, {"id": "667461674", "answers": [{"choice_id": "4385174635"}]}, {"id": "667461676", "answers": [{"choice_id": "4385174647"}]}, {"id": "667461686", "answers": [{"choice_id": "4385174677"}]}]}]}, "emitted_at": 1690184770272} -{"stream": "survey_responses", "data": {"id": "12731215248", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843688", "survey_id": "307785408", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=ft3pzaYpicC1h6Ah26Wj0wRkCmTK1Yu7BTxxTQ0po_2FC00oJaK3YHuX9XV5WB5T60", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxvvr7YEM4YZ5j7dMDUtBHEw_3D?respondent_id=12731215248", "total_time": 32, "date_modified": "2021-06-10T10:07:34+00:00", "date_created": "2021-06-10T10:07:02+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785408/responses/12731215248", "pages": [{"id": "168831381", "questions": []}, {"id": "168831382", "questions": [{"id": "667461606", "answers": [{"choice_id": "4385174253"}]}, {"id": "667461628", "answers": [{"choice_id": "4385174360"}]}, {"id": "667461630", "answers": [{"choice_id": "4385174370"}]}, {"id": "667461651", "answers": [{"choice_id": "4385174485"}]}, {"id": "667461652", "answers": [{"choice_id": "4385174493"}]}]}, {"id": "168831388", "questions": [{"id": "667461666", "answers": [{"choice_id": "4385174599"}]}, {"id": "667461670", "answers": [{"choice_id": "4385174623"}]}, {"id": "667461674", "answers": [{"choice_id": "4385174635"}]}, {"id": "667461676", "answers": [{"choice_id": "4385174646"}]}, {"id": "667461686", "answers": [{"choice_id": "4385174676"}]}]}]}, "emitted_at": 1690184770273} -{"stream": "survey_responses", "data": {"id": "12731234853", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843688", "survey_id": "307785408", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=2W_2B7kOeuvhHSN7s0oUQZQwt3iySUHWLv5qq0xw8gojd0RWsFcAMM6NILAk_2FZJbdX", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxvvr7YEM4YZ5j7dMDUtBHEw_3D?respondent_id=12731234853", "total_time": 31, "date_modified": "2021-06-10T10:15:23+00:00", "date_created": "2021-06-10T10:14:51+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785408/responses/12731234853", "pages": [{"id": "168831381", "questions": []}, {"id": "168831382", "questions": [{"id": "667461606", "answers": [{"choice_id": "4385174252"}]}, {"id": "667461628", "answers": [{"choice_id": "4385174361"}]}, {"id": "667461630", "answers": [{"choice_id": "4385174372"}]}, {"id": "667461651", "answers": [{"choice_id": "4385174485"}]}, {"id": "667461652", "answers": [{"choice_id": "4385174497"}]}]}, {"id": "168831388", "questions": [{"id": "667461666", "answers": [{"choice_id": "4385174601"}]}, {"id": "667461670", "answers": [{"choice_id": "4385174622"}]}, {"id": "667461674", "answers": [{"choice_id": "4385174634"}]}, {"id": "667461676", "answers": [{"choice_id": "4385174650"}]}, {"id": "667461686", "answers": [{"choice_id": "4385174678"}]}]}]}, "emitted_at": 1690184770273} -{"stream": "survey_responses", "data": {"id": "12731253651", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843688", "survey_id": "307785408", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=0yectjqXWcCHTMhyv3pd4HqlpH_2FjslmIWhw59V07Em7ASx_2FsHGk5ZFvSKThLEb3f", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxvvr7YEM4YZ5j7dMDUtBHEw_3D?respondent_id=12731253651", "total_time": 31, "date_modified": "2021-06-10T10:23:12+00:00", "date_created": "2021-06-10T10:22:41+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785408/responses/12731253651", "pages": [{"id": "168831381", "questions": []}, {"id": "168831382", "questions": [{"id": "667461606", "answers": [{"choice_id": "4385174252"}]}, {"id": "667461628", "answers": [{"choice_id": "4385174362"}]}, {"id": "667461630", "answers": [{"choice_id": "4385174370"}]}, {"id": "667461651", "answers": [{"choice_id": "4385174482"}]}, {"id": "667461652", "answers": [{"choice_id": "4385174497"}]}]}, {"id": "168831388", "questions": [{"id": "667461666", "answers": [{"choice_id": "4385174600"}]}, {"id": "667461670", "answers": [{"choice_id": "4385174621"}]}, {"id": "667461674", "answers": [{"choice_id": "4385174636"}]}, {"id": "667461676", "answers": [{"choice_id": "4385174646"}]}, {"id": "667461686", "answers": [{"choice_id": "4385174677"}]}]}]}, "emitted_at": 1690184770273} -{"stream": "survey_responses", "data": {"id": "12731272195", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843688", "survey_id": "307785408", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=owJREFrbM6_2Fu6fqPiWbW3rJ15U9q9DLJ_2BlAZMq6Gi_2BGKjQ2AGiWVNE5RBM_2FKLyMs", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxvvr7YEM4YZ5j7dMDUtBHEw_3D?respondent_id=12731272195", "total_time": 31, "date_modified": "2021-06-10T10:30:52+00:00", "date_created": "2021-06-10T10:30:20+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785408/responses/12731272195", "pages": [{"id": "168831381", "questions": []}, {"id": "168831382", "questions": [{"id": "667461606", "answers": [{"choice_id": "4385174255"}]}, {"id": "667461628", "answers": [{"choice_id": "4385174361"}]}, {"id": "667461630", "answers": [{"choice_id": "4385174371"}]}, {"id": "667461651", "answers": [{"choice_id": "4385174482"}]}, {"id": "667461652", "answers": [{"choice_id": "4385174495"}]}]}, {"id": "168831388", "questions": [{"id": "667461666", "answers": [{"choice_id": "4385174600"}]}, {"id": "667461670", "answers": [{"choice_id": "4385174623"}]}, {"id": "667461674", "answers": [{"choice_id": "4385174634"}]}, {"id": "667461676", "answers": [{"choice_id": "4385174646"}]}, {"id": "667461686", "answers": [{"choice_id": "4385174677"}]}]}]}, "emitted_at": 1690184770273} -{"stream": "survey_responses", "data": {"id": "12731292534", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843688", "survey_id": "307785408", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=zAYnDyr9Dxd9Em4YpxVpKH7_2BtJbsMFiI0bH89dH2LhhokCrBrNjKYFI0nacFEIpq", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxvvr7YEM4YZ5j7dMDUtBHEw_3D?respondent_id=12731292534", "total_time": 32, "date_modified": "2021-06-10T10:38:57+00:00", "date_created": "2021-06-10T10:38:25+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785408/responses/12731292534", "pages": [{"id": "168831381", "questions": []}, {"id": "168831382", "questions": [{"id": "667461606", "answers": [{"choice_id": "4385174254"}]}, {"id": "667461628", "answers": [{"choice_id": "4385174362"}]}, {"id": "667461630", "answers": [{"choice_id": "4385174370"}]}, {"id": "667461651", "answers": [{"choice_id": "4385174483"}]}, {"id": "667461652", "answers": [{"choice_id": "4385174494"}]}]}, {"id": "168831388", "questions": [{"id": "667461666", "answers": [{"choice_id": "4385174599"}]}, {"id": "667461670", "answers": [{"choice_id": "4385174623"}]}, {"id": "667461674", "answers": [{"choice_id": "4385174634"}]}, {"id": "667461676", "answers": [{"choice_id": "4385174649"}]}, {"id": "667461686", "answers": [{"choice_id": "4385174678"}]}]}]}, "emitted_at": 1690184770274} -{"stream": "survey_responses", "data": {"id": "12731312208", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843688", "survey_id": "307785408", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=uSGoF1sumsyf_2B3iBhadh_2F73k_2BM0dxEwFfAlWl70AwraHwQHcfj64cL0FUt9StbRx", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxvvr7YEM4YZ5j7dMDUtBHEw_3D?respondent_id=12731312208", "total_time": 34, "date_modified": "2021-06-10T10:46:52+00:00", "date_created": "2021-06-10T10:46:18+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785408/responses/12731312208", "pages": [{"id": "168831381", "questions": []}, {"id": "168831382", "questions": [{"id": "667461606", "answers": [{"choice_id": "4385174255"}]}, {"id": "667461628", "answers": [{"choice_id": "4385174361"}]}, {"id": "667461630", "answers": [{"choice_id": "4385174371"}]}, {"id": "667461651", "answers": [{"choice_id": "4385174486"}]}, {"id": "667461652", "answers": [{"choice_id": "4385174495"}]}]}, {"id": "168831388", "questions": [{"id": "667461666", "answers": [{"choice_id": "4385174598"}]}, {"id": "667461670", "answers": [{"choice_id": "4385174621"}]}, {"id": "667461674", "answers": [{"choice_id": "4385174635"}]}, {"id": "667461676", "answers": [{"choice_id": "4385174643"}]}, {"id": "667461686", "answers": [{"choice_id": "4385174677"}]}]}]}, "emitted_at": 1690184770274} -{"stream": "survey_responses", "data": {"id": "12731331308", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843688", "survey_id": "307785408", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=vQGF4JMphBOwv8kJxOmtjdRG81lTGr7CW73feMGxDEMLSwybystpUFV76KKxYQR6", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxvvr7YEM4YZ5j7dMDUtBHEw_3D?respondent_id=12731331308", "total_time": 31, "date_modified": "2021-06-10T10:54:29+00:00", "date_created": "2021-06-10T10:53:58+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785408/responses/12731331308", "pages": [{"id": "168831381", "questions": []}, {"id": "168831382", "questions": [{"id": "667461606", "answers": [{"choice_id": "4385174253"}]}, {"id": "667461628", "answers": [{"choice_id": "4385174362"}]}, {"id": "667461630", "answers": [{"choice_id": "4385174370"}]}, {"id": "667461651", "answers": [{"choice_id": "4385174486"}]}, {"id": "667461652", "answers": [{"choice_id": "4385174498"}]}]}, {"id": "168831388", "questions": [{"id": "667461666", "answers": [{"choice_id": "4385174598"}]}, {"id": "667461670", "answers": [{"choice_id": "4385174620"}]}, {"id": "667461674", "answers": [{"choice_id": "4385174636"}]}, {"id": "667461676", "answers": [{"choice_id": "4385174644"}]}, {"id": "667461686", "answers": [{"choice_id": "4385174676"}]}]}]}, "emitted_at": 1690184770274} -{"stream": "survey_responses", "data": {"id": "12731350053", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843688", "survey_id": "307785408", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=uQGI1ouCt3TwYg_2F0OjcapOOokxhfAvoP3vsYcJntZXz4yNaoZRXNQePI1o27JEfS", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxvvr7YEM4YZ5j7dMDUtBHEw_3D?respondent_id=12731350053", "total_time": 32, "date_modified": "2021-06-10T11:02:07+00:00", "date_created": "2021-06-10T11:01:34+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785408/responses/12731350053", "pages": [{"id": "168831381", "questions": []}, {"id": "168831382", "questions": [{"id": "667461606", "answers": [{"choice_id": "4385174252"}]}, {"id": "667461628", "answers": [{"choice_id": "4385174361"}]}, {"id": "667461630", "answers": [{"choice_id": "4385174370"}]}, {"id": "667461651", "answers": [{"choice_id": "4385174483"}]}, {"id": "667461652", "answers": [{"choice_id": "4385174496"}]}]}, {"id": "168831388", "questions": [{"id": "667461666", "answers": [{"choice_id": "4385174599"}]}, {"id": "667461670", "answers": [{"choice_id": "4385174621"}]}, {"id": "667461674", "answers": [{"choice_id": "4385174634"}]}, {"id": "667461676", "answers": [{"choice_id": "4385174646"}]}, {"id": "667461686", "answers": [{"choice_id": "4385174678"}]}]}]}, "emitted_at": 1690184770275} -{"stream": "survey_responses", "data": {"id": "12730895819", "recipient_id": "", "collection_mode": "default", "response_status": "partial", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": ["168830049", "168830050", "168830060"], "collector_id": "405829319", "survey_id": "307784834", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=2awnrk_2BH5OxFP_2BQgeiHHjR9yR3bHHngtP06WJ08L7vUw_2Bi4HP_2BoIM_2Fzdi4XZBg4c", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwf91XTSWLx4XNlQvyUVl4Y_3D?respondent_id=12730895819", "total_time": 3851, "date_modified": "2021-06-10T08:30:24+00:00", "date_created": "2021-06-10T07:26:12+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784834/responses/12730895819", "pages": [{"id": "168830049", "questions": []}, {"id": "168830050", "questions": [{"id": "667455130", "answers": [{"choice_id": "4385137345"}]}, {"id": "667455161", "answers": [{"choice_id": "4385137473"}]}, {"id": "667455172", "answers": [{"choice_id": "4385137488"}]}, {"id": "667455179", "answers": [{"choice_id": "4385137493"}]}]}, {"id": "168830060", "questions": []}]}, "emitted_at": 1690184771496} -{"stream": "survey_responses", "data": {"id": "12731026318", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829319", "survey_id": "307784834", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=9zPtrl88F3NYIsYeyGVBOAH8xcbXCyfUnb5wJmafeNnqKu0OuTBMd3AMA70CA0Dd", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwf91XTSWLx4XNlQvyUVl4Y_3D?respondent_id=12731026318", "total_time": 42, "date_modified": "2021-06-10T08:38:55+00:00", "date_created": "2021-06-10T08:38:13+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784834/responses/12731026318", "pages": [{"id": "168830049", "questions": []}, {"id": "168830050", "questions": [{"id": "667455128", "answers": [{"choice_id": "4385137324"}]}, {"id": "667455130", "answers": [{"choice_id": "4385137342"}]}, {"id": "667455161", "answers": [{"choice_id": "4385137468"}]}, {"id": "667455172", "answers": [{"choice_id": "4385137486"}]}, {"id": "667455179", "answers": [{"choice_id": "4385137494"}]}]}, {"id": "168830060", "questions": [{"id": "667455202", "answers": [{"choice_id": "4385137702"}]}, {"id": "667455205", "answers": [{"choice_id": "4385137743"}]}, {"id": "667455210", "answers": [{"choice_id": "4385137776"}]}, {"id": "667455212", "answers": [{"choice_id": "4385137797"}]}, {"id": "667455215", "answers": [{"choice_id": "4385137810"}]}]}]}, "emitted_at": 1690184771496} -{"stream": "survey_responses", "data": {"id": "12731034119", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829319", "survey_id": "307784834", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=_2F1iWQXdsu69adcyLuCe0CWLGDC2dm6Mi6UQBAgHHW5tqLGpm6S8VgnyogtlqQcF4", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwf91XTSWLx4XNlQvyUVl4Y_3D?respondent_id=12731034119", "total_time": 38, "date_modified": "2021-06-10T08:43:18+00:00", "date_created": "2021-06-10T08:42:40+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784834/responses/12731034119", "pages": [{"id": "168830049", "questions": []}, {"id": "168830050", "questions": [{"id": "667455128", "answers": [{"choice_id": "4385137328"}]}, {"id": "667455130", "answers": [{"choice_id": "4385137342"}]}, {"id": "667455161", "answers": [{"choice_id": "4385137470"}]}, {"id": "667455172", "answers": [{"choice_id": "4385137488"}]}, {"id": "667455179", "answers": [{"choice_id": "4385137494"}]}]}, {"id": "168830060", "questions": [{"id": "667455202", "answers": [{"choice_id": "4385137700"}]}, {"id": "667455205", "answers": [{"choice_id": "4385137742"}]}, {"id": "667455210", "answers": [{"choice_id": "4385137782"}]}, {"id": "667455212", "answers": [{"choice_id": "4385137793"}]}, {"id": "667455215", "answers": [{"choice_id": "4385137813"}]}]}]}, "emitted_at": 1690184771496} -{"stream": "survey_responses", "data": {"id": "12731048348", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829319", "survey_id": "307784834", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=teZL209m07VmwPLhipUVAeikuCFifwYD7Xbn856h7nliTsJok_2BJ8cUsANlBG1x09", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwf91XTSWLx4XNlQvyUVl4Y_3D?respondent_id=12731048348", "total_time": 37, "date_modified": "2021-06-10T08:51:05+00:00", "date_created": "2021-06-10T08:50:27+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784834/responses/12731048348", "pages": [{"id": "168830049", "questions": []}, {"id": "168830050", "questions": [{"id": "667455128", "answers": [{"choice_id": "4385137324"}]}, {"id": "667455130", "answers": [{"choice_id": "4385137345"}]}, {"id": "667455161", "answers": [{"choice_id": "4385137472"}]}, {"id": "667455172", "answers": [{"choice_id": "4385137488"}]}, {"id": "667455179", "answers": [{"choice_id": "4385137494"}]}]}, {"id": "168830060", "questions": [{"id": "667455202", "answers": [{"choice_id": "4385137700"}]}, {"id": "667455205", "answers": [{"choice_id": "4385137741"}]}, {"id": "667455210", "answers": [{"choice_id": "4385137780"}]}, {"id": "667455212", "answers": [{"choice_id": "4385137793"}]}, {"id": "667455215", "answers": [{"choice_id": "4385137812"}]}]}]}, "emitted_at": 1690184771497} -{"stream": "survey_responses", "data": {"id": "12731062826", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829319", "survey_id": "307784834", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=iBwqqk7dTblJHtZ5UWdoNXNfZvamq_2FWqVX9lPmgcaEk_2FJSEZbpEjvTDEp1kntA4I", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwf91XTSWLx4XNlQvyUVl4Y_3D?respondent_id=12731062826", "total_time": 37, "date_modified": "2021-06-10T08:58:47+00:00", "date_created": "2021-06-10T08:58:09+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784834/responses/12731062826", "pages": [{"id": "168830049", "questions": []}, {"id": "168830050", "questions": [{"id": "667455128", "answers": [{"choice_id": "4385137325"}]}, {"id": "667455130", "answers": [{"choice_id": "4385137341"}]}, {"id": "667455161", "answers": [{"choice_id": "4385137473"}]}, {"id": "667455172", "answers": [{"choice_id": "4385137485"}]}, {"id": "667455179", "answers": [{"choice_id": "4385137494"}]}]}, {"id": "168830060", "questions": [{"id": "667455202", "answers": [{"choice_id": "4385137701"}]}, {"id": "667455205", "answers": [{"choice_id": "4385137741"}]}, {"id": "667455210", "answers": [{"choice_id": "4385137782"}]}, {"id": "667455212", "answers": [{"choice_id": "4385137794"}]}, {"id": "667455215", "answers": [{"choice_id": "4385137813"}]}]}]}, "emitted_at": 1690184771497} -{"stream": "survey_responses", "data": {"id": "12731078267", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829319", "survey_id": "307784834", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=EWvr594gc6fTNOWCnzJcr1awvjtYrfcHOS1dCZlEmJOVdekL3kH9fN015tpi7AVW", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwf91XTSWLx4XNlQvyUVl4Y_3D?respondent_id=12731078267", "total_time": 37, "date_modified": "2021-06-10T09:06:31+00:00", "date_created": "2021-06-10T09:05:53+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784834/responses/12731078267", "pages": [{"id": "168830049", "questions": []}, {"id": "168830050", "questions": [{"id": "667455128", "answers": [{"choice_id": "4385137329"}]}, {"id": "667455130", "answers": [{"choice_id": "4385137342"}]}, {"id": "667455161", "answers": [{"choice_id": "4385137468"}]}, {"id": "667455172", "answers": [{"choice_id": "4385137487"}]}, {"id": "667455179", "answers": [{"choice_id": "4385137495"}]}]}, {"id": "168830060", "questions": [{"id": "667455202", "answers": [{"choice_id": "4385137701"}]}, {"id": "667455205", "answers": [{"choice_id": "4385137741"}]}, {"id": "667455210", "answers": [{"choice_id": "4385137775"}]}, {"id": "667455212", "answers": [{"choice_id": "4385137796"}]}, {"id": "667455215", "answers": [{"choice_id": "4385137811"}]}]}]}, "emitted_at": 1690184771497} -{"stream": "survey_responses", "data": {"id": "12731094627", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829319", "survey_id": "307784834", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=UEDBFTIm3XGLxP7HF9d4l806UiymFhRau6MyuUYkfb1y7zOR96xOkRxERkrOJ9ca", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwf91XTSWLx4XNlQvyUVl4Y_3D?respondent_id=12731094627", "total_time": 38, "date_modified": "2021-06-10T09:14:16+00:00", "date_created": "2021-06-10T09:13:38+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784834/responses/12731094627", "pages": [{"id": "168830049", "questions": []}, {"id": "168830050", "questions": [{"id": "667455128", "answers": [{"choice_id": "4385137329"}]}, {"id": "667455130", "answers": [{"choice_id": "4385137345"}]}, {"id": "667455161", "answers": [{"choice_id": "4385137470"}]}, {"id": "667455172", "answers": [{"choice_id": "4385137486"}]}, {"id": "667455179", "answers": [{"choice_id": "4385137495"}]}]}, {"id": "168830060", "questions": [{"id": "667455202", "answers": [{"choice_id": "4385137700"}]}, {"id": "667455205", "answers": [{"choice_id": "4385137743"}]}, {"id": "667455210", "answers": [{"choice_id": "4385137780"}]}, {"id": "667455212", "answers": [{"choice_id": "4385137792"}]}, {"id": "667455215", "answers": [{"choice_id": "4385137810"}]}]}]}, "emitted_at": 1690184771498} -{"stream": "survey_responses", "data": {"id": "12731111090", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829319", "survey_id": "307784834", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=X3JQr1B6y2spk9th1IPF0YF8VQxsZUEffTzik0RdvFtFl2SnrnJQRca_2BDoV2r4mq", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwf91XTSWLx4XNlQvyUVl4Y_3D?respondent_id=12731111090", "total_time": 38, "date_modified": "2021-06-10T09:21:54+00:00", "date_created": "2021-06-10T09:21:16+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784834/responses/12731111090", "pages": [{"id": "168830049", "questions": []}, {"id": "168830050", "questions": [{"id": "667455128", "answers": [{"choice_id": "4385137326"}]}, {"id": "667455130", "answers": [{"choice_id": "4385137341"}]}, {"id": "667455161", "answers": [{"choice_id": "4385137474"}]}, {"id": "667455172", "answers": [{"choice_id": "4385137487"}]}, {"id": "667455179", "answers": [{"choice_id": "4385137493"}]}]}, {"id": "168830060", "questions": [{"id": "667455202", "answers": [{"choice_id": "4385137701"}]}, {"id": "667455205", "answers": [{"choice_id": "4385137741"}]}, {"id": "667455210", "answers": [{"choice_id": "4385137775"}]}, {"id": "667455212", "answers": [{"choice_id": "4385137793"}]}, {"id": "667455215", "answers": [{"choice_id": "4385137811"}]}]}]}, "emitted_at": 1690184771498} -{"stream": "survey_responses", "data": {"id": "12731127763", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829319", "survey_id": "307784834", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=ETzE9VL934Fk2aBcNUUcKaSh2V3EZHIzNm7GmAoHdznlQgZLuN90AHX13K3n6CVD", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwf91XTSWLx4XNlQvyUVl4Y_3D?respondent_id=12731127763", "total_time": 37, "date_modified": "2021-06-10T09:29:36+00:00", "date_created": "2021-06-10T09:28:58+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784834/responses/12731127763", "pages": [{"id": "168830049", "questions": []}, {"id": "168830050", "questions": [{"id": "667455128", "answers": [{"choice_id": "4385137325"}]}, {"id": "667455130", "answers": [{"choice_id": "4385137341"}]}, {"id": "667455161", "answers": [{"choice_id": "4385137472"}]}, {"id": "667455172", "answers": [{"choice_id": "4385137488"}]}, {"id": "667455179", "answers": [{"choice_id": "4385137496"}]}]}, {"id": "168830060", "questions": [{"id": "667455202", "answers": [{"choice_id": "4385137702"}]}, {"id": "667455205", "answers": [{"choice_id": "4385137740"}]}, {"id": "667455210", "answers": [{"choice_id": "4385137775"}]}, {"id": "667455212", "answers": [{"choice_id": "4385137797"}]}, {"id": "667455215", "answers": [{"choice_id": "4385137813"}]}]}]}, "emitted_at": 1690184771498} -{"stream": "survey_responses", "data": {"id": "12731145509", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829319", "survey_id": "307784834", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=VdEi6nthWHwzn1S6g9Ejj36ATaNJTgGRTvPg9OSgXQX3v7zuJ7b8kUW4JpPgoH8N", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwf91XTSWLx4XNlQvyUVl4Y_3D?respondent_id=12731145509", "total_time": 37, "date_modified": "2021-06-10T09:37:18+00:00", "date_created": "2021-06-10T09:36:40+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784834/responses/12731145509", "pages": [{"id": "168830049", "questions": []}, {"id": "168830050", "questions": [{"id": "667455128", "answers": [{"choice_id": "4385137324"}]}, {"id": "667455130", "answers": [{"choice_id": "4385137344"}]}, {"id": "667455161", "answers": [{"choice_id": "4385137473"}]}, {"id": "667455172", "answers": [{"choice_id": "4385137486"}]}, {"id": "667455179", "answers": [{"choice_id": "4385137492"}]}]}, {"id": "168830060", "questions": [{"id": "667455202", "answers": [{"choice_id": "4385137700"}]}, {"id": "667455205", "answers": [{"choice_id": "4385137743"}]}, {"id": "667455210", "answers": [{"choice_id": "4385137782"}]}, {"id": "667455212", "answers": [{"choice_id": "4385137795"}]}, {"id": "667455215", "answers": [{"choice_id": "4385137810"}]}]}]}, "emitted_at": 1690184771499} -{"stream": "survey_responses", "data": {"id": "12731162853", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829319", "survey_id": "307784834", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=SpVNwOtvMvqHvq3zHMXNhDHSmc7HA4m3XTWJpvcSJxvt1b1rvgGaOej3oC_2BolYPy", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwf91XTSWLx4XNlQvyUVl4Y_3D?respondent_id=12731162853", "total_time": 38, "date_modified": "2021-06-10T09:45:01+00:00", "date_created": "2021-06-10T09:44:23+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784834/responses/12731162853", "pages": [{"id": "168830049", "questions": []}, {"id": "168830050", "questions": [{"id": "667455128", "answers": [{"choice_id": "4385137328"}]}, {"id": "667455130", "answers": [{"choice_id": "4385137340"}]}, {"id": "667455161", "answers": [{"choice_id": "4385137469"}]}, {"id": "667455172", "answers": [{"choice_id": "4385137487"}]}, {"id": "667455179", "answers": [{"choice_id": "4385137492"}]}]}, {"id": "168830060", "questions": [{"id": "667455202", "answers": [{"choice_id": "4385137701"}]}, {"id": "667455205", "answers": [{"choice_id": "4385137743"}]}, {"id": "667455210", "answers": [{"choice_id": "4385137777"}]}, {"id": "667455212", "answers": [{"choice_id": "4385137795"}]}, {"id": "667455215", "answers": [{"choice_id": "4385137811"}]}]}]}, "emitted_at": 1690184771499} -{"stream": "survey_responses", "data": {"id": "12731180052", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829319", "survey_id": "307784834", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=JiwKhmlzAA4BE2G7wLdiXM8p7qGKqy7FVolioM_2BFHLNo4MIvbtO7U2T2MtIhjHQT", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwf91XTSWLx4XNlQvyUVl4Y_3D?respondent_id=12731180052", "total_time": 40, "date_modified": "2021-06-10T09:52:49+00:00", "date_created": "2021-06-10T09:52:09+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784834/responses/12731180052", "pages": [{"id": "168830049", "questions": []}, {"id": "168830050", "questions": [{"id": "667455128", "answers": [{"choice_id": "4385137324"}]}, {"id": "667455130", "answers": [{"choice_id": "4385137342"}]}, {"id": "667455161", "answers": [{"choice_id": "4385137468"}]}, {"id": "667455172", "answers": [{"choice_id": "4385137487"}]}, {"id": "667455179", "answers": [{"choice_id": "4385137494"}]}]}, {"id": "168830060", "questions": [{"id": "667455202", "answers": [{"choice_id": "4385137702"}]}, {"id": "667455205", "answers": [{"choice_id": "4385137740"}]}, {"id": "667455210", "answers": [{"choice_id": "4385137775"}]}, {"id": "667455212", "answers": [{"choice_id": "4385137794"}]}, {"id": "667455215", "answers": [{"choice_id": "4385137812"}]}]}]}, "emitted_at": 1690184771499} -{"stream": "survey_responses", "data": {"id": "12731198914", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829319", "survey_id": "307784834", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=4lczf0MNm2lOpWvmpCXz8qaQFfYxrkqDyo3Na6JqZ3SoyW9vfVtRkWihGPcm3TJ9", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwf91XTSWLx4XNlQvyUVl4Y_3D?respondent_id=12731198914", "total_time": 37, "date_modified": "2021-06-10T10:01:22+00:00", "date_created": "2021-06-10T10:00:44+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784834/responses/12731198914", "pages": [{"id": "168830049", "questions": []}, {"id": "168830050", "questions": [{"id": "667455128", "answers": [{"choice_id": "4385137329"}]}, {"id": "667455130", "answers": [{"choice_id": "4385137345"}]}, {"id": "667455161", "answers": [{"choice_id": "4385137472"}]}, {"id": "667455172", "answers": [{"choice_id": "4385137485"}]}, {"id": "667455179", "answers": [{"choice_id": "4385137494"}]}]}, {"id": "168830060", "questions": [{"id": "667455202", "answers": [{"choice_id": "4385137700"}]}, {"id": "667455205", "answers": [{"choice_id": "4385137741"}]}, {"id": "667455210", "answers": [{"choice_id": "4385137776"}]}, {"id": "667455212", "answers": [{"choice_id": "4385137795"}]}, {"id": "667455215", "answers": [{"choice_id": "4385137811"}]}]}]}, "emitted_at": 1690184771500} -{"stream": "survey_responses", "data": {"id": "12731219287", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829319", "survey_id": "307784834", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=BuKPZoR_2B7yhJkjbyRrpCv9Xsryak851Sl2u8K17_2FsYnSAZUFHdu4fJB0umnx5Y9_2B", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwf91XTSWLx4XNlQvyUVl4Y_3D?respondent_id=12731219287", "total_time": 37, "date_modified": "2021-06-10T10:09:13+00:00", "date_created": "2021-06-10T10:08:35+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784834/responses/12731219287", "pages": [{"id": "168830049", "questions": []}, {"id": "168830050", "questions": [{"id": "667455128", "answers": [{"choice_id": "4385137327"}]}, {"id": "667455130", "answers": [{"choice_id": "4385137341"}]}, {"id": "667455161", "answers": [{"choice_id": "4385137470"}]}, {"id": "667455172", "answers": [{"choice_id": "4385137488"}]}, {"id": "667455179", "answers": [{"choice_id": "4385137494"}]}]}, {"id": "168830060", "questions": [{"id": "667455202", "answers": [{"choice_id": "4385137701"}]}, {"id": "667455205", "answers": [{"choice_id": "4385137740"}]}, {"id": "667455210", "answers": [{"choice_id": "4385137776"}]}, {"id": "667455212", "answers": [{"choice_id": "4385137792"}]}, {"id": "667455215", "answers": [{"choice_id": "4385137811"}]}]}]}, "emitted_at": 1690184771500} -{"stream": "survey_responses", "data": {"id": "12731238577", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829319", "survey_id": "307784834", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=gYK41YcmDya_2Bvsx_2BqjLe992H7r6ORc6kyUL3YhAqx78syU1tTY9dv_2Fn92J5xM033", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwf91XTSWLx4XNlQvyUVl4Y_3D?respondent_id=12731238577", "total_time": 37, "date_modified": "2021-06-10T10:16:59+00:00", "date_created": "2021-06-10T10:16:21+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784834/responses/12731238577", "pages": [{"id": "168830049", "questions": []}, {"id": "168830050", "questions": [{"id": "667455128", "answers": [{"choice_id": "4385137327"}]}, {"id": "667455130", "answers": [{"choice_id": "4385137340"}]}, {"id": "667455161", "answers": [{"choice_id": "4385137472"}]}, {"id": "667455172", "answers": [{"choice_id": "4385137485"}]}, {"id": "667455179", "answers": [{"choice_id": "4385137496"}]}]}, {"id": "168830060", "questions": [{"id": "667455202", "answers": [{"choice_id": "4385137702"}]}, {"id": "667455205", "answers": [{"choice_id": "4385137741"}]}, {"id": "667455210", "answers": [{"choice_id": "4385137778"}]}, {"id": "667455212", "answers": [{"choice_id": "4385137793"}]}, {"id": "667455215", "answers": [{"choice_id": "4385137810"}]}]}]}, "emitted_at": 1690184771500} -{"stream": "survey_responses", "data": {"id": "12731257342", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829319", "survey_id": "307784834", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=gXcd4_2B0crZ3JHyflBVCFpzaABCmJO9PKQzLnjMuvOHPsOS0sIISJZjVscMHtusHw", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwf91XTSWLx4XNlQvyUVl4Y_3D?respondent_id=12731257342", "total_time": 37, "date_modified": "2021-06-10T10:24:47+00:00", "date_created": "2021-06-10T10:24:10+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784834/responses/12731257342", "pages": [{"id": "168830049", "questions": []}, {"id": "168830050", "questions": [{"id": "667455128", "answers": [{"choice_id": "4385137324"}]}, {"id": "667455130", "answers": [{"choice_id": "4385137340"}]}, {"id": "667455161", "answers": [{"choice_id": "4385137471"}]}, {"id": "667455172", "answers": [{"choice_id": "4385137486"}]}, {"id": "667455179", "answers": [{"choice_id": "4385137495"}]}]}, {"id": "168830060", "questions": [{"id": "667455202", "answers": [{"choice_id": "4385137701"}]}, {"id": "667455205", "answers": [{"choice_id": "4385137740"}]}, {"id": "667455210", "answers": [{"choice_id": "4385137782"}]}, {"id": "667455212", "answers": [{"choice_id": "4385137793"}]}, {"id": "667455215", "answers": [{"choice_id": "4385137813"}]}]}]}, "emitted_at": 1690184771501} -{"stream": "survey_responses", "data": {"id": "12731276322", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829319", "survey_id": "307784834", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=fdG3OylITjbhJlFfCAIGHmTQ6YpdyOcILnp8DliR5bm9qPSRMG6AkkWm3Gu2gJoP", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwf91XTSWLx4XNlQvyUVl4Y_3D?respondent_id=12731276322", "total_time": 41, "date_modified": "2021-06-10T10:32:36+00:00", "date_created": "2021-06-10T10:31:54+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784834/responses/12731276322", "pages": [{"id": "168830049", "questions": []}, {"id": "168830050", "questions": [{"id": "667455128", "answers": [{"choice_id": "4385137328"}]}, {"id": "667455130", "answers": [{"choice_id": "4385137340"}]}, {"id": "667455161", "answers": [{"choice_id": "4385137468"}]}, {"id": "667455172", "answers": [{"choice_id": "4385137487"}]}, {"id": "667455179", "answers": [{"choice_id": "4385137495"}]}]}, {"id": "168830060", "questions": [{"id": "667455202", "answers": [{"choice_id": "4385137702"}]}, {"id": "667455205", "answers": [{"choice_id": "4385137742"}]}, {"id": "667455210", "answers": [{"choice_id": "4385137776"}]}, {"id": "667455212", "answers": [{"choice_id": "4385137793"}]}, {"id": "667455215", "answers": [{"choice_id": "4385137812"}]}]}]}, "emitted_at": 1690184771501} -{"stream": "survey_responses", "data": {"id": "12731296299", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829319", "survey_id": "307784834", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=PijLaybIFW4zp3WXn6Qn_2Bes5snwJCUJdhdklhMDUCFNxV8kYJTspV5CKmi9JxcxR", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwf91XTSWLx4XNlQvyUVl4Y_3D?respondent_id=12731296299", "total_time": 28, "date_modified": "2021-06-10T10:40:25+00:00", "date_created": "2021-06-10T10:39:56+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784834/responses/12731296299", "pages": [{"id": "168830049", "questions": []}, {"id": "168830050", "questions": []}, {"id": "168830060", "questions": [{"id": "667455202", "answers": [{"choice_id": "4385137700"}]}, {"id": "667455205", "answers": [{"choice_id": "4385137742"}]}, {"id": "667455210", "answers": [{"choice_id": "4385137778"}]}, {"id": "667455212", "answers": [{"choice_id": "4385137795"}]}, {"id": "667455215", "answers": [{"choice_id": "4385137812"}]}]}]}, "emitted_at": 1690184771501} -{"stream": "survey_responses", "data": {"id": "12731316167", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829319", "survey_id": "307784834", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=DdWwrUlBvSWJMjuKWSDCTJGmzJlZPpJ9HEf_2F_2BW1pzPCQ9WlJ7Su0kQcly_2B_2F6O8il", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwf91XTSWLx4XNlQvyUVl4Y_3D?respondent_id=12731316167", "total_time": 37, "date_modified": "2021-06-10T10:48:31+00:00", "date_created": "2021-06-10T10:47:54+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784834/responses/12731316167", "pages": [{"id": "168830049", "questions": []}, {"id": "168830050", "questions": [{"id": "667455128", "answers": [{"choice_id": "4385137327"}]}, {"id": "667455130", "answers": [{"choice_id": "4385137342"}]}, {"id": "667455161", "answers": [{"choice_id": "4385137469"}]}, {"id": "667455172", "answers": [{"choice_id": "4385137487"}]}, {"id": "667455179", "answers": [{"choice_id": "4385137492"}]}]}, {"id": "168830060", "questions": [{"id": "667455202", "answers": [{"choice_id": "4385137700"}]}, {"id": "667455205", "answers": [{"choice_id": "4385137740"}]}, {"id": "667455210", "answers": [{"choice_id": "4385137776"}]}, {"id": "667455212", "answers": [{"choice_id": "4385137797"}]}, {"id": "667455215", "answers": [{"choice_id": "4385137811"}]}]}]}, "emitted_at": 1690184771501} -{"stream": "survey_responses", "data": {"id": "12731335082", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829319", "survey_id": "307784834", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=JT5nsI3dl0SB9aaAvObos67DXY6qy_2FDXI1TTpgeEv1m3sqvcxy3MhVQQlApbsDQN", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwf91XTSWLx4XNlQvyUVl4Y_3D?respondent_id=12731335082", "total_time": 37, "date_modified": "2021-06-10T10:56:07+00:00", "date_created": "2021-06-10T10:55:30+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784834/responses/12731335082", "pages": [{"id": "168830049", "questions": []}, {"id": "168830050", "questions": [{"id": "667455128", "answers": [{"choice_id": "4385137325"}]}, {"id": "667455130", "answers": [{"choice_id": "4385137340"}]}, {"id": "667455161", "answers": [{"choice_id": "4385137473"}]}, {"id": "667455172", "answers": [{"choice_id": "4385137487"}]}, {"id": "667455179", "answers": [{"choice_id": "4385137493"}]}]}, {"id": "168830060", "questions": [{"id": "667455202", "answers": [{"choice_id": "4385137701"}]}, {"id": "667455205", "answers": [{"choice_id": "4385137742"}]}, {"id": "667455210", "answers": [{"choice_id": "4385137782"}]}, {"id": "667455212", "answers": [{"choice_id": "4385137794"}]}, {"id": "667455215", "answers": [{"choice_id": "4385137810"}]}]}]}, "emitted_at": 1690184771502} -{"stream": "survey_responses", "data": {"id": "12731354013", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829319", "survey_id": "307784834", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=VD2H3n1aqESl1LrF5U_2B7SlCYCH1VXiN11NQ44aSykUk8VdT8zluzTaiFP1WRilVh", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwf91XTSWLx4XNlQvyUVl4Y_3D?respondent_id=12731354013", "total_time": 37, "date_modified": "2021-06-10T11:03:44+00:00", "date_created": "2021-06-10T11:03:07+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784834/responses/12731354013", "pages": [{"id": "168830049", "questions": []}, {"id": "168830050", "questions": [{"id": "667455128", "answers": [{"choice_id": "4385137325"}]}, {"id": "667455130", "answers": [{"choice_id": "4385137344"}]}, {"id": "667455161", "answers": [{"choice_id": "4385137473"}]}, {"id": "667455172", "answers": [{"choice_id": "4385137488"}]}, {"id": "667455179", "answers": [{"choice_id": "4385137493"}]}]}, {"id": "168830060", "questions": [{"id": "667455202", "answers": [{"choice_id": "4385137701"}]}, {"id": "667455205", "answers": [{"choice_id": "4385137740"}]}, {"id": "667455210", "answers": [{"choice_id": "4385137781"}]}, {"id": "667455212", "answers": [{"choice_id": "4385137792"}]}, {"id": "667455215", "answers": [{"choice_id": "4385137811"}]}]}]}, "emitted_at": 1690184771502} -{"stream": "survey_responses", "data": {"id": "12731046667", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829931", "survey_id": "307785448", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=3m5bJqYOPzi9hTHWeQaMVChOOH_2FutIpC1GCzcEhDLsPuEi5IsDvCVew50TpjMJ7_2B", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxi9bVow7JzoPHlGgDld6S4o_3D?respondent_id=12731046667", "total_time": 36, "date_modified": "2021-06-10T08:50:07+00:00", "date_created": "2021-06-10T08:49:30+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785448/responses/12731046667", "pages": [{"id": "168831470", "questions": []}, {"id": "168831471", "questions": [{"id": "667462113", "answers": [{"choice_id": "4385177067"}]}, {"id": "667462114", "answers": [{"choice_id": "4385177098"}]}, {"id": "667462122", "answers": [{"choice_id": "4385177138"}]}, {"id": "667462126", "answers": [{"choice_id": "4385177155"}]}, {"id": "667462130", "answers": [{"choice_id": "4385177172"}]}]}, {"id": "168831478", "questions": [{"id": "667462135", "answers": [{"choice_id": "4385177202"}]}, {"id": "667462136", "answers": [{"choice_id": "4385177213"}]}, {"id": "667462138", "answers": [{"choice_id": "4385177216"}]}, {"id": "667462170", "answers": [{"choice_id": "4385177381"}]}, {"id": "667462172", "answers": [{"choice_id": "4385177393"}]}]}]}, "emitted_at": 1690184772385} -{"stream": "survey_responses", "data": {"id": "12731061053", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829931", "survey_id": "307785448", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=RliJfymKsHA5ERiuKeRola_2B_2F2kldmGpI58KjGRQTMy_2F7NOeFnJ8VaeyjrQYqefuv", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxi9bVow7JzoPHlGgDld6S4o_3D?respondent_id=12731061053", "total_time": 36, "date_modified": "2021-06-10T08:57:50+00:00", "date_created": "2021-06-10T08:57:13+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785448/responses/12731061053", "pages": [{"id": "168831470", "questions": []}, {"id": "168831471", "questions": [{"id": "667462113", "answers": [{"choice_id": "4385177068"}]}, {"id": "667462114", "answers": [{"choice_id": "4385177098"}]}, {"id": "667462122", "answers": [{"choice_id": "4385177139"}]}, {"id": "667462126", "answers": [{"choice_id": "4385177158"}]}, {"id": "667462130", "answers": [{"choice_id": "4385177171"}]}]}, {"id": "168831478", "questions": [{"id": "667462135", "answers": [{"choice_id": "4385177202"}]}, {"id": "667462136", "answers": [{"choice_id": "4385177211"}]}, {"id": "667462138", "answers": [{"choice_id": "4385177217"}]}, {"id": "667462170", "answers": [{"choice_id": "4385177381"}]}, {"id": "667462172", "answers": [{"choice_id": "4385177395"}]}]}]}, "emitted_at": 1690184772386} -{"stream": "survey_responses", "data": {"id": "12731076167", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829931", "survey_id": "307785448", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=Fbm6tEId2O2bNT8H0BJ8ZuinueLnpLoMwgotsY_2BRD_2FzbvCnhEaxZHye5e5bkXF_2B0", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxi9bVow7JzoPHlGgDld6S4o_3D?respondent_id=12731076167", "total_time": 36, "date_modified": "2021-06-10T09:05:33+00:00", "date_created": "2021-06-10T09:04:57+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785448/responses/12731076167", "pages": [{"id": "168831470", "questions": []}, {"id": "168831471", "questions": [{"id": "667462113", "answers": [{"choice_id": "4385177067"}]}, {"id": "667462114", "answers": [{"choice_id": "4385177097"}]}, {"id": "667462122", "answers": [{"choice_id": "4385177142"}]}, {"id": "667462126", "answers": [{"choice_id": "4385177153"}]}, {"id": "667462130", "answers": [{"choice_id": "4385177172"}]}]}, {"id": "168831478", "questions": [{"id": "667462135", "answers": [{"choice_id": "4385177204"}]}, {"id": "667462136", "answers": [{"choice_id": "4385177210"}]}, {"id": "667462138", "answers": [{"choice_id": "4385177220"}]}, {"id": "667462170", "answers": [{"choice_id": "4385177382"}]}, {"id": "667462172", "answers": [{"choice_id": "4385177392"}]}]}]}, "emitted_at": 1690184772386} -{"stream": "survey_responses", "data": {"id": "12731092606", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829931", "survey_id": "307785448", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=FSRkUyOqh9XslZbpVGw5556THT2fSckee67DmK8vg8QM5E_2BIPpatzKJNVF6E4pI2", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxi9bVow7JzoPHlGgDld6S4o_3D?respondent_id=12731092606", "total_time": 36, "date_modified": "2021-06-10T09:13:19+00:00", "date_created": "2021-06-10T09:12:43+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785448/responses/12731092606", "pages": [{"id": "168831470", "questions": []}, {"id": "168831471", "questions": [{"id": "667462113", "answers": [{"choice_id": "4385177066"}]}, {"id": "667462114", "answers": [{"choice_id": "4385177098"}]}, {"id": "667462122", "answers": [{"choice_id": "4385177138"}]}, {"id": "667462126", "answers": [{"choice_id": "4385177153"}]}, {"id": "667462130", "answers": [{"choice_id": "4385177173"}]}]}, {"id": "168831478", "questions": [{"id": "667462135", "answers": [{"choice_id": "4385177202"}]}, {"id": "667462136", "answers": [{"choice_id": "4385177213"}]}, {"id": "667462138", "answers": [{"choice_id": "4385177215"}]}, {"id": "667462170", "answers": [{"choice_id": "4385177381"}]}, {"id": "667462172", "answers": [{"choice_id": "4385177399"}]}]}]}, "emitted_at": 1690184772386} -{"stream": "survey_responses", "data": {"id": "12731109051", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829931", "survey_id": "307785448", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=hFBXkisB3CTNYQ4UTLe_2FgTWjKwcR0l_2Fk9xXG2L2eiiv2m1A4IK6anbX8e98lxI_2BJ", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxi9bVow7JzoPHlGgDld6S4o_3D?respondent_id=12731109051", "total_time": 36, "date_modified": "2021-06-10T09:20:57+00:00", "date_created": "2021-06-10T09:20:21+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785448/responses/12731109051", "pages": [{"id": "168831470", "questions": []}, {"id": "168831471", "questions": [{"id": "667462113", "answers": [{"choice_id": "4385177068"}]}, {"id": "667462114", "answers": [{"choice_id": "4385177098"}]}, {"id": "667462122", "answers": [{"choice_id": "4385177139"}]}, {"id": "667462126", "answers": [{"choice_id": "4385177156"}]}, {"id": "667462130", "answers": [{"choice_id": "4385177171"}]}]}, {"id": "168831478", "questions": [{"id": "667462135", "answers": [{"choice_id": "4385177204"}]}, {"id": "667462136", "answers": [{"choice_id": "4385177211"}]}, {"id": "667462138", "answers": [{"choice_id": "4385177220"}]}, {"id": "667462170", "answers": [{"choice_id": "4385177383"}]}, {"id": "667462172", "answers": [{"choice_id": "4385177398"}]}]}]}, "emitted_at": 1690184772387} -{"stream": "survey_responses", "data": {"id": "12731125773", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829931", "survey_id": "307785448", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=IitR8qwcQJhjvJZGpnRnQ51uSFMITWI_2Fj71B7P5SgZeC4E3yXEi1_2B_2FMm5wo_2BDV8h", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxi9bVow7JzoPHlGgDld6S4o_3D?respondent_id=12731125773", "total_time": 37, "date_modified": "2021-06-10T09:28:41+00:00", "date_created": "2021-06-10T09:28:04+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785448/responses/12731125773", "pages": [{"id": "168831470", "questions": []}, {"id": "168831471", "questions": [{"id": "667462113", "answers": [{"choice_id": "4385177066"}]}, {"id": "667462114", "answers": [{"choice_id": "4385177099"}]}, {"id": "667462122", "answers": [{"choice_id": "4385177142"}]}, {"id": "667462126", "answers": [{"choice_id": "4385177158"}]}, {"id": "667462130", "answers": [{"choice_id": "4385177171"}]}]}, {"id": "168831478", "questions": [{"id": "667462135", "answers": [{"choice_id": "4385177203"}]}, {"id": "667462136", "answers": [{"choice_id": "4385177211"}]}, {"id": "667462138", "answers": [{"choice_id": "4385177220"}]}, {"id": "667462170", "answers": [{"choice_id": "4385177382"}]}, {"id": "667462172", "answers": [{"choice_id": "4385177392"}]}]}]}, "emitted_at": 1690184772387} -{"stream": "survey_responses", "data": {"id": "12731143496", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829931", "survey_id": "307785448", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=afwrrGApCHDKe6n5FTBrtOaHsGGtP_2BhesVrwRj_2FP25EjYCQxzsAC6aAkUchVScSv", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxi9bVow7JzoPHlGgDld6S4o_3D?respondent_id=12731143496", "total_time": 36, "date_modified": "2021-06-10T09:36:22+00:00", "date_created": "2021-06-10T09:35:46+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785448/responses/12731143496", "pages": [{"id": "168831470", "questions": []}, {"id": "168831471", "questions": [{"id": "667462113", "answers": [{"choice_id": "4385177066"}]}, {"id": "667462114", "answers": [{"choice_id": "4385177097"}]}, {"id": "667462122", "answers": [{"choice_id": "4385177138"}]}, {"id": "667462126", "answers": [{"choice_id": "4385177154"}]}, {"id": "667462130", "answers": [{"choice_id": "4385177174"}]}]}, {"id": "168831478", "questions": [{"id": "667462135", "answers": [{"choice_id": "4385177201"}]}, {"id": "667462136", "answers": [{"choice_id": "4385177213"}]}, {"id": "667462138", "answers": [{"choice_id": "4385177216"}]}, {"id": "667462170", "answers": [{"choice_id": "4385177382"}]}, {"id": "667462172", "answers": [{"choice_id": "4385177392"}]}]}]}, "emitted_at": 1690184772387} -{"stream": "survey_responses", "data": {"id": "12731160592", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829931", "survey_id": "307785448", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=gi5j6iOFt63bX7t4yH9tggXGZlHkMPjNnRuEfb_2BfIFrcSwpamxUO_2B9Y0WXX9oSHD", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxi9bVow7JzoPHlGgDld6S4o_3D?respondent_id=12731160592", "total_time": 35, "date_modified": "2021-06-10T09:44:01+00:00", "date_created": "2021-06-10T09:43:25+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785448/responses/12731160592", "pages": [{"id": "168831470", "questions": []}, {"id": "168831471", "questions": [{"id": "667462113", "answers": [{"choice_id": "4385177065"}]}, {"id": "667462114", "answers": [{"choice_id": "4385177098"}]}, {"id": "667462122", "answers": [{"choice_id": "4385177142"}]}, {"id": "667462126", "answers": [{"choice_id": "4385177156"}]}, {"id": "667462130", "answers": [{"choice_id": "4385177174"}]}]}, {"id": "168831478", "questions": [{"id": "667462135", "answers": [{"choice_id": "4385177203"}]}, {"id": "667462136", "answers": [{"choice_id": "4385177210"}]}, {"id": "667462138", "answers": [{"choice_id": "4385177219"}]}, {"id": "667462170", "answers": [{"choice_id": "4385177382"}]}, {"id": "667462172", "answers": [{"choice_id": "4385177398"}]}]}]}, "emitted_at": 1690184772388} -{"stream": "survey_responses", "data": {"id": "12731177842", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829931", "survey_id": "307785448", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=Mdft9tg4e78_2Fa7lJAhODrqVOjXTcfF0kM1rlL9FH_2B4OFTc8svhyUTN194QTJecZV", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxi9bVow7JzoPHlGgDld6S4o_3D?respondent_id=12731177842", "total_time": 38, "date_modified": "2021-06-10T09:51:48+00:00", "date_created": "2021-06-10T09:51:09+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785448/responses/12731177842", "pages": [{"id": "168831470", "questions": []}, {"id": "168831471", "questions": [{"id": "667462113", "answers": [{"choice_id": "4385177068"}]}, {"id": "667462114", "answers": [{"choice_id": "4385177097"}]}, {"id": "667462122", "answers": [{"choice_id": "4385177141"}]}, {"id": "667462126", "answers": [{"choice_id": "4385177155"}]}, {"id": "667462130", "answers": [{"choice_id": "4385177174"}]}]}, {"id": "168831478", "questions": [{"id": "667462135", "answers": [{"choice_id": "4385177204"}]}, {"id": "667462136", "answers": [{"choice_id": "4385177211"}]}, {"id": "667462138", "answers": [{"choice_id": "4385177220"}]}, {"id": "667462170", "answers": [{"choice_id": "4385177381"}]}, {"id": "667462172", "answers": [{"choice_id": "4385177398"}]}]}]}, "emitted_at": 1690184772388} -{"stream": "survey_responses", "data": {"id": "12731196644", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829931", "survey_id": "307785448", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=j0mjP6Z7LrcWGOboY6jmQkq3eLodG5VEJ1xjmK_2FVYi4g_2BLXzw2TXVIZDXlMQfSce", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxi9bVow7JzoPHlGgDld6S4o_3D?respondent_id=12731196644", "total_time": 38, "date_modified": "2021-06-10T10:00:23+00:00", "date_created": "2021-06-10T09:59:44+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785448/responses/12731196644", "pages": [{"id": "168831470", "questions": []}, {"id": "168831471", "questions": [{"id": "667462113", "answers": [{"choice_id": "4385177067"}]}, {"id": "667462114", "answers": [{"choice_id": "4385177096"}]}, {"id": "667462122", "answers": [{"choice_id": "4385177141"}]}, {"id": "667462126", "answers": [{"choice_id": "4385177153"}]}, {"id": "667462130", "answers": [{"choice_id": "4385177174"}]}]}, {"id": "168831478", "questions": [{"id": "667462135", "answers": [{"choice_id": "4385177202"}]}, {"id": "667462136", "answers": [{"choice_id": "4385177211"}]}, {"id": "667462138", "answers": [{"choice_id": "4385177218"}]}, {"id": "667462170", "answers": [{"choice_id": "4385177381"}]}, {"id": "667462172", "answers": [{"choice_id": "4385177393"}]}]}]}, "emitted_at": 1690184772388} -{"stream": "survey_responses", "data": {"id": "12731216845", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829931", "survey_id": "307785448", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=42BF5q8j9sIJ7FZW3oHucGLBgLbhnDLuy54e3nC1dr_2BV65av2cB1XpRMxXE8qLrk", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxi9bVow7JzoPHlGgDld6S4o_3D?respondent_id=12731216845", "total_time": 35, "date_modified": "2021-06-10T10:08:14+00:00", "date_created": "2021-06-10T10:07:38+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785448/responses/12731216845", "pages": [{"id": "168831470", "questions": []}, {"id": "168831471", "questions": [{"id": "667462113", "answers": [{"choice_id": "4385177066"}]}, {"id": "667462114", "answers": [{"choice_id": "4385177099"}]}, {"id": "667462122", "answers": [{"choice_id": "4385177139"}]}, {"id": "667462126", "answers": [{"choice_id": "4385177158"}]}, {"id": "667462130", "answers": [{"choice_id": "4385177172"}]}]}, {"id": "168831478", "questions": [{"id": "667462135", "answers": [{"choice_id": "4385177202"}]}, {"id": "667462136", "answers": [{"choice_id": "4385177211"}]}, {"id": "667462138", "answers": [{"choice_id": "4385177215"}]}, {"id": "667462170", "answers": [{"choice_id": "4385177383"}]}, {"id": "667462172", "answers": [{"choice_id": "4385177392"}]}]}]}, "emitted_at": 1690184772389} -{"stream": "survey_responses", "data": {"id": "12731236327", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829931", "survey_id": "307785448", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=NqwagvKjeHPRtNo1Xjgzhlf20oC0V9pmZhKn9M_2FI7d0WQpLl5SvMJmpPx_2FMR8WOA", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxi9bVow7JzoPHlGgDld6S4o_3D?respondent_id=12731236327", "total_time": 36, "date_modified": "2021-06-10T10:16:03+00:00", "date_created": "2021-06-10T10:15:27+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785448/responses/12731236327", "pages": [{"id": "168831470", "questions": []}, {"id": "168831471", "questions": [{"id": "667462113", "answers": [{"choice_id": "4385177066"}]}, {"id": "667462114", "answers": [{"choice_id": "4385177099"}]}, {"id": "667462122", "answers": [{"choice_id": "4385177138"}]}, {"id": "667462126", "answers": [{"choice_id": "4385177153"}]}, {"id": "667462130", "answers": [{"choice_id": "4385177171"}]}]}, {"id": "168831478", "questions": [{"id": "667462135", "answers": [{"choice_id": "4385177204"}]}, {"id": "667462136", "answers": [{"choice_id": "4385177212"}]}, {"id": "667462138", "answers": [{"choice_id": "4385177214"}]}, {"id": "667462170", "answers": [{"choice_id": "4385177382"}]}, {"id": "667462172", "answers": [{"choice_id": "4385177394"}]}]}]}, "emitted_at": 1690184772389} -{"stream": "survey_responses", "data": {"id": "12731255115", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829931", "survey_id": "307785448", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=68xsdhMhxZnM6kEBzq6YOW2Pcy65CdGFnjt47M9pu1JOngmURrmW_2BioZOUdc5iwt", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxi9bVow7JzoPHlGgDld6S4o_3D?respondent_id=12731255115", "total_time": 35, "date_modified": "2021-06-10T10:23:53+00:00", "date_created": "2021-06-10T10:23:17+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785448/responses/12731255115", "pages": [{"id": "168831470", "questions": []}, {"id": "168831471", "questions": [{"id": "667462113", "answers": [{"choice_id": "4385177067"}]}, {"id": "667462114", "answers": [{"choice_id": "4385177095"}]}, {"id": "667462122", "answers": [{"choice_id": "4385177141"}]}, {"id": "667462126", "answers": [{"choice_id": "4385177158"}]}, {"id": "667462130", "answers": [{"choice_id": "4385177172"}]}]}, {"id": "168831478", "questions": [{"id": "667462135", "answers": [{"choice_id": "4385177202"}]}, {"id": "667462136", "answers": [{"choice_id": "4385177211"}]}, {"id": "667462138", "answers": [{"choice_id": "4385177218"}]}, {"id": "667462170", "answers": [{"choice_id": "4385177382"}]}, {"id": "667462172", "answers": [{"choice_id": "4385177395"}]}]}]}, "emitted_at": 1690184772389} -{"stream": "survey_responses", "data": {"id": "12731273731", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829931", "survey_id": "307785448", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=5V96fOaPSetDK7aFZp_2BQMlsfP5WVMuaklhLOVJzxjkLao_2BG1CUfWUAOvXCoIqzu7", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxi9bVow7JzoPHlGgDld6S4o_3D?respondent_id=12731273731", "total_time": 35, "date_modified": "2021-06-10T10:31:32+00:00", "date_created": "2021-06-10T10:30:56+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785448/responses/12731273731", "pages": [{"id": "168831470", "questions": []}, {"id": "168831471", "questions": [{"id": "667462113", "answers": [{"choice_id": "4385177065"}]}, {"id": "667462114", "answers": [{"choice_id": "4385177096"}]}, {"id": "667462122", "answers": [{"choice_id": "4385177140"}]}, {"id": "667462126", "answers": [{"choice_id": "4385177155"}]}, {"id": "667462130", "answers": [{"choice_id": "4385177171"}]}]}, {"id": "168831478", "questions": [{"id": "667462135", "answers": [{"choice_id": "4385177203"}]}, {"id": "667462136", "answers": [{"choice_id": "4385177210"}]}, {"id": "667462138", "answers": [{"choice_id": "4385177215"}]}, {"id": "667462170", "answers": [{"choice_id": "4385177381"}]}, {"id": "667462172", "answers": [{"choice_id": "4385177398"}]}]}]}, "emitted_at": 1690184772389} -{"stream": "survey_responses", "data": {"id": "12731294056", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829931", "survey_id": "307785448", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=yqW6gwVkcj7CYuN1hQxBNE2_2BGnnSO_2FMoG_2FJOr_2Bl9LOFwGfvlISTxAL6DegDXn5gw", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxi9bVow7JzoPHlGgDld6S4o_3D?respondent_id=12731294056", "total_time": 36, "date_modified": "2021-06-10T10:39:38+00:00", "date_created": "2021-06-10T10:39:02+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785448/responses/12731294056", "pages": [{"id": "168831470", "questions": []}, {"id": "168831471", "questions": [{"id": "667462113", "answers": [{"choice_id": "4385177067"}]}, {"id": "667462114", "answers": [{"choice_id": "4385177096"}]}, {"id": "667462122", "answers": [{"choice_id": "4385177142"}]}, {"id": "667462126", "answers": [{"choice_id": "4385177154"}]}, {"id": "667462130", "answers": [{"choice_id": "4385177172"}]}]}, {"id": "168831478", "questions": [{"id": "667462135", "answers": [{"choice_id": "4385177204"}]}, {"id": "667462136", "answers": [{"choice_id": "4385177213"}]}, {"id": "667462138", "answers": [{"choice_id": "4385177214"}]}, {"id": "667462170", "answers": [{"choice_id": "4385177383"}]}, {"id": "667462172", "answers": [{"choice_id": "4385177393"}]}]}]}, "emitted_at": 1690184772390} -{"stream": "survey_responses", "data": {"id": "12731313837", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829931", "survey_id": "307785448", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=D85HuJHtbiPW1vl_2FIvU2ocvUVCdJggW_2F0WR5WpMSLCGZaGIg8UdLj54drv_2FcmRMz", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxi9bVow7JzoPHlGgDld6S4o_3D?respondent_id=12731313837", "total_time": 37, "date_modified": "2021-06-10T10:47:34+00:00", "date_created": "2021-06-10T10:46:57+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785448/responses/12731313837", "pages": [{"id": "168831470", "questions": []}, {"id": "168831471", "questions": [{"id": "667462113", "answers": [{"choice_id": "4385177067"}]}, {"id": "667462114", "answers": [{"choice_id": "4385177097"}]}, {"id": "667462122", "answers": [{"choice_id": "4385177139"}]}, {"id": "667462126", "answers": [{"choice_id": "4385177156"}]}, {"id": "667462130", "answers": [{"choice_id": "4385177172"}]}]}, {"id": "168831478", "questions": [{"id": "667462135", "answers": [{"choice_id": "4385177200"}]}, {"id": "667462136", "answers": [{"choice_id": "4385177212"}]}, {"id": "667462138", "answers": [{"choice_id": "4385177220"}]}, {"id": "667462170", "answers": [{"choice_id": "4385177382"}]}, {"id": "667462172", "answers": [{"choice_id": "4385177395"}]}]}]}, "emitted_at": 1690184772390} -{"stream": "survey_responses", "data": {"id": "12731332767", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829931", "survey_id": "307785448", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=hztkSX5ztC9RUhcoXXjOEZsT0_2BjH5f6tNA9Fh0afPXTD4285Qb8YReTOBLbelKig", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxi9bVow7JzoPHlGgDld6S4o_3D?respondent_id=12731332767", "total_time": 35, "date_modified": "2021-06-10T10:55:09+00:00", "date_created": "2021-06-10T10:54:33+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785448/responses/12731332767", "pages": [{"id": "168831470", "questions": []}, {"id": "168831471", "questions": [{"id": "667462113", "answers": [{"choice_id": "4385177068"}]}, {"id": "667462114", "answers": [{"choice_id": "4385177096"}]}, {"id": "667462122", "answers": [{"choice_id": "4385177139"}]}, {"id": "667462126", "answers": [{"choice_id": "4385177158"}]}, {"id": "667462130", "answers": [{"choice_id": "4385177172"}]}]}, {"id": "168831478", "questions": [{"id": "667462135", "answers": [{"choice_id": "4385177204"}]}, {"id": "667462136", "answers": [{"choice_id": "4385177210"}]}, {"id": "667462138", "answers": [{"choice_id": "4385177217"}]}, {"id": "667462170", "answers": [{"choice_id": "4385177382"}]}, {"id": "667462172", "answers": [{"choice_id": "4385177398"}]}]}]}, "emitted_at": 1690184772390} -{"stream": "survey_responses", "data": {"id": "12731351604", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829931", "survey_id": "307785448", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=4s6X1KZyt2TAwDeH6zLlLbhI1hJkumblOrBysCkO3gPTDp7tdY9BOD_2FvWkRVgEAz", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxi9bVow7JzoPHlGgDld6S4o_3D?respondent_id=12731351604", "total_time": 36, "date_modified": "2021-06-10T11:02:48+00:00", "date_created": "2021-06-10T11:02:12+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785448/responses/12731351604", "pages": [{"id": "168831470", "questions": []}, {"id": "168831471", "questions": [{"id": "667462113", "answers": [{"choice_id": "4385177068"}]}, {"id": "667462114", "answers": [{"choice_id": "4385177098"}]}, {"id": "667462122", "answers": [{"choice_id": "4385177138"}]}, {"id": "667462126", "answers": [{"choice_id": "4385177154"}]}, {"id": "667462130", "answers": [{"choice_id": "4385177172"}]}]}, {"id": "168831478", "questions": [{"id": "667462135", "answers": [{"choice_id": "4385177200"}]}, {"id": "667462136", "answers": [{"choice_id": "4385177213"}]}, {"id": "667462138", "answers": [{"choice_id": "4385177218"}]}, {"id": "667462170", "answers": [{"choice_id": "4385177381"}]}, {"id": "667462172", "answers": [{"choice_id": "4385177397"}]}]}]}, "emitted_at": 1690184772391} -{"stream": "survey_responses", "data": {"id": "12731027651", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843442", "survey_id": "307784863", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=fTFc6V6Lifr7HoJ_2Bl_2Bc74bpg_2B9JRwagJN_2FN3k2RwdJ8CrOldvM08uLZMXEGk5uR_2B", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwAs0GsZ1qsocisThgUFPk0_3D?respondent_id=12731027651", "total_time": 29, "date_modified": "2021-06-10T08:39:30+00:00", "date_created": "2021-06-10T08:39:01+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784863/responses/12731027651", "pages": [{"id": "168830093", "questions": []}, {"id": "168830094", "questions": [{"id": "667455348", "answers": [{"choice_id": "4385138588"}]}, {"id": "667455351", "answers": [{"choice_id": "4385138599"}]}, {"id": "667455358", "answers": [{"choice_id": "4385138621"}]}, {"id": "667455370", "answers": [{"choice_id": "4385138707"}]}, {"id": "667455395", "answers": [{"choice_id": "4385138780"}]}]}, {"id": "168830108", "questions": [{"id": "667455427", "answers": [{"choice_id": "4385139016"}]}, {"id": "667455439", "answers": [{"choice_id": "4385139062"}]}, {"id": "667455443", "answers": [{"choice_id": "4385139109"}]}, {"id": "667455463", "answers": [{"choice_id": "4385139232"}]}, {"id": "667455466", "answers": [{"choice_id": "4385139246"}]}]}]}, "emitted_at": 1690184773257} -{"stream": "survey_responses", "data": {"id": "12731035521", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843442", "survey_id": "307784863", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=P3yPukgvQbdQ0TVJQrLvSnRv3NllOsosNLSuGp34tGmMRN3SsXsVqXxRkHoUAZzR", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwAs0GsZ1qsocisThgUFPk0_3D?respondent_id=12731035521", "total_time": 27, "date_modified": "2021-06-10T08:43:50+00:00", "date_created": "2021-06-10T08:43:23+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784863/responses/12731035521", "pages": [{"id": "168830093", "questions": []}, {"id": "168830094", "questions": [{"id": "667455348", "answers": [{"choice_id": "4385138593"}]}, {"id": "667455351", "answers": [{"choice_id": "4385138600"}]}, {"id": "667455358", "answers": [{"choice_id": "4385138623"}]}, {"id": "667455370", "answers": [{"choice_id": "4385138700"}]}, {"id": "667455395", "answers": [{"choice_id": "4385138780"}]}]}, {"id": "168830108", "questions": [{"id": "667455427", "answers": [{"choice_id": "4385139018"}]}, {"id": "667455439", "answers": [{"choice_id": "4385139064"}]}, {"id": "667455443", "answers": [{"choice_id": "4385139109"}]}, {"id": "667455463", "answers": [{"choice_id": "4385139231"}]}, {"id": "667455466", "answers": [{"choice_id": "4385139244"}]}]}]}, "emitted_at": 1690184773257} -{"stream": "survey_responses", "data": {"id": "12731049611", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843442", "survey_id": "307784863", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=1ZxbynfKwvppvatsw2rpbv64I6YRz_2BRUyTcv8dLIbh63fwCm_2Fl9TFtbLXUoePm1q", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwAs0GsZ1qsocisThgUFPk0_3D?respondent_id=12731049611", "total_time": 27, "date_modified": "2021-06-10T08:51:37+00:00", "date_created": "2021-06-10T08:51:09+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784863/responses/12731049611", "pages": [{"id": "168830093", "questions": []}, {"id": "168830094", "questions": [{"id": "667455348", "answers": [{"choice_id": "4385138588"}]}, {"id": "667455351", "answers": [{"choice_id": "4385138598"}]}, {"id": "667455358", "answers": [{"choice_id": "4385138623"}]}, {"id": "667455370", "answers": [{"choice_id": "4385138701"}]}, {"id": "667455395", "answers": [{"choice_id": "4385138782"}]}]}, {"id": "168830108", "questions": [{"id": "667455427", "answers": [{"choice_id": "4385139017"}]}, {"id": "667455439", "answers": [{"choice_id": "4385139064"}]}, {"id": "667455443", "answers": [{"choice_id": "4385139109"}]}, {"id": "667455463", "answers": [{"choice_id": "4385139232"}]}, {"id": "667455466", "answers": [{"choice_id": "4385139245"}]}]}]}, "emitted_at": 1690184773258} -{"stream": "survey_responses", "data": {"id": "12731064110", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843442", "survey_id": "307784863", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=Vxei42yDMEVFSnyt9M3fpCOreC5nzmSKyEY9iwPt1QtQFzPyFiaGMuw2XQKlF4Tu", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwAs0GsZ1qsocisThgUFPk0_3D?respondent_id=12731064110", "total_time": 27, "date_modified": "2021-06-10T08:59:20+00:00", "date_created": "2021-06-10T08:58:52+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784863/responses/12731064110", "pages": [{"id": "168830093", "questions": []}, {"id": "168830094", "questions": [{"id": "667455348", "answers": [{"choice_id": "4385138591"}]}, {"id": "667455351", "answers": [{"choice_id": "4385138600"}]}, {"id": "667455358", "answers": [{"choice_id": "4385138620"}]}, {"id": "667455370", "answers": [{"choice_id": "4385138706"}]}, {"id": "667455395", "answers": [{"choice_id": "4385138782"}]}]}, {"id": "168830108", "questions": [{"id": "667455427", "answers": [{"choice_id": "4385139013"}]}, {"id": "667455439", "answers": [{"choice_id": "4385139064"}]}, {"id": "667455443", "answers": [{"choice_id": "4385139112"}]}, {"id": "667455463", "answers": [{"choice_id": "4385139231"}]}, {"id": "667455466", "answers": [{"choice_id": "4385139246"}]}]}]}, "emitted_at": 1690184773258} -{"stream": "survey_responses", "data": {"id": "12731079723", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843442", "survey_id": "307784863", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=pucu83BOi19Z2n_2Bq7Cmuq6GrV4auydnhRuXlDsCZ5DwqqAhRkuX0GL8i_2FXCRgVnb", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwAs0GsZ1qsocisThgUFPk0_3D?respondent_id=12731079723", "total_time": 27, "date_modified": "2021-06-10T09:07:03+00:00", "date_created": "2021-06-10T09:06:36+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784863/responses/12731079723", "pages": [{"id": "168830093", "questions": []}, {"id": "168830094", "questions": [{"id": "667455348", "answers": [{"choice_id": "4385138589"}]}, {"id": "667455351", "answers": [{"choice_id": "4385138599"}]}, {"id": "667455358", "answers": [{"choice_id": "4385138622"}]}, {"id": "667455370", "answers": [{"choice_id": "4385138707"}]}, {"id": "667455395", "answers": [{"choice_id": "4385138780"}]}]}, {"id": "168830108", "questions": [{"id": "667455427", "answers": [{"choice_id": "4385139016"}]}, {"id": "667455439", "answers": [{"choice_id": "4385139063"}]}, {"id": "667455443", "answers": [{"choice_id": "4385139111"}]}, {"id": "667455463", "answers": [{"choice_id": "4385139235"}]}, {"id": "667455466", "answers": [{"choice_id": "4385139244"}]}]}]}, "emitted_at": 1690184773258} -{"stream": "survey_responses", "data": {"id": "12731096120", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843442", "survey_id": "307784863", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=cmwD6t6spTjvPh8zt8wpzOowMPktBxJBhl8ROXhWK7NsNRfaf9VJ51wtPZfMsbPJ", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwAs0GsZ1qsocisThgUFPk0_3D?respondent_id=12731096120", "total_time": 27, "date_modified": "2021-06-10T09:14:48+00:00", "date_created": "2021-06-10T09:14:21+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784863/responses/12731096120", "pages": [{"id": "168830093", "questions": []}, {"id": "168830094", "questions": [{"id": "667455348", "answers": [{"choice_id": "4385138593"}]}, {"id": "667455351", "answers": [{"choice_id": "4385138597"}]}, {"id": "667455358", "answers": [{"choice_id": "4385138623"}]}, {"id": "667455370", "answers": [{"choice_id": "4385138706"}]}, {"id": "667455395", "answers": [{"choice_id": "4385138780"}]}]}, {"id": "168830108", "questions": [{"id": "667455427", "answers": [{"choice_id": "4385139016"}]}, {"id": "667455439", "answers": [{"choice_id": "4385139063"}]}, {"id": "667455443", "answers": [{"choice_id": "4385139109"}]}, {"id": "667455463", "answers": [{"choice_id": "4385139235"}]}, {"id": "667455466", "answers": [{"choice_id": "4385139245"}]}]}]}, "emitted_at": 1690184773259} -{"stream": "survey_responses", "data": {"id": "12731112612", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843442", "survey_id": "307784863", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=tYfRsLpnvLXiDU1aSXTH5nIXnauCRjmyXNWWEYT8RsVkytyEn7W9JT8TX_2FQ_2FZXmm", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwAs0GsZ1qsocisThgUFPk0_3D?respondent_id=12731112612", "total_time": 27, "date_modified": "2021-06-10T09:22:26+00:00", "date_created": "2021-06-10T09:21:59+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784863/responses/12731112612", "pages": [{"id": "168830093", "questions": []}, {"id": "168830094", "questions": [{"id": "667455348", "answers": [{"choice_id": "4385138593"}]}, {"id": "667455351", "answers": [{"choice_id": "4385138595"}]}, {"id": "667455358", "answers": [{"choice_id": "4385138625"}]}, {"id": "667455370", "answers": [{"choice_id": "4385138700"}]}, {"id": "667455395", "answers": [{"choice_id": "4385138779"}]}]}, {"id": "168830108", "questions": [{"id": "667455427", "answers": [{"choice_id": "4385139014"}]}, {"id": "667455439", "answers": [{"choice_id": "4385139063"}]}, {"id": "667455443", "answers": [{"choice_id": "4385139110"}]}, {"id": "667455463", "answers": [{"choice_id": "4385139230"}]}, {"id": "667455466", "answers": [{"choice_id": "4385139245"}]}]}]}, "emitted_at": 1690184773259} -{"stream": "survey_responses", "data": {"id": "12731129209", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843442", "survey_id": "307784863", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=Smvc_2Fhq34btK33i0b1sGxYeWInTkfyhJ_2BYMIBz_2B0I9Z2ZIyvlPNu3h_2B65f84DuA0", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwAs0GsZ1qsocisThgUFPk0_3D?respondent_id=12731129209", "total_time": 27, "date_modified": "2021-06-10T09:30:08+00:00", "date_created": "2021-06-10T09:29:41+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784863/responses/12731129209", "pages": [{"id": "168830093", "questions": []}, {"id": "168830094", "questions": [{"id": "667455348", "answers": [{"choice_id": "4385138588"}]}, {"id": "667455351", "answers": [{"choice_id": "4385138597"}]}, {"id": "667455358", "answers": [{"choice_id": "4385138622"}]}, {"id": "667455370", "answers": [{"choice_id": "4385138704"}]}, {"id": "667455395", "answers": [{"choice_id": "4385138779"}]}]}, {"id": "168830108", "questions": [{"id": "667455427", "answers": [{"choice_id": "4385139015"}]}, {"id": "667455439", "answers": [{"choice_id": "4385139064"}]}, {"id": "667455443", "answers": [{"choice_id": "4385139111"}]}, {"id": "667455463", "answers": [{"choice_id": "4385139234"}]}, {"id": "667455466", "answers": [{"choice_id": "4385139244"}]}]}]}, "emitted_at": 1690184773259} -{"stream": "survey_responses", "data": {"id": "12731147035", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843442", "survey_id": "307784863", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=U9JFMHqI683O1sqxW8mLF_2FgU_2F7fYGjSSm6LScE6pu2LDPRKAAVQXvZThLKDp2GXJ", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwAs0GsZ1qsocisThgUFPk0_3D?respondent_id=12731147035", "total_time": 27, "date_modified": "2021-06-10T09:37:50+00:00", "date_created": "2021-06-10T09:37:23+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784863/responses/12731147035", "pages": [{"id": "168830093", "questions": []}, {"id": "168830094", "questions": [{"id": "667455348", "answers": [{"choice_id": "4385138589"}]}, {"id": "667455351", "answers": [{"choice_id": "4385138595"}]}, {"id": "667455358", "answers": [{"choice_id": "4385138625"}]}, {"id": "667455370", "answers": [{"choice_id": "4385138703"}]}, {"id": "667455395", "answers": [{"choice_id": "4385138781"}]}]}, {"id": "168830108", "questions": [{"id": "667455427", "answers": [{"choice_id": "4385139017"}]}, {"id": "667455439", "answers": [{"choice_id": "4385139064"}]}, {"id": "667455443", "answers": [{"choice_id": "4385139110"}]}, {"id": "667455463", "answers": [{"choice_id": "4385139236"}]}, {"id": "667455466", "answers": [{"choice_id": "4385139245"}]}]}]}, "emitted_at": 1690184773260} -{"stream": "survey_responses", "data": {"id": "12731164403", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843442", "survey_id": "307784863", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=0jJiVT47Abo_2BkHRNm2_2FqakaCDR5LNaVs9r5z_2BNCgUb1UPEiUm95gFyEF3xtdkYv3", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwAs0GsZ1qsocisThgUFPk0_3D?respondent_id=12731164403", "total_time": 27, "date_modified": "2021-06-10T09:45:34+00:00", "date_created": "2021-06-10T09:45:06+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784863/responses/12731164403", "pages": [{"id": "168830093", "questions": []}, {"id": "168830094", "questions": [{"id": "667455348", "answers": [{"choice_id": "4385138593"}]}, {"id": "667455351", "answers": [{"choice_id": "4385138600"}]}, {"id": "667455358", "answers": [{"choice_id": "4385138626"}]}, {"id": "667455370", "answers": [{"choice_id": "4385138701"}]}, {"id": "667455395", "answers": [{"choice_id": "4385138779"}]}]}, {"id": "168830108", "questions": [{"id": "667455427", "answers": [{"choice_id": "4385139018"}]}, {"id": "667455439", "answers": [{"choice_id": "4385139064"}]}, {"id": "667455443", "answers": [{"choice_id": "4385139109"}]}, {"id": "667455463", "answers": [{"choice_id": "4385139235"}]}, {"id": "667455466", "answers": [{"choice_id": "4385139245"}]}]}]}, "emitted_at": 1690184773260} -{"stream": "survey_responses", "data": {"id": "12731181688", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843442", "survey_id": "307784863", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=UQZSADafI1w57hYP_2FS2NflQKw4HeCD0bqS8NFrI480xqhT_2BmBKvsbhwZESEP6bWH", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwAs0GsZ1qsocisThgUFPk0_3D?respondent_id=12731181688", "total_time": 33, "date_modified": "2021-06-10T09:53:29+00:00", "date_created": "2021-06-10T09:52:55+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784863/responses/12731181688", "pages": [{"id": "168830093", "questions": []}, {"id": "168830094", "questions": [{"id": "667455348", "answers": [{"choice_id": "4385138592"}]}, {"id": "667455351", "answers": [{"choice_id": "4385138598"}]}, {"id": "667455358", "answers": [{"choice_id": "4385138622"}]}, {"id": "667455370", "answers": [{"choice_id": "4385138707"}]}, {"id": "667455395", "answers": [{"choice_id": "4385138779"}]}]}, {"id": "168830108", "questions": [{"id": "667455427", "answers": [{"choice_id": "4385139013"}]}, {"id": "667455439", "answers": [{"choice_id": "4385139062"}]}, {"id": "667455443", "answers": [{"choice_id": "4385139112"}]}, {"id": "667455463", "answers": [{"choice_id": "4385139230"}]}, {"id": "667455466", "answers": [{"choice_id": "4385139246"}]}]}]}, "emitted_at": 1690184773261} -{"stream": "survey_responses", "data": {"id": "12731200697", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843442", "survey_id": "307784863", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=Z_2FtAEFehOtxFbKh3vENz8bUgBufRs3dp1FPH_2BaixcDbpWdxRX3s0ydAEfVu_2F_2FV9k", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwAs0GsZ1qsocisThgUFPk0_3D?respondent_id=12731200697", "total_time": 27, "date_modified": "2021-06-10T10:01:55+00:00", "date_created": "2021-06-10T10:01:27+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784863/responses/12731200697", "pages": [{"id": "168830093", "questions": []}, {"id": "168830094", "questions": [{"id": "667455348", "answers": [{"choice_id": "4385138589"}]}, {"id": "667455351", "answers": [{"choice_id": "4385138598"}]}, {"id": "667455358", "answers": [{"choice_id": "4385138622"}]}, {"id": "667455370", "answers": [{"choice_id": "4385138706"}]}, {"id": "667455395", "answers": [{"choice_id": "4385138779"}]}]}, {"id": "168830108", "questions": [{"id": "667455427", "answers": [{"choice_id": "4385139013"}]}, {"id": "667455439", "answers": [{"choice_id": "4385139063"}]}, {"id": "667455443", "answers": [{"choice_id": "4385139112"}]}, {"id": "667455463", "answers": [{"choice_id": "4385139233"}]}, {"id": "667455466", "answers": [{"choice_id": "4385139246"}]}]}]}, "emitted_at": 1690184773261} -{"stream": "survey_responses", "data": {"id": "12731221002", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843442", "survey_id": "307784863", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=YYEQ9YPblwXypUXP9UsD2QOQI_2B52u8EQSph1_2FmToJDM9mtFZbqeiOLVv8CbSN_2BdL", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwAs0GsZ1qsocisThgUFPk0_3D?respondent_id=12731221002", "total_time": 27, "date_modified": "2021-06-10T10:09:45+00:00", "date_created": "2021-06-10T10:09:18+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784863/responses/12731221002", "pages": [{"id": "168830093", "questions": []}, {"id": "168830094", "questions": [{"id": "667455348", "answers": [{"choice_id": "4385138592"}]}, {"id": "667455351", "answers": [{"choice_id": "4385138600"}]}, {"id": "667455358", "answers": [{"choice_id": "4385138620"}]}, {"id": "667455370", "answers": [{"choice_id": "4385138702"}]}, {"id": "667455395", "answers": [{"choice_id": "4385138779"}]}]}, {"id": "168830108", "questions": [{"id": "667455427", "answers": [{"choice_id": "4385139017"}]}, {"id": "667455439", "answers": [{"choice_id": "4385139062"}]}, {"id": "667455443", "answers": [{"choice_id": "4385139109"}]}, {"id": "667455463", "answers": [{"choice_id": "4385139233"}]}, {"id": "667455466", "answers": [{"choice_id": "4385139246"}]}]}]}, "emitted_at": 1690184773261} -{"stream": "survey_responses", "data": {"id": "12731240798", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843442", "survey_id": "307784863", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=4ZdZmIl45awCiUkNXcRb9vC0pYIZMfHtCv3apho2x9McRs1lDMv5yNnqz4z3VnSP", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwAs0GsZ1qsocisThgUFPk0_3D?respondent_id=12731240798", "total_time": 27, "date_modified": "2021-06-10T10:17:43+00:00", "date_created": "2021-06-10T10:17:16+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784863/responses/12731240798", "pages": [{"id": "168830093", "questions": []}, {"id": "168830094", "questions": [{"id": "667455348", "answers": [{"choice_id": "4385138590"}]}, {"id": "667455351", "answers": [{"choice_id": "4385138600"}]}, {"id": "667455358", "answers": [{"choice_id": "4385138620"}]}, {"id": "667455370", "answers": [{"choice_id": "4385138703"}]}, {"id": "667455395", "answers": [{"choice_id": "4385138780"}]}]}, {"id": "168830108", "questions": [{"id": "667455427", "answers": [{"choice_id": "4385139018"}]}, {"id": "667455439", "answers": [{"choice_id": "4385139062"}]}, {"id": "667455443", "answers": [{"choice_id": "4385139109"}]}, {"id": "667455463", "answers": [{"choice_id": "4385139230"}]}, {"id": "667455466", "answers": [{"choice_id": "4385139244"}]}]}]}, "emitted_at": 1690184773262} -{"stream": "survey_responses", "data": {"id": "12731258961", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843442", "survey_id": "307784863", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=emSG54_2BrHmGYvPyc21uSnPZeh8nU2iPahv_2B21isNydKP_2Bl4t3C_2B821nvNucSSxD_2F", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwAs0GsZ1qsocisThgUFPk0_3D?respondent_id=12731258961", "total_time": 27, "date_modified": "2021-06-10T10:25:19+00:00", "date_created": "2021-06-10T10:24:52+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784863/responses/12731258961", "pages": [{"id": "168830093", "questions": []}, {"id": "168830094", "questions": [{"id": "667455348", "answers": [{"choice_id": "4385138588"}]}, {"id": "667455351", "answers": [{"choice_id": "4385138596"}]}, {"id": "667455358", "answers": [{"choice_id": "4385138624"}]}, {"id": "667455370", "answers": [{"choice_id": "4385138701"}]}, {"id": "667455395", "answers": [{"choice_id": "4385138780"}]}]}, {"id": "168830108", "questions": [{"id": "667455427", "answers": [{"choice_id": "4385139016"}]}, {"id": "667455439", "answers": [{"choice_id": "4385139062"}]}, {"id": "667455443", "answers": [{"choice_id": "4385139109"}]}, {"id": "667455463", "answers": [{"choice_id": "4385139235"}]}, {"id": "667455466", "answers": [{"choice_id": "4385139245"}]}]}]}, "emitted_at": 1690184773262} -{"stream": "survey_responses", "data": {"id": "12731278204", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843442", "survey_id": "307784863", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=IqIG55XKRRDg9SlyFaGnS3M3bEcCu3Kc0adBsNxgJoXgsm_2FSsAZO5dQiHmAg137N", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwAs0GsZ1qsocisThgUFPk0_3D?respondent_id=12731278204", "total_time": 29, "date_modified": "2021-06-10T10:33:09+00:00", "date_created": "2021-06-10T10:32:40+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784863/responses/12731278204", "pages": [{"id": "168830093", "questions": []}, {"id": "168830094", "questions": [{"id": "667455348", "answers": [{"choice_id": "4385138589"}]}, {"id": "667455351", "answers": [{"choice_id": "4385138596"}]}, {"id": "667455358", "answers": [{"choice_id": "4385138621"}]}, {"id": "667455370", "answers": [{"choice_id": "4385138705"}]}, {"id": "667455395", "answers": [{"choice_id": "4385138780"}]}]}, {"id": "168830108", "questions": [{"id": "667455427", "answers": [{"choice_id": "4385139016"}]}, {"id": "667455439", "answers": [{"choice_id": "4385139063"}]}, {"id": "667455443", "answers": [{"choice_id": "4385139113"}]}, {"id": "667455463", "answers": [{"choice_id": "4385139232"}]}, {"id": "667455466", "answers": [{"choice_id": "4385139244"}]}]}]}, "emitted_at": 1690184773263} -{"stream": "survey_responses", "data": {"id": "12731297693", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843442", "survey_id": "307784863", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=4YsTEO_2Ft1AqmjnchtBZcCuBm0IyfVnOc8GnFYBmeTI3DsacLLtX6HINSLOXrSTvD", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwAs0GsZ1qsocisThgUFPk0_3D?respondent_id=12731297693", "total_time": 27, "date_modified": "2021-06-10T10:40:58+00:00", "date_created": "2021-06-10T10:40:30+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784863/responses/12731297693", "pages": [{"id": "168830093", "questions": []}, {"id": "168830094", "questions": [{"id": "667455348", "answers": [{"choice_id": "4385138588"}]}, {"id": "667455351", "answers": [{"choice_id": "4385138598"}]}, {"id": "667455358", "answers": [{"choice_id": "4385138626"}]}, {"id": "667455370", "answers": [{"choice_id": "4385138704"}]}, {"id": "667455395", "answers": [{"choice_id": "4385138780"}]}]}, {"id": "168830108", "questions": [{"id": "667455427", "answers": [{"choice_id": "4385139015"}]}, {"id": "667455439", "answers": [{"choice_id": "4385139063"}]}, {"id": "667455443", "answers": [{"choice_id": "4385139111"}]}, {"id": "667455463", "answers": [{"choice_id": "4385139230"}]}, {"id": "667455466", "answers": [{"choice_id": "4385139244"}]}]}]}, "emitted_at": 1690184773263} -{"stream": "survey_responses", "data": {"id": "12731317951", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843442", "survey_id": "307784863", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=0MbtsVOF4vEh5Xcf2H_2B4YhWcgeBtjnt_2BlJxPWFG88HIYuNIOdDcQkUMEv2VCZHUT", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwAs0GsZ1qsocisThgUFPk0_3D?respondent_id=12731317951", "total_time": 27, "date_modified": "2021-06-10T10:49:06+00:00", "date_created": "2021-06-10T10:48:39+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784863/responses/12731317951", "pages": [{"id": "168830093", "questions": []}, {"id": "168830094", "questions": [{"id": "667455348", "answers": [{"choice_id": "4385138589"}]}, {"id": "667455351", "answers": [{"choice_id": "4385138596"}]}, {"id": "667455358", "answers": [{"choice_id": "4385138624"}]}, {"id": "667455370", "answers": [{"choice_id": "4385138702"}]}, {"id": "667455395", "answers": [{"choice_id": "4385138779"}]}]}, {"id": "168830108", "questions": [{"id": "667455427", "answers": [{"choice_id": "4385139018"}]}, {"id": "667455439", "answers": [{"choice_id": "4385139064"}]}, {"id": "667455443", "answers": [{"choice_id": "4385139109"}]}, {"id": "667455463", "answers": [{"choice_id": "4385139236"}]}, {"id": "667455466", "answers": [{"choice_id": "4385139244"}]}]}]}, "emitted_at": 1690184773263} -{"stream": "survey_responses", "data": {"id": "12731336744", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843442", "survey_id": "307784863", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=jRZyZbcPITg018MLWZvCHAcG0IsxU_2BF2d4PQO0uZu8xhWEPv58hIqjKkEBTk1g1x", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwAs0GsZ1qsocisThgUFPk0_3D?respondent_id=12731336744", "total_time": 27, "date_modified": "2021-06-10T10:56:40+00:00", "date_created": "2021-06-10T10:56:13+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784863/responses/12731336744", "pages": [{"id": "168830093", "questions": []}, {"id": "168830094", "questions": [{"id": "667455348", "answers": [{"choice_id": "4385138591"}]}, {"id": "667455351", "answers": [{"choice_id": "4385138596"}]}, {"id": "667455358", "answers": [{"choice_id": "4385138623"}]}, {"id": "667455370", "answers": [{"choice_id": "4385138702"}]}, {"id": "667455395", "answers": [{"choice_id": "4385138781"}]}]}, {"id": "168830108", "questions": [{"id": "667455427", "answers": [{"choice_id": "4385139016"}]}, {"id": "667455439", "answers": [{"choice_id": "4385139064"}]}, {"id": "667455443", "answers": [{"choice_id": "4385139113"}]}, {"id": "667455463", "answers": [{"choice_id": "4385139234"}]}, {"id": "667455466", "answers": [{"choice_id": "4385139246"}]}]}]}, "emitted_at": 1690184773264} -{"stream": "survey_responses", "data": {"id": "12731356183", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843442", "survey_id": "307784863", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=j2Vv3hWWCKKX8TGm3kD6g4UQCdPiw0tyfJlXlm3Yr4aHlHW_2FZ_2FCSgfXWR8mLWGn0", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XGwAs0GsZ1qsocisThgUFPk0_3D?respondent_id=12731356183", "total_time": 28, "date_modified": "2021-06-10T11:04:28+00:00", "date_created": "2021-06-10T11:04:00+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784863/responses/12731356183", "pages": [{"id": "168830093", "questions": []}, {"id": "168830094", "questions": [{"id": "667455348", "answers": [{"choice_id": "4385138593"}]}, {"id": "667455351", "answers": [{"choice_id": "4385138600"}]}, {"id": "667455358", "answers": [{"choice_id": "4385138621"}]}, {"id": "667455370", "answers": [{"choice_id": "4385138702"}]}, {"id": "667455395", "answers": [{"choice_id": "4385138782"}]}]}, {"id": "168830108", "questions": [{"id": "667455427", "answers": [{"choice_id": "4385139018"}]}, {"id": "667455439", "answers": [{"choice_id": "4385139064"}]}, {"id": "667455443", "answers": [{"choice_id": "4385139110"}]}, {"id": "667455463", "answers": [{"choice_id": "4385139233"}]}, {"id": "667455466", "answers": [{"choice_id": "4385139244"}]}]}]}, "emitted_at": 1690184773264} -{"stream": "survey_responses", "data": {"id": "12731028687", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829776", "survey_id": "307784846", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=dUjh6A_2FaiaLmSiByVq0EnrrvhqLUItqI0TokJUP8jl9SbUi5cd6hNui1hNqGv56Z", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG0D0cevwMQIvVNwWzg_2Bmm4o_3D?respondent_id=12731028687", "total_time": 33, "date_modified": "2021-06-10T08:40:08+00:00", "date_created": "2021-06-10T08:39:35+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784846/responses/12731028687", "pages": [{"id": "168830067", "questions": []}, {"id": "168830068", "questions": [{"id": "667455236", "answers": [{"choice_id": "4385137907"}]}, {"id": "667455240", "answers": [{"choice_id": "4385137926"}]}, {"id": "667455243", "answers": [{"choice_id": "4385137932"}]}, {"id": "667455245", "answers": [{"choice_id": "4385137947"}]}, {"id": "667455263", "answers": [{"choice_id": "4385138061"}]}]}, {"id": "168830074", "questions": [{"id": "667455268", "answers": [{"choice_id": "4385138109"}]}, {"id": "667455272", "answers": [{"choice_id": "4385138133"}]}, {"id": "667455276", "answers": [{"choice_id": "4385138157"}]}, {"id": "667455290", "answers": [{"choice_id": "4385138210"}]}, {"id": "667455293", "answers": [{"choice_id": "4385138240"}]}]}]}, "emitted_at": 1690184774468} -{"stream": "survey_responses", "data": {"id": "12731036487", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829776", "survey_id": "307784846", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=5ir6yV1yx1xcxnDqd5CbKT7hxkDCbqeG_2BHctWsXeZb015fsixx9pWXx0NgTuAnPh", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG0D0cevwMQIvVNwWzg_2Bmm4o_3D?respondent_id=12731036487", "total_time": 31, "date_modified": "2021-06-10T08:44:28+00:00", "date_created": "2021-06-10T08:43:56+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784846/responses/12731036487", "pages": [{"id": "168830067", "questions": []}, {"id": "168830068", "questions": [{"id": "667455236", "answers": [{"choice_id": "4385137911"}]}, {"id": "667455240", "answers": [{"choice_id": "4385137926"}]}, {"id": "667455243", "answers": [{"choice_id": "4385137935"}]}, {"id": "667455245", "answers": [{"choice_id": "4385137946"}]}, {"id": "667455263", "answers": [{"choice_id": "4385138064"}]}]}, {"id": "168830074", "questions": [{"id": "667455268", "answers": [{"choice_id": "4385138110"}]}, {"id": "667455272", "answers": [{"choice_id": "4385138126"}]}, {"id": "667455276", "answers": [{"choice_id": "4385138156"}]}, {"id": "667455290", "answers": [{"choice_id": "4385138210"}]}, {"id": "667455293", "answers": [{"choice_id": "4385138243"}]}]}]}, "emitted_at": 1690184774468} -{"stream": "survey_responses", "data": {"id": "12731050647", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829776", "survey_id": "307784846", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=5b_2BmMZNzhREunXwy6FMGNiacjaGpQeXb3F9yW3dcaVweZLGQY8t0kgV3XBTUneNG", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG0D0cevwMQIvVNwWzg_2Bmm4o_3D?respondent_id=12731050647", "total_time": 31, "date_modified": "2021-06-10T08:52:13+00:00", "date_created": "2021-06-10T08:51:41+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784846/responses/12731050647", "pages": [{"id": "168830067", "questions": []}, {"id": "168830068", "questions": [{"id": "667455236", "answers": [{"choice_id": "4385137907"}]}, {"id": "667455240", "answers": [{"choice_id": "4385137926"}]}, {"id": "667455243", "answers": [{"choice_id": "4385137933"}]}, {"id": "667455245", "answers": [{"choice_id": "4385137949"}]}, {"id": "667455263", "answers": [{"choice_id": "4385138063"}]}]}, {"id": "168830074", "questions": [{"id": "667455268", "answers": [{"choice_id": "4385138108"}]}, {"id": "667455272", "answers": [{"choice_id": "4385138126"}]}, {"id": "667455276", "answers": [{"choice_id": "4385138156"}]}, {"id": "667455290", "answers": [{"choice_id": "4385138216"}]}, {"id": "667455293", "answers": [{"choice_id": "4385138243"}]}]}]}, "emitted_at": 1690184774469} -{"stream": "survey_responses", "data": {"id": "12731065101", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829776", "survey_id": "307784846", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=ivYcxChrgUjC39ad0me9CZw6BmVAkTQjhWy6e7bR1QV_2BZ7rOJIplHeJnM7IdV0ev", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG0D0cevwMQIvVNwWzg_2Bmm4o_3D?respondent_id=12731065101", "total_time": 31, "date_modified": "2021-06-10T08:59:55+00:00", "date_created": "2021-06-10T08:59:24+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784846/responses/12731065101", "pages": [{"id": "168830067", "questions": []}, {"id": "168830068", "questions": [{"id": "667455236", "answers": [{"choice_id": "4385137908"}]}, {"id": "667455240", "answers": [{"choice_id": "4385137925"}]}, {"id": "667455243", "answers": [{"choice_id": "4385137933"}]}, {"id": "667455245", "answers": [{"choice_id": "4385137949"}]}, {"id": "667455263", "answers": [{"choice_id": "4385138061"}]}]}, {"id": "168830074", "questions": [{"id": "667455268", "answers": [{"choice_id": "4385138113"}]}, {"id": "667455272", "answers": [{"choice_id": "4385138131"}]}, {"id": "667455276", "answers": [{"choice_id": "4385138155"}]}, {"id": "667455290", "answers": [{"choice_id": "4385138216"}]}, {"id": "667455293", "answers": [{"choice_id": "4385138243"}]}]}]}, "emitted_at": 1690184774469} -{"stream": "survey_responses", "data": {"id": "12731080861", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829776", "survey_id": "307784846", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=3uxM3bC1ju6y0ZbYbgP_2BvgFNEF0MBf07Z1sR5msqxiFnqdE8BnFPnrip62Dyzq_2FJ", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG0D0cevwMQIvVNwWzg_2Bmm4o_3D?respondent_id=12731080861", "total_time": 31, "date_modified": "2021-06-10T09:07:39+00:00", "date_created": "2021-06-10T09:07:07+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784846/responses/12731080861", "pages": [{"id": "168830067", "questions": []}, {"id": "168830068", "questions": [{"id": "667455236", "answers": [{"choice_id": "4385137906"}]}, {"id": "667455240", "answers": [{"choice_id": "4385137924"}]}, {"id": "667455243", "answers": [{"choice_id": "4385137931"}]}, {"id": "667455245", "answers": [{"choice_id": "4385137944"}]}, {"id": "667455263", "answers": [{"choice_id": "4385138061"}]}]}, {"id": "168830074", "questions": [{"id": "667455268", "answers": [{"choice_id": "4385138111"}]}, {"id": "667455272", "answers": [{"choice_id": "4385138129"}]}, {"id": "667455276", "answers": [{"choice_id": "4385138156"}]}, {"id": "667455290", "answers": [{"choice_id": "4385138209"}]}, {"id": "667455293", "answers": [{"choice_id": "4385138243"}]}]}]}, "emitted_at": 1690184774469} -{"stream": "survey_responses", "data": {"id": "12731097254", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829776", "survey_id": "307784846", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=zCaGCrPMhDj216XESYkQCRoFWd5JT94Z_2F_2Bns224Ht4yvHiBMzA0R869PAJ_2FrE_2F_2BS", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG0D0cevwMQIvVNwWzg_2Bmm4o_3D?respondent_id=12731097254", "total_time": 31, "date_modified": "2021-06-10T09:15:24+00:00", "date_created": "2021-06-10T09:14:52+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784846/responses/12731097254", "pages": [{"id": "168830067", "questions": []}, {"id": "168830068", "questions": [{"id": "667455236", "answers": [{"choice_id": "4385137904"}]}, {"id": "667455240", "answers": [{"choice_id": "4385137926"}]}, {"id": "667455243", "answers": [{"choice_id": "4385137936"}]}, {"id": "667455245", "answers": [{"choice_id": "4385137950"}]}, {"id": "667455263", "answers": [{"choice_id": "4385138062"}]}]}, {"id": "168830074", "questions": [{"id": "667455268", "answers": [{"choice_id": "4385138110"}]}, {"id": "667455272", "answers": [{"choice_id": "4385138131"}]}, {"id": "667455276", "answers": [{"choice_id": "4385138155"}]}, {"id": "667455290", "answers": [{"choice_id": "4385138216"}]}, {"id": "667455293", "answers": [{"choice_id": "4385138239"}]}]}]}, "emitted_at": 1690184774469} -{"stream": "survey_responses", "data": {"id": "12731113718", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829776", "survey_id": "307784846", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=VoxvgtLVPESmmstl7tMZG_2BuMDM0Aa9E5fCEa86a_2BuXZa6pilvyY520S9G1tWgrRY", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG0D0cevwMQIvVNwWzg_2Bmm4o_3D?respondent_id=12731113718", "total_time": 31, "date_modified": "2021-06-10T09:23:02+00:00", "date_created": "2021-06-10T09:22:30+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784846/responses/12731113718", "pages": [{"id": "168830067", "questions": []}, {"id": "168830068", "questions": [{"id": "667455236", "answers": [{"choice_id": "4385137907"}]}, {"id": "667455240", "answers": [{"choice_id": "4385137925"}]}, {"id": "667455243", "answers": [{"choice_id": "4385137935"}]}, {"id": "667455245", "answers": [{"choice_id": "4385137949"}]}, {"id": "667455263", "answers": [{"choice_id": "4385138063"}]}]}, {"id": "168830074", "questions": [{"id": "667455268", "answers": [{"choice_id": "4385138110"}]}, {"id": "667455272", "answers": [{"choice_id": "4385138130"}]}, {"id": "667455276", "answers": [{"choice_id": "4385138156"}]}, {"id": "667455290", "answers": [{"choice_id": "4385138210"}]}, {"id": "667455293", "answers": [{"choice_id": "4385138241"}]}]}]}, "emitted_at": 1690184774470} -{"stream": "survey_responses", "data": {"id": "12731130369", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829776", "survey_id": "307784846", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=z7UD89cfARngMPT8rraMXX6AgkCQ7ffFPZ0s_2BXsx7CQathuufUT_2FM9g_2B2Y8fjZzA", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG0D0cevwMQIvVNwWzg_2Bmm4o_3D?respondent_id=12731130369", "total_time": 31, "date_modified": "2021-06-10T09:30:44+00:00", "date_created": "2021-06-10T09:30:13+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784846/responses/12731130369", "pages": [{"id": "168830067", "questions": []}, {"id": "168830068", "questions": [{"id": "667455236", "answers": [{"choice_id": "4385137905"}]}, {"id": "667455240", "answers": [{"choice_id": "4385137927"}]}, {"id": "667455243", "answers": [{"choice_id": "4385137934"}]}, {"id": "667455245", "answers": [{"choice_id": "4385137947"}]}, {"id": "667455263", "answers": [{"choice_id": "4385138064"}]}]}, {"id": "168830074", "questions": [{"id": "667455268", "answers": [{"choice_id": "4385138111"}]}, {"id": "667455272", "answers": [{"choice_id": "4385138130"}]}, {"id": "667455276", "answers": [{"choice_id": "4385138155"}]}, {"id": "667455290", "answers": [{"choice_id": "4385138213"}]}, {"id": "667455293", "answers": [{"choice_id": "4385138239"}]}]}]}, "emitted_at": 1690184774470} -{"stream": "survey_responses", "data": {"id": "12731148248", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829776", "survey_id": "307784846", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=ju0hkPiuicpj80ml6Hl_2F22nPizQlWQLGiYwxc_2FDGxTXzgomNhfw66UPrCE0D636O", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG0D0cevwMQIvVNwWzg_2Bmm4o_3D?respondent_id=12731148248", "total_time": 33, "date_modified": "2021-06-10T09:38:28+00:00", "date_created": "2021-06-10T09:37:55+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784846/responses/12731148248", "pages": [{"id": "168830067", "questions": []}, {"id": "168830068", "questions": [{"id": "667455236", "answers": [{"choice_id": "4385137907"}]}, {"id": "667455240", "answers": [{"choice_id": "4385137927"}]}, {"id": "667455243", "answers": [{"choice_id": "4385137936"}]}, {"id": "667455245", "answers": [{"choice_id": "4385137949"}]}, {"id": "667455263", "answers": [{"choice_id": "4385138062"}]}]}, {"id": "168830074", "questions": [{"id": "667455268", "answers": [{"choice_id": "4385138112"}]}, {"id": "667455272", "answers": [{"choice_id": "4385138130"}]}, {"id": "667455276", "answers": [{"choice_id": "4385138157"}]}, {"id": "667455290", "answers": [{"choice_id": "4385138214"}]}, {"id": "667455293", "answers": [{"choice_id": "4385138242"}]}]}]}, "emitted_at": 1690184774470} -{"stream": "survey_responses", "data": {"id": "12731165533", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829776", "survey_id": "307784846", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=CJNhekEYhzbzSqz1R0VHxVjTmJ866MvLYw4aVPGTNOnsy33BOg1S1_2F_2Faesq41w0h", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG0D0cevwMQIvVNwWzg_2Bmm4o_3D?respondent_id=12731165533", "total_time": 32, "date_modified": "2021-06-10T09:46:10+00:00", "date_created": "2021-06-10T09:45:38+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784846/responses/12731165533", "pages": [{"id": "168830067", "questions": []}, {"id": "168830068", "questions": [{"id": "667455236", "answers": [{"choice_id": "4385137909"}]}, {"id": "667455240", "answers": [{"choice_id": "4385137927"}]}, {"id": "667455243", "answers": [{"choice_id": "4385137932"}]}, {"id": "667455245", "answers": [{"choice_id": "4385137945"}]}, {"id": "667455263", "answers": [{"choice_id": "4385138062"}]}]}, {"id": "168830074", "questions": [{"id": "667455268", "answers": [{"choice_id": "4385138110"}]}, {"id": "667455272", "answers": [{"choice_id": "4385138128"}]}, {"id": "667455276", "answers": [{"choice_id": "4385138156"}]}, {"id": "667455290", "answers": [{"choice_id": "4385138213"}]}, {"id": "667455293", "answers": [{"choice_id": "4385138240"}]}]}]}, "emitted_at": 1690184774471} -{"stream": "survey_responses", "data": {"id": "12731183192", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829776", "survey_id": "307784846", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=bEWAYVF9toetHEI2_2BOwwwWoYM7U4BuBikNpbjp7LRt5pr8geoSPYOEqTu3ySeLJ0", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG0D0cevwMQIvVNwWzg_2Bmm4o_3D?respondent_id=12731183192", "total_time": 35, "date_modified": "2021-06-10T09:54:12+00:00", "date_created": "2021-06-10T09:53:36+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784846/responses/12731183192", "pages": [{"id": "168830067", "questions": []}, {"id": "168830068", "questions": [{"id": "667455236", "answers": [{"choice_id": "4385137911"}]}, {"id": "667455240", "answers": [{"choice_id": "4385137924"}]}, {"id": "667455243", "answers": [{"choice_id": "4385137935"}]}, {"id": "667455245", "answers": [{"choice_id": "4385137949"}]}, {"id": "667455263", "answers": [{"choice_id": "4385138063"}]}]}, {"id": "168830074", "questions": [{"id": "667455268", "answers": [{"choice_id": "4385138111"}]}, {"id": "667455272", "answers": [{"choice_id": "4385138127"}]}, {"id": "667455276", "answers": [{"choice_id": "4385138156"}]}, {"id": "667455290", "answers": [{"choice_id": "4385138216"}]}, {"id": "667455293", "answers": [{"choice_id": "4385138241"}]}]}]}, "emitted_at": 1690184774471} -{"stream": "survey_responses", "data": {"id": "12731202203", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829776", "survey_id": "307784846", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=cVfxg1YUnkHWG5xh95O_2FZH6JrXJKyH7dK1jz4klmqpr6kkJW4Nyg6pAF9RU6K8Yo", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG0D0cevwMQIvVNwWzg_2Bmm4o_3D?respondent_id=12731202203", "total_time": 33, "date_modified": "2021-06-10T10:02:33+00:00", "date_created": "2021-06-10T10:02:00+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784846/responses/12731202203", "pages": [{"id": "168830067", "questions": []}, {"id": "168830068", "questions": [{"id": "667455236", "answers": [{"choice_id": "4385137908"}]}, {"id": "667455240", "answers": [{"choice_id": "4385137925"}]}, {"id": "667455243", "answers": [{"choice_id": "4385137932"}]}, {"id": "667455245", "answers": [{"choice_id": "4385137947"}]}, {"id": "667455263", "answers": [{"choice_id": "4385138064"}]}]}, {"id": "168830074", "questions": [{"id": "667455268", "answers": [{"choice_id": "4385138109"}]}, {"id": "667455272", "answers": [{"choice_id": "4385138133"}]}, {"id": "667455276", "answers": [{"choice_id": "4385138156"}]}, {"id": "667455290", "answers": [{"choice_id": "4385138214"}]}, {"id": "667455293", "answers": [{"choice_id": "4385138243"}]}]}]}, "emitted_at": 1690184774471} -{"stream": "survey_responses", "data": {"id": "12731222304", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829776", "survey_id": "307784846", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=YIMZ6ZxwVSC8E_2FINoIUPvqiJVxX2_2B7h72mIiWrB82xJdDZqY_2BGZdCN_2FCfsiwQr_2Bv", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG0D0cevwMQIvVNwWzg_2Bmm4o_3D?respondent_id=12731222304", "total_time": 31, "date_modified": "2021-06-10T10:10:21+00:00", "date_created": "2021-06-10T10:09:50+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784846/responses/12731222304", "pages": [{"id": "168830067", "questions": []}, {"id": "168830068", "questions": [{"id": "667455236", "answers": [{"choice_id": "4385137909"}]}, {"id": "667455240", "answers": [{"choice_id": "4385137925"}]}, {"id": "667455243", "answers": [{"choice_id": "4385137937"}]}, {"id": "667455245", "answers": [{"choice_id": "4385137948"}]}, {"id": "667455263", "answers": [{"choice_id": "4385138062"}]}]}, {"id": "168830074", "questions": [{"id": "667455268", "answers": [{"choice_id": "4385138108"}]}, {"id": "667455272", "answers": [{"choice_id": "4385138131"}]}, {"id": "667455276", "answers": [{"choice_id": "4385138157"}]}, {"id": "667455290", "answers": [{"choice_id": "4385138209"}]}, {"id": "667455293", "answers": [{"choice_id": "4385138243"}]}]}]}, "emitted_at": 1690184774472} -{"stream": "survey_responses", "data": {"id": "12731242020", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829776", "survey_id": "307784846", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=Qg5RZVMhJ6CDLIoNt_2Fdza6f_2FQJjxJXII1xxBSj3_2Fq6BnYaKChCZI73piSP5nhUtB", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG0D0cevwMQIvVNwWzg_2Bmm4o_3D?respondent_id=12731242020", "total_time": 31, "date_modified": "2021-06-10T10:18:19+00:00", "date_created": "2021-06-10T10:17:48+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784846/responses/12731242020", "pages": [{"id": "168830067", "questions": []}, {"id": "168830068", "questions": [{"id": "667455236", "answers": [{"choice_id": "4385137904"}]}, {"id": "667455240", "answers": [{"choice_id": "4385137924"}]}, {"id": "667455243", "answers": [{"choice_id": "4385137932"}]}, {"id": "667455245", "answers": [{"choice_id": "4385137948"}]}, {"id": "667455263", "answers": [{"choice_id": "4385138062"}]}]}, {"id": "168830074", "questions": [{"id": "667455268", "answers": [{"choice_id": "4385138109"}]}, {"id": "667455272", "answers": [{"choice_id": "4385138127"}]}, {"id": "667455276", "answers": [{"choice_id": "4385138155"}]}, {"id": "667455290", "answers": [{"choice_id": "4385138211"}]}, {"id": "667455293", "answers": [{"choice_id": "4385138239"}]}]}]}, "emitted_at": 1690184774472} -{"stream": "survey_responses", "data": {"id": "12731260237", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829776", "survey_id": "307784846", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=mm7m2qJMFmTa3oybMk65fiRc3YyuaqIC4txcJF43fNUph71wneRBif0eIJvUBTcs", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG0D0cevwMQIvVNwWzg_2Bmm4o_3D?respondent_id=12731260237", "total_time": 31, "date_modified": "2021-06-10T10:25:55+00:00", "date_created": "2021-06-10T10:25:23+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784846/responses/12731260237", "pages": [{"id": "168830067", "questions": []}, {"id": "168830068", "questions": [{"id": "667455236", "answers": [{"choice_id": "4385137906"}]}, {"id": "667455240", "answers": [{"choice_id": "4385137926"}]}, {"id": "667455243", "answers": [{"choice_id": "4385137932"}]}, {"id": "667455245", "answers": [{"choice_id": "4385137948"}]}, {"id": "667455263", "answers": [{"choice_id": "4385138062"}]}]}, {"id": "168830074", "questions": [{"id": "667455268", "answers": [{"choice_id": "4385138112"}]}, {"id": "667455272", "answers": [{"choice_id": "4385138126"}]}, {"id": "667455276", "answers": [{"choice_id": "4385138157"}]}, {"id": "667455290", "answers": [{"choice_id": "4385138211"}]}, {"id": "667455293", "answers": [{"choice_id": "4385138241"}]}]}]}, "emitted_at": 1690184774472} -{"stream": "survey_responses", "data": {"id": "12731279578", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829776", "survey_id": "307784846", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=FEjSAqhsmwQRvI0nIbXznSQrW_2Fs9Zok9WSmUIEpPh_2Ftrvxkif6aWXIcsv7S3npp4", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG0D0cevwMQIvVNwWzg_2Bmm4o_3D?respondent_id=12731279578", "total_time": 34, "date_modified": "2021-06-10T10:33:48+00:00", "date_created": "2021-06-10T10:33:13+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784846/responses/12731279578", "pages": [{"id": "168830067", "questions": []}, {"id": "168830068", "questions": [{"id": "667455236", "answers": [{"choice_id": "4385137911"}]}, {"id": "667455240", "answers": [{"choice_id": "4385137926"}]}, {"id": "667455243", "answers": [{"choice_id": "4385137934"}]}, {"id": "667455245", "answers": [{"choice_id": "4385137951"}]}, {"id": "667455263", "answers": [{"choice_id": "4385138062"}]}]}, {"id": "168830074", "questions": [{"id": "667455268", "answers": [{"choice_id": "4385138113"}]}, {"id": "667455272", "answers": [{"choice_id": "4385138130"}]}, {"id": "667455276", "answers": [{"choice_id": "4385138156"}]}, {"id": "667455290", "answers": [{"choice_id": "4385138212"}]}, {"id": "667455293", "answers": [{"choice_id": "4385138241"}]}]}]}, "emitted_at": 1690184774473} -{"stream": "survey_responses", "data": {"id": "12731299025", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829776", "survey_id": "307784846", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=3IJV7_2FsHEFUvR3AFurcV71Uh5BITVe91sCc8NyxnqEaGN7SJCiiuPB87bmvoaGw1", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG0D0cevwMQIvVNwWzg_2Bmm4o_3D?respondent_id=12731299025", "total_time": 34, "date_modified": "2021-06-10T10:41:36+00:00", "date_created": "2021-06-10T10:41:02+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784846/responses/12731299025", "pages": [{"id": "168830067", "questions": []}, {"id": "168830068", "questions": [{"id": "667455236", "answers": [{"choice_id": "4385137905"}]}, {"id": "667455240", "answers": [{"choice_id": "4385137927"}]}, {"id": "667455243", "answers": [{"choice_id": "4385137936"}]}, {"id": "667455245", "answers": [{"choice_id": "4385137950"}]}, {"id": "667455263", "answers": [{"choice_id": "4385138062"}]}]}, {"id": "168830074", "questions": [{"id": "667455268", "answers": [{"choice_id": "4385138109"}]}, {"id": "667455272", "answers": [{"choice_id": "4385138130"}]}, {"id": "667455276", "answers": [{"choice_id": "4385138157"}]}, {"id": "667455290", "answers": [{"choice_id": "4385138215"}]}, {"id": "667455293", "answers": [{"choice_id": "4385138239"}]}]}]}, "emitted_at": 1690184774473} -{"stream": "survey_responses", "data": {"id": "12731319249", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829776", "survey_id": "307784846", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=GlfLviGx8LUycI0kX0T8cF_2FFpkVAIQj5obyoRevuZv4czryRkMD2PZvND9Sb17Up", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG0D0cevwMQIvVNwWzg_2Bmm4o_3D?respondent_id=12731319249", "total_time": 33, "date_modified": "2021-06-10T10:49:44+00:00", "date_created": "2021-06-10T10:49:11+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784846/responses/12731319249", "pages": [{"id": "168830067", "questions": []}, {"id": "168830068", "questions": [{"id": "667455236", "answers": [{"choice_id": "4385137909"}]}, {"id": "667455240", "answers": [{"choice_id": "4385137924"}]}, {"id": "667455243", "answers": [{"choice_id": "4385137931"}]}, {"id": "667455245", "answers": [{"choice_id": "4385137944"}]}, {"id": "667455263", "answers": [{"choice_id": "4385138064"}]}]}, {"id": "168830074", "questions": [{"id": "667455268", "answers": [{"choice_id": "4385138109"}]}, {"id": "667455272", "answers": [{"choice_id": "4385138133"}]}, {"id": "667455276", "answers": [{"choice_id": "4385138157"}]}, {"id": "667455290", "answers": [{"choice_id": "4385138210"}]}, {"id": "667455293", "answers": [{"choice_id": "4385138239"}]}]}]}, "emitted_at": 1690184774473} -{"stream": "survey_responses", "data": {"id": "12731338028", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829776", "survey_id": "307784846", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=H_2BVgWdKB0xr0jY94jPfEAo0VmKxhUUF4pufmYQx4fDmquorp1yOG5FlQBPgiHLrx", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG0D0cevwMQIvVNwWzg_2Bmm4o_3D?respondent_id=12731338028", "total_time": 31, "date_modified": "2021-06-10T10:57:15+00:00", "date_created": "2021-06-10T10:56:44+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784846/responses/12731338028", "pages": [{"id": "168830067", "questions": []}, {"id": "168830068", "questions": [{"id": "667455236", "answers": [{"choice_id": "4385137911"}]}, {"id": "667455240", "answers": [{"choice_id": "4385137927"}]}, {"id": "667455243", "answers": [{"choice_id": "4385137932"}]}, {"id": "667455245", "answers": [{"choice_id": "4385137950"}]}, {"id": "667455263", "answers": [{"choice_id": "4385138060"}]}]}, {"id": "168830074", "questions": [{"id": "667455268", "answers": [{"choice_id": "4385138111"}]}, {"id": "667455272", "answers": [{"choice_id": "4385138131"}]}, {"id": "667455276", "answers": [{"choice_id": "4385138155"}]}, {"id": "667455290", "answers": [{"choice_id": "4385138215"}]}, {"id": "667455293", "answers": [{"choice_id": "4385138241"}]}]}]}, "emitted_at": 1690184774474} -{"stream": "survey_responses", "data": {"id": "12731357558", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405829776", "survey_id": "307784846", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=tfLWR7lVR8G3JC7jFc4srAqfLvwuFEvSW_2Ffbm1CZxKHZ6J8nad7_2FnXzcLHhgXchk", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG0D0cevwMQIvVNwWzg_2Bmm4o_3D?respondent_id=12731357558", "total_time": 31, "date_modified": "2021-06-10T11:05:04+00:00", "date_created": "2021-06-10T11:04:32+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784846/responses/12731357558", "pages": [{"id": "168830067", "questions": []}, {"id": "168830068", "questions": [{"id": "667455236", "answers": [{"choice_id": "4385137904"}]}, {"id": "667455240", "answers": [{"choice_id": "4385137926"}]}, {"id": "667455243", "answers": [{"choice_id": "4385137932"}]}, {"id": "667455245", "answers": [{"choice_id": "4385137950"}]}, {"id": "667455263", "answers": [{"choice_id": "4385138060"}]}]}, {"id": "168830074", "questions": [{"id": "667455268", "answers": [{"choice_id": "4385138108"}]}, {"id": "667455272", "answers": [{"choice_id": "4385138126"}]}, {"id": "667455276", "answers": [{"choice_id": "4385138155"}]}, {"id": "667455290", "answers": [{"choice_id": "4385138214"}]}, {"id": "667455293", "answers": [{"choice_id": "4385138240"}]}]}]}, "emitted_at": 1690184774474} -{"stream": "survey_responses", "data": {"id": "12731029969", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843460", "survey_id": "307784856", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=APV_2FZ7VTm09gZj5b0f0NUIlksURZJjTLWSe4fg4_2FRKA8y93l4UjzYAPkgAla3AIo", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG6cfdBSOknoT0fzVp7iTUO4_3D?respondent_id=12731029969", "total_time": 32, "date_modified": "2021-06-10T08:40:52+00:00", "date_created": "2021-06-10T08:40:19+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784856/responses/12731029969", "pages": [{"id": "168830082", "questions": []}, {"id": "168830083", "questions": [{"id": "667455297", "answers": [{"choice_id": "4385138279"}]}, {"id": "667455299", "answers": [{"choice_id": "4385138286"}]}, {"id": "667455301", "answers": [{"choice_id": "4385138291"}]}, {"id": "667455314", "answers": [{"choice_id": "4385138316"}]}, {"id": "667455318", "answers": [{"choice_id": "4385138386"}]}]}, {"id": "168830087", "questions": [{"id": "667455323", "answers": [{"choice_id": "4385138414"}]}, {"id": "667455325", "answers": [{"choice_id": "4385138427"}]}, {"id": "667455328", "answers": [{"choice_id": "4385138437"}]}, {"id": "667455329", "answers": [{"choice_id": "4385138442"}]}, {"id": "667455332", "answers": [{"choice_id": "4385138468"}]}]}]}, "emitted_at": 1690184775696} -{"stream": "survey_responses", "data": {"id": "12731037636", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843460", "survey_id": "307784856", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=iToNu2Xqp1PZ2VPT32fencmUbPumpf00lYTMhCdGbbG2LXZSZ277leTsfXYDwFtv", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG6cfdBSOknoT0fzVp7iTUO4_3D?respondent_id=12731037636", "total_time": 32, "date_modified": "2021-06-10T08:45:07+00:00", "date_created": "2021-06-10T08:44:34+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784856/responses/12731037636", "pages": [{"id": "168830082", "questions": []}, {"id": "168830083", "questions": [{"id": "667455297", "answers": [{"choice_id": "4385138279"}]}, {"id": "667455299", "answers": [{"choice_id": "4385138286"}]}, {"id": "667455301", "answers": [{"choice_id": "4385138288"}]}, {"id": "667455314", "answers": [{"choice_id": "4385138315"}]}, {"id": "667455318", "answers": [{"choice_id": "4385138389"}]}]}, {"id": "168830087", "questions": [{"id": "667455323", "answers": [{"choice_id": "4385138413"}]}, {"id": "667455325", "answers": [{"choice_id": "4385138424"}]}, {"id": "667455328", "answers": [{"choice_id": "4385138432"}]}, {"id": "667455329", "answers": [{"choice_id": "4385138446"}]}, {"id": "667455332", "answers": [{"choice_id": "4385138501"}]}]}]}, "emitted_at": 1690184775697} -{"stream": "survey_responses", "data": {"id": "12731051876", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843460", "survey_id": "307784856", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=QRhskGxFdS_2BJr5Lu32xTKbFyIMZUHZjCRh3x_2Bw039aa_2BRtw9tyy6pHa1Lf_2FC57ic", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG6cfdBSOknoT0fzVp7iTUO4_3D?respondent_id=12731051876", "total_time": 32, "date_modified": "2021-06-10T08:52:52+00:00", "date_created": "2021-06-10T08:52:19+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784856/responses/12731051876", "pages": [{"id": "168830082", "questions": []}, {"id": "168830083", "questions": [{"id": "667455297", "answers": [{"choice_id": "4385138277"}]}, {"id": "667455299", "answers": [{"choice_id": "4385138287"}]}, {"id": "667455301", "answers": [{"choice_id": "4385138294"}]}, {"id": "667455314", "answers": [{"choice_id": "4385138318"}]}, {"id": "667455318", "answers": [{"choice_id": "4385138386"}]}]}, {"id": "168830087", "questions": [{"id": "667455323", "answers": [{"choice_id": "4385138416"}]}, {"id": "667455325", "answers": [{"choice_id": "4385138422"}]}, {"id": "667455328", "answers": [{"choice_id": "4385138439"}]}, {"id": "667455329", "answers": [{"choice_id": "4385138446"}]}, {"id": "667455332", "answers": [{"choice_id": "4385138500"}]}]}]}, "emitted_at": 1690184775697} -{"stream": "survey_responses", "data": {"id": "12731066155", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843460", "survey_id": "307784856", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=McbGtSUJW9mHgwplanqgtYWhK7utHcK1iZyh5FsyiMXjBFayK7UQxIRtfgwtM_2Bca", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG6cfdBSOknoT0fzVp7iTUO4_3D?respondent_id=12731066155", "total_time": 31, "date_modified": "2021-06-10T09:00:33+00:00", "date_created": "2021-06-10T09:00:01+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784856/responses/12731066155", "pages": [{"id": "168830082", "questions": []}, {"id": "168830083", "questions": [{"id": "667455297", "answers": [{"choice_id": "4385138277"}]}, {"id": "667455299", "answers": [{"choice_id": "4385138283"}]}, {"id": "667455301", "answers": [{"choice_id": "4385138293"}]}, {"id": "667455314", "answers": [{"choice_id": "4385138317"}]}, {"id": "667455318", "answers": [{"choice_id": "4385138386"}]}]}, {"id": "168830087", "questions": [{"id": "667455323", "answers": [{"choice_id": "4385138414"}]}, {"id": "667455325", "answers": [{"choice_id": "4385138423"}]}, {"id": "667455328", "answers": [{"choice_id": "4385138438"}]}, {"id": "667455329", "answers": [{"choice_id": "4385138442"}]}, {"id": "667455332", "answers": [{"choice_id": "4385138502"}]}]}]}, "emitted_at": 1690184775697} -{"stream": "survey_responses", "data": {"id": "12731082183", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843460", "survey_id": "307784856", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=IG3sd3Y0qV_2FQGladjE04s9jje9y8SwVJC2bX5QbuekXXduCLZZ2TedtTVTZU5dGc", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG6cfdBSOknoT0fzVp7iTUO4_3D?respondent_id=12731082183", "total_time": 32, "date_modified": "2021-06-10T09:08:18+00:00", "date_created": "2021-06-10T09:07:46+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784856/responses/12731082183", "pages": [{"id": "168830082", "questions": []}, {"id": "168830083", "questions": [{"id": "667455297", "answers": [{"choice_id": "4385138278"}]}, {"id": "667455299", "answers": [{"choice_id": "4385138284"}]}, {"id": "667455301", "answers": [{"choice_id": "4385138288"}]}, {"id": "667455314", "answers": [{"choice_id": "4385138318"}]}, {"id": "667455318", "answers": [{"choice_id": "4385138386"}]}]}, {"id": "168830087", "questions": [{"id": "667455323", "answers": [{"choice_id": "4385138416"}]}, {"id": "667455325", "answers": [{"choice_id": "4385138423"}]}, {"id": "667455328", "answers": [{"choice_id": "4385138438"}]}, {"id": "667455329", "answers": [{"choice_id": "4385138446"}]}, {"id": "667455332", "answers": [{"choice_id": "4385138499"}]}]}]}, "emitted_at": 1690184775698} -{"stream": "survey_responses", "data": {"id": "12731098436", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843460", "survey_id": "307784856", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=MV4ZWqb4qCvpVsm9l8DE5bPr9uyUbMzuYGBxSnRcZ1REiHW2ugohyYyDG7vYEDPl", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG6cfdBSOknoT0fzVp7iTUO4_3D?respondent_id=12731098436", "total_time": 31, "date_modified": "2021-06-10T09:16:00+00:00", "date_created": "2021-06-10T09:15:28+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784856/responses/12731098436", "pages": [{"id": "168830082", "questions": []}, {"id": "168830083", "questions": [{"id": "667455297", "answers": [{"choice_id": "4385138277"}]}, {"id": "667455299", "answers": [{"choice_id": "4385138284"}]}, {"id": "667455301", "answers": [{"choice_id": "4385138291"}]}, {"id": "667455314", "answers": [{"choice_id": "4385138320"}]}, {"id": "667455318", "answers": [{"choice_id": "4385138389"}]}]}, {"id": "168830087", "questions": [{"id": "667455323", "answers": [{"choice_id": "4385138414"}]}, {"id": "667455325", "answers": [{"choice_id": "4385138421"}]}, {"id": "667455328", "answers": [{"choice_id": "4385138439"}]}, {"id": "667455329", "answers": [{"choice_id": "4385138443"}]}, {"id": "667455332", "answers": [{"choice_id": "4385138501"}]}]}]}, "emitted_at": 1690184775698} -{"stream": "survey_responses", "data": {"id": "12731115074", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843460", "survey_id": "307784856", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=BHzC6lqHgVLzZ1lCVMXIDPuMoH8yVGnrQsGh6WbIIUI3nRO_2F6cMb_2BuTaApRu6hdt", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG6cfdBSOknoT0fzVp7iTUO4_3D?respondent_id=12731115074", "total_time": 31, "date_modified": "2021-06-10T09:23:39+00:00", "date_created": "2021-06-10T09:23:07+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784856/responses/12731115074", "pages": [{"id": "168830082", "questions": []}, {"id": "168830083", "questions": [{"id": "667455297", "answers": [{"choice_id": "4385138277"}]}, {"id": "667455299", "answers": [{"choice_id": "4385138287"}]}, {"id": "667455301", "answers": [{"choice_id": "4385138290"}]}, {"id": "667455314", "answers": [{"choice_id": "4385138318"}]}, {"id": "667455318", "answers": [{"choice_id": "4385138387"}]}]}, {"id": "168830087", "questions": [{"id": "667455323", "answers": [{"choice_id": "4385138416"}]}, {"id": "667455325", "answers": [{"choice_id": "4385138422"}]}, {"id": "667455328", "answers": [{"choice_id": "4385138432"}]}, {"id": "667455329", "answers": [{"choice_id": "4385138441"}]}, {"id": "667455332", "answers": [{"choice_id": "4385138499"}]}]}]}, "emitted_at": 1690184775698} -{"stream": "survey_responses", "data": {"id": "12731131693", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843460", "survey_id": "307784856", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=WYN2n8yADqGUJHwNtkRzDB8UgTsiinkFRQKNlRO2tOl_2FgYSeg5AwgcAfpVBqPc9Z", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG6cfdBSOknoT0fzVp7iTUO4_3D?respondent_id=12731131693", "total_time": 31, "date_modified": "2021-06-10T09:31:22+00:00", "date_created": "2021-06-10T09:30:50+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784856/responses/12731131693", "pages": [{"id": "168830082", "questions": []}, {"id": "168830083", "questions": [{"id": "667455297", "answers": [{"choice_id": "4385138278"}]}, {"id": "667455299", "answers": [{"choice_id": "4385138287"}]}, {"id": "667455301", "answers": [{"choice_id": "4385138292"}]}, {"id": "667455314", "answers": [{"choice_id": "4385138317"}]}, {"id": "667455318", "answers": [{"choice_id": "4385138387"}]}]}, {"id": "168830087", "questions": [{"id": "667455323", "answers": [{"choice_id": "4385138414"}]}, {"id": "667455325", "answers": [{"choice_id": "4385138423"}]}, {"id": "667455328", "answers": [{"choice_id": "4385138438"}]}, {"id": "667455329", "answers": [{"choice_id": "4385138444"}]}, {"id": "667455332", "answers": [{"choice_id": "4385138503"}]}]}]}, "emitted_at": 1690184775699} -{"stream": "survey_responses", "data": {"id": "12731149596", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843460", "survey_id": "307784856", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=I4wWuuErezAO1McHSX4e_2FC_2BNPLPDMiVmc0o7Q21l2qpRsReL4GXorsYqqLman2mF", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG6cfdBSOknoT0fzVp7iTUO4_3D?respondent_id=12731149596", "total_time": 31, "date_modified": "2021-06-10T09:39:04+00:00", "date_created": "2021-06-10T09:38:32+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784856/responses/12731149596", "pages": [{"id": "168830082", "questions": []}, {"id": "168830083", "questions": [{"id": "667455297", "answers": [{"choice_id": "4385138279"}]}, {"id": "667455299", "answers": [{"choice_id": "4385138286"}]}, {"id": "667455301", "answers": [{"choice_id": "4385138288"}]}, {"id": "667455314", "answers": [{"choice_id": "4385138319"}]}, {"id": "667455318", "answers": [{"choice_id": "4385138388"}]}]}, {"id": "168830087", "questions": [{"id": "667455323", "answers": [{"choice_id": "4385138415"}]}, {"id": "667455325", "answers": [{"choice_id": "4385138422"}]}, {"id": "667455328", "answers": [{"choice_id": "4385138434"}]}, {"id": "667455329", "answers": [{"choice_id": "4385138444"}]}, {"id": "667455332", "answers": [{"choice_id": "4385138498"}]}]}]}, "emitted_at": 1690184775699} -{"stream": "survey_responses", "data": {"id": "12731167001", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843460", "survey_id": "307784856", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=oF11Z5EpZ0lcqLyzezPfZ7YyRd8_2FNkQHaVMTUl3_2B_2FvwfgV5z5JscZwGxgOXTCvE_2F", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG6cfdBSOknoT0fzVp7iTUO4_3D?respondent_id=12731167001", "total_time": 32, "date_modified": "2021-06-10T09:46:50+00:00", "date_created": "2021-06-10T09:46:17+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784856/responses/12731167001", "pages": [{"id": "168830082", "questions": []}, {"id": "168830083", "questions": [{"id": "667455297", "answers": [{"choice_id": "4385138279"}]}, {"id": "667455299", "answers": [{"choice_id": "4385138282"}]}, {"id": "667455301", "answers": [{"choice_id": "4385138295"}]}, {"id": "667455314", "answers": [{"choice_id": "4385138320"}]}, {"id": "667455318", "answers": [{"choice_id": "4385138386"}]}]}, {"id": "168830087", "questions": [{"id": "667455323", "answers": [{"choice_id": "4385138414"}]}, {"id": "667455325", "answers": [{"choice_id": "4385138426"}]}, {"id": "667455328", "answers": [{"choice_id": "4385138434"}]}, {"id": "667455329", "answers": [{"choice_id": "4385138441"}]}, {"id": "667455332", "answers": [{"choice_id": "4385138502"}]}]}]}, "emitted_at": 1690184775699} -{"stream": "survey_responses", "data": {"id": "12731184662", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843460", "survey_id": "307784856", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=2m4cTOSjC_2FTrSbvTeTRqC4CdH_2F10I0y0VrEPTPWCsU9zVgHoB7Jj9EFyjh7Eso8A", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG6cfdBSOknoT0fzVp7iTUO4_3D?respondent_id=12731184662", "total_time": 35, "date_modified": "2021-06-10T09:54:53+00:00", "date_created": "2021-06-10T09:54:17+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784856/responses/12731184662", "pages": [{"id": "168830082", "questions": []}, {"id": "168830083", "questions": [{"id": "667455297", "answers": [{"choice_id": "4385138277"}]}, {"id": "667455299", "answers": [{"choice_id": "4385138284"}]}, {"id": "667455301", "answers": [{"choice_id": "4385138292"}]}, {"id": "667455314", "answers": [{"choice_id": "4385138315"}]}, {"id": "667455318", "answers": [{"choice_id": "4385138386"}]}]}, {"id": "168830087", "questions": [{"id": "667455323", "answers": [{"choice_id": "4385138412"}]}, {"id": "667455325", "answers": [{"choice_id": "4385138421"}]}, {"id": "667455328", "answers": [{"choice_id": "4385138438"}]}, {"id": "667455329", "answers": [{"choice_id": "4385138441"}]}, {"id": "667455332", "answers": [{"choice_id": "4385138498"}]}]}]}, "emitted_at": 1690184775700} -{"stream": "survey_responses", "data": {"id": "12731204167", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843460", "survey_id": "307784856", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=MDNWDmNrUDODNjQjATix_2BWBUiXJqVC8POKFh8fvebZujKwgJ_2B6XlCVEKCced6L16", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG6cfdBSOknoT0fzVp7iTUO4_3D?respondent_id=12731204167", "total_time": 32, "date_modified": "2021-06-10T10:03:16+00:00", "date_created": "2021-06-10T10:02:43+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784856/responses/12731204167", "pages": [{"id": "168830082", "questions": []}, {"id": "168830083", "questions": [{"id": "667455297", "answers": [{"choice_id": "4385138279"}]}, {"id": "667455299", "answers": [{"choice_id": "4385138287"}]}, {"id": "667455301", "answers": [{"choice_id": "4385138292"}]}, {"id": "667455314", "answers": [{"choice_id": "4385138315"}]}, {"id": "667455318", "answers": [{"choice_id": "4385138388"}]}]}, {"id": "168830087", "questions": [{"id": "667455323", "answers": [{"choice_id": "4385138414"}]}, {"id": "667455325", "answers": [{"choice_id": "4385138423"}]}, {"id": "667455328", "answers": [{"choice_id": "4385138432"}]}, {"id": "667455329", "answers": [{"choice_id": "4385138447"}]}, {"id": "667455332", "answers": [{"choice_id": "4385138499"}]}]}]}, "emitted_at": 1690184775700} -{"stream": "survey_responses", "data": {"id": "12731223860", "recipient_id": "", "collection_mode": "default", "response_status": "partial", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": ["168830082", "168830083", "168830087"], "collector_id": "405843460", "survey_id": "307784856", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=cwttWQdofPxpfAkwREhinTUX2Ac_2BMviHT6jlan_2BWYlBpX3h0gEfYptMi_2F2sF_2FIut", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG6cfdBSOknoT0fzVp7iTUO4_3D?respondent_id=12731223860", "total_time": 16, "date_modified": "2021-06-10T10:10:45+00:00", "date_created": "2021-06-10T10:10:28+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784856/responses/12731223860", "pages": [{"id": "168830082", "questions": []}, {"id": "168830083", "questions": [{"id": "667455297", "answers": [{"choice_id": "4385138279"}]}, {"id": "667455299", "answers": [{"choice_id": "4385138282"}]}, {"id": "667455301", "answers": [{"choice_id": "4385138292"}]}, {"id": "667455314", "answers": [{"choice_id": "4385138317"}]}, {"id": "667455318", "answers": [{"choice_id": "4385138388"}]}]}, {"id": "168830087", "questions": []}]}, "emitted_at": 1690184775700} -{"stream": "survey_responses", "data": {"id": "12731243435", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843460", "survey_id": "307784856", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=kl6SOoj4lel7HllGtpDwHTPILQJujXr_2Bpkcndfg_2FPQpyHzOR6XWv1saPjDTiOeH4", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG6cfdBSOknoT0fzVp7iTUO4_3D?respondent_id=12731243435", "total_time": 31, "date_modified": "2021-06-10T10:18:55+00:00", "date_created": "2021-06-10T10:18:23+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784856/responses/12731243435", "pages": [{"id": "168830082", "questions": []}, {"id": "168830083", "questions": [{"id": "667455297", "answers": [{"choice_id": "4385138277"}]}, {"id": "667455299", "answers": [{"choice_id": "4385138283"}]}, {"id": "667455301", "answers": [{"choice_id": "4385138288"}]}, {"id": "667455314", "answers": [{"choice_id": "4385138317"}]}, {"id": "667455318", "answers": [{"choice_id": "4385138389"}]}]}, {"id": "168830087", "questions": [{"id": "667455323", "answers": [{"choice_id": "4385138414"}]}, {"id": "667455325", "answers": [{"choice_id": "4385138426"}]}, {"id": "667455328", "answers": [{"choice_id": "4385138434"}]}, {"id": "667455329", "answers": [{"choice_id": "4385138441"}]}, {"id": "667455332", "answers": [{"choice_id": "4385138468"}]}]}]}, "emitted_at": 1690184775701} -{"stream": "survey_responses", "data": {"id": "12731261675", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843460", "survey_id": "307784856", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=5CaWLgB4g6FYPekVOqKYg2mpmDFcRgFD_2FvNGi7Xqozpk2f_2BA5WGRd_2F8q5jr1Jwan", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG6cfdBSOknoT0fzVp7iTUO4_3D?respondent_id=12731261675", "total_time": 31, "date_modified": "2021-06-10T10:26:32+00:00", "date_created": "2021-06-10T10:26:01+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784856/responses/12731261675", "pages": [{"id": "168830082", "questions": []}, {"id": "168830083", "questions": [{"id": "667455297", "answers": [{"choice_id": "4385138277"}]}, {"id": "667455299", "answers": [{"choice_id": "4385138282"}]}, {"id": "667455301", "answers": [{"choice_id": "4385138290"}]}, {"id": "667455314", "answers": [{"choice_id": "4385138315"}]}, {"id": "667455318", "answers": [{"choice_id": "4385138389"}]}]}, {"id": "168830087", "questions": [{"id": "667455323", "answers": [{"choice_id": "4385138416"}]}, {"id": "667455325", "answers": [{"choice_id": "4385138423"}]}, {"id": "667455328", "answers": [{"choice_id": "4385138434"}]}, {"id": "667455329", "answers": [{"choice_id": "4385138445"}]}, {"id": "667455332", "answers": [{"choice_id": "4385138500"}]}]}]}, "emitted_at": 1690184775701} -{"stream": "survey_responses", "data": {"id": "12731281414", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843460", "survey_id": "307784856", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=q5Ky3UvnlTpc15S78mm8JQSFeuSBuv8iuZCnl9MZYl_2BB5nkG8L9uJmUl1JNNtwyv", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG6cfdBSOknoT0fzVp7iTUO4_3D?respondent_id=12731281414", "total_time": 34, "date_modified": "2021-06-10T10:34:33+00:00", "date_created": "2021-06-10T10:33:58+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784856/responses/12731281414", "pages": [{"id": "168830082", "questions": []}, {"id": "168830083", "questions": [{"id": "667455297", "answers": [{"choice_id": "4385138279"}]}, {"id": "667455299", "answers": [{"choice_id": "4385138282"}]}, {"id": "667455301", "answers": [{"choice_id": "4385138293"}]}, {"id": "667455314", "answers": [{"choice_id": "4385138316"}]}, {"id": "667455318", "answers": [{"choice_id": "4385138388"}]}]}, {"id": "168830087", "questions": [{"id": "667455323", "answers": [{"choice_id": "4385138412"}]}, {"id": "667455325", "answers": [{"choice_id": "4385138427"}]}, {"id": "667455328", "answers": [{"choice_id": "4385138439"}]}, {"id": "667455329", "answers": [{"choice_id": "4385138445"}]}, {"id": "667455332", "answers": [{"choice_id": "4385138468"}]}]}]}, "emitted_at": 1690184775701} -{"stream": "survey_responses", "data": {"id": "12731300699", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843460", "survey_id": "307784856", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=ATEXrllobiYc9DU6PcEpkS_2F8617dJW0dLGMQupo4QwGEGwqFQJ7m6Bks7oWF7Rae", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG6cfdBSOknoT0fzVp7iTUO4_3D?respondent_id=12731300699", "total_time": 34, "date_modified": "2021-06-10T10:42:18+00:00", "date_created": "2021-06-10T10:41:43+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784856/responses/12731300699", "pages": [{"id": "168830082", "questions": []}, {"id": "168830083", "questions": [{"id": "667455297", "answers": [{"choice_id": "4385138277"}]}, {"id": "667455299", "answers": [{"choice_id": "4385138282"}]}, {"id": "667455301", "answers": [{"choice_id": "4385138295"}]}, {"id": "667455314", "answers": [{"choice_id": "4385138315"}]}, {"id": "667455318", "answers": [{"choice_id": "4385138386"}]}]}, {"id": "168830087", "questions": [{"id": "667455323", "answers": [{"choice_id": "4385138416"}]}, {"id": "667455325", "answers": [{"choice_id": "4385138427"}]}, {"id": "667455328", "answers": [{"choice_id": "4385138433"}]}, {"id": "667455329", "answers": [{"choice_id": "4385138442"}]}, {"id": "667455332", "answers": [{"choice_id": "4385138503"}]}]}]}, "emitted_at": 1690184775702} -{"stream": "survey_responses", "data": {"id": "12731320713", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843460", "survey_id": "307784856", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=319qghptwfcFVSVtQrSzrw_2BQzR8BlHJbviMDirTnupaGrVddWmTDjhG7R3Tkvb5J", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG6cfdBSOknoT0fzVp7iTUO4_3D?respondent_id=12731320713", "total_time": 31, "date_modified": "2021-06-10T10:50:19+00:00", "date_created": "2021-06-10T10:49:48+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784856/responses/12731320713", "pages": [{"id": "168830082", "questions": []}, {"id": "168830083", "questions": [{"id": "667455297", "answers": [{"choice_id": "4385138277"}]}, {"id": "667455299", "answers": [{"choice_id": "4385138283"}]}, {"id": "667455301", "answers": [{"choice_id": "4385138293"}]}, {"id": "667455314", "answers": [{"choice_id": "4385138317"}]}, {"id": "667455318", "answers": [{"choice_id": "4385138386"}]}]}, {"id": "168830087", "questions": [{"id": "667455323", "answers": [{"choice_id": "4385138415"}]}, {"id": "667455325", "answers": [{"choice_id": "4385138426"}]}, {"id": "667455328", "answers": [{"choice_id": "4385138433"}]}, {"id": "667455329", "answers": [{"choice_id": "4385138444"}]}, {"id": "667455332", "answers": [{"choice_id": "4385138499"}]}]}]}, "emitted_at": 1690184775702} -{"stream": "survey_responses", "data": {"id": "12731339540", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843460", "survey_id": "307784856", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=unN8NiMSyoHjiCeMXzhPxCruuBwPKAXbf1kk1TZxnnc04vi5qaipiErEFBUKU3Vw", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG6cfdBSOknoT0fzVp7iTUO4_3D?respondent_id=12731339540", "total_time": 32, "date_modified": "2021-06-10T10:57:54+00:00", "date_created": "2021-06-10T10:57:22+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784856/responses/12731339540", "pages": [{"id": "168830082", "questions": []}, {"id": "168830083", "questions": [{"id": "667455297", "answers": [{"choice_id": "4385138277"}]}, {"id": "667455299", "answers": [{"choice_id": "4385138287"}]}, {"id": "667455301", "answers": [{"choice_id": "4385138291"}]}, {"id": "667455314", "answers": [{"choice_id": "4385138318"}]}, {"id": "667455318", "answers": [{"choice_id": "4385138387"}]}]}, {"id": "168830087", "questions": [{"id": "667455323", "answers": [{"choice_id": "4385138412"}]}, {"id": "667455325", "answers": [{"choice_id": "4385138426"}]}, {"id": "667455328", "answers": [{"choice_id": "4385138432"}]}, {"id": "667455329", "answers": [{"choice_id": "4385138444"}]}, {"id": "667455332", "answers": [{"choice_id": "4385138498"}]}]}]}, "emitted_at": 1690184775702} -{"stream": "survey_responses", "data": {"id": "12731359116", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843460", "survey_id": "307784856", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=t_2BHFU_2BkwbG0L8cOxkJuYSUIt0ArFixObCD15GpXjKrp4kEQv20jOpUIgPXB30R0f", "analyze_url": "https://www.surveymonkey.com/analyze/browse/moGgts_2Bl1LYlJ1mbVw6XG6cfdBSOknoT0fzVp7iTUO4_3D?respondent_id=12731359116", "total_time": 32, "date_modified": "2021-06-10T11:05:43+00:00", "date_created": "2021-06-10T11:05:11+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307784856/responses/12731359116", "pages": [{"id": "168830082", "questions": []}, {"id": "168830083", "questions": [{"id": "667455297", "answers": [{"choice_id": "4385138277"}]}, {"id": "667455299", "answers": [{"choice_id": "4385138284"}]}, {"id": "667455301", "answers": [{"choice_id": "4385138290"}]}, {"id": "667455314", "answers": [{"choice_id": "4385138319"}]}, {"id": "667455318", "answers": [{"choice_id": "4385138387"}]}]}, {"id": "168830087", "questions": [{"id": "667455323", "answers": [{"choice_id": "4385138412"}]}, {"id": "667455325", "answers": [{"choice_id": "4385138426"}]}, {"id": "667455328", "answers": [{"choice_id": "4385138434"}]}, {"id": "667455329", "answers": [{"choice_id": "4385138441"}]}, {"id": "667455332", "answers": [{"choice_id": "4385138501"}]}]}]}, "emitted_at": 1690184775703} -{"stream": "survey_responses", "data": {"id": "12731031048", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843624", "survey_id": "307785388", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=oj7KuTDJShqSiV97LkSV6uvYXz_2BBmc1ocrvgJB_2F6EIN4dNLEz7QkUKli1frVW9z_2B", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5QHdVgvFd_2Bn4fvmj_2F1aNtwM9q4oP_2B3VqXy_2BeJTiumoQ_3D?respondent_id=12731031048", "total_time": 32, "date_modified": "2021-06-10T08:41:28+00:00", "date_created": "2021-06-10T08:40:56+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785388/responses/12731031048", "pages": [{"id": "168831335", "questions": []}, {"id": "168831336", "questions": [{"id": "667461429", "answers": [{"choice_id": "4385172937"}]}, {"id": "667461433", "answers": [{"choice_id": "4385172950"}]}, {"id": "667461439", "answers": [{"choice_id": "4385172991"}]}, {"id": "667461441", "answers": [{"choice_id": "4385173023"}]}, {"id": "667461444", "answers": [{"choice_id": "4385173041"}]}]}, {"id": "168831340", "questions": [{"id": "667461449", "answers": [{"choice_id": "4385173078"}]}, {"id": "667461452", "answers": [{"choice_id": "4385173088"}]}, {"id": "667461454", "answers": [{"choice_id": "4385173103"}]}, {"id": "667461456", "answers": [{"choice_id": "4385173115"}]}, {"id": "667461462", "answers": [{"choice_id": "4385173170"}]}]}]}, "emitted_at": 1690184776892} -{"stream": "survey_responses", "data": {"id": "12731038731", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843624", "survey_id": "307785388", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=NvnexeVe1TL2cbU10b8utpSHSxTK1HTwbsncQeCDCuIJKnwvRo58aul3yetztS1_2B", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5QHdVgvFd_2Bn4fvmj_2F1aNtwM9q4oP_2B3VqXy_2BeJTiumoQ_3D?respondent_id=12731038731", "total_time": 31, "date_modified": "2021-06-10T08:45:43+00:00", "date_created": "2021-06-10T08:45:11+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785388/responses/12731038731", "pages": [{"id": "168831335", "questions": []}, {"id": "168831336", "questions": [{"id": "667461429", "answers": [{"choice_id": "4385172935"}]}, {"id": "667461433", "answers": [{"choice_id": "4385172953"}]}, {"id": "667461439", "answers": [{"choice_id": "4385172990"}]}, {"id": "667461441", "answers": [{"choice_id": "4385173021"}]}, {"id": "667461444", "answers": [{"choice_id": "4385173041"}]}]}, {"id": "168831340", "questions": [{"id": "667461449", "answers": [{"choice_id": "4385173077"}]}, {"id": "667461452", "answers": [{"choice_id": "4385173089"}]}, {"id": "667461454", "answers": [{"choice_id": "4385173104"}]}, {"id": "667461456", "answers": [{"choice_id": "4385173116"}]}, {"id": "667461462", "answers": [{"choice_id": "4385173170"}]}]}]}, "emitted_at": 1690184776892} -{"stream": "survey_responses", "data": {"id": "12731053047", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843624", "survey_id": "307785388", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=TWL1aXuGzW_2Bld3kwRCla449GewPFQF3JFwcpsO1LndH1s3tFfKtesgelUuLTCGR6", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5QHdVgvFd_2Bn4fvmj_2F1aNtwM9q4oP_2B3VqXy_2BeJTiumoQ_3D?respondent_id=12731053047", "total_time": 31, "date_modified": "2021-06-10T08:53:29+00:00", "date_created": "2021-06-10T08:52:57+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785388/responses/12731053047", "pages": [{"id": "168831335", "questions": []}, {"id": "168831336", "questions": [{"id": "667461429", "answers": [{"choice_id": "4385172937"}]}, {"id": "667461433", "answers": [{"choice_id": "4385172948"}]}, {"id": "667461439", "answers": [{"choice_id": "4385172991"}]}, {"id": "667461441", "answers": [{"choice_id": "4385173020"}]}, {"id": "667461444", "answers": [{"choice_id": "4385173045"}]}]}, {"id": "168831340", "questions": [{"id": "667461449", "answers": [{"choice_id": "4385173077"}]}, {"id": "667461452", "answers": [{"choice_id": "4385173088"}]}, {"id": "667461454", "answers": [{"choice_id": "4385173105"}]}, {"id": "667461456", "answers": [{"choice_id": "4385173114"}]}, {"id": "667461462", "answers": [{"choice_id": "4385173168"}]}]}]}, "emitted_at": 1690184776893} -{"stream": "survey_responses", "data": {"id": "12731067385", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843624", "survey_id": "307785388", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=3aXaERIhrxNROtqwHMzJ7BQrVYzI9ua7E_2FxCsZfjUOxvEf88Av09k8zGLpOFLBw_2B", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5QHdVgvFd_2Bn4fvmj_2F1aNtwM9q4oP_2B3VqXy_2BeJTiumoQ_3D?respondent_id=12731067385", "total_time": 31, "date_modified": "2021-06-10T09:01:09+00:00", "date_created": "2021-06-10T09:00:37+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785388/responses/12731067385", "pages": [{"id": "168831335", "questions": []}, {"id": "168831336", "questions": [{"id": "667461429", "answers": [{"choice_id": "4385172934"}]}, {"id": "667461433", "answers": [{"choice_id": "4385172953"}]}, {"id": "667461439", "answers": [{"choice_id": "4385172995"}]}, {"id": "667461441", "answers": [{"choice_id": "4385173022"}]}, {"id": "667461444", "answers": [{"choice_id": "4385173042"}]}]}, {"id": "168831340", "questions": [{"id": "667461449", "answers": [{"choice_id": "4385173077"}]}, {"id": "667461452", "answers": [{"choice_id": "4385173091"}]}, {"id": "667461454", "answers": [{"choice_id": "4385173104"}]}, {"id": "667461456", "answers": [{"choice_id": "4385173114"}]}, {"id": "667461462", "answers": [{"choice_id": "4385173168"}]}]}]}, "emitted_at": 1690184776893} -{"stream": "survey_responses", "data": {"id": "12731083422", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843624", "survey_id": "307785388", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=kmKEpAG_2FngBuSmfP0e49vf6Q2YDpzhvzQyHTBrwvk0ZeV8Na8b5Evuz7N4wEaQuN", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5QHdVgvFd_2Bn4fvmj_2F1aNtwM9q4oP_2B3VqXy_2BeJTiumoQ_3D?respondent_id=12731083422", "total_time": 32, "date_modified": "2021-06-10T09:08:54+00:00", "date_created": "2021-06-10T09:08:22+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785388/responses/12731083422", "pages": [{"id": "168831335", "questions": []}, {"id": "168831336", "questions": [{"id": "667461429", "answers": [{"choice_id": "4385172932"}]}, {"id": "667461433", "answers": [{"choice_id": "4385172953"}]}, {"id": "667461439", "answers": [{"choice_id": "4385172990"}]}, {"id": "667461441", "answers": [{"choice_id": "4385173022"}]}, {"id": "667461444", "answers": [{"choice_id": "4385173042"}]}]}, {"id": "168831340", "questions": [{"id": "667461449", "answers": [{"choice_id": "4385173078"}]}, {"id": "667461452", "answers": [{"choice_id": "4385173089"}]}, {"id": "667461454", "answers": [{"choice_id": "4385173105"}]}, {"id": "667461456", "answers": [{"choice_id": "4385173113"}]}, {"id": "667461462", "answers": [{"choice_id": "4385173169"}]}]}]}, "emitted_at": 1690184776893} -{"stream": "survey_responses", "data": {"id": "12731099693", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843624", "survey_id": "307785388", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=YgZuSogQMKDrPTmOoLetMqFtiUi_2BJHKxcATXloIoBnmzHLDvYmNSSoajIC4_2BX2qc", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5QHdVgvFd_2Bn4fvmj_2F1aNtwM9q4oP_2B3VqXy_2BeJTiumoQ_3D?respondent_id=12731099693", "total_time": 31, "date_modified": "2021-06-10T09:16:36+00:00", "date_created": "2021-06-10T09:16:04+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785388/responses/12731099693", "pages": [{"id": "168831335", "questions": []}, {"id": "168831336", "questions": [{"id": "667461429", "answers": [{"choice_id": "4385172933"}]}, {"id": "667461433", "answers": [{"choice_id": "4385172951"}]}, {"id": "667461439", "answers": [{"choice_id": "4385172993"}]}, {"id": "667461441", "answers": [{"choice_id": "4385173023"}]}, {"id": "667461444", "answers": [{"choice_id": "4385173043"}]}]}, {"id": "168831340", "questions": [{"id": "667461449", "answers": [{"choice_id": "4385173076"}]}, {"id": "667461452", "answers": [{"choice_id": "4385173090"}]}, {"id": "667461454", "answers": [{"choice_id": "4385173105"}]}, {"id": "667461456", "answers": [{"choice_id": "4385173113"}]}, {"id": "667461462", "answers": [{"choice_id": "4385173168"}]}]}]}, "emitted_at": 1690184776894} -{"stream": "survey_responses", "data": {"id": "12731116372", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843624", "survey_id": "307785388", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=w67tX9wQ238KZ8cKM0cnTSfg3hzhdt35TpCZfm6_2B7K6B_2F0VZpeEefqMTVZQXnuVe", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5QHdVgvFd_2Bn4fvmj_2F1aNtwM9q4oP_2B3VqXy_2BeJTiumoQ_3D?respondent_id=12731116372", "total_time": 31, "date_modified": "2021-06-10T09:24:15+00:00", "date_created": "2021-06-10T09:23:43+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785388/responses/12731116372", "pages": [{"id": "168831335", "questions": []}, {"id": "168831336", "questions": [{"id": "667461429", "answers": [{"choice_id": "4385172932"}]}, {"id": "667461433", "answers": [{"choice_id": "4385172948"}]}, {"id": "667461439", "answers": [{"choice_id": "4385172990"}]}, {"id": "667461441", "answers": [{"choice_id": "4385173022"}]}, {"id": "667461444", "answers": [{"choice_id": "4385173046"}]}]}, {"id": "168831340", "questions": [{"id": "667461449", "answers": [{"choice_id": "4385173077"}]}, {"id": "667461452", "answers": [{"choice_id": "4385173089"}]}, {"id": "667461454", "answers": [{"choice_id": "4385173103"}]}, {"id": "667461456", "answers": [{"choice_id": "4385173117"}]}, {"id": "667461462", "answers": [{"choice_id": "4385173167"}]}]}]}, "emitted_at": 1690184776894} -{"stream": "survey_responses", "data": {"id": "12731133155", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843624", "survey_id": "307785388", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=KkixO6u45W2ICclAKrxKYNgJxrynUdDZxArXycLA_2FpZUBsG3FcMXHvqQmk3DsHGL", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5QHdVgvFd_2Bn4fvmj_2F1aNtwM9q4oP_2B3VqXy_2BeJTiumoQ_3D?respondent_id=12731133155", "total_time": 31, "date_modified": "2021-06-10T09:31:58+00:00", "date_created": "2021-06-10T09:31:27+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785388/responses/12731133155", "pages": [{"id": "168831335", "questions": []}, {"id": "168831336", "questions": [{"id": "667461429", "answers": [{"choice_id": "4385172933"}]}, {"id": "667461433", "answers": [{"choice_id": "4385172948"}]}, {"id": "667461439", "answers": [{"choice_id": "4385172990"}]}, {"id": "667461441", "answers": [{"choice_id": "4385173022"}]}, {"id": "667461444", "answers": [{"choice_id": "4385173044"}]}]}, {"id": "168831340", "questions": [{"id": "667461449", "answers": [{"choice_id": "4385173078"}]}, {"id": "667461452", "answers": [{"choice_id": "4385173090"}]}, {"id": "667461454", "answers": [{"choice_id": "4385173105"}]}, {"id": "667461456", "answers": [{"choice_id": "4385173118"}]}, {"id": "667461462", "answers": [{"choice_id": "4385173168"}]}]}]}, "emitted_at": 1690184776894} -{"stream": "survey_responses", "data": {"id": "12731151051", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843624", "survey_id": "307785388", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=Ep0IYx019QcuhuuRrvANkBTw0Ti8WirkyKLpMo62qXxWHwkYGtgh9c143qYGmywG", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5QHdVgvFd_2Bn4fvmj_2F1aNtwM9q4oP_2B3VqXy_2BeJTiumoQ_3D?respondent_id=12731151051", "total_time": 31, "date_modified": "2021-06-10T09:39:40+00:00", "date_created": "2021-06-10T09:39:08+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785388/responses/12731151051", "pages": [{"id": "168831335", "questions": []}, {"id": "168831336", "questions": [{"id": "667461429", "answers": [{"choice_id": "4385172937"}]}, {"id": "667461433", "answers": [{"choice_id": "4385172949"}]}, {"id": "667461439", "answers": [{"choice_id": "4385172993"}]}, {"id": "667461441", "answers": [{"choice_id": "4385173020"}]}, {"id": "667461444", "answers": [{"choice_id": "4385173046"}]}]}, {"id": "168831340", "questions": [{"id": "667461449", "answers": [{"choice_id": "4385173076"}]}, {"id": "667461452", "answers": [{"choice_id": "4385173091"}]}, {"id": "667461454", "answers": [{"choice_id": "4385173105"}]}, {"id": "667461456", "answers": [{"choice_id": "4385173118"}]}, {"id": "667461462", "answers": [{"choice_id": "4385173169"}]}]}]}, "emitted_at": 1690184776895} -{"stream": "survey_responses", "data": {"id": "12731168310", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843624", "survey_id": "307785388", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=HS7SOSb9j2AarAA_2BdLKuhIUhWoas6e_2BtUPh9aNHRDTHOTwcnvASYUjIFJk2Aha4I", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5QHdVgvFd_2Bn4fvmj_2F1aNtwM9q4oP_2B3VqXy_2BeJTiumoQ_3D?respondent_id=12731168310", "total_time": 31, "date_modified": "2021-06-10T09:47:26+00:00", "date_created": "2021-06-10T09:46:54+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785388/responses/12731168310", "pages": [{"id": "168831335", "questions": []}, {"id": "168831336", "questions": [{"id": "667461429", "answers": [{"choice_id": "4385172933"}]}, {"id": "667461433", "answers": [{"choice_id": "4385172955"}]}, {"id": "667461439", "answers": [{"choice_id": "4385172995"}]}, {"id": "667461441", "answers": [{"choice_id": "4385173020"}]}, {"id": "667461444", "answers": [{"choice_id": "4385173047"}]}]}, {"id": "168831340", "questions": [{"id": "667461449", "answers": [{"choice_id": "4385173077"}]}, {"id": "667461452", "answers": [{"choice_id": "4385173089"}]}, {"id": "667461454", "answers": [{"choice_id": "4385173106"}]}, {"id": "667461456", "answers": [{"choice_id": "4385173113"}]}, {"id": "667461462", "answers": [{"choice_id": "4385173168"}]}]}]}, "emitted_at": 1690184776895} -{"stream": "survey_responses", "data": {"id": "12731186075", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843624", "survey_id": "307785388", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=Cc0IypCRFbsr7v94sQfKFHhIf_2FH89oCDUARL5wWkYsguNXp0zyA6DxHEdm_2BNMhJ_2F", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5QHdVgvFd_2Bn4fvmj_2F1aNtwM9q4oP_2B3VqXy_2BeJTiumoQ_3D?respondent_id=12731186075", "total_time": 35, "date_modified": "2021-06-10T09:55:32+00:00", "date_created": "2021-06-10T09:54:57+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785388/responses/12731186075", "pages": [{"id": "168831335", "questions": []}, {"id": "168831336", "questions": [{"id": "667461429", "answers": [{"choice_id": "4385172932"}]}, {"id": "667461433", "answers": [{"choice_id": "4385172954"}]}, {"id": "667461439", "answers": [{"choice_id": "4385172992"}]}, {"id": "667461441", "answers": [{"choice_id": "4385173022"}]}, {"id": "667461444", "answers": [{"choice_id": "4385173041"}]}]}, {"id": "168831340", "questions": [{"id": "667461449", "answers": [{"choice_id": "4385173078"}]}, {"id": "667461452", "answers": [{"choice_id": "4385173088"}]}, {"id": "667461454", "answers": [{"choice_id": "4385173104"}]}, {"id": "667461456", "answers": [{"choice_id": "4385173118"}]}, {"id": "667461462", "answers": [{"choice_id": "4385173169"}]}]}]}, "emitted_at": 1690184776895} -{"stream": "survey_responses", "data": {"id": "12731205704", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843624", "survey_id": "307785388", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=nuK5QZZCJUv9YoeVqOpq22Tt3VOETEzxSzV_2Fk4xjbEXXiwUUe6gMiDzLVDs1IWo_2F", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5QHdVgvFd_2Bn4fvmj_2F1aNtwM9q4oP_2B3VqXy_2BeJTiumoQ_3D?respondent_id=12731205704", "total_time": 31, "date_modified": "2021-06-10T10:03:50+00:00", "date_created": "2021-06-10T10:03:19+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785388/responses/12731205704", "pages": [{"id": "168831335", "questions": []}, {"id": "168831336", "questions": [{"id": "667461429", "answers": [{"choice_id": "4385172933"}]}, {"id": "667461433", "answers": [{"choice_id": "4385172948"}]}, {"id": "667461439", "answers": [{"choice_id": "4385172995"}]}, {"id": "667461441", "answers": [{"choice_id": "4385173023"}]}, {"id": "667461444", "answers": [{"choice_id": "4385173045"}]}]}, {"id": "168831340", "questions": [{"id": "667461449", "answers": [{"choice_id": "4385173077"}]}, {"id": "667461452", "answers": [{"choice_id": "4385173090"}]}, {"id": "667461454", "answers": [{"choice_id": "4385173104"}]}, {"id": "667461456", "answers": [{"choice_id": "4385173115"}]}, {"id": "667461462", "answers": [{"choice_id": "4385173168"}]}]}]}, "emitted_at": 1690184776896} -{"stream": "survey_responses", "data": {"id": "12731225522", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843624", "survey_id": "307785388", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=E977wLXc6H_2BfVEbR4rzSs1q6JCgk1O_2BeQ5Sr2f_2BEeSGpDaPDGuslUALFqPRPlZDW", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5QHdVgvFd_2Bn4fvmj_2F1aNtwM9q4oP_2B3VqXy_2BeJTiumoQ_3D?respondent_id=12731225522", "total_time": 31, "date_modified": "2021-06-10T10:11:38+00:00", "date_created": "2021-06-10T10:11:06+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785388/responses/12731225522", "pages": [{"id": "168831335", "questions": []}, {"id": "168831336", "questions": [{"id": "667461429", "answers": [{"choice_id": "4385172935"}]}, {"id": "667461433", "answers": [{"choice_id": "4385172951"}]}, {"id": "667461439", "answers": [{"choice_id": "4385172995"}]}, {"id": "667461441", "answers": [{"choice_id": "4385173022"}]}, {"id": "667461444", "answers": [{"choice_id": "4385173044"}]}]}, {"id": "168831340", "questions": [{"id": "667461449", "answers": [{"choice_id": "4385173076"}]}, {"id": "667461452", "answers": [{"choice_id": "4385173091"}]}, {"id": "667461454", "answers": [{"choice_id": "4385173103"}]}, {"id": "667461456", "answers": [{"choice_id": "4385173118"}]}, {"id": "667461462", "answers": [{"choice_id": "4385173167"}]}]}]}, "emitted_at": 1690184776896} -{"stream": "survey_responses", "data": {"id": "12731244870", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843624", "survey_id": "307785388", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=3wY0IYE_2F0V4hMJO6Hd2YuVpnFpa2qt5soGs4KM6L3wgFb3k2BHMHLGtDGfFRCSfQ", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5QHdVgvFd_2Bn4fvmj_2F1aNtwM9q4oP_2B3VqXy_2BeJTiumoQ_3D?respondent_id=12731244870", "total_time": 31, "date_modified": "2021-06-10T10:19:30+00:00", "date_created": "2021-06-10T10:18:59+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785388/responses/12731244870", "pages": [{"id": "168831335", "questions": []}, {"id": "168831336", "questions": [{"id": "667461429", "answers": [{"choice_id": "4385172932"}]}, {"id": "667461433", "answers": [{"choice_id": "4385172953"}]}, {"id": "667461439", "answers": [{"choice_id": "4385172993"}]}, {"id": "667461441", "answers": [{"choice_id": "4385173020"}]}, {"id": "667461444", "answers": [{"choice_id": "4385173040"}]}]}, {"id": "168831340", "questions": [{"id": "667461449", "answers": [{"choice_id": "4385173077"}]}, {"id": "667461452", "answers": [{"choice_id": "4385173091"}]}, {"id": "667461454", "answers": [{"choice_id": "4385173102"}]}, {"id": "667461456", "answers": [{"choice_id": "4385173116"}]}, {"id": "667461462", "answers": [{"choice_id": "4385173170"}]}]}]}, "emitted_at": 1690184776896} -{"stream": "survey_responses", "data": {"id": "12731263131", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843624", "survey_id": "307785388", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=StRkIgYeE5JwNLawW20wxeS_2FED8yP1J_2FHGw3CFhxGAXfN5lodZFoZ7n2CQYuFI4m", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5QHdVgvFd_2Bn4fvmj_2F1aNtwM9q4oP_2B3VqXy_2BeJTiumoQ_3D?respondent_id=12731263131", "total_time": 31, "date_modified": "2021-06-10T10:27:08+00:00", "date_created": "2021-06-10T10:26:37+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785388/responses/12731263131", "pages": [{"id": "168831335", "questions": []}, {"id": "168831336", "questions": [{"id": "667461429", "answers": [{"choice_id": "4385172933"}]}, {"id": "667461433", "answers": [{"choice_id": "4385172951"}]}, {"id": "667461439", "answers": [{"choice_id": "4385172995"}]}, {"id": "667461441", "answers": [{"choice_id": "4385173021"}]}, {"id": "667461444", "answers": [{"choice_id": "4385173046"}]}]}, {"id": "168831340", "questions": [{"id": "667461449", "answers": [{"choice_id": "4385173078"}]}, {"id": "667461452", "answers": [{"choice_id": "4385173089"}]}, {"id": "667461454", "answers": [{"choice_id": "4385173106"}]}, {"id": "667461456", "answers": [{"choice_id": "4385173118"}]}, {"id": "667461462", "answers": [{"choice_id": "4385173169"}]}]}]}, "emitted_at": 1690184776897} -{"stream": "survey_responses", "data": {"id": "12731283031", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843624", "survey_id": "307785388", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=J4cSx3UMmpoujky2cMr76AMl3Fslun9t7M7fr8ahVpWNGLwaW_2Bu8X3jQJkNttUcC", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5QHdVgvFd_2Bn4fvmj_2F1aNtwM9q4oP_2B3VqXy_2BeJTiumoQ_3D?respondent_id=12731283031", "total_time": 34, "date_modified": "2021-06-10T10:35:10+00:00", "date_created": "2021-06-10T10:34:36+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785388/responses/12731283031", "pages": [{"id": "168831335", "questions": []}, {"id": "168831336", "questions": [{"id": "667461429", "answers": [{"choice_id": "4385172936"}]}, {"id": "667461433", "answers": [{"choice_id": "4385172948"}]}, {"id": "667461439", "answers": [{"choice_id": "4385172994"}]}, {"id": "667461441", "answers": [{"choice_id": "4385173021"}]}, {"id": "667461444", "answers": [{"choice_id": "4385173041"}]}]}, {"id": "168831340", "questions": [{"id": "667461449", "answers": [{"choice_id": "4385173078"}]}, {"id": "667461452", "answers": [{"choice_id": "4385173090"}]}, {"id": "667461454", "answers": [{"choice_id": "4385173103"}]}, {"id": "667461456", "answers": [{"choice_id": "4385173118"}]}, {"id": "667461462", "answers": [{"choice_id": "4385173169"}]}]}]}, "emitted_at": 1690184776897} -{"stream": "survey_responses", "data": {"id": "12731302340", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843624", "survey_id": "307785388", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=ILLNvXOj_2BrxmX_2By2ZRAmTpFslgCTd_2Feoso7P36bgr0_2FDMlf4kKul9iF6Ox4uTGnl", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5QHdVgvFd_2Bn4fvmj_2F1aNtwM9q4oP_2B3VqXy_2BeJTiumoQ_3D?respondent_id=12731302340", "total_time": 34, "date_modified": "2021-06-10T10:42:56+00:00", "date_created": "2021-06-10T10:42:22+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785388/responses/12731302340", "pages": [{"id": "168831335", "questions": []}, {"id": "168831336", "questions": [{"id": "667461429", "answers": [{"choice_id": "4385172937"}]}, {"id": "667461433", "answers": [{"choice_id": "4385172951"}]}, {"id": "667461439", "answers": [{"choice_id": "4385172994"}]}, {"id": "667461441", "answers": [{"choice_id": "4385173022"}]}, {"id": "667461444", "answers": [{"choice_id": "4385173042"}]}]}, {"id": "168831340", "questions": [{"id": "667461449", "answers": [{"choice_id": "4385173077"}]}, {"id": "667461452", "answers": [{"choice_id": "4385173091"}]}, {"id": "667461454", "answers": [{"choice_id": "4385173105"}]}, {"id": "667461456", "answers": [{"choice_id": "4385173114"}]}, {"id": "667461462", "answers": [{"choice_id": "4385173167"}]}]}]}, "emitted_at": 1690184776897} -{"stream": "survey_responses", "data": {"id": "12731322211", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843624", "survey_id": "307785388", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=4UUhK9SRY_2FM3fA_2FqkLCL5mS8aBtccVCgcBQMTDfpX84us_2FZDOkgCVuCexjzjooNm", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5QHdVgvFd_2Bn4fvmj_2F1aNtwM9q4oP_2B3VqXy_2BeJTiumoQ_3D?respondent_id=12731322211", "total_time": 31, "date_modified": "2021-06-10T10:50:54+00:00", "date_created": "2021-06-10T10:50:23+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785388/responses/12731322211", "pages": [{"id": "168831335", "questions": []}, {"id": "168831336", "questions": [{"id": "667461429", "answers": [{"choice_id": "4385172935"}]}, {"id": "667461433", "answers": [{"choice_id": "4385172950"}]}, {"id": "667461439", "answers": [{"choice_id": "4385172995"}]}, {"id": "667461441", "answers": [{"choice_id": "4385173023"}]}, {"id": "667461444", "answers": [{"choice_id": "4385173047"}]}]}, {"id": "168831340", "questions": [{"id": "667461449", "answers": [{"choice_id": "4385173077"}]}, {"id": "667461452", "answers": [{"choice_id": "4385173090"}]}, {"id": "667461454", "answers": [{"choice_id": "4385173102"}]}, {"id": "667461456", "answers": [{"choice_id": "4385173115"}]}, {"id": "667461462", "answers": [{"choice_id": "4385173170"}]}]}]}, "emitted_at": 1690184776898} -{"stream": "survey_responses", "data": {"id": "12731341043", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843624", "survey_id": "307785388", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=K51RB0BiJ39RT0TzevECB3iQ54AUAqtSYC5AGG_2Fb3JiEOIV74uOcQhGpP_2B9Xzumv", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5QHdVgvFd_2Bn4fvmj_2F1aNtwM9q4oP_2B3VqXy_2BeJTiumoQ_3D?respondent_id=12731341043", "total_time": 32, "date_modified": "2021-06-10T10:58:30+00:00", "date_created": "2021-06-10T10:57:58+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785388/responses/12731341043", "pages": [{"id": "168831335", "questions": []}, {"id": "168831336", "questions": [{"id": "667461429", "answers": [{"choice_id": "4385172932"}]}, {"id": "667461433", "answers": [{"choice_id": "4385172949"}]}, {"id": "667461439", "answers": [{"choice_id": "4385172991"}]}, {"id": "667461441", "answers": [{"choice_id": "4385173020"}]}, {"id": "667461444", "answers": [{"choice_id": "4385173042"}]}]}, {"id": "168831340", "questions": [{"id": "667461449", "answers": [{"choice_id": "4385173078"}]}, {"id": "667461452", "answers": [{"choice_id": "4385173088"}]}, {"id": "667461454", "answers": [{"choice_id": "4385173106"}]}, {"id": "667461456", "answers": [{"choice_id": "4385173114"}]}, {"id": "667461462", "answers": [{"choice_id": "4385173170"}]}]}]}, "emitted_at": 1690184776898} -{"stream": "survey_responses", "data": {"id": "12731360608", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843624", "survey_id": "307785388", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=joQxJxoDIf61JZdELXuIeomTgZquDM7T76ksdKdRUHFrgw1Him4UO_2BbCMcv0tXrC", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5QHdVgvFd_2Bn4fvmj_2F1aNtwM9q4oP_2B3VqXy_2BeJTiumoQ_3D?respondent_id=12731360608", "total_time": 31, "date_modified": "2021-06-10T11:06:19+00:00", "date_created": "2021-06-10T11:05:47+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785388/responses/12731360608", "pages": [{"id": "168831335", "questions": []}, {"id": "168831336", "questions": [{"id": "667461429", "answers": [{"choice_id": "4385172934"}]}, {"id": "667461433", "answers": [{"choice_id": "4385172951"}]}, {"id": "667461439", "answers": [{"choice_id": "4385172991"}]}, {"id": "667461441", "answers": [{"choice_id": "4385173022"}]}, {"id": "667461444", "answers": [{"choice_id": "4385173040"}]}]}, {"id": "168831340", "questions": [{"id": "667461449", "answers": [{"choice_id": "4385173077"}]}, {"id": "667461452", "answers": [{"choice_id": "4385173089"}]}, {"id": "667461454", "answers": [{"choice_id": "4385173103"}]}, {"id": "667461456", "answers": [{"choice_id": "4385173113"}]}, {"id": "667461462", "answers": [{"choice_id": "4385173167"}]}]}]}, "emitted_at": 1690184776898} -{"stream": "survey_responses", "data": {"id": "12731032160", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843634", "survey_id": "307785415", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=k5_2BoYr8L1JivcJuf1rH2GX_2BHMQI9F9YIVUvhVjaOj1ZaTr1AN4RrYerLAgE0dex5", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D?respondent_id=12731032160", "total_time": 30, "date_modified": "2021-06-10T08:42:04+00:00", "date_created": "2021-06-10T08:41:34+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785415/responses/12731032160", "pages": [{"id": "168831392", "questions": []}, {"id": "168831393", "questions": [{"id": "667461690", "answers": [{"choice_id": "4385174702"}]}, {"id": "667461777", "answers": [{"choice_id": "4385174976"}]}, {"id": "667461791", "answers": [{"choice_id": "4385175058"}]}, {"id": "667461794", "answers": [{"choice_id": "4385175072"}]}, {"id": "667461797", "answers": [{"choice_id": "4385175099"}]}]}, {"id": "168831402", "questions": [{"id": "667461801", "answers": [{"choice_id": "4385175117"}]}, {"id": "667461805", "answers": [{"choice_id": "4385175148"}]}, {"id": "667461811", "answers": [{"choice_id": "4385175199"}]}, {"id": "667461833", "answers": [{"choice_id": "4385175281"}]}, {"id": "667461834", "answers": [{"choice_id": "4385175295"}]}]}]}, "emitted_at": 1690184777745} -{"stream": "survey_responses", "data": {"id": "12731039882", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843634", "survey_id": "307785415", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=ueh4txTCmZOSY56GbXU_2BI53xrA9e5AOQ2EOFNrDgskH2E4ZDeJ4HcDXDLxDPUl8L", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D?respondent_id=12731039882", "total_time": 29, "date_modified": "2021-06-10T08:46:16+00:00", "date_created": "2021-06-10T08:45:47+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785415/responses/12731039882", "pages": [{"id": "168831392", "questions": []}, {"id": "168831393", "questions": [{"id": "667461690", "answers": [{"choice_id": "4385174702"}]}, {"id": "667461777", "answers": [{"choice_id": "4385174972"}]}, {"id": "667461791", "answers": [{"choice_id": "4385175057"}]}, {"id": "667461794", "answers": [{"choice_id": "4385175072"}]}, {"id": "667461797", "answers": [{"choice_id": "4385175100"}]}]}, {"id": "168831402", "questions": [{"id": "667461801", "answers": [{"choice_id": "4385175116"}]}, {"id": "667461805", "answers": [{"choice_id": "4385175148"}]}, {"id": "667461811", "answers": [{"choice_id": "4385175200"}]}, {"id": "667461833", "answers": [{"choice_id": "4385175283"}]}, {"id": "667461834", "answers": [{"choice_id": "4385175296"}]}]}]}, "emitted_at": 1690184777746} -{"stream": "survey_responses", "data": {"id": "12731054183", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843634", "survey_id": "307785415", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=6Utx9fT_2BqzHMlSdMksExZ0ph_2BvNfvb4CBc5S5YuvlA0kAV4HfmiM38IyZMLnHuoK", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D?respondent_id=12731054183", "total_time": 29, "date_modified": "2021-06-10T08:54:02+00:00", "date_created": "2021-06-10T08:53:33+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785415/responses/12731054183", "pages": [{"id": "168831392", "questions": []}, {"id": "168831393", "questions": [{"id": "667461690", "answers": [{"choice_id": "4385174700"}]}, {"id": "667461777", "answers": [{"choice_id": "4385174971"}]}, {"id": "667461791", "answers": [{"choice_id": "4385175055"}]}, {"id": "667461794", "answers": [{"choice_id": "4385175070"}]}, {"id": "667461797", "answers": [{"choice_id": "4385175102"}]}]}, {"id": "168831402", "questions": [{"id": "667461801", "answers": [{"choice_id": "4385175120"}]}, {"id": "667461805", "answers": [{"choice_id": "4385175152"}]}, {"id": "667461811", "answers": [{"choice_id": "4385175200"}]}, {"id": "667461833", "answers": [{"choice_id": "4385175284"}]}, {"id": "667461834", "answers": [{"choice_id": "4385175296"}]}]}]}, "emitted_at": 1690184777746} -{"stream": "survey_responses", "data": {"id": "12731068596", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843634", "survey_id": "307785415", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=Y_2BpA02_2Br_2B8t0cCK8Ua99SpTBk8YAUIPP5rIlV7TWE2bRQsxJ7Wy8nlQiPeSSH1Aj", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D?respondent_id=12731068596", "total_time": 29, "date_modified": "2021-06-10T09:01:43+00:00", "date_created": "2021-06-10T09:01:13+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785415/responses/12731068596", "pages": [{"id": "168831392", "questions": []}, {"id": "168831393", "questions": [{"id": "667461690", "answers": [{"choice_id": "4385174700"}]}, {"id": "667461777", "answers": [{"choice_id": "4385174974"}]}, {"id": "667461791", "answers": [{"choice_id": "4385175058"}]}, {"id": "667461794", "answers": [{"choice_id": "4385175070"}]}, {"id": "667461797", "answers": [{"choice_id": "4385175102"}]}]}, {"id": "168831402", "questions": [{"id": "667461801", "answers": [{"choice_id": "4385175117"}]}, {"id": "667461805", "answers": [{"choice_id": "4385175147"}]}, {"id": "667461811", "answers": [{"choice_id": "4385175200"}]}, {"id": "667461833", "answers": [{"choice_id": "4385175279"}]}, {"id": "667461834", "answers": [{"choice_id": "4385175296"}]}]}]}, "emitted_at": 1690184777746} -{"stream": "survey_responses", "data": {"id": "12731084672", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843634", "survey_id": "307785415", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=oDcOq8OB_2BpjrJzvV_2B78DTMP7bGXGg_2FFfY5m4CGX3bYG_2BHn3WUTPsqZIYhsLwynBU", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D?respondent_id=12731084672", "total_time": 29, "date_modified": "2021-06-10T09:09:28+00:00", "date_created": "2021-06-10T09:08:59+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785415/responses/12731084672", "pages": [{"id": "168831392", "questions": []}, {"id": "168831393", "questions": [{"id": "667461690", "answers": [{"choice_id": "4385174702"}]}, {"id": "667461777", "answers": [{"choice_id": "4385174970"}]}, {"id": "667461791", "answers": [{"choice_id": "4385175057"}]}, {"id": "667461794", "answers": [{"choice_id": "4385175073"}]}, {"id": "667461797", "answers": [{"choice_id": "4385175097"}]}]}, {"id": "168831402", "questions": [{"id": "667461801", "answers": [{"choice_id": "4385175115"}]}, {"id": "667461805", "answers": [{"choice_id": "4385175146"}]}, {"id": "667461811", "answers": [{"choice_id": "4385175200"}]}, {"id": "667461833", "answers": [{"choice_id": "4385175284"}]}, {"id": "667461834", "answers": [{"choice_id": "4385175294"}]}]}]}, "emitted_at": 1690184777747} -{"stream": "survey_responses", "data": {"id": "12731100913", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843634", "survey_id": "307785415", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=5kE4DYJKeO6DrzEs62N47RQ2rioxaIjqDc5G3RxsiNkZeZZ2L6PL09YQRzsKPf5P", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D?respondent_id=12731100913", "total_time": 29, "date_modified": "2021-06-10T09:17:08+00:00", "date_created": "2021-06-10T09:16:39+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785415/responses/12731100913", "pages": [{"id": "168831392", "questions": []}, {"id": "168831393", "questions": [{"id": "667461690", "answers": [{"choice_id": "4385174701"}]}, {"id": "667461777", "answers": [{"choice_id": "4385174975"}]}, {"id": "667461791", "answers": [{"choice_id": "4385175058"}]}, {"id": "667461794", "answers": [{"choice_id": "4385175073"}]}, {"id": "667461797", "answers": [{"choice_id": "4385175102"}]}]}, {"id": "168831402", "questions": [{"id": "667461801", "answers": [{"choice_id": "4385175121"}]}, {"id": "667461805", "answers": [{"choice_id": "4385175148"}]}, {"id": "667461811", "answers": [{"choice_id": "4385175202"}]}, {"id": "667461833", "answers": [{"choice_id": "4385175279"}]}, {"id": "667461834", "answers": [{"choice_id": "4385175295"}]}]}]}, "emitted_at": 1690184777747} -{"stream": "survey_responses", "data": {"id": "12731117664", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843634", "survey_id": "307785415", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=MK3EF7tVkhtzcIOL0XN0yqP2HY1_2Fqf1WCffD2MxDMDr4xww37ssvSgs9tAUezwLH", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D?respondent_id=12731117664", "total_time": 29, "date_modified": "2021-06-10T09:24:48+00:00", "date_created": "2021-06-10T09:24:19+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785415/responses/12731117664", "pages": [{"id": "168831392", "questions": []}, {"id": "168831393", "questions": [{"id": "667461690", "answers": [{"choice_id": "4385174702"}]}, {"id": "667461777", "answers": [{"choice_id": "4385174977"}]}, {"id": "667461791", "answers": [{"choice_id": "4385175057"}]}, {"id": "667461794", "answers": [{"choice_id": "4385175073"}]}, {"id": "667461797", "answers": [{"choice_id": "4385175103"}]}]}, {"id": "168831402", "questions": [{"id": "667461801", "answers": [{"choice_id": "4385175119"}]}, {"id": "667461805", "answers": [{"choice_id": "4385175147"}]}, {"id": "667461811", "answers": [{"choice_id": "4385175204"}]}, {"id": "667461833", "answers": [{"choice_id": "4385175279"}]}, {"id": "667461834", "answers": [{"choice_id": "4385175294"}]}]}]}, "emitted_at": 1690184777748} -{"stream": "survey_responses", "data": {"id": "12731134542", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843634", "survey_id": "307785415", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=vaKWu1zSFOcQlKfr2_2FhbI4n7QTTLkC4cwx4WG8fdzf9IVtI3MzE7e_2FRnH89alHIc", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D?respondent_id=12731134542", "total_time": 29, "date_modified": "2021-06-10T09:32:32+00:00", "date_created": "2021-06-10T09:32:03+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785415/responses/12731134542", "pages": [{"id": "168831392", "questions": []}, {"id": "168831393", "questions": [{"id": "667461690", "answers": [{"choice_id": "4385174701"}]}, {"id": "667461777", "answers": [{"choice_id": "4385174973"}]}, {"id": "667461791", "answers": [{"choice_id": "4385175057"}]}, {"id": "667461794", "answers": [{"choice_id": "4385175070"}]}, {"id": "667461797", "answers": [{"choice_id": "4385175099"}]}]}, {"id": "168831402", "questions": [{"id": "667461801", "answers": [{"choice_id": "4385175118"}]}, {"id": "667461805", "answers": [{"choice_id": "4385175149"}]}, {"id": "667461811", "answers": [{"choice_id": "4385175203"}]}, {"id": "667461833", "answers": [{"choice_id": "4385175284"}]}, {"id": "667461834", "answers": [{"choice_id": "4385175296"}]}]}]}, "emitted_at": 1690184777748} -{"stream": "survey_responses", "data": {"id": "12731152328", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843634", "survey_id": "307785415", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=IVs8i7sYR_2FPHAlgRFsF_2FqtmR8VKhb4HLvOemx_2F_2Bx31hPXV_2FqSHT1WSMqyx9fGWRv", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D?respondent_id=12731152328", "total_time": 29, "date_modified": "2021-06-10T09:40:14+00:00", "date_created": "2021-06-10T09:39:45+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785415/responses/12731152328", "pages": [{"id": "168831392", "questions": []}, {"id": "168831393", "questions": [{"id": "667461690", "answers": [{"choice_id": "4385174702"}]}, {"id": "667461777", "answers": [{"choice_id": "4385174970"}]}, {"id": "667461791", "answers": [{"choice_id": "4385175056"}]}, {"id": "667461794", "answers": [{"choice_id": "4385175073"}]}, {"id": "667461797", "answers": [{"choice_id": "4385175103"}]}]}, {"id": "168831402", "questions": [{"id": "667461801", "answers": [{"choice_id": "4385175116"}]}, {"id": "667461805", "answers": [{"choice_id": "4385175152"}]}, {"id": "667461811", "answers": [{"choice_id": "4385175204"}]}, {"id": "667461833", "answers": [{"choice_id": "4385175282"}]}, {"id": "667461834", "answers": [{"choice_id": "4385175295"}]}]}]}, "emitted_at": 1690184777748} -{"stream": "survey_responses", "data": {"id": "12731169686", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843634", "survey_id": "307785415", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=I6lNoZHdDu_2FqdBBrqWGXmuNL5Ezwzz6bS8n4LrSLKUFZ5_2FTaf8k7MnS_2FxWjh5UYN", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D?respondent_id=12731169686", "total_time": 29, "date_modified": "2021-06-10T09:48:00+00:00", "date_created": "2021-06-10T09:47:30+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785415/responses/12731169686", "pages": [{"id": "168831392", "questions": []}, {"id": "168831393", "questions": [{"id": "667461690", "answers": [{"choice_id": "4385174701"}]}, {"id": "667461777", "answers": [{"choice_id": "4385174973"}]}, {"id": "667461791", "answers": [{"choice_id": "4385175056"}]}, {"id": "667461794", "answers": [{"choice_id": "4385175070"}]}, {"id": "667461797", "answers": [{"choice_id": "4385175102"}]}]}, {"id": "168831402", "questions": [{"id": "667461801", "answers": [{"choice_id": "4385175116"}]}, {"id": "667461805", "answers": [{"choice_id": "4385175152"}]}, {"id": "667461811", "answers": [{"choice_id": "4385175200"}]}, {"id": "667461833", "answers": [{"choice_id": "4385175282"}]}, {"id": "667461834", "answers": [{"choice_id": "4385175295"}]}]}]}, "emitted_at": 1690184777749} -{"stream": "survey_responses", "data": {"id": "12731187603", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843634", "survey_id": "307785415", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=FiK9D4oAC52vzwBrNjbYa9wHiLT6nJc_2BvhbFVDpRqaS4N9_2FeTfbrKcUlu1iDPmLr", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D?respondent_id=12731187603", "total_time": 32, "date_modified": "2021-06-10T09:56:10+00:00", "date_created": "2021-06-10T09:55:38+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785415/responses/12731187603", "pages": [{"id": "168831392", "questions": []}, {"id": "168831393", "questions": [{"id": "667461690", "answers": [{"choice_id": "4385174701"}]}, {"id": "667461777", "answers": [{"choice_id": "4385174970"}]}, {"id": "667461791", "answers": [{"choice_id": "4385175058"}]}, {"id": "667461794", "answers": [{"choice_id": "4385175070"}]}, {"id": "667461797", "answers": [{"choice_id": "4385175101"}]}]}, {"id": "168831402", "questions": [{"id": "667461801", "answers": [{"choice_id": "4385175116"}]}, {"id": "667461805", "answers": [{"choice_id": "4385175150"}]}, {"id": "667461811", "answers": [{"choice_id": "4385175201"}]}, {"id": "667461833", "answers": [{"choice_id": "4385175281"}]}, {"id": "667461834", "answers": [{"choice_id": "4385175295"}]}]}]}, "emitted_at": 1690184777749} -{"stream": "survey_responses", "data": {"id": "12731207265", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843634", "survey_id": "307785415", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=DxJNIKLDsZB0jSdF3mQJziPzHSc1QhKVEAwTlaWxpmlfbYqie5vDH6pcFcXka8HP", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D?respondent_id=12731207265", "total_time": 29, "date_modified": "2021-06-10T10:04:24+00:00", "date_created": "2021-06-10T10:03:55+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785415/responses/12731207265", "pages": [{"id": "168831392", "questions": []}, {"id": "168831393", "questions": [{"id": "667461690", "answers": [{"choice_id": "4385174700"}]}, {"id": "667461777", "answers": [{"choice_id": "4385174976"}]}, {"id": "667461791", "answers": [{"choice_id": "4385175055"}]}, {"id": "667461794", "answers": [{"choice_id": "4385175071"}]}, {"id": "667461797", "answers": [{"choice_id": "4385175102"}]}]}, {"id": "168831402", "questions": [{"id": "667461801", "answers": [{"choice_id": "4385175121"}]}, {"id": "667461805", "answers": [{"choice_id": "4385175150"}]}, {"id": "667461811", "answers": [{"choice_id": "4385175203"}]}, {"id": "667461833", "answers": [{"choice_id": "4385175280"}]}, {"id": "667461834", "answers": [{"choice_id": "4385175296"}]}]}]}, "emitted_at": 1690184777749} -{"stream": "survey_responses", "data": {"id": "12731227055", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843634", "survey_id": "307785415", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=hifnhc2BvbuUJ2eC5tqTz1sgyzN6BVSqhHrOZMc67UAWrZ86iRSDesnY2EqLW3sf", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D?respondent_id=12731227055", "total_time": 30, "date_modified": "2021-06-10T10:12:13+00:00", "date_created": "2021-06-10T10:11:43+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785415/responses/12731227055", "pages": [{"id": "168831392", "questions": []}, {"id": "168831393", "questions": [{"id": "667461690", "answers": [{"choice_id": "4385174702"}]}, {"id": "667461777", "answers": [{"choice_id": "4385174974"}]}, {"id": "667461791", "answers": [{"choice_id": "4385175057"}]}, {"id": "667461794", "answers": [{"choice_id": "4385175073"}]}, {"id": "667461797", "answers": [{"choice_id": "4385175096"}]}]}, {"id": "168831402", "questions": [{"id": "667461801", "answers": [{"choice_id": "4385175121"}]}, {"id": "667461805", "answers": [{"choice_id": "4385175148"}]}, {"id": "667461811", "answers": [{"choice_id": "4385175202"}]}, {"id": "667461833", "answers": [{"choice_id": "4385175279"}]}, {"id": "667461834", "answers": [{"choice_id": "4385175295"}]}]}]}, "emitted_at": 1690184777749} -{"stream": "survey_responses", "data": {"id": "12731246288", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843634", "survey_id": "307785415", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=9TFIXzJKIgVd5dHU3k_2BM3i00wcrMbvDj3vBf1YYe_2FDh5Hv2kyiiZ13WyXCNq5Og9", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D?respondent_id=12731246288", "total_time": 29, "date_modified": "2021-06-10T10:20:04+00:00", "date_created": "2021-06-10T10:19:35+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785415/responses/12731246288", "pages": [{"id": "168831392", "questions": []}, {"id": "168831393", "questions": [{"id": "667461690", "answers": [{"choice_id": "4385174700"}]}, {"id": "667461777", "answers": [{"choice_id": "4385174974"}]}, {"id": "667461791", "answers": [{"choice_id": "4385175057"}]}, {"id": "667461794", "answers": [{"choice_id": "4385175073"}]}, {"id": "667461797", "answers": [{"choice_id": "4385175098"}]}]}, {"id": "168831402", "questions": [{"id": "667461801", "answers": [{"choice_id": "4385175117"}]}, {"id": "667461805", "answers": [{"choice_id": "4385175151"}]}, {"id": "667461811", "answers": [{"choice_id": "4385175198"}]}, {"id": "667461833", "answers": [{"choice_id": "4385175284"}]}, {"id": "667461834", "answers": [{"choice_id": "4385175294"}]}]}]}, "emitted_at": 1690184777750} -{"stream": "survey_responses", "data": {"id": "12731264698", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843634", "survey_id": "307785415", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=s1IbylETE10QgXtryui0Bdz_2Bj5IU5LzqBMHDbnhunQHBP5CedlG7l56df1BhmnL2", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D?respondent_id=12731264698", "total_time": 29, "date_modified": "2021-06-10T10:27:44+00:00", "date_created": "2021-06-10T10:27:15+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785415/responses/12731264698", "pages": [{"id": "168831392", "questions": []}, {"id": "168831393", "questions": [{"id": "667461690", "answers": [{"choice_id": "4385174700"}]}, {"id": "667461777", "answers": [{"choice_id": "4385174977"}]}, {"id": "667461791", "answers": [{"choice_id": "4385175058"}]}, {"id": "667461794", "answers": [{"choice_id": "4385175073"}]}, {"id": "667461797", "answers": [{"choice_id": "4385175096"}]}]}, {"id": "168831402", "questions": [{"id": "667461801", "answers": [{"choice_id": "4385175119"}]}, {"id": "667461805", "answers": [{"choice_id": "4385175146"}]}, {"id": "667461811", "answers": [{"choice_id": "4385175199"}]}, {"id": "667461833", "answers": [{"choice_id": "4385175281"}]}, {"id": "667461834", "answers": [{"choice_id": "4385175294"}]}]}]}, "emitted_at": 1690184777750} -{"stream": "survey_responses", "data": {"id": "12731284644", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843634", "survey_id": "307785415", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=gRmE1HF_2BBq7nrEqx2yWW8g0zVXXWYXVJ2u7ohGYqL6fyVTm6aN1WqlcfWrYGOvpl", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D?respondent_id=12731284644", "total_time": 32, "date_modified": "2021-06-10T10:35:47+00:00", "date_created": "2021-06-10T10:35:15+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785415/responses/12731284644", "pages": [{"id": "168831392", "questions": []}, {"id": "168831393", "questions": [{"id": "667461690", "answers": [{"choice_id": "4385174700"}]}, {"id": "667461777", "answers": [{"choice_id": "4385174972"}]}, {"id": "667461791", "answers": [{"choice_id": "4385175055"}]}, {"id": "667461794", "answers": [{"choice_id": "4385175073"}]}, {"id": "667461797", "answers": [{"choice_id": "4385175098"}]}]}, {"id": "168831402", "questions": [{"id": "667461801", "answers": [{"choice_id": "4385175116"}]}, {"id": "667461805", "answers": [{"choice_id": "4385175146"}]}, {"id": "667461811", "answers": [{"choice_id": "4385175202"}]}, {"id": "667461833", "answers": [{"choice_id": "4385175283"}]}, {"id": "667461834", "answers": [{"choice_id": "4385175296"}]}]}]}, "emitted_at": 1690184777750} -{"stream": "survey_responses", "data": {"id": "12731303856", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843634", "survey_id": "307785415", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=R15XZO_2FAOou8IYPELNzZWNvHyYHz5f2yg5XY9DyTPB_2F1F9zoalgsQcLiDzAzYgfp", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D?respondent_id=12731303856", "total_time": 31, "date_modified": "2021-06-10T10:43:31+00:00", "date_created": "2021-06-10T10:43:00+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785415/responses/12731303856", "pages": [{"id": "168831392", "questions": []}, {"id": "168831393", "questions": [{"id": "667461690", "answers": [{"choice_id": "4385174700"}]}, {"id": "667461777", "answers": [{"choice_id": "4385174972"}]}, {"id": "667461791", "answers": [{"choice_id": "4385175056"}]}, {"id": "667461794", "answers": [{"choice_id": "4385175072"}]}, {"id": "667461797", "answers": [{"choice_id": "4385175101"}]}]}, {"id": "168831402", "questions": [{"id": "667461801", "answers": [{"choice_id": "4385175115"}]}, {"id": "667461805", "answers": [{"choice_id": "4385175148"}]}, {"id": "667461811", "answers": [{"choice_id": "4385175198"}]}, {"id": "667461833", "answers": [{"choice_id": "4385175284"}]}, {"id": "667461834", "answers": [{"choice_id": "4385175296"}]}]}]}, "emitted_at": 1690184777751} -{"stream": "survey_responses", "data": {"id": "12731323720", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843634", "survey_id": "307785415", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=EiMwydLUjYosaJKqAS7XvTZYSRk1ONKVDTvKy287UbU9nHh4OUTunOiBk4lTOMIX", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D?respondent_id=12731323720", "total_time": 29, "date_modified": "2021-06-10T10:51:27+00:00", "date_created": "2021-06-10T10:50:58+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785415/responses/12731323720", "pages": [{"id": "168831392", "questions": []}, {"id": "168831393", "questions": [{"id": "667461690", "answers": [{"choice_id": "4385174700"}]}, {"id": "667461777", "answers": [{"choice_id": "4385174977"}]}, {"id": "667461791", "answers": [{"choice_id": "4385175057"}]}, {"id": "667461794", "answers": [{"choice_id": "4385175071"}]}, {"id": "667461797", "answers": [{"choice_id": "4385175103"}]}]}, {"id": "168831402", "questions": [{"id": "667461801", "answers": [{"choice_id": "4385175119"}]}, {"id": "667461805", "answers": [{"choice_id": "4385175152"}]}, {"id": "667461811", "answers": [{"choice_id": "4385175201"}]}, {"id": "667461833", "answers": [{"choice_id": "4385175282"}]}, {"id": "667461834", "answers": [{"choice_id": "4385175295"}]}]}]}, "emitted_at": 1690184777751} -{"stream": "survey_responses", "data": {"id": "12731342570", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405843634", "survey_id": "307785415", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=26PW15rB0RZChb0Db5YdoDywvKkwI95rERJAB_2BeZi_2F8EbRztchqEvwih9zygXayj", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D?respondent_id=12731342570", "total_time": 30, "date_modified": "2021-06-10T10:59:05+00:00", "date_created": "2021-06-10T10:58:35+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785415/responses/12731342570", "pages": [{"id": "168831392", "questions": []}, {"id": "168831393", "questions": [{"id": "667461690", "answers": [{"choice_id": "4385174701"}]}, {"id": "667461777", "answers": [{"choice_id": "4385174971"}]}, {"id": "667461791", "answers": [{"choice_id": "4385175056"}]}, {"id": "667461794", "answers": [{"choice_id": "4385175073"}]}, {"id": "667461797", "answers": [{"choice_id": "4385175099"}]}]}, {"id": "168831402", "questions": [{"id": "667461801", "answers": [{"choice_id": "4385175116"}]}, {"id": "667461805", "answers": [{"choice_id": "4385175146"}]}, {"id": "667461811", "answers": [{"choice_id": "4385175202"}]}, {"id": "667461833", "answers": [{"choice_id": "4385175283"}]}, {"id": "667461834", "answers": [{"choice_id": "4385175295"}]}]}]}, "emitted_at": 1690184777751} -{"stream": "survey_responses", "data": {"id": "12731362003", "recipient_id": "", "collection_mode": "default", "response_status": "partial", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "91.242.192.88", "logic_path": {}, "metadata": {"contact": {}}, "page_path": ["168831392", "168831393", "168831402"], "collector_id": "405843634", "survey_id": "307785415", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=LFmWWSa5B0YPzIswFuEpoIxYrVHHhorNICipbTUhp6Z3p5apUZsA3ENZGZbaPFH0", "analyze_url": "https://www.surveymonkey.com/analyze/browse/BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D?respondent_id=12731362003", "total_time": 19, "date_modified": "2021-06-10T11:06:42+00:00", "date_created": "2021-06-10T11:06:23+00:00", "href": "https://api.surveymonkey.com/v3/surveys/307785415/responses/12731362003", "pages": [{"id": "168831392", "questions": []}, {"id": "168831393", "questions": [{"id": "667461690", "answers": [{"choice_id": "4385174702"}]}, {"id": "667461777", "answers": [{"choice_id": "4385174976"}]}, {"id": "667461791", "answers": [{"choice_id": "4385175058"}]}, {"id": "667461794", "answers": [{"choice_id": "4385175071"}]}, {"id": "667461797", "answers": [{"choice_id": "4385175099"}]}]}, {"id": "168831402", "questions": []}]}, "emitted_at": 1690184777752} -{"stream":"survey_pages","data":{"title":"sy4ara","description":"Сурвейманки жлобы","position":1,"question_count":13,"id":"165250506","href":"https://api.surveymonkey.com/v3/surveys/306079584/pages/165250506"},"emitted_at":1674149681286} -{"stream":"survey_pages","data":{"title":"","description":"","position":1,"question_count":0,"id":"168831413","href":"https://api.surveymonkey.com/v3/surveys/307785429/pages/168831413"},"emitted_at":1674149682414} -{"stream":"survey_pages","data":{"title":"xsgqdhdakh7x","description":"wlju6xsgkxyig0s1","position":2,"question_count":5,"id":"168831415","href":"https://api.surveymonkey.com/v3/surveys/307785429/pages/168831415"},"emitted_at":1674149682415} -{"stream":"survey_pages","data":{"title":"ajsn8v0tvicgt7u063","description":"dcwmhxdx6p8buu","position":3,"question_count":5,"id":"168831437","href":"https://api.surveymonkey.com/v3/surveys/307785429/pages/168831437"},"emitted_at":1674149682415} -{"stream":"survey_pages","data":{"title":"","description":"","position":1,"question_count":0,"id":"168831459","href":"https://api.surveymonkey.com/v3/surveys/307785444/pages/168831459"},"emitted_at":1674149683744} -{"stream":"survey_pages","data":{"title":"ijw0pw2tlfb0vd3","description":"k8tycaedxbl4","position":2,"question_count":5,"id":"168831461","href":"https://api.surveymonkey.com/v3/surveys/307785444/pages/168831461"},"emitted_at":1674149683744} -{"stream":"survey_pages","data":{"title":"krd3l3bj7vaym6pc4","description":"oy458fugj0k","position":3,"question_count":5,"id":"168831467","href":"https://api.surveymonkey.com/v3/surveys/307785444/pages/168831467"},"emitted_at":1674149683745} -{"stream":"survey_pages","data":{"title":"","description":"","position":1,"question_count":0,"id":"168831344","href":"https://api.surveymonkey.com/v3/surveys/307785394/pages/168831344"},"emitted_at":1674149684444} -{"stream":"survey_pages","data":{"title":"q4fuvltqc6","description":"7kfibw7aer8mr937a3ko","position":2,"question_count":5,"id":"168831345","href":"https://api.surveymonkey.com/v3/surveys/307785394/pages/168831345"},"emitted_at":1674149684445} -{"stream":"survey_pages","data":{"title":"p5sdpb0pus6","description":"o9gbkpmfik2x","position":3,"question_count":5,"id":"168831352","href":"https://api.surveymonkey.com/v3/surveys/307785394/pages/168831352"},"emitted_at":1674149684445} -{"stream":"survey_pages","data":{"title":"","description":"","position":1,"question_count":0,"id":"168831357","href":"https://api.surveymonkey.com/v3/surveys/307785402/pages/168831357"},"emitted_at":1674149685687} -{"stream":"survey_pages","data":{"title":"133nvr2cx99r","description":"shwtfx0edv","position":2,"question_count":5,"id":"168831358","href":"https://api.surveymonkey.com/v3/surveys/307785402/pages/168831358"},"emitted_at":1674149685687} -{"stream":"survey_pages","data":{"title":"otgwn5b4wicdemu1q","description":"s3ndgrck3qr4898qwtgh","position":3,"question_count":5,"id":"168831365","href":"https://api.surveymonkey.com/v3/surveys/307785402/pages/168831365"},"emitted_at":1674149685687} -{"stream":"survey_pages","data":{"title":"","description":"","position":1,"question_count":0,"id":"168831381","href":"https://api.surveymonkey.com/v3/surveys/307785408/pages/168831381"},"emitted_at":1674149686909} -{"stream":"survey_pages","data":{"title":"ooj54g8q2thh","description":"8hrryr258se","position":2,"question_count":5,"id":"168831382","href":"https://api.surveymonkey.com/v3/surveys/307785408/pages/168831382"},"emitted_at":1674149686909} -{"stream":"survey_pages","data":{"title":"mva5ojqgmx6wnv62as","description":"wq6q460p143mi0","position":3,"question_count":5,"id":"168831388","href":"https://api.surveymonkey.com/v3/surveys/307785408/pages/168831388"},"emitted_at":1674149686909} -{"stream":"survey_pages","data":{"title":"","description":"","position":1,"question_count":0,"id":"168830049","href":"https://api.surveymonkey.com/v3/surveys/307784834/pages/168830049"},"emitted_at":1674149687592} -{"stream":"survey_pages","data":{"title":"v3q97ckq2438fqkppcyn","description":"oforikk3wu4gin","position":2,"question_count":5,"id":"168830050","href":"https://api.surveymonkey.com/v3/surveys/307784834/pages/168830050"},"emitted_at":1674149687593} -{"stream":"survey_pages","data":{"title":"t57ybjyll8fwgu39w","description":"ttlkuqp07ua6kpsh","position":3,"question_count":5,"id":"168830060","href":"https://api.surveymonkey.com/v3/surveys/307784834/pages/168830060"},"emitted_at":1674149687594} -{"stream":"survey_pages","data":{"title":"","description":"","position":1,"question_count":0,"id":"168831470","href":"https://api.surveymonkey.com/v3/surveys/307785448/pages/168831470"},"emitted_at":1674149688355} -{"stream":"survey_pages","data":{"title":"q3wpp1ufpi5r058o","description":"ay91aymge2vuacmrl9co","position":2,"question_count":5,"id":"168831471","href":"https://api.surveymonkey.com/v3/surveys/307785448/pages/168831471"},"emitted_at":1674149688356} -{"stream":"survey_pages","data":{"title":"m345ab1u6tjjo4nn3d","description":"ec22umvdkhxd0ne575","position":3,"question_count":5,"id":"168831478","href":"https://api.surveymonkey.com/v3/surveys/307785448/pages/168831478"},"emitted_at":1674149688357} -{"stream":"survey_pages","data":{"title":"","description":"","position":1,"question_count":0,"id":"168830093","href":"https://api.surveymonkey.com/v3/surveys/307784863/pages/168830093"},"emitted_at":1674149689582} -{"stream":"survey_pages","data":{"title":"7t9dgejtlw5wsofbt","description":"mfqrejibgc831bp31","position":2,"question_count":5,"id":"168830094","href":"https://api.surveymonkey.com/v3/surveys/307784863/pages/168830094"},"emitted_at":1674149689583} -{"stream":"survey_pages","data":{"title":"9kri0ao4fh8e3i0j2hms","description":"dh7qg9jc1k65x","position":3,"question_count":5,"id":"168830108","href":"https://api.surveymonkey.com/v3/surveys/307784863/pages/168830108"},"emitted_at":1674149689583} -{"stream":"survey_pages","data":{"title":"","description":"","position":1,"question_count":0,"id":"168830067","href":"https://api.surveymonkey.com/v3/surveys/307784846/pages/168830067"},"emitted_at":1674149690505} -{"stream":"survey_pages","data":{"title":"9vpm4e5ecjis5hge0p","description":"r6e38n2fp33skkjrl","position":2,"question_count":5,"id":"168830068","href":"https://api.surveymonkey.com/v3/surveys/307784846/pages/168830068"},"emitted_at":1674149690506} -{"stream":"survey_pages","data":{"title":"nvv6kl2njpt5b1l2p","description":"rd2j09sxv4ssu976g","position":3,"question_count":5,"id":"168830074","href":"https://api.surveymonkey.com/v3/surveys/307784846/pages/168830074"},"emitted_at":1674149690506} -{"stream":"survey_pages","data":{"title":"","description":"","position":1,"question_count":0,"id":"168830082","href":"https://api.surveymonkey.com/v3/surveys/307784856/pages/168830082"},"emitted_at":1674149691681} -{"stream":"survey_pages","data":{"title":"v1yxmq6n1ix","description":"aeoyc3hiak9vui1hevm","position":2,"question_count":5,"id":"168830083","href":"https://api.surveymonkey.com/v3/surveys/307784856/pages/168830083"},"emitted_at":1674149691682} -{"stream":"survey_pages","data":{"title":"g84sqoltkc2jen8iaj0","description":"ss2439kly1u4j1k1","position":3,"question_count":5,"id":"168830087","href":"https://api.surveymonkey.com/v3/surveys/307784856/pages/168830087"},"emitted_at":1674149691682} -{"stream":"survey_pages","data":{"title":"","description":"","position":1,"question_count":0,"id":"168831335","href":"https://api.surveymonkey.com/v3/surveys/307785388/pages/168831335"},"emitted_at":1674149692346} -{"stream":"survey_pages","data":{"title":"k91l1laduo8","description":"4tmb1eke23bi1l2ev","position":2,"question_count":5,"id":"168831336","href":"https://api.surveymonkey.com/v3/surveys/307785388/pages/168831336"},"emitted_at":1674149692347} -{"stream":"survey_pages","data":{"title":"gisj5ms868kxxv","description":"4g1iiqg0sa15pbk","position":3,"question_count":5,"id":"168831340","href":"https://api.surveymonkey.com/v3/surveys/307785388/pages/168831340"},"emitted_at":1674149692348} -{"stream":"survey_pages","data":{"title":"","description":"","position":1,"question_count":0,"id":"168831392","href":"https://api.surveymonkey.com/v3/surveys/307785415/pages/168831392"},"emitted_at":1674149693064} -{"stream":"survey_pages","data":{"title":"p71uerk2uh7k5","description":"92cb9d98j15jmfo","position":2,"question_count":5,"id":"168831393","href":"https://api.surveymonkey.com/v3/surveys/307785415/pages/168831393"},"emitted_at":1674149693064} -{"stream":"survey_pages","data":{"title":"bqd6mn6bdgv5u1rnstkx","description":"e0jrpexyx6t","position":3,"question_count":5,"id":"168831402","href":"https://api.surveymonkey.com/v3/surveys/307785415/pages/168831402"},"emitted_at":1674149693065} -{"stream":"survey_pages","data":{"title":"","description":"","position":1,"question_count":0,"id":"36710109","href":"https://api.surveymonkey.com/v3/surveys/510388524/pages/36710109"},"emitted_at":1674149694191} -{"stream":"survey_questions","data":{"id":"652286724","position":1,"visible":true,"family":"click_map","subtype":"single","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"Click on the area you like best about this product.","image":{"url":"https://surveymonkey-assets.s3.amazonaws.com/survey/306079584/20535460-8f99-41b0-ac96-b9f4f2aecb96.png"}}],"href":"https://api.surveymonkey.com/v3/surveys/306079584/pages/165250506/questions/652286724","answers":{"rows":[{"position":1,"visible":true,"text":"Click 1","id":"4285525098"}]},"page_id":"165250506"},"emitted_at":1674149694470} -{"stream":"survey_questions","data":{"id":"652286725","position":2,"visible":true,"family":"click_map","subtype":"single","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"Click on the area you like least about this product.","image":{"url":"https://surveymonkey-assets.s3.amazonaws.com/survey/306079584/79215d25-9dbc-4870-91cd-3a36778aae52.png"}}],"href":"https://api.surveymonkey.com/v3/surveys/306079584/pages/165250506/questions/652286725","answers":{"rows":[{"position":1,"visible":true,"text":"Click 1","id":"4285525102"}]},"page_id":"165250506"},"emitted_at":1674149694470} -{"stream":"survey_questions","data":{"id":"652286726","position":3,"visible":true,"family":"open_ended","subtype":"essay","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"Why did you make that selection?"}],"href":"https://api.surveymonkey.com/v3/surveys/306079584/pages/165250506/questions/652286726","page_id":"165250506"},"emitted_at":1674149694471} -{"stream":"survey_questions","data":{"id":"652286715","position":4,"visible":true,"family":"single_choice","subtype":"vertical_two_col","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"What is your first reaction to the product?"}],"href":"https://api.surveymonkey.com/v3/surveys/306079584/pages/165250506/questions/652286715","answers":{"choices":[{"position":1,"visible":true,"text":"Very positive","quiz_options":{"score":0},"id":"4285525063"},{"position":2,"visible":true,"text":"Somewhat positive","quiz_options":{"score":0},"id":"4285525064"},{"position":3,"visible":true,"text":"Neutral","quiz_options":{"score":0},"id":"4285525065"},{"position":4,"visible":true,"text":"Somewhat negative","quiz_options":{"score":0},"id":"4285525066"},{"position":5,"visible":true,"text":"Very negative","quiz_options":{"score":0},"id":"4285525067"}]},"page_id":"165250506"},"emitted_at":1674149694471} -{"stream":"survey_questions","data":{"id":"652286721","position":5,"visible":true,"family":"single_choice","subtype":"vertical_two_col","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"How would you rate the quality of the product?"}],"href":"https://api.surveymonkey.com/v3/surveys/306079584/pages/165250506/questions/652286721","answers":{"choices":[{"position":1,"visible":true,"text":"Very high quality","quiz_options":{"score":0},"id":"4285525083"},{"position":2,"visible":true,"text":"High quality","quiz_options":{"score":0},"id":"4285525084"},{"position":3,"visible":true,"text":"Neither high nor low quality","quiz_options":{"score":0},"id":"4285525085"},{"position":4,"visible":true,"text":"Low quality","quiz_options":{"score":0},"id":"4285525086"},{"position":5,"visible":true,"text":"Very low quality","quiz_options":{"score":0},"id":"4285525087"}]},"page_id":"165250506"},"emitted_at":1674149694472} -{"stream":"survey_questions","data":{"id":"652286716","position":6,"visible":true,"family":"single_choice","subtype":"vertical_two_col","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"How innovative is the product?"}],"href":"https://api.surveymonkey.com/v3/surveys/306079584/pages/165250506/questions/652286716","answers":{"choices":[{"position":1,"visible":true,"text":"Extremely innovative","quiz_options":{"score":0},"id":"4285525068"},{"position":2,"visible":true,"text":"Very innovative","quiz_options":{"score":0},"id":"4285525069"},{"position":3,"visible":true,"text":"Somewhat innovative","quiz_options":{"score":0},"id":"4285525070"},{"position":4,"visible":true,"text":"Not so innovative","quiz_options":{"score":0},"id":"4285525071"},{"position":5,"visible":true,"text":"Not at all innovative","quiz_options":{"score":0},"id":"4285525072"}]},"page_id":"165250506"},"emitted_at":1674149694473} -{"stream":"survey_questions","data":{"id":"652286718","position":7,"visible":true,"family":"single_choice","subtype":"vertical_two_col","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"When you think about the product, do you think of it as something you need or don’t need?"}],"href":"https://api.surveymonkey.com/v3/surveys/306079584/pages/165250506/questions/652286718","answers":{"choices":[{"position":1,"visible":true,"text":"Definitely need","quiz_options":{"score":0},"id":"4285525078"},{"position":2,"visible":true,"text":"Probably need","quiz_options":{"score":0},"id":"4285525079"},{"position":3,"visible":true,"text":"Neutral","quiz_options":{"score":0},"id":"4285525080"},{"position":4,"visible":true,"text":"Probably don’t need","quiz_options":{"score":0},"id":"4285525081"},{"position":5,"visible":true,"text":"Definitely don’t need","quiz_options":{"score":0},"id":"4285525082"}]},"page_id":"165250506"},"emitted_at":1674149694474} -{"stream":"survey_questions","data":{"id":"652286722","position":8,"visible":true,"family":"single_choice","subtype":"vertical_two_col","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"How would you rate the value for money of the product?"}],"href":"https://api.surveymonkey.com/v3/surveys/306079584/pages/165250506/questions/652286722","answers":{"choices":[{"position":1,"visible":true,"text":"Excellent","quiz_options":{"score":0},"id":"4285525088"},{"position":2,"visible":true,"text":"Above average","quiz_options":{"score":0},"id":"4285525089"},{"position":3,"visible":true,"text":"Average","quiz_options":{"score":0},"id":"4285525090"},{"position":4,"visible":true,"text":"Below average","quiz_options":{"score":0},"id":"4285525091"},{"position":5,"visible":true,"text":"Poor","quiz_options":{"score":0},"id":"4285525092"}]},"page_id":"165250506"},"emitted_at":1674149694474} -{"stream":"survey_questions","data":{"id":"652286717","position":9,"visible":true,"family":"single_choice","subtype":"vertical_two_col","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"If the product were available today, how likely would you be to buy the product?"}],"href":"https://api.surveymonkey.com/v3/surveys/306079584/pages/165250506/questions/652286717","answers":{"choices":[{"position":1,"visible":true,"text":"Extremely likely","quiz_options":{"score":0},"id":"4285525073"},{"position":2,"visible":true,"text":"Very likely","quiz_options":{"score":0},"id":"4285525074"},{"position":3,"visible":true,"text":"Somewhat likely","quiz_options":{"score":0},"id":"4285525075"},{"position":4,"visible":true,"text":"Not so likely","quiz_options":{"score":0},"id":"4285525076"},{"position":5,"visible":true,"text":"Not at all likely","quiz_options":{"score":0},"id":"4285525077"}]},"page_id":"165250506"},"emitted_at":1674149694475} -{"stream":"survey_questions","data":{"id":"652286723","position":10,"visible":true,"family":"single_choice","subtype":"vertical_two_col","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"How likely are you to replace your current product with the product?"}],"href":"https://api.surveymonkey.com/v3/surveys/306079584/pages/165250506/questions/652286723","answers":{"choices":[{"position":1,"visible":true,"text":"Extremely likely ","quiz_options":{"score":0},"id":"4285525093"},{"position":2,"visible":true,"text":"Very likely ","quiz_options":{"score":0},"id":"4285525094"},{"position":3,"visible":true,"text":"Somewhat likely","quiz_options":{"score":0},"id":"4285525095"},{"position":4,"visible":true,"text":"Not so likely","quiz_options":{"score":0},"id":"4285525096"},{"position":5,"visible":true,"text":"Not at all likely","quiz_options":{"score":0},"id":"4285525097"}]},"page_id":"165250506"},"emitted_at":1674149694475} -{"stream":"survey_questions","data":{"id":"652286714","position":11,"visible":true,"family":"matrix","subtype":"rating","layout":{"bottom_spacing":0,"col_width":80,"col_width_format":"percent","left_spacing":0,"num_chars":null,"num_lines":null,"position":"new_row","right_spacing":0,"top_spacing":0,"width":100,"width_format":"percent"},"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"How likely is it that you would recommend our new product to a friend or colleague?"}],"href":"https://api.surveymonkey.com/v3/surveys/306079584/pages/165250506/questions/652286714","answers":{"rows":[{"position":1,"visible":true,"text":"","id":"4285525061"}],"choices":[{"position":1,"visible":true,"text":"Not at all likely - 0","id":"4285525050","is_na":false,"weight":-100,"description":"Not at all likely"},{"position":2,"visible":true,"text":"1","id":"4285525051","is_na":false,"weight":-100,"description":""},{"position":3,"visible":true,"text":"2","id":"4285525052","is_na":false,"weight":-100,"description":""},{"position":4,"visible":true,"text":"3","id":"4285525053","is_na":false,"weight":-100,"description":""},{"position":5,"visible":true,"text":"4","id":"4285525054","is_na":false,"weight":-100,"description":""},{"position":6,"visible":true,"text":"5","id":"4285525055","is_na":false,"weight":-100,"description":""},{"position":7,"visible":true,"text":"6","id":"4285525056","is_na":false,"weight":-100,"description":""},{"position":8,"visible":true,"text":"7","id":"4285525057","is_na":false,"weight":0,"description":""},{"position":9,"visible":true,"text":"8","id":"4285525058","is_na":false,"weight":0,"description":""},{"position":10,"visible":true,"text":"9","id":"4285525059","is_na":false,"weight":100,"description":""},{"position":11,"visible":true,"text":"Extremely likely - 10","id":"4285525060","is_na":false,"weight":100,"description":"Extremely likely"}]},"page_id":"165250506"},"emitted_at":1674149694476} -{"stream":"survey_questions","data":{"id":"652286719","position":12,"visible":true,"family":"open_ended","subtype":"essay","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"In your own words, what are the things that you like most about this new product?"}],"href":"https://api.surveymonkey.com/v3/surveys/306079584/pages/165250506/questions/652286719","page_id":"165250506"},"emitted_at":1674149694476} -{"stream":"survey_questions","data":{"id":"652286720","position":13,"visible":true,"family":"open_ended","subtype":"essay","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"In your own words, what are the things that you would most like to improve in this new product?"}],"href":"https://api.surveymonkey.com/v3/surveys/306079584/pages/165250506/questions/652286720","page_id":"165250506"},"emitted_at":1674149694477} -{"stream":"survey_questions","data":{"id":"667461858","position":1,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"vprw9pgg3d xiygcvd0suru k7rews838g6qc ndsukv2 7sa31urnvoskixw c52sg4uoete874i"}],"href":"https://api.surveymonkey.com/v3/surveys/307785429/pages/168831415/questions/667461858","answers":{"choices":[{"position":1,"visible":true,"text":"m5kpo9621yynjey kdt5w6pkkit yqyocxqf yw1p3uh2e5b 7gtmvs4 6em5ugqat x6pmhcfrvq4pit t67pif54aj jbgure","quiz_options":{"score":0},"id":"4385175366"},{"position":2,"visible":true,"text":"usdfft kvi9yqh1m38w3m 6uxryyvhrk1 nfxlt gnhjy826e rqks3jjuyj9hd 3y8755o","quiz_options":{"score":0},"id":"4385175367"},{"position":3,"visible":true,"text":"m6xv3yca7 up9u0qwx23h2skj 0cjlw19k5emypgm awi5tg l9atp kv4jrd73y9","quiz_options":{"score":0},"id":"4385175368"},{"position":4,"visible":true,"text":"todhc7 krw2v8qa rt2iu19vhxyw1dp x6oav54yak4vj yu4le2fc7 fksvl ejbr7x2u69 k9n9n7g3f","quiz_options":{"score":0},"id":"4385175369"}]},"page_id":"168831415"},"emitted_at":1674149694578} -{"stream":"survey_questions","data":{"id":"667461861","position":2,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"rl3uabslq46p mnwh0fle3xfs ejupx8e26q55va svfm11o"}],"href":"https://api.surveymonkey.com/v3/surveys/307785429/pages/168831415/questions/667461861","answers":{"choices":[{"position":1,"visible":true,"text":"m6g15xqsuwpbh3 x0116lkpod 5vkgg7duiq23sp ot884xd67v6fv2q 1u2mpgo ttj3ehahbljf1j6 pwj46w1d","quiz_options":{"score":0},"id":"4385175380"},{"position":2,"visible":true,"text":"cuff7 mbn2k1hxd6n6 jg9kffdkccjh bpodqpt2wtxu 7x38qxmvg42ap qpv0cddfumvix s0vv161iytceelx","quiz_options":{"score":0},"id":"4385175381"},{"position":3,"visible":true,"text":"jm5q6yu4rn pl8wwv23lnxs ou5r8m3np4fis6 6wlatg yeh3kafns0 h8u0o8f yhqni064ev6","quiz_options":{"score":0},"id":"4385175382"}]},"page_id":"168831415"},"emitted_at":1674149694579} -{"stream":"survey_questions","data":{"id":"667461876","position":3,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"hn4xnf ox0joj4inoy6ja jh02428n qeqxm9nopevjca sccwladi v63ks6mdqf0h0 4pug94eya 5et1g3t4 exbryyy6hv9mvd"}],"href":"https://api.surveymonkey.com/v3/surveys/307785429/pages/168831415/questions/667461876","answers":{"choices":[{"position":1,"visible":true,"text":"w6nvr mkrj8q1g 740smg3nda m3afibg8 224jb59fon975t w9t8ma","quiz_options":{"score":0},"id":"4385175447"},{"position":2,"visible":true,"text":"cerw942pk xv1wg4gk4l7jq q3grdgasaol 75ghj ppo6ivm3r hxodiktx9rxs","quiz_options":{"score":0},"id":"4385175448"},{"position":3,"visible":true,"text":"5os82a1jwgygye 61dhsf6v sgy0ui7ib78ws7f j3pymv","quiz_options":{"score":0},"id":"4385175449"}]},"page_id":"168831415"},"emitted_at":1674149694580} -{"stream":"survey_questions","data":{"id":"667461897","position":4,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"5a32s3dhl a967a54aoj3cr ttu0uk4hu4h1 r360wdohq1x9xu7 qvqpg1eb6qg3 01ogn ucbhive fpwstwi6hre0ynb xu3c3txpma7eoh3"}],"href":"https://api.surveymonkey.com/v3/surveys/307785429/pages/168831415/questions/667461897","answers":{"choices":[{"position":1,"visible":true,"text":"6gngt 5w10n7fl47r07 68t8t79 66pqqth5urrw 1ve2kn385x0u9s 99vcfs08at","quiz_options":{"score":0},"id":"4385175533"},{"position":2,"visible":true,"text":"pnwrx dwj1dx dpdan1wpqs 9lhgks36 1w8a2utjbxas31t rlc1u51mdpjr 90tcj6i8ibicvxt q1ahtd2x doujpba kjjjdi0","quiz_options":{"score":0},"id":"4385175534"},{"position":3,"visible":true,"text":"wu74ewyb4grv fqb8h3yoldsn 0nxv5844yn0lpx jct7na y9sp3u ueq7vk83ix7g7sx f5sl73r2r29e84","quiz_options":{"score":0},"id":"4385175535"}]},"page_id":"168831415"},"emitted_at":1674149694581} -{"stream":"survey_questions","data":{"id":"667461902","position":5,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"sdek6wejcdn 82r223sfhy6xkm5 65gns2m7phi 0fx8dx5bp psvndjnn5b 5kki467 a8faadeid0gl13 x2t3e 03xco2 cf39nv9mdq3vj"}],"href":"https://api.surveymonkey.com/v3/surveys/307785429/pages/168831415/questions/667461902","answers":{"choices":[{"position":1,"visible":true,"text":"nfiq0 d7gwft9 8bhinfrsv6r6 k3vylofokamx3 9hrik k2ageb5amyj89a toli0yrsq bqbrcp","quiz_options":{"score":0},"id":"4385175559"},{"position":2,"visible":true,"text":"8c5qdiklqb8tb 5uhc6w1a1o9 er8h4w0mf779o 14nsksqs65j10","quiz_options":{"score":0},"id":"4385175561"},{"position":3,"visible":true,"text":"5wpggsufojm 4decq179m5 0brpk1la0kyno e8ctqi fxa4j0uo9atp","quiz_options":{"score":0},"id":"4385175563"},{"position":4,"visible":true,"text":"5it4y4q 49im15osxk2j0 j8twpv8j nei1egowtm9a lyrigqwu0eby tpg5o7kuvgn34l spdu5icxlc 2f4qf qb55g 8si14ri4bdw1v72","quiz_options":{"score":0},"id":"4385175564"}]},"page_id":"168831415"},"emitted_at":1674149694582} -{"stream":"survey_questions","data":{"id":"667461933","position":1,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"4vnmxluvh7a l8s5r0i5ck pa7pckqf tnah7fan76p38n8 m1i5r ea9ib0t823amcn k98290k23wh7qn"}],"href":"https://api.surveymonkey.com/v3/surveys/307785429/pages/168831437/questions/667461933","answers":{"choices":[{"position":1,"visible":true,"text":"aldwijo3p9mh3 v26dql0wbrctg0 98sqogqfmi2 558gsvip","quiz_options":{"score":0},"id":"4385175732"},{"position":2,"visible":true,"text":"c5fqe o43qm3oq9i 23qgoxg85 cg5hr5wj79469n 328j7ji1kugb 60jlwtbu4eoh63 alyho5","quiz_options":{"score":0},"id":"4385175733"},{"position":3,"visible":true,"text":"882jtclxvq15yk dgive oqhqdf a8a2rgxse4b8fw 87oeyfbk7 wcevsx 4mv1lyp6lyxysrm 3m8yayarq7wm 9bpmul9 el2j1j4yw","quiz_options":{"score":0},"id":"4385175734"},{"position":4,"visible":true,"text":"3so1buxa 88ypp61nq6bhi0 lwa2pfg6tg lra8e1r5bn4k umstgwdck9 wslq681gvn3g2f a20nr1eovr3 feo0p5bqbgrcvun np851e23cojfv1 uqlwg","quiz_options":{"score":0},"id":"4385175735"}]},"page_id":"168831437"},"emitted_at":1674149694582} -{"stream":"survey_questions","data":{"id":"667461934","position":2,"visible":true,"family":"single_choice","subtype":"horiz","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"k6ba3wdl 6c0nvj5es h0dttl3krobdcd va6klkdyf 79e2a lhmov"}],"href":"https://api.surveymonkey.com/v3/surveys/307785429/pages/168831437/questions/667461934","answers":{"choices":[{"position":1,"visible":true,"text":"h8dywnj ithq4el 98e20manw xxyp0f9oike55 t56arby dhmmxh4pehs","quiz_options":{"score":0},"id":"4385175736"},{"position":2,"visible":true,"text":"14p6263c 7khx2y rpjy6432cy7kkr 0po0vol3uk1cuf 1oejmy viircj9b nyw76yqpf","quiz_options":{"score":0},"id":"4385175737"},{"position":3,"visible":true,"text":"vtdf2x 2kme3 vhpqi5s82k7v4 1mjr5r jp0ox03i6t d5ef5228du3ck 536btd7etv","quiz_options":{"score":0},"id":"4385175738"},{"position":4,"visible":true,"text":"yt2e6nk6 v2wl4k047h1n5 civs8 tjjr7lkeay9i3","quiz_options":{"score":0},"id":"4385175739"}]},"page_id":"168831437"},"emitted_at":1674149694583} -{"stream":"survey_questions","data":{"id":"667461936","position":3,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"u1jbhrxyrg30ra9 qs6b5m237lag 7tlvv 05okuge2imipht bg813 hixvf9bd9n10gk lwxdjnajbu9gsra uyh3fjd40bs"}],"href":"https://api.surveymonkey.com/v3/surveys/307785429/pages/168831437/questions/667461936","answers":{"choices":[{"position":1,"visible":true,"text":"yl8yqqhtr9rxi 049x312 oogub1figyg4e tixix79g85hxi2l bin0fp5g goq3kwu2eaase x9mihu0erdhl9v4 4yausp34y2rgyx iwyfuo4b7yme8lr","quiz_options":{"score":0},"id":"4385175740"},{"position":2,"visible":true,"text":"gfqbxk8 3tosymacon00av uoxydvdy28 iuce5bjdvpg gwxdpkudg24ouk u2ns1u x10hujmiiy9l62i soln75a14nm8q0","quiz_options":{"score":0},"id":"4385175741"},{"position":3,"visible":true,"text":"j299iy64y804 k709a5kk9 yjsifyt4ksu b4fnp4a","quiz_options":{"score":0},"id":"4385175742"}]},"page_id":"168831437"},"emitted_at":1674149694584} -{"stream":"survey_questions","data":{"id":"667461937","position":4,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"06qgl j7f89g3 5ok0ilvhvpa ojcs6ixbovvf u6xympul0 o00vc 6bjg7jmvy9s 543ndxd jmugw njqhxa3phj4jd"}],"href":"https://api.surveymonkey.com/v3/surveys/307785429/pages/168831437/questions/667461937","answers":{"choices":[{"position":1,"visible":true,"text":"5v1hangb7u0bj8 44xo3y4o inytm95s16m dvn5e2leb6sf7 hamaw8l538eph 6xuyp5030i538c 0ym6m831 duar0bo76 kl36ykf5w9ugh","quiz_options":{"score":0},"id":"4385175745"},{"position":2,"visible":true,"text":"3iiaxf5dgi tr6crqs i51ocgg ka1ri0ffd8xeh cv12807q5pq 7fmq8lxy7 5wuas7gcn4kln tv6mf3pqq4jm x3yfr sm7idq2nvoonk","quiz_options":{"score":0},"id":"4385175746"},{"position":3,"visible":true,"text":"mj5cmssx3htoni ld622ppaiqr uw63p1q4up3 lfxioj94gd3jky0 2tj631 5ql6116xsyp r5hwhy2","quiz_options":{"score":0},"id":"4385175747"},{"position":4,"visible":true,"text":"llb9oe7fk8v3w4q xpd66rgbyp9m 0opo19df7n6 scr70cg86pn o1qtr7rclqxjl kmpnf79790 6wmig6mjwflh2vq 1oe033jyd","quiz_options":{"score":0},"id":"4385175748"},{"position":5,"visible":true,"text":"pg851 mju7py cdp1jqcaeg66 3gv05ohwromt u0uot","quiz_options":{"score":0},"id":"4385175749"},{"position":6,"visible":true,"text":"mu0hn799v 4ch8i7j5o5tf8s 6rn9y0ft67mchc u5ds14s9 aj1qg26dwf41suc i1mdhdk","quiz_options":{"score":0},"id":"4385175750"},{"position":7,"visible":true,"text":"6w4xjejtc9j 50n12 u1cdbulvkqykvci x79sdq9y hmbem37x7 s7pwufdjnmn xo8qy81a3fjmv","quiz_options":{"score":0},"id":"4385175751"},{"position":8,"visible":true,"text":"k582dst vgjvse b2h3mxi dteo4p9lrtx m54ug","quiz_options":{"score":0},"id":"4385175752"}]},"page_id":"168831437"},"emitted_at":1674149694585} -{"stream":"survey_questions","data":{"id":"667461986","position":5,"visible":true,"family":"single_choice","subtype":"horiz","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"gjahy0fl7 fs6b3q6e7 rsvbmotgt9p7 w9l2l7 hvj8fhrqwc vays8 4yh7qch hjj5lx0 co6a5rqd 5pt79or0a7evc"}],"href":"https://api.surveymonkey.com/v3/surveys/307785429/pages/168831437/questions/667461986","answers":{"choices":[{"position":1,"visible":true,"text":"b7fbynqu hwhvtbqwbmxy 9bjgxs1 1e05mo toj685p5v w34e97j","quiz_options":{"score":0},"id":"4385175878"},{"position":2,"visible":true,"text":"n5e5yl9c6 yw12588olh swl7nwm1dl9n2l 9v7n8wursm6739 p67woq9u27w7p y3ge5y1iji819g7 uklmy7q8 7ocv68por","quiz_options":{"score":0},"id":"4385175879"},{"position":3,"visible":true,"text":"h08nyyi 1393hst fcdij6j yepfw2","quiz_options":{"score":0},"id":"4385175880"},{"position":4,"visible":true,"text":"rdme8pwwjl07 4ju4xn47ofvbj i31u4ty4f4 wteatx2 gc3nqgji pu9h7","quiz_options":{"score":0},"id":"4385175881"},{"position":5,"visible":true,"text":"jxkod8gx x8tcsxxle4f0lv4 vcmjicpk7v i19dxl3 3lvmmdkx","quiz_options":{"score":0},"id":"4385175882"}]},"page_id":"168831437"},"emitted_at":1674149694587} -{"stream":"survey_questions","data":{"id":"667462078","position":1,"visible":true,"family":"single_choice","subtype":"horiz","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"vrwr5e5qwxqu4 mn9jcdpf 0e5u3k ansge 2hwkipig u0wn3acc2sct xnv4y8 3irjcv i0cgva8762tfosw"}],"href":"https://api.surveymonkey.com/v3/surveys/307785444/pages/168831461/questions/667462078","answers":{"choices":[{"position":1,"visible":true,"text":"55yvst1hihf4br ialhkcxm73fjln 1lelxuw93g0 nukml0","quiz_options":{"score":0},"id":"4385176721"},{"position":2,"visible":true,"text":"okdweedyn93 mua59r2l4haj orem637bgltyb 7jc2yral4o8qov j15ry fkkgjxdea rjd297yot eq57kefir4if3c 2l098 ech2y5fv","quiz_options":{"score":0},"id":"4385176722"},{"position":3,"visible":true,"text":"b3sgu7kkbjowg ux7qqyw41yb umfqpq6 dfgu4awr uy2i4j626 cb17jp5xal6","quiz_options":{"score":0},"id":"4385176723"},{"position":4,"visible":true,"text":"5igq4pw5ul la3i72sh30 uk24o0qi jh51hl9s3a43s 9tgq0ip8k1nev ar6it adgfobu491 f8qke95 o2f9u2ubb49t28c obyoj9cfsl","quiz_options":{"score":0},"id":"4385176724"},{"position":5,"visible":true,"text":"r73ge7qkd8mjfu d7kdhfmco d0pcoyqqyjrph4g 06xs83492x 5ajmrgy1 x4ev3aroh9q86r gwiu17g02i6h75c np3r62er4x5n 73hbkk43af 7f5b333hk7tkc","quiz_options":{"score":0},"id":"4385176725"},{"position":6,"visible":true,"text":"vo6u3 9gpy2s57xh u8dcib 4f8gbbng3wq0h36","quiz_options":{"score":0},"id":"4385176726"}]},"page_id":"168831461"},"emitted_at":1674149694691} -{"stream":"survey_questions","data":{"id":"667462079","position":2,"visible":true,"family":"single_choice","subtype":"horiz","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"6d3w2gqt8056 h5vl4u3kcuvlqc lk2ip62d kpkowwj"}],"href":"https://api.surveymonkey.com/v3/surveys/307785444/pages/168831461/questions/667462079","answers":{"choices":[{"position":1,"visible":true,"text":"agvpu24j1 g51hr01jbjsk vw6gq cftg9oeklijpda qbibhtf35pl4gtq wu48wsd5 c6jifqlyt4e","quiz_options":{"score":0},"id":"4385176727"},{"position":2,"visible":true,"text":"4kbb46n 0vmm0c4we qwaiv1f731l y8iaiu3bkcb6 loqrsy","quiz_options":{"score":0},"id":"4385176728"},{"position":3,"visible":true,"text":"rwm2cgejb qc4g9 7y68obgewd fou0um xh7dkb89o bfosq3 v9bdp7s9450","quiz_options":{"score":0},"id":"4385176729"}]},"page_id":"168831461"},"emitted_at":1674149694692} -{"stream":"survey_questions","data":{"id":"667462082","position":3,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"dvpu842k yh6g475bbwk75 qtkq7f5yd01igo ixldsnn uqbpr ngarg6pf f6ueafaq 4ch8dhr nqsm5uydajns9"}],"href":"https://api.surveymonkey.com/v3/surveys/307785444/pages/168831461/questions/667462082","answers":{"choices":[{"position":1,"visible":true,"text":"hfrp92k576cl 4yjdkhqj0vr9 qx16a oc86p8hhonp5xs fn4lo1wmewe5n7 sue7kinfb6 8qpyh auccq7xlw1b","quiz_options":{"score":0},"id":"4385176734"},{"position":2,"visible":true,"text":"lqon4mp qhj98iee7o 37f50nvkrs3va 18vxkbch636kh yn8ih 8jxqpq03v 6j5lurdu7c17 knmjb3 0868hyuu7","quiz_options":{"score":0},"id":"4385176735"},{"position":3,"visible":true,"text":"obj26 pb0pwmt gxvk8isp04c42 77ocbs1 y7jyxsbl vbbtsy d1t8el31vu7x6d r44iyhr2y810o0o","quiz_options":{"score":0},"id":"4385176736"},{"position":4,"visible":true,"text":"yebv8qvq dd5qeecs45k 2un02jrywfqf1 u96tcry54cxiyu mnbgr2mqe a43wm7 1rnggj b99hudv2g0kgor","quiz_options":{"score":0},"id":"4385176737"},{"position":5,"visible":true,"text":"b6v1txm65a40 h1sah8 84enie0e 57clim 2eew4","quiz_options":{"score":0},"id":"4385176738"},{"position":6,"visible":true,"text":"d88o04mls0oddci d61gj k4acpqp r3ugg2t1e55s ldb3ll5gjkn ruerq4w4w95j649 c3bea007si4t 4jp1ctllptvfao2 m78584fllehmi b9fpb","quiz_options":{"score":0},"id":"4385176739"}]},"page_id":"168831461"},"emitted_at":1674149694692} -{"stream":"survey_questions","data":{"id":"667462084","position":4,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"n6y1nnlv45movkl vkjge u5wp4unnsf cp6y14e"}],"href":"https://api.surveymonkey.com/v3/surveys/307785444/pages/168831461/questions/667462084","answers":{"choices":[{"position":1,"visible":true,"text":"2ak2bbt6l 958nphvdetkv4 ycoevsx u49by0 eohbcorqyd2fk","quiz_options":{"score":0},"id":"4385176763"},{"position":2,"visible":true,"text":"6lnrx0h 8rtivmuwxpy8wv ebygucb3xu1m42 iyep7ijy4gu0mvl s1oco7cxc0","quiz_options":{"score":0},"id":"4385176764"},{"position":3,"visible":true,"text":"m5ysyahej9ffbl hiob9pqg7je5r oqyup1bmda i142y odbg2sgkibp5a5n","quiz_options":{"score":0},"id":"4385176765"},{"position":4,"visible":true,"text":"r56bqr7ts07qj7l 0odv2rxnmffpw ctmr30tp61ibr l64xab0cs nmqjd 21rw3b cy2xks0me55b05","quiz_options":{"score":0},"id":"4385176766"},{"position":5,"visible":true,"text":"wjnn6f 4at64hj rif45s1bu9asypf vmjw8wv55isa 87va6t705w","quiz_options":{"score":0},"id":"4385176767"}]},"page_id":"168831461"},"emitted_at":1674149694693} -{"stream":"survey_questions","data":{"id":"667462086","position":5,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"ikhsonq7 fadyfis1w 1qmo7d2wgcq 5nht9h9j esydnd1m a5ehikxk6ypubk dtanss721p6"}],"href":"https://api.surveymonkey.com/v3/surveys/307785444/pages/168831461/questions/667462086","answers":{"choices":[{"position":1,"visible":true,"text":"3p8dvrh6lr0 iejlsiah ch3n24c7d9tt 7w9rvdkxpqe8xh fo9jq7k8sa1 er31i44d633 xurs2ie khm8mmp5d8gc9 egn43 86jj22","quiz_options":{"score":0},"id":"4385176918"},{"position":2,"visible":true,"text":"r5qhjy06onu7loq n6htekeur 2xm1jh99 ibsp4oat8878fy 940xq9 n4tmuhn e5901s317nbq pevqvednus0ph8g","quiz_options":{"score":0},"id":"4385176919"},{"position":3,"visible":true,"text":"at4b6f9tud4tvpl uf1d5p jgci9m0u17qkj 8tmdkc9o lb7b63gth6 6ds89i5 uu91pd2ybc wwk60i ntwtf5enlg5h8o","quiz_options":{"score":0},"id":"4385176920"},{"position":4,"visible":true,"text":"ovrn2np gg0nxt88wji6fp ckphrf1l3 0um296qkhvgh","quiz_options":{"score":0},"id":"4385176921"}]},"page_id":"168831461"},"emitted_at":1674149694694} -{"stream":"survey_questions","data":{"id":"667462094","position":1,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"uhxc9mpgrh9c9aa 05iqvfqi y8rip1qcmmxh b8jj5k cavri2y5 1sqge 7ywuxm3awoh ddul9pvje6dcr kggd2y4"}],"href":"https://api.surveymonkey.com/v3/surveys/307785444/pages/168831467/questions/667462094","answers":{"choices":[{"position":1,"visible":true,"text":"t30umxtof3b6e 9o88p i9hcrmm57pq42 16pnnhw idraibx70sby1i","quiz_options":{"score":0},"id":"4385176959"},{"position":2,"visible":true,"text":"xnxw9wa4fo 157vk4i0d 1m9fluu gmmy39m41 icu2b548yd9p3o 3yj0tyamvyjci1 1c19qkbuqb031r","quiz_options":{"score":0},"id":"4385176960"},{"position":3,"visible":true,"text":"ehjwxib3hha9 bjj11etv shhdikfh1iy0u nioigschrcpqi2 v7mgatm l98c4no7k9 bf2j11cmi1dgbt2 f8etga4g kg2w2yngt62","quiz_options":{"score":0},"id":"4385176961"},{"position":4,"visible":true,"text":"uyqt6g2o61rc wcwn8q7ohbhegr wtwkdefljgy uli9v6","quiz_options":{"score":0},"id":"4385176962"},{"position":5,"visible":true,"text":"7pg26slappuq1 aw3vsgc4317be eqe9a8v6s5whg0 dvrw9imoe o9wl91vi282 4h7f26","quiz_options":{"score":0},"id":"4385176963"},{"position":6,"visible":true,"text":"wjipnxgtewp8r ddtad8 yqo4rj5x657nge ykw2qghp3e r3tqrvk1m7l ir60pwi5g5 h7exqf4","quiz_options":{"score":0},"id":"4385176964"},{"position":7,"visible":true,"text":"aafwgbh80 ip6mgadg 8ls8v4ss2fk4y s19ti1jvfnaey g0v4qdn ifk0ve j2trlbjolhuc3 op07p93t1xsh0 kqiw7s ktofe9muryqxe","quiz_options":{"score":0},"id":"4385176965"}]},"page_id":"168831467"},"emitted_at":1674149694694} -{"stream":"survey_questions","data":{"id":"667462096","position":2,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"hh8r3c6evhaj2ug 34w4tpf8 b41jtyj tn7vh1hwl 7rrs5 p5rct6v9pg7k 7nw3etg9"}],"href":"https://api.surveymonkey.com/v3/surveys/307785444/pages/168831467/questions/667462096","answers":{"choices":[{"position":1,"visible":true,"text":"4jlthohsbk gq1uy9yt 83htjom d5udup0g0jfsido a6nxtc2vkdn 7tfgt9 6e6wsq","quiz_options":{"score":0},"id":"4385176968"},{"position":2,"visible":true,"text":"nt9b0e1tk eq9ckgs2u2fiu probh n7929oe950jcs7c uvdxlffx66obu 6oschegtcq q2a7d hwp71gp7 s3b03f5p3g24h","quiz_options":{"score":0},"id":"4385176969"},{"position":3,"visible":true,"text":"u1p9r on01v021jli y4n4d2gpcbvk fe3y53pjwje6ms atckrpa2r44f 5lkaim1 coo8gs4mer wfbusogrereep 3r2i1qlkh6097d1","quiz_options":{"score":0},"id":"4385176970"},{"position":4,"visible":true,"text":"i4liv68323s xc5i56ba5pa osxulquad221n xbmov jnhv1ogkdw9","quiz_options":{"score":0},"id":"4385176971"},{"position":5,"visible":true,"text":"tlinp 6b4ig tcw2f9no5xm7stv qv6l4foeesgh","quiz_options":{"score":0},"id":"4385176972"},{"position":6,"visible":true,"text":"jfxlbnonc tee9khi75t0ah wr83dgnnsc0l pvyf5t266eq1ev","quiz_options":{"score":0},"id":"4385176973"},{"position":7,"visible":true,"text":"rv7aly5o19 y3hvj5byk uojji58u9thv w8dnv stba3pan 5yho9m1f3o097n7","quiz_options":{"score":0},"id":"4385176974"},{"position":8,"visible":true,"text":"0kmmyvxkq0ixg wmm2bnydk3xg nf2e5e3fn4e 0jd59 tu9bib d6i9t6 3ikku8yd42gnt 0uusc9kip2w","quiz_options":{"score":0},"id":"4385176975"}]},"page_id":"168831467"},"emitted_at":1674149694695} -{"stream":"survey_questions","data":{"id":"667462099","position":3,"visible":true,"family":"single_choice","subtype":"horiz","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"952l4noismh4 jwlr6yhgbpo 9waador5oikm xg4aqk0w5o08j 7bg3d02bw22df 84xdy6eq34snqhk h4ngqp6rpih4 9jevsisqvxcv"}],"href":"https://api.surveymonkey.com/v3/surveys/307785444/pages/168831467/questions/667462099","answers":{"choices":[{"position":1,"visible":true,"text":"lknhasa3eu03sf p1uflsed0h3sr q1b0gmga v8swv5rbv","quiz_options":{"score":0},"id":"4385176976"},{"position":2,"visible":true,"text":"u6hdwwqb4cc84s8 8jo8vmqx7 klsxteic7f 9m3mg59ov s7npsi1 89i5dq8chb 919k2r7u0t","quiz_options":{"score":0},"id":"4385176977"},{"position":3,"visible":true,"text":"xjpll0bpay536 2ex024o9rd 7boywqww rktvkxinf9fmoh 7d9cb06es933e jm2hsgge2cjy5jj 1ecnm8shkp a3flgyb gcf1622ijpsj ydnun9wos44","quiz_options":{"score":0},"id":"4385176978"},{"position":4,"visible":true,"text":"n7q0ogrerpn8dl qmip86lbe sl1xjutnlap7 3bov7 5fihf81ek49s 9p0f4 r1rptp","quiz_options":{"score":0},"id":"4385176979"},{"position":5,"visible":true,"text":"kwdbb5 hi349arw9 r2f0df eee097e k5035hpewmi6 ky0f9 kh7oio2u0bilxue 5l0moimxob7fid oeno82n5rkplm6i","quiz_options":{"score":0},"id":"4385176980"},{"position":6,"visible":true,"text":"finhomjk 48ciiw7x1g2f c50x4 9kenej45r","quiz_options":{"score":0},"id":"4385176981"},{"position":7,"visible":true,"text":"e7ravpe7reoe5il 9kgwh794cl9x0w snp3wnpq1ryga p4vpk03q7","quiz_options":{"score":0},"id":"4385176982"},{"position":8,"visible":true,"text":"ao3rdw8o5r0xvy x07pd7op60cce ylfrtqpvu81e3u 3f7jtv448v mfoeywlt","quiz_options":{"score":0},"id":"4385176983"}]},"page_id":"168831467"},"emitted_at":1674149694695} -{"stream":"survey_questions","data":{"id":"667462100","position":4,"visible":true,"family":"single_choice","subtype":"horiz","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"onm04qqjiu e5hh4bm1nrm 4lql830cednt4y m4vd6n kk1rvup ghucvtc uw8xrjg6 u6giu1qxo oywqk5fc4elxs"}],"href":"https://api.surveymonkey.com/v3/surveys/307785444/pages/168831467/questions/667462100","answers":{"choices":[{"position":1,"visible":true,"text":"n66vr4n4vn w57cw3jovcl9 9xhjvp8 mkuivep2954e71r","quiz_options":{"score":0},"id":"4385176986"},{"position":2,"visible":true,"text":"dncqrcm vkp1wf2u0d9j 9o2d4ag7rwt827i wo23d8v14j1e3p 8wty8ixwwhnc6 huw4we49ag40 fruxk5p9080lxhb hjvf468vja8rcxr","quiz_options":{"score":0},"id":"4385176987"},{"position":3,"visible":true,"text":"54u3kd450tpm i9jjwlr8r3xbduw 3k1ic1tg4 4tijdweafs ypprmy7wcpxeg yvvc4l19sd5p gmleu45","quiz_options":{"score":0},"id":"4385176988"},{"position":4,"visible":true,"text":"a1hrtqesu1ph 88spqcx6hyo7 dt64okx798gal cy3tbwljajmrr","quiz_options":{"score":0},"id":"4385176989"},{"position":5,"visible":true,"text":"ehg03 uyx121pwsyo 7rhvgcdfy2st8p 7ahaiboegtn5kd iu8bkj3imm3eo8d k2s2gg4g 2ys9hfx 8vpui26opm6n vsf5rfq6tqi2 jwxoe4suo4","quiz_options":{"score":0},"id":"4385176990"},{"position":6,"visible":true,"text":"c7lxx90luvc6y ogvbopnx 12v4swfg lofg3gcj dq1r6s8ptp5qho h04pqe67tbcnno xhyh0rp62kqkb dvtuwyu u89ppkdl876m","quiz_options":{"score":0},"id":"4385176991"}]},"page_id":"168831467"},"emitted_at":1674149694696} -{"stream":"survey_questions","data":{"id":"667462102","position":5,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"bwurewbkcd12 acvusgq 3u00c224nnv iyfj31 i9qtkl9tolx2fjr"}],"href":"https://api.surveymonkey.com/v3/surveys/307785444/pages/168831467/questions/667462102","answers":{"choices":[{"position":1,"visible":true,"text":"m6ihyhdpxt1 0xf9391 y8wvcnlw tfvxn6m 0bswb5w5p4","quiz_options":{"score":0},"id":"4385177002"},{"position":2,"visible":true,"text":"2kdjxc93o enx72tbxki oj43409eg90kt1 5fbnj d8rvo5c7a7vrgf 3c66h1 guo3c x6uv8kl qy0ttl 8r8dgqope","quiz_options":{"score":0},"id":"4385177003"},{"position":3,"visible":true,"text":"h3292 dtp4cjf11njvbhh huvgv1 onie86 tvdr09jg2lvp","quiz_options":{"score":0},"id":"4385177004"},{"position":4,"visible":true,"text":"16ua2fvav 75q7b2odhv fbw0xrnfn 2t3ivdrphr agrq241x d447hwdy rxa3g9gwe23","quiz_options":{"score":0},"id":"4385177005"},{"position":5,"visible":true,"text":"1dl0nhl0bx 7v1hc ooiv3gogf nxecupy","quiz_options":{"score":0},"id":"4385177006"},{"position":6,"visible":true,"text":"97geexnx4ussry4 s2x8sduenrfgwq y4s8f5v e6nrx0st23b f1xq1ax2xwl6yl c1to792d3i l0aq9 92s0aix8l31n5 qnnn38brgah","quiz_options":{"score":0},"id":"4385177007"},{"position":7,"visible":true,"text":"cg3w94v47ecgrik hviug 5xub14 808gw4 g0j3vn 5y58dv7vn8r5 xg72mp91 2l4ubwdo1wrw0t6","quiz_options":{"score":0},"id":"4385177008"},{"position":8,"visible":true,"text":"t92n3lmvuxaf5o5 29jh4afc h2wvqr9xf1q4w 27xf5d9ij1on qvbuiu7 8oghx0dt3nr5 g1mw6f9y6 muu0hhili","quiz_options":{"score":0},"id":"4385177009"}]},"page_id":"168831467"},"emitted_at":1674149694697} -{"stream":"survey_questions","data":{"id":"667461468","position":1,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"fn1i25upg io6aniuf yrrrp8vt07fo19 48d5ol09146s8m ldy0ebojp819g"}],"href":"https://api.surveymonkey.com/v3/surveys/307785394/pages/168831345/questions/667461468","answers":{"choices":[{"position":1,"visible":true,"text":"t56ghqeix s9huggwiub4 7tqxa1m0h7kj5 yk8r2 0f65tvl3fnpo 0gebiihpxcovnht","quiz_options":{"score":0},"id":"4385173226"},{"position":2,"visible":true,"text":"5o75tse0c okpssn319qiklp 5uyby90fx7n slfgxco07i ejp0kn xmr7ghykkxbypc","quiz_options":{"score":0},"id":"4385173227"},{"position":3,"visible":true,"text":"qe6tkjiccorb lxdmlb30 jmh6j4d6p vces42b344ry gbgmssamx0tvcs 785dl brkbvpl3ctq4nj2 yjjy0hh9b4sr yq2v7lhwtje33x2 7pbqmqgul4gqg","quiz_options":{"score":0},"id":"4385173228"},{"position":4,"visible":true,"text":"lyn712e8uyrrar 0qvy855feo 5l3egg 6t5uin58qrj1hj","quiz_options":{"score":0},"id":"4385173229"}]},"page_id":"168831345"},"emitted_at":1674149694802} -{"stream":"survey_questions","data":{"id":"667461471","position":2,"visible":true,"family":"single_choice","subtype":"horiz","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"wcrsas 0v14j5i3e8eb2an ymydb45sqwsn g8j35s649o3rq g582wdp eoraxaf8dc7gf qtdw5a27iucmesj"}],"href":"https://api.surveymonkey.com/v3/surveys/307785394/pages/168831345/questions/667461471","answers":{"choices":[{"position":1,"visible":true,"text":"nd18cy2gv u2bvpyki k0f38w8 qtlqut6ttsdyk83 pptqq84jmec6bfd a1aks3iy0i oykgg4gv0 busn0o8cc","quiz_options":{"score":0},"id":"4385173236"},{"position":2,"visible":true,"text":"mbd0ll7jes0qm 02w87s60 jslfxkj7yeh1 ol4yj 405yk1b kddsvxnk4hsw7 v1t7q9h8k26rt","quiz_options":{"score":0},"id":"4385173237"},{"position":3,"visible":true,"text":"jl49hsj6tjdf aq1njo56no2 f2gw9 eyqqekovg0c7ov kwwlc9b77 i4yu6aa7uxu","quiz_options":{"score":0},"id":"4385173238"},{"position":4,"visible":true,"text":"5v78pcotkwvvlr 7gvcf3xsd1s2 1y6oc8df 3n3f0 d3cg8ihr3ox9qdc rnyw7g 0lv7p6bka ia0p1tqjcew","quiz_options":{"score":0},"id":"4385173239"},{"position":5,"visible":true,"text":"xtha0tjj1 1j0vri su2xvilf9734 te51yhl03q558lc","quiz_options":{"score":0},"id":"4385173240"},{"position":6,"visible":true,"text":"w4st1 gljqfop06 2bu4fs7b fiia7kj22xl0i4v dl88vjnwo3pf","quiz_options":{"score":0},"id":"4385173241"},{"position":7,"visible":true,"text":"ivfna3y9ru jhxob vuncvdavfek1dsd 80ha2m2 8raod4lyb40 3r895onujni4onh xm9h0g6di g8e735xi ax62ju3eihf9a84","quiz_options":{"score":0},"id":"4385173242"}]},"page_id":"168831345"},"emitted_at":1674149694803} -{"stream":"survey_questions","data":{"id":"667461473","position":3,"visible":true,"family":"single_choice","subtype":"horiz","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"xc4wjnko0i6 g7p33u41n 1tjsntksw58hn4 ewweiwcgt 58rs9ek5qi1bgvx"}],"href":"https://api.surveymonkey.com/v3/surveys/307785394/pages/168831345/questions/667461473","answers":{"choices":[{"position":1,"visible":true,"text":"t09fsskd08sl88r 36sh46a2jva sd91wxy 5dncy1vabsf yfkliw o2spc5k ud2oktdabb7bv l76he8ba3y6 1veap0sd vh2aeoqb5","quiz_options":{"score":0},"id":"4385173256"},{"position":2,"visible":true,"text":"fvdf828g92n4cr4 jt9qg8529uihyge n69seyn8haq ido6m3","quiz_options":{"score":0},"id":"4385173257"},{"position":3,"visible":true,"text":"4n2rpc6iv82cene b7pvp 9fakf0 qmfrbf3","quiz_options":{"score":0},"id":"4385173258"}]},"page_id":"168831345"},"emitted_at":1674149694804} -{"stream":"survey_questions","data":{"id":"667461476","position":4,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"wojs6fb lri4g 9cd8kjrhy9rd3 j9s5e32o8i l82xpfkqgwhj jma6htdue e1kk3u071rcef 9ivvnisv asaqww"}],"href":"https://api.surveymonkey.com/v3/surveys/307785394/pages/168831345/questions/667461476","answers":{"choices":[{"position":1,"visible":true,"text":"j5lja4pu f0iq7pnkrk 9pra3hkah6abp 92wxt7ox6c dcukwklc k46geqe 2xk4g405p39t62 516tsgu xes5tkhjts","quiz_options":{"score":0},"id":"4385173268"},{"position":2,"visible":true,"text":"1l04q3mtxn75 t7ghyls2hqug j2hu42 uyoecoks6edsqns 57ya7vtyoyt u6k0gtnmf 1mm6wtektvp808 fkk89 qqprq k07oheboie1tfhs","quiz_options":{"score":0},"id":"4385173269"},{"position":3,"visible":true,"text":"9kvc37 gqyklsggws9 db070xge8y7fjr qmdym3ft1jp 5rh1txs","quiz_options":{"score":0},"id":"4385173270"},{"position":4,"visible":true,"text":"y9obh2bu8yk7e94 0ftyrjtcxhicd4 iobxi70wog4vf uokiplls1e ah0g8v mpsslhisijk lmomrov 07cfwh auqwaujyytig","quiz_options":{"score":0},"id":"4385173271"}]},"page_id":"168831345"},"emitted_at":1674149694805} -{"stream":"survey_questions","data":{"id":"667461498","position":5,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"0tljucg0y3kh l2fh1q1i umeldoo665wgb 92c0k ldeevedeust7pg icrfpt7m1cq l6sfwhhk3vtql qpue297bcaeb7 yrn4gauy007n8q"}],"href":"https://api.surveymonkey.com/v3/surveys/307785394/pages/168831345/questions/667461498","answers":{"choices":[{"position":1,"visible":true,"text":"8lcirsh0a t4ydqb06w0pqv 54k3qmnrjdyf j3ebgxwih2m4wa calo6wrbt 88n96xatpw3 hxaq56r brlwuuymmm jc5psliso3joc aoa41qwdhm0inny","quiz_options":{"score":0},"id":"4385173412"},{"position":2,"visible":true,"text":"bxc3u7guumu w525yaov3om5 3n0ld61 bhlpnw e252afl6giiia 56ybyswiyivgqs s1svdd30gf9kqks 522nxgw3obgj","quiz_options":{"score":0},"id":"4385173413"},{"position":3,"visible":true,"text":"dc3t2djjjqkvmfi cu7osl pxtlaevr117irr ji3qcr","quiz_options":{"score":0},"id":"4385173414"},{"position":4,"visible":true,"text":"qqr9ksi gob32wx p01fp4yqobput39 fryunx r020wh2ctxg 7hxy9g8m9m nuqel75h vwr1mjecaopo1hi o9fc3o","quiz_options":{"score":0},"id":"4385173415"},{"position":5,"visible":true,"text":"cf6162qvdywm crcubxjk5m5ju 7vhy5q5kj4j2q 5b55n9h w9v6c9n r7m6xx","quiz_options":{"score":0},"id":"4385173416"}]},"page_id":"168831345"},"emitted_at":1674149694806} -{"stream":"survey_questions","data":{"id":"667461513","position":1,"visible":true,"family":"single_choice","subtype":"horiz","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"18i8lhabavlr 32wsccj5f rx8pvk4unsgm7r s5x7es2uepdb 96xgtjo7voy08d7 n4e9p fayts khd1nb8pow rpjp6"}],"href":"https://api.surveymonkey.com/v3/surveys/307785394/pages/168831352/questions/667461513","answers":{"choices":[{"position":1,"visible":true,"text":"b0wj1y3wel57 ktqj3mdnpi dhr0k r3tasjvw 5mx1wclhjj","quiz_options":{"score":0},"id":"4385173452"},{"position":2,"visible":true,"text":"qott2n8w37qaai 1qn102yos4 imupcls4nm th4ob4","quiz_options":{"score":0},"id":"4385173453"},{"position":3,"visible":true,"text":"9uksb 862xwr8j1 6vxdyc4 goi32thkhq93 helvbkt266nnrw xvit2acd67olt pvvkx","quiz_options":{"score":0},"id":"4385173454"},{"position":4,"visible":true,"text":"ees3ivm7fphpd 9jj95pohh4 8sc972k8tyxk ge9j787 nxjkeee670jiec o36kcvbluxrp03 mb4e0et0qwqjxyc d13juonhq","quiz_options":{"score":0},"id":"4385173455"},{"position":5,"visible":true,"text":"294dtkjuyn ttbf6ikcy7s 84hmb t17hw 82aljhi8fwnf65 usuwl5d7ytrca x52ehyic447miq isobapv 3gkr1 2wphdit0p","quiz_options":{"score":0},"id":"4385173456"},{"position":6,"visible":true,"text":"nc1kush2rbxkqrr rhbdvjy xn4au2o0o94 xjq5ll","quiz_options":{"score":0},"id":"4385173457"},{"position":7,"visible":true,"text":"mh1igjt m7gteqlf8 q87g5gj n1g1w7s80","quiz_options":{"score":0},"id":"4385173458"},{"position":8,"visible":true,"text":"sfnq7n g13n21dx5cy aky2s t6h2y8j 5rbdufdcu7i ji8jgrcanxhxv piyrbdr72031gh6 7nna7 gn4gfqo3o 9tbatn8r","quiz_options":{"score":0},"id":"4385173459"}]},"page_id":"168831352"},"emitted_at":1674149694807} -{"stream":"survey_questions","data":{"id":"667461516","position":2,"visible":true,"family":"single_choice","subtype":"horiz","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"7gnta43 5s7jfw8axcw3tkf vkg8bmasyo6u 91wwhy c1pae9eu342 3tg0jl70wuoc9 yyxo4v8se myu4ls9xoaiqsep"}],"href":"https://api.surveymonkey.com/v3/surveys/307785394/pages/168831352/questions/667461516","answers":{"choices":[{"position":1,"visible":true,"text":"qfjo9n 5a7yv16lp71 ow2bbt3n2ke 0jb1xfpxf51s1u f4k77hkwmu wprku4 55311lxm0wcpw0 c53g4v24","quiz_options":{"score":0},"id":"4385173473"},{"position":2,"visible":true,"text":"j4n9jfdyr4cgs gfc0c a8trns3c4m 5mem4dona y097q wxpm5 q4gqos2ukx ufjxck1lmi1nn1","quiz_options":{"score":0},"id":"4385173474"},{"position":3,"visible":true,"text":"4kb04jje8c8d m12jddaa1iljt fr4qqhj 579w6 6u57xdn gaduvgl0u3 887ffq2vq9b58 q4r02mfsjdx6 3v8yskph8uyu srpxeiuxm0ber3","quiz_options":{"score":0},"id":"4385173475"},{"position":4,"visible":true,"text":"biom7l20wjpvl la1rwpf51ia1ex 23gosg8xvae87 ev2iaj4bo c2581d1re2 93rl9 tmj3467ajulb","quiz_options":{"score":0},"id":"4385173476"},{"position":5,"visible":true,"text":"om88wcjqp6f 5fn54j nmdg9 1sriyr pt6p1h4cm vkdmgxanhh 96847e1mgp l8b2t jl8oxa ifs1c","quiz_options":{"score":0},"id":"4385173477"},{"position":6,"visible":true,"text":"l8bo1yes158 w294970vmw33c yp6x582 6sa834 j86xhb qfm2f2 tj7gt7g xkgtymut","quiz_options":{"score":0},"id":"4385173478"}]},"page_id":"168831352"},"emitted_at":1674149694808} -{"stream":"survey_questions","data":{"id":"667461517","position":3,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"uy1ifum6sjcimr he9dly7g it4v8exuhulo ffdod9aof90 9cdfum2phl11nin 5habw5hvuj9pqio ce6ftayekj"}],"href":"https://api.surveymonkey.com/v3/surveys/307785394/pages/168831352/questions/667461517","answers":{"choices":[{"position":1,"visible":true,"text":"gd5221owwi1 tjya80m2al3k62t xq0sa9fp leyp52b5ahghpaa","quiz_options":{"score":0},"id":"4385173481"},{"position":2,"visible":true,"text":"lk1rk9 94xju3kfoc1w5 8dybd90 9qdq98 rxu5edq9gs99 kks85ifc03qye","quiz_options":{"score":0},"id":"4385173482"},{"position":3,"visible":true,"text":"ykpin7yh wv14vdjfwf0l 295s1en8cwa2 s6vafhu1fops 8k2vvkt9pftfc2 d1jxuoudjb 614hx251hr6fouy x3w49ql1 7b50ohc76","quiz_options":{"score":0},"id":"4385173483"},{"position":4,"visible":true,"text":"dewxv4aajhaw yij71ndhch8 76bjhg71csdc4w 5gn1l cmu7yo vdn7mtuy 6oplwmikjyvv1bi l3kw7j","quiz_options":{"score":0},"id":"4385173484"},{"position":5,"visible":true,"text":"kuoin09q0 lm3vqjykpb ji7lrp5ylkt58 qb0mhh86kcgm crjjoqq gvfejauprgj mlsjegwm 7psv8r4tc4365j8 98t96g2ifjm","quiz_options":{"score":0},"id":"4385173485"},{"position":6,"visible":true,"text":"5c8y8 f7droof ha5vfclhyivm6kl bfylm txvwxasx1j9l qpyifm4y qwhcsobdo 3ui2qts 97h9i1v5jv5g7","quiz_options":{"score":0},"id":"4385173486"}]},"page_id":"168831352"},"emitted_at":1674149694809} -{"stream":"survey_questions","data":{"id":"667461521","position":4,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"pgb7ltsb96ed dep0echkqixfc l4rcb230c wl14ohr2hre1o fn94g0r87nde"}],"href":"https://api.surveymonkey.com/v3/surveys/307785394/pages/168831352/questions/667461521","answers":{"choices":[{"position":1,"visible":true,"text":"55yon1b4lyavl 6v6trvxwrc8 s1hwd2 d4hrxv9tbga9e0","quiz_options":{"score":0},"id":"4385173492"},{"position":2,"visible":true,"text":"x8ak4mn2 601o7cb6qnhxox2 k8q23 24nb8q6tx diuko kre6p82em vrf8yxch2 53klbp1xtber s29v8xwbiqm4s8 trwpy6qd1464q","quiz_options":{"score":0},"id":"4385173493"},{"position":3,"visible":true,"text":"s1pxp wsngl9et r2o40 18ltd3 r1b8qpyqskyx a94qup","quiz_options":{"score":0},"id":"4385173494"}]},"page_id":"168831352"},"emitted_at":1674149694810} -{"stream":"survey_questions","data":{"id":"667461526","position":5,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"q859nk 369qqyw7scs77jo 9ji0c9vow2uv yjucgiioytvwm x0x95w 222dyntjt8pue 1ndsg06k"}],"href":"https://api.surveymonkey.com/v3/surveys/307785394/pages/168831352/questions/667461526","answers":{"choices":[{"position":1,"visible":true,"text":"tnea3634 p2lnkyut0h yllu3g pufn062o 2etbix7jtgiqx4","quiz_options":{"score":0},"id":"4385173522"},{"position":2,"visible":true,"text":"0qf4fyco1ea itos8brey3wgkp xje8mlffe4m8wlv a9b44 jgmtf","quiz_options":{"score":0},"id":"4385173523"},{"position":3,"visible":true,"text":"hc4a1wgbqqryji 4x7enmw1nam8o1o yn3nxtni y7bwpr wm1oww66f5ox","quiz_options":{"score":0},"id":"4385173524"},{"position":4,"visible":true,"text":"0giuwu7l3v 5ufp8 u3mupcfr0 r8977cguqc2 hrfeq0ug7wo0 fjmkook g8up66tqb7shgg","quiz_options":{"score":0},"id":"4385173525"},{"position":5,"visible":true,"text":"xp5qsik ipf6q be83j63io 5kw68q bwb1mxf gt4l3besb ikfckl9f f3lgigcoib t8wa3hjgn00 csln5ikv2a","quiz_options":{"score":0},"id":"4385173526"},{"position":6,"visible":true,"text":"ai0mef32co1ohl qfwbhctboxq89ye u5v2kyq ebo7e 2u7q9yvlgd1oc 8uxx84l65n7lf0","quiz_options":{"score":0},"id":"4385173527"}]},"page_id":"168831352"},"emitted_at":1674149694811} -{"stream":"survey_questions","data":{"id":"667461529","position":1,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"5c2dy1fcqn80 athbhqoadncx3n v1lt9m7799 6s154slj6ga"}],"href":"https://api.surveymonkey.com/v3/surveys/307785402/pages/168831358/questions/667461529","answers":{"choices":[{"position":1,"visible":true,"text":"rqa77mh3qxuc 6c5uywhchx1bgd f6or0t6s ds7hiuwb3bu9 u9x2d8hs 52pocgqntfeejo4 oaryhk woq1xb7akj","quiz_options":{"score":0},"id":"4385173536"},{"position":2,"visible":true,"text":"hxex6h1upkfw81 pad50wjl 8fo1fiqvtqfe 3xg4xms66g6l luckdtn1 udk8r284o iwpvh21 xotxpvww6p29 4gmqucj4 my8vynnaemow89q","quiz_options":{"score":0},"id":"4385173537"},{"position":3,"visible":true,"text":"jyb1qcl8o2gtte jlwtdhkxh65a9e q6xfymirg7g 24hkqyd0x0 9w4wb4dostk 6i8s8mh5 rxmq5y8ti r99xgye0urvl bdq2598nnha o6gaqqdvg5q","quiz_options":{"score":0},"id":"4385173538"},{"position":4,"visible":true,"text":"qtgjiim109 xoslnipbfe v28m271knt yquk8m2j96w6 xff8kw1 1jhr5l14","quiz_options":{"score":0},"id":"4385173539"},{"position":5,"visible":true,"text":"me39vca xqytbsfb 8uiad g1elq77xy1 tv88etyc gj1g8580e1lbmg4 bjnj4es","quiz_options":{"score":0},"id":"4385173540"}]},"page_id":"168831358"},"emitted_at":1674149694919} -{"stream":"survey_questions","data":{"id":"667461530","position":2,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"v2whrhb82ev kvvqlgl ek50nx1ar9 a7h4fo9xuaynt2"}],"href":"https://api.surveymonkey.com/v3/surveys/307785402/pages/168831358/questions/667461530","answers":{"choices":[{"position":1,"visible":true,"text":"e8nxccvdcdm 6wfcehdl1 o7g5ce64yvs efob6e 9bj7ny6aipb 4dc2wsqwtr6u24 xsxa081k8 4fyis3oyl9t42 lmu0hhgj4ok24m6 fg0p0lt","quiz_options":{"score":0},"id":"4385173549"},{"position":2,"visible":true,"text":"a5cw9gn7fy95q 8ui6bhr1j0djvtl sr2o8a3 kojha7w 1itxl7twgq avmrl2q5jbwds9 gva3tqhpgk8y5 v4aylod k5ci4cngn6h44jv","quiz_options":{"score":0},"id":"4385173550"},{"position":3,"visible":true,"text":"s3s7w5clw y2uvtas6qhvbk qc1vwrlyi2bk goqfgfe1 1ayx9l73nb78dq a0ykwprnx8gxm1 xn695vlym9rt4mk","quiz_options":{"score":0},"id":"4385173551"},{"position":4,"visible":true,"text":"c4qwlt8a dgnsqhqut5cy iq1htk kitbw 5mwuqxv92w70s c8hxelnesk 75craldvy4w","quiz_options":{"score":0},"id":"4385173552"},{"position":5,"visible":true,"text":"qp2pgo2f63 wmlcuryftjwedj yq0xo a1n5ayjf jikmqnf4mdbw 02r05wsu14a146b","quiz_options":{"score":0},"id":"4385173553"},{"position":6,"visible":true,"text":"ootl4 kp8ae6ggbjabg hgusbwsla83 rnl7dnuqo","quiz_options":{"score":0},"id":"4385173554"}]},"page_id":"168831358"},"emitted_at":1674149694920} -{"stream":"survey_questions","data":{"id":"667461549","position":3,"visible":true,"family":"single_choice","subtype":"horiz","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"sn8gp1r0uc1og wobjh 6x0e6t4wug m6466nxyawm k81p20 883oi8u wgeeiqdcmxwa1rt mc0fnojb"}],"href":"https://api.surveymonkey.com/v3/surveys/307785402/pages/168831358/questions/667461549","answers":{"choices":[{"position":1,"visible":true,"text":"wy826 0uog090ox923 2m3arldq 9v6wr7xwvxfk lbxye5v0svm7 93ji5 vkfvxgr7iy 1r09yxnkpjm23 0bbktk3rfg0bscf","quiz_options":{"score":0},"id":"4385173656"},{"position":2,"visible":true,"text":"krqy1l9f dj0wppa p93gk rvffc v6cfo0 o1areevq8 jbhm4t gcc224tu 3thelsk2","quiz_options":{"score":0},"id":"4385173657"},{"position":3,"visible":true,"text":"f1cpwqowl4ju9 43sdy2 qdqyqw17lxw2v enee166iohhy kq5khb6s r3pucoufnsh hrhcpt q4ukten6scjuvg ldvapiy bg61mb0pf8ofd5i","quiz_options":{"score":0},"id":"4385173658"},{"position":4,"visible":true,"text":"eq2hnewfaec e9jxrip xrrkl1wh3rbcx7 23t2bp5535o2e qwjbhf5051mi iqgykvmdvsbqr 8s3wwn8tqc513c 4q3a2gr6uhdvlc 7dr6m","quiz_options":{"score":0},"id":"4385173659"},{"position":5,"visible":true,"text":"jth67q sto4nhh23s op593ly6lw4p1o gxwnyfr4m99nbvc dplgcarhd m24hy6i1d n28tg7mytetk","quiz_options":{"score":0},"id":"4385173660"},{"position":6,"visible":true,"text":"rlo3t 7ppem2 bx29gt ucqxmy0 jm67u2dynhq iqfpiyufxif 8lm6ydc9u16rj y7ombxa0meqr2","quiz_options":{"score":0},"id":"4385173661"},{"position":7,"visible":true,"text":"vijhvdm7x6h6 4swh9tom vcr06ugp1hl6 50al5 3ndajrt9ilers 2gnm8il0h ry9dtv jqpu16b0ukt qsb5rqrnmglh arno84j1kroy3","quiz_options":{"score":0},"id":"4385173662"},{"position":8,"visible":true,"text":"3ujkb4krosaln 4lk1ckhounns7qy ap07a65ruvnjed vygxbabompj8dn op8annyrush8v p6t0nd2qi w4pp18y4x lljwuc ubknhjf31m6l1tu","quiz_options":{"score":0},"id":"4385173663"}]},"page_id":"168831358"},"emitted_at":1674149694920} -{"stream":"survey_questions","data":{"id":"667461551","position":4,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"jf7fw46e14 vd9x8vs6f0av npyn7 ymxbaq24mqtvj bci1xcln9ch6302 eeghuyauh mjkw9hkx0oooduf 0ui0l93x 6m5tbpl yoplf8cqmbl"}],"href":"https://api.surveymonkey.com/v3/surveys/307785402/pages/168831358/questions/667461551","answers":{"choices":[{"position":1,"visible":true,"text":"rwk1x rtfy9upgpmj xkawlfe0vo8 3ounnnx3e9rl2d4 e8pqns3nwx pr5st9qy o6yi4klar9onlp innrgi2ua3f","quiz_options":{"score":0},"id":"4385173675"},{"position":2,"visible":true,"text":"pcupu8t3iodmw34 67g78ktu6rmr6nq 8nph4ohmv 9xtmtr2a7p9 6rb6h6","quiz_options":{"score":0},"id":"4385173676"},{"position":3,"visible":true,"text":"m6xlaj5xutmgw l08mgyjovq 6vk9sibk r9am0kbr 9qmm4d18mxnx u1sw4to 3a8ek ekp479980 hbuxaj2bf sbio7yw7","quiz_options":{"score":0},"id":"4385173677"}]},"page_id":"168831358"},"emitted_at":1674149694921} -{"stream":"survey_questions","data":{"id":"667461553","position":5,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"uue1b3 s4rj4 dawj4ao j5xhorntprj5p9s obfe7 y47vcjc s2gp2fh6 oysg9o5"}],"href":"https://api.surveymonkey.com/v3/surveys/307785402/pages/168831358/questions/667461553","answers":{"choices":[{"position":1,"visible":true,"text":"bf1i44 xxi5e2h7orevv2 d737wgpt4pu3 pmbeixrmvw1 ya3ww2x gpvmeo04nh5 sx2a7ey cv9ldq57qkus","quiz_options":{"score":0},"id":"4385173678"},{"position":2,"visible":true,"text":"ocmcxbjsqw o1vr3t 8fhpimedvui rn1hqm5m559bcq kw3138dxm77 vsdor8m v55p3f tn721 l4w4a6j3c","quiz_options":{"score":0},"id":"4385173679"},{"position":3,"visible":true,"text":"sme3nthgos 7c5bv0n80ymsa7 x02op26 jjum7w0s7lt2ll6","quiz_options":{"score":0},"id":"4385173680"},{"position":4,"visible":true,"text":"3p2mj4nlrsgu4d 8rygmmsqqxm2pl6 yshbvwq l5mcc5a kdpqvuul","quiz_options":{"score":0},"id":"4385173681"},{"position":5,"visible":true,"text":"o7ehnkg 134kywmgi wm8jcrpnl45if j86ffs6jfgcp 2e25qvayvwj18pp 3rduk lqjqyar2 xhq7yxb8i9ef6u","quiz_options":{"score":0},"id":"4385173682"},{"position":6,"visible":true,"text":"axh7312qg827nk s7tl78iwcr h1b6pv7hn6 o51sbag","quiz_options":{"score":0},"id":"4385173683"},{"position":7,"visible":true,"text":"onu50gtox9de40 v638eyavt886 mu6y85 6a54r1n9","quiz_options":{"score":0},"id":"4385173684"},{"position":8,"visible":true,"text":"0on3vhb3 94ujem74f4 0git5 fr73t6hds rfc4iyd4gwc","quiz_options":{"score":0},"id":"4385173685"}]},"page_id":"168831358"},"emitted_at":1674149694921} -{"stream":"survey_questions","data":{"id":"667461555","position":1,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"8nym349ifndc4 1ergxvcs rg8nxji7wngvfo ha1fx7qgh0p4m 3tkgf"}],"href":"https://api.surveymonkey.com/v3/surveys/307785402/pages/168831365/questions/667461555","answers":{"choices":[{"position":1,"visible":true,"text":"r7wm8q65rf0ed wj7kk61hjpqrt nkjjva6busq fuhp9098sc6 8ro2p6ww","quiz_options":{"score":0},"id":"4385173711"},{"position":2,"visible":true,"text":"wjxlbkjgaps2 sep73b5ia8yj5 a9wr0gq 9o2wqf7du1v0tl 81u2apxnt ww3588h","quiz_options":{"score":0},"id":"4385173712"},{"position":3,"visible":true,"text":"8r7tkvpljdu 8a667c88iw vtasj a5vacvo6pbxwsw 8in6kb nv2yeug1x 3lv9de7rw8rtq","quiz_options":{"score":0},"id":"4385173713"},{"position":4,"visible":true,"text":"obyau9nl8c9 jj7tk8n3wv1gb qmiuhuql 1m01qx26c213r6 aj6u9iw459er mxax2id3o8iv riondp2tea3 ay77ba gpl30307 440clb21coauy","quiz_options":{"score":0},"id":"4385173714"},{"position":5,"visible":true,"text":"0wm0f2w wykr4 9lcofhvrxdiwlp0 u8yf3 p1bwwa","quiz_options":{"score":0},"id":"4385173715"}]},"page_id":"168831365"},"emitted_at":1674149694922} -{"stream":"survey_questions","data":{"id":"667461558","position":2,"visible":true,"family":"single_choice","subtype":"horiz","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"0wjijbi1kwvxt h19om9 kjvx4 jqeprmna15i bwqfaihmlk0"}],"href":"https://api.surveymonkey.com/v3/surveys/307785402/pages/168831365/questions/667461558","answers":{"choices":[{"position":1,"visible":true,"text":"q7y7hkgfgi1plk s1nwdd 0d7dt14f q604qs 47cwawfbo m1mmdsyi3fsp3l r0qma8q f5buoh","quiz_options":{"score":0},"id":"4385173727"},{"position":2,"visible":true,"text":"sn5hskkfn0ykw if30ikcbmfjsxj g6u5fay eauotfitd2 9cg6qkon 9t0l88ops7633","quiz_options":{"score":0},"id":"4385173728"},{"position":3,"visible":true,"text":"c678ymx 92bxd icea1i4p cjdl771k u37cuw rnjvrihdi89s2x a1vef","quiz_options":{"score":0},"id":"4385173729"},{"position":4,"visible":true,"text":"uqlmm5yk5 1v12y59qw hv82bo91a0leef 111gmwav8pnwe np5mfx6nsq6 cxgilxkmtvmm2kw 8mb5wiet5q wdbyo993 uh7kcfhy59 sebg3lik","quiz_options":{"score":0},"id":"4385173730"},{"position":5,"visible":true,"text":"uq2uqj20gchd ly2k4d4goq1 ehe66mwx ei0a0d4ggv0al9a 744s2h u254g40o5m 0i9o7dqjdkrtys","quiz_options":{"score":0},"id":"4385173731"},{"position":6,"visible":true,"text":"nyy288shht 3eibqfcm69se94 de2u01w0k5o 342ouq sb68ogkspj61j3p blbhcqok 8vdd2u7b2 su21c5iwou qgi1dj","quiz_options":{"score":0},"id":"4385173732"},{"position":7,"visible":true,"text":"fkidasstba rfn3erxiv282 vy0fqx 3acgtlyka1 mykyngnc 8u2kws4tp 35wst9aglv2 chbms5wm974 ajaygp8vjkhq","quiz_options":{"score":0},"id":"4385173733"},{"position":8,"visible":true,"text":"lmp71nc3bt 6eic0k8kacvx 17ititl v9u25fh1x78 vgfpglqvf6j ajv15gaccdsa elo40oe8ht75c","quiz_options":{"score":0},"id":"4385173734"}]},"page_id":"168831365"},"emitted_at":1674149694922} -{"stream":"survey_questions","data":{"id":"667461561","position":3,"visible":true,"family":"single_choice","subtype":"horiz","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"su1ullpk4 t553g unva8lk6wfw7 ipvkkika"}],"href":"https://api.surveymonkey.com/v3/surveys/307785402/pages/168831365/questions/667461561","answers":{"choices":[{"position":1,"visible":true,"text":"j463gx5a 1n4fa1q94 x4yw9ack6xoh u5l2fmvwlvktyw7 kpgulj5y49q d03pdq 0bl58ibk39hpe g6m66xw y8ajm5qk2uyu 02pbmuywb7h9m","quiz_options":{"score":0},"id":"4385173755"},{"position":2,"visible":true,"text":"4w8kyesurm3674 est71dh9qi23fw jbowojvmon40p jeggkq5soym 4dgqd70 6fx65rd2f94b5t lnsm1pjg0bypfv jt7d9jj","quiz_options":{"score":0},"id":"4385173756"},{"position":3,"visible":true,"text":"atun8p07f006myd wotd669048pp4 j2wa6v97pbj d7uvpqvrv7omvxw p8ef7giw88ft04 rgn3uwqgx08 rhe5yu0hg16b tdaflfh 2jpl4e","quiz_options":{"score":0},"id":"4385173757"}]},"page_id":"168831365"},"emitted_at":1674149694923} -{"stream":"survey_questions","data":{"id":"667461580","position":4,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"in0q9d6e580p28o 65hh3n3rbth4000 jowj89wgvpem9u lc918vs7s pt2r8mt n5t61mrm1nw i64uqjqjxw jhpgayjd"}],"href":"https://api.surveymonkey.com/v3/surveys/307785402/pages/168831365/questions/667461580","answers":{"choices":[{"position":1,"visible":true,"text":"d1k3kg doxgp648opvyvwx o6xwbacsy2nvo oglkc2p5fegn l7cfksxmkd3ekk es7rvcb8t8v243","quiz_options":{"score":0},"id":"4385174059"},{"position":2,"visible":true,"text":"tc374t uo4de jkgh56fi 7ttvi102i5fbjw 78i6rml o5gekwgh5r rtxw6mrnqi9","quiz_options":{"score":0},"id":"4385174060"},{"position":3,"visible":true,"text":"mfwrrhume d5c5kcy7n32d ovt2xo d3cm1 shv19 r339qbp","quiz_options":{"score":0},"id":"4385174061"},{"position":4,"visible":true,"text":"l4pnl52o9gc 6q4ufrpc7q bw16kqt 6bagaclfl21f9 x92gg7slb2h gs7qxwmj6mg o81ntc46r8q3 3tkbuhrp0","quiz_options":{"score":0},"id":"4385174062"},{"position":5,"visible":true,"text":"9nbdwy6f06wn g2tdvcsqsg 74jmcd ulao4 i4f5y r3yn39fp vsxqmxocm59s10d 9l98r8lp7554 eek3weuw","quiz_options":{"score":0},"id":"4385174063"},{"position":6,"visible":true,"text":"l7am7i8hxiy u3ylucwt8lvms bl5ftdk19mxheie gt5x6 4eiq6 7an0oa731ay2p a9gyy1qsjse xif4ton3b hu5v6cg kuasc8ce9ihbjxi","quiz_options":{"score":0},"id":"4385174064"},{"position":7,"visible":true,"text":"iw5t0k3fr7tv454 lmsbkhfkfx6 vq5ds3yhq20b2 jom26vaad fu7w8 f0t9nanj2","quiz_options":{"score":0},"id":"4385174065"},{"position":8,"visible":true,"text":"18u6s9r5 na05j55lqd i6its t96py9n9q h03ueyjkfewy rhiv9a8nxh 3adtql0ksc9l2s","quiz_options":{"score":0},"id":"4385174066"}]},"page_id":"168831365"},"emitted_at":1674149694923} -{"stream":"survey_questions","data":{"id":"667461598","position":5,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"fg2ok1k5x73j xi2i2leb9xip0jl nbjab ut48risbmg0 cmxjg1y14a b350n2yl90sq r2fpg 9u3fc44vd"}],"href":"https://api.surveymonkey.com/v3/surveys/307785402/pages/168831365/questions/667461598","answers":{"choices":[{"position":1,"visible":true,"text":"6ia2cfy435ajc4 qwja8 186ntjx 8jshqf5 i32esbjh08 yhf12enewt5 8shld76x18d18 qw01jndxbuhj50 4mtgy6fbn7nfbs","quiz_options":{"score":0},"id":"4385174182"},{"position":2,"visible":true,"text":"e5phn4tqfq 5xee0uu5x v4lw52 hkhk8lpakvb 304yi j23bhqyat9h5c j52nrstj5cqo3q5","quiz_options":{"score":0},"id":"4385174183"},{"position":3,"visible":true,"text":"swapu0ru 89dqkvum0e2 ybf7xmg d0atxxfgpb3ag ylnjkwaufdj8 rj6dedrfte xkc6hqtimwg","quiz_options":{"score":0},"id":"4385174184"},{"position":4,"visible":true,"text":"n4l9n4wq5yas blbdmjmr nkhx7l gh95jmffy0p69j 8kg39ic3a05l41 dmjd7","quiz_options":{"score":0},"id":"4385174185"},{"position":5,"visible":true,"text":"du734l u98n1d5ovgxe 1i94woho8w2k u16an6dw ha0sc8v11aeep 1p9pawo5f6v 9s6ysd nv3v3","quiz_options":{"score":0},"id":"4385174186"}]},"page_id":"168831365"},"emitted_at":1674149694924} -{"stream":"survey_questions","data":{"id":"667461606","position":1,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"f9jgr myr928nokvyw5jg fqoan6w aqdnfas0gapwd 9hobof x7d7vy7xd9il kh6cd dteve1vnaoq6o uccii826ldkj1c"}],"href":"https://api.surveymonkey.com/v3/surveys/307785408/pages/168831382/questions/667461606","answers":{"choices":[{"position":1,"visible":true,"text":"1ak71apsl1lb2 dtgtym8ftk 52syq wtnfautgqxev t2sp9icu3465pll i4rhctmtx rh5yifue1 k91tlt","quiz_options":{"score":0},"id":"4385174252"},{"position":2,"visible":true,"text":"3t2uxb my2rtujx8njmbtm a2cyspuk yvfub6ofm8a9 819o7h8o eac0xea","quiz_options":{"score":0},"id":"4385174253"},{"position":3,"visible":true,"text":"4t3bj1jwi5bo ofhe7 cg6nbcys7lu v2qv7q11u48 7oowwv12fndqe","quiz_options":{"score":0},"id":"4385174254"},{"position":4,"visible":true,"text":"7etbg4thd4k k6d0nlv 02cobf bby0p2i","quiz_options":{"score":0},"id":"4385174255"}]},"page_id":"168831382"},"emitted_at":1674149695048} -{"stream":"survey_questions","data":{"id":"667461628","position":2,"visible":true,"family":"single_choice","subtype":"horiz","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"fx8h961tahe na1oufba ybjyqi qm6girwad8b1xq"}],"href":"https://api.surveymonkey.com/v3/surveys/307785408/pages/168831382/questions/667461628","answers":{"choices":[{"position":1,"visible":true,"text":"lakrew7c6qd wsih7a2nkq0no4 94gtxvs36 87wp1kbadg1l 1hkmpi6 2g1juofi5a fmmygrjehmcuygi 0bs6ka 2pj0gvmjpd6b","quiz_options":{"score":0},"id":"4385174360"},{"position":2,"visible":true,"text":"jjsfl l1sxtcih8fyeeof 6m9lk94qqmuaegl ki7wf11a9i93 xypqh9bjq1av7d1","quiz_options":{"score":0},"id":"4385174361"},{"position":3,"visible":true,"text":"eb4np f3xpqwjcv ypljmiu0y337 lu913m i9uet4t 6dvp7afcy3uqh 1xiu25 1qqn7cyv","quiz_options":{"score":0},"id":"4385174362"}]},"page_id":"168831382"},"emitted_at":1674149695048} -{"stream":"survey_questions","data":{"id":"667461630","position":3,"visible":true,"family":"single_choice","subtype":"horiz","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"576vlo18en3 1bxycqddfhn6v rsbd7a1e4bbyf ugieqgvywq4 g3qltn1jy8 0ebdn 78lxt9iwu1jx rbdjre1cujgtsq"}],"href":"https://api.surveymonkey.com/v3/surveys/307785408/pages/168831382/questions/667461630","answers":{"choices":[{"position":1,"visible":true,"text":"5k5p528j2o b9frywomc0 aukxrjdqt wiuvud9asln2f3w 6a4mdsa22mygo bnwh1sv 5l5s9ae rbo5hecsrd w0x40 6a86lloan5","quiz_options":{"score":0},"id":"4385174370"},{"position":2,"visible":true,"text":"o8jtnh4keupcl6 ff77c s862cc89 avdo6n25pw","quiz_options":{"score":0},"id":"4385174371"},{"position":3,"visible":true,"text":"vq7kbm97 p8svkwom2lo dg91j0 i570c 7gve09y 9xn2svsiu 22oh8ub7crysocm","quiz_options":{"score":0},"id":"4385174372"}]},"page_id":"168831382"},"emitted_at":1674149695049} -{"stream":"survey_questions","data":{"id":"667461651","position":4,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"799t8 uw9o03 89r6ndohi8r8bvs gv887i 14be70qd88w qgux0yh3"}],"href":"https://api.surveymonkey.com/v3/surveys/307785408/pages/168831382/questions/667461651","answers":{"choices":[{"position":1,"visible":true,"text":"dtvq3u jyp1h165k86n y0rkc4ich02nw qshv8da ppf7j9x raeq9k abqfxgd","quiz_options":{"score":0},"id":"4385174481"},{"position":2,"visible":true,"text":"i2muubbwbuf 3ffi5361bv wdoq2xkw2hsgpkj t45p9rr5mhi 8xwetg9 gc9u9p4hpmkkev do1ik5y 1439x s9k4vbcj4uaay8","quiz_options":{"score":0},"id":"4385174482"},{"position":3,"visible":true,"text":"0bwq2juhggn68 d2ioj rs8ybqs07gxkcri pgqiiha ir4cb94rbh71q u5mssukw nsyithmc5ismqc fyjxgqjx48fsdp dcua4s9spewq63","quiz_options":{"score":0},"id":"4385174483"},{"position":4,"visible":true,"text":"t3298cfyqx7toco rvkbw4338qk9 k5vd0u42if tkkkkm hfxufgj1i3f7vm8 69yd8hxcl5 swv8ipasty 4g6qsfyb0hdpa mx6s5tmxs","quiz_options":{"score":0},"id":"4385174484"},{"position":5,"visible":true,"text":"o7itx tcbcw8snyw 6jmor7biu7 x59bcw2hwr t94tgt axflfvftpmd0h 46td0yk 67ydkpti6t m3v2de7","quiz_options":{"score":0},"id":"4385174485"},{"position":6,"visible":true,"text":"u7s76bgo3nsbob i7yi958x br6wqw77f3mv b4f5yubeq q7ubaeju1t x5t0cx1bc25jhy acvms2 pggajefpr6 c7prvy9 6ain0uox3g3rd95","quiz_options":{"score":0},"id":"4385174486"}]},"page_id":"168831382"},"emitted_at":1674149695050} -{"stream":"survey_questions","data":{"id":"667461652","position":5,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"ske92t16t19co1u 3qou5xj75ksuy 1rfxe q5tbfl9xa6dx"}],"href":"https://api.surveymonkey.com/v3/surveys/307785408/pages/168831382/questions/667461652","answers":{"choices":[{"position":1,"visible":true,"text":"gn0e03jdsij ops28 wxpn34 o9q6o9otb uxw5rwr5xh g855ayu37wsg","quiz_options":{"score":0},"id":"4385174493"},{"position":2,"visible":true,"text":"6yft8d7xv vwn2h vrlpr8r7ppbnfr vtvgbje916jf5 kv7g2ax 292bgj62fdk","quiz_options":{"score":0},"id":"4385174494"},{"position":3,"visible":true,"text":"iicugnyhyj 0t742iccrlgnc olaw2v e67g2xd1etgcq","quiz_options":{"score":0},"id":"4385174495"},{"position":4,"visible":true,"text":"55ganyc qk6vvo 1gfswk294 t4qxy0nirv3a y1wr6vmvd","quiz_options":{"score":0},"id":"4385174496"},{"position":5,"visible":true,"text":"5hjxv2qf8jr svkm4b90cs6n7u 44ewbk0getyk g6hhmaqvlcdj9g7 if6lqdknx rr5abg5ylw7ho2f rabuvef2t0k5x 0a7x5oqof9xhi8 o8tv6ke3ev7r","quiz_options":{"score":0},"id":"4385174497"},{"position":6,"visible":true,"text":"sy4uf cxu4rsabrlmaw kdov7 hx6ta8d4c0x290","quiz_options":{"score":0},"id":"4385174498"}]},"page_id":"168831382"},"emitted_at":1674149695051} -{"stream":"survey_questions","data":{"id":"667461666","position":1,"visible":true,"family":"single_choice","subtype":"horiz","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"ssubl 48hjj9vj31v omr18dpic pp65f94 j35xxoh u04pidx8dp7i bo1ae4ptsntu3 o6v99q"}],"href":"https://api.surveymonkey.com/v3/surveys/307785408/pages/168831388/questions/667461666","answers":{"choices":[{"position":1,"visible":true,"text":"04s0qv msoghfkw0fr4hmq kdv92tuo2 l8htirpt 4h2nxjcla5rjor n1wy2w5","quiz_options":{"score":0},"id":"4385174598"},{"position":2,"visible":true,"text":"64o6m jya4p7twvvs ls75jv9lvf 8gr7y35au5hcqfq","quiz_options":{"score":0},"id":"4385174599"},{"position":3,"visible":true,"text":"x6ijo4tj5qrljkh dk5dmknmhn 3rm63 kdym4 bxjsru9kvh1 g2vp966a8nkh 6dhh6k99a88gt 9b7emois0ldfr swbnqor4k66","quiz_options":{"score":0},"id":"4385174600"},{"position":4,"visible":true,"text":"4uhchfo7n2wmrtx e211emk0v53w a7ckw5lg40n qrx8pw0r5xrph q8ndfdm5g08tex8","quiz_options":{"score":0},"id":"4385174601"}]},"page_id":"168831388"},"emitted_at":1674149695052} -{"stream":"survey_questions","data":{"id":"667461670","position":2,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"lbje0ppud4so8xk ax7vnbef0l3fv7j alcnsi48jehkf8c 9fghb3uop 8qfnul9 w8d6lma9 ejcm4j9x604p t7b7dk"}],"href":"https://api.surveymonkey.com/v3/surveys/307785408/pages/168831388/questions/667461670","answers":{"choices":[{"position":1,"visible":true,"text":"i6exa76ck98 9pkrhl37j88s 8k6dvdhxg5axw3q 0dtn8xt6 ji1gm0qu 9pie3iotb","quiz_options":{"score":0},"id":"4385174620"},{"position":2,"visible":true,"text":"knjsfkea66j bp5sxiba qa1yvg m3vul 96hgbjtin7ux7","quiz_options":{"score":0},"id":"4385174621"},{"position":3,"visible":true,"text":"jf90cc rc80q71mvp1 8f3qlcc4pls wvixh60l7b","quiz_options":{"score":0},"id":"4385174622"},{"position":4,"visible":true,"text":"u8275ses4k sq0bax8 vbpfs7qwh yhbborapj3ai t6eo1n6o9rf uitrqawsi p2oe4x5ie","quiz_options":{"score":0},"id":"4385174623"}]},"page_id":"168831388"},"emitted_at":1674149695053} -{"stream":"survey_questions","data":{"id":"667461674","position":3,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"7xuudm hd85d2fbd49 hxo4gmsnhaxn53y r3oo0piiij5 hvifmm5 mfep196v1yi q9o6w9gksyedtgs o3y01cyw4ca91"}],"href":"https://api.surveymonkey.com/v3/surveys/307785408/pages/168831388/questions/667461674","answers":{"choices":[{"position":1,"visible":true,"text":"7h3hoeucihjd41o 4la76h8i8 8dgl2v5eub 962jah6wws8 qdtm85x9t3y96w5 mkq9yvj 81nsju0 koqkfhpe7pw","quiz_options":{"score":0},"id":"4385174634"},{"position":2,"visible":true,"text":"vpmf8jyppptscjq 3ujxkee7dye ggqm9tg9svjgx ashilo1ffin","quiz_options":{"score":0},"id":"4385174635"},{"position":3,"visible":true,"text":"nselknrgymf vq2vd1efu 3a9b7whs2k7bj5 nujxdbcbg8qcjp 2va7wdag r0k00wo6 bd5u5 btk0p","quiz_options":{"score":0},"id":"4385174636"}]},"page_id":"168831388"},"emitted_at":1674149695053} -{"stream":"survey_questions","data":{"id":"667461676","position":4,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"0g353xw0tmg9uj yjpp4gp6d kkvtb90xqw7se5s 1rtjhd"}],"href":"https://api.surveymonkey.com/v3/surveys/307785408/pages/168831388/questions/667461676","answers":{"choices":[{"position":1,"visible":true,"text":"qi3ml trfm3r0 hij01mx6lt6 olgi265lrg7a45","quiz_options":{"score":0},"id":"4385174643"},{"position":2,"visible":true,"text":"sip0lix7kpf g1vjmce6cj u3qgbnitdp2w3 gxgg5cc 53rn81g6mr 008agselt2cfwmo","quiz_options":{"score":0},"id":"4385174644"},{"position":3,"visible":true,"text":"29qp4n5j92gre bwkmqp821xab2ri kyqdft g46vvp19jlvq1k 2sgm90q2b6 1ewvm 8491gfakn3p esqiug2cxmg6ka gb9cdrtlc9qs","quiz_options":{"score":0},"id":"4385174645"},{"position":4,"visible":true,"text":"mcijdsetfwvx 9b6tmxv phlw18ap37 20s5wv","quiz_options":{"score":0},"id":"4385174646"},{"position":5,"visible":true,"text":"3g748xwkq6o 2ut8g nb63dyj stbl5wm 2dkk61h6 29bjrer dxet33 v6f36uqes9qu vgqoxu1nmi5lm gtuqaph","quiz_options":{"score":0},"id":"4385174647"},{"position":6,"visible":true,"text":"j5p31 t1ltv5rpt 85q5tlwq2sfsv3l bh4a2e rev7lbj7 2qwf83ylrc3n fiufpgf h3fp2fd0y5o uw3phwkrek ihy0faqy9s1rsp","quiz_options":{"score":0},"id":"4385174648"},{"position":7,"visible":true,"text":"prd59 gqs7t8tmls 633ujsd7d9ni u1fkx5a6mx318w k71rc82lydu43 orkxf utbtip9c1ky72","quiz_options":{"score":0},"id":"4385174649"},{"position":8,"visible":true,"text":"dc5yl63yj6lv jqeok71lte 4leao9ms1xu3oh 0fdb4m50ip t31tl6q57jjpmh v2osljafdca1x6","quiz_options":{"score":0},"id":"4385174650"}]},"page_id":"168831388"},"emitted_at":1674149695054} -{"stream":"survey_questions","data":{"id":"667461686","position":5,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"987ob 7kiuo4teh1 8s7deqmvpopsp txo4fxjj5e 8eqdafh1k7yjhwf 889ll 0s23juj5vyyq99g 7390ddsh565 lpni4hn1bi0"}],"href":"https://api.surveymonkey.com/v3/surveys/307785408/pages/168831388/questions/667461686","answers":{"choices":[{"position":1,"visible":true,"text":"1vdqk14 sjvrskliu ejn42p38f ougj5qtxi6k5q 6d2ccy9073i9","quiz_options":{"score":0},"id":"4385174676"},{"position":2,"visible":true,"text":"b0yc27 c3v3i nhneas7k4y dqth374wqfs9hqf eg6g53l03g 8fg7266kcffpx7 6i9d7rffvncy s0f2on","quiz_options":{"score":0},"id":"4385174677"},{"position":3,"visible":true,"text":"k2kv88vnieg5pk fxdpcorngy bmidk g2lb58 qkbeew82lmprw","quiz_options":{"score":0},"id":"4385174678"}]},"page_id":"168831388"},"emitted_at":1674149695055} -{"stream":"survey_questions","data":{"id":"667455128","position":1,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"21b2d1nm vcduxp396s3f vrgn9riooeu 2w1y9r0lhe5j0po cfv6aya xsek4vnv4"}],"href":"https://api.surveymonkey.com/v3/surveys/307784834/pages/168830050/questions/667455128","answers":{"choices":[{"position":1,"visible":true,"text":"1cqki1ijt cnkrfpewyh hloep tun23aq fm2hlturu wp1hn1005","quiz_options":{"score":0},"id":"4385137324"},{"position":2,"visible":true,"text":"8q54jh807g67 0chem4m32w 6g6cg4kfbpyp48 wgbo0l1 3ivc27 h1g0jagebgvj36d loo7agxubx imfmbchanx4w47p o4rfv9","quiz_options":{"score":0},"id":"4385137325"},{"position":3,"visible":true,"text":"axjh0ev2g5g0s01 d1f1ekg41aq 1r7fcho4 adaclkcgj0ra1h5 obp9ot rak96hmu bx3804jjlr5vg wruubenes3g4p h41e18dufrs40kb 9t34k1uad6g2a0y","quiz_options":{"score":0},"id":"4385137326"},{"position":4,"visible":true,"text":"0rvnqa934hd356 yx0p7kab01kubvj yuy2tik0o6t 3854yf6up2mgfc kqn54h6qvfoj jecuuxm0d9s u5d8y0hmv5bcww eumwsigjumvc543","quiz_options":{"score":0},"id":"4385137327"},{"position":5,"visible":true,"text":"sutsvkyg7 x3pr1f8 gnn8yg 32ls0it6m8c5c3 5xhh23hb0bck c5kbcphdgm 8n1839 8oe5f4wdsxv oyijcnki143a","quiz_options":{"score":0},"id":"4385137328"},{"position":6,"visible":true,"text":"a14e7gtqqbrm ei6h8ynhoi92vs dq0oj6m rulpoxmdtew b1cnc7v 8pxkgtbq6vu0o","quiz_options":{"score":0},"id":"4385137329"}]},"page_id":"168830050"},"emitted_at":1674149695180} -{"stream":"survey_questions","data":{"id":"667455130","position":2,"visible":true,"family":"single_choice","subtype":"horiz","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"9no46q bgj8bu9 qbwy78onx0sp5df 1s2n38k6ly8xo5 xanlhf0dqvej htj1rl0wp ip01j7fb6qm"}],"href":"https://api.surveymonkey.com/v3/surveys/307784834/pages/168830050/questions/667455130","answers":{"choices":[{"position":1,"visible":true,"text":"7dpysnial1 w484xmm33rpralf fsb2j19 r599iaw30wgp gk8tit5i681m 14lsb5gmddkr qrncp46 vp0lw srtca1i587dq","quiz_options":{"score":0},"id":"4385137340"},{"position":2,"visible":true,"text":"0yflf5ua3 5l5tu93l 45fu03hrvt kii5twmbs6n uqppc ca18nfpevvny 8od85fpdw gc9isqks5ou7","quiz_options":{"score":0},"id":"4385137341"},{"position":3,"visible":true,"text":"uuv580b3 ejgygxa3nxuod qx2s8cx85l1vu c96g3513rcjdmuv 161c5f mrclx05fg2o 9r8lk172","quiz_options":{"score":0},"id":"4385137342"},{"position":4,"visible":true,"text":"8jhq2vl1hj6 dvnvp6su wshwoa9 unbnfs wjkm9w4w5w glupwsmbjeb3 26q9svrti 21b4potlf4d","quiz_options":{"score":0},"id":"4385137343"},{"position":5,"visible":true,"text":"y1xu41qdgnk hw9yooajs6n xhaqwoc8 hxi6ioaqcnf3 le1ein6yt","quiz_options":{"score":0},"id":"4385137344"},{"position":6,"visible":true,"text":"y7vgshrq086w 18x5gdxg0 ibct5mdxqm78py aq9lw3kx kq3rfwnnod q4a8qk7q 2mytdfvtgxe q7w14xsgcfsw4","quiz_options":{"score":0},"id":"4385137345"}]},"page_id":"168830050"},"emitted_at":1674149695181} -{"stream":"survey_questions","data":{"id":"667455161","position":3,"visible":true,"family":"single_choice","subtype":"horiz","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"8piluvv5gm y4fyg bdn90lnhlw hlp49ut5by 1wcskj bo4qich35bkbh84 9s7n4art qxh35"}],"href":"https://api.surveymonkey.com/v3/surveys/307784834/pages/168830050/questions/667455161","answers":{"choices":[{"position":1,"visible":true,"text":"7v9pb77j2u5h8lq fjdckl44c6tdi oxtly8metbj35c7 2coi267okn 772jt3x4 8pdrkhod14i eeyu3ox bkl79rtci65 0jt9nxkpamu5","quiz_options":{"score":0},"id":"4385137468"},{"position":2,"visible":true,"text":"w13utk5t7u98 ltxgdnhqgh3 tv729vqrf2g8 a3kf9ym ddp3yd ryqfnhyyshm","quiz_options":{"score":0},"id":"4385137469"},{"position":3,"visible":true,"text":"4o86igx uap6ssaxn rej7oi49j4nj g1afe aj0rahjii gdy2ygm yrq0r33ljaqav","quiz_options":{"score":0},"id":"4385137470"},{"position":4,"visible":true,"text":"ywmratfmy ko10n3cmegap9f vor9gwdvwevpi2 7oid9t72lp yc37r5pssxrm d9fdyo6kj9g5","quiz_options":{"score":0},"id":"4385137471"},{"position":5,"visible":true,"text":"l1ei3wav57twf yn5mmi2almiidx7 gdkyhn 3npig9sip","quiz_options":{"score":0},"id":"4385137472"},{"position":6,"visible":true,"text":"rr84y ht7saym46 rqxpn8 qix3c","quiz_options":{"score":0},"id":"4385137473"},{"position":7,"visible":true,"text":"dtaeau8 wv5ay39bjtph6 uxvtv8 bfa13j2 t1cxi9sgxs9u wh5it","quiz_options":{"score":0},"id":"4385137474"}]},"page_id":"168830050"},"emitted_at":1674149695182} -{"stream":"survey_questions","data":{"id":"667455172","position":4,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"f4i385t8gi2y iukko6gtimv6 2ljnv 3s7mawxra ueavu x4pl1nkyt1s0b abfab7bvxtkmrw d4843lq j5n0c7"}],"href":"https://api.surveymonkey.com/v3/surveys/307784834/pages/168830050/questions/667455172","answers":{"choices":[{"position":1,"visible":true,"text":"5dkq8pnhf1gag8 u73xqg mej8lwdqo qs1tuihyc heabchr403srhq0 51c5ipel70a csfc3dwuxn9mt9m m9j6t37f xe547smil yy8ji7kgqx","quiz_options":{"score":0},"id":"4385137485"},{"position":2,"visible":true,"text":"q02a6wo56w 6fc33b k7d7a9n7grsgt n7j84b6552eyy58 xapgm1 xxxxklio 1m6q53gxh23a sl0kn09v kdw23flh1k9ss 1kfm8tao","quiz_options":{"score":0},"id":"4385137486"},{"position":3,"visible":true,"text":"d3482iajo1udye ian1i2sop8y5 6j6u9vi h2flbi3mhh 8bo3d4b7wnwto x7dlap9vcos","quiz_options":{"score":0},"id":"4385137487"},{"position":4,"visible":true,"text":"ytjtpqs9tcdnb 5pxact7wjjdtx filtao5oyv w02h42un1hxtd5h dbagrj511v8 d2k7wxlw","quiz_options":{"score":0},"id":"4385137488"}]},"page_id":"168830050"},"emitted_at":1674149695184} -{"stream":"survey_questions","data":{"id":"667455179","position":5,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"orgitaiw xmkvjmfm ev9wjtt7jn r1c6c1a2w01 axiagkpsr7 n6shk7sv7miuqa cjsunx5lysasx7"}],"href":"https://api.surveymonkey.com/v3/surveys/307784834/pages/168830050/questions/667455179","answers":{"choices":[{"position":1,"visible":true,"text":"v2fci0n3 q922d8hp kc1mka6c9 9hkt1vtlsv nx48ppjd kaisn r586rs 5awcm 1y8b5ew","quiz_options":{"score":0},"id":"4385137492"},{"position":2,"visible":true,"text":"fv4rhyxgmweyo1r g2yoh7nm88 ubgo22v 8e2ukol","quiz_options":{"score":0},"id":"4385137493"},{"position":3,"visible":true,"text":"6b1kwthm24uw054 42yns43d9d2 9aelxhek6m9cer 1scie9 ob3erxdvkvv 5bccusj 9pwd586","quiz_options":{"score":0},"id":"4385137494"},{"position":4,"visible":true,"text":"0x7t0 engoyj qcv0e ta9rgyjcob sqj1y247 2f6nqh0s4e9qtbb o3q661emk54yvlq am4wkqctn26fblr 4yd280s9dpbyq","quiz_options":{"score":0},"id":"4385137495"},{"position":5,"visible":true,"text":"1mm0gylkc8tpmj vpmvo0eme0 9kriqmlb dm300brkw7certk 096rh8ts1ll 4j1pr","quiz_options":{"score":0},"id":"4385137496"}]},"page_id":"168830050"},"emitted_at":1674149695185} -{"stream":"survey_questions","data":{"id":"667455202","position":1,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"tius10di41k9wls fmdlvphlmbl4fd 8t41mn pkss9 f7qg4uouq15l 9al8qjnu8lg4c d62wfnkbe4mx5"}],"href":"https://api.surveymonkey.com/v3/surveys/307784834/pages/168830060/questions/667455202","answers":{"choices":[{"position":1,"visible":true,"text":"pxw0w9ui9m4oc inicqftdpfe dyp4ai vn1pmsp6 6cm2gr9b 0ruklaf8 xhb0a8q","quiz_options":{"score":0},"id":"4385137700"},{"position":2,"visible":true,"text":"p28lapcj xmelobuak9wnfe k0ilacxb575 54eam 96ng7","quiz_options":{"score":0},"id":"4385137701"},{"position":3,"visible":true,"text":"gw913n4emtltob sqr3rxe9q alm4u96n5dbp 79stl7bky bseq17ndb ibhcv2mf06av","quiz_options":{"score":0},"id":"4385137702"}]},"page_id":"168830060"},"emitted_at":1674149695186} -{"stream":"survey_questions","data":{"id":"667455205","position":2,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"o5429bwk bdk9efwp y207rta 5ir7a m3btvu7doifx o2as6uky bmp4untymjr 2qx7e254wxygi pwf681dh3l"}],"href":"https://api.surveymonkey.com/v3/surveys/307784834/pages/168830060/questions/667455205","answers":{"choices":[{"position":1,"visible":true,"text":"ihqtbuwrc7tkk t8rxgekiec q546dffi6yo7ato ip17xm5fe","quiz_options":{"score":0},"id":"4385137740"},{"position":2,"visible":true,"text":"ithl3 gl2tfo1dl uuiy7ls5 7wkeslse","quiz_options":{"score":0},"id":"4385137741"},{"position":3,"visible":true,"text":"ul101be 6w0o3urllk5o4rc m6ttbhcts3nrpq cd93g3j6","quiz_options":{"score":0},"id":"4385137742"},{"position":4,"visible":true,"text":"keye6y0 vxjehf4oga975i hh0hwvp20y0 hmvp1i de9i0gf 309gv73vahw5tv6 b7th25myd dl56yk9tjsnwbkg nlwrih","quiz_options":{"score":0},"id":"4385137743"}]},"page_id":"168830060"},"emitted_at":1674149695187} -{"stream":"survey_questions","data":{"id":"667455210","position":3,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"ccus8ci s30ote6pf5enhkv 3xw226wx0r4r soqp6i56rx7p"}],"href":"https://api.surveymonkey.com/v3/surveys/307784834/pages/168830060/questions/667455210","answers":{"choices":[{"position":1,"visible":true,"text":"ror4u04bm535i fajv10 ji7v3kn9 nu7pljg4qd vtkx1s","quiz_options":{"score":0},"id":"4385137775"},{"position":2,"visible":true,"text":"5xjy8qckk 6w20pov 9ductfhj r469e wem8w rc0jks9hwv","quiz_options":{"score":0},"id":"4385137776"},{"position":3,"visible":true,"text":"difesvxw9al ygl163e5w9dv6 x2glb8m5g6fxm eiibhejl","quiz_options":{"score":0},"id":"4385137777"},{"position":4,"visible":true,"text":"tnfsnewa16k155 klbxdolp it7hi 3bwcsq4kxfs6ag y4rjmghck8fb1 kbr64tikluwhtsh m6uwesc5861 tdfj8p qrf7oe2ydofs vy2xx8qsmcvubj","quiz_options":{"score":0},"id":"4385137778"},{"position":5,"visible":true,"text":"w4u167 jcxljoenbixyu2 xmxki pnigr00vdimxddq","quiz_options":{"score":0},"id":"4385137779"},{"position":6,"visible":true,"text":"s4ksxp4wgiv24 jd0h5q9cdqbaf2 at9rhpltsnm9e 9vvcvfubdxoda 3dr2s4l bcvpvk5qq","quiz_options":{"score":0},"id":"4385137780"},{"position":7,"visible":true,"text":"p0nngm2a15gos i48053r8tp2si 5c356fdw 7lwo3016oo2u ysdomx7ts utp4qqa6","quiz_options":{"score":0},"id":"4385137781"},{"position":8,"visible":true,"text":"hjopydw3jidvpdk opbjs2sr 86c6g94l 1045p9imdm h0ewun d0ki3w4t li53wdcc9 v0m8nd0q8rim c9kuoh 986j08boiew4s","quiz_options":{"score":0},"id":"4385137782"}]},"page_id":"168830060"},"emitted_at":1674149695188} -{"stream":"survey_questions","data":{"id":"667455212","position":4,"visible":true,"family":"single_choice","subtype":"horiz","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"ycd70f 3tlkc9fg5 uhsj13x 3fy1deg e3x8xxtprtx 35w2t0fb0kt 3c7sp4a9l vekf5fhta3agc tqtiua55"}],"href":"https://api.surveymonkey.com/v3/surveys/307784834/pages/168830060/questions/667455212","answers":{"choices":[{"position":1,"visible":true,"text":"3nd2k 12iqy qcy1q78egxti b88f20djp60v 6orn1h44 xb2fsut8p63fd spd9rybvtlrx6 p79lf27l","quiz_options":{"score":0},"id":"4385137792"},{"position":2,"visible":true,"text":"w6va3wm5af u5earys3j7d2w0 aigku 8mhkw6l24gftb lj78ittnu2lhxct","quiz_options":{"score":0},"id":"4385137793"},{"position":3,"visible":true,"text":"1vjpoerlc8kf lnqycklqdoe yt06t24brl aoqrq","quiz_options":{"score":0},"id":"4385137794"},{"position":4,"visible":true,"text":"syf9d5k3axfjhqx f1un5ergu68m7h hf5by9f1 0qf94 docxia9h qcda6u ja83rjw7gdm","quiz_options":{"score":0},"id":"4385137795"},{"position":5,"visible":true,"text":"al4dbwow9is7xut pif50a9 434fsto nh16xfiu34c0eld beshoy","quiz_options":{"score":0},"id":"4385137796"},{"position":6,"visible":true,"text":"q9ufu06kh4d 4oogck630yox 7wsoh 0l2dsb3 8noi1cwam8ukth","quiz_options":{"score":0},"id":"4385137797"},{"position":7,"visible":true,"text":"3e5ijt o0m3chiw3pitxr8 4hciaiuh9c gpc5q4olp3cib 0bhsd3payjog 562gi7o4647qe 39j8aa4ptw jbydrqc7ujb iqlxnqn4uea","quiz_options":{"score":0},"id":"4385137798"}]},"page_id":"168830060"},"emitted_at":1674149695189} -{"stream":"survey_questions","data":{"id":"667455215","position":5,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"0qm15jfh tv1ui2w edoha95n wbhjp4 rui3rtv6xf4n5 v920f2a1hrd 5d7gj7guq r7ljcjdk2f8t jv5iocm9mv7pg cmow0h6"}],"href":"https://api.surveymonkey.com/v3/surveys/307784834/pages/168830060/questions/667455215","answers":{"choices":[{"position":1,"visible":true,"text":"sewil7695q3 nu4xd9w cr1qqpg4h9qlr ktchj5oeb4 1ns6cdqbjex5f w2y8iflb52 kpkid 1f8unrp e17xh5 qn47cr","quiz_options":{"score":0},"id":"4385137810"},{"position":2,"visible":true,"text":"q21t3nuf 71sfp kag1g9kes bx74gjdj1 uvk0chaofja60","quiz_options":{"score":0},"id":"4385137811"},{"position":3,"visible":true,"text":"nkbskl7xaxqbh76 pgggq5trhj3t isut5qlmwmxnbw apxr23h4v0l","quiz_options":{"score":0},"id":"4385137812"},{"position":4,"visible":true,"text":"jotjdok64gv ya4g5j b0w379 riavnfi10mu3bm 739xph torva74 9dcgi9ns8qlnho","quiz_options":{"score":0},"id":"4385137813"}]},"page_id":"168830060"},"emitted_at":1674149695190} -{"stream":"survey_questions","data":{"id":"667462113","position":1,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"xvodyrmsj8o5 vqjqrurdq h74609w5o8cj kpy303cf 5qyfp1flg hpfvhtg412qu bwhmup tfxwjcltbmp"}],"href":"https://api.surveymonkey.com/v3/surveys/307785448/pages/168831471/questions/667462113","answers":{"choices":[{"position":1,"visible":true,"text":"x5gvnnvh 39thim64p k3naeh9 ebx5et8ci8vnjwq mwfb0867jts36 lq8ghnv8c23c86i","quiz_options":{"score":0},"id":"4385177065"},{"position":2,"visible":true,"text":"5atgqxr5w uk3ussdg7 29qlw s20qfx0w1u1d cdx47v m76g66t1j x9wuj hyv5095ipd9ly","quiz_options":{"score":0},"id":"4385177066"},{"position":3,"visible":true,"text":"yqvo5j rojr27j6ww 5k0ra1y96j6 vbd70ncr0 wdoxqqhkv a283r7g tshidt7i0jw 70gxqa3 d1wh0y 8q2x4yu5u3tcga","quiz_options":{"score":0},"id":"4385177067"},{"position":4,"visible":true,"text":"53838lqaxys gt190mgek1r2r llxh86fi38xyyb puwhs54wnxa8m5r cb8w1f312hts1 80gh5hrp0o 9a6siov 5i3l99eiefhoq","quiz_options":{"score":0},"id":"4385177068"}]},"page_id":"168831471"},"emitted_at":1674149695299} -{"stream":"survey_questions","data":{"id":"667462114","position":2,"visible":true,"family":"single_choice","subtype":"horiz","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"r5gme6gxar 540ixc g0a32bjvkgg4lh 9n8hpb qor7i9od6r max0ae1vu08"}],"href":"https://api.surveymonkey.com/v3/surveys/307785448/pages/168831471/questions/667462114","answers":{"choices":[{"position":1,"visible":true,"text":"tk3pmts41 aw7w4y8e5 65iplksmu5wa0o 9ve8f0vgh eiy8hki8e3 5vbwg fa3nnnvu3uq c43makf6i","quiz_options":{"score":0},"id":"4385177095"},{"position":2,"visible":true,"text":"3kvwm3v j0x7vrrpnxmtcub 7h9g43wg71ppkn 71nswd5eq ocqpss07r05dej4 7ln0vdgw0a","quiz_options":{"score":0},"id":"4385177096"},{"position":3,"visible":true,"text":"07td1maptf9wde nrlxmvshy dgpasbrawvpndo id9yjt2tsi mkm0ri7epkw5d poj8pv8m4lu0hc 2xh0i62g5j wxokec50ps52h8 d6uqtrf td70pitsu","quiz_options":{"score":0},"id":"4385177097"},{"position":4,"visible":true,"text":"ccw6cg wd5s85rpk dqlppdkh wpqg1t9vhdq8c","quiz_options":{"score":0},"id":"4385177098"},{"position":5,"visible":true,"text":"8b85ql72lufp 3r2k118 vy5uh6mnntsq5 x49hvqp19g 3oc9laa75hjxwn7 a77dw8 aiskfi350fyh5w 2h7ra ry4mj i2el2","quiz_options":{"score":0},"id":"4385177099"}]},"page_id":"168831471"},"emitted_at":1674149695300} -{"stream":"survey_questions","data":{"id":"667462122","position":3,"visible":true,"family":"single_choice","subtype":"horiz","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"0wpcptufvllj 4yjhb78a bxn0uwh wxberovodxawinb 414fl1hatnpl 6tdxbt dpieidpjnqba9r"}],"href":"https://api.surveymonkey.com/v3/surveys/307785448/pages/168831471/questions/667462122","answers":{"choices":[{"position":1,"visible":true,"text":"c7nxl 3qrbcjg tddcypx5j63ne1 ebrs5911s ilsx5w8mvfs s24oodbw hbah99rw65wt7i","quiz_options":{"score":0},"id":"4385177138"},{"position":2,"visible":true,"text":"1lx4btr3s0pqq63 y93x5lqe xmyta126vo 07q0rnh 4f2bxvoc441 wcnvx ytjeo8g5a wjqrg 134n2q fgobl","quiz_options":{"score":0},"id":"4385177139"},{"position":3,"visible":true,"text":"t9hwm2nyw se2sl72a t70iepatjw0rsh kq6ta 6mhce jgfjh52 w3ivi m0sxg2i4 pup4tsn7a18","quiz_options":{"score":0},"id":"4385177140"},{"position":4,"visible":true,"text":"j6w78imo yhv9sgxs64ii ep2ckum1ge3 3d8gcbsuw j0b95oqfwn","quiz_options":{"score":0},"id":"4385177141"},{"position":5,"visible":true,"text":"5gn4d25al 29tdmvxhe1 svxcxt7qsq pbnlphkoa xvotqve1o79","quiz_options":{"score":0},"id":"4385177142"},{"position":6,"visible":true,"text":"sexvxfmqb jp6xina8q 3gsyu2jfgvpag d2wcp0k5ukuk igg5ecqj rtkd5j 485nmxjd tjodfsyhu5","quiz_options":{"score":0},"id":"4385177143"}]},"page_id":"168831471"},"emitted_at":1674149695301} -{"stream":"survey_questions","data":{"id":"667462126","position":4,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"5r90gmyyy 1eaacbfag0g4dtj 4rlwnt3r jxg5ruhn tkkml hhlu8r79jma"}],"href":"https://api.surveymonkey.com/v3/surveys/307785448/pages/168831471/questions/667462126","answers":{"choices":[{"position":1,"visible":true,"text":"vln5gpqjq tglcfbuxu7b dob43c qnisv2 td55l7hc91pd00q 8gig93a5h 7hrhg6drxsxy bj0oldpxa33by1 4q9mx98wbi p0wb03eabrt87q","quiz_options":{"score":0},"id":"4385177153"},{"position":2,"visible":true,"text":"fj0a0nfwmfjrong q38px84nyk rvqmj 2sv0x6n9a hm88jqn87kvws","quiz_options":{"score":0},"id":"4385177154"},{"position":3,"visible":true,"text":"ms6ilfpbca 8ota11f 1qvg0ellfxis pfyvkkuujjjg qih57geu 366h9cnbrnny9 4sx4q0hbgu","quiz_options":{"score":0},"id":"4385177155"},{"position":4,"visible":true,"text":"si9tpsg54u 13dds2djq4kh kra03y5p43 j8rej5mv d0tkvig6","quiz_options":{"score":0},"id":"4385177156"},{"position":5,"visible":true,"text":"k5ijfpctrn flm2q7kn5t4jf xkjec55tbfsgsn7 r61gy346sxt","quiz_options":{"score":0},"id":"4385177157"},{"position":6,"visible":true,"text":"v9u400dqc fii651ftc 29rn2dffioa0 1m22amm42b qhs0wpth5 mva3eqo09hvo7x 5od22d7a 7m4606e271a6yk","quiz_options":{"score":0},"id":"4385177158"}]},"page_id":"168831471"},"emitted_at":1674149695302} -{"stream":"survey_questions","data":{"id":"667462130","position":5,"visible":true,"family":"single_choice","subtype":"horiz","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"a8ibp46e09n3ae uwv4r3pbpla uxqx8 8nlfqe2ekyj6b1u ju2o3ts 29q5blc7t5m225x rgcjb uek905hycm3 8nkpkppcdm4s vfcx6c63bfppfk"}],"href":"https://api.surveymonkey.com/v3/surveys/307785448/pages/168831471/questions/667462130","answers":{"choices":[{"position":1,"visible":true,"text":"xkuphh 9sloeexwdfla qyx58xq9u ne6p3x","quiz_options":{"score":0},"id":"4385177171"},{"position":2,"visible":true,"text":"83uko6624m otef3go 1ba21xa3 6k24mmw t82m7my9ipep","quiz_options":{"score":0},"id":"4385177172"},{"position":3,"visible":true,"text":"ke2gqx614f p7y9pi89 asll5jaju89e1g vgrj3wbe x815bgx 67md1tfhviy9v","quiz_options":{"score":0},"id":"4385177173"},{"position":4,"visible":true,"text":"w5raj381jcgffq 9o1b73 0jnat47bs8bq9e 41ffc wcmivj410jtu19 njxb3v7 gurti qsjowjls niwyt2clulpdpa fe0skx96v529ui","quiz_options":{"score":0},"id":"4385177174"}]},"page_id":"168831471"},"emitted_at":1674149695302} -{"stream":"survey_questions","data":{"id":"667462135","position":1,"visible":true,"family":"single_choice","subtype":"horiz","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"ictucjn9av1 hqjkdsl56q dw9ouny 3wbjmkmik8m pxirnbm7f jq31e0w572q61j le5ke3if 8wkwmox1ow fsu8b5 ci0ot"}],"href":"https://api.surveymonkey.com/v3/surveys/307785448/pages/168831478/questions/667462135","answers":{"choices":[{"position":1,"visible":true,"text":"k4q8hnwb0uticgy kx7rcy sp8nvx036 u552gi 1icsxiqmdmects 0c1d69aee soiye","quiz_options":{"score":0},"id":"4385177200"},{"position":2,"visible":true,"text":"bolb3i7f7 5d7y5 n7il93or tb2599 wlr1hh88d5bjpr vu3v5uudajbgoa s7kmwnhqpph","quiz_options":{"score":0},"id":"4385177201"},{"position":3,"visible":true,"text":"ve8olc60hjdia ake87 4iv5po m484jgkaut 5e285a0hgsrirjm qtnofl05rdtx","quiz_options":{"score":0},"id":"4385177202"},{"position":4,"visible":true,"text":"1pix5bfrx 9pncw4rp5g xf1sm2qr c474c1s67jgcrw trxj2k huiccx1kxt1 l13mo1hij12","quiz_options":{"score":0},"id":"4385177203"},{"position":5,"visible":true,"text":"kf0u04jlsopl 1b4xnt0g 3ehjhwg d9465xfy24uisef 0xv77xfgm7x","quiz_options":{"score":0},"id":"4385177204"}]},"page_id":"168831478"},"emitted_at":1674149695303} -{"stream":"survey_questions","data":{"id":"667462136","position":2,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"c4f5ohh3sr2w xoto95lot 7lykuivbhs078b h524lu75 mvss35agi"}],"href":"https://api.surveymonkey.com/v3/surveys/307785448/pages/168831478/questions/667462136","answers":{"choices":[{"position":1,"visible":true,"text":"1blqraty 6uddh9u imhcfci dho72p k7feqgqqqr3jm7a 8y7drc4m8m8f1l0","quiz_options":{"score":0},"id":"4385177210"},{"position":2,"visible":true,"text":"cnxmsuqasj3g gm6e1da2102igu c3w07jugiuro afj54 kpoom7 n11yf","quiz_options":{"score":0},"id":"4385177211"},{"position":3,"visible":true,"text":"82nkl1jumcnc6y3 raogw1xp84nswty h77x7 f26dju09o2 ajgbdjdjne6i","quiz_options":{"score":0},"id":"4385177212"},{"position":4,"visible":true,"text":"dukxs7vo kj6j0x kjsmang 6g8vgh t9ymo1h2hfln6 iyuklw ou0uq6","quiz_options":{"score":0},"id":"4385177213"}]},"page_id":"168831478"},"emitted_at":1674149695304} -{"stream":"survey_questions","data":{"id":"667462138","position":3,"visible":true,"family":"single_choice","subtype":"horiz","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"jvr1s akb7hxxk2ptt1q jhgxvvgirs2d9 rae88p7 b54u3m1g1 vflgmpv75w5q fauxphpa8bxdutx"}],"href":"https://api.surveymonkey.com/v3/surveys/307785448/pages/168831478/questions/667462138","answers":{"choices":[{"position":1,"visible":true,"text":"jnkfpiwx3ye b6osvfmkkn eek8p036ayn rt6k5em681q78c 8j9brp8e2qsa 3wu71v85le4x65g","quiz_options":{"score":0},"id":"4385177214"},{"position":2,"visible":true,"text":"oriu3pl31qd r6u7isqu2p1 s6lrtm ybawl9g3oafvxx9 sw16llmo3dpc2xx v0r4lrj serixtbxt sy64a2vcyjf f8mhp","quiz_options":{"score":0},"id":"4385177215"},{"position":3,"visible":true,"text":"v0j4ixrd gxjah4rsf03mhtt selnh 311w2 80liajkb9v2h8 m7aggx9 kmjjabnan kk3g4y8jiuv n15toj tt60bhmd","quiz_options":{"score":0},"id":"4385177216"},{"position":4,"visible":true,"text":"vmvi1nlvkd1vy 13gv0qjxtri sty84 df77l wwh82kck3x0 7jer7beg 8x6vcrnh9qiaf3 nhl5ref5v5bud dol9qivxsg8owna xsw7e0s1","quiz_options":{"score":0},"id":"4385177217"},{"position":5,"visible":true,"text":"a2fdw9jbcytl8ok 0ir460hfwm8 rqecrtla vyi4l3tpv eyhkdh47w2uixp","quiz_options":{"score":0},"id":"4385177218"},{"position":6,"visible":true,"text":"yvusg0ns7jw9tt nadlph4 0ur0o5dhan 9jlq9878999lbf","quiz_options":{"score":0},"id":"4385177219"},{"position":7,"visible":true,"text":"48eqbvdq1ubpvyk 5f4pwr438ik81xj x7ioyubgl90808k 12tr703dg0s 9xpyni000c9sbr","quiz_options":{"score":0},"id":"4385177220"}]},"page_id":"168831478"},"emitted_at":1674149695305} -{"stream":"survey_questions","data":{"id":"667462170","position":4,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"csowuuey o93r9336 qf9fym8wbog1q l48c9"}],"href":"https://api.surveymonkey.com/v3/surveys/307785448/pages/168831478/questions/667462170","answers":{"choices":[{"position":1,"visible":true,"text":"0k0agd84ev s54c44f90gaj pol5dlpg9t fa5trrngfu cvrd4cmtmq","quiz_options":{"score":0},"id":"4385177381"},{"position":2,"visible":true,"text":"9egly8imf1 9k1iapox y80fk4 58nrajt2 swkm1na","quiz_options":{"score":0},"id":"4385177382"},{"position":3,"visible":true,"text":"se2mcirb6lt5ty qqt53dn jpg4b3wk0c7 91onwco8d7ll0 e7y4mwxa ojs5nky 6u7am","quiz_options":{"score":0},"id":"4385177383"}]},"page_id":"168831478"},"emitted_at":1674149695305} -{"stream":"survey_questions","data":{"id":"667462172","position":5,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"ypxnp3k6 708jao1b5ok671 cv8x263j0f nk1a0a5cm 7y4r2bclg2rgkd7 alax412w9y76"}],"href":"https://api.surveymonkey.com/v3/surveys/307785448/pages/168831478/questions/667462172","answers":{"choices":[{"position":1,"visible":true,"text":"dil7s3hpsuf ms76n abih45plj 9bw5noujg c759tcfvo7t3e","quiz_options":{"score":0},"id":"4385177392"},{"position":2,"visible":true,"text":"okltv66m4qav 88ijt vcq7k5m xcpwi9e4v7 ujqbg24ap g715rctpafkm0g","quiz_options":{"score":0},"id":"4385177393"},{"position":3,"visible":true,"text":"vev8vhb11 dc5mn3colgaea8g jflk5j niy1qt8onj788","quiz_options":{"score":0},"id":"4385177394"},{"position":4,"visible":true,"text":"hq1wbebc2gleq tj2vf867 n1aqc afpekt91re 89guq9pn9rrww 0bgvg74","quiz_options":{"score":0},"id":"4385177395"},{"position":5,"visible":true,"text":"bxkr18qipct gp9wcqfk n69e6ai66xmy01w xlh7s17hlsbcyt o16ecq6vf42q crmh6p ykbam9mxc37ah2 ii40872r6mws 2xobrm","quiz_options":{"score":0},"id":"4385177396"},{"position":6,"visible":true,"text":"x18wk 3k4tt8ae kkrj0xjbf krc0uyrj2u 0qq68","quiz_options":{"score":0},"id":"4385177397"},{"position":7,"visible":true,"text":"fkx0pjep58wx jpxs8a7vfnf bq2hlx3vhqsn qv4d1evms x64h8k3e tqaeb3mc a1yiqm7qjgi8 cnaw9173k6js msisp4d7bf12 qan9ffq3sv","quiz_options":{"score":0},"id":"4385177398"},{"position":8,"visible":true,"text":"tdjr97it3qvsl 8pfqjvhuo91 mp8kdeymnwv9f 8apy9eshudwhq veo63l6q9np w18s0102tvjru 6lckytkggn6 ncdlx68","quiz_options":{"score":0},"id":"4385177399"}]},"page_id":"168831478"},"emitted_at":1674149695306} -{"stream":"survey_questions","data":{"id":"667455348","position":1,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"se3rt7c 9l8d9rig3sk jk791y4nv9qp2 nof9xr8 6irr0mr2uv73g4 rmv1ko9gx2 qievxp819lc o9jbr n1vrq"}],"href":"https://api.surveymonkey.com/v3/surveys/307784863/pages/168830094/questions/667455348","answers":{"choices":[{"position":1,"visible":true,"text":"6sudul87 6605b94 w6ubg6a7p0v ufh3d5da nyic6pot11a64v9 awpjgnn22pdgup aidahk8krd 0vhentor51fp ujasxxx1srf","quiz_options":{"score":0},"id":"4385138588"},{"position":2,"visible":true,"text":"08yvwmw 441si6svo1bkxm o85tbsa3k wlkn3","quiz_options":{"score":0},"id":"4385138589"},{"position":3,"visible":true,"text":"uvmasd gagqr6qlswnr af0hp3nfg2vucd vbtkx8vssxlvg6k 5g3rhca48m2 y8gwlm2","quiz_options":{"score":0},"id":"4385138590"},{"position":4,"visible":true,"text":"mrjr3x0 57yyh77a3cmlcmj mmwked8tvqnt5 bwe0hh5dqp1sn3 pia1tu u7ujusnwtvt be24cc8s5olebg 5n65mmu5nki2b","quiz_options":{"score":0},"id":"4385138591"},{"position":5,"visible":true,"text":"k3wcw3bjn hq24e21 f5vvmq0kipi 1smhqty4 p8x2xxy0obkxv qsa4cuvmy81r7s 651b5hlw 2ej3xrm289eqd2 ugnf354v90h 92mk44ghps6cm","quiz_options":{"score":0},"id":"4385138592"},{"position":6,"visible":true,"text":"k8yajsxquuu 94uubd5wo9 piwa122aeek 3fhrq5gvwh14jho 6n8k5a54q7nf rnoqwd82y1 tkhxop blr0p2l7utyydd 7eto449ipd","quiz_options":{"score":0},"id":"4385138593"}]},"page_id":"168830094"},"emitted_at":1674149695413} -{"stream":"survey_questions","data":{"id":"667455351","position":2,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"dni8kmob8f47ly 92hb1pg6lvuklf f3hvi6 yjiq3bvv yys743quapbmwm 9xclo63 ydxt59b89 iwks1mdba9iuje vmaq243f75y4m 8cqcf54w"}],"href":"https://api.surveymonkey.com/v3/surveys/307784863/pages/168830094/questions/667455351","answers":{"choices":[{"position":1,"visible":true,"text":"9e7yu 2366vw4sa3 rhhgm0 yg8p9f9","quiz_options":{"score":0},"id":"4385138595"},{"position":2,"visible":true,"text":"1ldfxfr k0hnmi dcjdiaquo0xyac remaba5g8 9pslek2fmlvxf7 vn4gl8yjs10up4d b4i21994 78q3d","quiz_options":{"score":0},"id":"4385138596"},{"position":3,"visible":true,"text":"aq8d89iqk6 ytv9mg39tq5 lb5gtx1kdm5a 7scilf5j73g580i tr22c460a1vu dvpernw oy2j9qqnhhbpia9 ufy7twqsl2ovj","quiz_options":{"score":0},"id":"4385138597"},{"position":4,"visible":true,"text":"dlt857wic34l 5y0sijl8 1jmsvy9r e7psvexn8rj0nw ioo4ka04w vtk0ihmg27ac bm60oah9 chljr8vrnt8adx1","quiz_options":{"score":0},"id":"4385138598"},{"position":5,"visible":true,"text":"mk4nh6bnddk1ph 63551586l v0d3vhn w9s7xmbbm3 cbddi6vgmddb se5grc1xr2ycuu bmk1yr7kq3e","quiz_options":{"score":0},"id":"4385138599"},{"position":6,"visible":true,"text":"ob4dd4 r36mdre p96uaag57ld7vo yeqwbn7w2tgi4sj wcgvpd0 4jq0nh42ev8 23i0ye5x 4vuki","quiz_options":{"score":0},"id":"4385138600"}]},"page_id":"168830094"},"emitted_at":1674149695414} -{"stream":"survey_questions","data":{"id":"667455358","position":3,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"79kmj4p2d 3sbolhwfrs ioe6b93hwr70jt7 679dpby y4mxy 1pr869ii"}],"href":"https://api.surveymonkey.com/v3/surveys/307784863/pages/168830094/questions/667455358","answers":{"choices":[{"position":1,"visible":true,"text":"mqjitf64d0 k83dvyn 227nrf 7a5bulp 0jvp9hmiew umwdpysrqbgp he3hkvoxi8d t0ekmaqml bniybp7","quiz_options":{"score":0},"id":"4385138620"},{"position":2,"visible":true,"text":"ocaqjvupc or8s5on 2txbvlu6e20 8uxgb ll6ktjtk gnwy7knkeeev y2t9cah46lu7 rkmxywsr 8qgmy6gm3fd84 eko2r","quiz_options":{"score":0},"id":"4385138621"},{"position":3,"visible":true,"text":"3kjxifk f8vld xx9lg2ejtf3ccwx 1rfll3376hrm xwaevy7i2krc 93h2fggv1nak vvo0hqr0n 27qcal0ao","quiz_options":{"score":0},"id":"4385138622"},{"position":4,"visible":true,"text":"qqsv99lam uidx4csd1npsc nccxm6ueer 2ihx3c5ysd","quiz_options":{"score":0},"id":"4385138623"},{"position":5,"visible":true,"text":"3ooy243l5h usutwf3do32na0o t5vo8vggvjl pp44k52k3dw swg9bvhtl v4mn5goadw4 9g9jefyf6qhsijc 2j1bmvuu83brk","quiz_options":{"score":0},"id":"4385138624"},{"position":6,"visible":true,"text":"fnme6flca1yfyo kj1q1j tsfq15e2iam8 tyny80 fjb3nf p0mtkwgteciioq p5b79nk1a44d0re lwpwx8wu0e2 at5p0nbaqbp 67tqasnk0lb9dr","quiz_options":{"score":0},"id":"4385138625"},{"position":7,"visible":true,"text":"8n8el8 it06oap6nv fapkjmk yxemrfihr61i heif6anfb o9nel86ws 2t1vpytr","quiz_options":{"score":0},"id":"4385138626"}]},"page_id":"168830094"},"emitted_at":1674149695414} -{"stream":"survey_questions","data":{"id":"667455370","position":4,"visible":true,"family":"single_choice","subtype":"horiz","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"lqroep8etllsp 3o8w6vilbyqlwkm b5velmro8 b4ylvv9kkt"}],"href":"https://api.surveymonkey.com/v3/surveys/307784863/pages/168830094/questions/667455370","answers":{"choices":[{"position":1,"visible":true,"text":"4bdsyi00551 hsh1woqfmclyma sslfhk y11rdswosp4l 2my277i74fedi lool7","quiz_options":{"score":0},"id":"4385138700"},{"position":2,"visible":true,"text":"qdn040 3tljs j7juh9xjucx 31royj9xs6q lg56gf8c81s","quiz_options":{"score":0},"id":"4385138701"},{"position":3,"visible":true,"text":"ytndri0sy3noxx p2d3euo 4d2jqiygj8jwasr 38lf63x39sxb irlep2 cgi9pr 4pcvga t64nykcc4gn tv9xfejs41 er8a8pw","quiz_options":{"score":0},"id":"4385138702"},{"position":4,"visible":true,"text":"n2hd3mf5 7gg2j7cn55e77 1j1ijenv62 ntd44byw6 g86vqe62ogytk qcodrayuwmht5tn ix3qc42ybhqd 76ry3 goorkieate s2dofir","quiz_options":{"score":0},"id":"4385138703"},{"position":5,"visible":true,"text":"ct0y2yaibh 5hhaeb2u5jmncca ibf7eql6h w3k37s","quiz_options":{"score":0},"id":"4385138704"},{"position":6,"visible":true,"text":"w9647 j2wq2eisb85rlu js474wesi1j 1d7eiqq 3o7ubmmvsbl4","quiz_options":{"score":0},"id":"4385138705"},{"position":7,"visible":true,"text":"deheetxt3hlox 8e4hd76i 517ltrj0v dgi0r19ud5srqj 1qdru4f sl8p3 7gxolfxkhlc5x cm0seo7wroouww","quiz_options":{"score":0},"id":"4385138706"},{"position":8,"visible":true,"text":"4xv8h753xoo8n1 9atxgj8 5eaohgaugpu3in ybq32s67 atg5l7u4aotebk1 0asuiahw4 dyakwm kpqe9vu81dkfvcf","quiz_options":{"score":0},"id":"4385138707"}]},"page_id":"168830094"},"emitted_at":1674149695415} -{"stream":"survey_questions","data":{"id":"667455395","position":5,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"0qnmuchico4fa8q dfvkni 15mwj2a8 1c8qh1 y3dml65 yfodif0 uik9a vja72b"}],"href":"https://api.surveymonkey.com/v3/surveys/307784863/pages/168830094/questions/667455395","answers":{"choices":[{"position":1,"visible":true,"text":"k9cwqyrhvt6t2y lijf97ywdxctyr nahqrqwo o73wy4r13r","quiz_options":{"score":0},"id":"4385138779"},{"position":2,"visible":true,"text":"0dtp7umdw2tn 3qerkatyyrcndup s70sgadrf0pjwna 13cc30nv 3fku5vgj5","quiz_options":{"score":0},"id":"4385138780"},{"position":3,"visible":true,"text":"8vwwlvlx3 rl3x4l r3itwqo 2uiml2j417 p0x4d9pxyhs bdyo06b oyuo333qq","quiz_options":{"score":0},"id":"4385138781"},{"position":4,"visible":true,"text":"ua5rn0o lgn9qrvh a3xi82nkmd9s2d sdhqh3q8m yu21i9gn3 u4wyck8efnu 47ubnif60vxre wfs92q1c84 qyyup","quiz_options":{"score":0},"id":"4385138782"}]},"page_id":"168830094"},"emitted_at":1674149695415} -{"stream":"survey_questions","data":{"id":"667455427","position":1,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"bja345d 7gmh5 u335j3ifd bmn2iwulckt qbbgde6 4l86ghgt3bnplod fsq6qpqp ogqqnp01j gycd6318in4xl s7sxc7ric"}],"href":"https://api.surveymonkey.com/v3/surveys/307784863/pages/168830108/questions/667455427","answers":{"choices":[{"position":1,"visible":true,"text":"h1f77hud lvvbp8566vit8 eij3l1f6m86rt9 658f6 auhky2x 1qb9805ptcl4yvs","quiz_options":{"score":0},"id":"4385139013"},{"position":2,"visible":true,"text":"82ow11qv0ewplb iipebne7de pccthtsby8y 87tn87egsum9nd 41xopta 65fhf560c 326vjb4pp4mtdvx 4eqmo8viakl pmuv626kfxaqr 9laiyqibgse9","quiz_options":{"score":0},"id":"4385139014"},{"position":3,"visible":true,"text":"8prnel y3ygc5kxmjbxfc hdd0bos b9w3a1gfm1sm6 8wwao s6j2lw5ennapne 6psoe bxyxi pp141xm uatdplo5f60et","quiz_options":{"score":0},"id":"4385139015"},{"position":4,"visible":true,"text":"yejw4cxbnjeup 9war0kp9wl ngag1wd y6quyhtcv lhai6 6fo022bo2 0y9e1wuj sm9c4jui n9udx","quiz_options":{"score":0},"id":"4385139016"},{"position":5,"visible":true,"text":"o9i4r2ej 825mg 5rifxtuu83ox y3nhp","quiz_options":{"score":0},"id":"4385139017"},{"position":6,"visible":true,"text":"3vukbvawqxox yxo374n8xcpw s6ai05vb64 gld3jyi97 auhaq08 sl5tt43hgv n549d7mf0n7cr2q 44x9v5o31yor0 oel767u6o1evo5 knipqna","quiz_options":{"score":0},"id":"4385139018"}]},"page_id":"168830108"},"emitted_at":1674149695416} -{"stream":"survey_questions","data":{"id":"667455439","position":2,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"yvl318t7lj 7wdmc8mttupa 2g7ogtjr976g lm77fy59yrhs4 tb928lr2 kiug5rc d4hq6y1"}],"href":"https://api.surveymonkey.com/v3/surveys/307784863/pages/168830108/questions/667455439","answers":{"choices":[{"position":1,"visible":true,"text":"du6tsy5k rhhm4v540es3mo oqkt8crv5wvgvj s00kbxja5h 6kmpu","quiz_options":{"score":0},"id":"4385139062"},{"position":2,"visible":true,"text":"b52dpf uy6exuyx l6ugq4ki qkl8h9l 8ptnm 2jwismc2by8 ls428u","quiz_options":{"score":0},"id":"4385139063"},{"position":3,"visible":true,"text":"86bpwgdk1q0 4iigbk1xjrm hncx3xkk5lj e75h213rkrpjg cku6p9no3qv rn1dvjp5hmtbfar 694ly6v9m ue5ad4q xomxi5c69o6pqm f75mwyy5nd","quiz_options":{"score":0},"id":"4385139064"}]},"page_id":"168830108"},"emitted_at":1674149695416} -{"stream":"survey_questions","data":{"id":"667455443","position":3,"visible":true,"family":"single_choice","subtype":"horiz","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"6myggi xgx2lptp oxfg8bgavhkxd h8586wxt2vv7 tba2bd 07altrm 2vmmtgfir gchrhk6kdw"}],"href":"https://api.surveymonkey.com/v3/surveys/307784863/pages/168830108/questions/667455443","answers":{"choices":[{"position":1,"visible":true,"text":"5acyluby8j u7jbf p5x8foahy k5uk8ubi0 ov4ouretpl lxrf3 ufct467j99f2hfl 7dlgr8eo","quiz_options":{"score":0},"id":"4385139109"},{"position":2,"visible":true,"text":"vpcmt6klp47b0 q3o6j96 3mm9gebofu1n2 jq7dq","quiz_options":{"score":0},"id":"4385139110"},{"position":3,"visible":true,"text":"r1wmg0i0ae892 acmd77ws2k 0pk531 pb5k6xev ury0cf","quiz_options":{"score":0},"id":"4385139111"},{"position":4,"visible":true,"text":"0hemigqnwym5f0j vn57ess p5vjtn nylhr7","quiz_options":{"score":0},"id":"4385139112"},{"position":5,"visible":true,"text":"gl7otpnp 0b43hto2wr0o tnavjwnce9lc5d 7j6hs ca3qbj70t b1nc7q4 7j0696hum k6ytijiprdmdvd","quiz_options":{"score":0},"id":"4385139113"}]},"page_id":"168830108"},"emitted_at":1674149695417} -{"stream":"survey_questions","data":{"id":"667455463","position":4,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"pjksyv7o2s qtssl1k83 r6ypxm 30krmk4j0e bp8y6m0or"}],"href":"https://api.surveymonkey.com/v3/surveys/307784863/pages/168830108/questions/667455463","answers":{"choices":[{"position":1,"visible":true,"text":"olpxel7n8ktrjx l0x3kfm fg4f96cpv60w2 kpp8waqc9j1y6 2bjrk4f v33tofsgnbg s6gcg386 kcubx22oev1ju i3n5l7veb","quiz_options":{"score":0},"id":"4385139230"},{"position":2,"visible":true,"text":"jkdmotc7gfb43b2 4cfkvtut 6w3ys rn3v7u17ccsx 0gyynlvql xgsrdfrtffvr","quiz_options":{"score":0},"id":"4385139231"},{"position":3,"visible":true,"text":"7ngjrf1wlkv1 wox4shhjp4 yivmg3epgwl5d 11tfmviaf7pv59s 14c6b1vudh brl9d3b799gibr fknowr6c fi6k91n34pm270","quiz_options":{"score":0},"id":"4385139232"},{"position":4,"visible":true,"text":"o8r5r32univvk5 dhx2tc7hbbgl63n bxhmmoi6vk 1ynu42 8goapcp gh1gr8dab p1iik6y eud8jxcg jilsond0434xp77 g1s9ydwgrd6y5tb","quiz_options":{"score":0},"id":"4385139233"},{"position":5,"visible":true,"text":"p9xp9x7yepqx wg9w4q8 9f8n1maik5weupa 9s5nnuan5i co88w9s89g2pfq lxrdlybm6 fl702pf2x7 xsp9xl8yrrg m8xre7g s61if4ojul0qlm0","quiz_options":{"score":0},"id":"4385139234"},{"position":6,"visible":true,"text":"5k001wyjow 9rfq00sg6vcytu yxddli0wkif3wk1 vxk0kcy6l80jfc a06swtlqbdg 2lk8kytqeeqs 81w3s8","quiz_options":{"score":0},"id":"4385139235"},{"position":7,"visible":true,"text":"ia4knoy0u1vku io2wydrh90d6 52j9qii2gcgc9fk tx7409a0o59ffk0","quiz_options":{"score":0},"id":"4385139236"}]},"page_id":"168830108"},"emitted_at":1674149695417} -{"stream":"survey_questions","data":{"id":"667455466","position":5,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"17ks2i1p 1roi7n8j p21kn jkrvnckxot 3nkruc3e ugsatu9qx7wsw qsttqci3i03e4"}],"href":"https://api.surveymonkey.com/v3/surveys/307784863/pages/168830108/questions/667455466","answers":{"choices":[{"position":1,"visible":true,"text":"n1ic8gxnyrf1 5qacyxsh ixbp7b33rq4kigy 2rdbf2a4 rfms8di taxqa3oc","quiz_options":{"score":0},"id":"4385139244"},{"position":2,"visible":true,"text":"i4kx3k5s1t 3qw1r6n two72xjft yvu2k2 lywg4w3xhffbwrv pu20atqyr5g 34tsp4wk nwadhm3qirol8y o5x91 755w2suug91yywd","quiz_options":{"score":0},"id":"4385139245"},{"position":3,"visible":true,"text":"2xl9i1c4 11w3fsb78 bfoj2 4rmfw06mn2lxpjo wfijsno52y90 wxuik4 70c6ioht","quiz_options":{"score":0},"id":"4385139246"}]},"page_id":"168830108"},"emitted_at":1674149695418} -{"stream":"survey_questions","data":{"id":"667455236","position":1,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"ibdmqewlt gbp5yhhp c8i5s75wwg 0xosr1devp 2ijbswdblw0bjko k6httw7 kfr0tk"}],"href":"https://api.surveymonkey.com/v3/surveys/307784846/pages/168830068/questions/667455236","answers":{"choices":[{"position":1,"visible":true,"text":"3nj2ikx od392yg7 akufy84 6gw9q axmbaw59fn nfc8xe410jg 6allsd7jg7u 3wyrciq0uje8rn0","quiz_options":{"score":0},"id":"4385137904"},{"position":2,"visible":true,"text":"cb0o3tqf5 ijxi31vsg1 i0ga1cu10vmcgry gfmk6","quiz_options":{"score":0},"id":"4385137905"},{"position":3,"visible":true,"text":"i4frvbso f2k3tyww70ehqkp 5qy73 21lpebsil0fwspd kbx0uypchk wqdb8j0a0 8dr1nvs 592s2coha4r0v","quiz_options":{"score":0},"id":"4385137906"},{"position":4,"visible":true,"text":"lmg2vsbrw180cm otp0wq1e2 530jnetga2dp88 e2uploqajl","quiz_options":{"score":0},"id":"4385137907"},{"position":5,"visible":true,"text":"p0gqg3u03 11u6b6 xdb1lqr bxjqwc48sis 5dd68pfpw lxtumpinwgj vwdyg0uw4u wlvjnya7n7fr","quiz_options":{"score":0},"id":"4385137908"},{"position":6,"visible":true,"text":"74mma6 8c3o4 sjp4tg uagi32nfox489p 33m8i9q2t g1ecllv3xjnumg w3i1p26o789 3g8l35xh9m33","quiz_options":{"score":0},"id":"4385137909"},{"position":7,"visible":true,"text":"217g3m8t fdh1g7f6sng3r nt3u7d6n8j1d5 k8hwfegvg4i3xhs 6wung qmv6ilblwpg4t4c r04g9x2 ra26624 hl1gnkjlcu3pn","quiz_options":{"score":0},"id":"4385137910"},{"position":8,"visible":true,"text":"hc8vkvc9 yd3dll3x 2mo5ol4vkqru4k ubqgf odk7ghioir7gc lm0je2 4q3r0 ks32ix ra4rjxv2d9","quiz_options":{"score":0},"id":"4385137911"}]},"page_id":"168830068"},"emitted_at":1674149695519} -{"stream":"survey_questions","data":{"id":"667455240","position":2,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"6vdliabstjs pjq6v4ea sux2ll21xwm7h qt6k4qflxuslj ig5xnx aqyngvnivqhi63 k7trhm7s9n yc1uiij"}],"href":"https://api.surveymonkey.com/v3/surveys/307784846/pages/168830068/questions/667455240","answers":{"choices":[{"position":1,"visible":true,"text":"n59jsjeq v4yp5kweu77pjd 6dsaj62 f51922ixg bfy1tdx0ajmwq sq2g2ca6t1ag t91oetm","quiz_options":{"score":0},"id":"4385137924"},{"position":2,"visible":true,"text":"miou6 s74ictupiv15j a1ojxwpxsc 24wjut22cino3li 3nef4p8a1onycu s2iku aluhi 78a20fa","quiz_options":{"score":0},"id":"4385137925"},{"position":3,"visible":true,"text":"96wygu4eqjryyr vft0o 1omksgrj4e4u hov2cmxl xryqlb9qe5s 69bf1gxp prv2hpeebouh","quiz_options":{"score":0},"id":"4385137926"},{"position":4,"visible":true,"text":"80jv70rswm80ng s3yupsiahmmfxf 0qerhyspf y1shqteym hu7hk9dkpo 58idxrntiwiqc h36nn","quiz_options":{"score":0},"id":"4385137927"}]},"page_id":"168830068"},"emitted_at":1674149695520} -{"stream":"survey_questions","data":{"id":"667455243","position":3,"visible":true,"family":"single_choice","subtype":"horiz","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"1lmh6obuf8d67h 9f7639oxptfr pgb8r ud5359op7klll"}],"href":"https://api.surveymonkey.com/v3/surveys/307784846/pages/168830068/questions/667455243","answers":{"choices":[{"position":1,"visible":true,"text":"ht5dtccv20hfamc 6m2dk7 edktxyp 9bpptvmpf 76dv4sin8ps33ac","quiz_options":{"score":0},"id":"4385137931"},{"position":2,"visible":true,"text":"m1e1sw2b xk8ujpclrw rnv8g5qtu04 g1hy75 887fxgre","quiz_options":{"score":0},"id":"4385137932"},{"position":3,"visible":true,"text":"8iqn4b9 ico6j1l7h b44vd6d3383m0 rkm2tqhsi00qy 5hso1f919pq 5yyhjb5h7 dhhsrml5g6kiefx apd2weqls w4hg7 p1rwc2o7pko","quiz_options":{"score":0},"id":"4385137933"},{"position":4,"visible":true,"text":"erh5b0d 47uxrflw 23r21lf24iwf bb8yqiqs79 3y8eb7le7y2ocb8 juni8","quiz_options":{"score":0},"id":"4385137934"},{"position":5,"visible":true,"text":"x32489wko4ns f0ob7be3j pmc9ui3s6qp0 08kfm8yaqcanw 8aot5prgvkqyseo q4vp4n656gj57g xygwva 96gw2r2npb2","quiz_options":{"score":0},"id":"4385137935"},{"position":6,"visible":true,"text":"sjsfbmxa97 xq0084k5hm3 hivfs05sfir 40dj15utx bo9mx1yu0","quiz_options":{"score":0},"id":"4385137936"},{"position":7,"visible":true,"text":"jrb1fu2j x90bw8mlv85gpl0 xfo84sk0jy6 l3392k bucd4nmlc7yj jdj3x2clsir95 uw9dhluee e9ai5v8pm5 4eufmbvvi","quiz_options":{"score":0},"id":"4385137937"}]},"page_id":"168830068"},"emitted_at":1674149695520} -{"stream":"survey_questions","data":{"id":"667455245","position":4,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"h3en3q3evla 6eye52o9ijuo 078omcsgacycjn9 a76ryw2wl 5ejj2a399 r62c2jrxj2x7y"}],"href":"https://api.surveymonkey.com/v3/surveys/307784846/pages/168830068/questions/667455245","answers":{"choices":[{"position":1,"visible":true,"text":"90t457 4q172 f4kmgq3 vc09o 7hqxmsg9jx6 7fenorkuyq ov1kus876aiv0p rrmrq5qlittb6y","quiz_options":{"score":0},"id":"4385137944"},{"position":2,"visible":true,"text":"9aii09 2r9oaqs23 j2x9qy94cnxfgn jrlxnqn9n","quiz_options":{"score":0},"id":"4385137945"},{"position":3,"visible":true,"text":"r8jgwe j87njrw3yo1fla bbngq3 g26et","quiz_options":{"score":0},"id":"4385137946"},{"position":4,"visible":true,"text":"91n95cu 5fp11un vngsubusfe4 vgbho","quiz_options":{"score":0},"id":"4385137947"},{"position":5,"visible":true,"text":"9qw7f8sdfws1 6wfueox bghljj3 4yal6iqt73m6han l921hy 6yr04p7c","quiz_options":{"score":0},"id":"4385137948"},{"position":6,"visible":true,"text":"trrepiynd5phbcr 35t494bg wm02s1clg9wkxl1 eemhx faxm4gd3aqepewc rp34jr2ho5gb5q ol65t043p1 66bg3yos kh4xbmdvqyvm ibrokgs","quiz_options":{"score":0},"id":"4385137949"},{"position":7,"visible":true,"text":"ni9kx4ob31 mbn9d2n3t4j6lal xel50s53bw6eydo b0s9p p512cv6lrh 2jq1h yqmu3hg70qxw99e k1xjbd vv9lbb4kvlt1jg p86c1lob16h","quiz_options":{"score":0},"id":"4385137950"},{"position":8,"visible":true,"text":"3tx1f04qbxgku4k modppm wc68tgts7o me6s4w7ikqolcg4","quiz_options":{"score":0},"id":"4385137951"}]},"page_id":"168830068"},"emitted_at":1674149695521} -{"stream":"survey_questions","data":{"id":"667455263","position":5,"visible":true,"family":"single_choice","subtype":"horiz","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"gx0d844wwsnss b89h0hrw ihlud3p23xbv3 mtv2g99i bda7267b5 pxxsxit4ey lcji2t"}],"href":"https://api.surveymonkey.com/v3/surveys/307784846/pages/168830068/questions/667455263","answers":{"choices":[{"position":1,"visible":true,"text":"e0dhk875qtxip 22ci0nbrenwe tnj1517py jfe77 wnmap goid8 s5r92q9nx9gbxf oy5bkxrwsyqx 7oemt6oc3wcw2 5iwm8","quiz_options":{"score":0},"id":"4385138060"},{"position":2,"visible":true,"text":"nnkw2nj3g1wb 6os5x3rph9 oejmdr7e u9vf9uaxis17w7p vee9xcim164h jkcq9v7e01i njv8rrgfo3hfumd ha6djf y3jckk4un mwvekphui4","quiz_options":{"score":0},"id":"4385138061"},{"position":3,"visible":true,"text":"0b1s6goa 31eeolk05pyxfvy 5fd1rlv8g8g 9iachco 3gqpac6rjo hmq9tr5huh4mxm","quiz_options":{"score":0},"id":"4385138062"},{"position":4,"visible":true,"text":"kkpmmshmp2owso tm5dsf0bt4rm474 w1wwr4 p6ltlf pxm8o1um1cv 8lh8goe9rqqo58x 9j7ej5b 4chhf50nlimyy","quiz_options":{"score":0},"id":"4385138063"},{"position":5,"visible":true,"text":"hygcvxvhqh1um 80ak8e wd9b3vkvg hkyetchu2pdm1 tn8hu6fropre lqx4jilgig1929w","quiz_options":{"score":0},"id":"4385138064"}]},"page_id":"168830068"},"emitted_at":1674149695521} -{"stream":"survey_questions","data":{"id":"667455268","position":1,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"of74qo8c 333x5lwc2aral be0joqsxrs hvgeb7ltbcu dtyfa2y98wnn2 jbv7c vjee2juk267 o2fd4pyua"}],"href":"https://api.surveymonkey.com/v3/surveys/307784846/pages/168830074/questions/667455268","answers":{"choices":[{"position":1,"visible":true,"text":"jyb7orcvpwo c9lcx 9aq0cld2fwngg5 ple2j5y3xn2px itcvwnp0rpn2 ykoj6qw las5p s01fya1co65 q6qrkss","quiz_options":{"score":0},"id":"4385138108"},{"position":2,"visible":true,"text":"vd9eniq 7lpv8hbl bq0pjeiplw0 f0hjp7xyv2pk8 82d5r7ciqjbl ngrw1mugyp 0qb5g4vnxxjjpf","quiz_options":{"score":0},"id":"4385138109"},{"position":3,"visible":true,"text":"s84erg ueidus67wjw w849glheqbx5p0m w2am53 erjlbf9pu7","quiz_options":{"score":0},"id":"4385138110"},{"position":4,"visible":true,"text":"n9yd3gp4otjuvma 16axdetwq epitspj3f8hq bgqks3 1s7wh28 qn8cff mfk5el7vr2w26pg 7bkx6xqe 1d9eryxaimnk u6sr9v3id0t","quiz_options":{"score":0},"id":"4385138111"},{"position":5,"visible":true,"text":"l3bswpvl 9obip6i6bvg ytwh8i mmn1n0p9xe9kiu rjgto5mi9ce5dm6","quiz_options":{"score":0},"id":"4385138112"},{"position":6,"visible":true,"text":"a7j5u 2td37rxj7 1lwcfapxi2cw6g 32egv55cc52fcv ay1jvha 1169qapnsa0avix w406ev2kwt1k8n y7xqqn i8uoafghx8 gelu50jf149xtg8","quiz_options":{"score":0},"id":"4385138113"}]},"page_id":"168830074"},"emitted_at":1674149695522} -{"stream":"survey_questions","data":{"id":"667455272","position":2,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"svumjkj5ei6 t1jkf ttf93l bilmloj40l q6er242eh bojtdo6 sf5jtk 66a4anc 3j5cbh3k xdfpwwtdf3hpb3"}],"href":"https://api.surveymonkey.com/v3/surveys/307784846/pages/168830074/questions/667455272","answers":{"choices":[{"position":1,"visible":true,"text":"i6ece2o dl6k3s11vd0 shf3m get86","quiz_options":{"score":0},"id":"4385138126"},{"position":2,"visible":true,"text":"fufxxy5ueb 5u74cl6a lol396aaape2 dwgaeqcphflg ogf55axqis54 1a338abonkbvc5 f0hj7rjhu 15gsns71xund4xp","quiz_options":{"score":0},"id":"4385138127"},{"position":3,"visible":true,"text":"di9p4lr30wahw 2rg14l uqqnx1roqy5k65r 2we720uhdva oyb5f74cil odhas8h1n4u3rj dqrhl943a","quiz_options":{"score":0},"id":"4385138128"},{"position":4,"visible":true,"text":"b2j0ivu58yysns 4roq2 mqukye4mmik92 wpr49dqhc6y g6ivs5m7n9 iotdbwjay566 06vv0nn17yfqb59 peuvkh9jjd jly7qt4151 roq63i3ld","quiz_options":{"score":0},"id":"4385138129"},{"position":5,"visible":true,"text":"1rceqano97 sd5m1s3hsskyv7 bwmei412e ikfkbu xhdo2mx6 aqg5dpo10 sfjb38 vj05jf71y","quiz_options":{"score":0},"id":"4385138130"},{"position":6,"visible":true,"text":"7nlryn01exq u14xxgcx8mngy 4pc2y3 48lxatpljatuox vbt7cvv2cipj7 ise8v3j9wnomr ms330kk mgjaw7kgowjfq6","quiz_options":{"score":0},"id":"4385138131"},{"position":7,"visible":true,"text":"y0sx70a14 527jr the5p62a hga0cg5nio53 hv2s0l 6e2lq473mrdqgo","quiz_options":{"score":0},"id":"4385138132"},{"position":8,"visible":true,"text":"rxtxctm7 veua1a ds34biwojid 5cjj1qvd3 v8ksghp4g 6ist9e a5xrfr8r2 ae9rb2xw lguj1iafi","quiz_options":{"score":0},"id":"4385138133"}]},"page_id":"168830074"},"emitted_at":1674149695522} -{"stream":"survey_questions","data":{"id":"667455276","position":3,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"6ms3lgjw25pq b20i2lk pkqw6iy j77i7ux32y8 e323sjcc kh8fxxm724seg"}],"href":"https://api.surveymonkey.com/v3/surveys/307784846/pages/168830074/questions/667455276","answers":{"choices":[{"position":1,"visible":true,"text":"nqewt2eyb57 a96u7u 4sr85tfu1 acbvejxvxdui ur1p5sb 4a2r81h0hnw3m 51q3v7s4kllox","quiz_options":{"score":0},"id":"4385138155"},{"position":2,"visible":true,"text":"maso4 ovhyv0ixo hu4gwpq8ky21j9 qo2qc3kuc8n lvou1 a0wbyyp bqr9bre7 csdtfe ya1ltf6kynj93y","quiz_options":{"score":0},"id":"4385138156"},{"position":3,"visible":true,"text":"bjaogae0yct1yk slo3wuygf6 wh0fyn7lym fof1mvu a23qhlj 0rkb9sms 14urdydlb5vht dbw8uh0n3rwdj2i haqry8lhmmpcnmy gw3bvde3lsyue3","quiz_options":{"score":0},"id":"4385138157"}]},"page_id":"168830074"},"emitted_at":1674149695523} -{"stream":"survey_questions","data":{"id":"667455290","position":4,"visible":true,"family":"single_choice","subtype":"horiz","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"8rej58igix 5ev85 tiximo qcv1v11x0ixckwa uhte1umn0p v1h2mr0lm6 duw2034nurju"}],"href":"https://api.surveymonkey.com/v3/surveys/307784846/pages/168830074/questions/667455290","answers":{"choices":[{"position":1,"visible":true,"text":"xx157diwdtvnm 27fuvnqueywcf2d 7fgvt3b 7qte6 xb8oaf6bdynl","quiz_options":{"score":0},"id":"4385138209"},{"position":2,"visible":true,"text":"7uixmrb45 5217t8r2se vv8oeea62 4mjg3vqh og2vgrc5v251 xe8bb89i iefjxgj 5flbdfdb77 amgpmbo6gav6i","quiz_options":{"score":0},"id":"4385138210"},{"position":3,"visible":true,"text":"1sn25lv32td8 516w18 pdmovblb0hbd50 42vmf7","quiz_options":{"score":0},"id":"4385138211"},{"position":4,"visible":true,"text":"rnlfk p5x50v6jxbdfnkc lfjnc39nl8o ok6oyhwope sr35gd1kr5r8fg5 em4qqf7wj tt8linqt 8e8c6d9 s2geie6vw15ny nbkq71k87eu","quiz_options":{"score":0},"id":"4385138212"},{"position":5,"visible":true,"text":"g0otgfss8 l5ag3n97qp b3hbjigxocewjyi dflt05hus4w ddpumu2h7dx6ff pby1r9n a8d6xu9db rwapp","quiz_options":{"score":0},"id":"4385138213"},{"position":6,"visible":true,"text":"1w7np c3t0x knc262g yrfo4f1r4f4reh i8dhd9l1v77 72eamvjcaggrn9 fb7v3v2g2","quiz_options":{"score":0},"id":"4385138214"},{"position":7,"visible":true,"text":"n5h0km6i 58fjgrqq s17q2dwxiha 9wl01dstrdvo upbsfwpyyryn 5ducq pm1vbp w4no5od9pwqf b3e18e3i","quiz_options":{"score":0},"id":"4385138215"},{"position":8,"visible":true,"text":"o9ln4neod6l3v 5p7w4sosqt95e qk9mir6c48fbqj xlglulme cbafrf2g69p6nj htqcbq8v7u24lyc","quiz_options":{"score":0},"id":"4385138216"}]},"page_id":"168830074"},"emitted_at":1674149695523} -{"stream":"survey_questions","data":{"id":"667455293","position":5,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"virtcpk896m68m bbdg4mss12ps3 qo4gk153jqp9 iotdms cxo7l2t9 gtofci5cg0er"}],"href":"https://api.surveymonkey.com/v3/surveys/307784846/pages/168830074/questions/667455293","answers":{"choices":[{"position":1,"visible":true,"text":"1vflj0ggk1o9l nbqw3aj69wprwix i955uorj 69jyg2ci6m2lj sb3sor4o2 jmnjbw3i f6weyfas9eq1 yanlggtlu8823f s7fpa7","quiz_options":{"score":0},"id":"4385138239"},{"position":2,"visible":true,"text":"mgeif2h8oilulvj 4q5x7owpgk sxoc48wbr7u3 rmt30uk4m7w7q hpqj8faxcl3qrc5 qfuhdde a1itv i3292al93b","quiz_options":{"score":0},"id":"4385138240"},{"position":3,"visible":true,"text":"3an10n o05hsocsiq0quj5 9ay5x f6p79mjl jjcw6ym9dpfwrj sygnf5 bh3mo 1x146ti6 x4u0e3pxa1ko","quiz_options":{"score":0},"id":"4385138241"},{"position":4,"visible":true,"text":"va9yadd6 xsc7t62edxbwl d7d7n7ecqsealn u9ognb1ox nmaht pwy1d 5mdngtxn4ol1tel dxlmr67c00e hw11e9xn7h","quiz_options":{"score":0},"id":"4385138242"},{"position":5,"visible":true,"text":"b1qfaxqnj91j8mj edwip5b22pdd tuh6g5uodx2 sn4e9lv7xsuul jxmu0iubodnpw 7rqts1liyv27j","quiz_options":{"score":0},"id":"4385138243"}]},"page_id":"168830074"},"emitted_at":1674149695524} -{"stream":"survey_questions","data":{"id":"667455297","position":1,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"lq8cc4kb4wb hagmv535tvyfw4 q505o lc9pke7la"}],"href":"https://api.surveymonkey.com/v3/surveys/307784856/pages/168830083/questions/667455297","answers":{"choices":[{"position":1,"visible":true,"text":"gnig56iqkr f2p71ed2w9fr gnpyvvl1 9mc6qpbow5tam mxta8 t9blqkndj9c 1gisvkrl2 0p43lebad 6v56y0e392el ahfwj9tq6lbhm7","quiz_options":{"score":0},"id":"4385138277"},{"position":2,"visible":true,"text":"77wwt btv0ntp gym5s54 lr3ji8wtg4dd qoy58mimj d2yjili","quiz_options":{"score":0},"id":"4385138278"},{"position":3,"visible":true,"text":"9hphq 5qj6yrbg2na mnmodl22e8cg siook2te8gpl","quiz_options":{"score":0},"id":"4385138279"}]},"page_id":"168830083"},"emitted_at":1674149695661} -{"stream":"survey_questions","data":{"id":"667455299","position":2,"visible":true,"family":"single_choice","subtype":"horiz","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"0cphg3eq5u jtapt0bso07ghd 0bgtvsb 5pd5xfhq5t1fgf w0jm2nstiu 93om3crky6skr q49leuh249 3q2tkvncda03g 40orw7354cy p8eku"}],"href":"https://api.surveymonkey.com/v3/surveys/307784856/pages/168830083/questions/667455299","answers":{"choices":[{"position":1,"visible":true,"text":"mrhrmoj8c c8fd51r 0lbuywjnvnmijj 0tmyxfrg fggvqmm3bvivav 8b3elaxwyb d2wp2i hr0jllmfkd7 prv23lvvws27gx 0e6okfhy5sn","quiz_options":{"score":0},"id":"4385138282"},{"position":2,"visible":true,"text":"mv5uq8u7dup2r lfo2ih2jkc5cp r6jydm0x6w 1p1c3s67p57 7eulrlih 1v7p2vig8 99esod2scbs pg87n9lp9mg476 6hmjxdey","quiz_options":{"score":0},"id":"4385138283"},{"position":3,"visible":true,"text":"ahxj3 imh6reai78juny6 bop9te8ej8q6l gwxwkjup43o6tr1 nbvorbchco4ptow pwomv9iyd9t jkrjgggo3s 6ipaxevsfrxrmtw uq3n0cmg k1odeemd29l","quiz_options":{"score":0},"id":"4385138284"},{"position":4,"visible":true,"text":"t4pu3i ixvxd q10uqer3 gkqtljjmflbts","quiz_options":{"score":0},"id":"4385138285"},{"position":5,"visible":true,"text":"xwwgqr 7hlt9dq 1tloksa kehvt","quiz_options":{"score":0},"id":"4385138286"},{"position":6,"visible":true,"text":"2vinv5qis ipjdbl cuwxgei6t8g is2ihbn xs3q9m3rl","quiz_options":{"score":0},"id":"4385138287"}]},"page_id":"168830083"},"emitted_at":1674149695661} -{"stream":"survey_questions","data":{"id":"667455301","position":3,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"5h0aahwgk8 eoh0xct ytmtsdr15y fawlco duk30p0qejro"}],"href":"https://api.surveymonkey.com/v3/surveys/307784856/pages/168830083/questions/667455301","answers":{"choices":[{"position":1,"visible":true,"text":"1ib8lq0i q896a iwg01totbr1c8 nic9ye883le 9pjfihiuxbp5 6wu9jitrk1 k3kvcggtbgboo9 nenddst 1qgr2y ilmy2v1ddb9","quiz_options":{"score":0},"id":"4385138288"},{"position":2,"visible":true,"text":"05nxifhb7u4 v0tu1 i8u8dd63ekyj h3xah1h0s7k9vn m95qi vmfn7l5i7iu3es hrywpnsl6rp","quiz_options":{"score":0},"id":"4385138289"},{"position":3,"visible":true,"text":"1fgt8f3w 2i0cyt47w3sl6o k8x6l8i3stl2cc 2h13e9tt1cwaa9 e5l78fvc x2y42gkhqhkc7r","quiz_options":{"score":0},"id":"4385138290"},{"position":4,"visible":true,"text":"ye6nyey79pu2 596slmq qgawtw10v06mtad m113i7i8sd1l x9o2f","quiz_options":{"score":0},"id":"4385138291"},{"position":5,"visible":true,"text":"2mr26jkd368pv wfxs2sxlxag3o3 o15fq 5u1n5tdvs7j0","quiz_options":{"score":0},"id":"4385138292"},{"position":6,"visible":true,"text":"ffdpeyyy nvtjrvxqnqmr f2jwutj 5uw3e0w4n2h dmah35mk v979ctn 2s683h24","quiz_options":{"score":0},"id":"4385138293"},{"position":7,"visible":true,"text":"245tmey w2ltcq50f sktfit h9ymojx j3xrggyo 51d1y","quiz_options":{"score":0},"id":"4385138294"},{"position":8,"visible":true,"text":"lv4qkg6meoylx rmlf7cdb1aht r316f1u kfcwrh5 cm1m3 s5x3eqj3t v1h721uqo3k5km7 9n1oqah9","quiz_options":{"score":0},"id":"4385138295"}]},"page_id":"168830083"},"emitted_at":1674149695662} -{"stream":"survey_questions","data":{"id":"667455314","position":4,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"ux15jxtnsx3 vofmkp85 a5kvupg5km6vq0 t5g3uf3q7hn i661htxcb s7x7r26 orjn3oisiik"}],"href":"https://api.surveymonkey.com/v3/surveys/307784856/pages/168830083/questions/667455314","answers":{"choices":[{"position":1,"visible":true,"text":"9fr03iy 9xmpt6xgqwpc k6t7k9gp1ht1 6h620md9wd70hh bs3kw i9htv yil47l xn823m6ih","quiz_options":{"score":0},"id":"4385138315"},{"position":2,"visible":true,"text":"it0ef6tr6h9nt t2jwq18hq3w59p4 oyx7e1bj86bcm 02aaa4kvmf8ru77 ssmts45dadkf gl1spgihc4 acylm 0vmvvgxv6yvey0 8xi073ec2m5","quiz_options":{"score":0},"id":"4385138316"},{"position":3,"visible":true,"text":"8m4mr5q 7wkbq4t8vplwih 0cvqrnnt qt9mjry1n xyqbuaepupf3 ed52xu5ak bd1vxipoo5ad s7pxs874 a9imnp7nm","quiz_options":{"score":0},"id":"4385138317"},{"position":4,"visible":true,"text":"2clmq75t 252dtb2ce3i fld27xux vrip5ox3ds8qnb lvp972rcrcjc ruvk2sclvimuvx 1ud7hrsbm567","quiz_options":{"score":0},"id":"4385138318"},{"position":5,"visible":true,"text":"0l86fyh8uo 090ymhll1pq 9lkwl89vq f1pcky3lidacy9 3ecmm11niu fu8tfp","quiz_options":{"score":0},"id":"4385138319"},{"position":6,"visible":true,"text":"h37f95j7qxup0fe tbx1l3bgii ol4bri0itarcwk doh3p2p0pi jq9guw3h382 08fje7vyonhmfe5 s2ioi7c4v6ci","quiz_options":{"score":0},"id":"4385138320"}]},"page_id":"168830083"},"emitted_at":1674149695662} -{"stream":"survey_questions","data":{"id":"667455318","position":5,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"1gm3ed2s89 p2xfqhaj4r p7fwo6 12imab59cds2p aqmmilr2dvmwvky nljfs ts3g4cw6au9jii snbv40hbjcu3"}],"href":"https://api.surveymonkey.com/v3/surveys/307784856/pages/168830083/questions/667455318","answers":{"choices":[{"position":1,"visible":true,"text":"dfyvo2qlanm8s ikbv3wa7030 rmjt80 o7cym2 1r6qae7c70v30 fko724jo7me82s1 bsuhjbov2ttwr q3w1wpn5twsv5e slrnk3sx m3u87rixhv6nmc","quiz_options":{"score":0},"id":"4385138386"},{"position":2,"visible":true,"text":"f47smf7vov8sp1 24r5d2b6q4s duka34dqpn6si 4r2wn 92ekkv2p794l8h l8n6cdc","quiz_options":{"score":0},"id":"4385138387"},{"position":3,"visible":true,"text":"okl5ki7v1r5 4oqdy4 x7ny0qmas 0ddqlr1 ja5wspe 2ieqa m3ucowjq1krai","quiz_options":{"score":0},"id":"4385138388"},{"position":4,"visible":true,"text":"gt5y6 vpjp0e5p6 vqhwb2dytiuihsv ru25v6bm mcihbuved71h2 quy2rej9e8eb97","quiz_options":{"score":0},"id":"4385138389"}]},"page_id":"168830083"},"emitted_at":1674149695662} -{"stream":"survey_questions","data":{"id":"667455323","position":1,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"wv0en qigyrej bappxu8q j3ihl9p6ki"}],"href":"https://api.surveymonkey.com/v3/surveys/307784856/pages/168830087/questions/667455323","answers":{"choices":[{"position":1,"visible":true,"text":"xv1me9s6hd aihplmw6 plhv5b k52pvh68j so1ggnlishy5m 6qe1fhw75gt k1jxdmddlhj35en","quiz_options":{"score":0},"id":"4385138412"},{"position":2,"visible":true,"text":"7ecrthfm97aysv 8u9a9sn1f9kj8 90owtm 0h8qgota7j3qpn vnfs9vleja36","quiz_options":{"score":0},"id":"4385138413"},{"position":3,"visible":true,"text":"v519ikhgw0fl s8x0shqsi ssr005 8xm1b7fal622l","quiz_options":{"score":0},"id":"4385138414"},{"position":4,"visible":true,"text":"gbpim9ar0dfgi94 nj0mq3ejst csj5e 763j6d5eo gf4fvw0 s2ea20n33yo iqd5r5l9 3t0okvw2oyh","quiz_options":{"score":0},"id":"4385138415"},{"position":5,"visible":true,"text":"jyxp0s7xfc7td5y vom52gda3dxr ko6256dtc5nv5f 7s8nej n32hyka ywsoxywn","quiz_options":{"score":0},"id":"4385138416"}]},"page_id":"168830087"},"emitted_at":1674149695662} -{"stream":"survey_questions","data":{"id":"667455325","position":2,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"jdkmr4b5t3o9qd6 0u4huyxr8whu cxqri4e2i1a 88yyx cq3xbymudltf y6hmsrn4socbj"}],"href":"https://api.surveymonkey.com/v3/surveys/307784856/pages/168830087/questions/667455325","answers":{"choices":[{"position":1,"visible":true,"text":"5aacsmqn 9p8l1tvy9i 0f92rvnxn9 to5xiaghqmiw 6xik80 jxvy64ut9 bjk3w6ywb","quiz_options":{"score":0},"id":"4385138421"},{"position":2,"visible":true,"text":"bc85jh39p 7nskr87a3x ny6d4 jlbavo9t8h6j6hu dh5ne","quiz_options":{"score":0},"id":"4385138422"},{"position":3,"visible":true,"text":"3vykabcspbl7qt ajqad2gu3v0jbg yeo3sobedrdfe0 1wcrn35 5l7jwq3 f5bc8bx","quiz_options":{"score":0},"id":"4385138423"},{"position":4,"visible":true,"text":"sm6lx7btphddbw 0882qf4o omh4u2i446c9p4q 5hqyq27jlse1e7 ns3xkqg8gcx8pc sjd0skhv 9ydxkadh8e814j7 0mk8m5tm9d38e 23cvhf22g lxb5c7c3p0oe1","quiz_options":{"score":0},"id":"4385138424"},{"position":5,"visible":true,"text":"2x7f7xmgur5a 6mn40jjs2dde 3th2mj8cn dv2pbu6s 7n8hw1f ptapt6nxgddk7a 4cj77u6m3mm idvm31 mx9ygnq3i 4lw1gmm4cwaig","quiz_options":{"score":0},"id":"4385138425"},{"position":6,"visible":true,"text":"3q51n sofjjqlu2y 6088c4c ncdkdt8exikoiir ew86v6gkob94v 7jsgkctqkhm1","quiz_options":{"score":0},"id":"4385138426"},{"position":7,"visible":true,"text":"7lffwkal h4d2j5 dqjf3y5 jwopu 0xf2vqmb6an igo5ri3px747b 0l5s9df7w7s","quiz_options":{"score":0},"id":"4385138427"}]},"page_id":"168830087"},"emitted_at":1674149695662} -{"stream":"survey_questions","data":{"id":"667455328","position":3,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"ieffti0yi qcxktrskkug9ij ebvs68ni79 1vrpnp mobkmem70 7uc86c7sx"}],"href":"https://api.surveymonkey.com/v3/surveys/307784856/pages/168830087/questions/667455328","answers":{"choices":[{"position":1,"visible":true,"text":"7vvg5h tc4vy 5c5194t1n6eu wutfyil","quiz_options":{"score":0},"id":"4385138432"},{"position":2,"visible":true,"text":"17dh5pnpe3bc 6gmy6r15tq7 fuwlrcnnt61wotb mj08g80e2pri","quiz_options":{"score":0},"id":"4385138433"},{"position":3,"visible":true,"text":"414pty e5a4vs1 4p6122ihy qnjg9a xuowh226f18 hc449ct tdwm0wu29u","quiz_options":{"score":0},"id":"4385138434"},{"position":4,"visible":true,"text":"24k06ig dhy3huyx0plis 0n5vomlwuo38j 0nft5aw obgn4qcoq0l44b l5dtviydcom58cn 699vqm6 06i0mr52i0 u1mvn mm1bqovxpvtwkok","quiz_options":{"score":0},"id":"4385138435"},{"position":5,"visible":true,"text":"3fr4ttmtr3mhvr d7xcfp6tx48mne3 ttcyvypnom ik9eqkf o7q5x4veph 31y5w8u036c56rh 9yar58y9t5d","quiz_options":{"score":0},"id":"4385138436"},{"position":6,"visible":true,"text":"jrghjb5g3h6t dlpx7hve7lijy 77ergx421ad dgekp5dauuod5t 3mn6a 7m9wvlhvgeua 5orruhepinotb hd948u958 23p4f fmprms","quiz_options":{"score":0},"id":"4385138437"},{"position":7,"visible":true,"text":"dvjf23fp1f8slys 6120e2kbl8p1 f2ildddc i9ocnxo dk1c5jm5bx3 1mmcj3qmntljpbt 889694rivh72g07 25yrmna iwjlytheaogoxq fanj2","quiz_options":{"score":0},"id":"4385138438"},{"position":8,"visible":true,"text":"f2tlh hih5om u7aqefshc47ph sxxt22yg7hi","quiz_options":{"score":0},"id":"4385138439"}]},"page_id":"168830087"},"emitted_at":1674149695663} -{"stream":"survey_questions","data":{"id":"667455329","position":4,"visible":true,"family":"single_choice","subtype":"horiz","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"96mfgjfkj4g 1roun x4g5tcrq d52byhs855 cjc897qm8l udgjrsby"}],"href":"https://api.surveymonkey.com/v3/surveys/307784856/pages/168830087/questions/667455329","answers":{"choices":[{"position":1,"visible":true,"text":"2ny2fhgpm6 aturc40fuggi de2bnbhjnc a8jmpcw54h3nu pij7dkc3 sbrbi0rbi40ox2 wskx4gyt7aoc65 coi4cfe4y p44mj88ikx 93sodjvxi7ny30","quiz_options":{"score":0},"id":"4385138440"},{"position":2,"visible":true,"text":"t1f53t6jo 5s7kaoe ma28g93gfjjsm nwfjlfr2 h4k3jav 4xvidc3fv 8enehee7txlhvp ca6vkoxrb465pi xmi35um8q54r6 4rvruqr46m","quiz_options":{"score":0},"id":"4385138441"},{"position":3,"visible":true,"text":"i7iw5 ap5gjyafbm2l adp3lcc1 52hc8 j6ldt5","quiz_options":{"score":0},"id":"4385138442"},{"position":4,"visible":true,"text":"468nunilylfthe gwy8lhtgga9re4f 5xacgti673jfgs 3eei9s6qwg4avy ksqn6cwpvl585 wtyw59jhy7kck m462cr65qglmq ev7c0b5","quiz_options":{"score":0},"id":"4385138443"},{"position":5,"visible":true,"text":"yxrha98q xgjtmqc6x6tbsq l5co87ln2j3044 49lpv4 l8rfhvt2 rp4v9ofww2bekc7 ops08osul9","quiz_options":{"score":0},"id":"4385138444"},{"position":6,"visible":true,"text":"dbah1h80x 07f9vbs n89jmtwm0t2 47sd0ilc umky7iesp5j1ye f825fm7sn5fteb","quiz_options":{"score":0},"id":"4385138445"},{"position":7,"visible":true,"text":"9ybbt8x1xk o12xtb esgrab5p169kpou jyx54 456l76rs9f 3pcmlfoju rfyofv71 lb7gr6gi2ab0 gmexy","quiz_options":{"score":0},"id":"4385138446"},{"position":8,"visible":true,"text":"k3s5bwg5b2q 0ikv741vhxu3x4w efpp0p21i1s 44ca0fl4bklmn","quiz_options":{"score":0},"id":"4385138447"}]},"page_id":"168830087"},"emitted_at":1674149695663} -{"stream":"survey_questions","data":{"id":"667455332","position":5,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"v80ku mx1co9qwm34tat e9i3mdjcvixs grggkttt lhn014lmqj86 achjddrt9o5fx 9v07aged4niq0g ye1woaclolxuaq w5e7jclooee"}],"href":"https://api.surveymonkey.com/v3/surveys/307784856/pages/168830087/questions/667455332","answers":{"choices":[{"position":1,"visible":true,"text":"u1e4v4rx27412v v9mjw7oaf 0t873cte 57y89l xdqtturimm5 b4stpodx65s8u 2mk2es7jwrpn 7enb3sp29","quiz_options":{"score":0},"id":"4385138468"},{"position":2,"visible":true,"text":"w0mid5oagg qx0e1bvil6w5v6 0cadbm51x7hbg rpxgn9yni","quiz_options":{"score":0},"id":"4385138498"},{"position":3,"visible":true,"text":"7rex013 m75tu1nu9orrc2 k4du9rcsy2n5l0 cfbiki1u6cp2f qe81rjnguphrum j4019 tbl21q37","quiz_options":{"score":0},"id":"4385138499"},{"position":4,"visible":true,"text":"sat97b1hk6dx9k uq924nht7pr8cb 7nr3h2hclmiqg txrkxr29wtrc217 bxmed4ll1b23561 vvsu7 x293il lrl3e","quiz_options":{"score":0},"id":"4385138500"},{"position":5,"visible":true,"text":"h3yh19ckoclpq 0hb213i nj1mfmvbj9 p4ibgetarc6h6u 8kahs","quiz_options":{"score":0},"id":"4385138501"},{"position":6,"visible":true,"text":"xyvg34bae2 7u2n4l87h aec3h1sy5aw62r 60yajbqvxifw65 c7q9ty4pdby2d vyjp2 n7tavs0550g46 07p64c9pp8oo","quiz_options":{"score":0},"id":"4385138502"},{"position":7,"visible":true,"text":"kn8f11mlx sucpq9a 79n0u6vi1tgt b0dom486a929h ocblyvsm6 ti4tnjv533","quiz_options":{"score":0},"id":"4385138503"}]},"page_id":"168830087"},"emitted_at":1674149695663} -{"stream":"survey_questions","data":{"id":"667461429","position":1,"visible":true,"family":"single_choice","subtype":"horiz","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"vq7ho4rb qt0fpw3 fvn1b2y21n fpkcw9v73dqvfq0"}],"href":"https://api.surveymonkey.com/v3/surveys/307785388/pages/168831336/questions/667461429","answers":{"choices":[{"position":1,"visible":true,"text":"p7fu5tjqvd qgxdych hv1xcryuq7jlia 1qjuijk40 2v6u2e kwqcepbnx 2hjta3wqc je2wbyr6337d5 o0fq46u48nsa as5jyqahxo","quiz_options":{"score":0},"id":"4385172931"},{"position":2,"visible":true,"text":"tubba qc6l28jc4m1xi 5lf38au3ry9d o9eh4us9ki ul832 jippjf4 bcsoug6 9w7mnssanfsknl f422osb58c","quiz_options":{"score":0},"id":"4385172932"},{"position":3,"visible":true,"text":"juswx5 78b5n495 duu5kiikm feknbdqqtxg memsnyrcmao1lh","quiz_options":{"score":0},"id":"4385172933"},{"position":4,"visible":true,"text":"xpu48c 3cdbygss10 ujxmb ore7vx7o0x 9qmlb9fig6p3u w6c8oqr5dhp1l g3ihi9p1x kf2lvtbxo 4guw65","quiz_options":{"score":0},"id":"4385172934"},{"position":5,"visible":true,"text":"i9508b8n ld7powh72 nfvmljfhgn3n 643ydxghpbak7v ehe18sjo56yx m1bpaoj4epr 5sv5aw6 7m0bt","quiz_options":{"score":0},"id":"4385172935"},{"position":6,"visible":true,"text":"vwtr9m5fu3ot ltiqhsx3fi uuylf62qec mmn5fxqj","quiz_options":{"score":0},"id":"4385172936"},{"position":7,"visible":true,"text":"ww4qdi5pqg 3qvmj8g0yvx rrr5fx06d7 t6giac3k8 t3d6exqx175ft 10k251y 47v1vnu938 kkqqcsl50d7i","quiz_options":{"score":0},"id":"4385172937"}]},"page_id":"168831336"},"emitted_at":1674149695747} -{"stream":"survey_questions","data":{"id":"667461433","position":2,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"3bdmcu bth1w4ifhtcbm slcl3p6ynqhjtyp n5nea bdq5iuvq77m0t"}],"href":"https://api.surveymonkey.com/v3/surveys/307785388/pages/168831336/questions/667461433","answers":{"choices":[{"position":1,"visible":true,"text":"26j27d 5s3pbf08free77b ttq5exq7 n2gm99qkada 3rosqp hn74g juh2ww6","quiz_options":{"score":0},"id":"4385172948"},{"position":2,"visible":true,"text":"phmu9ghnjc vkh0ury2y6et tqj1lwjv 37m5bx1itv a5x88phj3g5 832qd1l rpbrunm7v49 kpmqo a8eqht49077t","quiz_options":{"score":0},"id":"4385172949"},{"position":3,"visible":true,"text":"jbsjkqa2f2xb ks824l5wnkd b0p0elet784r6rw ay5sqsv2vdron lr8mx7r6pc im42wmwt ltyhtay7p8u","quiz_options":{"score":0},"id":"4385172950"},{"position":4,"visible":true,"text":"ikkvwye7tah 7shp28 p7k7je0as5u sdc3f1rvin staohk6a44k nqjsxn3 97bog4jfxn qvorj30xpuh1gip","quiz_options":{"score":0},"id":"4385172951"},{"position":5,"visible":true,"text":"p83chdtsw6s88u 0hqgb8h871mw p0ej9 djy316qsa7iv6pe","quiz_options":{"score":0},"id":"4385172952"},{"position":6,"visible":true,"text":"kgfa1 tyxdxoe3gc xclen0vw9oa2 6bu9o8b6awx hcg9pgsi1av9v 7dicbt6wsee70f glwmxxtcdr 4qi7m9p8tfkxur3 ir5jr31364","quiz_options":{"score":0},"id":"4385172953"},{"position":7,"visible":true,"text":"u6xyfnav qw5qefsi 6ttsauh 3jwvmju8sdjk87 bql4ra2ww 8nxxobw4o58 omi87y6ur8l1f2g 62gaxuq","quiz_options":{"score":0},"id":"4385172954"},{"position":8,"visible":true,"text":"xdjcisgicnaix2 fj8bpbqaqgntr fb2n0o73 mgsb8xg5x3nfg9 9t18omvng4p6e 06wepywm4wku 82pemp2 l3nsu2ib2erbva tj475a","quiz_options":{"score":0},"id":"4385172955"}]},"page_id":"168831336"},"emitted_at":1674149695747} -{"stream":"survey_questions","data":{"id":"667461439","position":3,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"vrhftdyyte qir7jajr68td obg64x tu1rcy2h pqnxtdrwxk00a c173brbv 6qfxck7huyx"}],"href":"https://api.surveymonkey.com/v3/surveys/307785388/pages/168831336/questions/667461439","answers":{"choices":[{"position":1,"visible":true,"text":"q6705n7lidhb 0gsqn8 ocnyaudmo ulp9r0rsoheh4f cjwcqblbh o28tilm13w384","quiz_options":{"score":0},"id":"4385172990"},{"position":2,"visible":true,"text":"vyrx1jpjcl07 etxm7itb161 51rf5csw6e5tsh 3ux7rxq2 vct6fys4r7","quiz_options":{"score":0},"id":"4385172991"},{"position":3,"visible":true,"text":"pp1lc12uhii 2qmm3xnfsp i9912c8ac5k74i hxew5625hxtm3 4o658 jenbgi9o89 t4ppqc2qvhlui iu92ym nqjkka1i7","quiz_options":{"score":0},"id":"4385172992"},{"position":4,"visible":true,"text":"6mx9j44l0oa yv2wb1letc4p d3u87l59 vp7a65ykcfjyt tpe3k92l y2flusuc3tc t220oi sekyd","quiz_options":{"score":0},"id":"4385172993"},{"position":5,"visible":true,"text":"4fbdo eli65un2emx e6oyl3a41ugoxb2 saxpsxn6fv l8s2mk0e57d60","quiz_options":{"score":0},"id":"4385172994"},{"position":6,"visible":true,"text":"r5ly6h 7eu4pmnx7tv04g rdtjvlsup3gdn h4qoreg 3ct3fudxbuuw2 b975v8 ilsfmaa22","quiz_options":{"score":0},"id":"4385172995"}]},"page_id":"168831336"},"emitted_at":1674149695747} -{"stream":"survey_questions","data":{"id":"667461441","position":4,"visible":true,"family":"single_choice","subtype":"horiz","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"5p8n613hcmidh w4gyoyf stviro3om xey832w t9w23kj3j4rp5j h2w1swynkq1n9e vqrk2q5eb76p9yn pbj778q19u1hqy r9uvl76qqhfg"}],"href":"https://api.surveymonkey.com/v3/surveys/307785388/pages/168831336/questions/667461441","answers":{"choices":[{"position":1,"visible":true,"text":"tmdpekdn75l9b 3nuus dqbdxtj od2mltbx tmw5kcvpiw 6n41343132bdmc kb3i9er4qg","quiz_options":{"score":0},"id":"4385173020"},{"position":2,"visible":true,"text":"6a8efsocxmc eeohurhhyduo31w c8na1dub0ycx q8314ir twgrq 2ukcrksjt30s","quiz_options":{"score":0},"id":"4385173021"},{"position":3,"visible":true,"text":"ou9m3xl2n9wvn m0wews5 il8o8so pygnm380cd66 7nhjkpk9lu65n 4e3ifrwcb8wr 46bo0ani86m du57mphcnvf1in 6gf58fwm2c50tp","quiz_options":{"score":0},"id":"4385173022"},{"position":4,"visible":true,"text":"nahvw2sd3 hn5trfgqbuso cq82jp7 k8ev0 s8a0a23m0p g2jewuy0wdadgsa hca0mm5q 8agnm fxnf8vrgdybkg04","quiz_options":{"score":0},"id":"4385173023"}]},"page_id":"168831336"},"emitted_at":1674149695748} -{"stream":"survey_questions","data":{"id":"667461444","position":5,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"khys853d7u tgmr69e7mu7 0h6sn7 cja88 nj2os3 wnand9tdmohxca rowe88asxsja1dn rvs12kt5m0wqd2 41pwt1vnhst008o"}],"href":"https://api.surveymonkey.com/v3/surveys/307785388/pages/168831336/questions/667461444","answers":{"choices":[{"position":1,"visible":true,"text":"3k875epq o7u7f5 kihtesav7 qwxlu7j5dj59i 8ghsap3n","quiz_options":{"score":0},"id":"4385173040"},{"position":2,"visible":true,"text":"cemvy 140ek4 d1fumxxlqqnyt 99pb868kg2b hqur2f44k uwr5dyif8g6","quiz_options":{"score":0},"id":"4385173041"},{"position":3,"visible":true,"text":"iykvyahtufq91l bvs9t6mmoyw ld2hep1somcl7 2rs7wk3q 5ncge9mfj1ac r30355p0g4wt c1bx5 n5wgmcud6h","quiz_options":{"score":0},"id":"4385173042"},{"position":4,"visible":true,"text":"a4xcahm1euf l3h1jx abcc0r500rlqyhv 9w01sfmci3j ur9vp3sxfioh r0dksavcmduhk2 kw7nbl 5hys2r8vebx4e 94rajuno uwm3ajywh3vlqbf","quiz_options":{"score":0},"id":"4385173043"},{"position":5,"visible":true,"text":"0whm5dw5kuk44 efrr6i iu9592 a968tp4ff0uaf q0f6mtpcp2x82ae 97m1gvsnfthibp agt6dm3 ip1e3y9","quiz_options":{"score":0},"id":"4385173044"},{"position":6,"visible":true,"text":"hwbtcf7 60gji9qoeovhlf 9b3ijm8 xiw5w09k 460v2o27hacdts 30eq74bg9m qpfv2jd9f3ur rlacn2rf273ck7 2welkh7188h","quiz_options":{"score":0},"id":"4385173045"},{"position":7,"visible":true,"text":"cbqj2nlsnyy tmjyvoija wirje1e bxsqxqe g12fuxmtgfq 7fc74o yrdsjey 1xmv6u077j7l 8hkf7","quiz_options":{"score":0},"id":"4385173046"},{"position":8,"visible":true,"text":"uqlus j3nd9u8g faqa6ghioy60h9 a95spokyj58 ndgctguy75jr ei16b1p7jabc2 hfkfpruds","quiz_options":{"score":0},"id":"4385173047"}]},"page_id":"168831336"},"emitted_at":1674149695748} -{"stream":"survey_questions","data":{"id":"667461449","position":1,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"cwipmppqgy2vy 3mn05n4 gedv0twor lyawl9v528587w wfmifrr9hk pykvx52nu1ds5p hh28niyix"}],"href":"https://api.surveymonkey.com/v3/surveys/307785388/pages/168831340/questions/667461449","answers":{"choices":[{"position":1,"visible":true,"text":"3gjvb8l80nc8j 90ebdlnd3hg spis2ipbim9l fjt4xli4a 0bqmti2wp0juwcl cdwl64u2yfghjdf 190uek5lw","quiz_options":{"score":0},"id":"4385173076"},{"position":2,"visible":true,"text":"xjdswmkwm24 rrmxo7otpear8 ogepd7v8br0y5m vi2vyip49ux uvtnq2sem 5gwcppvvvc bw56us8o 1huql09g6pa1ylb","quiz_options":{"score":0},"id":"4385173077"},{"position":3,"visible":true,"text":"gyq16tgah s669s7 j5isg9 l63lf q61pyy oywk58jfmnvy 6lhnukrce1px bwt066s8k5248o sylwt em0x3","quiz_options":{"score":0},"id":"4385173078"}]},"page_id":"168831340"},"emitted_at":1674149695748} -{"stream":"survey_questions","data":{"id":"667461452","position":2,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"aku2037bp sxtf1busk2uj 7scwpc00giyau urduwqdsyr3 xk3y39yjywei1y uw7y5s3ky fetwdqr6n qp00a8rofpy 4rwl41yk78jd"}],"href":"https://api.surveymonkey.com/v3/surveys/307785388/pages/168831340/questions/667461452","answers":{"choices":[{"position":1,"visible":true,"text":"86hx9ifui6c4o 02xkn5ghldfw35k faipmm g7634wrx1jfvy jewjfdq kkyyxx00brj wtu0xrhfhvu5lv8 573fv0vtm0n","quiz_options":{"score":0},"id":"4385173088"},{"position":2,"visible":true,"text":"nfeyvnb0kat1yb0 3lk3rse66t8jp w90vixgpu32cyir q03jxd8vch9xn7t 3a0swht9ykadh8 fvoa9t8dri fm0twcdhxm 4ot9xpxe9s08b ho7d1","quiz_options":{"score":0},"id":"4385173089"},{"position":3,"visible":true,"text":"k5pyc4y 9uul1o 7lvigo 7xsdo49jx2yc d07jr1w4","quiz_options":{"score":0},"id":"4385173090"},{"position":4,"visible":true,"text":"3e49s5 gxqwbv w6c2e tjaf7rpgtksjpe ivr0he574 ft8qso pqq4l5hbchy 6mhw0ksgrh xbj4l7a2g","quiz_options":{"score":0},"id":"4385173091"}]},"page_id":"168831340"},"emitted_at":1674149695748} -{"stream":"survey_questions","data":{"id":"667461454","position":3,"visible":true,"family":"single_choice","subtype":"horiz","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"4i1qajt 6sy8nl3hjougqbu okftpe4sw0bpr6 gv1pbyk8km 9ihd103uu2n3lx c6spvurq080unx uswpaexarx"}],"href":"https://api.surveymonkey.com/v3/surveys/307785388/pages/168831340/questions/667461454","answers":{"choices":[{"position":1,"visible":true,"text":"vg0vpi1 100y8 03nnh0 947rcth5ofg x4xfc38q81i6e7 bgf20442q i2jh2 lolt43y3ri2 vl0t6 2rkshapnf","quiz_options":{"score":0},"id":"4385173102"},{"position":2,"visible":true,"text":"o7qfwgqys eyumel 7spc5 42qxefd","quiz_options":{"score":0},"id":"4385173103"},{"position":3,"visible":true,"text":"usb43wqju6w csearclgaedae 2oo3m2a79oo5si granw6hf 9g9ms37peouy gqaxry74 x45yqa0xtkcu g0in9 lyp7xe6 few8vkh7yf","quiz_options":{"score":0},"id":"4385173104"},{"position":4,"visible":true,"text":"spfrf5sfebrhj8 gwodgm3o1 m5cydcxbtk2 pgrj6h mome4a us97ellx2peg3s ilidjy8juu0","quiz_options":{"score":0},"id":"4385173105"},{"position":5,"visible":true,"text":"w579aalpr5gaj1 1h2ud 53d80pebt4ep 0l2gw8fk7fa","quiz_options":{"score":0},"id":"4385173106"}]},"page_id":"168831340"},"emitted_at":1674149695748} -{"stream":"survey_questions","data":{"id":"667461456","position":4,"visible":true,"family":"single_choice","subtype":"horiz","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"is9uyf6ka iel16fh 1f0xg 7mx16glcei fygrn g7l10em5fbeybb 1vgps0k3 q4fls qryybq07jy f3luik1nx09b7so"}],"href":"https://api.surveymonkey.com/v3/surveys/307785388/pages/168831340/questions/667461456","answers":{"choices":[{"position":1,"visible":true,"text":"0fg2b3v4 jmdxep5of1y q2ag243 cfytydw942toqsl 2ixd8iynbs eg22ia","quiz_options":{"score":0},"id":"4385173113"},{"position":2,"visible":true,"text":"0am3pervuj biqdk6g5yw i5xvk20h1n6jv l0v9m5cxn 2vyqvcp8rlthxfv k0v6o48p3v8 551pws1020t98f","quiz_options":{"score":0},"id":"4385173114"},{"position":3,"visible":true,"text":"cjtogxiie0arj ltkgiwrpoa4x1v foqfr1gk406a rale1dx 2p4gjy2g","quiz_options":{"score":0},"id":"4385173115"},{"position":4,"visible":true,"text":"h31va0f12qx3vg 51m9g4bo9 r4ofiqr j1yym2ma8m13kch 1e4jxhdyol2ny nf3fh0h2e qw8euwei1lyhemq 3j4un2sjdoj7 q92100o573tbom 5phvt3n22","quiz_options":{"score":0},"id":"4385173116"},{"position":5,"visible":true,"text":"h8hktik e03ehrspn7 rnjqq4431q2 3bs1nevr8j0 audw020tl kalstea2","quiz_options":{"score":0},"id":"4385173117"},{"position":6,"visible":true,"text":"9w090 e6835j0fvfg89 kk6swkb5g oo0f4ho99x437 qf7b8y1aaa4 i0r03","quiz_options":{"score":0},"id":"4385173118"}]},"page_id":"168831340"},"emitted_at":1674149695749} -{"stream":"survey_questions","data":{"id":"667461462","position":5,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"r1kf2oglhju ou2ldq63qu2g1 ufs1pkw pgr9p75sh9k7 vnu477cixnk1kx nxnfn0ti3x3u9wh ew6ho e18cmaeovxhx h1fac8j8 8ni6ay"}],"href":"https://api.surveymonkey.com/v3/surveys/307785388/pages/168831340/questions/667461462","answers":{"choices":[{"position":1,"visible":true,"text":"xytk5fnqv6odvms sw94ohnav5npnm lg9uf4 iwuqgfd0 5ee0wd5 lfha63ve x55yip4 gxgdkff3sckdn mq1khupscjgqj 8mtp54i5c3rjonv","quiz_options":{"score":0},"id":"4385173167"},{"position":2,"visible":true,"text":"2ammt8omj0l c0i1q 1uvaf203rh1 8wj2w7pp qlqayl9e8ldc y7iivv cv189py2di xuihxup7b2 rh8owrr595st","quiz_options":{"score":0},"id":"4385173168"},{"position":3,"visible":true,"text":"o9cvkj8x k0txiswxc5 ogf66jujgcrwdb l7n0c0rodcx 2gduko0wwimb21 afw8mi","quiz_options":{"score":0},"id":"4385173169"},{"position":4,"visible":true,"text":"mcb62sefmuo plnbygilddeqg u64kkkjvoms4b5q jw4tashu6c7ve12 8di4g100598 ad1bet nnqd7jmg","quiz_options":{"score":0},"id":"4385173170"}]},"page_id":"168831340"},"emitted_at":1674149695749} -{"stream":"survey_questions","data":{"id":"667461690","position":1,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"53o3ibly at73qjs4e4 y9dug7jxfmpmr 8esacb5"}],"href":"https://api.surveymonkey.com/v3/surveys/307785415/pages/168831393/questions/667461690","answers":{"choices":[{"position":1,"visible":true,"text":"lg2mcft4e64 ywiatkmeo ci3rr4l2v0 ot6un49a 4b28sq4g8qv7tj 4ihpko73bp0k6lf swaeo3o4mg2jf5g rnh225wj520w1ps p9emk1wg64vwl","quiz_options":{"score":0},"id":"4385174700"},{"position":2,"visible":true,"text":"ywg8bovna adsahna5kd1jg vdism1 w045ovutkx9 oubne2u vd0x7lh3 y3npa4kfb5","quiz_options":{"score":0},"id":"4385174701"},{"position":3,"visible":true,"text":"xsy4kv tqp8vty29815 de8nt5ab2fyr m6jilru2ek l7fktx3j5mbj l33ip83t4p29 exfygne a1btj95m1r","quiz_options":{"score":0},"id":"4385174702"}]},"page_id":"168831393"},"emitted_at":1674149695838} -{"stream":"survey_questions","data":{"id":"667461777","position":2,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"kjqdk eo7hfnu or7bmd1iwqxxp sguqta4f8141iy"}],"href":"https://api.surveymonkey.com/v3/surveys/307785415/pages/168831393/questions/667461777","answers":{"choices":[{"position":1,"visible":true,"text":"11bp1ll11nu0 ool67 tkbke01j3mtq 22f4r54u073p h6kt4puolum4","quiz_options":{"score":0},"id":"4385174970"},{"position":2,"visible":true,"text":"8q53omsxw8 08yyjvj3ns9j yu7yap87 d2tgjv55j5d5o3y dbd69m94qav1wma 8upqf7cliu hb26pytfkwyt rfo2ac4","quiz_options":{"score":0},"id":"4385174971"},{"position":3,"visible":true,"text":"6d7qmnw obxwg4elaab6 2sby04sor66 1wuoh26aftxu7","quiz_options":{"score":0},"id":"4385174972"},{"position":4,"visible":true,"text":"n0xwexbwtviyj1a midgl2jpfdy a72ut27ta 8i9fmkwg0q mbtxhkn b2ut8mtsslkt609 tgmnd7ovnqlbr","quiz_options":{"score":0},"id":"4385174973"},{"position":5,"visible":true,"text":"qjfs0pmb iecatmqyxtk w1s0fs9vcbayf5 rwsneyp0wx6lsyq pq99n hrx1mk4saug gv06qshlabe 0s2t4 h11ee2xna0m8r","quiz_options":{"score":0},"id":"4385174974"},{"position":6,"visible":true,"text":"11uf3he wbstw etbysmu4 c84vqddvx","quiz_options":{"score":0},"id":"4385174975"},{"position":7,"visible":true,"text":"rnfx7m ndifoe7ihy q98pov78016t 8smlnm lb3xicjp9 0r30sie97y12ve7","quiz_options":{"score":0},"id":"4385174976"},{"position":8,"visible":true,"text":"jc8s2ra5qxytxbu u6tj7jgep95 vbva1b4uslioa omku9","quiz_options":{"score":0},"id":"4385174977"}]},"page_id":"168831393"},"emitted_at":1674149695839} -{"stream":"survey_questions","data":{"id":"667461791","position":3,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"0qw6a5lnf426 2sh3g9f8wu xmgflj 41pjy"}],"href":"https://api.surveymonkey.com/v3/surveys/307785415/pages/168831393/questions/667461791","answers":{"choices":[{"position":1,"visible":true,"text":"7kxk7bkhfdx86sh 3rnrsj70ud 048jbf4qx4 p96o8 sn7xi oh02tfput4 6js84u99m5t","quiz_options":{"score":0},"id":"4385175055"},{"position":2,"visible":true,"text":"x259osu33y 8qadkcxpsnk4o20 m4wo3183nwxhgye q4mpg srpfibk96sf t3h2cx58eji x7l0sdipnjece8 7tgwfdfmh9hgdwi w99mkib2","quiz_options":{"score":0},"id":"4385175056"},{"position":3,"visible":true,"text":"lil1tboe p80wa8yed7w8 cll24c2lls6cc0 gpbv7rnap psk1et","quiz_options":{"score":0},"id":"4385175057"},{"position":4,"visible":true,"text":"wodtghhkt 2ae1c8q5s1ha 8lppd7ko84al j95eq1imtu7 6x8qknrhn0 l7h53","quiz_options":{"score":0},"id":"4385175058"}]},"page_id":"168831393"},"emitted_at":1674149695840} -{"stream":"survey_questions","data":{"id":"667461794","position":4,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"q3ay58w3 2rfjgu4 0cf9uh1 pu4fo16w 6c2wkn 1oo7d8"}],"href":"https://api.surveymonkey.com/v3/surveys/307785415/pages/168831393/questions/667461794","answers":{"choices":[{"position":1,"visible":true,"text":"1orbs vtqu62x9bp t75k10e89krhn bdnsfy6ng34g 8yv9p1c92jlbt0s","quiz_options":{"score":0},"id":"4385175070"},{"position":2,"visible":true,"text":"5j8dww2lxevx4a wv3ppbb vnccslwrjjdc n5pjsmw m7b4533y8tcbbus","quiz_options":{"score":0},"id":"4385175071"},{"position":3,"visible":true,"text":"fnjqkqy2 44brrpru jllsj9cdggwt4 behkog76y5ua 7ftpd8c8qhblii","quiz_options":{"score":0},"id":"4385175072"},{"position":4,"visible":true,"text":"srjre1h3w9 qojsh5w2 sq7wva6tkl9 raxp5mldrp","quiz_options":{"score":0},"id":"4385175073"}]},"page_id":"168831393"},"emitted_at":1674149695841} -{"stream":"survey_questions","data":{"id":"667461797","position":5,"visible":true,"family":"single_choice","subtype":"horiz","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"bvrdxa swsrjt sjox8u6767lv5 wgcomvtnoi0yg namiomuh6cou61u nl2v5bfu15i7 sqpu07jp489uc"}],"href":"https://api.surveymonkey.com/v3/surveys/307785415/pages/168831393/questions/667461797","answers":{"choices":[{"position":1,"visible":true,"text":"y97bshsv ite5mgk76p89o yrtt28bmm4jo9 ftc2tnjg","quiz_options":{"score":0},"id":"4385175096"},{"position":2,"visible":true,"text":"r970efm0 5p96h9iy1 o7ft83xrqgsrh8 owk30 buqg6ksd297lw9 lh6ygen9s2rac2b k5d3lbr7m37p","quiz_options":{"score":0},"id":"4385175097"},{"position":3,"visible":true,"text":"ktg10 vp7khp0ucx vuo5qrcor po9nbn6cdpdu56a rt8eiu0umg0dkx j2k8vgtr6","quiz_options":{"score":0},"id":"4385175098"},{"position":4,"visible":true,"text":"iubh35s1gvpm4gj svwbyf7npunm3 0thmsjmt2qb5im0 undxh7b frxykv55emi padtjsk69 qa0jrnwrfoj qqjg6ifvlx0abdb","quiz_options":{"score":0},"id":"4385175099"},{"position":5,"visible":true,"text":"w64hwv9edeaf55 l0gkthucpqj 80wgqsffl 0m45xm56a25psm 8opb8b0gw2w6 n8xex","quiz_options":{"score":0},"id":"4385175100"},{"position":6,"visible":true,"text":"ju3rt297a t028c0b35635 l0kj9vj seuar76 89587qhw46295","quiz_options":{"score":0},"id":"4385175101"},{"position":7,"visible":true,"text":"c4de01u4eil p1p2vy 0gqjglc mc2r97p07 d8d90 j15xktb2idx91 tecpeak3 4anh9o5w7h0runq yr0nd0q9392229","quiz_options":{"score":0},"id":"4385175102"},{"position":8,"visible":true,"text":"yc5erasa3ovk4d ed9adudq8e1s 7wrf8k w9ohrhltg3kv1 wgrnemp 7dqxmy5e bxnsro2sl","quiz_options":{"score":0},"id":"4385175103"}]},"page_id":"168831393"},"emitted_at":1674149695841} -{"stream":"survey_questions","data":{"id":"667461801","position":1,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"iu425c2v4yqs04 43g37 wg8awi s2pjwsm vjhybbs wry73cuukw85l2"}],"href":"https://api.surveymonkey.com/v3/surveys/307785415/pages/168831402/questions/667461801","answers":{"choices":[{"position":1,"visible":true,"text":"xjortc6k0sxjydf rdusho82tsr3l 3b3gch ogabx6895eb3 e7bj5pq poft6c4g1","quiz_options":{"score":0},"id":"4385175115"},{"position":2,"visible":true,"text":"bscm9v7d9nv0 e5x94dt0402ge i7mwtey74y4 er7bwam13 6xcjpw pre922tv ihmvbih 9piadim1lterm","quiz_options":{"score":0},"id":"4385175116"},{"position":3,"visible":true,"text":"ywtecquds5ctgu sjcgsa3hm d087wy 6yjqp0jgm 1ywj8v3wuuq wmlmq essefj rbgrjtv6smxcmag","quiz_options":{"score":0},"id":"4385175117"},{"position":4,"visible":true,"text":"gc8d58x66m ftpowgvwodht9h fj47r927vh826 qrkgkb bcvxni fo6g9wdlxgvnq","quiz_options":{"score":0},"id":"4385175118"},{"position":5,"visible":true,"text":"fxsrgxts qih9ukhxafmmiv4 h2ujh1va9jf b6ho30","quiz_options":{"score":0},"id":"4385175119"},{"position":6,"visible":true,"text":"81binesi6f 7urb7 ylotwabgvbt 03ke1u5h 3ehye3g olw0f83a1h667t 71ujnoyf p49ce","quiz_options":{"score":0},"id":"4385175120"},{"position":7,"visible":true,"text":"ht1rd9ymh 2tftisj80s74mop b1eavw d6vgqwrj","quiz_options":{"score":0},"id":"4385175121"}]},"page_id":"168831402"},"emitted_at":1674149695842} -{"stream":"survey_questions","data":{"id":"667461805","position":2,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"mxlksuvmoras9o 94fj2 dieyg92v384lfv8 f9rwin4 cdmg95wcnt2xa ybcmni7yd1x 1yl4j4q7j"}],"href":"https://api.surveymonkey.com/v3/surveys/307785415/pages/168831402/questions/667461805","answers":{"choices":[{"position":1,"visible":true,"text":"mevjvlslfppe ex251ss vkrs7swus g72vplm9svejkdw 7onhrwlh bouaam3k7cnn yadoqmnhn4swehq u8lhv3fdh58o3 wrbc0y197d","quiz_options":{"score":0},"id":"4385175146"},{"position":2,"visible":true,"text":"uglir jabjq1 poswkedidqpmj ta5ma4ep9xxr ghu3n2a4u7f3orh 9oud3lwe0f vwip14snnv6gtb 5fw29neis71ogsm dpq7m 9an28j1styhc","quiz_options":{"score":0},"id":"4385175147"},{"position":3,"visible":true,"text":"p3ndk7 nxpv9grg77 ek2kndt51g 2v10497 bdr0a3466ao","quiz_options":{"score":0},"id":"4385175148"},{"position":4,"visible":true,"text":"7w76l 9k393odbjg7cht7 mio9w4tcv 6wvef4vm orgg1n 20d8lh8x9osqcv dv50mjj w3g96tt0m3rf9 24uun3grfy2u 4vns2lt","quiz_options":{"score":0},"id":"4385175149"},{"position":5,"visible":true,"text":"4tcuvnn1wxy cqpr795s sfyecjwup fn76iwks5hko rk6wvgyblb3gqe4 rl5ulee1w rq66d","quiz_options":{"score":0},"id":"4385175150"},{"position":6,"visible":true,"text":"cmrjgc4 dwotyvr4o n9jid3i79xoql klkrt23lklso4p hh6d57t5 9xk3o9me 8bkpgry1yu009y","quiz_options":{"score":0},"id":"4385175151"},{"position":7,"visible":true,"text":"43ghcfhsl 74xoo rn7rmgjhd3cq u2x2ir6n449kqxp 8isq7wb tccg39oy1b 9mw0eu1ho0 a4x77foba5y ywgyosh9ue ynh9u8odsos5q2","quiz_options":{"score":0},"id":"4385175152"}]},"page_id":"168831402"},"emitted_at":1674149695842} -{"stream":"survey_questions","data":{"id":"667461811","position":3,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"i4mol250lne3 bhrh2dvt9b qss461 lkb1u chpwmgcnuoeec un2l5"}],"href":"https://api.surveymonkey.com/v3/surveys/307785415/pages/168831402/questions/667461811","answers":{"choices":[{"position":1,"visible":true,"text":"s4xpl2l93 57k4asd04 gyddhg dn53f8bd 8wtgobxts3 ms7nan ns5wv6q2vy6 nnaudmbyu80llen 8be4urorunk","quiz_options":{"score":0},"id":"4385175197"},{"position":2,"visible":true,"text":"ec2l4hr 5lwp46ij8 3tqigw elleyat98j2jjd if8hiia3 vad578","quiz_options":{"score":0},"id":"4385175198"},{"position":3,"visible":true,"text":"9t9nsl0tjlcjxr k1chdb iislvtl gpcnyi82o5ebu 46ayfj 5r3b3w92l6 vaqskragdor","quiz_options":{"score":0},"id":"4385175199"},{"position":4,"visible":true,"text":"8ad404t4 86hyrfxr7 xef8em2 g7u8fc2 rsslpdptcgrsh9n n5pb1u9b","quiz_options":{"score":0},"id":"4385175200"},{"position":5,"visible":true,"text":"bijp3kiqfs quasi89mov1y hj9ku 9w6iuh 81sng4yu32tyh d4q9kbxuoqd2xaq","quiz_options":{"score":0},"id":"4385175201"},{"position":6,"visible":true,"text":"73xiyg2gc q1l6a28s 991jaxujf56sqi rhxrnjum ges25br tb2x1wamrh3jac1 t3s8ocme8q9d8 c505btw99r hwljwx","quiz_options":{"score":0},"id":"4385175202"},{"position":7,"visible":true,"text":"ri75nf 5yy3nq 8m5e68j4mh8m sf1v3 60nijf1oeq9 bwp7bfx9u11a474 w66gfkiayng55q 6h0gp80h","quiz_options":{"score":0},"id":"4385175203"},{"position":8,"visible":true,"text":"nbqtnbbuiue5fr a9s8yrpjm7x0p qid4y913k 8ueagmuy2 5kvul122lseh5h5","quiz_options":{"score":0},"id":"4385175204"}]},"page_id":"168831402"},"emitted_at":1674149695843} -{"stream":"survey_questions","data":{"id":"667461833","position":4,"visible":true,"family":"single_choice","subtype":"menu","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"s3pjyrvc bj99egp0o99 f4ddk7sed bdc2yh24yf tyii1jye nmvwhj18oqxna6b lku2vt8hrnx4 j327a"}],"href":"https://api.surveymonkey.com/v3/surveys/307785415/pages/168831402/questions/667461833","answers":{"choices":[{"position":1,"visible":true,"text":"hqpox2w6wuwyd2 fm6kvjiq6ns5k jv1eutgqn1jj if8dj81e 57l25ev1tal9j","quiz_options":{"score":0},"id":"4385175279"},{"position":2,"visible":true,"text":"vr2p6mvbedcpkak 2c91smhshw9ee mwdy43um3334e7i u4o5frorc3py srt09vtrol825 i9s8n2koaoc6fu","quiz_options":{"score":0},"id":"4385175280"},{"position":3,"visible":true,"text":"1icyn0f tifktyc2uwd k8ehexjojth9a2f 0n7sh5p4i6kswe","quiz_options":{"score":0},"id":"4385175281"},{"position":4,"visible":true,"text":"6ju066 4chnhs0be43dy2 xdkxk37j1i0qy1 43b22jang8 na1yapnjj 7tvgbeu v1dw7as","quiz_options":{"score":0},"id":"4385175282"},{"position":5,"visible":true,"text":"vnslaachd7t07f0 db6whw u6ahc71ajst 2cn114ialhcvex kpwm1qo1y g82xup","quiz_options":{"score":0},"id":"4385175283"},{"position":6,"visible":true,"text":"aniu1f d47vbpsl mm26jpf7 g2io86ycj6yk","quiz_options":{"score":0},"id":"4385175284"}]},"page_id":"168831402"},"emitted_at":1674149695844} -{"stream":"survey_questions","data":{"id":"667461834","position":5,"visible":true,"family":"single_choice","subtype":"vertical","layout":null,"sorting":null,"required":null,"validation":null,"forced_ranking":false,"headings":[{"heading":"ce8esfvsy7xcwqu gemf05b3s5ap5 76oc1 srngx7qca"}],"href":"https://api.surveymonkey.com/v3/surveys/307785415/pages/168831402/questions/667461834","answers":{"choices":[{"position":1,"visible":true,"text":"jggh1bnginkodsv 4jhtwffnlgybux 1na25qx xr5jtwfp vvip26cqr st09ps653caiyj 1icxwhc1hut6","quiz_options":{"score":0},"id":"4385175294"},{"position":2,"visible":true,"text":"gdxye rstmylwe4l w2lkwbdf87e735u rdxn1vxbg3aw kwkn1gfsu s3oa2wx7 6vegglr1ihckyxa","quiz_options":{"score":0},"id":"4385175295"},{"position":3,"visible":true,"text":"qsghk1r8e3p ciuick1mgdwbyc k8wbxctpmtu2v xau05rusflq k3as06r35dl9 38xpts","quiz_options":{"score":0},"id":"4385175296"}]},"page_id":"168831402"},"emitted_at":1674149695844} -{"stream":"collectors","data":{"status":"open","id":"405437100","survey_id":"306079584","type":"weblink","name":"Web Link 1","thank_you_message":"Thank you for submitting your responses!","thank_you_page":{"is_enabled":false,"message":"Thank you for submitting your responses!"},"disqualification_type":"message","disqualification_message":"Thank you for completing our survey!","disqualification_url":"https://www.surveymonkey.com","closed_page_message":"This survey is currently closed. Please contact the author of this survey for further assistance.","redirect_type":"url","redirect_url":"https://www.surveymonkey.com","display_survey_results":false,"edit_response_type":"until_complete","anonymous_type":"not_anonymous","allow_multiple_responses":false,"date_modified":"2021-06-02T18:14:00+00:00","date_created":"2021-06-01T17:30:00+00:00","response_count":13,"password_enabled":false,"response_limit":null,"respondent_authentication":false,"sender_email":null,"close_date":null,"url":"https://www.surveymonkey.com/r/BQB2V52","href":"https://api.surveymonkey.com/v3/collectors/405437100"},"emitted_at":1687778040457} -{"stream":"collectors","data":{"status":"open","id":"405843657","survey_id":"307785429","type":"weblink","name":"Web Link 1","thank_you_message":"Grazie per avere inviato le risposte.","thank_you_page":{"is_enabled":false,"message":"Grazie per avere inviato le risposte."},"disqualification_type":"message","disqualification_message":"Grazie per aver partecipato al nostro sondaggio!","disqualification_url":"https://it.surveymonkey.com","closed_page_message":"Il sondaggio è stato chiuso. Per ulteriori informazioni, contatta l’autore del sondaggio.","redirect_type":"url","redirect_url":"https://it.surveymonkey.com","display_survey_results":false,"edit_response_type":"until_complete","anonymous_type":"not_anonymous","allow_multiple_responses":false,"date_modified":"2021-06-10T10:59:00+00:00","date_created":"2021-06-10T06:41:00+00:00","response_count":18,"password_enabled":false,"response_limit":null,"respondent_authentication":false,"sender_email":null,"close_date":null,"url":"https://it.surveymonkey.com/r/GYXQMYF","href":"https://api.surveymonkey.com/v3/collectors/405843657"},"emitted_at":1687778042387} -{"stream":"collectors","data":{"status":"open","id":"405843665","survey_id":"307785444","type":"weblink","name":"Web Link 1","thank_you_message":"Спасибо за ответы!","thank_you_page":{"is_enabled":false,"message":"Спасибо за ответы!"},"disqualification_type":"message","disqualification_message":"Спасибо за участие в нашем опросе!","disqualification_url":"https://ru.surveymonkey.com","closed_page_message":"В настоящее время этот опрос закрыт. За дальнейшей информацией обращайтесь к автору опроса.","redirect_type":"url","redirect_url":"https://ru.surveymonkey.com","display_survey_results":false,"edit_response_type":"until_complete","anonymous_type":"not_anonymous","allow_multiple_responses":false,"date_modified":"2021-06-10T11:00:00+00:00","date_created":"2021-06-10T06:42:00+00:00","response_count":18,"password_enabled":false,"response_limit":null,"respondent_authentication":false,"sender_email":null,"close_date":null,"url":"https://ru.surveymonkey.com/r/GYNFJLH","href":"https://api.surveymonkey.com/v3/collectors/405843665"},"emitted_at":1687778044085} -{"stream":"collectors","data":{"status":"open","id":"405843672","survey_id":"307785394","type":"weblink","name":"Web Link 1","thank_you_message":"Thank you for submitting your responses!","thank_you_page":{"is_enabled":false,"message":"Thank you for submitting your responses!"},"disqualification_type":"message","disqualification_message":"Thank you for completing our survey!","disqualification_url":"https://www.surveymonkey.com","closed_page_message":"This survey is currently closed. Please contact the author of this survey for further assistance.","redirect_type":"url","redirect_url":"https://www.surveymonkey.com","display_survey_results":false,"edit_response_type":"until_complete","anonymous_type":"not_anonymous","allow_multiple_responses":false,"date_modified":"2021-06-10T11:01:00+00:00","date_created":"2021-06-10T06:42:00+00:00","response_count":18,"password_enabled":false,"response_limit":null,"respondent_authentication":false,"sender_email":null,"close_date":null,"url":"https://www.surveymonkey.com/r/GYFBNFM","href":"https://api.surveymonkey.com/v3/collectors/405843672"},"emitted_at":1687778045615} -{"stream":"collectors","data":{"status":"open","id":"405843682","survey_id":"307785402","type":"weblink","name":"Web Link 1","thank_you_message":"Спасибо за ответы!","thank_you_page":{"is_enabled":false,"message":"Спасибо за ответы!"},"disqualification_type":"message","disqualification_message":"Спасибо за участие в нашем опросе!","disqualification_url":"https://ru.surveymonkey.com","closed_page_message":"В настоящее время этот опрос закрыт. За дальнейшей информацией обращайтесь к автору опроса.","redirect_type":"url","redirect_url":"https://ru.surveymonkey.com","display_survey_results":false,"edit_response_type":"until_complete","anonymous_type":"not_anonymous","allow_multiple_responses":false,"date_modified":"2021-06-10T11:02:00+00:00","date_created":"2021-06-10T06:42:00+00:00","response_count":18,"password_enabled":false,"response_limit":null,"respondent_authentication":false,"sender_email":null,"close_date":null,"url":"https://ru.surveymonkey.com/r/GYGQJ5R","href":"https://api.surveymonkey.com/v3/collectors/405843682"},"emitted_at":1687778047089} -{"stream":"collectors","data":{"status":"open","id":"405843688","survey_id":"307785408","type":"weblink","name":"Web Link 1","thank_you_message":"Vielen Dank für die Teilnahme an der Umfrage!","thank_you_page":{"is_enabled":false,"message":"Vielen Dank für die Teilnahme an der Umfrage!"},"disqualification_type":"message","disqualification_message":"Vielen Dank, dass Sie die Umfrage abgeschlossen haben!","disqualification_url":"https://de.surveymonkey.com","closed_page_message":"Diese Umfrage ist derzeit geschlossen. Wenden Sie sich an den Autor dieser Umfrage, um weitere Hilfe zu erhalten.","redirect_type":"url","redirect_url":"https://de.surveymonkey.com","display_survey_results":false,"edit_response_type":"until_complete","anonymous_type":"not_anonymous","allow_multiple_responses":false,"date_modified":"2021-06-10T11:02:00+00:00","date_created":"2021-06-10T06:43:00+00:00","response_count":18,"password_enabled":false,"response_limit":null,"respondent_authentication":false,"sender_email":null,"close_date":null,"url":"https://de.surveymonkey.com/r/GYBJDCP","href":"https://api.surveymonkey.com/v3/collectors/405843688"},"emitted_at":1687778048609} -{"stream":"collectors","data":{"status":"open","id":"405829319","survey_id":"307784834","type":"weblink","name":"Web Link 1","thank_you_message":"Спасибо за ответы!","thank_you_page":{"is_enabled":false,"message":"Спасибо за ответы!"},"disqualification_type":"message","disqualification_message":"Спасибо за участие в нашем опросе!","disqualification_url":"https://ru.surveymonkey.com","closed_page_message":"В настоящее время этот опрос закрыт. За дальнейшей информацией обращайтесь к автору опроса.","redirect_type":"url","redirect_url":"https://ru.surveymonkey.com","display_survey_results":false,"edit_response_type":"until_complete","anonymous_type":"not_anonymous","allow_multiple_responses":false,"date_modified":"2021-06-10T11:03:00+00:00","date_created":"2021-06-09T21:08:00+00:00","response_count":21,"password_enabled":false,"response_limit":null,"respondent_authentication":false,"sender_email":null,"close_date":null,"url":"https://ru.surveymonkey.com/r/NCH93N6","href":"https://api.surveymonkey.com/v3/collectors/405829319"},"emitted_at":1687778050140} -{"stream":"collectors","data":{"status":"open","id":"405829931","survey_id":"307785448","type":"weblink","name":"Web Link 1","thank_you_message":"Спасибо за ответы!","thank_you_page":{"is_enabled":false,"message":"Спасибо за ответы!"},"disqualification_type":"message","disqualification_message":"Спасибо за участие в нашем опросе!","disqualification_url":"https://ru.surveymonkey.com","closed_page_message":"В настоящее время этот опрос закрыт. За дальнейшей информацией обращайтесь к автору опроса.","redirect_type":"url","redirect_url":"https://ru.surveymonkey.com","display_survey_results":false,"edit_response_type":"until_complete","anonymous_type":"not_anonymous","allow_multiple_responses":false,"date_modified":"2021-06-10T11:03:00+00:00","date_created":"2021-06-09T21:21:00+00:00","response_count":18,"password_enabled":false,"response_limit":null,"respondent_authentication":false,"sender_email":null,"close_date":null,"url":"https://ru.surveymonkey.com/r/NS2ZPQC","href":"https://api.surveymonkey.com/v3/collectors/405829931"},"emitted_at":1687778051829} -{"stream":"collectors","data":{"status":"open","id":"405843442","survey_id":"307784863","type":"weblink","name":"Web Link 1","thank_you_message":"Vielen Dank für die Teilnahme an der Umfrage!","thank_you_page":{"is_enabled":false,"message":"Vielen Dank für die Teilnahme an der Umfrage!"},"disqualification_type":"message","disqualification_message":"Vielen Dank, dass Sie die Umfrage abgeschlossen haben!","disqualification_url":"https://de.surveymonkey.com","closed_page_message":"Diese Umfrage ist derzeit geschlossen. Wenden Sie sich an den Autor dieser Umfrage, um weitere Hilfe zu erhalten.","redirect_type":"url","redirect_url":"https://de.surveymonkey.com","display_survey_results":false,"edit_response_type":"until_complete","anonymous_type":"not_anonymous","allow_multiple_responses":false,"date_modified":"2021-06-10T11:04:00+00:00","date_created":"2021-06-10T06:32:00+00:00","response_count":20,"password_enabled":false,"response_limit":null,"respondent_authentication":false,"sender_email":null,"close_date":null,"url":"https://de.surveymonkey.com/r/GDQLK9D","href":"https://api.surveymonkey.com/v3/collectors/405843442"},"emitted_at":1687778053347} -{"stream":"collectors","data":{"status":"open","id":"405829776","survey_id":"307784846","type":"weblink","name":"Web Link 1","thank_you_message":"Спасибо за ответы!","thank_you_page":{"is_enabled":false,"message":"Спасибо за ответы!"},"disqualification_type":"message","disqualification_message":"Спасибо за участие в нашем опросе!","disqualification_url":"https://ru.surveymonkey.com","closed_page_message":"В настоящее время этот опрос закрыт. За дальнейшей информацией обращайтесь к автору опроса.","redirect_type":"url","redirect_url":"https://ru.surveymonkey.com","display_survey_results":false,"edit_response_type":"until_complete","anonymous_type":"not_anonymous","allow_multiple_responses":false,"date_modified":"2021-06-10T11:05:00+00:00","date_created":"2021-06-09T21:18:00+00:00","response_count":20,"password_enabled":false,"response_limit":null,"respondent_authentication":false,"sender_email":null,"close_date":null,"url":"https://ru.surveymonkey.com/r/NWW9NKW","href":"https://api.surveymonkey.com/v3/collectors/405829776"},"emitted_at":1687778054580} -{"stream":"collectors","data":{"status":"open","id":"405843460","survey_id":"307784856","type":"weblink","name":"Web Link 1","thank_you_message":"Спасибо за ответы!","thank_you_page":{"is_enabled":false,"message":"Спасибо за ответы!"},"disqualification_type":"message","disqualification_message":"Спасибо за участие в нашем опросе!","disqualification_url":"https://ru.surveymonkey.com","closed_page_message":"В настоящее время этот опрос закрыт. За дальнейшей информацией обращайтесь к автору опроса.","redirect_type":"url","redirect_url":"https://ru.surveymonkey.com","display_survey_results":false,"edit_response_type":"until_complete","anonymous_type":"not_anonymous","allow_multiple_responses":false,"date_modified":"2021-06-10T11:05:00+00:00","date_created":"2021-06-10T06:32:00+00:00","response_count":20,"password_enabled":false,"response_limit":null,"respondent_authentication":false,"sender_email":null,"close_date":null,"url":"https://ru.surveymonkey.com/r/GDSZR22","href":"https://api.surveymonkey.com/v3/collectors/405843460"},"emitted_at":1687778056142} -{"stream":"collectors","data":{"status":"open","id":"405843624","survey_id":"307785388","type":"weblink","name":"Web Link 1","thank_you_message":"Спасибо за ответы!","thank_you_page":{"is_enabled":false,"message":"Спасибо за ответы!"},"disqualification_type":"message","disqualification_message":"Спасибо за участие в нашем опросе!","disqualification_url":"https://ru.surveymonkey.com","closed_page_message":"В настоящее время этот опрос закрыт. За дальнейшей информацией обращайтесь к автору опроса.","redirect_type":"url","redirect_url":"https://ru.surveymonkey.com","display_survey_results":false,"edit_response_type":"until_complete","anonymous_type":"not_anonymous","allow_multiple_responses":false,"date_modified":"2021-06-10T11:06:00+00:00","date_created":"2021-06-10T06:40:00+00:00","response_count":20,"password_enabled":false,"response_limit":null,"respondent_authentication":false,"sender_email":null,"close_date":null,"url":"https://ru.surveymonkey.com/r/GYWV533","href":"https://api.surveymonkey.com/v3/collectors/405843624"},"emitted_at":1687778057704} -{"stream":"collectors","data":{"status":"open","id":"405843634","survey_id":"307785415","type":"weblink","name":"Web Link 1","thank_you_message":"Спасибо за ответы!","thank_you_page":{"is_enabled":false,"message":"Спасибо за ответы!"},"disqualification_type":"message","disqualification_message":"Спасибо за участие в нашем опросе!","disqualification_url":"https://ru.surveymonkey.com","closed_page_message":"В настоящее время этот опрос закрыт. За дальнейшей информацией обращайтесь к автору опроса.","redirect_type":"url","redirect_url":"https://ru.surveymonkey.com","display_survey_results":false,"edit_response_type":"until_complete","anonymous_type":"not_anonymous","allow_multiple_responses":false,"date_modified":"2021-06-10T11:07:00+00:00","date_created":"2021-06-10T06:40:00+00:00","response_count":20,"password_enabled":false,"response_limit":null,"respondent_authentication":false,"sender_email":null,"close_date":null,"url":"https://ru.surveymonkey.com/r/GYS33KN","href":"https://api.surveymonkey.com/v3/collectors/405843634"},"emitted_at":1687778058997} -{"stream": "survey_collectors", "data": {"name": "Web Link 1", "id": "405437100", "href": "https://api.surveymonkey.com/v3/collectors/405437100", "type": "weblink", "survey_id": "306079584"}, "emitted_at": 1681918860499} -{"stream": "survey_collectors", "data": {"name": "Web Link 1", "id": "405843657", "href": "https://api.surveymonkey.com/v3/collectors/405843657", "type": "weblink", "survey_id": "307785429"}, "emitted_at": 1681918861357} -{"stream": "survey_collectors", "data": {"name": "Web Link 1", "id": "405843665", "href": "https://api.surveymonkey.com/v3/collectors/405843665", "type": "weblink", "survey_id": "307785444"}, "emitted_at": 1681918862236} -{"stream": "survey_collectors", "data": {"name": "Web Link 1", "id": "405843672", "href": "https://api.surveymonkey.com/v3/collectors/405843672", "type": "weblink", "survey_id": "307785394"}, "emitted_at": 1681918863235} -{"stream": "survey_collectors", "data": {"name": "Web Link 1", "id": "405843682", "href": "https://api.surveymonkey.com/v3/collectors/405843682", "type": "weblink", "survey_id": "307785402"}, "emitted_at": 1681918864098} -{"stream": "survey_collectors", "data": {"name": "Web Link 1", "id": "405843688", "href": "https://api.surveymonkey.com/v3/collectors/405843688", "type": "weblink", "survey_id": "307785408"}, "emitted_at": 1681918865029} -{"stream": "survey_collectors", "data": {"name": "Web Link 1", "id": "405829319", "href": "https://api.surveymonkey.com/v3/collectors/405829319", "type": "weblink", "survey_id": "307784834"}, "emitted_at": 1681918865930} -{"stream": "survey_collectors", "data": {"name": "Web Link 1", "id": "405829931", "href": "https://api.surveymonkey.com/v3/collectors/405829931", "type": "weblink", "survey_id": "307785448"}, "emitted_at": 1681918866981} -{"stream": "survey_collectors", "data": {"name": "Web Link 1", "id": "405843442", "href": "https://api.surveymonkey.com/v3/collectors/405843442", "type": "weblink", "survey_id": "307784863"}, "emitted_at": 1681918868050} -{"stream": "survey_collectors", "data": {"name": "Web Link 1", "id": "405829776", "href": "https://api.surveymonkey.com/v3/collectors/405829776", "type": "weblink", "survey_id": "307784846"}, "emitted_at": 1681918868953} -{"stream": "survey_collectors", "data": {"name": "Web Link 1", "id": "405843460", "href": "https://api.surveymonkey.com/v3/collectors/405843460", "type": "weblink", "survey_id": "307784856"}, "emitted_at": 1681918870019} -{"stream": "survey_collectors", "data": {"name": "Web Link 1", "id": "405843624", "href": "https://api.surveymonkey.com/v3/collectors/405843624", "type": "weblink", "survey_id": "307785388"}, "emitted_at": 1681918870844} -{"stream": "survey_collectors", "data": {"name": "Web Link 1", "id": "405843634", "href": "https://api.surveymonkey.com/v3/collectors/405843634", "type": "weblink", "survey_id": "307785415"}, "emitted_at": 1681918871852} \ No newline at end of file +{"stream": "surveys", "data": {"title": "Market Research - Product Testing Template", "nickname": "", "language": "en", "folder_id": "0", "category": "market_research", "question_count": 13, "page_count": 1, "response_count": 13, "date_created": "2021-05-07T06:18:00", "date_modified": "2021-06-08T18:09:00", "id": "306079584", "buttons_text": {"next_button": "Next >>", "prev_button": "<< Prev", "done_button": "Done", "exit_button": ""}, "is_owner": true, "footer": true, "theme_id": "4510354", "template_id": "319", "custom_variables": {}, "href": "https://api.surveymonkey.com/v3/surveys/306079584", "analyze_url": "https://www.surveymonkey.com/analyze/5jPPwKLnlqevaUQom_2BgYAWJWlrKNA2ZFTOYrMBrqW2c_3D", "edit_url": "https://www.surveymonkey.com/create/?sm=5jPPwKLnlqevaUQom_2BgYAWJWlrKNA2ZFTOYrMBrqW2c_3D", "collect_url": "https://www.surveymonkey.com/collect/list?sm=5jPPwKLnlqevaUQom_2BgYAWJWlrKNA2ZFTOYrMBrqW2c_3D", "summary_url": "https://www.surveymonkey.com/summary/5jPPwKLnlqevaUQom_2BgYAWJWlrKNA2ZFTOYrMBrqW2c_3D", "preview": "https://www.surveymonkey.com/r/Preview/?sm=8T1PwDGoJHE1lbkxjUnaGitKu8jxWzyoclw9fNsShflPlk6MYIzwJ2NgjlBw_2B7iV"}, "emitted_at": 1709037198469} +{"stream": "surveys", "data": {"title": "yswa8kobijei1mkwaqxgy", "nickname": "7b4p9vssf810mslcd0eqpcg9s7p0h", "language": "it", "folder_id": "0", "category": "", "question_count": 10, "page_count": 3, "response_count": 18, "date_created": "2021-06-09T21:20:00", "date_modified": "2021-06-10T10:59:00", "id": "307785429", "buttons_text": {"next_button": "Nex >>>>>", "prev_button": "Nix <<<<<", "done_button": "Nax_Don_Gon!", "exit_button": ""}, "is_owner": true, "footer": true, "theme_id": "4510354", "template_id": null, "custom_variables": {}, "href": "https://api.surveymonkey.com/v3/surveys/307785429", "analyze_url": "https://www.surveymonkey.com/analyze/BPAkhAawaMN8C17tmmNFxoENnL6uEj_2FlAo5YSBwashU_3D", "edit_url": "https://www.surveymonkey.com/create/?sm=BPAkhAawaMN8C17tmmNFxoENnL6uEj_2FlAo5YSBwashU_3D", "collect_url": "https://www.surveymonkey.com/collect/list?sm=BPAkhAawaMN8C17tmmNFxoENnL6uEj_2FlAo5YSBwashU_3D", "summary_url": "https://www.surveymonkey.com/summary/BPAkhAawaMN8C17tmmNFxoENnL6uEj_2FlAo5YSBwashU_3D", "preview": "https://www.surveymonkey.com/r/Preview/?sm=YRpP3kxXMi2aJkgYoeyZrvuErii13mQ5DRN67Vm4WJ5avIMZ6YvzI_2Bc3FpERJDqx"}, "emitted_at": 1709037199358} +{"stream": "survey_responses", "data": {"id": "12706126725", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "124.123.178.184", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405437100", "survey_id": "306079584", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=hNu3QJYf07WiUPOwxCYcARURFGB3ruOrs9slcTHVhmhgDhoNJ0k7w3jCvo0nLM40", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5jPPwKLnlqevaUQom_2BgYAWJWlrKNA2ZFTOYrMBrqW2c_3D?respondent_id=12706126725", "total_time": 62, "date_modified": "2021-06-01T17:40:54+00:00", "date_created": "2021-06-01T17:39:51+00:00", "href": "https://api.surveymonkey.com/v3/surveys/306079584/responses/12706126725", "pages": [{"id": "165250506", "questions": [{"id": "652286715", "answers": [{"choice_id": "4285525064"}]}, {"id": "652286721", "answers": [{"choice_id": "4285525084"}]}, {"id": "652286716", "answers": [{"choice_id": "4285525070"}]}, {"id": "652286718", "answers": [{"choice_id": "4285525079"}]}, {"id": "652286722", "answers": [{"choice_id": "4285525089"}]}, {"id": "652286717", "answers": [{"choice_id": "4285525074"}]}, {"id": "652286723", "answers": [{"choice_id": "4285525095"}]}, {"id": "652286714", "answers": [{"choice_id": "4285525058", "row_id": "4285525061", "choice_metadata": {"weight": "0"}}]}]}]}, "emitted_at": 1709037201198} +{"stream": "survey_responses", "data": {"id": "12706152767", "recipient_id": "", "collection_mode": "default", "response_status": "completed", "custom_value": "", "first_name": "", "last_name": "", "email_address": "", "ip_address": "37.229.17.15", "logic_path": {}, "metadata": {"contact": {}}, "page_path": [], "collector_id": "405437100", "survey_id": "306079584", "custom_variables": {}, "edit_url": "https://www.surveymonkey.com/r/?sm=YIZz5DiXEDES47ARxTbRPzAA9ZOwCjcN_2FDSFTYGWgCVPQCo_2B3EeLirGlON5_2BjrX5", "analyze_url": "https://www.surveymonkey.com/analyze/browse/5jPPwKLnlqevaUQom_2BgYAWJWlrKNA2ZFTOYrMBrqW2c_3D?respondent_id=12706152767", "total_time": 55, "date_modified": "2021-06-01T17:50:03+00:00", "date_created": "2021-06-01T17:49:08+00:00", "href": "https://api.surveymonkey.com/v3/surveys/306079584/responses/12706152767", "pages": [{"id": "165250506", "questions": [{"id": "652286726", "answers": [{"tag_data": [], "text": "fuck this"}]}, {"id": "652286715", "answers": [{"choice_id": "4285525067"}]}, {"id": "652286721", "answers": [{"choice_id": "4285525087"}]}, {"id": "652286716", "answers": [{"choice_id": "4285525072"}]}, {"id": "652286718", "answers": [{"choice_id": "4285525081"}]}, {"id": "652286722", "answers": [{"choice_id": "4285525091"}]}, {"id": "652286717", "answers": [{"choice_id": "4285525077"}]}, {"id": "652286723", "answers": [{"choice_id": "4285525097"}]}, {"id": "652286714", "answers": [{"choice_id": "4285525052", "row_id": "4285525061", "choice_metadata": {"weight": "-100"}}]}, {"id": "652286719", "answers": [{"tag_data": [], "text": "waste of time"}]}]}]}, "emitted_at": 1709037201222} +{"stream": "survey_pages", "data": {"title": "sy4ara", "description": "\u0421\u0443\u0440\u0432\u0435\u0439\u043c\u0430\u043d\u043a\u0438 \u0436\u043b\u043e\u0431\u044b", "position": 1, "question_count": 13, "id": "165250506", "href": "https://api.surveymonkey.com/v3/surveys/306079584/pages/165250506"}, "emitted_at": 1709037204091} +{"stream": "survey_pages", "data": {"title": "", "description": "", "position": 1, "question_count": 0, "id": "168831413", "href": "https://api.surveymonkey.com/v3/surveys/307785429/pages/168831413"}, "emitted_at": 1709037204976} +{"stream": "survey_pages", "data": {"title": "xsgqdhdakh7x", "description": "wlju6xsgkxyig0s1", "position": 2, "question_count": 5, "id": "168831415", "href": "https://api.surveymonkey.com/v3/surveys/307785429/pages/168831415"}, "emitted_at": 1709037204980} +{"stream": "survey_pages", "data": {"title": "ajsn8v0tvicgt7u063", "description": "dcwmhxdx6p8buu", "position": 3, "question_count": 5, "id": "168831437", "href": "https://api.surveymonkey.com/v3/surveys/307785429/pages/168831437"}, "emitted_at": 1709037204983} +{"stream": "survey_questions", "data": {"id": "652286724", "position": 1, "visible": true, "family": "click_map", "subtype": "single", "layout": null, "sorting": null, "required": null, "validation": null, "forced_ranking": false, "headings": [{"heading": "Click on the area you like best about this product.", "image": {"url": "https://surveymonkey-assets.s3.amazonaws.com/survey/306079584/20535460-8f99-41b0-ac96-b9f4f2aecb96.png"}}], "href": "https://api.surveymonkey.com/v3/surveys/306079584/pages/165250506/questions/652286724", "answers": {"rows": [{"position": 1, "visible": true, "text": "Click 1", "id": "4285525098"}]}}, "emitted_at": 1709037205863} +{"stream": "survey_questions", "data": {"id": "652286725", "position": 2, "visible": true, "family": "click_map", "subtype": "single", "layout": null, "sorting": null, "required": null, "validation": null, "forced_ranking": false, "headings": [{"heading": "Click on the area you like least about this product.", "image": {"url": "https://surveymonkey-assets.s3.amazonaws.com/survey/306079584/79215d25-9dbc-4870-91cd-3a36778aae52.png"}}], "href": "https://api.surveymonkey.com/v3/surveys/306079584/pages/165250506/questions/652286725", "answers": {"rows": [{"position": 1, "visible": true, "text": "Click 1", "id": "4285525102"}]}}, "emitted_at": 1709037205866} +{"stream": "survey_questions", "data": {"id": "652286726", "position": 3, "visible": true, "family": "open_ended", "subtype": "essay", "layout": null, "sorting": null, "required": null, "validation": null, "forced_ranking": false, "headings": [{"heading": "Why did you make that selection?"}], "href": "https://api.surveymonkey.com/v3/surveys/306079584/pages/165250506/questions/652286726"}, "emitted_at": 1709037205869} +{"stream": "survey_collectors", "data": {"name": "Web Link 1", "id": "405437100", "href": "https://api.surveymonkey.com/v3/collectors/405437100", "type": "weblink", "survey_id": "306079584"}, "emitted_at": 1709037207336} +{"stream": "survey_collectors", "data": {"name": "Web Link 1", "id": "405843657", "href": "https://api.surveymonkey.com/v3/collectors/405843657", "type": "weblink", "survey_id": "307785429"}, "emitted_at": 1709037207772} +{"stream": "collectors", "data": {"status": "open", "id": "405437100", "survey_id": "306079584", "type": "weblink", "name": "Web Link 1", "thank_you_message": "Thank you for submitting your responses!", "thank_you_page": {"is_enabled": false, "message": "Thank you for submitting your responses!"}, "disqualification_type": "message", "disqualification_message": "Thank you for completing our survey!", "disqualification_url": "https://www.surveymonkey.com", "closed_page_message": "This survey is currently closed. Please contact the author of this survey for further assistance.", "redirect_type": "url", "redirect_url": "https://www.surveymonkey.com", "display_survey_results": false, "edit_response_type": "until_complete", "anonymous_type": "not_anonymous", "allow_multiple_responses": false, "date_modified": "2021-06-02T18:14:00+00:00", "date_created": "2021-06-01T17:30:00+00:00", "response_count": 13, "password_enabled": false, "response_limit": null, "respondent_authentication": false, "sender_email": null, "close_date": null, "url": "https://www.surveymonkey.com/r/BQB2V52", "href": "https://api.surveymonkey.com/v3/collectors/405437100"}, "emitted_at": 1709037208691} +{"stream": "collectors", "data": {"status": "open", "id": "405843657", "survey_id": "307785429", "type": "weblink", "name": "Web Link 1", "thank_you_message": "Grazie per avere inviato le risposte.", "thank_you_page": {"is_enabled": false, "message": "Grazie per avere inviato le risposte."}, "disqualification_type": "message", "disqualification_message": "Grazie per aver partecipato al nostro sondaggio!", "disqualification_url": "https://it.surveymonkey.com", "closed_page_message": "Il sondaggio \u00e8 stato chiuso. Per ulteriori informazioni, contatta l\u2019autore del sondaggio.", "redirect_type": "url", "redirect_url": "https://it.surveymonkey.com", "display_survey_results": false, "edit_response_type": "until_complete", "anonymous_type": "not_anonymous", "allow_multiple_responses": false, "date_modified": "2021-06-10T10:59:00+00:00", "date_created": "2021-06-10T06:41:00+00:00", "response_count": 18, "password_enabled": false, "response_limit": null, "respondent_authentication": false, "sender_email": null, "close_date": null, "url": "https://it.surveymonkey.com/r/GYXQMYF", "href": "https://api.surveymonkey.com/v3/collectors/405843657"}, "emitted_at": 1709037209419} diff --git a/airbyte-integrations/connectors/source-surveymonkey/metadata.yaml b/airbyte-integrations/connectors/source-surveymonkey/metadata.yaml index ad966800ff012..007271ccde39d 100644 --- a/airbyte-integrations/connectors/source-surveymonkey/metadata.yaml +++ b/airbyte-integrations/connectors/source-surveymonkey/metadata.yaml @@ -10,12 +10,13 @@ data: connectorSubtype: api connectorType: source definitionId: badc5925-0485-42be-8caa-b34096cb71b5 - dockerImageTag: 0.2.4 + dockerImageTag: 0.3.0 dockerRepository: airbyte/source-surveymonkey documentationUrl: https://docs.airbyte.com/integrations/sources/surveymonkey githubIssueLabel: source-surveymonkey icon: surveymonkey.svg license: MIT + maxSecondsBetweenMessages: 86400 name: SurveyMonkey remoteRegistries: pypi: @@ -30,5 +31,5 @@ data: supportLevel: certified tags: - language:python - - cdk:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-surveymonkey/poetry.lock b/airbyte-integrations/connectors/source-surveymonkey/poetry.lock index 144064338eb2c..d553360075645 100644 --- a/airbyte-integrations/connectors/source-surveymonkey/poetry.lock +++ b/airbyte-integrations/connectors/source-surveymonkey/poetry.lock @@ -2,64 +2,54 @@ [[package]] name = "airbyte-cdk" -version = "0.51.40" +version = "0.78.3" description = "A framework for writing Airbyte Connectors." optional = false -python-versions = ">=3.8" +python-versions = "<4.0,>=3.9" files = [ - {file = "airbyte-cdk-0.51.40.tar.gz", hash = "sha256:b1bb1edecb8c27b4b1c8a313e391ac18c04259e10732867021b4c4d781f92554"}, - {file = "airbyte_cdk-0.51.40-py3-none-any.whl", hash = "sha256:4e94ca42e535fc51c2c8bda872d977cf8534fbbe49f61506d0caf414ae02cfb0"}, + {file = "airbyte_cdk-0.78.3-py3-none-any.whl", hash = "sha256:699d61ace9f8ca4477e06af3ff1bc56856e955a444081a1701c41d94629dcd74"}, + {file = "airbyte_cdk-0.78.3.tar.gz", hash = "sha256:192c2594d0e93140a7ec635fea3d4644318faada6aa986805752adf4caf9b126"}, ] [package.dependencies] -airbyte-protocol-models = "0.4.0" +airbyte-protocol-models = "0.5.1" backoff = "*" cachetools = "*" -Deprecated = ">=1.2,<2.0" +Deprecated = ">=1.2,<1.3" dpath = ">=2.0.1,<2.1.0" genson = "1.2.2" isodate = ">=0.6.1,<0.7.0" Jinja2 = ">=3.1.2,<3.2.0" -jsonref = ">=0.2,<1.0" +jsonref = ">=0.2,<0.3" jsonschema = ">=3.2.0,<3.3.0" -pendulum = "*" +pendulum = "<3.0.0" pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" python-dateutil = "*" -PyYAML = ">=6.0.1" +PyYAML = ">=6.0.1,<7.0.0" requests = "*" -requests-cache = "*" +requests_cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pyarrow (==12.0.1)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "pyarrow (==12.0.1)"] -sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] [[package]] name = "airbyte-protocol-models" -version = "0.4.0" +version = "0.5.1" description = "Declares the Airbyte Protocol." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte_protocol_models-0.4.0-py3-none-any.whl", hash = "sha256:e6a31fcd237504198a678d02c0040a8798f281c39203da61a5abce67842c5360"}, - {file = "airbyte_protocol_models-0.4.0.tar.gz", hash = "sha256:518736015c29ac60b6b8964a1b0d9b52e40020bcbd89e2545cc781f0b37d0f2b"}, + {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, + {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, ] [package.dependencies] pydantic = ">=1.9.2,<2.0.0" -[[package]] -name = "atomicwrites" -version = "1.4.1" -description = "Atomic file writes." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, -] - [[package]] name = "attrs" version = "23.2.0" @@ -103,13 +93,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -365,13 +355,13 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jsonref" -version = "0.3.0" -description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." +version = "0.2" +description = "An implementation of JSON Reference for Python" optional = false -python-versions = ">=3.3,<4.0" +python-versions = "*" files = [ - {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, - {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, ] [[package]] @@ -565,113 +555,48 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] name = "pendulum" -version = "3.0.0" +version = "2.1.2" description = "Python datetimes made easy" optional = false -python-versions = ">=3.8" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ - {file = "pendulum-3.0.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2cf9e53ef11668e07f73190c805dbdf07a1939c3298b78d5a9203a86775d1bfd"}, - {file = "pendulum-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fb551b9b5e6059377889d2d878d940fd0bbb80ae4810543db18e6f77b02c5ef6"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c58227ac260d5b01fc1025176d7b31858c9f62595737f350d22124a9a3ad82d"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60fb6f415fea93a11c52578eaa10594568a6716602be8430b167eb0d730f3332"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b69f6b4dbcb86f2c2fe696ba991e67347bcf87fe601362a1aba6431454b46bde"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:138afa9c373ee450ede206db5a5e9004fd3011b3c6bbe1e57015395cd076a09f"}, - {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:83d9031f39c6da9677164241fd0d37fbfc9dc8ade7043b5d6d62f56e81af8ad2"}, - {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0c2308af4033fa534f089595bcd40a95a39988ce4059ccd3dc6acb9ef14ca44a"}, - {file = "pendulum-3.0.0-cp310-none-win_amd64.whl", hash = "sha256:9a59637cdb8462bdf2dbcb9d389518c0263799189d773ad5c11db6b13064fa79"}, - {file = "pendulum-3.0.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3725245c0352c95d6ca297193192020d1b0c0f83d5ee6bb09964edc2b5a2d508"}, - {file = "pendulum-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6c035f03a3e565ed132927e2c1b691de0dbf4eb53b02a5a3c5a97e1a64e17bec"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:597e66e63cbd68dd6d58ac46cb7a92363d2088d37ccde2dae4332ef23e95cd00"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99a0f8172e19f3f0c0e4ace0ad1595134d5243cf75985dc2233e8f9e8de263ca"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:77d8839e20f54706aed425bec82a83b4aec74db07f26acd039905d1237a5e1d4"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afde30e8146292b059020fbc8b6f8fd4a60ae7c5e6f0afef937bbb24880bdf01"}, - {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:660434a6fcf6303c4efd36713ca9212c753140107ee169a3fc6c49c4711c2a05"}, - {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dee9e5a48c6999dc1106eb7eea3e3a50e98a50651b72c08a87ee2154e544b33e"}, - {file = "pendulum-3.0.0-cp311-none-win_amd64.whl", hash = "sha256:d4cdecde90aec2d67cebe4042fd2a87a4441cc02152ed7ed8fb3ebb110b94ec4"}, - {file = "pendulum-3.0.0-cp311-none-win_arm64.whl", hash = "sha256:773c3bc4ddda2dda9f1b9d51fe06762f9200f3293d75c4660c19b2614b991d83"}, - {file = "pendulum-3.0.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:409e64e41418c49f973d43a28afe5df1df4f1dd87c41c7c90f1a63f61ae0f1f7"}, - {file = "pendulum-3.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a38ad2121c5ec7c4c190c7334e789c3b4624798859156b138fcc4d92295835dc"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fde4d0b2024b9785f66b7f30ed59281bd60d63d9213cda0eb0910ead777f6d37"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b2c5675769fb6d4c11238132962939b960fcb365436b6d623c5864287faa319"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8af95e03e066826f0f4c65811cbee1b3123d4a45a1c3a2b4fc23c4b0dff893b5"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2165a8f33cb15e06c67070b8afc87a62b85c5a273e3aaa6bc9d15c93a4920d6f"}, - {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ad5e65b874b5e56bd942546ea7ba9dd1d6a25121db1c517700f1c9de91b28518"}, - {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17fe4b2c844bbf5f0ece69cfd959fa02957c61317b2161763950d88fed8e13b9"}, - {file = "pendulum-3.0.0-cp312-none-win_amd64.whl", hash = "sha256:78f8f4e7efe5066aca24a7a57511b9c2119f5c2b5eb81c46ff9222ce11e0a7a5"}, - {file = "pendulum-3.0.0-cp312-none-win_arm64.whl", hash = "sha256:28f49d8d1e32aae9c284a90b6bb3873eee15ec6e1d9042edd611b22a94ac462f"}, - {file = "pendulum-3.0.0-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:d4e2512f4e1a4670284a153b214db9719eb5d14ac55ada5b76cbdb8c5c00399d"}, - {file = "pendulum-3.0.0-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:3d897eb50883cc58d9b92f6405245f84b9286cd2de6e8694cb9ea5cb15195a32"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e169cc2ca419517f397811bbe4589cf3cd13fca6dc38bb352ba15ea90739ebb"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f17c3084a4524ebefd9255513692f7e7360e23c8853dc6f10c64cc184e1217ab"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:826d6e258052715f64d05ae0fc9040c0151e6a87aae7c109ba9a0ed930ce4000"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2aae97087872ef152a0c40e06100b3665d8cb86b59bc8471ca7c26132fccd0f"}, - {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ac65eeec2250d03106b5e81284ad47f0d417ca299a45e89ccc69e36130ca8bc7"}, - {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a5346d08f3f4a6e9e672187faa179c7bf9227897081d7121866358af369f44f9"}, - {file = "pendulum-3.0.0-cp37-none-win_amd64.whl", hash = "sha256:235d64e87946d8f95c796af34818c76e0f88c94d624c268693c85b723b698aa9"}, - {file = "pendulum-3.0.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:6a881d9c2a7f85bc9adafcfe671df5207f51f5715ae61f5d838b77a1356e8b7b"}, - {file = "pendulum-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d7762d2076b9b1cb718a6631ad6c16c23fc3fac76cbb8c454e81e80be98daa34"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e8e36a8130819d97a479a0e7bf379b66b3b1b520e5dc46bd7eb14634338df8c"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7dc843253ac373358ffc0711960e2dd5b94ab67530a3e204d85c6e8cb2c5fa10"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a78ad3635d609ceb1e97d6aedef6a6a6f93433ddb2312888e668365908c7120"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b30a137e9e0d1f751e60e67d11fc67781a572db76b2296f7b4d44554761049d6"}, - {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c95984037987f4a457bb760455d9ca80467be792236b69d0084f228a8ada0162"}, - {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d29c6e578fe0f893766c0d286adbf0b3c726a4e2341eba0917ec79c50274ec16"}, - {file = "pendulum-3.0.0-cp38-none-win_amd64.whl", hash = "sha256:deaba8e16dbfcb3d7a6b5fabdd5a38b7c982809567479987b9c89572df62e027"}, - {file = "pendulum-3.0.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b11aceea5b20b4b5382962b321dbc354af0defe35daa84e9ff3aae3c230df694"}, - {file = "pendulum-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a90d4d504e82ad236afac9adca4d6a19e4865f717034fc69bafb112c320dcc8f"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:825799c6b66e3734227756fa746cc34b3549c48693325b8b9f823cb7d21b19ac"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad769e98dc07972e24afe0cff8d365cb6f0ebc7e65620aa1976fcfbcadc4c6f3"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6fc26907eb5fb8cc6188cc620bc2075a6c534d981a2f045daa5f79dfe50d512"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c717eab1b6d898c00a3e0fa7781d615b5c5136bbd40abe82be100bb06df7a56"}, - {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3ddd1d66d1a714ce43acfe337190be055cdc221d911fc886d5a3aae28e14b76d"}, - {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:822172853d7a9cf6da95d7b66a16c7160cb99ae6df55d44373888181d7a06edc"}, - {file = "pendulum-3.0.0-cp39-none-win_amd64.whl", hash = "sha256:840de1b49cf1ec54c225a2a6f4f0784d50bd47f68e41dc005b7f67c7d5b5f3ae"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3b1f74d1e6ffe5d01d6023870e2ce5c2191486928823196f8575dcc786e107b1"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:729e9f93756a2cdfa77d0fc82068346e9731c7e884097160603872686e570f07"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e586acc0b450cd21cbf0db6bae386237011b75260a3adceddc4be15334689a9a"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22e7944ffc1f0099a79ff468ee9630c73f8c7835cd76fdb57ef7320e6a409df4"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fa30af36bd8e50686846bdace37cf6707bdd044e5cb6e1109acbad3277232e04"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:440215347b11914ae707981b9a57ab9c7b6983ab0babde07063c6ee75c0dc6e7"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:314c4038dc5e6a52991570f50edb2f08c339debdf8cea68ac355b32c4174e820"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5acb1d386337415f74f4d1955c4ce8d0201978c162927d07df8eb0692b2d8533"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a789e12fbdefaffb7b8ac67f9d8f22ba17a3050ceaaa635cd1cc4645773a4b1e"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:860aa9b8a888e5913bd70d819306749e5eb488e6b99cd6c47beb701b22bdecf5"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:5ebc65ea033ef0281368217fbf59f5cb05b338ac4dd23d60959c7afcd79a60a0"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d9fef18ab0386ef6a9ac7bad7e43ded42c83ff7ad412f950633854f90d59afa8"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1c134ba2f0571d0b68b83f6972e2307a55a5a849e7dac8505c715c531d2a8795"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:385680812e7e18af200bb9b4a49777418c32422d05ad5a8eb85144c4a285907b"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eec91cd87c59fb32ec49eb722f375bd58f4be790cae11c1b70fac3ee4f00da0"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4386bffeca23c4b69ad50a36211f75b35a4deb6210bdca112ac3043deb7e494a"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dfbcf1661d7146d7698da4b86e7f04814221081e9fe154183e34f4c5f5fa3bf8"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:04a1094a5aa1daa34a6b57c865b25f691848c61583fb22722a4df5699f6bf74c"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5b0ec85b9045bd49dd3a3493a5e7ddfd31c36a2a60da387c419fa04abcaecb23"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0a15b90129765b705eb2039062a6daf4d22c4e28d1a54fa260892e8c3ae6e157"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:bb8f6d7acd67a67d6fedd361ad2958ff0539445ef51cbe8cd288db4306503cd0"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd69b15374bef7e4b4440612915315cc42e8575fcda2a3d7586a0d88192d0c88"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc00f8110db6898360c53c812872662e077eaf9c75515d53ecc65d886eec209a"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:83a44e8b40655d0ba565a5c3d1365d27e3e6778ae2a05b69124db9e471255c4a"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1a3604e9fbc06b788041b2a8b78f75c243021e0f512447806a6d37ee5214905d"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:92c307ae7accebd06cbae4729f0ba9fa724df5f7d91a0964b1b972a22baa482b"}, - {file = "pendulum-3.0.0.tar.gz", hash = "sha256:5d034998dea404ec31fae27af6b22cff1708f830a1ed7353be4d1019bb9f584e"}, + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, ] [package.dependencies] -python-dateutil = ">=2.6" -tzdata = ">=2020.1" - -[package.extras] -test = ["time-machine (>=2.6.0)"] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" [[package]] name = "platformdirs" @@ -703,17 +628,6 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] -[[package]] -name = "py" -version = "1.11.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] - [[package]] name = "pydantic" version = "1.10.14" @@ -766,6 +680,21 @@ typing-extensions = ">=4.2.0" dotenv = ["python-dotenv (>=0.10.4)"] email = ["email-validator (>=1.0.3)"] +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + [[package]] name = "pyrsistent" version = "0.20.0" @@ -809,59 +738,68 @@ files = [ [[package]] name = "pytest" -version = "6.2.5" +version = "8.1.1" description = "pytest: simple powerful testing with Python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, - {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, + {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"}, + {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"}, ] [package.dependencies] -atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} -attrs = ">=19.2.0" colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" -pluggy = ">=0.12,<2.0" -py = ">=1.8.2" -toml = "*" +pluggy = ">=1.4,<2.0" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] +testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-mock" -version = "3.12.0" +version = "3.14.0" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, - {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, ] [package.dependencies] -pytest = ">=5.0" +pytest = ">=6.2.5" [package.extras] dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] six = ">=1.5" +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + [[package]] name = "pyyaml" version = "6.0.1" @@ -945,13 +883,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -963,50 +901,48 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" -version = "1.11.0" +version = "1.12.1" description = "Mock out responses from the requests package" optional = false -python-versions = "*" +python-versions = ">=3.5" files = [ - {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"}, - {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"}, + {file = "requests-mock-1.12.1.tar.gz", hash = "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401"}, + {file = "requests_mock-1.12.1-py2.py3-none-any.whl", hash = "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563"}, ] [package.dependencies] -requests = ">=2.3,<3" -six = "*" +requests = ">=2.22,<3" [package.extras] fixture = ["fixtures"] -test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] [[package]] name = "setuptools" -version = "69.1.0" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -1020,36 +956,25 @@ files = [ ] [[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = ">=3.7" files = [ - {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, - {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, -] - -[[package]] -name = "tzdata" -version = "2024.1" -description = "Provider of IANA time zone data" -optional = false -python-versions = ">=2" -files = [ - {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, - {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -1298,4 +1223,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "82d1e2e98e3dcdedee09c2f9112d8df65dff18dc4f79705659c2246282ddec0e" +content-hash = "600a97507b92d5147474e57857ec554ffbe7baea6eaba333ab6a44aecf6d8792" diff --git a/airbyte-integrations/connectors/source-surveymonkey/pyproject.toml b/airbyte-integrations/connectors/source-surveymonkey/pyproject.toml index f78bab2e5e2b2..5977902f78a15 100644 --- a/airbyte-integrations/connectors/source-surveymonkey/pyproject.toml +++ b/airbyte-integrations/connectors/source-surveymonkey/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "0.2.4" +version = "0.3.0" name = "source-surveymonkey" description = "Source implementation for Surveymonkey." authors = [ "Airbyte ",] @@ -17,7 +17,7 @@ include = "source_surveymonkey" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "==0.51.40" +airbyte-cdk = "^0" vcrpy = "==4.1.1" urllib3 = "==1.26.18" @@ -26,5 +26,5 @@ source-surveymonkey = "source_surveymonkey.run:run" [tool.poetry.group.dev.dependencies] requests-mock = "^1.9.3" -pytest-mock = "^3.6.1" -pytest = "^6.1" +pytest-mock = "^3.12.0" +pytest = "^8.0.0" diff --git a/airbyte-integrations/connectors/source-surveymonkey/source_surveymonkey/__init__.py b/airbyte-integrations/connectors/source-surveymonkey/source_surveymonkey/__init__.py index 7e9c35dfc1db2..ac59ec55ea7bb 100644 --- a/airbyte-integrations/connectors/source-surveymonkey/source_surveymonkey/__init__.py +++ b/airbyte-integrations/connectors/source-surveymonkey/source_surveymonkey/__init__.py @@ -22,6 +22,7 @@ SOFTWARE. """ +from .config_migrations import MigrateAccessTokenToCredentials from .source import SourceSurveymonkey -__all__ = ["SourceSurveymonkey"] +__all__ = ["SourceSurveymonkey", "MigrateAccessTokenToCredentials"] diff --git a/airbyte-integrations/connectors/source-surveymonkey/source_surveymonkey/components.py b/airbyte-integrations/connectors/source-surveymonkey/source_surveymonkey/components.py new file mode 100644 index 0000000000000..6732dcec83a09 --- /dev/null +++ b/airbyte-integrations/connectors/source-surveymonkey/source_surveymonkey/components.py @@ -0,0 +1,51 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from dataclasses import dataclass +from typing import Iterable + +from airbyte_cdk.models import SyncMode +from airbyte_cdk.sources.declarative.partition_routers.substream_partition_router import SubstreamPartitionRouter +from airbyte_cdk.sources.declarative.types import Record, StreamSlice, StreamState + + +class SurveyIdPartitionRouter(SubstreamPartitionRouter): + """ + A SurveyIdPartitionRouter is specifically tailored for survey data, addressing the limitations of the current solution, + SubstreamPartitionRouter, which only offers one option for partitioning via access to the parent stream with input. + The SurveyIdPartitionRouter generates stream slices for partitioning based on either provided survey IDs or parent stream keys. + + + Inherits from: + SubstreamPartitionRouter + + Custom Methods: + stream_slices: Generates stream slices for partitioning. + """ + + def stream_slices(self) -> Iterable[StreamSlice]: + """ + Generates stream slices for partitioning based on survey IDs or parent stream keys. + """ + + # Get the survey IDs from the configuration + survey_ids = self.config.get("survey_ids", []) + + # Extract necessary configuration parameters + parent_stream_config = self.parent_stream_configs[0] + parent_key = parent_stream_config.parent_key.string + partition_field = parent_stream_config.partition_field.string + + if survey_ids: + # If specific survey IDs are provided, yield slices based on them + for item in survey_ids: + yield StreamSlice(partition={partition_field: item}, cursor_slice={}) + else: + # If not, iterate over parent stream records and yield slices based on parent keys + for parent_stream_config in self.parent_stream_configs: + for item in parent_stream_config.stream.read_records(sync_mode=SyncMode.full_refresh): + yield StreamSlice(partition={partition_field: item[parent_key]}, cursor_slice={}) + + # Ensures the function always returns an iterable + yield from [] diff --git a/airbyte-integrations/connectors/source-surveymonkey/source_surveymonkey/config_migrations.py b/airbyte-integrations/connectors/source-surveymonkey/source_surveymonkey/config_migrations.py new file mode 100644 index 0000000000000..98709c3e5f930 --- /dev/null +++ b/airbyte-integrations/connectors/source-surveymonkey/source_surveymonkey/config_migrations.py @@ -0,0 +1,64 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +import logging +from typing import Any, List, Mapping + +from airbyte_cdk.config_observation import create_connector_config_control_message +from airbyte_cdk.entrypoint import AirbyteEntrypoint +from airbyte_cdk.sources import Source +from airbyte_cdk.sources.message import InMemoryMessageRepository, MessageRepository + +logger = logging.getLogger("airbyte_logger") + + +class MigrateAccessTokenToCredentials: + """ + Facilitates the migration of configuration at runtime. + This migration maintains backward compatibility with the previous version by creating a new property. + When reverting to the previous source version, the connector will use the old property `access_token`. + + Starting from version `0.3.0`, the `access_token` property is relocated to the `credentials` property. + """ + + migrate_from_key: str = "access_token" + migrate_to_key: str = "credentials" + default_values: dict = {"auth_method": "oauth2.0"} + + @classmethod + def should_migrate(cls, config: Mapping[str, Any]) -> bool: + """ + Determines whether the configuration should be migrated to adopt the new structure for the `custom_reports`, + based on the source specification. + """ + return cls.migrate_to_key not in config + + @classmethod + def transform(cls, config: Mapping[str, Any]) -> Mapping[str, Any]: + """Transforms the configuration.""" + migrated_config = {cls.migrate_to_key: {cls.migrate_from_key: config.get(cls.migrate_from_key), **cls.default_values}} + return config | migrated_config + + @classmethod + def modify_and_save(cls, config_path: str, source: Source, config: Mapping[str, Any]) -> Mapping[str, Any]: + """Modifies and saves the configuration.""" + migrated_config = cls.transform(config) + source.write_config(migrated_config, config_path) + return migrated_config + + @classmethod + def emit_control_message(cls, migrated_config: Mapping[str, Any]) -> None: + print(create_connector_config_control_message(migrated_config).json(exclude_unset=True)) + + @classmethod + def migrate(cls, args: List[str], source: Source) -> None: + """ + Checks the input arguments, migrates the configuration if necessary, and emits the CONTROL message. + """ + config_path = AirbyteEntrypoint(source).extract_config(args) + if config_path: + config = source.read_config(config_path) + if cls.should_migrate(config): + cls.emit_control_message(cls.modify_and_save(config_path, source, config)) diff --git a/airbyte-integrations/connectors/source-surveymonkey/source_surveymonkey/manifest.yaml b/airbyte-integrations/connectors/source-surveymonkey/source_surveymonkey/manifest.yaml new file mode 100644 index 0000000000000..17f7da823aefa --- /dev/null +++ b/airbyte-integrations/connectors/source-surveymonkey/source_surveymonkey/manifest.yaml @@ -0,0 +1,467 @@ +version: 0.61.2 +type: DeclarativeSource +check: + type: CheckStream + stream_names: + - surveys +definitions: + survey_ids: + type: DeclarativeStream + name: survey_ids + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.surveymonkey.com/v3/ + path: surveys + http_method: GET + request_parameters: + sort_by: date_modified + sort_order: ASC + request_headers: {} + authenticator: + type: BearerAuthenticator + api_token: "{{ config['credentials']['access_token'] }}" + error_handler: + type: CompositeErrorHandler + error_handlers: + - type: DefaultErrorHandler + backoff_strategies: + - type: WaitTimeFromHeader + header: X-Ratelimit-App-Global-Minute-Reset + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - data + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + page_size_option: + type: RequestOption + field_name: per_page + inject_into: request_parameter + pagination_strategy: + type: CursorPagination + page_size: 1000 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' +streams: + - type: DeclarativeStream + name: survey_pages + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.surveymonkey.com/v3/ + path: surveys/{{ stream_partition.survey_id }}/details + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: BearerAuthenticator + api_token: "{{ config['credentials']['access_token'] }}" + error_handler: + type: CompositeErrorHandler + error_handlers: + - type: DefaultErrorHandler + backoff_strategies: + - type: WaitTimeFromHeader + header: X-Ratelimit-App-Global-Minute-Reset + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - pages + partition_router: + - type: CustomPartitionRouter + class_name: source_surveymonkey.components.SurveyIdPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + partition_field: survey_id + stream: + $ref: "#/definitions/survey_ids" + transformations: + - type: RemoveFields + field_pointers: + - - questions + - type: DeclarativeStream + name: survey_questions + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.surveymonkey.com/v3/ + path: surveys/{{ stream_partition.survey_id }}/details + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: BearerAuthenticator + api_token: "{{ config['credentials']['access_token'] }}" + error_handler: + type: CompositeErrorHandler + error_handlers: + - type: DefaultErrorHandler + backoff_strategies: + - type: WaitTimeFromHeader + header: X-Ratelimit-App-Global-Minute-Reset + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - pages + - "*" + - questions + - "*" + partition_router: + - type: CustomPartitionRouter + class_name: source_surveymonkey.components.SurveyIdPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + partition_field: survey_id + stream: + $ref: "#/definitions/survey_ids" + - type: DeclarativeStream + name: survey_responses + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.surveymonkey.com/v3/ + path: surveys/{{ stream_partition.survey_id }}/responses/bulk + http_method: GET + request_parameters: + sort_order: ASC + sort_by: date_modified + request_headers: {} + authenticator: + type: BearerAuthenticator + api_token: "{{ config['credentials']['access_token'] }}" + error_handler: + type: CompositeErrorHandler + error_handlers: + - type: DefaultErrorHandler + backoff_strategies: + - type: WaitTimeFromHeader + header: X-Ratelimit-App-Global-Minute-Reset + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - data + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + page_size_option: + inject_into: request_parameter + type: RequestOption + field_name: per_page + pagination_strategy: + type: CursorPagination + page_size: 100 + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: + type: CustomPartitionRouter + class_name: source_surveymonkey.components.SurveyIdPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + partition_field: survey_id + stream: + $ref: "#/definitions/survey_ids" + state_migrations: + - type: LegacyToPerPartitionStateMigration + incremental_sync: + type: DatetimeBasedCursor + cursor_field: date_modified + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%S%z" + datetime_format: "%Y-%m-%dT%H:%M:%S" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config['start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: start_modified_at + inject_into: request_parameter + - type: DeclarativeStream + name: survey_collectors + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.surveymonkey.com/v3/ + path: surveys/{{ stream_partition.survey_id }}/collectors + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: BearerAuthenticator + api_token: "{{ config['credentials']['access_token'] }}" + error_handler: + type: CompositeErrorHandler + error_handlers: + - type: DefaultErrorHandler + backoff_strategies: + - type: WaitTimeFromHeader + header: X-Ratelimit-App-Global-Minute-Reset + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - data + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: + - type: CustomPartitionRouter + class_name: source_surveymonkey.components.SurveyIdPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + partition_field: survey_id + stream: + $ref: "#/definitions/survey_ids" + transformations: + - type: AddFields + fields: + - path: + - survey_id + value: "{{ stream_slice.survey_id }}" + value_type: string + - type: DeclarativeStream + name: collectors + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.surveymonkey.com/v3/ + path: collectors//{{ stream_partition.collector_id }} + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: BearerAuthenticator + api_token: "{{ config['credentials']['access_token'] }}" + error_handler: + type: CompositeErrorHandler + error_handlers: + - type: DefaultErrorHandler + backoff_strategies: + - type: WaitTimeFromHeader + header: X-Ratelimit-App-Global-Minute-Reset + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: [] + partition_router: + - type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + partition_field: collector_id + stream: + type: DeclarativeStream + name: survey_collectors + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + type: HttpRequester + url_base: https://api.surveymonkey.com/v3/ + path: surveys/{{ stream_partition.survey_id }}/collectors + http_method: GET + request_parameters: {} + request_headers: {} + authenticator: + type: BearerAuthenticator + api_token: "{{ config['credentials']['access_token'] }}" + error_handler: + type: CompositeErrorHandler + error_handlers: + - type: DefaultErrorHandler + backoff_strategies: + - type: WaitTimeFromHeader + header: X-Ratelimit-App-Global-Minute-Reset + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - data + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("links", {}).get("next", {}) }}' + stop_condition: '{{ not response.get("links", {}).get("next", {}) }}' + partition_router: + - type: CustomPartitionRouter + class_name: source_surveymonkey.components.SurveyIdPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + partition_field: survey_id + stream: + $ref: "#/definitions/survey_ids" + transformations: + - type: AddFields + fields: + - path: + - survey_id + value: "{{ stream_slice.survey_id }}" + value_type: string +spec: + connection_specification: + $schema: http://json-schema.org/draft-07/schema# + type: object + required: + - start_date + - credentials + properties: + origin: + type: string + order: 1 + enum: + - "USA" + - "Europe" + - "Canada" + default: "USA" + title: "Origin datacenter of the SurveyMonkey account" + description: "Depending on the originating datacenter of the SurveyMonkey account, the API access URL may be different." + credentials: + title: "SurveyMonkey Authorization Method" + description: "The authorization method to use to retrieve data from SurveyMonkey" + type: object + required: + - auth_method + - access_token + order: 2 + properties: + auth_method: + type: string + const: "oauth2.0" + order: 0 + client_id: + type: string + title: "Client ID" + description: "The Client ID of the SurveyMonkey developer application." + airbyte_secret: true + order: 1 + client_secret: + type: string + title: "Client Secret" + description: "The Client Secret of the SurveyMonkey developer application." + airbyte_secret: true + order: 2 + access_token: + title: "Access Token" + order: 3 + type: string + airbyte_secret: true + description: 'Access Token for making authenticated requests. See the docs for information on how to generate this key.' + start_date: + title: "Start Date" + order: 3 + type: string + description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z?$" + examples: + - "2021-01-01T00:00:00Z" + format: "date-time" + survey_ids: + type: array + order: 1000 + items: + type: string + pattern: "^[0-9]{8,9}$" + title: "Survey Monkey survey IDs" + description: "IDs of the surveys from which you'd like to replicate data. If left empty, data from all boards to which you have access will be replicated." + additionalProperties: true + advanced_auth: + auth_flow_type: "oauth2.0" + predicate_key: + - credentials + - auth_method + predicate_value: "oauth2.0" + oauth_config_specification: + complete_oauth_output_specification: + type: object + additionalProperties: false + properties: + access_token: + type: string + path_in_connector_config: + - credentials + - access_token + complete_oauth_server_input_specification: + type: object + additionalProperties: false + properties: + client_id: + type: string + client_secret: + type: string + complete_oauth_server_output_specification: + type: object + additionalProperties: false + properties: + client_id: + type: string + path_in_connector_config: + - credentials + - client_id + client_secret: + type: string + path_in_connector_config: + - credentials + - client_secret + oauth_user_input_from_connector_config_specification: + type: object + additionalProperties: false + properties: + origin: + type: string + path_in_connector_config: + - origin + type: Spec diff --git a/airbyte-integrations/connectors/source-surveymonkey/source_surveymonkey/run.py b/airbyte-integrations/connectors/source-surveymonkey/source_surveymonkey/run.py index f3cbc028402b0..cd9ab6158d297 100644 --- a/airbyte-integrations/connectors/source-surveymonkey/source_surveymonkey/run.py +++ b/airbyte-integrations/connectors/source-surveymonkey/source_surveymonkey/run.py @@ -6,9 +6,10 @@ import sys from airbyte_cdk.entrypoint import launch -from source_surveymonkey import SourceSurveymonkey +from source_surveymonkey import MigrateAccessTokenToCredentials, SourceSurveymonkey def run(): source = SourceSurveymonkey() + MigrateAccessTokenToCredentials.migrate(sys.argv[1:], source) launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-surveymonkey/source_surveymonkey/source.py b/airbyte-integrations/connectors/source-surveymonkey/source_surveymonkey/source.py index 15dc2aac82ed2..08da56aa721c9 100644 --- a/airbyte-integrations/connectors/source-surveymonkey/source_surveymonkey/source.py +++ b/airbyte-integrations/connectors/source-surveymonkey/source_surveymonkey/source.py @@ -2,98 +2,65 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # -from itertools import groupby -from operator import itemgetter from typing import Any, List, Mapping, Tuple import pendulum import requests from airbyte_cdk.logger import AirbyteLogger -from airbyte_cdk.sources import AbstractSource +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource from airbyte_cdk.sources.streams import Stream from airbyte_cdk.sources.streams.http.auth import TokenAuthenticator -from .streams import Collectors, SurveyCollectors, SurveyPages, SurveyQuestions, SurveyResponses, Surveys +from .streams import Surveys +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. -class SourceSurveymonkey(AbstractSource): - SCOPES = {"responses_read_detail", "surveys_read", "users_read"} +WARNING: Do not modify this file. +""" - @classmethod - def _check_credentials(cls, config: Mapping[str, Any]) -> Tuple[bool, Any]: - # check if the credentials are provided correctly, because for now these value are not required in spec - if not config.get("access_token"): - credentials = config.get("credentials", {}) - if not credentials: - return False, "credentials fields are not provided" - else: - if not credentials.get("auth_method"): - return False, "auth_method in credentials is not provided" - if not credentials.get("access_token"): - return False, "access_token in credentials is not provided" +class SourceSurveymonkey(YamlDeclarativeSource): + SCOPES = {"responses_read_detail", "surveys_read", "users_read"} + def __init__(self): + super().__init__(**{"path_to_yaml": "manifest.yaml"}) + + @classmethod + def _check_scopes(cls, response_json): + granted_scopes = response_json["scopes"]["granted"] + missed_scopes = cls.SCOPES - set(granted_scopes) + if missed_scopes: + return False, "missed required scopes: " + ", ".join(missed_scopes) return True, None - def check_connection(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> Tuple[bool, Any]: - is_valid_credentials, msg = self._check_credentials(config) - if not is_valid_credentials: - return is_valid_credentials, msg + @staticmethod + def get_authenticator(config: Mapping[str, Any]): + token = config.get("credentials", {}).get("access_token") + if not token: + token = config["access_token"] + return TokenAuthenticator(token=token) - authenticator = self.get_authenticator(config) - if "survey_ids" in config: - # Check whether survey id exists and collect errors - errors = [] - for survey_id in config["survey_ids"]: - response = requests.head( - url=f"https://api.surveymonkey.com/v3/surveys/{survey_id}/details", headers=authenticator.get_auth_header() - ) - try: - response.raise_for_status() - except requests.exceptions.HTTPError: - errors.append((survey_id, f"{response.status_code} {response.reason}")) - if errors: - # Group survey ids by their error type - survey_id_index, error_message_index = 0, 1 - msg = "; ".join( - [ - f"{error_type}: {', '.join(list(map(itemgetter(survey_id_index), survey_ids)))}" - for error_type, survey_ids in groupby(errors, lambda x: x[error_message_index]) - ] - ) - return False, msg + def check_connection(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> Tuple[bool, Any]: + # Check scopes try: + authenticator = self.get_authenticator(config) response = requests.get(url="https://api.surveymonkey.com/v3/users/me", headers=authenticator.get_auth_header()) response.raise_for_status() return self._check_scopes(response.json()) except Exception as e: return False, repr(e) + return super().check_connection(logger, config) + def streams(self, config: Mapping[str, Any]) -> List[Stream]: + streams = super().streams(config=config) + authenticator = self.get_authenticator(config) start_date = pendulum.parse(config["start_date"]) survey_ids = config.get("survey_ids", []) args = {"authenticator": authenticator, "start_date": start_date, "survey_ids": survey_ids} - return [ - Collectors(**args), - Surveys(**args), - SurveyCollectors(**args), - SurveyPages(**args), - SurveyQuestions(**args), - SurveyResponses(**args), - ] - @staticmethod - def get_authenticator(config: Mapping[str, Any]): - token = config.get("credentials", {}).get("access_token") - if not token: - token = config["access_token"] - return TokenAuthenticator(token=token) - - @classmethod - def _check_scopes(cls, response_json): - granted_scopes = response_json["scopes"]["granted"] - missed_scopes = cls.SCOPES - set(granted_scopes) - if missed_scopes: - return False, "missed required scopes: " + ", ".join(missed_scopes) - return True, None + streams.append(Surveys(**args)) + return streams diff --git a/airbyte-integrations/connectors/source-surveymonkey/source_surveymonkey/spec.json b/airbyte-integrations/connectors/source-surveymonkey/source_surveymonkey/spec.json deleted file mode 100644 index fb7cd73de11d4..0000000000000 --- a/airbyte-integrations/connectors/source-surveymonkey/source_surveymonkey/spec.json +++ /dev/null @@ -1,127 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.com/integrations/sources/surveymonkey", - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "SurveyMonkey Spec", - "type": "object", - "required": ["start_date"], - "additionalProperties": true, - "properties": { - "origin": { - "type": "string", - "order": 1, - "enum": ["USA", "Europe", "Canada"], - "default": "USA", - "title": "Origin datacenter of the SurveyMonkey account", - "description": "Depending on the originating datacenter of the SurveyMonkey account, the API access URL may be different." - }, - "credentials": { - "title": "SurveyMonkey Authorization Method", - "description": "The authorization method to use to retrieve data from SurveyMonkey", - "type": "object", - "required": ["auth_method", "access_token"], - "order": 2, - "properties": { - "auth_method": { - "type": "string", - "const": "oauth2.0", - "order": 0 - }, - "client_id": { - "type": "string", - "title": "Client ID", - "description": "The Client ID of the SurveyMonkey developer application.", - "airbyte_secret": true, - "order": 1 - }, - "client_secret": { - "type": "string", - "title": "Client Secret", - "description": "The Client Secret of the SurveyMonkey developer application.", - "airbyte_secret": true, - "order": 2 - }, - "access_token": { - "title": "Access Token", - "order": 3, - "type": "string", - "airbyte_secret": true, - "description": "Access Token for making authenticated requests. See the docs for information on how to generate this key." - } - } - }, - "start_date": { - "title": "Start Date", - "order": 3, - "type": "string", - "description": "UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.", - "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z?$", - "examples": ["2021-01-01T00:00:00Z"], - "format": "date-time" - }, - "survey_ids": { - "type": "array", - "order": 4, - "items": { - "type": "string", - "pattern": "^[0-9]{8,9}$" - }, - "title": "Survey Monkey survey IDs", - "description": "IDs of the surveys from which you'd like to replicate data. If left empty, data from all boards to which you have access will be replicated." - } - } - }, - "advanced_auth": { - "auth_flow_type": "oauth2.0", - "predicate_key": ["credentials", "auth_method"], - "predicate_value": "oauth2.0", - "oauth_config_specification": { - "complete_oauth_output_specification": { - "type": "object", - "additionalProperties": false, - "properties": { - "access_token": { - "type": "string", - "path_in_connector_config": ["credentials", "access_token"] - } - } - }, - "complete_oauth_server_input_specification": { - "type": "object", - "additionalProperties": false, - "properties": { - "client_id": { - "type": "string" - }, - "client_secret": { - "type": "string" - } - } - }, - "complete_oauth_server_output_specification": { - "type": "object", - "additionalProperties": false, - "properties": { - "client_id": { - "type": "string", - "path_in_connector_config": ["credentials", "client_id"] - }, - "client_secret": { - "type": "string", - "path_in_connector_config": ["credentials", "client_secret"] - } - } - }, - "oauth_user_input_from_connector_config_specification": { - "type": "object", - "additionalProperties": false, - "properties": { - "origin": { - "type": "string", - "path_in_connector_config": ["origin"] - } - } - } - } - } -} diff --git a/airbyte-integrations/connectors/source-surveymonkey/source_surveymonkey/streams.py b/airbyte-integrations/connectors/source-surveymonkey/source_surveymonkey/streams.py index f9f64b7f96e44..f92260ab7b292 100644 --- a/airbyte-integrations/connectors/source-surveymonkey/source_surveymonkey/streams.py +++ b/airbyte-integrations/connectors/source-surveymonkey/source_surveymonkey/streams.py @@ -76,11 +76,9 @@ def backoff_time(self, response: requests.Response) -> Optional[float]: https://developer.surveymonkey.com/api/v3/#headers X-Ratelimit-App-Global-Minute-Remaining - Number of remaining requests app has before hitting per minute limit X-Ratelimit-App-Global-Minute-Reset - Number of seconds until the rate limit remaining resets - Limits: https://developer.surveymonkey.com/api/v3/#request-and-response-limits Max Requests Per Day - 500 Max Requests Per Minute - 120 - Real limits from API response headers: "X-Ratelimit-App-Global-Minute-Limit": "720" "X-Ratelimit-App-Global-Day-Limit": "500000" @@ -161,16 +159,13 @@ class Surveys(SurveyIDSliceMixin, IncrementalSurveymonkeyStream): """ Docs: https://developer.surveymonkey.com/api/v3/#surveys A source for stream slices. It does not contain useful info itself. - The `surveys/id/details` endpoint contains full data about pages and questions. This data is already collected and gathered into array [pages] and array of arrays questions, where each inner array contains data about certain page. Example [[q1, q2,q3], [q4,q5]] means we have 2 pages, first page contains 3 questions q1, q2, q3, second page contains other. - If we use the "normal" query, we need to query surveys/id/pages for page enumeration, then we need to query each page_id in every new request for details (because `pages` doesn't contain full info and valid only for enumeration), then for each page need to enumerate questions and get each question_id for details (since `/surveys/id/pages/id/questions` without ending /id also doesnt contain full info, - In other words, we need to have triple stream slices, (note that api is very very rate limited and we need details for each survey etc), and finally we get a response similar to those we can have from `/id/details` endpoint. Also we will need to gather info to array in case of overrequesting, but details is already gathered it for us. @@ -186,121 +181,3 @@ def parse_response(self, response: requests.Response, stream_state: Mapping[str, for record in data: record.pop("pages", None) # remove pages data yield record - - -class SurveyPages(SurveyIDSliceMixin, SurveymonkeyStream): - """should be filled from SurveyDetails""" - - data_field = "pages" - - def parse_response(self, response: requests.Response, stream_state: Mapping[str, Any], **kwargs) -> Iterable[Mapping]: - data = super().parse_response(response=response, stream_state=stream_state, **kwargs) - for record in data: - record.pop("questions", None) # remove question data - yield record - - -class SurveyQuestions(SurveyIDSliceMixin, SurveymonkeyStream): - """should be filled from SurveyDetails""" - - data_field = "pages" - - def parse_response(self, response: requests.Response, stream_state: Mapping[str, Any], **kwargs) -> Iterable[Mapping]: - data = super().parse_response(response=response, stream_state=stream_state, **kwargs) - for entry in data: - page_id = entry["id"] - questions = entry["questions"] - for question in questions: - question["page_id"] = page_id - yield question - - -class SurveyResponses(SurveyIDSliceMixin, IncrementalSurveymonkeyStream): - """ - Docs: https://developer.surveymonkey.com/api/v3/#api-endpoints-survey-responses - """ - - cursor_field = "date_modified" - - def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: - return f"surveys/{stream_slice['survey_id']}/responses/bulk" - - def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: - """ - Return the latest state by comparing the survey_id and cursor value in the latest record with the stream's most recent state object - and returning an updated state object. - """ - survey_id = latest_record.get("survey_id") - if not current_stream_state: - current_stream_state = {} - survey_state = current_stream_state.get(survey_id, {}) - - latest_record_value = latest_record.get(self.cursor_field, "") - if latest_record_value: - # add 1 second, otherwise next incremental syns return the same record - latest_record_value = pendulum.parse(latest_record_value).add(seconds=1).to_iso8601_string() - - state_value = max( - latest_record_value, - survey_state.get(self.cursor_field, ""), - ) - current_stream_state[survey_id] = {self.cursor_field: state_value} - return current_stream_state - - def request_params( - self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None - ) -> MutableMapping[str, Any]: - if next_page_token: - return next_page_token - - params = super().request_params(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token) - params["sort_order"] = "ASC" - params["sort_by"] = self.cursor_field - # Max of 100 allowed per page. We use the highest - # possible value to reduce the number of API calls. - params["per_page"] = 100 - - since_value_surv = stream_state.get(stream_slice["survey_id"]) - if since_value_surv: - since_value = ( - pendulum.parse(since_value_surv.get(self.cursor_field)) if since_value_surv.get(self.cursor_field) else self._start_date - ) - since_value = max(since_value, self._start_date) - else: - since_value = self._start_date - params["start_modified_at"] = since_value.strftime("%Y-%m-%dT%H:%M:%S") - return params - - -class SurveyCollectors(SurveyIDSliceMixin, SurveymonkeyStream): - """ - API Docs: https://www.surveymonkey.com/developer/api/v3/#api-endpoints-get-surveys-id-collectors - """ - - def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: - return f"surveys/{ stream_slice['survey_id'] }/collectors" - - def parse_response(self, response: requests.Response, stream_state: Mapping[str, Any], **kwargs) -> Iterable[Mapping]: - data = super().parse_response(response=response, stream_state=stream_state, **kwargs) - for record in data: - record["survey_id"] = kwargs.get("stream_slice", {}).get("survey_id") - yield record - - -class Collectors(SurveymonkeyStream): - """ - API Docs: https://www.surveymonkey.com/developer/api/v3/#api-endpoints-get-collectors-id- - """ - - data_field = None - - def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: - return f"collectors/{stream_slice['collector_id']}" - - def stream_slices(self, stream_state: Mapping[str, Any] = None, **kwargs): - - survey_collectors = SurveyCollectors(start_date=self._start_date, survey_ids=self._survey_ids, authenticator=self.authenticator) - survey_ids = survey_collectors.stream_slices(stream_state, **kwargs) - for slice in survey_ids: - for collector in survey_collectors.read_records(sync_mode=SyncMode.full_refresh, stream_state=stream_state, stream_slice=slice): - yield {"collector_id": collector["id"]} diff --git a/airbyte-integrations/connectors/source-surveymonkey/unit_tests/conftest.py b/airbyte-integrations/connectors/source-surveymonkey/unit_tests/conftest.py new file mode 100644 index 0000000000000..043ae568f284e --- /dev/null +++ b/airbyte-integrations/connectors/source-surveymonkey/unit_tests/conftest.py @@ -0,0 +1,49 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +import json + +import pendulum +import pytest +from airbyte_cdk.models import SyncMode +from airbyte_cdk.sources.declarative.incremental.per_partition_cursor import StreamSlice +from airbyte_cdk.sources.streams.http.auth import NoAuth +from source_surveymonkey.source import SourceSurveymonkey + + +@pytest.fixture(name='read_json') +def read_json_fixture(request): + def read_json(file_name, skip_folder=False): + if not skip_folder: + folder_name = request.node.fspath.basename.split('.')[0] + with open("unit_tests/" + folder_name + "/" + file_name) as f: + return json.load(f) + return read_json + +@pytest.fixture(name='read_records') +def read_records_fixture(config): + def read_records(stream_name, slice=StreamSlice(partition={"survey_id": "307785415"}, cursor_slice={})): + stream = next(filter(lambda x: x.name == stream_name, SourceSurveymonkey().streams(config=config))) + records = list( + map(lambda record: record.data, stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice=slice))) + return records + return read_records + + +@pytest.fixture +def args_mock(): + return { + "authenticator": NoAuth(), + "start_date": pendulum.parse("2000-01-01"), + "survey_ids": [] + } + +@pytest.fixture +def config(args_mock): + return { + **args_mock, + "survey_ids": ["307785415"], + "credentials": {"access_token": "access_token"}, + "start_date": args_mock["start_date"].to_iso8601_string() + } diff --git a/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_config_migrations.py b/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_config_migrations.py new file mode 100644 index 0000000000000..4e4fde8edf321 --- /dev/null +++ b/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_config_migrations.py @@ -0,0 +1,83 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +import json + +from source_surveymonkey.config_migrations import MigrateAccessTokenToCredentials +from source_surveymonkey.source import SourceSurveymonkey + +TEST_CONFIG = "test_old_config.json" +NEW_TEST_CONFIG = "test_new_config.json" +UPGRADED_TEST_CONFIG = "test_upgraded_config.json" + + +def revert_migration(config_path: str = TEST_CONFIG) -> None: + config_path = "unit_tests/test_config_migrations/" + config_path + with open(config_path, "r") as test_config: + config = json.load(test_config) + config.pop("credentials") + with open(config_path, "w") as updated_config: + config = json.dumps(config) + updated_config.write(config) + + +def test_migrate_config(capsys, read_json): + migration_instance = MigrateAccessTokenToCredentials() + original_config = read_json(TEST_CONFIG) + # migrate the test_config + migration_instance.migrate(["check", "--config", "unit_tests/test_config_migrations/" + TEST_CONFIG], SourceSurveymonkey()) + # load the updated config + test_migrated_config = read_json(TEST_CONFIG) + # check migrated property + assert "credentials" in test_migrated_config + assert isinstance(test_migrated_config["credentials"], dict) + assert "access_token" in test_migrated_config["credentials"] + assert isinstance(test_migrated_config["access_token"], str) + assert "auth_method" in test_migrated_config["credentials"] + assert test_migrated_config["credentials"]["auth_method"] == "oauth2.0" + # check the old property is in place + assert "access_token" in test_migrated_config + assert isinstance(test_migrated_config["access_token"], str) + # check the migration should be skipped, once already done + assert not migration_instance.should_migrate(test_migrated_config) + # load the old custom reports VS migrated + assert original_config["access_token"] == test_migrated_config["credentials"]["access_token"] + # test CONTROL MESSAGE was emitted + control_msg = json.loads(capsys.readouterr()[0].split("\n")[0]) + control = control_msg.get("control", {}) + config = control.get("connectorConfig", {}).get("config", {}) + + assert control_msg.get("type") == "CONTROL" + assert control.get("type") == "CONNECTOR_CONFIG" + # old custom_reports are stil type(str) + assert isinstance(config.get("access_token"), str) + # new custom_reports are type(list) + assert isinstance(config.get("credentials", {}).get("access_token"), str) + # check the migrated values + assert config.get("credentials", {}).get("access_token") == "access_token" + # revert the test_config to the starting point + revert_migration() + + +def test_config_is_reverted(read_json): + # check the test_config state, it has to be the same as before tests + test_config = read_json(TEST_CONFIG) + # check the config no longer has the migarted property + assert "credentials" not in test_config + # check the old property is still there + assert "access_token" in test_config + assert isinstance(test_config["access_token"], str) + + +def test_should_not_migrate_new_config(read_json): + new_config = read_json(NEW_TEST_CONFIG) + migration_instance = MigrateAccessTokenToCredentials() + assert not migration_instance.should_migrate(new_config) + + +def test_should_not_migrate_upgraded_config(read_json): + new_config = read_json(UPGRADED_TEST_CONFIG) + migration_instance = MigrateAccessTokenToCredentials() + assert not migration_instance.should_migrate(new_config) diff --git a/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_config_migrations/test_new_config.json b/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_config_migrations/test_new_config.json new file mode 100644 index 0000000000000..bd75c2164030a --- /dev/null +++ b/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_config_migrations/test_new_config.json @@ -0,0 +1,7 @@ +{ + "start_date": "2021-01-01T00:00:00Z", + "credentials": { + "access_token": "access_token", + "auth_method": "oauth2.0" + } +} diff --git a/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_config_migrations/test_old_config.json b/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_config_migrations/test_old_config.json new file mode 100644 index 0000000000000..8be5bdaf40416 --- /dev/null +++ b/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_config_migrations/test_old_config.json @@ -0,0 +1 @@ +{ "access_token": "access_token", "start_date": "2021-01-01T00:00:00Z" } diff --git a/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_config_migrations/test_upgraded_config.json b/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_config_migrations/test_upgraded_config.json new file mode 100644 index 0000000000000..c7d37542e3a5b --- /dev/null +++ b/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_config_migrations/test_upgraded_config.json @@ -0,0 +1,8 @@ +{ + "access_token": "access_token", + "start_date": "2021-01-01T00:00:00Z", + "credentials": { + "access_token": "access_token", + "auth_method": "oauth2.0" + } +} diff --git a/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_custom_router.py b/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_custom_router.py new file mode 100644 index 0000000000000..eaa7b4d151767 --- /dev/null +++ b/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_custom_router.py @@ -0,0 +1,47 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +from unittest.mock import Mock + +import pytest +from airbyte_cdk.sources.declarative.partition_routers.substream_partition_router import ParentStreamConfig +from source_surveymonkey.components import SurveyIdPartitionRouter + +# test cases as a list of tuples (survey_ids, parent_stream_configs, expected_slices) +test_cases = [ + ( + # test form ids present in config + ["survey_id_1", "survey_id_2"], + [{"stream": Mock(read_records=Mock(return_value=[{"id": "survey_id_3"}, {"id": "survey_id_4"}]))}], + [{"survey_id": "survey_id_1"}, {"survey_id": "survey_id_2"}], + ), + ( + # test no form ids in config + [], + [ + {"stream": Mock(read_records=Mock(return_value=[{"id": "survey_id_3"}, {"id": "survey_id_4"}]))}, + {"stream": Mock(read_records=Mock(return_value=[{"id": "survey_id_5"}, {"id": "survey_id_6"}]))}, + ], + [{"survey_id": "survey_id_3"}, {"survey_id": "survey_id_4"}, {"survey_id": "survey_id_5"}, {"survey_id": "survey_id_6"}], + ), +] + + +@pytest.mark.parametrize("survey_ids, parent_stream_configs, expected_slices", test_cases) +def test_stream_slices(survey_ids, parent_stream_configs, expected_slices): + stream_configs = [] + + for parent_stream_config in parent_stream_configs: + stream_config = ParentStreamConfig( + stream=parent_stream_config["stream"], parent_key="id", partition_field="survey_id", config=None, parameters=None + ) + stream_configs.append(stream_config) + if not stream_configs: + stream_configs = [None] + + router = SurveyIdPartitionRouter(config={"survey_ids": survey_ids}, parent_stream_configs=stream_configs, parameters=None) + slices = list(router.stream_slices()) + + assert slices == expected_slices diff --git a/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_source.py b/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_source.py index d76e50f555d19..a397163a108bf 100644 --- a/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_source.py +++ b/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_source.py @@ -2,7 +2,6 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # -import pytest from source_surveymonkey.source import SourceSurveymonkey source_config = {"start_date": "2021-01-01T00:00:00", "access_token": "something"} @@ -18,75 +17,8 @@ def test_source_streams(): assert len(streams) == 6 -def test_source_check_connection_old_config(requests_mock): - requests_mock.get( - "https://api.surveymonkey.com/v3/users/me", json={"scopes": {"granted": ["responses_read_detail", "surveys_read", "users_read"]}} - ) - - results = SourceSurveymonkey().check_connection(logger=None, config=source_config) - assert results == (True, None) - - -def test_source_check_connection_new_config(requests_mock): - requests_mock.get( - "https://api.surveymonkey.com/v3/users/me", json={"scopes": {"granted": ["responses_read_detail", "surveys_read", "users_read"]}} - ) - - results = SourceSurveymonkey().check_connection(logger=None, config=new_source_config) - assert results == (True, None) - - def test_source_check_connection_failed_missing_scopes(requests_mock): requests_mock.get("https://api.surveymonkey.com/v3/users/me", json={"scopes": {"granted": ["surveys_read", "users_read"]}}) results = SourceSurveymonkey().check_connection(logger=None, config=new_source_config) assert results == (False, "missed required scopes: responses_read_detail") - -def test_source_check_connection_config_with_survey_id_errors(requests_mock): - mock_status_code = 404 - mock_survey_id = "1234567890" - mock_msg = f"{mock_status_code} None: {mock_survey_id}" - - new_source_config['survey_ids'] = [mock_survey_id] - requests_mock.get( - "https://api.surveymonkey.com/v3/users/me", json={"scopes": {"granted": ["responses_read_detail", "surveys_read", "users_read"]}} - ) - - requests_mock.head( - f"https://api.surveymonkey.com/v3/surveys/{mock_survey_id}/details", status_code=mock_status_code - ) - - results = SourceSurveymonkey().check_connection(logger=None, config=new_source_config) - assert results == (False, mock_msg) - -@pytest.mark.parametrize( - "config, err_msg", - [ - ( - { - "start_date": "2021-01-01T00:00:00", - "origin": "USA", - }, - "credentials fields are not provided", - ), - ( - { - "start_date": "2021-01-01T00:00:00", - "origin": "USA", - "credentials": {"access_token": "something", "client_secret": "client_secret", "client_id": "client_id"}, - }, - "auth_method in credentials is not provided", - ), - ( - { - "start_date": "2021-01-01T00:00:00", - "origin": "USA", - "credentials": {"auth_method": "something", "client_secret": "client_secret", "client_id": "client_id"}, - }, - "access_token in credentials is not provided", - ), - ], -) -def test_source_check_connection_failed_missing_credentials(config, err_msg): - results = SourceSurveymonkey().check_connection(logger=None, config=config) - assert results == (False, err_msg) diff --git a/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_streams.py index b2148397749b9..24a60bc08d959 100644 --- a/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_streams.py +++ b/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_streams.py @@ -4,46 +4,16 @@ from unittest.mock import Mock -import pendulum import pytest from airbyte_cdk.models import SyncMode -from airbyte_cdk.sources.streams.http.auth import NoAuth -from source_surveymonkey.streams import SurveyCollectors, SurveyIds, SurveyPages, SurveyQuestions, SurveyResponses, Surveys +from source_surveymonkey.streams import SurveyIds, Surveys -args_mock = {"authenticator": NoAuth(), "start_date": pendulum.parse("2000-01-01"), "survey_ids": []} -records_survey_ids = [ - { - "id": "307785415", - "title": "b9jo5h23l7pa", - "nickname": "qhs5vg2qi0o4arsjiwy2ay00n82n", - "href": "https://api.surveymonkey.com/v3/surveys/307785415", - }, - { - "id": "307785388", - "title": "igpfp2yfsw90df6nxbsb49v", - "nickname": "h23gl22ulmfsyt4q7xt", - "href": "https://api.surveymonkey.com/v3/surveys/307785388", - }, -] - -response_survey_ids = { - "data": records_survey_ids, - "per_page": 50, - "page": 1, - "total": 2, - "links": {"self": "https://api.surveymonkey.com/v3/surveys?per_page=50&page=1"}, -} - - -def test_survey_ids(requests_mock): - requests_mock.get("https://api.surveymonkey.com/v3/surveys", json=response_survey_ids) - stream = SurveyIds(**args_mock) - records = stream.read_records(sync_mode=SyncMode.full_refresh) - assert list(records) == records_survey_ids - - -def test_user_defined_retry(requests_mock): +@pytest.mark.parametrize("stream, expected_records_file, stream_slice", [ + (SurveyIds, "records_survey_ids.json", None), + (Surveys, "records_surveys.json", {"survey_id": "307785415"}) +]) +def test_survey_stream_read_records(requests_mock, args_mock, read_json, stream, expected_records_file, stream_slice): requests_mock.get( "https://api.surveymonkey.com/v3/surveys", [ @@ -60,291 +30,44 @@ def test_user_defined_retry(requests_mock): } }, }, - {"status_code": 200, "headers": {"X-Ratelimit-App-Global-Minute-Remaining": "100"}, "json": response_survey_ids}, + {"status_code": 200, "headers": {"X-Ratelimit-App-Global-Minute-Remaining": "100"}, + "json": read_json("response_survey_ids.json")}, ], ) - - stream = SurveyIds(**args_mock) - stream.default_backoff_time = 3 - records = stream.read_records(sync_mode=SyncMode.full_refresh) - assert list(records) == records_survey_ids - - -def test_slices_from_survey_ids(requests_mock): - requests_mock.get("https://api.surveymonkey.com/v3/surveys", json=response_survey_ids) + requests_mock.get("https://api.surveymonkey.com/v3/surveys/307785415/details", json=read_json("response_survey_details.json")) + stream_instance = stream(**args_mock) + stream_instance.default_backoff_time = 3 + records = stream_instance.read_records(sync_mode=SyncMode.full_refresh, stream_slice=stream_slice) + expected_records = read_json(expected_records_file) + assert list(records) == expected_records + + +@pytest.mark.parametrize("additional_arguments, expected_slices", [ + ({}, [{"survey_id": "307785415"}, {"survey_id": "307785388"}]), + ({"survey_ids": ["307785415"]}, [{"survey_id": "307785415"}]) +]) +def test_survey_slices(requests_mock, args_mock, read_json, additional_arguments, expected_slices): + if not additional_arguments: + requests_mock.get("https://api.surveymonkey.com/v3/surveys", json=read_json("response_survey_ids.json")) + args_mock.update(additional_arguments) stream_slices = Surveys(**args_mock).stream_slices() - assert list(stream_slices) == [{"survey_id": "307785415"}, {"survey_id": "307785388"}] + assert list(stream_slices) == expected_slices -def test_slices_from_config(requests_mock): - args = {**args_mock, **{"survey_ids": ["307785415"]}} - stream_slices = Surveys(**args).stream_slices() - assert list(stream_slices) == [{"survey_id": "307785415"}] +@pytest.mark.parametrize("endpoint, records_filename", [ + ("survey_pages", "records_survey_pages.json"), + ("survey_questions", "records_survey_questions.json"), + ("survey_collectors", "records_survey_collectors.json") +]) +def test_survey_data(requests_mock, read_records, read_json, endpoint, records_filename): + requests_mock.get("https://api.surveymonkey.com/v3/surveys/307785415/details", json=read_json("response_survey_details.json")) + requests_mock.get("https://api.surveymonkey.com/v3/surveys/307785415/collectors", json=read_json("response_survey_collectors.json")) + records = read_records(endpoint) + expected_records = read_json(records_filename) + assert list(records) == expected_records -response_survey_details = { - "title": "b9jo5h23l7pa", - "nickname": "qhs5vg2qi0o4arsjiwy2ay00n82n", - "language": "ru", - "folder_id": "0", - "category": "", - "question_count": 10, - "page_count": 3, - "response_count": 20, - "date_created": "2021-06-09T21:20:00", - "date_modified": "2021-06-10T11:07:00", - "id": "307785415", - "buttons_text": {"next_button": "Nex >>>>>", "prev_button": "Nix <<<<<", "done_button": "Nax_Don_Gon!", "exit_button": ""}, - "is_owner": True, - "footer": True, - "theme_id": "4510354", - "custom_variables": {}, - "href": "https://api.surveymonkey.com/v3/surveys/307785415", - "analyze_url": "https://www.surveymonkey.com/analyze/BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D", - "edit_url": "https://www.surveymonkey.com/create/?sm=BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D", - "collect_url": "https://www.surveymonkey.com/collect/list?sm=BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D", - "summary_url": "https://www.surveymonkey.com/summary/BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D", - "preview": "https://www.surveymonkey.com/r/Preview/?sm=YVdtL_2BP5oiGTrfksyofvENkBr7v87Xfh8hbcJr8rbqgesWvwJjz5N1F7pCSRcDoy", - "pages": [ - { - "title": "", - "description": "", - "position": 1, - "question_count": 0, - "id": "168831392", - "href": "https://api.surveymonkey.com/v3/surveys/307785415/pages/168831392", - "questions": [], - }, - { - "title": "p71uerk2uh7k5", - "description": "92cb9d98j15jmfo", - "position": 2, - "question_count": 2, - "id": "168831393", - "href": "https://api.surveymonkey.com/v3/surveys/307785415/pages/168831393", - "questions": [ - { - "id": "667461690", - "position": 1, - "visible": True, - "family": "single_choice", - "subtype": "vertical", - "layout": None, - "sorting": None, - "required": None, - "validation": None, - "forced_ranking": False, - "headings": [{"heading": "53o3ibly at73qjs4e4 y9dug7jxfmpmr 8esacb5"}], - "href": "https://api.surveymonkey.com/v3/surveys/307785415/pages/168831393/questions/667461690", - "answers": { - "choices": [ - { - "position": 1, - "visible": True, - "text": "lg2mcft4e64 ywiatkmeo ci3rr4l2v0 ot6un49a 4b28sq4g8qv7tj 4ihpko73bp0k6lf swaeo3o4mg2jf5g rnh225wj520w1ps p9emk1wg64vwl", - "quiz_options": {"score": 0}, - "id": "4385174700", - }, - { - "position": 2, - "visible": True, - "text": "ywg8bovna adsahna5kd1jg vdism1 w045ovutkx9 oubne2u vd0x7lh3 y3npa4kfb5", - "quiz_options": {"score": 0}, - "id": "4385174701", - }, - ] - }, - }, - { - "id": "667461777", - "position": 2, - "visible": True, - "family": "single_choice", - "subtype": "menu", - "layout": None, - "sorting": None, - "required": None, - "validation": None, - "forced_ranking": False, - "headings": [{"heading": "kjqdk eo7hfnu or7bmd1iwqxxp sguqta4f8141iy"}], - "href": "https://api.surveymonkey.com/v3/surveys/307785415/pages/168831393/questions/667461777", - "answers": { - "choices": [ - { - "position": 1, - "visible": True, - "text": "11bp1ll11nu0 ool67 tkbke01j3mtq 22f4r54u073p h6kt4puolum4", - "quiz_options": {"score": 0}, - "id": "4385174970", - }, - { - "position": 2, - "visible": True, - "text": "8q53omsxw8 08yyjvj3ns9j yu7yap87 d2tgjv55j5d5o3y dbd69m94qav1wma 8upqf7cliu hb26pytfkwyt rfo2ac4", - "quiz_options": {"score": 0}, - "id": "4385174971", - }, - ] - }, - }, - ], - }, - ], -} - - -def test_surveys(requests_mock): - requests_mock.get("https://api.surveymonkey.com/v3/surveys/307785415/details", json=response_survey_details) - args = {**args_mock, **{"survey_ids": ["307785415"]}} - records = Surveys(**args).read_records(sync_mode=SyncMode.full_refresh, stream_slice={"survey_id": "307785415"}) - assert list(records) == [ - { - "analyze_url": "https://www.surveymonkey.com/analyze/BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D", - "buttons_text": {"done_button": "Nax_Don_Gon!", "exit_button": "", "next_button": "Nex >>>>>", "prev_button": "Nix <<<<<"}, - "category": "", - "collect_url": "https://www.surveymonkey.com/collect/list?sm=BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D", - "custom_variables": {}, - "date_created": "2021-06-09T21:20:00", - "date_modified": "2021-06-10T11:07:00", - "edit_url": "https://www.surveymonkey.com/create/?sm=BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D", - "folder_id": "0", - "footer": True, - "href": "https://api.surveymonkey.com/v3/surveys/307785415", - "id": "307785415", - "is_owner": True, - "language": "ru", - "nickname": "qhs5vg2qi0o4arsjiwy2ay00n82n", - "page_count": 3, - "preview": "https://www.surveymonkey.com/r/Preview/?sm=YVdtL_2BP5oiGTrfksyofvENkBr7v87Xfh8hbcJr8rbqgesWvwJjz5N1F7pCSRcDoy", - "question_count": 10, - "response_count": 20, - "summary_url": "https://www.surveymonkey.com/summary/BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D", - "theme_id": "4510354", - "title": "b9jo5h23l7pa", - } - ] - - -def test_survey_pages(requests_mock): - requests_mock.get("https://api.surveymonkey.com/v3/surveys/307785415/details", json=response_survey_details) - args = {**args_mock, **{"survey_ids": ["307785415"]}} - records = SurveyPages(**args).read_records(sync_mode=SyncMode.full_refresh, stream_slice={"survey_id": "307785415"}) - assert list(records) == [ - { - "description": "", - "href": "https://api.surveymonkey.com/v3/surveys/307785415/pages/168831392", - "id": "168831392", - "position": 1, - "question_count": 0, - "title": "", - }, - { - "description": "92cb9d98j15jmfo", - "href": "https://api.surveymonkey.com/v3/surveys/307785415/pages/168831393", - "id": "168831393", - "position": 2, - "question_count": 2, - "title": "p71uerk2uh7k5", - }, - ] - - -def test_survey_questions(requests_mock): - requests_mock.get("https://api.surveymonkey.com/v3/surveys/307785415/details", json=response_survey_details) - args = {**args_mock, **{"survey_ids": ["307785415"]}} - records = SurveyQuestions(**args).read_records(sync_mode=SyncMode.full_refresh, stream_slice={"survey_id": "307785415"}) - assert list(records) == [ - { - "answers": { - "choices": [ - { - "id": "4385174700", - "position": 1, - "quiz_options": {"score": 0}, - "text": "lg2mcft4e64 ywiatkmeo ci3rr4l2v0 ot6un49a " - "4b28sq4g8qv7tj 4ihpko73bp0k6lf " - "swaeo3o4mg2jf5g rnh225wj520w1ps " - "p9emk1wg64vwl", - "visible": True, - }, - { - "id": "4385174701", - "position": 2, - "quiz_options": {"score": 0}, - "text": "ywg8bovna adsahna5kd1jg vdism1 w045ovutkx9 " "oubne2u vd0x7lh3 y3npa4kfb5", - "visible": True, - }, - ] - }, - "family": "single_choice", - "forced_ranking": False, - "headings": [{"heading": "53o3ibly at73qjs4e4 y9dug7jxfmpmr 8esacb5"}], - "href": "https://api.surveymonkey.com/v3/surveys/307785415/pages/168831393/questions/667461690", - "id": "667461690", - "layout": None, - "page_id": "168831393", - "position": 1, - "required": None, - "sorting": None, - "subtype": "vertical", - "validation": None, - "visible": True, - }, - { - "answers": { - "choices": [ - { - "id": "4385174970", - "position": 1, - "quiz_options": {"score": 0}, - "text": "11bp1ll11nu0 ool67 tkbke01j3mtq " "22f4r54u073p h6kt4puolum4", - "visible": True, - }, - { - "id": "4385174971", - "position": 2, - "quiz_options": {"score": 0}, - "text": "8q53omsxw8 08yyjvj3ns9j yu7yap87 " "d2tgjv55j5d5o3y dbd69m94qav1wma 8upqf7cliu " "hb26pytfkwyt rfo2ac4", - "visible": True, - }, - ] - }, - "family": "single_choice", - "forced_ranking": False, - "headings": [{"heading": "kjqdk eo7hfnu or7bmd1iwqxxp sguqta4f8141iy"}], - "href": "https://api.surveymonkey.com/v3/surveys/307785415/pages/168831393/questions/667461777", - "id": "667461777", - "layout": None, - "page_id": "168831393", - "position": 2, - "required": None, - "sorting": None, - "subtype": "menu", - "validation": None, - "visible": True, - }, - ] - - -def test_survey_collectors(requests_mock): - requests_mock.get( - "https://api.surveymonkey.com/v3/surveys/307785415/collectors", - json={ - "data": [{"name": "Teams Poll", "id": "1", "href": "https://api.surveymonkey.com/v3/collectors/1"}], - "per_page": 50, - "page": 1, - "total": 1, - "links": {"self": "https://api.surveymonkey.com/v3/surveys/307785415/collectors?page=1&per_page=50"}, - }, - ) - args = {**args_mock, **{"survey_ids": ["307785415"]}} - records = SurveyCollectors(**args).read_records(sync_mode=SyncMode.full_refresh, stream_slice={"survey_id": "307785415"}) - assert list(records) == [ - {"name": "Teams Poll", "id": "1", "href": "https://api.surveymonkey.com/v3/collectors/1", "survey_id": "307785415"} - ] - - -def test_surveys_next_page_token(): +def test_surveys_next_page_token(args_mock): args = {**args_mock, **{"survey_ids": ["307785415"]}} stream = SurveyIds(**args) mockresponse = Mock() @@ -358,55 +81,3 @@ def test_surveys_next_page_token(): params = stream.next_page_token(mockresponse) assert params == {"page": "2", "per_page": "50"} - - -@pytest.mark.parametrize( - "current_stream_state,latest_record,state", - [ - ( - {"307785415": {"date_modified": "2021-01-01T00:00:00+00:00"}}, - {"survey_id": "307785415", "date_modified": "2021-12-01T00:00:00+00:00"}, - {"307785415": {"date_modified": "2021-12-01T00:00:01+00:00"}}, - ), - ( - {}, - {"survey_id": "307785415", "date_modified": "2021-12-01T00:00:00+00:00"}, - {"307785415": {"date_modified": "2021-12-01T00:00:01+00:00"}}, - ), - ( - {"307785415": {"date_modified": "2021-01-01T00:00:00+00:00"}}, - {"survey_id": "307785415"}, - {"307785415": {"date_modified": "2021-01-01T00:00:00+00:00"}}, - ), - ], -) -def test_surveys_responses_get_updated_state(current_stream_state, latest_record, state): - args = {**args_mock, **{"survey_ids": ["307785415"]}} - stream = SurveyResponses(**args) - actual_state = stream.get_updated_state(current_stream_state=current_stream_state, latest_record=latest_record) - assert actual_state == state - - -@pytest.mark.parametrize( - "stream_state,params", - [ - ( - {"307785415": {"date_modified": "2021-01-01T00:00:00+00:00"}}, - {"sort_order": "ASC", "sort_by": "date_modified", "per_page": 100, "start_modified_at": "2021-01-01T00:00:00"}, - ), - ( - {}, - { - "sort_order": "ASC", - "sort_by": "date_modified", - "per_page": 100, - "start_modified_at": "2000-01-01T00:00:00", - }, # return start_date - ), - ], -) -def test_surveys_responses_request_params(stream_state, params): - args = {**args_mock, **{"survey_ids": ["307785415"]}} - stream = SurveyResponses(**args) - actual_params = stream.request_params(stream_state=stream_state, stream_slice={"survey_id": "307785415"}) - assert actual_params == params diff --git a/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_streams/records_survey_collectors.json b/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_streams/records_survey_collectors.json new file mode 100644 index 0000000000000..0f9e049ce2e9b --- /dev/null +++ b/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_streams/records_survey_collectors.json @@ -0,0 +1,8 @@ +[ + { + "name": "Teams Poll", + "id": "1", + "href": "https://api.surveymonkey.com/v3/collectors/1", + "survey_id": "307785415" + } +] diff --git a/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_streams/records_survey_ids.json b/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_streams/records_survey_ids.json new file mode 100644 index 0000000000000..b9e34a371b1b6 --- /dev/null +++ b/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_streams/records_survey_ids.json @@ -0,0 +1,14 @@ +[ + { + "id": "307785415", + "title": "b9jo5h23l7pa", + "nickname": "qhs5vg2qi0o4arsjiwy2ay00n82n", + "href": "https://api.surveymonkey.com/v3/surveys/307785415" + }, + { + "id": "307785388", + "title": "igpfp2yfsw90df6nxbsb49v", + "nickname": "h23gl22ulmfsyt4q7xt", + "href": "https://api.surveymonkey.com/v3/surveys/307785388" + } +] diff --git a/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_streams/records_survey_pages.json b/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_streams/records_survey_pages.json new file mode 100644 index 0000000000000..9d72654fbd3f1 --- /dev/null +++ b/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_streams/records_survey_pages.json @@ -0,0 +1,18 @@ +[ + { + "description": "", + "href": "https://api.surveymonkey.com/v3/surveys/307785415/pages/168831392", + "id": "168831392", + "position": 1, + "question_count": 0, + "title": "" + }, + { + "description": "92cb9d98j15jmfo", + "href": "https://api.surveymonkey.com/v3/surveys/307785415/pages/168831393", + "id": "168831393", + "position": 2, + "question_count": 2, + "title": "p71uerk2uh7k5" + } +] diff --git a/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_streams/records_survey_questions.json b/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_streams/records_survey_questions.json new file mode 100644 index 0000000000000..d0221ab7a8c32 --- /dev/null +++ b/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_streams/records_survey_questions.json @@ -0,0 +1,82 @@ +[ + { + "answers": { + "choices": [ + { + "id": "4385174700", + "position": 1, + "quiz_options": { + "score": 0 + }, + "text": "lg2mcft4e64 ywiatkmeo ci3rr4l2v0 ot6un49a 4b28sq4g8qv7tj 4ihpko73bp0k6lf swaeo3o4mg2jf5g rnh225wj520w1ps p9emk1wg64vwl", + "visible": true + }, + { + "id": "4385174701", + "position": 2, + "quiz_options": { + "score": 0 + }, + "text": "ywg8bovna adsahna5kd1jg vdism1 w045ovutkx9 oubne2u vd0x7lh3 y3npa4kfb5", + "visible": true + } + ] + }, + "family": "single_choice", + "forced_ranking": false, + "headings": [ + { + "heading": "53o3ibly at73qjs4e4 y9dug7jxfmpmr 8esacb5" + } + ], + "href": "https://api.surveymonkey.com/v3/surveys/307785415/pages/168831393/questions/667461690", + "id": "667461690", + "layout": null, + "position": 1, + "required": null, + "sorting": null, + "subtype": "vertical", + "validation": null, + "visible": true + }, + { + "answers": { + "choices": [ + { + "id": "4385174970", + "position": 1, + "quiz_options": { + "score": 0 + }, + "text": "11bp1ll11nu0 ool67 tkbke01j3mtq 22f4r54u073p h6kt4puolum4", + "visible": true + }, + { + "id": "4385174971", + "position": 2, + "quiz_options": { + "score": 0 + }, + "text": "8q53omsxw8 08yyjvj3ns9j yu7yap87 d2tgjv55j5d5o3y dbd69m94qav1wma 8upqf7cliu hb26pytfkwyt rfo2ac4", + "visible": true + } + ] + }, + "family": "single_choice", + "forced_ranking": false, + "headings": [ + { + "heading": "kjqdk eo7hfnu or7bmd1iwqxxp sguqta4f8141iy" + } + ], + "href": "https://api.surveymonkey.com/v3/surveys/307785415/pages/168831393/questions/667461777", + "id": "667461777", + "layout": null, + "position": 2, + "required": null, + "sorting": null, + "subtype": "menu", + "validation": null, + "visible": true + } +] diff --git a/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_streams/records_surveys.json b/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_streams/records_surveys.json new file mode 100644 index 0000000000000..e1607411511c7 --- /dev/null +++ b/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_streams/records_surveys.json @@ -0,0 +1,31 @@ +[ + { + "analyze_url": "https://www.surveymonkey.com/analyze/BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D", + "buttons_text": { + "done_button": "Nax_Don_Gon!", + "exit_button": "", + "next_button": "Nex >>>>>", + "prev_button": "Nix <<<<<" + }, + "category": "", + "collect_url": "https://www.surveymonkey.com/collect/list?sm=BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D", + "custom_variables": {}, + "date_created": "2021-06-09T21:20:00", + "date_modified": "2021-06-10T11:07:00", + "edit_url": "https://www.surveymonkey.com/create/?sm=BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D", + "folder_id": "0", + "footer": true, + "href": "https://api.surveymonkey.com/v3/surveys/307785415", + "id": "307785415", + "is_owner": true, + "language": "ru", + "nickname": "qhs5vg2qi0o4arsjiwy2ay00n82n", + "page_count": 3, + "preview": "https://www.surveymonkey.com/r/Preview/?sm=YVdtL_2BP5oiGTrfksyofvENkBr7v87Xfh8hbcJr8rbqgesWvwJjz5N1F7pCSRcDoy", + "question_count": 10, + "response_count": 20, + "summary_url": "https://www.surveymonkey.com/summary/BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D", + "theme_id": "4510354", + "title": "b9jo5h23l7pa" + } +] diff --git a/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_streams/response_survey_collectors.json b/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_streams/response_survey_collectors.json new file mode 100644 index 0000000000000..30cb2c53c83c5 --- /dev/null +++ b/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_streams/response_survey_collectors.json @@ -0,0 +1,15 @@ +{ + "data": [ + { + "name": "Teams Poll", + "id": "1", + "href": "https://api.surveymonkey.com/v3/collectors/1" + } + ], + "per_page": 50, + "page": 1, + "total": 1, + "links": { + "self": "https://api.surveymonkey.com/v3/surveys/307785415/collectors?page=1&per_page=50" + } +} diff --git a/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_streams/response_survey_details.json b/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_streams/response_survey_details.json new file mode 100644 index 0000000000000..efe4b218c9614 --- /dev/null +++ b/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_streams/response_survey_details.json @@ -0,0 +1,130 @@ +{ + "title": "b9jo5h23l7pa", + "nickname": "qhs5vg2qi0o4arsjiwy2ay00n82n", + "language": "ru", + "folder_id": "0", + "category": "", + "question_count": 10, + "page_count": 3, + "response_count": 20, + "date_created": "2021-06-09T21:20:00", + "date_modified": "2021-06-10T11:07:00", + "id": "307785415", + "buttons_text": { + "next_button": "Nex >>>>>", + "prev_button": "Nix <<<<<", + "done_button": "Nax_Don_Gon!", + "exit_button": "" + }, + "is_owner": true, + "footer": true, + "theme_id": "4510354", + "custom_variables": {}, + "href": "https://api.surveymonkey.com/v3/surveys/307785415", + "analyze_url": "https://www.surveymonkey.com/analyze/BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D", + "edit_url": "https://www.surveymonkey.com/create/?sm=BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D", + "collect_url": "https://www.surveymonkey.com/collect/list?sm=BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D", + "summary_url": "https://www.surveymonkey.com/summary/BPAkhAawaMN8C17tmmNFxjZ0KOiJJ3FCQU4krShVQhg_3D", + "preview": "https://www.surveymonkey.com/r/Preview/?sm=YVdtL_2BP5oiGTrfksyofvENkBr7v87Xfh8hbcJr8rbqgesWvwJjz5N1F7pCSRcDoy", + "pages": [ + { + "title": "", + "description": "", + "position": 1, + "question_count": 0, + "id": "168831392", + "href": "https://api.surveymonkey.com/v3/surveys/307785415/pages/168831392", + "questions": [] + }, + { + "title": "p71uerk2uh7k5", + "description": "92cb9d98j15jmfo", + "position": 2, + "question_count": 2, + "id": "168831393", + "href": "https://api.surveymonkey.com/v3/surveys/307785415/pages/168831393", + "questions": [ + { + "id": "667461690", + "position": 1, + "visible": true, + "family": "single_choice", + "subtype": "vertical", + "layout": null, + "sorting": null, + "required": null, + "validation": null, + "forced_ranking": false, + "headings": [ + { + "heading": "53o3ibly at73qjs4e4 y9dug7jxfmpmr 8esacb5" + } + ], + "href": "https://api.surveymonkey.com/v3/surveys/307785415/pages/168831393/questions/667461690", + "answers": { + "choices": [ + { + "position": 1, + "visible": true, + "text": "lg2mcft4e64 ywiatkmeo ci3rr4l2v0 ot6un49a 4b28sq4g8qv7tj 4ihpko73bp0k6lf swaeo3o4mg2jf5g rnh225wj520w1ps p9emk1wg64vwl", + "quiz_options": { + "score": 0 + }, + "id": "4385174700" + }, + { + "position": 2, + "visible": true, + "text": "ywg8bovna adsahna5kd1jg vdism1 w045ovutkx9 oubne2u vd0x7lh3 y3npa4kfb5", + "quiz_options": { + "score": 0 + }, + "id": "4385174701" + } + ] + } + }, + { + "id": "667461777", + "position": 2, + "visible": true, + "family": "single_choice", + "subtype": "menu", + "layout": null, + "sorting": null, + "required": null, + "validation": null, + "forced_ranking": false, + "headings": [ + { + "heading": "kjqdk eo7hfnu or7bmd1iwqxxp sguqta4f8141iy" + } + ], + "href": "https://api.surveymonkey.com/v3/surveys/307785415/pages/168831393/questions/667461777", + "answers": { + "choices": [ + { + "position": 1, + "visible": true, + "text": "11bp1ll11nu0 ool67 tkbke01j3mtq 22f4r54u073p h6kt4puolum4", + "quiz_options": { + "score": 0 + }, + "id": "4385174970" + }, + { + "position": 2, + "visible": true, + "text": "8q53omsxw8 08yyjvj3ns9j yu7yap87 d2tgjv55j5d5o3y dbd69m94qav1wma 8upqf7cliu hb26pytfkwyt rfo2ac4", + "quiz_options": { + "score": 0 + }, + "id": "4385174971" + } + ] + } + } + ] + } + ] +} diff --git a/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_streams/response_survey_ids.json b/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_streams/response_survey_ids.json new file mode 100644 index 0000000000000..387e718871c1e --- /dev/null +++ b/airbyte-integrations/connectors/source-surveymonkey/unit_tests/test_streams/response_survey_ids.json @@ -0,0 +1,22 @@ +{ + "data": [ + { + "id": "307785415", + "title": "b9jo5h23l7pa", + "nickname": "qhs5vg2qi0o4arsjiwy2ay00n82n", + "href": "https://api.surveymonkey.com/v3/surveys/307785415" + }, + { + "id": "307785388", + "title": "igpfp2yfsw90df6nxbsb49v", + "nickname": "h23gl22ulmfsyt4q7xt", + "href": "https://api.surveymonkey.com/v3/surveys/307785388" + } + ], + "per_page": 50, + "page": 1, + "total": 2, + "links": { + "self": "https://api.surveymonkey.com/v3/surveys?per_page=50&page=1" + } +} diff --git a/airbyte-integrations/connectors/source-tiktok-marketing/metadata.yaml b/airbyte-integrations/connectors/source-tiktok-marketing/metadata.yaml index 7a7377d41d17e..2bd018b11ae26 100644 --- a/airbyte-integrations/connectors/source-tiktok-marketing/metadata.yaml +++ b/airbyte-integrations/connectors/source-tiktok-marketing/metadata.yaml @@ -11,12 +11,13 @@ data: connectorSubtype: api connectorType: source definitionId: 4bfac00d-ce15-44ff-95b9-9e3c3e8fbd35 - dockerImageTag: 3.9.3 + dockerImageTag: 3.9.4 dockerRepository: airbyte/source-tiktok-marketing documentationUrl: https://docs.airbyte.com/integrations/sources/tiktok-marketing githubIssueLabel: source-tiktok-marketing icon: tiktok.svg license: MIT + maxSecondsBetweenMessages: 86400 name: TikTok Marketing remoteRegistries: pypi: diff --git a/airbyte-integrations/connectors/source-tiktok-marketing/poetry.lock b/airbyte-integrations/connectors/source-tiktok-marketing/poetry.lock index 925f4338357a0..9612385ca187c 100644 --- a/airbyte-integrations/connectors/source-tiktok-marketing/poetry.lock +++ b/airbyte-integrations/connectors/source-tiktok-marketing/poetry.lock @@ -1,18 +1,18 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.52.10" +version = "0.72.2" description = "A framework for writing Airbyte Connectors." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte-cdk-0.52.10.tar.gz", hash = "sha256:0daee950fe0d4453e6ceea2633090fc1d2144224e6f170b3c6cb4c6392811b47"}, - {file = "airbyte_cdk-0.52.10-py3-none-any.whl", hash = "sha256:366fd7bbbba317223edc1571d22b91c6f5bcff4ba65b3131e42f9b37e29932f4"}, + {file = "airbyte-cdk-0.72.2.tar.gz", hash = "sha256:3c06ed9c1436967ffde77b51814772dbbd79745d610bc2fe400dff9c4d7a9877"}, + {file = "airbyte_cdk-0.72.2-py3-none-any.whl", hash = "sha256:8d50773fe9ffffe9be8d6c2d2fcb10c50153833053b3ef4283fcb39c544dc4b9"}, ] [package.dependencies] -airbyte-protocol-models = "0.4.2" +airbyte-protocol-models = "0.5.1" backoff = "*" cachetools = "*" Deprecated = ">=1.2,<2.0" @@ -22,8 +22,9 @@ isodate = ">=0.6.1,<0.7.0" Jinja2 = ">=3.1.2,<3.2.0" jsonref = ">=0.2,<1.0" jsonschema = ">=3.2.0,<3.3.0" -pendulum = "*" +pendulum = "<3.0.0" pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" python-dateutil = "*" PyYAML = ">=6.0.1" requests = "*" @@ -31,20 +32,20 @@ requests-cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.19)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.19)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.19)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.19)"] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] [[package]] name = "airbyte-protocol-models" -version = "0.4.2" +version = "0.5.1" description = "Declares the Airbyte Protocol." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte_protocol_models-0.4.2-py3-none-any.whl", hash = "sha256:d3bbb14d4af9483bd7b08f5eb06f87e7113553bf4baed3998af95be873a0d821"}, - {file = "airbyte_protocol_models-0.4.2.tar.gz", hash = "sha256:67b149d4812f8fdb88396b161274aa73cf0e16f22e35ce44f2bfc4d47e51915c"}, + {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, + {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, ] [package.dependencies] @@ -103,13 +104,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -466,113 +467,48 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] name = "pendulum" -version = "3.0.0" +version = "2.1.2" description = "Python datetimes made easy" optional = false -python-versions = ">=3.8" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ - {file = "pendulum-3.0.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2cf9e53ef11668e07f73190c805dbdf07a1939c3298b78d5a9203a86775d1bfd"}, - {file = "pendulum-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fb551b9b5e6059377889d2d878d940fd0bbb80ae4810543db18e6f77b02c5ef6"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c58227ac260d5b01fc1025176d7b31858c9f62595737f350d22124a9a3ad82d"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60fb6f415fea93a11c52578eaa10594568a6716602be8430b167eb0d730f3332"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b69f6b4dbcb86f2c2fe696ba991e67347bcf87fe601362a1aba6431454b46bde"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:138afa9c373ee450ede206db5a5e9004fd3011b3c6bbe1e57015395cd076a09f"}, - {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:83d9031f39c6da9677164241fd0d37fbfc9dc8ade7043b5d6d62f56e81af8ad2"}, - {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0c2308af4033fa534f089595bcd40a95a39988ce4059ccd3dc6acb9ef14ca44a"}, - {file = "pendulum-3.0.0-cp310-none-win_amd64.whl", hash = "sha256:9a59637cdb8462bdf2dbcb9d389518c0263799189d773ad5c11db6b13064fa79"}, - {file = "pendulum-3.0.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3725245c0352c95d6ca297193192020d1b0c0f83d5ee6bb09964edc2b5a2d508"}, - {file = "pendulum-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6c035f03a3e565ed132927e2c1b691de0dbf4eb53b02a5a3c5a97e1a64e17bec"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:597e66e63cbd68dd6d58ac46cb7a92363d2088d37ccde2dae4332ef23e95cd00"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99a0f8172e19f3f0c0e4ace0ad1595134d5243cf75985dc2233e8f9e8de263ca"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:77d8839e20f54706aed425bec82a83b4aec74db07f26acd039905d1237a5e1d4"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afde30e8146292b059020fbc8b6f8fd4a60ae7c5e6f0afef937bbb24880bdf01"}, - {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:660434a6fcf6303c4efd36713ca9212c753140107ee169a3fc6c49c4711c2a05"}, - {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dee9e5a48c6999dc1106eb7eea3e3a50e98a50651b72c08a87ee2154e544b33e"}, - {file = "pendulum-3.0.0-cp311-none-win_amd64.whl", hash = "sha256:d4cdecde90aec2d67cebe4042fd2a87a4441cc02152ed7ed8fb3ebb110b94ec4"}, - {file = "pendulum-3.0.0-cp311-none-win_arm64.whl", hash = "sha256:773c3bc4ddda2dda9f1b9d51fe06762f9200f3293d75c4660c19b2614b991d83"}, - {file = "pendulum-3.0.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:409e64e41418c49f973d43a28afe5df1df4f1dd87c41c7c90f1a63f61ae0f1f7"}, - {file = "pendulum-3.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a38ad2121c5ec7c4c190c7334e789c3b4624798859156b138fcc4d92295835dc"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fde4d0b2024b9785f66b7f30ed59281bd60d63d9213cda0eb0910ead777f6d37"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b2c5675769fb6d4c11238132962939b960fcb365436b6d623c5864287faa319"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8af95e03e066826f0f4c65811cbee1b3123d4a45a1c3a2b4fc23c4b0dff893b5"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2165a8f33cb15e06c67070b8afc87a62b85c5a273e3aaa6bc9d15c93a4920d6f"}, - {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ad5e65b874b5e56bd942546ea7ba9dd1d6a25121db1c517700f1c9de91b28518"}, - {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17fe4b2c844bbf5f0ece69cfd959fa02957c61317b2161763950d88fed8e13b9"}, - {file = "pendulum-3.0.0-cp312-none-win_amd64.whl", hash = "sha256:78f8f4e7efe5066aca24a7a57511b9c2119f5c2b5eb81c46ff9222ce11e0a7a5"}, - {file = "pendulum-3.0.0-cp312-none-win_arm64.whl", hash = "sha256:28f49d8d1e32aae9c284a90b6bb3873eee15ec6e1d9042edd611b22a94ac462f"}, - {file = "pendulum-3.0.0-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:d4e2512f4e1a4670284a153b214db9719eb5d14ac55ada5b76cbdb8c5c00399d"}, - {file = "pendulum-3.0.0-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:3d897eb50883cc58d9b92f6405245f84b9286cd2de6e8694cb9ea5cb15195a32"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e169cc2ca419517f397811bbe4589cf3cd13fca6dc38bb352ba15ea90739ebb"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f17c3084a4524ebefd9255513692f7e7360e23c8853dc6f10c64cc184e1217ab"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:826d6e258052715f64d05ae0fc9040c0151e6a87aae7c109ba9a0ed930ce4000"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2aae97087872ef152a0c40e06100b3665d8cb86b59bc8471ca7c26132fccd0f"}, - {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ac65eeec2250d03106b5e81284ad47f0d417ca299a45e89ccc69e36130ca8bc7"}, - {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a5346d08f3f4a6e9e672187faa179c7bf9227897081d7121866358af369f44f9"}, - {file = "pendulum-3.0.0-cp37-none-win_amd64.whl", hash = "sha256:235d64e87946d8f95c796af34818c76e0f88c94d624c268693c85b723b698aa9"}, - {file = "pendulum-3.0.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:6a881d9c2a7f85bc9adafcfe671df5207f51f5715ae61f5d838b77a1356e8b7b"}, - {file = "pendulum-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d7762d2076b9b1cb718a6631ad6c16c23fc3fac76cbb8c454e81e80be98daa34"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e8e36a8130819d97a479a0e7bf379b66b3b1b520e5dc46bd7eb14634338df8c"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7dc843253ac373358ffc0711960e2dd5b94ab67530a3e204d85c6e8cb2c5fa10"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a78ad3635d609ceb1e97d6aedef6a6a6f93433ddb2312888e668365908c7120"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b30a137e9e0d1f751e60e67d11fc67781a572db76b2296f7b4d44554761049d6"}, - {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c95984037987f4a457bb760455d9ca80467be792236b69d0084f228a8ada0162"}, - {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d29c6e578fe0f893766c0d286adbf0b3c726a4e2341eba0917ec79c50274ec16"}, - {file = "pendulum-3.0.0-cp38-none-win_amd64.whl", hash = "sha256:deaba8e16dbfcb3d7a6b5fabdd5a38b7c982809567479987b9c89572df62e027"}, - {file = "pendulum-3.0.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b11aceea5b20b4b5382962b321dbc354af0defe35daa84e9ff3aae3c230df694"}, - {file = "pendulum-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a90d4d504e82ad236afac9adca4d6a19e4865f717034fc69bafb112c320dcc8f"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:825799c6b66e3734227756fa746cc34b3549c48693325b8b9f823cb7d21b19ac"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad769e98dc07972e24afe0cff8d365cb6f0ebc7e65620aa1976fcfbcadc4c6f3"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6fc26907eb5fb8cc6188cc620bc2075a6c534d981a2f045daa5f79dfe50d512"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c717eab1b6d898c00a3e0fa7781d615b5c5136bbd40abe82be100bb06df7a56"}, - {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3ddd1d66d1a714ce43acfe337190be055cdc221d911fc886d5a3aae28e14b76d"}, - {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:822172853d7a9cf6da95d7b66a16c7160cb99ae6df55d44373888181d7a06edc"}, - {file = "pendulum-3.0.0-cp39-none-win_amd64.whl", hash = "sha256:840de1b49cf1ec54c225a2a6f4f0784d50bd47f68e41dc005b7f67c7d5b5f3ae"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3b1f74d1e6ffe5d01d6023870e2ce5c2191486928823196f8575dcc786e107b1"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:729e9f93756a2cdfa77d0fc82068346e9731c7e884097160603872686e570f07"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e586acc0b450cd21cbf0db6bae386237011b75260a3adceddc4be15334689a9a"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22e7944ffc1f0099a79ff468ee9630c73f8c7835cd76fdb57ef7320e6a409df4"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fa30af36bd8e50686846bdace37cf6707bdd044e5cb6e1109acbad3277232e04"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:440215347b11914ae707981b9a57ab9c7b6983ab0babde07063c6ee75c0dc6e7"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:314c4038dc5e6a52991570f50edb2f08c339debdf8cea68ac355b32c4174e820"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5acb1d386337415f74f4d1955c4ce8d0201978c162927d07df8eb0692b2d8533"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a789e12fbdefaffb7b8ac67f9d8f22ba17a3050ceaaa635cd1cc4645773a4b1e"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:860aa9b8a888e5913bd70d819306749e5eb488e6b99cd6c47beb701b22bdecf5"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:5ebc65ea033ef0281368217fbf59f5cb05b338ac4dd23d60959c7afcd79a60a0"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d9fef18ab0386ef6a9ac7bad7e43ded42c83ff7ad412f950633854f90d59afa8"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1c134ba2f0571d0b68b83f6972e2307a55a5a849e7dac8505c715c531d2a8795"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:385680812e7e18af200bb9b4a49777418c32422d05ad5a8eb85144c4a285907b"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eec91cd87c59fb32ec49eb722f375bd58f4be790cae11c1b70fac3ee4f00da0"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4386bffeca23c4b69ad50a36211f75b35a4deb6210bdca112ac3043deb7e494a"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dfbcf1661d7146d7698da4b86e7f04814221081e9fe154183e34f4c5f5fa3bf8"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:04a1094a5aa1daa34a6b57c865b25f691848c61583fb22722a4df5699f6bf74c"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5b0ec85b9045bd49dd3a3493a5e7ddfd31c36a2a60da387c419fa04abcaecb23"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0a15b90129765b705eb2039062a6daf4d22c4e28d1a54fa260892e8c3ae6e157"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:bb8f6d7acd67a67d6fedd361ad2958ff0539445ef51cbe8cd288db4306503cd0"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd69b15374bef7e4b4440612915315cc42e8575fcda2a3d7586a0d88192d0c88"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc00f8110db6898360c53c812872662e077eaf9c75515d53ecc65d886eec209a"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:83a44e8b40655d0ba565a5c3d1365d27e3e6778ae2a05b69124db9e471255c4a"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1a3604e9fbc06b788041b2a8b78f75c243021e0f512447806a6d37ee5214905d"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:92c307ae7accebd06cbae4729f0ba9fa724df5f7d91a0964b1b972a22baa482b"}, - {file = "pendulum-3.0.0.tar.gz", hash = "sha256:5d034998dea404ec31fae27af6b22cff1708f830a1ed7353be4d1019bb9f584e"}, + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, ] [package.dependencies] -python-dateutil = ">=2.6" -tzdata = ">=2020.1" - -[package.extras] -test = ["time-machine (>=2.6.0)"] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" [[package]] name = "platformdirs" @@ -667,6 +603,21 @@ typing-extensions = ">=4.2.0" dotenv = ["python-dotenv (>=0.10.4)"] email = ["email-validator (>=1.0.3)"] +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + [[package]] name = "pyrsistent" version = "0.20.0" @@ -751,18 +702,29 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] six = ">=1.5" +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + [[package]] name = "pyyaml" version = "6.0.1" @@ -846,13 +808,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -864,15 +826,15 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" @@ -895,19 +857,19 @@ test = ["fixtures", "mock", "purl", "pytest", "sphinx", "testrepository (>=0.0.1 [[package]] name = "setuptools" -version = "69.1.0" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -943,24 +905,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, -] - -[[package]] -name = "tzdata" -version = "2024.1" -description = "Provider of IANA time zone data" -optional = false -python-versions = ">=2" -files = [ - {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, - {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -979,13 +930,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1090,4 +1041,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "8cc7e89fec6f2188200b8327a969602f26fcd02a5929b5e5504c954eae99f3fc" +content-hash = "df9de409feed610e08e732ea9c7f06017133e96f2798dec42e1f8012c747cf24" diff --git a/airbyte-integrations/connectors/source-tiktok-marketing/pyproject.toml b/airbyte-integrations/connectors/source-tiktok-marketing/pyproject.toml index 909a3989301d5..54fe5a4206543 100644 --- a/airbyte-integrations/connectors/source-tiktok-marketing/pyproject.toml +++ b/airbyte-integrations/connectors/source-tiktok-marketing/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "3.9.3" +version = "3.9.4" name = "source-tiktok-marketing" description = "Source implementation for Tiktok Marketing." authors = [ "Airbyte ",] @@ -17,7 +17,7 @@ include = "source_tiktok_marketing" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "==0.52.10" +airbyte-cdk = "^0" [tool.poetry.scripts] source-tiktok-marketing = "source_tiktok_marketing.run:run" diff --git a/airbyte-integrations/connectors/source-tiktok-marketing/source_tiktok_marketing/streams.py b/airbyte-integrations/connectors/source-tiktok-marketing/source_tiktok_marketing/streams.py index d38cc2f1759bb..e42a67f256021 100644 --- a/airbyte-integrations/connectors/source-tiktok-marketing/source_tiktok_marketing/streams.py +++ b/airbyte-integrations/connectors/source-tiktok-marketing/source_tiktok_marketing/streams.py @@ -430,6 +430,11 @@ def parse_response( yield record def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: + if not self.cursor_field: + # BasicReports streams are incremental. However, report streams configured to use LIFETIME granularity only work as + # full refresh and don't have a cursor field. There is no state value to extract from the record + return {} + # needs to save a last state if all advertisers are used before only current_stream_state_value = (self.select_cursor_field_value(current_stream_state)) or "" diff --git a/airbyte-integrations/connectors/source-tiktok-marketing/unit_tests/streams_test.py b/airbyte-integrations/connectors/source-tiktok-marketing/unit_tests/streams_test.py index f4a4d10aed0c3..b77b35b3a5286 100644 --- a/airbyte-integrations/connectors/source-tiktok-marketing/unit_tests/streams_test.py +++ b/airbyte-integrations/connectors/source-tiktok-marketing/unit_tests/streams_test.py @@ -256,6 +256,16 @@ def test_get_updated_state(): assert state2_modify_time.dict() == "2020-01-08 00:00:00" +def test_get_updated_state_no_cursor_field(): + """ + Some full_refresh streams (which don't define a cursor) inherit the get_updated_state() method from an incremental + stream. This test verifies that the stream does not attempt to extract the cursor value from the latest record + """ + ads_reports = AdsReports(**CONFIG_SANDBOX) + state1 = ads_reports.get_updated_state(current_stream_state={}, latest_record={}) + assert state1 == {} + + @pytest.mark.parametrize( "value, expected", [ diff --git a/airbyte-integrations/connectors/source-twilio/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-twilio/integration_tests/expected_records.jsonl index b0476fbfb2d04..65bb012b98d01 100644 --- a/airbyte-integrations/connectors/source-twilio/integration_tests/expected_records.jsonl +++ b/airbyte-integrations/connectors/source-twilio/integration_tests/expected_records.jsonl @@ -7,9 +7,9 @@ {"stream": "available_phone_number_countries", "data": {"country_code": "AU", "country": "Australia", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/AU.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/AU/Local.json", "toll_free": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/AU/TollFree.json", "mobile": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/AU/Mobile.json"}}, "emitted_at": 1691419684730} {"stream": "available_phone_number_countries", "data": {"country_code": "BE", "country": "Belgium", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/BE.json", "beta": false, "subresource_uris": {"toll_free": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/BE/TollFree.json"}}, "emitted_at": 1691419684732} {"stream": "available_phone_number_countries", "data": {"country_code": "SE", "country": "Sweden", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/SE.json", "beta": false, "subresource_uris": {"mobile": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/SE/Mobile.json"}}, "emitted_at": 1691419684732} -{"stream": "calls", "data": {"date_updated": "2023-06-15T19:57:59Z", "price_unit": "USD", "parent_call_sid": null, "caller_name": null, "duration": 13, "from": "+12056890337", "to": "+12056561170", "annotation": null, "answered_by": null, "sid": "CAb70f3b70167dd8d4ee2e1dc15db64e02", "queue_time": 0, "price": -0.0085, "api_version": "2010-04-01", "status": "completed", "direction": "inbound", "start_time": "2023-06-15T19:57:46Z", "date_created": "2023-06-15T19:57:46Z", "from_formatted": "(205) 689-0337", "group_sid": null, "trunk_sid": "", "forwarded_from": "+12056561170", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAb70f3b70167dd8d4ee2e1dc15db64e02.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "end_time": "2023-06-15T19:57:59Z", "to_formatted": "(205) 656-1170", "phone_number_sid": "PNe40bd7f3ac343b32fd51275d2d5b3dcc", "subresource_uris": {"feedback": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAb70f3b70167dd8d4ee2e1dc15db64e02/Feedback.json", "user_defined_messages": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAb70f3b70167dd8d4ee2e1dc15db64e02/UserDefinedMessages.json", "notifications": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAb70f3b70167dd8d4ee2e1dc15db64e02/Notifications.json", "recordings": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAb70f3b70167dd8d4ee2e1dc15db64e02/Recordings.json", "streams": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAb70f3b70167dd8d4ee2e1dc15db64e02/Streams.json", "payments": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAb70f3b70167dd8d4ee2e1dc15db64e02/Payments.json", "user_defined_message_subscriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAb70f3b70167dd8d4ee2e1dc15db64e02/UserDefinedMessageSubscriptions.json", "siprec": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAb70f3b70167dd8d4ee2e1dc15db64e02/Siprec.json", "events": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAb70f3b70167dd8d4ee2e1dc15db64e02/Events.json", "feedback_summaries": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/FeedbackSummary.json"}}, "emitted_at": 1694639568884} -{"stream": "calls", "data": {"date_updated": "2023-03-15T11:35:20Z", "price_unit": "USD", "parent_call_sid": null, "caller_name": "", "duration": 0, "from": "+12056561170", "to": "+14156236785", "annotation": null, "answered_by": null, "sid": "CA651f21262d4308879ea685e704dd0384", "queue_time": 0, "price": null, "api_version": "2010-04-01", "status": "busy", "direction": "outbound-api", "start_time": "2023-03-15T11:35:03Z", "date_created": "2023-03-15T11:35:03Z", "from_formatted": "(205) 656-1170", "group_sid": null, "trunk_sid": "", "forwarded_from": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA651f21262d4308879ea685e704dd0384.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "end_time": "2023-03-15T11:35:20Z", "to_formatted": "(415) 623-6785", "phone_number_sid": "PNe40bd7f3ac343b32fd51275d2d5b3dcc", "subresource_uris": {"feedback": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA651f21262d4308879ea685e704dd0384/Feedback.json", "user_defined_messages": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA651f21262d4308879ea685e704dd0384/UserDefinedMessages.json", "notifications": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA651f21262d4308879ea685e704dd0384/Notifications.json", "recordings": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA651f21262d4308879ea685e704dd0384/Recordings.json", "streams": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA651f21262d4308879ea685e704dd0384/Streams.json", "payments": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA651f21262d4308879ea685e704dd0384/Payments.json", "user_defined_message_subscriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA651f21262d4308879ea685e704dd0384/UserDefinedMessageSubscriptions.json", "siprec": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA651f21262d4308879ea685e704dd0384/Siprec.json", "events": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA651f21262d4308879ea685e704dd0384/Events.json", "feedback_summaries": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/FeedbackSummary.json"}}, "emitted_at": 1694639568886} -{"stream": "calls", "data": {"date_updated": "2023-02-16T14:37:32Z", "price_unit": "USD", "parent_call_sid": null, "caller_name": null, "duration": 14, "from": "+380636306253", "to": "+13603004201", "annotation": null, "answered_by": null, "sid": "CA9121cd06fb7a1c0c96664c089621c979", "queue_time": 0, "price": -0.0085, "api_version": "2010-04-01", "status": "completed", "direction": "inbound", "start_time": "2023-02-16T14:37:18Z", "date_created": "2023-02-16T14:37:18Z", "from_formatted": "+380636306253", "group_sid": null, "trunk_sid": "", "forwarded_from": "+13603004201", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9121cd06fb7a1c0c96664c089621c979.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "end_time": "2023-02-16T14:37:32Z", "to_formatted": "(360) 300-4201", "phone_number_sid": "PN1fe31291fa81c17bf71cd128bc649e68", "subresource_uris": {"feedback": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9121cd06fb7a1c0c96664c089621c979/Feedback.json", "user_defined_messages": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9121cd06fb7a1c0c96664c089621c979/UserDefinedMessages.json", "notifications": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9121cd06fb7a1c0c96664c089621c979/Notifications.json", "recordings": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9121cd06fb7a1c0c96664c089621c979/Recordings.json", "streams": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9121cd06fb7a1c0c96664c089621c979/Streams.json", "payments": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9121cd06fb7a1c0c96664c089621c979/Payments.json", "user_defined_message_subscriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9121cd06fb7a1c0c96664c089621c979/UserDefinedMessageSubscriptions.json", "siprec": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9121cd06fb7a1c0c96664c089621c979/Siprec.json", "events": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9121cd06fb7a1c0c96664c089621c979/Events.json", "feedback_summaries": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/FeedbackSummary.json"}}, "emitted_at": 1694639568887} +{"stream": "calls", "data": {"date_updated": "2024-02-02T17:11:40Z", "price_unit": "USD", "parent_call_sid": null, "caller_name": null, "duration": 93, "from": "+18004246423", "to": "+12766776270", "annotation": null, "answered_by": null, "sid": "CA15ff3bb763d558f8888b6317d0d25129", "queue_time": 0, "price": -0.017, "api_version": "2010-04-01", "status": "completed", "direction": "inbound", "start_time": "2024-02-02T17:10:07Z", "date_created": "2024-02-02T17:10:07Z", "from_formatted": "(800) 424-6423", "group_sid": null, "trunk_sid": "", "forwarded_from": "+13803335311", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA15ff3bb763d558f8888b6317d0d25129.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "end_time": "2024-02-02T17:11:40Z", "to_formatted": "(276) 677-6270", "phone_number_sid": "PNb5c8f3452694ee2b4bdcf71194ab047f", "subresource_uris": {"user_defined_messages": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA15ff3bb763d558f8888b6317d0d25129/UserDefinedMessages.json", "notifications": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA15ff3bb763d558f8888b6317d0d25129/Notifications.json", "recordings": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA15ff3bb763d558f8888b6317d0d25129/Recordings.json", "streams": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA15ff3bb763d558f8888b6317d0d25129/Streams.json", "payments": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA15ff3bb763d558f8888b6317d0d25129/Payments.json", "user_defined_message_subscriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA15ff3bb763d558f8888b6317d0d25129/UserDefinedMessageSubscriptions.json", "siprec": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA15ff3bb763d558f8888b6317d0d25129/Siprec.json", "events": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA15ff3bb763d558f8888b6317d0d25129/Events.json"}}, "emitted_at": 1710758042377} +{"stream": "calls", "data": {"date_updated": "2024-02-07T15:44:05Z", "price_unit": "USD", "parent_call_sid": null, "caller_name": null, "duration": 86, "from": "+13523750714", "to": "+19704017747", "annotation": null, "answered_by": null, "sid": "CA9a7ba19f8585573f4eda12ecef5c3109", "queue_time": 0, "price": -0.017, "api_version": "2010-04-01", "status": "completed", "direction": "inbound", "start_time": "2024-02-07T15:42:39Z", "date_created": "2024-02-07T15:42:39Z", "from_formatted": "(352) 375-0714", "group_sid": null, "trunk_sid": "", "forwarded_from": "+19704017747", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9a7ba19f8585573f4eda12ecef5c3109.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "end_time": "2024-02-07T15:44:05Z", "to_formatted": "(970) 401-7747", "phone_number_sid": "PN63c288b22a08ce3339371b4e6e10877e", "subresource_uris": {"user_defined_messages": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9a7ba19f8585573f4eda12ecef5c3109/UserDefinedMessages.json", "notifications": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9a7ba19f8585573f4eda12ecef5c3109/Notifications.json", "recordings": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9a7ba19f8585573f4eda12ecef5c3109/Recordings.json", "streams": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9a7ba19f8585573f4eda12ecef5c3109/Streams.json", "payments": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9a7ba19f8585573f4eda12ecef5c3109/Payments.json", "user_defined_message_subscriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9a7ba19f8585573f4eda12ecef5c3109/UserDefinedMessageSubscriptions.json", "siprec": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9a7ba19f8585573f4eda12ecef5c3109/Siprec.json", "events": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9a7ba19f8585573f4eda12ecef5c3109/Events.json"}}, "emitted_at": 1710758042378} +{"stream": "calls", "data": {"date_updated": "2024-02-13T15:55:29Z", "price_unit": "USD", "parent_call_sid": null, "caller_name": null, "duration": 87, "from": "+12102250171", "to": "+12316133455", "annotation": null, "answered_by": null, "sid": "CA4aa861962935306778bd295482139954", "queue_time": 0, "price": -0.017, "api_version": "2010-04-01", "status": "completed", "direction": "inbound", "start_time": "2024-02-13T15:54:02Z", "date_created": "2024-02-13T15:54:02Z", "from_formatted": "(210) 225-0171", "group_sid": null, "trunk_sid": "", "forwarded_from": "+12316133455", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA4aa861962935306778bd295482139954.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "end_time": "2024-02-13T15:55:29Z", "to_formatted": "(231) 613-3455", "phone_number_sid": "PN08b70304eae2bc80ade646afa929b77e", "subresource_uris": {"user_defined_messages": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA4aa861962935306778bd295482139954/UserDefinedMessages.json", "notifications": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA4aa861962935306778bd295482139954/Notifications.json", "recordings": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA4aa861962935306778bd295482139954/Recordings.json", "streams": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA4aa861962935306778bd295482139954/Streams.json", "payments": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA4aa861962935306778bd295482139954/Payments.json", "user_defined_message_subscriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA4aa861962935306778bd295482139954/UserDefinedMessageSubscriptions.json", "siprec": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA4aa861962935306778bd295482139954/Siprec.json", "events": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA4aa861962935306778bd295482139954/Events.json"}}, "emitted_at": 1710758042379} {"stream": "conferences", "data": {"status": "completed", "reason_conference_ended": "last-participant-left", "date_updated": "2023-02-16T10:09:23Z", "region": "us1", "friendly_name": "Conference4", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Conferences/CF582c8fbd75e1fa02301b553711f87e7f.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "call_sid_ending_conference": "CA9c8671884602ca5e64895a917ad1ba90", "sid": "CF582c8fbd75e1fa02301b553711f87e7f", "date_created": "2023-02-16T10:09:11Z", "api_version": "2010-04-01", "subresource_uris": {"participants": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Conferences/CF582c8fbd75e1fa02301b553711f87e7f/Participants.json", "recordings": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Conferences/CF582c8fbd75e1fa02301b553711f87e7f/Recordings.json"}}, "emitted_at": 1709305079075} {"stream": "conferences", "data": {"status": "completed", "reason_conference_ended": "last-participant-left", "date_updated": "2023-02-15T14:49:37Z", "region": "us1", "friendly_name": "Conference2", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Conferences/CF15e8707d15e02c1af88809b159ff8b42.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "call_sid_ending_conference": "CA04ae9210566d36c425bae2087736f6ac", "sid": "CF15e8707d15e02c1af88809b159ff8b42", "date_created": "2023-02-15T14:49:21Z", "api_version": "2010-04-01", "subresource_uris": {"participants": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Conferences/CF15e8707d15e02c1af88809b159ff8b42/Participants.json", "recordings": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Conferences/CF15e8707d15e02c1af88809b159ff8b42/Recordings.json"}}, "emitted_at": 1691419855509} {"stream": "conferences", "data": {"status": "completed", "reason_conference_ended": "last-participant-left", "date_updated": "2023-02-16T09:57:39Z", "region": "us1", "friendly_name": "Conference2", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Conferences/CF33199d5a9a0b202b3bd9558438a052d8.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "call_sid_ending_conference": "CAf8464ca5eda3ab7cc3e2d86cdb3c720f", "sid": "CF33199d5a9a0b202b3bd9558438a052d8", "date_created": "2023-02-16T09:57:11Z", "api_version": "2010-04-01", "subresource_uris": {"participants": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Conferences/CF33199d5a9a0b202b3bd9558438a052d8/Participants.json", "recordings": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Conferences/CF33199d5a9a0b202b3bd9558438a052d8/Recordings.json"}}, "emitted_at": 1691419855510} @@ -23,11 +23,9 @@ {"stream": "incoming_phone_numbers", "data": {"origin": "twilio", "status": "in-use", "address_requirements": "none", "date_updated": "2023-03-27T07:58:14Z", "voice_url": "https://handler.twilio.com/twiml/EHb6471af720e8b66baa14e7226227893b", "sms_application_sid": "", "voice_fallback_method": "POST", "emergency_address_status": "unregistered", "identity_sid": null, "emergency_status": "Active", "voice_application_sid": "", "capabilities": {"fax": false, "voice": true, "sms": true, "mms": true}, "api_version": "2010-04-01", "sid": "PNd74715bab1be123cc9004f03b85bb067", "status_callback_method": "POST", "voice_fallback_url": "", "phone_number": "+14246220939", "emergency_address_sid": null, "beta": false, "address_sid": "AD0164001bc0f84d9bc29e17378fe47c20", "sms_url": "https://webhooks.twilio.com/v1/Accounts/ACdade166c12e160e9ed0a6088226718fb/Flows/FWbd726b7110b21294a9f27a47f4ab0080", "voice_method": "POST", "voice_caller_id_lookup": false, "friendly_name": "Test phone number 9", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/IncomingPhoneNumbers/PNd74715bab1be123cc9004f03b85bb067.json", "sms_fallback_url": "", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "sms_method": "POST", "trunk_sid": null, "sms_fallback_method": "POST", "date_created": "2023-02-16T14:34:00Z", "bundle_sid": null, "status_callback": "", "subresource_uris": {"assigned_add_ons": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/IncomingPhoneNumbers/PNd74715bab1be123cc9004f03b85bb067/AssignedAddOns.json"}}, "emitted_at": 1691419867848} {"stream": "incoming_phone_numbers", "data": {"origin": "twilio", "status": "in-use", "address_requirements": "none", "date_updated": "2023-03-27T07:58:40Z", "voice_url": "https://handler.twilio.com/twiml/EHb77bc7c1f889b6c9fe5202d0463edfc4", "sms_application_sid": "", "voice_fallback_method": "POST", "emergency_address_status": "unregistered", "identity_sid": null, "emergency_status": "Active", "voice_application_sid": "", "capabilities": {"fax": false, "voice": true, "sms": true, "mms": true}, "api_version": "2010-04-01", "sid": "PN99400a65bf5a4305d5420060842d4d2c", "status_callback_method": "POST", "voice_fallback_url": "", "phone_number": "+19125901057", "emergency_address_sid": null, "beta": false, "address_sid": "AD0e69bf9110f766787a88f99b507c9eeb", "sms_url": "https://webhooks.twilio.com/v1/Accounts/ACdade166c12e160e9ed0a6088226718fb/Flows/FWbd726b7110b21294a9f27a47f4ab0080", "voice_method": "POST", "voice_caller_id_lookup": false, "friendly_name": "Test phone number 2", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/IncomingPhoneNumbers/PN99400a65bf5a4305d5420060842d4d2c.json", "sms_fallback_url": "", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "sms_method": "POST", "trunk_sid": null, "sms_fallback_method": "POST", "date_created": "2023-02-15T09:31:24Z", "bundle_sid": null, "status_callback": "", "subresource_uris": {"assigned_add_ons": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/IncomingPhoneNumbers/PN99400a65bf5a4305d5420060842d4d2c/AssignedAddOns.json"}}, "emitted_at": 1691419867849} {"stream": "message_media", "data": {"sid": "ME66ee8039997ee13231f5bd4a9121162c", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "parent_sid": "MMf491b7a98d00cdf54afc20b1839cea4e", "content_type": "image/png", "date_created": "2023-07-19T07:03:14Z", "date_updated": "2023-07-19T07:03:14Z", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/MMf491b7a98d00cdf54afc20b1839cea4e/Media/ME66ee8039997ee13231f5bd4a9121162c.json"}, "emitted_at": 1691419887396} -{"stream": "message_media", "data": {"sid": "ME34324546a4398b36fc96fd36500038c3", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "parent_sid": "MM56662e159d1a5d1f1c6e2d43202b7940", "content_type": "image/png", "date_created": "2023-02-14T14:02:28Z", "date_updated": "2023-02-14T14:02:28Z", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/MM56662e159d1a5d1f1c6e2d43202b7940/Media/ME34324546a4398b36fc96fd36500038c3.json"}, "emitted_at": 1691419915272} -{"stream": "message_media", "data": {"sid": "ME45c86c927aa3eb6749bac07b9bc6f418", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "parent_sid": "MM5e9241ae9a444f8061b28e3de05fe818", "content_type": "image/png", "date_created": "2023-02-14T14:02:59Z", "date_updated": "2023-02-14T14:02:59Z", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/MM5e9241ae9a444f8061b28e3de05fe818/Media/ME45c86c927aa3eb6749bac07b9bc6f418.json"}, "emitted_at": 1691419915437} -{"stream": "messages", "data": {"body": "Hi there, Test 3!", "num_segments": 1, "direction": "outbound-api", "from": "+12056561170", "date_updated": "2023-02-14T15:53:24Z", "price": -0.02, "error_message": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/MM863b367b7d0725532b80a161c9dab4e5.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "num_media": 1, "to": "+14156236785", "date_created": "2023-02-14T14:03:42Z", "status": "sent", "sid": "MM863b367b7d0725532b80a161c9dab4e5", "date_sent": "2023-02-14T14:03:43Z", "messaging_service_sid": null, "error_code": null, "price_unit": "USD", "api_version": "2010-04-01", "subresource_uris": {"media": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/MM863b367b7d0725532b80a161c9dab4e5/Media.json", "feedback": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/MM863b367b7d0725532b80a161c9dab4e5/Feedback.json"}}, "emitted_at": 1709305631427} -{"stream": "messages", "data": {"body": "Hi there, Test 2!", "num_segments": 1, "direction": "outbound-api", "from": "+12056561170", "date_updated": "2023-02-14T15:54:04Z", "price": -0.02, "error_message": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/MM5a93ae7d20c07ceae87cd2649485ba72.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "num_media": 1, "to": "+14156236785", "date_created": "2023-02-14T14:03:47Z", "status": "sent", "sid": "MM5a93ae7d20c07ceae87cd2649485ba72", "date_sent": "2023-02-14T14:03:48Z", "messaging_service_sid": null, "error_code": null, "price_unit": "USD", "api_version": "2010-04-01", "subresource_uris": {"media": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/MM5a93ae7d20c07ceae87cd2649485ba72/Media.json", "feedback": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/MM5a93ae7d20c07ceae87cd2649485ba72/Feedback.json"}}, "emitted_at": 1709305631428} -{"stream": "messages", "data": {"body": "Hi there, Test 1!", "num_segments": 1, "direction": "outbound-api", "from": "+12056561170", "date_updated": "2023-02-14T16:12:21Z", "price": -0.02, "error_message": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/MM89c1a0785d96b5faa30d65aa644c70b4.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "num_media": 1, "to": "+14156236785", "date_created": "2023-02-14T14:03:52Z", "status": "sent", "sid": "MM89c1a0785d96b5faa30d65aa644c70b4", "date_sent": "2023-02-14T14:03:53Z", "messaging_service_sid": null, "error_code": null, "price_unit": "USD", "api_version": "2010-04-01", "subresource_uris": {"media": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/MM89c1a0785d96b5faa30d65aa644c70b4/Media.json", "feedback": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/MM89c1a0785d96b5faa30d65aa644c70b4/Feedback.json"}}, "emitted_at": 1709305631429} +{"stream": "messages", "data": {"body": "Bring your ass..let's get it now", "num_segments": 1, "direction": "inbound", "from": "+12052003153", "date_updated": "2023-07-01T18:57:02Z", "price": -0.01, "error_message": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/MMb241f4a1fc983b30c085c70e5ddcb6b9.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "num_media": 0, "to": "+12056561170", "date_created": "2023-07-01T18:57:02Z", "status": "received", "sid": "MMb241f4a1fc983b30c085c70e5ddcb6b9", "date_sent": "2023-07-01T18:57:02Z", "messaging_service_sid": null, "error_code": null, "price_unit": "USD", "api_version": "2010-04-01", "subresource_uris": {"media": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/MMb241f4a1fc983b30c085c70e5ddcb6b9/Media.json", "feedback": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/MMb241f4a1fc983b30c085c70e5ddcb6b9/Feedback.json"}}, "emitted_at": 1710946751761} +{"stream": "messages", "data": {"body": "Hi there, Test 1!", "num_segments": 1, "direction": "outbound-api", "from": "+12056561170", "date_updated": "2023-07-24T07:03:18Z", "price": -0.02, "error_message": "Unknown error", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/MMf491b7a98d00cdf54afc20b1839cea4e.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "num_media": 1, "to": "+14156236785", "date_created": "2023-07-19T07:03:14Z", "status": "undelivered", "sid": "MMf491b7a98d00cdf54afc20b1839cea4e", "date_sent": "2023-07-19T07:03:15Z", "messaging_service_sid": null, "error_code": "30008", "price_unit": "USD", "api_version": "2010-04-01", "subresource_uris": {"media": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/MMf491b7a98d00cdf54afc20b1839cea4e/Media.json", "feedback": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/MMf491b7a98d00cdf54afc20b1839cea4e/Feedback.json"}}, "emitted_at": 1710946751939} +{"stream": "messages", "data": {"account_sid": "ACdade166c12e160e9ed0a6088226718fb", "api_version": "2010-04-01", "body": "That $200 didn't go through! Owe me $400 now ", "date_created": "2024-01-19T13:05:18Z", "date_sent": "2024-01-19T13:05:18Z", "date_updated": "2024-01-19T13:05:18Z", "direction": "inbound", "error_code": null, "error_message": null, "from": "+12058267189", "messaging_service_sid": null, "num_media": 0, "num_segments": 1, "price": -0.0079, "price_unit": "USD", "sid": "SM8cfbddf906bdb372750e9f1569d4f63c", "status": "received", "subresource_uris": {"feedback": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/SM8cfbddf906bdb372750e9f1569d4f63c/Feedback.json", "media": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/SM8cfbddf906bdb372750e9f1569d4f63c/Media.json"}, "to": "+12056561170", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/SM8cfbddf906bdb372750e9f1569d4f63c.json"}, "emitted_at": 1710946753193} {"stream": "outgoing_caller_ids", "data": {"phone_number": "+14153597503", "date_updated": "2020-11-17T04:17:37Z", "friendly_name": "(415) 359-7503", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/OutgoingCallerIds/PN16ba111c0df5756cfe37044ed0ee3136.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "sid": "PN16ba111c0df5756cfe37044ed0ee3136", "date_created": "2020-11-17T04:17:37Z"}, "emitted_at": 1691419960444} {"stream": "outgoing_caller_ids", "data": {"phone_number": "+18023494963", "date_updated": "2020-12-11T04:28:02Z", "friendly_name": "(802) 349-4963", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/OutgoingCallerIds/PN726d635f970c30193cd12e7b994510a1.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "sid": "PN726d635f970c30193cd12e7b994510a1", "date_created": "2020-12-11T04:28:02Z"}, "emitted_at": 1691419960446} {"stream": "outgoing_caller_ids", "data": {"phone_number": "+14156236785", "date_updated": "2023-02-15T15:33:09Z", "friendly_name": "Slack sms channel", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/OutgoingCallerIds/PNbb9c658169cfd057a46cdce9dc00afa3.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "sid": "PNbb9c658169cfd057a46cdce9dc00afa3", "date_created": "2023-02-14T12:11:53Z"}, "emitted_at": 1691419960447} diff --git a/airbyte-integrations/connectors/source-twilio/metadata.yaml b/airbyte-integrations/connectors/source-twilio/metadata.yaml index 627130c6cbc0e..acbcd4e81356d 100644 --- a/airbyte-integrations/connectors/source-twilio/metadata.yaml +++ b/airbyte-integrations/connectors/source-twilio/metadata.yaml @@ -13,12 +13,13 @@ data: connectorSubtype: api connectorType: source definitionId: b9dc6155-672e-42ea-b10d-9f1f1fb95ab1 - dockerImageTag: 0.10.2 + dockerImageTag: 0.11.0 dockerRepository: airbyte/source-twilio documentationUrl: https://docs.airbyte.com/integrations/sources/twilio githubIssueLabel: source-twilio icon: twilio.svg license: MIT + maxSecondsBetweenMessages: 5400 name: Twilio remoteRegistries: pypi: diff --git a/airbyte-integrations/connectors/source-twilio/poetry.lock b/airbyte-integrations/connectors/source-twilio/poetry.lock index 92d0ef9ca49dc..301dbb8e4c29e 100644 --- a/airbyte-integrations/connectors/source-twilio/poetry.lock +++ b/airbyte-integrations/connectors/source-twilio/poetry.lock @@ -1,18 +1,18 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.55.0" +version = "0.72.1" description = "A framework for writing Airbyte Connectors." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte-cdk-0.55.0.tar.gz", hash = "sha256:b75ffcc9c94453b41c49e25c12404e5741c238ac2d42949420101496aa4c143e"}, - {file = "airbyte_cdk-0.55.0-py3-none-any.whl", hash = "sha256:f9b7ea2484c7f62ad3557dbf29cdec8f7783e146513f29810f7fbacc1aeeb303"}, + {file = "airbyte-cdk-0.72.1.tar.gz", hash = "sha256:1dbd0a11f3784cfdd5afa9f40315c9a6123e803be91f9f861642a78e7ee14cd9"}, + {file = "airbyte_cdk-0.72.1-py3-none-any.whl", hash = "sha256:849077805442286de99f589ecba4be82491a3d9d3f516ce1a8b0cbaf303db9a4"}, ] [package.dependencies] -airbyte-protocol-models = "0.4.2" +airbyte-protocol-models = "0.5.1" backoff = "*" cachetools = "*" Deprecated = ">=1.2,<2.0" @@ -22,7 +22,7 @@ isodate = ">=0.6.1,<0.7.0" Jinja2 = ">=3.1.2,<3.2.0" jsonref = ">=0.2,<1.0" jsonschema = ">=3.2.0,<3.3.0" -pendulum = "*" +pendulum = "<3.0.0" pydantic = ">=1.10.8,<2.0.0" pyrate-limiter = ">=3.1.0,<3.2.0" python-dateutil = "*" @@ -32,20 +32,20 @@ requests-cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] [[package]] name = "airbyte-protocol-models" -version = "0.4.2" +version = "0.5.1" description = "Declares the Airbyte Protocol." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte_protocol_models-0.4.2-py3-none-any.whl", hash = "sha256:d3bbb14d4af9483bd7b08f5eb06f87e7113553bf4baed3998af95be873a0d821"}, - {file = "airbyte_protocol_models-0.4.2.tar.gz", hash = "sha256:67b149d4812f8fdb88396b161274aa73cf0e16f22e35ce44f2bfc4d47e51915c"}, + {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, + {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, ] [package.dependencies] @@ -104,13 +104,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -481,13 +481,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -716,13 +716,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -764,7 +764,6 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -822,13 +821,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -840,15 +839,15 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" @@ -871,19 +870,19 @@ test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "tes [[package]] name = "setuptools" -version = "69.1.0" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -909,13 +908,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -934,13 +933,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1045,4 +1044,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "fcc0ecc10e4b4573a02085f518f90ca99bf5794d301d0c73e1a94258cffad925" +content-hash = "e21f9940f1301e1a1543fa09508c96610c08b9252e7ead3034021b0f522769cc" diff --git a/airbyte-integrations/connectors/source-twilio/pyproject.toml b/airbyte-integrations/connectors/source-twilio/pyproject.toml index 94c5b71102cea..288b6fa1d0cb5 100644 --- a/airbyte-integrations/connectors/source-twilio/pyproject.toml +++ b/airbyte-integrations/connectors/source-twilio/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "0.10.2" +version = "0.11.0" name = "source-twilio" description = "Source implementation for Twilio." authors = [ "Airbyte ",] @@ -17,7 +17,7 @@ include = "source_twilio" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "==0.55.0" +airbyte-cdk = "^0" pendulum = "==2.1.2" requests = "==2.31.0" diff --git a/airbyte-integrations/connectors/source-typeform/metadata.yaml b/airbyte-integrations/connectors/source-typeform/metadata.yaml index 186fe506f38c4..3d36ab3d607c7 100644 --- a/airbyte-integrations/connectors/source-typeform/metadata.yaml +++ b/airbyte-integrations/connectors/source-typeform/metadata.yaml @@ -10,12 +10,13 @@ data: connectorSubtype: api connectorType: source definitionId: e7eff203-90bf-43e5-a240-19ea3056c474 - dockerImageTag: 1.2.5 + dockerImageTag: 1.2.6 dockerRepository: airbyte/source-typeform documentationUrl: https://docs.airbyte.com/integrations/sources/typeform githubIssueLabel: source-typeform icon: typeform.svg license: MIT + maxSecondsBetweenMessages: 1 name: Typeform remoteRegistries: pypi: diff --git a/airbyte-integrations/connectors/source-typeform/poetry.lock b/airbyte-integrations/connectors/source-typeform/poetry.lock index cfbe169883b46..7834a189dff70 100644 --- a/airbyte-integrations/connectors/source-typeform/poetry.lock +++ b/airbyte-integrations/connectors/source-typeform/poetry.lock @@ -1,14 +1,14 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.59.0" +version = "0.70.1" description = "A framework for writing Airbyte Connectors." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte-cdk-0.59.0.tar.gz", hash = "sha256:2f7bc07556cc7f42f0daf41d09be08fd22102864d087a27c8999f6f13fe67aad"}, - {file = "airbyte_cdk-0.59.0-py3-none-any.whl", hash = "sha256:94c561c053b8be3a66bfefe420812ced9237403441249408e2af5445214a6f7b"}, + {file = "airbyte-cdk-0.70.1.tar.gz", hash = "sha256:fd27815350b8155fc42afd43d005a8d321c9f309c1adaedabbb0b74e9788648f"}, + {file = "airbyte_cdk-0.70.1-py3-none-any.whl", hash = "sha256:856b51c988c8e348f53df2806d8bf929919f220f5784696cf9a9578d7eb16e72"}, ] [package.dependencies] @@ -32,8 +32,8 @@ requests-cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] @@ -104,13 +104,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -467,13 +467,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -702,13 +702,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -808,13 +808,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -826,15 +826,15 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" @@ -857,19 +857,19 @@ test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "tes [[package]] name = "setuptools" -version = "69.1.0" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -895,13 +895,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -920,13 +920,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1031,4 +1031,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "1a1acacff3cde56bc593bf94bef4f697cff3c1d4eb0599c70cc9e8307df9cb5b" +content-hash = "a7a96e2b3330d2b39e398d386ac5724f0ddb92f7862e5029789b59942d9ba36d" diff --git a/airbyte-integrations/connectors/source-typeform/pyproject.toml b/airbyte-integrations/connectors/source-typeform/pyproject.toml index ee42093d03d47..e50f4503aaa8c 100644 --- a/airbyte-integrations/connectors/source-typeform/pyproject.toml +++ b/airbyte-integrations/connectors/source-typeform/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "1.2.5" +version = "1.2.6" name = "source-typeform" description = "Source implementation for Typeform." authors = [ "Airbyte ",] @@ -17,7 +17,7 @@ include = "source_typeform" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "==0.59.0" +airbyte-cdk = "^0" [tool.poetry.scripts] source-typeform = "source_typeform.run:run" diff --git a/airbyte-integrations/connectors/source-typeform/source_typeform/components.py b/airbyte-integrations/connectors/source-typeform/source_typeform/components.py index d7e12b70396d4..674aa77ed4214 100644 --- a/airbyte-integrations/connectors/source-typeform/source_typeform/components.py +++ b/airbyte-integrations/connectors/source-typeform/source_typeform/components.py @@ -30,10 +30,10 @@ def stream_slices(self) -> Iterable[StreamSlice]: if form_ids: for item in form_ids: - yield {"form_id": item} + yield StreamSlice(partition={"form_id": item}, cursor_slice={}) else: for parent_stream_config in self.parent_stream_configs: for item in parent_stream_config.stream.read_records(sync_mode=SyncMode.full_refresh): - yield {"form_id": item["id"]} + yield StreamSlice(partition={"form_id": item["id"]}, cursor_slice={}) yield from [] diff --git a/airbyte-integrations/connectors/source-zendesk-chat/README.md b/airbyte-integrations/connectors/source-zendesk-chat/README.md index f7d40d3e06a8b..411735aa8b10c 100644 --- a/airbyte-integrations/connectors/source-zendesk-chat/README.md +++ b/airbyte-integrations/connectors/source-zendesk-chat/README.md @@ -30,7 +30,7 @@ See `sample_files/sample_config.json` for a sample config file. poetry run source-zendesk-chat spec poetry run source-zendesk-chat check --config secrets/config.json poetry run source-zendesk-chat discover --config secrets/config.json -poetry run source-zendesk-chat read --config secrets/config.json --catalog sample_files/configured_catalog.json +poetry run source-zendesk-chat read --config secrets/config.json --catalog integration_tests/configured_catalog.json ``` ### Running unit tests diff --git a/airbyte-integrations/connectors/source-zendesk-chat/acceptance-test-config.yml b/airbyte-integrations/connectors/source-zendesk-chat/acceptance-test-config.yml index 37352d28dabec..674c07534f860 100644 --- a/airbyte-integrations/connectors/source-zendesk-chat/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-zendesk-chat/acceptance-test-config.yml @@ -6,8 +6,6 @@ acceptance_tests: - spec_path: "source_zendesk_chat/spec.json" connection: tests: - - config_path: "secrets/config_old.json" - status: "succeed" - config_path: "secrets/config.json" status: "succeed" - config_path: "secrets/config_oauth.json" @@ -23,12 +21,10 @@ acceptance_tests: tests: - config_path: "secrets/config.json" expect_records: - path: "integration_tests/expected_records.txt" - fail_on_extra_columns: false + path: "integration_tests/expected_records.jsonl" - config_path: "secrets/config_oauth.json" expect_records: - path: "integration_tests/expected_records.txt" - fail_on_extra_columns: false + path: "integration_tests/expected_records.jsonl" incremental: tests: - config_path: "secrets/config.json" diff --git a/airbyte-integrations/connectors/source-zendesk-chat/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-zendesk-chat/integration_tests/expected_records.jsonl new file mode 100644 index 0000000000000..45c1c29ec9772 --- /dev/null +++ b/airbyte-integrations/connectors/source-zendesk-chat/integration_tests/expected_records.jsonl @@ -0,0 +1,32 @@ +{"stream": "accounts", "data": {"create_date": "2020-12-11T18:33:40Z", "account_key": "svBRNv6HoJnSZRgpf6yMmBZaFMY6s2hP", "status": "active", "plan": {"goals": 5, "long_desc": "Best for Organizations to manage Large Support Teams", "price": 70.0, "short_desc": "Ideal for Large Support Teams", "widget_customization": "full", "max_agents": 5, "sla": true, "monitoring": true, "rest_api": true, "email_reports": true, "daily_reports": true, "chat_reports": true, "agent_reports": true, "agent_leaderboard": true, "unbranding": true, "high_load": true, "ip_restriction": true, "support": true, "name": "enterprise", "max_basic_triggers": "unlimited", "max_advanced_triggers": "unlimited", "max_departments": "unlimited", "max_concurrent_chats": "unlimited", "max_history_search_days": "unlimited", "operating_hours": true, "file_upload": true, "analytics": true, "integrations": true}}, "emitted_at": 1709738612256} +{"stream": "agents", "data": {"enabled": true, "create_date": "2020-11-17T23:55:24Z", "role_id": 360002848996, "first_name": "Team Airbyte", "email": "integration-test@airbyte.io", "last_name": "", "id": 360786799676, "enabled_departments": [7282618889231], "departments": [7282618889231, 5059474192015, 5060105343503, 5060005480847, 5060049125391, 5060061403535, 5060061508879, 5060049288719, 5060049443215, 5060066676751, 5060066798607, 5060071902479, 5060093166863, 5060100872591, 5060101239823, 5060072765583, 5060101350159, 5060077702799, 5060088742799, 5060103345935, 5060078913935, 5059463979663, 5060103664783, 5060079026575, 5060055796111, 5060090959759, 5059403825935, 5060108375311, 5059473809295, 5059436114575, 360003074836, 6770788212111], "skills": [], "display_name": "Team Airbyte", "last_login": "2024-02-09T13:12:16Z", "login_count": 113, "roles": {"administrator": true, "owner": false}}, "emitted_at": 1709738612909} +{"stream": "agents", "data": {"enabled": true, "create_date": "2021-04-23T14:33:11Z", "role_id": 360002848976, "first_name": "Fake User number - 1", "email": "fake.user-1@email.com", "last_name": "", "id": 361084605116, "enabled_departments": [7282640316815, 7282630247567, 7282624630287], "departments": [7282640316815, 7282630247567, 7282624630287, 5060105343503, 5060005480847, 5060049125391, 5060061403535, 5060061508879, 5060049288719, 5060049443215, 5060066676751, 5060066798607, 5060071902479, 5059452990735, 5060093166863, 5060100872591, 5060101239823, 5060072765583, 5060101350159, 5060077702799, 5060088742799, 5060103345935, 5060078913935, 5060103664783, 5060079026575, 5060055796111, 5060090959759, 5059473603087, 5059403825935, 5060108375311, 5059473809295, 5059436284943, 360003074836], "skills": [1300601, 8565161], "display_name": "Fake User number - 1", "last_login": null, "login_count": 0, "roles": {"administrator": false, "owner": false}}, "emitted_at": 1709738612913} +{"stream": "agents", "data": {"enabled": true, "create_date": "2021-04-23T14:34:20Z", "role_id": 360002848976, "first_name": "Fake Agent number - 1", "email": "fake.agent-1@email.com", "last_name": "", "id": 361089721035, "enabled_departments": [7282630247567], "departments": [7282630247567, 7282657193103, 5059439464079, 5060105343503, 5060005480847, 5060049125391, 5060061403535, 5060061508879, 5060049288719, 5060049443215, 5060066676751, 5060066798607, 5060071902479, 5060093166863, 5060100872591, 5060101239823, 5060072765583, 5060101350159, 5060077702799, 5060088742799, 5060103345935, 5060078913935, 5060103664783, 5060079026575, 5060055796111, 5060090959759, 5059473603087, 5060108375311, 5059473809295, 5059436114575, 5059404003599, 360003074836], "skills": [1296081, 1300641], "display_name": "Fake Agent number - 1", "last_login": null, "login_count": 0, "roles": {"administrator": false, "owner": false}}, "emitted_at": 1709738612916} +{"stream": "agent_timeline", "data": {"agent_id": 360786799676, "engagement_count": 0, "start_time": "2020-12-14T04:08:32Z", "status": "invisible", "duration": 459.213926, "id": "360786799676|2020-12-14T04:08:32Z"}, "emitted_at": 1709738613859} +{"stream": "agent_timeline", "data": {"agent_id": 360786799676, "engagement_count": 0, "start_time": "2020-12-14T04:17:32Z", "status": "invisible", "duration": 3440.710507, "id": "360786799676|2020-12-14T04:17:32Z"}, "emitted_at": 1709738613863} +{"stream": "agent_timeline", "data": {"agent_id": 360786799676, "engagement_count": 0, "start_time": "2020-12-14T18:45:37Z", "status": "invisible", "duration": 520.75554, "id": "360786799676|2020-12-14T18:45:37Z"}, "emitted_at": 1709738613864} +{"stream": "bans", "data": {"created_at": "2021-04-21T14:42:46Z", "reason": "Spammer", "type": "ip_address", "id": 70519881, "ip_address": "192.123.123.5"}, "emitted_at": 1709738615366} +{"stream": "bans", "data": {"created_at": "2021-04-26T13:55:20Z", "reason": "Spammer", "type": "ip_address", "id": 75112241, "ip_address": "191.121.123.5"}, "emitted_at": 1709738615369} +{"stream": "bans", "data": {"created_at": "2021-04-26T13:55:30Z", "reason": "Spammer", "type": "ip_address", "id": 75112281, "ip_address": "111.121.123.5"}, "emitted_at": 1709738615371} +{"stream": "chats", "data": {"timestamp": "2021-04-26T13:54:02Z", "unread": false, "webpath": [], "type": "offline_msg", "id": "2104.10414779.SVhDCJ9flq79a", "update_timestamp": "2021-04-27T15:09:17Z", "tags": [], "department_name": null, "session": {"browser": "Safari", "city": "Orlando", "country_code": "US", "country_name": "United States", "end_date": "2014-10-09T05:46:47Z", "id": "141109.654464.1KhqS0Nw", "ip": "67.32.299.96", "platform": "Mac OS", "region": "Florida", "start_date": "2014-10-09T05:28:31Z", "user_agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10) AppleWebKit/600.1.25 (KHTML, like Gecko) Version/8.0 Safari/600.1.25"}, "department_id": null, "deleted": false, "message": "Hi there!", "visitor": {"phone": "+32178763521", "notes": "Test 2", "id": "3.45678", "name": "Jiny", "email": "visitor_jiny@doe.com"}, "zendesk_ticket_id": null}, "emitted_at": 1709738618587} +{"stream": "chats", "data": {"timestamp": "2021-04-21T14:36:55Z", "unread": false, "webpath": [], "type": "offline_msg", "id": "2104.10414779.SVE9Mo9bE4wR8", "update_timestamp": "2021-04-30T11:06:19Z", "tags": [], "department_name": null, "session": {"browser": "Safari", "city": "Orlando", "country_code": "US", "country_name": "United States", "end_date": "2014-10-09T05:46:47Z", "id": "141109.654464.1KhqS0Nw", "ip": "67.32.299.96", "platform": "Mac OS", "region": "Florida", "start_date": "2014-10-09T05:28:31Z", "user_agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10) AppleWebKit/600.1.25 (KHTML, like Gecko) Version/8.0 Safari/600.1.25"}, "department_id": null, "deleted": false, "message": "Hi there!", "visitor": {"phone": "", "notes": "", "id": "1.12345", "name": "John", "email": "visitor_john@doe.com"}, "zendesk_ticket_id": null}, "emitted_at": 1709738618592} +{"stream": "chats", "data": {"timestamp": "2021-04-26T13:53:30Z", "unread": false, "webpath": [], "type": "offline_msg", "id": "2104.10414779.SVhD3v7I1LBOq", "update_timestamp": "2021-04-30T11:08:12Z", "tags": [], "department_name": null, "session": {"browser": "Safari", "city": "Orlando", "country_code": "US", "country_name": "United States", "end_date": "2014-10-09T05:46:47Z", "id": "141109.654464.1KhqS0Nw", "ip": "67.32.299.96", "platform": "Mac OS", "region": "Florida", "start_date": "2014-10-09T05:28:31Z", "user_agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10) AppleWebKit/600.1.25 (KHTML, like Gecko) Version/8.0 Safari/600.1.25"}, "department_id": null, "deleted": false, "message": "Hi there!", "visitor": {"phone": "+78763521", "notes": "Test", "id": "2.34567", "name": "Tiny", "email": "visitor_tiny@doe.com"}, "zendesk_ticket_id": null}, "emitted_at": 1709738618596} +{"stream": "departments", "data": {"name": "Airbyte Department 1", "members": [361084605116], "settings": {"chat_enabled": true, "solved_ticket_reassignment_strategy": "", "support_group_id": 7282640316815}, "description": "A sample department", "id": 7282640316815, "enabled": true}, "emitted_at": 1709738620228} +{"stream": "departments", "data": {"name": "Department 1", "members": [360786799676], "settings": {"chat_enabled": true, "solved_ticket_reassignment_strategy": "", "support_group_id": 7282618889231}, "description": "A sample department", "id": 7282618889231, "enabled": true}, "emitted_at": 1709738620231} +{"stream": "departments", "data": {"name": "Department 2", "members": [361089721035, 361084605116], "settings": {"chat_enabled": true, "solved_ticket_reassignment_strategy": "", "support_group_id": 7282630247567}, "description": "A sample department 2", "id": 7282630247567, "enabled": true}, "emitted_at": 1709738620233} +{"stream": "goals", "data": {"enabled": true, "attribution_model": "first_touch", "description": "A new goal", "name": "Goal 3", "id": 513481, "attribution_period": 15, "settings": {"conditions": [{"operator": "equals", "type": "url", "value": "http://mysite.com/"}]}}, "emitted_at": 1709738621059} +{"stream": "goals", "data": {"enabled": false, "attribution_model": "first_touch", "description": "A new goal - 1", "name": "Goal one", "id": 529641, "attribution_period": 5, "settings": {"conditions": [{"operator": "equals", "type": "url", "value": "http://mysite.com/"}]}}, "emitted_at": 1709738621061} +{"stream": "goals", "data": {"enabled": false, "attribution_model": "first_touch", "description": "A new goal - 2", "name": "Goal two", "id": 529681, "attribution_period": 15, "settings": {"conditions": [{"operator": "equals", "type": "url", "value": "http://mysite.com/"}]}}, "emitted_at": 1709738621063} +{"stream": "roles", "data": {"enabled": true, "permissions": {"visitors_seen": "account", "proactive_chatting": "listen-join", "edit_visitor_information": true, "edit_visitor_notes": true, "view_past_chats": "account", "edit_chat_tags": true, "manage_bans": "account", "access_analytics": "account", "view_monitor": "account", "edit_department_agents": "account", "set_agent_chat_limit": "account", "manage_shortcuts": "account"}, "description": "In addition to regular agent privileges, administrators can edit widget and accounts settings, manage agents, roles and permissions, and more. Permissions for the administrator role cannot be modified.", "name": "Administrator", "id": 360002848996, "members_count": 1}, "emitted_at": 1709738621711} +{"stream": "roles", "data": {"enabled": true, "permissions": {"visitors_seen": "account", "proactive_chatting": "listen-join", "edit_visitor_information": true, "edit_visitor_notes": true, "view_past_chats": "account", "edit_chat_tags": false, "manage_bans": "account", "access_analytics": "none", "view_monitor": "account", "edit_department_agents": "none", "set_agent_chat_limit": "none", "manage_shortcuts": "account"}, "description": "Agent is the most basic role in an account, and their primary responsibility is to serve chats. Permissions for the agent role can be modified.", "name": "Agent", "id": 360002848976, "members_count": 2}, "emitted_at": 1709738621714} +{"stream": "roles", "data": {"enabled": true, "permissions": {"visitors_seen": "account", "proactive_chatting": "listen-join", "edit_visitor_information": true, "edit_visitor_notes": true, "view_past_chats": "account", "edit_chat_tags": false, "manage_bans": "account", "access_analytics": "none", "view_monitor": "account", "edit_department_agents": "none", "set_agent_chat_limit": "none", "manage_shortcuts": "account"}, "description": "Can serve social messaging conversations only", "name": "Agent (limited)", "id": 7282769201935, "members_count": 0}, "emitted_at": 1709738621715} +{"stream": "routing_settings", "data": {"routing_mode": "assigned", "chat_limit": {"enabled": false, "limit": 3, "limit_type": "account", "allow_agent_override": false}, "skill_routing": {"enabled": true, "max_wait_time": 30}, "reassignment": {"enabled": true, "timeout": 30}, "auto_idle": {"enabled": false, "reassignments_before_idle": 3, "new_status": "away"}, "auto_accept": {"enabled": false}}, "emitted_at": 1709738622442} +{"stream": "shortcuts", "data": {"options": "Yes/No", "id": "goodbye", "scope": "all", "name": "goodbye", "tags": ["goodbye_survey"], "message": "Thanks for chatting with us. Have we resolved your question(s)?"}, "emitted_at": 1709738623130} +{"stream": "shortcuts", "data": {"options": "Yes/No", "id": "help", "scope": "all", "name": "help", "tags": ["help_survey"], "message": "Do you need any help?"}, "emitted_at": 1709738623133} +{"stream": "shortcuts", "data": {"options": "", "id": "hi", "scope": "all", "name": "hi", "tags": [], "message": "Hi, how can we help you today? =)"}, "emitted_at": 1709738623135} +{"stream": "skills", "data": {"id": 1300601, "name": "english", "members": [361084605116], "description": "English language", "enabled": true}, "emitted_at": 1709738623775} +{"stream": "skills", "data": {"id": 1300641, "name": "france", "members": [361089721035], "description": "France language", "enabled": true}, "emitted_at": 1709738623778} +{"stream": "skills", "data": {"id": 1296081, "name": "mandarin", "members": [361089721035], "description": "Chinese language", "enabled": true}, "emitted_at": 1709738623780} +{"stream": "triggers", "data": {"enabled": true, "definition": {"condition": ["and", ["not", ["firedBefore"]], ["and", ["neq", "@account_status", "offline"], ["stillOnSite", 60], ["eq", "@visitor_served", false]]], "event": "chat_requested", "actions": [["setTriggered", true], ["sendMessageToVisitor", "Customer Service", "We apologize for keeping you waiting. Our operators are busy at the moment, please leave us a message with your email address and we'll get back to you shortly."]]}, "description": "Auto respond to messages if agents don't respond in time.", "name": "Chat Rescuer", "id": 66052481}, "emitted_at": 1709738624471} +{"stream": "triggers", "data": {"enabled": true, "definition": {"event": "page_enter", "condition": ["and", ["eq", "@visitor_page_url", "www.zendesk.com/cart"], ["stillOnPage", 60], ["eq", "@visitor_requesting_chat", false], ["eq", "@visitor_served", false], ["not", ["firedBefore"]]], "actions": [["sendMessageToVisitor", "Stephanie", "Hi, are you having any trouble checking out? Feel free to reach out to us with any questions."]], "version": 1, "editor": "advanced"}, "description": "Reduce cart abandonment by engaging customers that are lingering on the checkout page.", "name": "Checkout Page", "id": 66052561}, "emitted_at": 1709738624474} +{"stream": "triggers", "data": {"enabled": true, "definition": {"event": "chat_requested", "condition": ["and", ["eq", "@visitor_requesting_chat", true], ["eq", "@visitor_served", false], ["not", ["firedBefore"]]], "actions": [["wait", 5], ["sendMessageToVisitor", "Customer Service", "Thanks for your message, please wait a moment while our agents attend to you."]], "version": 1, "editor": "advanced"}, "description": "Send an automated reply to customers that start a chat, so they know their request is being attended to.", "name": "First Reply", "id": 66052601}, "emitted_at": 1709738624476} diff --git a/airbyte-integrations/connectors/source-zendesk-chat/integration_tests/expected_records.txt b/airbyte-integrations/connectors/source-zendesk-chat/integration_tests/expected_records.txt deleted file mode 100644 index 10f75ba0af99f..0000000000000 --- a/airbyte-integrations/connectors/source-zendesk-chat/integration_tests/expected_records.txt +++ /dev/null @@ -1,34 +0,0 @@ -{"stream": "accounts", "data": {"create_date": "2020-12-11T18:33:40Z", "status": "active", "account_key": "svBRNv6HoJnSZRgpf6yMmBZaFMY6s2hP", "plan": {"goals": 5, "long_desc": "Best for Organizations to manage Large Support Teams", "price": 70.0, "short_desc": "Ideal for Large Support Teams", "widget_customization": "full", "max_agents": 5, "sla": true, "monitoring": true, "rest_api": true, "email_reports": true, "daily_reports": true, "chat_reports": true, "agent_reports": true, "agent_leaderboard": true, "unbranding": true, "high_load": true, "ip_restriction": true, "support": true, "name": "enterprise", "max_basic_triggers": "unlimited", "max_advanced_triggers": "unlimited", "max_departments": "unlimited", "max_concurrent_chats": "unlimited", "max_history_search_days": "unlimited", "operating_hours": true, "file_upload": true, "analytics": true, "integrations": true}}, "emitted_at": 1672828432816} -{"stream": "agent_timeline", "data": {"agent_id": 360786799676, "engagement_count": 0, "start_time": "2022-01-17T13:20:50Z", "status": "invisible", "duration": 789.733983, "id": "360786799676|2022-01-17T13:20:50Z"}, "emitted_at": 1672828433249} -{"stream": "agent_timeline", "data": {"agent_id": 360786799676, "engagement_count": 0, "start_time": "2022-06-30T17:16:55Z", "status": "invisible", "duration": 61.089883, "id": "360786799676|2022-06-30T17:16:55Z"}, "emitted_at": 1672828433249} -{"stream": "agent_timeline", "data": {"agent_id": 360786799676, "engagement_count": 0, "start_time": "2022-10-28T12:43:05Z", "status": "invisible", "duration": 370.793077, "id": "360786799676|2022-10-28T12:43:05Z"}, "emitted_at": 1672828433249} -{"stream": "agents", "data": {"role_id": 360002848976, "departments": [7282640316815, 7282630247567, 7282624630287, 5060105343503, 5060005480847, 5060049125391, 5060061403535, 5060061508879, 5060049288719, 5060049443215, 5060066676751, 5060066798607, 5060071902479, 5059452990735, 5060093166863, 5060100872591, 5060101239823, 5060072765583, 5060101350159, 5060077702799, 5060088742799, 5060103345935, 5060078913935, 5060103664783, 5060079026575, 5060055796111, 5060090959759, 5059473603087, 5059403825935, 5060108375311, 5059473809295, 5059436284943, 360003074836], "enabled_departments": [7282640316815, 7282630247567, 7282624630287], "last_name": "", "create_date": "2021-04-23T14:33:11Z", "first_name": "Fake User number - 1", "enabled": true, "skills": [1300601, 8565161], "id": 361084605116, "display_name": "Fake User number - 1", "email": "fake.user-1@email.com", "last_login": null, "login_count": 0, "roles": {"administrator": false, "owner": false}}, "emitted_at": 1688547518353} -{"stream": "agents", "data": {"role_id": 360002848976, "departments": [7282630247567, 7282657193103, 5059439464079, 5060105343503, 5060005480847, 5060049125391, 5060061403535, 5060061508879, 5060049288719, 5060049443215, 5060066676751, 5060066798607, 5060071902479, 5060093166863, 5060100872591, 5060101239823, 5060072765583, 5060101350159, 5060077702799, 5060088742799, 5060103345935, 5060078913935, 5060103664783, 5060079026575, 5060055796111, 5060090959759, 5059473603087, 5060108375311, 5059473809295, 5059436114575, 5059404003599, 360003074836], "enabled_departments": [7282630247567], "last_name": "", "create_date": "2021-04-23T14:34:20Z", "first_name": "Fake Agent number - 1", "enabled": true, "skills": [1296081, 1300641], "id": 361089721035, "display_name": "Fake Agent number - 1", "email": "fake.agent-1@email.com", "last_login": null, "login_count": 0, "roles": {"administrator": false, "owner": false}}, "emitted_at": 1688547518353} -{"stream": "bans", "data": {"type": "visitor", "id": 75411361, "reason": "Spammer", "created_at": "2021-04-27T15:52:32Z", "visitor_name": "Visitor 47225177", "visitor_id": "10414779.13ojzHu7ISdt0SM"}, "emitted_at": 1672828433831} -{"stream": "bans", "data": {"type": "visitor", "id": 75411401, "reason": "Spammer", "created_at": "2021-04-27T15:52:32Z", "visitor_name": "Visitor 62959049", "visitor_id": "10414779.13ojzHu7at4VKcG"}, "emitted_at": 1672828433831} -{"stream": "bans", "data": {"created_at": "2021-04-27T15:52:32Z", "visitor_id": "10414779.13ojzHu7at4VKcG", "id": 75411401, "reason": "Spammer", "visitor_name": "Visitor 62959049", "type": "visitor"}, "emitted_at": 1672828434000} -{"stream": "bans", "data": {"created_at": "2021-04-27T15:52:33Z", "visitor_id": "10414779.13ojzHu7s9YwIjz", "id": 75411441, "reason": "Spammer", "visitor_name": "Visitor 97350211", "type": "visitor"}, "emitted_at": 1672828434001} -{"stream": "chats", "data": {"department_id": null, "webpath": [], "session": {"browser": "Safari", "city": "Orlando", "country_code": "US", "country_name": "United States", "end_date": "2014-10-09T05:46:47Z", "id": "141109.654464.1KhqS0Nw", "ip": "67.32.299.96", "platform": "Mac OS", "region": "Florida", "start_date": "2014-10-09T05:28:31Z", "user_agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10) AppleWebKit/600.1.25 (KHTML, like Gecko) Version/8.0 Safari/600.1.25"}, "visitor": {"phone": "+32178763521", "notes": "Test 2", "id": "3.45678", "name": "Jiny", "email": "visitor_jiny@doe.com"}, "update_timestamp": "2021-04-27T15:09:17Z", "department_name": null, "type": "offline_msg", "deleted": false, "tags": [], "timestamp": "2021-04-26T13:54:02Z", "unread": false, "id": "2104.10414779.SVhDCJ9flq79a", "message": "Hi there!", "zendesk_ticket_id": null}, "emitted_at": 1701452730189} -{"stream": "chats", "data": {"department_id": null, "webpath": [], "session": {"browser": "Safari", "city": "Orlando", "country_code": "US", "country_name": "United States", "end_date": "2014-10-09T05:46:47Z", "id": "141109.654464.1KhqS0Nw", "ip": "67.32.299.96", "platform": "Mac OS", "region": "Florida", "start_date": "2014-10-09T05:28:31Z", "user_agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10) AppleWebKit/600.1.25 (KHTML, like Gecko) Version/8.0 Safari/600.1.25"}, "visitor": {"phone": "", "notes": "", "id": "1.12345", "name": "John", "email": "visitor_john@doe.com"}, "update_timestamp": "2021-04-30T11:06:19Z", "department_name": null, "type": "offline_msg", "deleted": false, "tags": [], "timestamp": "2021-04-21T14:36:55Z", "unread": false, "id": "2104.10414779.SVE9Mo9bE4wR8", "message": "Hi there!", "zendesk_ticket_id": null}, "emitted_at": 1701452730190} -{"stream": "chats", "data": {"department_id": null, "webpath": [], "session": {"browser": "Safari", "city": "Orlando", "country_code": "US", "country_name": "United States", "end_date": "2014-10-09T05:46:47Z", "id": "141109.654464.1KhqS0Nw", "ip": "67.32.299.96", "platform": "Mac OS", "region": "Florida", "start_date": "2014-10-09T05:28:31Z", "user_agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10) AppleWebKit/600.1.25 (KHTML, like Gecko) Version/8.0 Safari/600.1.25"}, "visitor": {"phone": "+78763521", "notes": "Test", "id": "2.34567", "name": "Tiny", "email": "visitor_tiny@doe.com"}, "update_timestamp": "2021-04-30T11:08:12Z", "department_name": null, "type": "offline_msg", "deleted": false, "tags": [], "timestamp": "2021-04-26T13:53:30Z", "unread": false, "id": "2104.10414779.SVhD3v7I1LBOq", "message": "Hi there!", "zendesk_ticket_id": null}, "emitted_at": 1701452730190} -{"stream": "chats", "data": {"department_id": null, "webpath": [], "session": {"browser": "Safari", "city": "Orlando", "country_code": "US", "country_name": "United States", "end_date": "2022-10-09T05:46:47Z", "id": "141109.654464.1KhqS0Nw", "ip": "67.32.299.96", "platform": "Mac OS", "region": "Florida", "start_date": "2014-10-09T05:28:31Z", "user_agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10) AppleWebKit/600.1.25 (KHTML, like Gecko) Version/8.0 Safari/600.1.25"}, "visitor": {"phone": "", "notes": "", "id": "7.34502", "name": "Fake user - chat 2", "email": "fake_user_chat_2@doe.com"}, "update_timestamp": "2021-04-30T13:32:27Z", "department_name": null, "type": "offline_msg", "deleted": false, "tags": [], "timestamp": "2021-04-30T13:32:27Z", "unread": true, "id": "2104.10414779.SW4VrjJpOq6gk", "message": "Hi there!", "zendesk_ticket_id": null}, "emitted_at": 1701452730191} -{"stream": "departments", "data": {"settings": {"chat_enabled": true, "support_group_id": 7282640316815}, "members": [361084605116], "name": "Airbyte Department 1", "enabled": true, "description": "A sample department", "id": 7282640316815}, "emitted_at": 1688547521914} -{"stream": "departments", "data": {"settings": {"chat_enabled": true, "support_group_id": 7282618889231}, "members": [360786799676], "name": "Department 1", "enabled": true, "description": "A sample department", "id": 7282618889231}, "emitted_at": 1688547521914} -{"stream": "departments", "data": {"settings": {"chat_enabled": true, "support_group_id": 7282630247567}, "members": [361089721035, 361084605116], "name": "Department 2", "enabled": true, "description": "A sample department 2", "id": 7282630247567}, "emitted_at": 1688547521914} -{"stream": "goals", "data": {"enabled": true, "id": 513481, "attribution_period": 15, "attribution_model": "first_touch", "name": "Goal 3", "description": "A new goal", "settings": {"conditions": [{"operator": "equals", "type": "url", "value": "http://mysite.com/"}]}}, "emitted_at": 1701453031915} -{"stream": "goals", "data": {"enabled": false, "id": 529641, "attribution_period": 5, "attribution_model": "first_touch", "name": "Goal one", "description": "A new goal - 1", "settings": {"conditions": [{"operator": "equals", "type": "url", "value": "http://mysite.com/"}]}}, "emitted_at": 1701453031916} -{"stream": "goals", "data": {"enabled": false, "id": 529681, "attribution_period": 15, "attribution_model": "first_touch", "name": "Goal two", "description": "A new goal - 2", "settings": {"conditions": [{"operator": "equals", "type": "url", "value": "http://mysite.com/"}]}}, "emitted_at": 1701453031916} -{"stream": "goals", "data": {"enabled": true, "id": 537121, "attribution_period": 30, "attribution_model": "last_touch", "name": "Test goal", "description": "Test goal", "settings": {"conditions": [{"operator": "equals", "type": "url", "value": "http://zendesk.com/thanks"}]}}, "emitted_at": 1701453031916} -{"stream": "roles", "data": {"permissions": {"visitors_seen": "account", "proactive_chatting": "listen-join", "edit_visitor_information": true, "edit_visitor_notes": true, "view_past_chats": "account", "edit_chat_tags": true, "manage_bans": "account", "access_analytics": "account", "view_monitor": "account", "edit_department_agents": "account", "set_agent_chat_limit": "account", "manage_shortcuts": "account"}, "enabled": true, "description": "In addition to regular agent privileges, administrators can edit widget and accounts settings, manage agents, roles and permissions, and more. Permissions for the administrator role cannot be modified.", "id": 360002848996, "name": "Administrator", "members_count": 1}, "emitted_at": 1672828435141} -{"stream": "roles", "data": {"permissions": {"visitors_seen": "account", "proactive_chatting": "listen-join", "edit_visitor_information": true, "edit_visitor_notes": true, "view_past_chats": "account", "edit_chat_tags": false, "manage_bans": "account", "access_analytics": "none", "view_monitor": "account", "edit_department_agents": "none", "set_agent_chat_limit": "none", "manage_shortcuts": "account"}, "enabled": true, "description": "Agent is the most basic role in an account, and their primary responsibility is to serve chats. Permissions for the agent role can be modified.", "id": 360002848976, "name": "Agent", "members_count": 2}, "emitted_at": 1672828435142} -{"stream": "shortcuts", "data": {"name": "goodbye", "id": "goodbye", "options": "Yes/No", "tags": ["goodbye_survey"], "scope": "all", "message": "Thanks for chatting with us. Have we resolved your question(s)?"}, "emitted_at": 1672828435386} -{"stream": "shortcuts", "data": {"name": "help", "id": "help", "options": "Yes/No", "tags": ["help_survey"], "scope": "all", "message": "Do you need any help?"}, "emitted_at": 1672828435386} -{"stream": "shortcuts", "data": {"name": "hi", "id": "hi", "options": "", "tags": [], "scope": "all", "message": "Hi, how can we help you today? =)"}, "emitted_at": 1672828435386} -{"stream": "shortcuts", "data": {"name": "returning", "id": "returning", "options": "", "tags": ["returning_visitor"], "scope": "all", "message": "Welcome back. How can we help you today"}, "emitted_at": 1672828435387} -{"stream": "skills", "data": {"id": 1300601, "name": "english", "enabled": true, "description": "English language", "members": [361084605116]}, "emitted_at": 1672828435627} -{"stream": "skills", "data": {"id": 1300641, "name": "france", "enabled": true, "description": "France language", "members": [361089721035]}, "emitted_at": 1672828435628} -{"stream": "skills", "data": {"id": 1296081, "name": "mandarin", "enabled": true, "description": "Chinese language", "members": [361089721035]}, "emitted_at": 1672828435628} -{"stream": "triggers", "data": {"name": "Product Discounts", "enabled": true, "description": "Offer your returning customers a discount on one of your products or services. This Trigger will need to be customized based on the page.", "id": 66052801, "definition": {"event": "chat_requested", "condition": ["and", ["icontains", "@visitor_page_url", "[product name]"], ["stillOnPage", 30], ["eq", "@visitor_requesting_chat", false], ["eq", "@visitor_served", false], ["not", ["firedBefore"]]], "actions": [["sendMessageToVisitor", "Customer Service", "Hi, are you interested in [insert product name]? We're offering a one-time 20% discount. Chat with me to find out more."]], "version": 1, "editor": "advanced"}}, "emitted_at": 1688547525543} -{"stream": "triggers", "data": {"name": "Request Contact Details", "enabled": true, "description": "When your account is set to away, ask customer's requesting a chat to leave their email address.", "id": 66052841, "definition": {"event": "chat_requested", "condition": ["and", ["eq", "@account_status", "away"], ["not", ["firedBefore"]]], "actions": [["addTag", "Away_request"], ["sendMessageToVisitor", "Customer Service", "Hi, sorry we are away at the moment. Please leave your email address and we will get back to you as soon as possible."]], "version": 1, "editor": "advanced"}}, "emitted_at": 1688547525543} -{"stream": "triggers", "data": {"name": "Tag Repeat Visitors", "enabled": true, "description": "Add a tag to a visitor that has visited your site 5 or more times. This helps you identify potential customers who are very interested in your brand.", "id": 66052881, "definition": {"event": "page_enter", "condition": ["and", ["gte", "@visitor_previous_visits", 5]], "actions": [["addTag", "5times"]], "version": 1, "editor": "advanced"}}, "emitted_at": 1688547525543} -{"stream": "routing_settings", "data": {"routing_mode": "assigned", "chat_limit": {"enabled": false, "limit": 3, "limit_type": "account", "allow_agent_override": false}, "skill_routing": {"enabled": true, "max_wait_time": 30}, "reassignment": {"enabled": true, "timeout": 30}, "auto_idle": {"enabled": false, "reassignments_before_idle": 3, "new_status": "away"}, "auto_accept": {"enabled": false}}, "emitted_at": 1701453336379} diff --git a/airbyte-integrations/connectors/source-zendesk-chat/integration_tests/state.json b/airbyte-integrations/connectors/source-zendesk-chat/integration_tests/state.json new file mode 100644 index 0000000000000..5042b1676175f --- /dev/null +++ b/airbyte-integrations/connectors/source-zendesk-chat/integration_tests/state.json @@ -0,0 +1,14 @@ +{ + "agents": { + "id": 361089721035 + }, + "bans": { + "id": 75412441 + }, + "chats": { + "update_timestamp": "2023-10-20T09:44:12Z" + }, + "agent_timeline": { + "start_time": "2024-02-09T13:12:16Z" + } +} diff --git a/airbyte-integrations/connectors/source-zendesk-chat/metadata.yaml b/airbyte-integrations/connectors/source-zendesk-chat/metadata.yaml index 3657e930c7c41..66b2f13bf74ed 100644 --- a/airbyte-integrations/connectors/source-zendesk-chat/metadata.yaml +++ b/airbyte-integrations/connectors/source-zendesk-chat/metadata.yaml @@ -6,17 +6,18 @@ data: hosts: - zopim.com connectorBuildOptions: - baseImage: docker.io/airbyte/python-connector-base:1.1.0@sha256:bd98f6505c6764b1b5f99d3aedc23dfc9e9af631a62533f60eb32b1d3dbab20c + baseImage: docker.io/airbyte/python-connector-base:1.2.0@sha256:c22a9d97464b69d6ef01898edf3f8612dc11614f05a84984451dde195f337db9 connectorSubtype: api connectorType: source definitionId: 40d24d0f-b8f9-4fe0-9e6c-b06c0f3f45e4 - dockerImageTag: 0.2.2 + dockerImageTag: 0.3.0 dockerRepository: airbyte/source-zendesk-chat documentationUrl: https://docs.airbyte.com/integrations/sources/zendesk-chat githubIssueLabel: source-zendesk-chat icon: zendesk-chat.svg license: MIT name: Zendesk Chat + maxSecondsBetweenMessages: 60 remoteRegistries: pypi: enabled: true @@ -30,5 +31,5 @@ data: supportLevel: certified tags: - language:python - - cdk:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-zendesk-chat/poetry.lock b/airbyte-integrations/connectors/source-zendesk-chat/poetry.lock index 4035ad70602bc..9f1120dc128c0 100644 --- a/airbyte-integrations/connectors/source-zendesk-chat/poetry.lock +++ b/airbyte-integrations/connectors/source-zendesk-chat/poetry.lock @@ -1,18 +1,18 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.51.41" +version = "0.72.2" description = "A framework for writing Airbyte Connectors." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte-cdk-0.51.41.tar.gz", hash = "sha256:cce614d67872cf66a151e5b72d70f4bf26e2a1ce672c7abfc15a5cb4e45d8429"}, - {file = "airbyte_cdk-0.51.41-py3-none-any.whl", hash = "sha256:bbf82a45d9ec97c4a92b85e3312b327f8060fffec1f7c7ea7dfa720f9adcc13b"}, + {file = "airbyte-cdk-0.72.2.tar.gz", hash = "sha256:3c06ed9c1436967ffde77b51814772dbbd79745d610bc2fe400dff9c4d7a9877"}, + {file = "airbyte_cdk-0.72.2-py3-none-any.whl", hash = "sha256:8d50773fe9ffffe9be8d6c2d2fcb10c50153833053b3ef4283fcb39c544dc4b9"}, ] [package.dependencies] -airbyte-protocol-models = "0.4.2" +airbyte-protocol-models = "0.5.1" backoff = "*" cachetools = "*" Deprecated = ">=1.2,<2.0" @@ -22,8 +22,9 @@ isodate = ">=0.6.1,<0.7.0" Jinja2 = ">=3.1.2,<3.2.0" jsonref = ">=0.2,<1.0" jsonschema = ">=3.2.0,<3.3.0" -pendulum = "*" +pendulum = "<3.0.0" pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" python-dateutil = "*" PyYAML = ">=6.0.1" requests = "*" @@ -31,20 +32,20 @@ requests-cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pyarrow (==12.0.1)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "pyarrow (==12.0.1)"] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] [[package]] name = "airbyte-protocol-models" -version = "0.4.2" +version = "0.5.1" description = "Declares the Airbyte Protocol." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte_protocol_models-0.4.2-py3-none-any.whl", hash = "sha256:d3bbb14d4af9483bd7b08f5eb06f87e7113553bf4baed3998af95be873a0d821"}, - {file = "airbyte_protocol_models-0.4.2.tar.gz", hash = "sha256:67b149d4812f8fdb88396b161274aa73cf0e16f22e35ce44f2bfc4d47e51915c"}, + {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, + {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, ] [package.dependencies] @@ -103,13 +104,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -466,13 +467,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -602,6 +603,21 @@ typing-extensions = ">=4.2.0" dotenv = ["python-dotenv (>=0.10.4)"] email = ["email-validator (>=1.0.3)"] +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + [[package]] name = "pyrsistent" version = "0.20.0" @@ -686,13 +702,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -792,13 +808,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -810,15 +826,15 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" @@ -841,19 +857,19 @@ test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "tes [[package]] name = "setuptools" -version = "69.1.0" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -879,13 +895,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -904,13 +920,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1015,4 +1031,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "e55b65b435ed00315a8288393c1fb2adde5904ae32b5aed66f133bdb721a6991" +content-hash = "ccbf9ba9481a72f2e99d49b166340fbaca1a8ae9d6ef8990e87759d8453b287a" diff --git a/airbyte-integrations/connectors/source-zendesk-chat/pyproject.toml b/airbyte-integrations/connectors/source-zendesk-chat/pyproject.toml index f47dbc02c81db..0867e658dd228 100644 --- a/airbyte-integrations/connectors/source-zendesk-chat/pyproject.toml +++ b/airbyte-integrations/connectors/source-zendesk-chat/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "0.2.2" +version = "0.3.0" name = "source-zendesk-chat" description = "Source implementation for Zendesk Chat." authors = [ "Airbyte ",] @@ -17,7 +17,7 @@ include = "source_zendesk_chat" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "==0.51.41" +airbyte-cdk = "^0" pendulum = "==2.1.2" [tool.poetry.scripts] diff --git a/airbyte-integrations/connectors/source-zendesk-chat/source_zendesk_chat/components/bans_record_extractor.py b/airbyte-integrations/connectors/source-zendesk-chat/source_zendesk_chat/components/bans_record_extractor.py new file mode 100644 index 0000000000000..2dffe978edfb7 --- /dev/null +++ b/airbyte-integrations/connectors/source-zendesk-chat/source_zendesk_chat/components/bans_record_extractor.py @@ -0,0 +1,27 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +from dataclasses import dataclass +from typing import Any, List, Mapping + +import pendulum +import requests +from airbyte_cdk.sources.declarative.extractors.record_extractor import RecordExtractor +from airbyte_cdk.sources.declarative.types import Record + + +@dataclass +class ZendeskChatBansRecordExtractor(RecordExtractor): + """ + Unnesting nested bans: `visitor`, `ip_address`. + """ + + def extract_records(self, response: requests.Response) -> List[Mapping[str, Any]]: + response_data = response.json() + ip_address: List[Mapping[str, Any]] = response_data.get("ip_address", []) + visitor: List[Mapping[str, Any]] = response_data.get("visitor", []) + bans = ip_address + visitor + bans = sorted(bans, key=lambda x: pendulum.parse(x["created_at"]) if x["created_at"] else pendulum.datetime(1970, 1, 1)) + return bans diff --git a/airbyte-integrations/connectors/source-zendesk-chat/source_zendesk_chat/components/id_incremental_cursor.py b/airbyte-integrations/connectors/source-zendesk-chat/source_zendesk_chat/components/id_incremental_cursor.py new file mode 100644 index 0000000000000..1addd15641563 --- /dev/null +++ b/airbyte-integrations/connectors/source-zendesk-chat/source_zendesk_chat/components/id_incremental_cursor.py @@ -0,0 +1,155 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from dataclasses import InitVar, dataclass, field +from typing import Any, Iterable, Mapping, Optional, Union + +from airbyte_cdk.models import AirbyteLogMessage, AirbyteMessage, Level, Type +from airbyte_cdk.sources.declarative.incremental.cursor import Cursor +from airbyte_cdk.sources.declarative.interpolation.interpolated_string import InterpolatedString +from airbyte_cdk.sources.declarative.requesters.request_option import RequestOptionType +from airbyte_cdk.sources.declarative.types import Config, Record, StreamSlice, StreamState +from airbyte_cdk.sources.message import MessageRepository + + +@dataclass +class ZendeskChatIdIncrementalCursor(Cursor): + """ + Custom Incremental Cursor implementation to provide the ability to pull data using `id`(int) as cursor. + More info: https://developer.zendesk.com/api-reference/live-chat/chat-api/agents/#parameters + + Attributes: + config (Config): connection config + field_name (Union[InterpolatedString, str]): the name of the field which will hold the cursor value for outbound API call + cursor_field (Union[InterpolatedString, str]): record's cursor field + """ + + config: Config + cursor_field: Union[InterpolatedString, str] + field_name: Union[InterpolatedString, str] + parameters: InitVar[Mapping[str, Any]] + _highest_observed_record_cursor_value: Optional[str] = field( + repr=False, default=None + ) # tracks the latest observed datetime, which may not be safe to emit in the case of out-of-order records + _cursor: Optional[str] = field(repr=False, default=None) + message_repository: Optional[MessageRepository] = None + + def __post_init__(self, parameters: Mapping[str, Any]) -> None: + self._state: Optional[int] = None + self._start_boundary: int = 0 + self.cursor_field = InterpolatedString.create(self.cursor_field, parameters=parameters).eval(self.config) + self.field_name = InterpolatedString.create(self.field_name, parameters=parameters).eval(self.config) + + def get_stream_state(self) -> StreamState: + return {self.cursor_field: self._cursor} if self._cursor else {} + + def set_initial_state(self, stream_state: StreamState) -> None: + """ + Cursors are not initialized with their state. As state is needed in order to function properly, this method should be called + before calling anything else + + :param stream_state: The state of the stream as returned by get_stream_state + """ + + self._cursor = stream_state.get(self.cursor_field) if stream_state else None + self._start_boundary = self._cursor if self._cursor else 0 + self._state = self._cursor if self._cursor else self._state + + def observe(self, stream_slice: StreamSlice, record: Record) -> None: + """ + Register a record with the cursor; the cursor instance can then use it to manage the state of the in-progress stream read. + + :param record: the most recently-read record, which the cursor can use to update the stream state. Outwardly-visible changes to the + stream state may need to be deferred depending on whether the source reliably orders records by the cursor field. + """ + record_cursor_value = record.get(self.cursor_field) + if self._is_within_boundaries(record, self._start_boundary): + self._highest_observed_record_cursor_value = record_cursor_value if record_cursor_value else self._start_boundary + + def _is_within_boundaries( + self, + record: Record, + start_boundary: int, + ) -> bool: + record_cursor_value = record.get(self.cursor_field) + if not record_cursor_value: + self._send_log( + Level.WARN, + f"Could not find cursor field `{self.cursor_field}` in record. The record will not be considered when emitting sync state", + ) + return False + return start_boundary <= record_cursor_value + + def collect_cursor_values(self) -> Mapping[str, Optional[int]]: + """ + Makes the `cursor_values` using `stream_slice` and `most_recent_record`. + """ + cursor_values: dict = { + "state": self._cursor if self._cursor else self._start_boundary, + "highest_observed_record_value": self._highest_observed_record_cursor_value + if self._highest_observed_record_cursor_value + else self._start_boundary, + } + # filter out the `NONE` STATE values from the `cursor_values` + return {key: value for key, value in cursor_values.items()} + + def process_state(self, cursor_values: Optional[dict] = None) -> Optional[int]: + state_value = cursor_values.get("state") if cursor_values else 0 + highest_observed_value = cursor_values.get("highest_observed_record_value") if cursor_values else 0 + return max(state_value, highest_observed_value) + + def close_slice(self, stream_slice: StreamSlice) -> None: + cursor_values: dict = self.collect_cursor_values() + self._cursor = self.process_state(cursor_values) if cursor_values else 0 + + def stream_slices(self) -> Iterable[StreamSlice]: + """ + Use a single Slice. + """ + return [StreamSlice(partition={}, cursor_slice={})] + + def get_request_params( + self, + *, + stream_state: Optional[StreamState] = None, + stream_slice: Optional[StreamSlice] = None, + next_page_token: Optional[Mapping[str, Any]] = None, + ) -> Mapping[str, Any]: + return self._get_request_options(RequestOptionType.request_parameter, stream_slice) + + def _get_request_options(self, option_type: RequestOptionType, stream_slice: StreamSlice): + options = {} + if self._state: + options[self.field_name] = self._state + return options + + def should_be_synced(self, record: Record) -> bool: + record_cursor_value: int = record.get(self.cursor_field) + if not record_cursor_value: + self._send_log( + Level.WARN, + f"Could not find cursor field `{self.cursor_field}` in record. The incremental sync will assume it needs to be synced", + ) + return True + latest_possible_cursor_value = self._cursor if self._cursor else 0 + return latest_possible_cursor_value <= record_cursor_value + + def _send_log(self, level: Level, message: str) -> None: + if self.message_repository: + self.message_repository.emit_message( + AirbyteMessage( + type=Type.LOG, + log=AirbyteLogMessage(level=level, message=message), + ) + ) + + def is_greater_than_or_equal(self, first: Record, second: Record) -> bool: + first_cursor_value = first.get(self.cursor_field) + second_cursor_value = second.get(self.cursor_field) + if first_cursor_value and second_cursor_value: + return first_cursor_value >= second_cursor_value + elif first_cursor_value: + return True + else: + return False diff --git a/airbyte-integrations/connectors/source-zendesk-chat/source_zendesk_chat/components/id_offset_pagination.py b/airbyte-integrations/connectors/source-zendesk-chat/source_zendesk_chat/components/id_offset_pagination.py new file mode 100644 index 0000000000000..9c3eb3109f52b --- /dev/null +++ b/airbyte-integrations/connectors/source-zendesk-chat/source_zendesk_chat/components/id_offset_pagination.py @@ -0,0 +1,61 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from dataclasses import dataclass +from typing import Any, List, Mapping, Optional, Union + +import requests +from airbyte_cdk.sources.declarative.interpolation.interpolated_string import InterpolatedString +from airbyte_cdk.sources.declarative.requesters.paginators.strategies import OffsetIncrement + + +@dataclass +class ZendeskChatIdOffsetIncrementPaginationStrategy(OffsetIncrement): + """ + Id Offset Pagination docs: + https://developer.zendesk.com/api-reference/live-chat/chat-api/agents/#pagination + + Attributes: + page_size (InterpolatedString): the number of records to request, + id_field (InterpolatedString): the name of the to track and increment from, {: 1234} + """ + + id_field: Union[InterpolatedString, str] = None + + def __post_init__(self, parameters: Mapping[str, Any], **kwargs) -> None: + if not self.id_field: + raise ValueError("The `id_field` property is missing, with no-default value.") + else: + self._id_field = InterpolatedString.create(self.id_field, parameters=parameters).eval(self.config) + super().__post_init__(parameters=parameters, **kwargs) + + def should_stop_pagination(self, decoded_response: Mapping[str, Any], last_records: List[Mapping[str, Any]]) -> bool: + """ + Stop paginating when there are fewer records than the page size or the current page has no records + """ + last_records_len = len(last_records) + no_records = last_records_len == 0 + current_page_len = self._page_size.eval(self.config, response=decoded_response) + return (self._page_size and last_records_len < current_page_len) or no_records + + def get_next_page_token_offset(self, last_records: List[Mapping[str, Any]]) -> int: + """ + The `IDs` are returned in `ASC` order, we add `+1` to the ID integer value to avoid the record duplicates, + Described in: https://developer.zendesk.com/api-reference/live-chat/chat-api/agents/#pagination + + Arguments: + last_records: List[Records] -- decoded from the RESPONSE. + + Returns: + The offset value as the `next_page_token` + """ + self._offset = last_records[-1][self._id_field] + return self._offset + 1 + + def next_page_token(self, response: requests.Response, last_records: List[Mapping[str, Any]]) -> Optional[Any]: + decoded_response = self.decoder.decode(response) + if self.should_stop_pagination(decoded_response, last_records): + return None + else: + return self.get_next_page_token_offset(last_records) diff --git a/airbyte-integrations/connectors/source-zendesk-chat/source_zendesk_chat/components/time_offset_pagination.py b/airbyte-integrations/connectors/source-zendesk-chat/source_zendesk_chat/components/time_offset_pagination.py new file mode 100644 index 0000000000000..284325c12e3b0 --- /dev/null +++ b/airbyte-integrations/connectors/source-zendesk-chat/source_zendesk_chat/components/time_offset_pagination.py @@ -0,0 +1,61 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from dataclasses import dataclass +from typing import Any, List, Mapping, Optional, Union + +import requests +from airbyte_cdk.sources.declarative.interpolation.interpolated_string import InterpolatedString +from airbyte_cdk.sources.declarative.requesters.paginators.strategies import OffsetIncrement + + +@dataclass +class ZendeskChatTimeOffsetIncrementPaginationStrategy(OffsetIncrement): + """ + Time Offset Pagination docs: + https://developer.zendesk.com/api-reference/live-chat/chat-api/agents/#pagination + + Attributes: + page_size (InterpolatedString): the number of records to request, + time_field_name (InterpolatedString): the name of the to track and increment from, {: 1234} + """ + + time_field_name: Union[InterpolatedString, str] = None + + def __post_init__(self, parameters: Mapping[str, Any], **kwargs) -> None: + if not self.time_field_name: + raise ValueError("The `time_field_name` property is missing, with no-default value.") + else: + self._time_field_name = InterpolatedString.create(self.time_field_name, parameters=parameters).eval(self.config) + super().__post_init__(parameters=parameters, **kwargs) + + def should_stop_pagination(self, decoded_response: Mapping[str, Any], last_records: List[Mapping[str, Any]]) -> bool: + """ + Stop paginating when there are fewer records than the page size or the current page has no records + """ + last_records_len = len(last_records) + no_records = last_records_len == 0 + current_page_len = self._page_size.eval(self.config, response=decoded_response) + return (self._page_size and last_records_len < current_page_len) or no_records + + def get_next_page_token_offset(self, decoded_response: Mapping[str, Any]) -> int: + """ + The `records` are returned in `ASC` order. + Described in: https://developer.zendesk.com/api-reference/live-chat/chat-api/incremental_export/#incremental-agent-timeline-export + + Arguments: + decoded_response: Mapping[str, Any] -- The object with RECORDS decoded from the RESPONSE. + + Returns: + The offset value as the `next_page_token` + """ + self._offset = decoded_response[self._time_field_name] + return self._offset + + def next_page_token(self, response: requests.Response, last_records: List[Mapping[str, Any]]) -> Optional[Any]: + decoded_response = self.decoder.decode(response) + if self.should_stop_pagination(decoded_response, last_records): + return None + else: + return self.get_next_page_token_offset(decoded_response) diff --git a/airbyte-integrations/connectors/source-zendesk-chat/source_zendesk_chat/components/timestamp_based_cursor.py b/airbyte-integrations/connectors/source-zendesk-chat/source_zendesk_chat/components/timestamp_based_cursor.py new file mode 100644 index 0000000000000..caab6dfc3cf90 --- /dev/null +++ b/airbyte-integrations/connectors/source-zendesk-chat/source_zendesk_chat/components/timestamp_based_cursor.py @@ -0,0 +1,57 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +from dataclasses import dataclass +from typing import Any, Mapping, MutableMapping, Optional, Union + +from airbyte_cdk.sources.declarative.incremental import DatetimeBasedCursor +from airbyte_cdk.sources.declarative.interpolation.interpolated_string import InterpolatedString +from airbyte_cdk.sources.declarative.types import Record, StreamSlice, StreamState + + +@dataclass +class ZendeskChatTimestampCursor(DatetimeBasedCursor): + """ + Override for the default `DatetimeBasedCursor` to provide the `request_params["start_time"]` with added `microseconds`, as required by the API. + More info: https://developer.zendesk.com/rest_api/docs/chat/incremental_export#incremental-agent-timeline-export + + The dates in future are not(!) allowed for the Zendesk Chat endpoints, and slicer values could be far away from exact cursor values. + + Arguments: + use_microseconds: bool - whether or not to add dummy `000000` (six zeros) to provide the microseconds unit timestamps + """ + + use_microseconds: Union[InterpolatedString, str] = True + + def __post_init__(self, parameters: Mapping[str, Any]) -> None: + self._use_microseconds = InterpolatedString.create(self.use_microseconds, parameters=parameters).eval(self.config) + self._start_date = self.config.get("start_date") + super().__post_init__(parameters=parameters) + + def add_microseconds( + self, + params: MutableMapping[str, Any], + stream_slice: Optional[StreamSlice] = None, + ) -> MutableMapping[str, Any]: + start_time = stream_slice.get(self._partition_field_start.eval(self.config)) + if start_time: + params[self.start_time_option.field_name.eval(config=self.config)] = int(start_time) * 1000000 + return params + + def get_request_params( + self, + *, + stream_state: Optional[StreamState] = None, + stream_slice: Optional[StreamSlice] = None, + next_page_token: Optional[Mapping[str, Any]] = None, + ) -> Mapping[str, Any]: + params = {} + if self._use_microseconds: + params = self.add_microseconds(params, stream_slice) + else: + params[self.start_time_option.field_name.eval(config=self.config)] = stream_slice.get( + self._partition_field_start.eval(self.config) + ) + return params diff --git a/airbyte-integrations/connectors/source-zendesk-chat/source_zendesk_chat/manifest.yaml b/airbyte-integrations/connectors/source-zendesk-chat/source_zendesk_chat/manifest.yaml new file mode 100644 index 0000000000000..5a5ff833c1e7a --- /dev/null +++ b/airbyte-integrations/connectors/source-zendesk-chat/source_zendesk_chat/manifest.yaml @@ -0,0 +1,318 @@ +version: 0.72.1 + +definitions: + # COMMON PARTS + schema_loader: + type: JsonFileSchemaLoader + file_path: "./source_zendesk_chat/schemas/{{ parameters['name'] }}.json" + selector: + description: >- + Base records selector for Full Refresh streams + type: RecordSelector + extractor: + type: DpathExtractor + field_path: ["{{ parameters.get('data_field') }}"] + authenticator: + type: BearerAuthenticator + api_token: "{{ config['credentials']['access_token'] }}" + + # PAGINATORS + paginator: + type: DefaultPaginator + page_token_option: + type: RequestOption + inject_into: request_parameter + field_name: cursor + page_size_option: + inject_into: request_parameter + type: RequestOption + field_name: limit + pagination_strategy: + type: CursorPagination + page_size: 100 + cursor_value: '{{ response.get("next_url", {}) }}' + stop_condition: '{{ not response.get("next_url", {}) }}' + paginator_id_offset: + type: DefaultPaginator + page_token_option: + type: RequestOption + inject_into: request_parameter + field_name: since_id + page_size_option: + inject_into: request_parameter + type: RequestOption + field_name: limit + pagination_strategy: + type: CustomPaginationStrategy + class_name: source_zendesk_chat.components.id_offset_pagination.ZendeskChatIdOffsetIncrementPaginationStrategy + id_field: id + page_size: 100 + paginator_time_offset: + type: DefaultPaginator + page_token_option: + type: RequestOption + inject_into: request_parameter + field_name: start_time + page_size_option: + inject_into: request_parameter + type: RequestOption + field_name: limit + pagination_strategy: + type: CustomPaginationStrategy + class_name: source_zendesk_chat.components.time_offset_pagination.ZendeskChatTimeOffsetIncrementPaginationStrategy + time_field_name: end_time + page_size: 1000 + + # REQUESTERS + requester: + description: >- + Default Base Requester for Full Refresh streams + type: HttpRequester + url_base: https://www.zopim.com/api/v2/ + path: "{{ parameters['path'] }}" + http_method: GET + authenticator: + $ref: "#/definitions/authenticator" + error_handler: + type: DefaultErrorHandler + description: >- + The default error handler + backoff_strategies: + - type: WaitTimeFromHeader + header: Retry-After + + # RETRIEVERS + retriever_base: + description: >- + Default Retriever for Full Refresh streams + record_selector: + $ref: "#/definitions/selector" + requester: + $ref: "#/definitions/requester" + paginator: + $ref: "#/definitions/paginator" + retriever_for_type_list: + $ref: "#/definitions/retriever_base" + record_selector: + $ref: "#/definitions/selector" + extractor: + type: DpathExtractor + field_path: [] + retriever_for_type_list_no_pagination: + $ref: "#/definitions/retriever_for_type_list" + paginator: + type: NoPagination + + # BASE STREAMS + base_stream: + primary_key: "id" + schema_loader: + $ref: "#/definitions/schema_loader" + retriever: + $ref: "#/definitions/retriever_base" + # FULL-REFRESH + base_stream_with_list_response_no_pagination: + $ref: "#/definitions/base_stream" + retriever: + $ref: "#/definitions/retriever_for_type_list_no_pagination" + base_stream_with_id_offset_pagination: + $ref: "#/definitions/base_stream" + retriever: + $ref: "#/definitions/retriever_for_type_list" + paginator: + $ref: "#/definitions/paginator_id_offset" + base_stream_with_time_offset_pagination: + $ref: "#/definitions/base_stream" + retriever: + $ref: "#/definitions/retriever_base" + paginator: + $ref: "#/definitions/paginator_time_offset" + requester: + $ref: "#/definitions/requester" + request_parameters: + # add `fields=(*)` to the request_params + fields: "{{ parameters['name'] + '(*)' }}" + + # INCREMENTAL + base_incremental_id_stream: + $ref: "#/definitions/base_stream_with_id_offset_pagination" + retriever: + $ref: "#/definitions/base_stream_with_id_offset_pagination/retriever" + # this is needed to ignore additional params for incremental syncs + ignore_stream_slicer_parameters_on_paginated_requests: true + incremental_sync: + type: CustomIncrementalSync + class_name: source_zendesk_chat.components.id_incremental_cursor.ZendeskChatIdIncrementalCursor + cursor_field: "id" + field_name: "since_id" + base_incremental_time_stream: + $ref: "#/definitions/base_stream_with_time_offset_pagination" + retriever: + $ref: "#/definitions/base_stream_with_time_offset_pagination/retriever" + # this is needed to ignore additional params for incremental syncs + ignore_stream_slicer_parameters_on_paginated_requests: true + incremental_sync: + type: CustomIncrementalSync + class_name: source_zendesk_chat.components.timestamp_based_cursor.ZendeskChatTimestampCursor + use_microseconds: "{{ parameters['use_microseconds'] }}" + cursor_field: "{{ parameters['cursor_field'] }}" + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%S.%fZ" + - "%Y-%m-%dT%H:%M:%SZ" + datetime_format: "%s" + start_datetime: + datetime: "{{ format_datetime(config['start_date'], '%s') }}" + start_time_option: + field_name: start_time + inject_into: "request_parameter" + + # FULL-REFRESH STREAMS + # ACCOUNTS + accounts_stream: + description: >- + Accounts Stream: https://developer.zendesk.com/rest_api/docs/chat/accounts#show-account + $ref: "#/definitions/base_stream_with_list_response_no_pagination" + primary_key: "account_key" + $parameters: + name: "accounts" + path: "account" + # SHORTCUTS + shortcuts_stream: + description: >- + Shortcuts Stream: https://developer.zendesk.com/rest_api/docs/chat/shortcuts#list-shortcuts + $ref: "#/definitions/base_stream_with_list_response_no_pagination" + $parameters: + name: "shortcuts" + path: "shortcuts" + # ROUTING SETTINGS + routing_settings_stream: + description: >- + Routing Settings Stream: https://developer.zendesk.com/rest_api/docs/chat/routing_settings#show-account-routing-settings + $ref: "#/definitions/base_stream_with_list_response_no_pagination" + retriever: + $ref: "#/definitions/base_stream_with_list_response_no_pagination/retriever" + record_selector: + extractor: + type: DpathExtractor + field_path: ["data"] + primary_key: "" + $parameters: + name: "routing_settings" + data_field: "data" + path: "routing_settings/account" + # TRIGGERS + triggers_stream: + description: >- + Triggers Stream: https://developer.zendesk.com/rest_api/docs/chat/triggers#list-triggers + $ref: "#/definitions/base_stream_with_list_response_no_pagination" + $parameters: + name: "triggers" + path: "triggers" + # ROLES + roles_stream: + description: >- + Roles Stream: https://developer.zendesk.com/rest_api/docs/chat/roles#list-roles + $ref: "#/definitions/base_stream_with_list_response_no_pagination" + $parameters: + name: "roles" + path: "roles" + # SKILLS + skills_stream: + description: >- + Skills Stream: https://developer.zendesk.com/rest_api/docs/chat/skills#list-skills + $ref: "#/definitions/base_stream_with_list_response_no_pagination" + $parameters: + name: "skills" + path: "skills" + # GOALS + goals_stream: + description: >- + Goals Stream: https://developer.zendesk.com/rest_api/docs/chat/goals#list-goals + $ref: "#/definitions/base_stream_with_list_response_no_pagination" + $parameters: + name: "goals" + path: "goals" + # DEPARTMENTS + departments_stream: + description: >- + Departments Stream: https://developer.zendesk.com/rest_api/docs/chat/departments#list-departments + $ref: "#/definitions/base_stream_with_list_response_no_pagination" + $parameters: + name: "departments" + path: "departments" + + # INCREMENTAL STREAMS + # AGENTS + agents_stream: + description: >- + Agents Stream: https://developer.zendesk.com/rest_api/docs/chat/agents#list-agents + $ref: "#/definitions/base_incremental_id_stream" + $parameters: + name: "agents" + path: "agents" + # BANS + bans_stream: + description: >- + Bans Stream: https://developer.zendesk.com/rest_api/docs/chat/bans#list-bans + $ref: "#/definitions/base_incremental_id_stream" + retriever: + $ref: "#/definitions/base_incremental_id_stream/retriever" + record_selector: + type: RecordSelector + extractor: + type: CustomRecordExtractor + class_name: source_zendesk_chat.components.bans_record_extractor.ZendeskChatBansRecordExtractor + $parameters: + name: "bans" + path: "bans" + # AGENTS TIMELINES + agents_timelines_stream: + description: >- + Agent Timelines Stream: https://developer.zendesk.com/rest_api/docs/chat/incremental_export#incremental-agent-timeline-export + $ref: "#/definitions/base_incremental_time_stream" + transformations: + - type: AddFields + fields: + # To preserve the non-breaking changes, the additional transformations should be applied + # 1) transform the `start_time` - cursor_field to have the old datetime format == %Y-%m-%dT%H:%M:%SZ (2023-01-01T00:00:00) + - path: ["start_time"] + value: "{{ format_datetime(record.get('start_time', config['start_date']), '%Y-%m-%dT%H:%M:%SZ') }}" + # 2) make the composite `id` field + - path: ["id"] + value: "{{ record.get('agent_id', '')|string + '|' + record.get('start_time', '')|string }}" + $parameters: + cursor_field: "start_time" + name: "agent_timeline" + data_field: "agent_timeline" + path: "incremental/agent_timeline" + use_microseconds: true + # CHATS + chats_stream: + description: >- + Chats Stream: https://developer.zendesk.com/api-reference/live-chat/chat-api/incremental_export/#incremental-chat-export + $ref: "#/definitions/base_incremental_time_stream" + $parameters: + cursor_field: "update_timestamp" + name: "chats" + data_field: "chats" + path: "incremental/chats" + use_microseconds: false + +streams: + - "#/definitions/accounts_stream" + - "#/definitions/agents_stream" + - "#/definitions/agents_timelines_stream" + - "#/definitions/bans_stream" + - "#/definitions/chats_stream" + - "#/definitions/departments_stream" + - "#/definitions/goals_stream" + - "#/definitions/roles_stream" + - "#/definitions/routing_settings_stream" + - "#/definitions/shortcuts_stream" + - "#/definitions/skills_stream" + - "#/definitions/triggers_stream" + +check: + type: CheckStream + stream_names: + - routing_settings diff --git a/airbyte-integrations/connectors/source-zendesk-chat/source_zendesk_chat/schemas/agent_timeline.json b/airbyte-integrations/connectors/source-zendesk-chat/source_zendesk_chat/schemas/agent_timeline.json index 4a61d458898a5..04424877eda15 100644 --- a/airbyte-integrations/connectors/source-zendesk-chat/source_zendesk_chat/schemas/agent_timeline.json +++ b/airbyte-integrations/connectors/source-zendesk-chat/source_zendesk_chat/schemas/agent_timeline.json @@ -2,6 +2,9 @@ "$schema": "http://json-schema.org/draft-07/schema#", "type": "object", "properties": { + "id": { + "type": ["null", "string"] + }, "agent_id": { "type": ["null", "integer"] }, diff --git a/airbyte-integrations/connectors/source-zendesk-chat/source_zendesk_chat/source.py b/airbyte-integrations/connectors/source-zendesk-chat/source_zendesk_chat/source.py index bcad700d13d4a..2b0540f7cd8f1 100644 --- a/airbyte-integrations/connectors/source-zendesk-chat/source_zendesk_chat/source.py +++ b/airbyte-integrations/connectors/source-zendesk-chat/source_zendesk_chat/source.py @@ -2,58 +2,16 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource -from typing import Any, Dict, List, Mapping, Tuple +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. +WARNING: Do not modify this file. +""" -from airbyte_cdk.models import SyncMode -from airbyte_cdk.sources import AbstractSource -from airbyte_cdk.sources.streams import Stream -from airbyte_cdk.sources.streams.http.requests_native_auth import TokenAuthenticator -from .streams import Accounts, Agents, AgentTimelines, Bans, Chats, Departments, Goals, Roles, RoutingSettings, Shortcuts, Skills, Triggers - - -class ZendeskAuthentication: - """Provides the authentication capabilities for both old and new methods.""" - - def __init__(self, config: Dict): - self.config = config - - def get_auth(self) -> TokenAuthenticator: - """Return the TokenAuthenticator object with access_token.""" - - # the old config supports for backward capability - access_token = self.config.get("access_token") - if not access_token: - # the new config supports `OAuth2.0` - access_token = self.config["credentials"]["access_token"] - - return TokenAuthenticator(token=access_token) - - -class SourceZendeskChat(AbstractSource): - def check_connection(self, logger, config) -> Tuple[bool, any]: - authenticator = ZendeskAuthentication(config).get_auth() - try: - records = RoutingSettings(authenticator=authenticator).read_records(sync_mode=SyncMode.full_refresh) - next(records) - return True, None - except Exception as error: - return False, f"Unable to connect to Zendesk Chat API with the provided credentials - {error}" - - def streams(self, config: Mapping[str, Any]) -> List[Stream]: - authenticator = ZendeskAuthentication(config).get_auth() - return [ - Accounts(authenticator=authenticator), - AgentTimelines(authenticator=authenticator, start_date=config["start_date"]), - Agents(authenticator=authenticator), - Bans(authenticator=authenticator), - Chats(authenticator=authenticator, start_date=config["start_date"]), - Departments(authenticator=authenticator), - Goals(authenticator=authenticator), - Roles(authenticator=authenticator), - RoutingSettings(authenticator=authenticator), - Shortcuts(authenticator=authenticator), - Skills(authenticator=authenticator), - Triggers(authenticator=authenticator), - ] +# Declarative Source +class SourceZendeskChat(YamlDeclarativeSource): + def __init__(self) -> None: + super().__init__(**{"path_to_yaml": "manifest.yaml"}) diff --git a/airbyte-integrations/connectors/source-zendesk-chat/source_zendesk_chat/streams.py b/airbyte-integrations/connectors/source-zendesk-chat/source_zendesk_chat/streams.py deleted file mode 100644 index 353c87030e9bf..0000000000000 --- a/airbyte-integrations/connectors/source-zendesk-chat/source_zendesk_chat/streams.py +++ /dev/null @@ -1,315 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from abc import ABC, abstractmethod -from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Union -from urllib.parse import parse_qs, urlparse - -import pendulum -import requests -from airbyte_cdk.sources.streams.availability_strategy import AvailabilityStrategy -from airbyte_cdk.sources.streams.http import HttpStream - - -class Stream(HttpStream, ABC): - url_base = "https://www.zopim.com/api/v2/" - primary_key = "id" - - data_field = None - - limit = 100 - - @property - def availability_strategy(self) -> Optional["AvailabilityStrategy"]: - return None - - def request_kwargs( - self, - stream_state: Mapping[str, Any], - stream_slice: Mapping[str, Any] = None, - next_page_token: Mapping[str, Any] = None, - ) -> Mapping[str, Any]: - - return {"timeout": 60} - - def backoff_time(self, response: requests.Response) -> Optional[float]: - delay_time = response.headers.get("Retry-After") - if delay_time: - return int(delay_time) - - def path(self, **kwargs) -> str: - return self.name - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - response_data = response.json() - - if "next_url" in response_data: - next_url = response_data["next_url"] - cursor = parse_qs(urlparse(next_url).query)["cursor"] - return {"cursor": cursor} - - def request_params( - self, stream_state: Mapping[str, Any], next_page_token: Mapping[str, Any] = None, **kwargs - ) -> MutableMapping[str, Any]: - params = {"limit": self.limit} - if next_page_token: - params.update(next_page_token) - - return params - - def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - response_data = response.json() - stream_data = self.get_stream_data(response_data) - - yield from stream_data - - def get_stream_data(self, response_data: Any) -> List[dict]: - if self.data_field: - response_data = response_data.get(self.data_field, []) - - if isinstance(response_data, list): - return list(map(self.parse_response_obj, response_data)) - elif isinstance(response_data, dict): - return [self.parse_response_obj(response_data)] - else: - raise Exception(f"Unsupported type of response data for stream {self.name}") - - def parse_response_obj(self, response_obj: dict) -> dict: - return response_obj - - -class BaseIncrementalStream(Stream, ABC): - @property - @abstractmethod - def cursor_field(self) -> str: - """ - Defining a cursor field indicates that a stream is incremental, so any incremental stream must extend this class - and define a cursor field. - """ - - @abstractmethod - def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: - """ - Return the latest state by comparing the cursor value in the latest record with the stream's most recent state object - and returning an updated state object. - """ - - @staticmethod - def _field_to_datetime(value: Union[int, str]) -> pendulum.datetime: - if isinstance(value, int): - value = pendulum.from_timestamp(value / 1000.0) - elif isinstance(value, str): - value = pendulum.parse(value) - else: - raise ValueError(f"Unsupported type of datetime field {type(value)}") - return value - - -class TimeIncrementalStream(BaseIncrementalStream, ABC): - - state_checkpoint_interval = 1000 - - def __init__(self, start_date, **kwargs): - super().__init__(**kwargs) - self._start_date = pendulum.parse(start_date) - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - response_data = response.json() - if response_data["count"] == self.limit: - return {"start_time": response_data["end_time"]} - - def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: - latest_benchmark = self._field_to_datetime(latest_record[self.cursor_field]) - if current_stream_state.get(self.cursor_field): - state = max(latest_benchmark, self._field_to_datetime(current_stream_state[self.cursor_field])) - return {self.cursor_field: state.strftime("%Y-%m-%dT%H:%M:%SZ")} - return {self.cursor_field: latest_benchmark.strftime("%Y-%m-%dT%H:%M:%SZ")} - - def request_params( - self, stream_state: Mapping[str, Any], next_page_token: Mapping[str, Any] = None, **kwargs - ) -> MutableMapping[str, Any]: - params = super().request_params(stream_state=stream_state, next_page_token=next_page_token) - if next_page_token: - params.update(next_page_token) - else: - start_datetime = self._start_date - if stream_state.get(self.cursor_field): - start_datetime = pendulum.parse(stream_state[self.cursor_field]) - - params.update({"start_time": int(start_datetime.timestamp())}) - - params.update({"fields": f"{self.name}(*)"}) - return params - - def path(self, **kwargs) -> str: - return f"incremental/{self.name}" - - def parse_response_obj(self, response_obj: dict) -> dict: - response_obj[self.cursor_field] = pendulum.parse(response_obj[self.cursor_field]).strftime("%Y-%m-%dT%H:%M:%SZ") - return response_obj - - -class IdIncrementalStream(BaseIncrementalStream): - cursor_field = "id" - - def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: - latest_benchmark = latest_record[self.cursor_field] - if current_stream_state.get(self.cursor_field): - return {self.cursor_field: max(latest_benchmark, current_stream_state[self.cursor_field])} - return {self.cursor_field: latest_benchmark} - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - stream_data = self.get_stream_data(response.json()) - if len(stream_data) == self.limit: - last_object_id = stream_data[-1]["id"] - return {"since_id": last_object_id} - - def request_params( - self, stream_state: Mapping[str, Any], next_page_token: Mapping[str, Any] = None, **kwargs - ) -> MutableMapping[str, Any]: - params = super().request_params(stream_state=stream_state, next_page_token=next_page_token) - - if next_page_token: - params.update(next_page_token) - elif stream_state.get(self.cursor_field): - params.update({"since_id": stream_state[self.cursor_field]}) - - return params - - -class Agents(IdIncrementalStream): - """ - Agents Stream: https://developer.zendesk.com/rest_api/docs/chat/agents#list-agents - """ - - -class AgentTimelines(TimeIncrementalStream): - """ - Agent Timelines Stream: https://developer.zendesk.com/rest_api/docs/chat/incremental_export#incremental-agent-timeline-export - """ - - primary_key = None - cursor_field = "start_time" - data_field = "agent_timeline" - name = "agent_timeline" - limit = 1000 - - def request_params(self, **kwargs) -> MutableMapping[str, Any]: - params = super().request_params(**kwargs) - if not kwargs.get("next_page_token"): - params["start_time"] = params["start_time"] * 1000000 - return params - - def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - response_data = response.json() - stream_data = self.get_stream_data(response_data) - - def generate_key(record): - record.update({"id": "|".join((str(record.get("agent_id", "")), str(record.get("start_time", ""))))}) - return record - - # associate the surrogate key - yield from map( - generate_key, - stream_data, - ) - - -class Accounts(Stream): - """ - Accounts Stream: https://developer.zendesk.com/rest_api/docs/chat/accounts#show-account - """ - - primary_key = "account_key" - - def path(self, **kwargs) -> str: - return "account" - - -class Chats(TimeIncrementalStream): - """ - Chats Stream: https://developer.zendesk.com/api-reference/live-chat/chat-api/incremental_export/#incremental-chat-export - """ - - cursor_field = "update_timestamp" - data_field = "chats" - limit = 1000 - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - response_data = response.json() - if response_data["count"] == self.limit: - next_page = {"start_time": response_data["end_time"]} - - start_id = response_data.get("end_id") - if start_id: - next_page.update({"start_id": start_id}) - - return next_page - - -class Shortcuts(Stream): - """ - Shortcuts Stream: https://developer.zendesk.com/rest_api/docs/chat/shortcuts#list-shortcuts - """ - - -class Triggers(Stream): - """ - Triggers Stream: https://developer.zendesk.com/rest_api/docs/chat/triggers#list-triggers - """ - - -class Bans(IdIncrementalStream): - """ - Bans Stream: https://developer.zendesk.com/rest_api/docs/chat/bans#list-bans - """ - - def get_stream_data(self, response_data) -> List[dict]: - bans = response_data["ip_address"] + response_data["visitor"] - bans = sorted(bans, key=lambda x: pendulum.parse(x["created_at"]) if x["created_at"] else pendulum.datetime(1970, 1, 1)) - return bans - - -class Departments(Stream): - """ - Departments Stream: https://developer.zendesk.com/rest_api/docs/chat/departments#list-departments - """ - - -class Goals(Stream): - """ - Goals Stream: https://developer.zendesk.com/rest_api/docs/chat/goals#list-goals - """ - - -class Skills(Stream): - """ - Skills Stream: https://developer.zendesk.com/rest_api/docs/chat/skills#list-skills - """ - - -class Roles(Stream): - """ - Roles Stream: https://developer.zendesk.com/rest_api/docs/chat/roles#list-roles - """ - - -class RoutingSettings(Stream): - """ - Routing Settings Stream: https://developer.zendesk.com/rest_api/docs/chat/routing_settings#show-account-routing-settings - """ - - primary_key = "" - - name = "routing_settings" - data_field = "data" - - def path( - self, - stream_state: Mapping[str, Any] = None, - stream_slice: Mapping[str, Any] = None, - next_page_token: Mapping[str, Any] = None, - ) -> str: - return "routing_settings/account" diff --git a/airbyte-integrations/connectors/source-zendesk-chat/unit_tests/__init__.py b/airbyte-integrations/connectors/source-zendesk-chat/unit_tests/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/airbyte-integrations/connectors/source-zendesk-chat/unit_tests/components/__init__.py b/airbyte-integrations/connectors/source-zendesk-chat/unit_tests/components/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/airbyte-integrations/connectors/source-zendesk-chat/unit_tests/components/conftest.py b/airbyte-integrations/connectors/source-zendesk-chat/unit_tests/components/conftest.py new file mode 100644 index 0000000000000..c48196cfa1edd --- /dev/null +++ b/airbyte-integrations/connectors/source-zendesk-chat/unit_tests/components/conftest.py @@ -0,0 +1,65 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from typing import Any, List, Mapping + +import pytest + + +@pytest.fixture +def config() -> Mapping[str, Any]: + return { + "start_date": "2020-10-01T00:00:00Z", + "subdomain": "", + "credentials": { + "credentials": "access_token", + "access_token": "__access_token__" + } + } + + +@pytest.fixture +def bans_stream_record() -> Mapping[str, Any]: + return { + "ip_address": [ + { + "reason": "test", + "type": "ip_address", + "id": 1234, + "created_at": "2021-04-21T14:42:46Z", + "ip_address": "0.0.0.0" + } + ], + "visitor": [ + { + "type": "visitor", + "id": 4444, + "visitor_name": "Visitor 4444", + "visitor_id": "visitor_id", + "reason": "test", + "created_at": "2021-04-27T13:25:01Z" + } + ] + } + + +@pytest.fixture +def bans_stream_record_extractor_expected_output() -> List[Mapping[str, Any]]: + return [ + { + "reason": "test", + "type": "ip_address", + "id": 1234, + "created_at": "2021-04-21T14:42:46Z", + "ip_address": "0.0.0.0" + }, + { + "type": "visitor", + "id": 4444, + "visitor_name": "Visitor 4444", + "visitor_id": "visitor_id", + "reason": "test", + "created_at": "2021-04-27T13:25:01Z" + }, + ] diff --git a/airbyte-integrations/connectors/source-zendesk-chat/unit_tests/components/test_bans_record_extractor.py b/airbyte-integrations/connectors/source-zendesk-chat/unit_tests/components/test_bans_record_extractor.py new file mode 100644 index 0000000000000..446bcc8f63dec --- /dev/null +++ b/airbyte-integrations/connectors/source-zendesk-chat/unit_tests/components/test_bans_record_extractor.py @@ -0,0 +1,17 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +import requests +from source_zendesk_chat.components.bans_record_extractor import ZendeskChatBansRecordExtractor + + +def test_bans_stream_record_extractor( + requests_mock, + bans_stream_record, + bans_stream_record_extractor_expected_output, +) -> None: + test_url = "https://www.zopim.com/api/v2/bans" + requests_mock.get(test_url, json=bans_stream_record) + test_response = requests.get(test_url) + assert ZendeskChatBansRecordExtractor().extract_records(test_response) == bans_stream_record_extractor_expected_output diff --git a/airbyte-integrations/connectors/source-zendesk-chat/unit_tests/components/test_id_incremental_cursor.py b/airbyte-integrations/connectors/source-zendesk-chat/unit_tests/components/test_id_incremental_cursor.py new file mode 100644 index 0000000000000..9557a312b6359 --- /dev/null +++ b/airbyte-integrations/connectors/source-zendesk-chat/unit_tests/components/test_id_incremental_cursor.py @@ -0,0 +1,114 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +import pytest +from source_zendesk_chat.components.id_incremental_cursor import ZendeskChatIdIncrementalCursor + + +def _get_cursor(config) -> ZendeskChatIdIncrementalCursor: + return ZendeskChatIdIncrementalCursor( + config = config, + cursor_field = "id", + field_name = "since_id", + parameters = {}, + ) + + +@pytest.mark.parametrize( + "stream_state, expected_cursor_value, expected_state_value", + [ + ({"id": 10}, 10, {'id': 10}), + ], + ids=[ + "SET Initial State and GET State" + ] +) +def test_id_incremental_cursor_set_initial_state_and_get_stream_state( + config, + stream_state, + expected_cursor_value, + expected_state_value, +) -> None: + cursor = _get_cursor(config) + cursor.set_initial_state(stream_state) + assert cursor._cursor == expected_cursor_value + assert cursor._state == expected_cursor_value + assert cursor.get_stream_state() == expected_state_value + + +@pytest.mark.parametrize( + "test_record, expected", + [ + ({"id": 123}, 123), + ({"id": 456}, 456), + ], + ids=[ + "first", + "second" + ] +) +def test_id_incremental_cursor_close_slice(config, test_record, expected) -> None: + cursor = _get_cursor(config) + cursor.observe(stream_slice={}, record=test_record) + cursor.close_slice(stream_slice={}) + assert cursor._cursor == expected + + +@pytest.mark.parametrize( + "stream_state, input_slice, expected", + [ + ({}, {"id": 1}, {}), + ({"id": 2}, {"id": 1}, {"since_id": 2}), + ], + ids=[ + "No State", + "With State" + ] +) +def test_id_incremental_cursor_get_request_params(config, stream_state, input_slice, expected) -> None: + cursor = _get_cursor(config) + if stream_state: + cursor.set_initial_state(stream_state) + assert cursor.get_request_params(stream_slice=input_slice) == expected + + +@pytest.mark.parametrize( + "stream_state, record, expected", + [ + ({}, {"id": 1}, True), + ({"id": 2}, {"id": 1}, False), + ({"id": 2}, {"id": 3}, True), + ], + ids=[ + "No State", + "With State > Record value", + "With State < Record value", + ] +) +def test_id_incremental_cursor_should_be_synced(config, stream_state, record, expected) -> None: + cursor = _get_cursor(config) + if stream_state: + cursor.set_initial_state(stream_state) + assert cursor.should_be_synced(record=record) == expected + + +@pytest.mark.parametrize( + "first_record, second_record, expected", + [ + ({"id": 2}, {"id": 1}, True), + ({"id": 2}, {"id": 3}, False), + ({"id": 3}, {}, True), + ({}, {}, False), + ], + ids=[ + "First > Second - should synced", + "First < Second - should not be synced", + "Has First but no Second - should be synced", + "Has no First and has no Second - should not be synced", + ] +) +def test_id_incremental_cursor_is_greater_than_or_equal(config, first_record, second_record, expected) -> None: + cursor = _get_cursor(config) + assert cursor.is_greater_than_or_equal(first=first_record, second=second_record) == expected diff --git a/airbyte-integrations/connectors/source-zendesk-chat/unit_tests/components/test_id_offset_pagination.py b/airbyte-integrations/connectors/source-zendesk-chat/unit_tests/components/test_id_offset_pagination.py new file mode 100644 index 0000000000000..5c5f4dd46b1ad --- /dev/null +++ b/airbyte-integrations/connectors/source-zendesk-chat/unit_tests/components/test_id_offset_pagination.py @@ -0,0 +1,32 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +import pytest +import requests +from source_zendesk_chat.components.id_offset_pagination import ZendeskChatIdOffsetIncrementPaginationStrategy + + +def _get_paginator(config, id_field) -> ZendeskChatIdOffsetIncrementPaginationStrategy: + return ZendeskChatIdOffsetIncrementPaginationStrategy( + config = config, + page_size = 1, + id_field = id_field, + parameters = {}, + ) + + +@pytest.mark.parametrize( + "id_field, last_records, expected", + [ + ("id", [{"id": 1}], 2), + ("id", [], None) + ], +) +def test_id_offset_increment_pagination_next_page_token(requests_mock, config, id_field, last_records, expected) -> None: + paginator = _get_paginator(config, id_field) + test_url = "https://www.zopim.com/api/v2/agents" + requests_mock.get(test_url, json=last_records) + test_response = requests.get(test_url) + assert paginator.next_page_token(test_response, last_records) == expected diff --git a/airbyte-integrations/connectors/source-zendesk-chat/unit_tests/components/test_time_offset_pagination.py b/airbyte-integrations/connectors/source-zendesk-chat/unit_tests/components/test_time_offset_pagination.py new file mode 100644 index 0000000000000..086ea195fac2d --- /dev/null +++ b/airbyte-integrations/connectors/source-zendesk-chat/unit_tests/components/test_time_offset_pagination.py @@ -0,0 +1,32 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +import pytest +import requests +from source_zendesk_chat.components.time_offset_pagination import ZendeskChatTimeOffsetIncrementPaginationStrategy + + +def _get_paginator(config, time_field_name) -> ZendeskChatTimeOffsetIncrementPaginationStrategy: + return ZendeskChatTimeOffsetIncrementPaginationStrategy( + config = config, + page_size = 1, + time_field_name = time_field_name, + parameters = {}, + ) + + +@pytest.mark.parametrize( + "time_field_name, response, last_records, expected", + [ + ("end_time", {"chats":[{"update_timestamp": 1}], "end_time": 2}, [{"update_timestamp": 1}], 2), + ("end_time", {"chats":[], "end_time": 3}, [], None), + ], +) +def test_time_offset_increment_pagination_next_page_token(requests_mock, config, time_field_name, response, last_records, expected) -> None: + paginator = _get_paginator(config, time_field_name) + test_url = "https://www.zopim.com/api/v2/chats" + requests_mock.get(test_url, json=response) + test_response = requests.get(test_url) + assert paginator.next_page_token(test_response, last_records) == expected diff --git a/airbyte-integrations/connectors/source-zendesk-chat/unit_tests/components/test_timestamp_based_cursor.py b/airbyte-integrations/connectors/source-zendesk-chat/unit_tests/components/test_timestamp_based_cursor.py new file mode 100644 index 0000000000000..a98cc8283e930 --- /dev/null +++ b/airbyte-integrations/connectors/source-zendesk-chat/unit_tests/components/test_timestamp_based_cursor.py @@ -0,0 +1,54 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +import pytest +from airbyte_cdk.sources.declarative.requesters.request_option import RequestOption, RequestOptionType +from source_zendesk_chat.components.timestamp_based_cursor import ZendeskChatTimestampCursor + + +def _get_cursor(config, cursor_field, use_microseconds) -> ZendeskChatTimestampCursor: + cursor = ZendeskChatTimestampCursor( + start_datetime = "2020-10-01T00:00:00Z", + cursor_field = cursor_field, + datetime_format = "%s", + config = config, + parameters = {}, + use_microseconds = f"{{{ {use_microseconds} }}}", + ) + # patching missing parts + cursor.start_time_option = RequestOption( + field_name = cursor_field, + inject_into = RequestOptionType.request_parameter, + parameters={}, + ) + return cursor + + +@pytest.mark.parametrize( + "use_microseconds, input_slice, expected", + [ + (True, {"start_time": 1}, {'start_time': 1000000}), + ], +) +def test_timestamp_based_cursor_add_microseconds(config, use_microseconds, input_slice, expected) -> None: + cursor = _get_cursor(config, "start_time", use_microseconds) + test_result = cursor.add_microseconds({}, input_slice) + assert test_result == expected + + +@pytest.mark.parametrize( + "use_microseconds, input_slice, expected", + [ + (True, {"start_time": 1}, {'start_time': 1000000}), + (False, {"start_time": 1}, {'start_time': 1}), + ], + ids=[ + "WITH `use_microseconds`", + "WITHOUT `use_microseconds`", + ] +) +def test_timestamp_based_cursor_get_request_params(config, use_microseconds, input_slice, expected) -> None: + cursor = _get_cursor(config, "start_time", use_microseconds) + assert cursor.get_request_params(stream_slice=input_slice) == expected diff --git a/airbyte-integrations/connectors/source-zendesk-chat/unit_tests/test_source.py b/airbyte-integrations/connectors/source-zendesk-chat/unit_tests/test_source.py deleted file mode 100644 index 4607e132314f1..0000000000000 --- a/airbyte-integrations/connectors/source-zendesk-chat/unit_tests/test_source.py +++ /dev/null @@ -1,76 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from unittest.mock import patch - -import pytest -import requests -from airbyte_cdk import AirbyteLogger -from source_zendesk_chat.source import SourceZendeskChat, ZendeskAuthentication -from source_zendesk_chat.streams import ( - Accounts, - Agents, - AgentTimelines, - Bans, - Chats, - Departments, - Goals, - Roles, - RoutingSettings, - Shortcuts, - Skills, - Triggers, -) - -TEST_CONFIG: dict = { - "start_date": "2020-10-01T00:00:00Z", - "access_token": "access_token", -} -TEST_INSTANCE: SourceZendeskChat = SourceZendeskChat() - - -def test_get_auth(): - expected = {"Authorization": "Bearer access_token"} - result = ZendeskAuthentication(TEST_CONFIG).get_auth().get_auth_header() - assert expected == result - - -@pytest.mark.parametrize( - "response, check_passed", - [ - (iter({"id": 123}), True), - (requests.HTTPError(), False), - ], - ids=["Success", "Fail"], -) -def test_check(response, check_passed): - with patch.object(RoutingSettings, "read_records", return_value=response) as mock_method: - result = TEST_INSTANCE.check_connection(logger=AirbyteLogger, config=TEST_CONFIG) - mock_method.assert_called() - assert check_passed == result[0] - - -@pytest.mark.parametrize( - "stream_cls", - [ - (Accounts), - (Agents), - (AgentTimelines), - (Bans), - (Chats), - (Departments), - (Goals), - (Roles), - (RoutingSettings), - (Shortcuts), - (Skills), - (Triggers), - ], -) -def test_streams(stream_cls): - streams = TEST_INSTANCE.streams(config=TEST_CONFIG) - for stream in streams: - if stream_cls in streams: - assert isinstance(stream, stream_cls) diff --git a/airbyte-integrations/connectors/source-zendesk-chat/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-zendesk-chat/unit_tests/test_streams.py deleted file mode 100644 index b90941b01c72f..0000000000000 --- a/airbyte-integrations/connectors/source-zendesk-chat/unit_tests/test_streams.py +++ /dev/null @@ -1,344 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import pytest -import requests -from source_zendesk_chat.source import ZendeskAuthentication -from source_zendesk_chat.streams import ( - Accounts, - Agents, - AgentTimelines, - Bans, - Chats, - Departments, - Goals, - Roles, - RoutingSettings, - Shortcuts, - Skills, - Triggers, -) - -TEST_CONFIG: dict = { - "start_date": "2020-10-01T00:00:00Z", - "access_token": "access_token", -} -TEST_CONFIG.update(**{"authenticator": ZendeskAuthentication(TEST_CONFIG).get_auth()}) - - -class TestFullRefreshStreams: - """ - STREAMS: - Accounts, Shortcuts, Triggers, Departments, Goals, Skills, Roles, RoutingSettings - """ - - @pytest.mark.parametrize( - "stream_cls", - [ - (Accounts), - (Departments), - (Goals), - (Roles), - (RoutingSettings), - (Shortcuts), - (Skills), - (Triggers), - ], - ) - def test_request_kwargs(self, stream_cls): - stream = stream_cls(TEST_CONFIG) - expected = {"timeout": 60} - assert expected == stream.request_kwargs(stream_state=None) - - @pytest.mark.parametrize( - "stream_cls, expected", - [ - (Accounts, "5"), - (Departments, "5"), - (Goals, "5"), - (Roles, "3"), - (RoutingSettings, "3"), - (Shortcuts, "3"), - (Skills, "1"), - (Triggers, "1"), - ], - ) - def test_backoff_time(self, requests_mock, stream_cls, expected): - stream = stream_cls(TEST_CONFIG) - url = f"{stream.url_base}{stream.path()}" - test_headers = {"Retry-After": expected} - requests_mock.get(url, headers=test_headers) - response = requests.get(url) - result = stream.backoff_time(response) - assert result == int(expected) - - @pytest.mark.parametrize( - "stream_cls, expected", - [ - (Accounts, "account"), - (Departments, "departments"), - (Goals, "goals"), - (Roles, "roles"), - (RoutingSettings, "routing_settings/account"), - (Shortcuts, "shortcuts"), - (Skills, "skills"), - (Triggers, "triggers"), - ], - ) - def test_path(self, stream_cls, expected): - stream = stream_cls(TEST_CONFIG) - result = stream.path() - assert result == expected - - @pytest.mark.parametrize( - "stream_cls, expected_cursor", - [ - (Accounts, "MTU4MD"), - (Departments, "c1Mzc"), - (Goals, "wfHw0MzJ8"), - (Roles, "0MzJ8"), - (RoutingSettings, "MTUC4wJ8"), - (Shortcuts, "MTU4MD"), - (Skills, "c1Mzc"), - (Triggers, "0MzJ8"), - ], - ) - def test_next_page_token(self, requests_mock, stream_cls, expected_cursor): - stream = stream_cls(TEST_CONFIG) - url = f"{stream.url_base}{stream.path()}" - next_url = f"{url}/cursor.json?cursor={expected_cursor}" - test_response = {"next_url": next_url} - requests_mock.get(url, json=test_response) - response = requests.get(url) - result = stream.next_page_token(response) - assert result == {"cursor": [expected_cursor]} - - @pytest.mark.parametrize( - "stream_cls, next_page_token, expected", - [ - (Accounts, {"cursor": "MTU4MD"}, {"limit": 100, "cursor": "MTU4MD"}), - (Departments, {"cursor": "c1Mzc"}, {"limit": 100, "cursor": "c1Mzc"}), - (Goals, {"cursor": "wfHw0MzJ8"}, {"limit": 100, "cursor": "wfHw0MzJ8"}), - (Roles, {"cursor": "0MzJ8"}, {"limit": 100, "cursor": "0MzJ8"}), - (RoutingSettings, {"cursor": "MTUC4wJ8"}, {"limit": 100, "cursor": "MTUC4wJ8"}), - (Shortcuts, {"cursor": "MTU4MD"}, {"limit": 100, "cursor": "MTU4MD"}), - (Skills, {"cursor": "c1Mzc"}, {"limit": 100, "cursor": "c1Mzc"}), - (Triggers, {"cursor": "0MzJ8"}, {"limit": 100, "cursor": "0MzJ8"}), - ], - ) - def test_request_params(self, stream_cls, next_page_token, expected): - stream = stream_cls(TEST_CONFIG) - result = stream.request_params(stream_state=None, next_page_token=next_page_token) - assert result == expected - - @pytest.mark.parametrize( - "stream_cls, test_response, expected", - [ - (Accounts, [{"id": "123"}], [{"id": "123"}]), - (Departments, {"id": "123"}, [{"id": "123"}]), - (Goals, {}, [{}]), - (Roles, [{"id": "123"}], [{"id": "123"}]), - (RoutingSettings, {"data": {"id": "123"}}, [{"id": "123"}]), - (Shortcuts, [{"id": "123"}], [{"id": "123"}]), - (Skills, [{"id": "123"}], [{"id": "123"}]), - (Triggers, [{"id": "123"}], [{"id": "123"}]), - ], - ) - def test_parse_response(self, requests_mock, stream_cls, test_response, expected): - stream = stream_cls(TEST_CONFIG) - url = f"{stream.url_base}{stream.path()}" - requests_mock.get(url, json=test_response) - response = requests.get(url) - result = stream.parse_response(response) - assert list(result) == expected - - -class TestTimeIncrementalStreams: - """ - STREAMS: - AgentTimelines, Chats - """ - - @pytest.mark.parametrize( - "stream_cls, expected", - [ - (AgentTimelines, 1000), - (Chats, 1000), - ], - ) - def test_state_checkpoint_interval(self, stream_cls, expected): - stream = stream_cls(start_date=TEST_CONFIG["start_date"]) - result = stream.state_checkpoint_interval - assert result == expected - - @pytest.mark.parametrize( - "stream_cls, expected", - [ - (AgentTimelines, "start_time"), - (Chats, "update_timestamp"), - ], - ) - def test_cursor_field(self, stream_cls, expected): - stream = stream_cls(start_date=TEST_CONFIG["start_date"]) - result = stream.cursor_field - assert result == expected - - @pytest.mark.parametrize( - "stream_cls, test_response, expected", - [ - (AgentTimelines, {"end_time": "123"}, {"start_time": "123"}), - (Chats, {"end_time": "123"}, {"start_time": "123"}), - ], - ) - def test_next_page_token(self, requests_mock, stream_cls, test_response, expected): - stream = stream_cls(start_date=TEST_CONFIG["start_date"]) - test_response.update(**{"count": stream.limit}) - url = f"{stream.url_base}{stream.path()}" - requests_mock.get(url, json=test_response) - response = requests.get(url) - result = stream.next_page_token(response) - assert result == expected - - @pytest.mark.parametrize( - "stream_cls, current_state, last_record, expected", - [ - (AgentTimelines, {}, {"start_time": "2021-01-01"}, {"start_time": "2021-01-01T00:00:00Z"}), - (Chats, {"update_timestamp": "2022-02-02"}, {"update_timestamp": "2022-03-03"}, {"update_timestamp": "2022-03-03T00:00:00Z"}), - ], - ) - def test_get_updated_state(self, stream_cls, current_state, last_record, expected): - stream = stream_cls(start_date=TEST_CONFIG["start_date"]) - result = stream.get_updated_state(current_state, last_record) - assert result == expected - - @pytest.mark.parametrize( - "stream_cls, stream_state, next_page_token, expected", - [ - (AgentTimelines, {}, {"start_time": "123"}, {"limit": 1000, "start_time": "123", "fields": "agent_timeline(*)"}), - (Chats, {"update_timestamp": "2022-02-02"}, {"start_time": "234"}, {"limit": 1000, "start_time": "234", "fields": "chats(*)"}), - ], - ) - def test_request_params(self, stream_cls, stream_state, next_page_token, expected): - stream = stream_cls(start_date=TEST_CONFIG["start_date"]) - result = stream.request_params(stream_state=stream_state, next_page_token=next_page_token) - assert result == expected - - @pytest.mark.parametrize( - "stream_cls, test_response, expected", - [ - ( - AgentTimelines, - {"agent_timeline": {"id": "123", "agent_id": "test_id", "start_time": "2021-01-01"}}, - [{"id": "test_id|2021-01-01T00:00:00Z", "agent_id": "test_id", "start_time": "2021-01-01T00:00:00Z"}], - ), - ( - Chats, - {"chats": {"id": "234", "agent_id": "test_id", "update_timestamp": "2022-01-01"}}, - [{"id": "234", "agent_id": "test_id", "update_timestamp": "2022-01-01T00:00:00Z"}], - ), - ], - ) - def test_parse_response(self, requests_mock, stream_cls, test_response, expected): - stream = stream_cls(start_date=TEST_CONFIG["start_date"]) - url = f"{stream.url_base}{stream.path()}" - requests_mock.get(url, json=test_response) - response = requests.get(url) - result = stream.parse_response(response) - assert list(result) == expected - - @pytest.mark.parametrize( - "stream_cls, expected", - [ - (AgentTimelines, "incremental/agent_timeline"), - (Chats, "incremental/chats"), - ], - ) - def test_path(self, stream_cls, expected): - stream = stream_cls(start_date=TEST_CONFIG["start_date"]) - result = stream.path() - assert result == expected - - -class TestIdIncrementalStreams: - """ - STREAMS: - Agents, Bans - """ - - @pytest.mark.parametrize( - "stream_cls, expected", - [ - (Agents, "agents"), - (Bans, "bans"), - ], - ) - def test_path(self, stream_cls, expected): - stream = stream_cls(TEST_CONFIG) - result = stream.path() - assert result == expected - - @pytest.mark.parametrize( - "stream_cls, expected", - [ - (Agents, "id"), - (Bans, "id"), - ], - ) - def test_cursor_field(self, stream_cls, expected): - stream = stream_cls(TEST_CONFIG) - result = stream.cursor_field - assert result == expected - - @pytest.mark.parametrize( - "stream_cls, current_state, last_record, expected", - [ - (Agents, {}, {"id": "1"}, {"id": "1"}), - (Bans, {"id": "1"}, {"id": "2"}, {"id": "2"}), - ], - ) - def test_get_updated_state(self, stream_cls, current_state, last_record, expected): - stream = stream_cls(TEST_CONFIG) - result = stream.get_updated_state(current_state, last_record) - assert result == expected - - @pytest.mark.parametrize( - "stream_cls, test_response, expected", - [ - (Agents, [{"id": "2"}], {"since_id": "2"}), - ], - ) - def test_next_page_token(self, requests_mock, stream_cls, test_response, expected): - stream = stream_cls(TEST_CONFIG) - stream.limit = 1 - url = f"{stream.url_base}{stream.path()}" - requests_mock.get(url, json=test_response) - response = requests.get(url) - result = stream.next_page_token(response) - assert result == expected - - @pytest.mark.parametrize( - "stream_cls, test_response, expected", - [ - (Agents, {"id": "2"}, [{"id": "2"}]), - ], - ) - def test_parse_response(self, requests_mock, stream_cls, test_response, expected): - stream = stream_cls(TEST_CONFIG) - url = f"{stream.url_base}{stream.path()}" - requests_mock.get(url, json=test_response) - response = requests.get(url) - result = stream.parse_response(response) - assert list(result) == expected - - @pytest.mark.parametrize( - "stream_cls, stream_state, next_page_token, expected", - [ - (Agents, {}, {"since_id": "1"}, {"limit": 100, "since_id": "1"}), - (Bans, {"id": "1"}, {"since_id": "2"}, {"limit": 100, "since_id": "2"}), - ], - ) - def test_request_params(self, stream_cls, stream_state, next_page_token, expected): - stream = stream_cls(TEST_CONFIG) - result = stream.request_params(stream_state=stream_state, next_page_token=next_page_token) - assert result == expected diff --git a/airbyte-integrations/connectors/source-zendesk-support/acceptance-test-config.yml b/airbyte-integrations/connectors/source-zendesk-support/acceptance-test-config.yml index 8a549caf39ad8..6f9644e53aa91 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-zendesk-support/acceptance-test-config.yml @@ -44,6 +44,7 @@ acceptance_tests: configured_catalog_path: "integration_tests/incremental_catalog.json" future_state: future_state_path: "integration_tests/abnormal_state.json" + timeout_seconds: 3600 full_refresh: tests: - config_path: "secrets/config.json" diff --git a/airbyte-integrations/connectors/source-zendesk-support/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-zendesk-support/integration_tests/expected_records.jsonl index ae83e846ae3a3..e7c17f005aebd 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/integration_tests/expected_records.jsonl +++ b/airbyte-integrations/connectors/source-zendesk-support/integration_tests/expected_records.jsonl @@ -3,8 +3,7 @@ {"stream": "articles", "data": {"id": 7253394952591, "url": "https://d3v-airbyte.zendesk.com/api/v2/help_center/en-us/articles/7253394952591.json", "html_url": "https://d3v-airbyte.zendesk.com/hc/en-us/articles/7253394952591-How-do-I-customize-my-Help-Center", "author_id": 360786799676, "comments_disabled": false, "draft": false, "promoted": false, "position": 0, "vote_sum": 0, "vote_count": 0, "section_id": 7253394947215, "created_at": "2023-06-22T00:32:20Z", "updated_at": "2023-06-22T00:32:20Z", "name": "How do I customize my Help Center?", "title": "How do I customize my Help Center?", "source_locale": "en-us", "locale": "en-us", "outdated": false, "outdated_locales": [], "edited_at": "2023-06-22T00:32:20Z", "user_segment_id": null, "permission_group_id": 7253379449487, "content_tag_ids": [], "label_names": [], "body": "

    You can modify the look and feel of your Help Center by changing colors and fonts. See Branding your Help Center to learn how.

    \n\n

    You can also change the design of your Help Center. If you're comfortable working with page code, you can dig in to the site's HTML, CSS, and Javascript to customize your theme. To get started, see Customizing the Help Center.

    "}, "emitted_at": 1697714809849} {"stream": "article_comments", "data": {"id": 7253381447311, "url": "https://d3v-airbyte.zendesk.com/api/v2/help_center/en-us/articles/7253394935055/comments/7253381447311.json", "body": "

    Test comment 2

    ", "author_id": 360786799676, "source_id": 7253394935055, "source_type": "Article", "html_url": "https://d3v-airbyte.zendesk.com/hc/en-us/articles/7253394935055/comments/7253381447311", "locale": "en-us", "created_at": "2023-06-22T00:33:36Z", "updated_at": "2023-06-22T00:33:42Z", "vote_sum": -1, "vote_count": 1, "non_author_editor_id": null, "non_author_updated_at": null}, "emitted_at": 1697714814160} {"stream": "article_comments", "data": {"id": 7253366869647, "url": "https://d3v-airbyte.zendesk.com/api/v2/help_center/en-us/articles/7253394935055/comments/7253366869647.json", "body": "

    Test comment

    ", "author_id": 360786799676, "source_id": 7253394935055, "source_type": "Article", "html_url": "https://d3v-airbyte.zendesk.com/hc/en-us/articles/7253394935055/comments/7253366869647", "locale": "en-us", "created_at": "2023-06-22T00:33:29Z", "updated_at": "2023-06-22T00:33:40Z", "vote_sum": 1, "vote_count": 1, "non_author_editor_id": null, "non_author_updated_at": null}, "emitted_at": 1697714814162} -{"stream": "article_comment_votes", "data": {"id": 7253393200655, "url": "https://d3v-airbyte.zendesk.com/api/v2/help_center/votes/7253393200655.json", "user_id": 360786799676, "value": -1, "item_id": 7253381447311, "item_type": "Comment", "created_at": "2023-06-22T00:33:42Z", "updated_at": "2023-06-22T00:33:42Z"}, "emitted_at": 1697714823072} -{"stream": "article_comment_votes", "data": {"id": 7253381522703, "url": "https://d3v-airbyte.zendesk.com/api/v2/help_center/votes/7253381522703.json", "user_id": 360786799676, "value": 1, "item_id": 7253366869647, "item_type": "Comment", "created_at": "2023-06-22T00:33:40Z", "updated_at": "2023-06-22T00:33:40Z"}, "emitted_at": 1697714823501} +{"stream": "article_comment_votes", "data": {"id": 7253393200655, "url": "https://d3v-airbyte.zendesk.com/api/v2/help_center/votes/7253393200655.json", "user_id": 360786799676, "value": -1, "item_id": 7253381447311, "item_type": "Comment", "created_at": "2023-06-22T00:33:42Z", "updated_at": "2023-06-22T00:33:42Z"}, "emitted_at": 1711134948370} {"stream": "article_votes", "data": {"id": 7816935174287, "url": "https://d3v-airbyte.zendesk.com/api/v2/help_center/votes/7816935174287.json", "user_id": 360786799676, "value": 1, "item_id": 7253394935055, "item_type": "Article", "created_at": "2023-09-04T13:52:38Z", "updated_at": "2023-09-04T13:52:38Z"}, "emitted_at": 1697714827544} {"stream": "article_votes", "data": {"id": 7816935384335, "url": "https://d3v-airbyte.zendesk.com/api/v2/help_center/votes/7816935384335.json", "user_id": 360786799676, "value": 1, "item_id": 7253391120527, "item_type": "Article", "created_at": "2023-09-04T13:52:58Z", "updated_at": "2023-09-04T13:52:58Z"}, "emitted_at": 1697714828540} {"stream": "audit_logs", "data": {"url": "https://d3v-airbyte.zendesk.com/api/v2/audit_logs/8841266675343.json", "id": 8841266675343, "action_label": "Updated", "actor_id": 360786799676, "source_id": 8841127505167, "source_type": "user", "source_label": "Dylan Dominguez499", "action": "update", "change_description": "Organization: Test998 is assigned", "ip_address": "24.228.86.152", "created_at": "2024-01-19T15:55:46Z", "actor_name": "Team Airbyte"}, "emitted_at": 1697714829754} @@ -35,18 +34,18 @@ {"stream": "ticket_audits", "data": {"id": 8178673821967, "ticket_id": 158, "created_at": "2023-10-20T12:01:58Z", "author_id": -1, "metadata": {"system": {}, "custom": {}}, "events": [{"id": 8178673822095, "type": "Notification", "subject": "Request #{{ticket.id}}: How would you rate the support you received?", "body": "Hello {{ticket.requester.name}},\n\nWe'd love to hear what you think of our customer service. Please take a moment to answer one simple question by clicking either link below:\n\n{{satisfaction.rating_section}}\n\nHere's a reminder of what this request was about:\n\n{{ticket.comments_formatted}}\n", "recipients": [8178212241935]}, {"id": 8178673822223, "type": "Change", "value": "offered", "field_name": "satisfaction_score", "previous_value": "unoffered"}], "via": {"channel": "rule", "source": {"to": {}, "from": {"deleted": false, "title": "Request customer satisfaction rating (system automation)", "id": 360021281435}, "rel": "automation"}}}, "emitted_at": 1709714976448} {"stream": "ticket_audits", "data": {"id": 8178567687311, "ticket_id": 159, "created_at": "2023-10-20T11:29:29Z", "author_id": 360786799676, "metadata": {"system": {"client": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/118.0.0.0 Safari/537.36", "ip_address": "162.19.235.114", "location": "Frankfurt am Main, HE, Germany", "latitude": 50.1101, "longitude": 8.6721}, "custom": {}}, "events": [{"id": 8178567687439, "type": "Change", "value": "360786799676", "field_name": "assignee_id", "previous_value": null}, {"id": 8178567687567, "type": "Change", "value": "6770788212111", "field_name": "group_id", "previous_value": null}, {"id": 8178567687695, "type": "Change", "value": "open", "field_name": "status", "previous_value": "new"}, {"id": 8178567687823, "type": "Change", "value": "4044376", "field_name": "custom_status_id", "previous_value": "4044356"}], "via": {"channel": "web", "source": {"from": {}, "to": {}, "rel": null}}}, "emitted_at": 1709714976449} {"stream": "ticket_audits", "data": {"id": 8178427216527, "ticket_id": 159, "created_at": "2023-10-20T10:57:49Z", "author_id": 360786799676, "metadata": {"system": {"client": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/118.0.0.0 Safari/537.36", "ip_address": "162.19.235.114", "location": "Frankfurt am Main, HE, Germany", "latitude": 50.1101, "longitude": 8.6721}, "custom": {}}, "events": [{"id": 8178427216655, "type": "Comment", "author_id": 360786799676, "body": "test ticket", "html_body": "
    test ticket
    ", "plain_body": "test ticket", "public": true, "attachments": [], "audit_id": 8178427216527}, {"id": 8178427216783, "type": "Create", "value": "360000358316", "field_name": "brand_id"}, {"id": 8178427216911, "type": "Create", "value": "8178212241935", "field_name": "requester_id"}, {"id": 8178427217039, "type": "Create", "value": "4044356", "field_name": "custom_status_id"}, {"id": 8178427217167, "type": "Create", "value": "555666", "field_name": "subject"}, {"id": 8178427217295, "type": "Create", "value": "360000084116", "field_name": "ticket_form_id"}, {"id": 8178427217423, "type": "Create", "value": null, "field_name": "priority"}, {"id": 8178427217551, "type": "Create", "value": null, "field_name": "type"}, {"id": 8178427217679, "type": "Create", "value": "new", "field_name": "status"}, {"id": 8178427217807, "type": "Notification", "via": {"channel": "rule", "source": {"from": {"deleted": false, "title": "Notify requester of new proactive ticket", "id": 360011363196, "revision_id": 3}, "rel": "trigger"}}, "subject": "{{ticket.title}}", "body": "This ticket was created on your behalf.\n\n{{ticket.comments_formatted}}\n\nTo add additional comments, reply to this email.", "recipients": [8178212241935]}, {"id": 8178427217935, "type": "Notification", "via": {"channel": "rule", "source": {"from": {"deleted": false, "title": "Notify all agents of received request", "id": 360011363296, "revision_id": 3}, "rel": "trigger"}}, "subject": "[{{ticket.account}}] {{ticket.title}}", "body": "A ticket (#{{ticket.id}}) by {{ticket.requester.name}} has been received. It is unassigned.\n\n{{ticket.comments_formatted}}", "recipients": [361089721035, 360786799676, 7282634891791]}], "via": {"channel": "web", "source": {"from": {}, "to": {"name": "Diana", "address": "valitdiana@gmail.com"}, "rel": null}}}, "emitted_at": 1709714976450} -{"stream": "ticket_comments", "data": {"id": 5162146653071, "via": {"channel": "web", "source": {"from": {}, "to": {"name": "Team Airbyte", "address": "integration-test@airbyte.io"}, "rel": null}}, "via_reference_id": null, "type": "Comment", "author_id": 360786799676, "body": " 163748", "html_body": "
     163748
    ", "plain_body": " 163748", "public": true, "attachments": [], "audit_id": 5162146652943, "created_at": "2022-07-18T09:58:23Z", "event_type": "Comment", "ticket_id": 124, "timestamp": 1658138303}, "emitted_at": 1697714859038} -{"stream": "ticket_comments", "data": {"id": 5162208963983, "via": {"channel": "web", "source": {"from": {}, "to": {}, "rel": null}}, "via_reference_id": null, "type": "Comment", "author_id": 360786799676, "body": "238473846", "html_body": "
    238473846
    ", "plain_body": "238473846", "public": false, "attachments": [], "audit_id": 5162208963855, "created_at": "2022-07-18T10:16:53Z", "event_type": "Comment", "ticket_id": 125, "timestamp": 1658139413}, "emitted_at": 1697714859039} -{"stream": "ticket_comments", "data": {"id": 5162223308559, "via": {"channel": "web", "source": {"from": {}, "to": {}, "rel": null}}, "via_reference_id": null, "type": "Comment", "author_id": 360786799676, "body": "Airbyte", "html_body": "", "plain_body": "Airbyte", "public": false, "attachments": [], "audit_id": 5162223308431, "created_at": "2022-07-18T10:25:21Z", "event_type": "Comment", "ticket_id": 125, "timestamp": 1658139921}, "emitted_at": 1697714859040} +{"stream": "ticket_comments", "data": {"id": 400789458076, "via": {"channel": "api", "source": {"from": {}, "to": {"name": "Team Airbyte", "address": "integration-test@airbyte.io"}, "rel": null}}, "via_reference_id": null, "type": "Comment", "author_id": 360786799676, "body": "Congratulations, you have set up Zendesk Talk and received your first call! Your new number is +1 (205) 953-1462. Click the Talk icon above to begin accepting calls.", "html_body": "

    Congratulations, you have set up Zendesk Talk and received your first call! Your new number is +1 (205) 953-1462. Click the Talk icon above to begin accepting calls.

    ", "plain_body": "Congratulations, you have set up Zendesk Talk and received your first call! Your new number is +1 (205) 953-1462. Click the Talk icon above to begin accepting calls.", "public": true, "attachments": [], "audit_id": 400789458056, "created_at": "2021-04-01T13:42:49Z", "event_type": "Comment", "ticket_id": 2, "timestamp": 1617284569}, "emitted_at": 1712912452160} +{"stream": "ticket_comments", "data": {"id": 409521612516, "via": {"channel": "web", "source": {"from": {}, "to": {"name": "Team Airbyte", "address": "integration-test@airbyte.io"}, "rel": null}}, "via_reference_id": null, "type": "Comment", "author_id": 360786799676, "body": "dsdsdsdsdsds", "html_body": "
    dsdsdsdsdsds
    ", "plain_body": "dsdsdsdsdsds", "public": true, "attachments": [], "audit_id": 409521612496, "created_at": "2021-07-15T18:34:19Z", "event_type": "Comment", "ticket_id": 3, "timestamp": 1626374059}, "emitted_at": 1712912452161} +{"stream": "ticket_comments", "data": {"id": 409681806556, "via": {"channel": "web", "source": {"from": {}, "to": {"name": "Team Airbyte", "address": "integration-test@airbyte.io"}, "rel": null}}, "via_reference_id": null, "type": "Comment", "author_id": 360786799676, "body": "e4t3ett4t4etet", "html_body": "
    e4t3ett4t4etet

    ", "plain_body": "e4t3ett4t4etet", "public": true, "attachments": [], "audit_id": 409681806536, "created_at": "2021-07-17T21:19:34Z", "event_type": "Comment", "ticket_id": 3, "timestamp": 1626556774}, "emitted_at": 1712912452161} {"stream": "ticket_fields", "data": {"url": "https://d3v-airbyte.zendesk.com/api/v2/ticket_fields/360002833076.json", "id": 360002833076, "type": "subject", "title": "Subject", "raw_title": "Subject", "description": "", "raw_description": "", "position": 1, "active": true, "required": false, "collapsed_for_agents": false, "regexp_for_validation": null, "title_in_portal": "Subject", "raw_title_in_portal": "Subject", "visible_in_portal": true, "editable_in_portal": true, "required_in_portal": true, "tag": null, "created_at": "2020-12-11T18:34:05Z", "updated_at": "2020-12-11T18:34:05Z", "removable": false, "key": null, "agent_description": null}, "emitted_at": 1697714860081} {"stream": "ticket_fields", "data": {"url": "https://d3v-airbyte.zendesk.com/api/v2/ticket_fields/360002833096.json", "id": 360002833096, "type": "description", "title": "Description", "raw_title": "Description", "description": "Please enter the details of your request. A member of our support staff will respond as soon as possible.", "raw_description": "Please enter the details of your request. A member of our support staff will respond as soon as possible.", "position": 2, "active": true, "required": false, "collapsed_for_agents": false, "regexp_for_validation": null, "title_in_portal": "Description", "raw_title_in_portal": "Description", "visible_in_portal": true, "editable_in_portal": true, "required_in_portal": true, "tag": null, "created_at": "2020-12-11T18:34:05Z", "updated_at": "2020-12-11T18:34:05Z", "removable": false, "key": null, "agent_description": null}, "emitted_at": 1697714860083} {"stream": "ticket_fields", "data": {"url": "https://d3v-airbyte.zendesk.com/api/v2/ticket_fields/360002833116.json", "id": 360002833116, "type": "status", "title": "Status", "raw_title": "Status", "description": "Request status", "raw_description": "Request status", "position": 3, "active": true, "required": false, "collapsed_for_agents": false, "regexp_for_validation": null, "title_in_portal": "Status", "raw_title_in_portal": "Status", "visible_in_portal": false, "editable_in_portal": false, "required_in_portal": false, "tag": null, "created_at": "2020-12-11T18:34:05Z", "updated_at": "2020-12-11T18:34:05Z", "removable": false, "key": null, "agent_description": null, "system_field_options": [{"name": "Open", "value": "open"}, {"name": "Pending", "value": "pending"}, {"name": "Solved", "value": "solved"}], "sub_type_id": 0}, "emitted_at": 1697714860085} {"stream": "ticket_metrics", "data": {"url": "https://d3v-airbyte.zendesk.com/api/v2/ticket_metrics/8171838264079.json", "id": 8171838264079, "ticket_id": 155, "created_at": "2023-10-19T15:22:00Z", "updated_at": "2023-10-19T15:24:05Z", "group_stations": 1, "assignee_stations": 1, "reopens": 0, "replies": 0, "assignee_updated_at": null, "requester_updated_at": "2023-10-19T15:22:32Z", "status_updated_at": "2023-10-19T15:24:05Z", "initially_assigned_at": "2023-10-19T15:24:05Z", "assigned_at": "2023-10-19T15:24:05Z", "solved_at": null, "latest_comment_added_at": "2023-10-19T15:25:58Z", "reply_time_in_minutes": {"calendar": null, "business": null}, "first_resolution_time_in_minutes": {"calendar": null, "business": null}, "full_resolution_time_in_minutes": {"calendar": null, "business": null}, "agent_wait_time_in_minutes": {"calendar": null, "business": null}, "requester_wait_time_in_minutes": {"calendar": 2, "business": 0}, "on_hold_time_in_minutes": {"calendar": 0, "business": 0}, "custom_status_updated_at": "2023-10-19T15:24:05Z"}, "emitted_at": 1709718678594} {"stream": "ticket_metrics", "data": {"url": "https://d3v-airbyte.zendesk.com/api/v2/ticket_metrics/7283000498191.json", "id": 7283000498191, "ticket_id": 153, "created_at": "2023-06-26T11:31:48Z", "updated_at": "2023-06-26T12:13:42Z", "group_stations": 2, "assignee_stations": 2, "reopens": 0, "replies": 0, "assignee_updated_at": "2023-06-26T11:31:48Z", "requester_updated_at": "2023-06-26T11:31:48Z", "status_updated_at": "2023-06-26T11:31:48Z", "initially_assigned_at": "2023-06-26T11:31:48Z", "assigned_at": "2023-06-26T12:13:42Z", "solved_at": null, "latest_comment_added_at": "2023-06-26T11:31:48Z", "reply_time_in_minutes": {"calendar": null, "business": null}, "first_resolution_time_in_minutes": {"calendar": null, "business": null}, "full_resolution_time_in_minutes": {"calendar": null, "business": null}, "agent_wait_time_in_minutes": {"calendar": null, "business": null}, "requester_wait_time_in_minutes": {"calendar": null, "business": null}, "on_hold_time_in_minutes": {"calendar": 0, "business": 0}, "custom_status_updated_at": "2023-06-26T11:31:48Z"}, "emitted_at": 1709718678594} {"stream": "ticket_metrics", "data": {"url": "https://d3v-airbyte.zendesk.com/api/v2/ticket_metrics/7282901696015.json", "id": 7282901696015, "ticket_id": 151, "created_at": "2023-06-26T11:09:33Z", "updated_at": "2023-06-26T12:03:38Z", "group_stations": 1, "assignee_stations": 1, "reopens": 0, "replies": 1, "assignee_updated_at": "2023-06-26T12:03:37Z", "requester_updated_at": "2023-06-26T11:09:33Z", "status_updated_at": "2023-06-26T11:09:33Z", "initially_assigned_at": "2023-06-26T11:09:33Z", "assigned_at": "2023-06-26T11:09:33Z", "solved_at": null, "latest_comment_added_at": "2023-06-26T12:03:37Z", "reply_time_in_minutes": {"calendar": 54, "business": 0}, "first_resolution_time_in_minutes": {"calendar": null, "business": null}, "full_resolution_time_in_minutes": {"calendar": null, "business": null}, "agent_wait_time_in_minutes": {"calendar": null, "business": null}, "requester_wait_time_in_minutes": {"calendar": null, "business": null}, "on_hold_time_in_minutes": {"calendar": 0, "business": 0}, "custom_status_updated_at": "2023-06-26T11:09:33Z"}, "emitted_at": 1709718678595} -{"stream": "ticket_metric_events", "data": {"id": 4992797383183, "ticket_id": 121, "metric": "agent_work_time", "instance_id": 0, "type": "measure", "time": "2022-06-17T14:49:20Z"}, "emitted_at": 1697714863384} -{"stream": "ticket_metric_events", "data": {"id": 4992797383311, "ticket_id": 121, "metric": "pausable_update_time", "instance_id": 0, "type": "measure", "time": "2022-06-17T14:49:20Z"}, "emitted_at": 1697714863386} -{"stream": "ticket_metric_events", "data": {"id": 4992797383439, "ticket_id": 121, "metric": "reply_time", "instance_id": 0, "type": "measure", "time": "2022-06-17T14:49:20Z"}, "emitted_at": 1697714863386} +{"stream": "ticket_metric_events", "data": {"id": 383001965136, "ticket_id": 1, "metric": "agent_work_time", "instance_id": 0, "type": "measure", "time": "2020-12-11T18:34:09Z"}, "emitted_at": 1712913374388} +{"stream": "ticket_metric_events", "data": {"id": 383001965156, "ticket_id": 1, "metric": "agent_work_time", "instance_id": 1, "type": "activate", "time": "2020-12-11T18:34:09Z"}, "emitted_at": 1712913374389} +{"stream": "ticket_metric_events", "data": {"id": 383001965176, "ticket_id": 1, "metric": "pausable_update_time", "instance_id": 0, "type": "measure", "time": "2020-12-11T18:34:09Z"}, "emitted_at": 1712913374389} {"stream": "ticket_skips", "data": {"id": 7290033348623, "ticket_id": 121, "user_id": 360786799676, "reason": "I have no idea.", "created_at": "2023-06-27T08:24:02Z", "updated_at": "2023-06-27T08:24:02Z", "ticket": {"url": "https://d3v-airbyte.zendesk.com/api/v2/tickets/121.json", "id": 121, "external_id": null, "via": {"channel": "voice", "source": {"rel": "voicemail", "from": {"formatted_phone": "+1 (689) 689-8023", "phone": "+16896898023", "name": "Caller +1 (689) 689-8023"}, "to": {"formatted_phone": "+1 (205) 953-1462", "phone": "+12059531462", "name": "Airbyte", "brand_id": 360000358316}}}, "created_at": "2022-06-17T14:49:20Z", "updated_at": "2022-06-17T16:01:42Z", "generated_timestamp": 1655481702, "type": null, "subject": "Voicemail from: Caller +1 (689) 689-8023", "raw_subject": "Voicemail from: Caller +1 (689) 689-8023", "description": "Call from: +1 (689) 689-8023\\nTime of call: June 17, 2022 at 2:48:27 PM", "priority": null, "status": "new", "recipient": null, "requester_id": 4992781783439, "submitter_id": 4992781783439, "assignee_id": null, "organization_id": null, "group_id": null, "collaborator_ids": [], "follower_ids": [], "email_cc_ids": [], "forum_topic_id": null, "problem_id": null, "has_incidents": false, "is_public": false, "due_at": null, "tags": [], "custom_fields": [], "satisfaction_rating": {"score": "offered"}, "sharing_agreement_ids": [], "custom_status_id": 4044356, "fields": [], "followup_ids": [], "ticket_form_id": 360000084116, "deleted_ticket_form_id": null, "brand_id": 360000358316, "allow_channelback": false, "allow_attachments": true, "from_messaging_channel": false}}, "emitted_at": 1709038262604} {"stream": "ticket_skips", "data": {"id": 7290088475023, "ticket_id": 125, "user_id": 360786799676, "reason": "Another test skip.", "created_at": "2023-06-27T08:30:01Z", "updated_at": "2023-06-27T08:30:01Z", "ticket": {"url": "https://d3v-airbyte.zendesk.com/api/v2/tickets/125.json", "id": 125, "external_id": null, "via": {"channel": "web", "source": {"from": {}, "to": {}, "rel": null}}, "created_at": "2022-07-18T10:16:53Z", "updated_at": "2022-07-18T10:36:02Z", "generated_timestamp": 1658140562, "type": "question", "subject": "Ticket Test 2", "raw_subject": "Ticket Test 2", "description": "238473846", "priority": "urgent", "status": "open", "recipient": null, "requester_id": 360786799676, "submitter_id": 360786799676, "assignee_id": 361089721035, "organization_id": 360033549136, "group_id": 5059439464079, "collaborator_ids": [360786799676], "follower_ids": [360786799676], "email_cc_ids": [], "forum_topic_id": null, "problem_id": null, "has_incidents": false, "is_public": false, "due_at": null, "tags": [], "custom_fields": [], "satisfaction_rating": {"score": "unoffered"}, "sharing_agreement_ids": [], "custom_status_id": 4044376, "fields": [], "followup_ids": [], "ticket_form_id": 360000084116, "deleted_ticket_form_id": null, "brand_id": 360000358316, "allow_channelback": false, "allow_attachments": true, "from_messaging_channel": false}}, "emitted_at": 1709038262605} {"stream": "tickets", "data": {"url": "https://d3v-airbyte.zendesk.com/api/v2/tickets/121.json", "id": 121, "external_id": null, "via": {"channel": "voice", "source": {"rel": "voicemail", "from": {"formatted_phone": "+1 (689) 689-8023", "phone": "+16896898023", "name": "Caller +1 (689) 689-8023"}, "to": {"formatted_phone": "+1 (205) 953-1462", "phone": "+12059531462", "name": "Airbyte", "brand_id": 360000358316}}}, "created_at": "2022-06-17T14:49:20Z", "updated_at": "2022-06-17T16:01:42Z", "type": null, "subject": "Voicemail from: Caller +1 (689) 689-8023", "raw_subject": "Voicemail from: Caller +1 (689) 689-8023", "description": "Call from: +1 (689) 689-8023\\nTime of call: June 17, 2022 at 2:48:27 PM", "priority": null, "status": "new", "recipient": null, "requester_id": 4992781783439, "submitter_id": 4992781783439, "assignee_id": null, "organization_id": null, "group_id": null, "collaborator_ids": [], "follower_ids": [], "email_cc_ids": [], "forum_topic_id": null, "problem_id": null, "has_incidents": false, "is_public": false, "due_at": null, "tags": [], "custom_fields": [], "satisfaction_rating": {"score": "offered"}, "sharing_agreement_ids": [], "custom_status_id": 4044356, "fields": [], "followup_ids": [], "ticket_form_id": 360000084116, "brand_id": 360000358316, "allow_channelback": false, "allow_attachments": true, "from_messaging_channel": false, "generated_timestamp": 1655481702}, "emitted_at": 1697714865818} diff --git a/airbyte-integrations/connectors/source-zendesk-support/metadata.yaml b/airbyte-integrations/connectors/source-zendesk-support/metadata.yaml index 130c5f9ad8db3..b1fb36bb1853e 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/metadata.yaml +++ b/airbyte-integrations/connectors/source-zendesk-support/metadata.yaml @@ -11,13 +11,13 @@ data: connectorSubtype: api connectorType: source definitionId: 79c1aa37-dae3-42ae-b333-d1c105477715 - dockerImageTag: 2.2.8 + dockerImageTag: 2.3.0 dockerRepository: airbyte/source-zendesk-support documentationUrl: https://docs.airbyte.com/integrations/sources/zendesk-support githubIssueLabel: source-zendesk-support icon: zendesk-support.svg license: ELv2 - maxSecondsBetweenMessages: 10800 + maxSecondsBetweenMessages: 60 name: Zendesk Support remoteRegistries: pypi: diff --git a/airbyte-integrations/connectors/source-zendesk-support/poetry.lock b/airbyte-integrations/connectors/source-zendesk-support/poetry.lock index 457d40ddb6cce..85800d870b6b5 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/poetry.lock +++ b/airbyte-integrations/connectors/source-zendesk-support/poetry.lock @@ -1,40 +1,39 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.60.2" +version = "0.77.2" description = "A framework for writing Airbyte Connectors." optional = false -python-versions = ">=3.8" +python-versions = "<4.0,>=3.9" files = [ - {file = "airbyte-cdk-0.60.2.tar.gz", hash = "sha256:11cae56d77ae28dff228016373d8573d5fe7b9f65f7b984586283bb904f628ea"}, - {file = "airbyte_cdk-0.60.2-py3-none-any.whl", hash = "sha256:c34d601a50de2b8e0a4732bf5f7c08eeb9b41972df816e1fb6164eea250c8928"}, + {file = "airbyte_cdk-0.77.2-py3-none-any.whl", hash = "sha256:6dffbe0c4b3454a5cdd20525b4f1e9cfef2e80c005b6b30473fc5bf6f75af64e"}, + {file = "airbyte_cdk-0.77.2.tar.gz", hash = "sha256:84aeb27862a18e135c7bc3a5dfc363037665d428e7495e8824673f853adcca70"}, ] [package.dependencies] airbyte-protocol-models = "0.5.1" backoff = "*" cachetools = "*" -Deprecated = ">=1.2,<2.0" +Deprecated = ">=1.2,<1.3" dpath = ">=2.0.1,<2.1.0" genson = "1.2.2" isodate = ">=0.6.1,<0.7.0" Jinja2 = ">=3.1.2,<3.2.0" -jsonref = ">=0.2,<1.0" +jsonref = ">=0.2,<0.3" jsonschema = ">=3.2.0,<3.3.0" pendulum = "<3.0.0" pydantic = ">=1.10.8,<2.0.0" pyrate-limiter = ">=3.1.0,<3.2.0" python-dateutil = "*" -PyYAML = ">=6.0.1" +PyYAML = ">=6.0.1,<7.0.0" requests = "*" -requests-cache = "*" +requests_cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] [[package]] @@ -104,13 +103,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -380,13 +379,13 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jsonref" -version = "0.3.0" -description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." +version = "0.2" +description = "An implementation of JSON Reference for Python" optional = false -python-versions = ">=3.3,<4.0" +python-versions = "*" files = [ - {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, - {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, ] [[package]] @@ -481,13 +480,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -699,30 +698,30 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xm [[package]] name = "pytest-mock" -version = "3.12.0" +version = "3.14.0" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, - {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, ] [package.dependencies] -pytest = ">=5.0" +pytest = ">=6.2.5" [package.extras] dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -833,13 +832,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -851,15 +850,15 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" @@ -882,19 +881,19 @@ test = ["fixtures", "mock", "purl", "pytest", "sphinx", "testrepository (>=0.0.1 [[package]] name = "setuptools" -version = "69.0.3" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, - {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -920,13 +919,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -945,13 +944,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1056,4 +1055,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "92ec2b4ac91287d9aed5533a5929649943394a41c5dca9427fd7278c956490ae" +content-hash = "12c06489819e20daa0b2fffd6bac1d8f28bd1c5bf917ae2519f8482f47a8d2ae" diff --git a/airbyte-integrations/connectors/source-zendesk-support/pyproject.toml b/airbyte-integrations/connectors/source-zendesk-support/pyproject.toml index e0944429cb233..6e923b829a578 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/pyproject.toml +++ b/airbyte-integrations/connectors/source-zendesk-support/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "2.2.8" +version = "2.3.0" name = "source-zendesk-support" description = "Source implementation for Zendesk Support." authors = [ "Airbyte ",] @@ -17,7 +17,7 @@ include = "source_zendesk_support" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "==0.60.2" +airbyte-cdk = "^0" pytz = "==2024.1" [tool.poetry.scripts] diff --git a/airbyte-integrations/connectors/source-zendesk-support/unit_tests/integrations/test_post_comment_votes.py b/airbyte-integrations/connectors/source-zendesk-support/unit_tests/integrations/test_post_comment_votes.py index 8444c48880201..7dfb610c389e3 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/unit_tests/integrations/test_post_comment_votes.py +++ b/airbyte-integrations/connectors/source-zendesk-support/unit_tests/integrations/test_post_comment_votes.py @@ -24,7 +24,7 @@ @freezegun.freeze_time(_NOW.isoformat()) -class TestPostsCommentsVouteStreamFullRefresh(TestCase): +class TestPostsCommentsVotesStreamFullRefresh(TestCase): @property def _config(self): return ConfigBuilder() \ @@ -174,7 +174,8 @@ def test_given_no_state_and_successful_sync_when_read_then_set_state_to_now(self output = read_stream("post_comment_votes", SyncMode.incremental, self._config) assert len(output.records) == 1 - assert output.most_recent_state == {"post_comment_votes": {"updated_at": post_comment_votes["updated_at"]}} + assert output.most_recent_state.stream_descriptor.name == "post_comment_votes" + assert output.most_recent_state.stream_state == {"updated_at": post_comment_votes["updated_at"]} @HttpMocker() def test_given_state_and_pagination_when_read_then_return_records(self, http_mocker): @@ -243,4 +244,5 @@ def test_given_state_and_pagination_when_read_then_return_records(self, http_moc output = read_stream("post_comment_votes", SyncMode.incremental, self._config, StateBuilder().with_stream_state("post_comment_votes", state).build()) assert len(output.records) == 2 - assert output.most_recent_state == {"post_comment_votes": {"updated_at": datetime_to_string(last_page_record_updated_at)}} + assert output.most_recent_state.stream_descriptor.name == "post_comment_votes" + assert output.most_recent_state.stream_state == {"updated_at": datetime_to_string(last_page_record_updated_at)} diff --git a/airbyte-integrations/connectors/source-zendesk-support/unit_tests/integrations/test_post_comments.py b/airbyte-integrations/connectors/source-zendesk-support/unit_tests/integrations/test_post_comments.py index 710dcf3abc645..c29943f64ad60 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/unit_tests/integrations/test_post_comments.py +++ b/airbyte-integrations/connectors/source-zendesk-support/unit_tests/integrations/test_post_comments.py @@ -158,7 +158,8 @@ def test_given_no_state_and_successful_sync_when_read_then_set_state_to_now(self assert len(output.records) == 1 post_comment = post_comments_record_builder.build() - assert output.most_recent_state == {"post_comments": {"updated_at": post_comment["updated_at"]}} + assert output.most_recent_state.stream_descriptor.name == "post_comments" + assert output.most_recent_state.stream_state == {"updated_at": post_comment["updated_at"]} @HttpMocker() def test_given_state_and_pagination_when_read_then_return_records(self, http_mocker): @@ -215,4 +216,5 @@ def test_given_state_and_pagination_when_read_then_return_records(self, http_moc output = read_stream("post_comments", SyncMode.incremental, self._config, StateBuilder().with_stream_state("post_comments", state).build()) assert len(output.records) == 2 - assert output.most_recent_state == {"post_comments": {"updated_at": datetime_to_string(last_page_record_updated_at)}} + assert output.most_recent_state.stream_descriptor.name == "post_comments" + assert output.most_recent_state.stream_state == {"updated_at": datetime_to_string(last_page_record_updated_at)} diff --git a/airbyte-integrations/connectors/source-zendesk-support/unit_tests/integrations/test_post_votes.py b/airbyte-integrations/connectors/source-zendesk-support/unit_tests/integrations/test_post_votes.py index a98444bad526e..f8adc48af73a4 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/unit_tests/integrations/test_post_votes.py +++ b/airbyte-integrations/connectors/source-zendesk-support/unit_tests/integrations/test_post_votes.py @@ -158,7 +158,8 @@ def test_given_no_state_and_successful_sync_when_read_then_set_state_to_now(self assert len(output.records) == 1 post_comment = post_comments_record_builder.build() - assert output.most_recent_state == {"post_votes": {"updated_at": post_comment["updated_at"]}} + assert output.most_recent_state.stream_descriptor.name == "post_votes" + assert output.most_recent_state.stream_state == {"updated_at": post_comment["updated_at"]} @HttpMocker() def test_given_state_and_pagination_when_read_then_return_records(self, http_mocker): @@ -215,4 +216,5 @@ def test_given_state_and_pagination_when_read_then_return_records(self, http_moc output = read_stream("post_votes", SyncMode.incremental, self._config, StateBuilder().with_stream_state("post_votes", state).build()) assert len(output.records) == 2 - assert output.most_recent_state == {"post_votes": {"updated_at": datetime_to_string(last_page_record_updated_at)}} + assert output.most_recent_state.stream_descriptor.name == "post_votes" + assert output.most_recent_state.stream_state == {"updated_at": datetime_to_string(last_page_record_updated_at)} diff --git a/airbyte-integrations/connectors/source-zendesk-talk/metadata.yaml b/airbyte-integrations/connectors/source-zendesk-talk/metadata.yaml index 212e2a43ea26a..032e8976e979c 100644 --- a/airbyte-integrations/connectors/source-zendesk-talk/metadata.yaml +++ b/airbyte-integrations/connectors/source-zendesk-talk/metadata.yaml @@ -11,12 +11,13 @@ data: connectorSubtype: api connectorType: source definitionId: c8630570-086d-4a40-99ae-ea5b18673071 - dockerImageTag: 0.1.13 + dockerImageTag: 0.2.0 dockerRepository: airbyte/source-zendesk-talk documentationUrl: https://docs.airbyte.com/integrations/sources/zendesk-talk githubIssueLabel: source-zendesk-talk icon: zendesk-talk.svg license: MIT + maxSecondsBetweenMessages: 300 name: Zendesk Talk remoteRegistries: pypi: diff --git a/airbyte-integrations/connectors/source-zendesk-talk/poetry.lock b/airbyte-integrations/connectors/source-zendesk-talk/poetry.lock index 7941e3a9b3df5..712f8a1059021 100644 --- a/airbyte-integrations/connectors/source-zendesk-talk/poetry.lock +++ b/airbyte-integrations/connectors/source-zendesk-talk/poetry.lock @@ -1,40 +1,39 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.58.8" +version = "0.77.2" description = "A framework for writing Airbyte Connectors." optional = false -python-versions = ">=3.8" +python-versions = "<4.0,>=3.9" files = [ - {file = "airbyte-cdk-0.58.8.tar.gz", hash = "sha256:80cfad673302802e0f5d485879f1bd2f3679a4e3b12b2af42bd7bb37a3991a71"}, - {file = "airbyte_cdk-0.58.8-py3-none-any.whl", hash = "sha256:5b0b19745e96ba3f20683c48530d58a00be48361dfa34ec3c38cef8da03ba330"}, + {file = "airbyte_cdk-0.77.2-py3-none-any.whl", hash = "sha256:6dffbe0c4b3454a5cdd20525b4f1e9cfef2e80c005b6b30473fc5bf6f75af64e"}, + {file = "airbyte_cdk-0.77.2.tar.gz", hash = "sha256:84aeb27862a18e135c7bc3a5dfc363037665d428e7495e8824673f853adcca70"}, ] [package.dependencies] airbyte-protocol-models = "0.5.1" backoff = "*" cachetools = "*" -Deprecated = ">=1.2,<2.0" +Deprecated = ">=1.2,<1.3" dpath = ">=2.0.1,<2.1.0" genson = "1.2.2" isodate = ">=0.6.1,<0.7.0" Jinja2 = ">=3.1.2,<3.2.0" -jsonref = ">=0.2,<1.0" +jsonref = ">=0.2,<0.3" jsonschema = ">=3.2.0,<3.3.0" pendulum = "<3.0.0" pydantic = ">=1.10.8,<2.0.0" pyrate-limiter = ">=3.1.0,<3.2.0" python-dateutil = "*" -PyYAML = ">=6.0.1" +PyYAML = ">=6.0.1,<7.0.0" requests = "*" -requests-cache = "*" +requests_cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] [[package]] @@ -104,13 +103,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -366,13 +365,13 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jsonref" -version = "0.3.0" -description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." +version = "0.2" +description = "An implementation of JSON Reference for Python" optional = false -python-versions = ">=3.3,<4.0" +python-versions = "*" files = [ - {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, - {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, ] [[package]] @@ -467,13 +466,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -685,30 +684,30 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xm [[package]] name = "pytest-mock" -version = "3.12.0" +version = "3.14.0" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, - {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, ] [package.dependencies] -pytest = ">=5.0" +pytest = ">=6.2.5" [package.extras] dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -808,13 +807,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -826,50 +825,48 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" -version = "1.11.0" +version = "1.12.0" description = "Mock out responses from the requests package" optional = false python-versions = "*" files = [ - {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"}, - {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"}, + {file = "requests-mock-1.12.0.tar.gz", hash = "sha256:4e34f2a2752f0b78397fb414526605d95fcdeab021ac1f26d18960e7eb41f6a8"}, + {file = "requests_mock-1.12.0-py2.py3-none-any.whl", hash = "sha256:4f6fdf956de568e0bac99eee4ad96b391c602e614cc0ad33e7f5c72edd699e70"}, ] [package.dependencies] -requests = ">=2.3,<3" -six = "*" +requests = ">=2.22,<3" [package.extras] fixture = ["fixtures"] -test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] [[package]] name = "setuptools" -version = "69.1.0" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -895,13 +892,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -920,13 +917,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1031,4 +1028,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "b9f1312ff855d2ea6c2f6c7a329923044ad6cd1b88c9c3de3b49736510b45be6" +content-hash = "dedee3fe65d06e7ceb8403980b7cb1fadb463183c7c25b2cda747e60bcd7be03" diff --git a/airbyte-integrations/connectors/source-zendesk-talk/pyproject.toml b/airbyte-integrations/connectors/source-zendesk-talk/pyproject.toml index cd56a4f28eaac..2807c8d1581c6 100644 --- a/airbyte-integrations/connectors/source-zendesk-talk/pyproject.toml +++ b/airbyte-integrations/connectors/source-zendesk-talk/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "0.1.13" +version = "0.2.0" name = "source-zendesk-talk" description = "Source implementation for Zendesk Talk." authors = [ "Airbyte ",] @@ -17,7 +17,7 @@ include = "source_zendesk_talk" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "==0.58.8" +airbyte-cdk = "^0" [tool.poetry.scripts] source-zendesk-talk = "source_zendesk_talk.run:run" diff --git a/airbyte-integrations/connectors/source-zenloop/Dockerfile b/airbyte-integrations/connectors/source-zenloop/Dockerfile deleted file mode 100644 index d88a8850387ae..0000000000000 --- a/airbyte-integrations/connectors/source-zenloop/Dockerfile +++ /dev/null @@ -1,38 +0,0 @@ -FROM python:3.9.11-alpine3.15 as base - -# build and load all requirements -FROM base as builder -WORKDIR /airbyte/integration_code - -# upgrade pip to the latest version -RUN apk --no-cache upgrade \ - && pip install --upgrade pip \ - && apk --no-cache add tzdata build-base - - -COPY setup.py ./ -# install necessary packages to a temporary folder -RUN pip install --prefix=/install . - -# build a clean environment -FROM base -WORKDIR /airbyte/integration_code - -# copy all loaded and built libraries to a pure basic image -COPY --from=builder /install /usr/local -# add default timezone settings -COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime -RUN echo "Etc/UTC" > /etc/timezone - -# bash is installed for more convenient debugging. -RUN apk --no-cache add bash - -# copy payload code only -COPY main.py ./ -COPY source_zenloop ./source_zenloop - -ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] - -LABEL io.airbyte.version=0.1.10 -LABEL io.airbyte.name=airbyte/source-zenloop diff --git a/airbyte-integrations/connectors/source-zenloop/README.md b/airbyte-integrations/connectors/source-zenloop/README.md index a06504a693734..13a27f64b3df1 100644 --- a/airbyte-integrations/connectors/source-zenloop/README.md +++ b/airbyte-integrations/connectors/source-zenloop/README.md @@ -1,69 +1,55 @@ -# Zenloop Source +# Zenloop source connector + This is the repository for the Zenloop source connector, written in Python. -For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/zenloop). +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.com/integrations/sources/zenloop). ## Local development ### Prerequisites -**To iterate on this connector, make sure to complete this prerequisites section.** - -#### Minimum Python version required `= 3.7.0` +* Python (~=3.9) +* Poetry (~=1.7) - installation instructions [here](https://python-poetry.org/docs/#installation) -#### Build & Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python -m venv .venv -``` -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt -pip install '.[tests]' +### Installing the connector +From this connector directory, run: +```bash +poetry install --with dev ``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. -#### Create credentials -**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/zenloop) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_zenloop/spec.json` file. +### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/sources/zenloop) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_zenloop/spec.yaml` file. Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. -See `integration_tests/sample_config.json` for a sample config file. +See `sample_files/sample_config.json` for a sample config file. -**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source zenloop test creds` -and place them into `secrets/config.json`. ### Locally running the connector ``` -python main.py spec -python main.py check --config secrets/config.json -python main.py discover --config secrets/config.json -python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +poetry run source-zenloop spec +poetry run source-zenloop check --config secrets/config.json +poetry run source-zenloop discover --config secrets/config.json +poetry run source-zenloop read --config secrets/config.json --catalog sample_files/configured_catalog.json ``` -### Locally running the connector docker image - +### Running unit tests +To run unit tests locally, from the connector directory run: +``` +poetry run pytest unit_tests +``` -#### Build -**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):** +### Building the docker image +1. Install [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) +2. Run the following command to build the docker image: ```bash airbyte-ci connectors --name=source-zenloop build ``` -An image will be built with the tag `airbyte/source-zenloop:dev`. +An image will be available on your host with the tag `airbyte/source-zenloop:dev`. -**Via `docker build`:** -```bash -docker build -t airbyte/source-zenloop:dev . -``` -#### Run +### Running as a docker container Then run any of the connector commands as follows: ``` docker run --rm airbyte/source-zenloop:dev spec @@ -72,29 +58,34 @@ docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-zenloop:dev discover - docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-zenloop:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json ``` -## Testing +### Running our CI test suite You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): ```bash airbyte-ci connectors --name=source-zenloop test ``` ### Customizing acceptance Tests -Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. +Customize `acceptance-test-config.yml` file to configure acceptance tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. -## Dependency Management -All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. -We split dependencies between two groups, dependencies that are: -* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. -* required for the testing need to go to `TEST_REQUIREMENTS` list +### Dependency Management +All of your dependencies should be managed via Poetry. +To add a new dependency, run: +```bash +poetry add +``` + +Please commit the changes to `pyproject.toml` and `poetry.lock` files. -### Publishing a new version of the connector +## Publishing a new version of the connector You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? 1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-zenloop test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). +2. Bump the connector version (please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors)): + - bump the `dockerImageTag` value in in `metadata.yaml` + - bump the `version` value in `pyproject.toml` 3. Make sure the `metadata.yaml` content is up to date. -4. Make the connector documentation and its changelog is up to date (`docs/integrations/sources/zenloop.md`). +4. Make sure the connector documentation and its changelog is up to date (`docs/integrations/sources/zenloop.md`). 5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). 6. Pat yourself on the back for being an awesome contributor. 7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. - +8. Once your PR is merged, the new version of the connector will be automatically published to Docker Hub and our connector registry. \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-zenloop/metadata.yaml b/airbyte-integrations/connectors/source-zenloop/metadata.yaml index e586ad077d1e7..de9c9fe49dd4b 100644 --- a/airbyte-integrations/connectors/source-zenloop/metadata.yaml +++ b/airbyte-integrations/connectors/source-zenloop/metadata.yaml @@ -1,32 +1,34 @@ data: + ab_internal: + ql: 300 + sl: 100 allowedHosts: hosts: - api.zenloop.com + connectorBuildOptions: + baseImage: docker.io/airbyte/python-connector-base:1.2.0@sha256:c22a9d97464b69d6ef01898edf3f8612dc11614f05a84984451dde195f337db9 connectorSubtype: api connectorType: source definitionId: f1e4c7f6-db5c-4035-981f-d35ab4998794 - dockerImageTag: 0.1.10 + dockerImageTag: 0.1.11 dockerRepository: airbyte/source-zenloop + documentationUrl: https://docs.airbyte.com/integrations/sources/zenloop githubIssueLabel: source-zenloop icon: zenloop.svg license: MIT name: Zenloop - remoteRegistries: - pypi: - enabled: true - packageName: airbyte-source-zenloop registries: cloud: enabled: true oss: enabled: true releaseStage: beta - documentationUrl: https://docs.airbyte.com/integrations/sources/zenloop + remoteRegistries: + pypi: + enabled: true + packageName: airbyte-source-zenloop + supportLevel: community tags: - language:python - cdk:low-code - ab_internal: - sl: 100 - ql: 300 - supportLevel: community metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-zenloop/poetry.lock b/airbyte-integrations/connectors/source-zenloop/poetry.lock new file mode 100644 index 0000000000000..fdd83631b6885 --- /dev/null +++ b/airbyte-integrations/connectors/source-zenloop/poetry.lock @@ -0,0 +1,1050 @@ +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. + +[[package]] +name = "airbyte-cdk" +version = "0.78.6" +description = "A framework for writing Airbyte Connectors." +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "airbyte_cdk-0.78.6-py3-none-any.whl", hash = "sha256:e5f44c6da6d5b5d6f3f6a7f41a3f4a5e2dfc6fefb4c6823af6302c34c6fb4a87"}, + {file = "airbyte_cdk-0.78.6.tar.gz", hash = "sha256:0178f3cefa705f600d51f09e1313024a89cd1c99f2f1f796e8e0181d8e02ad2f"}, +] + +[package.dependencies] +airbyte-protocol-models = "0.5.1" +backoff = "*" +cachetools = "*" +Deprecated = ">=1.2,<1.3" +dpath = ">=2.0.1,<2.1.0" +genson = "1.2.2" +isodate = ">=0.6.1,<0.7.0" +Jinja2 = ">=3.1.2,<3.2.0" +jsonref = ">=0.2,<0.3" +jsonschema = ">=3.2.0,<3.3.0" +pendulum = "<3.0.0" +pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" +python-dateutil = "*" +PyYAML = ">=6.0.1,<7.0.0" +requests = "*" +requests_cache = "*" +wcmatch = "8.4" + +[package.extras] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] +vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] + +[[package]] +name = "airbyte-protocol-models" +version = "0.5.1" +description = "Declares the Airbyte Protocol." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, + {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, +] + +[package.dependencies] +pydantic = ">=1.9.2,<2.0.0" + +[[package]] +name = "atomicwrites" +version = "1.4.1" +description = "Atomic file writes." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, +] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "bracex" +version = "2.4" +description = "Bash style brace expander." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + +[[package]] +name = "cachetools" +version = "5.3.3" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "dpath" +version = "2.0.8" +description = "Filesystem-like pathing and searching for dictionaries" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, + {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "genson" +version = "1.2.2" +description = "GenSON is a powerful, user-friendly JSON Schema generator." +optional = false +python-versions = "*" +files = [ + {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, +] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jsonref" +version = "0.2" +description = "An implementation of JSON Reference for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, +] + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0" +setuptools = "*" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "packaging" +version = "24.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, +] + +[[package]] +name = "pendulum" +version = "2.1.2" +description = "Python datetimes made easy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, +] + +[package.dependencies] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] + +[[package]] +name = "pydantic" +version = "1.10.15" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50f1666a9940d3d68683c9d96e39640f709d7a72ff8702987dab1761036206bb"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82790d4753ee5d00739d6cb5cf56bceb186d9d6ce134aca3ba7befb1eedbc2c8"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d207d5b87f6cbefbdb1198154292faee8017d7495a54ae58db06762004500d00"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e49db944fad339b2ccb80128ffd3f8af076f9f287197a480bf1e4ca053a866f0"}, + {file = "pydantic-1.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:d3b5c4cbd0c9cb61bbbb19ce335e1f8ab87a811f6d589ed52b0254cf585d709c"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3d5731a120752248844676bf92f25a12f6e45425e63ce22e0849297a093b5b0"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c365ad9c394f9eeffcb30a82f4246c0006417f03a7c0f8315d6211f25f7cb654"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3287e1614393119c67bd4404f46e33ae3be3ed4cd10360b48d0a4459f420c6a3"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be51dd2c8596b25fe43c0a4a59c2bee4f18d88efb8031188f9e7ddc6b469cf44"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6a51a1dd4aa7b3f1317f65493a182d3cff708385327c1c82c81e4a9d6d65b2e4"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4e316e54b5775d1eb59187f9290aeb38acf620e10f7fd2f776d97bb788199e53"}, + {file = "pydantic-1.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:0d142fa1b8f2f0ae11ddd5e3e317dcac060b951d605fda26ca9b234b92214986"}, + {file = "pydantic-1.10.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7ea210336b891f5ea334f8fc9f8f862b87acd5d4a0cbc9e3e208e7aa1775dabf"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3453685ccd7140715e05f2193d64030101eaad26076fad4e246c1cc97e1bb30d"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bea1f03b8d4e8e86702c918ccfd5d947ac268f0f0cc6ed71782e4b09353b26f"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:af9850d98fc21e5bc24ea9e35dd80a29faf6462c608728a110c0a30b595e58b7"}, + {file = "pydantic-1.10.15-cp37-cp37m-win_amd64.whl", hash = "sha256:d31ee5b14a82c9afe2bd26aaa405293d4237d0591527d9129ce36e58f19f95c1"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5e09c19df304b8123938dc3c53d3d3be6ec74b9d7d0d80f4f4b5432ae16c2022"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7ac9237cd62947db00a0d16acf2f3e00d1ae9d3bd602b9c415f93e7a9fc10528"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:584f2d4c98ffec420e02305cf675857bae03c9d617fcfdc34946b1160213a948"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbc6989fad0c030bd70a0b6f626f98a862224bc2b1e36bfc531ea2facc0a340c"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d573082c6ef99336f2cb5b667b781d2f776d4af311574fb53d908517ba523c22"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6bd7030c9abc80134087d8b6e7aa957e43d35714daa116aced57269a445b8f7b"}, + {file = "pydantic-1.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:3350f527bb04138f8aff932dc828f154847fbdc7a1a44c240fbfff1b57f49a12"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51d405b42f1b86703555797270e4970a9f9bd7953f3990142e69d1037f9d9e51"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a980a77c52723b0dc56640ced396b73a024d4b74f02bcb2d21dbbac1debbe9d0"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f1a1fb467d3f49e1708a3f632b11c69fccb4e748a325d5a491ddc7b5d22383"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:676ed48f2c5bbad835f1a8ed8a6d44c1cd5a21121116d2ac40bd1cd3619746ed"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92229f73400b80c13afcd050687f4d7e88de9234d74b27e6728aa689abcf58cc"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2746189100c646682eff0bce95efa7d2e203420d8e1c613dc0c6b4c1d9c1fde4"}, + {file = "pydantic-1.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:394f08750bd8eaad714718812e7fab615f873b3cdd0b9d84e76e51ef3b50b6b7"}, + {file = "pydantic-1.10.15-py3-none-any.whl", hash = "sha256:28e552a060ba2740d0d2aabe35162652c1459a0b9069fe0db7f4ee0e18e74d58"}, + {file = "pydantic-1.10.15.tar.gz", hash = "sha256:ca832e124eda231a60a041da4f013e3ff24949d94a01154b137fc2f2a43c3ffb"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + +[[package]] +name = "pyrsistent" +version = "0.20.0" +description = "Persistent/Functional/Immutable data structures" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, +] + +[[package]] +name = "pytest" +version = "6.2.5" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, + {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, +] + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +toml = "*" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] + +[[package]] +name = "pytest-mock" +version = "3.14.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, +] + +[package.dependencies] +pytest = ">=6.2.5" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-cache" +version = "1.2.0" +description = "A persistent cache for python requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, +] + +[package.dependencies] +attrs = ">=21.2" +cattrs = ">=22.2" +platformdirs = ">=2.5" +requests = ">=2.22" +url-normalize = ">=1.4" +urllib3 = ">=1.25.5" + +[package.extras] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] +bson = ["bson (>=0.5)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] +dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] +json = ["ujson (>=5.4)"] +mongodb = ["pymongo (>=3)"] +redis = ["redis (>=3)"] +security = ["itsdangerous (>=2.0)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "requests-mock" +version = "1.12.1" +description = "Mock out responses from the requests package" +optional = false +python-versions = ">=3.5" +files = [ + {file = "requests-mock-1.12.1.tar.gz", hash = "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401"}, + {file = "requests_mock-1.12.1-py2.py3-none-any.whl", hash = "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563"}, +] + +[package.dependencies] +requests = ">=2.22,<3" + +[package.extras] +fixture = ["fixtures"] + +[[package]] +name = "responses" +version = "0.13.4" +description = "A utility library for mocking out the `requests` Python library." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "responses-0.13.4-py2.py3-none-any.whl", hash = "sha256:d8d0f655710c46fd3513b9202a7f0dcedd02ca0f8cf4976f27fa8ab5b81e656d"}, + {file = "responses-0.13.4.tar.gz", hash = "sha256:9476775d856d3c24ae660bbebe29fb6d789d4ad16acd723efbfb6ee20990b899"}, +] + +[package.dependencies] +requests = ">=2.0" +six = "*" +urllib3 = ">=1.25.10" + +[package.extras] +tests = ["coverage (>=3.7.1,<6.0.0)", "flake8", "mypy", "pytest (>=4.6)", "pytest (>=4.6,<5.0)", "pytest-cov", "pytest-localserver", "types-mock", "types-requests", "types-six"] + +[[package]] +name = "setuptools" +version = "69.2.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "typing-extensions" +version = "4.10.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, +] + +[[package]] +name = "url-normalize" +version = "1.4.3" +description = "URL normalization for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, + {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wcmatch" +version = "8.4" +description = "Wildcard/glob file name matcher." +optional = false +python-versions = ">=3.7" +files = [ + {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, + {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, +] + +[package.dependencies] +bracex = ">=2.1.1" + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9,<3.12" +content-hash = "2b328f2521c0eca700026e672cc1acb57cf3214ae21b3bb5c27b4c2a076d29c7" diff --git a/airbyte-integrations/connectors/source-zenloop/pyproject.toml b/airbyte-integrations/connectors/source-zenloop/pyproject.toml new file mode 100644 index 0000000000000..263f52d42fa26 --- /dev/null +++ b/airbyte-integrations/connectors/source-zenloop/pyproject.toml @@ -0,0 +1,29 @@ +[build-system] +requires = [ "poetry-core>=1.0.0",] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +version = "0.1.11" +name = "source-zenloop" +description = "Source implementation for Zenloop." +authors = [ "Alexander Batoulis ",] +license = "MIT" +readme = "README.md" +documentation = "https://docs.airbyte.com/integrations/sources/zenloop" +homepage = "https://airbyte.com" +repository = "https://github.com/airbytehq/airbyte" +[[tool.poetry.packages]] +include = "source_zenloop" + +[tool.poetry.dependencies] +python = "^3.9,<3.12" +airbyte-cdk = "^0" + +[tool.poetry.scripts] +source-zenloop = "source_zenloop.run:run" + +[tool.poetry.group.dev.dependencies] +requests-mock = "^1.9.3" +responses = "^0.13.3" +pytest = "^6.1" +pytest-mock = "^3.6.1" diff --git a/airbyte-integrations/connectors/source-zenloop/setup.py b/airbyte-integrations/connectors/source-zenloop/setup.py deleted file mode 100644 index 8bc3ceffdfcb0..0000000000000 --- a/airbyte-integrations/connectors/source-zenloop/setup.py +++ /dev/null @@ -1,47 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = [ - "airbyte-cdk>=0.44.1", -] - -TEST_REQUIREMENTS = [ - "requests-mock~=1.9.3", - "pytest~=6.1", - "pytest-mock~=3.6.1", - "responses~=0.13.3", -] - -setup( - entry_points={ - "console_scripts": [ - "source-zenloop=source_zenloop.run:run", - ], - }, - name="source_zenloop", - description="Source implementation for Zenloop.", - author="Alexander Batoulis", - author_email="alexander.batoulis@hometogo.com", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={ - "": [ - # Include yaml files in the package (if any) - "*.yml", - "*.yaml", - # Include all json files in the package, up to 4 levels deep - "*.json", - "*/*.json", - "*/*/*.json", - "*/*/*/*.json", - "*/*/*/*/*.json", - ] - }, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/source-zenloop/source_zenloop/components.py b/airbyte-integrations/connectors/source-zenloop/source_zenloop/components.py index 909e0bfc7054e..2f85e4b1b8e11 100644 --- a/airbyte-integrations/connectors/source-zenloop/source_zenloop/components.py +++ b/airbyte-integrations/connectors/source-zenloop/source_zenloop/components.py @@ -32,4 +32,4 @@ def stream_slices(self) -> Iterable[StreamSlice]: else: for parent_stream_config in self.parent_stream_configs: stream_state_field = parent_stream_config.partition_field.eval(self.config) - yield {stream_state_field: custom_stream_state_value, "parent_slice": {}} + yield StreamSlice(partition={stream_state_field: custom_stream_state_value, "parent_slice": {}}, cursor_slice={}) diff --git a/airbyte-integrations/connectors/source-zoom/Dockerfile b/airbyte-integrations/connectors/source-zoom/Dockerfile deleted file mode 100644 index 2fcce7c308da7..0000000000000 --- a/airbyte-integrations/connectors/source-zoom/Dockerfile +++ /dev/null @@ -1,40 +0,0 @@ -FROM python:3.9.11-alpine3.15 as base - -# build and load all requirements -FROM base as builder -WORKDIR /airbyte/integration_code - -# upgrade pip to the latest version -RUN apk --no-cache upgrade \ - && pip install --upgrade pip \ - && apk --no-cache add tzdata build-base - - -COPY setup.py ./ -# install necessary packages to a temporary folder -RUN pip install --prefix=/install . - -# build a clean environment -FROM base -WORKDIR /airbyte/integration_code - -# copy all loaded and built libraries to a pure basic image -COPY --from=builder /install /usr/local -# add default timezone settings -COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime -RUN echo "Etc/UTC" > /etc/timezone - -# bash is installed for more convenient debugging. -RUN apk --no-cache add bash - -# copy payload code only -COPY main.py ./ -COPY source_zoom ./source_zoom - - -ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" - -ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] - -LABEL io.airbyte.version=1.0.0 -LABEL io.airbyte.name=airbyte/source-zoom diff --git a/airbyte-integrations/connectors/source-zoom/README.md b/airbyte-integrations/connectors/source-zoom/README.md index d2146cd51da74..3b188883c65e0 100644 --- a/airbyte-integrations/connectors/source-zoom/README.md +++ b/airbyte-integrations/connectors/source-zoom/README.md @@ -17,19 +17,70 @@ and place them into `secrets/config.json`. ### Locally running the connector docker image -#### Build -**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):** + + +#### Use `airbyte-ci` to build your connector +The Airbyte way of building this connector is to use our `airbyte-ci` tool. +You can follow install instructions [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md#L1). +Then running the following command will build your connector: + ```bash -airbyte-ci connectors --name=source-zoom build +airbyte-ci connectors --name source-zoom build +``` +Once the command is done, you will find your connector image in your local docker registry: `airbyte/source-zoom:dev`. + +##### Customizing our build process +When contributing on our connector you might need to customize the build process to add a system dependency or set an env var. +You can customize our build process by adding a `build_customization.py` module to your connector. +This module should contain a `pre_connector_install` and `post_connector_install` async function that will mutate the base image and the connector container respectively. +It will be imported at runtime by our build process and the functions will be called if they exist. + +Here is an example of a `build_customization.py` module: +```python +from __future__ import annotations + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + # Feel free to check the dagger documentation for more information on the Container object and its methods. + # https://dagger-io.readthedocs.io/en/sdk-python-v0.6.4/ + from dagger import Container + + +async def pre_connector_install(base_image_container: Container) -> Container: + return await base_image_container.with_env_variable("MY_PRE_BUILD_ENV_VAR", "my_pre_build_env_var_value") + +async def post_connector_install(connector_container: Container) -> Container: + return await connector_container.with_env_variable("MY_POST_BUILD_ENV_VAR", "my_post_build_env_var_value") ``` -An image will be built with the tag `airbyte/source-zoom:dev`. +#### Build your own connector image +This connector is built using our dynamic built process in `airbyte-ci`. +The base image used to build it is defined within the metadata.yaml file under the `connectorBuildOptions`. +The build logic is defined using [Dagger](https://dagger.io/) [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/pipelines/builds/python_connectors.py). +It does not rely on a Dockerfile. + +If you would like to patch our connector and build your own a simple approach would be to: + +1. Create your own Dockerfile based on the latest version of the connector image. +```Dockerfile +FROM airbyte/source-zoom:latest + +COPY . ./airbyte/integration_code +RUN pip install ./airbyte/integration_code -**Via `docker build`:** +# The entrypoint and default env vars are already set in the base image +# ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +# ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] +``` +Please use this as an example. This is not optimized. + +2. Build your image: ```bash docker build -t airbyte/source-zoom:dev . +# Running the spec command against your patched connector +docker run airbyte/source-zoom:dev spec ``` - #### Run Then run any of the connector commands as follows: ``` @@ -64,4 +115,3 @@ You've checked out the repo, implemented a million dollar feature, and you're re 5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). 6. Pat yourself on the back for being an awesome contributor. 7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. - diff --git a/airbyte-integrations/connectors/source-zoom/acceptance-test-config.yml b/airbyte-integrations/connectors/source-zoom/acceptance-test-config.yml index 0401b49598b8f..55fa9e4ac247f 100644 --- a/airbyte-integrations/connectors/source-zoom/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-zoom/acceptance-test-config.yml @@ -1,44 +1,55 @@ # See [Connector Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/connector-acceptance-tests-reference) # for more information about how to configure these tests connector_image: airbyte/source-zoom:dev -tests: +acceptance_tests: spec: - - spec_path: "source_zoom/spec.yaml" + tests: + - spec_path: "source_zoom/spec.yaml" connection: - - config_path: "secrets/config.json" - status: "succeed" - - config_path: "integration_tests/invalid_config.json" - status: "failed" + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" discovery: - - config_path: "secrets/config.json" + tests: + - config_path: "secrets/config.json" basic_read: - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" - timeout_seconds: 3600 - empty_streams: - - "meeting_registrants" - - "meeting_polls" - - "meeting_poll_results" - - "meeting_registration_questions" - - "webinars" - - "webinar_panelists" - - "webinar_registrants" - - "webinar_absentees" - - "webinar_polls" - - "webinar_poll_results" - - "webinar_registration_questions" - - "webinar_tracking_sources" - - "webinar_qna_results" - - "report_meetings" - - "report_meeting_participants" - - "report_webinars" - - "report_webinar_participants" + tests: + - config_path: "secrets/config.json" + expect_records: + path: "integration_tests/expected_records.jsonl" + configured_catalog_path: "integration_tests/configured_catalog.json" + timeout_seconds: 3600 + empty_streams: + - name: "webinars" + bypass_reason: "Need paid Webinar license to populate data" + - name: "webinar_panelists" + bypass_reason: "Need paid Webinar license to populate data" + - name: "webinar_registrants" + bypass_reason: "Need paid Webinar license to populate data" + - name: "webinar_absentees" + bypass_reason: "Need paid Webinar license to populate data" + - name: "webinar_polls" + bypass_reason: "Need paid Webinar license to populate data" + - name: "webinar_poll_results" + bypass_reason: "Need paid Webinar license to populate data" + - name: "webinar_registration_questions" + bypass_reason: "Need paid Webinar license to populate data" + - name: "webinar_tracking_sources" + bypass_reason: "Need paid Webinar license to populate data" + - name: "webinar_qna_results" + bypass_reason: "Need paid Webinar license to populate data" + - name: "report_webinars" + bypass_reason: "Need paid Webinar license to populate data" + - name: "report_webinar_participants" + bypass_reason: "Need paid Webinar license to populate data" full_refresh: - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" - ignored_fields: - "meetings": - - "start_url" - "webinars": - - "start_url" - timeout_seconds: 3600 + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + ignored_fields: + meetings: + - name: "start_url" + bypass_reason: "Causes sequential_read test to fail as the value is unique upon each read" + timeout_seconds: 3600 diff --git a/airbyte-integrations/connectors/source-zoom/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-zoom/integration_tests/expected_records.jsonl new file mode 100644 index 0000000000000..981349b07114a --- /dev/null +++ b/airbyte-integrations/connectors/source-zoom/integration_tests/expected_records.jsonl @@ -0,0 +1,15 @@ +{"stream": "users", "data": {"id": "mKV7t-gGSWWQOqej1qxafA", "first_name": "Integration", "last_name": "Testing", "display_name": "Integration Testing", "email": "integration.testing.alternate@gmail.com", "type": 2, "pmi": 3936831403, "timezone": "America/Los_Angeles", "verified": 1, "created_at": "2024-02-12T11:06:04Z", "last_login_time": "2024-02-20T11:36:44Z", "language": "en-US", "phone_number": "", "status": "active", "role_id": "0", "user_created_at": "2024-02-12T11:06:04Z"}, "emitted_at": 1709070636347} +{"stream": "meetings", "data": {"uuid": "i6KKhn7/QNGLiZVJKyW1gQ==", "id": 97151043254, "host_id": "mKV7t-gGSWWQOqej1qxafA", "host_email": "integration.testing.alternate@gmail.com", "assistant_id": "", "topic": "Test meeting with registration", "type": 2, "status": "waiting", "start_time": "2024-02-20T14:00:00Z", "duration": 30, "timezone": "America/Los_Angeles", "agenda": "", "created_at": "2024-02-20T11:41:48Z", "start_url": "https://zoom.us/s/97151043254?zak=eyJ0eXAiOiJKV1QiLCJzdiI6IjAwMDAwMSIsInptX3NrbSI6InptX28ybSIsImFsZyI6IkhTMjU2In0.eyJhdWQiOiJjbGllbnRzbSIsInVpZCI6Im1LVjd0LWdHU1dXUU9xZWoxcXhhZkEiLCJpc3MiOiJ3ZWIiLCJzayI6IjIxMTY4OTM3NDA5MTE4NDE3ODkiLCJzdHkiOjEwMCwid2NkIjoiYXcxIiwiY2x0IjowLCJtbnVtIjoiOTcxNTEwNDMyNTQiLCJleHAiOjE3MDkwNzc4MzcsImlhdCI6MTcwOTA3MDYzNywiYWlkIjoiN2VBTjhCUXRTSFMzSE05LVllWkd5ZyIsImNpZCI6IiJ9.tphsykiiZHoDrgTi3Q0As7ZF2T3C6TQGi2jmf4UOtVc", "join_url": "https://zoom.us/j/97151043254?pwd=N3ZqeThrRndJTHZQYTlDdFJ2Tm5iQT09", "registration_url": "https://zoom.us/meeting/register/tJMscuioqjgoH9BkXmJtHnAWICvzNFMQkdI2", "settings": {"host_video": false, "participant_video": false, "cn_meeting": false, "in_meeting": false, "join_before_host": true, "jbh_time": 0, "mute_upon_entry": true, "watermark": false, "use_pmi": false, "approval_type": 0, "audio": "both", "auto_recording": "none", "enforce_login": false, "enforce_login_domains": "", "alternative_hosts": "", "alternative_host_update_polls": false, "close_registration": false, "show_share_button": true, "allow_multiple_devices": true, "registrants_confirmation_email": true, "waiting_room": false, "request_permission_to_unmute_participants": false, "global_dial_in_countries": ["US"], "global_dial_in_numbers": [{"country_name": "US", "number": "+1 646 931 3860", "type": "toll", "country": "US"}, {"country_name": "US", "city": "New York", "number": "+1 929 436 2866", "type": "toll", "country": "US"}, {"country_name": "US", "city": "Washington DC", "number": "+1 301 715 8592", "type": "toll", "country": "US"}, {"country_name": "US", "number": "+1 305 224 1968", "type": "toll", "country": "US"}, {"country_name": "US", "number": "+1 309 205 3325", "type": "toll", "country": "US"}, {"country_name": "US", "city": "Chicago", "number": "+1 312 626 6799", "type": "toll", "country": "US"}, {"country_name": "US", "number": "+1 719 359 4580", "type": "toll", "country": "US"}, {"country_name": "US", "number": "+1 253 205 0468", "type": "toll", "country": "US"}, {"country_name": "US", "city": "Tacoma", "number": "+1 253 215 8782", "type": "toll", "country": "US"}, {"country_name": "US", "city": "Houston", "number": "+1 346 248 7799", "type": "toll", "country": "US"}, {"country_name": "US", "number": "+1 360 209 5623", "type": "toll", "country": "US"}, {"country_name": "US", "number": "+1 386 347 5053", "type": "toll", "country": "US"}, {"country_name": "US", "number": "+1 507 473 4847", "type": "toll", "country": "US"}, {"country_name": "US", "number": "+1 564 217 2000", "type": "toll", "country": "US"}, {"country_name": "US", "number": "+1 669 444 9171", "type": "toll", "country": "US"}, {"country_name": "US", "city": "San Jose", "number": "+1 669 900 6833", "type": "toll", "country": "US"}, {"country_name": "US", "number": "+1 689 278 1000", "type": "toll", "country": "US"}], "registrants_email_notification": true, "meeting_authentication": false, "encryption_type": "enhanced_encryption", "approved_or_denied_countries_or_regions": {"enable": false}, "breakout_room": {"enable": false}, "internal_meeting": false, "continuous_meeting_chat": {"enable": false, "auto_add_invited_external_users": false}, "participant_focused_meeting": false, "push_change_to_calendar": false, "resources": [], "alternative_hosts_email_notification": true, "show_join_info": true, "device_testing": false, "focus_mode": false, "enable_dedicated_group_chat": false, "private_meeting": false, "email_notification": true, "host_save_video_order": false, "sign_language_interpretation": {"enable": false}, "email_in_attendee_report": false}, "pre_schedule": false}, "emitted_at": 1709070638055} +{"stream": "meetings", "data": {"uuid": "i/ddUMbhSOOY2dZJ0aLfzw==", "id": 98217683973, "host_id": "mKV7t-gGSWWQOqej1qxafA", "host_email": "integration.testing.alternate@gmail.com", "assistant_id": "", "topic": "Polls test meeting", "type": 2, "status": "waiting", "start_time": "2024-02-20T15:30:00Z", "duration": 30, "timezone": "America/Los_Angeles", "agenda": "", "created_at": "2024-02-20T15:16:09Z", "start_url": "https://zoom.us/s/98217683973?zak=eyJ0eXAiOiJKV1QiLCJzdiI6IjAwMDAwMSIsInptX3NrbSI6InptX28ybSIsImFsZyI6IkhTMjU2In0.eyJhdWQiOiJjbGllbnRzbSIsInVpZCI6Im1LVjd0LWdHU1dXUU9xZWoxcXhhZkEiLCJpc3MiOiJ3ZWIiLCJzayI6IjIxMTY4OTM3NDA5MTE4NDE3ODkiLCJzdHkiOjEwMCwid2NkIjoiYXcxIiwiY2x0IjowLCJtbnVtIjoiOTgyMTc2ODM5NzMiLCJleHAiOjE3MDkwNzc4MzcsImlhdCI6MTcwOTA3MDYzNywiYWlkIjoiN2VBTjhCUXRTSFMzSE05LVllWkd5ZyIsImNpZCI6IiJ9.zmZE9kPjAG7GFgJgPLzZY4UVIUSPl7Xcd3vGJuNHKNU", "join_url": "https://zoom.us/j/98217683973?pwd=RVBBTjNvOGljS0EwakRoRktlNmJIdz09", "settings": {"host_video": false, "participant_video": false, "cn_meeting": false, "in_meeting": false, "join_before_host": true, "jbh_time": 0, "mute_upon_entry": true, "watermark": false, "use_pmi": false, "approval_type": 2, "audio": "both", "auto_recording": "none", "enforce_login": false, "enforce_login_domains": "", "alternative_hosts": "", "alternative_host_update_polls": false, "close_registration": false, "show_share_button": false, "allow_multiple_devices": false, "registrants_confirmation_email": true, "waiting_room": false, "request_permission_to_unmute_participants": false, "global_dial_in_countries": ["US"], "global_dial_in_numbers": [{"country_name": "US", "city": "Chicago", "number": "+1 312 626 6799", "type": "toll", "country": "US"}, {"country_name": "US", "number": "+1 646 931 3860", "type": "toll", "country": "US"}, {"country_name": "US", "city": "New York", "number": "+1 929 436 2866", "type": "toll", "country": "US"}, {"country_name": "US", "city": "Washington DC", "number": "+1 301 715 8592", "type": "toll", "country": "US"}, {"country_name": "US", "number": "+1 305 224 1968", "type": "toll", "country": "US"}, {"country_name": "US", "number": "+1 309 205 3325", "type": "toll", "country": "US"}, {"country_name": "US", "number": "+1 689 278 1000", "type": "toll", "country": "US"}, {"country_name": "US", "number": "+1 719 359 4580", "type": "toll", "country": "US"}, {"country_name": "US", "number": "+1 253 205 0468", "type": "toll", "country": "US"}, {"country_name": "US", "city": "Tacoma", "number": "+1 253 215 8782", "type": "toll", "country": "US"}, {"country_name": "US", "city": "Houston", "number": "+1 346 248 7799", "type": "toll", "country": "US"}, {"country_name": "US", "number": "+1 360 209 5623", "type": "toll", "country": "US"}, {"country_name": "US", "number": "+1 386 347 5053", "type": "toll", "country": "US"}, {"country_name": "US", "number": "+1 507 473 4847", "type": "toll", "country": "US"}, {"country_name": "US", "number": "+1 564 217 2000", "type": "toll", "country": "US"}, {"country_name": "US", "number": "+1 669 444 9171", "type": "toll", "country": "US"}, {"country_name": "US", "city": "San Jose", "number": "+1 669 900 6833", "type": "toll", "country": "US"}], "registrants_email_notification": true, "meeting_authentication": false, "encryption_type": "enhanced_encryption", "approved_or_denied_countries_or_regions": {"enable": false}, "breakout_room": {"enable": false}, "internal_meeting": false, "continuous_meeting_chat": {"enable": true, "auto_add_invited_external_users": false, "channel_id": "efa253a7b352473da9ebb0894aef7403"}, "participant_focused_meeting": false, "push_change_to_calendar": false, "resources": [], "alternative_hosts_email_notification": true, "show_join_info": false, "device_testing": false, "focus_mode": false, "enable_dedicated_group_chat": true, "private_meeting": false, "email_notification": true, "host_save_video_order": false, "sign_language_interpretation": {"enable": false}, "email_in_attendee_report": false}, "pre_schedule": false}, "emitted_at": 1709070638173} +{"stream": "meeting_registrants", "data": {"id": "1D_xTxevRL-tNMQopOwGig", "first_name": "Tester", "last_name": "Airbyte", "email": "gl_danylo.jablonski@airbyte.io", "address": "", "city": "", "country": "", "zip": "", "state": "", "phone": "", "industry": "", "org": "", "job_title": "", "purchasing_time_frame": "", "role_in_purchase_process": "", "no_of_employees": "", "comments": "", "custom_questions": [], "status": "approved", "create_time": "2024-02-20T11:45:21Z", "join_url": "https://zoom.us/w/97151043254?tk=eyVyv4HGMH_jh6fNTeF64UzzH9CTtuI5ONpPzzu2D5c.DQYAAAAWnqdGthYxRF94VHhldlJMLXROTVFvcE93R2lnAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA&pwd=N3ZqeThrRndJTHZQYTlDdFJ2Tm5iQT09", "meeting_id": 97151043254}, "emitted_at": 1709070639856} +{"stream": "meeting_registrants", "data": {"id": "QEaQ7622RCaaCYY2wRLD_A", "first_name": "Elvis", "last_name": "Presley", "email": "gl_danylo.jablonski@airbyte.io", "address": "", "city": "", "country": "", "zip": "", "state": "", "phone": "", "industry": "", "org": "", "job_title": "", "purchasing_time_frame": "", "role_in_purchase_process": "", "no_of_employees": "", "comments": "", "custom_questions": [], "status": "approved", "create_time": "2024-02-21T16:26:55Z", "join_url": "https://zoom.us/w/97908376030?tk=tuP9JTX6C8oTyQYsr1_6OFX-9xtLEmWQDDn_oKzHPms.DQYAAAAWy8tB3hZRRWFRNzYyMlJDYWFDWVkyd1JMRF9BAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA&pwd=WUw2UUZZRVd3RzJTdW1rdnp4bjMwdz09", "meeting_id": 97908376030}, "emitted_at": 1709070640026} +{"stream": "meeting_polls", "data": {"id": "MKKxWL5vShqKp183Fr4R3A", "title": "Test Poll 3", "anonymous": false, "status": "notstart", "questions": [{"name": "Is it your birthday today?", "type": "single", "answers": ["Don't remember", "Won't say", "Yep"], "show_as_dropdown": false, "answer_required": false}, {"name": "When is my birthday?", "type": "single", "answers": ["Don't know", "Today", "August 24th"], "show_as_dropdown": false, "answer_required": false}], "poll_type": 2, "meeting_id": 92688776306}, "emitted_at": 1709070641588} +{"stream": "meeting_polls", "data": {"id": "s6LJXfLbQDetjNUxy9tY9g", "title": "Test Poll 2", "anonymous": false, "status": "notstart", "questions": [{"name": "Why sky is blue?", "type": "single", "answers": ["It's not", "Pollution", "I don't know"], "show_as_dropdown": false, "answer_required": false}, {"name": "Is the Earth round?", "type": "single", "answers": ["No, it's flat", "Yep, maybe", "I didn't check"], "show_as_dropdown": false, "answer_required": false}, {"name": "Do you like dogs?", "type": "single", "answers": ["yes", "Yes", "YES"], "show_as_dropdown": false, "answer_required": false}], "poll_type": 2, "meeting_id": 92688776306}, "emitted_at": 1709070641590} +{"stream": "meeting_poll_results", "data": {"name": "Test User 1", "question_details": [{"question": "Test question #1", "answer": "Answer #1", "polling_id": "QMhhtmgdRoGbzZJNoMnVAw", "date_time": "2024-02-20 15:31:33"}, {"question": "Test question #2", "answer": "Answer #2", "polling_id": "QMhhtmgdRoGbzZJNoMnVAw", "date_time": "2024-02-20 15:31:33"}], "first_name": "Test User 1", "meeting_id": 98217683973}, "emitted_at": 1709132575593} +{"stream": "meeting_poll_results", "data": {"name": "Elvis Presley", "email": "gl_danylo.jablonski@airbyte.io", "question_details": [{"question": "Why sky is blue?", "answer": "I don't know", "polling_id": "s6LJXfLbQDetjNUxy9tY9g", "date_time": "2024-02-21 16:35:41"}, {"question": "Is the Earth round?", "answer": "Yep, maybe", "polling_id": "s6LJXfLbQDetjNUxy9tY9g", "date_time": "2024-02-21 16:35:41"}, {"question": "Do you like dogs?", "answer": "yes", "polling_id": "s6LJXfLbQDetjNUxy9tY9g", "date_time": "2024-02-21 16:35:41"}], "first_name": "Elvis", "last_name": "Presley", "meeting_id": 97908376030}, "emitted_at": 1709132575896} +{"stream": "meeting_registration_questions", "data": {"questions": [{"field_name": "last_name", "required": true}], "custom_questions": [], "meeting_id": 97151043254}, "emitted_at": 1709070645669} +{"stream": "meeting_registration_questions", "data": {"questions": [{"field_name": "last_name", "required": true}], "custom_questions": [], "meeting_id": 96091267389}, "emitted_at": 1709070646051} +{"stream": "report_meetings", "data": {"uuid": "cuL1o37VTaiN3IhHInJ0GA==", "id": 92688776306, "host_id": "mKV7t-gGSWWQOqej1qxafA", "type": 8, "topic": "Weekly test meeting", "user_name": "Integration Testing", "user_email": "integration-testing1@proton.me", "start_time": "2024-02-13T15:00:42Z", "end_time": "2024-02-13T15:04:32Z", "duration": 4, "total_minutes": 5, "participants_count": 2, "tracking_fields": [], "dept": ""}, "emitted_at": 1709070655675} +{"stream": "report_meetings", "data": {"uuid": "OLsodO6eTfqjWLqBKPwRSw==", "id": 92281295306, "host_id": "mKV7t-gGSWWQOqej1qxafA", "type": 2, "topic": "Test meeting 1", "user_name": "Integration Testing", "user_email": "integration.testing.alternate@gmail.com", "start_time": "2024-02-12T11:24:50Z", "end_time": "2024-02-12T11:24:56Z", "duration": 1, "total_minutes": 1, "participants_count": 1, "tracking_fields": [], "dept": ""}, "emitted_at": 1709070655961} +{"stream": "report_meeting_participants", "data": {"id": "mKV7t-gGSWWQOqej1qxafA", "user_id": "16778240", "name": "Integration Testing", "user_email": "integration-testing1@proton.me", "join_time": "2024-02-13T15:00:42Z", "leave_time": "2024-02-13T15:04:32Z", "duration": 230, "attentiveness_score": "", "failover": false, "status": "in_meeting", "customer_key": "", "participant_user_id": "mKV7t-gGSWWQOqej1qxafA", "meeting_id": 92688776306}, "emitted_at": 1709070658333} +{"stream": "report_meeting_participants", "data": {"id": "", "user_id": "16785408", "name": "User 2", "user_email": "", "join_time": "2024-02-13T15:03:23Z", "leave_time": "2024-02-13T15:04:31Z", "duration": 68, "attentiveness_score": "", "failover": false, "status": "in_meeting", "customer_key": "", "meeting_id": 92688776306}, "emitted_at": 1709070658335} diff --git a/airbyte-integrations/connectors/source-zoom/metadata.yaml b/airbyte-integrations/connectors/source-zoom/metadata.yaml index e8d75508659c5..5cf26dbec3799 100644 --- a/airbyte-integrations/connectors/source-zoom/metadata.yaml +++ b/airbyte-integrations/connectors/source-zoom/metadata.yaml @@ -1,30 +1,42 @@ data: + ab_internal: + ql: 200 + sl: 100 + connectorBuildOptions: + baseImage: docker.io/airbyte/python-connector-base:1.2.0@sha256:c22a9d97464b69d6ef01898edf3f8612dc11614f05a84984451dde195f337db9 connectorSubtype: api connectorType: source definitionId: cbfd9856-1322-44fb-bcf1-0b39b7a8e92e - dockerImageTag: 1.0.0 + dockerImageTag: 1.1.0 dockerRepository: airbyte/source-zoom + documentationUrl: https://docs.airbyte.com/integrations/sources/zoom githubIssueLabel: source-zoom icon: zoom.svg license: MIT name: Zoom - remoteRegistries: - pypi: - # TODO: Enable once build problems are fixed - enabled: false - packageName: airbyte-source-zoom registries: cloud: enabled: true oss: enabled: true releaseStage: alpha - documentationUrl: https://docs.airbyte.com/integrations/sources/zoom + releases: + breakingChanges: + 1.1.0: + message: + Zoom has deprecated JWT authentication in favor of OAuth. To successfully + migrate, users will need to create a new server-to-server OAuth app and + update their credentials in the Airbyte UI. + upgradeDeadline: 2023-09-08 + scopedImpact: + - scopeType: stream + impactedScopes: ["meeting_registration_questions"] + remoteRegistries: + pypi: + enabled: true + packageName: airbyte-source-zoom + supportLevel: community tags: - language:python - cdk:low-code - ab_internal: - sl: 100 - ql: 200 - supportLevel: community metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-zoom/poetry.lock b/airbyte-integrations/connectors/source-zoom/poetry.lock new file mode 100644 index 0000000000000..9ddb0bea8076f --- /dev/null +++ b/airbyte-integrations/connectors/source-zoom/poetry.lock @@ -0,0 +1,1034 @@ +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. + +[[package]] +name = "airbyte-cdk" +version = "0.67.1" +description = "A framework for writing Airbyte Connectors." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte-cdk-0.67.1.tar.gz", hash = "sha256:3f82be93ae6f574c70d7ad5352d34f9235e86bd74c0db14a0aa7d246f3a403c2"}, + {file = "airbyte_cdk-0.67.1-py3-none-any.whl", hash = "sha256:b1de0f004441a2ae6e2928e55f7ac31bd160af30e928ffda90eb75b5e3c56bf3"}, +] + +[package.dependencies] +airbyte-protocol-models = "0.5.1" +backoff = "*" +cachetools = "*" +Deprecated = ">=1.2,<2.0" +dpath = ">=2.0.1,<2.1.0" +genson = "1.2.2" +isodate = ">=0.6.1,<0.7.0" +Jinja2 = ">=3.1.2,<3.2.0" +jsonref = ">=0.2,<1.0" +jsonschema = ">=3.2.0,<3.3.0" +pendulum = "<3.0.0" +pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" +python-dateutil = "*" +PyYAML = ">=6.0.1" +requests = "*" +requests-cache = "*" +wcmatch = "8.4" + +[package.extras] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] +vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] + +[[package]] +name = "airbyte-protocol-models" +version = "0.5.1" +description = "Declares the Airbyte Protocol." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, + {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, +] + +[package.dependencies] +pydantic = ">=1.9.2,<2.0.0" + +[[package]] +name = "atomicwrites" +version = "1.4.1" +description = "Atomic file writes." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, +] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "bracex" +version = "2.4" +description = "Bash style brace expander." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + +[[package]] +name = "cachetools" +version = "5.3.3" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "dpath" +version = "2.0.8" +description = "Filesystem-like pathing and searching for dictionaries" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, + {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "genson" +version = "1.2.2" +description = "GenSON is a powerful, user-friendly JSON Schema generator." +optional = false +python-versions = "*" +files = [ + {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, +] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jsonref" +version = "0.3.0" +description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." +optional = false +python-versions = ">=3.3,<4.0" +files = [ + {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, + {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, +] + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0" +setuptools = "*" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pendulum" +version = "2.1.2" +description = "Python datetimes made easy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, +] + +[package.dependencies] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] + +[[package]] +name = "pydantic" +version = "1.10.14" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, + {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, + {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, + {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, + {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, + {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, + {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, + {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, + {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, + {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + +[[package]] +name = "pyrsistent" +version = "0.20.0" +description = "Persistent/Functional/Immutable data structures" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, +] + +[[package]] +name = "pytest" +version = "6.2.5" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, + {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, +] + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +toml = "*" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] + +[[package]] +name = "pytest-mock" +version = "3.12.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, + {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, +] + +[package.dependencies] +pytest = ">=5.0" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-cache" +version = "1.2.0" +description = "A persistent cache for python requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, +] + +[package.dependencies] +attrs = ">=21.2" +cattrs = ">=22.2" +platformdirs = ">=2.5" +requests = ">=2.22" +url-normalize = ">=1.4" +urllib3 = ">=1.25.5" + +[package.extras] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] +bson = ["bson (>=0.5)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] +dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] +json = ["ujson (>=5.4)"] +mongodb = ["pymongo (>=3)"] +redis = ["redis (>=3)"] +security = ["itsdangerous (>=2.0)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "requests-mock" +version = "1.11.0" +description = "Mock out responses from the requests package" +optional = false +python-versions = "*" +files = [ + {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"}, + {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"}, +] + +[package.dependencies] +requests = ">=2.3,<3" +six = "*" + +[package.extras] +fixture = ["fixtures"] +test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] + +[[package]] +name = "setuptools" +version = "69.1.1" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.1.1-py3-none-any.whl", hash = "sha256:02fa291a0471b3a18b2b2481ed902af520c69e8ae0919c13da936542754b4c56"}, + {file = "setuptools-69.1.1.tar.gz", hash = "sha256:5c0806c7d9af348e6dd3777b4f4dbb42c7ad85b190104837488eab9a7c945cf8"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "typing-extensions" +version = "4.10.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, +] + +[[package]] +name = "url-normalize" +version = "1.4.3" +description = "URL normalization for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, + {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wcmatch" +version = "8.4" +description = "Wildcard/glob file name matcher." +optional = false +python-versions = ">=3.7" +files = [ + {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, + {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, +] + +[package.dependencies] +bracex = ">=2.1.1" + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9,<3.12" +content-hash = "64656b30b207f0f046c259a3688f501d0e4de944131c28d2ec60e06ff7efd57e" diff --git a/airbyte-integrations/connectors/source-zoom/pyproject.toml b/airbyte-integrations/connectors/source-zoom/pyproject.toml new file mode 100644 index 0000000000000..454944d92b9c8 --- /dev/null +++ b/airbyte-integrations/connectors/source-zoom/pyproject.toml @@ -0,0 +1,28 @@ +[build-system] +requires = [ "poetry-core>=1.0.0",] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +version = "1.1.0" +name = "source-zoom" +description = "Source implementation for Zoom." +authors = [ "Airbyte ",] +license = "MIT" +readme = "README.md" +documentation = "https://docs.airbyte.com/integrations/sources/zoom" +homepage = "https://airbyte.com" +repository = "https://github.com/airbytehq/airbyte" +[[tool.poetry.packages]] +include = "source_zoom" + +[tool.poetry.dependencies] +python = "^3.9,<3.12" +airbyte-cdk = "^0.67.1" + +[tool.poetry.scripts] +source-zoom = "source_zoom.run:run" + +[tool.poetry.group.dev.dependencies] +requests-mock = "^1.9.3" +pytest-mock = "^3.6.1" +pytest = "^6.1" diff --git a/airbyte-integrations/connectors/source-zoom/requirements.txt b/airbyte-integrations/connectors/source-zoom/requirements.txt deleted file mode 100644 index d6e1198b1ab1f..0000000000000 --- a/airbyte-integrations/connectors/source-zoom/requirements.txt +++ /dev/null @@ -1 +0,0 @@ --e . diff --git a/airbyte-integrations/connectors/source-zoom/setup.py b/airbyte-integrations/connectors/source-zoom/setup.py deleted file mode 100644 index 6d4f526d5e352..0000000000000 --- a/airbyte-integrations/connectors/source-zoom/setup.py +++ /dev/null @@ -1,46 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = [ - "airbyte-cdk~=0.11", -] - -TEST_REQUIREMENTS = [ - "requests-mock~=1.9.3", - "pytest~=6.1", - "pytest-mock~=3.6.1", -] - -setup( - entry_points={ - "console_scripts": [ - "source-zoom=source_zoom.run:run", - ], - }, - name="source_zoom", - description="Source implementation for Zoom.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={ - "": [ - # Include yaml files in the package (if any) - "*.yml", - "*.yaml", - # Include all json files in the package, up to 4 levels deep - "*.json", - "*/*.json", - "*/*/*.json", - "*/*/*/*.json", - "*/*/*/*/*.json", - ] - }, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/source-zoom/source_zoom/components.py b/airbyte-integrations/connectors/source-zoom/source_zoom/components.py index e2f9a8af12f89..00214c737833e 100644 --- a/airbyte-integrations/connectors/source-zoom/source_zoom/components.py +++ b/airbyte-integrations/connectors/source-zoom/source_zoom/components.py @@ -6,7 +6,7 @@ import time from dataclasses import dataclass from http import HTTPStatus -from typing import Any, Mapping, Union +from typing import Any, Mapping, Optional, Union import requests from airbyte_cdk.sources.declarative.auth.declarative_authenticator import NoAuth @@ -63,12 +63,12 @@ def __call__(self, request: requests.PreparedRequest) -> requests.PreparedReques return request @property - def auth_header(self) -> dict[str, str]: - return {"Authorization": f"Bearer {self.token}", "Content-type": "application/json"} + def auth_header(self) -> str: + return "Authorization" @property - def token(self) -> str: - return self._access_token + def token(self) -> Optional[str]: + return self._access_token if self._access_token else None def generate_access_token(self) -> str: self._generate_token_time = time.time() diff --git a/airbyte-integrations/connectors/source-zoom/source_zoom/manifest.yaml b/airbyte-integrations/connectors/source-zoom/source_zoom/manifest.yaml index 21ec25e9ff0bf..808f4405e78fd 100644 --- a/airbyte-integrations/connectors/source-zoom/source_zoom/manifest.yaml +++ b/airbyte-integrations/connectors/source-zoom/source_zoom/manifest.yaml @@ -1,4 +1,4 @@ -version: "0.29.0" +version: 0.67.1 definitions: # Server to Server Oauth Authenticator @@ -37,6 +37,7 @@ definitions: file_path: "./source_zoom/schemas/{{ parameters['name'] }}.json" users_stream: + # Endpoint docs: https://developers.zoom.us/docs/api/rest/reference/user/methods/#operation/users schema_loader: $ref: "#/definitions/schema_loader" retriever: @@ -53,6 +54,8 @@ definitions: path: "/users" meetings_list_tmp_stream: + # This stream is used to fetch parent_ids for the meetings stream and all its substreams. No data is synced from this stream. + # Endpoint docs: https://developers.zoom.us/docs/api/rest/reference/zoom-api/methods/#operation/meetings schema_loader: $ref: "#/definitions/schema_loader" $parameters: @@ -68,7 +71,7 @@ definitions: $ref: "#/definitions/retriever" requester: $ref: "#/definitions/requester" - path: "/users/{{ stream_slice.parent_id }}/meetings" + path: "/users/{{ stream_partition.parent_id }}/meetings" partition_router: type: SubstreamPartitionRouter parent_stream_configs: @@ -77,6 +80,7 @@ definitions: partition_field: "parent_id" meetings_stream: + # Endpoint docs: https://developers.zoom.us/docs/api/rest/reference/zoom-api/methods/#operation/meeting schema_loader: $ref: "#/definitions/schema_loader" $parameters: @@ -92,7 +96,7 @@ definitions: $ref: "#/definitions/retriever" requester: $ref: "#/definitions/requester" - path: "/meetings/{{ stream_slice.parent_id }}" + path: "/meetings/{{ stream_partition.parent_id }}" partition_router: type: SubstreamPartitionRouter parent_stream_configs: @@ -101,6 +105,7 @@ definitions: partition_field: "parent_id" meeting_registrants_stream: + # Endpoint docs: https://developers.zoom.us/docs/api/rest/reference/zoom-api/methods/#operation/meetingRegistrants schema_loader: $ref: "#/definitions/schema_loader" $parameters: @@ -116,7 +121,7 @@ definitions: $ref: "#/definitions/retriever" requester: $ref: "#/definitions/requester" - path: "/meetings/{{ stream_slice.parent_id }}/registrants" + path: "/meetings/{{ stream_partition.parent_id }}/registrants" error_handler: type: CompositeErrorHandler error_handlers: @@ -136,9 +141,10 @@ definitions: - type: AddFields fields: - path: ["meeting_id"] - value: "{{ stream_slice.parent_id }}" + value: "{{ stream_partition.parent_id }}" meeting_polls_stream: + # Endpoint docs: https://developers.zoom.us/docs/api/rest/reference/zoom-api/methods/#operation/meetingPolls schema_loader: $ref: "#/definitions/schema_loader" $parameters: @@ -154,7 +160,7 @@ definitions: $ref: "#/definitions/retriever" requester: $ref: "#/definitions/requester" - path: "/meetings/{{ stream_slice.parent_id }}/polls" + path: "/meetings/{{ stream_partition.parent_id }}/polls" error_handler: type: CompositeErrorHandler # ignore 400 error; We get this error if Meeting poll is not enabled for the meeting, or scheduling capabilities aren't in the account @@ -174,9 +180,10 @@ definitions: - type: AddFields fields: - path: ["meeting_id"] - value: "{{ stream_slice.parent_id }}" + value: "{{ stream_partition.parent_id }}" meeting_poll_results_stream: + # Endpoint docs: https://developers.zoom.us/docs/api/rest/reference/zoom-api/methods/#operation/listPastMeetingPolls schema_loader: $ref: "#/definitions/schema_loader" $parameters: @@ -191,7 +198,7 @@ definitions: $ref: "#/definitions/retriever" requester: $ref: "#/definitions/requester" - path: "/past_meetings/{{ stream_slice.parent_id }}/polls" + path: "/past_meetings/{{ stream_partition.parent_id }}/polls" error_handler: type: CompositeErrorHandler error_handlers: @@ -206,15 +213,16 @@ definitions: type: SubstreamPartitionRouter parent_stream_configs: - stream: "#/definitions/meetings_list_tmp_stream" - parent_key: "uuid" + parent_key: "id" partition_field: "parent_id" transformations: - type: AddFields fields: - - path: ["meeting_uuid"] - value: "{{ stream_slice.parent_id }}" + - path: ["meeting_id"] + value: "{{ stream_partition.parent_id }}" meeting_registration_questions_stream: + # Endpoint docs: https://developers.zoom.us/docs/api/rest/reference/zoom-api/methods/#operation/meetingRegistrantsQuestionsGet schema_loader: $ref: "#/definitions/schema_loader" $parameters: @@ -229,7 +237,7 @@ definitions: $ref: "#/definitions/retriever" requester: $ref: "#/definitions/requester" - path: "/meetings/{{ stream_slice.parent_id }}/registrants/questions" + path: "/meetings/{{ stream_partition.parent_id }}/registrants/questions" error_handler: type: CompositeErrorHandler error_handlers: @@ -249,7 +257,7 @@ definitions: - type: AddFields fields: - path: ["meeting_id"] - value: "{{ stream_slice.parent_id }}" + value: "{{ stream_partition.parent_id }}" webinars_list_tmp_stream: schema_loader: @@ -267,7 +275,7 @@ definitions: $ref: "#/definitions/retriever" requester: $ref: "#/definitions/requester" - path: "/users/{{ stream_slice.parent_id }}/webinars" + path: "/users/{{ stream_partition.parent_id }}/webinars" error_handler: type: CompositeErrorHandler # ignore 400 error; We get this error if Meeting is more than created an year ago @@ -300,7 +308,7 @@ definitions: $ref: "#/definitions/retriever" requester: $ref: "#/definitions/requester" - path: "/webinars/{{ stream_slice.parent_id }}" + path: "/webinars/{{ stream_partition.parent_id }}" error_handler: type: CompositeErrorHandler # ignore 400 error @@ -334,7 +342,7 @@ definitions: $ref: "#/definitions/retriever" requester: $ref: "#/definitions/requester" - path: "/webinars/{{ stream_slice.parent_id }}/panelists" + path: "/webinars/{{ stream_partition.parent_id }}/panelists" error_handler: type: CompositeErrorHandler # ignore 400 error @@ -355,7 +363,7 @@ definitions: - type: AddFields fields: - path: ["webinar_id"] - value: "{{ stream_slice.parent_id }}" + value: "{{ stream_partition.parent_id }}" webinar_registrants_stream: schema_loader: @@ -372,7 +380,7 @@ definitions: $ref: "#/definitions/retriever" requester: $ref: "#/definitions/requester" - path: "/webinars/{{ stream_slice.parent_id }}/registrants" + path: "/webinars/{{ stream_partition.parent_id }}/registrants" error_handler: type: CompositeErrorHandler # ignore 400 error @@ -393,7 +401,7 @@ definitions: - type: AddFields fields: - path: ["webinar_id"] - value: "{{ stream_slice.parent_id }}" + value: "{{ stream_partition.parent_id }}" webinar_absentees_stream: schema_loader: @@ -411,7 +419,7 @@ definitions: $ref: "#/definitions/retriever" requester: $ref: "#/definitions/requester" - path: "/past_webinars/{{ stream_slice.parent_uuid }}/absentees" + path: "/past_webinars/{{ stream_partition.parent_uuid }}/absentees" error_handler: type: CompositeErrorHandler # ignore 400 error @@ -432,7 +440,7 @@ definitions: - type: AddFields fields: - path: ["webinar_uuid"] - value: "{{ stream_slice.parent_uuid }}" + value: "{{ stream_partition.parent_uuid }}" webinar_polls_stream: schema_loader: @@ -449,7 +457,7 @@ definitions: $ref: "#/definitions/retriever" requester: $ref: "#/definitions/requester" - path: "/webinars/{{ stream_slice.parent_id }}/polls" + path: "/webinars/{{ stream_partition.parent_id }}/polls" error_handler: type: CompositeErrorHandler # ignore 400 error; We get this error if Webinar poll is disabled @@ -470,7 +478,7 @@ definitions: - type: AddFields fields: - path: ["webinar_id"] - value: "{{ stream_slice.parent_id }}" + value: "{{ stream_partition.parent_id }}" webinar_poll_results_stream: schema_loader: @@ -487,7 +495,7 @@ definitions: $ref: "#/definitions/retriever" requester: $ref: "#/definitions/requester" - path: "/past_webinars/{{ stream_slice.parent_id }}/polls" + path: "/past_webinars/{{ stream_partition.parent_id }}/polls" error_handler: type: CompositeErrorHandler error_handlers: @@ -506,7 +514,7 @@ definitions: - type: AddFields fields: - path: ["webinar_uuid"] - value: "{{ stream_slice.parent_id }}" + value: "{{ stream_partition.parent_id }}" webinar_registration_questions_stream: schema_loader: @@ -523,7 +531,7 @@ definitions: $ref: "#/definitions/retriever" requester: $ref: "#/definitions/requester" - path: "/webinars/{{ stream_slice.parent_id }}/registrants/questions" + path: "/webinars/{{ stream_partition.parent_id }}/registrants/questions" error_handler: type: CompositeErrorHandler error_handlers: @@ -543,7 +551,7 @@ definitions: - type: AddFields fields: - path: ["webinar_id"] - value: "{{ stream_slice.parent_id }}" + value: "{{ stream_partition.parent_id }}" webinar_tracking_sources_stream: schema_loader: @@ -561,7 +569,7 @@ definitions: $ref: "#/definitions/retriever" requester: $ref: "#/definitions/requester" - path: "/webinars/{{ stream_slice.parent_id }}/tracking_sources" + path: "/webinars/{{ stream_partition.parent_id }}/tracking_sources" error_handler: type: CompositeErrorHandler error_handlers: @@ -580,7 +588,7 @@ definitions: - type: AddFields fields: - path: ["webinar_id"] - value: "{{ stream_slice.parent_id }}" + value: "{{ stream_partition.parent_id }}" webinar_qna_results_stream: schema_loader: @@ -597,7 +605,7 @@ definitions: $ref: "#/definitions/retriever" requester: $ref: "#/definitions/requester" - path: "/past_webinars/{{ stream_slice.parent_id }}/qa" + path: "/past_webinars/{{ stream_partition.parent_id }}/qa" error_handler: type: CompositeErrorHandler error_handlers: @@ -616,9 +624,10 @@ definitions: - type: AddFields fields: - path: ["webinar_uuid"] - value: "{{ stream_slice.parent_id }}" + value: "{{ stream_partition.parent_id }}" report_meetings_stream: + # Endpoint docs: https://developers.zoom.us/docs/api/rest/reference/zoom-api/methods/#operation/reportMeetingDetails schema_loader: $ref: "#/definitions/schema_loader" $parameters: @@ -630,32 +639,28 @@ definitions: record_selector: extractor: type: DpathExtractor - field_path: ["tracking_sources"] + field_path: [] $ref: "#/definitions/retriever" requester: $ref: "#/definitions/requester" - path: "/report/meetings/{{ stream_slice.parent_id }}" + path: "/report/meetings/{{ stream_partition.parent_id }}" error_handler: type: CompositeErrorHandler error_handlers: - type: DefaultErrorHandler response_filters: - - http_codes: [400] + - http_codes: [400, 404] action: IGNORE - type: DefaultErrorHandler partition_router: type: SubstreamPartitionRouter parent_stream_configs: - stream: "#/definitions/meetings_list_tmp_stream" - parent_key: "uuid" + parent_key: "id" partition_field: "parent_id" - transformations: - - type: AddFields - fields: - - path: ["meeting_uuid"] - value: "{{ stream_slice.parent_id }}" report_meeting_participants_stream: + # Endpoint docs: https://developers.zoom.us/docs/api/rest/reference/zoom-api/methods/#operation/reportMeetingParticipants schema_loader: $ref: "#/definitions/schema_loader" $parameters: @@ -671,26 +676,26 @@ definitions: $ref: "#/definitions/retriever" requester: $ref: "#/definitions/requester" - path: "/report/meetings/{{ stream_slice.parent_id }}/participants" + path: "/report/meetings/{{ stream_partition.parent_id }}/participants" error_handler: type: CompositeErrorHandler error_handlers: - type: DefaultErrorHandler response_filters: - - http_codes: [400] + - http_codes: [400, 404] action: IGNORE - type: DefaultErrorHandler partition_router: type: SubstreamPartitionRouter parent_stream_configs: - stream: "#/definitions/meetings_list_tmp_stream" - parent_key: "uuid" + parent_key: "id" partition_field: "parent_id" transformations: - type: AddFields fields: - - path: ["meeting_uuid"] - value: "{{ stream_slice.parent_id }}" + - path: ["meeting_id"] + value: "{{ stream_partition.parent_id }}" report_webinars_stream: schema_loader: @@ -707,7 +712,7 @@ definitions: $ref: "#/definitions/retriever" requester: $ref: "#/definitions/requester" - path: "/report/webinars/{{ stream_slice.parent_id }}" + path: "/report/webinars/{{ stream_partition.parent_id }}" error_handler: type: CompositeErrorHandler error_handlers: @@ -726,7 +731,7 @@ definitions: - type: AddFields fields: - path: ["webinar_uuid"] - value: "{{ stream_slice.parent_id }}" + value: "{{ stream_partition.parent_id }}" report_webinar_participants_stream: schema_loader: @@ -743,7 +748,7 @@ definitions: $ref: "#/definitions/retriever" requester: $ref: "#/definitions/requester" - path: "/report/webinars/{{ stream_slice.parent_id }}/participants" + path: "/report/webinars/{{ stream_partition.parent_id }}/participants" error_handler: type: CompositeErrorHandler error_handlers: @@ -762,7 +767,7 @@ definitions: - type: AddFields fields: - path: ["webinar_uuid"] - value: "{{ stream_slice.parent_id }}" + value: "{{ stream_partition.parent_id }}" streams: - "#/definitions/users_stream" diff --git a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/meeting_poll_results.json b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/meeting_poll_results.json index 0f4b978752234..3c9ea605ecd3e 100644 --- a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/meeting_poll_results.json +++ b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/meeting_poll_results.json @@ -3,35 +3,42 @@ "type": "object", "properties": { "meeting_uuid": { - "type": "string" + "type": ["null", "string"] + }, + "meeting_id": { + "type": ["null", "integer"] }, "email": { - "type": "string" + "type": ["null", "string"] }, "name": { - "type": "string" + "type": ["null", "string"] + }, + "first_name": { + "type": ["null", "string"] + }, + "last_name": { + "type": ["null", "string"] }, "question_details": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "object", + "type": ["null", "object"], "properties": { "answer": { - "type": "string" + "type": ["null", "string"] }, "date_time": { - "type": "string" + "type": ["null", "string"] }, "polling_id": { - "type": "string" + "type": ["null", "string"] }, "question": { - "type": "string" + "type": ["null", "string"] } - }, - "required": [] + } } } - }, - "required": [] + } } diff --git a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/meeting_polls.json b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/meeting_polls.json index 8b80e1d1c7740..d9cbaccf720db 100644 --- a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/meeting_polls.json +++ b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/meeting_polls.json @@ -3,94 +3,92 @@ "type": "object", "properties": { "id": { - "type": "string" + "type": ["null", "string"] }, "meeting_id": { - "type": "number" + "type": ["null", "number"] }, "status": { - "type": "string" + "type": ["null", "string"] }, "anonymous": { - "type": "boolean" + "type": ["null", "boolean"] }, "poll_type": { - "type": "number" + "type": ["null", "number"] }, "questions": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "object", + "type": ["null", "object"], "properties": { "answer_max_character": { - "type": "number" + "type": ["null", "number"] }, "answer_min_character": { - "type": "number" + "type": ["null", "number"] }, "answer_required": { - "type": "boolean" + "type": ["null", "boolean"] }, "answers": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "string" + "type": ["null", "string"] } }, "case_sensitive": { - "type": "boolean" + "type": ["null", "boolean"] }, "name": { - "type": "string" + "type": ["null", "string"] }, "prompts": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "object", + "type": ["null", "object"], "properties": { "prompt_question": { - "type": "string" + "type": ["null", "string"] }, "prompt_right_answers": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "string" + "type": ["null", "string"] } } - }, - "required": [] + } } }, "rating_max_label": { - "type": "string" + "type": ["null", "string"] }, "rating_max_value": { - "type": "number" + "type": ["null", "number"] }, "rating_min_label": { - "type": "string" + "type": ["null", "string"] }, "rating_min_value": { - "type": "number" + "type": ["null", "number"] }, "right_answers": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "string" + "type": ["null", "string"] } }, "show_as_dropdown": { - "type": "boolean" + "type": ["null", "boolean"] }, "type": { - "type": "string" + "type": ["null", "string"] } - }, - "required": [] + } } }, "title": { - "type": "string" + "type": ["null", "string"] } } } diff --git a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/meeting_registrants.json b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/meeting_registrants.json index b7870278d8109..e881cb099190f 100644 --- a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/meeting_registrants.json +++ b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/meeting_registrants.json @@ -1,84 +1,83 @@ { "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", "properties": { "meeting_id": { - "type": "number" + "type": ["null", "number"] }, "id": { - "type": "string" + "type": ["null", "string"] }, "address": { - "type": "string" + "type": ["null", "string"] }, "city": { - "type": "string" + "type": ["null", "string"] }, "comments": { - "type": "string" + "type": ["null", "string"] }, "country": { - "type": "string" + "type": ["null", "string"] }, "custom_questions": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "object", + "type": ["null", "object"], "properties": { "title": { - "type": "string" + "type": ["null", "string"] }, "value": { - "type": "string" + "type": ["null", "string"] } - }, - "required": [] + } } }, "email": { - "type": "string" + "type": ["null", "string"] }, "first_name": { - "type": "string" + "type": ["null", "string"] }, "industry": { - "type": "string" + "type": ["null", "string"] }, "job_title": { - "type": "string" + "type": ["null", "string"] }, "last_name": { - "type": "string" + "type": ["null", "string"] }, "no_of_employees": { - "type": "string" + "type": ["null", "string"] }, "org": { - "type": "string" + "type": ["null", "string"] }, "phone": { - "type": "string" + "type": ["null", "string"] }, "purchasing_time_frame": { - "type": "string" + "type": ["null", "string"] }, "role_in_purchase_process": { - "type": "string" + "type": ["null", "string"] }, "state": { - "type": "string" + "type": ["null", "string"] }, "status": { - "type": "string" + "type": ["null", "string"] }, "zip": { - "type": "string" + "type": ["null", "string"] }, "create_time": { - "type": "string" + "type": ["null", "string"] }, "join_url": { - "type": "string" + "type": ["null", "string"] } - }, - "required": [] + } } diff --git a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/meeting_registration_questions.json b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/meeting_registration_questions.json index 0bb4a101de5eb..39fb68f6f5874 100644 --- a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/meeting_registration_questions.json +++ b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/meeting_registration_questions.json @@ -3,45 +3,43 @@ "type": "object", "properties": { "meeting_id": { - "type": ["string"] + "type": ["null", "integer"] }, "custom_questions": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "object", + "type": ["null", "object"], "properties": { "answers": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "string" + "type": ["null", "string"] } }, "required": { - "type": "boolean" + "type": ["null", "boolean"] }, "title": { - "type": "string" + "type": ["null", "string"] }, "type": { - "type": "string" + "type": ["null", "string"] } - }, - "required": [] + } } }, "questions": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "object", + "type": ["null", "object"], "properties": { "field_name": { - "type": "string" + "type": ["null", "string"] }, "required": { - "type": "boolean" + "type": ["null", "boolean"] } - }, - "required": [] + } } } } diff --git a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/meetings.json b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/meetings.json index 81363abcb9848..194f626121bad 100644 --- a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/meetings.json +++ b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/meetings.json @@ -1,393 +1,485 @@ { - "$schema": "http://json-schema.org/draft-06/schema#", + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", "properties": { "assistant_id": { - "type": "string" + "type": ["null", "string"] }, "host_email": { - "type": "string" + "type": ["null", "string"] }, "host_id": { - "type": "string" + "type": ["null", "string"] }, "id": { - "type": "number" + "type": ["null", "number"] }, "uuid": { - "type": "string" + "type": ["null", "string"] }, "agenda": { - "type": "string" + "type": ["null", "string"] }, "created_at": { - "type": "string" + "type": ["null", "string"] }, "duration": { - "type": "number" + "type": ["null", "number"] }, "encrypted_password": { - "type": "string" + "type": ["null", "string"] }, "h323_password": { - "type": "string" + "type": ["null", "string"] }, "join_url": { - "type": "string" + "type": ["null", "string"] + }, + "chat_join_url": { + "type": ["null", "string"] }, "occurrences": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "object", + "type": ["null", "object"], "properties": { "duration": { - "type": "number" + "type": ["null", "number"] }, "occurrence_id": { - "type": "string" + "type": ["null", "string"] }, "start_time": { - "type": "string" + "type": ["null", "string"] }, "status": { - "type": "string" + "type": ["null", "string"] } - }, - "required": [] + } } }, "password": { - "type": "string" + "type": ["null", "string"] }, "pmi": { - "type": "string" + "type": ["null", "string"] }, "pre_schedule": { - "type": "boolean" + "type": ["null", "boolean"] + }, + "pstn_password": { + "type": ["null", "string"] }, "recurrence": { - "type": "object", + "type": ["null", "object"], "properties": { "end_date_time": { - "type": "string" + "type": ["null", "string"] }, "end_times": { - "type": "number" + "type": ["null", "number"] }, "monthly_day": { - "type": "number" + "type": ["null", "number"] }, "monthly_week": { - "type": "number" + "type": ["null", "number"] }, "monthly_week_day": { - "type": "number" + "type": ["null", "number"] }, "repeat_interval": { - "type": "number" + "type": ["null", "number"] }, "type": { - "type": "number" + "type": ["null", "number"] }, "weekly_days": { - "type": "string" + "type": ["null", "string"] } - }, - "required": [] + } }, "settings": { - "type": "object", + "type": ["null", "object"], "properties": { "allow_multiple_devices": { - "type": "boolean" + "type": ["null", "boolean"] }, "alternative_hosts": { - "type": "string" + "type": ["null", "string"] }, "alternative_hosts_email_notification": { - "type": "boolean" + "type": ["null", "boolean"] }, "alternative_host_update_polls": { - "type": "boolean" + "type": ["null", "boolean"] }, "approval_type": { - "type": "number" + "type": ["null", "number"] }, "approved_or_denied_countries_or_regions": { - "type": "object", + "type": ["null", "object"], "properties": { "approved_list": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "string" + "type": ["null", "string"] } }, "denied_list": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "string" + "type": ["null", "string"] } }, "enable": { - "type": "boolean" + "type": ["null", "boolean"] }, "method": { - "type": "string" + "type": ["null", "string"] } - }, - "required": [] + } }, "audio": { - "type": "string" + "type": ["null", "string"] + }, + "audio_conference_info": { + "type": ["null", "string"] }, "authentication_domains": { - "type": "string" + "type": ["null", "string"] }, "authentication_exception": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "object", + "type": ["null", "object"], "properties": { "email": { - "type": "string" + "type": ["null", "string"] }, "name": { - "type": "string" + "type": ["null", "string"] }, "join_url": { - "type": "string" + "type": ["null", "string"] } - }, - "required": [] + } } }, "authentication_name": { - "type": "string" + "type": ["null", "string"] }, "authentication_option": { - "type": "string" + "type": ["null", "string"] }, "auto_recording": { - "type": "string" + "type": ["null", "string"] }, "breakout_room": { - "type": "object", + "type": ["null", "object"], "properties": { "enable": { - "type": "boolean" + "type": ["null", "boolean"] }, "rooms": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "object", + "type": ["null", "object"], "properties": { "name": { - "type": "string" + "type": ["null", "string"] }, "participants": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "string" + "type": ["null", "string"] } } - }, - "required": [] + } } } - }, - "required": [] + } }, "calendar_type": { - "type": "number" + "type": ["null", "number"] }, "close_registration": { - "type": "boolean" + "type": ["null", "boolean"] + }, + "cn_meeting": { + "type": ["null", "boolean"] }, "contact_email": { - "type": "string" + "type": ["null", "string"] }, "contact_name": { - "type": "string" + "type": ["null", "string"] }, "custom_keys": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "object", + "type": ["null", "object"], "properties": { "key": { - "type": "string" + "type": ["null", "string"] }, "value": { - "type": "string" + "type": ["null", "string"] } - }, - "required": [] + } } }, "email_notification": { - "type": "boolean" + "type": ["null", "boolean"] + }, + "device_testing": { + "type": ["null", "boolean"] + }, + "email_in_attendee_report": { + "type": ["null", "boolean"] + }, + "enable_dedicated_group_chat": { + "type": ["null", "boolean"] }, "encryption_type": { - "type": "string" + "type": ["null", "string"] + }, + "enforce_login": { + "type": ["null", "boolean"] + }, + "enforce_login_domains": { + "type": ["null", "string"] }, "focus_mode": { - "type": "boolean" + "type": ["null", "boolean"] }, "global_dial_in_countries": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "string" + "type": ["null", "string"] } }, "global_dial_in_numbers": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "object", + "type": ["null", "object"], "properties": { "city": { - "type": "string" + "type": ["null", "string"] }, "country": { - "type": "string" + "type": ["null", "string"] }, "country_name": { - "type": "string" + "type": ["null", "string"] }, "number": { - "type": "string" + "type": ["null", "string"] }, "type": { - "type": "string" + "type": ["null", "string"] } - }, - "required": [] + } } }, "host_video": { - "type": "boolean" + "type": ["null", "boolean"] + }, + "in_meeting": { + "type": ["null", "boolean"] }, "jbh_time": { - "type": "number" + "type": ["null", "number"] }, "join_before_host": { - "type": "boolean" + "type": ["null", "boolean"] }, "language_interpretation": { - "type": "object", + "type": ["null", "object"], "properties": { "enable": { - "type": "boolean" + "type": ["null", "boolean"] }, "interpreters": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "object", + "type": ["null", "object"], "properties": { "email": { - "type": "string" + "type": ["null", "string"] }, "languages": { - "type": "string" + "type": ["null", "string"] } - }, - "required": [] + } } } - }, - "required": [] + } }, "meeting_authentication": { - "type": "boolean" + "type": ["null", "boolean"] }, "mute_upon_entry": { - "type": "boolean" + "type": ["null", "boolean"] }, "participant_video": { - "type": "boolean" + "type": ["null", "boolean"] }, "private_meeting": { - "type": "boolean" + "type": ["null", "boolean"] + }, + "resources": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "resource_type": { + "type": ["null", "string"] + }, + "resource_id": { + "type": ["null", "string"] + }, + "permission_level": { + "type": ["null", "string"] + } + } + } }, "registrants_confirmation_email": { - "type": "boolean" + "type": ["null", "boolean"] }, "registrants_email_notification": { - "type": "boolean" + "type": ["null", "boolean"] }, "registration_type": { - "type": "number" + "type": ["null", "number"] + }, + "request_permission_to_unmute_participants": { + "type": ["null", "boolean"] + }, + "show_join_info": { + "type": ["null", "boolean"] }, "show_share_button": { - "type": "boolean" + "type": ["null", "boolean"] + }, + "sign_language_interpretation": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "enable": { + "type": ["null", "boolean"] + } + } }, "use_pmi": { - "type": "boolean" + "type": ["null", "boolean"] }, "waiting_room": { - "type": "boolean" + "type": ["null", "boolean"] }, "waiting_room_options": { - "type": "object", + "type": ["null", "object"], "properties": { "enable": { - "type": "boolean" + "type": ["null", "boolean"] }, "admit_type": { - "type": "number" + "type": ["null", "number"] }, "auto_admit": { - "type": "number" + "type": ["null", "number"] }, "internal_user_auto_admit": { - "type": "number" + "type": ["null", "number"] } - }, - "required": [] + } }, "watermark": { - "type": "boolean" + "type": ["null", "boolean"] }, "host_save_video_order": { - "type": "boolean" + "type": ["null", "boolean"] + }, + "internal_meeting": { + "type": ["null", "boolean"] + }, + "continuous_meeting_chat": { + "type": ["null", "object"], + "properties": { + "enable": { + "type": ["null", "boolean"] + }, + "auto_add_invited_external_users": { + "type": ["null", "boolean"] + }, + "channel_id": { + "type": ["null", "string"] + } + } + }, + "participant_focused_meeting": { + "type": ["null", "boolean"] + }, + "push_change_to_calendar": { + "type": ["null", "boolean"] + } + } + }, + "registration_url": { + "type": ["null", "string"] + }, + "resources": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "resource_type": { + "type": ["null", "string"] + }, + "resource_id": { + "type": ["null", "string"] + }, + "permission_level": { + "type": ["null", "string"] + } } - }, - "required": [] + } }, "start_time": { - "type": "string" + "type": ["null", "string"] }, "start_url": { - "type": "string" + "type": ["null", "string"] }, "status": { - "type": "string" + "type": ["null", "string"] }, "timezone": { - "type": "string" + "type": ["null", "string"] }, "topic": { - "type": "string" + "type": ["null", "string"] }, "tracking_fields": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "object", + "type": ["null", "object"], "properties": { "field": { - "type": "string" + "type": ["null", "string"] }, "value": { - "type": "string" + "type": ["null", "string"] }, "visible": { - "type": "boolean" + "type": ["null", "boolean"] } - }, - "required": [] + } } }, "type": { - "type": "number" + "type": ["null", "number"] } - }, - "required": [] + } } diff --git a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/meetings_list_tmp.json b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/meetings_list_tmp.json new file mode 100644 index 0000000000000..c74e6c5a5915b --- /dev/null +++ b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/meetings_list_tmp.json @@ -0,0 +1,5 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/report_meeting_participants.json b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/report_meeting_participants.json index 763392427fc43..6ac1210c943a3 100644 --- a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/report_meeting_participants.json +++ b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/report_meeting_participants.json @@ -3,44 +3,52 @@ "type": "object", "properties": { "meeting_uuid": { - "type": "string" + "type": ["null", "string"] + }, + "meeting_id": { + "type": ["null", "integer"] }, "customer_key": { - "type": "string" + "type": ["null", "string"] }, "duration": { - "type": "number" + "type": ["null", "number"] }, "failover": { - "type": "boolean" + "type": ["null", "boolean"] }, "id": { - "type": "string" + "type": ["null", "string"] }, "join_time": { - "type": "string" + "type": ["null", "string"] }, "leave_time": { - "type": "string" + "type": ["null", "string"] }, "name": { - "type": "string" + "type": ["null", "string"] }, "registrant_id": { - "type": "string" + "type": ["null", "string"] }, "user_email": { - "type": "string" + "type": ["null", "string"] }, "user_id": { - "type": "string" + "type": ["null", "string"] }, "status": { - "type": "string" + "type": ["null", "string"] }, "bo_mtg_id": { - "type": "string" + "type": ["null", "string"] + }, + "participant_user_id": { + "type": ["null", "string"] + }, + "attentiveness_score": { + "type": ["null", "string"] } - }, - "required": [] + } } diff --git a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/report_meetings.json b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/report_meetings.json index e7b31f338a724..96bb88296539e 100644 --- a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/report_meetings.json +++ b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/report_meetings.json @@ -3,74 +3,74 @@ "type": "object", "properties": { "meeting_uuid": { - "type": "string" + "type": ["null", "string"] }, "custom_keys": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "object", + "type": ["null", "object"], "properties": { "key": { - "type": "string" + "type": ["null", "string"] }, "value": { - "type": "string" + "type": ["null", "string"] } - }, - "required": [] + } } }, "dept": { - "type": "string" + "type": ["null", "string"] }, "duration": { - "type": "number" + "type": ["null", "number"] }, "end_time": { - "type": "string" + "type": ["null", "string"] + }, + "host_id": { + "type": ["null", "string"] }, "id": { - "type": "number" + "type": ["null", "number"] }, "participants_count": { - "type": "number" + "type": ["null", "number"] }, "start_time": { - "type": "string" + "type": ["null", "string"] }, "topic": { - "type": "string" + "type": ["null", "string"] }, "total_minutes": { - "type": "number" + "type": ["null", "number"] }, "tracking_fields": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "object", + "type": ["null", "object"], "properties": { "field": { - "type": "string" + "type": ["null", "string"] }, "value": { - "type": "string" + "type": ["null", "string"] } - }, - "required": [] + } } }, "type": { - "type": "number" + "type": ["null", "number"] }, "user_email": { - "type": "string" + "type": ["null", "string"] }, "user_name": { - "type": "string" + "type": ["null", "string"] }, "uuid": { - "type": "string" + "type": ["null", "string"] } - }, - "required": [] + } } diff --git a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/report_webinar_participants.json b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/report_webinar_participants.json index bfba6ff87d935..97728b61a0da6 100644 --- a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/report_webinar_participants.json +++ b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/report_webinar_participants.json @@ -3,53 +3,40 @@ "type": "object", "properties": { "webinar_uuid": { - "type": "string" + "type": ["null", "string"] }, "customer_key": { - "type": "string" + "type": ["null", "string"] }, "duration": { - "type": "number" + "type": ["null", "number"] }, "failover": { - "type": "boolean" + "type": ["null", "boolean"] }, "id": { - "type": "string" + "type": ["null", "string"] }, "join_time": { - "type": "string" + "type": ["null", "string"] }, "leave_time": { - "type": "string" + "type": ["null", "string"] }, "name": { - "type": "string" + "type": ["null", "string"] }, "registrant_id": { - "type": "string" + "type": ["null", "string"] }, "status": { - "type": "string" + "type": ["null", "string"] }, "user_email": { - "type": "string" + "type": ["null", "string"] }, "user_id": { - "type": "string" + "type": ["null", "string"] } - }, - "required": [ - "customer_key", - "duration", - "failover", - "id", - "join_time", - "leave_time", - "name", - "registrant_id", - "status", - "user_email", - "user_id" - ] + } } diff --git a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/report_webinars.json b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/report_webinars.json index b8ae1ed0ceac8..785c07bdc9bac 100644 --- a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/report_webinars.json +++ b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/report_webinars.json @@ -3,74 +3,71 @@ "type": "object", "properties": { "webinar_uuid": { - "type": "string" + "type": ["null", "string"] }, "custom_keys": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "object", + "type": ["null", "object"], "properties": { "key": { - "type": "string" + "type": ["null", "string"] }, "value": { - "type": "string" + "type": ["null", "string"] } - }, - "required": [] + } } }, "dept": { - "type": "string" + "type": ["null", "string"] }, "duration": { - "type": "number" + "type": ["null", "number"] }, "end_time": { - "type": "string" + "type": ["null", "string"] }, "id": { - "type": "number" + "type": ["null", "number"] }, "participants_count": { - "type": "number" + "type": ["null", "number"] }, "start_time": { - "type": "string" + "type": ["null", "string"] }, "topic": { - "type": "string" + "type": ["null", "string"] }, "total_minutes": { - "type": "number" + "type": ["null", "number"] }, "tracking_fields": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "object", + "type": ["null", "object"], "properties": { "field": { - "type": "string" + "type": ["null", "string"] }, "value": { - "type": "string" + "type": ["null", "string"] } - }, - "required": [] + } } }, "type": { - "type": "number" + "type": ["null", "number"] }, "user_email": { - "type": "string" + "type": ["null", "string"] }, "user_name": { - "type": "string" + "type": ["null", "string"] }, "uuid": { - "type": "string" + "type": ["null", "string"] } - }, - "required": [] + } } diff --git a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/users.json b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/users.json index e6936c92b6a16..a2f1fce046921 100644 --- a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/users.json +++ b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/users.json @@ -1,84 +1,92 @@ { - "$schema": "http://json-schema.org/draft-06/schema#", + "$schema": "http://json-schema.org/draft-07/schema#", "properties": { + "user_created_at": { + "type": ["null", "string"] + }, "created_at": { - "type": "string" + "type": ["null", "string"] }, "custom_attributes": { - "type": "array", + "type": ["null", "array"], "items": { "type": "object", "properties": { "key": { - "type": "string" + "type": ["null", "string"] }, "name": { - "type": "string" + "type": ["null", "string"] }, "value": { - "type": "string" + "type": ["null", "string"] } - }, - "required": [] + } } }, "dept": { - "type": "string" + "type": ["null", "string"] }, "email": { - "type": "string" + "type": ["null", "string"] }, "employee_unique_id": { - "type": "string" + "type": ["null", "string"] }, "first_name": { - "type": "string" + "type": ["null", "string"] }, "group_ids": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "string" + "type": ["null", "string"] } }, "id": { - "type": "string" + "type": ["null", "string"] }, "im_group_ids": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "string" + "type": ["null", "string"] } }, + "language": { + "type": ["null", "string"] + }, "last_client_version": { - "type": "string" + "type": ["null", "string"] }, "last_login_time": { - "type": "string" + "type": ["null", "string"] }, "last_name": { - "type": "string" + "type": ["null", "string"] + }, + "phone_number": { + "type": ["null", "string"] }, "plan_united_type": { - "type": "string" + "type": ["null", "string"] }, "pmi": { - "type": "number" + "type": ["null", "number"] }, "role_id": { - "type": "string" + "type": ["null", "string"] }, "status": { - "type": "string" + "type": ["null", "string"] }, "timezone": { - "type": "string" + "type": ["null", "string"] }, "type": { - "type": "number" + "type": ["null", "number"] }, "verified": { - "type": "number" - } - }, - "required": [] + "type": ["null", "number"] + }, + "display_name": { "type": ["null", "string"] } + } } diff --git a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinar_absentees.json b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinar_absentees.json index ced32756af1a1..c56de977fb97d 100644 --- a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinar_absentees.json +++ b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinar_absentees.json @@ -3,83 +3,81 @@ "type": "object", "properties": { "webinar_uuid": { - "type": "string" + "type": ["null", "string"] }, "id": { - "type": "string" + "type": ["null", "string"] }, "address": { - "type": "string" + "type": ["null", "string"] }, "city": { - "type": "string" + "type": ["null", "string"] }, "comments": { - "type": "string" + "type": ["null", "string"] }, "country": { - "type": "string" + "type": ["null", "string"] }, "custom_questions": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "object", + "type": ["null", "object"], "properties": { "title": { - "type": "string" + "type": ["null", "string"] }, "value": { - "type": "string" + "type": ["null", "string"] } - }, - "required": [] + } } }, "email": { - "type": "string" + "type": ["null", "string"] }, "first_name": { - "type": "string" + "type": ["null", "string"] }, "industry": { - "type": "string" + "type": ["null", "string"] }, "job_title": { - "type": "string" + "type": ["null", "string"] }, "last_name": { - "type": "string" + "type": ["null", "string"] }, "no_of_employees": { - "type": "string" + "type": ["null", "string"] }, "org": { - "type": "string" + "type": ["null", "string"] }, "phone": { - "type": "string" + "type": ["null", "string"] }, "purchasing_time_frame": { - "type": "string" + "type": ["null", "string"] }, "role_in_purchase_process": { - "type": "string" + "type": ["null", "string"] }, "state": { - "type": "string" + "type": ["null", "string"] }, "status": { - "type": "string" + "type": ["null", "string"] }, "zip": { - "type": "string" + "type": ["null", "string"] }, "create_time": { - "type": "string" + "type": ["null", "string"] }, "join_url": { - "type": "string" + "type": ["null", "string"] } - }, - "required": [] + } } diff --git a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinar_panelists.json b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinar_panelists.json index 53801958fa0e6..65a7dbff19e29 100644 --- a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinar_panelists.json +++ b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinar_panelists.json @@ -3,35 +3,34 @@ "type": "object", "properties": { "webinar_id": { - "type": "number" + "type": ["null", "number"] }, "id": { - "type": "string" + "type": ["null", "string"] }, "email": { - "type": "string" + "type": ["null", "string"] }, "name": { - "type": "string" + "type": ["null", "string"] }, "join_url": { - "type": "string" + "type": ["null", "string"] }, "virtual_background_id": { - "type": "string" + "type": ["null", "string"] }, "name_tag_id": { - "type": "string" + "type": ["null", "string"] }, "name_tag_name": { - "type": "string" + "type": ["null", "string"] }, "name_tag_pronouns": { - "type": "string" + "type": ["null", "string"] }, "name_tag_description": { - "type": "string" + "type": ["null", "string"] } - }, - "required": [] + } } diff --git a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinar_poll_results.json b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinar_poll_results.json index dbb102491ec10..d405339cea2c2 100644 --- a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinar_poll_results.json +++ b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinar_poll_results.json @@ -3,35 +3,33 @@ "type": "object", "properties": { "webinar_uuid": { - "type": "string" + "type": ["null", "string"] }, "email": { - "type": "string" + "type": ["null", "string"] }, "name": { - "type": "string" + "type": ["null", "string"] }, "question_details": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "object", + "type": ["null", "object"], "properties": { "answer": { - "type": "string" + "type": ["null", "string"] }, "date_time": { - "type": "string" + "type": ["null", "string"] }, "polling_id": { - "type": "string" + "type": ["null", "string"] }, "question": { - "type": "string" + "type": ["null", "string"] } - }, - "required": [] + } } } - }, - "required": [] + } } diff --git a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinar_polls.json b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinar_polls.json index 35ed3e392162e..1c30b07f1dee3 100644 --- a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinar_polls.json +++ b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinar_polls.json @@ -3,95 +3,92 @@ "type": "object", "properties": { "webinar_id": { - "type": "string" + "type": ["null", "string"] }, "id": { - "type": "string" + "type": ["null", "string"] }, "status": { - "type": "string" + "type": ["null", "string"] }, "anonymous": { - "type": "boolean" + "type": ["null", "boolean"] }, "poll_type": { - "type": "number" + "type": ["null", "number"] }, "questions": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "object", + "type": ["null", "object"], "properties": { "answer_max_character": { - "type": "number" + "type": ["null", "number"] }, "answer_min_character": { - "type": "number" + "type": ["null", "number"] }, "answer_required": { - "type": "boolean" + "type": ["null", "boolean"] }, "answers": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "string" + "type": ["null", "string"] } }, "case_sensitive": { - "type": "boolean" + "type": ["null", "boolean"] }, "name": { - "type": "string" + "type": ["null", "string"] }, "prompts": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "object", + "type": ["null", "object"], "properties": { "prompt_question": { - "type": "string" + "type": ["null", "string"] }, "prompt_right_answers": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "string" + "type": ["null", "string"] } } - }, - "required": [] + } } }, "rating_max_label": { - "type": "string" + "type": ["null", "string"] }, "rating_max_value": { - "type": "number" + "type": ["null", "number"] }, "rating_min_label": { - "type": "string" + "type": ["null", "string"] }, "rating_min_value": { - "type": "number" + "type": ["null", "number"] }, "right_answers": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "string" + "type": ["null", "string"] } }, "show_as_dropdown": { - "type": "boolean" + "type": ["null", "boolean"] }, "type": { - "type": "string" + "type": ["null", "string"] } - }, - "required": [] + } } }, "title": { - "type": "string" + "type": ["null", "string"] } - }, - "required": [] + } } diff --git a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinar_qna_results.json b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinar_qna_results.json index 361b24a5fc56f..175b6dcd633ea 100644 --- a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinar_qna_results.json +++ b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinar_qna_results.json @@ -3,29 +3,27 @@ "type": "object", "properties": { "webinar_uuid": { - "type": "string" + "type": ["null", "string"] }, "email": { - "type": "string" + "type": ["null", "string"] }, "name": { - "type": "string" + "type": ["null", "string"] }, "question_details": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "object", + "type": ["null", "object"], "properties": { "answer": { - "type": "string" + "type": ["null", "string"] }, "question": { - "type": "string" + "type": ["null", "string"] } - }, - "required": [] + } } } - }, - "required": [] + } } diff --git a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinar_registrants.json b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinar_registrants.json index 0b5a7cdaf6c20..7fda1561c4abe 100644 --- a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinar_registrants.json +++ b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinar_registrants.json @@ -3,83 +3,81 @@ "type": "object", "properties": { "webinar_id": { - "type": "string" + "type": ["null", "string"] }, "id": { - "type": "string" + "type": ["null", "string"] }, "address": { - "type": "string" + "type": ["null", "string"] }, "city": { - "type": "string" + "type": ["null", "string"] }, "comments": { - "type": "string" + "type": ["null", "string"] }, "country": { - "type": "string" + "type": ["null", "string"] }, "custom_questions": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "object", + "type": ["null", "object"], "properties": { "title": { - "type": "string" + "type": ["null", "string"] }, "value": { - "type": "string" + "type": ["null", "string"] } - }, - "required": [] + } } }, "email": { - "type": "string" + "type": ["null", "string"] }, "first_name": { - "type": "string" + "type": ["null", "string"] }, "industry": { - "type": "string" + "type": ["null", "string"] }, "job_title": { - "type": "string" + "type": ["null", "string"] }, "last_name": { - "type": "string" + "type": ["null", "string"] }, "no_of_employees": { - "type": "string" + "type": ["null", "string"] }, "org": { - "type": "string" + "type": ["null", "string"] }, "phone": { - "type": "string" + "type": ["null", "string"] }, "purchasing_time_frame": { - "type": "string" + "type": ["null", "string"] }, "role_in_purchase_process": { - "type": "string" + "type": ["null", "string"] }, "state": { - "type": "string" + "type": ["null", "string"] }, "status": { - "type": "string" + "type": ["null", "string"] }, "zip": { - "type": "string" + "type": ["null", "string"] }, "create_time": { - "type": "string" + "type": ["null", "string"] }, "join_url": { - "type": "string" + "type": ["null", "string"] } - }, - "required": [] + } } diff --git a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinar_registration_questions.json b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinar_registration_questions.json index 46f0dad22ea02..a7ba8b6985c52 100644 --- a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinar_registration_questions.json +++ b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinar_registration_questions.json @@ -3,47 +3,44 @@ "type": "object", "properties": { "webinar_id": { - "type": "string" + "type": ["null", "string"] }, "custom_questions": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "object", + "type": ["null", "object"], "properties": { "answers": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "string" + "type": ["null", "string"] } }, "required": { - "type": "boolean" + "type": ["null", "boolean"] }, "title": { - "type": "string" + "type": ["null", "string"] }, "type": { - "type": "string" + "type": ["null", "string"] } - }, - "required": [] + } } }, "questions": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "object", + "type": ["null", "object"], "properties": { "field_name": { - "type": "string" + "type": ["null", "string"] }, "required": { - "type": "boolean" + "type": ["null", "boolean"] } - }, - "required": [] + } } } - }, - "required": [] + } } diff --git a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinar_tracking_sources.json b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinar_tracking_sources.json index b7cab1839c57f..b97d71e40147e 100644 --- a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinar_tracking_sources.json +++ b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinar_tracking_sources.json @@ -3,23 +3,22 @@ "type": "object", "properties": { "webinar_id": { - "type": "string" + "type": ["null", "string"] }, "id": { - "type": "string" + "type": ["null", "string"] }, "registration_count": { - "type": "number" + "type": ["null", "number"] }, "source_name": { - "type": "string" + "type": ["null", "string"] }, "tracking_url": { - "type": "string" + "type": ["null", "string"] }, "visitor_count": { - "type": "number" + "type": ["null", "number"] } - }, - "required": [] + } } diff --git a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinars.json b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinars.json index 850b0c16c0c9f..383818e6b57e3 100644 --- a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinars.json +++ b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinars.json @@ -3,320 +3,309 @@ "type": "object", "properties": { "host_email": { - "type": "string" + "type": ["null", "string"] }, "host_id": { - "type": "string" + "type": ["null", "string"] }, "id": { - "type": "number" + "type": ["null", "number"] }, "uuid": { - "type": "string" + "type": ["null", "string"] }, "agenda": { - "type": "string" + "type": ["null", "string"] }, "created_at": { - "type": "string" + "type": ["null", "string"] }, "duration": { - "type": "number" + "type": ["null", "number"] }, "join_url": { - "type": "string" + "type": ["null", "string"] }, "occurrences": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "object", + "type": ["null", "object"], "properties": { "duration": { - "type": "number" + "type": ["null", "number"] }, "occurrence_id": { - "type": "string" + "type": ["null", "string"] }, "start_time": { - "type": "string" + "type": ["null", "string"] }, "status": { - "type": "string" + "type": ["null", "string"] } - }, - "required": [] + } } }, "password": { - "type": "string" + "type": ["null", "string"] }, "recurrence": { - "type": "object", + "type": ["null", "object"], "properties": { "end_date_time": { - "type": "string" + "type": ["null", "string"] }, "end_times": { - "type": "number" + "type": ["null", "number"] }, "monthly_day": { - "type": "number" + "type": ["null", "number"] }, "monthly_week": { - "type": "number" + "type": ["null", "number"] }, "monthly_week_day": { - "type": "number" + "type": ["null", "number"] }, "repeat_interval": { - "type": "number" + "type": ["null", "number"] }, "type": { - "type": "number" + "type": ["null", "number"] }, "weekly_days": { - "type": "string" + "type": ["null", "string"] } - }, - "required": [] + } }, "settings": { - "type": "object", + "type": ["null", "object"], "properties": { "allow_multiple_devices": { - "type": "boolean" + "type": ["null", "boolean"] }, "alternative_hosts": { - "type": "string" + "type": ["null", "string"] }, "alternative_host_update_polls": { - "type": "boolean" + "type": ["null", "boolean"] }, "approval_type": { - "type": "number" + "type": ["null", "number"] }, "attendees_and_panelists_reminder_email_notification": { - "type": "object", + "type": ["null", "object"], "properties": { "enable": { - "type": "boolean" + "type": ["null", "boolean"] }, "type": { - "type": "number" + "type": ["null", "number"] } - }, - "required": [] + } }, "audio": { - "type": "string" + "type": ["null", "string"] }, "authentication_domains": { - "type": "string" + "type": ["null", "string"] }, "authentication_name": { - "type": "string" + "type": ["null", "string"] }, "authentication_option": { - "type": "string" + "type": ["null", "string"] }, "auto_recording": { - "type": "string" + "type": ["null", "string"] }, "close_registration": { - "type": "boolean" + "type": ["null", "boolean"] }, "contact_email": { - "type": "string" + "type": ["null", "string"] }, "contact_name": { - "type": "string" + "type": ["null", "string"] }, "email_language": { - "type": "string" + "type": ["null", "string"] }, "follow_up_absentees_email_notification": { - "type": "object", + "type": ["null", "object"], "properties": { "enable": { - "type": "boolean" + "type": ["null", "boolean"] }, "type": { - "type": "number" + "type": ["null", "number"] } - }, - "required": [] + } }, "follow_up_attendees_email_notification": { - "type": "object", + "type": ["null", "object"], "properties": { "enable": { - "type": "boolean" + "type": ["null", "boolean"] }, "type": { - "type": "number" + "type": ["null", "number"] } - }, - "required": [] + } }, "global_dial_in_countries": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "string" + "type": ["null", "string"] } }, "hd_video": { - "type": "boolean" + "type": ["null", "boolean"] }, "hd_video_for_attendees": { - "type": "boolean" + "type": ["null", "boolean"] }, "host_video": { - "type": "boolean" + "type": ["null", "boolean"] }, "language_interpretation": { - "type": "object", + "type": ["null", "object"], "properties": { "enable": { - "type": "boolean" + "type": ["null", "boolean"] }, "interpreters": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "object", + "type": ["null", "object"], "properties": { "email": { - "type": "string" + "type": ["null", "string"] }, "languages": { - "type": "string" + "type": ["null", "string"] } - }, - "required": [] + } } } - }, - "required": [] + } }, "panelist_authentication": { - "type": "boolean" + "type": ["null", "boolean"] }, "meeting_authentication": { - "type": "boolean" + "type": ["null", "boolean"] }, "add_watermark": { - "type": "boolean" + "type": ["null", "boolean"] }, "add_audio_watermark": { - "type": "boolean" + "type": ["null", "boolean"] }, "notify_registrants": { - "type": "boolean" + "type": ["null", "boolean"] }, "on_demand": { - "type": "boolean" + "type": ["null", "boolean"] }, "panelists_invitation_email_notification": { - "type": "boolean" + "type": ["null", "boolean"] }, "panelists_video": { - "type": "boolean" + "type": ["null", "boolean"] }, "post_webinar_survey": { - "type": "boolean" + "type": ["null", "boolean"] }, "practice_session": { - "type": "boolean" + "type": ["null", "boolean"] }, "question_and_answer": { - "type": "object", + "type": ["null", "object"], "properties": { "allow_anonymous_questions": { - "type": "boolean" + "type": ["null", "boolean"] }, "answer_questions": { - "type": "string" + "type": ["null", "string"] }, "attendees_can_comment": { - "type": "boolean" + "type": ["null", "boolean"] }, "attendees_can_upvote": { - "type": "boolean" + "type": ["null", "boolean"] }, "allow_auto_reply": { - "type": "boolean" + "type": ["null", "boolean"] }, "auto_reply_text": { - "type": "string" + "type": ["null", "string"] }, "enable": { - "type": "boolean" + "type": ["null", "boolean"] } - }, - "required": [] + } }, "registrants_confirmation_email": { - "type": "boolean" + "type": ["null", "boolean"] }, "registrants_email_notification": { - "type": "boolean" + "type": ["null", "boolean"] }, "registrants_restrict_number": { - "type": "number" + "type": ["null", "number"] }, "registration_type": { - "type": "number" + "type": ["null", "number"] }, "send_1080p_video_to_attendees": { - "type": "boolean" + "type": ["null", "boolean"] }, "show_share_button": { - "type": "boolean" + "type": ["null", "boolean"] }, "survey_url": { - "type": "string" + "type": ["null", "string"] }, "enable_session_branding": { - "type": "boolean" + "type": ["null", "boolean"] } - }, - "required": [] + } }, "start_time": { - "type": "string" + "type": ["null", "string"] }, "start_url": { - "type": "string" + "type": ["null", "string"] }, "timezone": { - "type": "string" + "type": ["null", "string"] }, "topic": { - "type": "string" + "type": ["null", "string"] }, "tracking_fields": { - "type": "array", + "type": ["null", "array"], "items": { - "type": "object", + "type": ["null", "object"], "properties": { "field": { - "type": "string" + "type": ["null", "string"] }, "value": { - "type": "string" + "type": ["null", "string"] } - }, - "required": [] + } } }, "type": { - "type": "number" + "type": ["null", "number"] }, "is_simulive": { - "type": "boolean" + "type": ["null", "boolean"] }, "record_file_id": { - "type": "string" + "type": ["null", "string"] } - }, - "required": [] + } } diff --git a/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinars_list_tmp.json b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinars_list_tmp.json new file mode 100644 index 0000000000000..c74e6c5a5915b --- /dev/null +++ b/airbyte-integrations/connectors/source-zoom/source_zoom/schemas/webinars_list_tmp.json @@ -0,0 +1,5 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-zoom/unit_tests/test_zoom_authenticator.py b/airbyte-integrations/connectors/source-zoom/unit_tests/test_zoom_authenticator.py index c10eb35a6e6a0..3e2b27319383a 100755 --- a/airbyte-integrations/connectors/source-zoom/unit_tests/test_zoom_authenticator.py +++ b/airbyte-integrations/connectors/source-zoom/unit_tests/test_zoom_authenticator.py @@ -20,8 +20,7 @@ def test_generate_access_token(self): "account_id": "rc-asdfghjkl", "client_id": "rc-123456789", "client_secret": "rc-test-secret", - "authorization_endpoint": "https://example.zoom.com/oauth/token", - "grant_type": "account_credentials", + "authorization_endpoint": "https://example.zoom.com/oauth/token" } parameters = config client = ServerToServerOauthAuthenticator( @@ -29,7 +28,6 @@ def test_generate_access_token(self): account_id=config["account_id"], client_id=config["client_id"], client_secret=config["client_secret"], - grant_type=config["grant_type"], authorization_endpoint=config["authorization_endpoint"], parameters=parameters, ) @@ -41,7 +39,7 @@ def test_generate_access_token(self): headers = {"Authorization": f"Basic {token}", "Content-type": "application/json"} # Define the URL containing the grant_type and account_id as query parameters - url = f'{config.get("authorization_endpoint")}?grant_type={config.get("grant_type")}&account_id={config.get("account_id")}' + url = f'{config.get("authorization_endpoint")}?grant_type=account_credentials&account_id={config.get("account_id")}' with requests_mock.Mocker() as m: # Mock the requests.post call with the expected URL, headers and token response diff --git a/airbyte-integrations/connectors/tasks.py b/airbyte-integrations/connectors/tasks.py deleted file mode 100644 index 5463f1f168095..0000000000000 --- a/airbyte-integrations/connectors/tasks.py +++ /dev/null @@ -1,272 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import os -import shutil -import tempfile -from glob import glob -from multiprocessing import Pool -from typing import Any, Dict, Iterable, List, Optional, Set - -import virtualenv -from invoke import Context, Exit, task - -CONNECTORS_DIR: str = os.path.abspath(os.path.curdir) - -ROOT_DIR = os.path.dirname(os.path.dirname(CONNECTORS_DIR)) -CONFIG_FILE: str = os.path.join(ROOT_DIR, "pyproject.toml") - -# TODO: Get it from a single place with `pre-commit` (or make pre-commit to use these tasks) -TOOLS_VERSIONS: Dict[str, str] = { - "black": "21.12b0", - "colorama": "0.4.4", - "coverage": "6.2", - "flake": "0.0.1a2", - "flake_junit": "2.1", - "isort": "5.10.1", - "mccabe": "0.6.1", - "mypy": "0.910", - "lxml": "4.7", -} - -TASK_COMMANDS: Dict[str, List[str]] = { - "black": [ - f"pip install black~={TOOLS_VERSIONS['black']}", - f"XDG_CACHE_HOME={os.devnull} black -v {{check_option}} {{source_path}}/.", - ], - "coverage": [ - "pip install .", - f"pip install coverage[toml]~={TOOLS_VERSIONS['coverage']}", - f"coverage report --rcfile={CONFIG_FILE}", - ], - "flake": [ - f"pip install mccabe~={TOOLS_VERSIONS['mccabe']}", - f"pip install pyproject-flake8~={TOOLS_VERSIONS['flake']}", - "pflake8 -v {source_path}", - ], - "isort": [ - f"pip install colorama~={TOOLS_VERSIONS['colorama']}", - f"pip install isort~={TOOLS_VERSIONS['isort']}", - "isort -v {check_option} {source_path}/.", - ], - "mypy": [ - "pip install .", - f"pip install mypy~={TOOLS_VERSIONS['mypy']}", - "mypy --install-types --non-interactive ", - f"mypy {{source_path}} --config-file={CONFIG_FILE}", - ], - "test": [ - f"cp -rf {os.path.join(CONNECTORS_DIR, os.pardir, 'bases', 'connector-acceptance-test')} {{venv}}/", - "pip install build", - f"python -m build {os.path.join('{venv}', 'connector-acceptance-test')}", - f"pip install {os.path.join('{venv}', 'connector-acceptance-test', 'dist', 'connector_acceptance_test-*.whl')}", - "pip install .", - "pip install .[tests]", - "pip install pytest-cov", - "pytest -v --cov={source_path} --cov-report xml unit_tests", - ], -} - - -########################################################################################################################################### -# HELPER FUNCTIONS -########################################################################################################################################### - - -def get_connectors_names() -> Set[str]: - cur_dir = os.path.abspath(os.curdir) - os.chdir(CONNECTORS_DIR) - names = set() - for name in glob("source-*"): - if os.path.exists(os.path.join(name, "setup.py")): - if not name.endswith("-singer"): # There are some problems with those. The optimal way is to wait until it's replaced by CDK. - names.add(name.split("source-", 1)[1].rstrip()) - - os.chdir(cur_dir) - return names - - -CONNECTORS_NAMES = get_connectors_names() - - -def _run_single_connector_task(args: Iterable) -> int: - """ - Wrapper for unpack task arguments. - """ - return _run_task(*args) - - -def _run_task( - ctx: Context, - connector_string: str, - task_name: str, - multi_envs: bool = True, - module_path: Optional[str] = None, - task_commands: Dict = TASK_COMMANDS, - **kwargs: Any, -) -> int: - """ - Run task in its own environment. - """ - cur_dir = os.getcwd() - if multi_envs: - if module_path: - os.chdir(module_path) - source_path = connector_string - else: - os.chdir(os.path.join(CONNECTORS_DIR, f"source-{connector_string}")) - source_path = f"source_{connector_string.replace('-', '_')}" - - else: - source_path = connector_string - - venv_name = tempfile.mkdtemp(dir=os.curdir) - virtualenv.cli_run([venv_name]) - activator = os.path.join(os.path.abspath(venv_name), "bin", "activate") - - commands = [] - - commands.extend([cmd.format(source_path=source_path, venv=venv_name, **kwargs) for cmd in task_commands[task_name]]) - - exit_code: int = 0 - - try: - with ctx.prefix(f"source {activator}"): - for command in commands: - result = ctx.run(command, echo=True, warn=True) - if result.return_code: - exit_code = 1 - break - finally: - shutil.rmtree(venv_name, ignore_errors=True) - - if module_path: - os.chdir(cur_dir) - - return exit_code - - -def apply_task_for_connectors(ctx: Context, connectors_names: str, task_name: str, multi_envs: bool = False, **kwargs: Any) -> None: - """ - Run task commands for every connector or for once for a set of connectors, depending on task needs (`multi_envs` param). - If `multi_envs == True` task for every connector runs in its own subprocess. - """ - # TODO: Separate outputs to avoid a mess. - - connectors = connectors_names.split(",") if connectors_names else CONNECTORS_NAMES - connectors = set(connectors) & CONNECTORS_NAMES - - exit_code: int = 0 - - if multi_envs: - print(f"Running {task_name} for the following connectors: {connectors}") - - task_args = [(ctx, connector, task_name) for connector in connectors] - with Pool() as pool: - for result in pool.imap_unordered(_run_single_connector_task, task_args): - if result: - exit_code = 1 - - else: - source_path = " ".join([f"{os.path.join(CONNECTORS_DIR, f'source-{connector}')}" for connector in connectors]) - exit_code = _run_task(ctx, source_path, task_name, multi_envs=False, **kwargs) - - raise Exit(code=exit_code) - - -########################################################################################################################################### -# TASKS -########################################################################################################################################### - -_arg_help_connectors = ( - "Comma-separated connectors' names without 'source-' prefix (ex.: -c github,google-ads,s3). " - "The default is a list of all found connectors excluding the ones with `-singer` suffix." -) - - -@task(help={"connectors": _arg_help_connectors}) -def all_checks(ctx, connectors=None): # type: ignore[no-untyped-def] - """ - Run following checks one by one with default parameters: black, flake, isort, mypy, test, coverage. - Zero exit code indicates about successful passing of all checks. - Terminate on the first non-zero exit code. - """ - tasks = ( - black, - flake, - isort, - mypy, - coverage, - ) - for task_ in tasks: - try: - task_(ctx, connectors=connectors) - except Exit as e: - if e.code: - raise - - -@task(help={"connectors": _arg_help_connectors, "write": "Write changes into the files (runs 'black' without '--check' option)"}) -def black(ctx, connectors=None, write=False): # type: ignore[no-untyped-def] - """ - Run 'black' checks for one or more given connector(s) code. - Zero exit code indicates about successful passing of all checks. - """ - check_option: str = "" if write else " --check" - apply_task_for_connectors(ctx, connectors, "black", check_option=check_option) - - -@task(help={"connectors": _arg_help_connectors}) -def flake(ctx, connectors=None): # type: ignore[no-untyped-def] - """ - Run 'flake8' checks for one or more given connector(s) code. - Zero exit code indicates about successful passing of all checks. - """ - apply_task_for_connectors(ctx, connectors, "flake") - - -@task(help={"connectors": _arg_help_connectors, "write": "Write changes into the files (runs 'isort' without '--check' option)"}) -def isort(ctx, connectors=None, write=False): # type: ignore[no-untyped-def] - """ - Run 'isort' checks for one or more given connector(s) code. - Zero exit code indicates about successful passing of all checks. - """ - check_option: str = "" if write else " --check" - apply_task_for_connectors(ctx, connectors, "isort", check_option=check_option) - - -@task(help={"connectors": _arg_help_connectors}) -def mypy(ctx, connectors=None): # type: ignore[no-untyped-def] - """ - Run MyPy checks for one or more given connector(s) code. - A virtual environment is being created for every one. - Zero exit code indicates about successful passing of all checks. - """ - apply_task_for_connectors(ctx, connectors, "mypy", multi_envs=True) - - -@task(help={"connectors": _arg_help_connectors}) -def test(ctx, connectors=None): # type: ignore[no-untyped-def] - """ - Run unittests for one or more given connector(s). - A virtual environment is being created for every one. - Zero exit code indicates about successful passing of all tests. - """ - apply_task_for_connectors(ctx, connectors, "test", multi_envs=True) - - -@task(help={"connectors": _arg_help_connectors}) -def coverage(ctx, connectors=None): # type: ignore[no-untyped-def] - """ - Check test coverage of code for one or more given connector(s). - A virtual environment is being created for every one. - "test" command is being run before this one. - Zero exit code indicates about enough coverage level. - """ - try: - test(ctx, connectors=connectors) - except Exit as e: - if e.code: - raise - apply_task_for_connectors(ctx, connectors, "coverage", multi_envs=True) diff --git a/airbyte-integrations/infrastructure/README.md b/airbyte-integrations/infrastructure/README.md deleted file mode 100644 index ba00b044b5717..0000000000000 --- a/airbyte-integrations/infrastructure/README.md +++ /dev/null @@ -1,83 +0,0 @@ -# Airbyte Terraform - -## Connector Development Infrastructure -We use Terraform to manage any persistent infrastructure used for developing or testing connectors. - -Directory structure is roughly as follows: - - ├── aws - │   ├── demo - │   │   ├── core - │   │   └── lb - │   ├── shared - │   └── ssh_tunnel - │   ├── module - │   │   ├── secrets - │   │   └── sql - │   └── user_ssh_public_keys - └── gcp - -Top level is which provider the terraform is for. Next level is a -directory containing the project name, or 'shared' for infrastructure (like -the backend for terraform itself) that crosses projects. - -Within each project directory, the top level main.tf contains the infrastructure -for that project, in a high-level format. The module within it contains the -fine grained details. - -Do not place terraform in the top level per-provider directory, as that results in -a monorepo where 'terraform destroy' has a too-wide blast radius. Instead, create -a separate small terraform instance for each project. Then plan and destroy only affect -that project and not other unrelated infrastructure. - - -### Workflow - -#### Setup Credentials -**GCP** - -Copy the contents of the Lastpass credentials `Connector GCP Terraform Key` into `gcp/connectors/secrets/svc_account_creds.json`. - -Any `secrets` directory in the entire repo is gitignored by default so there is no danger of checking credentials into git. - -**AWS** - -You'll find it useful to create an IAM user for yourself and put it in the terraform role, so that -you can use terraform apply directly against the correct subaccount. This involves getting logged in to the -aws console using the lastpass credentials, and then go to IAM and create a user through the GUI. Download your csv creds -from there. You can use `aws sts get-caller-identity` to make sure your custom user is recognized. - -**Azure** - -Coming soon. - - - -#### Iteration Cycle -To run terraform commands, use the tfenv wrapper available through brew or download: - - brew install tfenv - -Once you have tfenv and are in a directory with a .terraform-version file, just -use the normal terraform commands: - - terraform init - terraform plan - terraform apply - -**If this is your first time running Terraform** run the `init` command before plan or apply. - -To achieve isolation and minimize risks, infrastructure should be isolated by connector -where feasible (but use your judgment w.r.t costs of duplicate infra). - -To create connector-related resources in any of the clouds: - - -1. Repeatedly modify the relevant terraform and apply as you work. - -2. Once satisfied, create a PR with your changes. Please post the -output of the `terraform plan` command to show the diff in infrastructure -between the master branch and your PR. This may require deleting all the -infra you just created and running `terraform apply` one last time. - - diff --git a/airbyte-integrations/infrastructure/shared/.terraform-version b/airbyte-integrations/infrastructure/shared/.terraform-version deleted file mode 100644 index 21e8796a09d4f..0000000000000 --- a/airbyte-integrations/infrastructure/shared/.terraform-version +++ /dev/null @@ -1 +0,0 @@ -1.0.3 diff --git a/airbyte-integrations/infrastructure/shared/kms.tf b/airbyte-integrations/infrastructure/shared/kms.tf deleted file mode 100644 index 2b2f69acb730c..0000000000000 --- a/airbyte-integrations/infrastructure/shared/kms.tf +++ /dev/null @@ -1,11 +0,0 @@ -# Terraform state files can contain secrets, so we should always encrypt -# the bucket those are on. Encryption requires a KMS key, which is created here. - -resource "aws_kms_key" "terraform_s3_kms" { - description = "KMS key for terraform state S3 bucket" -} -resource "aws_kms_alias" "terraform_s3_kms_alias" { - name = "alias/terraform_s3_kms" - target_key_id = aws_kms_key.terraform_s3_kms.key_id -} - diff --git a/airbyte-integrations/infrastructure/shared/provider.tf b/airbyte-integrations/infrastructure/shared/provider.tf deleted file mode 100644 index ec8a3a0822835..0000000000000 --- a/airbyte-integrations/infrastructure/shared/provider.tf +++ /dev/null @@ -1,19 +0,0 @@ -terraform { - required_providers { - aws = { - source = "hashicorp/aws" - version = "~> 3.50.0" - } - } - backend "s3" { - bucket = "com-airbyte-terraform-state" - key = "projects/shared/terraform.state" - region = "us-east-2" - dynamodb_table = "terraform-state-lock-dynamo" - } -} - -provider "aws" { - region = "us-east-2" -} - diff --git a/airbyte-integrations/infrastructure/shared/s3.tf b/airbyte-integrations/infrastructure/shared/s3.tf deleted file mode 100644 index 67d31257a09dc..0000000000000 --- a/airbyte-integrations/infrastructure/shared/s3.tf +++ /dev/null @@ -1,20 +0,0 @@ -# This bucket holds terraform.tfstate files for various tf projects. -resource "aws_s3_bucket" "com-airbyte-terraform-state" { - bucket = "com-airbyte-terraform-state" - acl = "private" - versioning { - enabled = true - } - server_side_encryption_configuration { - rule { - apply_server_side_encryption_by_default { - kms_master_key_id = aws_kms_key.terraform_s3_kms.arn - sse_algorithm = "aws:kms" - } - } - } - tags = { - Name = "com-airbyte-terraform-state" - } -} - diff --git a/airbyte-integrations/infrastructure/shared/shared_state.tf b/airbyte-integrations/infrastructure/shared/shared_state.tf deleted file mode 100644 index c66e0d58fe5cb..0000000000000 --- a/airbyte-integrations/infrastructure/shared/shared_state.tf +++ /dev/null @@ -1,20 +0,0 @@ -# Best Practice is to use a remote state file on S3 plus a dynamodb concurrency lock -# https://www.terraform.io/docs/language/state/remote.html -# https://www.terraform.io/docs/language/state/locking.html - -# Create a dynamodb table for locking the state file during terraform apply. -# This can be reused by multiple terraform projects just by referencing the resource. -resource "aws_dynamodb_table" "dynamodb-terraform-state-lock" { - name = "terraform-state-lock-dynamo" - hash_key = "LockID" - read_capacity = 20 - write_capacity = 20 - attribute { - name = "LockID" - type = "S" - } - tags = { - Name = "DynamoDB Terraform State Lock Table" - } -} - diff --git a/airbyte-integrations/infrastructure/ssh_tunnel/.terraform-version b/airbyte-integrations/infrastructure/ssh_tunnel/.terraform-version deleted file mode 100644 index 21e8796a09d4f..0000000000000 --- a/airbyte-integrations/infrastructure/ssh_tunnel/.terraform-version +++ /dev/null @@ -1 +0,0 @@ -1.0.3 diff --git a/airbyte-integrations/infrastructure/ssh_tunnel/main.tf b/airbyte-integrations/infrastructure/ssh_tunnel/main.tf deleted file mode 100644 index 9e6275830eca1..0000000000000 --- a/airbyte-integrations/infrastructure/ssh_tunnel/main.tf +++ /dev/null @@ -1,29 +0,0 @@ -# Set up a subnet with bastion and postgres so we can test inbound ssh tunnel behavior from connectors. - -# ec2-user needs login creds -resource "aws_key_pair" "key" { - key_name = "dbtunnel-bastion-ec2-user-ssh-key" - public_key = file("${path.module}/user_ssh_public_keys/dbtunnel-bastion-ec2-user_rsa.pub") -} - -# Sets up the bastion host, an unprivileged airbyte shell user, and postgres -module "ssh_tunnel_testing" { - source = "./module" - - airbyte_user_authorized_keys_local_filepath = "user_ssh_public_keys/dbtunnel-bastion-airbyte_rsa.pub" - - aws_vpc_id = "vpc-001ad881b80193126" - sudo_keypair_name = aws_key_pair.key.key_name - - subnet_az1 = "us-east-2a" - subnet_cidr_block1 = "10.0.40.0/24" - - subnet_az2 = "us-east-2b" - subnet_cidr_block2 = "10.0.41.0/24" - - rds_instance_class = "db.t3.small" - - // Outputs: bastion_ip_addr postgres_endpoint_fqdn_with_port - -} - diff --git a/airbyte-integrations/infrastructure/ssh_tunnel/module/database.tf b/airbyte-integrations/infrastructure/ssh_tunnel/module/database.tf deleted file mode 100644 index e3ca0bbe70306..0000000000000 --- a/airbyte-integrations/infrastructure/ssh_tunnel/module/database.tf +++ /dev/null @@ -1,45 +0,0 @@ -# Sets up a postgres instance for use in running airbyte connector test cases. - - -# Tell the database what subnet to belong to, so it joins the right subnets and is routable from the bastion. -# AWS insists on a minimum of two availability zones for an RDS instance even if we don't care about high availability. -resource "aws_db_subnet_group" "default" { - name = "dbtunnel-public-dbsubnet-group" - subnet_ids = [aws_subnet.main-subnet-public-dbtunnel.id, aws_subnet.main-subnet-private-dbtunnel.id] - - tags = { - Name = "dbtunnel-public-dbsubnet-group" - } -} - -# This is mainly a placeholder for settings we might want to configure later. -resource "aws_db_parameter_group" "default" { - name = "rds-pg" - family = "postgres12" - description = "RDS default parameter group" - - #parameter { - # name = "character_set_client" - # value = "utf8" - #} -} - -# Create the postgres instance on RDS so it's fully managed and low maintenance. -# For now all we care about is testing with postgres. -resource "aws_db_instance" "default" { - allocated_storage = 5 - engine = "postgres" - engine_version = "12.6" - identifier = "tunnel-dev" - instance_class = var.rds_instance_class - db_subnet_group_name = aws_db_subnet_group.default.name - name = "airbyte" - username = "airbyte" - password = chomp(file("${path.module}/secrets/aws_db_instance-master-password")) - parameter_group_name = aws_db_parameter_group.default.name - publicly_accessible = false - skip_final_snapshot = true - apply_immediately = true - vpc_security_group_ids = [aws_security_group.dbtunnel-sg.id] -} - diff --git a/airbyte-integrations/infrastructure/ssh_tunnel/module/inputs.tf b/airbyte-integrations/infrastructure/ssh_tunnel/module/inputs.tf deleted file mode 100644 index d26eb7d6aeaaf..0000000000000 --- a/airbyte-integrations/infrastructure/ssh_tunnel/module/inputs.tf +++ /dev/null @@ -1,32 +0,0 @@ -variable "sudo_keypair_name" { - type = string - description = "Which aws keypair should be used for the sudo-capable user 'ec2-user' on the bastion host" -} -variable "aws_vpc_id" { - type = string - description = "Which vpc should the subnets (containing postgres and the bastion) be placed in" -} -variable "subnet_cidr_block1" { - type = string - description = "Which private CIDR block should be used for the first subnet" -} -variable "subnet_cidr_block2" { - type = string - description = "Which private CIDR block should be used for the second subnet" -} -variable "subnet_az1" { - type = string - description = "Which availability zone should be used for the first subnet" -} -variable "subnet_az2" { - type = string - description = "Which availability zone should be used for the second subnet" -} -variable "rds_instance_class" { - type = string - description = "What instance size should be used for the postgres instance" -} -variable "airbyte_user_authorized_keys_local_filepath" { - type = string - description = "Source path for file provisioner to upload the airbyte user's authorized_keys file" -} diff --git a/airbyte-integrations/infrastructure/ssh_tunnel/module/main.tf b/airbyte-integrations/infrastructure/ssh_tunnel/module/main.tf deleted file mode 100644 index ab1cd1a33b30c..0000000000000 --- a/airbyte-integrations/infrastructure/ssh_tunnel/module/main.tf +++ /dev/null @@ -1,67 +0,0 @@ -# Create a bastion host with a user account we can ssh in as, for using an ssh tunnel. - -# The standard amazon-linux-2 ami will work fine. Don't care about version except stay recent-ish. -data "aws_ami" "amazon-linux-2" { - owners = [137112412989] - - filter { - name = "owner-alias" - values = ["amazon"] - } - - filter { - name = "name" - values = ["amzn2-ami-hvm-2.0.20210701.0-x86_64-gp2"] - } -} - -# Create a host we can ssh into for database ssh tunnel connections from airbyte connectors. -resource "aws_instance" "dbtunnel-bastion" { - ami = data.aws_ami.amazon-linux-2.id - instance_type = "t3.small" - - subnet_id = aws_subnet.main-subnet-public-dbtunnel.id - vpc_security_group_ids = [aws_security_group.ssh-and-egress-allowed.id] - key_name = var.sudo_keypair_name - user_data = file("${path.module}/userdata.sh") - lifecycle { - ignore_changes = [associate_public_ip_address] - } - - tags = { - Name = "dbtunnel-bastion" - } - - provisioner "file" { - source = var.airbyte_user_authorized_keys_local_filepath - destination = "/tmp/airbyte_authorized_keys" - connection { - type = "ssh" - user = "ec2-user" # presumes you have the ssh key in your ssh-agent already - host = aws_instance.dbtunnel-bastion.public_ip - } - } - provisioner "remote-exec" { - inline = [ - "sudo bash -cx \"adduser airbyte -m && mkdir /home/airbyte/.ssh && chmod 700 /home/airbyte/.ssh && touch /home/airbyte/.ssh/authorized_keys && chmod 600 /home/airbyte/.ssh/authorized_keys && chown -R airbyte.airbyte /home/airbyte/.ssh && cat /tmp/airbyte_authorized_keys > /home/airbyte/.ssh/authorized_keys && rm /tmp/airbyte_authorized_keys\"" - ] - connection { - type = "ssh" - user = "ec2-user" # presumes you have the ssh private key in your ssh-agent already - host = aws_instance.dbtunnel-bastion.public_ip - } - } - -} - -# We're using a static IP for connector testing for now since dns isn't usable for this. -# We would prefer DNS someday. -resource "aws_eip" "dbtunnel-eip" { - vpc = true -} - -resource "aws_eip_association" "dbtunnel-eip-assoc" { - instance_id = aws_instance.dbtunnel-bastion.id - allocation_id = aws_eip.dbtunnel-eip.id -} - diff --git a/airbyte-integrations/infrastructure/ssh_tunnel/module/network.tf b/airbyte-integrations/infrastructure/ssh_tunnel/module/network.tf deleted file mode 100644 index c9d7bccdc133c..0000000000000 --- a/airbyte-integrations/infrastructure/ssh_tunnel/module/network.tf +++ /dev/null @@ -1,117 +0,0 @@ -# Setting up a subnet within our main vpc, for ssh tunnel testing. -# We need an ec2 instance (bastion host) reachable from the net with port 22 open for inbound ssh. -# The ec2 instance should be able to download package updates from the public internet, for ease of setup. -# -# We need a postgres instance NOT reachable from the net, but YES reachable from the EC2 instance. -# AWS lets us set publicly_accessible=false (this is default) on the rds instance -# to ensure no public IP is assigned to the postgres server. -# That means we can put the postgres server and the bastion host in the same subnet -# for simplicity, and not have to worry about maintaining route tables. - -data "aws_vpc" "main" { - id = var.aws_vpc_id -} -data "aws_internet_gateway" "default" { - filter { - name = "attachment.vpc-id" - values = [data.aws_vpc.main.id] - } -} - -# Bastion host sits inside a public subnet -resource "aws_subnet" "main-subnet-public-dbtunnel" { - vpc_id = data.aws_vpc.main.id - cidr_block = var.subnet_cidr_block1 - map_public_ip_on_launch = "true" - availability_zone = var.subnet_az1 - tags = { - Name = "public-dbtunnel" - } -} - -# Because an RDS instance requires two AZs we need another subnet for it -resource "aws_subnet" "main-subnet-private-dbtunnel" { - vpc_id = data.aws_vpc.main.id - cidr_block = var.subnet_cidr_block2 - map_public_ip_on_launch = "false" - availability_zone = var.subnet_az2 - tags = { - Name = "private-dbtunnel" - } -} - - -# Output to the public internet -resource "aws_route_table" "dbtunnel-public-route" { - vpc_id = data.aws_vpc.main.id - - route { - cidr_block = "0.0.0.0/0" - gateway_id = data.aws_internet_gateway.default.id - } - - tags = { - Name = "dbtunnel-public-route" - } -} - -resource "aws_route_table_association" "dbtunnel-route-assoc-public-subnet-1"{ - subnet_id = "${aws_subnet.main-subnet-public-dbtunnel.id}" - route_table_id = "${aws_route_table.dbtunnel-public-route.id}" -} - -resource "aws_route_table_association" "dbtunnel-route-assoc-private-subnet-2"{ - subnet_id = "${aws_subnet.main-subnet-private-dbtunnel.id}" - route_table_id = "${aws_route_table.dbtunnel-public-route.id}" -} - - -resource "aws_security_group" "ssh-and-egress-allowed" { - vpc_id = data.aws_vpc.main.id - - egress { - from_port = 0 - to_port = 0 - protocol = -1 - cidr_blocks = ["0.0.0.0/0"] - } - ingress { - from_port = 22 - to_port = 22 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0"] - } - tags = { - Name = "ssh-and-egress-allowed" - } -} - - -# If we don't provide a security group, RDS picks a default, which won't have our port open. -# So set up a custom security group where we can control the ports open to the database. -resource "aws_security_group" "dbtunnel-sg" { - name = "dbtunnel-sg-allow-postgres" - description = "Allow inbound traffic but only from the dbtunnel subnet" - vpc_id = data.aws_vpc.main.id - - ingress { - description = "tcp on 5432 from subnet" - from_port = 5432 - to_port = 5432 - protocol = "tcp" - cidr_blocks = [aws_subnet.main-subnet-public-dbtunnel.cidr_block] - } - - egress { - from_port = 0 - to_port = 0 - protocol = "-1" - cidr_blocks = ["0.0.0.0/0"] - ipv6_cidr_blocks = ["::/0"] - } - - tags = { - Name = "dbtunnel-sg-allow-postgres" - } -} - diff --git a/airbyte-integrations/infrastructure/ssh_tunnel/module/outputs.tf b/airbyte-integrations/infrastructure/ssh_tunnel/module/outputs.tf deleted file mode 100644 index cd26817a95217..0000000000000 --- a/airbyte-integrations/infrastructure/ssh_tunnel/module/outputs.tf +++ /dev/null @@ -1,7 +0,0 @@ -output "bastion_ip_addr" { - value = aws_instance.dbtunnel-bastion.public_ip -} -output "postgres_endpoint_fqdn_with_port" { - value = aws_db_instance.default.endpoint -} - diff --git a/airbyte-integrations/infrastructure/ssh_tunnel/module/readme.md b/airbyte-integrations/infrastructure/ssh_tunnel/module/readme.md deleted file mode 100644 index abaefad3e77f1..0000000000000 --- a/airbyte-integrations/infrastructure/ssh_tunnel/module/readme.md +++ /dev/null @@ -1,46 +0,0 @@ -# SSH Tunnel Testing - -This directory creates infrastructure for testing ssh tunneling to -databases for airbyte connectors. It sets up: - -* a public subnet (for a bastion host and one postgres AZ) -* a private subnet (for postgres secondary AZ that aws insists on) -* two security groups (for the bastion host, for the postgres server) -* a bastion host reachable from the internet, with ssh tunnel support -* a user account on the bastion host -* a postgres database on a private address - -All infrastructure for this is kept separate from other airbyte -infrastructure, as it's meant to simulate a client's corporate -environment and private databases. - -This configuration uses the 'tfenv' wrapper on terraform for versioning. -Each directory contains a .terraform-version file specifying the compatibility -for that terraform instance. - - brew install tfenv # install - terraform plan # should use the tfenv wrapper's version of terraform - - -## Public Keys - -The bastion host requires an ec2-user (always) and preferably also a non-root capable -user named airbyte. The airbyte user is used for ssh tunnel from the connectors, and should not be a -priviledged user. These are in the integration test secrets store under the 'infra' prefix. - -To create a fresh ssh keypair and set its comment (where the email usually shows), use a command like this: - - ssh-keygen -t rsa -f dbtunnel-bastion-ec2-user_rsa -C ec2-user - ssh-keygen -t rsa -f ~/dbtunnel-bastion-airbyte_rsa -C airbyte - -The public key from that is used for ec2 instance creation, but the private key should be kept secret. - -TODO: The airbyte user will also need password auth allowed on the ssh connection, once we're ready for that. - -## Database Setup - -We don't have yet automation for running the database configuration scripts -from infrastructure as code. The sql scripts included should be run once by hand -when setting up from scratch. Note that the sql script creating a user has a place to -manually change the password. - diff --git a/airbyte-integrations/infrastructure/ssh_tunnel/module/sql/postgresql-01-dbcreate.sql b/airbyte-integrations/infrastructure/ssh_tunnel/module/sql/postgresql-01-dbcreate.sql deleted file mode 100644 index ced5848f85ae9..0000000000000 --- a/airbyte-integrations/infrastructure/ssh_tunnel/module/sql/postgresql-01-dbcreate.sql +++ /dev/null @@ -1,111 +0,0 @@ --- generic setup for a brand new empty postgresql RDS -CREATE - ROLE integrationtest_rw; - -GRANT integrationtest_rw TO airbyte; - -CREATE - DATABASE test OWNER integrationtest_rw; - -GRANT CONNECT ON -DATABASE test TO integrationtest_rw; - -CREATE - SCHEMA integrationtest AUTHORIZATION integrationtest_rw; - -GRANT USAGE, -CREATE - ON - SCHEMA integrationtest TO integrationtest_rw; - -GRANT SELECT - , - INSERT - , - UPDATE - , - DELETE - ON - ALL TABLES IN SCHEMA integrationtest TO integrationtest_rw; - -ALTER DEFAULT PRIVILEGES IN SCHEMA integrationtest GRANT SELECT - , - INSERT - , - UPDATE - , - DELETE - ON - TABLES TO integrationtest_rw; - -GRANT USAGE ON -ALL SEQUENCES IN SCHEMA integrationtest TO integrationtest_rw; - -ALTER DEFAULT PRIVILEGES IN SCHEMA integrationtest GRANT USAGE ON -SEQUENCES TO integrationtest_rw; - -REVOKE ALL ON -database template1 -FROM -public; - -REVOKE ALL ON -database postgres -FROM -public; - -# Test DATA used BY the postgres SOURCE test classes -SET -SCHEMA 'public'; - -CREATE - TABLE - id_and_name( - id INTEGER, - name VARCHAR(200) - ); - -INSERT - INTO - id_and_name( - id, - name - ) - VALUES( - 1, - 'picard' - ), - ( - 2, - 'crusher' - ), - ( - 3, - 'vash' - ); - -CREATE - TABLE - starships( - id INTEGER, - name VARCHAR(200) - ); - -INSERT - INTO - starships( - id, - name - ) - VALUES( - 1, - 'enterprise-d' - ), - ( - 2, - 'defiant' - ), - ( - 3, - 'yamato' - ); diff --git a/airbyte-integrations/infrastructure/ssh_tunnel/module/sql/postgresql-02-user-create.sql b/airbyte-integrations/infrastructure/ssh_tunnel/module/sql/postgresql-02-user-create.sql deleted file mode 100644 index 83cd4e05abf06..0000000000000 --- a/airbyte-integrations/infrastructure/ssh_tunnel/module/sql/postgresql-02-user-create.sql +++ /dev/null @@ -1,5 +0,0 @@ --- create the specific user we want to use from airbyte -CREATE - USER testcaseuser WITH password 'ThisIsNotTheRealPassword.PleaseSetThisByHand'; - -GRANT integrationtest_rw TO testcaseuser; diff --git a/airbyte-integrations/infrastructure/ssh_tunnel/module/userdata.sh b/airbyte-integrations/infrastructure/ssh_tunnel/module/userdata.sh deleted file mode 100755 index a77287468165a..0000000000000 --- a/airbyte-integrations/infrastructure/ssh_tunnel/module/userdata.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash -e - -# Apply security patches -sudo yum update -y - -# TODO: Figure out if connectors should use password auth or ssh keys for this, or support both. - diff --git a/airbyte-integrations/infrastructure/ssh_tunnel/provider.tf b/airbyte-integrations/infrastructure/ssh_tunnel/provider.tf deleted file mode 100644 index ab0c2ac3fa811..0000000000000 --- a/airbyte-integrations/infrastructure/ssh_tunnel/provider.tf +++ /dev/null @@ -1,19 +0,0 @@ -terraform { - required_providers { - aws = { - source = "hashicorp/aws" - version = "~> 3.50.0" - } - } - backend "s3" { - bucket = "com-airbyte-terraform-state" - region = "us-east-2" - key = "projects/main/terraform.state" - dynamodb_table = "terraform-state-lock-dynamo" - } -} - -provider "aws" { - region = "us-east-2" -} - diff --git a/airbyte-integrations/infrastructure/ssh_tunnel/user_ssh_public_keys/dbtunnel-bastion-airbyte_rsa.pub b/airbyte-integrations/infrastructure/ssh_tunnel/user_ssh_public_keys/dbtunnel-bastion-airbyte_rsa.pub deleted file mode 100644 index aeb717424d6e4..0000000000000 --- a/airbyte-integrations/infrastructure/ssh_tunnel/user_ssh_public_keys/dbtunnel-bastion-airbyte_rsa.pub +++ /dev/null @@ -1 +0,0 @@ -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDISW6oFBHzGT+Y+BDc5NSg8PcPfdbHnX71pdfVufB++Do5HdvPLY1Jyb47mv0aTxg+Au/akh8OcDV0JVuis6QfMzmbA92dwhhSrUNj3OPWl7YDcywSzwWz3qL8PL1sjaLiIKcFYfNhuJWEZP8ubCkQulsNcqZm8G+/0R8bkbURaQy8Dp78DWYh6hf/40ln07UW1VlM2ja6t0nCJDDBLNCpWD3L7XvlF5UmpwGKX1Dp8d7AtwDSn1qnRDJgpfNHLGu5Ag1a4ohP+HQL3Syu/dmh06oNKJ9Jr6s057nhXC3FKj4TNZ2YwY16cepzw4xVopNkeE5gAPmS47OVEh03XFE0h9uu/lg6/atxKjVF5ppe+kvwlvucJTBosn3uOgjSl3cwNd6Kwe3LAGdEZYCs/BLtXoFAkkY2GuHSN7Xrai+lVPZtFICRGMGA236nrnZs4u38LX7rhf5jspHrCdkkskTAEcQ2v8J6h4YCbr7BTltIG+8XsyHGuYUESYHfe2N0Tqc= airbyte diff --git a/airbyte-integrations/infrastructure/ssh_tunnel/user_ssh_public_keys/dbtunnel-bastion-ec2-user_rsa.pub b/airbyte-integrations/infrastructure/ssh_tunnel/user_ssh_public_keys/dbtunnel-bastion-ec2-user_rsa.pub deleted file mode 100644 index 3f9ae51a605b4..0000000000000 --- a/airbyte-integrations/infrastructure/ssh_tunnel/user_ssh_public_keys/dbtunnel-bastion-ec2-user_rsa.pub +++ /dev/null @@ -1 +0,0 @@ -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCtNfcjnlbnmMXcYuscE96X3jp6uXbcjMFeloBPpeV5r9WwuAcczo7Sm8gqUBDtQrJ/TtGTZ+plAX5w8RlbQ7NsWoOIMhHaKmR/USo2kxiwXfQ3J8nJH42lGT/1lDKR01q3rQvjSNF4sJ4uJ7Bb7XWWLnT4X0IxCHrmNAEX/TH0D8RByFbjiHmw0avsHBt58Qux7AtAgZ4zqN0Q4NI3BX7uK3hti9H2I/QM3GYIDJSzcas6PUKxSh/QmaGfae0UBhavE4mrYXwftK6Q2lL7flU8b01eL9q+sfS5ncMIE7FA99+fXFB3vm3INNZysaMUGh/cCS+5vGEm2kD/T/ZZWAMIngjr6KRSUAvinSKMfw1aILm0dpQgty3E9lXXJm4cE49jS/tull6didwwWEvtchYHZIGSz+F7yuLswukz8hNC2y1wKS8046yYgpdO2C41HcUpfJyQ7YmR5Q40FOJjXS9twxCSvbsDr/DfRqA11rPuXt0VwBKPAFWzonJ0jCSnrt8= ec2-user diff --git a/airbyte-lib/.gitattributes b/airbyte-lib/.gitattributes deleted file mode 100644 index 7af38cfbe1078..0000000000000 --- a/airbyte-lib/.gitattributes +++ /dev/null @@ -1,2 +0,0 @@ -# Hide diffs in auto-generated files -docs/generated/**/* linguist-generated=true diff --git a/airbyte-lib/.gitignore b/airbyte-lib/.gitignore deleted file mode 100644 index fccb6b3edd800..0000000000000 --- a/airbyte-lib/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -.venv* -.env diff --git a/airbyte-lib/README.md b/airbyte-lib/README.md deleted file mode 100644 index 868a5483dc634..0000000000000 --- a/airbyte-lib/README.md +++ /dev/null @@ -1,109 +0,0 @@ -# airbyte-lib - -airbyte-lib is a library that allows to run Airbyte syncs embedded into any Python application, without the need to run Airbyte server. - -## Development - -- Make sure [Poetry is installed](https://python-poetry.org/docs/#). -- Run `poetry install` -- For examples, check out the `examples` folder. They can be run via `poetry run python examples/` -- Unit tests and type checks can be run via `poetry run pytest` - -## Release - -- In your PR: - - Bump the version in `pyproject.toml` - - Add a changelog entry to the table below -- Once the PR is merged, go to Github and trigger the `Publish AirbyteLib Manually` workflow. This will publish the new version to PyPI. - -## Secrets Management - -AirbyteLib can auto-import secrets from the following sources: - -1. Environment variables. -2. Variables defined in a local `.env` ("Dotenv") file. -3. [Google Colab secrets](https://medium.com/@parthdasawant/how-to-use-secrets-in-google-colab-450c38e3ec75). -4. Manual entry via [`getpass`](https://docs.python.org/3.9/library/getpass.html). - -_Note: Additional secret store options may be supported in the future. [More info here.](https://github.com/airbytehq/airbyte-lib-private-beta/discussions/5)_ - -### Retrieving Secrets - -```python -from airbyte_lib import get_secret, SecretSource - -source = get_connection("source-github") -source.set_config( - "credentials": { - "personal_access_token": get_secret("GITHUB_PERSONAL_ACCESS_TOKEN"), - } -) -``` - -The `get_secret()` function accepts an optional `source` argument of enum type `SecretSource`. If omitted or set to `SecretSource.ANY`, AirbyteLib will search all available secrets sources. If `source` is set to a specific source, then only that source will be checked. If a list of `SecretSource` entries is passed, then the sources will be checked using the provided ordering. - -By default, AirbyteLib will prompt the user for any requested secrets that are not provided via other secret managers. You can disable this prompt by passing `prompt=False` to `get_secret()`. - -### Versioning - -Versioning follows [Semantic Versioning](https://semver.org/). For new features, bump the minor version. For bug fixes, bump the patch version. For pre-releases, append `dev.N` to the version. For example, `0.1.0dev.1` is the first pre-release of the `0.1.0` version. - -## Documentation - -Regular documentation lives in the `/docs` folder. Based on the doc strings of public methods, we generate API documentation using [pdoc](https://pdoc.dev). To generate the documentation, run `poetry run generate-docs`. The documentation will be generated in the `docs/generate` folder. This needs to be done manually when changing the public interface of the library. - -A unit test validates the documentation is up to date. - -## Connector compatibility - -To make a connector compatible with airbyte-lib, the following requirements must be met: -* The connector must be a Python package, with a `pyproject.toml` or a `setup.py` file. -* In the package, there must be a `run.py` file that contains a `run` method. This method should read arguments from the command line, and run the connector with them, outputting messages to stdout. -* The `pyproject.toml` or `setup.py` file must specify a command line entry point for the `run` method called `source-`. This is usually done by adding a `console_scripts` section to the `pyproject.toml` file, or a `entry_points` section to the `setup.py` file. For example: - -```toml -[tool.poetry.scripts] -source-my-connector = "my_connector.run:run" -``` - -```python -setup( - ... - entry_points={ - 'console_scripts': [ - 'source-my-connector = my_connector.run:run', - ], - }, - ... -) -``` - -To publish a connector to PyPI, specify the `pypi` section in the `metadata.yaml` file. For example: - -```yaml -data: - # ... - remoteRegistries: - pypi: - enabled: true - packageName: "airbyte-source-my-connector" -``` - -## Validating source connectors - -To validate a source connector for compliance, the `airbyte-lib-validate-source` script can be used. It can be used like this: - -```bash -airbyte-lib-validate-source —connector-dir . -—sample-config secrets/config.json -``` - -The script will install the python package in the provided directory, and run the connector against the provided config. The config should be a valid JSON file, with the same structure as the one that would be provided to the connector in Airbyte. The script will exit with a non-zero exit code if the connector fails to run. - -For a more lightweight check, the `--validate-install-only` flag can be used. This will only check that the connector can be installed and returns a spec, no sample config required. - -## Changelog - -| Version | PR | Description | -| ----------- | ---------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------- | -| 0.1.0 | [#35184](https://github.com/airbytehq/airbyte/pull/35184) | Beta Release 0.1.0 | -| 0.1.0dev.2 | [#34111](https://github.com/airbytehq/airbyte/pull/34111) | Initial publish - add publish workflow | diff --git a/airbyte-lib/airbyte_lib/__init__.py b/airbyte-lib/airbyte_lib/__init__.py deleted file mode 100644 index aeeea8506c32e..0000000000000 --- a/airbyte-lib/airbyte_lib/__init__.py +++ /dev/null @@ -1,26 +0,0 @@ -"""AirbyteLib brings Airbyte ELT to every Python developer.""" -from __future__ import annotations - -from airbyte_lib._factories.cache_factories import get_default_cache, new_local_cache -from airbyte_lib._factories.connector_factories import get_source -from airbyte_lib.caches import DuckDBCache, DuckDBCacheConfig -from airbyte_lib.datasets import CachedDataset -from airbyte_lib.registry import get_available_connectors -from airbyte_lib.results import ReadResult -from airbyte_lib.secrets import SecretSource, get_secret -from airbyte_lib.source import Source - - -__all__ = [ - "CachedDataset", - "DuckDBCache", - "DuckDBCacheConfig", - "get_available_connectors", - "get_source", - "get_default_cache", - "get_secret", - "new_local_cache", - "ReadResult", - "SecretSource", - "Source", -] diff --git a/airbyte-lib/airbyte_lib/_executor.py b/airbyte-lib/airbyte_lib/_executor.py deleted file mode 100644 index cedc774a9a7f0..0000000000000 --- a/airbyte-lib/airbyte_lib/_executor.py +++ /dev/null @@ -1,457 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -from __future__ import annotations - -import shlex -import subprocess -import sys -from abc import ABC, abstractmethod -from contextlib import contextmanager, suppress -from pathlib import Path -from shutil import rmtree -from typing import IO, TYPE_CHECKING, Any, NoReturn, cast - -from rich import print - -from airbyte_lib import exceptions as exc -from airbyte_lib.registry import ConnectorMetadata -from airbyte_lib.telemetry import SourceTelemetryInfo, SourceType - - -if TYPE_CHECKING: - from collections.abc import Generator, Iterable, Iterator - - -_LATEST_VERSION = "latest" - - -class Executor(ABC): - def __init__( - self, - *, - name: str | None = None, - metadata: ConnectorMetadata | None = None, - target_version: str | None = None, - ) -> None: - """Initialize a connector executor. - - The 'name' param is required if 'metadata' is None. - """ - if not name and not metadata: - raise exc.AirbyteLibInternalError(message="Either name or metadata must be provided.") - - self.name: str = name or cast(ConnectorMetadata, metadata).name # metadata is not None here - self.metadata: ConnectorMetadata | None = metadata - self.enforce_version: bool = target_version is not None - - self.reported_version: str | None = None - self.target_version: str | None = None - if target_version: - if metadata and target_version == _LATEST_VERSION: - self.target_version = metadata.latest_available_version - else: - self.target_version = target_version - - @abstractmethod - def execute(self, args: list[str]) -> Iterator[str]: - pass - - @abstractmethod - def ensure_installation(self, *, auto_fix: bool = True) -> None: - _ = auto_fix - pass - - @abstractmethod - def install(self) -> None: - pass - - @abstractmethod - def get_telemetry_info(self) -> SourceTelemetryInfo: - pass - - @abstractmethod - def uninstall(self) -> None: - pass - - -@contextmanager -def _stream_from_subprocess(args: list[str]) -> Generator[Iterable[str], None, None]: - process = subprocess.Popen( - args, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - universal_newlines=True, - ) - - def _stream_from_file(file: IO[str]) -> Generator[str, Any, None]: - while True: - line = file.readline() - if not line: - break - yield line - - if process.stdout is None: - raise exc.AirbyteSubprocessError( - message="Subprocess did not return a stdout stream.", - context={ - "args": args, - "returncode": process.returncode, - }, - ) - try: - yield _stream_from_file(process.stdout) - finally: - # Close the stdout stream - if process.stdout: - process.stdout.close() - - # Terminate the process if it is still running - if process.poll() is None: # Check if the process is still running - process.terminate() - try: - # Wait for a short period to allow process to terminate gracefully - process.wait(timeout=10) - except subprocess.TimeoutExpired: - # If the process does not terminate within the timeout, force kill it - process.kill() - - # Now, the process is either terminated or killed. Check the exit code. - exit_code = process.wait() - - # If the exit code is not 0 or -15 (SIGTERM), raise an exception - if exit_code not in (0, -15): - raise exc.AirbyteSubprocessFailedError( - run_args=args, - exit_code=exit_code, - ) - - -class VenvExecutor(Executor): - def __init__( - self, - name: str | None = None, - *, - metadata: ConnectorMetadata | None = None, - target_version: str | None = None, - pip_url: str | None = None, - install_root: Path | None = None, - ) -> None: - """Initialize a connector executor that runs a connector in a virtual environment. - - Args: - name: The name of the connector. - metadata: (Optional.) The metadata of the connector. - target_version: (Optional.) The version of the connector to install. - pip_url: (Optional.) The pip URL of the connector to install. - install_root: (Optional.) The root directory where the virtual environment will be - created. If not provided, the current working directory will be used. - """ - super().__init__(name=name, metadata=metadata, target_version=target_version) - - if not pip_url and metadata and not metadata.pypi_package_name: - raise exc.AirbyteConnectorNotPyPiPublishedError( - connector_name=self.name, - context={ - "metadata": metadata, - }, - ) - - self.pip_url = pip_url or ( - metadata.pypi_package_name - if metadata and metadata.pypi_package_name - else f"airbyte-{self.name}" - ) - self.install_root = install_root or Path.cwd() - - def _get_venv_name(self) -> str: - return f".venv-{self.name}" - - def _get_venv_path(self) -> Path: - return self.install_root / self._get_venv_name() - - def _get_connector_path(self) -> Path: - return self._get_venv_path() / "bin" / self.name - - def _run_subprocess_and_raise_on_failure(self, args: list[str]) -> None: - result = subprocess.run( - args, - check=False, - stderr=subprocess.PIPE, - ) - if result.returncode != 0: - raise exc.AirbyteSubprocessFailedError( - run_args=args, - exit_code=result.returncode, - log_text=result.stderr.decode("utf-8"), - ) - - def uninstall(self) -> None: - if self._get_venv_path().exists(): - rmtree(str(self._get_venv_path())) - - self.reported_version = None # Reset the reported version from the previous installation - - @property - def docs_url(self) -> str: - """Get the URL to the connector's documentation.""" - # TODO: Refactor installation so that this can just live in the Source class. - return "https://docs.airbyte.com/integrations/sources/" + self.name.lower().replace( - "source-", "" - ) - - def install(self) -> None: - """Install the connector in a virtual environment. - - After installation, the installed version will be stored in self.reported_version. - """ - self._run_subprocess_and_raise_on_failure( - [sys.executable, "-m", "venv", str(self._get_venv_path())] - ) - - pip_path = str(self._get_venv_path() / "bin" / "pip") - print( - f"Installing '{self.name}' into virtual environment '{self._get_venv_path()!s}'.\n" - f"Running 'pip install {self.pip_url}'...\n" - ) - try: - self._run_subprocess_and_raise_on_failure( - args=[pip_path, "install", *shlex.split(self.pip_url)] - ) - except exc.AirbyteSubprocessFailedError as ex: - # If the installation failed, remove the virtual environment - # Otherwise, the connector will be considered as installed and the user may not be able - # to retry the installation. - with suppress(exc.AirbyteSubprocessFailedError): - self.uninstall() - - raise exc.AirbyteConnectorInstallationError from ex - - # Assuming the installation succeeded, store the installed version - self.reported_version = self._get_installed_version(raise_on_error=False, recheck=True) - print( - f"Connector '{self.name}' installed successfully!\n" - f"For more information, see the {self.name} documentation:\n" - f"{self.docs_url}#reference\n" - ) - - def _get_installed_version( - self, - *, - raise_on_error: bool = False, - recheck: bool = False, - ) -> str | None: - """Detect the version of the connector installed. - - Returns the version string if it can be detected, otherwise None. - - If raise_on_error is True, raise an exception if the version cannot be detected. - - If recheck if False and the version has already been detected, return the cached value. - - In the venv, we run the following: - > python -c "from importlib.metadata import version; print(version(''))" - """ - if not recheck and self.reported_version: - return self.reported_version - - connector_name = self.name - if not self.interpreter_path.exists(): - # No point in trying to detect the version if the interpreter does not exist - if raise_on_error: - raise exc.AirbyteLibInternalError( - message="Connector's virtual environment interpreter could not be found.", - context={ - "interpreter_path": self.interpreter_path, - }, - ) - return None - - try: - package_name = ( - self.metadata.pypi_package_name - if self.metadata and self.metadata.pypi_package_name - else f"airbyte-{connector_name}" - ) - return subprocess.check_output( - [ - self.interpreter_path, - "-c", - f"from importlib.metadata import version; print(version('{package_name}'))", - ], - universal_newlines=True, - stderr=subprocess.PIPE, # Don't print to stderr - ).strip() - except Exception: - if raise_on_error: - raise - - return None - - @property - def interpreter_path(self) -> Path: - return self._get_venv_path() / "bin" / "python" - - def ensure_installation( - self, - *, - auto_fix: bool = True, - ) -> None: - """Ensure that the connector is installed in a virtual environment. - - If not yet installed and if install_if_missing is True, then install. - - Optionally, verify that the installed version matches the target version. - - Note: Version verification is not supported for connectors installed from a - local path. - """ - # Store the installed version (or None if not installed) - if not self.reported_version: - self.reported_version = self._get_installed_version() - - original_installed_version = self.reported_version - - reinstalled = False - venv_name = f".venv-{self.name}" - if not self._get_venv_path().exists(): - if not auto_fix: - raise exc.AirbyteConnectorInstallationError( - message="Virtual environment does not exist.", - connector_name=self.name, - context={ - "venv_path": self._get_venv_path(), - }, - ) - - # If the venv path does not exist, install. - self.install() - reinstalled = True - - elif not self._get_connector_path().exists(): - if not auto_fix: - raise exc.AirbyteConnectorInstallationError( - message="Could not locate connector executable within the virtual environment.", - connector_name=self.name, - context={ - "connector_path": self._get_connector_path(), - }, - ) - - # If the connector path does not exist, uninstall and re-install. - # This is sometimes caused by a failed or partial installation. - print( - "Connector executable not found within the virtual environment " - f"at {self._get_connector_path()!s}.\nReinstalling..." - ) - self.uninstall() - self.install() - reinstalled = True - - # By now, everything should be installed. Raise an exception if not. - - connector_path = self._get_connector_path() - if not connector_path.exists(): - raise exc.AirbyteConnectorInstallationError( - message="Connector's executable could not be found within the virtual environment.", - connector_name=self.name, - context={ - "connector_path": self._get_connector_path(), - }, - ) from FileNotFoundError(connector_path) - - if self.enforce_version: - version_after_reinstall: str | None = None - if self.reported_version != self.target_version: - if auto_fix and not reinstalled: - # If we haven't already reinstalled above, reinstall now. - self.install() - reinstalled = True - - if reinstalled: - version_after_reinstall = self.reported_version - - # Check the version again - if self.reported_version != self.target_version: - raise exc.AirbyteConnectorInstallationError( - message="Connector's reported version does not match the target version.", - connector_name=self.name, - context={ - "venv_name": venv_name, - "target_version": self.target_version, - "original_installed_version": original_installed_version, - "version_after_reinstall": version_after_reinstall, - }, - ) - - def execute(self, args: list[str]) -> Iterator[str]: - connector_path = self._get_connector_path() - - with _stream_from_subprocess([str(connector_path), *args]) as stream: - yield from stream - - def get_telemetry_info(self) -> SourceTelemetryInfo: - return SourceTelemetryInfo( - name=self.name, - type=SourceType.VENV, - version=self.reported_version, - ) - - -class PathExecutor(Executor): - def __init__( - self, - name: str | None = None, - *, - path: Path, - target_version: str | None = None, - ) -> None: - """Initialize a connector executor that runs a connector from a local path. - - If path is simply the name of the connector, it will be expected to exist in the current - PATH or in the current working directory. - """ - self.path: Path = path - name = name or path.name - super().__init__(name=name, target_version=target_version) - - def ensure_installation( - self, - *, - auto_fix: bool = True, - ) -> None: - """Ensure that the connector executable can be found. - - The auto_fix parameter is ignored for this executor type. - """ - _ = auto_fix - try: - self.execute(["spec"]) - except Exception as e: - # TODO: Improve error handling. We should try to distinguish between - # a connector that is not installed and a connector that is not - # working properly. - raise exc.AirbyteConnectorExecutableNotFoundError( - connector_name=self.name, - ) from e - - def install(self) -> NoReturn: - raise exc.AirbyteConnectorInstallationError( - message="Connector cannot be installed because it is not managed by airbyte-lib.", - connector_name=self.name, - ) - - def uninstall(self) -> NoReturn: - raise exc.AirbyteConnectorInstallationError( - message="Connector cannot be uninstalled because it is not managed by airbyte-lib.", - connector_name=self.name, - ) - - def execute(self, args: list[str]) -> Iterator[str]: - with _stream_from_subprocess([str(self.path), *args]) as stream: - yield from stream - - def get_telemetry_info(self) -> SourceTelemetryInfo: - return SourceTelemetryInfo( - str(self.name), - SourceType.LOCAL_INSTALL, - version=self.reported_version, - ) diff --git a/airbyte-lib/airbyte_lib/_factories/cache_factories.py b/airbyte-lib/airbyte_lib/_factories/cache_factories.py deleted file mode 100644 index 82ad3241920cc..0000000000000 --- a/airbyte-lib/airbyte_lib/_factories/cache_factories.py +++ /dev/null @@ -1,63 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -from __future__ import annotations - -from pathlib import Path - -import ulid - -from airbyte_lib import exceptions as exc -from airbyte_lib.caches.duckdb import DuckDBCache, DuckDBCacheConfig - - -def get_default_cache() -> DuckDBCache: - """Get a local cache for storing data, using the default database path. - - Cache files are stored in the `.cache` directory, relative to the current - working directory. - """ - config = DuckDBCacheConfig( - db_path="./.cache/default_cache_db.duckdb", - ) - return DuckDBCache(config=config) - - -def new_local_cache( - cache_name: str | None = None, - cache_dir: str | Path | None = None, - *, - cleanup: bool = True, -) -> DuckDBCache: - """Get a local cache for storing data, using a name string to seed the path. - - Args: - cache_name: Name to use for the cache. Defaults to None. - cache_dir: Root directory to store the cache in. Defaults to None. - cleanup: Whether to clean up temporary files. Defaults to True. - - Cache files are stored in the `.cache` directory, relative to the current - working directory. - """ - if cache_name: - if " " in cache_name: - raise exc.AirbyteLibInputError( - message="Cache name cannot contain spaces.", - input_value=cache_name, - ) - - if not cache_name.replace("_", "").isalnum(): - raise exc.AirbyteLibInputError( - message="Cache name can only contain alphanumeric characters and underscores.", - input_value=cache_name, - ) - - cache_name = cache_name or str(ulid.ULID()) - cache_dir = cache_dir or Path(f"./.cache/{cache_name}") - if not isinstance(cache_dir, Path): - cache_dir = Path(cache_dir) - - config = DuckDBCacheConfig( - db_path=cache_dir / f"db_{cache_name}.duckdb", - cache_dir=cache_dir, - cleanup=cleanup, - ) - return DuckDBCache(config=config) diff --git a/airbyte-lib/airbyte_lib/_factories/connector_factories.py b/airbyte-lib/airbyte_lib/_factories/connector_factories.py deleted file mode 100644 index 5d0c516ec7d1d..0000000000000 --- a/airbyte-lib/airbyte_lib/_factories/connector_factories.py +++ /dev/null @@ -1,120 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -from __future__ import annotations - -import shutil -import warnings -from pathlib import Path -from typing import Any - -from airbyte_lib import exceptions as exc -from airbyte_lib._executor import PathExecutor, VenvExecutor -from airbyte_lib.registry import ConnectorMetadata, get_connector_metadata -from airbyte_lib.source import Source - - -def get_connector( - name: str, - config: dict[str, Any] | None = None, - *, - version: str | None = None, - pip_url: str | None = None, - local_executable: Path | str | None = None, - install_if_missing: bool = True, -) -> Source: - """Deprecated. Use get_source instead.""" - warnings.warn( - "The `get_connector()` function is deprecated and will be removed in a future version." - "Please use `get_source()` instead.", - DeprecationWarning, - stacklevel=2, - ) - return get_source( - name=name, - config=config, - version=version, - pip_url=pip_url, - local_executable=local_executable, - install_if_missing=install_if_missing, - ) - - -def get_source( - name: str, - config: dict[str, Any] | None = None, - *, - version: str | None = None, - pip_url: str | None = None, - local_executable: Path | str | None = None, - install_if_missing: bool = True, -) -> Source: - """Get a connector by name and version. - - Args: - name: connector name - config: connector config - if not provided, you need to set it later via the set_config - method. - version: connector version - if not provided, the currently installed version will be used. - If no version is installed, the latest available version will be used. The version can - also be set to "latest" to force the use of the latest available version. - pip_url: connector pip URL - if not provided, the pip url will be inferred from the - connector name. - local_executable: If set, the connector will be assumed to already be installed and will be - executed using this path or executable name. Otherwise, the connector will be installed - automatically in a virtual environment. - install_if_missing: Whether to install the connector if it is not available locally. This - parameter is ignored when local_executable is set. - """ - if local_executable: - if pip_url: - raise exc.AirbyteLibInputError( - message="Param 'pip_url' is not supported when 'local_executable' is set." - ) - if version: - raise exc.AirbyteLibInputError( - message="Param 'version' is not supported when 'local_executable' is set." - ) - - if isinstance(local_executable, str): - if "/" in local_executable or "\\" in local_executable: - # Assume this is a path - local_executable = Path(local_executable).absolute() - else: - which_executable = shutil.which(local_executable) - if which_executable is None: - raise FileNotFoundError(local_executable) - local_executable = Path(which_executable).absolute() - - print(f"Using local `{name}` executable: {local_executable!s}") - return Source( - name=name, - config=config, - executor=PathExecutor( - name=name, - path=local_executable, - ), - ) - - # else: we are installing a connector in a virtual environment: - - metadata: ConnectorMetadata | None = None - try: - metadata = get_connector_metadata(name) - except exc.AirbyteConnectorNotRegisteredError: - if not pip_url: - # We don't have a pip url or registry entry, so we can't install the connector - raise - - executor = VenvExecutor( - name=name, - metadata=metadata, - target_version=version, - pip_url=pip_url, - ) - if install_if_missing: - executor.ensure_installation() - - return Source( - executor=executor, - name=name, - config=config, - ) diff --git a/airbyte-lib/airbyte_lib/_file_writers/__init__.py b/airbyte-lib/airbyte_lib/_file_writers/__init__.py deleted file mode 100644 index aae8c474ca97f..0000000000000 --- a/airbyte-lib/airbyte_lib/_file_writers/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -from __future__ import annotations - -from .base import FileWriterBase, FileWriterBatchHandle, FileWriterConfigBase -from .parquet import ParquetWriter, ParquetWriterConfig - - -__all__ = [ - "FileWriterBatchHandle", - "FileWriterBase", - "FileWriterConfigBase", - "ParquetWriter", - "ParquetWriterConfig", -] diff --git a/airbyte-lib/airbyte_lib/_file_writers/base.py b/airbyte-lib/airbyte_lib/_file_writers/base.py deleted file mode 100644 index e037c567e7c83..0000000000000 --- a/airbyte-lib/airbyte_lib/_file_writers/base.py +++ /dev/null @@ -1,126 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -"""Define abstract base class for File Writers, which write and read from file storage.""" - -from __future__ import annotations - -import abc -from dataclasses import dataclass, field -from pathlib import Path -from typing import TYPE_CHECKING, cast, final - -from overrides import overrides - -from airbyte_lib._processors import BatchHandle, RecordProcessor -from airbyte_lib.config import CacheConfigBase - - -if TYPE_CHECKING: - import pyarrow as pa - - from airbyte_protocol.models import ( - AirbyteStateMessage, - ) - - -DEFAULT_BATCH_SIZE = 10000 - - -# The batch handle for file writers is a list of Path objects. -@dataclass -class FileWriterBatchHandle(BatchHandle): - """The file writer batch handle is a list of Path objects.""" - - files: list[Path] = field(default_factory=list) - - -class FileWriterConfigBase(CacheConfigBase): - """Configuration for the Snowflake cache.""" - - cache_dir: Path = Path("./.cache/files/") - """The directory to store cache files in.""" - cleanup: bool = True - """Whether to clean up temporary files after processing a batch.""" - - -class FileWriterBase(RecordProcessor, abc.ABC): - """A generic base implementation for a file-based cache.""" - - config_class = FileWriterConfigBase - config: FileWriterConfigBase - - @abc.abstractmethod - @overrides - def _write_batch( - self, - stream_name: str, - batch_id: str, - record_batch: pa.Table, - ) -> FileWriterBatchHandle: - """Process a record batch. - - Return a list of paths to one or more cache files. - """ - ... - - @final - def write_batch( - self, - stream_name: str, - batch_id: str, - record_batch: pa.Table, - ) -> FileWriterBatchHandle: - """Write a batch of records to the cache. - - This method is final because it should not be overridden. - - Subclasses should override `_write_batch` instead. - """ - return self._write_batch(stream_name, batch_id, record_batch) - - @overrides - def _cleanup_batch( - self, - stream_name: str, - batch_id: str, - batch_handle: BatchHandle, - ) -> None: - """Clean up the cache. - - For file writers, this means deleting the files created and declared in the batch. - - This method is a no-op if the `cleanup` config option is set to False. - """ - if self.config.cleanup: - batch_handle = cast(FileWriterBatchHandle, batch_handle) - _ = stream_name, batch_id - for file_path in batch_handle.files: - file_path.unlink() - - @final - def cleanup_batch( - self, - stream_name: str, - batch_id: str, - batch_handle: BatchHandle, - ) -> None: - """Clean up the cache. - - For file writers, this means deleting the files created and declared in the batch. - - This method is final because it should not be overridden. - - Subclasses should override `_cleanup_batch` instead. - """ - self._cleanup_batch(stream_name, batch_id, batch_handle) - - @overrides - def _finalize_state_messages( - self, - stream_name: str, - state_messages: list[AirbyteStateMessage], - ) -> None: - """ - State messages are not used in file writers, so this method is a no-op. - """ - pass diff --git a/airbyte-lib/airbyte_lib/_file_writers/parquet.py b/airbyte-lib/airbyte_lib/_file_writers/parquet.py deleted file mode 100644 index bc7fbe9cd7044..0000000000000 --- a/airbyte-lib/airbyte_lib/_file_writers/parquet.py +++ /dev/null @@ -1,91 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -"""A Parquet cache implementation.""" -from __future__ import annotations - -from pathlib import Path -from typing import cast - -import pyarrow as pa -import ulid -from overrides import overrides -from pyarrow import parquet - -from airbyte_lib import exceptions as exc -from airbyte_lib._file_writers.base import ( - FileWriterBase, - FileWriterBatchHandle, - FileWriterConfigBase, -) -from airbyte_lib._util.text_util import lower_case_set - - -class ParquetWriterConfig(FileWriterConfigBase): - """Configuration for the Snowflake cache.""" - - # Inherits `cache_dir` from base class - - -class ParquetWriter(FileWriterBase): - """A Parquet cache implementation.""" - - config_class = ParquetWriterConfig - - def get_new_cache_file_path( - self, - stream_name: str, - batch_id: str | None = None, # ULID of the batch - ) -> Path: - """Return a new cache file path for the given stream.""" - batch_id = batch_id or str(ulid.ULID()) - config: ParquetWriterConfig = cast(ParquetWriterConfig, self.config) - target_dir = Path(config.cache_dir) - target_dir.mkdir(parents=True, exist_ok=True) - return target_dir / f"{stream_name}_{batch_id}.parquet" - - def _get_missing_columns( - self, - stream_name: str, - record_batch: pa.Table, - ) -> list[str]: - """Return a list of columns that are missing in the batch. - - The comparison is based on a case-insensitive comparison of the column names. - """ - if not self._catalog_manager: - raise exc.AirbyteLibInternalError(message="Catalog manager should exist but does not.") - stream = self._catalog_manager.get_stream_config(stream_name) - stream_property_names = stream.stream.json_schema["properties"].keys() - return [ - col - for col in stream_property_names - if col.lower() not in lower_case_set(record_batch.schema.names) - ] - - @overrides - def _write_batch( - self, - stream_name: str, - batch_id: str, - record_batch: pa.Table, - ) -> FileWriterBatchHandle: - """Process a record batch. - - Return the path to the cache file. - """ - _ = batch_id # unused - output_file_path = self.get_new_cache_file_path(stream_name) - - missing_columns = self._get_missing_columns(stream_name, record_batch) - if missing_columns: - # We need to append columns with the missing column name(s) and a null type - null_array = cast(pa.Array, pa.array([None] * len(record_batch), type=pa.null())) - for col in missing_columns: - record_batch = record_batch.append_column(col, null_array) - - with parquet.ParquetWriter(output_file_path, schema=record_batch.schema) as writer: - writer.write_table(record_batch) - - batch_handle = FileWriterBatchHandle() - batch_handle.files.append(output_file_path) - return batch_handle diff --git a/airbyte-lib/airbyte_lib/_processors.py b/airbyte-lib/airbyte_lib/_processors.py deleted file mode 100644 index e879d22214e05..0000000000000 --- a/airbyte-lib/airbyte_lib/_processors.py +++ /dev/null @@ -1,396 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -"""Define abstract base class for Processors, including Caches and File writers. - -Processors can all take input from STDIN or a stream of Airbyte messages. - -Caches will pass their input to the File Writer. They share a common base class so certain -abstractions like "write" and "finalize" can be handled in either layer, or both. -""" - -from __future__ import annotations - -import abc -import contextlib -import io -import sys -from collections import defaultdict -from typing import TYPE_CHECKING, Any, cast, final - -import pyarrow as pa -import ulid - -from airbyte_protocol.models import ( - AirbyteMessage, - AirbyteRecordMessage, - AirbyteStateMessage, - AirbyteStateType, - AirbyteStreamState, - ConfiguredAirbyteCatalog, - ConfiguredAirbyteStream, - Type, -) - -from airbyte_lib import exceptions as exc -from airbyte_lib._util import protocol_util -from airbyte_lib.progress import progress -from airbyte_lib.strategies import WriteStrategy - - -if TYPE_CHECKING: - from collections.abc import Generator, Iterable, Iterator - - from airbyte_lib.caches._catalog_manager import CatalogManager - from airbyte_lib.config import CacheConfigBase - - -DEFAULT_BATCH_SIZE = 10_000 -DEBUG_MODE = False # Set to True to enable additional debug logging. - - -class BatchHandle: - pass - - -class AirbyteMessageParsingError(Exception): - """Raised when an Airbyte message is invalid or cannot be parsed.""" - - -class RecordProcessor(abc.ABC): - """Abstract base class for classes which can process input records.""" - - config_class: type[CacheConfigBase] - skip_finalize_step: bool = False - _expected_streams: set[str] - - def __init__( - self, - config: CacheConfigBase | dict | None, - *, - catalog_manager: CatalogManager | None = None, - ) -> None: - if isinstance(config, dict): - config = self.config_class(**config) - - self.config = config or self.config_class() - if not isinstance(self.config, self.config_class): - err_msg = ( - f"Expected config class of type '{self.config_class.__name__}'. " - f"Instead found '{type(self.config).__name__}'." - ) - raise TypeError(err_msg) - - self.source_catalog: ConfiguredAirbyteCatalog | None = None - self._source_name: str | None = None - - self._pending_batches: dict[str, dict[str, Any]] = defaultdict(lambda: {}, {}) - self._finalized_batches: dict[str, dict[str, Any]] = defaultdict(lambda: {}, {}) - - self._pending_state_messages: dict[str, list[AirbyteStateMessage]] = defaultdict(list, {}) - self._finalized_state_messages: dict[ - str, - list[AirbyteStateMessage], - ] = defaultdict(list, {}) - - self._catalog_manager: CatalogManager | None = catalog_manager - self._setup() - - def register_source( - self, - source_name: str, - incoming_source_catalog: ConfiguredAirbyteCatalog, - stream_names: set[str], - ) -> None: - """Register the source name and catalog.""" - if not self._catalog_manager: - raise exc.AirbyteLibInternalError( - message="Catalog manager should exist but does not.", - ) - self._catalog_manager.register_source( - source_name, - incoming_source_catalog=incoming_source_catalog, - incoming_stream_names=stream_names, - ) - self._expected_streams = stream_names - - @property - def _streams_with_data(self) -> set[str]: - """Return a list of known streams.""" - return self._pending_batches.keys() | self._finalized_batches.keys() - - @final - def process_stdin( - self, - write_strategy: WriteStrategy = WriteStrategy.AUTO, - *, - max_batch_size: int = DEFAULT_BATCH_SIZE, - ) -> None: - """Process the input stream from stdin. - - Return a list of summaries for testing. - """ - input_stream = io.TextIOWrapper(sys.stdin.buffer, encoding="utf-8") - self.process_input_stream( - input_stream, write_strategy=write_strategy, max_batch_size=max_batch_size - ) - - @final - def _airbyte_messages_from_buffer( - self, - buffer: io.TextIOBase, - ) -> Iterator[AirbyteMessage]: - """Yield messages from a buffer.""" - yield from (AirbyteMessage.parse_raw(line) for line in buffer) - - @final - def process_input_stream( - self, - input_stream: io.TextIOBase, - write_strategy: WriteStrategy = WriteStrategy.AUTO, - *, - max_batch_size: int = DEFAULT_BATCH_SIZE, - ) -> None: - """Parse the input stream and process data in batches. - - Return a list of summaries for testing. - """ - messages = self._airbyte_messages_from_buffer(input_stream) - self.process_airbyte_messages( - messages, - write_strategy=write_strategy, - max_batch_size=max_batch_size, - ) - - @final - def process_airbyte_messages( - self, - messages: Iterable[AirbyteMessage], - write_strategy: WriteStrategy, - *, - max_batch_size: int = DEFAULT_BATCH_SIZE, - ) -> None: - """Process a stream of Airbyte messages.""" - if not isinstance(write_strategy, WriteStrategy): - raise exc.AirbyteInternalError( - message="Invalid `write_strategy` argument. Expected instance of WriteStrategy.", - context={"write_strategy": write_strategy}, - ) - - stream_batches: dict[str, list[dict]] = defaultdict(list, {}) - - # Process messages, writing to batches as we go - for message in messages: - if message.type is Type.RECORD: - record_msg = cast(AirbyteRecordMessage, message.record) - stream_name = record_msg.stream - stream_batch = stream_batches[stream_name] - stream_batch.append(protocol_util.airbyte_record_message_to_dict(record_msg)) - - if len(stream_batch) >= max_batch_size: - record_batch = pa.Table.from_pylist(stream_batch) - self._process_batch(stream_name, record_batch) - progress.log_batch_written(stream_name, len(stream_batch)) - stream_batch.clear() - - elif message.type is Type.STATE: - state_msg = cast(AirbyteStateMessage, message.state) - if state_msg.type in [AirbyteStateType.GLOBAL, AirbyteStateType.LEGACY]: - self._pending_state_messages[f"_{state_msg.type}"].append(state_msg) - else: - stream_state = cast(AirbyteStreamState, state_msg.stream) - stream_name = stream_state.stream_descriptor.name - self._pending_state_messages[stream_name].append(state_msg) - - else: - # Ignore unexpected or unhandled message types: - # Type.LOG, Type.TRACE, Type.CONTROL, etc. - pass - - # Add empty streams to the dictionary, so we create a destination table for it - for stream_name in self._expected_streams: - if stream_name not in stream_batches: - if DEBUG_MODE: - print(f"Stream {stream_name} has no data") - stream_batches[stream_name] = [] - - # We are at the end of the stream. Process whatever else is queued. - for stream_name, stream_batch in stream_batches.items(): - record_batch = pa.Table.from_pylist(stream_batch) - self._process_batch(stream_name, record_batch) - progress.log_batch_written(stream_name, len(stream_batch)) - - # Finalize any pending batches - for stream_name in list(self._pending_batches.keys()): - self._finalize_batches(stream_name, write_strategy=write_strategy) - progress.log_stream_finalized(stream_name) - - @final - def _process_batch( - self, - stream_name: str, - record_batch: pa.Table, - ) -> tuple[str, Any, Exception | None]: - """Process a single batch. - - Returns a tuple of the batch ID, batch handle, and an exception if one occurred. - """ - batch_id = self._new_batch_id() - batch_handle = self._write_batch( - stream_name, - batch_id, - record_batch, - ) or self._get_batch_handle(stream_name, batch_id) - - if self.skip_finalize_step: - self._finalized_batches[stream_name][batch_id] = batch_handle - else: - self._pending_batches[stream_name][batch_id] = batch_handle - - return batch_id, batch_handle, None - - @abc.abstractmethod - def _write_batch( - self, - stream_name: str, - batch_id: str, - record_batch: pa.Table, - ) -> BatchHandle: - """Process a single batch. - - Returns a batch handle, such as a path or any other custom reference. - """ - - def _cleanup_batch( # noqa: B027 # Intentionally empty, not abstract - self, - stream_name: str, - batch_id: str, - batch_handle: BatchHandle, - ) -> None: - """Clean up the cache. - - This method is called after the given batch has been finalized. - - For instance, file writers can override this method to delete the files created. Caches, - similarly, can override this method to delete any other temporary artifacts. - """ - pass - - def _new_batch_id(self) -> str: - """Return a new batch handle.""" - return str(ulid.ULID()) - - def _get_batch_handle( - self, - stream_name: str, - batch_id: str | None = None, # ULID of the batch - ) -> str: - """Return a new batch handle. - - By default this is a concatenation of the stream name and batch ID. - However, any Python object can be returned, such as a Path object. - """ - batch_id = batch_id or self._new_batch_id() - return f"{stream_name}_{batch_id}" - - def _finalize_batches( - self, - stream_name: str, - write_strategy: WriteStrategy, - ) -> dict[str, BatchHandle]: - """Finalize all uncommitted batches. - - Returns a mapping of batch IDs to batch handles, for processed batches. - - This is a generic implementation, which can be overridden. - """ - _ = write_strategy # Unused - with self._finalizing_batches(stream_name) as batches_to_finalize: - if batches_to_finalize and not self.skip_finalize_step: - raise NotImplementedError( - "Caches need to be finalized but no _finalize_batch() method " - f"exists for class {self.__class__.__name__}", - ) - - return batches_to_finalize - - @abc.abstractmethod - def _finalize_state_messages( - self, - stream_name: str, - state_messages: list[AirbyteStateMessage], - ) -> None: - """Handle state messages. - Might be a no-op if the processor doesn't handle incremental state.""" - pass - - @final - @contextlib.contextmanager - def _finalizing_batches( - self, - stream_name: str, - ) -> Generator[dict[str, BatchHandle], str, None]: - """Context manager to use for finalizing batches, if applicable. - - Returns a mapping of batch IDs to batch handles, for those processed batches. - """ - batches_to_finalize = self._pending_batches[stream_name].copy() - state_messages_to_finalize = self._pending_state_messages[stream_name].copy() - self._pending_batches[stream_name].clear() - self._pending_state_messages[stream_name].clear() - - progress.log_batches_finalizing(stream_name, len(batches_to_finalize)) - yield batches_to_finalize - self._finalize_state_messages(stream_name, state_messages_to_finalize) - progress.log_batches_finalized(stream_name, len(batches_to_finalize)) - - self._finalized_batches[stream_name].update(batches_to_finalize) - self._finalized_state_messages[stream_name] += state_messages_to_finalize - - for batch_id, batch_handle in batches_to_finalize.items(): - self._cleanup_batch(stream_name, batch_id, batch_handle) - - def _setup(self) -> None: # noqa: B027 # Intentionally empty, not abstract - """Create the database. - - By default this is a no-op but subclasses can override this method to prepare - any necessary resources. - """ - - def _teardown(self) -> None: - """Teardown the processor resources. - - By default, the base implementation simply calls _cleanup_batch() for all pending batches. - """ - for stream_name, pending_batches in self._pending_batches.items(): - for batch_id, batch_handle in pending_batches.items(): - self._cleanup_batch( - stream_name=stream_name, - batch_id=batch_id, - batch_handle=batch_handle, - ) - - @final - def __del__(self) -> None: - """Teardown temporary resources when instance is unloaded from memory.""" - self._teardown() - - @final - def _get_stream_config( - self, - stream_name: str, - ) -> ConfiguredAirbyteStream: - """Return the column definitions for the given stream.""" - if not self._catalog_manager: - raise exc.AirbyteLibInternalError( - message="Catalog manager should exist but does not.", - ) - - return self._catalog_manager.get_stream_config(stream_name) - - @final - def _get_stream_json_schema( - self, - stream_name: str, - ) -> dict[str, Any]: - """Return the column definitions for the given stream.""" - return self._get_stream_config(stream_name).stream.json_schema diff --git a/airbyte-lib/airbyte_lib/_util/__init__.py b/airbyte-lib/airbyte_lib/_util/__init__.py deleted file mode 100644 index 1073e4feb2320..0000000000000 --- a/airbyte-lib/airbyte_lib/_util/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -"""Internal utility functions for dealing with pip. - -Note: This module is for internal use only and it should not be depended upon for production use. -It is subject to change without notice. -""" -from __future__ import annotations - -from airbyte_lib._util.pip_util import connector_pip_url, github_pip_url - - -__all__ = [ - "connector_pip_url", - "github_pip_url", -] diff --git a/airbyte-lib/airbyte_lib/_util/pip_util.py b/airbyte-lib/airbyte_lib/_util/pip_util.py deleted file mode 100644 index b965c52f93924..0000000000000 --- a/airbyte-lib/airbyte_lib/_util/pip_util.py +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -"""Internal utility functions for dealing with pip.""" - -from __future__ import annotations - - -def github_pip_url( - owner: str = "airbytehq", - repo: str = "airbyte", - *, - package_name: str, - branch_or_ref: str | None = None, - subdirectory: str | None = None, -) -> str: - """Return the pip URL for a GitHub repository. - - Results will look like: - - `git+airbytehq/airbyte.git#egg=airbyte-lib&subdirectory=airbyte-lib' - - `git+airbytehq/airbyte.git@master#egg=airbyte-lib&subdirectory=airbyte-lib' - - `git+airbytehq/airbyte.git@my-branch#egg=source-github - &subdirectory=airbyte-integrations/connectors/source-github' - """ - result = f"git+https://github.com/{owner}/{repo}.git" - - if branch_or_ref: - result += f"@{branch_or_ref}" - - next_delimiter = "#" - if package_name: - result += f"{next_delimiter}egg={package_name}" - next_delimiter = "&" - - if subdirectory: - result += f"{next_delimiter}subdirectory={subdirectory}" - - return result - - -def connector_pip_url( - connector_name: str, - /, - branch: str, - *, - owner: str | None = None, -) -> str: - """Return a pip URL for a connector in the main `airbytehq/airbyte` git repo.""" - owner = owner or "airbytehq" - if not connector_name.startswith("source-") and not connector_name.startswith("destination-"): - connector_name = "source-" + connector_name - - return github_pip_url( - owner=owner, - repo="airbyte", - branch_or_ref=branch, - package_name=connector_name, - subdirectory=f"airbyte-integrations/connectors/{connector_name}", - ) diff --git a/airbyte-lib/airbyte_lib/_util/protocol_util.py b/airbyte-lib/airbyte_lib/_util/protocol_util.py deleted file mode 100644 index 2ddaa1346e307..0000000000000 --- a/airbyte-lib/airbyte_lib/_util/protocol_util.py +++ /dev/null @@ -1,77 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -"""Internal utility functions, especially for dealing with Airbyte Protocol.""" -from __future__ import annotations - -from typing import TYPE_CHECKING, Any, cast - -from airbyte_protocol.models import ( - AirbyteMessage, - AirbyteRecordMessage, - ConfiguredAirbyteCatalog, - Type, -) - -from airbyte_lib import exceptions as exc - - -if TYPE_CHECKING: - from collections.abc import Iterable, Iterator - - -def airbyte_messages_to_record_dicts( - messages: Iterable[AirbyteMessage], -) -> Iterator[dict[str, Any]]: - """Convert an AirbyteMessage to a dictionary.""" - yield from ( - cast(dict[str, Any], airbyte_message_to_record_dict(message)) - for message in messages - if message is not None and message.type == Type.RECORD - ) - - -def airbyte_message_to_record_dict(message: AirbyteMessage) -> dict[str, Any] | None: - """Convert an AirbyteMessage to a dictionary. - - Return None if the message is not a record message. - """ - if message.type != Type.RECORD: - return None - - return airbyte_record_message_to_dict(message.record) - - -def airbyte_record_message_to_dict( - record_message: AirbyteRecordMessage, -) -> dict[str, Any]: - """Convert an AirbyteMessage to a dictionary. - - Return None if the message is not a record message. - """ - result = record_message.data - - # TODO: Add the metadata columns (this breaks tests) - # result["_airbyte_extracted_at"] = datetime.datetime.fromtimestamp( - # record_message.emitted_at - # ) - - return result # noqa: RET504 # unnecessary assignment and then return (see TODO above) - - -def get_primary_keys_from_stream( - stream_name: str, - configured_catalog: ConfiguredAirbyteCatalog, -) -> set[str]: - """Get the primary keys from a stream in the configured catalog.""" - stream = next( - (stream for stream in configured_catalog.streams if stream.stream.name == stream_name), - None, - ) - if stream is None: - raise exc.AirbyteStreamNotFoundError( - stream_name=stream_name, - connector_name=configured_catalog.connection.configuration["name"], - available_streams=[stream.stream.name for stream in configured_catalog.streams], - ) - - return set(stream.stream.source_defined_primary_key or []) diff --git a/airbyte-lib/airbyte_lib/_util/text_util.py b/airbyte-lib/airbyte_lib/_util/text_util.py deleted file mode 100644 index d5f8909938686..0000000000000 --- a/airbyte-lib/airbyte_lib/_util/text_util.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -"""Internal utility functions for dealing with text.""" -from __future__ import annotations - -from typing import TYPE_CHECKING - - -if TYPE_CHECKING: - from collections.abc import Iterable - - -def lower_case_set(str_iter: Iterable[str]) -> set[str]: - """Converts a list of strings to a set of lower case strings.""" - return {s.lower() for s in str_iter} diff --git a/airbyte-lib/airbyte_lib/caches/__init__.py b/airbyte-lib/airbyte_lib/caches/__init__.py deleted file mode 100644 index 3cb5c31cf1192..0000000000000 --- a/airbyte-lib/airbyte_lib/caches/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -"""Base module for all caches.""" -from __future__ import annotations - -from airbyte_lib.caches.base import SQLCacheBase -from airbyte_lib.caches.duckdb import DuckDBCache, DuckDBCacheConfig -from airbyte_lib.caches.postgres import PostgresCache, PostgresCacheConfig -from airbyte_lib.caches.snowflake import SnowflakeCacheConfig, SnowflakeSQLCache - - -# We export these classes for easy access: `airbyte_lib.caches...` -__all__ = [ - "DuckDBCache", - "DuckDBCacheConfig", - "PostgresCache", - "PostgresCacheConfig", - "SQLCacheBase", - "SnowflakeCacheConfig", - "SnowflakeSQLCache", -] diff --git a/airbyte-lib/airbyte_lib/caches/_catalog_manager.py b/airbyte-lib/airbyte_lib/caches/_catalog_manager.py deleted file mode 100644 index 3eb94e148f63d..0000000000000 --- a/airbyte-lib/airbyte_lib/caches/_catalog_manager.py +++ /dev/null @@ -1,285 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -"""A SQL Cache implementation.""" -from __future__ import annotations - -import json -from typing import TYPE_CHECKING, Callable - -from sqlalchemy import Column, DateTime, String -from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy.orm import Session -from sqlalchemy.sql import func - -from airbyte_protocol.models import ( - AirbyteStateMessage, - AirbyteStream, - ConfiguredAirbyteCatalog, - ConfiguredAirbyteStream, - DestinationSyncMode, - SyncMode, -) - -from airbyte_lib import exceptions as exc - - -if TYPE_CHECKING: - from sqlalchemy.engine import Engine - -STREAMS_TABLE_NAME = "_airbytelib_streams" -STATE_TABLE_NAME = "_airbytelib_state" - -GLOBAL_STATE_STREAM_NAMES = ["_GLOBAL", "_LEGACY"] - -Base = declarative_base() - - -class CachedStream(Base): # type: ignore[valid-type,misc] - __tablename__ = STREAMS_TABLE_NAME - - stream_name = Column(String) - source_name = Column(String) - table_name = Column(String, primary_key=True) - catalog_metadata = Column(String) - - -class StreamState(Base): # type: ignore[valid-type,misc] - __tablename__ = STATE_TABLE_NAME - - source_name = Column(String) - stream_name = Column(String) - table_name = Column(String, primary_key=True) - state_json = Column(String) - last_updated = Column(DateTime(timezone=True), onupdate=func.now(), default=func.now()) - - -class CatalogManager: - """ - A class to manage the stream catalog of data synced to a cache: - * What streams exist and to what tables they map - * The JSON schema for each stream - * The state of each stream if available - """ - - def __init__( - self, - engine: Engine, - table_name_resolver: Callable[[str], str], - ) -> None: - self._engine: Engine = engine - self._table_name_resolver = table_name_resolver - self._source_catalog: ConfiguredAirbyteCatalog | None = None - self._load_catalog_from_internal_table() - assert self._source_catalog is not None - - @property - def source_catalog(self) -> ConfiguredAirbyteCatalog: - """Return the source catalog. - - Raises: - AirbyteLibInternalError: If the source catalog is not set. - """ - if not self._source_catalog: - raise exc.AirbyteLibInternalError( - message="Source catalog should be initialized but is not.", - ) - - return self._source_catalog - - def _ensure_internal_tables(self) -> None: - engine = self._engine - Base.metadata.create_all(engine) - - def save_state( - self, - source_name: str, - state: AirbyteStateMessage, - stream_name: str, - ) -> None: - self._ensure_internal_tables() - engine = self._engine - with Session(engine) as session: - session.query(StreamState).filter( - StreamState.table_name == self._table_name_resolver(stream_name) - ).delete() - session.commit() - session.add( - StreamState( - source_name=source_name, - stream_name=stream_name, - table_name=self._table_name_resolver(stream_name), - state_json=state.json(), - ) - ) - session.commit() - - def get_state( - self, - source_name: str, - streams: list[str], - ) -> list[dict] | None: - self._ensure_internal_tables() - engine = self._engine - with Session(engine) as session: - states = ( - session.query(StreamState) - .filter( - StreamState.source_name == source_name, - StreamState.stream_name.in_([*streams, *GLOBAL_STATE_STREAM_NAMES]), - ) - .all() - ) - if not states: - return None - # Only return the states if the table name matches what the current cache - # would generate. Otherwise consider it part of a different cache. - states = [ - state - for state in states - if state.table_name == self._table_name_resolver(state.stream_name) - ] - return [json.loads(state.state_json) for state in states] - - def register_source( - self, - source_name: str, - incoming_source_catalog: ConfiguredAirbyteCatalog, - incoming_stream_names: set[str], - ) -> None: - """Register a source and its streams in the cache.""" - self._update_catalog( - incoming_source_catalog=incoming_source_catalog, - incoming_stream_names=incoming_stream_names, - ) - self._save_catalog_to_internal_table( - source_name=source_name, - incoming_source_catalog=incoming_source_catalog, - incoming_stream_names=incoming_stream_names, - ) - - def _update_catalog( - self, - incoming_source_catalog: ConfiguredAirbyteCatalog, - incoming_stream_names: set[str], - ) -> None: - if not self._source_catalog: - self._source_catalog = ConfiguredAirbyteCatalog( - streams=[ - stream - for stream in incoming_source_catalog.streams - if stream.stream.name in incoming_stream_names - ], - ) - assert len(self._source_catalog.streams) == len(incoming_stream_names) - return - - # Keep existing streams untouched if not incoming - unchanged_streams: list[ConfiguredAirbyteStream] = [ - stream - for stream in self._source_catalog.streams - if stream.stream.name not in incoming_stream_names - ] - new_streams: list[ConfiguredAirbyteStream] = [ - stream - for stream in incoming_source_catalog.streams - if stream.stream.name in incoming_stream_names - ] - self._source_catalog = ConfiguredAirbyteCatalog(streams=unchanged_streams + new_streams) - - def _save_catalog_to_internal_table( - self, - source_name: str, - incoming_source_catalog: ConfiguredAirbyteCatalog, - incoming_stream_names: set[str], - ) -> None: - self._ensure_internal_tables() - engine = self._engine - with Session(engine) as session: - # Delete and replace existing stream entries from the catalog cache - table_name_entries_to_delete = [ - self._table_name_resolver(incoming_stream_name) - for incoming_stream_name in incoming_stream_names - ] - result = ( - session.query(CachedStream) - .filter(CachedStream.table_name.in_(table_name_entries_to_delete)) - .delete() - ) - _ = result - session.commit() - insert_streams = [ - CachedStream( - source_name=source_name, - stream_name=stream.stream.name, - table_name=self._table_name_resolver(stream.stream.name), - catalog_metadata=json.dumps(stream.stream.json_schema), - ) - for stream in incoming_source_catalog.streams - ] - session.add_all(insert_streams) - session.commit() - - def get_stream_config( - self, - stream_name: str, - ) -> ConfiguredAirbyteStream: - """Return the column definitions for the given stream.""" - if not self.source_catalog: - raise exc.AirbyteLibInternalError( - message="Cannot get stream JSON schema without a catalog.", - ) - - matching_streams: list[ConfiguredAirbyteStream] = [ - stream for stream in self.source_catalog.streams if stream.stream.name == stream_name - ] - if not matching_streams: - raise exc.AirbyteStreamNotFoundError( - stream_name=stream_name, - context={ - "available_streams": [ - stream.stream.name for stream in self.source_catalog.streams - ], - }, - ) - - if len(matching_streams) > 1: - raise exc.AirbyteLibInternalError( - message="Multiple streams found with same name.", - context={ - "stream_name": stream_name, - }, - ) - - return matching_streams[0] - - def _load_catalog_from_internal_table(self) -> None: - self._ensure_internal_tables() - engine = self._engine - with Session(engine) as session: - # load all the streams - streams: list[CachedStream] = session.query(CachedStream).all() - if not streams: - # no streams means the cache is pristine - if not self._source_catalog: - self._source_catalog = ConfiguredAirbyteCatalog(streams=[]) - - return - - # load the catalog - self._source_catalog = ConfiguredAirbyteCatalog( - streams=[ - ConfiguredAirbyteStream( - stream=AirbyteStream( - name=stream.stream_name, - json_schema=json.loads(stream.catalog_metadata), - supported_sync_modes=[SyncMode.full_refresh], - ), - sync_mode=SyncMode.full_refresh, - destination_sync_mode=DestinationSyncMode.append, - ) - for stream in streams - # only load the streams where the table name matches what - # the current cache would generate - if stream.table_name == self._table_name_resolver(stream.stream_name) - ] - ) diff --git a/airbyte-lib/airbyte_lib/caches/base.py b/airbyte-lib/airbyte_lib/caches/base.py deleted file mode 100644 index b5ee35e680ac9..0000000000000 --- a/airbyte-lib/airbyte_lib/caches/base.py +++ /dev/null @@ -1,986 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -"""A SQL Cache implementation.""" -from __future__ import annotations - -import abc -import enum -from contextlib import contextmanager -from functools import cached_property -from typing import TYPE_CHECKING, cast, final - -import pandas as pd -import pyarrow as pa -import sqlalchemy -import ulid -from overrides import overrides -from sqlalchemy import ( - Column, - Table, - and_, - create_engine, - insert, - null, - select, - text, - update, -) -from sqlalchemy.pool import StaticPool -from sqlalchemy.sql.elements import TextClause - -from airbyte_lib import exceptions as exc -from airbyte_lib._file_writers.base import FileWriterBase, FileWriterBatchHandle -from airbyte_lib._processors import BatchHandle, RecordProcessor -from airbyte_lib._util.text_util import lower_case_set -from airbyte_lib.caches._catalog_manager import CatalogManager -from airbyte_lib.config import CacheConfigBase -from airbyte_lib.datasets._sql import CachedDataset -from airbyte_lib.strategies import WriteStrategy -from airbyte_lib.types import SQLTypeConverter - - -if TYPE_CHECKING: - from collections.abc import Generator, Iterator - from pathlib import Path - - from sqlalchemy.engine import Connection, Engine - from sqlalchemy.engine.cursor import CursorResult - from sqlalchemy.engine.reflection import Inspector - from sqlalchemy.sql.base import Executable - - from airbyte_protocol.models import ( - AirbyteStateMessage, - ConfiguredAirbyteCatalog, - ) - - from airbyte_lib.datasets._base import DatasetBase - from airbyte_lib.telemetry import CacheTelemetryInfo - - -DEBUG_MODE = False # Set to True to enable additional debug logging. - - -class RecordDedupeMode(enum.Enum): - APPEND = "append" - REPLACE = "replace" - - -class SQLRuntimeError(Exception): - """Raised when an SQL operation fails.""" - - -class SQLCacheConfigBase(CacheConfigBase): - """Same as a regular config except it exposes the 'get_sql_alchemy_url()' method.""" - - schema_name: str = "airbyte_raw" - - table_prefix: str | None = None - """ A prefix to add to all table names. - If 'None', a prefix will be created based on the source name. - """ - - table_suffix: str = "" - """A suffix to add to all table names.""" - - @abc.abstractmethod - def get_sql_alchemy_url(self) -> str: - """Returns a SQL Alchemy URL.""" - ... - - @abc.abstractmethod - def get_database_name(self) -> str: - """Return the name of the database.""" - ... - - -class GenericSQLCacheConfig(SQLCacheConfigBase): - """Allows configuring 'sql_alchemy_url' directly.""" - - sql_alchemy_url: str - - @overrides - def get_sql_alchemy_url(self) -> str: - """Returns a SQL Alchemy URL.""" - return self.sql_alchemy_url - - -class SQLCacheBase(RecordProcessor): - """A base class to be used for SQL Caches. - - Optionally we can use a file cache to store the data in parquet files. - """ - - type_converter_class: type[SQLTypeConverter] = SQLTypeConverter - config_class: type[SQLCacheConfigBase] - file_writer_class: type[FileWriterBase] - - supports_merge_insert = False - use_singleton_connection = False # If true, the same connection is used for all operations. - - # Constructor: - - @final # We don't want subclasses to have to override the constructor. - def __init__( - self, - config: SQLCacheConfigBase | None = None, - file_writer: FileWriterBase | None = None, - ) -> None: - self.config: SQLCacheConfigBase - self._engine: Engine | None = None - self._connection_to_reuse: Connection | None = None - super().__init__(config) - self._ensure_schema_exists() - self._catalog_manager = CatalogManager( - engine=self.get_sql_engine(), - table_name_resolver=lambda stream_name: self.get_sql_table_name(stream_name), - ) - self.file_writer = file_writer or self.file_writer_class( - config, catalog_manager=self._catalog_manager - ) - self.type_converter = self.type_converter_class() - self._cached_table_definitions: dict[str, sqlalchemy.Table] = {} - - def __getitem__(self, stream: str) -> DatasetBase: - return self.streams[stream] - - def __contains__(self, stream: str) -> bool: - return stream in self._streams_with_data - - def __iter__(self) -> Iterator[str]: - return iter(self._streams_with_data) - - # Public interface: - - def get_sql_alchemy_url(self) -> str: - """Return the SQLAlchemy URL to use.""" - return self.config.get_sql_alchemy_url() - - @final - @cached_property - def database_name(self) -> str: - """Return the name of the database.""" - return self.config.get_database_name() - - @final - def get_sql_engine(self) -> Engine: - """Return a new SQL engine to use.""" - if self._engine: - return self._engine - - sql_alchemy_url = self.get_sql_alchemy_url() - - execution_options = {"schema_translate_map": {None: self.config.schema_name}} - if self.use_singleton_connection: - if self._connection_to_reuse is None: - # This temporary bootstrap engine will be created once and is needed to - # create the long-lived connection object. - bootstrap_engine = create_engine( - sql_alchemy_url, - ) - self._connection_to_reuse = bootstrap_engine.connect() - - self._engine = create_engine( - sql_alchemy_url, - creator=lambda: self._connection_to_reuse, - poolclass=StaticPool, - echo=DEBUG_MODE, - execution_options=execution_options, - # isolation_level="AUTOCOMMIT", - ) - else: - # Regular engine creation for new connections - self._engine = create_engine( - sql_alchemy_url, - echo=DEBUG_MODE, - execution_options=execution_options, - # isolation_level="AUTOCOMMIT", - ) - - return self._engine - - def _init_connection_settings(self, connection: Connection) -> None: - """This is called automatically whenever a new connection is created. - - By default this is a no-op. Subclasses can use this to set connection settings, such as - timezone, case-sensitivity settings, and other session-level variables. - """ - pass - - @contextmanager - def get_sql_connection(self) -> Generator[sqlalchemy.engine.Connection, None, None]: - """A context manager which returns a new SQL connection for running queries. - - If the connection needs to close, it will be closed automatically. - """ - if self.use_singleton_connection and self._connection_to_reuse is not None: - connection = self._connection_to_reuse - self._init_connection_settings(connection) - yield connection - - else: - with self.get_sql_engine().begin() as connection: - self._init_connection_settings(connection) - yield connection - - if not self.use_singleton_connection: - connection.close() - del connection - - def get_sql_table_name( - self, - stream_name: str, - ) -> str: - """Return the name of the SQL table for the given stream.""" - table_prefix = self.config.table_prefix or "" - - # TODO: Add default prefix based on the source name. - - return self._normalize_table_name( - f"{table_prefix}{stream_name}{self.config.table_suffix}", - ) - - @final - def get_sql_table( - self, - stream_name: str, - ) -> sqlalchemy.Table: - """Return the main table object for the stream.""" - return self._get_table_by_name(self.get_sql_table_name(stream_name)) - - def _get_table_by_name( - self, - table_name: str, - *, - force_refresh: bool = False, - ) -> sqlalchemy.Table: - """Return a table object from a table name. - - To prevent unnecessary round-trips to the database, the table is cached after the first - query. To ignore the cache and force a refresh, set 'force_refresh' to True. - """ - if force_refresh or table_name not in self._cached_table_definitions: - self._cached_table_definitions[table_name] = sqlalchemy.Table( - table_name, - sqlalchemy.MetaData(schema=self.config.schema_name), - autoload_with=self.get_sql_engine(), - ) - - return self._cached_table_definitions[table_name] - - @final - @property - def streams( - self, - ) -> dict[str, CachedDataset]: - """Return a temporary table name.""" - result = {} - for stream_name in self._streams_with_data: - result[stream_name] = CachedDataset(self, stream_name) - - return result - - # Read methods: - - def get_records( - self, - stream_name: str, - ) -> CachedDataset: - """Uses SQLAlchemy to select all rows from the table.""" - return CachedDataset(self, stream_name) - - def get_pandas_dataframe( - self, - stream_name: str, - ) -> pd.DataFrame: - """Return a Pandas data frame with the stream's data.""" - table_name = self.get_sql_table_name(stream_name) - engine = self.get_sql_engine() - return pd.read_sql_table(table_name, engine) - - # Protected members (non-public interface): - - def _ensure_schema_exists( - self, - ) -> None: - """Return a new (unique) temporary table name.""" - schema_name = self.config.schema_name - if schema_name in self._get_schemas_list(): - return - - sql = f"CREATE SCHEMA IF NOT EXISTS {schema_name}" - - try: - self._execute_sql(sql) - except Exception as ex: - # Ignore schema exists errors. - if "already exists" not in str(ex): - raise - - if DEBUG_MODE: - found_schemas = self._get_schemas_list() - assert ( - schema_name in found_schemas - ), f"Schema {schema_name} was not created. Found: {found_schemas}" - - def _quote_identifier(self, identifier: str) -> str: - """Return the given identifier, quoted.""" - return f'"{identifier}"' - - @final - def _get_temp_table_name( - self, - stream_name: str, - batch_id: str | None = None, # ULID of the batch - ) -> str: - """Return a new (unique) temporary table name.""" - batch_id = batch_id or str(ulid.ULID()) - return self._normalize_table_name(f"{stream_name}_{batch_id}") - - def _fully_qualified( - self, - table_name: str, - ) -> str: - """Return the fully qualified name of the given table.""" - return f"{self.config.schema_name}.{self._quote_identifier(table_name)}" - - @final - def _create_table_for_loading( - self, - /, - stream_name: str, - batch_id: str, - ) -> str: - """Create a new table for loading data.""" - temp_table_name = self._get_temp_table_name(stream_name, batch_id) - column_definition_str = ",\n ".join( - f"{self._quote_identifier(column_name)} {sql_type}" - for column_name, sql_type in self._get_sql_column_definitions(stream_name).items() - ) - self._create_table(temp_table_name, column_definition_str) - - return temp_table_name - - def _get_tables_list( - self, - ) -> list[str]: - """Return a list of all tables in the database.""" - with self.get_sql_connection() as conn: - inspector: Inspector = sqlalchemy.inspect(conn) - return inspector.get_table_names(schema=self.config.schema_name) - - def _get_schemas_list( - self, - database_name: str | None = None, - ) -> list[str]: - """Return a list of all tables in the database.""" - inspector: Inspector = sqlalchemy.inspect(self.get_sql_engine()) - database_name = database_name or self.database_name - found_schemas = inspector.get_schema_names() - return [ - found_schema.split(".")[-1].strip('"') - for found_schema in found_schemas - if "." not in found_schema - or (found_schema.split(".")[0].lower().strip('"') == database_name.lower()) - ] - - def _ensure_final_table_exists( - self, - stream_name: str, - *, - create_if_missing: bool = True, - ) -> str: - """Create the final table if it doesn't already exist. - - Return the table name. - """ - table_name = self.get_sql_table_name(stream_name) - did_exist = self._table_exists(table_name) - if not did_exist and create_if_missing: - column_definition_str = ",\n ".join( - f"{self._quote_identifier(column_name)} {sql_type}" - for column_name, sql_type in self._get_sql_column_definitions( - stream_name, - ).items() - ) - self._create_table(table_name, column_definition_str) - - return table_name - - def _ensure_compatible_table_schema( - self, - stream_name: str, - *, - raise_on_error: bool = False, - ) -> bool: - """Return true if the given table is compatible with the stream's schema. - - If raise_on_error is true, raise an exception if the table is not compatible. - - TODO: Expand this to check for column types and sizes, and to add missing columns. - - Returns true if the table is compatible, false if it is not. - """ - json_schema = self._get_stream_json_schema(stream_name) - stream_column_names: list[str] = json_schema["properties"].keys() - table_column_names: list[str] = self.get_sql_table(stream_name).columns.keys() - - lower_case_table_column_names = lower_case_set(table_column_names) - missing_columns = [ - stream_col - for stream_col in stream_column_names - if stream_col.lower() not in lower_case_table_column_names - ] - if missing_columns: - if raise_on_error: - raise exc.AirbyteLibCacheTableValidationError( - violation="Cache table is missing expected columns.", - context={ - "stream_column_names": stream_column_names, - "table_column_names": table_column_names, - "missing_columns": missing_columns, - }, - ) - return False # Some columns are missing. - - return True # All columns exist. - - @final - def _create_table( - self, - table_name: str, - column_definition_str: str, - primary_keys: list[str] | None = None, - ) -> None: - if DEBUG_MODE: - assert table_name not in self._get_tables_list(), f"Table {table_name} already exists." - - if primary_keys: - pk_str = ", ".join(primary_keys) - column_definition_str += f",\n PRIMARY KEY ({pk_str})" - - cmd = f""" - CREATE TABLE {self._fully_qualified(table_name)} ( - {column_definition_str} - ) - """ - _ = self._execute_sql(cmd) - if DEBUG_MODE: - tables_list = self._get_tables_list() - assert ( - table_name in tables_list - ), f"Table {table_name} was not created. Found: {tables_list}" - - def _normalize_column_name( - self, - raw_name: str, - ) -> str: - return raw_name.lower().replace(" ", "_").replace("-", "_") - - def _normalize_table_name( - self, - raw_name: str, - ) -> str: - return raw_name.lower().replace(" ", "_").replace("-", "_") - - @final - def _get_sql_column_definitions( - self, - stream_name: str, - ) -> dict[str, sqlalchemy.types.TypeEngine]: - """Return the column definitions for the given stream.""" - columns: dict[str, sqlalchemy.types.TypeEngine] = {} - properties = self._get_stream_json_schema(stream_name)["properties"] - for property_name, json_schema_property_def in properties.items(): - clean_prop_name = self._normalize_column_name(property_name) - columns[clean_prop_name] = self.type_converter.to_sql_type( - json_schema_property_def, - ) - - # TODO: Add the metadata columns (this breaks tests) - # columns["_airbyte_extracted_at"] = sqlalchemy.TIMESTAMP() - # columns["_airbyte_loaded_at"] = sqlalchemy.TIMESTAMP() - return columns - - @overrides - def _write_batch( - self, - stream_name: str, - batch_id: str, - record_batch: pa.Table, - ) -> FileWriterBatchHandle: - """Process a record batch. - - Return the path to the cache file. - """ - return self.file_writer.write_batch(stream_name, batch_id, record_batch) - - def _cleanup_batch( - self, - stream_name: str, - batch_id: str, - batch_handle: BatchHandle, - ) -> None: - """Clean up the cache. - - For SQL caches, we only need to call the cleanup operation on the file writer. - - Subclasses should call super() if they override this method. - """ - self.file_writer.cleanup_batch(stream_name, batch_id, batch_handle) - - @final - @overrides - def _finalize_batches( - self, - stream_name: str, - write_strategy: WriteStrategy, - ) -> dict[str, BatchHandle]: - """Finalize all uncommitted batches. - - This is a generic 'final' implementation, which should not be overridden. - - Returns a mapping of batch IDs to batch handles, for those processed batches. - - TODO: Add a dedupe step here to remove duplicates from the temp table. - Some sources will send us duplicate records within the same stream, - although this is a fairly rare edge case we can ignore in V1. - """ - with self._finalizing_batches(stream_name) as batches_to_finalize: - if not batches_to_finalize: - return {} - - files: list[Path] = [] - # Get a list of all files to finalize from all pending batches. - for batch_handle in batches_to_finalize.values(): - batch_handle = cast(FileWriterBatchHandle, batch_handle) - files += batch_handle.files - # Use the max batch ID as the batch ID for table names. - max_batch_id = max(batches_to_finalize.keys()) - - # Make sure the target schema and target table exist. - self._ensure_schema_exists() - final_table_name = self._ensure_final_table_exists( - stream_name, - create_if_missing=True, - ) - self._ensure_compatible_table_schema( - stream_name=stream_name, - raise_on_error=True, - ) - - temp_table_name = self._write_files_to_new_table( - files=files, - stream_name=stream_name, - batch_id=max_batch_id, - ) - try: - self._write_temp_table_to_final_table( - stream_name=stream_name, - temp_table_name=temp_table_name, - final_table_name=final_table_name, - write_strategy=write_strategy, - ) - finally: - self._drop_temp_table(temp_table_name, if_exists=True) - - # Return the batch handles as measure of work completed. - return batches_to_finalize - - @overrides - def _finalize_state_messages( - self, - stream_name: str, - state_messages: list[AirbyteStateMessage], - ) -> None: - """Handle state messages by passing them to the catalog manager.""" - if not self._catalog_manager: - raise exc.AirbyteLibInternalError( - message="Catalog manager should exist but does not.", - ) - if state_messages and self._source_name: - self._catalog_manager.save_state( - source_name=self._source_name, - stream_name=stream_name, - state=state_messages[-1], - ) - - def get_state(self) -> list[dict]: - """Return the current state of the source.""" - if not self._source_name: - return [] - if not self._catalog_manager: - raise exc.AirbyteLibInternalError( - message="Catalog manager should exist but does not.", - ) - return ( - self._catalog_manager.get_state(self._source_name, list(self._streams_with_data)) or [] - ) - - def _execute_sql(self, sql: str | TextClause | Executable) -> CursorResult: - """Execute the given SQL statement.""" - if isinstance(sql, str): - sql = text(sql) - if isinstance(sql, TextClause): - sql = sql.execution_options( - autocommit=True, - ) - - with self.get_sql_connection() as conn: - try: - result = conn.execute(sql) - except ( - sqlalchemy.exc.ProgrammingError, - sqlalchemy.exc.SQLAlchemyError, - ) as ex: - msg = f"Error when executing SQL:\n{sql}\n{type(ex).__name__}{ex!s}" - raise SQLRuntimeError(msg) from None # from ex - - return result - - def _drop_temp_table( - self, - table_name: str, - *, - if_exists: bool = True, - ) -> None: - """Drop the given table.""" - exists_str = "IF EXISTS" if if_exists else "" - self._execute_sql(f"DROP TABLE {exists_str} {self._fully_qualified(table_name)}") - - def _write_files_to_new_table( - self, - files: list[Path], - stream_name: str, - batch_id: str, - ) -> str: - """Write a file(s) to a new table. - - This is a generic implementation, which can be overridden by subclasses - to improve performance. - """ - temp_table_name = self._create_table_for_loading(stream_name, batch_id) - for file_path in files: - with pa.parquet.ParquetFile(file_path) as pf: - record_batch = pf.read() - dataframe = record_batch.to_pandas() - - # Pandas will auto-create the table if it doesn't exist, which we don't want. - if not self._table_exists(temp_table_name): - raise exc.AirbyteLibInternalError( - message="Table does not exist after creation.", - context={ - "temp_table_name": temp_table_name, - }, - ) - - dataframe.to_sql( - temp_table_name, - self.get_sql_alchemy_url(), - schema=self.config.schema_name, - if_exists="append", - index=False, - dtype=self._get_sql_column_definitions(stream_name), - ) - return temp_table_name - - @final - def _write_temp_table_to_final_table( - self, - stream_name: str, - temp_table_name: str, - final_table_name: str, - write_strategy: WriteStrategy, - ) -> None: - """Write the temp table into the final table using the provided write strategy.""" - has_pks: bool = bool(self._get_primary_keys(stream_name)) - has_incremental_key: bool = bool(self._get_incremental_key(stream_name)) - if write_strategy == WriteStrategy.MERGE and not has_pks: - raise exc.AirbyteLibInputError( - message="Cannot use merge strategy on a stream with no primary keys.", - context={ - "stream_name": stream_name, - }, - ) - - if write_strategy == WriteStrategy.AUTO: - if has_pks: - write_strategy = WriteStrategy.MERGE - elif has_incremental_key: - write_strategy = WriteStrategy.APPEND - else: - write_strategy = WriteStrategy.REPLACE - - if write_strategy == WriteStrategy.REPLACE: - self._swap_temp_table_with_final_table( - stream_name=stream_name, - temp_table_name=temp_table_name, - final_table_name=final_table_name, - ) - return - - if write_strategy == WriteStrategy.APPEND: - self._append_temp_table_to_final_table( - stream_name=stream_name, - temp_table_name=temp_table_name, - final_table_name=final_table_name, - ) - return - - if write_strategy == WriteStrategy.MERGE: - if not self.supports_merge_insert: - # Fallback to emulated merge if the database does not support merge natively. - self._emulated_merge_temp_table_to_final_table( - stream_name=stream_name, - temp_table_name=temp_table_name, - final_table_name=final_table_name, - ) - return - - self._merge_temp_table_to_final_table( - stream_name=stream_name, - temp_table_name=temp_table_name, - final_table_name=final_table_name, - ) - return - - raise exc.AirbyteLibInternalError( - message="Write strategy is not supported.", - context={ - "write_strategy": write_strategy, - }, - ) - - def _append_temp_table_to_final_table( - self, - temp_table_name: str, - final_table_name: str, - stream_name: str, - ) -> None: - nl = "\n" - columns = [self._quote_identifier(c) for c in self._get_sql_column_definitions(stream_name)] - self._execute_sql( - f""" - INSERT INTO {self._fully_qualified(final_table_name)} ( - {f',{nl} '.join(columns)} - ) - SELECT - {f',{nl} '.join(columns)} - FROM {self._fully_qualified(temp_table_name)} - """, - ) - - def _get_primary_keys( - self, - stream_name: str, - ) -> list[str]: - pks = self._get_stream_config(stream_name).primary_key - if not pks: - return [] - - joined_pks = [".".join(pk) for pk in pks] - for pk in joined_pks: - if "." in pk: - msg = "Nested primary keys are not yet supported. Found: {pk}" - raise NotImplementedError(msg) - - return joined_pks - - def _get_incremental_key( - self, - stream_name: str, - ) -> str | None: - return self._get_stream_config(stream_name).cursor_field - - def _swap_temp_table_with_final_table( - self, - stream_name: str, - temp_table_name: str, - final_table_name: str, - ) -> None: - """Merge the temp table into the main one. - - This implementation requires MERGE support in the SQL DB. - Databases that do not support this syntax can override this method. - """ - if final_table_name is None: - raise exc.AirbyteLibInternalError(message="Arg 'final_table_name' cannot be None.") - if temp_table_name is None: - raise exc.AirbyteLibInternalError(message="Arg 'temp_table_name' cannot be None.") - - _ = stream_name - deletion_name = f"{final_table_name}_deleteme" - commands = "\n".join( - [ - f"ALTER TABLE {final_table_name} RENAME TO {deletion_name};", - f"ALTER TABLE {temp_table_name} RENAME TO {final_table_name};", - f"DROP TABLE {deletion_name};", - ] - ) - self._execute_sql(commands) - - def _merge_temp_table_to_final_table( - self, - stream_name: str, - temp_table_name: str, - final_table_name: str, - ) -> None: - """Merge the temp table into the main one. - - This implementation requires MERGE support in the SQL DB. - Databases that do not support this syntax can override this method. - """ - nl = "\n" - columns = {self._quote_identifier(c) for c in self._get_sql_column_definitions(stream_name)} - pk_columns = {self._quote_identifier(c) for c in self._get_primary_keys(stream_name)} - non_pk_columns = columns - pk_columns - join_clause = "{nl} AND ".join(f"tmp.{pk_col} = final.{pk_col}" for pk_col in pk_columns) - set_clause = "{nl} ".join(f"{col} = tmp.{col}" for col in non_pk_columns) - self._execute_sql( - f""" - MERGE INTO {self._fully_qualified(final_table_name)} final - USING ( - SELECT * - FROM {self._fully_qualified(temp_table_name)} - ) AS tmp - ON {join_clause} - WHEN MATCHED THEN UPDATE - SET - {set_clause} - WHEN NOT MATCHED THEN INSERT - ( - {f',{nl} '.join(columns)} - ) - VALUES ( - tmp.{f',{nl} tmp.'.join(columns)} - ); - """, - ) - - def _get_column_by_name(self, table: str | Table, column_name: str) -> Column: - """Return the column object for the given column name. - - This method is case-insensitive. - """ - if isinstance(table, str): - table = self._get_table_by_name(table) - try: - # Try to get the column in a case-insensitive manner - return next(col for col in table.c if col.name.lower() == column_name.lower()) - except StopIteration: - raise exc.AirbyteLibInternalError( - message="Could not find matching column.", - context={ - "table": table, - "column_name": column_name, - }, - ) from None - - def _emulated_merge_temp_table_to_final_table( - self, - stream_name: str, - temp_table_name: str, - final_table_name: str, - ) -> None: - """Emulate the merge operation using a series of SQL commands. - - This is a fallback implementation for databases that do not support MERGE. - """ - final_table = self._get_table_by_name(final_table_name) - temp_table = self._get_table_by_name(temp_table_name) - pk_columns = self._get_primary_keys(stream_name) - - columns_to_update: set[str] = self._get_sql_column_definitions( - stream_name=stream_name - ).keys() - set(pk_columns) - - # Create a dictionary mapping columns in users_final to users_stage for updating - update_values = { - self._get_column_by_name(final_table, column): ( - self._get_column_by_name(temp_table, column) - ) - for column in columns_to_update - } - - # Craft the WHERE clause for composite primary keys - join_conditions = [ - self._get_column_by_name(final_table, pk_column) - == self._get_column_by_name(temp_table, pk_column) - for pk_column in pk_columns - ] - join_clause = and_(*join_conditions) - - # Craft the UPDATE statement - update_stmt = update(final_table).values(update_values).where(join_clause) - - # Define a join between temp_table and final_table - joined_table = temp_table.outerjoin(final_table, join_clause) - - # Define a condition that checks for records in temp_table that do not have a corresponding - # record in final_table - where_not_exists_clause = self._get_column_by_name(final_table, pk_columns[0]) == null() - - # Select records from temp_table that are not in final_table - select_new_records_stmt = ( - select([temp_table]).select_from(joined_table).where(where_not_exists_clause) - ) - - # Craft the INSERT statement using the select statement - insert_new_records_stmt = insert(final_table).from_select( - names=[column.name for column in temp_table.columns], select=select_new_records_stmt - ) - - if DEBUG_MODE: - print(str(update_stmt)) - print(str(insert_new_records_stmt)) - - with self.get_sql_connection() as conn: - conn.execute(update_stmt) - conn.execute(insert_new_records_stmt) - - @final - def _table_exists( - self, - table_name: str, - ) -> bool: - """Return true if the given table exists.""" - return table_name in self._get_tables_list() - - @overrides - def register_source( - self, - source_name: str, - incoming_source_catalog: ConfiguredAirbyteCatalog, - stream_names: set[str], - ) -> None: - """Register the source with the cache. - - We use stream_names to determine which streams will receive data, and - we only register the stream if is expected to receive data. - - This method is called by the source when it is initialized. - """ - self._source_name = source_name - self._ensure_schema_exists() - super().register_source( - source_name, - incoming_source_catalog, - stream_names=stream_names, - ) - - @property - @overrides - def _streams_with_data(self) -> set[str]: - """Return a list of known streams.""" - if not self._catalog_manager: - raise exc.AirbyteLibInternalError( - message="Cannot get streams with data without a catalog.", - ) - return { - stream.stream.name - for stream in self._catalog_manager.source_catalog.streams - if self._table_exists(self.get_sql_table_name(stream.stream.name)) - } - - @abc.abstractmethod - def get_telemetry_info(self) -> CacheTelemetryInfo: - pass diff --git a/airbyte-lib/airbyte_lib/caches/duckdb.py b/airbyte-lib/airbyte_lib/caches/duckdb.py deleted file mode 100644 index 07c7fbdaf1255..0000000000000 --- a/airbyte-lib/airbyte_lib/caches/duckdb.py +++ /dev/null @@ -1,205 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -"""A DuckDB implementation of the cache.""" - -from __future__ import annotations - -import warnings -from pathlib import Path -from textwrap import dedent, indent -from typing import cast - -from overrides import overrides - -from airbyte_lib._file_writers import ParquetWriter, ParquetWriterConfig -from airbyte_lib.caches.base import SQLCacheBase, SQLCacheConfigBase -from airbyte_lib.telemetry import CacheTelemetryInfo - - -# Suppress warnings from DuckDB about reflection on indices. -# https://github.com/Mause/duckdb_engine/issues/905 -warnings.filterwarnings( - "ignore", - message="duckdb-engine doesn't yet support reflection on indices", -) - - -class DuckDBCacheConfig(SQLCacheConfigBase, ParquetWriterConfig): - """Configuration for the DuckDB cache. - - Also inherits config from the ParquetWriter, which is responsible for writing files to disk. - """ - - db_path: Path | str - """Normally db_path is a Path object. - - There are some cases, such as when connecting to MotherDuck, where it could be a string that - is not also a path, such as "md:" to connect the user's default MotherDuck DB. - """ - schema_name: str = "main" - """The name of the schema to write to. Defaults to "main".""" - - @overrides - def get_sql_alchemy_url(self) -> str: - """Return the SQLAlchemy URL to use.""" - # return f"duckdb:///{self.db_path}?schema={self.schema_name}" - return f"duckdb:///{self.db_path!s}" - - def get_database_name(self) -> str: - """Return the name of the database.""" - if self.db_path == ":memory:": - return "memory" - - # Return the file name without the extension - return str(self.db_path).split("/")[-1].split(".")[0] - - -class DuckDBCacheBase(SQLCacheBase): - """A DuckDB implementation of the cache. - - Parquet is used for local file storage before bulk loading. - Unlike the Snowflake implementation, we can't use the COPY command to load data - so we insert as values instead. - """ - - config_class = DuckDBCacheConfig - supports_merge_insert = False - - @overrides - def get_telemetry_info(self) -> CacheTelemetryInfo: - return CacheTelemetryInfo("duckdb") - - @overrides - def _setup(self) -> None: - """Create the database parent folder if it doesn't yet exist.""" - config = cast(DuckDBCacheConfig, self.config) - - if config.db_path == ":memory:": - return - - Path(config.db_path).parent.mkdir(parents=True, exist_ok=True) - - -class DuckDBCache(DuckDBCacheBase): - """A DuckDB implementation of the cache. - - Parquet is used for local file storage before bulk loading. - Unlike the Snowflake implementation, we can't use the COPY command to load data - so we insert as values instead. - """ - - file_writer_class = ParquetWriter - - # TODO: Delete or rewrite this method after DuckDB adds support for primary key inspection. - # @overrides - # def _merge_temp_table_to_final_table( - # self, - # stream_name: str, - # temp_table_name: str, - # final_table_name: str, - # ) -> None: - # """Merge the temp table into the main one. - - # This implementation requires MERGE support in the SQL DB. - # Databases that do not support this syntax can override this method. - # """ - # if not self._get_primary_keys(stream_name): - # raise exc.AirbyteLibInternalError( - # message="Primary keys not found. Cannot run merge updates without primary keys.", - # context={ - # "stream_name": stream_name, - # }, - # ) - - # _ = stream_name - # final_table = self._fully_qualified(final_table_name) - # staging_table = self._fully_qualified(temp_table_name) - # self._execute_sql( - # # https://duckdb.org/docs/sql/statements/insert.html - # # NOTE: This depends on primary keys being set properly in the final table. - # f""" - # INSERT OR REPLACE INTO {final_table} BY NAME - # (SELECT * FROM {staging_table}) - # """ - # ) - - @overrides - def _ensure_compatible_table_schema( - self, - stream_name: str, - *, - raise_on_error: bool = True, - ) -> bool: - """Return true if the given table is compatible with the stream's schema. - - In addition to the base implementation, this also checks primary keys. - """ - # call super - if not super()._ensure_compatible_table_schema( - stream_name=stream_name, - raise_on_error=raise_on_error, - ): - return False - - # TODO: Add validation for primary keys after DuckDB adds support for primary key - # inspection: https://github.com/Mause/duckdb_engine/issues/594 - # This is a problem because DuckDB implicitly joins on primary keys during MERGE. - # pk_cols = self._get_primary_keys(stream_name) - # table = self.get_sql_table(table_name) - # table_pk_cols = table.primary_key.columns.keys() - # if set(pk_cols) != set(table_pk_cols): - # if raise_on_error: - # raise exc.AirbyteLibCacheTableValidationError( - # violation="Primary keys do not match.", - # context={ - # "stream_name": stream_name, - # "table_name": table_name, - # "expected": pk_cols, - # "found": table_pk_cols, - # }, - # ) - # return False - - return True - - def _write_files_to_new_table( - self, - files: list[Path], - stream_name: str, - batch_id: str, - ) -> str: - """Write a file(s) to a new table. - - We use DuckDB's `read_parquet` function to efficiently read the files and insert - them into the table in a single operation. - - Note: This implementation is fragile in regards to column ordering. However, since - we are inserting into a temp table we have just created, there should be no - drift between the table schema and the file schema. - """ - temp_table_name = self._create_table_for_loading( - stream_name=stream_name, - batch_id=batch_id, - ) - columns_list = [ - self._quote_identifier(c) - for c in list(self._get_sql_column_definitions(stream_name).keys()) - ] - columns_list_str = indent("\n, ".join(columns_list), " ") - files_list = ", ".join([f"'{f!s}'" for f in files]) - insert_statement = dedent( - f""" - INSERT INTO {self.config.schema_name}.{temp_table_name} - ( - {columns_list_str} - ) - SELECT - {columns_list_str} - FROM read_parquet( - [{files_list}], - union_by_name = true - ) - """ - ) - self._execute_sql(insert_statement) - return temp_table_name diff --git a/airbyte-lib/airbyte_lib/caches/postgres.py b/airbyte-lib/airbyte_lib/caches/postgres.py deleted file mode 100644 index 324d29c2d58e5..0000000000000 --- a/airbyte-lib/airbyte_lib/caches/postgres.py +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -"""A Postgres implementation of the cache.""" - -from __future__ import annotations - -from overrides import overrides - -from airbyte_lib._file_writers import ParquetWriter, ParquetWriterConfig -from airbyte_lib.caches.base import SQLCacheBase, SQLCacheConfigBase -from airbyte_lib.telemetry import CacheTelemetryInfo - - -class PostgresCacheConfig(SQLCacheConfigBase, ParquetWriterConfig): - """Configuration for the Postgres cache. - - Also inherits config from the ParquetWriter, which is responsible for writing files to disk. - """ - - host: str - port: int - username: str - password: str - database: str - - # Already defined in base class: `schema_name` - - @overrides - def get_sql_alchemy_url(self) -> str: - """Return the SQLAlchemy URL to use.""" - return f"postgresql+psycopg2://{self.username}:{self.password}@{self.host}:{self.port}/{self.database}" - - def get_database_name(self) -> str: - """Return the name of the database.""" - return self.database - - -class PostgresCache(SQLCacheBase): - """A Postgres implementation of the cache. - - Parquet is used for local file storage before bulk loading. - Unlike the Snowflake implementation, we can't use the COPY command to load data - so we insert as values instead. - - TOOD: Add optimized bulk load path for Postgres. Could use an alternate file writer - or another import method. (Relatively low priority, since for now it works fine as-is.) - """ - - config_class = PostgresCacheConfig - file_writer_class = ParquetWriter - supports_merge_insert = False # TODO: Add native implementation for merge insert - - @overrides - def get_telemetry_info(self) -> CacheTelemetryInfo: - return CacheTelemetryInfo("postgres") diff --git a/airbyte-lib/airbyte_lib/caches/snowflake.py b/airbyte-lib/airbyte_lib/caches/snowflake.py deleted file mode 100644 index 2a59f723af06f..0000000000000 --- a/airbyte-lib/airbyte_lib/caches/snowflake.py +++ /dev/null @@ -1,164 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -"""A Snowflake implementation of the cache.""" - -from __future__ import annotations - -from textwrap import dedent, indent -from typing import TYPE_CHECKING - -import sqlalchemy -from overrides import overrides -from snowflake.sqlalchemy import URL, VARIANT - -from airbyte_lib._file_writers import ParquetWriter, ParquetWriterConfig -from airbyte_lib.caches.base import ( - RecordDedupeMode, - SQLCacheBase, - SQLCacheConfigBase, -) -from airbyte_lib.telemetry import CacheTelemetryInfo -from airbyte_lib.types import SQLTypeConverter - - -if TYPE_CHECKING: - from pathlib import Path - - from sqlalchemy.engine import Connection - - -class SnowflakeCacheConfig(SQLCacheConfigBase, ParquetWriterConfig): - """Configuration for the Snowflake cache. - - Also inherits config from the ParquetWriter, which is responsible for writing files to disk. - """ - - account: str - username: str - password: str - warehouse: str - database: str - role: str - - dedupe_mode = RecordDedupeMode.APPEND - - # Already defined in base class: - # schema_name: str - - @overrides - def get_sql_alchemy_url(self) -> str: - """Return the SQLAlchemy URL to use.""" - return str( - URL( - account=self.account, - user=self.username, - password=self.password, - database=self.database, - warehouse=self.warehouse, - schema=self.schema_name, - role=self.role, - ) - ) - - def get_database_name(self) -> str: - """Return the name of the database.""" - return self.database - - -class SnowflakeTypeConverter(SQLTypeConverter): - """A class to convert types for Snowflake.""" - - @overrides - def to_sql_type( - self, - json_schema_property_def: dict[str, str | dict | list], - ) -> sqlalchemy.types.TypeEngine: - """Convert a value to a SQL type. - - We first call the parent class method to get the type. Then if the type JSON, we - replace it with VARIANT. - """ - sql_type = super().to_sql_type(json_schema_property_def) - if isinstance(sql_type, sqlalchemy.types.JSON): - return VARIANT() - - return sql_type - - -class SnowflakeSQLCache(SQLCacheBase): - """A Snowflake implementation of the cache. - - Parquet is used for local file storage before bulk loading. - """ - - config_class = SnowflakeCacheConfig - file_writer_class = ParquetWriter - type_converter_class = SnowflakeTypeConverter - - @overrides - def _write_files_to_new_table( - self, - files: list[Path], - stream_name: str, - batch_id: str, - ) -> str: - """Write files to a new table.""" - temp_table_name = self._create_table_for_loading( - stream_name=stream_name, - batch_id=batch_id, - ) - internal_sf_stage_name = f"@%{temp_table_name}" - put_files_statements = "\n".join( - [ - f"PUT 'file://{file_path.absolute()!s}' {internal_sf_stage_name};" - for file_path in files - ] - ) - self._execute_sql(put_files_statements) - - columns_list = [ - self._quote_identifier(c) - for c in list(self._get_sql_column_definitions(stream_name).keys()) - ] - files_list = ", ".join([f"'{f.name}'" for f in files]) - columns_list_str: str = indent("\n, ".join(columns_list), " " * 12) - variant_cols_str: str = ("\n" + " " * 21 + ", ").join([f"$1:{col}" for col in columns_list]) - copy_statement = dedent( - f""" - COPY INTO {temp_table_name} - ( - {columns_list_str} - ) - FROM ( - SELECT {variant_cols_str} - FROM {internal_sf_stage_name} - ) - FILES = ( {files_list} ) - FILE_FORMAT = ( TYPE = PARQUET ) - ; - """ - ) - self._execute_sql(copy_statement) - return temp_table_name - - @overrides - def _init_connection_settings(self, connection: Connection) -> None: - """We set Snowflake-specific settings for the session. - - This sets QUOTED_IDENTIFIERS_IGNORE_CASE setting to True, which is necessary because - Snowflake otherwise will treat quoted table and column references as case-sensitive. - More info: https://docs.snowflake.com/en/sql-reference/identifiers-syntax - - This also sets MULTI_STATEMENT_COUNT to 0, which allows multi-statement commands. - """ - connection.execute( - """ - ALTER SESSION SET - QUOTED_IDENTIFIERS_IGNORE_CASE = TRUE - MULTI_STATEMENT_COUNT = 0 - """ - ) - - @overrides - def get_telemetry_info(self) -> CacheTelemetryInfo: - return CacheTelemetryInfo("snowflake") diff --git a/airbyte-lib/airbyte_lib/config.py b/airbyte-lib/airbyte_lib/config.py deleted file mode 100644 index 4fd4e60367a9e..0000000000000 --- a/airbyte-lib/airbyte_lib/config.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -"""Define base Config interface, used by Caches and also File Writers (Processors).""" - -from __future__ import annotations - -from pydantic import BaseModel - - -class CacheConfigBase( - BaseModel -): # TODO: meta=EnforceOverrides (Pydantic doesn't like it currently.) - pass diff --git a/airbyte-lib/airbyte_lib/datasets/__init__.py b/airbyte-lib/airbyte_lib/datasets/__init__.py deleted file mode 100644 index bfd4f02ce319a..0000000000000 --- a/airbyte-lib/airbyte_lib/datasets/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -from __future__ import annotations - -from airbyte_lib.datasets._base import DatasetBase -from airbyte_lib.datasets._lazy import LazyDataset -from airbyte_lib.datasets._map import DatasetMap -from airbyte_lib.datasets._sql import CachedDataset, SQLDataset - - -__all__ = [ - "CachedDataset", - "DatasetBase", - "DatasetMap", - "LazyDataset", - "SQLDataset", -] diff --git a/airbyte-lib/airbyte_lib/datasets/_base.py b/airbyte-lib/airbyte_lib/datasets/_base.py deleted file mode 100644 index f0fdfab52b912..0000000000000 --- a/airbyte-lib/airbyte_lib/datasets/_base.py +++ /dev/null @@ -1,27 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -from __future__ import annotations - -from abc import ABC, abstractmethod -from collections.abc import Iterator, Mapping -from typing import Any, cast - -from pandas import DataFrame - - -class DatasetBase(ABC): - """Base implementation for all datasets.""" - - @abstractmethod - def __iter__(self) -> Iterator[Mapping[str, Any]]: - """Return the iterator of records.""" - raise NotImplementedError - - def to_pandas(self) -> DataFrame: - """Return a pandas DataFrame representation of the dataset. - - The base implementation simply passes the record iterator to Panda's DataFrame constructor. - """ - # Technically, we return an iterator of Mapping objects. However, pandas - # expects an iterator of dict objects. This cast is safe because we know - # duck typing is correct for this use case. - return DataFrame(cast(Iterator[dict[str, Any]], self)) diff --git a/airbyte-lib/airbyte_lib/datasets/_lazy.py b/airbyte-lib/airbyte_lib/datasets/_lazy.py deleted file mode 100644 index 83d67cec00436..0000000000000 --- a/airbyte-lib/airbyte_lib/datasets/_lazy.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -from __future__ import annotations - -from typing import TYPE_CHECKING, Any - -from overrides import overrides - -from airbyte_lib.datasets import DatasetBase - - -if TYPE_CHECKING: - from collections.abc import Iterator, Mapping - - -class LazyDataset(DatasetBase): - """A dataset that is loaded incrementally from a source or a SQL query.""" - - def __init__( - self, - iterator: Iterator[Mapping[str, Any]], - ) -> None: - self._iterator: Iterator[Mapping[str, Any]] = iterator - super().__init__() - - @overrides - def __iter__(self) -> Iterator[Mapping[str, Any]]: - return self._iterator - - def __next__(self) -> Mapping[str, Any]: - return next(self._iterator) diff --git a/airbyte-lib/airbyte_lib/datasets/_map.py b/airbyte-lib/airbyte_lib/datasets/_map.py deleted file mode 100644 index 42eaed88f0e3e..0000000000000 --- a/airbyte-lib/airbyte_lib/datasets/_map.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -"""A generic interface for a set of streams. - -TODO: This is a work in progress. It is not yet used by any other code. -TODO: Implement before release, or delete. -""" -from __future__ import annotations - -from collections.abc import Iterator, Mapping -from typing import TYPE_CHECKING - - -if TYPE_CHECKING: - from airbyte_lib.datasets._base import DatasetBase - - -class DatasetMap(Mapping): - """A generic interface for a set of streams or datasets.""" - - def __init__(self) -> None: - self._datasets: dict[str, DatasetBase] = {} - - def __getitem__(self, key: str) -> DatasetBase: - return self._datasets[key] - - def __iter__(self) -> Iterator[str]: - return iter(self._datasets) - - def __len__(self) -> int: - return len(self._datasets) diff --git a/airbyte-lib/airbyte_lib/datasets/_sql.py b/airbyte-lib/airbyte_lib/datasets/_sql.py deleted file mode 100644 index 7dfb224821463..0000000000000 --- a/airbyte-lib/airbyte_lib/datasets/_sql.py +++ /dev/null @@ -1,136 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -from __future__ import annotations - -from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, cast - -from overrides import overrides -from sqlalchemy import and_, func, select, text - -from airbyte_lib.datasets._base import DatasetBase - - -if TYPE_CHECKING: - from collections.abc import Iterator - - from pandas import DataFrame - from sqlalchemy import Selectable, Table - from sqlalchemy.sql import ClauseElement - - from airbyte_lib.caches import SQLCacheBase - - -class SQLDataset(DatasetBase): - """A dataset that is loaded incrementally from a SQL query. - - The CachedDataset class is a subclass of this class, which simply passes a SELECT over the full - table as the query statement. - """ - - def __init__( - self, - cache: SQLCacheBase, - stream_name: str, - query_statement: Selectable, - ) -> None: - self._length: int | None = None - self._cache: SQLCacheBase = cache - self._stream_name: str = stream_name - self._query_statement: Selectable = query_statement - super().__init__() - - @property - def stream_name(self) -> str: - return self._stream_name - - def __iter__(self) -> Iterator[Mapping[str, Any]]: - with self._cache.get_sql_connection() as conn: - for row in conn.execute(self._query_statement): - # Access to private member required because SQLAlchemy doesn't expose a public API. - # https://pydoc.dev/sqlalchemy/latest/sqlalchemy.engine.row.RowMapping.html - yield cast(Mapping[str, Any], row._mapping) # noqa: SLF001 - - def __len__(self) -> int: - """Return the number of records in the dataset. - - This method caches the length of the dataset after the first call. - """ - if self._length is None: - count_query = select([func.count()]).select_from(self._query_statement.alias()) - with self._cache.get_sql_connection() as conn: - self._length = conn.execute(count_query).scalar() - - return self._length - - def to_pandas(self) -> DataFrame: - return self._cache.get_pandas_dataframe(self._stream_name) - - def with_filter(self, *filter_expressions: ClauseElement | str) -> SQLDataset: - """Filter the dataset by a set of column values. - - Filters can be specified as either a string or a SQLAlchemy expression. - - Filters are lazily applied to the dataset, so they can be chained together. For example: - - dataset.with_filter("id > 5").with_filter("id < 10") - - is equivalent to: - - dataset.with_filter("id > 5", "id < 10") - """ - # Convert all strings to TextClause objects. - filters: list[ClauseElement] = [ - text(expression) if isinstance(expression, str) else expression - for expression in filter_expressions - ] - filtered_select = self._query_statement.where(and_(*filters)) - return SQLDataset( - cache=self._cache, - stream_name=self._stream_name, - query_statement=filtered_select, - ) - - -class CachedDataset(SQLDataset): - """A dataset backed by a SQL table cache. - - Because this dataset includes all records from the underlying table, we also expose the - underlying table as a SQLAlchemy Table object. - """ - - def __init__(self, cache: SQLCacheBase, stream_name: str) -> None: - self._sql_table: Table = cache.get_sql_table(stream_name) - super().__init__( - cache=cache, - stream_name=stream_name, - query_statement=self._sql_table.select(), - ) - - @overrides - def to_pandas(self) -> DataFrame: - return self._cache.get_pandas_dataframe(self._stream_name) - - def to_sql_table(self) -> Table: - return self._sql_table - - def __eq__(self, value: object) -> bool: - """Return True if the value is a CachedDataset with the same cache and stream name. - - In the case of CachedDataset objects, we can simply compare the cache and stream name. - - Note that this equality check is only supported on CachedDataset objects and not for - the base SQLDataset implementation. This is because of the complexity and computational - cost of comparing two arbitrary SQL queries that could be bound to different variables, - as well as the chance that two queries can be syntactically equivalent without being - text-wise equivalent. - """ - if not isinstance(value, SQLDataset): - return False - - if self._cache is not value._cache: - return False - - if self._stream_name != value._stream_name: - return False - - return True diff --git a/airbyte-lib/airbyte_lib/exceptions.py b/airbyte-lib/airbyte_lib/exceptions.py deleted file mode 100644 index 04322c080580e..0000000000000 --- a/airbyte-lib/airbyte_lib/exceptions.py +++ /dev/null @@ -1,277 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -"""All exceptions used in the Airbyte Lib. - -This design is modeled after structlog's exceptions, in that we bias towards auto-generated -property prints rather than sentence-like string concatenation. - -E.g. Instead of this: -> Subprocess failed with exit code '1' - -We do this: -> Subprocess failed. (exit_code=1) - -The benefit of this approach is that we can easily support structured logging, and we can -easily add new properties to exceptions without having to update all the places where they -are raised. We can also support any arbitrary number of properties in exceptions, without spending -time on building sentence-like string constructions with optional inputs. - - -In addition, the following principles are applied for exception class design: - -- All exceptions inherit from a common base class. -- All exceptions have a message attribute. -- The first line of the docstring is used as the default message. -- The default message can be overridden by explicitly setting the message attribute. -- Exceptions may optionally have a guidance attribute. -- Exceptions may optionally have a help_url attribute. -- Rendering is automatically handled by the base class. -- Any helpful context not defined by the exception class can be passed in the `context` dict arg. -- Within reason, avoid sending PII to the exception constructor. -- Exceptions are dataclasses, so they can be instantiated with keyword arguments. -- Use the 'from' syntax to chain exceptions when it is helpful to do so. - E.g. `raise AirbyteConnectorNotFoundError(...) from FileNotFoundError(connector_path)` -- Any exception that adds a new property should also be decorated as `@dataclass`. -""" -from __future__ import annotations - -from dataclasses import dataclass -from textwrap import indent -from typing import Any - - -NEW_ISSUE_URL = "https://github.com/airbytehq/airbyte/issues/new/choose" -DOCS_URL = "https://docs.airbyte.io/" - - -# Base error class - - -@dataclass -class AirbyteError(Exception): - """Base class for exceptions in Airbyte.""" - - guidance: str | None = None - help_url: str | None = None - log_text: str | list[str] | None = None - context: dict[str, Any] | None = None - message: str | None = None - - def get_message(self) -> str: - """Return the best description for the exception. - - We resolve the following in order: - 1. The message sent to the exception constructor (if provided). - 2. The first line of the class's docstring. - """ - if self.message: - return self.message - - return self.__doc__.split("\n")[0] if self.__doc__ else "" - - def __str__(self) -> str: - special_properties = ["message", "guidance", "help_url", "log_text", "context"] - display_properties = { - k: v - for k, v in self.__dict__.items() - if k not in special_properties and not k.startswith("_") and v is not None - } - display_properties.update(self.context or {}) - context_str = "\n ".join( - f"{str(k).replace('_', ' ').title()}: {v!r}" for k, v in display_properties.items() - ) - exception_str = f"{self.__class__.__name__}: {self.get_message()}\n" - if context_str: - exception_str += " " + context_str - - if self.log_text: - if isinstance(self.log_text, list): - self.log_text = "\n".join(self.log_text) - - exception_str += f"\nLog output: \n {indent(self.log_text, ' ')}" - - if self.guidance: - exception_str += f"\nSuggestion: {self.guidance}" - - if self.help_url: - exception_str += f"\nMore info: {self.help_url}" - - return exception_str - - def __repr__(self) -> str: - class_name = self.__class__.__name__ - properties_str = ", ".join( - f"{k}={v!r}" for k, v in self.__dict__.items() if not k.startswith("_") - ) - return f"{class_name}({properties_str})" - - -# AirbyteLib Internal Errors (these are probably bugs) - - -@dataclass -class AirbyteLibInternalError(AirbyteError): - """An internal error occurred in Airbyte Lib.""" - - guidance = "Please consider reporting this error to the Airbyte team." - help_url = NEW_ISSUE_URL - - -# AirbyteLib Input Errors (replaces ValueError for user input) - - -@dataclass -class AirbyteLibInputError(AirbyteError, ValueError): - """The input provided to AirbyteLib did not match expected validation rules. - - This inherits from ValueError so that it can be used as a drop-in replacement for - ValueError in the Airbyte Lib API. - """ - - # TODO: Consider adding a help_url that links to the auto-generated API reference. - - guidance = "Please check the provided value and try again." - input_value: str | None = None - - -@dataclass -class AirbyteLibNoStreamsSelectedError(AirbyteLibInputError): - """No streams were selected for the source.""" - - guidance = ( - "Please call `select_streams()` to select at least one stream from the list provided. " - "You can also call `select_all_streams()` to select all available streams for this source." - ) - connector_name: str | None = None - available_streams: list[str] | None = None - - -# AirbyteLib Cache Errors - - -class AirbyteLibCacheError(AirbyteError): - """Error occurred while accessing the cache.""" - - -@dataclass -class AirbyteLibCacheTableValidationError(AirbyteLibCacheError): - """Cache table validation failed.""" - - violation: str | None = None - - -@dataclass -class AirbyteConnectorConfigurationMissingError(AirbyteLibCacheError): - """Connector is missing configuration.""" - - connector_name: str | None = None - - -# Subprocess Errors - - -@dataclass -class AirbyteSubprocessError(AirbyteError): - """Error when running subprocess.""" - - run_args: list[str] | None = None - - -@dataclass -class AirbyteSubprocessFailedError(AirbyteSubprocessError): - """Subprocess failed.""" - - exit_code: int | None = None - - -# Connector Registry Errors - - -class AirbyteConnectorRegistryError(AirbyteError): - """Error when accessing the connector registry.""" - - -@dataclass -class AirbyteConnectorNotRegisteredError(AirbyteConnectorRegistryError): - """Connector not found in registry.""" - - connector_name: str | None = None - guidance = "Please double check the connector name." - - -@dataclass -class AirbyteConnectorNotPyPiPublishedError(AirbyteConnectorRegistryError): - """Connector found, but not published to PyPI.""" - - connector_name: str | None = None - guidance = "This likely means that the connector is not ready for use with airbyte-lib." - - -# Connector Errors - - -@dataclass -class AirbyteConnectorError(AirbyteError): - """Error when running the connector.""" - - connector_name: str | None = None - - -class AirbyteConnectorExecutableNotFoundError(AirbyteConnectorError): - """Connector executable not found.""" - - -class AirbyteConnectorInstallationError(AirbyteConnectorError): - """Error when installing the connector.""" - - -class AirbyteConnectorReadError(AirbyteConnectorError): - """Error when reading from the connector.""" - - -class AirbyteNoDataFromConnectorError(AirbyteConnectorError): - """No data was provided from the connector.""" - - -class AirbyteConnectorMissingCatalogError(AirbyteConnectorError): - """Connector did not return a catalog.""" - - -class AirbyteConnectorMissingSpecError(AirbyteConnectorError): - """Connector did not return a spec.""" - - -class AirbyteConnectorCheckFailedError(AirbyteConnectorError): - """Connector check failed.""" - - guidance = ( - "Please double-check your config or review the connector's logs for more information." - ) - - -@dataclass -class AirbyteConnectorFailedError(AirbyteConnectorError): - """Connector failed.""" - - exit_code: int | None = None - - -@dataclass -class AirbyteStreamNotFoundError(AirbyteConnectorError): - """Connector stream not found.""" - - stream_name: str | None = None - available_streams: list[str] | None = None - - -@dataclass -class AirbyteLibSecretNotFoundError(AirbyteError): - """Secret not found.""" - - guidance = "Please ensure that the secret is set." - help_url = ( - "https://docs.airbyte.com/using-airbyte/airbyte-lib/getting-started#secrets-management" - ) - - secret_name: str | None = None - sources: list[str] | None = None diff --git a/airbyte-lib/airbyte_lib/progress.py b/airbyte-lib/airbyte_lib/progress.py deleted file mode 100644 index 5d9de1cfc0539..0000000000000 --- a/airbyte-lib/airbyte_lib/progress.py +++ /dev/null @@ -1,381 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -"""A simple progress bar for the command line and IPython notebooks.""" -from __future__ import annotations - -import datetime -import math -import sys -import time -from contextlib import suppress -from enum import Enum, auto -from typing import cast - -from rich.errors import LiveError -from rich.live import Live as RichLive -from rich.markdown import Markdown as RichMarkdown - - -DEFAULT_REFRESHES_PER_SECOND = 2 -IS_REPL = hasattr(sys, "ps1") # True if we're in a Python REPL, in which case we can use Rich. - -try: - IS_NOTEBOOK = True - from IPython import display as ipy_display - -except ImportError: - ipy_display = None - IS_NOTEBOOK = False - - -class ProgressStyle(Enum): - """An enum of progress bar styles.""" - - AUTO = auto() - """Automatically select the best style for the environment.""" - - RICH = auto() - """A Rich progress bar.""" - - IPYTHON = auto() - """Use IPython display methods.""" - - PLAIN = auto() - """A plain text progress print.""" - - NONE = auto() - """Skip progress prints.""" - - -MAX_UPDATE_FREQUENCY = 1000 -"""The max number of records to read before updating the progress bar.""" - - -def _to_time_str(timestamp: float) -> str: - """Convert a timestamp float to a local time string. - - For now, we'll just use UTC to avoid breaking tests. In the future, we should - return a local time string. - """ - datetime_obj = datetime.datetime.fromtimestamp(timestamp, tz=datetime.timezone.utc) - # TODO: Uncomment this line when we can get tests to properly account for local timezones. - # For now, we'll just use UTC to avoid breaking tests. - # datetime_obj = datetime_obj.astimezone() - return datetime_obj.strftime("%H:%M:%S") - - -def _get_elapsed_time_str(seconds: int) -> str: - """Return duration as a string. - - Seconds are included until 10 minutes is exceeded. - Minutes are always included after 1 minute elapsed. - Hours are always included after 1 hour elapsed. - """ - if seconds <= 60: # noqa: PLR2004 # Magic numbers OK here. - return f"{seconds} seconds" - - if seconds < 60 * 10: - minutes = seconds // 60 - seconds = seconds % 60 - return f"{minutes}min {seconds}s" - - if seconds < 60 * 60: - minutes = seconds // 60 - seconds = seconds % 60 - return f"{minutes}min" - - hours = seconds // (60 * 60) - minutes = (seconds % (60 * 60)) // 60 - return f"{hours}hr {minutes}min" - - -class ReadProgress: - """A simple progress bar for the command line and IPython notebooks.""" - - def __init__( - self, - style: ProgressStyle = ProgressStyle.AUTO, - ) -> None: - """Initialize the progress tracker.""" - # Streams expected (for progress bar) - self.num_streams_expected = 0 - - # Reads - self.read_start_time = time.time() - self.read_end_time: float | None = None - self.total_records_read = 0 - - # Writes - self.total_records_written = 0 - self.total_batches_written = 0 - self.written_stream_names: set[str] = set() - - # Finalization - self.finalize_start_time: float | None = None - self.finalize_end_time: float | None = None - self.total_records_finalized = 0 - self.total_batches_finalized = 0 - self.finalized_stream_names: set[str] = set() - - self.last_update_time: float | None = None - - self._rich_view: RichLive | None = None - self.style: ProgressStyle = style - if self.style == ProgressStyle.AUTO: - self.style = ProgressStyle.PLAIN - if IS_NOTEBOOK: - self.style = ProgressStyle.IPYTHON - - elif IS_REPL: - self.style = ProgressStyle.PLAIN - - else: - # Test for Rich availability: - self._rich_view = RichLive() - try: - self._rich_view.start() - self._rich_view.stop() - self._rich_view = None - self.style = ProgressStyle.RICH - except LiveError: - # Rich live view not available. Using plain text progress. - self._rich_view = None - self.style = ProgressStyle.PLAIN - - def _start(self) -> None: - """Start the progress bar.""" - if self.style == ProgressStyle.RICH and not self._rich_view: - self._rich_view = RichLive( - auto_refresh=True, - refresh_per_second=DEFAULT_REFRESHES_PER_SECOND, - ) - self._rich_view.start() - - def _stop(self) -> None: - """Stop the progress bar.""" - if self._rich_view: - with suppress(Exception): - self._rich_view.stop() - self._rich_view = None - - def __del__(self) -> None: - """Close the Rich view.""" - self._stop() - - def log_success(self) -> None: - """Log success and stop tracking progress.""" - if self.finalize_end_time is None: - # If we haven't already finalized, do so now. - - self.finalize_end_time = time.time() - - self.update_display(force_refresh=True) - self._stop() - - def reset(self, num_streams_expected: int) -> None: - """Reset the progress tracker.""" - # Streams expected (for progress bar) - self.num_streams_expected = num_streams_expected - - # Reads - self.read_start_time = time.time() - self.read_end_time = None - self.total_records_read = 0 - - # Writes - self.total_records_written = 0 - self.total_batches_written = 0 - self.written_stream_names = set() - - # Finalization - self.finalize_start_time = None - self.finalize_end_time = None - self.total_records_finalized = 0 - self.total_batches_finalized = 0 - self.finalized_stream_names = set() - - self._start() - - @property - def elapsed_seconds(self) -> int: - """Return the number of seconds elapsed since the read operation started.""" - if self.finalize_end_time: - return int(self.finalize_end_time - self.read_start_time) - - return int(time.time() - self.read_start_time) - - @property - def elapsed_time_string(self) -> str: - """Return duration as a string.""" - return _get_elapsed_time_str(self.elapsed_seconds) - - @property - def elapsed_seconds_since_last_update(self) -> float | None: - """Return the number of seconds elapsed since the last update.""" - if self.last_update_time is None: - return None - - return time.time() - self.last_update_time - - @property - def elapsed_read_seconds(self) -> int: - """Return the number of seconds elapsed since the read operation started.""" - if self.read_end_time is None: - return int(time.time() - self.read_start_time) - - return int(self.read_end_time - self.read_start_time) - - @property - def elapsed_read_time_string(self) -> str: - """Return duration as a string.""" - return _get_elapsed_time_str(self.elapsed_read_seconds) - - @property - def elapsed_finalization_seconds(self) -> int: - """Return the number of seconds elapsed since the read operation started.""" - if self.finalize_start_time is None: - return 0 - if self.finalize_end_time is None: - return int(time.time() - self.finalize_start_time) - return int(self.finalize_end_time - self.finalize_start_time) - - @property - def elapsed_finalization_time_str(self) -> str: - """Return duration as a string.""" - return _get_elapsed_time_str(self.elapsed_finalization_seconds) - - def log_records_read(self, new_total_count: int) -> None: - """Load a number of records read.""" - self.total_records_read = new_total_count - - # This is some math to make updates adaptive to the scale of records read. - # We want to update the display more often when the count is low, and less - # often when the count is high. - updated_period = min( - MAX_UPDATE_FREQUENCY, 10 ** math.floor(math.log10(max(self.total_records_read, 1)) / 4) - ) - if self.total_records_read % updated_period != 0: - return - - self.update_display() - - def log_batch_written(self, stream_name: str, batch_size: int) -> None: - """Log that a batch has been written. - - Args: - stream_name: The name of the stream. - batch_size: The number of records in the batch. - """ - self.total_records_written += batch_size - self.total_batches_written += 1 - self.written_stream_names.add(stream_name) - self.update_display() - - def log_batches_finalizing(self, stream_name: str, num_batches: int) -> None: - """Log that batch are ready to be finalized. - - In our current implementation, we ignore the stream name and number of batches. - We just use this as a signal that we're finished reading and have begun to - finalize any accumulated batches. - """ - _ = stream_name, num_batches # unused for now - if self.finalize_start_time is None: - self.read_end_time = time.time() - self.finalize_start_time = self.read_end_time - - self.update_display(force_refresh=True) - - def log_batches_finalized(self, stream_name: str, num_batches: int) -> None: - """Log that a batch has been finalized.""" - _ = stream_name # unused for now - self.total_batches_finalized += num_batches - self.update_display(force_refresh=True) - - def log_stream_finalized(self, stream_name: str) -> None: - """Log that a stream has been finalized.""" - self.finalized_stream_names.add(stream_name) - self.update_display(force_refresh=True) - if len(self.finalized_stream_names) == self.num_streams_expected: - self.log_success() - - def update_display(self, *, force_refresh: bool = False) -> None: - """Update the display.""" - # Don't update more than twice per second unless force_refresh is True. - if ( - not force_refresh - and self.last_update_time # if not set, then we definitely need to update - and cast(float, self.elapsed_seconds_since_last_update) < 0.5 # noqa: PLR2004 - ): - return - - status_message = self._get_status_message() - - if self.style == ProgressStyle.IPYTHON: - # We're in a notebook so use the IPython display. - ipy_display.clear_output(wait=True) - ipy_display.display(ipy_display.Markdown(status_message)) - - elif self.style == ProgressStyle.RICH and self._rich_view is not None: - self._rich_view.update(RichMarkdown(status_message)) - - elif self.style == ProgressStyle.PLAIN: - # TODO: Add a plain text progress print option that isn't too noisy. - pass - - elif self.style == ProgressStyle.NONE: - pass - - self.last_update_time = time.time() - - def _get_status_message(self) -> str: - """Compile and return a status message.""" - # Format start time as a friendly string in local timezone: - start_time_str = _to_time_str(self.read_start_time) - records_per_second: float = 0.0 - if self.elapsed_read_seconds > 0: - records_per_second = round(self.total_records_read / self.elapsed_read_seconds, 1) - status_message = ( - f"## Read Progress\n\n" - f"Started reading at {start_time_str}.\n\n" - f"Read **{self.total_records_read:,}** records " - f"over **{self.elapsed_read_time_string}** " - f"({records_per_second:,} records / second).\n\n" - ) - if self.total_records_written > 0: - status_message += ( - f"Wrote **{self.total_records_written:,}** records " - f"over {self.total_batches_written:,} batches.\n\n" - ) - if self.read_end_time is not None: - read_end_time_str = _to_time_str(self.read_end_time) - status_message += f"Finished reading at {read_end_time_str}.\n\n" - if self.finalize_start_time is not None: - finalize_start_time_str = _to_time_str(self.finalize_start_time) - status_message += f"Started finalizing streams at {finalize_start_time_str}.\n\n" - status_message += ( - f"Finalized **{self.total_batches_finalized}** batches " - f"over {self.elapsed_finalization_time_str}.\n\n" - ) - if self.finalized_stream_names: - status_message += ( - f"Completed {len(self.finalized_stream_names)} " - + (f"out of {self.num_streams_expected} " if self.num_streams_expected else "") - + "streams:\n\n" - ) - for stream_name in self.finalized_stream_names: - status_message += f" - {stream_name}\n" - - status_message += "\n\n" - - if self.finalize_end_time is not None: - completion_time_str = _to_time_str(self.finalize_end_time) - status_message += ( - f"Completed writing at {completion_time_str}. " - f"Total time elapsed: {self.elapsed_time_string}\n\n" - ) - status_message += "\n------------------------------------------------\n" - - return status_message - - -progress = ReadProgress() diff --git a/airbyte-lib/airbyte_lib/registry.py b/airbyte-lib/airbyte_lib/registry.py deleted file mode 100644 index bab4a87f034fc..0000000000000 --- a/airbyte-lib/airbyte_lib/registry.py +++ /dev/null @@ -1,117 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -from __future__ import annotations - -import json -import os -from copy import copy -from dataclasses import dataclass -from pathlib import Path - -import requests - -from airbyte_lib import exceptions as exc -from airbyte_lib.version import get_version - - -__cache: dict[str, ConnectorMetadata] | None = None - - -REGISTRY_ENV_VAR = "AIRBYTE_LOCAL_REGISTRY" -REGISTRY_URL = "https://connectors.airbyte.com/files/registries/v0/oss_registry.json" - - -@dataclass -class ConnectorMetadata: - name: str - latest_available_version: str - pypi_package_name: str | None - - -def _get_registry_url() -> str: - if REGISTRY_ENV_VAR in os.environ: - return str(os.environ.get(REGISTRY_ENV_VAR)) - - return REGISTRY_URL - - -def _registry_entry_to_connector_metadata(entry: dict) -> ConnectorMetadata: - name = entry["dockerRepository"].replace("airbyte/", "") - remote_registries: dict = entry.get("remoteRegistries", {}) - pypi_registry: dict = remote_registries.get("pypi", {}) - pypi_package_name: str = pypi_registry.get("packageName", None) - pypi_enabled: bool = pypi_registry.get("enabled", False) - return ConnectorMetadata( - name=name, - latest_available_version=entry["dockerImageTag"], - pypi_package_name=pypi_package_name if pypi_enabled else None, - ) - - -def _get_registry_cache(*, force_refresh: bool = False) -> dict[str, ConnectorMetadata]: - """Return the registry cache.""" - global __cache - if __cache and not force_refresh: - return __cache - - registry_url = _get_registry_url() - if registry_url.startswith("http"): - response = requests.get( - registry_url, headers={"User-Agent": f"airbyte-lib-{get_version()}"} - ) - response.raise_for_status() - data = response.json() - else: - # Assume local file - with Path(registry_url).open() as f: - data = json.load(f) - - new_cache: dict[str, ConnectorMetadata] = {} - - for connector in data["sources"]: - connector_metadata = _registry_entry_to_connector_metadata(connector) - new_cache[connector_metadata.name] = connector_metadata - - if len(new_cache) == 0: - raise exc.AirbyteLibInternalError( - message="Connector registry is empty.", - context={ - "registry_url": _get_registry_url(), - }, - ) - - __cache = new_cache - return __cache - - -def get_connector_metadata(name: str) -> ConnectorMetadata: - """Check the cache for the connector. - - If the cache is empty, populate by calling update_cache. - """ - cache = copy(_get_registry_cache()) - if not cache: - raise exc.AirbyteLibInternalError( - message="Connector registry could not be loaded.", - context={ - "registry_url": _get_registry_url(), - }, - ) - if name not in cache: - raise exc.AirbyteConnectorNotRegisteredError( - connector_name=name, - context={ - "registry_url": _get_registry_url(), - "available_connectors": get_available_connectors(), - }, - ) - return cache[name] - - -def get_available_connectors() -> list[str]: - """Return a list of all available connectors. - - Connectors will be returned in alphabetical order, with the standard prefix "source-". - """ - return sorted( - conn.name for conn in _get_registry_cache().values() if conn.pypi_package_name is not None - ) diff --git a/airbyte-lib/airbyte_lib/results.py b/airbyte-lib/airbyte_lib/results.py deleted file mode 100644 index 5c5021fc8afc3..0000000000000 --- a/airbyte-lib/airbyte_lib/results.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -from __future__ import annotations - -from collections.abc import Mapping -from typing import TYPE_CHECKING - -from airbyte_lib.datasets import CachedDataset - - -if TYPE_CHECKING: - from collections.abc import Iterator - - from sqlalchemy.engine import Engine - - from airbyte_lib.caches import SQLCacheBase - - -class ReadResult(Mapping[str, CachedDataset]): - def __init__( - self, processed_records: int, cache: SQLCacheBase, processed_streams: list[str] - ) -> None: - self.processed_records = processed_records - self._cache = cache - self._processed_streams = processed_streams - - def __getitem__(self, stream: str) -> CachedDataset: - if stream not in self._processed_streams: - raise KeyError(stream) - - return CachedDataset(self._cache, stream) - - def __contains__(self, stream: object) -> bool: - if not isinstance(stream, str): - return False - - return stream in self._processed_streams - - def __iter__(self) -> Iterator[str]: - return self._processed_streams.__iter__() - - def __len__(self) -> int: - return len(self._processed_streams) - - def get_sql_engine(self) -> Engine: - return self._cache.get_sql_engine() - - @property - def streams(self) -> Mapping[str, CachedDataset]: - return { - stream_name: CachedDataset(self._cache, stream_name) - for stream_name in self._processed_streams - } - - @property - def cache(self) -> SQLCacheBase: - return self._cache diff --git a/airbyte-lib/airbyte_lib/secrets.py b/airbyte-lib/airbyte_lib/secrets.py deleted file mode 100644 index 6aea9f163d2fc..0000000000000 --- a/airbyte-lib/airbyte_lib/secrets.py +++ /dev/null @@ -1,128 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -"""Secrets management for AirbyteLib.""" -from __future__ import annotations - -import contextlib -import os -from enum import Enum, auto -from getpass import getpass -from typing import TYPE_CHECKING - -from dotenv import dotenv_values - -from airbyte_lib import exceptions as exc - - -if TYPE_CHECKING: - from collections.abc import Callable - - -try: - from google.colab import userdata as colab_userdata -except ImportError: - colab_userdata = None - - -class SecretSource(Enum): - ENV = auto() - DOTENV = auto() - GOOGLE_COLAB = auto() - ANY = auto() - - PROMPT = auto() - - -def _get_secret_from_env( - secret_name: str, -) -> str | None: - if secret_name not in os.environ: - return None - - return os.environ[secret_name] - - -def _get_secret_from_dotenv( - secret_name: str, -) -> str | None: - try: - dotenv_vars: dict[str, str | None] = dotenv_values() - except Exception: - # Can't locate or parse a .env file - return None - - if secret_name not in dotenv_vars: - # Secret not found - return None - - return dotenv_vars[secret_name] - - -def _get_secret_from_colab( - secret_name: str, -) -> str | None: - if colab_userdata is None: - # The module doesn't exist. We probably aren't in Colab. - return None - - try: - return colab_userdata.get(secret_name) - except Exception: - # Secret name not found. Continue. - return None - - -def _get_secret_from_prompt( - secret_name: str, -) -> str | None: - with contextlib.suppress(Exception): - return getpass(f"Enter the value for secret '{secret_name}': ") - - return None - - -_SOURCE_FUNCTIONS: dict[SecretSource, Callable] = { - SecretSource.ENV: _get_secret_from_env, - SecretSource.DOTENV: _get_secret_from_dotenv, - SecretSource.GOOGLE_COLAB: _get_secret_from_colab, - SecretSource.PROMPT: _get_secret_from_prompt, -} - - -def get_secret( - secret_name: str, - source: SecretSource | list[SecretSource] = SecretSource.ANY, - *, - prompt: bool = True, -) -> str: - """Get a secret from the environment. - - The optional `source` argument of enum type `SecretSource` or list of `SecretSource` options. - If left blank, the `source` arg will be `SecretSource.ANY`. If `source` is set to a specific - source, then only that source will be checked. If a list of `SecretSource` entries is passed, - then the sources will be checked using the provided ordering. - - If `prompt` to `True` or if SecretSource.PROMPT is declared in the `source` arg, then the - user will be prompted to enter the secret if it is not found in any of the other sources. - """ - sources = [source] if not isinstance(source, list) else source - all_sources = set(_SOURCE_FUNCTIONS.keys()) - {SecretSource.PROMPT} - if SecretSource.ANY in sources: - sources += [s for s in all_sources if s not in sources] - sources.remove(SecretSource.ANY) - - if prompt or SecretSource.PROMPT in sources: - if SecretSource.PROMPT in sources: - sources.remove(SecretSource.PROMPT) - - sources.append(SecretSource.PROMPT) # Always check prompt last - - for source in sources: - fn = _SOURCE_FUNCTIONS[source] # Get the matching function for this source - val = fn(secret_name) - if val: - return val - - raise exc.AirbyteLibSecretNotFoundError( - secret_name=secret_name, - sources=[str(s) for s in sources], - ) diff --git a/airbyte-lib/airbyte_lib/source.py b/airbyte-lib/airbyte_lib/source.py deleted file mode 100644 index 47beb0ee0aa0f..0000000000000 --- a/airbyte-lib/airbyte_lib/source.py +++ /dev/null @@ -1,582 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -from __future__ import annotations - -import json -import tempfile -import warnings -from contextlib import contextmanager, suppress -from typing import TYPE_CHECKING, Any - -import jsonschema -import pendulum -import yaml -from rich import print - -from airbyte_protocol.models import ( - AirbyteCatalog, - AirbyteMessage, - AirbyteStateMessage, - ConfiguredAirbyteCatalog, - ConfiguredAirbyteStream, - ConnectorSpecification, - DestinationSyncMode, - Status, - SyncMode, - TraceType, - Type, -) - -from airbyte_lib import exceptions as exc -from airbyte_lib._factories.cache_factories import get_default_cache -from airbyte_lib._util import protocol_util -from airbyte_lib._util.text_util import lower_case_set # Internal utility functions -from airbyte_lib.datasets._lazy import LazyDataset -from airbyte_lib.progress import progress -from airbyte_lib.results import ReadResult -from airbyte_lib.strategies import WriteStrategy -from airbyte_lib.telemetry import ( - CacheTelemetryInfo, - SyncState, - send_telemetry, - streaming_cache_info, -) - - -if TYPE_CHECKING: - from collections.abc import Generator, Iterable, Iterator - - from airbyte_lib._executor import Executor - from airbyte_lib.caches import SQLCacheBase - - -@contextmanager -def as_temp_files(files_contents: list[Any]) -> Generator[list[str], Any, None]: - """Write the given contents to temporary files and yield the file paths as strings.""" - temp_files: list[Any] = [] - try: - for content in files_contents: - temp_file = tempfile.NamedTemporaryFile(mode="w+t", delete=True) - temp_file.write( - json.dumps(content) if isinstance(content, dict) else content, - ) - temp_file.flush() - temp_files.append(temp_file) - yield [file.name for file in temp_files] - finally: - for temp_file in temp_files: - with suppress(Exception): - temp_file.close() - - -class Source: - """A class representing a source that can be called.""" - - def __init__( - self, - executor: Executor, - name: str, - config: dict[str, Any] | None = None, - streams: list[str] | None = None, - *, - validate: bool = False, - ) -> None: - """Initialize the source. - - If config is provided, it will be validated against the spec if validate is True. - """ - self.executor = executor - self.name = name - self._processed_records = 0 - self._config_dict: dict[str, Any] | None = None - self._last_log_messages: list[str] = [] - self._discovered_catalog: AirbyteCatalog | None = None - self._spec: ConnectorSpecification | None = None - self._selected_stream_names: list[str] = [] - if config is not None: - self.set_config(config, validate=validate) - if streams is not None: - self.set_streams(streams) - - def set_streams(self, streams: list[str]) -> None: - """Deprecated. See select_streams().""" - warnings.warn( - "The 'set_streams' method is deprecated and will be removed in a future version. " - "Please use the 'select_streams' method instead.", - DeprecationWarning, - stacklevel=2, - ) - self.select_streams(streams) - - def select_all_streams(self) -> None: - """Select all streams. - - This is a more streamlined equivalent to: - > source.select_streams(source.get_available_streams()). - """ - self._selected_stream_names = self.get_available_streams() - - def select_streams(self, streams: list[str]) -> None: - """Select the stream names that should be read from the connector. - - Currently, if this is not set, all streams will be read. - """ - available_streams = self.get_available_streams() - for stream in streams: - if stream not in available_streams: - raise exc.AirbyteStreamNotFoundError( - stream_name=stream, - connector_name=self.name, - available_streams=available_streams, - ) - self._selected_stream_names = streams - - def get_selected_streams(self) -> list[str]: - """Get the selected streams. - - If no streams are selected, return an empty list. - """ - return self._selected_stream_names - - def set_config( - self, - config: dict[str, Any], - *, - validate: bool = False, - ) -> None: - """Set the config for the connector. - - If validate is True, raise an exception if the config fails validation. - - If validate is False, validation will be deferred until check() or validate_config() - is called. - """ - if validate: - self.validate_config(config) - - self._config_dict = config - - def get_config(self) -> dict[str, Any]: - """Get the config for the connector.""" - return self._config - - @property - def _config(self) -> dict[str, Any]: - if self._config_dict is None: - raise exc.AirbyteConnectorConfigurationMissingError( - guidance="Provide via get_source() or set_config()" - ) - return self._config_dict - - def _discover(self) -> AirbyteCatalog: - """Call discover on the connector. - - This involves the following steps: - * Write the config to a temporary file - * execute the connector with discover --config - * Listen to the messages and return the first AirbyteCatalog that comes along. - * Make sure the subprocess is killed when the function returns. - """ - with as_temp_files([self._config]) as [config_file]: - for msg in self._execute(["discover", "--config", config_file]): - if msg.type == Type.CATALOG and msg.catalog: - return msg.catalog - raise exc.AirbyteConnectorMissingCatalogError( - log_text=self._last_log_messages, - ) - - def validate_config(self, config: dict[str, Any] | None = None) -> None: - """Validate the config against the spec. - - If config is not provided, the already-set config will be validated. - """ - spec = self._get_spec(force_refresh=False) - config = self._config if config is None else config - jsonschema.validate(config, spec.connectionSpecification) - - def get_available_streams(self) -> list[str]: - """Get the available streams from the spec.""" - return [s.name for s in self.discovered_catalog.streams] - - def _get_spec(self, *, force_refresh: bool = False) -> ConnectorSpecification: - """Call spec on the connector. - - This involves the following steps: - * execute the connector with spec - * Listen to the messages and return the first AirbyteCatalog that comes along. - * Make sure the subprocess is killed when the function returns. - """ - if force_refresh or self._spec is None: - for msg in self._execute(["spec"]): - if msg.type == Type.SPEC and msg.spec: - self._spec = msg.spec - break - - if self._spec: - return self._spec - - raise exc.AirbyteConnectorMissingSpecError( - log_text=self._last_log_messages, - ) - - @property - def _yaml_spec(self) -> str: - """Get the spec as a yaml string. - - For now, the primary use case is for writing and debugging a valid config for a source. - - This is private for now because we probably want better polish before exposing this - as a stable interface. This will also get easier when we have docs links with this info - for each connector. - """ - spec_obj: ConnectorSpecification = self._get_spec() - spec_dict = spec_obj.dict(exclude_unset=True) - # convert to a yaml string - return yaml.dump(spec_dict) - - @property - def docs_url(self) -> str: - """Get the URL to the connector's documentation.""" - # TODO: Replace with docs URL from metadata when available - return "https://docs.airbyte.com/integrations/sources/" + self.name.lower().replace( - "source-", "" - ) - - @property - def discovered_catalog(self) -> AirbyteCatalog: - """Get the raw catalog for the given streams. - - If the catalog is not yet known, we call discover to get it. - """ - if self._discovered_catalog is None: - self._discovered_catalog = self._discover() - - return self._discovered_catalog - - @property - def configured_catalog(self) -> ConfiguredAirbyteCatalog: - """Get the configured catalog for the given streams. - - If the raw catalog is not yet known, we call discover to get it. - - If no specific streams are selected, we return a catalog that syncs all available streams. - - TODO: We should consider disabling by default the streams that the connector would - disable by default. (For instance, streams that require a premium license are sometimes - disabled by default within the connector.) - """ - # Ensure discovered catalog is cached before we start - _ = self.discovered_catalog - - # Filter for selected streams if set, otherwise use all available streams: - streams_filter: list[str] = self._selected_stream_names or self.get_available_streams() - - return ConfiguredAirbyteCatalog( - streams=[ - ConfiguredAirbyteStream( - stream=stream, - destination_sync_mode=DestinationSyncMode.overwrite, - primary_key=stream.source_defined_primary_key, - # TODO: The below assumes all sources can coalesce from incremental sync to - # full_table as needed. CDK supports this, so it might be safe: - sync_mode=SyncMode.incremental, - ) - for stream in self.discovered_catalog.streams - if stream.name in streams_filter - ], - ) - - def get_records(self, stream: str) -> LazyDataset: - """Read a stream from the connector. - - This involves the following steps: - * Call discover to get the catalog - * Generate a configured catalog that syncs the given stream in full_refresh mode - * Write the configured catalog and the config to a temporary file - * execute the connector with read --config --catalog - * Listen to the messages and return the first AirbyteRecordMessages that come along. - * Make sure the subprocess is killed when the function returns. - """ - catalog = self._discover() - configured_catalog = ConfiguredAirbyteCatalog( - streams=[ - ConfiguredAirbyteStream( - stream=s, - sync_mode=SyncMode.full_refresh, - destination_sync_mode=DestinationSyncMode.overwrite, - ) - for s in catalog.streams - if s.name == stream - ], - ) - if len(configured_catalog.streams) == 0: - raise exc.AirbyteLibInputError( - message="Requested stream does not exist.", - context={ - "stream": stream, - "available_streams": self.get_available_streams(), - "connector_name": self.name, - }, - ) from KeyError(stream) - - configured_stream = configured_catalog.streams[0] - all_properties = set(configured_stream.stream.json_schema["properties"].keys()) - - def _with_missing_columns(records: Iterable[dict[str, Any]]) -> Iterator[dict[str, Any]]: - """Add missing columns to the record with null values.""" - for record in records: - existing_properties_lower = lower_case_set(record.keys()) - appended_dict = { - prop: None - for prop in all_properties - if prop.lower() not in existing_properties_lower - } - yield {**record, **appended_dict} - - iterator: Iterator[dict[str, Any]] = _with_missing_columns( - protocol_util.airbyte_messages_to_record_dicts( - self._read_with_catalog( - streaming_cache_info, - configured_catalog, - ), - ) - ) - return LazyDataset(iterator) - - def check(self) -> None: - """Call check on the connector. - - This involves the following steps: - * Write the config to a temporary file - * execute the connector with check --config - * Listen to the messages and return the first AirbyteCatalog that comes along. - * Make sure the subprocess is killed when the function returns. - """ - with as_temp_files([self._config]) as [config_file]: - try: - for msg in self._execute(["check", "--config", config_file]): - if msg.type == Type.CONNECTION_STATUS and msg.connectionStatus: - if msg.connectionStatus.status != Status.FAILED: - print(f"Connection check succeeded for `{self.name}`.") - return - - raise exc.AirbyteConnectorCheckFailedError( - help_url=self.docs_url, - context={ - "failure_reason": msg.connectionStatus.message, - }, - ) - raise exc.AirbyteConnectorCheckFailedError(log_text=self._last_log_messages) - except exc.AirbyteConnectorReadError as ex: - raise exc.AirbyteConnectorCheckFailedError( - message="The connector failed to check the connection.", - log_text=ex.log_text, - ) from ex - - def install(self) -> None: - """Install the connector if it is not yet installed.""" - self.executor.install() - print("For configuration instructions, see: \n" f"{self.docs_url}#reference\n") - - def uninstall(self) -> None: - """Uninstall the connector if it is installed. - - This only works if the use_local_install flag wasn't used and installation is managed by - airbyte-lib. - """ - self.executor.uninstall() - - def _read( - self, - cache_info: CacheTelemetryInfo, - state: list[AirbyteStateMessage] | None = None, - ) -> Iterable[AirbyteMessage]: - """ - Call read on the connector. - - This involves the following steps: - * Call discover to get the catalog - * Generate a configured catalog that syncs all streams in full_refresh mode - * Write the configured catalog and the config to a temporary file - * execute the connector with read --config --catalog - * Listen to the messages and return the AirbyteMessage that come along. - """ - # Ensure discovered and configured catalog properties are cached before we start reading - _ = self.discovered_catalog - _ = self.configured_catalog - yield from self._read_with_catalog( - cache_info, - catalog=self.configured_catalog, - state=state, - ) - - def _read_with_catalog( - self, - cache_info: CacheTelemetryInfo, - catalog: ConfiguredAirbyteCatalog, - state: list[AirbyteStateMessage] | None = None, - ) -> Iterator[AirbyteMessage]: - """Call read on the connector. - - This involves the following steps: - * Write the config to a temporary file - * execute the connector with read --config --catalog - * Listen to the messages and return the AirbyteRecordMessages that come along. - * Send out telemetry on the performed sync (with information about which source was used and - the type of the cache) - """ - source_tracking_information = self.executor.get_telemetry_info() - send_telemetry(source_tracking_information, cache_info, SyncState.STARTED) - sync_failed = False - self._processed_records = 0 # Reset the counter before we start - try: - with as_temp_files( - [self._config, catalog.json(), json.dumps(state) if state else "[]"] - ) as [ - config_file, - catalog_file, - state_file, - ]: - yield from self._execute( - [ - "read", - "--config", - config_file, - "--catalog", - catalog_file, - "--state", - state_file, - ], - ) - except Exception: - send_telemetry( - source_tracking_information, cache_info, SyncState.FAILED, self._processed_records - ) - sync_failed = True - raise - finally: - if not sync_failed: - send_telemetry( - source_tracking_information, - cache_info, - SyncState.SUCCEEDED, - self._processed_records, - ) - - def _add_to_logs(self, message: str) -> None: - self._last_log_messages.append(message) - self._last_log_messages = self._last_log_messages[-10:] - - def _execute(self, args: list[str]) -> Iterator[AirbyteMessage]: - """Execute the connector with the given arguments. - - This involves the following steps: - * Locate the right venv. It is called ".venv-" - * Spawn a subprocess with .venv-/bin/ - * Read the output line by line of the subprocess and serialize them AirbyteMessage objects. - Drop if not valid. - """ - # Fail early if the connector is not installed. - self.executor.ensure_installation(auto_fix=False) - - try: - self._last_log_messages = [] - for line in self.executor.execute(args): - try: - message = AirbyteMessage.parse_raw(line) - if message.type is Type.RECORD: - self._processed_records += 1 - if message.type == Type.LOG: - self._add_to_logs(message.log.message) - if message.type == Type.TRACE and message.trace.type == TraceType.ERROR: - self._add_to_logs(message.trace.error.message) - yield message - except Exception: - self._add_to_logs(line) - except Exception as e: - raise exc.AirbyteConnectorReadError( - log_text=self._last_log_messages, - ) from e - - def _tally_records( - self, - messages: Iterable[AirbyteMessage], - ) -> Generator[AirbyteMessage, Any, None]: - """This method simply tallies the number of records processed and yields the messages.""" - self._processed_records = 0 # Reset the counter before we start - progress.reset(len(self._selected_stream_names or [])) - - for message in messages: - yield message - progress.log_records_read(self._processed_records) - - def read( - self, - cache: SQLCacheBase | None = None, - *, - write_strategy: str | WriteStrategy = WriteStrategy.AUTO, - force_full_refresh: bool = False, - ) -> ReadResult: - """Read from the connector and write to the cache. - - Args: - cache: The cache to write to. If None, a default cache will be used. - write_strategy: The strategy to use when writing to the cache. If a string, it must be - one of "append", "upsert", "replace", or "auto". If a WriteStrategy, it must be one - of WriteStrategy.APPEND, WriteStrategy.UPSERT, WriteStrategy.REPLACE, or - WriteStrategy.AUTO. - force_full_refresh: If True, the source will operate in full refresh mode. Otherwise, - streams will be read in incremental mode if supported by the connector. This option - must be True when using the "replace" strategy. - """ - if write_strategy == WriteStrategy.REPLACE and not force_full_refresh: - raise exc.AirbyteLibInputError( - message="The replace strategy requires full refresh mode.", - context={ - "write_strategy": write_strategy, - "force_full_refresh": force_full_refresh, - }, - ) - if cache is None: - cache = get_default_cache() - - if isinstance(write_strategy, str): - try: - write_strategy = WriteStrategy(write_strategy) - except ValueError: - raise exc.AirbyteLibInputError( - message="Invalid strategy", - context={ - "write_strategy": write_strategy, - "available_strategies": [s.value for s in WriteStrategy], - }, - ) from None - - if not self._selected_stream_names: - raise exc.AirbyteLibNoStreamsSelectedError( - connector_name=self.name, - available_streams=self.get_available_streams(), - ) - - cache.register_source( - source_name=self.name, - incoming_source_catalog=self.configured_catalog, - stream_names=set(self._selected_stream_names), - ) - state = cache.get_state() if not force_full_refresh else None - print(f"Started `{self.name}` read operation at {pendulum.now().format('HH:mm:ss')}...") - cache.process_airbyte_messages( - self._tally_records( - self._read( - cache.get_telemetry_info(), - state=state, - ), - ), - write_strategy=write_strategy, - ) - print(f"Completed `{self.name}` read operation at {pendulum.now().format('HH:mm:ss')}.") - - return ReadResult( - processed_records=self._processed_records, - cache=cache, - processed_streams=[stream.stream.name for stream in self.configured_catalog.streams], - ) diff --git a/airbyte-lib/airbyte_lib/strategies.py b/airbyte-lib/airbyte_lib/strategies.py deleted file mode 100644 index 4d0b75a06590a..0000000000000 --- a/airbyte-lib/airbyte_lib/strategies.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -"""Read and write strategies for AirbyteLib.""" -from __future__ import annotations - -from enum import Enum - - -class WriteStrategy(str, Enum): - """Read strategies for AirbyteLib.""" - - MERGE = "merge" - """Merge new records with existing records. - - This requires a primary key to be set on the stream. - If no primary key is set, this will raise an exception. - - To apply this strategy in cases where some destination streams don't have a primary key, - please use the `auto` strategy instead. - """ - - APPEND = "append" - """Append new records to existing records.""" - - REPLACE = "replace" - """Replace existing records with new records.""" - - AUTO = "auto" - """Automatically determine the best strategy to use. - - This will use the following logic: - - If there's a primary key, use merge. - - Else, if there's an incremental key, use append. - - Else, use full replace (table swap). - """ diff --git a/airbyte-lib/airbyte_lib/telemetry.py b/airbyte-lib/airbyte_lib/telemetry.py deleted file mode 100644 index 4e929d6d9d327..0000000000000 --- a/airbyte-lib/airbyte_lib/telemetry.py +++ /dev/null @@ -1,78 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -from __future__ import annotations - -import datetime -import os -from contextlib import suppress -from dataclasses import asdict, dataclass -from enum import Enum -from typing import Any - -import requests - -from airbyte_lib.version import get_version - - -TRACKING_KEY = os.environ.get("AIRBYTE_TRACKING_KEY", "") or "cukeSffc0G6gFQehKDhhzSurDzVSZ2OP" - - -class SourceType(str, Enum): - VENV = "venv" - LOCAL_INSTALL = "local_install" - - -@dataclass -class CacheTelemetryInfo: - type: str - - -streaming_cache_info = CacheTelemetryInfo("streaming") - - -class SyncState(str, Enum): - STARTED = "started" - FAILED = "failed" - SUCCEEDED = "succeeded" - - -@dataclass -class SourceTelemetryInfo: - name: str - type: SourceType - version: str | None - - -def send_telemetry( - source_info: SourceTelemetryInfo, - cache_info: CacheTelemetryInfo, - state: SyncState, - number_of_records: int | None = None, -) -> None: - # If DO_NOT_TRACK is set, we don't send any telemetry - if os.environ.get("DO_NOT_TRACK"): - return - - current_time: str = datetime.datetime.utcnow().isoformat() # noqa: DTZ003 # prefer now() over utcnow() - payload: dict[str, Any] = { - "anonymousId": "airbyte-lib-user", - "event": "sync", - "properties": { - "version": get_version(), - "source": asdict(source_info), - "state": state, - "cache": asdict(cache_info), - # explicitly set to 0.0.0.0 to avoid leaking IP addresses - "ip": "0.0.0.0", - "flags": { - "CI": bool(os.environ.get("CI")), - }, - }, - "timestamp": current_time, - } - if number_of_records is not None: - payload["properties"]["number_of_records"] = number_of_records - - # Suppress exceptions if host is unreachable or network is unavailable - with suppress(Exception): - # Do not handle the response, we don't want to block the execution - _ = requests.post("https://api.segment.io/v1/track", auth=(TRACKING_KEY, ""), json=payload) diff --git a/airbyte-lib/airbyte_lib/types.py b/airbyte-lib/airbyte_lib/types.py deleted file mode 100644 index a95dbf59d68e9..0000000000000 --- a/airbyte-lib/airbyte_lib/types.py +++ /dev/null @@ -1,127 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -"""Type conversion methods for SQL Caches.""" -from __future__ import annotations - -from typing import cast - -import sqlalchemy -from rich import print - - -# Compare to documentation here: https://docs.airbyte.com/understanding-airbyte/supported-data-types -CONVERSION_MAP = { - "string": sqlalchemy.types.VARCHAR, - "integer": sqlalchemy.types.BIGINT, - "number": sqlalchemy.types.DECIMAL, - "boolean": sqlalchemy.types.BOOLEAN, - "date": sqlalchemy.types.DATE, - "timestamp_with_timezone": sqlalchemy.types.TIMESTAMP, - "timestamp_without_timezone": sqlalchemy.types.TIMESTAMP, - "time_with_timezone": sqlalchemy.types.TIME, - "time_without_timezone": sqlalchemy.types.TIME, - # Technically 'object' and 'array' as JSON Schema types, not airbyte types. - # We include them here for completeness. - "object": sqlalchemy.types.JSON, - "array": sqlalchemy.types.JSON, -} - - -class SQLTypeConversionError(Exception): - """An exception to be raised when a type conversion fails.""" - - -def _get_airbyte_type( # noqa: PLR0911 # Too many return statements - json_schema_property_def: dict[str, str | dict | list], -) -> tuple[str, str | None]: - """Get the airbyte type and subtype from a JSON schema property definition. - - Subtype is only used for array types. Otherwise, subtype will return None. - """ - airbyte_type = cast(str, json_schema_property_def.get("airbyte_type", None)) - if airbyte_type: - return airbyte_type, None - - json_schema_type = json_schema_property_def.get("type", None) - json_schema_format = json_schema_property_def.get("format", None) - - # if json_schema_type is an array of two strings with one of them being null, pick the other one - # this strategy is often used by connectors to indicate a field might not be set all the time - if isinstance(json_schema_type, list): - non_null_types = [t for t in json_schema_type if t != "null"] - if len(non_null_types) == 1: - json_schema_type = non_null_types[0] - - if json_schema_type == "string": - if json_schema_format == "date": - return "date", None - - if json_schema_format == "date-time": - return "timestamp_with_timezone", None - - if json_schema_format == "time": - return "time_without_timezone", None - - if json_schema_type in ["string", "number", "boolean", "integer"]: - return cast(str, json_schema_type), None - - if json_schema_type == "object": - return "object", None - - if json_schema_type == "array": - items_def = json_schema_property_def.get("items", None) - if isinstance(items_def, dict): - subtype, _ = _get_airbyte_type(items_def) - return "array", subtype - - return "array", None - - err_msg = f"Could not determine airbyte type from JSON schema type: {json_schema_property_def}" - raise SQLTypeConversionError(err_msg) - - -class SQLTypeConverter: - """A base class to perform type conversions.""" - - def __init__( - self, - conversion_map: dict | None = None, - ) -> None: - self.conversion_map = conversion_map or CONVERSION_MAP - - @staticmethod - def get_failover_type() -> sqlalchemy.types.TypeEngine: - """Get the 'last resort' type to use if no other type is found.""" - return sqlalchemy.types.VARCHAR() - - def to_sql_type( - self, - json_schema_property_def: dict[str, str | dict | list], - ) -> sqlalchemy.types.TypeEngine: - """Convert a value to a SQL type.""" - try: - airbyte_type, _ = _get_airbyte_type(json_schema_property_def) - return self.conversion_map[airbyte_type]() - except SQLTypeConversionError: - print(f"Could not determine airbyte type from JSON schema: {json_schema_property_def}") - except KeyError: - print(f"Could not find SQL type for airbyte type: {airbyte_type}") - - json_schema_type = json_schema_property_def.get("type", None) - json_schema_format = json_schema_property_def.get("format", None) - - if json_schema_type == "string" and json_schema_format == "date": - return sqlalchemy.types.DATE() - - if json_schema_type == "string" and json_schema_format == "date-time": - return sqlalchemy.types.TIMESTAMP() - - if json_schema_type == "array": - # TODO: Implement array type conversion. - return self.get_failover_type() - - if json_schema_type == "object": - # TODO: Implement object type handling. - return self.get_failover_type() - - return self.get_failover_type() diff --git a/airbyte-lib/airbyte_lib/validate.py b/airbyte-lib/airbyte_lib/validate.py deleted file mode 100644 index 243556cd40200..0000000000000 --- a/airbyte-lib/airbyte_lib/validate.py +++ /dev/null @@ -1,159 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -"""Defines the `airbyte-lib-validate-source` CLI. - -This tool checks if connectors are compatible with airbyte-lib. -""" -from __future__ import annotations - -import argparse -import json -import os -import subprocess -import sys -import tempfile -from pathlib import Path - -import yaml -from rich import print - -import airbyte_lib as ab -from airbyte_lib import exceptions as exc - - -def _parse_args() -> argparse.Namespace: - parser = argparse.ArgumentParser(description="Validate a connector") - parser.add_argument( - "--connector-dir", - type=str, - required=True, - help="Path to the connector directory", - ) - parser.add_argument( - "--validate-install-only", - action="store_true", - help="Only validate that the connector can be installed and config can be validated.", - ) - parser.add_argument( - "--sample-config", - type=str, - required=False, - help="Path to the sample config.json file. Required without --validate-install-only.", - ) - return parser.parse_args() - - -def _run_subprocess_and_raise_on_failure(args: list[str]) -> None: - result = subprocess.run( - args, - check=False, - stderr=subprocess.PIPE, - ) - if result.returncode != 0: - raise exc.AirbyteSubprocessFailedError( - run_args=args, - exit_code=result.returncode, - log_text=result.stderr.decode("utf-8"), - ) - - -def full_tests(connector_name: str, sample_config: str) -> None: - print("Creating source and validating spec and version...") - source = ab.get_source( - # TODO: FIXME: noqa: SIM115, PTH123 - connector_name, - config=json.load(open(sample_config)), # noqa: SIM115, PTH123, - install_if_missing=False, - ) - - print("Running check...") - source.check() - - print("Fetching streams...") - streams = source.get_available_streams() - - # try to peek all streams - if one works, stop, if none works, throw exception - for stream in streams: - try: - print(f"Trying to read from stream {stream}...") - record = next(source.get_records(stream)) - assert record, "No record returned" - break - except exc.AirbyteError as e: - print(f"Could not read from stream {stream}: {e}") - except Exception as e: - print(f"Unhandled error occurred when trying to read from {stream}: {e}") - else: - raise exc.AirbyteNoDataFromConnectorError( - context={"selected_streams": streams}, - ) - - -def install_only_test(connector_name: str) -> None: - print("Creating source and validating spec is returned successfully...") - source = ab.get_source(connector_name) - source._get_spec(force_refresh=True) # noqa: SLF001 - - -def run() -> None: - """Handle CLI entrypoint for the `airbyte-lib-validate-source` command. - - It's called like this: - > airbyte-lib-validate-source —connector-dir . -—sample-config secrets/config.json - - It performs a basic smoke test to make sure the connector in question is airbyte-lib compliant: - * Can be installed into a venv - * Can be called via cli entrypoint - * Answers according to the Airbyte protocol when called with spec, check, discover and read. - """ - # parse args - args = _parse_args() - connector_dir = args.connector_dir - sample_config = args.sample_config - validate_install_only = args.validate_install_only - validate(connector_dir, sample_config, validate_install_only=validate_install_only) - - -def validate(connector_dir: str, sample_config: str, *, validate_install_only: bool) -> None: - # read metadata.yaml - metadata_path = Path(connector_dir) / "metadata.yaml" - with Path(metadata_path).open() as stream: - metadata = yaml.safe_load(stream)["data"] - - # TODO: Use remoteRegistries.pypi.packageName once set for connectors - connector_name = metadata["dockerRepository"].replace("airbyte/", "") - - # create a venv and install the connector - venv_name = f".venv-{connector_name}" - venv_path = Path(venv_name) - if not venv_path.exists(): - _run_subprocess_and_raise_on_failure([sys.executable, "-m", "venv", venv_name]) - - pip_path = str(venv_path / "bin" / "pip") - - _run_subprocess_and_raise_on_failure([pip_path, "install", connector_dir]) - - # write basic registry to temp json file - registry = { - "sources": [ - { - "dockerRepository": f"airbyte/{connector_name}", - "dockerImageTag": "0.0.1", - "remoteRegistries": { - "pypi": {"packageName": "airbyte-{connector_name}", "enabled": True} - }, - }, - ], - } - - with tempfile.NamedTemporaryFile(mode="w+t", delete=True) as temp_file: - temp_file.write(json.dumps(registry)) - temp_file.seek(0) - os.environ["AIRBYTE_LOCAL_REGISTRY"] = str(temp_file.name) - if validate_install_only: - install_only_test(connector_name) - else: - if not sample_config: - raise exc.AirbyteLibInputError( - input_value="--sample-config is required without --validate-install-only set" - ) - full_tests(connector_name, sample_config) diff --git a/airbyte-lib/airbyte_lib/version.py b/airbyte-lib/airbyte_lib/version.py deleted file mode 100644 index 114a730a5e7c1..0000000000000 --- a/airbyte-lib/airbyte_lib/version.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -from __future__ import annotations - -import importlib.metadata - - -airbyte_lib_version = importlib.metadata.version("airbyte-lib") - - -def get_version() -> str: - return airbyte_lib_version diff --git a/airbyte-lib/docs.py b/airbyte-lib/docs.py deleted file mode 100644 index be5dea69b9efe..0000000000000 --- a/airbyte-lib/docs.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -from __future__ import annotations - -import os -import pathlib -import shutil - -import pdoc - - -def run() -> None: - """Generate docs for all public modules in airbyte_lib and save them to docs/generated. - - Public modules are: - * The main airbyte_lib module - * All directory modules in airbyte_lib that don't start with an underscore. - """ - public_modules = ["airbyte_lib"] - - # recursively delete the docs/generated folder if it exists - if pathlib.Path("docs/generated").exists(): - shutil.rmtree("docs/generated") - - # All folders in `airbyte_lib` that don't start with "_" are treated as public modules. - for d in os.listdir("airbyte_lib"): - dir_path = pathlib.Path(f"airbyte_lib/{d}") - if dir_path.is_dir() and not d.startswith("_") and (dir_path / "__init__.py").exists(): - public_modules.append(dir_path) - - pdoc.render.configure(template_directory="docs", show_source=False, search=False) - pdoc.pdoc(*public_modules, output_directory=pathlib.Path("docs/generated")) diff --git a/airbyte-lib/docs/frame.html.jinja2 b/airbyte-lib/docs/frame.html.jinja2 deleted file mode 100644 index 379ae376725f0..0000000000000 --- a/airbyte-lib/docs/frame.html.jinja2 +++ /dev/null @@ -1,14 +0,0 @@ - -
    - {% block module_contents %}{% endblock %} -
    - -{% filter minify_css %} - {% block style %} - {# The same CSS files as in pdoc's default template, except for layout.css. - You may leave out Bootstrap Reboot, which corrects inconsistences across browsers - but may conflict with you website's stylesheet. #} - - - {% endblock %} -{% endfilter %} diff --git a/airbyte-lib/docs/generated/airbyte_lib.html b/airbyte-lib/docs/generated/airbyte_lib.html deleted file mode 100644 index 5c7d778615234..0000000000000 --- a/airbyte-lib/docs/generated/airbyte_lib.html +++ /dev/null @@ -1,889 +0,0 @@ - -
    -
    -
    - - class - CachedDataset(airbyte_lib.datasets._sql.SQLDataset): - - -
    - - -

    A dataset backed by a SQL table cache.

    - -

    Because this dataset includes all records from the underlying table, we also expose the -underlying table as a SQLAlchemy Table object.

    -
    - - -
    -
    - - CachedDataset(cache: 'SQLCacheBase', stream_name: str) - - -
    - - - - -
    -
    -
    -
    @overrides
    - - def - to_pandas(self) -> pandas.core.frame.DataFrame: - - -
    - - -

    Return a pandas DataFrame representation of the dataset.

    - -

    The base implementation simply passes the record iterator to Panda's DataFrame constructor.

    -
    - - -
    -
    -
    - - def - to_sql_table(self) -> 'Table': - - -
    - - - - -
    -
    -
    Inherited Members
    -
    -
    airbyte_lib.datasets._sql.SQLDataset
    -
    stream_name
    -
    with_filter
    - -
    -
    -
    -
    -
    -
    - - class - DuckDBCache(airbyte_lib.caches.duckdb.DuckDBCacheBase): - - -
    - - -

    A DuckDB implementation of the cache.

    - -

    Parquet is used for local file storage before bulk loading. -Unlike the Snowflake implementation, we can't use the COPY command to load data -so we insert as values instead.

    -
    - - -
    -
    - file_writer_class = -<class 'airbyte_lib._file_writers.parquet.ParquetWriter'> - - -
    - - - - -
    -
    -
    Inherited Members
    -
    -
    airbyte_lib.caches.base.SQLCacheBase
    -
    SQLCacheBase
    -
    type_converter_class
    -
    use_singleton_connection
    -
    config
    -
    file_writer
    -
    type_converter
    -
    get_sql_alchemy_url
    -
    database_name
    -
    get_sql_engine
    -
    get_sql_connection
    -
    get_sql_table_name
    -
    get_sql_table
    -
    streams
    -
    get_records
    -
    get_pandas_dataframe
    -
    get_state
    -
    register_source
    - -
    -
    airbyte_lib.caches.duckdb.DuckDBCacheBase
    -
    config_class
    -
    supports_merge_insert
    -
    get_telemetry_info
    - -
    -
    airbyte_lib._processors.RecordProcessor
    -
    skip_finalize_step
    -
    source_catalog
    -
    process_stdin
    -
    process_input_stream
    -
    process_airbyte_messages
    - -
    -
    -
    -
    -
    -
    - - class - DuckDBCacheConfig(airbyte_lib.caches.base.SQLCacheConfigBase, airbyte_lib._file_writers.parquet.ParquetWriterConfig): - - -
    - - -

    Configuration for the DuckDB cache.

    - -

    Also inherits config from the ParquetWriter, which is responsible for writing files to disk.

    -
    - - -
    -
    - db_path: pathlib.Path | str - - -
    - - -

    Normally db_path is a Path object.

    - -

    There are some cases, such as when connecting to MotherDuck, where it could be a string that -is not also a path, such as "md:" to connect the user's default MotherDuck DB.

    -
    - - -
    -
    -
    - schema_name: str - - -
    - - -

    The name of the schema to write to. Defaults to "main".

    -
    - - -
    -
    -
    -
    @overrides
    - - def - get_sql_alchemy_url(self) -> str: - - -
    - - -

    Return the SQLAlchemy URL to use.

    -
    - - -
    -
    -
    - - def - get_database_name(self) -> str: - - -
    - - -

    Return the name of the database.

    -
    - - -
    -
    -
    Inherited Members
    -
    -
    pydantic.main.BaseModel
    -
    BaseModel
    -
    Config
    -
    dict
    -
    json
    -
    parse_obj
    -
    parse_raw
    -
    parse_file
    -
    from_orm
    -
    construct
    -
    copy
    -
    schema
    -
    schema_json
    -
    validate
    -
    update_forward_refs
    - -
    -
    airbyte_lib.caches.base.SQLCacheConfigBase
    -
    table_prefix
    -
    table_suffix
    - -
    -
    airbyte_lib._file_writers.base.FileWriterConfigBase
    -
    cache_dir
    -
    cleanup
    - -
    -
    -
    -
    -
    -
    - - def - get_available_connectors() -> list[str]: - - -
    - - -

    Return a list of all available connectors.

    - -

    Connectors will be returned in alphabetical order, with the standard prefix "source-".

    -
    - - -
    -
    -
    - - def - get_source( name: str, config: dict[str, typing.Any] | None = None, *, version: str | None = None, pip_url: str | None = None, local_executable: pathlib.Path | str | None = None, install_if_missing: bool = True) -> Source: - - -
    - - -

    Get a connector by name and version.

    - -

    Args: - name: connector name - config: connector config - if not provided, you need to set it later via the set_config - method. - version: connector version - if not provided, the currently installed version will be used. - If no version is installed, the latest available version will be used. The version can - also be set to "latest" to force the use of the latest available version. - pip_url: connector pip URL - if not provided, the pip url will be inferred from the - connector name. - local_executable: If set, the connector will be assumed to already be installed and will be - executed using this path or executable name. Otherwise, the connector will be installed - automatically in a virtual environment. - install_if_missing: Whether to install the connector if it is not available locally. This - parameter is ignored when local_executable is set.

    -
    - - -
    -
    -
    - - def - get_default_cache() -> DuckDBCache: - - -
    - - -

    Get a local cache for storing data, using the default database path.

    - -

    Cache files are stored in the .cache directory, relative to the current -working directory.

    -
    - - -
    -
    -
    - - def - get_secret( secret_name: str, source: SecretSource | list[SecretSource] = <SecretSource.ANY: 4>, *, prompt: bool = True) -> str: - - -
    - - -

    Get a secret from the environment.

    - -

    The optional source argument of enum type SecretSource or list of SecretSource options. -If left blank, the source arg will be SecretSource.ANY. If source is set to a specific -source, then only that source will be checked. If a list of SecretSource entries is passed, -then the sources will be checked using the provided ordering.

    - -

    If prompt to True or if SecretSource.PROMPT is declared in the source arg, then the -user will be prompted to enter the secret if it is not found in any of the other sources.

    -
    - - -
    -
    -
    - - def - new_local_cache( cache_name: str | None = None, cache_dir: str | pathlib.Path | None = None, *, cleanup: bool = True) -> DuckDBCache: - - -
    - - -

    Get a local cache for storing data, using a name string to seed the path.

    - -

    Args: - cache_name: Name to use for the cache. Defaults to None. - cache_dir: Root directory to store the cache in. Defaults to None. - cleanup: Whether to clean up temporary files. Defaults to True.

    - -

    Cache files are stored in the .cache directory, relative to the current -working directory.

    -
    - - -
    -
    -
    - - class - ReadResult(collections.abc.Mapping[str, airbyte_lib.datasets._sql.CachedDataset]): - - -
    - - -

    A Mapping is a generic container for associating key/value -pairs.

    - -

    This class provides concrete generic implementations of all -methods except for __getitem__, __iter__, and __len__.

    -
    - - -
    -
    - - ReadResult( processed_records: int, cache: airbyte_lib.caches.base.SQLCacheBase, processed_streams: list[str]) - - -
    - - - - -
    -
    -
    - processed_records - - -
    - - - - -
    -
    -
    - - def - get_sql_engine(self) -> sqlalchemy.engine.base.Engine: - - -
    - - - - -
    -
    -
    - streams: collections.abc.Mapping[str, CachedDataset] - - -
    - - - - -
    -
    -
    - cache: airbyte_lib.caches.base.SQLCacheBase - - -
    - - - - -
    -
    -
    Inherited Members
    -
    -
    collections.abc.Mapping
    -
    get
    -
    keys
    -
    items
    -
    values
    - -
    -
    -
    -
    -
    -
    - - class - SecretSource(enum.Enum): - - -
    - - -

    An enumeration.

    -
    - - -
    -
    - ENV = -<SecretSource.ENV: 1> - - -
    - - - - -
    -
    -
    - DOTENV = -<SecretSource.DOTENV: 2> - - -
    - - - - -
    -
    -
    - GOOGLE_COLAB = -<SecretSource.GOOGLE_COLAB: 3> - - -
    - - - - -
    -
    -
    - ANY = -<SecretSource.ANY: 4> - - -
    - - - - -
    -
    -
    - PROMPT = -<SecretSource.PROMPT: 5> - - -
    - - - - -
    -
    -
    Inherited Members
    -
    -
    enum.Enum
    -
    name
    -
    value
    - -
    -
    -
    -
    -
    -
    - - class - Source: - - -
    - - -

    A class representing a source that can be called.

    -
    - - -
    -
    - - Source( executor: airbyte_lib._executor.Executor, name: str, config: dict[str, typing.Any] | None = None, streams: list[str] | None = None, *, validate: bool = False) - - -
    - - -

    Initialize the source.

    - -

    If config is provided, it will be validated against the spec if validate is True.

    -
    - - -
    -
    -
    - executor - - -
    - - - - -
    -
    -
    - name - - -
    - - - - -
    -
    -
    - - def - set_streams(self, streams: list[str]) -> None: - - -
    - - -

    Deprecated. See select_streams().

    -
    - - -
    -
    -
    - - def - select_all_streams(self) -> None: - - -
    - - -

    Select all streams.

    - -

    This is a more streamlined equivalent to:

    - -
    -

    source.select_streams(source.get_available_streams()).

    -
    -
    - - -
    -
    -
    - - def - select_streams(self, streams: list[str]) -> None: - - -
    - - -

    Select the stream names that should be read from the connector.

    - -

    Currently, if this is not set, all streams will be read.

    -
    - - -
    -
    -
    - - def - get_selected_streams(self) -> list[str]: - - -
    - - -

    Get the selected streams.

    - -

    If no streams are selected, return an empty list.

    -
    - - -
    -
    -
    - - def - set_config(self, config: dict[str, typing.Any], *, validate: bool = False) -> None: - - -
    - - -

    Set the config for the connector.

    - -

    If validate is True, raise an exception if the config fails validation.

    - -

    If validate is False, validation will be deferred until check() or validate_config() -is called.

    -
    - - -
    -
    -
    - - def - get_config(self) -> dict[str, typing.Any]: - - -
    - - -

    Get the config for the connector.

    -
    - - -
    -
    -
    - - def - validate_config(self, config: dict[str, typing.Any] | None = None) -> None: - - -
    - - -

    Validate the config against the spec.

    - -

    If config is not provided, the already-set config will be validated.

    -
    - - -
    -
    -
    - - def - get_available_streams(self) -> list[str]: - - -
    - - -

    Get the available streams from the spec.

    -
    - - -
    -
    -
    - docs_url: str - - -
    - - -

    Get the URL to the connector's documentation.

    -
    - - -
    -
    -
    - discovered_catalog: airbyte_protocol.models.airbyte_protocol.AirbyteCatalog - - -
    - - -

    Get the raw catalog for the given streams.

    - -

    If the catalog is not yet known, we call discover to get it.

    -
    - - -
    -
    -
    - configured_catalog: airbyte_protocol.models.airbyte_protocol.ConfiguredAirbyteCatalog - - -
    - - -

    Get the configured catalog for the given streams.

    - -

    If the raw catalog is not yet known, we call discover to get it.

    - -

    If no specific streams are selected, we return a catalog that syncs all available streams.

    - -

    TODO: We should consider disabling by default the streams that the connector would -disable by default. (For instance, streams that require a premium license are sometimes -disabled by default within the connector.)

    -
    - - -
    -
    -
    - - def - get_records(self, stream: str) -> airbyte_lib.datasets._lazy.LazyDataset: - - -
    - - -

    Read a stream from the connector.

    - -

    This involves the following steps:

    - -
      -
    • Call discover to get the catalog
    • -
    • Generate a configured catalog that syncs the given stream in full_refresh mode
    • -
    • Write the configured catalog and the config to a temporary file
    • -
    • execute the connector with read --config --catalog
    • -
    • Listen to the messages and return the first AirbyteRecordMessages that come along.
    • -
    • Make sure the subprocess is killed when the function returns.
    • -
    -
    - - -
    -
    -
    - - def - check(self) -> None: - - -
    - - -

    Call check on the connector.

    - -

    This involves the following steps:

    - -
      -
    • Write the config to a temporary file
    • -
    • execute the connector with check --config
    • -
    • Listen to the messages and return the first AirbyteCatalog that comes along.
    • -
    • Make sure the subprocess is killed when the function returns.
    • -
    -
    - - -
    -
    -
    - - def - install(self) -> None: - - -
    - - -

    Install the connector if it is not yet installed.

    -
    - - -
    -
    -
    - - def - uninstall(self) -> None: - - -
    - - -

    Uninstall the connector if it is installed.

    - -

    This only works if the use_local_install flag wasn't used and installation is managed by -airbyte-lib.

    -
    - - -
    -
    -
    - - def - read( self, cache: airbyte_lib.caches.base.SQLCacheBase | None = None, *, write_strategy: str | airbyte_lib.strategies.WriteStrategy = <WriteStrategy.AUTO: 'auto'>, force_full_refresh: bool = False) -> ReadResult: - - -
    - - -

    Read from the connector and write to the cache.

    - -

    Args: - cache: The cache to write to. If None, a default cache will be used. - write_strategy: The strategy to use when writing to the cache. If a string, it must be - one of "append", "upsert", "replace", or "auto". If a WriteStrategy, it must be one - of WriteStrategy.APPEND, WriteStrategy.UPSERT, WriteStrategy.REPLACE, or - WriteStrategy.AUTO. - force_full_refresh: If True, the source will operate in full refresh mode. Otherwise, - streams will be read in incremental mode if supported by the connector. This option - must be True when using the "replace" strategy.

    -
    - - -
    -
    -
    - - - - \ No newline at end of file diff --git a/airbyte-lib/docs/generated/airbyte_lib/caches.html b/airbyte-lib/docs/generated/airbyte_lib/caches.html deleted file mode 100644 index cf1eb7276567d..0000000000000 --- a/airbyte-lib/docs/generated/airbyte_lib/caches.html +++ /dev/null @@ -1,992 +0,0 @@ - -
    -
    -
    - - class - DuckDBCache(airbyte_lib.caches.duckdb.DuckDBCacheBase): - - -
    - - -

    A DuckDB implementation of the cache.

    - -

    Parquet is used for local file storage before bulk loading. -Unlike the Snowflake implementation, we can't use the COPY command to load data -so we insert as values instead.

    -
    - - -
    -
    - file_writer_class = -<class 'airbyte_lib._file_writers.parquet.ParquetWriter'> - - -
    - - - - -
    -
    -
    Inherited Members
    -
    - -
    airbyte_lib.caches.duckdb.DuckDBCacheBase
    -
    config_class
    -
    supports_merge_insert
    -
    get_telemetry_info
    - -
    -
    airbyte_lib._processors.RecordProcessor
    -
    skip_finalize_step
    -
    source_catalog
    -
    process_stdin
    -
    process_input_stream
    -
    process_airbyte_messages
    - -
    -
    -
    -
    -
    -
    - - class - DuckDBCacheConfig(airbyte_lib.caches.base.SQLCacheConfigBase, airbyte_lib._file_writers.parquet.ParquetWriterConfig): - - -
    - - -

    Configuration for the DuckDB cache.

    - -

    Also inherits config from the ParquetWriter, which is responsible for writing files to disk.

    -
    - - -
    -
    - db_path: pathlib.Path | str - - -
    - - -

    Normally db_path is a Path object.

    - -

    There are some cases, such as when connecting to MotherDuck, where it could be a string that -is not also a path, such as "md:" to connect the user's default MotherDuck DB.

    -
    - - -
    -
    -
    - schema_name: str - - -
    - - -

    The name of the schema to write to. Defaults to "main".

    -
    - - -
    -
    -
    -
    @overrides
    - - def - get_sql_alchemy_url(self) -> str: - - -
    - - -

    Return the SQLAlchemy URL to use.

    -
    - - -
    -
    -
    - - def - get_database_name(self) -> str: - - -
    - - -

    Return the name of the database.

    -
    - - -
    -
    -
    Inherited Members
    -
    -
    pydantic.main.BaseModel
    -
    BaseModel
    -
    Config
    -
    dict
    -
    json
    -
    parse_obj
    -
    parse_raw
    -
    parse_file
    -
    from_orm
    -
    construct
    -
    copy
    -
    schema
    -
    schema_json
    -
    validate
    -
    update_forward_refs
    - -
    -
    airbyte_lib.caches.base.SQLCacheConfigBase
    -
    table_prefix
    -
    table_suffix
    - -
    -
    airbyte_lib._file_writers.base.FileWriterConfigBase
    -
    cache_dir
    -
    cleanup
    - -
    -
    -
    -
    -
    -
    - - class - PostgresCache(airbyte_lib.caches.SQLCacheBase): - - -
    - - -

    A Postgres implementation of the cache.

    - -

    Parquet is used for local file storage before bulk loading. -Unlike the Snowflake implementation, we can't use the COPY command to load data -so we insert as values instead.

    - -

    TOOD: Add optimized bulk load path for Postgres. Could use an alternate file writer -or another import method. (Relatively low priority, since for now it works fine as-is.)

    -
    - - -
    -
    - config_class = -<class 'PostgresCacheConfig'> - - -
    - - - - -
    -
    -
    - file_writer_class = -<class 'airbyte_lib._file_writers.parquet.ParquetWriter'> - - -
    - - - - -
    -
    -
    - supports_merge_insert = -False - - -
    - - - - -
    -
    -
    -
    @overrides
    - - def - get_telemetry_info(self) -> airbyte_lib.telemetry.CacheTelemetryInfo: - - -
    - - - - -
    -
    -
    Inherited Members
    -
    - -
    airbyte_lib._processors.RecordProcessor
    -
    skip_finalize_step
    -
    source_catalog
    -
    process_stdin
    -
    process_input_stream
    -
    process_airbyte_messages
    - -
    -
    -
    -
    -
    -
    - - class - PostgresCacheConfig(airbyte_lib.caches.base.SQLCacheConfigBase, airbyte_lib._file_writers.parquet.ParquetWriterConfig): - - -
    - - -

    Configuration for the Postgres cache.

    - -

    Also inherits config from the ParquetWriter, which is responsible for writing files to disk.

    -
    - - -
    -
    - host: str - - -
    - - - - -
    -
    -
    - port: int - - -
    - - - - -
    -
    -
    - username: str - - -
    - - - - -
    -
    -
    - password: str - - -
    - - - - -
    -
    -
    - database: str - - -
    - - - - -
    -
    -
    -
    @overrides
    - - def - get_sql_alchemy_url(self) -> str: - - -
    - - -

    Return the SQLAlchemy URL to use.

    -
    - - -
    -
    -
    - - def - get_database_name(self) -> str: - - -
    - - -

    Return the name of the database.

    -
    - - -
    -
    -
    Inherited Members
    -
    -
    pydantic.main.BaseModel
    -
    BaseModel
    -
    Config
    -
    dict
    -
    json
    -
    parse_obj
    -
    parse_raw
    -
    parse_file
    -
    from_orm
    -
    construct
    -
    copy
    -
    schema
    -
    schema_json
    -
    validate
    -
    update_forward_refs
    - -
    -
    airbyte_lib.caches.base.SQLCacheConfigBase
    -
    schema_name
    -
    table_prefix
    -
    table_suffix
    - -
    -
    airbyte_lib._file_writers.base.FileWriterConfigBase
    -
    cache_dir
    -
    cleanup
    - -
    -
    -
    -
    -
    -
    - - class - SQLCacheBase(airbyte_lib._processors.RecordProcessor): - - -
    - - -

    A base class to be used for SQL Caches.

    - -

    Optionally we can use a file cache to store the data in parquet files.

    -
    - - -
    -
    - type_converter_class: type[airbyte_lib.types.SQLTypeConverter] = -<class 'airbyte_lib.types.SQLTypeConverter'> - - -
    - - - - -
    -
    -
    - config_class: type[airbyte_lib.caches.base.SQLCacheConfigBase] - - -
    - - - - -
    -
    -
    - file_writer_class: type[airbyte_lib._file_writers.base.FileWriterBase] - - -
    - - - - -
    -
    -
    - supports_merge_insert = -False - - -
    - - - - -
    -
    -
    - use_singleton_connection = -False - - -
    - - - - -
    -
    -
    - config: airbyte_lib.caches.base.SQLCacheConfigBase - - -
    - - - - -
    -
    -
    - file_writer - - -
    - - - - -
    -
    -
    - type_converter - - -
    - - - - -
    -
    -
    - - def - get_sql_alchemy_url(self) -> str: - - -
    - - -

    Return the SQLAlchemy URL to use.

    -
    - - -
    -
    -
    - database_name: str - - -
    - - -

    Return the name of the database.

    -
    - - -
    -
    -
    -
    @final
    - - def - get_sql_engine(self) -> sqlalchemy.engine.base.Engine: - - -
    - - -

    Return a new SQL engine to use.

    -
    - - -
    -
    -
    -
    @contextmanager
    - - def - get_sql_connection( self) -> collections.abc.Generator[sqlalchemy.engine.base.Connection, None, None]: - - -
    - - -

    A context manager which returns a new SQL connection for running queries.

    - -

    If the connection needs to close, it will be closed automatically.

    -
    - - -
    -
    -
    - - def - get_sql_table_name(self, stream_name: str) -> str: - - -
    - - -

    Return the name of the SQL table for the given stream.

    -
    - - -
    -
    -
    -
    @final
    - - def - get_sql_table(self, stream_name: str) -> sqlalchemy.sql.schema.Table: - - -
    - - -

    Return the main table object for the stream.

    -
    - - -
    -
    -
    - streams: dict[str, airbyte_lib.datasets._sql.CachedDataset] - - -
    - - -

    Return a temporary table name.

    -
    - - -
    -
    -
    - - def - get_records(self, stream_name: str) -> airbyte_lib.datasets._sql.CachedDataset: - - -
    - - -

    Uses SQLAlchemy to select all rows from the table.

    -
    - - -
    -
    -
    - - def - get_pandas_dataframe(self, stream_name: str) -> pandas.core.frame.DataFrame: - - -
    - - -

    Return a Pandas data frame with the stream's data.

    -
    - - -
    -
    -
    - - def - get_state(self) -> list[dict]: - - -
    - - -

    Return the current state of the source.

    -
    - - -
    -
    -
    -
    @overrides
    - - def - register_source( self, source_name: str, incoming_source_catalog: airbyte_protocol.models.airbyte_protocol.ConfiguredAirbyteCatalog, stream_names: set[str]) -> None: - - -
    - - -

    Register the source with the cache.

    - -

    We use stream_names to determine which streams will receive data, and -we only register the stream if is expected to receive data.

    - -

    This method is called by the source when it is initialized.

    -
    - - -
    -
    -
    -
    @abc.abstractmethod
    - - def - get_telemetry_info(self) -> airbyte_lib.telemetry.CacheTelemetryInfo: - - -
    - - - - -
    -
    -
    Inherited Members
    -
    -
    airbyte_lib._processors.RecordProcessor
    -
    skip_finalize_step
    -
    source_catalog
    -
    process_stdin
    -
    process_input_stream
    -
    process_airbyte_messages
    - -
    -
    -
    -
    -
    -
    - - class - SnowflakeCacheConfig(airbyte_lib.caches.base.SQLCacheConfigBase, airbyte_lib._file_writers.parquet.ParquetWriterConfig): - - -
    - - -

    Configuration for the Snowflake cache.

    - -

    Also inherits config from the ParquetWriter, which is responsible for writing files to disk.

    -
    - - -
    -
    - account: str - - -
    - - - - -
    -
    -
    - username: str - - -
    - - - - -
    -
    -
    - password: str - - -
    - - - - -
    -
    -
    - warehouse: str - - -
    - - - - -
    -
    -
    - database: str - - -
    - - - - -
    -
    -
    - role: str - - -
    - - - - -
    -
    -
    - dedupe_mode - - -
    - - - - -
    -
    -
    -
    @overrides
    - - def - get_sql_alchemy_url(self) -> str: - - -
    - - -

    Return the SQLAlchemy URL to use.

    -
    - - -
    -
    -
    - - def - get_database_name(self) -> str: - - -
    - - -

    Return the name of the database.

    -
    - - -
    -
    -
    Inherited Members
    -
    -
    pydantic.main.BaseModel
    -
    BaseModel
    -
    Config
    -
    dict
    -
    json
    -
    parse_obj
    -
    parse_raw
    -
    parse_file
    -
    from_orm
    -
    construct
    -
    copy
    -
    schema
    -
    schema_json
    -
    validate
    -
    update_forward_refs
    - -
    -
    airbyte_lib.caches.base.SQLCacheConfigBase
    -
    schema_name
    -
    table_prefix
    -
    table_suffix
    - -
    -
    airbyte_lib._file_writers.base.FileWriterConfigBase
    -
    cache_dir
    -
    cleanup
    - -
    -
    -
    -
    -
    -
    - - class - SnowflakeSQLCache(airbyte_lib.caches.SQLCacheBase): - - -
    - - -

    A Snowflake implementation of the cache.

    - -

    Parquet is used for local file storage before bulk loading.

    -
    - - -
    -
    - config_class = -<class 'SnowflakeCacheConfig'> - - -
    - - - - -
    -
    -
    - file_writer_class = -<class 'airbyte_lib._file_writers.parquet.ParquetWriter'> - - -
    - - - - -
    -
    -
    - type_converter_class = -<class 'airbyte_lib.caches.snowflake.SnowflakeTypeConverter'> - - -
    - - - - -
    -
    -
    -
    @overrides
    - - def - get_telemetry_info(self) -> airbyte_lib.telemetry.CacheTelemetryInfo: - - -
    - - - - -
    -
    -
    Inherited Members
    -
    - -
    airbyte_lib._processors.RecordProcessor
    -
    skip_finalize_step
    -
    source_catalog
    -
    process_stdin
    -
    process_input_stream
    -
    process_airbyte_messages
    - -
    -
    -
    -
    -
    - - - - \ No newline at end of file diff --git a/airbyte-lib/docs/generated/airbyte_lib/datasets.html b/airbyte-lib/docs/generated/airbyte_lib/datasets.html deleted file mode 100644 index 76089344eca0b..0000000000000 --- a/airbyte-lib/docs/generated/airbyte_lib/datasets.html +++ /dev/null @@ -1,258 +0,0 @@ - -
    -
    -
    - - class - CachedDataset(airbyte_lib.datasets.SQLDataset): - - -
    - - -

    A dataset backed by a SQL table cache.

    - -

    Because this dataset includes all records from the underlying table, we also expose the -underlying table as a SQLAlchemy Table object.

    -
    - - -
    -
    - - CachedDataset(cache: 'SQLCacheBase', stream_name: str) - - -
    - - - - -
    -
    -
    -
    @overrides
    - - def - to_pandas(self) -> pandas.core.frame.DataFrame: - - -
    - - -

    Return a pandas DataFrame representation of the dataset.

    - -

    The base implementation simply passes the record iterator to Panda's DataFrame constructor.

    -
    - - -
    -
    -
    - - def - to_sql_table(self) -> 'Table': - - -
    - - - - -
    -
    -
    Inherited Members
    -
    - -
    -
    -
    -
    -
    - - class - DatasetBase(abc.ABC): - - -
    - - -

    Base implementation for all datasets.

    -
    - - -
    -
    - - def - to_pandas(self) -> pandas.core.frame.DataFrame: - - -
    - - -

    Return a pandas DataFrame representation of the dataset.

    - -

    The base implementation simply passes the record iterator to Panda's DataFrame constructor.

    -
    - - -
    -
    -
    -
    - - class - DatasetMap(collections.abc.Mapping): - - -
    - - -

    A generic interface for a set of streams or datasets.

    -
    - - -
    -
    Inherited Members
    -
    -
    collections.abc.Mapping
    -
    get
    -
    keys
    -
    items
    -
    values
    - -
    -
    -
    -
    -
    -
    - - class - LazyDataset(airbyte_lib.datasets.DatasetBase): - - -
    - - -

    A dataset that is loaded incrementally from a source or a SQL query.

    -
    - - -
    -
    - - LazyDataset( iterator: collections.abc.Iterator[collections.abc.Mapping[str, typing.Any]]) - - -
    - - - - -
    -
    -
    Inherited Members
    -
    - -
    -
    -
    -
    -
    - - class - SQLDataset(airbyte_lib.datasets.DatasetBase): - - -
    - - -

    A dataset that is loaded incrementally from a SQL query.

    - -

    The CachedDataset class is a subclass of this class, which simply passes a SELECT over the full -table as the query statement.

    -
    - - -
    -
    - - SQLDataset( cache: 'SQLCacheBase', stream_name: str, query_statement: 'Selectable') - - -
    - - - - -
    -
    -
    - stream_name: str - - -
    - - - - -
    -
    -
    - - def - to_pandas(self) -> pandas.core.frame.DataFrame: - - -
    - - -

    Return a pandas DataFrame representation of the dataset.

    - -

    The base implementation simply passes the record iterator to Panda's DataFrame constructor.

    -
    - - -
    -
    -
    - - def - with_filter( self, *filter_expressions: 'ClauseElement | str') -> SQLDataset: - - -
    - - -

    Filter the dataset by a set of column values.

    - -

    Filters can be specified as either a string or a SQLAlchemy expression.

    - -

    Filters are lazily applied to the dataset, so they can be chained together. For example:

    - -
        dataset.with_filter("id > 5").with_filter("id < 10")
    -
    - -

    is equivalent to:

    - -
        dataset.with_filter("id > 5", "id < 10")
    -
    -
    - - -
    -
    -
    - - - - \ No newline at end of file diff --git a/airbyte-lib/docs/generated/index.html b/airbyte-lib/docs/generated/index.html deleted file mode 100644 index 6dfc876b8f9c6..0000000000000 --- a/airbyte-lib/docs/generated/index.html +++ /dev/null @@ -1,7 +0,0 @@ - - - - - - - diff --git a/airbyte-lib/examples/run_faker.py b/airbyte-lib/examples/run_faker.py deleted file mode 100644 index 758cd07e2216d..0000000000000 --- a/airbyte-lib/examples/run_faker.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -"""A simple test of AirbyteLib, using the Faker source connector. - -Usage (from airbyte-lib root directory): -> poetry run python ./examples/run_faker.py - -No setup is needed, but you may need to delete the .venv-source-faker folder -if your installation gets interrupted or corrupted. -""" -from __future__ import annotations - -import airbyte_lib as ab - - -SCALE = 500_000 # Number of records to generate between users and purchases. - -# This is a dummy secret, just to test functionality. -DUMMY_SECRET = ab.get_secret("DUMMY_SECRET") - - -print("Installing Faker source...") -source = ab.get_source( - "source-faker", - config={"count": SCALE / 2}, - install_if_missing=True, -) -print("Faker source installed.") -source.check() -source.select_streams(["products", "users", "purchases"]) - -result = source.read() - -for name, records in result.streams.items(): - print(f"Stream {name}: {len(records)} records") diff --git a/airbyte-lib/examples/run_github.py b/airbyte-lib/examples/run_github.py deleted file mode 100644 index 253e1275a5412..0000000000000 --- a/airbyte-lib/examples/run_github.py +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -"""A simple test of AirbyteLib, using the Faker source connector. - -Usage (from airbyte-lib root directory): -> poetry run python ./examples/run_github.py - -No setup is needed, but you may need to delete the .venv-source-faker folder -if your installation gets interrupted or corrupted. -""" -from __future__ import annotations - -import airbyte_lib as ab - - -# Create a token here: https://github.com/settings/tokens -GITHUB_TOKEN = ab.get_secret("GITHUB_PERSONAL_ACCESS_TOKEN") - - -source = ab.get_source("source-github") -source.set_config( - { - "repositories": ["airbytehq/airbyte-lib-private-beta"], - "credentials": {"personal_access_token": GITHUB_TOKEN}, - } -) -source.check() -source.select_streams(["issues", "pull_requests", "commits", "collaborators", "deployments"]) - -result = source.read(cache=ab.new_local_cache("github")) -print(result.processed_records) - -for name, records in result.streams.items(): - print(f"Stream {name}: {len(records)} records") diff --git a/airbyte-lib/examples/run_pokeapi.py b/airbyte-lib/examples/run_pokeapi.py deleted file mode 100644 index 9b710bd625fad..0000000000000 --- a/airbyte-lib/examples/run_pokeapi.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -"""A simple test of AirbyteLib, using the PokeAPI source connector. - -Usage (from airbyte-lib root directory): -> poetry run python ./examples/run_pokeapi.py - -No setup is needed, but you may need to delete the .venv-source-pokeapi folder -if your installation gets interrupted or corrupted. -""" -from __future__ import annotations - -import airbyte_lib as ab - - -source = ab.get_source( - "source-pokeapi", - config={"pokemon_name": "bulbasaur"}, - install_if_missing=True, -) -source.check() - -# print(list(source.get_records("pokemon"))) -source.read(cache=ab.new_local_cache("poke")) diff --git a/airbyte-lib/examples/run_snowflake_faker.py b/airbyte-lib/examples/run_snowflake_faker.py deleted file mode 100644 index 56d8af8f10ef7..0000000000000 --- a/airbyte-lib/examples/run_snowflake_faker.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -from __future__ import annotations - -import json -import os - -from google.cloud import secretmanager - -import airbyte_lib as ab -from airbyte_lib.caches import SnowflakeCacheConfig, SnowflakeSQLCache - - -source = ab.get_source( - "source-faker", - config={"count": 10000, "seed": 0, "parallelism": 1, "always_updated": False}, - install_if_missing=True, -) - -# load secrets from GSM using the GCP_GSM_CREDENTIALS env variable -secret_client = secretmanager.SecretManagerServiceClient.from_service_account_info( - json.loads(os.environ["GCP_GSM_CREDENTIALS"]) -) -secret = json.loads( - secret_client.access_secret_version( - name="projects/dataline-integration-testing/secrets/AIRBYTE_LIB_SNOWFLAKE_CREDS/versions/latest" - ).payload.data.decode("UTF-8") -) - -cache = SnowflakeSQLCache( - SnowflakeCacheConfig( - account=secret["account"], - username=secret["username"], - password=secret["password"], - database=secret["database"], - warehouse=secret["warehouse"], - role=secret["role"], - ) -) - -source.check() - -source.select_streams(["products"]) -result = source.read(cache) - -for name in ["products"]: - print(f"Stream {name}: {len(list(result[name]))} records") diff --git a/airbyte-lib/examples/run_spacex.py b/airbyte-lib/examples/run_spacex.py deleted file mode 100644 index f2695d7ff6955..0000000000000 --- a/airbyte-lib/examples/run_spacex.py +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -from __future__ import annotations - -from itertools import islice - -import airbyte_lib as ab - - -# preparation (from airbyte-lib main folder): -# python -m venv .venv-source-spacex-api -# source .venv-source-spacex-api/bin/activate -# pip install -e ../airbyte-integrations/connectors/source-spacex-api -# In separate terminal: -# poetry run python examples/run_spacex.py - -source = ab.get_source( - "source-spacex-api", - config={"id": "605b4b6aaa5433645e37d03f"}, - install_if_missing=True, -) -cache = ab.new_local_cache() - -source.check() - -source.select_streams(["launches", "rockets", "capsules"]) - -result = source.read(cache) - -print(islice(source.get_records("capsules"), 10)) - -for name, records in result.cache.streams.items(): - print(f"Stream {name}: {len(list(records))} records") diff --git a/airbyte-lib/examples/run_test_source.py b/airbyte-lib/examples/run_test_source.py deleted file mode 100644 index e448f0f8b96ca..0000000000000 --- a/airbyte-lib/examples/run_test_source.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -from __future__ import annotations - -import os - -import airbyte_lib as ab - - -# preparation (from airbyte-lib main folder): -# python -m venv .venv-source-test -# source .venv-source-test/bin/activate -# pip install -e ./tests/integration_tests/fixtures/source-test -# In separate terminal: -# poetry run python examples/run_test_source.py - -os.environ["AIRBYTE_LOCAL_REGISTRY"] = "./tests/integration_tests/fixtures/registry.json" - -source = ab.get_source("source-test", config={"apiKey": "test"}) -cache = ab.new_local_cache("cache_test") - -source.check() - -print(source.get_available_streams()) - -result = source.read(cache) - -print(result.processed_records) -print(list(result["stream1"])) - -different_cache = ab.new_local_cache("cache_test") -print(list(different_cache["stream1"])) diff --git a/airbyte-lib/examples/run_test_source_single_stream.py b/airbyte-lib/examples/run_test_source_single_stream.py deleted file mode 100644 index 9b695979c8335..0000000000000 --- a/airbyte-lib/examples/run_test_source_single_stream.py +++ /dev/null @@ -1,20 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -from __future__ import annotations - -import os - -import airbyte_lib as ab - - -# preparation (from airbyte-lib main folder): -# python -m venv .venv-source-test -# source .venv-source-test/bin/activate -# pip install -e ./tests/integration_tests/fixtures/source-test -# In separate terminal: -# poetry run python examples/run_test_source.py - -os.environ["AIRBYTE_LOCAL_REGISTRY"] = "./tests/integration_tests/fixtures/registry.json" - -source = ab.get_source("source-test", config={"apiKey": "test"}) - -print(list(source.read_stream("stream1"))) diff --git a/airbyte-lib/poetry.lock b/airbyte-lib/poetry.lock deleted file mode 100644 index 86d14143870c9..0000000000000 --- a/airbyte-lib/poetry.lock +++ /dev/null @@ -1,2687 +0,0 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. - -[[package]] -name = "airbyte-cdk" -version = "0.58.9" -description = "A framework for writing Airbyte Connectors." -optional = false -python-versions = ">=3.8" -files = [ - {file = "airbyte-cdk-0.58.9.tar.gz", hash = "sha256:e749bd4aab0911bd93c710e3ab2fcdde45d7a0bed2c0032d873006d3df701478"}, - {file = "airbyte_cdk-0.58.9-py3-none-any.whl", hash = "sha256:45dfbac2d0ae86dd5872c07c140ce16be8481452b7b8f65b228bc9f892843871"}, -] - -[package.dependencies] -airbyte-protocol-models = "0.5.1" -backoff = "*" -cachetools = "*" -Deprecated = ">=1.2,<2.0" -dpath = ">=2.0.1,<2.1.0" -genson = "1.2.2" -isodate = ">=0.6.1,<0.7.0" -Jinja2 = ">=3.1.2,<3.2.0" -jsonref = ">=0.2,<1.0" -jsonschema = ">=3.2.0,<3.3.0" -pendulum = "<3.0.0" -pydantic = ">=1.10.8,<2.0.0" -pyrate-limiter = ">=3.1.0,<3.2.0" -python-dateutil = "*" -PyYAML = ">=6.0.1" -requests = "*" -requests-cache = "*" -wcmatch = "8.4" - -[package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] -vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] - -[[package]] -name = "airbyte-protocol-models" -version = "0.5.1" -description = "Declares the Airbyte Protocol." -optional = false -python-versions = ">=3.8" -files = [ - {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, - {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, -] - -[package.dependencies] -pydantic = ">=1.9.2,<2.0.0" - -[[package]] -name = "airbyte-source-faker" -version = "6.0.1" -description = "Source implementation for fake but realistic looking data." -optional = false -python-versions = "*" -files = [ - {file = "airbyte-source-faker-6.0.1.tar.gz", hash = "sha256:8173a48551fbfe0eb6e9c331fec650fa490f283736aef0d58e2f14e55f8cf90a"}, - {file = "airbyte_source_faker-6.0.1-py3-none-any.whl", hash = "sha256:622cd123589218cffe69755727addfe85873d7563002cf8d5f949586604e0d9f"}, -] - -[package.dependencies] -airbyte-cdk = ">=0.2,<1.0" -mimesis = "6.1.1" - -[package.extras] -tests = ["pytest (>=6.2,<7.0)", "pytest-mock (>=3.6.1,<3.7.0)", "requests-mock (>=1.9.3,<1.10.0)"] - -[[package]] -name = "asn1crypto" -version = "1.5.1" -description = "Fast ASN.1 parser and serializer with definitions for private keys, public keys, certificates, CRL, OCSP, CMS, PKCS#3, PKCS#7, PKCS#8, PKCS#12, PKCS#5, X.509 and TSP" -optional = false -python-versions = "*" -files = [ - {file = "asn1crypto-1.5.1-py2.py3-none-any.whl", hash = "sha256:db4e40728b728508912cbb3d44f19ce188f218e9eba635821bb4b68564f8fd67"}, - {file = "asn1crypto-1.5.1.tar.gz", hash = "sha256:13ae38502be632115abf8a24cbe5f4da52e3b5231990aff31123c805306ccb9c"}, -] - -[[package]] -name = "attrs" -version = "23.2.0" -description = "Classes Without Boilerplate" -optional = false -python-versions = ">=3.7" -files = [ - {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, - {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, -] - -[package.extras] -cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[tests]", "pre-commit"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] -tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] -tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] - -[[package]] -name = "backoff" -version = "2.2.1" -description = "Function decoration for backoff and retry" -optional = false -python-versions = ">=3.7,<4.0" -files = [ - {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, - {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, -] - -[[package]] -name = "bracex" -version = "2.4" -description = "Bash style brace expander." -optional = false -python-versions = ">=3.8" -files = [ - {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, - {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, -] - -[[package]] -name = "cachetools" -version = "5.3.2" -description = "Extensible memoizing collections and decorators" -optional = false -python-versions = ">=3.7" -files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, -] - -[[package]] -name = "cattrs" -version = "23.2.3" -description = "Composable complex class support for attrs and dataclasses." -optional = false -python-versions = ">=3.8" -files = [ - {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, - {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, -] - -[package.dependencies] -attrs = ">=23.1.0" -exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} - -[package.extras] -bson = ["pymongo (>=4.4.0)"] -cbor2 = ["cbor2 (>=5.4.6)"] -msgpack = ["msgpack (>=1.0.5)"] -orjson = ["orjson (>=3.9.2)"] -pyyaml = ["pyyaml (>=6.0)"] -tomlkit = ["tomlkit (>=0.11.8)"] -ujson = ["ujson (>=5.7.0)"] - -[[package]] -name = "certifi" -version = "2024.2.2" -description = "Python package for providing Mozilla's CA Bundle." -optional = false -python-versions = ">=3.6" -files = [ - {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, - {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, -] - -[[package]] -name = "cffi" -version = "1.16.0" -description = "Foreign Function Interface for Python calling C code." -optional = false -python-versions = ">=3.8" -files = [ - {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, - {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, - {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, - {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, - {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, - {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, - {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, - {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, - {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, - {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, - {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, - {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, - {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, - {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, - {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, -] - -[package.dependencies] -pycparser = "*" - -[[package]] -name = "charset-normalizer" -version = "3.3.2" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, -] - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "cryptography" -version = "41.0.7" -description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." -optional = false -python-versions = ">=3.7" -files = [ - {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf"}, - {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1"}, - {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157"}, - {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406"}, - {file = "cryptography-41.0.7-cp37-abi3-win32.whl", hash = "sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d"}, - {file = "cryptography-41.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309"}, - {file = "cryptography-41.0.7.tar.gz", hash = "sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc"}, -] - -[package.dependencies] -cffi = ">=1.12" - -[package.extras] -docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] -nox = ["nox"] -pep8test = ["black", "check-sdist", "mypy", "ruff"] -sdist = ["build"] -ssh = ["bcrypt (>=3.1.5)"] -test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] -test-randomorder = ["pytest-randomly"] - -[[package]] -name = "deprecated" -version = "1.2.14" -description = "Python @deprecated decorator to deprecate old python classes, functions or methods." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, - {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, -] - -[package.dependencies] -wrapt = ">=1.10,<2" - -[package.extras] -dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] - -[[package]] -name = "docker" -version = "7.0.0" -description = "A Python library for the Docker Engine API." -optional = false -python-versions = ">=3.8" -files = [ - {file = "docker-7.0.0-py3-none-any.whl", hash = "sha256:12ba681f2777a0ad28ffbcc846a69c31b4dfd9752b47eb425a274ee269c5e14b"}, - {file = "docker-7.0.0.tar.gz", hash = "sha256:323736fb92cd9418fc5e7133bc953e11a9da04f4483f828b527db553f1e7e5a3"}, -] - -[package.dependencies] -packaging = ">=14.0" -pywin32 = {version = ">=304", markers = "sys_platform == \"win32\""} -requests = ">=2.26.0" -urllib3 = ">=1.26.0" - -[package.extras] -ssh = ["paramiko (>=2.4.3)"] -websockets = ["websocket-client (>=1.3.0)"] - -[[package]] -name = "dpath" -version = "2.0.8" -description = "Filesystem-like pathing and searching for dictionaries" -optional = false -python-versions = ">=3.7" -files = [ - {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, - {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, -] - -[[package]] -name = "duckdb" -version = "0.10.0" -description = "DuckDB in-process database" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "duckdb-0.10.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:bd0ffb3fddef0f72a150e4d76e10942a84a1a0447d10907df1621b90d6668060"}, - {file = "duckdb-0.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f3d709d5c7c1a12b5e10d0b05fa916c670cd2b50178e3696faa0cc16048a1745"}, - {file = "duckdb-0.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9114aa22ec5d591a20ce5184be90f49d8e5b5348ceaab21e102c54560d07a5f8"}, - {file = "duckdb-0.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77a37877efadf39caf7cadde0f430fedf762751b9c54750c821e2f1316705a21"}, - {file = "duckdb-0.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87cbc9e1d9c3fc9f14307bea757f99f15f46843c0ab13a6061354410824ed41f"}, - {file = "duckdb-0.10.0-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f0bfec79fed387201550517d325dff4fad2705020bc139d936cab08b9e845662"}, - {file = "duckdb-0.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c5622134d2d9796b15e09de810e450859d4beb46d9b861357ec9ae40a61b775c"}, - {file = "duckdb-0.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:089ee8e831ccaef1b73fc89c43b661567175eed0115454880bafed5e35cda702"}, - {file = "duckdb-0.10.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a05af63747f1d7021995f0811c333dee7316cec3b06c0d3e4741b9bdb678dd21"}, - {file = "duckdb-0.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:072d6eba5d8a59e0069a8b5b4252fed8a21f9fe3f85a9129d186a39b3d0aea03"}, - {file = "duckdb-0.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a77b85668f59b919042832e4659538337f1c7f197123076c5311f1c9cf077df7"}, - {file = "duckdb-0.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96a666f1d2da65d03199a977aec246920920a5ea1da76b70ae02bd4fb1ffc48c"}, - {file = "duckdb-0.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ec76a4262b783628d26612d184834852d9c92fb203e91af789100c17e3d7173"}, - {file = "duckdb-0.10.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:009dd9d2cdbd3b061a9efbdfc79f2d1a8377bcf49f1e5f430138621f8c083a6c"}, - {file = "duckdb-0.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:878f06766088090dad4a2e5ee0081555242b2e8dcb29415ecc97e388cf0cf8d8"}, - {file = "duckdb-0.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:713ff0a1fb63a6d60f454acf67f31656549fb5d63f21ac68314e4f522daa1a89"}, - {file = "duckdb-0.10.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9c0ee450dfedfb52dd4957244e31820feef17228da31af6d052979450a80fd19"}, - {file = "duckdb-0.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ff79b2ea9994398b545c0d10601cd73565fbd09f8951b3d8003c7c5c0cebc7cb"}, - {file = "duckdb-0.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6bdf1aa71b924ef651062e6b8ff9981ad85bec89598294af8a072062c5717340"}, - {file = "duckdb-0.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0265bbc8216be3ced7b377ba8847128a3fc0ef99798a3c4557c1b88e3a01c23"}, - {file = "duckdb-0.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d418a315a07707a693bd985274c0f8c4dd77015d9ef5d8d3da4cc1942fd82e0"}, - {file = "duckdb-0.10.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2828475a292e68c71855190b818aded6bce7328f79e38c04a0c75f8f1c0ceef0"}, - {file = "duckdb-0.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c3aaeaae2eba97035c65f31ffdb18202c951337bf2b3d53d77ce1da8ae2ecf51"}, - {file = "duckdb-0.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:c51790aaaea97d8e4a58a114c371ed8d2c4e1ca7cbf29e3bdab6d8ccfc5afc1e"}, - {file = "duckdb-0.10.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8af1ae7cc77a12206b6c47ade191882cc8f49f750bb3e72bb86ac1d4fa89926a"}, - {file = "duckdb-0.10.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa4f7e8e8dc0e376aeb280b83f2584d0e25ec38985c27d19f3107b2edc4f4a97"}, - {file = "duckdb-0.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28ae942a79fad913defa912b56483cd7827a4e7721f4ce4bc9025b746ecb3c89"}, - {file = "duckdb-0.10.0-cp37-cp37m-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:01b57802898091455ca2a32c1335aac1e398da77c99e8a96a1e5de09f6a0add9"}, - {file = "duckdb-0.10.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:52e1ad4a55fa153d320c367046b9500578192e01c6d04308ba8b540441736f2c"}, - {file = "duckdb-0.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:904c47d04095af745e989c853f0bfc0776913dfc40dfbd2da7afdbbb5f67fed0"}, - {file = "duckdb-0.10.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:184ae7ea5874f3b8fa51ab0f1519bdd088a0b78c32080ee272b1d137e2c8fd9c"}, - {file = "duckdb-0.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bd33982ecc9bac727a032d6cedced9f19033cbad56647147408891eb51a6cb37"}, - {file = "duckdb-0.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f59bf0949899105dd5f8864cb48139bfb78454a8c017b8258ba2b5e90acf7afc"}, - {file = "duckdb-0.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:395f3b18948001e35dceb48a4423d574e38656606d033eef375408b539e7b076"}, - {file = "duckdb-0.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b8eb2b803be7ee1df70435c33b03a4598cdaf676cd67ad782b288dcff65d781"}, - {file = "duckdb-0.10.0-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:31b2ddd331801064326c8e3587a4db8a31d02aef11332c168f45b3bd92effb41"}, - {file = "duckdb-0.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c8b89e76a041424b8c2026c5dc1f74b53fbbc6c6f650d563259885ab2e7d093d"}, - {file = "duckdb-0.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:79084a82f16c0a54f6bfb7ded5600400c2daa90eb0d83337d81a56924eaee5d4"}, - {file = "duckdb-0.10.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:79799b3a270dcd9070f677ba510f1e66b112df3068425691bac97c5e278929c7"}, - {file = "duckdb-0.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e8fc394bfe3434920cdbcfbdd0ac3ba40902faa1dbda088db0ba44003a45318a"}, - {file = "duckdb-0.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c116605551b4abf5786243a59bcef02bd69cc51837d0c57cafaa68cdc428aa0c"}, - {file = "duckdb-0.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3191170c3b0a43b0c12644800326f5afdea00d5a4621d59dbbd0c1059139e140"}, - {file = "duckdb-0.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fee69a50eb93c72dc77e7ab1fabe0c38d21a52c5da44a86aa217081e38f9f1bd"}, - {file = "duckdb-0.10.0-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c5f449e87dacb16b0d145dbe65fa6fdb5a55b2b6911a46d74876e445dd395bac"}, - {file = "duckdb-0.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4487d0df221b17ea4177ad08131bc606b35f25cfadf890987833055b9d10cdf6"}, - {file = "duckdb-0.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:c099ae2ff8fe939fda62da81704f91e2f92ac45e48dc0e37c679c9d243d01e65"}, - {file = "duckdb-0.10.0.tar.gz", hash = "sha256:c02bcc128002aa79e3c9d89b9de25e062d1096a8793bc0d7932317b7977f6845"}, -] - -[[package]] -name = "duckdb-engine" -version = "0.10.0" -description = "SQLAlchemy driver for duckdb" -optional = false -python-versions = ">=3.7" -files = [ - {file = "duckdb_engine-0.10.0-py3-none-any.whl", hash = "sha256:c408d002e83630b6bbb05fc3b26a43406085b1c22dd43e8cab00bf0b9c011ea8"}, - {file = "duckdb_engine-0.10.0.tar.gz", hash = "sha256:5e3dad3b3513f055a4f5ec5430842249cfe03015743a7597ed1dcc0447dca565"}, -] - -[package.dependencies] -duckdb = ">=0.4.0" -sqlalchemy = ">=1.3.22" - -[[package]] -name = "exceptiongroup" -version = "1.2.0" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, - {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, -] - -[package.extras] -test = ["pytest (>=6)"] - -[[package]] -name = "faker" -version = "21.0.1" -description = "Faker is a Python package that generates fake data for you." -optional = false -python-versions = ">=3.8" -files = [ - {file = "Faker-21.0.1-py3-none-any.whl", hash = "sha256:0afc67ec898a2d71842a3456e9302620ebc35fab6ad4f3829693fdf151fa4a3a"}, - {file = "Faker-21.0.1.tar.gz", hash = "sha256:bb404bba449b87e6b54a8c50b4602765e9c1a42eaf48abfceb025e42fed01608"}, -] - -[package.dependencies] -python-dateutil = ">=2.4" - -[[package]] -name = "filelock" -version = "3.13.1" -description = "A platform independent file lock." -optional = false -python-versions = ">=3.8" -files = [ - {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, - {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, -] - -[package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] -typing = ["typing-extensions (>=4.8)"] - -[[package]] -name = "freezegun" -version = "1.4.0" -description = "Let your Python tests travel through time" -optional = false -python-versions = ">=3.7" -files = [ - {file = "freezegun-1.4.0-py3-none-any.whl", hash = "sha256:55e0fc3c84ebf0a96a5aa23ff8b53d70246479e9a68863f1fcac5a3e52f19dd6"}, - {file = "freezegun-1.4.0.tar.gz", hash = "sha256:10939b0ba0ff5adaecf3b06a5c2f73071d9678e507c5eaedb23c761d56ac774b"}, -] - -[package.dependencies] -python-dateutil = ">=2.7" - -[[package]] -name = "genson" -version = "1.2.2" -description = "GenSON is a powerful, user-friendly JSON Schema generator." -optional = false -python-versions = "*" -files = [ - {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, -] - -[[package]] -name = "google-api-core" -version = "2.17.1" -description = "Google API client core library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-api-core-2.17.1.tar.gz", hash = "sha256:9df18a1f87ee0df0bc4eea2770ebc4228392d8cc4066655b320e2cfccb15db95"}, - {file = "google_api_core-2.17.1-py3-none-any.whl", hash = "sha256:610c5b90092c360736baccf17bd3efbcb30dd380e7a6dc28a71059edb8bd0d8e"}, -] - -[package.dependencies] -google-auth = ">=2.14.1,<3.0.dev0" -googleapis-common-protos = ">=1.56.2,<2.0.dev0" -grpcio = [ - {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, - {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, -] -grpcio-status = [ - {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, - {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, -] -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" -requests = ">=2.18.0,<3.0.0.dev0" - -[package.extras] -grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"] -grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] -grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] - -[[package]] -name = "google-auth" -version = "2.28.0" -description = "Google Authentication Library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-auth-2.28.0.tar.gz", hash = "sha256:3cfc1b6e4e64797584fb53fc9bd0b7afa9b7c0dba2004fa7dcc9349e58cc3195"}, - {file = "google_auth-2.28.0-py2.py3-none-any.whl", hash = "sha256:7634d29dcd1e101f5226a23cbc4a0c6cda6394253bf80e281d9c5c6797869c53"}, -] - -[package.dependencies] -cachetools = ">=2.0.0,<6.0" -pyasn1-modules = ">=0.2.1" -rsa = ">=3.1.4,<5" - -[package.extras] -aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] -enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] -pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] -reauth = ["pyu2f (>=0.1.5)"] -requests = ["requests (>=2.20.0,<3.0.0.dev0)"] - -[[package]] -name = "google-cloud-secret-manager" -version = "2.18.1" -description = "Google Cloud Secret Manager API client library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-cloud-secret-manager-2.18.1.tar.gz", hash = "sha256:310555f3c8cb977f4a46d4454eca2c83fed6a09f3c4b35b84f6fa1f8fef55024"}, - {file = "google_cloud_secret_manager-2.18.1-py2.py3-none-any.whl", hash = "sha256:38e00ece9abf466cb449991b1a141a69690c6d51fe18456e531faf4935fbade3"}, -] - -[package.dependencies] -google-api-core = {version = ">=1.34.0,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} -google-auth = ">=2.14.1,<3.0.0dev" -grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev" -proto-plus = ">=1.22.3,<2.0.0dev" -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" - -[[package]] -name = "googleapis-common-protos" -version = "1.62.0" -description = "Common protobufs used in Google APIs" -optional = false -python-versions = ">=3.7" -files = [ - {file = "googleapis-common-protos-1.62.0.tar.gz", hash = "sha256:83f0ece9f94e5672cced82f592d2a5edf527a96ed1794f0bab36d5735c996277"}, - {file = "googleapis_common_protos-1.62.0-py2.py3-none-any.whl", hash = "sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07"}, -] - -[package.dependencies] -grpcio = {version = ">=1.44.0,<2.0.0.dev0", optional = true, markers = "extra == \"grpc\""} -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" - -[package.extras] -grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] - -[[package]] -name = "greenlet" -version = "3.0.3" -description = "Lightweight in-process concurrent programming" -optional = false -python-versions = ">=3.7" -files = [ - {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, - {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, - {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, - {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, - {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, - {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, - {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, - {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, - {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, - {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, - {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, - {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, - {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, - {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, - {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, - {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, -] - -[package.extras] -docs = ["Sphinx", "furo"] -test = ["objgraph", "psutil"] - -[[package]] -name = "grpc-google-iam-v1" -version = "0.13.0" -description = "IAM API client library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "grpc-google-iam-v1-0.13.0.tar.gz", hash = "sha256:fad318608b9e093258fbf12529180f400d1c44453698a33509cc6ecf005b294e"}, - {file = "grpc_google_iam_v1-0.13.0-py2.py3-none-any.whl", hash = "sha256:53902e2af7de8df8c1bd91373d9be55b0743ec267a7428ea638db3775becae89"}, -] - -[package.dependencies] -googleapis-common-protos = {version = ">=1.56.0,<2.0.0dev", extras = ["grpc"]} -grpcio = ">=1.44.0,<2.0.0dev" -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" - -[[package]] -name = "grpcio" -version = "1.60.1" -description = "HTTP/2-based RPC framework" -optional = false -python-versions = ">=3.7" -files = [ - {file = "grpcio-1.60.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:14e8f2c84c0832773fb3958240c69def72357bc11392571f87b2d7b91e0bb092"}, - {file = "grpcio-1.60.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:33aed0a431f5befeffd9d346b0fa44b2c01aa4aeae5ea5b2c03d3e25e0071216"}, - {file = "grpcio-1.60.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:fead980fbc68512dfd4e0c7b1f5754c2a8e5015a04dea454b9cada54a8423525"}, - {file = "grpcio-1.60.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:082081e6a36b6eb5cf0fd9a897fe777dbb3802176ffd08e3ec6567edd85bc104"}, - {file = "grpcio-1.60.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55ccb7db5a665079d68b5c7c86359ebd5ebf31a19bc1a91c982fd622f1e31ff2"}, - {file = "grpcio-1.60.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9b54577032d4f235452f77a83169b6527bf4b77d73aeada97d45b2aaf1bf5ce0"}, - {file = "grpcio-1.60.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7d142bcd604166417929b071cd396aa13c565749a4c840d6c702727a59d835eb"}, - {file = "grpcio-1.60.1-cp310-cp310-win32.whl", hash = "sha256:2a6087f234cb570008a6041c8ffd1b7d657b397fdd6d26e83d72283dae3527b1"}, - {file = "grpcio-1.60.1-cp310-cp310-win_amd64.whl", hash = "sha256:f2212796593ad1d0235068c79836861f2201fc7137a99aa2fea7beeb3b101177"}, - {file = "grpcio-1.60.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:79ae0dc785504cb1e1788758c588c711f4e4a0195d70dff53db203c95a0bd303"}, - {file = "grpcio-1.60.1-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:4eec8b8c1c2c9b7125508ff7c89d5701bf933c99d3910e446ed531cd16ad5d87"}, - {file = "grpcio-1.60.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:8c9554ca8e26241dabe7951aa1fa03a1ba0856688ecd7e7bdbdd286ebc272e4c"}, - {file = "grpcio-1.60.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91422ba785a8e7a18725b1dc40fbd88f08a5bb4c7f1b3e8739cab24b04fa8a03"}, - {file = "grpcio-1.60.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cba6209c96828711cb7c8fcb45ecef8c8859238baf15119daa1bef0f6c84bfe7"}, - {file = "grpcio-1.60.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c71be3f86d67d8d1311c6076a4ba3b75ba5703c0b856b4e691c9097f9b1e8bd2"}, - {file = "grpcio-1.60.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:af5ef6cfaf0d023c00002ba25d0751e5995fa0e4c9eec6cd263c30352662cbce"}, - {file = "grpcio-1.60.1-cp311-cp311-win32.whl", hash = "sha256:a09506eb48fa5493c58f946c46754ef22f3ec0df64f2b5149373ff31fb67f3dd"}, - {file = "grpcio-1.60.1-cp311-cp311-win_amd64.whl", hash = "sha256:49c9b6a510e3ed8df5f6f4f3c34d7fbf2d2cae048ee90a45cd7415abab72912c"}, - {file = "grpcio-1.60.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:b58b855d0071575ea9c7bc0d84a06d2edfbfccec52e9657864386381a7ce1ae9"}, - {file = "grpcio-1.60.1-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:a731ac5cffc34dac62053e0da90f0c0b8560396a19f69d9703e88240c8f05858"}, - {file = "grpcio-1.60.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:cf77f8cf2a651fbd869fbdcb4a1931464189cd210abc4cfad357f1cacc8642a6"}, - {file = "grpcio-1.60.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c557e94e91a983e5b1e9c60076a8fd79fea1e7e06848eb2e48d0ccfb30f6e073"}, - {file = "grpcio-1.60.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:069fe2aeee02dfd2135d562d0663fe70fbb69d5eed6eb3389042a7e963b54de8"}, - {file = "grpcio-1.60.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb0af13433dbbd1c806e671d81ec75bd324af6ef75171fd7815ca3074fe32bfe"}, - {file = "grpcio-1.60.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2f44c32aef186bbba254129cea1df08a20be414144ac3bdf0e84b24e3f3b2e05"}, - {file = "grpcio-1.60.1-cp312-cp312-win32.whl", hash = "sha256:a212e5dea1a4182e40cd3e4067ee46be9d10418092ce3627475e995cca95de21"}, - {file = "grpcio-1.60.1-cp312-cp312-win_amd64.whl", hash = "sha256:6e490fa5f7f5326222cb9f0b78f207a2b218a14edf39602e083d5f617354306f"}, - {file = "grpcio-1.60.1-cp37-cp37m-linux_armv7l.whl", hash = "sha256:4216e67ad9a4769117433814956031cb300f85edc855252a645a9a724b3b6594"}, - {file = "grpcio-1.60.1-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:73e14acd3d4247169955fae8fb103a2b900cfad21d0c35f0dcd0fdd54cd60367"}, - {file = "grpcio-1.60.1-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:6ecf21d20d02d1733e9c820fb5c114c749d888704a7ec824b545c12e78734d1c"}, - {file = "grpcio-1.60.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33bdea30dcfd4f87b045d404388469eb48a48c33a6195a043d116ed1b9a0196c"}, - {file = "grpcio-1.60.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53b69e79d00f78c81eecfb38f4516080dc7f36a198b6b37b928f1c13b3c063e9"}, - {file = "grpcio-1.60.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:39aa848794b887120b1d35b1b994e445cc028ff602ef267f87c38122c1add50d"}, - {file = "grpcio-1.60.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:72153a0d2e425f45b884540a61c6639436ddafa1829a42056aa5764b84108b8e"}, - {file = "grpcio-1.60.1-cp37-cp37m-win_amd64.whl", hash = "sha256:50d56280b482875d1f9128ce596e59031a226a8b84bec88cb2bf76c289f5d0de"}, - {file = "grpcio-1.60.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:6d140bdeb26cad8b93c1455fa00573c05592793c32053d6e0016ce05ba267549"}, - {file = "grpcio-1.60.1-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:bc808924470643b82b14fe121923c30ec211d8c693e747eba8a7414bc4351a23"}, - {file = "grpcio-1.60.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:70c83bb530572917be20c21f3b6be92cd86b9aecb44b0c18b1d3b2cc3ae47df0"}, - {file = "grpcio-1.60.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b106bc52e7f28170e624ba61cc7dc6829566e535a6ec68528f8e1afbed1c41f"}, - {file = "grpcio-1.60.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30e980cd6db1088c144b92fe376747328d5554bc7960ce583ec7b7d81cd47287"}, - {file = "grpcio-1.60.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0c5807e9152eff15f1d48f6b9ad3749196f79a4a050469d99eecb679be592acc"}, - {file = "grpcio-1.60.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f1c3dc536b3ee124e8b24feb7533e5c70b9f2ef833e3b2e5513b2897fd46763a"}, - {file = "grpcio-1.60.1-cp38-cp38-win32.whl", hash = "sha256:d7404cebcdb11bb5bd40bf94131faf7e9a7c10a6c60358580fe83913f360f929"}, - {file = "grpcio-1.60.1-cp38-cp38-win_amd64.whl", hash = "sha256:c8754c75f55781515a3005063d9a05878b2cfb3cb7e41d5401ad0cf19de14872"}, - {file = "grpcio-1.60.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:0250a7a70b14000fa311de04b169cc7480be6c1a769b190769d347939d3232a8"}, - {file = "grpcio-1.60.1-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:660fc6b9c2a9ea3bb2a7e64ba878c98339abaf1811edca904ac85e9e662f1d73"}, - {file = "grpcio-1.60.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:76eaaba891083fcbe167aa0f03363311a9f12da975b025d30e94b93ac7a765fc"}, - {file = "grpcio-1.60.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5d97c65ea7e097056f3d1ead77040ebc236feaf7f71489383d20f3b4c28412a"}, - {file = "grpcio-1.60.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb2a2911b028f01c8c64d126f6b632fcd8a9ac975aa1b3855766c94e4107180"}, - {file = "grpcio-1.60.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:5a1ebbae7e2214f51b1f23b57bf98eeed2cf1ba84e4d523c48c36d5b2f8829ff"}, - {file = "grpcio-1.60.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a66f4d2a005bc78e61d805ed95dedfcb35efa84b7bba0403c6d60d13a3de2d6"}, - {file = "grpcio-1.60.1-cp39-cp39-win32.whl", hash = "sha256:8d488fbdbf04283f0d20742b64968d44825617aa6717b07c006168ed16488804"}, - {file = "grpcio-1.60.1-cp39-cp39-win_amd64.whl", hash = "sha256:61b7199cd2a55e62e45bfb629a35b71fc2c0cb88f686a047f25b1112d3810904"}, - {file = "grpcio-1.60.1.tar.gz", hash = "sha256:dd1d3a8d1d2e50ad9b59e10aa7f07c7d1be2b367f3f2d33c5fade96ed5460962"}, -] - -[package.extras] -protobuf = ["grpcio-tools (>=1.60.1)"] - -[[package]] -name = "grpcio-status" -version = "1.60.1" -description = "Status proto mapping for gRPC" -optional = false -python-versions = ">=3.6" -files = [ - {file = "grpcio-status-1.60.1.tar.gz", hash = "sha256:61b5aab8989498e8aa142c20b88829ea5d90d18c18c853b9f9e6d407d37bf8b4"}, - {file = "grpcio_status-1.60.1-py3-none-any.whl", hash = "sha256:3034fdb239185b6e0f3169d08c268c4507481e4b8a434c21311a03d9eb5889a0"}, -] - -[package.dependencies] -googleapis-common-protos = ">=1.5.5" -grpcio = ">=1.60.1" -protobuf = ">=4.21.6" - -[[package]] -name = "idna" -version = "3.6" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.5" -files = [ - {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, - {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, -] - -[[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" -optional = false -python-versions = ">=3.7" -files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] - -[[package]] -name = "isodate" -version = "0.6.1" -description = "An ISO 8601 date/time/duration parser and formatter" -optional = false -python-versions = "*" -files = [ - {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, - {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, -] - -[package.dependencies] -six = "*" - -[[package]] -name = "jinja2" -version = "3.1.3" -description = "A very fast and expressive template engine." -optional = false -python-versions = ">=3.7" -files = [ - {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, - {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, -] - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - -[[package]] -name = "jsonref" -version = "0.3.0" -description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." -optional = false -python-versions = ">=3.3,<4.0" -files = [ - {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, - {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, -] - -[[package]] -name = "jsonschema" -version = "3.2.0" -description = "An implementation of JSON Schema validation for Python" -optional = false -python-versions = "*" -files = [ - {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, - {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, -] - -[package.dependencies] -attrs = ">=17.4.0" -pyrsistent = ">=0.14.0" -setuptools = "*" -six = ">=1.11.0" - -[package.extras] -format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] -format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] - -[[package]] -name = "markdown-it-py" -version = "3.0.0" -description = "Python port of markdown-it. Markdown parsing, done right!" -optional = false -python-versions = ">=3.8" -files = [ - {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, - {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, -] - -[package.dependencies] -mdurl = ">=0.1,<1.0" - -[package.extras] -benchmarking = ["psutil", "pytest", "pytest-benchmark"] -code-style = ["pre-commit (>=3.0,<4.0)"] -compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] -linkify = ["linkify-it-py (>=1,<3)"] -plugins = ["mdit-py-plugins"] -profiling = ["gprof2dot"] -rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] -testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] - -[[package]] -name = "markupsafe" -version = "2.1.5" -description = "Safely add untrusted strings to HTML/XML markup." -optional = false -python-versions = ">=3.7" -files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, -] - -[[package]] -name = "mdurl" -version = "0.1.2" -description = "Markdown URL utilities" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, - {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, -] - -[[package]] -name = "mimesis" -version = "6.1.1" -description = "Mimesis: Fake Data Generator." -optional = false -python-versions = ">=3.8,<4.0" -files = [ - {file = "mimesis-6.1.1-py3-none-any.whl", hash = "sha256:eabe41d7afa23b01dffb51ebd9e10837df6417fef02fa9841989ca886e479790"}, - {file = "mimesis-6.1.1.tar.gz", hash = "sha256:044ac378c61db0e06832ff722548fd6e604881d36bc938002e0bd5b85eeb6a98"}, -] - -[[package]] -name = "mypy" -version = "1.8.0" -description = "Optional static typing for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"}, - {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"}, - {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"}, - {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"}, - {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"}, - {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"}, - {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"}, - {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"}, - {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"}, - {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"}, - {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"}, - {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"}, - {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"}, - {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"}, - {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"}, - {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"}, - {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"}, - {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"}, - {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"}, - {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"}, - {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"}, - {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"}, - {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"}, - {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"}, - {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"}, - {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"}, - {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"}, -] - -[package.dependencies] -mypy-extensions = ">=1.0.0" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=4.1.0" - -[package.extras] -dmypy = ["psutil (>=4.0)"] -install-types = ["pip"] -mypyc = ["setuptools (>=50)"] -reports = ["lxml"] - -[[package]] -name = "mypy-extensions" -version = "1.0.0" -description = "Type system extensions for programs checked with the mypy type checker." -optional = false -python-versions = ">=3.5" -files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, -] - -[[package]] -name = "numpy" -version = "1.26.4" -description = "Fundamental package for array computing in Python" -optional = false -python-versions = ">=3.9" -files = [ - {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, - {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, - {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, - {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, - {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, - {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, - {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, - {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, - {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, - {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, - {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, -] - -[[package]] -name = "objprint" -version = "0.2.3" -description = "A library that can print Python objects in human readable format" -optional = false -python-versions = ">=3.6" -files = [ - {file = "objprint-0.2.3-py3-none-any.whl", hash = "sha256:1721e6f97bae5c5b86c2716a0d45a9dd2c9a4cd9f52cfc8a0dfbe801805554cb"}, - {file = "objprint-0.2.3.tar.gz", hash = "sha256:73d0ad5a7c3151fce634c8892e5c2a050ccae3b1a353bf1316f08b7854da863b"}, -] - -[[package]] -name = "orjson" -version = "3.9.14" -description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" -optional = false -python-versions = ">=3.8" -files = [ - {file = "orjson-3.9.14-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:793f6c9448ab6eb7d4974b4dde3f230345c08ca6c7995330fbceeb43a5c8aa5e"}, - {file = "orjson-3.9.14-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6bc7928d161840096adc956703494b5c0193ede887346f028216cac0af87500"}, - {file = "orjson-3.9.14-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:58b36f54da759602d8e2f7dad958752d453dfe2c7122767bc7f765e17dc59959"}, - {file = "orjson-3.9.14-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:abcda41ecdc950399c05eff761c3de91485d9a70d8227cb599ad3a66afe93bcc"}, - {file = "orjson-3.9.14-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df76ecd17b1b3627bddfd689faaf206380a1a38cc9f6c4075bd884eaedcf46c2"}, - {file = "orjson-3.9.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d450a8e0656efb5d0fcb062157b918ab02dcca73278975b4ee9ea49e2fcf5bd5"}, - {file = "orjson-3.9.14-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:95c03137b0cf66517c8baa65770507a756d3a89489d8ecf864ea92348e1beabe"}, - {file = "orjson-3.9.14-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:20837e10835c98973673406d6798e10f821e7744520633811a5a3d809762d8cc"}, - {file = "orjson-3.9.14-cp310-none-win32.whl", hash = "sha256:1f7b6f3ef10ae8e3558abb729873d033dbb5843507c66b1c0767e32502ba96bb"}, - {file = "orjson-3.9.14-cp310-none-win_amd64.whl", hash = "sha256:ea890e6dc1711aeec0a33b8520e395c2f3d59ead5b4351a788e06bf95fc7ba81"}, - {file = "orjson-3.9.14-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:c19009ff37f033c70acd04b636380379499dac2cba27ae7dfc24f304deabbc81"}, - {file = "orjson-3.9.14-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19cdea0664aec0b7f385be84986d4defd3334e9c3c799407686ee1c26f7b8251"}, - {file = "orjson-3.9.14-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:135d518f73787ce323b1a5e21fb854fe22258d7a8ae562b81a49d6c7f826f2a3"}, - {file = "orjson-3.9.14-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d2cf1d0557c61c75e18cf7d69fb689b77896e95553e212c0cc64cf2087944b84"}, - {file = "orjson-3.9.14-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7c11667421df2d8b18b021223505dcc3ee51be518d54e4dc49161ac88ac2b87"}, - {file = "orjson-3.9.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2eefc41ba42e75ed88bc396d8fe997beb20477f3e7efa000cd7a47eda452fbb2"}, - {file = "orjson-3.9.14-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:917311d6a64d1c327c0dfda1e41f3966a7fb72b11ca7aa2e7a68fcccc7db35d9"}, - {file = "orjson-3.9.14-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4dc1c132259b38d12c6587d190cd09cd76e3b5273ce71fe1372437b4cbc65f6f"}, - {file = "orjson-3.9.14-cp311-none-win32.whl", hash = "sha256:6f39a10408478f4c05736a74da63727a1ae0e83e3533d07b19443400fe8591ca"}, - {file = "orjson-3.9.14-cp311-none-win_amd64.whl", hash = "sha256:26280a7fcb62d8257f634c16acebc3bec626454f9ab13558bbf7883b9140760e"}, - {file = "orjson-3.9.14-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:08e722a8d06b13b67a51f247a24938d1a94b4b3862e40e0eef3b2e98c99cd04c"}, - {file = "orjson-3.9.14-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2591faa0c031cf3f57e5bce1461cfbd6160f3f66b5a72609a130924917cb07d"}, - {file = "orjson-3.9.14-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e2450d87dd7b4f277f4c5598faa8b49a0c197b91186c47a2c0b88e15531e4e3e"}, - {file = "orjson-3.9.14-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:90903d2908158a2c9077a06f11e27545de610af690fb178fd3ba6b32492d4d1c"}, - {file = "orjson-3.9.14-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ce6f095eef0026eae76fc212f20f786011ecf482fc7df2f4c272a8ae6dd7b1ef"}, - {file = "orjson-3.9.14-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:751250a31fef2bac05a2da2449aae7142075ea26139271f169af60456d8ad27a"}, - {file = "orjson-3.9.14-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9a1af21160a38ee8be3f4fcf24ee4b99e6184cadc7f915d599f073f478a94d2c"}, - {file = "orjson-3.9.14-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:449bf090b2aa4e019371d7511a6ea8a5a248139205c27d1834bb4b1e3c44d936"}, - {file = "orjson-3.9.14-cp312-none-win_amd64.whl", hash = "sha256:a603161318ff699784943e71f53899983b7dee571b4dd07c336437c9c5a272b0"}, - {file = "orjson-3.9.14-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:814f288c011efdf8f115c5ebcc1ab94b11da64b207722917e0ceb42f52ef30a3"}, - {file = "orjson-3.9.14-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a88cafb100af68af3b9b29b5ccd09fdf7a48c63327916c8c923a94c336d38dd3"}, - {file = "orjson-3.9.14-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ba3518b999f88882ade6686f1b71e207b52e23546e180499be5bbb63a2f9c6e6"}, - {file = "orjson-3.9.14-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:978f416bbff9da8d2091e3cf011c92da68b13f2c453dcc2e8109099b2a19d234"}, - {file = "orjson-3.9.14-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75fc593cf836f631153d0e21beaeb8d26e144445c73645889335c2247fcd71a0"}, - {file = "orjson-3.9.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d1528db3c7554f9d6eeb09df23cb80dd5177ec56eeb55cc5318826928de506"}, - {file = "orjson-3.9.14-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:7183cc68ee2113b19b0b8714221e5e3b07b3ba10ca2bb108d78fd49cefaae101"}, - {file = "orjson-3.9.14-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:df3266d54246cb56b8bb17fa908660d2a0f2e3f63fbc32451ffc1b1505051d07"}, - {file = "orjson-3.9.14-cp38-none-win32.whl", hash = "sha256:7913079b029e1b3501854c9a78ad938ed40d61fe09bebab3c93e60ff1301b189"}, - {file = "orjson-3.9.14-cp38-none-win_amd64.whl", hash = "sha256:29512eb925b620e5da2fd7585814485c67cc6ba4fe739a0a700c50467a8a8065"}, - {file = "orjson-3.9.14-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:5bf597530544db27a8d76aced49cfc817ee9503e0a4ebf0109cd70331e7bbe0c"}, - {file = "orjson-3.9.14-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac650d49366fa41fe702e054cb560171a8634e2865537e91f09a8d05ea5b1d37"}, - {file = "orjson-3.9.14-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:236230433a9a4968ab895140514c308fdf9f607cb8bee178a04372b771123860"}, - {file = "orjson-3.9.14-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3014ccbda9be0b1b5f8ea895121df7e6524496b3908f4397ff02e923bcd8f6dd"}, - {file = "orjson-3.9.14-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ac0c7eae7ad3a223bde690565442f8a3d620056bd01196f191af8be58a5248e1"}, - {file = "orjson-3.9.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fca33fdd0b38839b01912c57546d4f412ba7bfa0faf9bf7453432219aec2df07"}, - {file = "orjson-3.9.14-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f75823cc1674a840a151e999a7dfa0d86c911150dd6f951d0736ee9d383bf415"}, - {file = "orjson-3.9.14-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6f52ac2eb49e99e7373f62e2a68428c6946cda52ce89aa8fe9f890c7278e2d3a"}, - {file = "orjson-3.9.14-cp39-none-win32.whl", hash = "sha256:0572f174f50b673b7df78680fb52cd0087a8585a6d06d295a5f790568e1064c6"}, - {file = "orjson-3.9.14-cp39-none-win_amd64.whl", hash = "sha256:ab90c02cb264250b8a58cedcc72ed78a4a257d956c8d3c8bebe9751b818dfad8"}, - {file = "orjson-3.9.14.tar.gz", hash = "sha256:06fb40f8e49088ecaa02f1162581d39e2cf3fd9dbbfe411eb2284147c99bad79"}, -] - -[[package]] -name = "overrides" -version = "7.7.0" -description = "A decorator to automatically detect mismatch when overriding a method." -optional = false -python-versions = ">=3.6" -files = [ - {file = "overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49"}, - {file = "overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a"}, -] - -[[package]] -name = "packaging" -version = "23.2" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.7" -files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, -] - -[[package]] -name = "pandas" -version = "2.1.4" -description = "Powerful data structures for data analysis, time series, and statistics" -optional = false -python-versions = ">=3.9" -files = [ - {file = "pandas-2.1.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bdec823dc6ec53f7a6339a0e34c68b144a7a1fd28d80c260534c39c62c5bf8c9"}, - {file = "pandas-2.1.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:294d96cfaf28d688f30c918a765ea2ae2e0e71d3536754f4b6de0ea4a496d034"}, - {file = "pandas-2.1.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b728fb8deba8905b319f96447a27033969f3ea1fea09d07d296c9030ab2ed1d"}, - {file = "pandas-2.1.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00028e6737c594feac3c2df15636d73ace46b8314d236100b57ed7e4b9ebe8d9"}, - {file = "pandas-2.1.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:426dc0f1b187523c4db06f96fb5c8d1a845e259c99bda74f7de97bd8a3bb3139"}, - {file = "pandas-2.1.4-cp310-cp310-win_amd64.whl", hash = "sha256:f237e6ca6421265643608813ce9793610ad09b40154a3344a088159590469e46"}, - {file = "pandas-2.1.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b7d852d16c270e4331f6f59b3e9aa23f935f5c4b0ed2d0bc77637a8890a5d092"}, - {file = "pandas-2.1.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd7d5f2f54f78164b3d7a40f33bf79a74cdee72c31affec86bfcabe7e0789821"}, - {file = "pandas-2.1.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0aa6e92e639da0d6e2017d9ccff563222f4eb31e4b2c3cf32a2a392fc3103c0d"}, - {file = "pandas-2.1.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d797591b6846b9db79e65dc2d0d48e61f7db8d10b2a9480b4e3faaddc421a171"}, - {file = "pandas-2.1.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d2d3e7b00f703aea3945995ee63375c61b2e6aa5aa7871c5d622870e5e137623"}, - {file = "pandas-2.1.4-cp311-cp311-win_amd64.whl", hash = "sha256:dc9bf7ade01143cddc0074aa6995edd05323974e6e40d9dbde081021ded8510e"}, - {file = "pandas-2.1.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:482d5076e1791777e1571f2e2d789e940dedd927325cc3cb6d0800c6304082f6"}, - {file = "pandas-2.1.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8a706cfe7955c4ca59af8c7a0517370eafbd98593155b48f10f9811da440248b"}, - {file = "pandas-2.1.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0513a132a15977b4a5b89aabd304647919bc2169eac4c8536afb29c07c23540"}, - {file = "pandas-2.1.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9f17f2b6fc076b2a0078862547595d66244db0f41bf79fc5f64a5c4d635bead"}, - {file = "pandas-2.1.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:45d63d2a9b1b37fa6c84a68ba2422dc9ed018bdaa668c7f47566a01188ceeec1"}, - {file = "pandas-2.1.4-cp312-cp312-win_amd64.whl", hash = "sha256:f69b0c9bb174a2342818d3e2778584e18c740d56857fc5cdb944ec8bbe4082cf"}, - {file = "pandas-2.1.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3f06bda01a143020bad20f7a85dd5f4a1600112145f126bc9e3e42077c24ef34"}, - {file = "pandas-2.1.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab5796839eb1fd62a39eec2916d3e979ec3130509930fea17fe6f81e18108f6a"}, - {file = "pandas-2.1.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edbaf9e8d3a63a9276d707b4d25930a262341bca9874fcb22eff5e3da5394732"}, - {file = "pandas-2.1.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ebfd771110b50055712b3b711b51bee5d50135429364d0498e1213a7adc2be8"}, - {file = "pandas-2.1.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8ea107e0be2aba1da619cc6ba3f999b2bfc9669a83554b1904ce3dd9507f0860"}, - {file = "pandas-2.1.4-cp39-cp39-win_amd64.whl", hash = "sha256:d65148b14788b3758daf57bf42725caa536575da2b64df9964c563b015230984"}, - {file = "pandas-2.1.4.tar.gz", hash = "sha256:fcb68203c833cc735321512e13861358079a96c174a61f5116a1de89c58c0ef7"}, -] - -[package.dependencies] -numpy = [ - {version = ">=1.23.2,<2", markers = "python_version == \"3.11\""}, - {version = ">=1.26.0,<2", markers = "python_version >= \"3.12\""}, - {version = ">=1.22.4,<2", markers = "python_version < \"3.11\""}, -] -python-dateutil = ">=2.8.2" -pytz = ">=2020.1" -tzdata = ">=2022.1" - -[package.extras] -all = ["PyQt5 (>=5.15.6)", "SQLAlchemy (>=1.4.36)", "beautifulsoup4 (>=4.11.1)", "bottleneck (>=1.3.4)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=0.8.1)", "fsspec (>=2022.05.0)", "gcsfs (>=2022.05.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.8.0)", "matplotlib (>=3.6.1)", "numba (>=0.55.2)", "numexpr (>=2.8.0)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pandas-gbq (>=0.17.5)", "psycopg2 (>=2.9.3)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.5)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "pyxlsb (>=1.0.9)", "qtpy (>=2.2.0)", "s3fs (>=2022.05.0)", "scipy (>=1.8.1)", "tables (>=3.7.0)", "tabulate (>=0.8.10)", "xarray (>=2022.03.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)", "zstandard (>=0.17.0)"] -aws = ["s3fs (>=2022.05.0)"] -clipboard = ["PyQt5 (>=5.15.6)", "qtpy (>=2.2.0)"] -compression = ["zstandard (>=0.17.0)"] -computation = ["scipy (>=1.8.1)", "xarray (>=2022.03.0)"] -consortium-standard = ["dataframe-api-compat (>=0.1.7)"] -excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pyxlsb (>=1.0.9)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)"] -feather = ["pyarrow (>=7.0.0)"] -fss = ["fsspec (>=2022.05.0)"] -gcp = ["gcsfs (>=2022.05.0)", "pandas-gbq (>=0.17.5)"] -hdf5 = ["tables (>=3.7.0)"] -html = ["beautifulsoup4 (>=4.11.1)", "html5lib (>=1.1)", "lxml (>=4.8.0)"] -mysql = ["SQLAlchemy (>=1.4.36)", "pymysql (>=1.0.2)"] -output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.8.10)"] -parquet = ["pyarrow (>=7.0.0)"] -performance = ["bottleneck (>=1.3.4)", "numba (>=0.55.2)", "numexpr (>=2.8.0)"] -plot = ["matplotlib (>=3.6.1)"] -postgresql = ["SQLAlchemy (>=1.4.36)", "psycopg2 (>=2.9.3)"] -spss = ["pyreadstat (>=1.1.5)"] -sql-other = ["SQLAlchemy (>=1.4.36)"] -test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] -xml = ["lxml (>=4.8.0)"] - -[[package]] -name = "pandas-stubs" -version = "2.1.4.231227" -description = "Type annotations for pandas" -optional = false -python-versions = ">=3.9" -files = [ - {file = "pandas_stubs-2.1.4.231227-py3-none-any.whl", hash = "sha256:211fc23e6ae87073bdf41dbf362c4a4d85e1e3477cb078dbac3da6c7fdaefba8"}, - {file = "pandas_stubs-2.1.4.231227.tar.gz", hash = "sha256:3ea29ef001e9e44985f5ebde02d4413f94891ef6ec7e5056fb07d125be796c23"}, -] - -[package.dependencies] -numpy = {version = ">=1.26.0", markers = "python_version < \"3.13\""} -types-pytz = ">=2022.1.1" - -[[package]] -name = "pdoc" -version = "14.4.0" -description = "API Documentation for Python Projects" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pdoc-14.4.0-py3-none-any.whl", hash = "sha256:6ea4fe07620b1f7601e2708a307a257636ec206e20b5611640b30f2e3cab47d6"}, - {file = "pdoc-14.4.0.tar.gz", hash = "sha256:c92edc425429ccbe287ace2a027953c24f13de53eab484c1a6d31ca72dd2fda9"}, -] - -[package.dependencies] -Jinja2 = ">=2.11.0" -MarkupSafe = "*" -pygments = ">=2.12.0" - -[package.extras] -dev = ["hypothesis", "mypy", "pdoc-pyo3-sample-library (==1.0.11)", "pygments (>=2.14.0)", "pytest", "pytest-cov", "pytest-timeout", "ruff", "tox", "types-pygments"] - -[[package]] -name = "pendulum" -version = "2.1.2" -description = "Python datetimes made easy" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, - {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, - {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, - {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, - {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, - {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, - {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, - {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, - {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, - {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, - {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, - {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, - {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, - {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, - {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, - {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, - {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, - {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, - {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, - {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, - {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, -] - -[package.dependencies] -python-dateutil = ">=2.6,<3.0" -pytzdata = ">=2020.1" - -[[package]] -name = "platformdirs" -version = "3.11.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -optional = false -python-versions = ">=3.7" -files = [ - {file = "platformdirs-3.11.0-py3-none-any.whl", hash = "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"}, - {file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"}, -] - -[package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] - -[[package]] -name = "pluggy" -version = "1.4.0" -description = "plugin and hook calling mechanisms for python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, - {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, -] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "proto-plus" -version = "1.23.0" -description = "Beautiful, Pythonic protocol buffers." -optional = false -python-versions = ">=3.6" -files = [ - {file = "proto-plus-1.23.0.tar.gz", hash = "sha256:89075171ef11988b3fa157f5dbd8b9cf09d65fffee97e29ce403cd8defba19d2"}, - {file = "proto_plus-1.23.0-py3-none-any.whl", hash = "sha256:a829c79e619e1cf632de091013a4173deed13a55f326ef84f05af6f50ff4c82c"}, -] - -[package.dependencies] -protobuf = ">=3.19.0,<5.0.0dev" - -[package.extras] -testing = ["google-api-core[grpc] (>=1.31.5)"] - -[[package]] -name = "protobuf" -version = "4.25.2" -description = "" -optional = false -python-versions = ">=3.8" -files = [ - {file = "protobuf-4.25.2-cp310-abi3-win32.whl", hash = "sha256:b50c949608682b12efb0b2717f53256f03636af5f60ac0c1d900df6213910fd6"}, - {file = "protobuf-4.25.2-cp310-abi3-win_amd64.whl", hash = "sha256:8f62574857ee1de9f770baf04dde4165e30b15ad97ba03ceac65f760ff018ac9"}, - {file = "protobuf-4.25.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:2db9f8fa64fbdcdc93767d3cf81e0f2aef176284071507e3ede160811502fd3d"}, - {file = "protobuf-4.25.2-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:10894a2885b7175d3984f2be8d9850712c57d5e7587a2410720af8be56cdaf62"}, - {file = "protobuf-4.25.2-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:fc381d1dd0516343f1440019cedf08a7405f791cd49eef4ae1ea06520bc1c020"}, - {file = "protobuf-4.25.2-cp38-cp38-win32.whl", hash = "sha256:33a1aeef4b1927431d1be780e87b641e322b88d654203a9e9d93f218ee359e61"}, - {file = "protobuf-4.25.2-cp38-cp38-win_amd64.whl", hash = "sha256:47f3de503fe7c1245f6f03bea7e8d3ec11c6c4a2ea9ef910e3221c8a15516d62"}, - {file = "protobuf-4.25.2-cp39-cp39-win32.whl", hash = "sha256:5e5c933b4c30a988b52e0b7c02641760a5ba046edc5e43d3b94a74c9fc57c1b3"}, - {file = "protobuf-4.25.2-cp39-cp39-win_amd64.whl", hash = "sha256:d66a769b8d687df9024f2985d5137a337f957a0916cf5464d1513eee96a63ff0"}, - {file = "protobuf-4.25.2-py3-none-any.whl", hash = "sha256:a8b7a98d4ce823303145bf3c1a8bdb0f2f4642a414b196f04ad9853ed0c8f830"}, - {file = "protobuf-4.25.2.tar.gz", hash = "sha256:fe599e175cb347efc8ee524bcd4b902d11f7262c0e569ececcb89995c15f0a5e"}, -] - -[[package]] -name = "psycopg2-binary" -version = "2.9.9" -description = "psycopg2 - Python-PostgreSQL Database Adapter" -optional = false -python-versions = ">=3.7" -files = [ - {file = "psycopg2-binary-2.9.9.tar.gz", hash = "sha256:7f01846810177d829c7692f1f5ada8096762d9172af1b1a28d4ab5b77c923c1c"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c2470da5418b76232f02a2fcd2229537bb2d5a7096674ce61859c3229f2eb202"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c6af2a6d4b7ee9615cbb162b0738f6e1fd1f5c3eda7e5da17861eacf4c717ea7"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75723c3c0fbbf34350b46a3199eb50638ab22a0228f93fb472ef4d9becc2382b"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83791a65b51ad6ee6cf0845634859d69a038ea9b03d7b26e703f94c7e93dbcf9"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0ef4854e82c09e84cc63084a9e4ccd6d9b154f1dbdd283efb92ecd0b5e2b8c84"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed1184ab8f113e8d660ce49a56390ca181f2981066acc27cf637d5c1e10ce46e"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d2997c458c690ec2bc6b0b7ecbafd02b029b7b4283078d3b32a852a7ce3ddd98"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b58b4710c7f4161b5e9dcbe73bb7c62d65670a87df7bcce9e1faaad43e715245"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0c009475ee389757e6e34611d75f6e4f05f0cf5ebb76c6037508318e1a1e0d7e"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8dbf6d1bc73f1d04ec1734bae3b4fb0ee3cb2a493d35ede9badbeb901fb40f6f"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-win32.whl", hash = "sha256:3f78fd71c4f43a13d342be74ebbc0666fe1f555b8837eb113cb7416856c79682"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:876801744b0dee379e4e3c38b76fc89f88834bb15bf92ee07d94acd06ec890a0"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ee825e70b1a209475622f7f7b776785bd68f34af6e7a46e2e42f27b659b5bc26"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1ea665f8ce695bcc37a90ee52de7a7980be5161375d42a0b6c6abedbf0d81f0f"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:143072318f793f53819048fdfe30c321890af0c3ec7cb1dfc9cc87aa88241de2"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c332c8d69fb64979ebf76613c66b985414927a40f8defa16cf1bc028b7b0a7b0"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7fc5a5acafb7d6ccca13bfa8c90f8c51f13d8fb87d95656d3950f0158d3ce53"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:977646e05232579d2e7b9c59e21dbe5261f403a88417f6a6512e70d3f8a046be"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b6356793b84728d9d50ead16ab43c187673831e9d4019013f1402c41b1db9b27"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bc7bb56d04601d443f24094e9e31ae6deec9ccb23581f75343feebaf30423359"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:77853062a2c45be16fd6b8d6de2a99278ee1d985a7bd8b103e97e41c034006d2"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:78151aa3ec21dccd5cdef6c74c3e73386dcdfaf19bced944169697d7ac7482fc"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0605eaed3eb239e87df0d5e3c6489daae3f7388d455d0c0b4df899519c6a38d"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e6f98446430fdf41bd36d4faa6cb409f5140c1c2cf58ce0bbdaf16af7d3f119"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c77e3d1862452565875eb31bdb45ac62502feabbd53429fdc39a1cc341d681ba"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-win32.whl", hash = "sha256:64cf30263844fa208851ebb13b0732ce674d8ec6a0c86a4e160495d299ba3c93"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:81ff62668af011f9a48787564ab7eded4e9fb17a4a6a74af5ffa6a457400d2ab"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8359bf4791968c5a78c56103702000105501adb557f3cf772b2c207284273984"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:275ff571376626195ab95a746e6a04c7df8ea34638b99fc11160de91f2fef503"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f9b5571d33660d5009a8b3c25dc1db560206e2d2f89d3df1cb32d72c0d117d52"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:420f9bbf47a02616e8554e825208cb947969451978dceb77f95ad09c37791dae"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:4154ad09dac630a0f13f37b583eae260c6aa885d67dfbccb5b02c33f31a6d420"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a148c5d507bb9b4f2030a2025c545fccb0e1ef317393eaba42e7eabd28eb6041"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:68fc1f1ba168724771e38bee37d940d2865cb0f562380a1fb1ffb428b75cb692"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:281309265596e388ef483250db3640e5f414168c5a67e9c665cafce9492eda2f"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:60989127da422b74a04345096c10d416c2b41bd7bf2a380eb541059e4e999980"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:246b123cc54bb5361588acc54218c8c9fb73068bf227a4a531d8ed56fa3ca7d6"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34eccd14566f8fe14b2b95bb13b11572f7c7d5c36da61caf414d23b91fcc5d94"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18d0ef97766055fec15b5de2c06dd8e7654705ce3e5e5eed3b6651a1d2a9a152"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d3f82c171b4ccd83bbaf35aa05e44e690113bd4f3b7b6cc54d2219b132f3ae55"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ead20f7913a9c1e894aebe47cccf9dc834e1618b7aa96155d2091a626e59c972"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ca49a8119c6cbd77375ae303b0cfd8c11f011abbbd64601167ecca18a87e7cdd"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:323ba25b92454adb36fa425dc5cf6f8f19f78948cbad2e7bc6cdf7b0d7982e59"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:1236ed0952fbd919c100bc839eaa4a39ebc397ed1c08a97fc45fee2a595aa1b3"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:729177eaf0aefca0994ce4cffe96ad3c75e377c7b6f4efa59ebf003b6d398716"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-win32.whl", hash = "sha256:804d99b24ad523a1fe18cc707bf741670332f7c7412e9d49cb5eab67e886b9b5"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:a6cdcc3ede532f4a4b96000b6362099591ab4a3e913d70bcbac2b56c872446f7"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:72dffbd8b4194858d0941062a9766f8297e8868e1dd07a7b36212aaa90f49472"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:30dcc86377618a4c8f3b72418df92e77be4254d8f89f14b8e8f57d6d43603c0f"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31a34c508c003a4347d389a9e6fcc2307cc2150eb516462a7a17512130de109e"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15208be1c50b99203fe88d15695f22a5bed95ab3f84354c494bcb1d08557df67"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1873aade94b74715be2246321c8650cabf5a0d098a95bab81145ffffa4c13876"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a58c98a7e9c021f357348867f537017057c2ed7f77337fd914d0bedb35dace7"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4686818798f9194d03c9129a4d9a702d9e113a89cb03bffe08c6cf799e053291"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ebdc36bea43063116f0486869652cb2ed7032dbc59fbcb4445c4862b5c1ecf7f"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:ca08decd2697fdea0aea364b370b1249d47336aec935f87b8bbfd7da5b2ee9c1"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ac05fb791acf5e1a3e39402641827780fe44d27e72567a000412c648a85ba860"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-win32.whl", hash = "sha256:9dba73be7305b399924709b91682299794887cbbd88e38226ed9f6712eabee90"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:f7ae5d65ccfbebdfa761585228eb4d0df3a8b15cfb53bd953e713e09fbb12957"}, -] - -[[package]] -name = "pyarrow" -version = "14.0.2" -description = "Python library for Apache Arrow" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pyarrow-14.0.2-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:ba9fe808596c5dbd08b3aeffe901e5f81095baaa28e7d5118e01354c64f22807"}, - {file = "pyarrow-14.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:22a768987a16bb46220cef490c56c671993fbee8fd0475febac0b3e16b00a10e"}, - {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dbba05e98f247f17e64303eb876f4a80fcd32f73c7e9ad975a83834d81f3fda"}, - {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a898d134d00b1eca04998e9d286e19653f9d0fcb99587310cd10270907452a6b"}, - {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:87e879323f256cb04267bb365add7208f302df942eb943c93a9dfeb8f44840b1"}, - {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:76fc257559404ea5f1306ea9a3ff0541bf996ff3f7b9209fc517b5e83811fa8e"}, - {file = "pyarrow-14.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0c4a18e00f3a32398a7f31da47fefcd7a927545b396e1f15d0c85c2f2c778cd"}, - {file = "pyarrow-14.0.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:87482af32e5a0c0cce2d12eb3c039dd1d853bd905b04f3f953f147c7a196915b"}, - {file = "pyarrow-14.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:059bd8f12a70519e46cd64e1ba40e97eae55e0cbe1695edd95384653d7626b23"}, - {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f16111f9ab27e60b391c5f6d197510e3ad6654e73857b4e394861fc79c37200"}, - {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06ff1264fe4448e8d02073f5ce45a9f934c0f3db0a04460d0b01ff28befc3696"}, - {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:6dd4f4b472ccf4042f1eab77e6c8bce574543f54d2135c7e396f413046397d5a"}, - {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:32356bfb58b36059773f49e4e214996888eeea3a08893e7dbde44753799b2a02"}, - {file = "pyarrow-14.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:52809ee69d4dbf2241c0e4366d949ba035cbcf48409bf404f071f624ed313a2b"}, - {file = "pyarrow-14.0.2-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:c87824a5ac52be210d32906c715f4ed7053d0180c1060ae3ff9b7e560f53f944"}, - {file = "pyarrow-14.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a25eb2421a58e861f6ca91f43339d215476f4fe159eca603c55950c14f378cc5"}, - {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c1da70d668af5620b8ba0a23f229030a4cd6c5f24a616a146f30d2386fec422"}, - {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2cc61593c8e66194c7cdfae594503e91b926a228fba40b5cf25cc593563bcd07"}, - {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:78ea56f62fb7c0ae8ecb9afdd7893e3a7dbeb0b04106f5c08dbb23f9c0157591"}, - {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:37c233ddbce0c67a76c0985612fef27c0c92aef9413cf5aa56952f359fcb7379"}, - {file = "pyarrow-14.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:e4b123ad0f6add92de898214d404e488167b87b5dd86e9a434126bc2b7a5578d"}, - {file = "pyarrow-14.0.2-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:e354fba8490de258be7687f341bc04aba181fc8aa1f71e4584f9890d9cb2dec2"}, - {file = "pyarrow-14.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:20e003a23a13da963f43e2b432483fdd8c38dc8882cd145f09f21792e1cf22a1"}, - {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc0de7575e841f1595ac07e5bc631084fd06ca8b03c0f2ecece733d23cd5102a"}, - {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66e986dc859712acb0bd45601229021f3ffcdfc49044b64c6d071aaf4fa49e98"}, - {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:f7d029f20ef56673a9730766023459ece397a05001f4e4d13805111d7c2108c0"}, - {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:209bac546942b0d8edc8debda248364f7f668e4aad4741bae58e67d40e5fcf75"}, - {file = "pyarrow-14.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:1e6987c5274fb87d66bb36816afb6f65707546b3c45c44c28e3c4133c010a881"}, - {file = "pyarrow-14.0.2-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:a01d0052d2a294a5f56cc1862933014e696aa08cc7b620e8c0cce5a5d362e976"}, - {file = "pyarrow-14.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a51fee3a7db4d37f8cda3ea96f32530620d43b0489d169b285d774da48ca9785"}, - {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64df2bf1ef2ef14cee531e2dfe03dd924017650ffaa6f9513d7a1bb291e59c15"}, - {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c0fa3bfdb0305ffe09810f9d3e2e50a2787e3a07063001dcd7adae0cee3601a"}, - {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c65bf4fd06584f058420238bc47a316e80dda01ec0dfb3044594128a6c2db794"}, - {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:63ac901baec9369d6aae1cbe6cca11178fb018a8d45068aaf5bb54f94804a866"}, - {file = "pyarrow-14.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:75ee0efe7a87a687ae303d63037d08a48ef9ea0127064df18267252cfe2e9541"}, - {file = "pyarrow-14.0.2.tar.gz", hash = "sha256:36cef6ba12b499d864d1def3e990f97949e0b79400d08b7cf74504ffbd3eb025"}, -] - -[package.dependencies] -numpy = ">=1.16.6" - -[[package]] -name = "pyarrow-stubs" -version = "10.0.1.7" -description = "Type annotations for pyarrow" -optional = false -python-versions = ">=3.7,<4.0" -files = [ - {file = "pyarrow_stubs-10.0.1.7-py3-none-any.whl", hash = "sha256:cccc7a46eddeea4e3cb85330eb8972c116a615da6188b8ae1f7a44cb724b21ac"}, -] - -[[package]] -name = "pyasn1" -version = "0.5.1" -description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" -files = [ - {file = "pyasn1-0.5.1-py2.py3-none-any.whl", hash = "sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58"}, - {file = "pyasn1-0.5.1.tar.gz", hash = "sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c"}, -] - -[[package]] -name = "pyasn1-modules" -version = "0.3.0" -description = "A collection of ASN.1-based protocols modules" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" -files = [ - {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"}, - {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"}, -] - -[package.dependencies] -pyasn1 = ">=0.4.6,<0.6.0" - -[[package]] -name = "pycparser" -version = "2.21" -description = "C parser in Python" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, - {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, -] - -[[package]] -name = "pydantic" -version = "1.10.14" -description = "Data validation and settings management using python type hints" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, - {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, - {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, - {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, - {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, - {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, - {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, - {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, - {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, - {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, - {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, - {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, - {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, - {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, - {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, - {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, - {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, - {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, - {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, - {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, - {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, - {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, - {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, - {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, - {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, - {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, - {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, - {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, - {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, - {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, - {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, - {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, - {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, - {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, - {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, - {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, -] - -[package.dependencies] -typing-extensions = ">=4.2.0" - -[package.extras] -dotenv = ["python-dotenv (>=0.10.4)"] -email = ["email-validator (>=1.0.3)"] - -[[package]] -name = "pygments" -version = "2.17.2" -description = "Pygments is a syntax highlighting package written in Python." -optional = false -python-versions = ">=3.7" -files = [ - {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, - {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, -] - -[package.extras] -plugins = ["importlib-metadata"] -windows-terminal = ["colorama (>=0.4.6)"] - -[[package]] -name = "pyjwt" -version = "2.8.0" -description = "JSON Web Token implementation in Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, - {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, -] - -[package.extras] -crypto = ["cryptography (>=3.4.0)"] -dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] - -[[package]] -name = "pyopenssl" -version = "23.3.0" -description = "Python wrapper module around the OpenSSL library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pyOpenSSL-23.3.0-py3-none-any.whl", hash = "sha256:6756834481d9ed5470f4a9393455154bc92fe7a64b7bc6ee2c804e78c52099b2"}, - {file = "pyOpenSSL-23.3.0.tar.gz", hash = "sha256:6b2cba5cc46e822750ec3e5a81ee12819850b11303630d575e98108a079c2b12"}, -] - -[package.dependencies] -cryptography = ">=41.0.5,<42" - -[package.extras] -docs = ["sphinx (!=5.2.0,!=5.2.0.post0,!=7.2.5)", "sphinx-rtd-theme"] -test = ["flaky", "pretend", "pytest (>=3.0.1)"] - -[[package]] -name = "pyrate-limiter" -version = "3.1.1" -description = "Python Rate-Limiter using Leaky-Bucket Algorithm" -optional = false -python-versions = ">=3.8,<4.0" -files = [ - {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, - {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, -] - -[package.extras] -all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] -docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] - -[[package]] -name = "pyrsistent" -version = "0.20.0" -description = "Persistent/Functional/Immutable data structures" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, - {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, - {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, - {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, - {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, - {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, - {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, - {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, - {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, - {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, - {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, - {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, - {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, - {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, - {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, - {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, - {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, - {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, - {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, - {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, - {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, - {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, - {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, - {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, - {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, - {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, - {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, - {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, - {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, - {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, - {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, - {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, -] - -[[package]] -name = "pytest" -version = "7.4.4" -description = "pytest: simple powerful testing with Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, - {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} - -[package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] - -[[package]] -name = "pytest-docker" -version = "2.2.0" -description = "Simple pytest fixtures for Docker and Docker Compose based tests" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pytest-docker-2.2.0.tar.gz", hash = "sha256:b083fd2ae69212369390033c22228d3263555a5f3b4bef87b74160e07218f377"}, - {file = "pytest_docker-2.2.0-py3-none-any.whl", hash = "sha256:8ee9c9742d58ac079c81c03635bb830881f7f4d529f0f53f4ba2c89ffc9c7137"}, -] - -[package.dependencies] -attrs = ">=19.2.0" -pytest = ">=4.0,<8.0" - -[package.extras] -docker-compose-v1 = ["docker-compose (>=1.27.3,<2.0)"] -tests = ["pytest-pycodestyle (>=2.0.0,<3.0)", "pytest-pylint (>=0.14.1,<1.0)", "requests (>=2.22.0,<3.0)"] - -[[package]] -name = "pytest-mypy" -version = "0.10.3" -description = "Mypy static type checker plugin for Pytest" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pytest-mypy-0.10.3.tar.gz", hash = "sha256:f8458f642323f13a2ca3e2e61509f7767966b527b4d8adccd5032c3e7b4fd3db"}, - {file = "pytest_mypy-0.10.3-py3-none-any.whl", hash = "sha256:7638d0d3906848fc1810cb2f5cc7fceb4cc5c98524aafcac58f28620e3102053"}, -] - -[package.dependencies] -attrs = ">=19.0" -filelock = ">=3.0" -mypy = [ - {version = ">=0.900", markers = "python_version >= \"3.11\""}, - {version = ">=0.780", markers = "python_version >= \"3.9\" and python_version < \"3.11\""}, -] -pytest = [ - {version = ">=6.2", markers = "python_version >= \"3.10\""}, - {version = ">=4.6", markers = "python_version >= \"3.6\" and python_version < \"3.10\""}, -] - -[[package]] -name = "python-dateutil" -version = "2.8.2" -description = "Extensions to the standard Python datetime module" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, -] - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "python-dotenv" -version = "1.0.1" -description = "Read key-value pairs from a .env file and set them as environment variables" -optional = false -python-versions = ">=3.8" -files = [ - {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, - {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, -] - -[package.extras] -cli = ["click (>=5.0)"] - -[[package]] -name = "python-ulid" -version = "2.2.0" -description = "Universally unique lexicographically sortable identifier" -optional = false -python-versions = ">=3.9" -files = [ - {file = "python_ulid-2.2.0-py3-none-any.whl", hash = "sha256:ec2e69292c0b7c338a07df5e15b05270be6823675c103383e74d1d531945eab5"}, - {file = "python_ulid-2.2.0.tar.gz", hash = "sha256:9ec777177d396880d94be49ac7eb4ae2cd4a7474448bfdbfe911537add970aeb"}, -] - -[[package]] -name = "pytz" -version = "2024.1" -description = "World timezone definitions, modern and historical" -optional = false -python-versions = "*" -files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, -] - -[[package]] -name = "pytzdata" -version = "2020.1" -description = "The Olson timezone database for Python." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, - {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, -] - -[[package]] -name = "pywin32" -version = "306" -description = "Python for Window Extensions" -optional = false -python-versions = "*" -files = [ - {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, - {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, - {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, - {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, - {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, - {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, - {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, - {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, - {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, - {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, - {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, - {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, - {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, - {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, -] - -[[package]] -name = "pyyaml" -version = "6.0.1" -description = "YAML parser and emitter for Python" -optional = false -python-versions = ">=3.6" -files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, -] - -[[package]] -name = "referencing" -version = "0.33.0" -description = "JSON Referencing + Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "referencing-0.33.0-py3-none-any.whl", hash = "sha256:39240f2ecc770258f28b642dd47fd74bc8b02484de54e1882b74b35ebd779bd5"}, - {file = "referencing-0.33.0.tar.gz", hash = "sha256:c775fedf74bc0f9189c2a3be1c12fd03e8c23f4d371dce795df44e06c5b412f7"}, -] - -[package.dependencies] -attrs = ">=22.2.0" -rpds-py = ">=0.7.0" - -[[package]] -name = "requests" -version = "2.31.0" -description = "Python HTTP for Humans." -optional = false -python-versions = ">=3.7" -files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "requests-cache" -version = "1.1.1" -description = "A persistent cache for python requests" -optional = false -python-versions = ">=3.7,<4.0" -files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, -] - -[package.dependencies] -attrs = ">=21.2" -cattrs = ">=22.2" -platformdirs = ">=2.5" -requests = ">=2.22" -url-normalize = ">=1.4" -urllib3 = ">=1.25.5" - -[package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] -bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] -dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] -json = ["ujson (>=5.4)"] -mongodb = ["pymongo (>=3)"] -redis = ["redis (>=3)"] -security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] - -[[package]] -name = "rich" -version = "13.7.0" -description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "rich-13.7.0-py3-none-any.whl", hash = "sha256:6da14c108c4866ee9520bbffa71f6fe3962e193b7da68720583850cd4548e235"}, - {file = "rich-13.7.0.tar.gz", hash = "sha256:5cb5123b5cf9ee70584244246816e9114227e0b98ad9176eede6ad54bf5403fa"}, -] - -[package.dependencies] -markdown-it-py = ">=2.2.0" -pygments = ">=2.13.0,<3.0.0" - -[package.extras] -jupyter = ["ipywidgets (>=7.5.1,<9)"] - -[[package]] -name = "rpds-py" -version = "0.18.0" -description = "Python bindings to Rust's persistent data structures (rpds)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "rpds_py-0.18.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:5b4e7d8d6c9b2e8ee2d55c90b59c707ca59bc30058269b3db7b1f8df5763557e"}, - {file = "rpds_py-0.18.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c463ed05f9dfb9baebef68048aed8dcdc94411e4bf3d33a39ba97e271624f8f7"}, - {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01e36a39af54a30f28b73096dd39b6802eddd04c90dbe161c1b8dbe22353189f"}, - {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d62dec4976954a23d7f91f2f4530852b0c7608116c257833922a896101336c51"}, - {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd18772815d5f008fa03d2b9a681ae38d5ae9f0e599f7dda233c439fcaa00d40"}, - {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:923d39efa3cfb7279a0327e337a7958bff00cc447fd07a25cddb0a1cc9a6d2da"}, - {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39514da80f971362f9267c600b6d459bfbbc549cffc2cef8e47474fddc9b45b1"}, - {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a34d557a42aa28bd5c48a023c570219ba2593bcbbb8dc1b98d8cf5d529ab1434"}, - {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:93df1de2f7f7239dc9cc5a4a12408ee1598725036bd2dedadc14d94525192fc3"}, - {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:34b18ba135c687f4dac449aa5157d36e2cbb7c03cbea4ddbd88604e076aa836e"}, - {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c0b5dcf9193625afd8ecc92312d6ed78781c46ecbf39af9ad4681fc9f464af88"}, - {file = "rpds_py-0.18.0-cp310-none-win32.whl", hash = "sha256:c4325ff0442a12113a6379af66978c3fe562f846763287ef66bdc1d57925d337"}, - {file = "rpds_py-0.18.0-cp310-none-win_amd64.whl", hash = "sha256:7223a2a5fe0d217e60a60cdae28d6949140dde9c3bcc714063c5b463065e3d66"}, - {file = "rpds_py-0.18.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3a96e0c6a41dcdba3a0a581bbf6c44bb863f27c541547fb4b9711fd8cf0ffad4"}, - {file = "rpds_py-0.18.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30f43887bbae0d49113cbaab729a112251a940e9b274536613097ab8b4899cf6"}, - {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fcb25daa9219b4cf3a0ab24b0eb9a5cc8949ed4dc72acb8fa16b7e1681aa3c58"}, - {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d68c93e381010662ab873fea609bf6c0f428b6d0bb00f2c6939782e0818d37bf"}, - {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b34b7aa8b261c1dbf7720b5d6f01f38243e9b9daf7e6b8bc1fd4657000062f2c"}, - {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e6d75ab12b0bbab7215e5d40f1e5b738aa539598db27ef83b2ec46747df90e1"}, - {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b8612cd233543a3781bc659c731b9d607de65890085098986dfd573fc2befe5"}, - {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aec493917dd45e3c69d00a8874e7cbed844efd935595ef78a0f25f14312e33c6"}, - {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:661d25cbffaf8cc42e971dd570d87cb29a665f49f4abe1f9e76be9a5182c4688"}, - {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1df3659d26f539ac74fb3b0c481cdf9d725386e3552c6fa2974f4d33d78e544b"}, - {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a1ce3ba137ed54f83e56fb983a5859a27d43a40188ba798993812fed73c70836"}, - {file = "rpds_py-0.18.0-cp311-none-win32.whl", hash = "sha256:69e64831e22a6b377772e7fb337533c365085b31619005802a79242fee620bc1"}, - {file = "rpds_py-0.18.0-cp311-none-win_amd64.whl", hash = "sha256:998e33ad22dc7ec7e030b3df701c43630b5bc0d8fbc2267653577e3fec279afa"}, - {file = "rpds_py-0.18.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7f2facbd386dd60cbbf1a794181e6aa0bd429bd78bfdf775436020172e2a23f0"}, - {file = "rpds_py-0.18.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1d9a5be316c15ffb2b3c405c4ff14448c36b4435be062a7f578ccd8b01f0c4d8"}, - {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd5bf1af8efe569654bbef5a3e0a56eca45f87cfcffab31dd8dde70da5982475"}, - {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5417558f6887e9b6b65b4527232553c139b57ec42c64570569b155262ac0754f"}, - {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:56a737287efecafc16f6d067c2ea0117abadcd078d58721f967952db329a3e5c"}, - {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8f03bccbd8586e9dd37219bce4d4e0d3ab492e6b3b533e973fa08a112cb2ffc9"}, - {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4457a94da0d5c53dc4b3e4de1158bdab077db23c53232f37a3cb7afdb053a4e3"}, - {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0ab39c1ba9023914297dd88ec3b3b3c3f33671baeb6acf82ad7ce883f6e8e157"}, - {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9d54553c1136b50fd12cc17e5b11ad07374c316df307e4cfd6441bea5fb68496"}, - {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0af039631b6de0397ab2ba16eaf2872e9f8fca391b44d3d8cac317860a700a3f"}, - {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:84ffab12db93b5f6bad84c712c92060a2d321b35c3c9960b43d08d0f639d60d7"}, - {file = "rpds_py-0.18.0-cp312-none-win32.whl", hash = "sha256:685537e07897f173abcf67258bee3c05c374fa6fff89d4c7e42fb391b0605e98"}, - {file = "rpds_py-0.18.0-cp312-none-win_amd64.whl", hash = "sha256:e003b002ec72c8d5a3e3da2989c7d6065b47d9eaa70cd8808b5384fbb970f4ec"}, - {file = "rpds_py-0.18.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:08f9ad53c3f31dfb4baa00da22f1e862900f45908383c062c27628754af2e88e"}, - {file = "rpds_py-0.18.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0013fe6b46aa496a6749c77e00a3eb07952832ad6166bd481c74bda0dcb6d58"}, - {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e32a92116d4f2a80b629778280103d2a510a5b3f6314ceccd6e38006b5e92dcb"}, - {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e541ec6f2ec456934fd279a3120f856cd0aedd209fc3852eca563f81738f6861"}, - {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bed88b9a458e354014d662d47e7a5baafd7ff81c780fd91584a10d6ec842cb73"}, - {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2644e47de560eb7bd55c20fc59f6daa04682655c58d08185a9b95c1970fa1e07"}, - {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e8916ae4c720529e18afa0b879473049e95949bf97042e938530e072fde061d"}, - {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:465a3eb5659338cf2a9243e50ad9b2296fa15061736d6e26240e713522b6235c"}, - {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ea7d4a99f3b38c37eac212dbd6ec42b7a5ec51e2c74b5d3223e43c811609e65f"}, - {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:67071a6171e92b6da534b8ae326505f7c18022c6f19072a81dcf40db2638767c"}, - {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:41ef53e7c58aa4ef281da975f62c258950f54b76ec8e45941e93a3d1d8580594"}, - {file = "rpds_py-0.18.0-cp38-none-win32.whl", hash = "sha256:fdea4952db2793c4ad0bdccd27c1d8fdd1423a92f04598bc39425bcc2b8ee46e"}, - {file = "rpds_py-0.18.0-cp38-none-win_amd64.whl", hash = "sha256:7cd863afe7336c62ec78d7d1349a2f34c007a3cc6c2369d667c65aeec412a5b1"}, - {file = "rpds_py-0.18.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:5307def11a35f5ae4581a0b658b0af8178c65c530e94893345bebf41cc139d33"}, - {file = "rpds_py-0.18.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:77f195baa60a54ef9d2de16fbbfd3ff8b04edc0c0140a761b56c267ac11aa467"}, - {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39f5441553f1c2aed4de4377178ad8ff8f9d733723d6c66d983d75341de265ab"}, - {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a00312dea9310d4cb7dbd7787e722d2e86a95c2db92fbd7d0155f97127bcb40"}, - {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f2fc11e8fe034ee3c34d316d0ad8808f45bc3b9ce5857ff29d513f3ff2923a1"}, - {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:586f8204935b9ec884500498ccc91aa869fc652c40c093bd9e1471fbcc25c022"}, - {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddc2f4dfd396c7bfa18e6ce371cba60e4cf9d2e5cdb71376aa2da264605b60b9"}, - {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ddcba87675b6d509139d1b521e0c8250e967e63b5909a7e8f8944d0f90ff36f"}, - {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7bd339195d84439cbe5771546fe8a4e8a7a045417d8f9de9a368c434e42a721e"}, - {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:d7c36232a90d4755b720fbd76739d8891732b18cf240a9c645d75f00639a9024"}, - {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6b0817e34942b2ca527b0e9298373e7cc75f429e8da2055607f4931fded23e20"}, - {file = "rpds_py-0.18.0-cp39-none-win32.whl", hash = "sha256:99f70b740dc04d09e6b2699b675874367885217a2e9f782bdf5395632ac663b7"}, - {file = "rpds_py-0.18.0-cp39-none-win_amd64.whl", hash = "sha256:6ef687afab047554a2d366e112dd187b62d261d49eb79b77e386f94644363294"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ad36cfb355e24f1bd37cac88c112cd7730873f20fb0bdaf8ba59eedf8216079f"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:36b3ee798c58ace201289024b52788161e1ea133e4ac93fba7d49da5fec0ef9e"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8a2f084546cc59ea99fda8e070be2fd140c3092dc11524a71aa8f0f3d5a55ca"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e4461d0f003a0aa9be2bdd1b798a041f177189c1a0f7619fe8c95ad08d9a45d7"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8db715ebe3bb7d86d77ac1826f7d67ec11a70dbd2376b7cc214199360517b641"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:793968759cd0d96cac1e367afd70c235867831983f876a53389ad869b043c948"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66e6a3af5a75363d2c9a48b07cb27c4ea542938b1a2e93b15a503cdfa8490795"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ef0befbb5d79cf32d0266f5cff01545602344eda89480e1dd88aca964260b18"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1d4acf42190d449d5e89654d5c1ed3a4f17925eec71f05e2a41414689cda02d1"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:a5f446dd5055667aabaee78487f2b5ab72e244f9bc0b2ffebfeec79051679984"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:9dbbeb27f4e70bfd9eec1be5477517365afe05a9b2c441a0b21929ee61048124"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:22806714311a69fd0af9b35b7be97c18a0fc2826e6827dbb3a8c94eac6cf7eeb"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:b34ae4636dfc4e76a438ab826a0d1eed2589ca7d9a1b2d5bb546978ac6485461"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c8370641f1a7f0e0669ddccca22f1da893cef7628396431eb445d46d893e5cd"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c8362467a0fdeccd47935f22c256bec5e6abe543bf0d66e3d3d57a8fb5731863"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11a8c85ef4a07a7638180bf04fe189d12757c696eb41f310d2426895356dcf05"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b316144e85316da2723f9d8dc75bada12fa58489a527091fa1d5a612643d1a0e"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf1ea2e34868f6fbf070e1af291c8180480310173de0b0c43fc38a02929fc0e3"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e546e768d08ad55b20b11dbb78a745151acbd938f8f00d0cfbabe8b0199b9880"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4901165d170a5fde6f589acb90a6b33629ad1ec976d4529e769c6f3d885e3e80"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:618a3d6cae6ef8ec88bb76dd80b83cfe415ad4f1d942ca2a903bf6b6ff97a2da"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ed4eb745efbff0a8e9587d22a84be94a5eb7d2d99c02dacf7bd0911713ed14dd"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6c81e5f372cd0dc5dc4809553d34f832f60a46034a5f187756d9b90586c2c307"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:43fbac5f22e25bee1d482c97474f930a353542855f05c1161fd804c9dc74a09d"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d7faa6f14017c0b1e69f5e2c357b998731ea75a442ab3841c0dbbbfe902d2c4"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:08231ac30a842bd04daabc4d71fddd7e6d26189406d5a69535638e4dcb88fe76"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:044a3e61a7c2dafacae99d1e722cc2d4c05280790ec5a05031b3876809d89a5c"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f26b5bd1079acdb0c7a5645e350fe54d16b17bfc5e71f371c449383d3342e17"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:482103aed1dfe2f3b71a58eff35ba105289b8d862551ea576bd15479aba01f66"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1374f4129f9bcca53a1bba0bb86bf78325a0374577cf7e9e4cd046b1e6f20e24"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:635dc434ff724b178cb192c70016cc0ad25a275228f749ee0daf0eddbc8183b1"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:bc362ee4e314870a70f4ae88772d72d877246537d9f8cb8f7eacf10884862432"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:4832d7d380477521a8c1644bbab6588dfedea5e30a7d967b5fb75977c45fd77f"}, - {file = "rpds_py-0.18.0.tar.gz", hash = "sha256:42821446ee7a76f5d9f71f9e33a4fb2ffd724bb3e7f93386150b61a43115788d"}, -] - -[[package]] -name = "rsa" -version = "4.9" -description = "Pure-Python RSA implementation" -optional = false -python-versions = ">=3.6,<4" -files = [ - {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, - {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, -] - -[package.dependencies] -pyasn1 = ">=0.1.3" - -[[package]] -name = "ruff" -version = "0.1.15" -description = "An extremely fast Python linter and code formatter, written in Rust." -optional = false -python-versions = ">=3.7" -files = [ - {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:5fe8d54df166ecc24106db7dd6a68d44852d14eb0729ea4672bb4d96c320b7df"}, - {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6f0bfbb53c4b4de117ac4d6ddfd33aa5fc31beeaa21d23c45c6dd249faf9126f"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0d432aec35bfc0d800d4f70eba26e23a352386be3a6cf157083d18f6f5881c8"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9405fa9ac0e97f35aaddf185a1be194a589424b8713e3b97b762336ec79ff807"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c66ec24fe36841636e814b8f90f572a8c0cb0e54d8b5c2d0e300d28a0d7bffec"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6f8ad828f01e8dd32cc58bc28375150171d198491fc901f6f98d2a39ba8e3ff5"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86811954eec63e9ea162af0ffa9f8d09088bab51b7438e8b6488b9401863c25e"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd4025ac5e87d9b80e1f300207eb2fd099ff8200fa2320d7dc066a3f4622dc6b"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b17b93c02cdb6aeb696effecea1095ac93f3884a49a554a9afa76bb125c114c1"}, - {file = "ruff-0.1.15-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ddb87643be40f034e97e97f5bc2ef7ce39de20e34608f3f829db727a93fb82c5"}, - {file = "ruff-0.1.15-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:abf4822129ed3a5ce54383d5f0e964e7fef74a41e48eb1dfad404151efc130a2"}, - {file = "ruff-0.1.15-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6c629cf64bacfd136c07c78ac10a54578ec9d1bd2a9d395efbee0935868bf852"}, - {file = "ruff-0.1.15-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1bab866aafb53da39c2cadfb8e1c4550ac5340bb40300083eb8967ba25481447"}, - {file = "ruff-0.1.15-py3-none-win32.whl", hash = "sha256:2417e1cb6e2068389b07e6fa74c306b2810fe3ee3476d5b8a96616633f40d14f"}, - {file = "ruff-0.1.15-py3-none-win_amd64.whl", hash = "sha256:3837ac73d869efc4182d9036b1405ef4c73d9b1f88da2413875e34e0d6919587"}, - {file = "ruff-0.1.15-py3-none-win_arm64.whl", hash = "sha256:9a933dfb1c14ec7a33cceb1e49ec4a16b51ce3c20fd42663198746efc0427360"}, - {file = "ruff-0.1.15.tar.gz", hash = "sha256:f6dfa8c1b21c913c326919056c390966648b680966febcb796cc9d1aaab8564e"}, -] - -[[package]] -name = "setuptools" -version = "69.1.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - -[[package]] -name = "snowflake-connector-python" -version = "3.6.0" -description = "Snowflake Connector for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "snowflake-connector-python-3.6.0.tar.gz", hash = "sha256:15667a918780d79da755e6a60bbf6918051854951e8f56ccdf5692283e9a8479"}, - {file = "snowflake_connector_python-3.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4093b38cf9abf95c38119f0b23b07e23dc7a8689b956cd5d34975e1875741f20"}, - {file = "snowflake_connector_python-3.6.0-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:cf5a964fe01b177063f8c44d14df3a72715580bcd195788ec2822090f37330a5"}, - {file = "snowflake_connector_python-3.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55a6418cec585b050e6f05404f25e62b075a3bbea587dc1f903de15640565c58"}, - {file = "snowflake_connector_python-3.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7c76aea92b87f6ecd604e9c934aac8a779f2e20f3be1d990d53bb5b6d87b009"}, - {file = "snowflake_connector_python-3.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:9dfcf178271e892e64e4092b9e011239a066ce5de848afd2efe3f13197a9f8b3"}, - {file = "snowflake_connector_python-3.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4916f9b4a0efd7c96d1fa50a157e05907b6935f91492cca7f200b43cc178a25e"}, - {file = "snowflake_connector_python-3.6.0-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:f15024c66db5e87d359216ec733a2974d7562aa38f3f18c8b6e65489839e00d7"}, - {file = "snowflake_connector_python-3.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bcbd3102f807ebbbae52b1b5683d45cd7b3dcb0eaec131233ba6b156e8d70fa4"}, - {file = "snowflake_connector_python-3.6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7662e2de25b885abe08ab866cf7c7b026ad1af9faa39c25e2c25015ef807abe3"}, - {file = "snowflake_connector_python-3.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:d1fa102f55ee166cc766aeee3f9333b17b4bede6fb088eee1e1f022df15b6d81"}, - {file = "snowflake_connector_python-3.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fde1e0727e2f23c2a07b49b30e1bc0f49977f965d08ddfda10015b24a2beeb76"}, - {file = "snowflake_connector_python-3.6.0-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:1b51fe000c8cf6372d30b73c7136275e52788e6af47010cd1984c9fb03378e86"}, - {file = "snowflake_connector_python-3.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7a11699689a19916e65794ce58dca72b8a40fe6a7eea06764931ede10b47bcc"}, - {file = "snowflake_connector_python-3.6.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d810be5b180c6f47ce9b6f989fe64b9984383e4b77e30b284a83e33f229a3a82"}, - {file = "snowflake_connector_python-3.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:b5db47d4164d6b7a07c413a46f9edc4a1d687e3df44fd9d5fa89a89aecb94a8e"}, - {file = "snowflake_connector_python-3.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bf8c1ad5aab5304fefa2a4178061a24c96da45e3e3db9d901621e9953e005402"}, - {file = "snowflake_connector_python-3.6.0-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:1058ab5c98cc62fde8b3f021f0a5076cb7865b5cdab8a9bccde0df88b9e91334"}, - {file = "snowflake_connector_python-3.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b93f55989f80d69278e0f40a7a1c0e737806b7c0ddb0351513a752b837243e8"}, - {file = "snowflake_connector_python-3.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50dd954ea5918d3242ded69225b72f701963cd9c043ee7d9ab35dc22211611c8"}, - {file = "snowflake_connector_python-3.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:4ad42613b87f31441d07a8ea242f4c28ed5eb7b6e05986f9e94a7e44b96d3d1e"}, -] - -[package.dependencies] -asn1crypto = ">0.24.0,<2.0.0" -certifi = ">=2017.4.17" -cffi = ">=1.9,<2.0.0" -charset-normalizer = ">=2,<4" -cryptography = ">=3.1.0,<42.0.0" -filelock = ">=3.5,<4" -idna = ">=2.5,<4" -packaging = "*" -platformdirs = ">=2.6.0,<4.0.0" -pyjwt = "<3.0.0" -pyOpenSSL = ">=16.2.0,<24.0.0" -pytz = "*" -requests = "<3.0.0" -sortedcontainers = ">=2.4.0" -tomlkit = "*" -typing-extensions = ">=4.3,<5" -urllib3 = {version = ">=1.21.1,<2.0.0", markers = "python_version < \"3.10\""} - -[package.extras] -development = ["Cython", "coverage", "more-itertools", "numpy (<1.27.0)", "pendulum (!=2.1.1)", "pexpect", "pytest (<7.5.0)", "pytest-cov", "pytest-rerunfailures", "pytest-timeout", "pytest-xdist", "pytzdata"] -pandas = ["pandas (>=1.0.0,<2.2.0)", "pyarrow"] -secure-local-storage = ["keyring (!=16.1.0,<25.0.0)"] - -[[package]] -name = "snowflake-sqlalchemy" -version = "1.5.1" -description = "Snowflake SQLAlchemy Dialect" -optional = false -python-versions = ">=3.7" -files = [ - {file = "snowflake-sqlalchemy-1.5.1.tar.gz", hash = "sha256:4f1383402ffc89311974bd810dee22003aef4af0f312a0fdb55778333ad1abf7"}, - {file = "snowflake_sqlalchemy-1.5.1-py2.py3-none-any.whl", hash = "sha256:df022fb73bc04d68dfb3216ebf7a1bfbd14d22def9c38bbe05275beb258adcd0"}, -] - -[package.dependencies] -snowflake-connector-python = "<4.0.0" -sqlalchemy = ">=1.4.0,<2.0.0" - -[package.extras] -development = ["mock", "numpy", "pytest", "pytest-cov", "pytest-rerunfailures", "pytest-timeout", "pytz"] -pandas = ["snowflake-connector-python[pandas] (<4.0.0)"] - -[[package]] -name = "sortedcontainers" -version = "2.4.0" -description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" -optional = false -python-versions = "*" -files = [ - {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, - {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, -] - -[[package]] -name = "sqlalchemy" -version = "1.4.51" -description = "Database Abstraction Library" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" -files = [ - {file = "SQLAlchemy-1.4.51-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:1a09d5bd1a40d76ad90e5570530e082ddc000e1d92de495746f6257dc08f166b"}, - {file = "SQLAlchemy-1.4.51-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2be4e6294c53f2ec8ea36486b56390e3bcaa052bf3a9a47005687ccf376745d1"}, - {file = "SQLAlchemy-1.4.51-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ca484ca11c65e05639ffe80f20d45e6be81fbec7683d6c9a15cd421e6e8b340"}, - {file = "SQLAlchemy-1.4.51-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0535d5b57d014d06ceeaeffd816bb3a6e2dddeb670222570b8c4953e2d2ea678"}, - {file = "SQLAlchemy-1.4.51-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af55cc207865d641a57f7044e98b08b09220da3d1b13a46f26487cc2f898a072"}, - {file = "SQLAlchemy-1.4.51-cp310-cp310-win32.whl", hash = "sha256:7af40425ac535cbda129d9915edcaa002afe35d84609fd3b9d6a8c46732e02ee"}, - {file = "SQLAlchemy-1.4.51-cp310-cp310-win_amd64.whl", hash = "sha256:8d1d7d63e5d2f4e92a39ae1e897a5d551720179bb8d1254883e7113d3826d43c"}, - {file = "SQLAlchemy-1.4.51-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:eaeeb2464019765bc4340214fca1143081d49972864773f3f1e95dba5c7edc7d"}, - {file = "SQLAlchemy-1.4.51-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7deeae5071930abb3669b5185abb6c33ddfd2398f87660fafdb9e6a5fb0f3f2f"}, - {file = "SQLAlchemy-1.4.51-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0892e7ac8bc76da499ad3ee8de8da4d7905a3110b952e2a35a940dab1ffa550e"}, - {file = "SQLAlchemy-1.4.51-cp311-cp311-win32.whl", hash = "sha256:50e074aea505f4427151c286955ea025f51752fa42f9939749336672e0674c81"}, - {file = "SQLAlchemy-1.4.51-cp311-cp311-win_amd64.whl", hash = "sha256:3b0cd89a7bd03f57ae58263d0f828a072d1b440c8c2949f38f3b446148321171"}, - {file = "SQLAlchemy-1.4.51-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a33cb3f095e7d776ec76e79d92d83117438b6153510770fcd57b9c96f9ef623d"}, - {file = "SQLAlchemy-1.4.51-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6cacc0b2dd7d22a918a9642fc89840a5d3cee18a0e1fe41080b1141b23b10916"}, - {file = "SQLAlchemy-1.4.51-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:245c67c88e63f1523e9216cad6ba3107dea2d3ee19adc359597a628afcabfbcb"}, - {file = "SQLAlchemy-1.4.51-cp312-cp312-win32.whl", hash = "sha256:8e702e7489f39375601c7ea5a0bef207256828a2bc5986c65cb15cd0cf097a87"}, - {file = "SQLAlchemy-1.4.51-cp312-cp312-win_amd64.whl", hash = "sha256:0525c4905b4b52d8ccc3c203c9d7ab2a80329ffa077d4bacf31aefda7604dc65"}, - {file = "SQLAlchemy-1.4.51-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:1980e6eb6c9be49ea8f89889989127daafc43f0b1b6843d71efab1514973cca0"}, - {file = "SQLAlchemy-1.4.51-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ec7a0ed9b32afdf337172678a4a0e6419775ba4e649b66f49415615fa47efbd"}, - {file = "SQLAlchemy-1.4.51-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:352df882088a55293f621328ec33b6ffca936ad7f23013b22520542e1ab6ad1b"}, - {file = "SQLAlchemy-1.4.51-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:86a22143a4001f53bf58027b044da1fb10d67b62a785fc1390b5c7f089d9838c"}, - {file = "SQLAlchemy-1.4.51-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c37bc677690fd33932182b85d37433845de612962ed080c3e4d92f758d1bd894"}, - {file = "SQLAlchemy-1.4.51-cp36-cp36m-win32.whl", hash = "sha256:d0a83afab5e062abffcdcbcc74f9d3ba37b2385294dd0927ad65fc6ebe04e054"}, - {file = "SQLAlchemy-1.4.51-cp36-cp36m-win_amd64.whl", hash = "sha256:a61184c7289146c8cff06b6b41807c6994c6d437278e72cf00ff7fe1c7a263d1"}, - {file = "SQLAlchemy-1.4.51-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:3f0ef620ecbab46e81035cf3dedfb412a7da35340500ba470f9ce43a1e6c423b"}, - {file = "SQLAlchemy-1.4.51-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c55040d8ea65414de7c47f1a23823cd9f3fad0dc93e6b6b728fee81230f817b"}, - {file = "SQLAlchemy-1.4.51-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ef80328e3fee2be0a1abe3fe9445d3a2e52a1282ba342d0dab6edf1fef4707"}, - {file = "SQLAlchemy-1.4.51-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f8cafa6f885a0ff5e39efa9325195217bb47d5929ab0051636610d24aef45ade"}, - {file = "SQLAlchemy-1.4.51-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8f2df79a46e130235bc5e1bbef4de0583fb19d481eaa0bffa76e8347ea45ec6"}, - {file = "SQLAlchemy-1.4.51-cp37-cp37m-win32.whl", hash = "sha256:f2e5b6f5cf7c18df66d082604a1d9c7a2d18f7d1dbe9514a2afaccbb51cc4fc3"}, - {file = "SQLAlchemy-1.4.51-cp37-cp37m-win_amd64.whl", hash = "sha256:5e180fff133d21a800c4f050733d59340f40d42364fcb9d14f6a67764bdc48d2"}, - {file = "SQLAlchemy-1.4.51-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:7d8139ca0b9f93890ab899da678816518af74312bb8cd71fb721436a93a93298"}, - {file = "SQLAlchemy-1.4.51-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb18549b770351b54e1ab5da37d22bc530b8bfe2ee31e22b9ebe650640d2ef12"}, - {file = "SQLAlchemy-1.4.51-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55e699466106d09f028ab78d3c2e1f621b5ef2c8694598242259e4515715da7c"}, - {file = "SQLAlchemy-1.4.51-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2ad16880ccd971ac8e570550fbdef1385e094b022d6fc85ef3ce7df400dddad3"}, - {file = "SQLAlchemy-1.4.51-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b97fd5bb6b7c1a64b7ac0632f7ce389b8ab362e7bd5f60654c2a418496be5d7f"}, - {file = "SQLAlchemy-1.4.51-cp38-cp38-win32.whl", hash = "sha256:cecb66492440ae8592797dd705a0cbaa6abe0555f4fa6c5f40b078bd2740fc6b"}, - {file = "SQLAlchemy-1.4.51-cp38-cp38-win_amd64.whl", hash = "sha256:39b02b645632c5fe46b8dd30755682f629ffbb62ff317ecc14c998c21b2896ff"}, - {file = "SQLAlchemy-1.4.51-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:b03850c290c765b87102959ea53299dc9addf76ca08a06ea98383348ae205c99"}, - {file = "SQLAlchemy-1.4.51-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e646b19f47d655261b22df9976e572f588185279970efba3d45c377127d35349"}, - {file = "SQLAlchemy-1.4.51-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3cf56cc36d42908495760b223ca9c2c0f9f0002b4eddc994b24db5fcb86a9e4"}, - {file = "SQLAlchemy-1.4.51-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0d661cff58c91726c601cc0ee626bf167b20cc4d7941c93c5f3ac28dc34ddbea"}, - {file = "SQLAlchemy-1.4.51-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3823dda635988e6744d4417e13f2e2b5fe76c4bf29dd67e95f98717e1b094cad"}, - {file = "SQLAlchemy-1.4.51-cp39-cp39-win32.whl", hash = "sha256:b00cf0471888823b7a9f722c6c41eb6985cf34f077edcf62695ac4bed6ec01ee"}, - {file = "SQLAlchemy-1.4.51-cp39-cp39-win_amd64.whl", hash = "sha256:a055ba17f4675aadcda3005df2e28a86feb731fdcc865e1f6b4f209ed1225cba"}, - {file = "SQLAlchemy-1.4.51.tar.gz", hash = "sha256:e7908c2025eb18394e32d65dd02d2e37e17d733cdbe7d78231c2b6d7eb20cdb9"}, -] - -[package.dependencies] -greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} - -[package.extras] -aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] -asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"] -mssql = ["pyodbc"] -mssql-pymssql = ["pymssql"] -mssql-pyodbc = ["pyodbc"] -mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] -mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] -mysql-connector = ["mysql-connector-python"] -oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"] -postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] -postgresql-psycopg2binary = ["psycopg2-binary"] -postgresql-psycopg2cffi = ["psycopg2cffi"] -pymysql = ["pymysql", "pymysql (<1)"] -sqlcipher = ["sqlcipher3_binary"] - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] - -[[package]] -name = "tomlkit" -version = "0.12.3" -description = "Style preserving TOML library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomlkit-0.12.3-py3-none-any.whl", hash = "sha256:b0a645a9156dc7cb5d3a1f0d4bab66db287fcb8e0430bdd4664a095ea16414ba"}, - {file = "tomlkit-0.12.3.tar.gz", hash = "sha256:75baf5012d06501f07bee5bf8e801b9f343e7aac5a92581f20f80ce632e6b5a4"}, -] - -[[package]] -name = "types-jsonschema" -version = "4.21.0.20240118" -description = "Typing stubs for jsonschema" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-jsonschema-4.21.0.20240118.tar.gz", hash = "sha256:31aae1b5adc0176c1155c2d4f58348b22d92ae64315e9cc83bd6902168839232"}, - {file = "types_jsonschema-4.21.0.20240118-py3-none-any.whl", hash = "sha256:77a4ac36b0be4f24274d5b9bf0b66208ee771c05f80e34c4641de7d63e8a872d"}, -] - -[package.dependencies] -referencing = "*" - -[[package]] -name = "types-pytz" -version = "2024.1.0.20240203" -description = "Typing stubs for pytz" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-pytz-2024.1.0.20240203.tar.gz", hash = "sha256:c93751ee20dfc6e054a0148f8f5227b9a00b79c90a4d3c9f464711a73179c89e"}, - {file = "types_pytz-2024.1.0.20240203-py3-none-any.whl", hash = "sha256:9679eef0365db3af91ef7722c199dbb75ee5c1b67e3c4dd7bfbeb1b8a71c21a3"}, -] - -[[package]] -name = "types-pyyaml" -version = "6.0.12.12" -description = "Typing stubs for PyYAML" -optional = false -python-versions = "*" -files = [ - {file = "types-PyYAML-6.0.12.12.tar.gz", hash = "sha256:334373d392fde0fdf95af5c3f1661885fa10c52167b14593eb856289e1855062"}, - {file = "types_PyYAML-6.0.12.12-py3-none-any.whl", hash = "sha256:c05bc6c158facb0676674b7f11fe3960db4f389718e19e62bd2b84d6205cfd24"}, -] - -[[package]] -name = "types-requests" -version = "2.31.0.4" -description = "Typing stubs for requests" -optional = false -python-versions = "*" -files = [ - {file = "types-requests-2.31.0.4.tar.gz", hash = "sha256:a111041148d7e04bf100c476bc4db3ee6b0a1cd0b4018777f6a660b1c4f1318d"}, - {file = "types_requests-2.31.0.4-py3-none-any.whl", hash = "sha256:c7a9d6b62776f21b169a94a0e9d2dfcae62fa9149f53594ff791c3ae67325490"}, -] - -[package.dependencies] -types-urllib3 = "*" - -[[package]] -name = "types-urllib3" -version = "1.26.25.14" -description = "Typing stubs for urllib3" -optional = false -python-versions = "*" -files = [ - {file = "types-urllib3-1.26.25.14.tar.gz", hash = "sha256:229b7f577c951b8c1b92c1bc2b2fdb0b49847bd2af6d1cc2a2e3dd340f3bda8f"}, - {file = "types_urllib3-1.26.25.14-py3-none-any.whl", hash = "sha256:9683bbb7fb72e32bfe9d2be6e04875fbe1b3eeec3cbb4ea231435aa7fd6b4f0e"}, -] - -[[package]] -name = "typing-extensions" -version = "4.9.0" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, -] - -[[package]] -name = "tzdata" -version = "2024.1" -description = "Provider of IANA time zone data" -optional = false -python-versions = ">=2" -files = [ - {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, - {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, -] - -[[package]] -name = "ulid" -version = "1.1" -description = "Pyhton version of this: https://github.com/alizain/ulid" -optional = false -python-versions = "*" -files = [ - {file = "ulid-1.1.tar.gz", hash = "sha256:0943e8a751ec10dfcdb4df2758f96dffbbfbc055d0b49288caf2f92125900d49"}, -] - -[[package]] -name = "url-normalize" -version = "1.4.3" -description = "URL normalization for Python" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" -files = [ - {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, - {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, -] - -[package.dependencies] -six = "*" - -[[package]] -name = "urllib3" -version = "1.26.18" -description = "HTTP library with thread-safe connection pooling, file post, and more." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" -files = [ - {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"}, - {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"}, -] - -[package.extras] -brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] - -[[package]] -name = "viztracer" -version = "0.16.2" -description = "A debugging and profiling tool that can trace and visualize python code execution" -optional = false -python-versions = ">=3.8" -files = [ - {file = "viztracer-0.16.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:bdc62e90a2957e4119632e98f8b77d0ff1ab4db7029dd2e265bb3748e0fc0e05"}, - {file = "viztracer-0.16.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:789ac930e1c9621f04d275ee3ebb75a5d6109bcd4634796a77934608c60424d0"}, - {file = "viztracer-0.16.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee504771e3182045996a966d94d95d71693e59717b2643199162ec754a6e2400"}, - {file = "viztracer-0.16.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef9ecf4110d379245f17429d2a10391f3612f60b5618d0d61a30c110e9df2313"}, - {file = "viztracer-0.16.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57c2574cc15b688eb0ce4e24a2c30f06c1df3bbe1dd16a1d18676e411e785f96"}, - {file = "viztracer-0.16.2-cp310-cp310-win32.whl", hash = "sha256:9fe652834f5073bf99debc25d8ba6084690fa2f26420621ca38a09efcae71b2f"}, - {file = "viztracer-0.16.2-cp310-cp310-win_amd64.whl", hash = "sha256:d59f57e3e46e116ce77e144f419739d1d8d976a903c51a822ba4ef167e5b37d4"}, - {file = "viztracer-0.16.2-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:b0bd434c43b7f87f76ddd21cf7371d910edb74b131aaff670a8fcc9f28251e67"}, - {file = "viztracer-0.16.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1bbbb9c80b08db692993c67e7b10d7b06db3eedc6c38f0d93a40ea31de82076e"}, - {file = "viztracer-0.16.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1e7842e437d81fb47ef8266b2dde76bf755c95305014eeec8346b2fce9711c0"}, - {file = "viztracer-0.16.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bddfe6a6f2a66f363fcca79a694986b0602ba0dc3dede57dc182cdd6d0823585"}, - {file = "viztracer-0.16.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc4a2639e6f18200b73a70f3e7dca4cbb3ba08e3807023fd526f44ebf2185d1e"}, - {file = "viztracer-0.16.2-cp311-cp311-win32.whl", hash = "sha256:371496734ebb3eafd6a6e033dbf04960618089e021dc7eded95179a8f3700c40"}, - {file = "viztracer-0.16.2-cp311-cp311-win_amd64.whl", hash = "sha256:d9c7670e7fb077fe48c92036766a6772e10a3caf41455d6244b8b1c8d48bbd87"}, - {file = "viztracer-0.16.2-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:2fd8b5aa8143b5be4d696e53e8ac5027c20187c178396839f39f8aa610d5873d"}, - {file = "viztracer-0.16.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f3a8ddc4990154f2d400b09deefc9236d963a733d458b2825bd590ced7e7bf89"}, - {file = "viztracer-0.16.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fcf8b14dc8dd1567bca3f8cb13e31665a3cbf2ee95552de0afe9179e3a7bde22"}, - {file = "viztracer-0.16.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:309cf5d545222adb2581ae6aeb48d3d03d7241d335142408d87c49f1d0793f85"}, - {file = "viztracer-0.16.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee749a2a3f4ed662d35eb9378ff0648907aa6321befa16ad1d8bec6034b4d260"}, - {file = "viztracer-0.16.2-cp312-cp312-win32.whl", hash = "sha256:a082dab37b6b8cea43438b80a11a6e859f1b45522b8684a2fb9af03539d83803"}, - {file = "viztracer-0.16.2-cp312-cp312-win_amd64.whl", hash = "sha256:03cd21181fe9a630ac5fb9ff1ee83fb7a67814e51e130f0ed83300e163fbac23"}, - {file = "viztracer-0.16.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:e920d383abae1b9314f2a60dd94e04c83998bfe759556af49d3c422d1d64d11e"}, - {file = "viztracer-0.16.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb9941b198fed8ba5b3f9d8105e59d37ab15f7f00b9a576686b1073990806d12"}, - {file = "viztracer-0.16.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1b7030aa6f934ff02882dfd48eca5a9442951b8be24c1dc5dc99fabbfb1997c"}, - {file = "viztracer-0.16.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:258087076c06d065d2786dc8a0f1f017d655d3753a8fe6836640c005c66a0c43"}, - {file = "viztracer-0.16.2-cp38-cp38-win32.whl", hash = "sha256:f0fd53e2fec972f9332677e6d11332ba789fcccf59060d7b9f309041602dc712"}, - {file = "viztracer-0.16.2-cp38-cp38-win_amd64.whl", hash = "sha256:ab067398029a50cc784d5456c5e8bef339b4bffaa1c3f0f9384a26b57c0efdaa"}, - {file = "viztracer-0.16.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:45879cf54ad9116245e2a6115660307f98ae3aa98a77347f2b336a904f260370"}, - {file = "viztracer-0.16.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abc61cfc36b33a301b950554d9e9027a506d580ebf1e764aa6656af0acfa3354"}, - {file = "viztracer-0.16.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:419f738bba8204e7ddb422faff3a40576896d030bbbf4fb79ace006147ca60e7"}, - {file = "viztracer-0.16.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c594022093bf9eee57ad2b9656f836dca2ed9c0b8e4d94a9d13a6cbc531386fe"}, - {file = "viztracer-0.16.2-cp39-cp39-win32.whl", hash = "sha256:4f98da282e87013a93917c2ae080ba52845e98ed5280faecdc42ee0c7fb74a4a"}, - {file = "viztracer-0.16.2-cp39-cp39-win_amd64.whl", hash = "sha256:64b97120374a572d2320fb795473c051c92d39dfc99fb74754e61e4c212e7617"}, - {file = "viztracer-0.16.2.tar.gz", hash = "sha256:8dff5637a7b42ffdbc1ed3768ce43979e71b09893ff370bc3c3ede54afed93ee"}, -] - -[package.dependencies] -objprint = ">0.1.3" - -[package.extras] -full = ["orjson"] - -[[package]] -name = "wcmatch" -version = "8.4" -description = "Wildcard/glob file name matcher." -optional = false -python-versions = ">=3.7" -files = [ - {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, - {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, -] - -[package.dependencies] -bracex = ">=2.1.1" - -[[package]] -name = "wrapt" -version = "1.16.0" -description = "Module for decorators, wrappers and monkey patching." -optional = false -python-versions = ">=3.6" -files = [ - {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, - {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, - {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, - {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, - {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, - {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, - {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, - {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, - {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, - {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, - {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, - {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, - {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, - {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, - {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, - {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, - {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, - {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, - {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, - {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, - {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, - {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, - {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, - {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, - {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, - {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, - {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, - {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, -] - -[metadata] -lock-version = "2.0" -python-versions = "^3.9" -content-hash = "3b46151e994684f0953be1041a850ac8efcedea10632f8fa86aaaa6d20385174" diff --git a/airbyte-lib/poetry.toml b/airbyte-lib/poetry.toml deleted file mode 100644 index ab1033bd37224..0000000000000 --- a/airbyte-lib/poetry.toml +++ /dev/null @@ -1,2 +0,0 @@ -[virtualenvs] -in-project = true diff --git a/airbyte-lib/pyproject.toml b/airbyte-lib/pyproject.toml deleted file mode 100644 index 8634bf4d1f3b4..0000000000000 --- a/airbyte-lib/pyproject.toml +++ /dev/null @@ -1,261 +0,0 @@ -[tool.poetry] -name = "airbyte-lib" -description = "AirbyteLib" -version = "0.1.0" -authors = ["Airbyte "] -readme = "README.md" -packages = [{include = "airbyte_lib"}] - -[tool.poetry.dependencies] -python = "^3.9" - -airbyte-cdk = "^0.58.3" -# airbyte-protocol-models = "^1.0.1" # Conflicts with airbyte-cdk # TODO: delete or resolve -jsonschema = "3.2.0" -orjson = "^3.9.10" -overrides = "^7.4.0" -pandas = "2.1.4" # 2.2.0 breaks sqlalchemy interop - TODO: optionally retest higher versions -psycopg2-binary = "^2.9.9" -python-ulid = "^2.2.0" -types-pyyaml = "^6.0.12.12" -ulid = "^1.1" -sqlalchemy = "1.4.51" -snowflake-connector-python = "3.6.0" -snowflake-sqlalchemy = "^1.5.1" -duckdb-engine = "^0.10.0" -requests = "^2.31.0" -pyarrow = "^14.0.2" - -# Psycopg3 is not supported in SQLAlchemy 1.x: -# psycopg = {extras = ["binary", "pool"], version = "^3.1.16"} -rich = "^13.7.0" -pendulum = "<=3.0.0" -python-dotenv = "^1.0.1" - - -[tool.poetry.group.dev.dependencies] -docker = "^7.0.0" -faker = "^21.0.0" -mypy = "^1.7.1" -pandas-stubs = "^2.1.4.231218" -pdoc = "^14.3.0" -pyarrow-stubs = "^10.0.1.7" -pytest = "^7.4.3" -pytest-docker = "^2.0.1" -pytest-mypy = "^0.10.3" -ruff = "^0.1.11" -types-jsonschema = "^4.20.0.0" -google-cloud-secret-manager = "^2.17.0" -types-requests = "2.31.0.4" -freezegun = "^1.4.0" -airbyte-source-faker = "^6.0.0" -viztracer = "^0.16.2" -tomli = "^2.0" - -[build-system] -requires = ["poetry-core"] -build-backend = "poetry.core.masonry.api" - -[tool.pytest.ini_options] -markers = [ - "slow: marks tests as slow (deselect with '-m \"not slow\"')", - "requires_creds: marks a test as requiring credentials (skip when secrets unavailable)" -] - -[tool.ruff.pylint] -max-args = 8 # Relaxed from default of 5 -max-branches = 15 # Relaxed from default of 12 - -[tool.ruff] -target-version = "py39" -select = [ - # For rules reference, see https://docs.astral.sh/ruff/rules/ - "A", # flake8-builtins - "ANN", # flake8-annotations - "ARG", # flake8-unused-arguments - "ASYNC", # flake8-async - "B", # flake8-bugbear - "FBT", # flake8-boolean-trap - "BLE", # Blind except - "C4", # flake8-comprehensions - "C90", # mccabe (complexity) - "COM", # flake8-commas - "CPY", # missing copyright notice - # "D", # pydocstyle # TODO: Re-enable when adding docstrings - "DTZ", # flake8-datetimez - "E", # pycodestyle (errors) - "ERA", # flake8-eradicate (commented out code) - "EXE", # flake8-executable - "F", # Pyflakes - "FA", # flake8-future-annotations - "FIX", # flake8-fixme - "FLY", # flynt - "FURB", # Refurb - "I", # isort - "ICN", # flake8-import-conventions - "INP", # flake8-no-pep420 - "INT", # flake8-gettext - "ISC", # flake8-implicit-str-concat - "ICN", # flake8-import-conventions - "LOG", # flake8-logging - "N", # pep8-naming - "PD", # pandas-vet - "PERF", # Perflint - "PIE", # flake8-pie - "PGH", # pygrep-hooks - "PL", # Pylint - "PT", # flake8-pytest-style - "PTH", # flake8-use-pathlib - "PYI", # flake8-pyi - "Q", # flake8-quotes - "RET", # flake8-return - "RSE", # flake8-raise - "RUF", # Ruff-specific rules - "SIM", # flake8-simplify - "SLF", # flake8-self - "SLOT", # flake8-slots - "T10", # debugger calls - # "T20", # flake8-print # TODO: Re-enable once we have logging - "TCH", # flake8-type-checking - "TD", # flake8-todos - "TID", # flake8-tidy-imports - "TRY", # tryceratops - "TRY002", # Disallow raising vanilla Exception. Create or use a custom exception instead. - "TRY003", # Disallow vanilla string passing. Prefer kwargs to the exception constructur. - "UP", # pyupgrade - "W", # pycodestyle (warnings) - "YTT", # flake8-2020 -] -ignore = [ - # For rules reference, see https://docs.astral.sh/ruff/rules/ - - # These we don't agree with or don't want to prioritize to enforce: - "ANN003", # kwargs missing type annotations - "ANN101", # Type annotations for 'self' args - "COM812", # Because it conflicts with ruff auto-format - "EM", # flake8-errmsgs (may reconsider later) - "DJ", # Django linting - "G", # flake8-logging-format - "ISC001", # Conflicts with ruff auto-format - "NPY", # NumPy-specific rules - "PIE790", # Allow unnecssary 'pass' (sometimes useful for readability) - "PERF203", # exception handling in loop - "S", # flake8-bandit (noisy, security related) - "SIM910", # Allow "None" as second argument to Dict.get(). "Explicit is better than implicit." - "TD002", # Require author for TODOs - "TRIO", # flake8-trio (opinionated, noisy) - "INP001", # Dir 'examples' is part of an implicit namespace package. Add an __init__.py. - - # TODO: Consider re-enabling these before release: - "A003", # Class attribute 'type' is shadowing a Python builtin - "BLE001", # Do not catch blind exception: Exception - "ERA001", # Remove commented-out code - "FIX002", # Allow "TODO:" until release (then switch to requiring links via TDO003) - "PLW0603", # Using the global statement to update _cache is discouraged - "TD003", # Require links for TODOs # TODO: Re-enable when we disable FIX002 -] -fixable = ["ALL"] -unfixable = [ - "ERA001", # Commented-out code (avoid silent loss of code) - "T201" # print() calls (avoid silent loss of code / log messages) -] -line-length = 100 -extend-exclude = ["docs", "test", "tests"] -dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" - -[tool.ruff.isort] -force-sort-within-sections = false -lines-after-imports = 2 -known-first-party = ["airbyte_cdk", "airbyte_protocol"] -known-local-folder = ["airbyte_lib"] -required-imports = ["from __future__ import annotations"] -known-third-party = [] -section-order = [ - "future", - "standard-library", - "third-party", - "first-party", - "local-folder" -] - -[tool.ruff.mccabe] -max-complexity = 24 - -[tool.ruff.pycodestyle] -ignore-overlong-task-comments = true - -[tool.ruff.pydocstyle] -convention = "google" - -[tool.ruff.flake8-annotations] -allow-star-arg-any = false -ignore-fully-untyped = false - -[tool.ruff.format] -quote-style = "double" -indent-style = "space" -skip-magic-trailing-comma = false -line-ending = "auto" -preview = false -docstring-code-format = true - -[tool.mypy] -# Platform configuration -python_version = "3.9" -# imports related -ignore_missing_imports = true -follow_imports = "silent" -# None and Optional handling -no_implicit_optional = true -strict_optional = true -# Configuring warnings -warn_unused_configs = true -warn_redundant_casts = true -warn_unused_ignores = true -warn_no_return = true -warn_unreachable = true -warn_return_any = false -# Untyped definitions and calls -check_untyped_defs = true -disallow_untyped_calls = false -disallow_untyped_defs = true -disallow_incomplete_defs = true -disallow_untyped_decorators = false -# Disallow dynamic typing -disallow_subclassing_any = true -disallow_any_unimported = false -disallow_any_expr = false -disallow_any_decorated = false -disallow_any_explicit = false -disallow_any_generics = false -# Miscellaneous strictness flags -allow_untyped_globals = false -allow_redefinition = false -local_partial_types = false -implicit_reexport = true -strict_equality = true -# Configuring error messages -show_error_context = false -show_column_numbers = false -show_error_codes = true -exclude = ["docs", "test", "tests"] - -[[tool.mypy.overrides]] -module = [ - "airbyte_protocol", - "airbyte_protocol.models" -] -ignore_missing_imports = true # No stubs yet (😢) - -[tool.poetry.scripts] -generate-docs = "docs:run" -airbyte-lib-validate-source = "airbyte_lib.validate:run" - -[tool.poe.tasks] -test = "pytest tests" - -[tool.airbyte_ci] -extra_poetry_groups = ["dev"] -poe_tasks = ["test"] -required_environment_variables = ["GCP_GSM_CREDENTIALS"] -side_car_docker_engine = true diff --git a/airbyte-lib/tests/conftest.py b/airbyte-lib/tests/conftest.py deleted file mode 100644 index 0824d77f3eb8e..0000000000000 --- a/airbyte-lib/tests/conftest.py +++ /dev/null @@ -1,250 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -"""Global pytest fixtures.""" - -import json -import logging -import os -import shutil -import socket -import subprocess -import time - -import ulid -from airbyte_lib.caches.snowflake import SnowflakeCacheConfig - -import docker -import psycopg2 as psycopg -import pytest -from _pytest.nodes import Item -from google.cloud import secretmanager -from pytest_docker.plugin import get_docker_ip -from sqlalchemy import create_engine - -from airbyte_lib.caches import PostgresCacheConfig - -logger = logging.getLogger(__name__) - - -PYTEST_POSTGRES_IMAGE = "postgres:13" -PYTEST_POSTGRES_CONTAINER = "postgres_pytest_container" -PYTEST_POSTGRES_PORT = 5432 - -LOCAL_TEST_REGISTRY_URL = "./tests/integration_tests/fixtures/registry.json" - - -def pytest_collection_modifyitems(items: list[Item]) -> None: - """Override default pytest behavior, sorting our tests in a sensible execution order. - - In general, we want faster tests to run first, so that we can get feedback faster. - - Running lint tests first is helpful because they are fast and can catch typos and other errors. - - Otherwise tests are run based on an alpha-based natural sort, where 'unit' tests run after - 'integration' tests because 'u' comes after 'i' alphabetically. - """ - def test_priority(item: Item) -> int: - if item.get_closest_marker(name="slow"): - return 9 # slow tests have the lowest priority - elif 'lint_tests' in str(item.fspath): - return 1 # lint tests have high priority - elif 'unit_tests' in str(item.fspath): - return 2 # unit tests have highest priority - elif 'docs_tests' in str(item.fspath): - return 3 # doc tests have medium priority - elif 'integration_tests' in str(item.fspath): - return 4 # integration tests have the lowest priority - else: - return 5 # all other tests have lower priority - - # Sort the items list in-place based on the test_priority function - items.sort(key=test_priority) - - -def is_port_in_use(port): - with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: - return s.connect_ex(("localhost", port)) == 0 - - -@pytest.fixture(scope="session", autouse=True) -def remove_postgres_container(): - client = docker.from_env() - if is_port_in_use(PYTEST_POSTGRES_PORT): - try: - container = client.containers.get( - PYTEST_POSTGRES_CONTAINER, - ) - container.stop() - container.remove() - except docker.errors.NotFound: - pass # Container not found, nothing to do. - - -def test_pg_connection(host) -> bool: - pg_url = f"postgresql://postgres:postgres@{host}:{PYTEST_POSTGRES_PORT}/postgres" - - max_attempts = 120 - for attempt in range(max_attempts): - try: - conn = psycopg.connect(pg_url) - conn.close() - return True - except psycopg.OperationalError: - logger.info(f"Waiting for postgres to start (attempt {attempt + 1}/{max_attempts})") - time.sleep(1.0) - - else: - return False - - -@pytest.fixture(scope="session") -def pg_dsn(): - client = docker.from_env() - try: - client.images.get(PYTEST_POSTGRES_IMAGE) - except docker.errors.ImageNotFound: - # Pull the image if it doesn't exist, to avoid failing our sleep timer - # if the image needs to download on-demand. - client.images.pull(PYTEST_POSTGRES_IMAGE) - - try: - previous_container = client.containers.get(PYTEST_POSTGRES_CONTAINER) - previous_container.remove() - except docker.errors.NotFound: - pass - - postgres_is_running = False - postgres = client.containers.run( - image=PYTEST_POSTGRES_IMAGE, - name=PYTEST_POSTGRES_CONTAINER, - environment={"POSTGRES_USER": "postgres", "POSTGRES_PASSWORD": "postgres", "POSTGRES_DB": "postgres"}, - ports={"5432/tcp": PYTEST_POSTGRES_PORT}, - detach=True, - ) - - attempts = 10 - while not postgres_is_running and attempts > 0: - try: - postgres.reload() - postgres_is_running = postgres.status == "running" - except docker.errors.NotFound: - attempts -= 1 - time.sleep(3) - if not postgres_is_running: - raise Exception(f"Failed to start the PostgreSQL container. Status: {postgres.status}.") - - final_host = None - if host := os.environ.get("DOCKER_HOST_NAME"): - final_host = host if test_pg_connection(host) else None - else: - # Try to connect to the database using localhost and the docker host IP - for host in ["127.0.0.1", "localhost", "host.docker.internal", "172.17.0.1"]: - if test_pg_connection(host): - final_host = host - break - - if final_host is None: - raise Exception(f"Failed to connect to the PostgreSQL database on host {host}.") - - yield final_host - # Stop and remove the container after the tests are done - postgres.stop() - postgres.remove() - - -@pytest.fixture -def new_pg_cache_config(pg_dsn): - """Fixture to return a fresh cache. - - Each test that uses this fixture will get a unique table prefix. - """ - config = PostgresCacheConfig( - host=pg_dsn, - port=PYTEST_POSTGRES_PORT, - username="postgres", - password="postgres", - database="postgres", - schema_name="public", - - # TODO: Move this to schema name when we support it (breaks as of 2024-01-31): - table_prefix=f"test{str(ulid.ULID())[-6:]}_", - ) - yield config - - -@pytest.fixture -def snowflake_config(): - if "GCP_GSM_CREDENTIALS" not in os.environ: - raise Exception("GCP_GSM_CREDENTIALS env variable not set, can't fetch secrets for Snowflake. Make sure they are set up as described: https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/ci_credentials/README.md#get-gsm-access") - secret_client = secretmanager.SecretManagerServiceClient.from_service_account_info( - json.loads(os.environ["GCP_GSM_CREDENTIALS"]) - ) - secret = json.loads( - secret_client.access_secret_version( - name="projects/dataline-integration-testing/secrets/AIRBYTE_LIB_SNOWFLAKE_CREDS/versions/latest" - ).payload.data.decode("UTF-8") - ) - config = SnowflakeCacheConfig( - account=secret["account"], - username=secret["username"], - password=secret["password"], - database=secret["database"], - warehouse=secret["warehouse"], - role=secret["role"], - schema_name=f"test{str(ulid.ULID()).lower()[-6:]}", - ) - - yield config - - engine = create_engine(config.get_sql_alchemy_url()) - with engine.begin() as connection: - connection.execute(f"DROP SCHEMA IF EXISTS {config.schema_name}") - - -@pytest.fixture(autouse=True) -def source_test_registry(monkeypatch): - """ - Set environment variables for the test source. - - These are applied to this test file only. - - This means the normal registry is not usable. Expect AirbyteConnectorNotRegisteredError for - other connectors. - """ - env_vars = { - "AIRBYTE_LOCAL_REGISTRY": LOCAL_TEST_REGISTRY_URL, - } - for key, value in env_vars.items(): - monkeypatch.setenv(key, value) - - -@pytest.fixture(autouse=True) -def do_not_track(monkeypatch): - """ - Set environment variables for the test source. - - These are applied to this test file only. - """ - env_vars = { - "DO_NOT_TRACK": "true" - } - for key, value in env_vars.items(): - monkeypatch.setenv(key, value) - - -@pytest.fixture(scope="package") -def source_test_installation(): - """ - Prepare test environment. This will pre-install the test source from the fixtures array and set - the environment variable to use the local json file as registry. - """ - venv_dir = ".venv-source-test" - if os.path.exists(venv_dir): - shutil.rmtree(venv_dir) - - subprocess.run(["python", "-m", "venv", venv_dir], check=True) - subprocess.run([f"{venv_dir}/bin/pip", "install", "-e", "./tests/integration_tests/fixtures/source-test"], check=True) - - yield - - shutil.rmtree(venv_dir) diff --git a/airbyte-lib/tests/docs_tests/test_docs_checked_in.py b/airbyte-lib/tests/docs_tests/test_docs_checked_in.py deleted file mode 100644 index 54614c7cd6210..0000000000000 --- a/airbyte-lib/tests/docs_tests/test_docs_checked_in.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -import os - -import docs - - -def test_docs_checked_in(): - """ - Docs need to be generated via `poetry run generate-docs` and checked in to the repo. - - This test runs the docs generation and compares the output with the checked in docs. - It will fail if there are any differences. - """ - - docs.run() - - # compare the generated docs with the checked in docs - diff = os.system("git diff --exit-code docs/generated") - - # if there is a diff, fail the test - assert diff == 0, "Docs are out of date. Please run `poetry run generate-docs` and commit the changes." diff --git a/airbyte-lib/tests/docs_tests/test_validate_changelog.py b/airbyte-lib/tests/docs_tests/test_validate_changelog.py deleted file mode 100644 index 7481d014af2a3..0000000000000 --- a/airbyte-lib/tests/docs_tests/test_validate_changelog.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -import tomli - - -def test_validate_changelog(): - """ - Publishing a version involves bumping the version in pyproject.toml and adding a changelog entry. - This test ensures that the changelog entry is present. - """ - - # get the version from pyproject.toml - with open("pyproject.toml") as f: - contents = tomli.loads(f.read()) - version = contents["tool"]["poetry"]["version"] - - # get the changelog - with open("README.md") as f: - readme = f.read() - changelog = readme.split("## Changelog")[-1] - - # check that the changelog contains the version - assert version in changelog, f"Version {version} is missing from the changelog in README.md. Please add it." diff --git a/airbyte-lib/tests/integration_tests/fixtures/invalid_config.json b/airbyte-lib/tests/integration_tests/fixtures/invalid_config.json deleted file mode 100644 index 3ce4b45a32097..0000000000000 --- a/airbyte-lib/tests/integration_tests/fixtures/invalid_config.json +++ /dev/null @@ -1 +0,0 @@ -{ "apiKey": "wrong" } diff --git a/airbyte-lib/tests/integration_tests/fixtures/registry.json b/airbyte-lib/tests/integration_tests/fixtures/registry.json deleted file mode 100644 index d356be8c0ccdc..0000000000000 --- a/airbyte-lib/tests/integration_tests/fixtures/registry.json +++ /dev/null @@ -1,88 +0,0 @@ -{ - "sources": [ - { - "sourceDefinitionId": "9f32dab3-77cb-45a1-9d33-347aa5fbe363", - "name": "Test Source", - "dockerRepository": "airbyte/source-test", - "dockerImageTag": "0.0.1", - "documentationUrl": "https://docs.airbyte.com/integrations/sources/test", - "icon": "test.svg", - "iconUrl": "https://connectors.airbyte.com/files/metadata/airbyte/source-test/latest/icon.svg", - "sourceType": "api", - "remoteRegistries": { - "pypi": { - "packageName": "airbyte-source-test", - "enabled": true - } - }, - "spec": { - "documentationUrl": "https://docs.airbyte.com/integrations/sources/test", - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "apiKey": { - "type": "string", - "title": "API Key", - "description": "The API key for the service" - } - } - } - }, - "tombstone": false, - "public": true, - "custom": false, - "releaseStage": "alpha", - "supportLevel": "community", - "ab_internal": { - "sl": 100, - "ql": 200 - }, - "tags": ["language:python"], - "githubIssueLabel": "source-test", - "license": "MIT" - }, - { - "sourceDefinitionId": "9f32dab3-77cb-45a1-9d33-347aa5fbe333", - "name": "Non-published source", - "dockerRepository": "airbyte/source-non-published", - "dockerImageTag": "0.0.1", - "documentationUrl": "https://docs.airbyte.com/integrations/sources/test", - "icon": "test.svg", - "iconUrl": "https://connectors.airbyte.com/files/metadata/airbyte/source-test/latest/icon.svg", - "sourceType": "api", - "remoteRegistries": { - "pypi": { - "packageName": "airbyte-source-non-published", - "enabled": false - } - }, - "spec": { - "documentationUrl": "https://docs.airbyte.com/integrations/sources/test", - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "apiKey": { - "type": "string", - "title": "API Key", - "description": "The API key for the service" - } - } - } - }, - "tombstone": false, - "public": true, - "custom": false, - "releaseStage": "alpha", - "supportLevel": "community", - "ab_internal": { - "sl": 100, - "ql": 200 - }, - "tags": ["language:python"], - "githubIssueLabel": "source-source-non-published", - "license": "MIT" - } - ] -} diff --git a/airbyte-lib/tests/integration_tests/fixtures/source-broken/metadata.yaml b/airbyte-lib/tests/integration_tests/fixtures/source-broken/metadata.yaml deleted file mode 100644 index 6f1494d43b275..0000000000000 --- a/airbyte-lib/tests/integration_tests/fixtures/source-broken/metadata.yaml +++ /dev/null @@ -1,17 +0,0 @@ -data: - connectorSubtype: api - connectorType: source - definitionId: 47f17145-fe20-4ef5-a548-e29b048adf84 - dockerImageTag: 0.0.0 - dockerRepository: airbyte/source-broken - githubIssueLabel: source-broken - name: Test - releaseDate: 2023-08-25 - releaseStage: alpha - supportLevel: community - documentationUrl: https://docs.airbyte.com/integrations/sources/apify-dataset - remoteRegistries: - pypi: - enabled: true - packageName: airbyte-source-broken -metadataSpecVersion: "1.0" diff --git a/airbyte-lib/tests/integration_tests/fixtures/source-broken/setup.py b/airbyte-lib/tests/integration_tests/fixtures/source-broken/setup.py deleted file mode 100644 index 516112718b7e9..0000000000000 --- a/airbyte-lib/tests/integration_tests/fixtures/source-broken/setup.py +++ /dev/null @@ -1,20 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import setup - -setup( - name="airbyte-source-broken", - version="0.0.1", - description="Test Soutce", - author="Airbyte", - author_email="contact@airbyte.io", - packages=["source_broken"], - entry_points={ - "console_scripts": [ - "source-broken=source_broken.run:run", - ], - }, -) diff --git a/airbyte-lib/tests/integration_tests/fixtures/source-broken/source_broken/run.py b/airbyte-lib/tests/integration_tests/fixtures/source-broken/source_broken/run.py deleted file mode 100644 index c777271f249a6..0000000000000 --- a/airbyte-lib/tests/integration_tests/fixtures/source-broken/source_broken/run.py +++ /dev/null @@ -1,4 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -def run(): - raise Exception("Could not run") \ No newline at end of file diff --git a/airbyte-lib/tests/integration_tests/fixtures/source-test/metadata.yaml b/airbyte-lib/tests/integration_tests/fixtures/source-test/metadata.yaml deleted file mode 100644 index 8712af99c05d2..0000000000000 --- a/airbyte-lib/tests/integration_tests/fixtures/source-test/metadata.yaml +++ /dev/null @@ -1,17 +0,0 @@ -data: - connectorSubtype: api - connectorType: source - definitionId: 47f17145-fe20-4ef5-a548-e29b048adf84 - dockerImageTag: 0.0.0 - dockerRepository: airbyte/source-test - githubIssueLabel: source-test - name: Test - releaseDate: 2023-08-25 - releaseStage: alpha - supportLevel: community - documentationUrl: https://docs.airbyte.com/integrations/sources/apify-dataset - remoteRegistries: - pypi: - enabled: true - packageName: airbyte-source-test -metadataSpecVersion: "1.0" diff --git a/airbyte-lib/tests/integration_tests/fixtures/source-test/setup.py b/airbyte-lib/tests/integration_tests/fixtures/source-test/setup.py deleted file mode 100644 index 0035f1eda76a2..0000000000000 --- a/airbyte-lib/tests/integration_tests/fixtures/source-test/setup.py +++ /dev/null @@ -1,20 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import setup - -setup( - name="airbyte-source-test", - version="0.0.1", - description="Test Soutce", - author="Airbyte", - author_email="contact@airbyte.io", - packages=["source_test"], - entry_points={ - "console_scripts": [ - "source-test=source_test.run:run", - ], - }, -) diff --git a/airbyte-lib/tests/integration_tests/fixtures/source-test/source_test/run.py b/airbyte-lib/tests/integration_tests/fixtures/source-test/source_test/run.py deleted file mode 100644 index 5f4ae3f1e939b..0000000000000 --- a/airbyte-lib/tests/integration_tests/fixtures/source-test/source_test/run.py +++ /dev/null @@ -1,150 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -import json -import sys - -sample_catalog = { - "type": "CATALOG", - "catalog": { - "streams": [ - { - "name": "stream1", - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": True, - "default_cursor_field": ["column1"], - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "column1": {"type": "string"}, - "column2": {"type": "number"}, - }, - }, - }, - { - "name": "stream2", - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": False, - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "column1": {"type": "string"}, - "column2": {"type": "number"}, - "empty_column": {"type": "string"}, - }, - }, - }, - { - "name": "always-empty-stream", - "description": "This stream always emits zero records, to test handling of empty datasets.", - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": False, - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "column1": {"type": "string"}, - "column2": {"type": "number"}, - "empty_column": {"type": "string"}, - }, - }, - }, - ] - }, -} - -sample_connection_specification = { - "type": "SPEC", - "spec": { - "documentationUrl": "https://example.com", - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "apiKey": { - "type": "string", - "title": "API Key", - "description": "The API key for the service", - } - }, - }, - }, -} - -sample_connection_check_success = { - "type": "CONNECTION_STATUS", - "connectionStatus": {"status": "SUCCEEDED"}, -} - -sample_connection_check_failure = { - "type": "CONNECTION_STATUS", - "connectionStatus": {"status": "FAILED", "message": "An error"}, -} - -sample_record1_stream1 = { - "type": "RECORD", - "record": { - "data": {"column1": "value1", "column2": 1}, - "stream": "stream1", - "emitted_at": 123456789, - }, -} -sample_record2_stream1 = { - "type": "RECORD", - "record": { - "data": {"column1": "value2", "column2": 2}, - "stream": "stream1", - "emitted_at": 123456789, - }, -} -sample_record_stream2 = { - "type": "RECORD", - "record": { - "data": {"column1": "value1", "column2": 1}, - "stream": "stream2", - "emitted_at": 123456789, - }, -} - - -def parse_args(): - arg_dict = {} - args = sys.argv[2:] - for i in range(0, len(args), 2): - arg_dict[args[i]] = args[i + 1] - - return arg_dict - - -def get_json_file(path): - with open(path, "r") as f: - return json.load(f) - - -def run(): - args = sys.argv[1:] - if args[0] == "spec": - print(json.dumps(sample_connection_specification)) - elif args[0] == "discover": - print(json.dumps(sample_catalog)) - elif args[0] == "check": - args = parse_args() - config = get_json_file(args["--config"]) - if config.get("apiKey").startswith("test"): - print(json.dumps(sample_connection_check_success)) - else: - print(json.dumps(sample_connection_check_failure)) - elif args[0] == "read": - args = parse_args() - catalog = get_json_file(args["--catalog"]) - config = get_json_file(args["--config"]) - print(json.dumps({"type": "LOG", "log": {"level": "INFO", "message": "Starting sync"}})) - for stream in catalog["streams"]: - if stream["stream"]["name"] == "stream1": - print(json.dumps(sample_record1_stream1)) - if config.get("apiKey") == "test_fail_during_sync": - raise Exception("An error") - print(json.dumps(sample_record2_stream1)) - elif stream["stream"]["name"] == "stream2": - print(json.dumps(sample_record_stream2)) diff --git a/airbyte-lib/tests/integration_tests/fixtures/valid_config.json b/airbyte-lib/tests/integration_tests/fixtures/valid_config.json deleted file mode 100644 index fbe094d80a449..0000000000000 --- a/airbyte-lib/tests/integration_tests/fixtures/valid_config.json +++ /dev/null @@ -1 +0,0 @@ -{ "apiKey": "test" } diff --git a/airbyte-lib/tests/integration_tests/test_install.py b/airbyte-lib/tests/integration_tests/test_install.py deleted file mode 100644 index 3350d54cbfb52..0000000000000 --- a/airbyte-lib/tests/integration_tests/test_install.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -from gettext import install -import pytest - -from airbyte_lib._factories.connector_factories import get_source -from airbyte_lib import exceptions as exc - - -def test_install_failure_log_pypi(): - """Test that the install log is created and contains the expected content.""" - with pytest.raises(exc.AirbyteConnectorNotRegisteredError): - source = get_source("source-not-found") - - with pytest.raises(exc.AirbyteConnectorInstallationError) as exc_info: - source = get_source( - "source-not-found", - pip_url="https://pypi.org/project/airbyte-not-found", - install_if_missing=True, - ) - - # Check that the stderr log contains the expected content from a failed pip install - assert 'Could not install requirement' in str(exc_info.value.__cause__.log_text) diff --git a/airbyte-lib/tests/integration_tests/test_snowflake_cache.py b/airbyte-lib/tests/integration_tests/test_snowflake_cache.py deleted file mode 100644 index 4ac08f4bebe3b..0000000000000 --- a/airbyte-lib/tests/integration_tests/test_snowflake_cache.py +++ /dev/null @@ -1,156 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -"""Integration tests which leverage the source-faker connector to test the framework end-to-end. - -Since source-faker is included in dev dependencies, we can assume `source-faker` is installed -and available on PATH for the poetry-managed venv. -""" -from __future__ import annotations -from collections.abc import Generator -import os -import sys -import shutil -from pathlib import Path - -import pytest -import ulid -import viztracer - -from airbyte_cdk.models import ConfiguredAirbyteCatalog - -import airbyte_lib as ab -from airbyte_lib import caches - - -# Product count is always the same, regardless of faker scale. -NUM_PRODUCTS = 100 - -SEED_A = 1234 -SEED_B = 5678 - -# Number of records in each of the 'users' and 'purchases' streams. -FAKER_SCALE_A = 200 -# We want this to be different from FAKER_SCALE_A. -FAKER_SCALE_B = 300 - - -# Patch PATH to include the source-faker executable. - -@pytest.fixture(autouse=True) -def add_venv_bin_to_path(monkeypatch): - # Get the path to the bin directory of the virtual environment - venv_bin_path = os.path.join(sys.prefix, 'bin') - - # Add the bin directory to the PATH - new_path = f"{venv_bin_path}:{os.environ['PATH']}" - monkeypatch.setenv('PATH', new_path) - - -@pytest.fixture(scope="function") # Each test gets a fresh source-faker instance. -def source_faker_seed_a() -> ab.Source: - """Fixture to return a source-faker connector instance.""" - source = ab.get_source( - "source-faker", - local_executable="source-faker", - config={ - "count": FAKER_SCALE_A, - "seed": SEED_A, - "parallelism": 16, # Otherwise defaults to 4. - }, - install_if_missing=False, # Should already be on PATH - ) - source.check() - source.select_streams([ - "users", - ]) - return source - - -@pytest.fixture(scope="function") # Each test gets a fresh source-faker instance. -def source_faker_seed_b() -> ab.Source: - """Fixture to return a source-faker connector instance.""" - source = ab.get_source( - "source-faker", - local_executable="source-faker", - config={ - "count": FAKER_SCALE_B, - "seed": SEED_B, - "parallelism": 16, # Otherwise defaults to 4. - }, - install_if_missing=False, # Should already be on PATH - ) - source.check() - source.select_streams([ - "users", - ]) - return source - - -@pytest.fixture(scope="function") -def snowflake_cache(snowflake_config) -> Generator[caches.SnowflakeCache, None, None]: - """Fixture to return a fresh cache.""" - cache: caches.SnowflakeCache = caches.SnowflakeSQLCache(snowflake_config) - yield cache - # TODO: Delete cache DB file after test is complete. - return - - -# Uncomment this line if you want to see performance trace logs. -# You can render perf traces using the viztracer CLI or the VS Code VizTracer Extension. -#@viztracer.trace_and_save(output_dir=".pytest_cache/snowflake_trace/") -@pytest.mark.requires_creds -@pytest.mark.slow -def test_faker_read_to_snowflake( - source_faker_seed_a: ab.Source, - snowflake_cache: ab.SnowflakeCache, -) -> None: - """Test that the append strategy works as expected.""" - result = source_faker_seed_a.read( - snowflake_cache, write_strategy="replace", force_full_refresh=True - ) - assert len(list(result.cache.streams["users"])) == FAKER_SCALE_A - - -@pytest.mark.requires_creds -@pytest.mark.slow -def test_replace_strategy( - source_faker_seed_a: ab.Source, - snowflake_cache: ab.SnowflakeCache, -) -> None: - """Test that the append strategy works as expected.""" - for _ in range(2): - result = source_faker_seed_a.read( - snowflake_cache, write_strategy="replace", force_full_refresh=True - ) - assert len(list(result.cache.streams["users"])) == FAKER_SCALE_A - - -@pytest.mark.requires_creds -@pytest.mark.slow -def test_merge_strategy( - source_faker_seed_a: ab.Source, - source_faker_seed_b: ab.Source, - snowflake_cache: ab.DuckDBCache, -) -> None: - """Test that the merge strategy works as expected. - - Since all streams have primary keys, we should expect the auto strategy to be identical to the - merge strategy. - """ - # First run, seed A (counts should match the scale or the product count) - result = source_faker_seed_a.read(snowflake_cache, write_strategy="merge") - assert len(list(result.cache.streams["users"])) == FAKER_SCALE_A - - # Second run, also seed A (should have same exact data, no change in counts) - result = source_faker_seed_a.read(snowflake_cache, write_strategy="merge") - assert len(list(result.cache.streams["users"])) == FAKER_SCALE_A - - # Third run, seed B - should increase record count to the scale of B, which is greater than A. - # TODO: See if we can reliably predict the exact number of records, since we use fixed seeds. - result = source_faker_seed_b.read(snowflake_cache, write_strategy="merge") - assert len(list(result.cache.streams["users"])) == FAKER_SCALE_B - - # Third run, seed A again - count should stay at scale B, since A is smaller. - # TODO: See if we can reliably predict the exact number of records, since we use fixed seeds. - result = source_faker_seed_a.read(snowflake_cache, write_strategy="merge") - assert len(list(result.cache.streams["users"])) == FAKER_SCALE_B diff --git a/airbyte-lib/tests/integration_tests/test_source_faker_integration.py b/airbyte-lib/tests/integration_tests/test_source_faker_integration.py deleted file mode 100644 index 244c52f0e901e..0000000000000 --- a/airbyte-lib/tests/integration_tests/test_source_faker_integration.py +++ /dev/null @@ -1,282 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -"""Integration tests which leverage the source-faker connector to test the framework end-to-end. - -Since source-faker is included in dev dependencies, we can assume `source-faker` is installed -and available on PATH for the poetry-managed venv. -""" -from __future__ import annotations -from collections.abc import Generator -import os -import sys -import shutil -from pathlib import Path - -import pytest -import ulid -import viztracer - -from airbyte_cdk.models import ConfiguredAirbyteCatalog - -import airbyte_lib as ab -from airbyte_lib import caches - - -# Product count is always the same, regardless of faker scale. -NUM_PRODUCTS = 100 - -SEED_A = 1234 -SEED_B = 5678 - -# Number of records in each of the 'users' and 'purchases' streams. -FAKER_SCALE_A = 200 -# We want this to be different from FAKER_SCALE_A. -FAKER_SCALE_B = 300 - - -# Patch PATH to include the source-faker executable. - -@pytest.fixture(autouse=True) -def add_venv_bin_to_path(monkeypatch): - # Get the path to the bin directory of the virtual environment - venv_bin_path = os.path.join(sys.prefix, 'bin') - - # Add the bin directory to the PATH - new_path = f"{venv_bin_path}:{os.environ['PATH']}" - monkeypatch.setenv('PATH', new_path) - - -def test_which_source_faker() -> None: - """Test that source-faker is available on PATH.""" - assert shutil.which("source-faker") is not None, \ - f"Can't find source-faker on PATH: {os.environ['PATH']}" - - -@pytest.fixture(scope="function") # Each test gets a fresh source-faker instance. -def source_faker_seed_a() -> ab.Source: - """Fixture to return a source-faker connector instance.""" - source = ab.get_source( - "source-faker", - local_executable="source-faker", - config={ - "count": FAKER_SCALE_A, - "seed": SEED_A, - "parallelism": 16, # Otherwise defaults to 4. - }, - install_if_missing=False, # Should already be on PATH - ) - source.check() - source.select_streams([ - "users", - "products", - "purchases", - ]) - return source - - -@pytest.fixture(scope="function") # Each test gets a fresh source-faker instance. -def source_faker_seed_b() -> ab.Source: - """Fixture to return a source-faker connector instance.""" - source = ab.get_source( - "source-faker", - local_executable="source-faker", - config={ - "count": FAKER_SCALE_B, - "seed": SEED_B, - "parallelism": 16, # Otherwise defaults to 4. - }, - install_if_missing=False, # Should already be on PATH - ) - source.check() - source.select_streams([ - "products", - "purchases", - "users", - ]) - return source - - -@pytest.fixture(scope="function") -def duckdb_cache() -> Generator[caches.DuckDBCache, None, None]: - """Fixture to return a fresh cache.""" - cache: caches.DuckDBCache = ab.new_local_cache() - yield cache - # TODO: Delete cache DB file after test is complete. - return - - -@pytest.fixture(scope="function") -def postgres_cache(new_pg_cache_config) -> Generator[caches.PostgresCache, None, None]: - """Fixture to return a fresh cache.""" - cache: caches.PostgresCache = caches.PostgresCache(config=new_pg_cache_config) - yield cache - # TODO: Delete cache DB file after test is complete. - return - - -@pytest.fixture -def all_cache_types( - duckdb_cache: ab.DuckDBCache, - postgres_cache: ab.PostgresCache, -): - _ = postgres_cache - return [ - duckdb_cache, - postgres_cache, - ] - -def test_faker_pks( - source_faker_seed_a: ab.Source, - duckdb_cache: ab.DuckDBCache, -) -> None: - """Test that the append strategy works as expected.""" - - catalog: ConfiguredAirbyteCatalog = source_faker_seed_a.configured_catalog - - assert catalog.streams[0].primary_key - assert catalog.streams[1].primary_key - - read_result = source_faker_seed_a.read(duckdb_cache, write_strategy="append") - assert read_result.cache._get_primary_keys("products") == ["id"] - assert read_result.cache._get_primary_keys("purchases") == ["id"] - - -@pytest.mark.slow -def test_replace_strategy( - source_faker_seed_a: ab.Source, - all_cache_types: ab.DuckDBCache, -) -> None: - """Test that the append strategy works as expected.""" - for cache in all_cache_types: # Function-scoped fixtures can't be used in parametrized(). - for _ in range(2): - result = source_faker_seed_a.read( - cache, write_strategy="replace", force_full_refresh=True - ) - assert len(list(result.cache.streams["products"])) == NUM_PRODUCTS - assert len(list(result.cache.streams["purchases"])) == FAKER_SCALE_A - - -@pytest.mark.slow -def test_append_strategy( - source_faker_seed_a: ab.Source, - all_cache_types: ab.DuckDBCache, -) -> None: - """Test that the append strategy works as expected.""" - for cache in all_cache_types: # Function-scoped fixtures can't be used in parametrized(). - for iteration in range(1, 3): - result = source_faker_seed_a.read(cache, write_strategy="append") - assert len(list(result.cache.streams["products"])) == NUM_PRODUCTS * iteration - assert len(list(result.cache.streams["purchases"])) == FAKER_SCALE_A * iteration - - -@pytest.mark.slow -@pytest.mark.parametrize("strategy", ["merge", "auto"]) -def test_merge_strategy( - strategy: str, - source_faker_seed_a: ab.Source, - source_faker_seed_b: ab.Source, - all_cache_types: ab.DuckDBCache, -) -> None: - """Test that the merge strategy works as expected. - - Since all streams have primary keys, we should expect the auto strategy to be identical to the - merge strategy. - """ - for cache in all_cache_types: # Function-scoped fixtures can't be used in parametrized(). - # First run, seed A (counts should match the scale or the product count) - result = source_faker_seed_a.read(cache, write_strategy=strategy) - assert len(list(result.cache.streams["products"])) == NUM_PRODUCTS - assert len(list(result.cache.streams["purchases"])) == FAKER_SCALE_A - - # Second run, also seed A (should have same exact data, no change in counts) - result = source_faker_seed_a.read(cache, write_strategy=strategy) - assert len(list(result.cache.streams["products"])) == NUM_PRODUCTS - assert len(list(result.cache.streams["purchases"])) == FAKER_SCALE_A - - # Third run, seed B - should increase record count to the scale of B, which is greater than A. - # TODO: See if we can reliably predict the exact number of records, since we use fixed seeds. - result = source_faker_seed_b.read(cache, write_strategy=strategy) - assert len(list(result.cache.streams["products"])) == NUM_PRODUCTS - assert len(list(result.cache.streams["purchases"])) == FAKER_SCALE_B - - # Third run, seed A again - count should stay at scale B, since A is smaller. - # TODO: See if we can reliably predict the exact number of records, since we use fixed seeds. - result = source_faker_seed_a.read(cache, write_strategy=strategy) - assert len(list(result.cache.streams["products"])) == NUM_PRODUCTS - assert len(list(result.cache.streams["purchases"])) == FAKER_SCALE_B - - -def test_incremental_sync( - source_faker_seed_a: ab.Source, - source_faker_seed_b: ab.Source, - duckdb_cache: ab.DuckDBCache, -) -> None: - config_a = source_faker_seed_a.get_config() - config_b = source_faker_seed_b.get_config() - config_a["always_updated"] = False - config_b["always_updated"] = False - source_faker_seed_a.set_config(config_a) - source_faker_seed_b.set_config(config_b) - - result1 = source_faker_seed_a.read(duckdb_cache) - assert len(list(result1.cache.streams["products"])) == NUM_PRODUCTS - assert len(list(result1.cache.streams["purchases"])) == FAKER_SCALE_A - assert result1.processed_records == NUM_PRODUCTS + FAKER_SCALE_A * 2 - - assert not duckdb_cache.get_state() == [] - - # Second run should not return records as it picks up the state and knows it's up to date. - result2 = source_faker_seed_b.read(duckdb_cache) - - assert result2.processed_records == 0 - assert len(list(result2.cache.streams["products"])) == NUM_PRODUCTS - assert len(list(result2.cache.streams["purchases"])) == FAKER_SCALE_A - - -def test_incremental_state_cache_persistence( - source_faker_seed_a: ab.Source, - source_faker_seed_b: ab.Source, -) -> None: - config_a = source_faker_seed_a.get_config() - config_b = source_faker_seed_b.get_config() - config_a["always_updated"] = False - config_b["always_updated"] = False - source_faker_seed_a.set_config(config_a) - source_faker_seed_b.set_config(config_b) - cache_name = str(ulid.ULID()) - cache = ab.new_local_cache(cache_name) - result = source_faker_seed_a.read(cache) - assert result.processed_records == NUM_PRODUCTS + FAKER_SCALE_A * 2 - second_cache = ab.new_local_cache(cache_name) - # The state should be persisted across cache instances. - result2 = source_faker_seed_b.read(second_cache) - assert result2.processed_records == 0 - - assert not second_cache.get_state() == [] - assert len(list(result2.cache.streams["products"])) == NUM_PRODUCTS - assert len(list(result2.cache.streams["purchases"])) == FAKER_SCALE_A - - -def test_incremental_state_prefix_isolation( - source_faker_seed_a: ab.Source, - source_faker_seed_b: ab.Source, -) -> None: - """ - Test that state in the cache correctly isolates streams when different table prefixes are used - """ - config_a = source_faker_seed_a.get_config() - config_a["always_updated"] = False - source_faker_seed_a.set_config(config_a) - cache_name = str(ulid.ULID()) - db_path = Path(f"./.cache/{cache_name}.duckdb") - cache = ab.DuckDBCache(config=ab.DuckDBCacheConfig(db_path=db_path, table_prefix="prefix_")) - different_prefix_cache = ab.DuckDBCache(config=ab.DuckDBCacheConfig(db_path=db_path, table_prefix="different_prefix_")) - - result = source_faker_seed_a.read(cache) - assert result.processed_records == NUM_PRODUCTS + FAKER_SCALE_A * 2 - - result2 = source_faker_seed_b.read(different_prefix_cache) - assert result2.processed_records == NUM_PRODUCTS + FAKER_SCALE_B * 2 - - assert len(list(result2.cache.streams["products"])) == NUM_PRODUCTS - assert len(list(result2.cache.streams["purchases"])) == FAKER_SCALE_B diff --git a/airbyte-lib/tests/integration_tests/test_source_test_fixture.py b/airbyte-lib/tests/integration_tests/test_source_test_fixture.py deleted file mode 100644 index 5e3a05e683a9f..0000000000000 --- a/airbyte-lib/tests/integration_tests/test_source_test_fixture.py +++ /dev/null @@ -1,820 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -from collections.abc import Mapping -import os -import shutil -import itertools -from contextlib import nullcontext as does_not_raise -from typing import Any -from unittest.mock import Mock, call, patch -import tempfile -from pathlib import Path -from airbyte_lib.caches.base import SQLCacheBase - -from sqlalchemy import column, text - -import airbyte_lib as ab -from airbyte_lib.caches import SnowflakeCacheConfig, SnowflakeSQLCache -import pandas as pd -import pytest - -from airbyte_lib.caches import PostgresCache, PostgresCacheConfig -from airbyte_lib import registry -from airbyte_lib.version import get_version -from airbyte_lib.results import ReadResult -from airbyte_lib.datasets import CachedDataset, LazyDataset, SQLDataset -import airbyte_lib as ab - -from airbyte_lib.results import ReadResult -from airbyte_lib import exceptions as exc -import ulid - - -@pytest.fixture(scope="module", autouse=True) -def autouse_source_test_installation(source_test_installation): - return - - -@pytest.fixture(scope="function", autouse=True) -def autouse_source_test_registry(source_test_registry): - return - - -@pytest.fixture -def source_test(source_test_env) -> ab.Source: - return ab.get_source("source-test", config={"apiKey": "test"}) - - -@pytest.fixture -def expected_test_stream_data() -> dict[str, list[dict[str, str | int]]]: - return { - "stream1": [ - {"column1": "value1", "column2": 1}, - {"column1": "value2", "column2": 2}, - ], - "stream2": [ - {"column1": "value1", "column2": 1, "empty_column": None}, - ], - "always-empty-stream": [], - } - -def test_registry_get(): - metadata = registry.get_connector_metadata("source-test") - assert metadata.name == "source-test" - assert metadata.latest_available_version == "0.0.1" - - -def test_registry_list() -> None: - assert registry.get_available_connectors() == ["source-test"] - - -def test_list_streams(expected_test_stream_data: dict[str, list[dict[str, str | int]]]): - source = ab.get_source( - "source-test", config={"apiKey": "test"}, install_if_missing=False - ) - assert source.get_available_streams() == list(expected_test_stream_data.keys()) - - -def test_invalid_config(): - source = ab.get_source( - "source-test", config={"apiKey": 1234}, install_if_missing=False - ) - with pytest.raises(exc.AirbyteConnectorCheckFailedError): - source.check() - - -def test_ensure_installation_detection(): - """Assert that install isn't called, since the connector is already installed by the fixture.""" - with patch("airbyte_lib._executor.VenvExecutor.install") as mock_venv_install, \ - patch("airbyte_lib.source.Source.install") as mock_source_install, \ - patch("airbyte_lib._executor.VenvExecutor.ensure_installation") as mock_ensure_installed: - source = ab.get_source( - "source-test", - config={"apiKey": 1234}, - pip_url="https://pypi.org/project/airbyte-not-found", - install_if_missing=True, - ) - assert mock_ensure_installed.call_count == 1 - assert not mock_venv_install.called - assert not mock_source_install.called - - -def test_source_yaml_spec(): - source = ab.get_source( - "source-test", config={"apiKey": 1234}, install_if_missing=False - ) - assert source._yaml_spec.startswith("connectionSpecification:\n $schema:") - - -def test_non_existing_connector(): - with pytest.raises(Exception): - ab.get_source("source-not-existing", config={"apiKey": "abc"}) - -def test_non_enabled_connector(): - with pytest.raises(exc.AirbyteConnectorNotPyPiPublishedError): - ab.get_source("source-non-published", config={"apiKey": "abc"}) - -@pytest.mark.parametrize( - "latest_available_version, requested_version, raises", - [ - ("0.0.1", "latest", False), - ("0.0.1", "0.0.1", False), - ("0.0.1", None, False), - ("1.2.3", None, False), # Don't raise if a version is not requested - ("1.2.3", "latest", True), - ("1.2.3", "1.2.3", True), - ]) -def test_version_enforcement( - raises: bool, - latest_available_version, - requested_version, -): - """" - Ensures version enforcement works as expected: - * If no version is specified, the current version is accepted - * If the version is specified as "latest", only the latest available version is accepted - * If the version is specified as a semantic version, only the exact version is accepted - - In this test, the actually installed version is 0.0.1 - """ - patched_entry = registry.ConnectorMetadata( - name="source-test", latest_available_version=latest_available_version, pypi_package_name="airbyte-source-test" - ) - - # We need to initialize the cache before we can patch it. - _ = registry._get_registry_cache() - with patch.dict("airbyte_lib.registry.__cache", {"source-test": patched_entry}, clear=False): - if raises: - with pytest.raises(Exception): - source = ab.get_source( - "source-test", - version=requested_version, - config={"apiKey": "abc"}, - install_if_missing=False, - ) - source.executor.ensure_installation(auto_fix=False) - else: - source = ab.get_source( - "source-test", - version=requested_version, - config={"apiKey": "abc"}, - install_if_missing=False, - ) - if requested_version: # Don't raise if a version is not requested - assert source.executor._get_installed_version(raise_on_error=True) == ( - requested_version or latest_available_version - ).replace("latest", latest_available_version) - source.executor.ensure_installation(auto_fix=False) - - -def test_check(): - source = ab.get_source( - "source-test", - config={"apiKey": "test"}, - install_if_missing=False, - ) - source.check() - - -def test_check_fail(): - source = ab.get_source("source-test", config={"apiKey": "wrong"}) - - with pytest.raises(Exception): - source.check() - - -def test_file_write_and_cleanup() -> None: - """Ensure files are written to the correct location and cleaned up afterwards.""" - with tempfile.TemporaryDirectory() as temp_dir_1, tempfile.TemporaryDirectory() as temp_dir_2: - cache_w_cleanup = ab.new_local_cache(cache_dir=temp_dir_1, cleanup=True) - cache_wo_cleanup = ab.new_local_cache(cache_dir=temp_dir_2, cleanup=False) - - source = ab.get_source("source-test", config={"apiKey": "test"}) - source.select_all_streams() - - _ = source.read(cache_w_cleanup) - _ = source.read(cache_wo_cleanup) - - assert len(list(Path(temp_dir_1).glob("*.parquet"))) == 0, "Expected files to be cleaned up" - assert len(list(Path(temp_dir_2).glob("*.parquet"))) == 3, "Expected files to exist" - - -def assert_cache_data(expected_test_stream_data: dict[str, list[dict[str, str | int]]], cache: SQLCacheBase, streams: list[str] = None): - for stream_name in streams or expected_test_stream_data.keys(): - if len(cache[stream_name]) > 0: - pd.testing.assert_frame_equal( - cache[stream_name].to_pandas(), - pd.DataFrame(expected_test_stream_data[stream_name]), - check_dtype=False, - ) - else: - # stream is empty - assert len(expected_test_stream_data[stream_name]) == 0 - - # validate that the cache doesn't contain any other streams - if streams: - assert len(list(cache.__iter__())) == len(streams) - - -def test_sync_to_duckdb(expected_test_stream_data: dict[str, list[dict[str, str | int]]]): - source = ab.get_source("source-test", config={"apiKey": "test"}) - source.select_all_streams() - - cache = ab.new_local_cache() - - result: ReadResult = source.read(cache) - - assert result.processed_records == 3 - assert_cache_data(expected_test_stream_data, cache) - - -def test_read_result_mapping(): - source = ab.get_source("source-test", config={"apiKey": "test"}) - source.select_all_streams() - result: ReadResult = source.read(ab.new_local_cache()) - assert len(result) == 3 - assert isinstance(result, Mapping) - assert "stream1" in result - assert "stream2" in result - assert "always-empty-stream" in result - assert "stream3" not in result - assert result.keys() == {"stream1", "stream2", "always-empty-stream"} - - -def test_dataset_list_and_len(expected_test_stream_data): - source = ab.get_source("source-test", config={"apiKey": "test"}) - source.select_all_streams() - - result: ReadResult = source.read(ab.new_local_cache()) - stream_1 = result["stream1"] - assert len(stream_1) == 2 - assert len(list(stream_1)) == 2 - # Make sure we can iterate over the stream after calling len - assert list(stream_1) == [{"column1": "value1", "column2": 1}, {"column1": "value2", "column2": 2}] - # Make sure we can iterate over the stream a second time - assert list(stream_1) == [{"column1": "value1", "column2": 1}, {"column1": "value2", "column2": 2}] - - assert isinstance(result, Mapping) - assert "stream1" in result - assert "stream2" in result - assert "always-empty-stream" in result - assert "stream3" not in result - assert result.keys() == {"stream1", "stream2", "always-empty-stream"} - - -def test_read_from_cache(expected_test_stream_data: dict[str, list[dict[str, str | int]]]): - """ - Test that we can read from a cache that already has data (identifier by name) - """ - cache_name = str(ulid.ULID()) - source = ab.get_source("source-test", config={"apiKey": "test"}) - source.select_all_streams() - - cache = ab.new_local_cache(cache_name) - - source.read(cache) - - # Create a new cache pointing to the same duckdb file - second_cache = ab.new_local_cache(cache_name) - - - assert_cache_data(expected_test_stream_data, second_cache) - - -def test_read_isolated_by_prefix(expected_test_stream_data: dict[str, list[dict[str, str | int]]]): - """ - Test that cache correctly isolates streams when different table prefixes are used - """ - cache_name = str(ulid.ULID()) - db_path = Path(f"./.cache/{cache_name}.duckdb") - source = ab.get_source("source-test", config={"apiKey": "test"}) - source.select_all_streams() - cache = ab.DuckDBCache(config=ab.DuckDBCacheConfig(db_path=db_path, table_prefix="prefix_")) - - source.read(cache) - - same_prefix_cache = ab.DuckDBCache(config=ab.DuckDBCacheConfig(db_path=db_path, table_prefix="prefix_")) - different_prefix_cache = ab.DuckDBCache(config=ab.DuckDBCacheConfig(db_path=db_path, table_prefix="different_prefix_")) - no_prefix_cache = ab.DuckDBCache(config=ab.DuckDBCacheConfig(db_path=db_path, table_prefix=None)) - - # validate that the cache with the same prefix has the data as expected, while the other two are empty - assert_cache_data(expected_test_stream_data, same_prefix_cache) - assert len(list(different_prefix_cache.__iter__())) == 0 - assert len(list(no_prefix_cache.__iter__())) == 0 - - # read partial data into the other two caches - source.select_streams(["stream1"]) - source.read(different_prefix_cache) - source.read(no_prefix_cache) - - second_same_prefix_cache = ab.DuckDBCache(config=ab.DuckDBCacheConfig(db_path=db_path, table_prefix="prefix_")) - second_different_prefix_cache = ab.DuckDBCache(config=ab.DuckDBCacheConfig(db_path=db_path, table_prefix="different_prefix_")) - second_no_prefix_cache = ab.DuckDBCache(config=ab.DuckDBCacheConfig(db_path=db_path, table_prefix=None)) - - # validate that the first cache still has full data, while the other two have partial data - assert_cache_data(expected_test_stream_data, second_same_prefix_cache) - assert_cache_data(expected_test_stream_data, second_different_prefix_cache, streams=["stream1"]) - assert_cache_data(expected_test_stream_data, second_no_prefix_cache, streams=["stream1"]) - - -def test_merge_streams_in_cache(expected_test_stream_data: dict[str, list[dict[str, str | int]]]): - """ - Test that we can extend a cache with new streams - """ - cache_name = str(ulid.ULID()) - source = ab.get_source("source-test", config={"apiKey": "test"}) - cache = ab.new_local_cache(cache_name) - - source.select_streams(["stream1"]) - source.read(cache) - - # Assert that the cache only contains stream1 - with pytest.raises(KeyError): - cache["stream2"] - - # Create a new cache with the same name - second_cache = ab.new_local_cache(cache_name) - source.select_streams(["stream2"]) - result = source.read(second_cache) - - third_cache = ab.new_local_cache(cache_name) - source.select_streams(["always-empty-stream"]) - result = source.read(third_cache) - - # Assert that the read result only contains stream2 - with pytest.raises(KeyError): - result["stream1"] - with pytest.raises(KeyError): - result["stream2"] - - assert_cache_data(expected_test_stream_data, third_cache) - - -def test_read_result_as_list(expected_test_stream_data: dict[str, list[dict[str, str | int]]]): - source = ab.get_source("source-test", config={"apiKey": "test"}) - source.select_all_streams() - - cache = ab.new_local_cache() - - result: ReadResult = source.read(cache) - stream_1_list = list(result["stream1"]) - stream_2_list = list(result["stream2"]) - always_empty_stream_list = list(result["always-empty-stream"]) - assert stream_1_list == expected_test_stream_data["stream1"] - assert stream_2_list == expected_test_stream_data["stream2"] - assert always_empty_stream_list == expected_test_stream_data["always-empty-stream"] - - -def test_get_records_result_as_list(expected_test_stream_data: dict[str, list[dict[str, str | int]]]): - source = ab.get_source("source-test", config={"apiKey": "test"}) - cache = ab.new_local_cache() - - stream_1_list = list(source.get_records("stream1")) - stream_2_list = list(source.get_records("stream2")) - always_empty_stream_list = list(source.get_records("always-empty-stream")) - assert stream_1_list == expected_test_stream_data["stream1"] - assert stream_2_list == expected_test_stream_data["stream2"] - assert always_empty_stream_list == expected_test_stream_data["always-empty-stream"] - - - -def test_sync_with_merge_to_duckdb(expected_test_stream_data: dict[str, list[dict[str, str | int]]]): - """Test that the merge strategy works as expected. - - In this test, we sync the same data twice. If the data is not duplicated, we assume - the merge was successful. - - # TODO: Add a check with a primary key to ensure that the merge strategy works as expected. - """ - source = ab.get_source("source-test", config={"apiKey": "test"}) - source.select_all_streams() - - cache = ab.new_local_cache() - - # Read twice to test merge strategy - result: ReadResult = source.read(cache) - result: ReadResult = source.read(cache) - - assert result.processed_records == 3 - for stream_name, expected_data in expected_test_stream_data.items(): - if len(cache[stream_name]) > 0: - pd.testing.assert_frame_equal( - result[stream_name].to_pandas(), - pd.DataFrame(expected_data), - check_dtype=False, - ) - else: - # stream is empty - assert len(expected_test_stream_data[stream_name]) == 0 - - -def test_cached_dataset( - expected_test_stream_data: dict[str, list[dict[str, str | int]]], -) -> None: - source = ab.get_source("source-test", config={"apiKey": "test"}) - source.select_all_streams() - - result: ReadResult = source.read(ab.new_local_cache()) - - stream_name = "stream1" - not_a_stream_name = "not_a_stream" - - # Check that the stream appears in mapping-like attributes - assert stream_name in result.cache._streams_with_data - assert stream_name in result - assert stream_name in result.cache - assert stream_name in result.cache.streams - assert stream_name in result.streams - - stream_get_a: CachedDataset = result[stream_name] - stream_get_b: CachedDataset = result.streams[stream_name] - stream_get_c: CachedDataset = result.cache[stream_name] - stream_get_d: CachedDataset = result.cache.streams[stream_name] - - # Check that each get method is syntactically equivalent - - assert isinstance(stream_get_a, CachedDataset) - assert isinstance(stream_get_b, CachedDataset) - assert isinstance(stream_get_c, CachedDataset) - assert isinstance(stream_get_d, CachedDataset) - - assert stream_get_a == stream_get_b - assert stream_get_b == stream_get_c - assert stream_get_c == stream_get_d - - # Check that we can iterate over the stream - - list_from_iter_a = list(stream_get_a) - list_from_iter_b = [row for row in stream_get_a] - - # Make sure that we get a key error if we try to access a stream that doesn't exist - with pytest.raises(KeyError): - result[not_a_stream_name] - with pytest.raises(KeyError): - result.streams[not_a_stream_name] - with pytest.raises(KeyError): - result.cache[not_a_stream_name] - with pytest.raises(KeyError): - result.cache.streams[not_a_stream_name] - - # Make sure we can use "result.streams.items()" - for stream_name, cached_dataset in result.streams.items(): - assert isinstance(cached_dataset, CachedDataset) - assert isinstance(stream_name, str) - - list_data = list(cached_dataset) - assert list_data == expected_test_stream_data[stream_name] - - # Make sure we can use "result.cache.streams.items()" - for stream_name, cached_dataset in result.cache.streams.items(): - assert isinstance(cached_dataset, CachedDataset) - assert isinstance(stream_name, str) - - list_data = list(cached_dataset) - assert list_data == expected_test_stream_data[stream_name] - - -def test_cached_dataset_filter(): - source = ab.get_source("source-test", config={"apiKey": "test"}) - source.select_all_streams() - - result: ReadResult = source.read(ab.new_local_cache()) - - stream_name = "stream1" - - # Check the many ways to add a filter: - cached_dataset: CachedDataset = result[stream_name] - filtered_dataset_a: SQLDataset = cached_dataset.with_filter("column2 == 1") - filtered_dataset_b: SQLDataset = cached_dataset.with_filter(text("column2 == 1")) - filtered_dataset_c: SQLDataset = cached_dataset.with_filter(column("column2") == 1) - - assert isinstance(cached_dataset, CachedDataset) - all_records = list(cached_dataset) - assert len(all_records) == 2 - - for filtered_dataset, case in [ - (filtered_dataset_a, "a"), - (filtered_dataset_b, "b"), - (filtered_dataset_c, "c"), - ]: - assert isinstance(filtered_dataset, SQLDataset) - - # Check that we can iterate over each stream - - filtered_records: list[Mapping[str, Any]] = [row for row in filtered_dataset] - - # Check that the filter worked - assert len(filtered_records) == 1, f"Case '{case}' had incorrect number of records." - - # Assert the stream name still matches - assert filtered_dataset.stream_name == stream_name, \ - f"Case '{case}' had incorrect stream name." - - # Check that chaining filters works - chained_dataset = filtered_dataset.with_filter("column1 == 'value1'") - chained_records = [row for row in chained_dataset] - assert len(chained_records) == 1, \ - f"Case '{case}' had incorrect number of records after chaining filters." - - -def test_lazy_dataset_from_source( - expected_test_stream_data: dict[str, list[dict[str, str | int]]], -) -> None: - source = ab.get_source("source-test", config={"apiKey": "test"}) - - stream_name = "stream1" - not_a_stream_name = "not_a_stream" - - lazy_dataset_a = source.get_records(stream_name) - lazy_dataset_b = source.get_records(stream_name) - - assert isinstance(lazy_dataset_a, LazyDataset) - - # Check that we can iterate over the stream - - list_from_iter_a = list(lazy_dataset_a) - list_from_iter_b = [row for row in lazy_dataset_b] - - assert list_from_iter_a == list_from_iter_b - - # Make sure that we get a key error if we try to access a stream that doesn't exist - with pytest.raises(exc.AirbyteLibInputError): - source.get_records(not_a_stream_name) - - # Make sure we can iterate on all available streams - for stream_name in source.get_available_streams(): - assert isinstance(stream_name, str) - - lazy_dataset: LazyDataset = source.get_records(stream_name) - assert isinstance(lazy_dataset, LazyDataset) - - list_data = list(lazy_dataset) - assert list_data == expected_test_stream_data[stream_name] - - -@pytest.mark.parametrize( - "method_call", - [ - pytest.param(lambda source: source.check(), id="check"), - pytest.param(lambda source: list(source.get_records("stream1")), id="read_stream"), - pytest.param(lambda source: source.read(), id="read"), - ], -) -def test_check_fail_on_missing_config(method_call): - source = ab.get_source("source-test") - - with pytest.raises(exc.AirbyteConnectorConfigurationMissingError): - method_call(source) - -def test_sync_with_merge_to_postgres(new_pg_cache_config: PostgresCacheConfig, expected_test_stream_data: dict[str, list[dict[str, str | int]]]): - """Test that the merge strategy works as expected. - - In this test, we sync the same data twice. If the data is not duplicated, we assume - the merge was successful. - - # TODO: Add a check with a primary key to ensure that the merge strategy works as expected. - """ - source = ab.get_source("source-test", config={"apiKey": "test"}) - source.select_all_streams() - - cache = PostgresCache(config=new_pg_cache_config) - - # Read twice to test merge strategy - result: ReadResult = source.read(cache) - result: ReadResult = source.read(cache) - - assert result.processed_records == 3 - for stream_name, expected_data in expected_test_stream_data.items(): - if len(cache[stream_name]) > 0: - pd.testing.assert_frame_equal( - result[stream_name].to_pandas(), - pd.DataFrame(expected_data), - check_dtype=False, - ) - else: - # stream is empty - assert len(expected_test_stream_data[stream_name]) == 0 - - -def test_airbyte_lib_version() -> None: - assert get_version() - assert isinstance(get_version(), str) - - # Ensure the version is a valid semantic version (x.y.z or x.y.z.alpha0) - assert 3 <= len(get_version().split(".")) <= 4 - - -@patch.dict('os.environ', {'DO_NOT_TRACK': ''}) -@patch('airbyte_lib.telemetry.requests') -@patch('airbyte_lib.telemetry.datetime') -@pytest.mark.parametrize( - "raises, api_key, expected_state, expected_number_of_records, request_call_fails, extra_env, expected_flags, cache_type, number_of_records_read", - [ - pytest.param(pytest.raises(Exception), "test_fail_during_sync", "failed", 1, False, {"CI": ""}, {"CI": False}, "duckdb", None, id="fail_during_sync"), - pytest.param(does_not_raise(), "test", "succeeded", 3, False, {"CI": ""}, {"CI": False}, "duckdb", None, id="succeed_during_sync"), - pytest.param(does_not_raise(), "test", "succeeded", 3, True, {"CI": ""}, {"CI": False}, "duckdb", None,id="fail_request_without_propagating"), - pytest.param(does_not_raise(), "test", "succeeded", 3, False, {"CI": ""}, {"CI": False}, "duckdb", None,id="falsy_ci_flag"), - pytest.param(does_not_raise(), "test", "succeeded", 3, False, {"CI": "true"}, {"CI": True}, "duckdb", None,id="truthy_ci_flag"), - pytest.param(pytest.raises(Exception), "test_fail_during_sync", "failed", 1, False, {"CI": ""}, {"CI": False}, "streaming", 3, id="streaming_fail_during_sync"), - pytest.param(does_not_raise(), "test", "succeeded", 2, False, {"CI": ""}, {"CI": False}, "streaming", 2, id="streaming_succeed"), - pytest.param(does_not_raise(), "test", "succeeded", 1, False, {"CI": ""}, {"CI": False}, "streaming", 1, id="streaming_partial_read"), - ], -) -def test_tracking( - mock_datetime: Mock, - mock_requests: Mock, - raises, api_key: str, - expected_state: str, - expected_number_of_records: int, - request_call_fails: bool, - extra_env: dict[str, str], - expected_flags: dict[str, bool], - cache_type: str, - number_of_records_read: int -): - """ - Test that the telemetry is sent when the sync is successful. - This is done by mocking the requests.post method and checking that it is called with the right arguments. - """ - now_date = Mock() - mock_datetime.datetime = Mock() - mock_datetime.datetime.utcnow.return_value = now_date - now_date.isoformat.return_value = "2021-01-01T00:00:00.000000" - - mock_post = Mock() - mock_requests.post = mock_post - - source = ab.get_source("source-test", config={"apiKey": api_key}) - source.select_all_streams() - - cache = ab.new_local_cache() - - if request_call_fails: - mock_post.side_effect = Exception("test exception") - - with patch.dict('os.environ', extra_env): - with raises: - if cache_type == "streaming": - list(itertools.islice(source.get_records("stream1"), number_of_records_read)) - else: - source.read(cache) - - mock_post.assert_has_calls([ - call("https://api.segment.io/v1/track", - auth=("cukeSffc0G6gFQehKDhhzSurDzVSZ2OP", ""), - json={ - "anonymousId": "airbyte-lib-user", - "event": "sync", - "properties": { - "version": get_version(), - "source": {'name': 'source-test', 'version': '0.0.1', 'type': 'venv'}, - "state": "started", - "cache": {"type": cache_type}, - "ip": "0.0.0.0", - "flags": expected_flags - }, - "timestamp": "2021-01-01T00:00:00.000000", - } - ), - call( - "https://api.segment.io/v1/track", - auth=("cukeSffc0G6gFQehKDhhzSurDzVSZ2OP", ""), - json={ - "anonymousId": "airbyte-lib-user", - "event": "sync", - "properties": { - "version": get_version(), - "source": {'name': 'source-test', 'version': '0.0.1', 'type': 'venv'}, - "state": expected_state, - "number_of_records": expected_number_of_records, - "cache": {"type": cache_type}, - "ip": "0.0.0.0", - "flags": expected_flags - }, - "timestamp": "2021-01-01T00:00:00.000000", - } - ) - ]) - - -def test_sync_to_postgres(new_pg_cache_config: PostgresCacheConfig, expected_test_stream_data: dict[str, list[dict[str, str | int]]]): - source = ab.get_source("source-test", config={"apiKey": "test"}) - source.select_all_streams() - - cache = PostgresCache(config=new_pg_cache_config) - - result: ReadResult = source.read(cache) - - assert result.processed_records == 3 - for stream_name, expected_data in expected_test_stream_data.items(): - if len(cache[stream_name]) > 0: - pd.testing.assert_frame_equal( - result[stream_name].to_pandas(), - pd.DataFrame(expected_data), - check_dtype=False, - ) - else: - # stream is empty - assert len(expected_test_stream_data[stream_name]) == 0 - -@pytest.mark.slow -@pytest.mark.requires_creds -def test_sync_to_snowflake(snowflake_config: SnowflakeCacheConfig, expected_test_stream_data: dict[str, list[dict[str, str | int]]]): - source = ab.get_source("source-test", config={"apiKey": "test"}) - source.select_all_streams() - - cache = SnowflakeSQLCache(config=snowflake_config) - - result: ReadResult = source.read(cache) - - assert result.processed_records == 3 - for stream_name, expected_data in expected_test_stream_data.items(): - if len(cache[stream_name]) > 0: - pd.testing.assert_frame_equal( - result[stream_name].to_pandas(), - pd.DataFrame(expected_data), - check_dtype=False, - ) - else: - # stream is empty - assert len(expected_test_stream_data[stream_name]) == 0 - - -def test_sync_limited_streams(expected_test_stream_data): - source = ab.get_source("source-test", config={"apiKey": "test"}) - cache = ab.new_local_cache() - - source.select_streams(["stream2"]) - - result = source.read(cache) - - assert result.processed_records == 1 - pd.testing.assert_frame_equal( - result["stream2"].to_pandas(), - pd.DataFrame(expected_test_stream_data["stream2"]), - check_dtype=False, - ) - - -def test_read_stream(): - source = ab.get_source("source-test", config={"apiKey": "test"}) - - assert list(source.get_records("stream1")) == [{"column1": "value1", "column2": 1}, {"column1": "value2", "column2": 2}] - - -def test_read_stream_nonexisting(): - source = ab.get_source("source-test", config={"apiKey": "test"}) - - with pytest.raises(Exception): - list(source.get_records("non-existing")) - -def test_failing_path_connector(): - with pytest.raises(Exception): - ab.get_source("source-test", config={"apiKey": "test"}, use_local_install=True) - -def test_succeeding_path_connector(): - new_path = f"{os.path.abspath('.venv-source-test/bin')}:{os.environ['PATH']}" - - # Patch the PATH env var to include the test venv bin folder - with patch.dict(os.environ, {"PATH": new_path}): - source = ab.get_source( - "source-test", - config={"apiKey": "test"}, - local_executable="source-test", - ) - source.check() - -def test_install_uninstall(): - with tempfile.TemporaryDirectory() as temp_dir: - source = ab.get_source( - "source-test", - pip_url="./tests/integration_tests/fixtures/source-test", - config={"apiKey": "test"}, - install_if_missing=False, - ) - - # Override the install root to avoid conflicts with the test fixture - install_root = Path(temp_dir) - source.executor.install_root = install_root - - # assert that the venv is gone - assert not os.path.exists(install_root / ".venv-source-test") - - # use which to check if the executable is available - assert shutil.which("source-test") is None - - # assert that the connector is not available - with pytest.raises(Exception): - source.check() - - source.install() - - assert os.path.exists(install_root / ".venv-source-test") - assert os.path.exists(install_root / ".venv-source-test/bin/source-test") - - source.check() - - source.uninstall() - - assert not os.path.exists(install_root / ".venv-source-test") - assert not os.path.exists(install_root / ".venv-source-test/bin/source-test") diff --git a/airbyte-lib/tests/integration_tests/test_validation.py b/airbyte-lib/tests/integration_tests/test_validation.py deleted file mode 100644 index 140a7d52023e5..0000000000000 --- a/airbyte-lib/tests/integration_tests/test_validation.py +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -import os -import shutil - -import pytest -from airbyte_lib.validate import validate - - -@pytest.fixture(scope="module", autouse=True) -def autouse_source_test_installation(source_test_installation): - return - - -@pytest.fixture(scope="function", autouse=True) -def autouse_source_test_registry(source_test_registry): - return - - -def test_validate_success(): - validate("./tests/integration_tests/fixtures/source-test", "./tests/integration_tests/fixtures/valid_config.json", validate_install_only=False) - -def test_validate_check_failure(): - with pytest.raises(Exception): - validate("./tests/integration_tests/fixtures/source-test", "./tests/integration_tests/fixtures/invalid_config.json", validate_install_only=False) - -def test_validate_success_install_only(): - validate("./tests/integration_tests/fixtures/source-test", "./tests/integration_tests/fixtures/invalid_config.json", validate_install_only=True) - -def test_validate_config_failure(): - with pytest.raises(Exception): - validate("./tests/integration_tests/fixtures/source-broken", "./tests/integration_tests/fixtures/valid_config.json", validate_install_only=True) diff --git a/airbyte-lib/tests/lint_tests/test_mypy.py b/airbyte-lib/tests/lint_tests/test_mypy.py deleted file mode 100644 index df09978280792..0000000000000 --- a/airbyte-lib/tests/lint_tests/test_mypy.py +++ /dev/null @@ -1,21 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -import subprocess - -import pytest - - -def test_mypy_typing(): - # Run the check command - check_result = subprocess.run( - ["poetry", "run", "mypy", "."], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - - # Assert that the Ruff command exited without errors (exit code 0) - assert check_result.returncode == 0, ( - "MyPy checks failed:\n" - + f"{check_result.stdout.decode()}\n{check_result.stderr.decode()}\n\n" - + "Run `poetry run mypy .` to see all failures." - ) diff --git a/airbyte-lib/tests/lint_tests/test_ruff.py b/airbyte-lib/tests/lint_tests/test_ruff.py deleted file mode 100644 index 57262a8f608c4..0000000000000 --- a/airbyte-lib/tests/lint_tests/test_ruff.py +++ /dev/null @@ -1,51 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -import subprocess - -import pytest - - -def test_ruff_linting(): - # Run the check command - check_result = subprocess.run( - ["poetry", "run", "ruff", "check", "."], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - - # Assert that the Ruff command exited without errors (exit code 0) - assert check_result.returncode == 0, ( - "Ruff checks failed:\n\n" - + f"{check_result.stdout.decode()}\n{check_result.stderr.decode()}\n\n" - + "Run `poetry run ruff check .` to view all issues." - ) - - -def test_ruff_linting_fixable(): - # Run the check command - fix_diff_result = subprocess.run( - ["poetry", "run", "ruff", "check", "--fix", "--diff", "."], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - - # Assert that the Ruff command exited without errors (exit code 0) - assert fix_diff_result.returncode == 0, ( - "Ruff checks revealed fixable issues:\n\n" - + f"{fix_diff_result.stdout.decode()}\n{fix_diff_result.stderr.decode()}\n\n" - + "Run `poetry run ruff check --fix .` to attempt automatic fixes." - ) - - -def test_ruff_format(): - # Define the command to run Ruff - command = ["poetry", "run", "ruff", "format", "--check", "--diff"] - - # Run the command - result = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - - # Assert that the Ruff command exited without errors (exit code 0) - assert result.returncode == 0, ( - f"Ruff checks failed:\n\n{result.stdout.decode()}\n{result.stderr.decode()}\n\n" - + "Run `poetry run ruff format .` to attempt automatic fixes." - ) diff --git a/airbyte-lib/tests/unit_tests/test_caches.py b/airbyte-lib/tests/unit_tests/test_caches.py deleted file mode 100644 index 5bc2ba4186cd8..0000000000000 --- a/airbyte-lib/tests/unit_tests/test_caches.py +++ /dev/null @@ -1,60 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -from pathlib import Path - -import pytest - -from airbyte_lib._file_writers import ParquetWriterConfig -from airbyte_lib.caches.base import SQLCacheBase, SQLCacheConfigBase -from airbyte_lib.caches.duckdb import DuckDBCacheBase, DuckDBCacheConfig - - -def test_duck_db_cache_config_initialization(): - config = DuckDBCacheConfig(db_path='test_path', schema_name='test_schema') - assert config.db_path == Path('test_path') - assert config.schema_name == 'test_schema' - -def test_duck_db_cache_config_default_schema_name(): - config = DuckDBCacheConfig(db_path='test_path') - assert config.schema_name == 'main' - -def test_get_sql_alchemy_url(): - config = DuckDBCacheConfig(db_path='test_path', schema_name='test_schema') - assert config.get_sql_alchemy_url() == 'duckdb:///test_path' - -def test_get_sql_alchemy_url_with_default_schema_name(): - config = DuckDBCacheConfig(db_path='test_path') - assert config.get_sql_alchemy_url() == 'duckdb:///test_path' - -def test_duck_db_cache_config_inheritance(): - assert issubclass(DuckDBCacheConfig, SQLCacheConfigBase) - assert issubclass(DuckDBCacheConfig, ParquetWriterConfig) - -def test_duck_db_cache_config_get_sql_alchemy_url(): - config = DuckDBCacheConfig(db_path='test_path', schema_name='test_schema') - assert config.get_sql_alchemy_url() == 'duckdb:///test_path' - -def test_duck_db_cache_config_get_database_name(): - config = DuckDBCacheConfig(db_path='test_path/test_db.duckdb', schema_name='test_schema') - assert config.get_database_name() == 'test_db' - -def test_duck_db_cache_base_inheritance(): - assert issubclass(DuckDBCacheBase, SQLCacheBase) - -def test_duck_db_cache_config_default_schema_name(): - config = DuckDBCacheConfig(db_path='test_path') - assert config.schema_name == 'main' - -def test_duck_db_cache_config_get_sql_alchemy_url_with_default_schema_name(): - config = DuckDBCacheConfig(db_path='test_path') - assert config.get_sql_alchemy_url() == 'duckdb:///test_path' - -def test_duck_db_cache_config_get_database_name_with_default_schema_name(): - config = DuckDBCacheConfig(db_path='test_path/test_db.duckdb') - assert config.get_database_name() == 'test_db' - -def test_duck_db_cache_config_inheritance_from_sql_cache_config_base(): - assert issubclass(DuckDBCacheConfig, SQLCacheConfigBase) - -def test_duck_db_cache_config_inheritance_from_parquet_writer_config(): - assert issubclass(DuckDBCacheConfig, ParquetWriterConfig) diff --git a/airbyte-lib/tests/unit_tests/test_exceptions.py b/airbyte-lib/tests/unit_tests/test_exceptions.py deleted file mode 100644 index ef5a391e47df0..0000000000000 --- a/airbyte-lib/tests/unit_tests/test_exceptions.py +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -import inspect -import pytest -import inspect -import airbyte_lib.exceptions as exceptions_module - -def test_exceptions(): - exception_classes = [ - (name, obj) - for name, obj in inspect.getmembers(exceptions_module) - if inspect.isclass(obj) and name.endswith("Error") - ] - assert "AirbyteError" in [name for name, _ in exception_classes] - assert "NotAnError" not in [name for name, _ in exception_classes] - for name, obj in exception_classes: - instance = obj() - message = instance.get_message() - assert isinstance(message, str), "No message for class: " + name - assert message.count("\n") == 0 - assert message != "" - assert message.strip() == message - assert name.startswith("Airbyte") - assert name.endswith("Error") - - -if __name__ == "__main__": - pytest.main() diff --git a/airbyte-lib/tests/unit_tests/test_pip_helpers.py b/airbyte-lib/tests/unit_tests/test_pip_helpers.py deleted file mode 100644 index e99ba3e624ff6..0000000000000 --- a/airbyte-lib/tests/unit_tests/test_pip_helpers.py +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -import pytest -from airbyte_lib._util import github_pip_url, connector_pip_url - -@pytest.mark.parametrize('owner, repo, branch_or_ref, package_name, subdirectory, expected', [ - ('airbytehq', 'airbyte', None, None, None, 'git+https://github.com/airbytehq/airbyte.git'), - ('airbytehq', 'airbyte', 'master', None, None, 'git+https://github.com/airbytehq/airbyte.git@master'), - ('airbytehq', 'airbyte', 'my-branch', None, None, 'git+https://github.com/airbytehq/airbyte.git@my-branch'), - ('airbytehq', 'airbyte', 'my-branch', 'airbyte-lib', None, 'git+https://github.com/airbytehq/airbyte.git@my-branch#egg=airbyte-lib'), - ('airbytehq', 'airbyte', 'my-branch', 'airbyte-lib', 'airbyte-lib', 'git+https://github.com/airbytehq/airbyte.git@my-branch#egg=airbyte-lib&subdirectory=airbyte-lib'), -]) -def test_github_pip_url(owner, repo, branch_or_ref, package_name, subdirectory, expected): - result = github_pip_url(owner, repo, branch_or_ref=branch_or_ref, package_name=package_name, subdirectory=subdirectory) - assert result == expected - -@pytest.mark.parametrize('connector_name, branch, owner, expected', [ - ('source-coin-api', 'my-branch', None, 'git+https://github.com/airbytehq/airbyte.git@my-branch#egg=source-coin-api&subdirectory=airbyte-integrations/connectors/source-coin-api'), - ('source-coin-api', 'my-branch', 'my-fork', 'git+https://github.com/my-fork/airbyte.git@my-branch#egg=source-coin-api&subdirectory=airbyte-integrations/connectors/source-coin-api'), -]) -def test_connector_pip_url(connector_name, branch, owner, expected): - result = connector_pip_url( - connector_name, - branch, - owner=owner) - assert result == expected diff --git a/airbyte-lib/tests/unit_tests/test_progress.py b/airbyte-lib/tests/unit_tests/test_progress.py deleted file mode 100644 index 377df860bb57a..0000000000000 --- a/airbyte-lib/tests/unit_tests/test_progress.py +++ /dev/null @@ -1,174 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -import datetime -from textwrap import dedent -import time -import pytest -from freezegun import freeze_time -from airbyte_lib.progress import ReadProgress, _get_elapsed_time_str, _to_time_str -from dateutil.tz import tzlocal - -# Calculate the offset from UTC in hours -tz_offset_hrs = int(datetime.datetime.now(tzlocal()).utcoffset().total_seconds() / 3600) - - -@freeze_time("2022-01-01") -def test_read_progress_initialization(): - progress = ReadProgress() - assert progress.num_streams_expected == 0 - assert progress.read_start_time == 1640995200.0 # Unix timestamp for 2022-01-01 - assert progress.total_records_read == 0 - assert progress.total_records_written == 0 - assert progress.total_batches_written == 0 - assert progress.written_stream_names == set() - assert progress.finalize_start_time is None - assert progress.finalize_end_time is None - assert progress.total_records_finalized == 0 - assert progress.total_batches_finalized == 0 - assert progress.finalized_stream_names == set() - assert progress.last_update_time is None - - -@freeze_time("2022-01-01") -def test_read_progress_reset(): - progress = ReadProgress() - progress.reset(5) - assert progress.num_streams_expected == 5 - assert progress.read_start_time == 1640995200.0 - assert progress.total_records_read == 0 - assert progress.total_records_written == 0 - assert progress.total_batches_written == 0 - assert progress.written_stream_names == set() - assert progress.finalize_start_time is None - assert progress.finalize_end_time is None - assert progress.total_records_finalized == 0 - assert progress.total_batches_finalized == 0 - assert progress.finalized_stream_names == set() - -@freeze_time("2022-01-01") -def test_read_progress_log_records_read(): - progress = ReadProgress() - progress.log_records_read(100) - assert progress.total_records_read == 100 - -@freeze_time("2022-01-01") -def test_read_progress_log_batch_written(): - progress = ReadProgress() - progress.log_batch_written("stream1", 50) - assert progress.total_records_written == 50 - assert progress.total_batches_written == 1 - assert progress.written_stream_names == {"stream1"} - -@freeze_time("2022-01-01") -def test_read_progress_log_batches_finalizing(): - progress = ReadProgress() - progress.log_batches_finalizing("stream1", 1) - assert progress.finalize_start_time == 1640995200.0 - -@freeze_time("2022-01-01") -def test_read_progress_log_batches_finalized(): - progress = ReadProgress() - progress.log_batches_finalized("stream1", 1) - assert progress.total_batches_finalized == 1 - -@freeze_time("2022-01-01") -def test_read_progress_log_stream_finalized(): - progress = ReadProgress() - progress.log_stream_finalized("stream1") - assert progress.finalized_stream_names == {"stream1"} - - -def test_get_elapsed_time_str(): - assert _get_elapsed_time_str(30) == "30 seconds" - assert _get_elapsed_time_str(90) == "1min 30s" - assert _get_elapsed_time_str(600) == "10min" - assert _get_elapsed_time_str(3600) == "1hr 0min" - - -@freeze_time("2022-01-01 0:00:00") -def test_get_time_str(): - assert _to_time_str(time.time()) == "00:00:00" - - -def _assert_lines(expected_lines, actual_lines: list[str] | str): - if isinstance(actual_lines, list): - actual_lines = "\n".join(actual_lines) - for line in expected_lines: - assert line in actual_lines, f"Missing line: {line}" - -def test_get_status_message_after_finalizing_records(): - - # Test that we can render the initial status message before starting to read - with freeze_time("2022-01-01 00:00:00"): - progress = ReadProgress() - expected_lines = [ - "Started reading at 00:00:00.", - "Read **0** records over **0 seconds** (0.0 records / second).", - ] - _assert_lines(expected_lines, progress._get_status_message()) - - # Test after reading some records - with freeze_time("2022-01-01 00:01:00"): - progress.log_records_read(100) - expected_lines = [ - "Started reading at 00:00:00.", - "Read **100** records over **60 seconds** (1.7 records / second).", - ] - _assert_lines(expected_lines, progress._get_status_message()) - - # Advance the day and reset the progress - with freeze_time("2022-01-02 00:00:00"): - progress = ReadProgress() - progress.reset(1) - expected_lines = [ - "Started reading at 00:00:00.", - "Read **0** records over **0 seconds** (0.0 records / second).", - ] - _assert_lines(expected_lines, progress._get_status_message()) - - # Test after writing some records and starting to finalize - with freeze_time("2022-01-02 00:01:00"): - progress.log_records_read(100) - progress.log_batch_written("stream1", 50) - progress.log_batches_finalizing("stream1", 1) - expected_lines = [ - "## Read Progress", - "Started reading at 00:00:00.", - "Read **100** records over **60 seconds** (1.7 records / second).", - "Wrote **50** records over 1 batches.", - "Finished reading at 00:01:00.", - "Started finalizing streams at 00:01:00.", - ] - _assert_lines(expected_lines, progress._get_status_message()) - - # Test after finalizing some records - with freeze_time("2022-01-02 00:02:00"): - progress.log_batches_finalized("stream1", 1) - expected_lines = [ - "## Read Progress", - "Started reading at 00:00:00.", - "Read **100** records over **60 seconds** (1.7 records / second).", - "Wrote **50** records over 1 batches.", - "Finished reading at 00:01:00.", - "Started finalizing streams at 00:01:00.", - "Finalized **1** batches over 60 seconds.", - ] - _assert_lines(expected_lines, progress._get_status_message()) - - # Test after finalizing all records - with freeze_time("2022-01-02 00:02:00"): - progress.log_stream_finalized("stream1") - message = progress._get_status_message() - expected_lines = [ - "## Read Progress", - "Started reading at 00:00:00.", - "Read **100** records over **60 seconds** (1.7 records / second).", - "Wrote **50** records over 1 batches.", - "Finished reading at 00:01:00.", - "Started finalizing streams at 00:01:00.", - "Finalized **1** batches over 60 seconds.", - "Completed 1 out of 1 streams:", - "- stream1", - "Total time elapsed: 2min 0s", - ] - _assert_lines(expected_lines, message) diff --git a/airbyte-lib/tests/unit_tests/test_type_translation.py b/airbyte-lib/tests/unit_tests/test_type_translation.py deleted file mode 100644 index a2c255c5b0d71..0000000000000 --- a/airbyte-lib/tests/unit_tests/test_type_translation.py +++ /dev/null @@ -1,82 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -import pytest -from sqlalchemy import types -from airbyte_lib.types import SQLTypeConverter, _get_airbyte_type - - -@pytest.mark.parametrize( - "json_schema_property_def, expected_sql_type", - [ - ({"type": "string"}, types.VARCHAR), - ({"type": ["boolean", "null"]}, types.BOOLEAN), - ({"type": ["null", "boolean"]}, types.BOOLEAN), - ({"type": "string"}, types.VARCHAR), - ({"type": ["null", "string"]}, types.VARCHAR), - ({"type": "boolean"}, types.BOOLEAN), - ({"type": "string", "format": "date"}, types.DATE), - ({"type": ["null", "string"]}, types.VARCHAR), - ({"type": ["null", "boolean"]}, types.BOOLEAN), - ({"type": ["null", "number"]}, types.DECIMAL), - ({"type": "string", "format": "date-time", "airbyte_type": "timestamp_without_timezone"}, types.TIMESTAMP), - ({"type": "string", "format": "date-time", "airbyte_type": "timestamp_with_timezone"}, types.TIMESTAMP), - ({"type": "string", "format": "time", "airbyte_type": "time_without_timezone"}, types.TIME), - ({"type": "string", "format": "time", "airbyte_type": "time_with_timezone"}, types.TIME), - ({"type": "integer"}, types.BIGINT), - ({"type": "number", "airbyte_type": "integer"}, types.BIGINT), - ({"type": "number"}, types.DECIMAL), - ({"type": "array", "items": {"type": "object"}}, types.JSON), - ({"type": "object", "properties": {}}, types.JSON), - ], -) -def test_to_sql_type(json_schema_property_def, expected_sql_type): - converter = SQLTypeConverter() - sql_type = converter.to_sql_type(json_schema_property_def) - assert isinstance(sql_type, expected_sql_type) - - -@pytest.mark.parametrize( - "json_schema_property_def, expected_airbyte_type", - [ - ({"type": "string"}, "string"), - ({"type": ["boolean", "null"]}, "boolean"), - ({"type": ["null", "boolean"]}, "boolean"), - ({"type": "string"}, "string"), - ({"type": ["null", "string"]}, "string"), - ({"type": "boolean"}, "boolean"), - ({"type": "string", "format": "date"}, "date"), - ({"type": "string", "format": "date-time", "airbyte_type": "timestamp_without_timezone"}, "timestamp_without_timezone"), - ({"type": "string", "format": "date-time", "airbyte_type": "timestamp_with_timezone"}, "timestamp_with_timezone"), - ({"type": "string", "format": "time", "airbyte_type": "time_without_timezone"}, "time_without_timezone"), - ({"type": "string", "format": "time", "airbyte_type": "time_with_timezone"}, "time_with_timezone"), - ({"type": "integer"}, "integer"), - ({"type": "number", "airbyte_type": "integer"}, "integer"), - ({"type": "number"}, "number"), - ({"type": "array"}, "array"), - ({"type": "object"}, "object"), - ], -) -def test_to_airbyte_type(json_schema_property_def, expected_airbyte_type): - airbyte_type, _ = _get_airbyte_type(json_schema_property_def) - assert airbyte_type == expected_airbyte_type - - -@pytest.mark.parametrize( - "json_schema_property_def, expected_airbyte_type, expected_airbyte_subtype", - [ - ({"type": "string"}, "string", None), - ({"type": "number"}, "number", None), - ({"type": "array"}, "array", None), - ({"type": "object"}, "object", None), - ({"type": "array", "items": {"type": ["null", "string"]}}, "array", "string"), - ({"type": "array", "items": {"type": ["boolean"]}}, "array", "boolean"), - ], -) -def test_to_airbyte_subtype( - json_schema_property_def, - expected_airbyte_type, - expected_airbyte_subtype, -): - airbyte_type, subtype = _get_airbyte_type(json_schema_property_def) - assert airbyte_type == expected_airbyte_type - assert subtype == expected_airbyte_subtype diff --git a/airbyte-lib/tests/unit_tests/test_writers.py b/airbyte-lib/tests/unit_tests/test_writers.py deleted file mode 100644 index 2578ae10b4835..0000000000000 --- a/airbyte-lib/tests/unit_tests/test_writers.py +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -from pathlib import Path -import pytest -from airbyte_lib._file_writers.base import FileWriterBase, FileWriterBatchHandle, FileWriterConfigBase -from airbyte_lib._file_writers.parquet import ParquetWriter, ParquetWriterConfig -from numpy import source - - -def test_parquet_writer_config_initialization(): - config = ParquetWriterConfig(cache_dir='test_path') - assert config.cache_dir == Path('test_path') - -def test_parquet_writer_config_inheritance(): - assert issubclass(ParquetWriterConfig, FileWriterConfigBase) - -def test_parquet_writer_initialization(): - config = ParquetWriterConfig(cache_dir='test_path') - writer = ParquetWriter(config) - assert writer.config == config - -def test_parquet_writer_inheritance(): - assert issubclass(ParquetWriter, FileWriterBase) - -def test_parquet_writer_has_config(): - config = ParquetWriterConfig(cache_dir='test_path') - writer = ParquetWriter(config) - assert hasattr(writer, 'config') - -def test_parquet_writer_has_source_catalog(): - config = ParquetWriterConfig(cache_dir='test_path') - writer = ParquetWriter(config) - -def test_parquet_writer_source_catalog_is_none(): - config = ParquetWriterConfig(cache_dir='test_path') - writer = ParquetWriter(config) diff --git a/build.gradle b/build.gradle index 02755ee877b0d..9a21d4bfa126a 100644 --- a/build.gradle +++ b/build.gradle @@ -1,10 +1,13 @@ import com.github.spotbugs.snom.Confidence import com.github.spotbugs.snom.Effort import com.github.spotbugs.snom.SpotBugsTask +import org.jetbrains.kotlin.gradle.dsl.JvmTarget +import org.jetbrains.kotlin.gradle.dsl.KotlinVersion plugins { id 'base' - id 'com.github.spotbugs' version '6.0.7' + id 'com.github.spotbugs' version '6.0.7' apply false + id 'org.jetbrains.kotlin.jvm' version '1.9.23' apply false } allprojects { @@ -12,6 +15,9 @@ allprojects { apply plugin: 'java' apply plugin: 'java-test-fixtures' apply plugin: 'com.github.spotbugs' + apply plugin: 'org.jetbrains.kotlin.jvm' + apply plugin: 'idea' + // By default gradle uses directory as the project name. That works very well in a single project environment but // projects clobber each other in an environments with subprojects when projects are in directories named identically. @@ -42,6 +48,8 @@ allprojects { // Common java configurations java { + withSourcesJar() + withJavadocJar() sourceCompatibility = JavaVersion.VERSION_21 targetCompatibility = JavaVersion.VERSION_21 compileJava { @@ -58,9 +66,54 @@ allprojects { } } + tasks.named('sourcesJar').configure { + dependsOn tasks.matching { it.name == 'generate' } + } + + compileKotlin { + compilerOptions { + jvmTarget = JvmTarget.JVM_21 + languageVersion = KotlinVersion.KOTLIN_1_9 + allWarningsAsErrors = true + freeCompilerArgs = ["-Xjvm-default=all"] + } + dependsOn { + tasks.matching { it.name == 'generate' } + } + } + compileTestKotlin { + compilerOptions { + jvmTarget = JvmTarget.JVM_21 + languageVersion = KotlinVersion.KOTLIN_1_9 + allWarningsAsErrors = true + freeCompilerArgs = ["-Xjvm-default=all"] + } + dependsOn { + tasks.matching { it.name == 'generate' } + } + } + compileTestFixturesKotlin { + compilerOptions { + jvmTarget = JvmTarget.JVM_21 + languageVersion = KotlinVersion.KOTLIN_1_9 + allWarningsAsErrors = true + freeCompilerArgs = ["-Xjvm-default=all"] + } + dependsOn { + tasks.matching { it.name == 'generate' } + } + } + + idea { + module { + downloadJavadoc = true + downloadSources = true + } + } + spotbugs { ignoreFailures = false - effort = Effort.valueOf(System.getProperty('skipSlowTests', 'false') == 'false' ? 'MAX' : 'MIN') + effort = Effort.valueOf('MAX') excludeFilter.set rootProject.file('spotbugs-exclude-filter-file.xml') reportLevel = Confidence.valueOf('HIGH') showProgress = false @@ -145,23 +198,23 @@ allprojects { testFixturesAnnotationProcessor lombok // JUnit dependencies. - def vAssertJ = "3.21.0" - def vJUnit = "5.9.1" - def vJUnitJupiter = "5.10.0" - def vJUnitPioneer = "1.7.1" + def vAssertJ = "3.25.3" + def vJUnit = "5.10.2" + def vJUnitJupiter = "5.11.0" + testFixturesImplementation platform("org.junit:junit-bom:${vJUnit}") testFixturesImplementation "org.junit.jupiter:junit-jupiter-api:${vJUnit}" testFixturesImplementation "org.junit.jupiter:junit-jupiter-params:${vJUnit}" testFixturesImplementation "org.mockito:mockito-junit-jupiter:${vJUnitJupiter}" testFixturesImplementation "org.assertj:assertj-core:${vAssertJ}" - testFixturesImplementation "org.junit-pioneer:junit-pioneer:${vJUnitPioneer}" + testImplementation platform("org.junit:junit-bom:${vJUnit}") testImplementation "org.junit.jupiter:junit-jupiter-api:${vJUnit}" testImplementation "org.junit.jupiter:junit-jupiter-params:${vJUnit}" testImplementation "org.mockito:mockito-junit-jupiter:${vJUnitJupiter}" testImplementation "org.assertj:assertj-core:${vAssertJ}" - testImplementation "org.junit-pioneer:junit-pioneer:${vJUnitPioneer}" + testRuntimeOnly platform("org.junit:junit-bom:${vJUnit}") testRuntimeOnly "org.junit.jupiter:junit-jupiter-engine:${vJUnit}" // Spotbugs dependencies. @@ -177,5 +230,7 @@ allprojects { } } - javadoc.options.addStringOption('Xdoclint:none', '-quiet') + javadoc { + options.addStringOption('Xdoclint:none', '-quiet') + } } diff --git a/buildSrc/readme.md b/buildSrc/readme.md deleted file mode 100644 index 1dea0b8d527f7..0000000000000 --- a/buildSrc/readme.md +++ /dev/null @@ -1,3 +0,0 @@ -# buildSrc - -This module contains custom Gradle modules that we have written to improve our build. diff --git a/buildSrc/src/main/groovy/airbyte-java-connector.gradle b/buildSrc/src/main/groovy/airbyte-java-connector.gradle index d7626c8e5f112..0a6cbf407212e 100644 --- a/buildSrc/src/main/groovy/airbyte-java-connector.gradle +++ b/buildSrc/src/main/groovy/airbyte-java-connector.gradle @@ -46,7 +46,7 @@ class AirbyteJavaConnectorExtension { ] void addCdkDependencies() { - def projectName = { ":airbyte-cdk:java:airbyte-cdk:${it}" } + def projectName = { ":airbyte-cdk:java:airbyte-cdk:airbyte-cdk-${it}" } def jarName = { "io.airbyte.cdk:airbyte-cdk-${it}:${cdkVersionRequired}" } project.processIntegrationTestJavaResources { // The metadata.yaml file is required by DestinationAcceptanceTest. @@ -55,17 +55,39 @@ class AirbyteJavaConnectorExtension { duplicatesStrategy DuplicatesStrategy.EXCLUDE } } + if (!cdkVersionRequired.matches("^[0-9]+\\.[0-9]+\\.[0-9]+")) { + throw new IllegalArgumentException("cdkVersionRequired should be a well-formed 3-part version number") + } + final int minor = Integer.parseInt(cdkVersionRequired.split('\\.')[1]) + project.dependencies { - def dep = { useLocalCdk ? project.project(projectName(it)) : jarName(it) } - def testFixturesDep = { useLocalCdk ? testFixtures(project.project(projectName(it))) : "${jarName(it)}:test-fixtures" } - if (useLocalCdk || !cdkVersionRequired.matches("^0\\.[0-9]\\..*|^0\\.1[0-8]\\..*")) { + def dep = useLocalCdk + ? { project.project(projectName(it)) } + : { jarName(it) } + def testFixturesDep = (useLocalCdk || minor >= 29) + ? { testFixtures(dep(it)) } + : { "${jarName(it)}:test-fixtures" } // handle pre-v0.29 publications + + if (useLocalCdk || minor >= 19) { // v0.19+ module structure + compileOnly "org.projectlombok:lombok:1.18.30" + annotationProcessor "org.projectlombok:lombok:1.18.30" // Lombok must be added BEFORE Micronaut + testCompileOnly "org.projectlombok:lombok:1.18.30" + testAnnotationProcessor "org.projectlombok:lombok:1.18.30" + testFixturesCompileOnly "org.projectlombok:lombok:1.18.30" + testAnnotationProcessor "org.projectlombok:lombok:1.18.30" implementation dep("dependencies") testImplementation dep("dependencies") testFixturesImplementation dep("dependencies") integrationTestJavaImplementation dep("dependencies") integrationTestJavaImplementation testFixturesDep("dependencies") } else { + compileOnly "org.projectlombok:lombok:1.18.30" + annotationProcessor "org.projectlombok:lombok:1.18.30" // Lombok must be added BEFORE Micronaut + testCompileOnly "org.projectlombok:lombok:1.18.30" + testAnnotationProcessor "org.projectlombok:lombok:1.18.30" + testFixturesCompileOnly "org.projectlombok:lombok:1.18.30" + testAnnotationProcessor "org.projectlombok:lombok:1.18.30" // pre-v0.19 module structure implementation(platform("com.fasterxml.jackson:jackson-bom:2.13.0")) implementation(platform("org.glassfish.jersey:jersey-bom:2.31")) @@ -132,6 +154,9 @@ class AirbyteJavaConnectorPlugin implements Plugin { java { srcDir 'src/test-integration/java' } + kotlin { + srcDir 'src/test-integration/kotlin' + } resources { srcDir 'src/test-integration/resources' } @@ -140,6 +165,9 @@ class AirbyteJavaConnectorPlugin implements Plugin { java { srcDir 'src/test-performance/java' } + kotlin { + srcDir 'src/test-performance/kotlin' + } resources { srcDir 'src/test-performance/resources' } @@ -237,6 +265,7 @@ class AirbyteJavaConnectorPlugin implements Plugin { performanceTestJavaImplementation testFixtures(project) } + project.extensions.create('airbyteJavaConnector', AirbyteJavaConnectorExtension, project) } } diff --git a/connectors.md b/connectors.md deleted file mode 100644 index 0bedf2d2f229b..0000000000000 --- a/connectors.md +++ /dev/null @@ -1,7 +0,0 @@ -# Airbyte Connectors - -A list of all airbyte connectors can now be found [here](https://connectors.airbyte.com/files/generated_reports/connector_registry_report.html) (or in the [docs](https://docs.airbyte.com/integrations/)). - -This report is generated from the **Airbyte Catalog Registries** -* [OSS Registry JSON](https://connectors.airbyte.com/files/registries/v0/oss_registry.json) -* [Cloud Registry JSON](https://connectors.airbyte.com/files/registries/v0/cloud_registry.json) diff --git a/deps.toml b/deps.toml index 1caea5603cc85..0ae8b52f1f73c 100644 --- a/deps.toml +++ b/deps.toml @@ -9,8 +9,8 @@ glassfish_version = "2.31" hikaricp = "5.0.1" jmh = "1.36" jooq = "3.13.4" -junit-jupiter = "5.9.1" -kotlin = "1.9.0" +junit-bom = "5.10.1" +kotlin = "1.9.23" log4j = "2.21.1" lombok = "1.18.30" postgresql = "42.6.0" @@ -19,6 +19,7 @@ segment = "2.1.1" slf4j = "2.0.9" temporal = "1.17.0" debezium = "2.4.0.Final" +mockito-version = "5.11.0" [libraries] airbyte-protocol = { module = "io.airbyte.airbyte-protocol:protocol-models", version.ref = "airbyte-protocol" } @@ -70,15 +71,11 @@ jooq = { module = "org.jooq:jooq", version.ref = "jooq" } jooq-codegen = { module = "org.jooq:jooq-codegen", version.ref = "jooq" } jooq-meta = { module = "org.jooq:jooq-meta", version.ref = "jooq" } jul-to-slf4j = { module = "org.slf4j:jul-to-slf4j", version.ref = "slf4j" } -junit-jupiter-api = { module = "org.junit.jupiter:junit-jupiter-api", version.ref = "junit-jupiter" } -junit-jupiter-engine = { module = "org.junit.jupiter:junit-jupiter-engine", version.ref = "junit-jupiter" } -junit-jupiter-params = { module = "org.junit.jupiter:junit-jupiter-params", version.ref = "junit-jupiter" } junit-jupiter-system-stubs = { module = "uk.org.webcompere:system-stubs-jupiter", version = "2.0.1" } -junit-pioneer = { module = "org.junit-pioneer:junit-pioneer", version = "1.7.1" } kotlin-logging = { module = "io.github.oshai:kotlin-logging-jvm", version = "5.1.0" } kotlinx-cli = { module = "org.jetbrains.kotlinx:kotlinx-cli", version = "0.3.5" } kotlinx-cli-jvm = { module = "org.jetbrains.kotlinx:kotlinx-cli-jvm", version = "0.3.5" } -launchdarkly = { module = "com.launchdarkly:launchdarkly-java-server-sdk", version = "6.0.1" } +launchdarkly = { module = "com.launchdarkly:launchdarkly-java-server-sdk", version = "7.2.6" } log4j-api = { module = "org.apache.logging.log4j:log4j-api", version.ref = "log4j" } log4j-core = { module = "org.apache.logging.log4j:log4j-core", version.ref = "log4j" } log4j-slf4j2-impl = { module = "org.apache.logging.log4j:log4j-slf4j2-impl", version.ref = "log4j" } @@ -87,7 +84,6 @@ log4j-over-slf4j = { module = "org.slf4j:log4j-over-slf4j", version.ref = "slf4j log4j-web = { module = "org.apache.logging.log4j:log4j-web", version.ref = "log4j" } lombok = { module = "org.projectlombok:lombok", version.ref = "lombok" } micrometer-statsd = { module = "io.micrometer:micrometer-registry-statsd", version = "1.9.3" } -mockito-junit-jupiter = { module = "org.mockito:mockito-junit-jupiter", version = "5.10.0" } mockk = { module = "io.mockk:mockk", version = "1.13.3" } mongo-driver-sync = { module = "org.mongodb:mongodb-driver-sync", version = "4.10.2" } otel-bom = { module = "io.opentelemetry:opentelemetry-bom", version = "1.14.0" } @@ -117,7 +113,6 @@ debezium-postgres = { module = "io.debezium:debezium-connector-postgres", versio apache = ["apache-commons", "apache-commons-lang"] datadog = ["datadog-trace-api", "datadog-trace-ot"] jackson = ["jackson-databind", "jackson-annotations", "jackson-dataformat", "jackson-datatype"] -junit = ["junit-jupiter-api", "junit-jupiter-params", "mockito-junit-jupiter"] log4j = ["log4j-api", "log4j-core", "log4j-slf4j-impl", "log4j-slf4j2-impl", "log4j-web"] slf4j = ["jul-to-slf4j", "jcl-over-slf4j", "log4j-over-slf4j"] temporal = ["temporal-sdk", "temporal-serviceclient"] diff --git a/docs/.gitbook/assets/source/azure-blob-storage/access_control_iam.png b/docs/.gitbook/assets/source/azure-blob-storage/access_control_iam.png new file mode 100644 index 0000000000000..8967b2bfdc951 Binary files /dev/null and b/docs/.gitbook/assets/source/azure-blob-storage/access_control_iam.png differ diff --git a/docs/.gitbook/assets/source/azure-blob-storage/add_members.png b/docs/.gitbook/assets/source/azure-blob-storage/add_members.png new file mode 100644 index 0000000000000..d9bc39944bc3c Binary files /dev/null and b/docs/.gitbook/assets/source/azure-blob-storage/add_members.png differ diff --git a/docs/.gitbook/assets/source/azure-blob-storage/add_role.png b/docs/.gitbook/assets/source/azure-blob-storage/add_role.png new file mode 100644 index 0000000000000..81b42513987ff Binary files /dev/null and b/docs/.gitbook/assets/source/azure-blob-storage/add_role.png differ diff --git a/docs/.gitbook/assets/source/azure-blob-storage/search_role.png b/docs/.gitbook/assets/source/azure-blob-storage/search_role.png new file mode 100644 index 0000000000000..f164b7829e784 Binary files /dev/null and b/docs/.gitbook/assets/source/azure-blob-storage/search_role.png differ diff --git a/docs/access-management/sso-providers/azure-entra-id.md b/docs/access-management/sso-providers/azure-entra-id.md index 3b71e7c2ac7cd..dda6849698587 100644 --- a/docs/access-management/sso-providers/azure-entra-id.md +++ b/docs/access-management/sso-providers/azure-entra-id.md @@ -73,7 +73,7 @@ The following steps need to be executed by an administrator of your company's Az You will need to create a new Entra ID application for Airbyte. Log into the [Azure Portal](https://portal.azure.com/) and search for the Entra ID service. -From the overview page of Entra ID, press **Add** > **App registration** on the top of the screen. The name you select is your app integration name. Once chosen, configure a **Redirect URI** of type **Web** with the following value: +From the overview page of Entra ID, press **Add** > **App registration** on the top of the screen. The name you select is your app integration name. Once chosen, **choose who can use the application, typically set to "Accounts in this organization directory only" for specific access,** and configure a **Redirect URI** of type **Web** with the following value: ``` /auth/realms/airbyte/broker//endpoint @@ -87,6 +87,13 @@ To create client credentials for Airbyte to interface with your application, hea 1. Click **New client secret**, and enter the expiry date of your choosing. You'll need to pass in the new client secret every time the old one expires to continue being able to log in via Entra ID. 2. Copy the **Value** (the client secret itself) immediately after creation. You won't be able to view this later on. +:::caution +Depending on the default "Admin consent require' value for your organization you may need to manually provide Admin consent within the **API Permissions** menu. To do so click **API Permissions** and then click **Grant admin consent for Airbtyte** (see image below.) +::: + +Admin Consent Option + + ### Setup information needed Once your Microsoft Entra ID app is set up, you're ready to deploy Airbyte Self-Managed Enterprise with SSO. Take note of the following configuration values, as you will need them to configure Airbyte to use your new Okta SSO app integration: diff --git a/docs/assets/docs/airbyte-lib-high-level-architecture.svg b/docs/assets/docs/pyairbyte-high-level-architecture.svg similarity index 100% rename from docs/assets/docs/airbyte-lib-high-level-architecture.svg rename to docs/assets/docs/pyairbyte-high-level-architecture.svg diff --git a/docs/cloud/managing-airbyte-cloud/assets/connection-job-history.png b/docs/cloud/managing-airbyte-cloud/assets/connection-job-history.png index a9df65156a02b..40f51fcfc59e4 100644 Binary files a/docs/cloud/managing-airbyte-cloud/assets/connection-job-history.png and b/docs/cloud/managing-airbyte-cloud/assets/connection-job-history.png differ diff --git a/docs/cloud/managing-airbyte-cloud/assets/connection-status-page.png b/docs/cloud/managing-airbyte-cloud/assets/connection-status-page.png index a382786e38040..33ef7afbc61b9 100644 Binary files a/docs/cloud/managing-airbyte-cloud/assets/connection-status-page.png and b/docs/cloud/managing-airbyte-cloud/assets/connection-status-page.png differ diff --git a/docs/cloud/managing-airbyte-cloud/assets/notification-slack-add-webhook.png b/docs/cloud/managing-airbyte-cloud/assets/notification-slack-add-webhook.png new file mode 100644 index 0000000000000..f96be7457dede Binary files /dev/null and b/docs/cloud/managing-airbyte-cloud/assets/notification-slack-add-webhook.png differ diff --git a/docs/cloud/managing-airbyte-cloud/assets/notification-slack-create-app.png b/docs/cloud/managing-airbyte-cloud/assets/notification-slack-create-app.png new file mode 100644 index 0000000000000..613cf1d959ccc Binary files /dev/null and b/docs/cloud/managing-airbyte-cloud/assets/notification-slack-create-app.png differ diff --git a/docs/cloud/managing-airbyte-cloud/assets/notification-slack-success.png b/docs/cloud/managing-airbyte-cloud/assets/notification-slack-success.png new file mode 100644 index 0000000000000..fba8d0a51e3e2 Binary files /dev/null and b/docs/cloud/managing-airbyte-cloud/assets/notification-slack-success.png differ diff --git a/docs/cloud/managing-airbyte-cloud/assets/notification-slack-webhook-url-success.png b/docs/cloud/managing-airbyte-cloud/assets/notification-slack-webhook-url-success.png new file mode 100644 index 0000000000000..c7ac383a95bdd Binary files /dev/null and b/docs/cloud/managing-airbyte-cloud/assets/notification-slack-webhook-url-success.png differ diff --git a/docs/cloud/managing-airbyte-cloud/assets/notifications-email.png b/docs/cloud/managing-airbyte-cloud/assets/notifications-email.png new file mode 100644 index 0000000000000..339868c244663 Binary files /dev/null and b/docs/cloud/managing-airbyte-cloud/assets/notifications-email.png differ diff --git a/docs/cloud/managing-airbyte-cloud/configuring-connections.md b/docs/cloud/managing-airbyte-cloud/configuring-connections.md index 269b68723ce60..94b213e439f81 100644 --- a/docs/cloud/managing-airbyte-cloud/configuring-connections.md +++ b/docs/cloud/managing-airbyte-cloud/configuring-connections.md @@ -14,7 +14,7 @@ To configure these settings: 1. In the Airbyte UI, click **Connections** and then click the connection you want to change. -2. Click the **Replication** tab. +2. Click the **Settings** tab. 3. Click the **Configuration** dropdown to expand the options. @@ -29,31 +29,23 @@ You can configure the following settings: | Setting | Description | |--------------------------------------|-------------------------------------------------------------------------------------| | Connection Name | A custom name for your connection | -| [Replication frequency](/using-airbyte/core-concepts/sync-schedules.md) | How often data syncs (can be scheduled, cron, API-triggered or manual) | -| [Destination namespace](/using-airbyte/core-concepts/namespaces.md) | Where the replicated data is written to in the destination | -| Destination stream prefix | A prefix added to each table name in the destination | +| [Schedule Type](/using-airbyte/core-concepts/sync-schedules.md) | How often data syncs (can be scheduled, cron, API-triggered or manual) | +| [Destination Namespace](/using-airbyte/core-concepts/namespaces.md) | Where the replicated data is written to in the destination | +| Destination Stream Prefix | A prefix added to each table name in the destination | | [Detect and propagate schema changes](/cloud/managing-airbyte-cloud/manage-schema-changes.md) | How Airbyte handles schema changes in the source | | [Connection Data Residency](/cloud/managing-airbyte-cloud/manage-data-residency.md) | Where data will be processed (Cloud only) | -## Modify streams in your connection +## Modify Streams -In the **Activate the streams you want to sync** table, you choose which streams to sync and how they are loaded to the destination. +On the "Schema" tab, you choose which streams to sync and how they are loaded to the destination. :::info A connection's schema consists of one or many streams. Each stream is most commonly associated with a database table or an API endpoint. Within a stream, there can be one or many fields or columns. ::: -To modify streams: +To modify streams, click **Connections** and then click the connection you want to change. Click the **Schema** tab to see all the streams Airbyte can sync. To modify an individual stream: -1. In the Airbyte UI, click **Connections** and then click the connection you want to change. - -2. Click the **Replication** tab. - -3. Scroll down to the **Activate the streams you want to sync** table. - -Modify an individual stream: - -1. In the **Activate the streams you want to sync** table, toggle **Sync** on or off for your selected stream. To select or deselect all streams, click the checkbox in the table header. To deselect an individual stream, deselect its checkbox in the table. +1. Toggle **Sync** on or off for your selected stream. To select or deselect all streams at once, use "Hide disabled streams" in the table header. To deselect an individual stream, use the toggle in its row. 2. Click the **Sync mode** dropdown and select the sync mode you want to apply. Depending on the sync mode you select, you may need to choose a cursor or primary key. diff --git a/docs/cloud/managing-airbyte-cloud/manage-airbyte-cloud-notifications.md b/docs/cloud/managing-airbyte-cloud/manage-airbyte-cloud-notifications.md index b03e0d24d6e95..09d5dd4389bdf 100644 --- a/docs/cloud/managing-airbyte-cloud/manage-airbyte-cloud-notifications.md +++ b/docs/cloud/managing-airbyte-cloud/manage-airbyte-cloud-notifications.md @@ -10,81 +10,74 @@ This page provides guidance on how to manage notifications for Airbyte, allowing | Type of Notification | Description | |------------------------|---------------------------------------------------------------------------------------------------------------------| -| Failed Syncs | A sync from any of your connections fails. Note that if sync runs frequently or if there are many syncs in the workspace these types of events can be noisy | -| Successful Syncs | A sync from any of your connections succeeds. Note that if sync runs frequently or if there are many syncs in the workspace these types of events can be noisy -| Automated Connection Updates | A connection is updated automatically (ex. a source schema is automatically updated) | -| Connection Updates Requiring Action | A connection update requires you to take action (ex. a breaking schema change is detected) | -| Warning - Repeated Failures | A connection will be disabled soon due to repeated failures. It has failed 50 times consecutively or there were only failed jobs in the past 7 days | -| Sync Disabled - Repeated Failures | A connection was automatically disabled due to repeated failures. It will be disabled when it has failed 100 times consecutively or has been failing for 14 days in a row | -| Warning - Upgrade Required (Cloud only) | A new connector version is available and requires manual upgrade | -| Sync Disabled - Upgrade Required (Cloud only) | One or more connections were automatically disabled due to a connector upgrade deadline passing +| **Failed Syncs** | A sync from any of your connections fails. Note that if sync runs frequently or if there are many syncs in the workspace these types of events can be noisy | +| **Successful Syncs** | A sync from any of your connections succeeds. Note that if sync runs frequently or if there are many syncs in the workspace these types of events can be noisy +| **Automated Connection Updates** | A connection is updated automatically (ex. a source schema is automatically updated) | +| **Connection Updates Requiring Action** | A connection update requires you to take action (ex. a breaking schema change is detected) | +| **Warning - Repeated Failures** | A connection will be disabled soon due to repeated failures. It has failed 50 times consecutively or there were only failed jobs in the past 7 days | +| **Sync Disabled - Repeated Failures** | A connection was automatically disabled due to repeated failures. It will be disabled when it has failed 100 times consecutively or has been failing for 14 days in a row | +| **Warning - Upgrade Required** (Cloud only) | A new connector version is available and requires manual upgrade | +| **Sync Disabled - Upgrade Required** (Cloud only) | One or more connections were automatically disabled due to a connector upgrade deadline passing + +### Enabling schema update notifications + +To be notified of any source schema changes, make sure you have enabled `Automatic Connection Updates` and `Connection Updates Requiring Action` notifications. If these are off, even if you turned on schema update notifications in a connection's settings, Airbyte will *NOT* send out any notifications related to these types of events. + +To edit this setting, click **Connections** and select the connection you want to receive notifications for. Click the **Settings** tab on the Connection page. In the **Advanced Settings**, toggle **Schema update notifications**. + ## Configure Email Notification Settings -To set up email notifications: +To set up email notifications, click **Settings** and navigate to **Workspace** > **Notifications**. -1. In the Airbyte UI, click **Settings** and navigate to **Notifications**. +Toggle which messages you'd like to receive from Airbyte. All email notifications will be sent by default to the creator of the workspace. -2. Toggle which messages you'd like to receive from Airbyte. All email notifications will be sent by default to the creator of the workspace. To change the recipient, edit and save the **notification email recipient**. If you would like to send email notifications to more than one recipient, you can enter an email distribution list (ie Google Group) as the recipient. - -3. Click **Save changes**. +![](./assets/notifications-email.png) :::note All email notifications except for Successful Syncs are enabled by default. ::: -## Configure Slack Notification settings +### Modify the email recipient +To change the recipient, edit and save the **notification email recipient**. If you would like to send email notifications to more than one recipient, you can enter an email distribution list (ie Google Group) as the recipient. -To set up Slack notifications: +## Configure Slack Notification settings -If you're more of a visual learner, just head over to [this video](https://www.youtube.com/watch?v=NjYm8F-KiFc&ab_channel=Airbyte) to learn how to do this. You can also refer to the Slack documentation on how to [create an incoming webhook for Slack](https://api.slack.com/messaging/webhooks). +If you're more of a visual learner, head over to [this video](https://www.youtube.com/watch?v=NjYm8F-KiFc&ab_channel=Airbyte) to learn how to do this. You can also refer to the Slack documentation on how to [create an incoming webhook for Slack](https://api.slack.com/messaging/webhooks). ### Create a Slack app -1. **Create a Slack App**: Navigate to https://api.slack.com/apps/. Select `Create an App`. +1. To set up Slack notifications, navigate to https://api.slack.com/apps/. Select `Create an App`. -![](../../.gitbook/assets/notifications_create_slack_app.png) +![](./assets/notification-slack-create-app.png) 2. Select `From Scratch`. Enter your App Name (e.g. Airbyte Sync Notifications) and pick your desired Slack workspace. -3. **Set up the webhook URL.**: in the left sidebar, click on `Incoming Webhooks`. Click the slider button in the top right to turn the feature on. Then click `Add New Webhook to Workspace`. +3. **Enable Incoming Webhooks**: in the left sidebar, click on `Incoming Webhooks`. Click the slider button in the top right to turn the feature on. Then click `Add New Webhook to Workspace`. -![](../../.gitbook/assets/notifications_add_new_webhook.png) +![](./assets/notification-slack-add-webhook.png) -4. Pick the channel that you want to receive Airbyte notifications in (ideally a dedicated one), and click `Allow` to give it permissions to access the channel. You should see the bot show up in the selected channel now. You will see an active webhook right above the `Add New Webhook to Workspace` button. +4. Select the channel that you want to receive Airbyte notifications in (ideally a dedicated one), and click `Allow` to give it permissions to access the channel. You should see the bot show up in the selected channel now. You will see an active webhook right above the `Add New Webhook to Workspace` button. -![](../../.gitbook/assets/notifications_webhook_url.png) +![](./assets/notification-slack-webhook-url-success.png) 5. Click `Copy.` to copy the link to your clipboard, which you will need to enter into Airbyte. -Your Webhook URL should look something like this: - -![](../../.gitbook/assets/notifications_airbyte_notification_settings.png) +Your Webhook URL should look similar to this: + ``` + https://hooks.slack.com/services/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PM1 + ``` ### Enable the Slack notification in Airbyte -1. In the Airbyte UI, click **Settings** and navigate to **Notifications**. - -2. Paste the copied webhook URL to `Webhook URL`. Using a Slack webook is recommended. On this page, you can toggle each slider decide whether you want notifications on each notification type. - -3. **Test it out.**: you can click `Test` to send a test message to the channel. Or, just run a sync now and try it out! If all goes well, you should receive a notification in your selected channel that looks like this: - -![](../../.gitbook/assets/notifications_slack_message.png) - -You're done! - -4. Click **Save changes**. - -## Enable schema update notifications +1. Click **Settings** and navigate to **Notifications**. On this page, you can toggle each slider decide whether you want notifications on each notification type. Paste the copied webhook URL to `Webhook URL`. -To be notified of any source schema changes: -1. Make sure you have enabled `Automatic Connection Updates` and `Connection Updates Requiring Action` notifications. If these are off, even if you turned on schema update notifications in a connection's settings, Airbyte will *NOT* send out any notifications related to these types of events. +3. **Test it out**: you can click `Test` to send a test message to the channel. Or, just run a sync now and try it out! For a successful sync, you should receive a notification that looks like this: -2. In the Airbyte UI, click **Connections** and select the connection you want to receive notifications for. +![](./assets/notification-slack-success.png) -3. Click the **Settings** tab on the Connection page. -4. Toggle **Schema update notifications**. +4. Click **Save changes** to ensure you continue to receive alerts about your Airbyte syncs. \ No newline at end of file diff --git a/docs/cloud/managing-airbyte-cloud/manage-credits.md b/docs/cloud/managing-airbyte-cloud/manage-credits.md index 8f04f6ffa7882..67518ead4155e 100644 --- a/docs/cloud/managing-airbyte-cloud/manage-credits.md +++ b/docs/cloud/managing-airbyte-cloud/manage-credits.md @@ -8,15 +8,15 @@ Airbyte [credits](https://airbyte.com/pricing) are used to pay for Airbyte resou ## Buy credits -To purchase credits directly through the UI, +1. To purchase credits directly through the UI, click **Billing** in the left-hand sidebar. The billing page displays the available credits, total credit usage, and the credit usage per connection. -1. Click **Billing** in the left-hand sidebar. + :::tip -2. If you are unsure of how many credits you need, use our [Cost Estimator](https://www.airbyte.com/pricing) or click **Talk to Sales** to find the right amount for your team. + If you are unsure of how many credits you need, use our [Cost Estimator](https://www.airbyte.com/pricing) or click **Talk to Sales** to find the right amount for your team. -3. Click **Buy credits**. + ::: -4. Determine the quantity of credits you intend to purchase. Adjust the **credit quantity**. When you're ready, click **Checkout**. +2. Click **Buy credits**. Enter the quantity of credits you intend to purchase and adjust the **credit quantity** accordingly. When you're ready, click **Checkout**. :::note @@ -30,11 +30,7 @@ To purchase credits directly through the UI, 5. You'll be renavigated to a Stripe payment page. If this is your first time purchasing, you'll be asked for payment details. After you enter your billing address, sales tax (if applicable) is calculated and added to the total. -6. Click **Pay**. - - Your payment is processed. The Billing page displays the available credits, total credit usage, and the credit usage per connection. - - A receipt for your purchase is sent to your email. +6. Click **Pay** to process your payment. A receipt for your purchase is automatically sent to your email. :::note @@ -42,17 +38,17 @@ To purchase credits directly through the UI, ::: -## Automatic reload of credits (Beta) +## Automatic reload of credits -You can enroll in automatic top-ups of your credit balance. This is a beta feature for those who do not want to manually add credits each time. +You can enroll in automatic top-ups of your credit balance. This feature is for those who do not want to manually add credits each time. To enroll, [email us](mailto:billing@airbyte.io) with: -1. A link to your workspace that you'd like to enable this feature for. +1. A link to your workspace or organization that you'd like to enable this feature for. 2. **Recharge threshold** The number under what credit balance you would like the automatic top up to occur. 3. **Recharge balance** The amount of credits you would like to refill to. -As an example, if the recharge threshold is 10 credits and recharge balance is 30 credits, anytime your workspace's credit balance dipped below 10 credits, Airbyte will automatically add enough credits to bring the balance back to 30 credits by charging the difference between your credit balance and 30 credits. +As an example, if the recharge threshold is 10 credits and recharge balance is 30 credits, anytime your credit balance dips below 10 credits, Airbyte will automatically add enough credits to bring the balance back to 30 credits by charging the difference between your credit balance and 30 credits. To take a real example, if: 1. The credit balance reached 3 credits. diff --git a/docs/cloud/managing-airbyte-cloud/manage-data-residency.md b/docs/cloud/managing-airbyte-cloud/manage-data-residency.md index 478c6c5862b5d..ec76c2cb33478 100644 --- a/docs/cloud/managing-airbyte-cloud/manage-data-residency.md +++ b/docs/cloud/managing-airbyte-cloud/manage-data-residency.md @@ -18,15 +18,7 @@ While the data is processed in a data plane of the chosen residency, the cursor When you set the default data residency, it applies your preference to new connections only. If you do not adjust the default data residency, the [Airbyte Default](configuring-connections.md) region is used (United States). If you want to change the data residency for an individual connection, you can do so in its [connection settings](configuring-connections.md). -To choose your default data residency: - -1. In the Airbyte UI, click **Settings**. - -2. Click **Data Residency**. - -3. Click the dropdown and choose the location for your default data residency. - -4. Click **Save changes**. +To choose your default data residency, click **Settings** in the Airbyte UI. Navigate to **Workspace** > **Data Residency**. Use the dropdown to choose the location for your default data residency and save your changes. :::info @@ -37,15 +29,7 @@ Depending on your network configuration, you may need to add [IP addresses](/ope ## Choose the data residency for a connection You can additionally choose the data residency for your connection in the connection settings. You can choose the data residency when creating a new connection, or you can set the default data residency for your workspace so that it applies for any new connections moving forward. -To choose a custom data residency for your connection: - -1. In the Airbyte UI, click **Connections** and then click the connection that you want to change. - -2. Click the **Settings** tab. - -3. Click the **Data residency** dropdown and choose the location for your default data residency. - -4. Click **Save changes** +To choose a custom data residency for your connection, click **Connections** in the Airbyte UI and then select the connection that you want to configure. Navigate to the **Settings** tab, open the **Advanced Settings**, and select the **Data residency** for the connection. :::note diff --git a/docs/cloud/managing-airbyte-cloud/understand-airbyte-cloud-limits.md b/docs/cloud/managing-airbyte-cloud/understand-airbyte-cloud-limits.md index f9e4b5467a846..86d9ef7188e24 100644 --- a/docs/cloud/managing-airbyte-cloud/understand-airbyte-cloud-limits.md +++ b/docs/cloud/managing-airbyte-cloud/understand-airbyte-cloud-limits.md @@ -6,11 +6,23 @@ products: cloud Understanding the following limitations will help you more effectively manage Airbyte Cloud. -* Max number of workspaces per user: 3* -* Max number of instances of the same source connector: 10* -* Max number of destinations in a workspace: 20* -* Max number of streams that can be returned by a source in a discover call: 1K -* Max number of streams that can be configured to sync in a single connection: 1K -* Size of a single record: 20MB +- Max number of workspaces per user: 3\* +- Max number of instances of the same source connector: 10\* +- Max number of destinations in a workspace: 20\* +- Max number of streams that can be returned by a source in a discover call: 1K +- Max number of streams that can be configured to sync in a single connection: 1K +- Size of a single record: 20MB\*\* -*Limits on workspaces, sources, and destinations do not apply to customers of [Powered by Airbyte](https://airbyte.com/solutions/powered-by-airbyte). To learn more [contact us](https://airbyte.com/talk-to-sales)! +--- + +\* Limits on workspaces, sources, and destinations do not apply to customers of +[Powered by Airbyte](https://airbyte.com/solutions/powered-by-airbyte). To learn more +[contact us](https://airbyte.com/talk-to-sales)! + +\*\* The effective maximum size of the record may vary based per destination. Some destinations may +fail to sync if a record cannot be stored, but Destinations which support +[typing and deduping](/using-airbyte/core-concepts/typing-deduping) will adjust your record so that +the sync does not fail, given the database/file constraints. For example, the maximum size of a +record in MongoDB is 16MB - records larger than that will need to be modified. At the very least, +primary keys and cursors will be maintained. Any modifications to the record will be stored within +`airbyte_meta.changes` for your review within the destination. diff --git a/docs/connector-development/README.md b/docs/connector-development/README.md index 34795a73856c2..6b0dc8b5c377e 100644 --- a/docs/connector-development/README.md +++ b/docs/connector-development/README.md @@ -1,141 +1,108 @@ # Connector Development -Airbyte supports two types of connectors: Sources and Destinations. A connector takes the form of a Docker image which follows the [Airbyte specification](../understanding-airbyte/airbyte-protocol.md). +There are two types of connectors in Airbyte: Sources and Destinations. Connectors can be built in +any programming language, as long as they're built into docker images that implement the +[Airbyte specification](../understanding-airbyte/airbyte-protocol.md). -To build a new connector in Java or Python, we provide templates so you don't need to start everything from scratch. +Most database sources and destinations are written in Java. API sources and destinations are written +in Python using the [Low-code CDK](config-based/low-code-cdk-overview.md) or +[Python CDK](cdk-python/). -**Note: you are not required to maintain the connectors you create.** The goal is that the Airbyte core team and the community help maintain the connector. +If you need to build a connector for an API Source, start with Connector Builder. It'll be enough +for most use cases. If you need help with connector development, we offer premium support to our +open-source users, [talk to our team](https://airbyte.com/talk-to-sales-premium-support) to get +access to it. -Airbyte provides some Connector Development Kits (CDKs) to help you build connectors. +### Connector Builder -If you need help from our team for connector development, we offer premium support to our open-source users, [talk to our team](https://airbyte.com/talk-to-sales-premium-support) to get access to it. - -### Connector builder UI - -The [connector builder UI](connector-builder-ui/overview.md) is based on the low-code development framework below and allows to develop and use connectors without leaving the Airbyte UI (no local development environment required). +The [connector builder UI](connector-builder-ui/overview.md) is based on the low-code development +framework below and allows to develop and use connectors without leaving the Airbyte web UI. No +local developer environment required. ### Low-code Connector-Development Framework -You can use the [low-code framework](config-based/low-code-cdk-overview.md) to build source connectors for REST APIs by modifying boilerplate YAML files. +You can use the [low-code framework](config-based/low-code-cdk-overview.md) to build source +connectors for HTTP API sources. Low-code CDK is a declarative framework that provides a YAML schema +to describe your connector without writing any Python code, but allowing you to use custom Python +components if required. ### Python Connector-Development Kit \(CDK\) -You can build a connector very quickly in Python with the [Airbyte CDK](cdk-python/), which generates 75% of the code required for you. +You can build a connector in Python with the [Airbyte CDK](cdk-python/). Compared to the low-code +CDK, the Python CDK is more flexible, but building the connector will be more involved. It provides +classes that work out of the box for most scenarios, and Airbyte provides generators that make the +connector scaffolds for you. Here's a guide for +[building a connector in Python CDK](tutorials/custom-python-connector/0-getting-started.md). ### Community maintained CDKs The Airbyte community also maintains some CDKs: -- The [Typescript CDK](https://github.com/faros-ai/airbyte-connectors) is actively maintained by Faros.ai for use in their product. -- The [Airbyte Dotnet CDK](cdk-dotnet/) comes with C# templates which can be used to generate 75% of the code required for you - -## The Airbyte specification - -Before building a new connector, review [Airbyte's data protocol specification](../understanding-airbyte/airbyte-protocol.md). +- The [Typescript CDK](https://github.com/faros-ai/airbyte-connectors) is actively maintained by + Faros.ai for use in their product. +- The [Airbyte Dotnet CDK](https://github.com/mrhamburg/airbyte.cdk.dotnet) in C#. +:::note +Before building a new connector, review +[Airbyte's data protocol specification](../understanding-airbyte/airbyte-protocol.md). +::: ## Adding a new connector -### Requirements - -To add a new connector you need to: - -1. Implement & Package your connector in an Airbyte Protocol compliant Docker image -2. Add integration tests for your connector. At a minimum, all connectors must pass [Airbyte's standard test suite](testing-connectors/), but you can also add your own tests. -3. Document how to build & test your connector -4. Publish the Docker image containing the connector - -Each requirement has a subsection below. - -### 1. Implement & package the connector +The easiest way to make and start using a connector in your workspace is by using the +[Connector Builder](connector-builder-ui/overview.md). -If you are building a connector in any of the following languages/frameworks, then you're in luck! We provide autogenerated templates to get you started quickly: +If you're writing your connector in Python or low-code CDK, use the generator to get the project +started: -#### Sources - -- **Python Source Connector** -- [**Singer**](https://singer.io)**-based Python Source Connector**. [Singer.io](https://singer.io/) is an open source framework with a large community and many available connectors \(known as taps & targets\). To build an Airbyte connector from a Singer tap, wrap the tap in a thin Python package to make it Airbyte Protocol-compatible. See the [Github Connector](https://github.com/airbytehq/airbyte/tree/master/airbyte-integrations/connectors/source-github) for an example of an Airbyte Connector implemented on top of a Singer tap. -- **Generic Connector**: This template provides a basic starting point for any language. - -#### Destinations - -- **Java Destination Connector** -- **Python Destination Connector** - -#### Creating a connector from a template - -Run the interactive generator: - -```text +```bash cd airbyte-integrations/connector-templates/generator ./generate.sh ``` -and choose the relevant template by using the arrow keys. This will generate a new connector in the `airbyte-integrations/connectors/` directory. +and choose the relevant template by using the arrow keys. This will generate a new connector in the +`airbyte-integrations/connectors/` directory. -Search the generated directory for "TODO"s and follow them to implement your connector. For more detailed walkthroughs and instructions, follow the relevant tutorial: +Search the generated directory for "TODO"s and follow them to implement your connector. For more +detailed walkthroughs and instructions, follow the relevant tutorial: -- [Speedrun: Building a HTTP source with the CDK](tutorials/cdk-speedrun.md) -- [Building a HTTP source with the CDK](tutorials/cdk-tutorial-python-http/getting-started.md) -- [Building a Python source](tutorials/building-a-python-source.md) +- [Building a HTTP source with the CDK](tutorials/custom-python-connector/0-getting-started.md) - [Building a Java destination](tutorials/building-a-java-destination.md) -As you implement your connector, make sure to review the [Best Practices for Connector Development](best-practices.md) guide. Following best practices is not a requirement for merging your contribution to Airbyte, but it certainly doesn't hurt ;\) - -### 2. Integration tests - -At a minimum, your connector must implement the acceptance tests described in [Testing Connectors](testing-connectors/) - -**Note: Acceptance tests are not yet available for Python destination connectors. Coming** [**soon**](https://github.com/airbytehq/airbyte/issues/4698)**!** - -### 3. Document building & testing your connector - -If you're writing in Python or Java, skip this section -- it is provided automatically. - -If you're writing in another language, please document the commands needed to: - -1. Build your connector docker image \(usually this is just `docker build .` but let us know if there are necessary flags, gotchas, etc..\) -2. Run any unit or integration tests _in a Docker image_. - -Your integration and unit tests must be runnable entirely within a Docker image. This is important to guarantee consistent build environments. - -When you submit a PR to Airbyte with your connector, the reviewer will use the commands you provide to integrate your connector into Airbyte's build system as follows: - -1. `:airbyte-integrations:connectors:source-:build` should run unit tests and build the integration's Docker image -2. `:airbyte-integrations:connectors:source-:integrationTest` should run integration tests including Airbyte's Standard test suite. - -### 4. Publish the connector - -Typically this will be handled as part of code review by an Airbyter. There is a section below on what steps are needed for publishing a connector and will mostly be used by Airbyte employees publishing the connector. +As you implement your connector, make sure to review the +[Best Practices for Connector Development](best-practices.md) guide. ## Updating an existing connector -The steps for updating an existing connector are the same as for building a new connector minus the need to use the autogenerator to create a new connector. Therefore the steps are: +The steps for updating an existing connector are the same as for building a new connector minus the +need to use the autogenerator to create a new connector. Therefore the steps are: 1. Iterate on the connector to make the needed changes 2. Run tests 3. Add any needed docs updates -4. Create a PR to get the connector published +4. Create a PR and get it reviewed and merged. -## Adding Typing and Deduplication to a connector - -_Coming soon._ - -Typing and Deduplication is how Airbyte transforms the raw data which is transmitted during a sync into easy-to-use final tables for database and data warehouse destinations. For more information on how typing and deduplication works, see [this doc](/using-airbyte/core-concepts/typing-deduping). +The new version of the connector will automatically be published in Cloud and OSS registries when +the PR is merged. ## Publishing a connector -Once you've finished iterating on the changes to a connector as specified in its `README.md`, follow these instructions to ship the new version of the connector with Airbyte out of the box. +Once you've finished iterating on the changes to a connector as specified in its `README.md`, follow +these instructions to ship the new version of the connector with Airbyte out of the box. -1. Bump the version in the `Dockerfile` of the connector \(`LABEL io.airbyte.version=X.X.X`\). -2. Bump the docker image version in the [metadata.yaml](connector-metadata-file.md) of the connector. -3. Submit a PR containing the changes you made. -4. One of Airbyte maintainers will review the change in the new version and make sure the tests are passing. -5. You our an Airbyte maintainer can merge the PR once it is approved and all the required CI checks are passing you. -6. Once the PR is merged the new connector version will be published to DockerHub and the connector should now be available for everyone who uses it. Thank you! +1. Bump the docker image version in the [metadata.yaml](connector-metadata-file.md) of the + connector. +2. Submit a PR containing the changes you made. +3. One of Airbyte maintainers will review the change in the new version and make sure the tests are + passing. +4. You our an Airbyte maintainer can merge the PR once it is approved and all the required CI checks + are passing you. +5. Once the PR is merged the new connector version will be published to DockerHub and the connector + should now be available for everyone who uses it. Thank you! ### Updating Connector Metadata -When a new (or updated version) of a connector is ready, our automations will check your branch for a few things: +When a new (or updated version) of a connector is ready, our automations will check your branch for +a few things: - Does the connector have an icon? - Does the connector have documentation and is it in the proper format? @@ -144,26 +111,44 @@ When a new (or updated version) of a connector is ready, our automations will ch If any of the above are failing, you won't be able to merge your PR or publish your connector. -Connector icons should be square SVGs and be located in [this directory](https://github.com/airbytehq/airbyte/tree/master/airbyte-config-oss/init-oss/src/main/resources/icons). +Connector icons should be square SVGs and be located in +[this directory](https://github.com/airbytehq/airbyte/tree/master/airbyte-config-oss/init-oss/src/main/resources/icons). -Connector documentation and changelogs are markdown files living either [here for sources](https://github.com/airbytehq/airbyte/tree/master/docs/integrations/sources), or [here for destinations](https://github.com/airbytehq/airbyte/tree/master/docs/integrations/destinations). +Connector documentation and changelogs are markdown files living either +[here for sources](https://github.com/airbytehq/airbyte/tree/master/docs/integrations/sources), or +[here for destinations](https://github.com/airbytehq/airbyte/tree/master/docs/integrations/destinations). ## Using credentials in CI -In order to run integration tests in CI, you'll often need to inject credentials into CI. There are a few steps for doing this: - -1. **Place the credentials into Google Secret Manager(GSM)**: Airbyte uses a project 'Google Secret Manager' service as the source of truth for all CI secrets. Place the credentials **exactly as they should be used by the connector** into a GSM secret [here](https://console.cloud.google.com/security/secret-manager?referrer=search&orgonly=true&project=dataline-integration-testing&supportedpurview=organizationId) i.e.: it should basically be a copy paste of the `config.json` passed into a connector via the `--config` flag. We use the following naming pattern: `SECRET__CREDS` e.g: `SECRET_SOURCE-S3_CREDS` or `SECRET_DESTINATION-SNOWFLAKE_CREDS`. +In order to run integration tests in CI, you'll often need to inject credentials into CI. There are +a few steps for doing this: + +1. **Place the credentials into Google Secret Manager(GSM)**: Airbyte uses a project 'Google Secret + Manager' service as the source of truth for all CI secrets. Place the credentials **exactly as + they should be used by the connector** into a GSM secret + [here](https://console.cloud.google.com/security/secret-manager?referrer=search&orgonly=true&project=dataline-integration-testing&supportedpurview=organizationId) + i.e.: it should basically be a copy paste of the `config.json` passed into a connector via the + `--config` flag. We use the following naming pattern: + `SECRET__CREDS` e.g: `SECRET_SOURCE-S3_CREDS` or + `SECRET_DESTINATION-SNOWFLAKE_CREDS`. 2. **Add the GSM secret's labels**: - - `connector` (required) -- unique connector's name or set of connectors' names with '\_' as delimiter i.e.: `connector=source-s3`, `connector=destination-snowflake` - - `filename` (optional) -- custom target secret file. Unfortunately Google doesn't use '.' into labels' values and so Airbyte CI scripts will add '.json' to the end automatically. By default secrets will be saved to `./secrets/config.json` i.e: `filename=config_auth` => `secrets/config_auth.json` -3. **Save a necessary JSON value** [Example](https://user-images.githubusercontent.com/11213273/146040653-4a76c371-a00e-41fe-8300-cbd411f10b2e.png). + - `connector` (required) -- unique connector's name or set of connectors' names with '\_' as + delimiter i.e.: `connector=source-s3`, `connector=destination-snowflake` + - `filename` (optional) -- custom target secret file. Unfortunately Google doesn't use '.' into + labels' values and so Airbyte CI scripts will add '.json' to the end automatically. By default + secrets will be saved to `./secrets/config.json` i.e: `filename=config_auth` => + `secrets/config_auth.json` +3. **Save a necessary JSON value** + [Example](https://user-images.githubusercontent.com/11213273/146040653-4a76c371-a00e-41fe-8300-cbd411f10b2e.png). 4. That should be it. #### Access CI secrets on GSM -Access to GSM storage is limited to Airbyte employees. To give an employee permissions to the project: +Access to GSM storage is limited to Airbyte employees. To give an employee permissions to the +project: -1. Go to the permissions' [page](https://console.cloud.google.com/iam-admin/iam?project=dataline-integration-testing) +1. Go to the permissions' + [page](https://console.cloud.google.com/iam-admin/iam?project=dataline-integration-testing) 2. Add a new principal to `dataline-integration-testing`: - input their login email diff --git a/docs/connector-development/cdk-dotnet/README.md b/docs/connector-development/cdk-dotnet/README.md deleted file mode 100644 index 901a6dc4df9ac..0000000000000 --- a/docs/connector-development/cdk-dotnet/README.md +++ /dev/null @@ -1,13 +0,0 @@ -# Connector Development Kit \(C# .NET\) - -The [Airbyte Dotnet CDK](https://github.com/mrhamburg/airbyte.cdk.dotnet) is a framework for rapidly developing production-grade Airbyte connectors. The CDK currently offers helpers specific for creating Airbyte source connectors for: - -* HTTP APIs \(REST APIs, GraphQL, etc..\) -* Generic Dotnet sources \(anything not covered by the above\) - -The CDK provides an improved developer experience by providing basic implementation structure and abstracting away low-level glue boilerplate. - -## Resources - -[This document](https://github.com/mrhamburg/airbyte.cdk.dotnet/blob/main/README.md) is the main guide for developing an Airbyte source with the Dotnet CDK. - diff --git a/docs/connector-development/cdk-python/README.md b/docs/connector-development/cdk-python/README.md index 76872e0186c6b..3830da20a135e 100644 --- a/docs/connector-development/cdk-python/README.md +++ b/docs/connector-development/cdk-python/README.md @@ -50,7 +50,7 @@ cd airbyte-integrations/connector-templates/generator Next, find all `TODO`s in the generated project directory. They're accompanied by comments explaining what you'll need to do in order to implement your connector. Upon completing all TODOs properly, you should have a functioning connector. -Additionally, you can follow [this tutorial](../tutorials/cdk-tutorial-python-http/getting-started.md) for a complete walkthrough of creating an HTTP connector using the Airbyte CDK. +Additionally, you can follow [this tutorial](../tutorials/custom-python-connector/0-getting-started.md) for a complete walkthrough of creating an HTTP connector using the Airbyte CDK. ### Concepts & Documentation @@ -72,7 +72,7 @@ Airbyte recommends using the CDK template generator to develop with the CDK. The For tips on useful Python knowledge, see the [Python Concepts](python-concepts.md) page. -You can find a complete tutorial for implementing an HTTP source connector in [this tutorial](../tutorials/cdk-tutorial-python-http/getting-started.md) +You can find a complete tutorial for implementing an HTTP source connector in [this tutorial](../tutorials/custom-python-connector/0-getting-started.md) ### Example Connectors diff --git a/docs/connector-development/config-based/low-code-cdk-overview.md b/docs/connector-development/config-based/low-code-cdk-overview.md index da246f0317751..c22efdc16cb20 100644 --- a/docs/connector-development/config-based/low-code-cdk-overview.md +++ b/docs/connector-development/config-based/low-code-cdk-overview.md @@ -67,7 +67,6 @@ If the answer to all questions is yes, you can use the low-code framework to bui - An API key for the source you want to build a connector for - Python >= 3.9 - Docker -- NodeJS ## Overview of the process diff --git a/docs/connector-development/config-based/tutorial/4-reading-data.md b/docs/connector-development/config-based/tutorial/4-reading-data.md index 677f5af4d9e8a..d1f69b71163e2 100644 --- a/docs/connector-development/config-based/tutorial/4-reading-data.md +++ b/docs/connector-development/config-based/tutorial/4-reading-data.md @@ -26,7 +26,7 @@ See the [catalog guide](https://docs.airbyte.com/understanding-airbyte/beginners Let's define the stream schema in `source-exchange-rates-tutorial/source_exchange_rates_tutorial/schemas/rates.json` -You can download the JSON file describing the output schema with all currencies [here](../../tutorials/cdk-tutorial-python-http/exchange_rates_schema.json) for convenience and place it in `schemas/`. +You can download the JSON file describing the output schema with all currencies [here](./exchange_rates_schema.json) for convenience and place it in `schemas/`. ```bash curl https://raw.githubusercontent.com/airbytehq/airbyte/master/docs/connector-development/tutorials/cdk-tutorial-python-http/exchange_rates_schema.json > source_exchange_rates_tutorial/schemas/rates.json diff --git a/docs/connector-development/config-based/tutorial/6-testing.md b/docs/connector-development/config-based/tutorial/6-testing.md index 284dfacc5b8c7..7effad89c30ad 100644 --- a/docs/connector-development/config-based/tutorial/6-testing.md +++ b/docs/connector-development/config-based/tutorial/6-testing.md @@ -35,7 +35,7 @@ airbyte-ci connectors --use-remote-secrets=false --name source-exchange-rates-tu ## Next steps: -Next, we'll add the connector to the [Airbyte platform](https://docs.airbyte.com/connector-development/tutorials/cdk-tutorial-python-http/use-connector-in-airbyte). +Next, we'll add the connector to the [Airbyte platform](https://docs.airbyte.com/operator-guides/using-custom-connectors). ## Read more: diff --git a/docs/connector-development/tutorials/cdk-tutorial-python-http/exchange_rates_schema.json b/docs/connector-development/config-based/tutorial/exchange_rates_schema.json similarity index 100% rename from docs/connector-development/tutorials/cdk-tutorial-python-http/exchange_rates_schema.json rename to docs/connector-development/config-based/tutorial/exchange_rates_schema.json diff --git a/docs/connector-development/config-based/understanding-the-yaml-file/pagination.md b/docs/connector-development/config-based/understanding-the-yaml-file/pagination.md index 021804c15b497..b47e57416b13c 100644 --- a/docs/connector-development/config-based/understanding-the-yaml-file/pagination.md +++ b/docs/connector-development/config-based/understanding-the-yaml-file/pagination.md @@ -220,7 +220,7 @@ paginator: <...> pagination_strategy: type: "CursorPagination" - cursor_value: "{{ headers['urls']['next'] }}" + cursor_value: "{{ headers['link']['next']['url'] }}" page_token_option: type: "RequestPath" ``` diff --git a/docs/connector-development/config-based/understanding-the-yaml-file/reference.md b/docs/connector-development/config-based/understanding-the-yaml-file/reference.md index 6400771d4109e..85c60bc9b8d22 100644 --- a/docs/connector-development/config-based/understanding-the-yaml-file/reference.md +++ b/docs/connector-development/config-based/understanding-the-yaml-file/reference.md @@ -46,5 +46,15 @@ export const toc = [ value: def.title, id: `/macros/${def.title}`, level: 3 + })), + { + "value": "Interpolation filters:", + "id": "filters", + "level": 2 + }, + ...schema.interpolation.filters.map((def) => ({ + value: def.title, + id: `/filters/${def.title}`, + level: 3 })) ]; \ No newline at end of file diff --git a/docs/connector-development/connector-metadata-file.md b/docs/connector-development/connector-metadata-file.md index 1b9fed5380b67..bdee0bd9fbafe 100644 --- a/docs/connector-development/connector-metadata-file.md +++ b/docs/connector-development/connector-metadata-file.md @@ -1,9 +1,9 @@ # Connector Metadata.yaml File -The `metadata.yaml` file is a new addition to Airbyte's connector folders. This file is created with the goal of simplifying and enhancing how we manage information related to each connector. It is designed to replace the previous `source_definitions.yaml` and `destinations_definitions.yaml` files. - The `metadata.yaml` file contains crucial information about the connector, including its type, definition ID, Docker image tag, Docker repository, and much more. It plays a key role in the way Airbyte handles connector data and improves the overall organization and accessibility of this data. +N.B. the `metadata.yaml` file replaces the previous `source_definitions.yaml` and `destinations_definitions.yaml` files. + ## Structure Below is an example of a `metadata.yaml` file for the Postgres source: @@ -88,9 +88,6 @@ In the example above, the connector has three tags. Tags are used for two primar These are just examples of how tags can be used. As a free-form field, the tags list can be customized as required for each connector. This flexibility allows tags to be a powerful tool for managing and discovering connectors. ## The `icon` Field -This denotes the name of the icon file for the connector. At this time the icon file is located in the `airbyte-platform` repository. So please ensure that the icon file is present in the `airbyte-platform` repository at [https://github.com/airbytehq/airbyte-platform/tree/main/airbyte-config/init/src/main/resources/icons](https://github.com/airbytehq/airbyte-platform/tree/main/airbyte-config/init/src/main/resources/icons) before adding the icon name to the `metadata.yaml` file. - -### Future Plans _⚠️ This property is in the process of being refactored to be a file in the connector folder_ You may notice a `icon.svg` file in the connectors folder. diff --git a/docs/connector-development/migration-to-base-image.md b/docs/connector-development/migration-to-base-image.md index 63299ddc06b0d..d6bc3bac2d8c0 100644 --- a/docs/connector-development/migration-to-base-image.md +++ b/docs/connector-development/migration-to-base-image.md @@ -1,6 +1,6 @@ -# Migration guide: How to make a connector use our base image +# Migration Guide: How to make a python connector use our base image -We currently enforce our certified connectors to use our [base image](https://hub.docker.com/r/airbyte/python-connector-base). +We currently enforce our certified python connectors to use our [base image](https://hub.docker.com/r/airbyte/python-connector-base). This guide will help connector developers to migrate their connector to use our base image. N.B: This guide currently only applies to python connectors. diff --git a/docs/connector-development/testing-connectors/standard-source-tests.md b/docs/connector-development/testing-connectors/standard-source-tests.md deleted file mode 100644 index 4f9d1b5307560..0000000000000 --- a/docs/connector-development/testing-connectors/standard-source-tests.md +++ /dev/null @@ -1,4 +0,0 @@ -# Standard Source Test Suite - -Test methods start with `test`. Other methods are internal helpers in the java class implementing the test suite. - diff --git a/docs/connector-development/tutorials/building-a-python-source.md b/docs/connector-development/tutorials/building-a-python-source.md deleted file mode 100644 index e83aeec9d0ac8..0000000000000 --- a/docs/connector-development/tutorials/building-a-python-source.md +++ /dev/null @@ -1,389 +0,0 @@ -# Building a Python Source - -## Summary - -This article provides a checklist for how to create a python source. Each step in the checklist has -a link to a more detailed explanation below. - -## Requirements - -Docker, Python, and Java with the versions listed in the -[tech stack section](../../understanding-airbyte/tech-stack.md). - -:::info - -All the commands below assume that `python` points to a version of python >3.7. On some systems, -`python` points to a Python2 installation and `python3` points to Python3. If this is the case on -your machine, substitute all `python` commands in this guide with `python3` . Otherwise, make sure -to install Python 3 before beginning. - -::: - -## Checklist - -### Creating a Source - -- Step 1: Create the source using template -- Step 2: Build the newly generated source -- Step 3: Set up your Airbyte development environment -- Step 4: Implement `spec` \(and define the specification for the source - `airbyte-integrations/connectors/source-/spec.yaml`\) -- Step 5: Implement `check` -- Step 6: Implement `discover` -- Step 7: Implement `read` -- Step 8: Set up Connector Acceptance Tests -- Step 9: Write unit tests or integration tests -- Step 10: Update the `README.md` \(If API credentials are required to run the integration, please - document how they can be obtained or link to a how-to guide.\) -- Step 11: Update the `metadata.yaml` file with accurate information about your connector. These - metadata will be used to add the connector to Airbyte's connector registry. -- Step 12: Add docs \(in `docs/integrations/sources/.md`\) - -:::info -Each step of the Creating a Source checklist is explained in more detail below. -::: - -:::info -All `./gradlew` commands must be run from the root of the airbyte project. -::: - -### Submitting a Source to Airbyte - -- If you need help with any step of the process, feel free to submit a PR with your progress and any - questions you have. -- Submit a PR. -- To run integration tests, Airbyte needs access to a test account/environment. Coordinate with an - Airbyte engineer \(via the PR\) to add test credentials so that we can run tests for the - integration in the CI. \(We will create our own test account once you let us know what source we - need to create it for.\) -- Once the config is stored in Github Secrets, edit `.github/workflows/test-command.yml` and - `.github/workflows/publish-command.yml` to inject the config into the build environment. -- Edit the `airbyte/tools/bin/ci_credentials.sh` script to pull the script from the build - environment and write it to `secrets/config.json` during the build. - -:::info - -If you have a question about a step the Submitting a Source to Airbyte checklist include it -in your PR or ask it on -[#help-connector-development channel on Slack](https://airbytehq.slack.com/archives/C027KKE4BCZ). - -::: - -## Explaining Each Step - -### Step 1: Create the source using template - -Airbyte provides a code generator which bootstraps the scaffolding for our connector. - -```bash -$ cd airbyte-integrations/connector-templates/generator # assumes you are starting from the root of the Airbyte project. -$ ./generate.sh -``` - -Select the `python` template and then input the name of your connector. For this walk through we -will refer to our source as `example-python` - -### Step 2: Install the newly generated source - -Install the source by running: - -```bash -cd airbyte-integrations/connectors/source- -poetry install -``` - -### Step 3: Set up your Airbyte development environment - -The generator creates a file `source_/source.py`. This will be where you implement the -logic for your source. The templated `source.py` contains extensive comments explaining each method -that needs to be implemented. Briefly here is an overview of each of these methods. - -1. `spec`: declares the user-provided credentials or configuration needed to run the connector -2. `check`: tests if with the user-provided configuration the connector can connect with the - underlying data source. -3. `discover`: declares the different streams of data that this connector can output -4. `read`: reads data from the underlying data source \(The stock ticker API\) - -#### Dependencies - -Python dependencies for your source should be declared in -`airbyte-integrations/connectors/source-/setup.py` in the `install_requires` field. You -will notice that a couple of Airbyte dependencies are already declared there. Do not remove these; -they give your source access to the helper interface that is provided by the generator. - -You may notice that there is a `requirements.txt` in your source's directory as well. Do not touch -this. It is autogenerated and used to provide Airbyte dependencies. All your dependencies should be -declared in `setup.py`. - -#### Development Environment - -The commands we ran above created a virtual environment for your source. If you want your IDE to -auto complete and resolve dependencies properly, point it at the virtual env -`airbyte-integrations/connectors/source-/.venv`. Also anytime you change the -dependencies in the `setup.py` make sure to re-run the build command. The build system will handle -installing all dependencies in the `setup.py` into the virtual environment. - -Pretty much all it takes to create a source is to implement the `Source` interface. The template -fills in a lot of information for you and has extensive docstrings describing what you need to do to -implement each method. The next 4 steps are just implementing that interface. - -:::info - -All logging should be done through the `logger` object passed into each method. Otherwise, -logs will not be shown in the Airbyte UI. - -::: - -#### Iterating on your implementation - -Everyone develops differently but here are 3 ways that we recommend iterating on a source. Consider -using whichever one matches your style. - -**Run the source using python** - -You'll notice in your source's directory that there is a python file called `main.py`. This file -exists as convenience for development. You can call it from within the virtual environment mentioned -above `. ./.venv/bin/activate` to test out that your source works. - -```bash -# from airbyte-integrations/connectors/source- -poetry run source- spec -poetry run source- check --config secrets/config.json -poetry run source- discover --config secrets/config.json -poetry run source- read --config secrets/config.json --catalog sample_files/configured_catalog.json -``` - -The nice thing about this approach is that you can iterate completely within in python. The downside -is that you are not quite running your source as it will actually be run by Airbyte. Specifically -you're not running it from within the docker container that will house it. - -**Build the source docker image** - -You have to build a docker image for your connector if you want to run your source exactly as it -will be run by Airbyte. - -**Option A: Building the docker image with `airbyte-ci`** - -This is the preferred method for building and testing connectors. - -If you want to open source your connector we encourage you to use our -[`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) -tool to build your connector. It will not use a Dockerfile but will build the connector image from -our -[base image](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/base_images/README.md) -and use our internal build logic to build an image from your Python connector code. - -Running `airbyte-ci connectors --name source- build` will build your connector image. -Once the command is done, you will find your connector image in your local docker host: -`airbyte/source-:dev`. - -**Option B: Building the docker image with a Dockerfile** - -If you don't want to rely on `airbyte-ci` to build your connector, you can build the docker image -using your own Dockerfile. This method is not preferred, and is not supported for certified -connectors. - -Create a `Dockerfile` in the root of your connector directory. The `Dockerfile` should look -something like this: - -```Dockerfile - -FROM airbyte/python-connector-base:1.1.0 - -COPY . ./airbyte/integration_code -RUN pip install ./airbyte/integration_code - -# The entrypoint and default env vars are already set in the base image -# ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -# ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -``` - -Please use this as an example. This is not optimized. - -Build your image: - -```bash -docker build . -t airbyte/source-example-python:dev -``` - -**Run the source docker image** - -```bash -docker run --rm airbyte/source-example-python:dev spec -docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-example-python:dev check --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-example-python:dev discover --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/sample_files:/sample_files airbyte/source-example-python:dev read --config /secrets/config.json --catalog /sample_files/configured_catalog.json -``` - -:::info - -Each time you make a change to your implementation you need to re-build the connector image. -This ensures the new python code is added into the docker container. - -::: - -The nice thing about this approach is that you are running your source exactly as it will be run by -Airbyte. The tradeoff is that iteration is slightly slower, because you need to re-build the -connector between each change. - -**Detailed Debug Messages** - -During development of your connector, you can enable the printing of detailed debug information -during a sync by specifying the `--debug` flag. This will allow you to get a better picture of what -is happening during each step of your sync. - -```bash -poetry run source- read --config secrets/config.json --catalog sample_files/configured_catalog.json --debug -``` - -In addition to the preset CDK debug statements, you can also emit custom debug information from your -connector by introducing your own debug statements: - -```python -self.logger.debug( - "your debug message here", - extra={ - "debug_field": self.value, - "custom_field": your_object.field - } -) -``` - -**TDD using acceptance tests & integration tests** - -Airbyte provides an acceptance test suite that is run against every source. The objective of these -tests is to provide some "free" tests that can sanity check that the basic functionality of the -source works. One approach to developing your connector is to simply run the tests between each -change and use the feedback from them to guide your development. - -If you want to try out this approach, check out Step 8 which describes what you need to do to set up -the standard tests for your source. - -The nice thing about this approach is that you are running your source exactly as Airbyte will run -it in the CI. The downside is that the tests do not run very quickly. - -### Step 4: Implement `spec` - -Each source contains a specification that describes what inputs it needs in order for it to pull -data. This file can be found in `airbyte-integrations/connectors/source-/spec.yaml`. -This is a good place to start when developing your source. Using JsonSchema define what the inputs -are \(e.g. username and password\). Here's -[an example](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-stripe/source_stripe/spec.yaml) -of what the `spec.yaml` looks like for the stripe source. - -For more details on what the spec is, you can read about the Airbyte Protocol -[here](../../understanding-airbyte/airbyte-protocol.md). - -The generated code that Airbyte provides, handles implementing the `spec` method for you. It assumes -that there will be a file called `spec.yaml` in the same directory as `source.py`. If you have -declared the necessary JsonSchema in `spec.yaml` you should be done with this step. - -### Step 5: Implement `check` - -As described in the template code, this method takes in a json object called config that has the -values described in the `spec.yaml` filled in. In other words if the `spec.yaml` said that the -source requires a `username` and `password` the config object might be -`{ "username": "airbyte", "password": "password123" }`. It returns a json object that reports, given -the credentials in the config, whether we were able to connect to the source. For example, with the -given credentials could the source connect to the database server. - -While developing, we recommend storing this object in `secrets/config.json`. The `secrets` directory -is gitignored by default. - -### Step 6: Implement `discover` - -As described in the template code, this method takes in the same config object as `check`. It then -returns a json object called a `catalog` that describes what data is available and metadata on what -options are available for how to replicate it. - -For a brief overview on the catalog check out -[Beginner's Guide to the Airbyte Catalog](../../understanding-airbyte/beginners-guide-to-catalog.md). - -### Step 7: Implement `read` - -As described in the template code, this method takes in the same config object as the previous -methods. It also takes in a "configured catalog". This object wraps the catalog emitted by the -`discover` step and includes configuration on how the data should be replicated. For a brief -overview on the configured catalog check out -[Beginner's Guide to the Airbyte Catalog](../../understanding-airbyte/beginners-guide-to-catalog.md). -It then returns a generator which returns each record in the stream. - -### Step 8: Set up Connector Acceptance Tests (CATs) - -The Connector Acceptance Tests are a set of tests that run against all sources. These tests are run -in the Airbyte CI to prevent regressions. They also can help you sanity check that your source works -as expected. The following [article](../testing-connectors/connector-acceptance-tests-reference.md) -explains Connector Acceptance Tests and how to run them. - -You can run the tests using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): -`airbyte-ci connectors --name source- test --only-step=acceptance` - -:::info - -In some rare cases we make exceptions and allow a source to not need to pass all the -standard tests. If for some reason you think your source cannot reasonably pass one of the tests -cases, reach out to us on github or slack, and we can determine whether there's a change we can make -so that the test will pass or if we should skip that test for your source. - -::: - -### Step 9: Write unit tests and/or integration tests - -The connector acceptance tests are meant to cover the basic functionality of a source. Think of it -as the bare minimum required for us to add a source to Airbyte. In case you need to test additional -functionality of your source, write unit or integration tests. - -#### Unit Tests - -Add any relevant unit tests to the `tests/unit_tests` directory. Unit tests should _not_ depend on any secrets. - -You can run the tests using `poetry run pytest tests/unit_tests` - -#### Integration Tests - -Place any integration tests in the `integration_tests` directory such that they can be -[discovered by pytest](https://docs.pytest.org/en/6.2.x/goodpractices.html#conventions-for-python-test-discovery). - -You can run the tests using `poetry run pytest tests/integration_tests` - -### Step 10: Update the `README.md` - -The template fills in most of the information for the readme for you. Unless there is a special -case, the only piece of information you need to add is how one can get the credentials required to -run the source. e.g. Where one can find the relevant API key, etc. - -### Step 11: Add the connector to the API/UI - -There are multiple ways to use the connector you have built. - -If you are self hosting Airbyte (OSS) you are able to use the Custom Connector feature. This feature -allows you to run any Docker container that implements the Airbye protocol. You can read more about -it [here](https://docs.airbyte.com/integrations/custom-connectors/). - -If you are using Airbyte Cloud (or OSS), you can submit a PR to add your connector to the Airbyte -repository. Once the PR is merged, the connector will be available to all Airbyte Cloud users. You -can read more about it -[here](https://docs.airbyte.com/contributing-to-airbyte/submit-new-connector). - -Note that when submitting an Airbyte connector, you will need to ensure that - -1. The connector passes the CAT suite. See - [Set up Connector Acceptance Tests](<#step-8-set-up-connector-acceptance-tests-(cats)>). -2. The metadata.yaml file (created by our generator) is filed out and valid. See - [Connector Metadata File](https://docs.airbyte.com/connector-development/connector-metadata-file). -3. You have created appropriate documentation for the connector. See [Add docs](#step-12-add-docs). - -### Step 12: Add docs - -Each connector has its own documentation page. By convention, that page should have the following -path: in `docs/integrations/sources/.md`. For the documentation to get packaged with -the docs, make sure to add a link to it in `docs/SUMMARY.md`. You can pattern match doing that from -existing connectors. - -## Related tutorials - -For additional examples of how to use the Python CDK to build an Airbyte source connector, see the -following tutorials: - -- [Python CDK Speedrun: Creating a Source](https://docs.airbyte.com/connector-development/tutorials/cdk-speedrun) -- [Build a connector to extract data from the Webflow API](https://airbyte.com/tutorials/extract-data-from-the-webflow-api) diff --git a/docs/connector-development/tutorials/cdk-speedrun.md b/docs/connector-development/tutorials/cdk-speedrun.md index 35a9543d2e53f..6082912f0999f 100644 --- a/docs/connector-development/tutorials/cdk-speedrun.md +++ b/docs/connector-development/tutorials/cdk-speedrun.md @@ -3,7 +3,7 @@ ## CDK Speedrun \(HTTP API Source Creation Any Route\) This is a blazing fast guide to building an HTTP source connector. Think of it as the TL;DR version -of [this tutorial.](cdk-tutorial-python-http/getting-started.md) +of [this tutorial.](custom-python-connector/0-getting-started.md) If you are a visual learner and want to see a video version of this guide going over each part in detail, check it out below. @@ -14,8 +14,7 @@ detail, check it out below. 1. Python >= 3.9 2. [Poetry](https://python-poetry.org/) -2. Docker -3. NodeJS +3. Docker #### Generate the Template @@ -27,7 +26,7 @@ cd airbyte-integrations/connector-templates/generator ./generate.sh ``` -Select the `Python HTTP API Source` and name it `python-http-example`. +Select the `Python CDK Source` and name it `python-http-example`. #### Create Dev Environment diff --git a/docs/connector-development/tutorials/cdk-tutorial-python-http/configured_catalog.json b/docs/connector-development/tutorials/cdk-tutorial-python-http/configured_catalog.json deleted file mode 100644 index 7aa9a7e9b2229..0000000000000 --- a/docs/connector-development/tutorials/cdk-tutorial-python-http/configured_catalog.json +++ /dev/null @@ -1,134 +0,0 @@ -{ - "streams": [ - { - "stream": { - "name": "exchange_rates", - "json_schema": { - "$schema": "http://json-schema.org/draft-04/schema#", - "type": "object", - "properties": { - "access_key": { - "type": "string" - }, - "base": { - "type": "string" - }, - "rates": { - "type": "object", - "properties": { - "GBP": { - "type": "number" - }, - "HKD": { - "type": "number" - }, - "IDR": { - "type": "number" - }, - "PHP": { - "type": "number" - }, - "LVL": { - "type": "number" - }, - "INR": { - "type": "number" - }, - "CHF": { - "type": "number" - }, - "MXN": { - "type": "number" - }, - "SGD": { - "type": "number" - }, - "CZK": { - "type": "number" - }, - "THB": { - "type": "number" - }, - "BGN": { - "type": "number" - }, - "EUR": { - "type": "number" - }, - "MYR": { - "type": "number" - }, - "NOK": { - "type": "number" - }, - "CNY": { - "type": "number" - }, - "HRK": { - "type": "number" - }, - "PLN": { - "type": "number" - }, - "LTL": { - "type": "number" - }, - "TRY": { - "type": "number" - }, - "ZAR": { - "type": "number" - }, - "CAD": { - "type": "number" - }, - "BRL": { - "type": "number" - }, - "RON": { - "type": "number" - }, - "DKK": { - "type": "number" - }, - "NZD": { - "type": "number" - }, - "EEK": { - "type": "number" - }, - "JPY": { - "type": "number" - }, - "RUB": { - "type": "number" - }, - "KRW": { - "type": "number" - }, - "USD": { - "type": "number" - }, - "AUD": { - "type": "number" - }, - "HUF": { - "type": "number" - }, - "SEK": { - "type": "number" - } - } - }, - "date": { - "type": "string" - } - } - }, - "supported_sync_modes": ["full_refresh"] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" - } - ] -} diff --git a/docs/connector-development/tutorials/cdk-tutorial-python-http/connection-checking.md b/docs/connector-development/tutorials/cdk-tutorial-python-http/connection-checking.md deleted file mode 100644 index 984082e7a60bf..0000000000000 --- a/docs/connector-development/tutorials/cdk-tutorial-python-http/connection-checking.md +++ /dev/null @@ -1,83 +0,0 @@ -# Step 4: Connection Checking - -The second operation in the Airbyte Protocol that we'll implement is the `check` operation. - -This operation verifies that the input configuration supplied by the user can be used to connect to -the underlying data source. Note that this user-supplied configuration has the values described in -the `spec.yaml` filled in. In other words if the `spec.yaml` said that the source requires a -`username` and `password` the config object might be -`{ "username": "airbyte", "password": "password123" }`. You should then implement something that -returns a json object reporting, given the credentials in the config, whether we were able to -connect to the source. - -In order to make requests to the API, we need to specify the access. In our case, this is a fairly -trivial check since the API requires no credentials. Instead, let's verify that the user-input -`base` currency is a legitimate currency. In `source.py` we'll find the following autogenerated -source: - -```python -class SourcePythonHttpTutorial(AbstractSource): - - def check_connection(self, logger, config) -> Tuple[bool, any]: - """ - TODO: Implement a connection check to validate that the user-provided config can be used to connect to the underlying API - - See https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-stripe/source_stripe/source.py#L232 - for an example. - - :param config: the user-input config object conforming the connector's spec.yaml - :param logger: logger object - :return Tuple[bool, any]: (True, None) if the input config can be used to connect to the API successfully, (False, error) otherwise. - """ - return True, None - -... -``` - -Following the docstring instructions, we'll change the implementation to verify that the input -currency is a real currency: - -```python - def check_connection(self, logger, config) -> Tuple[bool, any]: - accepted_currencies = {"USD", "JPY", "BGN", "CZK", "DKK"} # assume these are the only allowed currencies - input_currency = config['base'] - if input_currency not in accepted_currencies: - return False, f"Input currency {input_currency} is invalid. Please input one of the following currencies: {accepted_currencies}" - else: - return True, None -``` - -:::info - -In a real implementation you should write code to connect to the API to validate connectivity -and not just validate inputs - for an example see `check_connection` in the -[OneSignal source connector implementation](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-onesignal/source_onesignal/source.py) - -::: - -Let's test out this implementation by creating two objects: a valid and an invalid config and -attempt to give them as input to the connector. For this section, you will need to take the API -access key generated earlier and add it to both configs. Because these configs contain secrets, we -recommend storing configs which contain secrets in `secrets/config.json` because the `secrets` -directory is gitignored by default. - -```bash -mkdir sample_files -echo '{"start_date": "2022-04-01", "base": "USD", "apikey": }' > secrets/config.json -echo '{"start_date": "2022-04-01", "base": "BTC", "apikey": }' > secrets/invalid_config.json -poetry run source-python-http-example check --config secrets/config.json -poetry run source-python-http-example check --config secrets/invalid_config.json -``` - -You should see output like the following: - -```bash -> poetry run source-python-http-example check --config secrets/config.json -{"type": "CONNECTION_STATUS", "connectionStatus": {"status": "SUCCEEDED"}} - -> poetry run source-python-http-example check --config secrets/invalid_config.json -{"type": "CONNECTION_STATUS", "connectionStatus": {"status": "FAILED", "message": "Input currency BTC is invalid. Please input one of the following currencies: {'DKK', 'USD', 'CZK', 'BGN', 'JPY'}"}} -``` - -While developing, we recommend storing configs which contain secrets in `secrets/config.json` -because the `secrets` directory is gitignored by default. diff --git a/docs/connector-development/tutorials/cdk-tutorial-python-http/creating-the-source.md b/docs/connector-development/tutorials/cdk-tutorial-python-http/creating-the-source.md deleted file mode 100644 index ed4ff875bc38f..0000000000000 --- a/docs/connector-development/tutorials/cdk-tutorial-python-http/creating-the-source.md +++ /dev/null @@ -1,22 +0,0 @@ -# Step 1: Creating the Source - -Airbyte provides a code generator which bootstraps the scaffolding for our connector. - -```bash -$ cd airbyte-integrations/connector-templates/generator # assumes you are starting from the root of the Airbyte project. -# Install NPM from https://www.npmjs.com/get-npm if you don't have it -$ ./generate.sh -``` - -This will bring up an interactive helper application. Use the arrow keys to pick a template from the -list. Select the `Python HTTP API Source` template and then input the name of your connector. The -application will create a new directory in airbyte/airbyte-integrations/connectors/ with the name of -your new connector. - -For this walk-through we will refer to our source as `python-http-example`. The finalized source -code for this tutorial can be found -[here](https://github.com/airbytehq/airbyte/tree/master/airbyte-integrations/connectors/source-python-http-tutorial). - -The source we will build in this tutorial will pull data from the -[Rates API](https://exchangeratesapi.io/), a free and open API which documents historical exchange -rates for fiat currencies. diff --git a/docs/connector-development/tutorials/cdk-tutorial-python-http/declare-schema.md b/docs/connector-development/tutorials/cdk-tutorial-python-http/declare-schema.md deleted file mode 100644 index 54f15a72e5c39..0000000000000 --- a/docs/connector-development/tutorials/cdk-tutorial-python-http/declare-schema.md +++ /dev/null @@ -1,97 +0,0 @@ -# Step 5: Declare the Schema - -The `discover` method of the Airbyte Protocol returns an `AirbyteCatalog`: an object which declares -all the streams output by a connector and their schemas. It also declares the sync modes supported -by the stream \(full refresh or incremental\). See the -[catalog tutorial](https://docs.airbyte.com/understanding-airbyte/beginners-guide-to-catalog) for -more information. - -This is a simple task with the Airbyte CDK. For each stream in our connector we'll need to: - -1. Create a python `class` in `source.py` which extends `HttpStream`. -2. Place a `.json` file in the `source_/schemas/` directory. The name of the file - should be the snake_case name of the stream whose schema it describes, and its contents should be - the JsonSchema describing the output from that stream. - -Let's create a class in `source.py` which extends `HttpStream`. You'll notice there are classes with -extensive comments describing what needs to be done to implement various connector features. Feel -free to read these classes as needed. But for the purposes of this tutorial, let's assume that we -are adding classes from scratch either by deleting those generated classes or editing them to match -the implementation below. - -We'll begin by creating a stream to represent the data that we're pulling from the Exchange Rates -API: - -```python -class ExchangeRates(HttpStream): - url_base = "https://api.apilayer.com/exchangerates_data/" - - # Set this as a noop. - primary_key = None - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - # The API does not offer pagination, so we return None to indicate there are no more pages in the response - return None - - def path( - self, - stream_state: Mapping[str, Any] = None, - stream_slice: Mapping[str, Any] = None, - next_page_token: Mapping[str, Any] = None - ) -> str: - return "" # TODO - - def parse_response( - self, - response: requests.Response, - stream_state: Mapping[str, Any], - stream_slice: Mapping[str, Any] = None, - next_page_token: Mapping[str, Any] = None, - ) -> Iterable[Mapping]: - return None # TODO -``` - -Note that this implementation is entirely empty -- we haven't actually done anything. We'll come -back to this in the next step. But for now we just want to declare the schema of this stream. We'll -declare this as a stream that the connector outputs by returning it from the `streams` method: - -```python -from airbyte_cdk.sources.streams.http.auth import NoAuth - -class SourcePythonHttpTutorial(AbstractSource): - - def check_connection(self, logger, config) -> Tuple[bool, any]: - ... - - def streams(self, config: Mapping[str, Any]) -> List[Stream]: - # NoAuth just means there is no authentication required for this API and is included for completeness. - # Skip passing an authenticator if no authentication is required. - # Other authenticators are available for API token-based auth and Oauth2. - auth = NoAuth() - return [ExchangeRates(authenticator=auth)] -``` - -Having created this stream in code, we'll put a file `exchange_rates.json` in the `schemas/` folder. -You can download the JSON file describing the output schema [here](./exchange_rates_schema.json) for -convenience and place it in `schemas/`. - -With `.json` schema file in place, let's see if the connector can now find this schema and produce a -valid catalog: - -```bash -poetry run source-python-http-example discover --config secrets/config.json # this is not a mistake, the schema file is found by naming snake_case naming convention as specified above -``` - -you should see some output like: - -```json -{"type": "CATALOG", "catalog": {"streams": [{"name": "exchange_rates", "json_schema": {"$schema": "http://json-schema.org/draft-04/schema#", "type": "object", "properties": {"base": {"type": "string"}, "rates": {"type": "object", "properties": {"GBP": {"type": "number"}, "HKD": {"type": "number"}, "IDR": {"type": "number"}, "PHP": {"type": "number"}, "LVL": {"type": "number"}, "INR": {"type": "number"}, "CHF": {"type": "number"}, "MXN": {"type": "number"}, "SGD": {"type": "number"}, "CZK": {"type": "number"}, "THB": {"type": "number"}, "BGN": {"type": "number"}, "EUR": {"type": "number"}, "MYR": {"type": "number"}, "NOK": {"type": "number"}, "CNY": {"type": "number"}, "HRK": {"type": "number"}, "PLN": {"type": "number"}, "LTL": {"type": "number"}, "TRY": {"type": "number"}, "ZAR": {"type": "number"}, "CAD": {"type": "number"}, "BRL": {"type": "number"}, "RON": {"type": "number"}, "DKK": {"type": "number"}, "NZD": {"type": "number"}, "EEK": {"type": "number"}, "JPY": {"type": "number"}, "RUB": {"type": "number"}, "KRW": {"type": "number"}, "USD": {"type": "number"}, "AUD": {"type": "number"}, "HUF": {"type": "number"}, "SEK": {"type": "number"}}}, "date": {"type": "string"}}}, "supported_sync_modes": ["full_refresh"]}]}} -``` - -It's that simple! Now the connector knows how to declare your connector's stream's schema. We -declare only one stream since our source is simple, but the principle is exactly the same if you had -many streams. - -You can also dynamically define schemas, but that's beyond the scope of this tutorial. See the -[schema docs](../../cdk-python/full-refresh-stream.md#defining-the-streams-schema) for more -information. diff --git a/docs/connector-development/tutorials/cdk-tutorial-python-http/define-inputs.md b/docs/connector-development/tutorials/cdk-tutorial-python-http/define-inputs.md deleted file mode 100644 index 956a452194307..0000000000000 --- a/docs/connector-development/tutorials/cdk-tutorial-python-http/define-inputs.md +++ /dev/null @@ -1,59 +0,0 @@ -# Step 3: Define Inputs - -Each connector declares the inputs it needs to read data from the underlying data source. This is -the Airbyte Protocol's `spec` operation. - -The simplest way to implement this is by creating a `spec.yaml` file in `source_/spec.yaml` -which describes your connector's inputs according to the -[ConnectorSpecification](https://github.com/airbytehq/airbyte/blob/master/docs/understanding-airbyte/airbyte-protocol.md#spec) -schema. This is a good place to start when developing your source. Using JsonSchema, define what the -inputs are \(e.g. username and password\). Here's -[an example](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-stripe/source_stripe/spec.yaml) -of what the `spec.yaml` looks like for the Stripe API source. - -For more details on what the spec is, you can read about the Airbyte Protocol -[here](https://docs.airbyte.com/understanding-airbyte/airbyte-protocol). - -The generated code that Airbyte provides, handles implementing the `spec` method for you. It assumes -that there will be a file called `spec.yaml` in the same directory as `source.py`. If you have -declared the necessary JsonSchema in `spec.yaml` you should be done with this step. - -Given that we'll pulling currency data for our example source, we'll define the following -`spec.yaml`: - -```yaml -documentationUrl: https://docs.airbyte.com/integrations/sources/exchangeratesapi -connectionSpecification: - $schema: http://json-schema.org/draft-07/schema# - title: Python Http Tutorial Spec - type: object - required: - - apikey - - start_date - - base - properties: - apikey: - type: string - description: API access key used to retrieve data from the Exchange Rates API. - airbyte_secret: true - start_date: - type: string - description: Start getting data from that date. - pattern: ^[0-9]{4}-[0-9]{2}-[0-9]{2}$ - examples: - - "%Y-%m-%d" - base: - type: string - examples: - - USD - - EUR - description: - 'ISO reference currency. See here.' -``` - -In addition to metadata, we define three inputs: - -- `apikey`: The API access key used to authenticate requests to the API -- `start_date`: The beginning date to start tracking currency exchange rates from -- `base`: The currency whose rates we're interested in tracking diff --git a/docs/connector-development/tutorials/cdk-tutorial-python-http/getting-started.md b/docs/connector-development/tutorials/cdk-tutorial-python-http/getting-started.md deleted file mode 100644 index f97c65bd63522..0000000000000 --- a/docs/connector-development/tutorials/cdk-tutorial-python-http/getting-started.md +++ /dev/null @@ -1,38 +0,0 @@ -# Getting Started - -## Summary - -This is a step-by-step guide for how to create an Airbyte source in Python to read data from an HTTP -API. We'll be using the Exchange Rates API as an example since it is simple and demonstrates a lot -of the capabilities of the CDK. - -## Requirements -* Python >= 3.9 -* [Poetry](https://python-poetry.org/) -* Docker - -All the commands below assume that `python` points to a version of python >=3.9.0. On some -systems, `python` points to a Python2 installation and `python3` points to Python3. If this is the -case on your machine, substitute all `python` commands in this guide with `python3`. - -## Exchange Rates API Setup - -For this guide we will be making API calls to the Exchange Rates API. In order to generate the API -access key that will be used by the new connector, you will have to follow steps on the -[Exchange Rates Data API](https://apilayer.com/marketplace/exchangerates_data-api/) by signing up -for the Free tier plan. Once you have an API access key, you can continue with the guide. - -## Checklist - -- Step 1: Create the source using the template -- Step 2: Install dependencies for the new source -- Step 3: Define the inputs needed by your connector -- Step 4: Implement connection checking -- Step 5: Declare the schema of your streams -- Step 6: Implement functionality for reading your streams -- Step 7: Use the connector in Airbyte -- Step 8: Write unit tests or integration tests - -Each step of the Creating a Source checklist is explained in more detail in the following steps. We -also mention how you can submit the connector to be included with the general Airbyte release at the -end of the tutorial. diff --git a/docs/connector-development/tutorials/cdk-tutorial-python-http/install-dependencies.md b/docs/connector-development/tutorials/cdk-tutorial-python-http/install-dependencies.md deleted file mode 100644 index 04a835a3c783a..0000000000000 --- a/docs/connector-development/tutorials/cdk-tutorial-python-http/install-dependencies.md +++ /dev/null @@ -1,95 +0,0 @@ -# Step 2: Install Dependencies - -Now that you've generated the module, let's navigate to its directory and install dependencies: - -```bash -cd ../../connectors/source- -poetry install -``` - -Let's verify everything is working as intended. Run: - -```bash -poetry run source- spec -``` - -You should see some output: - -```json -{"type": "SPEC", "spec": {"documentationUrl": "https://docsurl.com", "connectionSpecification": {"$schema": "http://json-schema.org/draft-07/schema#", "title": "Python Http Tutorial Spec", "type": "object", "required": ["TODO"], "properties": {"TODO: This schema defines the configuration required for the source. This usually involves metadata such as database and/or authentication information.": {"type": "string", "description": "describe me"}}}}} -``` - -We just ran Airbyte Protocol's `spec` command! We'll talk more about this later, but this is a -simple sanity check to make sure everything is wired up correctly. - -## Notes on iteration cycle - -### Dependencies - -Python dependencies for your source should be declared in -`airbyte-integrations/connectors/source-/setup.py` in the `install_requires` field. You -will notice that a couple of Airbyte dependencies are already declared there. Do not remove these; -they give your source access to the helper interfaces provided by the generator. - -You may notice that there is a `requirements.txt` in your source's directory as well. Don't edit -this. It is autogenerated and used to provide Airbyte dependencies. All your dependencies should be -declared in `setup.py`. - -### Development Environment - -The commands we ran above created a -[Python virtual environment](https://docs.python.org/3/tutorial/venv.html) for your source. If you -want your IDE to auto complete and resolve dependencies properly, point it at the virtual env -`airbyte-integrations/connectors/source-/.venv`. Also anytime you change the -dependencies in the `setup.py` make sure to re-run `pip install -r requirements.txt`. - -### Iterating on your implementation - -There are two ways we recommend iterating on a source. Consider using whichever one matches your -style. - -**Run the source using python** - -You'll notice in your source's directory that there is a python file called `main.py`. This file -exists as convenience for development. You run it to test that your source works: - -```bash -# from airbyte-integrations/connectors/source- -poetry run source- spec -poetry run source- check --config secrets/config.json -poetry run source- discover --config secrets/config.json -poetry run source- read --config secrets/config.json --catalog sample_files/configured_catalog.json -``` - -The nice thing about this approach is that you can iterate completely within python. The downside is -that you are not quite running your source as it will actually be run by Airbyte. Specifically, -you're not running it from within the docker container that will house it. - -**Run the source using docker** - -If you want to run your source exactly as it will be run by Airbyte \(i.e. within a docker -container\), you can use the following commands from the connector module directory -\(`airbyte-integrations/connectors/source-python-http-example`\): - -```bash -# First build the container -docker build . -t airbyte/source-:dev - -# Then use the following commands to run it -docker run --rm airbyte/source-:dev spec -docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-:dev check --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-:dev discover --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/sample_files:/sample_files airbyte/source-:dev read --config /secrets/config.json --catalog /sample_files/configured_catalog.json -``` - -:::info - -Each time you make a change to your implementation you need to re-build the connector image -via `docker build . -t airbyte/source-:dev`. This ensures the new python code is added into -the docker container. - -::: - -The nice thing about this approach is that you are running your source exactly as it will be run by -Airbyte. The tradeoff is iteration is slightly slower, as the connector is re-built between each -change. diff --git a/docs/connector-development/tutorials/cdk-tutorial-python-http/read-data.md b/docs/connector-development/tutorials/cdk-tutorial-python-http/read-data.md deleted file mode 100644 index a2bcfee77562f..0000000000000 --- a/docs/connector-development/tutorials/cdk-tutorial-python-http/read-data.md +++ /dev/null @@ -1,300 +0,0 @@ -# Step 6: Read Data - -Describing schemas is good and all, but at some point we have to start reading data! So let's get to -work. But before, let's describe what we're about to do: - -The `HttpStream` superclass, like described in the -[concepts documentation](../../cdk-python/http-streams.md), is facilitating reading data from HTTP -endpoints. It contains built-in functions or helpers for: - -- authentication -- pagination -- handling rate limiting or transient errors -- and other useful functionality - -In order for it to be able to do this, we have to provide it with a few inputs: - -- the URL base and path of the endpoint we'd like to hit -- how to parse the response from the API -- how to perform pagination - -Optionally, we can provide additional inputs to customize requests: - -- request parameters and headers -- how to recognize rate limit errors, and how long to wait \(by default it retries 429 and 5XX - errors using exponential backoff\) -- HTTP method and request body if applicable -- configure exponential backoff policy - -Backoff policy options: - -- `retry_factor` Specifies factor for exponential backoff policy \(by default is 5\) -- `max_retries` Specifies maximum amount of retries for backoff policy \(by default is 5\) -- `raise_on_http_errors` If set to False, allows opting-out of raising HTTP code exception \(by - default is True\) - -There are many other customizable options - you can find them in the -[`airbyte_cdk.sources.streams.http.HttpStream`](https://github.com/airbytehq/airbyte/blob/master/airbyte-cdk/python/airbyte_cdk/sources/streams/http/http.py) -class. - -So in order to read data from the exchange rates API, we'll fill out the necessary information for -the stream to do its work. First, we'll implement a basic read that just reads the last day's -exchange rates, then we'll implement incremental sync using stream slicing. - -Let's begin by pulling data for the last day's rates by using the `/latest` endpoint: - -```python -class ExchangeRates(HttpStream): - url_base = "https://api.apilayer.com/exchangerates_data/" - - primary_key = None - - def __init__(self, config: Mapping[str, Any], **kwargs): - super().__init__() - self.base = config['base'] - self.apikey = config['apikey'] - - - def path( - self, - stream_state: Mapping[str, Any] = None, - stream_slice: Mapping[str, Any] = None, - next_page_token: Mapping[str, Any] = None - ) -> str: - # The "/latest" path gives us the latest currency exchange rates - return "latest" - - def request_headers( - self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None - ) -> Mapping[str, Any]: - # The api requires that we include apikey as a header so we do that in this method - return {'apikey': self.apikey} - - def request_params( - self, - stream_state: Mapping[str, Any], - stream_slice: Mapping[str, Any] = None, - next_page_token: Mapping[str, Any] = None, - ) -> MutableMapping[str, Any]: - # The api requires that we include the base currency as a query param so we do that in this method - return {'base': self.base} - - def parse_response( - self, - response: requests.Response, - stream_state: Mapping[str, Any], - stream_slice: Mapping[str, Any] = None, - next_page_token: Mapping[str, Any] = None, - ) -> Iterable[Mapping]: - # The response is a simple JSON whose schema matches our stream's schema exactly, - # so we just return a list containing the response - return [response.json()] - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - # The API does not offer pagination, - # so we return None to indicate there are no more pages in the response - return None -``` - -This may look big, but that's just because there are lots of \(unused, for now\) parameters in these -methods \(those can be hidden with Python's `**kwargs`, but don't worry about it for now\). Really -we just added a few lines of "significant" code: - -1. Added a constructor `__init__` which stores the `base` currency to query for and the `apikey` - used for authentication. -2. `return {'base': self.base}` to add the `?base=` query parameter to the request based - on the `base` input by the user. -3. `return {'apikey': self.apikey}` to add the header `apikey=` to the request based - on the `apikey` input by the user. -4. `return [response.json()]` to parse the response from the API to match the schema of our schema - `.json` file. -5. `return "latest"` to indicate that we want to hit the `/latest` endpoint of the API to get the - latest exchange rate data. - -Let's also pass the config specified by the user to the stream class: - -```python - def streams(self, config: Mapping[str, Any]) -> List[Stream]: - auth = NoAuth() - return [ExchangeRates(authenticator=auth, config=config)] -``` - -We're now ready to query the API! - -To do this, we'll need a -[ConfiguredCatalog](../../../understanding-airbyte/beginners-guide-to-catalog.md). We've prepared -one -[here](https://github.com/airbytehq/airbyte/blob/master/docs/connector-development/tutorials/cdk-tutorial-python-http/configured_catalog.json) --- download this and place it in `sample_files/configured_catalog.json`. Then run: - -```bash - poetry run source- --config secrets/config.json --catalog sample_files/configured_catalog.json -``` - -you should see some output lines, one of which is a record from the API: - -```text -"type": "RECORD", "record": {"stream": "exchange_rates", "data": {"success": true, "timestamp": 1651129443, "base": "EUR", "date": "2022-04-28", "rates": {"AED": 3.86736, "AFN": 92.13195, "ALL": 120.627843, "AMD": 489.819318, "ANG": 1.910347, "AOA": 430.073735, "ARS": 121.119674, "AUD": 1.478877, "AWG": 1.895762, "AZN": 1.794932, "BAM": 1.953851, "BBD": 2.140212, "BDT": 91.662775, "BGN": 1.957013, "BHD": 0.396929, "BIF": 2176.669098, "BMD": 1.052909, "BND": 1.461004, "BOB": 7.298009, "BRL": 5.227798, "BSD": 1.060027, "BTC": 2.6717761e-05, "BTN": 81.165435, "BWP": 12.802036, "BYN": 3.565356, "BYR": 20637.011334, "BZD": 2.136616, "CAD": 1.349329, "CDF": 2118.452361, "CHF": 1.021627, "CLF": 0.032318, "CLP": 891.760584, "CNY": 6.953724, "COP": 4171.971894, "CRC": 701.446322, "CUC": 1.052909, "CUP": 27.902082, "CVE": 110.15345, "CZK": 24.499027, "DJF": 188.707108, "DKK": 7.441548, "DOP": 58.321493, "DZD": 152.371647, "EGP": 19.458297, "ERN": 15.793633, "ETB": 54.43729, "EUR": 1, "FJD": 2.274651, "FKP": 0.80931, "GBP": 0.839568, "GEL": 3.20611, "GGP": 0.80931, "GHS": 7.976422, "GIP": 0.80931, "GMD": 56.64554, "GNF": 9416.400803, "GTQ": 8.118402, "GYD": 221.765423, "HKD": 8.261854, "HNL": 26.0169, "HRK": 7.563467, "HTG": 115.545574, "HUF": 377.172734, "IDR": 15238.748216, "ILS": 3.489582, "IMP": 0.80931, "INR": 80.654494, "IQD": 1547.023976, "IRR": 44538.040218, "ISK": 137.457233, "JEP": 0.80931, "JMD": 163.910125, "JOD": 0.746498, "JPY": 137.331903, "KES": 121.87429, "KGS": 88.581418, "KHR": 4286.72178, "KMF": 486.443591, "KPW": 947.617993, "KRW": 1339.837191, "KWD": 0.322886, "KYD": 0.883397, "KZT": 473.770223, "LAK": 12761.755235, "LBP": 1602.661797, "LKR": 376.293562, "LRD": 159.989586, "LSL": 15.604181, "LTL": 3.108965, "LVL": 0.636894, "LYD": 5.031557, "MAD": 10.541225, "MDL": 19.593772, "MGA": 4284.002369, "MKD": 61.553251, "MMK": 1962.574442, "MNT": 3153.317641, "MOP": 8.567461, "MRO": 375.88824, "MUR": 45.165684, "MVR": 16.199478, "MWK": 865.62318, "MXN": 21.530268, "MYR": 4.594366, "MZN": 67.206888, "NAD": 15.604214, "NGN": 437.399752, "NIO": 37.965356, "NOK": 9.824365, "NPR": 129.86672, "NZD": 1.616441, "OMR": 0.405421, "PAB": 1.060027, "PEN": 4.054233, "PGK": 3.73593, "PHP": 55.075028, "PKR": 196.760944, "PLN": 4.698101, "PYG": 7246.992296, "QAR": 3.833603, "RON": 4.948144, "RSD": 117.620172, "RUB": 77.806269, "RWF": 1086.709833, "SAR": 3.949063, "SBD": 8.474149, "SCR": 14.304711, "SDG": 470.649944, "SEK": 10.367719, "SGD": 1.459695, "SHP": 1.45028, "SLL": 13082.391386, "SOS": 609.634325, "SRD": 21.904702, "STD": 21793.085136, "SVC": 9.275519, "SYP": 2645.380032, "SZL": 16.827859, "THB": 36.297991, "TJS": 13.196811, "TMT": 3.685181, "TND": 3.22348, "TOP": 2.428117, "TRY": 15.575532, "TTD": 7.202107, "TWD": 31.082183, "TZS": 2446.960099, "UAH": 32.065033, "UGX": 3773.578577, "USD": 1.052909, "UYU": 43.156886, "UZS": 11895.19696, "VEF": 225143710305.04727, "VND": 24171.62598, "VUV": 118.538204, "WST": 2.722234, "XAF": 655.287181, "XAG": 0.045404, "XAU": 0.000559, "XCD": 2.845538, "XDR": 0.783307, "XOF": 655.293398, "XPF": 118.347299, "YER": 263.490114, "ZAR": 16.77336, "ZMK": 9477.445964, "ZMW": 18.046154, "ZWL": 339.036185}}, "emitted_at": 1651130169364}} -``` - -There we have it - a stream which reads data in just a few lines of code! - -We theoretically _could_ stop here and call it a connector. But let's give adding incremental sync a -shot. - -## Adding incremental sync - -To add incremental sync, we'll do a few things: - -1. Pass the `start_date` param input by the user into the stream. -2. Declare the stream's `cursor_field`. -3. Declare the stream's property `_cursor_value` to hold the state value -4. Add `IncrementalMixin` to the list of the ancestors of the stream and implement setter and getter - of the `state`. -5. Implement the `stream_slices` method. -6. Update the `path` method to specify the date to pull exchange rates for. -7. Update the configured catalog to use `incremental` sync when we're testing the stream. - -We'll describe what each of these methods do below. Before we begin, it may help to familiarize -yourself with how incremental sync works in Airbyte by reading the -[docs on incremental](/using-airbyte/core-concepts/sync-modes/incremental-append.md). - -To keep things concise, we'll only show functions as we edit them one by one. - -Let's get the easy parts out of the way and pass the `start_date`: - -```python -def streams(self, config: Mapping[str, Any]) -> List[Stream]: - auth = NoAuth() - # Parse the date from a string into a datetime object - start_date = datetime.strptime(config['start_date'], '%Y-%m-%d') - return [ExchangeRates(authenticator=auth, config=config, start_date=start_date)] -``` - -Let's also add this parameter to the constructor and declare the `cursor_field`: - -```python -from datetime import datetime, timedelta -from airbyte_cdk.sources.streams import IncrementalMixin - - -class ExchangeRates(HttpStream, IncrementalMixin): - url_base = "https://api.apilayer.com/exchangerates_data/" - cursor_field = "date" - primary_key = "date" - - def __init__(self, config: Mapping[str, Any], start_date: datetime, **kwargs): - super().__init__() - self.base = config['base'] - self.apikey = config['apikey'] - self.start_date = start_date - self._cursor_value = None -``` - -Declaring the `cursor_field` informs the framework that this stream now supports incremental sync. -The next time you run `python main_dev.py discover --config secrets/config.json` you'll find that -the `supported_sync_modes` field now also contains `incremental`. - -But we're not quite done with supporting incremental, we have to actually emit state! We'll -structure our state object very simply: it will be a `dict` whose single key is `'date'` and value -is the date of the last day we synced data from. For example, `{'date': '2021-04-26'}` indicates the -connector previously read data up until April 26th and therefore shouldn't re-read anything before -April 26th. - -Let's do this by implementing the getter and setter for the `state` inside the `ExchangeRates` -class. - -```python - @property - def state(self) -> Mapping[str, Any]: - if self._cursor_value: - return {self.cursor_field: self._cursor_value.strftime('%Y-%m-%d')} - else: - return {self.cursor_field: self.start_date.strftime('%Y-%m-%d')} - - @state.setter - def state(self, value: Mapping[str, Any]): - self._cursor_value = datetime.strptime(value[self.cursor_field], '%Y-%m-%d') -``` - -Update internal state `cursor_value` inside `read_records` method - -```python - def read_records(self, *args, **kwargs) -> Iterable[Mapping[str, Any]]: - for record in super().read_records(*args, **kwargs): - if self._cursor_value: - latest_record_date = datetime.strptime(record[self.cursor_field], '%Y-%m-%d') - self._cursor_value = max(self._cursor_value, latest_record_date) - yield record - -``` - -This implementation compares the date from the latest record with the date in the current state and -takes the maximum as the "new" state object. - -We'll implement the `stream_slices` method to return a list of the dates for which we should pull -data based on the stream state if it exists: - -```python - def _chunk_date_range(self, start_date: datetime) -> List[Mapping[str, Any]]: - """ - Returns a list of each day between the start date and now. - The return value is a list of dicts {'date': date_string}. - """ - dates = [] - while start_date < datetime.now(): - dates.append({self.cursor_field: start_date.strftime('%Y-%m-%d')}) - start_date += timedelta(days=1) - return dates - - def stream_slices(self, sync_mode, cursor_field: List[str] = None, stream_state: Mapping[str, Any] = None) -> Iterable[Optional[Mapping[str, Any]]]: - start_date = datetime.strptime(stream_state[self.cursor_field], '%Y-%m-%d') if stream_state and self.cursor_field in stream_state else self.start_date - return self._chunk_date_range(start_date) -``` - -Each slice will cause an HTTP request to be made to the API. We can then use the information present -in the `stream_slice` parameter \(a single element from the list we constructed in `stream_slices` -above\) to set other configurations for the outgoing request like `path` or `request_params`. For -more info about stream slicing, see [the slicing docs](../../cdk-python/stream-slices.md). - -In order to pull data for a specific date, the Exchange Rates API requires that we pass the date as -the path component of the URL. Let's override the `path` method to achieve this: - -```python -def path(self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None) -> str: - return stream_slice['date'] -``` - -With these changes, your implementation should look like the file -[here](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-python-http-tutorial/source_python_http_tutorial/source.py). - -The last thing we need to do is change the `sync_mode` field in the -`sample_files/configured_catalog.json` to `incremental`: - -```text -"sync_mode": "incremental", -``` - -We should now have a working implementation of incremental sync! - -Let's try it out: - -```bash -poetry run source- --config secrets/config.json --catalog sample_files/configured_catalog.json -``` - -You should see a bunch of `RECORD` messages and `STATE` messages. To verify that incremental sync is -working, pass the input state back to the connector and run it again: - -```bash -# Save the latest state to sample_files/state.json -poetry run source- --config secrets/config.json --catalog sample_files/configured_catalog.json | grep STATE | tail -n 1 | jq .state.data > sample_files/state.json - -# Run a read operation with the latest state message -poetry run source- --config secrets/config.json --catalog sample_files/configured_catalog.json --state sample_files/state.json -``` - -You should see that only the record from the last date is being synced! This is acceptable behavior, -since Airbyte requires at-least-once delivery of records, so repeating the last record twice is OK. - -With that, we've implemented incremental sync for our connector! diff --git a/docs/connector-development/tutorials/cdk-tutorial-python-http/test-your-connector.md b/docs/connector-development/tutorials/cdk-tutorial-python-http/test-your-connector.md deleted file mode 100644 index c6fe41cc6265b..0000000000000 --- a/docs/connector-development/tutorials/cdk-tutorial-python-http/test-your-connector.md +++ /dev/null @@ -1,28 +0,0 @@ -# Step 8: Test the Connector - -## Unit Tests - -Add any relevant unit tests to the `tests/unit_tests` directory. Unit tests should **not** depend on any secrets. - -You can run the tests using `poetry run pytest tests/unit_tests`. - -## Integration Tests - -Place any integration tests in the `integration_tests` directory such that they can be -[discovered by pytest](https://docs.pytest.org/en/6.2.x/goodpractices.html#conventions-for-python-test-discovery). - -You can run the tests using `poetry run pytest tests/integration_tests`. - -More information on integration testing can be found on -[the Testing Connectors doc](https://docs.airbyte.com/connector-development/testing-connectors/#running-integration-tests). - -## Connector Acceptance Tests - -Connector Acceptance Tests (CATs) are a fixed set of tests Airbyte provides that every Airbyte -source connector must pass. While they're only required if you intend to submit your connector -to Airbyte, you might find them helpful in any case. See -[Testing your connectors](../../testing-connectors/) - -If you want to submit this connector to become a default connector within Airbyte, follow steps 8 -onwards from the -[Python source checklist](../building-a-python-source.md#step-8-set-up-standard-tests) diff --git a/docs/connector-development/tutorials/cdk-tutorial-python-http/use-connector-in-airbyte.md b/docs/connector-development/tutorials/cdk-tutorial-python-http/use-connector-in-airbyte.md deleted file mode 100644 index 7772bcbebc1d5..0000000000000 --- a/docs/connector-development/tutorials/cdk-tutorial-python-http/use-connector-in-airbyte.md +++ /dev/null @@ -1,56 +0,0 @@ -# Step 7: Use the Connector in Airbyte - -To use your connector in your own installation of Airbyte you have to build the docker image for -your connector. - -**Option A: Building the docker image with `airbyte-ci`** - -This is the preferred method for building and testing connectors. - -If you want to open source your connector we encourage you to use our -[`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) -tool to build your connector. It will not use a Dockerfile but will build the connector image from -our -[base image](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/base_images/README.md) -and use our internal build logic to build an image from your Python connector code. - -Running `airbyte-ci connectors --name source- build` will build your connector image. -Once the command is done, you will find your connector image in your local docker host: -`airbyte/source-:dev`. - -**Option B: Building the docker image with a Dockerfile** - -If you don't want to rely on `airbyte-ci` to build your connector, you can build the docker image -using your own Dockerfile. This method is not preferred, and is not supported for certified -connectors. - -Create a `Dockerfile` in the root of your connector directory. The `Dockerfile` should look -something like this: - -```Dockerfile - -FROM airbyte/python-connector-base:1.1.0 - -COPY . ./airbyte/integration_code -RUN pip install ./airbyte/integration_code - -# The entrypoint and default env vars are already set in the base image -# ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -# ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -``` - -Please use this as an example. This is not optimized. - -Build your image: - -```bash -docker build . -t airbyte/source-example-python:dev -``` - -Then, follow the instructions from the -[building a Python source tutorial](../building-a-python-source.md#step-11-add-the-connector-to-the-api-ui) -for using the connector in the Airbyte UI, replacing the name as appropriate. - -Note: your built docker image must be accessible to the `docker` daemon running on the Airbyte node. -If you're doing this tutorial locally, these instructions are sufficient. Otherwise you may need to -push your Docker image to Dockerhub. diff --git a/docs/connector-development/tutorials/custom-python-connector/0-getting-started.md b/docs/connector-development/tutorials/custom-python-connector/0-getting-started.md new file mode 100644 index 0000000000000..d47582fbdc773 --- /dev/null +++ b/docs/connector-development/tutorials/custom-python-connector/0-getting-started.md @@ -0,0 +1,114 @@ +# Getting started + +This tutorial will walk you through the creation of a custom Airbyte connector implemented with the +Python CDK. It assumes you're already familiar with Airbyte concepts and you've already built a +connector using the [Connector Builder](../../connector-builder-ui/tutorial.mdx). + +The Python CDK should be used to implement connectors that require features that are not yet +available in the Connector Builder or in the low-code framework. You can use the +[Connector Builder compatibility guide](../../connector-builder-ui/connector-builder-compatibility.md) +to know whether it is suitable for your needs. + +We'll build an connector for the Survey Monkey API, focusing on the `surveys` and `survey responses` +endpoints. + +You can find the documentation for the API +[here](https://api.surveymonkey.com/v3/docs?shell#getting-started). + +As a first step, follow the getting started instructions from the docs to register a draft app to +your account. + +Next, we'll inspect the API docs to understand how the endpoints work. + +## Surveys endpoint + +The [surveys endpoint doc](https://api.surveymonkey.com/v3/docs?shell#api-endpoints-get-surveys) +shows that the endpoint URL is https://api.surveymonkey.com/v3/surveys and that the data is nested +in the response's "data" field. + +It also shows there are two ways to iterate through the record pages. We could either keep a page +counter and increment it on every request, or use the link sent as part of the response in "links" +-> "next". + +The two approaches are equivalent for the Survey Monkey API, but as a rule of thumb, it is +preferable to use the links provided by the API if it is available instead of reverse engineering +the mechanism. This way, we don't need to modify the connector if the API changes their pagination +mechanism, for instance, if they decide to implement server-side pagination. + +:::info When available, server-side pagination should be preferred over client-side pagination +because it has lower risks of missing records if the collection is modified while the connector +iterates. + +::: + +The "Optional Query Strings for GET" section shows that the `perPage` parameter is important because +it’ll define how many records we can fetch with a single request. The maximum page size isn't +explicit from the docs. We'll use 1000 as a limit. When unsure, we recommend finding the limit +experimentally by trying multiple values. + +Also note that we'll need to add the `include` query parameter to fetch all the properties, such as +`date_modified`, which we'll use as our cursor value. + +The section also shows how to filter the data based on the record's timestamp, which will allow the +connector to read records incrementally. We'll use the `start_modified_at` and `end_modified_at` to +scope our requests. + +We won't worry about the other query params as we won't filter by title or folder. + +:::info + +As a rule of thumb, it's preferable to fetch all the available data rather than ask the user to +specify which folder IDs they care about. + +::: + +## Survey responses + +Next, we'll take a look at the +[documentation for the survey responses endpoint](https://api.surveymonkey.com/v3/docs?shell#api-endpoints-get-surveys-id-responses). +It shows that this endpoint depends on the `surveys` endpoint, since we'll need to first fetch the +surveys to fetch the responses. + +It shows that the records are also nested in a "data" field. It's unclear from the examples if the +responses include a link to the next page. I already confirmed that's the case for you, but I'd +recommend validating this kind of assumption for any connector you plan on running in production. + +We’re not going to worry about the custom variables because we want to pull all the data. + +It’s worth noting that this stream won’t support incremental mode because there’s no timestamp to +filter on. + +## Authentication + +The [authentication section](https://api.surveymonkey.com/v3/docs?shell#authentication describes how +to authenticate to the API. Follow the instructions to obtain an access key. We'll then be able to +authenticate by passing a HTTP header in the format `Authorization: bearer YOUR_ACCESS_TOKEN`. + +## Rate limits + +The +[request and responses section](https://api.surveymonkey.com/v3/docs?shell#request-and-response-limits) +shows there’s a limit of 120 requests per minute, and of 500 requests per day. + +We’ll handle the 120 requests per minute by throttling, but we’ll let the sync fail if it hits the +daily limit because we don’t want to let the sync spin for up to 24 hours without any reason. + +We won’t worry about the increasing the rate limits. + +## Error codes + +The [Error Codes](https://api.surveymonkey.com/v3/docs?shell#error-codes) section shows the error +codes 1010-1018 represent authentication failures. These failures should be handled by the end-user, +and aren't indicative of a system failure. We'll therefore handle them explicitly so users know how +to resolve them should they occur. + +## Putting it all together + +We now know enough about how the API works: + +| Stream | URL | authentication | path to data | pagination | cursor value | time based filters | query params | rate limits | user errors | +| ---------------- | ------------------------------------------------------------- | --------------------------------------------- | ------------ | ------------------------- | ------------- | -------------------------------------------------- | ------------------------------------------------------------------------------------------------------------ | ---------------------- | -------------------- | +| surveys | https://api.surveymonkey.com/v3/surveys | bearerAuthorization: bearer YOUR_ACCESS_TOKEN | data | response -> links -> next | date_modified | start_modified_at and end_modified_at query params | include: response_count,date_created,date_modified,language,question_count,analyze_url,preview,collect_stats | 120 request per minute | error code 1010-1018 | +| survey responses | https://api.surveymonkey.com/v3/surveys/{survey_id}/responses | bearerAuthorization: bearer YOUR_ACCESS_TOKEN | data | response -> links -> next | None | None | None | 120 request per minute | error code 1010-1018 | + +In the [next section](./1-environment-setup.md), we'll setup our development environment. diff --git a/docs/connector-development/tutorials/custom-python-connector/1-environment-setup.md b/docs/connector-development/tutorials/custom-python-connector/1-environment-setup.md new file mode 100644 index 0000000000000..fa77bd4d278ed --- /dev/null +++ b/docs/connector-development/tutorials/custom-python-connector/1-environment-setup.md @@ -0,0 +1,68 @@ +# Environment setup + +Let's first start by cloning the repository, optionally forking it first + +```bash +git@github.com:airbytehq/airbyte.git +cd airbyte +``` + +Use the Airbyte provided code generator which bootstraps the scaffolding for our connector: + +```bash +cd airbyte-integrations/connector-templates/generator +./generate.sh +``` + +Select Python-source Set name to survey-monkey-demo + +Next change your working directory to the new connector module. Then create an initial python +environment and install the dependencies required to run an API Source connector: + +```bash +cd ../../connectors/source-survey-monkey-demo +poetry install --with dev +``` + +Let's verify the unit tests pass + +```bash +poetry run pytest unit_tests +``` + +And the check operation fails as expected + +```bash +poetry run source-survey-monkey-demo check --config secrets/config.json +``` + +It should return a failed connection status + +```json +{ + "type": "CONNECTION_STATUS", + "connectionStatus": { + "status": "FAILED", + "message": "Config validation error: 'TODO' is a required property" + } +} +``` + +The discover operation should also fail as expected + +```bash +poetry run source-survey-monkey-demo discover --config secrets/config.json +``` + +It should fail because `TODO' is a required property` + +The read operation should also fail as expected + +```bash +poetry run source-survey-monkey-demo read --config secrets/config.json --catalog integration_tests/configured_catalog.json +``` + +It should fail because `TODO' is a required property` + +We're ready to start development. In the [next section](./2-reading-a-page.md), we'll read a page of +records from the surveys endpoint. diff --git a/docs/connector-development/tutorials/custom-python-connector/2-reading-a-page.md b/docs/connector-development/tutorials/custom-python-connector/2-reading-a-page.md new file mode 100644 index 0000000000000..c3bd7f223c3c6 --- /dev/null +++ b/docs/connector-development/tutorials/custom-python-connector/2-reading-a-page.md @@ -0,0 +1,344 @@ +# Reading a page + +In this section, we'll read a single page of records from the surveys endpoint. + +## Write a failing test that reads a single page + +We'll start by writing a failing integration test. + +Create a file `unit_tests/integration/test_surveys.py` + +```bash +mkdir unit_tests/integration +touch unit_tests/integration/test_surveys.py +code . +``` + +Copy this template to +`airbyte-integrations/connectors/source-survey-monkey-demo/unit_tests/integration/test_surveys.py` + +```python +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from datetime import datetime, timedelta, timezone +from typing import Any, Dict, Mapping, Optional +from unittest import TestCase + +import freezegun +from airbyte_cdk.sources.source import TState +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, read +from airbyte_cdk.test.mock_http import HttpMocker, HttpRequest, HttpResponse +from airbyte_protocol.models import ConfiguredAirbyteCatalog, SyncMode +from source_survey_monkey_demo import SourceSurveyMonkeyDemo + +_A_CONFIG = { + +} +_NOW = + +@freezegun.freeze_time(_NOW.isoformat()) +class FullRefreshTest(TestCase): + + @HttpMocker() + def test_read_a_single_page(self, http_mocker: HttpMocker) -> None: + + http_mocker.get( + HttpRequest(url=), + HttpResponse(body=, status_code=) + ) + + output = self._read(_A_CONFIG, _configured_catalog(, SyncMode.full_refresh)) + + assert len(output.records) == 2 + + def _read(self, config: Mapping[str, Any], configured_catalog: ConfiguredAirbyteCatalog, expecting_exception: bool = False) -> EntrypointOutput: + return _read(config, configured_catalog=configured_catalog, expecting_exception=expecting_exception) + +def _read( + config: Mapping[str, Any], + configured_catalog: ConfiguredAirbyteCatalog, + state: Optional[Dict[str, Any]] = None, + expecting_exception: bool = False +) -> EntrypointOutput: + return read(_source(configured_catalog, config, state), config, configured_catalog, state, expecting_exception) + + +def _configured_catalog(stream_name: str, sync_mode: SyncMode) -> ConfiguredAirbyteCatalog: + return CatalogBuilder().with_stream(stream_name, sync_mode).build() + + +def _source(catalog: ConfiguredAirbyteCatalog, config: Dict[str, Any], state: Optional[TState]) -> SourceSurveyMonkeyDemo: + return SourceSurveyMonkeyDemo() +``` + +Most of this code is boilerplate. The most interesting section is the test + +```python + @HttpMocker() + def test_read_a_single_page(self, http_mocker: HttpMocker) -> None: + + http_mocker.get( + HttpRequest(url=), + HttpResponse(body=, status_code=) + ) + + output = self._read(_A_CONFIG, _configured_catalog(, SyncMode.full_refresh)) + + assert len(output.records) == 2 +``` + +`http_mocker.get` is used to register mocked requests and responses. You can specify the URL, query +params, and request headers the connector is expected to send and mock the response that should be +returned by the server to implement fast integration test that can be used to verify the connector's +behavior without the need to reach the API. This allows the tests to be fast and reproducible. + +Now, we'll implement a first test verifying the connector will send a request to the right endpoint, +with the right parameter, and verify that records are extracted from the data field of the response. + +```python +_A_CONFIG = { + "access_token": "access_token" +} +_NOW = datetime.now(timezone.utc) + +@freezegun.freeze_time(_NOW.isoformat()) +class FullRefreshTest(TestCase): + + @HttpMocker() + def test_read_a_single_page(self, http_mocker: HttpMocker) -> None: + + http_mocker.get( + HttpRequest(url="https://api.surveymonkey.com/v3/surveys?include=response_count,date_created,date_modified,language,question_count,analyze_url,preview,collect_stats"), + HttpResponse(body=""" + { + "data": [ + { + "id": "1234", + "title": "My Survey", + "nickname": "", + "href": "https://api.surveymonkey.com/v3/surveys/1234" + }, + { + "id": "1234", + "title": "My Survey", + "nickname": "", + "href": "https://api.surveymonkey.com/v3/surveys/1234" + } + ], + "per_page": 50, + "page": 1, + "total": 2, + "links": { + "self": "https://api.surveymonkey.com/v3/surveys?page=1&per_page=50" + } +} +""", status_code=200) + ) + + output = self._read(_A_CONFIG, _configured_catalog("surveys", SyncMode.full_refresh)) + + assert len(output.records) == 2 +``` + +Note that the test also required adding the "access_token" field to the config. We'll use this field +to store the API key obtained in the first section of the tutorial. + +The test should fail because the expected request was not sent + +```bash +poetry run pytest unit_tests/integration +``` + +> ValueError: Invalid number of matches for +> `HttpRequestMatcher(request_to_match=ParseResult(scheme='https', netloc='api.surveymonkey.com', path='/v3/surveys', params='', query='include=response_count,date_created,date_modified,language,question_count,analyze_url,preview,collect_stats', fragment='') with headers {} and body None), minimum_number_of_expected_match=1, actual_number_of_matches=0)` + +We'll now remove the unit tests files. Writing unit tests is left as an exercise for the reader, but +it is highly recommended for any productionized connector. + +``` +rm unit_tests/test_incremental_streams.py unit_tests/test_source.py unit_tests/test_streams.py +``` + +Replace the content of +`airbyte-integrations/connectors/source-survey-monkey-demo/source_survey_monkey_demo/source.py` with +the following template: + +```python +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +from abc import ABC +from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple, Union + +import requests +from airbyte_cdk.sources import AbstractSource +from airbyte_cdk.sources.streams import Stream +from airbyte_cdk.sources.streams.http import HttpStream +from airbyte_cdk.sources.streams.http.auth import Oauth2Authenticator, TokenAuthenticator + + +class SurveyMonkeyBaseStream(HttpStream, ABC): + def __init__(self, name: str, path: str, primary_key: Union[str, List[str]], data_field: str, **kwargs: Any) -> None: + self._name = name + self._path = path + self._primary_key = primary_key + self._data_field = data_field + super().__init__(**kwargs) + + url_base = + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + return None + + def request_params( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, any] = None, next_page_token: Mapping[str, Any] = None + ) -> MutableMapping[str, Any]: + return {"include": "response_count,date_created,date_modified,language,question_count,analyze_url,preview,collect_stats"} + + def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: + response_json = response.json() + if self._data_field: + yield from response_json.get(self._data_field, []) + else: + yield from response_json + + @property + def name(self) -> str: + return self._name + + def path( + self, + *, + stream_state: Optional[Mapping[str, Any]] = None, + stream_slice: Optional[Mapping[str, Any]] = None, + next_page_token: Optional[Mapping[str, Any]] = None, + ) -> str: + return self._path + + @property + def primary_key(self) -> Optional[Union[str, List[str], List[List[str]]]]: + return self._primary_key + + +# Source +class SourceSurveyMonkeyDemo(AbstractSource): + def check_connection(self, logger, config) -> Tuple[bool, any]: + return True, None + + def streams(self, config: Mapping[str, Any]) -> List[Stream]: + auth = + return [SurveyMonkeyBaseStream(name=, path=, primary_key=, data_field=, authenticator=auth)] +``` + +:::info This template restructures the code so its easier to extend. Specifically, it provides a +base class that can be extended with composition instead of inheritance, which is generally less +error prone. + +::: + +Then set the URL base + +```python +url_base = "https://api.surveymonkey.com" +``` + +Set the query parameters: + +```python + def request_params( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, any] = None, next_page_token: Mapping[str, Any] = None + ) -> MutableMapping[str, Any]: + return {"include": "response_count,date_created,date_modified,language,question_count,analyze_url,preview,collect_stats"} +``` + +and configure the authenticator, the name, the path, and the primary key + +```python + def streams(self, config: Mapping[str, Any]) -> List[Stream]: + auth = TokenAuthenticator(token=config["access_token"]) + return [SurveyMonkeyBaseStream(name="surveys", path="v3/surveys", primary_key="id", data_field="data", authenticator=auth)] +``` + +We'll now update the +[connector specification](../../../understanding-airbyte/airbyte-protocol.md#actor-specification). +We'll add the access_token as a required property, making sure to flag it as an `airbyte_secret` to +ensure the value isn't accidentally leaked, and we'll specify its `order` should be 0 so it shows up +first in the Source setup page. + +```yaml +documentationUrl: https://docsurl.com +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Survey Monkey Demo Spec + type: object + required: + - access_token + properties: + access_token: + type: string + description: "Access token for Survey Monkey API" + order: 0 + airbyte_secret: true +``` + +Let's now rename one of the mocked schema files to `surveys.json` so its used by our new stream, and +remove the second one as it isn't needed. + +``` +mv source_survey_monkey_demo/schemas/customers.json source_survey_monkey_demo/schemas/surveys.json +rm source_survey_monkey_demo/schemas/employees.json +``` + +The two tests should now pass + +``` +poetry run pytest unit_tests/ +``` + +Now fill in the `secrets/config.json` file with your API access token + +```json +{ + "access_token": "" +} +``` + +and update the configured catalog so it knows about the newly created stream: + +```json +{ + "streams": [ + { + "stream": { + "name": "surveys", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} +``` + +We can now run a read command to pull data from the endpoint: + +``` +poetry run source-survey-monkey-demo read --config secrets/config.json --catalog integration_tests/configured_catalog.json +``` + +The connector should've successfully read records. + +```json +{ "type": "LOG", "log": { "level": "INFO", "message": "Read 14 records from surveys stream" } } +``` + +You can also pass in the `--debug` flag to see the real requests and responses sent and received. +It's also recommended to use these real requests as templates for the integration tests as they can +be more accurate the examples from API documentation. + +In the [next section](./3-reading-multiple-pages.md), we'll implement pagination to read all surveys +from the endpoint# Reading a page diff --git a/docs/connector-development/tutorials/custom-python-connector/3-reading-multiple-pages.md b/docs/connector-development/tutorials/custom-python-connector/3-reading-multiple-pages.md new file mode 100644 index 0000000000000..52496ca20bbf4 --- /dev/null +++ b/docs/connector-development/tutorials/custom-python-connector/3-reading-multiple-pages.md @@ -0,0 +1,136 @@ +# Read multiple pages + +In this section, we'll implement pagination to read all the records available in the surveys +endpoint. + +Again, we'll start by writing a failing test for fetching multiple pages of records + +```python + @HttpMocker() + def test_read_multiple_pages(self, http_mocker: HttpMocker) -> None: + + http_mocker.get( + HttpRequest(url="https://api.surveymonkey.com/v3/surveys?include=response_count,date_created,date_modified,language,question_count,analyze_url,preview,collect_stats&per_page=1000"), + HttpResponse(body=""" + { + "data": [ + { + "id": "1234", + "title": "My Survey", + "nickname": "", + "href": "https://api.surveymonkey.com/v3/surveys/1234" + } + ], + "per_page": 50, + "page": 1, + "total": 2, + "links": { + "self": "https://api.surveymonkey.com/v3/surveys?page=1&per_page=50", + "next": "https://api.surveymonkey.com/v3/surveys?include=response_count,date_created,date_modified,language,question_count,analyze_url,preview,collect_stats&per_page=1000&page=2" + } +} +""", status_code=200) + ) + http_mocker.get( + HttpRequest(url="https://api.surveymonkey.com/v3/surveys?include=response_count,date_created,date_modified,language,question_count,analyze_url,preview,collect_stats&per_page=1000&page=2"), + HttpResponse(body=""" + { + "data": [ + { + "id": "5678", + "title": "My Survey", + "nickname": "", + "href": "https://api.surveymonkey.com/v3/surveys/1234" + } + ], + "per_page": 50, + "page": 1, + "total": 2, + "links": { + "self": "https://api.surveymonkey.com/v3/surveys?page=1&per_page=50" + } +} +""", status_code=200) + ) + + output = self._read(_A_CONFIG, _configured_catalog("surveys", SyncMode.full_refresh)) + + assert len(output.records) == 2 +``` + +These tests now have a lot of duplications because we keep pasting the same response templates. You +can look at the +[source-stripe connector for an example of how this can be DRY'd](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_cards.py). + +The test should fail because the request wasn't matched: + +```bash +poetry run pytest unit_tests +``` + +> ValueError: Invalid number of matches for +> `HttpRequestMatcher(request_to_match=ParseResult(scheme='https', netloc='api.surveymonkey.com', +> path='/v3/surveys', params='', query='page=2&per_page=100', fragment='') + +First, we'll update the request parameters to only be set if this is not a request. If submitting a +paginated request, we'll use the parameters coming from the response. + +```python + def request_params( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, any] = None, next_page_token: Mapping[str, Any] = None + ) -> MutableMapping[str, Any]: + if next_page_token: + return urlparse(next_page_token["next_url"]).query + else: + return {"include": "response_count,date_created,date_modified,language,question_count,analyze_url,preview,collect_stats", + "per_page": _PAGE_SIZE} +``` + +Then we'll extract the next_page_token from the response + +```python + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + links = response.json().get("links", {}) + if "next" in links: + return {"next_url": links["next"]} + else: + return {} +``` + +The test should now pass. We won't write more integration tests in this tutorial, but they are +strongly recommended for any connector used in production. + +```bash +poetry run pytest unit_tests +``` + +We'll try reading + +```bash +poetry run source-survey-monkey-demo read --config secrets/config.json --catalog integration_tests/configured_catalog.json +``` + +There might not be enough records in your account to trigger the pagination. + +It might be easier to test pagination by forcing the connector to only fetch one record per page: + +``` + _PAGE_SIZE: int = 1 +``` + +and reading again + +```bash +poetry run source-survey-monkey-demo read --config secrets/config.json --catalog integration_tests/configured_catalog.json +``` + +All records should be read now. + +Change the \_PAGE_SIZE back to 1000: + +``` + _PAGE_SIZE: int = 1000 +``` + +In the [next section](./4-check-and-error-handling.md), we'll implement the check operation, and +improve the error handling. diff --git a/docs/connector-development/tutorials/custom-python-connector/4-check-and-error-handling.md b/docs/connector-development/tutorials/custom-python-connector/4-check-and-error-handling.md new file mode 100644 index 0000000000000..e28761ed67ec2 --- /dev/null +++ b/docs/connector-development/tutorials/custom-python-connector/4-check-and-error-handling.md @@ -0,0 +1,79 @@ +# Check and error handling + +In this section, we'll implement the check operation, and implement error handling to surface the +user-friendly messages when failing due to authentication errors. + +Let's first implement the check operation. + +This operation verifies that the input configuration supplied by the user can be used to connect to +the underlying data source. + +Use the following command to run the check operation: + +```bash +poetry run source-survey-monkey-demo check --config secrets/config.json +``` + +The command succeed, but it'll succeed even if the config is invalid. We should modify the check so +it fails if the connector is unable to pull any record a stream. + +We'll do this by trying to read a single record from the stream, and fail the connector could not +read any. + +```python + def check_connection(self, logger, config) -> Tuple[bool, any]: + first_stream = next(iter(self.streams(config))) + + stream_slice = next(iter(first_stream.stream_slices(sync_mode=SyncMode.full_refresh))) + + try: + read_stream = first_stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice=stream_slice) + first_record = None + while not first_record: + first_record = next(read_stream) + if isinstance(first_record, AirbyteMessage): + if first_record.type == "RECORD": + first_record = first_record.record + return True, None + else: + first_record = None + return True, None + except Exception as e: + return False, f"Unable to connect to the API with the provided credentials - {str(e)}" +``` + +Next, we'll improve the error handling. + +First, we'll disable the availability strategy. Availability strategies are a legacy concept used to +filter out streams that might not be available given a user's permissions. + +```python + @property + def availability_strategy(self) -> Optional[AvailabilityStrategy]: + return None + +``` + +Instead of using an availability strategy, we'll raise a config error if we're unable to +authenticate: + +```python + def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: + response_json = response.json() + # https://api.surveymonkey.com/v3/docs?shell#error-codes + if response_json.get("error") in (1010, 1011, 1012, 1013, 1014, 1015, 1016, 1017, 1018): + internal_message = "Unauthorized credentials. Response: {response_json}" + external_message = "Can not get metadata with unauthorized credentials. Try to re-authenticate in source settings." + raise AirbyteTracedException( + message=external_message, internal_message=internal_message, failure_type=FailureType.config_error + ) + elif self._data_field: + yield from response_json[self._data_field] + else: + yield from response_json +``` + +The `external_message` will be displayed to the end-user, while the `internal_message` will be +logged for troubleshooting purposes. + +In the [next section](./5-discover.md), we'll implement the discover operation. diff --git a/docs/connector-development/tutorials/custom-python-connector/5-discover.md b/docs/connector-development/tutorials/custom-python-connector/5-discover.md new file mode 100644 index 0000000000000..ad6b04f503abd --- /dev/null +++ b/docs/connector-development/tutorials/custom-python-connector/5-discover.md @@ -0,0 +1,124 @@ +# Discover + +The discover method of the Airbyte Protocol returns an AirbyteCatalog: an object which declares all +the streams output by a connector and their schemas. It also declares the sync modes supported by +the stream (full refresh or incremental). See the +[beginner's guide to the catalog](../../../understanding-airbyte/beginners-guide-to-catalog.md) for +more information. + +Run a discover command: + +```bash +poetry run source-survey-monkey-demo discover --config secrets/config.json +``` + +The command should succeed, but the schema will be wrong: + +```json +{ + "type": "CATALOG", + "catalog": { + "streams": [ + { + "name": "surveys", + "json_schema": { + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { "type": ["null", "string"] }, + "name": { "type": ["null", "string"] }, + "signup_date": { "type": ["null", "string"], "format": "date-time" } + } + }, + "supported_sync_modes": ["full_refresh"], + "source_defined_primary_key": [["id"]] + } + ] + } +} +``` + +We'll need to replace the schema with a json_schema representation of the records yielded by the +stream. + +The easiest way to extract the schema from a HTTP response is to use the Connector Builder. You can +also paste the schema below, which was generated by the Connector Builder: + +```json +{ + "$schema": "http://json-schema.org/schema#", + "properties": { + "analyze_url": { + "type": "string" + }, + "collect_stats": { + "properties": { + "status": { + "properties": { + "open": { + "type": "number" + } + }, + "type": "object" + }, + "total_count": { + "type": "number" + }, + "type": { + "properties": { + "weblink": { + "type": "number" + } + }, + "type": "object" + } + }, + "type": "object" + }, + "date_created": { + "type": "string" + }, + "date_modified": { + "type": "string" + }, + "href": { + "type": "string" + }, + "id": { + "type": "string" + }, + "language": { + "type": "string" + }, + "nickname": { + "type": "string" + }, + "preview": { + "type": "string" + }, + "question_count": { + "type": "number" + }, + "response_count": { + "type": "number" + }, + "title": { + "type": "string" + } + }, + "type": "object" +} +``` + +:::info + +If the connector you're building has a dynamic schema, you'll need to overwrite the +`AbstractSource::streams`. + +::: + +--- + +The three connector operations work as expected. You can now +[upload your connector to your Airbyte instance](https://docs.airbyte.com/operator-guides/using-custom-connectors).In +the [next section](6-incremental-reads.md), we'll add the connector to our local Airbyte instance. diff --git a/docs/connector-development/tutorials/custom-python-connector/6-incremental-reads.md b/docs/connector-development/tutorials/custom-python-connector/6-incremental-reads.md new file mode 100644 index 0000000000000..a8579cb094fc4 --- /dev/null +++ b/docs/connector-development/tutorials/custom-python-connector/6-incremental-reads.md @@ -0,0 +1,178 @@ +# Incremental reads + +In this section, we'll add support to read data incrementally. While this is optional, you should +implement it whenever possible because reading in incremental mode allows users to save time and +money by only reading new data. + +We'll first need to implement three new methods on the base stream class + +The `cursor_field` property indicates that records produced by the stream have a cursor that can be +used to identify it in the timeline. + +```python + @property + def cursor_field(self) -> Optional[str]: + return self._cursor_field +``` + +The `get_updated_state` method is used to update the stream's state. We'll set its value to the +maximum between the current state's value and the value extracted from the record. + +```python + def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: + state_value = max(current_stream_state.get(self.cursor_field, 0), datetime.datetime.strptime(latest_record.get(self._cursor_field, ""), _INCOMING_DATETIME_FORMAT).timestamp()) + return {self._cursor_field: state_value} +``` + +Note that we're converting the datetimes to unix epoch. We could've also chosen to persist it as an +ISO date. You can use any format that works best for you. Integers are easy to work with so that's +what we'll do for this tutorial. + +Then we'll implement the `stream_slices` method, which will be used to partition the stream into +time windows. While this isn't mandatory since we could omit the `end_modified_at` parameter from +our requests and try to read all new records at once, it is preferable to partition the stream +because it enables checkpointing. + +This might mean the connector will make more requests than necessary during the initial sync, and +this is most visible when working with a sandbox or an account that does not have many records. The +upside are worth the tradeoff because the additional cost is negligible for accounts that have many +records, and the time cost will be entirely mitigated in a follow up section when we fetch +partitions concurrently. + +```python + + def stream_slices(self, stream_state: Mapping[str, Any] = None, **kwargs) -> Iterable[Optional[Mapping[str, any]]]: + start_ts = stream_state.get(self._cursor_field, _START_DATE) if stream_state else _START_DATE + now_ts = datetime.datetime.now().timestamp() + if start_ts >= now_ts: + yield from [] + return + for start, end in self.chunk_dates(start_ts, now_ts): + yield {"start_date": start, "end_date": end} + + def chunk_dates(self, start_date_ts: int, end_date_ts: int) -> Iterable[Tuple[int, int]]: + step = int(_SLICE_RANGE * 24 * 60 * 60) + after_ts = start_date_ts + while after_ts < end_date_ts: + before_ts = min(end_date_ts, after_ts + step) + yield after_ts, before_ts + after_ts = before_ts + 1 +``` + +Note that we're introducing the concept of a start date. You might have to fiddle to find the +earliest start date that can be queried. You can also choose to make the start date configurable by +the end user. This will make your life simpler, at the cost of pushing the complexity to the +end-user. + +We'll now update the query params. In addition the passing the page size and the include field, +we'll pass in the `start_modified_at` and `end_modified_at` which can be extracted from the +`stream_slice` parameter. + +```python + def request_params( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, any] = None, next_page_token: Mapping[str, Any] = None + ) -> MutableMapping[str, Any]: + if next_page_token: + return urlparse(next_page_token["next_url"]).query + else: + return { + "per_page": _PAGE_SIZE, "include": "response_count,date_created,date_modified,language,question_count,analyze_url,preview,collect_stats", + "start_modified_at": datetime.datetime.strftime(datetime.datetime.fromtimestamp(stream_slice["start_date"]), _OUTGOING_DATETIME_FORMAT), + "end_modified_at": datetime.datetime.strftime(datetime.datetime.fromtimestamp(stream_slice["end_date"]), _OUTGOING_DATETIME_FORMAT) + } +``` + +And add the following constants to the source.py file + +```python +_START_DATE = datetime.datetime(2020,1,1, 0,0,0).timestamp() +_SLICE_RANGE = 365 +_OUTGOING_DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ" +_INCOMING_DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S" +``` + +Notice the outgoing and incoming date formats are different! + +Now, update the stream constructor so it accepts a cursor_field parameter. + +```python +class SurveyMonkeyBaseStream(HttpStream, ABC): + def __init__(self, name: str, path: str, primary_key: Union[str, List[str]], data_field: Optional[str], cursor_field: Optional[str], +**kwargs: Any) -> None: + self._name = name + self._path = path + self._primary_key = primary_key + self._data_field = data_field + self._cursor_field = cursor_field + super().__init__(**kwargs) +``` + +And update the stream's creation: + +```python +return [SurveyMonkeyBaseStream(name="surveys", path="/v3/surveys", primary_key="id", data_field="data", cursor_field="date_modified", authenticator=auth)] +``` + +Finally, modify the configured catalog to run the stream in incremental mode: + +```json +{ + "streams": [ + { + "stream": { + "name": "surveys", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "overwrite" + } + ] +} +``` + +Run another read operation. The state messages should include the cursor: + +```json +{ + "type": "STATE", + "state": { + "type": "STREAM", + "stream": { + "stream_descriptor": { "name": "surveys", "namespace": null }, + "stream_state": { "date_modified": 1623348420.0 } + }, + "sourceStats": { "recordCount": 0.0 } + } +} +``` + +And update the sample state to a timestamp earlier than the first record. There should be fewer +records + +```json +[ + { + "type": "STREAM", + "stream": { + "stream_descriptor": { + "name": "surveys" + }, + "stream_state": { + "date_modified": 1711753326 + } + } + } +] +``` + +Run another read command, passing the `--state` flag: + +```bash +poetry run source-survey-monkey-demo read --config secrets/config.json --catalog integration_tests/configured_catalog.json --state integration_tests/sample_state.json +``` + +Only more recent records should be read. + +In the [next section](7-reading-from-a-subresource.md), we'll implement the survey responses stream, +which depends on the surveys stream. diff --git a/docs/connector-development/tutorials/custom-python-connector/7-reading-from-a-subresource.md b/docs/connector-development/tutorials/custom-python-connector/7-reading-from-a-subresource.md new file mode 100644 index 0000000000000..5b2886f06ff00 --- /dev/null +++ b/docs/connector-development/tutorials/custom-python-connector/7-reading-from-a-subresource.md @@ -0,0 +1,197 @@ +# Reading from a subresource + +In this section, we'll implement a stream for the survey responses stream. This stream structure is +a little different because it depends on the surveys stream. + +Start by creating a new base class for substreams: + +```python +class SurveyMonkeySubstream(HttpStream, ABC): + + def __init__(self, name: str, path: str, primary_key: Union[str, List[str]], parent_stream: Stream, **kwargs: Any) -> None: + self._name = name + self._path = path + self._primary_key = primary_key + self._parent_stream = parent_stream + super().__init__(**kwargs) + + url_base = "https://api.surveymonkey.com" + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + links = response.json().get("links", {}) + if "next" in links: + return {"next_url": links["next"]} + else: + return {} + + def request_params( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, any] = None, next_page_token: Mapping[str, Any] = None + ) -> MutableMapping[str, Any]: + if next_page_token: + return urlparse(next_page_token["next_url"]).query + else: + return {"per_page": _PAGE_SIZE} + + def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: + yield from response.json().get("data", []) + + @property + def name(self) -> str: + return self._name + + def path( + self, + *, + stream_state: Optional[Mapping[str, Any]] = None, + stream_slice: Optional[Mapping[str, Any]] = None, + next_page_token: Optional[Mapping[str, Any]] = None, + ) -> str: + try: + return self._path.format(stream_slice=stream_slice) + except Exception as e: + raise e + + @property + def primary_key(self) -> Optional[Union[str, List[str], List[List[str]]]]: + return self._primary_key + + def stream_slices(self, stream_state: Mapping[str, Any] = None, **kwargs) -> Iterable[Optional[Mapping[str, any]]]: + for _slice in self._parent_stream.stream_slices(): + for parent_record in self._parent_stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice=_slice): + yield parent_record +``` + +This class is similar to the base class, but it does not support incremental reads, and its stream +slices are generated by reading records from a parent stream. This is how we'll ensure we always +read all survey responses. + +Note that using this approach, the connector will checkpoint after reading responses for each +survey. + +Don't forget to update the `streams` method to also instantiate the surveys responses stream: + +```python + def streams(self, config: Mapping[str, Any]) -> List[Stream]: + auth = TokenAuthenticator(token=config["access_token"]) + surveys = SurveyMonkeyBaseStream(name="surveys", path="/v3/surveys", primary_key="id", data_field="data", cursor_field="date_modified", authenticator=auth) + survey_responses = SurveyMonkeySubstream(name="survey_responses", path="/v3/surveys/{stream_slice[id]}/responses/", primary_key="id", authenticator=auth, parent_stream=surveys) + return [ + surveys, + survey_responses + ] +``` + +Before moving on, we'll enable request caching on the surveys stream to avoid fetching the records +both for the surveys stream and for the survey responses stream. You can do this by setting the +`use_cache` property to true on the `SurveyMonkeyBaseStream` class. + +``` + @property + def use_cache(self) -> bool: + return True +``` + +Now add the stream to the configured catalog: + +```json +{ + "streams": [ + { + "stream": { + "name": "surveys", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "survey_responses", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} +``` + +and create a new schema file in `source_survey_monkey_demo/schemas/survey_responses.json`. You can +use the connector builder to generate the schema, or paste the one provided below: + +```json +{ + "$schema": "http://json-schema.org/schema#", + "properties": { + "analyze_url": { + "type": ["string", "null"] + }, + "collect_stats": { + "properties": { + "status": { + "properties": { + "open": { + "type": ["number", "null"] + } + }, + "type": ["object", "null"] + }, + "total_count": { + "type": ["number", "null"] + }, + "type": { + "properties": { + "weblink": { + "type": ["number", "null"] + } + }, + "type": ["object", "null"] + } + }, + "type": ["object", "null"] + }, + "date_created": { + "type": ["string", "null"] + }, + "date_modified": { + "type": ["string", "null"] + }, + "href": { + "type": ["string", "null"] + }, + "id": { + "type": ["string", "null"] + }, + "language": { + "type": ["string", "null"] + }, + "nickname": { + "type": ["string", "null"] + }, + "preview": { + "type": ["string", "null"] + }, + "question_count": { + "type": ["number", "null"] + }, + "response_count": { + "type": ["number", "null"] + }, + "title": { + "type": ["string", "null"] + } + }, + "type": "object" +} +``` + +You should now be able to read your survey responses: + +``` +poetry run source-survey-monkey-demo read --config secrets/config.json --catalog integration_tests/configured_catalog.json +``` + +In the [next section](8-concurrency.md) we'll update the connector so it reads stream slices +concurrently. diff --git a/docs/connector-development/tutorials/custom-python-connector/8-concurrency.md b/docs/connector-development/tutorials/custom-python-connector/8-concurrency.md new file mode 100644 index 0000000000000..b624492743f0b --- /dev/null +++ b/docs/connector-development/tutorials/custom-python-connector/8-concurrency.md @@ -0,0 +1,209 @@ +# Concurrent + +In this section, we'll improve the connector performance by reading multiple stream slices in +parallel. + +Let's update the source. The bulk of the change is changing its parent class to +`ConcurrentSourceAdapter`, and updating its `__init__` method so it's properly initialized. This +requires a little bit of boilerplate: + +```python +class SourceSurveyMonkeyDemo(ConcurrentSourceAdapter): + message_repository = InMemoryMessageRepository(Level(AirbyteLogFormatter.level_mapping[_logger.level])) + + def __init__(self, config: Optional[Mapping[str, Any]], state: Optional[Mapping[str, Any]]): + if config: + concurrency_level = min(config.get("num_workers", _DEFAULT_CONCURRENCY), _MAX_CONCURRENCY) + else: + concurrency_level = _DEFAULT_CONCURRENCY + _logger.info(f"Using concurrent cdk with concurrency level {concurrency_level}") + concurrent_source = ConcurrentSource.create( + concurrency_level, concurrency_level // 2, _logger, self._slice_logger, self.message_repository + ) + super().__init__(concurrent_source) + self._config = config + self._state = state + + def _get_slice_boundary_fields(self, stream: Stream, state_manager: ConnectorStateManager) -> Optional[Tuple[str, str]]: + return ("start_date", "end_date") +``` + +We'll also need to update the `streams` method to wrap the streams in an adapter class to enable +concurrency. + +```python + def streams(self, config: Mapping[str, Any]) -> List[Stream]: + auth = TokenAuthenticator(config["access_token"]) + + survey_stream = SurveyMonkeyBaseStream(name="surveys", path="/v3/surveys", primary_key="id", authenticator=auth, cursor_field="date_modified") + synchronous_streams = [ + survey_stream, + SurveyMonkeySubstream(name="survey_responses", path="/v3/surveys/{stream_slice[id]}/responses/", primary_key="id", authenticator=auth, parent_stream=survey_stream) + ] + state_manager = ConnectorStateManager(stream_instance_map={s.name: s for s in synchronous_streams}, state=self._state) + + configured_streams = [] + + for stream in synchronous_streams: + + if stream.cursor_field: + cursor_field = CursorField(stream.cursor_field) + legacy_state = state_manager.get_stream_state(stream.name, stream.namespace) + cursor = ConcurrentCursor( + stream.name, + stream.namespace, + legacy_state, + self.message_repository, + state_manager, + stream.state_converter, + cursor_field, + self._get_slice_boundary_fields(stream, state_manager), + _START_DATE, + ) + else: + cursor = FinalStateCursor(stream.name, stream.namespace, self.message_repository) + configured_streams.append ( + StreamFacade.create_from_stream(stream, + self, + _logger, + legacy_state, + cursor) + ) + return configured_streams +``` + +The most interesting piece from this block is the use of `ConcurrentCursor` to support concurrent +state management. + +The survey responses stream does not support incremental reads, so it's using a `FinalStateCursor` +instead. The rest of the code change is mostly boilerplate. + +We'll also add a state converter to the `SurveyMonkeyBaseStream` to describe how the state cursor is +formatted. We'll use the `EpochValueConcurrentStreamStateConverter` since the `get_updated_state` +method returns the cursor as a timestamp + +``` +state_converter = EpochValueConcurrentStreamStateConverter() +``` + +Next we'll add a few missing constants: + +``` +_DEFAULT_CONCURRENCY = 10 +_MAX_CONCURRENCY = 10 +_RATE_LIMIT_PER_MINUTE = 120 +_logger = logging.getLogger("airbyte") +``` + +--- + +:::info + +The substream isn't entirely concurrent because its stream_slices definition reads records from the +parent stream concurrently: + +```python + def stream_slices(self, stream_state: Mapping[str, Any] = None, **kwargs) -> Iterable[Optional[Mapping[str, any]]]: + for _slice in self._parent_stream.stream_slices(): + for parent_record in self._parent_stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice=_slice): + yield parent_record +``` + +This can be solved by implementing the connector using constructs from the concurrent CDK directly +instead of wrapping synchronous streams in an adapter. This is left outside of the scope of this +tutorial because no production connectors currently implement this. + +::: + +We'll now enable throttling to avoid going over the API rate limit. You can do this by configuring a +moving window rate limit policy for the `SurveyMonkeyBaseStream` class: + +```python +class SurveyMonkeyBaseStream(HttpStream, ABC): + def __init__(self, name: str, path: str, primary_key: Union[str, List[str]], data_field: Optional[str], cursor_field: Optional[str], +**kwargs: Any) -> None: + self._name = name + self._path = path + self._primary_key = primary_key + self._data_field = data_field + self._cursor_field = cursor_field + super().__init__(**kwargs) + + policies = [ + MovingWindowCallRatePolicy( + rates=[Rate(limit=_RATE_LIMIT_PER_MINUTE, interval=datetime.timedelta(minutes=1))], + matchers=[], + ), + ] + api_budget = HttpAPIBudget(policies=policies) + super().__init__(api_budget=api_budget, **kwargs) +``` + +Finally, update the `run.py` file to properly instantiate the class. Most of this code is +boilerplate code and isn't specific to the Survey Monkey connector. + +```python +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +import sys +import sys +import traceback +from datetime import datetime +from typing import List + +from airbyte_cdk.entrypoint import AirbyteEntrypoint, launch +from airbyte_cdk.models import AirbyteErrorTraceMessage, AirbyteMessage, AirbyteTraceMessage, TraceType, Type + +from airbyte_cdk.entrypoint import launch + +from .source import SourceSurveyMonkeyDemo +def _get_source(args: List[str]): + config_path = AirbyteEntrypoint.extract_config(args) + state_path = AirbyteEntrypoint.extract_state(args) + try: + return SourceSurveyMonkeyDemo( + SourceSurveyMonkeyDemo.read_config(config_path) if config_path else None, + SourceSurveyMonkeyDemo.read_state(state_path) if state_path else None, + ) + except Exception as error: + print( + AirbyteMessage( + type=Type.TRACE, + trace=AirbyteTraceMessage( + type=TraceType.ERROR, + emitted_at=int(datetime.now().timestamp() * 1000), + error=AirbyteErrorTraceMessage( + message=f"Error starting the sync. This could be due to an invalid configuration or catalog. Please contact Support for assistance. Error: {error}", + stack_trace=traceback.format_exc(), + ), + ), + ).json() + ) + return None + + + +def run(): + args = sys.argv[1:] + source = _get_source(args) + launch(source, args) +``` + +You can now run a read operation again. The connector will read multiple partitions concurrently +instead of looping through all of them sequentially. + +```bash +poetry run source-survey-monkey-demo read --config secrets/config.json --catalog integration_tests/configured_catalog.json +``` + +We're now done! We implemented a Python connector covering many features: + +- Fast and reproducible integration tests +- Authentication errors are detected and labeled as such +- One stream supports incremental reads +- One stream depends on another stream + +The final code can be found [here](https://github.com/girarda/airbyte/tree/survey_monkey_demo) diff --git a/docs/connector-development/tutorials/profile-java-connector-memory.md b/docs/connector-development/tutorials/profile-java-connector-memory.md deleted file mode 100644 index 608e234f6b680..0000000000000 --- a/docs/connector-development/tutorials/profile-java-connector-memory.md +++ /dev/null @@ -1,119 +0,0 @@ -# Profile Java Connector Memory Usage - -This tutorial demos how to profile the memory usage of a Java connector with Visual VM. Such -profiling can be useful when we want to debug memory leaks, or optimize the connector's memory -footprint. - -The example focuses on docker deployment, because it is more straightforward. It is also possible to -apply the same procedure to Kubernetes deployments. - -## Prerequisite - -- [Docker](https://www.docker.com/products/personal) running locally. -- [VisualVM](https://visualvm.github.io/) preinstalled. - -## Step-by-Step - -1. Enable JMX in `airbyte-integrations/connectors//build.gradle`, and expose it on - port 6000. The port is chosen arbitrary, and can be port number that's available. - - - `` examples: `source-mysql`, `source-github`, `destination-snowflake`. - - ```groovy - application { - mainClass = 'io.airbyte.integrations.' - applicationDefaultJvmArgs = [ - '-XX:+ExitOnOutOfMemoryError', - '-XX:MaxRAMPercentage=75.0', - - // add the following JVM arguments to enable JMX: - '-XX:NativeMemoryTracking=detail', - '-XX:+UsePerfData', - '-Djava.rmi.server.hostname=localhost', - '-Dcom.sun.management.jmxremote=true', - '-Dcom.sun.management.jmxremote.port=6000', - "-Dcom.sun.management.jmxremote.rmi.port=6000", - '-Dcom.sun.management.jmxremote.local.only=false', - '-Dcom.sun.management.jmxremote.authenticate=false', - '-Dcom.sun.management.jmxremote.ssl=false', - - // optionally, add a max heap size to limit the memory usage - '-Xmx2000m', - ] - } - ``` - -2. Modify `airbyte-integrations/connectors//Dockerfile` to expose the JMX port. - - ```dockerfile - // optionally install procps to enable the ps command in the connector container - RUN apt-get update && apt-get install -y procps && rm -rf /var/lib/apt/lists/* - - // expose the same JMX port specified in the previous step - EXPOSE 6000 - ``` - -3. Expose the same port in - `airbyte-workers/src/main/java/io/airbyte/workers/process/DockerProcessFactory.java`. - - ```java - // map local 6000 to the JMX port from the container - if (imageName.startsWith("airbyte/")) { - LOGGER.info("Exposing image {} port 6000", imageName); - cmd.add("-p"); - cmd.add("6000:6000"); - } - ``` - - Disable the [`host` network mode](https://docs.docker.com/network/host/) by _removing_ the - following code block in the same file. This is necessary because under the `host` network mode, - published ports are discarded. - - ```java - if (networkName != null) { - cmd.add("--network"); - cmd.add(networkName); - } - ``` - - (This - [commit](https://github.com/airbytehq/airbyte/pull/10394/commits/097ec57869a64027f5b7858aa8bb9575844e8b76) - can be used as a reference. It reverts them. So just do the opposite.) - -4. Build and launch Airbyte locally. It is necessary to build it because we have modified the - `DockerProcessFactory.java`. - - ```sh - SUB_BUILD=PLATFORM ./gradlew build -x test - VERSION=dev docker compose up - ``` - -5. Build the connector to be profiled locally. It will create a `dev` version local image: - `airbyte/:dev`. - - ```sh - ./gradlew :airbyte-integrations:connectors::airbyteDocker - ``` - -6. Connect to the launched local Airbyte server at `localhost:8000`, go to the `Settings` page, and - change the version of the connector to be profiled to `dev` which was just built in the previous - step. - -7. Create a connection using the connector to be profiled. - - - The `Replication frequency` of this connector should be `manual` so that we can control when it - starts. - - We can use the e2e test connectors as either the source or destination for convenience. - - The e2e test connectors are usually very reliable, and requires little configuration. - - For example, if we are profiling a source connector, create an e2e test destination at the - other end of the connection. - -8. Profile the connector in question. - - - Launch a data sync run. - - After the run starts, open Visual VM, and click `File` / `Add JMX Connection...`. A modal will - show up. Type in `localhost:6000`, and click `OK`. - - Now we can see a new connection shows up under the `Local` category on the left, and the - information about the connector's JVM gets retrieved. - - ![visual vm screenshot](https://visualvm.github.io/images/visualvm_screenshot_20.png) diff --git a/docs/connector-development/tutorials/the-hard-way/adding-incremental-sync.md b/docs/connector-development/tutorials/the-hard-way/adding-incremental-sync.md index f3d3be401ed64..1f1d01e435f84 100644 --- a/docs/connector-development/tutorials/the-hard-way/adding-incremental-sync.md +++ b/docs/connector-development/tutorials/the-hard-way/adding-incremental-sync.md @@ -323,7 +323,7 @@ That's all you need to do to add incremental functionality to the stock ticker S You can deploy the new version of your connector simply by running: ```bash -./gradlew clean :airbyte-integrations:connectors:source-stock-ticker-api:build +airbyte-ci connectors --name source-stock-ticker-api build ``` Bonus points: go to Airbyte UI and reconfigure the connection to use incremental sync. diff --git a/docs/connector-development/tutorials/the-hard-way/build-a-connector-the-hard-way.md b/docs/connector-development/tutorials/the-hard-way/build-a-connector-the-hard-way.md index e1713854eb4ff..876898b31441c 100644 --- a/docs/connector-development/tutorials/the-hard-way/build-a-connector-the-hard-way.md +++ b/docs/connector-development/tutorials/the-hard-way/build-a-connector-the-hard-way.md @@ -1093,7 +1093,7 @@ Airbyte's build system builds and tags your connector's image correctly by defau connector's standard `build` process. **From the Airbyte repo root**, run: ```bash -./gradlew clean :airbyte-integrations:connectors:source-stock-ticker-api:build +airbyte-ci connectors --name source-stock-ticker-api build ``` This is the equivalent of running `docker build . -t airbyte/source-stock-ticker-api:dev` from the @@ -1249,5 +1249,5 @@ contributor now ;\) ## Additional guides -- [Building a Python Source](https://docs.airbyte.com/connector-development/tutorials/building-a-python-source.md) +- [Building a Python Source](https://docs.airbyte.com/connector-development/tutorials/cdk-speedrun) - [Building a Java Destination](https://docs.airbyte.com/connector-development/tutorials/building-a-java-destination) diff --git a/docs/contributing-to-airbyte/README.md b/docs/contributing-to-airbyte/README.md index 59b7d3e59599b..aa36724dd24b3 100644 --- a/docs/contributing-to-airbyte/README.md +++ b/docs/contributing-to-airbyte/README.md @@ -44,7 +44,7 @@ The usual workflow of code contribution is: 5. Push your local branch to your fork. 6. Submit a Pull Request so that we can review your changes. 7. [Link an existing Issue](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue) that does not include the `needs triage` label to your Pull Request. A pull request without a linked issue will be closed, otherwise. -8. Write a PR title and description that follows the [Pull Request Handbook](./resources/pull-requests-handbook.md) and [Pull Request Template](https://github.com/airbytehq/airbyte/blob/master/.github/pull_request_template.md). +8. Write a PR title and description that follows the [Pull Request Handbook](./resources/pull-requests-handbook.md). 9. An Airbyte maintainer will trigger the CI tests for you and review the code. 10. Review and respond to feedback and questions by Airbyte maintainers. 11. Merge the contribution. diff --git a/docs/contributing-to-airbyte/change-cdk-connector.md b/docs/contributing-to-airbyte/change-cdk-connector.md index 81fcd8a291c0b..89466149ac4d5 100644 --- a/docs/contributing-to-airbyte/change-cdk-connector.md +++ b/docs/contributing-to-airbyte/change-cdk-connector.md @@ -1,4 +1,4 @@ -# Changes to CDK or Low-Code Connector +# Changes to Python CDK or Low-Code Connector ## Contribution Process diff --git a/docs/contributing-to-airbyte/resources/code-style.md b/docs/contributing-to-airbyte/resources/code-style.md deleted file mode 100644 index 40a14c1632000..0000000000000 --- a/docs/contributing-to-airbyte/resources/code-style.md +++ /dev/null @@ -1,53 +0,0 @@ -# Code Style - -## Configure Java Style for IntelliJ - -First, download the style configuration. - -```text -curl https://raw.githubusercontent.com/google/styleguide/gh-pages/intellij-java-google-style.xml -o ~/Downloads/intellij-java-google-style.xml -``` - -Install it in IntelliJ: - -1. Go to `Preferences > Editor > Code Style` -2. Press the little cog: - 1. `Import Scheme > IntelliJ IDEA code style XML` - 2. Select the file we just downloaded -3. Select `GoogleStyle` in the dropdown -4. Change default `Hard wrap at` in `Wrapping and Braces` tab to **150** -5. Use explicit imports - 1. Under `Preferences > Code Style > Java > Imports` - 1. change `Class count to use import with '*'` to `9999` - 2. change `Names count to use static import with '*'` to `9999` - 2. Under `Preferences > Code Style > Kotlin > Imports` - 1. change `Top Level Symbols` to `Use single name import` - 2. change `Java Statics and Enum Members` to `Use single name import` -6. Add the `final` keyword wherever possible. You can either set this as the default for your IDE or you can set it just for the Airbyte project(s) that you are using - 1. Turn on the inspection. Go into `Preferences > Editor > Inspections` - 1. Search `"Field may be 'final'"` > check the box - 2. Search `"local variable or parameter can be 'final'"` > check the box - 3. Apply the changes - 2. Turn on the auto add final. Go into IntelliJ Preferences - 1. Plugins - install Save Actions if not already installed - 1. If you're running Intellij 2023.1 or higher, the official version may not work. Try manually installing [this fork](https://github.com/fishermans/intellij-plugin-save-actions/releases/tag/v2.6.0) (see [Github issue](https://github.com/dubreuia/intellij-plugin-save-actions/issues/427)). - 2. Go to Save Actions in the preferences [left navigation column](../../assets/docs/save_actions_settings.png) (NOT Tools > Actions on Save -- that is a different tool) - 1. `Activate save actions on save` > check the box - 2. `Active save actions on shortcut` > check the box - 3. `Activate save actions on batch` > check the box - 4. `Add final modifier to field` > check the box - 5. `Add final modifier to local variable or parameter` > check the box - 6. Apply the changes -7. You're done! - -## Source code comments - -It's hard to pin down exactly what to do around source code comments, but there are two \(very subjective\) and rough guidelines: - -**If something is not obvious, write it down**. Examples include: - -* non-trivial class definitions should have docstrings -* magic variables should have comments explaining why those values are used \(e.g: if using a page size of 10 in a connector, describe why if possible. If there is no reason, that's also fine, just mention in a comment\). -* Complicated subroutines/logic which cannot be refactored should have comments explaining what they are doing and why - -**If something is obvious, don't write it down** since it's probably more likely to go out of date. For example, a comment like `x = 42; // sets x to 42` is not adding any new information and is therefore better omitted. diff --git a/docs/contributing-to-airbyte/resources/developing-locally.md b/docs/contributing-to-airbyte/resources/developing-locally.md index 3ddcb437a14b9..7bffa0174eb8b 100644 --- a/docs/contributing-to-airbyte/resources/developing-locally.md +++ b/docs/contributing-to-airbyte/resources/developing-locally.md @@ -3,7 +3,7 @@ The following technologies are required to build Airbyte locally. 1. [`Java 21`](https://jdk.java.net/archive/) -2. `Node 16` +2. `Node 20.` 3. `Python 3.9` 4. `Docker` 5. `Jq` @@ -156,35 +156,7 @@ If you are working in the platform run `SUB_BUILD=PLATFORM ./gradlew format` fro ### Connector -To format an individual connector in python, run the following command in your local `airbyte` repository: - -``` - ./gradlew :airbyte-integrations:connectors::airbytePythonFormat -``` - -For instance: - -``` -./gradlew :airbyte-integrations:connectors:source-s3:airbytePythonFormat -``` - -To format connectors in java, run `./gradlew format` - -### Connector Infrastructure - -Finally, if you are working in any module in `:airbyte-integrations:bases` or `:airbyte-cdk:python`, run the following command in your local `airbyte` repository: - -```bash -SUB_BUILD=CONNECTORS_BASE ./gradlew format -``` - -Note: If you are contributing a Python file without imports or function definitions, place the following comment at the top of your file: - -```python -""" -[FILENAME] includes [INSERT DESCRIPTION OF CONTENTS HERE] -""" -``` +To format your local `airbyte` repository, run `airbyte-ci format fix all`. ### Develop on `airbyte-webapp` diff --git a/docs/contributing-to-airbyte/resources/gradle.md b/docs/contributing-to-airbyte/resources/gradle.md deleted file mode 100644 index 84adf04695da7..0000000000000 --- a/docs/contributing-to-airbyte/resources/gradle.md +++ /dev/null @@ -1,377 +0,0 @@ -# (DEPRECATED) Gradle Cheatsheet - -## Overview - -We have 3 ways of slicing our builds: - -1. **Build Everything**: Including every single connectors. -2. **Build Platform**: Build only modules related to the core platform. -3. **Build Connectors Base**: Build only modules related to code infrastructure for connectors. - -**Build Everything** is really not particularly functional as building every single connector at once is really prone to transient errors. As there are more connectors the chance that there is a transient issue while downloading any single dependency starts to get really high. - -In our CI we run **Build Platform** and **Build Connectors Base**. Then separately, on a regular cadence, we build each connector and run its integration tests. - -We split Build Platform and Build Connectors Base from each other for a few reasons: - -1. The tech stacks are very different. The Platform is almost entirely Java. Because of differing needs around separating environments, the Platform build can be optimized separately from the Connectors one. -2. We want to the iteration cycles of people working on connectors or the platform faster _and_ independent. e.g. Before this change someone working on a Platform feature needs to run formatting on the entire codebase \(including connectors\). This led to a lot of cosmetic build failures that obfuscated actually problems. Ideally a failure on the connectors side should not block progress on the platform side. -3. The lifecycles are different. One can safely release the Platform even if parts of Connectors Base is failing \(and vice versa\). - -Future Work: The next step here is to figure out how to more formally split connectors and platform. Right now we exploit behavior in [settings.gradle](../../settings.gradle) to separate them. This is not a best practice. Ultimately, we want these two builds to be totally separate. We do not know what that will look like yet. - -## Cheatsheet - -Here is a cheatsheet for common gradle commands. - -### List Gradle Tasks - -To view all available tasks: -```text -./gradlew tasks -``` - -To view all tasks available for a given namespace: - -```text -./gradlew :tasks -``` - -for example: - -```text -./gradlew :airbyte-integrations:connectors:source-bigquery:tasks -``` - -### Basic Build Syntax - -Here is the syntax for running gradle commands on the different parts of the code base that we called out above. - -#### Build Everything - -```text -./gradlew -``` - -#### Build Platform - -```text -SUB_BUILD=PLATFORM ./gradlew -``` - -#### Build Connectors Base - -```text -SUB_BUILD=CONNECTORS_BASE ./gradlew -``` - -#### Build CDK - -```text -SUB_BUILD=CDK ./gradlew -``` - -### Build - -In order to "build" the project. This task includes producing all artifacts and running unit tests \(anything called in the `:test` task\). It does _not_ include integration tests \(anything called in the `:integrationTest` task\). - -For example all the following are valid. - -```shell -./gradlew build # builds the entire Airbyte project including every single connector supported -SUB_BUILD=PLATFORM ./gradlew build -x test # builds Airbyte Platform without running tests -SUB_BUILD=CONNECTORS_BASE ./gradlew build # builds all Airbyte connectors and runs unit tests -``` - -### Debugging - -To debug a Gradle task, add `--scan` to the `./gradlew` command. After the task has completed, you should see a message like: - -```text -Publishing build scan... -https://gradle.com/s/6y7ritpvzkwp4 -``` - -Clicking the link opens a browser page which contains lots of information pertinent to debugging why a build failed, or understanding what sub-tasks were run during a task. - -### Formatting - -The build system has a custom task called `format`. It is not called as part of `build`. If the command is called on a subset of the project, it will \(mostly\) target just the included modules. The exception is that `spotless` \(a gradle formatter\) will always format any file types that it is configured to manage regardless of which sub build is run. `spotless` is relatively fast, so this should not be too much of an annoyance. It can lead to formatting changes in unexpected parts of the code base. - -For example all the following are valid. - -```shell -./gradlew format -SUB_BUILD=PLATFORM ./gradlew format -SUB_BUILD=CONNECTORS_BASE ./gradlew format -``` - -### Platform-Specific Commands - -#### Build Artifacts - -This command just builds the docker images that are used as artifacts in the platform. It bypasses running tests. - -```shell -SUB_BUILD=PLATFORM ./gradlew build -``` - -#### Running Tests - -The Platform has 3 different levels of tests: Unit Tests, Acceptance Tests, Frontend Acceptance Tests. - -| Test | Used | Description | -|:------------|:----:|:----------------------------------------------------------------------------------------------| -| Unit | X | Aims to test each component (e.g. a method function) | -| Integration | | Checks the data flow from one module to other modules | -| System | | Tests overall interaction of components, includes load, performance, reliability and security | -| Acceptance | X | Assess whether the Product is working for the user's viewpoint | - -**Unit Tests** - -Unit Tests can be run using the `:test` task on any submodule. These test class-level behavior. They should avoid using external resources \(e.g. calling staging services or pulling resources from the internet\). We do allow these tests to spin up local resources \(usually in docker containers\). For example, we use test containers frequently to spin up test postgres databases. - -**Acceptance Tests** - -We split Acceptance Tests into 2 different test suites: - -* Platform Acceptance Tests: These tests are a coarse test to sanity check that each major feature in the platform. They are run with the following command: `SUB_BUILD=PLATFORM ./gradlew :airbyte-tests:acceptanceTests`. These tests expect to find a local version of Airbyte running. For testing the docker version start Airbyte locally. For an example, see the [acceptance_test script](https://github.com/airbytehq/airbyte-platform/blob/main/tools/bin/acceptance_test.sh) that is used by the CI. For Kubernetes, see the [acceptance_test_helm script](https://github.com/airbytehq/airbyte-platform/blob/main/tools/bin/acceptance_test_kube_helm.sh) that is used by the CI. -* Migration Acceptance Tests: These tests make sure the end-to-end process of migrating from one version of Airbyte to the next works. These tests are run with the following command: `SUB_BUILD=PLATFORM ./gradlew :airbyte-tests:automaticMigrationAcceptanceTest --scan`. These tests do not expect there to be a separate deployment of Airbyte running. - -These tests currently all live in [airbyte-tests](https://github.com/airbytehq/airbyte/airbyte-tests) - -**Frontend Acceptance Tests** - -These are acceptance tests for the frontend. They are run with -```shell -SUB_BUILD=PLATFORM ./gradlew --no-daemon :airbyte-webapp-e2e-tests:e2etest -``` - -Like the Platform Acceptance Tests, they expect Airbyte to be running locally. See the [script](https://github.com/airbytehq/airbyte/blob/master/tools/bin/e2e_test.sh) that is used by the CI. - -These tests currently all live in [airbyte-webapp-e2e-tests](https://github.com/airbytehq/airbyte/airbyte-webapp-e2e-tests) - -**Future Work** - -Our story around "integration testing" or "E2E testing" is a little ambiguous. Our Platform Acceptance Test Suite is getting somewhat unwieldy. It was meant to just be some coarse sanity checks, but over time we have found more need to test interactions between systems more granular. Whether we start supporting a separate class of tests \(e.g. integration tests\) or figure out how allow for more granular tests in the existing Acceptance Test framework is TBD. - -### Connectors-Specific Commands \(Connector Development\) - -#### Commands used in CI - -All connectors, regardless of implementation language, implement the following interface to allow uniformity in the build system when run from CI: - -**Build connector, run unit tests, and build Docker image**: -```shell -./gradlew :airbyte-integrations:connectors::build -``` - -**Run integration tests**: -```shell -./gradlew :airbyte-integrations:connectors::integrationTest -``` - -#### Python - -The ideal end state for a Python connector developer is that they shouldn't have to know Gradle exists. - -We're almost there, but today there is only one Gradle command that's needed when developing in Python, used for formatting code. - -**Formatting python module**: -```shell -./gradlew :airbyte-integrations:connectors::airbytePythonFormat -``` - -# Updating Gradle Dependencies -We use [Gradle Catalogs](https://docs.gradle.org/current/userguide/platforms.html#sub:central-declaration-of-dependencies) -to keep dependencies synced up across different Java projects. This is particularly useful for Airbyte Cloud, and can be -used by any project seeking to build off Airbyte. - -Catalogs allow dependencies to be represented as dependency coordinates. A user can reference preset dependencies/versions -when declaring dependencies in a build script. - -> Version Catalog Example: -> ```gradle -> dependencies { -> implementation(libs.groovy.core) -> } -> ``` -> In this context, libs is a catalog and groovy represents a dependency available in this catalog. Instead of declaring a -> specific version, we reference the version in the Catalog. - -This helps reduce the chances of dependency drift and dependency hell. - -Thus, please use the Catalog when: -- declaring new common dependencies. -- specifying new common dependencies. - -A common dependency is a foundational Java package e.g. Apache commons, Log4j etc that is often the basis on which libraries -are built upon. - -This is a relatively new addition, so devs should keep this in mind and use the top-level Catalog on a best-effort basis. - -### Setup Details -This section is for engineers wanting to understand Gradle Catalog details and how Airbyte has set this up. - -#### The version catalog TOML file format -Gradle offers a conventional file to declare a catalog. -It’s a conventional location to declare dependencies that are both consumed and published. - -The TOML file consists of 4 major sections: -- the [versions] section is used to declare versions which can be referenced by dependencies -- the [libraries] section is used to declare the aliases to coordinates -- the [bundles] section is used to declare dependency bundles -- the [plugins] section is used to declare plugins - -TOML file Example: - -```gradle -[versions] -groovy = "3.0.5" - -[libraries] -groovy-core = { module = "org.codehaus.groovy:groovy", version.ref = "groovy" } - -[bundles] -groovy = ["groovy-core", "groovy-json", "groovy-nio"] - -[plugins] -jmh = { id = "me.champeau.jmh", version = "0.6.5" } -``` - -NOTE: for more information please follow [this](https://docs.gradle.org/current/userguide/platforms.html#:~:text=The%20version%20catalog%20TOML%20file%20format) link. - -As described above this project contains TOML file `deps.toml` which is fully fulfilled with respect to [official](https://docs.gradle.org/current/userguide/platforms.html#sub::toml-dependencies-format) documentation. -In case when new versions should be used please update `deps.toml` accordingly. - -
    -deps.toml - -``` -[versions] -fasterxml_version = "2.13.0" -glassfish_version = "2.31" -commons_io = "2.7" -log4j = "2.17.1" -slf4j = "1.7.30" -lombok = "1.18.22" -junit-jupiter = "5.8.2" - -[libraries] -fasterxml = { module = "com.fasterxml.jackson:jackson-bom", version.ref = "fasterxml_version" } -glassfish = { module = "org.glassfish.jersey:jackson-bom", version.ref = "glassfish_version" } -jackson-databind = { module = "com.fasterxml.jackson.core:jackson-databind", version.ref = "fasterxml_version" } -jackson-annotations = { module = "com.fasterxml.jackson.core:jackson-annotations", version.ref = "fasterxml_version" } -jackson-dataformat = { module = "com.fasterxml.jackson.dataformat:jackson-dataformat-yaml", version.ref = "fasterxml_version" } -jackson-datatype = { module = "com.fasterxml.jackson.datatype:jackson-datatype-jsr310", version.ref = "fasterxml_version" } -guava = { module = "com.google.guava:guava", version = "30.1.1-jre" } -commons-io = { module = "commons-io:commons-io", version.ref = "commons_io" } -apache-commons = { module = "org.apache.commons:commons-compress", version = "1.20" } -apache-commons-lang = { module = "org.apache.commons:commons-lang3", version = "3.11" } -slf4j-api = { module = "org.slf4j:slf4j-api", version = "1.7.30" } -log4j-api = { module = "org.apache.logging.log4j:log4j-api", version.ref = "log4j" } -log4j-core = { module = "org.apache.logging.log4j:log4j-core", version.ref = "log4j" } -log4j-impl = { module = "org.apache.logging.log4j:log4j-slf4j-impl", version.ref = "log4j" } -log4j-web = { module = "org.apache.logging.log4j:log4j-web", version.ref = "log4j" } -jul-to-slf4j = { module = "org.slf4j:jul-to-slf4j", version.ref = "slf4j" } -jcl-over-slf4j = { module = "org.slf4j:jcl-over-slf4j", version.ref = "slf4j" } -log4j-over-slf4j = { module = "org.slf4j:log4j-over-slf4j", version.ref = "slf4j" } -appender-log4j2 = { module = "com.therealvan:appender-log4j2", version = "3.6.0" } -aws-java-sdk-s3 = { module = "com.amazonaws:aws-java-sdk-s3", version = "1.12.6" } -google-cloud-storage = { module = "com.google.cloud:google-cloud-storage", version = "2.2.2" } -s3 = { module = "software.amazon.awssdk:s3", version = "2.16.84" } -lombok = { module = "org.projectlombok:lombok", version.ref = "lombok" } -junit-jupiter-engine = { module = "org.junit.jupiter:junit-jupiter-engine", version.ref = "junit-jupiter" } -junit-jupiter-api = { module = "org.junit.jupiter:junit-jupiter-api", version.ref = "junit-jupiter" } -junit-jupiter-params = { module = "org.junit.jupiter:junit-jupiter-params", version.ref = "junit-jupiter" } -mockito-junit-jupiter = { module = "org.mockito:mockito-junit-jupiter", version = "4.0.0" } -assertj-core = { module = "org.assertj:assertj-core", version = "3.21.0" } -junit-pioneer = { module = "org.junit-pioneer:junit-pioneer", version = "1.6.2" } -findsecbugs-plugin = { module = "com.h3xstream.findsecbugs:findsecbugs-plugin", version = "1.11.0" } - -[bundles] -jackson = ["jackson-databind", "jackson-annotations", "jackson-dataformat", "jackson-datatype"] -apache = ["apache-commons", "apache-commons-lang"] -log4j = ["log4j-api", "log4j-core", "log4j-impl", "log4j-web"] -slf4j = ["jul-to-slf4j", "jcl-over-slf4j", "log4j-over-slf4j"] -junit = ["junit-jupiter-api", "junit-jupiter-params", "mockito-junit-jupiter"] -``` - -
    - -#### Declaring a version catalog -Version catalogs can be declared in the settings.gradle file. -There should be specified section `dependencyResolutionManagement` which uses `deps.toml` file as a declared catalog. -Example: - -```gradle -dependencyResolutionManagement { - repositories { - maven { - url 'https://airbyte.mycloudrepo.io/public/repositories/airbyte-public-jars/' - } - } - versionCatalogs { - libs { - from(files("deps.toml")) - } - } -} -``` - -#### Sharing Catalogs -To share this catalog for further usage by other Projects, we do the following 2 steps: -- Define `version-catalog` plugin in `build.gradle` file (ignore if this record exists) - - ```gradle - plugins { - id '...' - id 'version-catalog' - ``` -- Prepare Catalog for Publishing - - ```gradle - catalog { - versionCatalog { - from(files("deps.toml")) < --- declere either dependencies or specify existing TOML file - } - } - ``` - -#### Configure the Plugin Publishing Plugin -To **Publishing**, first define the `maven-publish` plugin in `build.gradle` file (ignore if this already exists): - -```gradle -plugins { - id '...' - id 'maven-publish' -} -``` -After that, describe the publishing section. Please use [this](https://docs.gradle.org/current/userguide/publishing_gradle_plugins.html) official documentation for more details. -Example: - -```gradle -publishing { - publications { - maven(MavenPublication) { - groupId = 'io.airbyte' - artifactId = 'oss-catalog' - - from components.versionCatalog - } - } - - repositories { - maven { - url 'https://airbyte.mycloudrepo.io/repositories/airbyte-public-jars' - credentials { - name 'cloudrepo' - username System.getenv('CLOUDREPO_USER') - password System.getenv('CLOUDREPO_PASSWORD') - } - } - - mavenLocal() - } -} -``` diff --git a/docs/contributing-to-airbyte/resources/pull-requests-handbook.md b/docs/contributing-to-airbyte/resources/pull-requests-handbook.md index b38d7606bb52e..2b1944c2d6fd2 100644 --- a/docs/contributing-to-airbyte/resources/pull-requests-handbook.md +++ b/docs/contributing-to-airbyte/resources/pull-requests-handbook.md @@ -3,7 +3,7 @@ ### Pull Request Title Convention When creating a pull request follow the naming conventions depending on the change being made. -In general the pull request title starts with an emoji with the connector you're doing the changes, eg (✨ Source E-Commerce: add new stream `Users`). +In general, the pull request title starts with an emoji with the connector you're doing the changes, eg (✨ Source E-Commerce: add new stream `Users`). Airbyte uses this pattern to automatically assign team reviews and build the product release notes. | Pull Request Type | Emoji | Examples | @@ -18,7 +18,7 @@ For more information about [breaking changes](#breaking-changes-to-connectors). Any refactors, cleanups, etc.. that are not visible improvements to the user should not have emojis. -If you're code change is doing more than one change type at once we strongly recommend to break into multiple pull requests. It helps us to review and merge your contribution. +If your code change is doing more than one change type at once, we strongly recommend to break it into multiple pull requests. It helps us to review and merge your contribution. ## Descriptions @@ -43,7 +43,7 @@ When creating or updating connectors, we spend a lot of time manually transcribi Changes to connector behavior should always be accompanied by a version bump and a changelog entry. We use [semantic versioning](https://semver.org/) to version changes to connectors. Since connectors are a bit different from APIs, we have our own take on semantic versioning, focusing on maintaining the best user experience of using a connector. - Major: a version in which a change is made which requires manual intervention (update to config or configured catalog) for an existing connection to continue to succeed, or one in which data that was previously being synced will no longer be synced - - Note that a category of "user intervention" is a schema change in the destination, as users will be required to update downstream reports and tools. A change that leads to a differnt final table in the destination is a breaking change + - Note that a category of "user intervention" is a schema change in the destination, as users will be required to update downstream reports and tools. A change that leads to a different final table in the destination is a breaking change - Minor: a version that introduces user-facing functionality in a backwards compatible manner - Patch: a version that introduces backwards compatible bug fixes or performance improvements diff --git a/docs/contributing-to-airbyte/resources/python-gradle-setup.md b/docs/contributing-to-airbyte/resources/python-gradle-setup.md deleted file mode 100644 index 34ef885a9f64e..0000000000000 --- a/docs/contributing-to-airbyte/resources/python-gradle-setup.md +++ /dev/null @@ -1,95 +0,0 @@ -# (DEPRECATED) Monorepo Python Development - -This guide contains instructions on how to setup Python with Gradle within the Airbyte Monorepo. If you are a contributor working on one or two connectors, this page is most likely not relevant to you. Instead, you should use your standard Python development flow. - -## Python Connector Development - -Before working with connectors written in Python, we recommend running the following command from the airbyte root directory - -```bash -python3 tools/bin/update_intellij_venv.py -modules --install-venv -``` - -e.g - -```bash -python tools/bin/update_intellij_venv.py -modules source-stripe --install-venv -``` - -If using Pycharm or IntelliJ, you'll also want to add the interpreter to the IDE's list of known interpreters. You can do this by adding the `--update-intellij` flag. More details can be found [here](#ide) - -```bash -python tools/bin/update_intellij_venv.py -modules --install-venv --update-intellij -``` - -If working with many connectors, you can use the `--all-modules` flag to install the virtual environments for all connectors - -```bash -python tools/bin/update_intellij_venv.py --all-modules --install-venv -``` - -This will create a `virtualenv` and install dependencies for the connector you want to work on as well as any internal Airbyte python packages it depends on. - - - -## Formatting/linting - -To format and lint your code before commit you can use the Gradle command above, but for convenience we support [pre-commit](https://pre-commit.com/) tool. To use it you need to install it first: - -```bash -pip install pre-commit -``` - -then, to install `pre-commit` as a git hook, run - -```text -pre-commit install -``` - -That's it, `pre-commit` will format/lint the code every time you commit something. You find more information about pre-commit [here](https://pre-commit.com/). - -## IDE - -At Airbyte, we use IntelliJ IDEA for development. Although it is possible to develop connectors with any IDE, we typically recommend IntelliJ IDEA or PyCharm, since we actively work towards compatibility. - -### Autocompletion - -Install the [Pydantic](https://plugins.jetbrains.com/plugin/12861-pydantic) plugin. This will help autocompletion with some of our internal types. - -### PyCharm \(ItelliJ IDEA\) - -The following setup steps are written for PyCharm but should have similar equivalents for IntelliJ IDEA: - -1.`python tools/bin/update_intellij_venv.py -modules --update-intellij` - -2. Restart PyCharm -3. Go to `File -> New -> Project...` -4. Select `Pure Python`. -5. Select a project name like `airbyte` and a directory **outside of** the `airbyte` code root. -6. Go to `Preferences -> Project -> Python Interpreter` -7. Find a gear ⚙️ button next to `Python interpreter` dropdown list, click and select `Add` -8. Select `Virtual Environment -> Existing` -9. Set the interpreter path to the one that was created by Python command, i.e. `airbyte-integrations/connectors//.venv/bin/python`. -10. Wait for PyCharm to finish indexing and loading skeletons from selected virtual environment. - -You should now have access to code completion and proper syntax highlighting for python projects. - -If you need to work on another connector you can quickly change the current virtual environment in the bottom toolbar. - -### Excluding files from venv - -By default, the find function in IntelliJ is not scoped and will include all files in the monorepo, including all the libraries installed as part of a connector's virtual environment. This huge volume of files makes indexing and search very slow. You can ignore files from the connectors' virtual environment with the following steps: - -1. Open the project structure using `cmd-;` -2. Navigate to the "Project Settings / Modules" section in the right-side of the menu -3. Select the top level `airbyte` module so the change is applied to all submodules -4. Add the following filter to the `Exclude files` option: `connectors/**/.venv` -5. Press OK to confirm your options. - - -#### Manual Workaround - -We have seen the above solution not being applied by IntelliJ. The exact reason is not clear to us but as a workaround, you can: -1. Open `.gitignore` in your IntelliJ -2. There will be a banner saying `Some of the ignored directories are not excluded from indexing and search`. Click on `View Directories` -3. A tree with all the git ignored files should be displayed. You can exclude them from IntelliJ by clicking `Exclude` diff --git a/docs/contributing-to-airbyte/resources/qa-checks.md b/docs/contributing-to-airbyte/resources/qa-checks.md index 037b499e17c47..0905c90c8aca0 100644 --- a/docs/contributing-to-airbyte/resources/qa-checks.md +++ b/docs/contributing-to-airbyte/resources/qa-checks.md @@ -43,6 +43,11 @@ Connectors must have a language tag in their metadata. It must be set in the `ta *Applies to the following connector languages: python, low-code* Python connectors must have a CDK tag in their metadata. It must be set in the `tags` field in metadata.yaml. The values can be `cdk:low-code`, `cdk:python`, or `cdk:file`. +### Breaking change deadline should be a week in the future +*Applies to the following connector types: source, destination* +*Applies to the following connector languages: java, low-code, python* + +If the connector version has a breaking change, the deadline field must be set to at least a week in the future. ## 📦 Packaging @@ -75,7 +80,7 @@ Connector version in metadata.yaml and pyproject.toml file must match. This is t *Applies to the following connector types: source* *Applies to the following connector languages: python, low-code* -Python connectors must have [PyPi](https://pypi.org/) publishing enabled in their `metadata.yaml` file. This is declared by setting `remoteRegistries.pypi.enabled` to `true` in metadata.yaml. This is to ensure that all connectors can be published to PyPi and can be used in `airbyte-lib`. +Python connectors must have [PyPi](https://pypi.org/) publishing enabled in their `metadata.yaml` file. This is declared by setting `remoteRegistries.pypi.enabled` to `true` in metadata.yaml. This is to ensure that all connectors can be published to PyPi and can be used in `PyAirbyte`. ## 💼 Assets diff --git a/docs/contributing-to-airbyte/writing-docs.md b/docs/contributing-to-airbyte/writing-docs.md index 75e6efd9a4e67..0343e3f8b86ec 100644 --- a/docs/contributing-to-airbyte/writing-docs.md +++ b/docs/contributing-to-airbyte/writing-docs.md @@ -33,9 +33,19 @@ To make minor changes (example: fixing typos) or edit a single file, you can edi ### Editing on your local machine +#### Prerequisites + +To contribute to our documentation, please ensure following required technologies are installed on your local machine: + +1. [`Node.js`](https://nodejs.org/en/learn/getting-started/how-to-install-nodejs) +2. [`pnpm`](https://pnpm.io/installation) + +#### Setup and Making Changes + To make complex changes or edit multiple files, edit the files on your local machine: 1. [Fork](https://docs.github.com/en/github/getting-started-with-github/fork-a-repo) the Airbyte [repository](https://github.com/airbytehq/airbyte). + 2. Clone the fork on your local machine: ```bash @@ -52,7 +62,15 @@ To make complex changes or edit multiple files, edit the files on your local mac While cloning on Windows, you might encounter errors about long filenames. Refer to the instructions [here](../deploying-airbyte/local-deployment.md#handling-long-filename-error) to correct it. -3. Test changes locally: +3. Create a feature branch from which to make changes: + + ```bash + git checkout -b {YOUR_USERNAME}/{FEATURE/BUG} + ``` + + (e.g. `jdoe/source-stock-api-stream-fix`) + +4. Test changes locally: To install the docs locally, run the following commands in your terminal: @@ -79,13 +97,13 @@ To make complex changes or edit multiple files, edit the files on your local mac Then navigate to [http://localhost:3000/](http://localhost:3000/) to see your changes. You can stop the running server in OSX/Linux by pressing `Ctrl-C` in the terminal. -4. [Follow the GitHub workflow](https://docs.github.com/en/get-started/quickstart/contributing-to-projects/) to edit the files and create a pull request. +5. [Follow the GitHub workflow](https://docs.github.com/en/get-started/quickstart/contributing-to-projects/) to edit the files and create a pull request. :::note Before we accept any contributions, you'll need to sign the Contributor License Agreement (CLA). By signing a CLA, we can ensure that the community is free and confident in its ability to use your contributions. You will be prompted to sign the CLA while opening a pull request. ::: -5. Assign `airbytehq/docs` as a Reviewer for your pull request. +6. Assign `airbytehq/docs` as a Reviewer for your pull request. ### Custom markdown extensions for connector docs Airbyte's markdown documentation—particularly connector-specific documentation—needs to gracefully support multiple different contexts: key details may differ between open-source builds and Airbyte Cloud, and the more exhaustive explanations appropriate for https://docs.airbyte.com may bury key details when rendered as inline documentation within the Airbyte application. In order to support all these different contexts without resorting to multiple overlapping files that must be maintained in parallel, Airbyte's documentation tooling supports multiple nonstandard features. @@ -324,25 +342,26 @@ Back to ordinary markdown content. ``` Eagle-eyed readers may note that _all_ markdown should support this feature since it's part of the html spec. However, it's worth special mention since these dropdowns have been styled to be a graceful visual fit within our rendered documentation in all environments. -#### Documenting airbyte-lib usage +#### Documenting PyAirbyte usage + +PyAirbyte is a Python library that allows to run syncs within a Python script for a subset of connectors. Documentation around PyAirbyte connectors is automatically generated from the connector's JSON schema spec. There are a few approaches to combine full control over the documentation with automatic generation for common cases: + +* If a connector is PyAirbyte enabled (`remoteRegistries.pypi.enabled` set in the `metadata.yaml` file of the connector) and there is no second-level heading `Usage with PyAirbyte` in the documentation, the documentation will be automatically generated and placed above the `Changelog` section. +* By manually specifying a `Usage with PyAirbyte` section, this automatism is disabled. The following is a good starting point for this section: -airbyte-lib is a Python library that allows to run syncs within a Python script for a subset of connectors. Documentation around airbyte-lib connectors is automatically generated from the connector's JSON schema spec. -There are a few approaches to combine full control over the documentation with automatic generation for common cases: -* If a connector is airbyte-lib enabled (`remoteRegistries.pypi.enabled` set in the `metadata.yaml` file of the connector) and there is no second-level heading `Usage with airbyte-lib` in the documentation, the documentation will be automatically generated and placed above the `Changelog` section. -* By manually specifying a `Usage with airbyte-lib` section, this automatism is disabled. The following is a good starting point for this section: ```md -## Usage with airbyte-lib +## Usage with PyAirbyte - + ``` -The `AirbyteLibExample` component will generate a code example that can be run with airbyte-lib, excluding an auto-generated sample configuration based on the configuration schema. The `SpecSchema` component will generate a reference table with the connector's JSON schema spec, like a non-interactive version of the connector form in the UI. It can be used on any docs page. +The `PyAirbyteExample` component will generate a code example that can be run with PyAirbyte, excluding an auto-generated sample configuration based on the configuration schema. The `SpecSchema` component will generate a reference table with the connector's JSON schema spec, like a non-interactive version of the connector form in the UI. It can be used on any docs page. ## Additional guidelines diff --git a/docs/deploying-airbyte/docker-compose.md b/docs/deploying-airbyte/docker-compose.md new file mode 100644 index 0000000000000..c1b37fae45dcc --- /dev/null +++ b/docs/deploying-airbyte/docker-compose.md @@ -0,0 +1,76 @@ +# Docker Compose + +:::info +These instructions have been tested on MacOS, Windows 10 and Ubuntu 22.04. + +::: + +## Setup & launch Airbyte + +- Install `Docker Engine` and the `Docker Compose plugin` on your workstation \(see [instructions](https://docs.docker.com/engine/install/)\). +- After Docker is installed, you can immediately get started locally by running: + +```bash +# clone Airbyte from GitHub +git clone --depth=1 https://github.com/airbytehq/airbyte.git + +# switch into Airbyte directory +cd airbyte + +# start Airbyte +./run-ab-platform.sh +``` + +- In your browser, visit [http://localhost:8000](http://localhost:8000) +- You will be asked for a username and password. By default, that's username `airbyte` and password `password`. Once you deploy Airbyte to your servers, be sure to change these in your `.env` file: + +```yaml +# Proxy Configuration +# Set to empty values, e.g. "" to disable basic auth +BASIC_AUTH_USERNAME=your_new_username_here +BASIC_AUTH_PASSWORD=your_new_password_here +``` + +- Start moving some data! + +## Deploy on Windows + +After installing the WSL 2 backend and Docker you should be able to run containers using Windows PowerShell. Additionally, as we note frequently, you will need `docker-compose` to build Airbyte from source. The suggested guide already installs `docker-compose` on Windows. + +### Setup Guide + +**1. Check out system requirements from [Docker documentation](https://docs.docker.com/desktop/windows/install/).** + +Follow the steps on the system requirements, and necessarily, download and install the Linux kernel update package. + +**2. Install Docker Desktop on Windows.** + +Install [Docker Desktop](https://docs.docker.com/desktop/windows/install/) from here. + +Make sure to select the options: + +1. _Enable Hyper-V Windows Features_ +2. _Install required Windows components for WSL 2_ + when prompted. After installation, it will require to reboot your computer. + +**3. You're done!** + +```bash +git clone --depth=1 https://github.com/airbytehq/airbyte.git +cd airbyte +bash run-ab-platform.sh +``` + +- In your browser, just visit [http://localhost:8000](http://localhost:8000) +- You will be asked for a username and password. By default, that's username `airbyte` and password `password`. Once you deploy airbyte to your servers, be sure to change these. +- Start moving some data! + +## Troubleshooting +If you have any questions about the local setup and deployment process, head over to our [Getting Started FAQ](https://github.com/airbytehq/airbyte/discussions/categories/questions) on our Airbyte Forum that answers the following questions and more: + +- How long does it take to set up Airbyte? +- Where can I see my data once I've run a sync? +- Can I set a start time for my sync? + +If you encounter any issues, check out [Getting Support](/community/getting-support) documentation +for options how to get in touch with the community or us. \ No newline at end of file diff --git a/docs/deploying-airbyte/local-deployment.md b/docs/deploying-airbyte/local-deployment.md index d3247a86668cd..07bd81857b6d8 100644 --- a/docs/deploying-airbyte/local-deployment.md +++ b/docs/deploying-airbyte/local-deployment.md @@ -1,68 +1,31 @@ # Local Deployment :::info -These instructions have been tested on MacOS, Windows 10 and Ubuntu 22.04. +These instructions have been tested on MacOS, Windows, Ubuntu and Fedora. ::: ## Setup & launch Airbyte -- Install `Docker Engine` and the `Docker Compose plugin` on your workstation \(see [instructions](https://docs.docker.com/engine/install/)\). -- After Docker is installed, you can immediately get started locally by running: +- Install `Docker Desktop` \(see [instructions](https://docs.docker.com/desktop/install/mac-install/)\). +- After `Docker Desktop` is installed, you must enable `Kubernetes` \(see [instructions](https://docs.docker.com/desktop/kubernetes/)\). +- Download the latest version of `abctl` from the [releases page](https://github.com/airbytehq/abctl/releases) and run the following command: ```bash -# clone Airbyte from GitHub -git clone --depth=1 https://github.com/airbytehq/airbyte.git - -# switch into Airbyte directory -cd airbyte - -# start Airbyte -./run-ab-platform.sh -``` - -- In your browser, visit [http://localhost:8000](http://localhost:8000) -- You will be asked for a username and password. By default, that's username `airbyte` and password `password`. Once you deploy Airbyte to your servers, be sure to change these in your `.env` file: - -```yaml -# Proxy Configuration -# Set to empty values, e.g. "" to disable basic auth -BASIC_AUTH_USERNAME=your_new_username_here -BASIC_AUTH_PASSWORD=your_new_password_here +abctl local install ``` -- Start moving some data! - -## Deploy on Windows - -After installing the WSL 2 backend and Docker you should be able to run containers using Windows PowerShell. Additionally, as we note frequently, you will need `docker-compose` to build Airbyte from source. The suggested guide already installs `docker-compose` on Windows. - -### Setup Guide - -**1. Check out system requirements from [Docker documentation](https://docs.docker.com/desktop/windows/install/).** - -Follow the steps on the system requirements, and necessarily, download and install the Linux kernel update package. - -**2. Install Docker Desktop on Windows.** - -Install [Docker Desktop](https://docs.docker.com/desktop/windows/install/) from here. - -Make sure to select the options: - -1. _Enable Hyper-V Windows Features_ -2. _Install required Windows components for WSL 2_ - when prompted. After installation, it will require to reboot your computer. - -**3. You're done!** +- Your browser should open to the Airbyte Application, if it does not visit [http://localhost](http://localhost) +- You will be asked for a username and password. By default, that's username `airbyte` and password `password`. You can set these values through command line flags or environment variables. For example, to set the username and password to `foo` and `bar` respectively, you can run the following command: ```bash -git clone --depth=1 https://github.com/airbytehq/airbyte.git -cd airbyte -bash run-ab-platform.sh +abctl local install --username foo --password bar + +# Or as Environment Variables +ABCTL_LOCAL_INSTALL_PASSWORD=foo +ABCTL_LOCAL_INSTALL_USERNAME=bar ``` -- In your browser, just visit [http://localhost:8000](http://localhost:8000) -- You will be asked for a username and password. By default, that's username `airbyte` and password `password`. Once you deploy airbyte to your servers, be sure to change these. - Start moving some data! ## Troubleshooting diff --git a/docs/deploying-airbyte/on-kubernetes-via-helm.md b/docs/deploying-airbyte/on-kubernetes-via-helm.md index bf6c72e0dd597..8cf05d9bacd52 100644 --- a/docs/deploying-airbyte/on-kubernetes-via-helm.md +++ b/docs/deploying-airbyte/on-kubernetes-via-helm.md @@ -2,21 +2,31 @@ ## Overview -Airbyte allows scaling sync workloads horizontally using Kubernetes. The core components \(api server, worker, etc\) run as deployments while the scheduler launches connector-related pods on different nodes. +Airbyte allows scaling sync workloads horizontally using Kubernetes. The core components \(api +server, worker, etc\) run as deployments while the scheduler launches connector-related pods on +different nodes. ## Quickstart -If you don't want to configure your own Kubernetes cluster and Airbyte instance, you can use the free, open-source project [Plural](https://www.plural.sh/) to bring up a Kubernetes cluster and Airbyte for you. Use [this guide](on-plural.md) to get started. +If you don't want to configure your own Kubernetes cluster and Airbyte instance, you can use the +free, open-source project [Plural](https://www.plural.sh/) to bring up a Kubernetes cluster and +Airbyte for you. Use [this guide](on-plural.md) to get started. -Alternatively, you can deploy Airbyte on [Restack](https://www.restack.io) to provision your Kubernetes cluster on AWS. Follow [this guide](on-restack.md) to get started. +Alternatively, you can deploy Airbyte on [Restack](https://www.restack.io) to provision your +Kubernetes cluster on AWS. Follow [this guide](on-restack.md) to get started. :::note -Airbyte running on Self-Hosted Kubernetes doesn't support DBT Transformations. Please refer to [#5901](https://github.com/airbytehq/airbyte/issues/5091) + +Airbyte running on Self-Hosted Kubernetes doesn't support DBT Transformations. Please refer to +[#5901](https://github.com/airbytehq/airbyte/issues/5091) + ::: :::note -Airbyte Kubernetes Community Edition does not support basic auth by default. -To enable basic auth, consider adding a reverse proxy in front of Airbyte. + +Airbyte Kubernetes Community Edition does not support basic auth by default. To enable basic auth, +consider adding a reverse proxy in front of Airbyte. + ::: ## Getting Started @@ -30,15 +40,22 @@ For local testing we recommend following one of the following setup guides: - NOTE: Start Minikube with at least 4gb RAM with `minikube start --memory=4000` - [Kind](https://kind.sigs.k8s.io/docs/user/quick-start/) -For testing on GKE you can [create a cluster with the command line or the Cloud Console UI](https://cloud.google.com/kubernetes-engine/docs/how-to/creating-a-zonal-cluster). +For testing on GKE you can +[create a cluster with the command line or the Cloud Console UI](https://cloud.google.com/kubernetes-engine/docs/how-to/creating-a-zonal-cluster). -For testing on EKS you can [install eksctl](https://eksctl.io/introduction/) and run `eksctl create cluster` to create an EKS cluster/VPC/subnets/etc. This process should take 10-15 minutes. +For testing on EKS you can [install eksctl](https://eksctl.io/introduction/) and run +`eksctl create cluster` to create an EKS cluster/VPC/subnets/etc. This process should take 10-15 +minutes. -For production, Airbyte should function on most clusters v1.19 and above. We have tested support on GKE and EKS. If you run into a problem starting Airbyte, please reach out on the `#troubleshooting` channel on our [Slack](https://slack.airbyte.io/) or [create an issue on GitHub](https://github.com/airbytehq/airbyte/issues/new?assignees=&labels=type%2Fbug&template=bug-report.md&title=). +For production, Airbyte should function on most clusters v1.19 and above. We have tested support on +GKE and EKS. If you run into a problem starting Airbyte, please reach out on the `#troubleshooting` +channel on our [Slack](https://slack.airbyte.io/) or +[create an issue on GitHub](https://github.com/airbytehq/airbyte/issues/new?assignees=&labels=type%2Fbug&template=bug-report.md&title=). ### Install `kubectl` -If you do not already have the CLI tool `kubectl` installed, please follow [these instructions to install](https://kubernetes.io/docs/tasks/tools/). +If you do not already have the CLI tool `kubectl` installed, please follow +[these instructions to install](https://kubernetes.io/docs/tasks/tools/). ### Configure `kubectl` @@ -47,7 +64,8 @@ Configure `kubectl` to connect to your cluster by using `kubectl use-context my- For GKE: 1. Configure `gcloud` with `gcloud auth login`. -2. On the Google Cloud Console, the cluster page will have a `Connect` button, which will give a command to run locally that looks like +2. On the Google Cloud Console, the cluster page will have a `Connect` button, which will give a + command to run locally that looks like `gcloud container clusters get-credentials $CLUSTER_NAME --zone $ZONE_NAME --project $PROJECT_NAME`. @@ -56,9 +74,11 @@ For GKE: For EKS: -1. [Configure your AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-configure.html) to connect to your project. +1. [Configure your AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-configure.html) + to connect to your project. 2. Install [eksctl](https://eksctl.io/introduction/) -3. Run `eksctl utils write-kubeconfig --cluster=` to make the context available to `kubectl` +3. Run `eksctl utils write-kubeconfig --cluster=` to make the context available to + `kubectl` 4. Use `kubectl config get-contexts` to show the contexts available. 5. Run `kubectl config use-context ` to access the cluster with `kubectl`. @@ -72,13 +92,15 @@ For MacOS: For Linux: -1. Download installer script `curl -fsSL -o get_helm.sh https://raw.githubusercontent.com/helm/helm/main/scripts/get-helm-3` +1. Download installer script + `curl -fsSL -o get_helm.sh https://raw.githubusercontent.com/helm/helm/main/scripts/get-helm-3` 2. Assign required premissions `chmod 700 get_helm.sh` 3. Run script `./get_helm.sh` ### Add Helm Repository -From now charts are stored in helm-repo thus there're no need to clone the repo each time you need to deploy the chart. +From now charts are stored in helm-repo thus there're no need to clone the repo each time you need +to deploy the chart. To add remote helm repo simply run: `helm repo add airbyte https://airbytehq.github.io/helm-charts`. @@ -117,13 +139,17 @@ In order to do so, run the command: helm install %release_name% airbyte/airbyte ``` -**Note**: `release_name` should only contain lowercase letters and optionally dashes (`release_name` must start with a letter). +**Note**: `release_name` should only contain lowercase letters and optionally dashes (`release_name` +must start with a letter). ### Custom deployment -In order to customize your deployment, you need to create `values.yaml` file in the local folder and populate it with default configuration override values. +In order to customize your deployment, you need to create `values.yaml` file in the local folder and +populate it with default configuration override values. -`values.yaml` example can be located in [charts/airbyte](https://github.com/airbytehq/airbyte-platform/blob/main/charts/airbyte/values.yaml) folder of the Airbyte repository. +`values.yaml` example can be located in +[charts/airbyte](https://github.com/airbytehq/airbyte-platform/blob/main/charts/airbyte/values.yaml) +folder of the Airbyte repository. After specifying your own configuration, run the following command: @@ -131,49 +157,17 @@ After specifying your own configuration, run the following command: helm install --values path/to/values.yaml %release_name% airbyte/airbyte ``` -## Migrate from old charts to new ones - -Starting from `0.39.37-alpha` we've revisited helm charts structure and separated all components of airbyte into their own independent charts, thus by allowing our developers to test single component without deploying airbyte as a whole and by upgrading single component at a time. - -In most cases upgrade from older monolith chart to a new one should go without any issue, but if you've configured custom logging or specified custom configuration of DB or Logging then follow the instructions listed below - -### Minio migration - -Since the latest release of bitnami/minio chart, they've changed the way of setting up the credentials for accessing the minio. (written mid-2022) - -Going forward in new version you need to specify the following values in values yaml for user/password instead old one - -Before: - -```text -minio: - rootUser: airbyte-user - rootPassword: airbyte-password-123 -``` - -After: - -```text -minio: - auth: - rootUser: minio - rootPassword: minio123 - -``` - -Before upgrading the chart update values.yaml as stated above and then run: +### External Logs with S3 -- Get the old rootPassword by running `export ROOT_PASSWORD=$(kubectl get secret --namespace "default" %release_name%-minio -o jsonpath="{.data.root-password}" | base64 -d)` -- Perform upgrade of chart by running `helm upgrade %release_name% airbyte/airbyte --set auth.rootPassword=$ROOT_PASSWORD` - - If you get an error about setting the auth.rootPassword, then you forgot to update the `values.yaml` file +:::info -### External Logs with S3 +S3 logging was tested on +[Airbyte Helm Chart Version 0.50.13](https://artifacthub.io/packages/helm/airbyte/airbyte/0.50.13) -::info -S3 logging was tested on [Airbyte Helm Chart Version 0.50.13](https://artifacthub.io/packages/helm/airbyte/airbyte/0.50.13) ::: Create a file called `airbyte-logs-secrets.yaml` to store the AWS Keys and other informations: + ```yaml apiVersion: v1 kind: Secret @@ -186,10 +180,13 @@ stringData: S3_LOG_BUCKET: S3_LOG_BUCKET_REGION: ``` -Run `kubectl apply -f airbyte-logs-secrets.yaml -n ` to create the secret in the namespace you're using Airbyte. -This file contains more than just the keys but it needs for now. Future updates will make the configuration easier. + +Run `kubectl apply -f airbyte-logs-secrets.yaml -n ` to create the secret in the +namespace you're using Airbyte. This file contains more than just the keys but it needs for now. +Future updates will make the configuration easier. Change the global section to use `S3` external logs. + ```yaml global: # <...> @@ -209,7 +206,7 @@ global: existingSecretKey: "AWS_SECRET_KEY" # <...> storage: - type: "S3" + type: "S3" minio: # Change from true to false @@ -218,9 +215,12 @@ global: tolerations: [] affinity: {} ``` -GCS Logging information is below but you can try to use `External Minio` as well but it was not tested yet. Feel free to run tests and update the documentation. + +GCS Logging information is below but you can try to use `External Minio` as well but it was not +tested yet. Feel free to run tests and update the documentation. Add extra env variables to the following blocks: + ```yaml worker: extraEnv: @@ -298,17 +298,17 @@ Than run: ### External Logs with GCS - :::Info GCS Logging is similar to the approach taken for S3 above, with a few small differences -GCS logging was tested on [Airbyte Helm Chart Version 0.53.178](https://artifacthub.io/packages/helm/airbyte/airbyte/0.53.178) +GCS logging was tested on [Airbyte Helm Chart Version 0.54.69](https://artifacthub.io/packages/helm/airbyte/airbyte/0.54.69) ::: #### Create Google Cloud Storage Bucket -1. **Access Google Cloud Console**: Go to the Google Cloud Console and select or create a project where you want to create the bucket. +1. **Access Google Cloud Console**: Go to the Google Cloud Console and select or create a project + where you want to create the bucket. 2. **Open Cloud Storage**: Navigate to "Storage" > "Browser" in the left-side menu. -3. **Create Bucket**: Click on "Create bucket". Give your bucket a unique name, select a region for the bucket, and configure other settings such as storage class and access control according to your requirements. Finally, click "Create". +3. **Create Bucket**: Click on "Create bucket". Give your bucket a unique name, select a region for the bucket, and configure other settings such as storage class and access control according to your requirements. Finally, click "Create". The buckect will be referenced as `` #### Create Google Cloud Service Account @@ -316,28 +316,12 @@ GCS logging was tested on [Airbyte Helm Chart Version 0.53.178](https://artifact 2. **Create Service Account**: Click "Create Service Account", enter a name, description, and then click "Create". 3. **Grant Permissions**: Assign the role of "Storage Object Admin" to the service account by selecting it from the role list. 4. **Create Key**: After creating the service account, click on it, go to the "Keys" tab, and then click "Add Key" > "Create new key". Choose JSON as the key type and click "Create". The key file will be downloaded automatically to your computer. - -#### Create a Kubernetes Secret - -- Use the **`kubectl create secret`** command to create a Kubernetes secret from the JSON key file. Replace **``** with the desired name for your secret, **``** with the path to the JSON key file you downloaded, and **``** with the namespace where your deployment will be running. - -```kubectl create secret generic --from-file=gcp.json= --namespace=``` - -#### Create an extra Volume where the GCSFS secret will be added in the values.yaml inside of the worker section -``` -worker: - extraVolumes: - - name: gcsfs-creds - secret: - secretName: - extraVolumeMounts: - - name: gcsfs-creds - mountPath: "/etc/secrets" - readOnly: true -``` +5. **Encode Key**: Encode GCP credentials file contents using Base64. This key will be referenced as `` #### Update the values.yaml with the GCS Logging Information below + Update the following Environment Variables in the global section: + ``` global: state: @@ -348,46 +332,60 @@ global: storage: type: "GCS" gcs: - bucket: "" - credentials: "/etc/secrets/gcp.json" + bucket: "" + credentials: "/secrets/gcs-log-creds/gcp.json" + credentialsJson: "" +``` + + +Update the following Environment Variables in the worker section: +``` +worker: extraEnv: - name: STATE_STORAGE_GCS_BUCKET_NAME - value: + value: - name: STATE_STORAGE_GCS_APPLICATION_CREDENTIALS - value: /etc/secrets/gcp.json + value: /secrets/gcs-log-creds/gcp.json - name: CONTAINER_ORCHESTRATOR_SECRET_NAME - value: + value: <%RELEASE_NAME%>-gcs-log-creds - name: CONTAINER_ORCHESTRATOR_SECRET_MOUNT_PATH - value: /etc/secrets/ + value: /secrets/gcs-log-creds ``` -Than run: -`helm upgrade --install %RELEASE_NAME% airbyte/airbyte -n --values /path/to/values.yaml --version 0.53.178` +Then run: +`helm upgrade --install %RELEASE_NAME% airbyte/airbyte -n --values /path/to/values.yaml --version 0.54.69` ### External Airbyte Database - - :::info -This was tested using [Airbyte Helm Chart Version 0.50.13](https://artifacthub.io/packages/helm/airbyte/airbyte/0.50.13). + +This was tested using +[Airbyte Helm Chart Version 0.50.13](https://artifacthub.io/packages/helm/airbyte/airbyte/0.50.13). Previous or newer version can change how the external database can be configured. -::: +::: -The Airbyte Database only works with Postgres 13. -Make sure the database is accessible inside the cluster using `busy-box` service using `telnet` or `ping` command. +The Airbyte Database only works with Postgres 13. Make sure the database is accessible inside the +cluster using `busy-box` service using `telnet` or `ping` command. :::warning -If you're using the external database for the first time you must ensure the database you're going to use exists. The default database Airbyte will try to use is `airbyte` but you can modified it in the `values.yaml`. + +If you're using the external database for the first time you must ensure the database you're going +to use exists. The default database Airbyte will try to use is `airbyte` but you can modified it in +the `values.yaml`. + ::: :::warning -You can use only one database to a one Airbyte Helm deployment. If you try to use the same database for a different deployment it will have conflict with Temporal internal databases. + +You can use only one database to a one Airbyte Helm deployment. If you try to use the same database +for a different deployment it will have conflict with Temporal internal databases. + ::: -Create a Kubernetes secret to store the database password. -Save the file as `db-secrets.yaml`. +Create a Kubernetes secret to store the database password. Save the file as `db-secrets.yaml`. + ```yaml apiVersion: v1 kind: Secret @@ -398,15 +396,19 @@ stringData: DATABASE_PASSWORD: ``` -Run `kubectl apply -f db-secrets.yaml -n ` to create the secret in the namespace you're using Airbyte. +Run `kubectl apply -f db-secrets.yaml -n ` to create the secret in the namespace you're +using Airbyte. Afterward, modify the following blocks in the Helm Chart `values.yaml` file: + ```yaml postgresql: # Change the value from true to false. enabled: false ``` + Then: + ```yaml externalDatabase: # Add the host, username and database name you're using. @@ -419,11 +421,13 @@ externalDatabase: port: 5432 jdbcUrl: "" ``` -Keep password empty as the Chart will use the `db-secrets` value. -Edit only the host, username, and database name. If your database is using a differnet `port` or need an special `jdbcUrl` you can edit here. -This wasn't fully tested yet. + +Keep password empty as the Chart will use the `db-secrets` value. Edit only the host, username, and +database name. If your database is using a differnet `port` or need an special `jdbcUrl` you can +edit here. This wasn't fully tested yet. Next, reference the secret in the global section: + ```yaml global: database: @@ -431,9 +435,196 @@ global: secretValue: "DATABASE_PASSWORD" ``` -Unfortunately, the `airbyte-bootloader` configuration uses this variable. Future improvements are planned. +Unfortunately, the `airbyte-bootloader` configuration uses this variable. Future improvements are +planned. Upgrade the chart by running: + ```shell helm upgrade --install %RELEASE_NAME% airbyte/airbyte -n --values /path/to/values.yaml --version 0.50.13 ``` + +## Migrate from old chart to Airbyte v0.52.0 and latest chart version + +To assist with upgrading to Airbyte App version 0.52.0 and higher with the latest Helm Charts, we've +simplified and consolidated several configuration options. Here's a breakdown of the changes: + +**Application.yaml Updates**: + +- We've streamlined the configuration for logs and state storage. +- Instead of separate configurations for logs and state, we now have a unified storage + configuration. +- The proposed changes involve specifying the storage type and bucket names directly, along with + credentials where necessary. + +**Helm Configuration Updates:** + +- The global configuration now includes a simplified storage section specifying the type and bucket + names for logs, state, and workload output. +- Credentials for MinIO are now set directly in the Helm values, ensuring smoother integration. +- Unused configurations have been removed, and configurations have been aligned with the simplified + application.yaml. + +**Technical Details and Renaming:** + +- We've renamed or consolidated several environment variables for clarity and consistency. +- Unused methods and classes have been removed, ensuring a cleaner codebase. +- Some configurations have been split into separate files for better management and compatibility + with different storage options. + +**Additional Changes:** + +- We've added support for workload output storage explicitly, improving flexibility and clarity in + configuration. +- The Helm charts have been updated to reflect these changes, removing or replacing old environment + variables for storage configuration. +- These changes aim to simplify configuration management and improve the overall user experience + during upgrades. Please review these updates and let us know if you have any questions or need + further assistance. + +### **Migration Steps** + +This guide aims to assist customers upgrading to the latest version of the Airbyte Helm charts, +specifically those using custom configurations for external logging and databases with AWS (S3) and +GCS (Google Cloud Buckets). + +### **For AWS S3 Users** + +#### **Prerequisites** + +- Access to your Kubernetes cluster where Airbyte is deployed. +- Helm and kubectl installed and configured on your machine. +- Existing Airbyte deployment using AWS S3 for storage and AWS Secrets Manager for secret + management. + +#### **Migration Steps** + +1. **Creating or Updating Kubernetes Secrets** + +If using AWS access keys, create a Kubernetes secret to store these credentials. If relying on an +IAM role from an instance profile, this step can be skipped. Apply the following Kubernetes +manifest, replacing the example AWS credentials with your actual credentials: + +```yaml +# Replace the example AWS credentials below with your actual credentials. +apiVersion: v1 +kind: Secret +metadata: + name: airbyte-config-secrets +type: Opaque +stringData: + s3-access-key-id: AKIAIOSFODNN7EXAMPLE # Enter your AWS Access Key ID here + s3-secret-access-key: wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY # Enter your AWS Secret Access Key here +``` + +2. **Update Airbyte Configuration** + + In your `airbyte.yml` configuration file, add the following configuration, adjusting + `` to match your AWS region: + + ```yaml + global: + storage: + type: s3 + storageSecretName: airbyte-config-secrets + bucket: + log: airbyte-storage + state: airbyte-storage + workloadOutput: airbyte-storage + s3: + region: + authenticationType: credentials # Use "credentials" or "instanceProfile" + accessKeyIdSecretKey: aws-secret-manager-access-key-id # Omit if using instanceProfile + secretAccessKeySecretKey: aws-secret-manager-secret-access-key # Omit if using instanceProfile + + secretsManager: + type: awsSecretManager + storageSecretName: airbyte-config-secrets + awsSecretManager: + region: + authenticationType: credentials # Use "credentials" or "instanceProfile" + accessKeyIdSecretKey: aws-secret-manager-access-key-id # Omit if using instanceProfile + secretAccessKeySecretKey: aws-secret-manager-secret-access-key # Omit if using instanceProfile + tags: + - key: team + value: deployment + - key: business-unit + value: engineering + ``` + +3. **Remove Deprecated Configuration from `values.yaml`** + + Edit your `values.yaml` or `airbyte-pro-values.yaml` files to remove any deprecated storage and + secrets manager environment variables related to S3 and AWS Secrets Manager. Ensure + configurations like `state.storage.type: "S3"` and AWS access keys under `server.extraEnv` and + `worker.extraEnv` are removed. + +### **For GCS Users** + +#### **Prerequisites** + +- Access to your Kubernetes cluster where Airbyte is deployed. +- Helm and kubectl installed and configured on your machine. +- Existing Airbyte deployment using Google Cloud Storage (GCS) and Google Secret Manager (GSM) for + secret management. + +#### **Migration Steps** + +1. **Setting Up or Updating Kubernetes Secrets** + + For Google Secret Manager, you may use existing credentials or create new ones. Apply a + Kubernetes manifest like below, replacing `` with your GCP credentials + JSON blob: + + ```yaml + apiVersion: v1 + kind: Secret + metadata: + name: gcp-cred-secrets + type: Opaque + stringData: + gcp.json: + ``` + + Or use `kubectl` to create the secret directly from a file: + + ```sh + kubectl create secret generic gcp-cred-secrets --from-file=gcp.json=.json + ``` + +2. **Update Airbyte Configuration** + + In your `airbyte.yml` configuration file, add the following configuration, adjusting + `` to match your GCP project ID: + + ```yaml + global: + storage: + type: gcs + storageSecretName: gcp-cred-secrets + bucket: + log: airbyte-storage + state: airbyte-storage + workloadOutput: airbyte-storage + gcs: + authenticationType: credentials + projectId: + credentialsPath: /secrets/gcs-log-creds/gcp.json + + secretsManager: + type: googleSecretManager + storageSecretName: gcp-cred-secrets + googleSecretManager: + authenticationType: credentials + projectId: + credentialsSecretKey: gcp-creds.json + ``` + +3. **Remove Deprecated Configuration from `values.yaml`** + + Edit your `values.yaml` files to remove any deprecated storage and secrets manager environment + variables related to GCS. Ensure configurations like `global.state.storage.type: "GCS"` and GCS + credentials paths under `extraEnv` are removed. + +This guide ensures that you leverage the latest Helm chart configurations for Airbyte, aligning with +best practices for managing storage and secrets in Kubernetes environments for AWS and GCS users. diff --git a/docs/enterprise-setup/api-access-config.md b/docs/enterprise-setup/api-access-config.md new file mode 100644 index 0000000000000..16f71fc20cc47 --- /dev/null +++ b/docs/enterprise-setup/api-access-config.md @@ -0,0 +1,64 @@ +--- +products: oss-enterprise +--- + +# Configuring API Access + +The Airbyte API enables you to programmatically interact with Airbyte: create sources, destinations, run syncs, list workspaces, and much more. + +Access to the API in Self-Managed Enterprise deployments is controlled via application keys. Applications keys are tied to individual users and their respective permissions. Prior to configuring API access, ensure you have an up and running deployment of Airbyte Self-Managed Enterprise that exposes the `airbyte-server` service. To do this, follow the steps in the [implementation guide](./implementation-guide.md). + +## Step 1: Create an Application + +While logged into the Airbyte UI, go to the `settings/applications` page, then create an application to get a pair of `client_id` and `client_secret` credentials. This can be exchanged anytime to get an access token to make requests to the API. These credentials do not expire, but may be deleted at any time. + +![Create an Application](./assets/enterprise-applications-creation.png) + +## Step 2: Obtain an Access Token + +With your `client_id` and `client_secret` in hand, make the following API request, replacing `` with the URL you use to access the Airbyte UI: + +```yml +POST /api/v1/applications/token +``` + +Ensure the following JSON Body is attached to the request, populated with your `client_id` and `client_secret`: + +```yaml +{ + "client_id" : "", + "client_secret": "" +} +``` + +The API response should provide an `access_token` which is a Bearer Token valid for 60 minutes that can be used to make requests to the API. Once your `access_token` expires, you may make a new request to the `applications/token` endpoint to get a new token. + +## Step 3: Operate Airbyte via API + +You may now make requests to any endpoint documented in our [Airbyte API Reference](https://reference.airbyte.com). For example, you may use the [List workspaces endpoint](https://reference.airbyte.com/reference/listworkspaces) to verify the list of workspaces in your organization. Ensure to include your `access_token` as a `Bearer Token` in your request.: + +```yaml +GET /api/public/v1/workspaces +``` + +Expect a response like the following: + +```json +{ + "data": [ + { + "workspaceId": "b5367aab-9d68-4fea-800f-0000000000", + "name": "Finance Team", + "dataResidency": "auto" + }, + { + "workspaceId": "b5367aab-9d68-4fea-800f-0000000001", + "name": "Analytics Team", + "dataResidency": "auto" + }, + ] +} +``` + +To go further, you may use our [Python](https://github.com/airbytehq/airbyte-api-python-sdk) and [Java](https://github.com/airbytehq/airbyte-api-java-sdk) SDKs to make API requests directly in code, or our [Terraform Provider](https://registry.terraform.io/providers/airbytehq/airbyte/latest) (which uses the Airbyte API) to declare your Airbyte configuration as infrastructure. + diff --git a/docs/enterprise-setup/assets/enterprise-applications-creation.png b/docs/enterprise-setup/assets/enterprise-applications-creation.png new file mode 100644 index 0000000000000..0e4183b34b138 Binary files /dev/null and b/docs/enterprise-setup/assets/enterprise-applications-creation.png differ diff --git a/docs/enterprise-setup/implementation-guide.md b/docs/enterprise-setup/implementation-guide.md index 0806e590c33fc..377a282bcba26 100644 --- a/docs/enterprise-setup/implementation-guide.md +++ b/docs/enterprise-setup/implementation-guide.md @@ -67,6 +67,137 @@ We also require you to create a Kubernetes namespace for your Airbyte deployment kubectl create namespace airbyte ``` +### Configure Kubernetes Secrets + +Sensitive credentials such as AWS access keys are required to be made available in Kubernetes Secrets during deployment. The Kubernetes secret store and secret keys are referenced in your `values.yml` file. Ensure all required secrets are configured before deploying Airbyte Self-Managed Enterprise. + +You may apply your Kubernetes secrets by applying the example manifests below to your cluster, or using `kubectl` directly. If your Kubernetes cluster already has permissions to make requests to an external entity via an instance profile, credentials are not required. For example, if your Amazon EKS cluster has been assigned a sufficient AWS IAM role to make requests to AWS S3, you do not need to specify access keys. + +#### External Log Storage + +For Self-Managed Enterprise deployments, we recommend spinning up standalone log storage for additional reliability using tools such as S3 and GCS instead of against using the default internal Minio storage (`airbyte/minio`). + +
    +Secrets for External Log Storage + + + + +```yaml +apiVersion: v1 +kind: Secret +metadata: + name: airbyte-config-secrets +type: Opaque +stringData: +## Storage Secrets + # S3 + s3-access-key-id: ## e.g. AKIAIOSFODNN7EXAMPLE + s3-secret-access-key: ## e.g. wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY +``` + +Overriding `name`, `s3-access-key-id` or `s3-secret-access-key` allows you to store these secrets in the location of your choice. If you do this, you will also need to specify the secret location in the bucket config for your `values.yml` file. + +Using `kubectl` to create the secret directly: + +```sh +kubectl create secret generic airbyte-config-secrets \ + --from-literal=s3-access-key-id='' \ + --from-literal=s3-secret-access-key='' \ + --namespace airbyte +``` + +Ensure your access key is tied to an IAM user with the [following policies](https://docs.aws.amazon.com/AmazonS3/latest/userguide/example-policies-s3.html#iam-policy-ex0), allowing the cluster to S3 storage: + +```yaml +{ + "Version":"2012-10-17", + "Statement":[ + { + "Effect":"Allow", + "Action": "s3:ListAllMyBuckets", + "Resource":"*" + }, + { + "Effect":"Allow", + "Action":["s3:ListBucket","s3:GetBucketLocation"], + "Resource":"arn:aws:s3:::YOUR-S3-BUCKET-NAME" + }, + { + "Effect":"Allow", + "Action":[ + "s3:PutObject", + "s3:PutObjectAcl", + "s3:GetObject", + "s3:GetObjectAcl", + "s3:DeleteObject" + ], + "Resource":"arn:aws:s3:::YOUR-S3-BUCKET-NAME/*" + } + ] +} +``` + + + + +First, create a new file `gcp.json` containing the credentials JSON blob for the service account you are looking to assume. + +```yaml +apiVersion: v1 +kind: Secret +metadata: + name: gcp-cred-secrets +type: Opaque +stringData: + gcp.json: +``` + +Using `kubectl` to create the secret directly from the `gcp.json` file: + +```sh +kubectl create secret generic gcp-cred-secrets --from-file=gcp.json --namespace airbyte +``` + + + +
    + + +#### External Connector Secret Management + +Airbyte's default behavior is to store encrypted connector secrets on your cluster as Kubernetes secrets. You may opt to instead store connector secrets in an external secret manager of your choosing (AWS Secrets Manager, Google Secrets Manager or Hashicorp Vault). + +
    +Secrets for External Connector Secret Management + +To store connector secrets in AWS Secrets Manager via a manifest: + +```yaml +apiVersion: v1 +kind: Secret +metadata: + name: airbyte-config-secrets +type: Opaque +stringData: + aws-secret-manager-access-key-id: ## e.g. AKIAIOSFODNN7EXAMPLE + aws-secret-manager-secret-access-key: ## e.g. wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY +``` + +Overriding `name`, `aws-secret-manager-access-key-id` or `aws-secret-manager-secret-access-key` allows you to store these secrets in the location of your choice. If you do this, you will also need to specify the secret location in the secret manager config for your `values.yml` file. + +Alternatively, you may choose to use `kubectl` to create the secret directly: + +```sh +kubectl create secret generic airbyte-config-secrets \ + --from-literal=aws-secret-manager-access-key-id='' \ + --from-literal=aws-secret-manager-secret-access-key='' \ + --namespace airbyte +``` + +
    + + ## Installation Steps ### Step 1: Add Airbyte Helm Repository @@ -76,16 +207,16 @@ Follow these instructions to add the Airbyte helm repository: 2. Perform the repo indexing process, and ensure your helm repository is up-to-date by running `helm repo update`. 3. You can then browse all charts uploaded to your repository by running `helm search repo airbyte`. -### Step 2: Create your Helm Values File +### Step 2: Create your Enterprise License File 1. Create a new `airbyte` directory. Inside, create an empty `airbyte.yml` file. -2. Paste the following into your newly created `airbyte.yml` file. This is the minimal values file to be used to deploy Self-Managed Enterprise. +2. Paste the following into your newly created `airbyte.yml` file:
    Template airbyte.yml file -``` +```yaml webapp-url: # example: http://localhost:8080 initial-user: @@ -95,40 +226,19 @@ initial-user: username: # your existing Airbyte instance username password: # your existing Airbyte instance password -license-key: - -# Enables Self-Managed Enterprise. -# Do not make modifications to this section. - -global: - edition: "pro" - -keycloak: - enabled: true - bypassInit: false - -keycloak-setup: - enabled: true - -server: - env_vars: - API_AUTHORIZATION_ENABLED: "true" +license-key: # license key provided by Airbyte team ```
    -### Step 3: Configure your Deployment - -#### Configure User Authentication - -1. Fill in the contents of the `initial-user` block. The credentials grant an initial user with admin permissions. You should store these credentials in a secure location. +3. Fill in the contents of the `initial-user` block. The credentials grant an initial user with admin permissions. You should store these credentials in a secure location. -2. Add your Airbyte Self-Managed Enterprise license key to your `airbyte.yml` in the `license-key` field. +4. Add your Airbyte Self-Managed Enterprise license key to your `airbyte.yml` in the `license-key` field. -3. To enable SSO authentication, add [SSO auth details](/access-management/sso) to your `airbyte.yml` file. +5. To enable SSO authentication, add [SSO auth details](/access-management/sso) to your `airbyte.yml` file.
    - Configuring auth in your airbyte.yml file +Configuring auth in your airbyte.yml file @@ -149,7 +259,7 @@ See the [following guide](/access-management/sso-providers/okta) on how to colle -To configure SSO with any identity provider via [OpenID Connect (OIDC)](https://openid.net/developers/how-connect-works/), such as Azure Entra ID (formerly ActiveDirectory), add the following at the end of your `airbyte.yml` file: +To configure SSO with any identity provider via [OpenID Connect (OIDC)](https://openid.net/developers/how-connect-works/), such as Azure Entra ID (formerly ActiveDirectory), add the following at the end of your `values.yml` file: ```yaml auth: @@ -168,10 +278,23 @@ See the [following guide](/access-management/sso-providers/azure-entra-id) on ho To modify auth configurations on an existing deployment (after Airbyte has been installed at least once), you will need to `helm upgrade` Airbyte with the additional environment variable `--set keycloak-setup.env_vars.KEYCLOAK_RESET_REALM=true`. As this also resets the list of Airbyte users and permissions, please use this with caution. -To deploy Self-Managed Enterprise without SSO, exclude the entire `auth:` section from your airbyte.yml config file. You will authenticate with the instance admin user and password included in your `airbyte.yml`. Without SSO, you cannot currently have unique logins for multiple users. +To deploy Self-Managed Enterprise without SSO, exclude the entire `auth:` section from your values.yml config file. You will authenticate with the instance admin user and password included in your `airbyte.yml`. Without SSO, you cannot currently have unique logins for multiple users.
    +### Step 3: Configure your Deployment + +1. Inside your `airbyte` directory, create an empty `values.yml` file. + +2. Paste the following into your newly created `values.yml` file. This is required to deploy Airbyte Self-Managed Enterprise: + +```yml +global: + edition: enterprise +``` + +3. The following subsections help you customize your deployment to use an external database, log storage, dedicated ingress, and more. To skip this and deploy a minimal, local version of Self-Managed Enterprise, [jump to Step 4](#step-4-deploy-self-managed-enterprise). + #### Configuring the Airbyte Database For Self-Managed Enterprise deployments, we recommend using a dedicated database instance for better reliability, and backups (such as AWS RDS or GCP Cloud SQL) instead of the default internal Postgres database (`airbyte/db`) that Airbyte spins up within the Kubernetes cluster. @@ -181,7 +304,7 @@ We assume in the following that you've already configured a Postgres instance:
    External database setup steps -1. Add external database details to your `airbyte.yml` file. This disables the default internal Postgres database (`airbyte/db`), and configures the external Postgres database: +1. Add external database details to your `values.yml` file. This disables the default internal Postgres database (`airbyte/db`), and configures the external Postgres database: ```yaml postgresql: @@ -214,90 +337,53 @@ The optional `jdbcUrl` field should be entered in the following format: `jdbc:po #### Configuring External Logging -For Self-Managed Enterprise deployments, we recommend spinning up standalone log storage for additional reliability using tools such as S3 and GCS instead of against using the defaul internal Minio storage (`airbyte/minio`). It's then a common practice to configure additional log forwarding from external log storage into your observability tool. +For Self-Managed Enterprise deployments, we recommend spinning up standalone log storage for additional reliability using tools such as S3 and GCS instead of against using the default internal Minio storage (`airbyte/minio`). It's then a common practice to configure additional log forwarding from external log storage into your observability tool.
    External log storage setup steps -To do this, add external log storage details to your `airbyte.yml` file. This disables the default internal Minio instance (`airbyte/minio`), and configures the external log database: +Add external log storage details to your `values.yml` file. This disables the default internal Minio instance (`airbyte/minio`), and configures the external log database: -```yaml -minio: - enabled: false +Ensure you've already created a Kubernetes secret containing both your S3 access key ID, and secret access key. By default, secrets are expected in the `airbyte-config-secrets` Kubernetes secret, under the `aws-s3-access-key-id` and `aws-s3-secret-access-key` keys. Steps to configure these are in the above [prerequisites](#configure-kubernetes-secrets). +```yaml global: storage: type: "S3" + storageSecretName: airbyte-config-secrets # Name of your Kubernetes secret. bucket: ## S3 bucket names that you've created. We recommend storing the following all in one bucket. log: airbyte-bucket state: airbyte-bucket workloadOutput: airbyte-bucket - s3: region: "" ## e.g. us-east-1 - accessKeyExistingSecret: ## The name of an existing Kubernetes secret containing the AWS Access Key. - accessKeyExistingSecretKey: ## The Kubernetes secret key containing the AWS Access Key. - secretKeyExistingSecret: ## The name of an existing Kubernetes secret containing the AWS Secret Access Key. - secretKeyExistingSecretKey: ## The name of an existing Kubernetes secret containing the AWS Secret Access Key. + authenticationType: credentials ## Use "credentials" or "instanceProfile" ``` -Then, ensure your access key is tied to an IAM user with the [following policies](https://docs.aws.amazon.com/AmazonS3/latest/userguide/example-policies-s3.html#iam-policy-ex0), allowing the user access to S3 storage: - -```yaml -{ - "Version":"2012-10-17", - "Statement":[ - { - "Effect":"Allow", - "Action": "s3:ListAllMyBuckets", - "Resource":"*" - }, - { - "Effect":"Allow", - "Action":["s3:ListBucket","s3:GetBucketLocation"], - "Resource":"arn:aws:s3:::YOUR-S3-BUCKET-NAME" - }, - { - "Effect":"Allow", - "Action":[ - "s3:PutObject", - "s3:PutObjectAcl", - "s3:GetObject", - "s3:GetObjectAcl", - "s3:DeleteObject" - ], - "Resource":"arn:aws:s3:::YOUR-S3-BUCKET-NAME/*" - } - ] -} -``` +Set `authenticationType` to `instanceProfile` if the compute infrastructure running Airbyte has pre-existing permissions (e.g. IAM role) to read and write from the appropriate buckets. - + +Ensure you've already created a Kubernetes secret containing the credentials blob for the service account to be assumed by the cluster. By default, secrets are expected in the `gcp-cred-secrets` Kubernetes secret, under a `gcp.json` file. Steps to configure these are in the above [prerequisites](#configure-kubernetes-secrets). ```yaml -minio: - enabled: false - global: storage: type: "GCS" + storageSecretName: gcp-cred-secrets bucket: ## GCS bucket names that you've created. We recommend storing the following all in one bucket. log: airbyte-bucket state: airbyte-bucket workloadOutput: airbyte-bucket - gcs: - credentials: "" - credentialsJson: "" ## Base64 encoded json GCP credentials file contents. + projectId: + credentialsPath: /secrets/gcs-log-creds/gcp.json ``` -Note that the `credentials` and `credentialsJson` fields are mutually exclusive. -
    @@ -309,7 +395,7 @@ To access the Airbyte UI, you will need to manually attach an ingress configurat
    Ingress configuration setup steps - + ```yaml apiVersion: networking.k8s.io/v1 @@ -319,6 +405,7 @@ metadata: annotations: ingress.kubernetes.io/ssl-redirect: "false" spec: + ingressClassName: nginx rules: - host: # host, example: enterprise-demo.airbyte.com http: @@ -326,26 +413,26 @@ spec: - backend: service: # format is ${RELEASE_NAME}-airbyte-webapp-svc - name: airbyte-pro-airbyte-webapp-svc + name: airbyte-enterprise-airbyte-webapp-svc port: - number: # service port, example: 8080 + number: 80 # service port, example: 8080 path: / pathType: Prefix - backend: service: # format is ${RELEASE_NAME}-airbyte-keycloak-svc - name: airbyte-pro-airbyte-keycloak-svc + name: airbyte-enterprise-airbyte-keycloak-svc port: - number: # service port, example: 8180 + number: 8180 path: /auth pathType: Prefix - backend: service: - # format is ${RELEASE_NAME}-airbyte-api-server-svc - name: airbyte-pro-airbyte-api-server-svc + # format is ${RELEASE_NAME}-airbyte--server-svc + name: airbyte-enterprise-airbyte-server-svc port: - number: # service port, example: 8180 - path: /v1 + number: 8001 + path: /api/public pathType: Prefix ``` @@ -358,7 +445,7 @@ If you are intending on using Amazon Application Load Balancer (ALB) for ingress apiVersion: networking.k8s.io/v1 kind: Ingress metadata: - name: + name: # ingress name, e.g. enterprise-demo annotations: # Specifies that the Ingress should use an AWS ALB. kubernetes.io/ingress.class: "alb" @@ -375,30 +462,30 @@ metadata: # alb.ingress.kubernetes.io/security-groups: spec: rules: - - host: e.g. enterprise-demo.airbyte.com + - host: # e.g. enterprise-demo.airbyte.com http: paths: - backend: service: - name: airbyte-pro-airbyte-webapp-svc + name: airbyte-enterprise-airbyte-webapp-svc port: number: 80 path: / pathType: Prefix - backend: service: - name: airbyte-pro-airbyte-keycloak-svc + name: airbyte-enterprise-airbyte-keycloak-svc port: number: 8180 path: /auth pathType: Prefix - backend: service: - # format is ${RELEASE_NAME}-airbyte-api-server-svc - name: airbyte-pro-airbyte-api-server-svc + # format is ${RELEASE_NAME}-airbyte-server-svc + name: airbyte-enterprise-airbyte-server-svc port: - number: # service port, example: 8180 - path: /v1 + number: 8001 + path: /api/public pathType: Prefix ``` @@ -408,10 +495,61 @@ The ALB controller will use a `ServiceAccount` that requires the [following IAM
    -Once this is complete, ensure that the value of the `webapp-url` field in your `airbyte.yml` is configured to match the ingress URL. +Once this is complete, ensure that the value of the `webapp-url` field in your `values.yml` is configured to match the ingress URL. You may configure ingress using a load balancer or an API Gateway. We do not currently support most service meshes (such as Istio). If you are having networking issues after fully deploying Airbyte, please verify that firewalls or lacking permissions are not interfering with pod-pod communication. Please also verify that deployed pods have the right permissions to make requests to your external database. +#### Configuring External Connector Secret Management + +Airbyte's default behavior is to store encrypted connector secrets on your cluster as Kubernetes secrets. You may optionally opt to instead store connector secrets in an external secret manager such as AWS Secrets Manager, Google Secrets Manager or Hashicorp Vault. Upon creating a new connector, secrets (e.g. OAuth tokens, database passwords) will be written to, then read from the configured secrets manager. + +
    +Configuring external connector secret management + +Modifing the configuration of connector secret storage will cause all existing connectors to fail. You will need to recreate these connectors to ensure they are reading from the appropriate secret store. + + + + +If authenticating with credentials, ensure you've already created a Kubernetes secret containing both your AWS Secrets Manager access key ID, and secret access key. By default, secrets are expected in the `airbyte-config-secrets` Kubernetes secret, under the `aws-secret-manager-access-key-id` and `aws-secret-manager-secret-access-key` keys. Steps to configure these are in the above [prerequisites](#configure-kubernetes-secrets). + +```yaml +secretsManager: + type: awsSecretManager + awsSecretManager: + region: + authenticationType: credentials ## Use "credentials" or "instanceProfile" + tags: ## Optional - You may add tags to new secrets created by Airbyte. + - key: ## e.g. team + value: ## e.g. deployments + - key: business-unit + value: engineering + kms: ## Optional - ARN for KMS Decryption. +``` + +Set `authenticationType` to `instanceProfile` if the compute infrastructure running Airbyte has pre-existing permissions (e.g. IAM role) to read and write from AWS Secrets Manager. + +To decrypt secrets in the secret manager with AWS KMS, configure the `kms` field, and ensure your Kubernetes cluster has pre-existing permissions to read and decrypt secrets. + + + + +Ensure you've already created a Kubernetes secret containing the credentials blob for the service account to be assumed by the cluster. By default, secrets are expected in the `gcp-cred-secrets` Kubernetes secret, under a `gcp.json` file. Steps to configure these are in the above [prerequisites](#configure-kubernetes-secrets). For simplicity, we recommend provisioning a single service account with access to both GCS and GSM. + +```yaml +secretsManager: + type: googleSecretManager + storageSecretName: gcp-cred-secrets + googleSecretManager: + projectId: + credentialsSecretKey: gcp.json +``` + + + + +
    + ### Step 4: Deploy Self-Managed Enterprise Install Airbyte Self-Managed Enterprise on helm using the following command: @@ -419,12 +557,13 @@ Install Airbyte Self-Managed Enterprise on helm using the following command: ```sh helm install \ --namespace airbyte \ -"airbyte-enterprise" \ -"airbyte/airbyte" \ ---set-file airbyteYml="./airbyte.yml" +--values ./values.yml \ +--set-file airbyteYml="./airbyte.yml" \ +airbyte-enterprise \ +airbyte/airbyte ``` -The default release name is `airbyte-enterprise`. You can change this by modifying the above `helm upgrade` command. +To uninstall Self-Managed Enterprise, run `helm uninstall airbyte-enterprise`. ## Updating Self-Managed Enterprise @@ -436,9 +575,10 @@ Upgrade Airbyte Self-Managed Enterprise by: ```sh helm upgrade \ --namespace airbyte \ ---install "airbyte-enterprise" \ -"airbyte/airbyte" \ ---set-file airbyteYml="./airbyte.yml" +--values ./values.yml \ +--set-file airbyteYml="./airbyte.yml" \ +--install airbyte-enterprise \ +airbyte/airbyte ``` ## Customizing your Deployment @@ -450,17 +590,18 @@ After specifying your own configuration, run the following command: ```sh helm upgrade \ --namespace airbyte \ ---install "airbyte-enterprise" \ -"airbyte/airbyte" \ - --set-file airbyteYml="./airbyte.yml" \ - --values path/to/values.yaml +--values path/to/values.yaml +--values ./values.yml \ +--set-file airbyteYml="./airbyte.yml" \ +--install airbyte-enterprise \ +airbyte/airbyte ``` ### Customizing your Service Account You may choose to use your own service account instead of the Airbyte default, `airbyte-sa`. This may allow for better audit trails and resource management specific to your organizational policies and requirements. -To do this, add the following to your `airbyte.yml`: +To do this, add the following to your `values.yml`: ``` serviceAccount: diff --git a/docs/integrations/README.md b/docs/integrations/README.md index fe41578bacf54..1d9ea1a2cdf56 100644 --- a/docs/integrations/README.md +++ b/docs/integrations/README.md @@ -4,13 +4,7 @@ import ConnectorRegistry from '@site/src/components/ConnectorRegistry'; ## Connector Support Levels -Airbyte uses a two tiered system for connectors to help you understand what to expect from a connector: - -**Certified**: A certified connector is actively maintained and supported by the Airbyte team and maintains a high quality bar. It is production ready. - -**Community**: A community connector is maintained by the Airbyte community until it becomes Certified. Airbyte has over 800 code contributors and 15,000 people in the Slack community to help. The Airbyte team is continually certifying Community connectors as usage grows. As these connectors are not maintained by Airbyte, we do not offer support SLAs around them, and we encourage caution when using them in production. - -For more information about the system, see [Connector Support Levels](./connector-support-levels.md) +Airbyte uses a tiered system for connectors to help you understand what to expect from a connector, see [Connector Support Levels](./connector-support-levels.md) for details. _[View the connector registries in full](https://connectors.airbyte.com/files/generated_reports/connector_registry_report.html)_ diff --git a/docs/integrations/destinations/astra.md b/docs/integrations/destinations/astra.md index b17eb8c29e8cd..8a0a04b27121b 100644 --- a/docs/integrations/destinations/astra.md +++ b/docs/integrations/destinations/astra.md @@ -36,7 +36,9 @@ This page contains the setup guide and reference information for the destination | Incremental - Append + Deduped | Yes | | ## Changelog + | Version | Date | Pull Request | Subject | | :------ | :--------- | :------------------------------------------------------- | :-------------------------- | +| 0.1.2 | 2024-04-15 | | Moved to Poetry; Updated CDK & pytest versions| | 0.1.1 | 2024-01-26 | | DS Branding Update | | 0.1.0 | 2024-01-08 | | Initial Release | diff --git a/docs/integrations/destinations/aws-datalake.md b/docs/integrations/destinations/aws-datalake.md index d0323ea35dd84..042ab57266fa5 100644 --- a/docs/integrations/destinations/aws-datalake.md +++ b/docs/integrations/destinations/aws-datalake.md @@ -88,11 +88,12 @@ which will be translated for compatibility with the Glue Data Catalog: ## Changelog -| Version | Date | Pull Request | Subject | -| :------ | :--------- | :--------------------------------------------------------- | :--------------------------------------------------- | -| 0.1.5 | 2024-01-03 | [#33924](https://github.com/airbytehq/airbyte/pull/33924) | Add new ap-southeast-3 AWS region | -| 0.1.4 | 2023-10-25 | [\#29221](https://github.com/airbytehq/airbyte/pull/29221) | Upgrade AWSWrangler | -| 0.1.3 | 2023-03-28 | [\#24642](https://github.com/airbytehq/airbyte/pull/24642) | Prefer airbyte type for complex types when available | -| 0.1.2 | 2022-09-26 | [\#17193](https://github.com/airbytehq/airbyte/pull/17193) | Fix schema keyerror and add parquet support | -| 0.1.1 | 2022-04-20 | [\#11811](https://github.com/airbytehq/airbyte/pull/11811) | Fix name of required param in specification | -| 0.1.0 | 2022-03-29 | [\#10760](https://github.com/airbytehq/airbyte/pull/10760) | Initial release | +| Version | Date | Pull Request | Subject | +| :------ | :--------- | :--------------------------------------------------------- | :---------------------------------------------------- | +| `0.1.6` | 2024-03-22 | [#36386](https://github.com/airbytehq/airbyte/pull/36386) | Support new state message protocol | +| `0.1.5` | 2024-01-03 | [#33924](https://github.com/airbytehq/airbyte/pull/33924) | Add new ap-southeast-3 AWS region | +| `0.1.4` | 2023-10-25 | [\#29221](https://github.com/airbytehq/airbyte/pull/29221) | Upgrade AWSWrangler | +| `0.1.3` | 2023-03-28 | [\#24642](https://github.com/airbytehq/airbyte/pull/24642) | Prefer airbyte type for complex types when available | +| `0.1.2` | 2022-09-26 | [\#17193](https://github.com/airbytehq/airbyte/pull/17193) | Fix schema keyerror and add parquet support | +| `0.1.1` | 2022-04-20 | [\#11811](https://github.com/airbytehq/airbyte/pull/11811) | Fix name of required param in specification | +| `0.1.0` | 2022-03-29 | [\#10760](https://github.com/airbytehq/airbyte/pull/10760) | Initial release | diff --git a/docs/integrations/destinations/chroma.md b/docs/integrations/destinations/chroma.md index 10d65af7170a6..c3956cba832fe 100644 --- a/docs/integrations/destinations/chroma.md +++ b/docs/integrations/destinations/chroma.md @@ -76,10 +76,11 @@ You should now have all the requirements needed to configure Chroma as a destina | Version | Date | Pull Request | Subject | | :------ | :--------- | :--------------------------------------------------------- | :----------------------------------------- | -| 0.0.9 | 2023-12-11 | [33303](https://github.com/airbytehq/airbyte/pull/33303) | Fix bug with embedding special tokens | -| 0.0.8 | 2023-12-01 | [32697](https://github.com/airbytehq/airbyte/pull/32697) | Allow omitting raw text | -| 0.0.7 | 2023-11-16 | [32608](https://github.com/airbytehq/airbyte/pull/32608) | Support deleting records for CDC sources | -| 0.0.6 | 2023-11-13 | [32357](https://github.com/airbytehq/airbyte/pull/32357) | Improve spec schema | +| 0.0.10| 2024-04-15 | [#37333](https://github.com/airbytehq/airbyte/pull/37333) | Updated CDK & pytest version to fix security vulnerabilities | +| 0.0.9 | 2023-12-11 | [#33303](https://github.com/airbytehq/airbyte/pull/33303) | Fix bug with embedding special tokens | +| 0.0.8 | 2023-12-01 | [#32697](https://github.com/airbytehq/airbyte/pull/32697) | Allow omitting raw text | +| 0.0.7 | 2023-11-16 | [#32608](https://github.com/airbytehq/airbyte/pull/32608) | Support deleting records for CDC sources | +| 0.0.6 | 2023-11-13 | [#32357](https://github.com/airbytehq/airbyte/pull/32357) | Improve spec schema | | 0.0.5 | 2023-10-23 | [#31563](https://github.com/airbytehq/airbyte/pull/31563) | Add field mapping option | | 0.0.4 | 2023-10-15 | [#31329](https://github.com/airbytehq/airbyte/pull/31329) | Add OpenAI-compatible embedder option | | 0.0.3 | 2023-10-04 | [#31075](https://github.com/airbytehq/airbyte/pull/31075) | Fix OpenAI embedder batch size | diff --git a/docs/integrations/destinations/databricks.md b/docs/integrations/destinations/databricks.md index d39e64084c44e..24824938f897c 100644 --- a/docs/integrations/destinations/databricks.md +++ b/docs/integrations/destinations/databricks.md @@ -345,6 +345,7 @@ Delta Lake tables are created. You may want to consult the tutorial on | Version | Date | Pull Request | Subject | | :------ | :--------- | :------------------------------------------------------------------------------------------------------------------ | :----------------------------------------------------------------------------------------------------------------------- | +| 1.1.2 | 2024-04-04 | [#36846](https://github.com/airbytehq/airbyte/pull/36846) | (incompatible with CDK, do not use) Remove duplicate S3 Region | | 1.1.1 | 2024-01-03 | [#33924](https://github.com/airbytehq/airbyte/pull/33924) | (incompatible with CDK, do not use) Add new ap-southeast-3 AWS region | | 1.1.0 | 2023-06-02 | [\#26942](https://github.com/airbytehq/airbyte/pull/26942) | Support schema evolution | | 1.0.2 | 2023-04-20 | [\#25366](https://github.com/airbytehq/airbyte/pull/25366) | Fix default catalog to be `hive_metastore` | diff --git a/docs/integrations/destinations/duckdb.md b/docs/integrations/destinations/duckdb.md index 47691837c9896..7bd22e2bbd499 100644 --- a/docs/integrations/destinations/duckdb.md +++ b/docs/integrations/destinations/duckdb.md @@ -12,7 +12,7 @@ Local file-based DBs will not work in Airbyte Cloud or Kubernetes. Please use Mo ## Overview -[DuckDB](https://duckdb.org/) is an in-process SQL OLAP database management system and this destination is meant to use locally if you have multiple smaller sources such as GitHub repos, some social media and local CSVs or files you want to run analytics workloads on. This destination writes data to the [MotherDuck](https://motherduck.com) service, or to a file on the _local_ filesystem on the host running Airbyte. +[DuckDB](https://duckdb.org/) is an in-process SQL OLAP database management system and this destination is meant to use locally if you have multiple smaller sources such as GitHub repos, some social media and local CSVs or files you want to run analytics workloads on. This destination writes data to the [MotherDuck](https://motherduck.com) service, or to a file on the _local_ filesystem on the host running Airbyte. For file-based DBs, data is written to `/tmp/airbyte_local` by default. To change this location, modify the `LOCAL_ROOT` environment variable for Airbyte. @@ -32,7 +32,7 @@ We do not recommend providing your API token in the `md:` connection string, as ### Authenticating to MotherDuck -For authentication, you can can provide your [MotherDuck Service Credential](https://motherduck.com/docs/authenticating-to-motherduck/#syntax) as the `motherduck_api_key` configuration option. +For authentication, you can can provide your [MotherDuck Service Credential](https://motherduck.com/docs/authenticating-to-motherduck/#syntax) as the `motherduck_api_key` configuration option. ### Sync Overview @@ -106,6 +106,8 @@ Note: If you are running Airbyte on Windows with Docker backed by WSL2, you have | Version | Date | Pull Request | Subject | | :------ | :--------- | :------------------------------------------------------- | :--------------------- | +| 0.3.3 | 2024-0407 | [#36884](https://github.com/airbytehq/airbyte/pull/36884) | Fix stale dependency versions in lock file, add CLI for internal testing. | +| 0.3.2 | 2024-03-20 | [#32635](https://github.com/airbytehq/airbyte/pull/32635) | Instrument custom_user_agent to identify Airbyte-Motherduck connector usage. | | 0.3.1 | 2023-11-18 | [#32635](https://github.com/airbytehq/airbyte/pull/32635) | Upgrade DuckDB version to [`v0.9.2`](https://github.com/duckdb/duckdb/releases/tag/v0.9.2). | | 0.3.0 | 2022-10-23 | [#31744](https://github.com/airbytehq/airbyte/pull/31744) | Upgrade DuckDB version to [`v0.9.1`](https://github.com/duckdb/duckdb/releases/tag/v0.9.1). **Required update for all MotherDuck users.** Note, this is a **BREAKING CHANGE** for users who may have other connections using versions of DuckDB prior to 0.9.x. See the [0.9.0 release notes](https://github.com/duckdb/duckdb/releases/tag/v0.9.0) for more information and for upgrade instructions. | | 0.2.1 | 2022-10-20 | [#30600](https://github.com/airbytehq/airbyte/pull/30600) | Fix: schema name mapping | diff --git a/docs/integrations/destinations/e2e-test.md b/docs/integrations/destinations/e2e-test.md index 2bcc50112ebbf..f1d17327fac06 100644 --- a/docs/integrations/destinations/e2e-test.md +++ b/docs/integrations/destinations/e2e-test.md @@ -46,7 +46,9 @@ The OSS and Cloud variants have the same version number starting from version `0 | Version | Date | Pull Request | Subject | |:--------|:-----------| :------------------------------------------------------- |:----------------------------------------------------------| -| 0.3.1 | 2024-02-14 | [35278](https://github.com/airbytehq/airbyte/pull/35278) | Adopt CDK 0.20.6 | +| 0.3.3 | 2024-04-16 | [37366](https://github.com/airbytehq/airbyte/pull/37366) | Fix Log trace messages | +| 0.3.2 | 2024-02-14 | [36812](https://github.com/airbytehq/airbyte/pull/36812) | Log trace messages | +| 0.3.1 | 2024-02-14 | [35278](https://github.com/airbytehq/airbyte/pull/35278) | Adopt CDK 0.20.6 | | 0.3.0 | 2023-05-08 | [25776](https://github.com/airbytehq/airbyte/pull/25776) | Standardize spec and change property field to non-keyword | | 0.2.4 | 2022-06-17 | [13864](https://github.com/airbytehq/airbyte/pull/13864) | Updated stacktrace format for any trace message errors | | 0.2.3 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `-XX:+ExitOnOutOfMemoryError` JVM option | diff --git a/docs/integrations/destinations/iceberg.md b/docs/integrations/destinations/iceberg.md index 512e7964b6493..31e748558a70f 100644 --- a/docs/integrations/destinations/iceberg.md +++ b/docs/integrations/destinations/iceberg.md @@ -61,6 +61,7 @@ specify the target size of compacted Iceberg data file. | Version | Date | Pull Request | Subject | | :------ | :--------- | :-------------------------------------------------------- | :--------------------------------------------------------- | +| 0.1.6 | 2024-04-04 | [#36846](https://github.com/airbytehq/airbyte/pull/36846) | Remove duplicate S3 Region | | 0.1.5 | 2024-01-03 | [#33924](https://github.com/airbytehq/airbyte/pull/33924) | Add new ap-southeast-3 AWS region | | 0.1.4 | 2023-07-20 | [28506](https://github.com/airbytehq/airbyte/pull/28506) | Support server-managed storage config | | 0.1.3 | 2023-07-12 | [28158](https://github.com/airbytehq/airbyte/pull/28158) | Bump Iceberg library to 1.3.0 and add REST catalog support | diff --git a/docs/integrations/destinations/milvus.md b/docs/integrations/destinations/milvus.md index d2cff5caf09bc..0af64809cdda7 100644 --- a/docs/integrations/destinations/milvus.md +++ b/docs/integrations/destinations/milvus.md @@ -109,13 +109,15 @@ vector_store.similarity_search("test") | Version | Date | Pull Request | Subject | |:--------| :--------- |:--------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------| -| 0.0.12 | 2023-12-11 | [33303](https://github.com/airbytehq/airbyte/pull/33303) | Fix bug with embedding special tokens | -| 0.0.11 | 2023-12-01 | [32697](https://github.com/airbytehq/airbyte/pull/32697) | Allow omitting raw text | -| 0.0.10 | 2023-11-16 | [32608](https://github.com/airbytehq/airbyte/pull/32608) | Support deleting records for CDC sources | -| 0.0.9 | 2023-11-13 | [32357](https://github.com/airbytehq/airbyte/pull/32357) | Improve spec schema | +| 0.0.14 | 2024-3-22 | [#37333](https://github.com/airbytehq/airbyte/pull/37333) | Update CDK & pytest version to fix security vulnerabilities | +| 0.0.13 | 2024-3-22 | [#35911](https://github.com/airbytehq/airbyte/pull/35911) | Move to poetry; Fix tests | +| 0.0.12 | 2023-12-11 | [#33303](https://github.com/airbytehq/airbyte/pull/33303) | Fix bug with embedding special tokens | +| 0.0.11 | 2023-12-01 | [#32697](https://github.com/airbytehq/airbyte/pull/32697) | Allow omitting raw text | +| 0.0.10 | 2023-11-16 | [#32608](https://github.com/airbytehq/airbyte/pull/32608) | Support deleting records for CDC sources | +| 0.0.9 | 2023-11-13 | [#32357](https://github.com/airbytehq/airbyte/pull/32357) | Improve spec schema | | 0.0.8 | 2023-11-08 | [#31563](https://github.com/airbytehq/airbyte/pull/32262) | Auto-create collection if it doesn't exist | | 0.0.7 | 2023-10-23 | [#31563](https://github.com/airbytehq/airbyte/pull/31563) | Add field mapping option | -| 0.0.6 | 2023-10-19 | [31599](https://github.com/airbytehq/airbyte/pull/31599) | Base image migration: remove Dockerfile and use the python-connector-base image | +| 0.0.6 | 2023-10-19 | [#31599](https://github.com/airbytehq/airbyte/pull/31599) | Base image migration: remove Dockerfile and use the python-connector-base image | | 0.0.5 | 2023-10-15 | [#31329](https://github.com/airbytehq/airbyte/pull/31329) | Add OpenAI-compatible embedder option | | 0.0.4 | 2023-10-04 | [#31075](https://github.com/airbytehq/airbyte/pull/31075) | Fix OpenAI embedder batch size | | 0.0.3 | 2023-09-29 | [#30820](https://github.com/airbytehq/airbyte/pull/30820) | Update CDK | diff --git a/docs/integrations/destinations/mysql.md b/docs/integrations/destinations/mysql.md index 2a19352e8ad79..6e43c134282fb 100644 --- a/docs/integrations/destinations/mysql.md +++ b/docs/integrations/destinations/mysql.md @@ -116,6 +116,7 @@ Using this feature requires additional configuration, when creating the destinat | Version | Date | Pull Request | Subject | | :------ | :--------- | :------------------------------------------------------- | :-------------------------------------------------------------------------------------------------- | +| 0.3.1 | 2024-04-12 | [36926](https://github.com/airbytehq/airbyte/pull/36926) | Upgrade to Kotlin CDK | | 0.3.0 | 2023-12-18 | [33468](https://github.com/airbytehq/airbyte/pull/33468) | Upgrade to latest Java CDK | | 0.2.0 | 2023-06-27 | [27781](https://github.com/airbytehq/airbyte/pull/27781) | License Update: Elv2 | | 0.1.21 | 2022-09-14 | [15668](https://github.com/airbytehq/airbyte/pull/15668) | Wrap logs in AirbyteLogMessage | diff --git a/docs/integrations/destinations/oracle-migrations.md b/docs/integrations/destinations/oracle-migrations.md new file mode 100644 index 0000000000000..212006e46b5a8 --- /dev/null +++ b/docs/integrations/destinations/oracle-migrations.md @@ -0,0 +1,65 @@ +# Oracle Migration Guide + +## Upgrading to 1.0.0 + +This version removes the option to use "normalization" with Oracle. It also changes +the schema and database of Airbyte's "raw" tables to be compatible with the new +[Destinations V2](https://docs.airbyte.com/release_notes/upgrading_to_destinations_v2/#what-is-destinations-v2) +format. These changes will likely require updates to downstream dbt / SQL models. After this update, +Airbyte will only produce the "raw" v2 tables, which store all content in JSON. These changes remove +the ability to do deduplicated syncs with Oracle. + +If you are interested in the Oracle destination gaining the full features +of Destinations V2 (including final tables), click [[https://github.com/airbytehq/airbyte/discussions/37024]] +to register your interest. + +This upgrade will ignore any existing raw tables and will not migrate any data to the new schema. +For each stream, you should perform the following query to migrate your data from the old raw table to the new raw table: + +```sql +-- assumes your database was 'PUBLIC' +-- replace `{{stream_name}}` with replace your stream name + +CREATE TABLE airbyte_internal.public_raw__stream_{{stream_name}} +( + _airbyte_raw_id VARCHAR(64) PRIMARY KEY, + _airbyte_data JSON, + _airbyte_extracted_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, + _airbyte_loaded_at TIMESTAMP WITH TIME ZONE DEFAULT NULL, + _airbyte_meta JSON +); + +INSERT INTO airbyte_internal.default_raw__stream_{{stream_name}} + SELECT + _airbyte_ab_id AS "_airbyte_raw_id", + _airbyte_data AS "_airbyte_data" + _airbyte_emitted_at AS "_airbyte_extracted_at", + NULL AS "_airbyte_loaded_at", + NULL AS "_airbyte_meta", + FROM default._airbyte_raw_{{stream_name}}; +``` + +**Airbyte will not delete any of your v1 data.** + +### Database/Schema and the Internal Schema +We have split the raw and final tables into their own schemas, which means that +we will only write into the raw tables which will live in the `airbyte_internal` schema. +The tables written into this schema will be prefixed with either the default schema provided in +the `Default Schema` field when configuring Oracle (but can also be overridden in the connection). You can +change the "raw" database from the default `airbyte_internal` by supplying a value for +`Raw Table Schema Name`. + +For Example: + +- Default Schema: `default` +- Stream Name: `my_stream` + +Writes to `airbyte_intneral.default_raw__stream_my_stream` + +Where as: + +- Default Schema: `default` +- Stream Name: `my_stream` +- Raw Table Schema Name: `raw_data` + +Writes to `raw_data.default_raw__stream_my_stream` diff --git a/docs/integrations/destinations/oracle.md b/docs/integrations/destinations/oracle.md index d2e9867eb04a4..4d6e43f6daa14 100644 --- a/docs/integrations/destinations/oracle.md +++ b/docs/integrations/destinations/oracle.md @@ -91,7 +91,8 @@ Airbyte has the ability to connect to the Oracle source with 3 network connectiv ## Changelog | Version | Date | Pull Request | Subject | -| :---------- | :--------- | :--------------------------------------------------------- | :-------------------------------------------------------------------------------------------------- | +|:------------|:-----------|:-----------------------------------------------------------|:----------------------------------------------------------------------------------------------------| +| 1.0.0 | 2024-04-11 | [\#36048](https://github.com/airbytehq/airbyte/pull/36048) | Removes Normalization, updates to V2 Raw Table Format | | 0.2.0 | 2023-06-27 | [\#27781](https://github.com/airbytehq/airbyte/pull/27781) | License Update: Elv2 | | 0.1.19 | 2022-07-26 | [\#10719](https://github.com/airbytehq/airbyte/pull/) | Destination Oracle: added custom JDBC parameters support. | | 0.1.18 | 2022-07-14 | [\#14618](https://github.com/airbytehq/airbyte/pull/14618) | Removed additionalProperties: false from JDBC destination connectors | diff --git a/docs/integrations/destinations/pinecone.md b/docs/integrations/destinations/pinecone.md index 51dae798ab8e1..99cbf6e3f0c4b 100644 --- a/docs/integrations/destinations/pinecone.md +++ b/docs/integrations/destinations/pinecone.md @@ -60,13 +60,13 @@ The connector can use one of the following embedding methods: 1. OpenAI - using [OpenAI API](https://beta.openai.com/docs/api-reference/text-embedding) , the connector will produce embeddings using the `text-embedding-ada-002` model with **1536 dimensions**. This integration will be constrained by the [speed of the OpenAI embedding API](https://platform.openai.com/docs/guides/rate-limits/overview). -2. Cohere - using the [Cohere API](https://docs.cohere.com/reference/embed), the connector will produce embeddings using the `embed-english-light-v2.0` model with **1024 dimensions**. +2. Cohere - using the [Cohere API](https://docs.cohere.com/reference/embed), the connector will produce embeddings using the `embed-english-light-v2.0` model with **1024 dimensions**. For testing purposes, it's also possible to use the [Fake embeddings](https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/fake) integration. It will generate random embeddings and is suitable to test a data pipeline without incurring embedding costs. ### Indexing -To get started, use the [Pinecone web UI or API](https://docs.pinecone.io/docs/quickstart) to create a project and an index before running the destination. All streams will be indexed into the same index, the `_ab_stream` metadata field is used to distinguish between streams. Overall, the size of the metadata fields is limited to 30KB per document. +To get started, use the [Pinecone web UI or API](https://docs.pinecone.io/docs/quickstart) to create a project and an index before running the destination. All streams will be indexed into the same index, the `_ab_stream` metadata field is used to distinguish between streams. Overall, the size of the metadata fields is limited to 30KB per document. OpenAI and Fake embeddings produce vectors with 1536 dimensions, and the Cohere embeddings produce vectors with 1024 dimensions. Make sure to configure the index accordingly. @@ -74,25 +74,27 @@ OpenAI and Fake embeddings produce vectors with 1536 dimensions, and the Cohere | Version | Date | Pull Request | Subject | |:--------| :--------- |:--------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------| -| 0.0.22 | 2023-12-11 | [33303](https://github.com/airbytehq/airbyte/pull/33303) | Fix bug with embedding special tokens | -| 0.0.21 | 2023-12-01 | [32697](https://github.com/airbytehq/airbyte/pull/32697) | Allow omitting raw text | -| 0.0.20 | 2023-11-13 | [32357](https://github.com/airbytehq/airbyte/pull/32357) | Improve spec schema | +| 0.0.24 | 2023-04-15 | [#37333](https://github.com/airbytehq/airbyte/pull/37333) | Update CDK & pytest version to fix security vulnerabilities. | +| 0.0.23 | 2023-03-22 | [#35911](https://github.com/airbytehq/airbyte/pull/35911) | Bump versions to latest, resolves test failures. | +| 0.0.22 | 2023-12-11 | [#33303](https://github.com/airbytehq/airbyte/pull/33303) | Fix bug with embedding special tokens | +| 0.0.21 | 2023-12-01 | [#32697](https://github.com/airbytehq/airbyte/pull/32697) | Allow omitting raw text | +| 0.0.20 | 2023-11-13 | [#32357](https://github.com/airbytehq/airbyte/pull/32357) | Improve spec schema | | 0.0.19 | 2023-10-20 | [#31329](https://github.com/airbytehq/airbyte/pull/31373) | Improve error messages | | 0.0.18 | 2023-10-20 | [#31329](https://github.com/airbytehq/airbyte/pull/31373) | Add support for namespaces and fix index cleaning when namespace is defined | -| 0.0.17 | 2023-10-19 | [31599](https://github.com/airbytehq/airbyte/pull/31599) | Base image migration: remove Dockerfile and use the python-connector-base image | +| 0.0.17 | 2023-10-19 | [#31599](https://github.com/airbytehq/airbyte/pull/31599) | Base image migration: remove Dockerfile and use the python-connector-base image | | 0.0.16 | 2023-10-15 | [#31329](https://github.com/airbytehq/airbyte/pull/31329) | Add OpenAI-compatible embedder option | | 0.0.15 | 2023-10-04 | [#31075](https://github.com/airbytehq/airbyte/pull/31075) | Fix OpenAI embedder batch size | -| 0.0.14 | 2023-09-29 | [#30820](https://github.com/airbytehq/airbyte/pull/30820) | Update CDK | -| 0.0.13 | 2023-09-26 | [#30649](https://github.com/airbytehq/airbyte/pull/30649) | Allow more text splitting options | -| 0.0.12 | 2023-09-25 | [#30649](https://github.com/airbytehq/airbyte/pull/30649) | Fix bug with stale documents left on starter pods | -| 0.0.11 | 2023-09-22 | [#30649](https://github.com/airbytehq/airbyte/pull/30649) | Set visible certified flag | -| 0.0.10 | 2023-09-20 | [#30514](https://github.com/airbytehq/airbyte/pull/30514) | Fix bug with failing embedding step on large records | -| 0.0.9 | 2023-09-18 | [#30510](https://github.com/airbytehq/airbyte/pull/30510) | Fix bug with overwrite mode on starter pods | -| 0.0.8 | 2023-09-14 | [#30296](https://github.com/airbytehq/airbyte/pull/30296) | Add Azure embedder | -| 0.0.7 | 2023-09-13 | [#30382](https://github.com/airbytehq/airbyte/pull/30382) | Promote to certified/beta | -| 0.0.6 | 2023-09-09 | [#30193](https://github.com/airbytehq/airbyte/pull/30193) | Improve documentation | -| 0.0.5 | 2023-09-07 | [#30133](https://github.com/airbytehq/airbyte/pull/30133) | Refactor internal structure of connector | -| 0.0.4 | 2023-09-05 | [#30086](https://github.com/airbytehq/airbyte/pull/30079) | Switch to GRPC client for improved performance. | -| 0.0.3 | 2023-09-01 | [#30079](https://github.com/airbytehq/airbyte/pull/30079) | Fix bug with potential data loss on append+dedup syncing. 🚨 Streams using append+dedup mode need to be reset after upgrade. | -| 0.0.2 | 2023-08-31 | [#29442](https://github.com/airbytehq/airbyte/pull/29946) | Improve test coverage | -| 0.0.1 | 2023-08-29 | [#29539](https://github.com/airbytehq/airbyte/pull/29539) | Pinecone connector with some embedders | \ No newline at end of file +| 0.0.14 | 2023-09-29 | [#30820](https://github.com/airbytehq/airbyte/pull/30820) | Update CDK | +| 0.0.13 | 2023-09-26 | [#30649](https://github.com/airbytehq/airbyte/pull/30649) | Allow more text splitting options | +| 0.0.12 | 2023-09-25 | [#30649](https://github.com/airbytehq/airbyte/pull/30649) | Fix bug with stale documents left on starter pods | +| 0.0.11 | 2023-09-22 | [#30649](https://github.com/airbytehq/airbyte/pull/30649) | Set visible certified flag | +| 0.0.10 | 2023-09-20 | [#30514](https://github.com/airbytehq/airbyte/pull/30514) | Fix bug with failing embedding step on large records | +| 0.0.9 | 2023-09-18 | [#30510](https://github.com/airbytehq/airbyte/pull/30510) | Fix bug with overwrite mode on starter pods | +| 0.0.8 | 2023-09-14 | [#30296](https://github.com/airbytehq/airbyte/pull/30296) | Add Azure embedder | +| 0.0.7 | 2023-09-13 | [#30382](https://github.com/airbytehq/airbyte/pull/30382) | Promote to certified/beta | +| 0.0.6 | 2023-09-09 | [#30193](https://github.com/airbytehq/airbyte/pull/30193) | Improve documentation | +| 0.0.5 | 2023-09-07 | [#30133](https://github.com/airbytehq/airbyte/pull/30133) | Refactor internal structure of connector | +| 0.0.4 | 2023-09-05 | [#30086](https://github.com/airbytehq/airbyte/pull/30079) | Switch to GRPC client for improved performance. | +| 0.0.3 | 2023-09-01 | [#30079](https://github.com/airbytehq/airbyte/pull/30079) | Fix bug with potential data loss on append+dedup syncing. 🚨 Streams using append+dedup mode need to be reset after upgrade. | +| 0.0.2 | 2023-08-31 | [#29442](https://github.com/airbytehq/airbyte/pull/29946) | Improve test coverage | +| 0.0.1 | 2023-08-29 | [#29539](https://github.com/airbytehq/airbyte/pull/29539) | Pinecone connector with some embedders | diff --git a/docs/integrations/destinations/postgres-migrations.md b/docs/integrations/destinations/postgres-migrations.md index 7e9d1a5ba3ddf..5c6375c6f91cb 100644 --- a/docs/integrations/destinations/postgres-migrations.md +++ b/docs/integrations/destinations/postgres-migrations.md @@ -10,5 +10,6 @@ Worthy of specific mention, this version includes: - Clearer table structure - Removal of sub-tables for nested properties - Removal of SCD tables +- Preserving [upper case column names](https://docs.airbyte.com/release_notes/upgrading_to_destinations_v2/#destinations-v2-implementation-differences) Learn more about what's new in Destinations V2 [here](/using-airbyte/core-concepts/typing-deduping). diff --git a/docs/integrations/destinations/postgres.md b/docs/integrations/destinations/postgres.md index dd680839b959e..d6d2add6a68ec 100644 --- a/docs/integrations/destinations/postgres.md +++ b/docs/integrations/destinations/postgres.md @@ -3,7 +3,20 @@ This page guides you through the process of setting up the Postgres destination connector. :::caution -Postgres, while an excellent relational database, is not a data warehouse. Postgres is likely to perform poorly with large data volumes. Even postgres-compatible destinations (e.g. AWS Aurora) are not immune to slowdowns when dealing with large writes or updates over ~500GB. Especially when using normalization with `destination-postgres`, be sure to monitor your database's memory and CPU usage during your syncs. It is possible for your destination to 'lock up', and incur high usage costs with large sync volumes. + +Postgres, while an excellent relational database, is not a data warehouse. + +1. Postgres is likely to perform poorly with large data volumes. Even postgres-compatible + destinations (e.g. AWS Aurora) are not immune to slowdowns when dealing with large writes or + updates over ~500GB. Especially when using normalization with `destination-postgres`, be sure to + monitor your database's memory and CPU usage during your syncs. It is possible for your + destination to 'lock up', and incur high usage costs with large sync volumes. +2. Postgres column [name length limitations](https://www.postgresql.org/docs/current/limits.html) + are likely to cause collisions when used as a destination receiving data from highly-nested and + flattened sources, e.g. `{63 byte name}_a` and `{63 byte name}_b` will both be truncated to + `{63 byte name}` which causes postgres to throw an error that a duplicate column name was + specified. This limit is applicable to table names too. + ::: ## Prerequisites @@ -18,11 +31,15 @@ used by default. Other than that, you can proceed with the open-source instructi You'll need the following information to configure the Postgres destination: - **Host** - The host name of the server. -- **Port** - The port number the server is listening on. Defaults to the PostgreSQL™ standard port number (5432). +- **Port** - The port number the server is listening on. Defaults to the PostgreSQL™ standard port + number (5432). - **Username** - **Password** -- **Default Schema Name** - Specify the schema (or several schemas separated by commas) to be set in the search-path. These schemas will be used to resolve unqualified object names used in statements executed over this connection. -- **Database** - The database name. The default is to connect to a database with the same name as the user name. +- **Default Schema Name** - Specify the schema (or several schemas separated by commas) to be set in + the search-path. These schemas will be used to resolve unqualified object names used in statements + executed over this connection. +- **Database** - The database name. The default is to connect to a database with the same name as + the user name. - **JDBC URL Params** (optional) [Refer to this guide for more details](https://jdbc.postgresql.org/documentation/use/#connecting-to-the-database) @@ -59,17 +76,18 @@ synced data from Airbyte. ## Naming Conventions -From [Postgres SQL Identifiers syntax](https://www.postgresql.org/docs/9.0/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS): +From +[Postgres SQL Identifiers syntax](https://www.postgresql.org/docs/9.0/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS): - SQL identifiers and key words must begin with a letter \(a-z, but also letters with diacritical marks and non-Latin letters\) or an underscore \(\_\). - Subsequent characters in an identifier or key word can be letters, underscores, digits \(0-9\), or dollar signs \($\). - Note that dollar signs are not allowed in identifiers according to the SQL standard, - so their use might render applications less portable. The SQL standard will not define a key word - that contains digits or starts or ends with an underscore, so identifiers of this form are safe - against possible conflict with future extensions of the standard. + Note that dollar signs are not allowed in identifiers according to the SQL standard, so their use + might render applications less portable. The SQL standard will not define a key word that contains + digits or starts or ends with an underscore, so identifiers of this form are safe against possible + conflict with future extensions of the standard. - The system uses no more than NAMEDATALEN-1 bytes of an identifier; longer names can be written in commands, but they will be truncated. By default, NAMEDATALEN is 64 so the maximum identifier @@ -80,61 +98,85 @@ From [Postgres SQL Identifiers syntax](https://www.postgresql.org/docs/9.0/sql-s still applies. - Quoting an identifier also makes it case-sensitive, whereas unquoted names are always folded to lower case. -- In order to make your applications portable and less error-prone, use consistent quoting with each name (either always quote it or never quote it). +- In order to make your applications portable and less error-prone, use consistent quoting with each + name (either always quote it or never quote it). :::info -Airbyte Postgres destination will create raw tables and schemas using the Unquoted -identifiers by replacing any special characters with an underscore. All final tables and their corresponding -columns are created using Quoted identifiers preserving the case sensitivity. +Airbyte Postgres destination will create raw tables and schemas using the Unquoted identifiers by +replacing any special characters with an underscore. All final tables and their corresponding +columns are created using Quoted identifiers preserving the case sensitivity. Special characters in final +tables are replaced with underscores. ::: **For Airbyte Cloud:** 1. [Log into your Airbyte Cloud](https://cloud.airbyte.com/workspaces) account. -2. In the left navigation bar, click **Destinations**. In the top-right corner, click **new destination**. -3. On the Set up the destination page, enter the name for the Postgres connector - and select **Postgres** from the Destination type dropdown. +2. In the left navigation bar, click **Destinations**. In the top-right corner, click **new + destination**. +3. On the Set up the destination page, enter the name for the Postgres connector and select + **Postgres** from the Destination type dropdown. 4. Enter a name for your source. -5. For the **Host**, **Port**, and **DB Name**, enter the hostname, port number, and name for your Postgres database. +5. For the **Host**, **Port**, and **DB Name**, enter the hostname, port number, and name for your + Postgres database. 6. List the **Default Schemas**. - :::note - The schema names are case sensitive. The 'public' schema is set by default. Multiple schemas may be used at one time. No schemas set explicitly - will sync all of existing. - ::: -7. For **User** and **Password**, enter the username and password you created in [Step 1](#step-1-optional-create-a-dedicated-read-only-user). -8. For Airbyte Open Source, toggle the switch to connect using SSL. For Airbyte Cloud uses SSL by default. + +:::note + +The schema names are case sensitive. The 'public' schema is set by default. Multiple schemas may be +used at one time. No schemas set explicitly - will sync all of existing. + +::: + +7. For **User** and **Password**, enter the username and password you created in + [Step 1](#step-1-optional-create-a-dedicated-read-only-user). +8. For Airbyte Open Source, toggle the switch to connect using SSL. For Airbyte Cloud uses SSL by + default. 9. For SSL Modes, select: - **disable** to disable encrypted communication between Airbyte and the source - **allow** to enable encrypted communication only when required by the source - **prefer** to allow unencrypted communication only when the source doesn't support encryption - - **require** to always require encryption. Note: The connection will fail if the source doesn't support encryption. - - **verify-ca** to always require encryption and verify that the source has a valid SSL certificate + - **require** to always require encryption. Note: The connection will fail if the source doesn't + support encryption. + - **verify-ca** to always require encryption and verify that the source has a valid SSL + certificate - **verify-full** to always require encryption and verify the identity of the source -10. To customize the JDBC connection beyond common options, specify additional supported [JDBC URL parameters](https://jdbc.postgresql.org/documentation/head/connect.html) as key-value pairs separated by the symbol & in the **JDBC URL Parameters (Advanced)** field. +10. To customize the JDBC connection beyond common options, specify additional supported + [JDBC URL parameters](https://jdbc.postgresql.org/documentation/head/connect.html) as key-value + pairs separated by the symbol & in the **JDBC URL Parameters (Advanced)** field. Example: key1=value1&key2=value2&key3=value3 - These parameters will be added at the end of the JDBC URL that the AirByte will use to connect to your Postgres database. + These parameters will be added at the end of the JDBC URL that the AirByte will use to connect + to your Postgres database. + + The connector now supports `connectTimeout` and defaults to 60 seconds. Setting connectTimeout + to 0 seconds will set the timeout to the longest time available. + + **Note:** Do not use the following keys in JDBC URL Params field as they will be overwritten by + Airbyte: `currentSchema`, `user`, `password`, `ssl`, and `sslmode`. - The connector now supports `connectTimeout` and defaults to 60 seconds. Setting connectTimeout to 0 seconds will set the timeout to the longest time available. +:::warning - **Note:** Do not use the following keys in JDBC URL Params field as they will be overwritten by Airbyte: - `currentSchema`, `user`, `password`, `ssl`, and `sslmode`. +This is an advanced configuration option. Users are advised to use it with caution. - :::warning - This is an advanced configuration option. Users are advised to use it with caution. - ::: +::: 11. For SSH Tunnel Method, select: - **No Tunnel** for a direct connection to the database - - **SSH Key Authentication** to use an RSA Private as your secret for establishing the SSH tunnel + - **SSH Key Authentication** to use an RSA Private as your secret for establishing the SSH + tunnel - **Password Authentication** to use a password as your secret for establishing the SSH tunnel - :::warning - Since Airbyte Cloud requires encrypted communication, select **SSH Key Authentication** or **Password Authentication** if you selected **disable**, **allow**, or **prefer** as the **SSL Mode**; otherwise, the connection will fail. - ::: +:::warning + +Since Airbyte Cloud requires encrypted communication, select **SSH Key Authentication** or +**Password Authentication** if you selected **disable**, **allow**, or **prefer** as the **SSL +Mode**; otherwise, the connection will fail. + +::: 12. Click **Set up destination**. @@ -154,22 +196,23 @@ following[ sync modes](https://docs.airbyte.com/cloud/core-concepts#connection-s ### Output Schema (Raw Tables) -Each stream will be mapped to a separate raw table in Postgres. The default schema in which the raw tables are -created is `airbyte_internal`. This can be overridden in the configuration. -Each table will contain 3 columns: +Each stream will be mapped to a separate raw table in Postgres. The default schema in which the raw +tables are created is `airbyte_internal`. This can be overridden in the configuration. Each table +will contain 3 columns: - `_airbyte_raw_id`: a uuid assigned by Airbyte to each event that is processed. The column type in Postgres is `VARCHAR`. - `_airbyte_extracted_at`: a timestamp representing when the event was pulled from the data source. The column type in Postgres is `TIMESTAMP WITH TIME ZONE`. -- `_airbyte_loaded_at`: a timestamp representing when the row was processed into final table. - The column type in Postgres is `TIMESTAMP WITH TIME ZONE`. -- `_airbyte_data`: a json blob representing with the event data. The column type in Postgres - is `JSONB`. +- `_airbyte_loaded_at`: a timestamp representing when the row was processed into final table. The + column type in Postgres is `TIMESTAMP WITH TIME ZONE`. +- `_airbyte_data`: a json blob representing with the event data. The column type in Postgres is + `JSONB`. ### Final Tables Data type mapping + | Airbyte Type | Postgres Type | -|:---------------------------|:-------------------------| +| :------------------------- | :----------------------- | | string | VARCHAR | | number | DECIMAL | | integer | BIGINT | @@ -182,6 +225,33 @@ Each table will contain 3 columns: | time_without_timezone | TIME | | date | DATE | +### Naming limitations + +Postgres restricts all identifiers to 63 characters or less. If your stream includes column names +longer than 63 characters, they will be truncated to this length. If this results in two columns +having the same name, Airbyte may modify these column names to avoid the collision. + +## Creating dependent objects + +:::caution + +This section involves running `DROP ... CASCADE` on the tables that Airbyte produces. Make sure you +fully understand the consequences before enabling this option. **Permanent** data loss is possible +with this option! + +::: + +You may want to create objects that depend on the tables generated by Airbyte, such as views. If you +do so, we strongly recommend: +* Using a tool like `dbt` to automate the creation +* And using an orchestrator to trigger `dbt`. + +This is because you will need to enable the "Drop tables with CASCADE" option. The connector +sometimes needs to recreate the tables; if you have created dependent objects, Postgres will require +the connector to run drop statements with CASCADE enabled. However, this will cause the connector to +**also drop the dependent objects**. Therefore, you MUST have a way to recreate those dependent +objects from scratch. + ## Tutorials Now that you have set up the Postgres destination connector, check out the following tutorials: @@ -191,37 +261,42 @@ Now that you have set up the Postgres destination connector, check out the follo ## Changelog -| Version | Date | Pull Request | Subject | -|:--------|:-----------|:-----------------------------------------------------------|:----------------------------------------------------------------------------------------------------| -| 2.0.4 | 2024-03-07 | [\#35899](https://github.com/airbytehq/airbyte/pull/35899) | Adopt CDK 0.23.18; Null safety check in state parsing | -| 2.0.3 | 2024-03-01 | [\#35528](https://github.com/airbytehq/airbyte/pull/35528) | Adopt CDK 0.23.11; Use Migration framework | -| 2.0.2 | 2024-03-01 | [\#35760](https://github.com/airbytehq/airbyte/pull/35760) | Mark as certified, add PSQL exception to deinterpolator | -| 2.0.1 | 2024-02-22 | [\#35385](https://github.com/airbytehq/airbyte/pull/35385) | Upgrade CDK to 0.23.0; Gathering required initial state upfront | -| 2.0.0 | 2024-02-09 | [\#35042](https://github.com/airbytehq/airbyte/pull/35042) | GA release V2 destinations format. | -| 0.6.3 | 2024-02-06 | [\#34891](https://github.com/airbytehq/airbyte/pull/34891) | Remove varchar limit, use system defaults | -| 0.6.2 | 2024-01-30 | [\#34683](https://github.com/airbytehq/airbyte/pull/34683) | CDK Upgrade 0.16.3; Fix dependency mismatches in slf4j lib | -| 0.6.1 | 2024-01-29 | [\#34630](https://github.com/airbytehq/airbyte/pull/34630) | CDK Upgrade; Use lowercase raw table in T+D queries. | -| 0.6.0 | 2024-01-19 | [\#34372](https://github.com/airbytehq/airbyte/pull/34372) | Add dv2 flag in spec | -| 0.5.5 | 2024-01-18 | [\#34236](https://github.com/airbytehq/airbyte/pull/34236) | Upgrade CDK to 0.13.1; Add indexes in raw table for query optimization | -| 0.5.4 | 2024-01-11 | [\#34177](https://github.com/airbytehq/airbyte/pull/34177) | Add code for DV2 beta (no user-visible changes) | -| 0.5.3 | 2024-01-10 | [\#34135](https://github.com/airbytehq/airbyte/pull/34135) | Use published CDK missed in previous release | -| 0.5.2 | 2024-01-08 | [\#33875](https://github.com/airbytehq/airbyte/pull/33875) | Update CDK to get Tunnel heartbeats feature | -| 0.5.1 | 2024-01-04 | [\#33873](https://github.com/airbytehq/airbyte/pull/33873) | Install normalization to enable DV2 beta | -| 0.5.0 | 2023-12-18 | [\#33507](https://github.com/airbytehq/airbyte/pull/33507) | Upgrade to latest CDK; Fix DATs and tests | -| 0.4.0 | 2023-06-27 | [\#27781](https://github.com/airbytehq/airbyte/pull/27781) | License Update: Elv2 | -| 0.3.27 | 2023-04-04 | [\#24604](https://github.com/airbytehq/airbyte/pull/24604) | Support for destination checkpointing | -| 0.3.26 | 2022-09-27 | [\#17299](https://github.com/airbytehq/airbyte/pull/17299) | Improve error handling for strict-encrypt postgres destination | -| 0.3.24 | 2022-09-08 | [\#16046](https://github.com/airbytehq/airbyte/pull/16046) | Fix missing database name URL Encoding | -| 0.3.23 | 2022-07-18 | [\#16260](https://github.com/airbytehq/airbyte/pull/16260) | Prevent traffic going on an unsecured channel in strict-encryption version of destination postgres | -| 0.3.22 | 2022-07-18 | [\#13840](https://github.com/airbytehq/airbyte/pull/13840) | Added the ability to connect using different SSL modes and SSL certificates | -| 0.3.21 | 2022-07-06 | [\#14479](https://github.com/airbytehq/airbyte/pull/14479) | Publish amd64 and arm64 versions of the connector | -| 0.3.20 | 2022-05-17 | [\#12820](https://github.com/airbytehq/airbyte/pull/12820) | Improved 'check' operation performance | -| 0.3.19 | 2022-04-25 | [\#12195](https://github.com/airbytehq/airbyte/pull/12195) | Add support for additional JDBC URL Params input | -| 0.3.18 | 2022-04-12 | [\#11729](https://github.com/airbytehq/airbyte/pull/11514) | Bump mina-sshd from 2.7.0 to 2.8.0 | -| 0.3.17 | 2022-04-05 | [\#11729](https://github.com/airbytehq/airbyte/pull/11729) | Fixed bug with dashes in schema name | -| 0.3.15 | 2022-02-25 | [\#10421](https://github.com/airbytehq/airbyte/pull/10421) | Refactor JDBC parameters handling | -| 0.3.14 | 2022-02-14 | [\#10256](https://github.com/airbytehq/airbyte/pull/10256) | (unpublished) Add `-XX:+ExitOnOutOfMemoryError` JVM option | -| 0.3.13 | 2021-12-01 | [\#8371](https://github.com/airbytehq/airbyte/pull/8371) | Fixed incorrect handling "\n" in ssh key | -| 0.3.12 | 2021-11-08 | [\#7719](https://github.com/airbytehq/airbyte/pull/7719) | Improve handling of wide rows by buffering records based on their byte size rather than their count | -| 0.3.11 | 2021-09-07 | [\#5743](https://github.com/airbytehq/airbyte/pull/5743) | Add SSH Tunnel support | -| 0.3.10 | 2021-08-11 | [\#5336](https://github.com/airbytehq/airbyte/pull/5336) | Destination Postgres: fix \u0000\(NULL\) value processing | +| Version | Date | Pull Request | Subject | +|:--------|:-----------|:-----------------------------------------------------------|:---------------------------------------------------------------------------------------------------------| +| 2.0.9 | 2024-04-11 | [\#36974](https://github.com/airbytehq/airbyte/pull/36974) | Add option to drop with `CASCADE` | +| 2.0.8 | 2024-04-10 | [\#36805](https://github.com/airbytehq/airbyte/pull/36805) | Adopt CDK 0.29.10 to improve long column name handling | +| 2.0.7 | 2024-04-08 | [\#36768](https://github.com/airbytehq/airbyte/pull/36768) | Adopt CDK 0.29.7 to improve destination state handling | +| 2.0.6 | 2024-04-05 | [\#36620](https://github.com/airbytehq/airbyte/pull/36620) | Adopt CDK 0.29.3 to use Kotlin CDK | +| 2.0.5 | 2024-03-07 | [\#35899](https://github.com/airbytehq/airbyte/pull/35899) | Adopt CDK 0.27.3; Bugfix for case-senstive table names in v1-v2 migration, `_airbyte_meta` in raw tables | +| 2.0.4 | 2024-03-07 | [\#35899](https://github.com/airbytehq/airbyte/pull/35899) | Adopt CDK 0.23.18; Null safety check in state parsing | +| 2.0.3 | 2024-03-01 | [\#35528](https://github.com/airbytehq/airbyte/pull/35528) | Adopt CDK 0.23.11; Use Migration framework | +| 2.0.2 | 2024-03-01 | [\#35760](https://github.com/airbytehq/airbyte/pull/35760) | Mark as certified, add PSQL exception to deinterpolator | +| 2.0.1 | 2024-02-22 | [\#35385](https://github.com/airbytehq/airbyte/pull/35385) | Upgrade CDK to 0.23.0; Gathering required initial state upfront | +| 2.0.0 | 2024-02-09 | [\#35042](https://github.com/airbytehq/airbyte/pull/35042) | GA release V2 destinations format. | +| 0.6.3 | 2024-02-06 | [\#34891](https://github.com/airbytehq/airbyte/pull/34891) | Remove varchar limit, use system defaults | +| 0.6.2 | 2024-01-30 | [\#34683](https://github.com/airbytehq/airbyte/pull/34683) | CDK Upgrade 0.16.3; Fix dependency mismatches in slf4j lib | +| 0.6.1 | 2024-01-29 | [\#34630](https://github.com/airbytehq/airbyte/pull/34630) | CDK Upgrade; Use lowercase raw table in T+D queries. | +| 0.6.0 | 2024-01-19 | [\#34372](https://github.com/airbytehq/airbyte/pull/34372) | Add dv2 flag in spec | +| 0.5.5 | 2024-01-18 | [\#34236](https://github.com/airbytehq/airbyte/pull/34236) | Upgrade CDK to 0.13.1; Add indexes in raw table for query optimization | +| 0.5.4 | 2024-01-11 | [\#34177](https://github.com/airbytehq/airbyte/pull/34177) | Add code for DV2 beta (no user-visible changes) | +| 0.5.3 | 2024-01-10 | [\#34135](https://github.com/airbytehq/airbyte/pull/34135) | Use published CDK missed in previous release | +| 0.5.2 | 2024-01-08 | [\#33875](https://github.com/airbytehq/airbyte/pull/33875) | Update CDK to get Tunnel heartbeats feature | +| 0.5.1 | 2024-01-04 | [\#33873](https://github.com/airbytehq/airbyte/pull/33873) | Install normalization to enable DV2 beta | +| 0.5.0 | 2023-12-18 | [\#33507](https://github.com/airbytehq/airbyte/pull/33507) | Upgrade to latest CDK; Fix DATs and tests | +| 0.4.0 | 2023-06-27 | [\#27781](https://github.com/airbytehq/airbyte/pull/27781) | License Update: Elv2 | +| 0.3.27 | 2023-04-04 | [\#24604](https://github.com/airbytehq/airbyte/pull/24604) | Support for destination checkpointing | +| 0.3.26 | 2022-09-27 | [\#17299](https://github.com/airbytehq/airbyte/pull/17299) | Improve error handling for strict-encrypt postgres destination | +| 0.3.24 | 2022-09-08 | [\#16046](https://github.com/airbytehq/airbyte/pull/16046) | Fix missing database name URL Encoding | +| 0.3.23 | 2022-07-18 | [\#16260](https://github.com/airbytehq/airbyte/pull/16260) | Prevent traffic going on an unsecured channel in strict-encryption version of destination postgres | +| 0.3.22 | 2022-07-18 | [\#13840](https://github.com/airbytehq/airbyte/pull/13840) | Added the ability to connect using different SSL modes and SSL certificates | +| 0.3.21 | 2022-07-06 | [\#14479](https://github.com/airbytehq/airbyte/pull/14479) | Publish amd64 and arm64 versions of the connector | +| 0.3.20 | 2022-05-17 | [\#12820](https://github.com/airbytehq/airbyte/pull/12820) | Improved 'check' operation performance | +| 0.3.19 | 2022-04-25 | [\#12195](https://github.com/airbytehq/airbyte/pull/12195) | Add support for additional JDBC URL Params input | +| 0.3.18 | 2022-04-12 | [\#11729](https://github.com/airbytehq/airbyte/pull/11514) | Bump mina-sshd from 2.7.0 to 2.8.0 | +| 0.3.17 | 2022-04-05 | [\#11729](https://github.com/airbytehq/airbyte/pull/11729) | Fixed bug with dashes in schema name | +| 0.3.15 | 2022-02-25 | [\#10421](https://github.com/airbytehq/airbyte/pull/10421) | Refactor JDBC parameters handling | +| 0.3.14 | 2022-02-14 | [\#10256](https://github.com/airbytehq/airbyte/pull/10256) | (unpublished) Add `-XX:+ExitOnOutOfMemoryError` JVM option | +| 0.3.13 | 2021-12-01 | [\#8371](https://github.com/airbytehq/airbyte/pull/8371) | Fixed incorrect handling "\n" in ssh key | +| 0.3.12 | 2021-11-08 | [\#7719](https://github.com/airbytehq/airbyte/pull/7719) | Improve handling of wide rows by buffering records based on their byte size rather than their count | +| 0.3.11 | 2021-09-07 | [\#5743](https://github.com/airbytehq/airbyte/pull/5743) | Add SSH Tunnel support | +| 0.3.10 | 2021-08-11 | [\#5336](https://github.com/airbytehq/airbyte/pull/5336) | Destination Postgres: fix \u0000\(NULL\) value processing | diff --git a/docs/integrations/destinations/qdrant.md b/docs/integrations/destinations/qdrant.md index 537df671fbd57..9e56e22368464 100644 --- a/docs/integrations/destinations/qdrant.md +++ b/docs/integrations/destinations/qdrant.md @@ -71,10 +71,11 @@ You should now have all the requirements needed to configure Qdrant as a destina | Version | Date | Pull Request | Subject | | :------ | :--------- | :--------------------------------------------------------- | :----------------------------------------- | -| 0.0.10 | 2023-12-11 | [33303](https://github.com/airbytehq/airbyte/pull/33303) | Fix bug with embedding special tokens | -| 0.0.9 | 2023-12-01 | [32697](https://github.com/airbytehq/airbyte/pull/32697) | Allow omitting raw text | -| 0.0.8 | 2023-11-29 | [32608](https://github.com/airbytehq/airbyte/pull/32608) | Support deleting records for CDC sources and fix spec schema | -| 0.0.7 | 2023-11-13 | [32357](https://github.com/airbytehq/airbyte/pull/32357) | Improve spec schema | +| 0.0.11 | 2024-04-15 | [#37333](https://github.com/airbytehq/airbyte/pull/37333) | Updated CDK and pytest versions to fix security vulnerabilities | +| 0.0.10 | 2023-12-11 | [#33303](https://github.com/airbytehq/airbyte/pull/33303) | Fix bug with embedding special tokens | +| 0.0.9 | 2023-12-01 | [#32697](https://github.com/airbytehq/airbyte/pull/32697) | Allow omitting raw text | +| 0.0.8 | 2023-11-29 | [#32608](https://github.com/airbytehq/airbyte/pull/32608) | Support deleting records for CDC sources and fix spec schema | +| 0.0.7 | 2023-11-13 | [#32357](https://github.com/airbytehq/airbyte/pull/32357) | Improve spec schema | | 0.0.6 | 2023-10-23 | [#31563](https://github.com/airbytehq/airbyte/pull/31563) | Add field mapping option | | 0.0.5 | 2023-10-15 | [#31329](https://github.com/airbytehq/airbyte/pull/31329) | Add OpenAI-compatible embedder option | | 0.0.4 | 2023-10-04 | [#31075](https://github.com/airbytehq/airbyte/pull/31075) | Fix OpenAI embedder batch size | diff --git a/docs/integrations/destinations/rabbitmq.md b/docs/integrations/destinations/rabbitmq.md index 71ccdc0238f61..9e390c9dc846f 100644 --- a/docs/integrations/destinations/rabbitmq.md +++ b/docs/integrations/destinations/rabbitmq.md @@ -45,6 +45,7 @@ To use the RabbitMQ destination, you'll need: | Version | Date | Pull Request | Subject | | :------ | :--------------- | :-------------------------------------------------------- | :---------------------------------------------- | +| 0.1.3 | 2024-04-02 | [#36749](https://github.com/airbytehq/airbyte/pull/36749) | Un-archive connector (again) | | 0.1.2 | 2024-03-05 | [#35838](https://github.com/airbytehq/airbyte/pull/35838) | Un-archive connector | | 0.1.1 | 2022-09-09 | [16528](https://github.com/airbytehq/airbyte/pull/16528) | Marked password field in spec as airbyte_secret | | 0.1.0 | October 29, 2021 | [\#7560](https://github.com/airbytehq/airbyte/pull/7560) | Initial release | diff --git a/docs/integrations/destinations/redshift.md b/docs/integrations/destinations/redshift.md index 3757cdb462e3d..e1817a754193d 100644 --- a/docs/integrations/destinations/redshift.md +++ b/docs/integrations/destinations/redshift.md @@ -29,7 +29,8 @@ For INSERT strategy: 2. COPY: Replicates data by first uploading data to an S3 bucket and issuing a COPY command. This is the recommended loading approach described by Redshift [best practices](https://docs.aws.amazon.com/redshift/latest/dg/c_best-practices-single-copy-command.html). - Requires an S3 bucket and credentials. Data is copied into S3 as multiple files with a manifest file. + Requires an S3 bucket and credentials. Data is copied into S3 as multiple files with a manifest + file. Airbyte automatically picks an approach depending on the given configuration - if S3 configuration is present, Airbyte will use the COPY strategy and vice versa. @@ -50,13 +51,6 @@ For COPY strategy: to objects in the staging bucket. - **Secret Access Key** - Corresponding key to the above key id. -- **Part Size** - - Affects the size limit of an individual Redshift table. Optional. Increase this if syncing - tables larger than 100GB. Files are streamed to S3 in parts. This determines the size of each - part, in MBs. As S3 has a limit of 10,000 parts per file, part size affects the table size. This - is 10MB by default, resulting in a default table limit of 100GB. Note, a larger part size will - result in larger memory requirements. A rule of thumb is to multiply the part size by 10 to get - the memory requirement. Modify this with care. - **S3 Filename pattern** - The pattern allows you to set the file-name format for the S3 staging file(s), next placeholders combinations are currently supported: `{date}`, `{date:yyyy_MM}`, `{timestamp}`, @@ -76,9 +70,9 @@ Optional parameters: (`ab_id`, `data`, `emitted_at`). Normally these files are deleted after the `COPY` command completes; if you want to keep them for other purposes, set `purge_staging_data` to `false`. -NOTE: S3 staging does not use the SSH Tunnel option for copying data, if configured. SSH Tunnel supports the SQL -connection only. S3 is secured through public HTTPS access only. Subsequent typing and deduping queries on final table -are executed over using provided SSH Tunnel configuration. +NOTE: S3 staging does not use the SSH Tunnel option for copying data, if configured. SSH Tunnel +supports the SQL connection only. S3 is secured through public HTTPS access only. Subsequent typing +and deduping queries on final table are executed over using provided SSH Tunnel configuration. ## Step 1: Set up Redshift @@ -97,14 +91,16 @@ are executed over using provided SSH Tunnel configuration. staging S3 bucket \(for the COPY strategy\). ### Permissions in Redshift -Airbyte writes data into two schemas, whichever schema you want your data to land in, e.g. `my_schema` -and a "Raw Data" schema that Airbyte uses to improve ELT reliability. By default, this raw data schema -is `airbyte_internal` but this can be overridden in the Redshift Destination's advanced settings. -Airbyte also needs to query Redshift's + +Airbyte writes data into two schemas, whichever schema you want your data to land in, e.g. +`my_schema` and a "Raw Data" schema that Airbyte uses to improve ELT reliability. By default, this +raw data schema is `airbyte_internal` but this can be overridden in the Redshift Destination's +advanced settings. Airbyte also needs to query Redshift's [SVV_TABLE_INFO](https://docs.aws.amazon.com/redshift/latest/dg/r_SVV_TABLE_INFO.html) table for metadata about the tables airbyte manages. To ensure the `airbyte_user` has the correction permissions to: + - create schemas in your database - grant usage to any existing schemas you want Airbyte to use - grant select to the `svv_table_info` table @@ -192,9 +188,18 @@ characters. ### Data Size Limitations Redshift specifies a maximum limit of 16MB (and 65535 bytes for any VARCHAR fields within the JSON -record) to store the raw JSON record data. Thus, when a row is too big to fit, the Redshift -destination fails to load such data and currently ignores that record. See docs for -[SUPER](https://docs.aws.amazon.com/redshift/latest/dg/r_SUPER_type.html) and +record) to store the raw JSON record data. Thus, when a row is too big to fit, the destination +connector will do one of the following. + +1. Null the value if the varchar size > 65535, The corresponding key information is added to + `_airbyte_meta`. +2. Null the whole record while trying to preserve the Primary Keys and cursor field declared as part + of your stream configuration, if the total record size is > 16MB. + - For DEDUPE sync mode, if we do not find Primary key(s), we fail the sync. + - For OVERWRITE and APPEND mode, syncs will succeed with empty records emitted, if we fail to + find Primary key(s). + +See AWS docs for [SUPER](https://docs.aws.amazon.com/redshift/latest/dg/r_SUPER_type.html) and [SUPER limitations](https://docs.aws.amazon.com/redshift/latest/dg/limitations-super.html). ### Encryption @@ -209,15 +214,15 @@ Each stream will be output into its own raw table in Redshift. Each table will c Redshift is `VARCHAR`. - `_airbyte_extracted_at`: a timestamp representing when the event was pulled from the data source. The column type in Redshift is `TIMESTAMP WITH TIME ZONE`. -- `_airbyte_loaded_at`: a timestamp representing when the row was processed into final table. - The column type in Redshift is `TIMESTAMP WITH TIME ZONE`. +- `_airbyte_loaded_at`: a timestamp representing when the row was processed into final table. The + column type in Redshift is `TIMESTAMP WITH TIME ZONE`. - `_airbyte_data`: a json blob representing with the event data. The column type in Redshift is `SUPER`. ## Data type map | Airbyte type | Redshift type | -|:------------------------------------|:---------------------------------------| +| :---------------------------------- | :------------------------------------- | | STRING | VARCHAR | | STRING (BASE64) | VARCHAR | | STRING (BIG_NUMBER) | VARCHAR | @@ -237,6 +242,14 @@ Each stream will be output into its own raw table in Redshift. Each table will c | Version | Date | Pull Request | Subject | |:--------|:-----------|:-----------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 2.4.3 | 2024-04-10 | [\#36973](https://github.com/airbytehq/airbyte/pull/36973) | Limit the Standard inserts SQL statement to less than 16MB | +| 2.4.2 | 2024-04-05 | [\#36365](https://github.com/airbytehq/airbyte/pull/36365) | Remove unused config option | +| 2.4.1 | 2024-04-04 | [#36846](https://github.com/airbytehq/airbyte/pull/36846) | Remove duplicate S3 Region | +| 2.4.0 | 2024-03-21 | [\#36589](https://github.com/airbytehq/airbyte/pull/36589) | Adapt to Kotlin cdk 0.28.19 | +| 2.3.2 | 2024-03-21 | [\#36374](https://github.com/airbytehq/airbyte/pull/36374) | Supress Jooq DataAccessException error message in logs | +| 2.3.1 | 2024-03-18 | [\#36255](https://github.com/airbytehq/airbyte/pull/36255) | Mark as Certified-GA | +| 2.3.0 | 2024-03-18 | [\#36203](https://github.com/airbytehq/airbyte/pull/36203) | CDK 0.25.0; Record nulling for VARCHAR > 64K & record > 16MB (super limit) | +| 2.2.0 | 2024-03-14 | [\#35981](https://github.com/airbytehq/airbyte/pull/35981) | CDK 0.24.0; `_airbyte_meta` in Raw table for tracking upstream data modifications. | | 2.1.10 | 2024-03-07 | [\#35899](https://github.com/airbytehq/airbyte/pull/35899) | Adopt CDK 0.23.18; Null safety check in state parsing | | 2.1.9 | 2024-03-04 | [\#35316](https://github.com/airbytehq/airbyte/pull/35316) | Update to CDK 0.23.11; Adopt migration framework | | 2.1.8 | 2024-02-09 | [\#35354](https://github.com/airbytehq/airbyte/pull/35354) | Update to CDK 0.23.0; Gather required initial state upfront, remove dependency on svv_table_info for table empty check | diff --git a/docs/integrations/destinations/s3.md b/docs/integrations/destinations/s3.md index d29d32464c4fa..9809dc4760f7c 100644 --- a/docs/integrations/destinations/s3.md +++ b/docs/integrations/destinations/s3.md @@ -401,8 +401,9 @@ In order for everything to work correctly, it is also necessary that the user wh ## CHANGELOG | Version | Date | Pull Request | Subject | -| :------ | :--------- | :--------------------------------------------------------- | :--------------------------------------------------------------------------------------------------------------------------------------------------- | -| 0.5.9 | 2024-02-22 | [35569](https://github.com/airbytehq/airbyte/pull/35569) | Fix logging bug. | +|:--------|:-----------|:-----------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------| +| 0.6.0 | 2024-04-08 | [36869](https://github.com/airbytehq/airbyte/pull/36869) | Adapt to CDK 0.29.8; Kotlin converted code. | +| 0.5.9 | 2024-02-22 | [35569](https://github.com/airbytehq/airbyte/pull/35569) | Fix logging bug. | | 0.5.8 | 2024-01-03 | [#33924](https://github.com/airbytehq/airbyte/pull/33924) | Add new ap-southeast-3 AWS region | | 0.5.7 | 2023-12-28 | [#33788](https://github.com/airbytehq/airbyte/pull/33788) | Thread-safe fix for file part names | | 0.5.6 | 2023-12-08 | [#33263](https://github.com/airbytehq/airbyte/pull/33263) | (incorrect filename format, do not use) Adopt java CDK version 0.7.0. | diff --git a/docs/integrations/destinations/snowflake.md b/docs/integrations/destinations/snowflake.md index b3f15eab7e60b..eb911490e62f0 100644 --- a/docs/integrations/destinations/snowflake.md +++ b/docs/integrations/destinations/snowflake.md @@ -1,21 +1,29 @@ # Snowflake -Setting up the Snowflake destination connector involves setting up Snowflake entities (warehouse, database, schema, user, and role) in the Snowflake console and configuring the Snowflake destination connector using the Airbyte UI. +Setting up the Snowflake destination connector involves setting up Snowflake entities (warehouse, +database, schema, user, and role) in the Snowflake console and configuring the Snowflake destination +connector using the Airbyte UI. This page describes the step-by-step process of setting up the Snowflake destination connector. ## Prerequisites -- A Snowflake account with the [ACCOUNTADMIN](https://docs.snowflake.com/en/user-guide/security-access-control-considerations.html) role. If you don’t have an account with the `ACCOUNTADMIN` role, contact your Snowflake administrator to set one up for you. -- (Optional) An AWS, or Google Cloud Storage. +- A Snowflake account with the + [ACCOUNTADMIN](https://docs.snowflake.com/en/user-guide/security-access-control-considerations.html) + role. If you don’t have an account with the `ACCOUNTADMIN` role, contact your Snowflake + administrator to set one up for you. ### Network policies -By default, Snowflake allows users to connect to the service from any computer or device IP address. A security administrator (i.e. users with the SECURITYADMIN role) or higher can create a network policy to allow or deny access to a single IP address or a list of addresses. +By default, Snowflake allows users to connect to the service from any computer or device IP address. +A security administrator (i.e. users with the SECURITYADMIN role) or higher can create a network +policy to allow or deny access to a single IP address or a list of addresses. -If you have any issues connecting with Airbyte Cloud please make sure that the list of IP addresses is on the allowed list +If you have any issues connecting with Airbyte Cloud please make sure that the list of IP addresses +is on the allowed list -To determine whether a network policy is set on your account or for a specific user, execute the _SHOW PARAMETERS_ command. +To determine whether a network policy is set on your account or for a specific user, execute the +_SHOW PARAMETERS_ command. **Account** @@ -29,20 +37,28 @@ SHOW PARAMETERS LIKE 'network_policy' IN ACCOUNT; SHOW PARAMETERS LIKE 'network_policy' IN USER ; ``` -To read more please check official [Snowflake documentation](https://docs.snowflake.com/en/user-guide/network-policies.html#) +To read more please check official +[Snowflake documentation](https://docs.snowflake.com/en/user-guide/network-policies.html#) ## Setup guide ### Step 1: Set up Airbyte-specific entities in Snowflake -To set up the Snowflake destination connector, you first need to create Airbyte-specific Snowflake entities (a warehouse, database, schema, user, and role) with the `OWNERSHIP` permission to write data into Snowflake, track costs pertaining to Airbyte, and control permissions at a granular level. +To set up the Snowflake destination connector, you first need to create Airbyte-specific Snowflake +entities (a warehouse, database, schema, user, and role) with the `OWNERSHIP` permission to write +data into Snowflake, track costs pertaining to Airbyte, and control permissions at a granular level. -You can use the following script in a new [Snowflake worksheet](https://docs.snowflake.com/en/user-guide/ui-worksheet.html) to create the entities: +You can use the following script in a new +[Snowflake worksheet](https://docs.snowflake.com/en/user-guide/ui-worksheet.html) to create the +entities: 1. [Log into your Snowflake account](https://www.snowflake.com/login/). -2. Edit the following script to change the password to a more secure password and to change the names of other resources if you so desire. +2. Edit the following script to change the password to a more secure password and to change the + names of other resources if you so desire. - **Note:** Make sure you follow the [Snowflake identifier requirements](https://docs.snowflake.com/en/sql-reference/identifiers-syntax.html) while renaming the resources. + **Note:** Make sure you follow the + [Snowflake identifier requirements](https://docs.snowflake.com/en/sql-reference/identifiers-syntax.html) + while renaming the resources. -- set variables (these need to be uppercase) set airbyte_role = 'AIRBYTE_ROLE'; @@ -113,22 +129,28 @@ You can use the following script in a new [Snowflake worksheet](https://docs.sno commit; -3. Run the script using the [Worksheet page](https://docs.snowflake.com/en/user-guide/ui-worksheet.html) or [Snowsight](https://docs.snowflake.com/en/user-guide/ui-snowsight-gs.html). Make sure to select the **All Queries** checkbox. +3. Run the script using the + [Worksheet page](https://docs.snowflake.com/en/user-guide/ui-worksheet.html) or + [Snowsight](https://docs.snowflake.com/en/user-guide/ui-snowsight-gs.html). Make sure to select + the **All Queries** checkbox. ### Step 2: Set up a data loading method -Airbyte uses Snowflake’s [Internal Stage](https://docs.snowflake.com/en/user-guide/data-load-local-file-system-create-stage.html) to load data. +Airbyte uses Snowflake’s +[Internal Stage](https://docs.snowflake.com/en/user-guide/data-load-local-file-system-create-stage.html) +to load data. Make sure the database and schema have the `USAGE` privilege. ### Step 3: Set up Snowflake as a destination in Airbyte -Navigate to the Airbyte UI to set up Snowflake as a destination. You can authenticate using username/password or OAuth 2.0: +Navigate to the Airbyte UI to set up Snowflake as a destination. You can authenticate using +username/password or OAuth 2.0: ### Login and Password | Field | Description | -|-------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| ----------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | | [Host](https://docs.snowflake.com/en/user-guide/admin-account-identifier.html) | The host domain of the snowflake instance (must include the account, region, cloud environment, and end with snowflakecomputing.com). Example: `accountname.us-east-2.aws.snowflakecomputing.com` | | [Role](https://docs.snowflake.com/en/user-guide/security-access-control-overview.html#roles) | The role you created in Step 1 for Airbyte to access Snowflake. Example: `AIRBYTE_ROLE` | | [Warehouse](https://docs.snowflake.com/en/user-guide/warehouses-overview.html#overview-of-warehouses) | The warehouse you created in Step 1 for Airbyte to sync data into. Example: `AIRBYTE_WAREHOUSE` | @@ -142,7 +164,7 @@ Navigate to the Airbyte UI to set up Snowflake as a destination. You can authent ### OAuth 2.0 | Field | Description | -|:------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| :---------------------------------------------------------------------------------------------------- | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | | [Host](https://docs.snowflake.com/en/user-guide/admin-account-identifier.html) | The host domain of the snowflake instance (must include the account, region, cloud environment, and end with snowflakecomputing.com). Example: `accountname.us-east-2.aws.snowflakecomputing.com` | | [Role](https://docs.snowflake.com/en/user-guide/security-access-control-overview.html#roles) | The role you created in Step 1 for Airbyte to access Snowflake. Example: `AIRBYTE_ROLE` | | [Warehouse](https://docs.snowflake.com/en/user-guide/warehouses-overview.html#overview-of-warehouses) | The warehouse you created in Step 1 for Airbyte to sync data into. Example: `AIRBYTE_WAREHOUSE` | @@ -178,25 +200,30 @@ Navigate to the Airbyte UI to set up Snowflake as a destination. You can authent ## Output schema -Airbyte outputs each stream into its own raw table in `airbyte_internal` schema by default (can be overriden by user) and a final table with Typed columns. Contents in raw table are _NOT_ deduplicated. +Airbyte outputs each stream into its own raw table in `airbyte_internal` schema by default (can be +overriden by user) and a final table with Typed columns. Contents in raw table are _NOT_ +deduplicated. ### Raw Table schema | Airbyte field | Description | Column type | -|------------------------|--------------------------------------------------------------------|--------------------------| +| ---------------------- | ------------------------------------------------------------------ | ------------------------ | | \_airbyte_raw_id | A UUID assigned to each processed event | VARCHAR | | \_airbyte_extracted_at | A timestamp for when the event was pulled from the data source | TIMESTAMP WITH TIME ZONE | | \_airbyte_loaded_at | Timestamp to indicate when the record was loaded into Typed tables | TIMESTAMP WITH TIME ZONE | | \_airbyte_data | A JSON blob with the event data. | VARIANT | -**Note:** Although the contents of the `_airbyte_data` are fairly stable, schema of the raw table could be subject to change in future versions. +**Note:** Although the contents of the `_airbyte_data` are fairly stable, schema of the raw table +could be subject to change in future versions. -**Note:** By default, Airbyte creates permanent tables. If you prefer transient tables, create a dedicated transient database for Airbyte. For more information, refer to[ Working with Temporary and Transient Tables](https://docs.snowflake.com/en/user-guide/tables-temp-transient.html) +**Note:** By default, Airbyte creates permanent tables. If you prefer transient tables, create a +dedicated transient database for Airbyte. For more information, refer +to[ Working with Temporary and Transient Tables](https://docs.snowflake.com/en/user-guide/tables-temp-transient.html) ## Data type map | Airbyte type | Snowflake type | -|:------------------------------------|:---------------| +| :---------------------------------- | :------------- | | STRING | TEXT | | STRING (BASE64) | TEXT | | STRING (BIG_NUMBER) | TEXT | @@ -223,7 +250,8 @@ The Snowflake destination supports the following sync modes: ## Snowflake tutorials -Now that you have set up the Snowflake destination connector, check out the following Snowflake tutorials: +Now that you have set up the Snowflake destination connector, check out the following Snowflake +tutorials: - [Build a data ingestion pipeline from Mailchimp to Snowflake](https://airbyte.com/tutorials/data-ingestion-pipeline-mailchimp-snowflake) - [Replicate data from a PostgreSQL database to Snowflake](https://airbyte.com/tutorials/postgresql-database-to-snowflake) @@ -234,190 +262,198 @@ Now that you have set up the Snowflake destination connector, check out the foll ### 'Current role does not have permissions on the target schema' -If you receive an error stating `Current role does not have permissions on the target schema` make sure that the -Snowflake destination `SCHEMA` is one that the role you've provided has permissions on. When creating a connection, -it may allow you to select `Mirror source structure` for the `Destination namespace`, which if you have followed -some of our default examples and tutorials may result in the connection trying to write to a `PUBLIC` schema. +If you receive an error stating `Current role does not have permissions on the target schema` make +sure that the Snowflake destination `SCHEMA` is one that the role you've provided has permissions +on. When creating a connection, it may allow you to select `Mirror source structure` for the +`Destination namespace`, which if you have followed some of our default examples and tutorials may +result in the connection trying to write to a `PUBLIC` schema. -A quick fix could be to edit your connection's 'Replication' settings from `Mirror source structure` to `Destination Default`. -Otherwise, make sure to grant the role the required permissions in the desired namespace. +A quick fix could be to edit your connection's 'Replication' settings from `Mirror source structure` +to `Destination Default`. Otherwise, make sure to grant the role the required permissions in the +desired namespace. ## Changelog -| Version | Date | Pull Request | Subject | -|:----------------|:-----------|:-----------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------| -| 3.6.1 | 2024-03-07 | [\#35899](https://github.com/airbytehq/airbyte/pull/35899) | Adopt CDK 0.23.18; Null safety check in state parsing | -| 3.6.0 | 2024-03-06 | [35308](https://github.com/airbytehq/airbyte/pull/35308) | Upgrade CDK; use utc tz for extracted_at; Migrate existing extracted_at to utc; | -| 3.5.14 | 2024-02-22 | [35456](https://github.com/airbytehq/airbyte/pull/35456) | Adopt CDK 0.23.0; Gather initial state upfront, reduce information_schema calls | -| 3.5.13 | 2024-02-22 | [35569](https://github.com/airbytehq/airbyte/pull/35569) | Fix logging bug. | -| 3.5.12 | 2024-02-15 | [35240](https://github.com/airbytehq/airbyte/pull/35240) | Adopt CDK 0.20.9 | -| 3.5.11 | 2024-02-12 | [35194](https://github.com/airbytehq/airbyte/pull/35194) | Reorder auth options | -| 3.5.10 | 2024-02-12 | [35144](https://github.com/airbytehq/airbyte/pull/35144) | Adopt CDK 0.20.2 | -| 3.5.9 | 2024-02-12 | [35111](https://github.com/airbytehq/airbyte/pull/35111) | Adopt CDK 0.20.1 | -| 3.5.8 | 2024-02-09 | [34574](https://github.com/airbytehq/airbyte/pull/34574) | Adopt CDK 0.20.0 | -| 3.5.7 | 2024-02-08 | [34747](https://github.com/airbytehq/airbyte/pull/34747) | Adopt CDK 0.19.0 | -| 3.5.6 | 2024-02-08 | [\#35027](https://github.com/airbytehq/airbyte/pull/35027) | Upgrade CDK to version 0.17.1 | -| 3.5.5 | 2024-02-08 | [\#34502](https://github.com/airbytehq/airbyte/pull/34502) | Reduce COPY frequency | -| 3.5.4 | 2024-01-24 | [\#34451](https://github.com/airbytehq/airbyte/pull/34451) | Improve logging for unparseable input | -| 3.5.3 | 2024-01-25 | [\#34528](https://github.com/airbytehq/airbyte/pull/34528) | Fix spurious `check` failure (`UnsupportedOperationException: Snowflake does not use the native JDBC DV2 interface`) | -| 3.5.2 | 2024-01-24 | [\#34458](https://github.com/airbytehq/airbyte/pull/34458) | Improve error reporting | -| 3.5.1 | 2024-01-24 | [\#34501](https://github.com/airbytehq/airbyte/pull/34501) | Internal code changes for Destinations V2 | -| 3.5.0 | 2024-01-24 | [\#34462](https://github.com/airbytehq/airbyte/pull/34462) | Upgrade CDK to 0.14.0 | -| 3.4.22 | 2024-01-12 | [\#34227](https://github.com/airbytehq/airbyte/pull/34227) | Upgrade CDK to 0.12.0; Cleanup unused dependencies | -| 3.4.21 | 2024-01-10 | [\#34083](https://github.com/airbytehq/airbyte/pull/34083) | Emit destination stats as part of the state message | -| 3.4.20 | 2024-01-05 | [\#33948](https://github.com/airbytehq/airbyte/pull/33948) | Skip retrieving initial table state when setup fails | -| 3.4.19 | 2024-01-04 | [\#33730](https://github.com/airbytehq/airbyte/pull/33730) | Internal code structure changes | -| 3.4.18 | 2024-01-02 | [\#33728](https://github.com/airbytehq/airbyte/pull/33728) | Add option to only type and dedupe at the end of the sync | -| 3.4.17 | 2023-12-20 | [\#33704](https://github.com/airbytehq/airbyte/pull/33704) | Update to java CDK 0.10.0 (no changes) | -| 3.4.16 | 2023-12-18 | [\#33124](https://github.com/airbytehq/airbyte/pull/33124) | Make Schema Creation Seperate from Table Creation | -| 3.4.15 | 2023-12-13 | [\#33232](https://github.com/airbytehq/airbyte/pull/33232) | Only run typing+deduping for a stream if the stream had any records | -| 3.4.14 | 2023-12-08 | [\#33263](https://github.com/airbytehq/airbyte/pull/33263) | Adopt java CDK version 0.7.0 | -| 3.4.13 | 2023-12-05 | [\#32326](https://github.com/airbytehq/airbyte/pull/32326) | Use jdbc metadata for table existence check | -| 3.4.12 | 2023-12-04 | [\#33084](https://github.com/airbytehq/airbyte/pull/33084) | T&D SQL statements moved to debug log level | -| 3.4.11 | 2023-11-14 | [\#32526](https://github.com/airbytehq/airbyte/pull/32526) | Clean up memory manager logs. | -| 3.4.10 | 2023-11-08 | [\#32125](https://github.com/airbytehq/airbyte/pull/32125) | Fix compilation warnings. | -| 3.4.9 | 2023-11-06 | [\#32026](https://github.com/airbytehq/airbyte/pull/32026) | Add separate TRY_CAST transaction to reduce compute usage | -| 3.4.8 | 2023-11-06 | [\#32190](https://github.com/airbytehq/airbyte/pull/32190) | Further improve error reporting | -| 3.4.7 | 2023-11-06 | [\#32193](https://github.com/airbytehq/airbyte/pull/32193) | Adopt java CDK version 0.4.1. | -| 3.4.6 | 2023-11-02 | [\#32124](https://github.com/airbytehq/airbyte/pull/32124) | Revert `merge` statement | -| 3.4.5 | 2023-11-02 | [\#31983](https://github.com/airbytehq/airbyte/pull/31983) | Improve error reporting | -| 3.4.4 | 2023-10-30 | [\#31985](https://github.com/airbytehq/airbyte/pull/31985) | Delay upgrade deadline to Nov 7 | -| 3.4.3 | 2023-10-30 | [\#31960](https://github.com/airbytehq/airbyte/pull/31960) | Adopt java CDK version 0.2.0. | -| 3.4.2 | 2023-10-27 | [\#31897](https://github.com/airbytehq/airbyte/pull/31897) | Further filtering on extracted_at | -| 3.4.1 | 2023-10-27 | [\#31683](https://github.com/airbytehq/airbyte/pull/31683) | Performance enhancement (switch to a `merge` statement for incremental-dedup syncs) | -| 3.4.0 | 2023-10-25 | [\#31686](https://github.com/airbytehq/airbyte/pull/31686) | Opt out flag for typed and deduped tables | -| 3.3.0 | 2023-10-25 | [\#31520](https://github.com/airbytehq/airbyte/pull/31520) | Stop deduping raw table | -| 3.2.3 | 2023-10-17 | [\#31191](https://github.com/airbytehq/airbyte/pull/31191) | Improve typing+deduping performance by filtering new raw records on extracted_at | -| 3.2.2 | 2023-10-10 | [\#31194](https://github.com/airbytehq/airbyte/pull/31194) | Deallocate unused per stream buffer memory when empty | -| 3.2.1 | 2023-10-10 | [\#31083](https://github.com/airbytehq/airbyte/pull/31083) | Fix precision of numeric values in async destinations | -| 3.2.0 | 2023-10-09 | [\#31149](https://github.com/airbytehq/airbyte/pull/31149) | No longer fail syncs when PKs are null - try do dedupe anyway | -| 3.1.22 | 2023-10-06 | [\#31153](https://github.com/airbytehq/airbyte/pull/31153) | Increase jvm GC retries | -| 3.1.21 | 2023-10-06 | [\#31139](https://github.com/airbytehq/airbyte/pull/31139) | Bump CDK version | -| 3.1.20 | 2023-10-06 | [\#31129](https://github.com/airbytehq/airbyte/pull/31129) | Reduce async buffer size | -| 3.1.19 | 2023-10-04 | [\#31082](https://github.com/airbytehq/airbyte/pull/31082) | Revert null PK checks | -| 3.1.18 | 2023-10-01 | [\#30779](https://github.com/airbytehq/airbyte/pull/30779) | Final table PK columns become non-null and skip check for null PKs in raw records (performance) | -| 3.1.17 | 2023-09-29 | [\#30938](https://github.com/airbytehq/airbyte/pull/30938) | Upgrade snowflake-jdbc driver | -| 3.1.16 | 2023-09-28 | [\#30835](https://github.com/airbytehq/airbyte/pull/30835) | Fix regression from 3.1.15 in supporting concurrent syncs with identical stream name but different namespace | -| 3.1.15 | 2023-09-26 | [\#30775](https://github.com/airbytehq/airbyte/pull/30775) | Increase async block size | -| 3.1.14 | 2023-09-27 | [\#30739](https://github.com/airbytehq/airbyte/pull/30739) | Fix column name collision detection | -| 3.1.13 | 2023-09-19 | [\#30599](https://github.com/airbytehq/airbyte/pull/30599) | Support concurrent syncs with identical stream name but different namespace | -| 3.1.12 | 2023-09-21 | [\#30671](https://github.com/airbytehq/airbyte/pull/30671) | Reduce async buffer size | -| 3.1.11 | 2023-09-19 | [\#30592](https://github.com/airbytehq/airbyte/pull/30592) | Internal code changes | -| 3.1.10 | 2023-09-18 | [\#30546](https://github.com/airbytehq/airbyte/pull/30546) | Make sure that the async buffer are flush every 5 minutes | -| 3.1.9 | 2023-09-19 | [\#30319](https://github.com/airbytehq/airbyte/pull/30319) | Support column names that are reserved | -| 3.1.8 | 2023-09-18 | [\#30479](https://github.com/airbytehq/airbyte/pull/30479) | Fix async memory management | -| 3.1.7 | 2023-09-15 | [\#30491](https://github.com/airbytehq/airbyte/pull/30491) | Improve error message display | -| 3.1.6 | 2023-09-14 | [\#30439](https://github.com/airbytehq/airbyte/pull/30439) | Fix a transient error | -| 3.1.5 | 2023-09-13 | [\#30416](https://github.com/airbytehq/airbyte/pull/30416) | Support `${` in stream name/namespace, and in column names | -| 3.1.4 | 2023-09-12 | [\#30364](https://github.com/airbytehq/airbyte/pull/30364) | Add log message | -| 3.1.3 | 2023-08-29 | [\#29878](https://github.com/airbytehq/airbyte/pull/29878) | Reenable incremental typing and deduping | -| 3.1.2 | 2023-08-31 | [\#30020](https://github.com/airbytehq/airbyte/pull/30020) | Run typing and deduping tasks in parallel | -| 3.1.1 | 2023-09-05 | [\#30117](https://github.com/airbytehq/airbyte/pull/30117) | Type and Dedupe at sync start and then every 6 hours | -| 3.1.0 | 2023-09-01 | [\#30056](https://github.com/airbytehq/airbyte/pull/30056) | Upcase final table names to allow case-insensitive references | -| 3.0.2 | 2023-09-01 | [\#30121](https://github.com/airbytehq/airbyte/pull/30121) | Improve performance on very wide streams by skipping TRY_CAST on strings | -| 3.0.1 | 2023-08-27 | [\#30065](https://github.com/airbytehq/airbyte/pull/30065) | Clearer error thrown when records are missing a primary key | -| 3.0.0 | 2023-08-27 | [\#29783](https://github.com/airbytehq/airbyte/pull/29783) | Destinations V2 | -| 2.1.7 | 2023-08-29 | [\#29949](https://github.com/airbytehq/airbyte/pull/29949) | Destinations V2: Fix checking for empty table by ensuring upper-case DB names | -| 2.1.6 | 2023-08-28 | [\#29878](https://github.com/airbytehq/airbyte/pull/29878) | Destinations V2: Fix detection of existing table by ensuring upper-case DB names | -| 2.1.5 | 2023-08-28 | [\#29903](https://github.com/airbytehq/airbyte/pull/29917) | Destinations V2: Performance Improvement, Changing Metadata error array construction from ARRAY_CAT to ARRAY_CONSTRUCT_COMPACT | -| 2.1.4 | 2023-08-28 | [\#29903](https://github.com/airbytehq/airbyte/pull/29903) | Abort queries on crash | -| 2.1.3 | 2023-08-25 | [\#29881](https://github.com/airbytehq/airbyte/pull/29881) | Destinations v2: Only run T+D once at end of sync, to prevent data loss under async conditions | -| 2.1.2 | 2023-08-24 | [\#29805](https://github.com/airbytehq/airbyte/pull/29805) | Destinations v2: Don't soft reset in migration | -| 2.1.1 | 2023-08-23 | [\#29774](https://github.com/airbytehq/airbyte/pull/29774) | Destinations v2: Don't soft reset overwrite syncs | -| 2.1.0 | 2023-08-21 | [\#29636](https://github.com/airbytehq/airbyte/pull/29636) | Destinations v2: Several Critical Bug Fixes (cursorless dedup, improved floating-point handling, improved special characters handling; improved error handling) | -| 2.0.0 | 2023-08-09 | [\#28894](https://github.com/airbytehq/airbyte/pull/29236) | Remove support for Snowflake GCS/S3 loading method in favor of Snowflake Internal staging | -| 1.3.3 | 2023-08-15 | [\#29461](https://github.com/airbytehq/airbyte/pull/29461) | Changing a static constant reference | -| 1.3.2 | 2023-08-11 | [\#29381](https://github.com/airbytehq/airbyte/pull/29381) | Destinations v2: Add support for streams with no columns | -| 1.3.1 | 2023-08-04 | [\#28894](https://github.com/airbytehq/airbyte/pull/28894) | Destinations v2: Update SqlGenerator | -| 1.3.0 | 2023-08-07 | [\#29174](https://github.com/airbytehq/airbyte/pull/29174) | Destinations v2: early access release | -| 1.2.10 | 2023-08-07 | [\#29188](https://github.com/airbytehq/airbyte/pull/29188) | Internal code refactoring | -| 1.2.9 | 2023-08-04 | [\#28677](https://github.com/airbytehq/airbyte/pull/28677) | Destinations v2: internal code changes to prepare for early access release | -| 1.2.8 | 2023-08-03 | [\#29047](https://github.com/airbytehq/airbyte/pull/29047) | Avoid logging record if the format is invalid | -| 1.2.7 | 2023-08-02 | [\#28976](https://github.com/airbytehq/airbyte/pull/28976) | Fix composite PK handling in v1 mode | -| 1.2.6 | 2023-08-01 | [\#28618](https://github.com/airbytehq/airbyte/pull/28618) | Reduce logging noise | -| 1.2.5 | 2023-07-24 | [\#28618](https://github.com/airbytehq/airbyte/pull/28618) | Add hooks in preparation for destinations v2 implementation | -| 1.2.4 | 2023-07-21 | [\#28584](https://github.com/airbytehq/airbyte/pull/28584) | Install dependencies in preparation for destinations v2 work | -| 1.2.3 | 2023-07-21 | [\#28345](https://github.com/airbytehq/airbyte/pull/28345) | Pull in async framework minor bug fix for race condition on state emission | -| 1.2.2 | 2023-07-14 | [\#28345](https://github.com/airbytehq/airbyte/pull/28345) | Increment patch to trigger a rebuild | -| 1.2.1 | 2023-07-14 | [\#28315](https://github.com/airbytehq/airbyte/pull/28315) | Pull in async framework minor bug fix to avoid Snowflake hanging on close | -| 1.2.0 | 2023-07-5 | [\#27935](https://github.com/airbytehq/airbyte/pull/27935) | Enable Faster Snowflake Syncs with Asynchronous writes | -| 1.1.0 | 2023-06-27 | [\#27781](https://github.com/airbytehq/airbyte/pull/27781) | License Update: Elv2 | -| 1.0.6 | 2023-06-21 | [\#27555](https://github.com/airbytehq/airbyte/pull/27555) | Reduce image size | -| 1.0.5 | 2023-05-31 | [\#25782](https://github.com/airbytehq/airbyte/pull/25782) | Internal scaffolding for future development | -| 1.0.4 | 2023-05-19 | [\#26323](https://github.com/airbytehq/airbyte/pull/26323) | Prevent infinite retry loop under specific circumstances | -| 1.0.3 | 2023-05-15 | [\#26081](https://github.com/airbytehq/airbyte/pull/26081) | Reverts splits bases | -| 1.0.2 | 2023-05-05 | [\#25649](https://github.com/airbytehq/airbyte/pull/25649) | Splits bases (reverted) | -| 1.0.1 | 2023-04-29 | [\#25570](https://github.com/airbytehq/airbyte/pull/25570) | Internal library update | -| 1.0.0 | 2023-05-02 | [\#25739](https://github.com/airbytehq/airbyte/pull/25739) | Removed Azure Blob Storage as a loading method | -| 0.4.63 | 2023-04-27 | [\#25346](https://github.com/airbytehq/airbyte/pull/25346) | Added FlushBufferFunction interface | -| 0.4.61 | 2023-03-30 | [\#24736](https://github.com/airbytehq/airbyte/pull/24736) | Improve behavior when throttled by AWS API | -| 0.4.60 | 2023-03-30 | [\#24698](https://github.com/airbytehq/airbyte/pull/24698) | Add option in spec to allow increasing the stream buffer size to 50 | -| 0.4.59 | 2023-03-23 | [\#23904](https://github.com/airbytehq/airbyte/pull/24405) | Fail faster in certain error cases | -| 0.4.58 | 2023-03-27 | [\#24615](https://github.com/airbytehq/airbyte/pull/24615) | Fixed host validation by pattern on UI | -| 0.4.56 (broken) | 2023-03-22 | [\#23904](https://github.com/airbytehq/airbyte/pull/23904) | Added host validation by pattern on UI | -| 0.4.54 | 2023-03-17 | [\#23788](https://github.com/airbytehq/airbyte/pull/23788) | S3-Parquet: added handler to process null values in arrays | -| 0.4.53 | 2023-03-15 | [\#24058](https://github.com/airbytehq/airbyte/pull/24058) | added write attempt to internal staging Check method | -| 0.4.52 | 2023-03-10 | [\#23931](https://github.com/airbytehq/airbyte/pull/23931) | Added support for periodic buffer flush | -| 0.4.51 | 2023-03-10 | [\#23466](https://github.com/airbytehq/airbyte/pull/23466) | Changed S3 Avro type from Int to Long | -| 0.4.49 | 2023-02-27 | [\#23360](https://github.com/airbytehq/airbyte/pull/23360) | Added logging for flushing and writing data to destination storage | -| 0.4.48 | 2023-02-23 | [\#22877](https://github.com/airbytehq/airbyte/pull/22877) | Add handler for IP not in whitelist error and more handlers for insufficient permission error | -| 0.4.47 | 2023-01-30 | [\#21912](https://github.com/airbytehq/airbyte/pull/21912) | Catch "Create" Table and Stage Known Permissions and rethrow as ConfigExceptions | -| 0.4.46 | 2023-01-26 | [\#20631](https://github.com/airbytehq/airbyte/pull/20631) | Added support for destination checkpointing with staging | -| 0.4.45 | 2023-01-25 | [\#21087](https://github.com/airbytehq/airbyte/pull/21764) | Catch Known Permissions and rethrow as ConfigExceptions | -| 0.4.44 | 2023-01-20 | [\#21087](https://github.com/airbytehq/airbyte/pull/21087) | Wrap Authentication Errors as Config Exceptions | -| 0.4.43 | 2023-01-20 | [\#21450](https://github.com/airbytehq/airbyte/pull/21450) | Updated Check methods to handle more possible s3 and gcs stagings issues | -| 0.4.42 | 2023-01-12 | [\#21342](https://github.com/airbytehq/airbyte/pull/21342) | Better handling for conflicting destination streams | -| 0.4.41 | 2022-12-16 | [\#20566](https://github.com/airbytehq/airbyte/pull/20566) | Improve spec to adhere to standards | -| 0.4.40 | 2022-11-11 | [\#19302](https://github.com/airbytehq/airbyte/pull/19302) | Set jdbc application env variable depends on env - airbyte_oss or airbyte_cloud | -| 0.4.39 | 2022-11-09 | [\#18970](https://github.com/airbytehq/airbyte/pull/18970) | Updated "check" connection method to handle more errors | -| 0.4.38 | 2022-09-26 | [\#17115](https://github.com/airbytehq/airbyte/pull/17115) | Added connection string identifier | -| 0.4.37 | 2022-09-21 | [\#16839](https://github.com/airbytehq/airbyte/pull/16839) | Update JDBC driver for Snowflake to 3.13.19 | -| 0.4.36 | 2022-09-14 | [\#15668](https://github.com/airbytehq/airbyte/pull/15668) | Wrap logs in AirbyteLogMessage | -| 0.4.35 | 2022-09-01 | [\#16243](https://github.com/airbytehq/airbyte/pull/16243) | Fix Json to Avro conversion when there is field name clash from combined restrictions (`anyOf`, `oneOf`, `allOf` fields). | -| 0.4.34 | 2022-07-23 | [\#14388](https://github.com/airbytehq/airbyte/pull/14388) | Add support for key pair authentication | -| 0.4.33 | 2022-07-15 | [\#14494](https://github.com/airbytehq/airbyte/pull/14494) | Make S3 output filename configurable. | -| 0.4.32 | 2022-07-14 | [\#14618](https://github.com/airbytehq/airbyte/pull/14618) | Removed additionalProperties: false from JDBC destination connectors | -| 0.4.31 | 2022-07-07 | [\#13729](https://github.com/airbytehq/airbyte/pull/13729) | Improve configuration field description | -| 0.4.30 | 2022-06-24 | [\#14114](https://github.com/airbytehq/airbyte/pull/14114) | Remove "additionalProperties": false from specs for connectors with staging | -| 0.4.29 | 2022-06-17 | [\#13753](https://github.com/airbytehq/airbyte/pull/13753) | Deprecate and remove PART_SIZE_MB fields from connectors based on StreamTransferManager | -| 0.4.28 | 2022-05-18 | [\#12952](https://github.com/airbytehq/airbyte/pull/12952) | Apply buffering strategy on GCS staging | -| 0.4.27 | 2022-05-17 | [\#12820](https://github.com/airbytehq/airbyte/pull/12820) | Improved 'check' operation performance | -| 0.4.26 | 2022-05-12 | [\#12805](https://github.com/airbytehq/airbyte/pull/12805) | Updated to latest base-java to emit AirbyteTraceMessages on error. | -| 0.4.25 | 2022-05-03 | [\#12452](https://github.com/airbytehq/airbyte/pull/12452) | Add support for encrypted staging on S3; fix the purge_staging_files option | -| 0.4.24 | 2022-03-24 | [\#11093](https://github.com/airbytehq/airbyte/pull/11093) | Added OAuth support (Compatible with Airbyte Version 0.35.60+) | -| 0.4.22 | 2022-03-18 | [\#10793](https://github.com/airbytehq/airbyte/pull/10793) | Fix namespace with invalid characters | -| 0.4.21 | 2022-03-18 | [\#11071](https://github.com/airbytehq/airbyte/pull/11071) | Switch to compressed on-disk buffering before staging to s3/internal stage | -| 0.4.20 | 2022-03-14 | [\#10341](https://github.com/airbytehq/airbyte/pull/10341) | Add Azure blob staging support | -| 0.4.19 | 2022-03-11 | [\#10699](https://github.com/airbytehq/airbyte/pull/10699) | Added unit tests | -| 0.4.17 | 2022-02-25 | [\#10421](https://github.com/airbytehq/airbyte/pull/10421) | Refactor JDBC parameters handling | -| 0.4.16 | 2022-02-25 | [\#10627](https://github.com/airbytehq/airbyte/pull/10627) | Add try catch to make sure all handlers are closed | -| 0.4.15 | 2022-02-22 | [\#10459](https://github.com/airbytehq/airbyte/pull/10459) | Add FailureTrackingAirbyteMessageConsumer | -| 0.4.14 | 2022-02-17 | [\#10394](https://github.com/airbytehq/airbyte/pull/10394) | Reduce memory footprint. | -| 0.4.13 | 2022-02-16 | [\#10212](https://github.com/airbytehq/airbyte/pull/10212) | Execute COPY command in parallel for S3 and GCS staging | -| 0.4.12 | 2022-02-15 | [\#10342](https://github.com/airbytehq/airbyte/pull/10342) | Use connection pool, and fix connection leak. | -| 0.4.11 | 2022-02-14 | [\#9920](https://github.com/airbytehq/airbyte/pull/9920) | Updated the size of staging files for S3 staging. Also, added closure of S3 writers to staging files when data has been written to an staging file. | -| 0.4.10 | 2022-02-14 | [\#10297](https://github.com/airbytehq/airbyte/pull/10297) | Halve the record buffer size to reduce memory consumption. | -| 0.4.9 | 2022-02-14 | [\#10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `ExitOnOutOfMemoryError` JVM flag. | -| 0.4.8 | 2022-02-01 | [\#9959](https://github.com/airbytehq/airbyte/pull/9959) | Fix null pointer exception from buffered stream consumer. | -| 0.4.7 | 2022-01-29 | [\#9745](https://github.com/airbytehq/airbyte/pull/9745) | Integrate with Sentry. | -| 0.4.6 | 2022-01-28 | [\#9623](https://github.com/airbytehq/airbyte/pull/9623) | Add jdbc_url_params support for optional JDBC parameters | -| 0.4.5 | 2021-12-29 | [\#9184](https://github.com/airbytehq/airbyte/pull/9184) | Update connector fields title/description | -| 0.4.4 | 2022-01-24 | [\#9743](https://github.com/airbytehq/airbyte/pull/9743) | Fixed bug with dashes in schema name | -| 0.4.3 | 2022-01-20 | [\#9531](https://github.com/airbytehq/airbyte/pull/9531) | Start using new S3StreamCopier and expose the purgeStagingData option | -| 0.4.2 | 2022-01-10 | [\#9141](https://github.com/airbytehq/airbyte/pull/9141) | Fixed duplicate rows on retries | -| 0.4.1 | 2021-01-06 | [\#9311](https://github.com/airbytehq/airbyte/pull/9311) | Update сreating schema during check | -| 0.4.0 | 2021-12-27 | [\#9063](https://github.com/airbytehq/airbyte/pull/9063) | Updated normalization to produce permanent tables | -| 0.3.24 | 2021-12-23 | [\#8869](https://github.com/airbytehq/airbyte/pull/8869) | Changed staging approach to Byte-Buffered | -| 0.3.23 | 2021-12-22 | [\#9039](https://github.com/airbytehq/airbyte/pull/9039) | Added part_size configuration in UI for S3 loading method | -| 0.3.22 | 2021-12-21 | [\#9006](https://github.com/airbytehq/airbyte/pull/9006) | Updated jdbc schema naming to follow Snowflake Naming Conventions | -| 0.3.21 | 2021-12-15 | [\#8781](https://github.com/airbytehq/airbyte/pull/8781) | Updated check method to verify permissions to create/drop stage for internal staging; compatibility fix for Java 17 | -| 0.3.20 | 2021-12-10 | [\#8562](https://github.com/airbytehq/airbyte/pull/8562) | Moving classes around for better dependency management; compatibility fix for Java 17 | -| 0.3.19 | 2021-12-06 | [\#8528](https://github.com/airbytehq/airbyte/pull/8528) | Set Internal Staging as default choice | -| 0.3.18 | 2021-11-26 | [\#8253](https://github.com/airbytehq/airbyte/pull/8253) | Snowflake Internal Staging Support | -| 0.3.17 | 2021-11-08 | [\#7719](https://github.com/airbytehq/airbyte/pull/7719) | Improve handling of wide rows by buffering records based on their byte size rather than their count | -| 0.3.15 | 2021-10-11 | [\#6949](https://github.com/airbytehq/airbyte/pull/6949) | Each stream was split into files of 10,000 records each for copying using S3 or GCS | -| 0.3.14 | 2021-09-08 | [\#5924](https://github.com/airbytehq/airbyte/pull/5924) | Fixed AWS S3 Staging COPY is writing records from different table in the same raw table | -| 0.3.13 | 2021-09-01 | [\#5784](https://github.com/airbytehq/airbyte/pull/5784) | Updated query timeout from 30 minutes to 3 hours | -| 0.3.12 | 2021-07-30 | [\#5125](https://github.com/airbytehq/airbyte/pull/5125) | Enable `additionalPropertities` in spec.json | -| 0.3.11 | 2021-07-21 | [\#3555](https://github.com/airbytehq/airbyte/pull/3555) | Partial Success in BufferedStreamConsumer | -| 0.3.10 | 2021-07-12 | [\#4713](https://github.com/airbytehq/airbyte/pull/4713) | Tag traffic with `airbyte` label to enable optimization opportunities from Snowflake | +| Version | Date | Pull Request | Subject | +|:----------------|:-----------|:-------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 3.7.0 | 2024-04-08 | [\#35754](https://github.com/airbytehq/airbyte/pull/35754) | Allow configuring `data_retention_time_in_days`; apply to both raw and final tables. *Note*: Existing tables will not be affected; you must manually alter them.| +| 3.6.6 | 2024-03-26 | [\#36466](https://github.com/airbytehq/airbyte/pull/36466) | Correctly hhandle instances with `QUOTED_IDENTIFIERS_IGNORE_CASE` enabled globally | +| 3.6.5 | 2024-03-25 | [\#36461](https://github.com/airbytehq/airbyte/pull/36461) | Internal code change (use published CDK artifact instead of source dependency) | +| 3.6.4 | 2024-03-25 | [\#36396](https://github.com/airbytehq/airbyte/pull/36396) | Handle instances with `QUOTED_IDENTIFIERS_IGNORE_CASE` enabled globally | +| 3.6.3 | 2024-03-25 | [\#36452](https://github.com/airbytehq/airbyte/pull/36452) | Remove Query timeout | +| 3.6.2 | 2024-03-18 | [\#36240](https://github.com/airbytehq/airbyte/pull/36240) | Hide oAuth config option | +| 3.6.1 | 2024-03-07 | [\#35899](https://github.com/airbytehq/airbyte/pull/35899) | Adopt CDK 0.23.18; Null safety check in state parsing | +| 3.6.0 | 2024-03-06 | [\#35308](https://github.com/airbytehq/airbyte/pull/35308) | Upgrade CDK; use utc tz for extracted_at; Migrate existing extracted_at to utc; | +| 3.5.14 | 2024-02-22 | [\#35456](https://github.com/airbytehq/airbyte/pull/35456) | Adopt CDK 0.23.0; Gather initial state upfront, reduce information_schema calls | +| 3.5.13 | 2024-02-22 | [\#35569](https://github.com/airbytehq/airbyte/pull/35569) | Fix logging bug. | +| 3.5.12 | 2024-02-15 | [\#35240](https://github.com/airbytehq/airbyte/pull/35240) | Adopt CDK 0.20.9 | +| 3.5.11 | 2024-02-12 | [\#35194](https://github.com/airbytehq/airbyte/pull/35194) | Reorder auth options | +| 3.5.10 | 2024-02-12 | [\#35144](https://github.com/airbytehq/airbyte/pull/35144) | Adopt CDK 0.20.2 | +| 3.5.9 | 2024-02-12 | [\#35111](https://github.com/airbytehq/airbyte/pull/35111) | Adopt CDK 0.20.1 | +| 3.5.8 | 2024-02-09 | [\#34574](https://github.com/airbytehq/airbyte/pull/34574) | Adopt CDK 0.20.0 | +| 3.5.7 | 2024-02-08 | [\#34747](https://github.com/airbytehq/airbyte/pull/34747) | Adopt CDK 0.19.0 | +| 3.5.6 | 2024-02-08 | [\#35027](https://github.com/airbytehq/airbyte/pull/35027) | Upgrade CDK to version 0.17.1 | +| 3.5.5 | 2024-02-08 | [\#34502](https://github.com/airbytehq/airbyte/pull/34502) | Reduce COPY frequency | +| 3.5.4 | 2024-01-24 | [\#34451](https://github.com/airbytehq/airbyte/pull/34451) | Improve logging for unparseable input | +| 3.5.3 | 2024-01-25 | [\#34528](https://github.com/airbytehq/airbyte/pull/34528) | Fix spurious `check` failure (`UnsupportedOperationException: Snowflake does not use the native JDBC DV2 interface`) | +| 3.5.2 | 2024-01-24 | [\#34458](https://github.com/airbytehq/airbyte/pull/34458) | Improve error reporting | +| 3.5.1 | 2024-01-24 | [\#34501](https://github.com/airbytehq/airbyte/pull/34501) | Internal code changes for Destinations V2 | +| 3.5.0 | 2024-01-24 | [\#34462](https://github.com/airbytehq/airbyte/pull/34462) | Upgrade CDK to 0.14.0 | +| 3.4.22 | 2024-01-12 | [\#34227](https://github.com/airbytehq/airbyte/pull/34227) | Upgrade CDK to 0.12.0; Cleanup unused dependencies | +| 3.4.21 | 2024-01-10 | [\#34083](https://github.com/airbytehq/airbyte/pull/34083) | Emit destination stats as part of the state message | +| 3.4.20 | 2024-01-05 | [\#33948](https://github.com/airbytehq/airbyte/pull/33948) | Skip retrieving initial table state when setup fails | +| 3.4.19 | 2024-01-04 | [\#33730](https://github.com/airbytehq/airbyte/pull/33730) | Internal code structure changes | +| 3.4.18 | 2024-01-02 | [\#33728](https://github.com/airbytehq/airbyte/pull/33728) | Add option to only type and dedupe at the end of the sync | +| 3.4.17 | 2023-12-20 | [\#33704](https://github.com/airbytehq/airbyte/pull/33704) | Update to java CDK 0.10.0 (no changes) | +| 3.4.16 | 2023-12-18 | [\#33124](https://github.com/airbytehq/airbyte/pull/33124) | Make Schema Creation Seperate from Table Creation | +| 3.4.15 | 2023-12-13 | [\#33232](https://github.com/airbytehq/airbyte/pull/33232) | Only run typing+deduping for a stream if the stream had any records | +| 3.4.14 | 2023-12-08 | [\#33263](https://github.com/airbytehq/airbyte/pull/33263) | Adopt java CDK version 0.7.0 | +| 3.4.13 | 2023-12-05 | [\#32326](https://github.com/airbytehq/airbyte/pull/32326) | Use jdbc metadata for table existence check | +| 3.4.12 | 2023-12-04 | [\#33084](https://github.com/airbytehq/airbyte/pull/33084) | T&D SQL statements moved to debug log level | +| 3.4.11 | 2023-11-14 | [\#32526](https://github.com/airbytehq/airbyte/pull/32526) | Clean up memory manager logs. | +| 3.4.10 | 2023-11-08 | [\#32125](https://github.com/airbytehq/airbyte/pull/32125) | Fix compilation warnings. | +| 3.4.9 | 2023-11-06 | [\#32026](https://github.com/airbytehq/airbyte/pull/32026) | Add separate TRY_CAST transaction to reduce compute usage | +| 3.4.8 | 2023-11-06 | [\#32190](https://github.com/airbytehq/airbyte/pull/32190) | Further improve error reporting | +| 3.4.7 | 2023-11-06 | [\#32193](https://github.com/airbytehq/airbyte/pull/32193) | Adopt java CDK version 0.4.1. | +| 3.4.6 | 2023-11-02 | [\#32124](https://github.com/airbytehq/airbyte/pull/32124) | Revert `merge` statement | +| 3.4.5 | 2023-11-02 | [\#31983](https://github.com/airbytehq/airbyte/pull/31983) | Improve error reporting | +| 3.4.4 | 2023-10-30 | [\#31985](https://github.com/airbytehq/airbyte/pull/31985) | Delay upgrade deadline to Nov 7 | +| 3.4.3 | 2023-10-30 | [\#31960](https://github.com/airbytehq/airbyte/pull/31960) | Adopt java CDK version 0.2.0. | +| 3.4.2 | 2023-10-27 | [\#31897](https://github.com/airbytehq/airbyte/pull/31897) | Further filtering on extracted_at | +| 3.4.1 | 2023-10-27 | [\#31683](https://github.com/airbytehq/airbyte/pull/31683) | Performance enhancement (switch to a `merge` statement for incremental-dedup syncs) | +| 3.4.0 | 2023-10-25 | [\#31686](https://github.com/airbytehq/airbyte/pull/31686) | Opt out flag for typed and deduped tables | +| 3.3.0 | 2023-10-25 | [\#31520](https://github.com/airbytehq/airbyte/pull/31520) | Stop deduping raw table | +| 3.2.3 | 2023-10-17 | [\#31191](https://github.com/airbytehq/airbyte/pull/31191) | Improve typing+deduping performance by filtering new raw records on extracted_at | +| 3.2.2 | 2023-10-10 | [\#31194](https://github.com/airbytehq/airbyte/pull/31194) | Deallocate unused per stream buffer memory when empty | +| 3.2.1 | 2023-10-10 | [\#31083](https://github.com/airbytehq/airbyte/pull/31083) | Fix precision of numeric values in async destinations | +| 3.2.0 | 2023-10-09 | [\#31149](https://github.com/airbytehq/airbyte/pull/31149) | No longer fail syncs when PKs are null - try do dedupe anyway | +| 3.1.22 | 2023-10-06 | [\#31153](https://github.com/airbytehq/airbyte/pull/31153) | Increase jvm GC retries | +| 3.1.21 | 2023-10-06 | [\#31139](https://github.com/airbytehq/airbyte/pull/31139) | Bump CDK version | +| 3.1.20 | 2023-10-06 | [\#31129](https://github.com/airbytehq/airbyte/pull/31129) | Reduce async buffer size | +| 3.1.19 | 2023-10-04 | [\#31082](https://github.com/airbytehq/airbyte/pull/31082) | Revert null PK checks | +| 3.1.18 | 2023-10-01 | [\#30779](https://github.com/airbytehq/airbyte/pull/30779) | Final table PK columns become non-null and skip check for null PKs in raw records (performance) | +| 3.1.17 | 2023-09-29 | [\#30938](https://github.com/airbytehq/airbyte/pull/30938) | Upgrade snowflake-jdbc driver | +| 3.1.16 | 2023-09-28 | [\#30835](https://github.com/airbytehq/airbyte/pull/30835) | Fix regression from 3.1.15 in supporting concurrent syncs with identical stream name but different namespace | +| 3.1.15 | 2023-09-26 | [\#30775](https://github.com/airbytehq/airbyte/pull/30775) | Increase async block size | +| 3.1.14 | 2023-09-27 | [\#30739](https://github.com/airbytehq/airbyte/pull/30739) | Fix column name collision detection | +| 3.1.13 | 2023-09-19 | [\#30599](https://github.com/airbytehq/airbyte/pull/30599) | Support concurrent syncs with identical stream name but different namespace | +| 3.1.12 | 2023-09-21 | [\#30671](https://github.com/airbytehq/airbyte/pull/30671) | Reduce async buffer size | +| 3.1.11 | 2023-09-19 | [\#30592](https://github.com/airbytehq/airbyte/pull/30592) | Internal code changes | +| 3.1.10 | 2023-09-18 | [\#30546](https://github.com/airbytehq/airbyte/pull/30546) | Make sure that the async buffer are flush every 5 minutes | +| 3.1.9 | 2023-09-19 | [\#30319](https://github.com/airbytehq/airbyte/pull/30319) | Support column names that are reserved | +| 3.1.8 | 2023-09-18 | [\#30479](https://github.com/airbytehq/airbyte/pull/30479) | Fix async memory management | +| 3.1.7 | 2023-09-15 | [\#30491](https://github.com/airbytehq/airbyte/pull/30491) | Improve error message display | +| 3.1.6 | 2023-09-14 | [\#30439](https://github.com/airbytehq/airbyte/pull/30439) | Fix a transient error | +| 3.1.5 | 2023-09-13 | [\#30416](https://github.com/airbytehq/airbyte/pull/30416) | Support `${` in stream name/namespace, and in column names | +| 3.1.4 | 2023-09-12 | [\#30364](https://github.com/airbytehq/airbyte/pull/30364) | Add log message | +| 3.1.3 | 2023-08-29 | [\#29878](https://github.com/airbytehq/airbyte/pull/29878) | Reenable incremental typing and deduping | +| 3.1.2 | 2023-08-31 | [\#30020](https://github.com/airbytehq/airbyte/pull/30020) | Run typing and deduping tasks in parallel | +| 3.1.1 | 2023-09-05 | [\#30117](https://github.com/airbytehq/airbyte/pull/30117) | Type and Dedupe at sync start and then every 6 hours | +| 3.1.0 | 2023-09-01 | [\#30056](https://github.com/airbytehq/airbyte/pull/30056) | Upcase final table names to allow case-insensitive references | +| 3.0.2 | 2023-09-01 | [\#30121](https://github.com/airbytehq/airbyte/pull/30121) | Improve performance on very wide streams by skipping TRY_CAST on strings | +| 3.0.1 | 2023-08-27 | [\#30065](https://github.com/airbytehq/airbyte/pull/30065) | Clearer error thrown when records are missing a primary key | +| 3.0.0 | 2023-08-27 | [\#29783](https://github.com/airbytehq/airbyte/pull/29783) | Destinations V2 | +| 2.1.7 | 2023-08-29 | [\#29949](https://github.com/airbytehq/airbyte/pull/29949) | Destinations V2: Fix checking for empty table by ensuring upper-case DB names | +| 2.1.6 | 2023-08-28 | [\#29878](https://github.com/airbytehq/airbyte/pull/29878) | Destinations V2: Fix detection of existing table by ensuring upper-case DB names | +| 2.1.5 | 2023-08-28 | [\#29903](https://github.com/airbytehq/airbyte/pull/29917) | Destinations V2: Performance Improvement, Changing Metadata error array construction from ARRAY_CAT to ARRAY_CONSTRUCT_COMPACT | +| 2.1.4 | 2023-08-28 | [\#29903](https://github.com/airbytehq/airbyte/pull/29903) | Abort queries on crash | +| 2.1.3 | 2023-08-25 | [\#29881](https://github.com/airbytehq/airbyte/pull/29881) | Destinations v2: Only run T+D once at end of sync, to prevent data loss under async conditions | +| 2.1.2 | 2023-08-24 | [\#29805](https://github.com/airbytehq/airbyte/pull/29805) | Destinations v2: Don't soft reset in migration | +| 2.1.1 | 2023-08-23 | [\#29774](https://github.com/airbytehq/airbyte/pull/29774) | Destinations v2: Don't soft reset overwrite syncs | +| 2.1.0 | 2023-08-21 | [\#29636](https://github.com/airbytehq/airbyte/pull/29636) | Destinations v2: Several Critical Bug Fixes (cursorless dedup, improved floating-point handling, improved special characters handling; improved error handling) | +| 2.0.0 | 2023-08-09 | [\#28894](https://github.com/airbytehq/airbyte/pull/29236) | Remove support for Snowflake GCS/S3 loading method in favor of Snowflake Internal staging | +| 1.3.3 | 2023-08-15 | [\#29461](https://github.com/airbytehq/airbyte/pull/29461) | Changing a static constant reference | +| 1.3.2 | 2023-08-11 | [\#29381](https://github.com/airbytehq/airbyte/pull/29381) | Destinations v2: Add support for streams with no columns | +| 1.3.1 | 2023-08-04 | [\#28894](https://github.com/airbytehq/airbyte/pull/28894) | Destinations v2: Update SqlGenerator | +| 1.3.0 | 2023-08-07 | [\#29174](https://github.com/airbytehq/airbyte/pull/29174) | Destinations v2: early access release | +| 1.2.10 | 2023-08-07 | [\#29188](https://github.com/airbytehq/airbyte/pull/29188) | Internal code refactoring | +| 1.2.9 | 2023-08-04 | [\#28677](https://github.com/airbytehq/airbyte/pull/28677) | Destinations v2: internal code changes to prepare for early access release | +| 1.2.8 | 2023-08-03 | [\#29047](https://github.com/airbytehq/airbyte/pull/29047) | Avoid logging record if the format is invalid | +| 1.2.7 | 2023-08-02 | [\#28976](https://github.com/airbytehq/airbyte/pull/28976) | Fix composite PK handling in v1 mode | +| 1.2.6 | 2023-08-01 | [\#28618](https://github.com/airbytehq/airbyte/pull/28618) | Reduce logging noise | +| 1.2.5 | 2023-07-24 | [\#28618](https://github.com/airbytehq/airbyte/pull/28618) | Add hooks in preparation for destinations v2 implementation | +| 1.2.4 | 2023-07-21 | [\#28584](https://github.com/airbytehq/airbyte/pull/28584) | Install dependencies in preparation for destinations v2 work | +| 1.2.3 | 2023-07-21 | [\#28345](https://github.com/airbytehq/airbyte/pull/28345) | Pull in async framework minor bug fix for race condition on state emission | +| 1.2.2 | 2023-07-14 | [\#28345](https://github.com/airbytehq/airbyte/pull/28345) | Increment patch to trigger a rebuild | +| 1.2.1 | 2023-07-14 | [\#28315](https://github.com/airbytehq/airbyte/pull/28315) | Pull in async framework minor bug fix to avoid Snowflake hanging on close | +| 1.2.0 | 2023-07-5 | [\#27935](https://github.com/airbytehq/airbyte/pull/27935) | Enable Faster Snowflake Syncs with Asynchronous writes | +| 1.1.0 | 2023-06-27 | [\#27781](https://github.com/airbytehq/airbyte/pull/27781) | License Update: Elv2 | +| 1.0.6 | 2023-06-21 | [\#27555](https://github.com/airbytehq/airbyte/pull/27555) | Reduce image size | +| 1.0.5 | 2023-05-31 | [\#25782](https://github.com/airbytehq/airbyte/pull/25782) | Internal scaffolding for future development | +| 1.0.4 | 2023-05-19 | [\#26323](https://github.com/airbytehq/airbyte/pull/26323) | Prevent infinite retry loop under specific circumstances | +| 1.0.3 | 2023-05-15 | [\#26081](https://github.com/airbytehq/airbyte/pull/26081) | Reverts splits bases | +| 1.0.2 | 2023-05-05 | [\#25649](https://github.com/airbytehq/airbyte/pull/25649) | Splits bases (reverted) | +| 1.0.1 | 2023-04-29 | [\#25570](https://github.com/airbytehq/airbyte/pull/25570) | Internal library update | +| 1.0.0 | 2023-05-02 | [\#25739](https://github.com/airbytehq/airbyte/pull/25739) | Removed Azure Blob Storage as a loading method | +| 0.4.63 | 2023-04-27 | [\#25346](https://github.com/airbytehq/airbyte/pull/25346) | Added FlushBufferFunction interface | +| 0.4.61 | 2023-03-30 | [\#24736](https://github.com/airbytehq/airbyte/pull/24736) | Improve behavior when throttled by AWS API | +| 0.4.60 | 2023-03-30 | [\#24698](https://github.com/airbytehq/airbyte/pull/24698) | Add option in spec to allow increasing the stream buffer size to 50 | +| 0.4.59 | 2023-03-23 | [\#23904](https://github.com/airbytehq/airbyte/pull/24405) | Fail faster in certain error cases | +| 0.4.58 | 2023-03-27 | [\#24615](https://github.com/airbytehq/airbyte/pull/24615) | Fixed host validation by pattern on UI | +| 0.4.56 (broken) | 2023-03-22 | [\#23904](https://github.com/airbytehq/airbyte/pull/23904) | Added host validation by pattern on UI | +| 0.4.54 | 2023-03-17 | [\#23788](https://github.com/airbytehq/airbyte/pull/23788) | S3-Parquet: added handler to process null values in arrays | +| 0.4.53 | 2023-03-15 | [\#24058](https://github.com/airbytehq/airbyte/pull/24058) | added write attempt to internal staging Check method | +| 0.4.52 | 2023-03-10 | [\#23931](https://github.com/airbytehq/airbyte/pull/23931) | Added support for periodic buffer flush | +| 0.4.51 | 2023-03-10 | [\#23466](https://github.com/airbytehq/airbyte/pull/23466) | Changed S3 Avro type from Int to Long | +| 0.4.49 | 2023-02-27 | [\#23360](https://github.com/airbytehq/airbyte/pull/23360) | Added logging for flushing and writing data to destination storage | +| 0.4.48 | 2023-02-23 | [\#22877](https://github.com/airbytehq/airbyte/pull/22877) | Add handler for IP not in whitelist error and more handlers for insufficient permission error | +| 0.4.47 | 2023-01-30 | [\#21912](https://github.com/airbytehq/airbyte/pull/21912) | Catch "Create" Table and Stage Known Permissions and rethrow as ConfigExceptions | +| 0.4.46 | 2023-01-26 | [\#20631](https://github.com/airbytehq/airbyte/pull/20631) | Added support for destination checkpointing with staging | +| 0.4.45 | 2023-01-25 | [\#21087](https://github.com/airbytehq/airbyte/pull/21764) | Catch Known Permissions and rethrow as ConfigExceptions | +| 0.4.44 | 2023-01-20 | [\#21087](https://github.com/airbytehq/airbyte/pull/21087) | Wrap Authentication Errors as Config Exceptions | +| 0.4.43 | 2023-01-20 | [\#21450](https://github.com/airbytehq/airbyte/pull/21450) | Updated Check methods to handle more possible s3 and gcs stagings issues | +| 0.4.42 | 2023-01-12 | [\#21342](https://github.com/airbytehq/airbyte/pull/21342) | Better handling for conflicting destination streams | +| 0.4.41 | 2022-12-16 | [\#20566](https://github.com/airbytehq/airbyte/pull/20566) | Improve spec to adhere to standards | +| 0.4.40 | 2022-11-11 | [\#19302](https://github.com/airbytehq/airbyte/pull/19302) | Set jdbc application env variable depends on env - airbyte_oss or airbyte_cloud | +| 0.4.39 | 2022-11-09 | [\#18970](https://github.com/airbytehq/airbyte/pull/18970) | Updated "check" connection method to handle more errors | +| 0.4.38 | 2022-09-26 | [\#17115](https://github.com/airbytehq/airbyte/pull/17115) | Added connection string identifier | +| 0.4.37 | 2022-09-21 | [\#16839](https://github.com/airbytehq/airbyte/pull/16839) | Update JDBC driver for Snowflake to 3.13.19 | +| 0.4.36 | 2022-09-14 | [\#15668](https://github.com/airbytehq/airbyte/pull/15668) | Wrap logs in AirbyteLogMessage | +| 0.4.35 | 2022-09-01 | [\#16243](https://github.com/airbytehq/airbyte/pull/16243) | Fix Json to Avro conversion when there is field name clash from combined restrictions (`anyOf`, `oneOf`, `allOf` fields). | +| 0.4.34 | 2022-07-23 | [\#14388](https://github.com/airbytehq/airbyte/pull/14388) | Add support for key pair authentication | +| 0.4.33 | 2022-07-15 | [\#14494](https://github.com/airbytehq/airbyte/pull/14494) | Make S3 output filename configurable. | +| 0.4.32 | 2022-07-14 | [\#14618](https://github.com/airbytehq/airbyte/pull/14618) | Removed additionalProperties: false from JDBC destination connectors | +| 0.4.31 | 2022-07-07 | [\#13729](https://github.com/airbytehq/airbyte/pull/13729) | Improve configuration field description | +| 0.4.30 | 2022-06-24 | [\#14114](https://github.com/airbytehq/airbyte/pull/14114) | Remove "additionalProperties": false from specs for connectors with staging | +| 0.4.29 | 2022-06-17 | [\#13753](https://github.com/airbytehq/airbyte/pull/13753) | Deprecate and remove PART_SIZE_MB fields from connectors based on StreamTransferManager | +| 0.4.28 | 2022-05-18 | [\#12952](https://github.com/airbytehq/airbyte/pull/12952) | Apply buffering strategy on GCS staging | +| 0.4.27 | 2022-05-17 | [\#12820](https://github.com/airbytehq/airbyte/pull/12820) | Improved 'check' operation performance | +| 0.4.26 | 2022-05-12 | [\#12805](https://github.com/airbytehq/airbyte/pull/12805) | Updated to latest base-java to emit AirbyteTraceMessages on error. | +| 0.4.25 | 2022-05-03 | [\#12452](https://github.com/airbytehq/airbyte/pull/12452) | Add support for encrypted staging on S3; fix the purge_staging_files option | +| 0.4.24 | 2022-03-24 | [\#11093](https://github.com/airbytehq/airbyte/pull/11093) | Added OAuth support (Compatible with Airbyte Version 0.35.60+) | +| 0.4.22 | 2022-03-18 | [\#10793](https://github.com/airbytehq/airbyte/pull/10793) | Fix namespace with invalid characters | +| 0.4.21 | 2022-03-18 | [\#11071](https://github.com/airbytehq/airbyte/pull/11071) | Switch to compressed on-disk buffering before staging to s3/internal stage | +| 0.4.20 | 2022-03-14 | [\#10341](https://github.com/airbytehq/airbyte/pull/10341) | Add Azure blob staging support | +| 0.4.19 | 2022-03-11 | [\#10699](https://github.com/airbytehq/airbyte/pull/10699) | Added unit tests | +| 0.4.17 | 2022-02-25 | [\#10421](https://github.com/airbytehq/airbyte/pull/10421) | Refactor JDBC parameters handling | +| 0.4.16 | 2022-02-25 | [\#10627](https://github.com/airbytehq/airbyte/pull/10627) | Add try catch to make sure all handlers are closed | +| 0.4.15 | 2022-02-22 | [\#10459](https://github.com/airbytehq/airbyte/pull/10459) | Add FailureTrackingAirbyteMessageConsumer | +| 0.4.14 | 2022-02-17 | [\#10394](https://github.com/airbytehq/airbyte/pull/10394) | Reduce memory footprint. | +| 0.4.13 | 2022-02-16 | [\#10212](https://github.com/airbytehq/airbyte/pull/10212) | Execute COPY command in parallel for S3 and GCS staging | +| 0.4.12 | 2022-02-15 | [\#10342](https://github.com/airbytehq/airbyte/pull/10342) | Use connection pool, and fix connection leak. | +| 0.4.11 | 2022-02-14 | [\#9920](https://github.com/airbytehq/airbyte/pull/9920) | Updated the size of staging files for S3 staging. Also, added closure of S3 writers to staging files when data has been written to an staging file. | +| 0.4.10 | 2022-02-14 | [\#10297](https://github.com/airbytehq/airbyte/pull/10297) | Halve the record buffer size to reduce memory consumption. | +| 0.4.9 | 2022-02-14 | [\#10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `ExitOnOutOfMemoryError` JVM flag. | +| 0.4.8 | 2022-02-01 | [\#9959](https://github.com/airbytehq/airbyte/pull/9959) | Fix null pointer exception from buffered stream consumer. | +| 0.4.7 | 2022-01-29 | [\#9745](https://github.com/airbytehq/airbyte/pull/9745) | Integrate with Sentry. | +| 0.4.6 | 2022-01-28 | [\#9623](https://github.com/airbytehq/airbyte/pull/9623) | Add jdbc_url_params support for optional JDBC parameters | +| 0.4.5 | 2021-12-29 | [\#9184](https://github.com/airbytehq/airbyte/pull/9184) | Update connector fields title/description | +| 0.4.4 | 2022-01-24 | [\#9743](https://github.com/airbytehq/airbyte/pull/9743) | Fixed bug with dashes in schema name | +| 0.4.3 | 2022-01-20 | [\#9531](https://github.com/airbytehq/airbyte/pull/9531) | Start using new S3StreamCopier and expose the purgeStagingData option | +| 0.4.2 | 2022-01-10 | [\#9141](https://github.com/airbytehq/airbyte/pull/9141) | Fixed duplicate rows on retries | +| 0.4.1 | 2021-01-06 | [\#9311](https://github.com/airbytehq/airbyte/pull/9311) | Update сreating schema during check | +| 0.4.0 | 2021-12-27 | [\#9063](https://github.com/airbytehq/airbyte/pull/9063) | Updated normalization to produce permanent tables | +| 0.3.24 | 2021-12-23 | [\#8869](https://github.com/airbytehq/airbyte/pull/8869) | Changed staging approach to Byte-Buffered | +| 0.3.23 | 2021-12-22 | [\#9039](https://github.com/airbytehq/airbyte/pull/9039) | Added part_size configuration in UI for S3 loading method | +| 0.3.22 | 2021-12-21 | [\#9006](https://github.com/airbytehq/airbyte/pull/9006) | Updated jdbc schema naming to follow Snowflake Naming Conventions | +| 0.3.21 | 2021-12-15 | [\#8781](https://github.com/airbytehq/airbyte/pull/8781) | Updated check method to verify permissions to create/drop stage for internal staging; compatibility fix for Java 17 | +| 0.3.20 | 2021-12-10 | [\#8562](https://github.com/airbytehq/airbyte/pull/8562) | Moving classes around for better dependency management; compatibility fix for Java 17 | +| 0.3.19 | 2021-12-06 | [\#8528](https://github.com/airbytehq/airbyte/pull/8528) | Set Internal Staging as default choice | +| 0.3.18 | 2021-11-26 | [\#8253](https://github.com/airbytehq/airbyte/pull/8253) | Snowflake Internal Staging Support | +| 0.3.17 | 2021-11-08 | [\#7719](https://github.com/airbytehq/airbyte/pull/7719) | Improve handling of wide rows by buffering records based on their byte size rather than their count | +| 0.3.15 | 2021-10-11 | [\#6949](https://github.com/airbytehq/airbyte/pull/6949) | Each stream was split into files of 10,000 records each for copying using S3 or GCS | +| 0.3.14 | 2021-09-08 | [\#5924](https://github.com/airbytehq/airbyte/pull/5924) | Fixed AWS S3 Staging COPY is writing records from different table in the same raw table | +| 0.3.13 | 2021-09-01 | [\#5784](https://github.com/airbytehq/airbyte/pull/5784) | Updated query timeout from 30 minutes to 3 hours | +| 0.3.12 | 2021-07-30 | [\#5125](https://github.com/airbytehq/airbyte/pull/5125) | Enable `additionalPropertities` in spec.json | +| 0.3.11 | 2021-07-21 | [\#3555](https://github.com/airbytehq/airbyte/pull/3555) | Partial Success in BufferedStreamConsumer | +| 0.3.10 | 2021-07-12 | [\#4713](https://github.com/airbytehq/airbyte/pull/4713) | Tag traffic with `airbyte` label to enable optimization opportunities from Snowflake | diff --git a/docs/integrations/destinations/typesense.md b/docs/integrations/destinations/typesense.md index 6279d47626daf..b56203cbbca0d 100644 --- a/docs/integrations/destinations/typesense.md +++ b/docs/integrations/destinations/typesense.md @@ -37,6 +37,7 @@ The setup only requires two fields. First is the `host` which is the address at | Version | Date | Pull Request | Subject | | :------ | :--------- | :------------------------------------------------------- | :---------------------------- | +| 0.1.4 | 2024-03-25 | [36460](https://github.com/airbytehq/airbyte/pull/36460) | Added path config option | | 0.1.3 | 2024-01-17 | [34336](https://github.com/airbytehq/airbyte/pull/34336) | Fix check() arguments error | | 0.1.2 | 2023-08-25 | [29817](https://github.com/airbytehq/airbyte/pull/29817) | Fix writing multiple streams | | 0.1.1 | 2023-08-24 | [29555](https://github.com/airbytehq/airbyte/pull/29555) | Increasing connection timeout | diff --git a/docs/integrations/destinations/vectara.md b/docs/integrations/destinations/vectara.md index 30bce2b4d076a..af29d82dfdf12 100644 --- a/docs/integrations/destinations/vectara.md +++ b/docs/integrations/destinations/vectara.md @@ -63,6 +63,8 @@ In addition, in the connector UI you define two set of fields for this connector | Version | Date | Pull Request | Subject | | :------ | :--------- | :------------------------------------------------------- | :---------------------------------------------------------------- | -| 0.2.1 | 2024-03-05 | [35206](https://github.com/airbytehq/airbyte/pull/35206) | Fix: improved title parsing | -| 0.2.0 | 2024-01-29 | [34579](https://github.com/airbytehq/airbyte/pull/34579) | Add document title file configuration | -| 0.1.0 | 2023-11-10 | [31958](https://github.com/airbytehq/airbyte/pull/31958) | 🎉 New Destination: Vectara (Vector Database) | +| 0.2.3 | 2024-03-22 | [#37333](https://github.com/airbytehq/airbyte/pull/37333) | Updated CDK & pytest version to fix security vulnerabilities | +| 0.2.2 | 2024-03-22 | [#36261](https://github.com/airbytehq/airbyte/pull/36261) | Move project to Poetry | +| 0.2.1 | 2024-03-05 | [#35206](https://github.com/airbytehq/airbyte/pull/35206) | Fix: improved title parsing | +| 0.2.0 | 2024-01-29 | [#34579](https://github.com/airbytehq/airbyte/pull/34579) | Add document title file configuration | +| 0.1.0 | 2023-11-10 | [#31958](https://github.com/airbytehq/airbyte/pull/31958) | 🎉 New Destination: Vectara (Vector Database) | diff --git a/docs/integrations/destinations/weaviate.md b/docs/integrations/destinations/weaviate.md index 583247263ac02..05a1261c57484 100644 --- a/docs/integrations/destinations/weaviate.md +++ b/docs/integrations/destinations/weaviate.md @@ -85,13 +85,15 @@ When using [multi-tenancy](https://weaviate.io/developers/weaviate/manage-data/m | Version | Date | Pull Request | Subject | | :------ | :--------- | :--------------------------------------------------------- | :------------------------------------------------------------------------------------------------------------------------------- | -| 0.2.15 | 2023-01-25 | [34529](https://github.com/airbytehq/airbyte/pull/34529) | Fix tests | -| 0.2.14 | 2023-01-15 | [34229](https://github.com/airbytehq/airbyte/pull/34229) | Allow configuring tenant id | -| 0.2.13 | 2023-12-11 | [33303](https://github.com/airbytehq/airbyte/pull/33303) | Fix bug with embedding special tokens | -| 0.2.12 | 2023-12-07 | [33218](https://github.com/airbytehq/airbyte/pull/33218) | Normalize metadata field names | -| 0.2.11 | 2023-12-01 | [32697](https://github.com/airbytehq/airbyte/pull/32697) | Allow omitting raw text | -| 0.2.10 | 2023-11-16 | [32608](https://github.com/airbytehq/airbyte/pull/32608) | Support deleting records for CDC sources | -| 0.2.9 | 2023-11-13 | [32357](https://github.com/airbytehq/airbyte/pull/32357) | Improve spec schema | +| 0.2.17 | 2024-04-15 | [#37333](https://github.com/airbytehq/airbyte/pull/37333) | Update CDK & pytest version to fix security vulnerabilities. +| 0.2.16 | 2024-03-22 | [#35911](https://github.com/airbytehq/airbyte/pull/35911) | Fix tests and move to Poetry | +| 0.2.15 | 2023-01-25 | [#34529](https://github.com/airbytehq/airbyte/pull/34529) | Fix tests | +| 0.2.14 | 2023-01-15 | [#34229](https://github.com/airbytehq/airbyte/pull/34229) | Allow configuring tenant id | +| 0.2.13 | 2023-12-11 | [#33303](https://github.com/airbytehq/airbyte/pull/33303) | Fix bug with embedding special tokens | +| 0.2.12 | 2023-12-07 | [#33218](https://github.com/airbytehq/airbyte/pull/33218) | Normalize metadata field names | +| 0.2.11 | 2023-12-01 | [#32697](https://github.com/airbytehq/airbyte/pull/32697) | Allow omitting raw text | +| 0.2.10 | 2023-11-16 | [#32608](https://github.com/airbytehq/airbyte/pull/32608) | Support deleting records for CDC sources | +| 0.2.9 | 2023-11-13 | [#32357](https://github.com/airbytehq/airbyte/pull/32357) | Improve spec schema | | 0.2.8 | 2023-11-03 | [#32134](https://github.com/airbytehq/airbyte/pull/32134) | Improve test coverage | | 0.2.7 | 2023-11-03 | [#32134](https://github.com/airbytehq/airbyte/pull/32134) | Upgrade weaviate client library | | 0.2.6 | 2023-11-01 | [#32038](https://github.com/airbytehq/airbyte/pull/32038) | Retry failed object loads | diff --git a/docs/integrations/sources/adjust.md b/docs/integrations/sources/adjust.md index 6d96d7df3681a..a359a1e0b3b27 100644 --- a/docs/integrations/sources/adjust.md +++ b/docs/integrations/sources/adjust.md @@ -36,6 +36,6 @@ The source connector supports the following [sync modes](https://docs.airbyte.co ## Changelog -| Version | Date | Pull Request | Description | +| Version | Date | Pull Request | Subject | |---------|------------|----------------------------------------------------------|------------------| | 0.1.0 | 2022-08-26 | [16051](https://github.com/airbytehq/airbyte/pull/16051) | Initial version. | diff --git a/docs/integrations/sources/airtable.md b/docs/integrations/sources/airtable.md index c465f9a960e3d..266ef1a1c9fcb 100644 --- a/docs/integrations/sources/airtable.md +++ b/docs/integrations/sources/airtable.md @@ -120,8 +120,9 @@ See information about rate limits [here](https://airtable.com/developers/web/api | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:---------------------------------------------------------------------------------------| -| 4.1.6 | 2024-02-12 | [35149](https://github.com/airbytehq/airbyte/pull/35149) | Manage dependencies with Poetry. | -| 4.1.5 | 2023-10-19 | [31599](https://github.com/airbytehq/airbyte/pull/31599) | Base image migration: remove Dockerfile and use the python-connector-base image | +| 4.2.0 | 2024-03-19 | [36267](https://github.com/airbytehq/airbyte/pull/36267) | Pin airbyte-cdk version to `^0` | +| 4.1.6 | 2024-02-12 | [35149](https://github.com/airbytehq/airbyte/pull/35149) | Manage dependencies with Poetry. | +| 4.1.5 | 2023-10-19 | [31599](https://github.com/airbytehq/airbyte/pull/31599) | Base image migration: remove Dockerfile and use the python-connector-base image | | 4.1.4 | 2023-10-19 | [31360](https://github.com/airbytehq/airbyte/pull/31360) | Update docstings | | 4.1.3 | 2023-10-13 | [31360](https://github.com/airbytehq/airbyte/pull/31360) | Update error message for invalid permissions | | 4.1.2 | 2023-10-10 | [31215](https://github.com/airbytehq/airbyte/pull/31215) | Exclude bases without permission | diff --git a/docs/integrations/sources/amazon-ads-migrations.md b/docs/integrations/sources/amazon-ads-migrations.md index 11f3e15cb5ef9..b9447fd491f9e 100644 --- a/docs/integrations/sources/amazon-ads-migrations.md +++ b/docs/integrations/sources/amazon-ads-migrations.md @@ -1,5 +1,46 @@ # Amazon Ads Migration Guide +## Upgrading to 5.0.0 + +The following streams have updated schemas due to a change with the Amazon Ads API: + +* `SponsoredBrandsCampaigns` +* `SponsoredBrandsAdGroups` +* `SponsoredProductsCampaigns` +* `SponsoredProductsAdGroupBidRecommendations` + +### Schema Changes - Removed/Added Fields + +| Stream Name | Removed Fields | Added Fields | +|-------------------------------------------------|-----------------------------|--------------------------| +| `SponsoredBrandsCampaigns` | `serviceStatus`, `bidOptimization`, `bidMultiplier`, `adFormat`, `bidAdjustments`, `creative`, `landingPage`, `supplySource` | `ruleBasedBudget`, `bidding`, `productLocation`, `costType`, `smartDefault`, `extendedData` | +| `SponsoredBrandsAdGroups` | `bid`, `keywordId`, `keywordText`, `nativeLanuageKeyword`, `matchType` | `extendedData` | +| `SponsoredProductsCampaigns` | `campaignType`, `dailyBudget`, `ruleBasedBudget`, `premiumBidAdjustment`, `networks` | `dynamicBidding`, `budget`, `extendedData` | +| `SponsoredProductsAdGroupBidRecommendations` | `suggestedBid` | `theme`, `bidRecommendationsForTargetingExpressions` | + +### Refresh affected schemas and reset data + +1. Select **Connections** in the main navbar. + 1. Select the connection(s) affected by the update. +2. Select the **Replication** tab. + 1. Select **Refresh source schema**. + 2. Select **OK**. +```note +Any detected schema changes will be listed for your review. +``` +3. Select **Save changes** at the bottom of the page. + 1. Ensure the **Reset affected streams** option is checked. +```note +Depending on destination type you may not be prompted to reset your data. +``` +4. Select **Save connection**. +```note +This will reset the data in your destination and initiate a fresh sync. +``` + +For more information on resetting your data in Airbyte, see [this page](https://docs.airbyte.com/operator-guides/reset). + + ## Upgrading to 4.0.0 Streams `SponsoredBrandsAdGroups` and `SponsoredBrandsKeywords` now have updated schemas. @@ -19,7 +60,7 @@ Any detected schema changes will be listed for your review. ```note Depending on destination type you may not be prompted to reset your data. ``` -4. Select **Save connection**. +4. Select **Save connection**. ```note This will reset the data in your destination and initiate a fresh sync. ``` diff --git a/docs/integrations/sources/amazon-ads.md b/docs/integrations/sources/amazon-ads.md index 275f2d38a5351..196dc1fd4b1ef 100644 --- a/docs/integrations/sources/amazon-ads.md +++ b/docs/integrations/sources/amazon-ads.md @@ -83,6 +83,10 @@ This source is capable of syncing the following streams: * [Products Reports](https://advertising.amazon.com/API/docs/en-us/sponsored-products/2-0/openapi#/Reports) * [Attribution Reports](https://advertising.amazon.com/API/docs/en-us/amazon-attribution-prod-3p/#/) +:::note +As of connector version 5.0.0, the `Sponsored Products Ad Group Bid Recommendations` stream provides bid recommendations and impact metrics for an existing automatic targeting ad group. The stream returns bid recommendations for match types `CLOSE_MATCH`, `LOOSE_MATCH`, `SUBSTITUTES`, and `COMPLEMENTS` per theme. For more detail on theme-based bid recommendations, review Amazon's [Theme-base bid suggestions - Quick-start guide](https://advertising.amazon.com/API/docs/en-us/guides/sponsored-products/bid-suggestions/theme-based-bid-suggestions-quickstart-guide). +::: + ## Connector-specific features and highlights All the reports are generated relative to the target profile's timezone. @@ -110,6 +114,8 @@ Information about expected report generation waiting time can be found [here](ht | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------| +| 5.0.0 | 2024-03-22 | [36169](https://github.com/airbytehq/airbyte/pull/36169) | Update `SponsoredBrand` and `SponsoredProduct` streams due to API endpoint deprecation | +| 4.1.0 | 2024-03-19 | [36267](https://github.com/airbytehq/airbyte/pull/36267) | Pin airbyte-cdk version to `^0` | | 4.0.4 | 2024-02-23 | [35481](https://github.com/airbytehq/airbyte/pull/35481) | Migrate source to `YamlDeclarativeSource` with custom `check_connection` | | 4.0.3 | 2024-02-12 | [35180](https://github.com/airbytehq/airbyte/pull/35180) | Manage dependencies with Poetry | | 4.0.2 | 2024-02-08 | [35013](https://github.com/airbytehq/airbyte/pull/35013) | Add missing field to `sponsored_display_budget_rules` stream | diff --git a/docs/integrations/sources/amazon-seller-partner.md b/docs/integrations/sources/amazon-seller-partner.md index 40f11a5e7d122..f6cee803262cb 100644 --- a/docs/integrations/sources/amazon-seller-partner.md +++ b/docs/integrations/sources/amazon-seller-partner.md @@ -135,10 +135,11 @@ The Amazon Seller Partner source connector supports the following [sync modes](h - [Unshipped Orders Report](https://developer-docs.amazon.com/sp-api/docs/report-type-values-order#order-reports) \(incremental\) - [Vendor Direct Fulfillment Shipping](https://developer-docs.amazon.com/sp-api/docs/vendor-direct-fulfillment-shipping-api-v1-reference) \(incremental\) - [Vendor Inventory Report](https://developer-docs.amazon.com/sp-api/docs/report-type-values-analytics#vendor-retail-analytics-reports) \(incremental\) +- [Vendor Forecasting Report](https://developer-docs.amazon.com/sp-api/docs/report-type-values-analytics#vendor-retail-analytics-reports) \(full-refresh\) +- [Vendor Orders](https://developer-docs.amazon.com/sp-api/docs/vendor-orders-api-v1-reference#get-vendorordersv1purchaseorders) \(incremental\) - [Vendor Sales Report](https://developer-docs.amazon.com/sp-api/docs/report-type-values-analytics#vendor-retail-analytics-reports) \(incremental\) - [Vendor Traffic Report](https://developer-docs.amazon.com/sp-api/docs/report-type-values-analytics#vendor-retail-analytics-reports) \(incremental\) - [XML Orders By Order Date Report](https://developer-docs.amazon.com/sp-api/docs/report-type-values-order#order-tracking-reports) \(incremental\) -- [Vendor Orders](https://developer-docs.amazon.com/sp-api/docs/vendor-orders-api-v1-reference#get-vendorordersv1purchaseorders) \(incremental\) ## Report options @@ -149,6 +150,9 @@ Certain report types have required parameters that must be defined. For `GET_AMAZON_FULFILLED_SHIPMENTS_DATA_GENERAL` and `GET_FLAT_FILE_RETURNS_DATA_BY_RETURN_DATE` streams maximum value for `period_in_days` 30 days and 60 days. So, for any value that exceeds the limit, the `period_in_days` will be automatically reduced to the limit for the stream. +For the Vendor Forecasting Report, we have two streams - `GET_VENDOR_FORECASTING_FRESH_REPORT` and `GET_VENDOR_FORECASTING_RETAIL_REPORT` which use the same `GET_VENDOR_FORECASTING_REPORT` Amazon's report, +but with different options for the `sellingProgram` parameter - `FRESH` and `RETAIL` respectively. + ## Performance considerations Information about rate limits you may find [here](https://developer-docs.amazon.com/sp-api/docs/usage-plans-and-rate-limits-in-the-sp-api). @@ -166,73 +170,76 @@ Information about rate limits you may find [here](https://developer-docs.amazon. ## Changelog -| Version | Date | Pull Request | Subject | -|:---------|:-----------|:------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| `4.0.0` | 2024-02-23 | [\#35439](https://github.com/airbytehq/airbyte/pull/35439) | Update schema for the `GET_FBA_STORAGE_FEE_CHARGES_DATA` stream | -| `3.5.0` | 2024-02-09 | [\#35331](https://github.com/airbytehq/airbyte/pull/35331) | Fix check for Vendor accounts. Add failed report result message | -| `3.4.0` | 2024-02-15 | [\#35273](https://github.com/airbytehq/airbyte/pull/35273) | Add `VendorOrders` stream | -| `3.3.2` | 2024-02-13 | [\#33996](https://github.com/airbytehq/airbyte/pull/33996) | Add integration tests | -| `3.3.1` | 2024-02-09 | [\#35106](https://github.com/airbytehq/airbyte/pull/35106) | Add logs for the failed check command | -| `3.3.0` | 2024-02-09 | [\#35062](https://github.com/airbytehq/airbyte/pull/35062) | Fix the check command for the `Vendor` account type | -| `3.2.2` | 2024-02-07 | [\#34914](https://github.com/airbytehq/airbyte/pull/34914) | Fix date formatting for ledger reports with aggregation by month | -| `3.2.1` | 2024-01-30 | [\#34654](https://github.com/airbytehq/airbyte/pull/34654) | Fix date format in state message for streams with custom dates formatting | -| `3.2.0` | 2024-01-26 | [\#34549](https://github.com/airbytehq/airbyte/pull/34549) | Update schemas for vendor analytics streams | -| `3.1.0` | 2024-01-17 | [\#34283](https://github.com/airbytehq/airbyte/pull/34283) | Delete deprecated streams | -| `3.0.1` | 2023-12-22 | [\#33741](https://github.com/airbytehq/airbyte/pull/33741) | Improve report streams performance | -| `3.0.0` | 2023-12-12 | [\#32977](https://github.com/airbytehq/airbyte/pull/32977) | Make all streams incremental | -| `2.5.0` | 2023-11-27 | [\#32505](https://github.com/airbytehq/airbyte/pull/32505) | Make report options configurable via UI | -| `2.4.0` | 2023-11-23 | [\#32738](https://github.com/airbytehq/airbyte/pull/32738) | Add `GET_VENDOR_NET_PURE_PRODUCT_MARGIN_REPORT`, `GET_VENDOR_REAL_TIME_INVENTORY_REPORT`, and `GET_VENDOR_TRAFFIC_REPORT` streams | -| `2.3.0` | 2023-11-22 | [\#32541](https://github.com/airbytehq/airbyte/pull/32541) | Make `GET_AFN_INVENTORY_DATA`, `GET_AFN_INVENTORY_DATA_BY_COUNTRY`, and `GET_V2_SETTLEMENT_REPORT_DATA_FLAT_FILE` streams incremental | -| `2.2.0` | 2023-11-21 | [\#32639](https://github.com/airbytehq/airbyte/pull/32639) | Make start date optional, if start date is not provided, date 2 years ago from today will be used | -| `2.1.1` | 2023-11-21 | [\#32560](https://github.com/airbytehq/airbyte/pull/32560) | Silently exit sync if the retry attempts were unsuccessful | -| `2.1.0` | 2023-11-21 | [\#32591](https://github.com/airbytehq/airbyte/pull/32591) | Add new fields to GET_LEDGER_DETAIL_VIEW_DATA, GET_FBA_INVENTORY_PLANNING_DATA and Orders schemas | -| `2.0.2` | 2023-11-17 | [\#32462](https://github.com/airbytehq/airbyte/pull/32462) | Remove Max time option from specification; set default waiting time for reports to 1 hour | -| `2.0.1` | 2023-11-16 | [\#32550](https://github.com/airbytehq/airbyte/pull/32550) | Fix the OAuth flow | -| `2.0.0` | 2023-11-23 | [\#32355](https://github.com/airbytehq/airbyte/pull/32355) | Remove Brand Analytics from Airbyte Cloud, permanently remove deprecated FBA reports | -| `1.6.2` | 2023-11-14 | [\#32508](https://github.com/airbytehq/airbyte/pull/32508) | Do not use AWS signature as it is no longer required by the Amazon API | -| `1.6.1` | 2023-11-13 | [\#32457](https://github.com/airbytehq/airbyte/pull/32457) | Fix report decompression | -| `1.6.0` | 2023-11-09 | [\#32259](https://github.com/airbytehq/airbyte/pull/32259) | mark "aws_secret_key" and "aws_access_key" as required in specification; update schema for stream `Orders` | -| `1.5.1` | 2023-08-18 | [\#29255](https://github.com/airbytehq/airbyte/pull/29255) | role_arn is optional on UI but not really on the backend blocking connector set up using oauth | -| `1.5.0` | 2023-08-08 | [\#29054](https://github.com/airbytehq/airbyte/pull/29054) | Add new stream `OrderItems` | -| `1.4.1` | 2023-07-25 | [\#27050](https://github.com/airbytehq/airbyte/pull/27050) | Fix - non vendor accounts connector create/check issue | -| `1.4.0` | 2023-07-21 | [\#27110](https://github.com/airbytehq/airbyte/pull/27110) | Add `GET_FLAT_FILE_ACTIONABLE_ORDER_DATA_SHIPPING` and `GET_ORDER_REPORT_DATA_SHIPPING` streams | -| `1.3.0` | 2023-06-09 | [\#27110](https://github.com/airbytehq/airbyte/pull/27110) | Removed `app_id` from `InputConfiguration`, refactored `spec` | -| `1.2.0` | 2023-05-23 | [\#22503](https://github.com/airbytehq/airbyte/pull/22503) | Enabled stream attribute customization from Source configuration | -| `1.1.0` | 2023-04-21 | [\#23605](https://github.com/airbytehq/airbyte/pull/23605) | Add FBA Reimbursement Report stream | -| `1.0.1` | 2023-03-15 | [\#24098](https://github.com/airbytehq/airbyte/pull/24098) | Add Belgium Marketplace | -| `1.0.0` | 2023-03-13 | [\#23980](https://github.com/airbytehq/airbyte/pull/23980) | Make `app_id` required. Increase `end_date` gap up to 5 minutes from now for Finance streams. Fix connection check failure when trying to connect to Amazon Vendor Central accounts | -| `0.2.33` | 2023-03-01 | [\#23606](https://github.com/airbytehq/airbyte/pull/23606) | Implement reportOptions for all missing reports and refactor | -| `0.2.32` | 2022-02-21 | [\#23300](https://github.com/airbytehq/airbyte/pull/23300) | Make AWS Access Key, AWS Secret Access and Role ARN optional | -| `0.2.31` | 2022-01-10 | [\#16430](https://github.com/airbytehq/airbyte/pull/16430) | Implement slicing for report streams | -| `0.2.30` | 2022-12-28 | [\#20896](https://github.com/airbytehq/airbyte/pull/20896) | Validate connections without orders data | -| `0.2.29` | 2022-11-18 | [\#19581](https://github.com/airbytehq/airbyte/pull/19581) | Use user provided end date for GET_SALES_AND_TRAFFIC_REPORT | -| `0.2.28` | 2022-10-20 | [\#18283](https://github.com/airbytehq/airbyte/pull/18283) | Added multiple (22) report types | -| `0.2.26` | 2022-09-24 | [\#16629](https://github.com/airbytehq/airbyte/pull/16629) | Report API version to 2021-06-30, added multiple (5) report types | -| `0.2.25` | 2022-07-27 | [\#15063](https://github.com/airbytehq/airbyte/pull/15063) | Add Restock Inventory Report | -| `0.2.24` | 2022-07-12 | [\#14625](https://github.com/airbytehq/airbyte/pull/14625) | Add FBA Storage Fees Report | -| `0.2.23` | 2022-06-08 | [\#13604](https://github.com/airbytehq/airbyte/pull/13604) | Add new streams: Fullfiments returns and Settlement reports | -| `0.2.22` | 2022-06-15 | [\#13633](https://github.com/airbytehq/airbyte/pull/13633) | Fix - handle start date for financial stream | -| `0.2.21` | 2022-06-01 | [\#13364](https://github.com/airbytehq/airbyte/pull/13364) | Add financial streams | -| `0.2.20` | 2022-05-30 | [\#13059](https://github.com/airbytehq/airbyte/pull/13059) | Add replication end date to config | -| `0.2.19` | 2022-05-24 | [\#13119](https://github.com/airbytehq/airbyte/pull/13119) | Add OAuth2.0 support | -| `0.2.18` | 2022-05-06 | [\#12663](https://github.com/airbytehq/airbyte/pull/12663) | Add GET_XML_BROWSE_TREE_DATA report | -| `0.2.17` | 2022-05-19 | [\#12946](https://github.com/airbytehq/airbyte/pull/12946) | Add throttling exception managing in Orders streams | -| `0.2.16` | 2022-05-04 | [\#12523](https://github.com/airbytehq/airbyte/pull/12523) | allow to use IAM user arn or IAM role | -| `0.2.15` | 2022-01-25 | [\#9789](https://github.com/airbytehq/airbyte/pull/9789) | Add stream FbaReplacementsReports | -| `0.2.14` | 2022-01-19 | [\#9621](https://github.com/airbytehq/airbyte/pull/9621) | Add GET_FLAT_FILE_ALL_ORDERS_DATA_BY_LAST_UPDATE_GENERAL report | -| `0.2.13` | 2022-01-18 | [\#9581](https://github.com/airbytehq/airbyte/pull/9581) | Change createdSince parameter to dataStartTime | -| `0.2.12` | 2022-01-05 | [\#9312](https://github.com/airbytehq/airbyte/pull/9312) | Add all remaining brand analytics report streams | -| `0.2.11` | 2022-01-05 | [\#9115](https://github.com/airbytehq/airbyte/pull/9115) | Fix reading only 100 orders | -| `0.2.10` | 2021-12-31 | [\#9236](https://github.com/airbytehq/airbyte/pull/9236) | Fix NoAuth deprecation warning | -| `0.2.9` | 2021-12-30 | [\#9212](https://github.com/airbytehq/airbyte/pull/9212) | Normalize GET_SELLER_FEEDBACK_DATA header field names | -| `0.2.8` | 2021-12-22 | [\#8810](https://github.com/airbytehq/airbyte/pull/8810) | Fix GET_SELLER_FEEDBACK_DATA Date cursor field format | -| `0.2.7` | 2021-12-21 | [\#9002](https://github.com/airbytehq/airbyte/pull/9002) | Extract REPORTS_MAX_WAIT_SECONDS to configurable parameter | -| `0.2.6` | 2021-12-10 | [\#8179](https://github.com/airbytehq/airbyte/pull/8179) | Add GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT report | -| `0.2.5` | 2021-12-06 | [\#8425](https://github.com/airbytehq/airbyte/pull/8425) | Update title, description fields in spec | -| `0.2.4` | 2021-11-08 | [\#8021](https://github.com/airbytehq/airbyte/pull/8021) | Added GET_SELLER_FEEDBACK_DATA report with incremental sync capability | -| `0.2.3` | 2021-11-08 | [\#7828](https://github.com/airbytehq/airbyte/pull/7828) | Remove datetime format from all streams | -| `0.2.2` | 2021-11-08 | [\#7752](https://github.com/airbytehq/airbyte/pull/7752) | Change `check_connection` function to use stream Orders | -| `0.2.1` | 2021-09-17 | [\#5248](https://github.com/airbytehq/airbyte/pull/5248) | Added `extra stream` support. Updated `reports streams` logics | -| `0.2.0` | 2021-08-06 | [\#4863](https://github.com/airbytehq/airbyte/pull/4863) | Rebuild source with `airbyte-cdk` | -| `0.1.3` | 2021-06-23 | [\#4288](https://github.com/airbytehq/airbyte/pull/4288) | Bugfix failing `connection check` | -| `0.1.2` | 2021-06-15 | [\#4108](https://github.com/airbytehq/airbyte/pull/4108) | Fixed: Sync fails with timeout when create report is CANCELLED` | +| Version | Date | Pull Request | Subject | +|:---------|:-----------|:-----------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| `4.2.1` | 2024-04-08 | [\#36895](https://github.com/airbytehq/airbyte/pull/36895) | Fix `reportPeriod` day query params | +| `4.2.0` | 2024-03-19 | [\#36267](https://github.com/airbytehq/airbyte/pull/36267) | Pin airbyte-cdk version to `^0` | +| `4.1.0` | 2024-03-12 | [\#35954](https://github.com/airbytehq/airbyte/pull/35954) | Add `GET_VENDOR_FORECASTING_FRESH_REPORT` and `GET_VENDOR_FORECASTING_RETAIL_REPORT` streams | +| `4.0.0` | 2024-02-23 | [\#35439](https://github.com/airbytehq/airbyte/pull/35439) | Update schema for the `GET_FBA_STORAGE_FEE_CHARGES_DATA` stream | +| `3.5.0` | 2024-02-09 | [\#35331](https://github.com/airbytehq/airbyte/pull/35331) | Fix check for Vendor accounts. Add failed report result message | +| `3.4.0` | 2024-02-15 | [\#35273](https://github.com/airbytehq/airbyte/pull/35273) | Add `VendorOrders` stream | +| `3.3.2` | 2024-02-13 | [\#33996](https://github.com/airbytehq/airbyte/pull/33996) | Add integration tests | +| `3.3.1` | 2024-02-09 | [\#35106](https://github.com/airbytehq/airbyte/pull/35106) | Add logs for the failed check command | +| `3.3.0` | 2024-02-09 | [\#35062](https://github.com/airbytehq/airbyte/pull/35062) | Fix the check command for the `Vendor` account type | +| `3.2.2` | 2024-02-07 | [\#34914](https://github.com/airbytehq/airbyte/pull/34914) | Fix date formatting for ledger reports with aggregation by month | +| `3.2.1` | 2024-01-30 | [\#34654](https://github.com/airbytehq/airbyte/pull/34654) | Fix date format in state message for streams with custom dates formatting | +| `3.2.0` | 2024-01-26 | [\#34549](https://github.com/airbytehq/airbyte/pull/34549) | Update schemas for vendor analytics streams | +| `3.1.0` | 2024-01-17 | [\#34283](https://github.com/airbytehq/airbyte/pull/34283) | Delete deprecated streams | +| `3.0.1` | 2023-12-22 | [\#33741](https://github.com/airbytehq/airbyte/pull/33741) | Improve report streams performance | +| `3.0.0` | 2023-12-12 | [\#32977](https://github.com/airbytehq/airbyte/pull/32977) | Make all streams incremental | +| `2.5.0` | 2023-11-27 | [\#32505](https://github.com/airbytehq/airbyte/pull/32505) | Make report options configurable via UI | +| `2.4.0` | 2023-11-23 | [\#32738](https://github.com/airbytehq/airbyte/pull/32738) | Add `GET_VENDOR_NET_PURE_PRODUCT_MARGIN_REPORT`, `GET_VENDOR_REAL_TIME_INVENTORY_REPORT`, and `GET_VENDOR_TRAFFIC_REPORT` streams | +| `2.3.0` | 2023-11-22 | [\#32541](https://github.com/airbytehq/airbyte/pull/32541) | Make `GET_AFN_INVENTORY_DATA`, `GET_AFN_INVENTORY_DATA_BY_COUNTRY`, and `GET_V2_SETTLEMENT_REPORT_DATA_FLAT_FILE` streams incremental | +| `2.2.0` | 2023-11-21 | [\#32639](https://github.com/airbytehq/airbyte/pull/32639) | Make start date optional, if start date is not provided, date 2 years ago from today will be used | +| `2.1.1` | 2023-11-21 | [\#32560](https://github.com/airbytehq/airbyte/pull/32560) | Silently exit sync if the retry attempts were unsuccessful | +| `2.1.0` | 2023-11-21 | [\#32591](https://github.com/airbytehq/airbyte/pull/32591) | Add new fields to GET_LEDGER_DETAIL_VIEW_DATA, GET_FBA_INVENTORY_PLANNING_DATA and Orders schemas | +| `2.0.2` | 2023-11-17 | [\#32462](https://github.com/airbytehq/airbyte/pull/32462) | Remove Max time option from specification; set default waiting time for reports to 1 hour | +| `2.0.1` | 2023-11-16 | [\#32550](https://github.com/airbytehq/airbyte/pull/32550) | Fix the OAuth flow | +| `2.0.0` | 2023-11-23 | [\#32355](https://github.com/airbytehq/airbyte/pull/32355) | Remove Brand Analytics from Airbyte Cloud, permanently remove deprecated FBA reports | +| `1.6.2` | 2023-11-14 | [\#32508](https://github.com/airbytehq/airbyte/pull/32508) | Do not use AWS signature as it is no longer required by the Amazon API | +| `1.6.1` | 2023-11-13 | [\#32457](https://github.com/airbytehq/airbyte/pull/32457) | Fix report decompression | +| `1.6.0` | 2023-11-09 | [\#32259](https://github.com/airbytehq/airbyte/pull/32259) | mark "aws_secret_key" and "aws_access_key" as required in specification; update schema for stream `Orders` | +| `1.5.1` | 2023-08-18 | [\#29255](https://github.com/airbytehq/airbyte/pull/29255) | role_arn is optional on UI but not really on the backend blocking connector set up using oauth | +| `1.5.0` | 2023-08-08 | [\#29054](https://github.com/airbytehq/airbyte/pull/29054) | Add new stream `OrderItems` | +| `1.4.1` | 2023-07-25 | [\#27050](https://github.com/airbytehq/airbyte/pull/27050) | Fix - non vendor accounts connector create/check issue | +| `1.4.0` | 2023-07-21 | [\#27110](https://github.com/airbytehq/airbyte/pull/27110) | Add `GET_FLAT_FILE_ACTIONABLE_ORDER_DATA_SHIPPING` and `GET_ORDER_REPORT_DATA_SHIPPING` streams | +| `1.3.0` | 2023-06-09 | [\#27110](https://github.com/airbytehq/airbyte/pull/27110) | Removed `app_id` from `InputConfiguration`, refactored `spec` | +| `1.2.0` | 2023-05-23 | [\#22503](https://github.com/airbytehq/airbyte/pull/22503) | Enabled stream attribute customization from Source configuration | +| `1.1.0` | 2023-04-21 | [\#23605](https://github.com/airbytehq/airbyte/pull/23605) | Add FBA Reimbursement Report stream | +| `1.0.1` | 2023-03-15 | [\#24098](https://github.com/airbytehq/airbyte/pull/24098) | Add Belgium Marketplace | +| `1.0.0` | 2023-03-13 | [\#23980](https://github.com/airbytehq/airbyte/pull/23980) | Make `app_id` required. Increase `end_date` gap up to 5 minutes from now for Finance streams. Fix connection check failure when trying to connect to Amazon Vendor Central accounts | +| `0.2.33` | 2023-03-01 | [\#23606](https://github.com/airbytehq/airbyte/pull/23606) | Implement reportOptions for all missing reports and refactor | +| `0.2.32` | 2022-02-21 | [\#23300](https://github.com/airbytehq/airbyte/pull/23300) | Make AWS Access Key, AWS Secret Access and Role ARN optional | +| `0.2.31` | 2022-01-10 | [\#16430](https://github.com/airbytehq/airbyte/pull/16430) | Implement slicing for report streams | +| `0.2.30` | 2022-12-28 | [\#20896](https://github.com/airbytehq/airbyte/pull/20896) | Validate connections without orders data | +| `0.2.29` | 2022-11-18 | [\#19581](https://github.com/airbytehq/airbyte/pull/19581) | Use user provided end date for GET_SALES_AND_TRAFFIC_REPORT | +| `0.2.28` | 2022-10-20 | [\#18283](https://github.com/airbytehq/airbyte/pull/18283) | Added multiple (22) report types | +| `0.2.26` | 2022-09-24 | [\#16629](https://github.com/airbytehq/airbyte/pull/16629) | Report API version to 2021-06-30, added multiple (5) report types | +| `0.2.25` | 2022-07-27 | [\#15063](https://github.com/airbytehq/airbyte/pull/15063) | Add Restock Inventory Report | +| `0.2.24` | 2022-07-12 | [\#14625](https://github.com/airbytehq/airbyte/pull/14625) | Add FBA Storage Fees Report | +| `0.2.23` | 2022-06-08 | [\#13604](https://github.com/airbytehq/airbyte/pull/13604) | Add new streams: Fullfiments returns and Settlement reports | +| `0.2.22` | 2022-06-15 | [\#13633](https://github.com/airbytehq/airbyte/pull/13633) | Fix - handle start date for financial stream | +| `0.2.21` | 2022-06-01 | [\#13364](https://github.com/airbytehq/airbyte/pull/13364) | Add financial streams | +| `0.2.20` | 2022-05-30 | [\#13059](https://github.com/airbytehq/airbyte/pull/13059) | Add replication end date to config | +| `0.2.19` | 2022-05-24 | [\#13119](https://github.com/airbytehq/airbyte/pull/13119) | Add OAuth2.0 support | +| `0.2.18` | 2022-05-06 | [\#12663](https://github.com/airbytehq/airbyte/pull/12663) | Add GET_XML_BROWSE_TREE_DATA report | +| `0.2.17` | 2022-05-19 | [\#12946](https://github.com/airbytehq/airbyte/pull/12946) | Add throttling exception managing in Orders streams | +| `0.2.16` | 2022-05-04 | [\#12523](https://github.com/airbytehq/airbyte/pull/12523) | allow to use IAM user arn or IAM role | +| `0.2.15` | 2022-01-25 | [\#9789](https://github.com/airbytehq/airbyte/pull/9789) | Add stream FbaReplacementsReports | +| `0.2.14` | 2022-01-19 | [\#9621](https://github.com/airbytehq/airbyte/pull/9621) | Add GET_FLAT_FILE_ALL_ORDERS_DATA_BY_LAST_UPDATE_GENERAL report | +| `0.2.13` | 2022-01-18 | [\#9581](https://github.com/airbytehq/airbyte/pull/9581) | Change createdSince parameter to dataStartTime | +| `0.2.12` | 2022-01-05 | [\#9312](https://github.com/airbytehq/airbyte/pull/9312) | Add all remaining brand analytics report streams | +| `0.2.11` | 2022-01-05 | [\#9115](https://github.com/airbytehq/airbyte/pull/9115) | Fix reading only 100 orders | +| `0.2.10` | 2021-12-31 | [\#9236](https://github.com/airbytehq/airbyte/pull/9236) | Fix NoAuth deprecation warning | +| `0.2.9` | 2021-12-30 | [\#9212](https://github.com/airbytehq/airbyte/pull/9212) | Normalize GET_SELLER_FEEDBACK_DATA header field names | +| `0.2.8` | 2021-12-22 | [\#8810](https://github.com/airbytehq/airbyte/pull/8810) | Fix GET_SELLER_FEEDBACK_DATA Date cursor field format | +| `0.2.7` | 2021-12-21 | [\#9002](https://github.com/airbytehq/airbyte/pull/9002) | Extract REPORTS_MAX_WAIT_SECONDS to configurable parameter | +| `0.2.6` | 2021-12-10 | [\#8179](https://github.com/airbytehq/airbyte/pull/8179) | Add GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT report | +| `0.2.5` | 2021-12-06 | [\#8425](https://github.com/airbytehq/airbyte/pull/8425) | Update title, description fields in spec | +| `0.2.4` | 2021-11-08 | [\#8021](https://github.com/airbytehq/airbyte/pull/8021) | Added GET_SELLER_FEEDBACK_DATA report with incremental sync capability | +| `0.2.3` | 2021-11-08 | [\#7828](https://github.com/airbytehq/airbyte/pull/7828) | Remove datetime format from all streams | +| `0.2.2` | 2021-11-08 | [\#7752](https://github.com/airbytehq/airbyte/pull/7752) | Change `check_connection` function to use stream Orders | +| `0.2.1` | 2021-09-17 | [\#5248](https://github.com/airbytehq/airbyte/pull/5248) | Added `extra stream` support. Updated `reports streams` logics | +| `0.2.0` | 2021-08-06 | [\#4863](https://github.com/airbytehq/airbyte/pull/4863) | Rebuild source with `airbyte-cdk` | +| `0.1.3` | 2021-06-23 | [\#4288](https://github.com/airbytehq/airbyte/pull/4288) | Bugfix failing `connection check` | +| `0.1.2` | 2021-06-15 | [\#4108](https://github.com/airbytehq/airbyte/pull/4108) | Fixed: Sync fails with timeout when create report is CANCELLED` | diff --git a/docs/integrations/sources/amplitude.md b/docs/integrations/sources/amplitude.md index 0972f2dcd5495..d47212f08ab45 100644 --- a/docs/integrations/sources/amplitude.md +++ b/docs/integrations/sources/amplitude.md @@ -52,8 +52,9 @@ The Amplitude connector ideally should gracefully handle Amplitude API limitatio | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:----------------------------------------------------------------------------------------------------------| -| 0.3.7 | 2024-02-12 | [35162](https://github.com/airbytehq/airbyte/pull/35162) | Manage dependencies with Poetry. | -| 0.3.6 | 2023-10-23 | [31702](https://github.com/airbytehq/airbyte/pull/31702) | Base image migration: remove Dockerfile and use the python-connector-base image | +| 0.3.8 | 2024-03-12 | [35987](https://github.com/airbytehq/airbyte/pull/35987) | Unpin CDK version | +| 0.3.7 | 2024-02-12 | [35162](https://github.com/airbytehq/airbyte/pull/35162) | Manage dependencies with Poetry. | +| 0.3.6 | 2023-10-23 | [31702](https://github.com/airbytehq/airbyte/pull/31702) | Base image migration: remove Dockerfile and use the python-connector-base image | | 0.3.5 | 2023-09-28 | [30846](https://github.com/airbytehq/airbyte/pull/30846) | Add support of multiple cursor date formats | | 0.3.4 | 2023-09-28 | [30831](https://github.com/airbytehq/airbyte/pull/30831) | Add user friendly error description on 403 error | | 0.3.3 | 2023-09-21 | [30652](https://github.com/airbytehq/airbyte/pull/30652) | Update spec: declare `start_date` type as `date-time` | diff --git a/docs/integrations/sources/avni.md b/docs/integrations/sources/avni.md new file mode 100644 index 0000000000000..8e2272a8643de --- /dev/null +++ b/docs/integrations/sources/avni.md @@ -0,0 +1,53 @@ +# Avni + +This page contains the setup guide and reference information for the Avni source connector. + +## Prerequisites + +- Username of Avni account +- Password of Avni account + +## Setup guide + +### Step 1: Set up an Avni account + +1. Signup on [Avni](https://avniproject.org/) to create an account. +2. Create Forms for Subjects Registrations, Programs Enrolment, Program Encounter using Avni Web Console -> [Getting Started](https://avniproject.org/getting-started/) +3. Register Subjects, Enrol them in Program using Avni Android Application [Here](https://play.google.com/store/apps/details?id=com.openchsclient&hl=en&gl=US) + +### Step 2: Set up the Avni connector in Airbyte + +**For Airbyte Open Source:** + +1. Go to local Airbyte page. +2. In the left navigation bar, click **Sources**. In the top-right corner, click **+ New Source**. +3. On the source setup page, select **Avni** from the Source type dropdown and enter a name for this connector. +4. Enter the **username** and **password** of your Avni account +5. Enter the **lastModifiedDateTime**, ALl the data which have been updated since this time will be returned. The Value should be specified in "yyyy-MM-dd'T'HH:mm:ss.SSSz", e.g. "2000-10-31T01:30:00.000Z". If all the data needed to be fetch keep this parameter to any old date or use e.g. date. +6. Click **Set up source**. + +## Supported sync modes + +The Avni source connector supports the following[ sync modes](https://docs.airbyte.com/cloud/core-concepts#connection-sync-modes): +​ + +- [Full Refresh - Overwrite](https://docs.airbyte.com/understanding-airbyte/connections/full-refresh-overwrite) +- [Full Refresh - Append](https://docs.airbyte.com/understanding-airbyte/connections/full-refresh-append) +- [Incremental Sync - Append](https://docs.airbyte.com/understanding-airbyte/connections/incremental-append) +- (Recommended)[ Incremental Sync - Deduped History](https://docs.airbyte.com/understanding-airbyte/connections/incremental-deduped-history) + + +## Supported Streams + +Avni Source connector Support Following Streams: + +- **Subjects Stream** : This stream provides details of registered subjects. You can retrieve information about subjects who have been registered in the system. +- **Program Enrolment Stream** : This stream provides program enrolment data. You can obtain information about subjects who have enrolled in programs. +- **Program Encounter Stream**, This stream provides data about encounters that occur within programs. You can retrieve information about all the encounters that have taken place within programs. +- **Subject Encounter Stream**, This stream provides data about encounters involving subjects, excluding program encounters. You can obtain information about all the encounters that subjects have had outside of program-encounter. + +avirajsingh7 marked this conversation as resolved. +## Changelog + +| Version | Date | Pull Request | Subject | +| 0.1.0 | 2023-09-07 | [30222](https://github.com/airbytehq/airbyte/pull/30222) | Avni Source Connector | \ No newline at end of file diff --git a/docs/integrations/sources/azure-blob-storage.md b/docs/integrations/sources/azure-blob-storage.md index a6e291398e528..50c698873c295 100644 --- a/docs/integrations/sources/azure-blob-storage.md +++ b/docs/integrations/sources/azure-blob-storage.md @@ -12,29 +12,50 @@ Cloud storage may incur egress costs. Egress refers to data that is transferred * Create a storage account with the permissions [details](https://learn.microsoft.com/en-us/azure/storage/common/storage-account-create?tabs=azure-portal) + +:::warning +To use Oauth 2.0 Authentication method, Access Control (IAM) should be setup. +It is recommended +to use role [Storage Blob Data Reader](https://learn.microsoft.com/en-gb/azure/storage/blobs/assign-azure-role-data-access?tabs=portal) + +
    + +Follow this steps to setup IAM role: + + +1. Go to Azure portal, select the Storage (or Container) you'd like to sync from and get to Access Control(IAM) -> Role Assignment ![Access Control (IAM)](../../.gitbook/assets/source/azure-blob-storage/access_control_iam.png) +2. Click on `Add` and select `Add role assignment` from the dropdown list ![Add role assignment](../../.gitbook/assets/source/azure-blob-storage/add_role.png) +3. Search by role name `Storage Blob Data Reader` in search box, Select role from the list and click `Next` ![Search Role](../../.gitbook/assets/source/azure-blob-storage/search_role.png) +4. Select `User, Group, or service principal`, click on `members` and select member(s) so they appear in table and click `Next` ![Add Members](../../.gitbook/assets/source/azure-blob-storage/add_members.png) +5. (Optional) Add Conditions to restrict the role assignments a user can create. +6. Click `Review + Assign` +
    +::: + ### Step 2: Set up the Azure Blob Storage connector in Airbyte 1. [Log in to your Airbyte Cloud](https://cloud.airbyte.com/workspaces) account, or navigate to your Airbyte Open Source dashboard. 2. In the left navigation bar, click **Sources**. In the top-right corner, click **+ New source**. 3. Find and select **Azure Blob Storage** from the list of available sources. 4. Enter the name of your Azure **Account**. -5. Enter the *Azure Blob Storage account key* which grants access to your account. -6. Enter the name of the **Container** containing your files to replicate. -7. Add a stream +5. Click **Authenticate your Azure Blob Storage account**. +6. Log in and authorize the Azure Blob Storage account. +7. Enter the name of the **Container** containing your files to replicate. +8. Add a stream 1. Write the **File Type** 2. In the **Format** box, use the dropdown menu to select the format of the files you'd like to replicate. The supported formats are **CSV**, **Parquet**, **Avro** and **JSONL**. Toggling the **Optional fields** button within the **Format** box will allow you to enter additional configurations based on the selected format. For a detailed breakdown of these settings, refer to the [File Format section](#file-format-settings) below. 3. Give a **Name** to the stream 4. (Optional) - If you want to enforce a specific schema, you can enter a **Input schema**. By default, this value is set to `{}` and will automatically infer the schema from the file\(s\) you are replicating. For details on providing a custom schema, refer to the [User Schema section](#user-schema). 5. Optionally, enter the **Globs** which dictates which files to be synced. This is a regular expression that allows Airbyte to pattern match the specific files to replicate. If you are replicating all the files within your bucket, use `**` as the pattern. For more precise pattern matching options, refer to the [Path Patterns section](#path-patterns) below. -8. Optionally, enter the endpoint to use for the data replication. -9. Optionally, enter the desired start date from which to begin replicating data. +9. (Optional) Enter the endpoint to use for the data replication. +10. (Optional) Enter the desired start date from which to begin replicating data. ## Supported sync modes The Azure Blob Storage source connector supports the following [sync modes](https://docs.airbyte.com/cloud/core-concepts#connection-sync-modes): | Feature | Supported? | -| :--------------------------------------------- |:-----------| +|:-----------------------------------------------|:-----------| | Full Refresh Sync | Yes | | Incremental Sync | Yes | | Replicate Incremental Deletes | No | @@ -45,7 +66,7 @@ The Azure Blob Storage source connector supports the following [sync modes](http ## File Compressions | Compression | Supported? | -| :---------- | :--------- | +|:------------|:-----------| | Gzip | Yes | | Zip | No | | Bzip2 | Yes | @@ -193,15 +214,18 @@ To perform the text extraction from PDF and Docx files, the connector uses the [ | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:---------------------------------------------------------------------------------------------| -| 0.3.4 | 2024-02-06 | [34936](https://github.com/airbytehq/airbyte/pull/34936) | Bump CDK version to avoid missing SyncMode errors | -| 0.3.3 | 2024-01-30 | [34681](https://github.com/airbytehq/airbyte/pull/34681) | Unpin CDK version to make compatible with the Concurrent CDK | -| 0.3.2 | 2024-01-30 | [34661](https://github.com/airbytehq/airbyte/pull/34661) | Pin CDK version until upgrade for compatibility with the Concurrent CDK | -| 0.3.1 | 2024-01-10 | [34084](https://github.com/airbytehq/airbyte/pull/34084) | Fix bug for running check with document file format | -| 0.3.0 | 2023-12-14 | [33411](https://github.com/airbytehq/airbyte/pull/33411) | Bump CDK version to auto-set primary key for document file streams and support raw txt files | +| 0.4.0 | 2024-04-05 | [36825](https://github.com/airbytehq/airbyte/pull/36825) | Add oauth 2.0 support | +| 0.3.6 | 2024-04-03 | [36542](https://github.com/airbytehq/airbyte/pull/36542) | Use Latest CDK; add integration tests | +| 0.3.5 | 2024-03-26 | [36487](https://github.com/airbytehq/airbyte/pull/36487) | Manage dependencies with Poetry. | +| 0.3.4 | 2024-02-06 | [34936](https://github.com/airbytehq/airbyte/pull/34936) | Bump CDK version to avoid missing SyncMode errors | +| 0.3.3 | 2024-01-30 | [34681](https://github.com/airbytehq/airbyte/pull/34681) | Unpin CDK version to make compatible with the Concurrent CDK | +| 0.3.2 | 2024-01-30 | [34661](https://github.com/airbytehq/airbyte/pull/34661) | Pin CDK version until upgrade for compatibility with the Concurrent CDK | +| 0.3.1 | 2024-01-10 | [34084](https://github.com/airbytehq/airbyte/pull/34084) | Fix bug for running check with document file format | +| 0.3.0 | 2023-12-14 | [33411](https://github.com/airbytehq/airbyte/pull/33411) | Bump CDK version to auto-set primary key for document file streams and support raw txt files | | 0.2.5 | 2023-12-06 | [33187](https://github.com/airbytehq/airbyte/pull/33187) | Bump CDK version to hide source-defined primary key | | 0.2.4 | 2023-11-16 | [32608](https://github.com/airbytehq/airbyte/pull/32608) | Improve document file type parser | | 0.2.3 | 2023-11-13 | [32357](https://github.com/airbytehq/airbyte/pull/32357) | Improve spec schema | | 0.2.2 | 2023-10-30 | [31904](https://github.com/airbytehq/airbyte/pull/31904) | Update CDK to support document file types | | 0.2.1 | 2023-10-18 | [31543](https://github.com/airbytehq/airbyte/pull/31543) | Base image migration: remove Dockerfile and use the python-connector-base image | | 0.2.0 | 2023-10-10 | https://github.com/airbytehq/airbyte/pull/31336 | Migrate to File-based CDK. Add support of CSV, Parquet and Avro files | -| 0.1.0 | 2023-02-17 | https://github.com/airbytehq/airbyte/pull/23222 | Initial release with full-refresh and incremental sync with JSONL files | \ No newline at end of file +| 0.1.0 | 2023-02-17 | https://github.com/airbytehq/airbyte/pull/23222 | Initial release with full-refresh and incremental sync with JSONL files | diff --git a/docs/integrations/sources/bing-ads.md b/docs/integrations/sources/bing-ads.md index 5cdb8ad25123e..c52b06429dcd8 100644 --- a/docs/integrations/sources/bing-ads.md +++ b/docs/integrations/sources/bing-ads.md @@ -107,14 +107,14 @@ The Bing Ads source connector supports the following streams. For more informati ### Basic streams -- [Accounts](https://docs.microsoft.com/en-us/advertising/customer-management-service/searchaccounts?view=bingads-13) -- [Ad Groups](https://docs.microsoft.com/en-us/advertising/campaign-management-service/getadgroupsbycampaignid?view=bingads-13) +- [Accounts](https://docs.microsoft.com/en-us/advertising/customer-management-service/searchaccounts?view=bingads-13) (Full Refresh) +- [Ad Groups](https://docs.microsoft.com/en-us/advertising/campaign-management-service/getadgroupsbycampaignid?view=bingads-13) (Full Refresh) - [Ad Group Labels](https://learn.microsoft.com/en-us/advertising/bulk-service/ad-group-label?view=bingads-13) -- [Ads](https://docs.microsoft.com/en-us/advertising/campaign-management-service/getadsbyadgroupid?view=bingads-13) +- [Ads](https://docs.microsoft.com/en-us/advertising/campaign-management-service/getadsbyadgroupid?view=bingads-13) (Full Refresh) - [App Install Ads](https://learn.microsoft.com/en-us/advertising/bulk-service/app-install-ad?view=bingads-13) - [App Install Ad Labels](https://learn.microsoft.com/en-us/advertising/bulk-service/app-install-ad-label?view=bingads-13) - [Budget](https://learn.microsoft.com/en-us/advertising/bulk-service/budget?view=bingads-13&viewFallbackFrom=bingads-13) -- [Campaigns](https://docs.microsoft.com/en-us/advertising/campaign-management-service/getcampaignsbyaccountid?view=bingads-13) +- [Campaigns](https://docs.microsoft.com/en-us/advertising/campaign-management-service/getcampaignsbyaccountid?view=bingads-13) (Full Refresh) - [Campaign Labels](https://learn.microsoft.com/en-us/advertising/bulk-service/campaign-label?view=bingads-13) - [Keywords](https://learn.microsoft.com/en-us/advertising/bulk-service/keyword?view=bingads-13) - [Keyword Labels](https://learn.microsoft.com/en-us/advertising/bulk-service/keyword-label?view=bingads-13) @@ -251,6 +251,8 @@ The Bing Ads API limits the number of requests for all Microsoft Advertising cli | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------| +| 2.5.0 | 2024-03-21 | [35891](https://github.com/airbytehq/airbyte/pull/35891) | Accounts stream: add TaxCertificate field to schema. | +| 2.4.0 | 2024-03-19 | [36267](https://github.com/airbytehq/airbyte/pull/36267) | Pin airbyte-cdk version to `^0` | | 2.3.0 | 2024-03-05 | [35812](https://github.com/airbytehq/airbyte/pull/35812) | New streams: Audience Performance Report, Goals And Funnels Report, Product Dimension Performance Report. | | 2.2.0 | 2024-02-13 | [35201](https://github.com/airbytehq/airbyte/pull/35201) | New streams: Budget and Product Dimension Performance. | | 2.1.4 | 2024-02-12 | [35179](https://github.com/airbytehq/airbyte/pull/35179) | Manage dependencies with Poetry. | diff --git a/docs/integrations/sources/chargebee.md b/docs/integrations/sources/chargebee.md index 27b6568ef8505..4aa7c18678e45 100644 --- a/docs/integrations/sources/chargebee.md +++ b/docs/integrations/sources/chargebee.md @@ -99,6 +99,9 @@ The Chargebee connector should not run into [Chargebee API](https://apidocs.char | Version | Date | Pull Request | Subject | | :------ | :--------- | :------------------------------------------------------- | :-------------------------------------------------------------------------------------------------- | +| 0.5.0 | 2024-03-28 | [36518](https://github.com/airbytehq/airbyte/pull/36518) | Updates CDK to ^0, updates IncrementalSingleSliceCursor | +| 0.4.2 | 2024-03-14 | [36037](https://github.com/airbytehq/airbyte/pull/36037) | Adds fields: `coupon_constraints` to `coupon` stream, `billing_month` to `customer stream`, and `error_detail` to `transaction` stream schemas | +| 0.4.1 | 2024-03-13 | [35509](https://github.com/airbytehq/airbyte/pull/35509) | Updates CDK version to latest (0.67.1), updates `site_migration_detail` record filtering | | 0.4.0 | 2024-02-12 | [34053](https://github.com/airbytehq/airbyte/pull/34053) | Add missing fields to and cleans up schemas, adds incremental support for `gift`, `site_migration_detail`, and `unbilled_charge` streams.` | | 0.3.1 | 2024-02-12 | [35169](https://github.com/airbytehq/airbyte/pull/35169) | Manage dependencies with Poetry. | | 0.3.0 | 2023-12-26 | [33696](https://github.com/airbytehq/airbyte/pull/33696) | Add new stream, add fields to existing streams | diff --git a/docs/integrations/sources/coda.md b/docs/integrations/sources/coda.md index 26d854f621320..5ba1c005a7420 100755 --- a/docs/integrations/sources/coda.md +++ b/docs/integrations/sources/coda.md @@ -62,9 +62,10 @@ The Coda source connector supports the following [sync modes](https://docs.airby ## Changelog -| Version | Date | Pull Request | Subject | -| :------ | :--------- | :------------------------------------------------------- | :-------------------------------- | -| 1.2.0 | 2023-08-13 | [29288](https://github.com/airbytehq/airbyte/pull/29288) | Migrate python cdk to low-code | -| 1.1.0 | 2023-07-10 | [27797](https://github.com/airbytehq/airbyte/pull/27797) | Add `rows` stream | -| 1.0.0 | 2023-07-10 | [28093](https://github.com/airbytehq/airbyte/pull/28093) | Update `docs` and `pages` schemas | -| 0.1.0 | 2022-11-17 | [18675](https://github.com/airbytehq/airbyte/pull/18675) | 🎉 New source: Coda [python cdk] | +| Version | Date | Pull Request | Subject | +| :------ | :--------- | :------------------------------------------------------- |:------------------------------------------------------------------------------------------------------------| +| 1.2.1 | 2024-04-02 | [36775](https://github.com/airbytehq/airbyte/pull/36775) | Migrate to base image, manage dependencies with Poetry, and stop using last_records interpolation variable. | +| 1.2.0 | 2023-08-13 | [29288](https://github.com/airbytehq/airbyte/pull/29288) | Migrate python cdk to low-code | +| 1.1.0 | 2023-07-10 | [27797](https://github.com/airbytehq/airbyte/pull/27797) | Add `rows` stream | +| 1.0.0 | 2023-07-10 | [28093](https://github.com/airbytehq/airbyte/pull/28093) | Update `docs` and `pages` schemas | +| 0.1.0 | 2022-11-17 | [18675](https://github.com/airbytehq/airbyte/pull/18675) | 🎉 New source: Coda [python cdk] | diff --git a/docs/integrations/sources/commcare.md b/docs/integrations/sources/commcare.md index 6dd8fcebd6477..091bf496a40a3 100644 --- a/docs/integrations/sources/commcare.md +++ b/docs/integrations/sources/commcare.md @@ -36,4 +36,5 @@ The Commcare source connector supports the following streams: ## Changelog | Version | Date | Pull Request | Subject | +|---------|------|--------------|---------| | 0.1.0 | 2022-11-08 | [20220](https://github.com/airbytehq/airbyte/pull/20220) | Commcare Source Connector | diff --git a/docs/integrations/sources/confluence.md b/docs/integrations/sources/confluence.md index 76fe7adf7b6e7..574879ecf39b6 100644 --- a/docs/integrations/sources/confluence.md +++ b/docs/integrations/sources/confluence.md @@ -60,6 +60,7 @@ The Confluence connector should not run into Confluence API limitations under no | Version | Date | Pull Request | Subject | | :------ | :--------- | :------------------------------------------------------- | :------------------------------------------------------------- | +| 0.2.1 | 2024-03-20 | [36339](https://github.com/airbytehq/airbyte/pull/36339) | 🐛 Source Confluence: 'expand' request parameter converted to comma separated string | 0.2.0 | 2023-08-14 | [29125](https://github.com/airbytehq/airbyte/pull/29125) | Migrate Confluence Source Connector to Low Code | | 0.1.3 | 2023-03-13 | [23988](https://github.com/airbytehq/airbyte/pull/23988) | Add view and storage to pages body, add check for stream Audit | | 0.1.2 | 2023-03-06 | [23775](https://github.com/airbytehq/airbyte/pull/23775) | Set additionalProperties: true, update docs and spec | diff --git a/docs/integrations/sources/e2e-test-cloud.md b/docs/integrations/sources/e2e-test-cloud.md index d93a76732dc6c..3e18bdf9f30fc 100644 --- a/docs/integrations/sources/e2e-test-cloud.md +++ b/docs/integrations/sources/e2e-test-cloud.md @@ -28,9 +28,9 @@ Here is its configuration: The OSS and Cloud variants have the same version number. The Cloud variant was initially released at version `1.0.0`. -| Version | Date | Pull request | Notes | +| Version | Date | Pull request | Subject | |---------|------------|----------------------------------------------------------|-----------------------------------------------------| -| 2.2.1 | 2024-02-13 | [35231](https://github.com/airbytehq/airbyte/pull/35231) | Adopt JDK 0.20.4. | +| 2.2.1 | 2024-02-13 | [35231](https://github.com/airbytehq/airbyte/pull/35231) | Adopt JDK 0.20.4. | | 2.1.5 | 2023-10-06 | [31092](https://github.com/airbytehq/airbyte/pull/31092) | Bring in changes from oss | | 2.1.4 | 2023-03-01 | [23656](https://github.com/airbytehq/airbyte/pull/23656) | Fix inheritance between e2e-test and e2e-test-cloud | | 0.1.0 | 2021-07-23 | [9720](https://github.com/airbytehq/airbyte/pull/9720) | Initial release. | diff --git a/docs/integrations/sources/e2e-test.md b/docs/integrations/sources/e2e-test.md index 76608078b542b..aa64bb2f42245 100644 --- a/docs/integrations/sources/e2e-test.md +++ b/docs/integrations/sources/e2e-test.md @@ -70,7 +70,7 @@ This mode is also excluded from the Cloud variant of this connector. The OSS and Cloud variants have the same version number. The Cloud variant was initially released at version `1.0.0`. -| Version | Date | Pull request | Notes | +| Version | Date | Pull request | Subject | |---------|------------| ------------------------------------------------------------------ |-------------------------------------------------------------------------------------------------------| | 2.2.1 | 2024-02-13 | [35231](https://github.com/airbytehq/airbyte/pull/35231) | Adopt JDK 0.20.4. | | 2.2.0 | 2023-12-18 | [33485](https://github.com/airbytehq/airbyte/pull/33485) | Remove LEGACY state | diff --git a/docs/integrations/sources/facebook-marketing-migrations.md b/docs/integrations/sources/facebook-marketing-migrations.md index d3cfd48652a79..d4c4c06765a7d 100644 --- a/docs/integrations/sources/facebook-marketing-migrations.md +++ b/docs/integrations/sources/facebook-marketing-migrations.md @@ -4,6 +4,11 @@ Streams Ads-Insights-* streams now have updated schemas. +:::danger +Please note that data older than 37 months will become unavailable due to Facebook limitations. +It is recommended to create a backup at the destination before proceeding with migration. +::: + ### Update Custom Insights Reports (this step can be skipped if you did not define any) 1. Select **Sources** in the main navbar. @@ -20,18 +25,18 @@ Streams Ads-Insights-* streams now have updated schemas. 1. Select **Refresh source schema**. 2. Select **OK**. -```note +:::note Any detected schema changes will be listed for your review. -``` +::: 3. Select **Save changes** at the bottom of the page. 1. Ensure the **Reset affected streams** option is checked. -```note +:::note Depending on destination type you may not be prompted to reset your data. -``` +::: 4. Select **Save connection**. -```note +:::note This will reset the data in your destination and initiate a fresh sync. -``` +::: For more information on resetting your data in Airbyte, see [this page](https://docs.airbyte.com/operator-guides/reset). diff --git a/docs/integrations/sources/facebook-marketing.md b/docs/integrations/sources/facebook-marketing.md index b38e5897c4f16..1efe72287e1a1 100644 --- a/docs/integrations/sources/facebook-marketing.md +++ b/docs/integrations/sources/facebook-marketing.md @@ -200,6 +200,10 @@ The Facebook Marketing connector uses the `lookback_window` parameter to repeate | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 2.1.3 | 2024-04-16 | [37320](https://github.com/airbytehq/airbyte/pull/37320) | Add retry for transient error | +| 2.1.2 | 2024-03-29 | [36689](https://github.com/airbytehq/airbyte/pull/36689) | Fix key error `account_id` for custom reports. | +| 2.1.1 | 2024-03-18 | [36025](https://github.com/airbytehq/airbyte/pull/36025) | Fix start_date selection behaviour | +| 2.1.0 | 2024-03-12 | [35978](https://github.com/airbytehq/airbyte/pull/35978) | Upgrade CDK to start emitting record counts with state and full refresh state | | 2.0.1 | 2024-03-08 | [35913](https://github.com/airbytehq/airbyte/pull/35913) | Fix lookback window | | 2.0.0 | 2024-03-01 | [35746](https://github.com/airbytehq/airbyte/pull/35746) | Update API to `v19.0` | | 1.4.2 | 2024-02-22 | [35539](https://github.com/airbytehq/airbyte/pull/35539) | Add missing config migration from `include_deleted` field | diff --git a/docs/integrations/sources/facebook-pages-migrations.md b/docs/integrations/sources/facebook-pages-migrations.md new file mode 100644 index 0000000000000..b6396e0595113 --- /dev/null +++ b/docs/integrations/sources/facebook-pages-migrations.md @@ -0,0 +1,41 @@ +# Facebook Pages Migration Guide + +## Upgrading to 1.0.0 + +:::note +This change is only breaking if you are syncing stream `Page`. +::: + +This version brings an updated schema for the `v19.0` API version of the `Page` stream. +The `messenger_ads_default_page_welcome_message` field has been deleted, and `call_to_actions`, `posts`, `published_posts`, `ratings`, `tabs` and `tagged` fields have been added. + +Users should: + - Refresh the source schema for the `Page` stream. + - Reset the stream after upgrading to ensure uninterrupted syncs. + +### Refresh affected schemas and reset data + +1. Select **Connections** in the main nav bar. + 1. Select the connection affected by the update. +2. Select the **Replication** tab. + 1. Select **Refresh source schema**. + 2. Select **OK**. + +:::note +Any detected schema changes will be listed for your review. +::: + +3. Select **Save changes** at the bottom of the page. + 1. Ensure the **Reset affected streams** option is checked. + +:::note +Depending on destination type you may not be prompted to reset your data. +::: + +4. Select **Save connection**. + +:::note +This will reset the data in your destination and initiate a fresh sync. +::: + +For more information on resetting your data in Airbyte, see [this page](https://docs.airbyte.com/operator-guides/reset) \ No newline at end of file diff --git a/docs/integrations/sources/facebook-pages.md b/docs/integrations/sources/facebook-pages.md index 7055a1f798a9e..1b92243d452c9 100644 --- a/docs/integrations/sources/facebook-pages.md +++ b/docs/integrations/sources/facebook-pages.md @@ -61,10 +61,10 @@ The Facebook Pages source connector supports the following [sync modes](https:// ## Supported Streams -- [Page](https://developers.facebook.com/docs/graph-api/reference/v15.0/page/#overview) -- [Post](https://developers.facebook.com/docs/graph-api/reference/v15.0/page/feed) -- [Page Insights](https://developers.facebook.com/docs/graph-api/reference/v15.0/page/insights) -- [Post Insights](https://developers.facebook.com/docs/graph-api/reference/v15.0/insights) +- [Page](https://developers.facebook.com/docs/graph-api/reference/v19.0/page/#overview) +- [Post](https://developers.facebook.com/docs/graph-api/reference/v19.0/page/feed) +- [Page Insights](https://developers.facebook.com/docs/graph-api/reference/v19.0/page/insights) +- [Post Insights](https://developers.facebook.com/docs/graph-api/reference/v19.0/insights) ## Data type map @@ -83,19 +83,20 @@ See Facebook's [documentation on rate limiting](https://developers.facebook.com/ ## Changelog -| Version | Date | Pull Request | Subject | -| :------ | :--------- | :------------------------------------------------------- | :------------------------------------------------------------------------ | -| 0.3.0 | 2023-06-26 | [27728](https://github.com/airbytehq/airbyte/pull/27728) | License Update: Elv2 | -| 0.2.5 | 2023-04-13 | [26939](https://github.com/airbytehq/airbyte/pull/26939) | Add advancedAuth to the connector spec | -| 0.2.4 | 2023-04-13 | [25143](https://github.com/airbytehq/airbyte/pull/25143) | Update insight metrics request params | -| 0.2.3 | 2023-02-23 | [23395](https://github.com/airbytehq/airbyte/pull/23395) | Parse datetime to rfc3339 | -| 0.2.2 | 2023-02-10 | [22804](https://github.com/airbytehq/airbyte/pull/22804) | Retry 500 errors | -| 0.2.1 | 2022-12-29 | [20925](https://github.com/airbytehq/airbyte/pull/20925) | Fix tests; modify expected records | -| 0.2.0 | 2022-11-24 | [19788](https://github.com/airbytehq/airbyte/pull/19788) | Migrate lo low-code; Beta certification; Upgrade Facebook API to v.15 | -| 0.1.6 | 2021-12-22 | [9032](https://github.com/airbytehq/airbyte/pull/9032) | Remove deprecated field `live_encoders` from Page stream | -| 0.1.5 | 2021-11-26 | [8267](https://github.com/airbytehq/airbyte/pull/8267) | updated all empty objects in schemas for Page and Post streams | -| 0.1.4 | 2021-11-26 | [](https://github.com/airbytehq/airbyte/pull/) | Remove unsupported insights_export field from Pages request | -| 0.1.3 | 2021-10-28 | [7440](https://github.com/airbytehq/airbyte/pull/7440) | Generate Page token from config access token | -| 0.1.2 | 2021-10-18 | [7128](https://github.com/airbytehq/airbyte/pull/7128) | Upgrade Facebook API to v.12 | -| 0.1.1 | 2021-09-30 | [6438](https://github.com/airbytehq/airbyte/pull/6438) | Annotate Oauth2 flow initialization parameters in connector specification | -| 0.1.0 | 2021-09-01 | [5158](https://github.com/airbytehq/airbyte/pull/5158) | Initial Release | +| Version | Date | Pull Request | Subject | +|:--------|:-----------| :------------------------------------------------------- |:-------------------------------------------------------------------------------------| +| 1.0.0 | 2024-03-14 | [36015](https://github.com/airbytehq/airbyte/pull/36015) | Upgrade Facebook API to v19.0 | +| 0.3.0 | 2023-06-26 | [27728](https://github.com/airbytehq/airbyte/pull/27728) | License Update: Elv2 | +| 0.2.5 | 2023-04-13 | [26939](https://github.com/airbytehq/airbyte/pull/26939) | Add advancedAuth to the connector spec | +| 0.2.4 | 2023-04-13 | [25143](https://github.com/airbytehq/airbyte/pull/25143) | Update insight metrics request params | +| 0.2.3 | 2023-02-23 | [23395](https://github.com/airbytehq/airbyte/pull/23395) | Parse datetime to rfc3339 | +| 0.2.2 | 2023-02-10 | [22804](https://github.com/airbytehq/airbyte/pull/22804) | Retry 500 errors | +| 0.2.1 | 2022-12-29 | [20925](https://github.com/airbytehq/airbyte/pull/20925) | Fix tests; modify expected records | +| 0.2.0 | 2022-11-24 | [19788](https://github.com/airbytehq/airbyte/pull/19788) | Migrate lo low-code; Beta certification; Upgrade Facebook API to v.15 | +| 0.1.6 | 2021-12-22 | [9032](https://github.com/airbytehq/airbyte/pull/9032) | Remove deprecated field `live_encoders` from Page stream | +| 0.1.5 | 2021-11-26 | [8267](https://github.com/airbytehq/airbyte/pull/8267) | updated all empty objects in schemas for Page and Post streams | +| 0.1.4 | 2021-11-26 | [](https://github.com/airbytehq/airbyte/pull/) | Remove unsupported insights_export field from Pages request | +| 0.1.3 | 2021-10-28 | [7440](https://github.com/airbytehq/airbyte/pull/7440) | Generate Page token from config access token | +| 0.1.2 | 2021-10-18 | [7128](https://github.com/airbytehq/airbyte/pull/7128) | Upgrade Facebook API to v.12 | +| 0.1.1 | 2021-09-30 | [6438](https://github.com/airbytehq/airbyte/pull/6438) | Annotate Oauth2 flow initialization parameters in connector specification | +| 0.1.0 | 2021-09-01 | [5158](https://github.com/airbytehq/airbyte/pull/5158) | Initial Release | diff --git a/docs/integrations/sources/faker.md b/docs/integrations/sources/faker.md index 311d563a9ec25..7fb3ff7464a8a 100644 --- a/docs/integrations/sources/faker.md +++ b/docs/integrations/sources/faker.md @@ -101,9 +101,11 @@ None! | Version | Date | Pull Request | Subject | | :------ | :--------- | :-------------------------------------------------------------------------------------------------------------------- | :-------------------------------------------------------------------------------------------------------------- | -| 6.0.2 | 2024-02-12 | [35174](https://github.com/airbytehq/airbyte/pull/35174) | Manage dependencies with Poetry. | -| 6.0.1 | 2024-02-12 | [35172](https://github.com/airbytehq/airbyte/pull/35172) | Base image migration: remove Dockerfile and use the python-connector-base image | -| 6.0.0 | 2024-01-30 | [34644](https://github.com/airbytehq/airbyte/pull/34644) | Declare 'id' columns as primary keys. | +| 6.1.0 | 2024-04-08 | [36898](https://github.com/airbytehq/airbyte/pull/36898) | Update car prices and years | +| 6.0.3 | 2024-03-15 | [36167](https://github.com/airbytehq/airbyte/pull/36167) | Make 'count' an optional config parameter. | +| 6.0.2 | 2024-02-12 | [35174](https://github.com/airbytehq/airbyte/pull/35174) | Manage dependencies with Poetry. | +| 6.0.1 | 2024-02-12 | [35172](https://github.com/airbytehq/airbyte/pull/35172) | Base image migration: remove Dockerfile and use the python-connector-base image | +| 6.0.0 | 2024-01-30 | [34644](https://github.com/airbytehq/airbyte/pull/34644) | Declare 'id' columns as primary keys. | | 5.0.2 | 2024-01-17 | [34344](https://github.com/airbytehq/airbyte/pull/34344) | Ensure unique state messages | | 5.0.1 | 2023-01-08 | [34033](https://github.com/airbytehq/airbyte/pull/34033) | Add standard entrypoints for usage with AirbyteLib | | 5.0.0 | 2023-08-08 | [29213](https://github.com/airbytehq/airbyte/pull/29213) | Change all `*id` fields and `products.year` to be integer | diff --git a/docs/integrations/sources/file.md b/docs/integrations/sources/file.md index 30da0439cb6b6..67bca4921a2a2 100644 --- a/docs/integrations/sources/file.md +++ b/docs/integrations/sources/file.md @@ -218,6 +218,7 @@ In order to read large files from a remote location, this connector uses the [sm | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:--------------------------------------------------------------------------------------------------------| +| 0.5.0 | 2024-03-19 | [36267](https://github.com/airbytehq/airbyte/pull/36267) | Pin airbyte-cdk version to `^0` | | 0.4.1 | 2024-03-04 | [35800](https://github.com/airbytehq/airbyte/pull/35800) | Add PyAirbyte support on Python 3.11 | | 0.4.0 | 2024-02-15 | [32354](https://github.com/airbytehq/airbyte/pull/32354) | Add Zip File Support | | 0.3.17 | 2024-02-13 | [34678](https://github.com/airbytehq/airbyte/pull/34678) | Add Fixed-Width File Support | diff --git a/docs/integrations/sources/freshdesk.md b/docs/integrations/sources/freshdesk.md index f1f1b144c3de3..da3c7f6ed6020 100644 --- a/docs/integrations/sources/freshdesk.md +++ b/docs/integrations/sources/freshdesk.md @@ -16,7 +16,8 @@ To set up the Freshdesk source connector, you'll need the Freshdesk [domain URL] 6. For **API Key**, enter your [Freshdesk API key](https://support.freshdesk.com/support/solutions/articles/215517). 7. For **Start Date**, enter the date in YYYY-MM-DDTHH:mm:ssZ format. The data added on and after this date will be replicated. 8. For **Requests per minute**, enter the number of requests per minute that this source allowed to use. The Freshdesk rate limit is 50 requests per minute per app per account. -9. Click **Set up source**. +9. For **Lookback Window**, you may specify a number of days back from the current stream state to re-read data for the **Satisfaction Ratings** stream. This helps capture updates made to existing ratings after their initial creation. However, keep in mind that records updated before (stream_state - lookback_window) won't be synced. To ensure no data loss, we recommend using a full refresh. The default lookback window is set to 14 days. +10. Click **Set up source**. ## Supported sync modes @@ -67,8 +68,9 @@ If you don't use the start date Freshdesk will retrieve only the last 30 days. M ## Changelog | Version | Date | Pull Request | Subject | -| :------ | :--------- | :------------------------------------------------------- | :------------------------------------------------------------------------------------ | -| 3.0.7 | 2024-02-12 | [35187](https://github.com/airbytehq/airbyte/pull/35187) | Manage dependencies with Poetry. | +|:--------|:-----------| :------------------------------------------------------- |:--------------------------------------------------------------------------------------| +| 3.1.0 | 2024-03-12 | [35699](https://github.com/airbytehq/airbyte/pull/35699) | Migrate to low-code | +| 3.0.7 | 2024-02-12 | [35187](https://github.com/airbytehq/airbyte/pull/35187) | Manage dependencies with Poetry. | | 3.0.6 | 2024-01-10 | [34101](https://github.com/airbytehq/airbyte/pull/34101) | Base image migration: remove Dockerfile and use the python-connector-base image | | 3.0.5 | 2023-11-30 | [33000](https://github.com/airbytehq/airbyte/pull/33000) | Base image migration: remove Dockerfile and use the python-connector-base image | | 3.0.4 | 2023-06-24 | [27680](https://github.com/airbytehq/airbyte/pull/27680) | Fix formatting | diff --git a/docs/integrations/sources/gcs.md b/docs/integrations/sources/gcs.md index c37c8f0268d49..e0d24e0716a2f 100644 --- a/docs/integrations/sources/gcs.md +++ b/docs/integrations/sources/gcs.md @@ -146,15 +146,16 @@ Leaving this field blank (default option) will disallow escaping. ## Changelog -| Version | Date | Pull Request | Subject | -|:--------|:-----------|:---------------------------------------------------------|:---------------------------------------------------------------------------------------| -| 0.3.7 | 2024-02-06 | [34936](https://github.com/airbytehq/airbyte/pull/34936) | Bump CDK version to avoid missing SyncMode errors | -| 0.3.6 | 2024-01-30 | [34681](https://github.com/airbytehq/airbyte/pull/34681) | Unpin CDK version to make compatible with the Concurrent CDK | -| 0.3.5 | 2024-01-30 | [34661](https://github.com/airbytehq/airbyte/pull/34661) | Pin CDK version until upgrade for compatibility with the Concurrent CDK | -| 0.3.4 | 2024-01-11 | [34158](https://github.com/airbytehq/airbyte/pull/34158) | Fix issue in stream reader for document file type parser | -| 0.3.3 | 2023-12-06 | [33187](https://github.com/airbytehq/airbyte/pull/33187) | Bump CDK version to hide source-defined primary key | -| 0.3.2 | 2023-11-16 | [32608](https://github.com/airbytehq/airbyte/pull/32608) | Improve document file type parser | -| 0.3.1 | 2023-11-13 | [32357](https://github.com/airbytehq/airbyte/pull/32357) | Improve spec schema | -| 0.3.0 | 2023-10-11 | [31212](https://github.com/airbytehq/airbyte/pull/31212) | Migrated to file based CDK | -| 0.2.0 | 2023-06-26 | [27725](https://github.com/airbytehq/airbyte/pull/27725) | License Update: Elv2 | -| 0.1.0 | 2023-02-16 | [23186](https://github.com/airbytehq/airbyte/pull/23186) | New Source: GCS | +| Version | Date | Pull Request | Subject | +|:--------|:-----------|:---------------------------------------------------------|:-------------------------------------------------------------------------| +| 0.4.0 | 2024-03-21 | [36373](https://github.com/airbytehq/airbyte/pull/36373) | Add Gzip and Bzip compression support. Manage dependencies with Poetry. | +| 0.3.7 | 2024-02-06 | [34936](https://github.com/airbytehq/airbyte/pull/34936) | Bump CDK version to avoid missing SyncMode errors | +| 0.3.6 | 2024-01-30 | [34681](https://github.com/airbytehq/airbyte/pull/34681) | Unpin CDK version to make compatible with the Concurrent CDK | +| 0.3.5 | 2024-01-30 | [34661](https://github.com/airbytehq/airbyte/pull/34661) | Pin CDK version until upgrade for compatibility with the Concurrent CDK | +| 0.3.4 | 2024-01-11 | [34158](https://github.com/airbytehq/airbyte/pull/34158) | Fix issue in stream reader for document file type parser | +| 0.3.3 | 2023-12-06 | [33187](https://github.com/airbytehq/airbyte/pull/33187) | Bump CDK version to hide source-defined primary key | +| 0.3.2 | 2023-11-16 | [32608](https://github.com/airbytehq/airbyte/pull/32608) | Improve document file type parser | +| 0.3.1 | 2023-11-13 | [32357](https://github.com/airbytehq/airbyte/pull/32357) | Improve spec schema | +| 0.3.0 | 2023-10-11 | [31212](https://github.com/airbytehq/airbyte/pull/31212) | Migrated to file based CDK | +| 0.2.0 | 2023-06-26 | [27725](https://github.com/airbytehq/airbyte/pull/27725) | License Update: Elv2 | +| 0.1.0 | 2023-02-16 | [23186](https://github.com/airbytehq/airbyte/pull/23186) | New Source: GCS | diff --git a/docs/integrations/sources/genesys.md b/docs/integrations/sources/genesys.md index 36b66946e13cf..63847913d4382 100644 --- a/docs/integrations/sources/genesys.md +++ b/docs/integrations/sources/genesys.md @@ -22,6 +22,7 @@ You can follow the documentation on [API credentials](https://developer.genesys. - [Users](https://developer.genesys.cloud/useragentman/users/) ## Changelog + | Version | Date | Pull Request | Subject | | :------ | :--------- | :------------------------------------------------------- | :-------------------------- | | 0.1.1 | 2023-04-27 | [25598](https://github.com/airbytehq/airbyte/pull/25598) | Use region specific API server | diff --git a/docs/integrations/sources/github.md b/docs/integrations/sources/github.md index a68273aaaf044..09d79f218e003 100644 --- a/docs/integrations/sources/github.md +++ b/docs/integrations/sources/github.md @@ -206,6 +206,9 @@ Your token should have at least the `repo` scope. Depending on which streams you | Version | Date | Pull Request | Subject | |:--------|:-----------|:------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 1.7.1 | 2024-03-24 | [00000](https://github.com/airbytehq/airbyte/pull/00000) | Support repository names with wildcards. Do not look for repository branches at discovery time. | +| 1.7.0 | 2024-03-19 | [36267](https://github.com/airbytehq/airbyte/pull/36267) | Pin airbyte-cdk version to `^0` | +| 1.6.5 | 2024-03-12 | [35986](https://github.com/airbytehq/airbyte/pull/35986) | Handle rate limit exception as config error | | 1.6.4 | 2024-03-08 | [35915](https://github.com/airbytehq/airbyte/pull/35915) | Fix per stream error handler; Make use the latest CDK version | | 1.6.3 | 2024-02-15 | [35271](https://github.com/airbytehq/airbyte/pull/35271) | Update branches schema | | 1.6.2 | 2024-02-12 | [34933](https://github.com/airbytehq/airbyte/pull/34933) | Update Airbyte CDK for integration tests | diff --git a/docs/integrations/sources/gitlab-migrations.md b/docs/integrations/sources/gitlab-migrations.md index 9e819b30f4d1a..a96dd9b0fc4a1 100644 --- a/docs/integrations/sources/gitlab-migrations.md +++ b/docs/integrations/sources/gitlab-migrations.md @@ -1,5 +1,68 @@ # Gitlab Migration Guide + +## Upgrading to 4.0.0 + +We're continuously striving to enhance the quality and reliability of our connectors at Airbyte. +As part of our commitment to delivering exceptional service, we are transitioning Source Gitlab from the Python Connector Development Kit (CDK) +to our new low-code framework improving maintainability and reliability of the connector. +However, due to differences between the Python and low-code CDKs, this migration constitutes a breaking change. + +The primary key was changed for streams `group_members`, `group_labels`, `project_members`, `project_labels`, `branches`, and `tags`. +Users will need to reset the affected streams after upgrading. + +## Connector Upgrade Guide + +### For Airbyte Open Source: Update the local connector image + +Airbyte Open Source users must manually update the connector image in their local registry before proceeding with the migration. To do so: + +1. Select **Settings** in the main navbar. + 1. Select **Sources**. +2. Find Gitlab in the list of connectors. + +:::note +You will see two versions listed, the current in-use version and the latest version available. +::: + +3. Select **Change** to update your OSS version to the latest available version. + + +### Update the connector version + +1. Select **Sources** in the main navbar. +2. Select the instance of the connector you wish to upgrade. + +:::note +Each instance of the connector must be updated separately. If you have created multiple instances of a connector, updating one will not affect the others. +::: + +3. Select **Upgrade** + 1. Follow the prompt to confirm you are ready to upgrade to the new version. + +### Refresh affected schemas and reset data + +1. Select **Connections** in the main nav bar. + 1. Select the connection(s) affected by the update. +2. Select the **Replication** tab. + 1. Select **Refresh source schema**. + 2. Select **OK**. +:::note +Any detected schema changes will be listed for your review. +::: +3. Select **Save changes** at the bottom of the page. + 1. Ensure the **Reset affected streams** option is checked. +:::note +Depending on destination type you may not be prompted to reset your data. +::: +4. Select **Save connection**. +:::note +This will reset the data in your destination and initiate a fresh sync. +::: + +For more information on resetting your data in Airbyte, see [this page](https://docs.airbyte.com/operator-guides/reset). + + ## Upgrading to 3.0.0 In this release, `merge_request_commits` stream schema has been fixed so that it returns commits for each merge_request. diff --git a/docs/integrations/sources/gitlab.md b/docs/integrations/sources/gitlab.md index b4a83d6add6e7..26780d01b821d 100644 --- a/docs/integrations/sources/gitlab.md +++ b/docs/integrations/sources/gitlab.md @@ -109,9 +109,10 @@ Gitlab has the [rate limits](https://docs.gitlab.com/ee/user/gitlab_com/index.ht | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 4.0.0 | 2024-03-25 | [35989](https://github.com/airbytehq/airbyte/pull/35989) | Migrate to low-code | | 3.0.0 | 2024-01-25 | [34548](https://github.com/airbytehq/airbyte/pull/34548) | Fix merge_request_commits stream to return commits for each merge request | -| 2.1.2 | 2024-02-12 | [35167](https://github.com/airbytehq/airbyte/pull/35167) | Manage dependencies with Poetry. | -| 2.1.1 | 2024-01-12 | [34203](https://github.com/airbytehq/airbyte/pull/34203) | prepare for airbyte-lib | +| 2.1.2 | 2024-02-12 | [35167](https://github.com/airbytehq/airbyte/pull/35167) | Manage dependencies with Poetry. | +| 2.1.1 | 2024-01-12 | [34203](https://github.com/airbytehq/airbyte/pull/34203) | prepare for airbyte-lib | | 2.1.0 | 2023-12-20 | [33676](https://github.com/airbytehq/airbyte/pull/33676) | Add fields to Commits (extended_trailers), Groups (emails_enabled, service_access_tokens_expiration_enforced) and Projects (code_suggestions, model_registry_access_level) streams | | 2.0.0 | 2023-10-23 | [31700](https://github.com/airbytehq/airbyte/pull/31700) | Add correct date-time format for Deployments, Projects and Groups Members streams | | 1.8.4 | 2023-10-19 | [31599](https://github.com/airbytehq/airbyte/pull/31599) | Base image migration: remove Dockerfile and use the python-connector-base image | diff --git a/docs/integrations/sources/google-ads.md b/docs/integrations/sources/google-ads.md index 1bc3c0733a7dd..3dc596be55364 100644 --- a/docs/integrations/sources/google-ads.md +++ b/docs/integrations/sources/google-ads.md @@ -280,6 +280,9 @@ Due to a limitation in the Google Ads API which does not allow getting performan | Version | Date | Pull Request | Subject | |:---------|:-----------|:---------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------| +| `3.4.1` | 2024-04-08 | [36891](https://github.com/airbytehq/airbyte/pull/36891) | Optimize `check` method | +| `3.4.0` | 2024-03-19 | [36267](https://github.com/airbytehq/airbyte/pull/36267) | Pin airbyte-cdk version to `^0` | +| `3.3.7` | 2024-03-15 | [36208](https://github.com/airbytehq/airbyte/pull/36208) | Added error message when there is the `unrecognized field` inside of the `custom query` | | `3.3.6` | 2024-03-01 | [35664](https://github.com/airbytehq/airbyte/pull/35664) | Fix error for new customers for incremental events streams | | `3.3.5` | 2024-02-28 | [35709](https://github.com/airbytehq/airbyte/pull/35709) | Handle 2-Step Verification exception as config error | | `3.3.4` | 2024-02-21 | [35493](https://github.com/airbytehq/airbyte/pull/35493) | Rolling back the patch 3.3.3 made for `user_interest` steam | diff --git a/docs/integrations/sources/google-analytics-data-api.md b/docs/integrations/sources/google-analytics-data-api.md index c728f8304eb65..5ba658c1b182a 100644 --- a/docs/integrations/sources/google-analytics-data-api.md +++ b/docs/integrations/sources/google-analytics-data-api.md @@ -263,7 +263,8 @@ The Google Analytics connector is subject to Google Analytics Data API quotas. P ## Changelog | Version | Date | Pull Request | Subject | -| :------ | :--------- | :------------------------------------------------------- | :------------------------------------------------------------------------------------- | +|:--------|:-----------| :------------------------------------------------------- | :------------------------------------------------------------------------------------- | +| 2.4.2 | 2024-03-20 | [36302](https://github.com/airbytehq/airbyte/pull/36302) | Don't extract state from the latest record if stream doesn't have a cursor_field | | 2.4.1 | 2024-02-09 | [35073](https://github.com/airbytehq/airbyte/pull/35073) | Manage dependencies with Poetry. | | 2.4.0 | 2024-02-07 | [34951](https://github.com/airbytehq/airbyte/pull/34951) | Replace the spec parameter from previous version to convert all `conversions:*` fields | | 2.3.0 | 2024-02-06 | [34907](https://github.com/airbytehq/airbyte/pull/34907) | Add new parameter to spec to convert `conversions:purchase` field to float | @@ -303,4 +304,4 @@ The Google Analytics connector is subject to Google Analytics Data API quotas. P | 0.1.0 | 2023-01-08 | [20889](https://github.com/airbytehq/airbyte/pull/20889) | Improved config validation, SAT | | 0.0.3 | 2022-08-15 | [15229](https://github.com/airbytehq/airbyte/pull/15229) | Source Google Analytics Data Api: code refactoring | | 0.0.2 | 2022-07-27 | [15087](https://github.com/airbytehq/airbyte/pull/15087) | fix documentationUrl | -| 0.0.1 | 2022-05-09 | [12701](https://github.com/airbytehq/airbyte/pull/12701) | Introduce Google Analytics Data API source | +| 0.0.1 | 2022-05-09 | [12701](https://github.com/airbytehq/airbyte/pull/12701) | Introduce Google Analytics Data API source | \ No newline at end of file diff --git a/docs/integrations/sources/google-analytics-v4.md b/docs/integrations/sources/google-analytics-v4.md index e3b6d1720c9e5..0977f52a98513 100644 --- a/docs/integrations/sources/google-analytics-v4.md +++ b/docs/integrations/sources/google-analytics-v4.md @@ -282,7 +282,8 @@ The Google Analytics connector should not run into the "requests per 100 seconds ## Changelog | Version | Date | Pull Request | Subject | -| :------ | :--------- | :------------------------------------------------------- | :------------------------------------------------------------------------------------------- | +|:--------| :--------- | :------------------------------------------------------- | :------------------------------------------------------------------------------------------- | +| 0.3.0 | 2024-03-19 | [36267](https://github.com/airbytehq/airbyte/pull/36267) | Pin airbyte-cdk version to `^0` | | 0.2.5 | 2024-02-09 | [35101](https://github.com/airbytehq/airbyte/pull/35101) | Manage dependencies with Poetry. | | 0.2.4 | 2024-01-22 | [34323](https://github.com/airbytehq/airbyte/pull/34323) | Update setup dependencies | | 0.2.3 | 2024-01-18 | [34353](https://github.com/airbytehq/airbyte/pull/34353) | Add End date option | diff --git a/docs/integrations/sources/google-drive.md b/docs/integrations/sources/google-drive.md index 9ea6d11db1f47..f8101bbf7edf0 100644 --- a/docs/integrations/sources/google-drive.md +++ b/docs/integrations/sources/google-drive.md @@ -245,15 +245,15 @@ Before parsing each document, the connector exports Google Document files to Doc ## Changelog -| Version | Date | Pull Request | Subject | -|---------|------------|-----------------------------------------------------------|--------------------------------------------------------------| -| 0.0.9 | 2024-02-06 | [34936](https://github.com/airbytehq/airbyte/pull/34936) | Bump CDK version to avoid missing SyncMode errors | -| 0.0.8 | 2024-01-30 | [34681](https://github.com/airbytehq/airbyte/pull/34681) | Unpin CDK version to make compatible with the Concurrent CDK | -| 0.0.7 | 2024-01-30 | [34661](https://github.com/airbytehq/airbyte/pull/34661) | Pin CDK version until upgrade for compatibility with the Concurrent CDK | -| 0.0.6 | 2023-12-16 | [33414](https://github.com/airbytehq/airbyte/pull/33414) | Prepare for airbyte-lib | -| 0.0.5 | 2023-12-14 | [33411](https://github.com/airbytehq/airbyte/pull/33411) | Bump CDK version to auto-set primary key for document file streams and support raw txt files | -| 0.0.4 | 2023-12-06 | [33187](https://github.com/airbytehq/airbyte/pull/33187) | Bump CDK version to hide source-defined primary key | -| 0.0.3 | 2023-11-16 | [31458](https://github.com/airbytehq/airbyte/pull/31458) | Improve folder id input and update document file type parser | -| 0.0.2 | 2023-11-02 | [31458](https://github.com/airbytehq/airbyte/pull/31458) | Allow syncs on shared drives | -| 0.0.1 | 2023-11-02 | [31458](https://github.com/airbytehq/airbyte/pull/31458) | Initial Google Drive source | - +| Version | Date | Pull Request | Subject | +|---------|------------|----------------------------------------------------------|----------------------------------------------------------------------------------------------| +| 0.0.10 | 2024-03-28 | [36581](https://github.com/airbytehq/airbyte/pull/36581) | Manage dependencies with Poetry | +| 0.0.9 | 2024-02-06 | [34936](https://github.com/airbytehq/airbyte/pull/34936) | Bump CDK version to avoid missing SyncMode errors | +| 0.0.8 | 2024-01-30 | [34681](https://github.com/airbytehq/airbyte/pull/34681) | Unpin CDK version to make compatible with the Concurrent CDK | +| 0.0.7 | 2024-01-30 | [34661](https://github.com/airbytehq/airbyte/pull/34661) | Pin CDK version until upgrade for compatibility with the Concurrent CDK | +| 0.0.6 | 2023-12-16 | [33414](https://github.com/airbytehq/airbyte/pull/33414) | Prepare for airbyte-lib | +| 0.0.5 | 2023-12-14 | [33411](https://github.com/airbytehq/airbyte/pull/33411) | Bump CDK version to auto-set primary key for document file streams and support raw txt files | +| 0.0.4 | 2023-12-06 | [33187](https://github.com/airbytehq/airbyte/pull/33187) | Bump CDK version to hide source-defined primary key | +| 0.0.3 | 2023-11-16 | [31458](https://github.com/airbytehq/airbyte/pull/31458) | Improve folder id input and update document file type parser | +| 0.0.2 | 2023-11-02 | [31458](https://github.com/airbytehq/airbyte/pull/31458) | Allow syncs on shared drives | +| 0.0.1 | 2023-11-02 | [31458](https://github.com/airbytehq/airbyte/pull/31458) | Initial Google Drive source | diff --git a/docs/integrations/sources/google-search-console.md b/docs/integrations/sources/google-search-console.md index 6b2c2549e6c65..58db6c487a8f0 100644 --- a/docs/integrations/sources/google-search-console.md +++ b/docs/integrations/sources/google-search-console.md @@ -190,8 +190,13 @@ Expand to see details about Google Search Console connector limitations and trou ### Connector limitations #### Rate limiting + This connector attempts to back off gracefully when it hits Reports API's rate limits. To find more information about limits, see [Usage Limits](https://developers.google.com/webmaster-tools/limits) documentation. +#### Data retention + +Google Search Console only retains data for websites from the last 16 months. Any data prior to this cutoff point will not be accessible. [Please see this article for more information](https://seotesting.com/google-search-console/how-long-does-gsc-keep-my-data/#:~:text=Google%20Search%20Console%20holds%20relevant,October%2C%202022%2C%20until%20today.). + ### Troubleshooting * Check out common troubleshooting issues for the Google Search Console source connector on our [Airbyte Forum](https://github.com/airbytehq/airbyte/discussions). @@ -202,7 +207,8 @@ This connector attempts to back off gracefully when it hits Reports API's rate l | Version | Date | Pull Request | Subject | |:---------|:-----------|:--------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------| -| 1.3.7 | 2024-02-12 | [35163](https://github.com/airbytehq/airbyte/pull/35163) | Manage dependencies with Poetry. | +| `1.4.0` | 2024-03-19 | [36267](https://github.com/airbytehq/airbyte/pull/36267) | Pin airbyte-cdk version to `^0` | +| `1.3.7` | 2024-02-12 | [35163](https://github.com/airbytehq/airbyte/pull/35163) | Manage dependencies with Poetry. | | `1.3.6` | 2023-10-26 | [31863](https://github.com/airbytehq/airbyte/pull/31863) | Base image migration: remove Dockerfile and use the python-connector-base image | | `1.3.5` | 2023-09-28 | [30822](https://github.com/airbytehq/airbyte/pull/30822) | Fix primary key for custom reports | | `1.3.4` | 2023-09-27 | [30785](https://github.com/airbytehq/airbyte/pull/30785) | Do not migrate config for the newly created connections | diff --git a/docs/integrations/sources/google-sheets.md b/docs/integrations/sources/google-sheets.md index 650d8bcdacdc8..e6cfd24ffc844 100644 --- a/docs/integrations/sources/google-sheets.md +++ b/docs/integrations/sources/google-sheets.md @@ -97,8 +97,18 @@ If your spreadsheet is viewable by anyone with its link, no further action is ne - To authenticate your Google account via OAuth, select **Authenticate via Google (OAuth)** from the dropdown and enter your Google application's client ID, client secret, and refresh token. 6. For **Spreadsheet Link**, enter the link to the Google spreadsheet. To get the link, go to the Google spreadsheet you want to sync, click **Share** in the top right corner, and click **Copy Link**. -7. (Optional) You may enable the option to **Convert Column Names to SQL-Compliant Format**. Enabling this option will allow the connector to convert column names to a standardized, SQL-friendly format. For example, a column name of `Café Earnings 2022` will be converted to `cafe_earnings_2022`. We recommend enabling this option if your target destination is SQL-based (ie Postgres, MySQL). Set to false by default. -8. Click **Set up source** and wait for the tests to complete. +7. For **Batch Size**, enter an integer which represents batch size when processing a Google Sheet. Default value is 200. +Batch size is an integer representing row batch size for each sent request to Google Sheets API. +Row batch size means how many rows are processed from the google sheet, for example default value 200 +would process rows 1-201, then 201-401 and so on. +Based on [Google Sheets API limits documentation](https://developers.google.com/sheets/api/limits), +it is possible to send up to 300 requests per minute, but each individual request has to be processed under 180 seconds, +otherwise the request returns a timeout error. In regards to this information, consider network speed and +number of columns of the google sheet when deciding a batch_size value. +Default value should cover most of the cases, but if a google sheet has over 100,000 records or more, +consider increasing batch_size value. +8. (Optional) You may enable the option to **Convert Column Names to SQL-Compliant Format**. Enabling this option will allow the connector to convert column names to a standardized, SQL-friendly format. For example, a column name of `Café Earnings 2022` will be converted to `cafe_earnings_2022`. We recommend enabling this option if your target destination is SQL-based (ie Postgres, MySQL). Set to false by default. +9. Click **Set up source** and wait for the tests to complete. @@ -151,6 +161,9 @@ Airbyte batches requests to the API in order to efficiently pull data and respec | Version | Date | Pull Request | Subject | |---------|------------|----------------------------------------------------------|-----------------------------------------------------------------------------------| +| 0.5.1 | 2024-04-11 | [35404](https://github.com/airbytehq/airbyte/pull/35404) | Add `row_batch_size` parameter more granular control read records | +| 0.5.0 | 2024-03-26 | [36515](https://github.com/airbytehq/airbyte/pull/36515) | Resolve poetry dependency conflict, add record counts to state messages | +| 0.4.0 | 2024-03-19 | [36267](https://github.com/airbytehq/airbyte/pull/36267) | Pin airbyte-cdk version to `^0` | | 0.3.17 | 2024-02-29 | [35722](https://github.com/airbytehq/airbyte/pull/35722) | Add logic to emit stream statuses | | 0.3.16 | 2024-02-12 | [35136](https://github.com/airbytehq/airbyte/pull/35136) | Fix license in `pyproject.toml`. | | 0.3.15 | 2024-02-07 | [34944](https://github.com/airbytehq/airbyte/pull/34944) | Manage dependencies with Poetry. | diff --git a/docs/integrations/sources/greenhouse.md b/docs/integrations/sources/greenhouse.md index 05b270d34d2e3..2836e23492c6a 100644 --- a/docs/integrations/sources/greenhouse.md +++ b/docs/integrations/sources/greenhouse.md @@ -27,32 +27,39 @@ The Greenhouse source connector supports the following [sync modes](https://docs ## Supported Streams - [Activity Feed](https://developers.greenhouse.io/harvest.html#get-retrieve-activity-feed) -- [Applications](https://developers.greenhouse.io/harvest.html#get-list-applications) -- [Applications Interviews](https://developers.greenhouse.io/harvest.html#get-list-scheduled-interviews-for-application) +- [Applications](https://developers.greenhouse.io/harvest.html#get-list-applications) \(Incremental\) +- [Applications Interviews](https://developers.greenhouse.io/harvest.html#get-list-scheduled-interviews-for-application) \(Incremental\) +- [Applications Demographics Answers](https://developers.greenhouse.io/harvest.html#get-list-demographic-answers-for-application) \(Incremental\) +- [Demographics Answers](https://developers.greenhouse.io/harvest.html#get-list-demographic-answers) \(Incremental\) +- [Demographic Answer Options](https://developers.greenhouse.io/harvest.html#get-list-demographic-answer-options) +- [Demographic Answer Options For Question](https://developers.greenhouse.io/harvest.html#get-list-demographic-answer-options-for-demographic-question) +- [Demographic Questions](https://developers.greenhouse.io/harvest.html#get-list-demographic-questions) +- [Demographic Question Set](https://developers.greenhouse.io/harvest.html#get-list-demographic-question-sets) +- [Demographic Questions For Question Set](https://developers.greenhouse.io/harvest.html#get-list-demographic-questions-for-demographic-question-set) - [Approvals](https://developers.greenhouse.io/harvest.html#get-list-approvals-for-job) -- [Candidates](https://developers.greenhouse.io/harvest.html#get-list-candidates) +- [Candidates](https://developers.greenhouse.io/harvest.html#get-list-candidates) \(Incremental\) - [Close Reasons](https://developers.greenhouse.io/harvest.html#get-list-close-reasons) - [Custom Fields](https://developers.greenhouse.io/harvest.html#get-list-custom-fields) - [Degrees](https://developers.greenhouse.io/harvest.html#get-list-degrees) - [Departments](https://developers.greenhouse.io/harvest.html#get-list-departments) - [Disciplines](https://developers.greenhouse.io/harvest.html#get-list-approvals-for-job) -- [EEOC](https://developers.greenhouse.io/harvest.html#get-list-eeoc) -- [Email Templates](https://developers.greenhouse.io/harvest.html#get-list-email-templates) -- [Interviews](https://developers.greenhouse.io/harvest.html#get-list-scheduled-interviews) -- [Job Posts](https://developers.greenhouse.io/harvest.html#get-list-job-posts) -- [Job Stages](https://developers.greenhouse.io/harvest.html#get-list-job-stages) -- [Jobs](https://developers.greenhouse.io/harvest.html#get-list-jobs) +- [EEOC](https://developers.greenhouse.io/harvest.html#get-list-eeoc) \(Incremental\) +- [Email Templates](https://developers.greenhouse.io/harvest.html#get-list-email-templates) \(Incremental\) +- [Interviews](https://developers.greenhouse.io/harvest.html#get-list-scheduled-interviews) \(Incremental\) +- [Job Posts](https://developers.greenhouse.io/harvest.html#get-list-job-posts) \(Incremental\) +- [Job Stages](https://developers.greenhouse.io/harvest.html#get-list-job-stages) \(Incremental\) +- [Jobs](https://developers.greenhouse.io/harvest.html#get-list-jobs) \(Incremental\) - [Job Openings](https://developers.greenhouse.io/harvest.html#get-list-job-openings) -- [Jobs Stages](https://developers.greenhouse.io/harvest.html#get-list-job-stages-for-job) -- [Offers](https://developers.greenhouse.io/harvest.html#get-list-offers) +- [Jobs Stages](https://developers.greenhouse.io/harvest.html#get-list-job-stages-for-job) \(Incremental\) +- [Offers](https://developers.greenhouse.io/harvest.html#get-list-offers) \(Incremental\) - [Offices](https://developers.greenhouse.io/harvest.html#get-list-offices) - [Prospect Pools](https://developers.greenhouse.io/harvest.html#get-list-prospect-pools) - [Rejection Reasons](https://developers.greenhouse.io/harvest.html#get-list-rejection-reasons) - [Schools](https://developers.greenhouse.io/harvest.html#get-list-schools) -- [Scorecards](https://developers.greenhouse.io/harvest.html#get-list-scorecards) +- [Scorecards](https://developers.greenhouse.io/harvest.html#get-list-scorecards) \(Incremental\) - [Sources](https://developers.greenhouse.io/harvest.html#get-list-sources) - [Tags](https://developers.greenhouse.io/harvest.html#get-list-candidate-tags) -- [Users](https://developers.greenhouse.io/harvest.html#get-list-users) +- [Users](https://developers.greenhouse.io/harvest.html#get-list-users) \(Incremental\) - [User Permissions](https://developers.greenhouse.io/harvest.html#get-list-job-permissions) - [User Roles](https://developers.greenhouse.io/harvest.html#the-user-role-object) @@ -62,22 +69,23 @@ The Greenhouse connector should not run into Greenhouse API limitations under no ## Changelog -| Version | Date | Pull Request | Subject | -|:--------|:-----------|:---------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| 0.5.0 | 2024-02-20 | [35465](https://github.com/airbytehq/airbyte/pull/35465) | Per-error reporting and continue sync on stream failures | -| 0.4.5 | 2024-02-09 | [35077](https://github.com/airbytehq/airbyte/pull/35077) | Manage dependencies with Poetry. | -| 0.4.4 | 2023-11-29 | [32397](https://github.com/airbytehq/airbyte/pull/32397) | Increase test coverage and migrate to base image | -| 0.4.3 | 2023-09-20 | [30648](https://github.com/airbytehq/airbyte/pull/30648) | Update candidates.json | -| 0.4.2 | 2023-08-02 | [28969](https://github.com/airbytehq/airbyte/pull/28969) | Update CDK version | -| 0.4.1 | 2023-06-28 | [27773](https://github.com/airbytehq/airbyte/pull/27773) | Update following state breaking changes | +| Version | Date | Pull Request | Subject | +|:--------|:-----------|:---------------------------------------------------------|:----------------------------------------------------------------------------------| +| 0.5.1 | 2024-03-12 | [35988](https://github.com/airbytehq/airbyte/pull/35988) | Unpin CDK version | +| 0.5.0 | 2024-02-20 | [35465](https://github.com/airbytehq/airbyte/pull/35465) | Per-error reporting and continue sync on stream failures | +| 0.4.5 | 2024-02-09 | [35077](https://github.com/airbytehq/airbyte/pull/35077) | Manage dependencies with Poetry. | +| 0.4.4 | 2023-11-29 | [32397](https://github.com/airbytehq/airbyte/pull/32397) | Increase test coverage and migrate to base image | +| 0.4.3 | 2023-09-20 | [30648](https://github.com/airbytehq/airbyte/pull/30648) | Update candidates.json | +| 0.4.2 | 2023-08-02 | [28969](https://github.com/airbytehq/airbyte/pull/28969) | Update CDK version | +| 0.4.1 | 2023-06-28 | [27773](https://github.com/airbytehq/airbyte/pull/27773) | Update following state breaking changes | | 0.4.0 | 2023-04-26 | [25332](https://github.com/airbytehq/airbyte/pull/25332) | Add new streams: `ActivityFeed`, `Approvals`, `Disciplines`, `Eeoc`, `EmailTemplates`, `Offices`, `ProspectPools`, `Schools`, `Tags`, `UserPermissions`, `UserRoles` | -| 0.3.1 | 2023-03-06 | [23231](https://github.com/airbytehq/airbyte/pull/23231) | Publish using low-code CDK Beta version | -| 0.3.0 | 2022-10-19 | [18154](https://github.com/airbytehq/airbyte/pull/18154) | Extend `Users` stream schema | -| 0.2.11 | 2022-09-27 | [17239](https://github.com/airbytehq/airbyte/pull/17239) | Always install the latest version of Airbyte CDK | -| 0.2.10 | 2022-09-05 | [16338](https://github.com/airbytehq/airbyte/pull/16338) | Implement incremental syncs & fix SATs | -| 0.2.9 | 2022-08-22 | [15800](https://github.com/airbytehq/airbyte/pull/15800) | Bugfix to allow reading sentry.yaml and schemas at runtime | -| 0.2.8 | 2022-08-10 | [15344](https://github.com/airbytehq/airbyte/pull/15344) | Migrate connector to config-based framework | -| 0.2.7 | 2022-04-15 | [11941](https://github.com/airbytehq/airbyte/pull/11941) | Correct Schema data type for Applications, Candidates, Scorecards and Users | -| 0.2.6 | 2021-11-08 | [7607](https://github.com/airbytehq/airbyte/pull/7607) | Implement demographics streams support. Update SAT for demographics streams | -| 0.2.5 | 2021-09-22 | [6377](https://github.com/airbytehq/airbyte/pull/6377) | Refactor the connector to use CDK. Implement additional stream support | -| 0.2.4 | 2021-09-15 | [6238](https://github.com/airbytehq/airbyte/pull/6238) | Add identification of accessible streams for API keys with limited permissions | +| 0.3.1 | 2023-03-06 | [23231](https://github.com/airbytehq/airbyte/pull/23231) | Publish using low-code CDK Beta version | +| 0.3.0 | 2022-10-19 | [18154](https://github.com/airbytehq/airbyte/pull/18154) | Extend `Users` stream schema | +| 0.2.11 | 2022-09-27 | [17239](https://github.com/airbytehq/airbyte/pull/17239) | Always install the latest version of Airbyte CDK | +| 0.2.10 | 2022-09-05 | [16338](https://github.com/airbytehq/airbyte/pull/16338) | Implement incremental syncs & fix SATs | +| 0.2.9 | 2022-08-22 | [15800](https://github.com/airbytehq/airbyte/pull/15800) | Bugfix to allow reading sentry.yaml and schemas at runtime | +| 0.2.8 | 2022-08-10 | [15344](https://github.com/airbytehq/airbyte/pull/15344) | Migrate connector to config-based framework | +| 0.2.7 | 2022-04-15 | [11941](https://github.com/airbytehq/airbyte/pull/11941) | Correct Schema data type for Applications, Candidates, Scorecards and Users | +| 0.2.6 | 2021-11-08 | [7607](https://github.com/airbytehq/airbyte/pull/7607) | Implement demographics streams support. Update SAT for demographics streams | +| 0.2.5 | 2021-09-22 | [6377](https://github.com/airbytehq/airbyte/pull/6377) | Refactor the connector to use CDK. Implement additional stream support | +| 0.2.4 | 2021-09-15 | [6238](https://github.com/airbytehq/airbyte/pull/6238) | Add identification of accessible streams for API keys with limited permissions | diff --git a/docs/integrations/sources/harvest-migrations.md b/docs/integrations/sources/harvest-migrations.md new file mode 100644 index 0000000000000..ee28f959bc1a0 --- /dev/null +++ b/docs/integrations/sources/harvest-migrations.md @@ -0,0 +1,31 @@ +# Harvest Migration Guide + +## Upgrading to 1.0.0 + +This update results in a change the following streams, requiring reset: +- `expenses_clients` +- `expenses_categories` +- `expenses_projects` +- `expenses_team` +- `time_clients` +- `time_projects` +- `time_tasks` +- `time_team` +- `uninvoiced` +- `estimate_messages` +- `invoice_payments` +- `invoice_messages` +- `project_assignments` + +We're continuously striving to enhance the quality and reliability of our connectors at Airbyte. As part of our commitment to delivering exceptional service, we are transitioning Source Harvest from the Python Connector Development Kit (CDK) to our new low-code framework improving maintainability and reliability of the connector. However, due to differences between the Python and low-code CDKs, this migration constitutes a breaking change. + +## Steps to Reset Streams + +To reset your data for the impacted streams, follow the steps below: + +1. Select **Connections** in the main nav bar. + 1. Select the connection(s) affected by the update. +2. Select the **Status** tab. + 1. In the **Enabled streams** list, click the three dots on the right side of the stream requiring reset and select **Reset this stream**. + +A fresh sync will run for the stream. For more information on resetting your data in Airbyte, see [this page](https://docs.airbyte.com/operator-guides/reset). diff --git a/docs/integrations/sources/harvest.md b/docs/integrations/sources/harvest.md index aff5c5ad474db..ca67d7f04f852 100644 --- a/docs/integrations/sources/harvest.md +++ b/docs/integrations/sources/harvest.md @@ -65,12 +65,19 @@ The Harvest source connector supports the following [sync modes](https://docs.ai - [Project Task Assignments](https://help.getharvest.com/api-v2/projects-api/projects/task-assignments/) \(Incremental\) - [Projects](https://help.getharvest.com/api-v2/projects-api/projects/projects/) \(Incremental\) - [Roles](https://help.getharvest.com/api-v2/roles-api/roles/roles/) \(Incremental\) +- [Users](https://help.getharvest.com/api-v2/users-api/users/users/) \(Incremental\) - [User Billable Rates](https://help.getharvest.com/api-v2/users-api/users/billable-rates/) - [User Cost Rates](https://help.getharvest.com/api-v2/users-api/users/cost-rates/) - [User Project Assignments](https://help.getharvest.com/api-v2/users-api/users/project-assignments/) \(Incremental\) -- [Expense Reports](https://help.getharvest.com/api-v2/reports-api/reports/expense-reports/) -- [Uninvoiced Report](https://help.getharvest.com/api-v2/reports-api/reports/uninvoiced-report/) -- [Time Reports](https://help.getharvest.com/api-v2/reports-api/reports/time-reports/) +- [Expense Clients Report](https://help.getharvest.com/api-v2/reports-api/reports/expense-reports/#clients-report) \(Incremental\) +- [Expense Projects Report](https://help.getharvest.com/api-v2/reports-api/reports/expense-reports/#projects-report) \(Incremental\) +- [Expense Categories Report](https://help.getharvest.com/api-v2/reports-api/reports/expense-reports/#expense-categories-report) \(Incremental\) +- [Expense Team Report](https://help.getharvest.com/api-v2/reports-api/reports/expense-reports/#team-report) \(Incremental\) +- [Uninvoiced Report](https://help.getharvest.com/api-v2/reports-api/reports/uninvoiced-report/) \(Incremental\) +- [Time Clients Report](https://help.getharvest.com/api-v2/reports-api/reports/time-reports/#clients-report) \(Incremental\) +- [Time Projects Report](https://help.getharvest.com/api-v2/reports-api/reports/time-reports/#projects-report) \(Incremental\) +- [Time Tasks Report](https://help.getharvest.com/api-v2/reports-api/reports/time-reports/#tasks-report) \(Incremental\) +- [Time Team Report](https://help.getharvest.com/api-v2/reports-api/reports/time-reports/#team-report) \(Incremental\) - [Project Budget Report](https://help.getharvest.com/api-v2/reports-api/reports/project-budget-report/) ## Performance considerations @@ -81,6 +88,8 @@ The connector is restricted by the [Harvest rate limits](https://help.getharvest | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:-----------------------------------------------------------------------------------| +| 1.0.0 | 2024-04-15 | [35863](https://github.com/airbytehq/airbyte/pull/35863) | Migrates connector to Low Code CDK, Updates incremental substream state to per-partition state | +| 0.2.0 | 2024-04-08 | [36889](https://github.com/airbytehq/airbyte/pull/36889) | Unpin CDK version | | 0.1.24 | 2024-02-26 | [35541](https://github.com/airbytehq/airbyte/pull/35541) | Improve check command to avoid missing alerts | | 0.1.23 | 2024-02-19 | [35305](https://github.com/airbytehq/airbyte/pull/35305) | Fix pendulum parsing error | | 0.1.22 | 2024-02-12 | [35154](https://github.com/airbytehq/airbyte/pull/35154) | Manage dependencies with Poetry. | diff --git a/docs/integrations/sources/http-request.md b/docs/integrations/sources/http-request.md index 7d76e2a20c3b0..2cb1b0cb60d72 100644 --- a/docs/integrations/sources/http-request.md +++ b/docs/integrations/sources/http-request.md @@ -8,7 +8,7 @@ This connector is graveyarded and will not be receiving any updates from the Air ## Overview -This connector allows you to generally connect to any HTTP API. In order to use this connector, you must manually bring it in as a custom connector. The steps to do this can be found [here](../../connector-development/tutorials/cdk-tutorial-python-http/use-connector-in-airbyte.md). +This connector allows you to generally connect to any HTTP API. In order to use this connector, you must manually bring it in as a custom connector. The steps to do this can be found [here](../../connector-development/tutorials/custom-python-connector/0-getting-started.md). ## Where do I find the Docker image? diff --git a/docs/integrations/sources/hubspot.md b/docs/integrations/sources/hubspot.md index c23b528377530..3605388dc6bc1 100644 --- a/docs/integrations/sources/hubspot.md +++ b/docs/integrations/sources/hubspot.md @@ -322,6 +322,8 @@ The connector is restricted by normal HubSpot [rate limitations](https://legacyd | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 4.1.1 | 2024-04-11 | [35945](https://github.com/airbytehq/airbyte/pull/35945) | Add integration tests | +| 4.1.0 | 2024-03-27 | [36541](https://github.com/airbytehq/airbyte/pull/36541) | Added test configuration features, fixed type hints | | 4.0.0 | 2024-03-10 | [35662](https://github.com/airbytehq/airbyte/pull/35662) | Update `Deals Property History` and `Companies Property History` schemas | | 3.3.0 | 2024-02-16 | [34597](https://github.com/airbytehq/airbyte/pull/34597) | Make start date not required, sync all data from default value if it's not provided | | 3.2.0 | 2024-02-15 | [35328](https://github.com/airbytehq/airbyte/pull/35328) | Add mailingIlsListsIncluded and mailingIlsListsExcluded fields to Marketing emails stream schema | diff --git a/docs/integrations/sources/instagram.md b/docs/integrations/sources/instagram.md index 96a516bc8cc08..5d36d9c0deddb 100644 --- a/docs/integrations/sources/instagram.md +++ b/docs/integrations/sources/instagram.md @@ -113,6 +113,7 @@ Instagram limits the number of requests that can be made at a time. See Facebook | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------| +| 3.0.5 | 2024-03-20 | [36314](https://github.com/airbytehq/airbyte/pull/36314) | Unpin CDK version | | 3.0.4 | 2024-03-07 | [35875](https://github.com/airbytehq/airbyte/pull/35875) | Remove `total_interactions` from the `MediaInsights` queries. | | 3.0.3 | 2024-02-12 | [35177](https://github.com/airbytehq/airbyte/pull/35177) | Manage dependencies with Poetry. | | 3.0.2 | 2024-01-15 | [34254](https://github.com/airbytehq/airbyte/pull/34254) | prepare for airbyte-lib | diff --git a/docs/integrations/sources/intercom.md b/docs/integrations/sources/intercom.md index aebeb77e3101e..4f9da48667c37 100644 --- a/docs/integrations/sources/intercom.md +++ b/docs/integrations/sources/intercom.md @@ -74,6 +74,9 @@ The Intercom connector should not run into Intercom API limitations under normal | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:---------------------------------------------------------------------------------------------------------| +| 0.6.3 | 2024-03-23 | [36414](https://github.com/airbytehq/airbyte/pull/36414) | Fixed `pagination` regression bug for `conversations` stream | +| 0.6.2 | 2024-03-22 | [36277](https://github.com/airbytehq/airbyte/pull/36277) | Fixed the bug for `conversations` stream failed due to `404 - User Not Found`, when the `2.10` API version is used | +| 0.6.1 | 2024-03-18 | [36232](https://github.com/airbytehq/airbyte/pull/36232) | Fixed the bug caused the regression when setting the `Intercom-Version` header, updated the source to use the latest CDK version | | 0.6.0 | 2024-02-12 | [35176](https://github.com/airbytehq/airbyte/pull/35176) | Update the connector to use `2.10` API version | | 0.5.1 | 2024-02-12 | [35148](https://github.com/airbytehq/airbyte/pull/35148) | Manage dependencies with Poetry. | | 0.5.0 | 2024-02-09 | [35063](https://github.com/airbytehq/airbyte/pull/35063) | Add missing fields for mutiple streams | diff --git a/docs/integrations/sources/ip2whois.md b/docs/integrations/sources/ip2whois.md index 9d0996e2b204c..41d403639cfa1 100644 --- a/docs/integrations/sources/ip2whois.md +++ b/docs/integrations/sources/ip2whois.md @@ -28,6 +28,7 @@ Ip2whois APIs allows you to query up to 500 WHOIS domain name per month. ## Changelog + | Version | Date | Pull Request | Subject | | :------ | :--------- | :-------------------------------------------------------- | :----------------------------------------- | | 0.1.0 | 2022-10-29 | [#18651](https://github.com/airbytehq/airbyte/pull/18651) | 🎉 New source: Ip2whois [low-code SDK]| diff --git a/docs/integrations/sources/iterable.md b/docs/integrations/sources/iterable.md index ce4b7d2009678..2006af9cbe673 100644 --- a/docs/integrations/sources/iterable.md +++ b/docs/integrations/sources/iterable.md @@ -80,6 +80,8 @@ The Iterable source connector supports the following [sync modes](https://docs.a | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 0.5.0 | 2024-03-18 | [36231](https://github.com/airbytehq/airbyte/pull/36231) | Migrate connector to low-code | +| 0.4.0 | 2024-03-19 | [36267](https://github.com/airbytehq/airbyte/pull/36267) | Pin airbyte-cdk version to `^0` | | 0.3.0 | 2024-02-20 | [35465](https://github.com/airbytehq/airbyte/pull/35465) | Per-error reporting and continue sync on stream failures | | 0.2.2 | 2024-02-12 | [35150](https://github.com/airbytehq/airbyte/pull/35150) | Manage dependencies with Poetry. | | 0.2.1 | 2024-01-12 | [1234](https://github.com/airbytehq/airbyte/pull/1234) | prepare for airbyte-lib | diff --git a/docs/integrations/sources/jira.md b/docs/integrations/sources/jira.md index e742a19206e36..2040116c55092 100644 --- a/docs/integrations/sources/jira.md +++ b/docs/integrations/sources/jira.md @@ -124,6 +124,7 @@ The Jira connector should not run into Jira API limitations under normal usage. | Version | Date | Pull Request | Subject | |:--------|:-----------|:-----------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 1.2.0 | 2024-03-19 | [36267](https://github.com/airbytehq/airbyte/pull/36267) | Pin airbyte-cdk version to `^0` | | 1.1.0 | 2024-02-27 | [35656](https://github.com/airbytehq/airbyte/pull/35656) | Add new fields to streams `board_issues`, `filter_sharing`, `filters`, `issues`, `permission_schemes`, `sprint_issues`, `users_groups_detailed`, and `workflows` | | 1.0.2 | 2024-02-12 | [35160](https://github.com/airbytehq/airbyte/pull/35160) | Manage dependencies with Poetry. | | 1.0.1 | 2024-01-24 | [34470](https://github.com/airbytehq/airbyte/pull/34470) | Add state checkpoint interval for all streams | diff --git a/docs/integrations/sources/klaviyo.md b/docs/integrations/sources/klaviyo.md index 22fd67a883660..b60762a4f0b40 100644 --- a/docs/integrations/sources/klaviyo.md +++ b/docs/integrations/sources/klaviyo.md @@ -63,6 +63,9 @@ The Klaviyo connector should not run into Klaviyo API limitations under normal u | Version | Date | Pull Request | Subject | |:---------|:-----------|:-----------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------| +| `2.5.0` | 2024-04-15 | [36264](https://github.com/airbytehq/airbyte/pull/36264) | Migrate to low-code | +| `2.4.0` | 2024-04-11 | [36989](https://github.com/airbytehq/airbyte/pull/36989) | Update `Campaigns` schema | +| `2.3.0` | 2024-03-19 | [36267](https://github.com/airbytehq/airbyte/pull/36267) | Pin airbyte-cdk version to `^0` | | `2.2.0` | 2024-02-27 | [35637](https://github.com/airbytehq/airbyte/pull/35637) | Fix `predictive_analytics` field in stream `profiles` | | `2.1.3` | 2024-02-15 | [35336](https://github.com/airbytehq/airbyte/pull/35336) | Added type transformer for the `profiles` stream. | | `2.1.2` | 2024-02-09 | [35088](https://github.com/airbytehq/airbyte/pull/35088) | Manage dependencies with Poetry. | diff --git a/docs/integrations/sources/kyriba.md b/docs/integrations/sources/kyriba.md index 4792928ce5d96..24298bfe4080a 100644 --- a/docs/integrations/sources/kyriba.md +++ b/docs/integrations/sources/kyriba.md @@ -61,6 +61,7 @@ The Kyriba connector should not run into API limitations under normal usage. [Cr
    ## Changelog + | Version | Date | Pull Request | Subject | | :------ | :--------- | :------------------------------------------------------- | :--------------------------- | | 0.1.1 | 2024-01-30 | [34545](https://github.com/airbytehq/airbyte/pull/34545) | Updates CDK, Base image migration: remove Dockerfile and use the python-connector-base image | diff --git a/docs/integrations/sources/kyve.md b/docs/integrations/sources/kyve.md index 4fd218ec4e550..ca023fcf6561e 100644 --- a/docs/integrations/sources/kyve.md +++ b/docs/integrations/sources/kyve.md @@ -23,7 +23,7 @@ You can fetch with one source configuration more than one pool simultaneously. Y ## Changelog -| Version | Date | Subject | -| :------ |:---------|:----------------------------------------------------| -| 0.1.0 | 25-05-23 | Initial release of KYVE source connector | -| 0.2.0 | 10-11-23 | Update KYVE source to support to Mainnet and Testnet| \ No newline at end of file +| Version | Date | Pull Request | Subject | +| :------ |:---------|:-------------|:----------------------------------------------------| +| 0.2.0 | 2023-11-10 | | Update KYVE source to support to Mainnet and Testnet| +| 0.1.0 | 2023-05-25 | | Initial release of KYVE source connector | diff --git a/docs/integrations/sources/linkedin-ads-migrations.md b/docs/integrations/sources/linkedin-ads-migrations.md new file mode 100644 index 0000000000000..6816e08cfde4b --- /dev/null +++ b/docs/integrations/sources/linkedin-ads-migrations.md @@ -0,0 +1,40 @@ +# LinkedIn Ads Migration Guide + +## Upgrading to 1.0.0 + +Version 1.0.0 introduces changes in primary key for all *-analytics streams (including custom ones). +- "ad_campaign_analytics" +- "ad_creative_analytics" +- "ad_impression_device_analytics" +- "ad_member_company_size_analytics" +- "ad_member_country_analytics" +- "ad_member_job_function_analytics" +- "ad_member_job_title_analytics" +- "ad_member_industry_analytics" +- "ad_member_seniority_analytics" +- "ad_member_region_analytics" +- "ad_member_company_analytics" + +## Migration Steps + +### Refresh affected schemas and reset data + +1. Select **Connections** in the main nav bar. + 1. Select the connection(s) affected by the update. +2. Select the **Replication** tab. + 1. Select **Refresh source schema**. + 2. Select **OK**. +:::note +Any detected schema changes will be listed for your review. +::: +3. Select **Save changes** at the bottom of the page. + 1. Ensure the **Reset affected streams** option is checked. +:::note +Depending on destination type you may not be prompted to reset your data. +::: +4. Select **Save connection**. +:::note +This will reset the data in your destination and initiate a fresh sync. +::: + +For more information on resetting your data in Airbyte, see [this page](https://docs.airbyte.com/operator-guides/reset). diff --git a/docs/integrations/sources/linkedin-ads.md b/docs/integrations/sources/linkedin-ads.md index 858c297f7958b..a22f098beb20d 100644 --- a/docs/integrations/sources/linkedin-ads.md +++ b/docs/integrations/sources/linkedin-ads.md @@ -135,7 +135,7 @@ The LinkedIn Ads source connector supports the following [sync modes](https://do :::info -For Analytics Streams such as `Ad Analytics by Campaign` and `Ad Analytics by Creative`, the `pivot` column name is renamed to `_pivot` to handle the data normalization correctly and avoid name conflicts with certain destinations. +For Ad Analytics Streams such as `Ad Analytics by Campaign` and `Ad Analytics by Creative`, the `pivot` column name is renamed to `pivotValue` to handle the data normalization correctly and avoid name conflicts with certain destinations. This field contains the ID of the associated entity as a [URN](https://learn.microsoft.com/en-us/linkedin/shared/api-guide/concepts/urns). Please refer to the [LinkedIn documentation](https://learn.microsoft.com/en-us/linkedin/marketing/urn-resolution?view=li-lms-2023-05) for the format of the URN value for the Ad Analytics streams. ::: @@ -171,6 +171,8 @@ After 5 unsuccessful attempts - the connector will stop the sync operation. In s | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------| +| 1.0.0 | 2024-04-10 | [36927](https://github.com/airbytehq/airbyte/pull/36927) | Update primary key for Analytics Streams | +| 0.8.0 | 2024-03-19 | [36267](https://github.com/airbytehq/airbyte/pull/36267) | Pin airbyte-cdk version to `^0` | | 0.7.0 | 2024-02-20 | [35465](https://github.com/airbytehq/airbyte/pull/35465) | Per-error reporting and continue sync on stream failures | | 0.6.8 | 2024-02-09 | [35086](https://github.com/airbytehq/airbyte/pull/35086) | Manage dependencies with Poetry. | | 0.6.7 | 2024-01-11 | [34152](https://github.com/airbytehq/airbyte/pull/34152) | prepare for airbyte-lib | diff --git a/docs/integrations/sources/low-code.md b/docs/integrations/sources/low-code.md new file mode 100644 index 0000000000000..fe17e509f111e --- /dev/null +++ b/docs/integrations/sources/low-code.md @@ -0,0 +1,20 @@ +# Low-code + +This doc is currently used for changelog purposes only internally, so that we can make sure that the source stays +up-to-date with the latest version of the CDK. The doc is hidden from the docs site. + +The changelog below is automatically updated by the `bump_version` command as part of the Airbyte CDK publish flow. + +## CHANGELOG + +| Version | Date | Pull Request | Subject | +|:--------|:-----------|:---------------------------------------------------------|:---------------------------------------------------------------------| +| 0.81.3 | 2024-04-12 | [36501](https://github.com/airbytehq/airbyte/pull/36501) | Bump CDK version to 0.81.4 | +| 0.81.2 | 2024-04-11 | [36501](https://github.com/airbytehq/airbyte/pull/36501) | Bump CDK version to 0.81.3 | +| 0.81.1 | 2024-04-11 | [36501](https://github.com/airbytehq/airbyte/pull/36501) | Bump CDK version to 0.81.1 | +| 0.81.0 | 2024-04-09 | [36501](https://github.com/airbytehq/airbyte/pull/36501) | Bump CDK version to 0.81.0 | +| 0.80.0 | 2024-04-09 | [36501](https://github.com/airbytehq/airbyte/pull/36501) | Bump CDK version to 0.80.0 | +| 0.79.2 | 2024-04-09 | [36501](https://github.com/airbytehq/airbyte/pull/36501) | Bump CDK version to 0.79.2 | +| 0.79.1 | 2024-04-05 | [36501](https://github.com/airbytehq/airbyte/pull/36501) | Bump CDK version to 0.79.1 | +| 0.79.0 | 2024-04-05 | [36501](https://github.com/airbytehq/airbyte/pull/36501) | Bump CDK version to 0.79.0 | +| 0.78.9 | 2024-04-04 | [36834](https://github.com/airbytehq/airbyte/pull/36834) | Update CDK dependency to version 0.78.9 (before new publishing flow) | diff --git a/docs/integrations/sources/mailchimp-migrations.md b/docs/integrations/sources/mailchimp-migrations.md index c236c549eef2f..d09683b8e643b 100644 --- a/docs/integrations/sources/mailchimp-migrations.md +++ b/docs/integrations/sources/mailchimp-migrations.md @@ -1,5 +1,34 @@ # Mailchimp Migration Guide +## Upgrading to 2.0.0 + +Version 2.0.0 introduces changes in primary key for streams `Segment Members` and `List Members`. + +## Migration Steps + +### Refresh affected schemas and reset data + +1. Select **Connections** in the main nav bar. + 1. Select the connection(s) affected by the update. +2. Select the **Replication** tab. + 1. Select **Refresh source schema**. + 2. Select **OK**. +:::note +Any detected schema changes will be listed for your review. +::: +3. Select **Save changes** at the bottom of the page. + 1. Ensure the **Reset affected streams** option is checked. +:::note +Depending on destination type you may not be prompted to reset your data. +::: +4. Select **Save connection**. +:::note +This will reset the data in your destination and initiate a fresh sync. +::: + +For more information on resetting your data in Airbyte, see [this page](https://docs.airbyte.com/operator-guides/reset). + + ## Upgrading to 1.0.0 Version 1.0.0 of the Source Mailchimp connector introduces a number of breaking changes to the schemas of all incremental streams. A full schema refresh and data reset are required when upgrading to this version. diff --git a/docs/integrations/sources/mailchimp.md b/docs/integrations/sources/mailchimp.md index c6aed7b58e578..c45fd006c7a0b 100644 --- a/docs/integrations/sources/mailchimp.md +++ b/docs/integrations/sources/mailchimp.md @@ -122,7 +122,9 @@ Now that you have set up the Mailchimp source connector, check out the following ## Changelog | Version | Date | Pull Request | Subject | -| ------- | ---------- | -------------------------------------------------------- | -------------------------------------------------------------------------- | +|---------|------------|----------------------------------------------------------|----------------------------------------------------------------------------| +| 2.0.0 | 2024-04-01 | [35281](https://github.com/airbytehq/airbyte/pull/35281) | Migrate to Low-Code | +| 1.2.0 | 2024-03-28 | [36600](https://github.com/airbytehq/airbyte/pull/36600) | Migrate to latest Airbyte-CDK. | | 1.1.2 | 2024-02-09 | [35092](https://github.com/airbytehq/airbyte/pull/35092) | Manage dependencies with Poetry. | | 1.1.1 | 2024-01-11 | [34157](https://github.com/airbytehq/airbyte/pull/34157) | Prepare for airbyte-lib | | 1.1.0 | 2023-12-20 | [32852](https://github.com/airbytehq/airbyte/pull/32852) | Add optional start_date for incremental streams | diff --git a/docs/integrations/sources/marketo.md b/docs/integrations/sources/marketo.md index b7a26c41b35dd..bab59af8d5f42 100644 --- a/docs/integrations/sources/marketo.md +++ b/docs/integrations/sources/marketo.md @@ -116,9 +116,10 @@ If the 50,000 limit is too stringent, contact Marketo support for a quota increa ## Changelog | Version | Date | Pull Request | Subject | -| :------- | :--------- | :------------------------------------------------------- | :----------------------------------------------------------------------------------------------- | -| 1.2.6 | 2024-02-09 | [35078](https://github.com/airbytehq/airbyte/pull/35078) | Manage dependencies with Poetry. | -| 1.2.5 | 2024-01-15 | [34246](https://github.com/airbytehq/airbyte/pull/34246) | prepare for airbyte-lib | +|:---------| :--------- | :------------------------------------------------------- |:-------------------------------------------------------------------------------------------------| +| `1.3.0` | 2024-03-19 | [36267](https://github.com/airbytehq/airbyte/pull/36267) | Pin airbyte-cdk version to `^0` | +| `1.2.6` | 2024-02-09 | [35078](https://github.com/airbytehq/airbyte/pull/35078) | Manage dependencies with Poetry. | +| `1.2.5` | 2024-01-15 | [34246](https://github.com/airbytehq/airbyte/pull/34246) | prepare for airbyte-lib | | `1.2.4` | 2024-01-08 | [33999](https://github.com/airbytehq/airbyte/pull/33999) | Fix for `Export daily quota exceeded` | | `1.2.3` | 2023-08-02 | [28999](https://github.com/airbytehq/airbyte/pull/28999) | Fix for ` _csv.Error: line contains NUL` | | `1.2.2` | 2023-10-19 | [31599](https://github.com/airbytehq/airbyte/pull/31599) | Base image migration: remove Dockerfile and use the python-connector-base image | diff --git a/docs/integrations/sources/microsoft-onedrive.md b/docs/integrations/sources/microsoft-onedrive.md index 4c7ce624213e4..f5ac00ffed8bd 100644 --- a/docs/integrations/sources/microsoft-onedrive.md +++ b/docs/integrations/sources/microsoft-onedrive.md @@ -21,16 +21,17 @@ This page contains the setup guide and reference information for the Microsoft O 3. On the Set up the source page, select **Microsoft OneDrive** from the Source type dropdown. 4. Enter the name for the Microsoft OneDrive connector. 5. Enter **Drive Name**. To find your drive name go to settings and at the top of setting menu you can find the name of your drive. -6. Enter **Folder Path**. -7. The **OAuth2.0** authorization method is selected by default. Click **Authenticate your Microsoft OneDrive account**. Log in and authorize your Microsoft account. -8. For **Start Date**, enter the date in YYYY-MM-DD format. The data added on and after this date will be replicated. -9. Add a stream: - 1. Write the **File Type** - 2. In the **Format** box, use the dropdown menu to select the format of the files you'd like to replicate. The supported formats are **CSV**, **Parquet**, **Avro** and **JSONL**. Toggling the **Optional fields** button within the **Format** box will allow you to enter additional configurations based on the selected format. For a detailed breakdown of these settings, refer to the [File Format section](#file-format-settings) below. - 3. Give a **Name** to the stream - 4. (Optional) - If you want to enforce a specific schema, you can enter a **Input schema**. By default, this value is set to `{}` and will automatically infer the schema from the file\(s\) you are replicating. For details on providing a custom schema, refer to the [User Schema section](#user-schema). - 5. Optionally, enter the **Globs** which dictates which files to be synced. This is a regular expression that allows Airbyte to pattern match the specific files to replicate. If you are replicating all the files within your bucket, use `**` as the pattern. For more precise pattern matching options, refer to the [Path Patterns section](#path-patterns) below. -10. Click **Set up source** +6. Select **Search Scope**. Specifies the location(s) to search for files. Valid options are 'ACCESSIBLE_DRIVES' to search in the selected OneDrive drive, 'SHARED_ITEMS' for shared items the user has access to, and 'ALL' to search both. Default value is 'ALL'. +7. Enter **Folder Path**. Leave empty to search all folders of the drives. This does not apply to shared items. +8. The **OAuth2.0** authorization method is selected by default. Click **Authenticate your Microsoft OneDrive account**. Log in and authorize your Microsoft account. +9. For **Start Date**, enter the date in YYYY-MM-DD format. The data added on and after this date will be replicated. +10. Add a stream: + 1. Write the **File Type** + 2. In the **Format** box, use the dropdown menu to select the format of the files you'd like to replicate. The supported formats are **CSV**, **Parquet**, **Avro** and **JSONL**. Toggling the **Optional fields** button within the **Format** box will allow you to enter additional configurations based on the selected format. For a detailed breakdown of these settings, refer to the [File Format section](#file-format-settings) below. + 3. Give a **Name** to the stream + 4. (Optional) - If you want to enforce a specific schema, you can enter a **Input schema**. By default, this value is set to `{}` and will automatically infer the schema from the file\(s\) you are replicating. For details on providing a custom schema, refer to the [User Schema section](#user-schema). + 5. Optionally, enter the **Globs** which dictates which files to be synced. This is a regular expression that allows Airbyte to pattern match the specific files to replicate. If you are replicating all the files within your bucket, use `**` as the pattern. For more precise pattern matching options, refer to the [Path Patterns section](#path-patterns) below. +11. Click **Set up source** @@ -80,18 +81,19 @@ This source requires **Application permissions**. Follow these [instructions](ht 3. On the **Set up** the source page, select **Microsoft OneDrive** from the Source type dropdown. 4. Enter the name for the Microsoft OneDrive connector. 5. Enter **Drive Name**. To find your drive name go to settings and at the top of setting menu you can find the name of your drive. -6. Enter **Folder Path**. -7. Switch to **Service Key Authentication** -8. For **User Practical Name**, enter the [UPN](https://learn.microsoft.com/en-us/sharepoint/list-onedrive-urls) for your user. -9. Enter **Tenant ID**, **Client ID** and **Client secret**. -10. For **Start Date**, enter the date in YYYY-MM-DD format. The data added on and after this date will be replicated. -11. Add a stream: +6. Select **Search Scope**. Specifies the location(s) to search for files. Valid options are 'ACCESSIBLE_DRIVES' to search in the selected OneDrive drive, 'SHARED_ITEMS' for shared items the user has access to, and 'ALL' to search both. Default value is 'ALL'. +7. Enter **Folder Path**. Leave empty to search all folders of the drives. This does not apply to shared items. +8. Switch to **Service Key Authentication** +9. For **User Practical Name**, enter the [UPN](https://learn.microsoft.com/en-us/sharepoint/list-onedrive-urls) for your user. +10. Enter **Tenant ID**, **Client ID** and **Client secret**. +11. For **Start Date**, enter the date in YYYY-MM-DD format. The data added on and after this date will be replicated. +12. Add a stream: 1. Write the **File Type** 2. In the **Format** box, use the dropdown menu to select the format of the files you'd like to replicate. The supported formats are **CSV**, **Parquet**, **Avro** and **JSONL**. Toggling the **Optional fields** button within the **Format** box will allow you to enter additional configurations based on the selected format. For a detailed breakdown of these settings, refer to the [File Format section](#file-format-settings) below. 3. Give a **Name** to the stream 4. (Optional) - If you want to enforce a specific schema, you can enter a **Input schema**. By default, this value is set to `{}` and will automatically infer the schema from the file\(s\) you are replicating. For details on providing a custom schema, refer to the [User Schema section](#user-schema). 5. Optionally, enter the **Globs** which dictates which files to be synced. This is a regular expression that allows Airbyte to pattern match the specific files to replicate. If you are replicating all the files within your bucket, use `**` as the pattern. For more precise pattern matching options, refer to the [Path Patterns section](#path-patterns) below. -12. Click **Set up source** +13. Click **Set up source** @@ -119,14 +121,16 @@ The connector is restricted by normal Microsoft Graph [requests limitation](http ## Changelog -| Version | Date | Pull Request | Subject | -|:--------|:-----------|:---------------------------------------------------------|:------------------------------------------------------------------------| -| 0.1.8 | 2024-03-06 | [35858](https://github.com/airbytehq/airbyte/pull/35858) | Bump poetry.lock to upgrade transitive dependency | -| 0.1.7 | 2024-03-0q | [34936](https://github.com/airbytehq/airbyte/pull/34936) | Enable in Cloud | -| 0.1.6 | 2024-02-06 | [34936](https://github.com/airbytehq/airbyte/pull/34936) | Bump CDK version to avoid missing SyncMode errors | -| 0.1.5 | 2024-01-30 | [34681](https://github.com/airbytehq/airbyte/pull/34681) | Unpin CDK version to make compatible with the Concurrent CDK | -| 0.1.4 | 2024-01-30 | [34661](https://github.com/airbytehq/airbyte/pull/34661) | Pin CDK version until upgrade for compatibility with the Concurrent CDK | -| 0.1.3 | 2024-01-24 | [34478](https://github.com/airbytehq/airbyte/pull/34478) | Fix OAuth | -| 0.1.2 | 2021-12-22 | [33745](https://github.com/airbytehq/airbyte/pull/33745) | Add ql and sl to metadata | -| 0.1.1 | 2021-12-15 | [33758](https://github.com/airbytehq/airbyte/pull/33758) | Fix for docs name | -| 0.1.0 | 2021-12-06 | [32655](https://github.com/airbytehq/airbyte/pull/32655) | New source | +| Version | Date | Pull Request | Subject | +|:--------|:-----------|:---------------------------------------------------------|:------------------------------------------------------------------------------------------------| +| 0.2.0 | 2024-03-12 | [35849](https://github.com/airbytehq/airbyte/pull/35849) | Add fetching shared items | +| 0.1.9 | 2024-03-11 | [35956](https://github.com/airbytehq/airbyte/pull/35956) | Pin `transformers` transitive dependency | +| 0.1.8 | 2024-03-06 | [35858](https://github.com/airbytehq/airbyte/pull/35858) | Bump poetry.lock to upgrade transitive dependency | +| 0.1.7 | 2024-03-04 | [35584](https://github.com/airbytehq/airbyte/pull/35584) | Enable in Cloud | +| 0.1.6 | 2024-02-06 | [34936](https://github.com/airbytehq/airbyte/pull/34936) | Bump CDK version to avoid missing SyncMode errors | +| 0.1.5 | 2024-01-30 | [34681](https://github.com/airbytehq/airbyte/pull/34681) | Unpin CDK version to make compatible with the Concurrent CDK | +| 0.1.4 | 2024-01-30 | [34661](https://github.com/airbytehq/airbyte/pull/34661) | Pin CDK version until upgrade for compatibility with the Concurrent CDK | +| 0.1.3 | 2024-01-24 | [34478](https://github.com/airbytehq/airbyte/pull/34478) | Fix OAuth | +| 0.1.2 | 2021-12-22 | [33745](https://github.com/airbytehq/airbyte/pull/33745) | Add ql and sl to metadata | +| 0.1.1 | 2021-12-15 | [33758](https://github.com/airbytehq/airbyte/pull/33758) | Fix for docs name | +| 0.1.0 | 2021-12-06 | [32655](https://github.com/airbytehq/airbyte/pull/32655) | New source | diff --git a/docs/integrations/sources/microsoft-sharepoint.md b/docs/integrations/sources/microsoft-sharepoint.md index db706d7333553..15af897c15a71 100644 --- a/docs/integrations/sources/microsoft-sharepoint.md +++ b/docs/integrations/sources/microsoft-sharepoint.md @@ -25,16 +25,17 @@ This page contains the setup guide and reference information for the Microsoft S 3. On the Set up the source page, select **Microsoft SharePoint** from the Source type dropdown. 4. Enter the name for the Microsoft SharePoint connector. 5. Enter **Drive Name**. To find your drive name go to settings and at the top of setting menu you can find the name of your drive. -6. Enter **Folder Path**. -7. The **OAuth2.0** authorization method is selected by default. Click **Authenticate your Microsoft SharePoint account**. Log in and authorize your Microsoft account. -8. For **Start Date**, enter the date in YYYY-MM-DD format. The data added on and after this date will be replicated. -9. Add a stream: +6. Select **Search Scope**. Specifies the location(s) to search for files. Valid options are 'ACCESSIBLE_DRIVES' for all SharePoint drives the user can access, 'SHARED_ITEMS' for shared items the user has access to, and 'ALL' to search both. Default value is 'ALL'. +7. Enter **Folder Path**. Leave empty to search all folders of the drives. This does not apply to shared items. +8. The **OAuth2.0** authorization method is selected by default. Click **Authenticate your Microsoft SharePoint account**. Log in and authorize your Microsoft account. +9. For **Start Date**, enter the date in YYYY-MM-DD format. The data added on and after this date will be replicated. +10. Add a stream: 1. Write the **File Type** 2. In the **Format** box, use the dropdown menu to select the format of the files you'd like to replicate. The supported formats are **CSV**, **Parquet**, **Avro** and **JSONL**. Toggling the **Optional fields** button within the **Format** box will allow you to enter additional configurations based on the selected format. For a detailed breakdown of these settings, refer to the [File Format section](#file-format-settings) below. 3. Give a **Name** to the stream 4. (Optional) - If you want to enforce a specific schema, you can enter a **Input schema**. By default, this value is set to `{}` and will automatically infer the schema from the file\(s\) you are replicating. For details on providing a custom schema, refer to the [User Schema section](#user-schema). 5. Optionally, enter the **Globs** which dictates which files to be synced. This is a regular expression that allows Airbyte to pattern match the specific files to replicate. If you are replicating all the files within your bucket, use `**` as the pattern. For more precise pattern matching options, refer to the [Path Patterns section](#path-patterns) below. -10. Click **Set up source** +11. Click **Set up source** @@ -86,18 +87,19 @@ This source requires **Application permissions**. Follow these [instructions](ht 3. On the **Set up** the source page, select **Microsoft SharePoint** from the Source type dropdown. 4. Enter the name for the Microsoft SharePoint connector. 5. Enter **Drive Name**. To find your drive name go to settings and at the top of setting menu you can find the name of your drive. -6. Enter **Folder Path**. -7. Switch to **Service Key Authentication** -8. For **User Practical Name**, enter the [UPN](https://learn.microsoft.com/en-us/sharepoint/list-onedrive-urls) for your user. -9. Enter **Tenant ID**, **Client ID** and **Client secret**. -10. For **Start Date**, enter the date in YYYY-MM-DD format. The data added on and after this date will be replicated. -11. Add a stream: +6. Select **Search Scope**. Specifies the location(s) to search for files. Valid options are 'ACCESSIBLE_DRIVES' for all SharePoint drives the user can access, 'SHARED_ITEMS' for shared items the user has access to, and 'ALL' to search both. Default value is 'ALL'. +7. Enter **Folder Path**. Leave empty to search all folders of the drives. This does not apply to shared items. +8. Switch to **Service Key Authentication** +9. For **User Practical Name**, enter the [UPN](https://learn.microsoft.com/en-us/sharepoint/list-onedrive-urls) for your user. +10. Enter **Tenant ID**, **Client ID** and **Client secret**. +11. For **Start Date**, enter the date in YYYY-MM-DD format. The data added on and after this date will be replicated. +12. Add a stream: 1. Write the **File Type** 2. In the **Format** box, use the dropdown menu to select the format of the files you'd like to replicate. The supported formats are **CSV**, **Parquet**, **Avro** and **JSONL**. Toggling the **Optional fields** button within the **Format** box will allow you to enter additional configurations based on the selected format. For a detailed breakdown of these settings, refer to the [File Format section](#file-format-settings) below. 3. Give a **Name** to the stream 4. (Optional) - If you want to enforce a specific schema, you can enter a **Input schema**. By default, this value is set to `{}` and will automatically infer the schema from the file\(s\) you are replicating. For details on providing a custom schema, refer to the [User Schema section](#user-schema). 5. Optionally, enter the **Globs** which dictates which files to be synced. This is a regular expression that allows Airbyte to pattern match the specific files to replicate. If you are replicating all the files within your bucket, use `**` as the pattern. For more precise pattern matching options, refer to the [Path Patterns section](#path-patterns) below. -12. Click **Set up source** +13. Click **Set up source** @@ -127,8 +129,11 @@ The connector is restricted by normal Microsoft Graph [requests limitation](http ## Changelog -| Version | Date | Pull Request | Subject | -|:--------|:-----------|:---------------------------------------------------------|:-----------| -| 0.1.0 | 2024-01-25 | [33537](https://github.com/airbytehq/airbyte/pull/33537) | New source | +| Version | Date | Pull Request | Subject | +|:--------|:-----------|:---------------------------------------------------------|:--------------------------| +| 0.2.2 | 2024-03-28 | [36573](https://github.com/airbytehq/airbyte/pull/36573) | Update QL to 400 | +| 0.2.1 | 2024-03-22 | [36381](https://github.com/airbytehq/airbyte/pull/36381) | Unpin CDK | +| 0.2.0 | 2024-03-06 | [35830](https://github.com/airbytehq/airbyte/pull/35830) | Add fetching shared items | +| 0.1.0 | 2024-01-25 | [33537](https://github.com/airbytehq/airbyte/pull/33537) | New source | diff --git a/docs/integrations/sources/microsoft-teams-migrations.md b/docs/integrations/sources/microsoft-teams-migrations.md index 5610ecd721adf..093f37714241f 100644 --- a/docs/integrations/sources/microsoft-teams-migrations.md +++ b/docs/integrations/sources/microsoft-teams-migrations.md @@ -1,4 +1,4 @@ -# Microsoft teams Migration Guide +# Microsoft Teams Migration Guide ## Upgrading to 1.0.0 diff --git a/docs/integrations/sources/microsoft-teams.md b/docs/integrations/sources/microsoft-teams.md index cc3846a489d30..7e306bffaf075 100644 --- a/docs/integrations/sources/microsoft-teams.md +++ b/docs/integrations/sources/microsoft-teams.md @@ -161,6 +161,7 @@ Token acquiring implemented by [instantiate](https://docs.microsoft.com/en-us/az | Version | Date | Pull Request | Subject | |:------- |:---------- | :------------------------------------------------------- | :----------------------------- | +| 1.1.0 | 2024-03-24 | [36223](https://github.com/airbytehq/airbyte/pull/36223) | Migration to low code | | 1.0.0 | 2024-01-04 | [33959](https://github.com/airbytehq/airbyte/pull/33959) | Schema updates | | 0.2.5 | 2021-12-14 | [8429](https://github.com/airbytehq/airbyte/pull/8429) | Update titles and descriptions | | 0.2.4 | 2021-12-07 | [7807](https://github.com/airbytehq/airbyte/pull/7807) | Implement OAuth support | diff --git a/docs/integrations/sources/mixpanel.md b/docs/integrations/sources/mixpanel.md index 359318423e2b1..0dc97291c7dd0 100644 --- a/docs/integrations/sources/mixpanel.md +++ b/docs/integrations/sources/mixpanel.md @@ -55,6 +55,7 @@ Syncing huge date windows may take longer due to Mixpanel's low API rate-limits | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:------------------------------------------------------------------------------------------------------------| +| 2.2.0 | 2024-03-19 | [36267](https://github.com/airbytehq/airbyte/pull/36267) | Pin airbyte-cdk version to `^0` | | 2.1.0 | 2024-02-13 | [35203](https://github.com/airbytehq/airbyte/pull/35203) | Update stream Funnels schema with custom_event_id and custom_event fields | | 2.0.2 | 2024-02-12 | [35151](https://github.com/airbytehq/airbyte/pull/35151) | Manage dependencies with Poetry. | | 2.0.1 | 2024-01-11 | [34147](https://github.com/airbytehq/airbyte/pull/34147) | prepare for airbyte-lib | diff --git a/docs/integrations/sources/monday.md b/docs/integrations/sources/monday.md index f8a98c6a258d2..cff86d5c7c071 100644 --- a/docs/integrations/sources/monday.md +++ b/docs/integrations/sources/monday.md @@ -74,6 +74,8 @@ The Monday connector should not run into Monday API limitations under normal usa | Version | Date | Pull Request | Subject | |:--------|:-----------|:----------------------------------------------------------|:------------------------------------------------------------------------| +| 2.1.1 | 2024-04-05 | [36717](https://github.com/airbytehq/airbyte/pull/36717) | Add handling of complexityBudgetExhausted error. | +| 2.1.0 | 2024-04-03 | [36746](https://github.com/airbytehq/airbyte/pull/36746) | Pin airbyte-cdk version to `^0` | | 2.0.4 | 2024-02-28 | [35696](https://github.com/airbytehq/airbyte/pull/35696) | Fix extraction for `null` value in stream `Activity logs` | | 2.0.3 | 2024-02-21 | [35506](https://github.com/airbytehq/airbyte/pull/35506) | Support for column values of the mirror type for the `Items` stream. | | 2.0.2 | 2024-02-12 | [35146](https://github.com/airbytehq/airbyte/pull/35146) | Manage dependencies with Poetry. | diff --git a/docs/integrations/sources/mongodb-v2.md b/docs/integrations/sources/mongodb-v2.md index 7bde62a2ff67c..7fdf6d2cd6ca8 100644 --- a/docs/integrations/sources/mongodb-v2.md +++ b/docs/integrations/sources/mongodb-v2.md @@ -4,6 +4,7 @@ Airbyte's certified MongoDB connector offers the following features: * [Change Data Capture (CDC)](https://docs.airbyte.com/understanding-airbyte/cdc) via [MongoDB's change streams](https://www.mongodb.com/docs/manual/changeStreams/)/[Replica Set Oplog](https://www.mongodb.com/docs/manual/core/replica-set-oplog/). * Reliable replication of any collection size with [checkpointing](https://docs.airbyte.com/understanding-airbyte/airbyte-protocol/#state--checkpointing) and chunking of data reads. +* ***NEW*** Full refresh syncing of collections. ## Quick Start @@ -131,11 +132,17 @@ The source will test the connection to the MongoDB instance upon creation. ## Replication Methods The MongoDB source utilizes change data capture (CDC) as a reliable way to keep your data up to date. +In addtion MongoDB source now allows for syncing in a full refresh mode. ### CDC Airbyte utilizes [the change streams feature](https://www.mongodb.com/docs/manual/changeStreams/) of a [MongoDB replica set](https://www.mongodb.com/docs/manual/replication/) to incrementally capture inserts, updates and deletes using a replication plugin. To learn more how Airbyte implements CDC, refer to [Change Data Capture (CDC)](https://docs.airbyte.com/understanding-airbyte/cdc/). +### Full Refresh +The Full refresh sync mode added in v4.0.0 allows for reading a the entire contents of a collection, repeatedly. +The MongoDB source connector is using checkpointing in Full Refresh read so a sync job that failed for netwrok error for example, +Rather than starting over it will continue its full refresh read from a last known point. + ### Schema Enforcement By default the MongoDB V2 source connector enforces a schema. This means that while setting up a connector it will sample a configureable number of docuemnts and will create a set of fields to sync. From that set of fields, an admin can then deselect specific fields from the Replication screen to filter them out from the sync. @@ -214,6 +221,11 @@ For more information regarding configuration parameters, please see [MongoDb Doc | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:----------------------------------------------------------------------------------------------------------| +| 1.3.4 | 2024-04-16 | [37348](https://github.com/airbytehq/airbyte/pull/37348) | Populate null values in airbyte record messages. | +| 1.3.3 | 2024-04-05 | [36872](https://github.com/airbytehq/airbyte/pull/36872) | Update to connector's metadat definition. | +| 1.3.2 | 2024-04-04 | [36845](https://github.com/airbytehq/airbyte/pull/36845) | Adopt Kotlin CDK. | +| 1.3.1 | 2024-04-04 | [36837](https://github.com/airbytehq/airbyte/pull/36837) | Adopt CDK 0.28.0. | +| 1.3.0 | 2024-03-15 | [35669](https://github.com/airbytehq/airbyte/pull/35669) | Full refresh read of collections. | | 1.2.16 | 2024-03-06 | [35669](https://github.com/airbytehq/airbyte/pull/35669) | State message will now include record count. | | 1.2.15 | 2024-02-27 | [35673](https://github.com/airbytehq/airbyte/pull/35673) | Consume user provided connection string. | | 1.2.14 | 2024-02-27 | [35675](https://github.com/airbytehq/airbyte/pull/35675) | Fix invalid cdc error message. | diff --git a/docs/integrations/sources/mssql.md b/docs/integrations/sources/mssql.md index c28ecd6d13722..7368a78ea9ef7 100644 --- a/docs/integrations/sources/mssql.md +++ b/docs/integrations/sources/mssql.md @@ -1,9 +1,16 @@ # Microsoft SQL Server (MSSQL) + Airbyte's certified MSSQL connector offers the following features: -* Multiple methods of keeping your data fresh, including [Change Data Capture (CDC)](https://docs.airbyte.com/understanding-airbyte/cdc) using the [binlog](https://dev.mysql.com/doc/refman/8.0/en/binary-log.html). -* Incremental as well as Full Refresh [sync modes](https://docs.airbyte.com/cloud/core-concepts#connection-sync-modes), providing flexibility in how data is delivered to your destination. -* Reliable replication at any table size with [checkpointing](https://docs.airbyte.com/understanding-airbyte/airbyte-protocol/#state--checkpointing) and chunking of database reads. +- Multiple methods of keeping your data fresh, including + [Change Data Capture (CDC)](https://docs.airbyte.com/understanding-airbyte/cdc) using the + [binlog](https://dev.mysql.com/doc/refman/8.0/en/binary-log.html). +- Incremental as well as Full Refresh + [sync modes](https://docs.airbyte.com/cloud/core-concepts#connection-sync-modes), providing + flexibility in how data is delivered to your destination. +- Reliable replication at any table size with + [checkpointing](https://docs.airbyte.com/understanding-airbyte/airbyte-protocol/#state--checkpointing) + and chunking of database reads. ## Features @@ -17,57 +24,85 @@ Airbyte's certified MSSQL connector offers the following features: | SSH Tunnel Connection | Yes | | | Namespaces | Yes | Enabled by default | -The MSSQL source does not alter the schema present in your database. Depending on the destination connected to this source, however, the schema may be altered. See the destination's documentation for more details. +The MSSQL source does not alter the schema present in your database. Depending on the destination +connected to this source, however, the schema may be altered. See the destination's documentation +for more details. ## Getting Started #### Requirements -1. MSSQL Server `Azure SQL Database`, `Azure Synapse Analytics`, `Azure SQL Managed Instance`, `SQL Server 2019`, `SQL Server 2017`, `SQL Server 2016`, `SQL Server 2014`, `SQL Server 2012`, `PDW 2008R2 AU34`. +1. MSSQL Server `Azure SQL Database`, `Azure Synapse Analytics`, `Azure SQL Managed Instance`, + `SQL Server 2019`, `SQL Server 2017`, `SQL Server 2016`, `SQL Server 2014`, `SQL Server 2012`, + `PDW 2008R2 AU34`. 2. Create a dedicated read-only Airbyte user with access to all tables needed for replication -3. If you want to use CDC, please see [the relevant section below](mssql.md#change-data-capture-cdc) for further setup requirements +3. If you want to use CDC, please see [the relevant section below](mssql.md#change-data-capture-cdc) + for further setup requirements #### 1. Make sure your database is accessible from the machine running Airbyte -This is dependent on your networking setup. The easiest way to verify if Airbyte is able to connect to your MSSQL instance is via the check connection tool in the UI. +This is dependent on your networking setup. The easiest way to verify if Airbyte is able to connect +to your MSSQL instance is via the check connection tool in the UI. #### 2. Create a dedicated read-only user with access to the relevant tables \(Recommended but optional\) -This step is optional but highly recommended to allow for better permission control and auditing. Alternatively, you can use Airbyte with an existing user in your database. +This step is optional but highly recommended to allow for better permission control and auditing. +Alternatively, you can use Airbyte with an existing user in your database. #### 3. Your database user should now be ready for use with Airbyte! #### Airbyte Cloud -On Airbyte Cloud, only secured connections to your MSSQL instance are supported in source configuration. You may either configure your connection using one of the supported SSL Methods or by using an SSH Tunnel. + +On Airbyte Cloud, only secured connections to your MSSQL instance are supported in source +configuration. You may either configure your connection using one of the supported SSL Methods or by +using an SSH Tunnel. ## Change Data Capture \(CDC\) -We use [SQL Server's change data capture feature](https://docs.microsoft.com/en-us/sql/relational-databases/track-changes/about-change-data-capture-sql-server?view=sql-server-2017) with transaction logs to capture row-level `INSERT`, `UPDATE` and `DELETE` operations that occur on CDC-enabled tables. +We use +[SQL Server's change data capture feature](https://docs.microsoft.com/en-us/sql/relational-databases/track-changes/about-change-data-capture-sql-server?view=sql-server-2017) +with transaction logs to capture row-level `INSERT`, `UPDATE` and `DELETE` operations that occur on +CDC-enabled tables. -Some extra setup requiring at least _db_owner_ permissions on the database\(s\) you intend to sync from will be required \(detailed [below](mssql.md#setting-up-cdc-for-mssql)\). +Some extra setup requiring at least _db_owner_ permissions on the database\(s\) you intend to sync +from will be required \(detailed [below](mssql.md#setting-up-cdc-for-mssql)\). -Please read the [CDC docs](../../understanding-airbyte/cdc.md) for an overview of how Airbyte approaches CDC. +Please read the [CDC docs](../../understanding-airbyte/cdc.md) for an overview of how Airbyte +approaches CDC. ### Should I use CDC for MSSQL? -- If you need a record of deletions and can accept the limitations posted below, CDC is the way to go! -- If your data set is small and/or you just want a snapshot of your table in the destination, consider using Full Refresh replication for your table instead of CDC. -- If the limitations below prevent you from using CDC and your goal is to maintain a snapshot of your table in the destination, consider using non-CDC incremental and occasionally reset the data and re-sync. -- If your table has a primary key but doesn't have a reasonable cursor field for incremental syncing \(i.e. `updated_at`\), CDC allows you to sync your table incrementally. +- If you need a record of deletions and can accept the limitations posted below, CDC is the way to + go! +- If your data set is small and/or you just want a snapshot of your table in the destination, + consider using Full Refresh replication for your table instead of CDC. +- If the limitations below prevent you from using CDC and your goal is to maintain a snapshot of + your table in the destination, consider using non-CDC incremental and occasionally reset the data + and re-sync. +- If your table has a primary key but doesn't have a reasonable cursor field for incremental syncing + \(i.e. `updated_at`\), CDC allows you to sync your table incrementally. #### CDC Limitations -- Make sure to read our [CDC docs](../../understanding-airbyte/cdc.md) to see limitations that impact all databases using CDC replication. -- `hierarchyid` and `sql_variant` types are not processed in CDC migration type (not supported by Debezium). For more details please check -[this ticket](https://github.com/airbytehq/airbyte/issues/14411) +- Make sure to read our [CDC docs](../../understanding-airbyte/cdc.md) to see limitations that + impact all databases using CDC replication. +- `hierarchyid` and `sql_variant` types are not processed in CDC migration type (not supported by + Debezium). For more details please check + [this ticket](https://github.com/airbytehq/airbyte/issues/14411) - CDC is only available for SQL Server 2016 Service Pack 1 \(SP1\) and later. -- _db_owner_ \(or higher\) permissions are required to perform the [neccessary setup](mssql.md#setting-up-cdc-for-mssql) for CDC. -- On Linux, CDC is not supported on versions earlier than SQL Server 2017 CU18 \(SQL Server 2019 is supported\). -- Change data capture cannot be enabled on tables with a clustered columnstore index. \(It can be enabled on tables with a _non-clustered_ columnstore index\). -- The SQL Server CDC feature processes changes that occur in user-created tables only. You cannot enable CDC on the SQL Server master database. -- Using variables with partition switching on databases or tables with change data capture \(CDC\) is not supported for the `ALTER TABLE` ... `SWITCH TO` ... `PARTITION` ... statement +- _db_owner_ \(or higher\) permissions are required to perform the + [neccessary setup](mssql.md#setting-up-cdc-for-mssql) for CDC. +- On Linux, CDC is not supported on versions earlier than SQL Server 2017 CU18 \(SQL Server 2019 is + supported\). +- Change data capture cannot be enabled on tables with a clustered columnstore index. \(It can be + enabled on tables with a _non-clustered_ columnstore index\). +- The SQL Server CDC feature processes changes that occur in user-created tables only. You cannot + enable CDC on the SQL Server master database. +- Using variables with partition switching on databases or tables with change data capture \(CDC\) + is not supported for the `ALTER TABLE` ... `SWITCH TO` ... `PARTITION` ... statement - Our CDC implementation uses at least once delivery for all change records. -- Read more on CDC limitations in the [Microsoft docs](https://docs.microsoft.com/en-us/sql/relational-databases/track-changes/about-change-data-capture-sql-server?view=sql-server-2017#limitations). +- Read more on CDC limitations in the + [Microsoft docs](https://docs.microsoft.com/en-us/sql/relational-databases/track-changes/about-change-data-capture-sql-server?view=sql-server-2017#limitations). ### Setting up CDC for MSSQL @@ -75,7 +110,8 @@ Please read the [CDC docs](../../understanding-airbyte/cdc.md) for an overview o MS SQL Server provides some built-in stored procedures to enable CDC. -- To enable CDC, a SQL Server administrator with the necessary privileges \(_db_owner_ or _sysadmin_\) must first run a query to enable CDC at the database level. +- To enable CDC, a SQL Server administrator with the necessary privileges \(_db_owner_ or + _sysadmin_\) must first run a query to enable CDC at the database level. ```text USE {database name} @@ -99,18 +135,33 @@ MS SQL Server provides some built-in stored procedures to enable CDC. GO ``` - - \[1\] Specifies a role which will gain `SELECT` permission on the captured columns of the source table. We suggest putting a value here so you can use this role in the next step but you can also set the value of @role*name to `NULL` to allow only \_sysadmin* and _db_owner_ to have access. Be sure that the credentials used to connect to the source in Airbyte align with this role so that Airbyte can access the cdc tables. - - \[2\] Specifies the filegroup where SQL Server places the change table. We recommend creating a separate filegroup for CDC but you can leave this parameter out to use the default filegroup. - - \[3\] If 0, only the support functions to query for all changes are generated. If 1, the functions that are needed to query for net changes are also generated. If supports_net_changes is set to 1, index_name must be specified, or the source table must have a defined primary key. - -- \(For more details on parameters, see the [Microsoft doc page](https://docs.microsoft.com/en-us/sql/relational-databases/system-stored-procedures/sys-sp-cdc-enable-table-transact-sql?view=sql-server-ver15) for this stored procedure\). -- If you have many tables to enable CDC on and would like to avoid having to run this query one-by-one for every table, [this script](http://www.techbrothersit.com/2013/06/change-data-capture-cdc-sql-server_69.html) might help! - -For further detail, see the [Microsoft docs on enabling and disabling CDC](https://docs.microsoft.com/en-us/sql/relational-databases/track-changes/enable-and-disable-change-data-capture-sql-server?view=sql-server-ver15). + - \[1\] Specifies a role which will gain `SELECT` permission on the captured columns of the source + table. We suggest putting a value here so you can use this role in the next step but you can + also set the value of @role*name to `NULL` to allow only \_sysadmin* and _db_owner_ to have + access. Be sure that the credentials used to connect to the source in Airbyte align with this + role so that Airbyte can access the cdc tables. + - \[2\] Specifies the filegroup where SQL Server places the change table. We recommend creating a + separate filegroup for CDC but you can leave this parameter out to use the default filegroup. + - \[3\] If 0, only the support functions to query for all changes are generated. If 1, the + functions that are needed to query for net changes are also generated. If supports_net_changes + is set to 1, index_name must be specified, or the source table must have a defined primary key. + +- \(For more details on parameters, see the + [Microsoft doc page](https://docs.microsoft.com/en-us/sql/relational-databases/system-stored-procedures/sys-sp-cdc-enable-table-transact-sql?view=sql-server-ver15) + for this stored procedure\). +- If you have many tables to enable CDC on and would like to avoid having to run this query + one-by-one for every table, + [this script](http://www.techbrothersit.com/2013/06/change-data-capture-cdc-sql-server_69.html) + might help! + +For further detail, see the +[Microsoft docs on enabling and disabling CDC](https://docs.microsoft.com/en-us/sql/relational-databases/track-changes/enable-and-disable-change-data-capture-sql-server?view=sql-server-ver15). #### 2. Enable snapshot isolation -- When a sync runs for the first time using CDC, Airbyte performs an initial consistent snapshot of your database. To avoid acquiring table locks, Airbyte uses _snapshot isolation_, allowing simultaneous writes by other database clients. This must be enabled on the database like so: +- When a sync runs for the first time using CDC, Airbyte performs an initial consistent snapshot of + your database. To avoid acquiring table locks, Airbyte uses _snapshot isolation_, allowing + simultaneous writes by other database clients. This must be enabled on the database like so: ```text ALTER DATABASE {database name} @@ -119,7 +170,10 @@ For further detail, see the [Microsoft docs on enabling and disabling CDC](https #### 3. Create a user and grant appropriate permissions -- Rather than use _sysadmin_ or _db_owner_ credentials, we recommend creating a new user with the relevant CDC access for use with Airbyte. First let's create the login and user and add to the [db_datareader](https://docs.microsoft.com/en-us/sql/relational-databases/security/authentication-access/database-level-roles?view=sql-server-ver15) role: +- Rather than use _sysadmin_ or _db_owner_ credentials, we recommend creating a new user with the + relevant CDC access for use with Airbyte. First let's create the login and user and add to the + [db_datareader](https://docs.microsoft.com/en-us/sql/relational-databases/security/authentication-access/database-level-roles?view=sql-server-ver15) + role: ```text USE {database name}; @@ -135,14 +189,19 @@ For further detail, see the [Microsoft docs on enabling and disabling CDC](https EXEC sp_addrolemember '{role name}', '{user name}'; ``` - - This should be enough access, but if you run into problems, try also directly granting the user `SELECT` access on the cdc schema: + - This should be enough access, but if you run into problems, try also directly granting the user + `SELECT` access on the cdc schema: ```text USE {database name}; GRANT SELECT ON SCHEMA :: [cdc] TO {user name}; ``` - - If feasible, granting this user 'VIEW SERVER STATE' permissions will allow Airbyte to check whether or not the [SQL Server Agent](https://docs.microsoft.com/en-us/sql/relational-databases/track-changes/about-change-data-capture-sql-server?view=sql-server-ver15#relationship-with-log-reader-agent) is running. This is preferred as it ensures syncs will fail if the CDC tables are not being updated by the Agent in the source database. + - If feasible, granting this user 'VIEW SERVER STATE' permissions will allow Airbyte to check + whether or not the + [SQL Server Agent](https://docs.microsoft.com/en-us/sql/relational-databases/track-changes/about-change-data-capture-sql-server?view=sql-server-ver15#relationship-with-log-reader-agent) + is running. This is preferred as it ensures syncs will fail if the CDC tables are not being + updated by the Agent in the source database. ```text USE master; @@ -151,8 +210,13 @@ For further detail, see the [Microsoft docs on enabling and disabling CDC](https #### 4. Extend the retention period of CDC data -- In SQL Server, by default, only three days of data are retained in the change tables. Unless you are running very frequent syncs, we suggest increasing this retention so that in case of a failure in sync or if the sync is paused, there is still some bandwidth to start from the last point in incremental sync. -- These settings can be changed using the stored procedure [sys.sp_cdc_change_job](https://docs.microsoft.com/en-us/sql/relational-databases/system-stored-procedures/sys-sp-cdc-change-job-transact-sql?view=sql-server-ver15) as below: +- In SQL Server, by default, only three days of data are retained in the change tables. Unless you + are running very frequent syncs, we suggest increasing this retention so that in case of a failure + in sync or if the sync is paused, there is still some bandwidth to start from the last point in + incremental sync. +- These settings can be changed using the stored procedure + [sys.sp_cdc_change_job](https://docs.microsoft.com/en-us/sql/relational-databases/system-stored-procedures/sys-sp-cdc-change-job-transact-sql?view=sql-server-ver15) + as below: ```text -- we recommend 14400 minutes (10 days) as retention period @@ -171,9 +235,11 @@ For further detail, see the [Microsoft docs on enabling and disabling CDC](https - MSSQL uses the SQL Server Agent - to [run the jobs necessary](https://docs.microsoft.com/en-us/sql/relational-databases/track-changes/about-change-data-capture-sql-server?view=sql-server-ver15#agent-jobs) + to + [run the jobs necessary](https://docs.microsoft.com/en-us/sql/relational-databases/track-changes/about-change-data-capture-sql-server?view=sql-server-ver15#agent-jobs) - for CDC. It is therefore vital that the Agent is operational in order for to CDC to work effectively. You can check + for CDC. It is therefore vital that the Agent is operational in order for to CDC to work + effectively. You can check the status of the SQL Server Agent as follows: @@ -183,20 +249,27 @@ For further detail, see the [Microsoft docs on enabling and disabling CDC](https - If you see something other than 'Running.' please follow - the [Microsoft docs](https://docs.microsoft.com/en-us/sql/ssms/agent/start-stop-or-pause-the-sql-server-agent-service?view=sql-server-ver15) + the + [Microsoft docs](https://docs.microsoft.com/en-us/sql/ssms/agent/start-stop-or-pause-the-sql-server-agent-service?view=sql-server-ver15) to start the service. ## Connection to MSSQL via an SSH Tunnel -Airbyte has the ability to connect to a MSSQL instance via an SSH Tunnel. The reason you might want to do this because it is not possible \(or against security policy\) to connect to the database directly \(e.g. it does not have a public IP address\). +Airbyte has the ability to connect to a MSSQL instance via an SSH Tunnel. The reason you might want +to do this because it is not possible \(or against security policy\) to connect to the database +directly \(e.g. it does not have a public IP address\). -When using an SSH tunnel, you are configuring Airbyte to connect to an intermediate server \(a.k.a. a bastion sever\) that _does_ have direct access to the database. Airbyte connects to the bastion and then asks the bastion to connect directly to the server. +When using an SSH tunnel, you are configuring Airbyte to connect to an intermediate server \(a.k.a. +a bastion sever\) that _does_ have direct access to the database. Airbyte connects to the bastion +and then asks the bastion to connect directly to the server. -Using this feature requires additional configuration, when creating the source. We will talk through what each piece of configuration means. +Using this feature requires additional configuration, when creating the source. We will talk through +what each piece of configuration means. 1. Configure all fields for the source as you normally would, except `SSH Tunnel Method`. -2. `SSH Tunnel Method` defaults to `No Tunnel` \(meaning a direct connection\). If you want to use an +2. `SSH Tunnel Method` defaults to `No Tunnel` \(meaning a direct connection\). If you want to use + an SSH Tunnel choose `SSH Key Authentication` or `Password Authentication`. @@ -204,19 +277,23 @@ Using this feature requires additional configuration, when creating the source. establishing the SSH Tunnel \(see below for more information on generating this key\). - 2. Choose `Password Authentication` if you will be using a password as your secret for establishing + 2. Choose `Password Authentication` if you will be using a password as your secret for + establishing the SSH Tunnel. -3. `SSH Tunnel Jump Server Host` refers to the intermediate \(bastion\) server that Airbyte will connect to. This should +3. `SSH Tunnel Jump Server Host` refers to the intermediate \(bastion\) server that Airbyte will + connect to. This should be a hostname or an IP Address. -4. `SSH Connection Port` is the port on the bastion server with which to make the SSH connection. The default port for +4. `SSH Connection Port` is the port on the bastion server with which to make the SSH connection. + The default port for SSH connections is `22`, so unless you have explicitly changed something, go with the default. -5. `SSH Login Username` is the username that Airbyte should use when connection to the bastion server. This is NOT the +5. `SSH Login Username` is the username that Airbyte should use when connection to the bastion + server. This is NOT the MSSQL username. @@ -244,14 +321,21 @@ The connector expects an RSA key in PEM format. To generate this key: ssh-keygen -t rsa -m PEM -f myuser_rsa ``` -This produces the private key in pem format, and the public key remains in the standard format used by the `authorized_keys` file on your bastion host. The public key should be added to your bastion host to whichever user you want to use with Airbyte. The private key is provided via copy-and-paste to the Airbyte connector configuration screen, so it may log in to the bastion. +This produces the private key in pem format, and the public key remains in the standard format used +by the `authorized_keys` file on your bastion host. The public key should be added to your bastion +host to whichever user you want to use with Airbyte. The private key is provided via copy-and-paste +to the Airbyte connector configuration screen, so it may log in to the bastion. ## Data type mapping -MSSQL data types are mapped to the following data types when synchronizing data. You can check the test values examples [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/MssqlSourceComprehensiveTest.java). If you can't find the data type you are looking for or have any problems feel free to add a new test! +MSSQL data types are mapped to the following data types when synchronizing data. You can check the +test values examples +[here](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/MssqlSourceComprehensiveTest.java). +If you can't find the data type you are looking for or have any problems feel free to add a new +test! | MSSQL Type | Resulting Type | Notes | -| :------------------------------------------------------ |:------------------------| :---- | +| :------------------------------------------------------ | :---------------------- | :---- | | `bigint` | number | | | `binary` | string | | | `bit` | boolean | | @@ -284,14 +368,17 @@ MSSQL data types are mapped to the following data types when synchronizing data. | `varchar(max) COLLATE Latin1_General_100_CI_AI_SC_UTF8` | string | | | `xml` | string | | -If you do not see a type in this list, assume that it is coerced into a string. We are happy to take feedback on preferred mappings. +If you do not see a type in this list, assume that it is coerced into a string. We are happy to take +feedback on preferred mappings. ## Upgrading from 0.4.17 and older versions to 0.4.18 and newer versions -There is a backwards incompatible spec change between Microsoft SQL Source connector versions 0.4.17 and 0.4.18. As part of that spec change -`replication_method` configuration parameter was changed to `object` from `string`. +There is a backwards incompatible spec change between Microsoft SQL Source connector versions 0.4.17 +and 0.4.18. As part of that spec change `replication_method` configuration parameter was changed to +`object` from `string`. -In Microsoft SQL source connector versions 0.4.17 and older, `replication_method` configuration parameter was saved in the configuration database as follows: +In Microsoft SQL source connector versions 0.4.17 and older, `replication_method` configuration +parameter was saved in the configuration database as follows: ``` "replication_method": "STANDARD" @@ -305,9 +392,11 @@ Starting with version 0.4.18, `replication_method` configuration parameter is sa } ``` -After upgrading Microsoft SQL Source connector from 0.4.17 or older version to 0.4.18 or newer version you need to fix source configurations in the `actor` table -in Airbyte database. To do so, you need to run two SQL queries. Follow the instructions in [Airbyte documentation](https://docs.airbyte.com/operator-guides/configuring-airbyte-db/#accessing-the-default-database-located-in-docker-airbyte-db) to -run SQL queries on Airbyte database. +After upgrading Microsoft SQL Source connector from 0.4.17 or older version to 0.4.18 or newer +version you need to fix source configurations in the `actor` table in Airbyte database. To do so, +you need to run two SQL queries. Follow the instructions in +[Airbyte documentation](https://docs.airbyte.com/operator-guides/configuring-airbyte-db/#accessing-the-default-database-located-in-docker-airbyte-db) +to run SQL queries on Airbyte database. If you have connections with Microsoft SQL Source using _Standard_ replication method, run this SQL: @@ -316,7 +405,8 @@ update public.actor set configuration =jsonb_set(configuration, '{replication_me WHERE actor_definition_id ='b5ea17b1-f170-46dc-bc31-cc744ca984c1' AND (configuration->>'replication_method' = 'STANDARD'); ``` -If you have connections with Microsoft SQL Source using _Logicai Replication (CDC)_ method, run this SQL: +If you have connections with Microsoft SQL Source using _Logicai Replication (CDC)_ method, run this +SQL: ```sql update public.actor set configuration =jsonb_set(configuration, '{replication_method}', '{"method": "CDC"}', true) @@ -326,7 +416,20 @@ WHERE actor_definition_id ='b5ea17b1-f170-46dc-bc31-cc744ca984c1' AND (configura ## Changelog | Version | Date | Pull Request | Subject | -|:--------|:-----------|:------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------| +|:--------|:-----------| :---------------------------------------------------------------------------------------------------------------- |:------------------------------------------------------------------------------------------------------------------------------------------------| +| 4.0.13 | 2024-04-16 | [37111](https://github.com/airbytehq/airbyte/pull/37111) | Populate null values in record message. | +| 4.0.12 | 2024-04-15 | [37326](https://github.com/airbytehq/airbyte/pull/37326) | Allow up to 60 minutes of wait for the an initial CDC record. | +| 4.0.11 | 2024-04-15 | [37325](https://github.com/airbytehq/airbyte/pull/37325) | Populate airbyte_meta.changes + error handling. | +| 4.0.10 | 2024-04-15 | [37110](https://github.com/airbytehq/airbyte/pull/37110) | Internal cleanup. | +| 4.0.9 | 2024-04-10 | [36919](https://github.com/airbytehq/airbyte/pull/36919) | Fix a bug in conversion of null values. | +| 4.0.8 | 2024-04-05 | [36872](https://github.com/airbytehq/airbyte/pull/36872) | Update to connector's metadat definition. | +| 4.0.7 | 2024-04-03 | [36772](https://github.com/airbytehq/airbyte/pull/36772) | Adopt latest CDK. | +| 4.0.6 | 2024-03-25 | [36333](https://github.com/airbytehq/airbyte/pull/36333) | Deprecate Dbz state iterator. | +| 4.0.5 | 2024-03-21 | [36364](https://github.com/airbytehq/airbyte/pull/36364) | Allow up to 40 minutes of wait for the an initial CDC record. | +| 4.0.4 | 2024-03-20 | [36325](https://github.com/airbytehq/airbyte/pull/36325) | [Refactor] : Remove mssql initial source operations . | +| 4.0.3 | 2024-03-19 | [36263](https://github.com/airbytehq/airbyte/pull/36263) | Fix a failure seen in CDC with tables containing default values. | +| 4.0.2 | 2024-03-06 | [35792](https://github.com/airbytehq/airbyte/pull/35792) | Initial sync will now send record count in state message. | +| 4.0.1 | 2024-03-12 | [36011](https://github.com/airbytehq/airbyte/pull/36011) | Read correctly null values of columns with default value in CDC. | | 4.0.0 | 2024-03-06 | [35873](https://github.com/airbytehq/airbyte/pull/35873) | Terabyte-sized tables support, reliability improvements, bug fixes. | | 3.7.7 | 2024-03-06 | [35816](https://github.com/airbytehq/airbyte/pull/35816) | Fix query that was failing on a case sensitive server. | | 3.7.6 | 2024-03-04 | [35721](https://github.com/airbytehq/airbyte/pull/35721) | Fix tests | diff --git a/docs/integrations/sources/my-hours.md b/docs/integrations/sources/my-hours.md index 7bcb78e0c179c..c44093725335e 100644 --- a/docs/integrations/sources/my-hours.md +++ b/docs/integrations/sources/my-hours.md @@ -22,7 +22,7 @@ This source allows you to synchronize the following data tables: ## Getting started **Requirements** -In order to use the My Hours API you need to provide the credentials to an admin My Hours account. +- In order to use the My Hours API you need to provide the credentials to an admin My Hours account. ### Performance Considerations (Airbyte Open Source) @@ -33,6 +33,7 @@ Depending on the amount of team members and time logs the source provides a prop | Version | Date | Pull Request | Subject | | :------ | :--------- | :------------------------------------------------------- | :----------------------------------- | +| 0.2.0 | 2024-03-15 | [36063](https://github.com/airbytehq/airbyte/pull/36063) | Migrate to Low Code | | 0.1.2 | 2023-11-20 | [32680](https://github.com/airbytehq/airbyte/pull/32680) | Schema and CDK updates | | 0.1.1 | 2022-06-08 | [12964](https://github.com/airbytehq/airbyte/pull/12964) | Update schema for time_logs stream | | 0.1.0 | 2021-11-26 | [8270](https://github.com/airbytehq/airbyte/pull/8270) | New Source: My Hours | diff --git a/docs/integrations/sources/mysql.md b/docs/integrations/sources/mysql.md index 89caf6e38a717..9c151ff4024ec 100644 --- a/docs/integrations/sources/mysql.md +++ b/docs/integrations/sources/mysql.md @@ -223,7 +223,14 @@ Any database or table encoding combination of charset and collation is supported | Version | Date | Pull Request | Subject | |:--------|:-----------|:-----------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------| -| 3.3.13 | 2024-02-29 | [35529](https://github.com/airbytehq/airbyte/pull/35529) | Refactor state iterator messages. | +| 3.3.20 | 2024-04-16 | [37111](https://github.com/airbytehq/airbyte/pull/37111) | Populate null values in record message. | +| 3.3.19 | 2024-04-15 | [37328](https://github.com/airbytehq/airbyte/pull/37328) | Populate airbyte_meta.changes | +| 3.3.18 | 2024-04-15 | [37324](https://github.com/airbytehq/airbyte/pull/37324) | Refactor source operations. | +| 3.3.17 | 2024-04-10 | [36919](https://github.com/airbytehq/airbyte/pull/36919) | Fix a bug in conversion of null values. | +| 3.3.16 | 2024-04-05 | [36872](https://github.com/airbytehq/airbyte/pull/36872) | Update to connector's metadat definition. | +| 3.3.15 | 2024-04-05 | [36577](https://github.com/airbytehq/airbyte/pull/36577) | Config error will not send out system trace message | +| 3.3.14 | 2024-04-04 | [36742](https://github.com/airbytehq/airbyte/pull/36742) | To use new kotlin CDK | +| 3.3.13 | 2024-02-29 | [35529](https://github.com/airbytehq/airbyte/pull/35529) | Refactor state iterator messages. | | 3.3.12 | 2024-02-27 | [35675](https://github.com/airbytehq/airbyte/pull/35675) | Fix invalid cdc error message. | | 3.3.11 | 2024-02-23 | [35527](https://github.com/airbytehq/airbyte/pull/35527) | Adopt 0.23.1 and shutdown timeouts. | | 3.3.10 | 2024-02-22 | [35569](https://github.com/airbytehq/airbyte/pull/35569) | Fix logging bug. | diff --git a/docs/integrations/sources/notion-migrations.md b/docs/integrations/sources/notion-migrations.md index d08cd0e331e08..ae1e8414a4c5c 100644 --- a/docs/integrations/sources/notion-migrations.md +++ b/docs/integrations/sources/notion-migrations.md @@ -1,5 +1,24 @@ # Notion Migration Guide +## Upgrading to 3.0.0 + +We're continuously striving to enhance the quality and reliability of our connectors at Airbyte. As part of our commitment to delivering exceptional service, we are transitioning source Notion from the Python Connector Development Kit (CDK) to our innovative low-code framework. This is part of a strategic move to streamline many processes across connectors, bolstering maintainability and freeing us to focus more of our efforts on improving the performance and features of our evolving platform and growing catalog. However, due to differences between the Python and low-code CDKs, this migration constitutes a breaking change for users syncing data from the `Comments` stream. + +Specifically, we’ve evolved and standardized how state is managed for incremental streams that are nested within a parent stream. This change impacts how individual states are tracked and stored for each partition, using a more structured approach to ensure the most granular and flexible state management. To gracefully handle these changes for your existing connections, we highly recommend resetting your data for the `Comments` stream before resuming your syncs with the new version. + +If you are not syncing data from the `Comments` stream, this change is non-breaking, and no further action is required. + +### Resetting your `Comments` data + +To reset your data for the `Comments` stream, follow the steps below: + +1. Select **Connections** in the main nav bar. + 1. Select the connection(s) affected by the update. +2. Select the **Status** tab. + 1. In the **Enabled streams** list, click the three dots on the right side of the **Comments** stream and select **Reset this stream**. + +A fresh sync will run for the `Comments` stream. For more information on resetting your data in Airbyte, see [this page](https://docs.airbyte.com/operator-guides/reset). + ## Upgrading to 2.0.0 Version 2.0.0 introduces a number of changes to the JSON schemas of all streams. These changes are being introduced to reflect updates to the Notion API. Some breaking changes have been introduced that will affect the Blocks, Databases and Pages stream. diff --git a/docs/integrations/sources/notion.md b/docs/integrations/sources/notion.md index 04d96e2a9be2e..d1d07d87b9ad2 100644 --- a/docs/integrations/sources/notion.md +++ b/docs/integrations/sources/notion.md @@ -112,6 +112,8 @@ The connector is restricted by Notion [request limits](https://developers.notion | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:-----------------------------------------------------------------------------------------------------| +| 3.0.0 | 2024-04-12 | [35794](https://github.com/airbytehq/airbyte/pull/35974) | Migrate to low-code CDK (python CDK for Blocks stream) | +| 2.2.0 | 2024-04-08 | [36890](https://github.com/airbytehq/airbyte/pull/36890) | Unpin CDK version | | 2.1.0 | 2024-02-19 | [35409](https://github.com/airbytehq/airbyte/pull/35409) | Update users stream schema with bot type info fields and block schema with mention type info fields. | | 2.0.9 | 2024-02-12 | [35155](https://github.com/airbytehq/airbyte/pull/35155) | Manage dependencies with Poetry. | | 2.0.8 | 2023-11-01 | [31899](https://github.com/airbytehq/airbyte/pull/31899) | Fix `table_row.cells` property in `Blocks` stream | diff --git a/docs/integrations/sources/openweather.md b/docs/integrations/sources/openweather.md index 1e76a99967829..cffea874f569f 100644 --- a/docs/integrations/sources/openweather.md +++ b/docs/integrations/sources/openweather.md @@ -34,6 +34,7 @@ The free plan allows 60 calls per minute and 1,000,000 calls per month, you won' | Version | Date | Pull Request | Subject | | :--- | :--- | :--- | :--- | +| 0.2.1 | 2024-04-07 | [36876](https://github.com/airbytehq/airbyte/pull/36876) | Fix bug in how lat and lon parameters can be set | | 0.2.0 | 2023-08-31 | [29983](https://github.com/airbytehq/airbyte/pull/29983) | Migrate to Low Code Framework | | 0.1.6 | 2022-06-21 | [16136](https://github.com/airbytehq/airbyte/pull/16136) | Update openweather onecall api to 3.0. | | 0.1.5 | 2022-06-21 | [13864](https://github.com/airbytehq/airbyte/pull/13864) | No changes. Used connector to test publish workflow changes. | diff --git a/docs/integrations/sources/orb.md b/docs/integrations/sources/orb.md index 9214043c4c7cd..bdad63402cf9a 100644 --- a/docs/integrations/sources/orb.md +++ b/docs/integrations/sources/orb.md @@ -38,6 +38,14 @@ In order to capture data that has been updated after creation, please run a peri The Orb connector should not run into Orb API limitations under normal usage. Please [create an issue](https://github.com/airbytehq/airbyte/issues) if you see any rate limit issues that are not automatically retried successfully. +:::warning +The `credit_ledger_entries` stream will now include `events` data. This upgrade uses the `created_at` timestamps from the `credits` to establish a 30-day timeframe, with the earliest `created_at` as the starting point. This restriction is set by the Orb API. +::: + +:::info +If you are using the `start_date` and `end_date` parameter with the `credit_ledger_entries` stream it will sync all customers created during the that time window. It isn't possible to query data directly to `credit_ledger_entries`. The connector need to retrieve data from customers first to ingest the credit data. +::: + ## Getting started ### Requirements @@ -54,6 +62,8 @@ an Orb Account and API Key. | Version | Date | Pull Request | Subject | | --- |------------|----------------------------------------------------------| --- | +| 1.2.0 | 2024-03-19 | [x](https://github.com/airbytehq/airbyte/pull/x) | Expose `end_date`parameter | +| 1.1.2 | 2024-03-13 | [x](https://github.com/airbytehq/airbyte/pull/x) | Fix window to 30 days for events query timesframe start and query | | 1.1.1 | 2024-02-07 | [35005](https://github.com/airbytehq/airbyte/pull/35005) | Pass timeframe_start, timeframe_end to events query | | 1.1.0 | 2023-03-03 | [24567](https://github.com/airbytehq/airbyte/pull/24567) | Add Invoices incremental stream merged from [#24737](https://github.com/airbytehq/airbyte/pull/24737) | | 1.0.0 | 2023-02-02 | [21951](https://github.com/airbytehq/airbyte/pull/21951) | Add SubscriptionUsage stream, and made `start_date` a required field | diff --git a/docs/integrations/sources/paypal-transaction.md b/docs/integrations/sources/paypal-transaction.md index 50df0f002d981..6acbcc1a9c684 100644 --- a/docs/integrations/sources/paypal-transaction.md +++ b/docs/integrations/sources/paypal-transaction.md @@ -260,8 +260,8 @@ ___ * **Number of days per request:** The maximum supported date range is 31 days. * **Historical Data:** You can't retrieve more than 3yrs of data for the `transactions` stream. For `dispute_start_date` you can only retrieve 180 days of data (see specifications per stream) * `records_per_request`: The maximum number of records in a single request are 10K (API Server restriction) -* `page_size`: The maximum page size is 500. This has been configured by default. -* `requests_per_minute` = The maximum limit is 50 requests per minute from IP address to all endpoint (API Server restriction). +* `page_size`: The number of records per page is differs per stream. `source-paypal-transaction` sets maximum allowed page size for each stream by default. +* `requests_per_minute`: The maximum limit is 50 requests per minute from IP address to all endpoint (API Server restriction). @@ -279,6 +279,8 @@ ___ | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------| +| 2.5.1 | 2024-03-15 | [36165](https://github.com/airbytehq/airbyte/pull/36165) | Unpin CDK Version | +| 2.5.0 | 2024-03-15 | [36173](https://github.com/airbytehq/airbyte/pull/36173) | Extended `Disputes` stream schema with missing properties | | 2.4.0 | 2024-02-20 | [35465](https://github.com/airbytehq/airbyte/pull/35465) | Per-error reporting and continue sync on stream failures | | 2.3.0 | 2024-02-14 | [34510](https://github.com/airbytehq/airbyte/pull/34510) | Silver certified. New Streams Added | | 2.2.2 | 2024-02-09 | [35075](https://github.com/airbytehq/airbyte/pull/35075) | Manage dependencies with Poetry. | diff --git a/docs/integrations/sources/pinterest.md b/docs/integrations/sources/pinterest.md index 3dbd7a7b8227b..7d783cd2c50d5 100644 --- a/docs/integrations/sources/pinterest.md +++ b/docs/integrations/sources/pinterest.md @@ -6,23 +6,28 @@ This page contains the setup guide and reference information for the Pinterest s -When setting up the Pinterest source connector with Airbyte Cloud, be aware that Pinterest does not allow configuring permissions during the OAuth authentication process. Therefore, the following permissions will be requested during authentication: - -- See all of your advertising data, including ads, ad groups, campaigns, etc. -- See your public boards, including group boards you join. -- See your secret boards. -- See all of your catalogs data. -- See your public Pins. -- See your secret Pins. +When setting up the Pinterest source connector with Airbyte Cloud, be aware that Pinterest does not +allow configuring permissions during the OAuth authentication process. Therefore, the following +permissions will be requested during authentication: + +- See all of your advertising data, including ads, ad groups, campaigns, etc. +- See your public boards, including group boards you join. +- See your secret boards. +- See all of your catalogs data. +- See your public Pins. +- See your secret Pins. - See your user accounts and followers. -For more information on the scopes required for Pinterest OAuth, please refer to the [Pinterest API Scopes documentation](https://developers.pinterest.com/docs/getting-started/scopes/#Read%20scopes). +For more information on the scopes required for Pinterest OAuth, please refer to the +[Pinterest API Scopes documentation](https://developers.pinterest.com/docs/getting-started/scopes/#Read%20scopes). -To set up the Pinterest source connector with Airbyte Open Source, you'll need your Pinterest [App ID and secret key](https://developers.pinterest.com/docs/getting-started/set-up-app/) and the [refresh token](https://developers.pinterest.com/docs/getting-started/authentication/#Refreshing%20an%20access%20token). +To set up the Pinterest source connector with Airbyte Open Source, you'll need your Pinterest +[App ID and secret key](https://developers.pinterest.com/docs/getting-started/set-up-app/) and the +[refresh token](https://developers.pinterest.com/docs/getting-started/authentication/#Refreshing%20an%20access%20token). @@ -36,11 +41,20 @@ To set up the Pinterest source connector with Airbyte Open Source, you'll need y 2. Click **Sources** and then click **+ New source**. 3. On the Set up the source page, select **Pinterest** from the Source type dropdown. 4. Enter the name for the Pinterest connector. -5. For **Start Date**, enter the date in YYYY-MM-DD format. The data added on and after this date will be replicated. If this field is blank, Airbyte will replicate all data. As per Pinterest API restriction, the date cannot be more than 90 days in the past. -6. The **OAuth2.0** authorization method is selected by default. Click **Authenticate your Pinterest account**. Log in and authorize your Pinterest account. -7. (Optional) Enter a Start Date using the provided date picker, or by manually entering the date in YYYY-MM-DD format. Data added on and after this date will be replicated. If no date is set, it will default to the latest allowed date by the report API (913 days from today). -8. (Optional) Select one or multiple status values from the dropdown menu. For the ads, ad_groups, and campaigns streams, specifying a status will filter out records that do not match the specified ones. If a status is not specified, the source will default to records with a status of either ACTIVE or PAUSED. -9. (Optional) Add custom reports if needed. For more information, refer to the corresponding section. +5. For **Start Date**, enter the date in YYYY-MM-DD format. The data added on and after this date + will be replicated. If this field is blank, Airbyte will replicate all data. As per Pinterest API + restriction, the date cannot be more than 90 days in the past. +6. The **OAuth2.0** authorization method is selected by default. Click **Authenticate your Pinterest + account**. Log in and authorize your Pinterest account. +7. (Optional) Enter a Start Date using the provided date picker, or by manually entering the date in + YYYY-MM-DD format. Data added on and after this date will be replicated. If no date is set, it + will default to the latest allowed date by the report API (913 days from today). +8. (Optional) Select one or multiple status values from the dropdown menu. For the ads, ad_groups, + and campaigns streams, specifying a status will filter out records that do not match the + specified ones. If a status is not specified, the source will default to records with a status of + either ACTIVE or PAUSED. +9. (Optional) Add custom reports if needed. For more information, refer to the corresponding + section. 10. Click **Set up source**. @@ -52,17 +66,30 @@ To set up the Pinterest source connector with Airbyte Open Source, you'll need y 2. Click **Sources** and then click **+ New source**. 3. On the Set up the source page, select **Pinterest** from the Source type dropdown. 4. Enter the name for the Pinterest connector. -5. For **Start Date**, enter the date in YYYY-MM-DD format. The data added on and after this date will be replicated. If this field is blank, Airbyte will replicate all data. As per Pinterest API restriction, the date cannot be more than 90 days in the past. -6. The **OAuth2.0** authorization method is selected by default. For **Client ID** and **Client Secret**, enter your Pinterest [App ID and secret key](https://developers.pinterest.com/docs/getting-started/set-up-app/). For **Refresh Token**, enter your Pinterest [Refresh Token](https://developers.pinterest.com/docs/getting-started/authentication/#Refreshing%20an%20access%20token). -7. (Optional) Enter a Start Date using the provided date picker, or by manually entering the date in YYYY-MM-DD format. Data added on and after this date will be replicated. If no date is set, it will default to the latest allowed date by the report API (913 days from today). -8. (Optional) Select one or multiple status values from the dropdown menu. For the ads, ad_groups, and campaigns streams, specifying a status will filter out records that do not match the specified ones. If a status is not specified, the source will default to records with a status of either ACTIVE or PAUSED. -9. (Optional) Add custom reports if needed. For more information, refer to the corresponding section. +5. For **Start Date**, enter the date in YYYY-MM-DD format. The data added on and after this date + will be replicated. If this field is blank, Airbyte will replicate all data. As per Pinterest API + restriction, the date cannot be more than 90 days in the past. +6. The **OAuth2.0** authorization method is selected by default. For **Client ID** and **Client + Secret**, enter your Pinterest + [App ID and secret key](https://developers.pinterest.com/docs/getting-started/set-up-app/). For + **Refresh Token**, enter your Pinterest + [Refresh Token](https://developers.pinterest.com/docs/getting-started/authentication/#Refreshing%20an%20access%20token). +7. (Optional) Enter a Start Date using the provided date picker, or by manually entering the date in + YYYY-MM-DD format. Data added on and after this date will be replicated. If no date is set, it + will default to the latest allowed date by the report API (913 days from today). +8. (Optional) Select one or multiple status values from the dropdown menu. For the ads, ad_groups, + and campaigns streams, specifying a status will filter out records that do not match the + specified ones. If a status is not specified, the source will default to records with a status of + either ACTIVE or PAUSED. +9. (Optional) Add custom reports if needed. For more information, refer to the corresponding + section. 10. Click **Set up source**. ## Supported sync modes -The Pinterest source connector supports the following [sync modes](https://docs.airbyte.com/cloud/core-concepts#connection-sync-modes): +The Pinterest source connector supports the following +[sync modes](https://docs.airbyte.com/cloud/core-concepts#connection-sync-modes): - [Full Refresh - Overwrite](https://docs.airbyte.com/understanding-airbyte/connections/full-refresh-overwrite/) - [Full Refresh - Append](https://docs.airbyte.com/understanding-airbyte/connections/full-refresh-append) @@ -71,64 +98,111 @@ The Pinterest source connector supports the following [sync modes](https://docs. ## Supported Streams -- [Account analytics](https://developers.pinterest.com/docs/api/v5/#operation/user_account/analytics) \(Incremental\) +- [Account analytics](https://developers.pinterest.com/docs/api/v5/#operation/user_account/analytics) + \(Incremental\) - [Boards](https://developers.pinterest.com/docs/api/v5/#operation/boards/list) \(Full refresh\) -- [Board sections](https://developers.pinterest.com/docs/api/v5/#operation/board_sections/list) \(Full refresh\) -- [Pins on board section](https://developers.pinterest.com/docs/api/v5/#operation/board_sections/list_pins) \(Full refresh\) -- [Pins on board](https://developers.pinterest.com/docs/api/v5/#operation/boards/list_pins) \(Full refresh\) -- [Ad accounts](https://developers.pinterest.com/docs/api/v5/#operation/ad_accounts/list) \(Full refresh\) -- [Ad account analytics](https://developers.pinterest.com/docs/api/v5/#operation/ad_account/analytics) \(Incremental\) -- [Campaigns](https://developers.pinterest.com/docs/api/v5/#operation/campaigns/list) \(Incremental\) -- [Campaign analytics](https://developers.pinterest.com/docs/api/v5/#operation/campaigns/list) \(Incremental\) -- [Campaign Analytics Report](https://developers.pinterest.com/docs/api/v5/#operation/analytics/create_report) \(Incremental\) -- [Campaign Targeting Report](https://developers.pinterest.com/docs/api/v5/#operation/analytics/create_report) \(Incremental\) -- [Ad Groups](https://developers.pinterest.com/docs/api/v5/#operation/ad_groups/list) \(Incremental\) -- [Ad Group Analytics](https://developers.pinterest.com/docs/api/v5/#operation/ad_groups/analytics) \(Incremental\) -- [Ad Group Report](https://developers.pinterest.com/docs/api/v5/#operation/ad_groups/analytics) \(Incremental\) -- [Ad Group Targeting Report](https://developers.pinterest.com/docs/api/v5/#operation/ad_groups/analytics) \(Incremental\) +- [Board sections](https://developers.pinterest.com/docs/api/v5/#operation/board_sections/list) + \(Full refresh\) +- [Pins on board section](https://developers.pinterest.com/docs/api/v5/#operation/board_sections/list_pins) + \(Full refresh\) +- [Pins on board](https://developers.pinterest.com/docs/api/v5/#operation/boards/list_pins) \(Full + refresh\) +- [Ad accounts](https://developers.pinterest.com/docs/api/v5/#operation/ad_accounts/list) \(Full + refresh\) +- [Ad account analytics](https://developers.pinterest.com/docs/api/v5/#operation/ad_account/analytics) + \(Incremental\) +- [Campaigns](https://developers.pinterest.com/docs/api/v5/#operation/campaigns/list) + \(Incremental\) +- [Campaign analytics](https://developers.pinterest.com/docs/api/v5/#operation/campaigns/list) + \(Incremental\) +- [Campaign Analytics Report](https://developers.pinterest.com/docs/api/v5/#operation/analytics/create_report) + \(Incremental\) +- [Campaign Targeting Report](https://developers.pinterest.com/docs/api/v5/#operation/analytics/create_report) + \(Incremental\) +- [Ad Groups](https://developers.pinterest.com/docs/api/v5/#operation/ad_groups/list) + \(Incremental\) +- [Ad Group Analytics](https://developers.pinterest.com/docs/api/v5/#operation/ad_groups/analytics) + \(Incremental\) +- [Ad Group Report](https://developers.pinterest.com/docs/api/v5/#operation/ad_groups/analytics) + \(Incremental\) +- [Ad Group Targeting Report](https://developers.pinterest.com/docs/api/v5/#operation/ad_groups/analytics) + \(Incremental\) - [Ads](https://developers.pinterest.com/docs/api/v5/#operation/ads/list) \(Incremental\) -- [Ad analytics](https://developers.pinterest.com/docs/api/v5/#operation/ads/analytics) \(Incremental\) +- [Ad analytics](https://developers.pinterest.com/docs/api/v5/#operation/ads/analytics) + \(Incremental\) - [Catalogs](https://developers.pinterest.com/docs/api/v5/#operation/catalogs/list) \(Full refresh\) -- [Catalogs Feeds](https://developers.pinterest.com/docs/api/v5/#operation/feeds/list) \(Full refresh\) -- [Catalogs Product Groups](https://developers.pinterest.com/docs/api/v5/#operation/catalogs_product_groups/list) \(Full refresh\) -- [Audiences](https://developers.pinterest.com/docs/api/v5/#operation/audiences/list) \(Full refresh\) +- [Catalogs Feeds](https://developers.pinterest.com/docs/api/v5/#operation/feeds/list) \(Full + refresh\) +- [Catalogs Product Groups](https://developers.pinterest.com/docs/api/v5/#operation/catalogs_product_groups/list) + \(Full refresh\) +- [Audiences](https://developers.pinterest.com/docs/api/v5/#operation/audiences/list) \(Full + refresh\) - [Keywords](https://developers.pinterest.com/docs/api/v5/#operation/keywords/get) \(Full refresh\) -- [Conversion Tags](https://developers.pinterest.com/docs/api/v5/#operation/conversion_tags/list) \(Full refresh\) -- [Customer Lists](https://developers.pinterest.com/docs/api/v5/#tag/customer_lists) \(Full refresh\) -- [Advertizer Report](https://developers.pinterest.com/docs/api/v5/#operation/analytics/create_report) \(Incremental\) -- [Advertizer Targeting Report](https://developers.pinterest.com/docs/api/v5/#operation/analytics/create_report) \(Incremental\) -- [Pin Promotion Report](https://developers.pinterest.com/docs/api/v5/#operation/analytics/create_report) \(Incremental\) -- [Pin Promotion Targeting Report](https://developers.pinterest.com/docs/api/v5/#operation/analytics/create_report) \(Incremental\) -- [Product Group Report](https://developers.pinterest.com/docs/api/v5/#operation/analytics/create_report) \(Incremental\) -- [Product Group Targeting Report](https://developers.pinterest.com/docs/api/v5/#operation/analytics/create_report) \(Incremental\) -- [Product Item Report](https://developers.pinterest.com/docs/api/v5/#operation/analytics/create_report) \(Incremental\) -- [Keyword Report](https://developers.pinterest.com/docs/api/v5/#operation/analytics/create_report) \(Incremental\) +- [Conversion Tags](https://developers.pinterest.com/docs/api/v5/#operation/conversion_tags/list) + \(Full refresh\) +- [Customer Lists](https://developers.pinterest.com/docs/api/v5/#tag/customer_lists) \(Full + refresh\) +- [Advertizer Report](https://developers.pinterest.com/docs/api/v5/#operation/analytics/create_report) + \(Incremental\) +- [Advertizer Targeting Report](https://developers.pinterest.com/docs/api/v5/#operation/analytics/create_report) + \(Incremental\) +- [Pin Promotion Report](https://developers.pinterest.com/docs/api/v5/#operation/analytics/create_report) + \(Incremental\) +- [Pin Promotion Targeting Report](https://developers.pinterest.com/docs/api/v5/#operation/analytics/create_report) + \(Incremental\) +- [Product Group Report](https://developers.pinterest.com/docs/api/v5/#operation/analytics/create_report) + \(Incremental\) +- [Product Group Targeting Report](https://developers.pinterest.com/docs/api/v5/#operation/analytics/create_report) + \(Incremental\) +- [Product Item Report](https://developers.pinterest.com/docs/api/v5/#operation/analytics/create_report) + \(Incremental\) +- [Keyword Report](https://developers.pinterest.com/docs/api/v5/#operation/analytics/create_report) + \(Incremental\) ## Custom reports -Custom reports in the Pinterest connector allow you to create personalized analytics reports for your account. You can tailor these reports to your specific needs by choosing from various properties: - -1. **Name**: A unique identifier for the report. -2. **Level**: Specifies the data aggregation level, with options like ADVERTISER, CAMPAIGN, AD_GROUP, etc. The default level is ADVERTISER. -3. **Granularity**: Determines the data granularity, such as TOTAL, DAY, HOUR, etc. The default is TOTAL, where metrics are aggregated over the specified date range. -4. **Columns**: Identifies the data columns to be included in the report. -5. **Click Window Days (Optional)**: The number of days used for conversion attribution from a pin click action. This applies to Pinterest Tag conversion metrics. Defaults to 30 days if not specified. -6. **Engagement Window Days (Optional)**: The number of days used for conversion attribution from an engagement action. Engagements include saves, closeups, link clicks, and carousel card swipes. This applies to Pinterest Tag conversion metrics. Defaults to 30 days if not specified. -7. **View Window Days (Optional)**: The number of days used as the conversion attribution window for a view action. This applies to Pinterest Tag conversion metrics. Defaults to 1 day if not specified. -8. **Conversion Report Time (Optional)**: Indicates the date by which the conversion metrics returned will be reported. There are two dates associated with a conversion event: the date of ad interaction and the date of conversion event completion. The default is TIME_OF_AD_ACTION. -9. **Attribution Types (Optional)**: Lists the types of attribution for the report, such as INDIVIDUAL or HOUSEHOLD. -10. **Start Date (Optional)**: The start date for the report in YYYY-MM-DD format, defaulting to the latest allowed date by the report API (913 days from today). - -For more detailed information and guidelines on creating custom reports, please refer to the [Pinterest API documentation](https://developers.pinterest.com/docs/api/v5/#operation/analytics/create_report). +Custom reports in the Pinterest connector allow you to create personalized analytics reports for +your account. You can tailor these reports to your specific needs by choosing from various +properties: + +1. **Name**: A unique identifier for the report. +2. **Level**: Specifies the data aggregation level, with options like ADVERTISER, CAMPAIGN, + AD_GROUP, etc. The default level is ADVERTISER. +3. **Granularity**: Determines the data granularity, such as TOTAL, DAY, HOUR, etc. The default is + TOTAL, where metrics are aggregated over the specified date range. +4. **Columns**: Identifies the data columns to be included in the report. +5. **Click Window Days (Optional)**: The number of days used for conversion attribution from a pin + click action. This applies to Pinterest Tag conversion metrics. Defaults to 30 days if not + specified. +6. **Engagement Window Days (Optional)**: The number of days used for conversion attribution from an + engagement action. Engagements include saves, closeups, link clicks, and carousel card swipes. + This applies to Pinterest Tag conversion metrics. Defaults to 30 days if not specified. +7. **View Window Days (Optional)**: The number of days used as the conversion attribution window for + a view action. This applies to Pinterest Tag conversion metrics. Defaults to 1 day if not + specified. +8. **Conversion Report Time (Optional)**: Indicates the date by which the conversion metrics + returned will be reported. There are two dates associated with a conversion event: the date of ad + interaction and the date of conversion event completion. The default is TIME_OF_AD_ACTION. +9. **Attribution Types (Optional)**: Lists the types of attribution for the report, such as + INDIVIDUAL or HOUSEHOLD. +10. **Start Date (Optional)**: The start date for the report in YYYY-MM-DD format, defaulting to the + latest allowed date by the report API (913 days from today). + +For more detailed information and guidelines on creating custom reports, please refer to the +[Pinterest API documentation](https://developers.pinterest.com/docs/api/v5/#operation/analytics/create_report). ## Performance considerations -The connector is restricted by the Pinterest [requests limitation](https://developers.pinterest.com/docs/reference/ratelimits/). +The connector is restricted by the Pinterest +[requests limitation](https://developers.pinterest.com/docs/reference/ratelimits/). ## Changelog | Version | Date | Pull Request | Subject | -|:--------|:-----------| :------------------------------------------------------- |:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| :------ | :--------- | :------------------------------------------------------- | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| 1.3.2 | 2024-04-08 | [36912](https://github.com/airbytehq/airbyte/pull/36912) | Fix icon | +| 1.3.1 | 2024-04-03 | [36806](https://github.com/airbytehq/airbyte/pull/36806) | Update airbyte-cdk count bug to emit recordCount as float | +| 1.3.0 | 2024-03-19 | [36267](https://github.com/airbytehq/airbyte/pull/36267) | Pin airbyte-cdk version to `^0` | | 1.2.0 | 2024-02-20 | [35465](https://github.com/airbytehq/airbyte/pull/35465) | Per-error reporting and continue sync on stream failures | | 1.1.1 | 2024-02-12 | [35159](https://github.com/airbytehq/airbyte/pull/35159) | Manage dependencies with Poetry. | | 1.1.0 | 2023-11-22 | [32747](https://github.com/airbytehq/airbyte/pull/32747) | Update docs and spec. Add missing `placement_traffic_type` field to AdGroups stream | diff --git a/docs/integrations/sources/postgres.md b/docs/integrations/sources/postgres.md index 6ac689d6cc578..44f14a27c554c 100644 --- a/docs/integrations/sources/postgres.md +++ b/docs/integrations/sources/postgres.md @@ -292,11 +292,22 @@ According to Postgres [documentation](https://www.postgresql.org/docs/14/datatyp | Version | Date | Pull Request | Subject | |---------|------------|----------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| 3.3.15 | 2024-02-29 | [34724](https://github.com/airbytehq/airbyte/pull/34724) | Add record count in state message. | -| 3.3.14 | 2024-03-06 | [35842](https://github.com/airbytehq/airbyte/pull/35842) | Add logging to understand cases with a large number of records with the same LSN. | -| 3.3.12 | 2024-02-22 | [35569](https://github.com/airbytehq/airbyte/pull/35569) | Fix logging bug. | -| 3.3.13 | 2024-02-27 | [35675](https://github.com/airbytehq/airbyte/pull/35675) | Fix invalid cdc error message. | -| 3.3.11 | 2024-02-20 | [35304](https://github.com/airbytehq/airbyte/pull/35304) | Add config to throw an error on invalid CDC position and enable it by default. | +| 3.3.26 | 2024-04-10 | [36982](https://github.com/airbytehq/airbyte/pull/36982) | Populate airyte_meta.changes for xmin path | +| 3.3.25 | 2024-04-10 | [36981](https://github.com/airbytehq/airbyte/pull/36981) | Track latest CDK | +| 3.3.24 | 2024-04-10 | [36865](https://github.com/airbytehq/airbyte/pull/36865) | Track latest CDK | +| 3.3.23 | 2024-04-02 | [36759](https://github.com/airbytehq/airbyte/pull/36759) | Track latest CDK | +| 3.3.22 | 2024-04-01 | [36739](https://github.com/airbytehq/airbyte/pull/36739) | Fix useLocalCdk flag. | +| 3.3.21 | 2024-03-25 | [36584](https://github.com/airbytehq/airbyte/pull/36584) | Adopt Kotlin CDK. | +| 3.3.20 | 2024-03-25 | [36432](https://github.com/airbytehq/airbyte/pull/36432) | Failure to serialize values from Postgres DB shouldn't fail sync. | +| 3.3.19 | 2024-03-12 | [36333](https://github.com/airbytehq/airbyte/pull/36333) | Use newest CDK - deprecate dbz iterator | +| 3.3.18 | 2024-03-12 | [35599](https://github.com/airbytehq/airbyte/pull/35599) | Use newest CDK | +| 3.3.17 | 2024-03-12 | [35939](https://github.com/airbytehq/airbyte/pull/35939) | Use lsn_commit value instead of lsn_proc for CDC checkpointing logic. | +| 3.3.16 | 2024-03-11 | [35904](https://github.com/airbytehq/airbyte/pull/35904) | Adopt Java CDK 0.23.1- debezium retries. | +| 3.3.15 | 2024-02-29 | [34724](https://github.com/airbytehq/airbyte/pull/34724) | Add record count in state message. | +| 3.3.14 | 2024-03-06 | [35842](https://github.com/airbytehq/airbyte/pull/35842) | Add logging to understand cases with a large number of records with the same LSN. | +| 3.3.13 | 2024-02-27 | [35675](https://github.com/airbytehq/airbyte/pull/35675) | Fix invalid cdc error message. | +| 3.3.12 | 2024-02-22 | [35569](https://github.com/airbytehq/airbyte/pull/35569) | Fix logging bug. | +| 3.3.11 | 2024-02-20 | [35304](https://github.com/airbytehq/airbyte/pull/35304) | Add config to throw an error on invalid CDC position and enable it by default. | | 3.3.10 | 2024-02-13 | [35036](https://github.com/airbytehq/airbyte/pull/34751) | Emit analytics message for invalid CDC cursor. | | 3.3.9 | 2024-02-13 | [35224](https://github.com/airbytehq/airbyte/pull/35224) | Adopt CDK 0.20.4 | | 3.3.8 | 2024-02-08 | [34751](https://github.com/airbytehq/airbyte/pull/34751) | Adopt CDK 0.19.0 | diff --git a/docs/integrations/sources/quickbooks.md b/docs/integrations/sources/quickbooks.md index 8e74d5a6081f8..fa2075cff3471 100644 --- a/docs/integrations/sources/quickbooks.md +++ b/docs/integrations/sources/quickbooks.md @@ -105,7 +105,8 @@ This Source is capable of syncing the following [Streams](https://developer.intu | Version | Date | Pull Request | Subject | | :------ | :--------- | :------------------------------------------------------- | :------------------------------------------------------------------- | -| `3.0.2` | 2024-02-20 | [32236](https://github.com/airbytehq/airbyte/pull/32236) | Small typo in spec correction | +| `3.0.3` | 2024-03-22 | [36389](https://github.com/airbytehq/airbyte/pull/36389) | Add refresh token updater and add missing properties to streams | +| `3.0.2` | 2024-02-20 | [32236](https://github.com/airbytehq/airbyte/pull/32236) | Small typo in spec correction | | `3.0.1` | 2023-11-06 | [32236](https://github.com/airbytehq/airbyte/pull/32236) | Upgrade to `airbyte-cdk>=0.52.10` to resolve refresh token issues | | `3.0.0` | 2023-09-26 | [30770](https://github.com/airbytehq/airbyte/pull/30770) | Update schema to use `number` instead of `integer` | | `2.0.5` | 2023-09-26 | [30766](https://github.com/airbytehq/airbyte/pull/30766) | Fix improperly named keyword argument | diff --git a/docs/integrations/sources/recharge.md b/docs/integrations/sources/recharge.md index fa784da249da1..307c4256dbfb1 100644 --- a/docs/integrations/sources/recharge.md +++ b/docs/integrations/sources/recharge.md @@ -76,6 +76,8 @@ The Recharge connector should gracefully handle Recharge API limitations under n | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:------------------------------------------------------------------------------------------| +| 1.2.0 | 2024-03-13 | [35450](https://github.com/airbytehq/airbyte/pull/35450) | Migrated to low-code | +| 1.1.6 | 2024-03-12 | [35982](https://github.com/airbytehq/airbyte/pull/35982) | Added additional `query param` to guarantee the records are in `asc` order | | 1.1.5 | 2024-02-12 | [35182](https://github.com/airbytehq/airbyte/pull/35182) | Manage dependencies with Poetry. | | 1.1.4 | 2024-02-02 | [34772](https://github.com/airbytehq/airbyte/pull/34772) | Fix airbyte-lib distribution | | 1.1.3 | 2024-01-31 | [34707](https://github.com/airbytehq/airbyte/pull/34707) | Added the UI toggle `Use 'Orders' Deprecated API` to switch between `deprecated` and `modern` api versions for `Orders` stream | diff --git a/docs/integrations/sources/rocket-chat.md b/docs/integrations/sources/rocket-chat.md index 23bb8013972e0..030cc6aad433a 100644 --- a/docs/integrations/sources/rocket-chat.md +++ b/docs/integrations/sources/rocket-chat.md @@ -37,5 +37,5 @@ You need to setup a personal access token within the Rocket.chat workspace, see ## Changelog | Version | Date | Pull Request | Subject | -| :-----* | :--------* | :-------------------------------------------------------* | :----------------------------------------* | +| :------ | :--------- | :-------------------------------------------------------- | :----------------------------------------- | | 0.1.0 | 2022-10-29 | [#18635](https://github.com/airbytehq/airbyte/pull/18635) | 🎉 New Source: Rocket.chat API [low-code CDK] | diff --git a/docs/integrations/sources/rss-migrations.md b/docs/integrations/sources/rss-migrations.md new file mode 100644 index 0000000000000..494617dbf9036 --- /dev/null +++ b/docs/integrations/sources/rss-migrations.md @@ -0,0 +1,4 @@ +# Rss Migration Guide + +## Upgrading to 1.0.0 +The verison migrates the Rss connector to the low-code framework for greater maintainability. You may need to refresh the connection schema (with the reset), and run a sync. \ No newline at end of file diff --git a/docs/integrations/sources/rss.md b/docs/integrations/sources/rss.md index 320768aea7725..b6a4e881af914 100644 --- a/docs/integrations/sources/rss.md +++ b/docs/integrations/sources/rss.md @@ -32,6 +32,7 @@ None ## Changelog -| Version | Date | Pull Request | Subject | -| :--- | :--- | :--- | :--- | -| 0.1.0 | 2022-10-12 | [18838](https://github.com/airbytehq/airbyte/pull/18838) | Initial release supporting RSS | +| Version | Date | Pull Request | Subject | +| :------ | :---------- | :------------------------------------------------------- | :----------------------------- | +| 1.0.0 | 2024-04-20 | [36418](https://github.com/airbytehq/airbyte/pull/36418) | Migrate python cdk to low code | +| 0.1.0 | 2022-10-12 | [18838](https://github.com/airbytehq/airbyte/pull/18838) | Initial release supporting RSS | diff --git a/docs/integrations/sources/s3.md b/docs/integrations/sources/s3.md index 298f921fdfa02..1b3ce9d76daa4 100644 --- a/docs/integrations/sources/s3.md +++ b/docs/integrations/sources/s3.md @@ -125,11 +125,14 @@ For more information on managing your access keys, please refer to the 3. Find and select **S3** from the list of available sources. 4. Enter the name of the **Bucket** containing your files to replicate. 5. Add a stream - 1. Write the **File Type** + 1. Choose the **File Format** 2. In the **Format** box, use the dropdown menu to select the format of the files you'd like to replicate. The supported formats are **CSV**, **Parquet**, **Avro** and **JSONL**. Toggling the **Optional fields** button within the **Format** box will allow you to enter additional configurations based on the selected format. For a detailed breakdown of these settings, refer to the [File Format section](#file-format-settings) below. 3. Give a **Name** to the stream - 4. (Optional) - If you want to enforce a specific schema, you can enter a **Input schema**. By default, this value is set to `{}` and will automatically infer the schema from the file\(s\) you are replicating. For details on providing a custom schema, refer to the [User Schema section](#user-schema). - 5. Optionally, enter the **Globs** which dictates which files to be synced. This is a regular expression that allows Airbyte to pattern match the specific files to replicate. If you are replicating all the files within your bucket, use `**` as the pattern. For more precise pattern matching options, refer to the [Path Patterns section](#path-patterns) below. + 4. (Optional) Enter the **Globs** which dictates which files to be synced. This is a regular expression that allows Airbyte to pattern match the specific files to replicate. If you are replicating all the files within your bucket, use `**` as the pattern. For more precise pattern matching options, refer to the [Globs section](#globs) below. + 5. (Optional) Modify the **Days To Sync If History Is Full** value. This gives you control of the lookback window that we will use to determine which files to sync if the state history is full. Details are in the [State section](#state) below. + 6. (Optional) If you want to enforce a specific schema, you can enter a **Input schema**. By default, this value is set to `{}` and will automatically infer the schema from the file\(s\) you are replicating. For details on providing a custom schema, refer to the [User Schema section](#user-schema). + 7. (Optional) Select the **Schemaless** option, to skip all validation of the records against a schema. If this option is selected the schema will be `{"data": "object"}` and all downstream data will be nested in a "data" field. This is a good option if the schema of your records changes frequently. + 8. (Optional) Select a **Validation Policy** to tell Airbyte how to handle records that do not match the schema. You may choose to emit the record anyway (fields that aren't present in the schema may not arrive at the destination), skip the record altogether, or wait until the next discovery (which will happen in the next 24 hours). 6. **To authenticate your private bucket**: - If using an IAM role, enter the **AWS Role ARN**. - If using IAM user credentials, fill the **AWS Access Key ID** and **AWS Secret Access Key** fields with the appropriate credentials. @@ -166,7 +169,7 @@ There is no predefined streams. The streams are based on content of your bucket. Please let us know any specific compressions you'd like to see support for next! -## Path Patterns +## Globs \(tl;dr -> path pattern syntax using [wcmatch.glob](https://facelessuser.github.io/wcmatch/glob/). GLOBSTAR and SPLIT flags are enabled.\) @@ -216,6 +219,11 @@ We want to pick up part1.csv, part2.csv and part3.csv \(excluding another_part1. As you can probably tell, there are many ways to achieve the same goal with path patterns. We recommend using a pattern that ensures clarity and is robust against future additions to the directory structure. +## State + +To perform incremental syncs, Airbyte syncs files from oldest to newest. Each file that's synced (up to 10,000 files) will be added as an entry in a "history" section of the connection's state message. +Once history is full, we drop the older messages out of the file, and only read files that were last modified between the date of the newest file in history and `Days to Sync if History is Full` days prior. + ## User Schema Providing a schema allows for more control over the output of this stream. Without a provided schema, columns and datatypes will be inferred from the first created file in the bucket matching your path pattern and suffix. This will probably be fine in most cases but there may be situations you want to enforce a schema instead, e.g.: @@ -250,14 +258,6 @@ Please note, the S3 Source connector used to infer schemas from all the availabl - **AWS Access Key ID**: One half of the [required credentials](https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html#access-keys-and-secret-access-keys) for accessing a private bucket. - **AWS Secret Access Key**: The other half of the [required credentials](https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html#access-keys-and-secret-access-keys) for accessing a private bucket. -- **Path Prefix**: An optional string that limits the files returned by AWS when listing files to only those starting with the specified prefix. This is different than the **Path Pattern**, as the prefix is applied directly to the API call made to S3, rather than being filtered within Airbyte. **This is not a regular expression** and does not accept pattern-style symbols like wildcards (`*`). We recommend using this filter to improve performance if the connector if your bucket has many folders and files that are unrelated to the data you want to replicate, and all the relevant files will always reside under the specified prefix. - - Together with the **Path Pattern**, there are multiple ways to specify the files to sync. For example, all the following configurations are equivalent: - - **Prefix** = ``, **Pattern** = `path1/path2/myFolder/**/*` - - **Prefix** = `path1/`, **Pattern** = `path2/myFolder/**/*.csv` - - **Prefix** = `path1/path2/`, **Pattern** = `myFolder/**/*.csv` - - **Prefix** = `path1/path2/myFolder/`, **Pattern** = `**/*.csv` - - - The ability to individually configure the prefix and pattern has been included to accommodate situations where you do not want to replicate the majority of the files in the bucket. If you are unsure of the best approach, you can safely leave the **Path Prefix** field empty and just [set the Path Pattern](#path-patterns) to meet your requirements. - **Endpoint**: An optional parameter that enables the use of non-Amazon S3 compatible services. If you are using the default Amazon service, leave this field blank. - **Start Date**: An optional parameter that marks a starting date and time in UTC for data replication. Any files that have _not_ been modified since this specified date/time will _not_ be replicated. Use the provided datepicker (recommended) or enter the desired date programmatically in the format `YYYY-MM-DDTHH:mm:ssZ`. Leaving this field blank will replicate data from all files that have not been excluded by the **Path Pattern** and **Path Prefix**. @@ -325,6 +325,9 @@ To perform the text extraction from PDF and Docx files, the connector uses the [ | Version | Date | Pull Request | Subject | |:--------|:-----------|:----------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------| +| 4.5.12 | 2024-04-11 | [37001](https://github.com/airbytehq/airbyte/pull/37001) | Update airbyte-cdk to flush print buffer for every message | +| 4.5.11 | 2024-03-14 | [36160](https://github.com/airbytehq/airbyte/pull/36160) | Bump python-cdk version to include CSV tab delimiter fix | +| 4.5.10 | 2024-03-11 | [35955](https://github.com/airbytehq/airbyte/pull/35955) | Pin `transformers` transitive dependency | | 4.5.9 | 2024-03-06 | [35857](https://github.com/airbytehq/airbyte/pull/35857) | Bump poetry.lock to upgrade transitive dependency | | 4.5.8 | 2024-03-04 | [35808](https://github.com/airbytehq/airbyte/pull/35808) | Use cached AWS client | | 4.5.7 | 2024-02-23 | [34895](https://github.com/airbytehq/airbyte/pull/34895) | Run incremental syncs with concurrency | diff --git a/docs/integrations/sources/salesforce.md b/docs/integrations/sources/salesforce.md index 3ce8568ca1ffd..7fcc97408283d 100644 --- a/docs/integrations/sources/salesforce.md +++ b/docs/integrations/sources/salesforce.md @@ -193,6 +193,14 @@ Now that you have set up the Salesforce source connector, check out the followin | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------| +| 2.5.2 | 2024-04-15 | [37105](https://github.com/airbytehq/airbyte/pull/37105) | Raise error when schema generation fails | +| 2.5.1 | 2024-04-11 | [37001](https://github.com/airbytehq/airbyte/pull/37001) | Update airbyte-cdk to flush print buffer for every message | +| 2.5.0 | 2024-04-11 | [36942](https://github.com/airbytehq/airbyte/pull/36942) | Move Salesforce to partitioned state in order to avoid stuck syncs | +| 2.4.4 | 2024-04-08 | [36901](https://github.com/airbytehq/airbyte/pull/36901) | Upgrade CDK for empty internal_message empty when ExceptionWithDisplayMessage raised | +| 2.4.3 | 2024-04-08 | [36885](https://github.com/airbytehq/airbyte/pull/36885) | Add missing retry on REST API | +| 2.4.2 | 2024-04-05 | [36862](https://github.com/airbytehq/airbyte/pull/36862) | Upgrade CDK for updated error messaging regarding missing streams | +| 2.4.1 | 2024-04-03 | [36385](https://github.com/airbytehq/airbyte/pull/36385) | Retry HTTP requests and jobs on various cases | +| 2.4.0 | 2024-03-12 | [35978](https://github.com/airbytehq/airbyte/pull/35978) | Upgrade CDK to start emitting record counts with state and full refresh state | | 2.3.3 | 2024-03-04 | [35791](https://github.com/airbytehq/airbyte/pull/35791) | Fix memory leak (OOM) | | 2.3.2 | 2024-02-19 | [35421](https://github.com/airbytehq/airbyte/pull/35421) | Add Stream Slice Step option to specification | | 2.3.1 | 2024-02-12 | [35147](https://github.com/airbytehq/airbyte/pull/35147) | Manage dependencies with Poetry. | diff --git a/docs/integrations/sources/sendgrid-migrations.md b/docs/integrations/sources/sendgrid-migrations.md new file mode 100644 index 0000000000000..48080d1198c70 --- /dev/null +++ b/docs/integrations/sources/sendgrid-migrations.md @@ -0,0 +1,87 @@ +# Sendgrid Migration Guide + +## Upgrading to 1.0.0 + +We're continuously striving to enhance the quality and reliability of our connectors at Airbyte. +As part of our commitment to delivering exceptional service, we are transitioning Source Sendgrid from the Python Connector Development Kit (CDK) +to our new low-code framework improving maintainability and reliability of the connector. +However, due to differences between the Python and low-code CDKs, this migration constitutes a breaking change. + +* The configuration options have been renamed to `api_key` and `start_date`. +* The `unsubscribe_groups` stream has been removed. It was the same as `suppression_groups`. You can use that and get the same data. +* The `single_sends` stream has been renamed `singlesend_stats`. This is closer to the data and API. +* The `segments` stream has been upgraded to use the Sendgrid 2.0 API because the older one has been deprecated. The schema has changed as a result. + +To ensure a smooth upgrade, please refresh your schemas and reset your data before resuming syncs. + +## Connector Upgrade Guide + +### For Airbyte Open Source: Update the local connector image + +Airbyte Open Source users must manually update the connector image in their local registry before proceeding with the migration. To do so: + +1. Select **Settings** in the main navbar. + 1. Select **Sources**. +2. Find Sendgrid in the list of connectors. + +:::note +You will see two versions listed, the current in-use version and the latest version available. +::: + +3. Select **Change** to update your OSS version to the latest available version. + +### Update the connector version + +1. Select **Sources** in the main navbar. +2. Select the instance of the connector you wish to upgrade. + +:::note +Each instance of the connector must be updated separately. If you have created multiple instances of a connector, updating one will not affect the others. +::: + +3. Select **Upgrade** + 1. Follow the prompt to confirm you are ready to upgrade to the new version. + + +### Refresh all schemas and reset data + +1. Select **Connections** in the main navbar. +2. Select the connection(s) affected by the update. +3. Select the **Replication** tab. + 1. Select **Refresh source schema**. + 2. Select **OK**. +:::note +Any detected schema changes will be listed for your review. +::: +4. Select **Save changes** at the bottom of the page. + 1. Ensure the **Reset all streams** option is checked. +5. Select **Save connection**. +:::note +This will reset the data in your destination and initiate a fresh sync. +::: + +For more information on resetting your data in Airbyte, see [this page](https://docs.airbyte.com/operator-guides/reset). + + +### Refresh affected schemas and reset data + +1. Select **Connections** in the main navbar. + 1. Select the connection(s) affected by the update. +2. Select the **Replication** tab. + 1. Select **Refresh source schema**. + 2. Select **OK**. +:::note +Any detected schema changes will be listed for your review. +::: +3. Select **Save changes** at the bottom of the page. + 1. Ensure the **Reset affected streams** option is checked. +:::note +Depending on destination type you may not be prompted to reset your data. +::: +4. Select **Save connection**. +:::note +This will reset the data in your destination and initiate a fresh sync. +::: + +For more information on resetting your data in Airbyte, see [this page](https://docs.airbyte.com/operator-guides/reset). + diff --git a/docs/integrations/sources/sendgrid.md b/docs/integrations/sources/sendgrid.md index b3851579bbf2e..179f30ba5e0fa 100644 --- a/docs/integrations/sources/sendgrid.md +++ b/docs/integrations/sources/sendgrid.md @@ -15,16 +15,16 @@ This page contains the setup guide and reference information for the [Sendgrid]( * Sendgrid Account * [Create Sendgrid API Key](https://docs.sendgrid.com/ui/account-and-settings/api-keys#creating-an-api-key) with the following permissions: - * Read-only access to all resources - * Full access to marketing resources +* Read-only access to all resources +* Full access to marketing resources ### Step 2: Set up the Sendgrid connector in Airbyte 1. [Log into your Airbyte Cloud](https://cloud.airbyte.com/workspaces) account or navigate to the Airbyte Open Source dashboard. 2. In the left navigation bar, click **Sources**. In the top-right corner, click **+ New source**. 3. On the Set up the source page, enter the name for the Sendgrid connector and select **Sendgrid** from the Source type dropdown. -4. Enter your `apikey`. -5. Enter your `start_time`. +4. Enter your `api_key`. +5. Enter your `start_date`. 6. Click **Set up source**. @@ -48,11 +48,12 @@ The Sendgrid source connector supports the following [sync modes](https://docs.a * [Templates](https://docs.sendgrid.com/api-reference/transactional-templates/retrieve-paged-transactional-templates) * [Global suppression](https://docs.sendgrid.com/api-reference/suppressions-global-suppressions/retrieve-all-global-suppressions) \(Incremental\) * [Suppression groups](https://docs.sendgrid.com/api-reference/suppressions-unsubscribe-groups/retrieve-all-suppression-groups-associated-with-the-user) -* [Suppression group members](https://docs.sendgrid.com/api-reference/suppressions-suppressions/retrieve-all-suppressions) +* [Suppression group members](https://docs.sendgrid.com/api-reference/suppressions-suppressions/retrieve-all-suppressions) \(Incremental\) * [Blocks](https://docs.sendgrid.com/api-reference/blocks-api/retrieve-all-blocks) \(Incremental\) * [Bounces](https://docs.sendgrid.com/api-reference/bounces-api/retrieve-all-bounces) \(Incremental\) * [Invalid emails](https://docs.sendgrid.com/api-reference/invalid-e-mails-api/retrieve-all-invalid-emails) \(Incremental\) * [Spam reports](https://docs.sendgrid.com/api-reference/spam-reports-api/retrieve-all-spam-reports) +* [Unsubscribe Groups](https://docs.sendgrid.com/api-reference/suppressions-unsubscribe-groups/retrieve-all-suppression-groups-associated-with-the-user) ## Create a read-only API key (Optional) @@ -84,10 +85,12 @@ The connector is restricted by normal Sendgrid [requests limitation](https://doc | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| 0.4.3 | 2024-02-21 | [35181](https://github.com/airbytehq/airbyte/pull/35343) | Handle uncompressed contacts downloads. | -| 0.4.2 | 2024-02-12 | [35181](https://github.com/airbytehq/airbyte/pull/35181) | Manage dependencies with Poetry. | -| 0.4.1 | 2023-10-18 | [31543](https://github.com/airbytehq/airbyte/pull/31543) | Base image migration: remove Dockerfile and use the python-connector-base image | -| 0.4.0 | 2023-05-19 | [23959](https://github.com/airbytehq/airbyte/pull/23959) | Add `unsubscribe_groups`stream +| 1.0.0 | 2024-04-15 | [35776](https://github.com/airbytehq/airbyte/pull/35776) | Migration to low-code CDK. Breaking change that updates configuration keys, removes unsubscribe_groups stream, renames a stream to singlesend_stats, and adds the singlesends stream. | +| 0.5.0 | 2024-03-26 | [36455](https://github.com/airbytehq/airbyte/pull/36455) | Unpin CDK version, add record counts to state messages | +| 0.4.3 | 2024-02-21 | [35181](https://github.com/airbytehq/airbyte/pull/35343) | Handle uncompressed contacts downloads. | +| 0.4.2 | 2024-02-12 | [35181](https://github.com/airbytehq/airbyte/pull/35181) | Manage dependencies with Poetry. | +| 0.4.1 | 2023-10-18 | [31543](https://github.com/airbytehq/airbyte/pull/31543) | Base image migration: remove Dockerfile and use the python-connector-base image | +| 0.4.0 | 2023-05-19 | [23959](https://github.com/airbytehq/airbyte/pull/23959) | Add `unsubscribe_groups`stream | | 0.3.1 | 2023-01-27 | [21939](https://github.com/airbytehq/airbyte/pull/21939) | Fix contacts missing records; Remove Messages stream | | 0.3.0 | 2023-01-25 | [21587](https://github.com/airbytehq/airbyte/pull/21587) | Make sure spec works as expected in UI - make start_time parameter an ISO string instead of an integer interpreted as timestamp (breaking, update your existing connections and set the start_time parameter to ISO 8601 date time string in UTC) | | 0.2.16 | 2022-11-02 | [18847](https://github.com/airbytehq/airbyte/pull/18847) | Skip the stream on `400, 401 - authorization required` with log message | diff --git a/docs/integrations/sources/sentry.md b/docs/integrations/sources/sentry.md index b51fd4f341d38..dab0ff0f04f39 100644 --- a/docs/integrations/sources/sentry.md +++ b/docs/integrations/sources/sentry.md @@ -47,8 +47,11 @@ The Sentry source connector supports the following [sync modes](https://docs.air | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:------------------------------------------------------------------------| -| 0.4.1 | 2024-02-12 | [35145](https://github.com/airbytehq/airbyte/pull/35145) | Manage dependencies with Poetry. | -| 0.4.0 | 2024-01-05 | [32957](https://github.com/airbytehq/airbyte/pull/32957) | Added undeclared fields to schema, Base image migration: remove Dockerfile and use the python-connector-base image | +| 0.5.1 | 2024-04-01 | [36731](https://github.com/airbytehq/airbyte/pull/36731) | Add `%Y-%m-%dT%H:%M:%S%z` to date time formats. | +| 0.5.0 | 2024-03-27 | [35755](https://github.com/airbytehq/airbyte/pull/35755) | Migrate to low-code. | +| 0.4.2 | 2024-03-25 | [36448](https://github.com/airbytehq/airbyte/pull/36448) | Unpin CDK version | +| 0.4.1 | 2024-02-12 | [35145](https://github.com/airbytehq/airbyte/pull/35145) | Manage dependencies with Poetry | +| 0.4.0 | 2024-01-05 | [32957](https://github.com/airbytehq/airbyte/pull/32957) | Added undeclared fields to schema and migrated to base image | | 0.3.0 | 2023-09-05 | [30192](https://github.com/airbytehq/airbyte/pull/30192) | Added undeclared fields to schema | | 0.2.4 | 2023-08-14 | [29401](https://github.com/airbytehq/airbyte/pull/29401) | Fix `null` value in stream state | | 0.2.3 | 2023-08-03 | [29023](https://github.com/airbytehq/airbyte/pull/29023) | Add incremental for `issues` stream | diff --git a/docs/integrations/sources/sftp-bulk-migrations.md b/docs/integrations/sources/sftp-bulk-migrations.md new file mode 100644 index 0000000000000..2a6d0a83f4994 --- /dev/null +++ b/docs/integrations/sources/sftp-bulk-migrations.md @@ -0,0 +1,36 @@ +# SFTP Bulk Migration Guide + +## Upgrading to 1.0.0 +This release upgrades the SFTP Bulk connector to file-based CDK which causes the following changes: + +- Configuration changes +- Stream changes + +Users should: + +- Reconfigure the source +- Refresh the source schema +- Reset affected streams after upgrading to ensure uninterrupted syncs. + + +### Refresh affected schemas and reset data + +1. Select **Connections** in the main navbar. + 1. Select the connection(s) affected by the update. +2. Select the **Replication** tab. + 1. Select **Refresh source schema**. + 2. Select **OK**. +```note +Any detected schema changes will be listed for your review. +``` +3. Select **Save changes** at the bottom of the page. + 1. Ensure the **Reset affected streams** option is checked. +```note +Depending on destination type you may not be prompted to reset your data. +``` +4. Select **Save connection**. +```note +This will reset the data in your destination and initiate a fresh sync. +``` + +For more information on resetting your data in Airbyte, see [this page](https://docs.airbyte.com/operator-guides/reset). \ No newline at end of file diff --git a/docs/integrations/sources/sftp-bulk.md b/docs/integrations/sources/sftp-bulk.md index 8b1095a0a2c23..56bb62a7fa092 100644 --- a/docs/integrations/sources/sftp-bulk.md +++ b/docs/integrations/sources/sftp-bulk.md @@ -118,8 +118,9 @@ More formats \(e.g. Apache Avro\) will be supported in the future. ## Changelog -| Version | Date | Pull Request | Subject | -| :------ | :--------- | :-------------------------------------------------------- | :---------------------------- | -| 0.1.2 | 2023-04-19 | [#19224](https://github.com/airbytehq/airbyte/pull/19224) | Support custom CSV separators | -| 0.1.1 | 2023-03-17 | [#24180](https://github.com/airbytehq/airbyte/pull/24180) | Fix field order | -| 0.1.0 | 2021-24-05 | | Initial version | +| Version | Date | Pull Request | Subject | +|:--------|:-----------|:---------------------------------------------------------|:------------------------------------------------------------| +| 1.0.0 | 2024-03-22 | [36256](https://github.com/airbytehq/airbyte/pull/36256) | Migrate to File-Based CDK. Manage dependencies with Poetry. | +| 0.1.2 | 2023-04-19 | [19224](https://github.com/airbytehq/airbyte/pull/19224) | Support custom CSV separators | +| 0.1.1 | 2023-03-17 | [24180](https://github.com/airbytehq/airbyte/pull/24180) | Fix field order | +| 0.1.0 | 2021-24-05 | | Initial version | diff --git a/docs/integrations/sources/shopify.md b/docs/integrations/sources/shopify.md index 86b9ebdc35c38..4e1bc6c840473 100644 --- a/docs/integrations/sources/shopify.md +++ b/docs/integrations/sources/shopify.md @@ -205,67 +205,71 @@ For all `Shopify GraphQL BULK` api requests these limitations are applied: https ## Changelog -| Version | Date | Pull Request | Subject | -| :------ | :--------- | :------------------------------------------------------- | :------------------------------------------------------------------------------------------------------------------------------ | -| 2.0.0 | 2024-02-12 | [32345](https://github.com/airbytehq/airbyte/pull/32345) | Fixed the issue with `state` causing the `substreams` to skip the records, made `metafield_*`: `collections, customers, draft_orders, locations, orders, product_images, product_variants, products`, and `fulfillment_orders, collections, discount_codes, inventory_levels, inventory_items, transactions_graphql, customer_address` streams to use `BULK Operations` instead of `REST`| -| 1.1.8 | 2024-02-12 | [35166](https://github.com/airbytehq/airbyte/pull/35166) | Manage dependencies with Poetry. | -| 1.1.7 | 2024-01-19 | [33804](https://github.com/airbytehq/airbyte/pull/33804) | Updated documentation with list of all supported streams | -| 1.1.6 | 2024-01-04 | [33414](https://github.com/airbytehq/airbyte/pull/33414) | Prepare for airbyte-lib | -| 1.1.5 | 2023-12-28 | [33827](https://github.com/airbytehq/airbyte/pull/33827) | Fix GraphQL query | -| 1.1.4 | 2023-10-19 | [31599](https://github.com/airbytehq/airbyte/pull/31599) | Base image migration: remove Dockerfile and use the python-connector-base image | -| 1.1.3 | 2023-10-17 | [31500](https://github.com/airbytehq/airbyte/pull/31500) | Fixed the issue caused by the `missing access token` while setup the new source and not yet authenticated | -| 1.1.2 | 2023-10-13 | [31381](https://github.com/airbytehq/airbyte/pull/31381) | Fixed the issue caused by the `state` presence while fetching the `deleted events` with pagination | -| 1.1.1 | 2023-09-18 | [30560](https://github.com/airbytehq/airbyte/pull/30560) | Performance testing - include socat binary in docker image | -| 1.1.0 | 2023-09-07 | [30246](https://github.com/airbytehq/airbyte/pull/30246) | Added ability to fetch `destroyed` records for `Articles, Blogs, CustomCollections, Orders, Pages, PriceRules, Products` | -| 1.0.0 | 2023-08-11 | [29361](https://github.com/airbytehq/airbyte/pull/29361) | Migrate to the `2023-07` Shopify API Version | -| 0.6.2 | 2023-08-09 | [29302](https://github.com/airbytehq/airbyte/pull/29302) | Handle the `Internal Server Error` when entity could be fetched | -| 0.6.1 | 2023-08-08 | [28291](https://github.com/airbytehq/airbyte/pull/28291) | Allow `shop` field to accept `*.myshopify.com` shop names, updated `OAuth Spec` | -| 0.6.0 | 2023-08-02 | [28770](https://github.com/airbytehq/airbyte/pull/28770) | Added `Disputes` stream | -| 0.5.1 | 2023-07-13 | [28700](https://github.com/airbytehq/airbyte/pull/28700) | Improved `error messages` with more user-friendly description, refactored code | -| 0.5.0 | 2023-06-13 | [27732](https://github.com/airbytehq/airbyte/pull/27732) | License Update: Elv2 | -| 0.4.0 | 2023-06-13 | [27083](https://github.com/airbytehq/airbyte/pull/27083) | Added `CustomerSavedSearch`, `CustomerAddress` and `Countries` streams | -| 0.3.4 | 2023-05-10 | [25961](https://github.com/airbytehq/airbyte/pull/25961) | Added validation for `shop` in input configuration (accepts non-url-like inputs) | -| 0.3.3 | 2023-04-12 | [25110](https://github.com/airbytehq/airbyte/pull/25110) | Fixed issue when `cursor_field` is `"None"`, added missing properties to stream schemas, fixed `access_scopes` validation error | -| 0.3.2 | 2023-02-27 | [23473](https://github.com/airbytehq/airbyte/pull/23473) | Fixed OOM / Memory leak issue for Airbyte Cloud | -| 0.3.1 | 2023-01-16 | [21461](https://github.com/airbytehq/airbyte/pull/21461) | Added `discount_applications` to `orders` stream | -| 0.3.0 | 2022-11-16 | [19492](https://github.com/airbytehq/airbyte/pull/19492) | Added support for graphql and add a graphql products stream | -| 0.2.0 | 2022-10-21 | [18298](https://github.com/airbytehq/airbyte/pull/18298) | Updated API version to the `2022-10`, make stream schemas backward cpmpatible | -| 0.1.39 | 2022-10-13 | [17962](https://github.com/airbytehq/airbyte/pull/17962) | Added metafield streams; support for nested list streams | -| 0.1.38 | 2022-10-10 | [17777](https://github.com/airbytehq/airbyte/pull/17777) | Fixed `404` for configured streams, fix missing `cursor` error for old records | -| 0.1.37 | 2022-04-30 | [12500](https://github.com/airbytehq/airbyte/pull/12500) | Improve input configuration copy | -| 0.1.36 | 2022-03-22 | [9850](https://github.com/airbytehq/airbyte/pull/9850) | Added `BalanceTransactions` stream | -| 0.1.35 | 2022-03-07 | [10915](https://github.com/airbytehq/airbyte/pull/10915) | Fixed a bug which caused `full-refresh` syncs of child REST entities configured for `incremental` | -| 0.1.34 | 2022-03-02 | [10794](https://github.com/airbytehq/airbyte/pull/10794) | Minor specification re-order, fixed links in documentation | -| 0.1.33 | 2022-02-17 | [10419](https://github.com/airbytehq/airbyte/pull/10419) | Fixed wrong field type for tax_exemptions for `Abandoned_checkouts` stream | -| 0.1.32 | 2022-02-18 | [10449](https://github.com/airbytehq/airbyte/pull/10449) | Added `tender_transactions` stream | -| 0.1.31 | 2022-02-08 | [10175](https://github.com/airbytehq/airbyte/pull/10175) | Fixed compatibility issues for legacy user config | -| 0.1.30 | 2022-01-24 | [9648](https://github.com/airbytehq/airbyte/pull/9648) | Added permission validation before sync | -| 0.1.29 | 2022-01-20 | [9049](https://github.com/airbytehq/airbyte/pull/9248) | Added `shop_url` to the record for all streams | -| 0.1.28 | 2022-01-19 | [9591](https://github.com/airbytehq/airbyte/pull/9591) | Implemented `OAuth2.0` authentication method for Airbyte Cloud | -| 0.1.27 | 2021-12-22 | [9049](https://github.com/airbytehq/airbyte/pull/9049) | Updated connector fields title/description | -| 0.1.26 | 2021-12-14 | [8597](https://github.com/airbytehq/airbyte/pull/8597) | Fixed `mismatched number of tables` for base-normalization, increased performance of `order_refunds` stream | -| 0.1.25 | 2021-12-02 | [8297](https://github.com/airbytehq/airbyte/pull/8297) | Added Shop stream | -| 0.1.24 | 2021-11-30 | [7783](https://github.com/airbytehq/airbyte/pull/7783) | Reviewed and corrected schemas for all streams | -| 0.1.23 | 2021-11-15 | [7973](https://github.com/airbytehq/airbyte/pull/7973) | Added `InventoryItems` | -| 0.1.22 | 2021-10-18 | [7101](https://github.com/airbytehq/airbyte/pull/7107) | Added FulfillmentOrders, Fulfillments streams | -| 0.1.21 | 2021-10-14 | [7382](https://github.com/airbytehq/airbyte/pull/7382) | Fixed `InventoryLevels` primary key | -| 0.1.20 | 2021-10-14 | [7063](https://github.com/airbytehq/airbyte/pull/7063) | Added `Location` and `InventoryLevels` as streams | -| 0.1.19 | 2021-10-11 | [6951](https://github.com/airbytehq/airbyte/pull/6951) | Added support of `OAuth 2.0` authorisation option | -| 0.1.18 | 2021-09-21 | [6056](https://github.com/airbytehq/airbyte/pull/6056) | Added `pre_tax_price` to the `orders/line_items` schema | -| 0.1.17 | 2021-09-17 | [5244](https://github.com/airbytehq/airbyte/pull/5244) | Created data type enforcer for converting prices into numbers | -| 0.1.16 | 2021-09-09 | [5965](https://github.com/airbytehq/airbyte/pull/5945) | Fixed the connector's performance for `Incremental refresh` | -| 0.1.15 | 2021-09-02 | [5853](https://github.com/airbytehq/airbyte/pull/5853) | Fixed `amount` type in `order_refund` schema | -| 0.1.14 | 2021-09-02 | [5801](https://github.com/airbytehq/airbyte/pull/5801) | Fixed `line_items/discount allocations` & `duties` parts of `orders` schema | -| 0.1.13 | 2021-08-17 | [5470](https://github.com/airbytehq/airbyte/pull/5470) | Fixed rate limits throttling | -| 0.1.12 | 2021-08-09 | [5276](https://github.com/airbytehq/airbyte/pull/5276) | Added status property to product schema | -| 0.1.11 | 2021-07-23 | [4943](https://github.com/airbytehq/airbyte/pull/4943) | Fixed products schema up to API 2021-07 | -| 0.1.10 | 2021-07-19 | [4830](https://github.com/airbytehq/airbyte/pull/4830) | Fixed for streams json schemas, upgrade to API version 2021-07 | -| 0.1.9 | 2021-07-04 | [4472](https://github.com/airbytehq/airbyte/pull/4472) | Incremental sync is now using updated_at instead of since_id by default | -| 0.1.8 | 2021-06-29 | [4121](https://github.com/airbytehq/airbyte/pull/4121) | Added draft orders stream | -| 0.1.7 | 2021-06-26 | [4290](https://github.com/airbytehq/airbyte/pull/4290) | Fixed the bug when limiting output records to 1 caused infinity loop | -| 0.1.6 | 2021-06-24 | [4009](https://github.com/airbytehq/airbyte/pull/4009) | Added pages, price rules and discount codes streams | -| 0.1.5 | 2021-06-10 | [3973](https://github.com/airbytehq/airbyte/pull/3973) | Added `AIRBYTE_ENTRYPOINT` for Kubernetes support | -| 0.1.4 | 2021-06-09 | [3926](https://github.com/airbytehq/airbyte/pull/3926) | New attributes to Orders schema | -| 0.1.3 | 2021-06-08 | [3787](https://github.com/airbytehq/airbyte/pull/3787) | Added Native Shopify Source Connector | +| Version | Date | Pull Request | Subject | +|:--------|:-----------|:---------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 2.0.4 | 2024-03-22 | [36355](https://github.com/airbytehq/airbyte/pull/36355) | Update CDK version to ensure Per-Stream Error Messaging and Record Counts In State (features were already there so just upping the version) | +| 2.0.3 | 2024-03-15 | [36170](https://github.com/airbytehq/airbyte/pull/36170) | Fixed the `STATE` messages emittion frequency for the `nested` sub-streams | +| 2.0.2 | 2024-03-12 | [36000](https://github.com/airbytehq/airbyte/pull/36000) | Fix and issue where invalid shop name causes index out of bounds error | +| 2.0.1 | 2024-03-11 | [35952](https://github.com/airbytehq/airbyte/pull/35952) | Fixed the issue when `start date` is missing but the `stream` required it | +| 2.0.0 | 2024-02-12 | [32345](https://github.com/airbytehq/airbyte/pull/32345) | Fixed the issue with `state` causing the `substreams` to skip the records, made `metafield_*`: `collections, customers, draft_orders, locations, orders, product_images, product_variants, products`, and `fulfillment_orders, collections, discount_codes, inventory_levels, inventory_items, transactions_graphql, customer_address` streams to use `BULK Operations` instead of `REST` | +| 1.1.8 | 2024-02-12 | [35166](https://github.com/airbytehq/airbyte/pull/35166) | Manage dependencies with Poetry. | +| 1.1.7 | 2024-01-19 | [33804](https://github.com/airbytehq/airbyte/pull/33804) | Updated documentation with list of all supported streams | +| 1.1.6 | 2024-01-04 | [33414](https://github.com/airbytehq/airbyte/pull/33414) | Prepare for airbyte-lib | +| 1.1.5 | 2023-12-28 | [33827](https://github.com/airbytehq/airbyte/pull/33827) | Fix GraphQL query | +| 1.1.4 | 2023-10-19 | [31599](https://github.com/airbytehq/airbyte/pull/31599) | Base image migration: remove Dockerfile and use the python-connector-base image | +| 1.1.3 | 2023-10-17 | [31500](https://github.com/airbytehq/airbyte/pull/31500) | Fixed the issue caused by the `missing access token` while setup the new source and not yet authenticated | +| 1.1.2 | 2023-10-13 | [31381](https://github.com/airbytehq/airbyte/pull/31381) | Fixed the issue caused by the `state` presence while fetching the `deleted events` with pagination | +| 1.1.1 | 2023-09-18 | [30560](https://github.com/airbytehq/airbyte/pull/30560) | Performance testing - include socat binary in docker image | +| 1.1.0 | 2023-09-07 | [30246](https://github.com/airbytehq/airbyte/pull/30246) | Added ability to fetch `destroyed` records for `Articles, Blogs, CustomCollections, Orders, Pages, PriceRules, Products` | +| 1.0.0 | 2023-08-11 | [29361](https://github.com/airbytehq/airbyte/pull/29361) | Migrate to the `2023-07` Shopify API Version | +| 0.6.2 | 2023-08-09 | [29302](https://github.com/airbytehq/airbyte/pull/29302) | Handle the `Internal Server Error` when entity could be fetched | +| 0.6.1 | 2023-08-08 | [28291](https://github.com/airbytehq/airbyte/pull/28291) | Allow `shop` field to accept `*.myshopify.com` shop names, updated `OAuth Spec` | +| 0.6.0 | 2023-08-02 | [28770](https://github.com/airbytehq/airbyte/pull/28770) | Added `Disputes` stream | +| 0.5.1 | 2023-07-13 | [28700](https://github.com/airbytehq/airbyte/pull/28700) | Improved `error messages` with more user-friendly description, refactored code | +| 0.5.0 | 2023-06-13 | [27732](https://github.com/airbytehq/airbyte/pull/27732) | License Update: Elv2 | +| 0.4.0 | 2023-06-13 | [27083](https://github.com/airbytehq/airbyte/pull/27083) | Added `CustomerSavedSearch`, `CustomerAddress` and `Countries` streams | +| 0.3.4 | 2023-05-10 | [25961](https://github.com/airbytehq/airbyte/pull/25961) | Added validation for `shop` in input configuration (accepts non-url-like inputs) | +| 0.3.3 | 2023-04-12 | [25110](https://github.com/airbytehq/airbyte/pull/25110) | Fixed issue when `cursor_field` is `"None"`, added missing properties to stream schemas, fixed `access_scopes` validation error | +| 0.3.2 | 2023-02-27 | [23473](https://github.com/airbytehq/airbyte/pull/23473) | Fixed OOM / Memory leak issue for Airbyte Cloud | +| 0.3.1 | 2023-01-16 | [21461](https://github.com/airbytehq/airbyte/pull/21461) | Added `discount_applications` to `orders` stream | +| 0.3.0 | 2022-11-16 | [19492](https://github.com/airbytehq/airbyte/pull/19492) | Added support for graphql and add a graphql products stream | +| 0.2.0 | 2022-10-21 | [18298](https://github.com/airbytehq/airbyte/pull/18298) | Updated API version to the `2022-10`, make stream schemas backward cpmpatible | +| 0.1.39 | 2022-10-13 | [17962](https://github.com/airbytehq/airbyte/pull/17962) | Added metafield streams; support for nested list streams | +| 0.1.38 | 2022-10-10 | [17777](https://github.com/airbytehq/airbyte/pull/17777) | Fixed `404` for configured streams, fix missing `cursor` error for old records | +| 0.1.37 | 2022-04-30 | [12500](https://github.com/airbytehq/airbyte/pull/12500) | Improve input configuration copy | +| 0.1.36 | 2022-03-22 | [9850](https://github.com/airbytehq/airbyte/pull/9850) | Added `BalanceTransactions` stream | +| 0.1.35 | 2022-03-07 | [10915](https://github.com/airbytehq/airbyte/pull/10915) | Fixed a bug which caused `full-refresh` syncs of child REST entities configured for `incremental` | +| 0.1.34 | 2022-03-02 | [10794](https://github.com/airbytehq/airbyte/pull/10794) | Minor specification re-order, fixed links in documentation | +| 0.1.33 | 2022-02-17 | [10419](https://github.com/airbytehq/airbyte/pull/10419) | Fixed wrong field type for tax_exemptions for `Abandoned_checkouts` stream | +| 0.1.32 | 2022-02-18 | [10449](https://github.com/airbytehq/airbyte/pull/10449) | Added `tender_transactions` stream | +| 0.1.31 | 2022-02-08 | [10175](https://github.com/airbytehq/airbyte/pull/10175) | Fixed compatibility issues for legacy user config | +| 0.1.30 | 2022-01-24 | [9648](https://github.com/airbytehq/airbyte/pull/9648) | Added permission validation before sync | +| 0.1.29 | 2022-01-20 | [9049](https://github.com/airbytehq/airbyte/pull/9248) | Added `shop_url` to the record for all streams | +| 0.1.28 | 2022-01-19 | [9591](https://github.com/airbytehq/airbyte/pull/9591) | Implemented `OAuth2.0` authentication method for Airbyte Cloud | +| 0.1.27 | 2021-12-22 | [9049](https://github.com/airbytehq/airbyte/pull/9049) | Updated connector fields title/description | +| 0.1.26 | 2021-12-14 | [8597](https://github.com/airbytehq/airbyte/pull/8597) | Fixed `mismatched number of tables` for base-normalization, increased performance of `order_refunds` stream | +| 0.1.25 | 2021-12-02 | [8297](https://github.com/airbytehq/airbyte/pull/8297) | Added Shop stream | +| 0.1.24 | 2021-11-30 | [7783](https://github.com/airbytehq/airbyte/pull/7783) | Reviewed and corrected schemas for all streams | +| 0.1.23 | 2021-11-15 | [7973](https://github.com/airbytehq/airbyte/pull/7973) | Added `InventoryItems` | +| 0.1.22 | 2021-10-18 | [7101](https://github.com/airbytehq/airbyte/pull/7107) | Added FulfillmentOrders, Fulfillments streams | +| 0.1.21 | 2021-10-14 | [7382](https://github.com/airbytehq/airbyte/pull/7382) | Fixed `InventoryLevels` primary key | +| 0.1.20 | 2021-10-14 | [7063](https://github.com/airbytehq/airbyte/pull/7063) | Added `Location` and `InventoryLevels` as streams | +| 0.1.19 | 2021-10-11 | [6951](https://github.com/airbytehq/airbyte/pull/6951) | Added support of `OAuth 2.0` authorisation option | +| 0.1.18 | 2021-09-21 | [6056](https://github.com/airbytehq/airbyte/pull/6056) | Added `pre_tax_price` to the `orders/line_items` schema | +| 0.1.17 | 2021-09-17 | [5244](https://github.com/airbytehq/airbyte/pull/5244) | Created data type enforcer for converting prices into numbers | +| 0.1.16 | 2021-09-09 | [5965](https://github.com/airbytehq/airbyte/pull/5945) | Fixed the connector's performance for `Incremental refresh` | +| 0.1.15 | 2021-09-02 | [5853](https://github.com/airbytehq/airbyte/pull/5853) | Fixed `amount` type in `order_refund` schema | +| 0.1.14 | 2021-09-02 | [5801](https://github.com/airbytehq/airbyte/pull/5801) | Fixed `line_items/discount allocations` & `duties` parts of `orders` schema | +| 0.1.13 | 2021-08-17 | [5470](https://github.com/airbytehq/airbyte/pull/5470) | Fixed rate limits throttling | +| 0.1.12 | 2021-08-09 | [5276](https://github.com/airbytehq/airbyte/pull/5276) | Added status property to product schema | +| 0.1.11 | 2021-07-23 | [4943](https://github.com/airbytehq/airbyte/pull/4943) | Fixed products schema up to API 2021-07 | +| 0.1.10 | 2021-07-19 | [4830](https://github.com/airbytehq/airbyte/pull/4830) | Fixed for streams json schemas, upgrade to API version 2021-07 | +| 0.1.9 | 2021-07-04 | [4472](https://github.com/airbytehq/airbyte/pull/4472) | Incremental sync is now using updated_at instead of since_id by default | +| 0.1.8 | 2021-06-29 | [4121](https://github.com/airbytehq/airbyte/pull/4121) | Added draft orders stream | +| 0.1.7 | 2021-06-26 | [4290](https://github.com/airbytehq/airbyte/pull/4290) | Fixed the bug when limiting output records to 1 caused infinity loop | +| 0.1.6 | 2021-06-24 | [4009](https://github.com/airbytehq/airbyte/pull/4009) | Added pages, price rules and discount codes streams | +| 0.1.5 | 2021-06-10 | [3973](https://github.com/airbytehq/airbyte/pull/3973) | Added `AIRBYTE_ENTRYPOINT` for Kubernetes support | +| 0.1.4 | 2021-06-09 | [3926](https://github.com/airbytehq/airbyte/pull/3926) | New attributes to Orders schema | +| 0.1.3 | 2021-06-08 | [3787](https://github.com/airbytehq/airbyte/pull/3787) | Added Native Shopify Source Connector | diff --git a/docs/integrations/sources/slack-migrations.md b/docs/integrations/sources/slack-migrations.md new file mode 100644 index 0000000000000..31458bc54c1c9 --- /dev/null +++ b/docs/integrations/sources/slack-migrations.md @@ -0,0 +1,18 @@ +# Slack Migration Guide + +## Upgrading to 1.0.0 + +We're continuously striving to enhance the quality and reliability of our connectors at Airbyte. +As part of our commitment to delivering exceptional service, we are transitioning source Slack from the +Python Connector Development Kit (CDK) to our innovative low-code framework. +This is part of a strategic move to streamline many processes across connectors, bolstering maintainability and +freeing us to focus more of our efforts on improving the performance and features of our evolving platform and growing catalog. +However, due to differences between the Python and low-code CDKs, this migration constitutes a breaking change. + +We’ve evolved and standardized how state is managed for incremental streams that are nested within a parent stream. +This change impacts how individual states are tracked and stored for each partition, using a more structured approach +to ensure the most granular and flexible state management. +This change will affect the `Channel Messages` stream. + +## Migration Steps +* A `reset` for `Channel Messages` stream is required after upgrading to this version. diff --git a/docs/integrations/sources/slack.md b/docs/integrations/sources/slack.md index a5a99979b5239..a8db6c5c6ed23 100644 --- a/docs/integrations/sources/slack.md +++ b/docs/integrations/sources/slack.md @@ -161,38 +161,41 @@ Slack has [rate limit restrictions](https://api.slack.com/docs/rate-limits). ## Changelog -| Version | Date | Pull Request | Subject | -|:--------|:-----------|:---------------------------------------------------------|:------------------------------------------------------------------------------------| -| 0.3.9 | 2024-02-12 | [35157](https://github.com/airbytehq/airbyte/pull/35157) | Manage dependencies with Poetry. | +| Version | Date | Pull Request | Subject | +|:--------|:-----------|:---------------------------------------------------------|:-------------------------------------------------------------------------------------| +| 1.0.0 | 2024-04-02 | [35477](https://github.com/airbytehq/airbyte/pull/35477) | Migration to low-code CDK | +| 0.4.1 | 2024-03-27 | [36579](https://github.com/airbytehq/airbyte/pull/36579) | Upgrade airbyte-cdk version to emit record counts as floats | +| 0.4.0 | 2024-03-19 | [36267](https://github.com/airbytehq/airbyte/pull/36267) | Pin airbyte-cdk version to `^0` | +| 0.3.9 | 2024-02-12 | [35157](https://github.com/airbytehq/airbyte/pull/35157) | Manage dependencies with Poetry. | | 0.3.8 | 2024-02-09 | [35131](https://github.com/airbytehq/airbyte/pull/35131) | Fixed the issue when `schema discovery` fails with `502` due to the platform timeout | -| 0.3.7 | 2024-01-10 | [1234](https://github.com/airbytehq/airbyte/pull/1234) | prepare for airbyte-lib | -| 0.3.6 | 2023-11-21 | [32707](https://github.com/airbytehq/airbyte/pull/32707) | Threads: do not use client-side record filtering | -| 0.3.5 | 2023-10-19 | [31599](https://github.com/airbytehq/airbyte/pull/31599) | Base image migration: remove Dockerfile and use the python-connector-base image | -| 0.3.4 | 2023-10-06 | [31134](https://github.com/airbytehq/airbyte/pull/31134) | Update CDK and remove non iterable return from records | -| 0.3.3 | 2023-09-28 | [30580](https://github.com/airbytehq/airbyte/pull/30580) | Add `bot_id` field to threads schema | -| 0.3.2 | 2023-09-20 | [30613](https://github.com/airbytehq/airbyte/pull/30613) | Set default value for channel_filters during discover | -| 0.3.1 | 2023-09-19 | [30570](https://github.com/airbytehq/airbyte/pull/30570) | Use default availability strategy | -| 0.3.0 | 2023-09-18 | [30521](https://github.com/airbytehq/airbyte/pull/30521) | Add unexpected fields to streams `channel_messages`, `channels`, `threads`, `users` | -| 0.2.0 | 2023-05-24 | [26497](https://github.com/airbytehq/airbyte/pull/26497) | Fixed `lookback window` value limitations | -| 0.1.26 | 2023-05-17 | [26186](https://github.com/airbytehq/airbyte/pull/26186) | Limited the `lookback window` range for input configuration | -| 0.1.25 | 2023-03-20 | [22889](https://github.com/airbytehq/airbyte/pull/22889) | Specified date formatting in specification | -| 0.1.24 | 2023-03-20 | [24126](https://github.com/airbytehq/airbyte/pull/24126) | Increase page size to 1000 | -| 0.1.23 | 2023-02-21 | [21907](https://github.com/airbytehq/airbyte/pull/21907) | Do not join channels that not gonna be synced | -| 0.1.22 | 2023-01-27 | [22022](https://github.com/airbytehq/airbyte/pull/22022) | Set `AvailabilityStrategy` for streams explicitly to `None` | -| 0.1.21 | 2023-01-12 | [21321](https://github.com/airbytehq/airbyte/pull/21321) | Retry Timeout error | -| 0.1.20 | 2022-12-21 | [20767](https://github.com/airbytehq/airbyte/pull/20767) | Update schema | -| 0.1.19 | 2022-12-01 | [19970](https://github.com/airbytehq/airbyte/pull/19970) | Remove OAuth2.0 broken `refresh_token` support | -| 0.1.18 | 2022-09-28 | [17315](https://github.com/airbytehq/airbyte/pull/17315) | Always install latest version of Airbyte CDK | -| 0.1.17 | 2022-08-28 | [16085](https://github.com/airbytehq/airbyte/pull/16085) | Increase unit test coverage | -| 0.1.16 | 2022-08-28 | [16050](https://github.com/airbytehq/airbyte/pull/16050) | Fix SATs | -| 0.1.15 | 2022-03-31 | [11613](https://github.com/airbytehq/airbyte/pull/11613) | Add 'channel_filter' config and improve performance | -| 0.1.14 | 2022-01-26 | [9575](https://github.com/airbytehq/airbyte/pull/9575) | Correct schema | -| 0.1.13 | 2021-11-08 | [7499](https://github.com/airbytehq/airbyte/pull/7499) | Remove base-python dependencies | -| 0.1.12 | 2021-10-07 | [6570](https://github.com/airbytehq/airbyte/pull/6570) | Implement OAuth support with OAuth authenticator | -| 0.1.11 | 2021-08-27 | [5830](https://github.com/airbytehq/airbyte/pull/5830) | Fix sync operations hang forever issue | -| 0.1.10 | 2021-08-27 | [5697](https://github.com/airbytehq/airbyte/pull/5697) | Fix max retries issue | -| 0.1.9 | 2021-07-20 | [4860](https://github.com/airbytehq/airbyte/pull/4860) | Fix reading threads issue | -| 0.1.8 | 2021-07-14 | [4683](https://github.com/airbytehq/airbyte/pull/4683) | Add float\_ts primary key | -| 0.1.7 | 2021-06-25 | [3978](https://github.com/airbytehq/airbyte/pull/3978) | Release Slack CDK Connector | +| 0.3.7 | 2024-01-10 | [1234](https://github.com/airbytehq/airbyte/pull/1234) | prepare for airbyte-lib | +| 0.3.6 | 2023-11-21 | [32707](https://github.com/airbytehq/airbyte/pull/32707) | Threads: do not use client-side record filtering | +| 0.3.5 | 2023-10-19 | [31599](https://github.com/airbytehq/airbyte/pull/31599) | Base image migration: remove Dockerfile and use the python-connector-base image | +| 0.3.4 | 2023-10-06 | [31134](https://github.com/airbytehq/airbyte/pull/31134) | Update CDK and remove non iterable return from records | +| 0.3.3 | 2023-09-28 | [30580](https://github.com/airbytehq/airbyte/pull/30580) | Add `bot_id` field to threads schema | +| 0.3.2 | 2023-09-20 | [30613](https://github.com/airbytehq/airbyte/pull/30613) | Set default value for channel_filters during discover | +| 0.3.1 | 2023-09-19 | [30570](https://github.com/airbytehq/airbyte/pull/30570) | Use default availability strategy | +| 0.3.0 | 2023-09-18 | [30521](https://github.com/airbytehq/airbyte/pull/30521) | Add unexpected fields to streams `channel_messages`, `channels`, `threads`, `users` | +| 0.2.0 | 2023-05-24 | [26497](https://github.com/airbytehq/airbyte/pull/26497) | Fixed `lookback window` value limitations | +| 0.1.26 | 2023-05-17 | [26186](https://github.com/airbytehq/airbyte/pull/26186) | Limited the `lookback window` range for input configuration | +| 0.1.25 | 2023-03-20 | [22889](https://github.com/airbytehq/airbyte/pull/22889) | Specified date formatting in specification | +| 0.1.24 | 2023-03-20 | [24126](https://github.com/airbytehq/airbyte/pull/24126) | Increase page size to 1000 | +| 0.1.23 | 2023-02-21 | [21907](https://github.com/airbytehq/airbyte/pull/21907) | Do not join channels that not gonna be synced | +| 0.1.22 | 2023-01-27 | [22022](https://github.com/airbytehq/airbyte/pull/22022) | Set `AvailabilityStrategy` for streams explicitly to `None` | +| 0.1.21 | 2023-01-12 | [21321](https://github.com/airbytehq/airbyte/pull/21321) | Retry Timeout error | +| 0.1.20 | 2022-12-21 | [20767](https://github.com/airbytehq/airbyte/pull/20767) | Update schema | +| 0.1.19 | 2022-12-01 | [19970](https://github.com/airbytehq/airbyte/pull/19970) | Remove OAuth2.0 broken `refresh_token` support | +| 0.1.18 | 2022-09-28 | [17315](https://github.com/airbytehq/airbyte/pull/17315) | Always install latest version of Airbyte CDK | +| 0.1.17 | 2022-08-28 | [16085](https://github.com/airbytehq/airbyte/pull/16085) | Increase unit test coverage | +| 0.1.16 | 2022-08-28 | [16050](https://github.com/airbytehq/airbyte/pull/16050) | Fix SATs | +| 0.1.15 | 2022-03-31 | [11613](https://github.com/airbytehq/airbyte/pull/11613) | Add 'channel_filter' config and improve performance | +| 0.1.14 | 2022-01-26 | [9575](https://github.com/airbytehq/airbyte/pull/9575) | Correct schema | +| 0.1.13 | 2021-11-08 | [7499](https://github.com/airbytehq/airbyte/pull/7499) | Remove base-python dependencies | +| 0.1.12 | 2021-10-07 | [6570](https://github.com/airbytehq/airbyte/pull/6570) | Implement OAuth support with OAuth authenticator | +| 0.1.11 | 2021-08-27 | [5830](https://github.com/airbytehq/airbyte/pull/5830) | Fix sync operations hang forever issue | +| 0.1.10 | 2021-08-27 | [5697](https://github.com/airbytehq/airbyte/pull/5697) | Fix max retries issue | +| 0.1.9 | 2021-07-20 | [4860](https://github.com/airbytehq/airbyte/pull/4860) | Fix reading threads issue | +| 0.1.8 | 2021-07-14 | [4683](https://github.com/airbytehq/airbyte/pull/4683) | Add float\_ts primary key | +| 0.1.7 | 2021-06-25 | [3978](https://github.com/airbytehq/airbyte/pull/3978) | Release Slack CDK Connector | diff --git a/docs/integrations/sources/snapchat-marketing.md b/docs/integrations/sources/snapchat-marketing.md index f6c6d8e0b39de..078782a15486b 100644 --- a/docs/integrations/sources/snapchat-marketing.md +++ b/docs/integrations/sources/snapchat-marketing.md @@ -17,6 +17,10 @@ This page guides you through the process of setting up the Snapchat Marketing so * client_secret * refresh_token * start_date +* end_date +* action_report_time (Optional, Default value is conversion) It specifies the principle for conversion reporting. +* swipe_up_attribution_window (Optional, Default value is 1_DAY) This is the attribution window for swipe up. +* view_attribution_window (Optional, Default value is 28_DAY) This is the attribution window for views. ## Setup guide @@ -113,6 +117,8 @@ Snapchat Marketing API has limitations to 1000 items per page. | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:-------------------------------------------------------------------------------| +| 0.6.0 | 2024-04-10 | [30586](https://github.com/airbytehq/airbyte/pull/30586) | Add `attribution_windows`,`action_report_time` as optional configurable params | +| 0.5.0 | 2024-03-19 | [36267](https://github.com/airbytehq/airbyte/pull/36267) | Pin airbyte-cdk version to `^0` | | 0.4.0 | 2024-02-27 | [35660](https://github.com/airbytehq/airbyte/pull/35660) | Add new fields to streams `ads`, `adsquads`, `creatives`, and `media` | | 0.3.2 | 2024-02-12 | [35171](https://github.com/airbytehq/airbyte/pull/35171) | Manage dependencies with Poetry. | | 0.3.0 | 2023-05-22 | [26358](https://github.com/airbytehq/airbyte/pull/26358) | Remove deprecated authSpecification in favour of advancedAuth | diff --git a/docs/integrations/sources/stripe.md b/docs/integrations/sources/stripe.md index 2e5bb5e957c38..38305beb7b821 100644 --- a/docs/integrations/sources/stripe.md +++ b/docs/integrations/sources/stripe.md @@ -222,8 +222,12 @@ Each record is marked with `is_deleted` flag when the appropriate event happens ## Changelog | Version | Date | Pull Request | Subject | -| :------ | :--------- | :-------------------------------------------------------- | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| 5.2.4 | 2024-02-12 | [35137](https://github.com/airbytehq/airbyte/pull/35137) | Fix license in `pyproject.toml` | +|:--------|:-----------|:----------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 5.3.3 | 2024-04-11 | [37001](https://github.com/airbytehq/airbyte/pull/37001) | Update airbyte-cdk to flush print buffer for every message | +| 5.3.2 | 2024-04-11 | [36964](https://github.com/airbytehq/airbyte/pull/36964) | Update CDK version to fix breaking change before another devs work on it | +| 5.3.1 | 2024-04-10 | [36960](https://github.com/airbytehq/airbyte/pull/36960) | Remove unused imports | +| 5.3.0 | 2024-03-12 | [35978](https://github.com/airbytehq/airbyte/pull/35978) | Upgrade CDK to start emitting record counts with state and full refresh state | +| 5.2.4 | 2024-02-12 | [35137](https://github.com/airbytehq/airbyte/pull/35137) | Fix license in `pyproject.toml` | | 5.2.3 | 2024-02-09 | [35068](https://github.com/airbytehq/airbyte/pull/35068) | Manage dependencies with Poetry. | | 5.2.2 | 2024-01-31 | [34619](https://github.com/airbytehq/airbyte/pull/34619) | Events stream concurrent on incremental syncs | | 5.2.1 | 2024-01-18 | [34495](https://github.com/airbytehq/airbyte/pull/34495) | Fix deadlock issue | diff --git a/docs/integrations/sources/surveycto.md b/docs/integrations/sources/surveycto.md index 70a373ad86179..cc680b3b2b498 100644 --- a/docs/integrations/sources/surveycto.md +++ b/docs/integrations/sources/surveycto.md @@ -48,6 +48,7 @@ The SurveyCTO source connector supports the following streams: ## Changelog | Version | Date | Pull Request | Subject | +|---------|------|--------------|---------| | 0.1.2 | 2023-07-27 | [28512](https://github.com/airbytehq/airbyte/pull/28512) | Added Check Connection | | 0.1.1 | 2023-04-25 | [24784](https://github.com/airbytehq/airbyte/pull/24784) | Fix incremental sync | | 0.1.0 | 2022-11-16 | [19371](https://github.com/airbytehq/airbyte/pull/19371) | SurveyCTO Source Connector | diff --git a/docs/integrations/sources/surveymonkey.md b/docs/integrations/sources/surveymonkey.md index 561df7c2dd404..e5909beac3833 100644 --- a/docs/integrations/sources/surveymonkey.md +++ b/docs/integrations/sources/surveymonkey.md @@ -47,11 +47,12 @@ Please read this [docs](https://developer.surveymonkey.com/api/v3/#getting-start ## Supported streams and sync modes -* [Surveys](https://developer.surveymonkey.com/api/v3/#surveys) \(Incremental\) -* [SurveyPages](https://developer.surveymonkey.com/api/v3/#surveys-id-pages) -* [SurveyQuestions](https://developer.surveymonkey.com/api/v3/#surveys-id-pages-id-questions) -* [SurveyResponses](https://developer.surveymonkey.com/api/v3/#survey-responses) \(Incremental\) -* [SurveyCollectors](https://developer.surveymonkey.com/api/v3/#api-endpoints-get-surveys-id-collectors) +* [Surveys](https://api.surveymonkey.com/v3/docs?shell#api-endpoints-get-surveys) \(Incremental\) +* [SurveyPages](https://api.surveymonkey.com/v3/docs?shell#api-endpoints-get-surveys-survey_id-pages) +* [SurveyQuestions](https://api.surveymonkey.com/v3/docs?shell#api-endpoints-get-surveys-survey_id-pages-page_id-questions) +* [SurveyResponses](https://api.surveymonkey.com/v3/docs?shell#api-endpoints-get-surveys-id-responses-bulk) \(Incremental\) +* [SurveyCollectors](https://api.surveymonkey.com/v3/docs?shell#api-endpoints-get-surveys-survey_id-collectors) +* [Collectors](https://api.surveymonkey.com/v3/docs?shell#api-endpoints-get-collectors-collector_id-) ### Performance considerations @@ -66,8 +67,9 @@ To cover more data from this source we use caching. | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:---------------------------------------------------------------------------------| -| 0.2.4 | 2024-02-12 | [35168](https://github.com/airbytehq/airbyte/pull/35168) | Manage dependencies with Poetry. | -| 0.2.3 | 2023-10-19 | [31599](https://github.com/airbytehq/airbyte/pull/31599) | Base image migration: remove Dockerfile and use the python-connector-base image | +| 0.3.0 | 2024-02-22 | [35561](https://github.com/airbytehq/airbyte/pull/35561) | Migrate connector to low-code | +| 0.2.4 | 2024-02-12 | [35168](https://github.com/airbytehq/airbyte/pull/35168) | Manage dependencies with Poetry | +| 0.2.3 | 2023-10-19 | [31599](https://github.com/airbytehq/airbyte/pull/31599) | Base image migration: remove Dockerfile and use the python-connector-base image | | 0.2.2 | 2023-05-12 | [26024](https://github.com/airbytehq/airbyte/pull/26024) | Fix dependencies conflict | | 0.2.1 | 2023-04-27 | [25109](https://github.com/airbytehq/airbyte/pull/25109) | Fix add missing params to stream `SurveyResponses` | | 0.2.0 | 2023-04-18 | [23721](https://github.com/airbytehq/airbyte/pull/23721) | Add `SurveyCollectors` and `Collectors` stream | @@ -76,7 +78,7 @@ To cover more data from this source we use caching. | 0.1.14 | 2023-01-27 | [22024](https://github.com/airbytehq/airbyte/pull/22024) | Set `AvailabilityStrategy` for streams explicitly to `None` | | 0.1.13 | 2022-11-29 | [19868](https://github.com/airbytehq/airbyte/pull/19868) | Fix OAuth flow urls | | 0.1.12 | 2022-10-13 | [17964](https://github.com/airbytehq/airbyte/pull/17964) | Add OAuth for Eu and Ca | -| 0.1.11 | 2022-09-28 | [17326](https://github.com/airbytehq/airbyte/pull/17326) | Migrate to per-stream states. | +| 0.1.11 | 2022-09-28 | [17326](https://github.com/airbytehq/airbyte/pull/17326) | Migrate to per-stream states | | 0.1.10 | 2022-09-14 | [16706](https://github.com/airbytehq/airbyte/pull/16706) | Fix 404 error when handling nonexistent surveys | | 0.1.9 | 2022-07-28 | [13046](https://github.com/airbytehq/airbyte/pull/14998) | Fix state for response stream, fixed backoff behaviour, added unittest | | 0.1.8 | 2022-05-20 | [13046](https://github.com/airbytehq/airbyte/pull/13046) | Fix incremental streams | diff --git a/docs/integrations/sources/talkdesk-explore.md b/docs/integrations/sources/talkdesk-explore.md index 74b3f89df4abd..dd40b48455f8e 100644 --- a/docs/integrations/sources/talkdesk-explore.md +++ b/docs/integrations/sources/talkdesk-explore.md @@ -64,5 +64,6 @@ Please refer to the [getting started with the API](https://docs.talkdesk.com/doc ## Changelog | Version | Date | Pull Request | Subject | +|---------|------|--------------|---------| | 0.1.0 | 2022-02-07 | | New Source: Talkdesk Explore | :--- | :--- | :--- | :--- | diff --git a/docs/integrations/sources/tiktok-marketing.md b/docs/integrations/sources/tiktok-marketing.md index 574c081d14211..efdf5703ad050 100644 --- a/docs/integrations/sources/tiktok-marketing.md +++ b/docs/integrations/sources/tiktok-marketing.md @@ -123,7 +123,8 @@ The connector is restricted by [requests limitation](https://business-api.tiktok | Version | Date | Pull Request | Subject | |:--------|:-----------| :------------------------------------------------------- |:------------------------------------------------------------------------------------------------------------| -| 3.9.3 | 2024-02-12 | [35161](https://github.com/airbytehq/airbyte/pull/35161) | Manage dependencies with Poetry. | +| 3.9.4 | 2024-03-20 | [36302](https://github.com/airbytehq/airbyte/pull/36302) | Don't extract state from the latest record if stream doesn't have a cursor_field | +| 3.9.3 | 2024-02-12 | [35161](https://github.com/airbytehq/airbyte/pull/35161) | Manage dependencies with Poetry. | | 3.9.2 | 2023-11-02 | [32091](https://github.com/airbytehq/airbyte/pull/32091) | Fix incremental syncs; update docs; fix field type of `preview_url_expire_time` to `date-time`. | | 3.9.1 | 2023-10-25 | [31812](https://github.com/airbytehq/airbyte/pull/31812) | Update `support level` in `metadata`, removed duplicated `tracking_pixel_id` field from `Ads` stream schema | | 3.9.0 | 2023-10-23 | [31623](https://github.com/airbytehq/airbyte/pull/31623) | Add AdsAudienceReportsByProvince stream and expand base report metrics | diff --git a/docs/integrations/sources/twilio.md b/docs/integrations/sources/twilio.md index c337bf8ed1939..a1cc111f36b1e 100644 --- a/docs/integrations/sources/twilio.md +++ b/docs/integrations/sources/twilio.md @@ -95,6 +95,7 @@ For more information, see [the Twilio docs for rate limitations](https://support | Version | Date | Pull Request | Subject | |:--------|:-----------|:----------------------------------------------------------|:--------------------------------------------------------------------------------------------------------| +| 0.11.0 | 2024-03-19 | [36267](https://github.com/airbytehq/airbyte/pull/36267) | Pin airbyte-cdk version to `^0` | | 0.10.2 | 2024-02-12 | [35153](https://github.com/airbytehq/airbyte/pull/35153) | Manage dependencies with Poetry. | | 0.10.1 | 2023-11-21 | [32718](https://github.com/airbytehq/airbyte/pull/32718) | Base image migration: remove Dockerfile and use the python-connector-base image | | 0.10.0 | 2023-07-28 | [27323](https://github.com/airbytehq/airbyte/pull/27323) | Add new stream `Step` | diff --git a/docs/integrations/sources/typeform.md b/docs/integrations/sources/typeform.md index 826e822190f07..8b76ad7d85e99 100644 --- a/docs/integrations/sources/typeform.md +++ b/docs/integrations/sources/typeform.md @@ -90,7 +90,8 @@ API rate limits \(2 requests per second\): [https://developer.typeform.com/get-s | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:------------------------------------------------------------------------------------------------| -| 1.2.5 | 2024-02-12 | [35152](https://github.com/airbytehq/airbyte/pull/35152) | Manage dependencies with Poetry. | +| 1.2.6 | 2024-03-13 | [36164](https://github.com/airbytehq/airbyte/pull/36164) | Unpin CDK version | +| 1.2.5 | 2024-02-12 | [35152](https://github.com/airbytehq/airbyte/pull/35152) | Manage dependencies with Poetry. | | 1.2.4 | 2024-01-24 | [34484](https://github.com/airbytehq/airbyte/pull/34484) | Fix pagination stop condition | | 1.2.3 | 2024-01-11 | [34145](https://github.com/airbytehq/airbyte/pull/34145) | prepare for airbyte-lib | | 1.2.2 | 2023-12-12 | [33345](https://github.com/airbytehq/airbyte/pull/33345) | Fix single use refresh token authentication | diff --git a/docs/integrations/sources/vitally.md b/docs/integrations/sources/vitally.md index 49f7b16a5fa2f..12867bdf3b5c6 100644 --- a/docs/integrations/sources/vitally.md +++ b/docs/integrations/sources/vitally.md @@ -1,4 +1,4 @@ -# Vittally +# Vitally ## Sync overview diff --git a/docs/integrations/sources/zendesk-chat.md b/docs/integrations/sources/zendesk-chat.md index 1baf884155190..ef641f77cdf7a 100644 --- a/docs/integrations/sources/zendesk-chat.md +++ b/docs/integrations/sources/zendesk-chat.md @@ -80,6 +80,7 @@ The connector is restricted by Zendesk's [requests limitation](https://developer | Version | Date | Pull Request | Subject | | :------ | :--------- | :------------------------------------------------------- | :--------------------------------------------------------------------------------------------------------------- | +| 0.3.0 | 2024-03-07 | [35867](https://github.com/airbytehq/airbyte/pull/35867) | Migrated to `YamlDeclarativeSource (Low-code)` Airbyte CDK | | 0.2.2 | 2024-02-12 | [35185](https://github.com/airbytehq/airbyte/pull/35185) | Manage dependencies with Poetry. | | 0.2.1 | 2023-10-20 | [31643](https://github.com/airbytehq/airbyte/pull/31643) | Upgrade base image to airbyte/python-connector-base:1.1.0 | | 0.2.0 | 2023-10-11 | [30526](https://github.com/airbytehq/airbyte/pull/30526) | Use the python connector base image, remove dockerfile and implement build_customization.py | diff --git a/docs/integrations/sources/zendesk-support.md b/docs/integrations/sources/zendesk-support.md index c955743f36136..e1edec292069d 100644 --- a/docs/integrations/sources/zendesk-support.md +++ b/docs/integrations/sources/zendesk-support.md @@ -156,92 +156,93 @@ The Zendesk connector ideally should not run into Zendesk API limitations under ## Changelog -| Version | Date | Pull Request | Subject | -| :------- | :--------- | :------------------------------------------------------- | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| 2.2.8 | 2024-02-09 | [35083](https://github.com/airbytehq/airbyte/pull/35083) | Manage dependencies with Poetry. | -| `2.2.7` | 2024-02-05 | [34840](https://github.com/airbytehq/airbyte/pull/34840) | Fix missing fields in schema | -| `2.2.6` | 2024-01-11 | [34064](https://github.com/airbytehq/airbyte/pull/34064) | Skip 504 Error for stream `Ticket Audits` | -| `2.2.5` | 2024-01-08 | [34010](https://github.com/airbytehq/airbyte/pull/34010) | prepare for airbyte-lib | -| `2.2.4` | 2023-12-20 | [33680](https://github.com/airbytehq/airbyte/pull/33680) | Fix pagination issue for streams related to incremental export sync | -| `2.2.3` | 2023-12-14 | [33435](https://github.com/airbytehq/airbyte/pull/33435) | Fix 504 Error for stream Ticket Audits | -| `2.2.2` | 2023-12-01 | [33012](https://github.com/airbytehq/airbyte/pull/33012) | Increase number of retries for backoff policy to 10 | -| `2.2.1` | 2023-11-10 | [32440](https://github.com/airbytehq/airbyte/pull/32440) | Made refactoring to improve code maintainability | -| `2.2.0` | 2023-10-31 | [31999](https://github.com/airbytehq/airbyte/pull/31999) | Extended the `CustomRoles` stream schema | -| `2.1.1` | 2023-10-23 | [31702](https://github.com/airbytehq/airbyte/pull/31702) | Base image migration: remove Dockerfile and use the python-connector-base image | -| `2.1.0` | 2023-10-19 | [31606](https://github.com/airbytehq/airbyte/pull/31606) | Added new field `reply_time_in_seconds` to the `Ticket Metrics` stream schema | -| `2.0.0` | 2023-09-15 | [30440](https://github.com/airbytehq/airbyte/pull/30440) | Remove stream `Deleted Tickets` | -| `1.7.0` | 2023-09-11 | [30259](https://github.com/airbytehq/airbyte/pull/30259) | Add stream `Deleted Tickets` | -| `1.6.0` | 2023-09-09 | [30168](https://github.com/airbytehq/airbyte/pull/30168) | Make `start_date` field optional | -| `1.5.1` | 2023-09-05 | [30142](https://github.com/airbytehq/airbyte/pull/30142) | Handle non-JSON Response | -| `1.5.0` | 2023-09-04 | [30138](https://github.com/airbytehq/airbyte/pull/30138) | Add new Streams: `Article Votes`, `Article Comments`, `Article Comment Votes` | -| `1.4.0` | 2023-09-04 | [30134](https://github.com/airbytehq/airbyte/pull/30134) | Add incremental support for streams: `custom Roles`, `Schedules`, `SLA Policies` | -| `1.3.0` | 2023-08-30 | [30031](https://github.com/airbytehq/airbyte/pull/30031) | Add new streams: `Articles`, `Organization Fields` | -| `1.2.2` | 2023-08-30 | [29998](https://github.com/airbytehq/airbyte/pull/29998) | Fix typo in stream `AttributeDefinitions`: field condition | -| `1.2.1` | 2023-08-30 | [29991](https://github.com/airbytehq/airbyte/pull/29991) | Remove Custom availability strategy | -| `1.2.0` | 2023-08-29 | [29940](https://github.com/airbytehq/airbyte/pull/29940) | Add undeclared fields to schemas | -| `1.1.1` | 2023-08-29 | [29904](https://github.com/airbytehq/airbyte/pull/29904) | make `Organizations` stream incremental | -| `1.1.0` | 2023-08-28 | [29891](https://github.com/airbytehq/airbyte/pull/29891) | Add stream `UserFields` | -| `1.0.0` | 2023-07-27 | [28774](https://github.com/airbytehq/airbyte/pull/28774) | fix retry logic & update cursor for `Tickets` stream | -| `0.11.0` | 2023-08-10 | [27208](https://github.com/airbytehq/airbyte/pull/27208) | Add stream `Topics` | -| `0.10.7` | 2023-08-09 | [29256](https://github.com/airbytehq/airbyte/pull/29256) | Update tooltip descriptions in spec | -| `0.10.6` | 2023-08-04 | [29031](https://github.com/airbytehq/airbyte/pull/29031) | Reverted `advancedAuth` spec changes | -| `0.10.5` | 2023-08-01 | [28910](https://github.com/airbytehq/airbyte/pull/28910) | Updated `advancedAuth` broken references | -| `0.10.4` | 2023-07-25 | [28397](https://github.com/airbytehq/airbyte/pull/28397) | Handle 404 Error | -| `0.10.3` | 2023-07-24 | [28612](https://github.com/airbytehq/airbyte/pull/28612) | Fix pagination for stream `TicketMetricEvents` | -| `0.10.2` | 2023-07-19 | [28487](https://github.com/airbytehq/airbyte/pull/28487) | Remove extra page from params | -| `0.10.1` | 2023-07-10 | [28096](https://github.com/airbytehq/airbyte/pull/28096) | Replace `offset` pagination with `cursor` pagination | -| `0.10.0` | 2023-07-06 | [27991](https://github.com/airbytehq/airbyte/pull/27991) | Add streams: `PostVotes`, `PostCommentVotes` | -| `0.9.0` | 2023-07-05 | [27961](https://github.com/airbytehq/airbyte/pull/27961) | Add stream: `Post Comments` | -| `0.8.1` | 2023-06-27 | [27765](https://github.com/airbytehq/airbyte/pull/27765) | Bugfix: Nonetype error while syncing more then 100000 organizations | -| `0.8.0` | 2023-06-09 | [27156](https://github.com/airbytehq/airbyte/pull/27156) | Add stream `Posts` | -| `0.7.0` | 2023-06-27 | [27436](https://github.com/airbytehq/airbyte/pull/27436) | Add Ticket Skips stream | -| `0.6.0` | 2023-06-27 | [27450](https://github.com/airbytehq/airbyte/pull/27450) | Add Skill Based Routing streams | -| `0.5.0` | 2023-06-26 | [27735](https://github.com/airbytehq/airbyte/pull/27735) | License Update: Elv2 stream stream | -| `0.4.0` | 2023-06-16 | [27431](https://github.com/airbytehq/airbyte/pull/27431) | Add Organization Memberships stream | -| `0.3.1` | 2023-06-02 | [26945](https://github.com/airbytehq/airbyte/pull/26945) | Make `Ticket Metrics` stream to use cursor pagination | -| `0.3.0` | 2023-05-23 | [26347](https://github.com/airbytehq/airbyte/pull/26347) | Add stream `Audit Logs` logs` | -| `0.2.30` | 2023-05-23 | [26414](https://github.com/airbytehq/airbyte/pull/26414) | Added missing handlers when `empty json` or `JSONDecodeError` is received | -| `0.2.29` | 2023-04-18 | [25214](https://github.com/airbytehq/airbyte/pull/25214) | Add missing fields to `Tickets` stream | -| `0.2.28` | 2023-03-21 | [24053](https://github.com/airbytehq/airbyte/pull/24053) | Fix stream `sla_policies` schema data type error (events.value) | -| `0.2.27` | 2023-03-22 | [22817](https://github.com/airbytehq/airbyte/pull/22817) | Specified date formatting in specification | -| `0.2.26` | 2023-03-20 | [24252](https://github.com/airbytehq/airbyte/pull/24252) | Handle invalid `start_date` when checking connection | -| `0.2.25` | 2023-02-28 | [22308](https://github.com/airbytehq/airbyte/pull/22308) | Add `AvailabilityStrategy` for all streams | -| `0.2.24` | 2023-02-17 | [23246](https://github.com/airbytehq/airbyte/pull/23246) | Handle `StartTimeTooRecent` error for Tickets stream | -| `0.2.23` | 2023-02-15 | [23035](https://github.com/airbytehq/airbyte/pull/23035) | Handle 403 Error | -| `0.2.22` | 2023-02-14 | [22483](https://github.com/airbytehq/airbyte/pull/22483) | Fix test; handle 400 error | -| `0.2.21` | 2023-01-27 | [22027](https://github.com/airbytehq/airbyte/pull/22027) | Set `AvailabilityStrategy` for streams explicitly to `None` | -| `0.2.20` | 2022-12-28 | [20900](https://github.com/airbytehq/airbyte/pull/20900) | Remove synchronous time.sleep, add logging, reduce backoff time | -| `0.2.19` | 2022-12-09 | [19967](https://github.com/airbytehq/airbyte/pull/19967) | Fix reading response for more than 100k records | -| `0.2.18` | 2022-11-29 | [19432](https://github.com/airbytehq/airbyte/pull/19432) | Revert changes from version 0.2.15, use a test read instead | -| `0.2.17` | 2022-11-24 | [19792](https://github.com/airbytehq/airbyte/pull/19792) | Transform `ticket_comments.via` "-" to null | -| `0.2.16` | 2022-09-28 | [17326](https://github.com/airbytehq/airbyte/pull/17326) | Migrate to per-stream states. | -| `0.2.15` | 2022-08-03 | [15233](https://github.com/airbytehq/airbyte/pull/15233) | Added `subscription plan` check on `streams discovery` step to remove streams that are not accessible for fetch due to subscription plan restrictions | -| `0.2.14` | 2022-07-27 | [15036](https://github.com/airbytehq/airbyte/pull/15036) | Convert `ticket_audits.previous_value` values to string | -| `0.2.13` | 2022-07-21 | [14829](https://github.com/airbytehq/airbyte/pull/14829) | Convert `tickets.custom_fields` values to string | -| `0.2.12` | 2022-06-30 | [14304](https://github.com/airbytehq/airbyte/pull/14304) | Fixed Pagination for Group Membership stream | -| `0.2.11` | 2022-06-24 | [14112](https://github.com/airbytehq/airbyte/pull/14112) | Fixed "Retry-After" non integer value | -| `0.2.10` | 2022-06-14 | [13757](https://github.com/airbytehq/airbyte/pull/13757) | Fixed the bug with `TicketMetrics` stream, HTTP Error 429, caused by lots of API requests | -| `0.2.9` | 2022-05-27 | [13261](https://github.com/airbytehq/airbyte/pull/13261) | Bugfix for the unhandled [ChunkedEncodingError](https://github.com/airbytehq/airbyte/issues/12591) and [ConnectionError](https://github.com/airbytehq/airbyte/issues/12155) | -| `0.2.8` | 2022-05-20 | [13055](https://github.com/airbytehq/airbyte/pull/13055) | Fixed minor issue for stream `ticket_audits` schema | -| `0.2.7` | 2022-04-27 | [12335](https://github.com/airbytehq/airbyte/pull/12335) | Adding fixtures to mock time.sleep for connectors that explicitly sleep | +| Version | Date | Pull Request | Subject | +| :------- | :--------- | :------------------------------------------------------- | :-------------------------------------------------------------------------------- | +| `2.3.0` | 2024-03-26 | [36403](https://github.com/airbytehq/airbyte/pull/36403) | Unpin CDK version, add record counts to state messages | +| `2.2.8` | 2024-02-09 | [35083](https://github.com/airbytehq/airbyte/pull/35083) | Manage dependencies with Poetry. | +| `2.2.7` | 2024-02-05 | [34840](https://github.com/airbytehq/airbyte/pull/34840) | Fix missing fields in schema | +| `2.2.6` | 2024-01-11 | [34064](https://github.com/airbytehq/airbyte/pull/34064) | Skip 504 Error for stream `Ticket Audits` | +| `2.2.5` | 2024-01-08 | [34010](https://github.com/airbytehq/airbyte/pull/34010) | prepare for airbyte-lib | +| `2.2.4` | 2023-12-20 | [33680](https://github.com/airbytehq/airbyte/pull/33680) | Fix pagination issue for streams related to incremental export sync | +| `2.2.3` | 2023-12-14 | [33435](https://github.com/airbytehq/airbyte/pull/33435) | Fix 504 Error for stream Ticket Audits | +| `2.2.2` | 2023-12-01 | [33012](https://github.com/airbytehq/airbyte/pull/33012) | Increase number of retries for backoff policy to 10 | +| `2.2.1` | 2023-11-10 | [32440](https://github.com/airbytehq/airbyte/pull/32440) | Made refactoring to improve code maintainability | +| `2.2.0` | 2023-10-31 | [31999](https://github.com/airbytehq/airbyte/pull/31999) | Extended the `CustomRoles` stream schema | +| `2.1.1` | 2023-10-23 | [31702](https://github.com/airbytehq/airbyte/pull/31702) | Base image migration: remove Dockerfile and use the python-connector-base image | +| `2.1.0` | 2023-10-19 | [31606](https://github.com/airbytehq/airbyte/pull/31606) | Added new field `reply_time_in_seconds` to the `Ticket Metrics` stream schema | +| `2.0.0` | 2023-09-15 | [30440](https://github.com/airbytehq/airbyte/pull/30440) | Remove stream `Deleted Tickets` | +| `1.7.0` | 2023-09-11 | [30259](https://github.com/airbytehq/airbyte/pull/30259) | Add stream `Deleted Tickets` | +| `1.6.0` | 2023-09-09 | [30168](https://github.com/airbytehq/airbyte/pull/30168) | Make `start_date` field optional | +| `1.5.1` | 2023-09-05 | [30142](https://github.com/airbytehq/airbyte/pull/30142) | Handle non-JSON Response | +| `1.5.0` | 2023-09-04 | [30138](https://github.com/airbytehq/airbyte/pull/30138) | Add new Streams: `Article Votes`, `Article Comments`, `Article Comment Votes` | +| `1.4.0` | 2023-09-04 | [30134](https://github.com/airbytehq/airbyte/pull/30134) | Add incremental support for streams: `custom Roles`, `Schedules`, `SLA Policies` | +| `1.3.0` | 2023-08-30 | [30031](https://github.com/airbytehq/airbyte/pull/30031) | Add new streams: `Articles`, `Organization Fields` | +| `1.2.2` | 2023-08-30 | [29998](https://github.com/airbytehq/airbyte/pull/29998) | Fix typo in stream `AttributeDefinitions`: field condition | +| `1.2.1` | 2023-08-30 | [29991](https://github.com/airbytehq/airbyte/pull/29991) | Remove Custom availability strategy | +| `1.2.0` | 2023-08-29 | [29940](https://github.com/airbytehq/airbyte/pull/29940) | Add undeclared fields to schemas | +| `1.1.1` | 2023-08-29 | [29904](https://github.com/airbytehq/airbyte/pull/29904) | make `Organizations` stream incremental | +| `1.1.0` | 2023-08-28 | [29891](https://github.com/airbytehq/airbyte/pull/29891) | Add stream `UserFields` | +| `1.0.0` | 2023-07-27 | [28774](https://github.com/airbytehq/airbyte/pull/28774) | fix retry logic & update cursor for `Tickets` stream | +| `0.11.0` | 2023-08-10 | [27208](https://github.com/airbytehq/airbyte/pull/27208) | Add stream `Topics` | +| `0.10.7` | 2023-08-09 | [29256](https://github.com/airbytehq/airbyte/pull/29256) | Update tooltip descriptions in spec | +| `0.10.6` | 2023-08-04 | [29031](https://github.com/airbytehq/airbyte/pull/29031) | Reverted `advancedAuth` spec changes | +| `0.10.5` | 2023-08-01 | [28910](https://github.com/airbytehq/airbyte/pull/28910) | Updated `advancedAuth` broken references | +| `0.10.4` | 2023-07-25 | [28397](https://github.com/airbytehq/airbyte/pull/28397) | Handle 404 Error | +| `0.10.3` | 2023-07-24 | [28612](https://github.com/airbytehq/airbyte/pull/28612) | Fix pagination for stream `TicketMetricEvents` | +| `0.10.2` | 2023-07-19 | [28487](https://github.com/airbytehq/airbyte/pull/28487) | Remove extra page from params | +| `0.10.1` | 2023-07-10 | [28096](https://github.com/airbytehq/airbyte/pull/28096) | Replace `offset` pagination with `cursor` pagination | +| `0.10.0` | 2023-07-06 | [27991](https://github.com/airbytehq/airbyte/pull/27991) | Add streams: `PostVotes`, `PostCommentVotes` | +| `0.9.0` | 2023-07-05 | [27961](https://github.com/airbytehq/airbyte/pull/27961) | Add stream: `Post Comments` | +| `0.8.1` | 2023-06-27 | [27765](https://github.com/airbytehq/airbyte/pull/27765) | Bugfix: Nonetype error while syncing more then 100000 organizations | +| `0.8.0` | 2023-06-09 | [27156](https://github.com/airbytehq/airbyte/pull/27156) | Add stream `Posts` | +| `0.7.0` | 2023-06-27 | [27436](https://github.com/airbytehq/airbyte/pull/27436) | Add Ticket Skips stream | +| `0.6.0` | 2023-06-27 | [27450](https://github.com/airbytehq/airbyte/pull/27450) | Add Skill Based Routing streams | +| `0.5.0` | 2023-06-26 | [27735](https://github.com/airbytehq/airbyte/pull/27735) | License Update: Elv2 stream stream | +| `0.4.0` | 2023-06-16 | [27431](https://github.com/airbytehq/airbyte/pull/27431) | Add Organization Memberships stream | +| `0.3.1` | 2023-06-02 | [26945](https://github.com/airbytehq/airbyte/pull/26945) | Make `Ticket Metrics` stream to use cursor pagination | +| `0.3.0` | 2023-05-23 | [26347](https://github.com/airbytehq/airbyte/pull/26347) | Add stream `Audit Logs` logs` | +| `0.2.30` | 2023-05-23 | [26414](https://github.com/airbytehq/airbyte/pull/26414) | Added missing handlers when `empty json` or `JSONDecodeError` is received | +| `0.2.29` | 2023-04-18 | [25214](https://github.com/airbytehq/airbyte/pull/25214) | Add missing fields to `Tickets` stream | +| `0.2.28` | 2023-03-21 | [24053](https://github.com/airbytehq/airbyte/pull/24053) | Fix stream `sla_policies` schema data type error (events.value) | +| `0.2.27` | 2023-03-22 | [22817](https://github.com/airbytehq/airbyte/pull/22817) | Specified date formatting in specification | +| `0.2.26` | 2023-03-20 | [24252](https://github.com/airbytehq/airbyte/pull/24252) | Handle invalid `start_date` when checking connection | +| `0.2.25` | 2023-02-28 | [22308](https://github.com/airbytehq/airbyte/pull/22308) | Add `AvailabilityStrategy` for all streams | +| `0.2.24` | 2023-02-17 | [23246](https://github.com/airbytehq/airbyte/pull/23246) | Handle `StartTimeTooRecent` error for Tickets stream | +| `0.2.23` | 2023-02-15 | [23035](https://github.com/airbytehq/airbyte/pull/23035) | Handle 403 Error | +| `0.2.22` | 2023-02-14 | [22483](https://github.com/airbytehq/airbyte/pull/22483) | Fix test; handle 400 error | +| `0.2.21` | 2023-01-27 | [22027](https://github.com/airbytehq/airbyte/pull/22027) | Set `AvailabilityStrategy` for streams explicitly to `None` | +| `0.2.20` | 2022-12-28 | [20900](https://github.com/airbytehq/airbyte/pull/20900) | Remove synchronous time.sleep, add logging, reduce backoff time | +| `0.2.19` | 2022-12-09 | [19967](https://github.com/airbytehq/airbyte/pull/19967) | Fix reading response for more than 100k records | +| `0.2.18` | 2022-11-29 | [19432](https://github.com/airbytehq/airbyte/pull/19432) | Revert changes from version 0.2.15, use a test read instead | +| `0.2.17` | 2022-11-24 | [19792](https://github.com/airbytehq/airbyte/pull/19792) | Transform `ticket_comments.via` "-" to null | +| `0.2.16` | 2022-09-28 | [17326](https://github.com/airbytehq/airbyte/pull/17326) | Migrate to per-stream states. | +| `0.2.15` | 2022-08-03 | [15233](https://github.com/airbytehq/airbyte/pull/15233) | Added `subscription plan` check on `streams discovery` step to remove streams that are not accessible for fetch due to subscription plan restrictions | +| `0.2.14` | 2022-07-27 | [15036](https://github.com/airbytehq/airbyte/pull/15036) | Convert `ticket_audits.previous_value` values to string | +| `0.2.13` | 2022-07-21 | [14829](https://github.com/airbytehq/airbyte/pull/14829) | Convert `tickets.custom_fields` values to string | +| `0.2.12` | 2022-06-30 | [14304](https://github.com/airbytehq/airbyte/pull/14304) | Fixed Pagination for Group Membership stream | +| `0.2.11` | 2022-06-24 | [14112](https://github.com/airbytehq/airbyte/pull/14112) | Fixed "Retry-After" non integer value | +| `0.2.10` | 2022-06-14 | [13757](https://github.com/airbytehq/airbyte/pull/13757) | Fixed the bug with `TicketMetrics` stream, HTTP Error 429, caused by lots of API requests | +| `0.2.9` | 2022-05-27 | [13261](https://github.com/airbytehq/airbyte/pull/13261) | Bugfix for the unhandled [ChunkedEncodingError](https://github.com/airbytehq/airbyte/issues/12591) and [ConnectionError](https://github.com/airbytehq/airbyte/issues/12155) | +| `0.2.8` | 2022-05-20 | [13055](https://github.com/airbytehq/airbyte/pull/13055) | Fixed minor issue for stream `ticket_audits` schema | +| `0.2.7` | 2022-04-27 | [12335](https://github.com/airbytehq/airbyte/pull/12335) | Adding fixtures to mock time.sleep for connectors that explicitly sleep | | `0.2.6` | 2022-04-19 | [12122](https://github.com/airbytehq/airbyte/pull/12122) | Fixed the bug when only 100,000 Users are synced [11895](https://github.com/airbytehq/airbyte/issues/11895) and fixed bug when `start_date` is not used on user stream [12059](https://github.com/airbytehq/airbyte/issues/12059). | -| `0.2.5` | 2022-04-05 | [11727](https://github.com/airbytehq/airbyte/pull/11727) | Fixed the bug when state was not parsed correctly | -| `0.2.4` | 2022-04-04 | [11688](https://github.com/airbytehq/airbyte/pull/11688) | Small documentation corrections | -| `0.2.3` | 2022-03-23 | [11349](https://github.com/airbytehq/airbyte/pull/11349) | Fixed the bug when Tickets stream didn't return deleted records | -| `0.2.2` | 2022-03-17 | [11237](https://github.com/airbytehq/airbyte/pull/11237) | Fixed the bug when TicketComments stream didn't return all records | -| `0.2.1` | 2022-03-15 | [11162](https://github.com/airbytehq/airbyte/pull/11162) | Added support of OAuth2.0 authentication method | -| `0.2.0` | 2022-03-01 | [9456](https://github.com/airbytehq/airbyte/pull/9456) | Update source to use future requests | -| `0.1.12` | 2022-01-25 | [9785](https://github.com/airbytehq/airbyte/pull/9785) | Add additional log messages | -| `0.1.11` | 2021-12-21 | [8987](https://github.com/airbytehq/airbyte/pull/8987) | Update connector fields title/description | -| `0.1.9` | 2021-12-16 | [8616](https://github.com/airbytehq/airbyte/pull/8616) | Adds Brands, CustomRoles and Schedules streams | -| `0.1.8` | 2021-11-23 | [8050](https://github.com/airbytehq/airbyte/pull/8168) | Adds TicketMetricEvents stream | -| `0.1.7` | 2021-11-23 | [8058](https://github.com/airbytehq/airbyte/pull/8058) | Added support of AccessToken authentication | -| `0.1.6` | 2021-11-18 | [8050](https://github.com/airbytehq/airbyte/pull/8050) | Fix wrong types for schemas, add TypeTransformer | -| `0.1.5` | 2021-10-26 | [7679](https://github.com/airbytehq/airbyte/pull/7679) | Add ticket_id and ticket_comments | -| `0.1.4` | 2021-10-26 | [7377](https://github.com/airbytehq/airbyte/pull/7377) | Fix initially_assigned_at type in ticket metrics | -| `0.1.3` | 2021-10-17 | [7097](https://github.com/airbytehq/airbyte/pull/7097) | Corrected the connector's specification | -| `0.1.2` | 2021-10-16 | [6513](https://github.com/airbytehq/airbyte/pull/6513) | Fixed TicketComments stream | -| `0.1.1` | 2021-09-02 | [5787](https://github.com/airbytehq/airbyte/pull/5787) | Fixed incremental logic for the ticket_comments stream | -| `0.1.0` | 2021-07-21 | [4861](https://github.com/airbytehq/airbyte/pull/4861) | Created CDK native zendesk connector | +| `0.2.5` | 2022-04-05 | [11727](https://github.com/airbytehq/airbyte/pull/11727) | Fixed the bug when state was not parsed correctly | +| `0.2.4` | 2022-04-04 | [11688](https://github.com/airbytehq/airbyte/pull/11688) | Small documentation corrections | +| `0.2.3` | 2022-03-23 | [11349](https://github.com/airbytehq/airbyte/pull/11349) | Fixed the bug when Tickets stream didn't return deleted records | +| `0.2.2` | 2022-03-17 | [11237](https://github.com/airbytehq/airbyte/pull/11237) | Fixed the bug when TicketComments stream didn't return all records | +| `0.2.1` | 2022-03-15 | [11162](https://github.com/airbytehq/airbyte/pull/11162) | Added support of OAuth2.0 authentication method | +| `0.2.0` | 2022-03-01 | [9456](https://github.com/airbytehq/airbyte/pull/9456) | Update source to use future requests | +| `0.1.12` | 2022-01-25 | [9785](https://github.com/airbytehq/airbyte/pull/9785) | Add additional log messages | +| `0.1.11` | 2021-12-21 | [8987](https://github.com/airbytehq/airbyte/pull/8987) | Update connector fields title/description | +| `0.1.9` | 2021-12-16 | [8616](https://github.com/airbytehq/airbyte/pull/8616) | Adds Brands, CustomRoles and Schedules streams | +| `0.1.8` | 2021-11-23 | [8050](https://github.com/airbytehq/airbyte/pull/8168) | Adds TicketMetricEvents stream | +| `0.1.7` | 2021-11-23 | [8058](https://github.com/airbytehq/airbyte/pull/8058) | Added support of AccessToken authentication | +| `0.1.6` | 2021-11-18 | [8050](https://github.com/airbytehq/airbyte/pull/8050) | Fix wrong types for schemas, add TypeTransformer | +| `0.1.5` | 2021-10-26 | [7679](https://github.com/airbytehq/airbyte/pull/7679) | Add ticket_id and ticket_comments | +| `0.1.4` | 2021-10-26 | [7377](https://github.com/airbytehq/airbyte/pull/7377) | Fix initially_assigned_at type in ticket metrics | +| `0.1.3` | 2021-10-17 | [7097](https://github.com/airbytehq/airbyte/pull/7097) | Corrected the connector's specification | +| `0.1.2` | 2021-10-16 | [6513](https://github.com/airbytehq/airbyte/pull/6513) | Fixed TicketComments stream | +| `0.1.1` | 2021-09-02 | [5787](https://github.com/airbytehq/airbyte/pull/5787) | Fixed incremental logic for the ticket_comments stream | +| `0.1.0` | 2021-07-21 | [4861](https://github.com/airbytehq/airbyte/pull/4861) | Created CDK native zendesk connector | diff --git a/docs/integrations/sources/zendesk-talk.md b/docs/integrations/sources/zendesk-talk.md index 0c544a84e8931..ee97814312266 100644 --- a/docs/integrations/sources/zendesk-talk.md +++ b/docs/integrations/sources/zendesk-talk.md @@ -74,6 +74,7 @@ The Zendesk connector should not run into Zendesk API limitations under normal u | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:----------------------------------------------------------------------------| +| 0.2.0 | 2024-03-25 | [36459](https://github.com/airbytehq/airbyte/pull/36459) | Unpin CDK version, add record counts in state messages | | 0.1.13 | 2024-03-04 | [35783](https://github.com/airbytehq/airbyte/pull/35783) | Change order of authentication methods in spec | | 0.1.12 | 2024-02-12 | [35156](https://github.com/airbytehq/airbyte/pull/35156) | Manage dependencies with Poetry. | | 0.1.11 | 2024-01-12 | [34204](https://github.com/airbytehq/airbyte/pull/34204) | Prepare for airbyte-lib | diff --git a/docs/integrations/sources/zenloop.md b/docs/integrations/sources/zenloop.md index 54cb310d8e9cc..6a678bfd25e29 100644 --- a/docs/integrations/sources/zenloop.md +++ b/docs/integrations/sources/zenloop.md @@ -69,16 +69,17 @@ The Zenloop connector should not run into Zenloop API limitations under normal u ## Changelog -| Version | Date | Pull Request | Subject | -|:--------|:-----------| :------------------------------------------------------- |:--------------------------------------------------------------------| -| 0.1.10 | 2023-06-29 | [27838](https://github.com/airbytehq/airbyte/pull/27838) | Update CDK version to avoid bug introduced during data feed release | -| 0.1.9 | 2023-06-28 | [27761](https://github.com/airbytehq/airbyte/pull/27761) | Update following state breaking changes | -| 0.1.8 | 2023-06-22 | [27243](https://github.com/airbytehq/airbyte/pull/27243) | Improving error message on state discrepancy | -| 0.1.7 | 2023-06-22 | [27243](https://github.com/airbytehq/airbyte/pull/27243) | State per partition (breaking change - require reset) | -| 0.1.6 | 2023-03-06 | [23231](https://github.com/airbytehq/airbyte/pull/23231) | Publish using low-code CDK Beta version | -| 0.1.5 | 2023-02-08 | [00000](https://github.com/airbytehq/airbyte/pull/00000) | Fix unhashable type in ZenloopSubstreamSlicer component | -| 0.1.4 | 2022-11-18 | [19624](https://github.com/airbytehq/airbyte/pull/19624) | Migrate to low code | -| 0.1.3 | 2022-09-28 | [17304](https://github.com/airbytehq/airbyte/pull/17304) | Migrate to per-stream states | -| 0.1.2 | 2022-08-22 | [15843](https://github.com/airbytehq/airbyte/pull/15843) | Adds Properties stream | -| 0.1.1 | 2021-10-26 | [8299](https://github.com/airbytehq/airbyte/pull/8299) | Fix missing seed files | -| 0.1.0 | 2021-10-26 | [7380](https://github.com/airbytehq/airbyte/pull/7380) | Initial Release | +| Version | Date | Pull Request | Subject | +|:--------|:-----------| :------------------------------------------------------- |:--------------------------------------------------------------------------------| +| 0.1.11 | 2024-04-10 | [36971](https://github.com/airbytehq/airbyte/pull/36971) | Use python-connector-base image, poetry, and update CDK version | +| 0.1.10 | 2023-06-29 | [27838](https://github.com/airbytehq/airbyte/pull/27838) | Update CDK version to avoid bug introduced during data feed release | +| 0.1.9 | 2023-06-28 | [27761](https://github.com/airbytehq/airbyte/pull/27761) | Update following state breaking changes | +| 0.1.8 | 2023-06-22 | [27243](https://github.com/airbytehq/airbyte/pull/27243) | Improving error message on state discrepancy | +| 0.1.7 | 2023-06-22 | [27243](https://github.com/airbytehq/airbyte/pull/27243) | State per partition (breaking change - require reset) | +| 0.1.6 | 2023-03-06 | [23231](https://github.com/airbytehq/airbyte/pull/23231) | Publish using low-code CDK Beta version | +| 0.1.5 | 2023-02-08 | [00000](https://github.com/airbytehq/airbyte/pull/00000) | Fix unhashable type in ZenloopSubstreamSlicer component | +| 0.1.4 | 2022-11-18 | [19624](https://github.com/airbytehq/airbyte/pull/19624) | Migrate to low code | +| 0.1.3 | 2022-09-28 | [17304](https://github.com/airbytehq/airbyte/pull/17304) | Migrate to per-stream states | +| 0.1.2 | 2022-08-22 | [15843](https://github.com/airbytehq/airbyte/pull/15843) | Adds Properties stream | +| 0.1.1 | 2021-10-26 | [8299](https://github.com/airbytehq/airbyte/pull/8299) | Fix missing seed files | +| 0.1.0 | 2021-10-26 | [7380](https://github.com/airbytehq/airbyte/pull/7380) | Initial Release | diff --git a/docs/integrations/sources/zoom-migrations.md b/docs/integrations/sources/zoom-migrations.md new file mode 100644 index 0000000000000..e334aefa166a4 --- /dev/null +++ b/docs/integrations/sources/zoom-migrations.md @@ -0,0 +1,57 @@ +# Zoom Migration Guide + +## Upgrading to 1.1.0 + +### Authentication + +As of September 8, 2023, Zoom has [deprecated JWT](https://developers.zoom.us/docs/internal-apps/jwt-faq/) authentication and now supports Oauth instead. + +### Creating a server-to-server OAuth app + +To successfully migrate, please use [Zoom's migration guide](https://developers.zoom.us/docs/internal-apps/jwt-app-migration/) to create a new server-to-server OAuth app and generate the necessary credentials. + +When creating the app, ensure you grant it access to the following scopes: + +- user:read:admin +- meeting:read:admin +- webinar:read:admin +- chat_channel:read:admin +- report:read:admin + +To successfully authenticate your connection in Airbyte, you will need to input the following OAuth credentials: + +- client_id +- client_secret +- account_id +- authorization_endpoint + +### Schema changes + +The type of the 'meeting_id' field in Meeting Registration Questions stream has been changed from "string" to "integer". Users with existing connections that are syncing data from this stream should refresh their schemas and reset their data. + +#### Refresh affected schemas and reset data + +1. Select **Connections** in the main nav bar. + 1. Select the connection affected by the update. +2. Select the **Replication** tab. + 1. Select **Refresh source schema**. + 2. Select **OK**. + +:::note +Any detected schema changes will be listed for your review. +::: + +3. Select **Save changes** at the bottom of the page. + 1. Ensure the **Reset affected streams** option is checked. + +:::note +Depending on destination type you may not be prompted to reset your data. +::: + +4. Select **Save connection**. + +:::note +This will reset the data in your destination and initiate a fresh sync. +::: + +For more information on resetting your data in Airbyte, see [this page](https://docs.airbyte.com/operator-guides/reset) diff --git a/docs/integrations/sources/zoom.md b/docs/integrations/sources/zoom.md index f7b9764239ae2..dcff151dedd6f 100644 --- a/docs/integrations/sources/zoom.md +++ b/docs/integrations/sources/zoom.md @@ -64,11 +64,11 @@ JWT Tokens are deprecated, only Server-to-Server works now. [link to Zoom](https ::: - ## Changelog -| Version | Date | Pull Request | Subject | -|:--------|:-----------|:---------------------------------------------------------| :--------------------------------------------------------------------- | -| 1.0.0 | 2023-7-28 | [25308](https://github.com/airbytehq/airbyte/pull/25308) | Replace JWT Auth methods with server-to-server Oauth | -| 0.1.1 | 2022-11-30 | [19939](https://github.com/airbytehq/airbyte/pull/19939) | Upgrade CDK version to fix bugs with SubStreamSlicer | -| 0.1.0 | 2022-10-25 | [18179](https://github.com/airbytehq/airbyte/pull/18179) | Initial Release | +| Version | Date | Pull Request | Subject | +|:--------|:-----------|:---------------------------------------------------------| :-----------------------------------------------------| +| 1.1.0 | 2024-02-22 | [35369](https://github.com/airbytehq/airbyte/pull/35369) | Publish S2S Oauth connector with fixed authenticator | +| 1.0.0 | 2023-7-28 | [25308](https://github.com/airbytehq/airbyte/pull/25308) | Replace JWT Auth methods with server-to-server Oauth | +| 0.1.1 | 2022-11-30 | [19939](https://github.com/airbytehq/airbyte/pull/19939) | Upgrade CDK version to fix bugs with SubStreamSlicer | +| 0.1.0 | 2022-10-25 | [18179](https://github.com/airbytehq/airbyte/pull/18179) | Initial Release | diff --git a/docs/operator-guides/telemetry.md b/docs/operator-guides/telemetry.md index 71352d7c8b474..813cedca9edd6 100644 --- a/docs/operator-guides/telemetry.md +++ b/docs/operator-guides/telemetry.md @@ -27,4 +27,20 @@ Also check our [privacy policy](https://airbyte.com/privacy-policy) for more det Server side telemetry collection can't be changed using Airbyte Cloud. - \ No newline at end of file + + When running [PyAirbyte](https://docs.airbyte.com/pyairbyte) for the first time on a new machine, you'll be informed that anonymous + usage data is collected, along with a link to this page for more information. + + Anonymous usage tracking ("telemetry") helps us understand how PyAirbyte is being used, + including which connectors are working well and which connectors are frequently failing. This helps + us to prioritize product improvements which benefit users of PyAirbyte as well as Airbyte Cloud, + OSS, and Enterprise. + + We will _never_ collect any information which could be considered PII (personally identifiable + information) or sensitive data. We _do not_ collect IP addresses, hostnames, or any other + information that could be used to identify you or your organization. + + You can opt-out of anonymous usage reporting by setting the environment variable `DO_NOT_TRACK` + to any value. + + diff --git a/docs/readme.md b/docs/readme.md index 708a6a7904300..039fea5dde2cc 100644 --- a/docs/readme.md +++ b/docs/readme.md @@ -1,10 +1,20 @@ --- displayed_sidebar: docs --- - # Welcome to Airbyte Docs +## What is Airbyte? + +Airbyte is an open-source data movement infrastructure for building extract and load (EL) data pipelines. It is designed for versatility, scalability, and ease-of-use.  + +There are three major components to know in Airbyte:  + +1. **The connector catalog** + * **350+ pre-built connectors**: Airbyte’s connector catalog comes “out-of-the-box” with over 350 pre-built connectors. These connectors can be used to start replicating data from a source to a destination in just a few minutes.  + * **No-Code Connector Builder**: You can easily extend Airbyte’s functionality to support your custom use cases through tools like the [No-Code Connector Builder](https://docs.airbyte.com/connector-development/connector-builder-ui/overview).  +2. **The platform:** Airbyte’s platform provides all the horizontal services required to configure and scale data movement operations, available as [cloud-managed](https://airbyte.com/product/airbyte-cloud) or [self-managed](https://airbyte.com/product/airbyte-enterprise). +3. **The user interface:** Airbyte features a UI, [**PyAirbyte**](https://docs.airbyte.com/pyairbyte) (Python library), [**API**](https://docs.airbyte.com/api-documentation), and [**Terraform Provider**](https://docs.airbyte.com/terraform-documentation) to integrate with your preferred tooling and approach to infrastructure management.  -Whether you are an Airbyte user or contributor, we have docs for you! +Airbyte is suitable for a wide range of data integration use cases, including AI data infrastructure and EL(T) workloads. Airbyte is also [embeddable](https://airbyte.com/product/powered-by-airbyte) within your own application or platform to power your product. ## For Airbyte Cloud users diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/openapi/config.yaml b/docs/reference/api/config.yaml similarity index 100% rename from airbyte-cdk/java/airbyte-cdk/dependencies/src/main/openapi/config.yaml rename to docs/reference/api/config.yaml diff --git a/docs/reference/api/rapidoc-api-docs.html b/docs/reference/api/rapidoc-api-docs.html index 5d867d94f803d..7faefd948b93a 100644 --- a/docs/reference/api/rapidoc-api-docs.html +++ b/docs/reference/api/rapidoc-api-docs.html @@ -5,7 +5,7 @@ - diff --git a/docs/release_notes/february_2024.md b/docs/release_notes/february_2024.md new file mode 100644 index 0000000000000..6c490d4c0f099 --- /dev/null +++ b/docs/release_notes/february_2024.md @@ -0,0 +1,22 @@ +# February 2024 +## airbyte v0.50.46 to v0.50.54 + +This page includes new features and improvements to the Airbyte Cloud and Airbyte Open Source platforms. + +## ✨ Highlights + +Airbyte migrated our [Postgres destination](https://github.com/airbytehq/airbyte/pull/35042) to the [Destinations V2](./upgrading_to_destinations_v2) framework. This enables you to map tables one-to-one with your source, experience better error handling, and deliver data incrementally. + +## Platform Releases + +- **Read-only Users** You can now enable read-only users in Airbyte Cloud (with Teams add-on) or Self-Managed Enterprise to administer read-only permissions in Airbyte. For access to this feature, reach out to our [Sales team](https://www.airbyte.com/company/talk-to-sales). + +- Our Slack and Email (for Cloud only) notifications have received a facelift! Additional contextual information about sync failures or schema changes are now included in the notification to ensure you can act on any pipeline changes quickly. + +## Connector Improvements + +In addition to our Postgres V2 destination, we also released a few notable Connector improvements: + + - Our [Paypal source](https://github.com/airbytehq/airbyte/pull/34510) has been rigorously tested for bugs and now syncs new streams `Catalog Products`, `Disputes`, `Invoicing`, `Orders`, `Payments` and `Subscriptions`. + - [Chargebee](https://github.com/airbytehq/airbyte/pull/34053) source now syncs incrementally for `unbilled-charge`, `gift`, and `site_migration_detail` + - We launched [PyAirbyte](/using-airbyte/pyairbyte/getting-started.mdx), a new interface to use Airbyte connectors with for Python developers. diff --git a/docs/release_notes/march_2024.md b/docs/release_notes/march_2024.md new file mode 100644 index 0000000000000..abf2486e399cb --- /dev/null +++ b/docs/release_notes/march_2024.md @@ -0,0 +1,32 @@ +# March 2024 +## airbyte v0.51.0 to v0.56.0 + +This page includes new features and improvements to the Airbyte Cloud and Airbyte Open Source platforms. + +## ✨ Highlights + +Airbyte now supports **OpenID Connect (OIDC) SSO** for Airbyte Enterprise and Airbyte Cloud Teams. This enables companies to use Airbyte with Entra ID/AD via the OIDC protocol. + +Airbyte certified our [Microsoft SQL Server source](/integrations/sources/mssql) to support terabyte-sized tables, expanded datetime data types, and reliability improvements. + +Airbyte migrated our [Redshift destination](https://github.com/airbytehq/airbyte/pull/36255) to the [Destinations V2](./upgrading_to_destinations_v2) framework. This enables you to map tables one-to-one with your source, experience better error handling (particularly with large records), and deliver data incrementally. + + +## Platform Releases + +In addition to our OpenID Connect support, we also released: + +- A major upgrade to our Docker and Helm deployments, which simplifies how external logs are configured. Learn more about the specific changes in our [migration guide](/deploying-airbyte/on-kubernetes-via-helm#migrate-from-old-chart-to-airbyte-v0520-and-latest-chart-version). + +- A breaking change to Docker deployments. Containers no longer run as the `root` user. If you are upgrading from an older version, the `airbyte_workspace` docker volume must be recreated to fix the ownership of staging directories under `/tmp` (in the container). Alternatively, you can manually update the ownership (`airbyte:airbyte`) of the directory mount point /tmp/workspace in all containers where `airbyte_workspace` is mounted (see: `docker-compose.yaml`) + +- Our major version upgrades (Airbyte Cloud only) now only require manual upgrading when you are actively syncing a stream that has changed. Otherwise, syncs will continue as is and the version will be upgraded automatically for you. + +## Connector Improvements + +In addition to our MS-SQL certification, we also released a few notable Connector improvements: + +- We released several connector builder enhancements, including support for raw YAML blocks, modification the start date when testing, and added the ability to adjust page/slice/record limits. We also resolved bugs in page size and interpolation inputs, improved the switching time between YAML and UI, and fixed several layout issues. +- Our [Bing source](https://github.com/airbytehq/airbyte/pull/35812) includes the following new streams: `Audience Performance Report`, `Goals And Funnels Report`, `Product Dimension Performance Report` +- Our [JIRA source](https://github.com/airbytehq/airbyte/pull/35656) now contains more fields to the following streams: `board_issues`,`filter_sharing`,`filters`,`issues`, `permission_schemes`, `sprint_issues`,`users_groups_detailed` and `workflows` +- Our [Snapchat Source](https://github.com/airbytehq/airbyte/pull/35660) now contains additional fields in the `ads`, `adsquads`, `creatives`, and `media` streams. diff --git a/docs/release_notes/upgrading_to_destinations_v2.md b/docs/release_notes/upgrading_to_destinations_v2.md index e0f27a394e64f..0d6e9407df053 100644 --- a/docs/release_notes/upgrading_to_destinations_v2.md +++ b/docs/release_notes/upgrading_to_destinations_v2.md @@ -232,6 +232,12 @@ SELECT "my column" from my_table; SELECT "MY COLUMN" from my_table; ``` +### Postgres + +#### Preserving mixed case column names in Final Tables + +Postgres will implicitly lower case column names with mixed case characters when using unquoted identifiers. Based on feedback, we chose to replace any special +characters like spaces with underscores and use quoted identifiers to preserve mixed case column names. ## Updating Downstream Transformations diff --git a/docs/understanding-airbyte/cdc.md b/docs/understanding-airbyte/cdc.md index fa012c6d53669..02c783472290d 100644 --- a/docs/understanding-airbyte/cdc.md +++ b/docs/understanding-airbyte/cdc.md @@ -33,7 +33,7 @@ We add some metadata columns for CDC sources: * [Postgres](../integrations/sources/postgres.md) \(For a quick video overview of CDC on Postgres, click [here](https://www.youtube.com/watch?v=NMODvLgZvuE&ab_channel=Airbyte)\) * [MySQL](../integrations/sources/mysql.md) * [Microsoft SQL Server / MSSQL](../integrations/sources/mssql.md) -* [MongoDB](../integrations/sources/mongodb-v2.md) \(More information on [Mongodb CDC: How to Sync in Near Real-Time](https://airbyte.com/data-engineering-resources/mongodb-cdc)\) +* [MongoDB](../integrations/sources/mongodb-v2.md) ## Coming Soon * Oracle DB diff --git a/docs/understanding-airbyte/heartbeats.md b/docs/understanding-airbyte/heartbeats.md index 88ce6f86d46dc..ce2f3499f5a57 100644 --- a/docs/understanding-airbyte/heartbeats.md +++ b/docs/understanding-airbyte/heartbeats.md @@ -1,34 +1,61 @@ # Heartbeats -During a data synchronization, many things can go wrong and sometimes the fix is just to restart the synchronization. -Airbyte aims to make this restart as automated as possible and uses heartbeating mechanism in order to do that. -This performed on 2 differents component: the source and the destination. They have different logics which will be -explained bellow. +Many transient issues can occur when moving data. One class of issues is an unresponsive Source or Destination. In this case, the fix is often a simple restart. -## Source +Airbyte aims to make restarts as automated as possible and uses a heartbeating mechanism to do so. -### Heartbeating logic +Airbyte monitors for responses from the Sources and Destination, interpreting these as 'heartbeats'. If the Source or Destination does not heartbeat within +a certain time frame, Airbyte triggers a heartbeat error and automatically restarts the job. + +Heartbeats are a final catch-all mechanism. Errors are expected to be transient and should automatically resolve. If they do not, it is likely a sign of a more serious issue. +In these cases, Airbyte takes the more conservative approach. Airbyte restarts the job to avoid a seemingly endless job, and highlight to users the existence of a potential issue. + +## Known Heartbeat Error Causes + +Possible reasons for a heartbeat error: +1. Certain API sources take an unknown amount of time to generate asynchronous responses (e.g., Salesforce, Facebook, Amplitude). No workaround currently exists. +2. Certain API sources can be rate-limited for a time period longer than their configured threshold. Although Airbyte tries its best to handle this on a per-connector basis, rate limits are not always predictable. +3. Database sources can be slow to respond to a query. This can be due to a variety of reasons, including the size of the database, the complexity of the query, and the number of other queries being made to the database at the same time. + 1. The most common reason we see is using an un-indexed column as a cursor column in an incremental sync, or a dramatically under-provisioned database. +4. Destinations can be slow to respond to write requests. + 1. The most common reason we see here is destination resource availability vis-a-vis data volumes. + +In general, +* **Database Sources and Destination errors are extremely rare**. Any issues are likely to be indicative of actual issues and need to be investigated. +* **API Sources errors are uncommon but not unexpected**. This is especially true if an API source generates asynchronous responses or has rate limits. + +## Airbyte Cloud +Airbyte Cloud has identical heartbeat monitoring and alerting as Airbyte Open Source. + +If these issues show up on Airbyte Cloud, +1. Please read [Known Causes](#known-causes). In many cases, the issue is with the source, the destination or the connection set up, and not with Airbyte. +2. Reach out to Airbyte Support for help. + +## Technical Details + +### Source +#### Heartbeating logic The platform considers both `RECORD` and `STATE` messages emitted by the source as source heartbeats. The Airbyte platform has a process which monitors when the last beat was send and if it reaches a threshold, the synchronization attempt will be failed. It fails with a cause being the source an message saying `The source is unresponsive`. Internal the error has a heartbeat timeout type, which is not display in the UI. -### Configuration +#### Configuration The heartbeat can be configured using the file flags.yaml through 2 entries: -* `heartbeat-max-seconds-between-messages`: this configures the maximum time allowed between 2 messages. +* `hseartbeat-max-seconds-between-messages`: this configures the maximum time allowed between 2 messages. The default is 3 hours. * `heartbeat.failSync`: Setting this to true will make the syncs to fail if a missed heartbeat is detected. If false no sync will be failed because of a missed heartbeat. The default value is true. -## Destination +### Destination -### Heartbeating logic +#### Heartbeating logic Adding a heartbeat to the destination similar to the one at the source is not straightforward since there isn't a constant stream of messages from the destination to the platform. Instead, we have implemented something that is more akin to a timeout. The platform monitors whether there has been a call to the destination that has taken more than a specified amount of time. If such a delay occurs, the platform considers the destination to have timed out. -### Configuration +#### Configuration The timeout can be configured using the file `flags.yaml` through 2 entries: * `destination-timeout-max-seconds`: If the platform detects a call to the destination exceeding the duration specified in this entry, it will consider the destination to have timed out. The default timeout value is 24 hours. * `destination-timeout.failSync`: If enabled (true by default), a detected destination timeout will cause the platform to fail the sync. If not, the platform will log a message and allow the sync to continue. When the platform fails a sync due to a destination timeout, the UI will display the message: `The destination is unresponsive`. diff --git a/docs/understanding-airbyte/supported-data-types.md b/docs/understanding-airbyte/supported-data-types.md index f588436e6aa61..3080f5186eb5a 100644 --- a/docs/understanding-airbyte/supported-data-types.md +++ b/docs/understanding-airbyte/supported-data-types.md @@ -69,7 +69,7 @@ Along with this `AirbyteRecordMessage` (observe that the `data` field conforms t The top-level `object` must conform to the type system. This [means](#objects) that all of the fields must also conform to the type system. #### Nulls -Many sources cannot guarantee that all fields are present on all records. In these cases, sources should simply not list them as `required` fields. In most cases, sources do not need to list fields as required; by default, all fields are treated as nullable. +Many sources cannot guarantee that all fields are present on all records. In these cases, sources should not list them as `required` fields, and add that the property can be null in the jsonSchema, e.g. `[null, string]`. If a null property is found for a non-nullable schema, a validation error may occur in the platform or the destination may have trouble storing the record. #### Unsupported types Destinations must have handling for all types, but they are free to cast types to a convenient representation. For example, let's say a source discovers a stream with this schema: diff --git a/docs/understanding-airbyte/tech-stack.md b/docs/understanding-airbyte/tech-stack.md index c21aea1b8dd8e..4bbb07010bb55 100644 --- a/docs/understanding-airbyte/tech-stack.md +++ b/docs/understanding-airbyte/tech-stack.md @@ -2,7 +2,7 @@ ## Airbyte Core Backend -* [Java 17](https://jdk.java.net/archive/) +* [Java 21](https://jdk.java.net/archive/) * Framework: [Micronaut](https://micronaut.io/) * API: [OAS3](https://www.openapis.org/) * Databases: [PostgreSQL](https://www.postgresql.org/) @@ -14,7 +14,7 @@ Connectors can be written in any language. However the most common languages are: * Python 3.9 or higher -* [Java 17](https://jdk.java.net/archive/) +* [Java 21](https://jdk.java.net/archive/) ## **Frontend** diff --git a/docs/using-airbyte/core-concepts/namespaces.md b/docs/using-airbyte/core-concepts/namespaces.md index ce7c8532d91ef..0595b2571da47 100644 --- a/docs/using-airbyte/core-concepts/namespaces.md +++ b/docs/using-airbyte/core-concepts/namespaces.md @@ -6,13 +6,14 @@ products: all Namespaces are used to generally organize data, separate tests and production data, and enforce permissions. In most cases, namespaces are schemas in the database you're replicating to. -As a part of connection setup, you select where in the destination you want to write your data. Note: The default configuration is **Destination default**. +As a part of connection setup, you select where in the destination you want to write your data. Note: The default configuration is **Destination-defined**. | Destination Namespace | Description | | ---------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------- | -| Destination default | All streams will be replicated to the single default namespace defined in the Destination's settings. | -| Mirror source structure | Some sources (for example, databases) provide namespace information for a stream. If a source provides namespace information, the destination will mirror the same namespace when this configuration is set. For sources or streams where the source namespace is not known, the behavior will default to the "Destination default" option. | -| Custom format | All streams will be replicated to a single user-defined namespace. See Custom format for more details | +| Custom | All streams will be replicated to a single user-defined namespace. See Custom format for more details | +| Destination-defined | All streams will be replicated to the single default namespace defined in the Destination's settings. | +| Source-defined | Some sources (for example, databases) provide namespace information for a stream. If a source provides namespace information, the destination will mirror the same namespace when this configuration is set. For sources or streams where the source namespace is not known, the behavior will default to the "Destination default" option. | + Most of our destinations support this feature. To learn if your connector supports this, head to the individual connector page to learn more. If your desired destination doesn't support it, you can ignore this feature. @@ -26,7 +27,19 @@ In a source, the namespace is the location from where the data is replicated to Airbyte supports namespaces and allows Sources to define namespaces, and Destinations to write to various namespaces. In Airbyte, the following options are available and are set on each individual connection. -### Destination default +### Custom + +When replicating multiple sources into the same destination, you may create table conflicts where tables are overwritten by different syncs. This is where using a custom namespace will ensure data is synced accurately. + +For example, a Github source can be replicated into a `github` schema. However, you may have multiple connections writing from different GitHub repositories \(common in multi-tenant scenarios\). + +:::tip +To write more than 1 table with the same name to your destination, Airbyte recommends writing the connections to unique namespaces to avoid mixing data from the different GitHub repositories. +::: + +You can enter plain text (most common) or additionally add a dynamic parameter `${SOURCE_NAMESPACE}`, which uses the namespace provided by the source if available. + +### Destination-defined All streams will be replicated and stored in the default namespace defined on the destination settings page, which is typically defined when the destination was set up. Depending on your destination, the namespace refers to: @@ -45,21 +58,9 @@ All streams will be replicated and stored in the default namespace defined on th If you prefer to replicate multiple sources into the same namespace, use the `Stream Prefix` configuration to differentiate data from these sources to ensure no streams collide when writing to the destination. ::: -### Mirror source structure +### Source-Defined -Some sources \(such as databases based on JDBC\) provide namespace information from which a stream has been extracted. Whenever a source is able to fill this field in the catalog.json file, the destination will try to write to exactly the same namespace when this configuration is set. For sources or streams where the source namespace is not known, the behavior will fall back to the "Destination default". Most APIs do not provide namespace information. - -### Custom format - -When replicating multiple sources into the same destination, you may create table conflicts where tables are overwritten by different syncs. This is where using a custom namespace will ensure data is synced accurately. - -For example, a Github source can be replicated into a `github` schema. However, you may have multiple connections writing from different GitHub repositories \(common in multi-tenant scenarios\). - -:::tip -To write more than 1 table with the same name to your destination, Airbyte recommends writing the connections to unique namespaces to avoid mixing data from the different GitHub repositories. -::: - -You can enter plain text (most common) or additionally add a dynamic parameter `${SOURCE_NAMESPACE}`, which uses the namespace provided by the source if available. +Some sources \(such as databases based on JDBC\) provide namespace information from which a stream has been extracted. Whenever a source is able to fill this field in the catalog.json file, the destination will try to write to exactly the same namespace when this configuration is set. For sources or streams where the source namespace is not known, the behavior will fall back to the default namespace defined in the destination configuration. Most APIs do not provide namespace information. ### Examples diff --git a/docs/using-airbyte/core-concepts/readme.md b/docs/using-airbyte/core-concepts/readme.md index 85c47d4431d39..9f7e12e6b53ed 100644 --- a/docs/using-airbyte/core-concepts/readme.md +++ b/docs/using-airbyte/core-concepts/readme.md @@ -26,11 +26,11 @@ A connection is an automated data pipeline that replicates data from a source to | Concept | Description | |-----------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------| -| [Replication Frequency](/using-airbyte/core-concepts/sync-schedules.md) | When should a data sync be triggered? | -| [Destination Namespace and Stream Prefix](/using-airbyte/core-concepts/namespaces.md) | Where should the replicated data be written? | +| [Catalog Selection](/cloud/managing-airbyte-cloud/configuring-connections.md#modify-streams-in-your-connection) | What data should be replicated from the source to the destination? | | [Sync Mode](/using-airbyte/core-concepts/sync-modes/README.md) | How should the streams be replicated (read and written)? | +| [Sync Schedule](/using-airbyte/core-concepts/sync-schedules.md) | When should a data sync be triggered? | +| [Destination Namespace and Stream Prefix](/using-airbyte/core-concepts/namespaces.md) | Where should the replicated data be written? | | [Schema Propagation](/cloud/managing-airbyte-cloud/manage-schema-changes.md) | How should Airbyte handle schema drift in sources? | -| [Catalog Selection](/cloud/managing-airbyte-cloud/configuring-connections.md#modify-streams-in-your-connection) | What data should be replicated from the source to the destination? | ## Stream @@ -51,7 +51,7 @@ Examples of fields: - A column in the table in a relational database - A field in an API response -## Sync Schedules +## Sync Schedule There are three options for scheduling a sync to run: @@ -108,6 +108,10 @@ For more details, see our [dbt integration documentation](/cloud/managing-airbyt A workspace is a grouping of sources, destinations, connections, and other configurations. It lets you collaborate with team members and share resources across your team under a shared billing account. +## Organization + +Organizations let you collaborate with team members and share workspaces across your team. + ## Glossary of Terms You can find a extended list of [Airbyte specific terms](https://glossary.airbyte.com/term/airbyte-glossary-of-terms/), [data engineering concepts](https://glossary.airbyte.com/term/data-engineering-concepts) or many [other data related terms](https://glossary.airbyte.com/). diff --git a/docs/using-airbyte/core-concepts/sync-schedules.md b/docs/using-airbyte/core-concepts/sync-schedules.md index c4514d9413968..1a0d091a1c2c6 100644 --- a/docs/using-airbyte/core-concepts/sync-schedules.md +++ b/docs/using-airbyte/core-concepts/sync-schedules.md @@ -4,13 +4,13 @@ products: all # Sync Schedules -For each connection, you can select between three options that allow a sync to run. The three options for `Replication Frequency` are: +For each connection, you can select between three options that allow a sync to run. The three options for `Schedule Type` are: - Scheduled (e.g. every 24 hours, every 2 hours) -- Cron scheduling +- Cron - Manual -## Sync Limitations +## Sync Considerations * Only one sync per connection can run at a time. * If a sync is scheduled to run before the previous sync finishes, the scheduled sync will start after the completion of the previous sync. @@ -21,6 +21,15 @@ For Scheduled or cron scheduled syncs, Airbyte guarantees syncs will initiate wi ::: ## Scheduled syncs +You can choose between the following scheduled options: +- Every 24 hours (most common) +- Every 12 hours +- Every 8 hours +- Every 6 hours +- Every 3 hours +- Every 2 hours +- Every 1 hour + When a scheduled connection is first created, a sync is executed immediately after creation. After that, a sync is run once the time since the last sync \(whether it was triggered manually or due to a schedule\) has exceeded the schedule interval. For example: - **October 1st, 2pm**, a user sets up a connection to sync data every 24 hours. @@ -30,7 +39,7 @@ When a scheduled connection is first created, a sync is executed immediately aft - **October 3rd, 2:01pm:** since the last sync was less than 24 hours ago, no sync is run - **October 3rd, 5:01pm:** It has been more than 24 hours since the last sync, so a sync is run -## Cron Scheduling +## Cron Syncs If you prefer more precision in scheduling your sync, you can also use CRON scheduling to set a specific time of day or month. Airbyte uses the CRON scheduler from [Quartz](http://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html). We recommend reading their [documentation](http://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html) to understand the required formatting. You can also refer to these examples: diff --git a/docs/using-airbyte/getting-started/add-a-destination.md b/docs/using-airbyte/getting-started/add-a-destination.md index 4aa05d8970f2f..fe0786fa2b428 100644 --- a/docs/using-airbyte/getting-started/add-a-destination.md +++ b/docs/using-airbyte/getting-started/add-a-destination.md @@ -11,7 +11,7 @@ Destinations are the data warehouses, data lakes, databases and analytics tools Once you've signed up for Airbyte Cloud or logged in to your Airbyte Open Source deployment, click on the **Destinations** tab in the navigation bar found on the left side of the dashboard. This will take you to the list of available destinations. -![Destination List](./assets/getting-started-destination-list.png) +![Destination List](./assets/getting-started-destination-catalog.png) You can use the provided search bar at the top of the page, or scroll down the list to find the destination you want to replicate data from. diff --git a/docs/using-airbyte/getting-started/add-a-source.md b/docs/using-airbyte/getting-started/add-a-source.md index 15f4ce57bd615..145d6152887de 100644 --- a/docs/using-airbyte/getting-started/add-a-source.md +++ b/docs/using-airbyte/getting-started/add-a-source.md @@ -6,8 +6,7 @@ products: all Setting up a new source in Airbyte is a quick and simple process! When viewing the Airbyte UI, you'll see the main navigation bar on the left side of your screen. Click the **Sources** tab to bring up a list of all available sources. - - + You can use the provided search bar, or simply scroll down the list to find the source you want to replicate data from. Let's use a demo source, Faker, as an example. Clicking on the **Sample Data (Faker)** card will bring us to its setup page. diff --git a/docs/using-airbyte/getting-started/assets/getting-started-column-selection.png b/docs/using-airbyte/getting-started/assets/getting-started-column-selection.png new file mode 100644 index 0000000000000..3ba866dbad716 Binary files /dev/null and b/docs/using-airbyte/getting-started/assets/getting-started-column-selection.png differ diff --git a/docs/using-airbyte/getting-started/assets/getting-started-connection-configuration.png b/docs/using-airbyte/getting-started/assets/getting-started-connection-configuration.png deleted file mode 100644 index 92921edd1dc47..0000000000000 Binary files a/docs/using-airbyte/getting-started/assets/getting-started-connection-configuration.png and /dev/null differ diff --git a/docs/using-airbyte/getting-started/assets/getting-started-destination-catalog.png b/docs/using-airbyte/getting-started/assets/getting-started-destination-catalog.png new file mode 100644 index 0000000000000..58b99051c65d9 Binary files /dev/null and b/docs/using-airbyte/getting-started/assets/getting-started-destination-catalog.png differ diff --git a/docs/using-airbyte/getting-started/assets/getting-started-destination-list.png b/docs/using-airbyte/getting-started/assets/getting-started-destination-list.png deleted file mode 100644 index e79369996e0d0..0000000000000 Binary files a/docs/using-airbyte/getting-started/assets/getting-started-destination-list.png and /dev/null differ diff --git a/docs/using-airbyte/getting-started/assets/getting-started-select-streams.png b/docs/using-airbyte/getting-started/assets/getting-started-select-streams.png new file mode 100644 index 0000000000000..f8eed27c0f898 Binary files /dev/null and b/docs/using-airbyte/getting-started/assets/getting-started-select-streams.png differ diff --git a/docs/using-airbyte/getting-started/assets/getting-started-status-page.png b/docs/using-airbyte/getting-started/assets/getting-started-status-page.png new file mode 100644 index 0000000000000..f07bdbc6a9fa0 Binary files /dev/null and b/docs/using-airbyte/getting-started/assets/getting-started-status-page.png differ diff --git a/docs/using-airbyte/getting-started/assets/getting-started-stream-selection.png b/docs/using-airbyte/getting-started/assets/getting-started-stream-selection.png deleted file mode 100644 index fc7cc81d0ddc1..0000000000000 Binary files a/docs/using-airbyte/getting-started/assets/getting-started-stream-selection.png and /dev/null differ diff --git a/docs/using-airbyte/getting-started/set-up-a-connection.md b/docs/using-airbyte/getting-started/set-up-a-connection.md index c27c77c16f974..7acc58028f992 100644 --- a/docs/using-airbyte/getting-started/set-up-a-connection.md +++ b/docs/using-airbyte/getting-started/set-up-a-connection.md @@ -7,41 +7,50 @@ import TabItem from "@theme/TabItem"; # Set up a Connection -Now that you've learned how to set up your first [source](./add-a-source) and [destination](./add-a-destination), it's time to finish the job by creating your very first connection! +Now that you've learned how to set up your first [source](./add-a-source) and [destination](./add-a-destination), it's time to finish the setup by creating your very first connection! On the left side of your main Airbyte dashboard, select **Connections**. You will be prompted to choose which source and destination to use for this connection. For this example, we'll use the **Google Sheets** source and the destination you previously set up, either **Local JSON** or **Google Sheets**. ## Configure the connection -Once you've chosen your source and destination, you'll be able to configure the connection. You can refer to [this page](/cloud/managing-airbyte-cloud/configuring-connections.md) for more information on each available configuration. For this demo, we'll simply set the **Replication frequency** to a 24 hour interval and leave the other fields at their default values. +Once you've chosen your source and destination you can configure the connection. You'll first be asked a few questions about how your data should sync, these correlate to our sync modes which you can read more about on [this page](/cloud/managing-airbyte-cloud/configuring-connections.md). -![Connection config](./assets/getting-started-connection-configuration.png) +Most users select "Mirror Source", which will simply copy the data from the source to the destination where you'll see one row in the destination for each row in the source. If you prefer to Append Historical Changes or take a Full Snapshot with each sync, you can optionally select those options, but keep in mind those will create duplicate records in your destination. The sync mode we choose for all the enabled streams will reflect your selection here. -:::note -By default, data will sync to the default defined in the destination. To ensure your data is synced to the correct place, see our examples for [Destination Namespace](/using-airbyte/core-concepts/namespaces.md) -::: + + +Next, you can toggle which streams you want to replicate. Our test data consists of three streams, which we've enabled and set to `Incremental - Append + Deduped` sync mode. + +![Setup streams](./assets/getting-started-select-streams.png) -Next, you can toggle which streams you want to replicate, as well as setting up the desired sync mode for each stream. For more information on the nature of each sync mode supported by Airbyte, see [this page](/using-airbyte/core-concepts/sync-modes). +Your sync mode is already determined by your selection above, but you can change the sync mode for an individual stream. You can also select a cursor or primary key to enable incremental and/or deduplication. For more information on the nature of each sync mode supported by Airbyte, see [this page](/using-airbyte/core-concepts/sync-modes). -Our test data consists of three streams, which we've enabled and set to `Incremental - Append + Deduped` sync mode. +You can also select individual fields to sync on this page. Expand the fields available by clicking any stream. This is helpful when you have security concerns or don't want to sync all the data from the source. +![Column Selection](./assets/getting-started-column-selection.png) -![Stream config](./assets/getting-started-stream-selection.png) +Click **Next** to complete your stream setup and move to the connection configuration. This is where you'll set up how often your data will sync and where it will live in the destination. For this demo, we'll set the connection to run at 8 AM every day and sync the connection to a custom namespace with a stream prefix. + + + +:::note +To ensure your data is synced to the correct place, see our examples for [Destination Namespace](/using-airbyte/core-concepts/namespaces.md) +::: -Click **Set up connection** to complete your first connection. Your first sync is about to begin! +Once you've set up all the connection settings, click "Set up connection". You've successfully set up your first data pipeline with Airbyte. Your first sync is about to begin! ## Connection Overview Once you've finished setting up the connection, you will be automatically redirected to a connection overview containing all the tools you need to keep track of your connection. -![Connection dashboard](./assets/getting-started-connection-complete.png) +![Connection dashboard](./assets/getting-started-status-page.png) Here's a basic overview of the tabs and their use: 1. The **Status** tab shows you an overview of your connector's sync health. 2. The **Job History** tab allows you to check the logs for each sync. If you encounter any errors or unexpected behaviors during a sync, checking the logs is always a good first step to finding the cause and solution. -3. The **Replication** tab allows you to modify the configurations you chose during the connection setup. +3. The **Schema** tab allows you to modify the streams you chose during the connection setup. 4. The **Transformation** tab allows you to set up a custom post-sync transformations using dbt. -4. The **Settings** tab contains additional settings, and the option to delete the connection if you no longer wish to use it. +4. The **Settings** tab contains the connection settings, and the option to delete the connection if you no longer wish to use it. ### Check the data from your first sync diff --git a/docs/using-airbyte/pyairbyte/getting-started.mdx b/docs/using-airbyte/pyairbyte/getting-started.mdx index d8ece1301b4aa..fd919222d40a2 100644 --- a/docs/using-airbyte/pyairbyte/getting-started.mdx +++ b/docs/using-airbyte/pyairbyte/getting-started.mdx @@ -1,4 +1,4 @@ -import AirbyteLibConnectors from '@site/src/components/AirbyteLibConnectors'; +import PyAirbyteConnectors from '@site/src/components/PyAirbyteConnectors'; # Getting Started with PyAirbyte (Beta) @@ -48,6 +48,7 @@ for name, records in result.streams.items(): * [Shopify](https://github.com/airbytehq/quickstarts/blob/main/pyairbyte_notebooks/PyAirbyte_Shopify_Demo.ipynb) * [GitHub](https://github.com/airbytehq/quickstarts/blob/main/pyairbyte_notebooks/PyAirbyte_Github_Incremental_Demo.ipynb) * [Postgres (cache)](https://github.com/airbytehq/quickstarts/blob/main/pyairbyte_notebooks/PyAirbyte_Postgres_Custom_Cache_Demo.ipynb) +* [RAG With Langchain](https://github.com/airbytehq/quickstarts/blob/main/pyairbyte_notebooks/PyAirbyte_Document_Creation_RAG_with_Langchain_Demo.ipynb) ## API Reference @@ -58,7 +59,7 @@ For details on specific classes and methods, please refer to our [PyAirbyte API [comment]: <> (Edit under https://docs.google.com/drawings/d/1M7ti2D4ha6cEtPnk04RLp1SSh3au4dRJsLupnGPigHQ/edit?usp=sharing) -![Architecture](../../assets/docs/airbyte-lib-high-level-architecture.svg) +![Architecture](../../assets/docs/pyairbyte-high-level-architecture.svg) PyAirbyte is a python library that can be run in any context that supports Python >=3.9. It contains the following main components: * **Source**: A source object is using a Python connector and includes a configuration object. The configuration object is a dictionary that contains the configuration of the connector, like authentication or connection modalities. The source object is used to read data from the connector. @@ -69,4 +70,4 @@ PyAirbyte is a python library that can be run in any context that supports Pytho The following connectors are available: - + diff --git a/docs/using-airbyte/workspaces.md b/docs/using-airbyte/workspaces.md index 7b211c0a0cb0c..099c7044b12af 100644 --- a/docs/using-airbyte/workspaces.md +++ b/docs/using-airbyte/workspaces.md @@ -4,84 +4,59 @@ products: cloud, oss-enterprise # Manage your workspace -A workspace in Airbyte allows you to collaborate with other users and manage connections together. On Airbyte Cloud it will allow you to share billing details for a workspace. - -:::info -Airbyte [credits](https://airbyte.com/pricing) are assigned per workspace and cannot be transferred between workspaces. -::: +A workspace in Airbyte allows you to collaborate with other users and manage connections together. ## Add users to your workspace -To add a user to your workspace: - -1. Go to the **Settings** via the side navigation in Airbyte. - -2. Click **Workspace** > **Access Management**. - -3. Click **+ New user**. +1. To add a user to your workspace, go to the **Settings** via the side navigation in Airbyte. Navigate to **Workspace** > **General** and click **+ New member**. -4. On the **Add new users** dialog, enter the email address of the user you want to invite to your workspace. +2. On the **Add new member** dialog, enter the email address of the user you want to invite to your workspace. Click **Add new member**. -5. Click **Send invitation**. - - :::info - The user will have access to only the workspace you invited them to. They will be added as a workspace admin by default. - ::: +:::info +The user will have access to only the workspace you invited them to. They will be added with a role of `Workspace Admin`, which has the ability to add or delete other users and make changes to connections and connectors in the workspace. +::: ## Remove users from your workspace​ -To remove a user from your workspace: +1. To remove a user from your workspace, to the **Settings** via the side navigation in Airbyte. Navigate to **Workspace** > **General**. In the workspace role column, click the down carat and select **Remove user**. -1. Go to the **Settings** via the side navigation in Airbyte. +2. Complete removal by clicking **Remove** in the confirmation modal. -2. Click **Workspace** > **Access Management**. - -3. Click **Remove** next to the user’s email. - -4. The **Remove user** dialog displays. Click **Remove**. +:::tip +Organization admins cannot be removed from a workspace. Reach out to Airbyte Support if you need assistance removing an organization admin. +::: ## Rename a workspace -To rename a workspace: +To rename a workspace, go to the **Settings** via the side navigation in Airbyte. Navigate to **Workspace** > **General**. In the **Workspace name** field, enter the new name for your workspace. Click **Save changes**. -1. Go to the **Settings** via the side navigation in Airbyte. - -2. Click **Workspace** > **General**. - -3. In the **Workspace name** field, enter the new name for your workspace. +## Delete a workspace -4. Click **Save changes**. +To delete a workspace, go to the **Settings** via the side navigation in Airbyte. Navigate to **Workspace** > **General**. In the **Danger!** section, click **Delete your workspace**. -## Delete a workspace +## Managing multiple workspaces + +You can have access to one or multiple workspaces with Airbyte Cloud, which gives you flexibility in managing user access and billing. Workspaces can also be linked through an organization, which allows you to collaborate with team members and share workspaces across your team. -To delete a workspace: +:::info +Organizations are only available in Airbyte Cloud through Cloud Teams. [Get in touch](https://airbyte.com/company/talk-to-sales) with us if you would like to take advantage of organization features. +::: + +### Billing across multiple workspaces -1. Go to the **Settings** via the side navigation in Airbyte. +Airbyte [credits](https://airbyte.com/pricing) are by default assigned per workspace and cannot be transferred between workspaces. [Get in touch](https://airbyte.com/company/talk-to-sales) with us if you would like to centralize billing across workspaces. -2. Click **Workspace** > **General**. +## Managing User Roles -3. In the **Delete your workspace** section, click **Delete**. +Airbyte offers multiple user roles to enable teams to securely access workspaces or organizations. Some roles are only available to certain products. -## Single workspace vs. multiple workspaces - -You can use one or multiple workspaces with Airbyte Cloud, which gives you flexibility in managing user access and billing. - -### Access -| Number of workspaces | Benefits | Considerations | -|----------------------|-------------------------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------| -| Single | All users in a workspace have access to the same data. | If you add a user to a workspace, you cannot limit their access to specific data within that workspace. | -| Multiple | You can create multiple workspaces to allow certain users to access the data. | Since you have to manage user access for each workspace individually, it can get complicated if you have many users in multiple workspaces. | - -### Billing -| Number of workspaces | Benefits | Considerations | -|----------------------|-------------------------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------| -| Single | You can use the same payment method for all purchases. | Credits pay for the use of resources in a workspace when you run a sync. Resource usage cannot be divided and paid for separately (for example, you cannot bill different departments in your organization for the usage of some credits in one workspace). | -| Multiple | Workspaces are independent of each other, so you can use a different payment method card for each workspace (for example, different credit cards per department in your organization). | You can use the same payment method for different workspaces, but each workspace is billed separately. Managing billing for each workspace can become complicated if you have many workspaces. | +| Role | Cloud | Cloud Teams | Enterprise | +|---|------|------|------| +|**Organization Admin:** Administer the whole organization, create workspaces in it, and manage organization permissions| |✅|✅| +|**Workspace Admin:** Administer the workspace, create workspace permissions|✅| | | +|**Workspace Reader:** View information within a workspace, cannot modify anything within a workspace| |✅|✅| ## Switch between multiple workspaces -To switch between workspaces: - -1. Click the current workspace name under the Airbyte logo in the navigation bar. +To switch between workspaces, click the current workspace name under the Airbyte logo in the navigation bar. Search for the workspace or click the name of the workspace you want to switch to. -2. Search for the workspace or click the name of the workspace you want to switch to. diff --git a/docusaurus/README.md b/docusaurus/README.md index 8f6b20d231223..5d7071739d6f7 100644 --- a/docusaurus/README.md +++ b/docusaurus/README.md @@ -42,6 +42,7 @@ in [`sidebars.js`](https://github.com/airbytehq/airbyte/blob/master/docusaurus/s We welcome documentation updates! If you'd like to contribute a change, please make sure to: - Run `pnpm build` and check that all build steps are successful. +- Run `pnpm prettier . -w`. - Push your changes into a pull request, and follow the PR template instructions. When you make a pull request, Vercel will automatically build a test instance of the full docs site diff --git a/docusaurus/package.json b/docusaurus/package.json index c2476ce304c4b..12b8f7a74f06a 100644 --- a/docusaurus/package.json +++ b/docusaurus/package.json @@ -124,7 +124,7 @@ "react-dom": "^18.2.0", "react-markdown": "^8.0.7", "react-router": "5.3.3", - "sanitize-html": "^2.11.0", + "sanitize-html": "^2.12.1", "sockjs": "0.3.24", "trim": "0.0.3", "unist-builder": "^4.0.0", diff --git a/docusaurus/pnpm-lock.yaml b/docusaurus/pnpm-lock.yaml index ec5d26397ead7..c00f1913b09ad 100644 --- a/docusaurus/pnpm-lock.yaml +++ b/docusaurus/pnpm-lock.yaml @@ -336,8 +336,8 @@ dependencies: specifier: 5.3.3 version: 5.3.3(react@18.2.0) sanitize-html: - specifier: ^2.11.0 - version: 2.11.0 + specifier: ^2.12.1 + version: 2.12.1 sockjs: specifier: 0.3.24 version: 0.3.24 @@ -4188,8 +4188,8 @@ packages: engines: {node: '>=8'} dev: false - /body-parser@1.20.1: - resolution: {integrity: sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw==} + /body-parser@1.20.2: + resolution: {integrity: sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==} engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16} dependencies: bytes: 3.1.2 @@ -4201,7 +4201,7 @@ packages: iconv-lite: 0.4.24 on-finished: 2.4.1 qs: 6.11.0 - raw-body: 2.5.1 + raw-body: 2.5.2 type-is: 1.6.18 unpipe: 1.0.0 transitivePeerDependencies: @@ -4653,8 +4653,8 @@ packages: resolution: {integrity: sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==} dev: false - /cookie@0.5.0: - resolution: {integrity: sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==} + /cookie@0.6.0: + resolution: {integrity: sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==} engines: {node: '>= 0.6'} dev: false @@ -5896,16 +5896,16 @@ packages: strip-final-newline: 2.0.0 dev: false - /express@4.18.2: - resolution: {integrity: sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ==} + /express@4.19.2: + resolution: {integrity: sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q==} engines: {node: '>= 0.10.0'} dependencies: accepts: 1.3.8 array-flatten: 1.1.1 - body-parser: 1.20.1 + body-parser: 1.20.2 content-disposition: 0.5.4 content-type: 1.0.5 - cookie: 0.5.0 + cookie: 0.6.0 cookie-signature: 1.0.6 debug: 2.6.9 depd: 2.0.0 @@ -6079,8 +6079,8 @@ packages: hasBin: true dev: false - /follow-redirects@1.15.4: - resolution: {integrity: sha512-Cr4D/5wlrb0z9dgERpUL3LrmPKVDsETIJhaCMeDfuFYcqa5bldGV6wBsAN6X/vxlXQtFBMrXdXxdL8CbDTGniw==} + /follow-redirects@1.15.6: + resolution: {integrity: sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==} engines: {node: '>=4.0'} peerDependencies: debug: '*' @@ -6695,7 +6695,7 @@ packages: engines: {node: '>=8.0.0'} dependencies: eventemitter3: 4.0.7 - follow-redirects: 1.15.4 + follow-redirects: 1.15.6 requires-port: 1.0.0 transitivePeerDependencies: - debug @@ -9629,8 +9629,8 @@ packages: engines: {node: '>= 0.6'} dev: false - /raw-body@2.5.1: - resolution: {integrity: sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==} + /raw-body@2.5.2: + resolution: {integrity: sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==} engines: {node: '>= 0.8'} dependencies: bytes: 3.1.2 @@ -10194,8 +10194,8 @@ packages: resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==} dev: false - /sanitize-html@2.11.0: - resolution: {integrity: sha512-BG68EDHRaGKqlsNjJ2xUB7gpInPA8gVx/mvjO743hZaeMCZ2DwzW7xvsqZ+KNU4QKwj86HJ3uu2liISf2qBBUA==} + /sanitize-html@2.12.1: + resolution: {integrity: sha512-Plh+JAn0UVDpBRP/xEjsk+xDCoOvMBwQUf/K+/cBAVuTbtX8bj2VB7S1sL1dssVpykqp0/KPSesHrqXtokVBpA==} dependencies: deepmerge: 4.3.1 escape-string-regexp: 4.0.0 @@ -11228,8 +11228,8 @@ packages: - utf-8-validate dev: false - /webpack-dev-middleware@5.3.3(webpack@5.89.0): - resolution: {integrity: sha512-hj5CYrY0bZLB+eTO+x/j67Pkrquiy7kWepMHmUMoPsmcUaeEnQJqFzHJOyxgWlq746/wUuA64p9ta34Kyb01pA==} + /webpack-dev-middleware@5.3.4(webpack@5.89.0): + resolution: {integrity: sha512-BVdTqhhs+0IfoeAf7EoH5WE+exCmqGerHfDM0IL096Px60Tq2Mn9MAbnaGUe6HiMa41KMCYF19gyzZmBcq/o4Q==} engines: {node: '>= 12.13.0'} peerDependencies: webpack: ^4.0.0 || ^5.0.0 @@ -11269,7 +11269,7 @@ packages: compression: 1.7.4 connect-history-api-fallback: 2.0.0 default-gateway: 6.0.3 - express: 4.18.2 + express: 4.19.2 graceful-fs: 4.2.11 html-entities: 2.4.0 http-proxy-middleware: 2.0.6(@types/express@4.17.21) @@ -11284,7 +11284,7 @@ packages: sockjs: 0.3.24 spdy: 4.0.2 webpack: 5.89.0 - webpack-dev-middleware: 5.3.3(webpack@5.89.0) + webpack-dev-middleware: 5.3.4(webpack@5.89.0) ws: 8.16.0 transitivePeerDependencies: - bufferutil @@ -11318,7 +11318,7 @@ packages: compression: 1.7.4 connect-history-api-fallback: 1.6.0 default-gateway: 6.0.3 - express: 4.18.2 + express: 4.19.2 graceful-fs: 4.2.11 html-entities: 2.4.0 http-proxy-middleware: 2.0.6(@types/express@4.17.21) @@ -11332,7 +11332,7 @@ packages: sockjs: 0.3.24 spdy: 4.0.2 webpack: 5.89.0 - webpack-dev-middleware: 5.3.3(webpack@5.89.0) + webpack-dev-middleware: 5.3.4(webpack@5.89.0) ws: 8.16.0 transitivePeerDependencies: - bufferutil diff --git a/docusaurus/redirects.yml b/docusaurus/redirects.yml index d7b11e6a21104..18304de3117fb 100644 --- a/docusaurus/redirects.yml +++ b/docusaurus/redirects.yml @@ -1,4 +1,4 @@ -# A list of URLs that should be redirected to new pathes +# A list of URLs that should be redirected to new paths - from: /upgrading-airbyte to: /operator-guides/upgrading-airbyte - from: /catalog @@ -7,6 +7,18 @@ to: /integrations/sources/appstore - from: /connector-development/config-based/ to: /connector-development/config-based/low-code-cdk-overview +- from: + - /connector-development/tutorials/cdk-tutorial-python-http/ + - /connector-development/tutorials/cdk-tutorial-python-http/connection-checking/ + - /connector-development/tutorials/cdk-tutorial-python-http/creating-the-source/ + - /connector-development/tutorials/cdk-tutorial-python-http/declare-schema/ + - /connector-development/tutorials/cdk-tutorial-python-http/define-inputs/ + - /connector-development/tutorials/cdk-tutorial-python-http/getting-started/ + - /connector-development/tutorials/cdk-tutorial-python-http/install-dependencies/ + - /connector-development/tutorials/cdk-tutorial-python-http/read-data/ + - /connector-development/tutorials/cdk-tutorial-python-http/test-your-connector/ + - /connector-development/tutorials/cdk-tutorial-python-http/use-connector-in-airbyte/ + to: /connector-development/tutorials/custom-python-connector/getting-started - from: /project-overview/changelog to: /category/release-notes - from: /connector-development/config-based/understanding-the-yaml-file/stream-slicers/ @@ -96,3 +108,6 @@ - from: - /pyairbyte to: /using-airbyte/pyairbyte/getting-started +- from: + - /telemetry + to: /operator-guides/telemetry diff --git a/docusaurus/sidebars.js b/docusaurus/sidebars.js index a5cd413ee494c..bee0ad7e1ed53 100644 --- a/docusaurus/sidebars.js +++ b/docusaurus/sidebars.js @@ -17,7 +17,8 @@ function getFilenamesInDir(prefix, dir, excludes) { !( fileName.endsWith(".inapp.md") || fileName.endsWith("-migrations.md") || - fileName.endsWith(".js") + fileName.endsWith(".js") || + fileName === "low-code.md" ) ) .map((fileName) => fileName.replace(".md", "")) @@ -234,22 +235,20 @@ const buildAConnector = { "connector-development/tutorials/cdk-speedrun", { type: "category", - label: "Python CDK: Creating a HTTP API Source", + label: "Python CDK: Creating a Python Source", items: [ - "connector-development/tutorials/cdk-tutorial-python-http/getting-started", - "connector-development/tutorials/cdk-tutorial-python-http/creating-the-source", - "connector-development/tutorials/cdk-tutorial-python-http/install-dependencies", - "connector-development/tutorials/cdk-tutorial-python-http/define-inputs", - "connector-development/tutorials/cdk-tutorial-python-http/connection-checking", - "connector-development/tutorials/cdk-tutorial-python-http/declare-schema", - "connector-development/tutorials/cdk-tutorial-python-http/read-data", - "connector-development/tutorials/cdk-tutorial-python-http/use-connector-in-airbyte", - "connector-development/tutorials/cdk-tutorial-python-http/test-your-connector", + "connector-development/tutorials/custom-python-connector/getting-started", + "connector-development/tutorials/custom-python-connector/environment-setup", + "connector-development/tutorials/custom-python-connector/reading-a-page", + "connector-development/tutorials/custom-python-connector/reading-multiple-pages", + "connector-development/tutorials/custom-python-connector/check-and-error-handling", + "connector-development/tutorials/custom-python-connector/discover", + "connector-development/tutorials/custom-python-connector/incremental-reads", + "connector-development/tutorials/custom-python-connector/reading-from-a-subresource", + "connector-development/tutorials/custom-python-connector/concurrency", ], }, - "connector-development/tutorials/building-a-python-source", "connector-development/tutorials/building-a-java-destination", - "connector-development/tutorials/profile-java-connector-memory", ], }, "connector-development/connector-specification-reference", @@ -310,7 +309,6 @@ const contributeToAirbyte = { label: "Resources", items: [ "contributing-to-airbyte/resources/pull-requests-handbook", - "contributing-to-airbyte/resources/code-style", "contributing-to-airbyte/resources/code-formatting", "contributing-to-airbyte/resources/qa-checks", "contributing-to-airbyte/resources/developing-locally", @@ -332,6 +330,11 @@ const deployAirbyte = { label: "On your local machine", id: "deploying-airbyte/local-deployment", }, + { + type: "doc", + label: "Using docker compose", + id: "deploying-airbyte/docker-compose", + }, { type: "doc", label: "On AWS EC2", @@ -479,11 +482,6 @@ module.exports = { "cloud/managing-airbyte-cloud/manage-connection-state", ], }, - { - type: "doc", - label: "Using PyAirbyte", - id: "using-airbyte/pyairbyte/getting-started", - }, { type: "category", label: "Workspace Management", @@ -507,6 +505,7 @@ module.exports = { }, items: [ "enterprise-setup/implementation-guide", + "enterprise-setup/api-access-config", "enterprise-setup/upgrading-from-community", ] }, @@ -570,6 +569,11 @@ module.exports = { type: "doc", id: "terraform-documentation", }, + { + type: "doc", + label: "Using PyAirbyte", + id: "using-airbyte/pyairbyte/getting-started", + }, understandingAirbyte, contributeToAirbyte, { @@ -602,6 +606,8 @@ module.exports = { type: "generated-index", }, items: [ + "release_notes/march_2024", + "release_notes/february_2024", "release_notes/january_2024", "release_notes/december_2023", "release_notes/november_2023", diff --git a/docusaurus/src/components/AirbyteLibConnectors.jsx b/docusaurus/src/components/AirbyteLibConnectors.jsx deleted file mode 100644 index 2a546bda679b4..0000000000000 --- a/docusaurus/src/components/AirbyteLibConnectors.jsx +++ /dev/null @@ -1,22 +0,0 @@ -export default function AirbyteLibConnectors({ - connectorsJSON, - }) { - const connectors = JSON.parse(connectorsJSON); - return -} - -function getRelativeDocumentationUrl(connector) { - // get the relative path from the the dockerRepository_oss (e.g airbyte/source-amazon-sqs -> /integrations/sources/amazon-sqs) - - const fullDockerImage = connector.dockerRepository_oss; - console.log(fullDockerImage); - const dockerImage = fullDockerImage.split("airbyte/")[1]; - - const [integrationType, ...integrationName] = dockerImage.split("-"); - - return `/integrations/${integrationType}s/${integrationName.join("-")}`; -} \ No newline at end of file diff --git a/docusaurus/src/components/AirbyteLibDefinitions.jsx b/docusaurus/src/components/AirbyteLibDefinitions.jsx deleted file mode 100644 index e08462f8bfa49..0000000000000 --- a/docusaurus/src/components/AirbyteLibDefinitions.jsx +++ /dev/null @@ -1,17 +0,0 @@ -import React from 'react'; - -// Add additional modules here -import main_docs from "../../../airbyte-lib/docs/generated/airbyte_lib.html"; -import caches_docs from "../../../airbyte-lib/docs/generated/airbyte_lib/caches.html"; - -const docs = { - "airbyte_lib": main_docs, - "airbyte_lib.caches": caches_docs, -} - - -export default function AirbyteLibDefinitions({ module }) { - return <> -
    - -} diff --git a/docusaurus/src/components/AirbyteLibExample.jsx b/docusaurus/src/components/AirbyteLibExample.jsx deleted file mode 100644 index 403c80d99270a..0000000000000 --- a/docusaurus/src/components/AirbyteLibExample.jsx +++ /dev/null @@ -1,52 +0,0 @@ -import React, { useMemo } from "react"; -import { JSONSchemaFaker } from "json-schema-faker"; -import CodeBlock from '@theme/CodeBlock'; - -/** - * Generate a fake config based on the spec. - * - * As our specs are not 100% consistent, errors may occur. - * Try to generate a few times before giving up. - */ -function generateFakeConfig(spec) { - let tries = 5; - while (tries > 0) { - try { - return JSON.stringify(JSONSchemaFaker.generate(spec), null, 2) - } - catch (e) { - tries--; - } - } - return "{ ... }"; -} - -export const AirbyteLibExample = ({ - specJSON, - connector, -}) => { - const spec = useMemo(() => JSON.parse(specJSON), [specJSON]); - const fakeConfig = useMemo(() => generateFakeConfig(spec), [spec]); - return <> -

    - Install the Python library via: -

    - {"pip install airbyte-lib"} -

    Then, execute a sync by loading the connector like this:

    - {`import airbyte_lib as ab - -config = ${fakeConfig} - -result = ab.get_source( - "${connector}", - config=config, -).read() - -for record in result.cache.streams["my_stream:name"]: - print(record)`} -

    You can find more information in the airbyte_lib quickstart guide.

    - ; -}; diff --git a/docusaurus/src/components/ConnectorRegistry.jsx b/docusaurus/src/components/ConnectorRegistry.jsx index 079acedaa99dc..b698a38e3a5e6 100644 --- a/docusaurus/src/components/ConnectorRegistry.jsx +++ b/docusaurus/src/components/ConnectorRegistry.jsx @@ -2,9 +2,7 @@ import React from "react"; import { useEffect, useState } from "react"; import styles from "./ConnectorRegistry.module.css"; - -const registry_url = - "https://connectors.airbyte.com/files/generated_reports/connector_registry_report.json"; +import { REGISTRY_URL } from "../connector_registry"; const iconStyle = { maxWidth: 25 }; @@ -35,7 +33,7 @@ export default function ConnectorRegistry({ type }) { const [registry, setRegistry] = useState([]); useEffect(() => { - fetchCatalog(registry_url, setRegistry); + fetchCatalog(REGISTRY_URL, setRegistry); }, []); if (registry.length === 0) return
    {`Loading ${type}s...`}
    ; diff --git a/docusaurus/src/components/ManifestYamlDefinitions.jsx b/docusaurus/src/components/ManifestYamlDefinitions.jsx index c8b018becc533..90ae2b7665829 100644 --- a/docusaurus/src/components/ManifestYamlDefinitions.jsx +++ b/docusaurus/src/components/ManifestYamlDefinitions.jsx @@ -10,19 +10,13 @@ function Description({ text }) { function Examples({ examples }) { if (!examples) return null; - if (examples.length === 1) { - return <>Example: -
    -        {JSON.stringify(examples[0], null, 2)}
    -      
    - ; - } - return <>Examples: + return <> + {examples.length === 1 ? "Example:" : "Examples:"} {examples.map((example, index) =>
    -      {JSON.stringify(example, null, 2)}
    +      {typeof example === "string" ? example : JSON.stringify(example, null, 2)}
         
    )} - ; + } function Name({ name, definition }) { @@ -91,5 +85,17 @@ export default function ManifestYamlDefinitions() { } )} + + Interpolation filters +

    The following filters can be called as part of jinja expressions, for example like this: {"{{ 1 | string }}"}.

    + {schema.interpolation.filters.map((filter) => + {filter.title} + + {Object.keys(filter.arguments).length > 0 && <>Arguments:
      {Object.entries(filter.arguments).map(([name, argument]) =>
    • + {name}: {argument} +
    • )} +
    } + +
    )} } diff --git a/docusaurus/src/components/PyAirbyteConnectors.jsx b/docusaurus/src/components/PyAirbyteConnectors.jsx new file mode 100644 index 0000000000000..6060a9d5d0b29 --- /dev/null +++ b/docusaurus/src/components/PyAirbyteConnectors.jsx @@ -0,0 +1,22 @@ +export default function PyAirbyteConnectors({ + connectorsJSON, + }) { + const connectors = JSON.parse(connectorsJSON); + return +} + +function getRelativeDocumentationUrl(connector) { + // get the relative path from the the dockerRepository_oss (e.g airbyte/source-amazon-sqs -> /integrations/sources/amazon-sqs) + + const fullDockerImage = connector.dockerRepository_oss; + console.log(fullDockerImage); + const dockerImage = fullDockerImage.split("airbyte/")[1]; + + const [integrationType, ...integrationName] = dockerImage.split("-"); + + return `/integrations/${integrationType}s/${integrationName.join("-")}`; +} diff --git a/docusaurus/src/components/PyAirbyteExample.jsx b/docusaurus/src/components/PyAirbyteExample.jsx new file mode 100644 index 0000000000000..53424e79d051a --- /dev/null +++ b/docusaurus/src/components/PyAirbyteExample.jsx @@ -0,0 +1,52 @@ +import React, { useMemo } from "react"; +import { JSONSchemaFaker } from "json-schema-faker"; +import CodeBlock from '@theme/CodeBlock'; + +/** + * Generate a fake config based on the spec. + * + * As our specs are not 100% consistent, errors may occur. + * Try to generate a few times before giving up. + */ +function generateFakeConfig(spec) { + let tries = 5; + while (tries > 0) { + try { + return JSON.stringify(JSONSchemaFaker.generate(spec), null, 2) + } + catch (e) { + tries--; + } + } + return "{ ... }"; +} + +export const PyAirbyteExample = ({ + specJSON, + connector, +}) => { + const spec = useMemo(() => JSON.parse(specJSON), [specJSON]); + const fakeConfig = useMemo(() => generateFakeConfig(spec), [spec]); + return <> +

    + Install the Python library via: +

    + {"pip install airbyte"} +

    Then, execute a sync by loading the connector like this:

    + {`import airbyte as ab + +config = ${fakeConfig} + +result = ab.get_source( + "${connector}", + config=config, +).read() + +for record in result.cache.streams["my_stream:name"]: + print(record)`} +

    You can find more information in the airbyte_lib quickstart guide.

    + ; +}; diff --git a/docusaurus/src/connector_registry.js b/docusaurus/src/connector_registry.js index 013650dc8a47c..530a5782711c3 100644 --- a/docusaurus/src/connector_registry.js +++ b/docusaurus/src/connector_registry.js @@ -9,8 +9,9 @@ const fetchCatalog = async () => { }; module.exports = { + REGISTRY_URL, catalog: fetchCatalog(), isPypiConnector: (connector) => { return Boolean(connector.remoteRegistries_oss?.pypi?.enabled); - } -} + }, +}; diff --git a/docusaurus/src/remark/connectorList.js b/docusaurus/src/remark/connectorList.js index c6fffd88d8679..e39767f8bfee2 100644 --- a/docusaurus/src/remark/connectorList.js +++ b/docusaurus/src/remark/connectorList.js @@ -7,7 +7,7 @@ const plugin = () => { const registry = await catalog; visit(ast, "mdxJsxFlowElement", (node) => { - if (node.name !== "AirbyteLibConnectors") return; + if (node.name !== "PyAirbyteConnectors") return; const connectors = registry.filter(isPypiConnector); diff --git a/docusaurus/src/remark/specDecoration.js b/docusaurus/src/remark/specDecoration.js index 0474e252764b4..ff87476ab7aa3 100644 --- a/docusaurus/src/remark/specDecoration.js +++ b/docusaurus/src/remark/specDecoration.js @@ -4,7 +4,7 @@ const { isDocsPage, getRegistryEntry } = require("./utils"); const plugin = () => { const transformer = async (ast, vfile) => { - await injectDefaultAirbyteLibSection(vfile, ast); + await injectDefaultPyAirbyteSection(vfile, ast); await injectSpecSchema(ast); }; return transformer; @@ -13,7 +13,7 @@ const plugin = () => { async function injectSpecSchema(ast) { const registry = await catalog; visit(ast, "mdxJsxFlowElement", (node) => { - if (node.name !== "SpecSchema" && node.name !== "AirbyteLibExample") return; + if (node.name !== "SpecSchema" && node.name !== "PyAirbyteExample") return; const connectorName = node.attributes.find( (attr) => attr.name === "connector" @@ -29,7 +29,7 @@ async function injectSpecSchema(ast) { }); } -async function injectDefaultAirbyteLibSection(vfile, ast) { +async function injectDefaultPyAirbyteSection(vfile, ast) { const registryEntry = await getRegistryEntry(vfile); const docsPageInfo = isDocsPage(vfile); @@ -37,7 +37,7 @@ async function injectDefaultAirbyteLibSection(vfile, ast) { !docsPageInfo.isTrueDocsPage || !registryEntry || !isPypiConnector(registryEntry) || - vfile.value.includes("## Usage with airbyte-lib") + vfile.value.includes("## Usage with PyAirbyte") ) { return; } @@ -71,7 +71,7 @@ async function injectDefaultAirbyteLibSection(vfile, ast) { }); if (!added) { throw new Error( - `Could not find a changelog heading in ${vfile.path} to add the default airbyte-lib section. This connector won't have a reference section. Make sure there is either a ## Changelog section or add a manual reference section.` + `Could not find a changelog heading in ${vfile.path} to add the default PyAirbyte section. This connector won't have a reference section. Make sure there is either a ## Changelog section or add a manual reference section.` ); } } diff --git a/docusaurus/src/theme/MDXComponents/index.js b/docusaurus/src/theme/MDXComponents/index.js index 88510a7ffa09b..fe8efacba063d 100644 --- a/docusaurus/src/theme/MDXComponents/index.js +++ b/docusaurus/src/theme/MDXComponents/index.js @@ -6,7 +6,7 @@ import { FieldAnchor } from "@site/src/components/FieldAnchor"; import { HideInUI } from "@site/src/components/HideInUI"; import { HeaderDecoration } from "@site/src/components/HeaderDecoration"; import { SpecSchema } from "@site/src/components/SpecSchema"; -import { AirbyteLibExample } from "@site/src/components/AirbyteLibExample"; +import { PyAirbyteExample } from "@site/src/components/PyAirbyteExample"; import { ProductInformation } from "@site/src/components/ProductInformation"; import { Arcade } from "@site/src/components/Arcade"; @@ -19,6 +19,6 @@ export default { HideInUI, HeaderDecoration, SpecSchema, - AirbyteLibExample, + PyAirbyteExample, ProductInformation, }; diff --git a/gradle.properties b/gradle.properties index c43254b47004d..72c35a2a7a8e7 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,4 +1,4 @@ -VERSION=0.52.0 +VERSION=0.57.3 # NOTE: some of these values are overwritten in CI! # NOTE: if you want to override this for your local machine, set overrides in ~/.gradle/gradle.properties diff --git a/pyproject.toml b/pyproject.toml index 1977450d14a5b..f185e94c449da 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,7 +14,7 @@ black = "~22.3.0" [tool.black] line-length = 140 target-version = ["py37"] -extend-exclude = "(build|integration_tests|unit_tests|generated|airbyte-lib)" +extend-exclude = "(build|integration_tests|unit_tests|generated)" [tool.coverage.report] fail_under = 0 @@ -50,6 +50,7 @@ extend-ignore = [ "E231", # Bad trailing comma (conflicts with Black) "E501", # line too long (conflicts with Black) "W503", # line break before binary operator (conflicts with Black) + "F811", # TODO: ella fix after pflake8 version update ] [tool.isort] @@ -63,7 +64,6 @@ force_grid_wrap = 0 use_parentheses = true skip_glob = [ "**/connector_builder/generated/**", - "airbyte-lib" # Handled by Ruff, with some conflicting rules # TODO: Remove this after we move to Ruff. Ruff is mono-repo-aware and # correctly handles first-party imports in subdirectories. ] diff --git a/run-ab-platform.sh b/run-ab-platform.sh index 1c447a612ff30..dada957866a92 100755 --- a/run-ab-platform.sh +++ b/run-ab-platform.sh @@ -1,6 +1,6 @@ #!/bin/bash -VERSION=0.52.0 +VERSION=0.57.3 # Run away from anything even a little scary set -o nounset # -u exit if a variable is not set set -o errexit # -f exit for any command failure" @@ -31,6 +31,7 @@ Help() echo -e " -h --help Print this Help." echo -e " -x --debug Verbose mode." echo -e " -b --background Run docker compose up in detached mode." + echo -e " --dnt Disable telemetry collection" echo -e "" } @@ -39,13 +40,142 @@ Help() docker_compose_debug_yaml="docker-compose.debug.yaml" dot_env=".env" dot_env_dev=".env.dev" - flags="flags.yml" - temporal_yaml="temporal/dynamicconfig/development.yaml" -# any string is an array to POSIX shell. Space seperates values + flags="flags.yml" + temporal_yaml="temporal/dynamicconfig/development.yaml" +# any string is an array to POSIX shell. Space separates values all_files="$docker_compose_yaml $docker_compose_debug_yaml $dot_env $dot_env_dev $flags $temporal_yaml" base_github_url="https://raw.githubusercontent.com/airbytehq/airbyte-platform/v$VERSION/" +# event states are used for telemetry data +readonly eventStateStarted="started" +readonly eventStateFailed="failed" +readonly eventStateSuccess="succeeded" + +# event types are used for telemetry data +readonly eventTypeDownload="download" +readonly eventTypeInstall="install" +readonly eventTypeRefresh="refresh" + +telemetrySuccess=false +telemetrySessionULID="" +telemetryUserULID="" +telemetryEnabled=true +# telemetry requires curl to be installed +if ! command -v curl > /dev/null; then + telemetryEnabled=false +fi + +# TelemetryConfig configures the telemetry variables and will disable telemetry if it cannot be configured. +TelemetryConfig() +{ + # only attempt to do anything if telemetry is not disabled + if $telemetryEnabled; then + telemetrySessionULID=$(curl -s http://ulid.abapp.cloud/ulid | xargs) + + if [[ $telemetrySessionULID = "" || ${#telemetrySessionULID} -ne 26 ]]; then + # if we still don't have a ulid, give up on telemetry data + telemetryEnabled=false + return + fi + + # if we have an analytics file, use it + if test -f ~/.airbyte/analytics.yml; then + telemetryUserULID=$(cat ~/.airbyte/analytics.yml | grep "anonymous_user_id" | cut -d ":" -f2 | xargs) + fi + # if the telemtery ulid is still undefined, attempt to create it and write the analytics file + if [[ $telemetryUserULID = "" || ${#telemetryUserULID} -ne 26 ]]; then + telemetryUserULID=$(curl -s http://ulid.abapp.cloud/ulid | xargs) + if [[ $telemetryUserULID = "" || ${#telemetryUserULID} -ne 26 ]]; then + # if we still don't have a ulid, give up on telemetry data + telemetryEnabled=false + else + # we created a new ulid, write it out + echo "Thanks you for using Airbyte!" + echo "Anonymous usage reporting is currently enabled. For more information, please see https://docs.airbyte.com/telemetry" + mkdir -p ~/.airbyte + cat > ~/.airbyte/analytics.yml </dev/null | grep server | cut -d ":" -f2 | xargs) + if [ "$webappState" = "running" ]; then + TelemetrySend $eventStateSuccess $eventTypeInstall + break + fi + sleep 1 + done + + TelemetrySend "failed" "install" "webapp was not running within 600 seconds" +} + +readonly telemetryKey="kpYsVGLgxEqD5OuSZAQ9zWmdgBlyiaej" +readonly telemetryURL="https://api.segment.io/v1/track" +TelemetrySend() +{ + if $telemetrySuccess; then + # due to how traps work, we don't want to send a failure for exiting docker after we sent a success + return + fi + + if $telemetryEnabled; then + # start, failed, success + local state=$1 + # install, uninstall + local event=$2 + # optional error + local err=${3:-""} + + local now=$(date -u "+%Y-%m-%dT%H:%M:%SZ") + local body=$(cat << EOL +{ + "anonymousId":"$telemetryUserULID", + "event":"$event", + "properties": { + "deployment_method":"run_ab", + "session_id":"$telemetrySessionULID", + "state":"$state", + "os":"$OSTYPE", + "script_version":"$VERSION", + "error":"$err" + }, + "timestamp":"$now", + "writeKey":"$telemetryKey" +} +EOL +) + curl -s -o /dev/null -H "Content-Type: application/json" -X POST -d "$body" $telemetryURL + if [[ $state = "success" ]]; then { + telemetrySuccess=true + } + fi + fi +} + +TelemetryConfig + ############################################################ # Download # ############################################################ @@ -95,27 +225,53 @@ this_file_directory=$(dirname $0) # Run this from the / directory because we assume relative paths cd ${this_file_directory} +args=$@ +# Parse the arguments for specific flags before parsing for actions. +for argument in $args; do + case $argument in + -h | --help) + Help + exit + ;; + -b | --background) + dockerDetachedMode="-d" + ;; + --dnt) + telemetryEnabled=false + ;; + esac +done -for argument in $@; do +for argument in $args; do case $argument in -d | --download) + TelemetrySend $eventStateStarted $eventTypeDownload + trap 'TelemetrySend $eventStateFailed $eventTypeDownload "sigint"' SIGINT + trap 'TelemetrySend $eventStateFailed $eventTypeDownload "sigterm"' SIGTERM Download + TelemetrySend $eventStateSuccess $eventTypeDownload exit ;; -r | --refresh) + TelemetrySend $eventStateStarted $eventTypeRefresh + trap 'TelemetrySend $eventStateFailed $eventTypeRefresh "sigint"' SIGINT + trap 'TelemetrySend $eventStateFailed $eventTypeRefresh "sigterm"' SIGTERM DeleteLocalAssets Download - exit - ;; - -h | --help) - Help + TelemetrySend $eventStateSuccess $eventTypeRefresh exit ;; -x | --debug) set -o xtrace # -x display every line before execution; enables PS4 ;; + -h | --help) + # noop, this was checked in the previous for loop + ;; -b | --background) - dockerDetachedMode="-d" + # noop, this was checked in the previous for loop + ;; + --dnt) + # noop, this was checked in the previous for loop ;; *) echo "$argument is not a known command." @@ -124,9 +280,11 @@ for argument in $@; do exit ;; esac - shift done +TelemetrySend $eventStateStarted $eventTypeInstall +trap 'TelemetrySend $eventStateFailed $eventTypeInstall "sigint"' SIGINT +trap 'TelemetrySend $eventStateFailed $eventTypeInstall "sigterm"' SIGTERM ########## Pointless Banner for street cred ########## # Make sure the console is huuuge @@ -148,12 +306,13 @@ fi ########## Dependency Check ########## if ! docker compose version >/dev/null 2>/dev/null; then echo -e "$red_text""docker compose v2 not found! please install docker compose!""$default_text" + TelemetrySend $eventStateFailed $eventTypeInstall "docker compose not installed" exit 1 fi Download -########## Source Envionmental Variables ########## +########## Source Environmental Variables ########## for file in $dot_env $dot_env_dev; do echo -e "$blue_text""Loading Shell Variables from $file...""$default_text" @@ -162,9 +321,13 @@ done ########## Start Docker ########## - echo echo -e "$blue_text""Starting Docker Compose""$default_text" +if [ -z "$dockerDetachedMode" ]; then + # if running in docker-detach mode, kick off a background task as `docker compose up` will be a blocking + # call and we'll have no way to determine when we've successfully started. + TelemetryDockerUp & +fi docker compose up $dockerDetachedMode @@ -172,6 +335,9 @@ docker compose up $dockerDetachedMode if test $? -ne 0; then echo -e "$red_text""Docker compose failed. If you are seeing container conflicts""$default_text" echo -e "$red_text""please consider removing old containers""$default_text" + TelemetrySend $eventStateFailed $eventTypeInstall "docker compose failed" +else + TelemetrySend $eventStateSuccess $eventTypeInstall fi ########## Ending Docker ########## diff --git a/settings.gradle b/settings.gradle index c373a535e49cf..8b568ab1d84ab 100644 --- a/settings.gradle +++ b/settings.gradle @@ -19,11 +19,10 @@ pluginManagement { // as much information as possible. plugins { id "com.gradle.enterprise" version "3.15.1" - id 'com.github.burrunan.s3-build-cache' version "1.5" + id 'com.github.burrunan.s3-build-cache' version "1.8.1" } -ext.isCiServer = System.getenv().containsKey("CI") -ext.isAirbyteCI = System.getenv().containsKey("RUN_IN_AIRBYTE_CI") +final boolean isCI = System.getenv().containsKey("CI") || System.getenv().containsKey("RUN_IN_AIRBYTE_CI") dependencyResolutionManagement { // Set FAIL_ON_PROJECT_REPOS to ensure there are no more `repositories { ... }` blocks than necessary. @@ -115,27 +114,30 @@ gradleEnterprise { buildScan { termsOfServiceUrl = "https://gradle.com/terms-of-service" termsOfServiceAgree = "yes" - uploadInBackground = (!isCiServer && !isAirbyteCI) // Disable in CI or scan URLs may not work. + uploadInBackground = !isCI // Disable in CI or scan URLs may not work. buildScanPublished { PublishedBuildScan scan -> file("scan-journal.log") << "${new Date()} - ${scan.buildScanId} - ${scan.buildScanUri}\n" } } } -if (isCiServer || isAirbyteCI) { +if (isCI) { buildCache { local { // Local build cache is dangerous as it might produce inconsistent results // in case developer modifies files while the build is running - enabled = isAirbyteCI + enabled = true + push = true } remote(com.github.burrunan.s3cache.AwsS3BuildCache) { - region = 'us-west-2' // close to dagger runners + enabled = !System.getenv().getOrDefault("S3_BUILD_CACHE_ACCESS_KEY_ID", "").isEmpty() + push = true + // The 'us-west-2' region was chosen because it was close to our v0.6 dagger runners + // we used to maintain in AWS, prior to migrating to v0.9 and to github runners. + // This may in fact no longer be the best choice, who knows? It's not terrible, at least. + region = 'us-west-2' bucket = 'ab-ci-cache' prefix = "${System.getProperty('s3BuildCachePrefix', 'connectors')}-ci-cache/" - push = isAirbyteCI - // Sometimes the env var is set, but with an empty value. Ignore this case. - enabled = !System.getenv().getOrDefault("S3_BUILD_CACHE_ACCESS_KEY_ID", "").isEmpty() } } } @@ -147,7 +149,9 @@ def cdkPath = rootDir.toPath().resolve('airbyte-cdk/java/airbyte-cdk') cdkPath.eachDir { dir -> def buildFiles = file(dir).list { file, name -> name == "build.gradle" } if (buildFiles.length == 1) { - include ":airbyte-cdk:java:airbyte-cdk:${dir.getFileName()}" + def path = ":airbyte-cdk:java:airbyte-cdk:${dir.getFileName()}" + include path + project(path).name = "airbyte-cdk-${dir.getFileName()}" } } diff --git a/tools/README.md b/tools/README.md deleted file mode 100644 index f14fadaf8c875..0000000000000 --- a/tools/README.md +++ /dev/null @@ -1,10 +0,0 @@ -# Tools - -Contains various tools (usually bash scripts) to improve quality of life or the build system. - -## Releasing a new version -``` -Trigger the Github Action Release Open Source Airbyte (https://github.com/airbytehq/airbyte/actions/workflows/release-airbyte-os.yml) -# Merge PR created by the Github Action -# The [Create Release github action](https://github.com/airbytehq/airbyte/blob/master/.github/workflows/create-release.yml) should be automatically triggered by this merge, which will automatically create a new release of OSS Airbyte. -``` diff --git a/tools/gradle/pmd/rules.xml b/tools/gradle/pmd/rules.xml deleted file mode 100644 index 1ffcab27a517e..0000000000000 --- a/tools/gradle/pmd/rules.xml +++ /dev/null @@ -1,187 +0,0 @@ - - - - PMD Rules for Airbyte - - - .*/generated/.* - .*/airbyte-integrations/.* - .*/test-integration/.* - .*/airbyte-cdk/.* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tools/integrations/manage.sh b/tools/integrations/manage.sh deleted file mode 100755 index 7e27f33771485..0000000000000 --- a/tools/integrations/manage.sh +++ /dev/null @@ -1,374 +0,0 @@ -#!/usr/bin/env bash - -set -e -set -x - -. tools/lib/lib.sh - -# If you are looking at this file because you find yourself needing to publish a connector image manually, you might not need to do all of this! -# If the connector you are publishing is a python connector (e.g. not using our base images), you can do the following: -# -# # NAME="source-foo"; VERSION="1.2.3" -# -# git pull -# -# cd airbyte-integrations/connectors/$NAME -# -# docker buildx build . --platform "linux/amd64,linux/arm64" --tag airbyte/$NAME:latest --push -# docker buildx build . --platform "linux/amd64,linux/arm64" --tag airbyte/$NAME:$VERSION --push - - -USAGE=" -Usage: $(basename "$0") -For publish, if you want to push the spec to the spec cache, provide a path to a service account key file that can write to the cache. -Available commands: - scaffold - test - build [] - publish [] [--publish_spec_to_cache] [--publish_spec_to_cache_with_key_file ] [--pre_release] - publish_external -" - -# these filenames must match DEFAULT_SPEC_FILE and CLOUD_SPEC_FILE in GcsBucketSpecFetcher.java -default_spec_file="spec.json" -cloud_spec_file="spec.cloud.json" - -_check_tag_exists() { - DOCKER_CLI_EXPERIMENTAL=enabled docker manifest inspect "$1" > /dev/null -} - -_error_if_tag_exists() { - if _check_tag_exists "$1"; then - error "You're trying to push a version that was already released ($1). Make sure you bump it up." - fi -} - -cmd_scaffold() { - echo "Scaffolding connector" - ( - cd airbyte-integrations/connector-templates/generator && - ./generate.sh "$@" - ) -} - -cmd_build() { - local path=$1; shift || error "Missing target (root path of integration) $USAGE" - [ -d "$path" ] || error "Path must be the root path of the integration" - - local run_tests=$1; shift || run_tests=true - - echo "Building $path" - # Note that we are only building (and testing) once on this build machine's architecture - # Learn more @ https://github.com/airbytehq/airbyte/pull/13004 - ./gradlew --no-daemon --scan "$(_to_gradle_path "$path" clean)" - - if [ "$run_tests" = false ] ; then - echo "Building and skipping unit tests + integration tests..." - ./gradlew --no-daemon --scan "$(_to_gradle_path "$path" build)" -x check - else - echo "Building and running unit tests + integration tests..." - ./gradlew --no-daemon --scan "$(_to_gradle_path "$path" build)" - - if test "$path" == "airbyte-integrations/bases/base-normalization"; then - export RANDOM_TEST_SCHEMA="true" - ./gradlew --no-daemon --scan :airbyte-integrations:bases:base-normalization:assemble - fi - - ./gradlew --no-daemon --scan "$(_to_gradle_path "$path" integrationTest)" - fi -} - -# Experimental version of the above for a new way to build/tag images -cmd_build_experiment() { - local path=$1; shift || error "Missing target (root path of integration) $USAGE" - [ -d "$path" ] || error "Path must be the root path of the integration" - - echo "Building $path" - ./gradlew --no-daemon --scan "$(_to_gradle_path "$path" clean)" - ./gradlew --no-daemon --scan "$(_to_gradle_path "$path" build)" - - # After this happens this image should exist: "image_name:dev" - # Re-tag with CI candidate label - local image_name; image_name=$(_get_docker_image_name "$path/Dockerfile") - local image_version; image_version=$(_get_docker_image_version "$path/Dockerfile") - local image_candidate_tag; image_candidate_tag="$image_version-candidate-$PR_NUMBER" - - # If running via the bump-build-test-connector job, re-tag gradle built image following candidate image pattern - if [[ "$GITHUB_JOB" == "bump-build-test-connector" ]]; then - docker tag "$image_name:dev" "$image_name:$image_candidate_tag" - # TODO: docker push "$image_name:$image_candidate_tag" - fi -} - -cmd_test() { - local path=$1; shift || error "Missing target (root path of integration) $USAGE" - [ -d "$path" ] || error "Path must be the root path of the integration" - - # TODO: needs to know to use alternate image tag from cmd_build_experiment - echo "Running integration tests..." - ./gradlew --no-daemon --scan "$(_to_gradle_path "$path" integrationTest)" -} - -cmd_publish() { - local path=$1; shift || error "Missing target (root path of integration) $USAGE" - [ -d "$path" ] || error "Path must be the root path of the integration" - - local run_tests=$1; shift || run_tests=true - local publish_spec_to_cache - local pre_release - local spec_cache_writer_sa_key_file - - while [ $# -ne 0 ]; do - case "$1" in - --publish_spec_to_cache) - publish_spec_to_cache=true - shift 1 - ;; - --pre_release) - pre_release=true - shift 1 - ;; - --publish_spec_to_cache_with_key_file) - publish_spec_to_cache=true - spec_cache_writer_sa_key_file="$2" - shift 2 - ;; - *) - error "Unknown option: $1" - ;; - esac - done - - if [[ ! $path =~ "connectors" ]] - then - # Do not publish spec to cache in case this is not a connector - publish_spec_to_cache=false - fi - - # setting local variables for docker image versioning - local image_name; image_name=$(_get_docker_image_name "$path"/Dockerfile) - local image_version; image_version=$(_get_docker_image_version "$path"/Dockerfile "$pre_release") - local versioned_image=$image_name:$image_version - local latest_image="$image_name" # don't include ":latest", that's assumed here - local build_arch="linux/amd64,linux/arm64" - - # learn about this version of Docker - echo "--- docker info ---" - docker --version - docker buildx version - - # Install docker emulators - # TODO: Don't run this command on M1 macs locally (it won't work and isn't needed) - apt-get update && apt-get install -y qemu-user-static - - # log into docker - if test -z "${DOCKER_HUB_USERNAME}"; then - echo 'DOCKER_HUB_USERNAME not set.'; - exit 1; - fi - - if test -z "${DOCKER_HUB_PASSWORD}"; then - echo 'DOCKER_HUB_PASSWORD for docker user not set.'; - exit 1; - fi - - set +x - DOCKER_TOKEN=$(curl -s -H "Content-Type: application/json" -X POST -d '{"username": "'${DOCKER_HUB_USERNAME}'", "password": "'${DOCKER_HUB_PASSWORD}'"}' https://hub.docker.com/v2/users/login/ | jq -r .token) - set -x - - echo "image_name $image_name" - echo "versioned_image $versioned_image" - - if [ "$pre_release" == "true" ] - then - echo "will skip updating latest_image $latest_image tag due to pre_release" - else - echo "latest_image $latest_image" - fi - - # before we start working sanity check that this version has not been published yet, so that we do not spend a lot of - # time building, running tests to realize this version is a duplicate. - _error_if_tag_exists "$versioned_image" - - # building the connector - if [ "$path" != "airbyte-cdk/python" ]; then - # The python CDK will already have been built and tested earlier in the github workflow. - cmd_build "$path" "$run_tests" - fi - - # in case curing the build / tests someone this version has been published. - _error_if_tag_exists "$versioned_image" - - if [[ "airbyte/normalization" == "${image_name}" ]]; then - echo "Publishing normalization images (version: $versioned_image)" - GIT_REVISION=$(git rev-parse HEAD) - - # We use a buildx docker container when building multi-stage builds from one docker compose file - # This works because all the images depend only on already public images - docker buildx create --name connector-buildx --driver docker-container --use - - # Note: "buildx bake" needs to be run within the directory - local original_pwd=$PWD - cd airbyte-integrations/bases/base-normalization - - VERSION=$image_version GIT_REVISION=$GIT_REVISION docker buildx bake \ - --set "*.platform=$build_arch" \ - -f docker-compose.build.yaml \ - --push - - if [ "$pre_release" != "true" ]; then - VERSION=latest GIT_REVISION=$GIT_REVISION docker buildx bake \ - --set "*.platform=$build_arch" \ - -f docker-compose.build.yaml \ - --push - fi - - docker buildx rm connector-buildx - - cd $original_pwd - else - # We have to go arch-by-arch locally (see https://github.com/docker/buildx/issues/59 for more info) due to our base images (e.g. airbyte-integrations/bases/base-java) - # Alternative local approach @ https://github.com/docker/buildx/issues/301#issuecomment-755164475 - # We need to use the regular docker buildx driver (not docker container) because we need this intermediate contaiers to be available for later build steps - - - echo Installing arm64 docker emulation - docker run --privileged --rm tonistiigi/binfmt --install arm64 - - for arch in $(echo $build_arch | sed "s/,/ /g") - do - # These images aren't needed for the CDK - if [ "$path" != "airbyte-cdk/python" ]; then - echo "building base images for $arch" - docker buildx build -t airbyte/integration-base-java:dev --platform $arch --load airbyte-integrations/bases/base-java - docker buildx build -t airbyte/integration-base:dev --platform $arch --load airbyte-integrations/bases/base - fi - - # For a short while (https://github.com/airbytehq/airbyte/pull/25034), destinations rely on the normalization image to build - # Thanks to gradle, destinstaions which need normalization will already have built base-normalization's "build" artifacts - if [[ "$image_name" == *"destination-"* ]]; then - if [ -f "airbyte-integrations/bases/base-normalization/build/sshtunneling.sh" ]; then - docker buildx build -t airbyte/normalization:dev --platform $arch --load airbyte-integrations/bases/base-normalization - fi - fi - - local arch_versioned_image=$image_name:`echo $arch | sed "s/\//-/g"`-$image_version - echo "Publishing new version ($arch_versioned_image) from $path" - docker buildx build -t $arch_versioned_image --platform $arch --push $path - docker manifest create $versioned_image --amend $arch_versioned_image - - if [ "$pre_release" != "true" ]; then - docker manifest create $latest_image --amend $arch_versioned_image - fi - - done - - docker manifest push $versioned_image - docker manifest rm $versioned_image - - if [ "$pre_release" != "true" ]; then - docker manifest push $latest_image - docker manifest rm $latest_image - fi - - # delete the temporary image tags made with arch_versioned_image - sleep 10 - for arch in $(echo $build_arch | sed "s/,/ /g") - do - local arch_versioned_tag=`echo $arch | sed "s/\//-/g"`-$image_version - echo "deleting temporary tag: ${image_name}/tags/${arch_versioned_tag}" - TAG_URL="https://hub.docker.com/v2/repositories/${image_name}/tags/${arch_versioned_tag}/" # trailing slash is needed! - set +x - curl -X DELETE -H "Authorization: JWT ${DOCKER_TOKEN}" "$TAG_URL" - set -x - done - - fi - - # Checking if the image was successfully registered on DockerHub - # see the description of this PR to understand why this is needed https://github.com/airbytehq/airbyte/pull/11654/ - sleep 5 - - # To work for private repos we need a token as well - TAG_URL="https://hub.docker.com/v2/repositories/${image_name}/tags/${image_version}" - set +x - DOCKERHUB_RESPONSE_CODE=$(curl --silent --output /dev/null --write-out "%{http_code}" -H "Authorization: JWT ${DOCKER_TOKEN}" ${TAG_URL}) - set -x - if [[ "${DOCKERHUB_RESPONSE_CODE}" == "404" ]]; then - echo "Tag ${image_version} was not registered on DockerHub for image ${image_name}, please try to bump the version again." && exit 1 - fi - - if [[ "true" == "${publish_spec_to_cache}" ]]; then - echo "Publishing and writing to spec cache." - - # use service account key file is provided. - if [[ -n "${spec_cache_writer_sa_key_file}" ]]; then - echo "Using provided service account key" - gcloud auth activate-service-account --key-file "$spec_cache_writer_sa_key_file" - else - echo "Using environment gcloud" - fi - - publish_spec_files "$image_name" "$image_version" - else - echo "Publishing without writing to spec cache." - fi -} - -cmd_publish_external() { - local image_name=$1; shift || error "Missing target (image name) $USAGE" - # Get version from the command - local image_version=$1; shift || error "Missing target (image version) $USAGE" - - echo "image $image_name:$image_version" - echo "Publishing and writing to spec cache." - echo "Using environment gcloud" - - publish_spec_files "$image_name" "$image_version" -} - -generate_spec_file() { - local image_name=$1; shift || error "Missing target (image name)" - local image_version=$1; shift || error "Missing target (image version)" - local tmp_spec_file=$1; shift || error "Missing target (temp spec file name)" - local deployment_mode=$1; shift || error "Missing target (deployment mode)" - - docker run --env DEPLOYMENT_MODE="$deployment_mode" --rm "$image_name:$image_version" spec | \ - # 1. filter out any lines that are not valid json. - jq -R "fromjson? | ." | \ - # 2. grab any json that has a spec in it. - # 3. if there are more than one, take the first one. - # 4. if there are none, throw an error. - jq -s "map(select(.spec != null)) | map(.spec) | first | if . != null then . else error(\"no spec found\") end" \ - > "$tmp_spec_file" -} - -publish_spec_files() { - local image_name=$1; shift || error "Missing target (image name)" - local image_version=$1; shift || error "Missing target (image version)" - - # publish spec to cache. do so, by running get spec locally and then pushing it to gcs. - local tmp_default_spec_file; tmp_default_spec_file=$(mktemp) - local tmp_cloud_spec_file; tmp_cloud_spec_file=$(mktemp) - - # generate oss and cloud spec files - generate_spec_file "$image_name" "$image_version" "$tmp_default_spec_file" "OSS" - generate_spec_file "$image_name" "$image_version" "$tmp_cloud_spec_file" "CLOUD" - - gsutil cp "$tmp_default_spec_file" "gs://io-airbyte-cloud-spec-cache/specs/$image_name/$image_version/$default_spec_file" - if cmp --silent -- "$tmp_default_spec_file" "$tmp_cloud_spec_file"; then - echo "This connector has the same spec file for OSS and cloud" - else - echo "Uploading cloud specific spec file" - gsutil cp "$tmp_cloud_spec_file" "gs://io-airbyte-cloud-spec-cache/specs/$image_name/$image_version/$cloud_spec_file" - fi -} - -main() { - assert_root - - local cmd=$1; shift || error "Missing cmd $USAGE" - cmd_"$cmd" "$@" -} - -main "$@"